Diffstat (limited to 'testing/web-platform/tests/tools')
-rw-r--r--testing/web-platform/tests/tools/.coveragerc29
-rw-r--r--testing/web-platform/tests/tools/.gitignore17
-rw-r--r--testing/web-platform/tests/tools/META.yml4
-rw-r--r--testing/web-platform/tests/tools/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/certs/README.md99
-rw-r--r--testing/web-platform/tests/tools/certs/cacert.key30
-rw-r--r--testing/web-platform/tests/tools/certs/cacert.pem125
-rw-r--r--testing/web-platform/tests/tools/certs/config.json17
-rw-r--r--testing/web-platform/tests/tools/certs/web-platform.test.key28
-rw-r--r--testing/web-platform/tests/tools/certs/web-platform.test.pem133
-rw-r--r--testing/web-platform/tests/tools/ci/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/ci/azure/README.md2
-rw-r--r--testing/web-platform/tests/tools/ci/azure/affected_tests.yml29
-rw-r--r--testing/web-platform/tests/tools/ci/azure/checkout.yml4
-rw-r--r--testing/web-platform/tests/tools/ci/azure/com.apple.SafariTechnologyPreview.plist8
-rw-r--r--testing/web-platform/tests/tools/ci/azure/fyi_hook.yml23
-rw-r--r--testing/web-platform/tests/tools/ci/azure/install_certs.yml11
-rw-r--r--testing/web-platform/tests/tools/ci/azure/install_chrome.yml11
-rw-r--r--testing/web-platform/tests/tools/ci/azure/install_edge.yml61
-rw-r--r--testing/web-platform/tests/tools/ci/azure/install_firefox.yml9
-rw-r--r--testing/web-platform/tests/tools/ci/azure/install_fonts.yml7
-rw-r--r--testing/web-platform/tests/tools/ci/azure/install_safari.yml29
-rw-r--r--testing/web-platform/tests/tools/ci/azure/pip_install.yml6
-rw-r--r--testing/web-platform/tests/tools/ci/azure/publish_logs.yml7
-rw-r--r--testing/web-platform/tests/tools/ci/azure/sysdiagnose.yml13
-rw-r--r--testing/web-platform/tests/tools/ci/azure/system_info.yml4
-rw-r--r--testing/web-platform/tests/tools/ci/azure/tox_pytest.yml20
-rw-r--r--testing/web-platform/tests/tools/ci/azure/update_hosts.yml12
-rw-r--r--testing/web-platform/tests/tools/ci/azure/update_manifest.yml4
-rwxr-xr-xtesting/web-platform/tests/tools/ci/ci_built_diff.sh30
-rwxr-xr-xtesting/web-platform/tests/tools/ci/ci_resources_unittest.sh19
-rwxr-xr-xtesting/web-platform/tests/tools/ci/ci_tools_integration_test.sh23
-rwxr-xr-xtesting/web-platform/tests/tools/ci/ci_tools_unittest.sh36
-rwxr-xr-xtesting/web-platform/tests/tools/ci/ci_wptrunner_infrastructure.sh25
-rw-r--r--testing/web-platform/tests/tools/ci/commands.json73
-rwxr-xr-xtesting/web-platform/tests/tools/ci/epochs_update.sh58
-rwxr-xr-xtesting/web-platform/tests/tools/ci/interfaces_update.sh45
-rw-r--r--testing/web-platform/tests/tools/ci/jobs.py149
-rw-r--r--testing/web-platform/tests/tools/ci/make_hosts_file.py23
-rw-r--r--testing/web-platform/tests/tools/ci/manifest_build.py200
-rw-r--r--testing/web-platform/tests/tools/ci/regen_certs.py102
-rw-r--r--testing/web-platform/tests/tools/ci/requirements_build.txt5
-rw-r--r--testing/web-platform/tests/tools/ci/requirements_tc.txt4
-rwxr-xr-xtesting/web-platform/tests/tools/ci/run_tc.py424
-rwxr-xr-xtesting/web-platform/tests/tools/ci/taskcluster-run.py121
-rw-r--r--testing/web-platform/tests/tools/ci/tc/README.md243
-rw-r--r--testing/web-platform/tests/tools/ci/tc/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/ci/tc/decision.py404
-rw-r--r--testing/web-platform/tests/tools/ci/tc/download.py111
-rw-r--r--testing/web-platform/tests/tools/ci/tc/github_checks_output.py40
-rw-r--r--testing/web-platform/tests/tools/ci/tc/sink_task.py65
-rw-r--r--testing/web-platform/tests/tools/ci/tc/taskgraph.py171
-rw-r--r--testing/web-platform/tests/tools/ci/tc/tasks/test.yml558
-rw-r--r--testing/web-platform/tests/tools/ci/tc/testdata/epochs_daily_push_event.json460
-rw-r--r--testing/web-platform/tests/tools/ci/tc/testdata/master_push_event.json214
-rw-r--r--testing/web-platform/tests/tools/ci/tc/testdata/pr_event.json577
-rw-r--r--testing/web-platform/tests/tools/ci/tc/testdata/pr_event_tests_affected.json505
-rw-r--r--testing/web-platform/tests/tools/ci/tc/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/ci/tc/tests/test_decision.py56
-rw-r--r--testing/web-platform/tests/tools/ci/tc/tests/test_taskgraph.py148
-rw-r--r--testing/web-platform/tests/tools/ci/tc/tests/test_valid.py292
-rw-r--r--testing/web-platform/tests/tools/ci/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/ci/tests/test_jobs.py132
-rw-r--r--testing/web-platform/tests/tools/ci/update_built.py72
-rwxr-xr-xtesting/web-platform/tests/tools/ci/website_build.sh86
-rw-r--r--testing/web-platform/tests/tools/conftest.py15
-rw-r--r--testing/web-platform/tests/tools/docker/.bashrc4
-rw-r--r--testing/web-platform/tests/tools/docker/Dockerfile110
-rw-r--r--testing/web-platform/tests/tools/docker/README.md16
-rw-r--r--testing/web-platform/tests/tools/docker/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/docker/commands.json25
-rw-r--r--testing/web-platform/tests/tools/docker/frontend.py141
-rw-r--r--testing/web-platform/tests/tools/docker/requirements.txt2
-rwxr-xr-xtesting/web-platform/tests/tools/docker/retry.py56
-rw-r--r--testing/web-platform/tests/tools/docker/seccomp.json798
-rwxr-xr-xtesting/web-platform/tests/tools/docker/start.sh29
-rw-r--r--testing/web-platform/tests/tools/flake8.ini26
-rw-r--r--testing/web-platform/tests/tools/gitignore/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/gitignore/gitignore.py292
-rw-r--r--testing/web-platform/tests/tools/gitignore/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/gitignore/tests/test_gitignore.py113
-rw-r--r--testing/web-platform/tests/tools/lint/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/lint/commands.json3
-rw-r--r--testing/web-platform/tests/tools/lint/fnmatch.py40
-rw-r--r--testing/web-platform/tests/tools/lint/lint.py1144
-rw-r--r--testing/web-platform/tests/tools/lint/rules.py542
-rw-r--r--testing/web-platform/tests/tools/lint/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/lint/tests/base.py9
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/about_blank.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/broken.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/broken_ignored.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/a-ref.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/a.html4
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/a-ref.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/a.html4
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/support/a.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/support/tools/a.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/tools/a.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/a-ref.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/a.html4
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/support/a.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/tools/a.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/relative-testharness-interact.html5
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/relative-testharness.html4
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/selectors/a.html4
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/support/a.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/support/tools/a.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/tools/a.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/dependency.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/lint.ignore1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/okay.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/ref/absolute.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/ref/existent_relative-ref.html0
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/ref/existent_relative.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/ref/existent_root_relative.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/ref/non_existent_relative.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/ref/non_existent_root_relative.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/ref/same_file_empty.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/ref/same_file_path.html1
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/a.html4
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/a.js6
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/a.xhtml5
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/b.html4
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/tests/dir2/a.xhtml5
-rw-r--r--testing/web-platform/tests/tools/lint/tests/dummy/tests/relative-testharness-manual.html3
-rw-r--r--testing/web-platform/tests/tools/lint/tests/test_file_lints.py933
-rw-r--r--testing/web-platform/tests/tools/lint/tests/test_lint.py560
-rw-r--r--testing/web-platform/tests/tools/lint/tests/test_path_lints.py167
-rw-r--r--testing/web-platform/tests/tools/localpaths.py36
-rw-r--r--testing/web-platform/tests/tools/manifest/XMLParser.py151
-rw-r--r--testing/web-platform/tests/tools/manifest/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/manifest/catalog/xhtml.dtd2125
-rw-r--r--testing/web-platform/tests/tools/manifest/commands.json23
-rw-r--r--testing/web-platform/tests/tools/manifest/download.py207
-rw-r--r--testing/web-platform/tests/tools/manifest/item.py385
-rw-r--r--testing/web-platform/tests/tools/manifest/jsonlib.py139
-rw-r--r--testing/web-platform/tests/tools/manifest/log.py11
-rw-r--r--testing/web-platform/tests/tools/manifest/manifest.py449
-rw-r--r--testing/web-platform/tests/tools/manifest/requirements.txt1
-rw-r--r--testing/web-platform/tests/tools/manifest/sourcefile.py1144
-rw-r--r--testing/web-platform/tests/tools/manifest/testpaths.py112
-rw-r--r--testing/web-platform/tests/tools/manifest/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/manifest/tests/test_XMLParser.py56
-rw-r--r--testing/web-platform/tests/tools/manifest/tests/test_item.py160
-rw-r--r--testing/web-platform/tests/tools/manifest/tests/test_manifest.py310
-rw-r--r--testing/web-platform/tests/tools/manifest/tests/test_sourcefile.py911
-rw-r--r--testing/web-platform/tests/tools/manifest/tests/test_utils.py15
-rw-r--r--testing/web-platform/tests/tools/manifest/typedata.py336
-rwxr-xr-xtesting/web-platform/tests/tools/manifest/update.py105
-rw-r--r--testing/web-platform/tests/tools/manifest/utils.py93
-rw-r--r--testing/web-platform/tests/tools/manifest/vcs.py319
-rw-r--r--testing/web-platform/tests/tools/mypy.ini114
-rw-r--r--testing/web-platform/tests/tools/pytest.ini9
-rw-r--r--testing/web-platform/tests/tools/requirements_flake8.txt4
-rw-r--r--testing/web-platform/tests/tools/requirements_mypy.txt12
-rw-r--r--testing/web-platform/tests/tools/requirements_pytest.txt3
-rw-r--r--testing/web-platform/tests/tools/requirements_tests.txt5
-rw-r--r--testing/web-platform/tests/tools/runner/css/bootstrap-theme.min.css5
-rw-r--r--testing/web-platform/tests/tools/runner/css/bootstrap.min.css5
-rw-r--r--testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.eot  bin 0 -> 20335 bytes
-rw-r--r--testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.svg229
-rw-r--r--testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.ttf  bin 0 -> 41280 bytes
-rw-r--r--testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.woff  bin 0 -> 23320 bytes
-rw-r--r--testing/web-platform/tests/tools/runner/index.html235
-rw-r--r--testing/web-platform/tests/tools/runner/report.css43
-rw-r--r--testing/web-platform/tests/tools/runner/report.py308
-rw-r--r--testing/web-platform/tests/tools/runner/runner.css216
-rw-r--r--testing/web-platform/tests/tools/runner/runner.js910
-rw-r--r--testing/web-platform/tests/tools/runner/update_manifest.py38
-rw-r--r--testing/web-platform/tests/tools/serve/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/serve/commands.json17
-rw-r--r--testing/web-platform/tests/tools/serve/serve.py1237
-rw-r--r--testing/web-platform/tests/tools/serve/test_functional.py81
-rw-r--r--testing/web-platform/tests/tools/serve/test_serve.py149
-rw-r--r--testing/web-platform/tests/tools/serve/wave.py134
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/.gitignore9
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/.travis.yml35
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/CONTRIBUTING.rst11
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/LICENSE19
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/MANIFEST.in6
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/Makefile2
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/README.rst102
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/appveyor.yml18
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/atomicwrites/__init__.py201
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/docs/Makefile177
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/docs/conf.py107
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/docs/index.rst35
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/docs/make.bat242
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/setup.cfg2
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/setup.py27
-rw-r--r--testing/web-platform/tests/tools/third_party/atomicwrites/tox.ini11
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/.github/CODE_OF_CONDUCT.md133
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/.github/CONTRIBUTING.md230
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/.github/FUNDING.yml5
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/.github/PULL_REQUEST_TEMPLATE.md34
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/.github/SECURITY.md2
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/.github/workflows/main.yml113
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/.gitignore13
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/.pre-commit-config.yaml43
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/.readthedocs.yml16
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/AUTHORS.rst11
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/CHANGELOG.rst1027
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/LICENSE21
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/MANIFEST.in24
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/README.rst135
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/changelog.d/.gitignore0
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/changelog.d/towncrier_template.rst35
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/conftest.py29
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/Makefile177
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.png  bin 0 -> 7639 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.svg10
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo_white.svg10
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/api.rst826
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/changelog.rst1
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/comparison.rst66
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/conf.py155
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/docutils.conf3
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/examples.rst709
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/extending.rst313
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/glossary.rst104
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/hashing.rst86
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/how-does-it-work.rst109
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/index.rst100
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/init.rst489
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/license.rst8
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/names.rst122
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/overview.rst58
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/python-2.rst25
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/types.rst108
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/docs/why.rst290
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/mypy.ini3
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/pyproject.toml71
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/setup.py151
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.py80
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.pyi484
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.py154
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.pyi13
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/_compat.py261
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/_config.py33
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/_funcs.py422
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/_make.py3173
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/_next_gen.py216
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.py87
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.pyi9
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.py155
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.pyi13
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.py94
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.pyi17
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.py54
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.pyi6
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/py.typed0
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.py79
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.pyi19
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.py561
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.pyi78
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.py70
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.pyi63
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attrs/converters.py3
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attrs/exceptions.py3
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attrs/filters.py3
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attrs/py.typed0
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attrs/setters.py3
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/src/attrs/validators.py3
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/attr_import_star.py10
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/dataclass_transform_example.py45
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/strategies.py198
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_3rd_party.py31
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_annotations.py671
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_cmp.py510
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_compat.py52
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_config.py45
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_converters.py163
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_dunders.py1008
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_filters.py111
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_funcs.py680
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_functional.py790
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_hooks.py209
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_import.py11
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_init_subclass.py48
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_make.py2462
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_mypy.yml1395
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_next_gen.py440
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_pattern_matching.py101
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_pyright.py71
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_setattr.py437
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_slots.py740
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_validators.py952
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/test_version_info.py62
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/typing_example.py420
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tests/utils.py86
-rw-r--r--testing/web-platform/tests/tools/third_party/attrs/tox.ini129
-rw-r--r--testing/web-platform/tests/tools/third_party/certifi/LICENSE21
-rw-r--r--testing/web-platform/tests/tools/third_party/certifi/MANIFEST.in1
-rw-r--r--testing/web-platform/tests/tools/third_party/certifi/PKG-INFO69
-rw-r--r--testing/web-platform/tests/tools/third_party/certifi/README.rst46
-rw-r--r--testing/web-platform/tests/tools/third_party/certifi/certifi/__init__.py3
-rw-r--r--testing/web-platform/tests/tools/third_party/certifi/certifi/__main__.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/certifi/certifi/cacert.pem4400
-rw-r--r--testing/web-platform/tests/tools/third_party/certifi/certifi/core.py37
-rw-r--r--testing/web-platform/tests/tools/third_party/certifi/setup.cfg11
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/certifi/setup.py67
-rw-r--r--testing/web-platform/tests/tools/third_party/enum/MANIFEST.in9
-rw-r--r--testing/web-platform/tests/tools/third_party/enum/PKG-INFO60
-rw-r--r--testing/web-platform/tests/tools/third_party/enum/README3
-rw-r--r--testing/web-platform/tests/tools/third_party/enum/enum/LICENSE32
-rw-r--r--testing/web-platform/tests/tools/third_party/enum/enum/README3
-rw-r--r--testing/web-platform/tests/tools/third_party/enum/enum/__init__.py838
-rw-r--r--testing/web-platform/tests/tools/third_party/enum/enum/doc/enum.rst735
-rw-r--r--testing/web-platform/tests/tools/third_party/enum/enum/test.py1841
-rw-r--r--testing/web-platform/tests/tools/third_party/enum/setup.cfg4
-rw-r--r--testing/web-platform/tests/tools/third_party/enum/setup.py105
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/.coveragerc6
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/.gitignore19
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/.travis.yml18
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/CHANGELOG24
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/LICENSE13
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/MANIFEST.in7
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/Makefile39
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/README.rst353
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/docs/Makefile153
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/docs/_templates/page.html9
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/docs/conf.py251
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/funcsigs/__init__.py829
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/funcsigs/version.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/requirements/development.txt5
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/setup.cfg2
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/setup.py52
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/tests/test_formatannotation.py17
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/tests/test_funcsigs.py91
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/tests/test_inspect.py1002
-rw-r--r--testing/web-platform/tests/tools/third_party/funcsigs/tox.ini8
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/.coveragerc18
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/CONTRIBUTORS.rst115
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/HISTORY.rst760
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/LICENSE21
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/MANIFEST.in8
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/Makefile9
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/README.rst65
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/Makefile177
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/make.bat242
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/_static/.keep0
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/_static/h2.connection.H2ConnectionStateMachine.dot.png  bin 0 -> 714520 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/_static/h2.stream.H2StreamStateMachine.dot.png  bin 0 -> 1856508 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/advanced-usage.rst325
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/api.rst169
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/asyncio-example.rst17
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/basic-usage.rst746
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/conf.py270
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/contributors.rst4
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/curio-example.rst17
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/eventlet-example.rst19
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/examples.rst28
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/index.rst41
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/installation.rst18
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/low-level.rst159
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/negotiating-http2.rst103
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/release-notes.rst101
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/release-process.rst56
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/testimonials.rst9
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/tornado-example.rst16
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/twisted-example.rst18
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/twisted-head-example.rst17
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/twisted-post-example.rst18
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/docs/source/wsgi-example.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/asyncio/asyncio-server.py210
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/asyncio/cert.crt21
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/asyncio/cert.key27
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/asyncio/wsgi-server.py760
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/curio/curio-server.py206
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/curio/localhost.crt.pem21
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/curio/localhost.key27
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/eventlet/eventlet-server.py102
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/eventlet/server.crt20
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/eventlet/server.key27
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/fragments/client_https_setup_fragment.py110
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/fragments/client_upgrade_fragment.py103
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/fragments/server_https_setup_fragment.py112
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/fragments/server_upgrade_fragment.py100
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/tornado/server.crt20
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/tornado/server.key27
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/h2/examples/tornado/tornado-server.py92
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/twisted/head_request.py111
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/twisted/post_request.py249
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/twisted/server.crt20
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/twisted/server.csr17
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/twisted/server.key27
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/examples/twisted/twisted-server.py192
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/h2/__init__.py8
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/h2/config.py170
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/h2/connection.py2048
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/h2/errors.py75
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/h2/events.py648
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/h2/exceptions.py186
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/h2/frame_buffer.py175
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/h2/settings.py339
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/h2/stream.py1369
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/h2/utilities.py660
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/h2/windows.py139
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/setup.cfg10
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/setup.py76
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/conftest.py16
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/coroutine_tests.py74
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/helpers.py176
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_basic_logic.py1877
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_closed_streams.py555
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_complex_logic.py586
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_config.py130
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_events.py367
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_exceptions.py15
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_flow_control_window.py952
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_h2_upgrade.py302
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_head_request.py55
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_header_indexing.py637
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_informational_responses.py444
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_interacting_stacks.py120
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_invalid_content_lengths.py136
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_invalid_frame_sequences.py488
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_invalid_headers.py952
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_priority.py358
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_related_events.py370
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_rfc7838.py447
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_settings.py470
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_state_machines.py163
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_stream_reset.py137
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test/test_utility_functions.py226
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/test_requirements.txt5
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/tox.ini48
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/h2/utils/backport.sh31
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/visualizer/NOTICES.visualizer24
-rw-r--r--testing/web-platform/tests/tools/third_party/h2/visualizer/visualize.py252
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/CONTRIBUTORS.rst62
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/HISTORY.rst134
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/LICENSE21
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/MANIFEST.in2
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/PKG-INFO199
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/README.rst41
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/hpack/__init__.py20
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/hpack/compat.py42
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/hpack/exceptions.py49
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/hpack/hpack.py629
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/hpack/huffman.py68
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/hpack/huffman_constants.py288
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/hpack/huffman_table.py4739
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/hpack/struct.py39
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/hpack/table.py215
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/setup.cfg12
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/setup.py57
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/test/test_encode_decode.py141
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/test/test_hpack.py828
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/test/test_hpack_integration.py75
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/test/test_huffman.py55
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/test/test_struct.py77
-rw-r--r--testing/web-platform/tests/tools/third_party/hpack/test/test_table.py158
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/.appveyor.yml31
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/.coveragerc8
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/.gitignore85
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/.prospector.yaml21
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/.pylintrc10
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/.pytest.expect1322
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/.travis.yml32
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/AUTHORS.rst66
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/CHANGES.rst359
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/CONTRIBUTING.rst60
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/LICENSE20
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/MANIFEST.in10
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/README.rst151
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/bench_html.py57
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/bench_wpt.py45
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/README.md8
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/html.html5000
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/LICENSE.md11
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/README.md52
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/001.html3
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/background-origin-007-ref.html18
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/background_shorthand_css_relative_url.html24
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/beforeunload-on-history-back-1.html5
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/euckr-encode-form.html52
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/frame-ancestors-self-allow.html16
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/grouping-dl.html30
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/heavy-styling-005.html15
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/htb-ltr-ltr.html74
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/idbindex_get8.htm27
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/idlharness.html34
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/li-type-unsupported-ref.html13
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/moz-css21-float-page-break-inside-avoid-6.html19
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/shape-outside-content-box-002.html66
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/worker-constructor.https.html86
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/2d.composite.image.destination-over.html33
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/align-content-wrap-002.html108
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/big5_chars_extra.html1
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/fetch.http.html143
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/filter-turbulence-invalid-001.html51
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/grid-auto-fill-rows-001.html184
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/image-orientation-from-image-content-images-ref.html86
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/masonry-item-placement-006.html149
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/moz-css21-table-page-break-inside-avoid-2.html29
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/position-sticky-table-th-bottom-ref.html62
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/pre-float-001.html36
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/resize-004.html20
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/test-plan.src.html1616
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/toBlob.png.html17
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/will-change-abspos-cb-001.html30
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/debug-info.py37
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/Makefile177
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/changes.rst3
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/conf.py123
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.filters.rst58
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.rst38
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.treeadapters.rst20
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.treebuilders.rst42
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.treewalkers.rst50
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/index.rst22
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/license.rst4
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/make.bat242
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/modules.rst7
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/doc/movingparts.rst165
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/html5lib/flake8-run.sh9
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/__init__.py35
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/_ihatexml.py289
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/_inputstream.py918
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/_tokenizer.py1735
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/_trie/__init__.py5
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/_trie/_base.py40
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/_trie/py.py67
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/_utils.py159
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/constants.py2946
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/alphabeticalattributes.py29
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/base.py12
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/inject_meta_charset.py73
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/lint.py93
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/optionaltags.py207
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/sanitizer.py916
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/whitespace.py38
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/html5parser.py2795
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/serializer.py409
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/conftest.py108
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/sanitizer-testdata/tests1.dat433
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/sanitizer.py51
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/core.test395
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/injectmeta.test350
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/optionaltags.test3254
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/options.test334
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/whitespace.test198
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/support.py199
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_alphabeticalattributes.py78
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_encoding.py117
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_meta.py41
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_optionaltags_filter.py7
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_parser2.py94
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_sanitizer.py133
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_serializer.py226
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_stream.py325
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_tokenizer2.py66
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_treeadapters.py40
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_treewalkers.py205
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_whitespace_filter.py125
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/tokenizer.py253
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/tokenizertotree.py69
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/tree_construction.py205
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/us-ascii.html3
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/utf-8-bom.html3
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treeadapters/__init__.py30
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treeadapters/genshi.py54
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treeadapters/sax.py50
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/__init__.py88
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/base.py417
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/dom.py239
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/etree.py343
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/etree_lxml.py392
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/__init__.py154
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/base.py252
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/dom.py43
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/etree.py131
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/etree_lxml.py215
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/genshi.py69
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/html5lib/parse.py236
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/pytest.ini17
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/html5lib/requirements-install.sh15
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/requirements-optional.txt13
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/requirements-test.txt10
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/requirements.txt2
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/setup.cfg11
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/setup.py127
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/tox.ini20
-rw-r--r--testing/web-platform/tests/tools/third_party/html5lib/utils/entities.py101
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/CONTRIBUTORS.rst56
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/HISTORY.rst179
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/LICENSE21
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/MANIFEST.in2
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/PKG-INFO242
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/README.rst39
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/__init__.py8
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/exceptions.py41
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/flags.py50
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/frame.py822
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/setup.cfg10
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/setup.py59
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/test/test_flags.py35
-rw-r--r--testing/web-platform/tests/tools/third_party/hyperframe/test/test_frames.py791
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/.github/workflows/main.yml126
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/.gitignore13
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/.readthedocs.yml5
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/LICENSE13
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/MANIFEST.in5
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/README.rst42
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/codecov.yml2
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/coverage.ini24
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/coverplug.py21
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/docs/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/docs/changelog.rst314
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/docs/conf.py185
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/docs/index.rst52
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/docs/using.rst260
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/importlib_metadata/__init__.py627
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/importlib_metadata/_compat.py152
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/prepare/example/example/__init__.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/prepare/example/setup.py10
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/pyproject.toml2
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/setup.cfg47
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/setup.py3
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/tests/data/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/tests/data/example-21.12-py3-none-any.whl  bin 0 -> 1455 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/tests/data/example-21.12-py3.6.egg  bin 0 -> 1497 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/tests/fixtures.py263
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/tests/py39compat.py4
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_api.py196
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_integration.py54
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_main.py285
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_zip.py80
-rw-r--r--testing/web-platform/tests/tools/third_party/importlib_metadata/tox.ini97
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/.gitignore8
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/.landscape.yml5
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/.travis.yml18
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/CHANGELOG32
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/LICENSE19
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/MANIFEST.in5
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/README.txt51
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/example.ini10
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/pyproject.toml5
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/setup.cfg2
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/setup.py46
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/src/iniconfig/__init__.py165
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/src/iniconfig/__init__.pyi31
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/src/iniconfig/py.typed0
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/testing/conftest.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/testing/test_iniconfig.py314
-rw-r--r--testing/web-platform/tests/tools/third_party/iniconfig/tox.ini14
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/.gitignore34
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/.travis.yml26
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/LICENSE19
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/MANIFEST.in8
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/README.rst59
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/docs/Makefile153
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/docs/api.rst234
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/docs/conf.py244
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/docs/index.rst16
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/docs/license.rst16
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/docs/make.bat190
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/docs/testing.rst19
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/docs/versions.rst237
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/__init__.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/more.py2068
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/recipes.py565
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/tests/test_more.py1848
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/tests/test_recipes.py607
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/setup.cfg3
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/setup.py59
-rw-r--r--testing/web-platform/tests/tools/third_party/more-itertools/tox.ini5
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/.coveragerc9
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/.flake83
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/.github/workflows/docs.yml30
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/.github/workflows/lint.yml59
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/.github/workflows/test.yml56
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/.gitignore18
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/.pre-commit-config.yaml39
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/.readthedocs.yml15
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/CHANGELOG.rst347
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/CONTRIBUTING.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/LICENSE3
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/LICENSE.APACHE177
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/LICENSE.BSD23
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/MANIFEST.in24
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/README.rst73
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/Makefile153
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/_static/.empty0
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/changelog.rst1
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/conf.py111
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/development/getting-started.rst77
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/development/index.rst19
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/development/release-process.rst25
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/development/reviewing-patches.rst37
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/development/submitting-patches.rst74
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/index.rst38
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/markers.rst93
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/requirements.rst89
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/requirements.txt1
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/security.rst18
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/specifiers.rst222
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/tags.rst225
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/utils.rst92
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/docs/version.rst292
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/mypy.ini17
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/noxfile.py321
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/packaging/__about__.py26
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/packaging/__init__.py25
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/packaging/_manylinux.py301
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/packaging/_musllinux.py136
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/packaging/_structures.py61
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/packaging/markers.py304
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/packaging/py.typed0
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/packaging/requirements.py146
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/packaging/specifiers.py802
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/packaging/tags.py487
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/packaging/utils.py136
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/packaging/version.py504
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/pyproject.toml3
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/setup.cfg3
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/setup.py70
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tasks/__init__.py9
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tasks/check.py141
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tasks/paths.py9
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tasks/requirements.txt3
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/__init__.py3
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/hello-world.c7
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/packaging/tests/manylinux/build.sh39
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-armv7l-armelbin0 -> 52 bytes
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-armv7l-armhfbin0 -> 52 bytes
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-invalid-classbin0 -> 52 bytes
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-invalid-databin0 -> 52 bytes
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-invalid-magicbin0 -> 52 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-s390x-s390xbin0 -> 64 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-too-shortbin0 -> 40 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-x86_64-amd64bin0 -> 64 bytes
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-x86_64-i386bin0 -> 52 bytes
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-x86_64-x32bin0 -> 52 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/musllinux/build.sh61
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/test_manylinux.py253
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/test_markers.py310
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/test_musllinux.py146
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/test_requirements.py197
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/test_specifiers.py998
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/test_structures.py59
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/test_tags.py1191
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/test_utils.py124
-rw-r--r--testing/web-platform/tests/tools/third_party/packaging/tests/test_version.py904
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/.gitignore54
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/.travis.yml47
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/CHANGELOG.rst163
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/LICENSE.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/MANIFEST.in10
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/README.rst66
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/VERSION1
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/appveyor.yml30
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/appveyor/install.ps144
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/codecov.yml1
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/pathlib2/__init__.py1809
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/requirements.txt3
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/setup.cfg8
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/setup.py48
-rw-r--r--testing/web-platform/tests/tools/third_party/pathlib2/tests/test_pathlib2.py2406
-rw-r--r--testing/web-platform/tests/tools/third_party/pdf_js/LICENSE177
-rw-r--r--testing/web-platform/tests/tools/third_party/pdf_js/pdf.js24624
-rw-r--r--testing/web-platform/tests/tools/third_party/pdf_js/pdf.worker.js56199
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/.coveragerc14
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/.github/workflows/main.yml148
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/.gitignore64
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/.pre-commit-config.yaml34
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/CHANGELOG.rst409
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/LICENSE21
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/MANIFEST.in7
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/README.rst101
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/RELEASING.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/changelog/README.rst32
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/changelog/_template.rst40
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/codecov.yml7
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/_static/img/plug.pngbin0 -> 9350 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/api_reference.rst19
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/changelog.rst1
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/conf.py87
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample-spam/eggsample_spam.py22
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample-spam/setup.py8
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/__init__.py4
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/hookspecs.py21
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/host.py57
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/lib.py14
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/setup.py8
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/examples/toy-example.py41
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/docs/index.rst957
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/pyproject.toml47
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/scripts/release.py69
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pluggy/scripts/upload-coverage.sh16
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/setup.cfg52
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/setup.py5
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/__init__.py18
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_callers.py60
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_hooks.py325
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_manager.py373
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_result.py60
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_tracing.py62
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/testing/benchmark.py102
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/testing/conftest.py26
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/testing/test_details.py135
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/testing/test_helpers.py84
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/testing/test_hookcaller.py272
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/testing/test_invocations.py215
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/testing/test_multicall.py147
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/testing/test_pluginmanager.py544
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/testing/test_tracer.py78
-rw-r--r--testing/web-platform/tests/tools/third_party/pluggy/tox.ini57
-rw-r--r--testing/web-platform/tests/tools/third_party/py/.flake84
-rw-r--r--testing/web-platform/tests/tools/third_party/py/.github/workflows/main.yml66
-rw-r--r--testing/web-platform/tests/tools/third_party/py/.gitignore15
-rw-r--r--testing/web-platform/tests/tools/third_party/py/AUTHORS25
-rw-r--r--testing/web-platform/tests/tools/third_party/py/CHANGELOG.rst1236
-rw-r--r--testing/web-platform/tests/tools/third_party/py/LICENSE19
-rw-r--r--testing/web-platform/tests/tools/third_party/py/MANIFEST.in11
-rw-r--r--testing/web-platform/tests/tools/third_party/py/README.rst31
-rw-r--r--testing/web-platform/tests/tools/third_party/py/RELEASING.rst17
-rw-r--r--testing/web-platform/tests/tools/third_party/py/bench/localpath.py73
-rw-r--r--testing/web-platform/tests/tools/third_party/py/codecov.yml7
-rw-r--r--testing/web-platform/tests/tools/third_party/py/conftest.py60
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/Makefile133
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/_templates/layout.html18
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-0.9.0.txt7
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-0.9.2.txt27
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.0.0.txt63
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.0.1.txt48
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.0.2.txt5
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.1.0.txt115
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.1.1.txt48
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.2.0.txt116
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.2.1.txt66
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.0.txt580
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.1.txt104
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.2.txt720
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.3.txt26
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.4.txt22
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.4.0.txt47
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.4.1.txt47
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/announce/releases.txt16
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/changelog.txt3
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/code.txt150
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/conf.py263
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/download.html18
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/example/genhtml.py13
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/example/genhtmlcss.py23
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/example/genxml.py17
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/faq.txt170
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/img/pylib.pngbin0 -> 8276 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/index.txt39
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/install.txt91
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/io.txt59
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/links.inc15
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/log.txt208
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/misc.txt93
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/path.txt264
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/style.css1044
-rw-r--r--testing/web-platform/tests/tools/third_party/py/doc/xml.txt164
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/__init__.py156
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/__init__.pyi20
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/__metainfo.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_builtin.py149
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_code/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_code/_assertionnew.py322
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_code/_assertionold.py556
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_code/_py2traceback.py79
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_code/assertion.py90
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_code/code.py796
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_code/source.py410
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_error.py91
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_io/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_io/capture.py371
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_io/saferepr.py71
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_io/terminalwriter.py423
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_log/__init__.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_log/log.py206
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_log/warning.py79
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_path/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_path/cacheutil.py114
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_path/common.py459
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_path/local.py1030
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_path/svnurl.py380
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_path/svnwc.py1240
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_process/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_process/cmdexec.py49
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_process/forkedfunc.py120
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_process/killproc.py23
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_std.py27
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/INSTALLER1
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/LICENSE18
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/METADATA125
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/RECORD11
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/REQUESTED0
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/WHEEL6
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/top_level.txt1
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg/__init__.py217
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg/version.py5
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/INSTALLER1
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/LICENSE19
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/METADATA78
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/RECORD11
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/REQUESTED0
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/WHEEL6
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/top_level.txt1
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig/__init__.py165
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig/__init__.pyi31
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig/py.typed0
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/_xmlgen.py255
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/error.pyi129
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/iniconfig.pyi31
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/io.pyi130
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/path.pyi197
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/py.typed0
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/test.py10
-rw-r--r--testing/web-platform/tests/tools/third_party/py/py/xml.pyi25
-rw-r--r--testing/web-platform/tests/tools/third_party/py/pyproject.toml6
-rw-r--r--testing/web-platform/tests/tools/third_party/py/setup.cfg8
-rw-r--r--testing/web-platform/tests/tools/third_party/py/setup.py48
-rw-r--r--testing/web-platform/tests/tools/third_party/py/tasks/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/py/tasks/vendoring.py41
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/code/test_assertion.py305
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/code/test_code.py159
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/code/test_excinfo.py956
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/code/test_source.py656
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/conftest.py3
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/io_/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/io_/test_capture.py501
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/io_/test_saferepr.py75
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/io_/test_terminalwriter.py341
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/io_/test_terminalwriter_linewidth.py56
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/log/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/log/test_log.py191
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/log/test_warning.py85
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/path/common.py492
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/path/conftest.py80
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/path/repotest.dump228
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/path/svntestbase.py31
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/path/test_cacheutil.py89
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/path/test_local.py1078
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/path/test_svnauth.py460
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/path/test_svnurl.py95
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/path/test_svnwc.py557
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/process/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/process/test_cmdexec.py41
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/process/test_forkedfunc.py173
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/process/test_killproc.py18
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/root/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/root/test_builtin.py156
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/root/test_error.py76
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/root/test_py_imports.py71
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/root/test_std.py13
-rw-r--r--testing/web-platform/tests/tools/third_party/py/testing/root/test_xmlgen.py146
-rw-r--r--testing/web-platform/tests/tools/third_party/py/tox.ini44
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/LICENSE202
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/PKG-INFO302
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/README.rst281
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/PKG-INFO302
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/SOURCES.txt12
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/dependency_links.txt1
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/entry_points.txt3
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/requires.txt9
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/top_level.txt1
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/__init__.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/plugin.py240
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/setup.cfg18
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest-asyncio/setup.py54
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.coveragerc31
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.gitblameignore28
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.github/FUNDING.yml5
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.github/ISSUE_TEMPLATE/1_bug_report.md16
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.github/ISSUE_TEMPLATE/2_feature_request.md25
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.github/ISSUE_TEMPLATE/config.yml5
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.github/PULL_REQUEST_TEMPLATE.md26
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.github/config.yml2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.github/dependabot.yml11
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.github/labels.toml149
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.github/workflows/main.yml231
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.github/workflows/prepare-release-pr.yml52
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.github/workflows/update-plugin-list.yml49
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.gitignore58
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.pre-commit-config.yaml99
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/.readthedocs.yml19
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/AUTHORS356
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/CHANGELOG.rst7
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/CITATION16
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/CODE_OF_CONDUCT.md83
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/CONTRIBUTING.rst481
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/LICENSE21
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/OPENCOLLECTIVE.rst44
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/README.rst167
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/RELEASING.rst173
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/TIDELIFT.rst60
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/bench/bench.py13
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/bench/bench_argcomplete.py19
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/bench/empty.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/bench/manyparam.py14
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/bench/skip.py9
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/bench/unit_test.py13
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/bench/xunit.py11
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/changelog/README.rst37
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/changelog/_template.rst40
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/codecov.yml6
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/Makefile43
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/globaltoc.html34
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/layout.html52
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/links.html7
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/relations.html19
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/sidebarintro.html5
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/slim_searchbox.html15
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/adopt.rst78
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/index.rst154
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.0.rst129
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.1.rst67
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.2.rst73
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.3.rst39
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.0.rst47
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.1.rst36
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.2.rst32
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.3.rst32
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.0.rst95
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.1.rst41
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.2.rst43
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.4.rst38
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.0.rst133
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.1.rst39
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.2.rst57
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.3.rst61
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.4.rst39
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.5.rst96
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.4.0.rst223
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.4.1.rst25
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.4.2.rst39
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.5.0.rst174
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.5.1.rst46
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.5.2.rst63
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.0.rst153
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.1.rst58
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.2.rst51
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.3.rst51
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.7.0.rst100
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.7.1.rst58
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.7.2.rst57
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.2.rst44
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.3.rst58
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.4.rst52
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.5.rst39
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.6.rst67
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.7.rst31
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.9.0.rst134
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.9.1.rst57
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.9.2.rst65
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.0.rst82
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.1.rst26
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.2.rst24
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.3.rst27
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.4.rst29
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.5.rst27
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.6.rst33
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.7.rst33
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.0.rst61
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.1.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.2.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.3.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.10.0.rst43
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.10.1.rst24
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.0.rst48
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.1.rst22
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.2.rst28
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.3.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.4.rst36
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.5.rst18
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.3.0.rst50
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.3.1.rst25
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.3.2.rst28
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.4.0.rst52
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.4.1.rst27
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.4.2.rst28
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.5.0.rst51
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.5.1.rst30
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.0.rst41
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.1.rst24
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.2.rst29
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.3.rst27
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.4.rst24
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.0.rst41
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.1.rst21
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.2.rst25
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.3.rst32
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.4.rst22
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.8.0.rst38
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.8.1.rst25
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.8.2.rst28
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.0.rst43
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.1.rst20
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.2.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.3.rst24
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.0.0.rst30
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.0.1.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.0.2.rst24
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.1.0.rst44
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.1.1.rst27
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.2.0.rst37
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.2.1.rst30
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.3.0.rst36
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.3.1.rst28
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.4.0.rst39
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.4.1.rst20
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.4.2.rst33
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.5.0.rst34
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.0.rst43
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.1.rst19
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.2.rst18
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.3.rst21
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.4.rst22
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.5.rst21
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.6.rst20
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.7.rst19
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.8.rst20
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.9.rst21
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.0.0.rst46
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.0.1.rst25
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.0.rst56
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.1.rst24
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.2.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.3.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.0.rst35
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.1.rst23
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.2.rst29
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.3.rst28
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.4.rst22
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.0.rst45
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.1.rst26
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.2.rst26
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.3.rst30
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.4.rst20
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.5.rst19
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.0.rst59
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.1.rst18
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.2.rst22
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.3.rst21
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.0.rst40
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.0rc1.rst67
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.1.rst21
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.2.rst19
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.1.0.rst44
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.1.1.rst18
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.1.2.rst22
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.0.rst76
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.1.rst20
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.2.rst21
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.3.rst19
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.4.rst22
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.5.rst30
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-7.0.0.rst74
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-7.0.0rc1.rst74
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-7.0.1.rst20
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/sprint2016.rst64
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/backwards-compatibility.rst79
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/builtin.rst197
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/changelog.rst9044
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/conf.py478
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/conftest.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/contact.rst54
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/contents.rst116
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/contributing.rst3
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/deprecations.rst954
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/development_guide.rst7
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/failure_demo.py281
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/global_testmodule_config/conftest.py14
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/global_testmodule_config/test_hello_world.py5
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/test_failures.py13
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/test_setup_flow_example.py44
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/attic.rst83
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/conftest.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/fixture_availability.svg132
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/fixture_availability_plugins.svg142
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse.py45
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse.svg64
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_flat.svg56
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_multiple_scopes.py31
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_multiple_scopes.svg76
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_temp_effects.py36
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_temp_effects.svg100
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies.py45
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies.svg60
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies_flat.svg51
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies_unclear.svg60
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_scope.py36
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_scope.svg55
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_request_different_scope.py29
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_request_different_scope.svg115
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/index.rst34
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/markers.rst734
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/multipython.py72
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython.rst102
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython/conftest.py47
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython/test_simple.yaml7
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/parametrize.rst708
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/pythoncollection.py14
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/pythoncollection.rst321
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/reportingdemo.rst708
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/simple.rst1086
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/special.rst84
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/example/xfail_demo.py38
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/anatomy.rst46
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/fixtures.rst174
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/flaky.rst126
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/goodpractices.rst288
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/index.rst15
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/pythonpath.rst133
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/funcarg_compare.rst230
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/funcargs.rst13
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/getting-started.rst257
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/historical-notes.rst312
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/history.rst145
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/assert.rst336
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/bash-completion.rst33
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/cache.rst329
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/capture-stdout-stderr.rst170
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/capture-warnings.rst443
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/doctest.rst312
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/existingtestsuite.rst34
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/failures.rst160
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/fixtures.rst1887
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/index.rst64
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/logging.rst292
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/mark.rst93
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/monkeypatch.rst444
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/nose.rst79
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/output.rst710
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/parametrize.rst298
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/plugins.rst136
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/skipping.rst430
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/tmp_path.rst139
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/unittest.rst251
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/usage.rst214
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/writing_hook_functions.rst352
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/writing_plugins.rst458
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/xunit_setup.rst117
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/img/cramer2.pngbin0 -> 25291 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/img/favicon.pngbin0 -> 1334 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/img/freiburg2.jpgbin0 -> 104057 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/img/gaynor3.pngbin0 -> 23032 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/img/keleshev.pngbin0 -> 23246 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pullrequest.pngbin0 -> 17035 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pylib.pngbin0 -> 8276 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pytest1.pngbin0 -> 6010 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pytest_logo_curves.svg29
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/img/theuni.pngbin0 -> 31476 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/index.rst148
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/license.rst32
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/naming20.rst20
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/proposals/parametrize_with_fixtures.rst164
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/py27-py34-deprecation.rst99
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/pytest.ini2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/recwarn.rst3
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/customize.rst248
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/exit-codes.rst26
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/fixtures.rst455
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/index.rst15
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/plugin_list.rst7728
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/reference.rst2101
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/requirements.txt7
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/sponsor.rst26
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/talks.rst109
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/tidelift.rst45
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/doc/en/yieldfixture.rst18
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/extra/get_issues.py85
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/extra/setup-py.test/setup.py11
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/pyproject.toml116
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/scripts/prepare-release-pr.py174
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/scripts/publish-gh-release-notes.py102
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/scripts/release.major.rst24
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/scripts/release.minor.rst24
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/scripts/release.patch.rst17
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/scripts/release.pre.rst29
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/scripts/release.py131
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/scripts/towncrier-draft-to-file.py15
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/scripts/update-plugin-list.py140
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/setup.cfg105
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/setup.py4
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/__init__.py9
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_argcomplete.py117
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_code/__init__.py22
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_code/code.py1274
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_code/source.py217
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/__init__.py8
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/saferepr.py153
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/terminalwriter.py233
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/wcwidth.py55
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_version.py5
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/__init__.py181
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/rewrite.py1136
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/truncate.py94
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/util.py498
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/cacheprovider.py580
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/capture.py942
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/compat.py417
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/__init__.py1697
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/argparsing.py535
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/compat.py71
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/exceptions.py11
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/findpaths.py213
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/debugging.py388
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/deprecated.py155
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/doctest.py734
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/faulthandler.py97
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/fixtures.py1686
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/freeze_support.py44
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/helpconfig.py264
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/hookspec.py928
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/junitxml.py696
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/legacypath.py467
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/logging.py831
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/main.py896
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/mark/__init__.py282
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/mark/expression.py225
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/mark/structures.py595
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/monkeypatch.py383
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/nodes.py762
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/nose.py42
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/outcomes.py307
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pastebin.py110
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pathlib.py724
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/py.typed0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pytester.py1748
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pytester_assertions.py75
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/python.py1764
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/python_api.py961
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/python_path.py24
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/recwarn.py296
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/reports.py598
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/runner.py548
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/scope.py91
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/setuponly.py97
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/setupplan.py40
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/skipping.py296
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/stash.py112
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/stepwise.py122
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/terminal.py1394
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/threadexception.py88
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/timing.py12
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/tmpdir.py211
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/unittest.py414
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/unraisableexception.py93
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/warning_types.py145
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/_pytest/warnings.py141
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/pytest/__init__.py171
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/pytest/__main__.py5
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/pytest/collect.py38
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/src/pytest/py.typed0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/acceptance_test.py1297
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/code/test_code.py212
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/code/test_excinfo.py1470
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/code/test_source.py656
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/conftest.py216
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/deprecated_test.py310
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/README.rst9
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/acceptance/fixture_mock_integration.py16
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/collect_init_tests/pytest.ini2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/collect_init_tests/tests/__init__.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/collect_init_tests/tests/test_foo.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/__init__.pyi0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/conftest.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/tests/test_basic.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_init_given_as_arg/pkg/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_init_given_as_arg/pkg/test_foo.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/config/collect_pytest_prefix/__init__.pyi0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/config/collect_pytest_prefix/conftest.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/config/collect_pytest_prefix/test_foo.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/conftest_usageerror/__init__.pyi0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/conftest_usageerror/conftest.py8
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_dataclasses.py14
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_field_comparison_off.py14
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_verbose.py14
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_recursive_dataclasses.py44
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_two_different_dataclasses.py19
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/doctest/main_py/__main__.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/doctest/main_py/test_normal_module.py6
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/__init__.pyi0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/conftest.py15
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/foo/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/foo/test_foo.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/conftest.py7
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/test_in_sub1.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/conftest.py6
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/test_in_sub2.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_detect_recursive_dependency_error.py15
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/__init__.pyi0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/conftest.py6
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/conftest.py6
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/test_spam.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/__init__.pyi0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/conftest.py6
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/test_extend_fixture_conftest_module.py10
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_module_class.py15
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_basic.py15
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_classlevel.py10
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_modulelevel.py15
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookupfails.py10
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/test_fixture_named_request.py10
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/test_getfixturevalue_dynamic.py20
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue88_initial_file_multinodes/__init__.pyi0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue88_initial_file_multinodes/conftest.py14
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue88_initial_file_multinodes/test_hello.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue_519.py53
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/junit-10.xsd147
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/marks/marks_considered_keywords/__init__.pyi0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/marks/marks_considered_keywords/conftest.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/marks/marks_considered_keywords/test_marks_as_keywords.py6
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/perf_examples/collect_stats/.gitignore1
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/perf_examples/collect_stats/generate_folders.py27
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/perf_examples/collect_stats/template_test.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/pytest.ini2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/tmpdir/tmp_path_fixture.py7
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_parametrized_fixture_error_message.py14
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_setup_skip.py13
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_setup_skip_class.py14
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_setup_skip_module.py12
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_unittest_asyncio.py25
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_unittest_asynctest.py23
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_unittest_plain_async.py6
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/warnings/test_group_warnings_by_message.py21
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_1.py21
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_2.py5
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/examples/test_issue519.py7
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/freeze/.gitignore3
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/freeze/create_executable.py11
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/freeze/runtests_script.py10
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/freeze/tests/test_doctest.txt6
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/freeze/tests/test_trivial.py6
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/freeze/tox_run.py12
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/io/test_saferepr.py181
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/io/test_terminalwriter.py293
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/io/test_wcwidth.py38
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/logging/test_fixture.py310
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/logging/test_formatter.py173
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/logging/test_reporting.py1167
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/.gitignore2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/README.rst13
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/bdd_wallet.feature9
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/bdd_wallet.py39
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/django_settings.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest.ini5
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_anyio_integration.py8
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_asyncio_integration.py8
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_mock_integration.py2
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_trio_integration.py8
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_twisted_integration.py18
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/requirements.txt15
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/simple_integration.py10
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/python/approx.py872
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/python/collect.py1493
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/python/fixtures.py4474
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/python/integration.py503
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/python/metafunc.py1907
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/python/raises.py298
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/python/show_fixtures_per_test.py254
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_argcomplete.py95
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_assertion.py1685
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_assertrewrite.py1841
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_cacheprovider.py1251
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_capture.py1666
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_collection.py1506
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_compat.py265
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_config.py2115
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_conftest.py696
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_debugging.py1327
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_doctest.py1572
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_entry_points.py7
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_error_diffs.py283
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_faulthandler.py172
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_findpaths.py135
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_helpconfig.py124
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_junitxml.py1703
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_legacypath.py180
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_link_resolve.py80
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_main.py264
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_mark.py1130
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_mark_expression.py195
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_meta.py32
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_monkeypatch.py455
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_nodes.py167
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_nose.py498
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_parseopt.py344
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_pastebin.py184
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_pathlib.py574
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_pluginmanager.py427
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_pytester.py855
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_python_path.py110
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_recwarn.py410
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_reports.py488
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_runner.py1061
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_runner_xunit.py297
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_scope.py39
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_session.py369
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_setuponly.py318
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_setupplan.py120
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_skipping.py1533
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_stash.py67
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_stepwise.py280
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_terminal.py2486
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_threadexception.py137
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_tmpdir.py480
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_unittest.py1500
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_unraisableexception.py133
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_warning_types.py38
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/test_warnings.py775
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/testing/typing_checks.py24
-rw-r--r--testing/web-platform/tests/tools/third_party/pytest/tox.ini184
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/.gitignore4
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/.travis.yml17
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/CONTRIBUTING30
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/LICENSE28
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/MANIFEST.in6
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/README.md36
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/abort_handshake_wsh.py43
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/abort_wsh.py43
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/arraybuffer_benchmark.html134
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/bench_wsh.py59
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/benchmark.html175
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/benchmark.js238
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/benchmark_helper_wsh.py84
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/example/cgi-bin/hi.py5
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/close_wsh.py70
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/console.html317
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/cookie_wsh.py54
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/example/echo_client.py699
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/echo_noext_wsh.py62
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/echo_wsh.py55
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/handler_map.txt11
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/hsts_wsh.py40
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/internal_error_wsh.py42
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/origin_check_wsh.py44
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/performance_test_iframe.html37
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/performance_test_iframe.js86
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/example/special_headers.cgi26
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/util.js323
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/util_main.js89
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/example/util_worker.js44
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/__init__.py172
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/_stream_exceptions.py82
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/common.py273
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/dispatch.py385
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/extensions.py474
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/fast_masking.i98
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/handshake/__init__.py101
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/handshake/base.py396
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/handshake/hybi.py223
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/http_header_util.py254
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/memorizingfile.py99
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/msgutil.py214
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/request_handler.py319
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/server_util.py87
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/standalone.py481
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/stream.py950
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/util.py386
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/websocket_server.py285
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/setup.py73
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/cacert.pem17
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/cert.pem61
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/client_cert.p12bin0 -> 2582 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/key.pem15
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/client_for_testing.py726
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/mock.py227
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/test/run_all.py88
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/set_sys_path.py41
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/test/test_dispatch.py298
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/test/test_endtoend.py738
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/test/test_extensions.py192
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/test/test_handshake.py172
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/test/test_handshake_hybi.py422
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/test/test_http_header_util.py93
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/test/test_memorizingfile.py100
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/test/test_mock.py137
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/test/test_msgutil.py912
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/test/test_stream.py70
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/pywebsocket3/test/test_util.py191
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/README1
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/abort_by_user_wsh.py41
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/blank_wsh.py30
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/origin_check_wsh.py43
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/exception_in_transfer_wsh.py42
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/no_wsh_at_the_end.py43
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/non_callable_wsh.py35
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/plain_wsh.py41
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/wrong_handshake_sig_wsh.py43
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/wrong_transfer_sig_wsh.py43
-rw-r--r--testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/hello.pl32
-rw-r--r--testing/web-platform/tests/tools/third_party/six/CHANGES315
-rw-r--r--testing/web-platform/tests/tools/third_party/six/LICENSE18
-rw-r--r--testing/web-platform/tests/tools/third_party/six/MANIFEST.in6
-rw-r--r--testing/web-platform/tests/tools/third_party/six/README.rst32
-rw-r--r--testing/web-platform/tests/tools/third_party/six/documentation/Makefile130
-rw-r--r--testing/web-platform/tests/tools/third_party/six/documentation/conf.py217
-rw-r--r--testing/web-platform/tests/tools/third_party/six/documentation/index.rst875
-rw-r--r--testing/web-platform/tests/tools/third_party/six/setup.cfg20
-rw-r--r--testing/web-platform/tests/tools/third_party/six/setup.py58
-rw-r--r--testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/INSTALLER1
-rw-r--r--testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/LICENSE18
-rw-r--r--testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/METADATA49
-rw-r--r--testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/RECORD8
-rw-r--r--testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/WHEEL6
-rw-r--r--testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/top_level.txt1
-rw-r--r--testing/web-platform/tests/tools/third_party/six/six.py982
-rw-r--r--testing/web-platform/tests/tools/third_party/six/test_six.py1052
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/tooltool/tooltool.py1316
-rw-r--r--testing/web-platform/tests/tools/third_party/webencodings/PKG-INFO50
-rw-r--r--testing/web-platform/tests/tools/third_party/webencodings/README.rst25
-rw-r--r--testing/web-platform/tests/tools/third_party/webencodings/setup.cfg14
-rw-r--r--testing/web-platform/tests/tools/third_party/webencodings/setup.py47
-rw-r--r--testing/web-platform/tests/tools/third_party/webencodings/webencodings/__init__.py342
-rw-r--r--testing/web-platform/tests/tools/third_party/webencodings/webencodings/labels.py231
-rw-r--r--testing/web-platform/tests/tools/third_party/webencodings/webencodings/mklabels.py59
-rw-r--r--testing/web-platform/tests/tools/third_party/webencodings/webencodings/tests.py153
-rw-r--r--testing/web-platform/tests/tools/third_party/webencodings/webencodings/x_user_defined.py325
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/.appveyor.yml27
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/.circleci/config.yml55
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/.github/FUNDING.yml1
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/.gitignore12
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/.readthedocs.yml7
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/.travis.yml36
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/CODE_OF_CONDUCT.md46
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/LICENSE25
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/MANIFEST.in2
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/Makefile29
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/README.rst154
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/compliance/README.rst50
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/compliance/fuzzingclient.json11
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/compliance/fuzzingserver.json12
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/compliance/test_client.py49
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/compliance/test_server.py27
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/Makefile160
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/_static/tidelift.pngbin0 -> 4069 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/_static/websockets.svg31
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/api.rst152
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/changelog.rst563
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/cheatsheet.rst109
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/conf.py272
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/contributing.rst61
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/deployment.rst162
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/design.rst571
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/extensions.rst87
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/faq.rst261
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/index.rst99
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/intro.rst209
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/license.rst4
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/lifecycle.grafflebin0 -> 3134 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/lifecycle.svg3
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/limitations.rst10
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/protocol.grafflebin0 -> 4740 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/protocol.svg3
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/requirements.txt4
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/security.rst39
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/spelling_wordlist.txt39
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/docs/tidelift.rst112
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/basic_auth_client.py14
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/basic_auth_server.py20
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/client.py19
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/example/counter.html80
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/counter.py69
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/echo.py13
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/health_check_server.py22
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/hello.py12
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/example/localhost.pem48
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/secure_client.py27
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/secure_server.py28
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/server.py20
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/example/show_time.html20
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/show_time.py19
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/shutdown.py22
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/unix_client.py19
-rwxr-xr-xtesting/web-platform/tests/tools/third_party/websockets/example/unix_server.py22
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/logo/horizontal.svg31
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/logo/icon.svg15
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/logo/old.svg14
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/logo/tidelift.pngbin0 -> 4069 bytes
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/logo/vertical.svg31
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/performance/mem_client.py54
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/performance/mem_server.py63
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/setup.cfg30
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/setup.py66
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/__init__.py55
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/__main__.py206
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/auth.py160
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/client.py584
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/exceptions.py366
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/extensions/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/extensions/base.py119
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/extensions/permessage_deflate.py588
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/framing.py342
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/handshake.py185
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/headers.py515
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/http.py360
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/protocol.py1429
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/py.typed0
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/server.py996
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/speedups.c206
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/speedups.pyi1
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/typing.py49
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/uri.py81
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/utils.py18
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/src/websockets/version.py1
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/__init__.py5
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/extensions/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/extensions/test_base.py4
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/extensions/test_permessage_deflate.py792
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_auth.py139
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_client_server.py1546
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_exceptions.py145
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_exports.py22
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_framing.py242
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_handshake.py190
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_headers.py185
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_http.py249
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_localhost.cnf26
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_localhost.pem48
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_protocol.py1475
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_uri.py33
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/test_utils.py92
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tests/utils.py93
-rw-r--r--testing/web-platform/tests/tools/third_party/websockets/tox.ini28
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/.flake89
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/.github/workflows/main.yml42
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/.pre-commit-config.yaml5
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/.readthedocs.yml5
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/.travis.yml28
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/CHANGES.rst100
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/LICENSE7
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/PKG-INFO39
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/README.rst21
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/appveyor.yml24
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/conftest.py0
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/docs/conf.py26
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/docs/history.rst8
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/docs/index.rst22
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/mypy.ini2
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/pyproject.toml6
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/pytest.ini9
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/setup.cfg45
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/setup.py6
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/skeleton.md137
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/test_zipp.py245
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/tox.ini36
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/PKG-INFO39
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/SOURCES.txt24
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/dependency_links.txt1
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/requires.txt14
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/top_level.txt1
-rw-r--r--testing/web-platform/tests/tools/third_party/zipp/zipp.py286
-rw-r--r--testing/web-platform/tests/tools/tox.ini24
-rw-r--r--testing/web-platform/tests/tools/wave/.gitignore3
-rw-r--r--testing/web-platform/tests/tools/wave/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wave/config.default.json49
-rw-r--r--testing/web-platform/tests/tools/wave/configuration_loader.py97
-rw-r--r--testing/web-platform/tests/tools/wave/data/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wave/data/client.py8
-rw-r--r--testing/web-platform/tests/tools/wave/data/device.py8
-rw-r--r--testing/web-platform/tests/tools/wave/data/event_listener.py10
-rw-r--r--testing/web-platform/tests/tools/wave/data/exceptions/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wave/data/exceptions/duplicate_exception.py2
-rw-r--r--testing/web-platform/tests/tools/wave/data/exceptions/invalid_data_exception.py2
-rw-r--r--testing/web-platform/tests/tools/wave/data/exceptions/not_found_exception.py2
-rw-r--r--testing/web-platform/tests/tools/wave/data/exceptions/permission_denied_exception.py2
-rw-r--r--testing/web-platform/tests/tools/wave/data/http_polling_client.py13
-rw-r--r--testing/web-platform/tests/tools/wave/data/http_polling_event_listener.py13
-rw-r--r--testing/web-platform/tests/tools/wave/data/session.py78
-rw-r--r--testing/web-platform/tests/tools/wave/docs/README.md14
-rw-r--r--testing/web-platform/tests/tools/wave/docs/config.md326
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/configuration_page_bottom.jpgbin0 -> 126736 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/configuration_page_exclude_add_malfunctioning.jpgbin0 -> 139829 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/configuration_page_exclude_add_prev_excluded.jpgbin0 -> 139651 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/configuration_page_exclude_add_raw.jpgbin0 -> 129514 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/configuration_page_top.jpgbin0 -> 120429 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/landing_page.jpgbin0 -> 108212 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/overview_page_sessions.jpgbin0 -> 89660 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/overview_page_sessions_filtered.jpgbin0 -> 105746 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/overview_page_sessions_pinned_recent.jpgbin0 -> 129519 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/overview_page_top.jpgbin0 -> 56339 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/results_page_api_results.jpgbin0 -> 127186 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/results_page_api_results_export.jpgbin0 -> 129230 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/results_page_bottom.jpgbin0 -> 98504 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/results_page_last_timed_out.jpgbin0 -> 87467 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/results_page_malfunctioning_list.jpgbin0 -> 88697 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/res/results_page_top.jpgbin0 -> 77941 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/README.md76
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/create.md33
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/event-types.md37
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/read-device.md41
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/read-devices.md47
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/register-global.md54
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/register.md52
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/send-event.md43
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/send-global-event.md46
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/general-api/status.md41
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/guides/README.md10
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/guides/session-events.md52
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/guides/session-start-devices-api.md60
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/results-api/config.md34
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/results-api/create.md65
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/results-api/download.md127
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/results-api/import.md66
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/results-api/read-compact.md59
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/results-api/read.md63
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/results-api/view.md61
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/control.md25
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/create.md103
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/delete.md25
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/event-types.md27
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/events.md104
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/find.md29
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/labels.md75
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/read-public.md30
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/read.md90
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/read_sessions.md123
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/status.md48
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/update.md102
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-all.md43
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-available-apis.md43
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-last-completed.md47
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-malfunctioning.md30
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-next.md29
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-session.md61
-rw-r--r--testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/update-malfunctioning.md56
-rw-r--r--testing/web-platform/tests/tools/wave/docs/usage/usage.md231
-rw-r--r--testing/web-platform/tests/tools/wave/export/css/bulma.min.css1
-rw-r--r--testing/web-platform/tests/tools/wave/export/css/result.css75
-rw-r--r--testing/web-platform/tests/tools/wave/export/index.html375
-rw-r--r--testing/web-platform/tests/tools/wave/export/lib/ui.js64
-rw-r--r--testing/web-platform/tests/tools/wave/export/lib/utils.js40
-rw-r--r--testing/web-platform/tests/tools/wave/export/res/wavelogo_2016.jpgbin0 -> 15570 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/network/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wave/network/api/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wave/network/api/api_handler.py99
-rw-r--r--testing/web-platform/tests/tools/wave/network/api/devices_api_handler.py202
-rw-r--r--testing/web-platform/tests/tools/wave/network/api/general_api_handler.py76
-rw-r--r--testing/web-platform/tests/tools/wave/network/api/results_api_handler.py232
-rw-r--r--testing/web-platform/tests/tools/wave/network/api/sessions_api_handler.py458
-rw-r--r--testing/web-platform/tests/tools/wave/network/api/tests_api_handler.py298
-rw-r--r--testing/web-platform/tests/tools/wave/network/http_handler.py122
-rw-r--r--testing/web-platform/tests/tools/wave/network/static_handler.py60
-rw-r--r--testing/web-platform/tests/tools/wave/package-lock.json35
-rw-r--r--testing/web-platform/tests/tools/wave/package.json7
-rw-r--r--testing/web-platform/tests/tools/wave/requirements.txt2
-rw-r--r--testing/web-platform/tests/tools/wave/resources/testharnessreport.js284
-rw-r--r--testing/web-platform/tests/tools/wave/test/WAVE Local.postman_environment.json34
-rw-r--r--testing/web-platform/tests/tools/wave/test/WAVE Server REST API Tests.postman_collection.json9833
-rw-r--r--testing/web-platform/tests/tools/wave/testing/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wave/testing/devices_manager.py119
-rw-r--r--testing/web-platform/tests/tools/wave/testing/event_dispatcher.py148
-rw-r--r--testing/web-platform/tests/tools/wave/testing/results_manager.py674
-rw-r--r--testing/web-platform/tests/tools/wave/testing/sessions_manager.py478
-rw-r--r--testing/web-platform/tests/tools/wave/testing/test_loader.py200
-rw-r--r--testing/web-platform/tests/tools/wave/testing/tests_manager.py374
-rw-r--r--testing/web-platform/tests/tools/wave/testing/wpt_report.py57
-rw-r--r--testing/web-platform/tests/tools/wave/tests/WAVE Local.postman_environment.json34
-rw-r--r--testing/web-platform/tests/tools/wave/tests/WAVE Server REST API Tests.postman_collection.json8549
-rw-r--r--testing/web-platform/tests/tools/wave/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wave/tests/config.json6
-rw-r--r--testing/web-platform/tests/tools/wave/tests/test_wave.py54
-rw-r--r--testing/web-platform/tests/tools/wave/tox.ini18
-rw-r--r--testing/web-platform/tests/tools/wave/utils/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wave/utils/deserializer.py118
-rw-r--r--testing/web-platform/tests/tools/wave/utils/serializer.py47
-rw-r--r--testing/web-platform/tests/tools/wave/utils/user_agent_parser.py43
-rw-r--r--testing/web-platform/tests/tools/wave/wave_server.py145
-rw-r--r--testing/web-platform/tests/tools/wave/www/comparison.html444
-rw-r--r--testing/web-platform/tests/tools/wave/www/configuration.html1586
-rw-r--r--testing/web-platform/tests/tools/wave/www/css/bulma-0.7.5/bulma.css10599
-rw-r--r--testing/web-platform/tests/tools/wave/www/css/bulma-0.7.5/bulma.css.map1
-rw-r--r--testing/web-platform/tests/tools/wave/www/css/bulma-0.7.5/bulma.min.css1
-rw-r--r--testing/web-platform/tests/tools/wave/www/css/fontawesome-5.7.2.min.css1
-rw-r--r--testing/web-platform/tests/tools/wave/www/css/fontawesome.min.css5
-rw-r--r--testing/web-platform/tests/tools/wave/www/css/main.css101
-rw-r--r--testing/web-platform/tests/tools/wave/www/css/result.css75
-rw-r--r--testing/web-platform/tests/tools/wave/www/css/style.css86
-rw-r--r--testing/web-platform/tests/tools/wave/www/favicon.icobin0 -> 90022 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/finish.html215
-rw-r--r--testing/web-platform/tests/tools/wave/www/index.html263
-rw-r--r--testing/web-platform/tests/tools/wave/www/lib/davidshimjs/LICENSE14
-rw-r--r--testing/web-platform/tests/tools/wave/www/lib/davidshimjs/qrcode.js1533
-rw-r--r--testing/web-platform/tests/tools/wave/www/lib/jszip.min.js15
-rw-r--r--testing/web-platform/tests/tools/wave/www/lib/keycodes.js88
-rw-r--r--testing/web-platform/tests/tools/wave/www/lib/qrcode.js1533
-rw-r--r--testing/web-platform/tests/tools/wave/www/lib/query-parser.js12
-rw-r--r--testing/web-platform/tests/tools/wave/www/lib/screen-console.js16
-rw-r--r--testing/web-platform/tests/tools/wave/www/lib/ui.js100
-rw-r--r--testing/web-platform/tests/tools/wave/www/lib/utils.js57
-rw-r--r--testing/web-platform/tests/tools/wave/www/lib/wave-service.js866
-rw-r--r--testing/web-platform/tests/tools/wave/www/newsession.html257
-rw-r--r--testing/web-platform/tests/tools/wave/www/next.html33
-rw-r--r--testing/web-platform/tests/tools/wave/www/overview.html1315
-rw-r--r--testing/web-platform/tests/tools/wave/www/pause.html224
-rw-r--r--testing/web-platform/tests/tools/wave/www/res/spinner-solid.svg1
-rw-r--r--testing/web-platform/tests/tools/wave/www/res/wavelogo_2016.jpgbin0 -> 15570 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/results.html1565
-rw-r--r--testing/web-platform/tests/tools/wave/www/submitresult.html63
-rw-r--r--testing/web-platform/tests/tools/wave/www/test.html155
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.eotbin0 -> 125320 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.svg3296
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.ttfbin0 -> 125016 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.woffbin0 -> 84564 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.woff2bin0 -> 72112 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.eotbin0 -> 34388 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.svg799
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.ttfbin0 -> 34092 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.woffbin0 -> 16812 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.woff2bin0 -> 13592 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.eotbin0 -> 186512 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.svg4516
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.ttfbin0 -> 186228 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.woffbin0 -> 96244 bytes
-rw-r--r--testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.woff2bin0 -> 74348 bytes
-rw-r--r--testing/web-platform/tests/tools/webdriver/.gitignore2
-rw-r--r--testing/web-platform/tests/tools/webdriver/README.md73
-rw-r--r--testing/web-platform/tests/tools/webdriver/setup.py14
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/__init__.py39
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/bidi/__init__.py3
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/bidi/client.py226
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/bidi/error.py70
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/__init__.py5
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/_module.py99
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/browsing_context.py82
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/script.py136
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/session.py31
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/bidi/transport.py76
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/client.py900
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/error.py232
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/protocol.py49
-rw-r--r--testing/web-platform/tests/tools/webdriver/webdriver/transport.py267
-rw-r--r--testing/web-platform/tests/tools/webtransport/META.yml3
-rw-r--r--testing/web-platform/tests/tools/webtransport/README.md78
-rw-r--r--testing/web-platform/tests/tools/webtransport/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/webtransport/h3/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/webtransport/h3/capsule.py111
-rw-r--r--testing/web-platform/tests/tools/webtransport/h3/handler.py76
-rw-r--r--testing/web-platform/tests/tools/webtransport/h3/test_capsule.py130
-rw-r--r--testing/web-platform/tests/tools/webtransport/h3/webtransport_h3_server.py545
-rw-r--r--testing/web-platform/tests/tools/webtransport/requirements.txt4
-rw-r--r--testing/web-platform/tests/tools/wpt/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wpt/android.py181
-rw-r--r--testing/web-platform/tests/tools/wpt/browser.py2048
-rw-r--r--testing/web-platform/tests/tools/wpt/commands.json94
-rw-r--r--testing/web-platform/tests/tools/wpt/create.py133
-rw-r--r--testing/web-platform/tests/tools/wpt/install.py120
-rw-r--r--testing/web-platform/tests/tools/wpt/markdown.py44
-rw-r--r--testing/web-platform/tests/tools/wpt/paths7
-rw-r--r--testing/web-platform/tests/tools/wpt/requirements.txt1
-rw-r--r--testing/web-platform/tests/tools/wpt/requirements_install.txt1
-rw-r--r--testing/web-platform/tests/tools/wpt/revlist.py107
-rw-r--r--testing/web-platform/tests/tools/wpt/run.py873
-rw-r--r--testing/web-platform/tests/tools/wpt/testfiles.py442
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/latest_mozilla_central.txt20834
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/safari-downloads/2018-05-17.html30
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/safari-downloads/2018-09-19.html33
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/safari-downloads/2020-06-04.html33
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/safari-downloads/2020-07-16.html36
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/safari-downloads/2020-11-14.html33
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/safari-downloads/2021-06-08.html33
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/safari-downloads/2021-10-28.html31
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-05-29.html44
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-06-22.html46
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-07-05.html37
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-07-07.html46
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-08-25.html48
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/test_browser.py386
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/test_install.py81
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/test_markdown.py37
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/test_revlist.py156
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/test_run.py76
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/test_testfiles.py71
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/test_update_expectations.py130
-rw-r--r--testing/web-platform/tests/tools/wpt/tests/test_wpt.py406
-rw-r--r--testing/web-platform/tests/tools/wpt/tox.ini19
-rw-r--r--testing/web-platform/tests/tools/wpt/update.py56
-rw-r--r--testing/web-platform/tests/tools/wpt/utils.py168
-rw-r--r--testing/web-platform/tests/tools/wpt/virtualenv.py137
-rw-r--r--testing/web-platform/tests/tools/wpt/wpt.py240
-rw-r--r--testing/web-platform/tests/tools/wptrunner/.gitignore8
-rw-r--r--testing/web-platform/tests/tools/wptrunner/MANIFEST.in6
-rw-r--r--testing/web-platform/tests/tools/wptrunner/README.rst14
-rw-r--r--testing/web-platform/tests/tools/wptrunner/docs/architecture.svg1
-rw-r--r--testing/web-platform/tests/tools/wptrunner/docs/commands.rst79
-rw-r--r--testing/web-platform/tests/tools/wptrunner/docs/design.rst108
-rw-r--r--testing/web-platform/tests/tools/wptrunner/docs/expectation.rst366
-rw-r--r--testing/web-platform/tests/tools/wptrunner/docs/internals.rst23
-rw-r--r--testing/web-platform/tests/tools/wptrunner/requirements.txt9
-rw-r--r--testing/web-platform/tests/tools/wptrunner/requirements_chromium.txt4
-rw-r--r--testing/web-platform/tests/tools/wptrunner/requirements_edge.txt1
-rw-r--r--testing/web-platform/tests/tools/wptrunner/requirements_firefox.txt9
-rw-r--r--testing/web-platform/tests/tools/wptrunner/requirements_ie.txt2
-rw-r--r--testing/web-platform/tests/tools/wptrunner/requirements_opera.txt2
-rw-r--r--testing/web-platform/tests/tools/wptrunner/requirements_safari.txt1
-rw-r--r--testing/web-platform/tests/tools/wptrunner/requirements_sauce.txt2
-rw-r--r--testing/web-platform/tests/tools/wptrunner/setup.py66
-rw-r--r--testing/web-platform/tests/tools/wptrunner/tox.ini25
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner.default.ini11
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/__init__.py45
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/android_weblayer.py105
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/android_webview.py103
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/base.py409
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome.py157
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome_android.py244
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome_ios.py58
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome_spki_certs.py13
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chromium.py57
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/content_shell.py203
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/edge.py109
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/edge_webdriver.py27
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/edgechromium.py97
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/epiphany.py75
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/firefox.py969
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/firefox_android.py367
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/ie.py50
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/opera.py70
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/safari.py207
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/sauce.py249
-rwxr-xr-xtesting/web-platform/tests/tools/wptrunner/wptrunner/browsers/sauce_setup/edge-prerun.bat9
-rwxr-xr-xtesting/web-platform/tests/tools/wptrunner/wptrunner/browsers/sauce_setup/safari-prerun.sh3
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/servo.py118
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/servodriver.py184
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/webkit.py83
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/webkitgtk_minibrowser.py82
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/config.py63
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/environment.py331
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/__init__.py5
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/actions.py269
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/base.py781
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorchrome.py114
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorcontentshell.py269
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executormarionette.py1323
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorselenium.py485
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorservo.py363
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorservodriver.py303
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorwebdriver.py694
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/process.py22
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/protocol.py689
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/pytestrunner/__init__.py1
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/pytestrunner/runner.py171
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/reftest.js1
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/runner.js1
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/test-wait.js55
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/testharness_servodriver.js2
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/testharness_webdriver_resume.js5
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/executors/window-loaded.js9
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/expected.py16
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/expectedtree.py132
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/font.py144
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/chromium.py335
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/tests/test_chromium.py828
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/wptreport.py137
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/wptscreenshot.py49
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/instruments.py121
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/manifestexpected.py542
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/manifestinclude.py156
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/manifestupdate.py967
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/metadata.py836
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/mpcontext.py13
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/print_reftest_runner.html33
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/products.py67
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/stability.py417
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/testdriver-extra.js259
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/testdriver-vendor.js1
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/testharness_runner.html6
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport-content-shell.js25
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport-servo.js17
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport-servodriver.js23
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport.js88
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/testloader.py534
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/testrunner.py984
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/__init__.py9
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/base.py63
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/browsers/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/browsers/test_sauce.py170
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/browsers/test_webkitgtk.py74
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_executors.py17
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_expectedtree.py120
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_formatters.py152
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_manifestexpected.py36
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_metadata.py47
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_products.py57
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_stability.py186
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_testloader.py95
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_update.py1853
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_wpttest.py232
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/update/__init__.py47
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/update/base.py69
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/update/metadata.py62
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/update/state.py159
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/update/sync.py150
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/update/tree.py407
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/update/update.py191
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/vcs.py67
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptcommandline.py777
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptlogging.py109
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/__init__.py5
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/base.py221
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/conditional.py402
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/static.py102
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/node.py173
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/parser.py873
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/serializer.py160
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_conditional.py143
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_parser.py155
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_serializer.py356
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_static.py98
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_tokenizer.py385
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wptrunner.py536
-rw-r--r--testing/web-platform/tests/tools/wptrunner/wptrunner/wpttest.py715
-rw-r--r--testing/web-platform/tests/tools/wptserve/.coveragerc11
-rw-r--r--testing/web-platform/tests/tools/wptserve/.gitignore40
-rw-r--r--testing/web-platform/tests/tools/wptserve/MANIFEST.in1
-rw-r--r--testing/web-platform/tests/tools/wptserve/README.md6
-rw-r--r--testing/web-platform/tests/tools/wptserve/docs/Makefile153
-rw-r--r--testing/web-platform/tests/tools/wptserve/docs/conf.py242
-rw-r--r--testing/web-platform/tests/tools/wptserve/docs/handlers.rst108
-rw-r--r--testing/web-platform/tests/tools/wptserve/docs/index.rst27
-rw-r--r--testing/web-platform/tests/tools/wptserve/docs/introduction.rst51
-rw-r--r--testing/web-platform/tests/tools/wptserve/docs/make.bat190
-rw-r--r--testing/web-platform/tests/tools/wptserve/docs/pipes.rst8
-rw-r--r--testing/web-platform/tests/tools/wptserve/docs/request.rst10
-rw-r--r--testing/web-platform/tests/tools/wptserve/docs/response.rst41
-rw-r--r--testing/web-platform/tests/tools/wptserve/docs/router.rst78
-rw-r--r--testing/web-platform/tests/tools/wptserve/docs/server.rst20
-rw-r--r--testing/web-platform/tests/tools/wptserve/docs/stash.rst31
-rw-r--r--testing/web-platform/tests/tools/wptserve/setup.py23
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/base.py148
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/bar.any.worker.js10
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/document.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.html15
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.serviceworker.html15
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.sharedworker.html9
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.worker.html9
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.window.html8
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.worker.html9
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/invalid.py3
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/no_main.py3
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub.sub.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_file_hash.sub.txt6
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_file_hash_subject.txt2
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_file_hash_unrecognized.sub.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_header_or_default.sub.txt2
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_headers.sub.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_headers.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_location.sub.txt8
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_params.sub.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_params.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_url_base.sub.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_uuid.sub.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_var.sub.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/__init__.py0
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/example_module.py2
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/file.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/import_handler.py5
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/sub_path.sub.txt3
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test.asis5
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_h2_data.py2
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_h2_headers.py3
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_h2_headers_data.py6
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_string.py3
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_tuple_2.py2
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_tuple_3.py2
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/with_headers.txt1
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/docroot/with_headers.txt.sub.headers6
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/test_cookies.py66
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/test_handlers.py439
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/test_input_file.py149
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/test_pipes.py233
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/test_request.py183
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/test_response.py323
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/test_server.py118
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/functional/test_stash.py44
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/test_config.py384
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/test_replacement_tokenizer.py38
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/test_request.py104
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/test_response.py32
-rw-r--r--testing/web-platform/tests/tools/wptserve/tests/test_stash.py146
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/__init__.py3
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/config.py328
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/constants.py98
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/handlers.py512
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/logger.py5
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/pipes.py561
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/ranges.py96
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/request.py690
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/response.py818
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/router.py180
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/routes.py6
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/server.py927
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/sslutils/__init__.py16
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/sslutils/base.py19
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/sslutils/openssl.py424
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/sslutils/pregenerated.py28
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/stash.py235
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/utils.py195
-rwxr-xr-xtesting/web-platform/tests/tools/wptserve/wptserve/wptserve.py35
-rw-r--r--testing/web-platform/tests/tools/wptserve/wptserve/ws_h2_handshake.py72
2220 files changed, 542857 insertions, 0 deletions
diff --git a/testing/web-platform/tests/tools/.coveragerc b/testing/web-platform/tests/tools/.coveragerc
new file mode 100644
index 0000000000..3da14ad3b7
--- /dev/null
+++ b/testing/web-platform/tests/tools/.coveragerc
@@ -0,0 +1,29 @@
+[run]
+branch = True
+parallel = True
+omit =
+ html5lib/*
+ py/*
+ pytest/*
+ pywebsocket/*
+ six/*
+ webdriver/*
+ */site-packages/*
+ */lib_pypy/*
+ wpt/*
+ wptrunner/*
+ */tests/*
+ quic/*
+
+[paths]
+html5lib =
+ html5lib/html5lib
+ .tox/**/site-packages/html5lib
+
+pytest =
+ pytest/_pytest
+ .tox/**/site-packages/_pytest
+
+py =
+ py/py
+ .tox/**/site-packages/py
diff --git a/testing/web-platform/tests/tools/.gitignore b/testing/web-platform/tests/tools/.gitignore
new file mode 100644
index 0000000000..c59f948fdb
--- /dev/null
+++ b/testing/web-platform/tests/tools/.gitignore
@@ -0,0 +1,17 @@
+*#
+.coverage
+.coverage.*
+htmlcov/
+coverage.xml
+.cache/
+.hypothesis/
+*.py[co]
+*.sw[po]
+*~
+\#*
+runner/MANIFEST.json
+
+# WAVE
+!wave/www/lib
+!wave/export/lib
+!wave/export/css
diff --git a/testing/web-platform/tests/tools/META.yml b/testing/web-platform/tests/tools/META.yml
new file mode 100644
index 0000000000..db7d4d786b
--- /dev/null
+++ b/testing/web-platform/tests/tools/META.yml
@@ -0,0 +1,4 @@
+suggested_reviewers:
+ - jgraham
+ - foolip
+ - DanielRyanSmith
diff --git a/testing/web-platform/tests/tools/__init__.py b/testing/web-platform/tests/tools/__init__.py
new file mode 100644
index 0000000000..8110d5952a
--- /dev/null
+++ b/testing/web-platform/tests/tools/__init__.py
@@ -0,0 +1 @@
+from . import localpaths as _localpaths # noqa: F401
diff --git a/testing/web-platform/tests/tools/certs/README.md b/testing/web-platform/tests/tools/certs/README.md
new file mode 100644
index 0000000000..95d9fbd2a8
--- /dev/null
+++ b/testing/web-platform/tests/tools/certs/README.md
@@ -0,0 +1,99 @@
+# WPT Test Certificates
+
+The web-platform-tests project maintains a set of SSL certificates to allow
+contributors to execute tests requiring HTTPS locally.
+
+## Trusting Root CA
+
+To prevent browser SSL warnings when running HTTPS tests locally, the
+web-platform-tests Root CA file `cacert.pem` in the `tools/certs/` directory
+must be added as a trusted certificate in your OS/browser.
+
+For Firefox, go to about:preferences and search for "certificates".
+
+For browsers that use the Certificate Authorities of the underlying OS, such as
+Chrome and Safari, you need to adjust the OS. For macOS, go to Keychain Access
+and add the certificate under **login**.
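+
+On macOS this can also be scripted; the following is a minimal sketch
+(assuming macOS and the default login keychain, run from the root of
+the checkout):
+
+```
+# Trust the WPT root CA for the current user (macOS login keychain)
+security add-trusted-cert -r trustRoot \
+  -k ~/Library/Keychains/login.keychain-db tools/certs/cacert.pem
+```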
+
+**NOTE**: The CA should not be installed in any browser profile used
+outside of tests, since it may be used to generate fake
+certificates. For browsers that use the OS certificate store, tests
+should therefore not be run manually outside a dedicated OS instance
+(e.g. a VM). To avoid this problem when running tests in Chrome or
+Firefox, use `wpt run`, which disables certificate checks and therefore
+doesn't require the root CA to be trusted.
+
+## Regenerating certificates
+
+The easiest way to regenerate the pregenerated certificates is to run
+the command
+
+```
+wpt regen-certs
+```
+
+By default this will not generate new certificates unless the existing
+ones are about to expire. In cases where the certificates need to be
+updated anyway (e.g. because the server configuration changed), this
+can be overridden with `--force`.
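+
+For example, to force regeneration even though the existing
+certificates have not yet expired:
+
+```
+./wpt regen-certs --force
+```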
+
+Generating the certificates requires OpenSSL to be installed.
+
+### Implementation Details
+
+If you wish to manually generate new certificates for any reason, it's
+possible to use OpenSSL when starting the server, or starting a test
+run, by providing the `--ssl-type=openssl` argument to the `wpt serve`
+or `wpt run` commands.
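+
+For example, the following uses OpenSSL-generated certificates for a
+local server:
+
+```
+./wpt serve --ssl-type=openssl
+```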
+
+If you installed OpenSSL in such a way that running `openssl` at a
+command line doesn't work, you also need to adjust the path to the
+OpenSSL binary. This can be done by adding a section to `config.json`
+like:
+
+```
+"ssl": {"openssl": {"binary": "/path/to/openssl"}}
+```
+
+### Windows-specific Instructions
+
+For Windows users, the easiest approach is likely to be using
+[WSL](https://docs.microsoft.com/en-us/windows/wsl/) and generating the
+certificates in a Linux environment. However, it is possible to install
+OpenSSL and generate the certificates without using WSL.
+
+[Shining Light](https://slproweb.com/products/Win32OpenSSL.html)
+provides a convenient installer that is known to work, but requires a
+little extra setup:
+
+Run the installer for Win32_OpenSSL_v1.1.0b (30MB). During installation,
+change the default location for "Copy OpenSSL DLLs" from the Windows
+system directory to the `/bin` directory.
+
+After installation, ensure that the path to OpenSSL (typically
+`C:\OpenSSL-Win32\bin`) is in your `%Path%`
+[Environment Variable](http://www.computerhope.com/issues/ch000549.htm).
+If you skip this step, you will most likely see a 'File Not Found'
+error when you start wptserve.
+
+Finally, set the path value in the server configuration file to the
+default OpenSSL configuration file location. To do this, create a file
+called `config.json`. Then add the OpenSSL configuration below,
+ensuring that the key `ssl/openssl/base_conf_path` has a value that is
+the path to the OpenSSL config file (typically this will be
+`C:\\OpenSSL-Win32\\bin\\openssl.cfg`):
+
+```
+{
+  "ssl": {
+    "type": "openssl",
+    "encrypt_after_connect": false,
+    "openssl": {
+      "openssl_binary": "openssl",
+      "base_path": "_certs",
+      "force_regenerate": false,
+      "base_conf_path": "C:\\OpenSSL-Win32\\bin\\openssl.cfg"
+    }
+  }
+}
+```
diff --git a/testing/web-platform/tests/tools/certs/cacert.key b/testing/web-platform/tests/tools/certs/cacert.key
new file mode 100644
index 0000000000..a02adcacab
--- /dev/null
+++ b/testing/web-platform/tests/tools/certs/cacert.key
@@ -0,0 +1,30 @@
+-----BEGIN ENCRYPTED PRIVATE KEY-----
+MIIFHDBOBgkqhkiG9w0BBQ0wQTApBgkqhkiG9w0BBQwwHAQI6zyhjDZlpjsCAggA
+MAwGCCqGSIb3DQIJBQAwFAYIKoZIhvcNAwcECKlIS+BRd8lLBIIEyCwK12KwvZLZ
+v0G+K0yv6s3h45NNywn2Jn8e8GxSMbsGvUswNlNIzYbloibVnu7KpdZ58D4upbTu
+dJCrxCtvfP44h7aYswgVk7+thutpAl9roeDWe9e3C+iQMA4UHiIiSmSNe5KrLmu2
+DMezpSyQjqX+OJPpL/3Yiwd45Ae4Okd/c6DqnFEMWFdBqWt0Ft8+HOWELTQyQgYs
+8cma4rqDqtX5QBZ4QBPm8hOp7+LhHITV1qpyaa/nnQQfZx/e1Cyq/h/hcnkbrwuE
+I26s7hcpCaFXN7mSt7GJcZAt5oBqzmDw735iM+FlShokdBsYJEQqWaOVfuzeQYYx
+xt0fz8CReOskIg06jJMXyiWPauutHpiNmQTdcFAE9oO2pUJQCZ3ytaSYG3oP0Byb
+QgEyxhiw6mKv64ZtiAeHPBv7nBl/UPlOSNrXQ+QIyimohdQ+QFeay0eSPgnBwC/H
+MnRaKG6341j5Tk3Itl/5cB6b9VZxvaDgvQNmRNf82EtqCj3YDx7cJF19C7Ut1M0W
+mGLYkudLXne+pLkCWuhKWKEjDp+uDQHF7Zq2q3+l1Wk4DROCtEIO3ruOBLYgNUQf
+flEA6FCw94YlrGKzjRzusXGvBriSrcwlvIc/d55goTZWIZiztPz9c0O93Qg2vYiC
+eJU7D/73byozX0hYOL6d6gWPq4yvbtV/IAZzr+7JuT4j+MVcRkm7lbFhqge/RSbf
+q2iwOOKLs3bpNlMI8S2jtBxPqZ5U/B/eI5slap8Z3xbMuqVAJr4vW4kDGC2WCDFG
+3PXtJvfk4+slbU6cNSzHVk88cS73fRzB4IYyU/txNicyEL0R2iHm7scYKRFJqcKX
+OnDqy4oc5afdv1jfmjb6hA2Xx6T5rC8FMcHBx0M6gi9Y6A/7OadHcRyZySG3F7Xq
++3eub3bwrLwOBEnpm4nl0ucU19WS8kI64gvZbDoQf9kxvOHHFZmjYC0KPFruEWG2
+jFq6xINzA9oTCsR7KWsn+/q3zu5yic9lHtQFN2WnzDzuDoZaVvLY/xqq61ZfROHT
+Q14CfWQMXHwXOn/k9knkm2pNzjti07b+v/X2Wb3eYp85ysn+/jT6ubq4qCtXwHRU
+++IZFpeL3xiuW6uyv+d1Am7Jt/m1JR743WFEOcLV3DZYikbzqG0Y2rWN0L95uhUm
+eCxGLhPoj+j32dSgLhyK/R2n/ka2Ggp+YD+BAyctNUdpVVoqp4IOdCurPBNzNjAv
+Z1qf2SJPKtIk4072VhyuOVUrz8eEQP5+gTUTMNMxFu001EVPrLtAH15KqBay2WAg
+7tdiBZhvXZuICODwMHl8T+hOfLKYGRvxzsjhLU9Ja2b/0jyblt+9pGIwR4RcOBnO
+iZTjKeUlq0R+5P5juqiOSEsB1gazaqu8t/xPzILAxBKz2PypeiVgDLxoRZaa2jGZ
+4SAbE53d58NteP6N0wNPtMix0n2JfH1iwdghHXPF+BS+OHjl+4SriJJw+ylmIpjW
+Sj5u5vQFyl+vnxCiAvo0396MsNsuvIgw8LIcDCI+xgtziYhXapi7vz+U0v2MnglG
+GIVsfsJpMV92B7ch9OodKzitrBZxMRhnKOGekNkjJaGK/iBiRFbDCfnVqWK0Qete
+hPlZzINynSbHPifLVeyfZg==
+-----END ENCRYPTED PRIVATE KEY-----
diff --git a/testing/web-platform/tests/tools/certs/cacert.pem b/testing/web-platform/tests/tools/certs/cacert.pem
new file mode 100644
index 0000000000..101ed61741
--- /dev/null
+++ b/testing/web-platform/tests/tools/certs/cacert.pem
@@ -0,0 +1,125 @@
+-----BEGIN CERTIFICATE-----
+MIIW5zCCFc+gAwIBAgIDCrFbMA0GCSqGSIb3DQEBCwUAMB0xGzAZBgNVBAMMEndl
+Yi1wbGF0Zm9ybS10ZXN0czAeFw0yMjA3MTIxMjQ1MTdaFw0yMzA3MTIxMjQ1MTda
+MB0xGzAZBgNVBAMMEndlYi1wbGF0Zm9ybS10ZXN0czCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAOIOEkl98dlmCeI7SCy2wyAALDEblG813gVVErnMjOPl
+o6BpynugeJx7OX85cGJXpn/xiPZi7922W+swGnBCtoe073kMif6WQRylplkJQhqf
+I92IaDecX+f5ROTr7mCcloolt2yRohsAODe1TLZ7VqXouVEMws4+Y5Sp+TMG011Y
+mB+qFEYIjhmvH0w4UoQxz5TBtvj6nkFoRdZRgnLbIkJZ0HVtefy1AhEsOgnz7yba
+pstR97iZTuro0E6hcnMAdEKpXXXSYqV3HYPZzn9YAA8BloAM0GHSS9yWMRoV7UnS
+07YjsorpbE6iQAV/CAWZtBaLmMlNY+cxefEK2NRPqhECAwEAAaOCFC4wghQqMAwG
+A1UdEwQFMAMBAf8wHQYDVR0OBBYEFMo8kufcUDkL5UQ/32+RXOKIKjrWMEcGA1Ud
+IwRAMD6AFMo8kufcUDkL5UQ/32+RXOKIKjrWoSGkHzAdMRswGQYDVQQDDBJ3ZWIt
+cGxhdGZvcm0tdGVzdHOCAwqxWzALBgNVHQ8EBAMCAgQwggoFBgNVHR4Eggn8MIIJ
++KCCCfQwE4IRd2ViLXBsYXRmb3JtLnRlc3QwF4IVd3d3LndlYi1wbGF0Zm9ybS50
+ZXN0MBeCFW5vdC13ZWItcGxhdGZvcm0udGVzdDAYghZ3d3cxLndlYi1wbGF0Zm9y
+bS50ZXN0MBiCFnd3dzIud2ViLXBsYXRmb3JtLnRlc3QwG4IZd3d3Lnd3dy53ZWIt
+cGxhdGZvcm0udGVzdDAbghl3d3cubm90LXdlYi1wbGF0Zm9ybS50ZXN0MByCGnd3
+dy53d3cyLndlYi1wbGF0Zm9ybS50ZXN0MByCGnd3dy53d3cxLndlYi1wbGF0Zm9y
+bS50ZXN0MByCGnd3dzIubm90LXdlYi1wbGF0Zm9ybS50ZXN0MByCGnd3dzEud3d3
+LndlYi1wbGF0Zm9ybS50ZXN0MByCGnd3dzEubm90LXdlYi1wbGF0Zm9ybS50ZXN0
+MByCGnd3dzIud3d3LndlYi1wbGF0Zm9ybS50ZXN0MB2CG3d3dzEud3d3MS53ZWIt
+cGxhdGZvcm0udGVzdDAdght3d3cyLnd3dzEud2ViLXBsYXRmb3JtLnRlc3QwHYIb
+d3d3MS53d3cyLndlYi1wbGF0Zm9ybS50ZXN0MB2CG3d3dzIud3d3Mi53ZWItcGxh
+dGZvcm0udGVzdDAfgh13d3cud3d3Lm5vdC13ZWItcGxhdGZvcm0udGVzdDAggh53
+d3cyLnd3dy5ub3Qtd2ViLXBsYXRmb3JtLnRlc3QwIIIed3d3Lnd3dzEubm90LXdl
+Yi1wbGF0Zm9ybS50ZXN0MCCCHnd3dzEud3d3Lm5vdC13ZWItcGxhdGZvcm0udGVz
+dDAggh54bi0tbHZlLTZsYWQud2ViLXBsYXRmb3JtLnRlc3QwIIIed3d3Lnd3dzIu
+bm90LXdlYi1wbGF0Zm9ybS50ZXN0MCGCH3d3dzEud3d3MS5ub3Qtd2ViLXBsYXRm
+b3JtLnRlc3QwIYIfd3d3Mi53d3cyLm5vdC13ZWItcGxhdGZvcm0udGVzdDAhgh93
+d3cyLnd3dzEubm90LXdlYi1wbGF0Zm9ybS50ZXN0MCGCH3d3dzEud3d3Mi5ub3Qt
+d2ViLXBsYXRmb3JtLnRlc3QwJIIieG4tLWx2ZS02bGFkLm5vdC13ZWItcGxhdGZv
+cm0udGVzdDAkgiJ3d3cueG4tLWx2ZS02bGFkLndlYi1wbGF0Zm9ybS50ZXN0MCSC
+InhuLS1sdmUtNmxhZC53d3cud2ViLXBsYXRmb3JtLnRlc3QwJYIjeG4tLWx2ZS02
+bGFkLnd3dzIud2ViLXBsYXRmb3JtLnRlc3QwJYIjeG4tLWx2ZS02bGFkLnd3dzEu
+d2ViLXBsYXRmb3JtLnRlc3QwJYIjd3d3Mi54bi0tbHZlLTZsYWQud2ViLXBsYXRm
+b3JtLnRlc3QwJYIjd3d3MS54bi0tbHZlLTZsYWQud2ViLXBsYXRmb3JtLnRlc3Qw
+KIImeG4tLWx2ZS02bGFkLnd3dy5ub3Qtd2ViLXBsYXRmb3JtLnRlc3QwKIImd3d3
+LnhuLS1sdmUtNmxhZC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3QwKYIneG4tLWx2ZS02
+bGFkLnd3dzIubm90LXdlYi1wbGF0Zm9ybS50ZXN0MCmCJ3huLS1sdmUtNmxhZC53
+d3cxLm5vdC13ZWItcGxhdGZvcm0udGVzdDApgid3d3cxLnhuLS1sdmUtNmxhZC5u
+b3Qtd2ViLXBsYXRmb3JtLnRlc3QwKYInd3d3Mi54bi0tbHZlLTZsYWQubm90LXdl
+Yi1wbGF0Zm9ybS50ZXN0MCuCKXhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLndlYi1w
+bGF0Zm9ybS50ZXN0MC2CK3huLS1sdmUtNmxhZC54bi0tbHZlLTZsYWQud2ViLXBs
+YXRmb3JtLnRlc3QwL4IteG4tLW44ajZkczUzbHd3a3JxaHYyOGEud3d3LndlYi1w
+bGF0Zm9ybS50ZXN0MC+CLXd3dy54bi0tbjhqNmRzNTNsd3drcnFodjI4YS53ZWIt
+cGxhdGZvcm0udGVzdDAvgi14bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2Vi
+LXBsYXRmb3JtLnRlc3QwMIIud3d3Mi54bi0tbjhqNmRzNTNsd3drcnFodjI4YS53
+ZWItcGxhdGZvcm0udGVzdDAwgi54bi0tbjhqNmRzNTNsd3drcnFodjI4YS53d3cx
+LndlYi1wbGF0Zm9ybS50ZXN0MDCCLnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLnd3
+dzIud2ViLXBsYXRmb3JtLnRlc3QwMIIud3d3MS54bi0tbjhqNmRzNTNsd3drcnFo
+djI4YS53ZWItcGxhdGZvcm0udGVzdDAxgi94bi0tbHZlLTZsYWQueG4tLWx2ZS02
+bGFkLm5vdC13ZWItcGxhdGZvcm0udGVzdDAzgjF4bi0tbjhqNmRzNTNsd3drcnFo
+djI4YS53d3cubm90LXdlYi1wbGF0Zm9ybS50ZXN0MDOCMXd3dy54bi0tbjhqNmRz
+NTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3QwNIIyeG4tLW44ajZk
+czUzbHd3a3JxaHYyOGEud3d3Mi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3QwNIIyeG4t
+LW44ajZkczUzbHd3a3JxaHYyOGEud3d3MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3Qw
+NIIyd3d3Mi54bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3Jt
+LnRlc3QwNIIyd3d3MS54bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBs
+YXRmb3JtLnRlc3QwOII2eG4tLWx2ZS02bGFkLnhuLS1uOGo2ZHM1M2x3d2tycWh2
+MjhhLndlYi1wbGF0Zm9ybS50ZXN0MDiCNnhuLS1uOGo2ZHM1M2x3d2tycWh2Mjhh
+LnhuLS1sdmUtNmxhZC53ZWItcGxhdGZvcm0udGVzdDA8gjp4bi0tbjhqNmRzNTNs
+d3drcnFodjI4YS54bi0tbHZlLTZsYWQubm90LXdlYi1wbGF0Zm9ybS50ZXN0MDyC
+OnhuLS1sdmUtNmxhZC54bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBs
+YXRmb3JtLnRlc3QwQ4JBeG4tLW44ajZkczUzbHd3a3JxaHYyOGEueG4tLW44ajZk
+czUzbHd3a3JxaHYyOGEud2ViLXBsYXRmb3JtLnRlc3QwR4JFeG4tLW44ajZkczUz
+bHd3a3JxaHYyOGEueG4tLW44ajZkczUzbHd3a3JxaHYyOGEubm90LXdlYi1wbGF0
+Zm9ybS50ZXN0MBMGA1UdJQQMMAoGCCsGAQUFBwMBMIIJhQYDVR0RBIIJfDCCCXiC
+EXdlYi1wbGF0Zm9ybS50ZXN0ghV3d3cud2ViLXBsYXRmb3JtLnRlc3SCFW5vdC13
+ZWItcGxhdGZvcm0udGVzdIIWd3d3MS53ZWItcGxhdGZvcm0udGVzdIIWd3d3Mi53
+ZWItcGxhdGZvcm0udGVzdIIZd3d3Lnd3dy53ZWItcGxhdGZvcm0udGVzdIIZd3d3
+Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIad3d3Lnd3dzIud2ViLXBsYXRmb3JtLnRl
+c3SCGnd3dy53d3cxLndlYi1wbGF0Zm9ybS50ZXN0ghp3d3cyLm5vdC13ZWItcGxh
+dGZvcm0udGVzdIIad3d3MS53d3cud2ViLXBsYXRmb3JtLnRlc3SCGnd3dzEubm90
+LXdlYi1wbGF0Zm9ybS50ZXN0ghp3d3cyLnd3dy53ZWItcGxhdGZvcm0udGVzdIIb
+d3d3MS53d3cxLndlYi1wbGF0Zm9ybS50ZXN0ght3d3cyLnd3dzEud2ViLXBsYXRm
+b3JtLnRlc3SCG3d3dzEud3d3Mi53ZWItcGxhdGZvcm0udGVzdIIbd3d3Mi53d3cy
+LndlYi1wbGF0Zm9ybS50ZXN0gh13d3cud3d3Lm5vdC13ZWItcGxhdGZvcm0udGVz
+dIIed3d3Mi53d3cubm90LXdlYi1wbGF0Zm9ybS50ZXN0gh53d3cud3d3MS5ub3Qt
+d2ViLXBsYXRmb3JtLnRlc3SCHnd3dzEud3d3Lm5vdC13ZWItcGxhdGZvcm0udGVz
+dIIeeG4tLWx2ZS02bGFkLndlYi1wbGF0Zm9ybS50ZXN0gh53d3cud3d3Mi5ub3Qt
+d2ViLXBsYXRmb3JtLnRlc3SCH3d3dzEud3d3MS5ub3Qtd2ViLXBsYXRmb3JtLnRl
+c3SCH3d3dzIud3d3Mi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCH3d3dzIud3d3MS5u
+b3Qtd2ViLXBsYXRmb3JtLnRlc3SCH3d3dzEud3d3Mi5ub3Qtd2ViLXBsYXRmb3Jt
+LnRlc3SCInhuLS1sdmUtNmxhZC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCInd3dy54
+bi0tbHZlLTZsYWQud2ViLXBsYXRmb3JtLnRlc3SCInhuLS1sdmUtNmxhZC53d3cu
+d2ViLXBsYXRmb3JtLnRlc3SCI3huLS1sdmUtNmxhZC53d3cyLndlYi1wbGF0Zm9y
+bS50ZXN0giN4bi0tbHZlLTZsYWQud3d3MS53ZWItcGxhdGZvcm0udGVzdIIjd3d3
+Mi54bi0tbHZlLTZsYWQud2ViLXBsYXRmb3JtLnRlc3SCI3d3dzEueG4tLWx2ZS02
+bGFkLndlYi1wbGF0Zm9ybS50ZXN0giZ4bi0tbHZlLTZsYWQud3d3Lm5vdC13ZWIt
+cGxhdGZvcm0udGVzdIImd3d3LnhuLS1sdmUtNmxhZC5ub3Qtd2ViLXBsYXRmb3Jt
+LnRlc3SCJ3huLS1sdmUtNmxhZC53d3cyLm5vdC13ZWItcGxhdGZvcm0udGVzdIIn
+eG4tLWx2ZS02bGFkLnd3dzEubm90LXdlYi1wbGF0Zm9ybS50ZXN0gid3d3cxLnhu
+LS1sdmUtNmxhZC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCJ3d3dzIueG4tLWx2ZS02
+bGFkLm5vdC13ZWItcGxhdGZvcm0udGVzdIIpeG4tLW44ajZkczUzbHd3a3JxaHYy
+OGEud2ViLXBsYXRmb3JtLnRlc3SCK3huLS1sdmUtNmxhZC54bi0tbHZlLTZsYWQu
+d2ViLXBsYXRmb3JtLnRlc3SCLXhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLnd3dy53
+ZWItcGxhdGZvcm0udGVzdIItd3d3LnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLndl
+Yi1wbGF0Zm9ybS50ZXN0gi14bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2Vi
+LXBsYXRmb3JtLnRlc3SCLnd3dzIueG4tLW44ajZkczUzbHd3a3JxaHYyOGEud2Vi
+LXBsYXRmb3JtLnRlc3SCLnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLnd3dzEud2Vi
+LXBsYXRmb3JtLnRlc3SCLnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLnd3dzIud2Vi
+LXBsYXRmb3JtLnRlc3SCLnd3dzEueG4tLW44ajZkczUzbHd3a3JxaHYyOGEud2Vi
+LXBsYXRmb3JtLnRlc3SCL3huLS1sdmUtNmxhZC54bi0tbHZlLTZsYWQubm90LXdl
+Yi1wbGF0Zm9ybS50ZXN0gjF4bi0tbjhqNmRzNTNsd3drcnFodjI4YS53d3cubm90
+LXdlYi1wbGF0Zm9ybS50ZXN0gjF3d3cueG4tLW44ajZkczUzbHd3a3JxaHYyOGEu
+bm90LXdlYi1wbGF0Zm9ybS50ZXN0gjJ4bi0tbjhqNmRzNTNsd3drcnFodjI4YS53
+d3cyLm5vdC13ZWItcGxhdGZvcm0udGVzdIIyeG4tLW44ajZkczUzbHd3a3JxaHYy
+OGEud3d3MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCMnd3dzIueG4tLW44ajZkczUz
+bHd3a3JxaHYyOGEubm90LXdlYi1wbGF0Zm9ybS50ZXN0gjJ3d3cxLnhuLS1uOGo2
+ZHM1M2x3d2tycWh2MjhhLm5vdC13ZWItcGxhdGZvcm0udGVzdII2eG4tLWx2ZS02
+bGFkLnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLndlYi1wbGF0Zm9ybS50ZXN0gjZ4
+bi0tbjhqNmRzNTNsd3drcnFodjI4YS54bi0tbHZlLTZsYWQud2ViLXBsYXRmb3Jt
+LnRlc3SCOnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLnhuLS1sdmUtNmxhZC5ub3Qt
+d2ViLXBsYXRmb3JtLnRlc3SCOnhuLS1sdmUtNmxhZC54bi0tbjhqNmRzNTNsd3dr
+cnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCQXhuLS1uOGo2ZHM1M2x3d2ty
+cWh2MjhhLnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLndlYi1wbGF0Zm9ybS50ZXN0
+gkV4bi0tbjhqNmRzNTNsd3drcnFodjI4YS54bi0tbjhqNmRzNTNsd3drcnFodjI4
+YS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3QwDQYJKoZIhvcNAQELBQADggEBAFgUyc+X
+AMTXjKv+IuKOSKSNp5WsA7tNEK8HHvKyKraX611JTPUejxohoSZv7scL2+8GOMsq
+9I9mmju4l0F45FiphfrR3swVquSM1jK6k9BDTchQU5FQOoBhunvrtwb7Y64/AiCy
+ZRE7nkOtV/6mIjrGjFu97JT0OwbZ+SFw+bNVWmf6RMea550ODCS/jsEo3Scbihya
+oHPCVFk29oEBqOOlWITasEc5Z4IpZRn31mT3YV/+GHfMDJ16lEENwg2iXIrjcU0a
+a0/KCLgRZKBNorlVt3E/3awQVJV2qn4Ei7sMaIjPn4oMen166poZe7oA8mKHaavp
+VKvWQ5Gh5LHGhZs=
+-----END CERTIFICATE-----
diff --git a/testing/web-platform/tests/tools/certs/config.json b/testing/web-platform/tests/tools/certs/config.json
new file mode 100644
index 0000000000..df93c6fc34
--- /dev/null
+++ b/testing/web-platform/tests/tools/certs/config.json
@@ -0,0 +1,17 @@
+{
+ "ports": {
+ "http": [],
+ "https": [],
+ "ws": [],
+ "wss": []
+ },
+ "check_subdomains": false,
+ "ssl": {
+ "type": "openssl",
+ "openssl": {
+ "duration": 365,
+ "force_regenerate": true,
+ "base_path": "tools/certs"
+ }
+ }
+}
diff --git a/testing/web-platform/tests/tools/certs/web-platform.test.key b/testing/web-platform/tests/tools/certs/web-platform.test.key
new file mode 100644
index 0000000000..26b926f7a5
--- /dev/null
+++ b/testing/web-platform/tests/tools/certs/web-platform.test.key
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDf1DWXJxBWd17d
+IofHTgQIBfsq32PiaxjN/89gNhC+rB4RHwhnCX3cuvLPDOvvMpA4GCXAX3pYeDSV
+OBks6xuZNx25BQTvV0ydCDhf7YVkkRZfj1x0bRPknwSNDjCE0NPH40AghmS/h+Tf
+GU80rQDFji0/FapcswmLMmKcB9kl+dvudmaSWqCnWZTpT/kyLBWSd5+mkA6S1br2
+lQSIE6N5O9oaU1jAK1E6jQ2ld3DFM8JINRW9fsx7u5OMu2gds84AbGPc1bPOW9ad
+gFrcxngiYdHGZ0hvYLi7RofbD3OR0CTir4txNd0YXewTcENKBcBb7i2DRP9J8nuy
+5YUhycgXAgMBAAECggEAGMEPbDgVxnpNM4euHIy0RW7j6Enscg4GIEI6XRmdoHjI
+mnwkIcEa+l5ynBOBxGKDMLD6+9uTXFVu+1HI9v7Jfphp4Rx8/ztYbcWK9pD+P7EM
+GGVyhC3MFWzAxBjdpNKzmVe4r+AtJEGTPlk+OUUo4LBosKFkxfvHAm4AGeGEUn6q
+ppoaJbGjjbeKzZFkS6RdHqpJJ4b/o8oOlraHAEZb/vJk31pgokUBV1Hh0BYeLuHA
+ugQT2+a8sX3MwbHC8+6poPR5kpmQUzoVEuN+Iz/ccr9ruUeD10TFcQYl/4gx/gl4
+isjLK3mDCAt7FWGWeVuIjxyBOTrCeyHP0cZza8oKgQKBgQD5MYy3FNXdH9TowU54
+0J+0nAkoSz3w6Tsg9J2jdUtJ+l4yITiFd4p4512q8aVcaTIwQCCGEWxE3Y7Cbt09
+YAK+BLNnXaGQPYomIvt9W8zOrjp/5sB1ac40q5e90Ng9czVHWIGDxE5pEpg8eHza
+PxxdhCGAkEtkahKpYn6nqT7S+wKBgQDl8U0e05DerY36hxSmWVSv+P7ccwzq/FWS
+oke1CUTO2njt/IWiPI/Di5VZl0jQ7vChcHVFpFn/Igr4sWq30fqdRvNjGV78Htyy
+2Jd6XpLSDOHdJr4Ykx0WORenXerdvZB1+OV78I2m1BgVS/OOhOgwK9LhTYlMHVIn
+hU1ASbo0lQKBgQCzoT44M3K+xVVquv3gJTMatGf9fc+Wu+l9pheEXub42tvP8xaV
+T0ioQs/IjAyRgttE4NURD7omIjfofJZKyczwQN86Icp2+YV+XDVBdk06p2lQDXyq
+pqyYVyE5GTTnhwbe+TPTUkzbfSX0YT5O7LN7zeWAkYNnTPdK1s9J+LxqxQKBgQCR
+vzUmyOsWeeN4XI6WMiEfuS+yA6q4d2C2JO5WrZ1Tj97+2gTKJKDrTVpJAz9AT3Nu
+oJ10mwwAT0+s+qeuBAZaElLLCZXPMLCrG+ZHxhJB2bCAQMJVSUoyf9WiXEE2c6VR
+YYXz3TGsANuTBCVIvK1/ewebj03rVNpx+mhQy2qfUQKBgQDYzRKBO21bpw0IcD1v
+r35D688y55bG2vTjV2vFoUpanLV6gftY473C/gwNJXAs69uJIeT9AM+KE6n/3bEK
+8rS8M6TcN8I3cRYwNnx+OPFF+YD8KVsbv8I2y2Gu1xGrMd+PyAHYnQmXcaYqN7eU
+hqH7BXYpwNm96Lm4De1W+tXPJA==
+-----END PRIVATE KEY-----
diff --git a/testing/web-platform/tests/tools/certs/web-platform.test.pem b/testing/web-platform/tests/tools/certs/web-platform.test.pem
new file mode 100644
index 0000000000..be1bfd5a1f
--- /dev/null
+++ b/testing/web-platform/tests/tools/certs/web-platform.test.pem
@@ -0,0 +1,133 @@
+Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 700764 (0xab15c)
+ Signature Algorithm: sha256WithRSAEncryption
+ Issuer: CN=web-platform-tests
+ Validity
+ Not Before: Jul 12 12:45:17 2022 GMT
+ Not After : Jul 12 12:45:17 2023 GMT
+ Subject: CN=web-platform.test
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ RSA Public-Key: (2048 bit)
+ Modulus:
+ 00:df:d4:35:97:27:10:56:77:5e:dd:22:87:c7:4e:
+ 04:08:05:fb:2a:df:63:e2:6b:18:cd:ff:cf:60:36:
+ 10:be:ac:1e:11:1f:08:67:09:7d:dc:ba:f2:cf:0c:
+ eb:ef:32:90:38:18:25:c0:5f:7a:58:78:34:95:38:
+ 19:2c:eb:1b:99:37:1d:b9:05:04:ef:57:4c:9d:08:
+ 38:5f:ed:85:64:91:16:5f:8f:5c:74:6d:13:e4:9f:
+ 04:8d:0e:30:84:d0:d3:c7:e3:40:20:86:64:bf:87:
+ e4:df:19:4f:34:ad:00:c5:8e:2d:3f:15:aa:5c:b3:
+ 09:8b:32:62:9c:07:d9:25:f9:db:ee:76:66:92:5a:
+ a0:a7:59:94:e9:4f:f9:32:2c:15:92:77:9f:a6:90:
+ 0e:92:d5:ba:f6:95:04:88:13:a3:79:3b:da:1a:53:
+ 58:c0:2b:51:3a:8d:0d:a5:77:70:c5:33:c2:48:35:
+ 15:bd:7e:cc:7b:bb:93:8c:bb:68:1d:b3:ce:00:6c:
+ 63:dc:d5:b3:ce:5b:d6:9d:80:5a:dc:c6:78:22:61:
+ d1:c6:67:48:6f:60:b8:bb:46:87:db:0f:73:91:d0:
+ 24:e2:af:8b:71:35:dd:18:5d:ec:13:70:43:4a:05:
+ c0:5b:ee:2d:83:44:ff:49:f2:7b:b2:e5:85:21:c9:
+ c8:17
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Basic Constraints:
+ CA:FALSE
+ X509v3 Subject Key Identifier:
+ 53:34:14:49:85:A6:DE:7F:A4:F6:2C:42:A9:1A:9C:F7:D9:20:50:C1
+ X509v3 Authority Key Identifier:
+ keyid:CA:3C:92:E7:DC:50:39:0B:E5:44:3F:DF:6F:91:5C:E2:88:2A:3A:D6
+
+ X509v3 Key Usage:
+ Digital Signature, Non Repudiation, Key Encipherment
+ X509v3 Extended Key Usage:
+ TLS Web Server Authentication
+ X509v3 Subject Alternative Name:
+ DNS:web-platform.test, DNS:www.web-platform.test, DNS:not-web-platform.test, DNS:www1.web-platform.test, DNS:www2.web-platform.test, DNS:www.www.web-platform.test, DNS:www.not-web-platform.test, DNS:www.www2.web-platform.test, DNS:www.www1.web-platform.test, DNS:www2.not-web-platform.test, DNS:www1.www.web-platform.test, DNS:www1.not-web-platform.test, DNS:www2.www.web-platform.test, DNS:www1.www1.web-platform.test, DNS:www2.www1.web-platform.test, DNS:www1.www2.web-platform.test, DNS:www2.www2.web-platform.test, DNS:www.www.not-web-platform.test, DNS:www2.www.not-web-platform.test, DNS:www.www1.not-web-platform.test, DNS:www1.www.not-web-platform.test, DNS:xn--lve-6lad.web-platform.test, DNS:www.www2.not-web-platform.test, DNS:www1.www1.not-web-platform.test, DNS:www2.www2.not-web-platform.test, DNS:www2.www1.not-web-platform.test, DNS:www1.www2.not-web-platform.test, DNS:xn--lve-6lad.not-web-platform.test, DNS:www.xn--lve-6lad.web-platform.test, DNS:xn--lve-6lad.www.web-platform.test, DNS:xn--lve-6lad.www2.web-platform.test, DNS:xn--lve-6lad.www1.web-platform.test, DNS:www2.xn--lve-6lad.web-platform.test, DNS:www1.xn--lve-6lad.web-platform.test, DNS:xn--lve-6lad.www.not-web-platform.test, DNS:www.xn--lve-6lad.not-web-platform.test, DNS:xn--lve-6lad.www2.not-web-platform.test, DNS:xn--lve-6lad.www1.not-web-platform.test, DNS:www1.xn--lve-6lad.not-web-platform.test, DNS:www2.xn--lve-6lad.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--lve-6lad.xn--lve-6lad.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www.web-platform.test, DNS:www.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:www2.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www1.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www2.web-platform.test, DNS:www1.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--lve-6lad.xn--lve-6lad.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www.not-web-platform.test, DNS:www.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www2.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www1.not-web-platform.test, DNS:www2.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:www1.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:xn--lve-6lad.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.xn--lve-6lad.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.xn--lve-6lad.not-web-platform.test, DNS:xn--lve-6lad.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test
+ Signature Algorithm: sha256WithRSAEncryption
+ 74:00:12:c7:43:34:fd:87:4e:f8:67:58:34:e2:c7:42:be:a8:
+ fc:61:df:11:9c:77:18:d5:0b:e6:92:a1:c2:d9:9b:20:a5:ee:
+ e8:81:ab:62:fb:55:67:35:95:fa:8b:b6:a5:3b:52:f7:d4:21:
+ e8:7c:b5:37:12:58:02:24:b5:7e:93:4f:6c:ce:5b:4b:af:8f:
+ fc:22:e2:03:2d:47:d2:87:43:c0:de:82:a7:5b:6a:97:2f:e9:
+ f7:46:f4:41:d2:35:07:62:c5:a4:e8:08:a2:90:1d:44:4a:3d:
+ 71:d6:c5:f5:40:2d:99:0d:33:4f:0f:31:69:f7:02:bd:c5:44:
+ b9:42:9e:f2:94:36:44:66:b8:b8:81:99:06:d2:b7:df:cb:b0:
+ 01:c8:5b:64:15:a4:75:78:da:71:b1:92:01:5b:55:7c:76:94:
+ a2:f3:1a:23:78:37:e5:65:51:0f:ca:66:91:d4:62:1b:b7:22:
+ d2:75:37:7e:67:ac:b5:9d:b3:50:c8:db:76:29:a1:73:21:74:
+ 71:9e:25:de:25:2a:2c:20:fb:e8:68:eb:9f:fc:f1:a7:85:69:
+ a4:a8:0d:93:4b:ff:f5:9d:32:31:d3:ca:7e:0e:79:f3:45:32:
+ d8:fc:35:12:d4:08:71:fd:e2:0e:64:55:a2:5a:f4:2f:be:19:
+ f1:2d:be:3f
+-----BEGIN CERTIFICATE-----
+MIIMsjCCC5qgAwIBAgIDCrFcMA0GCSqGSIb3DQEBCwUAMB0xGzAZBgNVBAMMEndl
+Yi1wbGF0Zm9ybS10ZXN0czAeFw0yMjA3MTIxMjQ1MTdaFw0yMzA3MTIxMjQ1MTda
+MBwxGjAYBgNVBAMMEXdlYi1wbGF0Zm9ybS50ZXN0MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEA39Q1lycQVnde3SKHx04ECAX7Kt9j4msYzf/PYDYQvqwe
+ER8IZwl93Lryzwzr7zKQOBglwF96WHg0lTgZLOsbmTcduQUE71dMnQg4X+2FZJEW
+X49cdG0T5J8EjQ4whNDTx+NAIIZkv4fk3xlPNK0AxY4tPxWqXLMJizJinAfZJfnb
+7nZmklqgp1mU6U/5MiwVknefppAOktW69pUEiBOjeTvaGlNYwCtROo0NpXdwxTPC
+SDUVvX7Me7uTjLtoHbPOAGxj3NWzzlvWnYBa3MZ4ImHRxmdIb2C4u0aH2w9zkdAk
+4q+LcTXdGF3sE3BDSgXAW+4tg0T/SfJ7suWFIcnIFwIDAQABo4IJ+jCCCfYwCQYD
+VR0TBAIwADAdBgNVHQ4EFgQUUzQUSYWm3n+k9ixCqRqc99kgUMEwHwYDVR0jBBgw
+FoAUyjyS59xQOQvlRD/fb5Fc4ogqOtYwCwYDVR0PBAQDAgXgMBMGA1UdJQQMMAoG
+CCsGAQUFBwMBMIIJhQYDVR0RBIIJfDCCCXiCEXdlYi1wbGF0Zm9ybS50ZXN0ghV3
+d3cud2ViLXBsYXRmb3JtLnRlc3SCFW5vdC13ZWItcGxhdGZvcm0udGVzdIIWd3d3
+MS53ZWItcGxhdGZvcm0udGVzdIIWd3d3Mi53ZWItcGxhdGZvcm0udGVzdIIZd3d3
+Lnd3dy53ZWItcGxhdGZvcm0udGVzdIIZd3d3Lm5vdC13ZWItcGxhdGZvcm0udGVz
+dIIad3d3Lnd3dzIud2ViLXBsYXRmb3JtLnRlc3SCGnd3dy53d3cxLndlYi1wbGF0
+Zm9ybS50ZXN0ghp3d3cyLm5vdC13ZWItcGxhdGZvcm0udGVzdIIad3d3MS53d3cu
+d2ViLXBsYXRmb3JtLnRlc3SCGnd3dzEubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghp3
+d3cyLnd3dy53ZWItcGxhdGZvcm0udGVzdIIbd3d3MS53d3cxLndlYi1wbGF0Zm9y
+bS50ZXN0ght3d3cyLnd3dzEud2ViLXBsYXRmb3JtLnRlc3SCG3d3dzEud3d3Mi53
+ZWItcGxhdGZvcm0udGVzdIIbd3d3Mi53d3cyLndlYi1wbGF0Zm9ybS50ZXN0gh13
+d3cud3d3Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIed3d3Mi53d3cubm90LXdlYi1w
+bGF0Zm9ybS50ZXN0gh53d3cud3d3MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCHnd3
+dzEud3d3Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIeeG4tLWx2ZS02bGFkLndlYi1w
+bGF0Zm9ybS50ZXN0gh53d3cud3d3Mi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCH3d3
+dzEud3d3MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCH3d3dzIud3d3Mi5ub3Qtd2Vi
+LXBsYXRmb3JtLnRlc3SCH3d3dzIud3d3MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC
+H3d3dzEud3d3Mi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCInhuLS1sdmUtNmxhZC5u
+b3Qtd2ViLXBsYXRmb3JtLnRlc3SCInd3dy54bi0tbHZlLTZsYWQud2ViLXBsYXRm
+b3JtLnRlc3SCInhuLS1sdmUtNmxhZC53d3cud2ViLXBsYXRmb3JtLnRlc3SCI3hu
+LS1sdmUtNmxhZC53d3cyLndlYi1wbGF0Zm9ybS50ZXN0giN4bi0tbHZlLTZsYWQu
+d3d3MS53ZWItcGxhdGZvcm0udGVzdIIjd3d3Mi54bi0tbHZlLTZsYWQud2ViLXBs
+YXRmb3JtLnRlc3SCI3d3dzEueG4tLWx2ZS02bGFkLndlYi1wbGF0Zm9ybS50ZXN0
+giZ4bi0tbHZlLTZsYWQud3d3Lm5vdC13ZWItcGxhdGZvcm0udGVzdIImd3d3Lnhu
+LS1sdmUtNmxhZC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCJ3huLS1sdmUtNmxhZC53
+d3cyLm5vdC13ZWItcGxhdGZvcm0udGVzdIIneG4tLWx2ZS02bGFkLnd3dzEubm90
+LXdlYi1wbGF0Zm9ybS50ZXN0gid3d3cxLnhuLS1sdmUtNmxhZC5ub3Qtd2ViLXBs
+YXRmb3JtLnRlc3SCJ3d3dzIueG4tLWx2ZS02bGFkLm5vdC13ZWItcGxhdGZvcm0u
+dGVzdIIpeG4tLW44ajZkczUzbHd3a3JxaHYyOGEud2ViLXBsYXRmb3JtLnRlc3SC
+K3huLS1sdmUtNmxhZC54bi0tbHZlLTZsYWQud2ViLXBsYXRmb3JtLnRlc3SCLXhu
+LS1uOGo2ZHM1M2x3d2tycWh2MjhhLnd3dy53ZWItcGxhdGZvcm0udGVzdIItd3d3
+LnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLndlYi1wbGF0Zm9ybS50ZXN0gi14bi0t
+bjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCLnd3dzIu
+eG4tLW44ajZkczUzbHd3a3JxaHYyOGEud2ViLXBsYXRmb3JtLnRlc3SCLnhuLS1u
+OGo2ZHM1M2x3d2tycWh2MjhhLnd3dzEud2ViLXBsYXRmb3JtLnRlc3SCLnhuLS1u
+OGo2ZHM1M2x3d2tycWh2MjhhLnd3dzIud2ViLXBsYXRmb3JtLnRlc3SCLnd3dzEu
+eG4tLW44ajZkczUzbHd3a3JxaHYyOGEud2ViLXBsYXRmb3JtLnRlc3SCL3huLS1s
+dmUtNmxhZC54bi0tbHZlLTZsYWQubm90LXdlYi1wbGF0Zm9ybS50ZXN0gjF4bi0t
+bjhqNmRzNTNsd3drcnFodjI4YS53d3cubm90LXdlYi1wbGF0Zm9ybS50ZXN0gjF3
+d3cueG4tLW44ajZkczUzbHd3a3JxaHYyOGEubm90LXdlYi1wbGF0Zm9ybS50ZXN0
+gjJ4bi0tbjhqNmRzNTNsd3drcnFodjI4YS53d3cyLm5vdC13ZWItcGxhdGZvcm0u
+dGVzdIIyeG4tLW44ajZkczUzbHd3a3JxaHYyOGEud3d3MS5ub3Qtd2ViLXBsYXRm
+b3JtLnRlc3SCMnd3dzIueG4tLW44ajZkczUzbHd3a3JxaHYyOGEubm90LXdlYi1w
+bGF0Zm9ybS50ZXN0gjJ3d3cxLnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLm5vdC13
+ZWItcGxhdGZvcm0udGVzdII2eG4tLWx2ZS02bGFkLnhuLS1uOGo2ZHM1M2x3d2ty
+cWh2MjhhLndlYi1wbGF0Zm9ybS50ZXN0gjZ4bi0tbjhqNmRzNTNsd3drcnFodjI4
+YS54bi0tbHZlLTZsYWQud2ViLXBsYXRmb3JtLnRlc3SCOnhuLS1uOGo2ZHM1M2x3
+d2tycWh2MjhhLnhuLS1sdmUtNmxhZC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCOnhu
+LS1sdmUtNmxhZC54bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRm
+b3JtLnRlc3SCQXhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLnhuLS1uOGo2ZHM1M2x3
+d2tycWh2MjhhLndlYi1wbGF0Zm9ybS50ZXN0gkV4bi0tbjhqNmRzNTNsd3drcnFo
+djI4YS54bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRl
+c3QwDQYJKoZIhvcNAQELBQADggEBAHQAEsdDNP2HTvhnWDTix0K+qPxh3xGcdxjV
+C+aSocLZmyCl7uiBq2L7VWc1lfqLtqU7UvfUIeh8tTcSWAIktX6TT2zOW0uvj/wi
+4gMtR9KHQ8Degqdbapcv6fdG9EHSNQdixaToCKKQHURKPXHWxfVALZkNM08PMWn3
+Ar3FRLlCnvKUNkRmuLiBmQbSt9/LsAHIW2QVpHV42nGxkgFbVXx2lKLzGiN4N+Vl
+UQ/KZpHUYhu3ItJ1N35nrLWds1DI23YpoXMhdHGeJd4lKiwg++ho65/88aeFaaSo
+DZNL//WdMjHTyn4OefNFMtj8NRLUCHH94g5kVaJa9C++GfEtvj8=
+-----END CERTIFICATE-----
diff --git a/testing/web-platform/tests/tools/ci/__init__.py b/testing/web-platform/tests/tools/ci/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/__init__.py
diff --git a/testing/web-platform/tests/tools/ci/azure/README.md b/testing/web-platform/tests/tools/ci/azure/README.md
new file mode 100644
index 0000000000..afe5021efc
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/README.md
@@ -0,0 +1,2 @@
+These are step templates for Azure Pipelines, used in `.azure-pipelines.yml`
+in the root of the repository.
diff --git a/testing/web-platform/tests/tools/ci/azure/affected_tests.yml b/testing/web-platform/tests/tools/ci/azure/affected_tests.yml
new file mode 100644
index 0000000000..ffb1fe1f6f
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/affected_tests.yml
@@ -0,0 +1,29 @@
+parameters:
+ checkoutCommit: ''
+ affectedRange: 'HEAD^1'
+ artifactName: ''
+
+steps:
+- template: checkout.yml
+- ${{ if ne(parameters.checkoutCommit, '') }}:
+ - script: |
+ set -eux -o pipefail
+ git checkout ${{ parameters.checkoutCommit }}
+ displayName: 'Checkout ${{ parameters.checkoutCommit }}'
+- template: pip_install.yml
+ parameters:
+ packages: virtualenv
+- template: install_certs.yml
+- template: install_safari.yml
+- template: update_hosts.yml
+- template: update_manifest.yml
+- script: |
+ set -eux -o pipefail
+ export SYSTEM_VERSION_COMPAT=0
+ ./wpt run --yes --no-pause --no-fail-on-unexpected --no-restart-on-unexpected --affected ${{ parameters.affectedRange }} --log-wptreport $(Build.ArtifactStagingDirectory)/wpt_report.json --log-wptscreenshot $(Build.ArtifactStagingDirectory)/wpt_screenshot.txt --channel preview --kill-safari safari
+ displayName: 'Run tests'
+- task: PublishBuildArtifacts@1
+ displayName: 'Publish results'
+ inputs:
+ artifactName: '${{ parameters.artifactName }}'
+ condition: succeededOrFailed()
diff --git a/testing/web-platform/tests/tools/ci/azure/checkout.yml b/testing/web-platform/tests/tools/ci/azure/checkout.yml
new file mode 100644
index 0000000000..618c571465
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/checkout.yml
@@ -0,0 +1,4 @@
+steps:
+- checkout: self
+ fetchDepth: 50
+ submodules: false
diff --git a/testing/web-platform/tests/tools/ci/azure/com.apple.SafariTechnologyPreview.plist b/testing/web-platform/tests/tools/ci/azure/com.apple.SafariTechnologyPreview.plist
new file mode 100644
index 0000000000..122080972c
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/com.apple.SafariTechnologyPreview.plist
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>AllowRemoteAutomation</key>
+ <true/>
+</dict>
+</plist>
diff --git a/testing/web-platform/tests/tools/ci/azure/fyi_hook.yml b/testing/web-platform/tests/tools/ci/azure/fyi_hook.yml
new file mode 100644
index 0000000000..df92ce1066
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/fyi_hook.yml
@@ -0,0 +1,23 @@
+# This job is used to get a run into wpt.fyi and staging.wpt.fyi, by notifying
+# them with the build number and artifact to use.
+
+parameters:
+ dependsOn: ''
+ artifactName: ''
+
+jobs:
+- job: ${{ parameters.dependsOn }}_hook
+ displayName: 'wpt.fyi hook: ${{ parameters.artifactName }}'
+ dependsOn: ${{ parameters.dependsOn }}
+ pool:
+ vmImage: 'ubuntu-20.04'
+ steps:
+ - checkout: none
+ - script: |
+ set -eux -o pipefail
+ curl -f -s -S -d "artifact=${{ parameters.artifactName }}" -X POST https://wpt.fyi/api/checks/azure/$(Build.BuildId)
+ displayName: 'Invoke wpt.fyi hook'
+ - script: |
+ set -eux -o pipefail
+ curl -f -s -S -d "artifact=${{ parameters.artifactName }}" -X POST https://staging.wpt.fyi/api/checks/azure/$(Build.BuildId)
+ displayName: 'Invoke staging.wpt.fyi hook'
diff --git a/testing/web-platform/tests/tools/ci/azure/install_certs.yml b/testing/web-platform/tests/tools/ci/azure/install_certs.yml
new file mode 100644
index 0000000000..bee5ab084e
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/install_certs.yml
@@ -0,0 +1,11 @@
+steps:
+- script: |
+ set -eux -o pipefail
+ # https://github.com/web-platform-tests/results-collection/blob/master/src/scripts/trust-root-ca.sh
+ # only run this on macOS < 11
+ [ "11" = "`echo -e $( sw_vers -productVersion )\\\n11 | sort -V | head -n1`" ] || sudo security add-trusted-cert -d -r trustRoot -k /Library/Keychains/System.keychain tools/certs/cacert.pem
+ displayName: 'Install web-platform.test certificate (macOS)'
+ condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin'))
+- script: certutil -addstore -enterprise -f "Root" tools\certs\cacert.pem
+ displayName: 'Install web-platform.test certificate (Windows)'
+ condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT'))
diff --git a/testing/web-platform/tests/tools/ci/azure/install_chrome.yml b/testing/web-platform/tests/tools/ci/azure/install_chrome.yml
new file mode 100644
index 0000000000..7599321be2
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/install_chrome.yml
@@ -0,0 +1,11 @@
+steps:
+# The conflicting google-chrome and chromedriver casks are first uninstalled.
+# The raw google-chrome-dev cask URL is used to bypass caching.
+- script: |
+ set -eux -o pipefail
+ HOMEBREW_NO_AUTO_UPDATE=1 brew uninstall --cask google-chrome || true
+ HOMEBREW_NO_AUTO_UPDATE=1 brew uninstall --cask chromedriver || true
+ curl https://raw.githubusercontent.com/Homebrew/homebrew-cask-versions/master/Casks/google-chrome-dev.rb > google-chrome-dev.rb
+ HOMEBREW_NO_AUTO_UPDATE=1 brew install --cask google-chrome-dev.rb
+ displayName: 'Install Chrome Dev'
+ condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin'))
diff --git a/testing/web-platform/tests/tools/ci/azure/install_edge.yml b/testing/web-platform/tests/tools/ci/azure/install_edge.yml
new file mode 100644
index 0000000000..297b6e60b2
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/install_edge.yml
@@ -0,0 +1,61 @@
+parameters:
+ channel: dev
+
+# Should match https://web-platform-tests.org/running-tests/chrome.html
+# Just replace chrome with edgechromium
+steps:
+- ${{ if eq(parameters.channel, 'stable') }}:
+ - powershell: |
+ $edgeInstallerName = 'MicrosoftEdgeSetup.exe'
+ # Link to Stable channel installer
+ Start-BitsTransfer -Source 'https://go.microsoft.com/fwlink/?linkid=2108834&Channel=Stable&language=en' -Destination MicrosoftEdgeSetup.exe
+ if (-not (Test-Path $edgeInstallerName)) {
+ Throw "Failed to download Edge installer to $edgeInstallerName."
+ }
+ cmd /c START /WAIT $edgeInstallerName /silent /install
+ $edgePath = "$env:systemdrive\Program Files (x86)\Microsoft\Edge\Application"
+ if (Test-Path $edgePath) {
+ Write-Host "##vso[task.prependpath]$edgePath"
+ Write-Host "Edge Stable installed at $edgePath."
+ (Get-Item -Path "$edgePath\msedge.exe").VersionInfo | Format-List
+ } else {
+ Copy-Item -Path "$env:temp\*edge*.log" -Destination $(Build.ArtifactStagingDirectory) -Force
+ Throw "Failed to install Edge at $edgePath"
+ }
+ displayName: 'Install Edge Stable'
+
+- ${{ if eq(parameters.channel, 'canary') }}:
+ - powershell: |
+ $edgeInstallerName = 'MicrosoftEdgeSetup.exe'
+ # Link to Canary channel installer
+ Start-BitsTransfer -Source 'https://go.microsoft.com/fwlink/?linkid=2084649&Channel=Canary&language=en-us' -Destination MicrosoftEdgeSetup.exe
+ if (-not (Test-Path $edgeInstallerName)) {
+ Throw "Failed to download Edge installer to $edgeInstallerName."
+ }
+ cmd /c START /WAIT $edgeInstallerName /silent /install
+ $edgePath = "$env:localappdata\Microsoft\Edge SxS\Application"
+ if (Test-Path $edgePath) {
+ Write-Host "##vso[task.prependpath]$edgePath"
+ Write-Host "Edge Canary installed at $edgePath."
+ (Get-Item -Path "$edgePath\msedge.exe").VersionInfo | Format-List
+ } else {
+ Copy-Item -Path "$env:temp\*edge*.log" -Destination $(Build.ArtifactStagingDirectory) -Force
+ Throw "Failed to install Edge Canary at $edgePath"
+ }
+ displayName: 'Install Edge Canary'
+- ${{ if eq(parameters.channel, 'dev') }}:
+ - powershell: |
+ $edgeInstallerName = 'MicrosoftEdgeSetup.exe'
+ # Link to Dev channel installer
+ Start-BitsTransfer -Source 'https://go.microsoft.com/fwlink/?linkid=2069324&Channel=Dev&language=en-us' -Destination MicrosoftEdgeSetup.exe
+ cmd /c START /WAIT $edgeInstallerName /silent /install
+ $edgePath = "$env:systemdrive\Program Files (x86)\Microsoft\Edge Dev\Application"
+ if (Test-Path $edgePath) {
+ Write-Host "##vso[task.prependpath]$edgePath"
+        Write-Host "Edge Dev installed at $edgePath."
+ (Get-Item -Path "$edgePath\msedge.exe").VersionInfo | Format-List
+ } else {
+ Copy-Item -Path "$env:temp\*edge*.log" -Destination $(Build.ArtifactStagingDirectory) -Force
+ Throw "Failed to install Edge Dev at $edgePath"
+ }
+ displayName: 'Install Edge Dev'
diff --git a/testing/web-platform/tests/tools/ci/azure/install_firefox.yml b/testing/web-platform/tests/tools/ci/azure/install_firefox.yml
new file mode 100644
index 0000000000..73af597665
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/install_firefox.yml
@@ -0,0 +1,9 @@
+steps:
+# This is equivalent to `Homebrew/homebrew-cask-versions/firefox-nightly`,
+# but the raw URL is used to bypass caching.
+- script: |
+ set -eux -o pipefail
+ curl https://raw.githubusercontent.com/Homebrew/homebrew-cask-versions/master/Casks/firefox-nightly.rb > firefox-nightly.rb
+ HOMEBREW_NO_AUTO_UPDATE=1 brew install --cask firefox-nightly.rb
+ displayName: 'Install Firefox Nightly'
+ condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin'))
diff --git a/testing/web-platform/tests/tools/ci/azure/install_fonts.yml b/testing/web-platform/tests/tools/ci/azure/install_fonts.yml
new file mode 100644
index 0000000000..279c262c7e
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/install_fonts.yml
@@ -0,0 +1,7 @@
+steps:
+# Installing Ahem in /Library/Fonts instead of using --install-fonts is a
+# workaround for https://github.com/web-platform-tests/wpt/issues/13803.
+- script: |
+ set -eux -o pipefail
+ sudo cp fonts/Ahem.ttf /Library/Fonts
+ displayName: 'Install Ahem font'
diff --git a/testing/web-platform/tests/tools/ci/azure/install_safari.yml b/testing/web-platform/tests/tools/ci/azure/install_safari.yml
new file mode 100644
index 0000000000..1a398532da
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/install_safari.yml
@@ -0,0 +1,29 @@
+parameters:
+ channel: preview
+
+# Should match https://web-platform-tests.org/running-tests/safari.html
+steps:
+- script: defaults write com.apple.WebDriver DiagnosticsEnabled 1
+ displayName: 'Enable safaridriver diagnostics'
+ condition: eq(variables['safaridriver_diagnose'], true)
+- ${{ if eq(parameters.channel, 'preview') }}:
+ - script: |
+ set -eux -o pipefail
+ export SYSTEM_VERSION_COMPAT=0
+ ./wpt install --channel preview --download-only -d . --rename STP safari browser
+ sudo installer -pkg STP.pkg -target LocalSystem
+      # Workaround for `sudo safaridriver --enable` not working on Catalina:
+ # https://github.com/web-platform-tests/wpt/issues/21751
+ mkdir -p ~/Library/WebDriver/
+ cp tools/ci/azure/com.apple.SafariTechnologyPreview.plist ~/Library/WebDriver/
+ defaults write com.apple.SafariTechnologyPreview WebKitJavaScriptCanOpenWindowsAutomatically 1
+ defaults write com.apple.SafariTechnologyPreview ExperimentalServerTimingEnabled 1
+ displayName: 'Install Safari Technology Preview'
+- ${{ if eq(parameters.channel, 'stable') }}:
+ - script: |
+ set -eux -o pipefail
+ export SYSTEM_VERSION_COMPAT=0
+ sudo softwareupdate --install $( softwareupdate -l | grep -o '\* Label: \(Safari.*\)' | sed -e 's/* Label: //' )
+ sudo safaridriver --enable
+ defaults write com.apple.Safari WebKitJavaScriptCanOpenWindowsAutomatically 1
+ displayName: 'Configure Safari'
diff --git a/testing/web-platform/tests/tools/ci/azure/pip_install.yml b/testing/web-platform/tests/tools/ci/azure/pip_install.yml
new file mode 100644
index 0000000000..18d1879e97
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/pip_install.yml
@@ -0,0 +1,6 @@
+parameters:
+ packages: ''
+
+steps:
+- script: pip --disable-pip-version-check install --upgrade ${{ parameters.packages }}
+ displayName: 'Install Python packages'
diff --git a/testing/web-platform/tests/tools/ci/azure/publish_logs.yml b/testing/web-platform/tests/tools/ci/azure/publish_logs.yml
new file mode 100644
index 0000000000..a49397a91a
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/publish_logs.yml
@@ -0,0 +1,7 @@
+steps:
+- task: PublishBuildArtifacts@1
+ displayName: 'Publish safaridriver logs'
+ inputs:
+ pathtoPublish: /Users/runner/Library/Logs/com.apple.WebDriver/
+ artifactName: safaridriver-logs
+ condition: eq(variables['safaridriver_diagnose'], true)
diff --git a/testing/web-platform/tests/tools/ci/azure/sysdiagnose.yml b/testing/web-platform/tests/tools/ci/azure/sysdiagnose.yml
new file mode 100644
index 0000000000..97646f6914
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/sysdiagnose.yml
@@ -0,0 +1,13 @@
+steps:
+- script: |
+ mkdir -p /Users/runner/sysdiagnose
+    # No UI, and no time-sensitive, generated, or archived logs
+ sudo sysdiagnose -ub -PGR -f /Users/runner/sysdiagnose -A sysdiagnose_$(System.JobPositionInPhase)
+ displayName: 'Collect sysdiagnose'
+ condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin'))
+- task: PublishBuildArtifacts@1
+ displayName: 'Publish sysdiagnose'
+ inputs:
+ pathtoPublish: /Users/runner/sysdiagnose/
+ artifactName: sysdiagnose
+ condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin'))
diff --git a/testing/web-platform/tests/tools/ci/azure/system_info.yml b/testing/web-platform/tests/tools/ci/azure/system_info.yml
new file mode 100644
index 0000000000..cc37b35b85
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/system_info.yml
@@ -0,0 +1,4 @@
+steps:
+- script: systeminfo
+ displayName: 'Show system info (Windows)'
+ condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT'))
diff --git a/testing/web-platform/tests/tools/ci/azure/tox_pytest.yml b/testing/web-platform/tests/tools/ci/azure/tox_pytest.yml
new file mode 100644
index 0000000000..3704ecc5be
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/tox_pytest.yml
@@ -0,0 +1,20 @@
+parameters:
+ directory: ''
+ toxenv: 'ALL'
+
+steps:
+- template: pip_install.yml
+ parameters:
+ packages: tox
+
+- script: |
+ set -eux -o pipefail
+ tox -c ${{ parameters.directory }} -e ${{ parameters.toxenv }} -- --junitxml=results.xml
+ displayName: 'Run tests'
+
+- task: PublishTestResults@2
+ inputs:
+ testResultsFiles: '${{ parameters.directory }}/results.xml'
+ testRunTitle: '${{ parameters.directory }}'
+ displayName: 'Publish results'
+ condition: succeededOrFailed()
diff --git a/testing/web-platform/tests/tools/ci/azure/update_hosts.yml b/testing/web-platform/tests/tools/ci/azure/update_hosts.yml
new file mode 100644
index 0000000000..bcb8536a5c
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/update_hosts.yml
@@ -0,0 +1,12 @@
+steps:
+- script: |
+ set -eux -o pipefail
+ ./wpt make-hosts-file | sudo tee -a /etc/hosts
+ displayName: 'Update hosts (macOS)'
+ condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin'))
+- powershell: |
+ $hostFile = "$env:systemroot\System32\drivers\etc\hosts"
+ Copy-Item -Path $hostFile -Destination "$hostFile.back" -Force
+ python wpt make-hosts-file | Out-File $env:systemroot\System32\drivers\etc\hosts -Encoding ascii -Append
+ displayName: 'Update hosts (Windows)'
+ condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT'))
diff --git a/testing/web-platform/tests/tools/ci/azure/update_manifest.yml b/testing/web-platform/tests/tools/ci/azure/update_manifest.yml
new file mode 100644
index 0000000000..453ac2ac3c
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/azure/update_manifest.yml
@@ -0,0 +1,4 @@
+steps:
+# `python wpt` instead of `./wpt` is to make this work on Windows:
+- script: python wpt manifest
+ displayName: 'Update manifest'
diff --git a/testing/web-platform/tests/tools/ci/ci_built_diff.sh b/testing/web-platform/tests/tools/ci/ci_built_diff.sh
new file mode 100755
index 0000000000..fad946d297
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/ci_built_diff.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+set -ex
+
+SCRIPT_DIR=$(cd $(dirname "$0") && pwd -P)
+WPT_ROOT=$SCRIPT_DIR/../..
+cd $WPT_ROOT
+
+main() {
+ # Diff PNGs based on pixel-for-pixel identity
+ echo -e '[diff "img"]\n textconv = identify -quiet -format "%#"' >> .git/config
+ echo -e '*.png diff=img' >> .git/info/attributes
+
+ # Exclude tests that rely on font rendering
+ excluded=(
+ 'html/canvas/element/drawing-text-to-the-canvas/2d.text.draw.fill.basic.png'
+ 'html/canvas/element/drawing-text-to-the-canvas/2d.text.draw.fill.maxWidth.large.png'
+ 'html/canvas/element/drawing-text-to-the-canvas/2d.text.draw.fill.rtl.png'
+ 'html/canvas/element/drawing-text-to-the-canvas/2d.text.draw.stroke.basic.png'
+ 'html/canvas/offscreen/text/2d.text.draw.fill.basic.png'
+ 'html/canvas/offscreen/text/2d.text.draw.fill.maxWidth.large.png'
+ 'html/canvas/offscreen/text/2d.text.draw.fill.rtl.png'
+ 'html/canvas/offscreen/text/2d.text.draw.stroke.basic.png'
+ )
+
+ ./wpt update-built
+ git update-index --assume-unchanged ${excluded[*]}
+ git diff --exit-code
+}
+
+main
diff --git a/testing/web-platform/tests/tools/ci/ci_resources_unittest.sh b/testing/web-platform/tests/tools/ci/ci_resources_unittest.sh
new file mode 100755
index 0000000000..11190fc58d
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/ci_resources_unittest.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+set -ex
+
+SCRIPT_DIR=$(cd $(dirname "$0") && pwd -P)
+WPT_ROOT=$SCRIPT_DIR/../..
+cd $WPT_ROOT
+
+main() {
+ cd $WPT_ROOT
+ pip install --user -U tox
+ ./wpt install firefox browser --destination $HOME
+ ./wpt install firefox webdriver --destination $HOME/firefox
+ export PATH=$HOME/firefox:$PATH
+
+ cd $WPT_ROOT/resources/test
+ tox -- --binary=$HOME/browsers/nightly/firefox/firefox
+}
+
+main
diff --git a/testing/web-platform/tests/tools/ci/ci_tools_integration_test.sh b/testing/web-platform/tests/tools/ci/ci_tools_integration_test.sh
new file mode 100755
index 0000000000..76f682e254
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/ci_tools_integration_test.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+set -e
+
+SCRIPT_DIR=$(cd $(dirname "$0") && pwd -P)
+WPT_ROOT=$SCRIPT_DIR/../..
+cd $WPT_ROOT
+
+main() {
+ git fetch --quiet --unshallow https://github.com/web-platform-tests/wpt.git +refs/heads/*:refs/remotes/origin/*
+ pip install --user -U tox
+
+ # wpt commands integration tests
+ cd tools/wpt
+ tox
+ cd $WPT_ROOT
+
+ # WMAS test runner integration tests
+ cd tools/wave
+ tox
+ cd $WPT_ROOT
+}
+
+main
diff --git a/testing/web-platform/tests/tools/ci/ci_tools_unittest.sh b/testing/web-platform/tests/tools/ci/ci_tools_unittest.sh
new file mode 100755
index 0000000000..8e16ee18de
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/ci_tools_unittest.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+set -ex
+
+SCRIPT_DIR=$(cd $(dirname "$0") && pwd -P)
+WPT_ROOT=$SCRIPT_DIR/../..
+cd $WPT_ROOT
+
+run_applicable_tox () {
+ # instead of just running TOXENV (e.g., py38)
+ # run all environments that start with TOXENV
+ # (e.g., py38-firefox as well as py38)
+ local OLD_TOXENV="$TOXENV"
+ unset TOXENV
+ local RUN_ENVS=$(tox -l | grep "^${OLD_TOXENV}\(\-\|\$\)" | tr "\n" ",")
+ if [[ -n "$RUN_ENVS" ]]; then
+ tox -e "$RUN_ENVS"
+ fi
+ export TOXENV="$OLD_TOXENV"
+}
+
+if ./wpt test-jobs --includes tools_unittest; then
+ pip install --user -U tox
+ cd tools
+ run_applicable_tox
+ cd $WPT_ROOT
+else
+ echo "Skipping tools unittest"
+fi
+
+if ./wpt test-jobs --includes wptrunner_unittest; then
+ cd tools/wptrunner
+ run_applicable_tox
+ cd $WPT_ROOT
+else
+ echo "Skipping wptrunner unittest"
+fi
diff --git a/testing/web-platform/tests/tools/ci/ci_wptrunner_infrastructure.sh b/testing/web-platform/tests/tools/ci/ci_wptrunner_infrastructure.sh
new file mode 100755
index 0000000000..ef61601bdc
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/ci_wptrunner_infrastructure.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+set -ex
+
+SCRIPT_DIR=$(cd $(dirname "$0") && pwd -P)
+WPT_ROOT=$SCRIPT_DIR/../..
+cd $WPT_ROOT
+
+test_infrastructure() {
+ TERM=dumb ./wpt run --log-mach - --yes --manifest ~/meta/MANIFEST.json --metadata infrastructure/metadata/ --install-fonts --install-webdriver $1 $PRODUCT infrastructure/
+}
+
+main() {
+ PRODUCTS=( "firefox" "chrome" )
+ ./wpt manifest --rebuild -p ~/meta/MANIFEST.json
+ for PRODUCT in "${PRODUCTS[@]}"; do
+ if [[ "$PRODUCT" == "chrome" ]]; then
+ # Taskcluster machines do not have GPUs, so use software rendering via --enable-swiftshader.
+ test_infrastructure "--binary=$(which google-chrome-unstable) --enable-swiftshader --channel dev" "$1"
+ else
+ test_infrastructure "--binary=~/build/firefox/firefox" "$1"
+ fi
+ done
+}
+
+main $1
diff --git a/testing/web-platform/tests/tools/ci/commands.json b/testing/web-platform/tests/tools/ci/commands.json
new file mode 100644
index 0000000000..5946314496
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/commands.json
@@ -0,0 +1,73 @@
+{
+ "test-jobs": {
+ "path": "jobs.py",
+ "script": "run",
+ "parser": "create_parser",
+ "help": "List test jobs that should run for a set of commits",
+ "virtualenv": false
+ },
+ "make-hosts-file": {
+ "path": "make_hosts_file.py",
+ "script": "run",
+ "parser": "create_parser",
+ "help": "Output a hosts file to stdout",
+ "virtualenv": false
+ },
+ "regen-certs": {
+ "path": "regen_certs.py",
+ "script": "run",
+ "parser": "get_parser",
+ "help": "Regenerate the WPT certificates",
+ "virtualenv": false
+ },
+ "update-built": {
+ "path": "update_built.py",
+ "script": "run",
+ "parser": "get_parser",
+ "help": "Update built tests",
+ "virtualenv": true,
+ "requirements": [
+ "requirements_build.txt"
+ ]
+ },
+ "tc-download": {
+ "path": "tc/download.py",
+ "script": "run",
+ "parser": "get_parser",
+ "parse_known": true,
+ "help": "Download logs from taskcluster",
+ "virtualenv": true,
+ "requirements": [
+ "requirements_tc.txt"
+ ]
+ },
+ "tc-taskgraph": {
+ "path": "tc/taskgraph.py",
+ "script": "run",
+ "help": "Build the taskgraph",
+ "virtualenv": true,
+ "requirements": [
+ "requirements_tc.txt"
+ ]
+ },
+ "tc-decision": {
+ "path": "tc/decision.py",
+ "parser": "get_parser",
+ "script": "run",
+ "help": "Run the decision task",
+ "virtualenv": true,
+ "requirements": [
+ "requirements_tc.txt"
+ ]
+ },
+ "tc-sink-task": {
+ "path": "tc/sink_task.py",
+ "parser": "get_parser",
+ "script": "run",
+ "help": "Run the sink task",
+ "virtualenv": true,
+ "requirements": [
+ "requirements_tc.txt"
+ ]
+ }
+}
diff --git a/testing/web-platform/tests/tools/ci/epochs_update.sh b/testing/web-platform/tests/tools/ci/epochs_update.sh
new file mode 100755
index 0000000000..1c7edf15aa
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/epochs_update.sh
@@ -0,0 +1,58 @@
+#!/bin/bash
+set -eux -o pipefail
+
+SCRIPT_DIR=$(cd $(dirname "$0") && pwd -P)
+WPT_ROOT=$SCRIPT_DIR/../..
+
+EPOCHS=(
+epochs/three_hourly::3h
+epochs/six_hourly::6h
+epochs/twelve_hourly::12h
+epochs/daily::1d
+epochs/weekly::1w
+)
+
+function get_epoch_branch_name () {
+ echo ${1} | awk -F '::' '{print $1}'
+}
+
+function get_epoch_timeval () {
+ echo ${1} | awk -F '::' '{print $2}'
+}
+
+main () {
+ ALL_BRANCHES_NAMES=""
+ for e in "${EPOCHS[@]}";
+ do
+ EPOCH=$(get_epoch_timeval ${e})
+ EPOCH_BRANCH_NAME=$(get_epoch_branch_name ${e})
+ EPOCH_SHA=$(./wpt rev-list --epoch ${EPOCH})
+ if [ "${EPOCH_SHA}" = "" ]; then
+ echo "ERROR: Empty SHA returned from ./wpt rev-list"
+ exit 1
+ fi
+ git branch "${EPOCH_BRANCH_NAME}" "${EPOCH_SHA}"
+
+    # Only set the epoch tag if it is not already tagged from a previous run.
+ if ! git tag --points-at "${EPOCH_SHA}" | grep "${EPOCH_BRANCH_NAME}"; then
+ EPOCH_STAMP="$(date +%Y-%m-%d_%HH)"
+ git tag "${EPOCH_BRANCH_NAME}/${EPOCH_STAMP}" "${EPOCH_SHA}"
+ fi
+
+ ALL_BRANCHES_NAMES="${ALL_BRANCHES_NAMES} ${EPOCH_BRANCH_NAME}"
+ done
+ # This is safe because `git push` will by default fail for a non-fast-forward
+ # push, for example if the remote branch is ahead of the local branch.
+ git push --tags ${REMOTE} ${ALL_BRANCHES_NAMES}
+}
+
+cd $WPT_ROOT
+
+if [ -z "$GITHUB_TOKEN" ]; then
+ echo "GITHUB_TOKEN must be set as an environment variable"
+ exit 1
+fi
+
+REMOTE=https://x-access-token:$GITHUB_TOKEN@github.com/web-platform-tests/wpt.git
+
+main
diff --git a/testing/web-platform/tests/tools/ci/interfaces_update.sh b/testing/web-platform/tests/tools/ci/interfaces_update.sh
new file mode 100755
index 0000000000..6bf7c7c712
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/interfaces_update.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+set -eux -o pipefail
+
+SCRIPT_DIR=$(cd $(dirname "$0") && pwd -P)
+WPT_ROOT=$SCRIPT_DIR/../..
+
+main () {
+ # Find the latest version of the package to install.
+ VERSION=$(npm info @webref/idl version)
+
+ # Install @webref/idl in a temporary directory.
+ TMPDIR=$(mktemp -d)
+ cd $TMPDIR
+ npm install @webref/idl@$VERSION
+
+ # Delete interfaces/*.idl except tentative ones
+ cd $WPT_ROOT
+ find interfaces/ -name '*.idl' -not -name '*.tentative.idl' -delete
+
+ # Handle cssom.idl with preamble first.
+ cat <<EOF > interfaces/cssom.idl
+// GENERATED PREAMBLE - DO NOT EDIT
+// CSSOMString is an implementation-defined type of either DOMString or
+// USVString in CSSOM: https://drafts.csswg.org/cssom/#cssomstring-type
+// For web-platform-tests, use DOMString because USVString has additional
+// requirements in type conversion and could result in spurious failures for
+// implementations that use DOMString.
+typedef DOMString CSSOMString;
+
+EOF
+ cat $TMPDIR/node_modules/@webref/idl/cssom.idl >> interfaces/cssom.idl
+ rm $TMPDIR/node_modules/@webref/idl/cssom.idl
+
+ # Move remaining *.idl from @webref/idl to interfaces/
+ mv $TMPDIR/node_modules/@webref/idl/*.idl interfaces/
+
+ # Cleanup
+ rm -rf $TMPDIR
+
+ if [ -n "$GITHUB_ENV" ]; then
+ echo webref_idl_version=$VERSION >> $GITHUB_ENV
+ fi
+}
+
+main
diff --git a/testing/web-platform/tests/tools/ci/jobs.py b/testing/web-platform/tests/tools/ci/jobs.py
new file mode 100644
index 0000000000..5b2ef277a9
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/jobs.py
@@ -0,0 +1,149 @@
+# mypy: allow-untyped-defs
+
+import argparse
+import os
+import re
+from ..wpt.testfiles import branch_point, files_changed
+
+from tools import localpaths # noqa: F401
+
+wpt_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+
+# Common exclusions between affected_tests and stability jobs.
+# Files in these dirs would trigger the execution of too many tests.
+EXCLUDES = [
+ "!tools/",
+ "!docs/",
+ "!conformance-checkers/",
+ "!.*/OWNERS",
+ "!.*/META.yml",
+ "!.*/tools/",
+ "!.*/README",
+ "!css/[^/]*$"
+]
+
+# Rules are just regexes on the path, with a leading ! indicating a regex that the
+# path must not match for the job to run. Paths should be kept in sync with update-built-tests.sh.
+job_path_map = {
+ "affected_tests": [".*/.*", "!resources/(?!idlharness.js)"] + EXCLUDES,
+ "stability": [".*/.*", "!resources/.*"] + EXCLUDES,
+ "lint": [".*"],
+ "manifest_upload": [".*"],
+ "resources_unittest": ["resources/", "tools/"],
+ "tools_unittest": ["tools/"],
+ "wptrunner_unittest": ["tools/"],
+ "update_built": ["update-built-tests\\.sh",
+ "conformance-checkers/",
+ "css/css-ui/",
+ "html/",
+ "infrastructure/",
+ "mimesniff/"],
+ "wpt_integration": ["tools/"],
+ "wptrunner_infrastructure": ["infrastructure/",
+ "tools/",
+ "resources/",
+ "webdriver/tests/support"],
+}
+
+
+def _path_norm(path):
+ """normalize a path for both case and slashes (to /)"""
+ path = os.path.normcase(path)
+ if os.path.sep != "/":
+ # this must be after the normcase call as that does slash normalization
+ path = path.replace(os.path.sep, "/")
+ return path
+
+
+class Ruleset:
+ def __init__(self, rules):
+ self.include = []
+ self.exclude = []
+ for rule in rules:
+ rule = _path_norm(rule)
+ self.add_rule(rule)
+
+ def add_rule(self, rule):
+ if rule.startswith("!"):
+ target = self.exclude
+ rule = rule[1:]
+ else:
+ target = self.include
+
+ target.append(re.compile("^%s" % rule))
+
+ def __call__(self, path):
+ path = _path_norm(path)
+ for item in self.exclude:
+ if item.match(path):
+ return False
+ for item in self.include:
+ if item.match(path):
+ return True
+ return False
+
+ def __repr__(self):
+ subs = tuple(",".join(item.pattern for item in target)
+ for target in (self.include, self.exclude))
+ return "Rules<include:[%s] exclude:[%s]>" % subs
+
+
+def get_paths(**kwargs):
+ if kwargs["revish"] is None:
+ revish = "%s..HEAD" % branch_point()
+ else:
+ revish = kwargs["revish"]
+
+ changed, _ = files_changed(revish, ignore_rules=[])
+ all_changed = {os.path.relpath(item, wpt_root) for item in set(changed)}
+ return all_changed
+
+
+def get_jobs(paths, **kwargs):
+ if kwargs.get("all"):
+ return set(job_path_map.keys())
+
+ jobs = set()
+
+ rules = {}
+ includes = kwargs.get("includes")
+ if includes is not None:
+ includes = set(includes)
+ for key, value in job_path_map.items():
+ if includes is None or key in includes:
+ rules[key] = Ruleset(value)
+
+ for path in paths:
+ for job in list(rules.keys()):
+ ruleset = rules[job]
+ if ruleset(path):
+ rules.pop(job)
+ jobs.add(job)
+ if not rules:
+ break
+
+ # Default jobs should run even if there were no changes
+ if not paths:
+ for job, path_re in job_path_map.items():
+ if ".*" in path_re:
+ jobs.add(job)
+
+ return jobs
+
+
+def create_parser():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("revish", default=None, help="Commits to consider. Defaults to the commits on the current branch", nargs="?")
+ parser.add_argument("--all", help="List all jobs unconditionally.", action="store_true")
+ parser.add_argument("--includes", default=None, help="Jobs to check for. Return code is 0 if all jobs are found, otherwise 1", nargs="*")
+ return parser
+
+
+def run(**kwargs):
+ paths = get_paths(**kwargs)
+ jobs = get_jobs(paths, **kwargs)
+ if not kwargs["includes"]:
+ for item in sorted(jobs):
+ print(item)
+ else:
+ return 0 if set(kwargs["includes"]).issubset(jobs) else 1
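The following standalone Python sketch (not part of the patch) illustrates how the include/exclude rules in job_path_map above are evaluated by Ruleset: exclude patterns are checked first and win over include patterns, and all patterns are anchored at the start of the (normalized) path. Only a subset of the affected_tests patterns is reproduced here, and the names `include`, `exclude` and `matches` are purely illustrative.

import re

# A leading "!" in a rule becomes an exclude pattern; everything else is an include pattern.
include = [re.compile(r"^.*/.*")]                               # any path containing a slash
exclude = [re.compile(r"^tools/"), re.compile(r"^css/[^/]*$")]  # subset of EXCLUDES

def matches(path):
    # Mirrors Ruleset.__call__: exclusion short-circuits before inclusion is checked.
    if any(rule.match(path) for rule in exclude):
        return False
    return any(rule.match(path) for rule in include)

assert matches("html/semantics/forms/form.html")     # would trigger affected_tests
assert not matches("tools/ci/jobs.py")                # tools/ is excluded
assert not matches("css/README")                      # files directly under css/ are excluded
assert matches("css/css-grid/grid.html")              # nested css tests still match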
diff --git a/testing/web-platform/tests/tools/ci/make_hosts_file.py b/testing/web-platform/tests/tools/ci/make_hosts_file.py
new file mode 100644
index 0000000000..f3397281b7
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/make_hosts_file.py
@@ -0,0 +1,23 @@
+# mypy: allow-untyped-defs
+
+import argparse
+import os
+
+from ..localpaths import repo_root
+
+from ..serve.serve import build_config, make_hosts_file
+
+
+def create_parser():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("address", default="127.0.0.1", nargs="?",
+ help="Address that hosts should point at")
+ return parser
+
+
+def run(**kwargs):
+ config_builder = build_config(os.path.join(repo_root, "config.json"),
+ ssl={"type": "none"})
+
+ with config_builder as config:
+ print(make_hosts_file(config, kwargs["address"]))
diff --git a/testing/web-platform/tests/tools/ci/manifest_build.py b/testing/web-platform/tests/tools/ci/manifest_build.py
new file mode 100644
index 0000000000..25a1a5f740
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/manifest_build.py
@@ -0,0 +1,200 @@
+# mypy: allow-untyped-defs
+
+import json
+import logging
+import os
+import subprocess
+import sys
+import tempfile
+
+import requests
+
+here = os.path.abspath(os.path.dirname(__file__))
+wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
+
+if wpt_root not in sys.path:
+ sys.path.append(wpt_root)
+
+from tools.wpt.testfiles import get_git_cmd
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+class Status:
+ SUCCESS = 0
+ FAIL = 1
+
+
+def run(cmd, return_stdout=False, **kwargs):
+ logger.info(" ".join(cmd))
+ if return_stdout:
+ f = subprocess.check_output
+ else:
+ f = subprocess.check_call
+ return f(cmd, **kwargs)
+
+
+def create_manifest(path):
+ run(["./wpt", "manifest", "-p", path])
+
+
+def compress_manifest(path):
+ for args in [["gzip", "-k", "-f", "--best"],
+ ["bzip2", "-k", "-f", "--best"],
+ ["zstd", "-k", "-f", "--ultra", "-22", "-q"]]:
+ run(args + [path])
+
+
+def request(url, desc, method=None, data=None, json_data=None, params=None, headers=None):
+ github_token = os.environ.get("GITHUB_TOKEN")
+ default_headers = {
+ "Authorization": "token %s" % github_token,
+ "Accept": "application/vnd.github.machine-man-preview+json"
+ }
+
+ _headers = default_headers
+ if headers is not None:
+ _headers.update(headers)
+
+ kwargs = {"params": params,
+ "headers": _headers}
+ try:
+ logger.info("Requesting URL %s" % url)
+ if json_data is not None or data is not None:
+ if method is None:
+ method = requests.post
+ kwargs["json"] = json_data
+ kwargs["data"] = data
+ elif method is None:
+ method = requests.get
+
+ resp = method(url, **kwargs)
+
+ except Exception as e:
+ logger.error(f"{desc} failed:\n{e}")
+ return None
+
+ try:
+ resp.raise_for_status()
+ except requests.HTTPError:
+ logger.error("%s failed: Got HTTP status %s. Response:" %
+ (desc, resp.status_code))
+ logger.error(resp.text)
+ return None
+
+ try:
+ return resp.json()
+ except ValueError:
+        logger.error("%s failed: Returned data was not JSON. Response:" % desc)
+ logger.error(resp.text)
+
+
+def get_pr(owner, repo, sha):
+ data = request("https://api.github.com/search/issues?q=type:pr+is:merged+repo:%s/%s+sha:%s" %
+ (owner, repo, sha), "Getting PR")
+ if data is None:
+ return None
+
+ items = data["items"]
+ if len(items) == 0:
+ logger.error("No PR found for %s" % sha)
+ return None
+ if len(items) > 1:
+ logger.warning("Found multiple PRs for %s" % sha)
+
+ pr = items[0]
+
+ return pr["number"]
+
+
+def create_release(manifest_path, owner, repo, sha, tag, body):
+ logger.info(f"Creating a release for tag='{tag}', target_commitish='{sha}'")
+ create_url = f"https://api.github.com/repos/{owner}/{repo}/releases"
+ create_data = {"tag_name": tag,
+ "target_commitish": sha,
+ "name": tag,
+ "body": body,
+ "draft": True}
+ create_resp = request(create_url, "Release creation", json_data=create_data)
+ if not create_resp:
+ return False
+
+ # Upload URL contains '{?name,label}' at the end which we want to remove
+ upload_url = create_resp["upload_url"].split("{", 1)[0]
+
+ upload_exts = [".gz", ".bz2", ".zst"]
+ for upload_ext in upload_exts:
+ upload_filename = f"MANIFEST-{sha}.json{upload_ext}"
+ params = {"name": upload_filename,
+ "label": "MANIFEST.json%s" % upload_ext}
+
+ with open(f"{manifest_path}{upload_ext}", "rb") as f:
+ upload_data = f.read()
+
+ logger.info("Uploading %s bytes" % len(upload_data))
+
+ upload_resp = request(upload_url, "Manifest upload", data=upload_data, params=params,
+ headers={'Content-Type': 'application/octet-stream'})
+ if not upload_resp:
+ return False
+
+ release_id = create_resp["id"]
+ edit_url = f"https://api.github.com/repos/{owner}/{repo}/releases/{release_id}"
+ edit_data = create_data.copy()
+ edit_data["draft"] = False
+ edit_resp = request(edit_url, "Release publishing", method=requests.patch, json_data=edit_data)
+ if not edit_resp:
+ return False
+
+ logger.info("Released %s" % edit_resp["html_url"])
+ return True
+
+
+def should_dry_run():
+ with open(os.environ["GITHUB_EVENT_PATH"]) as f:
+ event = json.load(f)
+ logger.info(json.dumps(event, indent=2))
+
+ if "pull_request" in event:
+ logger.info("Dry run for PR")
+ return True
+ if event.get("ref") != "refs/heads/master":
+ logger.info("Dry run for ref %s" % event.get("ref"))
+ return True
+ return False
+
+
+def main():
+ dry_run = should_dry_run()
+
+ manifest_path = os.path.join(tempfile.mkdtemp(), "MANIFEST.json")
+
+ create_manifest(manifest_path)
+
+ compress_manifest(manifest_path)
+
+ owner, repo = os.environ["GITHUB_REPOSITORY"].split("/", 1)
+
+ git = get_git_cmd(wpt_root)
+ head_rev = git("rev-parse", "HEAD").strip()
+ body = git("show", "--no-patch", "--format=%B", "HEAD")
+
+ if dry_run:
+ return Status.SUCCESS
+
+ pr = get_pr(owner, repo, head_rev)
+ if pr is None:
+ return Status.FAIL
+ tag_name = "merge_pr_%s" % pr
+
+ if not create_release(manifest_path, owner, repo, head_rev, tag_name, body):
+ return Status.FAIL
+
+ return Status.SUCCESS
+
+
+if __name__ == "__main__":
+ code = main() # type: ignore
+ assert isinstance(code, int)
+ sys.exit(code)
diff --git a/testing/web-platform/tests/tools/ci/regen_certs.py b/testing/web-platform/tests/tools/ci/regen_certs.py
new file mode 100644
index 0000000000..8f3abdcad6
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/regen_certs.py
@@ -0,0 +1,102 @@
+# mypy: allow-untyped-defs
+
+import argparse
+import base64
+import logging
+import subprocess
+import sys
+
+
+logger = logging.getLogger(__name__)
+
+
+# TODO(Issue #24180): Regenerate SXG fingerprint too.
+CHROME_SPKI_CERTS_CONTENT = """\
+# This file is automatically generated by 'wpt regen-certs'
+# DO NOT EDIT MANUALLY.
+
+# tools/certs/web-platform.test.pem
+WPT_FINGERPRINT = '{wpt_fingerprint}'
+
+# signed-exchange/resources/127.0.0.1.sxg.pem
+SXG_WPT_FINGERPRINT = '0Rt4mT6SJXojEMHTnKnlJ/hBKMBcI4kteBlhR1eTTdk='
+
+IGNORE_CERTIFICATE_ERRORS_SPKI_LIST = [
+ WPT_FINGERPRINT,
+ SXG_WPT_FINGERPRINT
+]
+"""
+
+
+def get_parser():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--checkend-seconds", type=int, default=5184000,
+ help="The number of seconds the certificates must be valid for")
+ parser.add_argument("--force", action="store_true",
+ help="Regenerate certificates even if not reaching expiry")
+ return parser
+
+
+def check_cert(certificate, checkend_seconds):
+ """Checks whether an x509 certificate will expire within a set period.
+
+ Returns 0 if the certificate will not expire, non-zero otherwise."""
+ cmd = [
+ "openssl", "x509",
+ "-checkend", str(checkend_seconds),
+ "-noout",
+ "-in", certificate
+ ]
+ logger.info("Running '%s'" % " ".join(cmd))
+ return subprocess.call(cmd)
+
+
+def regen_certs():
+ """Regenerate the wpt openssl certificates, by delegating to wptserve."""
+ cmd = [
+ sys.executable, "wpt", "serve",
+ "--config", "tools/certs/config.json",
+ "--exit-after-start",
+ ]
+ logger.info("Running '%s'" % " ".join(cmd))
+ subprocess.check_call(cmd)
+
+
+def regen_chrome_spki():
+ """Regenerate the SPKI fingerprints for Chrome's ignore-cert list.
+
+ Chrome requires us to explicitly list which certificates are ignored by its
+ security-checking, by listing a base64 hash of the public key. This will
+ change every time we replace our certificates, so we store the hashes in a
+ file and regenerate it here.
+ """
+ wpt_spki = calculate_spki("tools/certs/web-platform.test.pem")
+ with open("tools/wptrunner/wptrunner/browsers/chrome_spki_certs.py", "w") as f:
+ f.write(CHROME_SPKI_CERTS_CONTENT.format(wpt_fingerprint=wpt_spki))
+
+
+def calculate_spki(cert_path):
+ """Calculate the SPKI fingerprint for a given x509 certificate."""
+ # We use shell=True as we control the input |cert_path|, and piping
+ # correctly across processes is non-trivial in Python.
+ cmd = (f"openssl x509 -noout -pubkey -in {cert_path} | " +
+ "openssl pkey -pubin -outform der | " +
+ "openssl dgst -sha256 -binary")
+ dgst_output = subprocess.check_output(cmd, shell=True)
+
+ return base64.b64encode(dgst_output).decode('utf-8')
+
+
+def run(**kwargs):
+ logging.basicConfig()
+
+ if kwargs["force"]:
+ logger.info("Force regenerating WPT certificates")
+ checkend_seconds = kwargs["checkend_seconds"]
+ if (kwargs["force"] or
+ check_cert("tools/certs/cacert.pem", checkend_seconds) or
+ check_cert("tools/certs/web-platform.test.pem", checkend_seconds)):
+ regen_certs()
+ regen_chrome_spki()
+ else:
+ logger.info("Certificates are still valid for at least %s seconds, skipping regeneration" % checkend_seconds)
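For reference, the SPKI computation that calculate_spki delegates to the openssl shell pipeline is: extract the certificate's SubjectPublicKeyInfo, DER-encode it, hash it with SHA-256, and base64-encode the digest. The hedged sketch below (not part of the patch) shows the same calculation done in pure Python; it assumes the third-party `cryptography` package, which the wpt tooling does not otherwise depend on, and the function name `calculate_spki_no_shell` is only illustrative.

import base64
import hashlib

from cryptography import x509
from cryptography.hazmat.primitives import serialization

def calculate_spki_no_shell(cert_path):
    # Equivalent of:
    #   openssl x509 -noout -pubkey | openssl pkey -pubin -outform der | openssl dgst -sha256 -binary
    with open(cert_path, "rb") as f:
        cert = x509.load_pem_x509_certificate(f.read())
    spki_der = cert.public_key().public_bytes(
        encoding=serialization.Encoding.DER,
        format=serialization.PublicFormat.SubjectPublicKeyInfo)
    return base64.b64encode(hashlib.sha256(spki_der).digest()).decode("utf-8")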
diff --git a/testing/web-platform/tests/tools/ci/requirements_build.txt b/testing/web-platform/tests/tools/ci/requirements_build.txt
new file mode 100644
index 0000000000..da3f93cd3d
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/requirements_build.txt
@@ -0,0 +1,5 @@
+cairocffi==1.3.0
+fonttools==4.33.2
+genshi==0.7.7
+jinja2==3.1.2
+pyyaml==6.0
diff --git a/testing/web-platform/tests/tools/ci/requirements_tc.txt b/testing/web-platform/tests/tools/ci/requirements_tc.txt
new file mode 100644
index 0000000000..0c67c0805a
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/requirements_tc.txt
@@ -0,0 +1,4 @@
+pygithub==1.56
+pyyaml==6.0
+requests==2.27.1
+taskcluster==44.22.1
diff --git a/testing/web-platform/tests/tools/ci/run_tc.py b/testing/web-platform/tests/tools/ci/run_tc.py
new file mode 100755
index 0000000000..46e3d613a9
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/run_tc.py
@@ -0,0 +1,424 @@
+#!/usr/bin/env python3
+# mypy: allow-untyped-defs
+
+"""Wrapper script for running jobs in Taskcluster
+
+This is intended for running test jobs in Taskcluster. The script
+takes two positional arguments, which are the name of the test job
+and the script to actually run.
+
+The name of the test job is used to determine whether the script should be run
+for this push (this is in lieu of having a proper decision task). There are
+several ways that the script can be scheduled to run:
+
+1. The output of wpt test-jobs includes the job name
+2. The job name is included in a job declaration (see below)
+3. The string "all" is included in the job declaration
+4. The job name is set to "all"
+
+A job declaration is a line appearing in the pull request body (for
+pull requests) or first commit message (for pushes) of the form:
+
+tc-jobs: job1,job2,[...]
+
+In addition, there are a number of keyword arguments used to set options for the
+environment in which the jobs run. Documentation for these is in the command help.
+
+As well as running the script, this wrapper sets two environment variables:
+GITHUB_BRANCH which is the branch that the commits will merge into (if it's a PR)
+or the branch that the commits are on (if it's a push), and GITHUB_PULL_REQUEST
+which is the string "false" if the event triggering this job wasn't a pull request
+or the pull request number if it was. The semantics of these variables are chosen
+to match the corresponding TRAVIS_* variables.
+
+Note: for local testing in the Docker image the script ought to still work, but
+full functionality requires that the TASK_EVENT environment variable is set to
+the serialization of a GitHub event payload.
+"""
+
+import argparse
+import fnmatch
+import json
+import os
+import subprocess
+import sys
+import tarfile
+import tempfile
+import zipfile
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+from wpt.utils import get_download_to_descriptor # type: ignore
+
+root = os.path.abspath(
+ os.path.join(os.path.dirname(__file__),
+ os.pardir,
+ os.pardir))
+
+
+def run(cmd, return_stdout=False, **kwargs):
+ print(" ".join(cmd))
+ if return_stdout:
+ f = subprocess.check_output
+ if "encoding" not in kwargs:
+ kwargs["encoding"] = "utf-8"
+ else:
+ f = subprocess.check_call
+ return f(cmd, **kwargs)
+
+
+def start(cmd):
+ print(" ".join(cmd))
+ subprocess.Popen(cmd)
+
+
+def get_parser():
+ p = argparse.ArgumentParser()
+ p.add_argument("--oom-killer",
+ action="store_true",
+ default=False,
+ help="Run userspace OOM killer")
+ p.add_argument("--hosts",
+ dest="hosts_file",
+ action="store_true",
+ default=True,
+ help="Setup wpt entries in hosts file")
+ p.add_argument("--no-hosts",
+ dest="hosts_file",
+ action="store_false",
+ help="Don't setup wpt entries in hosts file")
+ p.add_argument("--browser",
+ action="append",
+ default=[],
+ help="Browsers that will be used in the job")
+ p.add_argument("--channel",
+ default=None,
+ choices=["experimental", "dev", "nightly", "beta", "stable"],
+ help="Chrome browser channel")
+ p.add_argument("--xvfb",
+ action="store_true",
+ help="Start xvfb")
+ p.add_argument("--install-certificates", action="store_true", default=None,
+ help="Install web-platform.test certificates to UA store")
+ p.add_argument("--no-install-certificates", action="store_false", default=None,
+ help="Don't install web-platform.test certificates to UA store")
+ p.add_argument("--no-setup-repository", action="store_false", dest="setup_repository",
+ help="Don't run any repository setup steps, instead use the existing worktree. "
+ "This is useful for local testing.")
+ p.add_argument("--checkout",
+ help="Revision to checkout before starting job")
+ p.add_argument("--ref",
+ help="Git ref for the commit that should be run")
+ p.add_argument("--head-rev",
+ help="Commit at the head of the branch when the decision task ran")
+ p.add_argument("--merge-rev",
+ help="Provisional merge commit for PR when the decision task ran")
+ p.add_argument("script",
+ help="Script to run for the job")
+ p.add_argument("script_args",
+ nargs=argparse.REMAINDER,
+ help="Additional arguments to pass to the script")
+ return p
+
+
+def start_userspace_oom_killer():
+ # Start userspace OOM killer: https://github.com/rfjakob/earlyoom
+ # It will report memory usage every minute and prefer to kill browsers.
+ start(["sudo", "earlyoom", "-p", "-r", "60", "--prefer=(chrome|firefox)", "--avoid=python"])
+
+
+def make_hosts_file():
+ run(["sudo", "sh", "-c", "./wpt make-hosts-file >> /etc/hosts"])
+
+
+def checkout_revision(rev):
+ run(["git", "checkout", "--quiet", rev])
+
+
+def install_certificates():
+ run(["sudo", "cp", "tools/certs/cacert.pem",
+ "/usr/local/share/ca-certificates/cacert.crt"])
+ run(["sudo", "update-ca-certificates"])
+
+
+def install_chrome(channel):
+ if channel in ("experimental", "dev"):
+ deb_archive = "google-chrome-unstable_current_amd64.deb"
+ elif channel == "beta":
+ deb_archive = "google-chrome-beta_current_amd64.deb"
+ elif channel == "stable":
+ deb_archive = "google-chrome-stable_current_amd64.deb"
+ else:
+ raise ValueError("Unrecognized release channel: %s" % channel)
+
+ dest = os.path.join("/tmp", deb_archive)
+ deb_url = "https://dl.google.com/linux/direct/%s" % deb_archive
+ with open(dest, "wb") as f:
+ get_download_to_descriptor(f, deb_url)
+
+ run(["sudo", "apt-get", "-qqy", "update"])
+ run(["sudo", "gdebi", "-qn", "/tmp/%s" % deb_archive])
+
+
+def start_xvfb():
+ start(["sudo", "Xvfb", os.environ["DISPLAY"], "-screen", "0",
+ "%sx%sx%s" % (os.environ["SCREEN_WIDTH"],
+ os.environ["SCREEN_HEIGHT"],
+ os.environ["SCREEN_DEPTH"])])
+ start(["sudo", "fluxbox", "-display", os.environ["DISPLAY"]])
+
+
+def set_variables(event):
+ # Set some variables that we use to get the commits on the current branch
+ ref_prefix = "refs/heads/"
+ pull_request = "false"
+ branch = None
+ if "pull_request" in event:
+ pull_request = str(event["pull_request"]["number"])
+ # Note that this is the branch that a PR will merge to,
+ # not the branch name for the PR
+ branch = event["pull_request"]["base"]["ref"]
+ elif "ref" in event:
+ branch = event["ref"]
+ if branch.startswith(ref_prefix):
+ branch = branch[len(ref_prefix):]
+
+ os.environ["GITHUB_PULL_REQUEST"] = pull_request
+ if branch:
+ os.environ["GITHUB_BRANCH"] = branch
+
+
+def task_url(task_id):
+ root_url = os.environ['TASKCLUSTER_ROOT_URL']
+ if root_url == 'https://taskcluster.net':
+ queue_base = "https://queue.taskcluster.net/v1/task"
+ else:
+ queue_base = root_url + "/api/queue/v1/task"
+
+ return "%s/%s" % (queue_base, task_id)
+
+
+def download_artifacts(artifacts):
+ artifact_list_by_task = {}
+ for artifact in artifacts:
+ base_url = task_url(artifact["task"])
+ if artifact["task"] not in artifact_list_by_task:
+ with tempfile.TemporaryFile() as f:
+ get_download_to_descriptor(f, base_url + "/artifacts")
+ f.seek(0)
+ artifacts_data = json.load(f)
+ artifact_list_by_task[artifact["task"]] = artifacts_data
+
+ artifacts_data = artifact_list_by_task[artifact["task"]]
+ print("DEBUG: Got artifacts %s" % artifacts_data)
+ found = False
+ for candidate in artifacts_data["artifacts"]:
+ print("DEBUG: candidate: %s glob: %s" % (candidate["name"], artifact["glob"]))
+ if fnmatch.fnmatch(candidate["name"], artifact["glob"]):
+ found = True
+                print("INFO: Fetching artifact %s from task %s" % (candidate["name"], artifact["task"]))
+ file_name = candidate["name"].rsplit("/", 1)[1]
+ url = base_url + "/artifacts/" + candidate["name"]
+ dest_path = os.path.expanduser(os.path.join("~", artifact["dest"], file_name))
+ dest_dir = os.path.dirname(dest_path)
+ if not os.path.exists(dest_dir):
+ os.makedirs(dest_dir)
+ with open(dest_path, "wb") as f:
+ get_download_to_descriptor(f, url)
+
+ if artifact.get("extract"):
+ unpack(dest_path)
+ if not found:
+ print("WARNING: No artifact found matching %s in task %s" % (artifact["glob"], artifact["task"]))
+
+
+def unpack(path):
+ dest = os.path.dirname(path)
+ if tarfile.is_tarfile(path):
+ run(["tar", "-xf", path], cwd=os.path.dirname(path))
+ elif zipfile.is_zipfile(path):
+ with zipfile.ZipFile(path) as archive:
+ archive.extractall(dest)
+ else:
+ print("ERROR: Don't know how to extract %s" % path)
+ raise Exception
+
+
+def setup_environment(args):
+ if "TASK_ARTIFACTS" in os.environ:
+ artifacts = json.loads(os.environ["TASK_ARTIFACTS"])
+ download_artifacts(artifacts)
+
+ if args.hosts_file:
+ make_hosts_file()
+
+ if args.install_certificates:
+ install_certificates()
+
+ if "chrome" in args.browser:
+ assert args.channel is not None
+ install_chrome(args.channel)
+
+ if args.xvfb:
+ start_xvfb()
+
+ if args.oom_killer:
+ start_userspace_oom_killer()
+
+
+def setup_repository(args):
+ is_pr = os.environ.get("GITHUB_PULL_REQUEST", "false") != "false"
+
+ # Initially task_head points at the same commit as the ref we want to test.
+ # However that may not be the same commit as we actually want to test if
+ # the branch changed since the decision task ran. The branch may have
+ # changed because someone has pushed more commits (either to the PR
+ # or later commits to the branch), or because someone has pushed to the
+ # base branch for the PR.
+ #
+ # In that case we take a different approach depending on whether this is a
+ # PR or a push to a branch.
+ # If this is a push to a branch, and the original commit is still fetchable,
+ # we try to fetch that (it may not be in the case of e.g. a force push).
+ # If it's not fetchable then we fail the run.
+ # For a PR we are testing the provisional merge commit. If that's changed it
+ # could be that the PR branch was updated or the base branch was updated. In the
+ # former case we fail the run because testing an old commit is a waste of
+ # resources. In the latter case we assume it's OK to use the current merge
+ # instead of the one at the time the decision task ran.
+
+ if args.ref:
+ if is_pr:
+ assert args.ref.endswith("/merge")
+ expected_head = args.merge_rev
+ else:
+ expected_head = args.head_rev
+
+ task_head = run(["git", "rev-parse", "task_head"], return_stdout=True).strip()
+
+ if task_head != expected_head:
+ if not is_pr:
+ try:
+ run(["git", "fetch", "origin", expected_head])
+ run(["git", "reset", "--hard", expected_head])
+ except subprocess.CalledProcessError:
+ print("CRITICAL: task_head points at %s, expected %s and "
+ "unable to fetch expected commit.\n"
+ "This may be because the branch was updated" % (task_head, expected_head))
+ sys.exit(1)
+ else:
+ # Convert the refs/pulls/<id>/merge to refs/pulls/<id>/head
+ head_ref = args.ref.rsplit("/", 1)[0] + "/head"
+ try:
+ remote_head = run(["git", "ls-remote", "origin", head_ref],
+ return_stdout=True).split("\t")[0]
+ except subprocess.CalledProcessError:
+ print("CRITICAL: Failed to read remote ref %s" % head_ref)
+ sys.exit(1)
+ if remote_head != args.head_rev:
+ print("CRITICAL: task_head points at %s, expected %s. "
+ "This may be because the branch was updated" % (task_head, expected_head))
+ sys.exit(1)
+ print("INFO: Merge commit changed from %s to %s due to base branch changes. "
+ "Running task anyway." % (expected_head, task_head))
+
+ if os.environ.get("GITHUB_PULL_REQUEST", "false") != "false":
+ parents = run(["git", "rev-parse", "task_head^@"],
+ return_stdout=True).strip().split()
+ if len(parents) == 2:
+ base_head = parents[0]
+ pr_head = parents[1]
+
+ run(["git", "branch", "base_head", base_head])
+ run(["git", "branch", "pr_head", pr_head])
+ else:
+ print("ERROR: Pull request HEAD wasn't a 2-parent merge commit; "
+ "expected to test the merge of PR into the base")
+ commit = run(["git", "rev-parse", "task_head"],
+ return_stdout=True).strip()
+ print("HEAD: %s" % commit)
+ print("Parents: %s" % ", ".join(parents))
+ sys.exit(1)
+
+ branch = os.environ.get("GITHUB_BRANCH")
+ if branch:
+ # Ensure that the remote base branch exists
+ # TODO: move this somewhere earlier in the task
+ run(["git", "fetch", "--quiet", "origin", "%s:%s" % (branch, branch)])
+
+ checkout_rev = args.checkout if args.checkout is not None else "task_head"
+ checkout_revision(checkout_rev)
+
+ refs = run(["git", "for-each-ref", "refs/heads"], return_stdout=True)
+ print("INFO: git refs:\n%s" % refs)
+ print("INFO: checked out commit:\n%s" % run(["git", "rev-parse", "HEAD"],
+ return_stdout=True))
+
+
+def fetch_event_data():
+ try:
+ task_id = os.environ["TASK_ID"]
+ except KeyError:
+ print("WARNING: Missing TASK_ID environment variable")
+ # For example under local testing
+ return None
+
+ with tempfile.TemporaryFile() as f:
+ get_download_to_descriptor(f, task_url(task_id))
+ f.seek(0)
+ task_data = json.load(f)
+ event_data = task_data.get("extra", {}).get("github_event")
+ if event_data is not None:
+ return json.loads(event_data)
+
+
+def include_job(job):
+ # Only for supporting pre decision-task PRs
+ # Special case things that unconditionally run on pushes,
+ # assuming a higher layer is filtering the required list of branches
+ if "GITHUB_PULL_REQUEST" not in os.environ:
+ return True
+
+ if (os.environ["GITHUB_PULL_REQUEST"] == "false" and
+ job == "run-all"):
+ return True
+
+ jobs_str = run([os.path.join(root, "wpt"),
+ "test-jobs"], return_stdout=True)
+ print(jobs_str)
+ return job in set(jobs_str.splitlines())
+
+
+def main():
+ args = get_parser().parse_args()
+
+ if "TASK_EVENT" in os.environ:
+ event = json.loads(os.environ["TASK_EVENT"])
+ else:
+ event = fetch_event_data()
+
+ if event:
+ set_variables(event)
+
+ if args.setup_repository:
+ setup_repository(args)
+
+ # Hack for backwards compatibility
+ if args.script in ["run-all", "lint", "update_built", "tools_unittest",
+ "wpt_integration", "resources_unittest",
+ "wptrunner_infrastructure", "stability", "affected_tests"]:
+ job = args.script
+ if not include_job(job):
+ return
+ args.script = args.script_args[0]
+ args.script_args = args.script_args[1:]
+
+ # Run the job
+ setup_environment(args)
+ os.chdir(root)
+ cmd = [args.script] + args.script_args
+ print(" ".join(cmd))
+ sys.exit(subprocess.call(cmd))
+
+
+if __name__ == "__main__":
+ main() # type: ignore
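As a quick illustration of the job-declaration format described in the module docstring above ("tc-jobs: job1,job2,[...]" in a pull request body or first commit message), here is a hedged, standalone sketch of extracting the requested job names. It is not part of the patch and not the parser wpt actually uses (that presumably lives with the decision task code under tools/ci/tc/); the function name `parse_job_declaration` is hypothetical.

import re

def parse_job_declaration(text):
    """Return the set of job names requested via a "tc-jobs:" line, or an empty set."""
    for line in text.splitlines():
        m = re.match(r"^tc-jobs:\s*(.+)$", line.strip())
        if m:
            return {name.strip() for name in m.group(1).split(",") if name.strip()}
    return set()

assert parse_job_declaration("Fix flexbox test\n\ntc-jobs: lint,stability") == {"lint", "stability"}
assert parse_job_declaration("tc-jobs: all") == {"all"}
assert parse_job_declaration("No job declaration here") == set()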
diff --git a/testing/web-platform/tests/tools/ci/taskcluster-run.py b/testing/web-platform/tests/tools/ci/taskcluster-run.py
new file mode 100755
index 0000000000..490c6f7554
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/taskcluster-run.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python3
+# mypy: allow-untyped-defs
+
+import argparse
+import gzip
+import logging
+import os
+import shutil
+import subprocess
+import sys
+
+
+def get_browser_args(product, channel):
+ if product == "firefox":
+ local_binary = os.path.expanduser(os.path.join("~", "build", "firefox", "firefox"))
+ if os.path.exists(local_binary):
+ return ["--binary=%s" % local_binary]
+ print("WARNING: Local firefox binary not found")
+ return ["--install-browser", "--install-webdriver"]
+ if product == "servo":
+ return ["--install-browser", "--processes=12"]
+ if product == "chrome" or product == "chromium":
+ # Taskcluster machines do not have GPUs, so use software rendering via --enable-swiftshader.
+ args = ["--enable-swiftshader"]
+ if channel == "nightly":
+ args.extend(["--install-browser", "--install-webdriver"])
+ return args
+ if product == "webkitgtk_minibrowser":
+ return ["--install-browser"]
+ return []
+
+
+def find_wptreport(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--log-wptreport', action='store')
+ return parser.parse_known_args(args)[0].log_wptreport
+
+
+def find_wptscreenshot(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--log-wptscreenshot', action='store')
+ return parser.parse_known_args(args)[0].log_wptscreenshot
+
+
+def gzip_file(filename, delete_original=True):
+ with open(filename, 'rb') as f_in:
+ with gzip.open('%s.gz' % filename, 'wb') as f_out:
+ shutil.copyfileobj(f_in, f_out)
+ if delete_original:
+ os.unlink(filename)
+
+
+def main(product, channel, commit_range, wpt_args):
+ """Invoke the `wpt run` command according to the needs of the Taskcluster
+ continuous integration service."""
+
+ logger = logging.getLogger("tc-run")
+ logger.setLevel(logging.INFO)
+ handler = logging.StreamHandler()
+ handler.setFormatter(
+ logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+ )
+ logger.addHandler(handler)
+
+ subprocess.call(['python3', './wpt', 'manifest-download'])
+
+ if commit_range:
+ logger.info(
+ "Running tests affected in range '%s'..." % commit_range
+ )
+ wpt_args += ['--affected', commit_range]
+ else:
+ logger.info("Running all tests")
+
+ wpt_args += [
+ "--log-mach-level=info",
+ "--log-mach=-",
+ "-y",
+ "--no-pause",
+ "--no-restart-on-unexpected",
+ "--install-fonts",
+ "--no-headless",
+ "--verify-log-full"
+ ]
+ wpt_args += get_browser_args(product, channel)
+
+ # Hack to run servo with one process only for wdspec
+ if product == "servo" and "--test-type=wdspec" in wpt_args:
+ wpt_args = [item for item in wpt_args if not item.startswith("--processes")]
+
+ command = ["python3", "./wpt", "run"] + wpt_args + [product]
+
+ logger.info("Executing command: %s" % " ".join(command))
+ with open("/home/test/artifacts/checkrun.md", "a") as f:
+ f.write("\n**WPT Command:** `%s`\n\n" % " ".join(command))
+
+ retcode = subprocess.call(command, env=dict(os.environ, TERM="dumb"))
+ if retcode != 0:
+ sys.exit(retcode)
+
+ wptreport = find_wptreport(wpt_args)
+ if wptreport:
+ gzip_file(wptreport)
+ wptscreenshot = find_wptscreenshot(wpt_args)
+ if wptscreenshot:
+ gzip_file(wptscreenshot)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description=main.__doc__)
+ parser.add_argument("--commit-range", action="store",
+ help="""Git commit range. If specified, this will be
+ supplied to the `wpt tests-affected` command to
+                        determine the list of tests to execute""")
+ parser.add_argument("product", action="store",
+ help="Browser to run tests in")
+ parser.add_argument("channel", action="store",
+ help="Channel of the browser")
+ parser.add_argument("wpt_args", nargs="*",
+ help="Arguments to forward to `wpt run` command")
+ main(**vars(parser.parse_args())) # type: ignore
diff --git a/testing/web-platform/tests/tools/ci/tc/README.md b/testing/web-platform/tests/tools/ci/tc/README.md
new file mode 100644
index 0000000000..785c82cca3
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/README.md
@@ -0,0 +1,243 @@
+# Taskgraph Setup
+
+The taskgraph is built from a YAML file. This file has two top-level
+properties: `components` and `tasks`. The full list of tasks is
+defined by the `tasks` object; each task is an object with a single
+property naming the task, whose value is an object holding the task
+properties. Each task requires the following top-level properties
+(a minimal example follows the list):
+
+* `provisionerId`: String. Name of Taskcluster provisioner
+* `schedulerId`: String. Name of Taskcluster scheduler
+* `deadline`: String. Time until the task expires
+* `image`: String. Name of docker image to use for task
+* `maxRunTime`: Number. Maximum time in seconds for which the task can
+ run.
+* `artifacts`: Object. Artifacts and directories to upload; see the
+  Taskcluster documentation.
+* `command`: String. Command to run. This is automatically wrapped in a
+ run_tc command
+* `options`: Optional Object. Options to pass into run_tc
+ - xvfb: Boolean. Enable Xvfb for run
+  - oom-killer: Boolean. Enable the OOM killer for run
+ - hosts: Boolean. Update hosts file with wpt hosts before run
+ - install-certificates: Boolean. Install wpt certs into OS
+ certificate store for run
+ - browser: List. List of browser names for run
+ - channel: String. Browser channel for run
+* `trigger`: Object. Conditions under which to consider the task. One or
+  more of the following properties:
+ - branch: List. List of branch names on which to trigger.
+ - pull-request: No value. Trigger for pull request actions
+* `schedule-if`: Optional Object. Conditions under which the task should be
+  scheduled, given that it meets the trigger conditions.
+ - `run-job`: List. Job names for which this task should be considered,
+ matching the output from `./wpt test-jobs`
+* `env`: Optional Object. Environment variables to set when running task.
+* `depends-on`: Optional list. List of task names that must be complete
+ before the current task is scheduled.
+* `description`: String. Task description.
+* `name`: Optional String. Name to use for the task overriding the
+ property name. This is useful in combination with substitutions
+ described below.
+* `download-artifacts`: Optional List. Artifacts to download from tasks
+  that this task depends on. Each entry has the following properties:
+ - `task` - Name of the task producing the artifact
+ - `glob` - A glob pattern for the filename of the artifact
+  - `dest` - A directory relative to the home directory in which to place
+ the artifact
+ - `extract` - Optional. A boolean indicating whether an archive artifact
+ should be extracted in-place.
+
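+For illustration, a minimal task definition using these properties might
+look like the following (the values are placeholders loosely based on
+`tasks/test.yml`, not a canonical configuration; `workerType` is also shown
+because the decision script expects it):
+
+```
+tasks:
+  - example-task:
+      provisionerId: proj-wpt
+      schedulerId: taskcluster-github
+      workerType: ci
+      deadline: "24 hours"
+      image: webplatformtests/wpt:0.52
+      maxRunTime: 7200
+      artifacts:
+        public/results:
+          path: /home/test/artifacts
+          type: directory
+      command: "./wpt lint"
+      description: Example task
+      trigger:
+        pull-request:
+```
+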
+## Task Expansions
+
+Using the above syntax it's possible to describe each task
+directly. But typically in a taskgraph there are many common
+properties between tasks so it's tedious and error prone to repeat
+information that's common to multiple tasks. Therefore the taskgraph
+format provides several mechanisms to reuse partial task definitions
+across multiple tasks.
+
+### Components
+
+The other top-level property in the taskgraph format is
+`components`. The value of this property is an object containing named
+partial task definitions. Each task definition may contain a property called
+`use` which is a list of components to use as the basis for the task
+definition. The components list is evaluated in order. If a property
+is not previously defined in the output it is added to the output. If
+it was previously defined, the value is updated according to the type:
+ * Strings and numbers are replaced with a new value
+ * Lists are extended with the additional values
+ * Objects are updated recursively following the above rules
+This means that types must always match between components and the
+final value.
+
+For example
+```
+components:
+ example-1:
+ list_prop:
+ - first
+ - second
+ object_prop:
+ key1: value1
+ key2: base_value
+ example-2:
+ list_prop:
+ - third
+ - fourth
+ object_prop:
+ key3:
+ - value3-1
+
+tasks:
+ - example-task:
+ use:
+ - example-1
+ - example-2
+ object_prop:
+ key2: value2
+ key3:
+ - value3-2
+```
+
+will evaluate to the following task:
+
+```
+example-task:
+ list_prop:
+ - first
+ - second
+ - third
+ - fourth
+ object_prop:
+ key1: value1
+ key2: value2
+ key3:
+ - value3-1
+ - value3-2
+```
+
+Note that components cannot currently define `use` properties of their own.
+
+## Substitutions
+
+Components and tasks can define a property `vars` that holds variables
+which are later substituted into the task definition using the syntax
+`${vars.property-name}`. For example:
+
+```
+components:
+ generic-component:
+ prop: ${vars.value}
+
+tasks:
+ - first:
+ use:
+ - generic-component
+ vars:
+ value: value1
+ - second:
+ use:
+ - generic-component
+ vars:
+ value: value2
+```
+
+Results in the following tasks:
+
+```
+first:
+ prop: value1
+second:
+ prop: value2
+```
+
+## Maps
+
+Instead of defining a task directly, an item in the tasks property may
+be an object with a single property `$map`. This object itself has two
+child properties: `for` and `do`. The value of `for` is a list of
+objects, and the value of `do` is either an object or a list of
+objects. For each object in the `for` property, a set of tasks is
+created by taking a copy of that object for each task in the `do`
+property, updating the object with the properties from the
+corresponding `do` object, using the same rules as for components
+above, and then processing as for a normal task. `$map` rules can also
+be nested.
+
+Note: Although `$map` shares a name with the `$map` used in json-e
+(used in `.taskcluster.yml`), the semantics are different.
+
+For example
+
+```
+components: {}
+tasks:
+ $map:
+ for:
+ - vars:
+ example: value1
+ - vars:
+ example: value2
+ do:
+      example-${vars.example}:
+        prop: ${vars.example}
+```
+
+Results in the tasks
+
+```
+example-value1:
+ prop: value1
+example-value2:
+ prop: value2
+```
+
+Note that in combination with `$map`, variable substitutions are
+applied *twice*; once after the `$map` is evaluated and once after the
+`use` statements are evaluated.
+
+## Chunks
+
+A common requirement for tasks is that they are "chunked" into N
+partial tasks. This is handled specially in the syntax. A `chunks`
+property on the task can be used to define the number of individual
+chunks to create for that task. Each chunked task is created
+with a `chunks` property set to an object containing an `id` property
+containing the one-based index of the chunk and a `total` property
+containing the total number of chunks. These can be substituted into
+the task definition using the same syntax as for `vars` above
+e.g. `${chunks.id}`. Note that because task names must be unique, it's
+common to specify a `name` property on the task that will override the
+property name e.g.
+
+```
+components: {}
+tasks:
+ - chunked-task:
+      chunks: 2
+ command: "task-run --chunk=${chunks.id} --totalChunks=${chunks.total}"
+ name: task-chunk-${chunks.id}
+```
+
+creates tasks:
+
+```
+task-chunk-1:
+ command: "task-run --chunk=1 --totalChunks=2"
+task-chunk-2:
+ command: "task-run --chunk=2 --totalChunks=2"
+```
+
+# Overall processing model
+
+The overall processing model for tasks is as follows:
+ * Evaluate maps
+ * Perform substitutions
+ * Evaluate `use` statements
+ * Expand chunks
+ * Perform substitutions
+
+At each point after maps are evaluated, each task must have a unique name.
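+
+The resolved taskgraph can be inspected locally. For example (an
+illustrative snippet, assuming it is run from the repository root with the
+`tools` package importable and its dependencies such as PyYAML installed):
+
+```
+from tools.ci.tc import taskgraph
+
+# Load tasks/test.yml and apply the processing model described above.
+tasks = taskgraph.load_tasks_from_path("tools/ci/tc/tasks/test.yml")
+print("\n".join(tasks))  # the final, fully expanded task names
+```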
diff --git a/testing/web-platform/tests/tools/ci/tc/__init__.py b/testing/web-platform/tests/tools/ci/tc/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/__init__.py
diff --git a/testing/web-platform/tests/tools/ci/tc/decision.py b/testing/web-platform/tests/tools/ci/tc/decision.py
new file mode 100644
index 0000000000..0a6d03ab6c
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/decision.py
@@ -0,0 +1,404 @@
+# mypy: allow-untyped-defs
+
+import argparse
+import json
+import logging
+import os
+import re
+import subprocess
+from collections import OrderedDict
+
+import taskcluster
+
+from . import taskgraph
+
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+logging.basicConfig()
+logger = logging.getLogger()
+
+
+def get_triggers(event):
+ # Set some variables that we use to get the commits on the current branch
+ ref_prefix = "refs/heads/"
+ is_pr = "pull_request" in event
+ branch = None
+ if not is_pr and "ref" in event:
+ branch = event["ref"]
+ if branch.startswith(ref_prefix):
+ branch = branch[len(ref_prefix):]
+
+ return is_pr, branch
+
+
+def fetch_event_data(queue):
+ try:
+ task_id = os.environ["TASK_ID"]
+ except KeyError:
+ logger.warning("Missing TASK_ID environment variable")
+ # For example under local testing
+ return None
+
+ task_data = queue.task(task_id)
+
+ return task_data.get("extra", {}).get("github_event")
+
+
+def filter_triggers(event, all_tasks):
+ is_pr, branch = get_triggers(event)
+ triggered = OrderedDict()
+ for name, task in all_tasks.items():
+ if "trigger" in task:
+ if is_pr and "pull-request" in task["trigger"]:
+ triggered[name] = task
+ elif branch is not None and "branch" in task["trigger"]:
+ for trigger_branch in task["trigger"]["branch"]:
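+                    # A trailing "*" in the trigger acts as a prefix wildcard;
+                    # e.g. a (hypothetical) trigger of "epochs/*" would match a
+                    # push to the "epochs/daily" branch.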
+ if (trigger_branch == branch or
+ trigger_branch.endswith("*") and branch.startswith(trigger_branch[:-1])):
+ triggered[name] = task
+ logger.info("Triggers match tasks:\n * %s" % "\n * ".join(triggered.keys()))
+ return triggered
+
+
+def get_run_jobs(event):
+ from tools.ci import jobs
+ revish = "%s..%s" % (event["pull_request"]["base"]["sha"]
+ if "pull_request" in event
+ else event["before"],
+ event["pull_request"]["head"]["sha"]
+ if "pull_request" in event
+ else event["after"])
+ logger.info("Looking for changes in range %s" % revish)
+ paths = jobs.get_paths(revish=revish)
+ logger.info("Found changes in paths:%s" % "\n".join(paths))
+ path_jobs = jobs.get_jobs(paths)
+ all_jobs = path_jobs | get_extra_jobs(event)
+ logger.info("Including jobs:\n * %s" % "\n * ".join(all_jobs))
+ return all_jobs
+
+
+def get_extra_jobs(event):
+ body = None
+ jobs = set()
+ if "commits" in event and event["commits"]:
+ body = event["commits"][0]["message"]
+ elif "pull_request" in event:
+ body = event["pull_request"]["body"]
+
+ if not body:
+ return jobs
+
+ regexp = re.compile(r"\s*tc-jobs:(.*)$")
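+    # e.g. a commit message or PR body containing a (hypothetical) line such as
+    # "tc-jobs: lint,wpt_integration" adds both of those jobs to the set.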
+
+ for line in body.splitlines():
+ m = regexp.match(line)
+ if m:
+ items = m.group(1)
+ for item in items.split(","):
+ jobs.add(item.strip())
+ break
+ return jobs
+
+
+def filter_excluded_users(tasks, event):
+ # Some users' pull requests are excluded from tasks,
+ # such as pull requests from automated exports.
+ try:
+ submitter = event["pull_request"]["user"]["login"]
+ except KeyError:
+ # Just ignore excluded users if the
+ # username cannot be pulled from the event.
+ logger.debug("Unable to read username from event. Continuing.")
+ return
+
+ excluded_tasks = []
+ # A separate list of items for tasks is needed to iterate over
+ # because removing an item during iteration will raise an error.
+ for name, task in list(tasks.items()):
+ if submitter in task.get("exclude-users", []):
+ excluded_tasks.append(name)
+ tasks.pop(name) # removing excluded task
+ if excluded_tasks:
+ logger.info(
+ f"Tasks excluded for user {submitter}:\n * " +
+ "\n * ".join(excluded_tasks)
+ )
+
+
+def filter_schedule_if(event, tasks):
+ scheduled = OrderedDict()
+ run_jobs = None
+ for name, task in tasks.items():
+ if "schedule-if" in task:
+ if "run-job" in task["schedule-if"]:
+ if run_jobs is None:
+ run_jobs = get_run_jobs(event)
+ if "all" in run_jobs or any(item in run_jobs for item in task["schedule-if"]["run-job"]):
+ scheduled[name] = task
+ else:
+ scheduled[name] = task
+ logger.info("Scheduling rules match tasks:\n * %s" % "\n * ".join(scheduled.keys()))
+ return scheduled
+
+
+def get_fetch_rev(event):
+ is_pr, _ = get_triggers(event)
+ if is_pr:
+ # Try to get the actual rev so that all non-decision tasks are pinned to that
+ rv = ["refs/pull/%s/merge" % event["pull_request"]["number"]]
+ # For every PR GitHub maintains a 'head' branch with commits from the
+ # PR, and a 'merge' branch containing a merge commit between the base
+ # branch and the PR.
+ for ref_type in ["head", "merge"]:
+ ref = "refs/pull/%s/%s" % (event["pull_request"]["number"], ref_type)
+ sha = None
+ try:
+ output = subprocess.check_output(["git", "ls-remote", "origin", ref])
+ except subprocess.CalledProcessError:
+ import traceback
+ logger.error(traceback.format_exc())
+ logger.error("Failed to get commit sha1 for %s" % ref)
+ else:
+ if not output:
+ logger.error("Failed to get commit for %s" % ref)
+ else:
+ sha = output.decode("utf-8").split()[0]
+ rv.append(sha)
+ rv = tuple(rv)
+ else:
+ # For a branch push we have a ref and a head but no merge SHA
+ rv = (event["ref"], event["after"], None)
+ assert len(rv) == 3
+ return rv
+
+
+def build_full_command(event, task):
+ fetch_ref, head_sha, merge_sha = get_fetch_rev(event)
+ cmd_args = {
+ "task_name": task["name"],
+ "repo_url": event["repository"]["clone_url"],
+ "fetch_ref": fetch_ref,
+ "task_cmd": task["command"],
+ "install_str": "",
+ }
+
+ options = task.get("options", {})
+ options_args = []
+ options_args.append("--ref=%s" % fetch_ref)
+ if head_sha is not None:
+ options_args.append("--head-rev=%s" % head_sha)
+ if merge_sha is not None:
+ options_args.append("--merge-rev=%s" % merge_sha)
+ if options.get("oom-killer"):
+ options_args.append("--oom-killer")
+ if options.get("xvfb"):
+ options_args.append("--xvfb")
+ if not options.get("hosts"):
+ options_args.append("--no-hosts")
+ else:
+ options_args.append("--hosts")
+ # Check out the expected SHA unless it is overridden (e.g. to base_head).
+ if options.get("checkout"):
+ options_args.append("--checkout=%s" % options["checkout"])
+ for browser in options.get("browser", []):
+ options_args.append("--browser=%s" % browser)
+ if options.get("channel"):
+ options_args.append("--channel=%s" % options["channel"])
+ if options.get("install-certificates"):
+ options_args.append("--install-certificates")
+
+ cmd_args["options_str"] = " ".join(str(item) for item in options_args)
+
+ install_packages = task.get("install")
+ if install_packages:
+ install_items = ["apt update -qqy"]
+ install_items.extend("apt install -qqy %s" % item
+ for item in install_packages)
+ cmd_args["install_str"] = "\n".join("sudo %s;" % item for item in install_items)
+
+ return ["/bin/bash",
+ "--login",
+ "-xc",
+ """
+~/start.sh \
+ %(repo_url)s \
+ %(fetch_ref)s;
+%(install_str)s
+cd web-platform-tests;
+./tools/ci/run_tc.py %(options_str)s -- %(task_cmd)s;
+""" % cmd_args]
+
+
+def get_owner(event):
+ if "pusher" in event:
+ pusher = event.get("pusher", {}).get("email", "")
+ if pusher and "@" in pusher:
+ return pusher
+ return "web-platform-tests@users.noreply.github.com"
+
+
+def create_tc_task(event, task, taskgroup_id, depends_on_ids, env_extra=None):
+ command = build_full_command(event, task)
+ task_id = taskcluster.slugId()
+ task_data = {
+ "taskGroupId": taskgroup_id,
+ "created": taskcluster.fromNowJSON(""),
+ "deadline": taskcluster.fromNowJSON(task["deadline"]),
+ "provisionerId": task["provisionerId"],
+ "schedulerId": task["schedulerId"],
+ "workerType": task["workerType"],
+ "metadata": {
+ "name": task["name"],
+ "description": task.get("description", ""),
+ "owner": get_owner(event),
+ "source": event["repository"]["clone_url"]
+ },
+ "payload": {
+ "artifacts": task.get("artifacts"),
+ "command": command,
+ "image": task.get("image"),
+ "maxRunTime": task.get("maxRunTime"),
+ "env": task.get("env", {}),
+ },
+ "extra": {
+ "github_event": json.dumps(event)
+ },
+ "routes": ["checks"]
+ }
+ if "extra" in task:
+ task_data["extra"].update(task["extra"])
+ if env_extra:
+ task_data["payload"]["env"].update(env_extra)
+ if depends_on_ids:
+ task_data["dependencies"] = depends_on_ids
+ task_data["requires"] = task.get("requires", "all-completed")
+ return task_id, task_data
+
+
+def get_artifact_data(artifact, task_id_map):
+ task_id, data = task_id_map[artifact["task"]]
+ return {
+ "task": task_id,
+ "glob": artifact["glob"],
+ "dest": artifact["dest"],
+ "extract": artifact.get("extract", False)
+ }
+
+
+def build_task_graph(event, all_tasks, tasks):
+ task_id_map = OrderedDict()
+ taskgroup_id = os.environ.get("TASK_ID", taskcluster.slugId())
+
+ def add_task(task_name, task):
+ depends_on_ids = []
+ if "depends-on" in task:
+ for depends_name in task["depends-on"]:
+ if depends_name not in task_id_map:
+ add_task(depends_name,
+ all_tasks[depends_name])
+ depends_on_ids.append(task_id_map[depends_name][0])
+ env_extra = {}
+ if "download-artifacts" in task:
+ env_extra["TASK_ARTIFACTS"] = json.dumps(
+ [get_artifact_data(artifact, task_id_map)
+ for artifact in task["download-artifacts"]])
+
+ task_id, task_data = create_tc_task(event, task, taskgroup_id, depends_on_ids,
+ env_extra=env_extra)
+ task_id_map[task_name] = (task_id, task_data)
+
+ for task_name, task in tasks.items():
+ if task_name == "sink-task":
+ # sink-task will be created below at the end of the ordered dict,
+ # so that it can depend on all other tasks.
+ continue
+ add_task(task_name, task)
+
+ # GitHub branch protection for pull requests needs us to name explicit
+ # required tasks - which doesn't suffice when using a dynamic task graph.
+ # To work around this we declare a sink task that depends on all the other
+ # tasks completing, and checks if they have succeeded. We can then
+ # make the sink task the sole required task for pull requests.
+ sink_task = tasks.get("sink-task")
+ if sink_task:
+ logger.info("Scheduling sink-task")
+ depends_on_ids = [x[0] for x in task_id_map.values()]
+ sink_task["command"] += " {}".format(" ".join(depends_on_ids))
+ task_id_map["sink-task"] = create_tc_task(
+ event, sink_task, taskgroup_id, depends_on_ids)
+ else:
+ logger.info("sink-task is not scheduled")
+
+ return task_id_map
+
+
+def create_tasks(queue, task_id_map):
+ for (task_id, task_data) in task_id_map.values():
+ queue.createTask(task_id, task_data)
+
+
+def get_event(queue, event_path):
+ if event_path is not None:
+ try:
+ with open(event_path) as f:
+ event_str = f.read()
+ except OSError:
+ logger.error("Missing event file at path %s" % event_path)
+ raise
+ elif "TASK_EVENT" in os.environ:
+ event_str = os.environ["TASK_EVENT"]
+ else:
+ event_str = fetch_event_data(queue)
+ if not event_str:
+ raise ValueError("Can't find GitHub event definition; for local testing pass --event-path")
+ try:
+ return json.loads(event_str)
+ except ValueError:
+ logger.error("Event was not valid JSON")
+ raise
+
+
+def decide(event):
+ all_tasks = taskgraph.load_tasks_from_path(os.path.join(here, "tasks", "test.yml"))
+
+ triggered_tasks = filter_triggers(event, all_tasks)
+ scheduled_tasks = filter_schedule_if(event, triggered_tasks)
+ filter_excluded_users(scheduled_tasks, event)
+
+ logger.info("UNSCHEDULED TASKS:\n %s" % "\n ".join(sorted(set(all_tasks.keys()) -
+ set(scheduled_tasks.keys()))))
+ logger.info("SCHEDULED TASKS:\n %s" % "\n ".join(sorted(scheduled_tasks.keys())))
+
+ task_id_map = build_task_graph(event, all_tasks, scheduled_tasks)
+ return task_id_map
+
+
+def get_parser():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--event-path",
+ help="Path to file containing serialized GitHub event")
+ parser.add_argument("--dry-run", action="store_true",
+ help="Don't actually create the tasks, just output the tasks that "
+ "would be created")
+ parser.add_argument("--tasks-path",
+ help="Path to file in which to write payload for all scheduled tasks")
+ return parser
+
+
+def run(venv, **kwargs):
+ queue = taskcluster.Queue({'rootUrl': os.environ['TASKCLUSTER_PROXY_URL']})
+ event = get_event(queue, event_path=kwargs["event_path"])
+
+ task_id_map = decide(event)
+
+ try:
+ if not kwargs["dry_run"]:
+ create_tasks(queue, task_id_map)
+ else:
+ print(json.dumps(task_id_map, indent=2))
+ finally:
+ if kwargs["tasks_path"]:
+ with open(kwargs["tasks_path"], "w") as f:
+ json.dump(task_id_map, f, indent=2)
diff --git a/testing/web-platform/tests/tools/ci/tc/download.py b/testing/web-platform/tests/tools/ci/tc/download.py
new file mode 100644
index 0000000000..6a78935be4
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/download.py
@@ -0,0 +1,111 @@
+# mypy: allow-untyped-defs
+
+import argparse
+import os
+import logging
+
+import requests
+
+import github
+
+
+logging.basicConfig()
+logger = logging.getLogger("tc-download")
+
+# The root URL of the Taskcluster deployment from which to download wpt reports
+# (after https://bugzilla.mozilla.org/show_bug.cgi?id=1574668 lands, this will
+# be https://community-tc.services.mozilla.com)
+TASKCLUSTER_ROOT_URL = 'https://taskcluster.net'
+
+
+def get_parser():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--ref", action="store", default="master",
+ help="Branch (in the GitHub repository) or commit to fetch logs for")
+ parser.add_argument("--artifact-name", action="store", default="wpt_report.json.gz",
+ help="Log type to fetch")
+ parser.add_argument("--repo-name", action="store", default="web-platform-tests/wpt",
+ help="GitHub repo name in the format owner/repo. "
+ "This must be the repo from which the Taskcluster run was scheduled "
+ "(for PRs this is the repo into which the PR would merge)")
+ parser.add_argument("--token-file", action="store",
+ help="File containing GitHub token")
+ parser.add_argument("--out-dir", action="store", default=".",
+ help="Path to save the logfiles")
+ return parser
+
+
+def get_json(url, key=None):
+ resp = requests.get(url)
+ resp.raise_for_status()
+ data = resp.json()
+ if key:
+ data = data[key]
+ return data
+
+
+def get(url, dest, name):
+ resp = requests.get(url)
+ resp.raise_for_status()
+ path = os.path.join(dest, name)
+ with open(path, "w") as f:
+ f.write(resp.content)
+ return path
+
+
+def run(*args, **kwargs):
+ if not os.path.exists(kwargs["out_dir"]):
+ os.mkdir(kwargs["out_dir"])
+
+ if kwargs["token_file"]:
+ with open(kwargs["token_file"]) as f:
+ gh = github.Github(f.read().strip())
+ else:
+ gh = github.Github()
+
+ repo = gh.get_repo(kwargs["repo_name"])
+ commit = repo.get_commit(kwargs["ref"])
+ statuses = commit.get_statuses()
+ taskgroups = set()
+
+ for status in statuses:
+ if not status.context.startswith("Taskcluster "):
+ continue
+ if status.state == "pending":
+ continue
+ taskgroup_id = status.target_url.rsplit("/", 1)[1]
+ taskgroups.add(taskgroup_id)
+
+ if not taskgroups:
+ logger.error("No complete Taskcluster runs found for ref %s" % kwargs["ref"])
+ return 1
+
+ for taskgroup in taskgroups:
+ if TASKCLUSTER_ROOT_URL == 'https://taskcluster.net':
+ # NOTE: this condition can be removed after November 9, 2019
+ taskgroup_url = "https://queue.taskcluster.net/v1/task-group/%s/list"
+ artifacts_list_url = "https://queue.taskcluster.net/v1/task/%s/artifacts"
+ else:
+ taskgroup_url = TASKCLUSTER_ROOT_URL + "/api/queue/v1/task-group/%s/list"
+ artifacts_list_url = TASKCLUSTER_ROOT_URL + "/api/queue/v1/task/%s/artifacts"
+ tasks = get_json(taskgroup_url % taskgroup, "tasks")
+ for task in tasks:
+ task_id = task["status"]["taskId"]
+ url = artifacts_list_url % (task_id,)
+ for artifact in get_json(url, "artifacts"):
+ if artifact["name"].endswith(kwargs["artifact_name"]):
+ filename = "%s-%s-%s" % (task["task"]["metadata"]["name"],
+ task_id,
+ kwargs["artifact_name"])
+ path = get("%s/%s" % (url, artifact["name"]), kwargs["out_dir"], filename)
+ logger.info(path)
+
+
+def main():
+ kwargs = get_parser().parse_args()
+
+    run(None, **vars(kwargs))
+
+
+if __name__ == "__main__":
+ main() # type: ignore
diff --git a/testing/web-platform/tests/tools/ci/tc/github_checks_output.py b/testing/web-platform/tests/tools/ci/tc/github_checks_output.py
new file mode 100644
index 0000000000..e982ca33d0
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/github_checks_output.py
@@ -0,0 +1,40 @@
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Optional, Text
+
+
+class GitHubChecksOutputter:
+ """Provides a method to output data to be shown in the GitHub Checks UI.
+
+ This can be useful to provide a summary of a given check (e.g. the lint)
+ to enable developers to quickly understand what has gone wrong. The output
+ supports markdown format.
+
+ https://docs.taskcluster.net/docs/reference/integrations/github/checks#custom-text-output-in-checks
+ """
+ def __init__(self, path):
+ # type: (Text) -> None
+ self.path = path
+
+ def output(self, line):
+ # type: (Text) -> None
+ with open(self.path, mode="a") as f:
+ f.write(line)
+ f.write("\n")
+
+
+__outputter = None
+
+
+def get_gh_checks_outputter(filepath):
+ # type: (Optional[Text]) -> Optional[GitHubChecksOutputter]
+ """Return the outputter for GitHub Checks output, if enabled.
+
+ :param filepath: The filepath to write GitHub Check output information to,
+ or None if not enabled.
+ """
+ global __outputter
+ if filepath and __outputter is None:
+ __outputter = GitHubChecksOutputter(filepath)
+ return __outputter
diff --git a/testing/web-platform/tests/tools/ci/tc/sink_task.py b/testing/web-platform/tests/tools/ci/tc/sink_task.py
new file mode 100644
index 0000000000..ec3d5a47ca
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/sink_task.py
@@ -0,0 +1,65 @@
+# mypy: allow-untyped-defs
+
+import argparse
+import logging
+import os
+
+import taskcluster
+
+from .github_checks_output import get_gh_checks_outputter
+
+
+logging.basicConfig()
+logger = logging.getLogger()
+
+
+def check_task_statuses(task_ids, github_checks_outputter):
+ """Verifies whether a set of Taskcluster tasks completed successfully or not.
+
+    Returns 0 if all tasks completed successfully, 1 otherwise."""
+
+ queue = taskcluster.Queue({'rootUrl': os.environ['TASKCLUSTER_ROOT_URL']})
+ failed_tasks = []
+ for task in task_ids:
+ status = queue.status(task)
+ state = status['status']['state']
+ if state == 'failed' or state == 'exception':
+ logger.error(f'Task {task} failed with state "{state}"')
+ failed_tasks.append(status)
+ elif state != 'completed':
+ logger.error(f'Task {task} had unexpected state "{state}"')
+ failed_tasks.append(status)
+
+ if failed_tasks and github_checks_outputter:
+ github_checks_outputter.output('Failed tasks:')
+ for task in failed_tasks:
+ # We need to make an additional call to get the task name.
+ task_id = task['status']['taskId']
+ task_name = queue.task(task_id)['metadata']['name']
+ github_checks_outputter.output('* `{}` failed with status `{}`'.format(task_name, task['status']['state']))
+ else:
+ logger.info('All tasks completed successfully')
+ if github_checks_outputter:
+ github_checks_outputter.output('All tasks completed successfully')
+ return 1 if failed_tasks else 0
+
+
+def get_parser():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--github-checks-text-file", type=str,
+ help="Path to GitHub checks output file for Taskcluster runs")
+ parser.add_argument("tasks", nargs="+",
+ help="A set of Taskcluster task ids to verify the state of.")
+ return parser
+
+
+def run(venv, **kwargs):
+ github_checks_outputter = get_gh_checks_outputter(kwargs["github_checks_text_file"])
+
+ if github_checks_outputter:
+ github_checks_outputter.output(
+ "This check acts as a 'sink' for all other Taskcluster-based checks. "
+ "A failure here means that some other check has failed, which is the "
+ "real blocker.\n"
+ )
+ return check_task_statuses(kwargs['tasks'], github_checks_outputter)
diff --git a/testing/web-platform/tests/tools/ci/tc/taskgraph.py b/testing/web-platform/tests/tools/ci/tc/taskgraph.py
new file mode 100644
index 0000000000..d270dad460
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/taskgraph.py
@@ -0,0 +1,171 @@
+# mypy: allow-untyped-defs
+
+import json
+import os
+import re
+from collections import OrderedDict
+from copy import deepcopy
+
+import yaml
+
+here = os.path.dirname(__file__)
+
+
+def first(iterable):
+ # First item from a list or iterator
+ if not hasattr(iterable, "next"):
+ if hasattr(iterable, "__iter__"):
+ iterable = iter(iterable)
+ else:
+ raise ValueError("Object isn't iterable")
+ return next(iterable)
+
+
+def load_task_file(path):
+ with open(path) as f:
+ return yaml.safe_load(f)
+
+
+def update_recursive(data, update_data):
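+    # Merge update_data into data in place, following the component rules
+    # documented in tools/ci/tc/README.md: scalars replace the existing value,
+    # lists extend it, and nested objects are merged recursively.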
+ for key, value in update_data.items():
+ if key not in data:
+ data[key] = value
+ else:
+ initial_value = data[key]
+ if isinstance(value, dict):
+ if not isinstance(initial_value, dict):
+ raise ValueError("Variable %s has inconsistent types "
+ "(expected object)" % key)
+ update_recursive(initial_value, value)
+ elif isinstance(value, list):
+ if not isinstance(initial_value, list):
+ raise ValueError("Variable %s has inconsistent types "
+ "(expected list)" % key)
+ initial_value.extend(value)
+ else:
+ data[key] = value
+
+
+def resolve_use(task_data, templates):
+ rv = {}
+ if "use" in task_data:
+ for template_name in task_data["use"]:
+ update_recursive(rv, deepcopy(templates[template_name]))
+ update_recursive(rv, task_data)
+ rv.pop("use", None)
+ return rv
+
+
+def resolve_name(task_data, default_name):
+ if "name" not in task_data:
+ task_data["name"] = default_name
+ return task_data
+
+
+def resolve_chunks(task_data):
+ if "chunks" not in task_data:
+ return [task_data]
+ rv = []
+ total_chunks = task_data["chunks"]
+ for i in range(1, total_chunks + 1):
+ chunk_data = deepcopy(task_data)
+ chunk_data["chunks"] = {"id": i,
+ "total": total_chunks}
+ rv.append(chunk_data)
+ return rv
+
+
+def replace_vars(input_string, variables):
+ # TODO: support replacing as a non-string type?
+ variable_re = re.compile(r"(?<!\\)\${([^}]+)}")
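+    # e.g. replace_vars("wpt-${vars.browser}", {"vars": {"browser": "firefox"}})
+    # returns "wpt-firefox"; unresolvable variables are left unsubstituted.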
+
+ def replacer(m):
+ var = m.group(1).split(".")
+ repl = variables
+ for part in var:
+ try:
+ repl = repl[part]
+ except Exception:
+ # Don't substitute
+ return m.group(0)
+ return str(repl)
+
+ return variable_re.sub(replacer, input_string)
+
+
+def sub_variables(data, variables):
+ if isinstance(data, str):
+ return replace_vars(data, variables)
+ if isinstance(data, list):
+ return [sub_variables(item, variables) for item in data]
+ if isinstance(data, dict):
+ return {key: sub_variables(value, variables)
+ for key, value in data.items()}
+ return data
+
+
+def substitute_variables(task):
+ variables = {"vars": task.get("vars", {}),
+ "chunks": task.get("chunks", {})}
+
+ return sub_variables(task, variables)
+
+
+def expand_maps(task):
+ name = first(task.keys())
+ if name != "$map":
+ return [task]
+
+ map_data = task["$map"]
+ if set(map_data.keys()) != {"for", "do"}:
+ raise ValueError("$map objects must have exactly two properties named 'for' "
+ "and 'do' (got %s)" % ("no properties" if not map_data.keys()
+ else ", ". join(map_data.keys())))
+ rv = []
+ for for_data in map_data["for"]:
+ do_items = map_data["do"]
+ if not isinstance(do_items, list):
+ do_items = expand_maps(do_items)
+ for do_data in do_items:
+ task_data = deepcopy(for_data)
+ if len(do_data.keys()) != 1:
+ raise ValueError("Each item in the 'do' list must be an object "
+ "with a single property")
+ name = first(do_data.keys())
+ update_recursive(task_data, deepcopy(do_data[name]))
+ rv.append({name: task_data})
+ return rv
+
+
+def load_tasks(tasks_data):
+ map_resolved_tasks = OrderedDict()
+ tasks = []
+
+ for task in tasks_data["tasks"]:
+ if len(task.keys()) != 1:
+ raise ValueError("Each task must be an object with a single property")
+ for task in expand_maps(task):
+ if len(task.keys()) != 1:
+ raise ValueError("Each task must be an object with a single property")
+ name = first(task.keys())
+ data = task[name]
+ new_name = sub_variables(name, {"vars": data.get("vars", {})})
+ if new_name in map_resolved_tasks:
+ raise ValueError("Got duplicate task name %s" % new_name)
+ map_resolved_tasks[new_name] = substitute_variables(data)
+
+ for task_default_name, data in map_resolved_tasks.items():
+ task = resolve_use(data, tasks_data["components"])
+ task = resolve_name(task, task_default_name)
+ tasks.extend(resolve_chunks(task))
+
+ tasks = [substitute_variables(task_data) for task_data in tasks]
+ return OrderedDict([(t["name"], t) for t in tasks])
+
+
+def load_tasks_from_path(path):
+ return load_tasks(load_task_file(path))
+
+
+def run(venv, **kwargs):
+ print(json.dumps(load_tasks_from_path(os.path.join(here, "tasks", "test.yml")), indent=2))
diff --git a/testing/web-platform/tests/tools/ci/tc/tasks/test.yml b/testing/web-platform/tests/tools/ci/tc/tasks/test.yml
new file mode 100644
index 0000000000..fd0fda9c99
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/tasks/test.yml
@@ -0,0 +1,558 @@
+components:
+ wpt-base:
+ provisionerId: proj-wpt
+ workerType: ci
+ schedulerId: taskcluster-github
+ deadline: "24 hours"
+ image: webplatformtests/wpt:0.52
+ maxRunTime: 7200
+ artifacts:
+ public/results:
+ path: /home/test/artifacts
+ type: directory
+ extra:
+ github:
+ customCheckRun:
+ textArtifactName: public/results/checkrun.md
+
+ wpt-testharness:
+ chunks: 16
+ maxRunTime: 10800
+ vars:
+ test-type: testharness
+
+ wpt-reftest:
+ chunks: 5
+ vars:
+ test-type: reftest
+
+ wpt-print-reftest:
+ chunks: 1
+ vars:
+ test-type: print-reftest
+
+ wpt-wdspec:
+ chunks: 2
+ vars:
+ test-type: wdspec
+
+ wpt-crashtest:
+ chunks: 1
+ vars:
+ test-type: crashtest
+
+ run-options:
+ options:
+ xvfb: true
+ oom-killer: true
+ hosts: true
+ install-certificates: true
+
+ wpt-run:
+ name: wpt-${vars.browser}-${vars.channel}-${vars.suite}-${chunks.id}
+ options:
+ browser:
+ - ${vars.browser}
+ channel: ${vars.channel}
+ command: >-
+ ./tools/ci/taskcluster-run.py
+ ${vars.browser}
+ ${vars.channel}
+ --
+ --channel=${vars.channel}
+ --log-wptreport=../artifacts/wpt_report.json
+ --log-wptscreenshot=../artifacts/wpt_screenshot.txt
+ --no-fail-on-unexpected
+ --this-chunk=${chunks.id}
+ --total-chunks=${chunks.total}
+ --test-type=${vars.suite}
+
+ trigger-master:
+ trigger:
+ branch:
+ - master
+
+ trigger-push:
+ trigger:
+ branch:
+ - triggers/${vars.browser}_${vars.channel}
+
+ trigger-daily:
+ trigger:
+ branch:
+ - epochs/daily
+
+ trigger-weekly:
+ trigger:
+ branch:
+ - epochs/weekly
+
+ trigger-pr:
+ trigger:
+ pull-request:
+
+ browser-firefox:
+ depends-on:
+ - download-firefox-${vars.channel}
+ download-artifacts:
+ - task: download-firefox-${vars.channel}
+ glob: public/results/firefox-${vars.channel}.*
+ dest: build/
+ extract: true
+
+ browser-webkitgtk_minibrowser: {}
+
+ browser-chrome: {}
+
+ browser-chromium: {}
+
+ browser-servo: {}
+
+ tox-python3_6:
+ env:
+ TOXENV: py36
+ PY_COLORS: 0
+ install:
+ - python3.6
+ - python3.6-distutils
+ - python3.6-dev
+
+ tox-python3_10:
+ env:
+ TOXENV: py310
+ PY_COLORS: 0
+ install:
+ - python3.10
+ - python3.10-distutils
+ - python3.10-dev
+
+ tests-affected:
+ options:
+ browser:
+ - ${vars.browser}
+ channel: ${vars.channel}
+ schedule-if:
+ run-job:
+ - affected_tests
+
+tasks:
+ # The scheduling order of tasks is NOT determined by the order in which they
+ # are defined, but by their dependencies (depends-on).
+
+ # Run full suites on push
+ - $map:
+ for:
+ - vars:
+ suite: testharness
+ - vars:
+ suite: reftest
+ - vars:
+ suite: wdspec
+ - vars:
+ suite: crashtest
+ do:
+ $map:
+ for:
+ - vars:
+ browser: firefox
+ channel: nightly
+ use:
+ - trigger-master
+ - trigger-push
+ - vars:
+ browser: firefox
+ channel: beta
+ use:
+ - trigger-weekly
+ - trigger-push
+ - vars:
+ browser: firefox
+ channel: stable
+ use:
+ - trigger-daily
+ - trigger-push
+ - vars:
+ # Chromium ToT
+ browser: chromium
+ channel: nightly
+ use:
+ - trigger-daily
+ - trigger-push
+ - vars:
+ browser: chrome
+ channel: dev
+ use:
+ - trigger-master
+ - trigger-push
+ - vars:
+ browser: chrome
+ channel: beta
+ use:
+ - trigger-weekly
+ - trigger-push
+ - vars:
+ browser: chrome
+ channel: stable
+ use:
+ - trigger-daily
+ - trigger-push
+ - vars:
+ browser: webkitgtk_minibrowser
+ channel: nightly
+ use:
+ - trigger-daily
+ - trigger-push
+ - vars:
+ browser: webkitgtk_minibrowser
+ channel: stable
+ use:
+ - trigger-weekly
+ - trigger-push
+ - vars:
+ browser: webkitgtk_minibrowser
+ channel: beta
+ use:
+ - trigger-weekly
+ - trigger-push
+ - vars:
+ browser: servo
+ channel: nightly
+ use:
+ - trigger-weekly
+ - trigger-push
+ do:
+ - ${vars.browser}-${vars.channel}-${vars.suite}:
+ use:
+ - wpt-base
+ - run-options
+ - wpt-run
+ - browser-${vars.browser}
+ - wpt-${vars.suite}
+ description: >-
+ A subset of WPT's "${vars.suite}" tests (chunk number ${chunks.id}
+ of ${chunks.total}), run in the ${vars.channel} release of
+ ${vars.browser}.
+
+  # print-reftest is currently only supported by Chrome and Firefox.
+ - $map:
+ for:
+ - vars:
+ suite: print-reftest
+ do:
+ $map:
+ for:
+ - vars:
+ browser: firefox
+ channel: nightly
+ use:
+ - trigger-master
+ - trigger-push
+ - vars:
+ browser: firefox
+ channel: beta
+ use:
+ - trigger-weekly
+ - trigger-push
+ - vars:
+ browser: firefox
+ channel: stable
+ use:
+ - trigger-daily
+ - trigger-push
+ - vars:
+ # Chromium ToT
+ browser: chromium
+ channel: nightly
+ use:
+ - trigger-daily
+ - trigger-push
+ - vars:
+ browser: chrome
+ channel: dev
+ use:
+ - trigger-master
+ - trigger-push
+ - vars:
+ browser: chrome
+ channel: beta
+ use:
+ - trigger-weekly
+ - trigger-push
+ - vars:
+ browser: chrome
+ channel: stable
+ use:
+ - trigger-daily
+ - trigger-push
+ do:
+ - ${vars.browser}-${vars.channel}-${vars.suite}:
+ use:
+ - wpt-base
+ - run-options
+ - wpt-run
+ - browser-${vars.browser}
+ - wpt-${vars.suite}
+ description: >-
+ A subset of WPT's "${vars.suite}" tests (chunk number ${chunks.id}
+ of ${chunks.total}), run in the ${vars.channel} release of
+ ${vars.browser}.
+
+ - $map:
+ for:
+ - vars:
+ browser: firefox
+ channel: nightly
+ stability-exclude-users:
+ - moz-wptsync-bot
+ - vars:
+ browser: chrome
+ channel: dev
+ stability-exclude-users:
+ - chromium-wpt-export-bot
+ do:
+ - wpt-${vars.browser}-${vars.channel}-stability:
+ use:
+ - wpt-base
+ - run-options
+ - browser-${vars.browser}
+ - trigger-pr
+ - tests-affected
+ description: >-
+ Verify that all tests affected by a pull request are stable
+ when executed in ${vars.browser}.
+ command: >-
+ ./tools/ci/taskcluster-run.py
+ --commit-range base_head
+ ${vars.browser}
+ ${vars.channel}
+ --
+ --channel=${vars.channel}
+ --verify
+ --verify-no-chaos-mode
+ --verify-repeat-loop=0
+ --verify-repeat-restart=10
+ --github-checks-text-file="/home/test/artifacts/checkrun.md"
+ exclude-users: ${vars.stability-exclude-users}
+
+ - wpt-${vars.browser}-${vars.channel}-results:
+ use:
+ - wpt-base
+ - run-options
+ - browser-${vars.browser}
+ - trigger-pr
+ - tests-affected
+ description: >-
+ Collect results for all tests affected by a pull request in
+ ${vars.browser}.
+ command: >-
+ ./tools/ci/taskcluster-run.py
+ --commit-range base_head
+ ${vars.browser}
+ ${vars.channel}
+ --
+ --channel=${vars.channel}
+ --no-fail-on-unexpected
+ --log-wptreport=../artifacts/wpt_report.json
+ --log-wptscreenshot=../artifacts/wpt_screenshot.txt
+
+ - wpt-${vars.browser}-${vars.channel}-results-without-changes:
+ use:
+ - wpt-base
+ - run-options
+ - browser-${vars.browser}
+ - trigger-pr
+ - tests-affected
+ options:
+ checkout: base_head
+ description: >-
+ Collect results for all tests affected by a pull request in
+ ${vars.browser} but without the changes in the PR.
+ command: >-
+ ./tools/ci/taskcluster-run.py
+ --commit-range task_head
+ ${vars.browser}
+ ${vars.channel}
+ --
+ --channel=${vars.channel}
+ --no-fail-on-unexpected
+ --log-wptreport=../artifacts/wpt_report.json
+ --log-wptscreenshot=../artifacts/wpt_screenshot.txt
+ - $map:
+ for:
+ - vars:
+ channel: nightly
+ - vars:
+ channel: beta
+ - vars:
+ channel: stable
+ do:
+ download-firefox-${vars.channel}:
+ use:
+ - wpt-base
+ command: "./wpt install --download-only --destination /home/test/artifacts/ --channel=${vars.channel} --rename=firefox-${vars.channel} firefox browser"
+
+ - lint:
+ use:
+ - wpt-base
+ - trigger-master
+ - trigger-pr
+ description: >-
+ Lint for wpt-specific requirements
+ command: "./wpt lint --all --github-checks-text-file=/home/test/artifacts/checkrun.md"
+
+ - update-built:
+ use:
+ - wpt-base
+ - trigger-pr
+ schedule-if:
+ run-job:
+ - update_built
+ command: "./tools/ci/ci_built_diff.sh"
+
+ - tools/ unittests (Python 3.6):
+ description: >-
+ Unit tests for tools running under Python 3.6, excluding wptrunner
+ use:
+ - wpt-base
+ - trigger-pr
+ - tox-python3_6
+ command: ./tools/ci/ci_tools_unittest.sh
+ env:
+ HYPOTHESIS_PROFILE: ci
+ schedule-if:
+ run-job:
+ - tools_unittest
+
+ - tools/ unittests (Python 3.10):
+ description: >-
+ Unit tests for tools running under Python 3.10, excluding wptrunner
+ use:
+ - wpt-base
+ - trigger-pr
+ - tox-python3_10
+ command: ./tools/ci/ci_tools_unittest.sh
+ env:
+ HYPOTHESIS_PROFILE: ci
+ schedule-if:
+ run-job:
+ - tools_unittest
+
+ - tools/ integration tests (Python 3.6):
+ description: >-
+ Integration tests for tools running under Python 3.6
+ use:
+ - wpt-base
+ - trigger-pr
+ - tox-python3_6
+ command: ./tools/ci/ci_tools_integration_test.sh
+ install:
+ - libnss3-tools
+ options:
+ oom-killer: true
+ browser:
+ - firefox
+ - chrome
+ channel: experimental
+ xvfb: true
+ hosts: true
+ schedule-if:
+ run-job:
+ - wpt_integration
+
+ - tools/ integration tests (Python 3.10):
+ description: >-
+ Integration tests for tools running under Python 3.10
+ use:
+ - wpt-base
+ - trigger-pr
+ - tox-python3_10
+ command: ./tools/ci/ci_tools_integration_test.sh
+ install:
+ - libnss3-tools
+ options:
+ oom-killer: true
+ browser:
+ - firefox
+ - chrome
+ channel: experimental
+ xvfb: true
+ hosts: true
+ schedule-if:
+ run-job:
+ - wpt_integration
+
+ - resources/ tests (Python 3.6):
+ description: >-
+ Tests for testharness.js and other files in resources/ under Python 3.6
+ use:
+ - wpt-base
+ - trigger-pr
+ - tox-python3_6
+ command: ./tools/ci/ci_resources_unittest.sh
+ install:
+ - libnss3-tools
+ options:
+ browser:
+ - firefox
+ xvfb: true
+ hosts: true
+ schedule-if:
+ run-job:
+ - resources_unittest
+
+ - resources/ tests (Python 3.10):
+ description: >-
+ Tests for testharness.js and other files in resources/ under Python 3.10
+ use:
+ - wpt-base
+ - trigger-pr
+ - tox-python3_10
+ command: ./tools/ci/ci_resources_unittest.sh
+ install:
+ - libnss3-tools
+ options:
+ browser:
+ - firefox
+ xvfb: true
+ hosts: true
+ schedule-if:
+ run-job:
+ - resources_unittest
+
+ - infrastructure/ tests:
+ description: >-
+ Smoketests for wptrunner
+ vars:
+ channel: nightly
+ use:
+ - wpt-base
+ - trigger-pr
+ - browser-firefox
+ command: ./tools/ci/ci_wptrunner_infrastructure.sh
+ install:
+ - python3-pip
+ - libnss3-tools
+ - libappindicator1
+ - fonts-liberation
+ options:
+ oom-killer: true
+ browser:
+ - firefox
+ - chrome
+ channel: experimental
+ xvfb: true
+ hosts: false
+ schedule-if:
+ run-job:
+ - wptrunner_infrastructure
+
+ # Note: even though sink-task does not have `depends-on`, it depends on all
+ # other tasks (dynamically added by tools/ci/tc/decision.py).
+ - sink-task:
+ description: >-
+ Sink task for all other tasks; indicates success
+ use:
+ - wpt-base
+ - trigger-pr
+ command: "./wpt tc-sink-task --github-checks-text-file=/home/test/artifacts/checkrun.md"
+ requires: all-resolved
diff --git a/testing/web-platform/tests/tools/ci/tc/testdata/epochs_daily_push_event.json b/testing/web-platform/tests/tools/ci/tc/testdata/epochs_daily_push_event.json
new file mode 100644
index 0000000000..0f74c315d2
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/testdata/epochs_daily_push_event.json
@@ -0,0 +1,460 @@
+{
+ "ref": "refs/heads/epochs/daily",
+ "before": "20bb1ca5db519ee5d37ece6492868f8a6b65a2e7",
+ "after": "5df56b25e1cb81f81fe16c88be839f9fd538b41e",
+ "repository": {
+ "id": 3618133,
+ "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
+ "name": "wpt",
+ "full_name": "web-platform-tests/wpt",
+ "private": false,
+ "owner": {
+ "name": "web-platform-tests",
+ "email": null,
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-platform-tests",
+ "html_url": "https://github.com/web-platform-tests",
+ "followers_url": "https://api.github.com/users/web-platform-tests/followers",
+ "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
+ "repos_url": "https://api.github.com/users/web-platform-tests/repos",
+ "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/web-platform-tests/wpt",
+ "description": "Test suites for Web platform specs — including WHATWG, W3C, and others",
+ "fork": false,
+ "url": "https://github.com/web-platform-tests/wpt",
+ "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
+ "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
+ "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
+ "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
+ "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
+ "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
+ "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
+ "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
+ "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
+ "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
+ "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
+ "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
+ "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
+ "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
+ "created_at": 1330865891,
+ "updated_at": "2019-11-30T21:34:30Z",
+ "pushed_at": 1575160610,
+ "git_url": "git://github.com/web-platform-tests/wpt.git",
+ "ssh_url": "git@github.com:web-platform-tests/wpt.git",
+ "clone_url": "https://github.com/web-platform-tests/wpt.git",
+ "svn_url": "https://github.com/web-platform-tests/wpt",
+ "homepage": "https://web-platform-tests.org/",
+ "size": 329465,
+ "stargazers_count": 2543,
+ "watchers_count": 2543,
+ "language": "HTML",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": true,
+ "forks_count": 1838,
+ "mirror_url": null,
+ "archived": false,
+ "disabled": false,
+ "open_issues_count": 1590,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": "NOASSERTION",
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 1838,
+ "open_issues": 1590,
+ "watchers": 2543,
+ "default_branch": "master",
+ "stargazers": 2543,
+ "master_branch": "master",
+ "organization": "web-platform-tests"
+ },
+ "pusher": {
+ "name": "github-actions[bot]",
+ "email": null
+ },
+ "organization": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "url": "https://api.github.com/orgs/web-platform-tests",
+ "repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
+ "events_url": "https://api.github.com/orgs/web-platform-tests/events",
+ "hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks",
+ "issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
+ "members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
+ "public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "description": ""
+ },
+ "sender": {
+ "login": "github-actions[bot]",
+ "id": 41898282,
+ "node_id": "MDM6Qm90NDE4OTgyODI=",
+ "avatar_url": "https://avatars2.githubusercontent.com/in/15368?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/github-actions%5Bbot%5D",
+ "html_url": "https://github.com/apps/github-actions",
+ "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers",
+ "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}",
+ "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions",
+ "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs",
+ "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos",
+ "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events",
+ "type": "Bot",
+ "site_admin": false
+ },
+ "created": false,
+ "deleted": false,
+ "forced": false,
+ "base_ref": "refs/heads/epochs/six_hourly",
+ "compare": "https://github.com/web-platform-tests/wpt/compare/20bb1ca5db51...5df56b25e1cb",
+ "commits": [
+ {
+ "id": "3503c50a6452e153bde906a9c6644cb6237224fc",
+ "tree_id": "b735fa0ae88ebe0abd6764a1afd63aea815ac18e",
+ "distinct": false,
+ "message": "[LayoutNG] Pixel-snap column rules.\n\nBug: 829028\nChange-Id: I252901109502256f14bc68e64d4303006db50a13\nReviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1944350\nCommit-Queue: Xianzhu Wang <wangxianzhu@chromium.org>\nReviewed-by: Xianzhu Wang <wangxianzhu@chromium.org>\nCr-Commit-Position: refs/heads/master@{#720302}",
+ "timestamp": "2019-11-29T16:25:44-08:00",
+ "url": "https://github.com/web-platform-tests/wpt/commit/3503c50a6452e153bde906a9c6644cb6237224fc",
+ "author": {
+ "name": "Morten Stenshorne",
+ "email": "mstensho@chromium.org",
+ "username": "mstensho"
+ },
+ "committer": {
+ "name": "Blink WPT Bot",
+ "email": "blink-w3c-test-autoroller@chromium.org",
+ "username": "chromium-wpt-export-bot"
+ },
+ "added": [
+ "css/css-multicol/equal-gap-and-rule.html"
+ ],
+ "removed": [
+
+ ],
+ "modified": [
+
+ ]
+ },
+ {
+ "id": "561b765308e6d188618f3ba73091bb598d8357ce",
+ "tree_id": "775ac4481c03e020819910d03019f0ec93def868",
+ "distinct": false,
+ "message": "Fix parser mXSS sanitizer bypass for <p> and <br> within foreign context\n\nPrior to this CL, the following code:\n <svg></p></svg>\nparsed to this innerHTML: <svg><p></p></svg>\n\nThis is in contrast to this code:\n <svg><p></svg>\nwhich parses to <svg></svg><p></p>\n\nThe fact that the </p> is left inside the <svg> allowed sanitizer\nbypasses as detailed in [1]. Please also see [2] for the spec\ndiscussion.\n\nWith this CL, </p> and </br> within a foreign context now cause\nthe closing of the foreign context.\n\n[1] https://research.securitum.com/dompurify-bypass-using-mxss/\n[2] https://github.com/whatwg/html/issues/5113\n\nBug: 1005713\nChange-Id: Ic07ee50de4eb1ef19b73a075bd83785c99f4f891\nReviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1940722\nReviewed-by: Kouhei Ueno <kouhei@chromium.org>\nCommit-Queue: Mason Freed <masonfreed@chromium.org>\nCr-Commit-Position: refs/heads/master@{#720315}",
+ "timestamp": "2019-11-30T00:22:29-08:00",
+ "url": "https://github.com/web-platform-tests/wpt/commit/561b765308e6d188618f3ba73091bb598d8357ce",
+ "author": {
+ "name": "Mason Freed",
+ "email": "masonfreed@chromium.org",
+ "username": "mfreed7"
+ },
+ "committer": {
+ "name": "Blink WPT Bot",
+ "email": "blink-w3c-test-autoroller@chromium.org",
+ "username": "chromium-wpt-export-bot"
+ },
+ "added": [
+ "html/syntax/parsing/html_content_in_foreign_context.html"
+ ],
+ "removed": [
+
+ ],
+ "modified": [
+
+ ]
+ },
+ {
+ "id": "d31800185dab8e194294620c8ad6bf40f25bf752",
+ "tree_id": "c718a913e9a5197e1896f5b2ee0434f896d6725b",
+ "distinct": false,
+ "message": "[css-text-4] tests for word-boundary-expansion\n\nhttps://drafts.csswg.org/css-text-4/#word-boundary-expansion",
+ "timestamp": "2019-11-30T18:09:49+09:00",
+ "url": "https://github.com/web-platform-tests/wpt/commit/d31800185dab8e194294620c8ad6bf40f25bf752",
+ "author": {
+ "name": "Florian Rivoal",
+ "email": "git@florian.rivoal.net",
+ "username": "frivoal"
+ },
+ "committer": {
+ "name": "Florian Rivoal",
+ "email": "git@florian.rivoal.net",
+ "username": "frivoal"
+ },
+ "added": [
+ "css/css-text/parsing/word-boundary-expansion-computed.html",
+ "css/css-text/parsing/word-boundary-expansion-invalid.html",
+ "css/css-text/parsing/word-boundary-expansion-valid.html",
+ "css/css-text/word-boundary/reference/word-boundary-001-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-002-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-004-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-007-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-008-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-009-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-010-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-011-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-012-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-013-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-014-ref.html",
+ "css/css-text/word-boundary/word-boundary-001.html",
+ "css/css-text/word-boundary/word-boundary-002.html",
+ "css/css-text/word-boundary/word-boundary-003.html",
+ "css/css-text/word-boundary/word-boundary-004.html",
+ "css/css-text/word-boundary/word-boundary-005.html",
+ "css/css-text/word-boundary/word-boundary-006.html",
+ "css/css-text/word-boundary/word-boundary-007.html",
+ "css/css-text/word-boundary/word-boundary-008.html",
+ "css/css-text/word-boundary/word-boundary-009.html",
+ "css/css-text/word-boundary/word-boundary-010.html",
+ "css/css-text/word-boundary/word-boundary-011.html",
+ "css/css-text/word-boundary/word-boundary-012.html",
+ "css/css-text/word-boundary/word-boundary-013.html",
+ "css/css-text/word-boundary/word-boundary-014.html",
+ "css/css-text/word-boundary/word-boundary-015-manual.html"
+ ],
+ "removed": [
+
+ ],
+ "modified": [
+
+ ]
+ },
+ {
+ "id": "37ddab0528d8ab49db1371188e36f68133ff5c1c",
+ "tree_id": "832644a697a1cdcaf93126d4f95c89f71b4f5d47",
+ "distinct": false,
+ "message": "[css-text-4] tests for word-boundary-detection\n\nhttps://drafts.csswg.org/css-text-4/#word-boundary-detection",
+ "timestamp": "2019-11-30T18:09:49+09:00",
+ "url": "https://github.com/web-platform-tests/wpt/commit/37ddab0528d8ab49db1371188e36f68133ff5c1c",
+ "author": {
+ "name": "Florian Rivoal",
+ "email": "git@florian.rivoal.net",
+ "username": "frivoal"
+ },
+ "committer": {
+ "name": "Florian Rivoal",
+ "email": "git@florian.rivoal.net",
+ "username": "frivoal"
+ },
+ "added": [
+ "css/css-text/word-boundary/reference/word-boundary-101-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-102-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-103-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-104-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-105-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-106-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-107-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-108-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-109-a-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-109-b-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-110-a-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-110-b-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-111-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-112-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-113-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-114-a-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-114-b-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-115-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-116-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-117-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-119-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-120-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-121-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-122-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-123-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-124-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-125-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-126-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-127-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-128-ref.html",
+ "css/css-text/word-boundary/reference/word-boundary-129-ref.html",
+ "css/css-text/word-boundary/word-boundary-101.html",
+ "css/css-text/word-boundary/word-boundary-102.html",
+ "css/css-text/word-boundary/word-boundary-103.html",
+ "css/css-text/word-boundary/word-boundary-104.html",
+ "css/css-text/word-boundary/word-boundary-105.html",
+ "css/css-text/word-boundary/word-boundary-106.html",
+ "css/css-text/word-boundary/word-boundary-107.html",
+ "css/css-text/word-boundary/word-boundary-108.html",
+ "css/css-text/word-boundary/word-boundary-109.html",
+ "css/css-text/word-boundary/word-boundary-110.html",
+ "css/css-text/word-boundary/word-boundary-111.html",
+ "css/css-text/word-boundary/word-boundary-112.html",
+ "css/css-text/word-boundary/word-boundary-113.html",
+ "css/css-text/word-boundary/word-boundary-114.html",
+ "css/css-text/word-boundary/word-boundary-115.html",
+ "css/css-text/word-boundary/word-boundary-116.html",
+ "css/css-text/word-boundary/word-boundary-117.html",
+ "css/css-text/word-boundary/word-boundary-118.html",
+ "css/css-text/word-boundary/word-boundary-119.html",
+ "css/css-text/word-boundary/word-boundary-120.html",
+ "css/css-text/word-boundary/word-boundary-121.html",
+ "css/css-text/word-boundary/word-boundary-122.html",
+ "css/css-text/word-boundary/word-boundary-123.html",
+ "css/css-text/word-boundary/word-boundary-124.html",
+ "css/css-text/word-boundary/word-boundary-125.html",
+ "css/css-text/word-boundary/word-boundary-126.html",
+ "css/css-text/word-boundary/word-boundary-127.html",
+ "css/css-text/word-boundary/word-boundary-128.html",
+ "css/css-text/word-boundary/word-boundary-129.html"
+ ],
+ "removed": [
+
+ ],
+ "modified": [
+
+ ]
+ },
+ {
+ "id": "054edcc23aa1e0ebee50d7ddf1ce6115dd940ece",
+ "tree_id": "b58ca36bb7e928d440bed734e323580467dd32c7",
+ "distinct": false,
+ "message": "[css-text] Fix typo",
+ "timestamp": "2019-11-30T19:08:12+09:00",
+ "url": "https://github.com/web-platform-tests/wpt/commit/054edcc23aa1e0ebee50d7ddf1ce6115dd940ece",
+ "author": {
+ "name": "Florian Rivoal",
+ "email": "git@florian.rivoal.net",
+ "username": "frivoal"
+ },
+ "committer": {
+ "name": "Florian Rivoal",
+ "email": "git@florian.rivoal.net",
+ "username": "frivoal"
+ },
+ "added": [
+
+ ],
+ "removed": [
+
+ ],
+ "modified": [
+ "css/css-text/parsing/word-boundary-expansion-computed.html"
+ ]
+ },
+ {
+ "id": "d15d6d91834108a38070771025b548124d44026b",
+ "tree_id": "a1476d347b04acb59ed2562f0d4f845e8252e6d0",
+ "distinct": false,
+ "message": "[css-text add parsing tests for word-boundary-detection",
+ "timestamp": "2019-11-30T19:08:12+09:00",
+ "url": "https://github.com/web-platform-tests/wpt/commit/d15d6d91834108a38070771025b548124d44026b",
+ "author": {
+ "name": "Florian Rivoal",
+ "email": "git@florian.rivoal.net",
+ "username": "frivoal"
+ },
+ "committer": {
+ "name": "Florian Rivoal",
+ "email": "git@florian.rivoal.net",
+ "username": "frivoal"
+ },
+ "added": [
+ "css/css-text/parsing/word-boundary-detection-computed.html",
+ "css/css-text/parsing/word-boundary-detection-invalid.html",
+ "css/css-text/parsing/word-boundary-detection-valid.html"
+ ],
+ "removed": [
+
+ ],
+ "modified": [
+
+ ]
+ },
+ {
+ "id": "5df56b25e1cb81f81fe16c88be839f9fd538b41e",
+ "tree_id": "18da87e7701bd6218f4437b4d2d49cefe1f56af9",
+ "distinct": false,
+ "message": "Delete invalid test. (#20547)\n\nIt seems it expects `transform: rotate(1deg, 20px, 20px)` to somehow be valid.\r\n\r\nIt was introduced in cdc3032f56c86cc68121e54e169485441d9cdb1a, pointing to https://www.w3.org/TR/css-transforms-1/#svg-transform-functions, which doesn't say anything like that.\r\n\r\nDoesn't pass in any browser.",
+ "timestamp": "2019-11-30T13:34:24-08:00",
+ "url": "https://github.com/web-platform-tests/wpt/commit/5df56b25e1cb81f81fe16c88be839f9fd538b41e",
+ "author": {
+ "name": "Emilio Cobos Ãlvarez",
+ "email": "emilio@crisal.io",
+ "username": "emilio"
+ },
+ "committer": {
+ "name": "L. David Baron",
+ "email": "dbaron@dbaron.org",
+ "username": "dbaron"
+ },
+ "added": [
+
+ ],
+ "removed": [
+ "css/css-transforms/external-styles/svg-external-styles-012.html"
+ ],
+ "modified": [
+ "css/css-transforms/external-styles/support/svg-external-styles.css"
+ ]
+ }
+ ],
+ "head_commit": {
+ "id": "5df56b25e1cb81f81fe16c88be839f9fd538b41e",
+ "tree_id": "18da87e7701bd6218f4437b4d2d49cefe1f56af9",
+ "distinct": false,
+ "message": "Delete invalid test. (#20547)\n\nIt seems it expects `transform: rotate(1deg, 20px, 20px)` to somehow be valid.\r\n\r\nIt was introduced in cdc3032f56c86cc68121e54e169485441d9cdb1a, pointing to https://www.w3.org/TR/css-transforms-1/#svg-transform-functions, which doesn't say anything like that.\r\n\r\nDoesn't pass in any browser.",
+ "timestamp": "2019-11-30T13:34:24-08:00",
+ "url": "https://github.com/web-platform-tests/wpt/commit/5df56b25e1cb81f81fe16c88be839f9fd538b41e",
+ "author": {
+ "name": "Emilio Cobos Ãlvarez",
+ "email": "emilio@crisal.io",
+ "username": "emilio"
+ },
+ "committer": {
+ "name": "L. David Baron",
+ "email": "dbaron@dbaron.org",
+ "username": "dbaron"
+ },
+ "added": [
+
+ ],
+ "removed": [
+ "css/css-transforms/external-styles/svg-external-styles-012.html"
+ ],
+ "modified": [
+ "css/css-transforms/external-styles/support/svg-external-styles.css"
+ ]
+ }
+}
diff --git a/testing/web-platform/tests/tools/ci/tc/testdata/master_push_event.json b/testing/web-platform/tests/tools/ci/tc/testdata/master_push_event.json
new file mode 100644
index 0000000000..8e79f75373
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/testdata/master_push_event.json
@@ -0,0 +1,214 @@
+{
+ "ref": "refs/heads/master",
+ "before": "a4bfa25bfc35e6dd8aabf9bc5af714bf3d70d712",
+ "after": "5baef702c26b8580f5a4e5e1a34ac75bb9d496ae",
+ "created": false,
+ "deleted": false,
+ "forced": false,
+ "base_ref": null,
+ "compare": "https://github.com/web-platform-tests/wpt/compare/a4bfa25bfc35...5baef702c26b",
+ "commits": [
+ {
+ "id": "5baef702c26b8580f5a4e5e1a34ac75bb9d496ae",
+ "tree_id": "045949cd04598b19f5ed1bebf2d5cbed647f3c86",
+ "distinct": true,
+ "message": "Add support for verifying taskcluster config (#15593)\n\nAdds as wpt tc-verify command that verifies that the TaskCluster\r\nconfig is a valid yaml file and computes the tasks that will run on a\r\nPR synchronize event. This can be expanded to more events and pushes\r\nin the future.",
+ "timestamp": "2019-03-01T14:43:07Z",
+ "url": "https://github.com/web-platform-tests/wpt/commit/5baef702c26b8580f5a4e5e1a34ac75bb9d496ae",
+ "author": {
+ "name": "jgraham",
+ "email": "james@hoppipolla.co.uk",
+ "username": "jgraham"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "username": "web-flow"
+ },
+ "added": [
+ "tools/taskcluster/__init__.py",
+ "tools/taskcluster/commands.json",
+ "tools/taskcluster/testdata/pr_event.json",
+ "tools/taskcluster/verify.py"
+ ],
+ "removed": [
+
+ ],
+ "modified": [
+ "tools/wpt/paths"
+ ]
+ }
+ ],
+ "head_commit": {
+ "id": "5baef702c26b8580f5a4e5e1a34ac75bb9d496ae",
+ "tree_id": "045949cd04598b19f5ed1bebf2d5cbed647f3c86",
+ "distinct": true,
+ "message": "Add support for verifying taskcluster config (#15593)\n\nAdds as wpt tc-verify command that verifies that the TaskCluster\r\nconfig is a valid yaml file and computes the tasks that will run on a\r\nPR synchronize event. This can be expanded to more events and pushes\r\nin the future.",
+ "timestamp": "2019-03-01T14:43:07Z",
+ "url": "https://github.com/web-platform-tests/wpt/commit/5baef702c26b8580f5a4e5e1a34ac75bb9d496ae",
+ "author": {
+ "name": "jgraham",
+ "email": "james@hoppipolla.co.uk",
+ "username": "jgraham"
+ },
+ "committer": {
+ "name": "GitHub",
+ "email": "noreply@github.com",
+ "username": "web-flow"
+ },
+ "added": [
+ "tools/taskcluster/__init__.py",
+ "tools/taskcluster/commands.json",
+ "tools/taskcluster/testdata/pr_event.json",
+ "tools/taskcluster/verify.py"
+ ],
+ "removed": [
+
+ ],
+ "modified": [
+ "tools/wpt/paths"
+ ]
+ },
+ "repository": {
+ "id": 3618133,
+ "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
+ "name": "wpt",
+ "full_name": "web-platform-tests/wpt",
+ "private": false,
+ "owner": {
+ "name": "web-platform-tests",
+ "email": "",
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-platform-tests",
+ "html_url": "https://github.com/web-platform-tests",
+ "followers_url": "https://api.github.com/users/web-platform-tests/followers",
+ "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
+ "repos_url": "https://api.github.com/users/web-platform-tests/repos",
+ "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/web-platform-tests/wpt",
+ "description": "Test suites for Web platform specs — including WHATWG, W3C, and others",
+ "fork": false,
+ "url": "https://github.com/web-platform-tests/wpt",
+ "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
+ "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
+ "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
+ "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
+ "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
+ "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
+ "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
+ "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
+ "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
+ "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
+ "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
+ "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
+ "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
+ "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
+ "created_at": 1330865891,
+ "updated_at": "2019-03-01T14:16:52Z",
+ "pushed_at": 1551451389,
+ "git_url": "git://github.com/web-platform-tests/wpt.git",
+ "ssh_url": "git@github.com:web-platform-tests/wpt.git",
+ "clone_url": "https://github.com/web-platform-tests/wpt.git",
+ "svn_url": "https://github.com/web-platform-tests/wpt",
+ "homepage": "http://irc.w3.org/?channels=testing",
+ "size": 324722,
+ "stargazers_count": 2060,
+ "watchers_count": 2060,
+ "language": "HTML",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": true,
+ "forks_count": 1605,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 1355,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": "NOASSERTION",
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 1605,
+ "open_issues": 1366,
+ "watchers": 2060,
+ "default_branch": "master",
+ "stargazers": 2060,
+ "master_branch": "master",
+ "organization": "web-platform-tests"
+ },
+ "pusher": {
+ "name": "jgraham",
+ "email": "james@hoppipolla.co.uk"
+ },
+ "organization": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "url": "https://api.github.com/orgs/web-platform-tests",
+ "repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
+ "events_url": "https://api.github.com/orgs/web-platform-tests/events",
+ "hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks",
+ "issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
+ "members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
+ "public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "description": ""
+ },
+ "sender": {
+ "login": "jgraham",
+ "id": 294864,
+ "node_id": "MDQ6VXNlcjI5NDg2NA==",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/294864?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/jgraham",
+ "html_url": "https://github.com/jgraham",
+ "followers_url": "https://api.github.com/users/jgraham/followers",
+ "following_url": "https://api.github.com/users/jgraham/following{/other_user}",
+ "gists_url": "https://api.github.com/users/jgraham/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/jgraham/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/jgraham/subscriptions",
+ "organizations_url": "https://api.github.com/users/jgraham/orgs",
+ "repos_url": "https://api.github.com/users/jgraham/repos",
+ "events_url": "https://api.github.com/users/jgraham/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/jgraham/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+}
diff --git a/testing/web-platform/tests/tools/ci/tc/testdata/pr_event.json b/testing/web-platform/tests/tools/ci/tc/testdata/pr_event.json
new file mode 100644
index 0000000000..5acc1a6010
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/testdata/pr_event.json
@@ -0,0 +1,577 @@
+{
+ "action": "synchronize",
+ "number": 15574,
+ "pull_request": {
+ "url": "https://api.github.com/repos/web-platform-tests/wpt/pulls/15574",
+ "id": 256653065,
+ "node_id": "MDExOlB1bGxSZXF1ZXN0MjU2NjUzMDY1",
+ "html_url": "https://github.com/web-platform-tests/wpt/pull/15574",
+ "diff_url": "https://github.com/web-platform-tests/wpt/pull/15574.diff",
+ "patch_url": "https://github.com/web-platform-tests/wpt/pull/15574.patch",
+ "issue_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/15574",
+ "number": 15574,
+ "state": "open",
+ "locked": false,
+ "title": "Move the lint from Travis to TaskCluster",
+ "user": {
+ "login": "jgraham",
+ "id": 294864,
+ "node_id": "MDQ6VXNlcjI5NDg2NA==",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/294864?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/jgraham",
+ "html_url": "https://github.com/jgraham",
+ "followers_url": "https://api.github.com/users/jgraham/followers",
+ "following_url": "https://api.github.com/users/jgraham/following{/other_user}",
+ "gists_url": "https://api.github.com/users/jgraham/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/jgraham/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/jgraham/subscriptions",
+ "organizations_url": "https://api.github.com/users/jgraham/orgs",
+ "repos_url": "https://api.github.com/users/jgraham/repos",
+ "events_url": "https://api.github.com/users/jgraham/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/jgraham/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "body": "",
+ "created_at": "2019-02-27T12:03:38Z",
+ "updated_at": "2019-02-28T13:43:17Z",
+ "closed_at": null,
+ "merged_at": null,
+ "merge_commit_sha": "70a272296dad0db4f0be1133a59aa97f0a72d9ac",
+ "assignee": {
+ "login": "gsnedders",
+ "id": 176218,
+ "node_id": "MDQ6VXNlcjE3NjIxOA==",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/176218?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/gsnedders",
+ "html_url": "https://github.com/gsnedders",
+ "followers_url": "https://api.github.com/users/gsnedders/followers",
+ "following_url": "https://api.github.com/users/gsnedders/following{/other_user}",
+ "gists_url": "https://api.github.com/users/gsnedders/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/gsnedders/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/gsnedders/subscriptions",
+ "organizations_url": "https://api.github.com/users/gsnedders/orgs",
+ "repos_url": "https://api.github.com/users/gsnedders/repos",
+ "events_url": "https://api.github.com/users/gsnedders/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/gsnedders/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "assignees": [
+ {
+ "login": "gsnedders",
+ "id": 176218,
+ "node_id": "MDQ6VXNlcjE3NjIxOA==",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/176218?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/gsnedders",
+ "html_url": "https://github.com/gsnedders",
+ "followers_url": "https://api.github.com/users/gsnedders/followers",
+ "following_url": "https://api.github.com/users/gsnedders/following{/other_user}",
+ "gists_url": "https://api.github.com/users/gsnedders/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/gsnedders/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/gsnedders/subscriptions",
+ "organizations_url": "https://api.github.com/users/gsnedders/orgs",
+ "repos_url": "https://api.github.com/users/gsnedders/repos",
+ "events_url": "https://api.github.com/users/gsnedders/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/gsnedders/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+ ],
+ "requested_reviewers": [
+ {
+ "login": "gsnedders",
+ "id": 176218,
+ "node_id": "MDQ6VXNlcjE3NjIxOA==",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/176218?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/gsnedders",
+ "html_url": "https://github.com/gsnedders",
+ "followers_url": "https://api.github.com/users/gsnedders/followers",
+ "following_url": "https://api.github.com/users/gsnedders/following{/other_user}",
+ "gists_url": "https://api.github.com/users/gsnedders/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/gsnedders/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/gsnedders/subscriptions",
+ "organizations_url": "https://api.github.com/users/gsnedders/orgs",
+ "repos_url": "https://api.github.com/users/gsnedders/repos",
+ "events_url": "https://api.github.com/users/gsnedders/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/gsnedders/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ {
+ "login": "jugglinmike",
+ "id": 677252,
+ "node_id": "MDQ6VXNlcjY3NzI1Mg==",
+ "avatar_url": "https://avatars2.githubusercontent.com/u/677252?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/jugglinmike",
+ "html_url": "https://github.com/jugglinmike",
+ "followers_url": "https://api.github.com/users/jugglinmike/followers",
+ "following_url": "https://api.github.com/users/jugglinmike/following{/other_user}",
+ "gists_url": "https://api.github.com/users/jugglinmike/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/jugglinmike/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/jugglinmike/subscriptions",
+ "organizations_url": "https://api.github.com/users/jugglinmike/orgs",
+ "repos_url": "https://api.github.com/users/jugglinmike/repos",
+ "events_url": "https://api.github.com/users/jugglinmike/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/jugglinmike/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+ ],
+ "requested_teams": [
+
+ ],
+ "labels": [
+ {
+ "id": 1012999603,
+ "node_id": "MDU6TGFiZWwxMDEyOTk5NjAz",
+ "url": "https://api.github.com/repos/web-platform-tests/wpt/labels/ci",
+ "name": "ci",
+ "color": "fef2c0",
+ "default": false
+ },
+ {
+ "id": 45230790,
+ "node_id": "MDU6TGFiZWw0NTIzMDc5MA==",
+ "url": "https://api.github.com/repos/web-platform-tests/wpt/labels/infra",
+ "name": "infra",
+ "color": "fbca04",
+ "default": false
+ }
+ ],
+ "milestone": null,
+ "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls/15574/commits",
+ "review_comments_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls/15574/comments",
+ "review_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls/comments{/number}",
+ "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/15574/comments",
+ "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/fef69c5d47196433234d6c37a0ff987491bd2dfc",
+ "head": {
+ "label": "web-platform-tests:taskcluster_lint",
+ "ref": "taskcluster_lint",
+ "sha": "fef69c5d47196433234d6c37a0ff987491bd2dfc",
+ "user": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-platform-tests",
+ "html_url": "https://github.com/web-platform-tests",
+ "followers_url": "https://api.github.com/users/web-platform-tests/followers",
+ "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
+ "repos_url": "https://api.github.com/users/web-platform-tests/repos",
+ "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "repo": {
+ "id": 3618133,
+ "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
+ "name": "wpt",
+ "full_name": "web-platform-tests/wpt",
+ "private": false,
+ "owner": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-platform-tests",
+ "html_url": "https://github.com/web-platform-tests",
+ "followers_url": "https://api.github.com/users/web-platform-tests/followers",
+ "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
+ "repos_url": "https://api.github.com/users/web-platform-tests/repos",
+ "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/web-platform-tests/wpt",
+ "description": "Test suites for Web platform specs — including WHATWG, W3C, and others",
+ "fork": false,
+ "url": "https://api.github.com/repos/web-platform-tests/wpt",
+ "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
+ "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
+ "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
+ "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
+ "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
+ "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
+ "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
+ "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
+ "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
+ "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
+ "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
+ "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
+ "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
+ "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
+ "created_at": "2012-03-04T12:58:11Z",
+ "updated_at": "2019-02-28T12:41:33Z",
+ "pushed_at": "2019-02-28T13:43:16Z",
+ "git_url": "git://github.com/web-platform-tests/wpt.git",
+ "ssh_url": "git@github.com:web-platform-tests/wpt.git",
+ "clone_url": "https://github.com/web-platform-tests/wpt.git",
+ "svn_url": "https://github.com/web-platform-tests/wpt",
+ "homepage": "http://irc.w3.org/?channels=testing",
+ "size": 324641,
+ "stargazers_count": 2058,
+ "watchers_count": 2058,
+ "language": "HTML",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": true,
+ "forks_count": 1604,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 1354,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": "NOASSERTION",
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 1604,
+ "open_issues": 1354,
+ "watchers": 2058,
+ "default_branch": "master"
+ }
+ },
+ "base": {
+ "label": "web-platform-tests:master",
+ "ref": "master",
+ "sha": "bb657c4bd0cc4729daa27c1f3a1e1f86ef5a1dc0",
+ "user": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-platform-tests",
+ "html_url": "https://github.com/web-platform-tests",
+ "followers_url": "https://api.github.com/users/web-platform-tests/followers",
+ "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
+ "repos_url": "https://api.github.com/users/web-platform-tests/repos",
+ "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "repo": {
+ "id": 3618133,
+ "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
+ "name": "wpt",
+ "full_name": "web-platform-tests/wpt",
+ "private": false,
+ "owner": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-platform-tests",
+ "html_url": "https://github.com/web-platform-tests",
+ "followers_url": "https://api.github.com/users/web-platform-tests/followers",
+ "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
+ "repos_url": "https://api.github.com/users/web-platform-tests/repos",
+ "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/web-platform-tests/wpt",
+ "description": "Test suites for Web platform specs — including WHATWG, W3C, and others",
+ "fork": false,
+ "url": "https://api.github.com/repos/web-platform-tests/wpt",
+ "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
+ "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
+ "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
+ "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
+ "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
+ "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
+ "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
+ "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
+ "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
+ "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
+ "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
+ "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
+ "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
+ "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
+ "created_at": "2012-03-04T12:58:11Z",
+ "updated_at": "2019-02-28T12:41:33Z",
+ "pushed_at": "2019-02-28T13:43:16Z",
+ "git_url": "git://github.com/web-platform-tests/wpt.git",
+ "ssh_url": "git@github.com:web-platform-tests/wpt.git",
+ "clone_url": "https://github.com/web-platform-tests/wpt.git",
+ "svn_url": "https://github.com/web-platform-tests/wpt",
+ "homepage": "http://irc.w3.org/?channels=testing",
+ "size": 324641,
+ "stargazers_count": 2058,
+ "watchers_count": 2058,
+ "language": "HTML",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": true,
+ "forks_count": 1604,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 1354,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": "NOASSERTION",
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 1604,
+ "open_issues": 1354,
+ "watchers": 2058,
+ "default_branch": "master"
+ }
+ },
+ "_links": {
+ "self": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/pulls/15574"
+ },
+ "html": {
+ "href": "https://github.com/web-platform-tests/wpt/pull/15574"
+ },
+ "issue": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/issues/15574"
+ },
+ "comments": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/issues/15574/comments"
+ },
+ "review_comments": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/pulls/15574/comments"
+ },
+ "review_comment": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/pulls/comments{/number}"
+ },
+ "commits": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/pulls/15574/commits"
+ },
+ "statuses": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/statuses/fef69c5d47196433234d6c37a0ff987491bd2dfc"
+ }
+ },
+ "author_association": "CONTRIBUTOR",
+ "draft": false,
+ "merged": false,
+ "mergeable": null,
+ "rebaseable": null,
+ "mergeable_state": "unknown",
+ "merged_by": null,
+ "comments": 2,
+ "review_comments": 3,
+ "maintainer_can_modify": false,
+ "commits": 2,
+ "additions": 55,
+ "deletions": 7,
+ "changed_files": 3
+ },
+ "before": "46d2f316ae10b83726dfb43150b321533bc9539f",
+ "after": "fef69c5d47196433234d6c37a0ff987491bd2dfc",
+ "repository": {
+ "id": 3618133,
+ "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
+ "name": "wpt",
+ "full_name": "web-platform-tests/wpt",
+ "private": false,
+ "owner": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-platform-tests",
+ "html_url": "https://github.com/web-platform-tests",
+ "followers_url": "https://api.github.com/users/web-platform-tests/followers",
+ "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
+ "repos_url": "https://api.github.com/users/web-platform-tests/repos",
+ "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/web-platform-tests/wpt",
+ "description": "Test suites for Web platform specs — including WHATWG, W3C, and others",
+ "fork": false,
+ "url": "https://api.github.com/repos/web-platform-tests/wpt",
+ "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
+ "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
+ "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
+ "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
+ "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
+ "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
+ "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
+ "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
+ "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
+ "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
+ "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
+ "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
+ "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
+ "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
+ "created_at": "2012-03-04T12:58:11Z",
+ "updated_at": "2019-02-28T12:41:33Z",
+ "pushed_at": "2019-02-28T13:43:16Z",
+ "git_url": "git://github.com/web-platform-tests/wpt.git",
+ "ssh_url": "git@github.com:web-platform-tests/wpt.git",
+ "clone_url": "https://github.com/web-platform-tests/wpt.git",
+ "svn_url": "https://github.com/web-platform-tests/wpt",
+ "homepage": "http://irc.w3.org/?channels=testing",
+ "size": 324641,
+ "stargazers_count": 2058,
+ "watchers_count": 2058,
+ "language": "HTML",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": true,
+ "forks_count": 1604,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 1354,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": "NOASSERTION",
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 1604,
+ "open_issues": 1354,
+ "watchers": 2058,
+ "default_branch": "master"
+ },
+ "organization": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "url": "https://api.github.com/orgs/web-platform-tests",
+ "repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
+ "events_url": "https://api.github.com/orgs/web-platform-tests/events",
+ "hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks",
+ "issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
+ "members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
+ "public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "description": ""
+ },
+ "sender": {
+ "login": "jgraham",
+ "id": 294864,
+ "node_id": "MDQ6VXNlcjI5NDg2NA==",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/294864?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/jgraham",
+ "html_url": "https://github.com/jgraham",
+ "followers_url": "https://api.github.com/users/jgraham/followers",
+ "following_url": "https://api.github.com/users/jgraham/following{/other_user}",
+ "gists_url": "https://api.github.com/users/jgraham/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/jgraham/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/jgraham/subscriptions",
+ "organizations_url": "https://api.github.com/users/jgraham/orgs",
+ "repos_url": "https://api.github.com/users/jgraham/repos",
+ "events_url": "https://api.github.com/users/jgraham/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/jgraham/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+}
diff --git a/testing/web-platform/tests/tools/ci/tc/testdata/pr_event_tests_affected.json b/testing/web-platform/tests/tools/ci/tc/testdata/pr_event_tests_affected.json
new file mode 100644
index 0000000000..792ea1bccc
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/testdata/pr_event_tests_affected.json
@@ -0,0 +1,505 @@
+{
+ "action": "synchronize",
+ "number": 20378,
+ "pull_request": {
+ "url": "https://api.github.com/repos/web-platform-tests/wpt/pulls/20378",
+ "id": 344287920,
+ "node_id": "MDExOlB1bGxSZXF1ZXN0MzQ0Mjg3OTIw",
+ "html_url": "https://github.com/web-platform-tests/wpt/pull/20378",
+ "diff_url": "https://github.com/web-platform-tests/wpt/pull/20378.diff",
+ "patch_url": "https://github.com/web-platform-tests/wpt/pull/20378.patch",
+ "issue_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/20378",
+ "number": 20378,
+ "state": "open",
+ "locked": false,
+ "title": "Migrate more layout instability tests to WPT.",
+ "user": {
+ "login": "chromium-wpt-export-bot",
+ "id": 25752892,
+ "node_id": "MDQ6VXNlcjI1NzUyODky",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/25752892?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/chromium-wpt-export-bot",
+ "html_url": "https://github.com/chromium-wpt-export-bot",
+ "followers_url": "https://api.github.com/users/chromium-wpt-export-bot/followers",
+ "following_url": "https://api.github.com/users/chromium-wpt-export-bot/following{/other_user}",
+ "gists_url": "https://api.github.com/users/chromium-wpt-export-bot/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/chromium-wpt-export-bot/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/chromium-wpt-export-bot/subscriptions",
+ "organizations_url": "https://api.github.com/users/chromium-wpt-export-bot/orgs",
+ "repos_url": "https://api.github.com/users/chromium-wpt-export-bot/repos",
+ "events_url": "https://api.github.com/users/chromium-wpt-export-bot/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/chromium-wpt-export-bot/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "body": "Bug: 984109\nChange-Id: Ie31c63995f63f8acbfa26d97966ffe30016edd3c\nReviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1925447\nCommit-Queue: Steve Kobes \\<skobes@chromium.org>\nReviewed-by: Nicolás Peña Moreno \\<npm@chromium.org>\nCr-Commit-Position: refs/heads/master@{#718648}\n\n",
+ "created_at": "2019-11-21T23:53:58Z",
+ "updated_at": "2019-11-25T15:54:19Z",
+ "closed_at": null,
+ "merged_at": null,
+ "merge_commit_sha": "36726e3e992a83e80608acf47c886fc440390691",
+ "assignee": null,
+ "assignees": [
+
+ ],
+ "requested_reviewers": [
+
+ ],
+ "requested_teams": [
+
+ ],
+ "labels": [
+ {
+ "id": 490891502,
+ "node_id": "MDU6TGFiZWw0OTA4OTE1MDI=",
+ "url": "https://api.github.com/repos/web-platform-tests/wpt/labels/chromium-export",
+ "name": "chromium-export",
+ "color": "4788f4",
+ "default": false,
+ "description": null
+ },
+ {
+ "id": 1363199651,
+ "node_id": "MDU6TGFiZWwxMzYzMTk5NjUx",
+ "url": "https://api.github.com/repos/web-platform-tests/wpt/labels/layout-instability",
+ "name": "layout-instability",
+ "color": "ededed",
+ "default": false,
+ "description": null
+ }
+ ],
+ "milestone": null,
+ "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls/20378/commits",
+ "review_comments_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls/20378/comments",
+ "review_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls/comments{/number}",
+ "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/20378/comments",
+ "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/7df84e3f87f05860a2b86d0b80dc3eb06d8e7103",
+ "head": {
+ "label": "web-platform-tests:chromium-export-cl-1925447",
+ "ref": "chromium-export-cl-1925447",
+ "sha": "7df84e3f87f05860a2b86d0b80dc3eb06d8e7103",
+ "user": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-platform-tests",
+ "html_url": "https://github.com/web-platform-tests",
+ "followers_url": "https://api.github.com/users/web-platform-tests/followers",
+ "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
+ "repos_url": "https://api.github.com/users/web-platform-tests/repos",
+ "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "repo": {
+ "id": 3618133,
+ "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
+ "name": "wpt",
+ "full_name": "web-platform-tests/wpt",
+ "private": false,
+ "owner": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-platform-tests",
+ "html_url": "https://github.com/web-platform-tests",
+ "followers_url": "https://api.github.com/users/web-platform-tests/followers",
+ "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
+ "repos_url": "https://api.github.com/users/web-platform-tests/repos",
+ "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/web-platform-tests/wpt",
+ "description": "Test suites for Web platform specs — including WHATWG, W3C, and others",
+ "fork": false,
+ "url": "https://api.github.com/repos/web-platform-tests/wpt",
+ "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
+ "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
+ "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
+ "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
+ "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
+ "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
+ "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
+ "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
+ "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
+ "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
+ "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
+ "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
+ "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
+ "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
+ "created_at": "2012-03-04T12:58:11Z",
+ "updated_at": "2019-11-25T15:40:59Z",
+ "pushed_at": "2019-11-25T15:54:18Z",
+ "git_url": "git://github.com/web-platform-tests/wpt.git",
+ "ssh_url": "git@github.com:web-platform-tests/wpt.git",
+ "clone_url": "https://github.com/web-platform-tests/wpt.git",
+ "svn_url": "https://github.com/web-platform-tests/wpt",
+ "homepage": "https://web-platform-tests.org/",
+ "size": 329278,
+ "stargazers_count": 2536,
+ "watchers_count": 2536,
+ "language": "HTML",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": true,
+ "forks_count": 1837,
+ "mirror_url": null,
+ "archived": false,
+ "disabled": false,
+ "open_issues_count": 1576,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": "NOASSERTION",
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 1837,
+ "open_issues": 1576,
+ "watchers": 2536,
+ "default_branch": "master"
+ }
+ },
+ "base": {
+ "label": "web-platform-tests:master",
+ "ref": "master",
+ "sha": "90ebe5c33c44090271759f8e9dc43d0b5bd0f8f7",
+ "user": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-platform-tests",
+ "html_url": "https://github.com/web-platform-tests",
+ "followers_url": "https://api.github.com/users/web-platform-tests/followers",
+ "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
+ "repos_url": "https://api.github.com/users/web-platform-tests/repos",
+ "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "repo": {
+ "id": 3618133,
+ "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
+ "name": "wpt",
+ "full_name": "web-platform-tests/wpt",
+ "private": false,
+ "owner": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-platform-tests",
+ "html_url": "https://github.com/web-platform-tests",
+ "followers_url": "https://api.github.com/users/web-platform-tests/followers",
+ "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
+ "repos_url": "https://api.github.com/users/web-platform-tests/repos",
+ "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/web-platform-tests/wpt",
+ "description": "Test suites for Web platform specs — including WHATWG, W3C, and others",
+ "fork": false,
+ "url": "https://api.github.com/repos/web-platform-tests/wpt",
+ "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
+ "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
+ "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
+ "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
+ "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
+ "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
+ "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
+ "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
+ "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
+ "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
+ "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
+ "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
+ "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
+ "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
+ "created_at": "2012-03-04T12:58:11Z",
+ "updated_at": "2019-11-25T15:40:59Z",
+ "pushed_at": "2019-11-25T15:54:18Z",
+ "git_url": "git://github.com/web-platform-tests/wpt.git",
+ "ssh_url": "git@github.com:web-platform-tests/wpt.git",
+ "clone_url": "https://github.com/web-platform-tests/wpt.git",
+ "svn_url": "https://github.com/web-platform-tests/wpt",
+ "homepage": "https://web-platform-tests.org/",
+ "size": 329278,
+ "stargazers_count": 2536,
+ "watchers_count": 2536,
+ "language": "HTML",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": true,
+ "forks_count": 1837,
+ "mirror_url": null,
+ "archived": false,
+ "disabled": false,
+ "open_issues_count": 1576,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": "NOASSERTION",
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 1837,
+ "open_issues": 1576,
+ "watchers": 2536,
+ "default_branch": "master"
+ }
+ },
+ "_links": {
+ "self": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/pulls/20378"
+ },
+ "html": {
+ "href": "https://github.com/web-platform-tests/wpt/pull/20378"
+ },
+ "issue": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/issues/20378"
+ },
+ "comments": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/issues/20378/comments"
+ },
+ "review_comments": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/pulls/20378/comments"
+ },
+ "review_comment": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/pulls/comments{/number}"
+ },
+ "commits": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/pulls/20378/commits"
+ },
+ "statuses": {
+ "href": "https://api.github.com/repos/web-platform-tests/wpt/statuses/7df84e3f87f05860a2b86d0b80dc3eb06d8e7103"
+ }
+ },
+ "author_association": "COLLABORATOR",
+ "draft": false,
+ "merged": false,
+ "mergeable": null,
+ "rebaseable": null,
+ "mergeable_state": "unknown",
+ "merged_by": null,
+ "comments": 0,
+ "review_comments": 0,
+ "maintainer_can_modify": false,
+ "commits": 1,
+ "additions": 431,
+ "deletions": 0,
+ "changed_files": 11
+ },
+ "before": "86742511fa37e1e2c1635b77431bd46958ecfb92",
+ "after": "7df84e3f87f05860a2b86d0b80dc3eb06d8e7103",
+ "repository": {
+ "id": 3618133,
+ "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
+ "name": "wpt",
+ "full_name": "web-platform-tests/wpt",
+ "private": false,
+ "owner": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/web-platform-tests",
+ "html_url": "https://github.com/web-platform-tests",
+ "followers_url": "https://api.github.com/users/web-platform-tests/followers",
+ "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
+ "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
+ "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
+ "repos_url": "https://api.github.com/users/web-platform-tests/repos",
+ "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
+ "type": "Organization",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/web-platform-tests/wpt",
+ "description": "Test suites for Web platform specs — including WHATWG, W3C, and others",
+ "fork": false,
+ "url": "https://api.github.com/repos/web-platform-tests/wpt",
+ "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
+ "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
+ "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
+ "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
+ "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
+ "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
+ "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
+ "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
+ "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
+ "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
+ "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
+ "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
+ "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
+ "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
+ "created_at": "2012-03-04T12:58:11Z",
+ "updated_at": "2019-11-25T15:40:59Z",
+ "pushed_at": "2019-11-25T15:54:18Z",
+ "git_url": "git://github.com/web-platform-tests/wpt.git",
+ "ssh_url": "git@github.com:web-platform-tests/wpt.git",
+ "clone_url": "https://github.com/web-platform-tests/wpt.git",
+ "svn_url": "https://github.com/web-platform-tests/wpt",
+ "homepage": "https://web-platform-tests.org/",
+ "size": 329278,
+ "stargazers_count": 2536,
+ "watchers_count": 2536,
+ "language": "HTML",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": true,
+ "forks_count": 1837,
+ "mirror_url": null,
+ "archived": false,
+ "disabled": false,
+ "open_issues_count": 1576,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": "NOASSERTION",
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 1837,
+ "open_issues": 1576,
+ "watchers": 2536,
+ "default_branch": "master"
+ },
+ "organization": {
+ "login": "web-platform-tests",
+ "id": 37226233,
+ "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
+ "url": "https://api.github.com/orgs/web-platform-tests",
+ "repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
+ "events_url": "https://api.github.com/orgs/web-platform-tests/events",
+ "hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks",
+ "issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
+ "members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
+ "public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
+ "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
+ "description": ""
+ },
+ "sender": {
+ "login": "chromium-wpt-export-bot",
+ "id": 25752892,
+ "node_id": "MDQ6VXNlcjI1NzUyODky",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/25752892?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/chromium-wpt-export-bot",
+ "html_url": "https://github.com/chromium-wpt-export-bot",
+ "followers_url": "https://api.github.com/users/chromium-wpt-export-bot/followers",
+ "following_url": "https://api.github.com/users/chromium-wpt-export-bot/following{/other_user}",
+ "gists_url": "https://api.github.com/users/chromium-wpt-export-bot/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/chromium-wpt-export-bot/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/chromium-wpt-export-bot/subscriptions",
+ "organizations_url": "https://api.github.com/users/chromium-wpt-export-bot/orgs",
+ "repos_url": "https://api.github.com/users/chromium-wpt-export-bot/repos",
+ "events_url": "https://api.github.com/users/chromium-wpt-export-bot/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/chromium-wpt-export-bot/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+}
diff --git a/testing/web-platform/tests/tools/ci/tc/tests/__init__.py b/testing/web-platform/tests/tools/ci/tc/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/ci/tc/tests/test_decision.py b/testing/web-platform/tests/tools/ci/tc/tests/test_decision.py
new file mode 100644
index 0000000000..f702466421
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/tests/test_decision.py
@@ -0,0 +1,56 @@
+# mypy: allow-untyped-defs
+
+from unittest import mock
+
+import pytest
+
+from tools.ci.tc import decision
+
+
+@pytest.mark.parametrize("run_jobs,tasks,expected", [
+ ([], {"task-no-schedule-if": {}}, ["task-no-schedule-if"]),
+ ([], {"task-schedule-if-no-run-job": {"schedule-if": {}}}, []),
+ (["job"],
+ {"job-present": {"schedule-if": {"run-job": ["other-job", "job"]}}},
+ ["job-present"]),
+ (["job"], {"job-missing": {"schedule-if": {"run-job": ["other-job"]}}}, []),
+ (["all"], {"job-all": {"schedule-if": {"run-job": ["other-job"]}}}, ["job-all"]),
+ (["job"],
+ {"job-1": {"schedule-if": {"run-job": ["job"]}},
+ "job-2": {"schedule-if": {"run-job": ["other-job"]}}},
+ ["job-1"]),
+])
+def test_filter_schedule_if(run_jobs, tasks, expected):
+ with mock.patch("tools.ci.tc.decision.get_run_jobs",
+ return_value=run_jobs) as get_run_jobs:
+ assert (decision.filter_schedule_if({}, tasks) ==
+ {name: tasks[name] for name in expected})
+    assert get_run_jobs.call_count in (0, 1)
+
+
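+# get_extra_jobs() pulls the "tc-jobs:" directive from either a commit message
+# in the push event or the pull request body. Per the cases below: job names
+# are comma-separated, surrounding whitespace is ignored, only the first
+# "tc-jobs:" line is honoured, and "all" is passed through as a literal name.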
+@pytest.mark.parametrize("msg,expected", [
+ ("Some initial line\n\ntc-jobs:foo,bar", {"foo", "bar"}),
+ ("Some initial line\n\ntc-jobs:foo, bar", {"foo", "bar"}),
+ ("tc-jobs:foo, bar \nbaz", {"foo", "bar"}),
+ ("tc-jobs:all", {"all"}),
+ ("", set()),
+ ("tc-jobs:foo\ntc-jobs:bar", {"foo"})])
+@pytest.mark.parametrize("event", [
+ {"commits": [{"message": "<message>"}]},
+ {"pull_request": {"body": "<message>"}}
+])
+def test_extra_jobs_pr(msg, expected, event):
+ def sub(obj):
+ """Copy obj, except if it's a string with the value <message>
+ replace it with the value of the msg argument"""
+ if isinstance(obj, dict):
+ return {key: sub(value) for (key, value) in obj.items()}
+ elif isinstance(obj, list):
+ return [sub(value) for value in obj]
+ elif obj == "<message>":
+ return msg
+ return obj
+
+ event = sub(event)
+
+ assert decision.get_extra_jobs(event) == expected
diff --git a/testing/web-platform/tests/tools/ci/tc/tests/test_taskgraph.py b/testing/web-platform/tests/tools/ci/tc/tests/test_taskgraph.py
new file mode 100644
index 0000000000..57528a6659
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/tests/test_taskgraph.py
@@ -0,0 +1,148 @@
+# mypy: allow-untyped-defs
+
+import pytest
+import yaml
+
+from tools.ci.tc import taskgraph
+
+@pytest.mark.parametrize("data, update_data, expected", [
+ ({"a": 1}, {"b": 2}, {"a": 1, "b": 2}),
+ ({"a": 1}, {"a": 2}, {"a": 2}),
+ ({"a": [1]}, {"a": [2]}, {"a": [1, 2]}),
+ ({"a": {"b": 1, "c": 2}}, {"a": {"b": 2, "d": 3}}, {"a": {"b": 2, "c": 2, "d": 3}}),
+ ({"a": {"b": [1]}}, {"a": {"b": [2]}}, {"a": {"b": [1, 2]}}),
+]
+)
+def test_update_recursive(data, update_data, expected):
+ taskgraph.update_recursive(data, update_data)
+ assert data == expected
+
+
+def test_use():
+ data = """
+components:
+ component1:
+ a: 1
+ b: [1]
+ c: "c"
+ component2:
+ a: 2
+ b: [2]
+ d: "d"
+tasks:
+ - task1:
+ use:
+ - component1
+ - component2
+ b: [3]
+ c: "e"
+"""
+ tasks_data = yaml.safe_load(data)
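+    # Merge semantics exercised here: later components override scalar values
+    # ("a" comes from component2), lists are concatenated in order ("b"
+    # collects 1, 2, 3), and the task's own keys are applied last ("c").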
+ assert taskgraph.load_tasks(tasks_data) == {
+ "task1": {
+ "a": 2,
+ "b": [1,2,3],
+ "c": "e",
+ "d": "d",
+ "name": "task1"
+ }
+ }
+
+
+def test_var():
+ data = """
+components:
+ component1:
+ a: ${vars.value}
+tasks:
+ - task1:
+ use:
+ - component1
+ vars:
+ value: 1
+"""
+ tasks_data = yaml.safe_load(data)
+ assert taskgraph.load_tasks(tasks_data) == {
+ "task1": {
+ "a": "1",
+ "vars": {"value": 1},
+ "name": "task1"
+ }
+ }
+
+
+def test_map():
+ data = """
+components: {}
+tasks:
+ - $map:
+ for:
+ - vars:
+ a: 1
+ b: [1]
+ - vars:
+ a: 2
+ b: [2]
+ do:
+ - task1-${vars.a}:
+ a: ${vars.a}
+ b: [3]
+ - task2-${vars.a}:
+ a: ${vars.a}
+ b: [4]
+"""
+ tasks_data = yaml.safe_load(data)
+ assert taskgraph.load_tasks(tasks_data) == {
+ "task1-1": {
+ "a": "1",
+ "b": [1, 3],
+ "vars": {"a": 1},
+ "name": "task1-1"
+ },
+ "task1-2": {
+ "a": "2",
+ "b": [2, 3],
+ "vars": {"a": 2},
+ "name": "task1-2"
+ },
+ "task2-1": {
+ "a": "1",
+ "b": [1, 4],
+ "vars": {"a": 1},
+ "name": "task2-1"
+ },
+ "task2-2": {
+ "a": "2",
+ "b": [2, 4],
+ "vars": {"a": 2},
+ "name": "task2-2"
+ },
+
+ }
+
+
+def test_chunks():
+ data = """
+components: {}
+tasks:
+ - task1:
+ name: task1-${chunks.id}
+ chunks: 2
+"""
+ tasks_data = yaml.safe_load(data)
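+    # "chunks: 2" expands the single definition into one task per chunk, each
+    # carrying its own chunk id and the total chunk count.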
+ assert taskgraph.load_tasks(tasks_data) == {
+ "task1-1": {
+ "name": "task1-1",
+ "chunks": {
+ "id": 1,
+ "total": 2
+ }
+ },
+ "task1-2": {
+ "name": "task1-2",
+ "chunks": {
+ "id": 2,
+ "total": 2
+ }
+ }
+ }
diff --git a/testing/web-platform/tests/tools/ci/tc/tests/test_valid.py b/testing/web-platform/tests/tools/ci/tc/tests/test_valid.py
new file mode 100644
index 0000000000..31c07943ca
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tc/tests/test_valid.py
@@ -0,0 +1,292 @@
+# mypy: ignore-errors
+
+import json
+import os
+from unittest import mock
+
+import httpx
+import jsone
+import pytest
+import yaml
+from jsonschema import validate
+
+from tools.ci.tc import decision
+
+here = os.path.dirname(__file__)
+root = os.path.abspath(os.path.join(here, "..", "..", "..", ".."))
+
+
+def data_path(filename):
+ return os.path.join(here, "..", "testdata", filename)
+
+
+def test_verify_taskcluster_yml():
+ """Verify that the json-e in the .taskcluster.yml is valid"""
+ with open(os.path.join(root, ".taskcluster.yml"), encoding="utf8") as f:
+ template = yaml.safe_load(f)
+
+ events = [("pr_event.json", "github-pull-request", "Pull Request"),
+ ("master_push_event.json", "github-push", "Push to master")]
+
+ for filename, tasks_for, title in events:
+ with open(data_path(filename), encoding="utf8") as f:
+ event = json.load(f)
+
+ context = {"tasks_for": tasks_for,
+ "event": event,
+ "as_slugid": lambda x: x}
+
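+        # The template only needs as_slugid to return a distinct string per
+        # task name for rendering, so an identity function is a sufficient stub.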
+ jsone.render(template, context)
+
+
+@pytest.mark.parametrize("event_path,expected",
+ [("pr_event.json",
+ frozenset(["lint", "wpt-chrome-dev-stability"])),
+ ("pr_event_tests_affected.json", frozenset(["lint"]))]
+ )
+def test_exclude_users(event_path, expected):
+ """Verify that tasks excluded by the PR submitter are properly excluded"""
+ tasks = {
+ "lint": {
+ "commands": "wpt example"
+ },
+ "wpt-chrome-dev-stability": {
+ "commands": "wpt example",
+ "exclude-users": ["chromium-wpt-export-bot"]
+ }
+ }
+ with open(data_path(event_path), encoding="utf8") as f:
+ event = json.load(f)
+ decision.filter_excluded_users(tasks, event)
+ assert set(tasks) == expected
+
+
+def test_verify_payload():
+ """Verify that the decision task produces tasks with a valid payload"""
+ from tools.ci.tc.decision import decide
+
+ r = httpx.get("https://community-tc.services.mozilla.com/schemas/queue/v1/create-task-request.json")
+ r.raise_for_status()
+ create_task_schema = r.json()
+
+ r = httpx.get("https://community-tc.services.mozilla.com/references/schemas/docker-worker/v1/payload.json")
+ r.raise_for_status()
+ payload_schema = r.json()
+
+ jobs = ["lint",
+ "manifest_upload",
+ "resources_unittest",
+ "tools_unittest",
+ "wpt_integration",
+ "wptrunner_infrastructure",
+ "wptrunner_unittest"]
+
+ for filename in ["pr_event.json", "master_push_event.json"]:
+ with open(data_path(filename), encoding="utf8") as f:
+ event = json.load(f)
+
+ with mock.patch("tools.ci.tc.decision.get_fetch_rev", return_value=(None, event["after"], None)):
+ with mock.patch("tools.ci.tc.decision.get_run_jobs", return_value=set(jobs)):
+ task_id_map = decide(event)
+ for name, (task_id, task_data) in task_id_map.items():
+ try:
+ validate(instance=task_data, schema=create_task_schema)
+ validate(instance=task_data["payload"], schema=payload_schema)
+ except Exception as e:
+ print(f"Validation failed for task '{name}':\n{json.dumps(task_data, indent=2)}")
+ raise e
+
+
+@pytest.mark.parametrize("event_path,is_pr,files_changed,expected", [
+ ("master_push_event.json", False, None,
+ ['download-firefox-nightly',
+ 'wpt-firefox-nightly-testharness-1',
+ 'wpt-firefox-nightly-testharness-2',
+ 'wpt-firefox-nightly-testharness-3',
+ 'wpt-firefox-nightly-testharness-4',
+ 'wpt-firefox-nightly-testharness-5',
+ 'wpt-firefox-nightly-testharness-6',
+ 'wpt-firefox-nightly-testharness-7',
+ 'wpt-firefox-nightly-testharness-8',
+ 'wpt-firefox-nightly-testharness-9',
+ 'wpt-firefox-nightly-testharness-10',
+ 'wpt-firefox-nightly-testharness-11',
+ 'wpt-firefox-nightly-testharness-12',
+ 'wpt-firefox-nightly-testharness-13',
+ 'wpt-firefox-nightly-testharness-14',
+ 'wpt-firefox-nightly-testharness-15',
+ 'wpt-firefox-nightly-testharness-16',
+ 'wpt-chrome-dev-testharness-1',
+ 'wpt-chrome-dev-testharness-2',
+ 'wpt-chrome-dev-testharness-3',
+ 'wpt-chrome-dev-testharness-4',
+ 'wpt-chrome-dev-testharness-5',
+ 'wpt-chrome-dev-testharness-6',
+ 'wpt-chrome-dev-testharness-7',
+ 'wpt-chrome-dev-testharness-8',
+ 'wpt-chrome-dev-testharness-9',
+ 'wpt-chrome-dev-testharness-10',
+ 'wpt-chrome-dev-testharness-11',
+ 'wpt-chrome-dev-testharness-12',
+ 'wpt-chrome-dev-testharness-13',
+ 'wpt-chrome-dev-testharness-14',
+ 'wpt-chrome-dev-testharness-15',
+ 'wpt-chrome-dev-testharness-16',
+ 'wpt-firefox-nightly-reftest-1',
+ 'wpt-firefox-nightly-reftest-2',
+ 'wpt-firefox-nightly-reftest-3',
+ 'wpt-firefox-nightly-reftest-4',
+ 'wpt-firefox-nightly-reftest-5',
+ 'wpt-chrome-dev-reftest-1',
+ 'wpt-chrome-dev-reftest-2',
+ 'wpt-chrome-dev-reftest-3',
+ 'wpt-chrome-dev-reftest-4',
+ 'wpt-chrome-dev-reftest-5',
+ 'wpt-firefox-nightly-wdspec-1',
+ 'wpt-firefox-nightly-wdspec-2',
+ 'wpt-chrome-dev-wdspec-1',
+ 'wpt-chrome-dev-wdspec-2',
+ 'wpt-firefox-nightly-crashtest-1',
+ 'wpt-chrome-dev-crashtest-1',
+ 'wpt-firefox-nightly-print-reftest-1',
+ 'wpt-chrome-dev-print-reftest-1',
+ 'lint']),
+ ("pr_event.json", True, {".taskcluster.yml", ".travis.yml", "tools/ci/start.sh"},
+ ['lint',
+ 'tools/ unittests (Python 3.6)',
+ 'tools/ unittests (Python 3.10)',
+ 'tools/ integration tests (Python 3.6)',
+ 'tools/ integration tests (Python 3.10)',
+ 'resources/ tests (Python 3.6)',
+ 'resources/ tests (Python 3.10)',
+ 'download-firefox-nightly',
+ 'infrastructure/ tests',
+ 'sink-task']),
+ # More tests are affected in the actual PR but it shouldn't affect the scheduled tasks
+ ("pr_event_tests_affected.json", True, {"layout-instability/clip-negative-bottom-margin.html",
+ "layout-instability/composited-element-movement.html"},
+ ['download-firefox-nightly',
+ 'wpt-firefox-nightly-stability',
+ 'wpt-firefox-nightly-results',
+ 'wpt-firefox-nightly-results-without-changes',
+ 'wpt-chrome-dev-results',
+ 'wpt-chrome-dev-results-without-changes',
+ 'lint',
+ 'sink-task']),
+ ("pr_event_tests_affected.json", True, {"resources/testharness.js"},
+ ['lint',
+ 'resources/ tests (Python 3.6)',
+ 'resources/ tests (Python 3.10)',
+ 'download-firefox-nightly',
+ 'infrastructure/ tests',
+ 'sink-task']),
+ ("epochs_daily_push_event.json", False, None,
+ ['download-firefox-stable',
+ 'wpt-firefox-stable-testharness-1',
+ 'wpt-firefox-stable-testharness-2',
+ 'wpt-firefox-stable-testharness-3',
+ 'wpt-firefox-stable-testharness-4',
+ 'wpt-firefox-stable-testharness-5',
+ 'wpt-firefox-stable-testharness-6',
+ 'wpt-firefox-stable-testharness-7',
+ 'wpt-firefox-stable-testharness-8',
+ 'wpt-firefox-stable-testharness-9',
+ 'wpt-firefox-stable-testharness-10',
+ 'wpt-firefox-stable-testharness-11',
+ 'wpt-firefox-stable-testharness-12',
+ 'wpt-firefox-stable-testharness-13',
+ 'wpt-firefox-stable-testharness-14',
+ 'wpt-firefox-stable-testharness-15',
+ 'wpt-firefox-stable-testharness-16',
+ 'wpt-chromium-nightly-testharness-1',
+ 'wpt-chromium-nightly-testharness-2',
+ 'wpt-chromium-nightly-testharness-3',
+ 'wpt-chromium-nightly-testharness-4',
+ 'wpt-chromium-nightly-testharness-5',
+ 'wpt-chromium-nightly-testharness-6',
+ 'wpt-chromium-nightly-testharness-7',
+ 'wpt-chromium-nightly-testharness-8',
+ 'wpt-chromium-nightly-testharness-9',
+ 'wpt-chromium-nightly-testharness-10',
+ 'wpt-chromium-nightly-testharness-11',
+ 'wpt-chromium-nightly-testharness-12',
+ 'wpt-chromium-nightly-testharness-13',
+ 'wpt-chromium-nightly-testharness-14',
+ 'wpt-chromium-nightly-testharness-15',
+ 'wpt-chromium-nightly-testharness-16',
+ 'wpt-chrome-stable-testharness-1',
+ 'wpt-chrome-stable-testharness-2',
+ 'wpt-chrome-stable-testharness-3',
+ 'wpt-chrome-stable-testharness-4',
+ 'wpt-chrome-stable-testharness-5',
+ 'wpt-chrome-stable-testharness-6',
+ 'wpt-chrome-stable-testharness-7',
+ 'wpt-chrome-stable-testharness-8',
+ 'wpt-chrome-stable-testharness-9',
+ 'wpt-chrome-stable-testharness-10',
+ 'wpt-chrome-stable-testharness-11',
+ 'wpt-chrome-stable-testharness-12',
+ 'wpt-chrome-stable-testharness-13',
+ 'wpt-chrome-stable-testharness-14',
+ 'wpt-chrome-stable-testharness-15',
+ 'wpt-chrome-stable-testharness-16',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-1',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-2',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-3',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-4',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-5',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-6',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-7',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-8',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-9',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-10',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-11',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-12',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-13',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-14',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-15',
+ 'wpt-webkitgtk_minibrowser-nightly-testharness-16',
+ 'wpt-firefox-stable-reftest-1',
+ 'wpt-firefox-stable-reftest-2',
+ 'wpt-firefox-stable-reftest-3',
+ 'wpt-firefox-stable-reftest-4',
+ 'wpt-firefox-stable-reftest-5',
+ 'wpt-chromium-nightly-reftest-1',
+ 'wpt-chromium-nightly-reftest-2',
+ 'wpt-chromium-nightly-reftest-3',
+ 'wpt-chromium-nightly-reftest-4',
+ 'wpt-chromium-nightly-reftest-5',
+ 'wpt-chrome-stable-reftest-1',
+ 'wpt-chrome-stable-reftest-2',
+ 'wpt-chrome-stable-reftest-3',
+ 'wpt-chrome-stable-reftest-4',
+ 'wpt-chrome-stable-reftest-5',
+ 'wpt-webkitgtk_minibrowser-nightly-reftest-1',
+ 'wpt-webkitgtk_minibrowser-nightly-reftest-2',
+ 'wpt-webkitgtk_minibrowser-nightly-reftest-3',
+ 'wpt-webkitgtk_minibrowser-nightly-reftest-4',
+ 'wpt-webkitgtk_minibrowser-nightly-reftest-5',
+ 'wpt-firefox-stable-wdspec-1',
+ 'wpt-firefox-stable-wdspec-2',
+ 'wpt-chromium-nightly-wdspec-1',
+ 'wpt-chromium-nightly-wdspec-2',
+ 'wpt-chrome-stable-wdspec-1',
+ 'wpt-chrome-stable-wdspec-2',
+ 'wpt-webkitgtk_minibrowser-nightly-wdspec-1',
+ 'wpt-webkitgtk_minibrowser-nightly-wdspec-2',
+ 'wpt-firefox-stable-crashtest-1',
+ 'wpt-chromium-nightly-crashtest-1',
+ 'wpt-chrome-stable-crashtest-1',
+ 'wpt-webkitgtk_minibrowser-nightly-crashtest-1',
+ 'wpt-firefox-stable-print-reftest-1',
+ 'wpt-chromium-nightly-print-reftest-1',
+ 'wpt-chrome-stable-print-reftest-1'])
+])
+def test_schedule_tasks(event_path, is_pr, files_changed, expected):
+ with mock.patch("tools.ci.tc.decision.get_fetch_rev", return_value=(None, None, None)):
+ with mock.patch("tools.wpt.testfiles.repo_files_changed",
+ return_value=files_changed):
+ with open(data_path(event_path), encoding="utf8") as event_file:
+ event = json.load(event_file)
+ scheduled = decision.decide(event)
+ assert list(scheduled.keys()) == expected
diff --git a/testing/web-platform/tests/tools/ci/tests/__init__.py b/testing/web-platform/tests/tools/ci/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/ci/tests/test_jobs.py b/testing/web-platform/tests/tools/ci/tests/test_jobs.py
new file mode 100644
index 0000000000..421af78fef
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/tests/test_jobs.py
@@ -0,0 +1,132 @@
+# mypy: allow-untyped-defs
+
+from tools.ci import jobs
+
+all_jobs = {
+ "lint",
+ "manifest_upload",
+ "resources_unittest",
+ "affected_tests",
+ "stability",
+ "tools_unittest",
+ "update_built",
+ "wpt_integration",
+ "wptrunner_infrastructure",
+ "wptrunner_unittest",
+}
+
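+# Jobs scheduled for every change; the rest of all_jobs only run when paths
+# they cover are modified, as the tests below demonstrate.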
+default_jobs = {"lint", "manifest_upload"}
+
+
+def test_all():
+ assert jobs.get_jobs(["README.md"], all=True) == all_jobs
+
+
+def test_default():
+ assert jobs.get_jobs(["README.md"]) == default_jobs
+
+
+def test_testharness():
+ assert jobs.get_jobs(["resources/testharness.js"]) == default_jobs | {"resources_unittest",
+ "wptrunner_infrastructure"}
+ assert jobs.get_jobs(["resources/testharness.js"],
+ includes=["resources_unittest"]) == {"resources_unittest"}
+ assert jobs.get_jobs(["tools/wptserve/wptserve/config.py"],
+ includes=["resources_unittest"]) == {"resources_unittest"}
+ assert jobs.get_jobs(["foo/resources/testharness.js"],
+ includes=["resources_unittest"]) == set()
+
+
+def test_stability():
+ assert jobs.get_jobs(["dom/historical.html"],
+ includes=["stability"]) == {"stability"}
+ assert jobs.get_jobs(["tools/pytest.ini"],
+ includes=["stability"]) == set()
+ assert jobs.get_jobs(["serve"],
+ includes=["stability"]) == set()
+ assert jobs.get_jobs(["resources/testharness.js"],
+ includes=["stability"]) == set()
+ assert jobs.get_jobs(["docs/.gitignore"],
+ includes=["stability"]) == set()
+ assert jobs.get_jobs(["dom/tools/example.py"],
+ includes=["stability"]) == set()
+ assert jobs.get_jobs(["conformance-checkers/test.html"],
+ includes=["stability"]) == set()
+ assert jobs.get_jobs(["dom/README.md"],
+ includes=["stability"]) == set()
+ assert jobs.get_jobs(["css/build-css-testsuite.sh"],
+ includes=["stability"]) == set()
+ assert jobs.get_jobs(["css/CSS21/test-001.html"],
+ includes=["stability"]) == {"stability"}
+ assert jobs.get_jobs(["css/build-css-testsuite.sh",
+ "css/CSS21/test-001.html"],
+ includes=["stability"]) == {"stability"}
+
+def test_affected_tests():
+ assert jobs.get_jobs(["dom/historical.html"],
+ includes=["affected_tests"]) == {"affected_tests"}
+ assert jobs.get_jobs(["tools/pytest.ini"],
+ includes=["affected_tests"]) == set()
+ assert jobs.get_jobs(["serve"],
+ includes=["affected_tests"]) == set()
+ assert jobs.get_jobs(["resources/testharness.js"],
+ includes=["affected_tests"]) == set()
+ assert jobs.get_jobs(["docs/.gitignore"],
+ includes=["affected_tests"]) == set()
+ assert jobs.get_jobs(["dom/tools/example.py"],
+ includes=["affected_tests"]) == set()
+ assert jobs.get_jobs(["conformance-checkers/test.html"],
+ includes=["affected_tests"]) == set()
+ assert jobs.get_jobs(["dom/README.md"],
+ includes=["affected_tests"]) == set()
+ assert jobs.get_jobs(["css/build-css-testsuite.sh"],
+ includes=["affected_tests"]) == set()
+ assert jobs.get_jobs(["css/CSS21/test-001.html"],
+ includes=["affected_tests"]) == {"affected_tests"}
+ assert jobs.get_jobs(["css/build-css-testsuite.sh",
+ "css/CSS21/test-001.html"],
+ includes=["affected_tests"]) == {"affected_tests"}
+ assert jobs.get_jobs(["resources/idlharness.js"],
+ includes=["affected_tests"]) == {"affected_tests"}
+
+def test_tools_unittest():
+ assert jobs.get_jobs(["tools/ci/test/test_jobs.py"],
+ includes=["tools_unittest"]) == {"tools_unittest"}
+ assert jobs.get_jobs(["dom/tools/example.py"],
+ includes=["tools_unittest"]) == set()
+ assert jobs.get_jobs(["dom/historical.html"],
+ includes=["tools_unittest"]) == set()
+
+
+def test_wptrunner_unittest():
+ assert jobs.get_jobs(["tools/wptrunner/wptrunner/wptrunner.py"],
+ includes=["wptrunner_unittest"]) == {"wptrunner_unittest"}
+ assert jobs.get_jobs(["tools/example.py"],
+ includes=["wptrunner_unittest"]) == {"wptrunner_unittest"}
+
+
+def test_update_built():
+ assert jobs.get_jobs(["html/canvas/element/foo.html"],
+ includes=["update_built"]) == {"update_built"}
+ assert jobs.get_jobs(["html/foo.html"],
+ includes=["update_built"]) == {"update_built"}
+ assert jobs.get_jobs(["html/canvas/offscreen/foo.html"],
+ includes=["update_built"]) == {"update_built"}
+
+
+def test_wpt_integration():
+ assert jobs.get_jobs(["tools/wpt/wpt.py"],
+ includes=["wpt_integration"]) == {"wpt_integration"}
+ assert jobs.get_jobs(["tools/wptrunner/wptrunner/wptrunner.py"],
+ includes=["wpt_integration"]) == {"wpt_integration"}
+
+
+def test_wpt_infrastructure():
+ assert jobs.get_jobs(["tools/hammer.html"],
+ includes=["wptrunner_infrastructure"]) == {"wptrunner_infrastructure"}
+ assert jobs.get_jobs(["infrastructure/assumptions/ahem.html"],
+ includes=["wptrunner_infrastructure"]) == {"wptrunner_infrastructure"}
+
+def test_wdspec_support():
+ assert jobs.get_jobs(["webdriver/tests/support/__init__.py"],
+ includes=["wptrunner_infrastructure"]) == {"wptrunner_infrastructure"}
diff --git a/testing/web-platform/tests/tools/ci/update_built.py b/testing/web-platform/tests/tools/ci/update_built.py
new file mode 100644
index 0000000000..d3850d3ef8
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/update_built.py
@@ -0,0 +1,72 @@
+# mypy: allow-untyped-defs
+
+import logging
+import os
+import subprocess
+from argparse import ArgumentParser
+
+logger = logging.getLogger()
+
+wpt_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+
+scripts = {
+ "canvas": ["html/canvas/tools/gentest.py"],
+ "conformance-checkers": ["conformance-checkers/tools/dl.py",
+ "conformance-checkers/tools/ins-del-datetime.py",
+ "conformance-checkers/tools/picture.py",
+ "conformance-checkers/tools/url.py"],
+ "css-ui": ["css/css-ui/tools/appearance-build-webkit-reftests.py"],
+ # FIXME: https://github.com/web-platform-tests/wpt/issues/32060
+ # "css-text": ["css/css-text/line-breaking/tools/generate-segment-break-transformation-rules-tests.py"],
+ # "css-text-decor": ["css/css-text-decor/tools/generate-text-emphasis-line-height-tests.py",
+ # "css/css-text-decor/tools/generate-text-emphasis-position-property-tests.py",
+ # "css/css-text-decor/tools/generate-text-emphasis-ruby-tests.py",
+ # "css/css-text-decor/tools/generate-text-emphasis-style-property-tests.py"],
+ "fetch": ["fetch/metadata/tools/generate.py"],
+ "html5lib": ["html/tools/update_html5lib_tests.py"],
+ "infrastructure": ["infrastructure/assumptions/tools/ahem-generate-table.py"],
+ "mimesniff": ["mimesniff/mime-types/resources/generated-mime-types.py"],
+ "speculative-parsing": ["html/syntax/speculative-parsing/tools/generate.py"]
+}
+
+
+def get_parser():
+ parser = ArgumentParser()
+ parser.add_argument("--list", action="store_true",
+ help="List suites that can be updated and the related script files")
+ parser.add_argument("--include", nargs="*", choices=scripts.keys(), default=None,
+ help="Suites to update (default is to update everything)")
+ return parser
+
+
+def list_suites(include):
+ for name, script_paths in scripts.items():
+ if name in include:
+ print(name)
+ for script_path in script_paths:
+ print(f" {script_path}")
+
+
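+# Entry point for the wpt command-line frontend, which supplies the active
+# virtualenv (presumably registered in tools/ci/commands.json alongside the
+# other CI commands).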
+def run(venv, **kwargs):
+ include = kwargs["include"]
+ if include is None:
+ include = list(scripts.keys())
+
+ if kwargs["list"]:
+ list_suites(include)
+ return 0
+
+ failed = False
+
+ for target in include:
+ for script in scripts[target]:
+ script_path = script.replace("/", os.path.sep)
+ cmd = [os.path.join(venv.bin_path, "python3"), os.path.join(wpt_root, script_path)]
+ logger.info(f"Running {' '.join(cmd)}")
+ try:
+ subprocess.check_call(cmd, cwd=os.path.dirname(script_path))
+ except subprocess.CalledProcessError:
+ logger.error(f"Update script {script} failed")
+ failed = True
+
+ return 1 if failed else 0
diff --git a/testing/web-platform/tests/tools/ci/website_build.sh b/testing/web-platform/tests/tools/ci/website_build.sh
new file mode 100755
index 0000000000..adfdf41ae7
--- /dev/null
+++ b/testing/web-platform/tests/tools/ci/website_build.sh
@@ -0,0 +1,86 @@
+#!/bin/bash
+
+set -ex
+
+neutral_status=0
+source_revision=$(git rev-parse HEAD)
+# The token available in the `GITHUB_TOKEN` variable may be used to push to the
+# repository, but GitHub Pages will not rebuild the website in response to such
+# events. Use an access token generated for the project's machine user,
+# wpt-pr-bot.
+#
+# https://help.github.com/en/articles/generic-jekyll-build-failures
+remote_url=https://${DEPLOY_TOKEN}@github.com/${GITHUB_REPOSITORY}.git
+wpt_root=$PWD
+
+function json_property {
+ cat ${1} | \
+ python -c "import json, sys; print(json.load(sys.stdin).get(\"${2}\", \"\"))"
+}
+
+function is_pull_request {
+ test -n "$(json_property ${GITHUB_EVENT_PATH} pull_request)"
+}
+
+function targets_master {
+ test $(json_property ${GITHUB_EVENT_PATH} ref) == 'refs/heads/master'
+}
+
+git config --global user.email "wpt-pr-bot@users.noreply.github.com"
+git config --global user.name "wpt-pr-bot"
+
+# Prepare the output directory so that the new build can be pushed to the
+# repository as an incremental change to the prior build.
+mkdir -p docs/_build
+cd docs/_build
+git init
+git fetch --depth 1 ${remote_url} gh-pages
+git checkout FETCH_HEAD
+git rm -rf .
+
+# Build the website
+unset NODE_ENV
+cd ${wpt_root}/docs
+npm install .
+export PATH="$PWD/node_modules/.bin:$PATH"
+cd ${wpt_root}
+
+./wpt build-docs
+
+cd docs/_build
+# Configure DNS
+echo web-platform-tests.org > CNAME
+# Disable Jekyll
+# https://github.blog/2009-12-29-bypassing-jekyll-on-github-pages/
+touch .nojekyll
+
+# Publish the website by pushing the built contents to the `gh-pages` branch
+git add .
+
+echo This submission alters the compiled files as follows
+
+git diff --staged
+
+if is_pull_request ; then
+ echo Submission comes from a pull request. Exiting without publishing.
+
+ exit ${neutral_status}
+fi
+
+if ! targets_master ; then
+ echo Submission does not target the 'master' branch. Exiting without publishing.
+
+ exit ${neutral_status}
+fi
+
+if git diff --exit-code --quiet --staged ; then
+ echo No change to the website contents. Exiting without publishing.
+
+ exit ${neutral_status}
+fi
+
+git commit --message "Build documentation
+
+These files were generated from commit ${source_revision}"
+
+git push --force ${remote_url} HEAD:gh-pages
diff --git a/testing/web-platform/tests/tools/conftest.py b/testing/web-platform/tests/tools/conftest.py
new file mode 100644
index 0000000000..021a49fc29
--- /dev/null
+++ b/testing/web-platform/tests/tools/conftest.py
@@ -0,0 +1,15 @@
+import platform
+import os
+
+from hypothesis import settings, HealthCheck
+
+impl = platform.python_implementation()
+
+settings.register_profile("ci", settings(max_examples=1000,
+ deadline=None,
+ suppress_health_check=[HealthCheck.too_slow]))
+settings.register_profile("pypy", settings(deadline=None,
+ suppress_health_check=[HealthCheck.too_slow]))
+
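+# Profile selection is controlled by the HYPOTHESIS_PROFILE environment
+# variable (e.g. HYPOTHESIS_PROFILE=ci); without it, PyPy falls back to the
+# "pypy" profile and every other implementation to the built-in "default" one.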
+settings.load_profile(os.getenv("HYPOTHESIS_PROFILE",
+ "default" if impl != "PyPy" else "pypy"))
diff --git a/testing/web-platform/tests/tools/docker/.bashrc b/testing/web-platform/tests/tools/docker/.bashrc
new file mode 100644
index 0000000000..bbe03c489a
--- /dev/null
+++ b/testing/web-platform/tests/tools/docker/.bashrc
@@ -0,0 +1,4 @@
+function xvfb_start() {
+ GEOMETRY="$SCREEN_WIDTH""x""$SCREEN_HEIGHT""x""$SCREEN_DEPTH"
+    xvfb-run --server-args="-screen 0 $GEOMETRY -ac +extension RANDR" "$@"
+}
diff --git a/testing/web-platform/tests/tools/docker/Dockerfile b/testing/web-platform/tests/tools/docker/Dockerfile
new file mode 100644
index 0000000000..b7eae6440d
--- /dev/null
+++ b/testing/web-platform/tests/tools/docker/Dockerfile
@@ -0,0 +1,110 @@
+FROM ubuntu:20.04
+
+# No interactive frontend during docker build
+ENV DEBIAN_FRONTEND=noninteractive \
+ DEBCONF_NONINTERACTIVE_SEEN=true
+
+# General requirements not in the base image
+RUN apt-get -qqy update \
+ && apt-get -qqy install \
+ bridge-utils \
+ bzip2 \
+ ca-certificates \
+ curl \
+ dbus-x11 \
+ earlyoom \
+ fluxbox \
+ gdebi \
+ git \
+ glib-networking-services \
+ gstreamer1.0-plugins-bad \
+ gstreamer1.0-gl \
+ libosmesa6-dev \
+ libproxy1-plugin-webkit \
+ libvirt-daemon-system \
+ libvirt-clients \
+ libunwind8 \
+ libxcb-shape0-dev \
+ locales \
+ openjdk-8-jre-headless \
+ pulseaudio \
+ python3 \
+ python3-dev \
+ python3-pip \
+ software-properties-common \
+ qemu-kvm \
+ tzdata \
+ sudo \
+ unzip \
+ wget \
+ xvfb
+
+# python3.6 is not available by default in new versions of Ubuntu.
+RUN apt-add-repository -y ppa:deadsnakes/ppa
+
+# Ensure a `python` binary exists
+RUN apt-get -qqy update \
+ && apt-get install -qqy python-is-python3
+
+# Installing just the deps of firefox and chrome is moderately tricky, so
+# just install the default versions of them, and some extra deps we happen
+# to know that chrome requires
+
+RUN apt-get -qqy install \
+ firefox \
+ libnss3-tools \
+ fonts-liberation \
+ indicator-application \
+ libappindicator1 \
+ libappindicator3-1 \
+ libdbusmenu-gtk3-4 \
+ libindicator3-7 \
+ libindicator7
+
+RUN apt-get -y autoremove
+
+RUN pip install --upgrade pip
+RUN pip install virtualenv
+
+ENV TZ "UTC"
+RUN echo "${TZ}" > /etc/timezone \
+ && dpkg-reconfigure --frontend noninteractive tzdata
+
+# Set the locale
+RUN locale-gen en_US.UTF-8
+ENV LANG en_US.UTF-8
+ENV LANGUAGE en_US:en
+ENV LC_ALL en_US.UTF-8
+
+RUN useradd test \
+ --shell /bin/bash \
+ --create-home \
+ && usermod -a -G sudo test \
+ && usermod -a -G libvirt test \
+ && usermod -a -G libvirt-qemu test \
+ && echo 'ALL ALL = (ALL) NOPASSWD: ALL' >> /etc/sudoers \
+ && echo 'test:secret' | chpasswd
+
+ENV SCREEN_WIDTH 1280
+ENV SCREEN_HEIGHT 1024
+ENV SCREEN_DEPTH 24
+ENV DISPLAY :99.0
+
+USER test
+
+WORKDIR /home/test
+
+# Remove information on how to use sudo on login
+RUN sudo echo ""
+
+RUN mkdir -p /home/test/artifacts
+RUN mkdir -p /home/test/bin
+
+ENV PATH="/home/test/bin:/home/test/.local/bin:${PATH}"
+
+WORKDIR /home/test/
+
+COPY .bashrc /home/test/.bashrc
+
+COPY start.sh /home/test/start.sh
+COPY retry.py /home/test/bin/retry
diff --git a/testing/web-platform/tests/tools/docker/README.md b/testing/web-platform/tests/tools/docker/README.md
new file mode 100644
index 0000000000..bc98d19861
--- /dev/null
+++ b/testing/web-platform/tests/tools/docker/README.md
@@ -0,0 +1,16 @@
+This docker image is used for testing Chrome, Firefox, WebKitGTK and running
+other tasks on Taskcluster. When any of the files in this directory change,
+the image must be updated as well. Doing this requires you to be part of the
+'webplatformtests' organization on Docker Hub; ping @foolip or @jpchase
+if you are not a member.
+
+The tag for a new docker image is of the form
+`webplatformtests/wpt:{current-version + 0.01}`
+
+To update the docker image:
+
+* Update the following Taskcluster configurations to use the new image:
+ - `.taskcluster.yml` (the decision task)
+ - `tools/ci/tc/tasks/test.yml` (all the other tasks)
+
+* Run `wpt docker-push`
diff --git a/testing/web-platform/tests/tools/docker/__init__.py b/testing/web-platform/tests/tools/docker/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/docker/__init__.py
diff --git a/testing/web-platform/tests/tools/docker/commands.json b/testing/web-platform/tests/tools/docker/commands.json
new file mode 100644
index 0000000000..eb6d7c9d82
--- /dev/null
+++ b/testing/web-platform/tests/tools/docker/commands.json
@@ -0,0 +1,25 @@
+{
+ "docker-run": {
+ "path": "frontend.py",
+ "script": "run",
+ "parser": "parser_run",
+ "help": "Run wpt docker image",
+ "virtualenv": false
+ },
+ "docker-build": {
+ "path": "frontend.py",
+ "script": "build",
+ "help": "Build wpt docker image",
+ "virtualenv": false
+ },
+ "docker-push": {
+ "path": "frontend.py",
+ "script": "push",
+ "parser": "parser_push",
+ "help": "Build and push wpt docker image",
+ "virtualenv": true,
+ "requirements": [
+ "requirements.txt"
+ ]
+ }
+}
diff --git a/testing/web-platform/tests/tools/docker/frontend.py b/testing/web-platform/tests/tools/docker/frontend.py
new file mode 100644
index 0000000000..6ee2d20633
--- /dev/null
+++ b/testing/web-platform/tests/tools/docker/frontend.py
@@ -0,0 +1,141 @@
+# mypy: allow-untyped-defs
+
+import argparse
+import logging
+import os
+import re
+import subprocess
+import sys
+
+here = os.path.abspath(os.path.dirname(__file__))
+wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
+
+logger = logging.getLogger()
+
+
+def build(tag="wpt:local", *args, **kwargs):
+ subprocess.check_call(["docker",
+ "build",
+ "--pull",
+ "--tag", tag,
+ here])
+
+
+def parser_push():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--tag", action="store",
+ help="Tag to use (default is taken from .taskcluster.yml)")
+ parser.add_argument("--force", action="store_true",
+ help="Ignore warnings and push anyway")
+ return parser
+
+
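+# Recursively collect every value stored under a key named `target` anywhere
+# in a nested dict/list structure (used below to find all "image" entries in
+# .taskcluster.yml).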
+def walk_yaml(root, target):
+ rv = []
+ if isinstance(root, list):
+ for value in root:
+ if isinstance(value, (dict, list)):
+ rv.extend(walk_yaml(value, target))
+ elif isinstance(root, dict):
+ for key, value in root.items():
+ if isinstance(value, (dict, list)):
+ rv.extend(walk_yaml(value, target))
+ elif key == target:
+ rv.append(value)
+ return rv
+
+
+def read_image_name():
+ import yaml
+ with open(os.path.join(wpt_root, ".taskcluster.yml")) as f:
+ taskcluster_data = yaml.safe_load(f)
+ taskcluster_values = set(walk_yaml(taskcluster_data, "image"))
+ with open(os.path.join(wpt_root, "tools", "ci", "tc", "tasks", "test.yml")) as f:
+ test_data = yaml.safe_load(f)
+ tests_value = test_data["components"]["wpt-base"]["image"]
+ return taskcluster_values, tests_value
+
+
+def lookup_tag(tag):
+ import requests
+ org, repo_version = tag.split("/", 1)
+ repo, version = repo_version.rsplit(":", 1)
+ resp = requests.get("https://hub.docker.com/v2/repositories/%s/%s/tags/%s" %
+ (org, repo, version))
+ if resp.status_code == 200:
+ return True
+ if resp.status_code == 404:
+ return False
+ resp.raise_for_status()
+
+
+def push(venv, tag=None, force=False, *args, **kwargs):
+ taskcluster_tags, tests_tag = read_image_name()
+
+ taskcluster_tag = taskcluster_tags.pop()
+
+ error_log = logger.warning if force else logger.error
+ if len(taskcluster_tags) != 0 or tests_tag != taskcluster_tag:
+ error_log("Image names in .taskcluster.yml and tools/ci/tc/tasks/test.yml "
+ "don't match.")
+ if not force:
+ sys.exit(1)
+ if tag is not None and tag != taskcluster_tag:
+ error_log("Supplied tag doesn't match .taskcluster.yml or "
+ "tools/ci/tc/tasks/test.yml; remember to update before pushing")
+ if not force:
+ sys.exit(1)
+ if tag is None:
+ logger.info("Using tag %s from .taskcluster.yml" % taskcluster_tag)
+ tag = taskcluster_tag
+
+ tag_re = re.compile(r"webplatformtests/wpt:\d\.\d+")
+ if not tag_re.match(tag):
+ error_log("Tag doesn't match expected format webplatformtests/wpt:0.x")
+ if not force:
+ sys.exit(1)
+
+ if lookup_tag(tag):
+ # No override for this case
+ logger.critical("Tag %s already exists" % tag)
+ sys.exit(1)
+
+ build(tag)
+ subprocess.check_call(["docker",
+ "push",
+ tag])
+
+
+def parser_run():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--rebuild", action="store_true", help="Force rebuild of image")
+ parser.add_argument("--checkout", action="store",
+ help="Revision to checkout in the image. "
+ "If this is not supplied we mount the wpt checkout on the host as "
+ "/home/test/web-platform-tests/")
+ parser.add_argument("--privileged", action="store_true",
+                        help="Run the image in privileged mode (required for emulators)")
+ parser.add_argument("--tag", action="store", default="wpt:local",
+ help="Docker image tag to use (default wpt:local)")
+ return parser
+
+
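+# Invoked via `wpt docker-run` (see commands.json in this directory):
+# optionally rebuilds the image, applies the bundled seccomp profile, and
+# either passes --checkout to the container as a REF environment variable or
+# bind-mounts the host checkout at /home/test/web-platform-tests.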
+def run(*args, **kwargs):
+ if kwargs["rebuild"]:
+ build()
+
+ args = ["docker", "run"]
+ args.extend(["--security-opt", "seccomp:%s" %
+ os.path.join(wpt_root, "tools", "docker", "seccomp.json")])
+ if kwargs["privileged"]:
+ args.append("--privileged")
+ if kwargs["checkout"]:
+ args.extend(["--env", "REF==%s" % kwargs["checkout"]])
+ else:
+ args.extend(["--mount",
+ "type=bind,source=%s,target=/home/test/web-platform-tests" % wpt_root])
+ args.extend(["-it", kwargs["tag"]])
+
+ proc = subprocess.Popen(args)
+ proc.wait()
+ return proc.returncode
diff --git a/testing/web-platform/tests/tools/docker/requirements.txt b/testing/web-platform/tests/tools/docker/requirements.txt
new file mode 100644
index 0000000000..06d168a18d
--- /dev/null
+++ b/testing/web-platform/tests/tools/docker/requirements.txt
@@ -0,0 +1,2 @@
+pyyaml==6.0
+requests==2.27.1
diff --git a/testing/web-platform/tests/tools/docker/retry.py b/testing/web-platform/tests/tools/docker/retry.py
new file mode 100755
index 0000000000..8ecf5874fe
--- /dev/null
+++ b/testing/web-platform/tests/tools/docker/retry.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+import argparse
+import subprocess
+import time
+import sys
+
+
+def get_args() -> argparse.ArgumentParser:
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--delay", action="store", type=float, default=3, help="Initial delay before retry, in seconds")
+ parser.add_argument("--count", action="store", type=int, default=5, help="Total number of tries")
+ parser.add_argument("--factor", action="store", type=float, default=2, help="Exponential backoff factor")
+ parser.add_argument("cmd", nargs=argparse.REMAINDER)
+ return parser
+
+
+def log(value: str) -> None:
+ print(value)
+ sys.stdout.flush()
+
+
+def main() -> None:
+ args = get_args().parse_args()
+
+ if not args.cmd:
+ log("No command supplied")
+ sys.exit(1)
+
+ retcode = None
+
+ for n in range(args.count):
+ try:
+ log("Running %s [try %d/%d]" % (" ".join(args.cmd), (n+1), args.count))
+ subprocess.check_call(args.cmd)
+ except subprocess.CalledProcessError as e:
+ retcode = e.returncode
+ else:
+ log("Command succeeded")
+ retcode = 0
+ break
+
+ if args.factor == 0:
+ wait_time = (n+1) * args.delay
+ else:
+ wait_time = args.factor**n * args.delay
+ if n < args.count - 1:
+ log("Command failed, waiting %s seconds to retry" % wait_time)
+ time.sleep(wait_time)
+ else:
+ log("Command failed, out of retries")
+
+ sys.exit(retcode)
+
+
+if __name__ == "__main__":
+ main()
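
With the defaults above (delay 3, factor 2, count 5) the sleeps between attempts follow an exponential schedule; with --factor 0 the schedule becomes linear, (n+1) * delay. A minimal sketch of the computed wait times:

delay, factor, count = 3, 2, 5                         # the argparse defaults
waits = [factor ** n * delay for n in range(count - 1)]
print(waits)  # [3, 6, 12, 24] - there is no sleep after the final attempt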
diff --git a/testing/web-platform/tests/tools/docker/seccomp.json b/testing/web-platform/tests/tools/docker/seccomp.json
new file mode 100644
index 0000000000..8e8274ce32
--- /dev/null
+++ b/testing/web-platform/tests/tools/docker/seccomp.json
@@ -0,0 +1,798 @@
+{
+ "defaultAction": "SCMP_ACT_ERRNO",
+ "archMap": [
+ {
+ "architecture": "SCMP_ARCH_X86_64",
+ "subArchitectures": [
+ "SCMP_ARCH_X86",
+ "SCMP_ARCH_X32"
+ ]
+ },
+ {
+ "architecture": "SCMP_ARCH_AARCH64",
+ "subArchitectures": [
+ "SCMP_ARCH_ARM"
+ ]
+ },
+ {
+ "architecture": "SCMP_ARCH_MIPS64",
+ "subArchitectures": [
+ "SCMP_ARCH_MIPS",
+ "SCMP_ARCH_MIPS64N32"
+ ]
+ },
+ {
+ "architecture": "SCMP_ARCH_MIPS64N32",
+ "subArchitectures": [
+ "SCMP_ARCH_MIPS",
+ "SCMP_ARCH_MIPS64"
+ ]
+ },
+ {
+ "architecture": "SCMP_ARCH_MIPSEL64",
+ "subArchitectures": [
+ "SCMP_ARCH_MIPSEL",
+ "SCMP_ARCH_MIPSEL64N32"
+ ]
+ },
+ {
+ "architecture": "SCMP_ARCH_MIPSEL64N32",
+ "subArchitectures": [
+ "SCMP_ARCH_MIPSEL",
+ "SCMP_ARCH_MIPSEL64"
+ ]
+ },
+ {
+ "architecture": "SCMP_ARCH_S390X",
+ "subArchitectures": [
+ "SCMP_ARCH_S390"
+ ]
+ }
+ ],
+ "syscalls": [
+ {
+ "names": [
+ "accept",
+ "accept4",
+ "access",
+ "adjtimex",
+ "alarm",
+ "bind",
+ "brk",
+ "capget",
+ "capset",
+ "chdir",
+ "chmod",
+ "chown",
+ "chown32",
+ "clock_getres",
+ "clock_gettime",
+ "clock_nanosleep",
+ "clone",
+ "close",
+ "connect",
+ "copy_file_range",
+ "creat",
+ "dup",
+ "dup2",
+ "dup3",
+ "epoll_create",
+ "epoll_create1",
+ "epoll_ctl",
+ "epoll_ctl_old",
+ "epoll_pwait",
+ "epoll_wait",
+ "epoll_wait_old",
+ "eventfd",
+ "eventfd2",
+ "execve",
+ "execveat",
+ "exit",
+ "exit_group",
+ "faccessat",
+ "fadvise64",
+ "fadvise64_64",
+ "fallocate",
+ "fanotify_mark",
+ "fchdir",
+ "fchmod",
+ "fchmodat",
+ "fchown",
+ "fchown32",
+ "fchownat",
+ "fcntl",
+ "fcntl64",
+ "fdatasync",
+ "fgetxattr",
+ "flistxattr",
+ "flock",
+ "fork",
+ "fremovexattr",
+ "fsetxattr",
+ "fstat",
+ "fstat64",
+ "fstatat64",
+ "fstatfs",
+ "fstatfs64",
+ "fsync",
+ "ftruncate",
+ "ftruncate64",
+ "futex",
+ "futimesat",
+ "getcpu",
+ "getcwd",
+ "getdents",
+ "getdents64",
+ "getegid",
+ "getegid32",
+ "geteuid",
+ "geteuid32",
+ "getgid",
+ "getgid32",
+ "getgroups",
+ "getgroups32",
+ "getitimer",
+ "getpeername",
+ "getpgid",
+ "getpgrp",
+ "getpid",
+ "getppid",
+ "getpriority",
+ "getrandom",
+ "getresgid",
+ "getresgid32",
+ "getresuid",
+ "getresuid32",
+ "getrlimit",
+ "get_robust_list",
+ "getrusage",
+ "getsid",
+ "getsockname",
+ "getsockopt",
+ "get_thread_area",
+ "gettid",
+ "gettimeofday",
+ "getuid",
+ "getuid32",
+ "getxattr",
+ "inotify_add_watch",
+ "inotify_init",
+ "inotify_init1",
+ "inotify_rm_watch",
+ "io_cancel",
+ "ioctl",
+ "io_destroy",
+ "io_getevents",
+ "io_pgetevents",
+ "ioprio_get",
+ "ioprio_set",
+ "io_setup",
+ "io_submit",
+ "io_uring_enter",
+ "io_uring_register",
+ "io_uring_setup",
+ "ipc",
+ "kill",
+ "lchown",
+ "lchown32",
+ "lgetxattr",
+ "link",
+ "linkat",
+ "listen",
+ "listxattr",
+ "llistxattr",
+ "_llseek",
+ "lremovexattr",
+ "lseek",
+ "lsetxattr",
+ "lstat",
+ "lstat64",
+ "madvise",
+ "memfd_create",
+ "mincore",
+ "mkdir",
+ "mkdirat",
+ "mknod",
+ "mknodat",
+ "mlock",
+ "mlock2",
+ "mlockall",
+ "mmap",
+ "mmap2",
+ "mprotect",
+ "mq_getsetattr",
+ "mq_notify",
+ "mq_open",
+ "mq_timedreceive",
+ "mq_timedsend",
+ "mq_unlink",
+ "mremap",
+ "msgctl",
+ "msgget",
+ "msgrcv",
+ "msgsnd",
+ "msync",
+ "munlock",
+ "munlockall",
+ "munmap",
+ "nanosleep",
+ "newfstatat",
+ "_newselect",
+ "open",
+ "openat",
+ "pause",
+ "pipe",
+ "pipe2",
+ "poll",
+ "ppoll",
+ "prctl",
+ "pread64",
+ "preadv",
+ "preadv2",
+ "prlimit64",
+ "pselect6",
+ "pwrite64",
+ "pwritev",
+ "pwritev2",
+ "read",
+ "readahead",
+ "readlink",
+ "readlinkat",
+ "readv",
+ "recv",
+ "recvfrom",
+ "recvmmsg",
+ "recvmsg",
+ "remap_file_pages",
+ "removexattr",
+ "rename",
+ "renameat",
+ "renameat2",
+ "restart_syscall",
+ "rmdir",
+ "rt_sigaction",
+ "rt_sigpending",
+ "rt_sigprocmask",
+ "rt_sigqueueinfo",
+ "rt_sigreturn",
+ "rt_sigsuspend",
+ "rt_sigtimedwait",
+ "rt_tgsigqueueinfo",
+ "sched_getaffinity",
+ "sched_getattr",
+ "sched_getparam",
+ "sched_get_priority_max",
+ "sched_get_priority_min",
+ "sched_getscheduler",
+ "sched_rr_get_interval",
+ "sched_setaffinity",
+ "sched_setattr",
+ "sched_setparam",
+ "sched_setscheduler",
+ "sched_yield",
+ "seccomp",
+ "select",
+ "semctl",
+ "semget",
+ "semop",
+ "semtimedop",
+ "send",
+ "sendfile",
+ "sendfile64",
+ "sendmmsg",
+ "sendmsg",
+ "sendto",
+ "setfsgid",
+ "setfsgid32",
+ "setfsuid",
+ "setfsuid32",
+ "setgid",
+ "setgid32",
+ "setgroups",
+ "setgroups32",
+ "setitimer",
+ "setpgid",
+ "setpriority",
+ "setregid",
+ "setregid32",
+ "setresgid",
+ "setresgid32",
+ "setresuid",
+ "setresuid32",
+ "setreuid",
+ "setreuid32",
+ "setrlimit",
+ "set_robust_list",
+ "setsid",
+ "setsockopt",
+ "set_thread_area",
+ "set_tid_address",
+ "setuid",
+ "setuid32",
+ "setxattr",
+ "shmat",
+ "shmctl",
+ "shmdt",
+ "shmget",
+ "shutdown",
+ "sigaltstack",
+ "signalfd",
+ "signalfd4",
+ "sigprocmask",
+ "sigreturn",
+ "socket",
+ "socketcall",
+ "socketpair",
+ "splice",
+ "stat",
+ "stat64",
+ "statfs",
+ "statfs64",
+ "statx",
+ "symlink",
+ "symlinkat",
+ "sync",
+ "sync_file_range",
+ "syncfs",
+ "sysinfo",
+ "tee",
+ "tgkill",
+ "time",
+ "timer_create",
+ "timer_delete",
+ "timerfd_create",
+ "timerfd_gettime",
+ "timerfd_settime",
+ "timer_getoverrun",
+ "timer_gettime",
+ "timer_settime",
+ "times",
+ "tkill",
+ "truncate",
+ "truncate64",
+ "ugetrlimit",
+ "umask",
+ "uname",
+ "unlink",
+ "unlinkat",
+ "unshare",
+ "utime",
+ "utimensat",
+ "utimes",
+ "vfork",
+ "vmsplice",
+ "wait4",
+ "waitid",
+ "waitpid",
+ "write",
+ "writev"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": null,
+ "excludes": null
+ },
+ {
+ "names": [
+ "ptrace"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": null,
+ "comment": "",
+ "includes": {
+ "minKernel": "4.8"
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "personality"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [
+ {
+ "index": 0,
+ "value": 0,
+ "valueTwo": 0,
+ "op": "SCMP_CMP_EQ"
+ }
+ ],
+ "comment": "",
+ "includes": null,
+ "excludes": null
+ },
+ {
+ "names": [
+ "personality"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [
+ {
+ "index": 0,
+ "value": 8,
+ "valueTwo": 0,
+ "op": "SCMP_CMP_EQ"
+ }
+ ],
+ "comment": "",
+ "includes": null,
+ "excludes": null
+ },
+ {
+ "names": [
+ "personality"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [
+ {
+ "index": 0,
+ "value": 131072,
+ "valueTwo": 0,
+ "op": "SCMP_CMP_EQ"
+ }
+ ],
+ "comment": "",
+ "includes": null,
+ "excludes": null
+ },
+ {
+ "names": [
+ "personality"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [
+ {
+ "index": 0,
+ "value": 131080,
+ "valueTwo": 0,
+ "op": "SCMP_CMP_EQ"
+ }
+ ],
+ "comment": "",
+ "includes": null,
+ "excludes": null
+ },
+ {
+ "names": [
+ "personality"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [
+ {
+ "index": 0,
+ "value": 4294967295,
+ "valueTwo": 0,
+ "op": "SCMP_CMP_EQ"
+ }
+ ],
+ "comment": "",
+ "includes": null,
+ "excludes": null
+ },
+ {
+ "names": [
+ "sync_file_range2"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "arches": [
+ "ppc64le"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "arm_fadvise64_64",
+ "arm_sync_file_range",
+ "sync_file_range2",
+ "breakpoint",
+ "cacheflush",
+ "set_tls"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "arches": [
+ "arm",
+ "arm64"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "arch_prctl"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "arches": [
+ "amd64",
+ "x32"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "modify_ldt"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "arches": [
+ "amd64",
+ "x32",
+ "x86"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "s390_pci_mmio_read",
+ "s390_pci_mmio_write",
+ "s390_runtime_instr"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "arches": [
+ "s390",
+ "s390x"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "open_by_handle_at"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "caps": [
+ "CAP_DAC_READ_SEARCH"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "bpf",
+ "fanotify_init",
+ "lookup_dcookie",
+ "mount",
+ "name_to_handle_at",
+ "perf_event_open",
+ "quotactl",
+ "setdomainname",
+ "sethostname",
+ "setns",
+ "syslog",
+ "umount",
+ "umount2"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "caps": [
+ "CAP_SYS_ADMIN"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "clone"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [
+ {
+ "index": 0,
+ "value": 2114060288,
+ "valueTwo": 0,
+ "op": "SCMP_CMP_MASKED_EQ"
+ }
+ ],
+ "comment": "",
+ "includes": null,
+ "excludes": {
+ "caps": [
+ "CAP_SYS_ADMIN"
+ ],
+ "arches": [
+ "s390",
+ "s390x"
+ ]
+ }
+ },
+ {
+ "names": [
+ "clone"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [
+ {
+ "index": 1,
+ "value": 2114060288,
+ "valueTwo": 0,
+ "op": "SCMP_CMP_MASKED_EQ"
+ }
+ ],
+ "comment": "s390 parameter ordering for clone is different",
+ "includes": {
+ "arches": [
+ "s390",
+ "s390x"
+ ]
+ },
+ "excludes": {
+ "caps": [
+ "CAP_SYS_ADMIN"
+ ]
+ }
+ },
+ {
+ "names": [
+ "reboot"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "caps": [
+ "CAP_SYS_BOOT"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "chroot"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "caps": [
+ "CAP_SYS_CHROOT"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "delete_module",
+ "init_module",
+ "finit_module",
+ "query_module"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "caps": [
+ "CAP_SYS_MODULE"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "acct"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "caps": [
+ "CAP_SYS_PACCT"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "kcmp",
+ "process_vm_readv",
+ "process_vm_writev",
+ "ptrace"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "caps": [
+ "CAP_SYS_PTRACE"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "iopl",
+ "ioperm"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "caps": [
+ "CAP_SYS_RAWIO"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "settimeofday",
+ "stime",
+ "clock_settime"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "caps": [
+ "CAP_SYS_TIME"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "vhangup"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "caps": [
+ "CAP_SYS_TTY_CONFIG"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "get_mempolicy",
+ "mbind",
+ "set_mempolicy"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "caps": [
+ "CAP_SYS_NICE"
+ ]
+ },
+ "excludes": null
+ },
+ {
+ "names": [
+ "syslog"
+ ],
+ "action": "SCMP_ACT_ALLOW",
+ "args": [],
+ "comment": "",
+ "includes": {
+ "caps": [
+ "CAP_SYSLOG"
+ ]
+ },
+ "excludes": null
+ }
+ ]
+}
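
This is a standard Docker seccomp profile: any syscall not listed falls through to defaultAction (SCMP_ACT_ERRNO), and entries may gate additional syscalls on kernel version, architecture, or capabilities via their includes/excludes fields. A minimal sketch of inspecting the profile with the standard library (the relative path assumes the wpt checkout as the working directory):

import json

with open("tools/docker/seccomp.json") as f:
    profile = json.load(f)

# Syscalls allowed unconditionally, i.e. entries without an "includes" gate
baseline = {name
            for entry in profile["syscalls"]
            if entry["action"] == "SCMP_ACT_ALLOW" and not entry["includes"]
            for name in entry["names"]}
print(profile["defaultAction"], len(baseline))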
diff --git a/testing/web-platform/tests/tools/docker/start.sh b/testing/web-platform/tests/tools/docker/start.sh
new file mode 100755
index 0000000000..3427670cc9
--- /dev/null
+++ b/testing/web-platform/tests/tools/docker/start.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+set -ex
+
+REMOTE=${1:-https://github.com/web-platform-tests/wpt}
+REF=${2:-master}
+
+cd ~
+
+if [ -e /dev/kvm ]; then
+ # If kvm is present, ensure that the test user can access it.
+ # Ideally this could be done by adding the test user to the
+ # owning group, but then we need to re-login to evaluate the
+ # group membership. This chmod doesn't affect the host.
+ sudo chmod a+rw /dev/kvm
+fi
+
+if [ ! -d web-platform-tests ]; then
+ mkdir web-platform-tests
+ cd web-platform-tests
+
+ git init
+ git remote add origin ${REMOTE}
+
+ # Initially we just fetch 50 commits in order to save several minutes of fetching
+ retry git fetch --quiet --depth=50 --tags origin ${REF}:task_head
+
+ git checkout --quiet task_head
+fi
diff --git a/testing/web-platform/tests/tools/flake8.ini b/testing/web-platform/tests/tools/flake8.ini
new file mode 100644
index 0000000000..9b745acccd
--- /dev/null
+++ b/testing/web-platform/tests/tools/flake8.ini
@@ -0,0 +1,26 @@
+[flake8]
+select = E,W,F,N
+# E128: continuation line under-indented for visual indent
+# E129: visually indented line with same indent as next logical line
+# E226: missing whitespace around arithmetic operator
+# E231: missing whitespace after ‘,’, ‘;’, or ‘:’
+# E251: unexpected spaces around keyword / parameter equals
+# E265: block comment should start with ‘# ‘
+# E302: expected 2 blank lines, found 0
+# E303: too many blank lines (3)
+# E305: expected 2 blank lines after end of function or class
+# E402: module level import not at top of file
+# E731: do not assign a lambda expression, use a def
+# W504: line break after binary operator
+# W601: .has_key() is deprecated, use ‘in’
+# N801: class names should use CapWords convention
+# N802: function name should be lowercase
+# N806: variable in function should be lowercase
+# N818: exception should be named with an Error suffix
+ignore = E128,E129,E226,E231,E251,E265,E302,E303,E305,E402,E731,W504,W601,N801,N802,N806,N818
+exclude =
+ .tox,
+ third_party,
+ wptserve/docs/conf.py,
+ wptserve/tests/functional/docroot/invalid.py
+max-line-length = 141
diff --git a/testing/web-platform/tests/tools/gitignore/__init__.py b/testing/web-platform/tests/tools/gitignore/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/gitignore/__init__.py
diff --git a/testing/web-platform/tests/tools/gitignore/gitignore.py b/testing/web-platform/tests/tools/gitignore/gitignore.py
new file mode 100644
index 0000000000..2e41a9fc65
--- /dev/null
+++ b/testing/web-platform/tests/tools/gitignore/gitignore.py
@@ -0,0 +1,292 @@
+import re
+import os
+import itertools
+from collections import defaultdict
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Any
+ from typing import Dict
+ from typing import Iterable
+ from typing import List
+ from typing import MutableMapping
+ from typing import Optional
+ from typing import Pattern
+ from typing import Tuple
+ from typing import TypeVar
+ from typing import Union
+ from typing import cast
+
+ T = TypeVar('T')
+
+
+end_space = re.compile(r"([^\\]\s)*$")
+
+
+def fnmatch_translate(pat):
+ # type: (bytes) -> Tuple[bool, Pattern[bytes]]
+ parts = []
+ seq = None # type: Optional[int]
+ i = 0
+ any_char = b"[^/]"
+ if pat[0:1] == b"/":
+ parts.append(b"^")
+ pat = pat[1:]
+ else:
+ # By default match the entire path up to a /,
+ # but if / doesn't appear in the pattern we mark it as
+ # a name pattern and just produce a pattern that matches against
+ # the filename
+ parts.append(b"^(?:.*/)?")
+
+ name_pattern = True
+ if pat[-1:] == b"/":
+ # If the last character is / match this directory or any subdirectory
+ pat = pat[:-1]
+ suffix = b"(?:/|$)"
+ else:
+ suffix = b"$"
+ while i < len(pat):
+ c = pat[i:i+1]
+ if c == b"\\":
+ if i < len(pat) - 1:
+ i += 1
+ c = pat[i:i+1]
+ parts.append(re.escape(c))
+ else:
+ raise ValueError
+ elif seq is not None:
+ # TODO: this doesn't really handle invalid sequences in the right way
+ if c == b"]":
+ seq = None
+ if parts[-1] == b"[":
+ parts = parts[:-1]
+ elif parts[-1] == b"^" and parts[-2] == b"[":
+ raise ValueError
+ else:
+ parts.append(c)
+ elif c == b"-":
+ parts.append(c)
+ elif c == b"[":
+ raise ValueError
+ else:
+ parts.append(re.escape(c))
+ elif c == b"[":
+ parts.append(b"[")
+ if i < len(pat) - 1 and pat[i+1:i+2] in (b"!", b"^"):
+ parts.append(b"^")
+ i += 1
+ seq = i
+ elif c == b"*":
+ if i < len(pat) - 1 and pat[i+1:i+2] == b"*":
+ if i > 0 and pat[i-1:i] != b"/":
+ raise ValueError
+ parts.append(b".*")
+ i += 1
+ if i < len(pat) - 1 and pat[i+1:i+2] != b"/":
+ raise ValueError
+ else:
+ parts.append(any_char + b"*")
+ elif c == b"?":
+ parts.append(any_char)
+ elif c == b"/" and not seq:
+ name_pattern = False
+ parts.append(c)
+ else:
+ parts.append(re.escape(c))
+ i += 1
+
+ if name_pattern:
+ parts[0] = b"^"
+
+ if seq is not None:
+ raise ValueError
+ parts.append(suffix)
+ try:
+ return name_pattern, re.compile(b"".join(parts))
+ except Exception:
+ raise ValueError
+
+# Regexp matching rules that have to be converted to patterns
+pattern_re = re.compile(br".*[\*\[\?]")
+
+
+def parse_line(line):
+ # type: (bytes) -> Optional[Tuple[bool, bool, bool, Union[Tuple[bytes, ...], Tuple[bool, Pattern[bytes]]]]]
+ line = line.rstrip()
+ if not line or line[0:1] == b"#":
+ return None
+
+ invert = line[0:1] == b"!"
+ if invert:
+ line = line[1:]
+
+ dir_only = line[-1:] == b"/"
+
+ if dir_only:
+ line = line[:-1]
+
+ # Could make a special case for **/foo, but we don't have any patterns like that
+ if not invert and not pattern_re.match(line):
+ literal = True
+ pattern = tuple(line.rsplit(b"/", 1)) # type: Union[Tuple[bytes, ...], Tuple[bool, Pattern[bytes]]]
+ else:
+ pattern = fnmatch_translate(line)
+ literal = False
+
+ return invert, dir_only, literal, pattern
+
+
+class PathFilter:
+ def __init__(self, root, extras=None, cache=None):
+ # type: (bytes, Optional[List[bytes]], Optional[MutableMapping[bytes, bool]]) -> None
+ if root:
+ ignore_path = os.path.join(root, b".gitignore") # type: Optional[bytes]
+ else:
+ ignore_path = None
+ if not ignore_path and not extras:
+ self.trivial = True
+ return
+ self.trivial = False
+
+ self.literals_file = defaultdict(dict) # type: Dict[Optional[bytes], Dict[bytes, List[Tuple[bool, Pattern[bytes]]]]]
+ self.literals_dir = defaultdict(dict) # type: Dict[Optional[bytes], Dict[bytes, List[Tuple[bool, Pattern[bytes]]]]]
+ self.patterns_file = [] # type: List[Tuple[Tuple[bool, Pattern[bytes]], List[Tuple[bool, Pattern[bytes]]]]]
+ self.patterns_dir = [] # type: List[Tuple[Tuple[bool, Pattern[bytes]], List[Tuple[bool, Pattern[bytes]]]]]
+
+ if cache is None:
+ cache = {}
+ self.cache = cache # type: MutableMapping[bytes, bool]
+
+ if extras is None:
+ extras = []
+
+ if ignore_path and os.path.exists(ignore_path):
+ args = ignore_path, extras # type: Tuple[Optional[bytes], List[bytes]]
+ else:
+ args = None, extras
+ self._read_ignore(*args)
+
+ def _read_ignore(self, ignore_path, extras):
+ # type: (Optional[bytes], List[bytes]) -> None
+ if ignore_path is not None:
+ with open(ignore_path, "rb") as f:
+ for line in f:
+ self._read_line(line)
+ for line in extras:
+ self._read_line(line)
+
+ def _read_line(self, line):
+ # type: (bytes) -> None
+ parsed = parse_line(line)
+ if not parsed:
+ return
+ invert, dir_only, literal, rule = parsed
+
+ if invert:
+ # For exclude rules, we attach the rules to all preceding patterns, so
+ # that we can match patterns out of order and check if they were later
+ # overridden by an exclude rule
+ assert not literal
+ if MYPY:
+ rule = cast(Tuple[bool, Pattern[bytes]], rule)
+ if not dir_only:
+ rules_iter = itertools.chain(
+ itertools.chain(*(item.items() for item in self.literals_dir.values())),
+ itertools.chain(*(item.items() for item in self.literals_file.values())),
+ self.patterns_dir,
+ self.patterns_file) # type: Iterable[Tuple[Any, List[Tuple[bool, Pattern[bytes]]]]]
+ else:
+ rules_iter = itertools.chain(
+ itertools.chain(*(item.items() for item in self.literals_dir.values())),
+ self.patterns_dir)
+
+ for rules in rules_iter:
+ rules[1].append(rule)
+ else:
+ if literal:
+ if MYPY:
+ rule = cast(Tuple[bytes, ...], rule)
+ if len(rule) == 1:
+ dir_name, pattern = None, rule[0] # type: Tuple[Optional[bytes], bytes]
+ else:
+ dir_name, pattern = rule
+ self.literals_dir[dir_name][pattern] = []
+ if not dir_only:
+ self.literals_file[dir_name][pattern] = []
+ else:
+ if MYPY:
+ rule = cast(Tuple[bool, Pattern[bytes]], rule)
+ self.patterns_dir.append((rule, []))
+ if not dir_only:
+ self.patterns_file.append((rule, []))
+
+ def filter(self,
+ iterator # type: Iterable[Tuple[bytes, List[Tuple[bytes, T]], List[Tuple[bytes, T]]]]
+ ):
+ # type: (...) -> Iterable[Tuple[bytes, List[Tuple[bytes, T]], List[Tuple[bytes, T]]]]
+ empty = {} # type: Dict[Any, Any]
+ for dirpath, dirnames, filenames in iterator:
+ orig_dirpath = dirpath
+ path_sep = os.path.sep.encode()
+ if path_sep != b"/":
+ dirpath = dirpath.replace(path_sep, b"/")
+
+ keep_dirs = [] # type: List[Tuple[bytes, T]]
+ keep_files = [] # type: List[Tuple[bytes, T]]
+
+ for iter_items, literals, patterns, target, suffix in [
+ (dirnames, self.literals_dir, self.patterns_dir, keep_dirs, b"/"),
+ (filenames, self.literals_file, self.patterns_file, keep_files, b"")]:
+ for item in iter_items:
+ name = item[0]
+ if dirpath:
+ path = b"%s/%s" % (dirpath, name) + suffix
+ else:
+ path = name + suffix
+ if path in self.cache:
+ if not self.cache[path]:
+ target.append(item)
+ continue
+ for rule_dir in [None, dirpath if dirpath != b"." else b""]:
+ if name in literals.get(rule_dir, empty):
+ exclude = literals[rule_dir][name]
+ if not any(rule.match(name if name_only else path)
+ for name_only, rule in exclude):
+ # Skip this item
+ self.cache[path] = True
+ break
+ else:
+ for (component_only, pattern), exclude in patterns:
+ if component_only:
+ match = pattern.match(name)
+ else:
+ match = pattern.match(path)
+ if match:
+ if not any(rule.match(name if name_only else path)
+ for name_only, rule in exclude):
+ # Skip this item
+ self.cache[path] = True
+ break
+ else:
+ self.cache[path] = False
+ target.append(item)
+
+ dirnames[:] = keep_dirs
+ assert not any(b".git" == name for name, _ in dirnames)
+ yield orig_dirpath, dirnames, keep_files
+
+ def __call__(self,
+ iterator # type: Iterable[Tuple[bytes, List[Tuple[bytes, T]], List[Tuple[bytes, T]]]]
+ ):
+ # type: (...) -> Iterable[Tuple[bytes, List[Tuple[bytes, T]], List[Tuple[bytes, T]]]]
+ if self.trivial:
+ return iterator
+
+ return self.filter(iterator)
+
+
+def has_ignore(dirpath):
+ # type: (bytes) -> bool
+ return os.path.exists(os.path.join(dirpath, b".gitignore"))
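
A small usage sketch of the module above (the import path, patterns, and file names are illustrative); directory and file entries are (name, stat) pairs, and the stat-like second element is carried through untouched:

from tools.gitignore.gitignore import fnmatch_translate, PathFilter  # assumed import path

name_only, regexp = fnmatch_translate(b"*.pyc")
assert name_only                  # no "/" in the pattern, so it matches against basenames
assert regexp.match(b"foo.pyc")

f = PathFilter(None, extras=[b"obj/", b"*.pyc"])
tree = [(b"", [(b"obj", None), (b"src", None)], [(b"a.py", None), (b"a.pyc", None)])]
for dirpath, dirs, files in f(tree):
    assert [d[0] for d in dirs] == [b"src"]      # "obj/" filtered out the obj directory
    assert [x[0] for x in files] == [b"a.py"]    # "*.pyc" filtered out a.pyc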
diff --git a/testing/web-platform/tests/tools/gitignore/tests/__init__.py b/testing/web-platform/tests/tools/gitignore/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/gitignore/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/gitignore/tests/test_gitignore.py b/testing/web-platform/tests/tools/gitignore/tests/test_gitignore.py
new file mode 100644
index 0000000000..fa7c915d2b
--- /dev/null
+++ b/testing/web-platform/tests/tools/gitignore/tests/test_gitignore.py
@@ -0,0 +1,113 @@
+# mypy: allow-untyped-defs
+
+import pytest
+
+from ..gitignore import fnmatch_translate, PathFilter
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Tuple
+ from typing import Iterable
+ from typing import Sequence
+
+match_data = [
+ (b"foo", True, [b"a/foo", b"foo"]),
+ (b"*.a", True, [b"foo.a", b"a/foo.a", b"a/b/foo.a", b"a.a/foo.a"]),
+ (b"*.py[co]", True, [b"a.pyc", b"a.pyo", b"a/b/c.pyc"]),
+ (b"\\#*", True, [b"#a", b"a/#b"]),
+ (b"*#", True, [b"a#", b"a/b#", b"#a#"]),
+ (b"/*.c", True, [b"a.c", b".c"]),
+ (b"**/b", False, [b"a/b", b"a/c/b"]),
+ (b"*b", True, [b"ab"]),
+ (b"*b", True, [b"a/b"]),
+ (b"**/b", False, [b"a/b"]),
+ (b"a/", True, [b"a"]),
+ (b"a[/]b", True, []),
+ (b"**/b", False, [b"a/c/b"]),
+ (b"a?c", True, [b"abc"]),
+ (b"a[^b]c", True, [b"acc"]),
+ (b"a[b-c]c", True, [b"abc", b"acc"]),
+ (b"a[]c", True, [b"ac"]),
+] # type: Sequence[Tuple[bytes, bool, Iterable[bytes]]]
+
+mismatch_data = [
+ (b"foo", True, [b"foob", b"afoo"]),
+ (b"*.a", True, [b"a", b"foo:a", b"a.a/foo"]),
+ (b"*.py[co]", True, [b"a.pyd", b"pyo", b"a.py"]),
+ (b"a", True, [b"ab"]),
+ (b"a?c", True, [b"ac", b"abbc"]),
+ (b"a[^b]c", True, [b"abc"]),
+ (b"a[b-c]c", True, [b"adc"]),
+] # type: Sequence[Tuple[bytes, bool, Iterable[bytes]]]
+
+invalid_data = [
+ b"[a",
+ b"***/foo",
+ b"a\\",
+ b"**b",
+ b"b**/",
+ b"[[]",
+ b"a[^]c",
+]
+
+filter_data = [
+ ([b"foo", b"bar/", b"/a", b"*.py"],
+ [(b"", [b"foo", b"bar", b"baz"], [b"a"]),
+ (b"baz", [b"a"], [b"foo", b"bar"])],
+ [([b"baz"], []),
+ ([b"a"], [b"bar"])]),
+ ([b"#foo", b"", b"a*", b"!a.py"],
+ [(b"", [b"foo"], [b"a", b"a.foo", b"a.py"])],
+ [([b"foo"], [b"a.py"])]),
+ ([b"a.foo", b"!a.py"],
+ [(b"", [b"foo"], [b"a", b"a.foo", b"a.py"])],
+ [([b"foo"], [b"a", b"a.py"])]),
+]
+
+
+def expand_data(compact_data):
+ # type: (Sequence[Tuple[bytes, bool, Iterable[bytes]]]) -> Iterable[Tuple[bytes, bool, bytes]]
+ for pattern, name_only, inputs in compact_data:
+ for input in inputs:
+ yield pattern, name_only, input
+
+
+@pytest.mark.parametrize("pattern, name_only, input", expand_data(match_data))
+def tests_match(pattern, name_only, input):
+ name_only_result, regexp = fnmatch_translate(pattern)
+ assert name_only_result == name_only
+ if name_only:
+ input = input.rsplit(b"/", 1)[-1]
+ assert regexp.match(input) is not None
+
+
+@pytest.mark.parametrize("pattern, name_only, input", expand_data(mismatch_data))
+def tests_no_match(pattern, name_only, input):
+ name_only_result, regexp = fnmatch_translate(pattern)
+ assert name_only_result == name_only
+ if name_only:
+ input = input.rsplit(b"/", 1)[-1]
+ assert regexp.match(input) is None
+
+
+@pytest.mark.parametrize("pattern", invalid_data)
+def tests_invalid(pattern):
+ with pytest.raises(ValueError):
+ fnmatch_translate(pattern)
+
+
+@pytest.mark.parametrize("rules, input, expected", filter_data)
+def test_path_filter(rules, input, expected):
+ f = PathFilter(None, rules)
+ # Add some fake stat data
+ for i, item in enumerate(input):
+ repl = [input[i][0]]
+ for j in [1, 2]:
+ repl.append([(name, None) for name in input[i][j]])
+ input[i] = tuple(repl)
+
+ for i, output in enumerate(f(input)):
+ assert output[0] == input[i][0]
+ for j in [1, 2]:
+ assert [item[0] for item in output[j]] == expected[i][j-1]
diff --git a/testing/web-platform/tests/tools/lint/__init__.py b/testing/web-platform/tests/tools/lint/__init__.py
new file mode 100644
index 0000000000..f3d12ed9a7
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/__init__.py
@@ -0,0 +1 @@
+from . import lint # noqa: F401
diff --git a/testing/web-platform/tests/tools/lint/commands.json b/testing/web-platform/tests/tools/lint/commands.json
new file mode 100644
index 0000000000..a8e9844faf
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/commands.json
@@ -0,0 +1,3 @@
+{"lint":
+ {"path": "lint.py", "script": "main", "parser": "create_parser", "help": "Run the lint",
+ "virtualenv": false}}
diff --git a/testing/web-platform/tests/tools/lint/fnmatch.py b/testing/web-platform/tests/tools/lint/fnmatch.py
new file mode 100644
index 0000000000..143cb436f2
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/fnmatch.py
@@ -0,0 +1,40 @@
+import fnmatch as _stdlib_fnmatch
+import os
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Iterable
+ from typing import List
+ from typing import Text
+
+
+__all__ = ["fnmatch", "fnmatchcase", "filter", "translate"]
+
+
+def fnmatch(name, pat):
+ # type: (Text, Text) -> bool
+ name = os.path.normcase(name)
+ pat = os.path.normcase(pat)
+ return fnmatchcase(name, pat)
+
+
+def fnmatchcase(name, pat):
+ # type: (Text, Text) -> bool
+ if '?' not in pat and '[' not in pat:
+ wildcards = pat.count("*")
+ if wildcards == 0:
+ return name == pat
+ elif wildcards == 1 and pat[0] == "*":
+ return name.endswith(pat[1:])
+ elif wildcards == 1 and pat[-1] == "*":
+ return name.startswith(pat[:-1])
+ return _stdlib_fnmatch.fnmatchcase(name, pat)
+
+
+def filter(names, pat):
+ # type: (Iterable[Text], Text) -> List[Text]
+ return [n for n in names if fnmatch(n, pat)]
+
+
+translate = _stdlib_fnmatch.translate
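
The point of the wrapper is to skip compiling a regexp for the common cases of a pure literal or a single leading or trailing "*"; the fast paths are intended to agree with the stdlib fallback. A quick sketch (paths are made up):

from tools.lint.fnmatch import fnmatch  # assumed import path

assert fnmatch("css/foo.html", "css/*")        # single trailing "*": startswith fast path
assert fnmatch("foo.any.js", "*.any.js")       # single leading "*": endswith fast path
assert not fnmatch("foo.html", "bar.html")     # no wildcard: plain string equality
assert fnmatch("a-b.html", "a-?.html")         # "?" present: stdlib fnmatchcase fallback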
diff --git a/testing/web-platform/tests/tools/lint/lint.py b/testing/web-platform/tests/tools/lint/lint.py
new file mode 100644
index 0000000000..f51e451a4d
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/lint.py
@@ -0,0 +1,1144 @@
+import abc
+import argparse
+import ast
+import json
+import logging
+import multiprocessing
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+from collections import defaultdict
+from urllib.parse import urlsplit, urljoin
+
+from . import fnmatch
+from . import rules
+from .. import localpaths
+from ..ci.tc.github_checks_output import get_gh_checks_outputter, GitHubChecksOutputter
+from ..gitignore.gitignore import PathFilter
+from ..wpt import testfiles
+from ..manifest.vcs import walk
+
+from ..manifest.sourcefile import SourceFile, js_meta_re, python_meta_re, space_chars, get_any_variants
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import IO
+ from typing import Iterable
+ from typing import List
+ from typing import Optional
+ from typing import Sequence
+ from typing import Set
+ from typing import Text
+ from typing import Tuple
+ from typing import Type
+ from typing import TypeVar
+
+ # The Ignorelist is a two level dictionary. The top level is indexed by
+ # error names (e.g. 'TRAILING WHITESPACE'). Each of those then has a map of
+ # file patterns (e.g. 'foo/*') to a set of specific line numbers for the
+ # exception. The line numbers are optional; if missing, the error is
+ # ignored for the entire file.
+ Ignorelist = Dict[str, Dict[str, Set[Optional[int]]]]
+
+ # Define an arbitrary typevar
+ T = TypeVar("T")
+
+ try:
+ from xml.etree import cElementTree as ElementTree
+ except ImportError:
+ from xml.etree import ElementTree as ElementTree # type: ignore
+
+
+logger = None # type: Optional[logging.Logger]
+
+
+def setup_logging(prefix=False):
+ # type: (bool) -> None
+ global logger
+ if logger is None:
+ logger = logging.getLogger(os.path.basename(os.path.splitext(__file__)[0]))
+ handler = logging.StreamHandler(sys.stdout) # type: logging.Handler
+ # Only add a handler if the parent logger is missing a handler
+ parent = logger.parent
+ assert isinstance(parent, logging.Logger)
+ if parent and len(parent.handlers) == 0:
+ handler = logging.StreamHandler(sys.stdout)
+ logger.addHandler(handler)
+ if prefix:
+ format = logging.BASIC_FORMAT
+ else:
+ format = "%(message)s"
+ formatter = logging.Formatter(format)
+ for handler in logger.handlers:
+ handler.setFormatter(formatter)
+ logger.setLevel(logging.DEBUG)
+
+
+setup_logging()
+
+
+ERROR_MSG = """You must fix all errors; for details on how to fix them, see
+https://web-platform-tests.org/writing-tests/lint-tool.html
+
+However, instead of fixing a particular error, it's sometimes
+OK to add a line to the lint.ignore file in the root of the
+web-platform-tests directory to make the lint tool ignore it.
+
+For example, to make the lint tool ignore all '%s'
+errors in the %s file,
+you could add the following line to the lint.ignore file.
+
+%s: %s"""
+
+
+def all_filesystem_paths(repo_root, subdir=None):
+ # type: (Text, Optional[Text]) -> Iterable[Text]
+ path_filter = PathFilter(repo_root.encode("utf8"),
+ extras=[b".git/"])
+ if subdir:
+ expanded_path = subdir.encode("utf8")
+ subdir_str = expanded_path
+ else:
+ expanded_path = repo_root.encode("utf8")
+ for dirpath, dirnames, filenames in path_filter(walk(expanded_path)):
+ for filename, _ in filenames:
+ path = os.path.join(dirpath, filename)
+ if subdir:
+ path = os.path.join(subdir_str, path)
+ assert not os.path.isabs(path), path
+ yield path.decode("utf8")
+
+
+def _all_files_equal(paths):
+ # type: (Iterable[Text]) -> bool
+ """
+ Checks all the paths are files that are byte-for-byte identical
+
+ :param paths: the list of paths to compare
+ :returns: True if they are all identical
+ """
+ paths = list(paths)
+ if len(paths) < 2:
+ return True
+
+ first = paths.pop()
+ size = os.path.getsize(first)
+ if any(os.path.getsize(path) != size for path in paths):
+ return False
+
+ # Chunk this to avoid eating up memory and file descriptors
+ bufsize = 4096*4 # 16KB, a "reasonable" number of disk sectors
+ groupsize = 8 # Hypothesised to be large enough in the common case that everything fits in one group
+ with open(first, "rb") as first_f:
+ for start in range(0, len(paths), groupsize):
+ path_group = paths[start:start+groupsize]
+ first_f.seek(0)
+ try:
+ files = [open(x, "rb") for x in path_group]
+ for _ in range(0, size, bufsize):
+ a = first_f.read(bufsize)
+ for f in files:
+ b = f.read(bufsize)
+ if a != b:
+ return False
+ finally:
+ for f in files:
+ f.close()
+
+ return True
+
+
+def check_path_length(repo_root, path):
+ # type: (Text, Text) -> List[rules.Error]
+ if len(path) + 1 > 150:
+ return [rules.PathLength.error(path, (path, len(path) + 1))]
+ return []
+
+
+def check_file_type(repo_root, path):
+ # type: (Text, Text) -> List[rules.Error]
+ if os.path.islink(path):
+ return [rules.FileType.error(path, (path, "symlink"))]
+ return []
+
+
+def check_worker_collision(repo_root, path):
+ # type: (Text, Text) -> List[rules.Error]
+ endings = [(".any.html", ".any.js"),
+ (".any.worker.html", ".any.js"),
+ (".worker.html", ".worker.js")]
+ for path_ending, generated in endings:
+ if path.endswith(path_ending):
+ return [rules.WorkerCollision.error(path, (path_ending, generated))]
+ return []
+
+
+def check_gitignore_file(repo_root, path):
+ # type: (Text, Text) -> List[rules.Error]
+ if not path.endswith(".gitignore"):
+ return []
+
+ path_parts = path.split(os.path.sep)
+ if len(path_parts) == 1:
+ return []
+
+ if path_parts[-1] != ".gitignore":
+ return []
+
+ if (path_parts[0] in ["tools", "docs"] or
+ path_parts[:2] == ["resources", "webidl2"] or
+ path_parts[:3] == ["css", "tools", "apiclient"]):
+ return []
+
+ return [rules.GitIgnoreFile.error(path)]
+
+
+def check_mojom_js(repo_root, path):
+ # type: (Text, Text) -> List[rules.Error]
+ if path.endswith(".mojom.js"):
+ return [rules.MojomJSFile.error(path)]
+ return []
+
+
+def check_ahem_copy(repo_root, path):
+ # type: (Text, Text) -> List[rules.Error]
+ lpath = path.lower()
+ if "ahem" in lpath and lpath.endswith(".ttf"):
+ return [rules.AhemCopy.error(path)]
+ return []
+
+
+def check_tentative_directories(repo_root, path):
+ # type: (Text, Text) -> List[rules.Error]
+ path_parts = path.split(os.path.sep)
+ for directory in path_parts[:-1]:
+ if "tentative" in directory and directory != "tentative":
+ return [rules.TentativeDirectoryName.error(path)]
+ return []
+
+
+def check_git_ignore(repo_root, paths):
+ # type: (Text, List[Text]) -> List[rules.Error]
+ errors = []
+
+ with tempfile.TemporaryFile('w+', newline='') as f:
+ for path in paths:
+ f.write('%s\n' % os.path.join(repo_root, path))
+ f.seek(0)
+ try:
+ matches = subprocess.check_output(
+ ["git", "check-ignore", "--verbose", "--no-index", "--stdin"], stdin=f)
+ for match in matches.strip().split(b'\n'):
+ match_filter, path_bytes = match.split()
+ _, _, filter_string = match_filter.split(b':')
+ # If the matching filter reported by check-ignore is a special-case exception,
+ # that's fine. Otherwise, it requires a new special-case exception.
+ if filter_string[0:1] != b'!':
+ path = path_bytes.decode("utf8")
+ errors.append(rules.IgnoredPath.error(path, (path,)))
+ except subprocess.CalledProcessError:
+ # Nonzero return code means that no match exists.
+ pass
+ return errors
+
+
+drafts_csswg_re = re.compile(r"https?\:\/\/drafts\.csswg\.org\/([^/?#]+)")
+w3c_tr_re = re.compile(r"https?\:\/\/www\.w3c?\.org\/TR\/([^/?#]+)")
+w3c_dev_re = re.compile(r"https?\:\/\/dev\.w3c?\.org\/[^/?#]+\/([^/?#]+)")
+
+
+def check_css_globally_unique(repo_root, paths):
+ # type: (Text, List[Text]) -> List[rules.Error]
+ """
+ Checks that CSS filenames are sufficiently unique
+
+ This groups files by path, classifying them as "test", "reference", or
+ "support".
+
+ "test" files must have a unique name across files that share links to the
+ same spec.
+
+ "reference" and "support" files, on the other hand, must have globally
+ unique names.
+
+ :param repo_root: the repository root
+ :param paths: list of all paths
+ :returns: a list of errors found in ``paths``
+
+ """
+ test_files = defaultdict(set) # type: Dict[Text, Set[Text]]
+ ref_files = defaultdict(set) # type: Dict[Text, Set[Text]]
+ support_files = defaultdict(set) # type: Dict[Text, Set[Text]]
+
+ for path in paths:
+ if os.name == "nt":
+ path = path.replace("\\", "/")
+
+ if not path.startswith("css/"):
+ continue
+
+ source_file = SourceFile(repo_root, path, "/")
+ if source_file.name_is_non_test:
+ # If we're name_is_non_test for a reason apart from support, ignore it.
+ # We care about support because all support files in css/ are required to be in
+ # a support directory; see the start of check_parsed.
+ offset = path.find("/support/")
+ if offset == -1:
+ continue
+
+ parts = source_file.dir_path.split(os.path.sep)
+ if (parts[0] in source_file.root_dir_non_test or
+ any(item in source_file.dir_non_test - {"support"} for item in parts) or
+ any(parts[:len(non_test_path)] == list(non_test_path) for non_test_path in source_file.dir_path_non_test)):
+ continue
+
+ support_name = path[offset+1:]
+ support_files[support_name].add(path)
+ elif source_file.name_is_reference:
+ ref_files[source_file.name].add(path)
+ else:
+ test_name = source_file.name # type: Text
+ test_name = test_name.replace('-manual', '')
+ test_files[test_name].add(path)
+
+ errors = []
+
+ for name, colliding in test_files.items():
+ if len(colliding) > 1:
+ if not _all_files_equal([os.path.join(repo_root, x) for x in colliding]):
+ # Only compute by_spec if there are prima-facie collisions because of cost
+ by_spec = defaultdict(set) # type: Dict[Text, Set[Text]]
+ for path in colliding:
+ source_file = SourceFile(repo_root, path, "/")
+ for link in source_file.spec_links:
+ for r in (drafts_csswg_re, w3c_tr_re, w3c_dev_re):
+ m = r.match(link)
+ if m:
+ spec = m.group(1)
+ break
+ else:
+ continue
+ by_spec[spec].add(path)
+
+ for spec, spec_paths in by_spec.items():
+ if not _all_files_equal([os.path.join(repo_root, x) for x in spec_paths]):
+ for x in spec_paths:
+ context1 = (name, spec, ", ".join(sorted(spec_paths)))
+ errors.append(rules.CSSCollidingTestName.error(x,
+ context1))
+
+ for rule_class, d in [(rules.CSSCollidingRefName, ref_files),
+ (rules.CSSCollidingSupportName, support_files)]:
+ for name, colliding in d.items():
+ if len(colliding) > 1:
+ if not _all_files_equal([os.path.join(repo_root, x) for x in colliding]):
+ context2 = (name, ", ".join(sorted(colliding)))
+
+ for x in colliding:
+ errors.append(rule_class.error(x, context2))
+
+ return errors
+
+
+def check_unique_testharness_basenames(repo_root, paths):
+ # type: (Text, List[Text]) -> List[rules.Error]
+ """
+ Checks that all testharness files have unique basename paths.
+
+ The 'basename path' refers to the entire path excluding the extension. For
+ example, 'foo/bar/baz.html' and 'foo/bar/baz.xhtml' have the same basename
+ path, but 'foo/bar/baz.html' and 'foo/qux/baz.html' do not.
+
+ Testharness files with identical basenames have caused issues in downstream
+ infrastructure (see https://github.com/web-platform-tests/wpt/issues/7570),
+ and may cause confusion in general.
+
+ :param repo_root: the repository root
+ :param paths: list of all paths
+ :returns: a list of errors found in ``paths``
+ """
+
+ errors = []
+ file_dict = defaultdict(list)
+ for path in paths:
+ source_file = SourceFile(repo_root, path, "/")
+ if "testharness" not in source_file.possible_types:
+ continue
+ file_name, file_extension = os.path.splitext(path)
+ file_dict[file_name].append(file_extension)
+ for k, v in file_dict.items():
+ if len(v) == 1:
+ continue
+ context = (', '.join(v),)
+ for extension in v:
+ errors.append(rules.DuplicateBasenamePath.error(k + extension, context))
+ return errors
+
+
+def check_unique_case_insensitive_paths(repo_root, paths):
+ # type: (Text, List[Text]) -> List[rules.Error]
+ seen = {} # type: Dict[Text, Text]
+ errors = []
+ for path in paths:
+ lower_path = path.lower()
+ if lower_path in seen:
+ context = (seen[lower_path],)
+ errors.append(rules.DuplicatePathCaseInsensitive.error(path, context))
+ else:
+ seen[lower_path] = path
+ return errors
+
+
+def parse_ignorelist(f):
+ # type: (IO[Text]) -> Tuple[Ignorelist, Set[Text]]
+ """
+ Parse the ignorelist file given by `f`, and return the parsed structure.
+
+ :returns: a tuple of an Ignorelist and a set of files that are completely
+ skipped by the linter (i.e. have a '*' entry).
+ """
+
+ data = defaultdict(lambda:defaultdict(set)) # type: Ignorelist
+ skipped_files = set() # type: Set[Text]
+
+ for line in f:
+ line = line.strip()
+ if not line or line.startswith("#"):
+ continue
+ parts = [item.strip() for item in line.split(":")]
+
+ if len(parts) == 2:
+ error_types_s, file_match = parts
+ line_number = None # type: Optional[int]
+ else:
+ error_types_s, file_match, line_number_s = parts
+ line_number = int(line_number_s)
+
+ error_types = {item.strip() for item in error_types_s.split(",")}
+ file_match = os.path.normcase(file_match)
+
+ if "*" in error_types:
+ skipped_files.add(file_match)
+ else:
+ for error_type in error_types:
+ data[error_type][file_match].add(line_number)
+
+ return data, skipped_files
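
For illustration, a sketch of the structure parse_ignorelist produces (the rule name comes from the Ignorelist comment above; the paths are made up, and normcasing is a no-op on POSIX):

import io
from tools.lint.lint import parse_ignorelist  # assumed import path

data, skipped = parse_ignorelist(io.StringIO(
    "# comments and blank lines are skipped\n"
    "TRAILING WHITESPACE: example/a.html\n"      # ignore this rule anywhere in the file
    "TRAILING WHITESPACE: example/b.html: 12\n"  # ignore it only on line 12
    "*: generated/*\n"                           # skip matching files from linting entirely
))
assert data["TRAILING WHITESPACE"] == {"example/a.html": {None}, "example/b.html": {12}}
assert skipped == {"generated/*"}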
+
+
+def filter_ignorelist_errors(data, errors):
+ # type: (Ignorelist, Sequence[rules.Error]) -> List[rules.Error]
+ """
+ Filter out those errors that are ignored in `data`.
+ """
+
+ if not errors:
+ return []
+
+ skipped = [False for item in range(len(errors))]
+
+ for i, (error_type, msg, path, line) in enumerate(errors):
+ normpath = os.path.normcase(path)
+ # Allow skipping all lint errors except the IGNORED PATH lint,
+ # which explains how to fix it correctly and shouldn't be skipped.
+ if error_type in data and error_type != "IGNORED PATH":
+ wl_files = data[error_type]
+ for file_match, allowed_lines in wl_files.items():
+ if None in allowed_lines or line in allowed_lines:
+ if fnmatch.fnmatchcase(normpath, file_match):
+ skipped[i] = True
+
+ return [item for i, item in enumerate(errors) if not skipped[i]]
+
+
+regexps = [item() for item in # type: ignore
+ [rules.TrailingWhitespaceRegexp,
+ rules.TabsRegexp,
+ rules.CRRegexp,
+ rules.SetTimeoutRegexp,
+ rules.W3CTestOrgRegexp,
+ rules.WebPlatformTestRegexp,
+ rules.Webidl2Regexp,
+ rules.ConsoleRegexp,
+ rules.GenerateTestsRegexp,
+ rules.PrintRegexp,
+ rules.LayoutTestsRegexp,
+ rules.MissingDepsRegexp,
+ rules.SpecialPowersRegexp,
+ rules.AssertThrowsRegexp,
+ rules.PromiseRejectsRegexp,
+ rules.AssertPreconditionRegexp]]
+
+
+def check_regexp_line(repo_root, path, f):
+ # type: (Text, Text, IO[bytes]) -> List[rules.Error]
+ errors = [] # type: List[rules.Error]
+
+ applicable_regexps = [regexp for regexp in regexps if regexp.applies(path)]
+
+ for i, line in enumerate(f):
+ for regexp in applicable_regexps:
+ if regexp.search(line):
+ errors.append((regexp.name, regexp.description, path, i+1))
+
+ return errors
+
+
+def check_parsed(repo_root, path, f):
+ # type: (Text, Text, IO[bytes]) -> List[rules.Error]
+ source_file = SourceFile(repo_root, path, "/", contents=f.read())
+
+ errors = [] # type: List[rules.Error]
+
+ if path.startswith("css/"):
+ if (source_file.type == "support" and
+ not source_file.name_is_non_test and
+ not source_file.name_is_reference):
+ return [rules.SupportWrongDir.error(path)]
+
+ if (source_file.type != "support" and
+ not source_file.name_is_reference and
+ not source_file.name_is_tentative and
+ not source_file.name_is_crashtest and
+ not source_file.spec_links):
+ return [rules.MissingLink.error(path)]
+
+ if source_file.name_is_non_test:
+ return []
+
+ if source_file.markup_type is None:
+ return []
+
+ if source_file.root is None:
+ return [rules.ParseFailed.error(path)]
+
+ if source_file.type == "manual" and not source_file.name_is_manual:
+ errors.append(rules.ContentManual.error(path))
+
+ if source_file.type == "visual" and not source_file.name_is_visual:
+ errors.append(rules.ContentVisual.error(path))
+
+ about_blank_parts = urlsplit("about:blank")
+ for reftest_node in source_file.reftest_nodes:
+ href = reftest_node.attrib.get("href", "").strip(space_chars)
+ parts = urlsplit(href)
+
+ if parts == about_blank_parts:
+ continue
+
+ if (parts.scheme or parts.netloc):
+ errors.append(rules.AbsoluteUrlRef.error(path, (href,)))
+ continue
+
+ ref_url = urljoin(source_file.url, href)
+ ref_parts = urlsplit(ref_url)
+
+ if source_file.url == ref_url:
+ errors.append(rules.SameFileRef.error(path))
+ continue
+
+ assert ref_parts.path != ""
+
+ reference_file = os.path.join(repo_root, ref_parts.path[1:])
+ reference_rel = reftest_node.attrib.get("rel", "")
+
+ if not os.path.isfile(reference_file):
+ errors.append(rules.NonexistentRef.error(path,
+ (reference_rel, href)))
+
+ if len(source_file.timeout_nodes) > 1:
+ errors.append(rules.MultipleTimeout.error(path))
+
+ for timeout_node in source_file.timeout_nodes:
+ timeout_value = timeout_node.attrib.get("content", "").lower()
+ if timeout_value != "long":
+ errors.append(rules.InvalidTimeout.error(path, (timeout_value,)))
+
+ required_elements = [] # type: List[Text]
+
+ testharnessreport_nodes = [] # type: List[ElementTree.Element]
+ if source_file.testharness_nodes:
+ test_type = source_file.manifest_items()[0]
+ if test_type not in ("testharness", "manual"):
+ errors.append(rules.TestharnessInOtherType.error(path, (test_type,)))
+ if len(source_file.testharness_nodes) > 1:
+ errors.append(rules.MultipleTestharness.error(path))
+
+ testharnessreport_nodes = source_file.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testharnessreport.js']")
+ if not testharnessreport_nodes:
+ errors.append(rules.MissingTestharnessReport.error(path))
+ else:
+ if len(testharnessreport_nodes) > 1:
+ errors.append(rules.MultipleTestharnessReport.error(path))
+
+ for element in source_file.variant_nodes:
+ if "content" not in element.attrib:
+ errors.append(rules.VariantMissing.error(path))
+ else:
+ variant = element.attrib["content"]
+ if variant != "":
+ if (variant[0] not in ("?", "#") or
+ len(variant) == 1 or
+ (variant[0] == "?" and variant[1] == "#")):
+ errors.append(rules.MalformedVariant.error(path, (path,)))
+
+ required_elements.extend(key for key, value in {"testharness": True,
+ "testharnessreport": len(testharnessreport_nodes) > 0,
+ "timeout": len(source_file.timeout_nodes) > 0}.items()
+ if value)
+
+ testdriver_vendor_nodes = [] # type: List[ElementTree.Element]
+ if source_file.testdriver_nodes:
+ if len(source_file.testdriver_nodes) > 1:
+ errors.append(rules.MultipleTestdriver.error(path))
+
+ testdriver_vendor_nodes = source_file.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testdriver-vendor.js']")
+ if not testdriver_vendor_nodes:
+ errors.append(rules.MissingTestdriverVendor.error(path))
+ else:
+ if len(testdriver_vendor_nodes) > 1:
+ errors.append(rules.MultipleTestdriverVendor.error(path))
+
+ required_elements.append("testdriver")
+ if len(testdriver_vendor_nodes) > 0:
+ required_elements.append("testdriver-vendor")
+
+ if required_elements:
+ seen_elements = defaultdict(bool)
+
+ for elem in source_file.root.iter():
+ if source_file.timeout_nodes and elem == source_file.timeout_nodes[0]:
+ seen_elements["timeout"] = True
+ if seen_elements["testharness"]:
+ errors.append(rules.LateTimeout.error(path))
+
+ elif source_file.testharness_nodes and elem == source_file.testharness_nodes[0]:
+ seen_elements["testharness"] = True
+
+ elif testharnessreport_nodes and elem == testharnessreport_nodes[0]:
+ seen_elements["testharnessreport"] = True
+ if not seen_elements["testharness"]:
+ errors.append(rules.EarlyTestharnessReport.error(path))
+
+ elif source_file.testdriver_nodes and elem == source_file.testdriver_nodes[0]:
+ seen_elements["testdriver"] = True
+
+ elif testdriver_vendor_nodes and elem == testdriver_vendor_nodes[0]:
+ seen_elements["testdriver-vendor"] = True
+ if not seen_elements["testdriver"]:
+ errors.append(rules.EarlyTestdriverVendor.error(path))
+
+ if all(seen_elements[name] for name in required_elements):
+ break
+
+ for element in source_file.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src]"):
+ src = element.attrib["src"]
+
+ def incorrect_path(script, src):
+ # type: (Text, Text) -> bool
+ return (script == src or
+ ("/%s" % script in src and src != "/resources/%s" % script))
+
+ if incorrect_path("testharness.js", src):
+ errors.append(rules.TestharnessPath.error(path))
+
+ if incorrect_path("testharnessreport.js", src):
+ errors.append(rules.TestharnessReportPath.error(path))
+
+ if incorrect_path("testdriver.js", src):
+ errors.append(rules.TestdriverPath.error(path))
+
+ if incorrect_path("testdriver-vendor.js", src):
+ errors.append(rules.TestdriverVendorPath.error(path))
+
+ script_path = None
+ try:
+ script_path = urlsplit(urljoin(source_file.url, src)).path
+ except ValueError:
+ # This happens if the content of src isn't something that looks like a URL to Python
+ pass
+ if (script_path == "/common/reftest-wait.js" and
+ "reftest-wait" not in source_file.root.attrib.get("class", "").split()):
+ errors.append(rules.MissingReftestWait.error(path))
+
+ return errors
+
+class ASTCheck(metaclass=abc.ABCMeta):
+ @abc.abstractproperty
+ def rule(self):
+ # type: () -> Type[rules.Rule]
+ pass
+
+ @abc.abstractmethod
+ def check(self, root):
+ # type: (ast.AST) -> List[int]
+ pass
+
+class OpenModeCheck(ASTCheck):
+ rule = rules.OpenNoMode
+
+ def check(self, root):
+ # type: (ast.AST) -> List[int]
+ errors = []
+ for node in ast.walk(root):
+ if isinstance(node, ast.Call):
+ if hasattr(node.func, "id") and node.func.id in ("open", "file"): # type: ignore
+ if (len(node.args) < 2 and
+ all(item.arg != "mode" for item in node.keywords)):
+ errors.append(node.lineno)
+ return errors
+
+ast_checkers = [item() for item in [OpenModeCheck]]
+
+def check_python_ast(repo_root, path, f):
+ # type: (Text, Text, IO[bytes]) -> List[rules.Error]
+ if not path.endswith(".py"):
+ return []
+
+ try:
+ root = ast.parse(f.read())
+ except SyntaxError as e:
+ return [rules.ParseFailed.error(path, line_no=e.lineno)]
+
+ errors = []
+ for checker in ast_checkers:
+ for lineno in checker.check(root):
+ errors.append(checker.rule.error(path, line_no=lineno))
+ return errors
+
+
+broken_js_metadata = re.compile(br"//\s*META:")
+broken_python_metadata = re.compile(br"#\s*META:")
+
+
+def check_global_metadata(value):
+ # type: (bytes) -> Iterable[Tuple[Type[rules.Rule], Tuple[Any, ...]]]
+ global_values = {item.strip().decode("utf8") for item in value.split(b",") if item.strip()}
+
+ # TODO: this could check for duplicates and such
+ for global_value in global_values:
+ if not get_any_variants(global_value):
+ yield (rules.UnknownGlobalMetadata, ())
+
+
+def check_script_metadata(repo_root, path, f):
+ # type: (Text, Text, IO[bytes]) -> List[rules.Error]
+ if path.endswith((".worker.js", ".any.js")):
+ meta_re = js_meta_re
+ broken_metadata = broken_js_metadata
+ elif path.endswith(".py"):
+ meta_re = python_meta_re
+ broken_metadata = broken_python_metadata
+ else:
+ return []
+
+ done = False
+ errors = []
+ for idx, line in enumerate(f):
+ assert isinstance(line, bytes), line
+
+ m = meta_re.match(line)
+ if m:
+ key, value = m.groups()
+ if key == b"global":
+ for rule_class, context in check_global_metadata(value):
+ errors.append(rule_class.error(path, context, idx + 1))
+ elif key == b"timeout":
+ if value != b"long":
+ errors.append(rules.UnknownTimeoutMetadata.error(path,
+ line_no=idx + 1))
+ elif key not in (b"title", b"script", b"variant", b"quic"):
+ errors.append(rules.UnknownMetadata.error(path,
+ line_no=idx + 1))
+ else:
+ done = True
+
+ if done:
+ if meta_re.match(line):
+ errors.append(rules.StrayMetadata.error(path, line_no=idx + 1))
+ elif meta_re.search(line):
+ errors.append(rules.IndentedMetadata.error(path,
+ line_no=idx + 1))
+ elif broken_metadata.search(line):
+ errors.append(rules.BrokenMetadata.error(path, line_no=idx + 1))
+
+ return errors
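
A rough sketch of driving the checker above (import path and file contents are hypothetical): META comments must form an uninterrupted prologue at the top of a .any.js or .worker.js file, so a META line that appears after ordinary code is reported:

import io
from tools.lint.lint import check_script_metadata  # assumed import path

script = io.BytesIO(
    b"// META: timeout=long\n"
    b"// META: global=window\n"
    b"'use strict';\n"                     # first non-metadata line ends the prologue
    b"// META: script=/common/utils.js\n"  # reported: metadata after the prologue
)
errors = check_script_metadata("/path/to/wpt", "example/example.any.js", script)
assert len(errors) == 1 and errors[0][3] == 4  # one error, reported on line 4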
+
+
+ahem_font_re = re.compile(br"font.*:.*ahem", flags=re.IGNORECASE)
+# Ahem can appear either in the global location or in the support
+# directory for legacy Mozilla imports
+ahem_stylesheet_re = re.compile(br"\/fonts\/ahem\.css|support\/ahem.css",
+ flags=re.IGNORECASE)
+
+
+def check_ahem_system_font(repo_root, path, f):
+ # type: (Text, Text, IO[bytes]) -> List[rules.Error]
+ if not path.endswith((".html", ".htm", ".xht", ".xhtml")):
+ return []
+ contents = f.read()
+ errors = []
+ if ahem_font_re.search(contents) and not ahem_stylesheet_re.search(contents):
+ errors.append(rules.AhemSystemFont.error(path))
+ return errors
+
+
+def check_path(repo_root, path):
+ # type: (Text, Text) -> List[rules.Error]
+ """
+ Runs lints that check the file path.
+
+ :param repo_root: the repository root
+ :param path: the path of the file within the repository
+ :returns: a list of errors found in ``path``
+ """
+
+ errors = []
+ for path_fn in path_lints:
+ errors.extend(path_fn(repo_root, path))
+ return errors
+
+
+def check_all_paths(repo_root, paths):
+ # type: (Text, List[Text]) -> List[rules.Error]
+ """
+ Runs lints that check all paths globally.
+
+ :param repo_root: the repository root
+ :param paths: a list of all the paths within the repository
+ :returns: a list of errors found in ``paths``
+ """
+
+ errors = []
+ for paths_fn in all_paths_lints:
+ errors.extend(paths_fn(repo_root, paths))
+ return errors
+
+
+def check_file_contents(repo_root, path, f=None):
+ # type: (Text, Text, Optional[IO[bytes]]) -> List[rules.Error]
+ """
+ Runs lints that check the file contents.
+
+ :param repo_root: the repository root
+ :param path: the path of the file within the repository
+ :param f: a file-like object with the file contents
+ :returns: a list of errors found in ``f``
+ """
+ if f is None:
+ f = open(os.path.join(repo_root, path), 'rb')
+ with f:
+ errors = []
+ for file_fn in file_lints:
+ errors.extend(file_fn(repo_root, path, f))
+ f.seek(0)
+ return errors
+
+
+def check_file_contents_apply(args):
+ # type: (Tuple[Text, Text]) -> List[rules.Error]
+ return check_file_contents(*args)
+
+
+def output_errors_text(log, errors):
+ # type: (Callable[[Any], None], List[rules.Error]) -> None
+ for error_type, description, path, line_number in errors:
+ pos_string = path
+ if line_number:
+ pos_string += ":%s" % line_number
+ log(f"{pos_string}: {description} ({error_type})")
+
+
+def output_errors_markdown(log, errors):
+ # type: (Callable[[Any], None], List[rules.Error]) -> None
+ if not errors:
+ return
+ heading = """Got lint errors:
+
+| Error Type | Position | Message |
+|------------|----------|---------|"""
+ for line in heading.split("\n"):
+ log(line)
+ for error_type, description, path, line_number in errors:
+ pos_string = path
+ if line_number:
+ pos_string += ":%s" % line_number
+ log(f"{error_type} | {pos_string} | {description} |")
+
+
+def output_errors_json(log, errors):
+ # type: (Callable[[Any], None], List[rules.Error]) -> None
+ for error_type, error, path, line_number in errors:
+ # We use 'print' rather than the log function to ensure that the output
+ # is valid JSON (e.g. with no logger preamble).
+ print(json.dumps({"path": path, "lineno": line_number,
+ "rule": error_type, "message": error}))
+
+
+def output_errors_github_checks(outputter, errors, first_reported):
+ # type: (GitHubChecksOutputter, List[rules.Error], bool) -> None
+ """Output errors to the GitHub Checks output markdown format.
+
+ :param outputter: the GitHub Checks outputter
+ :param errors: a list of error tuples (error type, message, path, line number)
+ :param first_reported: True if these are the first reported errors
+ """
+ if first_reported:
+ outputter.output(
+ "\nChanges in this PR contain lint errors, listed below. These "
+ "errors must either be fixed or added to the list of ignored "
+ "errors; see [the documentation]("
+ "https://web-platform-tests.org/writing-tests/lint-tool.html). "
+ "For help, please tag `@web-platform-tests/wpt-core-team` in a "
+ "comment.\n")
+ outputter.output("```")
+ output_errors_text(outputter.output, errors)
+
+
+def output_error_count(error_count):
+ # type: (Dict[Text, int]) -> None
+ if not error_count:
+ return
+
+ assert logger is not None
+ by_type = " ".join("%s: %d" % item for item in error_count.items())
+ count = sum(error_count.values())
+ logger.info("")
+ if count == 1:
+ logger.info(f"There was 1 error ({by_type})")
+ else:
+ logger.info("There were %d errors (%s)" % (count, by_type))
+
+
+def changed_files(wpt_root):
+ # type: (Text) -> List[Text]
+ revish = testfiles.get_revish(revish=None)
+ changed, _ = testfiles.files_changed(revish, None, include_uncommitted=True, include_new=True)
+ return [os.path.relpath(item, wpt_root) for item in changed]
+
+
+def lint_paths(kwargs, wpt_root):
+ # type: (Dict[Text, Any], Text) -> List[Text]
+ if kwargs.get("paths"):
+ paths = []
+ for path in kwargs.get("paths", []):
+ if os.path.isdir(path):
+ path_dir = list(all_filesystem_paths(wpt_root, path))
+ paths.extend(path_dir)
+ elif os.path.isfile(path):
+ paths.append(os.path.relpath(os.path.abspath(path), wpt_root))
+ elif kwargs["all"]:
+ paths = list(all_filesystem_paths(wpt_root))
+ elif kwargs["paths_file"]:
+ paths = []
+ with open(kwargs["paths_file"], 'r', newline='') as f:
+ for line in f.readlines():
+ path = line.strip()
+ if os.path.isdir(path):
+ path_dir = list(all_filesystem_paths(wpt_root, path))
+ paths.extend(path_dir)
+ elif os.path.isfile(path):
+ paths.append(os.path.relpath(os.path.abspath(path), wpt_root))
+ else:
+ changed_paths = changed_files(wpt_root)
+ force_all = False
+ for path in changed_paths:
+ path = path.replace(os.path.sep, "/")
+ if path == "lint.ignore" or path.startswith("tools/lint/"):
+ force_all = True
+ break
+ paths = (list(changed_paths) if not force_all
+ else list(all_filesystem_paths(wpt_root)))
+
+ return paths
+
+
+def create_parser():
+ # type: () -> argparse.ArgumentParser
+ parser = argparse.ArgumentParser()
+ parser.add_argument("paths", nargs="*",
+ help="List of paths to lint")
+ parser.add_argument("--json", action="store_true",
+ help="Output machine-readable JSON format")
+ parser.add_argument("--markdown", action="store_true",
+ help="Output markdown")
+ parser.add_argument("--repo-root", type=str,
+ help="The WPT directory. Use this "
+ "option if the lint script exists outside the repository")
+ parser.add_argument("--ignore-glob", type=str, action="append",
+ help="Additional file glob to ignore (repeat to add more). "
+ "Globs are matched against paths relative to REPO_ROOT "
+ "using fnmatch, except that path separators are normalized.")
+ parser.add_argument("--all", action="store_true", help="If no paths are passed, try to lint the whole "
+ "working directory, not just files that changed")
+ parser.add_argument("--github-checks-text-file", type=str,
+ help="Path to GitHub checks output file for Taskcluster runs")
+ parser.add_argument("-j", "--jobs", type=int, default=0,
+ help="Level to parallelism to use (defaults to 0, which detects the number of CPUs)")
+ parser.add_argument("--paths-file", help="File containing a list of files to lint, one per line")
+ return parser
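+
+# Illustrative usage (not part of the original source), assuming the usual
+# `./wpt lint` entry point; the paths are hypothetical:
+#
+#     ./wpt lint                       # lint only files that changed
+#     ./wpt lint --all                 # lint the whole working directory
+#     ./wpt lint --json css/foo.html   # machine-readable output for one file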
+
+
+def main(**kwargs):
+ # type: (**Any) -> int
+
+ assert logger is not None
+ if kwargs.get("json") and kwargs.get("markdown"):
+ logger.critical("Cannot specify --json and --markdown")
+ sys.exit(2)
+
+ repo_root = kwargs.get('repo_root') or localpaths.repo_root
+ output_format = {(True, False): "json",
+ (False, True): "markdown",
+ (False, False): "normal"}[(kwargs.get("json", False),
+ kwargs.get("markdown", False))]
+
+ if output_format == "markdown":
+ setup_logging(True)
+
+ paths = lint_paths(kwargs, repo_root)
+
+ ignore_glob = kwargs.get("ignore_glob", [])
+
+ github_checks_outputter = get_gh_checks_outputter(kwargs["github_checks_text_file"])
+
+ jobs = kwargs.get("jobs", 0)
+
+ return lint(repo_root, paths, output_format, ignore_glob, github_checks_outputter, jobs)
+
+
+# best experimental guess at a decent cut-off for using the parallel path
+MIN_FILES_FOR_PARALLEL = 80
+
+
+def lint(repo_root, paths, output_format, ignore_glob=None, github_checks_outputter=None, jobs=0):
+ # type: (Text, List[Text], Text, Optional[List[Text]], Optional[GitHubChecksOutputter], int) -> int
+ error_count = defaultdict(int) # type: Dict[Text, int]
+ last = None
+
+ if jobs == 0:
+ jobs = multiprocessing.cpu_count()
+ if sys.platform == 'win32':
+ # Using too many child processes in Python 3 either hangs or raises a
+ # ValueError exception, and has diminishing returns. Clamp to 56 to
+ # give a margin for error.
+ jobs = min(jobs, 56)
+
+ with open(os.path.join(repo_root, "lint.ignore")) as f:
+ ignorelist, skipped_files = parse_ignorelist(f)
+
+ if ignore_glob:
+ skipped_files |= set(ignore_glob)
+
+ output_errors = {"json": output_errors_json,
+ "markdown": output_errors_markdown,
+ "normal": output_errors_text}[output_format]
+
+ def process_errors(errors):
+ # type: (List[rules.Error]) -> Optional[Tuple[Text, Text]]
+ """
+ Filters and prints the errors, and updates the ``error_count`` object.
+
+ :param errors: a list of error tuples (error type, message, path, line number)
+ :returns: ``None`` if there were no errors, or
+ a tuple of the error type and the path otherwise
+ """
+
+ errors = filter_ignorelist_errors(ignorelist, errors)
+ if not errors:
+ return None
+
+ assert logger is not None
+ output_errors(logger.error, errors)
+
+ if github_checks_outputter:
+ first_output = len(error_count) == 0
+ output_errors_github_checks(github_checks_outputter, errors, first_output)
+
+ for error_type, error, path, line in errors:
+ error_count[error_type] += 1
+
+ return (errors[-1][0], path)
+
+ to_check_content = []
+ skip = set()
+
+ for path in paths:
+ abs_path = os.path.join(repo_root, path)
+ if not os.path.exists(abs_path):
+ skip.add(path)
+ continue
+
+ if any(fnmatch.fnmatch(path, file_match) for file_match in skipped_files):
+ skip.add(path)
+ continue
+
+ errors = check_path(repo_root, path)
+ last = process_errors(errors) or last
+
+ if not os.path.isdir(abs_path):
+ to_check_content.append((repo_root, path))
+
+ paths = [p for p in paths if p not in skip]
+
+ if jobs > 1 and len(to_check_content) >= MIN_FILES_FOR_PARALLEL:
+ pool = multiprocessing.Pool(jobs)
+ # submit this job first, as it's the longest running
+ all_paths_result = pool.apply_async(check_all_paths, (repo_root, paths))
+ # each item tends to be quick, so pass things in large chunks to avoid too much IPC overhead
+ errors_it = pool.imap_unordered(check_file_contents_apply, to_check_content, chunksize=40)
+ pool.close()
+ for errors in errors_it:
+ last = process_errors(errors) or last
+
+ errors = all_paths_result.get()
+ pool.join()
+ last = process_errors(errors) or last
+ else:
+ for item in to_check_content:
+ errors = check_file_contents(*item)
+ last = process_errors(errors) or last
+
+ errors = check_all_paths(repo_root, paths)
+ last = process_errors(errors) or last
+
+ if output_format in ("normal", "markdown"):
+ output_error_count(error_count)
+ if error_count:
+ assert last is not None
+ assert logger is not None
+ for line in (ERROR_MSG % (last[0], last[1], last[0], last[1])).split("\n"):
+ logger.info(line)
+
+ if error_count and github_checks_outputter:
+ github_checks_outputter.output("```")
+
+ return sum(error_count.values())
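+
+# Minimal programmatic sketch (not part of the original source): `main()`
+# above is the normal entry point, but `lint` can also be driven directly,
+# roughly like
+#
+#     failures = lint(repo_root, ["css/foo.html"], "normal")
+#     if failures:
+#         sys.exit(1)
+#
+# where "css/foo.html" is a hypothetical repo-relative path.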
+
+
+path_lints = [check_file_type, check_path_length, check_worker_collision, check_ahem_copy,
+ check_mojom_js, check_tentative_directories, check_gitignore_file]
+all_paths_lints = [check_css_globally_unique, check_unique_testharness_basenames,
+ check_unique_case_insensitive_paths]
+file_lints = [check_regexp_line, check_parsed, check_python_ast, check_script_metadata,
+ check_ahem_system_font]
+
+# Don't break users of the lint that don't have git installed.
+try:
+ subprocess.check_output(["git", "--version"])
+ all_paths_lints += [check_git_ignore]
+except (subprocess.CalledProcessError, FileNotFoundError):
+ print('No git present; skipping .gitignore lint.')
+
+if __name__ == "__main__":
+ args = create_parser().parse_args()
+ error_count = main(**vars(args))
+ if error_count > 0:
+ sys.exit(1)
diff --git a/testing/web-platform/tests/tools/lint/rules.py b/testing/web-platform/tests/tools/lint/rules.py
new file mode 100644
index 0000000000..7b9a54b9db
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/rules.py
@@ -0,0 +1,542 @@
+import abc
+import inspect
+import os
+import re
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Any, List, Match, Optional, Pattern, Text, Tuple, cast
+ Error = Tuple[str, str, str, Optional[int]]
+
+
+def collapse(text):
+ # type: (Text) -> Text
+ return inspect.cleandoc(str(text)).replace("\n", " ")
+
+
+class Rule(metaclass=abc.ABCMeta):
+ @abc.abstractproperty
+ def name(self):
+ # type: () -> Text
+ pass
+
+ @abc.abstractproperty
+ def description(self):
+ # type: () -> Text
+ pass
+
+ to_fix = None # type: Optional[Text]
+
+ @classmethod
+ def error(cls, path, context=(), line_no=None):
+ # type: (Text, Tuple[Any, ...], Optional[int]) -> Error
+ if MYPY:
+ name = cast(str, cls.name)
+ description = cast(str, cls.description)
+ else:
+ name = cls.name
+ description = cls.description
+ description = description % context
+ return (name, description, path, line_no)
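+
+# Illustrative note (not part of the original source): a rule such as
+# PathLength below produces errors via this classmethod, e.g.
+#     PathLength.error("a/very/long/path.html", (path, 151), line_no=None)
+# yields the tuple
+#     ("PATH LENGTH",
+#      "/a/very/long/path.html longer than maximum path length (151 > 150)",
+#      "a/very/long/path.html", None)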
+
+
+class MissingLink(Rule):
+ name = "MISSING-LINK"
+ description = "Testcase file must have a link to a spec"
+ to_fix = """
+ Ensure that there is a `<link rel="help" href="[url]">` for the spec.
+ `MISSING-LINK` is designed to ensure that the CSS build tool can find
+ the tests. Note that the CSS build system is primarily used by
+ [test.csswg.org/](http://test.csswg.org/), which doesn't use
+ `wptserve`, so `*.any.js` and similar tests won't work there; stick
+ with the `.html` equivalent.
+ """
+
+
+class PathLength(Rule):
+ name = "PATH LENGTH"
+ description = "/%s longer than maximum path length (%d > 150)"
+ to_fix = "use shorter filename to rename the test file"
+
+
+class FileType(Rule):
+ name = "FILE TYPE"
+ description = "/%s is an unsupported file type (%s)"
+
+
+class WorkerCollision(Rule):
+ name = "WORKER COLLISION"
+ description = collapse("""
+ path ends with %s which collides with generated tests from %s files
+ """)
+
+
+class GitIgnoreFile(Rule):
+ name = "GITIGNORE"
+ description = ".gitignore found outside the root"
+
+
+class MojomJSFile(Rule):
+ name = "MOJOM-JS"
+ description = "Don't check *.mojom.js files into WPT"
+ to_fix = """
+ Check if the file is already included in mojojs.zip:
+ https://source.chromium.org/chromium/chromium/src/+/master:chrome/tools/build/linux/FILES.cfg
+ If yes, use `loadMojoResources` from `resources/test-only-api.js` to load
+ it; if not, contact ecosystem-infra@chromium.org for adding new files
+ to mojojs.zip.
+ """
+
+
+class AhemCopy(Rule):
+ name = "AHEM COPY"
+ description = "Don't add extra copies of Ahem, use /fonts/Ahem.ttf"
+
+
+class AhemSystemFont(Rule):
+ name = "AHEM SYSTEM FONT"
+ description = "Don't use Ahem as a system font, use /fonts/ahem.css"
+
+
+# TODO: Add tests for this rule
+class IgnoredPath(Rule):
+ name = "IGNORED PATH"
+ description = collapse("""
+ %s matches an ignore filter in .gitignore - please add a .gitignore
+ exception
+ """)
+
+
+class CSSCollidingTestName(Rule):
+ name = "CSS-COLLIDING-TEST-NAME"
+ description = "The filename %s in the %s testsuite is shared by: %s"
+
+
+class CSSCollidingRefName(Rule):
+ name = "CSS-COLLIDING-REF-NAME"
+ description = "The filename %s is shared by: %s"
+
+
+class CSSCollidingSupportName(Rule):
+ name = "CSS-COLLIDING-SUPPORT-NAME"
+ description = "The filename %s is shared by: %s"
+
+
+class SupportWrongDir(Rule):
+ name = "SUPPORT-WRONG-DIR"
+ description = "Support file not in support directory"
+
+
+class ParseFailed(Rule):
+ name = "PARSE-FAILED"
+ description = "Unable to parse file"
+ to_fix = """
+ examine the file to find the causes of any parse errors, and fix them.
+ """
+
+
+class ContentManual(Rule):
+ name = "CONTENT-MANUAL"
+ description = "Manual test whose filename doesn't end in '-manual'"
+
+
+class ContentVisual(Rule):
+ name = "CONTENT-VISUAL"
+ description = "Visual test whose filename doesn't end in '-visual'"
+
+
+class AbsoluteUrlRef(Rule):
+ name = "ABSOLUTE-URL-REF"
+ description = collapse("""
+ Reference test with a reference file specified via an absolute URL:
+ '%s'
+ """)
+
+
+class SameFileRef(Rule):
+ name = "SAME-FILE-REF"
+ description = "Reference test which points at itself as a reference"
+
+
+class NonexistentRef(Rule):
+ name = "NON-EXISTENT-REF"
+ description = collapse("""
+ Reference test with a non-existent '%s' relationship reference: '%s'
+ """)
+
+
+class MultipleTimeout(Rule):
+ name = "MULTIPLE-TIMEOUT"
+ description = "More than one meta name='timeout'"
+ to_fix = """
+ ensure each test file has only one instance of a `<meta
+ name="timeout"...>` element
+ """
+
+
+class InvalidTimeout(Rule):
+ name = "INVALID-TIMEOUT"
+ description = collapse("""
+ Test file with `<meta name='timeout'...>` element that has a `content`
+ attribute whose value is not `long`: %s
+ """)
+ to_fix = "replace the value of the `content` attribute with `long`"
+
+
+class MultipleTestharness(Rule):
+ name = "MULTIPLE-TESTHARNESS"
+ description = "More than one `<script src='/resources/testharness.js'>`"
+ to_fix = """
+ ensure each test has only one `<script
+ src='/resources/testharness.js'>` instance
+ """
+
+
+class MissingReftestWait(Rule):
+ name = "MISSING-REFTESTWAIT"
+ description = "Missing `class=reftest-wait`"
+ to_fix = """
+ ensure tests that include reftest-wait.js also use class=reftest-wait on the root element.
+ """
+
+
+class MissingTestharnessReport(Rule):
+ name = "MISSING-TESTHARNESSREPORT"
+ description = "Missing `<script src='/resources/testharnessreport.js'>`"
+ to_fix = """
+ ensure each test file contains `<script
+ src='/resources/testharnessreport.js'>`
+ """
+
+
+class MultipleTestharnessReport(Rule):
+ name = "MULTIPLE-TESTHARNESSREPORT"
+ description = "More than one `<script src='/resources/testharnessreport.js'>`"
+
+
+class VariantMissing(Rule):
+ name = "VARIANT-MISSING"
+ description = collapse("""
+ Test file with a `<meta name='variant'...>` element that's missing a
+ `content` attribute
+ """)
+ to_fix = """
+ add a `content` attribute with an appropriate value to the `<meta
+ name='variant'...>` element
+ """
+
+
+class MalformedVariant(Rule):
+ name = "MALFORMED-VARIANT"
+ description = collapse("""
+ %s `<meta name=variant>` 'content' attribute must be the empty string
+ or start with '?' or '#'
+ """)
+
+
+class LateTimeout(Rule):
+ name = "LATE-TIMEOUT"
+ description = collapse("""
+ Test file with `<meta name='timeout'...>` element after `<script
+ src='/resources/testharnessreport.js'>` element
+ """)
+ to_fix = """
+ move the `<meta name="timeout"...>` element to precede the `script`
+ element.
+ """
+
+
+class EarlyTestharnessReport(Rule):
+ name = "EARLY-TESTHARNESSREPORT"
+ description = collapse("""
+ Test file has an instance of
+ `<script src='/resources/testharnessreport.js'>` prior to
+ `<script src='/resources/testharness.js'>`
+ """)
+ to_fix = "flip the order"
+
+
+class EarlyTestdriverVendor(Rule):
+ name = "EARLY-TESTDRIVER-VENDOR"
+ description = collapse("""
+ Test file has an instance of
+ `<script src='/resources/testdriver-vendor.js'>` prior to
+ `<script src='/resources/testdriver.js'>`
+ """)
+ to_fix = "flip the order"
+
+
+class MultipleTestdriver(Rule):
+ name = "MULTIPLE-TESTDRIVER"
+ description = "More than one `<script src='/resources/testdriver.js'>`"
+
+
+class MissingTestdriverVendor(Rule):
+ name = "MISSING-TESTDRIVER-VENDOR"
+ description = "Missing `<script src='/resources/testdriver-vendor.js'>`"
+
+
+class MultipleTestdriverVendor(Rule):
+ name = "MULTIPLE-TESTDRIVER-VENDOR"
+ description = "More than one `<script src='/resources/testdriver-vendor.js'>`"
+
+
+class TestharnessPath(Rule):
+ name = "TESTHARNESS-PATH"
+ description = "testharness.js script seen with incorrect path"
+
+
+class TestharnessReportPath(Rule):
+ name = "TESTHARNESSREPORT-PATH"
+ description = "testharnessreport.js script seen with incorrect path"
+
+
+class TestdriverPath(Rule):
+ name = "TESTDRIVER-PATH"
+ description = "testdriver.js script seen with incorrect path"
+
+
+class TestdriverVendorPath(Rule):
+ name = "TESTDRIVER-VENDOR-PATH"
+ description = "testdriver-vendor.js script seen with incorrect path"
+
+
+class OpenNoMode(Rule):
+ name = "OPEN-NO-MODE"
+ description = "File opened without providing an explicit mode (note: binary files must be read with 'b' in the mode flags)"
+
+
+class UnknownGlobalMetadata(Rule):
+ name = "UNKNOWN-GLOBAL-METADATA"
+ description = "Unexpected value for global metadata"
+
+
+class BrokenGlobalMetadata(Rule):
+ name = "BROKEN-GLOBAL-METADATA"
+ description = "Invalid global metadata: %s"
+
+
+class UnknownTimeoutMetadata(Rule):
+ name = "UNKNOWN-TIMEOUT-METADATA"
+ description = "Unexpected value for timeout metadata"
+
+
+class UnknownMetadata(Rule):
+ name = "UNKNOWN-METADATA"
+ description = "Unexpected kind of metadata"
+
+
+class StrayMetadata(Rule):
+ name = "STRAY-METADATA"
+ description = "Metadata comments should start the file"
+
+
+class IndentedMetadata(Rule):
+ name = "INDENTED-METADATA"
+ description = "Metadata comments should start the line"
+
+
+class BrokenMetadata(Rule):
+ name = "BROKEN-METADATA"
+ description = "Metadata comment is not formatted correctly"
+
+
+class TestharnessInOtherType(Rule):
+ name = "TESTHARNESS-IN-OTHER-TYPE"
+ description = "testharness.js included in a %s test"
+
+
+class DuplicateBasenamePath(Rule):
+ name = "DUPLICATE-BASENAME-PATH"
+ description = collapse("""
+ File has identical basename path (path excluding extension) as
+ other file(s) (found extensions: %s)
+ """)
+ to_fix = "rename files so they have unique basename paths"
+
+
+class DuplicatePathCaseInsensitive(Rule):
+ name = "DUPLICATE-CASE-INSENSITIVE-PATH"
+ description = collapse("""
+ Path differs from path %s only in case
+ """)
+ to_fix = "rename files so they are unique irrespective of case"
+
+
+class TentativeDirectoryName(Rule):
+ name = "TENTATIVE-DIRECTORY-NAME"
+ description = "Directories for tentative tests must be named exactly 'tentative'"
+ to_fix = "rename directory to be called 'tentative'"
+
+
+class Regexp(metaclass=abc.ABCMeta):
+ @abc.abstractproperty
+ def pattern(self):
+ # type: () -> bytes
+ pass
+
+ @abc.abstractproperty
+ def name(self):
+ # type: () -> Text
+ pass
+
+ @abc.abstractproperty
+ def description(self):
+ # type: () -> Text
+ pass
+
+ file_extensions = None # type: Optional[List[Text]]
+
+ def __init__(self):
+ # type: () -> None
+ self._re = re.compile(self.pattern) # type: Pattern[bytes]
+
+ def applies(self, path):
+ # type: (Text) -> bool
+ return (self.file_extensions is None or
+ os.path.splitext(path)[1] in self.file_extensions)
+
+ def search(self, line):
+ # type: (bytes) -> Optional[Match[bytes]]
+ return self._re.search(line)
+
+
+class TabsRegexp(Regexp):
+ pattern = b"^\t"
+ name = "INDENT TABS"
+ description = "Test-file line starts with one or more tab characters"
+ to_fix = "use spaces to replace any tab characters at beginning of lines"
+
+
+class CRRegexp(Regexp):
+ pattern = b"\r$"
+ name = "CR AT EOL"
+ description = "Test-file line ends with CR (U+000D) character"
+ to_fix = """
+ reformat file so each line just has LF (U+000A) line ending (standard,
+ cross-platform "Unix" line endings instead of, e.g., DOS line endings).
+ """
+
+
+class SetTimeoutRegexp(Regexp):
+ pattern = br"setTimeout\s*\("
+ name = "SET TIMEOUT"
+ file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
+ description = "setTimeout used"
+ to_fix = """
+ replace all `setTimeout(...)` calls with `step_timeout(...)` calls
+ """
+
+
+class W3CTestOrgRegexp(Regexp):
+ pattern = br"w3c\-test\.org"
+ name = "W3C-TEST.ORG"
+ description = "Test-file line has the string `w3c-test.org`"
+ to_fix = """
+ either replace the `w3c-test.org` string with the expression
+ `{{host}}:{{ports[http][0]}}` or a generic hostname like `example.org`
+ """
+
+
+class WebPlatformTestRegexp(Regexp):
+ pattern = br"web\-platform\.test"
+ name = "WEB-PLATFORM.TEST"
+ description = "Internal web-platform.test domain used"
+ to_fix = """
+ use [server-side substitution](https://web-platform-tests.org/writing-tests/server-pipes.html#sub),
+ along with the [`.sub` filename-flag](https://web-platform-tests.org/writing-tests/file-names.html#test-features),
+ to replace web-platform.test with `{{domains[]}}`
+ """
+
+
+class Webidl2Regexp(Regexp):
+ pattern = br"webidl2\.js"
+ name = "WEBIDL2.JS"
+ description = "Legacy webidl2.js script used"
+
+
+class ConsoleRegexp(Regexp):
+ pattern = br"console\.[a-zA-Z]+\s*\("
+ name = "CONSOLE"
+ file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
+ description = "Test-file line has a `console.*(...)` call"
+ to_fix = """
+ remove the `console.*(...)` call (and in some cases, consider adding an
+ `assert_*` of some kind in place of it)
+ """
+
+
+class GenerateTestsRegexp(Regexp):
+ pattern = br"generate_tests\s*\("
+ name = "GENERATE_TESTS"
+ file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
+ description = "Test-file line has a `generate_tests(...)` call"
+ to_fix = "remove the call and call `test()` a number of times instead"
+
+
+class PrintRegexp(Regexp):
+ pattern = br"print(?:\s|\s*\()"
+ name = "PRINT STATEMENT"
+ file_extensions = [".py"]
+ description = collapse("""
+ A server-side python support file contains a `print` statement
+ """)
+ to_fix = """
+ remove the `print` statement or replace it with something else that
+ achieves the intended effect (e.g., a logging call)
+ """
+
+
+class LayoutTestsRegexp(Regexp):
+ pattern = br"(eventSender|testRunner|internals)\."
+ name = "LAYOUTTESTS APIS"
+ file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
+ description = "eventSender/testRunner/internals used; these are LayoutTests-specific APIs (WebKit/Blink)"
+
+
+class MissingDepsRegexp(Regexp):
+ pattern = br"[^\w]/gen/"
+ name = "MISSING DEPENDENCY"
+ file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
+ description = "Chromium-specific content referenced"
+ to_fix = "Reimplement the test to use well-documented testing interfaces"
+
+
+class SpecialPowersRegexp(Regexp):
+ pattern = b"SpecialPowers"
+ name = "SPECIALPOWERS API"
+ file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
+ description = "SpecialPowers used; this is Gecko-specific and not supported in wpt"
+
+
+class TrailingWhitespaceRegexp(Regexp):
+ name = "TRAILING WHITESPACE"
+ description = "Whitespace at EOL"
+ pattern = b"[ \t\f\v]$"
+ to_fix = """Remove trailing whitespace from all lines in the file."""
+
+
+class AssertThrowsRegexp(Regexp):
+ pattern = br"[^.]assert_throws\("
+ name = "ASSERT_THROWS"
+ file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
+ description = "Test-file line has an `assert_throws(...)` call"
+ to_fix = """Replace with `assert_throws_dom` or `assert_throws_js` or `assert_throws_exactly`"""
+
+
+class PromiseRejectsRegexp(Regexp):
+ pattern = br"promise_rejects\("
+ name = "PROMISE_REJECTS"
+ file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
+ description = "Test-file line has a `promise_rejects(...)` call"
+ to_fix = """Replace with `promise_rejects_dom` or `promise_rejects_js` or `promise_rejects_exactly`"""
+
+
+class AssertPreconditionRegexp(Regexp):
+ pattern = br"[^.]assert_precondition\("
+ name = "ASSERT-PRECONDITION"
+ file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
+ description = "Test-file line has an `assert_precondition(...)` call"
+ to_fix = """Replace with `assert_implements` or `assert_implements_optional`"""
diff --git a/testing/web-platform/tests/tools/lint/tests/__init__.py b/testing/web-platform/tests/tools/lint/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/lint/tests/base.py b/testing/web-platform/tests/tools/lint/tests/base.py
new file mode 100644
index 0000000000..7740bb25ca
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/base.py
@@ -0,0 +1,9 @@
+# mypy: allow-untyped-defs
+
+def check_errors(errors):
+ for e in errors:
+ error_type, description, path, line_number = e
+ assert isinstance(error_type, str)
+ assert isinstance(description, str)
+ assert isinstance(path, str)
+ assert line_number is None or isinstance(line_number, int)
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/about_blank.html b/testing/web-platform/tests/tools/lint/tests/dummy/about_blank.html
new file mode 100644
index 0000000000..8f940b540f
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/about_blank.html
@@ -0,0 +1 @@
+<link rel="match" href=about:blank>
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/broken.html b/testing/web-platform/tests/tools/lint/tests/dummy/broken.html
new file mode 100644
index 0000000000..74793c43ca
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/broken.html
@@ -0,0 +1 @@
+THIS LINE HAS TRAILING WHITESPACE
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/broken_ignored.html b/testing/web-platform/tests/tools/lint/tests/dummy/broken_ignored.html
new file mode 100644
index 0000000000..74793c43ca
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/broken_ignored.html
@@ -0,0 +1 @@
+THIS LINE HAS TRAILING WHITESPACE
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/a-ref.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/a-ref.html
new file mode 100644
index 0000000000..d00491fd7e
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/a-ref.html
@@ -0,0 +1 @@
+1
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/a.html
new file mode 100644
index 0000000000..73c5d0bc37
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/a.html
@@ -0,0 +1,4 @@
+<link rel="help" href="https://www.w3.org/TR/CSS21/aural.html#propdef-stress">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+1
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/a-ref.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/a-ref.html
new file mode 100644
index 0000000000..d00491fd7e
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/a-ref.html
@@ -0,0 +1 @@
+1
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/a.html
new file mode 100644
index 0000000000..73c5d0bc37
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/a.html
@@ -0,0 +1,4 @@
+<link rel="help" href="https://www.w3.org/TR/CSS21/aural.html#propdef-stress">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+1
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/support/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/support/a.html
new file mode 100644
index 0000000000..d00491fd7e
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/support/a.html
@@ -0,0 +1 @@
+1
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/support/tools/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/support/tools/a.html
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/support/tools/a.html
@@ -0,0 +1 @@
+2
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/tools/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/tools/a.html
new file mode 100644
index 0000000000..d00491fd7e
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/match/tools/a.html
@@ -0,0 +1 @@
+1
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/a-ref.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/a-ref.html
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/a-ref.html
@@ -0,0 +1 @@
+2
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/a.html
new file mode 100644
index 0000000000..4b0ce383a2
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/a.html
@@ -0,0 +1,4 @@
+<link rel="help" href="https://www.w3.org/TR/CSS21/aural.html#propdef-stress">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+2
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/support/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/support/a.html
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/support/a.html
@@ -0,0 +1 @@
+2
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/tools/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/tools/a.html
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/not-match/tools/a.html
@@ -0,0 +1 @@
+2
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/relative-testharness-interact.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/relative-testharness-interact.html
new file mode 100644
index 0000000000..a50ef983a9
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/relative-testharness-interact.html
@@ -0,0 +1,5 @@
+<!doctype html>
+<link rel=help href="https://www.w3.org/TR/CSS2/visufx.html#overflow-clipping">
+<meta name=flags content=interact>
+<script src="../../../resources/testharness.js"></script>
+<script src="../../../resources/testharnessreport.js"></script>
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/relative-testharness.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/relative-testharness.html
new file mode 100644
index 0000000000..f15649e3aa
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/relative-testharness.html
@@ -0,0 +1,4 @@
+<!doctype html>
+<link rel=help href="https://www.w3.org/TR/CSS2/visufx.html#overflow-clipping">
+<script src="../../../resources/testharness.js"></script>
+<script src="../../../resources/testharnessreport.js"></script>
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/selectors/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/selectors/a.html
new file mode 100644
index 0000000000..0d63c6bfed
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/selectors/a.html
@@ -0,0 +1,4 @@
+<link rel="help" href="https://drafts.csswg.org/selectors-3/#type-selectors">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+1
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/support/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/support/a.html
new file mode 100644
index 0000000000..d00491fd7e
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/support/a.html
@@ -0,0 +1 @@
+1
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/support/tools/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/support/tools/a.html
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/support/tools/a.html
@@ -0,0 +1 @@
+2
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/tools/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/tools/a.html
new file mode 100644
index 0000000000..d00491fd7e
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/css/css-unique/tools/a.html
@@ -0,0 +1 @@
+1
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/dependency.html b/testing/web-platform/tests/tools/lint/tests/dummy/dependency.html
new file mode 100644
index 0000000000..29296f4c58
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/dependency.html
@@ -0,0 +1 @@
+This file is used to demonstrate acceptance of root-relative reftest references.
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/lint.ignore b/testing/web-platform/tests/tools/lint/tests/dummy/lint.ignore
new file mode 100644
index 0000000000..a763e4432e
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/lint.ignore
@@ -0,0 +1 @@
+*:broken_ignored.html
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/okay.html b/testing/web-platform/tests/tools/lint/tests/dummy/okay.html
new file mode 100644
index 0000000000..a3178a3c83
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/okay.html
@@ -0,0 +1 @@
+THIS LINE HAS NO TRAILING WHITESPACE
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/ref/absolute.html b/testing/web-platform/tests/tools/lint/tests/dummy/ref/absolute.html
new file mode 100644
index 0000000000..4b47bc8836
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/ref/absolute.html
@@ -0,0 +1 @@
+<link rel="match" href="http://example.com/reference.html">
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/ref/existent_relative-ref.html b/testing/web-platform/tests/tools/lint/tests/dummy/ref/existent_relative-ref.html
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/ref/existent_relative-ref.html
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/ref/existent_relative.html b/testing/web-platform/tests/tools/lint/tests/dummy/ref/existent_relative.html
new file mode 100644
index 0000000000..29364ee5fb
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/ref/existent_relative.html
@@ -0,0 +1 @@
+<link rel="match" href="existent_relative-ref.html">
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/ref/existent_root_relative.html b/testing/web-platform/tests/tools/lint/tests/dummy/ref/existent_root_relative.html
new file mode 100644
index 0000000000..2bedc2d309
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/ref/existent_root_relative.html
@@ -0,0 +1 @@
+<link rel="match" href="/dependency.html">
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/ref/non_existent_relative.html b/testing/web-platform/tests/tools/lint/tests/dummy/ref/non_existent_relative.html
new file mode 100644
index 0000000000..009a1d5eb0
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/ref/non_existent_relative.html
@@ -0,0 +1 @@
+<link rel="match" href="non_existent_file.html">
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/ref/non_existent_root_relative.html b/testing/web-platform/tests/tools/lint/tests/dummy/ref/non_existent_root_relative.html
new file mode 100644
index 0000000000..b1812013aa
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/ref/non_existent_root_relative.html
@@ -0,0 +1 @@
+<link rel="match" href="/non_existent_file.html">
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/ref/same_file_empty.html b/testing/web-platform/tests/tools/lint/tests/dummy/ref/same_file_empty.html
new file mode 100644
index 0000000000..eaa18e9c01
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/ref/same_file_empty.html
@@ -0,0 +1 @@
+<link rel="match" href="">
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/ref/same_file_path.html b/testing/web-platform/tests/tools/lint/tests/dummy/ref/same_file_path.html
new file mode 100644
index 0000000000..6a80c1f20d
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/ref/same_file_path.html
@@ -0,0 +1 @@
+<link rel="match" href="same_file_path.html">
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/a.html b/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/a.html
new file mode 100644
index 0000000000..f412593b04
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/a.html
@@ -0,0 +1,4 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/a.js b/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/a.js
new file mode 100644
index 0000000000..a855dab6a7
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/a.js
@@ -0,0 +1,6 @@
+// This is a dummy JavaScript file, meant to indicate a 'support' file of
+// sorts that may be included by some test.
+
+function helloWorld() {
+ return 'Hello, world!';
+}
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/a.xhtml b/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/a.xhtml
new file mode 100644
index 0000000000..c8b4cc2e52
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/a.xhtml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+</html>
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/b.html b/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/b.html
new file mode 100644
index 0000000000..f412593b04
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir1/b.html
@@ -0,0 +1,4 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir2/a.xhtml b/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir2/a.xhtml
new file mode 100644
index 0000000000..c8b4cc2e52
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/tests/dir2/a.xhtml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+</html>
diff --git a/testing/web-platform/tests/tools/lint/tests/dummy/tests/relative-testharness-manual.html b/testing/web-platform/tests/tools/lint/tests/dummy/tests/relative-testharness-manual.html
new file mode 100644
index 0000000000..15693b990c
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/dummy/tests/relative-testharness-manual.html
@@ -0,0 +1,3 @@
+<!doctype html>
+<script src="../../../resources/testharness.js"></script>
+<script src="../../../resources/testharnessreport.js"></script>
diff --git a/testing/web-platform/tests/tools/lint/tests/test_file_lints.py b/testing/web-platform/tests/tools/lint/tests/test_file_lints.py
new file mode 100644
index 0000000000..b438e45e62
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/test_file_lints.py
@@ -0,0 +1,933 @@
+# mypy: allow-untyped-defs
+
+from ..lint import check_file_contents
+from .base import check_errors
+import io
+import os
+import pytest
+
+INTERESTING_FILE_NAMES = {
+ "python": [
+ "test.py",
+ ],
+ "js": [
+ "test.js",
+ ],
+ "web-lax": [
+ "test.htm",
+ "test.html",
+ ],
+ "web-strict": [
+ "test.svg",
+ "test.xht",
+ "test.xhtml",
+ ],
+}
+
+def check_with_files(input_bytes):
+ return {
+ filename: (check_file_contents("", filename, io.BytesIO(input_bytes)), kind)
+ for (filename, kind) in
+ (
+ (os.path.join("html", filename), kind)
+ for (kind, filenames) in INTERESTING_FILE_NAMES.items()
+ for filename in filenames
+ )
+ }
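+
+# The resulting mapping (illustrative, not part of the original source) looks
+# roughly like:
+#     {"html/test.py": ([...errors...], "python"),
+#      "html/test.html": ([...errors...], "web-lax"),
+#      ...}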
+
+
+def test_trailing_whitespace():
+ error_map = check_with_files(b"test; ")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ expected = [("TRAILING WHITESPACE", "Whitespace at EOL", filename, 1)]
+ if kind == "web-strict":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, None))
+ assert errors == expected
+
+
+def test_indent_tabs():
+ error_map = check_with_files(b"def foo():\n\x09pass")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ expected = [("INDENT TABS", "Test-file line starts with one or more tab characters", filename, 2)]
+ if kind == "web-strict":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, None))
+ assert errors == expected
+
+
+def test_cr_not_at_eol():
+ error_map = check_with_files(b"line1\rline2\r")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ expected = [("CR AT EOL", "Test-file line ends with CR (U+000D) character", filename, 1)]
+ if kind == "web-strict":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, None))
+ assert errors == expected
+
+
+def test_cr_at_eol():
+ error_map = check_with_files(b"line1\r\nline2\r\n")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ expected = [
+ ("CR AT EOL", "Test-file line ends with CR (U+000D) character", filename, 1),
+ ("CR AT EOL", "Test-file line ends with CR (U+000D) character", filename, 2),
+ ]
+ if kind == "web-strict":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, None))
+ assert errors == expected
+
+
+def test_w3c_test_org():
+ error_map = check_with_files(b"import('http://www.w3c-test.org/')")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ expected = [("W3C-TEST.ORG", "Test-file line has the string `w3c-test.org`", filename, 1)]
+ if kind == "python":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, 1))
+ elif kind == "web-strict":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, None))
+ assert errors == expected
+
+def test_web_platform_test():
+ error_map = check_with_files(b"import('http://web-platform.test/')")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ expected = [("WEB-PLATFORM.TEST", "Internal web-platform.test domain used", filename, 1)]
+ if kind == "python":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, 1))
+ elif kind == "web-strict":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, None))
+ assert errors == expected
+
+
+def test_webidl2_js():
+ error_map = check_with_files(b"<script src=/resources/webidl2.js>")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ expected = [("WEBIDL2.JS", "Legacy webidl2.js script used", filename, 1)]
+ if kind == "python":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, 1))
+ elif kind == "web-strict":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, None))
+ assert errors == expected
+
+
+def test_console():
+ error_map = check_with_files(b"<script>\nconsole.log('error');\nconsole.error ('log')\n</script>")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind in ["web-lax", "web-strict", "js"]:
+ assert errors == [
+ ("CONSOLE", "Test-file line has a `console.*(...)` call", filename, 2),
+ ("CONSOLE", "Test-file line has a `console.*(...)` call", filename, 3),
+ ]
+ else:
+ assert errors == [("PARSE-FAILED", "Unable to parse file", filename, 1)]
+
+
+def test_setTimeout():
+ error_map = check_with_files(b"<script>setTimeout(() => 1, 10)</script>")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind == "python":
+ assert errors == [("PARSE-FAILED", "Unable to parse file", filename, 1)]
+ else:
+ assert errors == [('SET TIMEOUT',
+ 'setTimeout used',
+ filename,
+ 1)]
+
+
+def test_eventSender():
+ error_map = check_with_files(b"<script>eventSender.mouseDown()</script>")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind == "python":
+ assert errors == [("PARSE-FAILED", "Unable to parse file", filename, 1)]
+ else:
+ assert errors == [('LAYOUTTESTS APIS',
+ 'eventSender/testRunner/internals used; these are LayoutTests-specific APIs (WebKit/Blink)',
+ filename,
+ 1)]
+
+
+def test_testRunner():
+ error_map = check_with_files(b"<script>if (window.testRunner) { testRunner.waitUntilDone(); }</script>")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind == "python":
+ assert errors == [("PARSE-FAILED", "Unable to parse file", filename, 1)]
+ else:
+ assert errors == [('LAYOUTTESTS APIS',
+ 'eventSender/testRunner/internals used; these are LayoutTests-specific APIs (WebKit/Blink)',
+ filename,
+ 1)]
+
+
+def test_internals():
+ error_map = check_with_files(b"<script>if (window.internals) { internals.doAThing(); }</script>")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind == "python":
+ assert errors == [("PARSE-FAILED", "Unable to parse file", filename, 1)]
+ else:
+ assert errors == [('LAYOUTTESTS APIS',
+ 'eventSender/testRunner/internals used; these are LayoutTests-specific APIs (WebKit/Blink)',
+ filename,
+ 1)]
+
+
+def test_missing_deps():
+ error_map = check_with_files(b"<script src='/gen/foo.js'></script>")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind == "python":
+ assert errors == [("PARSE-FAILED", "Unable to parse file", filename, 1)]
+ else:
+ assert errors == [('MISSING DEPENDENCY',
+ 'Chromium-specific content referenced',
+ filename,
+ 1)]
+
+
+def test_no_missing_deps():
+ error_map = check_with_files(b"""<head>
+<script src='/foo/gen/foo.js'></script>
+<script src='/gens/foo.js'></script>
+</head>""")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind == "python":
+ assert errors == [("PARSE-FAILED", "Unable to parse file", filename, 1)]
+ else:
+ assert errors == []
+
+
+def test_meta_timeout():
+ code = b"""
+<html xmlns="http://www.w3.org/1999/xhtml">
+<meta name="timeout" />
+<meta name="timeout" content="short" />
+<meta name="timeout" content="long" />
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind in ["web-lax", "web-strict"]:
+ assert errors == [
+ ("MULTIPLE-TIMEOUT", "More than one meta name='timeout'", filename, None),
+ ("INVALID-TIMEOUT",
+ "Test file with `<meta name='timeout'...>` element that has a `content` attribute whose value is not `long`: ",
+ filename,
+ None),
+ ("INVALID-TIMEOUT",
+ "Test file with `<meta name='timeout'...>` element that has a `content` attribute whose value is not `long`: short",
+ filename,
+ None),
+ ]
+ elif kind == "python":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, 2),
+ ]
+
+
+def test_early_testharnessreport():
+ code = b"""
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testharness.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind in ["web-lax", "web-strict"]:
+ assert errors == [
+ ("EARLY-TESTHARNESSREPORT",
+ "Test file has an instance of "
+ "`<script src='/resources/testharnessreport.js'>` "
+ "prior to `<script src='/resources/testharness.js'>`",
+ filename,
+ None),
+ ]
+ elif kind == "python":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, 2),
+ ]
+
+
+def test_multiple_testharness():
+ code = b"""
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharness.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind in ["web-lax", "web-strict"]:
+ assert errors == [
+ ("MULTIPLE-TESTHARNESS", "More than one `<script src='/resources/testharness.js'>`", filename, None),
+ ("MISSING-TESTHARNESSREPORT", "Missing `<script src='/resources/testharnessreport.js'>`", filename, None),
+ ]
+ elif kind == "python":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, 2),
+ ]
+
+
+def test_multiple_testharnessreport():
+ code = b"""
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind in ["web-lax", "web-strict"]:
+ assert errors == [
+ ("MULTIPLE-TESTHARNESSREPORT", "More than one `<script src='/resources/testharnessreport.js'>`", filename, None),
+ ]
+ elif kind == "python":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, 2),
+ ]
+
+
+def test_early_testdriver_vendor():
+ code = b"""
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testdriver-vendor.js"></script>
+<script src="/resources/testdriver.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind in ["web-lax", "web-strict"]:
+ assert errors == [
+ ("EARLY-TESTDRIVER-VENDOR",
+ "Test file has an instance of "
+ "`<script src='/resources/testdriver-vendor.js'>` "
+ "prior to `<script src='/resources/testdriver.js'>`",
+ filename,
+ None),
+ ]
+ elif kind == "python":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, 2),
+ ]
+
+
+def test_multiple_testdriver():
+ code = b"""
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind in ["web-lax", "web-strict"]:
+ assert errors == [
+ ("MULTIPLE-TESTDRIVER", "More than one `<script src='/resources/testdriver.js'>`", filename, None),
+ ]
+ elif kind == "python":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, 2),
+ ]
+
+
+def test_multiple_testdriver_vendor():
+ code = b"""
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind in ["web-lax", "web-strict"]:
+ assert errors == [
+ ("MULTIPLE-TESTDRIVER-VENDOR", "More than one `<script src='/resources/testdriver-vendor.js'>`", filename, None),
+ ]
+ elif kind == "python":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, 2),
+ ]
+
+
+def test_missing_testdriver_vendor():
+ code = b"""
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind in ["web-lax", "web-strict"]:
+ assert errors == [
+ ("MISSING-TESTDRIVER-VENDOR", "Missing `<script src='/resources/testdriver-vendor.js'>`", filename, None),
+ ]
+ elif kind == "python":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, 2),
+ ]
+
+
+def test_testharness_path():
+ code = b"""\
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="testharness.js"></script>
+<script src="resources/testharness.js"></script>
+<script src="../resources/testharness.js"></script>
+<script src="http://w3c-test.org/resources/testharness.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ expected = [("W3C-TEST.ORG", "Test-file line has the string `w3c-test.org`", filename, 5)]
+ if kind == "python":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, 1))
+ elif kind in ["web-lax", "web-strict"]:
+ expected.extend([
+ ("TESTHARNESS-PATH", "testharness.js script seen with incorrect path", filename, None),
+ ("TESTHARNESS-PATH", "testharness.js script seen with incorrect path", filename, None),
+ ("TESTHARNESS-PATH", "testharness.js script seen with incorrect path", filename, None),
+ ("TESTHARNESS-PATH", "testharness.js script seen with incorrect path", filename, None),
+ ])
+ assert errors == expected
+
+
+def test_testharnessreport_path():
+ code = b"""\
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="testharnessreport.js"></script>
+<script src="resources/testharnessreport.js"></script>
+<script src="../resources/testharnessreport.js"></script>
+<script src="http://w3c-test.org/resources/testharnessreport.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ expected = [("W3C-TEST.ORG", "Test-file line has the string `w3c-test.org`", filename, 5)]
+ if kind == "python":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, 1))
+ elif kind in ["web-lax", "web-strict"]:
+ expected.extend([
+ ("TESTHARNESSREPORT-PATH", "testharnessreport.js script seen with incorrect path", filename, None),
+ ("TESTHARNESSREPORT-PATH", "testharnessreport.js script seen with incorrect path", filename, None),
+ ("TESTHARNESSREPORT-PATH", "testharnessreport.js script seen with incorrect path", filename, None),
+ ("TESTHARNESSREPORT-PATH", "testharnessreport.js script seen with incorrect path", filename, None),
+ ])
+ assert errors == expected
+
+
+def test_testdriver_path():
+ code = b"""\
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="testdriver.js"></script>
+<script src="/elsewhere/testdriver.js"></script>
+<script src="/elsewhere/resources/testdriver.js"></script>
+<script src="/resources/elsewhere/testdriver.js"></script>
+<script src="../resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ expected = []
+ if kind == "python":
+ expected.append(("PARSE-FAILED", "Unable to parse file", filename, 1))
+ elif kind in ["web-lax", "web-strict"]:
+ expected.extend([
+ ("TESTDRIVER-PATH", "testdriver.js script seen with incorrect path", filename, None),
+ ("TESTDRIVER-PATH", "testdriver.js script seen with incorrect path", filename, None),
+ ("TESTDRIVER-PATH", "testdriver.js script seen with incorrect path", filename, None),
+ ("TESTDRIVER-PATH", "testdriver.js script seen with incorrect path", filename, None),
+ ("TESTDRIVER-PATH", "testdriver.js script seen with incorrect path", filename, None)
+ ])
+ assert errors == expected
+
+
+def test_testdriver_vendor_path():
+ code = b"""\
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="testdriver-vendor.js"></script>
+<script src="/elsewhere/testdriver-vendor.js"></script>
+<script src="/elsewhere/resources/testdriver-vendor.js"></script>
+<script src="/resources/elsewhere/testdriver-vendor.js"></script>
+<script src="../resources/testdriver-vendor.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind == "python":
+ expected = {("PARSE-FAILED", "Unable to parse file", filename, 1)}
+ elif kind in ["web-lax", "web-strict"]:
+ expected = {
+ ("MISSING-TESTDRIVER-VENDOR", "Missing `<script src='/resources/testdriver-vendor.js'>`", filename, None),
+ ("TESTDRIVER-VENDOR-PATH", "testdriver-vendor.js script seen with incorrect path", filename, None),
+ ("TESTDRIVER-VENDOR-PATH", "testdriver-vendor.js script seen with incorrect path", filename, None),
+ ("TESTDRIVER-VENDOR-PATH", "testdriver-vendor.js script seen with incorrect path", filename, None),
+ ("TESTDRIVER-VENDOR-PATH", "testdriver-vendor.js script seen with incorrect path", filename, None),
+ ("TESTDRIVER-VENDOR-PATH", "testdriver-vendor.js script seen with incorrect path", filename, None)
+ }
+ else:
+ expected = set()
+
+ assert set(errors) == expected
+
+
+def test_not_testharness_path():
+ code = b"""\
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="resources/webperftestharness.js"></script>
+<script src="/resources/testdriver.js"></script>
+<script src="/resources/testdriver-vendor.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind == "python":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, 1),
+ ]
+ else:
+ assert errors == []
+
+
+def test_variant_missing():
+ code = b"""\
+<html xmlns="http://www.w3.org/1999/xhtml">
+<meta name="variant">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind == "python":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, 1),
+ ]
+ elif kind == "web-lax":
+ assert errors == [
+ ("VARIANT-MISSING",
+ "Test file with a `<meta name='variant'...>` element that's missing a `content` attribute",
+ filename,
+ None)
+ ]
+
+
+# A corresponding "positive" test cannot be written because the manifest
+# SourceFile implementation raises a runtime exception for the condition this
+# linting rule describes
+@pytest.mark.parametrize("content", ["",
+ "?foo"
+ "#bar"])
+def test_variant_malformed_negative(content):
+ code = """\
+<html xmlns="http://www.w3.org/1999/xhtml">
+<meta name="variant" content="{}">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+</html>
+""".format(content).encode("utf-8")
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind == "python":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, 1),
+ ]
+ elif kind == "web-lax":
+ assert errors == []
+
+
+def test_late_timeout():
+ code = b"""\
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<meta name="timeout" content="long">
+<script src="/resources/testharnessreport.js"></script>
+</html>
+"""
+ error_map = check_with_files(code)
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind == "python":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, 1),
+ ]
+ elif kind == "web-lax":
+ assert errors == [
+ ("LATE-TIMEOUT",
+ "Test file with `<meta name='timeout'...>` element after `<script src='/resources/testharnessreport.js'>` element",
+ filename,
+ None)
+ ]
+
+
+def test_print_function():
+ error_map = check_with_files(b"def foo():\n print('function')\n")
+
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if kind == "python":
+ assert errors == [
+ ("PRINT STATEMENT", "A server-side python support file contains a `print` statement", filename, 2),
+ ]
+ elif kind == "web-strict":
+ assert errors == [
+ ("PARSE-FAILED", "Unable to parse file", filename, None),
+ ]
+ else:
+ assert errors == []
+
+
+def test_ahem_system_font():
+ code = b"""\
+<html>
+<style>
+body {
+ font-family: aHEm, sans-serif;
+}
+</style>
+</html>
+"""
+ error_map = check_with_files(code)
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if filename.endswith((".htm", ".html", ".xht", ".xhtml")):
+ assert errors == [
+ ("AHEM SYSTEM FONT", "Don't use Ahem as a system font, use /fonts/ahem.css", filename, None)
+ ]
+
+
+def test_ahem_web_font():
+ code = b"""\
+<html>
+<link rel="stylesheet" type="text/css" href="/fonts/ahem.css" />
+<style>
+body {
+ font-family: aHEm, sans-serif;
+}
+</style>
+</html>
+"""
+ error_map = check_with_files(code)
+ for (filename, (errors, kind)) in error_map.items():
+ check_errors(errors)
+
+ if filename.endswith((".htm", ".html", ".xht", ".xhtml")):
+ assert errors == []
+
+
+open_mode_code = """
+def first():
+ return {0}("test.png")
+
+def second():
+ return {0}("test.png", "r")
+
+def third():
+ return {0}("test.png", "rb")
+
+def fourth():
+ return {0}("test.png", encoding="utf-8")
+
+def fifth():
+ return {0}("test.png", mode="rb")
+"""
+
+
+def test_open_mode():
+ for method in ["open", "file"]:
+ code = open_mode_code.format(method).encode("utf-8")
+ errors = check_file_contents("", "test.py", io.BytesIO(code))
+ check_errors(errors)
+
+ message = ("File opened without providing an explicit mode (note: " +
+ "binary files must be read with 'b' in the mode flags)")
+
+ assert errors == [
+ ("OPEN-NO-MODE", message, "test.py", 3),
+ ("OPEN-NO-MODE", message, "test.py", 12),
+ ]
+
+
+@pytest.mark.parametrize(
+ "filename,expect_error",
+ [
+ ("foo/bar.html", False),
+ ("css/bar.html", True),
+ ])
+def test_css_support_file(filename, expect_error):
+ errors = check_file_contents("", filename, io.BytesIO(b""))
+ check_errors(errors)
+
+ if expect_error:
+ assert errors == [
+ ('SUPPORT-WRONG-DIR',
+ 'Support file not in support directory',
+ filename,
+ None),
+ ]
+ else:
+ assert errors == []
+
+
+def test_css_missing_file_in_css():
+ code = b"""\
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+</html>
+"""
+ errors = check_file_contents("", "css/foo/bar.html", io.BytesIO(code))
+ check_errors(errors)
+
+ assert errors == [
+ ('MISSING-LINK',
+ 'Testcase file must have a link to a spec',
+ "css/foo/bar.html",
+ None),
+ ]
+
+
+def test_css_missing_file_manual():
+ errors = check_file_contents("", "css/foo/bar-manual.html", io.BytesIO(b""))
+ check_errors(errors)
+
+ assert errors == [
+ ('MISSING-LINK',
+ 'Testcase file must have a link to a spec',
+ "css/foo/bar-manual.html",
+ None),
+ ]
+
+
+def test_css_missing_file_tentative():
+ code = b"""\
+<html xmlns="http://www.w3.org/1999/xhtml">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+</html>
+"""
+
+ # The tentative flag covers tests that make assertions 'not yet required by
+ # any specification', so they need not have a specification link.
+ errors = check_file_contents("", "css/foo/bar.tentative.html", io.BytesIO(code))
+ assert not errors
+
+
+@pytest.mark.parametrize("filename", [
+ "foo.worker.js",
+ "foo.any.js",
+])
+@pytest.mark.parametrize("input,error", [
+ (b"""//META: title=foo\n""", None),
+ (b"""//META: timeout=long\n""", None),
+ (b"""// META: timeout=long\n""", None),
+ (b"""// META: timeout=long\n""", None),
+ (b"""// META: script=foo.js\n""", None),
+ (b"""// META: variant=\n""", None),
+ (b"""// META: variant=?wss\n""", None),
+ (b"""# META:\n""", None),
+ (b"""\n// META: timeout=long\n""", (2, "STRAY-METADATA")),
+ (b""" // META: timeout=long\n""", (1, "INDENTED-METADATA")),
+ (b"""// META: timeout=long\n// META: timeout=long\n""", None),
+ (b"""// META: timeout=long\n\n// META: timeout=long\n""", (3, "STRAY-METADATA")),
+ (b"""// META: timeout=long\n// Start of the test\n// META: timeout=long\n""", (3, "STRAY-METADATA")),
+ (b"""// META:\n""", (1, "BROKEN-METADATA")),
+ (b"""// META: foobar\n""", (1, "BROKEN-METADATA")),
+ (b"""// META: foo=bar\n""", (1, "UNKNOWN-METADATA")),
+ (b"""// META: timeout=bar\n""", (1, "UNKNOWN-TIMEOUT-METADATA")),
+])
+def test_script_metadata(filename, input, error):
+ errors = check_file_contents("", filename, io.BytesIO(input))
+ check_errors(errors)
+
+ if error is not None:
+ line, kind = error
+ messages = {
+ "STRAY-METADATA": "Metadata comments should start the file",
+ "INDENTED-METADATA": "Metadata comments should start the line",
+ "BROKEN-METADATA": "Metadata comment is not formatted correctly",
+ "UNKNOWN-TIMEOUT-METADATA": "Unexpected value for timeout metadata",
+ "UNKNOWN-METADATA": "Unexpected kind of metadata",
+ }
+ assert errors == [
+ (kind,
+ messages[kind],
+ filename,
+ line),
+ ]
+ else:
+ assert errors == []
+
+
+@pytest.mark.parametrize("globals,error", [
+ (b"", None),
+ (b"default", "UNKNOWN-GLOBAL-METADATA"),
+ (b"!default", "UNKNOWN-GLOBAL-METADATA"),
+ (b"window", None),
+ (b"!window", "UNKNOWN-GLOBAL-METADATA"),
+ (b"!dedicatedworker", "UNKNOWN-GLOBAL-METADATA"),
+ (b"window, !window", "UNKNOWN-GLOBAL-METADATA"),
+ (b"!serviceworker", "UNKNOWN-GLOBAL-METADATA"),
+ (b"serviceworker, !serviceworker", "UNKNOWN-GLOBAL-METADATA"),
+ (b"worker, !dedicatedworker", "UNKNOWN-GLOBAL-METADATA"),
+ (b"worker, !serviceworker", "UNKNOWN-GLOBAL-METADATA"),
+ (b"!worker", "UNKNOWN-GLOBAL-METADATA"),
+ (b"foo", "UNKNOWN-GLOBAL-METADATA"),
+ (b"!foo", "UNKNOWN-GLOBAL-METADATA"),
+])
+def test_script_globals_metadata(globals, error):
+ filename = "foo.any.js"
+ input = b"""// META: global=%s\n""" % globals
+ errors = check_file_contents("", filename, io.BytesIO(input))
+ check_errors(errors)
+
+ if error is not None:
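+        # Drop the human-readable message; only the error kind, filename and
+        # line number are asserted for global metadata errors.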
+ errors = [(k, f, l) for (k, _, f, l) in errors]
+ assert errors == [
+ (error,
+ filename,
+ 1),
+ ]
+ else:
+ assert errors == []
+
+
+@pytest.mark.parametrize("input,error", [
+ (b"""#META: timeout=long\n""", None),
+ (b"""# META: timeout=long\n""", None),
+ (b"""# META: timeout=long\n""", None),
+ (b""""// META:"\n""", None),
+ (b"""\n# META: timeout=long\n""", (2, "STRAY-METADATA")),
+ (b""" # META: timeout=long\n""", (1, "INDENTED-METADATA")),
+ (b"""# META: timeout=long\n# META: timeout=long\n""", None),
+ (b"""# META: timeout=long\n\n# META: timeout=long\n""", (3, "STRAY-METADATA")),
+ (b"""# META: timeout=long\n# Start of the test\n# META: timeout=long\n""", (3, "STRAY-METADATA")),
+ (b"""# META:\n""", (1, "BROKEN-METADATA")),
+ (b"""# META: foobar\n""", (1, "BROKEN-METADATA")),
+ (b"""# META: foo=bar\n""", (1, "UNKNOWN-METADATA")),
+ (b"""# META: timeout=bar\n""", (1, "UNKNOWN-TIMEOUT-METADATA")),
+])
+def test_python_metadata(input, error):
+ filename = "test.py"
+ errors = check_file_contents("", filename, io.BytesIO(input))
+ check_errors(errors)
+
+ if error is not None:
+ line, kind = error
+ messages = {
+ "STRAY-METADATA": "Metadata comments should start the file",
+ "INDENTED-METADATA": "Metadata comments should start the line",
+ "BROKEN-METADATA": "Metadata comment is not formatted correctly",
+ "UNKNOWN-TIMEOUT-METADATA": "Unexpected value for timeout metadata",
+ "UNKNOWN-METADATA": "Unexpected kind of metadata",
+ }
+ assert errors == [
+ (kind,
+ messages[kind],
+ filename,
+ line),
+ ]
+ else:
+ assert errors == []
diff --git a/testing/web-platform/tests/tools/lint/tests/test_lint.py b/testing/web-platform/tests/tools/lint/tests/test_lint.py
new file mode 100644
index 0000000000..11ce20353f
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/test_lint.py
@@ -0,0 +1,560 @@
+# mypy: allow-untyped-defs
+
+import io
+import os
+import sys
+from unittest import mock
+
+from ...localpaths import repo_root
+from .. import lint as lint_mod
+from ..lint import filter_ignorelist_errors, parse_ignorelist, lint, create_parser
+
+_dummy_repo = os.path.join(os.path.dirname(__file__), "dummy")
+
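+# Patch `name` in the lint module while delegating to the real implementation
+# (wraps=wrapped), so tests can assert on call counts without changing the
+# lint behaviour.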
+def _mock_lint(name, **kwargs):
+ wrapped = getattr(lint_mod, name)
+ return mock.patch(lint_mod.__name__ + "." + name, wraps=wrapped, **kwargs)
+
+
+def test_filter_ignorelist_errors():
+ ignorelist = {
+ 'CONSOLE': {
+ 'svg/*': {12}
+ },
+ 'INDENT TABS': {
+ 'svg/*': {None}
+ }
+ }
+    # parse_ignorelist normalises the case/path of the match string, so we need to do the same here
+ ignorelist = {e: {os.path.normcase(k): v for k, v in p.items()}
+ for e, p in ignorelist.items()}
+ # paths passed into filter_ignorelist_errors are always Unix style
+ filteredfile = 'svg/test.html'
+ unfilteredfile = 'html/test.html'
+ # Tests for passing no errors
+ filtered = filter_ignorelist_errors(ignorelist, [])
+ assert filtered == []
+ filtered = filter_ignorelist_errors(ignorelist, [])
+ assert filtered == []
+ # Tests for filtering on file and line number
+ filtered = filter_ignorelist_errors(ignorelist, [['CONSOLE', '', filteredfile, 12]])
+ assert filtered == []
+ filtered = filter_ignorelist_errors(ignorelist, [['CONSOLE', '', unfilteredfile, 12]])
+ assert filtered == [['CONSOLE', '', unfilteredfile, 12]]
+ filtered = filter_ignorelist_errors(ignorelist, [['CONSOLE', '', filteredfile, 11]])
+ assert filtered == [['CONSOLE', '', filteredfile, 11]]
+ # Tests for filtering on just file
+ filtered = filter_ignorelist_errors(ignorelist, [['INDENT TABS', '', filteredfile, 12]])
+ assert filtered == []
+ filtered = filter_ignorelist_errors(ignorelist, [['INDENT TABS', '', filteredfile, 11]])
+ assert filtered == []
+ filtered = filter_ignorelist_errors(ignorelist, [['INDENT TABS', '', unfilteredfile, 11]])
+ assert filtered == [['INDENT TABS', '', unfilteredfile, 11]]
+
+
+def test_parse_ignorelist():
+ input_buffer = io.StringIO("""
+# Comment
+CR AT EOL: svg/import/*
+CR AT EOL: streams/resources/test-utils.js
+
+INDENT TABS: .gitmodules
+INDENT TABS: app-uri/*
+INDENT TABS: svg/*
+
+TRAILING WHITESPACE: app-uri/*
+
+CONSOLE:streams/resources/test-utils.js: 12
+
+CR AT EOL, INDENT TABS: html/test.js
+
+CR AT EOL, INDENT TABS: html/test2.js: 42
+
+*:*.pdf
+*:resources/*
+
+*, CR AT EOL: *.png
+""")
+
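+    # The ignorelist syntax exercised above is
+    # "ERROR TYPE[, ERROR TYPE...]: path-pattern[: line]"; listing "*" among
+    # the error types marks the matching files as skipped entirely.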
+ expected_data = {
+ 'INDENT TABS': {
+ '.gitmodules': {None},
+ 'app-uri/*': {None},
+ 'svg/*': {None},
+ 'html/test.js': {None},
+ 'html/test2.js': {42},
+ },
+ 'TRAILING WHITESPACE': {
+ 'app-uri/*': {None},
+ },
+ 'CONSOLE': {
+ 'streams/resources/test-utils.js': {12},
+ },
+ 'CR AT EOL': {
+ 'streams/resources/test-utils.js': {None},
+ 'svg/import/*': {None},
+ 'html/test.js': {None},
+ 'html/test2.js': {42},
+ }
+ }
+ expected_data = {e: {os.path.normcase(k): v for k, v in p.items()}
+ for e, p in expected_data.items()}
+ expected_skipped = {os.path.normcase(x) for x in {"*.pdf", "resources/*", "*.png"}}
+ data, skipped_files = parse_ignorelist(input_buffer)
+ assert data == expected_data
+ assert skipped_files == expected_skipped
+
+
+def test_lint_no_files(caplog):
+ rv = lint(_dummy_repo, [], "normal")
+ assert rv == 0
+ assert caplog.text == ""
+
+
+def test_lint_ignored_file(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["broken_ignored.html"], "normal")
+ assert rv == 0
+ assert not mocked_check_path.called
+ assert not mocked_check_file_contents.called
+ assert caplog.text == ""
+
+
+def test_lint_not_existing_file(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ # really long path-linted filename
+ name = "a" * 256 + ".html"
+ rv = lint(_dummy_repo, [name], "normal")
+ assert rv == 0
+ assert not mocked_check_path.called
+ assert not mocked_check_file_contents.called
+ assert caplog.text == ""
+
+
+def test_lint_passing(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["okay.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 1
+ assert mocked_check_file_contents.call_count == 1
+ assert caplog.text == ""
+
+
+def test_lint_failing(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["broken.html"], "normal")
+ assert rv == 1
+ assert mocked_check_path.call_count == 1
+ assert mocked_check_file_contents.call_count == 1
+ assert "TRAILING WHITESPACE" in caplog.text
+ assert "broken.html:1" in caplog.text
+
+
+def test_ref_existent_relative(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["ref/existent_relative.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 1
+ assert mocked_check_file_contents.call_count == 1
+ assert caplog.text == ""
+
+
+def test_ref_existent_root_relative(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["ref/existent_root_relative.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 1
+ assert mocked_check_file_contents.call_count == 1
+ assert caplog.text == ""
+
+
+def test_ref_non_existent_relative(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["ref/non_existent_relative.html"], "normal")
+ assert rv == 1
+ assert mocked_check_path.call_count == 1
+ assert mocked_check_file_contents.call_count == 1
+ assert "NON-EXISTENT-REF" in caplog.text
+ assert "ref/non_existent_relative.html" in caplog.text
+ assert "non_existent_file.html" in caplog.text
+
+
+def test_ref_non_existent_root_relative(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["ref/non_existent_root_relative.html"], "normal")
+ assert rv == 1
+ assert mocked_check_path.call_count == 1
+ assert mocked_check_file_contents.call_count == 1
+ assert "NON-EXISTENT-REF" in caplog.text
+ assert "ref/non_existent_root_relative.html" in caplog.text
+ assert "/non_existent_file.html" in caplog.text
+
+
+def test_ref_absolute_url(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["ref/absolute.html"], "normal")
+ assert rv == 1
+ assert mocked_check_path.call_count == 1
+ assert mocked_check_file_contents.call_count == 1
+ assert "ABSOLUTE-URL-REF" in caplog.text
+ assert "http://example.com/reference.html" in caplog.text
+ assert "ref/absolute.html" in caplog.text
+
+
+def test_about_blank_as_ref(caplog):
+ with _mock_lint("check_path"):
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["about_blank.html"], "normal")
+ assert rv == 0
+ assert mocked_check_file_contents.call_count == 1
+ assert caplog.text == ""
+
+
+def test_ref_same_file_empty(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["ref/same_file_empty.html"], "normal")
+ assert rv == 1
+ assert mocked_check_path.call_count == 1
+ assert mocked_check_file_contents.call_count == 1
+ assert "SAME-FILE-REF" in caplog.text
+ assert "same_file_empty.html" in caplog.text
+
+
+def test_ref_same_file_path(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["ref/same_file_path.html"], "normal")
+ assert rv == 1
+ assert mocked_check_path.call_count == 1
+ assert mocked_check_file_contents.call_count == 1
+ assert "SAME-FILE-REF" in caplog.text
+ assert "same_file_path.html" in caplog.text
+
+
+def test_manual_path_testharness(caplog):
+ rv = lint(_dummy_repo, ["tests/relative-testharness-manual.html"], "normal")
+ assert rv == 2
+ assert "TESTHARNESS-PATH" in caplog.text
+ assert "TESTHARNESSREPORT-PATH" in caplog.text
+
+
+def test_css_visual_path_testharness(caplog):
+ rv = lint(_dummy_repo, ["css/css-unique/relative-testharness.html"], "normal")
+ assert rv == 3
+ assert "CONTENT-VISUAL" in caplog.text
+ assert "TESTHARNESS-PATH" in caplog.text
+ assert "TESTHARNESSREPORT-PATH" in caplog.text
+
+
+def test_css_manual_path_testharness(caplog):
+ rv = lint(_dummy_repo, ["css/css-unique/relative-testharness-interact.html"], "normal")
+ assert rv == 3
+ assert "CONTENT-MANUAL" in caplog.text
+ assert "TESTHARNESS-PATH" in caplog.text
+ assert "TESTHARNESSREPORT-PATH" in caplog.text
+
+
+def test_lint_passing_and_failing(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["broken.html", "okay.html"], "normal")
+ assert rv == 1
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert "TRAILING WHITESPACE" in caplog.text
+ assert "broken.html:1" in caplog.text
+ assert "okay.html" not in caplog.text
+
+
+def test_check_css_globally_unique_identical_test(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["css/css-unique/match/a.html", "css/css-unique/a.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert caplog.text == ""
+
+
+def test_check_css_globally_unique_different_test(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["css/css-unique/not-match/a.html", "css/css-unique/a.html"], "normal")
+ assert rv == 2
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert "CSS-COLLIDING-TEST-NAME" in caplog.text
+
+
+def test_check_css_globally_unique_different_spec_test(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["css/css-unique/selectors/a.html", "css/css-unique/a.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert caplog.text == ""
+
+
+def test_check_css_globally_unique_support_ignored(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["css/css-unique/support/a.html", "css/css-unique/support/tools/a.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert caplog.text == ""
+
+
+def test_check_css_globally_unique_support_identical(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["css/css-unique/support/a.html", "css/css-unique/match/support/a.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert caplog.text == ""
+
+
+def test_check_css_globally_unique_support_different(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["css/css-unique/not-match/support/a.html", "css/css-unique/support/a.html"], "normal")
+ assert rv == 2
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert "CSS-COLLIDING-SUPPORT-NAME" in caplog.text
+
+
+def test_check_css_globally_unique_test_support(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["css/css-unique/support/a.html", "css/css-unique/a.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert caplog.text == ""
+
+
+def test_check_css_globally_unique_ref_identical(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["css/css-unique/a-ref.html", "css/css-unique/match/a-ref.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert caplog.text == ""
+
+
+def test_check_css_globally_unique_ref_different(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["css/css-unique/not-match/a-ref.html", "css/css-unique/a-ref.html"], "normal")
+ assert rv == 2
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert "CSS-COLLIDING-REF-NAME" in caplog.text
+
+
+def test_check_css_globally_unique_test_ref(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["css/css-unique/a-ref.html", "css/css-unique/a.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert caplog.text == ""
+
+
+def test_check_css_globally_unique_ignored(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["css/css-unique/tools/a.html", "css/css-unique/not-match/tools/a.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert caplog.text == ""
+
+
+def test_check_css_globally_unique_ignored_dir(caplog):
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["css/css-unique/support/a.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 1
+ assert mocked_check_file_contents.call_count == 1
+ assert caplog.text == ""
+
+
+def test_check_unique_testharness_basename_same_basename(caplog):
+ # Precondition: There are testharness files with conflicting basename paths.
+ assert os.path.exists(os.path.join(_dummy_repo, 'tests', 'dir1', 'a.html'))
+ assert os.path.exists(os.path.join(_dummy_repo, 'tests', 'dir1', 'a.xhtml'))
+
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["tests/dir1/a.html", "tests/dir1/a.xhtml"], "normal")
+ # There will be one failure for each file.
+ assert rv == 2
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert "DUPLICATE-BASENAME-PATH" in caplog.text
+
+
+def test_check_unique_testharness_basename_different_name(caplog):
+ # Precondition: There are two testharness files in the same directory with
+ # different names.
+ assert os.path.exists(os.path.join(_dummy_repo, 'tests', 'dir1', 'a.html'))
+ assert os.path.exists(os.path.join(_dummy_repo, 'tests', 'dir1', 'b.html'))
+
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["tests/dir1/a.html", "tests/dir1/b.html"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert caplog.text == ""
+
+
+def test_check_unique_testharness_basename_different_dir(caplog):
+ # Precondition: There are two testharness files in different directories
+ # with the same basename.
+ assert os.path.exists(os.path.join(_dummy_repo, 'tests', 'dir1', 'a.html'))
+ assert os.path.exists(os.path.join(_dummy_repo, 'tests', 'dir2', 'a.xhtml'))
+
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["tests/dir1/a.html", "tests/dir2/a.xhtml"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert caplog.text == ""
+
+
+def test_check_unique_testharness_basename_not_testharness(caplog):
+ # Precondition: There are non-testharness files with conflicting basename paths.
+ assert os.path.exists(os.path.join(_dummy_repo, 'tests', 'dir1', 'a.html'))
+ assert os.path.exists(os.path.join(_dummy_repo, 'tests', 'dir1', 'a.js'))
+
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["tests/dir1/a.html", "tests/dir1/a.js"], "normal")
+ assert rv == 0
+ assert mocked_check_path.call_count == 2
+ assert mocked_check_file_contents.call_count == 2
+ assert caplog.text == ""
+
+
+def test_ignore_glob(caplog):
+ # Lint two files in the ref/ directory, and pass in ignore_glob to omit one
+ # of them.
+ # When we omit absolute.html, no lint errors appear since the other file is
+ # clean.
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo,
+ ["broken.html", "ref/absolute.html", "ref/existent_relative.html"],
+ "normal",
+ ["broken*", "*solu*"])
+ assert rv == 0
+ # Also confirm that only one file is checked
+ assert mocked_check_path.call_count == 1
+ assert mocked_check_file_contents.call_count == 1
+ assert caplog.text == ""
+ # However, linting the same two files without ignore_glob yields lint errors.
+ with _mock_lint("check_path") as mocked_check_path:
+ with _mock_lint("check_file_contents") as mocked_check_file_contents:
+ rv = lint(_dummy_repo, ["broken.html", "ref/absolute.html", "ref/existent_relative.html"], "normal")
+ assert rv == 2
+ assert mocked_check_path.call_count == 3
+ assert mocked_check_file_contents.call_count == 3
+ assert "TRAILING WHITESPACE" in caplog.text
+ assert "ABSOLUTE-URL-REF" in caplog.text
+
+
+def test_all_filesystem_paths():
+ with mock.patch(
+ 'tools.lint.lint.walk',
+ return_value=[(b'',
+ [(b'dir_a', None), (b'dir_b', None)],
+ [(b'file_a', None), (b'file_b', None)]),
+ (b'dir_a',
+ [],
+ [(b'file_c', None), (b'file_d', None)])]
+ ):
+ got = list(lint_mod.all_filesystem_paths('.'))
+ assert got == ['file_a',
+ 'file_b',
+ os.path.join('dir_a', 'file_c'),
+ os.path.join('dir_a', 'file_d')]
+
+
+def test_filesystem_paths_subdir():
+ with mock.patch(
+ 'tools.lint.lint.walk',
+ return_value=[(b'',
+ [(b'dir_a', None), (b'dir_b', None)],
+ [(b'file_a', None), (b'file_b', None)]),
+ (b'dir_a',
+ [],
+ [(b'file_c', None), (b'file_d', None)])]
+ ):
+ got = list(lint_mod.all_filesystem_paths('.', 'dir'))
+ assert got == [os.path.join('dir', 'file_a'),
+ os.path.join('dir', 'file_b'),
+ os.path.join('dir', 'dir_a', 'file_c'),
+ os.path.join('dir', 'dir_a', 'file_d')]
+
+
+def test_main_with_args():
+ orig_argv = sys.argv
+ try:
+ sys.argv = ['./lint', 'a', 'b', 'c']
+ with mock.patch(lint_mod.__name__ + ".os.path.isfile") as mock_isfile:
+ mock_isfile.return_value = True
+ with _mock_lint('lint', return_value=True) as m:
+ lint_mod.main(**vars(create_parser().parse_args()))
+ m.assert_called_once_with(repo_root,
+ [os.path.relpath(os.path.join(os.getcwd(), x), repo_root)
+ for x in ['a', 'b', 'c']],
+ "normal",
+ None,
+ None,
+ 0)
+ finally:
+ sys.argv = orig_argv
+
+
+def test_main_no_args():
+ orig_argv = sys.argv
+ try:
+ sys.argv = ['./lint']
+ with _mock_lint('lint', return_value=True) as m:
+ with _mock_lint('changed_files', return_value=['foo', 'bar']):
+ lint_mod.main(**vars(create_parser().parse_args()))
+ m.assert_called_once_with(repo_root, ['foo', 'bar'], "normal", None, None, 0)
+ finally:
+ sys.argv = orig_argv
+
+
+def test_main_all():
+ orig_argv = sys.argv
+ try:
+ sys.argv = ['./lint', '--all']
+ with _mock_lint('lint', return_value=True) as m:
+ with _mock_lint('all_filesystem_paths', return_value=['foo', 'bar']):
+ lint_mod.main(**vars(create_parser().parse_args()))
+ m.assert_called_once_with(repo_root, ['foo', 'bar'], "normal", None, None, 0)
+ finally:
+ sys.argv = orig_argv
diff --git a/testing/web-platform/tests/tools/lint/tests/test_path_lints.py b/testing/web-platform/tests/tools/lint/tests/test_path_lints.py
new file mode 100644
index 0000000000..f2cc2409f2
--- /dev/null
+++ b/testing/web-platform/tests/tools/lint/tests/test_path_lints.py
@@ -0,0 +1,167 @@
+# mypy: allow-untyped-defs
+
+import os
+from unittest import mock
+
+from ..lint import check_path, check_unique_case_insensitive_paths
+from .base import check_errors
+import pytest
+
+
+def test_allowed_path_length():
+ basename = 29 * "test/"
+
+ for idx in range(5):
+ filename = basename + idx * "a"
+
+ errors = check_path("/foo/", filename)
+ check_errors(errors)
+ assert errors == []
+
+
+def test_forbidden_path_length():
+ basename = 29 * "test/"
+
+ for idx in range(5, 10):
+ filename = basename + idx * "a"
+ message = f"/{filename} longer than maximum path length ({146 + idx} > 150)"
+
+ errors = check_path("/foo/", filename)
+ check_errors(errors)
+ assert errors == [("PATH LENGTH", message, filename, None)]
+
+
+@pytest.mark.parametrize("path_ending,generated", [(".worker.html", ".worker.js"),
+ (".any.worker.html", ".any.js"),
+ (".any.html", ".any.js")])
+def test_forbidden_path_endings(path_ending, generated):
+ path = "test/test" + path_ending
+
+ message = ("path ends with %s which collides with generated tests from %s files" %
+ (path_ending, generated))
+
+ errors = check_path("/foo/", path)
+ check_errors(errors)
+ assert errors == [("WORKER COLLISION", message, path, None)]
+
+
+def test_file_type():
+ path = "test/test"
+
+ message = f"/{path} is an unsupported file type (symlink)"
+
+ with mock.patch("os.path.islink", returnvalue=True):
+ errors = check_path("/foo/", path)
+
+ assert errors == [("FILE TYPE", message, path, None)]
+
+
+@pytest.mark.parametrize("path", ["ahem.ttf",
+ "Ahem.ttf",
+ "ahem.tTf",
+ "not-ahem.ttf",
+ "support/ahem.ttf",
+ "ahem/other.ttf"])
+def test_ahem_copy(path):
+ expected_error = ("AHEM COPY",
+ "Don't add extra copies of Ahem, use /fonts/Ahem.ttf",
+ path,
+ None)
+
+ errors = check_path("/foo/", path)
+
+ assert errors == [expected_error]
+
+
+@pytest.mark.parametrize("path", ["ahem.woff",
+ "ahem.ttff",
+ "support/ahem.woff",
+ "ahem/other.woff"])
+def test_ahem_copy_negative(path):
+ errors = check_path("/foo/", path)
+
+ assert errors == []
+
+
+def test_mojom_js_file():
+ path = "resources/fake_device.mojom.js"
+ errors = check_path("/foo/", path)
+ assert errors == [("MOJOM-JS",
+ "Don't check *.mojom.js files into WPT",
+ path,
+ None)]
+
+
+@pytest.mark.parametrize("path", ["css/foo.tentative/bar.html",
+ "css/.tentative/bar.html",
+ "css/tentative.bar/baz.html",
+ "css/bar-tentative/baz.html"])
+def test_tentative_directories(path):
+ path = os.path.join(*path.split("/"))
+ expected_error = ("TENTATIVE-DIRECTORY-NAME",
+ "Directories for tentative tests must be named exactly 'tentative'",
+ path,
+ None)
+
+ errors = check_path("/foo/", path)
+
+ assert errors == [expected_error]
+
+
+@pytest.mark.parametrize("path", ["css/bar.html",
+ "css/tentative/baz.html"])
+def test_tentative_directories_negative(path):
+ path = os.path.join(*path.split("/"))
+ errors = check_path("/foo/", path)
+
+ assert errors == []
+
+
+@pytest.mark.parametrize("path", ["elsewhere/.gitignore",
+ "else/where/.gitignore"
+ "elsewhere/tools/.gitignore",
+ "elsewhere/docs/.gitignore",
+ "elsewhere/resources/webidl2/.gitignore",
+ "elsewhere/css/tools/apiclient/.gitignore"])
+def test_gitignore_file(path):
+ path = os.path.join(*path.split("/"))
+
+ expected_error = ("GITIGNORE",
+ ".gitignore found outside the root",
+ path,
+ None)
+
+ errors = check_path("/foo/", path)
+
+ assert errors == [expected_error]
+
+
+@pytest.mark.parametrize("path", [".gitignore",
+ "elsewhere/.gitignores",
+ "elsewhere/name.gitignore",
+ "tools/.gitignore",
+ "tools/elsewhere/.gitignore",
+ "docs/.gitignore"
+ "docs/elsewhere/.gitignore",
+ "resources/webidl2/.gitignore",
+ "resources/webidl2/elsewhere/.gitignore",
+ "css/tools/apiclient/.gitignore",
+ "css/tools/apiclient/elsewhere/.gitignore"])
+def test_gitignore_negative(path):
+ path = os.path.join(*path.split("/"))
+
+ errors = check_path("/foo/", path)
+
+ assert errors == []
+
+
+@pytest.mark.parametrize("paths,errors",
+ [(["a/b.html", "a/B.html"], ["a/B.html"]),
+ (["A/b.html", "a/b.html"], ["a/b.html"]),
+ (["a/b.html", "a/c.html"], [])])
+def test_unique_case_insensitive_paths(paths, errors):
+ got_errors = check_unique_case_insensitive_paths(None, paths)
+ assert len(got_errors) == len(errors)
+ for (name, _, path, _), expected_path in zip(got_errors, errors):
+ assert name == "DUPLICATE-CASE-INSENSITIVE-PATH"
+ assert path == expected_path
diff --git a/testing/web-platform/tests/tools/localpaths.py b/testing/web-platform/tests/tools/localpaths.py
new file mode 100644
index 0000000000..a7b887885b
--- /dev/null
+++ b/testing/web-platform/tests/tools/localpaths.py
@@ -0,0 +1,36 @@
+import os
+import sys
+
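+# Make the repository's vendored third_party packages importable so the tools
+# can run from a plain checkout without installing their dependencies.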
+here = os.path.abspath(os.path.dirname(__file__))
+repo_root = os.path.abspath(os.path.join(here, os.pardir))
+
+sys.path.insert(0, os.path.join(here))
+sys.path.insert(0, os.path.join(here, "wptserve"))
+sys.path.insert(0, os.path.join(here, "third_party", "pywebsocket3"))
+sys.path.insert(0, os.path.join(here, "third_party", "atomicwrites"))
+sys.path.insert(0, os.path.join(here, "third_party", "attrs", "src"))
+sys.path.insert(0, os.path.join(here, "third_party", "funcsigs"))
+sys.path.insert(0, os.path.join(here, "third_party", "html5lib"))
+sys.path.insert(0, os.path.join(here, "third_party", "zipp"))
+sys.path.insert(0, os.path.join(here, "third_party", "more-itertools"))
+sys.path.insert(0, os.path.join(here, "third_party", "packaging"))
+sys.path.insert(0, os.path.join(here, "third_party", "pathlib2"))
+sys.path.insert(0, os.path.join(here, "third_party", "pluggy", "src"))
+sys.path.insert(0, os.path.join(here, "third_party", "py"))
+sys.path.insert(0, os.path.join(here, "third_party", "pytest"))
+sys.path.insert(0, os.path.join(here, "third_party", "pytest", "src"))
+sys.path.insert(0, os.path.join(here, "third_party", "pytest-asyncio"))
+sys.path.insert(0, os.path.join(here, "third_party", "six"))
+sys.path.insert(0, os.path.join(here, "third_party", "webencodings"))
+sys.path.insert(0, os.path.join(here, "third_party", "h2"))
+sys.path.insert(0, os.path.join(here, "third_party", "hpack"))
+sys.path.insert(0, os.path.join(here, "third_party", "hyperframe"))
+sys.path.insert(0, os.path.join(here, "third_party", "certifi"))
+sys.path.insert(0, os.path.join(here, "third_party", "hyper"))
+sys.path.insert(0, os.path.join(here, "third_party", "websockets", "src"))
+sys.path.insert(0, os.path.join(here, "third_party", "iniconfig", "src"))
+if sys.version_info < (3, 8):
+ sys.path.insert(0, os.path.join(here, "third_party", "importlib_metadata"))
+sys.path.insert(0, os.path.join(here, "webdriver"))
+sys.path.insert(0, os.path.join(here, "wptrunner"))
+sys.path.insert(0, os.path.join(here, "webtransport"))
diff --git a/testing/web-platform/tests/tools/manifest/XMLParser.py b/testing/web-platform/tests/tools/manifest/XMLParser.py
new file mode 100644
index 0000000000..689533421d
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/XMLParser.py
@@ -0,0 +1,151 @@
+from os.path import dirname, join
+
+from collections import OrderedDict
+
+from xml.parsers import expat
+import xml.etree.ElementTree as etree # noqa: N813
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Dict
+ from typing import List
+ from typing import Optional
+ from typing import Text
+ from typing import Union
+
+_catalog = join(dirname(__file__), "catalog")
+
+def _wrap_error(e):
+ # type: (expat.error) -> etree.ParseError
+ err = etree.ParseError(e)
+ err.code = e.code
+ err.position = e.lineno, e.offset
+ raise err
+
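+# ParserCreate() in XMLParser.__init__ is given "}" as the namespace separator,
+# so namespaced names arrive from expat as "uri}local"; _fixname prepends "{"
+# to produce the "{uri}local" form that xml.etree.ElementTree expects,
+# memoising the results in _names.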
+_names = {} # type: Dict[Text, Text]
+def _fixname(key):
+ # type: (Text) -> Text
+ try:
+ name = _names[key]
+ except KeyError:
+ name = key
+ if "}" in name:
+ name = "{" + name
+ _names[key] = name
+ return name
+
+
+_undefined_entity_code = expat.errors.codes[expat.errors.XML_ERROR_UNDEFINED_ENTITY] # type: int
+
+
+class XMLParser:
+ """
+ An XML parser with support for XHTML DTDs and all Python-supported encodings
+
+ This implements the API defined by
+ xml.etree.ElementTree.XMLParser, but supports XHTML DTDs
+ (therefore allowing XHTML entities) and supports all encodings
+ Python does, rather than just those supported by expat.
+ """
+ def __init__(self, encoding=None):
+ # type: (Optional[Text]) -> None
+ self._parser = expat.ParserCreate(encoding, "}")
+ self._target = etree.TreeBuilder()
+ # parser settings
+ self._parser.buffer_text = True
+ self._parser.ordered_attributes = True
+ self._parser.SetParamEntityParsing(expat.XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE)
+ # parser callbacks
+ self._parser.XmlDeclHandler = self._xml_decl
+ self._parser.StartElementHandler = self._start
+ self._parser.EndElementHandler = self._end
+ self._parser.CharacterDataHandler = self._data
+ self._parser.ExternalEntityRefHandler = self._external
+ self._parser.SkippedEntityHandler = self._skipped # type: ignore
+ # used for our horrible re-encoding hack
+ self._fed_data = [] # type: Optional[List[bytes]]
+ self._read_encoding = None # type: Optional[Text]
+
+ def _xml_decl(self, version, encoding, standalone):
+ # type: (Text, Optional[Text], int) -> None
+ self._read_encoding = encoding
+
+ def _start(self, tag, attrib_in):
+ # type: (Text, List[str]) -> etree.Element
+ assert isinstance(tag, str)
+ self._fed_data = None
+ tag = _fixname(tag)
+ attrib = OrderedDict() # type: Dict[Union[bytes, Text], Union[bytes, Text]]
+ if attrib_in:
+ for i in range(0, len(attrib_in), 2):
+ attrib[_fixname(attrib_in[i])] = attrib_in[i+1]
+ return self._target.start(tag, attrib)
+
+ def _data(self, text):
+ # type: (Text) -> None
+ self._target.data(text)
+
+ def _end(self, tag):
+ # type: (Text) -> etree.Element
+ return self._target.end(_fixname(tag))
+
+ def _external(self, context, base, system_id, public_id):
+ # type: (Text, Optional[Text], Optional[Text], Optional[Text]) -> bool
+ if public_id in {
+ "-//W3C//DTD XHTML 1.0 Transitional//EN",
+ "-//W3C//DTD XHTML 1.1//EN",
+ "-//W3C//DTD XHTML 1.0 Strict//EN",
+ "-//W3C//DTD XHTML 1.0 Frameset//EN",
+ "-//W3C//DTD XHTML Basic 1.0//EN",
+ "-//W3C//DTD XHTML 1.1 plus MathML 2.0//EN",
+ "-//W3C//DTD XHTML 1.1 plus MathML 2.0 plus SVG 1.1//EN",
+ "-//W3C//DTD MathML 2.0//EN",
+ "-//WAPFORUM//DTD XHTML Mobile 1.0//EN"
+ }:
+ parser = self._parser.ExternalEntityParserCreate(context)
+ with open(join(_catalog, "xhtml.dtd"), "rb") as fp:
+ try:
+ parser.ParseFile(fp)
+ except expat.error:
+ return False
+
+ return True
+
+ def _skipped(self, name, is_parameter_entity):
+ # type: (Text, bool) -> None
+ err = expat.error("undefined entity %s: line %d, column %d" %
+ (name, self._parser.ErrorLineNumber,
+ self._parser.ErrorColumnNumber))
+ err.code = _undefined_entity_code
+ err.lineno = self._parser.ErrorLineNumber
+ err.offset = self._parser.ErrorColumnNumber
+ raise err
+
+ def feed(self, data):
+ # type: (bytes) -> None
+ if self._fed_data is not None:
+ self._fed_data.append(data)
+ try:
+ self._parser.Parse(data, False)
+ except expat.error as v:
+ _wrap_error(v)
+ except ValueError as e:
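+            # expat raises this ValueError for multi-byte encodings it cannot
+            # handle natively. Recover by decoding everything fed so far with
+            # the encoding from the XML declaration, re-encoding as UTF-8, and
+            # re-parsing with a fresh UTF-8 parser (the re-encoding hack noted
+            # in __init__).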
+ if e.args[0] == 'multi-byte encodings are not supported':
+ assert self._read_encoding is not None
+ assert self._fed_data is not None
+ xml = b"".join(self._fed_data).decode(self._read_encoding).encode("utf-8")
+ new_parser = XMLParser("utf-8")
+ self._parser = new_parser._parser
+ self._target = new_parser._target
+ self._fed_data = None
+ self.feed(xml)
+
+ def close(self):
+ # type: () -> etree.Element
+ try:
+ self._parser.Parse("", True)
+ except expat.error as v:
+ _wrap_error(v)
+ tree = self._target.close()
+ return tree
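+
+
+# Illustrative usage sketch (an editorial addition, not part of the upstream
+# module): feed raw bytes and call close() to obtain the root Element, as with
+# xml.etree.ElementTree.XMLParser.
+if __name__ == "__main__":
+    _demo = XMLParser()
+    _demo.feed(b'<html xmlns="http://www.w3.org/1999/xhtml"><body>hello</body></html>')
+    print(_demo.close().tag)  # {http://www.w3.org/1999/xhtml}html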
diff --git a/testing/web-platform/tests/tools/manifest/__init__.py b/testing/web-platform/tests/tools/manifest/__init__.py
new file mode 100644
index 0000000000..8c8f189070
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/__init__.py
@@ -0,0 +1 @@
+from . import item, manifest, sourcefile, update # noqa: F401
diff --git a/testing/web-platform/tests/tools/manifest/catalog/xhtml.dtd b/testing/web-platform/tests/tools/manifest/catalog/xhtml.dtd
new file mode 100644
index 0000000000..4307b1c2c4
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/catalog/xhtml.dtd
@@ -0,0 +1,2125 @@
+<!ENTITY Tab "&#x9;">
+<!ENTITY NewLine "&#xA;">
+<!ENTITY excl "&#x21;">
+<!ENTITY quot "&#x22;">
+<!ENTITY QUOT "&#x22;">
+<!ENTITY num "&#x23;">
+<!ENTITY dollar "&#x24;">
+<!ENTITY percnt "&#x25;">
+<!ENTITY amp "&#x26;#x26;">
+<!ENTITY AMP "&#x26;#x26;">
+<!ENTITY apos "&#x27;">
+<!ENTITY lpar "&#x28;">
+<!ENTITY rpar "&#x29;">
+<!ENTITY ast "&#x2A;">
+<!ENTITY midast "&#x2A;">
+<!ENTITY plus "&#x2B;">
+<!ENTITY comma "&#x2C;">
+<!ENTITY period "&#x2E;">
+<!ENTITY sol "&#x2F;">
+<!ENTITY colon "&#x3A;">
+<!ENTITY semi "&#x3B;">
+<!ENTITY lt "&#x26;#x3C;">
+<!ENTITY LT "&#x26;#x3C;">
+<!ENTITY nvlt "&#x26;#x3C;&#x20D2;">
+<!ENTITY equals "&#x3D;">
+<!ENTITY bne "&#x3D;&#x20E5;">
+<!ENTITY gt "&#x3E;">
+<!ENTITY GT "&#x3E;">
+<!ENTITY nvgt "&#x3E;&#x20D2;">
+<!ENTITY quest "&#x3F;">
+<!ENTITY commat "&#x40;">
+<!ENTITY lsqb "&#x5B;">
+<!ENTITY lbrack "&#x5B;">
+<!ENTITY bsol "&#x5C;">
+<!ENTITY rsqb "&#x5D;">
+<!ENTITY rbrack "&#x5D;">
+<!ENTITY Hat "&#x5E;">
+<!ENTITY lowbar "&#x5F;">
+<!ENTITY UnderBar "&#x5F;">
+<!ENTITY grave "&#x60;">
+<!ENTITY DiacriticalGrave "&#x60;">
+<!ENTITY fjlig "&#x66;&#x6A;">
+<!ENTITY lcub "&#x7B;">
+<!ENTITY lbrace "&#x7B;">
+<!ENTITY verbar "&#x7C;">
+<!ENTITY vert "&#x7C;">
+<!ENTITY VerticalLine "&#x7C;">
+<!ENTITY rcub "&#x7D;">
+<!ENTITY rbrace "&#x7D;">
+<!ENTITY nbsp "&#xA0;">
+<!ENTITY NonBreakingSpace "&#xA0;">
+<!ENTITY iexcl "&#xA1;">
+<!ENTITY cent "&#xA2;">
+<!ENTITY pound "&#xA3;">
+<!ENTITY curren "&#xA4;">
+<!ENTITY yen "&#xA5;">
+<!ENTITY brvbar "&#xA6;">
+<!ENTITY sect "&#xA7;">
+<!ENTITY Dot "&#xA8;">
+<!ENTITY die "&#xA8;">
+<!ENTITY DoubleDot "&#xA8;">
+<!ENTITY uml "&#xA8;">
+<!ENTITY copy "&#xA9;">
+<!ENTITY COPY "&#xA9;">
+<!ENTITY ordf "&#xAA;">
+<!ENTITY laquo "&#xAB;">
+<!ENTITY not "&#xAC;">
+<!ENTITY shy "&#xAD;">
+<!ENTITY reg "&#xAE;">
+<!ENTITY circledR "&#xAE;">
+<!ENTITY REG "&#xAE;">
+<!ENTITY macr "&#xAF;">
+<!ENTITY strns "&#xAF;">
+<!ENTITY deg "&#xB0;">
+<!ENTITY plusmn "&#xB1;">
+<!ENTITY pm "&#xB1;">
+<!ENTITY PlusMinus "&#xB1;">
+<!ENTITY sup2 "&#xB2;">
+<!ENTITY sup3 "&#xB3;">
+<!ENTITY acute "&#xB4;">
+<!ENTITY DiacriticalAcute "&#xB4;">
+<!ENTITY micro "&#xB5;">
+<!ENTITY para "&#xB6;">
+<!ENTITY middot "&#xB7;">
+<!ENTITY centerdot "&#xB7;">
+<!ENTITY CenterDot "&#xB7;">
+<!ENTITY cedil "&#xB8;">
+<!ENTITY Cedilla "&#xB8;">
+<!ENTITY sup1 "&#xB9;">
+<!ENTITY ordm "&#xBA;">
+<!ENTITY raquo "&#xBB;">
+<!ENTITY frac14 "&#xBC;">
+<!ENTITY frac12 "&#xBD;">
+<!ENTITY half "&#xBD;">
+<!ENTITY frac34 "&#xBE;">
+<!ENTITY iquest "&#xBF;">
+<!ENTITY Agrave "&#xC0;">
+<!ENTITY Aacute "&#xC1;">
+<!ENTITY Acirc "&#xC2;">
+<!ENTITY Atilde "&#xC3;">
+<!ENTITY Auml "&#xC4;">
+<!ENTITY Aring "&#xC5;">
+<!ENTITY angst "&#xC5;">
+<!ENTITY AElig "&#xC6;">
+<!ENTITY Ccedil "&#xC7;">
+<!ENTITY Egrave "&#xC8;">
+<!ENTITY Eacute "&#xC9;">
+<!ENTITY Ecirc "&#xCA;">
+<!ENTITY Euml "&#xCB;">
+<!ENTITY Igrave "&#xCC;">
+<!ENTITY Iacute "&#xCD;">
+<!ENTITY Icirc "&#xCE;">
+<!ENTITY Iuml "&#xCF;">
+<!ENTITY ETH "&#xD0;">
+<!ENTITY Ntilde "&#xD1;">
+<!ENTITY Ograve "&#xD2;">
+<!ENTITY Oacute "&#xD3;">
+<!ENTITY Ocirc "&#xD4;">
+<!ENTITY Otilde "&#xD5;">
+<!ENTITY Ouml "&#xD6;">
+<!ENTITY times "&#xD7;">
+<!ENTITY Oslash "&#xD8;">
+<!ENTITY Ugrave "&#xD9;">
+<!ENTITY Uacute "&#xDA;">
+<!ENTITY Ucirc "&#xDB;">
+<!ENTITY Uuml "&#xDC;">
+<!ENTITY Yacute "&#xDD;">
+<!ENTITY THORN "&#xDE;">
+<!ENTITY szlig "&#xDF;">
+<!ENTITY agrave "&#xE0;">
+<!ENTITY aacute "&#xE1;">
+<!ENTITY acirc "&#xE2;">
+<!ENTITY atilde "&#xE3;">
+<!ENTITY auml "&#xE4;">
+<!ENTITY aring "&#xE5;">
+<!ENTITY aelig "&#xE6;">
+<!ENTITY ccedil "&#xE7;">
+<!ENTITY egrave "&#xE8;">
+<!ENTITY eacute "&#xE9;">
+<!ENTITY ecirc "&#xEA;">
+<!ENTITY euml "&#xEB;">
+<!ENTITY igrave "&#xEC;">
+<!ENTITY iacute "&#xED;">
+<!ENTITY icirc "&#xEE;">
+<!ENTITY iuml "&#xEF;">
+<!ENTITY eth "&#xF0;">
+<!ENTITY ntilde "&#xF1;">
+<!ENTITY ograve "&#xF2;">
+<!ENTITY oacute "&#xF3;">
+<!ENTITY ocirc "&#xF4;">
+<!ENTITY otilde "&#xF5;">
+<!ENTITY ouml "&#xF6;">
+<!ENTITY divide "&#xF7;">
+<!ENTITY div "&#xF7;">
+<!ENTITY oslash "&#xF8;">
+<!ENTITY ugrave "&#xF9;">
+<!ENTITY uacute "&#xFA;">
+<!ENTITY ucirc "&#xFB;">
+<!ENTITY uuml "&#xFC;">
+<!ENTITY yacute "&#xFD;">
+<!ENTITY thorn "&#xFE;">
+<!ENTITY yuml "&#xFF;">
+<!ENTITY Amacr "&#x100;">
+<!ENTITY amacr "&#x101;">
+<!ENTITY Abreve "&#x102;">
+<!ENTITY abreve "&#x103;">
+<!ENTITY Aogon "&#x104;">
+<!ENTITY aogon "&#x105;">
+<!ENTITY Cacute "&#x106;">
+<!ENTITY cacute "&#x107;">
+<!ENTITY Ccirc "&#x108;">
+<!ENTITY ccirc "&#x109;">
+<!ENTITY Cdot "&#x10A;">
+<!ENTITY cdot "&#x10B;">
+<!ENTITY Ccaron "&#x10C;">
+<!ENTITY ccaron "&#x10D;">
+<!ENTITY Dcaron "&#x10E;">
+<!ENTITY dcaron "&#x10F;">
+<!ENTITY Dstrok "&#x110;">
+<!ENTITY dstrok "&#x111;">
+<!ENTITY Emacr "&#x112;">
+<!ENTITY emacr "&#x113;">
+<!ENTITY Edot "&#x116;">
+<!ENTITY edot "&#x117;">
+<!ENTITY Eogon "&#x118;">
+<!ENTITY eogon "&#x119;">
+<!ENTITY Ecaron "&#x11A;">
+<!ENTITY ecaron "&#x11B;">
+<!ENTITY Gcirc "&#x11C;">
+<!ENTITY gcirc "&#x11D;">
+<!ENTITY Gbreve "&#x11E;">
+<!ENTITY gbreve "&#x11F;">
+<!ENTITY Gdot "&#x120;">
+<!ENTITY gdot "&#x121;">
+<!ENTITY Gcedil "&#x122;">
+<!ENTITY Hcirc "&#x124;">
+<!ENTITY hcirc "&#x125;">
+<!ENTITY Hstrok "&#x126;">
+<!ENTITY hstrok "&#x127;">
+<!ENTITY Itilde "&#x128;">
+<!ENTITY itilde "&#x129;">
+<!ENTITY Imacr "&#x12A;">
+<!ENTITY imacr "&#x12B;">
+<!ENTITY Iogon "&#x12E;">
+<!ENTITY iogon "&#x12F;">
+<!ENTITY Idot "&#x130;">
+<!ENTITY imath "&#x131;">
+<!ENTITY inodot "&#x131;">
+<!ENTITY IJlig "&#x132;">
+<!ENTITY ijlig "&#x133;">
+<!ENTITY Jcirc "&#x134;">
+<!ENTITY jcirc "&#x135;">
+<!ENTITY Kcedil "&#x136;">
+<!ENTITY kcedil "&#x137;">
+<!ENTITY kgreen "&#x138;">
+<!ENTITY Lacute "&#x139;">
+<!ENTITY lacute "&#x13A;">
+<!ENTITY Lcedil "&#x13B;">
+<!ENTITY lcedil "&#x13C;">
+<!ENTITY Lcaron "&#x13D;">
+<!ENTITY lcaron "&#x13E;">
+<!ENTITY Lmidot "&#x13F;">
+<!ENTITY lmidot "&#x140;">
+<!ENTITY Lstrok "&#x141;">
+<!ENTITY lstrok "&#x142;">
+<!ENTITY Nacute "&#x143;">
+<!ENTITY nacute "&#x144;">
+<!ENTITY Ncedil "&#x145;">
+<!ENTITY ncedil "&#x146;">
+<!ENTITY Ncaron "&#x147;">
+<!ENTITY ncaron "&#x148;">
+<!ENTITY napos "&#x149;">
+<!ENTITY ENG "&#x14A;">
+<!ENTITY eng "&#x14B;">
+<!ENTITY Omacr "&#x14C;">
+<!ENTITY omacr "&#x14D;">
+<!ENTITY Odblac "&#x150;">
+<!ENTITY odblac "&#x151;">
+<!ENTITY OElig "&#x152;">
+<!ENTITY oelig "&#x153;">
+<!ENTITY Racute "&#x154;">
+<!ENTITY racute "&#x155;">
+<!ENTITY Rcedil "&#x156;">
+<!ENTITY rcedil "&#x157;">
+<!ENTITY Rcaron "&#x158;">
+<!ENTITY rcaron "&#x159;">
+<!ENTITY Sacute "&#x15A;">
+<!ENTITY sacute "&#x15B;">
+<!ENTITY Scirc "&#x15C;">
+<!ENTITY scirc "&#x15D;">
+<!ENTITY Scedil "&#x15E;">
+<!ENTITY scedil "&#x15F;">
+<!ENTITY Scaron "&#x160;">
+<!ENTITY scaron "&#x161;">
+<!ENTITY Tcedil "&#x162;">
+<!ENTITY tcedil "&#x163;">
+<!ENTITY Tcaron "&#x164;">
+<!ENTITY tcaron "&#x165;">
+<!ENTITY Tstrok "&#x166;">
+<!ENTITY tstrok "&#x167;">
+<!ENTITY Utilde "&#x168;">
+<!ENTITY utilde "&#x169;">
+<!ENTITY Umacr "&#x16A;">
+<!ENTITY umacr "&#x16B;">
+<!ENTITY Ubreve "&#x16C;">
+<!ENTITY ubreve "&#x16D;">
+<!ENTITY Uring "&#x16E;">
+<!ENTITY uring "&#x16F;">
+<!ENTITY Udblac "&#x170;">
+<!ENTITY udblac "&#x171;">
+<!ENTITY Uogon "&#x172;">
+<!ENTITY uogon "&#x173;">
+<!ENTITY Wcirc "&#x174;">
+<!ENTITY wcirc "&#x175;">
+<!ENTITY Ycirc "&#x176;">
+<!ENTITY ycirc "&#x177;">
+<!ENTITY Yuml "&#x178;">
+<!ENTITY Zacute "&#x179;">
+<!ENTITY zacute "&#x17A;">
+<!ENTITY Zdot "&#x17B;">
+<!ENTITY zdot "&#x17C;">
+<!ENTITY Zcaron "&#x17D;">
+<!ENTITY zcaron "&#x17E;">
+<!ENTITY fnof "&#x192;">
+<!ENTITY imped "&#x1B5;">
+<!ENTITY gacute "&#x1F5;">
+<!ENTITY jmath "&#x237;">
+<!ENTITY circ "&#x2C6;">
+<!ENTITY caron "&#x2C7;">
+<!ENTITY Hacek "&#x2C7;">
+<!ENTITY breve "&#x2D8;">
+<!ENTITY Breve "&#x2D8;">
+<!ENTITY dot "&#x2D9;">
+<!ENTITY DiacriticalDot "&#x2D9;">
+<!ENTITY ring "&#x2DA;">
+<!ENTITY ogon "&#x2DB;">
+<!ENTITY tilde "&#x2DC;">
+<!ENTITY DiacriticalTilde "&#x2DC;">
+<!ENTITY dblac "&#x2DD;">
+<!ENTITY DiacriticalDoubleAcute "&#x2DD;">
+<!ENTITY DownBreve "&#x311;">
+<!ENTITY Alpha "&#x391;">
+<!ENTITY Beta "&#x392;">
+<!ENTITY Gamma "&#x393;">
+<!ENTITY Delta "&#x394;">
+<!ENTITY Epsilon "&#x395;">
+<!ENTITY Zeta "&#x396;">
+<!ENTITY Eta "&#x397;">
+<!ENTITY Theta "&#x398;">
+<!ENTITY Iota "&#x399;">
+<!ENTITY Kappa "&#x39A;">
+<!ENTITY Lambda "&#x39B;">
+<!ENTITY Mu "&#x39C;">
+<!ENTITY Nu "&#x39D;">
+<!ENTITY Xi "&#x39E;">
+<!ENTITY Omicron "&#x39F;">
+<!ENTITY Pi "&#x3A0;">
+<!ENTITY Rho "&#x3A1;">
+<!ENTITY Sigma "&#x3A3;">
+<!ENTITY Tau "&#x3A4;">
+<!ENTITY Upsilon "&#x3A5;">
+<!ENTITY Phi "&#x3A6;">
+<!ENTITY Chi "&#x3A7;">
+<!ENTITY Psi "&#x3A8;">
+<!ENTITY Omega "&#x3A9;">
+<!ENTITY ohm "&#x3A9;">
+<!ENTITY alpha "&#x3B1;">
+<!ENTITY beta "&#x3B2;">
+<!ENTITY gamma "&#x3B3;">
+<!ENTITY delta "&#x3B4;">
+<!ENTITY epsi "&#x3B5;">
+<!ENTITY epsilon "&#x3B5;">
+<!ENTITY zeta "&#x3B6;">
+<!ENTITY eta "&#x3B7;">
+<!ENTITY theta "&#x3B8;">
+<!ENTITY iota "&#x3B9;">
+<!ENTITY kappa "&#x3BA;">
+<!ENTITY lambda "&#x3BB;">
+<!ENTITY mu "&#x3BC;">
+<!ENTITY nu "&#x3BD;">
+<!ENTITY xi "&#x3BE;">
+<!ENTITY omicron "&#x3BF;">
+<!ENTITY pi "&#x3C0;">
+<!ENTITY rho "&#x3C1;">
+<!ENTITY sigmav "&#x3C2;">
+<!ENTITY varsigma "&#x3C2;">
+<!ENTITY sigmaf "&#x3C2;">
+<!ENTITY sigma "&#x3C3;">
+<!ENTITY tau "&#x3C4;">
+<!ENTITY upsi "&#x3C5;">
+<!ENTITY upsilon "&#x3C5;">
+<!ENTITY phi "&#x3C6;">
+<!ENTITY chi "&#x3C7;">
+<!ENTITY psi "&#x3C8;">
+<!ENTITY omega "&#x3C9;">
+<!ENTITY thetav "&#x3D1;">
+<!ENTITY vartheta "&#x3D1;">
+<!ENTITY thetasym "&#x3D1;">
+<!ENTITY Upsi "&#x3D2;">
+<!ENTITY upsih "&#x3D2;">
+<!ENTITY straightphi "&#x3D5;">
+<!ENTITY phiv "&#x3D5;">
+<!ENTITY varphi "&#x3D5;">
+<!ENTITY piv "&#x3D6;">
+<!ENTITY varpi "&#x3D6;">
+<!ENTITY Gammad "&#x3DC;">
+<!ENTITY gammad "&#x3DD;">
+<!ENTITY digamma "&#x3DD;">
+<!ENTITY kappav "&#x3F0;">
+<!ENTITY varkappa "&#x3F0;">
+<!ENTITY rhov "&#x3F1;">
+<!ENTITY varrho "&#x3F1;">
+<!ENTITY epsiv "&#x3F5;">
+<!ENTITY straightepsilon "&#x3F5;">
+<!ENTITY varepsilon "&#x3F5;">
+<!ENTITY bepsi "&#x3F6;">
+<!ENTITY backepsilon "&#x3F6;">
+<!ENTITY IOcy "&#x401;">
+<!ENTITY DJcy "&#x402;">
+<!ENTITY GJcy "&#x403;">
+<!ENTITY Jukcy "&#x404;">
+<!ENTITY DScy "&#x405;">
+<!ENTITY Iukcy "&#x406;">
+<!ENTITY YIcy "&#x407;">
+<!ENTITY Jsercy "&#x408;">
+<!ENTITY LJcy "&#x409;">
+<!ENTITY NJcy "&#x40A;">
+<!ENTITY TSHcy "&#x40B;">
+<!ENTITY KJcy "&#x40C;">
+<!ENTITY Ubrcy "&#x40E;">
+<!ENTITY DZcy "&#x40F;">
+<!ENTITY Acy "&#x410;">
+<!ENTITY Bcy "&#x411;">
+<!ENTITY Vcy "&#x412;">
+<!ENTITY Gcy "&#x413;">
+<!ENTITY Dcy "&#x414;">
+<!ENTITY IEcy "&#x415;">
+<!ENTITY ZHcy "&#x416;">
+<!ENTITY Zcy "&#x417;">
+<!ENTITY Icy "&#x418;">
+<!ENTITY Jcy "&#x419;">
+<!ENTITY Kcy "&#x41A;">
+<!ENTITY Lcy "&#x41B;">
+<!ENTITY Mcy "&#x41C;">
+<!ENTITY Ncy "&#x41D;">
+<!ENTITY Ocy "&#x41E;">
+<!ENTITY Pcy "&#x41F;">
+<!ENTITY Rcy "&#x420;">
+<!ENTITY Scy "&#x421;">
+<!ENTITY Tcy "&#x422;">
+<!ENTITY Ucy "&#x423;">
+<!ENTITY Fcy "&#x424;">
+<!ENTITY KHcy "&#x425;">
+<!ENTITY TScy "&#x426;">
+<!ENTITY CHcy "&#x427;">
+<!ENTITY SHcy "&#x428;">
+<!ENTITY SHCHcy "&#x429;">
+<!ENTITY HARDcy "&#x42A;">
+<!ENTITY Ycy "&#x42B;">
+<!ENTITY SOFTcy "&#x42C;">
+<!ENTITY Ecy "&#x42D;">
+<!ENTITY YUcy "&#x42E;">
+<!ENTITY YAcy "&#x42F;">
+<!ENTITY acy "&#x430;">
+<!ENTITY bcy "&#x431;">
+<!ENTITY vcy "&#x432;">
+<!ENTITY gcy "&#x433;">
+<!ENTITY dcy "&#x434;">
+<!ENTITY iecy "&#x435;">
+<!ENTITY zhcy "&#x436;">
+<!ENTITY zcy "&#x437;">
+<!ENTITY icy "&#x438;">
+<!ENTITY jcy "&#x439;">
+<!ENTITY kcy "&#x43A;">
+<!ENTITY lcy "&#x43B;">
+<!ENTITY mcy "&#x43C;">
+<!ENTITY ncy "&#x43D;">
+<!ENTITY ocy "&#x43E;">
+<!ENTITY pcy "&#x43F;">
+<!ENTITY rcy "&#x440;">
+<!ENTITY scy "&#x441;">
+<!ENTITY tcy "&#x442;">
+<!ENTITY ucy "&#x443;">
+<!ENTITY fcy "&#x444;">
+<!ENTITY khcy "&#x445;">
+<!ENTITY tscy "&#x446;">
+<!ENTITY chcy "&#x447;">
+<!ENTITY shcy "&#x448;">
+<!ENTITY shchcy "&#x449;">
+<!ENTITY hardcy "&#x44A;">
+<!ENTITY ycy "&#x44B;">
+<!ENTITY softcy "&#x44C;">
+<!ENTITY ecy "&#x44D;">
+<!ENTITY yucy "&#x44E;">
+<!ENTITY yacy "&#x44F;">
+<!ENTITY iocy "&#x451;">
+<!ENTITY djcy "&#x452;">
+<!ENTITY gjcy "&#x453;">
+<!ENTITY jukcy "&#x454;">
+<!ENTITY dscy "&#x455;">
+<!ENTITY iukcy "&#x456;">
+<!ENTITY yicy "&#x457;">
+<!ENTITY jsercy "&#x458;">
+<!ENTITY ljcy "&#x459;">
+<!ENTITY njcy "&#x45A;">
+<!ENTITY tshcy "&#x45B;">
+<!ENTITY kjcy "&#x45C;">
+<!ENTITY ubrcy "&#x45E;">
+<!ENTITY dzcy "&#x45F;">
+<!ENTITY ensp "&#x2002;">
+<!ENTITY emsp "&#x2003;">
+<!ENTITY emsp13 "&#x2004;">
+<!ENTITY emsp14 "&#x2005;">
+<!ENTITY numsp "&#x2007;">
+<!ENTITY puncsp "&#x2008;">
+<!ENTITY thinsp "&#x2009;">
+<!ENTITY ThinSpace "&#x2009;">
+<!ENTITY hairsp "&#x200A;">
+<!ENTITY VeryThinSpace "&#x200A;">
+<!ENTITY ZeroWidthSpace "&#x200B;">
+<!ENTITY NegativeVeryThinSpace "&#x200B;">
+<!ENTITY NegativeThinSpace "&#x200B;">
+<!ENTITY NegativeMediumSpace "&#x200B;">
+<!ENTITY NegativeThickSpace "&#x200B;">
+<!ENTITY zwnj "&#x200C;">
+<!ENTITY zwj "&#x200D;">
+<!ENTITY lrm "&#x200E;">
+<!ENTITY rlm "&#x200F;">
+<!ENTITY hyphen "&#x2010;">
+<!ENTITY dash "&#x2010;">
+<!ENTITY ndash "&#x2013;">
+<!ENTITY mdash "&#x2014;">
+<!ENTITY horbar "&#x2015;">
+<!ENTITY Verbar "&#x2016;">
+<!ENTITY Vert "&#x2016;">
+<!ENTITY lsquo "&#x2018;">
+<!ENTITY OpenCurlyQuote "&#x2018;">
+<!ENTITY rsquo "&#x2019;">
+<!ENTITY rsquor "&#x2019;">
+<!ENTITY CloseCurlyQuote "&#x2019;">
+<!ENTITY lsquor "&#x201A;">
+<!ENTITY sbquo "&#x201A;">
+<!ENTITY ldquo "&#x201C;">
+<!ENTITY OpenCurlyDoubleQuote "&#x201C;">
+<!ENTITY rdquo "&#x201D;">
+<!ENTITY rdquor "&#x201D;">
+<!ENTITY CloseCurlyDoubleQuote "&#x201D;">
+<!ENTITY ldquor "&#x201E;">
+<!ENTITY bdquo "&#x201E;">
+<!ENTITY dagger "&#x2020;">
+<!ENTITY Dagger "&#x2021;">
+<!ENTITY ddagger "&#x2021;">
+<!ENTITY bull "&#x2022;">
+<!ENTITY bullet "&#x2022;">
+<!ENTITY nldr "&#x2025;">
+<!ENTITY hellip "&#x2026;">
+<!ENTITY mldr "&#x2026;">
+<!ENTITY permil "&#x2030;">
+<!ENTITY pertenk "&#x2031;">
+<!ENTITY prime "&#x2032;">
+<!ENTITY Prime "&#x2033;">
+<!ENTITY tprime "&#x2034;">
+<!ENTITY bprime "&#x2035;">
+<!ENTITY backprime "&#x2035;">
+<!ENTITY lsaquo "&#x2039;">
+<!ENTITY rsaquo "&#x203A;">
+<!ENTITY oline "&#x203E;">
+<!ENTITY OverBar "&#x203E;">
+<!ENTITY caret "&#x2041;">
+<!ENTITY hybull "&#x2043;">
+<!ENTITY frasl "&#x2044;">
+<!ENTITY bsemi "&#x204F;">
+<!ENTITY qprime "&#x2057;">
+<!ENTITY MediumSpace "&#x205F;">
+<!ENTITY ThickSpace "&#x205F;&#x200A;">
+<!ENTITY NoBreak "&#x2060;">
+<!ENTITY ApplyFunction "&#x2061;">
+<!ENTITY af "&#x2061;">
+<!ENTITY InvisibleTimes "&#x2062;">
+<!ENTITY it "&#x2062;">
+<!ENTITY InvisibleComma "&#x2063;">
+<!ENTITY ic "&#x2063;">
+<!ENTITY euro "&#x20AC;">
+<!ENTITY tdot "&#x20DB;">
+<!ENTITY TripleDot "&#x20DB;">
+<!ENTITY DotDot "&#x20DC;">
+<!ENTITY Copf "&#x2102;">
+<!ENTITY complexes "&#x2102;">
+<!ENTITY incare "&#x2105;">
+<!ENTITY gscr "&#x210A;">
+<!ENTITY hamilt "&#x210B;">
+<!ENTITY HilbertSpace "&#x210B;">
+<!ENTITY Hscr "&#x210B;">
+<!ENTITY Hfr "&#x210C;">
+<!ENTITY Poincareplane "&#x210C;">
+<!ENTITY quaternions "&#x210D;">
+<!ENTITY Hopf "&#x210D;">
+<!ENTITY planckh "&#x210E;">
+<!ENTITY planck "&#x210F;">
+<!ENTITY hbar "&#x210F;">
+<!ENTITY plankv "&#x210F;">
+<!ENTITY hslash "&#x210F;">
+<!ENTITY Iscr "&#x2110;">
+<!ENTITY imagline "&#x2110;">
+<!ENTITY image "&#x2111;">
+<!ENTITY Im "&#x2111;">
+<!ENTITY imagpart "&#x2111;">
+<!ENTITY Ifr "&#x2111;">
+<!ENTITY Lscr "&#x2112;">
+<!ENTITY lagran "&#x2112;">
+<!ENTITY Laplacetrf "&#x2112;">
+<!ENTITY ell "&#x2113;">
+<!ENTITY Nopf "&#x2115;">
+<!ENTITY naturals "&#x2115;">
+<!ENTITY numero "&#x2116;">
+<!ENTITY copysr "&#x2117;">
+<!ENTITY weierp "&#x2118;">
+<!ENTITY wp "&#x2118;">
+<!ENTITY Popf "&#x2119;">
+<!ENTITY primes "&#x2119;">
+<!ENTITY rationals "&#x211A;">
+<!ENTITY Qopf "&#x211A;">
+<!ENTITY Rscr "&#x211B;">
+<!ENTITY realine "&#x211B;">
+<!ENTITY real "&#x211C;">
+<!ENTITY Re "&#x211C;">
+<!ENTITY realpart "&#x211C;">
+<!ENTITY Rfr "&#x211C;">
+<!ENTITY reals "&#x211D;">
+<!ENTITY Ropf "&#x211D;">
+<!ENTITY rx "&#x211E;">
+<!ENTITY trade "&#x2122;">
+<!ENTITY TRADE "&#x2122;">
+<!ENTITY integers "&#x2124;">
+<!ENTITY Zopf "&#x2124;">
+<!ENTITY mho "&#x2127;">
+<!ENTITY Zfr "&#x2128;">
+<!ENTITY zeetrf "&#x2128;">
+<!ENTITY iiota "&#x2129;">
+<!ENTITY bernou "&#x212C;">
+<!ENTITY Bernoullis "&#x212C;">
+<!ENTITY Bscr "&#x212C;">
+<!ENTITY Cfr "&#x212D;">
+<!ENTITY Cayleys "&#x212D;">
+<!ENTITY escr "&#x212F;">
+<!ENTITY Escr "&#x2130;">
+<!ENTITY expectation "&#x2130;">
+<!ENTITY Fscr "&#x2131;">
+<!ENTITY Fouriertrf "&#x2131;">
+<!ENTITY phmmat "&#x2133;">
+<!ENTITY Mellintrf "&#x2133;">
+<!ENTITY Mscr "&#x2133;">
+<!ENTITY order "&#x2134;">
+<!ENTITY orderof "&#x2134;">
+<!ENTITY oscr "&#x2134;">
+<!ENTITY alefsym "&#x2135;">
+<!ENTITY aleph "&#x2135;">
+<!ENTITY beth "&#x2136;">
+<!ENTITY gimel "&#x2137;">
+<!ENTITY daleth "&#x2138;">
+<!ENTITY CapitalDifferentialD "&#x2145;">
+<!ENTITY DD "&#x2145;">
+<!ENTITY DifferentialD "&#x2146;">
+<!ENTITY dd "&#x2146;">
+<!ENTITY ExponentialE "&#x2147;">
+<!ENTITY exponentiale "&#x2147;">
+<!ENTITY ee "&#x2147;">
+<!ENTITY ImaginaryI "&#x2148;">
+<!ENTITY ii "&#x2148;">
+<!ENTITY frac13 "&#x2153;">
+<!ENTITY frac23 "&#x2154;">
+<!ENTITY frac15 "&#x2155;">
+<!ENTITY frac25 "&#x2156;">
+<!ENTITY frac35 "&#x2157;">
+<!ENTITY frac45 "&#x2158;">
+<!ENTITY frac16 "&#x2159;">
+<!ENTITY frac56 "&#x215A;">
+<!ENTITY frac18 "&#x215B;">
+<!ENTITY frac38 "&#x215C;">
+<!ENTITY frac58 "&#x215D;">
+<!ENTITY frac78 "&#x215E;">
+<!ENTITY larr "&#x2190;">
+<!ENTITY leftarrow "&#x2190;">
+<!ENTITY LeftArrow "&#x2190;">
+<!ENTITY slarr "&#x2190;">
+<!ENTITY ShortLeftArrow "&#x2190;">
+<!ENTITY uarr "&#x2191;">
+<!ENTITY uparrow "&#x2191;">
+<!ENTITY UpArrow "&#x2191;">
+<!ENTITY ShortUpArrow "&#x2191;">
+<!ENTITY rarr "&#x2192;">
+<!ENTITY rightarrow "&#x2192;">
+<!ENTITY RightArrow "&#x2192;">
+<!ENTITY srarr "&#x2192;">
+<!ENTITY ShortRightArrow "&#x2192;">
+<!ENTITY darr "&#x2193;">
+<!ENTITY downarrow "&#x2193;">
+<!ENTITY DownArrow "&#x2193;">
+<!ENTITY ShortDownArrow "&#x2193;">
+<!ENTITY harr "&#x2194;">
+<!ENTITY leftrightarrow "&#x2194;">
+<!ENTITY LeftRightArrow "&#x2194;">
+<!ENTITY varr "&#x2195;">
+<!ENTITY updownarrow "&#x2195;">
+<!ENTITY UpDownArrow "&#x2195;">
+<!ENTITY nwarr "&#x2196;">
+<!ENTITY UpperLeftArrow "&#x2196;">
+<!ENTITY nwarrow "&#x2196;">
+<!ENTITY nearr "&#x2197;">
+<!ENTITY UpperRightArrow "&#x2197;">
+<!ENTITY nearrow "&#x2197;">
+<!ENTITY searr "&#x2198;">
+<!ENTITY searrow "&#x2198;">
+<!ENTITY LowerRightArrow "&#x2198;">
+<!ENTITY swarr "&#x2199;">
+<!ENTITY swarrow "&#x2199;">
+<!ENTITY LowerLeftArrow "&#x2199;">
+<!ENTITY nlarr "&#x219A;">
+<!ENTITY nleftarrow "&#x219A;">
+<!ENTITY nrarr "&#x219B;">
+<!ENTITY nrightarrow "&#x219B;">
+<!ENTITY rarrw "&#x219D;">
+<!ENTITY rightsquigarrow "&#x219D;">
+<!ENTITY nrarrw "&#x219D;&#x338;">
+<!ENTITY Larr "&#x219E;">
+<!ENTITY twoheadleftarrow "&#x219E;">
+<!ENTITY Uarr "&#x219F;">
+<!ENTITY Rarr "&#x21A0;">
+<!ENTITY twoheadrightarrow "&#x21A0;">
+<!ENTITY Darr "&#x21A1;">
+<!ENTITY larrtl "&#x21A2;">
+<!ENTITY leftarrowtail "&#x21A2;">
+<!ENTITY rarrtl "&#x21A3;">
+<!ENTITY rightarrowtail "&#x21A3;">
+<!ENTITY LeftTeeArrow "&#x21A4;">
+<!ENTITY mapstoleft "&#x21A4;">
+<!ENTITY UpTeeArrow "&#x21A5;">
+<!ENTITY mapstoup "&#x21A5;">
+<!ENTITY map "&#x21A6;">
+<!ENTITY RightTeeArrow "&#x21A6;">
+<!ENTITY mapsto "&#x21A6;">
+<!ENTITY DownTeeArrow "&#x21A7;">
+<!ENTITY mapstodown "&#x21A7;">
+<!ENTITY larrhk "&#x21A9;">
+<!ENTITY hookleftarrow "&#x21A9;">
+<!ENTITY rarrhk "&#x21AA;">
+<!ENTITY hookrightarrow "&#x21AA;">
+<!ENTITY larrlp "&#x21AB;">
+<!ENTITY looparrowleft "&#x21AB;">
+<!ENTITY rarrlp "&#x21AC;">
+<!ENTITY looparrowright "&#x21AC;">
+<!ENTITY harrw "&#x21AD;">
+<!ENTITY leftrightsquigarrow "&#x21AD;">
+<!ENTITY nharr "&#x21AE;">
+<!ENTITY nleftrightarrow "&#x21AE;">
+<!ENTITY lsh "&#x21B0;">
+<!ENTITY Lsh "&#x21B0;">
+<!ENTITY rsh "&#x21B1;">
+<!ENTITY Rsh "&#x21B1;">
+<!ENTITY ldsh "&#x21B2;">
+<!ENTITY rdsh "&#x21B3;">
+<!ENTITY crarr "&#x21B5;">
+<!ENTITY cularr "&#x21B6;">
+<!ENTITY curvearrowleft "&#x21B6;">
+<!ENTITY curarr "&#x21B7;">
+<!ENTITY curvearrowright "&#x21B7;">
+<!ENTITY olarr "&#x21BA;">
+<!ENTITY circlearrowleft "&#x21BA;">
+<!ENTITY orarr "&#x21BB;">
+<!ENTITY circlearrowright "&#x21BB;">
+<!ENTITY lharu "&#x21BC;">
+<!ENTITY LeftVector "&#x21BC;">
+<!ENTITY leftharpoonup "&#x21BC;">
+<!ENTITY lhard "&#x21BD;">
+<!ENTITY leftharpoondown "&#x21BD;">
+<!ENTITY DownLeftVector "&#x21BD;">
+<!ENTITY uharr "&#x21BE;">
+<!ENTITY upharpoonright "&#x21BE;">
+<!ENTITY RightUpVector "&#x21BE;">
+<!ENTITY uharl "&#x21BF;">
+<!ENTITY upharpoonleft "&#x21BF;">
+<!ENTITY LeftUpVector "&#x21BF;">
+<!ENTITY rharu "&#x21C0;">
+<!ENTITY RightVector "&#x21C0;">
+<!ENTITY rightharpoonup "&#x21C0;">
+<!ENTITY rhard "&#x21C1;">
+<!ENTITY rightharpoondown "&#x21C1;">
+<!ENTITY DownRightVector "&#x21C1;">
+<!ENTITY dharr "&#x21C2;">
+<!ENTITY RightDownVector "&#x21C2;">
+<!ENTITY downharpoonright "&#x21C2;">
+<!ENTITY dharl "&#x21C3;">
+<!ENTITY LeftDownVector "&#x21C3;">
+<!ENTITY downharpoonleft "&#x21C3;">
+<!ENTITY rlarr "&#x21C4;">
+<!ENTITY rightleftarrows "&#x21C4;">
+<!ENTITY RightArrowLeftArrow "&#x21C4;">
+<!ENTITY udarr "&#x21C5;">
+<!ENTITY UpArrowDownArrow "&#x21C5;">
+<!ENTITY lrarr "&#x21C6;">
+<!ENTITY leftrightarrows "&#x21C6;">
+<!ENTITY LeftArrowRightArrow "&#x21C6;">
+<!ENTITY llarr "&#x21C7;">
+<!ENTITY leftleftarrows "&#x21C7;">
+<!ENTITY uuarr "&#x21C8;">
+<!ENTITY upuparrows "&#x21C8;">
+<!ENTITY rrarr "&#x21C9;">
+<!ENTITY rightrightarrows "&#x21C9;">
+<!ENTITY ddarr "&#x21CA;">
+<!ENTITY downdownarrows "&#x21CA;">
+<!ENTITY lrhar "&#x21CB;">
+<!ENTITY ReverseEquilibrium "&#x21CB;">
+<!ENTITY leftrightharpoons "&#x21CB;">
+<!ENTITY rlhar "&#x21CC;">
+<!ENTITY rightleftharpoons "&#x21CC;">
+<!ENTITY Equilibrium "&#x21CC;">
+<!ENTITY nlArr "&#x21CD;">
+<!ENTITY nLeftarrow "&#x21CD;">
+<!ENTITY nhArr "&#x21CE;">
+<!ENTITY nLeftrightarrow "&#x21CE;">
+<!ENTITY nrArr "&#x21CF;">
+<!ENTITY nRightarrow "&#x21CF;">
+<!ENTITY lArr "&#x21D0;">
+<!ENTITY Leftarrow "&#x21D0;">
+<!ENTITY DoubleLeftArrow "&#x21D0;">
+<!ENTITY uArr "&#x21D1;">
+<!ENTITY Uparrow "&#x21D1;">
+<!ENTITY DoubleUpArrow "&#x21D1;">
+<!ENTITY rArr "&#x21D2;">
+<!ENTITY Rightarrow "&#x21D2;">
+<!ENTITY Implies "&#x21D2;">
+<!ENTITY DoubleRightArrow "&#x21D2;">
+<!ENTITY dArr "&#x21D3;">
+<!ENTITY Downarrow "&#x21D3;">
+<!ENTITY DoubleDownArrow "&#x21D3;">
+<!ENTITY hArr "&#x21D4;">
+<!ENTITY Leftrightarrow "&#x21D4;">
+<!ENTITY DoubleLeftRightArrow "&#x21D4;">
+<!ENTITY iff "&#x21D4;">
+<!ENTITY vArr "&#x21D5;">
+<!ENTITY Updownarrow "&#x21D5;">
+<!ENTITY DoubleUpDownArrow "&#x21D5;">
+<!ENTITY nwArr "&#x21D6;">
+<!ENTITY neArr "&#x21D7;">
+<!ENTITY seArr "&#x21D8;">
+<!ENTITY swArr "&#x21D9;">
+<!ENTITY lAarr "&#x21DA;">
+<!ENTITY Lleftarrow "&#x21DA;">
+<!ENTITY rAarr "&#x21DB;">
+<!ENTITY Rrightarrow "&#x21DB;">
+<!ENTITY zigrarr "&#x21DD;">
+<!ENTITY larrb "&#x21E4;">
+<!ENTITY LeftArrowBar "&#x21E4;">
+<!ENTITY rarrb "&#x21E5;">
+<!ENTITY RightArrowBar "&#x21E5;">
+<!ENTITY duarr "&#x21F5;">
+<!ENTITY DownArrowUpArrow "&#x21F5;">
+<!ENTITY loarr "&#x21FD;">
+<!ENTITY roarr "&#x21FE;">
+<!ENTITY hoarr "&#x21FF;">
+<!ENTITY forall "&#x2200;">
+<!ENTITY ForAll "&#x2200;">
+<!ENTITY comp "&#x2201;">
+<!ENTITY complement "&#x2201;">
+<!ENTITY part "&#x2202;">
+<!ENTITY PartialD "&#x2202;">
+<!ENTITY npart "&#x2202;&#x338;">
+<!ENTITY exist "&#x2203;">
+<!ENTITY Exists "&#x2203;">
+<!ENTITY nexist "&#x2204;">
+<!ENTITY NotExists "&#x2204;">
+<!ENTITY nexists "&#x2204;">
+<!ENTITY empty "&#x2205;">
+<!ENTITY emptyset "&#x2205;">
+<!ENTITY emptyv "&#x2205;">
+<!ENTITY varnothing "&#x2205;">
+<!ENTITY nabla "&#x2207;">
+<!ENTITY Del "&#x2207;">
+<!ENTITY isin "&#x2208;">
+<!ENTITY isinv "&#x2208;">
+<!ENTITY Element "&#x2208;">
+<!ENTITY in "&#x2208;">
+<!ENTITY notin "&#x2209;">
+<!ENTITY NotElement "&#x2209;">
+<!ENTITY notinva "&#x2209;">
+<!ENTITY niv "&#x220B;">
+<!ENTITY ReverseElement "&#x220B;">
+<!ENTITY ni "&#x220B;">
+<!ENTITY SuchThat "&#x220B;">
+<!ENTITY notni "&#x220C;">
+<!ENTITY notniva "&#x220C;">
+<!ENTITY NotReverseElement "&#x220C;">
+<!ENTITY prod "&#x220F;">
+<!ENTITY Product "&#x220F;">
+<!ENTITY coprod "&#x2210;">
+<!ENTITY Coproduct "&#x2210;">
+<!ENTITY sum "&#x2211;">
+<!ENTITY Sum "&#x2211;">
+<!ENTITY minus "&#x2212;">
+<!ENTITY mnplus "&#x2213;">
+<!ENTITY mp "&#x2213;">
+<!ENTITY MinusPlus "&#x2213;">
+<!ENTITY plusdo "&#x2214;">
+<!ENTITY dotplus "&#x2214;">
+<!ENTITY setmn "&#x2216;">
+<!ENTITY setminus "&#x2216;">
+<!ENTITY Backslash "&#x2216;">
+<!ENTITY ssetmn "&#x2216;">
+<!ENTITY smallsetminus "&#x2216;">
+<!ENTITY lowast "&#x2217;">
+<!ENTITY compfn "&#x2218;">
+<!ENTITY SmallCircle "&#x2218;">
+<!ENTITY radic "&#x221A;">
+<!ENTITY Sqrt "&#x221A;">
+<!ENTITY prop "&#x221D;">
+<!ENTITY propto "&#x221D;">
+<!ENTITY Proportional "&#x221D;">
+<!ENTITY vprop "&#x221D;">
+<!ENTITY varpropto "&#x221D;">
+<!ENTITY infin "&#x221E;">
+<!ENTITY angrt "&#x221F;">
+<!ENTITY ang "&#x2220;">
+<!ENTITY angle "&#x2220;">
+<!ENTITY nang "&#x2220;&#x20D2;">
+<!ENTITY angmsd "&#x2221;">
+<!ENTITY measuredangle "&#x2221;">
+<!ENTITY angsph "&#x2222;">
+<!ENTITY mid "&#x2223;">
+<!ENTITY VerticalBar "&#x2223;">
+<!ENTITY smid "&#x2223;">
+<!ENTITY shortmid "&#x2223;">
+<!ENTITY nmid "&#x2224;">
+<!ENTITY NotVerticalBar "&#x2224;">
+<!ENTITY nsmid "&#x2224;">
+<!ENTITY nshortmid "&#x2224;">
+<!ENTITY par "&#x2225;">
+<!ENTITY parallel "&#x2225;">
+<!ENTITY DoubleVerticalBar "&#x2225;">
+<!ENTITY spar "&#x2225;">
+<!ENTITY shortparallel "&#x2225;">
+<!ENTITY npar "&#x2226;">
+<!ENTITY nparallel "&#x2226;">
+<!ENTITY NotDoubleVerticalBar "&#x2226;">
+<!ENTITY nspar "&#x2226;">
+<!ENTITY nshortparallel "&#x2226;">
+<!ENTITY and "&#x2227;">
+<!ENTITY wedge "&#x2227;">
+<!ENTITY or "&#x2228;">
+<!ENTITY vee "&#x2228;">
+<!ENTITY cap "&#x2229;">
+<!ENTITY caps "&#x2229;&#xFE00;">
+<!ENTITY cup "&#x222A;">
+<!ENTITY cups "&#x222A;&#xFE00;">
+<!ENTITY int "&#x222B;">
+<!ENTITY Integral "&#x222B;">
+<!ENTITY Int "&#x222C;">
+<!ENTITY tint "&#x222D;">
+<!ENTITY iiint "&#x222D;">
+<!ENTITY conint "&#x222E;">
+<!ENTITY oint "&#x222E;">
+<!ENTITY ContourIntegral "&#x222E;">
+<!ENTITY Conint "&#x222F;">
+<!ENTITY DoubleContourIntegral "&#x222F;">
+<!ENTITY Cconint "&#x2230;">
+<!ENTITY cwint "&#x2231;">
+<!ENTITY cwconint "&#x2232;">
+<!ENTITY ClockwiseContourIntegral "&#x2232;">
+<!ENTITY awconint "&#x2233;">
+<!ENTITY CounterClockwiseContourIntegral "&#x2233;">
+<!ENTITY there4 "&#x2234;">
+<!ENTITY therefore "&#x2234;">
+<!ENTITY Therefore "&#x2234;">
+<!ENTITY becaus "&#x2235;">
+<!ENTITY because "&#x2235;">
+<!ENTITY Because "&#x2235;">
+<!ENTITY ratio "&#x2236;">
+<!ENTITY Colon "&#x2237;">
+<!ENTITY Proportion "&#x2237;">
+<!ENTITY minusd "&#x2238;">
+<!ENTITY dotminus "&#x2238;">
+<!ENTITY mDDot "&#x223A;">
+<!ENTITY homtht "&#x223B;">
+<!ENTITY sim "&#x223C;">
+<!ENTITY Tilde "&#x223C;">
+<!ENTITY thksim "&#x223C;">
+<!ENTITY thicksim "&#x223C;">
+<!ENTITY nvsim "&#x223C;&#x20D2;">
+<!ENTITY bsim "&#x223D;">
+<!ENTITY backsim "&#x223D;">
+<!ENTITY race "&#x223D;&#x331;">
+<!ENTITY ac "&#x223E;">
+<!ENTITY mstpos "&#x223E;">
+<!ENTITY acE "&#x223E;&#x333;">
+<!ENTITY acd "&#x223F;">
+<!ENTITY wreath "&#x2240;">
+<!ENTITY VerticalTilde "&#x2240;">
+<!ENTITY wr "&#x2240;">
+<!ENTITY nsim "&#x2241;">
+<!ENTITY NotTilde "&#x2241;">
+<!ENTITY esim "&#x2242;">
+<!ENTITY EqualTilde "&#x2242;">
+<!ENTITY eqsim "&#x2242;">
+<!ENTITY NotEqualTilde "&#x2242;&#x338;">
+<!ENTITY nesim "&#x2242;&#x338;">
+<!ENTITY sime "&#x2243;">
+<!ENTITY TildeEqual "&#x2243;">
+<!ENTITY simeq "&#x2243;">
+<!ENTITY nsime "&#x2244;">
+<!ENTITY nsimeq "&#x2244;">
+<!ENTITY NotTildeEqual "&#x2244;">
+<!ENTITY cong "&#x2245;">
+<!ENTITY TildeFullEqual "&#x2245;">
+<!ENTITY simne "&#x2246;">
+<!ENTITY ncong "&#x2247;">
+<!ENTITY NotTildeFullEqual "&#x2247;">
+<!ENTITY asymp "&#x2248;">
+<!ENTITY ap "&#x2248;">
+<!ENTITY TildeTilde "&#x2248;">
+<!ENTITY approx "&#x2248;">
+<!ENTITY thkap "&#x2248;">
+<!ENTITY thickapprox "&#x2248;">
+<!ENTITY nap "&#x2249;">
+<!ENTITY NotTildeTilde "&#x2249;">
+<!ENTITY napprox "&#x2249;">
+<!ENTITY ape "&#x224A;">
+<!ENTITY approxeq "&#x224A;">
+<!ENTITY apid "&#x224B;">
+<!ENTITY napid "&#x224B;&#x338;">
+<!ENTITY bcong "&#x224C;">
+<!ENTITY backcong "&#x224C;">
+<!ENTITY asympeq "&#x224D;">
+<!ENTITY CupCap "&#x224D;">
+<!ENTITY nvap "&#x224D;&#x20D2;">
+<!ENTITY bump "&#x224E;">
+<!ENTITY HumpDownHump "&#x224E;">
+<!ENTITY Bumpeq "&#x224E;">
+<!ENTITY NotHumpDownHump "&#x224E;&#x338;">
+<!ENTITY nbump "&#x224E;&#x338;">
+<!ENTITY bumpe "&#x224F;">
+<!ENTITY HumpEqual "&#x224F;">
+<!ENTITY bumpeq "&#x224F;">
+<!ENTITY nbumpe "&#x224F;&#x338;">
+<!ENTITY NotHumpEqual "&#x224F;&#x338;">
+<!ENTITY esdot "&#x2250;">
+<!ENTITY DotEqual "&#x2250;">
+<!ENTITY doteq "&#x2250;">
+<!ENTITY nedot "&#x2250;&#x338;">
+<!ENTITY eDot "&#x2251;">
+<!ENTITY doteqdot "&#x2251;">
+<!ENTITY efDot "&#x2252;">
+<!ENTITY fallingdotseq "&#x2252;">
+<!ENTITY erDot "&#x2253;">
+<!ENTITY risingdotseq "&#x2253;">
+<!ENTITY colone "&#x2254;">
+<!ENTITY coloneq "&#x2254;">
+<!ENTITY Assign "&#x2254;">
+<!ENTITY ecolon "&#x2255;">
+<!ENTITY eqcolon "&#x2255;">
+<!ENTITY ecir "&#x2256;">
+<!ENTITY eqcirc "&#x2256;">
+<!ENTITY cire "&#x2257;">
+<!ENTITY circeq "&#x2257;">
+<!ENTITY wedgeq "&#x2259;">
+<!ENTITY veeeq "&#x225A;">
+<!ENTITY trie "&#x225C;">
+<!ENTITY triangleq "&#x225C;">
+<!ENTITY equest "&#x225F;">
+<!ENTITY questeq "&#x225F;">
+<!ENTITY ne "&#x2260;">
+<!ENTITY NotEqual "&#x2260;">
+<!ENTITY equiv "&#x2261;">
+<!ENTITY Congruent "&#x2261;">
+<!ENTITY bnequiv "&#x2261;&#x20E5;">
+<!ENTITY nequiv "&#x2262;">
+<!ENTITY NotCongruent "&#x2262;">
+<!ENTITY le "&#x2264;">
+<!ENTITY leq "&#x2264;">
+<!ENTITY nvle "&#x2264;&#x20D2;">
+<!ENTITY ge "&#x2265;">
+<!ENTITY GreaterEqual "&#x2265;">
+<!ENTITY geq "&#x2265;">
+<!ENTITY nvge "&#x2265;&#x20D2;">
+<!ENTITY lE "&#x2266;">
+<!ENTITY LessFullEqual "&#x2266;">
+<!ENTITY leqq "&#x2266;">
+<!ENTITY nlE "&#x2266;&#x338;">
+<!ENTITY nleqq "&#x2266;&#x338;">
+<!ENTITY gE "&#x2267;">
+<!ENTITY GreaterFullEqual "&#x2267;">
+<!ENTITY geqq "&#x2267;">
+<!ENTITY ngE "&#x2267;&#x338;">
+<!ENTITY ngeqq "&#x2267;&#x338;">
+<!ENTITY NotGreaterFullEqual "&#x2267;&#x338;">
+<!ENTITY lnE "&#x2268;">
+<!ENTITY lneqq "&#x2268;">
+<!ENTITY lvnE "&#x2268;&#xFE00;">
+<!ENTITY lvertneqq "&#x2268;&#xFE00;">
+<!ENTITY gnE "&#x2269;">
+<!ENTITY gneqq "&#x2269;">
+<!ENTITY gvnE "&#x2269;&#xFE00;">
+<!ENTITY gvertneqq "&#x2269;&#xFE00;">
+<!ENTITY Lt "&#x226A;">
+<!ENTITY NestedLessLess "&#x226A;">
+<!ENTITY ll "&#x226A;">
+<!ENTITY nLtv "&#x226A;&#x338;">
+<!ENTITY NotLessLess "&#x226A;&#x338;">
+<!ENTITY nLt "&#x226A;&#x20D2;">
+<!ENTITY Gt "&#x226B;">
+<!ENTITY NestedGreaterGreater "&#x226B;">
+<!ENTITY gg "&#x226B;">
+<!ENTITY nGtv "&#x226B;&#x338;">
+<!ENTITY NotGreaterGreater "&#x226B;&#x338;">
+<!ENTITY nGt "&#x226B;&#x20D2;">
+<!ENTITY twixt "&#x226C;">
+<!ENTITY between "&#x226C;">
+<!ENTITY NotCupCap "&#x226D;">
+<!ENTITY nlt "&#x226E;">
+<!ENTITY NotLess "&#x226E;">
+<!ENTITY nless "&#x226E;">
+<!ENTITY ngt "&#x226F;">
+<!ENTITY NotGreater "&#x226F;">
+<!ENTITY ngtr "&#x226F;">
+<!ENTITY nle "&#x2270;">
+<!ENTITY NotLessEqual "&#x2270;">
+<!ENTITY nleq "&#x2270;">
+<!ENTITY nge "&#x2271;">
+<!ENTITY NotGreaterEqual "&#x2271;">
+<!ENTITY ngeq "&#x2271;">
+<!ENTITY lsim "&#x2272;">
+<!ENTITY LessTilde "&#x2272;">
+<!ENTITY lesssim "&#x2272;">
+<!ENTITY gsim "&#x2273;">
+<!ENTITY gtrsim "&#x2273;">
+<!ENTITY GreaterTilde "&#x2273;">
+<!ENTITY nlsim "&#x2274;">
+<!ENTITY NotLessTilde "&#x2274;">
+<!ENTITY ngsim "&#x2275;">
+<!ENTITY NotGreaterTilde "&#x2275;">
+<!ENTITY lg "&#x2276;">
+<!ENTITY lessgtr "&#x2276;">
+<!ENTITY LessGreater "&#x2276;">
+<!ENTITY gl "&#x2277;">
+<!ENTITY gtrless "&#x2277;">
+<!ENTITY GreaterLess "&#x2277;">
+<!ENTITY ntlg "&#x2278;">
+<!ENTITY NotLessGreater "&#x2278;">
+<!ENTITY ntgl "&#x2279;">
+<!ENTITY NotGreaterLess "&#x2279;">
+<!ENTITY pr "&#x227A;">
+<!ENTITY Precedes "&#x227A;">
+<!ENTITY prec "&#x227A;">
+<!ENTITY sc "&#x227B;">
+<!ENTITY Succeeds "&#x227B;">
+<!ENTITY succ "&#x227B;">
+<!ENTITY prcue "&#x227C;">
+<!ENTITY PrecedesSlantEqual "&#x227C;">
+<!ENTITY preccurlyeq "&#x227C;">
+<!ENTITY sccue "&#x227D;">
+<!ENTITY SucceedsSlantEqual "&#x227D;">
+<!ENTITY succcurlyeq "&#x227D;">
+<!ENTITY prsim "&#x227E;">
+<!ENTITY precsim "&#x227E;">
+<!ENTITY PrecedesTilde "&#x227E;">
+<!ENTITY scsim "&#x227F;">
+<!ENTITY succsim "&#x227F;">
+<!ENTITY SucceedsTilde "&#x227F;">
+<!ENTITY NotSucceedsTilde "&#x227F;&#x338;">
+<!ENTITY npr "&#x2280;">
+<!ENTITY nprec "&#x2280;">
+<!ENTITY NotPrecedes "&#x2280;">
+<!ENTITY nsc "&#x2281;">
+<!ENTITY nsucc "&#x2281;">
+<!ENTITY NotSucceeds "&#x2281;">
+<!ENTITY sub "&#x2282;">
+<!ENTITY subset "&#x2282;">
+<!ENTITY vnsub "&#x2282;&#x20D2;">
+<!ENTITY nsubset "&#x2282;&#x20D2;">
+<!ENTITY NotSubset "&#x2282;&#x20D2;">
+<!ENTITY sup "&#x2283;">
+<!ENTITY supset "&#x2283;">
+<!ENTITY Superset "&#x2283;">
+<!ENTITY vnsup "&#x2283;&#x20D2;">
+<!ENTITY nsupset "&#x2283;&#x20D2;">
+<!ENTITY NotSuperset "&#x2283;&#x20D2;">
+<!ENTITY nsub "&#x2284;">
+<!ENTITY nsup "&#x2285;">
+<!ENTITY sube "&#x2286;">
+<!ENTITY SubsetEqual "&#x2286;">
+<!ENTITY subseteq "&#x2286;">
+<!ENTITY supe "&#x2287;">
+<!ENTITY supseteq "&#x2287;">
+<!ENTITY SupersetEqual "&#x2287;">
+<!ENTITY nsube "&#x2288;">
+<!ENTITY nsubseteq "&#x2288;">
+<!ENTITY NotSubsetEqual "&#x2288;">
+<!ENTITY nsupe "&#x2289;">
+<!ENTITY nsupseteq "&#x2289;">
+<!ENTITY NotSupersetEqual "&#x2289;">
+<!ENTITY subne "&#x228A;">
+<!ENTITY subsetneq "&#x228A;">
+<!ENTITY vsubne "&#x228A;&#xFE00;">
+<!ENTITY varsubsetneq "&#x228A;&#xFE00;">
+<!ENTITY supne "&#x228B;">
+<!ENTITY supsetneq "&#x228B;">
+<!ENTITY vsupne "&#x228B;&#xFE00;">
+<!ENTITY varsupsetneq "&#x228B;&#xFE00;">
+<!ENTITY cupdot "&#x228D;">
+<!ENTITY uplus "&#x228E;">
+<!ENTITY UnionPlus "&#x228E;">
+<!ENTITY sqsub "&#x228F;">
+<!ENTITY SquareSubset "&#x228F;">
+<!ENTITY sqsubset "&#x228F;">
+<!ENTITY NotSquareSubset "&#x228F;&#x338;">
+<!ENTITY sqsup "&#x2290;">
+<!ENTITY SquareSuperset "&#x2290;">
+<!ENTITY sqsupset "&#x2290;">
+<!ENTITY NotSquareSuperset "&#x2290;&#x338;">
+<!ENTITY sqsube "&#x2291;">
+<!ENTITY SquareSubsetEqual "&#x2291;">
+<!ENTITY sqsubseteq "&#x2291;">
+<!ENTITY sqsupe "&#x2292;">
+<!ENTITY SquareSupersetEqual "&#x2292;">
+<!ENTITY sqsupseteq "&#x2292;">
+<!ENTITY sqcap "&#x2293;">
+<!ENTITY SquareIntersection "&#x2293;">
+<!ENTITY sqcaps "&#x2293;&#xFE00;">
+<!ENTITY sqcup "&#x2294;">
+<!ENTITY SquareUnion "&#x2294;">
+<!ENTITY sqcups "&#x2294;&#xFE00;">
+<!ENTITY oplus "&#x2295;">
+<!ENTITY CirclePlus "&#x2295;">
+<!ENTITY ominus "&#x2296;">
+<!ENTITY CircleMinus "&#x2296;">
+<!ENTITY otimes "&#x2297;">
+<!ENTITY CircleTimes "&#x2297;">
+<!ENTITY osol "&#x2298;">
+<!ENTITY odot "&#x2299;">
+<!ENTITY CircleDot "&#x2299;">
+<!ENTITY ocir "&#x229A;">
+<!ENTITY circledcirc "&#x229A;">
+<!ENTITY oast "&#x229B;">
+<!ENTITY circledast "&#x229B;">
+<!ENTITY odash "&#x229D;">
+<!ENTITY circleddash "&#x229D;">
+<!ENTITY plusb "&#x229E;">
+<!ENTITY boxplus "&#x229E;">
+<!ENTITY minusb "&#x229F;">
+<!ENTITY boxminus "&#x229F;">
+<!ENTITY timesb "&#x22A0;">
+<!ENTITY boxtimes "&#x22A0;">
+<!ENTITY sdotb "&#x22A1;">
+<!ENTITY dotsquare "&#x22A1;">
+<!ENTITY vdash "&#x22A2;">
+<!ENTITY RightTee "&#x22A2;">
+<!ENTITY dashv "&#x22A3;">
+<!ENTITY LeftTee "&#x22A3;">
+<!ENTITY top "&#x22A4;">
+<!ENTITY DownTee "&#x22A4;">
+<!ENTITY bottom "&#x22A5;">
+<!ENTITY bot "&#x22A5;">
+<!ENTITY perp "&#x22A5;">
+<!ENTITY UpTee "&#x22A5;">
+<!ENTITY models "&#x22A7;">
+<!ENTITY vDash "&#x22A8;">
+<!ENTITY DoubleRightTee "&#x22A8;">
+<!ENTITY Vdash "&#x22A9;">
+<!ENTITY Vvdash "&#x22AA;">
+<!ENTITY VDash "&#x22AB;">
+<!ENTITY nvdash "&#x22AC;">
+<!ENTITY nvDash "&#x22AD;">
+<!ENTITY nVdash "&#x22AE;">
+<!ENTITY nVDash "&#x22AF;">
+<!ENTITY prurel "&#x22B0;">
+<!ENTITY vltri "&#x22B2;">
+<!ENTITY vartriangleleft "&#x22B2;">
+<!ENTITY LeftTriangle "&#x22B2;">
+<!ENTITY vrtri "&#x22B3;">
+<!ENTITY vartriangleright "&#x22B3;">
+<!ENTITY RightTriangle "&#x22B3;">
+<!ENTITY ltrie "&#x22B4;">
+<!ENTITY trianglelefteq "&#x22B4;">
+<!ENTITY LeftTriangleEqual "&#x22B4;">
+<!ENTITY nvltrie "&#x22B4;&#x20D2;">
+<!ENTITY rtrie "&#x22B5;">
+<!ENTITY trianglerighteq "&#x22B5;">
+<!ENTITY RightTriangleEqual "&#x22B5;">
+<!ENTITY nvrtrie "&#x22B5;&#x20D2;">
+<!ENTITY origof "&#x22B6;">
+<!ENTITY imof "&#x22B7;">
+<!ENTITY mumap "&#x22B8;">
+<!ENTITY multimap "&#x22B8;">
+<!ENTITY hercon "&#x22B9;">
+<!ENTITY intcal "&#x22BA;">
+<!ENTITY intercal "&#x22BA;">
+<!ENTITY veebar "&#x22BB;">
+<!ENTITY barvee "&#x22BD;">
+<!ENTITY angrtvb "&#x22BE;">
+<!ENTITY lrtri "&#x22BF;">
+<!ENTITY xwedge "&#x22C0;">
+<!ENTITY Wedge "&#x22C0;">
+<!ENTITY bigwedge "&#x22C0;">
+<!ENTITY xvee "&#x22C1;">
+<!ENTITY Vee "&#x22C1;">
+<!ENTITY bigvee "&#x22C1;">
+<!ENTITY xcap "&#x22C2;">
+<!ENTITY Intersection "&#x22C2;">
+<!ENTITY bigcap "&#x22C2;">
+<!ENTITY xcup "&#x22C3;">
+<!ENTITY Union "&#x22C3;">
+<!ENTITY bigcup "&#x22C3;">
+<!ENTITY diam "&#x22C4;">
+<!ENTITY diamond "&#x22C4;">
+<!ENTITY Diamond "&#x22C4;">
+<!ENTITY sdot "&#x22C5;">
+<!ENTITY sstarf "&#x22C6;">
+<!ENTITY Star "&#x22C6;">
+<!ENTITY divonx "&#x22C7;">
+<!ENTITY divideontimes "&#x22C7;">
+<!ENTITY bowtie "&#x22C8;">
+<!ENTITY ltimes "&#x22C9;">
+<!ENTITY rtimes "&#x22CA;">
+<!ENTITY lthree "&#x22CB;">
+<!ENTITY leftthreetimes "&#x22CB;">
+<!ENTITY rthree "&#x22CC;">
+<!ENTITY rightthreetimes "&#x22CC;">
+<!ENTITY bsime "&#x22CD;">
+<!ENTITY backsimeq "&#x22CD;">
+<!ENTITY cuvee "&#x22CE;">
+<!ENTITY curlyvee "&#x22CE;">
+<!ENTITY cuwed "&#x22CF;">
+<!ENTITY curlywedge "&#x22CF;">
+<!ENTITY Sub "&#x22D0;">
+<!ENTITY Subset "&#x22D0;">
+<!ENTITY Sup "&#x22D1;">
+<!ENTITY Supset "&#x22D1;">
+<!ENTITY Cap "&#x22D2;">
+<!ENTITY Cup "&#x22D3;">
+<!ENTITY fork "&#x22D4;">
+<!ENTITY pitchfork "&#x22D4;">
+<!ENTITY epar "&#x22D5;">
+<!ENTITY ltdot "&#x22D6;">
+<!ENTITY lessdot "&#x22D6;">
+<!ENTITY gtdot "&#x22D7;">
+<!ENTITY gtrdot "&#x22D7;">
+<!ENTITY Ll "&#x22D8;">
+<!ENTITY nLl "&#x22D8;&#x338;">
+<!ENTITY Gg "&#x22D9;">
+<!ENTITY ggg "&#x22D9;">
+<!ENTITY nGg "&#x22D9;&#x338;">
+<!ENTITY leg "&#x22DA;">
+<!ENTITY LessEqualGreater "&#x22DA;">
+<!ENTITY lesseqgtr "&#x22DA;">
+<!ENTITY lesg "&#x22DA;&#xFE00;">
+<!ENTITY gel "&#x22DB;">
+<!ENTITY gtreqless "&#x22DB;">
+<!ENTITY GreaterEqualLess "&#x22DB;">
+<!ENTITY gesl "&#x22DB;&#xFE00;">
+<!ENTITY cuepr "&#x22DE;">
+<!ENTITY curlyeqprec "&#x22DE;">
+<!ENTITY cuesc "&#x22DF;">
+<!ENTITY curlyeqsucc "&#x22DF;">
+<!ENTITY nprcue "&#x22E0;">
+<!ENTITY NotPrecedesSlantEqual "&#x22E0;">
+<!ENTITY nsccue "&#x22E1;">
+<!ENTITY NotSucceedsSlantEqual "&#x22E1;">
+<!ENTITY nsqsube "&#x22E2;">
+<!ENTITY NotSquareSubsetEqual "&#x22E2;">
+<!ENTITY nsqsupe "&#x22E3;">
+<!ENTITY NotSquareSupersetEqual "&#x22E3;">
+<!ENTITY lnsim "&#x22E6;">
+<!ENTITY gnsim "&#x22E7;">
+<!ENTITY prnsim "&#x22E8;">
+<!ENTITY precnsim "&#x22E8;">
+<!ENTITY scnsim "&#x22E9;">
+<!ENTITY succnsim "&#x22E9;">
+<!ENTITY nltri "&#x22EA;">
+<!ENTITY ntriangleleft "&#x22EA;">
+<!ENTITY NotLeftTriangle "&#x22EA;">
+<!ENTITY nrtri "&#x22EB;">
+<!ENTITY ntriangleright "&#x22EB;">
+<!ENTITY NotRightTriangle "&#x22EB;">
+<!ENTITY nltrie "&#x22EC;">
+<!ENTITY ntrianglelefteq "&#x22EC;">
+<!ENTITY NotLeftTriangleEqual "&#x22EC;">
+<!ENTITY nrtrie "&#x22ED;">
+<!ENTITY ntrianglerighteq "&#x22ED;">
+<!ENTITY NotRightTriangleEqual "&#x22ED;">
+<!ENTITY vellip "&#x22EE;">
+<!ENTITY ctdot "&#x22EF;">
+<!ENTITY utdot "&#x22F0;">
+<!ENTITY dtdot "&#x22F1;">
+<!ENTITY disin "&#x22F2;">
+<!ENTITY isinsv "&#x22F3;">
+<!ENTITY isins "&#x22F4;">
+<!ENTITY isindot "&#x22F5;">
+<!ENTITY notindot "&#x22F5;&#x338;">
+<!ENTITY notinvc "&#x22F6;">
+<!ENTITY notinvb "&#x22F7;">
+<!ENTITY isinE "&#x22F9;">
+<!ENTITY notinE "&#x22F9;&#x338;">
+<!ENTITY nisd "&#x22FA;">
+<!ENTITY xnis "&#x22FB;">
+<!ENTITY nis "&#x22FC;">
+<!ENTITY notnivc "&#x22FD;">
+<!ENTITY notnivb "&#x22FE;">
+<!ENTITY barwed "&#x2305;">
+<!ENTITY barwedge "&#x2305;">
+<!ENTITY Barwed "&#x2306;">
+<!ENTITY doublebarwedge "&#x2306;">
+<!ENTITY lceil "&#x2308;">
+<!ENTITY LeftCeiling "&#x2308;">
+<!ENTITY rceil "&#x2309;">
+<!ENTITY RightCeiling "&#x2309;">
+<!ENTITY lfloor "&#x230A;">
+<!ENTITY LeftFloor "&#x230A;">
+<!ENTITY rfloor "&#x230B;">
+<!ENTITY RightFloor "&#x230B;">
+<!ENTITY drcrop "&#x230C;">
+<!ENTITY dlcrop "&#x230D;">
+<!ENTITY urcrop "&#x230E;">
+<!ENTITY ulcrop "&#x230F;">
+<!ENTITY bnot "&#x2310;">
+<!ENTITY profline "&#x2312;">
+<!ENTITY profsurf "&#x2313;">
+<!ENTITY telrec "&#x2315;">
+<!ENTITY target "&#x2316;">
+<!ENTITY ulcorn "&#x231C;">
+<!ENTITY ulcorner "&#x231C;">
+<!ENTITY urcorn "&#x231D;">
+<!ENTITY urcorner "&#x231D;">
+<!ENTITY dlcorn "&#x231E;">
+<!ENTITY llcorner "&#x231E;">
+<!ENTITY drcorn "&#x231F;">
+<!ENTITY lrcorner "&#x231F;">
+<!ENTITY frown "&#x2322;">
+<!ENTITY sfrown "&#x2322;">
+<!ENTITY smile "&#x2323;">
+<!ENTITY ssmile "&#x2323;">
+<!ENTITY cylcty "&#x232D;">
+<!ENTITY profalar "&#x232E;">
+<!ENTITY topbot "&#x2336;">
+<!ENTITY ovbar "&#x233D;">
+<!ENTITY solbar "&#x233F;">
+<!ENTITY angzarr "&#x237C;">
+<!ENTITY lmoust "&#x23B0;">
+<!ENTITY lmoustache "&#x23B0;">
+<!ENTITY rmoust "&#x23B1;">
+<!ENTITY rmoustache "&#x23B1;">
+<!ENTITY tbrk "&#x23B4;">
+<!ENTITY OverBracket "&#x23B4;">
+<!ENTITY bbrk "&#x23B5;">
+<!ENTITY UnderBracket "&#x23B5;">
+<!ENTITY bbrktbrk "&#x23B6;">
+<!ENTITY OverParenthesis "&#x23DC;">
+<!ENTITY UnderParenthesis "&#x23DD;">
+<!ENTITY OverBrace "&#x23DE;">
+<!ENTITY UnderBrace "&#x23DF;">
+<!ENTITY trpezium "&#x23E2;">
+<!ENTITY elinters "&#x23E7;">
+<!ENTITY blank "&#x2423;">
+<!ENTITY oS "&#x24C8;">
+<!ENTITY circledS "&#x24C8;">
+<!ENTITY boxh "&#x2500;">
+<!ENTITY HorizontalLine "&#x2500;">
+<!ENTITY boxv "&#x2502;">
+<!ENTITY boxdr "&#x250C;">
+<!ENTITY boxdl "&#x2510;">
+<!ENTITY boxur "&#x2514;">
+<!ENTITY boxul "&#x2518;">
+<!ENTITY boxvr "&#x251C;">
+<!ENTITY boxvl "&#x2524;">
+<!ENTITY boxhd "&#x252C;">
+<!ENTITY boxhu "&#x2534;">
+<!ENTITY boxvh "&#x253C;">
+<!ENTITY boxH "&#x2550;">
+<!ENTITY boxV "&#x2551;">
+<!ENTITY boxdR "&#x2552;">
+<!ENTITY boxDr "&#x2553;">
+<!ENTITY boxDR "&#x2554;">
+<!ENTITY boxdL "&#x2555;">
+<!ENTITY boxDl "&#x2556;">
+<!ENTITY boxDL "&#x2557;">
+<!ENTITY boxuR "&#x2558;">
+<!ENTITY boxUr "&#x2559;">
+<!ENTITY boxUR "&#x255A;">
+<!ENTITY boxuL "&#x255B;">
+<!ENTITY boxUl "&#x255C;">
+<!ENTITY boxUL "&#x255D;">
+<!ENTITY boxvR "&#x255E;">
+<!ENTITY boxVr "&#x255F;">
+<!ENTITY boxVR "&#x2560;">
+<!ENTITY boxvL "&#x2561;">
+<!ENTITY boxVl "&#x2562;">
+<!ENTITY boxVL "&#x2563;">
+<!ENTITY boxHd "&#x2564;">
+<!ENTITY boxhD "&#x2565;">
+<!ENTITY boxHD "&#x2566;">
+<!ENTITY boxHu "&#x2567;">
+<!ENTITY boxhU "&#x2568;">
+<!ENTITY boxHU "&#x2569;">
+<!ENTITY boxvH "&#x256A;">
+<!ENTITY boxVh "&#x256B;">
+<!ENTITY boxVH "&#x256C;">
+<!ENTITY uhblk "&#x2580;">
+<!ENTITY lhblk "&#x2584;">
+<!ENTITY block "&#x2588;">
+<!ENTITY blk14 "&#x2591;">
+<!ENTITY blk12 "&#x2592;">
+<!ENTITY blk34 "&#x2593;">
+<!ENTITY squ "&#x25A1;">
+<!ENTITY square "&#x25A1;">
+<!ENTITY Square "&#x25A1;">
+<!ENTITY squf "&#x25AA;">
+<!ENTITY squarf "&#x25AA;">
+<!ENTITY blacksquare "&#x25AA;">
+<!ENTITY FilledVerySmallSquare "&#x25AA;">
+<!ENTITY EmptyVerySmallSquare "&#x25AB;">
+<!ENTITY rect "&#x25AD;">
+<!ENTITY marker "&#x25AE;">
+<!ENTITY fltns "&#x25B1;">
+<!ENTITY xutri "&#x25B3;">
+<!ENTITY bigtriangleup "&#x25B3;">
+<!ENTITY utrif "&#x25B4;">
+<!ENTITY blacktriangle "&#x25B4;">
+<!ENTITY utri "&#x25B5;">
+<!ENTITY triangle "&#x25B5;">
+<!ENTITY rtrif "&#x25B8;">
+<!ENTITY blacktriangleright "&#x25B8;">
+<!ENTITY rtri "&#x25B9;">
+<!ENTITY triangleright "&#x25B9;">
+<!ENTITY xdtri "&#x25BD;">
+<!ENTITY bigtriangledown "&#x25BD;">
+<!ENTITY dtrif "&#x25BE;">
+<!ENTITY blacktriangledown "&#x25BE;">
+<!ENTITY dtri "&#x25BF;">
+<!ENTITY triangledown "&#x25BF;">
+<!ENTITY ltrif "&#x25C2;">
+<!ENTITY blacktriangleleft "&#x25C2;">
+<!ENTITY ltri "&#x25C3;">
+<!ENTITY triangleleft "&#x25C3;">
+<!ENTITY loz "&#x25CA;">
+<!ENTITY lozenge "&#x25CA;">
+<!ENTITY cir "&#x25CB;">
+<!ENTITY tridot "&#x25EC;">
+<!ENTITY xcirc "&#x25EF;">
+<!ENTITY bigcirc "&#x25EF;">
+<!ENTITY ultri "&#x25F8;">
+<!ENTITY urtri "&#x25F9;">
+<!ENTITY lltri "&#x25FA;">
+<!ENTITY EmptySmallSquare "&#x25FB;">
+<!ENTITY FilledSmallSquare "&#x25FC;">
+<!ENTITY starf "&#x2605;">
+<!ENTITY bigstar "&#x2605;">
+<!ENTITY star "&#x2606;">
+<!ENTITY phone "&#x260E;">
+<!ENTITY female "&#x2640;">
+<!ENTITY male "&#x2642;">
+<!ENTITY spades "&#x2660;">
+<!ENTITY spadesuit "&#x2660;">
+<!ENTITY clubs "&#x2663;">
+<!ENTITY clubsuit "&#x2663;">
+<!ENTITY hearts "&#x2665;">
+<!ENTITY heartsuit "&#x2665;">
+<!ENTITY diams "&#x2666;">
+<!ENTITY diamondsuit "&#x2666;">
+<!ENTITY sung "&#x266A;">
+<!ENTITY flat "&#x266D;">
+<!ENTITY natur "&#x266E;">
+<!ENTITY natural "&#x266E;">
+<!ENTITY sharp "&#x266F;">
+<!ENTITY check "&#x2713;">
+<!ENTITY checkmark "&#x2713;">
+<!ENTITY cross "&#x2717;">
+<!ENTITY malt "&#x2720;">
+<!ENTITY maltese "&#x2720;">
+<!ENTITY sext "&#x2736;">
+<!ENTITY VerticalSeparator "&#x2758;">
+<!ENTITY lbbrk "&#x2772;">
+<!ENTITY rbbrk "&#x2773;">
+<!ENTITY bsolhsub "&#x27C8;">
+<!ENTITY suphsol "&#x27C9;">
+<!ENTITY lobrk "&#x27E6;">
+<!ENTITY LeftDoubleBracket "&#x27E6;">
+<!ENTITY robrk "&#x27E7;">
+<!ENTITY RightDoubleBracket "&#x27E7;">
+<!ENTITY lang "&#x27E8;">
+<!ENTITY LeftAngleBracket "&#x27E8;">
+<!ENTITY langle "&#x27E8;">
+<!ENTITY rang "&#x27E9;">
+<!ENTITY RightAngleBracket "&#x27E9;">
+<!ENTITY rangle "&#x27E9;">
+<!ENTITY Lang "&#x27EA;">
+<!ENTITY Rang "&#x27EB;">
+<!ENTITY loang "&#x27EC;">
+<!ENTITY roang "&#x27ED;">
+<!ENTITY xlarr "&#x27F5;">
+<!ENTITY longleftarrow "&#x27F5;">
+<!ENTITY LongLeftArrow "&#x27F5;">
+<!ENTITY xrarr "&#x27F6;">
+<!ENTITY longrightarrow "&#x27F6;">
+<!ENTITY LongRightArrow "&#x27F6;">
+<!ENTITY xharr "&#x27F7;">
+<!ENTITY longleftrightarrow "&#x27F7;">
+<!ENTITY LongLeftRightArrow "&#x27F7;">
+<!ENTITY xlArr "&#x27F8;">
+<!ENTITY Longleftarrow "&#x27F8;">
+<!ENTITY DoubleLongLeftArrow "&#x27F8;">
+<!ENTITY xrArr "&#x27F9;">
+<!ENTITY Longrightarrow "&#x27F9;">
+<!ENTITY DoubleLongRightArrow "&#x27F9;">
+<!ENTITY xhArr "&#x27FA;">
+<!ENTITY Longleftrightarrow "&#x27FA;">
+<!ENTITY DoubleLongLeftRightArrow "&#x27FA;">
+<!ENTITY xmap "&#x27FC;">
+<!ENTITY longmapsto "&#x27FC;">
+<!ENTITY dzigrarr "&#x27FF;">
+<!ENTITY nvlArr "&#x2902;">
+<!ENTITY nvrArr "&#x2903;">
+<!ENTITY nvHarr "&#x2904;">
+<!ENTITY Map "&#x2905;">
+<!ENTITY lbarr "&#x290C;">
+<!ENTITY rbarr "&#x290D;">
+<!ENTITY bkarow "&#x290D;">
+<!ENTITY lBarr "&#x290E;">
+<!ENTITY rBarr "&#x290F;">
+<!ENTITY dbkarow "&#x290F;">
+<!ENTITY RBarr "&#x2910;">
+<!ENTITY drbkarow "&#x2910;">
+<!ENTITY DDotrahd "&#x2911;">
+<!ENTITY UpArrowBar "&#x2912;">
+<!ENTITY DownArrowBar "&#x2913;">
+<!ENTITY Rarrtl "&#x2916;">
+<!ENTITY latail "&#x2919;">
+<!ENTITY ratail "&#x291A;">
+<!ENTITY lAtail "&#x291B;">
+<!ENTITY rAtail "&#x291C;">
+<!ENTITY larrfs "&#x291D;">
+<!ENTITY rarrfs "&#x291E;">
+<!ENTITY larrbfs "&#x291F;">
+<!ENTITY rarrbfs "&#x2920;">
+<!ENTITY nwarhk "&#x2923;">
+<!ENTITY nearhk "&#x2924;">
+<!ENTITY searhk "&#x2925;">
+<!ENTITY hksearow "&#x2925;">
+<!ENTITY swarhk "&#x2926;">
+<!ENTITY hkswarow "&#x2926;">
+<!ENTITY nwnear "&#x2927;">
+<!ENTITY nesear "&#x2928;">
+<!ENTITY toea "&#x2928;">
+<!ENTITY seswar "&#x2929;">
+<!ENTITY tosa "&#x2929;">
+<!ENTITY swnwar "&#x292A;">
+<!ENTITY rarrc "&#x2933;">
+<!ENTITY nrarrc "&#x2933;&#x338;">
+<!ENTITY cudarrr "&#x2935;">
+<!ENTITY ldca "&#x2936;">
+<!ENTITY rdca "&#x2937;">
+<!ENTITY cudarrl "&#x2938;">
+<!ENTITY larrpl "&#x2939;">
+<!ENTITY curarrm "&#x293C;">
+<!ENTITY cularrp "&#x293D;">
+<!ENTITY rarrpl "&#x2945;">
+<!ENTITY harrcir "&#x2948;">
+<!ENTITY Uarrocir "&#x2949;">
+<!ENTITY lurdshar "&#x294A;">
+<!ENTITY ldrushar "&#x294B;">
+<!ENTITY LeftRightVector "&#x294E;">
+<!ENTITY RightUpDownVector "&#x294F;">
+<!ENTITY DownLeftRightVector "&#x2950;">
+<!ENTITY LeftUpDownVector "&#x2951;">
+<!ENTITY LeftVectorBar "&#x2952;">
+<!ENTITY RightVectorBar "&#x2953;">
+<!ENTITY RightUpVectorBar "&#x2954;">
+<!ENTITY RightDownVectorBar "&#x2955;">
+<!ENTITY DownLeftVectorBar "&#x2956;">
+<!ENTITY DownRightVectorBar "&#x2957;">
+<!ENTITY LeftUpVectorBar "&#x2958;">
+<!ENTITY LeftDownVectorBar "&#x2959;">
+<!ENTITY LeftTeeVector "&#x295A;">
+<!ENTITY RightTeeVector "&#x295B;">
+<!ENTITY RightUpTeeVector "&#x295C;">
+<!ENTITY RightDownTeeVector "&#x295D;">
+<!ENTITY DownLeftTeeVector "&#x295E;">
+<!ENTITY DownRightTeeVector "&#x295F;">
+<!ENTITY LeftUpTeeVector "&#x2960;">
+<!ENTITY LeftDownTeeVector "&#x2961;">
+<!ENTITY lHar "&#x2962;">
+<!ENTITY uHar "&#x2963;">
+<!ENTITY rHar "&#x2964;">
+<!ENTITY dHar "&#x2965;">
+<!ENTITY luruhar "&#x2966;">
+<!ENTITY ldrdhar "&#x2967;">
+<!ENTITY ruluhar "&#x2968;">
+<!ENTITY rdldhar "&#x2969;">
+<!ENTITY lharul "&#x296A;">
+<!ENTITY llhard "&#x296B;">
+<!ENTITY rharul "&#x296C;">
+<!ENTITY lrhard "&#x296D;">
+<!ENTITY udhar "&#x296E;">
+<!ENTITY UpEquilibrium "&#x296E;">
+<!ENTITY duhar "&#x296F;">
+<!ENTITY ReverseUpEquilibrium "&#x296F;">
+<!ENTITY RoundImplies "&#x2970;">
+<!ENTITY erarr "&#x2971;">
+<!ENTITY simrarr "&#x2972;">
+<!ENTITY larrsim "&#x2973;">
+<!ENTITY rarrsim "&#x2974;">
+<!ENTITY rarrap "&#x2975;">
+<!ENTITY ltlarr "&#x2976;">
+<!ENTITY gtrarr "&#x2978;">
+<!ENTITY subrarr "&#x2979;">
+<!ENTITY suplarr "&#x297B;">
+<!ENTITY lfisht "&#x297C;">
+<!ENTITY rfisht "&#x297D;">
+<!ENTITY ufisht "&#x297E;">
+<!ENTITY dfisht "&#x297F;">
+<!ENTITY lopar "&#x2985;">
+<!ENTITY ropar "&#x2986;">
+<!ENTITY lbrke "&#x298B;">
+<!ENTITY rbrke "&#x298C;">
+<!ENTITY lbrkslu "&#x298D;">
+<!ENTITY rbrksld "&#x298E;">
+<!ENTITY lbrksld "&#x298F;">
+<!ENTITY rbrkslu "&#x2990;">
+<!ENTITY langd "&#x2991;">
+<!ENTITY rangd "&#x2992;">
+<!ENTITY lparlt "&#x2993;">
+<!ENTITY rpargt "&#x2994;">
+<!ENTITY gtlPar "&#x2995;">
+<!ENTITY ltrPar "&#x2996;">
+<!ENTITY vzigzag "&#x299A;">
+<!ENTITY vangrt "&#x299C;">
+<!ENTITY angrtvbd "&#x299D;">
+<!ENTITY ange "&#x29A4;">
+<!ENTITY range "&#x29A5;">
+<!ENTITY dwangle "&#x29A6;">
+<!ENTITY uwangle "&#x29A7;">
+<!ENTITY angmsdaa "&#x29A8;">
+<!ENTITY angmsdab "&#x29A9;">
+<!ENTITY angmsdac "&#x29AA;">
+<!ENTITY angmsdad "&#x29AB;">
+<!ENTITY angmsdae "&#x29AC;">
+<!ENTITY angmsdaf "&#x29AD;">
+<!ENTITY angmsdag "&#x29AE;">
+<!ENTITY angmsdah "&#x29AF;">
+<!ENTITY bemptyv "&#x29B0;">
+<!ENTITY demptyv "&#x29B1;">
+<!ENTITY cemptyv "&#x29B2;">
+<!ENTITY raemptyv "&#x29B3;">
+<!ENTITY laemptyv "&#x29B4;">
+<!ENTITY ohbar "&#x29B5;">
+<!ENTITY omid "&#x29B6;">
+<!ENTITY opar "&#x29B7;">
+<!ENTITY operp "&#x29B9;">
+<!ENTITY olcross "&#x29BB;">
+<!ENTITY odsold "&#x29BC;">
+<!ENTITY olcir "&#x29BE;">
+<!ENTITY ofcir "&#x29BF;">
+<!ENTITY olt "&#x29C0;">
+<!ENTITY ogt "&#x29C1;">
+<!ENTITY cirscir "&#x29C2;">
+<!ENTITY cirE "&#x29C3;">
+<!ENTITY solb "&#x29C4;">
+<!ENTITY bsolb "&#x29C5;">
+<!ENTITY boxbox "&#x29C9;">
+<!ENTITY trisb "&#x29CD;">
+<!ENTITY rtriltri "&#x29CE;">
+<!ENTITY LeftTriangleBar "&#x29CF;">
+<!ENTITY NotLeftTriangleBar "&#x29CF;&#x338;">
+<!ENTITY RightTriangleBar "&#x29D0;">
+<!ENTITY NotRightTriangleBar "&#x29D0;&#x338;">
+<!ENTITY iinfin "&#x29DC;">
+<!ENTITY infintie "&#x29DD;">
+<!ENTITY nvinfin "&#x29DE;">
+<!ENTITY eparsl "&#x29E3;">
+<!ENTITY smeparsl "&#x29E4;">
+<!ENTITY eqvparsl "&#x29E5;">
+<!ENTITY lozf "&#x29EB;">
+<!ENTITY blacklozenge "&#x29EB;">
+<!ENTITY RuleDelayed "&#x29F4;">
+<!ENTITY dsol "&#x29F6;">
+<!ENTITY xodot "&#x2A00;">
+<!ENTITY bigodot "&#x2A00;">
+<!ENTITY xoplus "&#x2A01;">
+<!ENTITY bigoplus "&#x2A01;">
+<!ENTITY xotime "&#x2A02;">
+<!ENTITY bigotimes "&#x2A02;">
+<!ENTITY xuplus "&#x2A04;">
+<!ENTITY biguplus "&#x2A04;">
+<!ENTITY xsqcup "&#x2A06;">
+<!ENTITY bigsqcup "&#x2A06;">
+<!ENTITY qint "&#x2A0C;">
+<!ENTITY iiiint "&#x2A0C;">
+<!ENTITY fpartint "&#x2A0D;">
+<!ENTITY cirfnint "&#x2A10;">
+<!ENTITY awint "&#x2A11;">
+<!ENTITY rppolint "&#x2A12;">
+<!ENTITY scpolint "&#x2A13;">
+<!ENTITY npolint "&#x2A14;">
+<!ENTITY pointint "&#x2A15;">
+<!ENTITY quatint "&#x2A16;">
+<!ENTITY intlarhk "&#x2A17;">
+<!ENTITY pluscir "&#x2A22;">
+<!ENTITY plusacir "&#x2A23;">
+<!ENTITY simplus "&#x2A24;">
+<!ENTITY plusdu "&#x2A25;">
+<!ENTITY plussim "&#x2A26;">
+<!ENTITY plustwo "&#x2A27;">
+<!ENTITY mcomma "&#x2A29;">
+<!ENTITY minusdu "&#x2A2A;">
+<!ENTITY loplus "&#x2A2D;">
+<!ENTITY roplus "&#x2A2E;">
+<!ENTITY Cross "&#x2A2F;">
+<!ENTITY timesd "&#x2A30;">
+<!ENTITY timesbar "&#x2A31;">
+<!ENTITY smashp "&#x2A33;">
+<!ENTITY lotimes "&#x2A34;">
+<!ENTITY rotimes "&#x2A35;">
+<!ENTITY otimesas "&#x2A36;">
+<!ENTITY Otimes "&#x2A37;">
+<!ENTITY odiv "&#x2A38;">
+<!ENTITY triplus "&#x2A39;">
+<!ENTITY triminus "&#x2A3A;">
+<!ENTITY tritime "&#x2A3B;">
+<!ENTITY iprod "&#x2A3C;">
+<!ENTITY intprod "&#x2A3C;">
+<!ENTITY amalg "&#x2A3F;">
+<!ENTITY capdot "&#x2A40;">
+<!ENTITY ncup "&#x2A42;">
+<!ENTITY ncap "&#x2A43;">
+<!ENTITY capand "&#x2A44;">
+<!ENTITY cupor "&#x2A45;">
+<!ENTITY cupcap "&#x2A46;">
+<!ENTITY capcup "&#x2A47;">
+<!ENTITY cupbrcap "&#x2A48;">
+<!ENTITY capbrcup "&#x2A49;">
+<!ENTITY cupcup "&#x2A4A;">
+<!ENTITY capcap "&#x2A4B;">
+<!ENTITY ccups "&#x2A4C;">
+<!ENTITY ccaps "&#x2A4D;">
+<!ENTITY ccupssm "&#x2A50;">
+<!ENTITY And "&#x2A53;">
+<!ENTITY Or "&#x2A54;">
+<!ENTITY andand "&#x2A55;">
+<!ENTITY oror "&#x2A56;">
+<!ENTITY orslope "&#x2A57;">
+<!ENTITY andslope "&#x2A58;">
+<!ENTITY andv "&#x2A5A;">
+<!ENTITY orv "&#x2A5B;">
+<!ENTITY andd "&#x2A5C;">
+<!ENTITY ord "&#x2A5D;">
+<!ENTITY wedbar "&#x2A5F;">
+<!ENTITY sdote "&#x2A66;">
+<!ENTITY simdot "&#x2A6A;">
+<!ENTITY congdot "&#x2A6D;">
+<!ENTITY ncongdot "&#x2A6D;&#x338;">
+<!ENTITY easter "&#x2A6E;">
+<!ENTITY apacir "&#x2A6F;">
+<!ENTITY apE "&#x2A70;">
+<!ENTITY napE "&#x2A70;&#x338;">
+<!ENTITY eplus "&#x2A71;">
+<!ENTITY pluse "&#x2A72;">
+<!ENTITY Esim "&#x2A73;">
+<!ENTITY Colone "&#x2A74;">
+<!ENTITY Equal "&#x2A75;">
+<!ENTITY eDDot "&#x2A77;">
+<!ENTITY ddotseq "&#x2A77;">
+<!ENTITY equivDD "&#x2A78;">
+<!ENTITY ltcir "&#x2A79;">
+<!ENTITY gtcir "&#x2A7A;">
+<!ENTITY ltquest "&#x2A7B;">
+<!ENTITY gtquest "&#x2A7C;">
+<!ENTITY les "&#x2A7D;">
+<!ENTITY LessSlantEqual "&#x2A7D;">
+<!ENTITY leqslant "&#x2A7D;">
+<!ENTITY nles "&#x2A7D;&#x338;">
+<!ENTITY NotLessSlantEqual "&#x2A7D;&#x338;">
+<!ENTITY nleqslant "&#x2A7D;&#x338;">
+<!ENTITY ges "&#x2A7E;">
+<!ENTITY GreaterSlantEqual "&#x2A7E;">
+<!ENTITY geqslant "&#x2A7E;">
+<!ENTITY nges "&#x2A7E;&#x338;">
+<!ENTITY NotGreaterSlantEqual "&#x2A7E;&#x338;">
+<!ENTITY ngeqslant "&#x2A7E;&#x338;">
+<!ENTITY lesdot "&#x2A7F;">
+<!ENTITY gesdot "&#x2A80;">
+<!ENTITY lesdoto "&#x2A81;">
+<!ENTITY gesdoto "&#x2A82;">
+<!ENTITY lesdotor "&#x2A83;">
+<!ENTITY gesdotol "&#x2A84;">
+<!ENTITY lap "&#x2A85;">
+<!ENTITY lessapprox "&#x2A85;">
+<!ENTITY gap "&#x2A86;">
+<!ENTITY gtrapprox "&#x2A86;">
+<!ENTITY lne "&#x2A87;">
+<!ENTITY lneq "&#x2A87;">
+<!ENTITY gne "&#x2A88;">
+<!ENTITY gneq "&#x2A88;">
+<!ENTITY lnap "&#x2A89;">
+<!ENTITY lnapprox "&#x2A89;">
+<!ENTITY gnap "&#x2A8A;">
+<!ENTITY gnapprox "&#x2A8A;">
+<!ENTITY lEg "&#x2A8B;">
+<!ENTITY lesseqqgtr "&#x2A8B;">
+<!ENTITY gEl "&#x2A8C;">
+<!ENTITY gtreqqless "&#x2A8C;">
+<!ENTITY lsime "&#x2A8D;">
+<!ENTITY gsime "&#x2A8E;">
+<!ENTITY lsimg "&#x2A8F;">
+<!ENTITY gsiml "&#x2A90;">
+<!ENTITY lgE "&#x2A91;">
+<!ENTITY glE "&#x2A92;">
+<!ENTITY lesges "&#x2A93;">
+<!ENTITY gesles "&#x2A94;">
+<!ENTITY els "&#x2A95;">
+<!ENTITY eqslantless "&#x2A95;">
+<!ENTITY egs "&#x2A96;">
+<!ENTITY eqslantgtr "&#x2A96;">
+<!ENTITY elsdot "&#x2A97;">
+<!ENTITY egsdot "&#x2A98;">
+<!ENTITY el "&#x2A99;">
+<!ENTITY eg "&#x2A9A;">
+<!ENTITY siml "&#x2A9D;">
+<!ENTITY simg "&#x2A9E;">
+<!ENTITY simlE "&#x2A9F;">
+<!ENTITY simgE "&#x2AA0;">
+<!ENTITY LessLess "&#x2AA1;">
+<!ENTITY NotNestedLessLess "&#x2AA1;&#x338;">
+<!ENTITY GreaterGreater "&#x2AA2;">
+<!ENTITY NotNestedGreaterGreater "&#x2AA2;&#x338;">
+<!ENTITY glj "&#x2AA4;">
+<!ENTITY gla "&#x2AA5;">
+<!ENTITY ltcc "&#x2AA6;">
+<!ENTITY gtcc "&#x2AA7;">
+<!ENTITY lescc "&#x2AA8;">
+<!ENTITY gescc "&#x2AA9;">
+<!ENTITY smt "&#x2AAA;">
+<!ENTITY lat "&#x2AAB;">
+<!ENTITY smte "&#x2AAC;">
+<!ENTITY smtes "&#x2AAC;&#xFE00;">
+<!ENTITY late "&#x2AAD;">
+<!ENTITY lates "&#x2AAD;&#xFE00;">
+<!ENTITY bumpE "&#x2AAE;">
+<!ENTITY pre "&#x2AAF;">
+<!ENTITY preceq "&#x2AAF;">
+<!ENTITY PrecedesEqual "&#x2AAF;">
+<!ENTITY npre "&#x2AAF;&#x338;">
+<!ENTITY npreceq "&#x2AAF;&#x338;">
+<!ENTITY NotPrecedesEqual "&#x2AAF;&#x338;">
+<!ENTITY sce "&#x2AB0;">
+<!ENTITY succeq "&#x2AB0;">
+<!ENTITY SucceedsEqual "&#x2AB0;">
+<!ENTITY nsce "&#x2AB0;&#x338;">
+<!ENTITY nsucceq "&#x2AB0;&#x338;">
+<!ENTITY NotSucceedsEqual "&#x2AB0;&#x338;">
+<!ENTITY prE "&#x2AB3;">
+<!ENTITY scE "&#x2AB4;">
+<!ENTITY prnE "&#x2AB5;">
+<!ENTITY precneqq "&#x2AB5;">
+<!ENTITY scnE "&#x2AB6;">
+<!ENTITY succneqq "&#x2AB6;">
+<!ENTITY prap "&#x2AB7;">
+<!ENTITY precapprox "&#x2AB7;">
+<!ENTITY scap "&#x2AB8;">
+<!ENTITY succapprox "&#x2AB8;">
+<!ENTITY prnap "&#x2AB9;">
+<!ENTITY precnapprox "&#x2AB9;">
+<!ENTITY scnap "&#x2ABA;">
+<!ENTITY succnapprox "&#x2ABA;">
+<!ENTITY Pr "&#x2ABB;">
+<!ENTITY Sc "&#x2ABC;">
+<!ENTITY subdot "&#x2ABD;">
+<!ENTITY supdot "&#x2ABE;">
+<!ENTITY subplus "&#x2ABF;">
+<!ENTITY supplus "&#x2AC0;">
+<!ENTITY submult "&#x2AC1;">
+<!ENTITY supmult "&#x2AC2;">
+<!ENTITY subedot "&#x2AC3;">
+<!ENTITY supedot "&#x2AC4;">
+<!ENTITY subE "&#x2AC5;">
+<!ENTITY subseteqq "&#x2AC5;">
+<!ENTITY nsubE "&#x2AC5;&#x338;">
+<!ENTITY nsubseteqq "&#x2AC5;&#x338;">
+<!ENTITY supE "&#x2AC6;">
+<!ENTITY supseteqq "&#x2AC6;">
+<!ENTITY nsupE "&#x2AC6;&#x338;">
+<!ENTITY nsupseteqq "&#x2AC6;&#x338;">
+<!ENTITY subsim "&#x2AC7;">
+<!ENTITY supsim "&#x2AC8;">
+<!ENTITY subnE "&#x2ACB;">
+<!ENTITY subsetneqq "&#x2ACB;">
+<!ENTITY vsubnE "&#x2ACB;&#xFE00;">
+<!ENTITY varsubsetneqq "&#x2ACB;&#xFE00;">
+<!ENTITY supnE "&#x2ACC;">
+<!ENTITY supsetneqq "&#x2ACC;">
+<!ENTITY vsupnE "&#x2ACC;&#xFE00;">
+<!ENTITY varsupsetneqq "&#x2ACC;&#xFE00;">
+<!ENTITY csub "&#x2ACF;">
+<!ENTITY csup "&#x2AD0;">
+<!ENTITY csube "&#x2AD1;">
+<!ENTITY csupe "&#x2AD2;">
+<!ENTITY subsup "&#x2AD3;">
+<!ENTITY supsub "&#x2AD4;">
+<!ENTITY subsub "&#x2AD5;">
+<!ENTITY supsup "&#x2AD6;">
+<!ENTITY suphsub "&#x2AD7;">
+<!ENTITY supdsub "&#x2AD8;">
+<!ENTITY forkv "&#x2AD9;">
+<!ENTITY topfork "&#x2ADA;">
+<!ENTITY mlcp "&#x2ADB;">
+<!ENTITY Dashv "&#x2AE4;">
+<!ENTITY DoubleLeftTee "&#x2AE4;">
+<!ENTITY Vdashl "&#x2AE6;">
+<!ENTITY Barv "&#x2AE7;">
+<!ENTITY vBar "&#x2AE8;">
+<!ENTITY vBarv "&#x2AE9;">
+<!ENTITY Vbar "&#x2AEB;">
+<!ENTITY Not "&#x2AEC;">
+<!ENTITY bNot "&#x2AED;">
+<!ENTITY rnmid "&#x2AEE;">
+<!ENTITY cirmid "&#x2AEF;">
+<!ENTITY midcir "&#x2AF0;">
+<!ENTITY topcir "&#x2AF1;">
+<!ENTITY nhpar "&#x2AF2;">
+<!ENTITY parsim "&#x2AF3;">
+<!ENTITY parsl "&#x2AFD;">
+<!ENTITY nparsl "&#x2AFD;&#x20E5;">
+<!ENTITY fflig "&#xFB00;">
+<!ENTITY filig "&#xFB01;">
+<!ENTITY fllig "&#xFB02;">
+<!ENTITY ffilig "&#xFB03;">
+<!ENTITY ffllig "&#xFB04;">
+<!ENTITY Ascr "&#x1D49C;">
+<!ENTITY Cscr "&#x1D49E;">
+<!ENTITY Dscr "&#x1D49F;">
+<!ENTITY Gscr "&#x1D4A2;">
+<!ENTITY Jscr "&#x1D4A5;">
+<!ENTITY Kscr "&#x1D4A6;">
+<!ENTITY Nscr "&#x1D4A9;">
+<!ENTITY Oscr "&#x1D4AA;">
+<!ENTITY Pscr "&#x1D4AB;">
+<!ENTITY Qscr "&#x1D4AC;">
+<!ENTITY Sscr "&#x1D4AE;">
+<!ENTITY Tscr "&#x1D4AF;">
+<!ENTITY Uscr "&#x1D4B0;">
+<!ENTITY Vscr "&#x1D4B1;">
+<!ENTITY Wscr "&#x1D4B2;">
+<!ENTITY Xscr "&#x1D4B3;">
+<!ENTITY Yscr "&#x1D4B4;">
+<!ENTITY Zscr "&#x1D4B5;">
+<!ENTITY ascr "&#x1D4B6;">
+<!ENTITY bscr "&#x1D4B7;">
+<!ENTITY cscr "&#x1D4B8;">
+<!ENTITY dscr "&#x1D4B9;">
+<!ENTITY fscr "&#x1D4BB;">
+<!ENTITY hscr "&#x1D4BD;">
+<!ENTITY iscr "&#x1D4BE;">
+<!ENTITY jscr "&#x1D4BF;">
+<!ENTITY kscr "&#x1D4C0;">
+<!ENTITY lscr "&#x1D4C1;">
+<!ENTITY mscr "&#x1D4C2;">
+<!ENTITY nscr "&#x1D4C3;">
+<!ENTITY pscr "&#x1D4C5;">
+<!ENTITY qscr "&#x1D4C6;">
+<!ENTITY rscr "&#x1D4C7;">
+<!ENTITY sscr "&#x1D4C8;">
+<!ENTITY tscr "&#x1D4C9;">
+<!ENTITY uscr "&#x1D4CA;">
+<!ENTITY vscr "&#x1D4CB;">
+<!ENTITY wscr "&#x1D4CC;">
+<!ENTITY xscr "&#x1D4CD;">
+<!ENTITY yscr "&#x1D4CE;">
+<!ENTITY zscr "&#x1D4CF;">
+<!ENTITY Afr "&#x1D504;">
+<!ENTITY Bfr "&#x1D505;">
+<!ENTITY Dfr "&#x1D507;">
+<!ENTITY Efr "&#x1D508;">
+<!ENTITY Ffr "&#x1D509;">
+<!ENTITY Gfr "&#x1D50A;">
+<!ENTITY Jfr "&#x1D50D;">
+<!ENTITY Kfr "&#x1D50E;">
+<!ENTITY Lfr "&#x1D50F;">
+<!ENTITY Mfr "&#x1D510;">
+<!ENTITY Nfr "&#x1D511;">
+<!ENTITY Ofr "&#x1D512;">
+<!ENTITY Pfr "&#x1D513;">
+<!ENTITY Qfr "&#x1D514;">
+<!ENTITY Sfr "&#x1D516;">
+<!ENTITY Tfr "&#x1D517;">
+<!ENTITY Ufr "&#x1D518;">
+<!ENTITY Vfr "&#x1D519;">
+<!ENTITY Wfr "&#x1D51A;">
+<!ENTITY Xfr "&#x1D51B;">
+<!ENTITY Yfr "&#x1D51C;">
+<!ENTITY afr "&#x1D51E;">
+<!ENTITY bfr "&#x1D51F;">
+<!ENTITY cfr "&#x1D520;">
+<!ENTITY dfr "&#x1D521;">
+<!ENTITY efr "&#x1D522;">
+<!ENTITY ffr "&#x1D523;">
+<!ENTITY gfr "&#x1D524;">
+<!ENTITY hfr "&#x1D525;">
+<!ENTITY ifr "&#x1D526;">
+<!ENTITY jfr "&#x1D527;">
+<!ENTITY kfr "&#x1D528;">
+<!ENTITY lfr "&#x1D529;">
+<!ENTITY mfr "&#x1D52A;">
+<!ENTITY nfr "&#x1D52B;">
+<!ENTITY ofr "&#x1D52C;">
+<!ENTITY pfr "&#x1D52D;">
+<!ENTITY qfr "&#x1D52E;">
+<!ENTITY rfr "&#x1D52F;">
+<!ENTITY sfr "&#x1D530;">
+<!ENTITY tfr "&#x1D531;">
+<!ENTITY ufr "&#x1D532;">
+<!ENTITY vfr "&#x1D533;">
+<!ENTITY wfr "&#x1D534;">
+<!ENTITY xfr "&#x1D535;">
+<!ENTITY yfr "&#x1D536;">
+<!ENTITY zfr "&#x1D537;">
+<!ENTITY Aopf "&#x1D538;">
+<!ENTITY Bopf "&#x1D539;">
+<!ENTITY Dopf "&#x1D53B;">
+<!ENTITY Eopf "&#x1D53C;">
+<!ENTITY Fopf "&#x1D53D;">
+<!ENTITY Gopf "&#x1D53E;">
+<!ENTITY Iopf "&#x1D540;">
+<!ENTITY Jopf "&#x1D541;">
+<!ENTITY Kopf "&#x1D542;">
+<!ENTITY Lopf "&#x1D543;">
+<!ENTITY Mopf "&#x1D544;">
+<!ENTITY Oopf "&#x1D546;">
+<!ENTITY Sopf "&#x1D54A;">
+<!ENTITY Topf "&#x1D54B;">
+<!ENTITY Uopf "&#x1D54C;">
+<!ENTITY Vopf "&#x1D54D;">
+<!ENTITY Wopf "&#x1D54E;">
+<!ENTITY Xopf "&#x1D54F;">
+<!ENTITY Yopf "&#x1D550;">
+<!ENTITY aopf "&#x1D552;">
+<!ENTITY bopf "&#x1D553;">
+<!ENTITY copf "&#x1D554;">
+<!ENTITY dopf "&#x1D555;">
+<!ENTITY eopf "&#x1D556;">
+<!ENTITY fopf "&#x1D557;">
+<!ENTITY gopf "&#x1D558;">
+<!ENTITY hopf "&#x1D559;">
+<!ENTITY iopf "&#x1D55A;">
+<!ENTITY jopf "&#x1D55B;">
+<!ENTITY kopf "&#x1D55C;">
+<!ENTITY lopf "&#x1D55D;">
+<!ENTITY mopf "&#x1D55E;">
+<!ENTITY nopf "&#x1D55F;">
+<!ENTITY oopf "&#x1D560;">
+<!ENTITY popf "&#x1D561;">
+<!ENTITY qopf "&#x1D562;">
+<!ENTITY ropf "&#x1D563;">
+<!ENTITY sopf "&#x1D564;">
+<!ENTITY topf "&#x1D565;">
+<!ENTITY uopf "&#x1D566;">
+<!ENTITY vopf "&#x1D567;">
+<!ENTITY wopf "&#x1D568;">
+<!ENTITY xopf "&#x1D569;">
+<!ENTITY yopf "&#x1D56A;">
+<!ENTITY zopf "&#x1D56B;">
diff --git a/testing/web-platform/tests/tools/manifest/commands.json b/testing/web-platform/tests/tools/manifest/commands.json
new file mode 100644
index 0000000000..769675e0ee
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/commands.json
@@ -0,0 +1,23 @@
+{
+  "manifest": {
+    "path": "update.py",
+    "script": "run",
+    "parser": "create_parser",
+    "help": "Update the MANIFEST.json file",
+    "virtualenv": false
+  },
+  "manifest-download": {
+    "path": "download.py",
+    "script": "run",
+    "parser": "create_parser",
+    "help": "Download recent pregenerated MANIFEST.json file",
+    "virtualenv": false
+  },
+  "test-paths": {
+    "path": "testpaths.py",
+    "script": "run",
+    "parser": "create_parser",
+    "help": "Print test paths given a set of test ids",
+    "virtualenv": false
+  }
+}
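
The file above registers each manifest subcommand under a name, with "path" pointing at the implementing module in this directory and "parser"/"script" naming the argparse factory and entry-point function inside it. The following is a minimal, hypothetical dispatcher sketch showing how such a registry can be consumed; it is not the wpt CLI's actual loader, and the helper names (load_commands, dispatch) and the assumption that the tools package is importable are invented for the example.

    import importlib
    import json
    import os

    def load_commands(manifest_dir):
        # Read the registry added in the patch above; each value describes one subcommand.
        with open(os.path.join(manifest_dir, "commands.json")) as f:
            return json.load(f)

    def dispatch(manifest_dir, argv):
        # argv is e.g. ["manifest-download", "--force"]
        props = load_commands(manifest_dir)[argv[0]]
        # "path" names a module file in this package, e.g. download.py;
        # import it as tools.manifest.<name> so its relative imports resolve.
        module_name = "tools.manifest." + os.path.splitext(props["path"])[0]
        module = importlib.import_module(module_name)
        # "parser" builds the argparse parser; "script" is the entry point.
        parser = getattr(module, props["parser"])()
        kwargs = vars(parser.parse_args(argv[1:]))
        return getattr(module, props["script"])(**kwargs)
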
diff --git a/testing/web-platform/tests/tools/manifest/download.py b/testing/web-platform/tests/tools/manifest/download.py
new file mode 100644
index 0000000000..4a8b6fc347
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/download.py
@@ -0,0 +1,207 @@
+import argparse
+import bz2
+import gzip
+import json
+import io
+import os
+from datetime import datetime, timedelta
+from urllib.request import urlopen
+
+try:
+ import zstandard
+except ImportError:
+ zstandard = None
+
+from .utils import git
+
+from . import log
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Any
+ from typing import Callable
+ from typing import List
+ from typing import Optional
+ from typing import Text
+
+here = os.path.dirname(__file__)
+
+wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
+logger = log.get_logger()
+
+
+def abs_path(path):
+ # type: (Text) -> Text
+ return os.path.abspath(os.path.expanduser(path))
+
+
+def should_download(manifest_path, rebuild_time=timedelta(days=5)):
+ # type: (Text, timedelta) -> bool
+ if not os.path.exists(manifest_path):
+ return True
+ mtime = datetime.fromtimestamp(os.path.getmtime(manifest_path))
+ if mtime < datetime.now() - rebuild_time:
+ return True
+ logger.info("Skipping manifest download because existing file is recent")
+ return False
+
+
+def merge_pr_tags(repo_root, max_count=50):
+ # type: (Text, int) -> List[Text]
+ gitfunc = git(repo_root)
+ tags = [] # type: List[Text]
+ if gitfunc is None:
+ return tags
+ for line in gitfunc("log", "--format=%D", "--max-count=%s" % max_count).split("\n"):
+ for ref in line.split(", "):
+ if ref.startswith("tag: merge_pr_"):
+ tags.append(ref[5:])
+ return tags
+
+
+def score_name(name):
+ # type: (Text) -> Optional[int]
+ """Score how much we like each filename, lower wins, None rejects"""
+
+ # Accept both ways of naming the manifest asset, even though
+ # there's no longer a reason to include the commit sha.
+ if name.startswith("MANIFEST-") or name.startswith("MANIFEST."):
+ if zstandard and name.endswith("json.zst"):
+ return 1
+ if name.endswith(".json.bz2"):
+ return 2
+ if name.endswith(".json.gz"):
+ return 3
+ return None
+
+
+def github_url(tags):
+ # type: (List[Text]) -> Optional[List[Text]]
+ for tag in tags:
+ url = "https://api.github.com/repos/web-platform-tests/wpt/releases/tags/%s" % tag
+ try:
+ resp = urlopen(url)
+ except Exception:
+ logger.warning("Fetching %s failed" % url)
+ continue
+
+ if resp.code != 200:
+ logger.warning("Fetching %s failed; got HTTP status %d" % (url, resp.code))
+ continue
+
+ try:
+ release = json.load(resp.fp)
+ except ValueError:
+ logger.warning("Response was not valid JSON")
+ return None
+
+ candidates = []
+ for item in release["assets"]:
+ score = score_name(item["name"])
+ if score is not None:
+ candidates.append((score, item["browser_download_url"]))
+
+ return [item[1] for item in sorted(candidates)]
+
+ return None
+
+
+def download_manifest(
+ manifest_path, # type: Text
+ tags_func, # type: Callable[[], List[Text]]
+ url_func, # type: Callable[[List[Text]], Optional[List[Text]]]
+ force=False # type: bool
+):
+ # type: (...) -> bool
+ if not force and not should_download(manifest_path):
+ return False
+
+ tags = tags_func()
+
+ urls = url_func(tags)
+ if not urls:
+ logger.warning("No generated manifest found")
+ return False
+
+ for url in urls:
+ logger.info("Downloading manifest from %s" % url)
+ try:
+ resp = urlopen(url)
+ except Exception:
+ logger.warning("Downloading pregenerated manifest failed")
+ continue
+
+ if resp.code != 200:
+ logger.warning("Downloading pregenerated manifest failed; got HTTP status %d" %
+ resp.code)
+ continue
+
+ if url.endswith(".zst"):
+ if not zstandard:
+ continue
+ try:
+ dctx = zstandard.ZstdDecompressor()
+ decompressed = dctx.decompress(resp.read())
+ except OSError:
+ logger.warning("Failed to decompress downloaded file")
+ continue
+ elif url.endswith(".bz2"):
+ try:
+ decompressed = bz2.decompress(resp.read())
+ except OSError:
+ logger.warning("Failed to decompress downloaded file")
+ continue
+ elif url.endswith(".gz"):
+ fileobj = io.BytesIO(resp.read())
+ try:
+ with gzip.GzipFile(fileobj=fileobj) as gzf:
+ data = gzf.read()
+ decompressed = data
+ except OSError:
+ logger.warning("Failed to decompress downloaded file")
+ continue
+ else:
+ logger.warning("Unknown file extension: %s" % url)
+ continue
+ break
+ else:
+ return False
+
+ try:
+ with open(manifest_path, "wb") as f:
+ f.write(decompressed)
+ except Exception:
+ logger.warning("Failed to write manifest")
+ return False
+ logger.info("Manifest downloaded")
+ return True
+
+
+def create_parser():
+ # type: () -> argparse.ArgumentParser
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "-p", "--path", type=abs_path, help="Path to manifest file.")
+ parser.add_argument(
+ "--tests-root", type=abs_path, default=wpt_root, help="Path to root of tests.")
+ parser.add_argument(
+ "--force", action="store_true",
+ help="Always download, even if the existing manifest is recent")
+ return parser
+
+
+def download_from_github(path, tests_root, force=False):
+ # type: (Text, Text, bool) -> bool
+ return download_manifest(path, lambda: merge_pr_tags(tests_root), github_url,
+ force=force)
+
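+# Illustrative usage (hypothetical paths):
+#     download_from_github("/tmp/MANIFEST.json", "/path/to/wpt")
+# returns True when a pregenerated manifest was fetched and written, and False
+# when the existing file is recent enough or no suitable release asset is found.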
+
+def run(**kwargs):
+ # type: (**Any) -> int
+ if kwargs["path"] is None:
+ path = os.path.join(kwargs["tests_root"], "MANIFEST.json")
+ else:
+ path = kwargs["path"]
+ success = download_from_github(path, kwargs["tests_root"], kwargs["force"])
+ return 0 if success else 1
diff --git a/testing/web-platform/tests/tools/manifest/item.py b/testing/web-platform/tests/tools/manifest/item.py
new file mode 100644
index 0000000000..02a72eeb29
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/item.py
@@ -0,0 +1,385 @@
+import os.path
+from inspect import isabstract
+from urllib.parse import urljoin, urlparse, parse_qs
+from abc import ABCMeta, abstractproperty
+
+from .utils import to_os_path
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Any, Dict, Hashable, List, Optional, Sequence, Text, Tuple, Type, Union, cast
+ from .manifest import Manifest
+ Fuzzy = Dict[Optional[Tuple[str, str, str]], List[int]]
+ PageRanges = Dict[str, List[int]]
+
+item_types = {} # type: Dict[str, Type[ManifestItem]]
+
+
+class ManifestItemMeta(ABCMeta):
+ """Custom metaclass that registers all the subclasses in the
+ item_types dictionary according to the value of their item_type
+ attribute, and otherwise behaves like an ABCMeta."""
+
+ def __new__(cls, name, bases, attrs):
+ # type: (Type[ManifestItemMeta], str, Tuple[type], Dict[str, Any]) -> ManifestItemMeta
+ inst = super().__new__(cls, name, bases, attrs)
+ if isabstract(inst):
+ return inst
+
+ assert issubclass(inst, ManifestItem)
+ if MYPY:
+ inst_ = cast(Type[ManifestItem], inst)
+ item_type = cast(str, inst_.item_type)
+ else:
+ inst_ = inst
+ assert isinstance(inst_.item_type, str)
+ item_type = inst_.item_type
+
+ item_types[item_type] = inst_
+
+ return inst_
+
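+# Illustrative sketch of the registration above (hypothetical subclass): a
+# concrete class such as
+#     class ExampleTest(URLManifestItem):
+#         __slots__ = ()
+#         item_type = "example"
+# is picked up by ManifestItemMeta, so item_types["example"] would be ExampleTest;
+# abstract classes (those still missing `id` or `item_type`) are not registered.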
+
+class ManifestItem(metaclass=ManifestItemMeta):
+ __slots__ = ("_tests_root", "path")
+
+ def __init__(self, tests_root, path):
+ # type: (Text, Text) -> None
+ self._tests_root = tests_root
+ self.path = path
+
+ @abstractproperty
+ def id(self):
+ # type: () -> Text
+ """The test's id (usually its url)"""
+ pass
+
+ @abstractproperty
+ def item_type(self):
+ # type: () -> str
+ """The item's type"""
+ pass
+
+ @property
+ def path_parts(self):
+ # type: () -> Tuple[Text, ...]
+ return tuple(self.path.split(os.path.sep))
+
+ def key(self):
+ # type: () -> Hashable
+ """A unique identifier for the test"""
+ return (self.item_type, self.id)
+
+ def __eq__(self, other):
+ # type: (Any) -> bool
+ if not hasattr(other, "key"):
+ return False
+ return bool(self.key() == other.key())
+
+ def __hash__(self):
+ # type: () -> int
+ return hash(self.key())
+
+ def __repr__(self):
+ # type: () -> str
+ return f"<{self.__module__}.{self.__class__.__name__} id={self.id!r}, path={self.path!r}>"
+
+ def to_json(self):
+ # type: () -> Tuple[Any, ...]
+ return ()
+
+ @classmethod
+ def from_json(cls,
+ manifest, # type: Manifest
+ path, # type: Text
+ obj # type: Any
+ ):
+ # type: (...) -> ManifestItem
+ path = to_os_path(path)
+ tests_root = manifest.tests_root
+ assert tests_root is not None
+ return cls(tests_root, path)
+
+
+class URLManifestItem(ManifestItem):
+ __slots__ = ("url_base", "_url", "_extras", "_flags")
+
+ def __init__(self,
+ tests_root, # type: Text
+ path, # type: Text
+ url_base, # type: Text
+ url, # type: Optional[Text]
+ **extras # type: Any
+ ):
+ # type: (...) -> None
+ super().__init__(tests_root, path)
+ assert url_base[0] == "/"
+ self.url_base = url_base
+ assert url is None or url[0] != "/"
+ self._url = url
+ self._extras = extras
+ parsed_url = urlparse(self.url)
+ self._flags = (set(parsed_url.path.rsplit("/", 1)[1].split(".")[1:-1]) |
+ set(parse_qs(parsed_url.query).get("wpt_flags", [])))
+
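+ # Illustrative example of the flag computation above (hypothetical file): for a
+ # test at "/foo.https.html?wpt_flags=h2", the filename contributes {"https"} and
+ # the query string contributes {"h2"}, so self._flags == {"https", "h2"} and the
+ # `https` and `h2` properties below both return True.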
+ @property
+ def id(self):
+ # type: () -> Text
+ return self.url
+
+ @property
+ def url(self):
+ # type: () -> Text
+ rel_url = self._url or self.path.replace(os.path.sep, "/")
+ # we can outperform urljoin, because we know we just have path relative URLs
+ if self.url_base == "/":
+ return "/" + rel_url
+ return urljoin(self.url_base, rel_url)
+
+ @property
+ def https(self):
+ # type: () -> bool
+ return "https" in self._flags or "serviceworker" in self._flags or "serviceworker-module" in self._flags
+
+ @property
+ def h2(self):
+ # type: () -> bool
+ return "h2" in self._flags
+
+ @property
+ def subdomain(self):
+ # type: () -> bool
+ # Note: this is currently hard-coded to check for `www`, rather than
+ # all possible valid subdomains. It can be extended if needed.
+ return "www" in self._flags
+
+ def to_json(self):
+ # type: () -> Tuple[Optional[Text], Dict[Any, Any]]
+ rel_url = None if self._url == self.path.replace(os.path.sep, "/") else self._url
+ rv = (rel_url, {}) # type: Tuple[Optional[Text], Dict[Any, Any]]
+ return rv
+
+ @classmethod
+ def from_json(cls,
+ manifest, # type: Manifest
+ path, # type: Text
+ obj # type: Tuple[Text, Dict[Any, Any]]
+ ):
+ # type: (...) -> URLManifestItem
+ path = to_os_path(path)
+ url, extras = obj
+ tests_root = manifest.tests_root
+ assert tests_root is not None
+ return cls(tests_root,
+ path,
+ manifest.url_base,
+ url,
+ **extras)
+
+
+class TestharnessTest(URLManifestItem):
+ __slots__ = ()
+
+ item_type = "testharness"
+
+ @property
+ def timeout(self):
+ # type: () -> Optional[Text]
+ return self._extras.get("timeout")
+
+ @property
+ def pac(self):
+ # type: () -> Optional[Text]
+ return self._extras.get("pac")
+
+ @property
+ def testdriver(self):
+ # type: () -> Optional[Text]
+ return self._extras.get("testdriver")
+
+ @property
+ def jsshell(self):
+ # type: () -> Optional[Text]
+ return self._extras.get("jsshell")
+
+ @property
+ def script_metadata(self):
+ # type: () -> Optional[List[Tuple[Text, Text]]]
+ return self._extras.get("script_metadata")
+
+ def to_json(self):
+ # type: () -> Tuple[Optional[Text], Dict[Text, Any]]
+ rv = super().to_json()
+ if self.timeout is not None:
+ rv[-1]["timeout"] = self.timeout
+ if self.pac is not None:
+ rv[-1]["pac"] = self.pac
+ if self.testdriver:
+ rv[-1]["testdriver"] = self.testdriver
+ if self.jsshell:
+ rv[-1]["jsshell"] = True
+ if self.script_metadata:
+ rv[-1]["script_metadata"] = [(k, v) for (k,v) in self.script_metadata]
+ return rv
+
+
+class RefTest(URLManifestItem):
+ __slots__ = ("references",)
+
+ item_type = "reftest"
+
+ def __init__(self,
+ tests_root, # type: Text
+ path, # type: Text
+ url_base, # type: Text
+ url, # type: Optional[Text]
+ references=None, # type: Optional[List[Tuple[Text, Text]]]
+ **extras # type: Any
+ ):
+ super().__init__(tests_root, path, url_base, url, **extras)
+ if references is None:
+ self.references = [] # type: List[Tuple[Text, Text]]
+ else:
+ self.references = references
+
+ @property
+ def timeout(self):
+ # type: () -> Optional[Text]
+ return self._extras.get("timeout")
+
+ @property
+ def viewport_size(self):
+ # type: () -> Optional[Text]
+ return self._extras.get("viewport_size")
+
+ @property
+ def dpi(self):
+ # type: () -> Optional[Text]
+ return self._extras.get("dpi")
+
+ @property
+ def fuzzy(self):
+ # type: () -> Fuzzy
+ fuzzy = self._extras.get("fuzzy", {}) # type: Union[Fuzzy, List[Tuple[Optional[Sequence[Text]], List[int]]]]
+ if not isinstance(fuzzy, list):
+ return fuzzy
+
+ rv = {} # type: Fuzzy
+ for k, v in fuzzy: # type: Tuple[Optional[Sequence[Text]], List[int]]
+ if k is None:
+ key = None # type: Optional[Tuple[Text, Text, Text]]
+ else:
+ # mypy types this as Tuple[Text, ...]
+ assert len(k) == 3
+ key = tuple(k) # type: ignore
+ rv[key] = v
+ return rv
+
+ def to_json(self): # type: ignore
+ # type: () -> Tuple[Optional[Text], List[Tuple[Text, Text]], Dict[Text, Any]]
+ rel_url = None if self._url == self.path else self._url
+ rv = (rel_url, self.references, {}) # type: Tuple[Optional[Text], List[Tuple[Text, Text]], Dict[Text, Any]]
+ extras = rv[-1]
+ if self.timeout is not None:
+ extras["timeout"] = self.timeout
+ if self.viewport_size is not None:
+ extras["viewport_size"] = self.viewport_size
+ if self.dpi is not None:
+ extras["dpi"] = self.dpi
+ if self.fuzzy:
+ extras["fuzzy"] = list(self.fuzzy.items())
+ return rv
+
+ @classmethod
+ def from_json(cls, # type: ignore
+ manifest, # type: Manifest
+ path, # type: Text
+ obj # type: Tuple[Text, List[Tuple[Text, Text]], Dict[Any, Any]]
+ ):
+ # type: (...) -> RefTest
+ tests_root = manifest.tests_root
+ assert tests_root is not None
+ path = to_os_path(path)
+ url, references, extras = obj
+ return cls(tests_root,
+ path,
+ manifest.url_base,
+ url,
+ references,
+ **extras)
+
+
+class PrintRefTest(RefTest):
+ __slots__ = ("references",)
+
+ item_type = "print-reftest"
+
+ @property
+ def page_ranges(self):
+ # type: () -> PageRanges
+ return self._extras.get("page_ranges", {})
+
+ def to_json(self): # type: ignore
+ rv = super().to_json()
+ if self.page_ranges:
+ rv[-1]["page_ranges"] = self.page_ranges
+ return rv
+
+
+class ManualTest(URLManifestItem):
+ __slots__ = ()
+
+ item_type = "manual"
+
+
+class ConformanceCheckerTest(URLManifestItem):
+ __slots__ = ()
+
+ item_type = "conformancechecker"
+
+
+class VisualTest(URLManifestItem):
+ __slots__ = ()
+
+ item_type = "visual"
+
+
+class CrashTest(URLManifestItem):
+ __slots__ = ()
+
+ item_type = "crashtest"
+
+ @property
+ def timeout(self):
+ # type: () -> Optional[Text]
+ return None
+
+
+class WebDriverSpecTest(URLManifestItem):
+ __slots__ = ()
+
+ item_type = "wdspec"
+
+ @property
+ def timeout(self):
+ # type: () -> Optional[Text]
+ return self._extras.get("timeout")
+
+ def to_json(self):
+ # type: () -> Tuple[Optional[Text], Dict[Text, Any]]
+ rv = super().to_json()
+ if self.timeout is not None:
+ rv[-1]["timeout"] = self.timeout
+ return rv
+
+
+class SupportFile(ManifestItem):
+ __slots__ = ()
+
+ item_type = "support"
+
+ @property
+ def id(self):
+ # type: () -> Text
+ return self.path
diff --git a/testing/web-platform/tests/tools/manifest/jsonlib.py b/testing/web-platform/tests/tools/manifest/jsonlib.py
new file mode 100644
index 0000000000..49eaf02e80
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/jsonlib.py
@@ -0,0 +1,139 @@
+import re
+import json
+
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Any, AnyStr, Callable, Dict, IO, Text
+
+
+__all__ = ["load", "dump_local", "dump_local", "dump_dist", "dumps_dist"]
+
+
+try:
+ import ujson
+except ImportError:
+ has_ujson = False
+else:
+ has_ujson = True
+
+#
+# load
+#
+
+if has_ujson:
+ load = ujson.load # type: Callable[[IO[AnyStr]], Any]
+
+else:
+ load = json.load
+
+
+#
+# loads
+#
+
+if has_ujson:
+ loads = ujson.loads # type: Callable[[AnyStr], Any]
+
+else:
+ loads = json.loads
+
+
+#
+# dump/dumps_local options for some libraries
+#
+_ujson_dump_local_kwargs = {
+ 'ensure_ascii': False,
+ 'escape_forward_slashes': False,
+ 'indent': 1,
+ 'reject_bytes': True,
+} # type: Dict[str, Any]
+
+
+_json_dump_local_kwargs = {
+ 'ensure_ascii': False,
+ 'indent': 1,
+ 'separators': (',', ': '),
+} # type: Dict[str, Any]
+
+
+#
+# dump_local (for local, non-distributed usage of JSON)
+#
+
+if has_ujson:
+ def dump_local(obj, fp):
+ # type: (Any, IO[str]) -> None
+ return ujson.dump(obj, fp, **_ujson_dump_local_kwargs)
+
+else:
+ def dump_local(obj, fp):
+ # type: (Any, IO[str]) -> None
+ return json.dump(obj, fp, **_json_dump_local_kwargs)
+
+
+#
+# dumps_local (for local, non-distributed usage of JSON)
+#
+
+if has_ujson:
+ def dumps_local(obj):
+ # type: (Any) -> Text
+ return ujson.dumps(obj, **_ujson_dump_local_kwargs)
+
+else:
+ def dumps_local(obj):
+ # type: (Any) -> Text
+ return json.dumps(obj, **_json_dump_local_kwargs)
+
+
+#
+# dump/dumps_dist (for distributed usage of JSON where files should safely roundtrip)
+#
+
+_ujson_dump_dist_kwargs = {
+ 'sort_keys': True,
+ 'indent': 1,
+ 'reject_bytes': True,
+} # type: Dict[str, Any]
+
+
+_json_dump_dist_kwargs = {
+ 'sort_keys': True,
+ 'indent': 1,
+ 'separators': (',', ': '),
+} # type: Dict[str, Any]
+
+
+if has_ujson:
+ if ujson.dumps([], indent=1) == "[]":
+ # optimistically see if https://github.com/ultrajson/ultrajson/issues/429 is fixed
+ def _ujson_fixup(s):
+ # type: (str) -> str
+ return s
+ else:
+ _ujson_fixup_re = re.compile(r"([\[{])[\n\x20]+([}\]])")
+
+ def _ujson_fixup(s):
+ # type: (str) -> str
+ return _ujson_fixup_re.sub(
+ lambda m: m.group(1) + m.group(2),
+ s
+ )
+
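+ # Illustrative note: on affected ujson builds (see the issue linked above),
+ # empty containers serialise with whitespace between the brackets; the
+ # regex-based fixup collapses that, e.g. turning "[\n\n]" into "[]" and
+ # "{\n \n}" into "{}".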
+ def dump_dist(obj, fp):
+ # type: (Any, IO[str]) -> None
+ fp.write(_ujson_fixup(ujson.dumps(obj, **_ujson_dump_dist_kwargs)))
+
+ def dumps_dist(obj):
+ # type: (Any) -> Text
+ return _ujson_fixup(ujson.dumps(obj, **_ujson_dump_dist_kwargs))
+else:
+ def dump_dist(obj, fp):
+ # type: (Any, IO[str]) -> None
+ json.dump(obj, fp, **_json_dump_dist_kwargs)
+
+ def dumps_dist(obj):
+ # type: (Any) -> Text
+ return json.dumps(obj, **_json_dump_dist_kwargs)
diff --git a/testing/web-platform/tests/tools/manifest/log.py b/testing/web-platform/tests/tools/manifest/log.py
new file mode 100644
index 0000000000..6551c2b5f7
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/log.py
@@ -0,0 +1,11 @@
+import logging
+
+logger = logging.getLogger("manifest")
+
+def enable_debug_logging():
+ # type: () -> None
+ logger.setLevel(logging.DEBUG)
+
+def get_logger():
+ # type: () -> logging.Logger
+ return logger
diff --git a/testing/web-platform/tests/tools/manifest/manifest.py b/testing/web-platform/tests/tools/manifest/manifest.py
new file mode 100644
index 0000000000..4b7792ec00
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/manifest.py
@@ -0,0 +1,449 @@
+import os
+import sys
+from atomicwrites import atomic_write
+from copy import deepcopy
+from multiprocessing import Pool, cpu_count
+
+from . import jsonlib
+from . import vcs
+from .item import (ConformanceCheckerTest,
+ CrashTest,
+ ManifestItem,
+ ManualTest,
+ PrintRefTest,
+ RefTest,
+ SupportFile,
+ TestharnessTest,
+ VisualTest,
+ WebDriverSpecTest)
+from .log import get_logger
+from .sourcefile import SourceFile
+from .typedata import TypeData
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from logging import Logger
+ from typing import Any
+ from typing import Container
+ from typing import Dict
+ from typing import IO
+ from typing import Iterator
+ from typing import Iterable
+ from typing import Optional
+ from typing import Set
+ from typing import Text
+ from typing import Tuple
+ from typing import Type
+ from typing import Union
+
+
+CURRENT_VERSION = 8 # type: int
+
+
+class ManifestError(Exception):
+ pass
+
+
+class ManifestVersionMismatch(ManifestError):
+ pass
+
+
+class InvalidCacheError(Exception):
+ pass
+
+
+item_classes = {"testharness": TestharnessTest,
+ "reftest": RefTest,
+ "print-reftest": PrintRefTest,
+ "crashtest": CrashTest,
+ "manual": ManualTest,
+ "wdspec": WebDriverSpecTest,
+ "conformancechecker": ConformanceCheckerTest,
+ "visual": VisualTest,
+ "support": SupportFile} # type: Dict[Text, Type[ManifestItem]]
+
+
+def compute_manifest_items(source_file):
+ # type: (SourceFile) -> Tuple[Tuple[Text, ...], Text, Set[ManifestItem], Text]
+ rel_path_parts = source_file.rel_path_parts
+ new_type, manifest_items = source_file.manifest_items()
+ file_hash = source_file.hash
+ return rel_path_parts, new_type, set(manifest_items), file_hash
+
+
+if MYPY:
+ ManifestDataType = Dict[Any, TypeData]
+else:
+ ManifestDataType = dict
+
+
+class ManifestData(ManifestDataType):
+ def __init__(self, manifest):
+ # type: (Manifest) -> None
+ """Dictionary subclass containing a TypeData instance for each test type,
+ keyed by type name"""
+ self.initialized = False # type: bool
+ for key, value in item_classes.items():
+ self[key] = TypeData(manifest, value)
+ self.initialized = True
+ self.json_obj = None # type: None
+
+ def __setitem__(self, key, value):
+ # type: (Text, TypeData) -> None
+ if self.initialized:
+ raise AttributeError
+ dict.__setitem__(self, key, value)
+
+ def paths(self):
+ # type: () -> Set[Text]
+ """Get a list of all paths containing test items
+ without actually constructing all the items"""
+ rv = set() # type: Set[Text]
+ for item_data in self.values():
+ for item in item_data:
+ rv.add(os.path.sep.join(item))
+ return rv
+
+ def type_by_path(self):
+ # type: () -> Dict[Tuple[Text, ...], Text]
+ rv = {}
+ for item_type, item_data in self.items():
+ for item in item_data:
+ rv[item] = item_type
+ return rv
+
+
+class Manifest:
+ def __init__(self, tests_root, url_base="/"):
+ # type: (Text, Text) -> None
+ assert url_base is not None
+ self._data = ManifestData(self) # type: ManifestData
+ self.tests_root = tests_root # type: Text
+ self.url_base = url_base # type: Text
+
+ def __iter__(self):
+ # type: () -> Iterator[Tuple[Text, Text, Set[ManifestItem]]]
+ return self.itertypes()
+
+ def itertypes(self, *types):
+ # type: (*Text) -> Iterator[Tuple[Text, Text, Set[ManifestItem]]]
+ for item_type in (types or sorted(self._data.keys())):
+ for path in self._data[item_type]:
+ rel_path = os.sep.join(path)
+ tests = self._data[item_type][path]
+ yield item_type, rel_path, tests
+
+ def iterpath(self, path):
+ # type: (Text) -> Iterable[ManifestItem]
+ tpath = tuple(path.split(os.path.sep))
+
+ for type_tests in self._data.values():
+ i = type_tests.get(tpath, set())
+ assert i is not None
+ yield from i
+
+ def iterdir(self, dir_name):
+ # type: (Text) -> Iterable[ManifestItem]
+ tpath = tuple(dir_name.split(os.path.sep))
+ tpath_len = len(tpath)
+
+ for type_tests in self._data.values():
+ for path, tests in type_tests.items():
+ if path[:tpath_len] == tpath:
+ yield from tests
+
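+ # Illustrative usage of the iteration API above (hypothetical manifest):
+ #     for item_type, rel_path, tests in manifest.itertypes("testharness"):
+ #         ... # every testharness test, grouped by source file
+ #     list(manifest.iterdir("css")) # all items whose path is under css/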
+ def update(self, tree, parallel=True):
+ # type: (Iterable[Tuple[Text, Optional[Text], bool]], bool) -> bool
+ """Update the manifest given an iterable of items that make up the updated manifest.
+
+ The iterable must generate tuples of the form (path, file_hash, updated), where
+ `updated` is True for paths whose manifest items should be recomputed and False for
+ paths that are unchanged. This unusual API is designed as an optimisation, meaning
+ that SourceFile objects need not be constructed when a path is not being updated, but
+ the absence of a path from the iterator may be used to remove defunct entries from
+ the manifest."""
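+ # Illustrative example of the expected input (hypothetical paths):
+ #     [("css/foo.html", None, True), # changed or new: recompute its items
+ #      ("css/bar.html", None, False)] # unchanged: keep the existing entry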
+
+ logger = get_logger()
+
+ changed = False
+
+ # Create local variable references to these dicts so we avoid the
+ # attribute access in the hot loop below
+ data = self._data
+
+ types = data.type_by_path()
+ remaining_manifest_paths = set(types)
+
+ to_update = []
+
+ for path, file_hash, updated in tree:
+ path_parts = tuple(path.split(os.path.sep))
+ is_new = path_parts not in remaining_manifest_paths
+
+ if not updated and is_new:
+ # This is kind of a bandaid; if we ended up here the cache
+ # was invalid but we've been using it anyway. That's obviously
+ # bad; we should fix the underlying issue that we sometimes
+ # use an invalid cache. But at least this fixes the immediate
+ # problem
+ raise InvalidCacheError
+
+ if not updated:
+ remaining_manifest_paths.remove(path_parts)
+ else:
+ assert self.tests_root is not None
+ source_file = SourceFile(self.tests_root,
+ path,
+ self.url_base,
+ file_hash)
+
+ hash_changed = False # type: bool
+
+ if not is_new:
+ if file_hash is None:
+ file_hash = source_file.hash
+ remaining_manifest_paths.remove(path_parts)
+ old_type = types[path_parts]
+ old_hash = data[old_type].hashes[path_parts]
+ if old_hash != file_hash:
+ hash_changed = True
+ del data[old_type][path_parts]
+
+ if is_new or hash_changed:
+ to_update.append(source_file)
+
+ if to_update:
+ logger.debug("Computing manifest update for %s items" % len(to_update))
+ changed = True
+
+
+ # 25 items was derived experimentally (2020-01) to be approximately the
+ # point at which it is quicker to create a Pool and parallelize update.
+ pool = None
+ if parallel and len(to_update) > 25 and cpu_count() > 1:
+ # On Python 3 on Windows, using >= MAXIMUM_WAIT_OBJECTS processes
+ # causes a crash in the multiprocessing module. Whilst this enum
+ # can technically have any value, it is usually 64. For safety,
+ # restrict manifest regeneration to 48 processes on Windows.
+ #
+ # See https://bugs.python.org/issue26903 and https://bugs.python.org/issue40263
+ processes = cpu_count()
+ if sys.platform == "win32" and processes > 48:
+ processes = 48
+ pool = Pool(processes)
+
+ # chunksize set > 1 when more than 10000 tests, because
+ # chunking is a net-gain once we get to very large numbers
+ # of items (again, experimentally, 2020-01)
+ chunksize = max(1, len(to_update) // 10000)
+ logger.debug("Doing a multiprocessed update. CPU count: %s, "
+ "processes: %s, chunksize: %s" % (cpu_count(), processes, chunksize))
+ results = pool.imap_unordered(compute_manifest_items,
+ to_update,
+ chunksize=chunksize
+ ) # type: Iterator[Tuple[Tuple[Text, ...], Text, Set[ManifestItem], Text]]
+ else:
+ results = map(compute_manifest_items, to_update)
+
+ for result in results:
+ rel_path_parts, new_type, manifest_items, file_hash = result
+ data[new_type][rel_path_parts] = manifest_items
+ data[new_type].hashes[rel_path_parts] = file_hash
+
+ # Make sure to terminate the Pool, to avoid hangs on Python 3.
+ # https://docs.python.org/3/library/multiprocessing.html#multiprocessing.pool.Pool
+ if pool is not None:
+ pool.terminate()
+
+ if remaining_manifest_paths:
+ changed = True
+ for rel_path_parts in remaining_manifest_paths:
+ for test_data in data.values():
+ if rel_path_parts in test_data:
+ del test_data[rel_path_parts]
+
+ return changed
+
+ def to_json(self, caller_owns_obj=True):
+ # type: (bool) -> Dict[Text, Any]
+ """Dump a manifest into a object which can be serialized as JSON
+
+ If caller_owns_obj is False, then the return value remains
+ owned by the manifest; it is _vitally important_ that _no_
+ (even read) operation is done on the manifest, as otherwise
+ objects within the object graph rooted at the return value can
+ be mutated. This essentially makes this mode very dangerous
+ and only to be used under extreme care.
+
+ """
+ out_items = {
+ test_type: type_paths.to_json()
+ for test_type, type_paths in self._data.items() if type_paths
+ }
+
+ if caller_owns_obj:
+ out_items = deepcopy(out_items)
+
+ rv = {"url_base": self.url_base,
+ "items": out_items,
+ "version": CURRENT_VERSION} # type: Dict[Text, Any]
+ return rv
+
+ @classmethod
+ def from_json(cls, tests_root, obj, types=None, callee_owns_obj=False):
+ # type: (Text, Dict[Text, Any], Optional[Container[Text]], bool) -> Manifest
+ """Load a manifest from a JSON object
+
+ This loads a manifest for a given local test_root path from an
+ object obj, potentially partially loading it to only load the
+ types given by types.
+
+ If callee_owns_obj is True, then ownership of obj transfers
+ to this function when called, and the caller must never mutate
+ the obj or anything referred to in the object graph rooted at
+ obj.
+
+ """
+ version = obj.get("version")
+ if version != CURRENT_VERSION:
+ raise ManifestVersionMismatch
+
+ self = cls(tests_root, url_base=obj.get("url_base", "/"))
+ if not hasattr(obj, "items"):
+ raise ManifestError
+
+ for test_type, type_paths in obj["items"].items():
+ if test_type not in item_classes:
+ raise ManifestError
+
+ if types and test_type not in types:
+ continue
+
+ if not callee_owns_obj:
+ type_paths = deepcopy(type_paths)
+
+ self._data[test_type].set_json(type_paths)
+
+ return self
+
+
+def load(tests_root, manifest, types=None):
+ # type: (Text, Union[IO[bytes], Text], Optional[Container[Text]]) -> Optional[Manifest]
+ logger = get_logger()
+
+ logger.warning("Prefer load_and_update instead")
+ return _load(logger, tests_root, manifest, types)
+
+
+__load_cache = {} # type: Dict[Text, Manifest]
+
+
+def _load(logger, # type: Logger
+ tests_root, # type: Text
+ manifest, # type: Union[IO[bytes], Text]
+ types=None, # type: Optional[Container[Text]]
+ allow_cached=True # type: bool
+ ):
+ # type: (...) -> Optional[Manifest]
+ manifest_path = (manifest if isinstance(manifest, str)
+ else manifest.name)
+ if allow_cached and manifest_path in __load_cache:
+ return __load_cache[manifest_path]
+
+ if isinstance(manifest, str):
+ if os.path.exists(manifest):
+ logger.debug("Opening manifest at %s" % manifest)
+ else:
+ logger.debug("Creating new manifest at %s" % manifest)
+ try:
+ with open(manifest, encoding="utf-8") as f:
+ rv = Manifest.from_json(tests_root,
+ jsonlib.load(f),
+ types=types,
+ callee_owns_obj=True)
+ except OSError:
+ return None
+ except ValueError:
+ logger.warning("%r may be corrupted", manifest)
+ return None
+ else:
+ rv = Manifest.from_json(tests_root,
+ jsonlib.load(manifest),
+ types=types,
+ callee_owns_obj=True)
+
+ if allow_cached:
+ __load_cache[manifest_path] = rv
+ return rv
+
+
+def load_and_update(tests_root, # type: Text
+ manifest_path, # type: Text
+ url_base, # type: Text
+ update=True, # type: bool
+ rebuild=False, # type: bool
+ metadata_path=None, # type: Optional[Text]
+ cache_root=None, # type: Optional[Text]
+ working_copy=True, # type: bool
+ types=None, # type: Optional[Container[Text]]
+ write_manifest=True, # type: bool
+ allow_cached=True, # type: bool
+ parallel=True # type: bool
+ ):
+ # type: (...) -> Manifest
+
+ logger = get_logger()
+
+ manifest = None
+ if not rebuild:
+ try:
+ manifest = _load(logger,
+ tests_root,
+ manifest_path,
+ types=types,
+ allow_cached=allow_cached)
+ except ManifestVersionMismatch:
+ logger.info("Manifest version changed, rebuilding")
+ except ManifestError:
+ logger.warning("Failed to load manifest, rebuilding")
+
+ if manifest is not None and manifest.url_base != url_base:
+ logger.info("Manifest url base did not match, rebuilding")
+ manifest = None
+
+ if manifest is None:
+ manifest = Manifest(tests_root, url_base)
+ rebuild = True
+ update = True
+
+ if rebuild or update:
+ logger.info("Updating manifest")
+ for retry in range(2):
+ try:
+ tree = vcs.get_tree(tests_root, manifest, manifest_path, cache_root,
+ working_copy, rebuild)
+ changed = manifest.update(tree, parallel)
+ break
+ except InvalidCacheError:
+ logger.warning("Manifest cache was invalid, doing a complete rebuild")
+ rebuild = True
+ else:
+ # If we didn't break there was an error
+ raise
+ if write_manifest and changed:
+ write(manifest, manifest_path)
+ tree.dump_caches()
+
+ return manifest
+
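+# Illustrative usage (hypothetical paths): load the existing manifest if its
+# version and url_base match (rebuilding otherwise), update it against the
+# working tree, and write it back to disk if anything changed:
+#     manifest = load_and_update("/path/to/wpt", "/path/to/wpt/MANIFEST.json", "/")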
+
+def write(manifest, manifest_path):
+ # type: (Manifest, Text) -> None
+ dir_name = os.path.dirname(manifest_path)
+ if not os.path.exists(dir_name):
+ os.makedirs(dir_name)
+ with atomic_write(manifest_path, overwrite=True) as f:
+ # Use ',' instead of the default ', ' separator to prevent trailing
+ # spaces: https://docs.python.org/2/library/json.html#json.dump
+ jsonlib.dump_dist(manifest.to_json(caller_owns_obj=True), f)
+ f.write("\n")
diff --git a/testing/web-platform/tests/tools/manifest/requirements.txt b/testing/web-platform/tests/tools/manifest/requirements.txt
new file mode 100644
index 0000000000..9f5bc8a143
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/requirements.txt
@@ -0,0 +1 @@
+zstandard==0.17.0
diff --git a/testing/web-platform/tests/tools/manifest/sourcefile.py b/testing/web-platform/tests/tools/manifest/sourcefile.py
new file mode 100644
index 0000000000..3919b5ac10
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/sourcefile.py
@@ -0,0 +1,1144 @@
+import hashlib
+import re
+import os
+from collections import deque
+from io import BytesIO
+from urllib.parse import urljoin
+from fnmatch import fnmatch
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Any
+ from typing import BinaryIO
+ from typing import Callable
+ from typing import Deque
+ from typing import Dict
+ from typing import Iterable
+ from typing import List
+ from typing import Optional
+ from typing import Pattern
+ from typing import Set
+ from typing import Text
+ from typing import Tuple
+ from typing import Union
+ from typing import cast
+
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ from xml.etree import ElementTree as ElementTree # type: ignore
+
+import html5lib
+
+from . import XMLParser
+from .item import (ConformanceCheckerTest,
+ CrashTest,
+ ManifestItem,
+ ManualTest,
+ PrintRefTest,
+ RefTest,
+ SupportFile,
+ TestharnessTest,
+ VisualTest,
+ WebDriverSpecTest)
+from .utils import cached_property
+
+wd_pattern = "*.py"
+js_meta_re = re.compile(br"//\s*META:\s*(\w*)=(.*)$")
+python_meta_re = re.compile(br"#\s*META:\s*(\w*)=(.*)$")
+
+reference_file_re = re.compile(r'(^|[\-_])(not)?ref[0-9]*([\-_]|$)')
+
+space_chars = "".join(html5lib.constants.spaceCharacters) # type: Text
+
+
+def replace_end(s, old, new):
+ # type: (Text, Text, Text) -> Text
+ """
+ Given a string `s` that ends with `old`, replace that occurrence of `old`
+ with `new`.
+ """
+ assert s.endswith(old)
+ return s[:-len(old)] + new
+
+
+def read_script_metadata(f, regexp):
+ # type: (BinaryIO, Pattern[bytes]) -> Iterable[Tuple[Text, Text]]
+ """
+ Yields any metadata (pairs of strings) from the file-like object `f`,
+ as specified according to a supplied regexp.
+
+ `regexp` - Regexp containing two groups containing the metadata name and
+ value.
+ """
+ for line in f:
+ assert isinstance(line, bytes), line
+ m = regexp.match(line)
+ if not m:
+ break
+
+ yield (m.groups()[0].decode("utf8"), m.groups()[1].decode("utf8"))
+
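+# Illustrative example: for a .any.js test whose first lines are
+#     // META: timeout=long
+#     // META: global=window,worker
+# read_script_metadata(f, js_meta_re) yields ("timeout", "long") and
+# ("global", "window,worker"); scanning stops at the first non-META line.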
+
+_any_variants = {
+ "window": {"suffix": ".any.html"},
+ "serviceworker": {"force_https": True},
+ "serviceworker-module": {"force_https": True},
+ "sharedworker": {},
+ "sharedworker-module": {},
+ "dedicatedworker": {"suffix": ".any.worker.html"},
+ "dedicatedworker-module": {"suffix": ".any.worker-module.html"},
+ "worker": {"longhand": {"dedicatedworker", "sharedworker", "serviceworker"}},
+ "worker-module": {},
+ "shadowrealm": {},
+ "jsshell": {"suffix": ".any.js"},
+} # type: Dict[Text, Dict[Text, Any]]
+
+
+def get_any_variants(item):
+ # type: (Text) -> Set[Text]
+ """
+ Returns a set of variants (strings) defined by the given keyword.
+ """
+ assert isinstance(item, str), item
+
+ variant = _any_variants.get(item, None)
+ if variant is None:
+ return set()
+
+ return variant.get("longhand", {item})
+
+
+def get_default_any_variants():
+ # type: () -> Set[Text]
+ """
+ Returns a set of variants (strings) that will be used by default.
+ """
+ return set({"window", "dedicatedworker"})
+
+
+def parse_variants(value):
+ # type: (Text) -> Set[Text]
+ """
+ Returns a set of variants (strings) defined by a comma-separated value.
+ """
+ assert isinstance(value, str), value
+
+ if value == "":
+ return get_default_any_variants()
+
+ globals = set()
+ for item in value.split(","):
+ item = item.strip()
+ globals |= get_any_variants(item)
+ return globals
+
+
+def global_suffixes(value):
+ # type: (Text) -> Set[Tuple[Text, bool]]
+ """
+ Returns a set of tuples of the relevant filename suffix (a string) and whether the
+ variant is intended to run in a JS shell, for the variants defined by the
+ given comma-separated value.
+ """
+ assert isinstance(value, str), value
+
+ rv = set()
+
+ global_types = parse_variants(value)
+ for global_type in global_types:
+ variant = _any_variants[global_type]
+ suffix = variant.get("suffix", ".any.%s.html" % global_type)
+ rv.add((suffix, global_type == "jsshell"))
+
+ return rv
+
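+# Worked example: global_suffixes("window,worker") expands the "worker" shorthand
+# into its longhand variants and returns {(".any.html", False),
+# (".any.worker.html", False), (".any.sharedworker.html", False),
+# (".any.serviceworker.html", False)}; only the "jsshell" variant sets the
+# boolean to True.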
+
+def global_variant_url(url, suffix):
+ # type: (Text, Text) -> Text
+ """
+ Returns a url created from the given url and suffix (all strings).
+ """
+ url = url.replace(".any.", ".")
+ # If the url must be loaded over https, ensure that it will have
+ # the form .https.any.js
+ if ".https." in url and suffix.startswith(".https."):
+ url = url.replace(".https.", ".")
+ elif ".h2." in url and suffix.startswith(".h2."):
+ url = url.replace(".h2.", ".")
+ return replace_end(url, ".js", suffix)
+
+
+def _parse_html(f):
+ # type: (BinaryIO) -> ElementTree.Element
+ doc = html5lib.parse(f, treebuilder="etree", useChardet=False)
+ if MYPY:
+ return cast(ElementTree.Element, doc)
+ else:
+ # (needs to be in else for mypy to believe this is reachable)
+ return doc
+
+def _parse_xml(f):
+ # type: (BinaryIO) -> ElementTree.Element
+ try:
+ # raises ValueError with an unsupported encoding,
+ # ParseError when there's an undefined entity
+ return ElementTree.parse(f).getroot()
+ except (ValueError, ElementTree.ParseError):
+ f.seek(0)
+ return ElementTree.parse(f, XMLParser.XMLParser()).getroot() # type: ignore
+
+
+class SourceFile:
+ parsers = {"html":_parse_html,
+ "xhtml":_parse_xml,
+ "svg":_parse_xml} # type: Dict[Text, Callable[[BinaryIO], ElementTree.Element]]
+
+ root_dir_non_test = {"common"}
+
+ dir_non_test = {"resources",
+ "support",
+ "tools"}
+
+ dir_path_non_test = {("css21", "archive"),
+ ("css", "CSS2", "archive"),
+ ("css", "common")} # type: Set[Tuple[Text, ...]]
+
+ def __init__(self, tests_root, rel_path, url_base, hash=None, contents=None):
+ # type: (Text, Text, Text, Optional[Text], Optional[bytes]) -> None
+ """Object representing a file in a source tree.
+
+ :param tests_root: Path to the root of the source tree
+ :param rel_path: File path relative to tests_root
+ :param url_base: Base URL used when converting file paths to urls
+ :param contents: Byte array of the contents of the file or ``None``.
+ """
+
+ assert not os.path.isabs(rel_path), rel_path
+ if os.name == "nt":
+ # do slash normalization on Windows
+ rel_path = rel_path.replace("/", "\\")
+
+ dir_path, filename = os.path.split(rel_path)
+ name, ext = os.path.splitext(filename)
+
+ type_flag = None
+ if "-" in name:
+ type_flag = name.rsplit("-", 1)[1].split(".")[0]
+
+ meta_flags = name.split(".")[1:]
+
+ self.tests_root = tests_root # type: Text
+ self.rel_path = rel_path # type: Text
+ self.dir_path = dir_path # type: Text
+ self.filename = filename # type: Text
+ self.name = name # type: Text
+ self.ext = ext # type: Text
+ self.type_flag = type_flag # type: Optional[Text]
+ self.meta_flags = meta_flags # type: Union[List[bytes], List[Text]]
+ self.url_base = url_base
+ self.contents = contents
+ self.items_cache = None # type: Optional[Tuple[Text, List[ManifestItem]]]
+ self._hash = hash
+
+ def __getstate__(self):
+ # type: () -> Dict[str, Any]
+ # Remove computed properties if we pickle this class
+ rv = self.__dict__.copy()
+
+ if "__cached_properties__" in rv:
+ cached_properties = rv["__cached_properties__"]
+ rv = {key:value for key, value in rv.items() if key not in cached_properties}
+ del rv["__cached_properties__"]
+ return rv
+
+ def name_prefix(self, prefix):
+ # type: (Text) -> bool
+ """Check if the filename starts with a given prefix
+
+ :param prefix: The prefix to check"""
+ return self.name.startswith(prefix)
+
+ def is_dir(self):
+ # type: () -> bool
+ """Return whether this file represents a directory."""
+ if self.contents is not None:
+ return False
+
+ return os.path.isdir(self.rel_path)
+
+ def open(self):
+ # type: () -> BinaryIO
+ """
+ Return either
+ * the contents specified in the constructor, if any;
+ * a File object opened for reading the file contents.
+ """
+ if self.contents is not None:
+ file_obj = BytesIO(self.contents) # type: BinaryIO
+ else:
+ file_obj = open(self.path, 'rb')
+ return file_obj
+
+ @cached_property
+ def rel_path_parts(self):
+ # type: () -> Tuple[Text, ...]
+ return tuple(self.rel_path.split(os.path.sep))
+
+ @cached_property
+ def path(self):
+ # type: () -> Text
+ return os.path.join(self.tests_root, self.rel_path)
+
+ @cached_property
+ def rel_url(self):
+ # type: () -> Text
+ assert not os.path.isabs(self.rel_path), self.rel_path
+ return self.rel_path.replace(os.sep, "/")
+
+ @cached_property
+ def url(self):
+ # type: () -> Text
+ return urljoin(self.url_base, self.rel_url)
+
+ @cached_property
+ def hash(self):
+ # type: () -> Text
+ if not self._hash:
+ with self.open() as f:
+ content = f.read()
+
+ data = b"".join((b"blob ", b"%d" % len(content), b"\0", content))
+ self._hash = str(hashlib.sha1(data).hexdigest())
+
+ return self._hash
+
+ def in_non_test_dir(self):
+ # type: () -> bool
+ if self.dir_path == "":
+ return True
+
+ parts = self.rel_path_parts
+
+ if (parts[0] in self.root_dir_non_test or
+ any(item in self.dir_non_test for item in parts) or
+ any(parts[:len(path)] == path for path in self.dir_path_non_test)):
+ return True
+ return False
+
+ def in_conformance_checker_dir(self):
+ # type: () -> bool
+ return self.rel_path_parts[0] == "conformance-checkers"
+
+ @property
+ def name_is_non_test(self):
+ # type: () -> bool
+ """Check if the file name matches the conditions for the file to
+ be a non-test file"""
+ return (self.is_dir() or
+ self.name_prefix("MANIFEST") or
+ self.filename == "META.yml" or
+ self.filename.startswith(".") or
+ self.filename.endswith(".headers") or
+ self.filename.endswith(".ini") or
+ self.in_non_test_dir())
+
+ @property
+ def name_is_conformance(self):
+ # type: () -> bool
+ return (self.in_conformance_checker_dir() and
+ self.type_flag in ("is-valid", "no-valid"))
+
+ @property
+ def name_is_conformance_support(self):
+ # type: () -> bool
+ return self.in_conformance_checker_dir()
+
+ @property
+ def name_is_manual(self):
+ # type: () -> bool
+ """Check if the file name matches the conditions for the file to
+ be a manual test file"""
+ return self.type_flag == "manual"
+
+ @property
+ def name_is_visual(self):
+ # type: () -> bool
+ """Check if the file name matches the conditions for the file to
+ be a visual test file"""
+ return self.type_flag == "visual"
+
+ @property
+ def name_is_multi_global(self):
+ # type: () -> bool
+ """Check if the file name matches the conditions for the file to
+ be a multi-global js test file"""
+ return "any" in self.meta_flags and self.ext == ".js"
+
+ @property
+ def name_is_worker(self):
+ # type: () -> bool
+ """Check if the file name matches the conditions for the file to
+ be a worker js test file"""
+ return "worker" in self.meta_flags and self.ext == ".js"
+
+ @property
+ def name_is_window(self):
+ # type: () -> bool
+ """Check if the file name matches the conditions for the file to
+ be a window js test file"""
+ return "window" in self.meta_flags and self.ext == ".js"
+
+ @property
+ def name_is_webdriver(self):
+ # type: () -> bool
+ """Check if the file name matches the conditions for the file to
+ be a webdriver spec test file"""
+ # wdspec tests are in subdirectories of /webdriver excluding __init__.py
+ # files.
+ rel_path_parts = self.rel_path_parts
+ return (((rel_path_parts[0] == "webdriver" and len(rel_path_parts) > 1) or
+ (rel_path_parts[:2] == ("infrastructure", "webdriver") and
+ len(rel_path_parts) > 2)) and
+ self.filename not in ("__init__.py", "conftest.py") and
+ fnmatch(self.filename, wd_pattern))
+
+ @property
+ def name_is_reference(self):
+ # type: () -> bool
+ """Check if the file name matches the conditions for the file to
+ be a reference file (not a reftest)"""
+ return "/reference/" in self.url or bool(reference_file_re.search(self.name))
+
+ @property
+ def name_is_crashtest(self):
+ # type: () -> bool
+ return (self.markup_type is not None and
+ (self.type_flag == "crash" or "crashtests" in self.dir_path.split(os.path.sep)))
+
+ @property
+ def name_is_tentative(self):
+ # type: () -> bool
+ """Check if the file name matches the conditions for the file to be a
+ tentative file.
+
+ See https://web-platform-tests.org/writing-tests/file-names.html#test-features"""
+ return "tentative" in self.meta_flags or "tentative" in self.dir_path.split(os.path.sep)
+
+ @property
+ def name_is_print_reftest(self):
+ # type: () -> bool
+ return (self.markup_type is not None and
+ (self.type_flag == "print" or "print" in self.dir_path.split(os.path.sep)))
+
+ @property
+ def markup_type(self):
+ # type: () -> Optional[Text]
+ """Return the type of markup contained in a file, based on its extension,
+ or None if it doesn't contain markup"""
+ ext = self.ext
+
+ if not ext:
+ return None
+ if ext[0] == ".":
+ ext = ext[1:]
+ if ext in ["html", "htm"]:
+ return "html"
+ if ext in ["xhtml", "xht", "xml"]:
+ return "xhtml"
+ if ext == "svg":
+ return "svg"
+ return None
+
+ @cached_property
+ def root(self):
+ # type: () -> Optional[ElementTree.Element]
+ """Return an ElementTree Element for the root node of the file if it contains
+ markup, or None if it does not"""
+ if not self.markup_type:
+ return None
+
+ parser = self.parsers[self.markup_type]
+
+ with self.open() as f:
+ try:
+ tree = parser(f)
+ except Exception:
+ return None
+
+ return tree
+
+ @cached_property
+ def timeout_nodes(self):
+ # type: () -> List[ElementTree.Element]
+ """List of ElementTree Elements corresponding to nodes in a test that
+ specify timeouts"""
+ assert self.root is not None
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='timeout']")
+
+ @cached_property
+ def pac_nodes(self):
+ # type: () -> List[ElementTree.Element]
+ """List of ElementTree Elements corresponding to nodes in a test that
+ specify PAC (proxy auto-config)"""
+ assert self.root is not None
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='pac']")
+
+ @cached_property
+ def script_metadata(self):
+ # type: () -> Optional[List[Tuple[Text, Text]]]
+ if self.name_is_worker or self.name_is_multi_global or self.name_is_window:
+ regexp = js_meta_re
+ elif self.name_is_webdriver:
+ regexp = python_meta_re
+ else:
+ return None
+
+ with self.open() as f:
+ return list(read_script_metadata(f, regexp))
+
+ @cached_property
+ def timeout(self):
+ # type: () -> Optional[Text]
+ """The timeout of a test or reference file. "long" if the file has an extended timeout
+ or None otherwise"""
+ if self.script_metadata:
+ if any(m == ("timeout", "long") for m in self.script_metadata):
+ return "long"
+
+ if self.root is None:
+ return None
+
+ if self.timeout_nodes:
+ timeout_str = self.timeout_nodes[0].attrib.get("content", None) # type: Optional[Text]
+ if timeout_str and timeout_str.lower() == "long":
+ return "long"
+
+ return None
+
+ @cached_property
+ def pac(self):
+ # type: () -> Optional[Text]
+ """The PAC (proxy config) of a test or reference file. A URL or null"""
+ if self.script_metadata:
+ for (meta, content) in self.script_metadata:
+ if meta == 'pac':
+ return content
+
+ if self.root is None:
+ return None
+
+ if self.pac_nodes:
+ return self.pac_nodes[0].attrib.get("content", None)
+
+ return None
+
+ @cached_property
+ def viewport_nodes(self):
+ # type: () -> List[ElementTree.Element]
+ """List of ElementTree Elements corresponding to nodes in a test that
+ specify viewport sizes"""
+ assert self.root is not None
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='viewport-size']")
+
+ @cached_property
+ def viewport_size(self):
+ # type: () -> Optional[Text]
+ """The viewport size of a test or reference file"""
+ if self.root is None:
+ return None
+
+ if not self.viewport_nodes:
+ return None
+
+ return self.viewport_nodes[0].attrib.get("content", None)
+
+ @cached_property
+ def dpi_nodes(self):
+ # type: () -> List[ElementTree.Element]
+ """List of ElementTree Elements corresponding to nodes in a test that
+ specify device pixel ratios"""
+ assert self.root is not None
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='device-pixel-ratio']")
+
+ @cached_property
+ def dpi(self):
+ # type: () -> Optional[Text]
+ """The device pixel ratio of a test or reference file"""
+ if self.root is None:
+ return None
+
+ if not self.dpi_nodes:
+ return None
+
+ return self.dpi_nodes[0].attrib.get("content", None)
+
+ def parse_ref_keyed_meta(self, node):
+ # type: (ElementTree.Element) -> Tuple[Optional[Tuple[Text, Text, Text]], Text]
+ item = node.attrib.get("content", "") # type: Text
+
+ parts = item.rsplit(":", 1)
+ if len(parts) == 1:
+ key = None # type: Optional[Tuple[Text, Text, Text]]
+ value = parts[0]
+ else:
+ key_part = urljoin(self.url, parts[0])
+ reftype = None
+ for ref in self.references: # type: Tuple[Text, Text]
+ if ref[0] == key_part:
+ reftype = ref[1]
+ break
+ if reftype not in ("==", "!="):
+ raise ValueError("Key %s doesn't correspond to a reference" % key_part)
+ key = (self.url, key_part, reftype)
+ value = parts[1]
+
+ return key, value
+
+
+ @cached_property
+ def fuzzy_nodes(self):
+ # type: () -> List[ElementTree.Element]
+ """List of ElementTree Elements corresponding to nodes in a test that
+ specify reftest fuzziness"""
+ assert self.root is not None
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='fuzzy']")
+
+
+ @cached_property
+ def fuzzy(self):
+ # type: () -> Dict[Optional[Tuple[Text, Text, Text]], List[List[int]]]
+ rv = {} # type: Dict[Optional[Tuple[Text, Text, Text]], List[List[int]]]
+ if self.root is None:
+ return rv
+
+ if not self.fuzzy_nodes:
+ return rv
+
+ args = ["maxDifference", "totalPixels"]
+
+ for node in self.fuzzy_nodes:
+ key, value = self.parse_ref_keyed_meta(node)
+ ranges = value.split(";")
+ if len(ranges) != 2:
+ raise ValueError("Malformed fuzzy value %s" % value)
+ arg_values = {} # type: Dict[Text, List[int]]
+ positional_args = deque() # type: Deque[List[int]]
+ for range_str_value in ranges: # type: Text
+ name = None # type: Optional[Text]
+ if "=" in range_str_value:
+ name, range_str_value = (part.strip()
+ for part in range_str_value.split("=", 1))
+ if name not in args:
+ raise ValueError("%s is not a valid fuzzy property" % name)
+ if arg_values.get(name):
+ raise ValueError("Got multiple values for argument %s" % name)
+ if "-" in range_str_value:
+ range_min, range_max = range_str_value.split("-")
+ else:
+ range_min = range_str_value
+ range_max = range_str_value
+ try:
+ range_value = [int(x.strip()) for x in (range_min, range_max)]
+ except ValueError:
+ raise ValueError("Fuzzy value %s must be a range of integers" %
+ range_str_value)
+ if name is None:
+ positional_args.append(range_value)
+ else:
+ arg_values[name] = range_value
+ rv[key] = []
+ for arg_name in args:
+ if arg_values.get(arg_name):
+ arg_value = arg_values.pop(arg_name)
+ else:
+ arg_value = positional_args.popleft()
+ rv[key].append(arg_value)
+ assert len(arg_values) == 0 and len(positional_args) == 0
+ return rv
+
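+ # Illustrative example of the parsing above: a test containing
+ #     <meta name="fuzzy" content="maxDifference=15;totalPixels=0-300">
+ # yields {None: [[15, 15], [0, 300]]}; values are normalised to [min, max]
+ # pairs and keyed by reference URL, with None meaning "all references".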
+ @cached_property
+ def page_ranges_nodes(self):
+ # type: () -> List[ElementTree.Element]
+ """List of ElementTree Elements corresponding to nodes in a test that
+ specify print-reftest page ranges"""
+ assert self.root is not None
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='reftest-pages']")
+
+ @cached_property
+ def page_ranges(self):
+ # type: () -> Dict[Text, List[List[Optional[int]]]]
+ """List of ElementTree Elements corresponding to nodes in a test that
+ specify print-reftest page ranges"""
+ rv = {} # type: Dict[Text, List[List[Optional[int]]]]
+ for node in self.page_ranges_nodes:
+ key_data, value = self.parse_ref_keyed_meta(node)
+ # Just key by url
+ if key_data is None:
+ key = self.url
+ else:
+ key = key_data[1]
+ if key in rv:
+ raise ValueError("Duplicate page-ranges value")
+ rv[key] = []
+ for range_str in value.split(","):
+ range_str = range_str.strip()
+ if "-" in range_str:
+ range_parts_str = [item.strip() for item in range_str.split("-")]
+ try:
+ range_parts = [int(item) if item else None for item in range_parts_str]
+ except ValueError:
+ raise ValueError("Malformed page-range value %s" % range_str)
+ if any(item == 0 for item in range_parts):
+ raise ValueError("Malformed page-range value %s" % range_str)
+ else:
+ try:
+ range_parts = [int(range_str)]
+ except ValueError:
+ raise ValueError("Malformed page-range value %s" % range_str)
+ rv[key].append(range_parts)
+ return rv
+
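+ # Illustrative example of the parsing above: a print-reftest containing
+ #     <meta name="reftest-pages" content="1-2,5">
+ # produces {<test url>: [[1, 2], [5]]}; an open-ended range such as "3-"
+ # becomes [3, None].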
+ @cached_property
+ def testharness_nodes(self):
+ # type: () -> List[ElementTree.Element]
+ """List of ElementTree Elements corresponding to nodes representing a
+ testharness.js script"""
+ assert self.root is not None
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testharness.js']")
+
+ @cached_property
+ def content_is_testharness(self):
+ # type: () -> Optional[bool]
+ """Boolean indicating whether the file content represents a
+ testharness.js test"""
+ if self.root is None:
+ return None
+ return bool(self.testharness_nodes)
+
+ @cached_property
+ def variant_nodes(self):
+ # type: () -> List[ElementTree.Element]
+ """List of ElementTree Elements corresponding to nodes representing a
+ test variant"""
+ assert self.root is not None
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='variant']")
+
+ @cached_property
+ def test_variants(self):
+ # type: () -> List[Text]
+ rv = [] # type: List[Text]
+ if self.ext == ".js":
+ script_metadata = self.script_metadata
+ assert script_metadata is not None
+ for (key, value) in script_metadata:
+ if key == "variant":
+ rv.append(value)
+ else:
+ for element in self.variant_nodes:
+ if "content" in element.attrib:
+ variant = element.attrib["content"] # type: Text
+ rv.append(variant)
+
+ for variant in rv:
+ if variant != "":
+ if variant[0] not in ("#", "?"):
+ raise ValueError("Non-empty variant must start with either a ? or a #")
+ if len(variant) == 1 or (variant[0] == "?" and variant[1] == "#"):
+ raise ValueError("Variants must not have empty fragment or query " +
+ "(omit the empty part instead)")
+
+ if not rv:
+ rv = [""]
+
+ return rv
+
+ @cached_property
+ def testdriver_nodes(self):
+ # type: () -> List[ElementTree.Element]
+ """List of ElementTree Elements corresponding to nodes representing a
+ testdriver.js script"""
+ assert self.root is not None
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testdriver.js']")
+
+ @cached_property
+ def has_testdriver(self):
+ # type: () -> Optional[bool]
+ """Boolean indicating whether the file content represents a
+ testharness.js test"""
+ if self.root is None:
+ return None
+ return bool(self.testdriver_nodes)
+
+ @cached_property
+ def reftest_nodes(self):
+ # type: () -> List[ElementTree.Element]
+ """List of ElementTree Elements corresponding to nodes representing a
+ to a reftest <link>"""
+ if self.root is None:
+ return []
+
+ match_links = self.root.findall(".//{http://www.w3.org/1999/xhtml}link[@rel='match']")
+ mismatch_links = self.root.findall(".//{http://www.w3.org/1999/xhtml}link[@rel='mismatch']")
+ return match_links + mismatch_links
+
+ @cached_property
+ def references(self):
+ # type: () -> List[Tuple[Text, Text]]
+ """List of (ref_url, relation) tuples for any reftest references specified in
+ the file"""
+ rv = [] # type: List[Tuple[Text, Text]]
+ rel_map = {"match": "==", "mismatch": "!="}
+ for item in self.reftest_nodes:
+ if "href" in item.attrib:
+ ref_url = urljoin(self.url, item.attrib["href"].strip(space_chars))
+ ref_type = rel_map[item.attrib["rel"]]
+ rv.append((ref_url, ref_type))
+ return rv
+
+ @cached_property
+ def content_is_ref_node(self):
+ # type: () -> bool
+ """Boolean indicating whether the file is a non-leaf node in a reftest
+ graph (i.e. if it contains any <link rel=[mis]match>)"""
+ return bool(self.references)
+
+ @cached_property
+ def css_flag_nodes(self):
+ # type: () -> List[ElementTree.Element]
+ """List of ElementTree Elements corresponding to nodes representing a
+ flag <meta>"""
+ if self.root is None:
+ return []
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='flags']")
+
+ @cached_property
+ def css_flags(self):
+ # type: () -> Set[Text]
+ """Set of flags specified in the file"""
+ rv = set() # type: Set[Text]
+ for item in self.css_flag_nodes:
+ if "content" in item.attrib:
+ for flag in item.attrib["content"].split():
+ rv.add(flag)
+ return rv
+
+ @cached_property
+ def content_is_css_manual(self):
+ # type: () -> Optional[bool]
+ """Boolean indicating whether the file content represents a
+ CSS WG-style manual test"""
+ if self.root is None:
+ return None
+ # return True if the intersection between the two sets is non-empty
+ return bool(self.css_flags & {"animated", "font", "history", "interact", "paged", "speech", "userstyle"})
+
+ @cached_property
+ def spec_link_nodes(self):
+ # type: () -> List[ElementTree.Element]
+ """List of ElementTree Elements corresponding to nodes representing a
+ <link rel=help>, used to point to specs"""
+ if self.root is None:
+ return []
+ return self.root.findall(".//{http://www.w3.org/1999/xhtml}link[@rel='help']")
+
+ @cached_property
+ def spec_links(self):
+ # type: () -> Set[Text]
+ """Set of spec links specified in the file"""
+ rv = set() # type: Set[Text]
+ for item in self.spec_link_nodes:
+ if "href" in item.attrib:
+ rv.add(item.attrib["href"].strip(space_chars))
+ return rv
+
+ @cached_property
+ def content_is_css_visual(self):
+ # type: () -> Optional[bool]
+ """Boolean indicating whether the file content represents a
+ CSS WG-style visual test"""
+ if self.root is None:
+ return None
+ return bool(self.ext in {'.xht', '.html', '.xhtml', '.htm', '.xml', '.svg'} and
+ self.spec_links)
+
+ @property
+ def type(self):
+ # type: () -> Text
+ possible_types = self.possible_types
+ if len(possible_types) == 1:
+ return possible_types.pop()
+
+ rv, _ = self.manifest_items()
+ return rv
+
+ @property
+ def possible_types(self):
+ # type: () -> Set[Text]
+ """Determines the set of possible types without reading the file"""
+
+ if self.items_cache:
+ return {self.items_cache[0]}
+
+ if self.name_is_non_test:
+ return {SupportFile.item_type}
+
+ if self.name_is_manual:
+ return {ManualTest.item_type}
+
+ if self.name_is_conformance:
+ return {ConformanceCheckerTest.item_type}
+
+ if self.name_is_conformance_support:
+ return {SupportFile.item_type}
+
+ if self.name_is_webdriver:
+ return {WebDriverSpecTest.item_type}
+
+ if self.name_is_visual:
+ return {VisualTest.item_type}
+
+ if self.name_is_crashtest:
+ return {CrashTest.item_type}
+
+ if self.name_is_print_reftest:
+ return {PrintRefTest.item_type}
+
+ if self.name_is_multi_global:
+ return {TestharnessTest.item_type}
+
+ if self.name_is_worker:
+ return {TestharnessTest.item_type}
+
+ if self.name_is_window:
+ return {TestharnessTest.item_type}
+
+ if self.markup_type is None:
+ return {SupportFile.item_type}
+
+ if not self.name_is_reference:
+ return {ManualTest.item_type,
+ TestharnessTest.item_type,
+ RefTest.item_type,
+ VisualTest.item_type,
+ SupportFile.item_type}
+
+ return {TestharnessTest.item_type,
+ RefTest.item_type,
+ SupportFile.item_type}
+
+ def manifest_items(self):
+ # type: () -> Tuple[Text, List[ManifestItem]]
+ """List of manifest items corresponding to the file. There is typically one
+ per test, but in the case of reftests a node may have corresponding manifest
+ items without being a test itself."""
+
+ if self.items_cache:
+ return self.items_cache
+
+ drop_cached = "root" not in self.__dict__
+
+ if self.name_is_non_test:
+ rv = "support", [
+ SupportFile(
+ self.tests_root,
+ self.rel_path
+ )] # type: Tuple[Text, List[ManifestItem]]
+
+ elif self.name_is_manual:
+ rv = ManualTest.item_type, [
+ ManualTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ self.rel_url
+ )]
+
+ elif self.name_is_conformance:
+ rv = ConformanceCheckerTest.item_type, [
+ ConformanceCheckerTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ self.rel_url
+ )]
+
+ elif self.name_is_conformance_support:
+ rv = "support", [
+ SupportFile(
+ self.tests_root,
+ self.rel_path
+ )]
+
+ elif self.name_is_webdriver:
+ rv = WebDriverSpecTest.item_type, [
+ WebDriverSpecTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ self.rel_url,
+ timeout=self.timeout
+ )]
+
+ elif self.name_is_visual:
+ rv = VisualTest.item_type, [
+ VisualTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ self.rel_url
+ )]
+
+ elif self.name_is_crashtest:
+ rv = CrashTest.item_type, [
+ CrashTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ self.rel_url
+ )]
+
+ elif self.name_is_print_reftest:
+ references = self.references
+ if not references:
+ raise ValueError("%s detected as print reftest but doesn't have any refs" %
+ self.path)
+ rv = PrintRefTest.item_type, [
+ PrintRefTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ self.rel_url,
+ references=references,
+ timeout=self.timeout,
+ viewport_size=self.viewport_size,
+ fuzzy=self.fuzzy,
+ page_ranges=self.page_ranges,
+ )]
+
+ elif self.name_is_multi_global:
+ globals = ""
+ script_metadata = self.script_metadata
+ assert script_metadata is not None
+ for (key, value) in script_metadata:
+ if key == "global":
+ globals = value
+ break
+
+ tests = [
+ TestharnessTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ global_variant_url(self.rel_url, suffix) + variant,
+ timeout=self.timeout,
+ pac=self.pac,
+ jsshell=jsshell,
+ script_metadata=self.script_metadata
+ )
+ for (suffix, jsshell) in sorted(global_suffixes(globals))
+ for variant in self.test_variants
+ ] # type: List[ManifestItem]
+ rv = TestharnessTest.item_type, tests
+
+ elif self.name_is_worker:
+ test_url = replace_end(self.rel_url, ".worker.js", ".worker.html")
+ tests = [
+ TestharnessTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ test_url + variant,
+ timeout=self.timeout,
+ pac=self.pac,
+ script_metadata=self.script_metadata
+ )
+ for variant in self.test_variants
+ ]
+ rv = TestharnessTest.item_type, tests
+
+ elif self.name_is_window:
+ test_url = replace_end(self.rel_url, ".window.js", ".window.html")
+ tests = [
+ TestharnessTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ test_url + variant,
+ timeout=self.timeout,
+ pac=self.pac,
+ script_metadata=self.script_metadata
+ )
+ for variant in self.test_variants
+ ]
+ rv = TestharnessTest.item_type, tests
+
+ elif self.content_is_css_manual and not self.name_is_reference:
+ rv = ManualTest.item_type, [
+ ManualTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ self.rel_url
+ )]
+
+ elif self.content_is_testharness:
+ rv = TestharnessTest.item_type, []
+ testdriver = self.has_testdriver
+ for variant in self.test_variants:
+ url = self.rel_url + variant
+ rv[1].append(TestharnessTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ url,
+ timeout=self.timeout,
+ pac=self.pac,
+ testdriver=testdriver,
+ script_metadata=self.script_metadata
+ ))
+
+ elif self.content_is_ref_node:
+ rv = RefTest.item_type, [
+ RefTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ self.rel_url,
+ references=self.references,
+ timeout=self.timeout,
+ viewport_size=self.viewport_size,
+ dpi=self.dpi,
+ fuzzy=self.fuzzy
+ )]
+
+ elif self.content_is_css_visual and not self.name_is_reference:
+ rv = VisualTest.item_type, [
+ VisualTest(
+ self.tests_root,
+ self.rel_path,
+ self.url_base,
+ self.rel_url
+ )]
+
+ else:
+ rv = "support", [
+ SupportFile(
+ self.tests_root,
+ self.rel_path
+ )]
+
+ assert rv[0] in self.possible_types
+ assert len(rv[1]) == len(set(rv[1]))
+
+ self.items_cache = rv
+
+ if drop_cached and "__cached_properties__" in self.__dict__:
+ cached_properties = self.__dict__["__cached_properties__"]
+ for prop in cached_properties:
+ if prop in self.__dict__:
+ del self.__dict__[prop]
+ del self.__dict__["__cached_properties__"]
+
+ return rv
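
For orientation, the classification logic above can be exercised directly on an in-memory SourceFile, much as the unit tests further down do. This is a minimal sketch only: the file path and contents are invented, and it assumes the package is importable as tools.manifest (the import path used by the test suite's mock.patch targets).

    from tools.manifest.sourcefile import SourceFile

    # A .any.js file is typed from its name and script metadata alone.
    contents = b"// META: global=window,worker\ntest()"
    s = SourceFile("/", "html/example.any.js", "/", contents=contents)
    assert s.possible_types == {"testharness"}   # decided without reading the file

    item_type, items = s.manifest_items()
    # One TestharnessTest per generated global, e.g. /html/example.any.html
    print(item_type, [item.url for item in items])
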
diff --git a/testing/web-platform/tests/tools/manifest/testpaths.py b/testing/web-platform/tests/tools/manifest/testpaths.py
new file mode 100644
index 0000000000..6902f0c063
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/testpaths.py
@@ -0,0 +1,112 @@
+import argparse
+import json
+import os
+from collections import defaultdict
+
+from .manifest import load_and_update, Manifest
+from .log import get_logger
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Any
+ from typing import Dict
+ from typing import Iterable
+ from typing import List
+ from typing import Text
+
+wpt_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+
+logger = get_logger()
+
+
+def abs_path(path):
+ # type: (str) -> str
+ return os.path.abspath(os.path.expanduser(path))
+
+
+def create_parser():
+ # type: () -> argparse.ArgumentParser
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "-p", "--path", type=abs_path, help="Path to manifest file.")
+ parser.add_argument(
+ "--src-root", type=abs_path, default=None, help="Path to root of sourcetree.")
+ parser.add_argument(
+ "--tests-root", type=abs_path, default=wpt_root, help="Path to root of tests.")
+ parser.add_argument(
+ "--no-update", dest="update", action="store_false", default=True,
+ help="Don't update manifest before continuing")
+ parser.add_argument(
+ "-r", "--rebuild", action="store_true", default=False,
+ help="Force a full rebuild of the manifest.")
+ parser.add_argument(
+ "--url-base", action="store", default="/",
+ help="Base url to use as the mount point for tests in this manifest.")
+ parser.add_argument(
+ "--cache-root", action="store", default=os.path.join(wpt_root, ".wptcache"),
+ help="Path in which to store any caches (default <tests_root>/.wptcache/)")
+ parser.add_argument(
+ "--json", action="store_true", default=False,
+ help="Output as JSON")
+ parser.add_argument(
+ "test_ids", action="store", nargs="+",
+ help="Test ids for which to get paths")
+ return parser
+
+
+def get_path_id_map(src_root, tests_root, manifest_file, test_ids):
+ # type: (Text, Text, Manifest, Iterable[Text]) -> Dict[Text, List[Text]]
+ test_ids = set(test_ids)
+ path_id_map = defaultdict(list) # type: Dict[Text, List[Text]]
+
+ compute_rel_path = src_root != tests_root
+
+ for item_type, path, tests in manifest_file:
+ for test in tests:
+ if test.id in test_ids:
+ if compute_rel_path:
+ rel_path = os.path.relpath(os.path.join(tests_root, path),
+ src_root)
+ else:
+ rel_path = path
+ path_id_map[rel_path].append(test.id)
+ return path_id_map
+
+
+def get_paths(**kwargs):
+ # type: (**Any) -> Dict[Text, List[Text]]
+ tests_root = kwargs["tests_root"]
+ assert tests_root is not None
+ path = kwargs["path"]
+ if path is None:
+ path = os.path.join(kwargs["tests_root"], "MANIFEST.json")
+ src_root = kwargs["src_root"]
+ if src_root is None:
+ src_root = tests_root
+
+ manifest_file = load_and_update(tests_root,
+ path,
+ kwargs["url_base"],
+ update=kwargs["update"],
+ rebuild=kwargs["rebuild"],
+ cache_root=kwargs["cache_root"])
+
+ return get_path_id_map(src_root, tests_root, manifest_file, kwargs["test_ids"])
+
+
+def write_output(path_id_map, as_json):
+ # type: (Dict[Text, List[Text]], bool) -> None
+ if as_json:
+ print(json.dumps(path_id_map))
+ else:
+ for path, test_ids in sorted(path_id_map.items()):
+ print(path)
+ for test_id in sorted(test_ids):
+ print(" " + test_id)
+
+
+def run(**kwargs):
+ # type: (**Any) -> None
+ path_id_map = get_paths(**kwargs)
+ write_output(path_id_map, as_json=kwargs["json"])
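
The command-line entry point above is easiest to see end to end with something like the following sketch. The test id is purely illustrative, and it assumes a MANIFEST.json already exists, or can be built, under the default tests root.

    from tools.manifest import testpaths

    parser = testpaths.create_parser()
    kwargs = vars(parser.parse_args([
        "--json",                 # emit a JSON object instead of indented text
        "/dom/historical.html",   # hypothetical test id
    ]))
    # Loads (and by default updates) the manifest, then prints a mapping like
    # {"<source path>": ["/dom/historical.html"]} on stdout.
    testpaths.run(**kwargs)
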
diff --git a/testing/web-platform/tests/tools/manifest/tests/__init__.py b/testing/web-platform/tests/tools/manifest/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/manifest/tests/test_XMLParser.py b/testing/web-platform/tests/tools/manifest/tests/test_XMLParser.py
new file mode 100644
index 0000000000..d2d349d11e
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/tests/test_XMLParser.py
@@ -0,0 +1,56 @@
+# mypy: allow-untyped-defs
+
+from xml.etree.ElementTree import ParseError
+
+import pytest
+
+from ..XMLParser import XMLParser
+
+
+@pytest.mark.parametrize("s", [
+ '<foo>&nbsp;</foo>',
+ '<!DOCTYPE foo><foo>&nbsp;</foo>',
+ '<!DOCTYPE foo PUBLIC "fake" "id"><foo>&nbsp;</foo>',
+ '<!DOCTYPE foo PUBLIC "fake" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><foo>&nbsp;</foo>',
+ '<!DOCTYPE foo PUBLIC "fake-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><foo>&nbsp;</foo>'
+])
+def test_undefined_entity(s):
+ with pytest.raises(ParseError):
+ p = XMLParser()
+ p.feed(s)
+ p.close()
+
+
+@pytest.mark.parametrize("s", [
+ '<!DOCTYPE foo PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><foo>&nbsp;</foo>'
+])
+def test_defined_entity(s):
+ p = XMLParser()
+ p.feed(s)
+ d = p.close()
+ assert d.tag == "foo"
+ assert d.text == "\u00A0"
+
+
+def test_pi():
+ p = XMLParser()
+ p.feed('<foo><?foo bar?></foo>')
+ d = p.close()
+ assert d.tag == "foo"
+ assert len(d) == 0
+
+
+def test_comment():
+ p = XMLParser()
+ p.feed('<foo><!-- data --></foo>')
+ d = p.close()
+ assert d.tag == "foo"
+ assert len(d) == 0
+
+
+def test_unsupported_encoding():
+ p = XMLParser()
+ p.feed("<?xml version='1.0' encoding='Shift-JIS'?><foo>\u3044</foo>".encode("shift-jis"))
+ d = p.close()
+ assert d.tag == "foo"
+ assert d.text == "\u3044"
diff --git a/testing/web-platform/tests/tools/manifest/tests/test_item.py b/testing/web-platform/tests/tools/manifest/tests/test_item.py
new file mode 100644
index 0000000000..7640e9262c
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/tests/test_item.py
@@ -0,0 +1,160 @@
+# mypy: allow-untyped-defs
+
+import inspect
+import json
+
+import pytest
+
+from ..manifest import Manifest
+# Prevent pytest from treating TestharnessTest as a test class
+from ..item import TestharnessTest as HarnessTest
+from ..item import RefTest, item_types
+
+
+@pytest.mark.parametrize("path", [
+ "a.https.c",
+ "a.b.https.c",
+ "a.https.b.c",
+ "a.b.https.c.d",
+ "a.serviceworker.c",
+ "a.b.serviceworker.c",
+ "a.serviceworker.b.c",
+ "a.b.serviceworker.c.d",
+])
+def test_url_https(path):
+ m = HarnessTest("/foo", "bar/" + path, "/", "bar/" + path)
+
+ assert m.https is True
+
+
+@pytest.mark.parametrize("path", [
+ "https",
+ "a.https",
+ "a.b.https",
+ "https.a",
+ "https.a.b",
+ "a.bhttps.c",
+ "a.httpsb.c",
+ "serviceworker",
+ "a.serviceworker",
+ "a.b.serviceworker",
+ "serviceworker.a",
+ "serviceworker.a.b",
+ "a.bserviceworker.c",
+ "a.serviceworkerb.c",
+])
+def test_url_not_https(path):
+ m = HarnessTest("/foo", "bar/" + path, "/", "bar/" + path)
+
+ assert m.https is False
+
+
+@pytest.mark.parametrize("path", [
+ "a.www.c",
+ "a.b.www.c",
+ "a.www.b.c",
+ "a.b.www.c.d",
+ "a.https.www.c",
+ "a.b.https.www.c",
+ "a.https.www.b.c",
+ "a.b.https.www.c.d",
+])
+def test_url_subdomain(path):
+ m = HarnessTest("/foo", "bar/" + path, "/", "bar/" + path)
+
+ assert m.subdomain is True
+
+
+@pytest.mark.parametrize("path", [
+ "www",
+ "a.www",
+ "a.b.www",
+ "www.a",
+ "www.a.b",
+ "a.bwwww.c",
+ "a.wwwwb.c",
+])
+def test_url_not_subdomain(path):
+ m = HarnessTest("/foo", "bar/" + path, "/", "bar/" + path)
+
+ assert m.subdomain is False
+
+
+@pytest.mark.parametrize("fuzzy", [
+ {('/foo/test.html', '/foo/ref.html', '=='): [[1, 1], [200, 200]]},
+ {('/foo/test.html', '/foo/ref.html', '=='): [[0, 1], [100, 200]]},
+ {None: [[0, 1], [100, 200]]},
+ {None: [[1, 1], [200, 200]]},
+])
+def test_reftest_fuzzy(fuzzy):
+ t = RefTest('/',
+ 'foo/test.html',
+ '/',
+ 'foo/test.html',
+ [('/foo/ref.html', '==')],
+ fuzzy=fuzzy)
+ assert fuzzy == t.fuzzy
+
+ json_obj = t.to_json()
+
+ m = Manifest("/", "/")
+ t2 = RefTest.from_json(m, t.path, json_obj)
+ assert fuzzy == t2.fuzzy
+
+ # test the roundtrip case, given tuples become lists
+ roundtrip = json.loads(json.dumps(json_obj))
+ t3 = RefTest.from_json(m, t.path, roundtrip)
+ assert fuzzy == t3.fuzzy
+
+
+@pytest.mark.parametrize("fuzzy", [
+ {('/foo/test.html', '/foo/ref-2.html', '=='): [[0, 1], [100, 200]]},
+ {None: [[1, 1], [200, 200]], ('/foo/test.html', '/foo/ref-2.html', '=='): [[0, 1], [100, 200]]},
+])
+def test_reftest_fuzzy_multi(fuzzy):
+ t = RefTest('/',
+ 'foo/test.html',
+ '/',
+ 'foo/test.html',
+ [('/foo/ref-1.html', '=='), ('/foo/ref-2.html', '==')],
+ fuzzy=fuzzy)
+ assert fuzzy == t.fuzzy
+
+ json_obj = t.to_json()
+
+ m = Manifest("/", "/")
+ t2 = RefTest.from_json(m, t.path, json_obj)
+ assert fuzzy == t2.fuzzy
+
+ # test the roundtrip case, given tuples become lists
+ roundtrip = json.loads(json.dumps(json_obj))
+ t3 = RefTest.from_json(m, t.path, roundtrip)
+ assert fuzzy == t3.fuzzy
+
+
+def test_item_types():
+ for key, value in item_types.items():
+ assert isinstance(key, str)
+ assert not inspect.isabstract(value)
+
+
+def test_wpt_flags():
+ m1 = HarnessTest("/foo", "bar", "/", "bar" + "?wpt_flags=www")
+ assert m1.subdomain is True
+ assert m1.https is False
+ assert m1.h2 is False
+
+ m2 = HarnessTest("/foo", "bar", "/", "bar" + "?wpt_flags=https")
+ assert m2.subdomain is False
+ assert m2.https is True
+ assert m2.h2 is False
+
+ m3 = HarnessTest("/foo", "bar", "/", "bar" + "?wpt_flags=h2")
+ assert m3.subdomain is False
+ assert m3.https is False
+ assert m3.h2 is True
+
+ m4 = HarnessTest("/foo", "bar", "/", "bar" + "?wpt_flags=https&wpt_flags=www")
+ assert m4.subdomain is True
+ assert m4.https is True
+ assert m4.h2 is False
diff --git a/testing/web-platform/tests/tools/manifest/tests/test_manifest.py b/testing/web-platform/tests/tools/manifest/tests/test_manifest.py
new file mode 100644
index 0000000000..a7f3d315f0
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/tests/test_manifest.py
@@ -0,0 +1,310 @@
+# mypy: ignore-errors
+
+import os
+from unittest import mock
+
+import hypothesis as h
+import hypothesis.strategies as hs
+
+from .. import manifest, sourcefile, item, utils
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Any
+ from typing import Type
+
+
+def SourceFileWithTest(path, hash, cls, **kwargs):
+ # type: (str, str, Type[item.ManifestItem], **Any) -> sourcefile.SourceFile
+ rel_path_parts = tuple(path.split(os.path.sep))
+ s = mock.Mock(rel_path=path,
+ rel_path_parts=rel_path_parts,
+ hash=hash)
+ if cls == item.SupportFile:
+ test = cls("/foobar", path)
+ else:
+ assert issubclass(cls, item.URLManifestItem)
+ test = cls("/foobar", path, "/", utils.from_os_path(path), **kwargs)
+ s.manifest_items = mock.Mock(return_value=(cls.item_type, [test]))
+ return s # type: ignore
+
+
+def SourceFileWithTests(path, hash, cls, variants):
+ # type: (str, str, Type[item.URLManifestItem], Any) -> sourcefile.SourceFile
+ rel_path_parts = tuple(path.split(os.path.sep))
+ s = mock.Mock(rel_path=path,
+ rel_path_parts=rel_path_parts,
+ hash=hash)
+ tests = [cls("/foobar", path, "/", item[0], **item[1]) for item in variants]
+ s.manifest_items = mock.Mock(return_value=(cls.item_type, tests))
+ return s # type: ignore
+
+
+def tree_and_sourcefile_mocks(source_files):
+ paths_dict = {}
+ tree = []
+ for source_file, file_hash, updated in source_files:
+ paths_dict[source_file.rel_path] = source_file
+ tree.append([source_file.rel_path, file_hash, updated])
+
+ def MockSourceFile(tests_root, path, url_base, file_hash):
+ return paths_dict[path]
+
+ return tree, MockSourceFile
+
+
+@hs.composite
+def sourcefile_strategy(draw):
+ item_classes = [item.TestharnessTest, item.RefTest, item.PrintRefTest,
+ item.ManualTest, item.WebDriverSpecTest,
+ item.ConformanceCheckerTest, item.SupportFile]
+ cls = draw(hs.sampled_from(item_classes))
+
+ path = "a"
+ rel_path_parts = tuple(path.split(os.path.sep))
+ hash = draw(hs.text(alphabet="0123456789abcdef", min_size=40, max_size=40))
+ s = mock.Mock(rel_path=path,
+ rel_path_parts=rel_path_parts,
+ hash=hash)
+
+ if cls in (item.RefTest, item.PrintRefTest):
+ ref_path = "b"
+ ref_eq = draw(hs.sampled_from(["==", "!="]))
+ test = cls("/foobar", path, "/", utils.from_os_path(path), references=[(utils.from_os_path(ref_path), ref_eq)])
+ elif cls is item.SupportFile:
+ test = cls("/foobar", path)
+ else:
+ test = cls("/foobar", path, "/", "foobar")
+
+ s.manifest_items = mock.Mock(return_value=(cls.item_type, [test]))
+ return s
+
+
+@hs.composite
+def manifest_tree(draw):
+ names = hs.text(alphabet=hs.characters(blacklist_characters="\0/\\:*\"?<>|"), min_size=1)
+ tree = hs.recursive(sourcefile_strategy(),
+ lambda children: hs.dictionaries(names, children, min_size=1),
+ max_leaves=10)
+
+ generated_root = draw(tree)
+ h.assume(isinstance(generated_root, dict))
+
+ reftest_urls = []
+ output = []
+ stack = [((k,), v) for k, v in generated_root.items()]
+ while stack:
+ path, node = stack.pop()
+ if isinstance(node, dict):
+ stack.extend((path + (k,), v) for k, v in node.items())
+ else:
+ rel_path = os.path.sep.join(path)
+ node.rel_path = rel_path
+ node.rel_path_parts = tuple(path)
+ for test_item in node.manifest_items.return_value[1]:
+ test_item.path = rel_path
+ if isinstance(test_item, item.RefTest):
+ if reftest_urls:
+ possible_urls = hs.sampled_from(reftest_urls) | names
+ else:
+ possible_urls = names
+ reference = hs.tuples(hs.sampled_from(["==", "!="]),
+ possible_urls)
+ references = hs.lists(reference, min_size=1, unique=True)
+ test_item.references = draw(references)
+ reftest_urls.append(test_item.url)
+ output.append(node)
+
+ return output
+
+
+@h.given(manifest_tree())
+# FIXME: Workaround for https://github.com/web-platform-tests/wpt/issues/22758
+@h.settings(suppress_health_check=(h.HealthCheck.too_slow,))
+@h.example([SourceFileWithTest("a", "0"*40, item.ConformanceCheckerTest)])
+def test_manifest_to_json(s):
+ m = manifest.Manifest("")
+
+ tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in s)
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ assert m.update(tree) is True
+
+ json_str = m.to_json()
+ loaded = manifest.Manifest.from_json("/", json_str)
+
+ assert list(loaded) == list(m)
+
+ assert loaded.to_json() == json_str
+
+
+@h.given(manifest_tree())
+# FIXME: Workaround for https://github.com/web-platform-tests/wpt/issues/22758
+@h.settings(suppress_health_check=(h.HealthCheck.too_slow,))
+@h.example([SourceFileWithTest("a", "0"*40, item.TestharnessTest)])
+@h.example([SourceFileWithTest("a", "0"*40, item.RefTest, references=[("/aa", "==")])])
+def test_manifest_idempotent(s):
+ m = manifest.Manifest("")
+
+ tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in s)
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ assert m.update(tree) is True
+
+ m1 = list(m)
+
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ assert m.update(tree) is False
+
+ assert list(m) == m1
+
+
+def test_manifest_to_json_forwardslash():
+ m = manifest.Manifest("")
+
+ s = SourceFileWithTest("a" + os.path.sep + "b", "0"*40, item.TestharnessTest)
+
+ tree, sourcefile_mock = tree_and_sourcefile_mocks([(s, None, True)])
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ assert m.update(tree) is True
+
+ assert m.to_json() == {
+ 'version': 8,
+ 'url_base': '/',
+ 'items': {
+ 'testharness': {'a': {'b': [
+ '0000000000000000000000000000000000000000',
+ (None, {})
+ ]}},
+ }
+ }
+
+
+def test_reftest_computation_chain():
+ m = manifest.Manifest("")
+
+ s1 = SourceFileWithTest("test1", "0"*40, item.RefTest, references=[("/test2", "==")])
+ s2 = SourceFileWithTest("test2", "0"*40, item.RefTest, references=[("/test3", "==")])
+
+ tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True), (s2, None, True)])
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ m.update(tree)
+
+ test1 = s1.manifest_items()[1][0]
+ test2 = s2.manifest_items()[1][0]
+
+ assert list(m) == [("reftest", test1.path, {test1}),
+ ("reftest", test2.path, {test2})]
+
+
+def test_iterpath():
+ m = manifest.Manifest("")
+
+ sources = [SourceFileWithTest("test1", "0"*40, item.RefTest, references=[("/test1-ref", "==")]),
+ SourceFileWithTests("test2", "1"*40, item.TestharnessTest, [("test2-1.html", {}),
+ ("test2-2.html", {})]),
+ SourceFileWithTest("test3", "0"*40, item.TestharnessTest)]
+ tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in sources)
+ assert len(tree) == len(sources)
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ m.update(tree)
+
+ assert {item.url for item in m.iterpath("test2")} == {"/test2-1.html",
+ "/test2-2.html"}
+ assert set(m.iterpath("missing")) == set()
+
+
+def test_no_update():
+ m = manifest.Manifest("")
+
+ s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest)
+ s2 = SourceFileWithTest("test2", "0"*40, item.TestharnessTest)
+
+ tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in [s1, s2])
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ m.update(tree)
+
+ test1 = s1.manifest_items()[1][0]
+ test2 = s2.manifest_items()[1][0]
+
+ assert list(m) == [("testharness", test1.path, {test1}),
+ ("testharness", test2.path, {test2})]
+
+ s1_1 = SourceFileWithTest("test1", "1"*40, item.ManualTest)
+
+ tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1_1, None, True), (s2, None, False)])
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ m.update(tree)
+
+ test1_1 = s1_1.manifest_items()[1][0]
+
+ assert list(m) == [("manual", test1_1.path, {test1_1}),
+ ("testharness", test2.path, {test2})]
+
+
+def test_no_update_delete():
+ m = manifest.Manifest("")
+
+ s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest)
+ s2 = SourceFileWithTest("test2", "0"*40, item.TestharnessTest)
+
+ tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True), (s2, None, True)])
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ m.update(tree)
+
+ test1 = s1.manifest_items()[1][0]
+
+ tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, False)])
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ m.update(tree)
+
+ assert list(m) == [("testharness", test1.path, {test1})]
+
+
+def test_update_from_json():
+ m = manifest.Manifest("")
+
+ s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest)
+ s2 = SourceFileWithTest("test2", "0"*40, item.TestharnessTest)
+
+ tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True), (s2, None, True)])
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ m.update(tree)
+
+ json_str = m.to_json()
+ m = manifest.Manifest.from_json("/", json_str)
+
+ tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True)])
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ m.update(tree)
+
+ test1 = s1.manifest_items()[1][0]
+
+ assert list(m) == [("testharness", test1.path, {test1})]
+
+
+def test_update_from_json_modified():
+ # Create the original manifest
+ m = manifest.Manifest("")
+ s1 = SourceFileWithTest("test1", "0"*40, item.TestharnessTest)
+ tree, sourcefile_mock = tree_and_sourcefile_mocks([(s1, None, True)])
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ m.update(tree)
+ json_str = m.to_json()
+
+ # Reload it from JSON
+ m = manifest.Manifest.from_json("/", json_str)
+
+ # Update it with timeout="long"
+ s2 = SourceFileWithTest("test1", "1"*40, item.TestharnessTest, timeout="long", pac="proxy.pac")
+ tree, sourcefile_mock = tree_and_sourcefile_mocks([(s2, None, True)])
+ with mock.patch("tools.manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ m.update(tree)
+ json_str = m.to_json()
+ assert json_str == {
+ 'items': {'testharness': {'test1': [
+ "1"*40,
+ (None, {'timeout': 'long', 'pac': 'proxy.pac'})
+ ]}},
+ 'url_base': '/',
+ 'version': 8
+ }
diff --git a/testing/web-platform/tests/tools/manifest/tests/test_sourcefile.py b/testing/web-platform/tests/tools/manifest/tests/test_sourcefile.py
new file mode 100644
index 0000000000..c0b281d244
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/tests/test_sourcefile.py
@@ -0,0 +1,911 @@
+# mypy: allow-untyped-defs
+
+import os
+
+import pytest
+
+from io import BytesIO
+from ...lint.lint import check_global_metadata
+from ..sourcefile import SourceFile, read_script_metadata, js_meta_re, python_meta_re
+
+
+def create(filename, contents=b""):
+ assert isinstance(contents, bytes)
+ return SourceFile("/", filename, "/", contents=contents)
+
+
+def items(s):
+ item_type, items = s.manifest_items()
+ if item_type == "support":
+ return []
+ else:
+ return [(item_type, item.url) for item in items]
+
+
+@pytest.mark.parametrize("rel_path", [
+ ".gitignore",
+ ".travis.yml",
+ "MANIFEST.json",
+ "tools/test.html",
+ "resources/test.html",
+ "common/test.html",
+ "support/test.html",
+ "css21/archive/test.html",
+ "conformance-checkers/test.html",
+ "conformance-checkers/README.md",
+ "conformance-checkers/html/Makefile",
+ "conformance-checkers/html/test.html",
+ "foo/tools/test.html",
+ "foo/resources/test.html",
+ "foo/support/test.html",
+ "foo/foo-manual.html.headers",
+ "crashtests/foo.html.ini",
+ "css/common/test.html",
+ "css/CSS2/archive/test.html",
+])
+def test_name_is_non_test(rel_path):
+ s = create(rel_path)
+ assert s.name_is_non_test or s.name_is_conformance_support
+
+ assert not s.content_is_testharness
+
+ assert items(s) == []
+
+
+@pytest.mark.parametrize("rel_path", [
+ "foo/common/test.html",
+ "foo/conformance-checkers/test.html",
+ "foo/_certs/test.html",
+ "foo/css21/archive/test.html",
+ "foo/CSS2/archive/test.html",
+ "css/css21/archive/test.html",
+ "foo/test-support.html",
+])
+def test_not_name_is_non_test(rel_path):
+ s = create(rel_path)
+ assert not (s.name_is_non_test or s.name_is_conformance_support)
+ # We aren't actually asserting what type of test these are, just that
+ # their name doesn't prohibit them from being tests.
+
+
+@pytest.mark.parametrize("rel_path", [
+ "foo/foo-manual.html",
+ "html/test-manual.html",
+ "html/test-manual.xhtml",
+ "html/test-manual.https.html",
+ "html/test-manual.https.xhtml"
+])
+def test_name_is_manual(rel_path):
+ s = create(rel_path)
+ assert not s.name_is_non_test
+ assert s.name_is_manual
+
+ assert not s.content_is_testharness
+
+ assert items(s) == [("manual", "/" + rel_path)]
+
+
+@pytest.mark.parametrize("rel_path", [
+ "html/test-visual.html",
+ "html/test-visual.xhtml",
+])
+def test_name_is_visual(rel_path):
+ s = create(rel_path)
+ assert not s.name_is_non_test
+ assert s.name_is_visual
+
+ assert not s.content_is_testharness
+
+ assert items(s) == [("visual", "/" + rel_path)]
+
+
+@pytest.mark.parametrize("rel_path", [
+ "css-namespaces-3/reftest/ref-lime-1.xml",
+ "css21/reference/pass_if_box_ahem.html",
+ "css21/csswg-issues/submitted/css2.1/reference/ref-green-box-100x100.xht",
+ "selectors-3/selectors-empty-001-ref.xml",
+ "css21/text/text-indent-wrap-001-notref-block-margin.xht",
+ "css21/text/text-indent-wrap-001-notref-block-margin.xht",
+ "css21/css-e-notation-ref-1.html",
+ "css21/floats/floats-placement-vertical-004-ref2.xht",
+ "css21/box/rtl-linebreak-notref1.xht",
+ "css21/box/rtl-linebreak-notref2.xht",
+ "html/canvas/element/drawing-images-to-the-canvas/drawimage_html_image_5_ref.html",
+ "html/canvas/element/line-styles/lineto_ref.html",
+ "html/rendering/non-replaced-elements/the-fieldset-element-0/ref.html"
+])
+def test_name_is_reference(rel_path):
+ s = create(rel_path)
+ assert not s.name_is_non_test
+ assert s.name_is_reference
+
+ assert not s.content_is_testharness
+
+ assert items(s) == []
+
+
+def test_name_is_tentative():
+ s = create("css/css-ui/appearance-revert-001.tentative.html")
+ assert s.name_is_tentative
+
+ s = create("css/css-ui/tentative/appearance-revert-001.html")
+ assert s.name_is_tentative
+
+ s = create("css/css-ui/appearance-revert-001.html")
+ assert not s.name_is_tentative
+
+
+@pytest.mark.parametrize("rel_path", [
+ "webdriver/tests/foo.py",
+ "webdriver/tests/print/foo.py",
+ "webdriver/tests/foo-crash.py",
+ "webdriver/tests/foo-visual.py",
+])
+def test_name_is_webdriver(rel_path):
+ s = create(rel_path)
+ assert s.name_is_webdriver
+
+ item_type, items = s.manifest_items()
+ assert item_type == "wdspec"
+
+
+def test_worker():
+ s = create("html/test.worker.js")
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert s.name_is_worker
+ assert not s.name_is_window
+ assert not s.name_is_reference
+
+ assert not s.content_is_testharness
+
+ item_type, items = s.manifest_items()
+ assert item_type == "testharness"
+
+ expected_urls = [
+ "/html/test.worker.html",
+ ]
+ assert len(items) == len(expected_urls)
+
+ for item, url in zip(items, expected_urls):
+ assert item.url == url
+ assert item.timeout is None
+
+
+def test_window():
+ s = create("html/test.window.js")
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert not s.name_is_worker
+ assert s.name_is_window
+ assert not s.name_is_reference
+
+ assert not s.content_is_testharness
+
+ item_type, items = s.manifest_items()
+ assert item_type == "testharness"
+
+ expected_urls = [
+ "/html/test.window.html",
+ ]
+ assert len(items) == len(expected_urls)
+
+ for item, url in zip(items, expected_urls):
+ assert item.url == url
+ assert item.timeout is None
+
+
+def test_worker_long_timeout():
+ contents = b"""// META: timeout=long
+importScripts('/resources/testharness.js')
+test()"""
+
+ metadata = list(read_script_metadata(BytesIO(contents), js_meta_re))
+ assert metadata == [("timeout", "long")]
+
+ s = create("html/test.worker.js", contents=contents)
+ assert s.name_is_worker
+
+ item_type, items = s.manifest_items()
+ assert item_type == "testharness"
+
+ for item in items:
+ assert item.timeout == "long"
+
+
+def test_window_long_timeout():
+ contents = b"""// META: timeout=long
+test()"""
+
+ metadata = list(read_script_metadata(BytesIO(contents), js_meta_re))
+ assert metadata == [("timeout", "long")]
+
+ s = create("html/test.window.js", contents=contents)
+ assert s.name_is_window
+
+ item_type, items = s.manifest_items()
+ assert item_type == "testharness"
+
+ for item in items:
+ assert item.timeout == "long"
+
+
+def test_worker_with_variants():
+ contents = b"""// META: variant=
+// META: variant=?wss
+test()"""
+
+ s = create("html/test.worker.js", contents=contents)
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert s.name_is_worker
+ assert not s.name_is_window
+ assert not s.name_is_reference
+
+ assert not s.content_is_testharness
+
+ item_type, items = s.manifest_items()
+ assert item_type == "testharness"
+
+ expected_urls = [
+ "/html/test.worker.html" + suffix
+ for suffix in ["", "?wss"]
+ ]
+ assert len(items) == len(expected_urls)
+
+ for item, url in zip(items, expected_urls):
+ assert item.url == url
+ assert item.timeout is None
+
+
+def test_window_with_variants():
+ contents = b"""// META: variant=
+// META: variant=?wss
+test()"""
+
+ s = create("html/test.window.js", contents=contents)
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert not s.name_is_worker
+ assert s.name_is_window
+ assert not s.name_is_reference
+
+ assert not s.content_is_testharness
+
+ item_type, items = s.manifest_items()
+ assert item_type == "testharness"
+
+ expected_urls = [
+ "/html/test.window.html" + suffix
+ for suffix in ["", "?wss"]
+ ]
+ assert len(items) == len(expected_urls)
+
+ for item, url in zip(items, expected_urls):
+ assert item.url == url
+ assert item.timeout is None
+
+
+def test_python_long_timeout():
+ contents = b"""# META: timeout=long
+
+"""
+
+ metadata = list(read_script_metadata(BytesIO(contents),
+ python_meta_re))
+ assert metadata == [("timeout", "long")]
+
+ s = create("webdriver/test.py", contents=contents)
+ assert s.name_is_webdriver
+
+ item_type, items = s.manifest_items()
+ assert item_type == "wdspec"
+
+ for item in items:
+ assert item.timeout == "long"
+
+
+def test_multi_global():
+ s = create("html/test.any.js")
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+
+ assert not s.content_is_testharness
+
+ item_type, items = s.manifest_items()
+ assert item_type == "testharness"
+
+ expected_urls = [
+ "/html/test.any.html",
+ "/html/test.any.worker.html",
+ ]
+ assert len(items) == len(expected_urls)
+
+ for item, url in zip(items, expected_urls):
+ assert item.url == url
+ assert item.timeout is None
+
+
+def test_multi_global_long_timeout():
+ contents = b"""// META: timeout=long
+importScripts('/resources/testharness.js')
+test()"""
+
+ metadata = list(read_script_metadata(BytesIO(contents), js_meta_re))
+ assert metadata == [("timeout", "long")]
+
+ s = create("html/test.any.js", contents=contents)
+ assert s.name_is_multi_global
+
+ item_type, items = s.manifest_items()
+ assert item_type == "testharness"
+
+ for item in items:
+ assert item.timeout == "long"
+
+
+@pytest.mark.parametrize("input,expected", [
+ (b"window", {"window"}),
+ (b"sharedworker", {"sharedworker"}),
+ (b"sharedworker,serviceworker", {"serviceworker", "sharedworker"}),
+ (b"worker", {"dedicatedworker", "serviceworker", "sharedworker"}),
+])
+def test_multi_global_with_custom_globals(input, expected):
+ contents = b"""// META: global=%s
+test()""" % input
+
+ assert list(check_global_metadata(input)) == []
+
+ s = create("html/test.any.js", contents=contents)
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+
+ assert not s.content_is_testharness
+
+ item_type, items = s.manifest_items()
+ assert item_type == "testharness"
+
+ urls = {
+ "dedicatedworker": "/html/test.any.worker.html",
+ "serviceworker": "/html/test.any.serviceworker.html",
+ "sharedworker": "/html/test.any.sharedworker.html",
+ "window": "/html/test.any.html",
+ }
+
+ expected_urls = sorted(urls[ty] for ty in expected)
+ assert len(items) == len(expected_urls)
+
+ for item, url in zip(items, expected_urls):
+ assert item.url == url
+ assert item.jsshell is False
+ assert item.timeout is None
+
+
+def test_multi_global_with_jsshell_globals():
+ contents = b"""// META: global=window,dedicatedworker,jsshell
+test()"""
+
+ s = create("html/test.any.js", contents=contents)
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+
+ assert not s.content_is_testharness
+
+ item_type, items = s.manifest_items()
+ assert item_type == "testharness"
+
+ expected = [
+ ("/html/test.any.html", False),
+ ("/html/test.any.js", True),
+ ("/html/test.any.worker.html", False),
+ ]
+ assert len(items) == len(expected)
+
+ for item, (url, jsshell) in zip(items, expected):
+ assert item.url == url
+ assert item.jsshell == jsshell
+ assert item.timeout is None
+
+
+def test_multi_global_with_variants():
+ contents = b"""// META: global=window,worker
+// META: variant=
+// META: variant=?wss
+test()"""
+
+ s = create("html/test.any.js", contents=contents)
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+
+ assert not s.content_is_testharness
+
+ item_type, items = s.manifest_items()
+ assert item_type == "testharness"
+
+ urls = {
+ "dedicatedworker": "/html/test.any.worker.html",
+ "serviceworker": "/html/test.any.serviceworker.html",
+ "sharedworker": "/html/test.any.sharedworker.html",
+ "window": "/html/test.any.html",
+ }
+
+ expected_urls = sorted(
+ urls[ty] + suffix
+ for ty in ["dedicatedworker", "serviceworker", "sharedworker", "window"]
+ for suffix in ["", "?wss"]
+ )
+ assert len(items) == len(expected_urls)
+
+ for item, url in zip(items, expected_urls):
+ assert item.url == url
+ assert item.timeout is None
+
+
+@pytest.mark.parametrize("input,expected", [
+ (b"""//META: foo=bar\n""", [("foo", "bar")]),
+ (b"""// META: foo=bar\n""", [("foo", "bar")]),
+ (b"""// META: foo=bar\n""", [("foo", "bar")]),
+ (b"""\n// META: foo=bar\n""", []),
+ (b""" // META: foo=bar\n""", []),
+ (b"""// META: foo=bar\n// META: baz=quux\n""", [("foo", "bar"), ("baz", "quux")]),
+ (b"""// META: foo=bar\n\n// META: baz=quux\n""", [("foo", "bar")]),
+ (b"""// META: foo=bar\n// Start of the test\n// META: baz=quux\n""", [("foo", "bar")]),
+ (b"""// META:\n""", []),
+ (b"""// META: foobar\n""", []),
+])
+def test_script_metadata(input, expected):
+ metadata = read_script_metadata(BytesIO(input), js_meta_re)
+ assert list(metadata) == expected
+
+
+@pytest.mark.parametrize("ext", ["htm", "html"])
+def test_testharness(ext):
+ content = b"<script src=/resources/testharness.js></script>"
+
+ filename = "html/test." + ext
+ s = create(filename, content)
+
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+
+ assert s.content_is_testharness
+
+ assert items(s) == [("testharness", "/" + filename)]
+
+
+@pytest.mark.parametrize("variant", ["", "?foo", "#bar", "?foo#bar"])
+def test_testharness_variant(variant):
+ content = (b"<meta name=variant content=\"%s\">" % variant.encode("utf-8") +
+ b"<meta name=variant content=\"?fixed\">" +
+ b"<script src=/resources/testharness.js></script>")
+
+ filename = "html/test.html"
+ s = create(filename, content)
+
+ assert s.test_variants == [variant, "?fixed"]
+
+
+@pytest.mark.parametrize("variant", ["?", "#", "?#bar"])
+def test_testharness_variant_invalid(variant):
+ content = (b"<meta name=variant content=\"%s\">" % variant.encode("utf-8") +
+ b"<meta name=variant content=\"?fixed\">" +
+ b"<script src=/resources/testharness.js></script>")
+
+ filename = "html/test.html"
+ s = create(filename, content)
+
+ with pytest.raises(ValueError):
+ s.test_variants
+
+
+@pytest.mark.parametrize("ext", ["htm", "html"])
+def test_relative_testharness(ext):
+ content = b"<script src=../resources/testharness.js></script>"
+
+ filename = "html/test." + ext
+ s = create(filename, content)
+
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+
+ assert not s.content_is_testharness
+
+ assert items(s) == []
+
+
+@pytest.mark.parametrize("ext", ["xhtml", "xht", "xml"])
+def test_testharness_xhtml(ext):
+ content = b"""
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+</head>
+<body/>
+</html>
+"""
+
+ filename = "html/test." + ext
+ s = create(filename, content)
+
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+
+ assert s.content_is_testharness
+
+ assert items(s) == [("testharness", "/" + filename)]
+
+
+@pytest.mark.parametrize("ext", ["xhtml", "xht", "xml"])
+def test_relative_testharness_xhtml(ext):
+ content = b"""
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<script src="../resources/testharness.js"></script>
+<script src="../resources/testharnessreport.js"></script>
+</head>
+<body/>
+</html>
+"""
+
+ filename = "html/test." + ext
+ s = create(filename, content)
+
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+
+ assert not s.content_is_testharness
+
+ assert items(s) == []
+
+
+def test_testharness_svg():
+ content = b"""\
+<?xml version="1.0" encoding="UTF-8"?>
+<svg xmlns="http://www.w3.org/2000/svg"
+ xmlns:h="http://www.w3.org/1999/xhtml"
+ version="1.1"
+ width="100%" height="100%" viewBox="0 0 400 400">
+<title>Null test</title>
+<h:script src="/resources/testharness.js"/>
+<h:script src="/resources/testharnessreport.js"/>
+</svg>
+"""
+
+ filename = "html/test.svg"
+ s = create(filename, content)
+
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+
+ assert s.root is not None
+ assert s.content_is_testharness
+
+ assert items(s) == [("testharness", "/" + filename)]
+
+
+def test_relative_testharness_svg():
+ content = b"""\
+<?xml version="1.0" encoding="UTF-8"?>
+<svg xmlns="http://www.w3.org/2000/svg"
+ xmlns:h="http://www.w3.org/1999/xhtml"
+ version="1.1"
+ width="100%" height="100%" viewBox="0 0 400 400">
+<title>Null test</title>
+<h:script src="../resources/testharness.js"/>
+<h:script src="../resources/testharnessreport.js"/>
+</svg>
+"""
+
+ filename = "html/test.svg"
+ s = create(filename, content)
+
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+
+ assert s.root is not None
+ assert not s.content_is_testharness
+
+ assert items(s) == []
+
+
+@pytest.mark.parametrize("filename", ["test", "test.test"])
+def test_testharness_ext(filename):
+ content = b"<script src=/resources/testharness.js></script>"
+
+ s = create("html/" + filename, content)
+
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+
+ assert not s.root
+ assert not s.content_is_testharness
+
+ assert items(s) == []
+
+
+@pytest.mark.parametrize("ext", ["htm", "html"])
+def test_testdriver(ext):
+ content = b"<script src=/resources/testdriver.js></script>"
+
+ filename = "html/test." + ext
+ s = create(filename, content)
+
+ assert s.has_testdriver
+
+
+@pytest.mark.parametrize("ext", ["htm", "html"])
+def test_relative_testdriver(ext):
+ content = b"<script src=../resources/testdriver.js></script>"
+
+ filename = "html/test." + ext
+ s = create(filename, content)
+
+ assert not s.has_testdriver
+
+
+@pytest.mark.parametrize("ext", ["htm", "html"])
+def test_reftest(ext):
+ content = b"<link rel=match href=ref.html>"
+
+ filename = "foo/test." + ext
+ s = create(filename, content)
+
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+ assert not s.content_is_testharness
+
+ assert s.content_is_ref_node
+
+ assert items(s) == [("reftest", "/" + filename)]
+
+
+@pytest.mark.parametrize("ext", ["xht", "html", "xhtml", "htm", "xml", "svg"])
+def test_css_visual(ext):
+ content = b"""
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<link rel="help" href="http://www.w3.org/TR/CSS21/box.html#bidi-box-model"/>
+</head>
+<body></body>
+</html>
+"""
+
+ filename = "html/test." + ext
+ s = create(filename, content)
+
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+ assert not s.content_is_testharness
+ assert not s.content_is_ref_node
+
+ assert s.content_is_css_visual
+
+ assert items(s) == [("visual", "/" + filename)]
+
+
+@pytest.mark.parametrize("ext", ["xht", "xhtml", "xml"])
+def test_xhtml_with_entity(ext):
+ content = b"""
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+&nbsp;
+</html>
+"""
+
+ filename = "html/test." + ext
+ s = create(filename, content)
+
+ assert s.root is not None
+
+ assert items(s) == []
+
+
+def test_no_parse():
+ s = create("foo/bar.xml", "\uFFFF".encode("utf-8"))
+
+ assert not s.name_is_non_test
+ assert not s.name_is_manual
+ assert not s.name_is_visual
+ assert not s.name_is_multi_global
+ assert not s.name_is_worker
+ assert not s.name_is_reference
+ assert not s.content_is_testharness
+ assert not s.content_is_ref_node
+ assert not s.content_is_css_visual
+
+ assert items(s) == []
+
+
+@pytest.mark.parametrize("input,expected", [
+ ("aA", "aA"),
+ ("a/b", "a/b" if os.name != "nt" else "a\\b"),
+ ("a\\b", "a\\b")
+])
+def test_relpath_normalized(input, expected):
+ s = create(input, b"")
+ assert s.rel_path == expected
+
+
+@pytest.mark.parametrize("url", [b"ref.html",
+ b"\x20ref.html",
+ b"ref.html\x20",
+ b"\x09\x0a\x0c\x0d\x20ref.html\x09\x0a\x0c\x0d\x20"])
+def test_reftest_url_whitespace(url):
+ content = b"<link rel=match href='%s'>" % url
+ s = create("foo/test.html", content)
+ assert s.references == [("/foo/ref.html", "==")]
+
+
+@pytest.mark.parametrize("url", [b"http://example.com/",
+ b"\x20http://example.com/",
+ b"http://example.com/\x20",
+ b"\x09\x0a\x0c\x0d\x20http://example.com/\x09\x0a\x0c\x0d\x20"])
+def test_spec_links_whitespace(url):
+ content = b"<link rel=help href='%s'>" % url
+ s = create("foo/test.html", content)
+ assert s.spec_links == {"http://example.com/"}
+
+
+def test_url_base():
+ contents = b"""// META: global=window,worker
+// META: variant=
+// META: variant=?wss
+test()"""
+
+ s = SourceFile("/", "html/test.any.js", "/_fake_base/", contents=contents)
+ item_type, items = s.manifest_items()
+
+ assert item_type == "testharness"
+
+ assert [item.url for item in items] == ['/_fake_base/html/test.any.html',
+ '/_fake_base/html/test.any.html?wss',
+ '/_fake_base/html/test.any.serviceworker.html',
+ '/_fake_base/html/test.any.serviceworker.html?wss',
+ '/_fake_base/html/test.any.sharedworker.html',
+ '/_fake_base/html/test.any.sharedworker.html?wss',
+ '/_fake_base/html/test.any.worker.html',
+ '/_fake_base/html/test.any.worker.html?wss']
+
+ assert items[0].url_base == "/_fake_base/"
+
+
+@pytest.mark.parametrize("fuzzy, expected", [
+ (b"ref.html:1;200", {("/foo/test.html", "/foo/ref.html", "=="): [[1, 1], [200, 200]]}),
+ (b"ref.html:0-1;100-200", {("/foo/test.html", "/foo/ref.html", "=="): [[0, 1], [100, 200]]}),
+ (b"0-1;100-200", {None: [[0,1], [100, 200]]}),
+ (b"maxDifference=1;totalPixels=200", {None: [[1, 1], [200, 200]]}),
+ (b"totalPixels=200;maxDifference=1", {None: [[1, 1], [200, 200]]}),
+ (b"totalPixels=200;1", {None: [[1, 1], [200, 200]]}),
+ (b"maxDifference=1;200", {None: [[1, 1], [200, 200]]}),])
+def test_reftest_fuzzy(fuzzy, expected):
+ content = b"""<link rel=match href=ref.html>
+<meta name=fuzzy content="%s">
+""" % fuzzy
+
+ s = create("foo/test.html", content)
+
+ assert s.content_is_ref_node
+ assert s.fuzzy == expected
+
+@pytest.mark.parametrize("fuzzy, expected", [
+ ([b"1;200"], {None: [[1, 1], [200, 200]]}),
+ ([b"ref-2.html:0-1;100-200"], {("/foo/test.html", "/foo/ref-2.html", "=="): [[0, 1], [100, 200]]}),
+ ([b"1;200", b"ref-2.html:0-1;100-200"],
+ {None: [[1, 1], [200, 200]],
+ ("/foo/test.html", "/foo/ref-2.html", "=="): [[0,1], [100, 200]]})])
+def test_reftest_fuzzy_multi(fuzzy, expected):
+ content = b"""<link rel=match href=ref-1.html>
+<link rel=match href=ref-2.html>
+"""
+ for item in fuzzy:
+ content += b'\n<meta name=fuzzy content="%s">' % item
+
+ s = create("foo/test.html", content)
+
+ assert s.content_is_ref_node
+ assert s.fuzzy == expected
+
+@pytest.mark.parametrize("pac, expected", [
+ (b"proxy.pac", "proxy.pac")])
+def test_pac(pac, expected):
+ content = b"""
+<meta name=pac content="%s">
+""" % pac
+
+ s = create("foo/test.html", content)
+ assert s.pac == expected
+
+@pytest.mark.parametrize("page_ranges, expected", [
+ (b"1-2", [[1, 2]]),
+ (b"1-1,3-4", [[1, 1], [3, 4]]),
+ (b"1,3", [[1], [3]]),
+ (b"2-", [[2, None]]),
+ (b"-2", [[None, 2]]),
+ (b"-2,2-", [[None, 2], [2, None]]),
+ (b"1,6-7,8", [[1], [6, 7], [8]])])
+def test_page_ranges(page_ranges, expected):
+ content = b"""<link rel=match href=ref.html>
+<meta name=reftest-pages content="%s">
+""" % page_ranges
+
+ s = create("foo/test-print.html", content)
+
+ assert s.page_ranges == {"/foo/test-print.html": expected}
+
+
+@pytest.mark.parametrize("page_ranges", [b"a", b"1-a", b"1=2", b"1-2:2-3"])
+def test_page_ranges_invalid(page_ranges):
+ content = b"""<link rel=match href=ref.html>
+<meta name=reftest-pages content="%s">
+""" % page_ranges
+
+ s = create("foo/test-print.html", content)
+ with pytest.raises(ValueError):
+ s.page_ranges
+
+
+def test_hash():
+ s = SourceFile("/", "foo", "/", contents=b"Hello, World!")
+ assert "b45ef6fec89518d314f546fd6c3025367b721684" == s.hash
diff --git a/testing/web-platform/tests/tools/manifest/tests/test_utils.py b/testing/web-platform/tests/tools/manifest/tests/test_utils.py
new file mode 100644
index 0000000000..e8cf1ad689
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/tests/test_utils.py
@@ -0,0 +1,15 @@
+# mypy: allow-untyped-defs
+
+import os
+import subprocess
+from unittest import mock
+
+from .. import utils
+
+
+def test_git_for_path_no_git():
+ this_dir = os.path.dirname(__file__)
+ with mock.patch(
+ "subprocess.check_output",
+ side_effect=subprocess.CalledProcessError(1, "foo")):
+ assert utils.git(this_dir) is None
diff --git a/testing/web-platform/tests/tools/manifest/typedata.py b/testing/web-platform/tests/tools/manifest/typedata.py
new file mode 100644
index 0000000000..4061c9e610
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/typedata.py
@@ -0,0 +1,336 @@
+from collections.abc import MutableMapping
+
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Any
+ from typing import Dict
+ from typing import Iterator
+ from typing import List
+ from typing import Optional
+ from typing import Set
+ from typing import Text
+ from typing import Tuple
+ from typing import Type
+ from typing import Union
+
+ # avoid actually importing these, they're only used by type comments
+ from . import item
+ from . import manifest
+
+
+if MYPY:
+ TypeDataType = MutableMapping[Tuple[str, ...], Set[item.ManifestItem]]
+ PathHashType = MutableMapping[Tuple[str, ...], str]
+else:
+ TypeDataType = MutableMapping
+ PathHashType = MutableMapping
+
+
+class TypeData(TypeDataType):
+ def __init__(self, m, type_cls):
+ # type: (manifest.Manifest, Type[item.ManifestItem]) -> None
+ """Dict-like object containing the TestItems for each test type.
+
+ Loading an actual Item class for each test is unnecessarily
+ slow, so this class allows lazy-loading of the test
+ items. When the manifest is loaded we store the raw json
+ corresponding to the test type, and only create an Item
+ subclass when the test is accessed. In order to remain
+ API-compatible with consumers that depend on getting an Item
+ from iteration, we do eagerly load all items when iterating
+ over the class."""
+ self._manifest = m
+ self._type_cls = type_cls # type: Type[item.ManifestItem]
+ self._json_data = {} # type: Dict[Text, Any]
+ self._data = {} # type: Dict[Text, Any]
+ self._hashes = {} # type: Dict[Tuple[Text, ...], Text]
+ self.hashes = PathHash(self)
+
+ def _delete_node(self, data, key):
+ # type: (Dict[Text, Any], Tuple[Text, ...]) -> None
+ """delete a path from a Dict data with a given key"""
+ path = []
+ node = data
+ for pathseg in key[:-1]:
+ path.append((node, pathseg))
+ node = node[pathseg]
+ if not isinstance(node, dict):
+ raise KeyError(key)
+
+ del node[key[-1]]
+ while path:
+ node, pathseg = path.pop()
+ if len(node[pathseg]) == 0:
+ del node[pathseg]
+ else:
+ break
+
+ def __getitem__(self, key):
+ # type: (Tuple[Text, ...]) -> Set[item.ManifestItem]
+ node = self._data # type: Union[Dict[Text, Any], Set[item.ManifestItem], List[Any]]
+ for pathseg in key:
+ if isinstance(node, dict) and pathseg in node:
+ node = node[pathseg]
+ else:
+ break
+ else:
+ if isinstance(node, set):
+ return node
+ else:
+ raise KeyError(key)
+
+ node = self._json_data
+ found = False
+ for pathseg in key:
+ if isinstance(node, dict) and pathseg in node:
+ node = node[pathseg]
+ else:
+ break
+ else:
+ found = True
+
+ if not found:
+ raise KeyError(key)
+
+ if not isinstance(node, list):
+ raise KeyError(key)
+
+ self._hashes[key] = node[0]
+
+ data = set()
+ path = "/".join(key)
+ for test in node[1:]:
+ manifest_item = self._type_cls.from_json(self._manifest, path, test)
+ data.add(manifest_item)
+
+ node = self._data
+ assert isinstance(node, dict)
+ for pathseg in key[:-1]:
+ node = node.setdefault(pathseg, {})
+ assert isinstance(node, dict)
+ assert key[-1] not in node
+ node[key[-1]] = data
+
+ self._delete_node(self._json_data, key)
+
+ return data
+
+ def __setitem__(self, key, value):
+ # type: (Tuple[Text, ...], Set[item.ManifestItem]) -> None
+ try:
+ self._delete_node(self._json_data, key)
+ except KeyError:
+ pass
+
+ node = self._data
+ for i, pathseg in enumerate(key[:-1]):
+ node = node.setdefault(pathseg, {})
+ if not isinstance(node, dict):
+ raise KeyError(f"{key!r} is a child of a test ({key[:i+1]!r})")
+ node[key[-1]] = value
+
+ def __delitem__(self, key):
+ # type: (Tuple[Text, ...]) -> None
+ try:
+ self._delete_node(self._data, key)
+ except KeyError:
+ self._delete_node(self._json_data, key)
+ else:
+ try:
+ del self._hashes[key]
+ except KeyError:
+ pass
+
+ def __iter__(self):
+ # type: () -> Iterator[Tuple[Text, ...]]
+ """Iterator over keys in the TypeData in codepoint order"""
+ data_node = self._data # type: Optional[Union[Dict[Text, Any], Set[item.ManifestItem]]]
+ json_node = self._json_data # type: Optional[Union[Dict[Text, Any], List[Any]]]
+ path = tuple() # type: Tuple[Text, ...]
+ stack = [(data_node, json_node, path)]
+ while stack:
+ data_node, json_node, path = stack.pop()
+ if isinstance(data_node, set) or isinstance(json_node, list):
+ assert data_node is None or json_node is None
+ yield path
+ else:
+ assert data_node is None or isinstance(data_node, dict)
+ assert json_node is None or isinstance(json_node, dict)
+
+ keys = set() # type: Set[Text]
+ if data_node is not None:
+ keys |= set(iter(data_node))
+ if json_node is not None:
+ keys |= set(iter(json_node))
+
+ for key in sorted(keys, reverse=True):
+ stack.append((data_node.get(key) if data_node is not None else None,
+ json_node.get(key) if json_node is not None else None,
+ path + (key,)))
+
+ def __len__(self):
+ # type: () -> int
+ count = 0
+
+ stack = [self._data] # type: List[Union[Dict[Text, Any], Set[item.ManifestItem]]]
+ while stack:
+ v = stack.pop()
+ if isinstance(v, set):
+ count += 1
+ else:
+ stack.extend(v.values())
+
+ json_stack = [self._json_data] # type: List[Union[Dict[Text, Any], List[Any]]]
+ while json_stack:
+ json_v = json_stack.pop()
+ if isinstance(json_v, list):
+ count += 1
+ else:
+ json_stack.extend(json_v.values())
+
+ return count
+
+ def __nonzero__(self):
+ # type: () -> bool
+ return bool(self._data) or bool(self._json_data)
+
+ __bool__ = __nonzero__
+
+ def __contains__(self, key):
+ # type: (Any) -> bool
+ # we provide our own impl of this to avoid calling __getitem__ and generating items for
+ # those in self._json_data
+ node = self._data
+ for pathseg in key:
+ if pathseg in node:
+ node = node[pathseg]
+ else:
+ break
+ else:
+ return bool(isinstance(node, set))
+
+ node = self._json_data
+ for pathseg in key:
+ if pathseg in node:
+ node = node[pathseg]
+ else:
+ break
+ else:
+ return bool(isinstance(node, list))
+
+ return False
+
+ def clear(self):
+ # type: () -> None
+ # much, much simpler/quicker than that defined in MutableMapping
+ self._json_data.clear()
+ self._data.clear()
+ self._hashes.clear()
+
+ def set_json(self, json_data):
+ # type: (Dict[Text, Any]) -> None
+ """Provide the object with a raw JSON blob
+
+ Note that this object graph is assumed to be owned by the TypeData
+ object after the call, so the caller must not mutate any part of the
+ graph.
+ """
+ if self._json_data:
+ raise ValueError("set_json call when JSON data is not empty")
+
+ self._json_data = json_data
+
+ def to_json(self):
+ # type: () -> Dict[Text, Any]
+ """Convert the current data to JSON
+
+ Note that the returned object may contain references to the internal
+ data structures, and is only guaranteed to be valid until the next
+ __getitem__, __setitem__, __delitem__ call, so the caller must not
+ mutate any part of the returned object graph.
+
+ """
+ json_rv = self._json_data.copy()
+
+ def safe_sorter(element):
+ # type: (Tuple[str,str]) -> Tuple[str,str]
+ """key function to sort lists with None values."""
+ if element and not element[0]:
+ return ("", element[1])
+ else:
+ return element
+
+ stack = [(self._data, json_rv, tuple())] # type: List[Tuple[Dict[Text, Any], Dict[Text, Any], Tuple[Text, ...]]]
+ while stack:
+ data_node, json_node, par_full_key = stack.pop()
+ for k, v in data_node.items():
+ full_key = par_full_key + (k,)
+ if isinstance(v, set):
+ assert k not in json_node
+ json_node[k] = [self._hashes.get(
+ full_key)] + [t for t in sorted((test.to_json() for test in v), key=safe_sorter)]
+ else:
+ json_node[k] = json_node.get(k, {}).copy()
+ stack.append((v, json_node[k], full_key))
+
+ return json_rv
+
+
+class PathHash(PathHashType):
+ def __init__(self, data):
+ # type: (TypeData) -> None
+ self._data = data
+
+ def __getitem__(self, k):
+ # type: (Tuple[Text, ...]) -> Text
+ if k not in self._data:
+ raise KeyError
+
+ if k in self._data._hashes:
+ return self._data._hashes[k]
+
+ node = self._data._json_data
+ for pathseg in k:
+ if pathseg in node:
+ node = node[pathseg]
+ else:
+ break
+ else:
+ return node[0] # type: ignore
+
+ assert False, "unreachable"
+ raise KeyError
+
+ def __setitem__(self, k, v):
+ # type: (Tuple[Text, ...], Text) -> None
+ if k not in self._data:
+ raise KeyError
+
+ if k in self._data._hashes:
+ self._data._hashes[k] = v
+
+ node = self._data._json_data
+ for pathseg in k:
+ if pathseg in node:
+ node = node[pathseg]
+ else:
+ break
+ else:
+ node[0] = v # type: ignore
+ return
+
+ self._data._hashes[k] = v
+
+ def __delitem__(self, k):
+ # type: (Tuple[Text, ...]) -> None
+ raise ValueError("keys here must match underlying data")
+
+ def __iter__(self):
+ # type: () -> Iterator[Tuple[Text, ...]]
+ return iter(self._data)
+
+ def __len__(self):
+ # type: () -> int
+ return len(self._data)
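As a quick illustration of the lookup order PathHash.__getitem__ implements above, here is a minimal, self-contained sketch using plain dicts as stand-ins for TypeData._hashes and TypeData._json_data (the keys and hash values are made up):

    # Hypothetical stand-ins for TypeData._hashes and TypeData._json_data.
    hashes = {("dir", "a.html"): "deadbeef"}
    json_data = {"dir": {"b.html": ["cafef00d", ["/dir/b.html", {}]]}}

    def lookup_hash(key):
        """Prefer the in-memory hash; otherwise read element 0 of the JSON leaf list."""
        if key in hashes:
            return hashes[key]
        node = json_data
        for seg in key:
            node = node[seg]      # KeyError if the path is unknown
        return node[0]            # leaf lists store the hash first, then the tests

    assert lookup_hash(("dir", "a.html")) == "deadbeef"   # served from the hash dict
    assert lookup_hash(("dir", "b.html")) == "cafef00d"   # served from the JSON blob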
diff --git a/testing/web-platform/tests/tools/manifest/update.py b/testing/web-platform/tests/tools/manifest/update.py
new file mode 100755
index 0000000000..d7ef2082eb
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/update.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python3
+import argparse
+import os
+
+from . import manifest
+from . import vcs
+from .log import get_logger, enable_debug_logging
+from .download import download_from_github
+
+here = os.path.dirname(__file__)
+
+wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
+
+logger = get_logger()
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Any
+ from typing import Optional
+ from .manifest import Manifest # avoid cyclic import
+
+
+def update(tests_root, # type: str
+ manifest, # type: Manifest
+ manifest_path=None, # type: Optional[str]
+ working_copy=True, # type: bool
+ cache_root=None, # type: Optional[str]
+ rebuild=False, # type: bool
+ parallel=True # type: bool
+ ):
+ # type: (...) -> bool
+ logger.warning("Deprecated; use manifest.load_and_update instead")
+ logger.info("Updating manifest")
+
+ tree = vcs.get_tree(tests_root, manifest, manifest_path, cache_root,
+ working_copy, rebuild)
+ return manifest.update(tree, parallel)
+
+
+def update_from_cli(**kwargs):
+ # type: (**Any) -> None
+ tests_root = kwargs["tests_root"]
+ path = kwargs["path"]
+ assert tests_root is not None
+
+ if not kwargs["rebuild"] and kwargs["download"]:
+ download_from_github(path, tests_root)
+
+ manifest.load_and_update(tests_root,
+ path,
+ kwargs["url_base"],
+ update=True,
+ rebuild=kwargs["rebuild"],
+ cache_root=kwargs["cache_root"],
+ parallel=kwargs["parallel"])
+
+
+def abs_path(path):
+ # type: (str) -> str
+ return os.path.abspath(os.path.expanduser(path))
+
+
+def create_parser():
+ # type: () -> argparse.ArgumentParser
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "-v", "--verbose", dest="verbose", action="store_true", default=False,
+ help="Turn on verbose logging")
+ parser.add_argument(
+ "-p", "--path", type=abs_path, help="Path to manifest file.")
+ parser.add_argument(
+ "--tests-root", type=abs_path, default=wpt_root, help="Path to root of tests.")
+ parser.add_argument(
+ "-r", "--rebuild", action="store_true", default=False,
+ help="Force a full rebuild of the manifest.")
+ parser.add_argument(
+ "--url-base", action="store", default="/",
+ help="Base url to use as the mount point for tests in this manifest.")
+ parser.add_argument(
+ "--no-download", dest="download", action="store_false", default=True,
+ help="Never attempt to download the manifest.")
+ parser.add_argument(
+ "--cache-root", action="store", default=os.path.join(wpt_root, ".wptcache"),
+ help="Path in which to store any caches (default <tests_root>/.wptcache/)")
+ parser.add_argument(
+ "--no-parallel", dest="parallel", action="store_false", default=True,
+ help="Do not parallelize building the manifest")
+ return parser
+
+
+def run(*args, **kwargs):
+ # type: (*Any, **Any) -> None
+ if kwargs["path"] is None:
+ kwargs["path"] = os.path.join(kwargs["tests_root"], "MANIFEST.json")
+ if kwargs["verbose"]:
+ enable_debug_logging()
+ update_from_cli(**kwargs)
+
+
+def main():
+ # type: () -> None
+ opts = create_parser().parse_args()
+
+ run(**vars(opts))
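For reference, a minimal sketch of driving this updater programmatically rather than via main(); the import path assumes the checkout root is on sys.path, and the tests-root path is a placeholder:

    from tools.manifest import update as manifest_update

    parser = manifest_update.create_parser()
    # Hypothetical invocation: force a full rebuild of a local checkout without
    # first downloading a cached manifest from GitHub.
    opts = parser.parse_args(["--no-download", "--rebuild",
                              "--tests-root", "/path/to/wpt"])
    manifest_update.run(**vars(opts))   # fills in the default path, then calls update_from_cli()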
diff --git a/testing/web-platform/tests/tools/manifest/utils.py b/testing/web-platform/tests/tools/manifest/utils.py
new file mode 100644
index 0000000000..59ddb66378
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/utils.py
@@ -0,0 +1,93 @@
+import os
+import subprocess
+import sys
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Text
+ from typing import Callable
+ from typing import Any
+ from typing import Generic
+ from typing import TypeVar
+ from typing import Optional
+ T = TypeVar("T")
+else:
+ # eww, eww, ewwww
+ Generic = {}
+ T = object()
+ Generic[T] = object
+
+
+def rel_path_to_url(rel_path, url_base="/"):
+ # type: (Text, Text) -> Text
+ assert not os.path.isabs(rel_path), rel_path
+ if url_base[0] != "/":
+ url_base = "/" + url_base
+ if url_base[-1] != "/":
+ url_base += "/"
+ return url_base + rel_path.replace(os.sep, "/")
+
+
+def from_os_path(path):
+ # type: (Text) -> Text
+ assert os.path.sep == "/" or sys.platform == "win32"
+ if "/" == os.path.sep:
+ rv = path
+ else:
+ rv = path.replace(os.path.sep, "/")
+ if "\\" in rv:
+ raise ValueError("path contains \\ when separator is %s" % os.path.sep)
+ return rv
+
+
+def to_os_path(path):
+ # type: (Text) -> Text
+ assert os.path.sep == "/" or sys.platform == "win32"
+ if "\\" in path:
+ raise ValueError("normalised path contains \\")
+ if "/" == os.path.sep:
+ return path
+ return path.replace("/", os.path.sep)
+
+
+def git(path):
+ # type: (Text) -> Optional[Callable[..., Text]]
+ def gitfunc(cmd, *args):
+ # type: (Text, *Text) -> Text
+ full_cmd = ["git", cmd] + list(args)
+ try:
+ return subprocess.check_output(full_cmd, cwd=path, stderr=subprocess.STDOUT).decode('utf8')
+ except Exception as e:
+ if sys.platform == "win32" and isinstance(e, WindowsError):
+ full_cmd[0] = "git.bat"
+ return subprocess.check_output(full_cmd, cwd=path, stderr=subprocess.STDOUT).decode('utf8')
+ else:
+ raise
+
+ try:
+ # this needs to be a command that fails if we aren't in a git repo
+ gitfunc("rev-parse", "--show-toplevel")
+ except (subprocess.CalledProcessError, OSError):
+ return None
+ else:
+ return gitfunc
+
+
+class cached_property(Generic[T]):
+ def __init__(self, func):
+ # type: (Callable[[Any], T]) -> None
+ self.func = func
+ self.__doc__ = getattr(func, "__doc__")
+ self.name = func.__name__
+
+ def __get__(self, obj, cls=None):
+ # type: (Any, Optional[type]) -> T
+ if obj is None:
+ return self # type: ignore
+
+ # we can unconditionally assign as next time this won't be called
+ assert self.name not in obj.__dict__
+ rv = obj.__dict__[self.name] = self.func(obj)
+ obj.__dict__.setdefault("__cached_properties__", set()).add(self.name)
+ return rv
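A small usage sketch for the helpers above; the class and paths are hypothetical, and the import path assumes the checkout root is on sys.path:

    from tools.manifest.utils import cached_property, rel_path_to_url

    class Checkout:
        def __init__(self, root):
            self.root = root

        @cached_property
        def listing(self):
            # Computed once per instance; later reads come from obj.__dict__.
            return sorted(["b.html", "a.html"])

    c = Checkout("/path/to/wpt")
    assert c.listing is c.listing                    # second access skips the descriptor
    assert rel_path_to_url("dom/a.html") == "/dom/a.html"
    assert rel_path_to_url("dom/a.html", url_base="wpt") == "/wpt/dom/a.html"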
diff --git a/testing/web-platform/tests/tools/manifest/vcs.py b/testing/web-platform/tests/tools/manifest/vcs.py
new file mode 100644
index 0000000000..ec59f42a31
--- /dev/null
+++ b/testing/web-platform/tests/tools/manifest/vcs.py
@@ -0,0 +1,319 @@
+import abc
+import os
+import stat
+from collections import deque
+from collections.abc import MutableMapping
+
+from . import jsonlib
+from .utils import git
+
+# Cannot do `from ..gitignore import gitignore` because
+# relative import beyond toplevel throws *ImportError*!
+from gitignore import gitignore # type: ignore
+
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Dict, Optional, List, Set, Text, Iterable, Any, Tuple, Iterator
+ from .manifest import Manifest # cyclic import under MYPY guard
+ stat_result = os.stat_result
+
+ GitIgnoreCacheType = MutableMapping[bytes, bool]
+else:
+ GitIgnoreCacheType = MutableMapping
+
+
+def get_tree(tests_root, manifest, manifest_path, cache_root,
+ working_copy=True, rebuild=False):
+ # type: (Text, Manifest, Optional[Text], Optional[Text], bool, bool) -> FileSystem
+ tree = None
+ if cache_root is None:
+ cache_root = os.path.join(tests_root, ".wptcache")
+ if not os.path.exists(cache_root):
+ try:
+ os.makedirs(cache_root)
+ except OSError:
+ cache_root = None
+
+ if not working_copy:
+ raise ValueError("working_copy=False unsupported")
+
+ if tree is None:
+ tree = FileSystem(tests_root,
+ manifest.url_base,
+ manifest_path=manifest_path,
+ cache_path=cache_root,
+ rebuild=rebuild)
+ return tree
+
+
+class GitHasher:
+ def __init__(self, path):
+ # type: (Text) -> None
+ self.git = git(path)
+
+ def _local_changes(self):
+ # type: () -> Set[Text]
+ """get a set of files which have changed between HEAD and working copy"""
+ assert self.git is not None
+ # note that git runs the command with tests_root as the cwd, which may
+ # not be the root of the git repo (e.g., within a browser repo)
+ cmd = ["diff-index", "--relative", "--no-renames", "--name-only", "-z", "HEAD"]
+ data = self.git(*cmd)
+ return set(data.split("\0"))
+
+ def hash_cache(self):
+ # type: () -> Dict[Text, Optional[Text]]
+ """
+ A dict of rel_path -> current git object id if the working tree matches HEAD else None
+ """
+ hash_cache = {} # type: Dict[Text, Optional[Text]]
+
+ if self.git is None:
+ return hash_cache
+
+ # note that git runs the command with tests_root as the cwd, which may
+ # not be the root of the git repo (e.g., within a browser repo)
+ cmd = ["ls-tree", "-r", "-z", "HEAD"]
+ local_changes = self._local_changes()
+ for result in self.git(*cmd).split("\0")[:-1]: # type: Text
+ data, rel_path = result.rsplit("\t", 1)
+ hash_cache[rel_path] = None if rel_path in local_changes else data.split(" ", 3)[2]
+
+ return hash_cache
+
+
+class FileSystem:
+ def __init__(self, tests_root, url_base, cache_path, manifest_path=None, rebuild=False):
+ # type: (Text, Text, Optional[Text], Optional[Text], bool) -> None
+ self.tests_root = tests_root
+ self.url_base = url_base
+ self.ignore_cache = None
+ self.mtime_cache = None
+ tests_root_bytes = tests_root.encode("utf8")
+ if cache_path is not None:
+ if manifest_path is not None:
+ self.mtime_cache = MtimeCache(cache_path, tests_root, manifest_path, rebuild)
+ if gitignore.has_ignore(tests_root_bytes):
+ self.ignore_cache = GitIgnoreCache(cache_path, tests_root, rebuild)
+ self.path_filter = gitignore.PathFilter(tests_root_bytes,
+ extras=[b".git/"],
+ cache=self.ignore_cache)
+ git = GitHasher(tests_root)
+ self.hash_cache = git.hash_cache()
+
+ def __iter__(self):
+ # type: () -> Iterator[Tuple[Text, Optional[Text], bool]]
+ mtime_cache = self.mtime_cache
+ for dirpath, dirnames, filenames in self.path_filter(
+ walk(self.tests_root.encode("utf8"))):
+ for filename, path_stat in filenames:
+ path = os.path.join(dirpath, filename).decode("utf8")
+ if mtime_cache is None or mtime_cache.updated(path, path_stat):
+ file_hash = self.hash_cache.get(path, None)
+ yield path, file_hash, True
+ else:
+ yield path, None, False
+
+ def dump_caches(self):
+ # type: () -> None
+ for cache in [self.mtime_cache, self.ignore_cache]:
+ if cache is not None:
+ cache.dump()
+
+
+class CacheFile(metaclass=abc.ABCMeta):
+ def __init__(self, cache_root, tests_root, rebuild=False):
+ # type: (Text, Text, bool) -> None
+ self.tests_root = tests_root
+ if not os.path.exists(cache_root):
+ os.makedirs(cache_root)
+ self.path = os.path.join(cache_root, self.file_name)
+ self.modified = False
+ self.data = self.load(rebuild)
+
+ @abc.abstractproperty
+ def file_name(self):
+ # type: () -> Text
+ pass
+
+ def dump(self):
+ # type: () -> None
+ if not self.modified:
+ return
+ with open(self.path, 'w') as f:
+ jsonlib.dump_local(self.data, f)
+
+ def load(self, rebuild=False):
+ # type: (bool) -> Dict[Text, Any]
+ data = {} # type: Dict[Text, Any]
+ try:
+ if not rebuild:
+ with open(self.path) as f:
+ try:
+ data = jsonlib.load(f)
+ except ValueError:
+ pass
+ data = self.check_valid(data)
+ except OSError:
+ pass
+ return data
+
+ def check_valid(self, data):
+ # type: (Dict[Text, Any]) -> Dict[Text, Any]
+ """Check if the cached data is valid and return an updated copy of the
+ cache containing only data that can be used."""
+ return data
+
+
+class MtimeCache(CacheFile):
+ file_name = "mtime.json"
+
+ def __init__(self, cache_root, tests_root, manifest_path, rebuild=False):
+ # type: (Text, Text, Text, bool) -> None
+ self.manifest_path = manifest_path
+ super().__init__(cache_root, tests_root, rebuild)
+
+ def updated(self, rel_path, stat):
+ # type: (Text, stat_result) -> bool
+ """Return a boolean indicating whether the file changed since the cache was last updated.
+
+ This implicitly updates the cache with the new mtime data."""
+ mtime = stat.st_mtime
+ if mtime != self.data.get(rel_path):
+ self.modified = True
+ self.data[rel_path] = mtime
+ return True
+ return False
+
+ def check_valid(self, data):
+ # type: (Dict[Any, Any]) -> Dict[Any, Any]
+ if data.get("/tests_root") != self.tests_root:
+ self.modified = True
+ else:
+ if self.manifest_path is not None and os.path.exists(self.manifest_path):
+ mtime = os.path.getmtime(self.manifest_path)
+ if data.get("/manifest_path") != [self.manifest_path, mtime]:
+ self.modified = True
+ else:
+ self.modified = True
+ if self.modified:
+ data = {}
+ data["/tests_root"] = self.tests_root
+ return data
+
+ def dump(self):
+ # type: () -> None
+ if self.manifest_path is None:
+ raise ValueError
+ if not os.path.exists(self.manifest_path):
+ return
+ mtime = os.path.getmtime(self.manifest_path)
+ self.data["/manifest_path"] = [self.manifest_path, mtime]
+ self.data["/tests_root"] = self.tests_root
+ super().dump()
+
+
+class GitIgnoreCache(CacheFile, GitIgnoreCacheType):
+ file_name = "gitignore2.json"
+
+ def check_valid(self, data):
+ # type: (Dict[Any, Any]) -> Dict[Any, Any]
+ ignore_path = os.path.join(self.tests_root, ".gitignore")
+ mtime = os.path.getmtime(ignore_path)
+ if data.get("/gitignore_file") != [ignore_path, mtime]:
+ self.modified = True
+ data = {}
+ data["/gitignore_file"] = [ignore_path, mtime]
+ return data
+
+ def __contains__(self, key):
+ # type: (Any) -> bool
+ try:
+ key = key.decode("utf-8")
+ except Exception:
+ return False
+
+ return key in self.data
+
+ def __getitem__(self, key):
+ # type: (bytes) -> bool
+ real_key = key.decode("utf-8")
+ v = self.data[real_key]
+ assert isinstance(v, bool)
+ return v
+
+ def __setitem__(self, key, value):
+ # type: (bytes, bool) -> None
+ real_key = key.decode("utf-8")
+ if self.data.get(real_key) != value:
+ self.modified = True
+ self.data[real_key] = value
+
+ def __delitem__(self, key):
+ # type: (bytes) -> None
+ real_key = key.decode("utf-8")
+ del self.data[real_key]
+
+ def __iter__(self):
+ # type: () -> Iterator[bytes]
+ return (key.encode("utf-8") for key in self.data)
+
+ def __len__(self):
+ # type: () -> int
+ return len(self.data)
+
+
+def walk(root):
+ # type: (bytes) -> Iterable[Tuple[bytes, List[Tuple[bytes, stat_result]], List[Tuple[bytes, stat_result]]]]
+ """Re-implementation of os.walk. Returns an iterator over
+ (dirpath, dirnames, filenames), with some semantic differences
+ to os.walk.
+
+ This has a similar interface to os.walk, with the important difference
+ that instead of lists of filenames and directory names, it yields
+ lists of tuples of the form [(name, stat)] where stat is the result of
+ os.stat for the file. That allows reusing the same stat data in the
+ caller. It also always returns the dirpath relative to the root, with
+ the root itself being returned as the empty string.
+
+ Unlike os.walk the implementation is not recursive."""
+
+ get_stat = os.stat
+ is_dir = stat.S_ISDIR
+ is_link = stat.S_ISLNK
+ join = os.path.join
+ listdir = os.listdir
+ relpath = os.path.relpath
+
+ root = os.path.abspath(root)
+ stack = deque([(root, b"")])
+
+ while stack:
+ dir_path, rel_path = stack.popleft()
+ try:
+ # listdir may fail (e.g. on an unreadable directory); skip it and
+ # continue the walk rather than aborting.
+ names = listdir(dir_path)
+ except OSError:
+ continue
+
+ dirs, non_dirs = [], []
+ for name in names:
+ path = join(dir_path, name)
+ try:
+ path_stat = get_stat(path)
+ except OSError:
+ continue
+ if is_dir(path_stat.st_mode):
+ dirs.append((name, path_stat))
+ else:
+ non_dirs.append((name, path_stat))
+
+ yield rel_path, dirs, non_dirs
+ for name, path_stat in dirs:
+ new_path = join(dir_path, name)
+ if not is_link(path_stat.st_mode):
+ stack.append((new_path, relpath(new_path, root)))
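A short sketch of consuming walk() above, assuming it runs inside a wpt checkout so the module (and its gitignore import) resolves; the directory is a placeholder:

    from tools.manifest.vcs import walk

    total_bytes = 0
    for rel_dir, dirs, files in walk(b"/path/to/wpt/dom"):
        # rel_dir is relative to the root (b"" for the root itself); every entry
        # carries its os.stat_result so callers can reuse it without re-stat'ing.
        for name, file_stat in files:
            total_bytes += file_stat.st_size
    print(total_bytes)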
diff --git a/testing/web-platform/tests/tools/mypy.ini b/testing/web-platform/tests/tools/mypy.ini
new file mode 100644
index 0000000000..fb9b5c81c2
--- /dev/null
+++ b/testing/web-platform/tests/tools/mypy.ini
@@ -0,0 +1,114 @@
+[mypy]
+# Here some paths are excluded from even being parsed, working around cases of
+# invalid syntax we can't fix (yet) or where the code isn't in a package and
+# there's no good place to add __init__.py files. The following are excluded:
+# - tools/third_party/ (vendored dependencies)
+# - All setup.py files (avoiding duplicate module named "setup")
+# - tools/wptserve/docs/conf.py (generated code)
+# - tools/wptserve/tests/ (deliberately invalid syntax)
+exclude = (^tools/third_party/|/setup\.py$|^tools/wptserve/docs/conf.py|^tools/wptserve/tests/)
+#check_untyped_defs = True
+disallow_any_generics = True
+disallow_incomplete_defs = True
+disallow_subclassing_any = True
+disallow_untyped_calls = True
+disallow_untyped_decorators = True
+disallow_untyped_defs = True
+no_implicit_optional = True
+strict_equality = True
+warn_redundant_casts = True
+warn_return_any = True
+warn_unused_configs = True
+warn_unused_ignores = True
+warn_unreachable = True
+
+show_error_codes = True
+
+# Ignore missing or untyped libraries.
+
+[mypy-github.*]
+ignore_missing_imports = True
+
+[mypy-h2.*]
+ignore_missing_imports = True
+
+[mypy-hpack.*]
+ignore_missing_imports = True
+
+[mypy-html5lib.*]
+ignore_missing_imports = True
+
+[mypy-hyperframe.*]
+ignore_missing_imports = True
+
+[mypy-hypothesis.*]
+ignore_missing_imports = True
+
+[mypy-marionette_driver.*]
+ignore_missing_imports = True
+
+[mypy-mod_pywebsocket.*]
+ignore_missing_imports = True
+
+[mypy-mozcrash.*]
+ignore_missing_imports = True
+
+[mypy-mozdebug.*]
+ignore_missing_imports = True
+
+[mypy-mozdevice.*]
+ignore_missing_imports = True
+
+[mypy-mozinfo.*]
+ignore_missing_imports = True
+
+[mypy-mozinstall.*]
+ignore_missing_imports = True
+
+[mypy-mozleak.*]
+ignore_missing_imports = True
+
+[mypy-mozlog.*]
+ignore_missing_imports = True
+
+[mypy-moznetwork.*]
+ignore_missing_imports = True
+
+[mypy-mozprocess.*]
+ignore_missing_imports = True
+
+[mypy-mozprofile.*]
+ignore_missing_imports = True
+
+[mypy-mozrunner.*]
+ignore_missing_imports = True
+
+[mypy-mozversion.*]
+ignore_missing_imports = True
+
+[mypy-PIL.*]
+ignore_missing_imports = True
+
+[mypy-packaging.*]
+ignore_missing_imports = True
+
+[mypy-psutil.*]
+ignore_missing_imports = True
+
+[mypy-pytest.*]
+ignore_missing_imports = True
+
+[mypy-selenium.*]
+ignore_missing_imports = True
+
+[mypy-taskcluster.*]
+ignore_missing_imports = True
+
+[mypy-ua_parser.*]
+ignore_missing_imports = True
+
+[mypy-websockets.*]
+ignore_missing_imports = True
+
+[mypy-zstandard.*]
+ignore_missing_imports = True
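A sketch of running mypy against the tools packages with this configuration, from the tools/ directory; the target directory is an example, not the full set that CI checks:

    import subprocess

    # --config-file is a standard mypy option; "manifest" is an example target.
    subprocess.run(["mypy", "--config-file", "mypy.ini", "manifest"], check=True)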
diff --git a/testing/web-platform/tests/tools/pytest.ini b/testing/web-platform/tests/tools/pytest.ini
new file mode 100644
index 0000000000..c1e428d071
--- /dev/null
+++ b/testing/web-platform/tests/tools/pytest.ini
@@ -0,0 +1,9 @@
+[pytest]
+# Directories with their own tox.ini: wave, wpt, wptrunner
+# Python 3 only: quic (it should have its own tox.ini eventually)
+norecursedirs = .* {arch} *.egg third_party wave wpt wptrunner quic
+xfail_strict = true
+addopts = --strict-markers
+markers =
+ slow: marks tests as slow (deselect with '-m "not slow"')
+ remote_network
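A sketch of how a test module might use the markers registered above (test names are hypothetical); slow tests can then be deselected with -m "not slow" as the config notes:

    import pytest

    @pytest.mark.slow
    def test_full_manifest_rebuild():
        ...

    @pytest.mark.remote_network
    def test_download_from_github():
        ...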
diff --git a/testing/web-platform/tests/tools/requirements_flake8.txt b/testing/web-platform/tests/tools/requirements_flake8.txt
new file mode 100644
index 0000000000..93cacd4384
--- /dev/null
+++ b/testing/web-platform/tests/tools/requirements_flake8.txt
@@ -0,0 +1,4 @@
+flake8==4.0.1
+pycodestyle==2.8.0
+pyflakes==2.4.0
+pep8-naming==0.13.0
diff --git a/testing/web-platform/tests/tools/requirements_mypy.txt b/testing/web-platform/tests/tools/requirements_mypy.txt
new file mode 100644
index 0000000000..7948c20358
--- /dev/null
+++ b/testing/web-platform/tests/tools/requirements_mypy.txt
@@ -0,0 +1,12 @@
+mypy==0.961
+mypy-extensions==0.4.3
+toml==0.10.2
+typed-ast==1.5.4
+types-atomicwrites==1.4.5
+types-python-dateutil==2.8.19
+types-PyYAML==6.0.9
+types-requests==2.28.11.5
+types-setuptools==63.2.3
+types-six==1.16.19
+types-ujson==4.2.1
+typing-extensions==4.1.1
diff --git a/testing/web-platform/tests/tools/requirements_pytest.txt b/testing/web-platform/tests/tools/requirements_pytest.txt
new file mode 100644
index 0000000000..14fd0f7692
--- /dev/null
+++ b/testing/web-platform/tests/tools/requirements_pytest.txt
@@ -0,0 +1,3 @@
+pytest==7.0.1
+pytest-cov==3.0.0
+hypothesis==6.31.3
diff --git a/testing/web-platform/tests/tools/requirements_tests.txt b/testing/web-platform/tests/tools/requirements_tests.txt
new file mode 100644
index 0000000000..4e1e357d28
--- /dev/null
+++ b/testing/web-platform/tests/tools/requirements_tests.txt
@@ -0,0 +1,5 @@
+httpx[http2]==0.22.0
+json-e==4.4.3
+jsonschema==3.2.0
+pyyaml==6.0
+taskcluster==44.22.1
diff --git a/testing/web-platform/tests/tools/runner/css/bootstrap-theme.min.css b/testing/web-platform/tests/tools/runner/css/bootstrap-theme.min.css
new file mode 100644
index 0000000000..61358b13d0
--- /dev/null
+++ b/testing/web-platform/tests/tools/runner/css/bootstrap-theme.min.css
@@ -0,0 +1,5 @@
+/*!
+ * Bootstrap v3.3.5 (http://getbootstrap.com)
+ * Copyright 2011-2015 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ */.btn-danger,.btn-default,.btn-info,.btn-primary,.btn-success,.btn-warning{text-shadow:0 -1px 0 rgba(0,0,0,.2);-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-danger.active,.btn-danger:active,.btn-default.active,.btn-default:active,.btn-info.active,.btn-info:active,.btn-primary.active,.btn-primary:active,.btn-success.active,.btn-success:active,.btn-warning.active,.btn-warning:active{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-danger.disabled,.btn-danger[disabled],.btn-default.disabled,.btn-default[disabled],.btn-info.disabled,.btn-info[disabled],.btn-primary.disabled,.btn-primary[disabled],.btn-success.disabled,.btn-success[disabled],.btn-warning.disabled,.btn-warning[disabled],fieldset[disabled] .btn-danger,fieldset[disabled] .btn-default,fieldset[disabled] .btn-info,fieldset[disabled] .btn-primary,fieldset[disabled] .btn-success,fieldset[disabled] .btn-warning{-webkit-box-shadow:none;box-shadow:none}.btn-danger .badge,.btn-default .badge,.btn-info .badge,.btn-primary .badge,.btn-success .badge,.btn-warning .badge{text-shadow:none}.btn.active,.btn:active{background-image:none}.btn-default{text-shadow:0 1px 0 #fff;background-image:-webkit-linear-gradient(top,#fff 0,#e0e0e0 100%);background-image:-o-linear-gradient(top,#fff 0,#e0e0e0 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#fff),to(#e0e0e0));background-image:linear-gradient(to bottom,#fff 0,#e0e0e0 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#ffe0e0e0', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);background-repeat:repeat-x;border-color:#dbdbdb;border-color:#ccc}.btn-default:focus,.btn-default:hover{background-color:#e0e0e0;background-position:0 -15px}.btn-default.active,.btn-default:active{background-color:#e0e0e0;border-color:#dbdbdb}.btn-default.disabled,.btn-default.disabled.active,.btn-default.disabled.focus,.btn-default.disabled:active,.btn-default.disabled:focus,.btn-default.disabled:hover,.btn-default[disabled],.btn-default[disabled].active,.btn-default[disabled].focus,.btn-default[disabled]:active,.btn-default[disabled]:focus,.btn-default[disabled]:hover,fieldset[disabled] .btn-default,fieldset[disabled] .btn-default.active,fieldset[disabled] .btn-default.focus,fieldset[disabled] .btn-default:active,fieldset[disabled] .btn-default:focus,fieldset[disabled] .btn-default:hover{background-color:#e0e0e0;background-image:none}.btn-primary{background-image:-webkit-linear-gradient(top,#337ab7 0,#265a88 100%);background-image:-o-linear-gradient(top,#337ab7 0,#265a88 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#337ab7),to(#265a88));background-image:linear-gradient(to bottom,#337ab7 0,#265a88 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff337ab7', endColorstr='#ff265a88', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);background-repeat:repeat-x;border-color:#245580}.btn-primary:focus,.btn-primary:hover{background-color:#265a88;background-position:0 
-15px}.btn-primary.active,.btn-primary:active{background-color:#265a88;border-color:#245580}.btn-primary.disabled,.btn-primary.disabled.active,.btn-primary.disabled.focus,.btn-primary.disabled:active,.btn-primary.disabled:focus,.btn-primary.disabled:hover,.btn-primary[disabled],.btn-primary[disabled].active,.btn-primary[disabled].focus,.btn-primary[disabled]:active,.btn-primary[disabled]:focus,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary,fieldset[disabled] .btn-primary.active,fieldset[disabled] .btn-primary.focus,fieldset[disabled] .btn-primary:active,fieldset[disabled] .btn-primary:focus,fieldset[disabled] .btn-primary:hover{background-color:#265a88;background-image:none}.btn-success{background-image:-webkit-linear-gradient(top,#5cb85c 0,#419641 100%);background-image:-o-linear-gradient(top,#5cb85c 0,#419641 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#5cb85c),to(#419641));background-image:linear-gradient(to bottom,#5cb85c 0,#419641 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5cb85c', endColorstr='#ff419641', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);background-repeat:repeat-x;border-color:#3e8f3e}.btn-success:focus,.btn-success:hover{background-color:#419641;background-position:0 -15px}.btn-success.active,.btn-success:active{background-color:#419641;border-color:#3e8f3e}.btn-success.disabled,.btn-success.disabled.active,.btn-success.disabled.focus,.btn-success.disabled:active,.btn-success.disabled:focus,.btn-success.disabled:hover,.btn-success[disabled],.btn-success[disabled].active,.btn-success[disabled].focus,.btn-success[disabled]:active,.btn-success[disabled]:focus,.btn-success[disabled]:hover,fieldset[disabled] .btn-success,fieldset[disabled] .btn-success.active,fieldset[disabled] .btn-success.focus,fieldset[disabled] .btn-success:active,fieldset[disabled] .btn-success:focus,fieldset[disabled] .btn-success:hover{background-color:#419641;background-image:none}.btn-info{background-image:-webkit-linear-gradient(top,#5bc0de 0,#2aabd2 100%);background-image:-o-linear-gradient(top,#5bc0de 0,#2aabd2 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#5bc0de),to(#2aabd2));background-image:linear-gradient(to bottom,#5bc0de 0,#2aabd2 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff2aabd2', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);background-repeat:repeat-x;border-color:#28a4c9}.btn-info:focus,.btn-info:hover{background-color:#2aabd2;background-position:0 -15px}.btn-info.active,.btn-info:active{background-color:#2aabd2;border-color:#28a4c9}.btn-info.disabled,.btn-info.disabled.active,.btn-info.disabled.focus,.btn-info.disabled:active,.btn-info.disabled:focus,.btn-info.disabled:hover,.btn-info[disabled],.btn-info[disabled].active,.btn-info[disabled].focus,.btn-info[disabled]:active,.btn-info[disabled]:focus,.btn-info[disabled]:hover,fieldset[disabled] .btn-info,fieldset[disabled] .btn-info.active,fieldset[disabled] .btn-info.focus,fieldset[disabled] .btn-info:active,fieldset[disabled] .btn-info:focus,fieldset[disabled] .btn-info:hover{background-color:#2aabd2;background-image:none}.btn-warning{background-image:-webkit-linear-gradient(top,#f0ad4e 0,#eb9316 100%);background-image:-o-linear-gradient(top,#f0ad4e 0,#eb9316 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#f0ad4e),to(#eb9316));background-image:linear-gradient(to bottom,#f0ad4e 0,#eb9316 
100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff0ad4e', endColorstr='#ffeb9316', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);background-repeat:repeat-x;border-color:#e38d13}.btn-warning:focus,.btn-warning:hover{background-color:#eb9316;background-position:0 -15px}.btn-warning.active,.btn-warning:active{background-color:#eb9316;border-color:#e38d13}.btn-warning.disabled,.btn-warning.disabled.active,.btn-warning.disabled.focus,.btn-warning.disabled:active,.btn-warning.disabled:focus,.btn-warning.disabled:hover,.btn-warning[disabled],.btn-warning[disabled].active,.btn-warning[disabled].focus,.btn-warning[disabled]:active,.btn-warning[disabled]:focus,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning,fieldset[disabled] .btn-warning.active,fieldset[disabled] .btn-warning.focus,fieldset[disabled] .btn-warning:active,fieldset[disabled] .btn-warning:focus,fieldset[disabled] .btn-warning:hover{background-color:#eb9316;background-image:none}.btn-danger{background-image:-webkit-linear-gradient(top,#d9534f 0,#c12e2a 100%);background-image:-o-linear-gradient(top,#d9534f 0,#c12e2a 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#d9534f),to(#c12e2a));background-image:linear-gradient(to bottom,#d9534f 0,#c12e2a 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9534f', endColorstr='#ffc12e2a', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);background-repeat:repeat-x;border-color:#b92c28}.btn-danger:focus,.btn-danger:hover{background-color:#c12e2a;background-position:0 -15px}.btn-danger.active,.btn-danger:active{background-color:#c12e2a;border-color:#b92c28}.btn-danger.disabled,.btn-danger.disabled.active,.btn-danger.disabled.focus,.btn-danger.disabled:active,.btn-danger.disabled:focus,.btn-danger.disabled:hover,.btn-danger[disabled],.btn-danger[disabled].active,.btn-danger[disabled].focus,.btn-danger[disabled]:active,.btn-danger[disabled]:focus,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger,fieldset[disabled] .btn-danger.active,fieldset[disabled] .btn-danger.focus,fieldset[disabled] .btn-danger:active,fieldset[disabled] .btn-danger:focus,fieldset[disabled] .btn-danger:hover{background-color:#c12e2a;background-image:none}.img-thumbnail,.thumbnail{-webkit-box-shadow:0 1px 2px rgba(0,0,0,.075);box-shadow:0 1px 2px rgba(0,0,0,.075)}.dropdown-menu>li>a:focus,.dropdown-menu>li>a:hover{background-color:#e8e8e8;background-image:-webkit-linear-gradient(top,#f5f5f5 0,#e8e8e8 100%);background-image:-o-linear-gradient(top,#f5f5f5 0,#e8e8e8 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#f5f5f5),to(#e8e8e8));background-image:linear-gradient(to bottom,#f5f5f5 0,#e8e8e8 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#ffe8e8e8', GradientType=0);background-repeat:repeat-x}.dropdown-menu>.active>a,.dropdown-menu>.active>a:focus,.dropdown-menu>.active>a:hover{background-color:#2e6da4;background-image:-webkit-linear-gradient(top,#337ab7 0,#2e6da4 100%);background-image:-o-linear-gradient(top,#337ab7 0,#2e6da4 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#337ab7),to(#2e6da4));background-image:linear-gradient(to bottom,#337ab7 0,#2e6da4 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff337ab7', endColorstr='#ff2e6da4', GradientType=0);background-repeat:repeat-x}.navbar-default{background-image:-webkit-linear-gradient(top,#fff 0,#f8f8f8 
100%);background-image:-o-linear-gradient(top,#fff 0,#f8f8f8 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#fff),to(#f8f8f8));background-image:linear-gradient(to bottom,#fff 0,#f8f8f8 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#fff8f8f8', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);background-repeat:repeat-x;border-radius:4px;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 5px rgba(0,0,0,.075);box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 5px rgba(0,0,0,.075)}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.open>a{background-image:-webkit-linear-gradient(top,#dbdbdb 0,#e2e2e2 100%);background-image:-o-linear-gradient(top,#dbdbdb 0,#e2e2e2 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#dbdbdb),to(#e2e2e2));background-image:linear-gradient(to bottom,#dbdbdb 0,#e2e2e2 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdbdbdb', endColorstr='#ffe2e2e2', GradientType=0);background-repeat:repeat-x;-webkit-box-shadow:inset 0 3px 9px rgba(0,0,0,.075);box-shadow:inset 0 3px 9px rgba(0,0,0,.075)}.navbar-brand,.navbar-nav>li>a{text-shadow:0 1px 0 rgba(255,255,255,.25)}.navbar-inverse{background-image:-webkit-linear-gradient(top,#3c3c3c 0,#222 100%);background-image:-o-linear-gradient(top,#3c3c3c 0,#222 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#3c3c3c),to(#222));background-image:linear-gradient(to bottom,#3c3c3c 0,#222 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff3c3c3c', endColorstr='#ff222222', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);background-repeat:repeat-x;border-radius:4px}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.open>a{background-image:-webkit-linear-gradient(top,#080808 0,#0f0f0f 100%);background-image:-o-linear-gradient(top,#080808 0,#0f0f0f 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#080808),to(#0f0f0f));background-image:linear-gradient(to bottom,#080808 0,#0f0f0f 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff080808', endColorstr='#ff0f0f0f', GradientType=0);background-repeat:repeat-x;-webkit-box-shadow:inset 0 3px 9px rgba(0,0,0,.25);box-shadow:inset 0 3px 9px rgba(0,0,0,.25)}.navbar-inverse .navbar-brand,.navbar-inverse .navbar-nav>li>a{text-shadow:0 -1px 0 rgba(0,0,0,.25)}.navbar-fixed-bottom,.navbar-fixed-top,.navbar-static-top{border-radius:0}@media (max-width:767px){.navbar .navbar-nav .open .dropdown-menu>.active>a,.navbar .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar .navbar-nav .open .dropdown-menu>.active>a:hover{color:#fff;background-image:-webkit-linear-gradient(top,#337ab7 0,#2e6da4 100%);background-image:-o-linear-gradient(top,#337ab7 0,#2e6da4 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#337ab7),to(#2e6da4));background-image:linear-gradient(to bottom,#337ab7 0,#2e6da4 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff337ab7', endColorstr='#ff2e6da4', GradientType=0);background-repeat:repeat-x}}.alert{text-shadow:0 1px 0 rgba(255,255,255,.2);-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.25),0 1px 2px rgba(0,0,0,.05);box-shadow:inset 0 1px 0 rgba(255,255,255,.25),0 1px 2px rgba(0,0,0,.05)}.alert-success{background-image:-webkit-linear-gradient(top,#dff0d8 0,#c8e5bc 100%);background-image:-o-linear-gradient(top,#dff0d8 0,#c8e5bc 
100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#dff0d8),to(#c8e5bc));background-image:linear-gradient(to bottom,#dff0d8 0,#c8e5bc 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdff0d8', endColorstr='#ffc8e5bc', GradientType=0);background-repeat:repeat-x;border-color:#b2dba1}.alert-info{background-image:-webkit-linear-gradient(top,#d9edf7 0,#b9def0 100%);background-image:-o-linear-gradient(top,#d9edf7 0,#b9def0 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#d9edf7),to(#b9def0));background-image:linear-gradient(to bottom,#d9edf7 0,#b9def0 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9edf7', endColorstr='#ffb9def0', GradientType=0);background-repeat:repeat-x;border-color:#9acfea}.alert-warning{background-image:-webkit-linear-gradient(top,#fcf8e3 0,#f8efc0 100%);background-image:-o-linear-gradient(top,#fcf8e3 0,#f8efc0 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#fcf8e3),to(#f8efc0));background-image:linear-gradient(to bottom,#fcf8e3 0,#f8efc0 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fff8efc0', GradientType=0);background-repeat:repeat-x;border-color:#f5e79e}.alert-danger{background-image:-webkit-linear-gradient(top,#f2dede 0,#e7c3c3 100%);background-image:-o-linear-gradient(top,#f2dede 0,#e7c3c3 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#f2dede),to(#e7c3c3));background-image:linear-gradient(to bottom,#f2dede 0,#e7c3c3 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2dede', endColorstr='#ffe7c3c3', GradientType=0);background-repeat:repeat-x;border-color:#dca7a7}.progress{background-image:-webkit-linear-gradient(top,#ebebeb 0,#f5f5f5 100%);background-image:-o-linear-gradient(top,#ebebeb 0,#f5f5f5 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#ebebeb),to(#f5f5f5));background-image:linear-gradient(to bottom,#ebebeb 0,#f5f5f5 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffebebeb', endColorstr='#fff5f5f5', GradientType=0);background-repeat:repeat-x}.progress-bar{background-image:-webkit-linear-gradient(top,#337ab7 0,#286090 100%);background-image:-o-linear-gradient(top,#337ab7 0,#286090 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#337ab7),to(#286090));background-image:linear-gradient(to bottom,#337ab7 0,#286090 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff337ab7', endColorstr='#ff286090', GradientType=0);background-repeat:repeat-x}.progress-bar-success{background-image:-webkit-linear-gradient(top,#5cb85c 0,#449d44 100%);background-image:-o-linear-gradient(top,#5cb85c 0,#449d44 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#5cb85c),to(#449d44));background-image:linear-gradient(to bottom,#5cb85c 0,#449d44 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5cb85c', endColorstr='#ff449d44', GradientType=0);background-repeat:repeat-x}.progress-bar-info{background-image:-webkit-linear-gradient(top,#5bc0de 0,#31b0d5 100%);background-image:-o-linear-gradient(top,#5bc0de 0,#31b0d5 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#5bc0de),to(#31b0d5));background-image:linear-gradient(to bottom,#5bc0de 0,#31b0d5 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff31b0d5', 
GradientType=0);background-repeat:repeat-x}.progress-bar-warning{background-image:-webkit-linear-gradient(top,#f0ad4e 0,#ec971f 100%);background-image:-o-linear-gradient(top,#f0ad4e 0,#ec971f 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#f0ad4e),to(#ec971f));background-image:linear-gradient(to bottom,#f0ad4e 0,#ec971f 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff0ad4e', endColorstr='#ffec971f', GradientType=0);background-repeat:repeat-x}.progress-bar-danger{background-image:-webkit-linear-gradient(top,#d9534f 0,#c9302c 100%);background-image:-o-linear-gradient(top,#d9534f 0,#c9302c 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#d9534f),to(#c9302c));background-image:linear-gradient(to bottom,#d9534f 0,#c9302c 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9534f', endColorstr='#ffc9302c', GradientType=0);background-repeat:repeat-x}.progress-bar-striped{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.list-group{border-radius:4px;-webkit-box-shadow:0 1px 2px rgba(0,0,0,.075);box-shadow:0 1px 2px rgba(0,0,0,.075)}.list-group-item.active,.list-group-item.active:focus,.list-group-item.active:hover{text-shadow:0 -1px 0 #286090;background-image:-webkit-linear-gradient(top,#337ab7 0,#2b669a 100%);background-image:-o-linear-gradient(top,#337ab7 0,#2b669a 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#337ab7),to(#2b669a));background-image:linear-gradient(to bottom,#337ab7 0,#2b669a 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff337ab7', endColorstr='#ff2b669a', GradientType=0);background-repeat:repeat-x;border-color:#2b669a}.list-group-item.active .badge,.list-group-item.active:focus .badge,.list-group-item.active:hover .badge{text-shadow:none}.panel{-webkit-box-shadow:0 1px 2px rgba(0,0,0,.05);box-shadow:0 1px 2px rgba(0,0,0,.05)}.panel-default>.panel-heading{background-image:-webkit-linear-gradient(top,#f5f5f5 0,#e8e8e8 100%);background-image:-o-linear-gradient(top,#f5f5f5 0,#e8e8e8 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#f5f5f5),to(#e8e8e8));background-image:linear-gradient(to bottom,#f5f5f5 0,#e8e8e8 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#ffe8e8e8', GradientType=0);background-repeat:repeat-x}.panel-primary>.panel-heading{background-image:-webkit-linear-gradient(top,#337ab7 0,#2e6da4 100%);background-image:-o-linear-gradient(top,#337ab7 0,#2e6da4 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#337ab7),to(#2e6da4));background-image:linear-gradient(to bottom,#337ab7 0,#2e6da4 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff337ab7', endColorstr='#ff2e6da4', GradientType=0);background-repeat:repeat-x}.panel-success>.panel-heading{background-image:-webkit-linear-gradient(top,#dff0d8 0,#d0e9c6 100%);background-image:-o-linear-gradient(top,#dff0d8 0,#d0e9c6 100%);background-image:-webkit-gradient(linear,left 
top,left bottom,from(#dff0d8),to(#d0e9c6));background-image:linear-gradient(to bottom,#dff0d8 0,#d0e9c6 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdff0d8', endColorstr='#ffd0e9c6', GradientType=0);background-repeat:repeat-x}.panel-info>.panel-heading{background-image:-webkit-linear-gradient(top,#d9edf7 0,#c4e3f3 100%);background-image:-o-linear-gradient(top,#d9edf7 0,#c4e3f3 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#d9edf7),to(#c4e3f3));background-image:linear-gradient(to bottom,#d9edf7 0,#c4e3f3 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9edf7', endColorstr='#ffc4e3f3', GradientType=0);background-repeat:repeat-x}.panel-warning>.panel-heading{background-image:-webkit-linear-gradient(top,#fcf8e3 0,#faf2cc 100%);background-image:-o-linear-gradient(top,#fcf8e3 0,#faf2cc 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#fcf8e3),to(#faf2cc));background-image:linear-gradient(to bottom,#fcf8e3 0,#faf2cc 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fffaf2cc', GradientType=0);background-repeat:repeat-x}.panel-danger>.panel-heading{background-image:-webkit-linear-gradient(top,#f2dede 0,#ebcccc 100%);background-image:-o-linear-gradient(top,#f2dede 0,#ebcccc 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#f2dede),to(#ebcccc));background-image:linear-gradient(to bottom,#f2dede 0,#ebcccc 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2dede', endColorstr='#ffebcccc', GradientType=0);background-repeat:repeat-x}.well{background-image:-webkit-linear-gradient(top,#e8e8e8 0,#f5f5f5 100%);background-image:-o-linear-gradient(top,#e8e8e8 0,#f5f5f5 100%);background-image:-webkit-gradient(linear,left top,left bottom,from(#e8e8e8),to(#f5f5f5));background-image:linear-gradient(to bottom,#e8e8e8 0,#f5f5f5 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffe8e8e8', endColorstr='#fff5f5f5', GradientType=0);background-repeat:repeat-x;border-color:#dcdcdc;-webkit-box-shadow:inset 0 1px 3px rgba(0,0,0,.05),0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 3px rgba(0,0,0,.05),0 1px 0 rgba(255,255,255,.1)} \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/runner/css/bootstrap.min.css b/testing/web-platform/tests/tools/runner/css/bootstrap.min.css
new file mode 100644
index 0000000000..d65c66b1ba
--- /dev/null
+++ b/testing/web-platform/tests/tools/runner/css/bootstrap.min.css
@@ -0,0 +1,5 @@
+/*!
+ * Bootstrap v3.3.5 (http://getbootstrap.com)
+ * Copyright 2011-2015 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ *//*! normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */html{font-family:sans-serif;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}dfn{font-style:italic}h1{margin:.67em 0;font-size:2em}mark{color:#000;background:#ff0}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{height:0;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace,monospace;font-size:1em}button,input,optgroup,select,textarea{margin:0;font:inherit;color:inherit}button{overflow:visible}button,select{text-transform:none}button,html input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}input{line-height:normal}input[type=checkbox],input[type=radio]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}fieldset{padding:.35em .625em .75em;margin:0 2px;border:1px solid silver}legend{padding:0;border:0}textarea{overflow:auto}optgroup{font-weight:700}table{border-spacing:0;border-collapse:collapse}td,th{padding:0}/*! 
Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{*,:after,:before{color:#000!important;text-shadow:none!important;background:0 0!important;-webkit-box-shadow:none!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="javascript:"]:after,a[href^="#"]:after{content:""}blockquote,pre{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px solid #000}.table{border-collapse:collapse!important}.table td,.table th{background-color:#fff!important}.table-bordered td,.table-bordered th{border:1px solid #ddd!important}}@font-face{font-family:'Glyphicons Halflings';src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix) format('embedded-opentype'),url(../fonts/glyphicons-halflings-regular.woff2) format('woff2'),url(../fonts/glyphicons-halflings-regular.woff) format('woff'),url(../fonts/glyphicons-halflings-regular.ttf) format('truetype'),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular) format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons Halflings';font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\2a"}.glyphicon-plus:before{content:"\2b"}.glyphicon-eur:before,.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.glyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tag
s:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before
{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{content:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211"}.g
lyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{content:"\e226"}.glyphicon-bitcoin:before{content:"\e227"}.glyphicon-btc:before{content:"\e227"}.glyphicon-xbt:before{content:"\e227"}.glyphicon-yen:before{content:"\00a5"}.glyphicon-jpy:before{content:"\00a5"}.glyphicon-ruble:before{content:"\20bd"}.glyphicon-rub:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}:after,:before{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:rgba(0,0,0,0)}body{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857143;color:#333;background-color:#fff}button,input,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#337ab7;text-decoration:none}a:focus,a:hover{color:#23527c;text-decoration:underline}a:focus{outline:thin dotted;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}figure{margin:0}img{vertical-align:middle}.carousel-inner>.item>a>img,.carousel-inner>.item>img,.img-responsive,.thumbnail a>img,.thumbnail>img{display:block;max-width:100%;height:auto}.img-rounded{border-radius:6px}.img-thumbnail{display:inline-block;max-width:100%;height:auto;padding:4px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s 
ease-in-out}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid #eee}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role=button]{cursor:pointer}.h1,.h2,.h3,.h4,.h5,.h6,h1,h2,h3,h4,h5,h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-weight:400;line-height:1;color:#777}.h1,.h2,.h3,h1,h2,h3{margin-top:20px;margin-bottom:10px}.h1 .small,.h1 small,.h2 .small,.h2 small,.h3 .small,.h3 small,h1 .small,h1 small,h2 .small,h2 small,h3 .small,h3 small{font-size:65%}.h4,.h5,.h6,h4,h5,h6{margin-top:10px;margin-bottom:10px}.h4 .small,.h4 small,.h5 .small,.h5 small,.h6 .small,.h6 small,h4 .small,h4 small,h5 .small,h5 small,h6 .small,h6 small{font-size:75%}.h1,h1{font-size:36px}.h2,h2{font-size:30px}.h3,h3{font-size:24px}.h4,h4{font-size:18px}.h5,h5{font-size:14px}.h6,h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}@media (min-width:768px){.lead{font-size:21px}}.small,small{font-size:85%}.mark,mark{padding:.2em;background-color:#fcf8e3}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#777}.text-primary{color:#337ab7}a.text-primary:focus,a.text-primary:hover{color:#286090}.text-success{color:#3c763d}a.text-success:focus,a.text-success:hover{color:#2b542c}.text-info{color:#31708f}a.text-info:focus,a.text-info:hover{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:focus,a.text-warning:hover{color:#66512c}.text-danger{color:#a94442}a.text-danger:focus,a.text-danger:hover{color:#843534}.bg-primary{color:#fff;background-color:#337ab7}a.bg-primary:focus,a.bg-primary:hover{background-color:#286090}.bg-success{background-color:#dff0d8}a.bg-success:focus,a.bg-success:hover{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:focus,a.bg-info:hover{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:focus,a.bg-warning:hover{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:focus,a.bg-danger:hover{background-color:#e4b9b9}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}ol,ul{margin-top:0;margin-bottom:10px}ol ol,ol ul,ul ol,ul ul{margin-bottom:0}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;margin-left:-5px;list-style:none}.list-inline>li{display:inline-block;padding-right:5px;padding-left:5px}dl{margin-top:0;margin-bottom:20px}dd,dt{line-height:1.42857143}dt{font-weight:700}dd{margin-left:0}@media (min-width:768px){.dl-horizontal dt{float:left;width:160px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}}abbr[data-original-title],abbr[title]{cursor:help;border-bottom:1px dotted #777}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid 
#eee}blockquote ol:last-child,blockquote p:last-child,blockquote ul:last-child{margin-bottom:0}blockquote .small,blockquote footer,blockquote small{display:block;font-size:80%;line-height:1.42857143;color:#777}blockquote .small:before,blockquote footer:before,blockquote small:before{content:'\2014 \00A0'}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;text-align:right;border-right:5px solid #eee;border-left:0}.blockquote-reverse .small:before,.blockquote-reverse footer:before,.blockquote-reverse small:before,blockquote.pull-right .small:before,blockquote.pull-right footer:before,blockquote.pull-right small:before{content:''}.blockquote-reverse .small:after,.blockquote-reverse footer:after,.blockquote-reverse small:after,blockquote.pull-right .small:after,blockquote.pull-right footer:after,blockquote.pull-right small:after{content:'\00A0 \2014'}address{margin-bottom:20px;font-style:normal;line-height:1.42857143}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{padding:2px 4px;font-size:90%;color:#c7254e;background-color:#f9f2f4;border-radius:4px}kbd{padding:2px 4px;font-size:90%;color:#fff;background-color:#333;border-radius:3px;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.25);box-shadow:inset 0 -1px 0 rgba(0,0,0,.25)}kbd kbd{padding:0;font-size:100%;font-weight:700;-webkit-box-shadow:none;box-shadow:none}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:1.42857143;color:#333;word-break:break-all;word-wrap:break-word;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}@media (min-width:768px){.container{width:750px}}@media (min-width:992px){.container{width:970px}}@media 
(min-width:1200px){.container{width:1170px}}.container-fluid{padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}.row{margin-right:-15px;margin-left:-15px}.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{position:relative;min-height:1px;padding-right:15px;padding-left:15px}.col-xs-1,.col-xs-10,.col-xs-11,.col-xs-12,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9{float:left}.col-xs-12{width:100%}.col-xs-11{width:91.66666667%}.col-xs-10{width:83.33333333%}.col-xs-9{width:75%}.col-xs-8{width:66.66666667%}.col-xs-7{width:58.33333333%}.col-xs-6{width:50%}.col-xs-5{width:41.66666667%}.col-xs-4{width:33.33333333%}.col-xs-3{width:25%}.col-xs-2{width:16.66666667%}.col-xs-1{width:8.33333333%}.col-xs-pull-12{right:100%}.col-xs-pull-11{right:91.66666667%}.col-xs-pull-10{right:83.33333333%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.66666667%}.col-xs-pull-7{right:58.33333333%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666667%}.col-xs-pull-4{right:33.33333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.66666667%}.col-xs-pull-1{right:8.33333333%}.col-xs-pull-0{right:auto}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666667%}.col-xs-push-10{left:83.33333333%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666667%}.col-xs-push-7{left:58.33333333%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666667%}.col-xs-push-4{left:33.33333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.66666667%}.col-xs-push-1{left:8.33333333%}.col-xs-push-0{left:auto}.col-xs-offset-12{margin-left:100%}.col-xs-offset-11{margin-left:91.66666667%}.col-xs-offset-10{margin-left:83.33333333%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666667%}.col-xs-offset-7{margin-left:58.33333333%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666667%}.col-xs-offset-4{margin-left:33.33333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.66666667%}.col-xs-offset-1{margin-left:8.33333333%}.col-xs-offset-0{margin-left:0}@media 
(min-width:768px){.col-sm-1,.col-sm-10,.col-sm-11,.col-sm-12,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666667%}.col-sm-10{width:83.33333333%}.col-sm-9{width:75%}.col-sm-8{width:66.66666667%}.col-sm-7{width:58.33333333%}.col-sm-6{width:50%}.col-sm-5{width:41.66666667%}.col-sm-4{width:33.33333333%}.col-sm-3{width:25%}.col-sm-2{width:16.66666667%}.col-sm-1{width:8.33333333%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.66666667%}.col-sm-pull-10{right:83.33333333%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666667%}.col-sm-pull-7{right:58.33333333%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666667%}.col-sm-pull-4{right:33.33333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.66666667%}.col-sm-pull-1{right:8.33333333%}.col-sm-pull-0{right:auto}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666667%}.col-sm-push-10{left:83.33333333%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666667%}.col-sm-push-7{left:58.33333333%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666667%}.col-sm-push-4{left:33.33333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.66666667%}.col-sm-push-1{left:8.33333333%}.col-sm-push-0{left:auto}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666667%}.col-sm-offset-10{margin-left:83.33333333%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666667%}.col-sm-offset-7{margin-left:58.33333333%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-5{margin-left:41.66666667%}.col-sm-offset-4{margin-left:33.33333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.66666667%}.col-sm-offset-1{margin-left:8.33333333%}.col-sm-offset-0{margin-left:0}}@media (min-width:992px){.col-md-1,.col-md-10,.col-md-11,.col-md-12,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666667%}.col-md-10{width:83.33333333%}.col-md-9{width:75%}.col-md-8{width:66.66666667%}.col-md-7{width:58.33333333%}.col-md-6{width:50%}.col-md-5{width:41.66666667%}.col-md-4{width:33.33333333%}.col-md-3{width:25%}.col-md-2{width:16.66666667%}.col-md-1{width:8.33333333%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666667%}.col-md-pull-10{right:83.33333333%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666667%}.col-md-pull-7{right:58.33333333%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666667%}.col-md-pull-4{right:33.33333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.66666667%}.col-md-pull-1{right:8.33333333%}.col-md-pull-0{right:auto}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666667%}.col-md-push-10{left:83.33333333%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666667%}.col-md-push-7{left:58.33333333%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666667%}.col-md-push-4{left:33.33333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.66666667%}.col-md-push-1{left:8.33333333%}.col-md-push-0{left:auto}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666667%}.col-md-offset-10{margin-left:83.33333333%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666667%}.col-md-offset-7{margin-left:58.33333333%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666667%}.col-md-offset-4{margin-left:33.33333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.66666667%}.col-md-offset-1{margin-left:8.33333333%}.col-md-offset-0{margin-left:0}}@media 
(min-width:1200px){.col-lg-1,.col-lg-10,.col-lg-11,.col-lg-12,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666667%}.col-lg-10{width:83.33333333%}.col-lg-9{width:75%}.col-lg-8{width:66.66666667%}.col-lg-7{width:58.33333333%}.col-lg-6{width:50%}.col-lg-5{width:41.66666667%}.col-lg-4{width:33.33333333%}.col-lg-3{width:25%}.col-lg-2{width:16.66666667%}.col-lg-1{width:8.33333333%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666667%}.col-lg-pull-10{right:83.33333333%}.col-lg-pull-9{right:75%}.col-lg-pull-8{right:66.66666667%}.col-lg-pull-7{right:58.33333333%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666667%}.col-lg-pull-4{right:33.33333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.66666667%}.col-lg-pull-1{right:8.33333333%}.col-lg-pull-0{right:auto}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666667%}.col-lg-push-10{left:83.33333333%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666667%}.col-lg-push-7{left:58.33333333%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666667%}.col-lg-push-4{left:33.33333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.66666667%}.col-lg-push-1{left:8.33333333%}.col-lg-push-0{left:auto}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666667%}.col-lg-offset-10{margin-left:83.33333333%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666667%}.col-lg-offset-7{margin-left:58.33333333%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666667%}.col-lg-offset-4{margin-left:33.33333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.66666667%}.col-lg-offset-1{margin-left:8.33333333%}.col-lg-offset-0{margin-left:0}}table{background-color:transparent}caption{padding-top:8px;padding-bottom:8px;color:#777;text-align:left}th{text-align:left}.table{width:100%;max-width:100%;margin-bottom:20px}.table>tbody>tr>td,.table>tbody>tr>th,.table>tfoot>tr>td,.table>tfoot>tr>th,.table>thead>tr>td,.table>thead>tr>th{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>td,.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>td,.table>thead:first-child>tr:first-child>th{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>tbody>tr>td,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>td,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>thead>tr>th{padding:5px}.table-bordered{border:1px solid #ddd}.table-bordered>tbody>tr>td,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>td,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border:1px solid #ddd}.table-bordered>thead>tr>td,.table-bordered>thead>tr>th{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover{background-color:#f5f5f5}table col[class*=col-]{position:static;display:table-column;float:none}table td[class*=col-],table 
th[class*=col-]{position:static;display:table-cell;float:none}.table>tbody>tr.active>td,.table>tbody>tr.active>th,.table>tbody>tr>td.active,.table>tbody>tr>th.active,.table>tfoot>tr.active>td,.table>tfoot>tr.active>th,.table>tfoot>tr>td.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>thead>tr.active>th,.table>thead>tr>td.active,.table>thead>tr>th.active{background-color:#f5f5f5}.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr.active:hover>th,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover{background-color:#e8e8e8}.table>tbody>tr.success>td,.table>tbody>tr.success>th,.table>tbody>tr>td.success,.table>tbody>tr>th.success,.table>tfoot>tr.success>td,.table>tfoot>tr.success>th,.table>tfoot>tr>td.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>thead>tr.success>th,.table>thead>tr>td.success,.table>thead>tr>th.success{background-color:#dff0d8}.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr.success:hover>th,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover{background-color:#d0e9c6}.table>tbody>tr.info>td,.table>tbody>tr.info>th,.table>tbody>tr>td.info,.table>tbody>tr>th.info,.table>tfoot>tr.info>td,.table>tfoot>tr.info>th,.table>tfoot>tr>td.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>thead>tr.info>th,.table>thead>tr>td.info,.table>thead>tr>th.info{background-color:#d9edf7}.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr.info:hover>th,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover{background-color:#c4e3f3}.table>tbody>tr.warning>td,.table>tbody>tr.warning>th,.table>tbody>tr>td.warning,.table>tbody>tr>th.warning,.table>tfoot>tr.warning>td,.table>tfoot>tr.warning>th,.table>tfoot>tr>td.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>thead>tr.warning>th,.table>thead>tr>td.warning,.table>thead>tr>th.warning{background-color:#fcf8e3}.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr.warning:hover>th,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover{background-color:#faf2cc}.table>tbody>tr.danger>td,.table>tbody>tr.danger>th,.table>tbody>tr>td.danger,.table>tbody>tr>th.danger,.table>tfoot>tr.danger>td,.table>tfoot>tr.danger>th,.table>tfoot>tr>td.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>thead>tr.danger>th,.table>thead>tr>td.danger,.table>thead>tr>th.danger{background-color:#f2dede}.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr.danger:hover>th,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover{background-color:#ebcccc}.table-responsive{min-height:.01%;overflow-x:auto}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>td,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>thead>tr>th{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}}fieldset{min-width:0;padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:inherit;color:#333;border:0;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px;font-weight:700}input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=checkbox],input[type=radio]{margin:4px 0 0;margin-top:1px\9;line-height:normal}input[type=file]{display:block}input[type=range]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type=file]:focus,input[type=checkbox]:focus,input[type=radio]:focus{outline:thin dotted;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}output{display:block;padding-top:7px;font-size:14px;line-height:1.42857143;color:#555}.form-control{display:block;width:100%;height:34px;padding:6px 12px;font-size:14px;line-height:1.42857143;color:#555;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075);-webkit-transition:border-color ease-in-out .15s,-webkit-box-shadow ease-in-out .15s;-o-transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#eee;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}input[type=search]{-webkit-appearance:none}@media screen and (-webkit-min-device-pixel-ratio:0){input[type=date].form-control,input[type=time].form-control,input[type=datetime-local].form-control,input[type=month].form-control{line-height:34px}.input-group-sm input[type=date],.input-group-sm input[type=time],.input-group-sm 
input[type=datetime-local],.input-group-sm input[type=month],input[type=date].input-sm,input[type=time].input-sm,input[type=datetime-local].input-sm,input[type=month].input-sm{line-height:30px}.input-group-lg input[type=date],.input-group-lg input[type=time],.input-group-lg input[type=datetime-local],.input-group-lg input[type=month],input[type=date].input-lg,input[type=time].input-lg,input[type=datetime-local].input-lg,input[type=month].input-lg{line-height:46px}}.form-group{margin-bottom:15px}.checkbox,.radio{position:relative;display:block;margin-top:10px;margin-bottom:10px}.checkbox label,.radio label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.checkbox input[type=checkbox],.checkbox-inline input[type=checkbox],.radio input[type=radio],.radio-inline input[type=radio]{position:absolute;margin-top:4px\9;margin-left:-20px}.checkbox+.checkbox,.radio+.radio{margin-top:-5px}.checkbox-inline,.radio-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;font-weight:400;vertical-align:middle;cursor:pointer}.checkbox-inline+.checkbox-inline,.radio-inline+.radio-inline{margin-top:0;margin-left:10px}fieldset[disabled] input[type=checkbox],fieldset[disabled] input[type=radio],input[type=checkbox].disabled,input[type=checkbox][disabled],input[type=radio].disabled,input[type=radio][disabled]{cursor:not-allowed}.checkbox-inline.disabled,.radio-inline.disabled,fieldset[disabled] .checkbox-inline,fieldset[disabled] .radio-inline{cursor:not-allowed}.checkbox.disabled label,.radio.disabled label,fieldset[disabled] .checkbox label,fieldset[disabled] .radio label{cursor:not-allowed}.form-control-static{min-height:34px;padding-top:7px;padding-bottom:7px;margin-bottom:0}.form-control-static.input-lg,.form-control-static.input-sm{padding-right:0;padding-left:0}.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-sm{height:30px;line-height:30px}select[multiple].input-sm,textarea.input-sm{height:auto}.form-group-sm .form-control{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm select[multiple].form-control,.form-group-sm textarea.form-control{height:auto}.form-group-sm .form-control-static{height:30px;min-height:32px;padding:6px 10px;font-size:12px;line-height:1.5}.input-lg{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-lg{height:46px;line-height:46px}select[multiple].input-lg,textarea.input-lg{height:auto}.form-group-lg .form-control{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg select[multiple].form-control,.form-group-lg textarea.form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.3333333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:2;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.form-group-lg .form-control+.form-control-feedback,.input-group-lg+.form-control-feedback,.input-lg+.form-control-feedback{width:46px;height:46px;line-height:46px}.form-group-sm 
.form-control+.form-control-feedback,.input-group-sm+.form-control-feedback,.input-sm+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .checkbox,.has-success .checkbox-inline,.has-success .control-label,.has-success .help-block,.has-success .radio,.has-success .radio-inline,.has-success.checkbox label,.has-success.checkbox-inline label,.has-success.radio label,.has-success.radio-inline label{color:#3c763d}.has-success .form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;background-color:#dff0d8;border-color:#3c763d}.has-success .form-control-feedback{color:#3c763d}.has-warning .checkbox,.has-warning .checkbox-inline,.has-warning .control-label,.has-warning .help-block,.has-warning .radio,.has-warning .radio-inline,.has-warning.checkbox label,.has-warning.checkbox-inline label,.has-warning.radio label,.has-warning.radio-inline label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;background-color:#fcf8e3;border-color:#8a6d3b}.has-warning .form-control-feedback{color:#8a6d3b}.has-error .checkbox,.has-error .checkbox-inline,.has-error .control-label,.has-error .help-block,.has-error .radio,.has-error .radio-inline,.has-error.checkbox label,.has-error.checkbox-inline label,.has-error.radio label,.has-error.radio-inline label{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;background-color:#f2dede;border-color:#a94442}.has-error .form-control-feedback{color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media (min-width:768px){.form-inline .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-static{display:inline-block}.form-inline .input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .form-control,.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .control-label{margin-bottom:0;vertical-align:middle}.form-inline .checkbox,.form-inline .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .checkbox label,.form-inline .radio label{padding-left:0}.form-inline .checkbox input[type=checkbox],.form-inline .radio input[type=radio]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}}.form-horizontal .checkbox,.form-horizontal 
.checkbox-inline,.form-horizontal .radio,.form-horizontal .radio-inline{padding-top:7px;margin-top:0;margin-bottom:0}.form-horizontal .checkbox,.form-horizontal .radio{min-height:27px}.form-horizontal .form-group{margin-right:-15px;margin-left:-15px}@media (min-width:768px){.form-horizontal .control-label{padding-top:7px;margin-bottom:0;text-align:right}}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal .form-group-lg .control-label{padding-top:14.33px;font-size:18px}}@media (min-width:768px){.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;padding:6px 12px;margin-bottom:0;font-size:14px;font-weight:400;line-height:1.42857143;text-align:center;white-space:nowrap;vertical-align:middle;-ms-touch-action:manipulation;touch-action:manipulation;cursor:pointer;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;background-image:none;border:1px solid transparent;border-radius:4px}.btn.active.focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn:active:focus,.btn:focus{outline:thin dotted;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.btn.focus,.btn:focus,.btn:hover{color:#333;text-decoration:none}.btn.active,.btn:active{background-image:none;outline:0;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;filter:alpha(opacity=65);-webkit-box-shadow:none;box-shadow:none;opacity:.65}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default.focus,.btn-default:focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default:hover{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active,.btn-default:active,.open>.dropdown-toggle.btn-default{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default.active.focus,.btn-default.active:focus,.btn-default.active:hover,.btn-default:active.focus,.btn-default:active:focus,.btn-default:active:hover,.open>.dropdown-toggle.btn-default.focus,.open>.dropdown-toggle.btn-default:focus,.open>.dropdown-toggle.btn-default:hover{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default.active,.btn-default:active,.open>.dropdown-toggle.btn-default{background-image:none}.btn-default.disabled,.btn-default.disabled.active,.btn-default.disabled.focus,.btn-default.disabled:active,.btn-default.disabled:focus,.btn-default.disabled:hover,.btn-default[disabled],.btn-default[disabled].active,.btn-default[disabled].focus,.btn-default[disabled]:active,.btn-default[disabled]:focus,.btn-default[disabled]:hover,fieldset[disabled] .btn-default,fieldset[disabled] .btn-default.active,fieldset[disabled] .btn-default.focus,fieldset[disabled] .btn-default:active,fieldset[disabled] .btn-default:focus,fieldset[disabled] .btn-default:hover{background-color:#fff;border-color:#ccc}.btn-default 
.badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#337ab7;border-color:#2e6da4}.btn-primary.focus,.btn-primary:focus{color:#fff;background-color:#286090;border-color:#122b40}.btn-primary:hover{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active,.btn-primary:active,.open>.dropdown-toggle.btn-primary{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary.active.focus,.btn-primary.active:focus,.btn-primary.active:hover,.btn-primary:active.focus,.btn-primary:active:focus,.btn-primary:active:hover,.open>.dropdown-toggle.btn-primary.focus,.open>.dropdown-toggle.btn-primary:focus,.open>.dropdown-toggle.btn-primary:hover{color:#fff;background-color:#204d74;border-color:#122b40}.btn-primary.active,.btn-primary:active,.open>.dropdown-toggle.btn-primary{background-image:none}.btn-primary.disabled,.btn-primary.disabled.active,.btn-primary.disabled.focus,.btn-primary.disabled:active,.btn-primary.disabled:focus,.btn-primary.disabled:hover,.btn-primary[disabled],.btn-primary[disabled].active,.btn-primary[disabled].focus,.btn-primary[disabled]:active,.btn-primary[disabled]:focus,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary,fieldset[disabled] .btn-primary.active,fieldset[disabled] .btn-primary.focus,fieldset[disabled] .btn-primary:active,fieldset[disabled] .btn-primary:focus,fieldset[disabled] .btn-primary:hover{background-color:#337ab7;border-color:#2e6da4}.btn-primary .badge{color:#337ab7;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success.focus,.btn-success:focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success:hover{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{color:#fff;background-color:#449d44;border-color:#398439}.btn-success.active.focus,.btn-success.active:focus,.btn-success.active:hover,.btn-success:active.focus,.btn-success:active:focus,.btn-success:active:hover,.open>.dropdown-toggle.btn-success.focus,.open>.dropdown-toggle.btn-success:focus,.open>.dropdown-toggle.btn-success:hover{color:#fff;background-color:#398439;border-color:#255625}.btn-success.active,.btn-success:active,.open>.dropdown-toggle.btn-success{background-image:none}.btn-success.disabled,.btn-success.disabled.active,.btn-success.disabled.focus,.btn-success.disabled:active,.btn-success.disabled:focus,.btn-success.disabled:hover,.btn-success[disabled],.btn-success[disabled].active,.btn-success[disabled].focus,.btn-success[disabled]:active,.btn-success[disabled]:focus,.btn-success[disabled]:hover,fieldset[disabled] .btn-success,fieldset[disabled] .btn-success.active,fieldset[disabled] .btn-success.focus,fieldset[disabled] .btn-success:active,fieldset[disabled] .btn-success:focus,fieldset[disabled] .btn-success:hover{background-color:#5cb85c;border-color:#4cae4c}.btn-success 
.badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info.focus,.btn-info:focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info:hover{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active,.btn-info:active,.open>.dropdown-toggle.btn-info{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info.active.focus,.btn-info.active:focus,.btn-info.active:hover,.btn-info:active.focus,.btn-info:active:focus,.btn-info:active:hover,.open>.dropdown-toggle.btn-info.focus,.open>.dropdown-toggle.btn-info:focus,.open>.dropdown-toggle.btn-info:hover{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info.active,.btn-info:active,.open>.dropdown-toggle.btn-info{background-image:none}.btn-info.disabled,.btn-info.disabled.active,.btn-info.disabled.focus,.btn-info.disabled:active,.btn-info.disabled:focus,.btn-info.disabled:hover,.btn-info[disabled],.btn-info[disabled].active,.btn-info[disabled].focus,.btn-info[disabled]:active,.btn-info[disabled]:focus,.btn-info[disabled]:hover,fieldset[disabled] .btn-info,fieldset[disabled] .btn-info.active,fieldset[disabled] .btn-info.focus,fieldset[disabled] .btn-info:active,fieldset[disabled] .btn-info:focus,fieldset[disabled] .btn-info:hover{background-color:#5bc0de;border-color:#46b8da}.btn-info .badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning.focus,.btn-warning:focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning:hover{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active,.btn-warning:active,.open>.dropdown-toggle.btn-warning{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning.active.focus,.btn-warning.active:focus,.btn-warning.active:hover,.btn-warning:active.focus,.btn-warning:active:focus,.btn-warning:active:hover,.open>.dropdown-toggle.btn-warning.focus,.open>.dropdown-toggle.btn-warning:focus,.open>.dropdown-toggle.btn-warning:hover{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning.active,.btn-warning:active,.open>.dropdown-toggle.btn-warning{background-image:none}.btn-warning.disabled,.btn-warning.disabled.active,.btn-warning.disabled.focus,.btn-warning.disabled:active,.btn-warning.disabled:focus,.btn-warning.disabled:hover,.btn-warning[disabled],.btn-warning[disabled].active,.btn-warning[disabled].focus,.btn-warning[disabled]:active,.btn-warning[disabled]:focus,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning,fieldset[disabled] .btn-warning.active,fieldset[disabled] .btn-warning.focus,fieldset[disabled] .btn-warning:active,fieldset[disabled] .btn-warning:focus,fieldset[disabled] .btn-warning:hover{background-color:#f0ad4e;border-color:#eea236}.btn-warning 
.badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger.focus,.btn-danger:focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger:hover{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active,.btn-danger:active,.open>.dropdown-toggle.btn-danger{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger.active.focus,.btn-danger.active:focus,.btn-danger.active:hover,.btn-danger:active.focus,.btn-danger:active:focus,.btn-danger:active:hover,.open>.dropdown-toggle.btn-danger.focus,.open>.dropdown-toggle.btn-danger:focus,.open>.dropdown-toggle.btn-danger:hover{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger.active,.btn-danger:active,.open>.dropdown-toggle.btn-danger{background-image:none}.btn-danger.disabled,.btn-danger.disabled.active,.btn-danger.disabled.focus,.btn-danger.disabled:active,.btn-danger.disabled:focus,.btn-danger.disabled:hover,.btn-danger[disabled],.btn-danger[disabled].active,.btn-danger[disabled].focus,.btn-danger[disabled]:active,.btn-danger[disabled]:focus,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger,fieldset[disabled] .btn-danger.active,fieldset[disabled] .btn-danger.focus,fieldset[disabled] .btn-danger:active,fieldset[disabled] .btn-danger:focus,fieldset[disabled] .btn-danger:hover{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{font-weight:400;color:#337ab7;border-radius:0}.btn-link,.btn-link.active,.btn-link:active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:active,.btn-link:focus,.btn-link:hover{border-color:transparent}.btn-link:focus,.btn-link:hover{color:#23527c;text-decoration:underline;background-color:transparent}.btn-link[disabled]:focus,.btn-link[disabled]:hover,fieldset[disabled] .btn-link:focus,fieldset[disabled] .btn-link:hover{color:#777;text-decoration:none}.btn-group-lg>.btn,.btn-lg{padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.btn-group-sm>.btn,.btn-sm{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-group-xs>.btn,.btn-xs{padding:1px 5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type=button].btn-block,input[type=reset].btn-block,input[type=submit].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition-timing-function:ease;-o-transition-timing-function:ease;transition-timing-function:ease;-webkit-transition-duration:.35s;-o-transition-duration:.35s;transition-duration:.35s;-webkit-transition-property:height,visibility;-o-transition-property:height,visibility;transition-property:height,visibility}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px dashed;border-top:4px solid\9;border-right:4px solid transparent;border-left:4px solid transparent}.dropdown,.dropup{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 
0;font-size:14px;text-align:left;list-style:none;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,.175);box-shadow:0 6px 12px rgba(0,0,0,.175)}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 20px;clear:both;font-weight:400;line-height:1.42857143;color:#333;white-space:nowrap}.dropdown-menu>li>a:focus,.dropdown-menu>li>a:hover{color:#262626;text-decoration:none;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:focus,.dropdown-menu>.active>a:hover{color:#fff;text-decoration:none;background-color:#337ab7;outline:0}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{color:#777}.dropdown-menu>.disabled>a:focus,.dropdown-menu>.disabled>a:hover{text-decoration:none;cursor:not-allowed;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-right{right:0;left:auto}.dropdown-menu-left{right:auto;left:0}.dropdown-header{display:block;padding:3px 20px;font-size:12px;line-height:1.42857143;color:#777;white-space:nowrap}.dropdown-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:990}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{content:"";border-top:0;border-bottom:4px dashed;border-bottom:4px solid\9}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{right:0;left:auto}.navbar-right .dropdown-menu-left{right:auto;left:0}}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group-vertical>.btn,.btn-group>.btn{position:relative;float:left}.btn-group-vertical>.btn.active,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:hover,.btn-group>.btn.active,.btn-group>.btn:active,.btn-group>.btn:focus,.btn-group>.btn:hover{z-index:2}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar .btn,.btn-toolbar .btn-group,.btn-toolbar .input-group{float:left}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-bottom-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open 
.dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-right:8px;padding-left:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-right:12px;padding-left:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn .caret{margin-left:0}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-left-radius:0;border-top-right-radius:0;border-bottom-left-radius:4px}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-left-radius:0;border-top-right-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{display:table-cell;float:none;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle=buttons]>.btn input[type=checkbox],[data-toggle=buttons]>.btn input[type=radio],[data-toggle=buttons]>.btn-group>.btn input[type=checkbox],[data-toggle=buttons]>.btn-group>.btn input[type=radio]{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*=col-]{float:none;padding-right:0;padding-left:0}.input-group .form-control{position:relative;z-index:2;float:left;width:100%;margin-bottom:0}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}select[multiple].input-group-lg>.form-control,select[multiple].input-group-lg>.input-group-addon,select[multiple].input-group-lg>.input-group-btn>.btn,textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 
10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}select[multiple].input-group-sm>.form-control,select[multiple].input-group-sm>.input-group-addon,select[multiple].input-group-sm>.input-group-btn>.btn,textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn{height:auto}.input-group .form-control,.input-group-addon,.input-group-btn{display:table-cell}.input-group .form-control:not(:first-child):not(:last-child),.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:400;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm{padding:5px 10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type=checkbox],.input-group-addon input[type=radio]{margin-top:0}.input-group .form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn-group:not(:last-child)>.btn,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle){border-top-right-radius:0;border-bottom-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group .form-control:last-child,.input-group-addon:last-child,.input-group-btn:first-child>.btn-group:not(:first-child)>.btn,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle{border-top-left-radius:0;border-bottom-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;font-size:0;white-space:nowrap}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:active,.input-group-btn>.btn:focus,.input-group-btn>.btn:hover{z-index:2}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:2;margin-left:-1px}.nav{padding-left:0;margin-bottom:0;list-style:none}.nav>li{position:relative;display:block}.nav>li>a{position:relative;display:block;padding:10px 15px}.nav>li>a:focus,.nav>li>a:hover{text-decoration:none;background-color:#eee}.nav>li.disabled>a{color:#777}.nav>li.disabled>a:focus,.nav>li.disabled>a:hover{color:#777;text-decoration:none;cursor:not-allowed;background-color:transparent}.nav .open>a,.nav .open>a:focus,.nav .open>a:hover{background-color:#eee;border-color:#337ab7}.nav .nav-divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.42857143;border:1px solid transparent;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:focus,.nav-tabs>li.active>a:hover{color:#555;cursor:default;background-color:#fff;border:1px solid 
#ddd;border-bottom-color:transparent}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-tabs.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0}}.nav-tabs.nav-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs.nav-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:focus,.nav-tabs.nav-justified>.active>a:hover{border-bottom-color:#fff}}.nav-pills>li{float:left}.nav-pills>li>a{border-radius:4px}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:focus,.nav-pills>li.active>a:hover{color:#fff;background-color:#337ab7}.nav-stacked>li{float:none}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li{float:none}.nav-justified>li>a{margin-bottom:5px;text-align:center}.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border:1px solid #ddd}@media (min-width:768px){.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:focus,.nav-tabs-justified>.active>a:hover{border-bottom-color:#fff}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}@media (min-width:768px){.navbar{border-radius:4px}}@media (min-width:768px){.navbar-header{float:left}}.navbar-collapse{padding-right:15px;padding-left:15px;overflow-x:visible;-webkit-overflow-scrolling:touch;border-top:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1)}.navbar-collapse.in{overflow-y:auto}@media (min-width:768px){.navbar-collapse{width:auto;border-top:0;-webkit-box-shadow:none;box-shadow:none}.navbar-collapse.collapse{display:block!important;height:auto!important;padding-bottom:0;overflow:visible!important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse,.navbar-static-top .navbar-collapse{padding-right:0;padding-left:0}}.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:340px}@media (max-device-width:480px) and (orientation:landscape){.navbar-fixed-bottom .navbar-collapse,.navbar-fixed-top .navbar-collapse{max-height:200px}}.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:-15px;margin-left:-15px}@media (min-width:768px){.container-fluid>.navbar-collapse,.container-fluid>.navbar-header,.container>.navbar-collapse,.container>.navbar-header{margin-right:0;margin-left:0}}.navbar-static-top{z-index:1000;border-width:0 0 1px}@media 
(min-width:768px){.navbar-static-top{border-radius:0}}.navbar-fixed-bottom,.navbar-fixed-top{position:fixed;right:0;left:0;z-index:1030}@media (min-width:768px){.navbar-fixed-bottom,.navbar-fixed-top{border-radius:0}}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.navbar-brand{float:left;height:50px;padding:15px 15px;font-size:18px;line-height:20px}.navbar-brand:focus,.navbar-brand:hover{text-decoration:none}.navbar-brand>img{display:block}@media (min-width:768px){.navbar>.container .navbar-brand,.navbar>.container-fluid .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;padding:9px 10px;margin-top:8px;margin-right:15px;margin-bottom:8px;background-color:transparent;background-image:none;border:1px solid transparent;border-radius:4px}.navbar-toggle:focus{outline:0}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}@media (min-width:768px){.navbar-toggle{display:none}}.navbar-nav{margin:7.5px -15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media (max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;-webkit-box-shadow:none;box-shadow:none}.navbar-nav .open .dropdown-menu .dropdown-header,.navbar-nav .open .dropdown-menu>li>a{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:focus,.navbar-nav .open .dropdown-menu>li>a:hover{background-image:none}}@media (min-width:768px){.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}}.navbar-form{padding:10px 15px;margin-top:8px;margin-right:-15px;margin-bottom:8px;margin-left:-15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1)}@media (min-width:768px){.navbar-form .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block;width:auto;vertical-align:middle}.navbar-form .form-control-static{display:inline-block}.navbar-form .input-group{display:inline-table;vertical-align:middle}.navbar-form .input-group .form-control,.navbar-form .input-group .input-group-addon,.navbar-form .input-group .input-group-btn{width:auto}.navbar-form .input-group>.form-control{width:100%}.navbar-form .control-label{margin-bottom:0;vertical-align:middle}.navbar-form .checkbox,.navbar-form .radio{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.navbar-form .checkbox label,.navbar-form .radio label{padding-left:0}.navbar-form .checkbox input[type=checkbox],.navbar-form .radio input[type=radio]{position:relative;margin-left:0}.navbar-form .has-feedback .form-control-feedback{top:0}}@media (max-width:767px){.navbar-form .form-group{margin-bottom:5px}.navbar-form .form-group:last-child{margin-bottom:0}}@media (min-width:768px){.navbar-form{width:auto;padding-top:0;padding-bottom:0;margin-right:0;margin-left:0;border:0;-webkit-box-shadow:none;box-shadow:none}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-left-radius:0;border-top-right-radius:0}.navbar-fixed-bottom 
.navbar-nav>li>.dropdown-menu{margin-bottom:0;border-top-left-radius:4px;border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.navbar-text{margin-top:15px;margin-bottom:15px}@media (min-width:768px){.navbar-text{float:left;margin-right:15px;margin-left:15px}}@media (min-width:768px){.navbar-left{float:left!important}.navbar-right{float:right!important;margin-right:-15px}.navbar-right~.navbar-right{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default .navbar-brand{color:#777}.navbar-default .navbar-brand:focus,.navbar-default .navbar-brand:hover{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a{color:#777}.navbar-default .navbar-nav>li>a:focus,.navbar-default .navbar-nav>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:focus,.navbar-default .navbar-nav>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:focus,.navbar-default .navbar-nav>.disabled>a:hover{color:#ccc;background-color:transparent}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default .navbar-toggle:focus,.navbar-default .navbar-toggle:hover{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#888}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:focus,.navbar-default .navbar-nav>.open>a:hover{color:#555;background-color:#e7e7e7}@media (max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>li>a:hover{color:#333;background-color:transparent}.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#ccc;background-color:transparent}}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-default .btn-link{color:#777}.navbar-default .btn-link:focus,.navbar-default .btn-link:hover{color:#333}.navbar-default .btn-link[disabled]:focus,.navbar-default .btn-link[disabled]:hover,fieldset[disabled] .navbar-default .btn-link:focus,fieldset[disabled] .navbar-default .btn-link:hover{color:#ccc}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse .navbar-brand{color:#9d9d9d}.navbar-inverse .navbar-brand:focus,.navbar-inverse .navbar-brand:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-text{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a:focus,.navbar-inverse .navbar-nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:focus,.navbar-inverse .navbar-nav>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse 
.navbar-nav>.disabled>a:focus,.navbar-inverse .navbar-nav>.disabled>a:hover{color:#444;background-color:transparent}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse .navbar-toggle:focus,.navbar-inverse .navbar-toggle:hover{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse .navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:focus,.navbar-inverse .navbar-nav>.open>a:hover{color:#fff;background-color:#080808}@media (max-width:767px){.navbar-inverse .navbar-nav .open .dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu .divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover{color:#444;background-color:transparent}}.navbar-inverse .navbar-link{color:#9d9d9d}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .btn-link{color:#9d9d9d}.navbar-inverse .btn-link:focus,.navbar-inverse .btn-link:hover{color:#fff}.navbar-inverse .btn-link[disabled]:focus,.navbar-inverse .btn-link[disabled]:hover,fieldset[disabled] .navbar-inverse .btn-link:focus,fieldset[disabled] .navbar-inverse .btn-link:hover{color:#444}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:4px}.breadcrumb>li{display:inline-block}.breadcrumb>li+li:before{padding:0 5px;color:#ccc;content:"/\00a0"}.breadcrumb>.active{color:#777}.pagination{display:inline-block;padding-left:0;margin:20px 0;border-radius:4px}.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 12px;margin-left:-1px;line-height:1.42857143;color:#337ab7;text-decoration:none;background-color:#fff;border:1px solid #ddd}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-top-left-radius:4px;border-bottom-left-radius:4px}.pagination>li:last-child>a,.pagination>li:last-child>span{border-top-right-radius:4px;border-bottom-right-radius:4px}.pagination>li>a:focus,.pagination>li>a:hover,.pagination>li>span:focus,.pagination>li>span:hover{z-index:3;color:#23527c;background-color:#eee;border-color:#ddd}.pagination>.active>a,.pagination>.active>a:focus,.pagination>.active>a:hover,.pagination>.active>span,.pagination>.active>span:focus,.pagination>.active>span:hover{z-index:2;color:#fff;cursor:default;background-color:#337ab7;border-color:#337ab7}.pagination>.disabled>a,.pagination>.disabled>a:focus,.pagination>.disabled>a:hover,.pagination>.disabled>span,.pagination>.disabled>span:focus,.pagination>.disabled>span:hover{color:#777;cursor:not-allowed;background-color:#fff;border-color:#ddd}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 
16px;font-size:18px;line-height:1.3333333}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-top-left-radius:6px;border-bottom-left-radius:6px}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-top-right-radius:6px;border-bottom-right-radius:6px}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px;line-height:1.5}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-top-left-radius:3px;border-bottom-left-radius:3px}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-top-right-radius:3px;border-bottom-right-radius:3px}.pager{padding-left:0;margin:20px 0;text-align:center;list-style:none}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:focus,.pager li>a:hover{text-decoration:none;background-color:#eee}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager .previous>span{float:left}.pager .disabled>a,.pager .disabled>a:focus,.pager .disabled>a:hover,.pager .disabled>span{color:#777;cursor:not-allowed;background-color:#fff}.label{display:inline;padding:.2em .6em .3em;font-size:75%;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25em}a.label:focus,a.label:hover{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.btn .label{position:relative;top:-1px}.label-default{background-color:#777}.label-default[href]:focus,.label-default[href]:hover{background-color:#5e5e5e}.label-primary{background-color:#337ab7}.label-primary[href]:focus,.label-primary[href]:hover{background-color:#286090}.label-success{background-color:#5cb85c}.label-success[href]:focus,.label-success[href]:hover{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:focus,.label-info[href]:hover{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:focus,.label-warning[href]:hover{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:focus,.label-danger[href]:hover{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:middle;background-color:#777;border-radius:10px}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.btn-group-xs>.btn .badge,.btn-xs .badge{top:0;padding:1px 5px}a.badge:focus,a.badge:hover{color:#fff;text-decoration:none;cursor:pointer}.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#337ab7;background-color:#fff}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding-top:30px;padding-bottom:30px;margin-bottom:30px;color:inherit;background-color:#eee}.jumbotron .h1,.jumbotron h1{color:inherit}.jumbotron p{margin-bottom:15px;font-size:21px;font-weight:200}.jumbotron>hr{border-top-color:#d5d5d5}.container .jumbotron,.container-fluid .jumbotron{border-radius:6px}.jumbotron .container{max-width:100%}@media screen and (min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron,.container-fluid .jumbotron{padding-right:60px;padding-left:60px}.jumbotron .h1,.jumbotron h1{font-size:63px}}.thumbnail{display:block;padding:4px;margin-bottom:20px;line-height:1.42857143;background-color:#fff;border:1px solid 
#ddd;border-radius:4px;-webkit-transition:border .2s ease-in-out;-o-transition:border .2s ease-in-out;transition:border .2s ease-in-out}.thumbnail a>img,.thumbnail>img{margin-right:auto;margin-left:auto}a.thumbnail.active,a.thumbnail:focus,a.thumbnail:hover{border-color:#337ab7}.thumbnail .caption{padding:9px;color:#333}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:700}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.alert-info hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-o-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f5f5f5;border-radius:4px;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.1);box-shadow:inset 0 1px 2px rgba(0,0,0,.1)}.progress-bar{float:left;width:0;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#337ab7;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress-bar-striped,.progress-striped .progress-bar{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;background-size:40px 40px}.progress-bar.active,.progress.active .progress-bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 
50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.media{margin-top:15px}.media:first-child{margin-top:0}.media,.media-body{overflow:hidden;zoom:1}.media-body{width:10000px}.media-object{display:block}.media-object.img-thumbnail{max-width:none}.media-right,.media>.pull-right{padding-left:10px}.media-left,.media>.pull-left{padding-right:10px}.media-body,.media-left,.media-right{display:table-cell;vertical-align:top}.media-middle{vertical-align:middle}.media-bottom{vertical-align:bottom}.media-heading{margin-top:0;margin-bottom:5px}.media-list{padding-left:0;list-style:none}.list-group{padding-left:0;margin-bottom:20px}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-left-radius:4px;border-top-right-radius:4px}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}a.list-group-item,button.list-group-item{color:#555}a.list-group-item .list-group-item-heading,button.list-group-item .list-group-item-heading{color:#333}a.list-group-item:focus,a.list-group-item:hover,button.list-group-item:focus,button.list-group-item:hover{color:#555;text-decoration:none;background-color:#f5f5f5}button.list-group-item{width:100%;text-align:left}.list-group-item.disabled,.list-group-item.disabled:focus,.list-group-item.disabled:hover{color:#777;cursor:not-allowed;background-color:#eee}.list-group-item.disabled .list-group-item-heading,.list-group-item.disabled:focus .list-group-item-heading,.list-group-item.disabled:hover 
.list-group-item-heading{color:inherit}.list-group-item.disabled .list-group-item-text,.list-group-item.disabled:focus .list-group-item-text,.list-group-item.disabled:hover .list-group-item-text{color:#777}.list-group-item.active,.list-group-item.active:focus,.list-group-item.active:hover{z-index:2;color:#fff;background-color:#337ab7;border-color:#337ab7}.list-group-item.active .list-group-item-heading,.list-group-item.active .list-group-item-heading>.small,.list-group-item.active .list-group-item-heading>small,.list-group-item.active:focus .list-group-item-heading,.list-group-item.active:focus .list-group-item-heading>.small,.list-group-item.active:focus .list-group-item-heading>small,.list-group-item.active:hover .list-group-item-heading,.list-group-item.active:hover .list-group-item-heading>.small,.list-group-item.active:hover .list-group-item-heading>small{color:inherit}.list-group-item.active .list-group-item-text,.list-group-item.active:focus .list-group-item-text,.list-group-item.active:hover .list-group-item-text{color:#c7ddef}.list-group-item-success{color:#3c763d;background-color:#dff0d8}a.list-group-item-success,button.list-group-item-success{color:#3c763d}a.list-group-item-success .list-group-item-heading,button.list-group-item-success .list-group-item-heading{color:inherit}a.list-group-item-success:focus,a.list-group-item-success:hover,button.list-group-item-success:focus,button.list-group-item-success:hover{color:#3c763d;background-color:#d0e9c6}a.list-group-item-success.active,a.list-group-item-success.active:focus,a.list-group-item-success.active:hover,button.list-group-item-success.active,button.list-group-item-success.active:focus,button.list-group-item-success.active:hover{color:#fff;background-color:#3c763d;border-color:#3c763d}.list-group-item-info{color:#31708f;background-color:#d9edf7}a.list-group-item-info,button.list-group-item-info{color:#31708f}a.list-group-item-info .list-group-item-heading,button.list-group-item-info .list-group-item-heading{color:inherit}a.list-group-item-info:focus,a.list-group-item-info:hover,button.list-group-item-info:focus,button.list-group-item-info:hover{color:#31708f;background-color:#c4e3f3}a.list-group-item-info.active,a.list-group-item-info.active:focus,a.list-group-item-info.active:hover,button.list-group-item-info.active,button.list-group-item-info.active:focus,button.list-group-item-info.active:hover{color:#fff;background-color:#31708f;border-color:#31708f}.list-group-item-warning{color:#8a6d3b;background-color:#fcf8e3}a.list-group-item-warning,button.list-group-item-warning{color:#8a6d3b}a.list-group-item-warning .list-group-item-heading,button.list-group-item-warning .list-group-item-heading{color:inherit}a.list-group-item-warning:focus,a.list-group-item-warning:hover,button.list-group-item-warning:focus,button.list-group-item-warning:hover{color:#8a6d3b;background-color:#faf2cc}a.list-group-item-warning.active,a.list-group-item-warning.active:focus,a.list-group-item-warning.active:hover,button.list-group-item-warning.active,button.list-group-item-warning.active:focus,button.list-group-item-warning.active:hover{color:#fff;background-color:#8a6d3b;border-color:#8a6d3b}.list-group-item-danger{color:#a94442;background-color:#f2dede}a.list-group-item-danger,button.list-group-item-danger{color:#a94442}a.list-group-item-danger .list-group-item-heading,button.list-group-item-danger 
.list-group-item-heading{color:inherit}a.list-group-item-danger:focus,a.list-group-item-danger:hover,button.list-group-item-danger:focus,button.list-group-item-danger:hover{color:#a94442;background-color:#ebcccc}a.list-group-item-danger.active,a.list-group-item-danger.active:focus,a.list-group-item-danger.active:hover,button.list-group-item-danger.active,button.list-group-item-danger.active:focus,button.list-group-item-danger.active:hover{color:#fff;background-color:#a94442;border-color:#a94442}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}.panel{margin-bottom:20px;background-color:#fff;border:1px solid transparent;border-radius:4px;-webkit-box-shadow:0 1px 1px rgba(0,0,0,.05);box-shadow:0 1px 1px rgba(0,0,0,.05)}.panel-body{padding:15px}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-left-radius:3px;border-top-right-radius:3px}.panel-heading>.dropdown .dropdown-toggle{color:inherit}.panel-title{margin-top:0;margin-bottom:0;font-size:16px;color:inherit}.panel-title>.small,.panel-title>.small>a,.panel-title>a,.panel-title>small,.panel-title>small>a{color:inherit}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid #ddd;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.list-group,.panel>.panel-collapse>.list-group{margin-bottom:0}.panel>.list-group .list-group-item,.panel>.panel-collapse>.list-group .list-group-item{border-width:1px 0;border-radius:0}.panel>.list-group:first-child .list-group-item:first-child,.panel>.panel-collapse>.list-group:first-child .list-group-item:first-child{border-top:0;border-top-left-radius:3px;border-top-right-radius:3px}.panel>.list-group:last-child .list-group-item:last-child,.panel>.panel-collapse>.list-group:last-child .list-group-item:last-child{border-bottom:0;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.panel-heading+.panel-collapse>.list-group .list-group-item:first-child{border-top-left-radius:0;border-top-right-radius:0}.panel-heading+.list-group .list-group-item:first-child{border-top-width:0}.list-group+.panel-footer{border-top-width:0}.panel>.panel-collapse>.table,.panel>.table,.panel>.table-responsive>.table{margin-bottom:0}.panel>.panel-collapse>.table caption,.panel>.table caption,.panel>.table-responsive>.table caption{padding-right:15px;padding-left:15px}.panel>.table-responsive:first-child>.table:first-child,.panel>.table:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child,.panel>.table:first-child>thead:first-child>tr:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table:first-child>thead:first-child>tr:first-child 
td:first-child,.panel>.table:first-child>thead:first-child>tr:first-child th:first-child{border-top-left-radius:3px}.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table:first-child>thead:first-child>tr:first-child th:last-child{border-top-right-radius:3px}.panel>.table-responsive:last-child>.table:last-child,.panel>.table:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:first-child{border-bottom-left-radius:3px}.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:last-child{border-bottom-right-radius:3px}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive,.panel>.table+.panel-body,.panel>.table-responsive+.panel-body{border-top:1px solid #ddd}.panel>.table>tbody:first-child>tr:first-child td,.panel>.table>tbody:first-child>tr:first-child 
th{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child{border-left:0}.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-responsive>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child{border-right:0}.panel>.table-bordered>tbody>tr:first-child>td,.panel>.table-bordered>tbody>tr:first-child>th,.panel>.table-bordered>thead>tr:first-child>td,.panel>.table-bordered>thead>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>th,.panel>.table-responsive>.table-bordered>thead>tr:first-child>td,.panel>.table-responsive>.table-bordered>thead>tr:first-child>th{border-bottom:0}.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}.panel>.table-responsive{margin-bottom:0;border:0}.panel-group{margin-bottom:20px}.panel-group .panel{margin-bottom:0;border-radius:4px}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading{border-bottom:0}.panel-group .panel-heading+.panel-collapse>.list-group,.panel-group .panel-heading+.panel-collapse>.panel-body{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#333;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ddd}.panel-default>.panel-heading .badge{color:#f5f5f5;background-color:#333}.panel-default>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#337ab7}.panel-primary>.panel-heading{color:#fff;background-color:#337ab7;border-color:#337ab7}.panel-primary>.panel-heading+.panel-collapse>.panel-body{border-top-color:#337ab7}.panel-primary>.panel-heading 
.badge{color:#337ab7;background-color:#fff}.panel-primary>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#337ab7}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse>.panel-body{border-top-color:#d6e9c6}.panel-success>.panel-heading .badge{color:#dff0d8;background-color:#3c763d}.panel-success>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#d6e9c6}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse>.panel-body{border-top-color:#bce8f1}.panel-info>.panel-heading .badge{color:#d9edf7;background-color:#31708f}.panel-info>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#bce8f1}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse>.panel-body{border-top-color:#faebcc}.panel-warning>.panel-heading .badge{color:#fcf8e3;background-color:#8a6d3b}.panel-warning>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ebccd1}.panel-danger>.panel-heading .badge{color:#f2dede;background-color:#a94442}.panel-danger>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ebccd1}.embed-responsive{position:relative;display:block;height:0;padding:0;overflow:hidden}.embed-responsive .embed-responsive-item,.embed-responsive embed,.embed-responsive iframe,.embed-responsive object,.embed-responsive video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-16by9{padding-bottom:56.25%}.embed-responsive-4by3{padding-bottom:75%}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px rgba(0,0,0,.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,.15)}.well-lg{padding:24px;border-radius:6px}.well-sm{padding:9px;border-radius:3px}.close{float:right;font-size:21px;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 #fff;filter:alpha(opacity=20);opacity:.2}.close:focus,.close:hover{color:#000;text-decoration:none;cursor:pointer;filter:alpha(opacity=50);opacity:.5}button.close{-webkit-appearance:none;padding:0;cursor:pointer;background:0 0;border:0}.modal-open{overflow:hidden}.modal{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1050;display:none;overflow:hidden;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transition:-webkit-transform .3s ease-out;-o-transition:-o-transform .3s ease-out;transition:transform .3s ease-out;-webkit-transform:translate(0,-25%);-ms-transform:translate(0,-25%);-o-transform:translate(0,-25%);transform:translate(0,-25%)}.modal.in .modal-dialog{-webkit-transform:translate(0,0);-ms-transform:translate(0,0);-o-transform:translate(0,0);transform:translate(0,0)}.modal-open .modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #999;border:1px solid 
rgba(0,0,0,.2);border-radius:6px;outline:0;-webkit-box-shadow:0 3px 9px rgba(0,0,0,.5);box-shadow:0 3px 9px rgba(0,0,0,.5)}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{filter:alpha(opacity=0);opacity:0}.modal-backdrop.in{filter:alpha(opacity=50);opacity:.5}.modal-header{min-height:16.43px;padding:15px;border-bottom:1px solid #e5e5e5}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857143}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer .btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,.5);box-shadow:0 5px 15px rgba(0,0,0,.5)}.modal-sm{width:300px}}@media (min-width:992px){.modal-lg{width:900px}}.tooltip{position:absolute;z-index:1070;display:block;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:12px;font-style:normal;font-weight:400;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;filter:alpha(opacity=0);opacity:0;line-break:auto}.tooltip.in{filter:alpha(opacity=90);opacity:.9}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 5px;margin-left:-3px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;background-color:#000;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-left .tooltip-arrow{right:5px;bottom:0;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-right .tooltip-arrow{bottom:0;left:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-left .tooltip-arrow{top:0;right:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-right .tooltip-arrow{top:0;left:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.popover{position:absolute;top:0;left:0;z-index:1060;display:none;max-width:276px;padding:1px;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;font-style:normal;font-weight:400;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;word-wrap:normal;white-space:normal;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.2);border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,.2);box-shadow:0 5px 10px 
rgba(0,0,0,.2);line-break:auto}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{padding:8px 14px;margin:0;font-size:14px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover>.arrow{border-width:11px}.popover>.arrow:after{content:"";border-width:10px}.popover.top>.arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,.25);border-bottom-width:0}.popover.top>.arrow:after{bottom:1px;margin-left:-10px;content:" ";border-top-color:#fff;border-bottom-width:0}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,.25);border-left-width:0}.popover.right>.arrow:after{bottom:-10px;left:1px;content:" ";border-right-color:#fff;border-left-width:0}.popover.bottom>.arrow{top:-11px;left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,.25)}.popover.bottom>.arrow:after{top:1px;margin-left:-10px;content:" ";border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,.25)}.popover.left>.arrow:after{right:1px;bottom:-10px;content:" ";border-right-width:0;border-left-color:#fff}.carousel{position:relative}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>a>img,.carousel-inner>.item>img{line-height:1}@media all and (transform-3d),(-webkit-transform-3d){.carousel-inner>.item{-webkit-transition:-webkit-transform .6s ease-in-out;-o-transition:-o-transform .6s ease-in-out;transition:transform .6s ease-in-out;-webkit-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;perspective:1000px}.carousel-inner>.item.active.right,.carousel-inner>.item.next{left:0;-webkit-transform:translate3d(100%,0,0);transform:translate3d(100%,0,0)}.carousel-inner>.item.active.left,.carousel-inner>.item.prev{left:0;-webkit-transform:translate3d(-100%,0,0);transform:translate3d(-100%,0,0)}.carousel-inner>.item.active,.carousel-inner>.item.next.left,.carousel-inner>.item.prev.right{left:0;-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;bottom:0;left:0;width:15%;font-size:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6);filter:alpha(opacity=50);opacity:.5}.carousel-control.left{background-image:-webkit-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-image:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.5)),to(rgba(0,0,0,.0001)));background-image:linear-gradient(to right,rgba(0,0,0,.5) 
0,rgba(0,0,0,.0001) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1);background-repeat:repeat-x}.carousel-control.right{right:0;left:auto;background-image:-webkit-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-o-linear-gradient(left,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-image:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.0001)),to(rgba(0,0,0,.5)));background-image:linear-gradient(to right,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1);background-repeat:repeat-x}.carousel-control:focus,.carousel-control:hover{color:#fff;text-decoration:none;filter:alpha(opacity=90);outline:0;opacity:.9}.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{position:absolute;top:50%;z-index:5;display:inline-block;margin-top:-10px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{left:50%;margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{right:50%;margin-right:-10px}.carousel-control .icon-next,.carousel-control .icon-prev{width:20px;height:20px;font-family:serif;line-height:1}.carousel-control .icon-prev:before{content:'\2039'}.carousel-control .icon-next:before{content:'\203a'}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;padding-left:0;margin-left:-30%;text-align:center;list-style:none}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;cursor:pointer;background-color:#000\9;background-color:rgba(0,0,0,0);border:1px solid #fff;border-radius:10px}.carousel-indicators .active{width:12px;height:12px;margin:0;background-color:#fff}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6)}.carousel-caption .btn{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next,.carousel-control .icon-prev{width:30px;height:30px;margin-top:-15px;font-size:30px}.carousel-control .glyphicon-chevron-left,.carousel-control .icon-prev{margin-left:-15px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{margin-right:-15px}.carousel-caption{right:20%;left:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.btn-group-vertical>.btn-group:after,.btn-group-vertical>.btn-group:before,.btn-toolbar:after,.btn-toolbar:before,.clearfix:after,.clearfix:before,.container-fluid:after,.container-fluid:before,.container:after,.container:before,.dl-horizontal dd:after,.dl-horizontal dd:before,.form-horizontal .form-group:after,.form-horizontal .form-group:before,.modal-footer:after,.modal-footer:before,.nav:after,.nav:before,.navbar-collapse:after,.navbar-collapse:before,.navbar-header:after,.navbar-header:before,.navbar:after,.navbar:before,.pager:after,.pager:before,.panel-body:after,.panel-body:before,.row:after,.row:before{display:table;content:" "}.btn-group-vertical>.btn-group:after,.btn-toolbar:after,.clearfix:after,.container-fluid:after,.container:after,.dl-horizontal dd:after,.form-horizontal 
.form-group:after,.modal-footer:after,.nav:after,.navbar-collapse:after,.navbar-header:after,.navbar:after,.pager:after,.panel-body:after,.row:after{clear:both}.center-block{display:block;margin-right:auto;margin-left:auto}.pull-right{float:right!important}.pull-left{float:left!important}.hide{display:none!important}.show{display:block!important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none!important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-lg,.visible-md,.visible-sm,.visible-xs{display:none!important}.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block{display:none!important}@media (max-width:767px){.visible-xs{display:block!important}table.visible-xs{display:table!important}tr.visible-xs{display:table-row!important}td.visible-xs,th.visible-xs{display:table-cell!important}}@media (max-width:767px){.visible-xs-block{display:block!important}}@media (max-width:767px){.visible-xs-inline{display:inline!important}}@media (max-width:767px){.visible-xs-inline-block{display:inline-block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block!important}table.visible-sm{display:table!important}tr.visible-sm{display:table-row!important}td.visible-sm,th.visible-sm{display:table-cell!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-block{display:block!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline{display:inline!important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline-block{display:inline-block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block!important}table.visible-md{display:table!important}tr.visible-md{display:table-row!important}td.visible-md,th.visible-md{display:table-cell!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-block{display:block!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline{display:inline!important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline-block{display:inline-block!important}}@media (min-width:1200px){.visible-lg{display:block!important}table.visible-lg{display:table!important}tr.visible-lg{display:table-row!important}td.visible-lg,th.visible-lg{display:table-cell!important}}@media (min-width:1200px){.visible-lg-block{display:block!important}}@media (min-width:1200px){.visible-lg-inline{display:inline!important}}@media (min-width:1200px){.visible-lg-inline-block{display:inline-block!important}}@media (max-width:767px){.hidden-xs{display:none!important}}@media (min-width:768px) and (max-width:991px){.hidden-sm{display:none!important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none!important}}@media (min-width:1200px){.hidden-lg{display:none!important}}.visible-print{display:none!important}@media print{.visible-print{display:block!important}table.visible-print{display:table!important}tr.visible-print{display:table-row!important}td.visible-print,th.visible-print{display:table-cell!important}}.visible-print-block{display:none!important}@media print{.visible-print-block{display:block!important}}.visible-print-inline{display:none!important}@media 
print{.visible-print-inline{display:inline!important}}.visible-print-inline-block{display:none!important}@media print{.visible-print-inline-block{display:inline-block!important}}@media print{.hidden-print{display:none!important}} \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.eot b/testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.eot
new file mode 100644
index 0000000000..4a4ca865d6
--- /dev/null
+++ b/testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.eot
Binary files differ
diff --git a/testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.svg b/testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.svg
new file mode 100644
index 0000000000..e3e2dc739d
--- /dev/null
+++ b/testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.svg
@@ -0,0 +1,229 @@
+<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
+<svg xmlns="http://www.w3.org/2000/svg">
+<metadata></metadata>
+<defs>
+<font id="glyphicons_halflingsregular" horiz-adv-x="1200" >
+<font-face units-per-em="1200" ascent="960" descent="-240" />
+<missing-glyph horiz-adv-x="500" />
+<glyph />
+<glyph />
+<glyph unicode="&#xd;" />
+<glyph unicode=" " />
+<glyph unicode="*" d="M100 500v200h259l-183 183l141 141l183 -183v259h200v-259l183 183l141 -141l-183 -183h259v-200h-259l183 -183l-141 -141l-183 183v-259h-200v259l-183 -183l-141 141l183 183h-259z" />
+<glyph unicode="+" d="M0 400v300h400v400h300v-400h400v-300h-400v-400h-300v400h-400z" />
+<glyph unicode="&#xa0;" />
+<glyph unicode="&#x2000;" horiz-adv-x="652" />
+<glyph unicode="&#x2001;" horiz-adv-x="1304" />
+<glyph unicode="&#x2002;" horiz-adv-x="652" />
+<glyph unicode="&#x2003;" horiz-adv-x="1304" />
+<glyph unicode="&#x2004;" horiz-adv-x="434" />
+<glyph unicode="&#x2005;" horiz-adv-x="326" />
+<glyph unicode="&#x2006;" horiz-adv-x="217" />
+<glyph unicode="&#x2007;" horiz-adv-x="217" />
+<glyph unicode="&#x2008;" horiz-adv-x="163" />
+<glyph unicode="&#x2009;" horiz-adv-x="260" />
+<glyph unicode="&#x200a;" horiz-adv-x="72" />
+<glyph unicode="&#x202f;" horiz-adv-x="260" />
+<glyph unicode="&#x205f;" horiz-adv-x="326" />
+<glyph unicode="&#x20ac;" d="M100 500l100 100h113q0 47 5 100h-218l100 100h135q37 167 112 257q117 141 297 141q242 0 354 -189q60 -103 66 -209h-181q0 55 -25.5 99t-63.5 68t-75 36.5t-67 12.5q-24 0 -52.5 -10t-62.5 -32t-65.5 -67t-50.5 -107h379l-100 -100h-300q-6 -46 -6 -100h406l-100 -100 h-300q9 -74 33 -132t52.5 -91t62 -54.5t59 -29t46.5 -7.5q29 0 66 13t75 37t63.5 67.5t25.5 96.5h174q-31 -172 -128 -278q-107 -117 -274 -117q-205 0 -324 158q-36 46 -69 131.5t-45 205.5h-217z" />
+<glyph unicode="&#x2212;" d="M200 400h900v300h-900v-300z" />
+<glyph unicode="&#x25fc;" horiz-adv-x="500" d="M0 0z" />
+<glyph unicode="&#x2601;" d="M-14 494q0 -80 56.5 -137t135.5 -57h750q120 0 205 86.5t85 207.5t-85 207t-205 86q-46 0 -90 -14q-44 97 -134.5 156.5t-200.5 59.5q-152 0 -260 -107.5t-108 -260.5q0 -25 2 -37q-66 -14 -108.5 -67.5t-42.5 -122.5z" />
+<glyph unicode="&#x2709;" d="M0 100l400 400l200 -200l200 200l400 -400h-1200zM0 300v600l300 -300zM0 1100l600 -603l600 603h-1200zM900 600l300 300v-600z" />
+<glyph unicode="&#x270f;" d="M-13 -13l333 112l-223 223zM187 403l214 -214l614 614l-214 214zM887 1103l214 -214l99 92q13 13 13 32.5t-13 33.5l-153 153q-15 13 -33 13t-33 -13z" />
+<glyph unicode="&#xe001;" d="M0 1200h1200l-500 -550v-550h300v-100h-800v100h300v550z" />
+<glyph unicode="&#xe002;" d="M14 84q18 -55 86 -75.5t147 5.5q65 21 109 69t44 90v606l600 155v-521q-64 16 -138 -7q-79 -26 -122.5 -83t-25.5 -111q18 -55 86 -75.5t147 4.5q70 23 111.5 63.5t41.5 95.5v881q0 10 -7 15.5t-17 2.5l-752 -193q-10 -3 -17 -12.5t-7 -19.5v-689q-64 17 -138 -7 q-79 -25 -122.5 -82t-25.5 -112z" />
+<glyph unicode="&#xe003;" d="M23 693q0 200 142 342t342 142t342 -142t142 -342q0 -142 -78 -261l300 -300q7 -8 7 -18t-7 -18l-109 -109q-8 -7 -18 -7t-18 7l-300 300q-119 -78 -261 -78q-200 0 -342 142t-142 342zM176 693q0 -136 97 -233t234 -97t233.5 96.5t96.5 233.5t-96.5 233.5t-233.5 96.5 t-234 -97t-97 -233z" />
+<glyph unicode="&#xe005;" d="M100 784q0 64 28 123t73 100.5t104.5 64t119 20.5t120 -38.5t104.5 -104.5q48 69 109.5 105t121.5 38t118.5 -20.5t102.5 -64t71 -100.5t27 -123q0 -57 -33.5 -117.5t-94 -124.5t-126.5 -127.5t-150 -152.5t-146 -174q-62 85 -145.5 174t-149.5 152.5t-126.5 127.5 t-94 124.5t-33.5 117.5z" />
+<glyph unicode="&#xe006;" d="M-72 800h479l146 400h2l146 -400h472l-382 -278l145 -449l-384 275l-382 -275l146 447zM168 71l2 1z" />
+<glyph unicode="&#xe007;" d="M-72 800h479l146 400h2l146 -400h472l-382 -278l145 -449l-384 275l-382 -275l146 447zM168 71l2 1zM237 700l196 -142l-73 -226l192 140l195 -141l-74 229l193 140h-235l-77 211l-78 -211h-239z" />
+<glyph unicode="&#xe008;" d="M0 0v143l400 257v100q-37 0 -68.5 74.5t-31.5 125.5v200q0 124 88 212t212 88t212 -88t88 -212v-200q0 -51 -31.5 -125.5t-68.5 -74.5v-100l400 -257v-143h-1200z" />
+<glyph unicode="&#xe009;" d="M0 0v1100h1200v-1100h-1200zM100 100h100v100h-100v-100zM100 300h100v100h-100v-100zM100 500h100v100h-100v-100zM100 700h100v100h-100v-100zM100 900h100v100h-100v-100zM300 100h600v400h-600v-400zM300 600h600v400h-600v-400zM1000 100h100v100h-100v-100z M1000 300h100v100h-100v-100zM1000 500h100v100h-100v-100zM1000 700h100v100h-100v-100zM1000 900h100v100h-100v-100z" />
+<glyph unicode="&#xe010;" d="M0 50v400q0 21 14.5 35.5t35.5 14.5h400q21 0 35.5 -14.5t14.5 -35.5v-400q0 -21 -14.5 -35.5t-35.5 -14.5h-400q-21 0 -35.5 14.5t-14.5 35.5zM0 650v400q0 21 14.5 35.5t35.5 14.5h400q21 0 35.5 -14.5t14.5 -35.5v-400q0 -21 -14.5 -35.5t-35.5 -14.5h-400 q-21 0 -35.5 14.5t-14.5 35.5zM600 50v400q0 21 14.5 35.5t35.5 14.5h400q21 0 35.5 -14.5t14.5 -35.5v-400q0 -21 -14.5 -35.5t-35.5 -14.5h-400q-21 0 -35.5 14.5t-14.5 35.5zM600 650v400q0 21 14.5 35.5t35.5 14.5h400q21 0 35.5 -14.5t14.5 -35.5v-400 q0 -21 -14.5 -35.5t-35.5 -14.5h-400q-21 0 -35.5 14.5t-14.5 35.5z" />
+<glyph unicode="&#xe011;" d="M0 50v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM0 450v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200 q-21 0 -35.5 14.5t-14.5 35.5zM0 850v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM400 50v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5 t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM400 450v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM400 850v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5 v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM800 50v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM800 450v200q0 21 14.5 35.5t35.5 14.5h200 q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM800 850v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5z" />
+<glyph unicode="&#xe012;" d="M0 50v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM0 450q0 -21 14.5 -35.5t35.5 -14.5h200q21 0 35.5 14.5t14.5 35.5v200q0 21 -14.5 35.5t-35.5 14.5h-200q-21 0 -35.5 -14.5 t-14.5 -35.5v-200zM0 850v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM400 50v200q0 21 14.5 35.5t35.5 14.5h700q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5 t-35.5 -14.5h-700q-21 0 -35.5 14.5t-14.5 35.5zM400 450v200q0 21 14.5 35.5t35.5 14.5h700q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-700q-21 0 -35.5 14.5t-14.5 35.5zM400 850v200q0 21 14.5 35.5t35.5 14.5h700q21 0 35.5 -14.5t14.5 -35.5 v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-700q-21 0 -35.5 14.5t-14.5 35.5z" />
+<glyph unicode="&#xe013;" d="M29 454l419 -420l818 820l-212 212l-607 -607l-206 207z" />
+<glyph unicode="&#xe014;" d="M106 318l282 282l-282 282l212 212l282 -282l282 282l212 -212l-282 -282l282 -282l-212 -212l-282 282l-282 -282z" />
+<glyph unicode="&#xe015;" d="M23 693q0 200 142 342t342 142t342 -142t142 -342q0 -142 -78 -261l300 -300q7 -8 7 -18t-7 -18l-109 -109q-8 -7 -18 -7t-18 7l-300 300q-119 -78 -261 -78q-200 0 -342 142t-142 342zM176 693q0 -136 97 -233t234 -97t233.5 96.5t96.5 233.5t-96.5 233.5t-233.5 96.5 t-234 -97t-97 -233zM300 600v200h100v100h200v-100h100v-200h-100v-100h-200v100h-100z" />
+<glyph unicode="&#xe016;" d="M23 694q0 200 142 342t342 142t342 -142t142 -342q0 -141 -78 -262l300 -299q7 -7 7 -18t-7 -18l-109 -109q-8 -8 -18 -8t-18 8l-300 300q-119 -78 -261 -78q-200 0 -342 142t-142 342zM176 694q0 -136 97 -233t234 -97t233.5 97t96.5 233t-96.5 233t-233.5 97t-234 -97 t-97 -233zM300 601h400v200h-400v-200z" />
+<glyph unicode="&#xe017;" d="M23 600q0 183 105 331t272 210v-166q-103 -55 -165 -155t-62 -220q0 -177 125 -302t302 -125t302 125t125 302q0 120 -62 220t-165 155v166q167 -62 272 -210t105 -331q0 -118 -45.5 -224.5t-123 -184t-184 -123t-224.5 -45.5t-224.5 45.5t-184 123t-123 184t-45.5 224.5 zM500 750q0 -21 14.5 -35.5t35.5 -14.5h100q21 0 35.5 14.5t14.5 35.5v400q0 21 -14.5 35.5t-35.5 14.5h-100q-21 0 -35.5 -14.5t-14.5 -35.5v-400z" />
+<glyph unicode="&#xe018;" d="M100 1h200v300h-200v-300zM400 1v500h200v-500h-200zM700 1v800h200v-800h-200zM1000 1v1200h200v-1200h-200z" />
+<glyph unicode="&#xe019;" d="M26 601q0 -33 6 -74l151 -38l2 -6q14 -49 38 -93l3 -5l-80 -134q45 -59 105 -105l133 81l5 -3q45 -26 94 -39l5 -2l38 -151q40 -5 74 -5q27 0 74 5l38 151l6 2q46 13 93 39l5 3l134 -81q56 44 104 105l-80 134l3 5q24 44 39 93l1 6l152 38q5 40 5 74q0 28 -5 73l-152 38 l-1 6q-16 51 -39 93l-3 5l80 134q-44 58 -104 105l-134 -81l-5 3q-45 25 -93 39l-6 1l-38 152q-40 5 -74 5q-27 0 -74 -5l-38 -152l-5 -1q-50 -14 -94 -39l-5 -3l-133 81q-59 -47 -105 -105l80 -134l-3 -5q-25 -47 -38 -93l-2 -6l-151 -38q-6 -48 -6 -73zM385 601 q0 88 63 151t152 63t152 -63t63 -151q0 -89 -63 -152t-152 -63t-152 63t-63 152z" />
+<glyph unicode="&#xe020;" d="M100 1025v50q0 10 7.5 17.5t17.5 7.5h275v100q0 41 29.5 70.5t70.5 29.5h300q41 0 70.5 -29.5t29.5 -70.5v-100h275q10 0 17.5 -7.5t7.5 -17.5v-50q0 -11 -7 -18t-18 -7h-1050q-11 0 -18 7t-7 18zM200 100v800h900v-800q0 -41 -29.5 -71t-70.5 -30h-700q-41 0 -70.5 30 t-29.5 71zM300 100h100v700h-100v-700zM500 100h100v700h-100v-700zM500 1100h300v100h-300v-100zM700 100h100v700h-100v-700zM900 100h100v700h-100v-700z" />
+<glyph unicode="&#xe021;" d="M1 601l656 644l644 -644h-200v-600h-300v400h-300v-400h-300v600h-200z" />
+<glyph unicode="&#xe022;" d="M100 25v1150q0 11 7 18t18 7h475v-500h400v-675q0 -11 -7 -18t-18 -7h-850q-11 0 -18 7t-7 18zM700 800v300l300 -300h-300z" />
+<glyph unicode="&#xe023;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM500 500v400h100 v-300h200v-100h-300z" />
+<glyph unicode="&#xe024;" d="M-100 0l431 1200h209l-21 -300h162l-20 300h208l431 -1200h-538l-41 400h-242l-40 -400h-539zM488 500h224l-27 300h-170z" />
+<glyph unicode="&#xe025;" d="M0 0v400h490l-290 300h200v500h300v-500h200l-290 -300h490v-400h-1100zM813 200h175v100h-175v-100z" />
+<glyph unicode="&#xe026;" d="M1 600q0 122 47.5 233t127.5 191t191 127.5t233 47.5t233 -47.5t191 -127.5t127.5 -191t47.5 -233t-47.5 -233t-127.5 -191t-191 -127.5t-233 -47.5t-233 47.5t-191 127.5t-127.5 191t-47.5 233zM188 600q0 -170 121 -291t291 -121t291 121t121 291t-121 291t-291 121 t-291 -121t-121 -291zM350 600h150v300h200v-300h150l-250 -300z" />
+<glyph unicode="&#xe027;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM350 600l250 300 l250 -300h-150v-300h-200v300h-150z" />
+<glyph unicode="&#xe028;" d="M0 25v475l200 700h800l199 -700l1 -475q0 -11 -7 -18t-18 -7h-1150q-11 0 -18 7t-7 18zM200 500h200l50 -200h300l50 200h200l-97 500h-606z" />
+<glyph unicode="&#xe029;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -172 121.5 -293t292.5 -121t292.5 121t121.5 293q0 171 -121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM500 397v401 l297 -200z" />
+<glyph unicode="&#xe030;" d="M23 600q0 -118 45.5 -224.5t123 -184t184 -123t224.5 -45.5t224.5 45.5t184 123t123 184t45.5 224.5h-150q0 -177 -125 -302t-302 -125t-302 125t-125 302t125 302t302 125q136 0 246 -81l-146 -146h400v400l-145 -145q-157 122 -355 122q-118 0 -224.5 -45.5t-184 -123 t-123 -184t-45.5 -224.5z" />
+<glyph unicode="&#xe031;" d="M23 600q0 118 45.5 224.5t123 184t184 123t224.5 45.5q198 0 355 -122l145 145v-400h-400l147 147q-112 80 -247 80q-177 0 -302 -125t-125 -302h-150zM100 0v400h400l-147 -147q112 -80 247 -80q177 0 302 125t125 302h150q0 -118 -45.5 -224.5t-123 -184t-184 -123 t-224.5 -45.5q-198 0 -355 122z" />
+<glyph unicode="&#xe032;" d="M100 0h1100v1200h-1100v-1200zM200 100v900h900v-900h-900zM300 200v100h100v-100h-100zM300 400v100h100v-100h-100zM300 600v100h100v-100h-100zM300 800v100h100v-100h-100zM500 200h500v100h-500v-100zM500 400v100h500v-100h-500zM500 600v100h500v-100h-500z M500 800v100h500v-100h-500z" />
+<glyph unicode="&#xe033;" d="M0 100v600q0 41 29.5 70.5t70.5 29.5h100v200q0 82 59 141t141 59h300q82 0 141 -59t59 -141v-200h100q41 0 70.5 -29.5t29.5 -70.5v-600q0 -41 -29.5 -70.5t-70.5 -29.5h-900q-41 0 -70.5 29.5t-29.5 70.5zM400 800h300v150q0 21 -14.5 35.5t-35.5 14.5h-200 q-21 0 -35.5 -14.5t-14.5 -35.5v-150z" />
+<glyph unicode="&#xe034;" d="M100 0v1100h100v-1100h-100zM300 400q60 60 127.5 84t127.5 17.5t122 -23t119 -30t110 -11t103 42t91 120.5v500q-40 -81 -101.5 -115.5t-127.5 -29.5t-138 25t-139.5 40t-125.5 25t-103 -29.5t-65 -115.5v-500z" />
+<glyph unicode="&#xe035;" d="M0 275q0 -11 7 -18t18 -7h50q11 0 18 7t7 18v300q0 127 70.5 231.5t184.5 161.5t245 57t245 -57t184.5 -161.5t70.5 -231.5v-300q0 -11 7 -18t18 -7h50q11 0 18 7t7 18v300q0 116 -49.5 227t-131 192.5t-192.5 131t-227 49.5t-227 -49.5t-192.5 -131t-131 -192.5 t-49.5 -227v-300zM200 20v460q0 8 6 14t14 6h160q8 0 14 -6t6 -14v-460q0 -8 -6 -14t-14 -6h-160q-8 0 -14 6t-6 14zM800 20v460q0 8 6 14t14 6h160q8 0 14 -6t6 -14v-460q0 -8 -6 -14t-14 -6h-160q-8 0 -14 6t-6 14z" />
+<glyph unicode="&#xe036;" d="M0 400h300l300 -200v800l-300 -200h-300v-400zM688 459l141 141l-141 141l71 71l141 -141l141 141l71 -71l-141 -141l141 -141l-71 -71l-141 141l-141 -141z" />
+<glyph unicode="&#xe037;" d="M0 400h300l300 -200v800l-300 -200h-300v-400zM700 857l69 53q111 -135 111 -310q0 -169 -106 -302l-67 54q86 110 86 248q0 146 -93 257z" />
+<glyph unicode="&#xe038;" d="M0 401v400h300l300 200v-800l-300 200h-300zM702 858l69 53q111 -135 111 -310q0 -170 -106 -303l-67 55q86 110 86 248q0 145 -93 257zM889 951l7 -8q123 -151 123 -344q0 -189 -119 -339l-7 -8l81 -66l6 8q142 178 142 405q0 230 -144 408l-6 8z" />
+<glyph unicode="&#xe039;" d="M0 0h500v500h-200v100h-100v-100h-200v-500zM0 600h100v100h400v100h100v100h-100v300h-500v-600zM100 100v300h300v-300h-300zM100 800v300h300v-300h-300zM200 200v100h100v-100h-100zM200 900h100v100h-100v-100zM500 500v100h300v-300h200v-100h-100v-100h-200v100 h-100v100h100v200h-200zM600 0v100h100v-100h-100zM600 1000h100v-300h200v-300h300v200h-200v100h200v500h-600v-200zM800 800v300h300v-300h-300zM900 0v100h300v-100h-300zM900 900v100h100v-100h-100zM1100 200v100h100v-100h-100z" />
+<glyph unicode="&#xe040;" d="M0 200h100v1000h-100v-1000zM100 0v100h300v-100h-300zM200 200v1000h100v-1000h-100zM500 0v91h100v-91h-100zM500 200v1000h200v-1000h-200zM700 0v91h100v-91h-100zM800 200v1000h100v-1000h-100zM900 0v91h200v-91h-200zM1000 200v1000h200v-1000h-200z" />
+<glyph unicode="&#xe041;" d="M0 700l1 475q0 10 7.5 17.5t17.5 7.5h474l700 -700l-500 -500zM148 953q0 -42 29 -71q30 -30 71.5 -30t71.5 30q29 29 29 71t-29 71q-30 30 -71.5 30t-71.5 -30q-29 -29 -29 -71z" />
+<glyph unicode="&#xe042;" d="M1 700l1 475q0 11 7 18t18 7h474l700 -700l-500 -500zM148 953q0 -42 30 -71q29 -30 71 -30t71 30q30 29 30 71t-30 71q-29 30 -71 30t-71 -30q-30 -29 -30 -71zM701 1200h100l700 -700l-500 -500l-50 50l450 450z" />
+<glyph unicode="&#xe043;" d="M100 0v1025l175 175h925v-1000l-100 -100v1000h-750l-100 -100h750v-1000h-900z" />
+<glyph unicode="&#xe044;" d="M200 0l450 444l450 -443v1150q0 20 -14.5 35t-35.5 15h-800q-21 0 -35.5 -15t-14.5 -35v-1151z" />
+<glyph unicode="&#xe045;" d="M0 100v700h200l100 -200h600l100 200h200v-700h-200v200h-800v-200h-200zM253 829l40 -124h592l62 124l-94 346q-2 11 -10 18t-18 7h-450q-10 0 -18 -7t-10 -18zM281 24l38 152q2 10 11.5 17t19.5 7h500q10 0 19.5 -7t11.5 -17l38 -152q2 -10 -3.5 -17t-15.5 -7h-600 q-10 0 -15.5 7t-3.5 17z" />
+<glyph unicode="&#xe046;" d="M0 200q0 -41 29.5 -70.5t70.5 -29.5h1000q41 0 70.5 29.5t29.5 70.5v600q0 41 -29.5 70.5t-70.5 29.5h-150q-4 8 -11.5 21.5t-33 48t-53 61t-69 48t-83.5 21.5h-200q-41 0 -82 -20.5t-70 -50t-52 -59t-34 -50.5l-12 -20h-150q-41 0 -70.5 -29.5t-29.5 -70.5v-600z M356 500q0 100 72 172t172 72t172 -72t72 -172t-72 -172t-172 -72t-172 72t-72 172zM494 500q0 -44 31 -75t75 -31t75 31t31 75t-31 75t-75 31t-75 -31t-31 -75zM900 700v100h100v-100h-100z" />
+<glyph unicode="&#xe047;" d="M53 0h365v66q-41 0 -72 11t-49 38t1 71l92 234h391l82 -222q16 -45 -5.5 -88.5t-74.5 -43.5v-66h417v66q-34 1 -74 43q-18 19 -33 42t-21 37l-6 13l-385 998h-93l-399 -1006q-24 -48 -52 -75q-12 -12 -33 -25t-36 -20l-15 -7v-66zM416 521l178 457l46 -140l116 -317h-340 z" />
+<glyph unicode="&#xe048;" d="M100 0v89q41 7 70.5 32.5t29.5 65.5v827q0 28 -1 39.5t-5.5 26t-15.5 21t-29 14t-49 14.5v71l471 -1q120 0 213 -88t93 -228q0 -55 -11.5 -101.5t-28 -74t-33.5 -47.5t-28 -28l-12 -7q8 -3 21.5 -9t48 -31.5t60.5 -58t47.5 -91.5t21.5 -129q0 -84 -59 -156.5t-142 -111 t-162 -38.5h-500zM400 200h161q89 0 153 48.5t64 132.5q0 90 -62.5 154.5t-156.5 64.5h-159v-400zM400 700h139q76 0 130 61.5t54 138.5q0 82 -84 130.5t-239 48.5v-379z" />
+<glyph unicode="&#xe049;" d="M200 0v57q77 7 134.5 40.5t65.5 80.5l173 849q10 56 -10 74t-91 37q-6 1 -10.5 2.5t-9.5 2.5v57h425l2 -57q-33 -8 -62 -25.5t-46 -37t-29.5 -38t-17.5 -30.5l-5 -12l-128 -825q-10 -52 14 -82t95 -36v-57h-500z" />
+<glyph unicode="&#xe050;" d="M-75 200h75v800h-75l125 167l125 -167h-75v-800h75l-125 -167zM300 900v300h150h700h150v-300h-50q0 29 -8 48.5t-18.5 30t-33.5 15t-39.5 5.5t-50.5 1h-200v-850l100 -50v-100h-400v100l100 50v850h-200q-34 0 -50.5 -1t-40 -5.5t-33.5 -15t-18.5 -30t-8.5 -48.5h-49z " />
+<glyph unicode="&#xe051;" d="M33 51l167 125v-75h800v75l167 -125l-167 -125v75h-800v-75zM100 901v300h150h700h150v-300h-50q0 29 -8 48.5t-18 30t-33.5 15t-40 5.5t-50.5 1h-200v-650l100 -50v-100h-400v100l100 50v650h-200q-34 0 -50.5 -1t-39.5 -5.5t-33.5 -15t-18.5 -30t-8 -48.5h-50z" />
+<glyph unicode="&#xe052;" d="M0 50q0 -20 14.5 -35t35.5 -15h1100q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-1100q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM0 350q0 -20 14.5 -35t35.5 -15h800q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-800q-21 0 -35.5 -14.5t-14.5 -35.5 v-100zM0 650q0 -20 14.5 -35t35.5 -15h1000q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-1000q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM0 950q0 -20 14.5 -35t35.5 -15h600q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-600q-21 0 -35.5 -14.5 t-14.5 -35.5v-100z" />
+<glyph unicode="&#xe053;" d="M0 50q0 -20 14.5 -35t35.5 -15h1100q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-1100q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM0 650q0 -20 14.5 -35t35.5 -15h1100q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-1100q-21 0 -35.5 -14.5t-14.5 -35.5 v-100zM200 350q0 -20 14.5 -35t35.5 -15h700q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-700q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM200 950q0 -20 14.5 -35t35.5 -15h700q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-700q-21 0 -35.5 -14.5 t-14.5 -35.5v-100z" />
+<glyph unicode="&#xe054;" d="M0 50v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100q-21 0 -35.5 15t-14.5 35zM100 650v100q0 21 14.5 35.5t35.5 14.5h1000q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1000q-21 0 -35.5 15 t-14.5 35zM300 350v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-800q-21 0 -35.5 15t-14.5 35zM500 950v100q0 21 14.5 35.5t35.5 14.5h600q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-600 q-21 0 -35.5 15t-14.5 35z" />
+<glyph unicode="&#xe055;" d="M0 50v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100q-21 0 -35.5 15t-14.5 35zM0 350v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100q-21 0 -35.5 15 t-14.5 35zM0 650v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100q-21 0 -35.5 15t-14.5 35zM0 950v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100 q-21 0 -35.5 15t-14.5 35z" />
+<glyph unicode="&#xe056;" d="M0 50v100q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-100q-21 0 -35.5 15t-14.5 35zM0 350v100q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-100q-21 0 -35.5 15 t-14.5 35zM0 650v100q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-100q-21 0 -35.5 15t-14.5 35zM0 950v100q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-100q-21 0 -35.5 15 t-14.5 35zM300 50v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-800q-21 0 -35.5 15t-14.5 35zM300 350v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-800 q-21 0 -35.5 15t-14.5 35zM300 650v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-800q-21 0 -35.5 15t-14.5 35zM300 950v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15 h-800q-21 0 -35.5 15t-14.5 35z" />
+<glyph unicode="&#xe057;" d="M-101 500v100h201v75l166 -125l-166 -125v75h-201zM300 0h100v1100h-100v-1100zM500 50q0 -20 14.5 -35t35.5 -15h600q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-600q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM500 350q0 -20 14.5 -35t35.5 -15h300q20 0 35 15t15 35 v100q0 21 -15 35.5t-35 14.5h-300q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM500 650q0 -20 14.5 -35t35.5 -15h500q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-500q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM500 950q0 -20 14.5 -35t35.5 -15h100q20 0 35 15t15 35v100 q0 21 -15 35.5t-35 14.5h-100q-21 0 -35.5 -14.5t-14.5 -35.5v-100z" />
+<glyph unicode="&#xe058;" d="M1 50q0 -20 14.5 -35t35.5 -15h600q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-600q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM1 350q0 -20 14.5 -35t35.5 -15h300q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-300q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM1 650 q0 -20 14.5 -35t35.5 -15h500q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-500q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM1 950q0 -20 14.5 -35t35.5 -15h100q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-100q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM801 0v1100h100v-1100 h-100zM934 550l167 -125v75h200v100h-200v75z" />
+<glyph unicode="&#xe059;" d="M0 275v650q0 31 22 53t53 22h750q31 0 53 -22t22 -53v-650q0 -31 -22 -53t-53 -22h-750q-31 0 -53 22t-22 53zM900 600l300 300v-600z" />
+<glyph unicode="&#xe060;" d="M0 44v1012q0 18 13 31t31 13h1112q19 0 31.5 -13t12.5 -31v-1012q0 -18 -12.5 -31t-31.5 -13h-1112q-18 0 -31 13t-13 31zM100 263l247 182l298 -131l-74 156l293 318l236 -288v500h-1000v-737zM208 750q0 56 39 95t95 39t95 -39t39 -95t-39 -95t-95 -39t-95 39t-39 95z " />
+<glyph unicode="&#xe062;" d="M148 745q0 124 60.5 231.5t165 172t226.5 64.5q123 0 227 -63t164.5 -169.5t60.5 -229.5t-73 -272q-73 -114 -166.5 -237t-150.5 -189l-57 -66q-10 9 -27 26t-66.5 70.5t-96 109t-104 135.5t-100.5 155q-63 139 -63 262zM342 772q0 -107 75.5 -182.5t181.5 -75.5 q107 0 182.5 75.5t75.5 182.5t-75.5 182t-182.5 75t-182 -75.5t-75 -181.5z" />
+<glyph unicode="&#xe063;" d="M1 600q0 122 47.5 233t127.5 191t191 127.5t233 47.5t233 -47.5t191 -127.5t127.5 -191t47.5 -233t-47.5 -233t-127.5 -191t-191 -127.5t-233 -47.5t-233 47.5t-191 127.5t-127.5 191t-47.5 233zM173 600q0 -177 125.5 -302t301.5 -125v854q-176 0 -301.5 -125 t-125.5 -302z" />
+<glyph unicode="&#xe064;" d="M117 406q0 94 34 186t88.5 172.5t112 159t115 177t87.5 194.5q21 -71 57.5 -142.5t76 -130.5t83 -118.5t82 -117t70 -116t50 -125.5t18.5 -136q0 -89 -39 -165.5t-102 -126.5t-140 -79.5t-156 -33.5q-114 6 -211.5 53t-161.5 139t-64 210zM243 414q14 -82 59.5 -136 t136.5 -80l16 98q-7 6 -18 17t-34 48t-33 77q-15 73 -14 143.5t10 122.5l9 51q-92 -110 -119.5 -185t-12.5 -156z" />
+<glyph unicode="&#xe065;" d="M0 400v300q0 165 117.5 282.5t282.5 117.5q366 -6 397 -14l-186 -186h-311q-41 0 -70.5 -29.5t-29.5 -70.5v-500q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v125l200 200v-225q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-165 0 -282.5 117.5 t-117.5 282.5zM436 341l161 50l412 412l-114 113l-405 -405zM995 1015l113 -113l113 113l-21 85l-92 28z" />
+<glyph unicode="&#xe066;" d="M0 400v300q0 165 117.5 282.5t282.5 117.5h261l2 -80q-133 -32 -218 -120h-145q-41 0 -70.5 -29.5t-29.5 -70.5v-500q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5l200 153v-53q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-165 0 -282.5 117.5t-117.5 282.5 zM423 524q30 38 81.5 64t103 35.5t99 14t77.5 3.5l29 -1v-209l360 324l-359 318v-216q-7 0 -19 -1t-48 -8t-69.5 -18.5t-76.5 -37t-76.5 -59t-62 -88t-39.5 -121.5z" />
+<glyph unicode="&#xe067;" d="M0 400v300q0 165 117.5 282.5t282.5 117.5h300q61 0 127 -23l-178 -177h-349q-41 0 -70.5 -29.5t-29.5 -70.5v-500q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v69l200 200v-169q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-165 0 -282.5 117.5 t-117.5 282.5zM342 632l283 -284l567 567l-137 137l-430 -431l-146 147z" />
+<glyph unicode="&#xe068;" d="M0 603l300 296v-198h200v200h-200l300 300l295 -300h-195v-200h200v198l300 -296l-300 -300v198h-200v-200h195l-295 -300l-300 300h200v200h-200v-198z" />
+<glyph unicode="&#xe069;" d="M200 50v1000q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-437l500 487v-1100l-500 488v-438q0 -21 -14.5 -35.5t-35.5 -14.5h-100q-21 0 -35.5 14.5t-14.5 35.5z" />
+<glyph unicode="&#xe070;" d="M0 50v1000q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-437l500 487v-487l500 487v-1100l-500 488v-488l-500 488v-438q0 -21 -14.5 -35.5t-35.5 -14.5h-100q-21 0 -35.5 14.5t-14.5 35.5z" />
+<glyph unicode="&#xe071;" d="M136 550l564 550v-487l500 487v-1100l-500 488v-488z" />
+<glyph unicode="&#xe072;" d="M200 0l900 550l-900 550v-1100z" />
+<glyph unicode="&#xe073;" d="M200 150q0 -21 14.5 -35.5t35.5 -14.5h200q21 0 35.5 14.5t14.5 35.5v800q0 21 -14.5 35.5t-35.5 14.5h-200q-21 0 -35.5 -14.5t-14.5 -35.5v-800zM600 150q0 -21 14.5 -35.5t35.5 -14.5h200q21 0 35.5 14.5t14.5 35.5v800q0 21 -14.5 35.5t-35.5 14.5h-200 q-21 0 -35.5 -14.5t-14.5 -35.5v-800z" />
+<glyph unicode="&#xe074;" d="M200 150q0 -20 14.5 -35t35.5 -15h800q21 0 35.5 15t14.5 35v800q0 21 -14.5 35.5t-35.5 14.5h-800q-21 0 -35.5 -14.5t-14.5 -35.5v-800z" />
+<glyph unicode="&#xe075;" d="M0 0v1100l500 -487v487l564 -550l-564 -550v488z" />
+<glyph unicode="&#xe076;" d="M0 0v1100l500 -487v487l500 -487v437q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-1000q0 -21 -14.5 -35.5t-35.5 -14.5h-100q-21 0 -35.5 14.5t-14.5 35.5v438l-500 -488v488z" />
+<glyph unicode="&#xe077;" d="M300 0v1100l500 -487v437q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-1000q0 -21 -14.5 -35.5t-35.5 -14.5h-100q-21 0 -35.5 14.5t-14.5 35.5v438z" />
+<glyph unicode="&#xe078;" d="M100 250v100q0 21 14.5 35.5t35.5 14.5h1000q21 0 35.5 -14.5t14.5 -35.5v-100q0 -21 -14.5 -35.5t-35.5 -14.5h-1000q-21 0 -35.5 14.5t-14.5 35.5zM100 500h1100l-550 564z" />
+<glyph unicode="&#xe079;" d="M185 599l592 -592l240 240l-353 353l353 353l-240 240z" />
+<glyph unicode="&#xe080;" d="M272 194l353 353l-353 353l241 240l572 -571l21 -22l-1 -1v-1l-592 -591z" />
+<glyph unicode="&#xe081;" d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -299.5t-217.5 -217.5t-299.5 -80t-299.5 80t-217.5 217.5t-80 299.5zM300 500h200v-200h200v200h200v200h-200v200h-200v-200h-200v-200z" />
+<glyph unicode="&#xe082;" d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -299.5t-217.5 -217.5t-299.5 -80t-299.5 80t-217.5 217.5t-80 299.5zM300 500h600v200h-600v-200z" />
+<glyph unicode="&#xe083;" d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -299.5t-217.5 -217.5t-299.5 -80t-299.5 80t-217.5 217.5t-80 299.5zM246 459l213 -213l141 142l141 -142l213 213l-142 141l142 141l-213 212l-141 -141l-141 142l-212 -213l141 -141 z" />
+<glyph unicode="&#xe084;" d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -299.5t-217.5 -217.5t-299.5 -80t-299.5 80t-217.5 217.5t-80 299.5zM270 551l276 -277l411 411l-175 174l-236 -236l-102 102z" />
+<glyph unicode="&#xe085;" d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -299.5t-217.5 -217.5t-299.5 -80t-299.5 80t-217.5 217.5t-80 299.5zM364 700h143q4 0 11.5 -1t11 -1t6.5 3t3 9t1 11t3.5 8.5t3.5 6t5.5 4t6.5 2.5t9 1.5t9 0.5h11.5h12.5 q19 0 30 -10t11 -26q0 -22 -4 -28t-27 -22q-5 -1 -12.5 -3t-27 -13.5t-34 -27t-26.5 -46t-11 -68.5h200q5 3 14 8t31.5 25.5t39.5 45.5t31 69t14 94q0 51 -17.5 89t-42 58t-58.5 32t-58.5 15t-51.5 3q-50 0 -90.5 -12t-75 -38.5t-53.5 -74.5t-19 -114zM500 300h200v100h-200 v-100z" />
+<glyph unicode="&#xe086;" d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -299.5t-217.5 -217.5t-299.5 -80t-299.5 80t-217.5 217.5t-80 299.5zM400 300h400v100h-100v300h-300v-100h100v-200h-100v-100zM500 800h200v100h-200v-100z" />
+<glyph unicode="&#xe087;" d="M0 500v200h195q31 125 98.5 199.5t206.5 100.5v200h200v-200q54 -20 113 -60t112.5 -105.5t71.5 -134.5h203v-200h-203q-25 -102 -116.5 -186t-180.5 -117v-197h-200v197q-140 27 -208 102.5t-98 200.5h-194zM290 500q24 -73 79.5 -127.5t130.5 -78.5v206h200v-206 q149 48 201 206h-201v200h200q-25 74 -75.5 127t-124.5 77v-204h-200v203q-75 -23 -130 -77t-79 -126h209v-200h-210z" />
+<glyph unicode="&#xe088;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM356 465l135 135 l-135 135l109 109l135 -135l135 135l109 -109l-135 -135l135 -135l-109 -109l-135 135l-135 -135z" />
+<glyph unicode="&#xe089;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM322 537l141 141 l87 -87l204 205l142 -142l-346 -345z" />
+<glyph unicode="&#xe090;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -115 62 -215l568 567q-100 62 -216 62q-171 0 -292.5 -121.5t-121.5 -292.5zM391 245q97 -59 209 -59q171 0 292.5 121.5t121.5 292.5 q0 112 -59 209z" />
+<glyph unicode="&#xe091;" d="M0 547l600 453v-300h600v-300h-600v-301z" />
+<glyph unicode="&#xe092;" d="M0 400v300h600v300l600 -453l-600 -448v301h-600z" />
+<glyph unicode="&#xe093;" d="M204 600l450 600l444 -600h-298v-600h-300v600h-296z" />
+<glyph unicode="&#xe094;" d="M104 600h296v600h300v-600h298l-449 -600z" />
+<glyph unicode="&#xe095;" d="M0 200q6 132 41 238.5t103.5 193t184 138t271.5 59.5v271l600 -453l-600 -448v301q-95 -2 -183 -20t-170 -52t-147 -92.5t-100 -135.5z" />
+<glyph unicode="&#xe096;" d="M0 0v400l129 -129l294 294l142 -142l-294 -294l129 -129h-400zM635 777l142 -142l294 294l129 -129v400h-400l129 -129z" />
+<glyph unicode="&#xe097;" d="M34 176l295 295l-129 129h400v-400l-129 130l-295 -295zM600 600v400l129 -129l295 295l142 -141l-295 -295l129 -130h-400z" />
+<glyph unicode="&#xe101;" d="M23 600q0 118 45.5 224.5t123 184t184 123t224.5 45.5t224.5 -45.5t184 -123t123 -184t45.5 -224.5t-45.5 -224.5t-123 -184t-184 -123t-224.5 -45.5t-224.5 45.5t-184 123t-123 184t-45.5 224.5zM456 851l58 -302q4 -20 21.5 -34.5t37.5 -14.5h54q20 0 37.5 14.5 t21.5 34.5l58 302q4 20 -8 34.5t-32 14.5h-207q-21 0 -33 -14.5t-8 -34.5zM500 300h200v100h-200v-100z" />
+<glyph unicode="&#xe102;" d="M0 800h100v-200h400v300h200v-300h400v200h100v100h-111q1 1 1 6.5t-1.5 15t-3.5 17.5l-34 172q-11 39 -41.5 63t-69.5 24q-32 0 -61 -17l-239 -144q-22 -13 -40 -35q-19 24 -40 36l-238 144q-33 18 -62 18q-39 0 -69.5 -23t-40.5 -61l-35 -177q-2 -8 -3 -18t-1 -15v-6 h-111v-100zM100 0h400v400h-400v-400zM200 900q-3 0 14 48t36 96l18 47l213 -191h-281zM700 0v400h400v-400h-400zM731 900l202 197q5 -12 12 -32.5t23 -64t25 -72t7 -28.5h-269z" />
+<glyph unicode="&#xe103;" d="M0 -22v143l216 193q-9 53 -13 83t-5.5 94t9 113t38.5 114t74 124q47 60 99.5 102.5t103 68t127.5 48t145.5 37.5t184.5 43.5t220 58.5q0 -189 -22 -343t-59 -258t-89 -181.5t-108.5 -120t-122 -68t-125.5 -30t-121.5 -1.5t-107.5 12.5t-87.5 17t-56.5 7.5l-99 -55z M238.5 300.5q19.5 -6.5 86.5 76.5q55 66 367 234q70 38 118.5 69.5t102 79t99 111.5t86.5 148q22 50 24 60t-6 19q-7 5 -17 5t-26.5 -14.5t-33.5 -39.5q-35 -51 -113.5 -108.5t-139.5 -89.5l-61 -32q-369 -197 -458 -401q-48 -111 -28.5 -117.5z" />
+<glyph unicode="&#xe104;" d="M111 408q0 -33 5 -63q9 -56 44 -119.5t105 -108.5q31 -21 64 -16t62 23.5t57 49.5t48 61.5t35 60.5q32 66 39 184.5t-13 157.5q79 -80 122 -164t26 -184q-5 -33 -20.5 -69.5t-37.5 -80.5q-10 -19 -14.5 -29t-12 -26t-9 -23.5t-3 -19t2.5 -15.5t11 -9.5t19.5 -5t30.5 2.5 t42 8q57 20 91 34t87.5 44.5t87 64t65.5 88.5t47 122q38 172 -44.5 341.5t-246.5 278.5q22 -44 43 -129q39 -159 -32 -154q-15 2 -33 9q-79 33 -120.5 100t-44 175.5t48.5 257.5q-13 -8 -34 -23.5t-72.5 -66.5t-88.5 -105.5t-60 -138t-8 -166.5q2 -12 8 -41.5t8 -43t6 -39.5 t3.5 -39.5t-1 -33.5t-6 -31.5t-13.5 -24t-21 -20.5t-31 -12q-38 -10 -67 13t-40.5 61.5t-15 81.5t10.5 75q-52 -46 -83.5 -101t-39 -107t-7.5 -85z" />
+<glyph unicode="&#xe105;" d="M-61 600l26 40q6 10 20 30t49 63.5t74.5 85.5t97 90t116.5 83.5t132.5 59t145.5 23.5t145.5 -23.5t132.5 -59t116.5 -83.5t97 -90t74.5 -85.5t49 -63.5t20 -30l26 -40l-26 -40q-6 -10 -20 -30t-49 -63.5t-74.5 -85.5t-97 -90t-116.5 -83.5t-132.5 -59t-145.5 -23.5 t-145.5 23.5t-132.5 59t-116.5 83.5t-97 90t-74.5 85.5t-49 63.5t-20 30zM120 600q7 -10 40.5 -58t56 -78.5t68 -77.5t87.5 -75t103 -49.5t125 -21.5t123.5 20t100.5 45.5t85.5 71.5t66.5 75.5t58 81.5t47 66q-1 1 -28.5 37.5t-42 55t-43.5 53t-57.5 63.5t-58.5 54 q49 -74 49 -163q0 -124 -88 -212t-212 -88t-212 88t-88 212q0 85 46 158q-102 -87 -226 -258zM377 656q49 -124 154 -191l105 105q-37 24 -75 72t-57 84l-20 36z" />
+<glyph unicode="&#xe106;" d="M-61 600l26 40q6 10 20 30t49 63.5t74.5 85.5t97 90t116.5 83.5t132.5 59t145.5 23.5q61 0 121 -17l37 142h148l-314 -1200h-148l37 143q-82 21 -165 71.5t-140 102t-109.5 112t-72 88.5t-29.5 43zM120 600q210 -282 393 -336l37 141q-107 18 -178.5 101.5t-71.5 193.5 q0 85 46 158q-102 -87 -226 -258zM377 656q49 -124 154 -191l47 47l23 87q-30 28 -59 69t-44 68l-14 26zM780 161l38 145q22 15 44.5 34t46 44t40.5 44t41 50.5t33.5 43.5t33 44t24.5 34q-97 127 -140 175l39 146q67 -54 131.5 -125.5t87.5 -103.5t36 -52l26 -40l-26 -40 q-7 -12 -25.5 -38t-63.5 -79.5t-95.5 -102.5t-124 -100t-146.5 -79z" />
+<glyph unicode="&#xe107;" d="M-97.5 34q13.5 -34 50.5 -34h1294q37 0 50.5 35.5t-7.5 67.5l-642 1056q-20 34 -48 36.5t-48 -29.5l-642 -1066q-21 -32 -7.5 -66zM155 200l445 723l445 -723h-345v100h-200v-100h-345zM500 600l100 -300l100 300v100h-200v-100z" />
+<glyph unicode="&#xe108;" d="M100 262v41q0 20 11 44.5t26 38.5l363 325v339q0 62 44 106t106 44t106 -44t44 -106v-339l363 -325q15 -14 26 -38.5t11 -44.5v-41q0 -20 -12 -26.5t-29 5.5l-359 249v-263q100 -91 100 -113v-64q0 -20 -13 -28.5t-32 0.5l-94 78h-222l-94 -78q-19 -9 -32 -0.5t-13 28.5 v64q0 22 100 113v263l-359 -249q-17 -12 -29 -5.5t-12 26.5z" />
+<glyph unicode="&#xe109;" d="M0 50q0 -20 14.5 -35t35.5 -15h1000q21 0 35.5 15t14.5 35v750h-1100v-750zM0 900h1100v150q0 21 -14.5 35.5t-35.5 14.5h-150v100h-100v-100h-500v100h-100v-100h-150q-21 0 -35.5 -14.5t-14.5 -35.5v-150zM100 100v100h100v-100h-100zM100 300v100h100v-100h-100z M100 500v100h100v-100h-100zM300 100v100h100v-100h-100zM300 300v100h100v-100h-100zM300 500v100h100v-100h-100zM500 100v100h100v-100h-100zM500 300v100h100v-100h-100zM500 500v100h100v-100h-100zM700 100v100h100v-100h-100zM700 300v100h100v-100h-100zM700 500 v100h100v-100h-100zM900 100v100h100v-100h-100zM900 300v100h100v-100h-100zM900 500v100h100v-100h-100z" />
+<glyph unicode="&#xe110;" d="M0 200v200h259l600 600h241v198l300 -295l-300 -300v197h-159l-600 -600h-341zM0 800h259l122 -122l141 142l-181 180h-341v-200zM678 381l141 142l122 -123h159v198l300 -295l-300 -300v197h-241z" />
+<glyph unicode="&#xe111;" d="M0 400v600q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-600q0 -41 -29.5 -70.5t-70.5 -29.5h-596l-304 -300v300h-100q-41 0 -70.5 29.5t-29.5 70.5z" />
+<glyph unicode="&#xe112;" d="M100 600v200h300v-250q0 -113 6 -145q17 -92 102 -117q39 -11 92 -11q37 0 66.5 5.5t50 15.5t36 24t24 31.5t14 37.5t7 42t2.5 45t0 47v25v250h300v-200q0 -42 -3 -83t-15 -104t-31.5 -116t-58 -109.5t-89 -96.5t-129 -65.5t-174.5 -25.5t-174.5 25.5t-129 65.5t-89 96.5 t-58 109.5t-31.5 116t-15 104t-3 83zM100 900v300h300v-300h-300zM800 900v300h300v-300h-300z" />
+<glyph unicode="&#xe113;" d="M-30 411l227 -227l352 353l353 -353l226 227l-578 579z" />
+<glyph unicode="&#xe114;" d="M70 797l580 -579l578 579l-226 227l-353 -353l-352 353z" />
+<glyph unicode="&#xe115;" d="M-198 700l299 283l300 -283h-203v-400h385l215 -200h-800v600h-196zM402 1000l215 -200h381v-400h-198l299 -283l299 283h-200v600h-796z" />
+<glyph unicode="&#xe116;" d="M18 939q-5 24 10 42q14 19 39 19h896l38 162q5 17 18.5 27.5t30.5 10.5h94q20 0 35 -14.5t15 -35.5t-15 -35.5t-35 -14.5h-54l-201 -961q-2 -4 -6 -10.5t-19 -17.5t-33 -11h-31v-50q0 -20 -14.5 -35t-35.5 -15t-35.5 15t-14.5 35v50h-300v-50q0 -20 -14.5 -35t-35.5 -15 t-35.5 15t-14.5 35v50h-50q-21 0 -35.5 15t-14.5 35q0 21 14.5 35.5t35.5 14.5h535l48 200h-633q-32 0 -54.5 21t-27.5 43z" />
+<glyph unicode="&#xe117;" d="M0 0v800h1200v-800h-1200zM0 900v100h200q0 41 29.5 70.5t70.5 29.5h300q41 0 70.5 -29.5t29.5 -70.5h500v-100h-1200z" />
+<glyph unicode="&#xe118;" d="M1 0l300 700h1200l-300 -700h-1200zM1 400v600h200q0 41 29.5 70.5t70.5 29.5h300q41 0 70.5 -29.5t29.5 -70.5h500v-200h-1000z" />
+<glyph unicode="&#xe119;" d="M302 300h198v600h-198l298 300l298 -300h-198v-600h198l-298 -300z" />
+<glyph unicode="&#xe120;" d="M0 600l300 298v-198h600v198l300 -298l-300 -297v197h-600v-197z" />
+<glyph unicode="&#xe121;" d="M0 100v100q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-100q0 -41 -29.5 -70.5t-70.5 -29.5h-1000q-41 0 -70.5 29.5t-29.5 70.5zM31 400l172 739q5 22 23 41.5t38 19.5h672q19 0 37.5 -22.5t23.5 -45.5l172 -732h-1138zM800 100h100v100h-100v-100z M1000 100h100v100h-100v-100z" />
+<glyph unicode="&#xe122;" d="M-101 600v50q0 24 25 49t50 38l25 13v-250l-11 5.5t-24 14t-30 21.5t-24 27.5t-11 31.5zM100 500v250v8v8v7t0.5 7t1.5 5.5t2 5t3 4t4.5 3.5t6 1.5t7.5 0.5h200l675 250v-850l-675 200h-38l47 -276q2 -12 -3 -17.5t-11 -6t-21 -0.5h-8h-83q-20 0 -34.5 14t-18.5 35 q-55 337 -55 351zM1100 200v850q0 21 14.5 35.5t35.5 14.5q20 0 35 -14.5t15 -35.5v-850q0 -20 -15 -35t-35 -15q-21 0 -35.5 15t-14.5 35z" />
+<glyph unicode="&#xe123;" d="M74 350q0 21 13.5 35.5t33.5 14.5h18l117 173l63 327q15 77 76 140t144 83l-18 32q-6 19 3 32t29 13h94q20 0 29 -10.5t3 -29.5q-18 -36 -18 -37q83 -19 144 -82.5t76 -140.5l63 -327l118 -173h17q20 0 33.5 -14.5t13.5 -35.5q0 -20 -13 -40t-31 -27q-8 -3 -23 -8.5 t-65 -20t-103 -25t-132.5 -19.5t-158.5 -9q-125 0 -245.5 20.5t-178.5 40.5l-58 20q-18 7 -31 27.5t-13 40.5zM497 110q12 -49 40 -79.5t63 -30.5t63 30.5t39 79.5q-48 -6 -102 -6t-103 6z" />
+<glyph unicode="&#xe124;" d="M21 445l233 -45l-78 -224l224 78l45 -233l155 179l155 -179l45 233l224 -78l-78 224l234 45l-180 155l180 156l-234 44l78 225l-224 -78l-45 233l-155 -180l-155 180l-45 -233l-224 78l78 -225l-233 -44l179 -156z" />
+<glyph unicode="&#xe125;" d="M0 200h200v600h-200v-600zM300 275q0 -75 100 -75h61q124 -100 139 -100h250q46 0 83 57l238 344q29 31 29 74v100q0 44 -30.5 84.5t-69.5 40.5h-328q28 118 28 125v150q0 44 -30.5 84.5t-69.5 40.5h-50q-27 0 -51 -20t-38 -48l-96 -198l-145 -196q-20 -26 -20 -63v-400z M400 300v375l150 213l100 212h50v-175l-50 -225h450v-125l-250 -375h-214l-136 100h-100z" />
+<glyph unicode="&#xe126;" d="M0 400v600h200v-600h-200zM300 525v400q0 75 100 75h61q124 100 139 100h250q46 0 83 -57l238 -344q29 -31 29 -74v-100q0 -44 -30.5 -84.5t-69.5 -40.5h-328q28 -118 28 -125v-150q0 -44 -30.5 -84.5t-69.5 -40.5h-50q-27 0 -51 20t-38 48l-96 198l-145 196 q-20 26 -20 63zM400 525l150 -212l100 -213h50v175l-50 225h450v125l-250 375h-214l-136 -100h-100v-375z" />
+<glyph unicode="&#xe127;" d="M8 200v600h200v-600h-200zM308 275v525q0 17 14 35.5t28 28.5l14 9l362 230q14 6 25 6q17 0 29 -12l109 -112q14 -14 14 -34q0 -18 -11 -32l-85 -121h302q85 0 138.5 -38t53.5 -110t-54.5 -111t-138.5 -39h-107l-130 -339q-7 -22 -20.5 -41.5t-28.5 -19.5h-341 q-7 0 -90 81t-83 94zM408 289l100 -89h293l131 339q6 21 19.5 41t28.5 20h203q16 0 25 15t9 36q0 20 -9 34.5t-25 14.5h-457h-6.5h-7.5t-6.5 0.5t-6 1t-5 1.5t-5.5 2.5t-4 4t-4 5.5q-5 12 -5 20q0 14 10 27l147 183l-86 83l-339 -236v-503z" />
+<glyph unicode="&#xe128;" d="M-101 651q0 72 54 110t139 38l302 -1l-85 121q-11 16 -11 32q0 21 14 34l109 113q13 12 29 12q11 0 25 -6l365 -230q7 -4 17 -10.5t26.5 -26t16.5 -36.5v-526q0 -13 -86 -93.5t-94 -80.5h-341q-16 0 -29.5 20t-19.5 41l-130 339h-107q-84 0 -139 39t-55 111zM-1 601h222 q15 0 28.5 -20.5t19.5 -40.5l131 -339h293l107 89v502l-343 237l-87 -83l145 -184q10 -11 10 -26q0 -11 -5 -20q-1 -3 -3.5 -5.5l-4 -4t-5 -2.5t-5.5 -1.5t-6.5 -1t-6.5 -0.5h-7.5h-6.5h-476v-100zM1000 201v600h200v-600h-200z" />
+<glyph unicode="&#xe129;" d="M97 719l230 -363q4 -6 10.5 -15.5t26 -25t36.5 -15.5h525q13 0 94 83t81 90v342q0 15 -20 28.5t-41 19.5l-339 131v106q0 84 -39 139t-111 55t-110 -53.5t-38 -138.5v-302l-121 84q-15 12 -33.5 11.5t-32.5 -13.5l-112 -110q-22 -22 -6 -53zM172 739l83 86l183 -146 q22 -18 47 -5q3 1 5.5 3.5l4 4t2.5 5t1.5 5.5t1 6.5t0.5 6.5v7.5v6.5v456q0 22 25 31t50 -0.5t25 -30.5v-202q0 -16 20 -29.5t41 -19.5l339 -130v-294l-89 -100h-503zM400 0v200h600v-200h-600z" />
+<glyph unicode="&#xe130;" d="M2 585q-16 -31 6 -53l112 -110q13 -13 32 -13.5t34 10.5l121 85q0 -51 -0.5 -153.5t-0.5 -148.5q0 -84 38.5 -138t110.5 -54t111 55t39 139v106l339 131q20 6 40.5 19.5t20.5 28.5v342q0 7 -81 90t-94 83h-525q-17 0 -35.5 -14t-28.5 -28l-10 -15zM77 565l236 339h503 l89 -100v-294l-340 -130q-20 -6 -40 -20t-20 -29v-202q0 -22 -25 -31t-50 0t-25 31v456v14.5t-1.5 11.5t-5 12t-9.5 7q-24 13 -46 -5l-184 -146zM305 1104v200h600v-200h-600z" />
+<glyph unicode="&#xe131;" d="M5 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t232.5 47.5q162 0 299.5 -80t217.5 -218t80 -300t-80 -299.5t-217.5 -217.5t-299.5 -80t-300 80t-218 217.5t-80 299.5zM298 701l2 -201h300l-2 -194l402 294l-402 298v-197h-300z" />
+<glyph unicode="&#xe132;" d="M0 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t231.5 47.5q122 0 232.5 -47.5t190.5 -127.5t127.5 -190.5t47.5 -232.5q0 -162 -80 -299.5t-218 -217.5t-300 -80t-299.5 80t-217.5 217.5t-80 299.5zM200 600l402 -294l-2 194h300l2 201h-300v197z" />
+<glyph unicode="&#xe133;" d="M5 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t232.5 47.5q162 0 299.5 -80t217.5 -218t80 -300t-80 -299.5t-217.5 -217.5t-299.5 -80t-300 80t-218 217.5t-80 299.5zM300 600h200v-300h200v300h200l-300 400z" />
+<glyph unicode="&#xe134;" d="M5 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t232.5 47.5q162 0 299.5 -80t217.5 -218t80 -300t-80 -299.5t-217.5 -217.5t-299.5 -80t-300 80t-218 217.5t-80 299.5zM300 600l300 -400l300 400h-200v300h-200v-300h-200z" />
+<glyph unicode="&#xe135;" d="M5 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t232.5 47.5q121 0 231.5 -47.5t190.5 -127.5t127.5 -190.5t47.5 -232.5q0 -162 -80 -299.5t-217.5 -217.5t-299.5 -80t-300 80t-218 217.5t-80 299.5zM254 780q-8 -33 5.5 -92.5t7.5 -87.5q0 -9 17 -44t16 -60 q12 0 23 -5.5t23 -15t20 -13.5q24 -12 108 -42q22 -8 53 -31.5t59.5 -38.5t57.5 -11q8 -18 -15 -55t-20 -57q42 -71 87 -80q0 -6 -3 -15.5t-3.5 -14.5t4.5 -17q104 -3 221 112q30 29 47 47t34.5 49t20.5 62q-14 9 -37 9.5t-36 7.5q-14 7 -49 15t-52 19q-9 0 -39.5 -0.5 t-46.5 -1.5t-39 -6.5t-39 -16.5q-50 -35 -66 -12q-4 2 -3.5 25.5t0.5 25.5q-6 13 -26.5 17t-24.5 7q2 22 -2 41t-16.5 28t-38.5 -20q-23 -25 -42 4q-19 28 -8 58q6 16 22 22q6 -1 26 -1.5t33.5 -4t19.5 -13.5q12 -19 32 -37.5t34 -27.5l14 -8q0 3 9.5 39.5t5.5 57.5 q-4 23 14.5 44.5t22.5 31.5q5 14 10 35t8.5 31t15.5 22.5t34 21.5q-6 18 10 37q8 0 23.5 -1.5t24.5 -1.5t20.5 4.5t20.5 15.5q-10 23 -30.5 42.5t-38 30t-49 26.5t-43.5 23q11 39 2 44q31 -13 58 -14.5t39 3.5l11 4q7 36 -16.5 53.5t-64.5 28.5t-56 23q-19 -3 -37 0 q-15 -12 -36.5 -21t-34.5 -12t-44 -8t-39 -6q-15 -3 -45.5 0.5t-45.5 -2.5q-21 -7 -52 -26.5t-34 -34.5q-3 -11 6.5 -22.5t8.5 -18.5q-3 -34 -27.5 -90.5t-29.5 -79.5zM518 916q3 12 16 30t16 25q10 -10 18.5 -10t14 6t14.5 14.5t16 12.5q0 -24 17 -66.5t17 -43.5 q-9 2 -31 5t-36 5t-32 8t-30 14zM692 1003h1h-1z" />
+<glyph unicode="&#xe136;" d="M0 164.5q0 21.5 15 37.5l600 599q-33 101 6 201.5t135 154.5q164 92 306 -9l-259 -138l145 -232l251 126q13 -175 -151 -267q-123 -70 -253 -23l-596 -596q-15 -16 -36.5 -16t-36.5 16l-111 110q-15 15 -15 36.5z" />
+<glyph unicode="&#xe137;" horiz-adv-x="1220" d="M0 196v100q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-100q0 -41 -29.5 -70.5t-70.5 -29.5h-1000q-41 0 -70.5 29.5t-29.5 70.5zM0 596v100q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-100q0 -41 -29.5 -70.5t-70.5 -29.5h-1000 q-41 0 -70.5 29.5t-29.5 70.5zM0 996v100q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-100q0 -41 -29.5 -70.5t-70.5 -29.5h-1000q-41 0 -70.5 29.5t-29.5 70.5zM600 596h500v100h-500v-100zM800 196h300v100h-300v-100zM900 996h200v100h-200v-100z" />
+<glyph unicode="&#xe138;" d="M100 1100v100h1000v-100h-1000zM150 1000h900l-350 -500v-300l-200 -200v500z" />
+<glyph unicode="&#xe139;" d="M0 200v200h1200v-200q0 -41 -29.5 -70.5t-70.5 -29.5h-1000q-41 0 -70.5 29.5t-29.5 70.5zM0 500v400q0 41 29.5 70.5t70.5 29.5h300v100q0 41 29.5 70.5t70.5 29.5h200q41 0 70.5 -29.5t29.5 -70.5v-100h300q41 0 70.5 -29.5t29.5 -70.5v-400h-500v100h-200v-100h-500z M500 1000h200v100h-200v-100z" />
+<glyph unicode="&#xe140;" d="M0 0v400l129 -129l200 200l142 -142l-200 -200l129 -129h-400zM0 800l129 129l200 -200l142 142l-200 200l129 129h-400v-400zM729 329l142 142l200 -200l129 129v-400h-400l129 129zM729 871l200 200l-129 129h400v-400l-129 129l-200 -200z" />
+<glyph unicode="&#xe141;" d="M0 596q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM182 596q0 -172 121.5 -293t292.5 -121t292.5 121t121.5 293q0 171 -121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM291 655 q0 23 15.5 38.5t38.5 15.5t39 -16t16 -38q0 -23 -16 -39t-39 -16q-22 0 -38 16t-16 39zM400 850q0 22 16 38.5t39 16.5q22 0 38 -16t16 -39t-16 -39t-38 -16q-23 0 -39 16.5t-16 38.5zM514 609q0 32 20.5 56.5t51.5 29.5l122 126l1 1q-9 14 -9 28q0 22 16 38.5t39 16.5 q22 0 38 -16t16 -39t-16 -39t-38 -16q-14 0 -29 10l-55 -145q17 -22 17 -51q0 -36 -25.5 -61.5t-61.5 -25.5t-61.5 25.5t-25.5 61.5zM800 655q0 22 16 38t39 16t38.5 -15.5t15.5 -38.5t-16 -39t-38 -16q-23 0 -39 16t-16 39z" />
+<glyph unicode="&#xe142;" d="M-40 375q-13 -95 35 -173q35 -57 94 -89t129 -32q63 0 119 28q33 16 65 40.5t52.5 45.5t59.5 64q40 44 57 61l394 394q35 35 47 84t-3 96q-27 87 -117 104q-20 2 -29 2q-46 0 -78.5 -16.5t-67.5 -51.5l-389 -396l-7 -7l69 -67l377 373q20 22 39 38q23 23 50 23 q38 0 53 -36q16 -39 -20 -75l-547 -547q-52 -52 -125 -52q-55 0 -100 33t-54 96q-5 35 2.5 66t31.5 63t42 50t56 54q24 21 44 41l348 348q52 52 82.5 79.5t84 54t107.5 26.5q25 0 48 -4q95 -17 154 -94.5t51 -175.5q-7 -101 -98 -192l-252 -249l-253 -256l7 -7l69 -60 l517 511q67 67 95 157t11 183q-16 87 -67 154t-130 103q-69 33 -152 33q-107 0 -197 -55q-40 -24 -111 -95l-512 -512q-68 -68 -81 -163z" />
+<glyph unicode="&#xe143;" d="M80 784q0 131 98.5 229.5t230.5 98.5q143 0 241 -129q103 129 246 129q129 0 226 -98.5t97 -229.5q0 -46 -17.5 -91t-61 -99t-77 -89.5t-104.5 -105.5q-197 -191 -293 -322l-17 -23l-16 23q-43 58 -100 122.5t-92 99.5t-101 100q-71 70 -104.5 105.5t-77 89.5t-61 99 t-17.5 91zM250 784q0 -27 30.5 -70t61.5 -75.5t95 -94.5l22 -22q93 -90 190 -201q82 92 195 203l12 12q64 62 97.5 97t64.5 79t31 72q0 71 -48 119.5t-105 48.5q-74 0 -132 -83l-118 -171l-114 174q-51 80 -123 80q-60 0 -109.5 -49.5t-49.5 -118.5z" />
+<glyph unicode="&#xe144;" d="M57 353q0 -95 66 -159l141 -142q68 -66 159 -66q93 0 159 66l283 283q66 66 66 159t-66 159l-141 141q-8 9 -19 17l-105 -105l212 -212l-389 -389l-247 248l95 95l-18 18q-46 45 -75 101l-55 -55q-66 -66 -66 -159zM269 706q0 -93 66 -159l141 -141q7 -7 19 -17l105 105 l-212 212l389 389l247 -247l-95 -96l18 -17q47 -49 77 -100l29 29q35 35 62.5 88t27.5 96q0 93 -66 159l-141 141q-66 66 -159 66q-95 0 -159 -66l-283 -283q-66 -64 -66 -159z" />
+<glyph unicode="&#xe145;" d="M200 100v953q0 21 30 46t81 48t129 38t163 15t162 -15t127 -38t79 -48t29 -46v-953q0 -41 -29.5 -70.5t-70.5 -29.5h-600q-41 0 -70.5 29.5t-29.5 70.5zM300 300h600v700h-600v-700zM496 150q0 -43 30.5 -73.5t73.5 -30.5t73.5 30.5t30.5 73.5t-30.5 73.5t-73.5 30.5 t-73.5 -30.5t-30.5 -73.5z" />
+<glyph unicode="&#xe146;" d="M0 0l303 380l207 208l-210 212h300l267 279l-35 36q-15 14 -15 35t15 35q14 15 35 15t35 -15l283 -282q15 -15 15 -36t-15 -35q-14 -15 -35 -15t-35 15l-36 35l-279 -267v-300l-212 210l-208 -207z" />
+<glyph unicode="&#xe148;" d="M295 433h139q5 -77 48.5 -126.5t117.5 -64.5v335q-6 1 -15.5 4t-11.5 3q-46 14 -79 26.5t-72 36t-62.5 52t-40 72.5t-16.5 99q0 92 44 159.5t109 101t144 40.5v78h100v-79q38 -4 72.5 -13.5t75.5 -31.5t71 -53.5t51.5 -84t24.5 -118.5h-159q-8 72 -35 109.5t-101 50.5 v-307l64 -14q34 -7 64 -16.5t70 -31.5t67.5 -52t47.5 -80.5t20 -112.5q0 -139 -89 -224t-244 -96v-77h-100v78q-152 17 -237 104q-40 40 -52.5 93.5t-15.5 139.5zM466 889q0 -29 8 -51t16.5 -34t29.5 -22.5t31 -13.5t38 -10q7 -2 11 -3v274q-61 -8 -97.5 -37.5t-36.5 -102.5 zM700 237q170 18 170 151q0 64 -44 99.5t-126 60.5v-311z" />
+<glyph unicode="&#xe149;" d="M100 600v100h166q-24 49 -44 104q-10 26 -14.5 55.5t-3 72.5t25 90t68.5 87q97 88 263 88q129 0 230 -89t101 -208h-153q0 52 -34 89.5t-74 51.5t-76 14q-37 0 -79 -14.5t-62 -35.5q-41 -44 -41 -101q0 -28 16.5 -69.5t28 -62.5t41.5 -72h241v-100h-197q8 -50 -2.5 -115 t-31.5 -94q-41 -59 -99 -113q35 11 84 18t70 7q33 1 103 -16t103 -17q76 0 136 30l50 -147q-41 -25 -80.5 -36.5t-59 -13t-61.5 -1.5q-23 0 -128 33t-155 29q-39 -4 -82 -17t-66 -25l-24 -11l-55 145l16.5 11t15.5 10t13.5 9.5t14.5 12t14.5 14t17.5 18.5q48 55 54 126.5 t-30 142.5h-221z" />
+<glyph unicode="&#xe150;" d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM602 900l298 300l298 -300h-198v-900h-200v900h-198z" />
+<glyph unicode="&#xe151;" d="M2 300h198v900h200v-900h198l-298 -300zM700 0v200h100v-100h200v-100h-300zM700 400v100h300v-200h-99v-100h-100v100h99v100h-200zM700 700v500h300v-500h-100v100h-100v-100h-100zM801 900h100v200h-100v-200z" />
+<glyph unicode="&#xe152;" d="M2 300h198v900h200v-900h198l-298 -300zM700 0v500h300v-500h-100v100h-100v-100h-100zM700 700v200h100v-100h200v-100h-300zM700 1100v100h300v-200h-99v-100h-100v100h99v100h-200zM801 200h100v200h-100v-200z" />
+<glyph unicode="&#xe153;" d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM800 100v400h300v-500h-100v100h-200zM800 1100v100h200v-500h-100v400h-100zM901 200h100v200h-100v-200z" />
+<glyph unicode="&#xe154;" d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM800 400v100h200v-500h-100v400h-100zM800 800v400h300v-500h-100v100h-200zM901 900h100v200h-100v-200z" />
+<glyph unicode="&#xe155;" d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM700 100v200h500v-200h-500zM700 400v200h400v-200h-400zM700 700v200h300v-200h-300zM700 1000v200h200v-200h-200z" />
+<glyph unicode="&#xe156;" d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM700 100v200h200v-200h-200zM700 400v200h300v-200h-300zM700 700v200h400v-200h-400zM700 1000v200h500v-200h-500z" />
+<glyph unicode="&#xe157;" d="M0 400v300q0 165 117.5 282.5t282.5 117.5h300q162 0 281 -118.5t119 -281.5v-300q0 -165 -118.5 -282.5t-281.5 -117.5h-300q-165 0 -282.5 117.5t-117.5 282.5zM200 300q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5 h-500q-41 0 -70.5 -29.5t-29.5 -70.5v-500z" />
+<glyph unicode="&#xe158;" d="M0 400v300q0 163 119 281.5t281 118.5h300q165 0 282.5 -117.5t117.5 -282.5v-300q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-163 0 -281.5 117.5t-118.5 282.5zM200 300q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5 h-500q-41 0 -70.5 -29.5t-29.5 -70.5v-500zM400 300l333 250l-333 250v-500z" />
+<glyph unicode="&#xe159;" d="M0 400v300q0 163 117.5 281.5t282.5 118.5h300q163 0 281.5 -119t118.5 -281v-300q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-165 0 -282.5 117.5t-117.5 282.5zM200 300q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5 h-500q-41 0 -70.5 -29.5t-29.5 -70.5v-500zM300 700l250 -333l250 333h-500z" />
+<glyph unicode="&#xe160;" d="M0 400v300q0 165 117.5 282.5t282.5 117.5h300q165 0 282.5 -117.5t117.5 -282.5v-300q0 -162 -118.5 -281t-281.5 -119h-300q-165 0 -282.5 118.5t-117.5 281.5zM200 300q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5 h-500q-41 0 -70.5 -29.5t-29.5 -70.5v-500zM300 400h500l-250 333z" />
+<glyph unicode="&#xe161;" d="M0 400v300h300v200l400 -350l-400 -350v200h-300zM500 0v200h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5h-500v200h400q165 0 282.5 -117.5t117.5 -282.5v-300q0 -165 -117.5 -282.5t-282.5 -117.5h-400z" />
+<glyph unicode="&#xe162;" d="M217 519q8 -19 31 -19h302q-155 -438 -160 -458q-5 -21 4 -32l9 -8h9q14 0 26 15q11 13 274.5 321.5t264.5 308.5q14 19 5 36q-8 17 -31 17l-301 -1q1 4 78 219.5t79 227.5q2 15 -5 27l-9 9h-9q-15 0 -25 -16q-4 -6 -98 -111.5t-228.5 -257t-209.5 -237.5q-16 -19 -6 -41 z" />
+<glyph unicode="&#xe163;" d="M0 400q0 -165 117.5 -282.5t282.5 -117.5h300q47 0 100 15v185h-500q-41 0 -70.5 29.5t-29.5 70.5v500q0 41 29.5 70.5t70.5 29.5h500v185q-14 4 -114 7.5t-193 5.5l-93 2q-165 0 -282.5 -117.5t-117.5 -282.5v-300zM600 400v300h300v200l400 -350l-400 -350v200h-300z " />
+<glyph unicode="&#xe164;" d="M0 400q0 -165 117.5 -282.5t282.5 -117.5h300q163 0 281.5 117.5t118.5 282.5v98l-78 73l-122 -123v-148q0 -41 -29.5 -70.5t-70.5 -29.5h-500q-41 0 -70.5 29.5t-29.5 70.5v500q0 41 29.5 70.5t70.5 29.5h156l118 122l-74 78h-100q-165 0 -282.5 -117.5t-117.5 -282.5 v-300zM496 709l353 342l-149 149h500v-500l-149 149l-342 -353z" />
+<glyph unicode="&#xe165;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM406 600 q0 80 57 137t137 57t137 -57t57 -137t-57 -137t-137 -57t-137 57t-57 137z" />
+<glyph unicode="&#xe166;" d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM100 800l445 -500l450 500h-295v400h-300v-400h-300zM900 150h100v50h-100v-50z" />
+<glyph unicode="&#xe167;" d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM100 700h300v-300h300v300h295l-445 500zM900 150h100v50h-100v-50z" />
+<glyph unicode="&#xe168;" d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM100 705l305 -305l596 596l-154 155l-442 -442l-150 151zM900 150h100v50h-100v-50z" />
+<glyph unicode="&#xe169;" d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM100 988l97 -98l212 213l-97 97zM200 400l697 1l3 699l-250 -239l-149 149l-212 -212l149 -149zM900 150h100v50h-100v-50z" />
+<glyph unicode="&#xe170;" d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM200 612l212 -212l98 97l-213 212zM300 1200l239 -250l-149 -149l212 -212l149 148l249 -237l-1 697zM900 150h100v50h-100v-50z" />
+<glyph unicode="&#xe171;" d="M23 415l1177 784v-1079l-475 272l-310 -393v416h-392zM494 210l672 938l-672 -712v-226z" />
+<glyph unicode="&#xe172;" d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-850q0 -21 -15 -35.5t-35 -14.5h-150v400h-700v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM600 1000h100v200h-100v-200z" />
+<glyph unicode="&#xe173;" d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-218l-276 -275l-120 120l-126 -127h-378v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM581 306l123 123l120 -120l353 352l123 -123l-475 -476zM600 1000h100v200h-100v-200z" />
+<glyph unicode="&#xe174;" d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-269l-103 -103l-170 170l-298 -298h-329v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM600 1000h100v200h-100v-200zM700 133l170 170l-170 170l127 127l170 -170l170 170l127 -128l-170 -169l170 -170 l-127 -127l-170 170l-170 -170z" />
+<glyph unicode="&#xe175;" d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-300h-400v-200h-500v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM600 300l300 -300l300 300h-200v300h-200v-300h-200zM600 1000v200h100v-200h-100z" />
+<glyph unicode="&#xe176;" d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-402l-200 200l-298 -298h-402v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM600 300h200v-300h200v300h200l-300 300zM600 1000v200h100v-200h-100z" />
+<glyph unicode="&#xe177;" d="M0 250q0 -21 14.5 -35.5t35.5 -14.5h1100q21 0 35.5 14.5t14.5 35.5v550h-1200v-550zM0 900h1200v150q0 21 -14.5 35.5t-35.5 14.5h-1100q-21 0 -35.5 -14.5t-14.5 -35.5v-150zM100 300v200h400v-200h-400z" />
+<glyph unicode="&#xe178;" d="M0 400l300 298v-198h400v-200h-400v-198zM100 800v200h100v-200h-100zM300 800v200h100v-200h-100zM500 800v200h400v198l300 -298l-300 -298v198h-400zM800 300v200h100v-200h-100zM1000 300h100v200h-100v-200z" />
+<glyph unicode="&#xe179;" d="M100 700v400l50 100l50 -100v-300h100v300l50 100l50 -100v-300h100v300l50 100l50 -100v-400l-100 -203v-447q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5v447zM800 597q0 -29 10.5 -55.5t25 -43t29 -28.5t25.5 -18l10 -5v-397q0 -21 14.5 -35.5 t35.5 -14.5h200q21 0 35.5 14.5t14.5 35.5v1106q0 31 -18 40.5t-44 -7.5l-276 -116q-25 -17 -43.5 -51.5t-18.5 -65.5v-359z" />
+<glyph unicode="&#xe180;" d="M100 0h400v56q-75 0 -87.5 6t-12.5 44v394h500v-394q0 -38 -12.5 -44t-87.5 -6v-56h400v56q-4 0 -11 0.5t-24 3t-30 7t-24 15t-11 24.5v888q0 22 25 34.5t50 13.5l25 2v56h-400v-56q75 0 87.5 -6t12.5 -44v-394h-500v394q0 38 12.5 44t87.5 6v56h-400v-56q4 0 11 -0.5 t24 -3t30 -7t24 -15t11 -24.5v-888q0 -22 -25 -34.5t-50 -13.5l-25 -2v-56z" />
+<glyph unicode="&#xe181;" d="M0 300q0 -41 29.5 -70.5t70.5 -29.5h300q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5h-300q-41 0 -70.5 -29.5t-29.5 -70.5v-500zM100 100h400l200 200h105l295 98v-298h-425l-100 -100h-375zM100 300v200h300v-200h-300zM100 600v200h300v-200h-300z M100 1000h400l200 -200v-98l295 98h105v200h-425l-100 100h-375zM700 402v163l400 133v-163z" />
+<glyph unicode="&#xe182;" d="M16.5 974.5q0.5 -21.5 16 -90t46.5 -140t104 -177.5t175 -208q103 -103 207.5 -176t180 -103.5t137 -47t92.5 -16.5l31 1l163 162q17 18 13.5 41t-22.5 37l-192 136q-19 14 -45 12t-42 -19l-118 -118q-142 101 -268 227t-227 268l118 118q17 17 20 41.5t-11 44.5 l-139 194q-14 19 -36.5 22t-40.5 -14l-162 -162q-1 -11 -0.5 -32.5z" />
+<glyph unicode="&#xe183;" d="M0 50v212q0 20 10.5 45.5t24.5 39.5l365 303v50q0 4 1 10.5t12 22.5t30 28.5t60 23t97 10.5t97 -10t60 -23.5t30 -27.5t12 -24l1 -10v-50l365 -303q14 -14 24.5 -39.5t10.5 -45.5v-212q0 -21 -14.5 -35.5t-35.5 -14.5h-1100q-20 0 -35 14.5t-15 35.5zM0 712 q0 -21 14.5 -33.5t34.5 -8.5l202 33q20 4 34.5 21t14.5 38v146q141 24 300 24t300 -24v-146q0 -21 14.5 -38t34.5 -21l202 -33q20 -4 34.5 8.5t14.5 33.5v200q-6 8 -19 20.5t-63 45t-112 57t-171 45t-235 20.5q-92 0 -175 -10.5t-141.5 -27t-108.5 -36.5t-81.5 -40 t-53.5 -36.5t-31 -27.5l-9 -10v-200z" />
+<glyph unicode="&#xe184;" d="M100 0v100h1100v-100h-1100zM175 200h950l-125 150v250l100 100v400h-100v-200h-100v200h-200v-200h-100v200h-200v-200h-100v200h-100v-400l100 -100v-250z" />
+<glyph unicode="&#xe185;" d="M100 0h300v400q0 41 -29.5 70.5t-70.5 29.5h-100q-41 0 -70.5 -29.5t-29.5 -70.5v-400zM500 0v1000q0 41 29.5 70.5t70.5 29.5h100q41 0 70.5 -29.5t29.5 -70.5v-1000h-300zM900 0v700q0 41 29.5 70.5t70.5 29.5h100q41 0 70.5 -29.5t29.5 -70.5v-700h-300z" />
+<glyph unicode="&#xe186;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h300v300h-200v100h200v100h-300v-300h200v-100h-200v-100zM600 300h200v100h100v300h-100v100h-200v-500 zM700 400v300h100v-300h-100z" />
+<glyph unicode="&#xe187;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h100v200h100v-200h100v500h-100v-200h-100v200h-100v-500zM600 300h200v100h100v300h-100v100h-200v-500 zM700 400v300h100v-300h-100z" />
+<glyph unicode="&#xe188;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h300v100h-200v300h200v100h-300v-500zM600 300h300v100h-200v300h200v100h-300v-500z" />
+<glyph unicode="&#xe189;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 550l300 -150v300zM600 400l300 150l-300 150v-300z" />
+<glyph unicode="&#xe190;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300v500h700v-500h-700zM300 400h130q41 0 68 42t27 107t-28.5 108t-66.5 43h-130v-300zM575 549 q0 -65 27 -107t68 -42h130v300h-130q-38 0 -66.5 -43t-28.5 -108z" />
+<glyph unicode="&#xe191;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h300v300h-200v100h200v100h-300v-300h200v-100h-200v-100zM601 300h100v100h-100v-100zM700 700h100 v-400h100v500h-200v-100z" />
+<glyph unicode="&#xe192;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h300v400h-200v100h-100v-500zM301 400v200h100v-200h-100zM601 300h100v100h-100v-100zM700 700h100 v-400h100v500h-200v-100z" />
+<glyph unicode="&#xe193;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 700v100h300v-300h-99v-100h-100v100h99v200h-200zM201 300v100h100v-100h-100zM601 300v100h100v-100h-100z M700 700v100h200v-500h-100v400h-100z" />
+<glyph unicode="&#xe194;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM400 500v200 l100 100h300v-100h-300v-200h300v-100h-300z" />
+<glyph unicode="&#xe195;" d="M0 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM182 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM400 400v400h300 l100 -100v-100h-100v100h-200v-100h200v-100h-200v-100h-100zM700 400v100h100v-100h-100z" />
+<glyph unicode="&#xe197;" d="M-14 494q0 -80 56.5 -137t135.5 -57h222v300h400v-300h128q120 0 205 86.5t85 207.5t-85 207t-205 86q-46 0 -90 -14q-44 97 -134.5 156.5t-200.5 59.5q-152 0 -260 -107.5t-108 -260.5q0 -25 2 -37q-66 -14 -108.5 -67.5t-42.5 -122.5zM300 200h200v300h200v-300h200 l-300 -300z" />
+<glyph unicode="&#xe198;" d="M-14 494q0 -80 56.5 -137t135.5 -57h8l414 414l403 -403q94 26 154.5 104.5t60.5 178.5q0 120 -85 206.5t-205 86.5q-46 0 -90 -14q-44 97 -134.5 156.5t-200.5 59.5q-152 0 -260 -107.5t-108 -260.5q0 -25 2 -37q-66 -14 -108.5 -67.5t-42.5 -122.5zM300 200l300 300 l300 -300h-200v-300h-200v300h-200z" />
+<glyph unicode="&#xe199;" d="M100 200h400v-155l-75 -45h350l-75 45v155h400l-270 300h170l-270 300h170l-300 333l-300 -333h170l-270 -300h170z" />
+<glyph unicode="&#xe200;" d="M121 700q0 -53 28.5 -97t75.5 -65q-4 -16 -4 -38q0 -74 52.5 -126.5t126.5 -52.5q56 0 100 30v-306l-75 -45h350l-75 45v306q46 -30 100 -30q74 0 126.5 52.5t52.5 126.5q0 24 -9 55q50 32 79.5 83t29.5 112q0 90 -61.5 155.5t-150.5 71.5q-26 89 -99.5 145.5 t-167.5 56.5q-116 0 -197.5 -81.5t-81.5 -197.5q0 -4 1 -11.5t1 -11.5q-14 2 -23 2q-74 0 -126.5 -52.5t-52.5 -126.5z" />
+</font>
+</defs></svg> \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.ttf b/testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.ttf
new file mode 100644
index 0000000000..67fa00bf83
--- /dev/null
+++ b/testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.ttf
Binary files differ
diff --git a/testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.woff b/testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.woff
new file mode 100644
index 0000000000..8c54182aa5
--- /dev/null
+++ b/testing/web-platform/tests/tools/runner/fonts/glyphicons-halflings-regular.woff
Binary files differ
diff --git a/testing/web-platform/tests/tools/runner/index.html b/testing/web-platform/tests/tools/runner/index.html
new file mode 100644
index 0000000000..57940d02d0
--- /dev/null
+++ b/testing/web-platform/tests/tools/runner/index.html
@@ -0,0 +1,235 @@
+<!DOCTYPE html>
+<html lang=en>
+<meta charset=UTF-8>
+<title>web-platform-tests Runner</title>
+<link rel='stylesheet' href='css/bootstrap.min.css'>
+<link rel='stylesheet' href='css/bootstrap-theme.min.css'>
+<link rel='stylesheet' href='runner.css'>
+<script src='/common/get-host-info.sub.js'></script>
+<script src='runner.js'></script>
+
+<header class="navbar navbar-inverse navbar-fixed-top">
+ <div class="container">
+ <div class="navbar-header">
+ <a class="navbar-brand" href="#">
+ <img src='/images/wpt-logo/wpt-logo-lightblue-bg.svg' width='50' height='50' style='border-radius: 50%' alt='WPT Logo'>
+ web-platform-tests Runner
+ </a>
+ </div>
+ </div>
+</header>
+
+<div class="container">
+ <div class="alert alert-warning">
+ This runner does not fully support all test types and has a number of
+ <a href="https://github.com/web-platform-tests/wpt/labels/runner">known issues</a>.
+ <span id="runner-contact-info"></span>
+    <code>./wpt run</code> is a better-supported runner; see the documentation on
+ <a href="https://web-platform-tests.org/running-tests/">running tests</a>.
+ <script>
+ if (location.host == "w3c-test.org") {
+ document.getElementById("runner-contact-info")
+ .innerHTML = ' If this runner isn’t working, contact'
+ + ' <a href="mailto:mike@w3.org">mike@w3.org</a>. '
+ }
+ </script>
+ </div>
+
+ <div id="testControl" class="panel panel-default">
+ <div class="panel-body">
+ <form id='options' class='horizontal-form' onsubmit='return false;'>
+
+ <div class="form-group">
+ <label class="col-sm-3 control-label">Test types to include</label>
+ <div class="col-sm-9">
+ <label>
+ <input type=checkbox checked value="testharness" id='th' class='test-type'>
+ JavaScript tests
+ </label>
+ <label>
+ <input type=checkbox checked value="reftest" id='ref' class='test-type'>
+ Reftests
+ </label>
+ <label>
+ <input type=checkbox checked value="manual" id='man' class='test-type'>
+ Manual tests
+ </label>
+ </div>
+ </div>
+
+ <div class="form-group">
+ <label for="path" class="col-sm-3 control-label">Run tests under path</label>
+ <div class="col-sm-9">
+ <input value="/" id='path' class='path form-control' disabled>
+ <label>
+ <input type=checkbox id='use_regex'>
+ Use regular expression
+ </label>
+ </div>
+ </div>
+
+ <div class="form-group">
+ <label for="timeout_multiplier" class="col-sm-3 control-label">Timeout multiplier</label>
+ <div class="col-sm-9">
+ <input type=number value="1" id='timeout_multiplier' class='timeout_multiplier form-control'>
+ </div>
+ </div>
+
+ <div class="form-group">
+ <label class="col-sm-3 control-label">Debug options</label>
+ <div class="col-sm-9">
+ <label>
+ <input type=checkbox id='render' value='render' class='render'>
+ Show output
+ </label>
+ <label>
+ <input type=checkbox id='dumpit'>
+ Dump JSON
+ </label>
+ </div>
+ </div>
+
+ <div class="form-group">
+ <label class="col-sm-3 control-label">Count of matching tests</label>
+ <div class="col-sm-9" id="testcount">
+ </div>
+ </div>
+
+ <div class="form-group">
+ <div class="col-sm-offset-3 col-sm-9">
+ <button type="submit" class="btn btn-success toggleStart" disabled>Start</button>
+ <button type='submit' class="btn btn-info togglePause" disabled>Pause</button>
+ </div>
+ </div>
+ </form>
+ </div>
+ </div>
+
+ <div class="instructions">
+ <p>
+ To run a set of
+ <a href="https://github.com/web-platform-tests/wpt/blob/master/README.md">web-platform-tests</a>
+ tests, specify a path value in the <b>Run tests under path</b> field above. Example paths:
+ </p>
+ <ul>
+ <li><code>/</code> - runs all of the tests from the root down</li>
+ <li><code>/websockets</code> - runs all of the
+ <a href="http://w3c-test.org/websockets/">websockets</a> tests</li>
+ <li><code>/websockets/constructor</code> - runs all of the
+ <a href="http://w3c-test.org/websockets/constructor/">websockets/constructor</a> tests</li>
+ <li><code>/html/syntax/parsing</code> - runs all of the
+ <a href="http://w3c-test.org/html/syntax/parsing/">html/syntax/parsing</a> tests</li>
+ </ul>
+ <p>
+      Multiple test paths can be specified by separating them with commas or whitespace. For example,
+ <code>/js, /html</code> will run the <a href="http://w3c-test.org/js/">js</a> <em>and</em> <a href="http://w3c-test.org/html/">html</a>
+ tests.
+ </p>
+ <p>
+ <a href="http://www.w3schools.com/jsref/jsref_obj_regexp.asp" target="_blank">Javascript regular expressions</a> are also supported for filtering. When the option is checked,
+ only a test path matching the regex pattern will run. For example, you can specify <code>^/js/|^/html/</code>
+ to run the <a href="http://w3c-test.org/js/">js</a> <em>and</em> <a href="http://w3c-test.org/html/">html</a>
+ tests.
+ </p>
+ <p>
+ If the test runner is run online, the set of tests available to run can be found in the
+ <a href="http://w3c-test.org/">w3c-test.org</a> test repository.
+ </p>
+ <p>
+ Tests will run in a new window. For reftests and manual tests it’s best
+ to put that window side-by-side with this one.
+ </p>
+ </div>
+
+ <div id="output">
+ <div class="summary clearfix">
+ <h4>Progress</h4>
+ <div class="progress">
+ <div class="progress-bar" role="progressbar"
+ aria-valuenow="0" aria-valuemin="0" aria-valuemax="0" style="width: 0">
+ 0%
+ </div>
+ </div>
+ <div id="current_test">
+ <label>Current test:</label><a></a>
+ </div>
+ <table class='table'>
+ <thead>
+ <tr>
+ <th></th>
+ <th>Passed</th>
+ <th>Failed</th>
+ <th>Timeouts</th>
+ <th>Errors</th>
+ <th>Not Run</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr>
+ <td></td>
+ <td class='PASS'>0</td>
+ <td class='FAIL'>0</td>
+ <td class='TIMEOUT'>0</td>
+ <td class='ERROR'>0</td>
+ <td class='NOTRUN'>0</td>
+ </tr>
+ <tr>
+ <td>
+ <label>
+ Display:
+ </label>
+ </td>
+ <td><input type=checkbox class="result-display-filter" value="PASS" checked></td>
+ <td><input type=checkbox class="result-display-filter" value="FAIL" checked></td>
+ <td><input type=checkbox class="result-display-filter" value="TIMEOUT" checked></td>
+ <td><input type=checkbox class="result-display-filter" value="ERROR" checked></td>
+ <td><input type=checkbox class="result-display-filter" value="NOTRUN" checked></td>
+ </tr>
+ </tbody>
+ </table>
+ <a class="jsonResults btn btn-primary pull-right">Download JSON results</a>
+ </div>
+
+ <div class="results">
+ <div id="manualUI">
+ <div class='panel panel-primary'>
+ <div class='panel-heading'>
+ <h4 class='panel-title'>Manual Testing</h4>
+ </div>
+ <div class="panel-body reftestUI">
+ <p>
+ The current test requires manual result marking.
+                        The test and reference should compare <strong class="refType text-primary"></strong>.
+ </p>
+ <p>
+ <button class="btn btn-info test">Show Test</button>
+ <button class="btn btn-info ref">Show Reference</button>
+ <span class="reftestWarn"></span>
+ </p>
+ </div>
+ <div class="panel-footer">
+ The test has:
+ <button class="btn btn-success pass">Passed</button>
+ <button class="btn btn-info skip">Not Run</button>
+ <button class="btn btn-danger fail">Failed</button>
+ </div>
+ </div>
+ </div>
+
+ <hr>
+ <h4>Details</h4>
+ <table class='table'>
+ <thead>
+ <tr>
+ <th>Test
+ <th>Status
+ <th>Message
+ <th>Subtest Pass Rate
+ </tr>
+ </thead>
+ <tbody></tbody>
+ </table>
+ </div>
+ </div>
+
+</div>
diff --git a/testing/web-platform/tests/tools/runner/report.css b/testing/web-platform/tests/tools/runner/report.css
new file mode 100644
index 0000000000..bc25f9e1bd
--- /dev/null
+++ b/testing/web-platform/tests/tools/runner/report.css
@@ -0,0 +1,43 @@
+table {
+ border-collapse: collapse;
+}
+
+tbody {
+ border-top: thin solid;
+ border-bottom: thin solid;
+}
+
+.status {
+ font-variant:small-caps;
+ color:white;
+ text-align:center;
+}
+
+.PASS, .OK {
+ background-color:green;
+}
+
+.FAIL {
+ background-color:red;
+}
+
+.ERROR, .NOTRUN, .NONE {
+ background-color:black;
+}
+
+.TIMEOUT {
+ background-color:blue;
+}
+
+td {
+ padding:0.25em;
+}
+
+tr.test {
+ background-color:#ddd;
+}
+
+tr.subtest {
+ background-color:#eee;
+}
diff --git a/testing/web-platform/tests/tools/runner/report.py b/testing/web-platform/tests/tools/runner/report.py
new file mode 100644
index 0000000000..3dc6cfb8bb
--- /dev/null
+++ b/testing/web-platform/tests/tools/runner/report.py
@@ -0,0 +1,308 @@
+# flake8: noqa
+# mypy: ignore-errors
+
+import argparse
+import json
+import sys
+import types
+
+from cgi import escape
+from collections import defaultdict
+
+
+def html_escape(item, escape_quote=False):
+ if isinstance(item, types.StringTypes):
+ rv = escape(item)
+ if escape_quote:
+ rv = rv.replace('"', "&quot;")
+ return rv
+ else:
+ return item
+
+
+class Raw:
+ """Simple wrapper around a string to stop it being escaped by html_escape"""
+ def __init__(self, value):
+ self.value = value
+
+ def __unicode__(self):
+ return unicode(self.value)
+
+
+class Node:
+ """Node structure used when building HTML"""
+ def __init__(self, name, attrs, children):
+ #Need list of void elements
+ self.name = name
+ self.attrs = attrs
+ self.children = children
+
+ def __unicode__(self):
+ if self.attrs:
+ #Need to escape
+ attrs_unicode = " " + " ".join("%s=\"%s\"" % (html_escape(key),
+ html_escape(value,
+ escape_quote=True))
+ for key, value in self.attrs.items())
+ else:
+ attrs_unicode = ""
+ return "<%s%s>%s</%s>\n" % (self.name,
+ attrs_unicode,
+ "".join(unicode(html_escape(item))
+ for item in self.children),
+ self.name)
+
+ def __str__(self):
+ return unicode(self).encode("utf8")
+
+
+class RootNode:
+ """Special Node representing the document root"""
+ def __init__(self, *children):
+ self.children = ["<!DOCTYPE html>"] + list(children)
+
+ def __unicode__(self):
+ return "".join(unicode(item) for item in self.children)
+
+ def __str__(self):
+ return unicode(self).encode("utf8")
+
+
+def flatten(iterable):
+ """Flatten a list of lists by one level so that
+ [1,["abc"], "def",[2, [3]]]
+ becomes
+ [1, "abc", "def", 2, [3]]"""
+ rv = []
+ for item in iterable:
+ if hasattr(item, "__iter__") and not isinstance(item, types.StringTypes):
+ rv.extend(item)
+ else:
+ rv.append(item)
+ return rv
+
+
+class HTML:
+ """Simple HTML templating system. An instance of this class can create
+ element nodes by calling methods with the same name as the element,
+ passing in children as positional arguments or as a list, and attributes
+    as keyword arguments, writing _ in place of - and adding a trailing _ to avoid Python keywords (e.g. class_)
+
+ e.g.
+
+ h = HTML()
+ print(h.html(
+        h.head(),
+        h.body([h.h1("Hello World!")], class_="body-class")
+ ))
+ Would give
+ <!DOCTYPE html><html><head></head><body class="body-class"><h1>Hello World!</h1></body></html>"""
+ def __getattr__(self, name):
+ def make_html(self, *content, **attrs):
+ for attr_name in attrs.keys():
+ if "_" in attr_name:
+ new_name = attr_name.replace("_", "-")
+ if new_name.endswith("-"):
+ new_name = new_name[:-1]
+ attrs[new_name] = attrs.pop(attr_name)
+ return Node(name, attrs, flatten(content))
+
+ method = types.MethodType(make_html, self, HTML)
+ setattr(self, name, method)
+ return method
+
+ def __call__(self, *children):
+ return RootNode(*flatten(children))
+
+
+h = HTML()
+
+
+class TestResult:
+ """Simple holder for the results of a single test in a single UA"""
+ def __init__(self, test):
+ self.test = test
+ self.results = {}
+
+    def __eq__(self, other):
+        return self.test == other.test
+
+ def __hash__(self):
+ return hash(self.test)
+
+
+def load_data(args):
+ """Load data treating args as a list of UA name, filename pairs"""
+ pairs = []
+ for i in xrange(0, len(args), 2):
+ pairs.append(args[i:i+2])
+
+ rv = {}
+ for UA, filename in pairs:
+ with open(filename) as f:
+ rv[UA] = json.load(f)
+
+ return rv
+
+
+def test_id(id):
+ """Convert a test id in JSON into an immutable object that
+ can be used as a dictionary key"""
+ if isinstance(id, list):
+ return tuple(id)
+ else:
+ return id
+
+
+def all_tests(data):
+ tests = defaultdict(set)
+    for UA, results in data.items():
+ for result in results["results"]:
+ id = test_id(result["test"])
+ tests[id] |= {subtest["name"] for subtest in result["subtests"]}
+ return tests
+
+
+def group_results(data):
+ """Produce a list of UAs and a dictionary mapping specific tests to their
+ status in all UAs e.g.
+ ["UA1", "UA2"], {"test_id":{"harness":{"UA1": (status1, message1),
+ "UA2": (status2, message2)},
+ "subtests":{"subtest1": "UA1": (status1-1, message1-1),
+ "UA2": (status2-1, message2-1)}}}
+ Status and message are None if the test didn't run in a particular UA.
+ Message is None if the test didn't produce a message"""
+ tests = all_tests(data)
+
+ UAs = data.keys()
+
+ def result():
+ return {
+ "harness": {UA: (None, None) for UA in UAs},
+ "subtests": None # init this later
+ }
+
+ results_by_test = defaultdict(result)
+
+    for UA, results in data.items():
+ for test_data in results["results"]:
+ id = test_id(test_data["test"])
+ result = results_by_test[id]
+
+ if result["subtests"] is None:
+ result["subtests"] = {
+ name: {UA: (None, None) for UA in UAs} for name in tests[id]
+ }
+
+ result["harness"][UA] = (test_data["status"], test_data["message"])
+ for subtest in test_data["subtests"]:
+ result["subtests"][subtest["name"]][UA] = (subtest["status"],
+ subtest["message"])
+
+ return UAs, results_by_test
+
+
+def status_cell(status, message=None):
+ """Produce a table cell showing the status of a test"""
+ status = status if status is not None else "NONE"
+ kwargs = {}
+ if message:
+ kwargs["title"] = message
+ status_text = status.title()
+ return h.td(status_text, class_="status " + status,
+ **kwargs)
+
+
+def test_link(test_id, subtest=None):
+ """Produce an <a> element linking to a test"""
+ if isinstance(test_id, types.StringTypes):
+ rv = [h.a(test_id, href=test_id)]
+ else:
+ rv = [h.a(test_id[0], href=test_id[0]),
+ " %s " % test_id[1],
+ h.a(test_id[2], href=test_id[2])]
+ if subtest is not None:
+ rv.append(" [%s]" % subtest)
+ return rv
+
+
+def summary(UAs, results_by_test):
+ """Render the implementation report summary"""
+ not_passing = []
+    for test, results in results_by_test.items():
+ if not any(item[0] in ("PASS", "OK") for item in results["harness"].values()):
+ not_passing.append((test, None))
+        for subtest_name, subtest_results in results["subtests"].items():
+ if not any(item[0] == "PASS" for item in subtest_results.values()):
+ not_passing.append((test, subtest_name))
+ if not_passing:
+ rv = [
+ h.p("The following tests failed to pass in all UAs:"),
+ h.ul([h.li(test_link(test, subtest))
+ for test, subtest in not_passing])
+ ]
+ else:
+ rv = "All tests passed in at least one UA"
+ return rv
+
+
+def result_rows(UAs, test, result):
+ """Render the results for each test run"""
+ yield h.tr(
+ h.td(
+ test_link(test),
+ rowspan=(1 + len(result["subtests"]))
+ ),
+ h.td(),
+ [status_cell(status, message)
+ for UA, (status, message) in sorted(result["harness"].items())],
+ class_="test"
+ )
+
+    for name, subtest_result in sorted(result["subtests"].items()):
+ yield h.tr(
+ h.td(name),
+ [status_cell(status, message)
+ for UA, (status, message) in sorted(subtest_result.items())],
+ class_="subtest"
+ )
+
+
+def result_bodies(UAs, results_by_test):
+ return [h.tbody(result_rows(UAs, test, result))
+            for test, result in sorted(results_by_test.items())]
+
+
+def generate_html(UAs, results_by_test):
+ """Generate all the HTML output"""
+ return h(h.html(
+ h.head(
+ h.meta(charset="utf8"),
+ h.title("Implementation Report"),
+ h.link(href="report.css", rel="stylesheet")),
+ h.body(
+ h.h1("Implementation Report"),
+ h.h2("Summary"),
+ summary(UAs, results_by_test),
+ h.h2("Full Results"),
+ h.table(
+ h.thead(
+ h.tr(
+ h.th("Test"),
+ h.th("Subtest"),
+ [h.th(UA) for UA in sorted(UAs)])),
+ result_bodies(UAs, results_by_test)))))
+
+
+def main(filenames):
+ data = load_data(filenames)
+ UAs, results_by_test = group_results(data)
+ return generate_html(UAs, results_by_test)
+
+
+if __name__ == "__main__":
+ if not sys.argv[1:]:
+ print("""Please supply a list of UA name, filename pairs e.g.
+
+python report.py Firefox firefox.json Chrome chrome.json IE internet_explorer.json""")
+ print(main(sys.argv[1:]))
diff --git a/testing/web-platform/tests/tools/runner/runner.css b/testing/web-platform/tests/tools/runner/runner.css
new file mode 100644
index 0000000000..ec72acb83e
--- /dev/null
+++ b/testing/web-platform/tests/tools/runner/runner.css
@@ -0,0 +1,216 @@
+
+html {
+ margin: 0 8px;
+}
+
+body {
+ margin: 0;
+}
+
+html.done {
+ border: 2px solid #32cd32;
+ margin: 3px;
+ padding: 3px;
+}
+
+html:not(.done) {
+ height: 100%;
+}
+
+html:not(.done) body {
+ height: 100%;
+}
+
+html:not(.done) #wrapper {
+ height: 100%;
+ margin-top: -1.5em;
+ padding-top: 1.5em;
+ -moz-box-sizing: border-box;
+ -webkit-box-sizing: border-box;
+ box-sizing: border-box;
+}
+
+section {
+ display: block;
+ border: thin solid black;
+ padding: 0.5em 0;
+}
+
+section h1 {
+ margin: 0;
+ font-size: 1em;
+}
+
+html.done section h1 {
+ text-align: center;
+}
+
+section ol {
+ padding: 0;
+ margin: 0;
+ list-style-position: inside;
+}
+
+html.done section ol {
+ -webkit-column-count: 3;
+ column-count: 3;
+}
+
+section li {
+ padding: 0.1em 0.5em;
+}
+
+section li.pass:nth-child(odd) {
+ background: #e5ffe5;
+}
+section li.pass:nth-child(even) {
+ background: #def8de;
+}
+
+section li.fail:nth-child(odd) {
+ background: #ffe5e5;
+}
+
+section li.fail:nth-child(even) {
+ background: #f8dede;
+}
+
+section p {
+ margin: 0;
+}
+
+html:not(.done) section {
+ border-top: none;
+}
+
+html.done section + section {
+ border-top: none;
+}
+
+#manualUI {
+ position: fixed;
+ z-index: 2000;
+ top: -20px;
+ left: 0;
+ right: 0;
+ display: block;
+ padding: 40px;
+ background: rgba(255, 200, 0, 0.5);
+}
+
+#current_test {
+ visibility: hidden;
+}
+
+#current_test a {
+ display: inline-block;
+ margin-left: 1em;
+}
+
+#current_test .match {
+ background-color: #BDF1FF;
+}
+
+#manualUI .panel {
+ max-width: 800px;
+ margin: auto;
+ box-shadow: 2px 2px 10px #666;
+}
+
+body > p {
+ text-align: center;
+}
+
+body > p > textarea {
+ width: 90%;
+ height: 20em;
+}
+
+body {
+ padding-top: 70px;
+}
+
+.container {
+ max-width: 800px;
+}
+
+.navbar-brand > img {
+ display: inline;
+}
+
+.navbar-inverse .navbar-brand {
+ color: #fff;
+}
+
+.form-group {
+ clear:both;
+}
+
+.horizontal-form .form-group {
+ padding: 6px;
+}
+
+header.navbar-inverse {
+ background: linear-gradient(to bottom, rgb(7, 62, 128) 0px, rgb(0, 45, 80) 100%);
+}
+
+ul.error, ul.warning {
+ padding: 0;
+}
+
+td.PASS {
+ color: #48cfad;
+}
+
+td.FAIL {
+ color: #ed5565;
+}
+
+td.TIMEOUT {
+ color: #f6bb42;
+}
+
+td.NOTRUN {
+ color: #00c;
+}
+
+td.ERROR {
+ color: #da4453;
+ font-weight: bold;
+}
+.stopped {
+ background-image: linear-gradient(to bottom, #fc0000 0, #770000 100%);
+}
+
+.col-sm-9 label {
+ margin-right: 20px;
+}
+
+.instructions {
+ padding-left: 8px;
+ padding-right: 8px;
+}
+
+@keyframes alert_updating {
+ from {
+ background-color: inherit;
+ }
+ to {
+ background-color: #f6bb42;
+ }
+}
+
+.loading-manifest {
+ background-image: none;
+ color: #333;
+ animation-duration: 1.5s;
+ animation-name: alert_updating;
+ animation-iteration-count: infinite;
+ animation-direction: alternate;
+}
+
+.reftestWarn {
+ color: yellow;
+ background: black;
+ padding: 8px;
+}
diff --git a/testing/web-platform/tests/tools/runner/runner.js b/testing/web-platform/tests/tools/runner/runner.js
new file mode 100644
index 0000000000..1577228b2f
--- /dev/null
+++ b/testing/web-platform/tests/tools/runner/runner.js
@@ -0,0 +1,910 @@
+/*jshint nonew: false */
+(function() {
+"use strict";
+var runner;
+var testharness_properties = {output:false,
+ timeout_multiplier:1};
+
+function Manifest(path) {
+ this.data = null;
+ this.path = path;
+ this.num_tests = null;
+}
+
+Manifest.prototype = {
+ load: function(loaded_callback) {
+ this.generate(loaded_callback);
+ },
+
+ generate: function(loaded_callback) {
+ var xhr = new XMLHttpRequest();
+ xhr.onreadystatechange = function() {
+ if (xhr.readyState !== 4) {
+ return;
+ }
+ if (!(xhr.status === 200 || xhr.status === 0)) {
+ throw new Error("Manifest generation failed");
+ }
+ this.data = JSON.parse(xhr.responseText);
+ loaded_callback();
+ }.bind(this);
+ xhr.open("POST", "update_manifest.py");
+ xhr.send(null);
+ },
+
+ by_type:function(type) {
+ var ret = [] ;
+ if (this.data.items.hasOwnProperty(type)) {
+ for (var propertyName in this.data.items[type]) {
+ var arr = this.data.items[type][propertyName][0];
+ var item = arr[arr.length - 1];
+ item.path = propertyName;
+ if ('string' === typeof arr[0]) {
+ item.url = arr[0];
+ }
+ if (Array.isArray(arr[1])) {
+ item.references = arr[1];
+ }
+ ret.push(item);
+ }
+ }
+ return ret ;
+ }
+};
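+
+// The manifest data consumed above is produced by update_manifest.py: for each
+// test type it maps a source path to a list of entries whose last element is an
+// extras object (e.g. timeout), with reftest entries also carrying a list of
+// [ref_url, relation] references.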
+
+function ManifestIterator(manifest, path, test_types, use_regex) {
+ this.manifest = manifest;
+ this.paths = null;
+ this.regex_pattern = null;
+ this.test_types = test_types;
+ this.test_types_index = -1;
+ this.test_list = null;
+ this.test_index = null;
+
+ if (use_regex) {
+ this.regex_pattern = path;
+ } else {
+ // Split paths by either a comma or whitespace, and ignore empty sub-strings.
+ this.paths = path.split(/[,\s]+/).filter(function(s) { return s.length > 0; });
+ }
+}
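+
+// For example, a path value of "/js, /html" selects every test whose URL starts
+// with "/js" or "/html"; when use_regex is set, the value is instead treated as
+// a single JavaScript regular expression such as "^/js/|^/html/".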
+
+ManifestIterator.prototype = {
+ next: function() {
+ var manifest_item = null;
+
+ if (this.test_types.length === 0) {
+ return null;
+ }
+
+ while (!manifest_item) {
+ while (this.test_list === null || this.test_index >= this.test_list.length) {
+ this.test_types_index++;
+ if (this.test_types_index >= this.test_types.length) {
+ return null;
+ }
+ this.test_index = 0;
+ this.test_list = this.manifest.by_type(this.test_types[this.test_types_index]);
+ }
+
+ manifest_item = this.test_list[this.test_index++];
+ while (manifest_item && !this.matches(manifest_item)) {
+ manifest_item = this.test_list[this.test_index++];
+ }
+ if (manifest_item) {
+ return this.to_test(manifest_item);
+ }
+ }
+ },
+
+ // Calculate the location of a match within a provided URL.
+ //
+ // @param {string} url - Valid URL
+ //
+ // @returns {null|object} - null if the URL does not satisfy the iterator's
+ // filtering criteria. Otherwise, an object with
+ // the following properties:
+ //
+ // - index - the zero-indexed offset of the start
+ // of the match
+ // - width - the total number of matching
+ // characters
+ match_location: function(url) {
+ var match;
+
+ if (this.regex_pattern) {
+ match = url.match(this.regex_pattern);
+
+ if (!match) {
+ return null;
+ }
+
+ return { index: match.index, width: match[0].length };
+ }
+
+ this.paths.some(function(path) {
+ if (url.indexOf(path) === 0) {
+ match = path;
+ return true;
+ }
+ return false;
+ });
+
+ if (!match) {
+ return null;
+ }
+
+ return { index: 0, width: match.length };
+ },
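+
+    // For example, in plain-path mode with paths ["/websockets"],
+    // match_location("/websockets/constructor/001.html") returns
+    // { index: 0, width: 11 }; in regex mode with the pattern "^/js/|^/html/",
+    // match_location("/html/foo.html") returns { index: 0, width: 6 }.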
+
+ matches: function(manifest_item) {
+ var url_base = this.manifest.data.url_base;
+ if (url_base.charAt(url_base.length - 1) !== "/") {
+ url_base = url_base + "/";
+ }
+ var url = url_base + manifest_item.url;
+ return this.match_location(url) !== null;
+ },
+
+ to_test: function(manifest_item) {
+ var url_base = this.manifest.data.url_base;
+ if (url_base.charAt(url_base.length - 1) !== "/") {
+ url_base = url_base + "/";
+ }
+ var test = {
+ type: this.test_types[this.test_types_index],
+ url: url_base + manifest_item.url
+ };
+ if (manifest_item.hasOwnProperty("references")) {
+ test.ref_length = manifest_item.references.length;
+ test.ref_type = manifest_item.references[0][1];
+ test.ref_url = manifest_item.references[0][0];
+ }
+ return test;
+ },
+
+ count: function() {
+ return this.test_types.reduce(function(prev, current) {
+ var matches = this.manifest.by_type(current).filter(function(x) {
+ return this.matches(x);
+ }.bind(this));
+ return prev + matches.length;
+ }.bind(this), 0);
+ }
+};
+
+function VisualOutput(elem, runner) {
+ this.elem = elem;
+ this.runner = runner;
+ this.results_table = null;
+ this.section_wrapper = null;
+ this.results_table = this.elem.querySelector(".results > table");
+ this.section = null;
+ this.progress = this.elem.querySelector(".summary .progress");
+ this.meter = this.progress.querySelector(".progress-bar");
+ this.result_count = null;
+ this.json_results_area = this.elem.querySelector("textarea");
+ this.instructions = document.querySelector(".instructions");
+
+ this.elem.style.display = "none";
+ this.runner.manifest_wait_callbacks.push(this.on_manifest_wait.bind(this));
+ this.runner.start_callbacks.push(this.on_start.bind(this));
+ this.runner.result_callbacks.push(this.on_result.bind(this));
+ this.runner.done_callbacks.push(this.on_done.bind(this));
+
+ this.display_filter_state = {};
+
+ var visual_output = this;
+ var display_filter_inputs = this.elem.querySelectorAll(".result-display-filter");
+ for (var i = 0; i < display_filter_inputs.length; ++i) {
+ var display_filter_input = display_filter_inputs[i];
+ this.display_filter_state[display_filter_input.value] = display_filter_input.checked;
+ display_filter_input.addEventListener("change", function(e) {
+ visual_output.apply_display_filter(e.target.value, e.target.checked);
+        });
+ }
+}
+
+VisualOutput.prototype = {
+ clear: function() {
+ this.result_count = {"PASS":0,
+ "FAIL":0,
+ "ERROR":0,
+ "TIMEOUT":0,
+ "NOTRUN":0};
+ for (var p in this.result_count) {
+ if (this.result_count.hasOwnProperty(p)) {
+ this.elem.querySelector("td." + p).textContent = 0;
+ }
+ }
+ if (this.json_results_area) {
+ this.json_results_area.parentNode.removeChild(this.json_results_area);
+ }
+ this.meter.style.width = '0px';
+ this.meter.textContent = '0%';
+ this.meter.classList.remove("stopped", "loading-manifest");
+ this.elem.querySelector(".jsonResults").style.display = "none";
+ this.results_table.removeChild(this.results_table.tBodies[0]);
+ this.results_table.appendChild(document.createElement("tbody"));
+ },
+
+ on_manifest_wait: function() {
+ this.clear();
+ this.instructions.style.display = "none";
+ this.elem.style.display = "block";
+ this.steady_status("loading-manifest");
+ },
+
+ on_start: function() {
+ this.clear();
+ this.instructions.style.display = "none";
+ this.elem.style.display = "block";
+ this.meter.classList.add("progress-striped", "active");
+ },
+
+ on_result: function(test, status, message, subtests) {
+ var row = document.createElement("tr");
+
+ var subtest_pass_count = subtests.reduce(function(prev, current) {
+ return (current.status === "PASS") ? prev + 1 : prev;
+ }, 0);
+
+ var subtest_notrun_count = subtests.reduce(function(prev, current) {
+ return (current.status === "NOTRUN") ? prev +1 : prev;
+ }, 0);
+
+ var subtests_count = subtests.length;
+
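+        // Derive a single display status for the whole test: PASS when the
+        // harness reported OK/PASS and every subtest passed, NOTRUN when
+        // nothing ran, FAIL when the harness was OK but some subtest did not
+        // pass, and the raw harness status otherwise.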
+ var test_status;
+ if (subtest_pass_count === subtests_count &&
+ (status == "OK" || status == "PASS")) {
+ test_status = "PASS";
+ } else if ((!subtests_count && status === "NOTRUN") ||
+ (subtests_count && (subtest_notrun_count == subtests_count) ) ) {
+ test_status = "NOTRUN";
+ } else if (subtests_count > 0 && status === "OK") {
+ test_status = "FAIL";
+ } else {
+ test_status = status;
+ }
+
+ subtests.forEach(function(subtest) {
+ if (this.result_count.hasOwnProperty(subtest.status)) {
+ this.result_count[subtest.status] += 1;
+ }
+ }.bind(this));
+ if (this.result_count.hasOwnProperty(status)) {
+ this.result_count[status] += 1;
+ }
+
+ var name_node = row.appendChild(document.createElement("td"));
+ name_node.appendChild(this.test_name_node(test));
+
+ var status_node = row.appendChild(document.createElement("td"));
+ status_node.textContent = test_status;
+ status_node.className = test_status;
+
+ var message_node = row.appendChild(document.createElement("td"));
+ message_node.textContent = message || "";
+
+ var subtests_node = row.appendChild(document.createElement("td"));
+ if (subtests_count) {
+ subtests_node.textContent = subtest_pass_count + "/" + subtests_count;
+ } else {
+ if (status == "PASS") {
+ subtests_node.textContent = "1/1";
+ } else {
+ subtests_node.textContent = "0/1";
+ }
+ }
+
+ var status_arr = ["PASS", "FAIL", "ERROR", "TIMEOUT", "NOTRUN"];
+ for (var i = 0; i < status_arr.length; i++) {
+ this.elem.querySelector("td." + status_arr[i]).textContent = this.result_count[status_arr[i]];
+ }
+
+ this.apply_display_filter_to_result_row(row, this.display_filter_state[test_status]);
+ this.results_table.tBodies[0].appendChild(row);
+ this.update_meter(this.runner.progress(), this.runner.results.count(), this.runner.test_count());
+ },
+
+ steady_status: function(statusName) {
+ var statusTexts = {
+ done: "Done!",
+ stopped: "Stopped",
+ "loading-manifest": "Updating and loading test manifest; this may take several minutes."
+ };
+ var textContent = statusTexts[statusName];
+
+ this.meter.setAttribute("aria-valuenow", this.meter.getAttribute("aria-valuemax"));
+ this.meter.style.width = "100%";
+ this.meter.textContent = textContent;
+ this.meter.classList.remove("progress-striped", "active", "stopped", "loading-manifest");
+ this.meter.classList.add(statusName);
+ this.runner.display_current_test(null);
+ },
+
+ on_done: function() {
+ this.steady_status(this.runner.stop_flag ? "stopped" : "done");
+ //add the json serialization of the results
+ var a = this.elem.querySelector(".jsonResults");
+ var json = this.runner.results.to_json();
+
+ if (document.getElementById("dumpit").checked) {
+ this.json_results_area = Array.prototype.slice.call(this.elem.querySelectorAll("textarea"));
+ for(var i = 0,t = this.json_results_area.length; i < t; i++){
+ this.elem.removeChild(this.json_results_area[i]);
+ }
+ this.json_results_area = document.createElement("textarea");
+ this.json_results_area.style.width = "100%";
+ this.json_results_area.setAttribute("rows", "50");
+ this.elem.appendChild(this.json_results_area);
+ this.json_results_area.textContent = json;
+ }
+ var blob = new Blob([json], { type: "application/json" });
+ a.href = window.URL.createObjectURL(blob);
+ a.download = "runner-results.json";
+ a.textContent = "Download JSON results";
+ if (!a.getAttribute("download")) a.textContent += " (right-click and save as to download)";
+ a.style.display = "inline";
+ },
+
+ test_name_node: function(test) {
+ if (!test.hasOwnProperty("ref_url")) {
+ return this.link(test.url);
+ } else {
+ var wrapper = document.createElement("span");
+ wrapper.appendChild(this.link(test.url));
+ wrapper.appendChild(document.createTextNode(" " + test.ref_type + " "));
+ wrapper.appendChild(this.link(test.ref_url));
+ return wrapper;
+ }
+ },
+
+ link: function(href) {
+ var link = document.createElement("a");
+ link.href = this.runner.server + href;
+ link.textContent = href;
+ return link;
+ },
+
+ update_meter: function(progress, count, total) {
+ this.meter.setAttribute("aria-valuenow", count);
+ this.meter.setAttribute("aria-valuemax", total);
+ this.meter.textContent = this.meter.style.width = (progress * 100).toFixed(1) + "%";
+ },
+
+ apply_display_filter: function(test_status, display_state) {
+ this.display_filter_state[test_status] = display_state;
+ var result_cells = this.elem.querySelectorAll(".results > table tr td." + test_status);
+ for (var i = 0; i < result_cells.length; ++i) {
+            this.apply_display_filter_to_result_row(result_cells[i].parentNode, display_state);
+ }
+ },
+
+ apply_display_filter_to_result_row: function(result_row, display_state) {
+ result_row.style.display = display_state ? "" : "none";
+ }
+};
+
+function ManualUI(elem, runner) {
+ this.elem = elem;
+ this.runner = runner;
+ this.pass_button = this.elem.querySelector("button.pass");
+ this.fail_button = this.elem.querySelector("button.fail");
+ this.skip_button = this.elem.querySelector("button.skip");
+ this.ref_buttons = this.elem.querySelector(".reftestUI");
+ this.ref_type = this.ref_buttons.querySelector(".refType");
+ this.ref_warning = this.elem.querySelector(".reftestWarn");
+ this.test_button = this.ref_buttons.querySelector("button.test");
+ this.ref_button = this.ref_buttons.querySelector("button.ref");
+
+ this.hide();
+
+ this.runner.test_start_callbacks.push(this.on_test_start.bind(this));
+ this.runner.test_pause_callbacks.push(this.hide.bind(this));
+ this.runner.done_callbacks.push(this.on_done.bind(this));
+
+ this.pass_button.onclick = function() {
+ this.disable_buttons();
+ this.runner.on_result("PASS", "", []);
+ }.bind(this);
+
+ this.skip_button.onclick = function() {
+ this.disable_buttons();
+ this.runner.on_result("NOTRUN", "", []);
+ }.bind(this);
+
+ this.fail_button.onclick = function() {
+ this.disable_buttons();
+ this.runner.on_result("FAIL", "", []);
+ }.bind(this);
+}
+
+ManualUI.prototype = {
+ show: function() {
+ this.elem.style.display = "block";
+ setTimeout(this.enable_buttons.bind(this), 200);
+ },
+
+ hide: function() {
+ this.elem.style.display = "none";
+ },
+
+ show_ref: function() {
+ this.ref_buttons.style.display = "block";
+ this.test_button.onclick = function() {
+ this.runner.load(this.runner.current_test.url);
+ }.bind(this);
+ this.ref_button.onclick = function() {
+ this.runner.load(this.runner.current_test.ref_url);
+ }.bind(this);
+ },
+
+ hide_ref: function() {
+ this.ref_buttons.style.display = "none";
+ },
+
+ disable_buttons: function() {
+ this.pass_button.disabled = true;
+ this.fail_button.disabled = true;
+ },
+
+ enable_buttons: function() {
+ this.pass_button.disabled = false;
+ this.fail_button.disabled = false;
+ },
+
+ on_test_start: function(test) {
+ if (test.type == "manual" || test.type == "reftest") {
+ this.show();
+ } else {
+ this.hide();
+ }
+ if (test.type == "reftest") {
+ this.show_ref();
+ this.ref_type.textContent = test.ref_type === "==" ? "equal" : "unequal";
+ if (test.ref_length > 1) {
+ this.ref_warning.textContent = "WARNING: only presenting first of " + test.ref_length + " references";
+ this.ref_warning.style.display = "inline";
+ } else {
+ this.ref_warning.textContent = "";
+ this.ref_warning.style.display = "none";
+ }
+ } else {
+ this.hide_ref();
+ }
+ },
+
+ on_done: function() {
+ this.hide();
+ }
+};
+
+function TestControl(elem, runner) {
+ this.elem = elem;
+ this.path_input = this.elem.querySelector(".path");
+ this.path_input.addEventListener("change", function() {
+ this.set_counts();
+ }.bind(this), false);
+ this.use_regex_input = this.elem.querySelector("#use_regex");
+ this.use_regex_input.addEventListener("change", function() {
+ this.set_counts();
+ }.bind(this), false);
+ this.pause_button = this.elem.querySelector("button.togglePause");
+ this.start_button = this.elem.querySelector("button.toggleStart");
+ this.type_checkboxes = Array.prototype.slice.call(
+ this.elem.querySelectorAll("input[type=checkbox].test-type"));
+ this.type_checkboxes.forEach(function(elem) {
+ elem.addEventListener("change", function() {
+ this.set_counts();
+ }.bind(this),
+ false);
+ elem.addEventListener("click", function() {
+ this.start_button.disabled = this.get_test_types().length < 1;
+ }.bind(this),
+ false);
+ }.bind(this));
+
+ this.timeout_input = this.elem.querySelector(".timeout_multiplier");
+ this.render_checkbox = this.elem.querySelector(".render");
+ this.testcount_area = this.elem.querySelector("#testcount");
+ this.runner = runner;
+ this.runner.done_callbacks.push(this.on_done.bind(this));
+ this.set_start();
+ this.set_counts();
+}
+
+TestControl.prototype = {
+ set_start: function() {
+ this.start_button.disabled = this.get_test_types().length < 1;
+ this.pause_button.disabled = true;
+ this.start_button.textContent = "Start";
+ this.path_input.disabled = false;
+ this.type_checkboxes.forEach(function(elem) {
+ elem.disabled = false;
+ });
+ this.start_button.onclick = function() {
+ var path = this.get_path();
+ var test_types = this.get_test_types();
+ var settings = this.get_testharness_settings();
+ var use_regex = this.get_use_regex();
+ this.runner.start(path, test_types, settings, use_regex);
+ this.set_stop();
+ this.set_pause();
+ }.bind(this);
+ },
+
+ set_stop: function() {
+ clearTimeout(this.runner.timeout);
+ this.pause_button.disabled = false;
+ this.start_button.textContent = "Stop";
+ this.path_input.disabled = true;
+ this.type_checkboxes.forEach(function(elem) {
+ elem.disabled = true;
+ });
+ this.start_button.onclick = function() {
+ this.runner.stop_flag = true;
+ this.runner.done();
+ }.bind(this);
+ },
+
+ set_pause: function() {
+ this.pause_button.textContent = "Pause";
+ this.pause_button.onclick = function() {
+ this.runner.pause();
+ this.set_resume();
+ }.bind(this);
+ },
+
+ set_resume: function() {
+ this.pause_button.textContent = "Resume";
+ this.pause_button.onclick = function() {
+ this.runner.unpause();
+ this.set_pause();
+ }.bind(this);
+
+ },
+
+ set_counts: function() {
+ if (this.runner.manifest_loading) {
+ setTimeout(function() {
+ this.set_counts();
+ }.bind(this), 1000);
+ return;
+ }
+ var path = this.get_path();
+ var test_types = this.get_test_types();
+ var use_regex = this.get_use_regex();
+ var iterator = new ManifestIterator(this.runner.manifest, path, test_types, use_regex);
+ var count = iterator.count();
+ this.testcount_area.textContent = count;
+ },
+
+ get_path: function() {
+ return this.path_input.value;
+ },
+
+ get_test_types: function() {
+ return this.type_checkboxes.filter(function(elem) {
+ return elem.checked;
+ }).map(function(elem) {
+ return elem.value;
+ });
+ },
+
+ get_testharness_settings: function() {
+ return {timeout_multiplier: parseFloat(this.timeout_input.value),
+ output: this.render_checkbox.checked};
+ },
+
+ get_use_regex: function() {
+ return this.use_regex_input.checked;
+ },
+
+ on_done: function() {
+ this.set_pause();
+ this.set_start();
+ }
+};
+
+function Results(runner) {
+ this.test_results = null;
+ this.runner = runner;
+
+ this.runner.start_callbacks.push(this.on_start.bind(this));
+}
+
+Results.prototype = {
+ on_start: function() {
+ this.test_results = [];
+ },
+
+ set: function(test, status, message, subtests) {
+ this.test_results.push({"test":test,
+ "subtests":subtests,
+ "status":status,
+ "message":message});
+ },
+
+ count: function() {
+ return this.test_results.length;
+ },
+
+ to_json: function() {
+ var test_results = this.test_results || [];
+ var data = {
+ "results": test_results.map(function(result) {
+ var rv = {"test":(result.test.hasOwnProperty("ref_url") ?
+ [result.test.url, result.test.ref_type, result.test.ref_url] :
+ result.test.url),
+ "subtests":result.subtests,
+ "status":result.status,
+ "message":result.message};
+ return rv;
+ })
+ };
+ return JSON.stringify(data, null, 2);
+ }
+};
+
+function Runner(manifest_path) {
+ this.server = get_host_info().HTTP_ORIGIN;
+ this.https_server = get_host_info().HTTPS_ORIGIN;
+ this.manifest = new Manifest(manifest_path);
+ this.path = null;
+ this.test_types = null;
+ this.manifest_iterator = null;
+
+ this.test_window = null;
+ this.test_div = document.getElementById('current_test');
+ this.test_url = this.test_div.getElementsByTagName('a')[0];
+ this.current_test = null;
+ this.timeout = null;
+ this.num_tests = null;
+ this.pause_flag = false;
+ this.stop_flag = false;
+ this.done_flag = false;
+
+ this.manifest_wait_callbacks = [];
+ this.start_callbacks = [];
+ this.test_start_callbacks = [];
+ this.test_pause_callbacks = [];
+ this.result_callbacks = [];
+ this.done_callbacks = [];
+
+ this.results = new Results(this);
+
+ this.start_after_manifest_load = false;
+ this.manifest_loading = true;
+ this.manifest.load(this.manifest_loaded.bind(this));
+}
+
+Runner.prototype = {
+ test_timeout: 20000, //ms
+
+ currentTest: function() {
+ return this.manifest[this.mTestCount];
+ },
+
+ ensure_test_window: function() {
+ if (!this.test_window || this.test_window.location === null) {
+            // The second argument to window.open is the window name, so give
+            // the test window a name and pass the intended size via features.
+            this.test_window = window.open("about:blank", "wpt_test_window", "width=800,height=600");
+ }
+ },
+
+ manifest_loaded: function() {
+ this.manifest_loading = false;
+ if (this.start_after_manifest_load) {
+ this.do_start();
+ }
+ },
+
+ start: function(path, test_types, testharness_settings, use_regex) {
+ this.pause_flag = false;
+ this.stop_flag = false;
+ this.done_flag = false;
+ this.path = path;
+ this.use_regex = use_regex;
+ this.test_types = test_types;
+ window.testharness_properties = testharness_settings;
+ this.manifest_iterator = new ManifestIterator(this.manifest, this.path, this.test_types, this.use_regex);
+ this.num_tests = null;
+
+ this.ensure_test_window();
+ if (this.manifest.data === null) {
+ this.wait_for_manifest();
+ } else {
+ this.do_start();
+ }
+ },
+
+ wait_for_manifest: function() {
+ this.start_after_manifest_load = true;
+ this.manifest_wait_callbacks.forEach(function(callback) {
+ callback();
+ });
+ },
+
+ do_start: function() {
+ if (this.manifest_iterator.count() > 0) {
+ this.start_callbacks.forEach(function(callback) {
+ callback();
+ });
+ this.run_next_test();
+ } else {
+ var tests = "tests";
+ if (this.test_types.length < 3) {
+ tests = this.test_types.join(" tests or ") + " tests";
+ }
+ var message = "No " + tests + " found in this path."
+ document.querySelector(".path").setCustomValidity(message);
+ this.done();
+ }
+ },
+
+ pause: function() {
+ this.pause_flag = true;
+ this.test_pause_callbacks.forEach(function(callback) {
+ callback(this.current_test);
+ }.bind(this));
+ },
+
+ unpause: function() {
+ this.pause_flag = false;
+ this.run_next_test();
+ },
+
+ on_result: function(status, message, subtests) {
+ clearTimeout(this.timeout);
+ this.results.set(this.current_test, status, message, subtests);
+ this.result_callbacks.forEach(function(callback) {
+ callback(this.current_test, status, message, subtests);
+ }.bind(this));
+ this.run_next_test();
+ },
+
+ on_timeout: function() {
+ this.on_result("TIMEOUT", "", []);
+ },
+
+ done: function() {
+ this.done_flag = true;
+ if (this.test_window) {
+ this.test_window.close();
+ this.test_window = undefined;
+ }
+ this.done_callbacks.forEach(function(callback) {
+ callback();
+ });
+ },
+
+ run_next_test: function() {
+ if (this.pause_flag) {
+ return;
+ }
+ var next_test = this.manifest_iterator.next();
+ if (next_test === null||this.done_flag) {
+ this.done();
+ return;
+ }
+
+ this.current_test = next_test;
+
+ if (next_test.type === "testharness") {
+ this.timeout = setTimeout(this.on_timeout.bind(this),
+ this.test_timeout * window.testharness_properties.timeout_multiplier);
+ }
+ this.display_current_test(this.current_test.url);
+ this.load(this.current_test.url);
+
+ this.test_start_callbacks.forEach(function(callback) {
+ callback(this.current_test);
+ }.bind(this));
+ },
+
+ display_current_test: function(url) {
+ var match_location, index, width;
+
+ if (url === null) {
+ this.test_div.style.visibility = "hidden";
+ this.test_url.removeAttribute("href");
+ this.test_url.textContent = "";
+ return;
+ }
+
+ match_location = this.manifest_iterator.match_location(url);
+ index = match_location.index;
+ width = match_location.width;
+
+ this.test_url.setAttribute("href", url);
+ this.test_url.innerHTML = url.substring(0, index) +
+ "<span class='match'>" +
+ url.substring(index, index + width) +
+ "</span>" +
+ url.substring(index + width);
+ this.test_div.style.visibility = "visible";
+ },
+
+ load: function(path) {
+ this.ensure_test_window();
+ if (path.match(/\.https\./))
+ this.test_window.location.href = this.https_server + path;
+ else
+ this.test_window.location.href = this.server + path;
+ },
+
+ progress: function() {
+ return this.results.count() / this.test_count();
+ },
+
+ test_count: function() {
+ if (this.num_tests === null) {
+ this.num_tests = this.manifest_iterator.count();
+ }
+ return this.num_tests;
+ },
+
+ on_complete: function(tests, status) {
+ var harness_status_map = {0:"OK", 1:"ERROR", 2:"TIMEOUT", 3:"NOTRUN"};
+ var subtest_status_map = {0:"PASS", 1:"FAIL", 2:"TIMEOUT", 3:"NOTRUN"};
+
+ // this ugly hack is because IE really insists on holding on to the objects it creates in
+ // other windows, and on losing track of them when the window gets closed
+ var subtest_results = JSON.parse(JSON.stringify(
+ tests.map(function (test) {
+ return {name: test.name,
+ status: subtest_status_map[test.status],
+ message: test.message};
+ })
+ ));
+
+ runner.on_result(harness_status_map[status.status],
+ status.message,
+ subtest_results);
+ }
+};
+
+
+function parseOptions() {
+ var options = {
+ test_types: ["testharness", "reftest", "manual"]
+ };
+
+ var optionstrings = location.search.substring(1).split("&");
+ for (var i = 0, il = optionstrings.length; i < il; ++i) {
+ var opt = optionstrings[i];
+ //TODO: fix this for complex-valued options
+ options[opt.substring(0, opt.indexOf("="))] =
+ opt.substring(opt.indexOf("=") + 1);
+ }
+ return options;
+}
+
+function setup() {
+ var options = parseOptions();
+
+ if (options.path) {
+ document.getElementById('path').value = options.path;
+ }
+
+ runner = new Runner("/MANIFEST.json", options);
+ var test_control = new TestControl(document.getElementById("testControl"), runner);
+ new ManualUI(document.getElementById("manualUI"), runner);
+ new VisualOutput(document.getElementById("output"), runner);
+
+ window.addEventListener("message", function(e) {
+ if (e.data.type === "complete")
+ runner.on_complete(e.data.tests, e.data.status);
+ });
+
+ if (options.autorun === "1") {
+ runner.start(test_control.get_path(),
+ test_control.get_test_types(),
+ test_control.get_testharness_settings(),
+ test_control.get_use_regex());
+ }
+}
+
+window.addEventListener("DOMContentLoaded", setup, false);
+})();
diff --git a/testing/web-platform/tests/tools/runner/update_manifest.py b/testing/web-platform/tests/tools/runner/update_manifest.py
new file mode 100644
index 0000000000..a7f72b35b3
--- /dev/null
+++ b/testing/web-platform/tests/tools/runner/update_manifest.py
@@ -0,0 +1,38 @@
+# mypy: ignore-errors
+
+import imp
+import json
+import os
+
+here = os.path.dirname(__file__)
+localpaths = imp.load_source("localpaths", os.path.abspath(os.path.join(here, os.pardir, "localpaths.py")))
+
+root = localpaths.repo_root
+
+from manifest import manifest
+
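+# Called by tools/runner/runner.js (via a POST to update_manifest.py) to
+# (re)build MANIFEST.json and return a trimmed JSON view of it, roughly of
+# this shape (illustrative values only):
+#
+#   {"url_base": "/",
+#    "items": {"testharness": {"dom/example.html": [["dom/example.html", {"timeout": null}]]},
+#              "reftest": {},
+#              "manual": {}}}
+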
+def main(request, response):
+ path = os.path.join(root, "MANIFEST.json")
+
+ # TODO make this download rather than rebuilding from scratch when possible
+ manifest_file = manifest.load_and_update(root, path, "/", parallel=False)
+
+ supported_types = ["testharness", "reftest", "manual"]
+ data = {"items": {},
+ "url_base": "/"}
+ for item_type in supported_types:
+ data["items"][item_type] = {}
+ for item_type, path, tests in manifest_file.itertypes(*supported_types):
+ tests_data = []
+ for item in tests:
+ test_data = [item.url[1:]]
+ if item_type == "reftest":
+ test_data.append(item.references)
+ test_data.append({})
+ if item_type != "manual":
+ test_data[-1]["timeout"] = item.timeout
+ tests_data.append(test_data)
+ assert path not in data["items"][item_type]
+ data["items"][item_type][path] = tests_data
+
+ return [("Content-Type", "application/json")], json.dumps(data)
diff --git a/testing/web-platform/tests/tools/serve/__init__.py b/testing/web-platform/tests/tools/serve/__init__.py
new file mode 100644
index 0000000000..40da7e9fd3
--- /dev/null
+++ b/testing/web-platform/tests/tools/serve/__init__.py
@@ -0,0 +1 @@
+from . import serve # noqa: F401
diff --git a/testing/web-platform/tests/tools/serve/commands.json b/testing/web-platform/tests/tools/serve/commands.json
new file mode 100644
index 0000000000..ed1d72e60e
--- /dev/null
+++ b/testing/web-platform/tests/tools/serve/commands.json
@@ -0,0 +1,17 @@
+{
+ "serve": {
+ "path": "serve.py",
+ "script": "run",
+ "parser": "get_parser",
+ "help": "Run wptserve server",
+ "virtualenv": false
+ },
+ "serve-wave": {
+ "path": "wave.py",
+ "script": "run",
+ "parser": "get_parser",
+ "help": "Run wptserve server for WAVE",
+ "virtualenv": true,
+ "requirements": ["../wave/requirements.txt"]
+ }
+}
diff --git a/testing/web-platform/tests/tools/serve/serve.py b/testing/web-platform/tests/tools/serve/serve.py
new file mode 100644
index 0000000000..6eba2153a1
--- /dev/null
+++ b/testing/web-platform/tests/tools/serve/serve.py
@@ -0,0 +1,1237 @@
+# mypy: allow-untyped-defs
+
+import abc
+import argparse
+import importlib
+import json
+import logging
+import multiprocessing
+import os
+import platform
+import subprocess
+import sys
+import threading
+import time
+import traceback
+import urllib
+import uuid
+from collections import defaultdict, OrderedDict
+from io import IOBase
+from itertools import chain, product
+from html5lib import html5parser
+from typing import ClassVar, List, Set, Tuple
+
+from localpaths import repo_root # type: ignore
+
+from manifest.sourcefile import read_script_metadata, js_meta_re, parse_variants # type: ignore
+from wptserve import server as wptserve, handlers
+from wptserve import stash
+from wptserve import config
+from wptserve.handlers import filesystem_path, wrap_pipeline
+from wptserve.response import ResponseHeaders
+from wptserve.utils import get_port, HTTPException, http2_compatible
+from mod_pywebsocket import standalone as pywebsocket
+
+
+EDIT_HOSTS_HELP = ("Please ensure all the necessary WPT subdomains "
+ "are mapped to a loopback device in /etc/hosts.\n"
+ "See https://web-platform-tests.org/running-tests/from-local-system.html#system-setup "
+ "for instructions.")
+
+
+def replace_end(s, old, new):
+ """
+ Given a string `s` that ends with `old`, replace that occurrence of `old`
+ with `new`.
+ """
+ assert s.endswith(old)
+ return s[:-len(old)] + new
+
+
+def domains_are_distinct(a, b):
+ a_parts = a.split(".")
+ b_parts = b.split(".")
+ min_length = min(len(a_parts), len(b_parts))
+ slice_index = -1 * min_length
+
+ return a_parts[slice_index:] != b_parts[slice_index:]
+
+
+def inject_script(html, script_tag):
+ # Tokenize and find the position of the first content (e.g. after the
+ # doctype, html, and head opening tags if present but before any other tags).
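+    # If tokenizing hits a parse error, or the injection point cannot be
+    # determined from the first chunk of the input, the original markup is
+    # returned unchanged.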
+ token_types = html5parser.tokenTypes
+ after_tags = {"html", "head"}
+ before_tokens = {token_types["EndTag"], token_types["EmptyTag"],
+ token_types["Characters"]}
+ error_tokens = {token_types["ParseError"]}
+
+ tokenizer = html5parser._tokenizer.HTMLTokenizer(html)
+ stream = tokenizer.stream
+ offset = 0
+ error = False
+ for item in tokenizer:
+ if item["type"] == token_types["StartTag"]:
+ if not item["name"].lower() in after_tags:
+ break
+ elif item["type"] in before_tokens:
+ break
+ elif item["type"] in error_tokens:
+ error = True
+ break
+ offset = stream.chunkOffset
+ else:
+ error = True
+
+    if not error and (stream.prevNumCols or stream.prevNumLines):
+ # We're outside the first chunk, so we don't know what to do
+ error = True
+
+ if error:
+ return html
+ else:
+ return html[:offset] + script_tag + html[offset:]
+
+
+class WrapperHandler:
+
+ __meta__ = abc.ABCMeta
+
+ headers = [] # type: ClassVar[List[Tuple[str, str]]]
+
+ def __init__(self, base_path=None, url_base="/"):
+ self.base_path = base_path
+ self.url_base = url_base
+ self.handler = handlers.handler(self.handle_request)
+
+ def __call__(self, request, response):
+ self.handler(request, response)
+
+ def handle_request(self, request, response):
+ headers = self.headers + handlers.load_headers(
+ request, self._get_filesystem_path(request))
+ for header_name, header_value in headers:
+ response.headers.set(header_name, header_value)
+
+ self.check_exposure(request)
+
+ path = self._get_path(request.url_parts.path, True)
+ query = request.url_parts.query
+ if query:
+ query = "?" + query
+ meta = "\n".join(self._get_meta(request))
+ script = "\n".join(self._get_script(request))
+ response.content = self.wrapper % {"meta": meta, "script": script, "path": path, "query": query}
+ wrap_pipeline(path, request, response)
+
+ def _get_path(self, path, resource_path):
+ """Convert the path from an incoming request into a path corresponding to an "unwrapped"
+ resource e.g. the file on disk that will be loaded in the wrapper.
+
+ :param path: Path from the HTTP request
+ :param resource_path: Boolean used to control whether to get the path for the resource that
+ this wrapper will load or the associated file on disk.
+ Typically these are the same but may differ when there are multiple
+ layers of wrapping e.g. for a .any.worker.html input the underlying disk file is
+ .any.js but the top level html file loads a resource with a
+ .any.worker.js extension, which itself loads the .any.js file.
+ If True return the path to the resource that the wrapper will load,
+ otherwise return the path to the underlying file on disk."""
+ for item in self.path_replace:
+ if len(item) == 2:
+ src, dest = item
+ else:
+ assert len(item) == 3
+ src = item[0]
+ dest = item[2 if resource_path else 1]
+ if path.endswith(src):
+ path = replace_end(path, src, dest)
+ return path
+
+ def _get_filesystem_path(self, request):
+ """Get the path of the underlying resource file on disk."""
+ return self._get_path(filesystem_path(self.base_path, request, self.url_base), False)
+
+ def _get_metadata(self, request):
+ """Get an iterator over script metadata based on // META comments in the
+ associated js file.
+
+ :param request: The Request being processed.
+ """
+ path = self._get_filesystem_path(request)
+ try:
+ with open(path, "rb") as f:
+ yield from read_script_metadata(f, js_meta_re)
+ except OSError:
+ raise HTTPException(404)
+
+ def _get_meta(self, request):
+ """Get an iterator over strings to inject into the wrapper document
+ based on // META comments in the associated js file.
+
+ :param request: The Request being processed.
+ """
+ for key, value in self._get_metadata(request):
+ replacement = self._meta_replacement(key, value)
+ if replacement:
+ yield replacement
+
+ def _get_script(self, request):
+ """Get an iterator over strings to inject into the wrapper document
+ based on // META comments in the associated js file.
+
+ :param request: The Request being processed.
+ """
+ for key, value in self._get_metadata(request):
+ replacement = self._script_replacement(key, value)
+ if replacement:
+ yield replacement
+
+ @abc.abstractproperty
+ def path_replace(self):
+ # A list containing a mix of 2 item tuples with (input suffix, output suffix)
+ # and 3-item tuples with (input suffix, filesystem suffix, resource suffix)
+ # for the case where we want a different path in the generated resource to
+ # the actual path on the filesystem (e.g. when there is another handler
+ # that will wrap the file).
+ return None
+
+ @abc.abstractproperty
+ def wrapper(self):
+ # String template with variables path and meta for wrapper document
+ return None
+
+ @abc.abstractmethod
+ def _meta_replacement(self, key, value):
+ # Get the string to insert into the wrapper document, given
+ # a specific metadata key: value pair.
+ pass
+
+ @abc.abstractmethod
+ def check_exposure(self, request):
+ # Raise an exception if this handler shouldn't be exposed after all.
+ pass
+
+
+class HtmlWrapperHandler(WrapperHandler):
+ global_type = None # type: ClassVar[str]
+ headers = [('Content-Type', 'text/html')]
+
+ def check_exposure(self, request):
+ if self.global_type:
+ globals = ""
+ for (key, value) in self._get_metadata(request):
+ if key == "global":
+ globals = value
+ break
+
+ if self.global_type not in parse_variants(globals):
+ raise HTTPException(404, "This test cannot be loaded in %s mode" %
+ self.global_type)
+
+ def _meta_replacement(self, key, value):
+ if key == "timeout":
+ if value == "long":
+ return '<meta name="timeout" content="long">'
+ if key == "title":
+ value = value.replace("&", "&amp;").replace("<", "&lt;")
+ return '<title>%s</title>' % value
+ return None
+
+ def _script_replacement(self, key, value):
+ if key == "script":
+ attribute = value.replace("&", "&amp;").replace('"', "&quot;")
+ return '<script src="%s"></script>' % attribute
+ return None
+
+
+class HtmlScriptInjectorHandlerWrapper:
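+ """Wrap another handler and inject a script into the HTML responses it
+ produces (used to implement the --inject-script option)."""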
+ def __init__(self, inject="", wrap=None):
+ self.inject = inject
+ self.wrap = wrap
+
+ def __call__(self, request, response):
+ self.wrap(request, response)
+ # If the response content type isn't html, don't modify it.
+ if not isinstance(response.headers, ResponseHeaders) or response.headers.get("Content-Type")[0] != b"text/html":
+ return response
+
+ # Skip injection on custom streaming responses.
+ if not isinstance(response.content, (bytes, str, IOBase)) and not hasattr(response, "read"):
+ return response
+
+ response.content = inject_script(
+ b"".join(response.iter_content(read_file=True)),
+ b"<script>\n" +
+ self.inject + b"\n" +
+ (b"// Remove the injected script tag from the DOM.\n"
+ b"document.currentScript.remove();\n"
+ b"</script>\n"))
+ return response
+
+
+class WorkersHandler(HtmlWrapperHandler):
+ global_type = "dedicatedworker"
+ path_replace = [(".any.worker.html", ".any.js", ".any.worker.js"),
+ (".worker.html", ".worker.js")]
+ wrapper = """<!doctype html>
+<meta charset=utf-8>
+%(meta)s
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<div id=log></div>
+<script>
+fetch_tests_from_worker(new Worker("%(path)s%(query)s"));
+</script>
+"""
+
+
+class WorkerModulesHandler(HtmlWrapperHandler):
+ global_type = "dedicatedworker-module"
+ path_replace = [(".any.worker-module.html", ".any.js", ".any.worker-module.js"),
+ (".worker.html", ".worker.js")]
+ wrapper = """<!doctype html>
+<meta charset=utf-8>
+%(meta)s
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<div id=log></div>
+<script>
+fetch_tests_from_worker(new Worker("%(path)s%(query)s", { type: "module" }));
+</script>
+"""
+
+
+class WindowHandler(HtmlWrapperHandler):
+ path_replace = [(".window.html", ".window.js")]
+ wrapper = """<!doctype html>
+<meta charset=utf-8>
+%(meta)s
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+%(script)s
+<div id=log></div>
+<script src="%(path)s"></script>
+"""
+
+
+class AnyHtmlHandler(HtmlWrapperHandler):
+ global_type = "window"
+ path_replace = [(".any.html", ".any.js")]
+ wrapper = """<!doctype html>
+<meta charset=utf-8>
+%(meta)s
+<script>
+self.GLOBAL = {
+ isWindow: function() { return true; },
+ isWorker: function() { return false; },
+ isShadowRealm: function() { return false; },
+};
+</script>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+%(script)s
+<div id=log></div>
+<script src="%(path)s"></script>
+"""
+
+
+class SharedWorkersHandler(HtmlWrapperHandler):
+ global_type = "sharedworker"
+ path_replace = [(".any.sharedworker.html", ".any.js", ".any.worker.js")]
+ wrapper = """<!doctype html>
+<meta charset=utf-8>
+%(meta)s
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<div id=log></div>
+<script>
+fetch_tests_from_worker(new SharedWorker("%(path)s%(query)s"));
+</script>
+"""
+
+
+class SharedWorkerModulesHandler(HtmlWrapperHandler):
+ global_type = "sharedworker-module"
+ path_replace = [(".any.sharedworker-module.html", ".any.js", ".any.worker-module.js")]
+ wrapper = """<!doctype html>
+<meta charset=utf-8>
+%(meta)s
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<div id=log></div>
+<script>
+fetch_tests_from_worker(new SharedWorker("%(path)s%(query)s", { type: "module" }));
+</script>
+"""
+
+
+class ServiceWorkersHandler(HtmlWrapperHandler):
+ global_type = "serviceworker"
+ path_replace = [(".any.serviceworker.html", ".any.js", ".any.worker.js")]
+ wrapper = """<!doctype html>
+<meta charset=utf-8>
+%(meta)s
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<div id=log></div>
+<script>
+(async function() {
+ const scope = 'does/not/exist';
+ let reg = await navigator.serviceWorker.getRegistration(scope);
+ if (reg) await reg.unregister();
+ reg = await navigator.serviceWorker.register("%(path)s%(query)s", {scope});
+ fetch_tests_from_worker(reg.installing);
+})();
+</script>
+"""
+
+
+class ServiceWorkerModulesHandler(HtmlWrapperHandler):
+ global_type = "serviceworker-module"
+ path_replace = [(".any.serviceworker-module.html",
+ ".any.js", ".any.worker-module.js")]
+ wrapper = """<!doctype html>
+<meta charset=utf-8>
+%(meta)s
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<div id=log></div>
+<script>
+(async function() {
+ const scope = 'does/not/exist';
+ let reg = await navigator.serviceWorker.getRegistration(scope);
+ if (reg) await reg.unregister();
+ reg = await navigator.serviceWorker.register(
+ "%(path)s%(query)s",
+ { scope, type: 'module' },
+ );
+ fetch_tests_from_worker(reg.installing);
+})();
+</script>
+"""
+
+class ShadowRealmHandler(HtmlWrapperHandler):
+ global_type = "shadowrealm"
+ path_replace = [(".any.shadowrealm.html", ".any.js")]
+
+ wrapper = """<!doctype html>
+<meta charset=utf-8>
+%(meta)s
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+(async function() {
+ const r = new ShadowRealm();
+
+ await new Promise(r.evaluate(`
+ (resolve, reject) => {
+ (async () => {
+ globalThis.self.GLOBAL = {
+ isWindow: function() { return false; },
+ isWorker: function() { return false; },
+ isShadowRealm: function() { return true; },
+ };
+ await import("/resources/testharness.js");
+ %(script)s
+ await import("%(path)s");
+ })().then(resolve, (e) => reject(e.toString()));
+ }
+ `));
+
+ await fetch_tests_from_shadow_realm(r);
+ done();
+})();
+</script>
+"""
+
+ def _script_replacement(self, key, value):
+ if key == "script":
+ return 'await import("%s");' % value
+ return None
+
+
+class BaseWorkerHandler(WrapperHandler):
+ headers = [('Content-Type', 'text/javascript')]
+
+ def _meta_replacement(self, key, value):
+ return None
+
+ @abc.abstractmethod
+ def _create_script_import(self, attribute):
+ # Take attribute (a string URL to a JS script) and return JS source to import the script
+ # into the worker.
+ pass
+
+ def _script_replacement(self, key, value):
+ if key == "script":
+ attribute = value.replace("\\", "\\\\").replace('"', '\\"')
+ return self._create_script_import(attribute)
+ if key == "title":
+ value = value.replace("\\", "\\\\").replace('"', '\\"')
+ return 'self.META_TITLE = "%s";' % value
+ return None
+
+
+class ClassicWorkerHandler(BaseWorkerHandler):
+ path_replace = [(".any.worker.js", ".any.js")]
+ wrapper = """%(meta)s
+self.GLOBAL = {
+ isWindow: function() { return false; },
+ isWorker: function() { return true; },
+ isShadowRealm: function() { return false; },
+};
+importScripts("/resources/testharness.js");
+%(script)s
+importScripts("%(path)s");
+done();
+"""
+
+ def _create_script_import(self, attribute):
+ return 'importScripts("%s")' % attribute
+
+
+class ModuleWorkerHandler(BaseWorkerHandler):
+ path_replace = [(".any.worker-module.js", ".any.js")]
+ wrapper = """%(meta)s
+self.GLOBAL = {
+ isWindow: function() { return false; },
+ isWorker: function() { return true; },
+ isShadowRealm: function() { return false; },
+};
+import "/resources/testharness.js";
+%(script)s
+import "%(path)s";
+done();
+"""
+
+ def _create_script_import(self, attribute):
+ return 'import "%s";' % attribute
+
+
+rewrites = [("GET", "/resources/WebIDLParser.js", "/resources/webidl2/lib/webidl2.js")]
+
+
+class RoutesBuilder:
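+ """Collect the routing table for the test server: forbidden paths,
+ explicitly added handlers, and per-mount-point wrapper and file handlers."""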
+ def __init__(self, inject_script = None):
+ self.forbidden_override = [("GET", "/tools/runner/*", handlers.file_handler),
+ ("POST", "/tools/runner/update_manifest.py",
+ handlers.python_script_handler)]
+
+ self.forbidden = [("*", "/_certs/*", handlers.ErrorHandler(404)),
+ ("*", "/tools/*", handlers.ErrorHandler(404)),
+ ("*", "{spec}/tools/*", handlers.ErrorHandler(404)),
+ ("*", "/results/", handlers.ErrorHandler(404))]
+
+ self.extra = []
+ self.inject_script_data = None
+ if inject_script is not None:
+ with open(inject_script, 'rb') as f:
+ self.inject_script_data = f.read()
+
+ self.mountpoint_routes = OrderedDict()
+
+ self.add_mount_point("/", None)
+
+ def get_routes(self):
+ routes = self.forbidden_override + self.forbidden + self.extra
+ # Using reversed here means that mount points that are added later
+ # get higher priority. This makes sense since / is typically added
+ # first.
+ for item in reversed(self.mountpoint_routes.values()):
+ routes.extend(item)
+ return routes
+
+ def add_handler(self, method, route, handler):
+ self.extra.append((str(method), str(route), handler))
+
+ def add_static(self, path, format_args, content_type, route, headers=None):
+ if headers is None:
+ headers = {}
+ handler = handlers.StaticHandler(path, format_args, content_type, **headers)
+ self.add_handler("GET", str(route), handler)
+
+ def add_mount_point(self, url_base, path):
+ url_base = "/%s/" % url_base.strip("/") if url_base != "/" else "/"
+
+ self.mountpoint_routes[url_base] = []
+
+ routes = [
+ ("GET", "*.worker.html", WorkersHandler),
+ ("GET", "*.worker-module.html", WorkerModulesHandler),
+ ("GET", "*.window.html", WindowHandler),
+ ("GET", "*.any.html", AnyHtmlHandler),
+ ("GET", "*.any.sharedworker.html", SharedWorkersHandler),
+ ("GET", "*.any.sharedworker-module.html", SharedWorkerModulesHandler),
+ ("GET", "*.any.serviceworker.html", ServiceWorkersHandler),
+ ("GET", "*.any.serviceworker-module.html", ServiceWorkerModulesHandler),
+ ("GET", "*.any.shadowrealm.html", ShadowRealmHandler),
+ ("GET", "*.any.worker.js", ClassicWorkerHandler),
+ ("GET", "*.any.worker-module.js", ModuleWorkerHandler),
+ ("GET", "*.asis", handlers.AsIsHandler),
+ ("*", "/.well-known/attribution-reporting/report-event-attribution", handlers.PythonScriptHandler),
+ ("*", "/.well-known/attribution-reporting/debug/report-event-attribution", handlers.PythonScriptHandler),
+ ("*", "/.well-known/attribution-reporting/report-aggregate-attribution", handlers.PythonScriptHandler),
+ ("*", "/.well-known/attribution-reporting/debug/report-aggregate-attribution", handlers.PythonScriptHandler),
+ ("*", "/.well-known/attribution-reporting/debug/verbose", handlers.PythonScriptHandler),
+ ("*", "/.well-known/web-identity", handlers.PythonScriptHandler),
+ ("*", "*.py", handlers.PythonScriptHandler),
+ ("GET", "*", handlers.FileHandler)
+ ]
+
+ for (method, suffix, handler_cls) in routes:
+ handler = handler_cls(base_path=path, url_base=url_base)
+ if self.inject_script_data is not None:
+ handler = HtmlScriptInjectorHandlerWrapper(inject=self.inject_script_data, wrap=handler)
+
+ self.mountpoint_routes[url_base].append(
+ (method,
+ "%s%s" % (url_base if url_base != "/" else "", suffix),
+ handler))
+
+ def add_file_mount_point(self, file_url, base_path):
+ assert file_url.startswith("/")
+ url_base = file_url[0:file_url.rfind("/") + 1]
+ self.mountpoint_routes[file_url] = [("GET", file_url, handlers.FileHandler(base_path=base_path, url_base=url_base))]
+
+
+def get_route_builder(logger, aliases, config):
+ builder = RoutesBuilder(config.inject_script)
+ for alias in aliases:
+ url = alias["url-path"]
+ directory = alias["local-dir"]
+ if not url.startswith("/") or len(directory) == 0:
+ logger.error("\"url-path\" value must start with '/' and \"local-dir\" must not be empty.")
+ continue
+ if url.endswith("/"):
+ builder.add_mount_point(url, directory)
+ else:
+ builder.add_file_mount_point(url, directory)
+ return builder
+
+
+class ServerProc:
+ def __init__(self, mp_context, scheme=None):
+ self.proc = None
+ self.daemon = None
+ self.mp_context = mp_context
+ self.stop_flag = mp_context.Event()
+ self.scheme = scheme
+
+ def start(self, init_func, host, port, paths, routes, bind_address, config, log_handlers, **kwargs):
+ self.proc = self.mp_context.Process(target=self.create_daemon,
+ args=(init_func, host, port, paths, routes, bind_address,
+ config, log_handlers),
+ name='%s on port %s' % (self.scheme, port),
+ kwargs=kwargs)
+ self.proc.daemon = True
+ self.proc.start()
+
+ def create_daemon(self, init_func, host, port, paths, routes, bind_address,
+ config, log_handlers, **kwargs):
+ # Ensure that when we start this in a new process we have the global lock
+ # in the logging module unlocked
+ importlib.reload(logging)
+
+ logger = get_logger(config.log_level, log_handlers)
+
+ if sys.platform == "darwin":
+ # on Darwin, NOFILE starts with a very low limit (256), so bump it up a little
+ # by way of comparison, Debian starts with a limit of 1024, Windows 512
+ import resource # local, as it only exists on Unix-like systems
+ maxfilesperproc = int(subprocess.check_output(
+ ["sysctl", "-n", "kern.maxfilesperproc"]
+ ).strip())
+ soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
+ # 2048 is somewhat arbitrary, but gives us some headroom for wptrunner --parallel
+ # note that it's expected that 2048 will be the min here
+ new_soft = min(2048, maxfilesperproc, hard)
+ if soft < new_soft:
+ resource.setrlimit(resource.RLIMIT_NOFILE, (new_soft, hard))
+ try:
+ self.daemon = init_func(logger, host, port, paths, routes, bind_address, config, **kwargs)
+ except OSError:
+ logger.critical("Socket error on port %s" % port)
+ raise
+ except Exception:
+ logger.critical(traceback.format_exc())
+ raise
+
+ if self.daemon:
+ try:
+ self.daemon.start()
+ try:
+ self.stop_flag.wait()
+ except KeyboardInterrupt:
+ pass
+ finally:
+ self.daemon.stop()
+ except Exception:
+ logger.critical(traceback.format_exc())
+ raise
+
+ def request_shutdown(self):
+ if self.is_alive():
+ self.stop_flag.set()
+
+ def wait(self, timeout=None):
+ self.proc.join(timeout)
+
+ def is_alive(self):
+ return self.proc.is_alive()
+
+
+def check_subdomains(logger, config, routes, mp_context, log_handlers):
+ paths = config.paths
+ bind_address = config.bind_address
+
+ host = config.server_host
+ port = get_port()
+ logger.debug("Going to use port %d to check subdomains" % port)
+
+ wrapper = ServerProc(mp_context)
+ wrapper.start(start_http_server, host, port, paths, routes,
+ bind_address, config, log_handlers)
+
+ url = f"http://{host}:{port}/"
+ connected = False
+ for i in range(10):
+ try:
+ urllib.request.urlopen(url)
+ connected = True
+ break
+ except urllib.error.URLError:
+ time.sleep(1)
+
+ if not connected:
+ logger.critical("Failed to connect to test server "
+ "on {}. {}".format(url, EDIT_HOSTS_HELP))
+ sys.exit(1)
+
+ for domain in config.domains_set:
+ if domain == host:
+ continue
+
+ try:
+ urllib.request.urlopen("http://%s:%d/" % (domain, port))
+ except Exception:
+ logger.critical(f"Failed probing domain {domain}. {EDIT_HOSTS_HELP}")
+ sys.exit(1)
+
+ wrapper.request_shutdown()
+ wrapper.wait()
+
+
+def make_hosts_file(config, host):
+ rv = []
+
+ for domain in config.domains_set:
+ rv.append("%s\t%s\n" % (host, domain))
+
+ # Windows interprets the IP address 0.0.0.0 as non-existent, making it an
+ # appropriate alias for non-existent hosts. However, UNIX-like systems
+ # interpret the same address to mean any IP address, which is inappropriate
+ # for this context. These systems do not reserve any value for this
+ # purpose, so the unavailability of the domains must be taken for granted.
+ #
+ # https://github.com/web-platform-tests/wpt/issues/10560
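+ #
+ # For example, with host="192.168.42.42" the generated file contains lines
+ # like "192.168.42.42\tweb-platform.test", plus, on Windows only,
+ # "0.0.0.0\tnonexistent.web-platform.test" entries for the not_domains set.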
+ if platform.uname()[0] == "Windows":
+ for not_domain in config.not_domains_set:
+ rv.append("0.0.0.0\t%s\n" % not_domain)
+
+ return "".join(rv)
+
+
+def start_servers(logger, host, ports, paths, routes, bind_address, config,
+ mp_context, log_handlers, **kwargs):
+ servers = defaultdict(list)
+ for scheme, ports in ports.items():
+ assert len(ports) == {"http": 2, "https": 2}.get(scheme, 1)
+
+ # If trying to start HTTP/2.0 server, check compatibility
+ if scheme == 'h2' and not http2_compatible():
+ logger.error('Cannot start HTTP/2.0 server as the environment is not compatible. ' +
+ 'Requires OpenSSL 1.0.2+')
+ continue
+
+ # Skip the WebTransport over HTTP/3 server unless it is enabled explicitly.
+ if scheme == 'webtransport-h3' and not kwargs.get("webtransport_h3"):
+ continue
+
+ for port in ports:
+ if port is None:
+ continue
+
+ init_func = {
+ "http": start_http_server,
+ "http-private": start_http_server,
+ "http-public": start_http_server,
+ "https": start_https_server,
+ "https-private": start_https_server,
+ "https-public": start_https_server,
+ "h2": start_http2_server,
+ "ws": start_ws_server,
+ "wss": start_wss_server,
+ "webtransport-h3": start_webtransport_h3_server,
+ }[scheme]
+
+ server_proc = ServerProc(mp_context, scheme=scheme)
+ server_proc.start(init_func, host, port, paths, routes, bind_address,
+ config, log_handlers, **kwargs)
+ servers[scheme].append((port, server_proc))
+
+ return servers
+
+
+def startup_failed(logger):
+ logger.critical(EDIT_HOSTS_HELP)
+ sys.exit(1)
+
+
+def start_http_server(logger, host, port, paths, routes, bind_address, config, **kwargs):
+ try:
+ return wptserve.WebTestHttpd(host=host,
+ port=port,
+ doc_root=paths["doc_root"],
+ routes=routes,
+ rewrites=rewrites,
+ bind_address=bind_address,
+ config=config,
+ use_ssl=False,
+ key_file=None,
+ certificate=None,
+ latency=kwargs.get("latency"))
+ except Exception:
+ startup_failed(logger)
+
+
+def start_https_server(logger, host, port, paths, routes, bind_address, config, **kwargs):
+ try:
+ return wptserve.WebTestHttpd(host=host,
+ port=port,
+ doc_root=paths["doc_root"],
+ routes=routes,
+ rewrites=rewrites,
+ bind_address=bind_address,
+ config=config,
+ use_ssl=True,
+ key_file=config.ssl_config["key_path"],
+ certificate=config.ssl_config["cert_path"],
+ encrypt_after_connect=config.ssl_config["encrypt_after_connect"],
+ latency=kwargs.get("latency"))
+ except Exception:
+ startup_failed(logger)
+
+
+def start_http2_server(logger, host, port, paths, routes, bind_address, config, **kwargs):
+ try:
+ return wptserve.WebTestHttpd(host=host,
+ port=port,
+ handler_cls=wptserve.Http2WebTestRequestHandler,
+ doc_root=paths["doc_root"],
+ ws_doc_root=paths["ws_doc_root"],
+ routes=routes,
+ rewrites=rewrites,
+ bind_address=bind_address,
+ config=config,
+ use_ssl=True,
+ key_file=config.ssl_config["key_path"],
+ certificate=config.ssl_config["cert_path"],
+ encrypt_after_connect=config.ssl_config["encrypt_after_connect"],
+ latency=kwargs.get("latency"),
+ http2=True)
+ except Exception:
+ startup_failed(logger)
+
+
+class WebSocketDaemon:
+ def __init__(self, host, port, doc_root, handlers_root, bind_address, ssl_config):
+ logger = logging.getLogger()
+ self.host = host
+ cmd_args = ["-p", port,
+ "-d", doc_root,
+ "-w", handlers_root]
+
+ if ssl_config is not None:
+ cmd_args += ["--tls",
+ "--private-key", ssl_config["key_path"],
+ "--certificate", ssl_config["cert_path"]]
+
+ if bind_address:
+ cmd_args = ["-H", host] + cmd_args
+ opts, args = pywebsocket._parse_args_and_config(cmd_args)
+ opts.cgi_directories = []
+ opts.is_executable_method = None
+ self.server = pywebsocket.WebSocketServer(opts)
+ ports = [item[0].getsockname()[1] for item in self.server._sockets]
+ if not ports:
+ # TODO: Fix the logging configuration in WebSockets processes
+ # see https://github.com/web-platform-tests/wpt/issues/22719
+ logger.critical("Failed to start websocket server on port %s, "
+ "is something already using that port?" % port)
+ raise OSError()
+ assert all(item == ports[0] for item in ports)
+ self.port = ports[0]
+ self.started = False
+ self.server_thread = None
+
+ def start(self):
+ self.started = True
+ self.server_thread = threading.Thread(target=self.server.serve_forever)
+ self.server_thread.daemon = True # don't hang on exit
+ self.server_thread.start()
+
+ def stop(self):
+ """
+ Stops the server.
+
+ If the server is not running, this method has no effect.
+ """
+ if self.started:
+ try:
+ self.server.shutdown()
+ self.server.server_close()
+ self.server_thread.join()
+ self.server_thread = None
+ except AttributeError:
+ pass
+ self.started = False
+ self.server = None
+
+
+def start_ws_server(logger, host, port, paths, routes, bind_address, config, **kwargs):
+ try:
+ return WebSocketDaemon(host,
+ str(port),
+ repo_root,
+ config.paths["ws_doc_root"],
+ bind_address,
+ ssl_config=None)
+ except Exception:
+ startup_failed(logger)
+
+
+def start_wss_server(logger, host, port, paths, routes, bind_address, config, **kwargs):
+ try:
+ return WebSocketDaemon(host,
+ str(port),
+ repo_root,
+ config.paths["ws_doc_root"],
+ bind_address,
+ config.ssl_config)
+ except Exception:
+ startup_failed(logger)
+
+
+def start_webtransport_h3_server(logger, host, port, paths, routes, bind_address, config, **kwargs):
+ try:
+ # TODO(bashi): Move the following import to the beginning of this file
+ # once WebTransportH3Server is enabled by default.
+ from webtransport.h3.webtransport_h3_server import WebTransportH3Server # type: ignore
+ return WebTransportH3Server(host=host,
+ port=port,
+ doc_root=paths["doc_root"],
+ cert_path=config.ssl_config["cert_path"],
+ key_path=config.ssl_config["key_path"],
+ logger=logger)
+ except Exception as error:
+ logger.critical(
+ f"Failed to start WebTransport over HTTP/3 server: {error}")
+ sys.exit(0)
+
+
+def start(logger, config, routes, mp_context, log_handlers, **kwargs):
+ host = config["server_host"]
+ ports = config.ports
+ paths = config.paths
+ bind_address = config["bind_address"]
+
+ logger.debug("Using ports: %r" % ports)
+
+ servers = start_servers(logger, host, ports, paths, routes, bind_address, config, mp_context,
+ log_handlers, **kwargs)
+
+ return servers
+
+
+def iter_servers(servers):
+ for servers in servers.values():
+ for port, server in servers:
+ yield server
+
+
+def _make_subdomains_product(s: Set[str], depth: int = 2) -> Set[str]:
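+ # e.g. with depth=2, {"a", "www"} expands to
+ # {"a", "www", "a.a", "a.www", "www.a", "www.www"}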
+ return {".".join(x) for x in chain(*(product(s, repeat=i) for i in range(1, depth+1)))}
+
+
+_subdomains = {"www",
+ "www1",
+ "www2",
+ "天気の良い日",
+ "élève"}
+
+_not_subdomains = {"nonexistent"}
+
+_subdomains = _make_subdomains_product(_subdomains)
+
+_not_subdomains = _make_subdomains_product(_not_subdomains)
+
+
+class ConfigBuilder(config.ConfigBuilder):
+ """serve config
+
+ This subclasses wptserve.config.ConfigBuilder to add serve config options.
+ """
+
+ _default = {
+ "browser_host": "web-platform.test",
+ "alternate_hosts": {
+ "alt": "not-web-platform.test"
+ },
+ "doc_root": repo_root,
+ "ws_doc_root": os.path.join(repo_root, "websockets", "handlers"),
+ "server_host": None,
+ "ports": {
+ "http": [8000, "auto"],
+ "http-private": ["auto"],
+ "http-public": ["auto"],
+ "https": [8443, 8444],
+ "https-private": ["auto"],
+ "https-public": ["auto"],
+ "ws": ["auto"],
+ "wss": ["auto"],
+ "webtransport-h3": ["auto"],
+ },
+ "check_subdomains": True,
+ "log_level": "info",
+ "bind_address": True,
+ "ssl": {
+ "type": "pregenerated",
+ "encrypt_after_connect": False,
+ "openssl": {
+ "openssl_binary": "openssl",
+ "base_path": "_certs",
+ "password": "web-platform-tests",
+ "force_regenerate": False,
+ "duration": 30,
+ "base_conf_path": None
+ },
+ "pregenerated": {
+ "host_key_path": os.path.join(repo_root, "tools", "certs", "web-platform.test.key"),
+ "host_cert_path": os.path.join(repo_root, "tools", "certs", "web-platform.test.pem")
+ },
+ "none": {}
+ },
+ "aliases": []
+ }
+
+ computed_properties = ["ws_doc_root"] + config.ConfigBuilder.computed_properties
+
+ def __init__(self, logger, *args, **kwargs):
+ if "subdomains" not in kwargs:
+ kwargs["subdomains"] = _subdomains
+ if "not_subdomains" not in kwargs:
+ kwargs["not_subdomains"] = _not_subdomains
+ super().__init__(
+ logger,
+ *args,
+ **kwargs
+ )
+ with self as c:
+ browser_host = c.get("browser_host")
+ alternate_host = c.get("alternate_hosts", {}).get("alt")
+
+ if not domains_are_distinct(browser_host, alternate_host):
+ raise ValueError(
+ "Alternate host must be distinct from browser host"
+ )
+
+ def _get_ws_doc_root(self, data):
+ if data["ws_doc_root"] is not None:
+ return data["ws_doc_root"]
+ else:
+ return os.path.join(data["doc_root"], "websockets", "handlers")
+
+ def _get_paths(self, data):
+ rv = super()._get_paths(data)
+ rv["ws_doc_root"] = data["ws_doc_root"]
+ return rv
+
+
+def build_config(logger, override_path=None, config_cls=ConfigBuilder, **kwargs):
+ rv = config_cls(logger)
+
+ enable_http2 = kwargs.get("h2")
+ if enable_http2 is None:
+ enable_http2 = True
+ if enable_http2:
+ rv._default["ports"]["h2"] = [9000]
+
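+ # Override files are plain JSON whose keys mirror ConfigBuilder._default;
+ # for example (illustrative values):
+ # {"browser_host": "example.test", "check_subdomains": false}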
+ if override_path and os.path.exists(override_path):
+ with open(override_path) as f:
+ override_obj = json.load(f)
+ rv.update(override_obj)
+
+ if kwargs.get("config_path"):
+ other_path = os.path.abspath(os.path.expanduser(kwargs.get("config_path")))
+ if os.path.exists(other_path):
+ with open(other_path) as f:
+ override_obj = json.load(f)
+ rv.update(override_obj)
+ else:
+ raise ValueError("Config path %s does not exist" % other_path)
+
+ if kwargs.get("verbose"):
+ rv.log_level = "debug"
+
+ setattr(rv, "inject_script", kwargs.get("inject_script"))
+
+ overriding_path_args = [("doc_root", "Document root"),
+ ("ws_doc_root", "WebSockets document root")]
+ for key, title in overriding_path_args:
+ value = kwargs.get(key)
+ if value is None:
+ continue
+ value = os.path.abspath(os.path.expanduser(value))
+ if not os.path.exists(value):
+ raise ValueError("%s path %s does not exist" % (title, value))
+ setattr(rv, key, value)
+
+ return rv
+
+
+def get_parser():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--latency", type=int,
+ help="Artificial latency to add before sending http responses, in ms")
+ parser.add_argument("--config", action="store", dest="config_path",
+ help="Path to external config file")
+ parser.add_argument("--doc_root", action="store", dest="doc_root",
+ help="Path to document root. Overrides config.")
+ parser.add_argument("--ws_doc_root", action="store", dest="ws_doc_root",
+ help="Path to WebSockets document root. Overrides config.")
+ parser.add_argument("--inject-script", default=None,
+ help="Path to script file to inject, useful for testing polyfills.")
+ parser.add_argument("--alias_file", action="store", dest="alias_file",
+ help="File with entries for aliases/multiple doc roots. In form of `/ALIAS_NAME/, DOC_ROOT\\n`")
+ parser.add_argument("--h2", action="store_true", dest="h2", default=None,
+ help=argparse.SUPPRESS)
+ parser.add_argument("--no-h2", action="store_false", dest="h2", default=None,
+ help="Disable the HTTP/2.0 server")
+ parser.add_argument("--webtransport-h3", action="store_true",
+ help="Enable WebTransport over HTTP/3 server")
+ parser.add_argument("--exit-after-start", action="store_true", help="Exit after starting servers")
+ parser.add_argument("--verbose", action="store_true", help="Enable verbose logging")
+ parser.set_defaults(report=False)
+ parser.set_defaults(is_wave=False)
+ return parser
+
+
+class MpContext:
+ def __getattr__(self, name):
+ return getattr(multiprocessing, name)
+
+
+def get_logger(log_level, log_handlers):
+ """Get a logger configured to log at level log_level
+
+ If the logger has existing handlers the log_handlers argument is ignored.
+ Otherwise the handlers in log_handlers are added to the logger. If there are
+ no log_handlers passed and no configured handlers, a stream handler is added
+ to the logger.
+
+ Typically this is called once per process to set up logging in that process.
+
+ :param log_level: - A string representing a log level e.g. "info"
+ :param log_handlers: - Optional list of Handler objects.
+ """
+ logger = logging.getLogger()
+ logger.setLevel(getattr(logging, log_level.upper()))
+ if not logger.hasHandlers():
+ if log_handlers is not None:
+ for handler in log_handlers:
+ logger.addHandler(handler)
+ else:
+ handler = logging.StreamHandler(sys.stdout)
+ formatter = logging.Formatter("[%(asctime)s %(processName)s] %(levelname)s - %(message)s")
+ handler.setFormatter(formatter)
+ logger.addHandler(handler)
+ return logger
+
+
+def run(config_cls=ConfigBuilder, route_builder=None, mp_context=None, log_handlers=None,
+ **kwargs):
+ logger = get_logger("INFO", log_handlers)
+
+ if mp_context is None:
+ if hasattr(multiprocessing, "get_context"):
+ mp_context = multiprocessing.get_context()
+ else:
+ mp_context = MpContext()
+
+ with build_config(logger,
+ os.path.join(repo_root, "config.json"),
+ config_cls=config_cls,
+ **kwargs) as config:
+ # This sets the right log level
+ logger = get_logger(config.log_level, log_handlers)
+
+ bind_address = config["bind_address"]
+
+ if kwargs.get("alias_file"):
+ with open(kwargs["alias_file"]) as alias_file:
+ for line in alias_file:
+ alias, doc_root = (x.strip() for x in line.split(','))
+ config["aliases"].append({
+ 'url-path': alias,
+ 'local-dir': doc_root,
+ })
+
+ if route_builder is None:
+ route_builder = get_route_builder
+ routes = route_builder(logger, config.aliases, config).get_routes()
+
+ if config["check_subdomains"]:
+ check_subdomains(logger, config, routes, mp_context, log_handlers)
+
+ stash_address = None
+ if bind_address:
+ stash_address = (config.server_host, get_port(""))
+ logger.debug("Going to use port %d for stash" % stash_address[1])
+
+ with stash.StashServer(stash_address, authkey=str(uuid.uuid4())):
+ servers = start(logger, config, routes, mp_context, log_handlers, **kwargs)
+
+ if not kwargs.get("exit_after_start"):
+ try:
+ # Periodically check if all the servers are alive
+ server_process_exited = False
+ while not server_process_exited:
+ for server in iter_servers(servers):
+ server.proc.join(1)
+ if not server.proc.is_alive():
+ server_process_exited = True
+ break
+ except KeyboardInterrupt:
+ pass
+
+ failed_subproc = 0
+ for server in iter_servers(servers):
+ logger.info('Status of subprocess "%s": running', server.proc.name)
+ server.request_shutdown()
+
+ for server in iter_servers(servers):
+ server.wait(timeout=1)
+ if server.proc.exitcode == 0:
+ logger.info('Status of subprocess "%s": exited correctly', server.proc.name)
+ else:
+ subproc = server.proc
+ logger.warning('Status of subprocess "%s": failed. Exit with non-zero status: %d',
+ subproc.name, subproc.exitcode)
+ failed_subproc += 1
+ return failed_subproc
+
+
+def main():
+ kwargs = vars(get_parser().parse_args())
+ return run(**kwargs)
diff --git a/testing/web-platform/tests/tools/serve/test_functional.py b/testing/web-platform/tests/tools/serve/test_functional.py
new file mode 100644
index 0000000000..94bedb60f7
--- /dev/null
+++ b/testing/web-platform/tests/tools/serve/test_functional.py
@@ -0,0 +1,81 @@
+# mypy: allow-untyped-defs
+
+try:
+ from importlib import reload
+except ImportError:
+ pass
+import json
+import os
+import queue
+import tempfile
+import threading
+
+import pytest
+
+from . import serve
+from wptserve import logger
+
+
+class ServerProcSpy(serve.ServerProc):
+ instances = None
+
+ def start(self, *args, **kwargs):
+ result = super().start(*args, **kwargs)
+
+ if ServerProcSpy.instances is not None:
+ ServerProcSpy.instances.put(self)
+
+ return result
+
+
+serve.ServerProc = ServerProcSpy # type: ignore
+
+
+@pytest.fixture()
+def server_subprocesses():
+ ServerProcSpy.instances = queue.Queue()
+ yield ServerProcSpy.instances
+ ServerProcSpy.instances = None
+
+
+@pytest.fixture()
+def tempfile_name():
+ fd, name = tempfile.mkstemp()
+ yield name
+ os.close(fd)
+ os.remove(name)
+
+
+def test_subprocess_exit(server_subprocesses, tempfile_name):
+ timeout = 30
+
+ def target():
+ # By default, the server initially creates a child process to validate
+ # local system configuration. That process is unrelated to the behavior
+ # under test, but at the time of this writing, the parent uses the same
+ # constructor that is also used to create the long-running processes
+ # which are relevant to this functionality. Disable the check so that
+ # the constructor is only used to create relevant processes.
+ with open(tempfile_name, 'w') as handle:
+ json.dump({"check_subdomains": False, "bind_address": False}, handle)
+
+ # The `logger` module from the wptserve package uses a singleton
+ # pattern which resists testing. In order to avoid conflicting with
+ # other tests which rely on that module, pre-existing state is
+ # discarded through an explicit "reload" operation.
+ reload(logger)
+
+ serve.run(config_path=tempfile_name)
+
+ thread = threading.Thread(target=target)
+
+ thread.start()
+
+ server_subprocesses.get(True, timeout)
+ subprocess = server_subprocesses.get(True, timeout)
+ subprocess.request_shutdown()
+ subprocess.wait()
+
+ thread.join(timeout)
+
+ assert not thread.is_alive()
diff --git a/testing/web-platform/tests/tools/serve/test_serve.py b/testing/web-platform/tests/tools/serve/test_serve.py
new file mode 100644
index 0000000000..e590b49870
--- /dev/null
+++ b/testing/web-platform/tests/tools/serve/test_serve.py
@@ -0,0 +1,149 @@
+# mypy: allow-untyped-defs
+
+import logging
+import os
+import pickle
+import platform
+
+import pytest
+
+import localpaths # type: ignore
+from . import serve
+from .serve import ConfigBuilder, inject_script
+
+
+logger = logging.getLogger()
+
+@pytest.mark.skipif(platform.uname()[0] == "Windows",
+ reason="Expected contents are platform-dependent")
+def test_make_hosts_file_nix():
+ with ConfigBuilder(logger,
+ ports={"http": [8000]},
+ browser_host="foo.bar",
+ alternate_hosts={"alt": "foo2.bar"},
+ subdomains={"a", "b"},
+ not_subdomains={"x", "y"}) as c:
+ hosts = serve.make_hosts_file(c, "192.168.42.42")
+ lines = hosts.split("\n")
+ assert set(lines) == {"",
+ "192.168.42.42\tfoo.bar",
+ "192.168.42.42\tfoo2.bar",
+ "192.168.42.42\ta.foo.bar",
+ "192.168.42.42\ta.foo2.bar",
+ "192.168.42.42\tb.foo.bar",
+ "192.168.42.42\tb.foo2.bar"}
+ assert lines[-1] == ""
+
+@pytest.mark.skipif(platform.uname()[0] != "Windows",
+ reason="Expected contents are platform-dependent")
+def test_make_hosts_file_windows():
+ with ConfigBuilder(logger,
+ ports={"http": [8000]},
+ browser_host="foo.bar",
+ alternate_hosts={"alt": "foo2.bar"},
+ subdomains={"a", "b"},
+ not_subdomains={"x", "y"}) as c:
+ hosts = serve.make_hosts_file(c, "192.168.42.42")
+ lines = hosts.split("\n")
+ assert set(lines) == {"",
+ "0.0.0.0\tx.foo.bar",
+ "0.0.0.0\tx.foo2.bar",
+ "0.0.0.0\ty.foo.bar",
+ "0.0.0.0\ty.foo2.bar",
+ "192.168.42.42\tfoo.bar",
+ "192.168.42.42\tfoo2.bar",
+ "192.168.42.42\ta.foo.bar",
+ "192.168.42.42\ta.foo2.bar",
+ "192.168.42.42\tb.foo.bar",
+ "192.168.42.42\tb.foo2.bar"}
+ assert lines[-1] == ""
+
+
+def test_ws_doc_root_default():
+ with ConfigBuilder(logger) as c:
+ assert c.doc_root == localpaths.repo_root
+ assert c.ws_doc_root == os.path.join(localpaths.repo_root, "websockets", "handlers")
+ assert c.paths["ws_doc_root"] == c.ws_doc_root
+
+
+def test_init_ws_doc_root():
+ with ConfigBuilder(logger, ws_doc_root="/") as c:
+ assert c.doc_root == localpaths.repo_root # check this hasn't changed
+ assert c.ws_doc_root == "/"
+ assert c.paths["ws_doc_root"] == c.ws_doc_root
+
+
+def test_set_ws_doc_root():
+ cb = ConfigBuilder(logger)
+ cb.ws_doc_root = "/"
+ with cb as c:
+ assert c.doc_root == localpaths.repo_root # check this hasn't changed
+ assert c.ws_doc_root == "/"
+ assert c.paths["ws_doc_root"] == c.ws_doc_root
+
+
+def test_pickle():
+ # Ensure that the config object can be pickled
+ with ConfigBuilder(logger) as c:
+ pickle.dumps(c)
+
+
+def test_alternate_host_unspecified():
+ ConfigBuilder(logger, browser_host="web-platform.test")
+
+
+@pytest.mark.parametrize("primary, alternate", [
+ ("web-platform.test", "web-platform.test"),
+ ("a.web-platform.test", "web-platform.test"),
+ ("web-platform.test", "a.web-platform.test"),
+ ("a.web-platform.test", "a.web-platform.test"),
+])
+def test_alternate_host_invalid(primary, alternate):
+ with pytest.raises(ValueError):
+ ConfigBuilder(logger, browser_host=primary, alternate_hosts={"alt": alternate})
+
+@pytest.mark.parametrize("primary, alternate", [
+ ("web-platform.test", "not-web-platform.test"),
+ ("a.web-platform.test", "b.web-platform.test"),
+ ("web-platform-tests.dev", "web-platform-tests.live"),
+])
+def test_alternate_host_valid(primary, alternate):
+ ConfigBuilder(logger, browser_host=primary, alternate_hosts={"alt": alternate})
+
+
+# A token marking the location of expected script injection.
+INJECT_SCRIPT_MARKER = b"<!-- inject here -->"
+
+
+def test_inject_script_after_head():
+ html = b"""<!DOCTYPE html>
+ <html>
+ <head>
+ <!-- inject here --><script src="test.js"></script>
+ </head>
+ <body>
+ </body>
+ </html>"""
+ assert INJECT_SCRIPT_MARKER in html
+ assert inject_script(html.replace(INJECT_SCRIPT_MARKER, b""), INJECT_SCRIPT_MARKER) == html
+
+
+def test_inject_script_no_html_head():
+ html = b"""<!DOCTYPE html>
+ <!-- inject here --><div></div>"""
+ assert INJECT_SCRIPT_MARKER in html
+ assert inject_script(html.replace(INJECT_SCRIPT_MARKER, b""), INJECT_SCRIPT_MARKER) == html
+
+
+def test_inject_script_no_doctype():
+ html = b"""<!-- inject here --><div></div>"""
+ assert INJECT_SCRIPT_MARKER in html
+ assert inject_script(html.replace(INJECT_SCRIPT_MARKER, b""), INJECT_SCRIPT_MARKER) == html
+
+
+def test_inject_script_parse_error():
+ html = b"""<!--<!-- inject here --><div></div>"""
+ assert INJECT_SCRIPT_MARKER in html
+ # On a parse error, the script should not be injected and the original content should be
+ # returned.
+ assert INJECT_SCRIPT_MARKER not in inject_script(html.replace(INJECT_SCRIPT_MARKER, b""), INJECT_SCRIPT_MARKER)
diff --git a/testing/web-platform/tests/tools/serve/wave.py b/testing/web-platform/tests/tools/serve/wave.py
new file mode 100644
index 0000000000..b24ec1cc81
--- /dev/null
+++ b/testing/web-platform/tests/tools/serve/wave.py
@@ -0,0 +1,134 @@
+# The ./wpt serve-wave command is broken, so mypy errors are ignored instead of
+# making untestable changes to the problematic imports.
+# See https://github.com/web-platform-tests/wpt/issues/29024.
+# mypy: ignore-errors
+
+import subprocess
+from manifest import manifest
+import localpaths
+import os
+
+try:
+ from serve import serve
+except ImportError:
+ import serve
+
+from tools.wpt import wpt
+
+
+class WaveHandler:
+ def __init__(self, server):
+ self.server = server
+
+ def __call__(self, request, response):
+ self.server.handle_request(request, response)
+
+
+def get_route_builder_func(report):
+ def get_route_builder(logger, aliases, config):
+ wave_cfg = None
+ if config is not None and "wave" in config:
+ wave_cfg = config["wave"]
+ builder = serve.get_route_builder(logger, aliases, config)
+ logger.debug("Loading manifest ...")
+ data = load_manifest()
+ from ..wave.wave_server import WaveServer
+ wave_server = WaveServer()
+ wave_server.initialize(
+ configuration_file_path=os.path.abspath("./config.json"),
+ reports_enabled=report,
+ tests=data["items"])
+
+ web_root = "wave"
+ if wave_cfg is not None and "web_root" in wave_cfg:
+ web_root = wave_cfg["web_root"]
+ if not web_root.startswith("/"):
+ web_root = "/" + web_root
+
+ wave_handler = WaveHandler(wave_server)
+ builder.add_handler("*", web_root + "*", wave_handler)
+ # Serve the wave-specific testharnessreport.js
+ file_path = os.path.join(wpt.localpaths.repo_root, "tools/wave/resources/testharnessreport.js")
+ builder.add_static(
+ file_path,
+ {},
+ "text/javascript;charset=utf8",
+ "/resources/testharnessreport.js")
+
+ return builder
+ return get_route_builder
+
+
+class ConfigBuilder(serve.ConfigBuilder):
+ _default = serve.ConfigBuilder._default
+ _default.update({
+ "wave": { # wave specific configuration parameters
+ "results": "./results",
+ "timeouts": {
+ "automatic": 60000,
+ "manual": 300000
+ },
+ "enable_results_import": False,
+ "web_root": "/_wave",
+ "persisting_interval": 20,
+ "api_titles": []
+ }
+ })
+
+
+def get_parser():
+ parser = serve.get_parser()
+ # Add wave-specific arguments
+ parser.add_argument("--report", action="store_true", dest="report",
+ help="Flag for enabling the WPTReporting server")
+ return parser
+
+
+def run(venv=None, **kwargs):
+ if venv is not None:
+ venv.start()
+ else:
+ raise Exception("Missing virtualenv for serve-wave.")
+
+ if kwargs['report'] is True:
+ if not is_wptreport_installed():
+ raise Exception("wptreport is not installed. Please install it from https://github.com/w3c/wptreport")
+
+ serve.run(config_cls=ConfigBuilder,
+ route_builder=get_route_builder_func(kwargs["report"]),
+ log_handlers=None,
+ **kwargs)
+
+
+# Check that wptreport is installed by invoking it
+def is_wptreport_installed():
+ try:
+ subprocess.check_output(["wptreport", "--help"])
+ return True
+ except Exception:
+ return False
+
+
+def load_manifest():
+ root = localpaths.repo_root
+ path = os.path.join(root, "MANIFEST.json")
+ manifest_file = manifest.load_and_update(root, path, "/", parallel=False)
+
+ supported_types = ["testharness", "manual"]
+ data = {"items": {},
+ "url_base": "/"}
+ for item_type in supported_types:
+ data["items"][item_type] = {}
+ for item_type, path, tests in manifest_file.itertypes(*supported_types):
+ tests_data = []
+ for item in tests:
+ test_data = [item.url[1:]]
+ if item_type == "reftest":
+ test_data.append(item.references)
+ test_data.append({})
+ if item_type != "manual":
+ test_data[-1]["timeout"] = item.timeout
+ tests_data.append(test_data)
+ assert path not in data["items"][item_type]
+ data["items"][item_type][path] = tests_data
+ return data
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/.gitignore b/testing/web-platform/tests/tools/third_party/atomicwrites/.gitignore
new file mode 100644
index 0000000000..2b2d312875
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/.gitignore
@@ -0,0 +1,9 @@
+.tox
+*.pyc
+*.pyo
+__pycache__
+*.egg-info
+docs/_build
+build
+dist
+.cache
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/.travis.yml b/testing/web-platform/tests/tools/third_party/atomicwrites/.travis.yml
new file mode 100644
index 0000000000..e9779018ad
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/.travis.yml
@@ -0,0 +1,35 @@
+sudo: false
+os: linux
+language: python
+
+matrix:
+ include:
+ - os: osx
+ language: generic
+ env: TOXENV_SUFFIX=test
+
+python:
+ - "2.6"
+ - "2.7"
+ - "pypy"
+ - "3.3"
+ - "3.4"
+
+install:
+ - # The OS X VM doesn't have any Python support at all
+ # See https://github.com/travis-ci/travis-ci/issues/2312
+ if [ "$TRAVIS_OS_NAME" = "osx" ]; then
+ brew update;
+ brew install python3;
+ virtualenv -p python3 $HOME/osx-py3;
+ . $HOME/osx-py3/bin/activate;
+ export TRAVIS_PYTHON_VERSION="$(python --version | cut -d ' ' -f 2 | cut -d . -f -2)";
+ fi
+ - pip install tox
+
+script:
+ - export TOX_PY="$(echo py$TRAVIS_PYTHON_VERSION | tr -d . | sed -e 's/pypypy/pypy/')"
+ - tox -e $TOX_PY-test
+ - if [ "$TRAVIS_PYTHON_VERSION" = "2.7" ] || [ "$TRAVIS_PYTHON_VERSION" = "3.5" ]; then
+ tox -e $TOX_PY-stylecheck;
+ fi
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/CONTRIBUTING.rst b/testing/web-platform/tests/tools/third_party/atomicwrites/CONTRIBUTING.rst
new file mode 100644
index 0000000000..86d3e4a65e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/CONTRIBUTING.rst
@@ -0,0 +1,11 @@
+Thanks for contributing to python-atomicwrites! This document is a
+work-in-progress. Below are a few notes that are useful for writing patches.
+
+Running the tests
+=================
+
+::
+
+ pip install tox
+ tox -e py-test
+ tox -e py-stylecheck
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/LICENSE b/testing/web-platform/tests/tools/third_party/atomicwrites/LICENSE
new file mode 100644
index 0000000000..3bbadc3af2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2015-2016 Markus Unterwaditzer
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/MANIFEST.in b/testing/web-platform/tests/tools/third_party/atomicwrites/MANIFEST.in
new file mode 100644
index 0000000000..1b28469174
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/MANIFEST.in
@@ -0,0 +1,6 @@
+include LICENSE
+include README.rst
+
+recursive-include docs *
+recursive-include tests *
+prune docs/_build
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/Makefile b/testing/web-platform/tests/tools/third_party/atomicwrites/Makefile
new file mode 100644
index 0000000000..d257e7b673
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/Makefile
@@ -0,0 +1,2 @@
+release:
+ python setup.py sdist bdist_wheel upload
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/README.rst b/testing/web-platform/tests/tools/third_party/atomicwrites/README.rst
new file mode 100644
index 0000000000..3a5658cbd8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/README.rst
@@ -0,0 +1,102 @@
+===================
+python-atomicwrites
+===================
+
+.. image:: https://travis-ci.org/untitaker/python-atomicwrites.svg?branch=master
+ :target: https://travis-ci.org/untitaker/python-atomicwrites
+
+.. image:: https://ci.appveyor.com/api/projects/status/vadc4le3c27to59x/branch/master?svg=true
+ :target: https://ci.appveyor.com/project/untitaker/python-atomicwrites/branch/master
+
+Atomic file writes.
+
+.. code-block:: python
+
+ from atomicwrites import atomic_write
+
+ with atomic_write('foo.txt', overwrite=True) as f:
+ f.write('Hello world.')
+ # "foo.txt" doesn't exist yet.
+
+ # Now it does.
+
+
+Features that distinguish it from other similar libraries (see `Alternatives and Credit`_):
+
+- Race-free assertion that the target file doesn't yet exist. This can be
+ controlled with the ``overwrite`` parameter.
+
+- Windows support, although not well-tested. The MSDN resources are not very
+ explicit about which operations are atomic.
+
+- Simple high-level API that wraps a very flexible class-based API.
+
+- Consistent error handling across platforms.
+
+
+How it works
+============
+
+It uses a temporary file in the same directory as the given path. This ensures
+that the temporary file resides on the same filesystem.
+
+The temporary file will then be atomically moved to the target location: On
+POSIX, it will use ``rename`` if files should be overwritten, otherwise a
+combination of ``link`` and ``unlink``. On Windows, it uses MoveFileEx_ through
+stdlib's ``ctypes`` with the appropriate flags.
+
+Note that with ``link`` and ``unlink``, there's a time window where the file
+might be available under two entries in the filesystem: the name of the
+temporary file, and the name of the target file.
+
+Also note that the permissions of the target file may change this way. In some
+situations a ``chmod`` can be issued without any concurrency problems, but
+since that is not always the case, this library doesn't do it by itself.
+
+.. _MoveFileEx: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365240%28v=vs.85%29.aspx
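+
+For illustration, here is a minimal sketch of the POSIX overwrite path
+described above (simplified, not the library's actual code; see
+``atomicwrites/__init__.py`` for the real implementation):
+
+.. code-block:: python
+
+    import os
+    import tempfile
+
+    def sketch_atomic_overwrite(path, data):
+        """Write ``data`` to ``path`` by renaming an fsync'd temporary file."""
+        directory = os.path.dirname(os.path.abspath(path))
+        fd, tmp_path = tempfile.mkstemp(dir=directory)  # same filesystem as target
+        try:
+            with os.fdopen(fd, "w") as f:
+                f.write(data)
+                f.flush()
+                os.fsync(f.fileno())  # flush file content and metadata
+        except BaseException:
+            os.unlink(tmp_path)
+            raise
+        os.rename(tmp_path, path)  # atomically replaces any existing file
+        dir_fd = os.open(directory, 0)
+        try:
+            os.fsync(dir_fd)  # flush the renamed directory entry
+        finally:
+            os.close(dir_fd)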
+
+fsync
+-----
+
+On POSIX, ``fsync`` is invoked on the temporary file after it is written (to
+flush file content and metadata), and on the parent directory after the file is
+moved (to flush filename).
+
+``fsync`` does not take care of disks' internal buffers, but there don't seem
+to be any standard POSIX APIs for that. On OS X, ``fcntl`` is used with
+``F_FULLFSYNC`` instead of ``fsync`` for that reason.
+
+On Windows, `_commit <https://msdn.microsoft.com/en-us/library/17618685.aspx>`_
+is used, but there are no guarantees about disk internal buffers.
+
+Alternatives and Credit
+=======================
+
+Atomicwrites is directly inspired by the following libraries (and shares a
+minimal amount of code):
+
+- The Trac project's `utility functions
+ <http://www.edgewall.org/docs/tags-trac-0.11.7/epydoc/trac.util-pysrc.html>`_,
+ also used in `Werkzeug <http://werkzeug.pocoo.org/>`_ and
+ `mitsuhiko/python-atomicfile
+ <https://github.com/mitsuhiko/python-atomicfile>`_. The idea to use
+ ``ctypes`` instead of ``PyWin32`` originated there.
+
+- `abarnert/fatomic <https://github.com/abarnert/fatomic>`_. Windows support
+ (based on ``PyWin32``) was originally taken from there.
+
+Other alternatives to atomicwrites include:
+
+- `sashka/atomicfile <https://github.com/sashka/atomicfile>`_. Originally I
+ considered using that, but at the time it was lacking a lot of features I
+ needed (Windows support, overwrite-parameter, overriding behavior through
+ subclassing).
+
+- The `Boltons library collection <https://github.com/mahmoud/boltons>`_
+ features a class for atomic file writes, which seems to have a very similar
+ ``overwrite`` parameter. It is lacking Windows support though.
+
+License
+=======
+
+Licensed under the MIT, see ``LICENSE``.
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/appveyor.yml b/testing/web-platform/tests/tools/third_party/atomicwrites/appveyor.yml
new file mode 100644
index 0000000000..a5d47a0768
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/appveyor.yml
@@ -0,0 +1,18 @@
+build: false # Not a C# project, build stuff at the test step instead.
+environment:
+ matrix:
+ - PYTHON: "C:/Python27"
+ - PYTHON: "C:/Python33"
+ - PYTHON: "C:/Python34"
+
+init:
+ - "ECHO %PYTHON%"
+ - ps: "ls C:/Python*"
+
+install:
+ - ps: (new-object net.webclient).DownloadFile('https://bootstrap.pypa.io/get-pip.py', 'C:/get-pip.py')
+ - "%PYTHON%/python.exe C:/get-pip.py"
+ - "%PYTHON%/Scripts/pip.exe install tox"
+
+test_script:
+ - "%PYTHON%/Scripts/tox.exe -e py-test"
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/atomicwrites/__init__.py b/testing/web-platform/tests/tools/third_party/atomicwrites/atomicwrites/__init__.py
new file mode 100644
index 0000000000..a182c07afd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/atomicwrites/__init__.py
@@ -0,0 +1,201 @@
+import contextlib
+import os
+import sys
+import tempfile
+
+try:
+ import fcntl
+except ImportError:
+ fcntl = None
+
+__version__ = '1.1.5'
+
+
+PY2 = sys.version_info[0] == 2
+
+text_type = unicode if PY2 else str # noqa
+
+
+def _path_to_unicode(x):
+ if not isinstance(x, text_type):
+ return x.decode(sys.getfilesystemencoding())
+ return x
+
+
+_proper_fsync = os.fsync
+
+
+if sys.platform != 'win32':
+ if hasattr(fcntl, 'F_FULLFSYNC'):
+ def _proper_fsync(fd):
+ # https://lists.apple.com/archives/darwin-dev/2005/Feb/msg00072.html
+ # https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man2/fsync.2.html
+ # https://github.com/untitaker/python-atomicwrites/issues/6
+ fcntl.fcntl(fd, fcntl.F_FULLFSYNC)
+
+ def _sync_directory(directory):
+ # Ensure that filenames are written to disk
+ fd = os.open(directory, 0)
+ try:
+ _proper_fsync(fd)
+ finally:
+ os.close(fd)
+
+ def _replace_atomic(src, dst):
+ os.rename(src, dst)
+ _sync_directory(os.path.normpath(os.path.dirname(dst)))
+
+ def _move_atomic(src, dst):
+ os.link(src, dst)
+ os.unlink(src)
+
+ src_dir = os.path.normpath(os.path.dirname(src))
+ dst_dir = os.path.normpath(os.path.dirname(dst))
+ _sync_directory(dst_dir)
+ if src_dir != dst_dir:
+ _sync_directory(src_dir)
+else:
+ from ctypes import windll, WinError
+
+ _MOVEFILE_REPLACE_EXISTING = 0x1
+ _MOVEFILE_WRITE_THROUGH = 0x8
+ _windows_default_flags = _MOVEFILE_WRITE_THROUGH
+
+ def _handle_errors(rv):
+ if not rv:
+ raise WinError()
+
+ def _replace_atomic(src, dst):
+ _handle_errors(windll.kernel32.MoveFileExW(
+ _path_to_unicode(src), _path_to_unicode(dst),
+ _windows_default_flags | _MOVEFILE_REPLACE_EXISTING
+ ))
+
+ def _move_atomic(src, dst):
+ _handle_errors(windll.kernel32.MoveFileExW(
+ _path_to_unicode(src), _path_to_unicode(dst),
+ _windows_default_flags
+ ))
+
+
+def replace_atomic(src, dst):
+ '''
+ Move ``src`` to ``dst``. If ``dst`` exists, it will be silently
+ overwritten.
+
+ Both paths must reside on the same filesystem for the operation to be
+ atomic.
+ '''
+ return _replace_atomic(src, dst)
+
+
+def move_atomic(src, dst):
+ '''
+ Move ``src`` to ``dst``. There might be a time window where both filesystem
+ entries exist. If ``dst`` already exists, :py:exc:`FileExistsError` will be
+ raised.
+
+ Both paths must reside on the same filesystem for the operation to be
+ atomic.
+ '''
+ return _move_atomic(src, dst)
+
+
+class AtomicWriter(object):
+ '''
+ A helper class for performing atomic writes. Usage::
+
+ with AtomicWriter(path).open() as f:
+ f.write(...)
+
+ :param path: The destination filepath. May or may not exist.
+ :param mode: The filemode for the temporary file.
+ :param overwrite: If set to false, an error is raised if ``path`` exists.
+ Errors are only raised after the file has been written to. Either way,
+ the operation is atomic.
+
+ If you need further control over the exact behavior, you are encouraged to
+ subclass.
+ '''
+
+ def __init__(self, path, mode='w', overwrite=False):
+ if 'a' in mode:
+ raise ValueError(
+ 'Appending to an existing file is not supported, because that '
+ 'would involve an expensive `copy`-operation to a temporary '
+ 'file. Open the file in normal `w`-mode and copy explicitly '
+ 'if that\'s what you\'re after.'
+ )
+ if 'x' in mode:
+ raise ValueError('Use the `overwrite`-parameter instead.')
+ if 'w' not in mode:
+ raise ValueError('AtomicWriters can only be written to.')
+
+ self._path = path
+ self._mode = mode
+ self._overwrite = overwrite
+
+ def open(self):
+ '''
+ Open the temporary file.
+ '''
+ return self._open(self.get_fileobject)
+
+ @contextlib.contextmanager
+ def _open(self, get_fileobject):
+ f = None # make sure f exists even if get_fileobject() fails
+ try:
+ success = False
+ with get_fileobject() as f:
+ yield f
+ self.sync(f)
+ self.commit(f)
+ success = True
+ finally:
+ if not success:
+ try:
+ self.rollback(f)
+ except Exception:
+ pass
+
+ def get_fileobject(self, dir=None, **kwargs):
+ '''Return the temporary file to use.'''
+ if dir is None:
+ dir = os.path.normpath(os.path.dirname(self._path))
+ return tempfile.NamedTemporaryFile(mode=self._mode, dir=dir,
+ delete=False, **kwargs)
+
+ def sync(self, f):
+ '''responsible for clearing as many file caches as possible before
+ commit'''
+ f.flush()
+ _proper_fsync(f.fileno())
+
+ def commit(self, f):
+ '''Move the temporary file to the target location.'''
+ if self._overwrite:
+ replace_atomic(f.name, self._path)
+ else:
+ move_atomic(f.name, self._path)
+
+ def rollback(self, f):
+ '''Clean up all temporary resources.'''
+ os.unlink(f.name)
+
+
+def atomic_write(path, writer_cls=AtomicWriter, **cls_kwargs):
+ '''
+ Simple atomic writes. This wraps :py:class:`AtomicWriter`::
+
+ with atomic_write(path) as f:
+ f.write(...)
+
+ :param path: The target path to write to.
+ :param writer_cls: The writer class to use. This parameter is useful if you
+ subclassed :py:class:`AtomicWriter` to change some behavior and want to
+ use that new subclass.
+
+ Additional keyword arguments are passed to the writer class. See
+ :py:class:`AtomicWriter`.
+ '''
+ return writer_cls(path, **cls_kwargs).open()
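+
+# Example usage of atomic_write() (an editorial sketch, not part of the
+# upstream module; 'settings.json' is a hypothetical path):
+#
+#     from atomicwrites import atomic_write
+#
+#     with atomic_write('settings.json', overwrite=True) as f:
+#         f.write('{"debug": false}')
+#
+# The data is written to a temporary file in the destination directory,
+# flushed and fsync'd, and only then renamed over the target, so readers
+# never observe a partially written file.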
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/docs/Makefile b/testing/web-platform/tests/tools/third_party/atomicwrites/docs/Makefile
new file mode 100644
index 0000000000..af5f9d9aa5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/docs/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/atomicwrites.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/atomicwrites.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/atomicwrites"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/atomicwrites"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/docs/conf.py b/testing/web-platform/tests/tools/third_party/atomicwrites/docs/conf.py
new file mode 100644
index 0000000000..b7c7b59c4d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/docs/conf.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+
+import os
+import sys
+import pkg_resources
+
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.intersphinx',
+ 'sphinx.ext.viewcode',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'atomicwrites'
+copyright = '2015, Markus Unterwaditzer'
+
+try:
+ # The full version, including alpha/beta/rc tags.
+ release = pkg_resources.require('atomicwrites')[0].version
+except pkg_resources.DistributionNotFound:
+ print('To build the documentation, the distribution information of '
+ 'atomicwrites has to be available. Run "setup.py develop" to do '
+ 'this.')
+ sys.exit(1)
+
+version = '.'.join(release.split('.')[:2]) # The short X.Y version.
+
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+
+try:
+ import sphinx_rtd_theme
+ html_theme = 'sphinx_rtd_theme'
+ html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+except ImportError:
+ html_theme = 'default'
+ if not on_rtd:
+ print('-' * 74)
+ print('Warning: sphinx-rtd-theme not installed, building with default '
+ 'theme.')
+ print('-' * 74)
+
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'atomicwritesdoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ ('index', 'atomicwrites.tex', 'atomicwrites Documentation',
+ 'Markus Unterwaditzer', 'manual'),
+]
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'atomicwrites', 'atomicwrites Documentation',
+ ['Markus Unterwaditzer'], 1)
+]
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'atomicwrites', 'atomicwrites Documentation',
+ 'Markus Unterwaditzer', 'atomicwrites', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Bibliographic Dublin Core info.
+epub_title = 'atomicwrites'
+epub_author = 'Markus Unterwaditzer'
+epub_publisher = 'Markus Unterwaditzer'
+epub_copyright = '2015, Markus Unterwaditzer'
+
+# A list of files that should not be packed into the epub file.
+epub_exclude_files = ['search.html']
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'http://docs.python.org/': None}
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/docs/index.rst b/testing/web-platform/tests/tools/third_party/atomicwrites/docs/index.rst
new file mode 100644
index 0000000000..0391c04477
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/docs/index.rst
@@ -0,0 +1,35 @@
+.. include:: ../README.rst
+
+.. module:: atomicwrites
+
+API
+===
+
+.. autofunction:: atomic_write
+
+
+Error handling
+--------------
+
+All filesystem errors are subclasses of :py:exc:`OSError`.
+
+- On UNIX systems, errors from the Python stdlib calls are thrown.
+- On Windows systems, errors from Python's ``ctypes`` are thrown.
+
+In either case, the ``errno`` attribute on the raised exception maps to an
+error code in the ``errno`` module.
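+
+A minimal sketch of handling such an error (an editorial example, not part of
+the upstream documentation; the filenames are hypothetical)::
+
+    import errno
+
+    from atomicwrites import move_atomic
+
+    try:
+        move_atomic('staging.txt', 'final.txt')
+    except OSError as exc:
+        if exc.errno == errno.EEXIST:
+            print('final.txt already exists')
+        else:
+            raise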
+
+Low-level API
+-------------
+
+.. autofunction:: replace_atomic
+
+.. autofunction:: move_atomic
+
+.. autoclass:: AtomicWriter
+ :members:
+
+License
+=======
+
+.. include:: ../LICENSE
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/docs/make.bat b/testing/web-platform/tests/tools/third_party/atomicwrites/docs/make.bat
new file mode 100644
index 0000000000..36fd3f6baf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/docs/make.bat
@@ -0,0 +1,242 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. texinfo to make Texinfo files
+ echo. gettext to make PO message catalogs
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. xml to make Docutils-native XML files
+ echo. pseudoxml to make pseudoxml-XML files for display purposes
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+
+%SPHINXBUILD% 2> nul
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.http://sphinx-doc.org/
+ exit /b 1
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\atomicwrites.qhcp
+ echo.To view the help file:
+	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\atomicwrites.qhc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "latexpdf" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ cd %BUILDDIR%/latex
+ make all-pdf
+ cd %BUILDDIR%/..
+ echo.
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "latexpdfja" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ cd %BUILDDIR%/latex
+ make all-pdf-ja
+ cd %BUILDDIR%/..
+ echo.
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "texinfo" (
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+ goto end
+)
+
+if "%1" == "gettext" (
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+if "%1" == "xml" (
+ %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The XML files are in %BUILDDIR%/xml.
+ goto end
+)
+
+if "%1" == "pseudoxml" (
+ %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
+ goto end
+)
+
+:end
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/setup.cfg b/testing/web-platform/tests/tools/third_party/atomicwrites/setup.cfg
new file mode 100644
index 0000000000..5e4090017a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/setup.cfg
@@ -0,0 +1,2 @@
+[wheel]
+universal = 1
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/setup.py b/testing/web-platform/tests/tools/third_party/atomicwrites/setup.py
new file mode 100644
index 0000000000..98488e9b98
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/setup.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+import ast
+import re
+
+from setuptools import find_packages, setup
+
+
+_version_re = re.compile(r'__version__\s+=\s+(.*)')
+
+
+with open('atomicwrites/__init__.py', 'rb') as f:
+ version = str(ast.literal_eval(_version_re.search(
+ f.read().decode('utf-8')).group(1)))
+
+setup(
+ name='atomicwrites',
+ version=version,
+ author='Markus Unterwaditzer',
+ author_email='markus@unterwaditzer.net',
+ url='https://github.com/untitaker/python-atomicwrites',
+ description='Atomic file writes.',
+ license='MIT',
+ long_description=open('README.rst').read(),
+ packages=find_packages(exclude=['tests.*', 'tests']),
+ include_package_data=True,
+)
diff --git a/testing/web-platform/tests/tools/third_party/atomicwrites/tox.ini b/testing/web-platform/tests/tools/third_party/atomicwrites/tox.ini
new file mode 100644
index 0000000000..dfadf03336
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/atomicwrites/tox.ini
@@ -0,0 +1,11 @@
+[tox]
+envlist = py{26,27,py,33,34,35}-{test,stylecheck}
+
+[testenv]
+deps =
+ test: pytest
+ stylecheck: flake8
+ stylecheck: flake8-import-order
+commands =
+ test: py.test []
+ stylecheck: flake8 []
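+
+# Editorial note (not part of the upstream file): with this configuration,
+# individual environments can be selected by factor, e.g.
+#   tox -e py27-test        runs the pytest suite on Python 2.7
+#   tox -e py27-stylecheck  runs the flake8 checks on Python 2.7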
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.github/CODE_OF_CONDUCT.md b/testing/web-platform/tests/tools/third_party/attrs/.github/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000..1d8ad1833e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.github/CODE_OF_CONDUCT.md
@@ -0,0 +1,133 @@
+
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our
+community a harassment-free experience for everyone, regardless of age, body
+size, visible or invisible disability, ethnicity, sex characteristics, gender
+identity and expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, caste, color, religion, or sexual
+identity and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming,
+diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our
+community include:
+
+* Demonstrating empathy and kindness toward other people
+* Being respectful of differing opinions, viewpoints, and experiences
+* Giving and gracefully accepting constructive feedback
+* Accepting responsibility and apologizing to those affected by our mistakes,
+ and learning from the experience
+* Focusing on what is best not just for us as individuals, but for the overall
+ community
+
+Examples of unacceptable behavior include:
+
+* The use of sexualized language or imagery, and sexual attention or advances of
+ any kind
+* Trolling, insulting or derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or email address,
+ without their explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of
+acceptable behavior and will take appropriate and fair corrective action in
+response to any behavior that they deem inappropriate, threatening, offensive,
+or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, and will communicate reasons for moderation
+decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces, and also applies when
+an individual is officially representing the community in public spaces.
+Examples of representing our community include using an official e-mail address,
+posting via an official social media account, or acting as an appointed
+representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported to the community leaders responsible for enforcement at
+<mailto:hs@ox.cx>.
+All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the
+reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining
+the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed
+unprofessional or unwelcome in the community.
+
+**Consequence**: A private, written warning from community leaders, providing
+clarity around the nature of the violation and an explanation of why the
+behavior was inappropriate. A public apology may be requested.
+
+### 2. Warning
+
+**Community Impact**: A violation through a single incident or series of
+actions.
+
+**Consequence**: A warning with consequences for continued behavior. No
+interaction with the people involved, including unsolicited interaction with
+those enforcing the Code of Conduct, for a specified period of time. This
+includes avoiding interactions in community spaces as well as external channels
+like social media. Violating these terms may lead to a temporary or permanent
+ban.
+
+### 3. Temporary Ban
+
+**Community Impact**: A serious violation of community standards, including
+sustained inappropriate behavior.
+
+**Consequence**: A temporary ban from any sort of interaction or public
+communication with the community for a specified period of time. No public or
+private interaction with the people involved, including unsolicited interaction
+with those enforcing the Code of Conduct, is allowed during this period.
+Violating these terms may lead to a permanent ban.
+
+### 4. Permanent Ban
+
+**Community Impact**: Demonstrating a pattern of violation of community
+standards, including sustained inappropriate behavior, harassment of an
+individual, or aggression toward or disparagement of classes of individuals.
+
+**Consequence**: A permanent ban from any sort of public interaction within the
+community.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 2.1, available at
+[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
+
+Community Impact Guidelines were inspired by
+[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
+
+For answers to common questions about this code of conduct, see the FAQ at
+[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
+[https://www.contributor-covenant.org/translations][translations].
+
+[homepage]: https://www.contributor-covenant.org
+[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
+[Mozilla CoC]: https://github.com/mozilla/diversity
+[FAQ]: https://www.contributor-covenant.org/faq
+[translations]: https://www.contributor-covenant.org/translations
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.github/CONTRIBUTING.md b/testing/web-platform/tests/tools/third_party/attrs/.github/CONTRIBUTING.md
new file mode 100644
index 0000000000..bbdc20f193
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.github/CONTRIBUTING.md
@@ -0,0 +1,230 @@
+# How To Contribute
+
+First off, thank you for considering contributing to `attrs`!
+It's people like *you* who make it such a great tool for everyone.
+
+This document intends to make contribution more accessible by codifying tribal knowledge and expectations.
+Don't be afraid to open half-finished PRs, and ask questions if something is unclear!
+
+Please note that this project is released with a Contributor [Code of Conduct](https://github.com/python-attrs/attrs/blob/main/.github/CODE_OF_CONDUCT.md).
+By participating in this project you agree to abide by its terms.
+Please report any harm to [Hynek Schlawack] in any way you find appropriate.
+
+
+## Support
+
+In case you'd like to help out but don't want to deal with GitHub, there's a great opportunity:
+help your fellow developers on [Stack Overflow](https://stackoverflow.com/questions/tagged/python-attrs)!
+
+The official tag is `python-attrs` and helping out in support frees us up to improve `attrs` instead!
+
+
+## Workflow
+
+- No contribution is too small!
+ Please submit as many fixes for typos and grammar bloopers as you can!
+- Try to limit each pull request to *one* change only.
+- Since we squash on merge, it's up to you how you handle updates to the main branch.
+ Whether you prefer to rebase on main or merge main into your branch, do whatever is more comfortable for you.
+- *Always* add tests and docs for your code.
+ This is a hard rule; patches with missing tests or documentation can't be merged.
+- Make sure your changes pass our [CI].
+ You won't get any feedback until it's green unless you ask for it.
+- For the CI to pass, the coverage must be 100%.
+  If you have problems testing something, open the pull request anyway and ask for advice.
+ In some situations, we may agree to add an `# pragma: no cover`.
+- Once you've addressed review feedback, make sure to bump the pull request with a short note, so we know you're done.
+- Don’t break backwards compatibility.
+
+
+## Code
+
+- Obey [PEP 8](https://www.python.org/dev/peps/pep-0008/) and [PEP 257](https://www.python.org/dev/peps/pep-0257/).
+ We use the `"""`-on-separate-lines style for docstrings:
+
+ ```python
+ def func(x):
+ """
+ Do something.
+
+ :param str x: A very important parameter.
+
+ :rtype: str
+ """
+ ```
+- If you add or change public APIs, tag the docstring using `.. versionadded:: 16.0.0 WHAT` or `.. versionchanged:: 16.2.0 WHAT`.
+- We use [*isort*](https://github.com/PyCQA/isort) to sort our imports, and we use [*Black*](https://github.com/psf/black) with line length of 79 characters to format our code.
+ As long as you run our full [*tox*] suite before committing, or install our [*pre-commit*] hooks (ideally you'll do both – see [*Local Development Environment*](#local-development-environment) below), you won't have to spend any time on formatting your code at all.
+ If you don't, [CI] will catch it for you – but that seems like a waste of your time!
+
+
+## Tests
+
+- Write your asserts as `expected == actual` to line them up nicely:
+
+ ```python
+ x = f()
+
+ assert 42 == x.some_attribute
+ assert "foo" == x._a_private_attribute
+ ```
+
+- To run the test suite, all you need is a recent [*tox*].
+ It will ensure the test suite runs with all dependencies against all Python versions just as it will in our [CI].
+  If you lack some Python versions, you can always limit the environments like `tox -e py27,py38`, or make it a non-failure using `tox --skip-missing-interpreters`.
+
+ In that case you should look into [*asdf*](https://asdf-vm.com) or [*pyenv*](https://github.com/pyenv/pyenv), which make it very easy to install many different Python versions in parallel.
+- Write [good test docstrings](https://jml.io/pages/test-docstrings.html).
+- To ensure new features work well with the rest of the system, they should also be added to our [*Hypothesis*](https://hypothesis.readthedocs.io/) testing strategy, which can be found in `tests/strategies.py`.
+- If you've changed or added public APIs, please update our type stubs (files ending in `.pyi`).
+
+
+## Documentation
+
+- Use [semantic newlines] in [*reStructuredText*] files (files ending in `.rst`):
+
+ ```rst
+ This is a sentence.
+ This is another sentence.
+ ```
+
+- If you start a new section, add two blank lines before and one blank line after the header, except if two headers follow immediately after each other:
+
+ ```rst
+ Last line of previous section.
+
+
+ Header of New Top Section
+ -------------------------
+
+ Header of New Section
+ ^^^^^^^^^^^^^^^^^^^^^
+
+ First line of new section.
+ ```
+
+- If you add a new feature, demonstrate its awesomeness on the [examples page](https://github.com/python-attrs/attrs/blob/main/docs/examples.rst)!
+
+
+### Changelog
+
+If your change is noteworthy, there needs to be a changelog entry so our users can learn about it!
+
+To avoid merge conflicts, we use the [*towncrier*](https://pypi.org/project/towncrier) package to manage our changelog.
+*towncrier* uses independent files for each pull request – so-called *news fragments* – instead of one monolithic changelog file.
+On release, those news fragments are compiled into our [`CHANGELOG.rst`](https://github.com/python-attrs/attrs/blob/main/CHANGELOG.rst).
+
+You don't need to install *towncrier* yourself, you just have to abide by a few simple rules:
+
+- For each pull request, add a new file into `changelog.d` with a filename adhering to the `pr#.(change|deprecation|breaking).rst` schema:
+ For example, `changelog.d/42.change.rst` for a non-breaking change that is proposed in pull request #42.
+- As with other docs, please use [semantic newlines] within news fragments.
+- Wrap symbols like modules, functions, or classes into double backticks so they are rendered in a `monospace font`.
+- Wrap arguments into asterisks like in docstrings:
+ `Added new argument *an_argument*.`
+- If you mention functions or other callables, add parentheses at the end of their names:
+ `attrs.func()` or `attrs.Class.method()`.
+ This makes the changelog a lot more readable.
+- Prefer simple past tense or constructions with "now".
+ For example:
+
+ + Added `attrs.validators.func()`.
+ + `attrs.func()` now doesn't crash the Large Hadron Collider anymore when passed the *foobar* argument.
+- If you want to reference multiple issues, copy the news fragment to another filename.
+ *towncrier* will merge all news fragments with identical contents into one entry with multiple links to the respective pull requests.
+
+Example entries:
+
+ ```rst
+ Added ``attrs.validators.func()``.
+ The feature really *is* awesome.
+ ```
+
+or:
+
+ ```rst
+ ``attrs.func()`` now doesn't crash the Large Hadron Collider anymore when passed the *foobar* argument.
+ The bug really *was* nasty.
+ ```
+
+---
+
+``tox -e changelog`` will render the current changelog to the terminal if you have any doubts.
+
+
+## Local Development Environment
+
+You can (and should) run our test suite using [*tox*].
+However, you’ll probably want a more traditional environment as well.
+We highly recommend developing with the latest Python release because we try to take advantage of modern features whenever possible.
+
+First create a [virtual environment](https://virtualenv.pypa.io/) so you don't break your system-wide Python installation.
+It’s out of scope for this document to list all the ways to manage virtual environments in Python, but if you don’t already have a pet way, take some time to look at tools like [*direnv*](https://hynek.me/til/python-project-local-venvs/), [*virtualfish*](https://virtualfish.readthedocs.io/), and [*virtualenvwrapper*](https://virtualenvwrapper.readthedocs.io/).
+
+Next, get an up to date checkout of the `attrs` repository:
+
+```console
+$ git clone git@github.com:python-attrs/attrs.git
+```
+
+or if you want to use git via `https`:
+
+```console
+$ git clone https://github.com/python-attrs/attrs.git
+```
+
+Change into the newly created directory and **after activating your virtual environment** install an editable version of `attrs` along with its tests and docs requirements:
+
+```console
+$ cd attrs
+$ pip install --upgrade pip setuptools # PLEASE don't skip this step
+$ pip install -e '.[dev]'
+```
+
+At this point,
+
+```console
+$ python -m pytest
+```
+
+should work and pass, as should:
+
+```console
+$ cd docs
+$ make html
+```
+
+The built documentation can then be found in `docs/_build/html/`.
+
+To avoid committing code that violates our style guide, we strongly advise you to install [*pre-commit*] [^dev] hooks:
+
+```console
+$ pre-commit install
+```
+
+You can also run them anytime (as our tox does) using:
+
+```console
+$ pre-commit run --all-files
+```
+
+[^dev]: *pre-commit* should have been installed into your virtualenv automatically when you ran `pip install -e '.[dev]'` above.
+  If *pre-commit* is missing, you probably need to run `pip install -e '.[dev]'` again.
+
+
+## Governance
+
+`attrs` is maintained by a [team of volunteers](https://github.com/python-attrs) that is always open to new members who share our vision of a fast, lean, and magic-free library that empowers programmers to write better code with less effort.
+If you'd like to join, just get a pull request merged and ask to be added in the very same pull request!
+
+**The simple rule is that everyone is welcome to review/merge pull requests of others but nobody is allowed to merge their own code.**
+
+[Hynek Schlawack] acts reluctantly as the [BDFL](https://en.wikipedia.org/wiki/Benevolent_dictator_for_life) and has the final say over design decisions.
+
+
+[CI]: https://github.com/python-attrs/attrs/actions?query=workflow%3ACI
+[Hynek Schlawack]: https://hynek.me/about/
+[*pre-commit*]: https://pre-commit.com/
+[*tox*]: https://tox.wiki/
+[semantic newlines]: https://rhodesmill.org/brandon/2012/one-sentence-per-line/
+[*reStructuredText*]: https://www.sphinx-doc.org/en/stable/usage/restructuredtext/basics.html
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.github/FUNDING.yml b/testing/web-platform/tests/tools/third_party/attrs/.github/FUNDING.yml
new file mode 100644
index 0000000000..ef4f212162
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.github/FUNDING.yml
@@ -0,0 +1,5 @@
+---
+
+github: hynek
+ko_fi: the_hynek
+tidelift: "pypi/attrs"
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.github/PULL_REQUEST_TEMPLATE.md b/testing/web-platform/tests/tools/third_party/attrs/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..88f6415e96
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,34 @@
+# Summary
+
+<!-- Please tell us what your pull request is about here. -->
+
+
+# Pull Request Check List
+
+<!--
+This is just a friendly reminder about the most common mistakes.
+Please make sure that you tick all boxes.
+But please read our [contribution guide](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md) at least once, it will save you unnecessary review cycles!
+
+If an item doesn't apply to your pull request, **check it anyway** to make it apparent that there's nothing left to do.
+If your pull request is a documentation fix or a trivial typo, feel free to delete the whole thing.
+-->
+
+- [ ] Added **tests** for changed code.
+ Our CI fails if coverage is not 100%.
+- [ ] New features have been added to our [Hypothesis testing strategy](https://github.com/python-attrs/attrs/blob/main/tests/strategies.py).
+- [ ] Changes or additions to public APIs are reflected in our type stubs (files ending in ``.pyi``).
+ - [ ] ...and used in the stub test file `tests/typing_example.py`.
+ - [ ] If they've been added to `attr/__init__.pyi`, they've *also* been re-imported in `attrs/__init__.pyi`.
+- [ ] Updated **documentation** for changed code.
+ - [ ] New functions/classes have to be added to `docs/api.rst` by hand.
+ - [ ] Changes to the signature of `@attr.s()` have to be added by hand too.
+ - [ ] Changed/added classes/methods/functions have appropriate `versionadded`, `versionchanged`, or `deprecated` [directives](http://www.sphinx-doc.org/en/stable/markup/para.html#directive-versionadded).
+ Find the appropriate next version in our [``__init__.py``](https://github.com/python-attrs/attrs/blob/main/src/attr/__init__.py) file.
+- [ ] Documentation in `.rst` files is written using [semantic newlines](https://rhodesmill.org/brandon/2012/one-sentence-per-line/).
+- [ ] Changes (and possible deprecations) have news fragments in [`changelog.d`](https://github.com/python-attrs/attrs/blob/main/changelog.d).
+
+<!--
+If you have *any* questions to *any* of the points above, just **submit and ask**!
+This checklist is here to *help* you, not to deter you from contributing!
+-->
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.github/SECURITY.md b/testing/web-platform/tests/tools/third_party/attrs/.github/SECURITY.md
new file mode 100644
index 0000000000..5e565ec19c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.github/SECURITY.md
@@ -0,0 +1,2 @@
+To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security).
+Tidelift will coordinate the fix and disclosure.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.github/workflows/main.yml b/testing/web-platform/tests/tools/third_party/attrs/.github/workflows/main.yml
new file mode 100644
index 0000000000..f38fd91509
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.github/workflows/main.yml
@@ -0,0 +1,113 @@
+---
+name: CI
+
+on:
+ push:
+ branches: ["main"]
+ pull_request:
+ branches: ["main"]
+ workflow_dispatch:
+
+env:
+ FORCE_COLOR: "1" # Make tools pretty.
+ TOX_TESTENV_PASSENV: FORCE_COLOR
+ PYTHON_LATEST: "3.10"
+
+
+jobs:
+ tests:
+ name: tox on ${{ matrix.python-version }}
+ runs-on: ubuntu-latest
+
+ strategy:
+ fail-fast: false
+ matrix:
+ python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "pypy-2.7", "pypy-3.7", "pypy-3.8"]
+
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: "Install dependencies"
+ run: |
+ python -VV
+ python -m site
+ python -m pip install --upgrade pip setuptools wheel
+ python -m pip install --upgrade virtualenv tox tox-gh-actions
+
+ - run: "python -m tox"
+
+ - name: Upload coverage data
+ uses: "actions/upload-artifact@v2"
+ with:
+ name: coverage-data
+ path: ".coverage.*"
+ if-no-files-found: ignore
+
+
+ coverage:
+ runs-on: ubuntu-latest
+ needs: tests
+
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ # Use latest Python, so it understands all syntax.
+ python-version: ${{env.PYTHON_LATEST}}
+
+ - run: python -m pip install --upgrade coverage[toml]
+
+ - name: Download coverage data
+ uses: actions/download-artifact@v2
+ with:
+ name: coverage-data
+
+ - name: Combine coverage and fail if it's <100%.
+ run: |
+ python -m coverage combine
+ python -m coverage html --skip-covered --skip-empty
+ python -m coverage report --fail-under=100
+
+ - name: Upload HTML report if check failed.
+ uses: actions/upload-artifact@v2
+ with:
+ name: html-report
+ path: htmlcov
+ if: ${{ failure() }}
+
+
+ package:
+ name: Build & verify package
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ python-version: ${{env.PYTHON_LATEST}}
+
+ - run: python -m pip install build twine check-wheel-contents
+ - run: python -m build --sdist --wheel .
+ - run: ls -l dist
+ - run: check-wheel-contents dist/*.whl
+ - name: Check long_description
+ run: python -m twine check dist/*
+
+
+ install-dev:
+ name: Verify dev env
+ runs-on: ${{ matrix.os }}
+ strategy:
+ matrix:
+ os: ["ubuntu-latest", "windows-latest"]
+
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ python-version: ${{env.PYTHON_LATEST}}
+ - run: python -m pip install -e .[dev]
+ - run: python -c 'import attr; print(attr.__version__)'
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.gitignore b/testing/web-platform/tests/tools/third_party/attrs/.gitignore
new file mode 100644
index 0000000000..d054dc6267
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.gitignore
@@ -0,0 +1,13 @@
+*.egg-info
+*.pyc
+.cache
+.coverage*
+.hypothesis
+.mypy_cache
+.pytest_cache
+.tox
+build
+dist
+docs/_build/
+htmlcov
+pip-wheel-metadata
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.pre-commit-config.yaml b/testing/web-platform/tests/tools/third_party/attrs/.pre-commit-config.yaml
new file mode 100644
index 0000000000..a913b068f5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.pre-commit-config.yaml
@@ -0,0 +1,43 @@
+---
+ci:
+ autoupdate_schedule: monthly
+
+repos:
+ - repo: https://github.com/psf/black
+ rev: 21.12b0
+ hooks:
+ - id: black
+ exclude: tests/test_pattern_matching.py
+ language_version: python3.10
+
+ - repo: https://github.com/PyCQA/isort
+ rev: 5.10.1
+ hooks:
+ - id: isort
+ additional_dependencies: [toml]
+ files: \.py$
+ language_version: python3.10
+
+ - repo: https://github.com/PyCQA/flake8
+ rev: 4.0.1
+ hooks:
+ - id: flake8
+ language_version: python3.10
+
+ - repo: https://github.com/econchick/interrogate
+ rev: 1.5.0
+ hooks:
+ - id: interrogate
+ exclude: tests/test_pattern_matching.py
+ args: [tests]
+ language_version: python3.10
+
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.1.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: debug-statements
+ language_version: python3.10
+ - id: check-toml
+ - id: check-yaml
diff --git a/testing/web-platform/tests/tools/third_party/attrs/.readthedocs.yml b/testing/web-platform/tests/tools/third_party/attrs/.readthedocs.yml
new file mode 100644
index 0000000000..d335c40d56
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/.readthedocs.yml
@@ -0,0 +1,16 @@
+---
+version: 2
+formats: all
+
+build:
+ os: ubuntu-20.04
+ tools:
+ # Keep version in sync with tox.ini (docs and gh-actions).
+ python: "3.10"
+
+python:
+ install:
+ - method: pip
+ path: .
+ extra_requirements:
+ - docs
diff --git a/testing/web-platform/tests/tools/third_party/attrs/AUTHORS.rst b/testing/web-platform/tests/tools/third_party/attrs/AUTHORS.rst
new file mode 100644
index 0000000000..f14ef6c607
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/AUTHORS.rst
@@ -0,0 +1,11 @@
+Credits
+=======
+
+``attrs`` is written and maintained by `Hynek Schlawack <https://hynek.me/>`_.
+
+The development is kindly supported by `Variomedia AG <https://www.variomedia.de/>`_.
+
+A full list of contributors can be found in `GitHub's overview <https://github.com/python-attrs/attrs/graphs/contributors>`_.
+
+It’s the spiritual successor of `characteristic <https://characteristic.readthedocs.io/>`_ and aspires to fix some of its clunkiness and unfortunate decisions.
+Both were inspired by Twisted’s `FancyEqMixin <https://twistedmatrix.com/documents/current/api/twisted.python.util.FancyEqMixin.html>`_ but both are implemented using class decorators because `subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, m’kay?
diff --git a/testing/web-platform/tests/tools/third_party/attrs/CHANGELOG.rst b/testing/web-platform/tests/tools/third_party/attrs/CHANGELOG.rst
new file mode 100644
index 0000000000..1d194add22
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/CHANGELOG.rst
@@ -0,0 +1,1027 @@
+Changelog
+=========
+
+Versions follow `CalVer <https://calver.org>`_ with a strict backwards-compatibility policy.
+
+The **first number** of the version is the year.
+The **second number** is incremented with each release, starting at 1 for each year.
+The **third number** is when we need to start branches for older releases (only for emergencies).
+
+Put simply, you shouldn't ever be afraid to upgrade ``attrs`` if you're only using its public APIs.
+Whenever there is a need to break compatibility, it is announced here in the changelog, and raises a ``DeprecationWarning`` for a year (if possible) before it's finally really broken.
+
+.. warning::
+
+ The structure of the `attrs.Attribute` class is exempt from this rule.
+ It *will* change in the future, but since it should be considered read-only, that shouldn't matter.
+
+ However if you intend to build extensions on top of ``attrs`` you have to anticipate that.
+
+.. towncrier release notes start
+
+21.4.0 (2021-12-29)
+-------------------
+
+Changes
+^^^^^^^
+
+- Fixed the test suite on PyPy3.8 where ``cloudpickle`` does not work.
+ `#892 <https://github.com/python-attrs/attrs/issues/892>`_
+- Fixed ``coverage report`` for projects that use ``attrs`` and don't set a ``--source``.
+ `#895 <https://github.com/python-attrs/attrs/issues/895>`_,
+ `#896 <https://github.com/python-attrs/attrs/issues/896>`_
+
+
+----
+
+
+21.3.0 (2021-12-28)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- When using ``@define``, converters are now run by default when setting an attribute on an instance -- in addition to validators.
+ I.e. the new default is ``on_setattr=[attrs.setters.convert, attrs.setters.validate]``.
+
+ This is unfortunately a breaking change, but it was an oversight, impossible to raise a ``DeprecationWarning`` about, and it's better to fix it now while the APIs are very fresh with few users.
+ `#835 <https://github.com/python-attrs/attrs/issues/835>`_,
+ `#886 <https://github.com/python-attrs/attrs/issues/886>`_
+- ``import attrs`` has finally landed!
+ As of this release, you can finally import ``attrs`` using its proper name.
+
+ Not all names from the ``attr`` namespace have been transferred; most notably ``attr.s`` and ``attr.ib`` are missing.
+ See ``attrs.define`` and ``attrs.field`` if you haven't seen our next-generation APIs yet.
+  A more elaborate explanation can be found in `On The Core API Names <https://www.attrs.org/en/latest/names.html>`_.
+
+ This feature is at least for one release **provisional**.
+ We don't *plan* on changing anything, but such a big change is unlikely to go perfectly on the first strike.
+
+ The API docs have been mostly updated, but it will be an ongoing effort to change everything to the new APIs.
+ Please note that we have **not** moved -- or even removed -- anything from ``attr``!
+
+ Please do report any bugs or documentation inconsistencies!
+ `#887 <https://github.com/python-attrs/attrs/issues/887>`_
+
+
+Changes
+^^^^^^^
+
+- ``attr.asdict(retain_collection_types=False)`` (default) dumps collection-esque keys as tuples.
+ `#646 <https://github.com/python-attrs/attrs/issues/646>`_,
+ `#888 <https://github.com/python-attrs/attrs/issues/888>`_
+- ``__match_args__`` are now generated to support Python 3.10's
+ `Structural Pattern Matching <https://docs.python.org/3.10/whatsnew/3.10.html#pep-634-structural-pattern-matching>`_.
+ This can be controlled by the ``match_args`` argument to the class decorators on Python 3.10 and later.
+ On older versions, it is never added and the argument is ignored.
+ `#815 <https://github.com/python-attrs/attrs/issues/815>`_
+- If the class-level *on_setattr* is set to ``attrs.setters.validate`` (default in ``@define`` and ``@mutable``) but no field defines a validator, pretend that it's not set.
+ `#817 <https://github.com/python-attrs/attrs/issues/817>`_
+- The generated ``__repr__`` is significantly faster on Pythons with f-strings.
+ `#819 <https://github.com/python-attrs/attrs/issues/819>`_
+- Attributes transformed via ``field_transformer`` are wrapped with ``AttrsClass`` again.
+ `#824 <https://github.com/python-attrs/attrs/issues/824>`_
+- Generated source code is now cached more efficiently for identical classes.
+ `#828 <https://github.com/python-attrs/attrs/issues/828>`_
+- Added ``attrs.converters.to_bool()``.
+ `#830 <https://github.com/python-attrs/attrs/issues/830>`_
+- ``attrs.resolve_types()`` now resolves types of subclasses after the parents are resolved.
+  `#842 <https://github.com/python-attrs/attrs/issues/842>`_,
+ `#843 <https://github.com/python-attrs/attrs/issues/843>`_
+- Added new validators: ``lt(val)`` (< val), ``le(val)`` (≤ val), ``ge(val)`` (≥ val), ``gt(val)`` (> val), and ``maxlen(n)``.
+ `#845 <https://github.com/python-attrs/attrs/issues/845>`_
+- ``attrs`` classes are now fully compatible with `cloudpickle <https://github.com/cloudpipe/cloudpickle>`_ (no need to disable ``repr`` anymore).
+ `#857 <https://github.com/python-attrs/attrs/issues/857>`_
+- Added new context manager ``attrs.validators.disabled()`` and functions ``attrs.validators.(set|get)_disabled()``.
+ They deprecate ``attrs.(set|get)_run_validators()``.
+ All functions are interoperable and modify the same internal state.
+ They are not – and never were – thread-safe, though.
+ `#859 <https://github.com/python-attrs/attrs/issues/859>`_
+- ``attrs.validators.matches_re()`` now accepts pre-compiled regular expressions in addition to pattern strings.
+ `#877 <https://github.com/python-attrs/attrs/issues/877>`_
+
+
+----
+
+
+21.2.0 (2021-05-07)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- We had to revert the recursive feature for ``attr.evolve()`` because it broke some use-cases -- sorry!
+ `#806 <https://github.com/python-attrs/attrs/issues/806>`_
+- Python 3.4 is now blocked using packaging metadata because ``attrs`` can't be imported on it anymore.
+ To ensure that 3.4 users can keep installing ``attrs`` easily, we will `yank <https://pypi.org/help/#yanked>`_ 21.1.0 from PyPI.
+ This has **no** consequences if you pin ``attrs`` to 21.1.0.
+ `#807 <https://github.com/python-attrs/attrs/issues/807>`_
+
+
+----
+
+
+21.1.0 (2021-05-06)
+-------------------
+
+Deprecations
+^^^^^^^^^^^^
+
+- The long-awaited, much-talked-about, little-delivered ``import attrs`` is finally upon us!
+
+ Since the NG APIs have now been proclaimed stable, the **next** release of ``attrs`` will allow you to actually ``import attrs``.
+ We're taking this opportunity to replace some defaults in our APIs that made sense in 2015, but don't in 2021.
+
+ So please, if you have any pet peeves about defaults in ``attrs``'s APIs, *now* is the time to air your grievances in #487!
+ We're not gonna get such a chance for a second time, without breaking our backward-compatibility guarantees, or long deprecation cycles.
+  Therefore, speak now or forever hold your peace!
+ `#487 <https://github.com/python-attrs/attrs/issues/487>`_
+- The *cmp* argument to ``attr.s()`` and ``attr.ib()`` has been **undeprecated**.
+ It will continue to be supported as syntactic sugar to set *eq* and *order* in one go.
+
+ I'm terribly sorry for the hassle around this argument!
+  The reason we're bringing it back is its usefulness regarding customization of equality/ordering.
+
+ The ``cmp`` attribute and argument on ``attr.Attribute`` remains deprecated and will be removed later this year.
+ `#773 <https://github.com/python-attrs/attrs/issues/773>`_
+
+
+Changes
+^^^^^^^
+
+- It's now possible to customize the behavior of ``eq`` and ``order`` by passing in a callable.
+ `#435 <https://github.com/python-attrs/attrs/issues/435>`_,
+ `#627 <https://github.com/python-attrs/attrs/issues/627>`_
+- The instant favorite next-generation APIs are not provisional anymore!
+
+ They are also officially supported by Mypy as of their `0.800 release <https://mypy-lang.blogspot.com/2021/01/mypy-0800-released.html>`_.
+
+ We hope the next release will already contain an (additional) importable package called ``attrs``.
+ `#668 <https://github.com/python-attrs/attrs/issues/668>`_,
+ `#786 <https://github.com/python-attrs/attrs/issues/786>`_
+- If an attribute defines a converter, the type of its parameter is used as type annotation for its corresponding ``__init__`` parameter.
+
+  If ``attr.converters.pipe`` is used, the first converter's parameter type is used.
+ `#710 <https://github.com/python-attrs/attrs/issues/710>`_
+- Fixed the creation of an extra slot for an ``attr.ib`` when the parent class already has a slot with the same name.
+ `#718 <https://github.com/python-attrs/attrs/issues/718>`_
+- ``__attrs_init__()`` will now be injected if ``init=False``, or if ``auto_detect=True`` and a user-defined ``__init__()`` exists.
+
+ This enables users to do "pre-init" work in their ``__init__()`` (such as ``super().__init__()``).
+
+ ``__init__()`` can then delegate constructor argument processing to ``self.__attrs_init__(*args, **kwargs)``.
+ `#731 <https://github.com/python-attrs/attrs/issues/731>`_
+- ``bool(attr.NOTHING)`` is now ``False``.
+ `#732 <https://github.com/python-attrs/attrs/issues/732>`_
+- It's now possible to use ``super()`` inside of properties of slotted classes.
+ `#747 <https://github.com/python-attrs/attrs/issues/747>`_
+- Allow for a ``__attrs_pre_init__()`` method that -- if defined -- will get called at the beginning of the ``attrs``-generated ``__init__()`` method.
+ `#750 <https://github.com/python-attrs/attrs/issues/750>`_
+- Added forgotten ``attr.Attribute.evolve()`` to type stubs.
+ `#752 <https://github.com/python-attrs/attrs/issues/752>`_
+- ``attr.evolve()`` now works recursively with nested ``attrs`` classes.
+ `#759 <https://github.com/python-attrs/attrs/issues/759>`_
+- Python 3.10 is now officially supported.
+ `#763 <https://github.com/python-attrs/attrs/issues/763>`_
+- ``attr.resolve_types()`` now takes an optional *attrib* argument to work inside a ``field_transformer``.
+ `#774 <https://github.com/python-attrs/attrs/issues/774>`_
+- ``ClassVar``\ s are now also detected if they come from `typing-extensions <https://pypi.org/project/typing-extensions/>`_.
+ `#782 <https://github.com/python-attrs/attrs/issues/782>`_
+- To make it easier to customize attribute comparison (#435), we have added the ``attr.cmp_using()`` helper.
+
+ See the `new docs on comparison <https://www.attrs.org/en/stable/comparison.html>`_ for more details.
+ `#787 <https://github.com/python-attrs/attrs/issues/787>`_
+- Added **provisional** support for static typing in ``pyright`` via the `dataclass_transforms specification <https://github.com/microsoft/pyright/blob/main/specs/dataclass_transforms.md>`_.
+ Both the ``pyright`` specification and ``attrs`` implementation may change in future versions of both projects.
+
+ Your constructive feedback is welcome in both `attrs#795 <https://github.com/python-attrs/attrs/issues/795>`_ and `pyright#1782 <https://github.com/microsoft/pyright/discussions/1782>`_.
+ `#796 <https://github.com/python-attrs/attrs/issues/796>`_
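+
+For illustration, a minimal sketch of the ``__attrs_init__()`` delegation pattern described above (the class name is made up for this example; requires Python 3.6+ and assumes attrs >= 21.1.0)::
+
+    import attr
+
+    @attr.s(auto_attribs=True, init=False)
+    class Point:
+        x: int
+        y: int
+
+        def __init__(self, x, y):
+            # Do any "pre-init" work here (e.g. super().__init__()),
+            # then delegate argument processing to the generated method.
+            self.__attrs_init__(x, y)
+
+    Point(1, 2)  # -> Point(x=1, y=2)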
+
+
+----
+
+
+20.3.0 (2020-11-05)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- ``attr.define()``, ``attr.frozen()``, ``attr.mutable()``, and ``attr.field()`` remain **provisional**.
+
+  This release does **not** change anything about them, and they are already widely used in production.
+
+ If you wish to use them together with mypy, you can simply drop `this plugin <https://gist.github.com/hynek/1e3844d0c99e479e716169034b5fa963#file-attrs_ng_plugin-py>`_ into your project.
+
+  Feel free to provide feedback on them in the linked issue #668.
+
+ We will release the ``attrs`` namespace once we have the feeling that the APIs have properly settled.
+ `#668 <https://github.com/python-attrs/attrs/issues/668>`_
+
+
+Changes
+^^^^^^^
+
+- ``attr.s()`` now has a *field_transformer* hook that is called for all ``Attribute``\ s and returns a (modified or updated) list of ``Attribute`` instances.
+ ``attr.asdict()`` has a *value_serializer* hook that can change the way values are converted.
+  Both hooks are meant to help with data (de-)serialization workflows (see the sketch after this list).
+ `#653 <https://github.com/python-attrs/attrs/issues/653>`_
+- ``kw_only=True`` now works on Python 2.
+ `#700 <https://github.com/python-attrs/attrs/issues/700>`_
+- ``raise from`` now works on frozen classes on PyPy.
+ `#703 <https://github.com/python-attrs/attrs/issues/703>`_,
+ `#712 <https://github.com/python-attrs/attrs/issues/712>`_
+- ``attr.asdict()`` and ``attr.astuple()`` now treat ``frozenset``\ s like ``set``\ s with regards to the *retain_collection_types* argument.
+ `#704 <https://github.com/python-attrs/attrs/issues/704>`_
+- The type stubs for ``attr.s()`` and ``attr.make_class()`` no longer lack the *collect_by_mro* argument.
+ `#711 <https://github.com/python-attrs/attrs/issues/711>`_
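+
+For illustration, a minimal sketch of the *value_serializer* hook described above (the class and function names are made up for this example; assumes attrs >= 20.3.0 and Python 3.6+)::
+
+    import datetime
+
+    import attr
+
+    def serialize(inst, field, value):
+        # Called for every attribute; return the value that should
+        # end up in the resulting dict.
+        if isinstance(value, datetime.date):
+            return value.isoformat()
+        return value
+
+    @attr.s(auto_attribs=True)
+    class Event:
+        when: datetime.date
+
+    attr.asdict(Event(datetime.date(2020, 11, 5)), value_serializer=serialize)
+    # -> {'when': '2020-11-05'}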
+
+
+----
+
+
+20.2.0 (2020-09-05)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- ``attr.define()``, ``attr.frozen()``, ``attr.mutable()``, and ``attr.field()`` remain **provisional**.
+
+  This release fixes a number of bugs and improves ergonomics, but the APIs themselves remain mostly unchanged.
+
+ If you wish to use them together with mypy, you can simply drop `this plugin <https://gist.github.com/hynek/1e3844d0c99e479e716169034b5fa963#file-attrs_ng_plugin-py>`_ into your project.
+
+  Feel free to provide feedback on them in the linked issue #668.
+
+ We will release the ``attrs`` namespace once we have the feeling that the APIs have properly settled.
+ `#668 <https://github.com/python-attrs/attrs/issues/668>`_
+
+
+Changes
+^^^^^^^
+
+- ``attr.define()`` et al. now correctly detect ``__eq__`` and ``__ne__``.
+ `#671 <https://github.com/python-attrs/attrs/issues/671>`_
+- ``attr.define()`` et al's hybrid behavior now also works correctly when arguments are passed.
+ `#675 <https://github.com/python-attrs/attrs/issues/675>`_
+- It's possible to define custom ``__setattr__`` methods on slotted classes again.
+ `#681 <https://github.com/python-attrs/attrs/issues/681>`_
+- In 20.1.0 we introduced the ``inherited`` attribute on the ``attr.Attribute`` class to differentiate attributes that have been inherited and those that have been defined directly on the class.
+
+  It has proven to be problematic to involve that attribute when comparing instances of ``attr.Attribute`` though, because when sub-classing, attributes from base classes are suddenly not equal to themselves in a super class.
+
+ Therefore the ``inherited`` attribute will now be ignored when hashing and comparing instances of ``attr.Attribute``.
+ `#684 <https://github.com/python-attrs/attrs/issues/684>`_
+- ``zope.interface`` is now a "soft dependency" when running the test suite; if ``zope.interface`` is not installed when running the test suite, the interface-related tests will be automatically skipped.
+ `#685 <https://github.com/python-attrs/attrs/issues/685>`_
+- The ergonomics of creating frozen classes using ``@define(frozen=True)`` and sub-classing frozen classes has been improved:
+  you don't have to set ``on_setattr=None`` anymore (see the sketch after this list).
+ `#687 <https://github.com/python-attrs/attrs/issues/687>`_
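+
+For illustration, a minimal sketch of the improved frozen-class ergonomics described above (the class names are made up for this example; requires Python 3.6+ and assumes attrs >= 20.2.0)::
+
+    from attr import define
+
+    @define(frozen=True)
+    class Base:
+        x: int
+
+    @define(frozen=True)      # no explicit on_setattr=None needed anymore
+    class Child(Base):
+        y: int
+
+    Child(1, 2)  # -> Child(x=1, y=2); assigning to x or y raises FrozenInstanceError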
+
+
+----
+
+
+20.1.0 (2020-08-20)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Python 3.4 is not supported anymore.
+ It has been unsupported by the Python core team for a while now, its PyPI downloads are negligible, and our CI provider removed it as a supported option.
+
+ It's very unlikely that ``attrs`` will break under 3.4 anytime soon, which is why we do *not* block its installation on Python 3.4.
+ But we don't test it anymore and will block it once someone reports breakage.
+ `#608 <https://github.com/python-attrs/attrs/issues/608>`_
+
+
+Deprecations
+^^^^^^^^^^^^
+
+- Less of a deprecation and more of a heads up: the next release of ``attrs`` will introduce an ``attrs`` namespace.
+ That means that you'll finally be able to run ``import attrs`` with new functions that aren't cute abbreviations and that will carry better defaults.
+
+  This should not break any of your code, because project-local packages have priority over installed ones.
+ If this is a problem for you for some reason, please report it to our bug tracker and we'll figure something out.
+
+ The old ``attr`` namespace isn't going anywhere and its defaults are not changing – this is a purely additive measure.
+ Please check out the linked issue for more details.
+
+ These new APIs have been added *provisionally* as part of #666 so you can try them out today and provide feedback.
+ Learn more in the `API docs <https://www.attrs.org/en/stable/api.html>`_.
+ `#408 <https://github.com/python-attrs/attrs/issues/408>`_
+
+
+Changes
+^^^^^^^
+
+- Added ``attr.resolve_types()``.
+ It ensures that all forward-references and types in string form are resolved into concrete types.
+
+ You need this only if you need concrete types at runtime.
+ That means that if you only use types for static type checking, you do **not** need this function.
+ `#288 <https://github.com/python-attrs/attrs/issues/288>`_,
+ `#302 <https://github.com/python-attrs/attrs/issues/302>`_
+- Added the ``@attr.s(collect_by_mro=False)`` argument that, if set to ``True``, fixes the collection of attributes from base classes.
+
+ It's only necessary for certain cases of multiple-inheritance but is kept off for now for backward-compatibility reasons.
+ It will be turned on by default in the future.
+
+ As a side-effect, ``attr.Attribute`` now *always* has an ``inherited`` attribute indicating whether an attribute on a class was directly defined or inherited.
+ `#428 <https://github.com/python-attrs/attrs/issues/428>`_,
+ `#635 <https://github.com/python-attrs/attrs/issues/635>`_
+- On Python 3, all generated methods now have a docstring explaining that they have been created by ``attrs``.
+ `#506 <https://github.com/python-attrs/attrs/issues/506>`_
+- It is now possible to prevent ``attrs`` from auto-generating the ``__setstate__`` and ``__getstate__`` methods that are required for pickling of slotted classes.
+
+ Either pass ``@attr.s(getstate_setstate=False)`` or pass ``@attr.s(auto_detect=True)`` and implement them yourself:
+  if ``attrs`` finds either of the two methods directly on the decorated class, it implicitly assumes ``getstate_setstate=False`` (and implements neither).
+
+ This option works with dict classes but should never be necessary.
+ `#512 <https://github.com/python-attrs/attrs/issues/512>`_,
+ `#513 <https://github.com/python-attrs/attrs/issues/513>`_,
+ `#642 <https://github.com/python-attrs/attrs/issues/642>`_
+- Fixed a ``ValueError: Cell is empty`` bug that could happen in some rare edge cases.
+ `#590 <https://github.com/python-attrs/attrs/issues/590>`_
+- ``attrs`` can now automatically detect your own implementations and infer ``init=False``, ``repr=False``, ``eq=False``, ``order=False``, and ``hash=False`` if you set ``@attr.s(auto_detect=True)``.
+ ``attrs`` will ignore inherited methods.
+ If the argument implies more than one method (e.g. ``eq=True`` creates both ``__eq__`` and ``__ne__``), it's enough for *one* of them to exist and ``attrs`` will create *neither*.
+
+ This feature requires Python 3.
+ `#607 <https://github.com/python-attrs/attrs/issues/607>`_
+- Added ``attr.converters.pipe()``.
+  The feature allows combining multiple conversion callbacks into one by piping the value through all of them and returning the last result (see the sketch after this list).
+
+ As part of this feature, we had to relax the type information for converter callables.
+ `#618 <https://github.com/python-attrs/attrs/issues/618>`_
+- Fixed serialization behavior of non-slots classes with ``cache_hash=True``.
+ The hash cache will be cleared on operations which make "deep copies" of instances of classes with hash caching,
+ though the cache will not be cleared with shallow copies like those made by ``copy.copy()``.
+
+ Previously, ``copy.deepcopy()`` or serialization and deserialization with ``pickle`` would result in an un-initialized object.
+
+ This change also allows the creation of ``cache_hash=True`` classes with a custom ``__setstate__``,
+ which was previously forbidden (`#494 <https://github.com/python-attrs/attrs/issues/494>`_).
+ `#620 <https://github.com/python-attrs/attrs/issues/620>`_
+- It is now possible to specify hooks that are called whenever an attribute is set **after** a class has been instantiated.
+
+ You can pass ``on_setattr`` both to ``@attr.s()`` to set the default for all attributes on a class, and to ``@attr.ib()`` to overwrite it for individual attributes.
+
+  ``attrs`` also comes with a new module, ``attr.setters``, that brings helpers which run validators or converters, or allow freezing a subset of attributes.
+ `#645 <https://github.com/python-attrs/attrs/issues/645>`_,
+ `#660 <https://github.com/python-attrs/attrs/issues/660>`_
+- **Provisional** APIs called ``attr.define()``, ``attr.mutable()``, and ``attr.frozen()`` have been added.
+
+ They are only available on Python 3.6 and later, and call ``attr.s()`` with different default values.
+
+ If nothing comes up, they will become the official way for creating classes in 20.2.0 (see above).
+
+ **Please note** that it may take some time until mypy – and other tools that have dedicated support for ``attrs`` – recognize these new APIs.
+ Please **do not** open issues on our bug tracker, there is nothing we can do about it.
+ `#666 <https://github.com/python-attrs/attrs/issues/666>`_
+- We have also provisionally added ``attr.field()`` that supplants ``attr.ib()``.
+ It also requires at least Python 3.6 and is keyword-only.
+ Other than that, it only dropped a few arguments, but changed no defaults.
+
+ As with ``attr.s()``: ``attr.ib()`` is not going anywhere.
+ `#669 <https://github.com/python-attrs/attrs/issues/669>`_
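+
+For illustration, a minimal sketch of ``attr.converters.pipe()`` described above (the class name is made up for this example; assumes attrs >= 20.1.0)::
+
+    import attr
+
+    @attr.s
+    class C(object):
+        # The raw value is piped through both converters, left to right.
+        x = attr.ib(converter=attr.converters.pipe(str.strip, int))
+
+    C("  42  ").x  # -> 42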
+
+
+----
+
+
+19.3.0 (2019-10-15)
+-------------------
+
+Changes
+^^^^^^^
+
+- Fixed ``auto_attribs`` usage when default values cannot be compared directly with ``==``, such as ``numpy`` arrays.
+ `#585 <https://github.com/python-attrs/attrs/issues/585>`_
+
+
+----
+
+
+19.2.0 (2019-10-01)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Removed deprecated ``Attribute`` attribute ``convert`` per scheduled removal on 2019/1.
+ This planned deprecation is tracked in issue `#307 <https://github.com/python-attrs/attrs/issues/307>`_.
+ `#504 <https://github.com/python-attrs/attrs/issues/504>`_
+- ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` do not consider subclasses comparable anymore.
+
+ This has been deprecated since 18.2.0 and was raising a ``DeprecationWarning`` for over a year.
+ `#570 <https://github.com/python-attrs/attrs/issues/570>`_
+
+
+Deprecations
+^^^^^^^^^^^^
+
+- The ``cmp`` argument to ``attr.s()`` and ``attr.ib()`` is now deprecated.
+
+ Please use ``eq`` to add equality methods (``__eq__`` and ``__ne__``) and ``order`` to add ordering methods (``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``) instead – just like with `dataclasses <https://docs.python.org/3/library/dataclasses.html>`_.
+
+ Both are effectively ``True`` by default but it's enough to set ``eq=False`` to disable both at once.
+ Passing ``eq=False, order=True`` explicitly will raise a ``ValueError`` though.
+
+ Since this is arguably a deeper backward-compatibility break, it will have an extended deprecation period until 2021-06-01.
+ After that day, the ``cmp`` argument will be removed.
+
+ ``attr.Attribute`` also isn't orderable anymore.
+ `#574 <https://github.com/python-attrs/attrs/issues/574>`_
+
+
+Changes
+^^^^^^^
+
+- Updated ``attr.validators.__all__`` to include new validators added in `#425`_.
+ `#517 <https://github.com/python-attrs/attrs/issues/517>`_
+- Slotted classes now use a pure Python mechanism to rewrite the ``__class__`` cell when rebuilding the class, so ``super()`` works even on environments where ``ctypes`` is not installed.
+ `#522 <https://github.com/python-attrs/attrs/issues/522>`_
+- When collecting attributes using ``@attr.s(auto_attribs=True)``, attributes with a default of ``None`` are now deleted too.
+ `#523 <https://github.com/python-attrs/attrs/issues/523>`_,
+ `#556 <https://github.com/python-attrs/attrs/issues/556>`_
+- Fixed ``attr.validators.deep_iterable()`` and ``attr.validators.deep_mapping()`` type stubs.
+ `#533 <https://github.com/python-attrs/attrs/issues/533>`_
+- The ``attr.validators.is_callable()`` validator now raises ``attr.exceptions.NotCallableError``, a subclass of ``TypeError``, which reports the received value.
+ `#536 <https://github.com/python-attrs/attrs/issues/536>`_
+- ``@attr.s(auto_exc=True)`` now generates classes that are hashable by ID, as the documentation always claimed it would.
+ `#543 <https://github.com/python-attrs/attrs/issues/543>`_,
+ `#563 <https://github.com/python-attrs/attrs/issues/563>`_
+- Added ``attr.validators.matches_re()`` that checks whether string attributes match a regular expression (see the sketch after this list).
+ `#552 <https://github.com/python-attrs/attrs/issues/552>`_
+- Keyword-only attributes (``kw_only=True``) and attributes that are excluded from the generated ``__init__`` (``init=False``) can now appear before mandatory attributes.
+ `#559 <https://github.com/python-attrs/attrs/issues/559>`_
+- The fake filename for generated methods is now more stable.
+ It won't change when you restart the process.
+ `#560 <https://github.com/python-attrs/attrs/issues/560>`_
+- The value passed to ``@attr.ib(repr=…)`` can now be either a boolean (as before) or a callable.
+ That callable must return a string and is then used for formatting the attribute by the generated ``__repr__()`` method.
+ `#568 <https://github.com/python-attrs/attrs/issues/568>`_
+- Added ``attr.__version_info__`` that can be used to reliably check the version of ``attrs`` and write forward- and backward-compatible code.
+ Please check out the `section on deprecated APIs <http://www.attrs.org/en/stable/api.html#deprecated-apis>`_ on how to use it.
+ `#580 <https://github.com/python-attrs/attrs/issues/580>`_
+
+ .. _`#425`: https://github.com/python-attrs/attrs/issues/425
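+
+For illustration, a minimal sketch of ``attr.validators.matches_re()`` mentioned above (the class name and regular expression are made up for this example; assumes attrs >= 19.2.0)::
+
+    import attr
+
+    @attr.s
+    class User(object):
+        email = attr.ib(validator=attr.validators.matches_re(r".+@.+"))
+
+    User("hi@example.org")   # passes validation
+    # User("nope")           # would raise ValueError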
+
+
+----
+
+
+19.1.0 (2019-03-03)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Fixed a bug where deserialized objects with ``cache_hash=True`` could have incorrect hash code values.
+ This change breaks classes with ``cache_hash=True`` when a custom ``__setstate__`` is present.
+ An exception will be thrown when applying the ``attrs`` annotation to such a class.
+ This limitation is tracked in issue `#494 <https://github.com/python-attrs/attrs/issues/494>`_.
+ `#482 <https://github.com/python-attrs/attrs/issues/482>`_
+
+
+Changes
+^^^^^^^
+
+- Add ``is_callable``, ``deep_iterable``, and ``deep_mapping`` validators.
+
+ * ``is_callable``: validates that a value is callable
+ * ``deep_iterable``: Allows recursion down into an iterable,
+ applying another validator to every member in the iterable
+ as well as applying an optional validator to the iterable itself.
+ * ``deep_mapping``: Allows recursion down into the items in a mapping object,
+ applying a key validator and a value validator to the key and value in every item.
+ Also applies an optional validator to the mapping object itself.
+
+  You can find them in the ``attr.validators`` module (see the sketch after this list).
+ `#425`_
+- Fixed stub files to prevent errors raised by mypy's ``disallow_any_generics = True`` option.
+ `#443 <https://github.com/python-attrs/attrs/issues/443>`_
+- Attributes with ``init=False`` now can follow after ``kw_only=True`` attributes.
+ `#450 <https://github.com/python-attrs/attrs/issues/450>`_
+- ``attrs`` now has first class support for defining exception classes.
+
+  If you define a class using ``@attr.s(auto_exc=True)`` and subclass an exception, the class will behave like a well-behaved exception class, including an appropriate ``__str__`` method, with all attributes additionally available in an ``args`` attribute.
+ `#500 <https://github.com/python-attrs/attrs/issues/500>`_
+- Clarified documentation for hashing to warn that hashable objects should be deeply immutable (in their usage, even if this is not enforced).
+ `#503 <https://github.com/python-attrs/attrs/issues/503>`_
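+
+For illustration, a minimal sketch of the ``deep_iterable`` validator described above (the class name is made up for this example; assumes attrs >= 19.1.0)::
+
+    import attr
+    from attr import validators as v
+
+    @attr.s
+    class C(object):
+        xs = attr.ib(
+            validator=v.deep_iterable(
+                member_validator=v.instance_of(int),
+                iterable_validator=v.instance_of(list),
+            )
+        )
+
+    C([1, 2, 3])    # passes validation
+    # C([1, "2"])   # would raise TypeError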
+
+
+----
+
+
+18.2.0 (2018-09-01)
+-------------------
+
+Deprecations
+^^^^^^^^^^^^
+
+- Comparing subclasses using ``<``, ``>``, ``<=``, and ``>=`` is now deprecated.
+ The docs always claimed that instances are only compared if the types are identical, so this is a first step to conform to the docs.
+
+ Equality operators (``==`` and ``!=``) were always strict in this regard.
+ `#394 <https://github.com/python-attrs/attrs/issues/394>`_
+
+
+Changes
+^^^^^^^
+
+- ``attrs`` now ships its own `PEP 484 <https://www.python.org/dev/peps/pep-0484/>`_ type hints.
+ Together with `mypy <http://mypy-lang.org>`_'s ``attrs`` plugin, you've got all you need for writing statically typed code in both Python 2 and 3!
+
+ At that occasion, we've also added `narrative docs <https://www.attrs.org/en/stable/types.html>`_ about type annotations in ``attrs``.
+ `#238 <https://github.com/python-attrs/attrs/issues/238>`_
+- Added *kw_only* arguments to ``attr.ib`` and ``attr.s``, and a corresponding *kw_only* attribute to ``attr.Attribute``.
+ This change makes it possible to have a generated ``__init__`` with keyword-only arguments on Python 3, relaxing the required ordering of default and non-default valued attributes.
+ `#281 <https://github.com/python-attrs/attrs/issues/281>`_,
+ `#411 <https://github.com/python-attrs/attrs/issues/411>`_
+- The test suite now runs with ``hypothesis.HealthCheck.too_slow`` disabled to prevent CI breakage on slower computers.
+ `#364 <https://github.com/python-attrs/attrs/issues/364>`_,
+ `#396 <https://github.com/python-attrs/attrs/issues/396>`_
+- ``attr.validators.in_()`` now raises a ``ValueError`` with a useful message even if the options are a string and the value is not a string.
+ `#383 <https://github.com/python-attrs/attrs/issues/383>`_
+- ``attr.asdict()`` now properly handles deeply nested lists and dictionaries.
+ `#395 <https://github.com/python-attrs/attrs/issues/395>`_
+- Added ``attr.converters.default_if_none()`` that allows replacing ``None`` values in attributes.
+  For example, ``attr.ib(converter=default_if_none(""))`` replaces ``None`` with an empty string (see the sketch after this list).
+ `#400 <https://github.com/python-attrs/attrs/issues/400>`_,
+ `#414 <https://github.com/python-attrs/attrs/issues/414>`_
+- Fixed a reference leak where the original class would remain live after being replaced when ``slots=True`` is set.
+ `#407 <https://github.com/python-attrs/attrs/issues/407>`_
+- Slotted classes can now be made weakly referenceable by passing ``@attr.s(weakref_slot=True)``.
+ `#420 <https://github.com/python-attrs/attrs/issues/420>`_
+- Added *cache_hash* option to ``@attr.s`` which causes the hash code to be computed once and stored on the object.
+ `#426 <https://github.com/python-attrs/attrs/issues/426>`_
+- Attributes can be named ``property`` and ``itemgetter`` now.
+ `#430 <https://github.com/python-attrs/attrs/issues/430>`_
+- It is now possible to override a base class' class variable using only class annotations.
+ `#431 <https://github.com/python-attrs/attrs/issues/431>`_
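+
+For illustration, a minimal sketch of ``attr.converters.default_if_none()`` described above (the class name is made up for this example; assumes attrs >= 18.2.0)::
+
+    import attr
+    from attr.converters import default_if_none
+
+    @attr.s
+    class C(object):
+        name = attr.ib(converter=default_if_none(""), default=None)
+
+    C().name      # -> ""
+    C("x").name   # -> "x"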
+
+
+----
+
+
+18.1.0 (2018-05-03)
+-------------------
+
+Changes
+^^^^^^^
+
+- ``x=X(); x.cycle = x; repr(x)`` will no longer raise a ``RecursionError``, and will instead show as ``X(x=...)``.
+
+ `#95 <https://github.com/python-attrs/attrs/issues/95>`_
+- ``attr.ib(factory=f)`` is now syntactic sugar for the common case of ``attr.ib(default=attr.Factory(f))`` (see the sketch after this list).
+
+ `#178 <https://github.com/python-attrs/attrs/issues/178>`_,
+ `#356 <https://github.com/python-attrs/attrs/issues/356>`_
+- Added ``attr.fields_dict()`` to return an ordered dictionary of ``attrs`` attributes for a class, whose keys are the attribute names.
+
+ `#290 <https://github.com/python-attrs/attrs/issues/290>`_,
+ `#349 <https://github.com/python-attrs/attrs/issues/349>`_
+- The order of attributes that are passed into ``attr.make_class()`` or the *these* argument of ``@attr.s()`` is now retained if the dictionary is ordered (i.e. ``dict`` on Python 3.6 and later, ``collections.OrderedDict`` otherwise).
+
+  Before, the order was always determined by the order in which the attributes had been defined, which may not be desirable when creating classes programmatically.
+
+ `#300 <https://github.com/python-attrs/attrs/issues/300>`_,
+ `#339 <https://github.com/python-attrs/attrs/issues/339>`_,
+ `#343 <https://github.com/python-attrs/attrs/issues/343>`_
+- In slotted classes, ``__getstate__`` and ``__setstate__`` now ignore the ``__weakref__`` attribute.
+
+ `#311 <https://github.com/python-attrs/attrs/issues/311>`_,
+ `#326 <https://github.com/python-attrs/attrs/issues/326>`_
+- Setting the cell type is now completely best effort.
+ This fixes ``attrs`` on Jython.
+
+  We cannot make any guarantees regarding Jython though, because our test suite cannot run due to dependency incompatibilities.
+
+ `#321 <https://github.com/python-attrs/attrs/issues/321>`_,
+ `#334 <https://github.com/python-attrs/attrs/issues/334>`_
+- If ``attr.s`` is passed a *these* argument, it will no longer attempt to remove attributes with the same name from the class body.
+
+ `#322 <https://github.com/python-attrs/attrs/issues/322>`_,
+ `#323 <https://github.com/python-attrs/attrs/issues/323>`_
+- The hash of ``attr.NOTHING`` is now vegan and faster on 32bit Python builds.
+
+ `#331 <https://github.com/python-attrs/attrs/issues/331>`_,
+ `#332 <https://github.com/python-attrs/attrs/issues/332>`_
+- The overhead of instantiating frozen dict classes is virtually eliminated.
+ `#336 <https://github.com/python-attrs/attrs/issues/336>`_
+- Generated ``__init__`` methods now have an ``__annotations__`` attribute derived from the types of the fields.
+
+ `#363 <https://github.com/python-attrs/attrs/issues/363>`_
+- We have restructured the documentation a bit to account for ``attrs``' growth in scope.
+ Instead of putting everything into the `examples <https://www.attrs.org/en/stable/examples.html>`_ page, we have started to extract narrative chapters.
+
+ So far, we've added chapters on `initialization <https://www.attrs.org/en/stable/init.html>`_ and `hashing <https://www.attrs.org/en/stable/hashing.html>`_.
+
+ Expect more to come!
+
+ `#369 <https://github.com/python-attrs/attrs/issues/369>`_,
+ `#370 <https://github.com/python-attrs/attrs/issues/370>`_
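+
+For illustration, the *factory* sugar described above in a minimal sketch (the class name is made up for this example; assumes attrs >= 18.1.0)::
+
+    import attr
+
+    @attr.s
+    class C(object):
+        # The following two attributes are declared equivalently.
+        xs = attr.ib(factory=list)
+        ys = attr.ib(default=attr.Factory(list))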
+
+
+----
+
+
+17.4.0 (2017-12-30)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- The traversal of MROs when using multiple inheritance was backward:
+ If you defined a class ``C`` that subclasses ``A`` and ``B`` like ``C(A, B)``, ``attrs`` would have collected the attributes from ``B`` *before* those of ``A``.
+
+ This is now fixed and means that in classes that employ multiple inheritance, the output of ``__repr__`` and the order of positional arguments in ``__init__`` changes.
+ Because of the nature of this bug, a proper deprecation cycle was unfortunately impossible.
+
+ Generally speaking, it's advisable to prefer ``kwargs``-based initialization anyways – *especially* if you employ multiple inheritance and diamond-shaped hierarchies.
+
+ `#298 <https://github.com/python-attrs/attrs/issues/298>`_,
+ `#299 <https://github.com/python-attrs/attrs/issues/299>`_,
+ `#304 <https://github.com/python-attrs/attrs/issues/304>`_
+- The ``__repr__`` set by ``attrs`` no longer produces an ``AttributeError`` when the instance is missing some of the specified attributes (either through deleting or after using ``init=False`` on some attributes).
+
+ This can break code that relied on ``repr(attr_cls_instance)`` raising ``AttributeError`` to check if any ``attrs``-specified members were unset.
+
+ If you were using this, you can implement a custom method for checking this::
+
+ def has_unset_members(self):
+ for field in attr.fields(type(self)):
+ try:
+ getattr(self, field.name)
+ except AttributeError:
+ return True
+ return False
+
+ `#308 <https://github.com/python-attrs/attrs/issues/308>`_
+
+
+Deprecations
+^^^^^^^^^^^^
+
+- The ``attr.ib(convert=callable)`` option is now deprecated in favor of ``attr.ib(converter=callable)``.
+
+ This is done to achieve consistency with other noun-based arguments like *validator*.
+
+  *convert* will keep working until at least January 2019 while raising a ``DeprecationWarning`` (see the sketch after this list).
+
+ `#307 <https://github.com/python-attrs/attrs/issues/307>`_
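+
+For illustration, the renamed argument in use (the class name is made up for this example; assumes attrs >= 17.4.0)::
+
+    import attr
+
+    @attr.s
+    class C(object):
+        x = attr.ib(converter=int)   # formerly: attr.ib(convert=int)
+
+    C("1").x  # -> 1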
+
+
+Changes
+^^^^^^^
+
+- Generated ``__hash__`` methods now hash the class type along with the attribute values.
+  Until now, the hashes of instances of two different classes with the same attribute values were identical, which was a bug.
+
+ The generated method is also *much* faster now.
+
+ `#261 <https://github.com/python-attrs/attrs/issues/261>`_,
+ `#295 <https://github.com/python-attrs/attrs/issues/295>`_,
+ `#296 <https://github.com/python-attrs/attrs/issues/296>`_
+- ``attr.ib``\ ’s *metadata* argument now defaults to a unique empty ``dict`` instance instead of sharing a common empty ``dict`` for all.
+ The singleton empty ``dict`` is still enforced.
+
+ `#280 <https://github.com/python-attrs/attrs/issues/280>`_
+- ``ctypes`` is now optional; however, if it's missing, a bare ``super()`` will not work in slotted classes.
+ This should only happen in special environments like Google App Engine.
+
+ `#284 <https://github.com/python-attrs/attrs/issues/284>`_,
+ `#286 <https://github.com/python-attrs/attrs/issues/286>`_
+- The attribute redefinition feature introduced in 17.3.0 now takes into account if an attribute is redefined via multiple inheritance.
+ In that case, the definition that is closer to the base of the class hierarchy wins.
+
+ `#285 <https://github.com/python-attrs/attrs/issues/285>`_,
+ `#287 <https://github.com/python-attrs/attrs/issues/287>`_
+- Subclasses of ``auto_attribs=True`` can be empty now.
+
+ `#291 <https://github.com/python-attrs/attrs/issues/291>`_,
+ `#292 <https://github.com/python-attrs/attrs/issues/292>`_
+- Equality tests are *much* faster now.
+
+ `#306 <https://github.com/python-attrs/attrs/issues/306>`_
+- All generated methods now have correct ``__module__``, ``__name__``, and (on Python 3) ``__qualname__`` attributes.
+
+ `#309 <https://github.com/python-attrs/attrs/issues/309>`_
+
+
+----
+
+
+17.3.0 (2017-11-08)
+-------------------
+
+Backward-incompatible Changes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Attributes are no longer defined on the class body.
+
+ This means that if you define a class ``C`` with an attribute ``x``, the class will *not* have an attribute ``x`` for introspection.
+ Instead of ``C.x``, use ``attr.fields(C).x`` or look at ``C.__attrs_attrs__``.
+ The old behavior has been deprecated since version 16.1.
+ (`#253 <https://github.com/python-attrs/attrs/issues/253>`_)
+
+
+Changes
+^^^^^^^
+
+- ``super()`` and ``__class__`` now work with slotted classes on Python 3.
+ (`#102 <https://github.com/python-attrs/attrs/issues/102>`_, `#226 <https://github.com/python-attrs/attrs/issues/226>`_, `#269 <https://github.com/python-attrs/attrs/issues/269>`_, `#270 <https://github.com/python-attrs/attrs/issues/270>`_, `#272 <https://github.com/python-attrs/attrs/issues/272>`_)
+- Added *type* argument to ``attr.ib()`` and corresponding ``type`` attribute to ``attr.Attribute``.
+
+ This change paves the way for automatic type checking and serialization (though as of this release ``attrs`` does not make use of it).
+ In Python 3.6 or higher, the value of ``attr.Attribute.type`` can alternately be set using variable type annotations
+ (see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_).
+ (`#151 <https://github.com/python-attrs/attrs/issues/151>`_, `#214 <https://github.com/python-attrs/attrs/issues/214>`_, `#215 <https://github.com/python-attrs/attrs/issues/215>`_, `#239 <https://github.com/python-attrs/attrs/issues/239>`_)
+- The combination of ``str=True`` and ``slots=True`` now works on Python 2.
+ (`#198 <https://github.com/python-attrs/attrs/issues/198>`_)
+- ``attr.Factory`` is hashable again.
+ (`#204 <https://github.com/python-attrs/attrs/issues/204>`_)
+- Subclasses now can overwrite attribute definitions of their base classes.
+
+ That means that you can -- for example -- change the default value for an attribute by redefining it.
+ (`#221 <https://github.com/python-attrs/attrs/issues/221>`_, `#229 <https://github.com/python-attrs/attrs/issues/229>`_)
+- Added new option *auto_attribs* to ``@attr.s`` that allows to collect annotated fields without setting them to ``attr.ib()``.
+
+  Setting a field to an ``attr.ib()`` is still possible in order to supply options like validators.
+  Setting it to any other value is treated as if it had been passed as ``attr.ib(default=value)`` -- passing an instance of ``attr.Factory`` also works as expected (see the sketch after this list).
+ (`#262 <https://github.com/python-attrs/attrs/issues/262>`_, `#277 <https://github.com/python-attrs/attrs/issues/277>`_)
+- Instances of classes created using ``attr.make_class()`` can now be pickled.
+ (`#282 <https://github.com/python-attrs/attrs/issues/282>`_)
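+
+For illustration, a minimal sketch of *auto_attribs* described above (the class name is made up for this example; requires Python 3.6+ and assumes attrs >= 17.3.0)::
+
+    import attr
+
+    @attr.s(auto_attribs=True)
+    class C:
+        x: int                                      # mandatory attribute
+        y: int = 0                                  # same as attr.ib(default=0)
+        z: int = attr.ib(default=0, validator=attr.validators.instance_of(int))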
+
+
+----
+
+
+17.2.0 (2017-05-24)
+-------------------
+
+
+Changes:
+^^^^^^^^
+
+- Validators are hashable again.
+ Note that validators may become frozen in the future, pending availability of no-overhead frozen classes.
+ `#192 <https://github.com/python-attrs/attrs/issues/192>`_
+
+
+----
+
+
+17.1.0 (2017-05-16)
+-------------------
+
+To encourage more participation, the project has also been moved into a `dedicated GitHub organization <https://github.com/python-attrs/>`_ and everyone is most welcome to join!
+
+``attrs`` also has a logo now!
+
+.. image:: https://www.attrs.org/en/latest/_static/attrs_logo.png
+ :alt: attrs logo
+
+
+Backward-incompatible Changes:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- ``attrs`` will set the ``__hash__()`` method to ``None`` by default now.
+ The way hashes were handled before was in conflict with `Python's specification <https://docs.python.org/3/reference/datamodel.html#object.__hash__>`_.
+ This *may* break some software although this breakage is most likely just surfacing of latent bugs.
+ You can always make ``attrs`` create the ``__hash__()`` method using ``@attr.s(hash=True)``.
+ See `#136`_ for the rationale of this change.
+
+ .. warning::
+
+ Please *do not* upgrade blindly and *do* test your software!
+ *Especially* if you use instances as dict keys or put them into sets!
+
+- Correspondingly, ``attr.ib``'s *hash* argument is ``None`` by default too and mirrors the *cmp* argument as it should.
+
+
+Deprecations:
+^^^^^^^^^^^^^
+
+- ``attr.assoc()`` is now deprecated in favor of ``attr.evolve()`` and will stop working in 2018.
+
+
+Changes:
+^^^^^^^^
+
+- Fix default hashing behavior.
+ Now *hash* mirrors the value of *cmp* and classes are unhashable by default.
+ `#136`_
+ `#142 <https://github.com/python-attrs/attrs/issues/142>`_
+- Added ``attr.evolve()`` that, given an instance of an ``attrs`` class and field changes as keyword arguments, will instantiate a copy of the given instance with the changes applied.
+  ``evolve()`` replaces ``assoc()``, which is now deprecated (see the sketch after this list).
+ ``evolve()`` is significantly faster than ``assoc()``, and requires the class have an initializer that can take the field values as keyword arguments (like ``attrs`` itself can generate).
+ `#116 <https://github.com/python-attrs/attrs/issues/116>`_
+ `#124 <https://github.com/python-attrs/attrs/pull/124>`_
+ `#135 <https://github.com/python-attrs/attrs/pull/135>`_
+- ``FrozenInstanceError`` is now raised when trying to delete an attribute from a frozen class.
+ `#118 <https://github.com/python-attrs/attrs/pull/118>`_
+- Frozen-ness of classes is now inherited.
+ `#128 <https://github.com/python-attrs/attrs/pull/128>`_
+- ``__attrs_post_init__()`` is now run if validation is disabled.
+ `#130 <https://github.com/python-attrs/attrs/pull/130>`_
+- Added ``attr.validators.in_(options)`` that, given the allowed ``options``, checks whether the attribute value is among them.
+ This can be used to check constants, enums, mappings, etc.
+ `#181 <https://github.com/python-attrs/attrs/pull/181>`_
+- Added ``attr.validators.and_()`` that composes multiple validators into one.
+ `#161 <https://github.com/python-attrs/attrs/issues/161>`_
+- For convenience, the *validator* argument of ``@attr.s`` now can take a list of validators that are wrapped using ``and_()``.
+ `#138 <https://github.com/python-attrs/attrs/issues/138>`_
+- Accordingly, ``attr.validators.optional()`` now can take a list of validators too.
+ `#161 <https://github.com/python-attrs/attrs/issues/161>`_
+- Validators can now be defined conveniently inline by using the attribute as a decorator.
+ Check out the `validator examples <http://www.attrs.org/en/stable/init.html#decorator>`_ to see it in action!
+ `#143 <https://github.com/python-attrs/attrs/issues/143>`_
+- ``attr.Factory()`` now has a *takes_self* argument that makes the initializer pass the partially initialized instance into the factory.
+ In other words you can define attribute defaults based on other attributes.
+ `#165`_
+ `#189 <https://github.com/python-attrs/attrs/issues/189>`_
+- Default factories can now also be defined inline using decorators.
+ They are *always* passed the partially initialized instance.
+ `#165`_
+- Conversion can now be made optional using ``attr.converters.optional()``.
+ `#105 <https://github.com/python-attrs/attrs/issues/105>`_
+ `#173 <https://github.com/python-attrs/attrs/pull/173>`_
+- ``attr.make_class()`` now accepts the keyword argument ``bases`` which allows for subclassing.
+ `#152 <https://github.com/python-attrs/attrs/pull/152>`_
+- Metaclasses are now preserved with ``slots=True``.
+ `#155 <https://github.com/python-attrs/attrs/pull/155>`_
+
+.. _`#136`: https://github.com/python-attrs/attrs/issues/136
+.. _`#165`: https://github.com/python-attrs/attrs/issues/165
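+
+For illustration, a minimal sketch of ``attr.evolve()`` described above (the class name is made up for this example; assumes attrs >= 17.1.0)::
+
+    import attr
+
+    @attr.s
+    class C(object):
+        x = attr.ib()
+        y = attr.ib()
+
+    i1 = C(1, 2)
+    i2 = attr.evolve(i1, y=3)   # -> C(x=1, y=3); i1 stays untouched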
+
+
+----
+
+
+16.3.0 (2016-11-24)
+-------------------
+
+Changes:
+^^^^^^^^
+
+- Attributes now can have user-defined metadata which greatly improves ``attrs``'s extensibility.
+ `#96 <https://github.com/python-attrs/attrs/pull/96>`_
+- Allow for a ``__attrs_post_init__()`` method that -- if defined -- will get called at the end of the ``attrs``-generated ``__init__()`` method (see the sketch after this list).
+ `#111 <https://github.com/python-attrs/attrs/pull/111>`_
+- Added ``@attr.s(str=True)`` that will optionally create a ``__str__()`` method that is identical to ``__repr__()``.
+  This is mainly useful with ``Exception``\ s and other classes that rely on a useful ``__str__()`` implementation but overwrite the default one with a poor one of their own.
+  Default Python class behavior is to use ``__repr__()`` as ``__str__()`` anyway.
+
+ If you tried using ``attrs`` with ``Exception``\ s and were puzzled by the tracebacks: this option is for you.
+- ``__name__`` is no longer overwritten with ``__qualname__`` for ``attr.s(slots=True)`` classes.
+ `#99 <https://github.com/python-attrs/attrs/issues/99>`_
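+
+For illustration, a minimal sketch of ``__attrs_post_init__()`` described above (the class name is made up for this example; assumes attrs >= 16.3.0)::
+
+    import attr
+
+    @attr.s
+    class C(object):
+        x = attr.ib()
+        y = attr.ib(init=False, default=None)
+
+        def __attrs_post_init__(self):
+            # Runs at the very end of the generated __init__().
+            self.y = self.x + 1
+
+    C(1).y  # -> 2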
+
+
+----
+
+
+16.2.0 (2016-09-17)
+-------------------
+
+Changes:
+^^^^^^^^
+
+- Added ``attr.astuple()`` that -- similarly to ``attr.asdict()`` -- returns the instance as a tuple.
+ `#77 <https://github.com/python-attrs/attrs/issues/77>`_
+- Converters now work with frozen classes.
+ `#76 <https://github.com/python-attrs/attrs/issues/76>`_
+- Instantiation of ``attrs`` classes with converters is now significantly faster.
+ `#80 <https://github.com/python-attrs/attrs/pull/80>`_
+- Pickling now works with slotted classes.
+ `#81 <https://github.com/python-attrs/attrs/issues/81>`_
+- ``attr.assoc()`` now works with slotted classes.
+ `#84 <https://github.com/python-attrs/attrs/issues/84>`_
+- The tuple returned by ``attr.fields()`` now also allows accessing the ``Attribute`` instances by name.
+ Yes, we've subclassed ``tuple`` so you don't have to!
+ Therefore ``attr.fields(C).x`` is equivalent to the deprecated ``C.x`` and works with slotted classes.
+ `#88 <https://github.com/python-attrs/attrs/issues/88>`_
+
+
+----
+
+
+16.1.0 (2016-08-30)
+-------------------
+
+Backward-incompatible Changes:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- All instances where function arguments were called ``cl`` have been changed to the more Pythonic ``cls``.
+  Since it was always the first argument, it's doubtful anyone ever called those functions using the keyword form.
+ If so, sorry for any breakage but there's no practical deprecation path to solve this ugly wart.
+
+
+Deprecations:
+^^^^^^^^^^^^^
+
+- Accessing ``Attribute`` instances on class objects is now deprecated and will stop working in 2017.
+  If you need introspection, please use the ``__attrs_attrs__`` attribute or the ``attr.fields()`` function that carries them too.
+  In the future, the attributes that are defined on the class body and are usually overwritten in your ``__init__`` method will simply be removed after ``@attr.s`` has been applied.
+
+ This will remove the confusing error message if you write your own ``__init__`` and forget to initialize some attribute.
+ Instead you will get a straightforward ``AttributeError``.
+ In other words: decorated classes will work more like plain Python classes which was always ``attrs``'s goal.
+- The serious business aliases ``attr.attributes`` and ``attr.attr`` have been deprecated in favor of ``attr.attrs`` and ``attr.attrib`` which are much more consistent and frankly obvious in hindsight.
+ They will be purged from documentation immediately but there are no plans to actually remove them.
+
+
+Changes:
+^^^^^^^^
+
+- ``attr.asdict()``\ 's ``dict_factory`` argument is now propagated on recursion.
+ `#45 <https://github.com/python-attrs/attrs/issues/45>`_
+- ``attr.asdict()``, ``attr.has()`` and ``attr.fields()`` are significantly faster.
+ `#48 <https://github.com/python-attrs/attrs/issues/48>`_
+ `#51 <https://github.com/python-attrs/attrs/issues/51>`_
+- Add ``attr.attrs`` and ``attr.attrib`` as more consistent aliases for ``attr.s`` and ``attr.ib``.
+- Add *frozen* option to ``attr.s`` that will make instances best-effort immutable.
+ `#60 <https://github.com/python-attrs/attrs/issues/60>`_
+- ``attr.asdict()`` now takes ``retain_collection_types`` as an argument.
+ If ``True``, it does not convert attributes of type ``tuple`` or ``set`` to ``list``.
+ `#69 <https://github.com/python-attrs/attrs/issues/69>`_
+
+
+----
+
+
+16.0.0 (2016-05-23)
+-------------------
+
+Backward-incompatible Changes:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Python 3.3 and 2.6 are no longer supported.
+ They may work by chance but any effort to keep them working has ceased.
+
+ The last Python 2.6 release was on October 29, 2013 and is no longer supported by the CPython core team.
+ Major Python packages like Django and Twisted dropped Python 2.6 a while ago already.
+
+ Python 3.3 never had a significant user base and wasn't part of any distribution's LTS release.
+
+Changes:
+^^^^^^^^
+
+- ``__slots__`` have arrived!
+ Classes now can automatically be `slotted <https://docs.python.org/3/reference/datamodel.html#slots>`_-style (and save your precious memory) just by passing ``slots=True``.
+ `#35 <https://github.com/python-attrs/attrs/issues/35>`_
+- Allow the case of initializing attributes that are set to ``init=False``.
+ This allows for clean initializer parameter lists while being able to initialize attributes to default values.
+ `#32 <https://github.com/python-attrs/attrs/issues/32>`_
+- ``attr.asdict()`` can now produce arbitrary mappings instead of Python ``dict``\ s when provided with a ``dict_factory`` argument.
+ `#40 <https://github.com/python-attrs/attrs/issues/40>`_
+- Multiple performance improvements.
+
+
+----
+
+
+15.2.0 (2015-12-08)
+-------------------
+
+Changes:
+^^^^^^^^
+
+- Added a ``convert`` argument to ``attr.ib``, which allows specifying a function to run on arguments.
+ This allows for simple type conversions, e.g. with ``attr.ib(convert=int)``.
+ `#26 <https://github.com/python-attrs/attrs/issues/26>`_
+- Speed up object creation when attribute validators are used.
+ `#28 <https://github.com/python-attrs/attrs/issues/28>`_
+
+
+----
+
+
+15.1.0 (2015-08-20)
+-------------------
+
+Changes:
+^^^^^^^^
+
+- Added ``attr.validators.optional()`` that wraps other validators allowing attributes to be ``None``.
+ `#16 <https://github.com/python-attrs/attrs/issues/16>`_
+- Multi-level inheritance now works.
+ `#24 <https://github.com/python-attrs/attrs/issues/24>`_
+- ``__repr__()`` now works with non-redecorated subclasses.
+ `#20 <https://github.com/python-attrs/attrs/issues/20>`_
+
+
+----
+
+
+15.0.0 (2015-04-15)
+-------------------
+
+Changes:
+^^^^^^^^
+
+Initial release.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/LICENSE b/testing/web-platform/tests/tools/third_party/attrs/LICENSE
new file mode 100644
index 0000000000..7ae3df9309
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Hynek Schlawack
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/MANIFEST.in b/testing/web-platform/tests/tools/third_party/attrs/MANIFEST.in
new file mode 100644
index 0000000000..3d68bf9c5d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/MANIFEST.in
@@ -0,0 +1,24 @@
+include LICENSE *.rst *.toml *.yml *.yaml *.ini
+graft .github
+
+# Stubs
+recursive-include src *.pyi
+recursive-include src py.typed
+
+# Tests
+include tox.ini conftest.py
+recursive-include tests *.py
+recursive-include tests *.yml
+
+# Documentation
+include docs/Makefile docs/docutils.conf
+recursive-include docs *.png
+recursive-include docs *.svg
+recursive-include docs *.py
+recursive-include docs *.rst
+prune docs/_build
+
+# Just to keep check-manifest happy; on releases those files are gone.
+# Last rule wins!
+exclude changelog.d/*.rst
+include changelog.d/towncrier_template.rst
diff --git a/testing/web-platform/tests/tools/third_party/attrs/README.rst b/testing/web-platform/tests/tools/third_party/attrs/README.rst
new file mode 100644
index 0000000000..709bba83d7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/README.rst
@@ -0,0 +1,135 @@
+.. raw:: html
+
+ <p align="center">
+ <a href="https://www.attrs.org/">
+ <img src="./docs/_static/attrs_logo.svg" width="35%" alt="attrs" />
+ </a>
+ </p>
+ <p align="center">
+ <a href="https://www.attrs.org/en/stable/?badge=stable">
+ <img src="https://img.shields.io/badge/Docs-Read%20The%20Docs-black" alt="Documentation" />
+ </a>
+ <a href="https://github.com/python-attrs/attrs/blob/main/LICENSE">
+ <img src="https://img.shields.io/badge/license-MIT-C06524" alt="License: MIT" />
+ </a>
+ <a href="https://pypi.org/project/attrs/">
+ <img src="https://img.shields.io/pypi/v/attrs" />
+ </a>
+ </p>
+
+.. teaser-begin
+
+``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder methods <https://www.attrs.org/en/latest/glossary.html#term-dunder-methods>`_).
+`Trusted by NASA <https://docs.github.com/en/account-and-profile/setting-up-and-managing-your-github-profile/customizing-your-profile/personalizing-your-profile#list-of-qualifying-repositories-for-mars-2020-helicopter-contributor-badge>`_ for Mars missions since 2020!
+
+Its main goal is to help you to write **concise** and **correct** software without slowing down your code.
+
+.. teaser-end
+
+For that, it gives you a class decorator and a way to declaratively define the attributes on that class:
+
+.. -code-begin-
+
+.. code-block:: pycon
+
+ >>> from attrs import asdict, define, make_class, Factory
+
+ >>> @define
+ ... class SomeClass:
+ ... a_number: int = 42
+ ... list_of_numbers: list[int] = Factory(list)
+ ...
+ ... def hard_math(self, another_number):
+ ... return self.a_number + sum(self.list_of_numbers) * another_number
+
+
+ >>> sc = SomeClass(1, [1, 2, 3])
+ >>> sc
+ SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
+
+ >>> sc.hard_math(3)
+ 19
+ >>> sc == SomeClass(1, [1, 2, 3])
+ True
+ >>> sc != SomeClass(2, [3, 2, 1])
+ True
+
+ >>> asdict(sc)
+ {'a_number': 1, 'list_of_numbers': [1, 2, 3]}
+
+ >>> SomeClass()
+ SomeClass(a_number=42, list_of_numbers=[])
+
+ >>> C = make_class("C", ["a", "b"])
+ >>> C("foo", "bar")
+ C(a='foo', b='bar')
+
+
+After *declaring* your attributes ``attrs`` gives you:
+
+- a concise and explicit overview of the class's attributes,
+- a nice human-readable ``__repr__``,
+- equality-checking methods,
+- an initializer,
+- and much more,
+
+*without* writing dull boilerplate code again and again and *without* runtime performance penalties.
+
+**Hate type annotations**!?
+No problem!
+Types are entirely **optional** with ``attrs``.
+Simply assign ``attrs.field()`` to the attributes instead of annotating them with types.
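+
+A minimal sketch of that annotation-free style (the class name is made up for this example):
+
+.. code-block:: python
+
+   from attrs import define, field
+
+   @define
+   class SomePoint:
+       x = field()
+       y = field(default=0)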
+
+----
+
+This example uses ``attrs``'s modern APIs that have been introduced in version 20.1.0, and the ``attrs`` package import name that has been added in version 21.3.0.
+The classic APIs (``@attr.s``, ``attr.ib``, plus their serious business aliases) and the ``attr`` package import name will remain **indefinitely**.
+
+Please check out `On The Core API Names <https://www.attrs.org/en/latest/names.html>`_ for a more in-depth explanation.
+
+
+Data Classes
+============
+
+On the tin, ``attrs`` might remind you of ``dataclasses`` (and indeed, ``dataclasses`` are a descendant of ``attrs``).
+In practice it does a lot more and is more flexible.
+For instance it allows you to define `special handling of NumPy arrays for equality checks <https://www.attrs.org/en/stable/comparison.html#customization>`_, or allows more ways to `plug into the initialization process <https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization>`_.
+
+For more details, please refer to our `comparison page <https://www.attrs.org/en/stable/why.html#data-classes>`_.
+
+
+.. -getting-help-
+
+Getting Help
+============
+
+Please use the ``python-attrs`` tag on `Stack Overflow <https://stackoverflow.com/questions/tagged/python-attrs>`_ to get help.
+
+Answering questions of your fellow developers is also a great way to help the project!
+
+
+.. -project-information-
+
+Project Information
+===================
+
+``attrs`` is released under the `MIT <https://choosealicense.com/licenses/mit/>`_ license,
+its documentation lives at `Read the Docs <https://www.attrs.org/>`_,
+the code on `GitHub <https://github.com/python-attrs/attrs>`_,
+and the latest release on `PyPI <https://pypi.org/project/attrs/>`_.
+It’s rigorously tested on Python 2.7, 3.5+, and PyPy.
+
+We collect information on **third-party extensions** in our `wiki <https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs>`_.
+Feel free to browse and add your own!
+
+If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide <https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md>`_ to get you started!
+
+
+``attrs`` for Enterprise
+------------------------
+
+Available as part of the Tidelift Subscription.
+
+The maintainers of ``attrs`` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications.
+Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use.
+`Learn more. <https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=referral&utm_campaign=enterprise&utm_term=repo>`_
diff --git a/testing/web-platform/tests/tools/third_party/attrs/changelog.d/.gitignore b/testing/web-platform/tests/tools/third_party/attrs/changelog.d/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/changelog.d/.gitignore
diff --git a/testing/web-platform/tests/tools/third_party/attrs/changelog.d/towncrier_template.rst b/testing/web-platform/tests/tools/third_party/attrs/changelog.d/towncrier_template.rst
new file mode 100644
index 0000000000..29ca74c4e8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/changelog.d/towncrier_template.rst
@@ -0,0 +1,35 @@
+{% for section, _ in sections.items() %}
+{% set underline = underlines[0] %}{% if section %}{{section}}
+{{ underline * section|length }}{% set underline = underlines[1] %}
+
+{% endif %}
+
+{% if sections[section] %}
+{% for category, val in definitions.items() if category in sections[section]%}
+{{ definitions[category]['name'] }}
+{{ underline * definitions[category]['name']|length }}
+
+{% if definitions[category]['showcontent'] %}
+{% for text, values in sections[section][category].items() %}
+- {{ text }}
+ {{ values|join(',\n ') }}
+{% endfor %}
+
+{% else %}
+- {{ sections[section][category]['']|join(', ') }}
+
+{% endif %}
+{% if sections[section][category]|length == 0 %}
+No significant changes.
+
+{% else %}
+{% endif %}
+
+{% endfor %}
+{% else %}
+No significant changes.
+
+
+{% endif %}
+{% endfor %}
+----
diff --git a/testing/web-platform/tests/tools/third_party/attrs/conftest.py b/testing/web-platform/tests/tools/third_party/attrs/conftest.py
new file mode 100644
index 0000000000..0d539a115c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/conftest.py
@@ -0,0 +1,29 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+from hypothesis import HealthCheck, settings
+
+from attr._compat import PY36, PY310
+
+
+def pytest_configure(config):
+ # HealthCheck.too_slow causes more trouble than good -- especially in CIs.
+ settings.register_profile(
+ "patience", settings(suppress_health_check=[HealthCheck.too_slow])
+ )
+ settings.load_profile("patience")
+
+
+collect_ignore = []
+if not PY36:
+ collect_ignore.extend(
+ [
+ "tests/test_annotations.py",
+ "tests/test_hooks.py",
+ "tests/test_init_subclass.py",
+ "tests/test_next_gen.py",
+ ]
+ )
+if not PY310:
+ collect_ignore.extend(["tests/test_pattern_matching.py"])
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/Makefile b/testing/web-platform/tests/tools/third_party/attrs/docs/Makefile
new file mode 100644
index 0000000000..3143891daf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/attrs.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/attrs.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/attrs"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/attrs"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.png b/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.png
new file mode 100644
index 0000000000..11b6e6fe3f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.svg b/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.svg
new file mode 100644
index 0000000000..b02ae6c025
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo.svg
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 142 118" version="1.1" fill="#222">
+ <path d="M 88.984375 4.2460938 L 88.269531 6.40625 A 13.133 13.133 0 0 0 85.244141 6.9023438 L 83.9375 5.0625 L 83.699219 5.1523438 A 14.916 14.916 0 0 0 80.007812 7.0859375 L 79.8125 7.2265625 L 80.507812 9.40625 A 12.958 12.958 0 0 0 78.490234 11.496094 L 76.351562 10.785156 L 76.214844 10.996094 A 15.179 15.179 0 0 0 74.369141 14.8125 L 74.304688 15.035156 L 76.15625 16.398438 C 76.14425 16.418437 76.140719 16.441031 76.136719 16.457031 A 12.874 12.874 0 0 0 75.796875 19.035156 L 73.640625 19.71875 L 73.652344 19.964844 C 73.706344 21.432844 73.959109 22.867563 74.412109 24.226562 L 74.488281 24.453125 L 76.796875 24.464844 C 77.109875 25.214844 77.488594 25.930281 77.933594 26.613281 L 76.585938 28.441406 L 76.734375 28.636719 A 15.028 15.028 0 0 0 79.863281 31.710938 L 80.054688 31.851562 L 81.921875 30.515625 C 82.562875 30.917625 83.24975 31.265687 83.96875 31.554688 L 83.951172 33.835938 L 84.183594 33.910156 C 84.570594 34.031156 84.960281 34.144188 85.363281 34.242188 C 86.425281 34.488188 87.48425 34.621531 88.53125 34.644531 L 88.773438 34.648438 L 89.490234 32.484375 A 12.819 12.819 0 0 0 91.787109 32.167969 L 93.123047 34.03125 L 93.355469 33.957031 A 15.097 15.097 0 0 0 97.300781 32.070312 L 97.503906 31.933594 L 96.824219 29.773438 A 13.195 13.195 0 0 0 98.628906 28.085938 L 100.8125 28.8125 L 100.95508 28.621094 A 14.78 14.78 0 0 0 103.04688 24.859375 L 103.13672 24.621094 L 101.32031 23.285156 C 101.40631 23.008156 101.48078 22.726313 101.55078 22.445312 C 101.69178 21.832313 101.78875 21.226 101.84375 20.625 L 104.0332 19.929688 L 104.0332 19.691406 C 104.0332 19.605406 104.04297 19.518687 104.04297 19.429688 A 15.365 15.365 0 0 0 103.51953 15.5 L 103.45117 15.257812 L 101.19922 15.246094 A 13.253 13.253 0 0 0 99.941406 12.582031 L 101.29297 10.738281 L 101.15625 10.546875 A 15.367 15.367 0 0 0 98.287109 7.5429688 L 98.09375 7.3867188 L 96.253906 8.703125 A 13.082 13.082 0 0 0 93.53125 7.265625 L 93.542969 5 L 93.3125 4.9257812 A 18.186 18.186 0 0 0 92.320312 4.6523438 A 15.815 15.815 0 0 0 89.234375 4.25 L 88.984375 4.2460938 z M 88.759766 15.541016 A 3.914 3.914 0 0 1 89.740234 15.644531 A 3.913 3.913 0 0 1 92.753906 19.441406 C 92.753906 19.742406 92.722344 20.04275 92.652344 20.34375 A 3.92 3.92 0 0 1 88.847656 23.359375 A 3.72 3.72 0 0 1 87.949219 23.25 C 86.144219 22.836 84.9375 21.226125 84.9375 19.453125 C 84.9375 19.156125 84.967203 18.858688 85.033203 18.554688 A 3.914 3.914 0 0 1 88.759766 15.541016 z "/>
+ <path d="M 60.488281 22.824219 C 58.968281 22.824219 57.488594 22.98425 56.058594 23.28125 L 55.78125 23.332031 L 55.488281 26.582031 C 54.023281 26.992031 52.624219 27.5785 51.324219 28.3125 L 48.886719 26.179688 L 48.648438 26.335938 A 21.852 21.852 0 0 0 44.152344 30.230469 L 43.972656 30.4375 L 45.65625 33.257812 A 18.478 18.478 0 0 0 43.46875 36.933594 L 40.248047 36.644531 L 40.15625 36.910156 A 21.157 21.157 0 0 0 38.84375 42.828125 L 38.820312 43.09375 L 41.855469 44.390625 C 41.851469 44.437625 41.851562 44.488063 41.851562 44.539062 C 41.851562 45.828063 41.988281 47.093687 42.238281 48.304688 L 39.455078 49.960938 L 39.527344 50.234375 A 21.58 21.58 0 0 0 41.980469 55.90625 L 42.119141 56.132812 L 45.34375 55.402344 A 18.763 18.763 0 0 0 47.714844 58.105469 L 46.4375 61.097656 L 46.648438 61.277344 A 21.703 21.703 0 0 0 52.007812 64.535156 L 52.248047 64.640625 L 54.425781 62.160156 C 55.484781 62.527156 56.601281 62.80075 57.738281 62.96875 L 58.464844 66.15625 L 58.744141 66.175781 C 59.307141 66.222781 59.894281 66.253906 60.488281 66.253906 C 62.042281 66.253906 63.558437 66.086437 65.023438 65.773438 L 65.296875 65.714844 L 65.589844 62.460938 A 19.053 19.053 0 0 0 68.792969 61.21875 L 71.269531 63.382812 L 71.503906 63.246094 A 21.892 21.892 0 0 0 76.378906 59.328125 L 76.574219 59.125 L 74.908203 56.335938 A 18.426 18.426 0 0 0 76.9375 53.289062 L 80.230469 53.585938 L 80.335938 53.335938 A 21.627 21.627 0 0 0 82.007812 47.414062 L 82.042969 47.128906 L 79.066406 45.859375 C 79.097406 45.430375 79.119141 44.988062 79.119141 44.539062 C 79.119141 43.625062 79.054781 42.734375 78.925781 41.859375 L 81.757812 40.171875 L 81.699219 39.90625 A 21.733 21.733 0 0 0 79.613281 34.246094 L 79.476562 33.992188 L 76.320312 34.714844 A 18.63 18.63 0 0 0 73.617188 31.320312 L 74.902344 28.308594 L 74.701172 28.132812 A 22.087 22.087 0 0 0 69.726562 24.886719 L 69.472656 24.769531 L 67.335938 27.210938 A 18.403 18.403 0 0 0 62.949219 26.074219 L 62.222656 22.898438 L 61.945312 22.882812 A 19.927 19.927 0 0 0 60.488281 22.824219 z M 60.488281 38.824219 C 63.644281 38.836219 66.199219 41.387062 66.199219 44.539062 A 5.715 5.715 0 0 1 60.488281 50.253906 A 5.717 5.717 0 0 1 54.773438 44.539062 A 5.725 5.725 0 0 1 60.488281 38.824219 z "/>
+ <path d="m 134.226,94.281 c 0,0 0.445,2.621 -0.574,7.356 -1.024,4.796 -2.559,7.351 -2.559,7.351 a 31.76,31.76 0 0 1 -10.809,1.922 c -3.773,0 -7.16,-0.707 -9.976,-1.922 0,0 -0.383,-1.726 0.129,-4.988 1.406,0.387 6.457,1.793 10.933,1.793 2.497,0 5.375,-0.703 5.375,-0.703 0,0 0.704,-1.153 1.149,-3.453 0.512,-2.305 0.32,-3.454 0.32,-3.454 0,0 -2.558,-0.703 -5.051,-0.703 -3.902,0 -7.226,-0.64 -10.039,-1.855 0,0 -0.386,-2.879 0.383,-6.524 0.766,-3.707 2.43,-6.585 2.43,-6.585 3.324,-1.153 7.035,-1.856 10.808,-1.856 3.77,0 7.161,0.703 9.973,1.856 0,0.128 0.387,2.046 -0.062,5.179 -1.536,-0.449 -7.165,-1.918 -11,-1.918 -2.493,0 -5.372,0.703 -5.372,0.703 0,0 -0.64,1.024 -0.957,2.621 -0.32,1.598 -0.195,2.622 -0.195,2.622 0,0 2.496,0.64 5.117,0.703 3.774,0 7.164,0.64 9.977,1.855 z"/>
+ <path d="m 105.511,80.66 c 1.984,0 3.84,0.191 5.629,0.578 -0.703,1.727 -1.469,3.324 -2.367,4.86 -1.406,-0.192 -2.813,-0.321 -4.348,-0.321 -2.492,0 -5.242,0.703 -5.242,0.703 0,0 -1.856,6.075 -2.496,9.274 L 93.62,110.269 H 87.8 l 3.07,-14.515 a 84.252,84.252 0 0 1 3.836,-13.238 c 3.325,-1.153 7.035,-1.856 10.805,-1.856 z"/>
+ <path d="m 77.374,105.793 c 2.817,0 5.629,-0.512 7.867,-1.024 -0.765,1.981 -1.664,3.774 -2.621,5.5 -2.046,0.383 -4.156,0.641 -6.332,0.641 -3.773,0 -7.097,-0.707 -9.91,-1.922 0.125,-3.965 0.766,-8.441 1.789,-13.234 l 1.918,-9.082 h -6.457 c 0.703,-1.789 1.469,-3.453 2.492,-5.051 h 5.055 l 1.34,-6.332 a 31.365,31.365 0 0 1 6.074,-1.535 l -1.66,7.867 h 11.125 c -0.703,1.789 -1.535,3.516 -2.492,5.051 h -9.719 l -1.922,9.082 c -0.703,3.262 -1.469,9.336 -1.469,9.336 0,0 2.493,0.703 4.922,0.703 z"/>
+ <path d="m 49.878,105.793 c 2.813,0 5.629,-0.512 7.867,-1.024 -0.769,1.981 -1.664,3.774 -2.621,5.5 -2.047,0.383 -4.16,0.641 -6.332,0.641 -3.773,0 -7.097,-0.707 -9.914,-1.922 0.129,-3.965 0.77,-8.441 1.793,-13.234 l 1.918,-9.082 h -6.461 c 0.707,-1.789 1.473,-3.453 2.496,-5.051 h 5.051 l 1.344,-6.332 a 31.365,31.365 0 0 1 6.074,-1.535 l -1.66,7.867 h 11.125 c -0.703,1.789 -1.535,3.516 -2.492,5.051 h -9.723 l -1.918,9.082 c -0.703,3.262 -1.469,9.336 -1.469,9.336 0,0 2.493,0.703 4.922,0.703 z"/>
+ <path d="M 22.574219 80.660156 C 18.800219 80.660156 15.093625 81.362625 11.765625 82.515625 C 11.128625 84.112625 10.616109 85.715406 10.037109 87.441406 C 12.022109 86.863406 16.624281 85.777344 21.488281 85.777344 C 23.980281 85.777344 26.476562 86.480469 26.476562 86.480469 C 26.476562 86.480469 26.089531 89.101062 25.644531 91.789062 C 23.980531 91.469062 22.191938 91.277344 20.335938 91.277344 A 32.101 32.101 0 0 0 9.4648438 93.195312 C 9.4648437 93.195312 7.8003437 96.328 6.7773438 101.125 C 5.7533437 105.855 6.140625 108.98828 6.140625 108.98828 C 8.952625 110.20328 12.343281 110.91016 16.113281 110.91016 A 31.74 31.74 0 0 0 26.921875 108.98828 C 28.456875 105.02328 29.734813 100.54691 30.757812 95.753906 C 31.718813 91.018906 32.359781 86.542625 32.550781 82.515625 C 29.734781 81.362625 26.347219 80.660156 22.574219 80.660156 z M 19.248047 96.390625 A 21.116 21.116 0 0 1 24.619141 97.09375 C 23.850141 100.42175 22.511719 105.08984 22.511719 105.08984 C 22.511719 105.08984 19.951172 105.79297 17.201172 105.79297 C 14.705172 105.79297 12.152344 105.08984 12.152344 105.08984 C 12.152344 105.08984 12.085656 103.426 12.597656 101.125 C 13.109656 98.758 13.8125 97.09375 13.8125 97.09375 C 13.8125 97.09375 16.752047 96.390625 19.248047 96.390625 z "/>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo_white.svg b/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo_white.svg
new file mode 100644
index 0000000000..daad798da0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/_static/attrs_logo_white.svg
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 142 118" version="1.1" fill="#fff">
+ <path d="M 88.984375 4.2460938 L 88.269531 6.40625 A 13.133 13.133 0 0 0 85.244141 6.9023438 L 83.9375 5.0625 L 83.699219 5.1523438 A 14.916 14.916 0 0 0 80.007812 7.0859375 L 79.8125 7.2265625 L 80.507812 9.40625 A 12.958 12.958 0 0 0 78.490234 11.496094 L 76.351562 10.785156 L 76.214844 10.996094 A 15.179 15.179 0 0 0 74.369141 14.8125 L 74.304688 15.035156 L 76.15625 16.398438 C 76.14425 16.418437 76.140719 16.441031 76.136719 16.457031 A 12.874 12.874 0 0 0 75.796875 19.035156 L 73.640625 19.71875 L 73.652344 19.964844 C 73.706344 21.432844 73.959109 22.867563 74.412109 24.226562 L 74.488281 24.453125 L 76.796875 24.464844 C 77.109875 25.214844 77.488594 25.930281 77.933594 26.613281 L 76.585938 28.441406 L 76.734375 28.636719 A 15.028 15.028 0 0 0 79.863281 31.710938 L 80.054688 31.851562 L 81.921875 30.515625 C 82.562875 30.917625 83.24975 31.265687 83.96875 31.554688 L 83.951172 33.835938 L 84.183594 33.910156 C 84.570594 34.031156 84.960281 34.144188 85.363281 34.242188 C 86.425281 34.488188 87.48425 34.621531 88.53125 34.644531 L 88.773438 34.648438 L 89.490234 32.484375 A 12.819 12.819 0 0 0 91.787109 32.167969 L 93.123047 34.03125 L 93.355469 33.957031 A 15.097 15.097 0 0 0 97.300781 32.070312 L 97.503906 31.933594 L 96.824219 29.773438 A 13.195 13.195 0 0 0 98.628906 28.085938 L 100.8125 28.8125 L 100.95508 28.621094 A 14.78 14.78 0 0 0 103.04688 24.859375 L 103.13672 24.621094 L 101.32031 23.285156 C 101.40631 23.008156 101.48078 22.726313 101.55078 22.445312 C 101.69178 21.832313 101.78875 21.226 101.84375 20.625 L 104.0332 19.929688 L 104.0332 19.691406 C 104.0332 19.605406 104.04297 19.518687 104.04297 19.429688 A 15.365 15.365 0 0 0 103.51953 15.5 L 103.45117 15.257812 L 101.19922 15.246094 A 13.253 13.253 0 0 0 99.941406 12.582031 L 101.29297 10.738281 L 101.15625 10.546875 A 15.367 15.367 0 0 0 98.287109 7.5429688 L 98.09375 7.3867188 L 96.253906 8.703125 A 13.082 13.082 0 0 0 93.53125 7.265625 L 93.542969 5 L 93.3125 4.9257812 A 18.186 18.186 0 0 0 92.320312 4.6523438 A 15.815 15.815 0 0 0 89.234375 4.25 L 88.984375 4.2460938 z M 88.759766 15.541016 A 3.914 3.914 0 0 1 89.740234 15.644531 A 3.913 3.913 0 0 1 92.753906 19.441406 C 92.753906 19.742406 92.722344 20.04275 92.652344 20.34375 A 3.92 3.92 0 0 1 88.847656 23.359375 A 3.72 3.72 0 0 1 87.949219 23.25 C 86.144219 22.836 84.9375 21.226125 84.9375 19.453125 C 84.9375 19.156125 84.967203 18.858688 85.033203 18.554688 A 3.914 3.914 0 0 1 88.759766 15.541016 z "/>
+ <path d="M 60.488281 22.824219 C 58.968281 22.824219 57.488594 22.98425 56.058594 23.28125 L 55.78125 23.332031 L 55.488281 26.582031 C 54.023281 26.992031 52.624219 27.5785 51.324219 28.3125 L 48.886719 26.179688 L 48.648438 26.335938 A 21.852 21.852 0 0 0 44.152344 30.230469 L 43.972656 30.4375 L 45.65625 33.257812 A 18.478 18.478 0 0 0 43.46875 36.933594 L 40.248047 36.644531 L 40.15625 36.910156 A 21.157 21.157 0 0 0 38.84375 42.828125 L 38.820312 43.09375 L 41.855469 44.390625 C 41.851469 44.437625 41.851562 44.488063 41.851562 44.539062 C 41.851562 45.828063 41.988281 47.093687 42.238281 48.304688 L 39.455078 49.960938 L 39.527344 50.234375 A 21.58 21.58 0 0 0 41.980469 55.90625 L 42.119141 56.132812 L 45.34375 55.402344 A 18.763 18.763 0 0 0 47.714844 58.105469 L 46.4375 61.097656 L 46.648438 61.277344 A 21.703 21.703 0 0 0 52.007812 64.535156 L 52.248047 64.640625 L 54.425781 62.160156 C 55.484781 62.527156 56.601281 62.80075 57.738281 62.96875 L 58.464844 66.15625 L 58.744141 66.175781 C 59.307141 66.222781 59.894281 66.253906 60.488281 66.253906 C 62.042281 66.253906 63.558437 66.086437 65.023438 65.773438 L 65.296875 65.714844 L 65.589844 62.460938 A 19.053 19.053 0 0 0 68.792969 61.21875 L 71.269531 63.382812 L 71.503906 63.246094 A 21.892 21.892 0 0 0 76.378906 59.328125 L 76.574219 59.125 L 74.908203 56.335938 A 18.426 18.426 0 0 0 76.9375 53.289062 L 80.230469 53.585938 L 80.335938 53.335938 A 21.627 21.627 0 0 0 82.007812 47.414062 L 82.042969 47.128906 L 79.066406 45.859375 C 79.097406 45.430375 79.119141 44.988062 79.119141 44.539062 C 79.119141 43.625062 79.054781 42.734375 78.925781 41.859375 L 81.757812 40.171875 L 81.699219 39.90625 A 21.733 21.733 0 0 0 79.613281 34.246094 L 79.476562 33.992188 L 76.320312 34.714844 A 18.63 18.63 0 0 0 73.617188 31.320312 L 74.902344 28.308594 L 74.701172 28.132812 A 22.087 22.087 0 0 0 69.726562 24.886719 L 69.472656 24.769531 L 67.335938 27.210938 A 18.403 18.403 0 0 0 62.949219 26.074219 L 62.222656 22.898438 L 61.945312 22.882812 A 19.927 19.927 0 0 0 60.488281 22.824219 z M 60.488281 38.824219 C 63.644281 38.836219 66.199219 41.387062 66.199219 44.539062 A 5.715 5.715 0 0 1 60.488281 50.253906 A 5.717 5.717 0 0 1 54.773438 44.539062 A 5.725 5.725 0 0 1 60.488281 38.824219 z "/>
+ <path d="m 134.226,94.281 c 0,0 0.445,2.621 -0.574,7.356 -1.024,4.796 -2.559,7.351 -2.559,7.351 a 31.76,31.76 0 0 1 -10.809,1.922 c -3.773,0 -7.16,-0.707 -9.976,-1.922 0,0 -0.383,-1.726 0.129,-4.988 1.406,0.387 6.457,1.793 10.933,1.793 2.497,0 5.375,-0.703 5.375,-0.703 0,0 0.704,-1.153 1.149,-3.453 0.512,-2.305 0.32,-3.454 0.32,-3.454 0,0 -2.558,-0.703 -5.051,-0.703 -3.902,0 -7.226,-0.64 -10.039,-1.855 0,0 -0.386,-2.879 0.383,-6.524 0.766,-3.707 2.43,-6.585 2.43,-6.585 3.324,-1.153 7.035,-1.856 10.808,-1.856 3.77,0 7.161,0.703 9.973,1.856 0,0.128 0.387,2.046 -0.062,5.179 -1.536,-0.449 -7.165,-1.918 -11,-1.918 -2.493,0 -5.372,0.703 -5.372,0.703 0,0 -0.64,1.024 -0.957,2.621 -0.32,1.598 -0.195,2.622 -0.195,2.622 0,0 2.496,0.64 5.117,0.703 3.774,0 7.164,0.64 9.977,1.855 z"/>
+ <path d="m 105.511,80.66 c 1.984,0 3.84,0.191 5.629,0.578 -0.703,1.727 -1.469,3.324 -2.367,4.86 -1.406,-0.192 -2.813,-0.321 -4.348,-0.321 -2.492,0 -5.242,0.703 -5.242,0.703 0,0 -1.856,6.075 -2.496,9.274 L 93.62,110.269 H 87.8 l 3.07,-14.515 a 84.252,84.252 0 0 1 3.836,-13.238 c 3.325,-1.153 7.035,-1.856 10.805,-1.856 z"/>
+ <path d="m 77.374,105.793 c 2.817,0 5.629,-0.512 7.867,-1.024 -0.765,1.981 -1.664,3.774 -2.621,5.5 -2.046,0.383 -4.156,0.641 -6.332,0.641 -3.773,0 -7.097,-0.707 -9.91,-1.922 0.125,-3.965 0.766,-8.441 1.789,-13.234 l 1.918,-9.082 h -6.457 c 0.703,-1.789 1.469,-3.453 2.492,-5.051 h 5.055 l 1.34,-6.332 a 31.365,31.365 0 0 1 6.074,-1.535 l -1.66,7.867 h 11.125 c -0.703,1.789 -1.535,3.516 -2.492,5.051 h -9.719 l -1.922,9.082 c -0.703,3.262 -1.469,9.336 -1.469,9.336 0,0 2.493,0.703 4.922,0.703 z"/>
+ <path d="m 49.878,105.793 c 2.813,0 5.629,-0.512 7.867,-1.024 -0.769,1.981 -1.664,3.774 -2.621,5.5 -2.047,0.383 -4.16,0.641 -6.332,0.641 -3.773,0 -7.097,-0.707 -9.914,-1.922 0.129,-3.965 0.77,-8.441 1.793,-13.234 l 1.918,-9.082 h -6.461 c 0.707,-1.789 1.473,-3.453 2.496,-5.051 h 5.051 l 1.344,-6.332 a 31.365,31.365 0 0 1 6.074,-1.535 l -1.66,7.867 h 11.125 c -0.703,1.789 -1.535,3.516 -2.492,5.051 h -9.723 l -1.918,9.082 c -0.703,3.262 -1.469,9.336 -1.469,9.336 0,0 2.493,0.703 4.922,0.703 z"/>
+ <path d="M 22.574219 80.660156 C 18.800219 80.660156 15.093625 81.362625 11.765625 82.515625 C 11.128625 84.112625 10.616109 85.715406 10.037109 87.441406 C 12.022109 86.863406 16.624281 85.777344 21.488281 85.777344 C 23.980281 85.777344 26.476562 86.480469 26.476562 86.480469 C 26.476562 86.480469 26.089531 89.101062 25.644531 91.789062 C 23.980531 91.469062 22.191938 91.277344 20.335938 91.277344 A 32.101 32.101 0 0 0 9.4648438 93.195312 C 9.4648437 93.195312 7.8003437 96.328 6.7773438 101.125 C 5.7533437 105.855 6.140625 108.98828 6.140625 108.98828 C 8.952625 110.20328 12.343281 110.91016 16.113281 110.91016 A 31.74 31.74 0 0 0 26.921875 108.98828 C 28.456875 105.02328 29.734813 100.54691 30.757812 95.753906 C 31.718813 91.018906 32.359781 86.542625 32.550781 82.515625 C 29.734781 81.362625 26.347219 80.660156 22.574219 80.660156 z M 19.248047 96.390625 A 21.116 21.116 0 0 1 24.619141 97.09375 C 23.850141 100.42175 22.511719 105.08984 22.511719 105.08984 C 22.511719 105.08984 19.951172 105.79297 17.201172 105.79297 C 14.705172 105.79297 12.152344 105.08984 12.152344 105.08984 C 12.152344 105.08984 12.085656 103.426 12.597656 101.125 C 13.109656 98.758 13.8125 97.09375 13.8125 97.09375 C 13.8125 97.09375 16.752047 96.390625 19.248047 96.390625 z "/>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/api.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/api.rst
new file mode 100644
index 0000000000..02aed52ad5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/api.rst
@@ -0,0 +1,826 @@
+API Reference
+=============
+
+.. currentmodule:: attr
+
+``attrs`` works by decorating a class using `attrs.define` or `attr.s` and then optionally defining attributes on the class using `attrs.field`, `attr.ib`, or a type annotation.
+
+If you're confused by the many names, please check out `names` for clarification.
+
+What follows is the API explanation; if you'd like a more hands-on introduction, have a look at `examples`.
+
+As of version 21.3.0, ``attrs`` consists of **two** top-level package names:
+
+- The classic ``attr`` that powered the venerable `attr.s` and `attr.ib`
+- The modern ``attrs`` that only contains the most modern APIs and relies on `attrs.define` and `attrs.field` to define your classes.
+ Additionally it offers some ``attr`` APIs with nicer defaults (e.g. `attrs.asdict`).
+ Using this namespace requires Python 3.6 or later.
+
+The ``attrs`` namespace is built *on top of* ``attr`` which will *never* go away.
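+
+For example, the modern names are plain re-exports of the classic ones (a brief sketch, not run as a doctest)::
+
+   import attr
+   import attrs
+
+   # Both spellings refer to the very same objects.
+   assert attrs.define is attr.define
+   assert attrs.field is attr.field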
+
+
+Core
+----
+
+.. note::
+
+ Please note that the ``attrs`` namespace has been added in version 21.3.0.
+ Most of the objects are simply re-imported from ``attr``.
+ Therefore if a class, method, or function claims that it has been added in an older version, it is only available in the ``attr`` namespace.
+
+.. autodata:: attrs.NOTHING
+
+.. autofunction:: attrs.define
+
+.. function:: attrs.mutable(same_as_define)
+
+ Alias for `attrs.define`.
+
+ .. versionadded:: 20.1.0
+
+.. function:: attrs.frozen(same_as_define)
+
+ Behaves the same as `attrs.define` but sets *frozen=True* and *on_setattr=None*.
+
+ .. versionadded:: 20.1.0
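+
+   For example (a brief sketch, not run as a doctest)::
+
+      import attrs
+
+      @attrs.frozen
+      class Point:
+          x: int
+          y: int
+
+      p = Point(1, 2)
+      p.x = 3  # raises attrs.exceptions.FrozenInstanceError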
+
+.. autofunction:: attrs.field
+
+.. function:: define
+
+ Old import path for `attrs.define`.
+
+.. function:: mutable
+
+ Old import path for `attrs.mutable`.
+
+.. function:: frozen
+
+ Old import path for `attrs.frozen`.
+
+.. function:: field
+
+ Old import path for `attrs.field`.
+
+.. autoclass:: attrs.Attribute
+ :members: evolve
+
+ For example:
+
+ .. doctest::
+
+ >>> import attr
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib()
+ >>> attr.fields(C).x
+ Attribute(name='x', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None)
+
+
+.. autofunction:: attrs.make_class
+
+ This is handy if you want to programmatically create classes.
+
+ For example:
+
+ .. doctest::
+
+ >>> C1 = attr.make_class("C1", ["x", "y"])
+ >>> C1(1, 2)
+ C1(x=1, y=2)
+ >>> C2 = attr.make_class("C2", {"x": attr.ib(default=42),
+ ... "y": attr.ib(default=attr.Factory(list))})
+ >>> C2()
+ C2(x=42, y=[])
+
+
+.. autoclass:: attrs.Factory
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib(default=attr.Factory(list))
+ ... y = attr.ib(default=attr.Factory(
+ ... lambda self: set(self.x),
+ ... takes_self=True)
+ ... )
+ >>> C()
+ C(x=[], y=set())
+ >>> C([1, 2, 3])
+ C(x=[1, 2, 3], y={1, 2, 3})
+
+
+Classic
+~~~~~~~
+
+.. data:: attr.NOTHING
+
+ Same as `attrs.NOTHING`.
+
+.. autofunction:: attr.s(these=None, repr_ns=None, repr=None, cmp=None, hash=None, init=None, slots=False, frozen=False, weakref_slot=True, str=False, auto_attribs=False, kw_only=False, cache_hash=False, auto_exc=False, eq=None, order=None, auto_detect=False, collect_by_mro=False, getstate_setstate=None, on_setattr=None, field_transformer=None, match_args=True)
+
+ .. note::
+
+ ``attrs`` also comes with a serious business alias ``attr.attrs``.
+
+ For example:
+
+ .. doctest::
+
+ >>> import attr
+ >>> @attr.s
+ ... class C(object):
+ ... _private = attr.ib()
+ >>> C(private=42)
+ C(_private=42)
+ >>> class D(object):
+ ... def __init__(self, x):
+ ... self.x = x
+ >>> D(1)
+ <D object at ...>
+ >>> D = attr.s(these={"x": attr.ib()}, init=False)(D)
+ >>> D(1)
+ D(x=1)
+ >>> @attr.s(auto_exc=True)
+ ... class Error(Exception):
+ ... x = attr.ib()
+ ... y = attr.ib(default=42, init=False)
+ >>> Error("foo")
+ Error(x='foo', y=42)
+ >>> raise Error("foo")
+ Traceback (most recent call last):
+ ...
+ Error: ('foo', 42)
+ >>> raise ValueError("foo", 42) # for comparison
+ Traceback (most recent call last):
+ ...
+ ValueError: ('foo', 42)
+
+
+.. autofunction:: attr.ib
+
+ .. note::
+
+ ``attrs`` also comes with a serious business alias ``attr.attrib``.
+
+ The object returned by `attr.ib` also allows for setting the default and the validator using decorators:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib()
+ ... y = attr.ib()
+ ... @x.validator
+ ... def _any_name_except_a_name_of_an_attribute(self, attribute, value):
+ ... if value < 0:
+ ... raise ValueError("x must be positive")
+ ... @y.default
+ ... def _any_name_except_a_name_of_an_attribute(self):
+ ... return self.x + 1
+ >>> C(1)
+ C(x=1, y=2)
+ >>> C(-1)
+ Traceback (most recent call last):
+ ...
+ ValueError: x must be positive
+
+
+
+Exceptions
+----------
+
+All exceptions are available from both ``attr.exceptions`` and ``attrs.exceptions`` and are the same thing.
+That means that it doesn't matter from which namespace they've been raised and/or caught:
+
+.. doctest::
+
+ >>> import attrs, attr
+ >>> try:
+ ... raise attrs.exceptions.FrozenError()
+ ... except attr.exceptions.FrozenError:
+ ... print("this works!")
+ this works!
+
+.. autoexception:: attrs.exceptions.PythonTooOldError
+.. autoexception:: attrs.exceptions.FrozenError
+.. autoexception:: attrs.exceptions.FrozenInstanceError
+.. autoexception:: attrs.exceptions.FrozenAttributeError
+.. autoexception:: attrs.exceptions.AttrsAttributeNotFoundError
+.. autoexception:: attrs.exceptions.NotAnAttrsClassError
+.. autoexception:: attrs.exceptions.DefaultAlreadySetError
+.. autoexception:: attrs.exceptions.UnannotatedAttributeError
+.. autoexception:: attrs.exceptions.NotCallableError
+
+ For example::
+
+ @attr.s(auto_attribs=True)
+ class C:
+ x: int
+ y = attr.ib() # <- ERROR!
+
+
+.. _helpers:
+
+Helpers
+-------
+
+``attrs`` comes with a bunch of helper methods that make working with it easier:
+
+.. autofunction:: attrs.cmp_using
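+
+   For example (a brief sketch, not run as a doctest)::
+
+      import attrs
+
+      @attrs.define
+      class User:
+          # compare names case-insensitively
+          name = attrs.field(
+              eq=attrs.cmp_using(eq=lambda a, b: a.lower() == b.lower())
+          )
+
+      assert User("Hynek") == User("hynek")
+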
+.. function:: attr.cmp_using
+
+ Same as `attrs.cmp_using`.
+
+.. autofunction:: attrs.fields
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib()
+ ... y = attr.ib()
+ >>> attrs.fields(C)
+ (Attribute(name='x', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None), Attribute(name='y', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None))
+ >>> attrs.fields(C)[1]
+ Attribute(name='y', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None)
+ >>> attrs.fields(C).y is attrs.fields(C)[1]
+ True
+
+.. function:: attr.fields
+
+ Same as `attrs.fields`.
+
+.. autofunction:: attrs.fields_dict
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib()
+ ... y = attr.ib()
+ >>> attrs.fields_dict(C)
+ {'x': Attribute(name='x', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None), 'y': Attribute(name='y', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None)}
+ >>> attr.fields_dict(C)['y']
+ Attribute(name='y', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False, inherited=False, on_setattr=None)
+ >>> attrs.fields_dict(C)['y'] is attrs.fields(C).y
+ True
+
+.. function:: attr.fields_dict
+
+ Same as `attrs.fields_dict`.
+
+.. autofunction:: attrs.has
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... pass
+ >>> attr.has(C)
+ True
+ >>> attr.has(object)
+ False
+
+.. function:: attr.has
+
+ Same as `attrs.has`.
+
+.. autofunction:: attrs.resolve_types
+
+ For example:
+
+ .. doctest::
+
+ >>> import typing
+ >>> @attrs.define
+ ... class A:
+ ... a: typing.List['A']
+ ... b: 'B'
+ ...
+ >>> @attrs.define
+ ... class B:
+ ... a: A
+ ...
+ >>> attrs.fields(A).a.type
+ typing.List[ForwardRef('A')]
+ >>> attrs.fields(A).b.type
+ 'B'
+ >>> attrs.resolve_types(A, globals(), locals())
+ <class 'A'>
+ >>> attrs.fields(A).a.type
+ typing.List[A]
+ >>> attrs.fields(A).b.type
+ <class 'B'>
+
+.. function:: attr.resolve_types
+
+ Same as `attrs.resolve_types`.
+
+.. autofunction:: attrs.asdict
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x: int
+ ... y: int
+ >>> attrs.asdict(C(1, C(2, 3)))
+ {'x': 1, 'y': {'x': 2, 'y': 3}}
+
+.. autofunction:: attr.asdict
+
+.. autofunction:: attrs.astuple
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attr.field()
+ ... y = attr.field()
+ >>> attrs.astuple(C(1,2))
+ (1, 2)
+
+.. autofunction:: attr.astuple
+
+
+``attrs`` includes some handy helpers for filtering the attributes in `attrs.asdict` and `attrs.astuple`:
+
+.. autofunction:: attrs.filters.include
+
+.. autofunction:: attrs.filters.exclude
+
+.. function:: attr.filters.include
+
+ Same as `attrs.filters.include`.
+
+.. function:: attr.filters.exclude
+
+ Same as `attrs.filters.exclude`.
+
+See :func:`attrs.asdict` for examples.
+
+All objects from ``attrs.filters`` are also available from ``attr.filters``.
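+
+For example (a brief sketch, not run as a doctest)::
+
+   import attrs
+
+   @attrs.define
+   class User:
+       login: str
+       id: int
+
+   u = User("jane", 42)
+
+   # drop the field named "id"
+   attrs.asdict(u, filter=attrs.filters.exclude(attrs.fields(User).id))
+   # -> {'login': 'jane'}
+
+   # keep only the int-valued fields
+   attrs.asdict(u, filter=attrs.filters.include(int))
+   # -> {'id': 42}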
+
+----
+
+.. autofunction:: attrs.evolve
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x: int
+ ... y: int
+ >>> i1 = C(1, 2)
+ >>> i1
+ C(x=1, y=2)
+ >>> i2 = attrs.evolve(i1, y=3)
+ >>> i2
+ C(x=1, y=3)
+ >>> i1 == i2
+ False
+
+ ``evolve`` creates a new instance using ``__init__``.
+ This fact has several implications:
+
+ * private attributes should be specified without the leading underscore, just like in ``__init__``.
+ * attributes with ``init=False`` can't be set with ``evolve``.
+ * the usual ``__init__`` validators will validate the new values.
+
+.. function:: attr.evolve
+
+ Same as `attrs.evolve`.
+
+.. autofunction:: attrs.validate
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define(on_setattr=attrs.setters.NO_OP)
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.instance_of(int))
+ >>> i = C(1)
+ >>> i.x = "1"
+ >>> attrs.validate(i)
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got '1' that is a <class 'str'>).", ...)
+
+.. function:: attr.validate
+
+ Same as `attrs.validate`.
+
+
+Validators can be globally disabled if you want to run them only in development and tests but not in production because you fear their performance impact:
+
+.. autofunction:: set_run_validators
+
+.. autofunction:: get_run_validators
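+
+For example (a brief sketch of the classic module-level switches; the newer ``attrs.validators.set_disabled`` is documented below)::
+
+   import attr
+
+   attr.set_run_validators(False)        # turn all validators off globally
+   assert attr.get_run_validators() is False
+   attr.set_run_validators(True)         # and back on again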
+
+
+.. _api_validators:
+
+Validators
+----------
+
+``attrs`` comes with some common validators in the ``attrs.validators`` module.
+All objects from ``attrs.validators`` are also available from ``attr.validators``.
+
+
+.. autofunction:: attrs.validators.lt
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.lt(42))
+ >>> C(41)
+ C(x=41)
+ >>> C(42)
+ Traceback (most recent call last):
+ ...
+ ValueError: ("'x' must be < 42: 42")
+
+.. autofunction:: attrs.validators.le
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C(object):
+ ... x = attrs.field(validator=attr.validators.le(42))
+ >>> C(42)
+ C(x=42)
+ >>> C(43)
+ Traceback (most recent call last):
+ ...
+ ValueError: ("'x' must be <= 42: 43")
+
+.. autofunction:: attrs.validators.ge
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.ge(42))
+ >>> C(42)
+ C(x=42)
+ >>> C(41)
+ Traceback (most recent call last):
+ ...
+ ValueError: ("'x' must be => 42: 41")
+
+.. autofunction:: attrs.validators.gt
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attr.field(validator=attrs.validators.gt(42))
+ >>> C(43)
+ C(x=43)
+ >>> C(42)
+ Traceback (most recent call last):
+ ...
+ ValueError: ("'x' must be > 42: 42")
+
+.. autofunction:: attrs.validators.max_len
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.max_len(4))
+ >>> C("spam")
+ C(x='spam')
+ >>> C("bacon")
+ Traceback (most recent call last):
+ ...
+ ValueError: ("Length of 'x' must be <= 4: 5")
+
+.. autofunction:: attrs.validators.instance_of
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.instance_of(int))
+ >>> C(42)
+ C(x=42)
+ >>> C("42")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <type 'int'> (got '42' that is a <type 'str'>).", Attribute(name='x', default=NOTHING, validator=<instance_of validator for type <type 'int'>>, type=None, kw_only=False), <type 'int'>, '42')
+ >>> C(None)
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <type 'int'> (got None that is a <type 'NoneType'>).", Attribute(name='x', default=NOTHING, validator=<instance_of validator for type <type 'int'>>, repr=True, cmp=True, hash=None, init=True, type=None, kw_only=False), <type 'int'>, None)
+
+.. autofunction:: attrs.validators.in_
+
+ For example:
+
+ .. doctest::
+
+ >>> import enum
+ >>> class State(enum.Enum):
+ ... ON = "on"
+ ... OFF = "off"
+ >>> @attrs.define
+ ... class C:
+ ... state = attrs.field(validator=attrs.validators.in_(State))
+ ... val = attrs.field(validator=attrs.validators.in_([1, 2, 3]))
+ >>> C(State.ON, 1)
+ C(state=<State.ON: 'on'>, val=1)
+ >>> C("on", 1)
+ Traceback (most recent call last):
+ ...
+ ValueError: 'state' must be in <enum 'State'> (got 'on')
+ >>> C(State.ON, 4)
+ Traceback (most recent call last):
+ ...
+ ValueError: 'val' must be in [1, 2, 3] (got 4)
+
+.. autofunction:: attrs.validators.provides
+
+.. autofunction:: attrs.validators.and_
+
+ For convenience, it's also possible to pass a list to `attrs.field`'s validator argument.
+
+ Thus the following two statements are equivalent::
+
+ x = attrs.field(validator=attrs.validators.and_(v1, v2, v3))
+ x = attrs.field(validator=[v1, v2, v3])
+
+.. autofunction:: attrs.validators.optional
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.optional(attr.validators.instance_of(int)))
+ >>> C(42)
+ C(x=42)
+ >>> C("42")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <type 'int'> (got '42' that is a <type 'str'>).", Attribute(name='x', default=NOTHING, validator=<instance_of validator for type <type 'int'>>, type=None, kw_only=False), <type 'int'>, '42')
+ >>> C(None)
+ C(x=None)
+
+
+.. autofunction:: attrs.validators.is_callable
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.is_callable())
+ >>> C(isinstance)
+ C(x=<built-in function isinstance>)
+ >>> C("not a callable")
+ Traceback (most recent call last):
+ ...
+ attr.exceptions.NotCallableError: 'x' must be callable (got 'not a callable' that is a <class 'str'>).
+
+
+.. autofunction:: attrs.validators.matches_re
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class User:
+ ... email = attrs.field(validator=attrs.validators.matches_re(
+ ... "(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"))
+ >>> User(email="user@example.com")
+ User(email='user@example.com')
+ >>> User(email="user@example.com@test.com")
+ Traceback (most recent call last):
+ ...
+ ValueError: ("'email' must match regex '(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\\\.[a-zA-Z0-9-.]+$)' ('user@example.com@test.com' doesn't)", Attribute(name='email', default=NOTHING, validator=<matches_re validator for pattern re.compile('(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\.[a-zA-Z0-9-.]+$)')>, repr=True, cmp=True, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False), re.compile('(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\.[a-zA-Z0-9-.]+$)'), 'user@example.com@test.com')
+
+
+.. autofunction:: attrs.validators.deep_iterable
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.deep_iterable(
+ ... member_validator=attrs.validators.instance_of(int),
+ ... iterable_validator=attrs.validators.instance_of(list)
+ ... ))
+ >>> C(x=[1, 2, 3])
+ C(x=[1, 2, 3])
+ >>> C(x=set([1, 2, 3]))
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'list'> (got {1, 2, 3} that is a <class 'set'>).", Attribute(name='x', default=NOTHING, validator=<deep_iterable validator for <instance_of validator for type <class 'list'>> iterables of <instance_of validator for type <class 'int'>>>, repr=True, cmp=True, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False), <class 'list'>, {1, 2, 3})
+ >>> C(x=[1, 2, "3"])
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got '3' that is a <class 'str'>).", Attribute(name='x', default=NOTHING, validator=<deep_iterable validator for <instance_of validator for type <class 'list'>> iterables of <instance_of validator for type <class 'int'>>>, repr=True, cmp=True, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False), <class 'int'>, '3')
+
+
+.. autofunction:: attrs.validators.deep_mapping
+
+ For example:
+
+ .. doctest::
+
+ >>> @attrs.define
+ ... class C:
+ ... x = attrs.field(validator=attrs.validators.deep_mapping(
+ ... key_validator=attrs.validators.instance_of(str),
+ ... value_validator=attrs.validators.instance_of(int),
+ ... mapping_validator=attrs.validators.instance_of(dict)
+ ... ))
+ >>> C(x={"a": 1, "b": 2})
+ C(x={'a': 1, 'b': 2})
+ >>> C(x=None)
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'dict'> (got None that is a <class 'NoneType'>).", Attribute(name='x', default=NOTHING, validator=<deep_mapping validator for objects mapping <instance_of validator for type <class 'str'>> to <instance_of validator for type <class 'int'>>>, repr=True, cmp=True, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False), <class 'dict'>, None)
+ >>> C(x={"a": 1.0, "b": 2})
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got 1.0 that is a <class 'float'>).", Attribute(name='x', default=NOTHING, validator=<deep_mapping validator for objects mapping <instance_of validator for type <class 'str'>> to <instance_of validator for type <class 'int'>>>, repr=True, cmp=True, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False), <class 'int'>, 1.0)
+ >>> C(x={"a": 1, 7: 2})
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'str'> (got 7 that is a <class 'int'>).", Attribute(name='x', default=NOTHING, validator=<deep_mapping validator for objects mapping <instance_of validator for type <class 'str'>> to <instance_of validator for type <class 'int'>>>, repr=True, cmp=True, hash=None, init=True, metadata=mappingproxy({}), type=None, converter=None, kw_only=False), <class 'str'>, 7)
+
+Validators can be both globally and locally disabled:
+
+.. autofunction:: attrs.validators.set_disabled
+
+.. autofunction:: attrs.validators.get_disabled
+
+.. autofunction:: attrs.validators.disabled
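+
+   For example (a brief sketch, not run as a doctest)::
+
+      import attrs
+
+      @attrs.define
+      class C:
+          x = attrs.field(validator=attrs.validators.instance_of(int))
+
+      with attrs.validators.disabled():
+          C("not an int")   # accepted: validation is off inside the block
+
+      C("not an int")       # raises TypeError again outside the block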
+
+
+Converters
+----------
+
+All objects from ``attrs.converters`` are also available from ``attr.converters``.
+
+.. autofunction:: attrs.converters.pipe
+
+ For convenience, it's also possible to pass a list to `attr.ib`'s converter argument.
+
+ Thus the following two statements are equivalent::
+
+      x = attr.ib(converter=attr.converters.pipe(c1, c2, c3))
+      x = attr.ib(converter=[c1, c2, c3])
+
+.. autofunction:: attrs.converters.optional
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib(converter=attr.converters.optional(int))
+ >>> C(None)
+ C(x=None)
+ >>> C(42)
+ C(x=42)
+
+
+.. autofunction:: attrs.converters.default_if_none
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib(
+ ... converter=attr.converters.default_if_none("")
+ ... )
+ >>> C(None)
+ C(x='')
+
+
+.. autofunction:: attrs.converters.to_bool
+
+ For example:
+
+ .. doctest::
+
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib(
+ ... converter=attr.converters.to_bool
+ ... )
+ >>> C("yes")
+ C(x=True)
+ >>> C(0)
+ C(x=False)
+ >>> C("foo")
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ ValueError: Cannot convert value to bool: foo
+
+
+
+.. _api_setters:
+
+Setters
+-------
+
+These are helpers that you can use together with `attrs.define`'s and `attrs.field`'s ``on_setattr`` arguments.
+All setters in ``attrs.setters`` are also available from ``attr.setters``.
+
+.. autofunction:: attrs.setters.frozen
+.. autofunction:: attrs.setters.validate
+.. autofunction:: attrs.setters.convert
+.. autofunction:: attrs.setters.pipe
+.. autodata:: attrs.setters.NO_OP
+
+ For example, only ``x`` is frozen here:
+
+ .. doctest::
+
+ >>> @attrs.define(on_setattr=attr.setters.frozen)
+ ... class C:
+ ... x = attr.field()
+ ... y = attr.field(on_setattr=attr.setters.NO_OP)
+ >>> c = C(1, 2)
+ >>> c.y = 3
+ >>> c.y
+ 3
+ >>> c.x = 4
+ Traceback (most recent call last):
+ ...
+ attrs.exceptions.FrozenAttributeError: ()
+
+ N.B. Please use `attrs.define`'s *frozen* argument (or `attrs.frozen`) to freeze whole classes; it is more efficient.
+
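+For example, ``attrs.setters.convert`` re-runs a field's converter on every assignment; a brief sketch with the classic ``attr.s``, which doesn't do that by default::
+
+   import attr
+
+   @attr.s
+   class C:
+       x = attr.ib(converter=int, on_setattr=attr.setters.convert)
+
+   c = C("1")   # the converter runs in __init__ ...
+   c.x = "2"    # ... and, thanks to the setter, on assignment too
+   assert c.x == 2
+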
+
+Deprecated APIs
+---------------
+
+.. _version-info:
+
+To help you write backward compatible code that doesn't throw warnings on modern releases, the ``attr`` module has an ``__version_info__`` attribute as of version 19.2.0.
+It behaves similarly to `sys.version_info` and is an instance of `VersionInfo`:
+
+.. autoclass:: VersionInfo
+
+ With its help you can write code like this:
+
+ >>> if getattr(attr, "__version_info__", (0,)) >= (19, 2):
+ ... cmp_off = {"eq": False}
+ ... else:
+ ... cmp_off = {"cmp": False}
+ >>> cmp_off == {"eq": False}
+ True
+ >>> @attr.s(**cmp_off)
+ ... class C(object):
+ ... pass
+
+
+----
+
+The serious business aliases used to be called ``attr.attributes`` and ``attr.attr``.
+There are no plans to remove them but they shouldn't be used in new code.
+
+.. autofunction:: assoc
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/changelog.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/changelog.rst
new file mode 100644
index 0000000000..565b0521d0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/changelog.rst
@@ -0,0 +1 @@
+.. include:: ../CHANGELOG.rst
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/comparison.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/comparison.rst
new file mode 100644
index 0000000000..760124ca3b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/comparison.rst
@@ -0,0 +1,66 @@
+Comparison
+==========
+
+By default, two instances of ``attrs`` classes are equal if all their fields are equal.
+For that, ``attrs`` writes ``__eq__`` and ``__ne__`` methods for you.
+
+Additionally, if you pass ``order=True`` (which is the default if you use the `attr.s` decorator), ``attrs`` will also create a full set of ordering methods that are based on the defined fields: ``__le__``, ``__lt__``, ``__ge__``, and ``__gt__``.
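+
+For example (a brief sketch using the modern API, which needs ``order=True`` explicitly)::
+
+   from attr import define
+
+   @define(order=True)
+   class Version:
+       major: int
+       minor: int
+
+   assert Version(1, 4) < Version(2, 0)   # fields are compared in definition order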
+
+
+.. _custom-comparison:
+
+Customization
+-------------
+
+As with other features, you can exclude fields from being involved in comparison operations:
+
+.. doctest::
+
+ >>> from attr import define, field
+
+ >>> @define
+ ... class C:
+ ... x: int
+ ... y: int = field(eq=False)
+
+ >>> C(1, 2) == C(1, 3)
+ True
+
+Additionally, you can pass a *callable* instead of a bool to both *eq* and *order*.
+It is then used as a key function like you may know from `sorted`:
+
+.. doctest::
+
+ >>> from attr import define, field
+
+ >>> @define
+ ... class S:
+ ... x: str = field(eq=str.lower)
+
+ >>> S("foo") == S("FOO")
+ True
+
+ >>> @define(order=True)
+ ... class C:
+ ... x: str = field(order=int)
+
+ >>> C("10") > C("2")
+ True
+
+This is especially useful when you have fields with objects that have atypical comparison properties.
+Common examples of such objects are `NumPy arrays <https://github.com/python-attrs/attrs/issues/435>`_.
+
+To save you unnecessary boilerplate, ``attrs`` comes with the `attr.cmp_using` helper to create such functions.
+For NumPy arrays it would look like this::
+
+ import numpy
+
+ @define(order=False)
+ class C:
+ an_array = field(eq=attr.cmp_using(eq=numpy.array_equal))
+
+
+.. warning::
+
+ Please note that *eq* and *order* are set *independently*, because *order* is `False` by default in `attrs.define` (but not in `attr.s`).
+ You can set both at once by using the *cmp* argument that we've undeprecated just for this use-case.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/conf.py b/testing/web-platform/tests/tools/third_party/attrs/docs/conf.py
new file mode 100644
index 0000000000..0cc80be6a6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/conf.py
@@ -0,0 +1,155 @@
+# SPDX-License-Identifier: MIT
+
+from importlib import metadata
+
+
+# -- General configuration ------------------------------------------------
+
+doctest_global_setup = """
+from attr import define, frozen, field, validators, Factory
+"""
+
+linkcheck_ignore = [
+ # We run into GitHub's rate limits.
+ r"https://github.com/.*/(issues|pull)/\d+",
+ # It never finds the anchor even though it's there.
+ "https://github.com/microsoft/pyright/blob/main/specs/"
+ "dataclass_transforms.md#attrs",
+]
+
+# In nitpick mode (-n), still ignore any of the following "broken" references
+# to non-types.
+nitpick_ignore = [
+ ("py:class", "Any value"),
+ ("py:class", "callable"),
+ ("py:class", "callables"),
+ ("py:class", "tuple of types"),
+]
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ "sphinx.ext.autodoc",
+ "sphinx.ext.doctest",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.todo",
+ "notfound.extension",
+]
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# The suffix of source filenames.
+source_suffix = ".rst"
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+project = "attrs"
+author = "Hynek Schlawack"
+copyright = f"2015, {author}"
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+
+# The full version, including alpha/beta/rc tags.
+release = metadata.version("attrs")
+# The short X.Y version.
+version = release.rsplit(".", 1)[0]
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ["_build"]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+default_role = "any"
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+add_function_parentheses = True
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+
+html_theme = "furo"
+html_theme_options = {
+ "sidebar_hide_name": True,
+ "light_logo": "attrs_logo.svg",
+ "dark_logo": "attrs_logo_white.svg",
+}
+
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# If false, no module index is generated.
+html_domain_indices = True
+
+# If false, no index is generated.
+html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+html_show_sourcelink = False
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "attrsdoc"
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [("index", "attrs", "attrs Documentation", ["Hynek Schlawack"], 1)]
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (
+ "index",
+ "attrs",
+ "attrs Documentation",
+ "Hynek Schlawack",
+ "attrs",
+        "Python Classes Without Boilerplate",
+ "Miscellaneous",
+ )
+]
+
+epub_description = "Python Classes Without Boilerplate"
+
+intersphinx_mapping = {
+ "https://docs.python.org/3": None,
+}
+
+# Allow non-local URIs so we can have images in CHANGELOG etc.
+suppress_warnings = ["image.nonlocal_uri"]
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/docutils.conf b/testing/web-platform/tests/tools/third_party/attrs/docs/docutils.conf
new file mode 100644
index 0000000000..db8ca82c74
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/docutils.conf
@@ -0,0 +1,3 @@
+[parsers]
+[restructuredtext parser]
+smart_quotes=yes
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/examples.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/examples.rst
new file mode 100644
index 0000000000..ba5343d4ad
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/examples.rst
@@ -0,0 +1,709 @@
+``attrs`` by Example
+====================
+
+
+Basics
+------
+
+The simplest possible usage is:
+
+.. doctest::
+
+ >>> from attrs import define
+ >>> @define
+ ... class Empty:
+ ... pass
+ >>> Empty()
+ Empty()
+ >>> Empty() == Empty()
+ True
+ >>> Empty() is Empty()
+ False
+
+So in other words: ``attrs`` is useful even without actual attributes!
+
+But you'll usually want some data on your classes, so let's add some:
+
+.. doctest::
+
+ >>> @define
+ ... class Coordinates:
+ ... x: int
+ ... y: int
+
+By default, all features are added, so you immediately have a fully functional data class with a nice ``repr`` string and comparison methods.
+
+.. doctest::
+
+ >>> c1 = Coordinates(1, 2)
+ >>> c1
+ Coordinates(x=1, y=2)
+ >>> c2 = Coordinates(x=2, y=1)
+ >>> c2
+ Coordinates(x=2, y=1)
+ >>> c1 == c2
+ False
+
+As shown, the generated ``__init__`` method allows for both positional and keyword arguments.
+
+For private attributes, ``attrs`` will strip the leading underscores for keyword arguments:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... _x: int
+ >>> C(x=1)
+ C(_x=1)
+
+If you want to initialize your private attributes yourself, you can do that too:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... _x: int = field(init=False, default=42)
+ >>> C()
+ C(_x=42)
+ >>> C(23)
+ Traceback (most recent call last):
+ ...
+ TypeError: __init__() takes exactly 1 argument (2 given)
+
+An additional way of defining attributes is supported too.
+This is useful when you want to enhance classes that are not yours (a nice ``__repr__`` for Django models, anyone?):
+
+.. doctest::
+
+ >>> class SomethingFromSomeoneElse:
+ ... def __init__(self, x):
+ ... self.x = x
+ >>> SomethingFromSomeoneElse = define(
+ ... these={
+ ... "x": field()
+ ... }, init=False)(SomethingFromSomeoneElse)
+ >>> SomethingFromSomeoneElse(1)
+ SomethingFromSomeoneElse(x=1)
+
+
+`Subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, but ``attrs`` will still do what you'd hope for:
+
+.. doctest::
+
+ >>> @define(slots=False)
+ ... class A:
+ ... a: int
+ ... def get_a(self):
+ ... return self.a
+ >>> @define(slots=False)
+ ... class B:
+ ... b: int
+ >>> @define(slots=False)
+ ... class C(B, A):
+ ... c: int
+ >>> i = C(1, 2, 3)
+ >>> i
+ C(a=1, b=2, c=3)
+ >>> i == C(1, 2, 3)
+ True
+ >>> i.get_a()
+ 1
+
+:term:`Slotted classes <slotted classes>`, which are the default for the new APIs, don't play well with multiple inheritance, so we don't use them in this example.
+
+The order of the attributes is defined by the `MRO <https://www.python.org/download/releases/2.3/mro/>`_.
+
+Keyword-only Attributes
+~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also add `keyword-only <https://docs.python.org/3/glossary.html#keyword-only-parameter>`_ attributes:
+
+.. doctest::
+
+ >>> @define
+ ... class A:
+ ... a: int = field(kw_only=True)
+ >>> A()
+ Traceback (most recent call last):
+ ...
+ TypeError: A() missing 1 required keyword-only argument: 'a'
+ >>> A(a=1)
+ A(a=1)
+
+``kw_only`` may also be specified via ``define``, and will apply to all attributes:
+
+.. doctest::
+
+ >>> @define(kw_only=True)
+ ... class A:
+ ... a: int
+ ... b: int
+ >>> A(1, 2)
+ Traceback (most recent call last):
+ ...
+ TypeError: __init__() takes 1 positional argument but 3 were given
+ >>> A(a=1, b=2)
+ A(a=1, b=2)
+
+
+
+If you create an attribute with ``init=False``, the ``kw_only`` argument is ignored.
+
+Keyword-only attributes allow subclasses to add attributes without default values, even if the base class defines attributes with default values:
+
+.. doctest::
+
+ >>> @define
+ ... class A:
+ ... a: int = 0
+ >>> @define
+ ... class B(A):
+ ... b: int = field(kw_only=True)
+ >>> B(b=1)
+ B(a=0, b=1)
+ >>> B()
+ Traceback (most recent call last):
+ ...
+ TypeError: B() missing 1 required keyword-only argument: 'b'
+
+If you don't set ``kw_only=True``, then there is no valid attribute ordering and you'll get an error:
+
+.. doctest::
+
+ >>> @define
+ ... class A:
+ ... a: int = 0
+ >>> @define
+ ... class B(A):
+ ... b: int
+ Traceback (most recent call last):
+ ...
+ ValueError: No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: Attribute(name='b', default=NOTHING, validator=None, repr=True, cmp=True, hash=None, init=True, converter=None, metadata=mappingproxy({}), type=int, kw_only=False)
+
+.. _asdict:
+
+Converting to Collections Types
+-------------------------------
+
+When you have a class with data, it often is very convenient to transform that class into a `dict` (for example if you want to serialize it to JSON):
+
+.. doctest::
+
+ >>> from attrs import asdict
+
+ >>> asdict(Coordinates(x=1, y=2))
+ {'x': 1, 'y': 2}
+
+Some fields cannot or should not be transformed.
+For that, `attrs.asdict` offers a callback that decides whether an attribute should be included:
+
+.. doctest::
+
+ >>> @define
+ ... class User(object):
+ ... email: str
+ ... password: str
+
+ >>> @define
+ ... class UserList:
+ ... users: list[User]
+
+ >>> asdict(UserList([User("jane@doe.invalid", "s33kred"),
+ ... User("joe@doe.invalid", "p4ssw0rd")]),
+ ... filter=lambda attr, value: attr.name != "password")
+ {'users': [{'email': 'jane@doe.invalid'}, {'email': 'joe@doe.invalid'}]}
+
+For the common case where you want to `include <attr.filters.include>` or `exclude <attr.filters.exclude>` certain types or attributes, ``attrs`` ships with a few helpers:
+
+.. doctest::
+
+ >>> from attrs import asdict, filters, fields
+
+ >>> @define
+ ... class User:
+ ... login: str
+ ... password: str
+ ... id: int
+
+ >>> asdict(
+ ... User("jane", "s33kred", 42),
+ ... filter=filters.exclude(fields(User).password, int))
+ {'login': 'jane'}
+
+ >>> @define
+ ... class C:
+ ... x: str
+ ... y: str
+ ... z: int
+
+ >>> asdict(C("foo", "2", 3),
+ ... filter=filters.include(int, fields(C).x))
+ {'x': 'foo', 'z': 3}
+
+Other times, all you want is a tuple and ``attrs`` won't let you down:
+
+.. doctest::
+
+ >>> import sqlite3
+ >>> from attrs import astuple
+
+ >>> @define
+ ... class Foo:
+ ... a: int
+ ... b: int
+
+ >>> foo = Foo(2, 3)
+ >>> with sqlite3.connect(":memory:") as conn:
+ ... c = conn.cursor()
+ ... c.execute("CREATE TABLE foo (x INTEGER PRIMARY KEY ASC, y)") #doctest: +ELLIPSIS
+ ... c.execute("INSERT INTO foo VALUES (?, ?)", astuple(foo)) #doctest: +ELLIPSIS
+ ... foo2 = Foo(*c.execute("SELECT x, y FROM foo").fetchone())
+ <sqlite3.Cursor object at ...>
+ <sqlite3.Cursor object at ...>
+ >>> foo == foo2
+ True
+
+For more advanced transformations and conversions, we recommend you look at a companion library (such as `cattrs <https://github.com/python-attrs/cattrs>`_).
+
+Defaults
+--------
+
+Sometimes you want to have default values for your initializer.
+And sometimes you even want mutable objects as default values (ever accidentally used ``def f(arg=[])``?).
+``attrs`` has you covered in both cases:
+
+.. doctest::
+
+ >>> import collections
+
+ >>> @define
+ ... class Connection:
+ ... socket: int
+ ... @classmethod
+ ... def connect(cls, db_string):
+ ... # ... connect somehow to db_string ...
+ ... return cls(socket=42)
+
+ >>> @define
+ ... class ConnectionPool:
+ ... db_string: str
+ ... pool: collections.deque = Factory(collections.deque)
+ ... debug: bool = False
+ ... def get_connection(self):
+ ... try:
+ ... return self.pool.pop()
+ ... except IndexError:
+ ... if self.debug:
+ ... print("New connection!")
+ ... return Connection.connect(self.db_string)
+ ... def free_connection(self, conn):
+ ... if self.debug:
+ ... print("Connection returned!")
+ ... self.pool.appendleft(conn)
+ ...
+ >>> cp = ConnectionPool("postgres://localhost")
+ >>> cp
+ ConnectionPool(db_string='postgres://localhost', pool=deque([]), debug=False)
+ >>> conn = cp.get_connection()
+ >>> conn
+ Connection(socket=42)
+ >>> cp.free_connection(conn)
+ >>> cp
+ ConnectionPool(db_string='postgres://localhost', pool=deque([Connection(socket=42)]), debug=False)
+
+More information on why class methods for constructing objects are awesome can be found in this insightful `blog post <https://web.archive.org/web/20210130220433/http://as.ynchrono.us/2014/12/asynchronous-object-initialization.html>`_.
+
+Default factories can also be set using the ``factory`` argument to ``field``, and using a decorator.
+The method receives the partially initialized instance, which enables you to base a default value on other attributes:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int = 1
+ ... y: int = field()
+ ... @y.default
+ ... def _any_name_except_a_name_of_an_attribute(self):
+ ... return self.x + 1
+ ... z: list = field(factory=list)
+ >>> C()
+ C(x=1, y=2, z=[])
+
+
+.. _examples_validators:
+
+Validators
+----------
+
+Although your initializers should do as little as possible (ideally: just initialize your instance according to the arguments!), it can come in handy to do some kind of validation on the arguments.
+
+``attrs`` offers two ways to define validators for each attribute and it's up to you to choose which one suits your style and project better.
+
+You can use a decorator:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int = field()
+ ... @x.validator
+ ... def check(self, attribute, value):
+ ... if value > 42:
+ ... raise ValueError("x must be smaller or equal to 42")
+ >>> C(42)
+ C(x=42)
+ >>> C(43)
+ Traceback (most recent call last):
+ ...
+ ValueError: x must be smaller or equal to 42
+
+...or a callable...
+
+.. doctest::
+
+ >>> from attrs import validators
+
+ >>> def x_smaller_than_y(instance, attribute, value):
+ ... if value >= instance.y:
+ ... raise ValueError("'x' has to be smaller than 'y'!")
+ >>> @define
+ ... class C:
+ ... x: int = field(validator=[validators.instance_of(int),
+ ... x_smaller_than_y])
+ ... y: int
+ >>> C(x=3, y=4)
+ C(x=3, y=4)
+ >>> C(x=4, y=3)
+ Traceback (most recent call last):
+ ...
+ ValueError: 'x' has to be smaller than 'y'!
+
+...or both at once:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int = field(validator=validators.instance_of(int))
+ ... @x.validator
+ ... def fits_byte(self, attribute, value):
+ ... if not 0 <= value < 256:
+ ... raise ValueError("value out of bounds")
+ >>> C(128)
+ C(x=128)
+ >>> C("128")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got '128' that is a <class 'str'>).", Attribute(name='x', default=NOTHING, validator=[<instance_of validator for type <class 'int'>>, <function fits_byte at 0x10fd7a0d0>], repr=True, cmp=True, hash=True, init=True, metadata=mappingproxy({}), type=int, converter=None, kw_only=False), <class 'int'>, '128')
+ >>> C(256)
+ Traceback (most recent call last):
+ ...
+ ValueError: value out of bounds
+
+Please note that the decorator approach only works if -- and only if! -- the attribute in question has a ``field`` assigned.
+Therefore if you use ``@default``, it is *not* enough to annotate said attribute with a type.
+
+``attrs`` ships with a bunch of validators, make sure to `check them out <api_validators>` before writing your own:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int = field(validator=validators.instance_of(int))
+ >>> C(42)
+ C(x=42)
+ >>> C("42")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <type 'int'> (got '42' that is a <type 'str'>).", Attribute(name='x', default=NOTHING, factory=NOTHING, validator=<instance_of validator for type <type 'int'>>, type=None, kw_only=False), <type 'int'>, '42')
+
+Please note that if you use `attr.s` (and not `attrs.define`) to define your class, validators only run on initialization by default.
+This behavior can be changed using the ``on_setattr`` argument.
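+
+For instance -- as a rough sketch, with made-up names -- opting into validation on assignment for a classic ``attr.s`` class could look like this::
+
+   import attr
+
+   @attr.s(on_setattr=attr.setters.validate)
+   class D:
+       x = attr.ib(validator=attr.validators.instance_of(int))
+
+   d = D(1)
+   d.x = "2"   # now raises a TypeError from the instance_of validator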
+
+Check out `validators` for more details.
+
+
+Conversion
+----------
+
+Attributes can have a ``converter`` function specified, which will be called with the attribute's passed-in value to get a new value to use.
+This can be useful for doing type-conversions on values that you don't want to force your callers to do.
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int = field(converter=int)
+ >>> o = C("1")
+ >>> o.x
+ 1
+
+Please note that converters only run on initialization.
+
+Check out `converters` for more details.
+
+
+.. _metadata:
+
+Metadata
+--------
+
+All ``attrs`` attributes may include arbitrary metadata in the form of a read-only dictionary.
+
+.. doctest::
+
+ >>> from attrs import fields
+
+ >>> @define
+ ... class C:
+ ... x = field(metadata={'my_metadata': 1})
+ >>> fields(C).x.metadata
+ mappingproxy({'my_metadata': 1})
+ >>> fields(C).x.metadata['my_metadata']
+ 1
+
+Metadata is not used by ``attrs``, and is meant to enable rich functionality in third-party libraries.
+The metadata dictionary follows the normal dictionary rules: keys need to be hashable, and both keys and values are recommended to be immutable.
+
+If you're the author of a third-party library with ``attrs`` integration, please see `Extending Metadata <extending_metadata>`.
+
+
+Types
+-----
+
+``attrs`` also allows you to associate a type with an attribute using either the *type* argument to `attr.ib` or -- as of Python 3.6 -- using `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_-annotations:
+
+
+.. doctest::
+
+ >>> from attrs import fields
+
+ >>> @define
+ ... class C:
+ ... x: int
+ >>> fields(C).x.type
+ <class 'int'>
+
+ >>> import attr
+ >>> @attr.s
+ ... class C(object):
+ ... x = attr.ib(type=int)
+ >>> fields(C).x.type
+ <class 'int'>
+
+If you don't mind annotating *all* attributes, you can even drop the `attrs.field` and assign default values instead:
+
+.. doctest::
+
+ >>> import typing
+ >>> from attrs import fields
+
+ >>> @define
+ ... class AutoC:
+ ... cls_var: typing.ClassVar[int] = 5 # this one is ignored
+ ... l: list[int] = Factory(list)
+ ... x: int = 1
+ ... foo: str = "every attrib needs a type if auto_attribs=True"
+ ... bar: typing.Any = None
+ >>> fields(AutoC).l.type
+ list[int]
+ >>> fields(AutoC).x.type
+ <class 'int'>
+ >>> fields(AutoC).foo.type
+ <class 'str'>
+ >>> fields(AutoC).bar.type
+ typing.Any
+ >>> AutoC()
+ AutoC(l=[], x=1, foo='every attrib needs a type if auto_attribs=True', bar=None)
+ >>> AutoC.cls_var
+ 5
+
+The generated ``__init__`` method will have an attribute called ``__annotations__`` that contains this type information.
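+
+For example, sticking with ``AutoC`` from above, a rough check could look like this (the exact ordering of the keys may vary)::
+
+   AutoC.__init__.__annotations__
+   # roughly: {'l': list[int], 'x': <class 'int'>, 'foo': <class 'str'>,
+   #           'bar': typing.Any, 'return': None}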
+
+If your annotations contain strings (e.g. forward references),
+you can resolve these after all references have been defined by using :func:`attrs.resolve_types`.
+This will replace the *type* attribute in the respective fields.
+
+.. doctest::
+
+ >>> from attrs import fields, resolve_types
+
+ >>> @define
+ ... class A:
+ ... a: 'list[A]'
+ ... b: 'B'
+ ...
+ >>> @define
+ ... class B:
+ ... a: A
+ ...
+ >>> fields(A).a.type
+ 'list[A]'
+ >>> fields(A).b.type
+ 'B'
+ >>> resolve_types(A, globals(), locals())
+ <class 'A'>
+ >>> fields(A).a.type
+ list[A]
+ >>> fields(A).b.type
+ <class 'B'>
+
+.. note::
+
+ If you find yourself using string type annotations to handle forward references, wrap the entire type annotation in quotes instead of only the type you need a forward reference to (so ``'list[A]'`` instead of ``list['A']``).
+ This is a limitation of the Python typing system.
+
+.. warning::
+
+ ``attrs`` itself doesn't have any features that work on top of type metadata *yet*.
+ However it's useful for writing your own validators or serialization frameworks.
+
+
+Slots
+-----
+
+:term:`Slotted classes <slotted classes>` have several advantages on CPython.
+Defining ``__slots__`` by hand is tedious; in ``attrs`` it's just a matter of using `attrs.define` or passing ``slots=True`` to `attr.s`:
+
+.. doctest::
+
+ >>> import attr
+
+ >>> @attr.s(slots=True)
+ ... class Coordinates:
+ ... x: int
+ ... y: int
+
+
+Immutability
+------------
+
+Sometimes you have instances that shouldn't be changed after instantiation.
+Immutability is especially popular in functional programming and is generally a very good thing.
+If you'd like to enforce it, ``attrs`` will try to help:
+
+.. doctest::
+
+ >>> @frozen
+ ... class C:
+ ... x: int
+ >>> i = C(1)
+ >>> i.x = 2
+ Traceback (most recent call last):
+ ...
+ attr.exceptions.FrozenInstanceError: can't set attribute
+ >>> i.x
+ 1
+
+Please note that true immutability is impossible in Python but it will `get <how-frozen>` you 99% there.
+By themselves, immutable classes are useful for long-lived objects that should never change, such as configurations.
+
+In order to use them in regular program flow, you'll need a way to easily create new instances with changed attributes.
+In Clojure that function is called `assoc <https://clojuredocs.org/clojure.core/assoc>`_ and ``attrs`` shamelessly imitates it: `attr.evolve`:
+
+.. doctest::
+
+ >>> from attrs import evolve
+
+ >>> @frozen
+ ... class C:
+ ... x: int
+ ... y: int
+ >>> i1 = C(1, 2)
+ >>> i1
+ C(x=1, y=2)
+ >>> i2 = evolve(i1, y=3)
+ >>> i2
+ C(x=1, y=3)
+ >>> i1 == i2
+ False
+
+
+Other Goodies
+-------------
+
+Sometimes you may want to create a class programmatically.
+``attrs`` won't let you down and gives you `attrs.make_class`:
+
+.. doctest::
+
+ >>> from attrs import fields, make_class
+ >>> @define
+ ... class C1:
+ ... x = field()
+ ... y = field()
+ >>> C2 = make_class("C2", ["x", "y"])
+ >>> fields(C1) == fields(C2)
+ True
+
+You still have full power over the attributes if you pass a dictionary of name-to-``field`` mappings, and you can pass arguments to ``@attr.s``:
+
+.. doctest::
+
+ >>> from attrs import make_class
+
+ >>> C = make_class("C", {"x": field(default=42),
+ ... "y": field(default=Factory(list))},
+ ... repr=False)
+ >>> i = C()
+ >>> i # no repr added!
+ <__main__.C object at ...>
+ >>> i.x
+ 42
+ >>> i.y
+ []
+
+If you need to dynamically make a class with `attrs.make_class` and it needs to be a subclass of something other than ``object``, use the ``bases`` argument:
+
+.. doctest::
+
+ >>> from attrs import make_class
+
+ >>> class D:
+ ... def __eq__(self, other):
+ ... return True # arbitrary example
+ >>> C = make_class("C", {}, bases=(D,), cmp=False)
+ >>> isinstance(C(), D)
+ True
+
+Sometimes, you want to have your class's ``__init__`` method do more than just
+the initialization, validation, etc. that gets done for you automatically when
+using ``@define``.
+To do this, just define a ``__attrs_post_init__`` method in your class.
+It will get called at the end of the generated ``__init__`` method.
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int
+ ... y: int
+ ... z: int = field(init=False)
+ ...
+ ... def __attrs_post_init__(self):
+ ... self.z = self.x + self.y
+ >>> obj = C(x=1, y=2)
+ >>> obj
+ C(x=1, y=2, z=3)
+
+You can exclude single attributes from certain methods:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... user: str
+ ... password: str = field(repr=False)
+ >>> C("me", "s3kr3t")
+ C(user='me')
+
+Alternatively, to influence how the generated ``__repr__()`` method formats a specific attribute, specify a custom callable to be used instead of the ``repr()`` built-in function:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... user: str
+ ... password: str = field(repr=lambda value: '***')
+ >>> C("me", "s3kr3t")
+ C(user='me', password=***)
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/extending.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/extending.rst
new file mode 100644
index 0000000000..faf71afd91
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/extending.rst
@@ -0,0 +1,313 @@
+Extending
+=========
+
+Each ``attrs``-decorated class has a ``__attrs_attrs__`` class attribute.
+It's a tuple of `attrs.Attribute` carrying metadata about each attribute.
+
+So it is fairly simple to build your own decorators on top of ``attrs``:
+
+.. doctest::
+
+ >>> from attr import define
+ >>> def print_attrs(cls):
+ ... print(cls.__attrs_attrs__)
+ ... return cls
+ >>> @print_attrs
+ ... @define
+ ... class C:
+ ... a: int
+ (Attribute(name='a', default=NOTHING, validator=None, repr=True, eq=True, eq_key=None, order=True, order_key=None, hash=None, init=True, metadata=mappingproxy({}), type=<class 'int'>, converter=None, kw_only=False, inherited=False, on_setattr=None),)
+
+
+.. warning::
+
+ The `attrs.define`/`attr.s` decorator **must** be applied first because it puts ``__attrs_attrs__`` in place!
+   That means that it has to come *after* your decorator because::
+
+ @a
+ @b
+ def f():
+ pass
+
+ is just `syntactic sugar <https://en.wikipedia.org/wiki/Syntactic_sugar>`_ for::
+
+ def original_f():
+ pass
+
+ f = a(b(original_f))
+
+
+Wrapping the Decorator
+----------------------
+
+A more elegant way can be to wrap ``attrs`` altogether and build a class `DSL <https://en.wikipedia.org/wiki/Domain-specific_language>`_ on top of it.
+
+An example for that is the package `environ-config <https://github.com/hynek/environ-config>`_ that uses ``attrs`` under the hood to define environment-based configurations declaratively without exposing ``attrs`` APIs at all.
+
+Another common use case is to overwrite ``attrs``'s defaults.
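+
+A minimal sketch of such a wrapper (the name ``my_define`` is made up) could look like this::
+
+   import attr
+
+   def my_define(maybe_cls=None, **kwargs):
+       # Project-wide defaults: keyword-only and frozen unless overridden.
+       kwargs.setdefault("kw_only", True)
+       kwargs.setdefault("frozen", True)
+       return attr.define(maybe_cls, **kwargs)
+
+   @my_define
+   class Config:
+       host: str
+       port: int = 443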
+
+Mypy
+^^^^
+
+Unfortunately, decorator wrapping currently `confuses <https://github.com/python/mypy/issues/5406>`_ mypy's ``attrs`` plugin.
+At the moment, the best workaround is to hold your nose, write a fake mypy plugin, and mutate a bunch of global variables::
+
+ from mypy.plugin import Plugin
+ from mypy.plugins.attrs import (
+ attr_attrib_makers,
+ attr_class_makers,
+ attr_dataclass_makers,
+ )
+
+ # These work just like `attr.dataclass`.
+ attr_dataclass_makers.add("my_module.method_looks_like_attr_dataclass")
+
+ # This works just like `attr.s`.
+ attr_class_makers.add("my_module.method_looks_like_attr_s")
+
+ # These are our `attr.ib` makers.
+ attr_attrib_makers.add("my_module.method_looks_like_attrib")
+
+ class MyPlugin(Plugin):
+ # Our plugin does nothing but it has to exist so this file gets loaded.
+ pass
+
+
+ def plugin(version):
+ return MyPlugin
+
+
+Then tell mypy about your plugin using your project's ``mypy.ini``:
+
+.. code:: ini
+
+ [mypy]
+ plugins=<path to file>
+
+
+.. warning::
+ Please note that it is currently *impossible* to let mypy know that you've changed defaults like *eq* or *order*.
+ You can only use this trick to tell mypy that a class is actually an ``attrs`` class.
+
+Pyright
+^^^^^^^
+
+Generic decorator wrapping is supported in `pyright <https://github.com/microsoft/pyright>`_ via their dataclass_transform_ specification.
+
+For a custom wrapping of the form::
+
+ def custom_define(f):
+ return attr.define(f)
+
+This is implemented via a ``__dataclass_transform__`` type decorator in the custom extension's ``.pyi`` of the form::
+
+ def __dataclass_transform__(
+ *,
+ eq_default: bool = True,
+ order_default: bool = False,
+ kw_only_default: bool = False,
+ field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
+ ) -> Callable[[_T], _T]: ...
+
+ @__dataclass_transform__(field_descriptors=(attr.attrib, attr.field))
+ def custom_define(f): ...
+
+.. warning::
+
+ ``dataclass_transform`` is supported **provisionally** as of ``pyright`` 1.1.135.
+
+ Both the ``pyright`` dataclass_transform_ specification and ``attrs`` implementation may change in future versions.
+
+
+Types
+-----
+
+``attrs`` offers two ways of attaching type information to attributes:
+
+- `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_ annotations on Python 3.6 and later,
+- and the *type* argument to `attr.ib`.
+
+This information is available to you:
+
+.. doctest::
+
+ >>> from attr import attrib, define, field, fields
+ >>> @define
+ ... class C:
+ ... x: int = field()
+ ... y = attrib(type=str)
+ >>> fields(C).x.type
+ <class 'int'>
+ >>> fields(C).y.type
+ <class 'str'>
+
+Currently, ``attrs`` doesn't do anything with this information but it's very useful if you'd like to write your own validators or serializers!
+
+
+.. _extending_metadata:
+
+Metadata
+--------
+
+If you're the author of a third-party library with ``attrs`` integration, you may want to take advantage of attribute metadata.
+
+Here are some tips for effective use of metadata:
+
+- Try making your metadata keys and values immutable.
+ This keeps the entire ``Attribute`` instances immutable too.
+
+- To avoid metadata key collisions, consider exposing your metadata keys from your modules::
+
+ from mylib import MY_METADATA_KEY
+
+ @define
+ class C:
+ x = field(metadata={MY_METADATA_KEY: 1})
+
+  Metadata should be composable, so consider supporting this approach even if you decide to implement your metadata in one of the following ways.
+
+- Expose ``field`` wrappers for your specific metadata.
+ This is a more graceful approach if your users don't require metadata from other libraries.
+
+ .. doctest::
+
+ >>> from attr import fields, NOTHING
+ >>> MY_TYPE_METADATA = '__my_type_metadata'
+ >>>
+ >>> def typed(
+ ... cls, default=NOTHING, validator=None, repr=True,
+ ... eq=True, order=None, hash=None, init=True, metadata={},
+ ... converter=None
+ ... ):
+ ... metadata = dict() if not metadata else metadata
+ ... metadata[MY_TYPE_METADATA] = cls
+ ... return field(
+ ... default=default, validator=validator, repr=repr,
+ ... eq=eq, order=order, hash=hash, init=init,
+ ... metadata=metadata, converter=converter
+ ... )
+ >>>
+ >>> @define
+ ... class C:
+ ... x: int = typed(int, default=1, init=False)
+ >>> fields(C).x.metadata[MY_TYPE_METADATA]
+ <class 'int'>
+
+
+.. _transform-fields:
+
+Automatic Field Transformation and Modification
+-----------------------------------------------
+
+``attrs`` allows you to automatically modify or transform the class' fields while the class is being created.
+You do this by passing a *field_transformer* hook to `attr.define` (and its friends).
+Its main purpose is to automatically add converters to attributes based on their type to aid the development of API clients and other typed data loaders.
+
+This hook must have the following signature:
+
+.. function:: your_hook(cls: type, fields: list[attrs.Attribute]) -> list[attrs.Attribute]
+ :noindex:
+
+- *cls* is your class right *before* it is being converted into an attrs class.
+ This means it does not yet have the ``__attrs_attrs__`` attribute.
+
+- *fields* is a list of all `attrs.Attribute` instances that will later be set to ``__attrs_attrs__``.
+ You can modify these attributes any way you want:
+ You can add converters, change types, and even remove attributes completely or create new ones!
+
+For example, let's assume that you really don't like floats:
+
+.. doctest::
+
+ >>> def drop_floats(cls, fields):
+ ... return [f for f in fields if f.type not in {float, 'float'}]
+ ...
+ >>> @frozen(field_transformer=drop_floats)
+ ... class Data:
+ ... a: int
+ ... b: float
+ ... c: str
+ ...
+ >>> Data(42, "spam")
+ Data(a=42, c='spam')
+
+A more realistic example would be to automatically convert data that you, e.g., load from JSON:
+
+.. doctest::
+
+ >>> from datetime import datetime
+ >>>
+ >>> def auto_convert(cls, fields):
+ ... results = []
+ ... for field in fields:
+ ... if field.converter is not None:
+ ... results.append(field)
+ ... continue
+ ... if field.type in {datetime, 'datetime'}:
+ ... converter = (lambda d: datetime.fromisoformat(d) if isinstance(d, str) else d)
+ ... else:
+ ... converter = None
+ ... results.append(field.evolve(converter=converter))
+ ... return results
+ ...
+ >>> @frozen(field_transformer=auto_convert)
+ ... class Data:
+ ... a: int
+ ... b: str
+ ... c: datetime
+ ...
+ >>> from_json = {"a": 3, "b": "spam", "c": "2020-05-04T13:37:00"}
+ >>> Data(**from_json) # ****
+ Data(a=3, b='spam', c=datetime.datetime(2020, 5, 4, 13, 37))
+
+
+Customize Value Serialization in ``asdict()``
+---------------------------------------------
+
+``attrs`` allows you to serialize instances of ``attrs`` classes to dicts using the `attrs.asdict` function.
+However, the result cannot always be serialized, since most data types will remain as they are:
+
+.. doctest::
+
+ >>> import json
+ >>> import datetime
+ >>> from attrs import asdict
+ >>>
+ >>> @frozen
+ ... class Data:
+ ... dt: datetime.datetime
+ ...
+ >>> data = asdict(Data(datetime.datetime(2020, 5, 4, 13, 37)))
+ >>> data
+ {'dt': datetime.datetime(2020, 5, 4, 13, 37)}
+ >>> json.dumps(data)
+ Traceback (most recent call last):
+ ...
+ TypeError: Object of type datetime is not JSON serializable
+
+To help you with this, `attr.asdict` allows you to pass a *value_serializer* hook.
+It has the following signature:
+
+.. function:: your_hook(inst: type, field: attrs.Attribute, value: typing.Any) -> typing.Any
+ :noindex:
+
+.. doctest::
+
+ >>> from attr import asdict
+ >>> def serialize(inst, field, value):
+ ... if isinstance(value, datetime.datetime):
+ ... return value.isoformat()
+ ... return value
+ ...
+ >>> data = asdict(
+ ... Data(datetime.datetime(2020, 5, 4, 13, 37)),
+ ... value_serializer=serialize,
+ ... )
+ >>> data
+ {'dt': '2020-05-04T13:37:00'}
+ >>> json.dumps(data)
+ '{"dt": "2020-05-04T13:37:00"}'
+
+
+.. _dataclass_transform: https://github.com/microsoft/pyright/blob/master/specs/dataclass_transforms.md
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/glossary.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/glossary.rst
new file mode 100644
index 0000000000..5fd01f4fb1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/glossary.rst
@@ -0,0 +1,104 @@
+Glossary
+========
+
+.. glossary::
+
+ dunder methods
+ "Dunder" is a contraction of "double underscore".
+
+ It's methods like ``__init__`` or ``__eq__`` that are sometimes also called *magic methods* or it's said that they implement an *object protocol*.
+
+ In spoken form, you'd call ``__init__`` just "dunder init".
+
+ Its first documented use is a `mailing list posting <https://mail.python.org/pipermail/python-list/2002-September/155836.html>`_ by Mark Jackson from 2002.
+
+ dict classes
+ A regular class whose attributes are stored in the `object.__dict__` attribute of every single instance.
+ This is quite wasteful especially for objects with very few data attributes and the space consumption can become significant when creating large numbers of instances.
+
+      This is the type of class you get by default both with and without ``attrs`` (except with the next-generation APIs `attr.define`, `attr.mutable`, and `attr.frozen`).
+
+ slotted classes
+ A class whose instances have no `object.__dict__` attribute and `define <https://docs.python.org/3/reference/datamodel.html#slots>`_ their attributes in a `object.__slots__` attribute instead.
+ In ``attrs``, they are created by passing ``slots=True`` to ``@attr.s`` (and are on by default in `attr.define`/`attr.mutable`/`attr.frozen`).
+
+
+ Their main advantage is that they use less memory on CPython [#pypy]_ and are slightly faster.
+
+ However they also come with several possibly surprising gotchas:
+
+      - Slotted classes don't allow for any other attribute to be set except for those defined in the ``__slots__`` of one of the classes in its hierarchy:
+
+ .. doctest::
+
+ >>> from attr import define
+ >>> @define
+ ... class Coordinates:
+ ... x: int
+ ... y: int
+ ...
+ >>> c = Coordinates(x=1, y=2)
+ >>> c.z = 3
+ Traceback (most recent call last):
+ ...
+ AttributeError: 'Coordinates' object has no attribute 'z'
+
+ - Slotted classes can inherit from other classes just like non-slotted classes, but some of the benefits of slotted classes are lost if you do that.
+ If you must inherit from other classes, try to inherit only from other slotted classes.
+
+      - However, `it's not possible <https://docs.python.org/3/reference/datamodel.html#notes-on-using-slots>`_ to inherit from more than one class that has attributes in ``__slots__`` (you will get a ``TypeError: multiple bases have instance lay-out conflict``).
+
+ - It's not possible to monkeypatch methods on slotted classes.
+        This can feel limiting in test code; however, the need to monkeypatch your own classes is usually a design smell.
+
+ If you really need to monkeypatch an instance in your tests, but don't want to give up on the advantages of slotted classes in production code, you can always subclass a slotted class as a dict class with no further changes and all the limitations go away:
+
+ .. doctest::
+
+ >>> import attr, unittest.mock
+ >>> @define
+ ... class Slotted:
+ ... x: int
+ ...
+ ... def method(self):
+ ... return self.x
+ >>> s = Slotted(42)
+ >>> s.method()
+ 42
+ >>> with unittest.mock.patch.object(s, "method", return_value=23):
+ ... pass
+ Traceback (most recent call last):
+ ...
+ AttributeError: 'Slotted' object attribute 'method' is read-only
+ >>> @define(slots=False)
+ ... class Dicted(Slotted):
+ ... pass
+ >>> d = Dicted(42)
+ >>> d.method()
+ 42
+ >>> with unittest.mock.patch.object(d, "method", return_value=23):
+ ... assert 23 == d.method()
+
+ - Slotted classes must implement :meth:`__getstate__ <object.__getstate__>` and :meth:`__setstate__ <object.__setstate__>` to be serializable with `pickle` protocol 0 and 1.
+ Therefore, ``attrs`` creates these methods automatically for ``slots=True`` classes (Python 2 uses protocol 0 by default).
+
+ .. note::
+
+ If the ``@attr.s(slots=True)`` decorated class already implements the :meth:`__getstate__ <object.__getstate__>` and :meth:`__setstate__ <object.__setstate__>` methods, they will be *overwritten* by ``attrs`` autogenerated implementation by default.
+
+ This can be avoided by setting ``@attr.s(getstate_setstate=False)`` or by setting ``@attr.s(auto_detect=True)``.
+
+ Also, `think twice <https://www.youtube.com/watch?v=7KnfGDajDQw>`_ before using `pickle`.
+
+ - Slotted classes are weak-referenceable by default.
+ This can be disabled in CPython by passing ``weakref_slot=False`` to ``@attr.s`` [#pypyweakref]_.
+
+ - Since it's currently impossible to make a class slotted after it's been created, ``attrs`` has to replace your class with a new one.
+        While it tries to do that as gracefully as possible, certain metaclass features like `object.__init_subclass__` do not work with slotted classes.
+
+      - The `class.__subclasses__` attribute needs a garbage collection run (which can be triggered manually using `gc.collect`) for the original class to be removed.
+ See issue `#407 <https://github.com/python-attrs/attrs/issues/407>`_ for more details.
+
+
+.. [#pypy] On PyPy, there is no memory advantage in using slotted classes.
+.. [#pypyweakref] On PyPy, slotted classes are naturally weak-referenceable so ``weakref_slot=False`` has no effect.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/hashing.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/hashing.rst
new file mode 100644
index 0000000000..30888f97bb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/hashing.rst
@@ -0,0 +1,86 @@
+Hashing
+=======
+
+Hash Method Generation
+----------------------
+
+.. warning::
+
+ The overarching theme is to never set the ``@attr.s(hash=X)`` parameter yourself.
+ Leave it at ``None`` which means that ``attrs`` will do the right thing for you, depending on the other parameters:
+
+ - If you want to make objects hashable by value: use ``@attr.s(frozen=True)``.
+ - If you want hashing and equality by object identity: use ``@attr.s(eq=False)``
+
+   Setting ``hash`` yourself can have unexpected consequences, so we recommend tinkering with it only if you know exactly what you're doing.
+
+Under certain circumstances, it's necessary for objects to be *hashable*.
+For example if you want to put them into a `set` or if you want to use them as keys in a `dict`.
+
+The *hash* of an object is an integer that represents the contents of an object.
+It can be obtained by calling `hash` on an object and is implemented by writing a ``__hash__`` method for your class.
+
+``attrs`` will happily write a ``__hash__`` method for you [#fn1]_; however, it will *not* do so by default.
+That's because, according to the definition_ from the official Python docs, the returned hash has to fulfill certain constraints:
+
+#. Two objects that are equal, **must** have the same hash.
+ This means that if ``x == y``, it *must* follow that ``hash(x) == hash(y)``.
+
+ By default, Python classes are compared *and* hashed by their `id`.
+ That means that every instance of a class has a different hash, no matter what attributes it carries.
+
+ It follows that the moment you (or ``attrs``) change the way equality is handled by implementing ``__eq__`` which is based on attribute values, this constraint is broken.
+ For that reason Python 3 will make a class that has customized equality unhashable.
+ Python 2 on the other hand will happily let you shoot your foot off.
+ Unfortunately ``attrs`` currently mimics Python 2's behavior for backward compatibility reasons if you set ``hash=False``.
+
+ The *correct way* to achieve hashing by id is to set ``@attr.s(eq=False)``.
+ Setting ``@attr.s(hash=False)`` (which implies ``eq=True``) is almost certainly a *bug*.
+
+ .. warning::
+
+ Be careful when subclassing!
+ Setting ``eq=False`` on a class whose base class has a non-default ``__hash__`` method will *not* make ``attrs`` remove that ``__hash__`` for you.
+
+ It is part of ``attrs``'s philosophy to only *add* to classes so you have the freedom to customize your classes as you wish.
+ So if you want to *get rid* of methods, you'll have to do it by hand.
+
+ The easiest way to reset ``__hash__`` on a class is adding ``__hash__ = object.__hash__`` in the class body.
+
+#. If two objects are not equal, their hashes **should** be different.
+
+ While this isn't a requirement from a standpoint of correctness, sets and dicts become less effective if there are a lot of identical hashes.
+ The worst case is when all objects have the same hash which turns a set into a list.
+
+#. The hash of an object **must not** change.
+
+   If you create a class with ``@attr.s(frozen=True)``, this is fulfilled by definition; therefore, ``attrs`` will write a ``__hash__`` function for you automatically.
+ You can also force it to write one with ``hash=True`` but then it's *your* responsibility to make sure that the object is not mutated.
+
+ This point is the reason why mutable structures like lists, dictionaries, or sets aren't hashable while immutable ones like tuples or frozensets are:
+ point 1 and 2 require that the hash changes with the contents but point 3 forbids it.
+
+For a more thorough explanation of this topic, please refer to this blog post: `Python Hashes and Equality`_.
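+
+To make these constraints concrete, here is a rough sketch (the class names are made up) contrasting hashing by value with hashing by identity::
+
+   import attr
+
+   @attr.s(frozen=True)   # compared and hashed by value
+   class ByValue:
+       x = attr.ib()
+
+   assert len({ByValue(1), ByValue(1)}) == 1
+
+   @attr.s(eq=False)      # compared and hashed by object identity
+   class ById:
+       x = attr.ib()
+
+   assert len({ById(1), ById(1)}) == 2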
+
+
+Hashing and Mutability
+----------------------
+
+Changing any field involved in hash code computation after the first call to ``__hash__`` (typically this would be after its insertion into a hash-based collection) can result in silent bugs.
+Therefore, it is strongly recommended that hashable classes be ``frozen``.
+Beware, however, that this is not a complete guarantee of safety:
+if a field points to an object and that object is mutated, the hash code may change, but ``frozen`` will not protect you.
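+
+A rough sketch of that failure mode (the class name is made up)::
+
+   import attr
+
+   @attr.s(hash=True)   # hashable *and* mutable -- exactly what this section warns about
+   class Key:
+       x = attr.ib()
+
+   k = Key(1)
+   seen = {k}
+   k.x = 2               # the hash of k changes silently...
+   assert k not in seen  # ...so lookups in the set now fail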
+
+
+Hash Code Caching
+-----------------
+
+Some objects have hash codes which are expensive to compute.
+If such objects are to be stored in hash-based collections, it can be useful to compute the hash codes only once and then store the result on the object to make future hash code requests fast.
+To enable caching of hash codes, pass ``cache_hash=True`` to ``@attrs``.
+This may only be done if ``attrs`` is already generating a hash function for the object.
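+
+A minimal sketch (the class name is made up)::
+
+   import attr
+
+   @attr.s(frozen=True, cache_hash=True)
+   class Expensive:
+       payload = attr.ib()   # imagine a huge tuple
+
+   e = Expensive(tuple(range(100_000)))
+   hash(e)   # computed once...
+   hash(e)   # ...and served from the cache afterwards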
+
+.. [#fn1] The hash is computed by hashing a tuple that consists of a unique id for the class plus all attribute values.
+
+.. _definition: https://docs.python.org/3/glossary.html#term-hashable
+.. _`Python Hashes and Equality`: https://hynek.me/articles/hashes-and-equality/
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/how-does-it-work.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/how-does-it-work.rst
new file mode 100644
index 0000000000..f899740542
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/how-does-it-work.rst
@@ -0,0 +1,109 @@
+.. _how:
+
+How Does It Work?
+=================
+
+
+Boilerplate
+-----------
+
+``attrs`` certainly isn't the first library that aims to simplify class definition in Python.
+But its **declarative** approach combined with **no runtime overhead** lets it stand out.
+
+Once you apply the ``@attrs.define`` (or ``@attr.s``) decorator to a class, ``attrs`` searches the class object for instances of ``attr.ib``\ s.
+Internally they're a representation of the data passed into ``attr.ib`` along with a counter to preserve the order of the attributes.
+Alternatively, it's possible to define them using :doc:`types`.
+
+In order to ensure that subclassing works as you'd expect it to work, ``attrs`` also walks the class hierarchy and collects the attributes of all base classes.
+Please note that ``attrs`` does *not* call ``super()`` *ever*.
+It will write :term:`dunder methods` to work on *all* of those attributes, which also has performance benefits due to fewer function calls.
+
+Once ``attrs`` knows what attributes it has to work on, it writes the requested :term:`dunder methods` and -- depending on whether you wish to have a :term:`dict <dict classes>` or :term:`slotted <slotted classes>` class -- creates a new class for you (``slots=True``) or attaches them to the original class (``slots=False``).
+While creating new classes is more elegant, we've run into several edge cases surrounding metaclasses that make it impossible to go this route unconditionally.
+
+To be very clear: if you define a class with a single attribute without a default value, the generated ``__init__`` will look *exactly* how you'd expect:
+
+.. doctest::
+
+ >>> import inspect
+ >>> from attr import define
+ >>> @define
+ ... class C:
+ ... x: int
+ >>> print(inspect.getsource(C.__init__))
+ def __init__(self, x):
+ self.x = x
+ <BLANKLINE>
+
+No magic, no meta programming, no expensive introspection at runtime.
+
+****
+
+Everything until this point happens exactly *once* when the class is defined.
+As soon as a class is done, it's done.
+And it's just a regular Python class like any other, except for a single ``__attrs_attrs__`` attribute that ``attrs`` uses internally.
+Much of the information is accessible via `attrs.fields` and other functions which can be used for introspection or for writing your own tools and decorators on top of ``attrs`` (like `attrs.asdict`).
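+
+For instance, sticking with a class like ``C`` from the example above, a quick sketch::
+
+   import attr
+
+   attr.fields(C)      # (Attribute(name='x', ...),)
+   attr.asdict(C(1))   # {'x': 1}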
+
+And once you start instantiating your classes, ``attrs`` is out of your way completely.
+
+This **static** approach was very much a design goal of ``attrs`` and what I strongly believe makes it distinct.
+
+
+.. _how-frozen:
+
+Immutability
+------------
+
+In order to give you immutability, ``attrs`` will attach a ``__setattr__`` method to your class that raises an `attrs.exceptions.FrozenInstanceError` whenever anyone tries to set an attribute.
+
+The same is true if you choose to freeze individual attributes using the `attrs.setters.frozen` *on_setattr* hook -- except that the exception becomes `attrs.exceptions.FrozenAttributeError`.
+
+Both errors subclass `attrs.exceptions.FrozenError`.
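+
+As a rough sketch (the names are made up), freezing a single attribute looks like this::
+
+   from attr import define, field, setters
+
+   @define
+   class C:
+       locked: int = field(on_setattr=setters.frozen)
+       free: int = 0
+
+   c = C(1)
+   c.free = 2     # fine
+   c.locked = 3   # raises attrs.exceptions.FrozenAttributeError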
+
+-----
+
+Depending on whether a class is a dict class or a slotted class, ``attrs`` uses a different technique to circumvent that limitation in the ``__init__`` method.
+
+Once constructed, frozen instances don't differ in any way from regular ones except that you cannot change their attributes.
+
+
+Dict Classes
+++++++++++++
+
+Dict classes -- i.e. regular classes -- simply assign the value directly into the class' eponymous ``__dict__`` (and there's nothing we can do to stop the user from doing the same).
+
+The performance impact is negligible.
+
+
+Slotted Classes
++++++++++++++++
+
+Slotted classes are more complicated.
+Here it uses (an aggressively cached) :meth:`object.__setattr__` to set your attributes.
+This is (still) slower than a plain assignment:
+
+.. code-block:: none
+
+ $ pyperf timeit --rigorous \
+ -s "import attr; C = attr.make_class('C', ['x', 'y', 'z'], slots=True)" \
+ "C(1, 2, 3)"
+ ........................................
+ Median +- std dev: 378 ns +- 12 ns
+
+ $ pyperf timeit --rigorous \
+ -s "import attr; C = attr.make_class('C', ['x', 'y', 'z'], slots=True, frozen=True)" \
+ "C(1, 2, 3)"
+ ........................................
+ Median +- std dev: 676 ns +- 16 ns
+
+So on a laptop computer the difference is about 300 nanoseconds (1 second is 1,000,000,000 nanoseconds).
+It's certainly something you'll feel in a hot loop but shouldn't matter in normal code.
+Pick what's more important to you.
+
+
+Summary
++++++++
+
+You should avoid instantiating lots of frozen slotted classes (i.e. ``@frozen``) in performance-critical code.
+
+Frozen dict classes have barely any performance impact, while unfrozen slotted classes are even *faster* than unfrozen dict classes (i.e. regular classes).
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/index.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/index.rst
new file mode 100644
index 0000000000..ff65a6738c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/index.rst
@@ -0,0 +1,100 @@
+.. module:: attr
+.. module:: attrs
+
+======================================
+``attrs``: Classes Without Boilerplate
+======================================
+
+Release v\ |release| (`What's new? <changelog>`).
+
+.. include:: ../README.rst
+ :start-after: teaser-begin
+ :end-before: teaser-end
+
+
+Getting Started
+===============
+
+``attrs`` is a Python-only package `hosted on PyPI <https://pypi.org/project/attrs/>`_.
+The recommended installation method is `pip <https://pip.pypa.io/en/stable/>`_-installing into a `virtualenv <https://hynek.me/articles/virtualenv-lives/>`_:
+
+.. code-block:: console
+
+ $ python -m pip install attrs
+
+The next three steps should bring you up and running in no time:
+
+- `overview` will show you a simple example of ``attrs`` in action and introduce you to its philosophy.
+ Afterwards, you can start writing your own classes and understand what drives ``attrs``'s design.
+- `examples` will give you a comprehensive tour of ``attrs``'s features.
+ After reading, you will know about our advanced features and how to use them.
+- If you're confused by all the ``attr.s``, ``attr.ib``, ``attrs``, ``attrib``, ``define``, ``frozen``, and ``field``, head over to `names` for a very short explanation, and optionally a quick history lesson.
+- Finally `why` gives you a rundown of potential alternatives and why we think ``attrs`` is superior.
+ Yes, we've heard about ``namedtuple``\ s and Data Classes!
+- If at any point you get confused by some terminology, please check out our `glossary`.
+
+
+If you need any help while getting started, feel free to use the ``python-attrs`` tag on `Stack Overflow <https://stackoverflow.com/questions/tagged/python-attrs>`_ and someone will surely help you out!
+
+
+Day-to-Day Usage
+================
+
+- `types` help you to write *correct* and *self-documenting* code.
+ ``attrs`` has first class support for them, yet keeps them optional if you’re not convinced!
+- Instance initialization is one of ``attrs``'s key feature areas.
+ Our goal is to relieve you from writing as much code as possible.
+  `init` gives you an overview of what ``attrs`` has to offer and explains some related philosophies we believe in.
+- Comparing and ordering objects is a common task.
+ `comparison` shows you how ``attrs`` helps you with that and how you can customize it.
+- If you want to put objects into sets or use them as keys in dictionaries, they have to be hashable.
+ The simplest way to do that is to use frozen classes, but the topic is more complex than it seems and `hashing` will give you a primer on what to look out for.
+- Once you're comfortable with the concepts, our `api` contains all information you need to use ``attrs`` to its fullest.
+- ``attrs`` is built for extension from the ground up.
+ `extending` will show you the affordances it offers and how to make it a building block of your own projects.
+
+
+.. include:: ../README.rst
+ :start-after: -getting-help-
+ :end-before: -project-information-
+
+
+----
+
+
+Full Table of Contents
+======================
+
+.. toctree::
+ :maxdepth: 2
+
+ overview
+ why
+ examples
+ types
+ init
+ comparison
+ hashing
+ api
+ extending
+ how-does-it-work
+ names
+ glossary
+
+
+.. include:: ../README.rst
+ :start-after: -project-information-
+
+.. toctree::
+ :maxdepth: 1
+
+ license
+ python-2
+ changelog
+
+
+Indices and tables
+==================
+
+* `genindex`
+* `search`
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/init.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/init.rst
new file mode 100644
index 0000000000..fb276ded8a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/init.rst
@@ -0,0 +1,489 @@
+Initialization
+==============
+
+In Python, instance initialization happens in the ``__init__`` method.
+Generally speaking, you should keep as little logic as possible in it, and you should think about what the class needs and not how it is going to be instantiated.
+
+Passing complex objects into ``__init__`` and then using them to derive data for the class unnecessarily couples your new class with the old class, which makes it harder to test and will also cause problems later.
+
+So assuming you use an ORM and want to extract 2D points from a row object, do not write code like this::
+
+ class Point(object):
+ def __init__(self, database_row):
+ self.x = database_row.x
+ self.y = database_row.y
+
+ pt = Point(row)
+
+Instead, write a `classmethod` that will extract it for you::
+
+ @define
+ class Point:
+ x: float
+ y: float
+
+ @classmethod
+ def from_row(cls, row):
+ return cls(row.x, row.y)
+
+ pt = Point.from_row(row)
+
+Now you can instantiate ``Point``\ s without creating fake row objects in your tests and you can have as many smart creation helpers as you want, in case more data sources appear.
+
+For similar reasons, we strongly discourage patterns like::
+
+ pt = Point(**row.attributes)
+
+which couples your classes to the database data model.
+Try to design your classes in a way that is clean and convenient to use -- not based on your database format.
+The database format can change anytime and you're stuck with a bad class design that is hard to change.
+Embrace functions and classmethods as a filter between reality and what's best for you to work with.
+
+If you're looking for object serialization, there are a number of projects listed on our ``attrs`` extensions `Wiki page`_.
+Some of them even support nested schemas.
+
+
+Private Attributes
+------------------
+
+One thing people tend to find confusing is the treatment of private attributes that start with an underscore.
+``attrs`` follows the doctrine that `there is no such thing as a private argument`_ and strips the underscores from the name when writing the ``__init__`` method signature:
+
+.. doctest::
+
+ >>> import inspect, attr, attrs
+ >>> from attr import define
+ >>> @define
+ ... class C:
+ ... _x: int
+ >>> inspect.signature(C.__init__)
+ <Signature (self, x: int) -> None>
+
+There really isn't a right or wrong, it's a matter of taste.
+But it's important to be aware of it because it can lead to surprising syntax errors:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... _1: int
+ Traceback (most recent call last):
+ ...
+ SyntaxError: invalid syntax
+
+In this case a valid attribute name ``_1`` got transformed into an invalid argument name ``1``.
+
+
+Defaults
+--------
+
+Sometimes you don't want to pass all attribute values to a class.
+And sometimes, certain attributes aren't even intended to be passed but you want to allow for customization anyway for easier testing.
+
+This is when default values come into play:
+
+.. doctest::
+
+ >>> from attr import define, field, Factory
+
+ >>> @define
+ ... class C:
+ ... a: int = 42
+ ... b: list = field(factory=list)
+ ... c: list = Factory(list) # syntactic sugar for above
+ ... d: dict = field()
+ ... @d.default
+ ... def _any_name_except_a_name_of_an_attribute(self):
+ ... return {}
+ >>> C()
+ C(a=42, b=[], c=[], d={})
+
+It's important that the decorated method -- or any other method or property! -- doesn't have the same name as the attribute; otherwise, it would overwrite the attribute definition.
+
+Please note that as with function and method signatures, ``default=[]`` will *not* do what you may think it might do:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x = []
+ >>> i = C()
+ >>> k = C()
+ >>> i.x.append(42)
+ >>> k.x
+ [42]
+
+
+This is why ``attrs`` comes with factory options.
+
+.. warning::
+
+   Please note that the decorator-based defaults have one gotcha:
+   they are executed when the attribute is set, which means that, depending on the order of attributes, the ``self`` object may not be fully initialized when they're called.
+
+ Therefore you should use ``self`` as little as possible.
+
+ Even the smartest of us can `get confused`_ by what happens if you pass partially initialized objects around.
+
+
+.. _validators:
+
+Validators
+----------
+
+Another thing that definitely *does* belong in ``__init__`` is checking the resulting instance for invariants.
+This is why ``attrs`` has the concept of validators.
+
+
+Decorator
+~~~~~~~~~
+
+The most straightforward way is using the attribute's ``validator`` method as a decorator.
+
+The method has to accept three arguments:
+
+#. the *instance* that's being validated (aka ``self``),
+#. the *attribute* that it's validating, and finally
+#. the *value* that is passed for it.
+
+If the value does not pass the validator's standards, it just raises an appropriate exception.
+
+ >>> @define
+ ... class C:
+ ... x: int = field()
+ ... @x.validator
+ ... def _check_x(self, attribute, value):
+ ... if value > 42:
+ ... raise ValueError("x must be smaller or equal to 42")
+ >>> C(42)
+ C(x=42)
+ >>> C(43)
+ Traceback (most recent call last):
+ ...
+ ValueError: x must be smaller or equal to 42
+
+Again, it's important that the decorated method doesn't have the same name as the attribute and that the `attrs.field()` helper is used.
+
+
+Callables
+~~~~~~~~~
+
+If you want to re-use your validators, you should have a look at the ``validator`` argument to `attrs.field`.
+
+It takes either a callable or a list of callables (usually functions) and treats them as validators that receive the same arguments as with the decorator approach.
+
+Since the validators run *after* the instance is initialized, you can refer to other attributes while validating:
+
+.. doctest::
+
+ >>> def x_smaller_than_y(instance, attribute, value):
+ ... if value >= instance.y:
+ ... raise ValueError("'x' has to be smaller than 'y'!")
+ >>> @define
+ ... class C:
+ ... x = field(validator=[attrs.validators.instance_of(int),
+ ... x_smaller_than_y])
+ ... y = field()
+ >>> C(x=3, y=4)
+ C(x=3, y=4)
+ >>> C(x=4, y=3)
+ Traceback (most recent call last):
+ ...
+ ValueError: 'x' has to be smaller than 'y'!
+
+This example also shows off some syntactic sugar for using the `attrs.validators.and_` validator: if you pass a list, all validators have to pass.
+
+``attrs`` won't intercept your changes to those attributes, but you can always call `attrs.validate` on any instance to verify that it's still valid.
+When using `attrs.define` or `attrs.frozen`, ``attrs`` will run the validators even when setting the attribute:
+
+.. doctest::
+
+ >>> i = C(4, 5)
+ >>> i.x = 5
+ Traceback (most recent call last):
+ ...
+ ValueError: 'x' has to be smaller than 'y'!
+
+``attrs`` ships with a bunch of validators, make sure to `check them out <api_validators>` before writing your own:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x = field(validator=attrs.validators.instance_of(int))
+ >>> C(42)
+ C(x=42)
+ >>> C("42")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <type 'int'> (got '42' that is a <type 'str'>).", Attribute(name='x', default=NOTHING, factory=NOTHING, validator=<instance_of validator for type <type 'int'>>, type=None), <type 'int'>, '42')
+
+Of course you can mix and match the two approaches at your convenience.
+If you define validators both ways for an attribute, they are both run:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x = field(validator=attrs.validators.instance_of(int))
+ ... @x.validator
+ ... def fits_byte(self, attribute, value):
+ ... if not 0 <= value < 256:
+ ... raise ValueError("value out of bounds")
+ >>> C(128)
+ C(x=128)
+ >>> C("128")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got '128' that is a <class 'str'>).", Attribute(name='x', default=NOTHING, validator=[<instance_of validator for type <class 'int'>>, <function fits_byte at 0x10fd7a0d0>], repr=True, cmp=True, hash=True, init=True, metadata=mappingproxy({}), type=None, converter=one), <class 'int'>, '128')
+ >>> C(256)
+ Traceback (most recent call last):
+ ...
+ ValueError: value out of bounds
+
+And finally you can disable validators globally:
+
+ >>> attrs.validators.set_disabled(True)
+ >>> C("128")
+ C(x='128')
+ >>> attrs.validators.set_disabled(False)
+ >>> C("128")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got '128' that is a <class 'str'>).", Attribute(name='x', default=NOTHING, validator=[<instance_of validator for type <class 'int'>>, <function fits_byte at 0x10fd7a0d0>], repr=True, cmp=True, hash=True, init=True, metadata=mappingproxy({}), type=None, converter=None), <class 'int'>, '128')
+
+You can achieve the same by using the context manager:
+
+ >>> with attrs.validators.disabled():
+ ... C("128")
+ C(x='128')
+ >>> C("128")
+ Traceback (most recent call last):
+ ...
+ TypeError: ("'x' must be <class 'int'> (got '128' that is a <class 'str'>).", Attribute(name='x', default=NOTHING, validator=[<instance_of validator for type <class 'int'>>, <function fits_byte at 0x10fd7a0d0>], repr=True, cmp=True, hash=True, init=True, metadata=mappingproxy({}), type=None, converter=None), <class 'int'>, '128')
+
+
+.. _converters:
+
+Converters
+----------
+
+Finally, sometimes you may want to normalize the values coming in.
+For that ``attrs`` comes with converters.
+
+Attributes can have a ``converter`` function specified, which will be called with the attribute's passed-in value to get a new value to use.
+This can be useful for doing type-conversions on values that you don't want to force your callers to do.
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x = field(converter=int)
+ >>> o = C("1")
+ >>> o.x
+ 1
+
+Converters are run *before* validators, so you can use validators to check the final form of the value.
+
+.. doctest::
+
+ >>> def validate_x(instance, attribute, value):
+ ... if value < 0:
+ ... raise ValueError("x must be at least 0.")
+ >>> @define
+ ... class C:
+ ... x = field(converter=int, validator=validate_x)
+ >>> o = C("0")
+ >>> o.x
+ 0
+ >>> C("-1")
+ Traceback (most recent call last):
+ ...
+ ValueError: x must be at least 0.
+
+
+Arguably, you can abuse converters as one-argument validators:
+
+.. doctest::
+
+ >>> C("x")
+ Traceback (most recent call last):
+ ...
+ ValueError: invalid literal for int() with base 10: 'x'
+
+
+If a converter's first argument has a type annotation, that type will appear in the signature for ``__init__``.
+A converter will override an explicit type annotation or ``type`` argument.
+
+.. doctest::
+
+ >>> def str2int(x: str) -> int:
+ ... return int(x)
+ >>> @define
+ ... class C:
+ ... x = field(converter=str2int)
+ >>> C.__init__.__annotations__
+ {'return': None, 'x': <class 'str'>}
+
+
+Hooking Yourself Into Initialization
+------------------------------------
+
+Generally speaking, the moment you think that you need finer control over how your class is instantiated than what ``attrs`` offers, it's usually best to use a classmethod factory or to apply the `builder pattern <https://en.wikipedia.org/wiki/Builder_pattern>`_.
+
+However, sometimes you need to do that one quick thing before or after your class is initialized.
+And for that ``attrs`` offers three means:
+
+- ``__attrs_pre_init__`` is automatically detected and run *before* ``attrs`` starts initializing.
+ This is useful if you need to inject a call to ``super().__init__()``.
+- ``__attrs_post_init__`` is automatically detected and run *after* ``attrs`` is done initializing your instance.
+ This is useful if you want to derive some attribute from others or perform some kind of validation over the whole instance.
+- ``__attrs_init__`` is written and attached to your class *instead* of ``__init__``, if ``attrs`` is told to not write one (i.e. ``init=False`` or a combination of ``auto_detect=True`` and a custom ``__init__``).
+ This is useful if you want full control over the initialization process, but don't want to set the attributes by hand.
+
+
+Pre Init
+~~~~~~~~
+
+The sole reason for the existence of ``__attrs_pre_init__`` is to give users the chance to call ``super().__init__()``, because some subclassing-based APIs require that.
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int
+ ... def __attrs_pre_init__(self):
+ ... super().__init__()
+ >>> C(42)
+ C(x=42)
+
+If you need more control, use the custom init approach described next.
+
+
+Custom Init
+~~~~~~~~~~~
+
+If you tell ``attrs`` to not write an ``__init__``, it will write an ``__attrs_init__`` instead, with the same code that it would have used for ``__init__``.
+You have full control over the initialization, but also have to type out the types of your arguments etc.
+Here's an example of a manual default value:
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int
+ ...
+ ... def __init__(self, x: int = 42):
+ ... self.__attrs_init__(x)
+ >>> C()
+ C(x=42)
+
+
+Post Init
+~~~~~~~~~
+
+.. doctest::
+
+ >>> @define
+ ... class C:
+ ... x: int
+ ... y: int = field(init=False)
+ ... def __attrs_post_init__(self):
+ ... self.y = self.x + 1
+ >>> C(1)
+ C(x=1, y=2)
+
+Please note that you can't directly set attributes on frozen classes:
+
+.. doctest::
+
+ >>> @frozen
+ ... class FrozenBroken:
+ ... x: int
+ ... y: int = field(init=False)
+ ... def __attrs_post_init__(self):
+ ... self.y = self.x + 1
+ >>> FrozenBroken(1)
+ Traceback (most recent call last):
+ ...
+ attrs.exceptions.FrozenInstanceError: can't set attribute
+
+If you need to set attributes on a frozen class, you'll have to resort to the `same trick <how-frozen>` as ``attrs`` and use :meth:`object.__setattr__`:
+
+.. doctest::
+
+ >>> @define
+ ... class Frozen:
+ ... x: int
+ ... y: int = field(init=False)
+ ... def __attrs_post_init__(self):
+ ... object.__setattr__(self, "y", self.x + 1)
+ >>> Frozen(1)
+ Frozen(x=1, y=2)
+
+Note that you *must not* access the hash code of the object in ``__attrs_post_init__`` if ``cache_hash=True``.
+
+
+Order of Execution
+------------------
+
+If present, the hooks are executed in the following order:
+
+1. ``__attrs_pre_init__`` (if present on *current* class)
+2. For each attribute, in the order it was declared:
+
+ a. default factory
+ b. converter
+
+3. *all* validators
+4. ``__attrs_post_init__`` (if present on *current* class)
+
+Notably, this means that you can access all attributes from within your validators, but your converters have to deal with invalid values and return a valid value.
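+
+Here's a minimal sketch that makes the order visible (the tracking helpers and the class name are purely illustrative):
+
+.. doctest::
+
+    >>> calls = []
+    >>> def tracked_factory():
+    ...     calls.append("default factory")
+    ...     return "0"
+    >>> def tracked_converter(value):
+    ...     calls.append("converter")
+    ...     return int(value)
+    >>> def tracked_validator(instance, attribute, value):
+    ...     calls.append("validator")
+    >>> @define
+    ... class Traced:
+    ...     x: int = field(
+    ...         factory=tracked_factory,
+    ...         converter=tracked_converter,
+    ...         validator=tracked_validator,
+    ...     )
+    ...     def __attrs_post_init__(self):
+    ...         calls.append("post init")
+    >>> Traced()
+    Traced(x=0)
+    >>> calls
+    ['default factory', 'converter', 'validator', 'post init']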
+
+
+Derived Attributes
+------------------
+
+One of the most common ``attrs`` questions on *Stack Overflow* is how to have attributes that depend on other attributes.
+For example if you have an API token and want to instantiate a web client that uses it for authentication.
+Based on the previous sections, there are two approaches.
+
+The simpler one is using ``__attrs_post_init__``::
+
+ @define
+ class APIClient:
+ token: str
+ client: WebClient = field(init=False)
+
+ def __attrs_post_init__(self):
+ self.client = WebClient(self.token)
+
+The second one is using a decorator-based default::
+
+ @define
+ class APIClient:
+ token: str
+ client: WebClient = field() # needed! attr.ib works too
+
+ @client.default
+ def _client_factory(self):
+ return WebClient(self.token)
+
+That said, and as pointed out at the beginning of the chapter, a better approach would be to use a factory class method::
+
+    @define
+    class APIClient:
+        client: WebClient
+
+        @classmethod
+        def from_token(cls, token: str) -> "APIClient":
+            return cls(client=WebClient(token))
+
+This makes the class more testable.
+
+
+.. _`Wiki page`: https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs
+.. _`get confused`: https://github.com/python-attrs/attrs/issues/289
+.. _`there is no such thing as a private argument`: https://github.com/hynek/characteristic/issues/6
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/license.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/license.rst
new file mode 100644
index 0000000000..a341a31eb9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/license.rst
@@ -0,0 +1,8 @@
+===================
+License and Credits
+===================
+
+``attrs`` is licensed under the `MIT <https://choosealicense.com/licenses/mit/>`_ license.
+The full license text can be also found in the `source code repository <https://github.com/python-attrs/attrs/blob/main/LICENSE>`_.
+
+.. include:: ../AUTHORS.rst
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/names.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/names.rst
new file mode 100644
index 0000000000..0fe953e6a5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/names.rst
@@ -0,0 +1,122 @@
+On The Core API Names
+=====================
+
+You may be surprised to see ``attrs`` classes being created using `attrs.define` and type-annotated fields, instead of `attr.s` and `attr.ib()`.
+
+Or, you wonder why the web and talks are full of this weird `attr.s` and `attr.ib` -- including people having strong opinions about it and using ``attr.attrs`` and ``attr.attrib`` instead.
+
+And what even is ``attr.dataclass`` that's not documented but commonly used!?
+
+
+TL;DR
+-----
+
+We recommend our modern APIs for new code:
+
+- `attrs.define()` to define a new class,
+- `attrs.mutable()` is an alias for `attrs.define()`,
+- `attrs.frozen()` is an alias for ``define(frozen=True)``
+- and `attrs.field()` to define an attribute.
+
+They have been added in ``attrs`` 20.1.0, they are expressive, and they have modern defaults like slots and type annotation awareness switched on by default.
+They are only available in Python 3.6 and later.
+Sometimes they're referred to as *next-generation* or *NG* APIs.
+As of ``attrs`` 21.3.0 you can also import them from the ``attrs`` package namespace.
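+
+In practice, that looks like this (a minimal, illustrative sketch; the class names are made up)::
+
+    from attrs import define, field, frozen
+
+    @define
+    class Order:
+        id: int
+        items: list = field(factory=list)
+
+    @frozen  # shorthand for define(frozen=True)
+    class Money:
+        amount: int
+        currency: str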
+
+The traditional APIs `attr.s` / `attr.ib`, their serious business aliases ``attr.attrs`` / ``attr.attrib``, and the never-documented, but popular ``attr.dataclass`` easter egg will stay **forever**.
+
+``attrs`` will **never** force you to use type annotations.
+
+
+A Short History Lesson
+----------------------
+
+At this point, ``attrs`` is an old project.
+It had its first release in April 2015 -- back when most Python code was on Python 2.7 and Python 3.4 was the first Python 3 release that showed promise.
+``attrs`` was always Python 3-first, but `type annotations <https://www.python.org/dev/peps/pep-0484/>`_ only arrived with Python 3.5, which was released in September 2015, and were largely ignored until years later.
+
+At this time, if you didn't want to implement all the :term:`dunder methods`, the most common way to create a class with some attributes on it was to subclass a `collections.namedtuple`, or one of the many hacks that allowed you to access dictionary keys using attribute lookup.
+
+But ``attrs`` history goes even a bit further back, to the now-forgotten `characteristic <https://github.com/hynek/characteristic>`_ that came out in May 2014 and already used a class decorator, but was overall too unergonomic.
+
+In the wake of all of that, `glyph <https://twitter.com/glyph>`_ and `Hynek <https://twitter.com/hynek>`_ came together on IRC and brainstormed how to take the good ideas of ``characteristic``, but make them easier to use and read.
+At this point the plan was not to make ``attrs`` what it is now -- a flexible class building kit.
+All we wanted was an ergonomic little library to succinctly define classes with attributes.
+
+Under the impression of the unwieldy ``characteristic`` name, we went to the other side and decided to make the package name part of the API, and keep the API functions very short.
+This led to the infamous `attr.s` and `attr.ib`, which some found confusing and pronounced as "attr dot s", or used a singular ``@s`` as the decorator.
+But it was really just a way to say ``attrs`` and ``attrib``\ [#attr]_.
+
+Some people hated this cutey API from day one, which is why we added aliases for them that we called *serious business*: ``@attr.attrs`` and ``attr.attrib()``.
+Fans of them usually imported the names and didn't use the package name in the first place.
+Unfortunately, the ``attr`` package name started creaking the moment we added ``attr.Factory``, since it couldn’t be morphed into something meaningful in any way.
+A problem that grew worse over time, as more APIs and even modules were added.
+
+But overall, ``attrs`` in this shape was a **huge** success -- especially after glyph's blog post `The One Python Library Everyone Needs <https://glyph.twistedmatrix.com/2016/08/attrs.html>`_ in August 2016 and `pytest <https://docs.pytest.org/>`_ adopting it.
+
+Being able to just write::
+
+ @attr.s
+ class Point(object):
+ x = attr.ib()
+ y = attr.ib()
+
+was a big step for those who wanted to write small, focused classes.
+
+
+Dataclasses Enter The Arena
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A big change happened in May 2017 when Hynek sat down with `Guido van Rossum <https://en.wikipedia.org/wiki/Guido_van_Rossum>`_ and `Eric V. Smith <https://github.com/ericvsmith>`_ at PyCon US 2017.
+
+Type annotations for class attributes have `just landed <https://www.python.org/dev/peps/pep-0526/>`_ in Python 3.6 and Guido felt like it would be a good mechanic to introduce something similar to ``attrs`` to the Python standard library.
+The result, of course, was `PEP 557 <https://www.python.org/dev/peps/pep-0557/>`_\ [#stdlib]_ which eventually became the `dataclasses` module in Python 3.7.
+
+``attrs`` at this point was lucky to have several people on board who were also very excited about type annotations and helped implement the support -- including a `Mypy plugin <https://medium.com/@Pilot-EPD-Blog/mypy-and-attrs-e1b0225e9ac6>`_.
+And so it happened that ``attrs`` `shipped <https://www.attrs.org/en/17.3.0.post2/changelog.html>`_ the new method of defining classes more than half a year before Python 3.7 -- and thus `dataclasses` -- were released.
+
+-----
+
+Due to backward-compatibility concerns, this feature (defining attributes via type annotations) is off by default in the `attr.s` decorator and has to be activated using ``@attr.s(auto_attribs=True)``, though.
+As a little easter egg and to save ourselves some typing, we've also `added <https://github.com/python-attrs/attrs/commit/88aa1c897dfe2ee4aa987e4a56f2ba1344a17238#diff-4fc63db1f2fcb7c6e464ee9a77c3c74e90dd191d1c9ffc3bdd1234d3a6663dc0R48>`_ an alias called ``attr.dataclass`` that just sets ``auto_attribs=True``.
+It was never documented, but people found it and used it and loved it.
+
+Over the next months and years it became clear that type annotations had become the popular way to define classes and their attributes.
+However, it has also become clear that some people viscerally hate type annotations.
+We're determined to serve both.
+
+
+``attrs`` TNG
+^^^^^^^^^^^^^
+
+Over its existence, ``attrs`` never stood still.
+But since we also greatly care about backward compatibility and not breaking our users' code, many features and niceties have to be manually activated.
+
+That is not only annoying, it also leads to the problem that many of ``attrs``'s users don't even know what it can do for them.
+We've spent years alone explaining that defining attributes using type annotations is in no way unique to `dataclasses`.
+
+Finally we've decided to take the `Go route <https://go.dev/blog/module-compatibility>`_:
+instead of fiddling with the old APIs -- whose names felt anachronistic anyway -- we'd define new ones, with better defaults.
+So in July 2018, we `looked for better names <https://github.com/python-attrs/attrs/issues/408>`_ and came up with `attr.define`, `attr.field`, and friends.
+Then in January 2019, we `started looking for inconvenient defaults <https://github.com/python-attrs/attrs/issues/487>`_ that we now could fix without any repercussions.
+
+These APIs proved to be very popular, so we finally switched the documentation over to them in November 2021.
+
+All of this took way too long, of course.
+One reason was the COVID-19 pandemic, but another was our fear of fumbling this historic chance to fix our APIs.
+
+Finally, in December 2021, we've added the ``attrs`` package namespace.
+
+We hope you like the result::
+
+ from attrs import define
+
+ @define
+ class Point:
+ x: int
+ y: int
+
+
+.. [#attr] We considered calling the PyPI package just ``attr`` too, but the name was already taken by an *ostensibly* inactive `package on PyPI <https://pypi.org/project/attr/#history>`_.
+.. [#stdlib] The highly readable PEP also explains why ``attrs`` wasn't just added to the standard library.
+ Don't believe the myths and rumors.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/overview.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/overview.rst
new file mode 100644
index 0000000000..b35f66f2dd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/overview.rst
@@ -0,0 +1,58 @@
+========
+Overview
+========
+
+In order to fulfill its ambitious goal of bringing back the joy of writing classes, ``attrs`` gives you a class decorator and a way to declaratively define the attributes on that class:
+
+.. include:: ../README.rst
+ :start-after: -code-begin-
+ :end-before: -getting-help-
+
+
+.. _philosophy:
+
+Philosophy
+==========
+
+**It's about regular classes.**
+ ``attrs`` is for creating well-behaved classes with a type, attributes, methods, and everything that comes with a class.
+ It can be used for data-only containers like ``namedtuple``\ s or ``types.SimpleNamespace`` but they're just a sub-genre of what ``attrs`` is good for.
+
+**The class belongs to the users.**
+   You define a class and ``attrs`` adds methods to that class based on the attributes you declare.
+   The end.
+   It doesn't add metaclasses.
+   It doesn't add classes you've never heard of to your inheritance tree.
+   An ``attrs`` class at runtime is indistinguishable from a regular class, because it *is* a regular class with a few boilerplate-y methods attached.
+
+**Be light on API impact.**
+ As convenient as it seems at first, ``attrs`` will *not* tack on any methods to your classes except for the :term:`dunder ones <dunder methods>`.
+ Hence all the useful `tools <helpers>` that come with ``attrs`` live in functions that operate on top of instances.
+ Since they take an ``attrs`` instance as their first argument, you can attach them to your classes with one line of code.
+
+**Performance matters.**
+   The runtime impact of ``attrs`` is very close to zero because all the work is done when the class is defined.
+   Once you start instantiating it, ``attrs`` is out of the picture completely.
+
+**No surprises.**
+ ``attrs`` creates classes that arguably work the way a Python beginner would reasonably expect them to work.
+ It doesn't try to guess what you mean because explicit is better than implicit.
+ It doesn't try to be clever because software shouldn't be clever.
+
+Check out `how-does-it-work` if you'd like to know how it achieves all of the above.
+
+
+What ``attrs`` Is Not
+=====================
+
+``attrs`` does *not* invent some kind of magic system that pulls classes out of its hat using metaclasses, runtime introspection, and shaky interdependencies.
+
+All ``attrs`` does is:
+
+1. take your declaration,
+2. write :term:`dunder methods` based on that information,
+3. and attach them to your class.
+
+It does *nothing* dynamic at runtime, hence zero runtime overhead.
+It's still *your* class.
+Do with it as you please.
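+
+To make those three steps tangible, here's a minimal sketch (the class name is invented) showing that the result really is a plain class with the dunder methods simply attached to it:
+
+.. doctest::
+
+    >>> import attrs
+
+    >>> @attrs.define
+    ... class C:
+    ...     x: int
+
+    >>> type(C) is type  # no metaclass sneaked in
+    True
+    >>> "__eq__" in C.__dict__ and "__repr__" in C.__dict__  # the dunders are just attached
+    True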
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/python-2.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/python-2.rst
new file mode 100644
index 0000000000..7ec9e5112c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/python-2.rst
@@ -0,0 +1,25 @@
+Python 2 Statement
+==================
+
+While ``attrs`` has always been a Python 3-first package, we the maintainers are aware that Python 2 did not magically disappear in 2020.
+We are also aware that ``attrs`` is an important building block in many people's systems and livelihoods.
+
+As such, we do **not** have any immediate plans to drop Python 2 support in ``attrs``.
+We intend to support it for as long as it is technically feasible for us.
+
+Feasibility in this case means:
+
+1. Possibility to run the tests on our development computers,
+2. and **free** CI options.
+
+This can mean that we will have to run our tests on PyPy, whose maintainers have unequivocally declared that they do not intend to stop the development and maintenance of their Python 2-compatible line at all.
+And this can mean that at some point, a sponsor will have to step up and pay for bespoke CI setups.
+
+**However**: there is no promise of new features coming to ``attrs`` running under Python 2.
+It is up to our discretion alone to decide whether the introduced complexity or awkwardness is worth it, or whether we choose to make a feature available on modern platforms only.
+
+
+Summary
+-------
+
+We will do our best to support existing users, but nobody is entitled to the latest and greatest features on a platform that is officially end of life.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/types.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/types.rst
new file mode 100644
index 0000000000..fbb90a7e93
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/types.rst
@@ -0,0 +1,108 @@
+Type Annotations
+================
+
+``attrs`` comes with first-class support for type annotations, covering both the Python 3.6 variety (:pep:`526`) and the legacy syntax.
+
+However they will forever remain *optional*, therefore the example from the README could also be written as:
+
+.. doctest::
+
+ >>> from attrs import define, field
+
+ >>> @define
+ ... class SomeClass:
+ ... a_number = field(default=42)
+ ... list_of_numbers = field(factory=list)
+
+ >>> sc = SomeClass(1, [1, 2, 3])
+ >>> sc
+ SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
+
+You can choose freely between the approaches, but please remember that if you choose to use type annotations, you **must** annotate **all** attributes!
+
+----
+
+Even when going all-in on type annotations, you will still need `attrs.field` for some advanced features.
+
+One of those features is decorator-based defaults.
+It's important to remember that ``attrs`` doesn't do any magic behind your back.
+All the decorators are implemented using an object that is returned by the call to `attrs.field`.
+
+Attributes that only carry a class annotation do not have that object, so trying to call a method on it will inevitably fail.
+
+*****
+
+Please note that types -- however added -- are *only metadata* that can be queried from the class and they aren't used for anything out of the box!
+
+Because Python does not allow references to a class object before the class is defined,
+types may be defined as string literals, so-called *forward references* (:pep:`526`).
+You can enable this automatically for a whole module by using ``from __future__ import annotations`` (:pep:`563`) as of Python 3.7.
+In this case ``attrs`` simply puts these string literals into the ``type`` attributes.
+If you need to resolve these to real types, you can call `attrs.resolve_types` which will update the attribute in place.
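+
+A minimal, illustrative sketch (the class name is made up; passing ``localns`` explicitly just keeps the example self-contained):
+
+.. doctest::
+
+    >>> import attrs
+    >>> from typing import List
+
+    >>> @attrs.define
+    ... class TreeNode:
+    ...     value: int
+    ...     children: "List[TreeNode]" = attrs.field(factory=list)
+    >>> attrs.fields(TreeNode).children.type
+    'List[TreeNode]'
+    >>> _ = attrs.resolve_types(
+    ...     TreeNode, localns={"TreeNode": TreeNode, "List": List}
+    ... )
+    >>> attrs.fields(TreeNode).children.type == List[TreeNode]
+    True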
+
+In practice though, types show their biggest usefulness in combination with tools like mypy_, pytype_, or pyright_ that have dedicated support for ``attrs`` classes.
+
+The addition of static types is certainly one of the most exciting features in the Python ecosystem and helps you write *correct* and *verified*, self-documenting code.
+
+If you don't know where to start, Carl Meyer gave a great talk on `Type-checked Python in the Real World <https://www.youtube.com/watch?v=pMgmKJyWKn8>`_ at PyCon US 2018 that will help you to get started in no time.
+
+
+mypy
+----
+
+While having a nice syntax for type metadata is great, it's even greater that mypy_ as of 0.570 ships with a dedicated ``attrs`` plugin which allows you to statically check your code.
+
+Imagine you add another line that tries to instantiate the defined class using ``SomeClass("23")``.
+Mypy will catch that error for you:
+
+.. code-block:: console
+
+ $ mypy t.py
+ t.py:12: error: Argument 1 to "SomeClass" has incompatible type "str"; expected "int"
+
+This happens *without* running your code!
+
+And it also works with *both* of the Python 2-compatible annotation styles.
+To mypy, this code is equivalent to the one above:
+
+.. code-block:: python
+
+ @attr.s
+ class SomeClass(object):
+ a_number = attr.ib(default=42) # type: int
+ list_of_numbers = attr.ib(factory=list, type=list[int])
+
+
+pyright
+-------
+
+``attrs`` provides support for pyright_ through the dataclass_transform_ specification.
+This provides static type inference for a subset of ``attrs`` equivalent to standard-library ``dataclasses``,
+and requires explicit type annotations using the `attrs.define` or ``@attr.s(auto_attribs=True)`` API.
+
+Given the following definition, ``pyright`` will generate static type signatures for ``SomeClass`` attribute access, ``__init__``, ``__eq__``, and comparison methods::
+
+ @attr.define
+ class SomeClass:
+ a_number: int = 42
+ list_of_numbers: list[int] = attr.field(factory=list)
+
+.. warning::
+
+ The ``pyright`` inferred types are a subset of those supported by ``mypy``, including:
+
+ - The generated ``__init__`` signature only includes the attribute type annotations.
+ It currently does not include attribute ``converter`` types.
+
+ - The ``attr.frozen`` decorator is not typed with frozen attributes, which are properly typed via ``attr.define(frozen=True)``.
+
+ A `full list <https://github.com/microsoft/pyright/blob/main/specs/dataclass_transforms.md#attrs>`_ of limitations and incompatibilities can be found in pyright's repository.
+
+ Your constructive feedback is welcome in both `attrs#795 <https://github.com/python-attrs/attrs/issues/795>`_ and `pyright#1782 <https://github.com/microsoft/pyright/discussions/1782>`_.
+ Generally speaking, the decision on improving ``attrs`` support in pyright is entirely Microsoft's prerogative though.
+
+
+.. _mypy: http://mypy-lang.org
+.. _pytype: https://google.github.io/pytype/
+.. _pyright: https://github.com/microsoft/pyright
+.. _dataclass_transform: https://github.com/microsoft/pyright/blob/main/specs/dataclass_transforms.md
diff --git a/testing/web-platform/tests/tools/third_party/attrs/docs/why.rst b/testing/web-platform/tests/tools/third_party/attrs/docs/why.rst
new file mode 100644
index 0000000000..2c0ca4cd66
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/docs/why.rst
@@ -0,0 +1,290 @@
+Why not…
+========
+
+
+If you'd like a third party's account of why ``attrs`` is great, have a look at Glyph's `The One Python Library Everyone Needs <https://glyph.twistedmatrix.com/2016/08/attrs.html>`_!
+
+
+…Data Classes?
+--------------
+
+:pep:`557` added Data Classes to `Python 3.7 <https://docs.python.org/3.7/whatsnew/3.7.html#dataclasses>`_ that resemble ``attrs`` in many ways.
+
+They are the result of the Python community's `wish <https://mail.python.org/pipermail/python-ideas/2017-May/045618.html>`_ to have an easier way to write classes in the standard library that doesn't carry the problems of ``namedtuple``\ s.
+To that end, ``attrs`` and its developers were involved in the PEP process and while we may disagree with some minor decisions that have been made, it's a fine addition, and if it stops you from abusing ``namedtuple``\ s, it's a huge win.
+
+Nevertheless, there are still reasons to prefer ``attrs`` over Data Classes.
+Whether they're relevant to *you* depends on your circumstances:
+
+- Data Classes are *intentionally* less powerful than ``attrs``.
+ There is a long list of features that were sacrificed for the sake of simplicity and while the most obvious ones are validators, converters, :ref:`equality customization <custom-comparison>`, or :doc:`extensibility <extending>` in general, it permeates throughout all APIs.
+
+ On the other hand, Data Classes currently do not offer any significant feature that ``attrs`` doesn't already have.
+- ``attrs`` supports all mainstream Python versions, including CPython 2.7 and PyPy.
+- ``attrs`` doesn't force type annotations on you if you don't like them.
+- But since it **also** supports typing, it's the best way to embrace type hints *gradually*, too.
+- While Data Classes keep adopting features from ``attrs`` every now and then, their presence is dependent on the Python version, not the package version.
+  For example, support for ``__slots__`` was only added in Python 3.10.
+  That is especially painful for PyPI packages that support multiple Python versions.
+  The same goes for any implementation bugs: fixes arrive only with new Python releases.
+- ``attrs`` can and will move faster.
+ We are not bound to any release schedules and we have a clear deprecation policy.
+
+ One of the `reasons <https://www.python.org/dev/peps/pep-0557/#why-not-just-use-attrs>`_ to not vendor ``attrs`` in the standard library was to not impede ``attrs``'s future development.
+
+One way to think about ``attrs`` vs Data Classes is that ``attrs`` is a fully-fledged toolkit to write powerful classes while Data Classes are an easy way to get a class with some attributes.
+Basically what ``attrs`` was in 2015.
+
+
+…pydantic?
+----------
+
+*pydantic* is first and foremost a *data validation library*.
+As such, it is a capable complement to class building libraries like ``attrs`` (or Data Classes!) for parsing and validating untrusted data.
+
+However, as convenient as it might be, using it for your business or data layer `is problematic in several ways <https://threeofwands.com/why-i-use-attrs-instead-of-pydantic/>`_:
+Is it really necessary to re-validate all your objects while reading them from a trusted database?
+In the parlance of `Form, Command, and Model Validation <https://verraes.net/2015/02/form-command-model-validation/>`_, *pydantic* is the right tool for *Commands*.
+
+`Separation of concerns <https://en.wikipedia.org/wiki/Separation_of_concerns>`_ feels tedious at times, but it's one of those things that you get to appreciate once you've shot your own foot often enough.
+
+
+…namedtuples?
+-------------
+
+`collections.namedtuple`\ s are tuples with names, not classes. [#history]_
+Since writing classes is tiresome in Python, every now and then someone discovers all the typing they could save and gets really excited.
+However, that convenience comes at a price.
+
+The most obvious difference between ``namedtuple``\ s and ``attrs``-based classes is that the latter are type-sensitive:
+
+.. doctest::
+
+ >>> import attr
+ >>> C1 = attr.make_class("C1", ["a"])
+ >>> C2 = attr.make_class("C2", ["a"])
+ >>> i1 = C1(1)
+ >>> i2 = C2(1)
+ >>> i1.a == i2.a
+ True
+ >>> i1 == i2
+ False
+
+…while a ``namedtuple`` is *intentionally* `behaving like a tuple`_, which means the type of a tuple is *ignored*:
+
+.. doctest::
+
+ >>> from collections import namedtuple
+ >>> NT1 = namedtuple("NT1", "a")
+ >>> NT2 = namedtuple("NT2", "b")
+ >>> t1 = NT1(1)
+ >>> t2 = NT2(1)
+ >>> t1 == t2 == (1,)
+ True
+
+Other often surprising behaviors include:
+
+- Since they are a subclass of tuples, ``namedtuple``\ s have a length and are both iterable and indexable.
+ That's not what you'd expect from a class and is likely to shadow subtle typo bugs.
+- Iterability also implies that it's easy to accidentally unpack a ``namedtuple`` which leads to hard-to-find bugs. [#iter]_
+- ``namedtuple``\ s have their methods *on your instances* whether you like it or not. [#pollution]_
+- ``namedtuple``\ s are *always* immutable.
+ Not only does that mean that you can't decide for yourself whether your instances should be immutable or not, it also means that if you want to influence your class' initialization (validation? default values?), you have to implement :meth:`__new__() <object.__new__>` which is a particularly hacky and error-prone requirement for a very common problem. [#immutable]_
+- To attach methods to a ``namedtuple`` you have to subclass it.
+ And if you follow the standard library documentation's recommendation of::
+
+ class Point(namedtuple('Point', ['x', 'y'])):
+ # ...
+
+ you end up with a class that has *two* ``Point``\ s in its :attr:`__mro__ <class.__mro__>`: ``[<class 'point.Point'>, <class 'point.Point'>, <type 'tuple'>, <type 'object'>]``.
+
+ That's not only confusing, it also has very practical consequences:
+ for example if you create documentation that includes class hierarchies like `Sphinx's autodoc <https://www.sphinx-doc.org/en/stable/usage/extensions/autodoc.html>`_ with ``show-inheritance``.
+ Again: common problem, hacky solution with confusing fallout.
+
+All these things make ``namedtuple``\ s a particularly poor choice for public APIs because all your objects are irrevocably tainted.
+With ``attrs`` your users won't notice a difference because it creates regular, well-behaved classes.
+
+.. admonition:: Summary
+
+ If you want a *tuple with names*, by all means: go for a ``namedtuple``. [#perf]_
+ But if you want a class with methods, you're doing yourself a disservice by relying on a pile of hacks that requires you to employ even more hacks as your requirements expand.
+
+ Other than that, ``attrs`` also adds nifty features like validators, converters, and (mutable!) default values.
+
+
+.. rubric:: Footnotes
+
+.. [#history] The word is that ``namedtuple``\ s were added to the Python standard library as a way to make tuples in return values more readable.
+ And indeed that is something you see throughout the standard library.
+
+ Looking at what the makers of ``namedtuple``\ s use it for themselves is a good guideline for deciding on your own use cases.
+.. [#pollution] ``attrs`` only adds a single attribute: ``__attrs_attrs__`` for introspection.
+ All helpers are functions in the ``attr`` package.
+ Since they take the instance as first argument, you can easily attach them to your classes under a name of your own choice.
+.. [#iter] `attr.astuple` can be used to get that behavior in ``attrs`` on *explicit demand*.
+.. [#immutable] ``attrs`` offers *optional* immutability through the ``frozen`` keyword.
+.. [#perf] Although ``attrs`` would serve you just as well!
+ Since both employ the same method of writing and compiling Python code for you, the performance penalty is negligible at worst and in some cases ``attrs`` is even faster if you use ``slots=True`` (which is generally a good idea anyway).
+
+.. _behaving like a tuple: https://docs.python.org/3/tutorial/datastructures.html#tuples-and-sequences
+
+
+…tuples?
+--------
+
+Readability
+^^^^^^^^^^^
+
+What makes more sense while debugging::
+
+ Point(x=1, y=2)
+
+or::
+
+ (1, 2)
+
+?
+
+Let's add even more ambiguity::
+
+ Customer(id=42, reseller=23, first_name="Jane", last_name="John")
+
+or::
+
+ (42, 23, "Jane", "John")
+
+?
+
+Why would you want to write ``customer[2]`` instead of ``customer.first_name``?
+
+Don't get me started when you add nesting.
+If you've never run into mysterious tuples you had no idea what the hell they meant while debugging, you're much smarter than yours truly.
+
+Using proper classes with names and types makes program code much more readable and comprehensible_.
+Especially when trying to grok a new piece of software or returning to old code after several months.
+
+.. _comprehensible: https://arxiv.org/pdf/1304.5257.pdf
+
+
+Extendability
+^^^^^^^^^^^^^
+
+Imagine you have a function that takes or returns a tuple.
+Especially if you use tuple unpacking (e.g. ``x, y = get_point()``), adding additional data means that you have to change the invocation of that function *everywhere*.
+
+Adding an attribute to a class concerns only those who actually care about that attribute.
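+
+A small, made-up sketch of the difference::
+
+    import attr
+
+    # Tuple-based API: every caller that unpacks the result breaks
+    # as soon as the function starts returning a third value.
+    def get_point_as_tuple():
+        return 1, 2
+
+    x, y = get_point_as_tuple()
+
+    # Class-based API: adding another attribute to Point later on
+    # leaves existing callers untouched.
+    @attr.s
+    class Point(object):
+        x = attr.ib()
+        y = attr.ib()
+
+    def get_point():
+        return Point(x=1, y=2)
+
+    p = get_point()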
+
+
+…dicts?
+-------
+
+Dictionaries are not for fixed fields.
+
+If you have a dict, it maps something to something else.
+You should be able to add and remove values.
+
+``attrs`` lets you be specific about those expectations; a dictionary does not.
+It gives you a named entity (the class) in your code, which lets you explain in other places whether you take a parameter of that class or return a value of that class.
+
+In other words: if your dict has a fixed and known set of keys, it is an object, not a hash.
+So if you never iterate over the keys of a dict, you should use a proper class.
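+
+To make that concrete, here's an illustrative sketch reusing the field names from the ``Customer`` example above::
+
+    import attr
+
+    # A fixed, known set of keys dressed up as a mapping --
+    # nothing documents the shape, and a misspelled key only fails at runtime.
+    customer = {"id": 42, "reseller": 23, "first_name": "Jane", "last_name": "John"}
+
+    # The same record as a named entity that signatures and docs can refer to.
+    @attr.s
+    class Customer(object):
+        id = attr.ib()
+        reseller = attr.ib()
+        first_name = attr.ib()
+        last_name = attr.ib()
+
+    customer = Customer(id=42, reseller=23, first_name="Jane", last_name="John")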
+
+
+…hand-written classes?
+----------------------
+
+While we're fans of all things artisanal, writing the same nine methods again and again doesn't qualify.
+I usually manage to get some typos inside and there's simply more code that can break and thus has to be tested.
+
+To put it into perspective, the equivalent of
+
+.. doctest::
+
+ >>> @attr.s
+ ... class SmartClass(object):
+ ... a = attr.ib()
+ ... b = attr.ib()
+ >>> SmartClass(1, 2)
+ SmartClass(a=1, b=2)
+
+is roughly
+
+.. doctest::
+
+ >>> class ArtisanalClass(object):
+ ... def __init__(self, a, b):
+ ... self.a = a
+ ... self.b = b
+ ...
+ ... def __repr__(self):
+ ... return "ArtisanalClass(a={}, b={})".format(self.a, self.b)
+ ...
+ ... def __eq__(self, other):
+ ... if other.__class__ is self.__class__:
+ ... return (self.a, self.b) == (other.a, other.b)
+ ... else:
+ ... return NotImplemented
+ ...
+ ... def __ne__(self, other):
+ ... result = self.__eq__(other)
+ ... if result is NotImplemented:
+ ... return NotImplemented
+ ... else:
+ ... return not result
+ ...
+ ... def __lt__(self, other):
+ ... if other.__class__ is self.__class__:
+ ... return (self.a, self.b) < (other.a, other.b)
+ ... else:
+ ... return NotImplemented
+ ...
+ ... def __le__(self, other):
+ ... if other.__class__ is self.__class__:
+ ... return (self.a, self.b) <= (other.a, other.b)
+ ... else:
+ ... return NotImplemented
+ ...
+ ... def __gt__(self, other):
+ ... if other.__class__ is self.__class__:
+ ... return (self.a, self.b) > (other.a, other.b)
+ ... else:
+ ... return NotImplemented
+ ...
+ ... def __ge__(self, other):
+ ... if other.__class__ is self.__class__:
+ ... return (self.a, self.b) >= (other.a, other.b)
+ ... else:
+ ... return NotImplemented
+ ...
+ ... def __hash__(self):
+ ... return hash((self.__class__, self.a, self.b))
+ >>> ArtisanalClass(a=1, b=2)
+ ArtisanalClass(a=1, b=2)
+
+which is quite a mouthful and it doesn't even use any of ``attrs``'s more advanced features like validators or default values.
+Also: no tests whatsoever.
+And who will guarantee you that you don't accidentally flip the ``<`` in your tenth implementation of ``__gt__``?
+
+It also should be noted that ``attrs`` is not an all-or-nothing solution.
+You can freely choose which features you want and disable those that you want more control over:
+
+.. doctest::
+
+ >>> @attr.s(repr=False)
+ ... class SmartClass(object):
+ ... a = attr.ib()
+ ... b = attr.ib()
+ ...
+ ... def __repr__(self):
+ ... return "<SmartClass(a=%d)>" % (self.a,)
+ >>> SmartClass(1, 2)
+ <SmartClass(a=1)>
+
+.. admonition:: Summary
+
+ If you don't care and like typing, we're not gonna stop you.
+
+ However it takes a lot of bias and determined rationalization to claim that ``attrs`` raises the mental burden on a project given how difficult it is to find the important bits in a hand-written class and how annoying it is to ensure you've copy-pasted your code correctly over all your classes.
+
+ In any case, if you ever get sick of the repetitiveness and drowning important code in a sea of boilerplate, ``attrs`` will be waiting for you.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/mypy.ini b/testing/web-platform/tests/tools/third_party/attrs/mypy.ini
new file mode 100644
index 0000000000..685c02599f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+disallow_untyped_defs = True
+check_untyped_defs = True
diff --git a/testing/web-platform/tests/tools/third_party/attrs/pyproject.toml b/testing/web-platform/tests/tools/third_party/attrs/pyproject.toml
new file mode 100644
index 0000000000..52c0e49ec2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/pyproject.toml
@@ -0,0 +1,71 @@
+[build-system]
+requires = ["setuptools>=40.6.0", "wheel"]
+build-backend = "setuptools.build_meta"
+
+
+[tool.coverage.run]
+parallel = true
+branch = true
+source = ["attr", "attrs"]
+
+[tool.coverage.paths]
+source = ["src", ".tox/*/site-packages"]
+
+[tool.coverage.report]
+show_missing = true
+exclude_lines = [
+ "pragma: no cover",
+ # PyPy is unacceptably slow under coverage.
+ "if PYPY:",
+]
+
+
+[tool.black]
+line-length = 79
+extend-exclude = '''
+# Exclude pattern matching test till black gains Python 3.10 support
+.*test_pattern_matching.*
+'''
+
+
+[tool.interrogate]
+verbose = 2
+fail-under = 100
+whitelist-regex = ["test_.*"]
+
+
+[tool.check-wheel-contents]
+toplevel = ["attr", "attrs"]
+
+
+[tool.isort]
+profile = "attrs"
+
+
+[tool.towncrier]
+ package = "attr"
+ package_dir = "src"
+ filename = "CHANGELOG.rst"
+ template = "changelog.d/towncrier_template.rst"
+ issue_format = "`#{issue} <https://github.com/python-attrs/attrs/issues/{issue}>`_"
+ directory = "changelog.d"
+ title_format = "{version} ({project_date})"
+ underlines = ["-", "^"]
+
+ [[tool.towncrier.section]]
+ path = ""
+
+ [[tool.towncrier.type]]
+ directory = "breaking"
+ name = "Backward-incompatible Changes"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "deprecation"
+ name = "Deprecations"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "change"
+ name = "Changes"
+ showcontent = true
diff --git a/testing/web-platform/tests/tools/third_party/attrs/setup.py b/testing/web-platform/tests/tools/third_party/attrs/setup.py
new file mode 100644
index 0000000000..00e7b012ae
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/setup.py
@@ -0,0 +1,151 @@
+# SPDX-License-Identifier: MIT
+
+import codecs
+import os
+import platform
+import re
+import sys
+
+from setuptools import find_packages, setup
+
+
+###############################################################################
+
+NAME = "attrs"
+PACKAGES = find_packages(where="src")
+META_PATH = os.path.join("src", "attr", "__init__.py")
+KEYWORDS = ["class", "attribute", "boilerplate"]
+PROJECT_URLS = {
+ "Documentation": "https://www.attrs.org/",
+ "Changelog": "https://www.attrs.org/en/stable/changelog.html",
+ "Bug Tracker": "https://github.com/python-attrs/attrs/issues",
+ "Source Code": "https://github.com/python-attrs/attrs",
+ "Funding": "https://github.com/sponsors/hynek",
+ "Tidelift": "https://tidelift.com/subscription/pkg/pypi-attrs?"
+ "utm_source=pypi-attrs&utm_medium=pypi",
+ "Ko-fi": "https://ko-fi.com/the_hynek",
+}
+CLASSIFIERS = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "Natural Language :: English",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+]
+INSTALL_REQUIRES = []
+EXTRAS_REQUIRE = {
+ "docs": ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"],
+ "tests_no_zope": [
+ # For regression test to ensure cloudpickle compat doesn't break.
+ 'cloudpickle; python_implementation == "CPython"',
+ # 5.0 introduced toml; parallel was broken until 5.0.2
+ "coverage[toml]>=5.0.2",
+ "hypothesis",
+ "pympler",
+ "pytest>=4.3.0", # 4.3.0 dropped last use of `convert`
+ "six",
+ ],
+}
+if (
+ sys.version_info[:2] >= (3, 6)
+ and platform.python_implementation() != "PyPy"
+):
+ EXTRAS_REQUIRE["tests_no_zope"].extend(["mypy", "pytest-mypy-plugins"])
+
+EXTRAS_REQUIRE["tests"] = EXTRAS_REQUIRE["tests_no_zope"] + ["zope.interface"]
+EXTRAS_REQUIRE["dev"] = (
+ EXTRAS_REQUIRE["tests"] + EXTRAS_REQUIRE["docs"] + ["pre-commit"]
+)
+
+###############################################################################
+
+HERE = os.path.abspath(os.path.dirname(__file__))
+
+
+def read(*parts):
+ """
+ Build an absolute path from *parts* and return the contents of the
+ resulting file. Assume UTF-8 encoding.
+ """
+ with codecs.open(os.path.join(HERE, *parts), "rb", "utf-8") as f:
+ return f.read()
+
+
+META_FILE = read(META_PATH)
+
+
+def find_meta(meta):
+ """
+ Extract __*meta*__ from META_FILE.
+ """
+ meta_match = re.search(
+ r"^__{meta}__ = ['\"]([^'\"]*)['\"]".format(meta=meta), META_FILE, re.M
+ )
+ if meta_match:
+ return meta_match.group(1)
+ raise RuntimeError("Unable to find __{meta}__ string.".format(meta=meta))
+
+
+LOGO = """
+.. image:: https://www.attrs.org/en/stable/_static/attrs_logo.png
+ :alt: attrs logo
+ :align: center
+""" # noqa
+
+VERSION = find_meta("version")
+URL = find_meta("url")
+LONG = (
+ LOGO
+ + read("README.rst").split(".. teaser-begin")[1]
+ + "\n\n"
+ + "Release Information\n"
+ + "===================\n\n"
+ + re.search(
+ r"(\d+.\d.\d \(.*?\)\r?\n.*?)\r?\n\r?\n\r?\n----\r?\n\r?\n\r?\n",
+ read("CHANGELOG.rst"),
+ re.S,
+ ).group(1)
+ + "\n\n`Full changelog "
+ + "<{url}en/stable/changelog.html>`_.\n\n".format(url=URL)
+ + read("AUTHORS.rst")
+)
+
+
+if __name__ == "__main__":
+ setup(
+ name=NAME,
+ description=find_meta("description"),
+ license=find_meta("license"),
+ url=URL,
+ project_urls=PROJECT_URLS,
+ version=VERSION,
+ author=find_meta("author"),
+ author_email=find_meta("email"),
+ maintainer=find_meta("author"),
+ maintainer_email=find_meta("email"),
+ keywords=KEYWORDS,
+ long_description=LONG,
+ long_description_content_type="text/x-rst",
+ packages=PACKAGES,
+ package_dir={"": "src"},
+ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
+ zip_safe=False,
+ classifiers=CLASSIFIERS,
+ install_requires=INSTALL_REQUIRES,
+ extras_require=EXTRAS_REQUIRE,
+ include_package_data=True,
+ options={"bdist_wheel": {"universal": "1"}},
+ )
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.py
new file mode 100644
index 0000000000..f95c96dd57
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.py
@@ -0,0 +1,80 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+from functools import partial
+
+from . import converters, exceptions, filters, setters, validators
+from ._cmp import cmp_using
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
+from ._make import (
+ NOTHING,
+ Attribute,
+ Factory,
+ attrib,
+ attrs,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
+)
+from ._version_info import VersionInfo
+
+
+__version__ = "21.4.0"
+__version_info__ = VersionInfo._from_version_string(__version__)
+
+__title__ = "attrs"
+__description__ = "Classes Without Boilerplate"
+__url__ = "https://www.attrs.org/"
+__uri__ = __url__
+__doc__ = __description__ + " <" + __uri__ + ">"
+
+__author__ = "Hynek Schlawack"
+__email__ = "hs@ox.cx"
+
+__license__ = "MIT"
+__copyright__ = "Copyright (c) 2015 Hynek Schlawack"
+
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
+
+__all__ = [
+ "Attribute",
+ "Factory",
+ "NOTHING",
+ "asdict",
+ "assoc",
+ "astuple",
+ "attr",
+ "attrib",
+ "attributes",
+ "attrs",
+ "cmp_using",
+ "converters",
+ "evolve",
+ "exceptions",
+ "fields",
+ "fields_dict",
+ "filters",
+ "get_run_validators",
+ "has",
+ "ib",
+ "make_class",
+ "resolve_types",
+ "s",
+ "set_run_validators",
+ "setters",
+ "validate",
+ "validators",
+]
+
+if sys.version_info[:2] >= (3, 6):
+ from ._next_gen import define, field, frozen, mutable # noqa: F401
+
+ __all__.extend(("define", "field", "frozen", "mutable"))
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.pyi
new file mode 100644
index 0000000000..c0a2126503
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/__init__.pyi
@@ -0,0 +1,484 @@
+import sys
+
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Generic,
+ List,
+ Mapping,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+# `import X as X` is required to make these public
+from . import converters as converters
+from . import exceptions as exceptions
+from . import filters as filters
+from . import setters as setters
+from . import validators as validators
+from ._version_info import VersionInfo
+
+__version__: str
+__version_info__: VersionInfo
+__title__: str
+__description__: str
+__url__: str
+__uri__: str
+__author__: str
+__email__: str
+__license__: str
+__copyright__: str
+
+_T = TypeVar("_T")
+_C = TypeVar("_C", bound=type)
+
+_EqOrderType = Union[bool, Callable[[Any], Any]]
+_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
+_ConverterType = Callable[[Any], Any]
+_FilterType = Callable[[Attribute[_T], _T], bool]
+_ReprType = Callable[[Any], str]
+_ReprArgType = Union[bool, _ReprType]
+_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
+_OnSetAttrArgType = Union[
+ _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
+]
+_FieldTransformer = Callable[
+ [type, List[Attribute[Any]]], List[Attribute[Any]]
+]
+_CompareWithType = Callable[[Any, Any], bool]
+# FIXME: in reality, if multiple validators are passed they must be in a list
+# or tuple, but those are invariant and so would prevent subtypes of
+# _ValidatorType from working when passed in a list or tuple.
+_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
+
+# _make --
+
+NOTHING: object
+
+# NOTE: Factory lies about its return type to make this possible:
+# `x: List[int] # = Factory(list)`
+# Work around mypy issue #4554 in the common case by using an overload.
+if sys.version_info >= (3, 8):
+ from typing import Literal
+ @overload
+ def Factory(factory: Callable[[], _T]) -> _T: ...
+ @overload
+ def Factory(
+ factory: Callable[[Any], _T],
+ takes_self: Literal[True],
+ ) -> _T: ...
+ @overload
+ def Factory(
+ factory: Callable[[], _T],
+ takes_self: Literal[False],
+ ) -> _T: ...
+
+else:
+ @overload
+ def Factory(factory: Callable[[], _T]) -> _T: ...
+ @overload
+ def Factory(
+ factory: Union[Callable[[Any], _T], Callable[[], _T]],
+ takes_self: bool = ...,
+ ) -> _T: ...
+
+# Static type inference support via __dataclass_transform__ implemented as per:
+# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
+# This annotation must be applied to all overloads of "define" and "attrs"
+#
+# NOTE: This is a typing construct and does not exist at runtime. Extensions
+# wrapping attrs decorators should declare a separate __dataclass_transform__
+# signature in the extension module using the specification linked above to
+# provide pyright support.
+def __dataclass_transform__(
+ *,
+ eq_default: bool = True,
+ order_default: bool = False,
+ kw_only_default: bool = False,
+ field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
+) -> Callable[[_T], _T]: ...
+
+class Attribute(Generic[_T]):
+ name: str
+ default: Optional[_T]
+ validator: Optional[_ValidatorType[_T]]
+ repr: _ReprArgType
+ cmp: _EqOrderType
+ eq: _EqOrderType
+ order: _EqOrderType
+ hash: Optional[bool]
+ init: bool
+ converter: Optional[_ConverterType]
+ metadata: Dict[Any, Any]
+ type: Optional[Type[_T]]
+ kw_only: bool
+ on_setattr: _OnSetAttrType
+ def evolve(self, **changes: Any) -> "Attribute[Any]": ...
+
+# NOTE: We had several choices for the annotation to use for type arg:
+# 1) Type[_T]
+# - Pros: Handles simple cases correctly
+# - Cons: Might produce less informative errors in the case of conflicting
+# TypeVars e.g. `attr.ib(default='bad', type=int)`
+# 2) Callable[..., _T]
+# - Pros: Better error messages than #1 for conflicting TypeVars
+# - Cons: Terrible error messages for validator checks.
+# e.g. attr.ib(type=int, validator=validate_str)
+# -> error: Cannot infer function type argument
+# 3) type (and do all of the work in the mypy plugin)
+# - Pros: Simple here, and we could customize the plugin with our own errors.
+# - Cons: Would need to write mypy plugin code to handle all the cases.
+# We chose option #1.
+
+# `attr` lies about its return type to make the following possible:
+# attr() -> Any
+# attr(8) -> int
+# attr(validator=<some callable>) -> Whatever the callable expects.
+# This makes this type of assignments possible:
+# x: int = attr(8)
+#
+# This form catches explicit None or no default but with no other arguments
+# returns Any.
+@overload
+def attrib(
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: None = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def attrib(
+ default: None = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: Optional[Type[_T]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def attrib(
+ default: _T,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: Optional[Type[_T]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def attrib(
+ default: Optional[_T] = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: object = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def field(
+ *,
+ default: _T,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def field(
+ *,
+ default: Optional[_T] = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
+def attrs(
+ maybe_cls: _C,
+ these: Optional[Dict[str, Any]] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
+def attrs(
+ maybe_cls: None = ...,
+ these: Optional[Dict[str, Any]] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+@overload
+@__dataclass_transform__(field_descriptors=(attrib, field))
+def define(
+ maybe_cls: _C,
+ *,
+ these: Optional[Dict[str, Any]] = ...,
+ repr: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@__dataclass_transform__(field_descriptors=(attrib, field))
+def define(
+ maybe_cls: None = ...,
+ *,
+ these: Optional[Dict[str, Any]] = ...,
+ repr: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+
+mutable = define
+frozen = define # they differ only in their defaults
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+class _Fields(Tuple[Attribute[Any], ...]):
+ def __getattr__(self, name: str) -> Attribute[Any]: ...
+
+def fields(cls: type) -> _Fields: ...
+def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ...
+def validate(inst: Any) -> None: ...
+def resolve_types(
+ cls: _C,
+ globalns: Optional[Dict[str, Any]] = ...,
+ localns: Optional[Dict[str, Any]] = ...,
+ attribs: Optional[List[Attribute[Any]]] = ...,
+) -> _C: ...
+
+# TODO: add support for returning a proper attrs class from the mypy plugin
+# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
+# [attr.ib()])` is valid
+def make_class(
+ name: str,
+ attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
+ bases: Tuple[type, ...] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ collect_by_mro: bool = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+) -> type: ...
+
+# _funcs --
+
+# TODO: add support for returning TypedDict from the mypy plugin
+# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
+# these:
+# https://github.com/python/mypy/issues/4236
+# https://github.com/python/typing/issues/253
+# XXX: remember to fix attrs.asdict/astuple too!
+def asdict(
+ inst: Any,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ dict_factory: Type[Mapping[Any, Any]] = ...,
+ retain_collection_types: bool = ...,
+ value_serializer: Optional[
+ Callable[[type, Attribute[Any], Any], Any]
+ ] = ...,
+ tuple_keys: Optional[bool] = ...,
+) -> Dict[str, Any]: ...
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+def astuple(
+ inst: Any,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ tuple_factory: Type[Sequence[Any]] = ...,
+ retain_collection_types: bool = ...,
+) -> Tuple[Any, ...]: ...
+def has(cls: type) -> bool: ...
+def assoc(inst: _T, **changes: Any) -> _T: ...
+def evolve(inst: _T, **changes: Any) -> _T: ...
+
+# _config --
+
+def set_run_validators(run: bool) -> None: ...
+def get_run_validators() -> bool: ...
+
+# aliases --
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.py
new file mode 100644
index 0000000000..6cffa4dbab
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.py
@@ -0,0 +1,154 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import functools
+
+from ._compat import new_class
+from ._make import _make_ne
+
+
+_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
+
+
+def cmp_using(
+ eq=None,
+ lt=None,
+ le=None,
+ gt=None,
+ ge=None,
+ require_same_type=True,
+ class_name="Comparable",
+):
+ """
+ Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and
+ ``cmp`` arguments to customize field comparison.
+
+ The resulting class will have a full set of ordering methods if
+ ``eq`` and at least one of ``{lt, le, gt, ge}`` are provided.
+
+ :param Optional[callable] eq: `callable` used to evaluate equality
+ of two objects.
+ :param Optional[callable] lt: `callable` used to evaluate whether
+ one object is less than another object.
+ :param Optional[callable] le: `callable` used to evaluate whether
+ one object is less than or equal to another object.
+ :param Optional[callable] gt: `callable` used to evaluate whether
+ one object is greater than another object.
+ :param Optional[callable] ge: `callable` used to evaluate whether
+ one object is greater than or equal to another object.
+
+ :param bool require_same_type: When `True`, equality and ordering methods
+ will return `NotImplemented` if objects are not of the same type.
+
+ :param Optional[str] class_name: Name of class. Defaults to 'Comparable'.
+
+ See `comparison` for more details.
+
+ .. versionadded:: 21.1.0
+ """
+
+ body = {
+ "__slots__": ["value"],
+ "__init__": _make_init(),
+ "_requirements": [],
+ "_is_comparable_to": _is_comparable_to,
+ }
+
+ # Add operations.
+ num_order_functions = 0
+ has_eq_function = False
+
+ if eq is not None:
+ has_eq_function = True
+ body["__eq__"] = _make_operator("eq", eq)
+ body["__ne__"] = _make_ne()
+
+ if lt is not None:
+ num_order_functions += 1
+ body["__lt__"] = _make_operator("lt", lt)
+
+ if le is not None:
+ num_order_functions += 1
+ body["__le__"] = _make_operator("le", le)
+
+ if gt is not None:
+ num_order_functions += 1
+ body["__gt__"] = _make_operator("gt", gt)
+
+ if ge is not None:
+ num_order_functions += 1
+ body["__ge__"] = _make_operator("ge", ge)
+
+ type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body))
+
+ # Add same type requirement.
+ if require_same_type:
+ type_._requirements.append(_check_same_type)
+
+ # Add total ordering if at least one operation was defined.
+ if 0 < num_order_functions < 4:
+ if not has_eq_function:
+ # functools.total_ordering requires __eq__ to be defined,
+ # so raise an early error here to keep a nice stack trace.
+ raise ValueError(
+ "eq must be defined in order to complete ordering from "
+ "lt, le, gt, ge."
+ )
+ type_ = functools.total_ordering(type_)
+
+ return type_
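+
+# Illustrative usage sketch (editorial, not from the upstream attrs sources):
+# the class returned by cmp_using() above is passed to attr.ib()'s ``eq`` /
+# ``order`` / ``cmp`` arguments. The names ``CaseInsensitive`` and ``Name``
+# are hypothetical.
+#
+#   import attr
+#
+#   CaseInsensitive = attr.cmp_using(
+#       eq=lambda a, b: a.lower() == b.lower(),
+#       class_name="CaseInsensitive",
+#   )
+#
+#   @attr.s
+#   class Name(object):
+#       value = attr.ib(eq=CaseInsensitive)
+#
+#   assert Name("Alice") == Name("ALICE")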
+
+
+def _make_init():
+ """
+ Create __init__ method.
+ """
+
+ def __init__(self, value):
+ """
+ Initialize object with *value*.
+ """
+ self.value = value
+
+ return __init__
+
+
+def _make_operator(name, func):
+ """
+ Create operator method.
+ """
+
+ def method(self, other):
+ if not self._is_comparable_to(other):
+ return NotImplemented
+
+ result = func(self.value, other.value)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return result
+
+ method.__name__ = "__%s__" % (name,)
+ method.__doc__ = "Return a %s b. Computed by attrs." % (
+ _operation_names[name],
+ )
+
+ return method
+
+
+def _is_comparable_to(self, other):
+ """
+ Check whether `other` is comparable to `self`.
+ """
+ for func in self._requirements:
+ if not func(self, other):
+ return False
+ return True
+
+
+def _check_same_type(self, other):
+ """
+ Return True if *self* and *other* are of the same type, False otherwise.
+ """
+ return other.value.__class__ is self.value.__class__
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.pyi
new file mode 100644
index 0000000000..e71aaff7a1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_cmp.pyi
@@ -0,0 +1,13 @@
+from typing import Optional, Type
+
+from . import _CompareWithType
+
+def cmp_using(
+ eq: Optional[_CompareWithType],
+ lt: Optional[_CompareWithType],
+ le: Optional[_CompareWithType],
+ gt: Optional[_CompareWithType],
+ ge: Optional[_CompareWithType],
+ require_same_type: bool,
+ class_name: str,
+) -> Type: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_compat.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_compat.py
new file mode 100644
index 0000000000..dc0cb02b64
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_compat.py
@@ -0,0 +1,261 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import platform
+import sys
+import threading
+import types
+import warnings
+
+
+PY2 = sys.version_info[0] == 2
+PYPY = platform.python_implementation() == "PyPy"
+PY36 = sys.version_info[:2] >= (3, 6)
+HAS_F_STRINGS = PY36
+PY310 = sys.version_info[:2] >= (3, 10)
+
+
+if PYPY or PY36:
+ ordered_dict = dict
+else:
+ from collections import OrderedDict
+
+ ordered_dict = OrderedDict
+
+
+if PY2:
+ from collections import Mapping, Sequence
+
+ from UserDict import IterableUserDict
+
+ # We 'bundle' isclass instead of using inspect as importing inspect is
+ # fairly expensive (order of 10-15 ms for a modern machine in 2016)
+ def isclass(klass):
+ return isinstance(klass, (type, types.ClassType))
+
+ def new_class(name, bases, kwds, exec_body):
+ """
+ A minimal stub of types.new_class that we need for make_class.
+ """
+ ns = {}
+ exec_body(ns)
+
+ return type(name, bases, ns)
+
+ # TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
+ TYPE = "type"
+
+ def iteritems(d):
+ return d.iteritems()
+
+ # Python 2 is bereft of a read-only dict proxy, so we make one!
+ class ReadOnlyDict(IterableUserDict):
+ """
+ Best-effort read-only dict wrapper.
+ """
+
+ def __setitem__(self, key, val):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise TypeError(
+ "'mappingproxy' object does not support item assignment"
+ )
+
+ def update(self, _):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'update'"
+ )
+
+ def __delitem__(self, _):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise TypeError(
+ "'mappingproxy' object does not support item deletion"
+ )
+
+ def clear(self):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'clear'"
+ )
+
+ def pop(self, key, default=None):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'pop'"
+ )
+
+ def popitem(self):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'popitem'"
+ )
+
+ def setdefault(self, key, default=None):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'setdefault'"
+ )
+
+ def __repr__(self):
+ # Override to be identical to the Python 3 version.
+ return "mappingproxy(" + repr(self.data) + ")"
+
+ def metadata_proxy(d):
+ res = ReadOnlyDict()
+ res.data.update(d) # We blocked update, so we have to do it like this.
+ return res
+
+ def just_warn(*args, **kw): # pragma: no cover
+ """
+ We only warn on Python 3 because we are not aware of any concrete
+ consequences of not setting the cell on Python 2.
+ """
+
+else: # Python 3 and later.
+ from collections.abc import Mapping, Sequence # noqa
+
+ def just_warn(*args, **kw):
+ """
+ We only warn on Python 3 because we are not aware of any concrete
+ consequences of not setting the cell on Python 2.
+ """
+ warnings.warn(
+ "Running interpreter doesn't sufficiently support code object "
+ "introspection. Some features like bare super() or accessing "
+ "__class__ will not work with slotted classes.",
+ RuntimeWarning,
+ stacklevel=2,
+ )
+
+ def isclass(klass):
+ return isinstance(klass, type)
+
+ TYPE = "class"
+
+ def iteritems(d):
+ return d.items()
+
+ new_class = types.new_class
+
+ def metadata_proxy(d):
+ return types.MappingProxyType(dict(d))
+
+
+def make_set_closure_cell():
+ """Return a function of two arguments (cell, value) which sets
+ the value stored in the closure cell `cell` to `value`.
+ """
+ # pypy makes this easy. (It also supports the logic below, but
+ # why not do the easy/fast thing?)
+ if PYPY:
+
+ def set_closure_cell(cell, value):
+ cell.__setstate__((value,))
+
+ return set_closure_cell
+
+ # Otherwise gotta do it the hard way.
+
+ # Create a function that will set its first cellvar to `value`.
+ def set_first_cellvar_to(value):
+ x = value
+ return
+
+ # This function will be eliminated as dead code, but
+ # not before its reference to `x` forces `x` to be
+ # represented as a closure cell rather than a local.
+ def force_x_to_be_a_cell(): # pragma: no cover
+ return x
+
+ try:
+ # Extract the code object and make sure our assumptions about
+ # the closure behavior are correct.
+ if PY2:
+ co = set_first_cellvar_to.func_code
+ else:
+ co = set_first_cellvar_to.__code__
+ if co.co_cellvars != ("x",) or co.co_freevars != ():
+ raise AssertionError # pragma: no cover
+
+ # Convert this code object to a code object that sets the
+ # function's first _freevar_ (not cellvar) to the argument.
+ if sys.version_info >= (3, 8):
+ # CPython 3.8+ has an incompatible CodeType signature
+ # (added a posonlyargcount argument) but also added
+ # CodeType.replace() to do this without counting parameters.
+ set_first_freevar_code = co.replace(
+ co_cellvars=co.co_freevars, co_freevars=co.co_cellvars
+ )
+ else:
+ args = [co.co_argcount]
+ if not PY2:
+ args.append(co.co_kwonlyargcount)
+ args.extend(
+ [
+ co.co_nlocals,
+ co.co_stacksize,
+ co.co_flags,
+ co.co_code,
+ co.co_consts,
+ co.co_names,
+ co.co_varnames,
+ co.co_filename,
+ co.co_name,
+ co.co_firstlineno,
+ co.co_lnotab,
+ # These two arguments are reversed:
+ co.co_cellvars,
+ co.co_freevars,
+ ]
+ )
+ set_first_freevar_code = types.CodeType(*args)
+
+ def set_closure_cell(cell, value):
+ # Create a function using the set_first_freevar_code,
+ # whose first closure cell is `cell`. Calling it will
+ # change the value of that cell.
+ setter = types.FunctionType(
+ set_first_freevar_code, {}, "setter", (), (cell,)
+ )
+ # And call it to set the cell.
+ setter(value)
+
+ # Make sure it works on this interpreter:
+ def make_func_with_cell():
+ x = None
+
+ def func():
+ return x # pragma: no cover
+
+ return func
+
+ if PY2:
+ cell = make_func_with_cell().func_closure[0]
+ else:
+ cell = make_func_with_cell().__closure__[0]
+ set_closure_cell(cell, 100)
+ if cell.cell_contents != 100:
+ raise AssertionError # pragma: no cover
+
+ except Exception:
+ return just_warn
+ else:
+ return set_closure_cell
+
+
+set_closure_cell = make_set_closure_cell()
+
+# Thread-local global to track attrs instances which are already being repr'd.
+# This is needed because there is no other (thread-safe) way to pass info
+# about the instances that are already being repr'd through the call stack
+# in order to ensure we don't perform infinite recursion.
+#
+# For instance, if an instance contains a dict which contains that instance,
+# we need to know that we're already repr'ing the outside instance from within
+# the dict's repr() call.
+#
+# This lives here rather than in _make.py so that the functions in _make.py
+# don't have a direct reference to the thread-local in their globals dict.
+# If they have such a reference, it breaks cloudpickle.
+repr_context = threading.local()
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_config.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_config.py
new file mode 100644
index 0000000000..fc9be29d00
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_config.py
@@ -0,0 +1,33 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+
+__all__ = ["set_run_validators", "get_run_validators"]
+
+_run_validators = True
+
+
+def set_run_validators(run):
+ """
+ Set whether or not validators are run. By default, they are run.
+
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+ moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
+ instead.
+ """
+ if not isinstance(run, bool):
+ raise TypeError("'run' must be bool.")
+ global _run_validators
+ _run_validators = run
+
+
+def get_run_validators():
+ """
+ Return whether or not validators are run.
+
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+ moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
+ instead.
+ """
+ return _run_validators
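+
+# Illustrative usage sketch (editorial, not from the upstream attrs sources):
+# the public aliases are attr.set_run_validators()/attr.get_run_validators().
+# ``Account`` and ``non_negative`` are hypothetical.
+#
+#   import attr
+#
+#   def non_negative(instance, attribute, value):
+#       if value < 0:
+#           raise ValueError("%s must be non-negative" % attribute.name)
+#
+#   @attr.s
+#   class Account(object):
+#       balance = attr.ib(validator=non_negative)
+#
+#   attr.set_run_validators(False)   # e.g. while loading pre-validated data
+#   try:
+#       legacy = Account(balance=-1)  # validator is skipped
+#   finally:
+#       attr.set_run_validators(True)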
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_funcs.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_funcs.py
new file mode 100644
index 0000000000..4c90085a40
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_funcs.py
@@ -0,0 +1,422 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import copy
+
+from ._compat import iteritems
+from ._make import NOTHING, _obj_setattr, fields
+from .exceptions import AttrsAttributeNotFoundError
+
+
+def asdict(
+ inst,
+ recurse=True,
+ filter=None,
+ dict_factory=dict,
+ retain_collection_types=False,
+ value_serializer=None,
+):
+ """
+ Return the ``attrs`` attribute values of *inst* as a dict.
+
+ Optionally recurse into other ``attrs``-decorated classes.
+
+ :param inst: Instance of an ``attrs``-decorated class.
+ :param bool recurse: Recurse into classes that are also
+ ``attrs``-decorated.
+ :param callable filter: A callable whose return code determines whether an
+ attribute or element is included (``True``) or dropped (``False``). Is
+ called with the `attrs.Attribute` as the first argument and the
+ value as the second argument.
+ :param callable dict_factory: A callable to produce dictionaries from. For
+ example, to produce ordered dictionaries instead of normal Python
+ dictionaries, pass in ``collections.OrderedDict``.
+ :param bool retain_collection_types: Do not convert to ``list`` when
+ encountering an attribute whose type is ``tuple`` or ``set``. Only
+ meaningful if ``recurse`` is ``True``.
+ :param Optional[callable] value_serializer: A hook that is called for every
+ attribute or dict key/value. It receives the current instance, field
+ and value and must return the (updated) value. The hook is run *after*
+ the optional *filter* has been applied.
+
+ :rtype: return type of *dict_factory*
+
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 16.0.0 *dict_factory*
+ .. versionadded:: 16.1.0 *retain_collection_types*
+ .. versionadded:: 20.3.0 *value_serializer*
+ .. versionadded:: 21.3.0 If a dict has a collection for a key, it is
+ serialized as a tuple.
+ """
+ attrs = fields(inst.__class__)
+ rv = dict_factory()
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+
+ if value_serializer is not None:
+ v = value_serializer(inst, a, v)
+
+ if recurse is True:
+ if has(v.__class__):
+ rv[a.name] = asdict(
+ v,
+ recurse=True,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ elif isinstance(v, (tuple, list, set, frozenset)):
+ cf = v.__class__ if retain_collection_types is True else list
+ rv[a.name] = cf(
+ [
+ _asdict_anything(
+ i,
+ is_key=False,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ for i in v
+ ]
+ )
+ elif isinstance(v, dict):
+ df = dict_factory
+ rv[a.name] = df(
+ (
+ _asdict_anything(
+ kk,
+ is_key=True,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ _asdict_anything(
+ vv,
+ is_key=False,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ )
+ for kk, vv in iteritems(v)
+ )
+ else:
+ rv[a.name] = v
+ else:
+ rv[a.name] = v
+ return rv
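+
+# Illustrative usage sketch (editorial, not from the upstream attrs sources):
+# asdict() recurses into nested attrs classes and can filter fields. The
+# classes ``Coordinates`` and ``Pin`` are hypothetical.
+#
+#   import attr
+#
+#   @attr.s
+#   class Coordinates(object):
+#       x = attr.ib()
+#       y = attr.ib()
+#
+#   @attr.s
+#   class Pin(object):
+#       label = attr.ib()
+#       position = attr.ib()
+#
+#   pin = Pin(label="home", position=Coordinates(1, 2))
+#   assert attr.asdict(pin) == {"label": "home", "position": {"x": 1, "y": 2}}
+#   assert attr.asdict(pin, filter=lambda a, v: a.name != "label") == {
+#       "position": {"x": 1, "y": 2}
+#   }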
+
+
+def _asdict_anything(
+ val,
+ is_key,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+):
+ """
+ ``asdict`` only works on attrs instances; this works on anything.
+ """
+ if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+ # Attrs class.
+ rv = asdict(
+ val,
+ recurse=True,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ elif isinstance(val, (tuple, list, set, frozenset)):
+ if retain_collection_types is True:
+ cf = val.__class__
+ elif is_key:
+ cf = tuple
+ else:
+ cf = list
+
+ rv = cf(
+ [
+ _asdict_anything(
+ i,
+ is_key=False,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ for i in val
+ ]
+ )
+ elif isinstance(val, dict):
+ df = dict_factory
+ rv = df(
+ (
+ _asdict_anything(
+ kk,
+ is_key=True,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ _asdict_anything(
+ vv,
+ is_key=False,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ )
+ for kk, vv in iteritems(val)
+ )
+ else:
+ rv = val
+ if value_serializer is not None:
+ rv = value_serializer(None, None, rv)
+
+ return rv
+
+
+def astuple(
+ inst,
+ recurse=True,
+ filter=None,
+ tuple_factory=tuple,
+ retain_collection_types=False,
+):
+ """
+ Return the ``attrs`` attribute values of *inst* as a tuple.
+
+ Optionally recurse into other ``attrs``-decorated classes.
+
+ :param inst: Instance of an ``attrs``-decorated class.
+ :param bool recurse: Recurse into classes that are also
+ ``attrs``-decorated.
+ :param callable filter: A callable whose return code determines whether an
+ attribute or element is included (``True``) or dropped (``False``). Is
+ called with the `attrs.Attribute` as the first argument and the
+ value as the second argument.
+ :param callable tuple_factory: A callable to produce tuples from. For
+ example, to produce lists instead of tuples.
+ :param bool retain_collection_types: Do not convert to ``list``
+ or ``dict`` when encountering an attribute whose type is
+ ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
+ ``True``.
+
+ :rtype: return type of *tuple_factory*
+
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 16.2.0
+ """
+ attrs = fields(inst.__class__)
+ rv = []
+ retain = retain_collection_types # Very long. :/
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+ if recurse is True:
+ if has(v.__class__):
+ rv.append(
+ astuple(
+ v,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ )
+ elif isinstance(v, (tuple, list, set, frozenset)):
+ cf = v.__class__ if retain is True else list
+ rv.append(
+ cf(
+ [
+ astuple(
+ j,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(j.__class__)
+ else j
+ for j in v
+ ]
+ )
+ )
+ elif isinstance(v, dict):
+ df = v.__class__ if retain is True else dict
+ rv.append(
+ df(
+ (
+ astuple(
+ kk,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(kk.__class__)
+ else kk,
+ astuple(
+ vv,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(vv.__class__)
+ else vv,
+ )
+ for kk, vv in iteritems(v)
+ )
+ )
+ else:
+ rv.append(v)
+ else:
+ rv.append(v)
+
+ return rv if tuple_factory is list else tuple_factory(rv)
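+
+# Illustrative usage sketch (editorial, not from the upstream attrs sources):
+# astuple() mirrors asdict() but produces tuples. ``Point`` is hypothetical.
+#
+#   import attr
+#
+#   @attr.s
+#   class Point(object):
+#       x = attr.ib()
+#       y = attr.ib()
+#
+#   assert attr.astuple(Point(1, 2)) == (1, 2)
+#   assert attr.astuple(Point(1, 2), tuple_factory=list) == [1, 2]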
+
+
+def has(cls):
+ """
+ Check whether *cls* is a class with ``attrs`` attributes.
+
+ :param type cls: Class to introspect.
+ :raise TypeError: If *cls* is not a class.
+
+ :rtype: bool
+ """
+ return getattr(cls, "__attrs_attrs__", None) is not None
+
+
+def assoc(inst, **changes):
+ """
+ Copy *inst* and apply *changes*.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ :param changes: Keyword changes in the new copy.
+
+ :return: A copy of inst with *changes* incorporated.
+
+ :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
+ be found on *cls*.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. deprecated:: 17.1.0
+ Use `attrs.evolve` instead if you can.
+ This function will not be removed due to the slightly different approach
+ compared to `attrs.evolve`.
+ """
+ import warnings
+
+ warnings.warn(
+ "assoc is deprecated and will be removed after 2018/01.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ new = copy.copy(inst)
+ attrs = fields(inst.__class__)
+ for k, v in iteritems(changes):
+ a = getattr(attrs, k, NOTHING)
+ if a is NOTHING:
+ raise AttrsAttributeNotFoundError(
+ "{k} is not an attrs attribute on {cl}.".format(
+ k=k, cl=new.__class__
+ )
+ )
+ _obj_setattr(new, k, v)
+ return new
+
+
+def evolve(inst, **changes):
+ """
+ Create a new instance, based on *inst* with *changes* applied.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ :param changes: Keyword changes in the new copy.
+
+ :return: A copy of inst with *changes* incorporated.
+
+ :raise TypeError: If *attr_name* couldn't be found in the class
+ ``__init__``.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 17.1.0
+ """
+ cls = inst.__class__
+ attrs = fields(cls)
+ for a in attrs:
+ if not a.init:
+ continue
+ attr_name = a.name # To deal with private attributes.
+ init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
+ if init_name not in changes:
+ changes[init_name] = getattr(inst, attr_name)
+
+ return cls(**changes)
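+
+# Illustrative usage sketch (editorial, not from the upstream attrs sources):
+# evolve() builds the changed copy by calling __init__ again, so validators
+# and converters run on the new values. ``User`` is hypothetical.
+#
+#   import attr
+#
+#   @attr.s
+#   class User(object):
+#       name = attr.ib()
+#       admin = attr.ib(default=False)
+#
+#   alice = User("alice")
+#   promoted = attr.evolve(alice, admin=True)
+#   assert promoted == User("alice", admin=True)
+#   assert alice.admin is False  # the original instance is untouched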
+
+
+def resolve_types(cls, globalns=None, localns=None, attribs=None):
+ """
+ Resolve any strings and forward annotations in type annotations.
+
+ This is only required if you need concrete types in `Attribute`'s *type*
+ field. In other words, you don't need to resolve your types if you only
+ use them for static type checking.
+
+ With no arguments, names will be looked up in the module in which the class
+ was created. If this is not what you want, e.g. if the name only exists
+ inside a method, you may pass *globalns* or *localns* to specify other
+ dictionaries in which to look up these names. See the docs of
+ `typing.get_type_hints` for more details.
+
+ :param type cls: Class to resolve.
+ :param Optional[dict] globalns: Dictionary containing global variables.
+ :param Optional[dict] localns: Dictionary containing local variables.
+ :param Optional[list] attribs: List of attribs for the given class.
+ This is necessary when calling from inside a ``field_transformer``
+ since *cls* is not an ``attrs`` class yet.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class and you didn't pass any attribs.
+ :raise NameError: If types cannot be resolved because of missing variables.
+
+ :returns: *cls* so you can use this function also as a class decorator.
+ Please note that you have to apply it **after** `attrs.define`. That
+ means the decorator has to come in the line **before** `attrs.define`.
+
+ .. versionadded:: 20.1.0
+ .. versionadded:: 21.1.0 *attribs*
+
+ """
+ # Since calling get_type_hints is expensive we cache whether we've
+ # done it already.
+ if getattr(cls, "__attrs_types_resolved__", None) != cls:
+ import typing
+
+ hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
+ for field in fields(cls) if attribs is None else attribs:
+ if field.name in hints:
+ # Since fields have been frozen we must work around it.
+ _obj_setattr(field, "type", hints[field.name])
+ # We store the class we resolved so that subclasses know they haven't
+ # been resolved.
+ cls.__attrs_types_resolved__ = cls
+
+ # Return the class so you can use it as a decorator too.
+ return cls
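+
+# Illustrative usage sketch (editorial, not from the upstream attrs sources,
+# Python 3 only): resolve_types() replaces string/forward annotations with
+# real types on ``Attribute.type``. ``Node`` is hypothetical.
+#
+#   import attr
+#
+#   @attr.s(auto_attribs=True)
+#   class Node:
+#       value: int
+#       next: "Node" = None
+#
+#   attr.resolve_types(Node)
+#   assert attr.fields(Node).next.type is Node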
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_make.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_make.py
new file mode 100644
index 0000000000..d46f8a3e7a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_make.py
@@ -0,0 +1,3173 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import copy
+import inspect
+import linecache
+import sys
+import warnings
+
+from operator import itemgetter
+
+# We need to import _compat itself in addition to the _compat members to avoid
+# having the thread-local in the globals here.
+from . import _compat, _config, setters
+from ._compat import (
+ HAS_F_STRINGS,
+ PY2,
+ PY310,
+ PYPY,
+ isclass,
+ iteritems,
+ metadata_proxy,
+ new_class,
+ ordered_dict,
+ set_closure_cell,
+)
+from .exceptions import (
+ DefaultAlreadySetError,
+ FrozenInstanceError,
+ NotAnAttrsClassError,
+ PythonTooOldError,
+ UnannotatedAttributeError,
+)
+
+
+if not PY2:
+ import typing
+
+
+# This is used at least twice, so cache it here.
+_obj_setattr = object.__setattr__
+_init_converter_pat = "__attr_converter_%s"
+_init_factory_pat = "__attr_factory_{}"
+_tuple_property_pat = (
+ " {attr_name} = _attrs_property(_attrs_itemgetter({index}))"
+)
+_classvar_prefixes = (
+ "typing.ClassVar",
+ "t.ClassVar",
+ "ClassVar",
+ "typing_extensions.ClassVar",
+)
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_hash_cache_field = "_attrs_cached_hash"
+
+_empty_metadata_singleton = metadata_proxy({})
+
+# Unique object for unequivocal getattr() defaults.
+_sentinel = object()
+
+_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate)
+
+
+class _Nothing(object):
+ """
+ Sentinel class to indicate the lack of a value when ``None`` is ambiguous.
+
+ ``_Nothing`` is a singleton. There is only ever one of it.
+
+ .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
+ """
+
+ _singleton = None
+
+ def __new__(cls):
+ if _Nothing._singleton is None:
+ _Nothing._singleton = super(_Nothing, cls).__new__(cls)
+ return _Nothing._singleton
+
+ def __repr__(self):
+ return "NOTHING"
+
+ def __bool__(self):
+ return False
+
+ def __len__(self):
+ return 0 # __bool__ for Python 2
+
+
+NOTHING = _Nothing()
+"""
+Sentinel to indicate the lack of a value when ``None`` is ambiguous.
+"""
+
+
+class _CacheHashWrapper(int):
+ """
+ An integer subclass that pickles / copies as None
+
+ This is used for non-slots classes with ``cache_hash=True``, to avoid
+ serializing a potentially (even likely) invalid hash value. Since ``None``
+ is the default value for uncalculated hashes, whenever this is copied,
+ the copy's value for the hash should automatically reset.
+
+ See GH #613 for more details.
+ """
+
+ if PY2:
+ # For some reason `type(None)` isn't callable in Python 2, but we don't
+ # actually need a constructor for None objects, we just need any
+ # available function that returns None.
+ def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)):
+ return _none_constructor, _args
+
+ else:
+
+ def __reduce__(self, _none_constructor=type(None), _args=()):
+ return _none_constructor, _args
+
+
+def attrib(
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=None,
+ init=True,
+ metadata=None,
+ type=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+ eq=None,
+ order=None,
+ on_setattr=None,
+):
+ """
+ Create a new attribute on a class.
+
+ .. warning::
+
+ Does *not* do anything unless the class is also decorated with
+ `attr.s`!
+
+ :param default: A value that is used if an ``attrs``-generated ``__init__``
+ is used and no value is passed while instantiating or the attribute is
+ excluded using ``init=False``.
+
+ If the value is an instance of `attrs.Factory`, its callable will be
+ used to construct a new value (useful for mutable data types like lists
+ or dicts).
+
+ If a default is not set (or set manually to `attrs.NOTHING`), a value
+ *must* be supplied when instantiating; otherwise a `TypeError`
+ will be raised.
+
+ The default can also be set using decorator notation as shown below.
+
+ :type default: Any value
+
+ :param callable factory: Syntactic sugar for
+ ``default=attr.Factory(factory)``.
+
+ :param validator: `callable` that is called by ``attrs``-generated
+ ``__init__`` methods after the instance has been initialized. They
+ receive the initialized instance, the :func:`~attrs.Attribute`, and the
+ passed value.
+
+ The return value is *not* inspected so the validator has to throw an
+ exception itself.
+
+ If a `list` is passed, its items are treated as validators and must
+ all pass.
+
+ Validators can be globally disabled and re-enabled using
+ `get_run_validators`.
+
+ The validator can also be set using decorator notation as shown below.
+
+ :type validator: `callable` or a `list` of `callable`\\ s.
+
+ :param repr: Include this attribute in the generated ``__repr__``
+ method. If ``True``, include the attribute; if ``False``, omit it. By
+ default, the built-in ``repr()`` function is used. To override how the
+ attribute value is formatted, pass a ``callable`` that takes a single
+ value and returns a string. Note that the resulting string is used
+ as-is, i.e. it will be used directly *instead* of calling ``repr()``
+ (the default).
+ :type repr: a `bool` or a `callable` to use a custom function.
+
+ :param eq: If ``True`` (default), include this attribute in the
+ generated ``__eq__`` and ``__ne__`` methods that check two instances
+ for equality. To override how the attribute value is compared,
+ pass a ``callable`` that takes a single value and returns the value
+ to be compared.
+ :type eq: a `bool` or a `callable`.
+
+ :param order: If ``True`` (default), include this attribute in the
+ generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods.
+ To override how the attribute value is ordered,
+ pass a ``callable`` that takes a single value and returns the value
+ to be ordered.
+ :type order: a `bool` or a `callable`.
+
+ :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the
+ same value. Must not be mixed with *eq* or *order*.
+ :type cmp: a `bool` or a `callable`.
+
+ :param Optional[bool] hash: Include this attribute in the generated
+ ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This
+ is the correct behavior according the Python spec. Setting this value
+ to anything else than ``None`` is *discouraged*.
+ :param bool init: Include this attribute in the generated ``__init__``
+ method. It is possible to set this to ``False`` and set a default
+ value. In that case this attribute is unconditionally initialized
+ with the specified default value or factory.
+ :param callable converter: `callable` that is called by
+ ``attrs``-generated ``__init__`` methods to convert attribute's value
+ to the desired format. It is given the passed-in value, and the
+ returned value will be used as the new value of the attribute. The
+ value is converted before being passed to the validator, if any.
+ :param metadata: An arbitrary mapping, to be used by third-party
+ components. See `extending_metadata`.
+ :param type: The type of the attribute. In Python 3.6 or greater, the
+ preferred method to specify the type is using a variable annotation
+ (see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_).
+ This argument is provided for backward compatibility.
+ Regardless of the approach used, the type will be stored on
+ ``Attribute.type``.
+
+ Please note that ``attrs`` doesn't do anything with this metadata by
+ itself. You can use it as part of your own code or for
+ `static type checking <types>`.
+ :param kw_only: Make this attribute keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
+ :param on_setattr: Allows overwriting the *on_setattr* setting from
+ `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used.
+ Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
+ attribute -- regardless of the setting in `attr.s`.
+ :type on_setattr: `callable`, or a list of callables, or `None`, or
+ `attrs.setters.NO_OP`
+
+ .. versionadded:: 15.2.0 *convert*
+ .. versionadded:: 16.3.0 *metadata*
+ .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+ .. versionchanged:: 17.1.0
+ *hash* is ``None`` and therefore mirrors *eq* by default.
+ .. versionadded:: 17.3.0 *type*
+ .. deprecated:: 17.4.0 *convert*
+ .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated
+ *convert* to achieve consistency with other noun-based arguments.
+ .. versionadded:: 18.1.0
+ ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionchanged:: 19.2.0 *convert* keyword argument removed.
+ .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+ .. versionadded:: 19.2.0 *eq* and *order*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
+ .. versionchanged:: 21.1.0
+ *eq*, *order*, and *cmp* also accept a custom callable
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
+ """
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
+ cmp, eq, order, True
+ )
+
+ if hash is not None and hash is not True and hash is not False:
+ raise TypeError(
+ "Invalid value for hash. Must be True, False, or None."
+ )
+
+ if factory is not None:
+ if default is not NOTHING:
+ raise ValueError(
+ "The `default` and `factory` arguments are mutually "
+ "exclusive."
+ )
+ if not callable(factory):
+ raise ValueError("The `factory` argument must be a callable.")
+ default = Factory(factory)
+
+ if metadata is None:
+ metadata = {}
+
+ # Apply syntactic sugar by auto-wrapping.
+ if isinstance(on_setattr, (list, tuple)):
+ on_setattr = setters.pipe(*on_setattr)
+
+ if validator and isinstance(validator, (list, tuple)):
+ validator = and_(*validator)
+
+ if converter and isinstance(converter, (list, tuple)):
+ converter = pipe(*converter)
+
+ return _CountingAttr(
+ default=default,
+ validator=validator,
+ repr=repr,
+ cmp=None,
+ hash=hash,
+ init=init,
+ converter=converter,
+ metadata=metadata,
+ type=type,
+ kw_only=kw_only,
+ eq=eq,
+ eq_key=eq_key,
+ order=order,
+ order_key=order_key,
+ on_setattr=on_setattr,
+ )
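+
+# Illustrative usage sketch (editorial, not from the upstream attrs sources):
+# attrib() is exposed publicly as attr.ib()/attr.attrib(); the converter runs
+# before the validator, and decorator notation attaches a validator to one
+# field. ``Connection`` is hypothetical.
+#
+#   import attr
+#
+#   @attr.s
+#   class Connection(object):
+#       host = attr.ib()
+#       port = attr.ib(default=443, converter=int)
+#       tags = attr.ib(factory=list)
+#
+#       @port.validator
+#       def _check_port(self, attribute, value):
+#           if not 0 < value < 65536:
+#               raise ValueError("port out of range")
+#
+#   c = Connection("example.test", port="8443")
+#   assert c.port == 8443 and c.tags == []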
+
+
+def _compile_and_eval(script, globs, locs=None, filename=""):
+ """
+ "Exec" the script with the given global (globs) and local (locs) variables.
+ """
+ bytecode = compile(script, filename, "exec")
+ eval(bytecode, globs, locs)
+
+
+def _make_method(name, script, filename, globs=None):
+ """
+ Create the method with the script given and return the method object.
+ """
+ locs = {}
+ if globs is None:
+ globs = {}
+
+ # In order for debuggers like PDB to be able to step through the code,
+ # we add a fake linecache entry.
+ count = 1
+ base_filename = filename
+ while True:
+ linecache_tuple = (
+ len(script),
+ None,
+ script.splitlines(True),
+ filename,
+ )
+ old_val = linecache.cache.setdefault(filename, linecache_tuple)
+ if old_val == linecache_tuple:
+ break
+ else:
+ filename = "{}-{}>".format(base_filename[:-1], count)
+ count += 1
+
+ _compile_and_eval(script, globs, locs, filename)
+
+ return locs[name]
+
+
+def _make_attr_tuple_class(cls_name, attr_names):
+ """
+ Create a tuple subclass to hold `Attribute`s for an `attrs` class.
+
+ The subclass is a bare tuple with properties for names.
+
+ class MyClassAttributes(tuple):
+ __slots__ = ()
+ x = property(itemgetter(0))
+ """
+ attr_class_name = "{}Attributes".format(cls_name)
+ attr_class_template = [
+ "class {}(tuple):".format(attr_class_name),
+ " __slots__ = ()",
+ ]
+ if attr_names:
+ for i, attr_name in enumerate(attr_names):
+ attr_class_template.append(
+ _tuple_property_pat.format(index=i, attr_name=attr_name)
+ )
+ else:
+ attr_class_template.append(" pass")
+ globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
+ _compile_and_eval("\n".join(attr_class_template), globs)
+ return globs[attr_class_name]
+
+
+# Tuple class for extracted attributes from a class definition.
+# `base_attrs` is a subset of `attrs`.
+_Attributes = _make_attr_tuple_class(
+ "_Attributes",
+ [
+ # all attributes to build dunder methods for
+ "attrs",
+ # attributes that have been inherited
+ "base_attrs",
+ # map inherited attributes to their originating classes
+ "base_attrs_map",
+ ],
+)
+
+
+def _is_class_var(annot):
+ """
+ Check whether *annot* is a typing.ClassVar.
+
+ The string comparison hack is used to avoid evaluating all string
+ annotations which would put attrs-based classes at a performance
+ disadvantage compared to plain old classes.
+ """
+ annot = str(annot)
+
+ # Annotation can be quoted.
+ if annot.startswith(("'", '"')) and annot.endswith(("'", '"')):
+ annot = annot[1:-1]
+
+ return annot.startswith(_classvar_prefixes)
+
+
+def _has_own_attribute(cls, attrib_name):
+ """
+ Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
+
+ Requires Python 3.
+ """
+ attr = getattr(cls, attrib_name, _sentinel)
+ if attr is _sentinel:
+ return False
+
+ for base_cls in cls.__mro__[1:]:
+ a = getattr(base_cls, attrib_name, None)
+ if attr is a:
+ return False
+
+ return True
+
+
+def _get_annotations(cls):
+ """
+ Get annotations for *cls*.
+ """
+ if _has_own_attribute(cls, "__annotations__"):
+ return cls.__annotations__
+
+ return {}
+
+
+def _counter_getter(e):
+ """
+ Key function for sorting to avoid re-creating a lambda for every class.
+ """
+ return e[1].counter
+
+
+def _collect_base_attrs(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in reversed(cls.__mro__[1:-1]):
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.inherited or a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True)
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ # For each name, only keep the freshest definition i.e. the furthest at the
+ # back. base_attr_map is fine because it gets overwritten with every new
+ # instance.
+ filtered = []
+ seen = set()
+ for a in reversed(base_attrs):
+ if a.name in seen:
+ continue
+ filtered.insert(0, a)
+ seen.add(a.name)
+
+ return filtered, base_attr_map
+
+
+def _collect_base_attrs_broken(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+
+ N.B. *taken_attr_names* will be mutated.
+
+ Adhere to the old incorrect behavior.
+
+ Notably it collects from the front and considers inherited attributes which
+ leads to the buggy behavior reported in #428.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in cls.__mro__[1:-1]:
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True)
+ taken_attr_names.add(a.name)
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ return base_attrs, base_attr_map
+
+
+def _transform_attrs(
+ cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
+):
+ """
+ Transform all `_CountingAttr`s on a class into `Attribute`s.
+
+ If *these* is passed, use that and don't look for them on the class.
+
+ If *collect_by_mro* is True, collect them in the correct MRO order; otherwise
+ use the old -- incorrect -- order. See #428.
+
+ Return an `_Attributes`.
+ """
+ cd = cls.__dict__
+ anns = _get_annotations(cls)
+
+ if these is not None:
+ ca_list = [(name, ca) for name, ca in iteritems(these)]
+
+ if not isinstance(these, ordered_dict):
+ ca_list.sort(key=_counter_getter)
+ elif auto_attribs is True:
+ ca_names = {
+ name
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ }
+ ca_list = []
+ annot_names = set()
+ for attr_name, type in anns.items():
+ if _is_class_var(type):
+ continue
+ annot_names.add(attr_name)
+ a = cd.get(attr_name, NOTHING)
+
+ if not isinstance(a, _CountingAttr):
+ if a is NOTHING:
+ a = attrib()
+ else:
+ a = attrib(default=a)
+ ca_list.append((attr_name, a))
+
+ unannotated = ca_names - annot_names
+ if len(unannotated) > 0:
+ raise UnannotatedAttributeError(
+ "The following `attr.ib`s lack a type annotation: "
+ + ", ".join(
+ sorted(unannotated, key=lambda n: cd.get(n).counter)
+ )
+ + "."
+ )
+ else:
+ ca_list = sorted(
+ (
+ (name, attr)
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ ),
+ key=lambda e: e[1].counter,
+ )
+
+ own_attrs = [
+ Attribute.from_counting_attr(
+ name=attr_name, ca=ca, type=anns.get(attr_name)
+ )
+ for attr_name, ca in ca_list
+ ]
+
+ if collect_by_mro:
+ base_attrs, base_attr_map = _collect_base_attrs(
+ cls, {a.name for a in own_attrs}
+ )
+ else:
+ base_attrs, base_attr_map = _collect_base_attrs_broken(
+ cls, {a.name for a in own_attrs}
+ )
+
+ if kw_only:
+ own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
+ base_attrs = [a.evolve(kw_only=True) for a in base_attrs]
+
+ attrs = base_attrs + own_attrs
+
+ # Mandatory vs non-mandatory attr order only matters when they are part of
+ # the __init__ signature and when they aren't kw_only (which are moved to
+ # the end and can be mandatory or non-mandatory in any order, as they will
+ # be specified as keyword args anyway). Check the order of those attrs:
+ had_default = False
+ for a in (a for a in attrs if a.init is not False and a.kw_only is False):
+ if had_default is True and a.default is NOTHING:
+ raise ValueError(
+ "No mandatory attributes allowed after an attribute with a "
+ "default value or factory. Attribute in question: %r" % (a,)
+ )
+
+ if had_default is False and a.default is not NOTHING:
+ had_default = True
+
+ if field_transformer is not None:
+ attrs = field_transformer(cls, attrs)
+
+ # Create AttrsClass *after* applying the field_transformer since it may
+ # add or remove attributes!
+ attr_names = [a.name for a in attrs]
+ AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
+
+ return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map))
+
+
+if PYPY:
+
+ def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ if isinstance(self, BaseException) and name in (
+ "__cause__",
+ "__context__",
+ ):
+ BaseException.__setattr__(self, name, value)
+ return
+
+ raise FrozenInstanceError()
+
+else:
+
+ def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ raise FrozenInstanceError()
+
+
+def _frozen_delattrs(self, name):
+ """
+ Attached to frozen classes as __delattr__.
+ """
+ raise FrozenInstanceError()
+
+
+class _ClassBuilder(object):
+ """
+ Iteratively build *one* class.
+ """
+
+ __slots__ = (
+ "_attr_names",
+ "_attrs",
+ "_base_attr_map",
+ "_base_names",
+ "_cache_hash",
+ "_cls",
+ "_cls_dict",
+ "_delete_attribs",
+ "_frozen",
+ "_has_pre_init",
+ "_has_post_init",
+ "_is_exc",
+ "_on_setattr",
+ "_slots",
+ "_weakref_slot",
+ "_wrote_own_setattr",
+ "_has_custom_setattr",
+ )
+
+ def __init__(
+ self,
+ cls,
+ these,
+ slots,
+ frozen,
+ weakref_slot,
+ getstate_setstate,
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ collect_by_mro,
+ on_setattr,
+ has_custom_setattr,
+ field_transformer,
+ ):
+ attrs, base_attrs, base_map = _transform_attrs(
+ cls,
+ these,
+ auto_attribs,
+ kw_only,
+ collect_by_mro,
+ field_transformer,
+ )
+
+ self._cls = cls
+ self._cls_dict = dict(cls.__dict__) if slots else {}
+ self._attrs = attrs
+ self._base_names = set(a.name for a in base_attrs)
+ self._base_attr_map = base_map
+ self._attr_names = tuple(a.name for a in attrs)
+ self._slots = slots
+ self._frozen = frozen
+ self._weakref_slot = weakref_slot
+ self._cache_hash = cache_hash
+ self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
+ self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
+ self._delete_attribs = not bool(these)
+ self._is_exc = is_exc
+ self._on_setattr = on_setattr
+
+ self._has_custom_setattr = has_custom_setattr
+ self._wrote_own_setattr = False
+
+ self._cls_dict["__attrs_attrs__"] = self._attrs
+
+ if frozen:
+ self._cls_dict["__setattr__"] = _frozen_setattrs
+ self._cls_dict["__delattr__"] = _frozen_delattrs
+
+ self._wrote_own_setattr = True
+ elif on_setattr in (
+ _ng_default_on_setattr,
+ setters.validate,
+ setters.convert,
+ ):
+ has_validator = has_converter = False
+ for a in attrs:
+ if a.validator is not None:
+ has_validator = True
+ if a.converter is not None:
+ has_converter = True
+
+ if has_validator and has_converter:
+ break
+ if (
+ (
+ on_setattr == _ng_default_on_setattr
+ and not (has_validator or has_converter)
+ )
+ or (on_setattr == setters.validate and not has_validator)
+ or (on_setattr == setters.convert and not has_converter)
+ ):
+ # If class-level on_setattr is set to convert + validate, but
+ # there's no field to convert or validate, pretend like there's
+ # no on_setattr.
+ self._on_setattr = None
+
+ if getstate_setstate:
+ (
+ self._cls_dict["__getstate__"],
+ self._cls_dict["__setstate__"],
+ ) = self._make_getstate_setstate()
+
+ def __repr__(self):
+ return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__)
+
+ def build_class(self):
+ """
+ Finalize class based on the accumulated configuration.
+
+ Builder cannot be used after calling this method.
+ """
+ if self._slots is True:
+ return self._create_slots_class()
+ else:
+ return self._patch_original_class()
+
+ def _patch_original_class(self):
+ """
+ Apply accumulated methods and return the class.
+ """
+ cls = self._cls
+ base_names = self._base_names
+
+ # Clean class of attribute definitions (`attr.ib()`s).
+ if self._delete_attribs:
+ for name in self._attr_names:
+ if (
+ name not in base_names
+ and getattr(cls, name, _sentinel) is not _sentinel
+ ):
+ try:
+ delattr(cls, name)
+ except AttributeError:
+ # This can happen if a base class defines a class
+ # variable and we want to set an attribute with the
+ # same name by using only a type annotation.
+ pass
+
+ # Attach our dunder methods.
+ for name, value in self._cls_dict.items():
+ setattr(cls, name, value)
+
+ # If we've inherited an attrs __setattr__ and don't write our own,
+ # reset it to object's.
+ if not self._wrote_own_setattr and getattr(
+ cls, "__attrs_own_setattr__", False
+ ):
+ cls.__attrs_own_setattr__ = False
+
+ if not self._has_custom_setattr:
+ cls.__setattr__ = object.__setattr__
+
+ return cls
+
+ def _create_slots_class(self):
+ """
+ Build and return a new class with a `__slots__` attribute.
+ """
+ cd = {
+ k: v
+ for k, v in iteritems(self._cls_dict)
+ if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
+ }
+
+ # If our class doesn't have its own implementation of __setattr__
+ # (either from the user or by us), check the bases; if one of them has
+ # an attrs-made __setattr__, it needs to be reset. We don't walk the
+ # MRO because we only care about our immediate base classes.
+ # XXX: This can be confused by subclassing a slotted attrs class with
+ # XXX: a non-attrs class and then subclassing the result with an attrs
+ # XXX: class. See `test_slotted_confused` for details. For now that's
+ # XXX: OK with us.
+ if not self._wrote_own_setattr:
+ cd["__attrs_own_setattr__"] = False
+
+ if not self._has_custom_setattr:
+ for base_cls in self._cls.__bases__:
+ if base_cls.__dict__.get("__attrs_own_setattr__", False):
+ cd["__setattr__"] = object.__setattr__
+ break
+
+ # Traverse the MRO to collect existing slots
+ # and check for an existing __weakref__.
+ existing_slots = dict()
+ weakref_inherited = False
+ for base_cls in self._cls.__mro__[1:-1]:
+ if base_cls.__dict__.get("__weakref__", None) is not None:
+ weakref_inherited = True
+ existing_slots.update(
+ {
+ name: getattr(base_cls, name)
+ for name in getattr(base_cls, "__slots__", [])
+ }
+ )
+
+ base_names = set(self._base_names)
+
+ names = self._attr_names
+ if (
+ self._weakref_slot
+ and "__weakref__" not in getattr(self._cls, "__slots__", ())
+ and "__weakref__" not in names
+ and not weakref_inherited
+ ):
+ names += ("__weakref__",)
+
+ # We only add the names of attributes that aren't inherited.
+ # Setting __slots__ to inherited attributes wastes memory.
+ slot_names = [name for name in names if name not in base_names]
+ # There are slots for attributes from the current class
+ # that are defined in parent classes.
+ # As their descriptors may be overridden by a child class,
+ # we collect them here and update the class dict.
+ reused_slots = {
+ slot: slot_descriptor
+ for slot, slot_descriptor in iteritems(existing_slots)
+ if slot in slot_names
+ }
+ slot_names = [name for name in slot_names if name not in reused_slots]
+ cd.update(reused_slots)
+ if self._cache_hash:
+ slot_names.append(_hash_cache_field)
+ cd["__slots__"] = tuple(slot_names)
+
+ qualname = getattr(self._cls, "__qualname__", None)
+ if qualname is not None:
+ cd["__qualname__"] = qualname
+
+ # Create new class based on old class and our methods.
+ cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
+
+ # The following is a fix for
+ # <https://github.com/python-attrs/attrs/issues/102>. On Python 3,
+ # if a method mentions `__class__` or uses the no-arg super(), the
+ # compiler will bake a reference to the class in the method itself
+ # as `method.__closure__`. Since we replace the class with a
+ # clone, we rewrite these references so it keeps working.
+ for item in cls.__dict__.values():
+ if isinstance(item, (classmethod, staticmethod)):
+ # Class- and staticmethods hide their functions inside.
+ # These might need to be rewritten as well.
+ closure_cells = getattr(item.__func__, "__closure__", None)
+ elif isinstance(item, property):
+ # Workaround for property `super()` shortcut (PY3-only).
+ # There is no universal way for other descriptors.
+ closure_cells = getattr(item.fget, "__closure__", None)
+ else:
+ closure_cells = getattr(item, "__closure__", None)
+
+ if not closure_cells: # Catch None or the empty list.
+ continue
+ for cell in closure_cells:
+ try:
+ match = cell.cell_contents is self._cls
+ except ValueError: # ValueError: Cell is empty
+ pass
+ else:
+ if match:
+ set_closure_cell(cell, cls)
+
+ return cls
+
+ def add_repr(self, ns):
+ self._cls_dict["__repr__"] = self._add_method_dunders(
+ _make_repr(self._attrs, ns, self._cls)
+ )
+ return self
+
+ def add_str(self):
+ repr = self._cls_dict.get("__repr__")
+ if repr is None:
+ raise ValueError(
+ "__str__ can only be generated if a __repr__ exists."
+ )
+
+ def __str__(self):
+ return self.__repr__()
+
+ self._cls_dict["__str__"] = self._add_method_dunders(__str__)
+ return self
+
+ def _make_getstate_setstate(self):
+ """
+ Create custom __setstate__ and __getstate__ methods.
+ """
+ # __weakref__ is not writable.
+ state_attr_names = tuple(
+ an for an in self._attr_names if an != "__weakref__"
+ )
+
+ def slots_getstate(self):
+ """
+ Automatically created by attrs.
+ """
+ return tuple(getattr(self, name) for name in state_attr_names)
+
+ hash_caching_enabled = self._cache_hash
+
+ def slots_setstate(self, state):
+ """
+ Automatically created by attrs.
+ """
+ __bound_setattr = _obj_setattr.__get__(self, Attribute)
+ for name, value in zip(state_attr_names, state):
+ __bound_setattr(name, value)
+
+ # The hash code cache is not included when the object is
+ # serialized, but it still needs to be initialized to None to
+ # indicate that the first call to __hash__ should be a cache
+ # miss.
+ if hash_caching_enabled:
+ __bound_setattr(_hash_cache_field, None)
+
+ return slots_getstate, slots_setstate
+
+ def make_unhashable(self):
+ self._cls_dict["__hash__"] = None
+ return self
+
+ def add_hash(self):
+ self._cls_dict["__hash__"] = self._add_method_dunders(
+ _make_hash(
+ self._cls,
+ self._attrs,
+ frozen=self._frozen,
+ cache_hash=self._cache_hash,
+ )
+ )
+
+ return self
+
+ def add_init(self):
+ self._cls_dict["__init__"] = self._add_method_dunders(
+ _make_init(
+ self._cls,
+ self._attrs,
+ self._has_pre_init,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ self._on_setattr,
+ attrs_init=False,
+ )
+ )
+
+ return self
+
+ def add_match_args(self):
+ self._cls_dict["__match_args__"] = tuple(
+ field.name
+ for field in self._attrs
+ if field.init and not field.kw_only
+ )
+
+ def add_attrs_init(self):
+ self._cls_dict["__attrs_init__"] = self._add_method_dunders(
+ _make_init(
+ self._cls,
+ self._attrs,
+ self._has_pre_init,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ self._on_setattr,
+ attrs_init=True,
+ )
+ )
+
+ return self
+
+ def add_eq(self):
+ cd = self._cls_dict
+
+ cd["__eq__"] = self._add_method_dunders(
+ _make_eq(self._cls, self._attrs)
+ )
+ cd["__ne__"] = self._add_method_dunders(_make_ne())
+
+ return self
+
+ def add_order(self):
+ cd = self._cls_dict
+
+ cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
+ self._add_method_dunders(meth)
+ for meth in _make_order(self._cls, self._attrs)
+ )
+
+ return self
+
+ def add_setattr(self):
+ if self._frozen:
+ return self
+
+ sa_attrs = {}
+ for a in self._attrs:
+ on_setattr = a.on_setattr or self._on_setattr
+ if on_setattr and on_setattr is not setters.NO_OP:
+ sa_attrs[a.name] = a, on_setattr
+
+ if not sa_attrs:
+ return self
+
+ if self._has_custom_setattr:
+ # We need to write a __setattr__ but there already is one!
+ raise ValueError(
+ "Can't combine custom __setattr__ with on_setattr hooks."
+ )
+
+ # docstring comes from _add_method_dunders
+ def __setattr__(self, name, val):
+ try:
+ a, hook = sa_attrs[name]
+ except KeyError:
+ nval = val
+ else:
+ nval = hook(self, a, val)
+
+ _obj_setattr(self, name, nval)
+
+ self._cls_dict["__attrs_own_setattr__"] = True
+ self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
+ self._wrote_own_setattr = True
+
+ return self
+
+ def _add_method_dunders(self, method):
+ """
+ Add __module__ and __qualname__ to a *method* if possible.
+ """
+ try:
+ method.__module__ = self._cls.__module__
+ except AttributeError:
+ pass
+
+ try:
+ method.__qualname__ = ".".join(
+ (self._cls.__qualname__, method.__name__)
+ )
+ except AttributeError:
+ pass
+
+ try:
+ method.__doc__ = "Method generated by attrs for class %s." % (
+ self._cls.__qualname__,
+ )
+ except AttributeError:
+ pass
+
+ return method
+
+
+_CMP_DEPRECATION = (
+ "The usage of `cmp` is deprecated and will be removed on or after "
+ "2021-06-01. Please use `eq` and `order` instead."
+)
+
+
+def _determine_attrs_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+ raise ValueError("Don't mix `cmp` with `eq' and `order`.")
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ return cmp, cmp
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq = default_eq
+
+ if order is None:
+ order = eq
+
+ if eq is False and order is True:
+ raise ValueError("`order` can only be True if `eq` is True too.")
+
+ return eq, order
+
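+# A minimal sketch (comments only, not executed) of how the helper above
+# resolves the flags; these calls are illustrative, not public API:
+#
+#     _determine_attrs_eq_order(None, None, None, True)   # -> (True, True)
+#     _determine_attrs_eq_order(None, True, False, True)  # -> (True, False)
+#     _determine_attrs_eq_order(True, None, None, True)   # -> (True, True)
+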
+
+def _determine_attrib_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+ raise ValueError("Don't mix `cmp` with `eq' and `order`.")
+
+ def decide_callable_or_boolean(value):
+ """
+ Decide whether a key function is used.
+ """
+ if callable(value):
+ value, key = True, value
+ else:
+ key = None
+ return value, key
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ cmp, cmp_key = decide_callable_or_boolean(cmp)
+ return cmp, cmp_key, cmp, cmp_key
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq, eq_key = default_eq, None
+ else:
+ eq, eq_key = decide_callable_or_boolean(eq)
+
+ if order is None:
+ order, order_key = eq, eq_key
+ else:
+ order, order_key = decide_callable_or_boolean(order)
+
+ if eq is False and order is True:
+ raise ValueError("`order` can only be True if `eq` is True too.")
+
+ return eq, eq_key, order, order_key
+
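+# A minimal sketch (comments only, not executed): a callable passed for *eq*
+# is interpreted as a key function; ``str.lower`` is just an example key.
+#
+#     _determine_attrib_eq_order(None, str.lower, None, True)
+#     # -> (True, str.lower, True, str.lower)
+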
+
+def _determine_whether_to_implement(
+ cls, flag, auto_detect, dunders, default=True
+):
+ """
+ Check whether we should implement a set of methods for *cls*.
+
+    *flag* is the argument passed into @attr.s like 'init', *auto_detect* the
+    same as passed into @attr.s, and *dunders* is a tuple of attribute names
+    whose presence signals that the user has implemented the method
+    themselves.
+
+    Return *default* if no reason either for or against is found.
+
+ auto_detect must be False on Python 2.
+ """
+ if flag is True or flag is False:
+ return flag
+
+ if flag is None and auto_detect is False:
+ return default
+
+ # Logically, flag is None and auto_detect is True here.
+ for dunder in dunders:
+ if _has_own_attribute(cls, dunder):
+ return False
+
+ return default
+
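+# A minimal sketch (comments only, not executed): with *auto_detect* enabled,
+# a dunder defined on the class itself turns the feature off. ``C`` is a
+# hypothetical class:
+#
+#     class C(object):
+#         def __repr__(self):
+#             return "C()"
+#
+#     _determine_whether_to_implement(C, None, True, ("__repr__",))
+#     # -> False, because C implements __repr__ itself.
+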
+
+def attrs(
+ maybe_cls=None,
+ these=None,
+ repr_ns=None,
+ repr=None,
+ cmp=None,
+ hash=None,
+ init=None,
+ slots=False,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=False,
+ kw_only=False,
+ cache_hash=False,
+ auto_exc=False,
+ eq=None,
+ order=None,
+ auto_detect=False,
+ collect_by_mro=False,
+ getstate_setstate=None,
+ on_setattr=None,
+ field_transformer=None,
+ match_args=True,
+):
+ r"""
+ A class decorator that adds `dunder
+ <https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the
+ specified attributes using `attr.ib` or the *these* argument.
+
+ :param these: A dictionary of name to `attr.ib` mappings. This is
+ useful to avoid the definition of your attributes within the class body
+ because you can't (e.g. if you want to add ``__repr__`` methods to
+ Django models) or don't want to.
+
+ If *these* is not ``None``, ``attrs`` will *not* search the class body
+ for attributes and will *not* remove any attributes from it.
+
+ If *these* is an ordered dict (`dict` on Python 3.6+,
+ `collections.OrderedDict` otherwise), the order is deduced from
+ the order of the attributes inside *these*. Otherwise the order
+ of the definition of the attributes is used.
+
+ :type these: `dict` of `str` to `attr.ib`
+
+ :param str repr_ns: When using nested classes, there's no way in Python 2
+ to automatically detect that. Therefore it's possible to set the
+ namespace explicitly for a more meaningful ``repr`` output.
+ :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*,
+ *order*, and *hash* arguments explicitly, assume they are set to
+ ``True`` **unless any** of the involved methods for one of the
+ arguments is implemented in the *current* class (i.e. it is *not*
+ inherited from some base class).
+
+ So for example by implementing ``__eq__`` on a class yourself,
+ ``attrs`` will deduce ``eq=False`` and will create *neither*
+ ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible
+ ``__ne__`` by default, so it *should* be enough to only implement
+ ``__eq__`` in most cases).
+
+ .. warning::
+
+ If you prevent ``attrs`` from creating the ordering methods for you
+ (``order=False``, e.g. by implementing ``__le__``), it becomes
+ *your* responsibility to make sure its ordering is sound. The best
+ way is to use the `functools.total_ordering` decorator.
+
+
+ Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*,
+ *cmp*, or *hash* overrides whatever *auto_detect* would determine.
+
+ *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises
+ an `attrs.exceptions.PythonTooOldError`.
+
+    :param bool repr: Create a ``__repr__`` method with a human-readable
+        representation of ``attrs`` attributes.
+ :param bool str: Create a ``__str__`` method that is identical to
+ ``__repr__``. This is usually not necessary except for
+ `Exception`\ s.
+ :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__``
+ and ``__ne__`` methods that check two instances for equality.
+
+ They compare the instances as if they were tuples of their ``attrs``
+ attributes if and only if the types of both classes are *identical*!
+ :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``,
+ ``__gt__``, and ``__ge__`` methods that behave like *eq* above and
+ allow instances to be ordered. If ``None`` (default) mirror value of
+ *eq*.
+ :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq*
+ and *order* to the same value. Must not be mixed with *eq* or *order*.
+ :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method
+ is generated according how *eq* and *frozen* are set.
+
+ 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
+ 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to
+ None, marking it unhashable (which it is).
+ 3. If *eq* is False, ``__hash__`` will be left untouched meaning the
+ ``__hash__`` method of the base class will be used (if base class is
+       ``object``, this means it will fall back to id-based hashing).
+
+ Although not recommended, you can decide for yourself and force
+ ``attrs`` to create one (e.g. if the class is immutable even though you
+    didn't freeze it programmatically) by passing ``True``, or prevent it
+    from creating one by passing ``False``. Both of these cases are rather
+    special and should be used carefully.
+
+ See our documentation on `hashing`, Python's documentation on
+ `object.__hash__`, and the `GitHub issue that led to the default \
+ behavior <https://github.com/python-attrs/attrs/issues/136>`_ for more
+ details.
+ :param bool init: Create a ``__init__`` method that initializes the
+ ``attrs`` attributes. Leading underscores are stripped for the argument
+ name. If a ``__attrs_pre_init__`` method exists on the class, it will
+ be called before the class is initialized. If a ``__attrs_post_init__``
+ method exists on the class, it will be called after the class is fully
+ initialized.
+
+ If ``init`` is ``False``, an ``__attrs_init__`` method will be
+ injected instead. This allows you to define a custom ``__init__``
+ method that can do pre-init work such as ``super().__init__()``,
+ and then call ``__attrs_init__()`` and ``__attrs_post_init__()``.
+ :param bool slots: Create a `slotted class <slotted classes>` that's more
+ memory-efficient. Slotted classes are generally superior to the default
+ dict classes, but have some gotchas you should know about, so we
+ encourage you to read the `glossary entry <slotted classes>`.
+ :param bool frozen: Make instances immutable after initialization. If
+ someone attempts to modify a frozen instance,
+ `attr.exceptions.FrozenInstanceError` is raised.
+
+ .. note::
+
+ 1. This is achieved by installing a custom ``__setattr__`` method
+ on your class, so you can't implement your own.
+
+ 2. True immutability is impossible in Python.
+
+       3. This *does* have a minor runtime performance `impact
+ <how-frozen>` when initializing new instances. In other words:
+ ``__init__`` is slightly slower with ``frozen=True``.
+
+ 4. If a class is frozen, you cannot modify ``self`` in
+ ``__attrs_post_init__`` or a self-written ``__init__``. You can
+ circumvent that limitation by using
+ ``object.__setattr__(self, "attribute_name", value)``.
+
+ 5. Subclasses of a frozen class are frozen too.
+
+ :param bool weakref_slot: Make instances weak-referenceable. This has no
+ effect unless ``slots`` is also enabled.
+ :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated
+ attributes (Python 3.6 and later only) from the class body.
+
+ In this case, you **must** annotate every field. If ``attrs``
+ encounters a field that is set to an `attr.ib` but lacks a type
+ annotation, an `attr.exceptions.UnannotatedAttributeError` is
+ raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't
+ want to set a type.
+
+ If you assign a value to those attributes (e.g. ``x: int = 42``), that
+ value becomes the default value like if it were passed using
+ ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also
+ works as expected in most cases (see warning below).
+
+ Attributes annotated as `typing.ClassVar`, and attributes that are
+ neither annotated nor set to an `attr.ib` are **ignored**.
+
+ .. warning::
+ For features that use the attribute name to create decorators (e.g.
+ `validators <validators>`), you still *must* assign `attr.ib` to
+ them. Otherwise Python will either not find the name or try to use
+ the default value to call e.g. ``validator`` on it.
+
+        These errors can be quite confusing and are probably the most common
+        source of bug reports on our bug tracker.
+
+ .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/
+ :param bool kw_only: Make all attributes keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
+ :param bool cache_hash: Ensure that the object's hash code is computed
+ only once and stored on the object. If this is set to ``True``,
+ hashing must be either explicitly or implicitly enabled for this
+ class. If the hash code is cached, avoid any reassignments of
+ fields involved in hash code computation or mutations of the objects
+ those fields point to after object creation. If such changes occur,
+ the behavior of the object's hash code is undefined.
+ :param bool auto_exc: If the class subclasses `BaseException`
+ (which implicitly includes any subclass of any exception), the
+        following happens to make it behave like a well-behaved Python
+        exception class:
+
+ - the values for *eq*, *order*, and *hash* are ignored and the
+ instances compare and hash by the instance's ids (N.B. ``attrs`` will
+ *not* remove existing implementations of ``__hash__`` or the equality
+      methods. It just won't add its own.),
+ - all attributes that are either passed into ``__init__`` or have a
+ default value are additionally available as a tuple in the ``args``
+ attribute,
+ - the value of *str* is ignored leaving ``__str__`` to base classes.
+ :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs``
+ collects attributes from base classes. The default behavior is
+ incorrect in certain cases of multiple inheritance. It should be on by
+ default but is kept off for backward-compatibility.
+
+ See issue `#428 <https://github.com/python-attrs/attrs/issues/428>`_ for
+ more details.
+
+ :param Optional[bool] getstate_setstate:
+ .. note::
+ This is usually only interesting for slotted classes and you should
+ probably just set *auto_detect* to `True`.
+
+ If `True`, ``__getstate__`` and
+ ``__setstate__`` are generated and attached to the class. This is
+ necessary for slotted classes to be pickleable. If left `None`, it's
+ `True` by default for slotted classes and ``False`` for dict classes.
+
+ If *auto_detect* is `True`, and *getstate_setstate* is left `None`,
+ and **either** ``__getstate__`` or ``__setstate__`` is detected directly
+ on the class (i.e. not inherited), it is set to `False` (this is usually
+ what you want).
+
+ :param on_setattr: A callable that is run whenever the user attempts to set
+ an attribute (either by assignment like ``i.x = 42`` or by using
+ `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments
+ as validators: the instance, the attribute that is being modified, and
+ the new value.
+
+ If no exception is raised, the attribute is set to the return value of
+ the callable.
+
+ If a list of callables is passed, they're automatically wrapped in an
+ `attrs.setters.pipe`.
+
+ :param Optional[callable] field_transformer:
+ A function that is called with the original class object and all
+ fields right before ``attrs`` finalizes the class. You can use
+ this, e.g., to automatically add converters or validators to
+ fields based on their types. See `transform-fields` for more details.
+
+ :param bool match_args:
+ If `True` (default), set ``__match_args__`` on the class to support
+ `PEP 634 <https://www.python.org/dev/peps/pep-0634/>`_ (Structural
+        Pattern Matching). It is a tuple of all non-keyword-only ``__init__``
+ parameter names on Python 3.10 and later. Ignored on older Python
+ versions.
+
+ .. versionadded:: 16.0.0 *slots*
+ .. versionadded:: 16.1.0 *frozen*
+ .. versionadded:: 16.3.0 *str*
+ .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
+ .. versionchanged:: 17.1.0
+ *hash* supports ``None`` as value which is also the default now.
+ .. versionadded:: 17.3.0 *auto_attribs*
+ .. versionchanged:: 18.1.0
+ If *these* is passed, no attributes are deleted from the class body.
+ .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
+ .. versionadded:: 18.2.0 *weakref_slot*
+ .. deprecated:: 18.2.0
+ ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
+ `DeprecationWarning` if the classes compared are subclasses of
+        each other. ``__eq__`` and ``__ne__`` never tried to compare
+        subclasses to each other.
+ .. versionchanged:: 19.2.0
+ ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
+ subclasses comparable anymore.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionadded:: 18.2.0 *cache_hash*
+ .. versionadded:: 19.1.0 *auto_exc*
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+ .. versionadded:: 19.2.0 *eq* and *order*
+ .. versionadded:: 20.1.0 *auto_detect*
+ .. versionadded:: 20.1.0 *collect_by_mro*
+ .. versionadded:: 20.1.0 *getstate_setstate*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionadded:: 20.3.0 *field_transformer*
+ .. versionchanged:: 21.1.0
+ ``init=False`` injects ``__attrs_init__``
+ .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
+ .. versionadded:: 21.3.0 *match_args*
+ """
+ if auto_detect and PY2:
+ raise PythonTooOldError(
+ "auto_detect only works on Python 3 and later."
+ )
+
+ eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)
+ hash_ = hash # work around the lack of nonlocal
+
+ if isinstance(on_setattr, (list, tuple)):
+ on_setattr = setters.pipe(*on_setattr)
+
+ def wrap(cls):
+
+ if getattr(cls, "__class__", None) is None:
+ raise TypeError("attrs only works with new-style classes.")
+
+ is_frozen = frozen or _has_frozen_base_class(cls)
+ is_exc = auto_exc is True and issubclass(cls, BaseException)
+ has_own_setattr = auto_detect and _has_own_attribute(
+ cls, "__setattr__"
+ )
+
+ if has_own_setattr and is_frozen:
+ raise ValueError("Can't freeze a class with a custom __setattr__.")
+
+ builder = _ClassBuilder(
+ cls,
+ these,
+ slots,
+ is_frozen,
+ weakref_slot,
+ _determine_whether_to_implement(
+ cls,
+ getstate_setstate,
+ auto_detect,
+ ("__getstate__", "__setstate__"),
+ default=slots,
+ ),
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ collect_by_mro,
+ on_setattr,
+ has_own_setattr,
+ field_transformer,
+ )
+ if _determine_whether_to_implement(
+ cls, repr, auto_detect, ("__repr__",)
+ ):
+ builder.add_repr(repr_ns)
+ if str is True:
+ builder.add_str()
+
+ eq = _determine_whether_to_implement(
+ cls, eq_, auto_detect, ("__eq__", "__ne__")
+ )
+ if not is_exc and eq is True:
+ builder.add_eq()
+ if not is_exc and _determine_whether_to_implement(
+ cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
+ ):
+ builder.add_order()
+
+ builder.add_setattr()
+
+ if (
+ hash_ is None
+ and auto_detect is True
+ and _has_own_attribute(cls, "__hash__")
+ ):
+ hash = False
+ else:
+ hash = hash_
+ if hash is not True and hash is not False and hash is not None:
+            # Can't use `hash in (True, False, None)` here because 1 == True.
+ raise TypeError(
+ "Invalid value for hash. Must be True, False, or None."
+ )
+ elif hash is False or (hash is None and eq is False) or is_exc:
+            # Don't do anything. Should fall back to object's __hash__
+ # which is by id.
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled."
+ )
+ elif hash is True or (
+ hash is None and eq is True and is_frozen is True
+ ):
+ # Build a __hash__ if told so, or if it's safe.
+ builder.add_hash()
+ else:
+ # Raise TypeError on attempts to hash.
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled."
+ )
+ builder.make_unhashable()
+
+ if _determine_whether_to_implement(
+ cls, init, auto_detect, ("__init__",)
+ ):
+ builder.add_init()
+ else:
+ builder.add_attrs_init()
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " init must be True."
+ )
+
+ if (
+ PY310
+ and match_args
+ and not _has_own_attribute(cls, "__match_args__")
+ ):
+ builder.add_match_args()
+
+ return builder.build_class()
+
+ # maybe_cls's type depends on the usage of the decorator. It's a class
+ # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
+ if maybe_cls is None:
+ return wrap
+ else:
+ return wrap(maybe_cls)
+
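+# Illustrative usage sketch for the decorator above (kept as comments so the
+# module's runtime behavior is unchanged; the class ``C`` is hypothetical):
+#
+#     import attr
+#
+#     @attr.s(frozen=True, slots=True)
+#     class C(object):
+#         x = attr.ib()
+#         y = attr.ib(default=42)
+#
+#     C(1) == C(1, 42)      # __eq__ is generated.
+#     hash(C(1))            # frozen + eq -> __hash__ is generated too.
+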
+
+_attrs = attrs
+"""
+Internal alias so we can use it in functions that take an argument called
+*attrs*.
+"""
+
+
+if PY2:
+
+ def _has_frozen_base_class(cls):
+ """
+ Check whether *cls* has a frozen ancestor by looking at its
+ __setattr__.
+ """
+ return (
+ getattr(cls.__setattr__, "__module__", None)
+ == _frozen_setattrs.__module__
+ and cls.__setattr__.__name__ == _frozen_setattrs.__name__
+ )
+
+else:
+
+ def _has_frozen_base_class(cls):
+ """
+ Check whether *cls* has a frozen ancestor by looking at its
+ __setattr__.
+ """
+ return cls.__setattr__ == _frozen_setattrs
+
+
+def _generate_unique_filename(cls, func_name):
+ """
+ Create a "filename" suitable for a function being generated.
+ """
+ unique_filename = "<attrs generated {0} {1}.{2}>".format(
+ func_name,
+ cls.__module__,
+ getattr(cls, "__qualname__", cls.__name__),
+ )
+ return unique_filename
+
+
+def _make_hash(cls, attrs, frozen, cache_hash):
+ attrs = tuple(
+ a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
+ )
+
+ tab = " "
+
+ unique_filename = _generate_unique_filename(cls, "hash")
+ type_hash = hash(unique_filename)
+
+ hash_def = "def __hash__(self"
+ hash_func = "hash(("
+ closing_braces = "))"
+ if not cache_hash:
+ hash_def += "):"
+ else:
+ if not PY2:
+ hash_def += ", *"
+
+ hash_def += (
+ ", _cache_wrapper="
+ + "__import__('attr._make')._make._CacheHashWrapper):"
+ )
+ hash_func = "_cache_wrapper(" + hash_func
+ closing_braces += ")"
+
+ method_lines = [hash_def]
+
+ def append_hash_computation_lines(prefix, indent):
+ """
+ Generate the code for actually computing the hash code.
+ Below this will either be returned directly or used to compute
+ a value which is then cached, depending on the value of cache_hash
+ """
+
+ method_lines.extend(
+ [
+ indent + prefix + hash_func,
+ indent + " %d," % (type_hash,),
+ ]
+ )
+
+ for a in attrs:
+ method_lines.append(indent + " self.%s," % a.name)
+
+ method_lines.append(indent + " " + closing_braces)
+
+ if cache_hash:
+ method_lines.append(tab + "if self.%s is None:" % _hash_cache_field)
+ if frozen:
+ append_hash_computation_lines(
+ "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2
+ )
+ method_lines.append(tab * 2 + ")") # close __setattr__
+ else:
+ append_hash_computation_lines(
+ "self.%s = " % _hash_cache_field, tab * 2
+ )
+ method_lines.append(tab + "return self.%s" % _hash_cache_field)
+ else:
+ append_hash_computation_lines("return ", tab)
+
+ script = "\n".join(method_lines)
+ return _make_method("__hash__", script, unique_filename)
+
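+# For a class with a single attribute ``x`` and cache_hash=False, the script
+# generated above looks roughly like this (the integer is a per-class
+# constant derived from the generated filename):
+#
+#     def __hash__(self):
+#         return hash((
+#             12345,
+#             self.x,
+#         ))
+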
+
+def _add_hash(cls, attrs):
+ """
+ Add a hash method to *cls*.
+ """
+ cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
+ return cls
+
+
+def _make_ne():
+ """
+ Create __ne__ method.
+ """
+
+ def __ne__(self, other):
+ """
+ Check equality and either forward a NotImplemented or
+ return the result negated.
+ """
+ result = self.__eq__(other)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return not result
+
+ return __ne__
+
+
+def _make_eq(cls, attrs):
+ """
+ Create __eq__ method for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.eq]
+
+ unique_filename = _generate_unique_filename(cls, "eq")
+ lines = [
+ "def __eq__(self, other):",
+ " if other.__class__ is not self.__class__:",
+ " return NotImplemented",
+ ]
+
+ # We can't just do a big self.x = other.x and... clause due to
+ # irregularities like nan == nan is false but (nan,) == (nan,) is true.
+ globs = {}
+ if attrs:
+ lines.append(" return (")
+ others = [" ) == ("]
+ for a in attrs:
+ if a.eq_key:
+ cmp_name = "_%s_key" % (a.name,)
+ # Add the key function to the global namespace
+ # of the evaluated function.
+ globs[cmp_name] = a.eq_key
+ lines.append(
+ " %s(self.%s),"
+ % (
+ cmp_name,
+ a.name,
+ )
+ )
+ others.append(
+ " %s(other.%s),"
+ % (
+ cmp_name,
+ a.name,
+ )
+ )
+ else:
+ lines.append(" self.%s," % (a.name,))
+ others.append(" other.%s," % (a.name,))
+
+ lines += others + [" )"]
+ else:
+ lines.append(" return True")
+
+ script = "\n".join(lines)
+
+ return _make_method("__eq__", script, unique_filename, globs)
+
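+# For a class with a single attribute ``x`` and no eq_key, the generated
+# __eq__ script looks roughly like this:
+#
+#     def __eq__(self, other):
+#         if other.__class__ is not self.__class__:
+#             return NotImplemented
+#         return (
+#             self.x,
+#         ) == (
+#             other.x,
+#         )
+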
+
+def _make_order(cls, attrs):
+ """
+ Create ordering methods for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.order]
+
+ def attrs_to_tuple(obj):
+ """
+ Save us some typing.
+ """
+ return tuple(
+ key(value) if key else value
+ for value, key in (
+ (getattr(obj, a.name), a.order_key) for a in attrs
+ )
+ )
+
+ def __lt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) < attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __le__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) <= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __gt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) > attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __ge__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) >= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ return __lt__, __le__, __gt__, __ge__
+
+
+def _add_eq(cls, attrs=None):
+ """
+ Add equality methods to *cls* with *attrs*.
+ """
+ if attrs is None:
+ attrs = cls.__attrs_attrs__
+
+ cls.__eq__ = _make_eq(cls, attrs)
+ cls.__ne__ = _make_ne()
+
+ return cls
+
+
+if HAS_F_STRINGS:
+
+ def _make_repr(attrs, ns, cls):
+ unique_filename = _generate_unique_filename(cls, "repr")
+ # Figure out which attributes to include, and which function to use to
+ # format them. The a.repr value can be either bool or a custom
+ # callable.
+ attr_names_with_reprs = tuple(
+ (a.name, (repr if a.repr is True else a.repr), a.init)
+ for a in attrs
+ if a.repr is not False
+ )
+ globs = {
+ name + "_repr": r
+ for name, r, _ in attr_names_with_reprs
+ if r != repr
+ }
+ globs["_compat"] = _compat
+ globs["AttributeError"] = AttributeError
+ globs["NOTHING"] = NOTHING
+ attribute_fragments = []
+ for name, r, i in attr_names_with_reprs:
+ accessor = (
+ "self." + name
+ if i
+ else 'getattr(self, "' + name + '", NOTHING)'
+ )
+ fragment = (
+ "%s={%s!r}" % (name, accessor)
+ if r == repr
+ else "%s={%s_repr(%s)}" % (name, name, accessor)
+ )
+ attribute_fragments.append(fragment)
+ repr_fragment = ", ".join(attribute_fragments)
+
+ if ns is None:
+ cls_name_fragment = (
+ '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}'
+ )
+ else:
+ cls_name_fragment = ns + ".{self.__class__.__name__}"
+
+ lines = [
+ "def __repr__(self):",
+ " try:",
+ " already_repring = _compat.repr_context.already_repring",
+ " except AttributeError:",
+ " already_repring = {id(self),}",
+ " _compat.repr_context.already_repring = already_repring",
+ " else:",
+ " if id(self) in already_repring:",
+ " return '...'",
+ " else:",
+ " already_repring.add(id(self))",
+ " try:",
+ " return f'%s(%s)'" % (cls_name_fragment, repr_fragment),
+ " finally:",
+ " already_repring.remove(id(self))",
+ ]
+
+ return _make_method(
+ "__repr__", "\n".join(lines), unique_filename, globs=globs
+ )
+
+else:
+
+ def _make_repr(attrs, ns, _):
+ """
+ Make a repr method that includes relevant *attrs*, adding *ns* to the
+ full name.
+ """
+
+ # Figure out which attributes to include, and which function to use to
+ # format them. The a.repr value can be either bool or a custom
+ # callable.
+ attr_names_with_reprs = tuple(
+ (a.name, repr if a.repr is True else a.repr)
+ for a in attrs
+ if a.repr is not False
+ )
+
+ def __repr__(self):
+ """
+ Automatically created by attrs.
+ """
+ try:
+ already_repring = _compat.repr_context.already_repring
+ except AttributeError:
+ already_repring = set()
+ _compat.repr_context.already_repring = already_repring
+
+ if id(self) in already_repring:
+ return "..."
+ real_cls = self.__class__
+ if ns is None:
+ qualname = getattr(real_cls, "__qualname__", None)
+ if qualname is not None: # pragma: no cover
+ # This case only happens on Python 3.5 and 3.6. We exclude
+ # it from coverage, because we don't want to slow down our
+ # test suite by running them under coverage too for this
+ # one line.
+ class_name = qualname.rsplit(">.", 1)[-1]
+ else:
+ class_name = real_cls.__name__
+ else:
+ class_name = ns + "." + real_cls.__name__
+
+ # Since 'self' remains on the stack (i.e.: strongly referenced)
+ # for the duration of this call, it's safe to depend on id(...)
+            # stability, without needing to track the instance and therefore
+            # worry about properties like weakref- or hash-ability.
+ already_repring.add(id(self))
+ try:
+ result = [class_name, "("]
+ first = True
+ for name, attr_repr in attr_names_with_reprs:
+ if first:
+ first = False
+ else:
+ result.append(", ")
+ result.extend(
+ (name, "=", attr_repr(getattr(self, name, NOTHING)))
+ )
+ return "".join(result) + ")"
+ finally:
+ already_repring.remove(id(self))
+
+ return __repr__
+
+
+def _add_repr(cls, ns=None, attrs=None):
+ """
+ Add a repr method to *cls*.
+ """
+ if attrs is None:
+ attrs = cls.__attrs_attrs__
+
+ cls.__repr__ = _make_repr(attrs, ns, cls)
+ return cls
+
+
+def fields(cls):
+ """
+ Return the tuple of ``attrs`` attributes for a class.
+
+ The tuple also allows accessing the fields by their names (see below for
+ examples).
+
+ :param type cls: Class to introspect.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ :rtype: tuple (with name accessors) of `attrs.Attribute`
+
+ .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
+ by name.
+ """
+ if not isclass(cls):
+ raise TypeError("Passed object must be a class.")
+ attrs = getattr(cls, "__attrs_attrs__", None)
+ if attrs is None:
+ raise NotAnAttrsClassError(
+ "{cls!r} is not an attrs-decorated class.".format(cls=cls)
+ )
+ return attrs
+
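+# Illustrative usage sketch (comments only; ``C`` is a hypothetical class):
+#
+#     import attr
+#
+#     @attr.s
+#     class C(object):
+#         x = attr.ib()
+#         y = attr.ib()
+#
+#     attr.fields(C)[1].name   # -> "y"
+#     attr.fields(C).y.name    # -> "y", via the name accessor
+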
+
+def fields_dict(cls):
+ """
+ Return an ordered dictionary of ``attrs`` attributes for a class, whose
+ keys are the attribute names.
+
+ :param type cls: Class to introspect.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ :rtype: an ordered dict where keys are attribute names and values are
+ `attrs.Attribute`\\ s. This will be a `dict` if it's
+ naturally ordered like on Python 3.6+ or an
+ :class:`~collections.OrderedDict` otherwise.
+
+ .. versionadded:: 18.1.0
+ """
+ if not isclass(cls):
+ raise TypeError("Passed object must be a class.")
+ attrs = getattr(cls, "__attrs_attrs__", None)
+ if attrs is None:
+ raise NotAnAttrsClassError(
+ "{cls!r} is not an attrs-decorated class.".format(cls=cls)
+ )
+ return ordered_dict(((a.name, a) for a in attrs))
+
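+# Illustrative usage sketch (comments only; reuses the hypothetical ``C``
+# from the sketch above):
+#
+#     attr.fields_dict(C)["x"] is attr.fields(C).x   # -> True
+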
+
+def validate(inst):
+ """
+ Validate all attributes on *inst* that have a validator.
+
+    Lets all exceptions propagate through.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ """
+ if _config._run_validators is False:
+ return
+
+ for a in fields(inst.__class__):
+ v = a.validator
+ if v is not None:
+ v(inst, a, getattr(inst, a.name))
+
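+# Illustrative usage sketch (comments only; ``C`` is hypothetical). Useful
+# when validators were disabled during construction or a field was mutated:
+#
+#     import attr
+#
+#     @attr.s
+#     class C(object):
+#         x = attr.ib(validator=attr.validators.instance_of(int))
+#
+#     i = C(1)
+#     i.x = "not an int"
+#     attr.validate(i)   # raises TypeError from the instance_of validator
+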
+
+def _is_slot_cls(cls):
+ return "__slots__" in cls.__dict__
+
+
+def _is_slot_attr(a_name, base_attr_map):
+ """
+ Check if the attribute name comes from a slot class.
+ """
+ return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name])
+
+
+def _make_init(
+ cls,
+ attrs,
+ pre_init,
+ post_init,
+ frozen,
+ slots,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ cls_on_setattr,
+ attrs_init,
+):
+ has_cls_on_setattr = (
+ cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP
+ )
+
+ if frozen and has_cls_on_setattr:
+ raise ValueError("Frozen classes can't use on_setattr.")
+
+ needs_cached_setattr = cache_hash or frozen
+ filtered_attrs = []
+ attr_dict = {}
+ for a in attrs:
+ if not a.init and a.default is NOTHING:
+ continue
+
+ filtered_attrs.append(a)
+ attr_dict[a.name] = a
+
+ if a.on_setattr is not None:
+ if frozen is True:
+ raise ValueError("Frozen classes can't use on_setattr.")
+
+ needs_cached_setattr = True
+ elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
+ needs_cached_setattr = True
+
+ unique_filename = _generate_unique_filename(cls, "init")
+
+ script, globs, annotations = _attrs_to_init_script(
+ filtered_attrs,
+ frozen,
+ slots,
+ pre_init,
+ post_init,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ needs_cached_setattr,
+ has_cls_on_setattr,
+ attrs_init,
+ )
+ if cls.__module__ in sys.modules:
+ # This makes typing.get_type_hints(CLS.__init__) resolve string types.
+ globs.update(sys.modules[cls.__module__].__dict__)
+
+ globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})
+
+ if needs_cached_setattr:
+ # Save the lookup overhead in __init__ if we need to circumvent
+ # setattr hooks.
+ globs["_cached_setattr"] = _obj_setattr
+
+ init = _make_method(
+ "__attrs_init__" if attrs_init else "__init__",
+ script,
+ unique_filename,
+ globs,
+ )
+ init.__annotations__ = annotations
+
+ return init
+
+
+def _setattr(attr_name, value_var, has_on_setattr):
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*.
+ """
+ return "_setattr('%s', %s)" % (attr_name, value_var)
+
+
+def _setattr_with_converter(attr_name, value_var, has_on_setattr):
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*, but run
+ its converter first.
+ """
+ return "_setattr('%s', %s(%s))" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
+
+def _assign(attr_name, value, has_on_setattr):
+ """
+    Unless *attr_name* has an on_setattr hook, use normal assignment.
+    Otherwise delegate to _setattr.
+ """
+ if has_on_setattr:
+ return _setattr(attr_name, value, True)
+
+ return "self.%s = %s" % (attr_name, value)
+
+
+def _assign_with_converter(attr_name, value_var, has_on_setattr):
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment after
+    conversion. Otherwise delegate to _setattr_with_converter.
+ """
+ if has_on_setattr:
+ return _setattr_with_converter(attr_name, value_var, True)
+
+ return "self.%s = %s(%s)" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
+
+if PY2:
+
+ def _unpack_kw_only_py2(attr_name, default=None):
+ """
+ Unpack *attr_name* from _kw_only dict.
+ """
+ if default is not None:
+ arg_default = ", %s" % default
+ else:
+ arg_default = ""
+ return "%s = _kw_only.pop('%s'%s)" % (
+ attr_name,
+ attr_name,
+ arg_default,
+ )
+
+ def _unpack_kw_only_lines_py2(kw_only_args):
+ """
+ Unpack all *kw_only_args* from _kw_only dict and handle errors.
+
+ Given a list of strings "{attr_name}" and "{attr_name}={default}"
+        generates a list of lines of code that pop attrs from the _kw_only
+        dict and raise a TypeError similar to the builtin one if a required
+        attr is missing or an extra key is passed.
+
+ >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"])))
+ try:
+ a = _kw_only.pop('a')
+ b = _kw_only.pop('b', 42)
+ except KeyError as _key_error:
+ raise TypeError(
+ ...
+ if _kw_only:
+ raise TypeError(
+ ...
+ """
+ lines = ["try:"]
+ lines.extend(
+ " " + _unpack_kw_only_py2(*arg.split("="))
+ for arg in kw_only_args
+ )
+ lines += """\
+except KeyError as _key_error:
+ raise TypeError(
+ '__init__() missing required keyword-only argument: %s' % _key_error
+ )
+if _kw_only:
+ raise TypeError(
+ '__init__() got an unexpected keyword argument %r'
+ % next(iter(_kw_only))
+ )
+""".split(
+ "\n"
+ )
+ return lines
+
+
+def _attrs_to_init_script(
+ attrs,
+ frozen,
+ slots,
+ pre_init,
+ post_init,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ needs_cached_setattr,
+ has_cls_on_setattr,
+ attrs_init,
+):
+ """
+    Return a script of an initializer for *attrs*, a dict of globals, and a
+    dict of type annotations for the initializer's arguments.
+
+    The globals are expected by the generated script.
+
+ If *frozen* is True, we cannot set the attributes directly so we use
+ a cached ``object.__setattr__``.
+ """
+ lines = []
+ if pre_init:
+ lines.append("self.__attrs_pre_init__()")
+
+ if needs_cached_setattr:
+ lines.append(
+ # Circumvent the __setattr__ descriptor to save one lookup per
+ # assignment.
+ # Note _setattr will be used again below if cache_hash is True
+ "_setattr = _cached_setattr.__get__(self, self.__class__)"
+ )
+
+ if frozen is True:
+ if slots is True:
+ fmt_setter = _setattr
+ fmt_setter_with_converter = _setattr_with_converter
+ else:
+ # Dict frozen classes assign directly to __dict__.
+ # But only if the attribute doesn't come from an ancestor slot
+ # class.
+ # Note _inst_dict will be used again below if cache_hash is True
+ lines.append("_inst_dict = self.__dict__")
+
+ def fmt_setter(attr_name, value_var, has_on_setattr):
+ if _is_slot_attr(attr_name, base_attr_map):
+ return _setattr(attr_name, value_var, has_on_setattr)
+
+ return "_inst_dict['%s'] = %s" % (attr_name, value_var)
+
+ def fmt_setter_with_converter(
+ attr_name, value_var, has_on_setattr
+ ):
+ if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
+ return _setattr_with_converter(
+ attr_name, value_var, has_on_setattr
+ )
+
+ return "_inst_dict['%s'] = %s(%s)" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
+ else:
+ # Not frozen.
+ fmt_setter = _assign
+ fmt_setter_with_converter = _assign_with_converter
+
+ args = []
+ kw_only_args = []
+ attrs_to_validate = []
+
+ # This is a dictionary of names to validator and converter callables.
+ # Injecting this into __init__ globals lets us avoid lookups.
+ names_for_globals = {}
+ annotations = {"return": None}
+
+ for a in attrs:
+ if a.validator:
+ attrs_to_validate.append(a)
+
+ attr_name = a.name
+ has_on_setattr = a.on_setattr is not None or (
+ a.on_setattr is not setters.NO_OP and has_cls_on_setattr
+ )
+ arg_name = a.name.lstrip("_")
+
+ has_factory = isinstance(a.default, Factory)
+ if has_factory and a.default.takes_self:
+ maybe_self = "self"
+ else:
+ maybe_self = ""
+
+ if a.init is False:
+ if has_factory:
+ init_factory_name = _init_factory_pat.format(a.name)
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "(%s)" % (maybe_self,),
+ has_on_setattr,
+ )
+ )
+ conv_name = _init_converter_pat % (a.name,)
+ names_for_globals[conv_name] = a.converter
+ else:
+ lines.append(
+ fmt_setter(
+ attr_name,
+ init_factory_name + "(%s)" % (maybe_self,),
+ has_on_setattr,
+ )
+ )
+ names_for_globals[init_factory_name] = a.default.factory
+ else:
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ "attr_dict['%s'].default" % (attr_name,),
+ has_on_setattr,
+ )
+ )
+ conv_name = _init_converter_pat % (a.name,)
+ names_for_globals[conv_name] = a.converter
+ else:
+ lines.append(
+ fmt_setter(
+ attr_name,
+ "attr_dict['%s'].default" % (attr_name,),
+ has_on_setattr,
+ )
+ )
+ elif a.default is not NOTHING and not has_factory:
+ arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+ elif has_factory:
+ arg = "%s=NOTHING" % (arg_name,)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+ lines.append("if %s is not NOTHING:" % (arg_name,))
+
+ init_factory_name = _init_factory_pat.format(a.name)
+ if a.converter is not None:
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ lines.append("else:")
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "(" + maybe_self + ")",
+ has_on_setattr,
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(
+ " " + fmt_setter(attr_name, arg_name, has_on_setattr)
+ )
+ lines.append("else:")
+ lines.append(
+ " "
+ + fmt_setter(
+ attr_name,
+ init_factory_name + "(" + maybe_self + ")",
+ has_on_setattr,
+ )
+ )
+ names_for_globals[init_factory_name] = a.default.factory
+ else:
+ if a.kw_only:
+ kw_only_args.append(arg_name)
+ else:
+ args.append(arg_name)
+
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+ if a.init is True:
+ if a.type is not None and a.converter is None:
+ annotations[arg_name] = a.type
+ elif a.converter is not None and not PY2:
+ # Try to get the type from the converter.
+ sig = None
+ try:
+ sig = inspect.signature(a.converter)
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig:
+ sig_params = list(sig.parameters.values())
+ if (
+ sig_params
+ and sig_params[0].annotation
+ is not inspect.Parameter.empty
+ ):
+ annotations[arg_name] = sig_params[0].annotation
+
+ if attrs_to_validate: # we can skip this if there are no validators.
+ names_for_globals["_config"] = _config
+ lines.append("if _config._run_validators is True:")
+ for a in attrs_to_validate:
+ val_name = "__attr_validator_" + a.name
+ attr_name = "__attr_" + a.name
+ lines.append(
+ " %s(self, %s, self.%s)" % (val_name, attr_name, a.name)
+ )
+ names_for_globals[val_name] = a.validator
+ names_for_globals[attr_name] = a
+
+ if post_init:
+ lines.append("self.__attrs_post_init__()")
+
+    # Because this is set only after __attrs_post_init__ is called, a crash
+ # will result if post-init tries to access the hash code. This seemed
+ # preferable to setting this beforehand, in which case alteration to
+ # field values during post-init combined with post-init accessing the
+ # hash code would result in silent bugs.
+ if cache_hash:
+ if frozen:
+ if slots:
+ # if frozen and slots, then _setattr defined above
+ init_hash_cache = "_setattr('%s', %s)"
+ else:
+ # if frozen and not slots, then _inst_dict defined above
+ init_hash_cache = "_inst_dict['%s'] = %s"
+ else:
+ init_hash_cache = "self.%s = %s"
+ lines.append(init_hash_cache % (_hash_cache_field, "None"))
+
+ # For exceptions we rely on BaseException.__init__ for proper
+ # initialization.
+ if is_exc:
+ vals = ",".join("self." + a.name for a in attrs if a.init)
+
+ lines.append("BaseException.__init__(self, %s)" % (vals,))
+
+ args = ", ".join(args)
+ if kw_only_args:
+ if PY2:
+ lines = _unpack_kw_only_lines_py2(kw_only_args) + lines
+
+ args += "%s**_kw_only" % (", " if args else "",) # leading comma
+ else:
+ args += "%s*, %s" % (
+ ", " if args else "", # leading comma
+ ", ".join(kw_only_args), # kw_only args
+ )
+ return (
+ """\
+def {init_name}(self, {args}):
+ {lines}
+""".format(
+ init_name=("__attrs_init__" if attrs_init else "__init__"),
+ args=args,
+ lines="\n ".join(lines) if lines else "pass",
+ ),
+ names_for_globals,
+ annotations,
+ )
+
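+# For a simple mutable class with attributes ``x`` (mandatory) and ``y``
+# (with a plain default), no converters and no validators, the script
+# returned above looks roughly like this:
+#
+#     def __init__(self, x, y=attr_dict['y'].default):
+#         self.x = x
+#         self.y = y
+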
+
+class Attribute(object):
+ """
+ *Read-only* representation of an attribute.
+
+ The class has *all* arguments of `attr.ib` (except for ``factory``
+    which is only syntactic sugar for ``default=Factory(...)``) plus the
+ following:
+
+ - ``name`` (`str`): The name of the attribute.
+ - ``inherited`` (`bool`): Whether or not that attribute has been inherited
+ from a base class.
+ - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables
+ that are used for comparing and ordering objects by this attribute,
+ respectively. These are set by passing a callable to `attr.ib`'s ``eq``,
+ ``order``, or ``cmp`` arguments. See also :ref:`comparison customization
+ <custom-comparison>`.
+
+ Instances of this class are frequently used for introspection purposes
+ like:
+
+ - `fields` returns a tuple of them.
+ - Validators get them passed as the first argument.
+ - The :ref:`field transformer <transform-fields>` hook receives a list of
+ them.
+
+ .. versionadded:: 20.1.0 *inherited*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionchanged:: 20.2.0 *inherited* is not taken into account for
+ equality checks and hashing anymore.
+ .. versionadded:: 21.1.0 *eq_key* and *order_key*
+
+ For the full version history of the fields, see `attr.ib`.
+ """
+
+ __slots__ = (
+ "name",
+ "default",
+ "validator",
+ "repr",
+ "eq",
+ "eq_key",
+ "order",
+ "order_key",
+ "hash",
+ "init",
+ "metadata",
+ "type",
+ "converter",
+ "kw_only",
+ "inherited",
+ "on_setattr",
+ )
+
+ def __init__(
+ self,
+ name,
+ default,
+ validator,
+ repr,
+ cmp, # XXX: unused, remove along with other cmp code.
+ hash,
+ init,
+ inherited,
+ metadata=None,
+ type=None,
+ converter=None,
+ kw_only=False,
+ eq=None,
+ eq_key=None,
+ order=None,
+ order_key=None,
+ on_setattr=None,
+ ):
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
+ cmp, eq_key or eq, order_key or order, True
+ )
+
+ # Cache this descriptor here to speed things up later.
+ bound_setattr = _obj_setattr.__get__(self, Attribute)
+
+ # Despite the big red warning, people *do* instantiate `Attribute`
+ # themselves.
+ bound_setattr("name", name)
+ bound_setattr("default", default)
+ bound_setattr("validator", validator)
+ bound_setattr("repr", repr)
+ bound_setattr("eq", eq)
+ bound_setattr("eq_key", eq_key)
+ bound_setattr("order", order)
+ bound_setattr("order_key", order_key)
+ bound_setattr("hash", hash)
+ bound_setattr("init", init)
+ bound_setattr("converter", converter)
+ bound_setattr(
+ "metadata",
+ (
+ metadata_proxy(metadata)
+ if metadata
+ else _empty_metadata_singleton
+ ),
+ )
+ bound_setattr("type", type)
+ bound_setattr("kw_only", kw_only)
+ bound_setattr("inherited", inherited)
+ bound_setattr("on_setattr", on_setattr)
+
+ def __setattr__(self, name, value):
+ raise FrozenInstanceError()
+
+ @classmethod
+ def from_counting_attr(cls, name, ca, type=None):
+ # type holds the annotated value. deal with conflicts:
+ if type is None:
+ type = ca.type
+ elif ca.type is not None:
+ raise ValueError(
+ "Type annotation and type argument cannot both be present"
+ )
+ inst_dict = {
+ k: getattr(ca, k)
+ for k in Attribute.__slots__
+ if k
+ not in (
+ "name",
+ "validator",
+ "default",
+ "type",
+ "inherited",
+ ) # exclude methods and deprecated alias
+ }
+ return cls(
+ name=name,
+ validator=ca._validator,
+ default=ca._default,
+ type=type,
+ cmp=None,
+ inherited=False,
+ **inst_dict
+ )
+
+ @property
+ def cmp(self):
+ """
+ Simulate the presence of a cmp attribute and warn.
+ """
+ warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2)
+
+ return self.eq and self.order
+
+ # Don't use attr.evolve since fields(Attribute) doesn't work
+ def evolve(self, **changes):
+ """
+ Copy *self* and apply *changes*.
+
+ This works similarly to `attr.evolve` but that function does not work
+ with ``Attribute``.
+
+ It is mainly meant to be used for `transform-fields`.
+
+ .. versionadded:: 20.3.0
+ """
+ new = copy.copy(self)
+
+ new._setattrs(changes.items())
+
+ return new
+
+ # Don't use _add_pickle since fields(Attribute) doesn't work
+ def __getstate__(self):
+ """
+ Play nice with pickle.
+ """
+ return tuple(
+ getattr(self, name) if name != "metadata" else dict(self.metadata)
+ for name in self.__slots__
+ )
+
+ def __setstate__(self, state):
+ """
+ Play nice with pickle.
+ """
+ self._setattrs(zip(self.__slots__, state))
+
+ def _setattrs(self, name_values_pairs):
+ bound_setattr = _obj_setattr.__get__(self, Attribute)
+ for name, value in name_values_pairs:
+ if name != "metadata":
+ bound_setattr(name, value)
+ else:
+ bound_setattr(
+ name,
+ metadata_proxy(value)
+ if value
+ else _empty_metadata_singleton,
+ )
+
+
+_a = [
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=False,
+ hash=(name != "metadata"),
+ init=True,
+ inherited=False,
+ )
+ for name in Attribute.__slots__
+]
+
+Attribute = _add_hash(
+ _add_eq(
+ _add_repr(Attribute, attrs=_a),
+ attrs=[a for a in _a if a.name != "inherited"],
+ ),
+ attrs=[a for a in _a if a.hash and a.name != "inherited"],
+)
+
+
+class _CountingAttr(object):
+ """
+ Intermediate representation of attributes that uses a counter to preserve
+ the order in which the attributes have been defined.
+
+    *Internal* data structure of the attrs library. Running into it is most
+ likely the result of a bug like a forgotten `@attr.s` decorator.
+ """
+
+ __slots__ = (
+ "counter",
+ "_default",
+ "repr",
+ "eq",
+ "eq_key",
+ "order",
+ "order_key",
+ "hash",
+ "init",
+ "metadata",
+ "_validator",
+ "converter",
+ "type",
+ "kw_only",
+ "on_setattr",
+ )
+ __attrs_attrs__ = tuple(
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=True,
+ init=True,
+ kw_only=False,
+ eq=True,
+ eq_key=None,
+ order=False,
+ order_key=None,
+ inherited=False,
+ on_setattr=None,
+ )
+ for name in (
+ "counter",
+ "_default",
+ "repr",
+ "eq",
+ "order",
+ "hash",
+ "init",
+ "on_setattr",
+ )
+ ) + (
+ Attribute(
+ name="metadata",
+ default=None,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=False,
+ init=True,
+ kw_only=False,
+ eq=True,
+ eq_key=None,
+ order=False,
+ order_key=None,
+ inherited=False,
+ on_setattr=None,
+ ),
+ )
+ cls_counter = 0
+
+ def __init__(
+ self,
+ default,
+ validator,
+ repr,
+ cmp,
+ hash,
+ init,
+ converter,
+ metadata,
+ type,
+ kw_only,
+ eq,
+ eq_key,
+ order,
+ order_key,
+ on_setattr,
+ ):
+ _CountingAttr.cls_counter += 1
+ self.counter = _CountingAttr.cls_counter
+ self._default = default
+ self._validator = validator
+ self.converter = converter
+ self.repr = repr
+ self.eq = eq
+ self.eq_key = eq_key
+ self.order = order
+ self.order_key = order_key
+ self.hash = hash
+ self.init = init
+ self.metadata = metadata
+ self.type = type
+ self.kw_only = kw_only
+ self.on_setattr = on_setattr
+
+ def validator(self, meth):
+ """
+ Decorator that adds *meth* to the list of validators.
+
+ Returns *meth* unchanged.
+
+ .. versionadded:: 17.1.0
+ """
+ if self._validator is None:
+ self._validator = meth
+ else:
+ self._validator = and_(self._validator, meth)
+ return meth
+
+ def default(self, meth):
+ """
+        Decorator that allows setting the default for an attribute.
+
+ Returns *meth* unchanged.
+
+ :raises DefaultAlreadySetError: If default has been set before.
+
+ .. versionadded:: 17.1.0
+ """
+ if self._default is not NOTHING:
+ raise DefaultAlreadySetError()
+
+ self._default = Factory(meth, takes_self=True)
+
+ return meth
+
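+# Illustrative usage sketch for the validator/default decorators above
+# (comments only; ``C`` is a hypothetical class):
+#
+#     import attr
+#
+#     @attr.s
+#     class C(object):
+#         x = attr.ib()
+#         y = attr.ib()
+#
+#         @x.validator
+#         def _check_x(self, attribute, value):
+#             if value < 0:
+#                 raise ValueError("x must be non-negative")
+#
+#         @y.default
+#         def _y_default(self):
+#             return self.x + 1
+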
+
+_CountingAttr = _add_eq(_add_repr(_CountingAttr))
+
+
+class Factory(object):
+ """
+ Stores a factory callable.
+
+ If passed as the default value to `attrs.field`, the factory is used to
+ generate a new value.
+
+    :param callable factory: A callable that takes either no arguments or
+        exactly one mandatory positional argument, depending on *takes_self*.
+ :param bool takes_self: Pass the partially initialized instance that is
+ being initialized as a positional argument.
+
+ .. versionadded:: 17.1.0 *takes_self*
+ """
+
+ __slots__ = ("factory", "takes_self")
+
+ def __init__(self, factory, takes_self=False):
+ """
+ `Factory` is part of the default machinery so if we want a default
+ value here, we have to implement it ourselves.
+ """
+ self.factory = factory
+ self.takes_self = takes_self
+
+ def __getstate__(self):
+ """
+ Play nice with pickle.
+ """
+ return tuple(getattr(self, name) for name in self.__slots__)
+
+ def __setstate__(self, state):
+ """
+ Play nice with pickle.
+ """
+ for name, value in zip(self.__slots__, state):
+ setattr(self, name, value)
+
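+# Illustrative usage sketch (comments only; ``C`` is a hypothetical class):
+#
+#     import attr
+#
+#     @attr.s
+#     class C(object):
+#         items = attr.ib(default=attr.Factory(list))
+#         copy = attr.ib(
+#             default=attr.Factory(
+#                 lambda self: list(self.items), takes_self=True
+#             )
+#         )
+#
+#     C().items is not C().items   # each instance gets its own list
+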
+
+_f = [
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=False,
+ hash=True,
+ init=True,
+ inherited=False,
+ )
+ for name in Factory.__slots__
+]
+
+Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
+
+
+def make_class(name, attrs, bases=(object,), **attributes_arguments):
+ """
+ A quick way to create a new class called *name* with *attrs*.
+
+ :param str name: The name for the new class.
+
+ :param attrs: A list of names or a dictionary of mappings of names to
+ attributes.
+
+ If *attrs* is a list or an ordered dict (`dict` on Python 3.6+,
+ `collections.OrderedDict` otherwise), the order is deduced from
+ the order of the names or attributes inside *attrs*. Otherwise the
+ order of the definition of the attributes is used.
+ :type attrs: `list` or `dict`
+
+ :param tuple bases: Classes that the new class will subclass.
+
+ :param attributes_arguments: Passed unmodified to `attr.s`.
+
+ :return: A new class with *attrs*.
+ :rtype: type
+
+ .. versionadded:: 17.1.0 *bases*
+ .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
+ """
+ if isinstance(attrs, dict):
+ cls_dict = attrs
+ elif isinstance(attrs, (list, tuple)):
+ cls_dict = dict((a, attrib()) for a in attrs)
+ else:
+ raise TypeError("attrs argument must be a dict or a list.")
+
+ pre_init = cls_dict.pop("__attrs_pre_init__", None)
+ post_init = cls_dict.pop("__attrs_post_init__", None)
+ user_init = cls_dict.pop("__init__", None)
+
+ body = {}
+ if pre_init is not None:
+ body["__attrs_pre_init__"] = pre_init
+ if post_init is not None:
+ body["__attrs_post_init__"] = post_init
+ if user_init is not None:
+ body["__init__"] = user_init
+
+ type_ = new_class(name, bases, {}, lambda ns: ns.update(body))
+
+ # For pickling to work, the __module__ variable needs to be set to the
+ # frame where the class is created. Bypass this step in environments where
+ # sys._getframe is not defined (Jython for example) or sys._getframe is not
+ # defined for arguments greater than 0 (IronPython).
+ try:
+ type_.__module__ = sys._getframe(1).f_globals.get(
+ "__name__", "__main__"
+ )
+ except (AttributeError, ValueError):
+ pass
+
+ # We do it here for proper warnings with meaningful stacklevel.
+ cmp = attributes_arguments.pop("cmp", None)
+ (
+ attributes_arguments["eq"],
+ attributes_arguments["order"],
+ ) = _determine_attrs_eq_order(
+ cmp,
+ attributes_arguments.get("eq"),
+ attributes_arguments.get("order"),
+ True,
+ )
+
+ return _attrs(these=cls_dict, **attributes_arguments)(type_)
+
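+# Illustrative usage sketch (comments only): the two forms below are roughly
+# equivalent; ``Point`` is a hypothetical class name.
+#
+#     import attr
+#
+#     Point = attr.make_class("Point", ["x", "y"])
+#     # ... is roughly the same as:
+#     @attr.s
+#     class Point(object):
+#         x = attr.ib()
+#         y = attr.ib()
+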
+
+# These are required within this module so we define them here and merely
+# import them into .validators / .converters.
+
+
+@attrs(slots=True, hash=True)
+class _AndValidator(object):
+ """
+ Compose many validators to a single one.
+ """
+
+ _validators = attrib()
+
+ def __call__(self, inst, attr, value):
+ for v in self._validators:
+ v(inst, attr, value)
+
+
+def and_(*validators):
+ """
+ A validator that composes multiple validators into one.
+
+ When called on a value, it runs all wrapped validators.
+
+ :param callables validators: Arbitrary number of validators.
+
+ .. versionadded:: 17.1.0
+ """
+ vals = []
+ for validator in validators:
+ vals.extend(
+ validator._validators
+ if isinstance(validator, _AndValidator)
+ else [validator]
+ )
+
+ return _AndValidator(tuple(vals))
+
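+# Illustrative usage sketch (comments only): composing two validators. The
+# same effect is achieved by passing a list of validators to attr.ib().
+#
+#     import attr
+#
+#     v = attr.validators.and_(
+#         attr.validators.instance_of(int),
+#         attr.validators.in_(range(10)),
+#     )
+#     x = attr.ib(validator=v)
+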
+
+def pipe(*converters):
+ """
+ A converter that composes multiple converters into one.
+
+ When called on a value, it runs all wrapped converters, returning the
+ *last* value.
+
+    Type annotations will be inferred from the wrapped converters, if
+ they have any.
+
+ :param callables converters: Arbitrary number of converters.
+
+ .. versionadded:: 20.1.0
+ """
+
+ def pipe_converter(val):
+ for converter in converters:
+ val = converter(val)
+
+ return val
+
+ if not PY2:
+ if not converters:
+ # If the converter list is empty, pipe_converter is the identity.
+ A = typing.TypeVar("A")
+ pipe_converter.__annotations__ = {"val": A, "return": A}
+ else:
+ # Get parameter type.
+ sig = None
+ try:
+ sig = inspect.signature(converters[0])
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig:
+ params = list(sig.parameters.values())
+ if (
+ params
+ and params[0].annotation is not inspect.Parameter.empty
+ ):
+ pipe_converter.__annotations__["val"] = params[
+ 0
+ ].annotation
+ # Get return type.
+ sig = None
+ try:
+ sig = inspect.signature(converters[-1])
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig and sig.return_annotation is not inspect.Signature().empty:
+ pipe_converter.__annotations__[
+ "return"
+ ] = sig.return_annotation
+
+ return pipe_converter
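+
+# Illustrative usage sketch (comments only): converters run left to right.
+# The same function is also exposed as ``attr.converters.pipe``.
+#
+#     pipe(str.strip, int)(" 42 ")   # -> 42
+#     pipe(int, str)("10")           # -> "10"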
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_next_gen.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_next_gen.py
new file mode 100644
index 0000000000..068253688c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_next_gen.py
@@ -0,0 +1,216 @@
+# SPDX-License-Identifier: MIT
+
+"""
+These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
+`attr.ib` with different default values.
+"""
+
+
+from functools import partial
+
+from . import setters
+from ._funcs import asdict as _asdict
+from ._funcs import astuple as _astuple
+from ._make import (
+ NOTHING,
+ _frozen_setattrs,
+ _ng_default_on_setattr,
+ attrib,
+ attrs,
+)
+from .exceptions import UnannotatedAttributeError
+
+
+def define(
+ maybe_cls=None,
+ *,
+ these=None,
+ repr=None,
+ hash=None,
+ init=None,
+ slots=True,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=None,
+ kw_only=False,
+ cache_hash=False,
+ auto_exc=True,
+ eq=None,
+ order=False,
+ auto_detect=True,
+ getstate_setstate=None,
+ on_setattr=None,
+ field_transformer=None,
+ match_args=True,
+):
+ r"""
+ Define an ``attrs`` class.
+
+ Differences from the classic `attr.s` that it uses underneath:
+
+ - Automatically detect whether or not *auto_attribs* should be `True`
+ (c.f. *auto_attribs* parameter).
+ - If *frozen* is `False`, run converters and validators when setting an
+ attribute by default.
+ - *slots=True* (see :term:`slotted classes` for potentially surprising
+ behaviors)
+ - *auto_exc=True*
+ - *auto_detect=True*
+ - *order=False*
+ - *match_args=True*
+ - Some options that were only relevant on Python 2 or were kept around for
+ backwards-compatibility have been removed.
+
+ Please note that these are all defaults and you can change them as you
+ wish.
+
+ :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
+ exactly like `attr.s`. If left `None`, `attr.s` will try to guess:
+
+ 1. If any attributes are annotated and no unannotated `attrs.fields`\ s
+ are found, it assumes *auto_attribs=True*.
+ 2. Otherwise it assumes *auto_attribs=False* and tries to collect
+ `attrs.fields`\ s.
+
+ For now, please refer to `attr.s` for the rest of the parameters.
+
+ .. versionadded:: 20.1.0
+ .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
+ """
+
+ def do_it(cls, auto_attribs):
+ return attrs(
+ maybe_cls=cls,
+ these=these,
+ repr=repr,
+ hash=hash,
+ init=init,
+ slots=slots,
+ frozen=frozen,
+ weakref_slot=weakref_slot,
+ str=str,
+ auto_attribs=auto_attribs,
+ kw_only=kw_only,
+ cache_hash=cache_hash,
+ auto_exc=auto_exc,
+ eq=eq,
+ order=order,
+ auto_detect=auto_detect,
+ collect_by_mro=True,
+ getstate_setstate=getstate_setstate,
+ on_setattr=on_setattr,
+ field_transformer=field_transformer,
+ match_args=match_args,
+ )
+
+ def wrap(cls):
+ """
+ Making this a wrapper ensures this code runs during class creation.
+
+ We also ensure that frozen-ness of classes is inherited.
+ """
+ nonlocal frozen, on_setattr
+
+ had_on_setattr = on_setattr not in (None, setters.NO_OP)
+
+ # By default, mutable classes convert & validate on setattr.
+ if frozen is False and on_setattr is None:
+ on_setattr = _ng_default_on_setattr
+
+ # However, if we subclass a frozen class, we inherit the immutability
+ # and disable on_setattr.
+ for base_cls in cls.__bases__:
+ if base_cls.__setattr__ is _frozen_setattrs:
+ if had_on_setattr:
+ raise ValueError(
+ "Frozen classes can't use on_setattr "
+ "(frozen-ness was inherited)."
+ )
+
+ on_setattr = setters.NO_OP
+ break
+
+ if auto_attribs is not None:
+ return do_it(cls, auto_attribs)
+
+ try:
+ return do_it(cls, True)
+ except UnannotatedAttributeError:
+ return do_it(cls, False)
+
+ # maybe_cls's type depends on the usage of the decorator. It's a class
+ # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
+ if maybe_cls is None:
+ return wrap
+ else:
+ return wrap(maybe_cls)
+
+
+mutable = define
+frozen = partial(define, frozen=True, on_setattr=None)
+
+
+def field(
+ *,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ hash=None,
+ init=True,
+ metadata=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+ eq=None,
+ order=None,
+ on_setattr=None,
+):
+ """
+ Identical to `attr.ib`, except keyword-only and with some arguments
+ removed.
+
+ .. versionadded:: 20.1.0
+ """
+ return attrib(
+ default=default,
+ validator=validator,
+ repr=repr,
+ hash=hash,
+ init=init,
+ metadata=metadata,
+ converter=converter,
+ factory=factory,
+ kw_only=kw_only,
+ eq=eq,
+ order=order,
+ on_setattr=on_setattr,
+ )
+
+
+def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
+ """
+ Same as `attr.asdict`, except that collection types are always retained
+ and `dict` is always used as *dict_factory*.
+
+ .. versionadded:: 21.3.0
+ """
+ return _asdict(
+ inst=inst,
+ recurse=recurse,
+ filter=filter,
+ value_serializer=value_serializer,
+ retain_collection_types=True,
+ )
+
+
+def astuple(inst, *, recurse=True, filter=None):
+ """
+ Same as `attr.astuple`, except that collection types are always retained
+ and `tuple` is always used as the *tuple_factory*.
+
+ .. versionadded:: 21.3.0
+ """
+ return _astuple(
+ inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
+ )
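A quick, hedged sketch of the next-generation API defined in this file (``define``/``field``), assuming the vendored package imports as ``attr``; the ``Point`` class is illustrative:

    >>> import attr
    >>> @attr.define
    ... class Point:
    ...     x: int
    ...     y: int = attr.field(default=0)
    >>> Point(1)
    Point(x=1, y=0)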
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.py
new file mode 100644
index 0000000000..cdaeec37a1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.py
@@ -0,0 +1,87 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
+@total_ordering
+@attrs(eq=False, order=False, slots=True, frozen=True)
+class VersionInfo(object):
+ """
+ A version object that can be compared to a tuple of length 1--4:
+
+ >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
+ True
+ >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
+ True
+ >>> vi = attr.VersionInfo(19, 2, 0, "final")
+ >>> vi < (19, 1, 1)
+ False
+ >>> vi < (19,)
+ False
+ >>> vi == (19, 2,)
+ True
+ >>> vi == (19, 2, 1)
+ False
+
+ .. versionadded:: 19.2
+ """
+
+ year = attrib(type=int)
+ minor = attrib(type=int)
+ micro = attrib(type=int)
+ releaselevel = attrib(type=str)
+
+ @classmethod
+ def _from_version_string(cls, s):
+ """
+ Parse *s* and return a `VersionInfo`.
+ """
+ v = s.split(".")
+ if len(v) == 3:
+ v.append("final")
+
+ return cls(
+ year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
+ )
+
+ def _ensure_tuple(self, other):
+ """
+ Ensure *other* is a tuple of a valid length.
+
+ Returns a possibly transformed *other* and ourselves as a tuple of
+ the same length as *other*.
+ """
+
+ if self.__class__ is other.__class__:
+ other = astuple(other)
+
+ if not isinstance(other, tuple):
+ raise NotImplementedError
+
+ if not (1 <= len(other) <= 4):
+ raise NotImplementedError
+
+ return astuple(self)[: len(other)], other
+
+ def __eq__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ return us == them
+
+ def __lt__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
+ # have to do anything special with releaselevel for now.
+ return us < them
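A brief illustration of the comparison behavior implemented above, assuming it is exposed as ``attr.VersionInfo``:

    >>> import attr
    >>> attr.VersionInfo(21, 4, 0, "final") >= (21, 3)
    True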
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.pyi
new file mode 100644
index 0000000000..45ced08633
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/_version_info.pyi
@@ -0,0 +1,9 @@
+class VersionInfo:
+ @property
+ def year(self) -> int: ...
+ @property
+ def minor(self) -> int: ...
+ @property
+ def micro(self) -> int: ...
+ @property
+ def releaselevel(self) -> str: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.py
new file mode 100644
index 0000000000..1fb6c05d7b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.py
@@ -0,0 +1,155 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful converters.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from ._compat import PY2
+from ._make import NOTHING, Factory, pipe
+
+
+if not PY2:
+ import inspect
+ import typing
+
+
+__all__ = [
+ "default_if_none",
+ "optional",
+ "pipe",
+ "to_bool",
+]
+
+
+def optional(converter):
+ """
+ A converter that allows an attribute to be optional. An optional attribute
+ is one which can be set to ``None``.
+
+ Type annotations will be inferred from the wrapped converter's
+ annotations, if it has any.
+
+ :param callable converter: the converter that is used for non-``None``
+ values.
+
+ .. versionadded:: 17.1.0
+ """
+
+ def optional_converter(val):
+ if val is None:
+ return None
+ return converter(val)
+
+ if not PY2:
+ sig = None
+ try:
+ sig = inspect.signature(converter)
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig:
+ params = list(sig.parameters.values())
+ if params and params[0].annotation is not inspect.Parameter.empty:
+ optional_converter.__annotations__["val"] = typing.Optional[
+ params[0].annotation
+ ]
+ if sig.return_annotation is not inspect.Signature.empty:
+ optional_converter.__annotations__["return"] = typing.Optional[
+ sig.return_annotation
+ ]
+
+ return optional_converter
+
+
+def default_if_none(default=NOTHING, factory=None):
+ """
+ A converter that allows replacing ``None`` values with *default* or the
+ result of *factory*.
+
+ :param default: Value to be used if ``None`` is passed. Passing an instance
+ of `attrs.Factory` is supported; however, the ``takes_self`` option
+ is *not*.
+ :param callable factory: A callable that takes no parameters whose result
+ is used if ``None`` is passed.
+
+ :raises TypeError: If **neither** *default* **nor** *factory* is passed.
+ :raises TypeError: If **both** *default* and *factory* are passed.
+ :raises ValueError: If an instance of `attrs.Factory` is passed with
+ ``takes_self=True``.
+
+ .. versionadded:: 18.2.0
+ """
+ if default is NOTHING and factory is None:
+ raise TypeError("Must pass either `default` or `factory`.")
+
+ if default is not NOTHING and factory is not None:
+ raise TypeError(
+ "Must pass either `default` or `factory` but not both."
+ )
+
+ if factory is not None:
+ default = Factory(factory)
+
+ if isinstance(default, Factory):
+ if default.takes_self:
+ raise ValueError(
+ "`takes_self` is not supported by default_if_none."
+ )
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default.factory()
+
+ else:
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default
+
+ return default_if_none_converter
+
+
+def to_bool(val):
+ """
+ Convert "boolean" strings (e.g., from env. vars.) to real booleans.
+
+ Values mapping to :code:`True`:
+
+ - :code:`True`
+ - :code:`"true"` / :code:`"t"`
+ - :code:`"yes"` / :code:`"y"`
+ - :code:`"on"`
+ - :code:`"1"`
+ - :code:`1`
+
+ Values mapping to :code:`False`:
+
+ - :code:`False`
+ - :code:`"false"` / :code:`"f"`
+ - :code:`"no"` / :code:`"n"`
+ - :code:`"off"`
+ - :code:`"0"`
+ - :code:`0`
+
+ :raises ValueError: for any other value.
+
+ .. versionadded:: 21.3.0
+ """
+ if isinstance(val, str):
+ val = val.lower()
+ truthy = {True, "true", "t", "yes", "y", "on", "1", 1}
+ falsy = {False, "false", "f", "no", "n", "off", "0", 0}
+ try:
+ if val in truthy:
+ return True
+ if val in falsy:
+ return False
+ except TypeError:
+ # Raised when "val" is not hashable (e.g., lists)
+ pass
+ raise ValueError("Cannot convert value to bool: {}".format(val))
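A small, hedged sketch of the three converters defined above:

    >>> import attr
    >>> attr.converters.optional(int)(None) is None
    True
    >>> attr.converters.default_if_none(factory=list)(None)
    []
    >>> attr.converters.to_bool("off")
    False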
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.pyi
new file mode 100644
index 0000000000..0f58088a37
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/converters.pyi
@@ -0,0 +1,13 @@
+from typing import Callable, Optional, TypeVar, overload
+
+from . import _ConverterType
+
+_T = TypeVar("_T")
+
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
+def to_bool(val: str) -> bool: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.py
new file mode 100644
index 0000000000..b2f1edc32a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.py
@@ -0,0 +1,94 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+
+class FrozenError(AttributeError):
+ """
+ An attempt has been made to modify a frozen/immutable instance or
+ attribute.
+
+ It mirrors the behavior of ``namedtuples`` by using the same error message
+ and subclassing `AttributeError`.
+
+ .. versionadded:: 20.1.0
+ """
+
+ msg = "can't set attribute"
+ args = [msg]
+
+
+class FrozenInstanceError(FrozenError):
+ """
+ An attempt has been made to modify a frozen instance.
+
+ .. versionadded:: 16.1.0
+ """
+
+
+class FrozenAttributeError(FrozenError):
+ """
+ An attempt has been made to modify a frozen attribute.
+
+ .. versionadded:: 20.1.0
+ """
+
+
+class AttrsAttributeNotFoundError(ValueError):
+ """
+ An ``attrs`` function couldn't find an attribute that the user asked for.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class NotAnAttrsClassError(ValueError):
+ """
+ A non-``attrs`` class has been passed into an ``attrs`` function.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class DefaultAlreadySetError(RuntimeError):
+ """
+ A default has been set using ``attr.ib()`` and an attempt has been made
+ to reset it using the decorator.
+
+ .. versionadded:: 17.1.0
+ """
+
+
+class UnannotatedAttributeError(RuntimeError):
+ """
+ A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
+ annotation.
+
+ .. versionadded:: 17.3.0
+ """
+
+
+class PythonTooOldError(RuntimeError):
+ """
+ An ``attrs`` feature that requires a newer Python version has been
+ used.
+
+ .. versionadded:: 18.2.0
+ """
+
+
+class NotCallableError(TypeError):
+ """
+ An ``attr.ib()`` requiring a callable has been set with a value
+ that is not callable.
+
+ .. versionadded:: 19.2.0
+ """
+
+ def __init__(self, msg, value):
+ super(TypeError, self).__init__(msg, value)
+ self.msg = msg
+ self.value = value
+
+ def __str__(self):
+ return str(self.msg)
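For context, a hedged sketch of the most common of these exceptions in practice, using the ``attr.frozen`` decorator added earlier in this diff (the ``Config`` class is illustrative):

    >>> import attr
    >>> @attr.frozen
    ... class Config:
    ...     host: str
    >>> c = Config("localhost")
    >>> try:
    ...     c.host = "example.org"
    ... except attr.exceptions.FrozenInstanceError:
    ...     print("frozen")
    frozen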
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.pyi
new file mode 100644
index 0000000000..f2680118b4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/exceptions.pyi
@@ -0,0 +1,17 @@
+from typing import Any
+
+class FrozenError(AttributeError):
+ msg: str = ...
+
+class FrozenInstanceError(FrozenError): ...
+class FrozenAttributeError(FrozenError): ...
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
+class PythonTooOldError(RuntimeError): ...
+
+class NotCallableError(TypeError):
+ msg: str = ...
+ value: Any = ...
+ def __init__(self, msg: str, value: Any) -> None: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.py
new file mode 100644
index 0000000000..a1978a8775
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.py
@@ -0,0 +1,54 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful filters for `attr.asdict`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from ._compat import isclass
+from ._make import Attribute
+
+
+def _split_what(what):
+ """
+ Returns a tuple of `frozenset`s of classes and attributes.
+ """
+ return (
+ frozenset(cls for cls in what if isclass(cls)),
+ frozenset(cls for cls in what if isinstance(cls, Attribute)),
+ )
+
+
+def include(*what):
+ """
+ Include *what*.
+
+ :param what: What to include.
+ :type what: `list` of `type` or `attrs.Attribute`\\ s
+
+ :rtype: `callable`
+ """
+ cls, attrs = _split_what(what)
+
+ def include_(attribute, value):
+ return value.__class__ in cls or attribute in attrs
+
+ return include_
+
+
+def exclude(*what):
+ """
+ Exclude *what*.
+
+ :param what: What to exclude.
+ :type what: `list` of classes or `attrs.Attribute`\\ s.
+
+ :rtype: `callable`
+ """
+ cls, attrs = _split_what(what)
+
+ def exclude_(attribute, value):
+ return value.__class__ not in cls and attribute not in attrs
+
+ return exclude_
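A minimal sketch of the two filters above, applied through ``attr.asdict`` (the ``User`` class and its fields are illustrative):

    >>> import attr
    >>> @attr.s
    ... class User(object):
    ...     name = attr.ib()
    ...     password = attr.ib()
    >>> u = User("jane", "hunter2")
    >>> attr.asdict(u, filter=attr.filters.exclude(attr.fields(User).password))
    {'name': 'jane'}
    >>> attr.asdict(u, filter=attr.filters.include(str))
    {'name': 'jane', 'password': 'hunter2'}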
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.pyi
new file mode 100644
index 0000000000..993866865e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/filters.pyi
@@ -0,0 +1,6 @@
+from typing import Any, Union
+
+from . import Attribute, _FilterType
+
+def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
+def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/py.typed b/testing/web-platform/tests/tools/third_party/attrs/src/attr/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.py
new file mode 100644
index 0000000000..b1cbb5d83e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.py
@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly used hooks for on_setattr.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
+def pipe(*setters):
+ """
+ Run all *setters* and return the return value of the last one.
+
+ .. versionadded:: 20.1.0
+ """
+
+ def wrapped_pipe(instance, attrib, new_value):
+ rv = new_value
+
+ for setter in setters:
+ rv = setter(instance, attrib, rv)
+
+ return rv
+
+ return wrapped_pipe
+
+
+def frozen(_, __, ___):
+ """
+ Prevent an attribute from being modified.
+
+ .. versionadded:: 20.1.0
+ """
+ raise FrozenAttributeError()
+
+
+def validate(instance, attrib, new_value):
+ """
+ Run *attrib*'s validator on *new_value* if it has one.
+
+ .. versionadded:: 20.1.0
+ """
+ if _config._run_validators is False:
+ return new_value
+
+ v = attrib.validator
+ if not v:
+ return new_value
+
+ v(instance, attrib, new_value)
+
+ return new_value
+
+
+def convert(instance, attrib, new_value):
+ """
+ Run *attrib*'s converter -- if it has one -- on *new_value* and return the
+ result.
+
+ .. versionadded:: 20.1.0
+ """
+ c = attrib.converter
+ if c:
+ return c(new_value)
+
+ return new_value
+
+
+NO_OP = object()
+"""
+Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+
+Does not work in `pipe` or within lists.
+
+.. versionadded:: 20.1.0
+"""
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.pyi
new file mode 100644
index 0000000000..3f5603c2b0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/setters.pyi
@@ -0,0 +1,19 @@
+from typing import Any, NewType, NoReturn, TypeVar, cast
+
+from . import Attribute, _OnSetAttrType
+
+_T = TypeVar("_T")
+
+def frozen(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> NoReturn: ...
+def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
+def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
+
+# convert is allowed to return Any, because converters can be chained using pipe.
+def convert(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> Any: ...
+
+_NoOpType = NewType("_NoOpType", object)
+NO_OP: _NoOpType
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.py b/testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.py
new file mode 100644
index 0000000000..0b0c8342f2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.py
@@ -0,0 +1,561 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful validators.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import operator
+import re
+
+from contextlib import contextmanager
+
+from ._config import get_run_validators, set_run_validators
+from ._make import _AndValidator, and_, attrib, attrs
+from .exceptions import NotCallableError
+
+
+try:
+ Pattern = re.Pattern
+except AttributeError: # Python <3.7 lacks a Pattern type.
+ Pattern = type(re.compile(""))
+
+
+__all__ = [
+ "and_",
+ "deep_iterable",
+ "deep_mapping",
+ "disabled",
+ "ge",
+ "get_disabled",
+ "gt",
+ "in_",
+ "instance_of",
+ "is_callable",
+ "le",
+ "lt",
+ "matches_re",
+ "max_len",
+ "optional",
+ "provides",
+ "set_disabled",
+]
+
+
+def set_disabled(disabled):
+ """
+ Globally disable or enable running validators.
+
+ By default, they are run.
+
+ :param disabled: If ``True``, disable running all validators.
+ :type disabled: bool
+
+ .. warning::
+
+ This function is not thread-safe!
+
+ .. versionadded:: 21.3.0
+ """
+ set_run_validators(not disabled)
+
+
+def get_disabled():
+ """
+ Return a bool indicating whether validators are currently disabled or not.
+
+ :return: ``True`` if validators are currently disabled.
+ :rtype: bool
+
+ .. versionadded:: 21.3.0
+ """
+ return not get_run_validators()
+
+
+@contextmanager
+def disabled():
+ """
+ Context manager that disables running validators within its context.
+
+ .. warning::
+
+ This context manager is not thread-safe!
+
+ .. versionadded:: 21.3.0
+ """
+ set_run_validators(False)
+ try:
+ yield
+ finally:
+ set_run_validators(True)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InstanceOfValidator(object):
+ type = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not isinstance(value, self.type):
+ raise TypeError(
+ "'{name}' must be {type!r} (got {value!r} that is a "
+ "{actual!r}).".format(
+ name=attr.name,
+ type=self.type,
+ actual=value.__class__,
+ value=value,
+ ),
+ attr,
+ self.type,
+ value,
+ )
+
+ def __repr__(self):
+ return "<instance_of validator for type {type!r}>".format(
+ type=self.type
+ )
+
+
+def instance_of(type):
+ """
+ A validator that raises a `TypeError` if the initializer is called
+ with a wrong type for this particular attribute (checks are performed
+ using `isinstance`, so it's also valid to pass a tuple of types).
+
+ :param type: The type to check for.
+ :type type: type or tuple of types
+
+ :raises TypeError: With a human readable error message, the attribute
+ (of type `attrs.Attribute`), the expected type, and the value it
+ got.
+ """
+ return _InstanceOfValidator(type)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MatchesReValidator(object):
+ pattern = attrib()
+ match_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.match_func(value):
+ raise ValueError(
+ "'{name}' must match regex {pattern!r}"
+ " ({value!r} doesn't)".format(
+ name=attr.name, pattern=self.pattern.pattern, value=value
+ ),
+ attr,
+ self.pattern,
+ value,
+ )
+
+ def __repr__(self):
+ return "<matches_re validator for pattern {pattern!r}>".format(
+ pattern=self.pattern
+ )
+
+
+def matches_re(regex, flags=0, func=None):
+ r"""
+ A validator that raises `ValueError` if the initializer is called
+ with a string that doesn't match *regex*.
+
+ :param regex: a regex string or precompiled pattern to match against
+ :param int flags: flags that will be passed to the underlying re function
+ (default 0)
+ :param callable func: which underlying `re` function to call (options
+ are `re.fullmatch`, `re.search`, and `re.match`; the default
+ is ``None``, which means `re.fullmatch` or an emulation of
+ it on Python 2). For performance reasons, the chosen function is not
+ used directly but via a pre-`re.compile`\ ed pattern.
+
+ .. versionadded:: 19.2.0
+ .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
+ """
+ fullmatch = getattr(re, "fullmatch", None)
+ valid_funcs = (fullmatch, None, re.search, re.match)
+ if func not in valid_funcs:
+ raise ValueError(
+ "'func' must be one of {}.".format(
+ ", ".join(
+ sorted(
+ e and e.__name__ or "None" for e in set(valid_funcs)
+ )
+ )
+ )
+ )
+
+ if isinstance(regex, Pattern):
+ if flags:
+ raise TypeError(
+ "'flags' can only be used with a string pattern; "
+ "pass flags to re.compile() instead"
+ )
+ pattern = regex
+ else:
+ pattern = re.compile(regex, flags)
+
+ if func is re.match:
+ match_func = pattern.match
+ elif func is re.search:
+ match_func = pattern.search
+ elif fullmatch:
+ match_func = pattern.fullmatch
+ else: # Python 2 fullmatch emulation (https://bugs.python.org/issue16203)
+ pattern = re.compile(
+ r"(?:{})\Z".format(pattern.pattern), pattern.flags
+ )
+ match_func = pattern.match
+
+ return _MatchesReValidator(pattern, match_func)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _ProvidesValidator(object):
+ interface = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.interface.providedBy(value):
+ raise TypeError(
+ "'{name}' must provide {interface!r} which {value!r} "
+ "doesn't.".format(
+ name=attr.name, interface=self.interface, value=value
+ ),
+ attr,
+ self.interface,
+ value,
+ )
+
+ def __repr__(self):
+ return "<provides validator for interface {interface!r}>".format(
+ interface=self.interface
+ )
+
+
+def provides(interface):
+ """
+ A validator that raises a `TypeError` if the initializer is called
+ with an object that does not provide the requested *interface*. Checks
+ are performed using ``interface.providedBy(value)`` (see `zope.interface
+ <https://zopeinterface.readthedocs.io/en/latest/>`_).
+
+ :param interface: The interface to check for.
+ :type interface: ``zope.interface.Interface``
+
+ :raises TypeError: With a human readable error message, the attribute
+ (of type `attrs.Attribute`), the expected interface, and the
+ value it got.
+ """
+ return _ProvidesValidator(interface)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _OptionalValidator(object):
+ validator = attrib()
+
+ def __call__(self, inst, attr, value):
+ if value is None:
+ return
+
+ self.validator(inst, attr, value)
+
+ def __repr__(self):
+ return "<optional validator for {what} or None>".format(
+ what=repr(self.validator)
+ )
+
+
+def optional(validator):
+ """
+ A validator that makes an attribute optional. An optional attribute is one
+ which can be set to ``None`` in addition to satisfying the requirements of
+ the sub-validator.
+
+ :param validator: A validator (or a list of validators) that is used for
+ non-``None`` values.
+ :type validator: callable or `list` of callables.
+
+ .. versionadded:: 15.1.0
+ .. versionchanged:: 17.1.0 *validator* can be a list of validators.
+ """
+ if isinstance(validator, list):
+ return _OptionalValidator(_AndValidator(validator))
+ return _OptionalValidator(validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InValidator(object):
+ options = attrib()
+
+ def __call__(self, inst, attr, value):
+ try:
+ in_options = value in self.options
+ except TypeError: # e.g. `1 in "abc"`
+ in_options = False
+
+ if not in_options:
+ raise ValueError(
+ "'{name}' must be in {options!r} (got {value!r})".format(
+ name=attr.name, options=self.options, value=value
+ )
+ )
+
+ def __repr__(self):
+ return "<in_ validator with options {options!r}>".format(
+ options=self.options
+ )
+
+
+def in_(options):
+ """
+ A validator that raises a `ValueError` if the initializer is called
+ with a value that does not belong in the options provided. The check is
+ performed using ``value in options``.
+
+ :param options: Allowed options.
+ :type options: list, tuple, `enum.Enum`, ...
+
+ :raises ValueError: With a human readable error message, the attribute (of
+ type `attrs.Attribute`), the expected options, and the value it
+ got.
+
+ .. versionadded:: 17.1.0
+ """
+ return _InValidator(options)
+
+
+@attrs(repr=False, slots=False, hash=True)
+class _IsCallableValidator(object):
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not callable(value):
+ message = (
+ "'{name}' must be callable "
+ "(got {value!r} that is a {actual!r})."
+ )
+ raise NotCallableError(
+ msg=message.format(
+ name=attr.name, value=value, actual=value.__class__
+ ),
+ value=value,
+ )
+
+ def __repr__(self):
+ return "<is_callable validator>"
+
+
+def is_callable():
+ """
+ A validator that raises an `attr.exceptions.NotCallableError` if the
+ initializer is called with a value for this particular attribute
+ that is not callable.
+
+ .. versionadded:: 19.1.0
+
+ :raises `attr.exceptions.NotCallableError`: With a human readable error
+ message containing the attribute (`attrs.Attribute`) name,
+ and the value it got.
+ """
+ return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepIterable(object):
+ member_validator = attrib(validator=is_callable())
+ iterable_validator = attrib(
+ default=None, validator=optional(is_callable())
+ )
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.iterable_validator is not None:
+ self.iterable_validator(inst, attr, value)
+
+ for member in value:
+ self.member_validator(inst, attr, member)
+
+ def __repr__(self):
+ iterable_identifier = (
+ ""
+ if self.iterable_validator is None
+ else " {iterable!r}".format(iterable=self.iterable_validator)
+ )
+ return (
+ "<deep_iterable validator for{iterable_identifier}"
+ " iterables of {member!r}>"
+ ).format(
+ iterable_identifier=iterable_identifier,
+ member=self.member_validator,
+ )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+ """
+ A validator that performs deep validation of an iterable.
+
+ :param member_validator: Validator to apply to iterable members
+ :param iterable_validator: Validator to apply to iterable itself
+ (optional)
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepMapping(object):
+ key_validator = attrib(validator=is_callable())
+ value_validator = attrib(validator=is_callable())
+ mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.mapping_validator is not None:
+ self.mapping_validator(inst, attr, value)
+
+ for key in value:
+ self.key_validator(inst, attr, key)
+ self.value_validator(inst, attr, value[key])
+
+ def __repr__(self):
+ return (
+ "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
+ ).format(key=self.key_validator, value=self.value_validator)
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+ """
+ A validator that performs deep validation of a dictionary.
+
+ :param key_validator: Validator to apply to dictionary keys
+ :param value_validator: Validator to apply to dictionary values
+ :param mapping_validator: Validator to apply to top-level mapping
+ attribute (optional)
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ return _DeepMapping(key_validator, value_validator, mapping_validator)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _NumberValidator(object):
+ bound = attrib()
+ compare_op = attrib()
+ compare_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.compare_func(value, self.bound):
+ raise ValueError(
+ "'{name}' must be {op} {bound}: {value}".format(
+ name=attr.name,
+ op=self.compare_op,
+ bound=self.bound,
+ value=value,
+ )
+ )
+
+ def __repr__(self):
+ return "<Validator for x {op} {bound}>".format(
+ op=self.compare_op, bound=self.bound
+ )
+
+
+def lt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a number larger than or equal to *val*.
+
+ :param val: Exclusive upper bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<", operator.lt)
+
+
+def le(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a number greater than *val*.
+
+ :param val: Inclusive upper bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<=", operator.le)
+
+
+def ge(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a number smaller than *val*.
+
+ :param val: Inclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">=", operator.ge)
+
+
+def gt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a number smaller than or equal to *val*.
+
+ :param val: Exclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">", operator.gt)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MaxLengthValidator(object):
+ max_length = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if len(value) > self.max_length:
+ raise ValueError(
+ "Length of '{name}' must be <= {max}: {len}".format(
+ name=attr.name, max=self.max_length, len=len(value)
+ )
+ )
+
+ def __repr__(self):
+ return "<max_len validator for {max}>".format(max=self.max_length)
+
+
+def max_len(length):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a string or iterable that is longer than *length*.
+
+ :param int length: Maximum length of the string or iterable
+
+ .. versionadded:: 21.3.0
+ """
+ return _MaxLengthValidator(length)
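To close the file out, a small hedged sketch combining several of the validators above on one attribute (the ``Server`` class is illustrative):

    >>> import attr
    >>> @attr.s
    ... class Server(object):
    ...     port = attr.ib(validator=[attr.validators.instance_of(int),
    ...                               attr.validators.ge(1),
    ...                               attr.validators.le(65535)])
    >>> Server(8080)
    Server(port=8080)
    >>> Server(0)
    Traceback (most recent call last):
        ...
    ValueError: 'port' must be >= 1: 0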
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.pyi
new file mode 100644
index 0000000000..5e00b85433
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attr/validators.pyi
@@ -0,0 +1,78 @@
+from typing import (
+ Any,
+ AnyStr,
+ Callable,
+ Container,
+ ContextManager,
+ Iterable,
+ List,
+ Mapping,
+ Match,
+ Optional,
+ Pattern,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+from . import _ValidatorType
+
+_T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
+
+def set_disabled(run: bool) -> None: ...
+def get_disabled() -> bool: ...
+def disabled() -> ContextManager[None]: ...
+
+# To be more precise on instance_of, we use some overloads.
+# If there are more than 3 items in the tuple, we fall back to Any.
+@overload
+def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(
+ type: Tuple[Type[_T1], Type[_T2]]
+) -> _ValidatorType[Union[_T1, _T2]]: ...
+@overload
+def instance_of(
+ type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
+) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
+@overload
+def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
+def provides(interface: Any) -> _ValidatorType[Any]: ...
+def optional(
+ validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
+) -> _ValidatorType[Optional[_T]]: ...
+def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
+def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def matches_re(
+ regex: Union[Pattern[AnyStr], AnyStr],
+ flags: int = ...,
+ func: Optional[
+ Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
+ ] = ...,
+) -> _ValidatorType[AnyStr]: ...
+def deep_iterable(
+ member_validator: _ValidatorType[_T],
+ iterable_validator: Optional[_ValidatorType[_I]] = ...,
+) -> _ValidatorType[_I]: ...
+def deep_mapping(
+ key_validator: _ValidatorType[_K],
+ value_validator: _ValidatorType[_V],
+ mapping_validator: Optional[_ValidatorType[_M]] = ...,
+) -> _ValidatorType[_M]: ...
+def is_callable() -> _ValidatorType[_T]: ...
+def lt(val: _T) -> _ValidatorType[_T]: ...
+def le(val: _T) -> _ValidatorType[_T]: ...
+def ge(val: _T) -> _ValidatorType[_T]: ...
+def gt(val: _T) -> _ValidatorType[_T]: ...
+def max_len(length: int) -> _ValidatorType[_T]: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.py b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.py
new file mode 100644
index 0000000000..a704b8b56b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.py
@@ -0,0 +1,70 @@
+# SPDX-License-Identifier: MIT
+
+from attr import (
+ NOTHING,
+ Attribute,
+ Factory,
+ __author__,
+ __copyright__,
+ __description__,
+ __doc__,
+ __email__,
+ __license__,
+ __title__,
+ __url__,
+ __version__,
+ __version_info__,
+ assoc,
+ cmp_using,
+ define,
+ evolve,
+ field,
+ fields,
+ fields_dict,
+ frozen,
+ has,
+ make_class,
+ mutable,
+ resolve_types,
+ validate,
+)
+from attr._next_gen import asdict, astuple
+
+from . import converters, exceptions, filters, setters, validators
+
+
+__all__ = [
+ "__author__",
+ "__copyright__",
+ "__description__",
+ "__doc__",
+ "__email__",
+ "__license__",
+ "__title__",
+ "__url__",
+ "__version__",
+ "__version_info__",
+ "asdict",
+ "assoc",
+ "astuple",
+ "Attribute",
+ "cmp_using",
+ "converters",
+ "define",
+ "evolve",
+ "exceptions",
+ "Factory",
+ "field",
+ "fields_dict",
+ "fields",
+ "filters",
+ "frozen",
+ "has",
+ "make_class",
+ "mutable",
+ "NOTHING",
+ "resolve_types",
+ "setters",
+ "validate",
+ "validators",
+]
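A short sketch of the new ``attrs`` import namespace this file introduces; it re-exports the modern API under the package name (the ``Coordinates`` class is illustrative):

    >>> import attrs
    >>> @attrs.define
    ... class Coordinates:
    ...     x: int
    ...     y: int
    >>> attrs.asdict(Coordinates(1, 2))
    {'x': 1, 'y': 2}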
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.pyi b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.pyi
new file mode 100644
index 0000000000..7426fa5ddb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/__init__.pyi
@@ -0,0 +1,63 @@
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Mapping,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+)
+
+# Because we need to type our own stuff, we have to make everything from
+# attr explicitly public too.
+from attr import __author__ as __author__
+from attr import __copyright__ as __copyright__
+from attr import __description__ as __description__
+from attr import __email__ as __email__
+from attr import __license__ as __license__
+from attr import __title__ as __title__
+from attr import __url__ as __url__
+from attr import __version__ as __version__
+from attr import __version_info__ as __version_info__
+from attr import _FilterType
+from attr import assoc as assoc
+from attr import Attribute as Attribute
+from attr import define as define
+from attr import evolve as evolve
+from attr import Factory as Factory
+from attr import exceptions as exceptions
+from attr import field as field
+from attr import fields as fields
+from attr import fields_dict as fields_dict
+from attr import frozen as frozen
+from attr import has as has
+from attr import make_class as make_class
+from attr import mutable as mutable
+from attr import NOTHING as NOTHING
+from attr import resolve_types as resolve_types
+from attr import setters as setters
+from attr import validate as validate
+from attr import validators as validators
+
+# TODO: see definition of attr.asdict/astuple
+def asdict(
+ inst: Any,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ dict_factory: Type[Mapping[Any, Any]] = ...,
+ retain_collection_types: bool = ...,
+ value_serializer: Optional[
+ Callable[[type, Attribute[Any], Any], Any]
+ ] = ...,
+ tuple_keys: bool = ...,
+) -> Dict[str, Any]: ...
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+def astuple(
+ inst: Any,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ tuple_factory: Type[Sequence[Any]] = ...,
+ retain_collection_types: bool = ...,
+) -> Tuple[Any, ...]: ...
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/converters.py b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/converters.py
new file mode 100644
index 0000000000..edfa8d3c16
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/converters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.converters import * # noqa
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/exceptions.py b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/exceptions.py
new file mode 100644
index 0000000000..bd9efed202
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/exceptions.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.exceptions import * # noqa
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/filters.py b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/filters.py
new file mode 100644
index 0000000000..52959005b0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/filters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.filters import * # noqa
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/py.typed b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/setters.py b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/setters.py
new file mode 100644
index 0000000000..9b50770804
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/setters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.setters import * # noqa
diff --git a/testing/web-platform/tests/tools/third_party/attrs/src/attrs/validators.py b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/validators.py
new file mode 100644
index 0000000000..ab2c9b3024
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/src/attrs/validators.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.validators import * # noqa
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/__init__.py b/testing/web-platform/tests/tools/third_party/attrs/tests/__init__.py
new file mode 100644
index 0000000000..548d2d447d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/__init__.py
@@ -0,0 +1 @@
+# SPDX-License-Identifier: MIT
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/attr_import_star.py b/testing/web-platform/tests/tools/third_party/attrs/tests/attr_import_star.py
new file mode 100644
index 0000000000..eaec321bac
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/attr_import_star.py
@@ -0,0 +1,10 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import
+
+from attr import * # noqa: F401,F403
+
+
+# This is imported by test_import::test_from_attr_import_star; this must
+# be done indirectly because importing * is only allowed on module level,
+# so can't be done inside a test.
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/dataclass_transform_example.py b/testing/web-platform/tests/tools/third_party/attrs/tests/dataclass_transform_example.py
new file mode 100644
index 0000000000..49e09061a8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/dataclass_transform_example.py
@@ -0,0 +1,45 @@
+# SPDX-License-Identifier: MIT
+
+import attr
+
+
+@attr.define()
+class Define:
+ a: str
+ b: int
+
+
+reveal_type(Define.__init__) # noqa
+
+
+@attr.define()
+class DefineConverter:
+ # mypy plugin adapts the "int" method signature, pyright does not
+ with_converter: int = attr.field(converter=int)
+
+
+reveal_type(DefineConverter.__init__) # noqa
+
+
+# mypy plugin supports attr.frozen, pyright does not
+@attr.frozen()
+class Frozen:
+ a: str
+
+
+d = Frozen("a")
+d.a = "new"
+
+reveal_type(d.a) # noqa
+
+
+# but pyright supports attr.define(frozen)
+@attr.define(frozen=True)
+class FrozenDefine:
+ a: str
+
+
+d2 = FrozenDefine("a")
+d2.a = "new"
+
+reveal_type(d2.a) # noqa
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/strategies.py b/testing/web-platform/tests/tools/third_party/attrs/tests/strategies.py
new file mode 100644
index 0000000000..99f9f48536
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/strategies.py
@@ -0,0 +1,198 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Testing strategies for Hypothesis-based tests.
+"""
+
+import keyword
+import string
+
+from collections import OrderedDict
+
+from hypothesis import strategies as st
+
+import attr
+
+from .utils import make_class
+
+
+optional_bool = st.one_of(st.none(), st.booleans())
+
+
+def gen_attr_names():
+ """
+ Generate names for attributes, 'a'...'z', then 'aa'...'zz'.
+
+ ~702 different attribute names should be enough in practice.
+
+ Some short strings (such as 'as') are keywords, so we skip them.
+ """
+ lc = string.ascii_lowercase
+ for c in lc:
+ yield c
+ for outer in lc:
+ for inner in lc:
+ res = outer + inner
+ if keyword.iskeyword(res):
+ continue
+ yield outer + inner
+
+
+def maybe_underscore_prefix(source):
+ """
+ A generator to sometimes prepend an underscore.
+ """
+ to_underscore = False
+ for val in source:
+ yield val if not to_underscore else "_" + val
+ to_underscore = not to_underscore
+
+
+@st.composite
+def _create_hyp_nested_strategy(draw, simple_class_strategy):
+ """
+ Create a recursive attrs class.
+
+ Given a strategy for building (simpler) classes, create and return
+ Given a strategy for building (simpler) classes, create and return
+ a strategy for building classes that have as an attribute either the
+ simpler class itself, a list of simpler classes, a tuple of simpler
+ classes, an ordered dict, or a dict mapping the string "cls" to a
+ simpler class.
+ cls = draw(simple_class_strategy)
+ factories = [
+ cls,
+ lambda: [cls()],
+ lambda: (cls(),),
+ lambda: {"cls": cls()},
+ lambda: OrderedDict([("cls", cls())]),
+ ]
+ factory = draw(st.sampled_from(factories))
+ attrs = draw(list_of_attrs) + [attr.ib(default=attr.Factory(factory))]
+ return make_class("HypClass", dict(zip(gen_attr_names(), attrs)))
+
+
+bare_attrs = st.builds(attr.ib, default=st.none())
+int_attrs = st.integers().map(lambda i: attr.ib(default=i))
+str_attrs = st.text().map(lambda s: attr.ib(default=s))
+float_attrs = st.floats().map(lambda f: attr.ib(default=f))
+dict_attrs = st.dictionaries(keys=st.text(), values=st.integers()).map(
+ lambda d: attr.ib(default=d)
+)
+
+simple_attrs_without_metadata = (
+ bare_attrs | int_attrs | str_attrs | float_attrs | dict_attrs
+)
+
+
+@st.composite
+def simple_attrs_with_metadata(draw):
+ """
+ Create a simple attribute with arbitrary metadata.
+ """
+ c_attr = draw(simple_attrs)
+ keys = st.booleans() | st.binary() | st.integers() | st.text()
+ vals = st.booleans() | st.binary() | st.integers() | st.text()
+ metadata = draw(
+ st.dictionaries(keys=keys, values=vals, min_size=1, max_size=3)
+ )
+
+ return attr.ib(
+ default=c_attr._default,
+ validator=c_attr._validator,
+ repr=c_attr.repr,
+ eq=c_attr.eq,
+ order=c_attr.order,
+ hash=c_attr.hash,
+ init=c_attr.init,
+ metadata=metadata,
+ type=None,
+ converter=c_attr.converter,
+ )
+
+
+simple_attrs = simple_attrs_without_metadata | simple_attrs_with_metadata()
+
+# Python functions support up to 255 arguments.
+list_of_attrs = st.lists(simple_attrs, max_size=3)
+
+
+@st.composite
+def simple_classes(
+ draw, slots=None, frozen=None, weakref_slot=None, private_attrs=None
+):
+ """
+ A strategy that generates classes with default non-attr attributes.
+
+ For example, this strategy might generate a class such as:
+
+ @attr.s(slots=True, frozen=True, weakref_slot=True)
+ class HypClass:
+ a = attr.ib(default=1)
+ _b = attr.ib(default=None)
+ c = attr.ib(default='text')
+ _d = attr.ib(default=1.0)
+ e = attr.ib(default={'t': 1})
+
+ By default, all combinations of slots, frozen, and weakref_slot classes
+ will be generated. If `slots=True` is passed in, only slotted classes will
+ be generated, and if `slots=False` is passed in, no slotted classes will be
+ generated. The same applies to `frozen` and `weakref_slot`.
+
+ By default, some attributes will be private (i.e. prefixed with an
+ underscore). If `private_attrs=True` is passed in, all attributes will be
+ private, and if `private_attrs=False`, no attributes will be private.
+ """
+ attrs = draw(list_of_attrs)
+ frozen_flag = draw(st.booleans())
+ slots_flag = draw(st.booleans())
+ weakref_flag = draw(st.booleans())
+
+ if private_attrs is None:
+ attr_names = maybe_underscore_prefix(gen_attr_names())
+ elif private_attrs is True:
+ attr_names = ("_" + n for n in gen_attr_names())
+ elif private_attrs is False:
+ attr_names = gen_attr_names()
+
+ cls_dict = dict(zip(attr_names, attrs))
+ pre_init_flag = draw(st.booleans())
+ post_init_flag = draw(st.booleans())
+ init_flag = draw(st.booleans())
+
+ if pre_init_flag:
+
+ def pre_init(self):
+ pass
+
+ cls_dict["__attrs_pre_init__"] = pre_init
+
+ if post_init_flag:
+
+ def post_init(self):
+ pass
+
+ cls_dict["__attrs_post_init__"] = post_init
+
+ if not init_flag:
+
+ def init(self, *args, **kwargs):
+ self.__attrs_init__(*args, **kwargs)
+
+ cls_dict["__init__"] = init
+
+ return make_class(
+ "HypClass",
+ cls_dict,
+ slots=slots_flag if slots is None else slots,
+ frozen=frozen_flag if frozen is None else frozen,
+ weakref_slot=weakref_flag if weakref_slot is None else weakref_slot,
+ init=init_flag,
+ )
+
+
+# st.recursive works by taking a base strategy (in this case, simple_classes)
+# and a special function. This function receives a strategy, and returns
+# another strategy (building on top of the base strategy).
+nested_classes = st.recursive(
+ simple_classes(), _create_hyp_nested_strategy, max_leaves=3
+)
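For orientation, a hedged sketch of how these strategies are typically consumed elsewhere in the test suite (the test name is illustrative):

    from hypothesis import given

    from .strategies import simple_classes

    @given(simple_classes())
    def test_smoke(cls):
        # Generated classes should be instantiable with their defaults.
        cls()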
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_3rd_party.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_3rd_party.py
new file mode 100644
index 0000000000..8866d7f6ef
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_3rd_party.py
@@ -0,0 +1,31 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for compatibility against other Python modules.
+"""
+
+import pytest
+
+from hypothesis import given
+
+from .strategies import simple_classes
+
+
+cloudpickle = pytest.importorskip("cloudpickle")
+
+
+class TestCloudpickleCompat(object):
+ """
+ Tests for compatibility with ``cloudpickle``.
+ """
+
+ @given(simple_classes())
+ def test_repr(self, cls):
+ """
+ attrs instances can be pickled and un-pickled with cloudpickle.
+ """
+ inst = cls()
+ # Exact values aren't a concern so long as neither direction
+ # raises an exception.
+ pkl = cloudpickle.dumps(inst)
+ cloudpickle.loads(pkl)
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_annotations.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_annotations.py
new file mode 100644
index 0000000000..a201ebf7fa
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_annotations.py
@@ -0,0 +1,671 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for PEP-526 type annotations.
+
+Python 3.6+ only.
+"""
+
+import sys
+import types
+import typing
+
+import pytest
+
+import attr
+
+from attr._make import _is_class_var
+from attr.exceptions import UnannotatedAttributeError
+
+
+def assert_init_annotations(cls, **annotations):
+ """
+ Assert cls.__init__ has the correct annotations.
+ """
+ __tracebackhide__ = True
+
+ annotations["return"] = type(None)
+
+ assert annotations == typing.get_type_hints(cls.__init__)
+
+
+class TestAnnotations:
+ """
+ Tests for types derived from variable annotations (PEP-526).
+ """
+
+ def test_basic_annotations(self):
+ """
+ Sets the `Attribute.type` attr from basic type annotations.
+ """
+
+ @attr.resolve_types
+ @attr.s
+ class C:
+ x: int = attr.ib()
+ y = attr.ib(type=str)
+ z = attr.ib()
+
+ assert int is attr.fields(C).x.type
+ assert str is attr.fields(C).y.type
+ assert None is attr.fields(C).z.type
+ assert_init_annotations(C, x=int, y=str)
+
+ def test_catches_basic_type_conflict(self):
+ """
+ Raises ValueError if type is specified both ways.
+ """
+ with pytest.raises(ValueError) as e:
+
+ @attr.s
+ class C:
+ x: int = attr.ib(type=int)
+
+ assert (
+ "Type annotation and type argument cannot both be present",
+ ) == e.value.args
+
+ def test_typing_annotations(self):
+ """
+ Sets the `Attribute.type` attr from typing annotations.
+ """
+
+ @attr.resolve_types
+ @attr.s
+ class C:
+ x: typing.List[int] = attr.ib()
+ y = attr.ib(type=typing.Optional[str])
+
+ assert typing.List[int] is attr.fields(C).x.type
+ assert typing.Optional[str] is attr.fields(C).y.type
+ assert_init_annotations(C, x=typing.List[int], y=typing.Optional[str])
+
+ def test_only_attrs_annotations_collected(self):
+ """
+ Annotations that aren't set to an attr.ib are ignored.
+ """
+
+ @attr.resolve_types
+ @attr.s
+ class C:
+ x: typing.List[int] = attr.ib()
+ y: int
+
+ assert 1 == len(attr.fields(C))
+ assert_init_annotations(C, x=typing.List[int])
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_auto_attribs(self, slots):
+ """
+ If *auto_attribs* is True, bare annotations are collected too.
+ Defaults work and class variables are ignored.
+ """
+
+ @attr.s(auto_attribs=True, slots=slots)
+ class C:
+ cls_var: typing.ClassVar[int] = 23
+ a: int
+ x: typing.List[int] = attr.Factory(list)
+ y: int = 2
+ z: int = attr.ib(default=3)
+ foo: typing.Any = None
+
+ i = C(42)
+ assert "C(a=42, x=[], y=2, z=3, foo=None)" == repr(i)
+
+ attr_names = set(a.name for a in C.__attrs_attrs__)
+ assert "a" in attr_names # just double check that the set works
+ assert "cls_var" not in attr_names
+
+ attr.resolve_types(C)
+
+ assert int == attr.fields(C).a.type
+
+ assert attr.Factory(list) == attr.fields(C).x.default
+ assert typing.List[int] == attr.fields(C).x.type
+
+ assert int == attr.fields(C).y.type
+ assert 2 == attr.fields(C).y.default
+
+ assert int == attr.fields(C).z.type
+
+ assert typing.Any == attr.fields(C).foo.type
+
+ # Class body is clean.
+ if slots is False:
+ with pytest.raises(AttributeError):
+ C.y
+
+ assert 2 == i.y
+ else:
+ assert isinstance(C.y, types.MemberDescriptorType)
+
+ i.y = 23
+ assert 23 == i.y
+
+ assert_init_annotations(
+ C,
+ a=int,
+ x=typing.List[int],
+ y=int,
+ z=int,
+ foo=typing.Optional[typing.Any],
+ )
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_auto_attribs_unannotated(self, slots):
+ """
+ Unannotated `attr.ib`s raise an error.
+ """
+ with pytest.raises(UnannotatedAttributeError) as e:
+
+ @attr.s(slots=slots, auto_attribs=True)
+ class C:
+ v = attr.ib()
+ x: int
+ y = attr.ib()
+ z: str
+
+ assert (
+ "The following `attr.ib`s lack a type annotation: v, y.",
+ ) == e.value.args
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_auto_attribs_subclassing(self, slots):
+ """
+ Attributes from base classes are inherited; it doesn't matter whether
+ the subclass has annotations or not.
+
+ Ref #291
+ """
+
+ @attr.resolve_types
+ @attr.s(slots=slots, auto_attribs=True)
+ class A:
+ a: int = 1
+
+ @attr.resolve_types
+ @attr.s(slots=slots, auto_attribs=True)
+ class B(A):
+ b: int = 2
+
+ @attr.resolve_types
+ @attr.s(slots=slots, auto_attribs=True)
+ class C(A):
+ pass
+
+ assert "B(a=1, b=2)" == repr(B())
+ assert "C(a=1)" == repr(C())
+ assert_init_annotations(A, a=int)
+ assert_init_annotations(B, a=int, b=int)
+ assert_init_annotations(C, a=int)
+
+ def test_converter_annotations(self):
+ """
+ An unannotated attribute with an annotated converter gets its
+ annotation from the converter.
+ """
+
+ def int2str(x: int) -> str:
+ return str(x)
+
+ @attr.s
+ class A:
+ a = attr.ib(converter=int2str)
+
+ assert_init_annotations(A, a=int)
+
+ def int2str_(x: int, y: str = ""):
+ return str(x)
+
+ @attr.s
+ class A:
+ a = attr.ib(converter=int2str_)
+
+ assert_init_annotations(A, a=int)
+
+ def test_converter_attrib_annotations(self):
+ """
+ If a converter is provided, an explicit type annotation has no
+ effect on an attribute's type annotation.
+ """
+
+ def int2str(x: int) -> str:
+ return str(x)
+
+ @attr.s
+ class A:
+ a: str = attr.ib(converter=int2str)
+ b = attr.ib(converter=int2str, type=str)
+
+ assert_init_annotations(A, a=int, b=int)
+
+ def test_non_introspectable_converter(self):
+ """
+ A non-introspectable converter doesn't cause a crash.
+ """
+
+ @attr.s
+ class A:
+ a = attr.ib(converter=print)
+
+ def test_nullary_converter(self):
+ """
+        A converter with no arguments doesn't cause a crash.
+ """
+
+ def noop():
+ pass
+
+ @attr.s
+ class A:
+ a = attr.ib(converter=noop)
+
+ assert A.__init__.__annotations__ == {"return": None}
+
+ def test_pipe(self):
+ """
+ pipe() uses the input annotation of its first argument and the
+ output annotation of its last argument.
+ """
+
+ def int2str(x: int) -> str:
+ return str(x)
+
+ def strlen(y: str) -> int:
+ return len(y)
+
+ def identity(z):
+ return z
+
+ assert attr.converters.pipe(int2str).__annotations__ == {
+ "val": int,
+ "return": str,
+ }
+ assert attr.converters.pipe(int2str, strlen).__annotations__ == {
+ "val": int,
+ "return": int,
+ }
+ assert attr.converters.pipe(identity, strlen).__annotations__ == {
+ "return": int
+ }
+ assert attr.converters.pipe(int2str, identity).__annotations__ == {
+ "val": int
+ }
+
+ def int2str_(x: int, y: int = 0) -> str:
+ return str(x)
+
+ assert attr.converters.pipe(int2str_).__annotations__ == {
+ "val": int,
+ "return": str,
+ }
+
+ def test_pipe_empty(self):
+ """
+ pipe() with no converters is annotated like the identity.
+ """
+
+ p = attr.converters.pipe()
+ assert "val" in p.__annotations__
+ t = p.__annotations__["val"]
+ assert isinstance(t, typing.TypeVar)
+ assert p.__annotations__ == {"val": t, "return": t}
+
+ def test_pipe_non_introspectable(self):
+ """
+ pipe() doesn't crash when passed a non-introspectable converter.
+ """
+
+ assert attr.converters.pipe(print).__annotations__ == {}
+
+ def test_pipe_nullary(self):
+ """
+ pipe() doesn't crash when passed a nullary converter.
+ """
+
+ def noop():
+ pass
+
+ assert attr.converters.pipe(noop).__annotations__ == {}
+
+ def test_optional(self):
+ """
+ optional() uses the annotations of the converter it wraps.
+ """
+
+ def int2str(x: int) -> str:
+ return str(x)
+
+ def int_identity(x: int):
+ return x
+
+ def strify(x) -> str:
+ return str(x)
+
+ def identity(x):
+ return x
+
+ assert attr.converters.optional(int2str).__annotations__ == {
+ "val": typing.Optional[int],
+ "return": typing.Optional[str],
+ }
+ assert attr.converters.optional(int_identity).__annotations__ == {
+ "val": typing.Optional[int]
+ }
+ assert attr.converters.optional(strify).__annotations__ == {
+ "return": typing.Optional[str]
+ }
+ assert attr.converters.optional(identity).__annotations__ == {}
+
+ def int2str_(x: int, y: int = 0) -> str:
+ return str(x)
+
+ assert attr.converters.optional(int2str_).__annotations__ == {
+ "val": typing.Optional[int],
+ "return": typing.Optional[str],
+ }
+
+ def test_optional_non_introspectable(self):
+ """
+ optional() doesn't crash when passed a non-introspectable
+ converter.
+ """
+
+ assert attr.converters.optional(print).__annotations__ == {}
+
+ def test_optional_nullary(self):
+ """
+ optional() doesn't crash when passed a nullary converter.
+ """
+
+ def noop():
+ pass
+
+ assert attr.converters.optional(noop).__annotations__ == {}
+
+ @pytest.mark.xfail(
+ sys.version_info[:2] == (3, 6), reason="Does not work on 3.6."
+ )
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_annotations_strings(self, slots):
+ """
+ String annotations are passed into __init__ as is.
+
+ It fails on 3.6 due to a bug in Python.
+ """
+ import typing as t
+
+ from typing import ClassVar
+
+ @attr.s(auto_attribs=True, slots=slots)
+ class C:
+ cls_var1: "typing.ClassVar[int]" = 23
+ cls_var2: "ClassVar[int]" = 23
+ cls_var3: "t.ClassVar[int]" = 23
+ a: "int"
+ x: "typing.List[int]" = attr.Factory(list)
+ y: "int" = 2
+ z: "int" = attr.ib(default=3)
+ foo: "typing.Any" = None
+
+ attr.resolve_types(C, locals(), globals())
+
+ assert_init_annotations(
+ C,
+ a=int,
+ x=typing.List[int],
+ y=int,
+ z=int,
+ foo=typing.Optional[typing.Any],
+ )
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_typing_extensions_classvar(self, slots):
+ """
+ If ClassVar is coming from typing_extensions, it is recognized too.
+ """
+
+ @attr.s(auto_attribs=True, slots=slots)
+ class C:
+ cls_var: "typing_extensions.ClassVar" = 23 # noqa
+
+ assert_init_annotations(C)
+
+ def test_keyword_only_auto_attribs(self):
+ """
+ `kw_only` propagates to attributes defined via `auto_attribs`.
+ """
+
+ @attr.s(auto_attribs=True, kw_only=True)
+ class C:
+ x: int
+ y: int
+
+ with pytest.raises(TypeError):
+ C(0, 1)
+
+ with pytest.raises(TypeError):
+ C(x=0)
+
+ c = C(x=0, y=1)
+
+ assert c.x == 0
+ assert c.y == 1
+
+ def test_base_class_variable(self):
+ """
+ Base class' class variables can be overridden with an attribute
+ without resorting to using an explicit `attr.ib()`.
+ """
+
+ class Base:
+ x: int = 42
+
+ @attr.s(auto_attribs=True)
+ class C(Base):
+ x: int
+
+ assert 1 == C(1).x
+
+ def test_removes_none_too(self):
+ """
+ Regression test for #523: make sure defaults that are set to None are
+ removed too.
+ """
+
+ @attr.s(auto_attribs=True)
+ class C:
+ x: int = 42
+ y: typing.Any = None
+
+ with pytest.raises(AttributeError):
+ C.x
+
+ with pytest.raises(AttributeError):
+ C.y
+
+ def test_non_comparable_defaults(self):
+ """
+ Regression test for #585: objects that are not directly comparable
+ (for example numpy arrays) would cause a crash when used as
+ default values of an attrs auto-attrib class.
+ """
+
+ class NonComparable:
+ def __eq__(self, other):
+ raise ValueError
+
+ @attr.s(auto_attribs=True)
+ class C:
+ x: typing.Any = NonComparable()
+
+ def test_basic_resolve(self):
+ """
+ Resolve the `Attribute.type` attr from basic type annotations.
+ Unannotated types are ignored.
+ """
+
+ @attr.s
+ class C:
+ x: "int" = attr.ib()
+ y = attr.ib(type=str)
+ z = attr.ib()
+
+ attr.resolve_types(C)
+
+ assert int is attr.fields(C).x.type
+ assert str is attr.fields(C).y.type
+ assert None is attr.fields(C).z.type
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_resolve_types_auto_attrib(self, slots):
+ """
+ Types can be resolved even when strings are involved.
+ """
+
+ @attr.s(slots=slots, auto_attribs=True)
+ class A:
+ a: typing.List[int]
+ b: typing.List["int"]
+ c: "typing.List[int]"
+
+ # Note: I don't have to pass globals and locals here because
+ # int is a builtin and will be available in any scope.
+ attr.resolve_types(A)
+
+ assert typing.List[int] == attr.fields(A).a.type
+ assert typing.List[int] == attr.fields(A).b.type
+ assert typing.List[int] == attr.fields(A).c.type
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_resolve_types_decorator(self, slots):
+ """
+ Types can be resolved using it as a decorator.
+ """
+
+ @attr.resolve_types
+ @attr.s(slots=slots, auto_attribs=True)
+ class A:
+ a: typing.List[int]
+ b: typing.List["int"]
+ c: "typing.List[int]"
+
+ assert typing.List[int] == attr.fields(A).a.type
+ assert typing.List[int] == attr.fields(A).b.type
+ assert typing.List[int] == attr.fields(A).c.type
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_self_reference(self, slots):
+ """
+ References to self class using quotes can be resolved.
+ """
+
+ @attr.s(slots=slots, auto_attribs=True)
+ class A:
+ a: "A"
+ b: typing.Optional["A"] # noqa: will resolve below
+
+ attr.resolve_types(A, globals(), locals())
+
+ assert A == attr.fields(A).a.type
+ assert typing.Optional[A] == attr.fields(A).b.type
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_forward_reference(self, slots):
+ """
+ Forward references can be resolved.
+ """
+
+ @attr.s(slots=slots, auto_attribs=True)
+ class A:
+ a: typing.List["B"] # noqa: will resolve below
+
+ @attr.s(slots=slots, auto_attribs=True)
+ class B:
+ a: A
+
+ attr.resolve_types(A, globals(), locals())
+ attr.resolve_types(B, globals(), locals())
+
+ assert typing.List[B] == attr.fields(A).a.type
+ assert A == attr.fields(B).a.type
+
+ assert typing.List[B] == attr.fields(A).a.type
+ assert A == attr.fields(B).a.type
+
+ def test_init_type_hints(self):
+ """
+ Forward references in __init__ can be automatically resolved.
+ """
+
+ @attr.s
+ class C:
+ x = attr.ib(type="typing.List[int]")
+
+ assert_init_annotations(C, x=typing.List[int])
+
+ def test_init_type_hints_fake_module(self):
+ """
+ If you somehow set the __module__ to something that doesn't exist
+ you'll lose __init__ resolution.
+ """
+
+ class C:
+ x = attr.ib(type="typing.List[int]")
+
+ C.__module__ = "totally fake"
+ C = attr.s(C)
+
+ with pytest.raises(NameError):
+ typing.get_type_hints(C.__init__)
+
+ def test_inheritance(self):
+ """
+ Subclasses can be resolved after the parent is resolved.
+ """
+
+ @attr.define()
+ class A:
+ n: "int"
+
+ @attr.define()
+ class B(A):
+ pass
+
+ attr.resolve_types(A)
+ attr.resolve_types(B)
+
+ assert int == attr.fields(A).n.type
+ assert int == attr.fields(B).n.type
+
+ def test_resolve_twice(self):
+ """
+ You can call resolve_types as many times as you like.
+ This test is here mostly for coverage.
+ """
+
+ @attr.define()
+ class A:
+ n: "int"
+
+ attr.resolve_types(A)
+ assert int == attr.fields(A).n.type
+ attr.resolve_types(A)
+ assert int == attr.fields(A).n.type
+
+
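+# A minimal usage sketch (illustrative only, not exercised by the tests
+# above): resolve_types() is mainly useful when annotations are strings,
+# e.g. under ``from __future__ import annotations`` (PEP 563). Assuming
+# ``import attr`` and ``import typing``:
+#
+#   from __future__ import annotations
+#
+#   @attr.define
+#   class Point:
+#       x: int
+#       y: typing.Optional[Point] = None
+#
+#   attr.resolve_types(Point, globals(), locals())
+#   assert attr.fields(Point).y.type == typing.Optional[Point]
+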
+@pytest.mark.parametrize(
+ "annot",
+ [
+ typing.ClassVar,
+ "typing.ClassVar",
+ "'typing.ClassVar[dict]'",
+ "t.ClassVar[int]",
+ ],
+)
+def test_is_class_var(annot):
+ """
+ ClassVars are detected, even if they're a string or quoted.
+ """
+ assert _is_class_var(annot)
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_cmp.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_cmp.py
new file mode 100644
index 0000000000..ec2c687489
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_cmp.py
@@ -0,0 +1,510 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for methods from `attr._cmp`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import pytest
+
+from attr._cmp import cmp_using
+from attr._compat import PY2
+
+
+# Test parameters.
+EqCSameType = cmp_using(eq=lambda a, b: a == b, class_name="EqCSameType")
+PartialOrderCSameType = cmp_using(
+ eq=lambda a, b: a == b,
+ lt=lambda a, b: a < b,
+ class_name="PartialOrderCSameType",
+)
+FullOrderCSameType = cmp_using(
+ eq=lambda a, b: a == b,
+ lt=lambda a, b: a < b,
+ le=lambda a, b: a <= b,
+ gt=lambda a, b: a > b,
+ ge=lambda a, b: a >= b,
+ class_name="FullOrderCSameType",
+)
+
+EqCAnyType = cmp_using(
+ eq=lambda a, b: a == b, require_same_type=False, class_name="EqCAnyType"
+)
+PartialOrderCAnyType = cmp_using(
+ eq=lambda a, b: a == b,
+ lt=lambda a, b: a < b,
+ require_same_type=False,
+ class_name="PartialOrderCAnyType",
+)
+
+
+eq_data = [
+ (EqCSameType, True),
+ (EqCAnyType, False),
+]
+
+order_data = [
+ (PartialOrderCSameType, True),
+ (PartialOrderCAnyType, False),
+ (FullOrderCSameType, True),
+]
+
+eq_ids = [c[0].__name__ for c in eq_data]
+order_ids = [c[0].__name__ for c in order_data]
+
+cmp_data = eq_data + order_data
+cmp_ids = eq_ids + order_ids
+
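+# A minimal usage sketch (illustrative; assumes ``import attr``, and numpy is
+# only an example dependency): classes built by cmp_using() are normally
+# passed to the ``eq``/``order`` arguments of a field to customize how that
+# field is compared.
+#
+#   import numpy
+#
+#   @attr.s
+#   class ArrayHolder(object):
+#       data = attr.ib(eq=cmp_using(eq=numpy.array_equal))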
+
+class TestEqOrder(object):
+ """
+ Tests for eq and order related methods.
+ """
+
+ #########
+ # eq
+ #########
+ @pytest.mark.parametrize("cls, requires_same_type", cmp_data, ids=cmp_ids)
+ def test_equal_same_type(self, cls, requires_same_type):
+ """
+ Equal objects are detected as equal.
+ """
+ assert cls(1) == cls(1)
+ assert not (cls(1) != cls(1))
+
+ @pytest.mark.parametrize("cls, requires_same_type", cmp_data, ids=cmp_ids)
+ def test_unequal_same_type(self, cls, requires_same_type):
+ """
+ Unequal objects of correct type are detected as unequal.
+ """
+ assert cls(1) != cls(2)
+ assert not (cls(1) == cls(2))
+
+ @pytest.mark.parametrize("cls, requires_same_type", cmp_data, ids=cmp_ids)
+ def test_equal_different_type(self, cls, requires_same_type):
+ """
+ Equal values of different types are detected appropriately.
+ """
+ assert (cls(1) == cls(1.0)) == (not requires_same_type)
+ assert not (cls(1) != cls(1.0)) == (not requires_same_type)
+
+ #########
+ # lt
+ #########
+ @pytest.mark.skipif(PY2, reason="PY2 does not raise TypeError")
+ @pytest.mark.parametrize("cls, requires_same_type", eq_data, ids=eq_ids)
+ def test_lt_unorderable(self, cls, requires_same_type):
+ """
+ TypeError is raised if class does not implement __lt__.
+ """
+ with pytest.raises(TypeError):
+ cls(1) < cls(2)
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_lt_same_type(self, cls, requires_same_type):
+ """
+ Less-than objects are detected appropriately.
+ """
+ assert cls(1) < cls(2)
+ assert not (cls(2) < cls(1))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_not_lt_same_type(self, cls, requires_same_type):
+ """
+ Not less-than objects are detected appropriately.
+ """
+ assert cls(2) >= cls(1)
+ assert not (cls(1) >= cls(2))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_lt_different_type(self, cls, requires_same_type):
+ """
+ Less-than values of different types are detected appropriately.
+ """
+ if requires_same_type:
+ # Unlike __eq__, NotImplemented will cause an exception to be
+ # raised from __lt__.
+ if not PY2:
+ with pytest.raises(TypeError):
+ cls(1) < cls(2.0)
+ else:
+ assert cls(1) < cls(2.0)
+ assert not (cls(2) < cls(1.0))
+
+ #########
+ # le
+ #########
+ @pytest.mark.skipif(PY2, reason="PY2 does not raise TypeError")
+ @pytest.mark.parametrize("cls, requires_same_type", eq_data, ids=eq_ids)
+ def test_le_unorderable(self, cls, requires_same_type):
+ """
+ TypeError is raised if class does not implement __le__.
+ """
+ with pytest.raises(TypeError):
+ cls(1) <= cls(2)
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_le_same_type(self, cls, requires_same_type):
+ """
+ Less-than-or-equal objects are detected appropriately.
+ """
+ assert cls(1) <= cls(1)
+ assert cls(1) <= cls(2)
+ assert not (cls(2) <= cls(1))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_not_le_same_type(self, cls, requires_same_type):
+ """
+ Not less-than-or-equal objects are detected appropriately.
+ """
+ assert cls(2) > cls(1)
+ assert not (cls(1) > cls(1))
+ assert not (cls(1) > cls(2))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_le_different_type(self, cls, requires_same_type):
+ """
+ Less-than-or-equal values of diff. types are detected appropriately.
+ """
+ if requires_same_type:
+ # Unlike __eq__, NotImplemented will cause an exception to be
+ # raised from __le__.
+ if not PY2:
+ with pytest.raises(TypeError):
+ cls(1) <= cls(2.0)
+ else:
+ assert cls(1) <= cls(2.0)
+ assert cls(1) <= cls(1.0)
+ assert not (cls(2) <= cls(1.0))
+
+ #########
+ # gt
+ #########
+ @pytest.mark.skipif(PY2, reason="PY2 does not raise TypeError")
+ @pytest.mark.parametrize("cls, requires_same_type", eq_data, ids=eq_ids)
+ def test_gt_unorderable(self, cls, requires_same_type):
+ """
+ TypeError is raised if class does not implement __gt__.
+ """
+ with pytest.raises(TypeError):
+ cls(2) > cls(1)
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_gt_same_type(self, cls, requires_same_type):
+ """
+ Greater-than objects are detected appropriately.
+ """
+ assert cls(2) > cls(1)
+ assert not (cls(1) > cls(2))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_not_gt_same_type(self, cls, requires_same_type):
+ """
+ Not greater-than objects are detected appropriately.
+ """
+ assert cls(1) <= cls(2)
+ assert not (cls(2) <= cls(1))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_gt_different_type(self, cls, requires_same_type):
+ """
+ Greater-than values of different types are detected appropriately.
+ """
+ if requires_same_type:
+ # Unlike __eq__, NotImplemented will cause an exception to be
+ # raised from __gt__.
+ if not PY2:
+ with pytest.raises(TypeError):
+ cls(2) > cls(1.0)
+ else:
+ assert cls(2) > cls(1.0)
+ assert not (cls(1) > cls(2.0))
+
+ #########
+ # ge
+ #########
+ @pytest.mark.skipif(PY2, reason="PY2 does not raise TypeError")
+ @pytest.mark.parametrize("cls, requires_same_type", eq_data, ids=eq_ids)
+ def test_ge_unorderable(self, cls, requires_same_type):
+ """
+ TypeError is raised if class does not implement __ge__.
+ """
+ with pytest.raises(TypeError):
+ cls(2) >= cls(1)
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_ge_same_type(self, cls, requires_same_type):
+ """
+ Greater-than-or-equal objects are detected appropriately.
+ """
+ assert cls(1) >= cls(1)
+ assert cls(2) >= cls(1)
+ assert not (cls(1) >= cls(2))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_not_ge_same_type(self, cls, requires_same_type):
+ """
+ Not greater-than-or-equal objects are detected appropriately.
+ """
+ assert cls(1) < cls(2)
+ assert not (cls(1) < cls(1))
+ assert not (cls(2) < cls(1))
+
+ @pytest.mark.parametrize(
+ "cls, requires_same_type", order_data, ids=order_ids
+ )
+ def test_ge_different_type(self, cls, requires_same_type):
+ """
+ Greater-than-or-equal values of diff. types are detected appropriately.
+ """
+ if requires_same_type:
+ # Unlike __eq__, NotImplemented will cause an exception to be
+ # raised from __ge__.
+ if not PY2:
+ with pytest.raises(TypeError):
+ cls(2) >= cls(1.0)
+ else:
+ assert cls(2) >= cls(2.0)
+ assert cls(2) >= cls(1.0)
+ assert not (cls(1) >= cls(2.0))
+
+
+class TestDundersUnnamedClass(object):
+ """
+ Tests for dunder attributes of unnamed classes.
+ """
+
+ cls = cmp_using(eq=lambda a, b: a == b)
+
+ def test_class(self):
+ """
+ Class name and qualified name should be well behaved.
+ """
+ assert self.cls.__name__ == "Comparable"
+ if not PY2:
+ assert self.cls.__qualname__ == "Comparable"
+
+ def test_eq(self):
+ """
+ __eq__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__eq__
+ assert method.__doc__.strip() == "Return a == b. Computed by attrs."
+ assert method.__name__ == "__eq__"
+
+ def test_ne(self):
+ """
+ __ne__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__ne__
+ assert method.__doc__.strip() == (
+ "Check equality and either forward a NotImplemented or\n"
+ " return the result negated."
+ )
+ assert method.__name__ == "__ne__"
+
+
+class TestTotalOrderingException(object):
+ """
+ Test for exceptions related to total ordering.
+ """
+
+ def test_eq_must_specified(self):
+ """
+ `total_ordering` requires `__eq__` to be specified.
+ """
+ with pytest.raises(ValueError) as ei:
+ cmp_using(lt=lambda a, b: a < b)
+
+ assert ei.value.args[0] == (
+ "eq must be define is order to complete ordering from "
+ "lt, le, gt, ge."
+ )
+
+
+class TestNotImplementedIsPropagated(object):
+ """
+ Test related to functions that return NotImplemented.
+ """
+
+ def test_not_implemented_is_propagated(self):
+ """
+ If the comparison function returns NotImplemented,
+ the dunder method should too.
+ """
+ C = cmp_using(eq=lambda a, b: NotImplemented if a == 1 else a == b)
+
+ assert C(2) == C(2)
+ assert C(1) != C(1)
+
+
+class TestDundersPartialOrdering(object):
+ """
+ Tests for dunder attributes of classes with partial ordering.
+ """
+
+ cls = PartialOrderCSameType
+
+ def test_class(self):
+ """
+ Class name and qualified name should be well behaved.
+ """
+ assert self.cls.__name__ == "PartialOrderCSameType"
+ if not PY2:
+ assert self.cls.__qualname__ == "PartialOrderCSameType"
+
+ def test_eq(self):
+ """
+ __eq__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__eq__
+ assert method.__doc__.strip() == "Return a == b. Computed by attrs."
+ assert method.__name__ == "__eq__"
+
+ def test_ne(self):
+ """
+ __ne__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__ne__
+ assert method.__doc__.strip() == (
+ "Check equality and either forward a NotImplemented or\n"
+ " return the result negated."
+ )
+ assert method.__name__ == "__ne__"
+
+ def test_lt(self):
+ """
+ __lt__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__lt__
+ assert method.__doc__.strip() == "Return a < b. Computed by attrs."
+ assert method.__name__ == "__lt__"
+
+ def test_le(self):
+ """
+ __le__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__le__
+ if PY2:
+ assert method.__doc__ == "x.__le__(y) <==> x<=y"
+ else:
+ assert method.__doc__.strip().startswith(
+ "Return a <= b. Computed by @total_ordering from"
+ )
+ assert method.__name__ == "__le__"
+
+ def test_gt(self):
+ """
+ __gt__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__gt__
+ if PY2:
+ assert method.__doc__ == "x.__gt__(y) <==> x>y"
+ else:
+ assert method.__doc__.strip().startswith(
+ "Return a > b. Computed by @total_ordering from"
+ )
+ assert method.__name__ == "__gt__"
+
+ def test_ge(self):
+ """
+ __ge__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__ge__
+ if PY2:
+ assert method.__doc__ == "x.__ge__(y) <==> x>=y"
+ else:
+ assert method.__doc__.strip().startswith(
+ "Return a >= b. Computed by @total_ordering from"
+ )
+ assert method.__name__ == "__ge__"
+
+
+class TestDundersFullOrdering(object):
+ """
+ Tests for dunder attributes of classes with full ordering.
+ """
+
+ cls = FullOrderCSameType
+
+ def test_class(self):
+ """
+ Class name and qualified name should be well behaved.
+ """
+ assert self.cls.__name__ == "FullOrderCSameType"
+ if not PY2:
+ assert self.cls.__qualname__ == "FullOrderCSameType"
+
+ def test_eq(self):
+ """
+ __eq__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__eq__
+ assert method.__doc__.strip() == "Return a == b. Computed by attrs."
+ assert method.__name__ == "__eq__"
+
+ def test_ne(self):
+ """
+ __ne__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__ne__
+ assert method.__doc__.strip() == (
+ "Check equality and either forward a NotImplemented or\n"
+ " return the result negated."
+ )
+ assert method.__name__ == "__ne__"
+
+ def test_lt(self):
+ """
+ __lt__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__lt__
+ assert method.__doc__.strip() == "Return a < b. Computed by attrs."
+ assert method.__name__ == "__lt__"
+
+ def test_le(self):
+ """
+ __le__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__le__
+ assert method.__doc__.strip() == "Return a <= b. Computed by attrs."
+ assert method.__name__ == "__le__"
+
+ def test_gt(self):
+ """
+ __gt__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__gt__
+ assert method.__doc__.strip() == "Return a > b. Computed by attrs."
+ assert method.__name__ == "__gt__"
+
+ def test_ge(self):
+ """
+ __ge__ docstring and qualified name should be well behaved.
+ """
+ method = self.cls.__ge__
+ assert method.__doc__.strip() == "Return a >= b. Computed by attrs."
+ assert method.__name__ == "__ge__"
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_compat.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_compat.py
new file mode 100644
index 0000000000..464b492f0f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_compat.py
@@ -0,0 +1,52 @@
+# SPDX-License-Identifier: MIT
+
+import pytest
+
+from attr._compat import metadata_proxy
+
+
+@pytest.fixture(name="mp")
+def _mp():
+ return metadata_proxy({"x": 42, "y": "foo"})
+
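+# For context (a simplification, not part of the test): on Python 3,
+# metadata_proxy() behaves like ``types.MappingProxyType(dict(d))``, i.e. a
+# read-only view over a copy of the mapping, which is why item assignment
+# raises TypeError while the mutating dict methods are simply absent.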
+
+class TestMetadataProxy:
+ """
+ Ensure properties of metadata_proxy independently of hypothesis strategies.
+ """
+
+ def test_repr(self, mp):
+ """
+ repr makes sense and is consistent across Python versions.
+ """
+ assert any(
+ [
+ "mappingproxy({'x': 42, 'y': 'foo'})" == repr(mp),
+ "mappingproxy({'y': 'foo', 'x': 42})" == repr(mp),
+ ]
+ )
+
+ def test_immutable(self, mp):
+ """
+ All mutating methods raise errors.
+ """
+ with pytest.raises(TypeError, match="not support item assignment"):
+ mp["z"] = 23
+
+ with pytest.raises(TypeError, match="not support item deletion"):
+ del mp["x"]
+
+ with pytest.raises(AttributeError, match="no attribute 'update'"):
+ mp.update({})
+
+ with pytest.raises(AttributeError, match="no attribute 'clear'"):
+ mp.clear()
+
+ with pytest.raises(AttributeError, match="no attribute 'pop'"):
+ mp.pop("x")
+
+ with pytest.raises(AttributeError, match="no attribute 'popitem'"):
+ mp.popitem()
+
+ with pytest.raises(AttributeError, match="no attribute 'setdefault'"):
+ mp.setdefault("x")
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_config.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_config.py
new file mode 100644
index 0000000000..bbf6756406
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_config.py
@@ -0,0 +1,45 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for `attr._config`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import pytest
+
+from attr import _config
+
+
+class TestConfig(object):
+ def test_default(self):
+ """
+ Run validators by default.
+ """
+ assert True is _config._run_validators
+
+ def test_set_run_validators(self):
+ """
+ Sets `_run_validators`.
+ """
+ _config.set_run_validators(False)
+ assert False is _config._run_validators
+ _config.set_run_validators(True)
+ assert True is _config._run_validators
+
+ def test_get_run_validators(self):
+ """
+ Returns `_run_validators`.
+ """
+ _config._run_validators = False
+ assert _config._run_validators is _config.get_run_validators()
+ _config._run_validators = True
+ assert _config._run_validators is _config.get_run_validators()
+
+ def test_wrong_type(self):
+ """
+        Passing anything other than a boolean raises TypeError.
+ """
+ with pytest.raises(TypeError) as e:
+ _config.set_run_validators("False")
+ assert "'run' must be bool." == e.value.args[0]
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_converters.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_converters.py
new file mode 100644
index 0000000000..d0fc723eb1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_converters.py
@@ -0,0 +1,163 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for `attr.converters`.
+"""
+
+from __future__ import absolute_import
+
+import pytest
+
+import attr
+
+from attr import Factory, attrib
+from attr.converters import default_if_none, optional, pipe, to_bool
+
+
+class TestOptional(object):
+ """
+ Tests for `optional`.
+ """
+
+ def test_success_with_type(self):
+ """
+ Wrapped converter is used as usual if value is not None.
+ """
+ c = optional(int)
+
+ assert c("42") == 42
+
+ def test_success_with_none(self):
+ """
+ Nothing happens if None.
+ """
+ c = optional(int)
+
+ assert c(None) is None
+
+ def test_fail(self):
+ """
+ Propagates the underlying conversion error when conversion fails.
+ """
+ c = optional(int)
+
+ with pytest.raises(ValueError):
+ c("not_an_int")
+
+
+class TestDefaultIfNone(object):
+ def test_missing_default(self):
+ """
+        Raises TypeError if neither default nor factory has been passed.
+ """
+ with pytest.raises(TypeError, match="Must pass either"):
+ default_if_none()
+
+ def test_too_many_defaults(self):
+ """
+ Raises TypeError if both default and factory are passed.
+ """
+ with pytest.raises(TypeError, match="but not both"):
+ default_if_none(True, lambda: 42)
+
+ def test_factory_takes_self(self):
+ """
+ Raises ValueError if passed Factory has takes_self=True.
+ """
+ with pytest.raises(ValueError, match="takes_self"):
+ default_if_none(Factory(list, takes_self=True))
+
+ @pytest.mark.parametrize("val", [1, 0, True, False, "foo", "", object()])
+ def test_not_none(self, val):
+ """
+ If a non-None value is passed, it's handed down.
+ """
+ c = default_if_none("nope")
+
+ assert val == c(val)
+
+ c = default_if_none(factory=list)
+
+ assert val == c(val)
+
+ def test_none_value(self):
+ """
+ Default values are returned when a None is passed.
+ """
+ c = default_if_none(42)
+
+ assert 42 == c(None)
+
+ def test_none_factory(self):
+ """
+ Factories are used if None is passed.
+ """
+ c = default_if_none(factory=list)
+
+ assert [] == c(None)
+
+ c = default_if_none(default=Factory(list))
+
+ assert [] == c(None)
+
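+# A minimal usage sketch (illustrative only; ``WithList`` is a made-up name):
+# default_if_none() is meant to be used as a field converter so that a
+# passed-in None falls back to a default.
+#
+#   @attr.s
+#   class WithList(object):
+#       xs = attr.ib(converter=default_if_none(factory=list))
+#
+#   assert WithList(None).xs == []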
+
+class TestPipe(object):
+ def test_success(self):
+ """
+ Succeeds if all wrapped converters succeed.
+ """
+ c = pipe(str, to_bool, bool)
+
+ assert True is c("True") is c(True)
+
+ def test_fail(self):
+ """
+ Fails if any wrapped converter fails.
+ """
+ c = pipe(str, to_bool)
+
+ # First wrapped converter fails:
+ with pytest.raises(ValueError):
+ c(33)
+
+ # Last wrapped converter fails:
+ with pytest.raises(ValueError):
+ c("33")
+
+ def test_sugar(self):
+ """
+ `pipe(c1, c2, c3)` and `[c1, c2, c3]` are equivalent.
+ """
+
+ @attr.s
+ class C(object):
+ a1 = attrib(default="True", converter=pipe(str, to_bool, bool))
+ a2 = attrib(default=True, converter=[str, to_bool, bool])
+
+ c = C()
+ assert True is c.a1 is c.a2
+
+
+class TestToBool(object):
+ def test_unhashable(self):
+ """
+ Fails if value is unhashable.
+ """
+ with pytest.raises(ValueError, match="Cannot convert value to bool"):
+ to_bool([])
+
+ def test_truthy(self):
+ """
+ Fails if truthy values are incorrectly converted.
+ """
+ assert to_bool("t")
+ assert to_bool("yes")
+ assert to_bool("on")
+
+ def test_falsy(self):
+ """
+ Fails if falsy values are incorrectly converted.
+ """
+ assert not to_bool("f")
+ assert not to_bool("no")
+ assert not to_bool("off")
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_dunders.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_dunders.py
new file mode 100644
index 0000000000..186762eb0d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_dunders.py
@@ -0,0 +1,1008 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for dunder methods from `attr._make`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import copy
+import pickle
+
+import pytest
+
+from hypothesis import given
+from hypothesis.strategies import booleans
+
+import attr
+
+from attr._make import (
+ NOTHING,
+ Factory,
+ _add_repr,
+ _is_slot_cls,
+ _make_init,
+ _Nothing,
+ fields,
+ make_class,
+)
+from attr.validators import instance_of
+
+from .utils import simple_attr, simple_class
+
+
+EqC = simple_class(eq=True)
+EqCSlots = simple_class(eq=True, slots=True)
+OrderC = simple_class(order=True)
+OrderCSlots = simple_class(order=True, slots=True)
+ReprC = simple_class(repr=True)
+ReprCSlots = simple_class(repr=True, slots=True)
+
+
+@attr.s(eq=True)
+class EqCallableC(object):
+ a = attr.ib(eq=str.lower, order=False)
+ b = attr.ib(eq=True)
+
+
+@attr.s(eq=True, slots=True)
+class EqCallableCSlots(object):
+ a = attr.ib(eq=str.lower, order=False)
+ b = attr.ib(eq=True)
+
+
+@attr.s(order=True)
+class OrderCallableC(object):
+ a = attr.ib(eq=True, order=str.lower)
+ b = attr.ib(order=True)
+
+
+@attr.s(order=True, slots=True)
+class OrderCallableCSlots(object):
+ a = attr.ib(eq=True, order=str.lower)
+ b = attr.ib(order=True)
+
+
+# HashC is hashable by explicit definition while HashCSlots is hashable
+# implicitly. The "Cached" versions are the same, except with hash code
+# caching enabled
+HashC = simple_class(hash=True)
+HashCSlots = simple_class(hash=None, eq=True, frozen=True, slots=True)
+HashCCached = simple_class(hash=True, cache_hash=True)
+HashCSlotsCached = simple_class(
+ hash=None, eq=True, frozen=True, slots=True, cache_hash=True
+)
+# the cached hash code is stored slightly differently in this case
+# so it needs to be tested separately
+HashCFrozenNotSlotsCached = simple_class(
+ frozen=True, slots=False, hash=True, cache_hash=True
+)
+
+
+def _add_init(cls, frozen):
+ """
+ Add a __init__ method to *cls*. If *frozen* is True, make it immutable.
+
+    This function used to be part of _make. It is no longer used there, but
+    the tests for it remain useful for exercising the behavior of _make_init.
+ """
+ cls.__init__ = _make_init(
+ cls,
+ cls.__attrs_attrs__,
+ getattr(cls, "__attrs_pre_init__", False),
+ getattr(cls, "__attrs_post_init__", False),
+ frozen,
+ _is_slot_cls(cls),
+ cache_hash=False,
+ base_attr_map={},
+ is_exc=False,
+ cls_on_setattr=None,
+ attrs_init=False,
+ )
+ return cls
+
+
+class InitC(object):
+ __attrs_attrs__ = [simple_attr("a"), simple_attr("b")]
+
+
+InitC = _add_init(InitC, False)
+
+
+class TestEqOrder(object):
+ """
+ Tests for eq and order related methods.
+ """
+
+ @given(booleans())
+ def test_eq_ignore_attrib(self, slots):
+ """
+ If `eq` is False for an attribute, ignore that attribute.
+ """
+ C = make_class(
+ "C", {"a": attr.ib(eq=False), "b": attr.ib()}, slots=slots
+ )
+
+ assert C(1, 2) == C(2, 2)
+
+ @pytest.mark.parametrize("cls", [EqC, EqCSlots])
+ def test_equal(self, cls):
+ """
+ Equal objects are detected as equal.
+ """
+ assert cls(1, 2) == cls(1, 2)
+ assert not (cls(1, 2) != cls(1, 2))
+
+ @pytest.mark.parametrize("cls", [EqCallableC, EqCallableCSlots])
+ def test_equal_callable(self, cls):
+ """
+ Equal objects are detected as equal.
+ """
+ assert cls("Test", 1) == cls("test", 1)
+ assert cls("Test", 1) != cls("test", 2)
+ assert not (cls("Test", 1) != cls("test", 1))
+ assert not (cls("Test", 1) == cls("test", 2))
+
+ @pytest.mark.parametrize("cls", [EqC, EqCSlots])
+ def test_unequal_same_class(self, cls):
+ """
+ Unequal objects of correct type are detected as unequal.
+ """
+ assert cls(1, 2) != cls(2, 1)
+ assert not (cls(1, 2) == cls(2, 1))
+
+ @pytest.mark.parametrize("cls", [EqCallableC, EqCallableCSlots])
+ def test_unequal_same_class_callable(self, cls):
+ """
+ Unequal objects of correct type are detected as unequal.
+ """
+ assert cls("Test", 1) != cls("foo", 2)
+ assert not (cls("Test", 1) == cls("foo", 2))
+
+ @pytest.mark.parametrize(
+ "cls", [EqC, EqCSlots, EqCallableC, EqCallableCSlots]
+ )
+ def test_unequal_different_class(self, cls):
+ """
+ Unequal objects of different type are detected even if their attributes
+ match.
+ """
+
+ class NotEqC(object):
+ a = 1
+ b = 2
+
+ assert cls(1, 2) != NotEqC()
+ assert not (cls(1, 2) == NotEqC())
+
+ @pytest.mark.parametrize("cls", [OrderC, OrderCSlots])
+ def test_lt(self, cls):
+ """
+ __lt__ compares objects as tuples of attribute values.
+ """
+ for a, b in [
+ ((1, 2), (2, 1)),
+ ((1, 2), (1, 3)),
+ (("a", "b"), ("b", "a")),
+ ]:
+ assert cls(*a) < cls(*b)
+
+ @pytest.mark.parametrize("cls", [OrderCallableC, OrderCallableCSlots])
+ def test_lt_callable(self, cls):
+ """
+ __lt__ compares objects as tuples of attribute values.
+ """
+ # Note: "A" < "a"
+ for a, b in [
+ (("test1", 1), ("Test1", 2)),
+ (("test0", 1), ("Test1", 1)),
+ ]:
+ assert cls(*a) < cls(*b)
+
+ @pytest.mark.parametrize(
+ "cls", [OrderC, OrderCSlots, OrderCallableC, OrderCallableCSlots]
+ )
+ def test_lt_unordable(self, cls):
+ """
+ __lt__ returns NotImplemented if classes differ.
+ """
+ assert NotImplemented == (cls(1, 2).__lt__(42))
+
+ @pytest.mark.parametrize("cls", [OrderC, OrderCSlots])
+ def test_le(self, cls):
+ """
+ __le__ compares objects as tuples of attribute values.
+ """
+ for a, b in [
+ ((1, 2), (2, 1)),
+ ((1, 2), (1, 3)),
+ ((1, 1), (1, 1)),
+ (("a", "b"), ("b", "a")),
+ (("a", "b"), ("a", "b")),
+ ]:
+ assert cls(*a) <= cls(*b)
+
+ @pytest.mark.parametrize("cls", [OrderCallableC, OrderCallableCSlots])
+ def test_le_callable(self, cls):
+ """
+ __le__ compares objects as tuples of attribute values.
+ """
+ # Note: "A" < "a"
+ for a, b in [
+ (("test1", 1), ("Test1", 1)),
+ (("test1", 1), ("Test1", 2)),
+ (("test0", 1), ("Test1", 1)),
+ (("test0", 2), ("Test1", 1)),
+ ]:
+ assert cls(*a) <= cls(*b)
+
+ @pytest.mark.parametrize(
+ "cls", [OrderC, OrderCSlots, OrderCallableC, OrderCallableCSlots]
+ )
+ def test_le_unordable(self, cls):
+ """
+ __le__ returns NotImplemented if classes differ.
+ """
+ assert NotImplemented == (cls(1, 2).__le__(42))
+
+ @pytest.mark.parametrize("cls", [OrderC, OrderCSlots])
+ def test_gt(self, cls):
+ """
+ __gt__ compares objects as tuples of attribute values.
+ """
+ for a, b in [
+ ((2, 1), (1, 2)),
+ ((1, 3), (1, 2)),
+ (("b", "a"), ("a", "b")),
+ ]:
+ assert cls(*a) > cls(*b)
+
+ @pytest.mark.parametrize("cls", [OrderCallableC, OrderCallableCSlots])
+ def test_gt_callable(self, cls):
+ """
+ __gt__ compares objects as tuples of attribute values.
+ """
+ # Note: "A" < "a"
+ for a, b in [
+ (("Test1", 2), ("test1", 1)),
+ (("Test1", 1), ("test0", 1)),
+ ]:
+ assert cls(*a) > cls(*b)
+
+ @pytest.mark.parametrize(
+ "cls", [OrderC, OrderCSlots, OrderCallableC, OrderCallableCSlots]
+ )
+ def test_gt_unordable(self, cls):
+ """
+ __gt__ returns NotImplemented if classes differ.
+ """
+ assert NotImplemented == (cls(1, 2).__gt__(42))
+
+ @pytest.mark.parametrize("cls", [OrderC, OrderCSlots])
+ def test_ge(self, cls):
+ """
+ __ge__ compares objects as tuples of attribute values.
+ """
+ for a, b in [
+ ((2, 1), (1, 2)),
+ ((1, 3), (1, 2)),
+ ((1, 1), (1, 1)),
+ (("b", "a"), ("a", "b")),
+ (("a", "b"), ("a", "b")),
+ ]:
+ assert cls(*a) >= cls(*b)
+
+ @pytest.mark.parametrize("cls", [OrderCallableC, OrderCallableCSlots])
+ def test_ge_callable(self, cls):
+ """
+ __ge__ compares objects as tuples of attribute values.
+ """
+ # Note: "A" < "a"
+ for a, b in [
+ (("Test1", 1), ("test1", 1)),
+ (("Test1", 2), ("test1", 1)),
+ (("Test1", 1), ("test0", 1)),
+ (("Test1", 1), ("test0", 2)),
+ ]:
+ assert cls(*a) >= cls(*b)
+
+ @pytest.mark.parametrize(
+ "cls", [OrderC, OrderCSlots, OrderCallableC, OrderCallableCSlots]
+ )
+ def test_ge_unordable(self, cls):
+ """
+ __ge__ returns NotImplemented if classes differ.
+ """
+ assert NotImplemented == (cls(1, 2).__ge__(42))
+
+
+class TestAddRepr(object):
+ """
+ Tests for `_add_repr`.
+ """
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_repr(self, slots):
+ """
+ If `repr` is False, ignore that attribute.
+ """
+ C = make_class(
+ "C", {"a": attr.ib(repr=False), "b": attr.ib()}, slots=slots
+ )
+
+ assert "C(b=2)" == repr(C(1, 2))
+
+ @pytest.mark.parametrize("cls", [ReprC, ReprCSlots])
+ def test_repr_works(self, cls):
+ """
+ repr returns a sensible value.
+ """
+ assert "C(a=1, b=2)" == repr(cls(1, 2))
+
+ def test_custom_repr_works(self):
+ """
+ repr returns a sensible value for attributes with a custom repr
+ callable.
+ """
+
+ def custom_repr(value):
+ return "foo:" + str(value)
+
+ @attr.s
+ class C(object):
+ a = attr.ib(repr=custom_repr)
+
+ assert "C(a=foo:1)" == repr(C(1))
+
+ def test_infinite_recursion(self):
+ """
+ In the presence of a cyclic graph, repr will emit an ellipsis and not
+ raise an exception.
+ """
+
+ @attr.s
+ class Cycle(object):
+ value = attr.ib(default=7)
+ cycle = attr.ib(default=None)
+
+ cycle = Cycle()
+ cycle.cycle = cycle
+ assert "Cycle(value=7, cycle=...)" == repr(cycle)
+
+ def test_infinite_recursion_long_cycle(self):
+ """
+ A cyclic graph can pass through other non-attrs objects, and repr will
+ still emit an ellipsis and not raise an exception.
+ """
+
+ @attr.s
+ class LongCycle(object):
+ value = attr.ib(default=14)
+ cycle = attr.ib(default=None)
+
+ cycle = LongCycle()
+ # Ensure that the reference cycle passes through a non-attrs object.
+ # This demonstrates the need for a thread-local "global" ID tracker.
+ cycle.cycle = {"cycle": [cycle]}
+ assert "LongCycle(value=14, cycle={'cycle': [...]})" == repr(cycle)
+
+ def test_underscores(self):
+ """
+ repr does not strip underscores.
+ """
+
+ class C(object):
+ __attrs_attrs__ = [simple_attr("_x")]
+
+ C = _add_repr(C)
+ i = C()
+ i._x = 42
+
+ assert "C(_x=42)" == repr(i)
+
+ def test_repr_uninitialized_member(self):
+ """
+ repr signals unset attributes
+ """
+ C = make_class("C", {"a": attr.ib(init=False)})
+
+ assert "C(a=NOTHING)" == repr(C())
+
+ @given(add_str=booleans(), slots=booleans())
+ def test_str(self, add_str, slots):
+ """
+ If str is True, it returns the same as repr.
+
+        This only makes sense when subclassing a class with a poor __str__
+ (like Exceptions).
+ """
+
+ @attr.s(str=add_str, slots=slots)
+ class Error(Exception):
+ x = attr.ib()
+
+ e = Error(42)
+
+ assert (str(e) == repr(e)) is add_str
+
+ def test_str_no_repr(self):
+ """
+ Raises a ValueError if repr=False and str=True.
+ """
+ with pytest.raises(ValueError) as e:
+ simple_class(repr=False, str=True)
+
+ assert (
+ "__str__ can only be generated if a __repr__ exists."
+ ) == e.value.args[0]
+
+
+# these are for use in TestAddHash.test_cache_hash_serialization
+# they need to be out here so they can be un-pickled
+@attr.attrs(hash=True, cache_hash=False)
+class HashCacheSerializationTestUncached(object):
+ foo_value = attr.ib()
+
+
+@attr.attrs(hash=True, cache_hash=True)
+class HashCacheSerializationTestCached(object):
+ foo_value = attr.ib()
+
+
+@attr.attrs(slots=True, hash=True, cache_hash=True)
+class HashCacheSerializationTestCachedSlots(object):
+ foo_value = attr.ib()
+
+
+class IncrementingHasher(object):
+ def __init__(self):
+ self.hash_value = 100
+
+ def __hash__(self):
+ rv = self.hash_value
+ self.hash_value += 1
+ return rv
+
+
+class TestAddHash(object):
+ """
+ Tests for `_add_hash`.
+ """
+
+ def test_enforces_type(self):
+ """
+ The `hash` argument to both attrs and attrib must be None, True, or
+ False.
+ """
+ exc_args = ("Invalid value for hash. Must be True, False, or None.",)
+
+ with pytest.raises(TypeError) as e:
+ make_class("C", {}, hash=1),
+
+ assert exc_args == e.value.args
+
+ with pytest.raises(TypeError) as e:
+ make_class("C", {"a": attr.ib(hash=1)}),
+
+ assert exc_args == e.value.args
+
+ def test_enforce_no_cache_hash_without_hash(self):
+ """
+ Ensure exception is thrown if caching the hash code is requested
+ but attrs is not requested to generate `__hash__`.
+ """
+ exc_args = (
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled.",
+ )
+ with pytest.raises(TypeError) as e:
+ make_class("C", {}, hash=False, cache_hash=True)
+ assert exc_args == e.value.args
+
+ # unhashable case
+ with pytest.raises(TypeError) as e:
+ make_class(
+ "C", {}, hash=None, eq=True, frozen=False, cache_hash=True
+ )
+ assert exc_args == e.value.args
+
+ def test_enforce_no_cached_hash_without_init(self):
+ """
+ Ensure exception is thrown if caching the hash code is requested
+ but attrs is not requested to generate `__init__`.
+ """
+ exc_args = (
+ "Invalid value for cache_hash. To use hash caching,"
+ " init must be True.",
+ )
+ with pytest.raises(TypeError) as e:
+ make_class("C", {}, init=False, hash=True, cache_hash=True)
+ assert exc_args == e.value.args
+
+ @given(booleans(), booleans())
+ def test_hash_attribute(self, slots, cache_hash):
+ """
+ If `hash` is False on an attribute, ignore that attribute.
+ """
+ C = make_class(
+ "C",
+ {"a": attr.ib(hash=False), "b": attr.ib()},
+ slots=slots,
+ hash=True,
+ cache_hash=cache_hash,
+ )
+
+ assert hash(C(1, 2)) == hash(C(2, 2))
+
+ @given(booleans())
+ def test_hash_attribute_mirrors_eq(self, eq):
+ """
+ If `hash` is None, the hash generation mirrors `eq`.
+ """
+ C = make_class("C", {"a": attr.ib(eq=eq)}, eq=True, frozen=True)
+
+ if eq:
+ assert C(1) != C(2)
+ assert hash(C(1)) != hash(C(2))
+ assert hash(C(1)) == hash(C(1))
+ else:
+ assert C(1) == C(2)
+ assert hash(C(1)) == hash(C(2))
+
+ @given(booleans())
+ def test_hash_mirrors_eq(self, eq):
+ """
+ If `hash` is None, the hash generation mirrors `eq`.
+ """
+ C = make_class("C", {"a": attr.ib()}, eq=eq, frozen=True)
+
+ i = C(1)
+
+ assert i == i
+ assert hash(i) == hash(i)
+
+ if eq:
+ assert C(1) == C(1)
+ assert hash(C(1)) == hash(C(1))
+ else:
+ assert C(1) != C(1)
+ assert hash(C(1)) != hash(C(1))
+
+ @pytest.mark.parametrize(
+ "cls",
+ [
+ HashC,
+ HashCSlots,
+ HashCCached,
+ HashCSlotsCached,
+ HashCFrozenNotSlotsCached,
+ ],
+ )
+ def test_hash_works(self, cls):
+ """
+ __hash__ returns different hashes for different values.
+ """
+ a = cls(1, 2)
+ b = cls(1, 1)
+ assert hash(a) != hash(b)
+ # perform the test again to test the pre-cached path through
+ # __hash__ for the cached-hash versions
+ assert hash(a) != hash(b)
+
+ def test_hash_default(self):
+ """
+ Classes are not hashable by default.
+ """
+ C = make_class("C", {})
+
+ with pytest.raises(TypeError) as e:
+ hash(C())
+
+ assert e.value.args[0] in (
+ "'C' objects are unhashable", # PyPy
+ "unhashable type: 'C'", # CPython
+ )
+
+ def test_cache_hashing(self):
+ """
+        Ensure that hash computation is cached if and only if requested.
+ """
+
+ class HashCounter:
+ """
+ A class for testing which counts how many times its hash
+ has been requested
+ """
+
+ def __init__(self):
+ self.times_hash_called = 0
+
+ def __hash__(self):
+ self.times_hash_called += 1
+ return 12345
+
+ Uncached = make_class(
+ "Uncached",
+ {"hash_counter": attr.ib(factory=HashCounter)},
+ hash=True,
+ cache_hash=False,
+ )
+ Cached = make_class(
+ "Cached",
+ {"hash_counter": attr.ib(factory=HashCounter)},
+ hash=True,
+ cache_hash=True,
+ )
+
+ uncached_instance = Uncached()
+ cached_instance = Cached()
+
+ hash(uncached_instance)
+ hash(uncached_instance)
+ hash(cached_instance)
+ hash(cached_instance)
+
+ assert 2 == uncached_instance.hash_counter.times_hash_called
+ assert 1 == cached_instance.hash_counter.times_hash_called
+
+ @pytest.mark.parametrize("cache_hash", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_copy_hash_cleared(self, cache_hash, frozen, slots):
+ """
+ Test that the default hash is recalculated after a copy operation.
+ """
+
+ kwargs = dict(frozen=frozen, slots=slots, cache_hash=cache_hash)
+
+ # Give it an explicit hash if we don't have an implicit one
+ if not frozen:
+ kwargs["hash"] = True
+
+ @attr.s(**kwargs)
+ class C(object):
+ x = attr.ib()
+
+ a = C(IncrementingHasher())
+ # Ensure that any hash cache would be calculated before copy
+ orig_hash = hash(a)
+ b = copy.deepcopy(a)
+
+ if kwargs["cache_hash"]:
+ # For cache_hash classes, this call is cached
+ assert orig_hash == hash(a)
+
+ assert orig_hash != hash(b)
+
+ @pytest.mark.parametrize(
+ "klass,cached",
+ [
+ (HashCacheSerializationTestUncached, False),
+ (HashCacheSerializationTestCached, True),
+ (HashCacheSerializationTestCachedSlots, True),
+ ],
+ )
+ def test_cache_hash_serialization_hash_cleared(self, klass, cached):
+ """
+ Tests that the hash cache is cleared on deserialization to fix
+        https://github.com/python-attrs/attrs/issues/482.
+
+ This test is intended to guard against a stale hash code surviving
+ across serialization (which may cause problems when the hash value
+ is different in different interpreters).
+ """
+
+ obj = klass(IncrementingHasher())
+ original_hash = hash(obj)
+ obj_rt = self._roundtrip_pickle(obj)
+
+ if cached:
+ assert original_hash == hash(obj)
+
+ assert original_hash != hash(obj_rt)
+
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_copy_two_arg_reduce(self, frozen):
+ """
+ If __getstate__ returns None, the tuple returned by object.__reduce__
+ won't contain the state dictionary; this test ensures that the custom
+ __reduce__ generated when cache_hash=True works in that case.
+ """
+
+ @attr.s(frozen=frozen, cache_hash=True, hash=True)
+ class C(object):
+ x = attr.ib()
+
+ def __getstate__(self):
+ return None
+
+ # By the nature of this test it doesn't really create an object that's
+ # in a valid state - it basically does the equivalent of
+ # `object.__new__(C)`, so it doesn't make much sense to assert anything
+ # about the result of the copy. This test will just check that it
+ # doesn't raise an *error*.
+ copy.deepcopy(C(1))
+
+ def _roundtrip_pickle(self, obj):
+ pickle_str = pickle.dumps(obj)
+ return pickle.loads(pickle_str)
+
+
+class TestAddInit(object):
+ """
+ Tests for `_add_init`.
+ """
+
+ @given(booleans(), booleans())
+ def test_init(self, slots, frozen):
+ """
+ If `init` is False, ignore that attribute.
+ """
+ C = make_class(
+ "C",
+ {"a": attr.ib(init=False), "b": attr.ib()},
+ slots=slots,
+ frozen=frozen,
+ )
+ with pytest.raises(TypeError) as e:
+ C(a=1, b=2)
+
+ assert e.value.args[0].endswith(
+ "__init__() got an unexpected keyword argument 'a'"
+ )
+
+ @given(booleans(), booleans())
+ def test_no_init_default(self, slots, frozen):
+ """
+ If `init` is False but a Factory is specified, don't allow passing that
+ argument but initialize it anyway.
+ """
+ C = make_class(
+ "C",
+ {
+ "_a": attr.ib(init=False, default=42),
+ "_b": attr.ib(init=False, default=Factory(list)),
+ "c": attr.ib(),
+ },
+ slots=slots,
+ frozen=frozen,
+ )
+ with pytest.raises(TypeError):
+ C(a=1, c=2)
+ with pytest.raises(TypeError):
+ C(b=1, c=2)
+
+ i = C(23)
+ assert (42, [], 23) == (i._a, i._b, i.c)
+
+ @given(booleans(), booleans())
+ def test_no_init_order(self, slots, frozen):
+ """
+ If an attribute is `init=False`, it's legal to come after a mandatory
+ attribute.
+ """
+ make_class(
+ "C",
+ {"a": attr.ib(default=Factory(list)), "b": attr.ib(init=False)},
+ slots=slots,
+ frozen=frozen,
+ )
+
+ def test_sets_attributes(self):
+ """
+ The attributes are initialized using the passed keywords.
+ """
+ obj = InitC(a=1, b=2)
+ assert 1 == obj.a
+ assert 2 == obj.b
+
+ def test_default(self):
+ """
+ If a default value is present, it's used as fallback.
+ """
+
+ class C(object):
+ __attrs_attrs__ = [
+ simple_attr(name="a", default=2),
+ simple_attr(name="b", default="hallo"),
+ simple_attr(name="c", default=None),
+ ]
+
+ C = _add_init(C, False)
+ i = C()
+ assert 2 == i.a
+ assert "hallo" == i.b
+ assert None is i.c
+
+ def test_factory(self):
+ """
+ If a default factory is present, it's used as fallback.
+ """
+
+ class D(object):
+ pass
+
+ class C(object):
+ __attrs_attrs__ = [
+ simple_attr(name="a", default=Factory(list)),
+ simple_attr(name="b", default=Factory(D)),
+ ]
+
+ C = _add_init(C, False)
+ i = C()
+
+ assert [] == i.a
+ assert isinstance(i.b, D)
+
+ def test_validator(self):
+ """
+ If a validator is passed, call it with the preliminary instance, the
+ Attribute, and the argument.
+ """
+
+ class VException(Exception):
+ pass
+
+ def raiser(*args):
+ raise VException(*args)
+
+ C = make_class("C", {"a": attr.ib("a", validator=raiser)})
+ with pytest.raises(VException) as e:
+ C(42)
+
+ assert (fields(C).a, 42) == e.value.args[1:]
+ assert isinstance(e.value.args[0], C)
+
+ def test_validator_slots(self):
+ """
+ If a validator is passed, call it with the preliminary instance, the
+ Attribute, and the argument.
+ """
+
+ class VException(Exception):
+ pass
+
+ def raiser(*args):
+ raise VException(*args)
+
+ C = make_class("C", {"a": attr.ib("a", validator=raiser)}, slots=True)
+ with pytest.raises(VException) as e:
+ C(42)
+
+ assert (fields(C)[0], 42) == e.value.args[1:]
+ assert isinstance(e.value.args[0], C)
+
+ @given(booleans())
+ def test_validator_others(self, slots):
+ """
+ Does not interfere when setting non-attrs attributes.
+ """
+ C = make_class(
+ "C", {"a": attr.ib("a", validator=instance_of(int))}, slots=slots
+ )
+ i = C(1)
+
+ assert 1 == i.a
+
+ if not slots:
+ i.b = "foo"
+ assert "foo" == i.b
+ else:
+ with pytest.raises(AttributeError):
+ i.b = "foo"
+
+ def test_underscores(self):
+ """
+        The argument names in `__init__` have no leading or trailing
+        underscores.
+ """
+
+ class C(object):
+ __attrs_attrs__ = [simple_attr("_private")]
+
+ C = _add_init(C, False)
+ i = C(private=42)
+ assert 42 == i._private
+
+
+class TestNothing(object):
+ """
+ Tests for `_Nothing`.
+ """
+
+ def test_copy(self):
+ """
+ __copy__ returns the same object.
+ """
+ n = _Nothing()
+ assert n is copy.copy(n)
+
+ def test_deepcopy(self):
+ """
+ __deepcopy__ returns the same object.
+ """
+ n = _Nothing()
+ assert n is copy.deepcopy(n)
+
+ def test_eq(self):
+ """
+ All instances are equal.
+ """
+ assert _Nothing() == _Nothing() == NOTHING
+ assert not (_Nothing() != _Nothing())
+ assert 1 != _Nothing()
+
+ def test_false(self):
+ """
+ NOTHING evaluates as falsey.
+ """
+ assert not NOTHING
+ assert False is bool(NOTHING)
+
+
+@attr.s(hash=True, order=True)
+class C(object):
+ pass
+
+
+# Store this class before we recreate it.
+OriginalC = C
+
+
+@attr.s(hash=True, order=True)
+class C(object):
+ pass
+
+
+CopyC = C
+
+
+@attr.s(hash=True, order=True)
+class C(object):
+ """A different class, to generate different methods."""
+
+ a = attr.ib()
+
+
+class TestFilenames(object):
+ def test_filenames(self):
+ """
+ The created dunder methods have a "consistent" filename.
+ """
+ assert (
+ OriginalC.__init__.__code__.co_filename
+ == "<attrs generated init tests.test_dunders.C>"
+ )
+ assert (
+ OriginalC.__eq__.__code__.co_filename
+ == "<attrs generated eq tests.test_dunders.C>"
+ )
+ assert (
+ OriginalC.__hash__.__code__.co_filename
+ == "<attrs generated hash tests.test_dunders.C>"
+ )
+ assert (
+ CopyC.__init__.__code__.co_filename
+ == "<attrs generated init tests.test_dunders.C>"
+ )
+ assert (
+ CopyC.__eq__.__code__.co_filename
+ == "<attrs generated eq tests.test_dunders.C>"
+ )
+ assert (
+ CopyC.__hash__.__code__.co_filename
+ == "<attrs generated hash tests.test_dunders.C>"
+ )
+ assert (
+ C.__init__.__code__.co_filename
+ == "<attrs generated init tests.test_dunders.C-1>"
+ )
+ assert (
+ C.__eq__.__code__.co_filename
+ == "<attrs generated eq tests.test_dunders.C-1>"
+ )
+ assert (
+ C.__hash__.__code__.co_filename
+ == "<attrs generated hash tests.test_dunders.C-1>"
+ )
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_filters.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_filters.py
new file mode 100644
index 0000000000..d1ec24dc6c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_filters.py
@@ -0,0 +1,111 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for `attr.filters`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import pytest
+
+import attr
+
+from attr import fields
+from attr.filters import _split_what, exclude, include
+
+
+@attr.s
+class C(object):
+ a = attr.ib()
+ b = attr.ib()
+
+
+class TestSplitWhat(object):
+ """
+ Tests for `_split_what`.
+ """
+
+ def test_splits(self):
+ """
+ Splits correctly.
+ """
+ assert (
+ frozenset((int, str)),
+ frozenset((fields(C).a,)),
+ ) == _split_what((str, fields(C).a, int))
+
+
+class TestInclude(object):
+ """
+ Tests for `include`.
+ """
+
+ @pytest.mark.parametrize(
+ "incl,value",
+ [
+ ((int,), 42),
+ ((str,), "hello"),
+ ((str, fields(C).a), 42),
+ ((str, fields(C).b), "hello"),
+ ],
+ )
+ def test_allow(self, incl, value):
+ """
+ Return True if a class or attribute is included.
+ """
+ i = include(*incl)
+ assert i(fields(C).a, value) is True
+
+ @pytest.mark.parametrize(
+ "incl,value",
+ [
+ ((str,), 42),
+ ((int,), "hello"),
+ ((str, fields(C).b), 42),
+ ((int, fields(C).b), "hello"),
+ ],
+ )
+ def test_drop_class(self, incl, value):
+ """
+ Return False on non-included classes and attributes.
+ """
+ i = include(*incl)
+ assert i(fields(C).a, value) is False
+
+
+class TestExclude(object):
+ """
+ Tests for `exclude`.
+ """
+
+ @pytest.mark.parametrize(
+ "excl,value",
+ [
+ ((str,), 42),
+ ((int,), "hello"),
+ ((str, fields(C).b), 42),
+ ((int, fields(C).b), "hello"),
+ ],
+ )
+ def test_allow(self, excl, value):
+ """
+ Return True if class or attribute is not excluded.
+ """
+ e = exclude(*excl)
+ assert e(fields(C).a, value) is True
+
+ @pytest.mark.parametrize(
+ "excl,value",
+ [
+ ((int,), 42),
+ ((str,), "hello"),
+ ((str, fields(C).a), 42),
+ ((str, fields(C).b), "hello"),
+ ],
+ )
+ def test_drop_class(self, excl, value):
+ """
+        Return False on excluded classes and attributes.
+ """
+ e = exclude(*excl)
+ assert e(fields(C).a, value) is False
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_funcs.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_funcs.py
new file mode 100644
index 0000000000..4490ed815a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_funcs.py
@@ -0,0 +1,680 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for `attr._funcs`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from collections import OrderedDict
+
+import pytest
+
+from hypothesis import assume, given
+from hypothesis import strategies as st
+
+import attr
+
+from attr import asdict, assoc, astuple, evolve, fields, has
+from attr._compat import TYPE, Mapping, Sequence, ordered_dict
+from attr.exceptions import AttrsAttributeNotFoundError
+from attr.validators import instance_of
+
+from .strategies import nested_classes, simple_classes
+
+
+MAPPING_TYPES = (dict, OrderedDict)
+SEQUENCE_TYPES = (list, tuple)
+
+
+@pytest.fixture(scope="session", name="C")
+def _C():
+ """
+ Return a simple but fully featured attrs class with an x and a y attribute.
+ """
+ import attr
+
+ @attr.s
+ class C(object):
+ x = attr.ib()
+ y = attr.ib()
+
+ return C
+
+
+class TestAsDict(object):
+ """
+ Tests for `asdict`.
+ """
+
+ @given(st.sampled_from(MAPPING_TYPES))
+ def test_shallow(self, C, dict_factory):
+ """
+ Shallow asdict returns correct dict.
+ """
+ assert {"x": 1, "y": 2} == asdict(
+ C(x=1, y=2), False, dict_factory=dict_factory
+ )
+
+ @given(st.sampled_from(MAPPING_TYPES))
+ def test_recurse(self, C, dict_class):
+ """
+ Deep asdict returns correct dict.
+ """
+ assert {"x": {"x": 1, "y": 2}, "y": {"x": 3, "y": 4}} == asdict(
+ C(C(1, 2), C(3, 4)), dict_factory=dict_class
+ )
+
+ def test_nested_lists(self, C):
+ """
+ Test unstructuring deeply nested lists.
+ """
+ inner = C(1, 2)
+ outer = C([[inner]], None)
+
+ assert {"x": [[{"x": 1, "y": 2}]], "y": None} == asdict(outer)
+
+ def test_nested_dicts(self, C):
+ """
+ Test unstructuring deeply nested dictionaries.
+ """
+ inner = C(1, 2)
+ outer = C({1: {2: inner}}, None)
+
+ assert {"x": {1: {2: {"x": 1, "y": 2}}}, "y": None} == asdict(outer)
+
+ @given(nested_classes, st.sampled_from(MAPPING_TYPES))
+ def test_recurse_property(self, cls, dict_class):
+ """
+ Property tests for recursive asdict.
+ """
+ obj = cls()
+ obj_dict = asdict(obj, dict_factory=dict_class)
+
+ def assert_proper_dict_class(obj, obj_dict):
+ assert isinstance(obj_dict, dict_class)
+
+ for field in fields(obj.__class__):
+ field_val = getattr(obj, field.name)
+ if has(field_val.__class__):
+ # This field holds a class, recurse the assertions.
+ assert_proper_dict_class(field_val, obj_dict[field.name])
+ elif isinstance(field_val, Sequence):
+ dict_val = obj_dict[field.name]
+ for item, item_dict in zip(field_val, dict_val):
+ if has(item.__class__):
+ assert_proper_dict_class(item, item_dict)
+ elif isinstance(field_val, Mapping):
+ # This field holds a dictionary.
+ assert isinstance(obj_dict[field.name], dict_class)
+
+ for key, val in field_val.items():
+ if has(val.__class__):
+ assert_proper_dict_class(
+ val, obj_dict[field.name][key]
+ )
+
+ assert_proper_dict_class(obj, obj_dict)
+
+ @given(st.sampled_from(MAPPING_TYPES))
+ def test_filter(self, C, dict_factory):
+ """
+ Attributes that are supposed to be skipped are skipped.
+ """
+ assert {"x": {"x": 1}} == asdict(
+ C(C(1, 2), C(3, 4)),
+ filter=lambda a, v: a.name != "y",
+ dict_factory=dict_factory,
+ )
+
+ @given(container=st.sampled_from(SEQUENCE_TYPES))
+ def test_lists_tuples(self, container, C):
+ """
+ If recurse is True, also recurse into lists.
+ """
+ assert {
+ "x": 1,
+ "y": [{"x": 2, "y": 3}, {"x": 4, "y": 5}, "a"],
+ } == asdict(C(1, container([C(2, 3), C(4, 5), "a"])))
+
+ @given(container=st.sampled_from(SEQUENCE_TYPES))
+ def test_lists_tuples_retain_type(self, container, C):
+ """
+        If recurse and retain_collection_types are True, also recurse
+        into lists and tuples and do not convert them to plain lists.
+ """
+ assert {
+ "x": 1,
+ "y": container([{"x": 2, "y": 3}, {"x": 4, "y": 5}, "a"]),
+ } == asdict(
+ C(1, container([C(2, 3), C(4, 5), "a"])),
+ retain_collection_types=True,
+ )
+
+ @given(set_type=st.sampled_from((set, frozenset)))
+ def test_sets_no_retain(self, C, set_type):
+ """
+ Set types are converted to lists if retain_collection_types=False.
+ """
+ d = asdict(
+ C(1, set_type((1, 2, 3))),
+ retain_collection_types=False,
+ recurse=True,
+ )
+
+ assert {"x": 1, "y": [1, 2, 3]} == d
+
+ @given(st.sampled_from(MAPPING_TYPES))
+ def test_dicts(self, C, dict_factory):
+ """
+ If recurse is True, also recurse into dicts.
+ """
+ res = asdict(C(1, {"a": C(4, 5)}), dict_factory=dict_factory)
+
+ assert {"x": 1, "y": {"a": {"x": 4, "y": 5}}} == res
+ assert isinstance(res, dict_factory)
+
+ @given(simple_classes(private_attrs=False), st.sampled_from(MAPPING_TYPES))
+ def test_roundtrip(self, cls, dict_class):
+ """
+ Test dumping to dicts and back for Hypothesis-generated classes.
+
+ Private attributes don't round-trip (the attribute name is different
+ than the initializer argument).
+ """
+ instance = cls()
+ dict_instance = asdict(instance, dict_factory=dict_class)
+
+ assert isinstance(dict_instance, dict_class)
+
+ roundtrip_instance = cls(**dict_instance)
+
+ assert instance == roundtrip_instance
+
+ @given(simple_classes())
+ def test_asdict_preserve_order(self, cls):
+ """
+ Field order should be preserved when dumping to an ordered_dict.
+ """
+ instance = cls()
+ dict_instance = asdict(instance, dict_factory=ordered_dict)
+
+ assert [a.name for a in fields(cls)] == list(dict_instance.keys())
+
+ def test_retain_keys_are_tuples(self):
+ """
+        retain_collection_types=True also retains dict keys.
+ """
+
+ @attr.s
+ class A(object):
+ a = attr.ib()
+
+ instance = A({(1,): 1})
+
+ assert {"a": {(1,): 1}} == attr.asdict(
+ instance, retain_collection_types=True
+ )
+
+ def test_tuple_keys(self):
+ """
+        Even if retain_collection_types is False, a tuple-typed dict key is
+        kept as a tuple (lists are not hashable).
+
+ See #646
+ """
+
+ @attr.s
+ class A(object):
+ a = attr.ib()
+
+ instance = A({(1,): 1})
+
+ assert {"a": {(1,): 1}} == attr.asdict(instance)
+
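+def _example_asdict_recursion():
+    # Illustrative sketch (not part of the tests above): asdict() recurses
+    # into nested attrs instances and, unless retain_collection_types=True,
+    # normalizes tuples and sets to lists.  `Point` and `Path` are
+    # hypothetical.
+    @attr.s
+    class Point(object):
+        x = attr.ib()
+        y = attr.ib()
+
+    @attr.s
+    class Path(object):
+        points = attr.ib()
+
+    p = Path(points=(Point(0, 0), Point(1, 1)))
+
+    assert {"points": [{"x": 0, "y": 0}, {"x": 1, "y": 1}]} == asdict(p)
+    assert {"points": ({"x": 0, "y": 0}, {"x": 1, "y": 1})} == asdict(
+        p, retain_collection_types=True
+    )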
+
+class TestAsTuple(object):
+ """
+ Tests for `astuple`.
+ """
+
+ @given(st.sampled_from(SEQUENCE_TYPES))
+ def test_shallow(self, C, tuple_factory):
+ """
+        Shallow astuple returns correct tuple.
+ """
+ assert tuple_factory([1, 2]) == astuple(
+ C(x=1, y=2), False, tuple_factory=tuple_factory
+ )
+
+ @given(st.sampled_from(SEQUENCE_TYPES))
+ def test_recurse(self, C, tuple_factory):
+ """
+ Deep astuple returns correct tuple.
+ """
+ assert tuple_factory(
+ [tuple_factory([1, 2]), tuple_factory([3, 4])]
+ ) == astuple(C(C(1, 2), C(3, 4)), tuple_factory=tuple_factory)
+
+ @given(nested_classes, st.sampled_from(SEQUENCE_TYPES))
+ def test_recurse_property(self, cls, tuple_class):
+ """
+ Property tests for recursive astuple.
+ """
+ obj = cls()
+ obj_tuple = astuple(obj, tuple_factory=tuple_class)
+
+ def assert_proper_tuple_class(obj, obj_tuple):
+ assert isinstance(obj_tuple, tuple_class)
+ for index, field in enumerate(fields(obj.__class__)):
+ field_val = getattr(obj, field.name)
+ if has(field_val.__class__):
+ # This field holds a class, recurse the assertions.
+ assert_proper_tuple_class(field_val, obj_tuple[index])
+
+ assert_proper_tuple_class(obj, obj_tuple)
+
+ @given(nested_classes, st.sampled_from(SEQUENCE_TYPES))
+ def test_recurse_retain(self, cls, tuple_class):
+ """
+ Property tests for asserting collection types are retained.
+ """
+ obj = cls()
+ obj_tuple = astuple(
+ obj, tuple_factory=tuple_class, retain_collection_types=True
+ )
+
+ def assert_proper_col_class(obj, obj_tuple):
+ # Iterate over all attributes, and if they are lists or mappings
+ # in the original, assert they are the same class in the dumped.
+ for index, field in enumerate(fields(obj.__class__)):
+ field_val = getattr(obj, field.name)
+ if has(field_val.__class__):
+ # This field holds a class, recurse the assertions.
+ assert_proper_col_class(field_val, obj_tuple[index])
+ elif isinstance(field_val, (list, tuple)):
+ # This field holds a sequence of something.
+ expected_type = type(obj_tuple[index])
+ assert type(field_val) is expected_type
+ for obj_e, obj_tuple_e in zip(field_val, obj_tuple[index]):
+ if has(obj_e.__class__):
+ assert_proper_col_class(obj_e, obj_tuple_e)
+ elif isinstance(field_val, dict):
+ orig = field_val
+ tupled = obj_tuple[index]
+ assert type(orig) is type(tupled)
+ for obj_e, obj_tuple_e in zip(
+ orig.items(), tupled.items()
+ ):
+ if has(obj_e[0].__class__): # Dict key
+ assert_proper_col_class(obj_e[0], obj_tuple_e[0])
+ if has(obj_e[1].__class__): # Dict value
+ assert_proper_col_class(obj_e[1], obj_tuple_e[1])
+
+ assert_proper_col_class(obj, obj_tuple)
+
+ @given(st.sampled_from(SEQUENCE_TYPES))
+ def test_filter(self, C, tuple_factory):
+ """
+ Attributes that are supposed to be skipped are skipped.
+ """
+ assert tuple_factory([tuple_factory([1])]) == astuple(
+ C(C(1, 2), C(3, 4)),
+ filter=lambda a, v: a.name != "y",
+ tuple_factory=tuple_factory,
+ )
+
+ @given(container=st.sampled_from(SEQUENCE_TYPES))
+ def test_lists_tuples(self, container, C):
+ """
+ If recurse is True, also recurse into lists.
+ """
+ assert (1, [(2, 3), (4, 5), "a"]) == astuple(
+ C(1, container([C(2, 3), C(4, 5), "a"]))
+ )
+
+ @given(st.sampled_from(SEQUENCE_TYPES))
+ def test_dicts(self, C, tuple_factory):
+ """
+ If recurse is True, also recurse into dicts.
+ """
+ res = astuple(C(1, {"a": C(4, 5)}), tuple_factory=tuple_factory)
+ assert tuple_factory([1, {"a": tuple_factory([4, 5])}]) == res
+ assert isinstance(res, tuple_factory)
+
+ @given(container=st.sampled_from(SEQUENCE_TYPES))
+ def test_lists_tuples_retain_type(self, container, C):
+ """
+        If recurse and retain_collection_types are True, also recurse
+        into lists and tuples and do not convert them to plain lists.
+ """
+ assert (1, container([(2, 3), (4, 5), "a"])) == astuple(
+ C(1, container([C(2, 3), C(4, 5), "a"])),
+ retain_collection_types=True,
+ )
+
+ @given(container=st.sampled_from(MAPPING_TYPES))
+ def test_dicts_retain_type(self, container, C):
+ """
+        If recurse and retain_collection_types are True, also recurse
+        into mappings and do not convert them to plain dicts.
+ """
+ assert (1, container({"a": (4, 5)})) == astuple(
+ C(1, container({"a": C(4, 5)})), retain_collection_types=True
+ )
+
+ @given(simple_classes(), st.sampled_from(SEQUENCE_TYPES))
+ def test_roundtrip(self, cls, tuple_class):
+ """
+ Test dumping to tuple and back for Hypothesis-generated classes.
+ """
+ instance = cls()
+ tuple_instance = astuple(instance, tuple_factory=tuple_class)
+
+ assert isinstance(tuple_instance, tuple_class)
+
+ roundtrip_instance = cls(*tuple_instance)
+
+ assert instance == roundtrip_instance
+
+ @given(set_type=st.sampled_from((set, frozenset)))
+ def test_sets_no_retain(self, C, set_type):
+ """
+ Set types are converted to lists if retain_collection_types=False.
+ """
+ d = astuple(
+ C(1, set_type((1, 2, 3))),
+ retain_collection_types=False,
+ recurse=True,
+ )
+
+ assert (1, [1, 2, 3]) == d
+
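+def _example_astuple():
+    # Illustrative sketch (not part of the tests above): astuple() is the
+    # positional counterpart of asdict() and recurses the same way.  `Point`
+    # is hypothetical.
+    @attr.s
+    class Point(object):
+        x = attr.ib()
+        y = attr.ib()
+
+    assert (1, 2) == astuple(Point(1, 2))
+    assert ((1, 2), (3, 4)) == astuple(Point(Point(1, 2), Point(3, 4)))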
+
+class TestHas(object):
+ """
+ Tests for `has`.
+ """
+
+ def test_positive(self, C):
+ """
+ Returns `True` on decorated classes.
+ """
+ assert has(C)
+
+ def test_positive_empty(self):
+ """
+ Returns `True` on decorated classes even if there are no attributes.
+ """
+
+ @attr.s
+ class D(object):
+ pass
+
+ assert has(D)
+
+ def test_negative(self):
+ """
+ Returns `False` on non-decorated classes.
+ """
+ assert not has(object)
+
+
+class TestAssoc(object):
+ """
+ Tests for `assoc`.
+ """
+
+ @given(slots=st.booleans(), frozen=st.booleans())
+ def test_empty(self, slots, frozen):
+ """
+ Empty classes without changes get copied.
+ """
+
+ @attr.s(slots=slots, frozen=frozen)
+ class C(object):
+ pass
+
+ i1 = C()
+ with pytest.deprecated_call():
+ i2 = assoc(i1)
+
+ assert i1 is not i2
+ assert i1 == i2
+
+ @given(simple_classes())
+ def test_no_changes(self, C):
+ """
+ No changes means a verbatim copy.
+ """
+ i1 = C()
+ with pytest.deprecated_call():
+ i2 = assoc(i1)
+
+ assert i1 is not i2
+ assert i1 == i2
+
+ @given(simple_classes(), st.data())
+ def test_change(self, C, data):
+ """
+ Changes work.
+ """
+        # Change a randomly chosen subset of attributes.
+ assume(fields(C)) # Skip classes with no attributes.
+ field_names = [a.name for a in fields(C)]
+ original = C()
+ chosen_names = data.draw(st.sets(st.sampled_from(field_names)))
+ change_dict = {name: data.draw(st.integers()) for name in chosen_names}
+
+ with pytest.deprecated_call():
+ changed = assoc(original, **change_dict)
+
+ for k, v in change_dict.items():
+ assert getattr(changed, k) == v
+
+ @given(simple_classes())
+ def test_unknown(self, C):
+ """
+ Wanting to change an unknown attribute raises an
+ AttrsAttributeNotFoundError.
+ """
+ # No generated class will have a four letter attribute.
+ with pytest.raises(
+ AttrsAttributeNotFoundError
+ ) as e, pytest.deprecated_call():
+ assoc(C(), aaaa=2)
+
+ assert (
+ "aaaa is not an attrs attribute on {cls!r}.".format(cls=C),
+ ) == e.value.args
+
+ def test_frozen(self):
+ """
+ Works on frozen classes.
+ """
+
+ @attr.s(frozen=True)
+ class C(object):
+ x = attr.ib()
+ y = attr.ib()
+
+ with pytest.deprecated_call():
+ assert C(3, 2) == assoc(C(1, 2), x=3)
+
+ def test_warning(self):
+ """
+ DeprecationWarning points to the correct file.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib()
+
+ with pytest.warns(DeprecationWarning) as wi:
+ assert C(2) == assoc(C(1), x=2)
+
+ assert __file__ == wi.list[0].filename
+
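+def _example_assoc_vs_evolve():
+    # Illustrative sketch (not part of the tests above): assoc() is
+    # deprecated; evolve() is the supported way to create a changed copy and
+    # routes the changes through __init__ (validators and converters
+    # included).
+    @attr.s
+    class C(object):
+        x = attr.ib()
+        y = attr.ib()
+
+    i = C(1, 2)
+
+    assert C(1, 3) == evolve(i, y=3)
+    assert C(1, 2) == i  # the original instance is left untouched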
+
+class TestEvolve(object):
+ """
+ Tests for `evolve`.
+ """
+
+ @given(slots=st.booleans(), frozen=st.booleans())
+ def test_empty(self, slots, frozen):
+ """
+ Empty classes without changes get copied.
+ """
+
+ @attr.s(slots=slots, frozen=frozen)
+ class C(object):
+ pass
+
+ i1 = C()
+ i2 = evolve(i1)
+
+ assert i1 is not i2
+ assert i1 == i2
+
+ @given(simple_classes())
+ def test_no_changes(self, C):
+ """
+ No changes means a verbatim copy.
+ """
+ i1 = C()
+ i2 = evolve(i1)
+
+ assert i1 is not i2
+ assert i1 == i2
+
+ @given(simple_classes(), st.data())
+ def test_change(self, C, data):
+ """
+ Changes work.
+ """
+        # Change a randomly chosen subset of attributes.
+ assume(fields(C)) # Skip classes with no attributes.
+ field_names = [a.name for a in fields(C)]
+ original = C()
+ chosen_names = data.draw(st.sets(st.sampled_from(field_names)))
+        # We pay special attention to private attributes; they should behave
+        # as they do in `__init__`.
+ change_dict = {
+ name.replace("_", ""): data.draw(st.integers())
+ for name in chosen_names
+ }
+ changed = evolve(original, **change_dict)
+ for name in chosen_names:
+ assert getattr(changed, name) == change_dict[name.replace("_", "")]
+
+ @given(simple_classes())
+ def test_unknown(self, C):
+ """
+        Wanting to change an unknown attribute raises a TypeError, because
+        evolve() passes changes straight to __init__.
+ """
+ # No generated class will have a four letter attribute.
+ with pytest.raises(TypeError) as e:
+ evolve(C(), aaaa=2)
+
+ if hasattr(C, "__attrs_init__"):
+ expected = (
+ "__attrs_init__() got an unexpected keyword argument 'aaaa'"
+ )
+ else:
+ expected = "__init__() got an unexpected keyword argument 'aaaa'"
+
+ assert e.value.args[0].endswith(expected)
+
+ def test_validator_failure(self):
+ """
+ TypeError isn't swallowed when validation fails within evolve.
+ """
+
+ @attr.s
+ class C(object):
+ a = attr.ib(validator=instance_of(int))
+
+ with pytest.raises(TypeError) as e:
+ evolve(C(a=1), a="some string")
+ m = e.value.args[0]
+
+ assert m.startswith("'a' must be <{type} 'int'>".format(type=TYPE))
+
+ def test_private(self):
+ """
+ evolve() acts as `__init__` with regards to private attributes.
+ """
+
+ @attr.s
+ class C(object):
+ _a = attr.ib()
+
+ assert evolve(C(1), a=2)._a == 2
+
+ with pytest.raises(TypeError):
+ evolve(C(1), _a=2)
+
+ with pytest.raises(TypeError):
+ evolve(C(1), a=3, _a=2)
+
+ def test_non_init_attrs(self):
+ """
+ evolve() handles `init=False` attributes.
+ """
+
+ @attr.s
+ class C(object):
+ a = attr.ib()
+ b = attr.ib(init=False, default=0)
+
+ assert evolve(C(1), a=2).a == 2
+
+ def test_regression_attrs_classes(self):
+ """
+ evolve() can evolve fields that are instances of attrs classes.
+
+ Regression test for #804
+ """
+
+ @attr.s
+ class Cls1(object):
+ param1 = attr.ib()
+
+ @attr.s
+ class Cls2(object):
+ param2 = attr.ib()
+
+ obj2a = Cls2(param2="a")
+ obj2b = Cls2(param2="b")
+
+ obj1a = Cls1(param1=obj2a)
+
+ assert Cls1(param1=Cls2(param2="b")) == attr.evolve(
+ obj1a, param1=obj2b
+ )
+
+ def test_dicts(self):
+ """
+ evolve() can replace an attrs class instance with a dict.
+
+ See #806
+ """
+
+ @attr.s
+ class Cls1(object):
+ param1 = attr.ib()
+
+ @attr.s
+ class Cls2(object):
+ param2 = attr.ib()
+
+ obj2a = Cls2(param2="a")
+ obj2b = {"foo": 42, "param2": 42}
+
+ obj1a = Cls1(param1=obj2a)
+
+ assert Cls1({"foo": 42, "param2": 42}) == attr.evolve(
+ obj1a, param1=obj2b
+ )
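+
+
+def _example_evolve_private():
+    # Illustrative sketch (not part of the tests above): like __init__,
+    # evolve() takes private attributes without their leading underscore.
+    # `Connection` is hypothetical.
+    @attr.s
+    class Connection(object):
+        _timeout = attr.ib(default=10)
+
+    assert 30 == evolve(Connection(), timeout=30)._timeout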
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_functional.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_functional.py
new file mode 100644
index 0000000000..9b6a27e2f4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_functional.py
@@ -0,0 +1,790 @@
+# SPDX-License-Identifier: MIT
+
+"""
+End-to-end tests.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import inspect
+import pickle
+
+from copy import deepcopy
+
+import pytest
+import six
+
+from hypothesis import assume, given
+from hypothesis.strategies import booleans
+
+import attr
+
+from attr._compat import PY2, PY36, TYPE
+from attr._make import NOTHING, Attribute
+from attr.exceptions import FrozenInstanceError
+
+from .strategies import optional_bool
+
+
+@attr.s
+class C1(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+ y = attr.ib()
+
+
+@attr.s(slots=True)
+class C1Slots(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+ y = attr.ib()
+
+
+foo = None
+
+
+@attr.s()
+class C2(object):
+ x = attr.ib(default=foo)
+ y = attr.ib(default=attr.Factory(list))
+
+
+@attr.s(slots=True)
+class C2Slots(object):
+ x = attr.ib(default=foo)
+ y = attr.ib(default=attr.Factory(list))
+
+
+@attr.s
+class Base(object):
+ x = attr.ib()
+
+ def meth(self):
+ return self.x
+
+
+@attr.s(slots=True)
+class BaseSlots(object):
+ x = attr.ib()
+
+ def meth(self):
+ return self.x
+
+
+@attr.s
+class Sub(Base):
+ y = attr.ib()
+
+
+@attr.s(slots=True)
+class SubSlots(BaseSlots):
+ y = attr.ib()
+
+
+@attr.s(frozen=True, slots=True)
+class Frozen(object):
+ x = attr.ib()
+
+
+@attr.s
+class SubFrozen(Frozen):
+ y = attr.ib()
+
+
+@attr.s(frozen=True, slots=False)
+class FrozenNoSlots(object):
+ x = attr.ib()
+
+
+class Meta(type):
+ pass
+
+
+@attr.s
+@six.add_metaclass(Meta)
+class WithMeta(object):
+ pass
+
+
+@attr.s(slots=True)
+@six.add_metaclass(Meta)
+class WithMetaSlots(object):
+ pass
+
+
+FromMakeClass = attr.make_class("FromMakeClass", ["x"])
+
+
+class TestFunctional(object):
+ """
+ Functional tests.
+ """
+
+ @pytest.mark.parametrize("cls", [C2, C2Slots])
+ def test_fields(self, cls):
+ """
+ `attr.fields` works.
+ """
+ assert (
+ Attribute(
+ name="x",
+ default=foo,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=True,
+ hash=None,
+ init=True,
+ inherited=False,
+ ),
+ Attribute(
+ name="y",
+ default=attr.Factory(list),
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=True,
+ hash=None,
+ init=True,
+ inherited=False,
+ ),
+ ) == attr.fields(cls)
+
+ @pytest.mark.parametrize("cls", [C1, C1Slots])
+ def test_asdict(self, cls):
+ """
+ `attr.asdict` works.
+ """
+ assert {"x": 1, "y": 2} == attr.asdict(cls(x=1, y=2))
+
+ @pytest.mark.parametrize("cls", [C1, C1Slots])
+ def test_validator(self, cls):
+ """
+ `instance_of` raises `TypeError` on type mismatch.
+ """
+ with pytest.raises(TypeError) as e:
+ cls("1", 2)
+
+ # Using C1 explicitly, since slotted classes don't support this.
+ assert (
+ "'x' must be <{type} 'int'> (got '1' that is a <{type} "
+ "'str'>).".format(type=TYPE),
+ attr.fields(C1).x,
+ int,
+ "1",
+ ) == e.value.args
+
+ @given(booleans())
+ def test_renaming(self, slots):
+ """
+ Private members are renamed but only in `__init__`.
+ """
+
+ @attr.s(slots=slots)
+ class C3(object):
+ _x = attr.ib()
+
+ assert "C3(_x=1)" == repr(C3(x=1))
+
+ @given(booleans(), booleans())
+ def test_programmatic(self, slots, frozen):
+ """
+ `attr.make_class` works.
+ """
+ PC = attr.make_class("PC", ["a", "b"], slots=slots, frozen=frozen)
+
+ assert (
+ Attribute(
+ name="a",
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=True,
+ hash=None,
+ init=True,
+ inherited=False,
+ ),
+ Attribute(
+ name="b",
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=True,
+ hash=None,
+ init=True,
+ inherited=False,
+ ),
+ ) == attr.fields(PC)
+
+ @pytest.mark.parametrize("cls", [Sub, SubSlots])
+ def test_subclassing_with_extra_attrs(self, cls):
+ """
+ Subclassing (where the subclass has extra attrs) does what you'd hope
+ for.
+ """
+ obj = object()
+ i = cls(x=obj, y=2)
+ assert i.x is i.meth() is obj
+ assert i.y == 2
+ if cls is Sub:
+ assert "Sub(x={obj}, y=2)".format(obj=obj) == repr(i)
+ else:
+ assert "SubSlots(x={obj}, y=2)".format(obj=obj) == repr(i)
+
+ @pytest.mark.parametrize("base", [Base, BaseSlots])
+ def test_subclass_without_extra_attrs(self, base):
+ """
+ Subclassing (where the subclass does not have extra attrs) still
+ behaves the same as a subclass with extra attrs.
+ """
+
+ class Sub2(base):
+ pass
+
+ obj = object()
+ i = Sub2(x=obj)
+ assert i.x is i.meth() is obj
+ assert "Sub2(x={obj})".format(obj=obj) == repr(i)
+
+ @pytest.mark.parametrize(
+ "frozen_class",
+ [
+ Frozen, # has slots=True
+ attr.make_class("FrozenToo", ["x"], slots=False, frozen=True),
+ ],
+ )
+ def test_frozen_instance(self, frozen_class):
+ """
+ Frozen instances can't be modified (easily).
+ """
+ frozen = frozen_class(1)
+
+ with pytest.raises(FrozenInstanceError) as e:
+ frozen.x = 2
+
+ with pytest.raises(FrozenInstanceError) as e:
+ del frozen.x
+
+ assert e.value.args[0] == "can't set attribute"
+ assert 1 == frozen.x
+
+ @pytest.mark.parametrize(
+ "cls",
+ [
+ C1,
+ C1Slots,
+ C2,
+ C2Slots,
+ Base,
+ BaseSlots,
+ Sub,
+ SubSlots,
+ Frozen,
+ FrozenNoSlots,
+ FromMakeClass,
+ ],
+ )
+ @pytest.mark.parametrize("protocol", range(2, pickle.HIGHEST_PROTOCOL + 1))
+ def test_pickle_attributes(self, cls, protocol):
+ """
+ Pickling/un-pickling of Attribute instances works.
+ """
+ for attribute in attr.fields(cls):
+ assert attribute == pickle.loads(pickle.dumps(attribute, protocol))
+
+ @pytest.mark.parametrize(
+ "cls",
+ [
+ C1,
+ C1Slots,
+ C2,
+ C2Slots,
+ Base,
+ BaseSlots,
+ Sub,
+ SubSlots,
+ Frozen,
+ FrozenNoSlots,
+ FromMakeClass,
+ ],
+ )
+ @pytest.mark.parametrize("protocol", range(2, pickle.HIGHEST_PROTOCOL + 1))
+ def test_pickle_object(self, cls, protocol):
+ """
+ Pickle object serialization works on all kinds of attrs classes.
+ """
+ if len(attr.fields(cls)) == 2:
+ obj = cls(123, 456)
+ else:
+ obj = cls(123)
+
+ assert repr(obj) == repr(pickle.loads(pickle.dumps(obj, protocol)))
+
+ def test_subclassing_frozen_gives_frozen(self):
+ """
+ The frozen-ness of classes is inherited. Subclasses of frozen classes
+ are also frozen and can be instantiated.
+ """
+ i = SubFrozen("foo", "bar")
+
+ assert i.x == "foo"
+ assert i.y == "bar"
+
+ with pytest.raises(FrozenInstanceError):
+ i.x = "baz"
+
+ @pytest.mark.parametrize("cls", [WithMeta, WithMetaSlots])
+ def test_metaclass_preserved(self, cls):
+ """
+ Metaclass data is preserved.
+ """
+ assert Meta == type(cls)
+
+ def test_default_decorator(self):
+ """
+ Default decorator sets the default and the respective method gets
+ called.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib(default=1)
+ y = attr.ib()
+
+ @y.default
+ def compute(self):
+ return self.x + 1
+
+ assert C(1, 2) == C()
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ @pytest.mark.parametrize("weakref_slot", [True, False])
+ def test_attrib_overwrite(self, slots, frozen, weakref_slot):
+ """
+ Subclasses can overwrite attributes of their base class.
+ """
+
+ @attr.s(slots=slots, frozen=frozen, weakref_slot=weakref_slot)
+ class SubOverwrite(Base):
+ x = attr.ib(default=attr.Factory(list))
+
+ assert SubOverwrite([]) == SubOverwrite()
+
+ def test_dict_patch_class(self):
+ """
+ dict-classes are never replaced.
+ """
+
+ class C(object):
+ x = attr.ib()
+
+ C_new = attr.s(C)
+
+ assert C_new is C
+
+ def test_hash_by_id(self):
+ """
+ With dict classes, hashing by ID is active for hash=False even on
+ Python 3. This is incorrect behavior but we have to retain it for
+ backward compatibility.
+ """
+
+ @attr.s(hash=False)
+ class HashByIDBackwardCompat(object):
+ x = attr.ib()
+
+ assert hash(HashByIDBackwardCompat(1)) != hash(
+ HashByIDBackwardCompat(1)
+ )
+
+ @attr.s(hash=False, eq=False)
+ class HashByID(object):
+ x = attr.ib()
+
+ assert hash(HashByID(1)) != hash(HashByID(1))
+
+ @attr.s(hash=True)
+ class HashByValues(object):
+ x = attr.ib()
+
+ assert hash(HashByValues(1)) == hash(HashByValues(1))
+
+ def test_handles_different_defaults(self):
+ """
+        Unhashable default values work, also when subclassing.
+ """
+
+ @attr.s
+ class Unhashable(object):
+ pass
+
+ @attr.s
+ class C(object):
+ x = attr.ib(default=Unhashable())
+
+ @attr.s
+ class D(C):
+ pass
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_hash_false_eq_false(self, slots):
+ """
+ hash=False and eq=False make a class hashable by ID.
+ """
+
+ @attr.s(hash=False, eq=False, slots=slots)
+ class C(object):
+ pass
+
+ assert hash(C()) != hash(C())
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_eq_false(self, slots):
+ """
+ eq=False makes a class hashable by ID.
+ """
+
+ @attr.s(eq=False, slots=slots)
+ class C(object):
+ pass
+
+ # Ensure both objects live long enough such that their ids/hashes
+ # can't be recycled. Thanks to Ask Hjorth Larsen for pointing that
+ # out.
+ c1 = C()
+ c2 = C()
+
+ assert hash(c1) != hash(c2)
+
+ def test_overwrite_base(self):
+ """
+ Base classes can overwrite each other and the attributes are added
+ in the order they are defined.
+ """
+
+ @attr.s
+ class C(object):
+ c = attr.ib(default=100)
+ x = attr.ib(default=1)
+ b = attr.ib(default=23)
+
+ @attr.s
+ class D(C):
+ a = attr.ib(default=42)
+ x = attr.ib(default=2)
+ d = attr.ib(default=3.14)
+
+ @attr.s
+ class E(D):
+ y = attr.ib(default=3)
+ z = attr.ib(default=4)
+
+ assert "E(c=100, b=23, a=42, x=2, d=3.14, y=3, z=4)" == repr(E())
+
+ @pytest.mark.parametrize("base_slots", [True, False])
+ @pytest.mark.parametrize("sub_slots", [True, False])
+ @pytest.mark.parametrize("base_frozen", [True, False])
+ @pytest.mark.parametrize("sub_frozen", [True, False])
+ @pytest.mark.parametrize("base_weakref_slot", [True, False])
+ @pytest.mark.parametrize("sub_weakref_slot", [True, False])
+ @pytest.mark.parametrize("base_converter", [True, False])
+ @pytest.mark.parametrize("sub_converter", [True, False])
+ def test_frozen_slots_combo(
+ self,
+ base_slots,
+ sub_slots,
+ base_frozen,
+ sub_frozen,
+ base_weakref_slot,
+ sub_weakref_slot,
+ base_converter,
+ sub_converter,
+ ):
+ """
+ A class with a single attribute, inheriting from another class
+ with a single attribute.
+ """
+
+ @attr.s(
+ frozen=base_frozen,
+ slots=base_slots,
+ weakref_slot=base_weakref_slot,
+ )
+ class Base(object):
+ a = attr.ib(converter=int if base_converter else None)
+
+ @attr.s(
+ frozen=sub_frozen, slots=sub_slots, weakref_slot=sub_weakref_slot
+ )
+ class Sub(Base):
+ b = attr.ib(converter=int if sub_converter else None)
+
+ i = Sub("1", "2")
+
+ assert i.a == (1 if base_converter else "1")
+ assert i.b == (2 if sub_converter else "2")
+
+ if base_frozen or sub_frozen:
+ with pytest.raises(FrozenInstanceError):
+ i.a = "2"
+
+ with pytest.raises(FrozenInstanceError):
+ i.b = "3"
+
+ def test_tuple_class_aliasing(self):
+ """
+ itemgetter and property are legal attribute names.
+ """
+
+ @attr.s
+ class C(object):
+ property = attr.ib()
+ itemgetter = attr.ib()
+ x = attr.ib()
+
+ assert "property" == attr.fields(C).property.name
+ assert "itemgetter" == attr.fields(C).itemgetter.name
+ assert "x" == attr.fields(C).x.name
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_auto_exc(self, slots, frozen):
+ """
+        Classes with auto_exc=True have an Exception-style __str__, compare and
+ hash by id, and store the fields additionally in self.args.
+ """
+
+ @attr.s(auto_exc=True, slots=slots, frozen=frozen)
+ class FooError(Exception):
+ x = attr.ib()
+ y = attr.ib(init=False, default=42)
+ z = attr.ib(init=False)
+ a = attr.ib()
+
+ FooErrorMade = attr.make_class(
+ "FooErrorMade",
+ bases=(Exception,),
+ attrs={
+ "x": attr.ib(),
+ "y": attr.ib(init=False, default=42),
+ "z": attr.ib(init=False),
+ "a": attr.ib(),
+ },
+ auto_exc=True,
+ slots=slots,
+ frozen=frozen,
+ )
+
+ assert FooError(1, "foo") != FooError(1, "foo")
+ assert FooErrorMade(1, "foo") != FooErrorMade(1, "foo")
+
+ for cls in (FooError, FooErrorMade):
+ with pytest.raises(cls) as ei1:
+ raise cls(1, "foo")
+
+ with pytest.raises(cls) as ei2:
+ raise cls(1, "foo")
+
+ e1 = ei1.value
+ e2 = ei2.value
+
+ assert e1 is e1
+ assert e1 == e1
+ assert e2 == e2
+ assert e1 != e2
+ assert "(1, 'foo')" == str(e1) == str(e2)
+ assert (1, "foo") == e1.args == e2.args
+
+            assert hash(e1) == hash(e1)
+            assert hash(e2) == hash(e2)
+
+ if not frozen:
+ deepcopy(e1)
+ deepcopy(e2)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_auto_exc_one_attrib(self, slots, frozen):
+ """
+ Having one attribute works with auto_exc=True.
+
+ Easy to get wrong with tuple literals.
+ """
+
+ @attr.s(auto_exc=True, slots=slots, frozen=frozen)
+ class FooError(Exception):
+ x = attr.ib()
+
+ FooError(1)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_eq_only(self, slots, frozen):
+ """
+ Classes with order=False cannot be ordered.
+
+        Python 3 throws a TypeError; on Python 2 we have to check for the
+        absence of the ordering methods.
+ """
+
+ @attr.s(eq=True, order=False, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ if not PY2:
+ possible_errors = (
+ "unorderable types: C() < C()",
+ "'<' not supported between instances of 'C' and 'C'",
+ "unorderable types: C < C", # old PyPy 3
+ )
+
+ with pytest.raises(TypeError) as ei:
+ C(5) < C(6)
+
+ assert ei.value.args[0] in possible_errors
+ else:
+ i = C(42)
+ for m in ("lt", "le", "gt", "ge"):
+ assert None is getattr(i, "__%s__" % (m,), None)
+
+ @given(cmp=optional_bool, eq=optional_bool, order=optional_bool)
+ def test_cmp_deprecated_attribute(self, cmp, eq, order):
+ """
+ Accessing Attribute.cmp raises a deprecation warning but returns True
+ if cmp is True, or eq and order are *both* effectively True.
+ """
+ # These cases are invalid and raise a ValueError.
+ assume(cmp is None or (eq is None and order is None))
+ assume(not (eq is False and order is True))
+
+ if cmp is not None:
+ rv = cmp
+ elif eq is True or eq is None:
+ rv = order is None or order is True
+ elif cmp is None and eq is None and order is None:
+ rv = True
+ elif cmp is None or eq is None:
+ rv = False
+ else:
+ pytest.fail(
+ "Unexpected state: cmp=%r eq=%r order=%r" % (cmp, eq, order)
+ )
+
+ with pytest.deprecated_call() as dc:
+
+ @attr.s
+ class C(object):
+ x = attr.ib(cmp=cmp, eq=eq, order=order)
+
+ assert rv == attr.fields(C).x.cmp
+
+ (w,) = dc.list
+
+ assert (
+ "The usage of `cmp` is deprecated and will be removed on or after "
+ "2021-06-01. Please use `eq` and `order` instead."
+ == w.message.args[0]
+ )
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_no_setattr_if_validate_without_validators(self, slots):
+ """
+ If a class has on_setattr=attr.setters.validate (former default in NG
+ APIs) but sets no validators, don't use the (slower) setattr in
+ __init__.
+
+ Regression test for #816.
+ """
+
+ @attr.s(on_setattr=attr.setters.validate)
+ class C(object):
+ x = attr.ib()
+
+ @attr.s(on_setattr=attr.setters.validate)
+ class D(C):
+ y = attr.ib()
+
+ src = inspect.getsource(D.__init__)
+
+ assert "setattr" not in src
+ assert "self.x = x" in src
+ assert "self.y = y" in src
+ assert object.__setattr__ == D.__setattr__
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_no_setattr_if_convert_without_converters(self, slots):
+ """
+        If a class has on_setattr=attr.setters.convert but sets no converters,
+ don't use the (slower) setattr in __init__.
+ """
+
+ @attr.s(on_setattr=attr.setters.convert)
+ class C(object):
+ x = attr.ib()
+
+ @attr.s(on_setattr=attr.setters.convert)
+ class D(C):
+ y = attr.ib()
+
+ src = inspect.getsource(D.__init__)
+
+ assert "setattr" not in src
+ assert "self.x = x" in src
+ assert "self.y = y" in src
+ assert object.__setattr__ == D.__setattr__
+
+ @pytest.mark.skipif(not PY36, reason="NG APIs are 3.6+")
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_no_setattr_with_ng_defaults(self, slots):
+ """
+ If a class has the NG default on_setattr=[convert, validate] but sets
+ no validators or converters, don't use the (slower) setattr in
+ __init__.
+ """
+
+ @attr.define
+ class C(object):
+ x = attr.ib()
+
+ src = inspect.getsource(C.__init__)
+
+ assert "setattr" not in src
+ assert "self.x = x" in src
+ assert object.__setattr__ == C.__setattr__
+
+ @attr.define
+ class D(C):
+ y = attr.ib()
+
+ src = inspect.getsource(D.__init__)
+
+ assert "setattr" not in src
+ assert "self.x = x" in src
+ assert "self.y = y" in src
+ assert object.__setattr__ == D.__setattr__
+
+ def test_on_setattr_detect_inherited_validators(self):
+ """
+ _make_init detects the presence of a validator even if the field is
+ inherited.
+ """
+
+ @attr.s(on_setattr=attr.setters.validate)
+ class C(object):
+ x = attr.ib(validator=42)
+
+ @attr.s(on_setattr=attr.setters.validate)
+ class D(C):
+ y = attr.ib()
+
+ src = inspect.getsource(D.__init__)
+
+ assert "_setattr = _cached_setattr" in src
+ assert "_setattr('x', x)" in src
+ assert "_setattr('y', y)" in src
+ assert object.__setattr__ != D.__setattr__
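+
+
+def _example_on_setattr_validate():
+    # Illustrative sketch (not part of the tests above): with
+    # on_setattr=attr.setters.validate, the field validators also run on
+    # every later attribute assignment, not just inside __init__.
+    @attr.s(on_setattr=attr.setters.validate)
+    class C(object):
+        x = attr.ib(validator=attr.validators.instance_of(int))
+
+    c = C(1)
+    c.x = 2  # passes the validator
+
+    with pytest.raises(TypeError):
+        c.x = "nope"  # rejected by instance_of(int)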
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_hooks.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_hooks.py
new file mode 100644
index 0000000000..92fc2dcaab
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_hooks.py
@@ -0,0 +1,209 @@
+# SPDX-License-Identifier: MIT
+
+from datetime import datetime
+from typing import Dict, List
+
+import attr
+
+
+class TestTransformHook:
+ """
+    Tests for `attr.s(field_transformer=func)`.
+ """
+
+ def test_hook_applied(self):
+ """
+ The transform hook is applied to all attributes. Types can be missing,
+ explicitly set, or annotated.
+ """
+ results = []
+
+ def hook(cls, attribs):
+ attr.resolve_types(cls, attribs=attribs)
+ results[:] = [(a.name, a.type) for a in attribs]
+ return attribs
+
+ @attr.s(field_transformer=hook)
+ class C:
+ x = attr.ib()
+ y = attr.ib(type=int)
+ z: float = attr.ib()
+
+ assert results == [("x", None), ("y", int), ("z", float)]
+
+ def test_hook_applied_auto_attrib(self):
+ """
+ The transform hook is applied to all attributes and type annotations
+ are detected.
+ """
+ results = []
+
+ def hook(cls, attribs):
+ attr.resolve_types(cls, attribs=attribs)
+ results[:] = [(a.name, a.type) for a in attribs]
+ return attribs
+
+ @attr.s(auto_attribs=True, field_transformer=hook)
+ class C:
+ x: int
+ y: str = attr.ib()
+
+ assert results == [("x", int), ("y", str)]
+
+ def test_hook_applied_modify_attrib(self):
+ """
+ The transform hook can modify attributes.
+ """
+
+ def hook(cls, attribs):
+ attr.resolve_types(cls, attribs=attribs)
+ return [a.evolve(converter=a.type) for a in attribs]
+
+ @attr.s(auto_attribs=True, field_transformer=hook)
+ class C:
+ x: int = attr.ib(converter=int)
+ y: float
+
+ c = C(x="3", y="3.14")
+ assert c == C(x=3, y=3.14)
+
+ def test_hook_remove_field(self):
+ """
+ It is possible to remove fields via the hook.
+ """
+
+ def hook(cls, attribs):
+ attr.resolve_types(cls, attribs=attribs)
+ return [a for a in attribs if a.type is not int]
+
+ @attr.s(auto_attribs=True, field_transformer=hook)
+ class C:
+ x: int
+ y: float
+
+ assert attr.asdict(C(2.7)) == {"y": 2.7}
+
+ def test_hook_add_field(self):
+ """
+ It is possible to add fields via the hook.
+ """
+
+ def hook(cls, attribs):
+ a1 = attribs[0]
+ a2 = a1.evolve(name="new")
+ return [a1, a2]
+
+ @attr.s(auto_attribs=True, field_transformer=hook)
+ class C:
+ x: int
+
+ assert attr.asdict(C(1, 2)) == {"x": 1, "new": 2}
+
+ def test_hook_with_inheritance(self):
+ """
+ The hook receives all fields from base classes.
+ """
+
+ def hook(cls, attribs):
+ assert [a.name for a in attribs] == ["x", "y"]
+            # Remove Base's "x".
+ return attribs[1:]
+
+ @attr.s(auto_attribs=True)
+ class Base:
+ x: int
+
+ @attr.s(auto_attribs=True, field_transformer=hook)
+ class Sub(Base):
+ y: int
+
+ assert attr.asdict(Sub(2)) == {"y": 2}
+
+ def test_attrs_attrclass(self):
+ """
+ The list of attrs returned by a field_transformer is converted to
+ "AttrsClass" again.
+
+ Regression test for #821.
+ """
+
+ @attr.s(auto_attribs=True, field_transformer=lambda c, a: list(a))
+ class C:
+ x: int
+
+ fields_type = type(attr.fields(C))
+ assert fields_type.__name__ == "CAttributes"
+ assert issubclass(fields_type, tuple)
+
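+def _example_field_transformer_defaults():
+    # Illustrative sketch (not part of the tests above): a field_transformer
+    # can rewrite field metadata wholesale, e.g. give every field a default.
+    # `all_optional` and `C` are hypothetical.
+    def all_optional(cls, fields):
+        return [f.evolve(default=None) for f in fields]
+
+    @attr.s(auto_attribs=True, field_transformer=all_optional)
+    class C:
+        x: int
+        y: str
+
+    assert C(x=None, y=None) == C()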
+
+class TestAsDictHook:
+ def test_asdict(self):
+ """
+        asdict() calls the hook for attrs classes and for values nested in
+        other data structures like lists or dicts.
+ """
+
+ def hook(inst, a, v):
+ if isinstance(v, datetime):
+ return v.isoformat()
+ return v
+
+ @attr.dataclass
+ class Child:
+ x: datetime
+ y: List[datetime]
+
+ @attr.dataclass
+ class Parent:
+ a: Child
+ b: List[Child]
+ c: Dict[str, Child]
+ d: Dict[str, datetime]
+
+ inst = Parent(
+ a=Child(1, [datetime(2020, 7, 1)]),
+ b=[Child(2, [datetime(2020, 7, 2)])],
+ c={"spam": Child(3, [datetime(2020, 7, 3)])},
+ d={"eggs": datetime(2020, 7, 4)},
+ )
+
+ result = attr.asdict(inst, value_serializer=hook)
+ assert result == {
+ "a": {"x": 1, "y": ["2020-07-01T00:00:00"]},
+ "b": [{"x": 2, "y": ["2020-07-02T00:00:00"]}],
+ "c": {"spam": {"x": 3, "y": ["2020-07-03T00:00:00"]}},
+ "d": {"eggs": "2020-07-04T00:00:00"},
+ }
+
+ def test_asdict_calls(self):
+ """
+ The correct instances and attribute names are passed to the hook.
+ """
+ calls = []
+
+ def hook(inst, a, v):
+ calls.append((inst, a.name if a else a, v))
+ return v
+
+ @attr.dataclass
+ class Child:
+ x: int
+
+ @attr.dataclass
+ class Parent:
+ a: Child
+ b: List[Child]
+ c: Dict[str, Child]
+
+ inst = Parent(a=Child(1), b=[Child(2)], c={"spam": Child(3)})
+
+ attr.asdict(inst, value_serializer=hook)
+ assert calls == [
+ (inst, "a", inst.a),
+ (inst.a, "x", inst.a.x),
+ (inst, "b", inst.b),
+ (inst.b[0], "x", inst.b[0].x),
+ (inst, "c", inst.c),
+ (None, None, "spam"),
+ (inst.c["spam"], "x", inst.c["spam"].x),
+ ]
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_import.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_import.py
new file mode 100644
index 0000000000..423124319c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_import.py
@@ -0,0 +1,11 @@
+# SPDX-License-Identifier: MIT
+
+
+class TestImportStar(object):
+ def test_from_attr_import_star(self):
+ """
+ import * from attr
+ """
+ # attr_import_star contains `from attr import *`, which cannot
+ # be done here because *-imports are only allowed on module level.
+ from . import attr_import_star # noqa: F401
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_init_subclass.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_init_subclass.py
new file mode 100644
index 0000000000..863e794377
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_init_subclass.py
@@ -0,0 +1,48 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests related to `__init_subclass__`.
+
+Python 3.6+ only.
+"""
+
+import pytest
+
+import attr
+
+
+@pytest.mark.parametrize("slots", [True, False])
+def test_init_subclass_vanilla(slots):
+ """
+    `super().__init_subclass__` can be used if the subclass is not an attrs
+    class, with both dict and slotted classes.
+ """
+
+ @attr.s(slots=slots)
+ class Base:
+ def __init_subclass__(cls, param, **kw):
+ super().__init_subclass__(**kw)
+ cls.param = param
+
+ class Vanilla(Base, param="foo"):
+ pass
+
+ assert "foo" == Vanilla().param
+
+
+def test_init_subclass_attrs():
+ """
+ `__init_subclass__` works with attrs classes as long as slots=False.
+ """
+
+ @attr.s(slots=False)
+ class Base:
+ def __init_subclass__(cls, param, **kw):
+ super().__init_subclass__(**kw)
+ cls.param = param
+
+ @attr.s
+ class Attrs(Base, param="foo"):
+ pass
+
+ assert "foo" == Attrs().param
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_make.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_make.py
new file mode 100644
index 0000000000..729d3a71f0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_make.py
@@ -0,0 +1,2462 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for `attr._make`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import copy
+import functools
+import gc
+import inspect
+import itertools
+import sys
+
+from operator import attrgetter
+
+import pytest
+
+from hypothesis import assume, given
+from hypothesis.strategies import booleans, integers, lists, sampled_from, text
+
+import attr
+
+from attr import _config
+from attr._compat import PY2, PY310, ordered_dict
+from attr._make import (
+ Attribute,
+ Factory,
+ _AndValidator,
+ _Attributes,
+ _ClassBuilder,
+ _CountingAttr,
+ _determine_attrib_eq_order,
+ _determine_attrs_eq_order,
+ _determine_whether_to_implement,
+ _transform_attrs,
+ and_,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
+)
+from attr.exceptions import (
+ DefaultAlreadySetError,
+ NotAnAttrsClassError,
+ PythonTooOldError,
+)
+
+from .strategies import (
+ gen_attr_names,
+ list_of_attrs,
+ optional_bool,
+ simple_attrs,
+ simple_attrs_with_metadata,
+ simple_attrs_without_metadata,
+ simple_classes,
+)
+from .utils import simple_attr
+
+
+attrs_st = simple_attrs.map(lambda c: Attribute.from_counting_attr("name", c))
+
+
+class TestCountingAttr(object):
+ """
+ Tests for `attr`.
+ """
+
+ def test_returns_Attr(self):
+ """
+ Returns an instance of _CountingAttr.
+ """
+ a = attr.ib()
+
+ assert isinstance(a, _CountingAttr)
+
+ def test_validators_lists_to_wrapped_tuples(self):
+ """
+        If a list is passed as a validator, it's wrapped in an `and_` validator.
+ """
+
+ def v1(_, __):
+ pass
+
+ def v2(_, __):
+ pass
+
+ a = attr.ib(validator=[v1, v2])
+
+ assert _AndValidator((v1, v2)) == a._validator
+
+ def test_validator_decorator_single(self):
+ """
+ If _CountingAttr.validator is used as a decorator and there is no
+ decorator set, the decorated method is used as the validator.
+ """
+ a = attr.ib()
+
+ @a.validator
+ def v():
+ pass
+
+ assert v == a._validator
+
+ @pytest.mark.parametrize(
+ "wrap", [lambda v: v, lambda v: [v], lambda v: and_(v)]
+ )
+ def test_validator_decorator(self, wrap):
+ """
+ If _CountingAttr.validator is used as a decorator and there is already
+        a validator set, the validators are composed using `and_`.
+ """
+
+ def v(_, __):
+ pass
+
+ a = attr.ib(validator=wrap(v))
+
+ @a.validator
+ def v2(self, _, __):
+ pass
+
+ assert _AndValidator((v, v2)) == a._validator
+
+ def test_default_decorator_already_set(self):
+ """
+ Raise DefaultAlreadySetError if the decorator is used after a default
+ has been set.
+ """
+ a = attr.ib(default=42)
+
+ with pytest.raises(DefaultAlreadySetError):
+
+ @a.default
+ def f(self):
+ pass
+
+ def test_default_decorator_sets(self):
+ """
+        Decorator wraps the method in a Factory with takes_self=True and sets
+ the default.
+ """
+ a = attr.ib()
+
+ @a.default
+ def f(self):
+ pass
+
+ assert Factory(f, True) == a._default
+
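+def _example_default_and_validator_decorators():
+    # Illustrative sketch (not part of the tests above): the @x.default and
+    # @x.validator decorators exercised here are the public way to attach a
+    # computed default and a validator to a field.  `C` is hypothetical.
+    @attr.s
+    class C(object):
+        x = attr.ib()
+        y = attr.ib()
+
+        @y.default
+        def _y_default(self):
+            return self.x * 2
+
+        @y.validator
+        def _check_y(self, attribute, value):
+            if value < self.x:
+                raise ValueError("y must not be smaller than x")
+
+    assert 6 == C(3).y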
+
+def make_tc():
+ class TransformC(object):
+ z = attr.ib()
+ y = attr.ib()
+ x = attr.ib()
+ a = 42
+
+ return TransformC
+
+
+class TestTransformAttrs(object):
+ """
+ Tests for `_transform_attrs`.
+ """
+
+ def test_no_modifications(self):
+ """
+ Does not attach __attrs_attrs__ to the class.
+ """
+ C = make_tc()
+ _transform_attrs(C, None, False, False, True, None)
+
+ assert None is getattr(C, "__attrs_attrs__", None)
+
+ def test_normal(self):
+ """
+        Transforms every `_CountingAttr` and leaves others (like `a`) be.
+ """
+ C = make_tc()
+ attrs, _, _ = _transform_attrs(C, None, False, False, True, None)
+
+ assert ["z", "y", "x"] == [a.name for a in attrs]
+
+ def test_empty(self):
+ """
+ No attributes works as expected.
+ """
+
+ @attr.s
+ class C(object):
+ pass
+
+ assert _Attributes(((), [], {})) == _transform_attrs(
+ C, None, False, False, True, None
+ )
+
+ def test_transforms_to_attribute(self):
+ """
+ All `_CountingAttr`s are transformed into `Attribute`s.
+ """
+ C = make_tc()
+ attrs, base_attrs, _ = _transform_attrs(
+ C, None, False, False, True, None
+ )
+
+ assert [] == base_attrs
+ assert 3 == len(attrs)
+ assert all(isinstance(a, Attribute) for a in attrs)
+
+ def test_conflicting_defaults(self):
+ """
+ Raises `ValueError` if attributes with defaults are followed by
+ mandatory attributes.
+ """
+
+ class C(object):
+ x = attr.ib(default=None)
+ y = attr.ib()
+
+ with pytest.raises(ValueError) as e:
+ _transform_attrs(C, None, False, False, True, None)
+ assert (
+ "No mandatory attributes allowed after an attribute with a "
+ "default value or factory. Attribute in question: Attribute"
+ "(name='y', default=NOTHING, validator=None, repr=True, "
+ "eq=True, eq_key=None, order=True, order_key=None, "
+ "hash=None, init=True, "
+ "metadata=mappingproxy({}), type=None, converter=None, "
+ "kw_only=False, inherited=False, on_setattr=None)",
+ ) == e.value.args
+
+ def test_kw_only(self):
+ """
+ Converts all attributes, including base class' attributes, if `kw_only`
+ is provided. Therefore, `kw_only` allows attributes with defaults to
+        precede mandatory attributes.
+
+ Updates in the subclass *don't* affect the base class attributes.
+ """
+
+ @attr.s
+ class B(object):
+ b = attr.ib()
+
+ for b_a in B.__attrs_attrs__:
+ assert b_a.kw_only is False
+
+ class C(B):
+ x = attr.ib(default=None)
+ y = attr.ib()
+
+ attrs, base_attrs, _ = _transform_attrs(
+ C, None, False, True, True, None
+ )
+
+ assert len(attrs) == 3
+ assert len(base_attrs) == 1
+
+ for a in attrs:
+ assert a.kw_only is True
+
+ for b_a in B.__attrs_attrs__:
+ assert b_a.kw_only is False
+
+ def test_these(self):
+ """
+ If these is passed, use it and ignore body and base classes.
+ """
+
+ class Base(object):
+ z = attr.ib()
+
+ class C(Base):
+ y = attr.ib()
+
+ attrs, base_attrs, _ = _transform_attrs(
+ C, {"x": attr.ib()}, False, False, True, None
+ )
+
+ assert [] == base_attrs
+ assert (simple_attr("x"),) == attrs
+
+ def test_these_leave_body(self):
+ """
+ If these is passed, no attributes are removed from the body.
+ """
+
+ @attr.s(init=False, these={"x": attr.ib()})
+ class C(object):
+ x = 5
+
+ assert 5 == C().x
+ assert "C(x=5)" == repr(C())
+
+ def test_these_ordered(self):
+ """
+        If these is passed ordered attrs, their order is respected instead of
+        the counter.
+ """
+ b = attr.ib(default=2)
+ a = attr.ib(default=1)
+
+ @attr.s(these=ordered_dict([("a", a), ("b", b)]))
+ class C(object):
+ pass
+
+ assert "C(a=1, b=2)" == repr(C())
+
+ def test_multiple_inheritance_old(self):
+ """
+        Old multiple inheritance attribute collection behavior is retained.
+
+ See #285
+ """
+
+ @attr.s
+ class A(object):
+ a1 = attr.ib(default="a1")
+ a2 = attr.ib(default="a2")
+
+ @attr.s
+ class B(A):
+ b1 = attr.ib(default="b1")
+ b2 = attr.ib(default="b2")
+
+ @attr.s
+ class C(B, A):
+ c1 = attr.ib(default="c1")
+ c2 = attr.ib(default="c2")
+
+ @attr.s
+ class D(A):
+ d1 = attr.ib(default="d1")
+ d2 = attr.ib(default="d2")
+
+ @attr.s
+ class E(C, D):
+ e1 = attr.ib(default="e1")
+ e2 = attr.ib(default="e2")
+
+ assert (
+ "E(a1='a1', a2='a2', b1='b1', b2='b2', c1='c1', c2='c2', d1='d1', "
+ "d2='d2', e1='e1', e2='e2')"
+ ) == repr(E())
+
+ def test_overwrite_proper_mro(self):
+ """
+        The proper MRO path works for single overwrites, too.
+ """
+
+ @attr.s(collect_by_mro=True)
+ class C(object):
+ x = attr.ib(default=1)
+
+ @attr.s(collect_by_mro=True)
+ class D(C):
+ x = attr.ib(default=2)
+
+ assert "D(x=2)" == repr(D())
+
+ def test_multiple_inheritance_proper_mro(self):
+ """
+ Attributes are collected according to the MRO.
+
+ See #428
+ """
+
+ @attr.s
+ class A(object):
+ a1 = attr.ib(default="a1")
+ a2 = attr.ib(default="a2")
+
+ @attr.s
+ class B(A):
+ b1 = attr.ib(default="b1")
+ b2 = attr.ib(default="b2")
+
+ @attr.s
+ class C(B, A):
+ c1 = attr.ib(default="c1")
+ c2 = attr.ib(default="c2")
+
+ @attr.s
+ class D(A):
+ d1 = attr.ib(default="d1")
+ d2 = attr.ib(default="d2")
+
+ @attr.s(collect_by_mro=True)
+ class E(C, D):
+ e1 = attr.ib(default="e1")
+ e2 = attr.ib(default="e2")
+
+ assert (
+ "E(a1='a1', a2='a2', d1='d1', d2='d2', b1='b1', b2='b2', c1='c1', "
+ "c2='c2', e1='e1', e2='e2')"
+ ) == repr(E())
+
+ def test_mro(self):
+ """
+ Attributes and methods are looked up the same way.
+
+ See #428
+ """
+
+ @attr.s(collect_by_mro=True)
+ class A(object):
+
+ x = attr.ib(10)
+
+ def xx(self):
+ return 10
+
+ @attr.s(collect_by_mro=True)
+ class B(A):
+ y = attr.ib(20)
+
+ @attr.s(collect_by_mro=True)
+ class C(A):
+ x = attr.ib(50)
+
+ def xx(self):
+ return 50
+
+ @attr.s(collect_by_mro=True)
+ class D(B, C):
+ pass
+
+ d = D()
+
+ assert d.x == d.xx()
+
+ def test_inherited(self):
+ """
+ Inherited Attributes have `.inherited` True, otherwise False.
+ """
+
+ @attr.s
+ class A(object):
+ a = attr.ib()
+
+ @attr.s
+ class B(A):
+ b = attr.ib()
+
+ @attr.s
+ class C(B):
+ a = attr.ib()
+ c = attr.ib()
+
+ f = attr.fields
+
+ assert False is f(A).a.inherited
+
+ assert True is f(B).a.inherited
+ assert False is f(B).b.inherited
+
+ assert False is f(C).a.inherited
+ assert True is f(C).b.inherited
+ assert False is f(C).c.inherited
+
+
+class TestAttributes(object):
+ """
+ Tests for the `attrs`/`attr.s` class decorator.
+ """
+
+ @pytest.mark.skipif(not PY2, reason="No old-style classes in Py3")
+ def test_catches_old_style(self):
+ """
+ Raises TypeError on old-style classes.
+ """
+ with pytest.raises(TypeError) as e:
+
+ @attr.s
+ class C:
+ pass
+
+ assert ("attrs only works with new-style classes.",) == e.value.args
+
+ def test_sets_attrs(self):
+ """
+ Sets the `__attrs_attrs__` class attribute with a list of `Attribute`s.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib()
+
+ assert "x" == C.__attrs_attrs__[0].name
+ assert all(isinstance(a, Attribute) for a in C.__attrs_attrs__)
+
+ def test_empty(self):
+ """
+ No attributes, no problems.
+ """
+
+ @attr.s
+ class C3(object):
+ pass
+
+ assert "C3()" == repr(C3())
+ assert C3() == C3()
+
+ @given(attr=attrs_st, attr_name=sampled_from(Attribute.__slots__))
+ def test_immutable(self, attr, attr_name):
+ """
+ Attribute instances are immutable.
+ """
+ with pytest.raises(AttributeError):
+ setattr(attr, attr_name, 1)
+
+ @pytest.mark.parametrize(
+ "method_name", ["__repr__", "__eq__", "__hash__", "__init__"]
+ )
+ def test_adds_all_by_default(self, method_name):
+ """
+ If no further arguments are supplied, all add_XXX functions except
+ add_hash are applied. __hash__ is set to None.
+ """
+ # Set the method name to a sentinel and check whether it has been
+ # overwritten afterwards.
+ sentinel = object()
+
+ class C(object):
+ x = attr.ib()
+
+ setattr(C, method_name, sentinel)
+
+ C = attr.s(C)
+ meth = getattr(C, method_name)
+
+ assert sentinel != meth
+ if method_name == "__hash__":
+ assert meth is None
+
+ @pytest.mark.parametrize(
+ "arg_name, method_name",
+ [
+ ("repr", "__repr__"),
+ ("eq", "__eq__"),
+ ("order", "__le__"),
+ ("hash", "__hash__"),
+ ("init", "__init__"),
+ ],
+ )
+ def test_respects_add_arguments(self, arg_name, method_name):
+ """
+ If a certain `XXX` is `False`, `__XXX__` is not added to the class.
+ """
+ # Set the method name to a sentinel and check whether it has been
+ # overwritten afterwards.
+ sentinel = object()
+
+ am_args = {
+ "repr": True,
+ "eq": True,
+ "order": True,
+ "hash": True,
+ "init": True,
+ }
+ am_args[arg_name] = False
+ if arg_name == "eq":
+ am_args["order"] = False
+
+ class C(object):
+ x = attr.ib()
+
+ setattr(C, method_name, sentinel)
+
+ C = attr.s(**am_args)(C)
+
+ assert sentinel == getattr(C, method_name)
+
+ @pytest.mark.parametrize("init", [True, False])
+ def test_respects_init_attrs_init(self, init):
+ """
+ If init=False, adds __attrs_init__ to the class.
+ Otherwise, it does not.
+ """
+
+ class C(object):
+ x = attr.ib()
+
+ C = attr.s(init=init)(C)
+ assert hasattr(C, "__attrs_init__") != init
+
+ @pytest.mark.skipif(PY2, reason="__qualname__ is PY3-only.")
+ @given(slots_outer=booleans(), slots_inner=booleans())
+ def test_repr_qualname(self, slots_outer, slots_inner):
+ """
+ On Python 3, the name in repr is the __qualname__.
+ """
+
+ @attr.s(slots=slots_outer)
+ class C(object):
+ @attr.s(slots=slots_inner)
+ class D(object):
+ pass
+
+ assert "C.D()" == repr(C.D())
+ assert "GC.D()" == repr(GC.D())
+
+ @given(slots_outer=booleans(), slots_inner=booleans())
+ def test_repr_fake_qualname(self, slots_outer, slots_inner):
+ """
+ Setting repr_ns overrides a potentially guessed namespace.
+ """
+
+ @attr.s(slots=slots_outer)
+ class C(object):
+ @attr.s(repr_ns="C", slots=slots_inner)
+ class D(object):
+ pass
+
+ assert "C.D()" == repr(C.D())
+
+ @pytest.mark.skipif(PY2, reason="__qualname__ is PY3-only.")
+ @given(slots_outer=booleans(), slots_inner=booleans())
+ def test_name_not_overridden(self, slots_outer, slots_inner):
+ """
+ On Python 3, __name__ is different from __qualname__.
+ """
+
+ @attr.s(slots=slots_outer)
+ class C(object):
+ @attr.s(slots=slots_inner)
+ class D(object):
+ pass
+
+ assert C.D.__name__ == "D"
+ assert C.D.__qualname__ == C.__qualname__ + ".D"
+
+ @pytest.mark.parametrize("with_validation", [True, False])
+ def test_pre_init(self, with_validation, monkeypatch):
+ """
+ Verify that __attrs_pre_init__ gets called if defined.
+ """
+ monkeypatch.setattr(_config, "_run_validators", with_validation)
+
+ @attr.s
+ class C(object):
+ def __attrs_pre_init__(self2):
+ self2.z = 30
+
+ c = C()
+
+ assert 30 == getattr(c, "z", None)
+
+ @pytest.mark.parametrize("with_validation", [True, False])
+ def test_post_init(self, with_validation, monkeypatch):
+ """
+ Verify that __attrs_post_init__ gets called if defined.
+ """
+ monkeypatch.setattr(_config, "_run_validators", with_validation)
+
+ @attr.s
+ class C(object):
+ x = attr.ib()
+ y = attr.ib()
+
+ def __attrs_post_init__(self2):
+ self2.z = self2.x + self2.y
+
+ c = C(x=10, y=20)
+
+ assert 30 == getattr(c, "z", None)
+
+ @pytest.mark.parametrize("with_validation", [True, False])
+ def test_pre_post_init_order(self, with_validation, monkeypatch):
+ """
+        Verify that __attrs_pre_init__ and __attrs_post_init__ run in the
+        expected order when both are defined.
+ """
+ monkeypatch.setattr(_config, "_run_validators", with_validation)
+
+ @attr.s
+ class C(object):
+ x = attr.ib()
+
+ def __attrs_pre_init__(self2):
+ self2.z = 30
+
+ def __attrs_post_init__(self2):
+ self2.z += self2.x
+
+ c = C(x=10)
+
+ assert 40 == getattr(c, "z", None)
+
+ def test_types(self):
+ """
+ Sets the `Attribute.type` attr from type argument.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib(type=int)
+ y = attr.ib(type=str)
+ z = attr.ib()
+
+ assert int is fields(C).x.type
+ assert str is fields(C).y.type
+ assert None is fields(C).z.type
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_clean_class(self, slots):
+ """
+ Attribute definitions do not appear on the class body after @attr.s.
+ """
+
+ @attr.s(slots=slots)
+ class C(object):
+ x = attr.ib()
+
+ x = getattr(C, "x", None)
+
+ assert not isinstance(x, _CountingAttr)
+
+ def test_factory_sugar(self):
+ """
+ Passing factory=f is syntactic sugar for passing default=Factory(f).
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib(factory=list)
+
+ assert Factory(list) == attr.fields(C).x.default
+
+ def test_sugar_factory_mutex(self):
+ """
+ Passing both default and factory raises ValueError.
+ """
+ with pytest.raises(ValueError, match="mutually exclusive"):
+
+ @attr.s
+ class C(object):
+ x = attr.ib(factory=list, default=Factory(list))
+
+ def test_sugar_callable(self):
+ """
+ Factory has to be a callable to prevent people from passing Factory
+ into it.
+ """
+ with pytest.raises(ValueError, match="must be a callable"):
+
+ @attr.s
+ class C(object):
+ x = attr.ib(factory=Factory(list))
+
+ def test_inherited_does_not_affect_hashing_and_equality(self):
+ """
+ Whether or not an Attribute has been inherited doesn't affect how it's
+ hashed and compared.
+ """
+
+ @attr.s
+ class BaseClass(object):
+ x = attr.ib()
+
+ @attr.s
+ class SubClass(BaseClass):
+ pass
+
+ ba = attr.fields(BaseClass)[0]
+ sa = attr.fields(SubClass)[0]
+
+ assert ba == sa
+ assert hash(ba) == hash(sa)
+
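+def _example_pre_post_init():
+    # Illustrative sketch (not part of the tests above): __attrs_pre_init__
+    # runs before and __attrs_post_init__ after the generated __init__ body.
+    calls = []
+
+    @attr.s
+    class C(object):
+        x = attr.ib()
+
+        def __attrs_pre_init__(self):
+            calls.append("pre")
+
+        def __attrs_post_init__(self):
+            calls.append(("post", self.x))
+
+    C(42)
+
+    assert ["pre", ("post", 42)] == calls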
+
+class TestKeywordOnlyAttributes(object):
+ """
+ Tests for keyword-only attributes.
+ """
+
+ def test_adds_keyword_only_arguments(self):
+ """
+ Attributes can be added as keyword-only.
+ """
+
+ @attr.s
+ class C(object):
+ a = attr.ib()
+ b = attr.ib(default=2, kw_only=True)
+ c = attr.ib(kw_only=True)
+ d = attr.ib(default=attr.Factory(lambda: 4), kw_only=True)
+
+ c = C(1, c=3)
+
+ assert c.a == 1
+ assert c.b == 2
+ assert c.c == 3
+ assert c.d == 4
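+ # The generated __init__ is roughly equivalent to a signature like
+ #   def __init__(self, a, *, b=2, c, d=<factory>):
+ # i.e. kw_only attributes are moved behind the bare `*`.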
+
+ def test_ignores_kw_only_when_init_is_false(self):
+ """
+ Specifying ``kw_only=True`` when ``init=False`` is essentially a no-op.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib(init=False, default=0, kw_only=True)
+ y = attr.ib()
+
+ c = C(1)
+
+ assert c.x == 0
+ assert c.y == 1
+
+ def test_keyword_only_attributes_presence(self):
+ """
+ Raises `TypeError` when keyword-only arguments are
+ not specified.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib(kw_only=True)
+
+ with pytest.raises(TypeError) as e:
+ C()
+
+ if PY2:
+ assert (
+ "missing required keyword-only argument: 'x'"
+ ) in e.value.args[0]
+ else:
+ assert (
+ "missing 1 required keyword-only argument: 'x'"
+ ) in e.value.args[0]
+
+ def test_keyword_only_attributes_unexpected(self):
+ """
+ Raises `TypeError` when unexpected keyword argument passed.
+ """
+
+ @attr.s
+ class C(object):
+ x = attr.ib(kw_only=True)
+
+ with pytest.raises(TypeError) as e:
+ C(x=5, y=10)
+
+ assert "got an unexpected keyword argument 'y'" in e.value.args[0]
+
+ def test_keyword_only_attributes_can_come_in_any_order(self):
+ """
+ Mandatory vs. non-mandatory attr order only matters for attributes that
+ are part of the __init__ signature and aren't kw_only: kw_only attributes
+ are moved to the end and may be mandatory or non-mandatory in any order,
+ since they are passed as keyword arguments anyway.
+ """
+
+ @attr.s
+ class C(object):
+ a = attr.ib(kw_only=True)
+ b = attr.ib(kw_only=True, default="b")
+ c = attr.ib(kw_only=True)
+ d = attr.ib()
+ e = attr.ib(default="e")
+ f = attr.ib(kw_only=True)
+ g = attr.ib(kw_only=True, default="g")
+ h = attr.ib(kw_only=True)
+ i = attr.ib(init=False)
+
+ c = C("d", a="a", c="c", f="f", h="h")
+
+ assert c.a == "a"
+ assert c.b == "b"
+ assert c.c == "c"
+ assert c.d == "d"
+ assert c.e == "e"
+ assert c.f == "f"
+ assert c.g == "g"
+ assert c.h == "h"
+
+ def test_keyword_only_attributes_allow_subclassing(self):
+ """
+ A subclass can define keyword-only attributes without defaults,
+ even when the base class has attributes with defaults.
+ """
+
+ @attr.s
+ class Base(object):
+ x = attr.ib(default=0)
+
+ @attr.s
+ class C(Base):
+ y = attr.ib(kw_only=True)
+
+ c = C(y=1)
+
+ assert c.x == 0
+ assert c.y == 1
+
+ def test_keyword_only_class_level(self):
+ """
+ `kw_only` can be provided at the attr.s level, converting all
+ attributes to `kw_only`.
+ """
+
+ @attr.s(kw_only=True)
+ class C(object):
+ x = attr.ib()
+ y = attr.ib(kw_only=True)
+
+ with pytest.raises(TypeError):
+ C(0, y=1)
+
+ c = C(x=0, y=1)
+
+ assert c.x == 0
+ assert c.y == 1
+
+ def test_keyword_only_class_level_subclassing(self):
+ """
+ Subclass `kw_only` propagates to attrs inherited from the base,
+ allowing non-default following default.
+ """
+
+ @attr.s
+ class Base(object):
+ x = attr.ib(default=0)
+
+ @attr.s(kw_only=True)
+ class C(Base):
+ y = attr.ib()
+
+ with pytest.raises(TypeError):
+ C(1)
+
+ c = C(x=0, y=1)
+
+ assert c.x == 0
+ assert c.y == 1
+
+ def test_init_false_attribute_after_keyword_attribute(self):
+ """
+ A positional attribute cannot follow a `kw_only` attribute,
+ but an `init=False` attribute can because it won't appear
+ in `__init__`.
+ """
+
+ @attr.s
+ class KwArgBeforeInitFalse(object):
+ kwarg = attr.ib(kw_only=True)
+ non_init_function_default = attr.ib(init=False)
+ non_init_keyword_default = attr.ib(
+ init=False, default="default-by-keyword"
+ )
+
+ @non_init_function_default.default
+ def _init_to_init(self):
+ return self.kwarg + "b"
+
+ c = KwArgBeforeInitFalse(kwarg="a")
+
+ assert c.kwarg == "a"
+ assert c.non_init_function_default == "ab"
+ assert c.non_init_keyword_default == "default-by-keyword"
+
+ def test_init_false_attribute_after_keyword_attribute_with_inheritance(
+ self,
+ ):
+ """
+ A positional attribute cannot follow a `kw_only` attribute,
+ but an `init=False` attribute can because it won't appear
+ in `__init__`. This test checks that we allow this
+ even when the `kw_only` attribute appears in a parent class
+ """
+
+ @attr.s
+ class KwArgBeforeInitFalseParent(object):
+ kwarg = attr.ib(kw_only=True)
+
+ @attr.s
+ class KwArgBeforeInitFalseChild(KwArgBeforeInitFalseParent):
+ non_init_function_default = attr.ib(init=False)
+ non_init_keyword_default = attr.ib(
+ init=False, default="default-by-keyword"
+ )
+
+ @non_init_function_default.default
+ def _init_to_init(self):
+ return self.kwarg + "b"
+
+ c = KwArgBeforeInitFalseChild(kwarg="a")
+
+ assert c.kwarg == "a"
+ assert c.non_init_function_default == "ab"
+ assert c.non_init_keyword_default == "default-by-keyword"
+
+
+@pytest.mark.skipif(not PY2, reason="PY2-specific keyword-only error behavior")
+class TestKeywordOnlyAttributesOnPy2(object):
+ """
+ Tests for keyword-only attribute behavior on py2.
+ """
+
+ def test_no_init(self):
+ """
+ Keyword-only is a no-op, not an error, if ``init=False``.
+ """
+
+ @attr.s(kw_only=True, init=False)
+ class ClassLevel(object):
+ a = attr.ib()
+
+ @attr.s(init=False)
+ class AttrLevel(object):
+ a = attr.ib(kw_only=True)
+
+
+@attr.s
+class GC(object):
+ @attr.s
+ class D(object):
+ pass
+
+
+class TestMakeClass(object):
+ """
+ Tests for `make_class`.
+ """
+
+ @pytest.mark.parametrize("ls", [list, tuple])
+ def test_simple(self, ls):
+ """
+ Passing a list of strings creates attributes with default args.
+ """
+ C1 = make_class("C1", ls(["a", "b"]))
+
+ @attr.s
+ class C2(object):
+ a = attr.ib()
+ b = attr.ib()
+
+ assert C1.__attrs_attrs__ == C2.__attrs_attrs__
+
+ def test_dict(self):
+ """
+ Passing a dict of name: _CountingAttr creates an equivalent class.
+ """
+ C1 = make_class(
+ "C1", {"a": attr.ib(default=42), "b": attr.ib(default=None)}
+ )
+
+ @attr.s
+ class C2(object):
+ a = attr.ib(default=42)
+ b = attr.ib(default=None)
+
+ assert C1.__attrs_attrs__ == C2.__attrs_attrs__
+
+ def test_attr_args(self):
+ """
+ ``attributes_arguments`` are passed through to `attr.s`.
+ """
+ C = make_class("C", ["x"], repr=False)
+
+ assert repr(C(1)).startswith("<tests.test_make.C object at 0x")
+
+ def test_catches_wrong_attrs_type(self):
+ """
+ Raise `TypeError` if an invalid type for attrs is passed.
+ """
+ with pytest.raises(TypeError) as e:
+ make_class("C", object())
+
+ assert ("attrs argument must be a dict or a list.",) == e.value.args
+
+ def test_bases(self):
+ """
+ The ``bases`` parameter defaults to ``(object,)`` and subclassing works correctly.
+ """
+
+ class D(object):
+ pass
+
+ cls = make_class("C", {})
+
+ assert cls.__mro__[-1] == object
+
+ cls = make_class("C", {}, bases=(D,))
+
+ assert D in cls.__mro__
+ assert isinstance(cls(), D)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_clean_class(self, slots):
+ """
+ Attribute definitions do not appear on the class body.
+ """
+ C = make_class("C", ["x"], slots=slots)
+
+ x = getattr(C, "x", None)
+
+ assert not isinstance(x, _CountingAttr)
+
+ def test_missing_sys_getframe(self, monkeypatch):
+ """
+ `make_class()` does not fail when `sys._getframe()` is not available.
+ """
+ monkeypatch.delattr(sys, "_getframe")
+ C = make_class("C", ["x"])
+
+ assert 1 == len(C.__attrs_attrs__)
+
+ def test_make_class_ordered(self):
+ """
+ If `make_class()` is passed ordered attrs, their order is respected
+ instead of the counter.
+ """
+ b = attr.ib(default=2)
+ a = attr.ib(default=1)
+
+ C = attr.make_class("C", ordered_dict([("a", a), ("b", b)]))
+
+ assert "C(a=1, b=2)" == repr(C())
+
+ @pytest.mark.skipif(PY2, reason="Python 3-only")
+ def test_generic_dynamic_class(self):
+ """
+ make_class can create generic dynamic classes.
+
+ https://github.com/python-attrs/attrs/issues/756
+ https://bugs.python.org/issue33188
+ """
+ from types import new_class
+ from typing import Generic, TypeVar
+
+ MyTypeVar = TypeVar("MyTypeVar")
+ MyParent = new_class("MyParent", (Generic[MyTypeVar],), {})
+
+ attr.make_class("test", {"id": attr.ib(type=str)}, (MyParent[int],))
+
+
+class TestFields(object):
+ """
+ Tests for `fields`.
+ """
+
+ @given(simple_classes())
+ def test_instance(self, C):
+ """
+ Raises `TypeError` on non-classes.
+ """
+ with pytest.raises(TypeError) as e:
+ fields(C())
+
+ assert "Passed object must be a class." == e.value.args[0]
+
+ def test_handler_non_attrs_class(self):
+ """
+ Raises `NotAnAttrsClassError` if passed a non-``attrs`` class.
+ """
+ with pytest.raises(NotAnAttrsClassError) as e:
+ fields(object)
+
+ assert (
+ "{o!r} is not an attrs-decorated class.".format(o=object)
+ ) == e.value.args[0]
+
+ @given(simple_classes())
+ def test_fields(self, C):
+ """
+ Returns a tuple of `Attribute` objects.
+ """
+ assert all(isinstance(a, Attribute) for a in fields(C))
+
+ @given(simple_classes())
+ def test_fields_properties(self, C):
+ """
+ fields() returns a tuple that also exposes each `Attribute` by name.
+ """
+ for attribute in fields(C):
+ assert getattr(fields(C), attribute.name) is attribute
+
+
+class TestFieldsDict(object):
+ """
+ Tests for `fields_dict`.
+ """
+
+ @given(simple_classes())
+ def test_instance(self, C):
+ """
+ Raises `TypeError` on non-classes.
+ """
+ with pytest.raises(TypeError) as e:
+ fields_dict(C())
+
+ assert "Passed object must be a class." == e.value.args[0]
+
+ def test_handler_non_attrs_class(self):
+ """
+ Raises `NotAnAttrsClassError` if passed a non-``attrs`` class.
+ """
+ with pytest.raises(NotAnAttrsClassError) as e:
+ fields_dict(object)
+
+ assert (
+ "{o!r} is not an attrs-decorated class.".format(o=object)
+ ) == e.value.args[0]
+
+ @given(simple_classes())
+ def test_fields_dict(self, C):
+ """
+ Returns an ordered dict of ``{attribute_name: Attribute}``.
+ """
+ d = fields_dict(C)
+
+ assert isinstance(d, ordered_dict)
+ assert list(fields(C)) == list(d.values())
+ assert [a.name for a in fields(C)] == [field_name for field_name in d]
+
+
+class TestConverter(object):
+ """
+ Tests for attribute conversion.
+ """
+
+ def test_convert(self):
+ """
+ Return value of converter is used as the attribute's value.
+ """
+ C = make_class(
+ "C", {"x": attr.ib(converter=lambda v: v + 1), "y": attr.ib()}
+ )
+ c = C(1, 2)
+
+ assert c.x == 2
+ assert c.y == 2
+
+ @given(integers(), booleans())
+ def test_convert_property(self, val, init):
+ """
+ Property tests for attributes using converter.
+ """
+ C = make_class(
+ "C",
+ {
+ "y": attr.ib(),
+ "x": attr.ib(
+ init=init, default=val, converter=lambda v: v + 1
+ ),
+ },
+ )
+ c = C(2)
+
+ assert c.x == val + 1
+ assert c.y == 2
+
+ @given(integers(), booleans())
+ def test_converter_factory_property(self, val, init):
+ """
+ Property tests for attributes with converter, and a factory default.
+ """
+ C = make_class(
+ "C",
+ ordered_dict(
+ [
+ ("y", attr.ib()),
+ (
+ "x",
+ attr.ib(
+ init=init,
+ default=Factory(lambda: val),
+ converter=lambda v: v + 1,
+ ),
+ ),
+ ]
+ ),
+ )
+ c = C(2)
+
+ assert c.x == val + 1
+ assert c.y == 2
+
+ def test_factory_takes_self(self):
+ """
+ If takes_self on factories is True, self is passed.
+ """
+ C = make_class(
+ "C",
+ {
+ "x": attr.ib(
+ default=Factory((lambda self: self), takes_self=True)
+ )
+ },
+ )
+
+ i = C()
+
+ assert i is i.x
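+ # A more typical takes_self usage (hypothetical example) derives one
+ # attribute from an earlier one:
+ #   y = attr.ib(default=Factory(lambda self: self.x * 2, takes_self=True))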
+
+ def test_factory_hashable(self):
+ """
+ Factory is hashable.
+ """
+ assert hash(Factory(None, False)) == hash(Factory(None, False))
+
+ def test_convert_before_validate(self):
+ """
+ Validation happens after conversion.
+ """
+
+ def validator(inst, attr, val):
+ raise RuntimeError("foo")
+
+ C = make_class(
+ "C",
+ {
+ "x": attr.ib(validator=validator, converter=lambda v: 1 / 0),
+ "y": attr.ib(),
+ },
+ )
+ with pytest.raises(ZeroDivisionError):
+ C(1, 2)
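+ # The converter runs before the validator in the generated __init__, so
+ # the ZeroDivisionError from the converter fires before the validator
+ # ever gets a chance to raise RuntimeError.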
+
+ def test_frozen(self):
+ """
+ Converters circumvent immutability.
+ """
+ C = make_class(
+ "C", {"x": attr.ib(converter=lambda v: int(v))}, frozen=True
+ )
+ C("1")
+
+
+class TestValidate(object):
+ """
+ Tests for `validate`.
+ """
+
+ def test_success(self):
+ """
+ If the validator succeeds, nothing gets raised.
+ """
+ C = make_class(
+ "C", {"x": attr.ib(validator=lambda *a: None), "y": attr.ib()}
+ )
+ validate(C(1, 2))
+
+ def test_propagates(self):
+ """
+ The exception raised by the validator is propagated.
+ """
+
+ def raiser(_, __, value):
+ if value == 42:
+ raise FloatingPointError
+
+ C = make_class("C", {"x": attr.ib(validator=raiser)})
+ i = C(1)
+ i.x = 42
+
+ with pytest.raises(FloatingPointError):
+ validate(i)
+
+ def test_run_validators(self):
+ """
+ Setting `_run_validators` to False prevents validators from running.
+ """
+ _config._run_validators = False
+ obj = object()
+
+ def raiser(_, __, ___):
+ raise Exception(obj)
+
+ C = make_class("C", {"x": attr.ib(validator=raiser)})
+ c = C(1)
+ validate(c)
+ assert 1 == c.x
+ _config._run_validators = True
+
+ with pytest.raises(Exception):
+ validate(c)
+
+ with pytest.raises(Exception) as e:
+ C(1)
+ assert (obj,) == e.value.args
+
+ def test_multiple_validators(self):
+ """
+ If a list is passed as a validator, its items are combined into a single
+ validator and all of them must pass.
+ """
+
+ def v1(_, __, value):
+ if value == 23:
+ raise TypeError("omg")
+
+ def v2(_, __, value):
+ if value == 42:
+ raise ValueError("omg")
+
+ C = make_class("C", {"x": attr.ib(validator=[v1, v2])})
+
+ validate(C(1))
+
+ with pytest.raises(TypeError) as e:
+ C(23)
+
+ assert "omg" == e.value.args[0]
+
+ with pytest.raises(ValueError) as e:
+ C(42)
+
+ assert "omg" == e.value.args[0]
+
+ def test_multiple_empty(self):
+ """
+ Empty list/tuple for validator is the same as None.
+ """
+ C1 = make_class("C", {"x": attr.ib(validator=[])})
+ C2 = make_class("C", {"x": attr.ib(validator=None)})
+
+ assert inspect.getsource(C1.__init__) == inspect.getsource(C2.__init__)
+
+
+# Hypothesis seems to cache values, so the lists of attributes come out
+# unsorted.
+sorted_lists_of_attrs = list_of_attrs.map(
+ lambda l: sorted(l, key=attrgetter("counter"))
+)
+
+
+class TestMetadata(object):
+ """
+ Tests for metadata handling.
+ """
+
+ @given(sorted_lists_of_attrs)
+ def test_metadata_present(self, list_of_attrs):
+ """
+ Assert dictionaries are copied and present.
+ """
+ C = make_class("C", dict(zip(gen_attr_names(), list_of_attrs)))
+
+ for hyp_attr, class_attr in zip(list_of_attrs, fields(C)):
+ if hyp_attr.metadata is None:
+ # The default is a singleton empty dict.
+ assert class_attr.metadata is not None
+ assert len(class_attr.metadata) == 0
+ else:
+ assert hyp_attr.metadata == class_attr.metadata
+
+ # Once more, just to assert getting items and iteration.
+ for k in class_attr.metadata:
+ assert hyp_attr.metadata[k] == class_attr.metadata[k]
+ assert hyp_attr.metadata.get(k) == class_attr.metadata.get(
+ k
+ )
+
+ @given(simple_classes(), text())
+ def test_metadata_immutability(self, C, string):
+ """
+ The metadata dict should be best-effort immutable.
+ """
+ for a in fields(C):
+ with pytest.raises(TypeError):
+ a.metadata[string] = string
+ with pytest.raises(AttributeError):
+ a.metadata.update({string: string})
+ with pytest.raises(AttributeError):
+ a.metadata.clear()
+ with pytest.raises(AttributeError):
+ a.metadata.setdefault(string, string)
+
+ for k in a.metadata:
+ # For some reason, Python 3's MappingProxyType throws an
+ # IndexError for deletes on a large integer key.
+ with pytest.raises((TypeError, IndexError)):
+ del a.metadata[k]
+ with pytest.raises(AttributeError):
+ a.metadata.pop(k)
+ with pytest.raises(AttributeError):
+ a.metadata.popitem()
+
+ @given(lists(simple_attrs_without_metadata, min_size=2, max_size=5))
+ def test_empty_metadata_singleton(self, list_of_attrs):
+ """
+ All empty metadata attributes share the same empty metadata dict.
+ """
+ C = make_class("C", dict(zip(gen_attr_names(), list_of_attrs)))
+ for a in fields(C)[1:]:
+ assert a.metadata is fields(C)[0].metadata
+
+ @given(lists(simple_attrs_without_metadata, min_size=2, max_size=5))
+ def test_empty_countingattr_metadata_independent(self, list_of_attrs):
+ """
+ All empty metadata attributes are independent before ``@attr.s``.
+ """
+ for x, y in itertools.combinations(list_of_attrs, 2):
+ assert x.metadata is not y.metadata
+
+ @given(lists(simple_attrs_with_metadata(), min_size=2, max_size=5))
+ def test_not_none_metadata(self, list_of_attrs):
+ """
+ Non-empty metadata attributes exist as fields after ``@attr.s``.
+ """
+ C = make_class("C", dict(zip(gen_attr_names(), list_of_attrs)))
+
+ assert len(fields(C)) > 0
+
+ for cls_a, raw_a in zip(fields(C), list_of_attrs):
+ assert cls_a.metadata != {}
+ assert cls_a.metadata == raw_a.metadata
+
+ def test_metadata(self):
+ """
+ If metadata that is not None is passed, it is used.
+
+ This is necessary for coverage because the previous test is
+ hypothesis-based.
+ """
+ md = {}
+ a = attr.ib(metadata=md)
+
+ assert md is a.metadata
+
+
+class TestClassBuilder(object):
+ """
+ Tests for `_ClassBuilder`.
+ """
+
+ def test_repr_str(self):
+ """
+ Trying to add a `__str__` without having a `__repr__` raises a
+ ValueError.
+ """
+ with pytest.raises(ValueError) as ei:
+ make_class("C", {}, repr=False, str=True)
+
+ assert (
+ "__str__ can only be generated if a __repr__ exists.",
+ ) == ei.value.args
+
+ def test_repr(self):
+ """
+ The repr of the builder itself makes sense.
+ """
+
+ class C(object):
+ pass
+
+ b = _ClassBuilder(
+ C,
+ None,
+ True,
+ True,
+ False,
+ False,
+ False,
+ False,
+ False,
+ False,
+ True,
+ None,
+ False,
+ None,
+ )
+
+ assert "<_ClassBuilder(cls=C)>" == repr(b)
+
+ def test_returns_self(self):
+ """
+ All methods return the builder for chaining.
+ """
+
+ class C(object):
+ x = attr.ib()
+
+ b = _ClassBuilder(
+ C,
+ None,
+ True,
+ True,
+ False,
+ False,
+ False,
+ False,
+ False,
+ False,
+ True,
+ None,
+ False,
+ None,
+ )
+
+ cls = (
+ b.add_eq()
+ .add_order()
+ .add_hash()
+ .add_init()
+ .add_attrs_init()
+ .add_repr("ns")
+ .add_str()
+ .build_class()
+ )
+
+ assert "ns.C(x=1)" == repr(cls(1))
+
+ @pytest.mark.parametrize(
+ "meth_name",
+ [
+ "__init__",
+ "__hash__",
+ "__repr__",
+ "__str__",
+ "__eq__",
+ "__ne__",
+ "__lt__",
+ "__le__",
+ "__gt__",
+ "__ge__",
+ ],
+ )
+ def test_attaches_meta_dunders(self, meth_name):
+ """
+ Generated methods have correct __module__, __name__, and __qualname__
+ attributes.
+ """
+
+ @attr.s(hash=True, str=True)
+ class C(object):
+ def organic(self):
+ pass
+
+ @attr.s(hash=True, str=True)
+ class D(object):
+ pass
+
+ meth_C = getattr(C, meth_name)
+ meth_D = getattr(D, meth_name)
+
+ assert meth_name == meth_C.__name__ == meth_D.__name__
+ assert C.organic.__module__ == meth_C.__module__ == meth_D.__module__
+ if not PY2:
+ # This assertion would fail if a single __ne__ instance were
+ # reused across multiple _make_eq calls.
+ organic_prefix = C.organic.__qualname__.rsplit(".", 1)[0]
+ assert organic_prefix + "." + meth_name == meth_C.__qualname__
+
+ def test_handles_missing_meta_on_class(self):
+ """
+ If the class doesn't have a __module__ or a __qualname__, the generated
+ method doesn't get them either.
+ """
+
+ class C(object):
+ pass
+
+ b = _ClassBuilder(
+ C,
+ these=None,
+ slots=False,
+ frozen=False,
+ weakref_slot=True,
+ getstate_setstate=False,
+ auto_attribs=False,
+ is_exc=False,
+ kw_only=False,
+ cache_hash=False,
+ collect_by_mro=True,
+ on_setattr=None,
+ has_custom_setattr=False,
+ field_transformer=None,
+ )
+ b._cls = {} # no __module__; no __qualname__
+
+ def fake_meth(self):
+ pass
+
+ fake_meth.__module__ = "42"
+ fake_meth.__qualname__ = "23"
+
+ rv = b._add_method_dunders(fake_meth)
+
+ assert "42" == rv.__module__ == fake_meth.__module__
+ assert "23" == rv.__qualname__ == fake_meth.__qualname__
+
+ def test_weakref_setstate(self):
+ """
+ __weakref__ is not set in __setstate__ because it's not writable in
+ slotted classes.
+ """
+
+ @attr.s(slots=True)
+ class C(object):
+ __weakref__ = attr.ib(
+ init=False, hash=False, repr=False, eq=False, order=False
+ )
+
+ assert C() == copy.deepcopy(C())
+
+ def test_no_references_to_original(self):
+ """
+ When subclassing a slotted class, there are no stray references to the
+ original class.
+ """
+
+ @attr.s(slots=True)
+ class C(object):
+ pass
+
+ @attr.s(slots=True)
+ class C2(C):
+ pass
+
+ # The original C2 is in a reference cycle, so force a collect:
+ gc.collect()
+
+ assert [C2] == C.__subclasses__()
+
+ def _get_copy_kwargs(include_slots=True):
+ """
+ Generate a list of compatible attr.s arguments for the `copy` tests.
+ """
+ options = ["frozen", "hash", "cache_hash"]
+
+ if include_slots:
+ options.extend(["slots", "weakref_slot"])
+
+ out_kwargs = []
+ for args in itertools.product([True, False], repeat=len(options)):
+ kwargs = dict(zip(options, args))
+
+ kwargs["hash"] = kwargs["hash"] or None
+
+ if kwargs["cache_hash"] and not (
+ kwargs["frozen"] or kwargs["hash"]
+ ):
+ continue
+
+ out_kwargs.append(kwargs)
+
+ return out_kwargs
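+ # Each generated entry is a plain kwargs dict for @attr.s, e.g.
+ #   {"frozen": True, "hash": None, "cache_hash": False,
+ #    "slots": True, "weakref_slot": False}
+ # (hash=False is normalized to None above).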
+
+ @pytest.mark.parametrize("kwargs", _get_copy_kwargs())
+ def test_copy(self, kwargs):
+ """
+ Ensure that an attrs class can be copied successfully.
+ """
+
+ @attr.s(eq=True, **kwargs)
+ class C(object):
+ x = attr.ib()
+
+ a = C(1)
+ b = copy.deepcopy(a)
+
+ assert a == b
+
+ @pytest.mark.parametrize("kwargs", _get_copy_kwargs(include_slots=False))
+ def test_copy_custom_setstate(self, kwargs):
+ """
+ Ensure that non-slots classes respect a custom __setstate__.
+ """
+
+ @attr.s(eq=True, **kwargs)
+ class C(object):
+ x = attr.ib()
+
+ def __getstate__(self):
+ return self.__dict__
+
+ def __setstate__(self, state):
+ state["x"] *= 5
+ self.__dict__.update(state)
+
+ expected = C(25)
+ actual = copy.copy(C(5))
+
+ assert actual == expected
+
+
+class TestMakeOrder:
+ """
+ Tests for _make_order().
+ """
+
+ def test_subclasses_cannot_be_compared(self):
+ """
+ Calling comparison methods on subclasses raises a TypeError.
+
+ We use the actual operation so we get an error raised on Python 3.
+ """
+
+ @attr.s
+ class A(object):
+ a = attr.ib()
+
+ @attr.s
+ class B(A):
+ pass
+
+ a = A(42)
+ b = B(42)
+
+ assert a <= a
+ assert a >= a
+ assert not a < a
+ assert not a > a
+
+ assert (
+ NotImplemented
+ == a.__lt__(b)
+ == a.__le__(b)
+ == a.__gt__(b)
+ == a.__ge__(b)
+ )
+
+ if not PY2:
+ with pytest.raises(TypeError):
+ a <= b
+
+ with pytest.raises(TypeError):
+ a >= b
+
+ with pytest.raises(TypeError):
+ a < b
+
+ with pytest.raises(TypeError):
+ a > b
+
+
+class TestDetermineAttrsEqOrder(object):
+ def test_default(self):
+ """
+ If all are set to None, set both eq and order to the passed default.
+ """
+ assert (42, 42) == _determine_attrs_eq_order(None, None, None, 42)
+
+ @pytest.mark.parametrize("eq", [True, False])
+ def test_order_mirrors_eq_by_default(self, eq):
+ """
+ If order is None, it mirrors eq.
+ """
+ assert (eq, eq) == _determine_attrs_eq_order(None, eq, None, True)
+
+ def test_order_without_eq(self):
+ """
+ eq=False, order=True raises a meaningful ValueError.
+ """
+ with pytest.raises(
+ ValueError, match="`order` can only be True if `eq` is True too."
+ ):
+ _determine_attrs_eq_order(None, False, True, True)
+
+ @given(cmp=booleans(), eq=optional_bool, order=optional_bool)
+ def test_mix(self, cmp, eq, order):
+ """
+ If cmp is not None, eq and order must be None and vice versa.
+ """
+ assume(eq is not None or order is not None)
+
+ with pytest.raises(
+ ValueError, match="Don't mix `cmp` with `eq' and `order`."
+ ):
+ _determine_attrs_eq_order(cmp, eq, order, True)
+
+
+class TestDetermineAttribEqOrder(object):
+ def test_default(self):
+ """
+ If all are set to None, set both eq and order to the passed default.
+ """
+ assert (42, None, 42, None) == _determine_attrib_eq_order(
+ None, None, None, 42
+ )
+
+ def test_eq_callable_order_boolean(self):
+ """
+ eq=callable or order=callable need to be transformed into eq/eq_key
+ or order/order_key.
+ """
+ assert (True, str.lower, False, None) == _determine_attrib_eq_order(
+ None, str.lower, False, True
+ )
+
+ def test_eq_callable_order_callable(self):
+ """
+ eq=callable or order=callable need to be transformed into eq/eq_key
+ or order/order_key.
+ """
+ assert (True, str.lower, True, abs) == _determine_attrib_eq_order(
+ None, str.lower, abs, True
+ )
+
+ def test_eq_boolean_order_callable(self):
+ """
+ eq=callable or order=callable need to be transformed into eq/eq_key
+ or order/order_key.
+ """
+ assert (True, None, True, str.lower) == _determine_attrib_eq_order(
+ None, True, str.lower, True
+ )
+
+ @pytest.mark.parametrize("eq", [True, False])
+ def test_order_mirrors_eq_by_default(self, eq):
+ """
+ If order is None, it mirrors eq.
+ """
+ assert (eq, None, eq, None) == _determine_attrib_eq_order(
+ None, eq, None, True
+ )
+
+ def test_order_without_eq(self):
+ """
+ eq=False, order=True raises a meaningful ValueError.
+ """
+ with pytest.raises(
+ ValueError, match="`order` can only be True if `eq` is True too."
+ ):
+ _determine_attrib_eq_order(None, False, True, True)
+
+ @given(cmp=booleans(), eq=optional_bool, order=optional_bool)
+ def test_mix(self, cmp, eq, order):
+ """
+ If cmp is not None, eq and order must be None and vice versa.
+ """
+ assume(eq is not None or order is not None)
+
+ with pytest.raises(
+ ValueError, match="Don't mix `cmp` with `eq' and `order`."
+ ):
+ _determine_attrib_eq_order(cmp, eq, order, True)
+
+
+class TestDocs:
+ @pytest.mark.parametrize(
+ "meth_name",
+ [
+ "__init__",
+ "__repr__",
+ "__eq__",
+ "__ne__",
+ "__lt__",
+ "__le__",
+ "__gt__",
+ "__ge__",
+ ],
+ )
+ def test_docs(self, meth_name):
+ """
+ Tests the presence and correctness of the documentation
+ for the generated methods.
+ """
+
+ @attr.s
+ class A(object):
+ pass
+
+ if hasattr(A, "__qualname__"):
+ method = getattr(A, meth_name)
+ expected = "Method generated by attrs for class {}.".format(
+ A.__qualname__
+ )
+ assert expected == method.__doc__
+
+
+@pytest.mark.skipif(not PY2, reason="Needs to be only caught on Python 2.")
+def test_auto_detect_raises_on_py2():
+ """
+ Trying to pass auto_detect=True to attr.s raises PythonTooOldError.
+ """
+ with pytest.raises(PythonTooOldError):
+ attr.s(auto_detect=True)
+
+
+class BareC(object):
+ pass
+
+
+class BareSlottedC(object):
+ __slots__ = ()
+
+
+@pytest.mark.skipif(PY2, reason="Auto-detection is Python 3-only.")
+class TestAutoDetect:
+ @pytest.mark.parametrize("C", (BareC, BareSlottedC))
+ def test_determine_detects_non_presence_correctly(self, C):
+ """
+ On an empty class, nothing should be detected.
+ """
+ assert True is _determine_whether_to_implement(
+ C, None, True, ("__init__",)
+ )
+ assert True is _determine_whether_to_implement(
+ C, None, True, ("__repr__",)
+ )
+ assert True is _determine_whether_to_implement(
+ C, None, True, ("__eq__", "__ne__")
+ )
+ assert True is _determine_whether_to_implement(
+ C, None, True, ("__le__", "__lt__", "__ge__", "__gt__")
+ )
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_make_all_by_default(self, slots, frozen):
+ """
+ If nothing is there to be detected, imply init=True, repr=True,
+ hash=None, eq=True, order=True.
+ """
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ i = C(1)
+ o = object()
+
+ assert i.__init__ is not o.__init__
+ assert i.__repr__ is not o.__repr__
+ assert i.__eq__ is not o.__eq__
+ assert i.__ne__ is not o.__ne__
+ assert i.__le__ is not o.__le__
+ assert i.__lt__ is not o.__lt__
+ assert i.__ge__ is not o.__ge__
+ assert i.__gt__ is not o.__gt__
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_detect_auto_init(self, slots, frozen):
+ """
+ If auto_detect=True and an __init__ exists, don't write one.
+ """
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class CI(object):
+ x = attr.ib()
+
+ def __init__(self):
+ object.__setattr__(self, "x", 42)
+
+ assert 42 == CI().x
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_detect_auto_repr(self, slots, frozen):
+ """
+ If auto_detect=True and a __repr__ exists, don't write one.
+ """
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __repr__(self):
+ return "hi"
+
+ assert "hi" == repr(C(42))
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_detect_auto_hash(self, slots, frozen):
+ """
+ If auto_detect=True and a __hash__ exists, don't write one.
+ """
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __hash__(self):
+ return 0xC0FFEE
+
+ assert 0xC0FFEE == hash(C(42))
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_detect_auto_eq(self, slots, frozen):
+ """
+ If auto_detect=True and an __eq__ or __ne__ exists, don't write one.
+ """
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __eq__(self, o):
+ raise ValueError("worked")
+
+ with pytest.raises(ValueError, match="worked"):
+ C(1) == C(1)
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class D(object):
+ x = attr.ib()
+
+ def __ne__(self, o):
+ raise ValueError("worked")
+
+ with pytest.raises(ValueError, match="worked"):
+ D(1) != D(1)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_detect_auto_order(self, slots, frozen):
+ """
+ If auto_detect=True and a __ge__, __gt__, __le__, or __lt__ exists,
+ don't write them.
+
+ It's surprisingly difficult to test this programmatically, so we do it
+ by hand.
+ """
+
+ def assert_not_set(cls, ex, meth_name):
+ __tracebackhide__ = True
+
+ a = getattr(cls, meth_name)
+ if meth_name == ex:
+ assert a == 42
+ else:
+ assert a is getattr(object, meth_name)
+
+ def assert_none_set(cls, ex):
+ __tracebackhide__ = True
+
+ for m in ("le", "lt", "ge", "gt"):
+ assert_not_set(cls, ex, "__" + m + "__")
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class LE(object):
+ __le__ = 42
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class LT(object):
+ __lt__ = 42
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class GE(object):
+ __ge__ = 42
+
+ @attr.s(auto_detect=True, slots=slots, frozen=frozen)
+ class GT(object):
+ __gt__ = 42
+
+ assert_none_set(LE, "__le__")
+ assert_none_set(LT, "__lt__")
+ assert_none_set(GE, "__ge__")
+ assert_none_set(GT, "__gt__")
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_override_init(self, slots, frozen):
+ """
+ If init=True is passed, ignore __init__.
+ """
+
+ @attr.s(init=True, auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __init__(self):
+ pytest.fail("should not be called")
+
+ assert C(1) == C(1)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_override_repr(self, slots, frozen):
+ """
+ If repr=True is passed, ignore __repr__.
+ """
+
+ @attr.s(repr=True, auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __repr__(self):
+ pytest.fail("should not be called")
+
+ assert "C(x=1)" == repr(C(1))
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_override_hash(self, slots, frozen):
+ """
+ If hash=True is passed, ignore __hash__.
+ """
+
+ @attr.s(hash=True, auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __hash__(self):
+ pytest.fail("should not be called")
+
+ assert hash(C(1))
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ def test_override_eq(self, slots, frozen):
+ """
+ If eq=True is passed, ignore __eq__ and __ne__.
+ """
+
+ @attr.s(eq=True, auto_detect=True, slots=slots, frozen=frozen)
+ class C(object):
+ x = attr.ib()
+
+ def __eq__(self, o):
+ pytest.fail("should not be called")
+
+ def __ne__(self, o):
+ pytest.fail("should not be called")
+
+ assert C(1) == C(1)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("frozen", [True, False])
+ @pytest.mark.parametrize(
+ "eq,order,cmp",
+ [
+ (True, None, None),
+ (True, True, None),
+ (None, True, None),
+ (None, None, True),
+ ],
+ )
+ def test_override_order(self, slots, frozen, eq, order, cmp):
+ """
+ If order=True is passed, ignore __le__, __lt__, __gt__, __ge__.
+
+ eq=True and cmp=True both imply order=True so test it too.
+ """
+
+ def meth(self, o):
+ pytest.fail("should not be called")
+
+ @attr.s(
+ cmp=cmp,
+ order=order,
+ eq=eq,
+ auto_detect=True,
+ slots=slots,
+ frozen=frozen,
+ )
+ class C(object):
+ x = attr.ib()
+ __le__ = __lt__ = __gt__ = __ge__ = meth
+
+ assert C(1) < C(2)
+ assert C(1) <= C(2)
+ assert C(2) > C(1)
+ assert C(2) >= C(1)
+
+ @pytest.mark.parametrize("slots", [True, False])
+ @pytest.mark.parametrize("first", [True, False])
+ def test_total_ordering(self, slots, first):
+ """
+ functools.total_ordering works as expected if an order method and an eq
+ method are detected.
+
+ Ensure the order doesn't matter.
+ """
+
+ class C(object):
+ x = attr.ib()
+ own_eq_called = attr.ib(default=False)
+ own_le_called = attr.ib(default=False)
+
+ def __eq__(self, o):
+ self.own_eq_called = True
+ return self.x == o.x
+
+ def __le__(self, o):
+ self.own_le_called = True
+ return self.x <= o.x
+
+ if first:
+ C = functools.total_ordering(
+ attr.s(auto_detect=True, slots=slots)(C)
+ )
+ else:
+ C = attr.s(auto_detect=True, slots=slots)(
+ functools.total_ordering(C)
+ )
+
+ c1, c2 = C(1), C(2)
+
+ assert c1 < c2
+ assert c1.own_le_called
+
+ c1, c2 = C(1), C(2)
+
+ assert c2 > c1
+ assert c2.own_le_called
+
+ c1, c2 = C(1), C(2)
+
+ assert c2 != c1
+ assert c1 == c1
+
+ assert c1.own_eq_called
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_detects_setstate_getstate(self, slots):
+ """
+ __getstate__ and __setstate__ are not overwritten if either is present.
+ """
+
+ @attr.s(slots=slots, auto_detect=True)
+ class C(object):
+ def __getstate__(self):
+ return ("hi",)
+
+ assert None is getattr(C(), "__setstate__", None)
+
+ @attr.s(slots=slots, auto_detect=True)
+ class C(object):
+ called = attr.ib(False)
+
+ def __setstate__(self, state):
+ self.called = True
+
+ i = C()
+
+ assert False is i.called
+
+ i.__setstate__(())
+
+ assert True is i.called
+ assert None is getattr(C(), "__getstate__", None)
+
+ @pytest.mark.skipif(PY310, reason="Pre-3.10 only.")
+ def test_match_args_pre_310(self):
+ """
+ __match_args__ is not created on Python versions older than 3.10.
+ """
+
+ @attr.s
+ class C(object):
+ a = attr.ib()
+
+ assert None is getattr(C, "__match_args__", None)
+
+
+@pytest.mark.skipif(not PY310, reason="Structural pattern matching is 3.10+")
+class TestMatchArgs(object):
+ """
+ Tests for match_args and __match_args__ generation.
+ """
+
+ def test_match_args(self):
+ """
+ __match_args__ is created by default on Python 3.10.
+ """
+
+ @attr.define
+ class C:
+ a = attr.field()
+
+ assert ("a",) == C.__match_args__
+
+ def test_explicit_match_args(self):
+ """
+ An explicitly set __match_args__ is not overwritten.
+ """
+
+ ma = ()
+
+ @attr.define
+ class C:
+ a = attr.field()
+ __match_args__ = ma
+
+ assert C(42).__match_args__ is ma
+
+ @pytest.mark.parametrize("match_args", [True, False])
+ def test_match_args_attr_set(self, match_args):
+ """
+ __match_args__ is set depending on match_args.
+ """
+
+ @attr.define(match_args=match_args)
+ class C:
+ a = attr.field()
+
+ if match_args:
+ assert hasattr(C, "__match_args__")
+ else:
+ assert not hasattr(C, "__match_args__")
+
+ def test_match_args_kw_only(self):
+ """
+ kw_only fields are not included in __match_args__, so a fully
+ kw_only class ends up with an empty __match_args__.
+ """
+
+ @attr.define
+ class C:
+ a = attr.field(kw_only=True)
+ b = attr.field()
+
+ assert C.__match_args__ == ("b",)
+
+ @attr.define(kw_only=True)
+ class C:
+ a = attr.field()
+ b = attr.field()
+
+ assert C.__match_args__ == ()
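+ # With __match_args__ == ("b",), a pattern like `case C(value)` binds
+ # `value` to the positional field `b`; kw_only fields can only be
+ # matched by keyword, e.g. `case C(a=value)`.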
+
+ def test_match_args_argument(self):
+ """
+ match_args=False interacts correctly with inheritance.
+ """
+
+ @attr.define(match_args=False)
+ class X:
+ a = attr.field()
+
+ assert "__match_args__" not in X.__dict__
+
+ @attr.define(match_args=False)
+ class Y:
+ a = attr.field()
+ __match_args__ = ("b",)
+
+ assert Y.__match_args__ == ("b",)
+
+ @attr.define(match_args=False)
+ class Z(Y):
+ z = attr.field()
+
+ assert Z.__match_args__ == ("b",)
+
+ @attr.define
+ class A:
+ a = attr.field()
+ z = attr.field()
+
+ @attr.define(match_args=False)
+ class B(A):
+ b = attr.field()
+
+ assert B.__match_args__ == ("a", "z")
+
+ def test_make_class(self):
+ """
+ match_args generation with make_class.
+ """
+
+ C1 = make_class("C1", ["a", "b"])
+ assert ("a", "b") == C1.__match_args__
+
+ C1 = make_class("C1", ["a", "b"], match_args=False)
+ assert not hasattr(C1, "__match_args__")
+
+ C1 = make_class("C1", ["a", "b"], kw_only=True)
+ assert () == C1.__match_args__
+
+ C1 = make_class("C1", {"a": attr.ib(kw_only=True), "b": attr.ib()})
+ assert ("b",) == C1.__match_args__
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_mypy.yml b/testing/web-platform/tests/tools/third_party/attrs/tests/test_mypy.yml
new file mode 100644
index 0000000000..ca17b0a662
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_mypy.yml
@@ -0,0 +1,1395 @@
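+# These cases appear to follow the pytest-mypy-plugins YAML format: each
+# `main` program is type-checked by mypy, inline `# E:`/`# N:` comments (or
+# an `out:` block) give the expected diagnostics, and `parametrized`,
+# `files`, `mypy_config`, and `skip` tweak individual cases.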
+- case: attr_s_with_type_argument
+ parametrized:
+ - val: 'a = attr.ib(type=int)'
+ - val: 'a: int = attr.ib()'
+ main: |
+ import attr
+ @attr.s
+ class C:
+ {{ val }}
+ C() # E: Missing positional argument "a" in call to "C"
+ C(1)
+ C(a=1)
+ C(a="hi") # E: Argument "a" to "C" has incompatible type "str"; expected "int"
+- case: attr_s_with_type_annotations
+ main: |
+ import attr
+ @attr.s
+ class C:
+ a: int = attr.ib()
+ C() # E: Missing positional argument "a" in call to "C"
+ C(1)
+ C(a=1)
+ C(a="hi") # E: Argument "a" to "C" has incompatible type "str"; expected "int"
+
+- case: testAttrsSimple
+ main: |
+ import attr
+ @attr.s
+ class A:
+ a = attr.ib()
+ _b = attr.ib()
+ c = attr.ib(18)
+ _d = attr.ib(validator=None, default=18)
+ E = 18
+
+ def foo(self):
+ return self.a
+ reveal_type(A) # N: Revealed type is "def (a: Any, b: Any, c: Any =, d: Any =) -> main.A"
+ A(1, [2])
+ A(1, [2], '3', 4)
+ A(1, 2, 3, 4)
+ A(1, [2], '3', 4, 5) # E: Too many arguments for "A"
+
+- case: testAttrsAnnotated
+ main: |
+ import attr
+ from typing import List, ClassVar
+ @attr.s
+ class A:
+ a: int = attr.ib()
+ _b: List[int] = attr.ib()
+ c: str = attr.ib('18')
+ _d: int = attr.ib(validator=None, default=18)
+ E = 7
+ F: ClassVar[int] = 22
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.list[builtins.int], c: builtins.str =, d: builtins.int =) -> main.A"
+ A(1, [2])
+ A(1, [2], '3', 4)
+ A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" # E: Argument 3 to "A" has incompatible type "int"; expected "str"
+ A(1, [2], '3', 4, 5) # E: Too many arguments for "A"
+
+- case: testAttrsPython2Annotations
+ main: |
+ import attr
+ from typing import List, ClassVar
+ @attr.s
+ class A:
+ a = attr.ib() # type: int
+ _b = attr.ib() # type: List[int]
+ c = attr.ib('18') # type: str
+ _d = attr.ib(validator=None, default=18) # type: int
+ E = 7
+ F: ClassVar[int] = 22
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.list[builtins.int], c: builtins.str =, d: builtins.int =) -> main.A"
+ A(1, [2])
+ A(1, [2], '3', 4)
+ A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" # E: Argument 3 to "A" has incompatible type "int"; expected "str"
+ A(1, [2], '3', 4, 5) # E: Too many arguments for "A"
+
+- case: testAttrsAutoAttribs
+ main: |
+ import attr
+ from typing import List, ClassVar
+ @attr.s(auto_attribs=True)
+ class A:
+ a: int
+ _b: List[int]
+ c: str = '18'
+ _d: int = attr.ib(validator=None, default=18)
+ E = 7
+ F: ClassVar[int] = 22
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.list[builtins.int], c: builtins.str =, d: builtins.int =) -> main.A"
+ A(1, [2])
+ A(1, [2], '3', 4)
+ A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" # E: Argument 3 to "A" has incompatible type "int"; expected "str"
+ A(1, [2], '3', 4, 5) # E: Too many arguments for "A"
+
+- case: testAttrsUntypedNoUntypedDefs
+ mypy_config: |
+ disallow_untyped_defs = True
+ main: |
+ import attr
+ @attr.s
+ class A:
+ a = attr.ib() # E: Need type annotation for "a"
+ _b = attr.ib() # E: Need type annotation for "_b"
+ c = attr.ib(18) # E: Need type annotation for "c"
+ _d = attr.ib(validator=None, default=18) # E: Need type annotation for "_d"
+ E = 18
+
+- case: testAttrsWrongReturnValue
+ main: |
+ import attr
+ @attr.s
+ class A:
+ x: int = attr.ib(8)
+ def foo(self) -> str:
+ return self.x # E: Incompatible return value type (got "int", expected "str")
+ @attr.s
+ class B:
+ x = attr.ib(8) # type: int
+ def foo(self) -> str:
+ return self.x # E: Incompatible return value type (got "int", expected "str")
+ @attr.dataclass
+ class C:
+ x: int = 8
+ def foo(self) -> str:
+ return self.x # E: Incompatible return value type (got "int", expected "str")
+ @attr.s
+ class D:
+ x = attr.ib(8, type=int)
+ def foo(self) -> str:
+ return self.x # E: Incompatible return value type (got "int", expected "str")
+
+- case: testAttrsSeriousNames
+ main: |
+ from attr import attrib, attrs
+ from typing import List
+ @attrs(init=True)
+ class A:
+ a = attrib()
+ _b: List[int] = attrib()
+ c = attrib(18)
+ _d = attrib(validator=None, default=18)
+ CLASS_VAR = 18
+ reveal_type(A) # N: Revealed type is "def (a: Any, b: builtins.list[builtins.int], c: Any =, d: Any =) -> main.A"
+ A(1, [2])
+ A(1, [2], '3', 4)
+ A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]"
+ A(1, [2], '3', 4, 5) # E: Too many arguments for "A"
+
+- case: testAttrsDefaultErrors
+ main: |
+ import attr
+ @attr.s
+ class A:
+ x = attr.ib(default=17)
+ y = attr.ib() # E: Non-default attributes not allowed after default attributes.
+ @attr.s(auto_attribs=True)
+ class B:
+ x: int = 17
+ y: int # E: Non-default attributes not allowed after default attributes.
+ @attr.s(auto_attribs=True)
+ class C:
+ x: int = attr.ib(default=17)
+ y: int # E: Non-default attributes not allowed after default attributes.
+ @attr.s
+ class D:
+ x = attr.ib()
+ y = attr.ib() # E: Non-default attributes not allowed after default attributes.
+
+ @x.default
+ def foo(self):
+ return 17
+
+- case: testAttrsNotBooleans
+ main: |
+ import attr
+ x = True
+ @attr.s(cmp=x) # E: "cmp" argument must be True or False.
+ class A:
+ a = attr.ib(init=x) # E: "init" argument must be True or False.
+
+- case: testAttrsInitFalse
+ main: |
+ from attr import attrib, attrs
+ @attrs(auto_attribs=True, init=False)
+ class A:
+ a: int
+ _b: int
+ c: int = 18
+ _d: int = attrib(validator=None, default=18)
+ reveal_type(A) # N: Revealed type is "def () -> main.A"
+ A()
+ A(1, [2]) # E: Too many arguments for "A"
+ A(1, [2], '3', 4) # E: Too many arguments for "A"
+
+- case: testAttrsInitAttribFalse
+ main: |
+ from attr import attrib, attrs
+ @attrs
+ class A:
+ a = attrib(init=False)
+ b = attrib()
+ reveal_type(A) # N: Revealed type is "def (b: Any) -> main.A"
+
+- case: testAttrsCmpTrue
+ main: |
+ from attr import attrib, attrs
+ @attrs(auto_attribs=True)
+ class A:
+ a: int
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int) -> main.A"
+ reveal_type(A.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+ reveal_type(A.__le__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+ reveal_type(A.__gt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+ reveal_type(A.__ge__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+
+ A(1) < A(2)
+ A(1) <= A(2)
+ A(1) > A(2)
+ A(1) >= A(2)
+ A(1) == A(2)
+ A(1) != A(2)
+
+ A(1) < 1 # E: Unsupported operand types for < ("A" and "int")
+ A(1) <= 1 # E: Unsupported operand types for <= ("A" and "int")
+ A(1) > 1 # E: Unsupported operand types for > ("A" and "int")
+ A(1) >= 1 # E: Unsupported operand types for >= ("A" and "int")
+ A(1) == 1
+ A(1) != 1
+
+ 1 < A(1) # E: Unsupported operand types for < ("int" and "A")
+ 1 <= A(1) # E: Unsupported operand types for <= ("int" and "A")
+ 1 > A(1) # E: Unsupported operand types for > ("int" and "A")
+ 1 >= A(1) # E: Unsupported operand types for >= ("int" and "A")
+ 1 == A(1)
+ 1 != A(1)
+
+- case: testAttrsEqFalse
+ main: |
+ from attr import attrib, attrs
+ @attrs(auto_attribs=True, eq=False)
+ class A:
+ a: int
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int) -> main.A"
+ reveal_type(A.__eq__) # N: Revealed type is "def (builtins.object, builtins.object) -> builtins.bool"
+ reveal_type(A.__ne__) # N: Revealed type is "def (builtins.object, builtins.object) -> builtins.bool"
+
+ A(1) < A(2) # E: Unsupported left operand type for < ("A")
+ A(1) <= A(2) # E: Unsupported left operand type for <= ("A")
+ A(1) > A(2) # E: Unsupported left operand type for > ("A")
+ A(1) >= A(2) # E: Unsupported left operand type for >= ("A")
+ A(1) == A(2)
+ A(1) != A(2)
+
+ A(1) < 1 # E: Unsupported operand types for > ("int" and "A")
+ A(1) <= 1 # E: Unsupported operand types for >= ("int" and "A")
+ A(1) > 1 # E: Unsupported operand types for < ("int" and "A")
+ A(1) >= 1 # E: Unsupported operand types for <= ("int" and "A")
+ A(1) == 1
+ A(1) != 1
+
+ 1 < A(1) # E: Unsupported operand types for < ("int" and "A")
+ 1 <= A(1) # E: Unsupported operand types for <= ("int" and "A")
+ 1 > A(1) # E: Unsupported operand types for > ("int" and "A")
+ 1 >= A(1) # E: Unsupported operand types for >= ("int" and "A")
+ 1 == A(1)
+ 1 != A(1)
+
+- case: testAttrsOrderFalse
+ main: |
+ from attr import attrib, attrs
+ @attrs(auto_attribs=True, order=False)
+ class A:
+ a: int
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int) -> main.A"
+
+ A(1) < A(2) # E: Unsupported left operand type for < ("A")
+ A(1) <= A(2) # E: Unsupported left operand type for <= ("A")
+ A(1) > A(2) # E: Unsupported left operand type for > ("A")
+ A(1) >= A(2) # E: Unsupported left operand type for >= ("A")
+ A(1) == A(2)
+ A(1) != A(2)
+
+ A(1) < 1 # E: Unsupported operand types for > ("int" and "A")
+ A(1) <= 1 # E: Unsupported operand types for >= ("int" and "A")
+ A(1) > 1 # E: Unsupported operand types for < ("int" and "A")
+ A(1) >= 1 # E: Unsupported operand types for <= ("int" and "A")
+ A(1) == 1
+ A(1) != 1
+
+ 1 < A(1) # E: Unsupported operand types for < ("int" and "A")
+ 1 <= A(1) # E: Unsupported operand types for <= ("int" and "A")
+ 1 > A(1) # E: Unsupported operand types for > ("int" and "A")
+ 1 >= A(1) # E: Unsupported operand types for >= ("int" and "A")
+ 1 == A(1)
+ 1 != A(1)
+
+- case: testAttrsCmpEqOrderValues
+ main: |
+ from attr import attrib, attrs
+ @attrs(cmp=True)
+ class DeprecatedTrue:
+ ...
+
+ @attrs(cmp=False)
+ class DeprecatedFalse:
+ ...
+
+ @attrs(cmp=False, eq=True) # E: Don't mix "cmp" with "eq" and "order"
+ class Mixed:
+ ...
+
+ @attrs(order=True, eq=False) # E: eq must be True if order is True
+ class Confused:
+ ...
+
+
+- case: testAttrsInheritance
+ main: |
+ import attr
+ @attr.s
+ class A:
+ a: int = attr.ib()
+ @attr.s
+ class B:
+ b: str = attr.ib()
+ @attr.s
+ class C(A, B):
+ c: bool = attr.ib()
+ reveal_type(C) # N: Revealed type is "def (a: builtins.int, b: builtins.str, c: builtins.bool) -> main.C"
+
+- case: testAttrsNestedInClasses
+ main: |
+ import attr
+ @attr.s
+ class C:
+ y = attr.ib()
+ @attr.s
+ class D:
+ x: int = attr.ib()
+ reveal_type(C) # N: Revealed type is "def (y: Any) -> main.C"
+ reveal_type(C.D) # N: Revealed type is "def (x: builtins.int) -> main.C.D"
+
+- case: testAttrsInheritanceOverride
+ main: |
+ import attr
+
+ @attr.s
+ class A:
+ a: int = attr.ib()
+ x: int = attr.ib()
+
+ @attr.s
+ class B(A):
+ b: str = attr.ib()
+ x: int = attr.ib(default=22)
+
+ @attr.s
+ class C(B):
+ c: bool = attr.ib() # No error here because the x below overwrites the x above.
+ x: int = attr.ib()
+
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, x: builtins.int) -> main.A"
+ reveal_type(B) # N: Revealed type is "def (a: builtins.int, b: builtins.str, x: builtins.int =) -> main.B"
+ reveal_type(C) # N: Revealed type is "def (a: builtins.int, b: builtins.str, c: builtins.bool, x: builtins.int) -> main.C"
+
+- case: testAttrsTypeEquals
+ main: |
+ import attr
+
+ @attr.s
+ class A:
+ a = attr.ib(type=int)
+ b = attr.ib(18, type=int)
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.int =) -> main.A"
+
+- case: testAttrsFrozen
+ main: |
+ import attr
+
+ @attr.s(frozen=True)
+ class A:
+ a = attr.ib()
+
+ a = A(5)
+ a.a = 16 # E: Property "a" defined in "A" is read-only
+- case: testAttrsNextGenFrozen
+ main: |
+ from attr import frozen, field
+
+ @frozen
+ class A:
+ a = field()
+
+ a = A(5)
+ a.a = 16 # E: Property "a" defined in "A" is read-only
+
+- case: testAttrsNextGenDetect
+ main: |
+ from attr import define, field
+
+ @define
+ class A:
+ a = field()
+
+ @define
+ class B:
+ a: int
+
+ @define
+ class C:
+ a: int = field()
+ b = field()
+
+ @define
+ class D:
+ a: int
+ b = field()
+
+ # TODO: Next Gen hasn't shipped with mypy yet so the following are wrong
+ reveal_type(A) # N: Revealed type is "def (a: Any) -> main.A"
+ reveal_type(B) # N: Revealed type is "def (a: builtins.int) -> main.B"
+ reveal_type(C) # N: Revealed type is "def (a: builtins.int, b: Any) -> main.C"
+ reveal_type(D) # N: Revealed type is "def (b: Any) -> main.D"
+
+- case: testAttrsDataClass
+ main: |
+ import attr
+ from typing import List, ClassVar
+ @attr.dataclass
+ class A:
+ a: int
+ _b: List[str]
+ c: str = '18'
+ _d: int = attr.ib(validator=None, default=18)
+ E = 7
+ F: ClassVar[int] = 22
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.list[builtins.str], c: builtins.str =, d: builtins.int =) -> main.A"
+ A(1, ['2'])
+
+- case: testAttrsTypeAlias
+ main: |
+ from typing import List
+ import attr
+ Alias = List[int]
+ @attr.s(auto_attribs=True)
+ class A:
+ Alias2 = List[str]
+ x: Alias
+ y: Alias2 = attr.ib()
+ reveal_type(A) # N: Revealed type is "def (x: builtins.list[builtins.int], y: builtins.list[builtins.str]) -> main.A"
+
+- case: testAttrsGeneric
+ main: |
+ from typing import TypeVar, Generic, List
+ import attr
+ T = TypeVar('T')
+ @attr.s(auto_attribs=True)
+ class A(Generic[T]):
+ x: List[T]
+ y: T = attr.ib()
+ def foo(self) -> List[T]:
+ return [self.y]
+ def bar(self) -> T:
+ return self.x[0]
+ def problem(self) -> T:
+ return self.x # E: Incompatible return value type (got "List[T]", expected "T")
+ reveal_type(A) # N: Revealed type is "def [T] (x: builtins.list[T`1], y: T`1) -> main.A[T`1]"
+ a = A([1], 2)
+ reveal_type(a) # N: Revealed type is "main.A[builtins.int*]"
+ reveal_type(a.x) # N: Revealed type is "builtins.list[builtins.int*]"
+ reveal_type(a.y) # N: Revealed type is "builtins.int*"
+
+ A(['str'], 7) # E: Cannot infer type argument 1 of "A"
+ A([1], '2') # E: Cannot infer type argument 1 of "A"
+
+
+- case: testAttrsUntypedGenericInheritance
+ main: |
+ from typing import Generic, TypeVar
+ import attr
+
+ T = TypeVar("T")
+
+ @attr.s(auto_attribs=True)
+ class Base(Generic[T]):
+ attr: T
+
+ @attr.s(auto_attribs=True)
+ class Sub(Base):
+ pass
+
+ sub = Sub(attr=1)
+ reveal_type(sub) # N: Revealed type is "main.Sub"
+ reveal_type(sub.attr) # N: Revealed type is "Any"
+ skip: True # Need to investigate why this is broken
+
+- case: testAttrsGenericInheritance
+ main: |
+ from typing import Generic, TypeVar
+ import attr
+
+ S = TypeVar("S")
+ T = TypeVar("T")
+
+ @attr.s(auto_attribs=True)
+ class Base(Generic[T]):
+ attr: T
+
+ @attr.s(auto_attribs=True)
+ class Sub(Base[S]):
+ pass
+
+ sub_int = Sub[int](attr=1)
+ reveal_type(sub_int) # N: Revealed type is "main.Sub[builtins.int*]"
+ reveal_type(sub_int.attr) # N: Revealed type is "builtins.int*"
+
+ sub_str = Sub[str](attr='ok')
+ reveal_type(sub_str) # N: Revealed type is "main.Sub[builtins.str*]"
+ reveal_type(sub_str.attr) # N: Revealed type is "builtins.str*"
+
+- case: testAttrsGenericInheritance2
+ main: |
+ from typing import Generic, TypeVar
+ import attr
+
+ T1 = TypeVar("T1")
+ T2 = TypeVar("T2")
+ T3 = TypeVar("T3")
+
+ @attr.s(auto_attribs=True)
+ class Base(Generic[T1, T2, T3]):
+ one: T1
+ two: T2
+ three: T3
+
+ @attr.s(auto_attribs=True)
+ class Sub(Base[int, str, float]):
+ pass
+
+ sub = Sub(one=1, two='ok', three=3.14)
+ reveal_type(sub) # N: Revealed type is "main.Sub"
+ reveal_type(sub.one) # N: Revealed type is "builtins.int*"
+ reveal_type(sub.two) # N: Revealed type is "builtins.str*"
+ reveal_type(sub.three) # N: Revealed type is "builtins.float*"
+ skip: True # Need to investigate why this is broken
+
+- case: testAttrsMultiGenericInheritance
+ main: |
+ from typing import Generic, TypeVar
+ import attr
+
+ T = TypeVar("T")
+
+ @attr.s(auto_attribs=True, eq=False)
+ class Base(Generic[T]):
+ base_attr: T
+
+ S = TypeVar("S")
+
+ @attr.s(auto_attribs=True, eq=False)
+ class Middle(Base[int], Generic[S]):
+ middle_attr: S
+
+ @attr.s(auto_attribs=True, eq=False)
+ class Sub(Middle[str]):
+ pass
+
+ reveal_type(Sub.__init__)
+
+ sub = Sub(base_attr=1, middle_attr='ok')
+ reveal_type(sub) # N: Revealed type is "main.Sub"
+ reveal_type(sub.base_attr) # N: Revealed type is "builtins.int*"
+ reveal_type(sub.middle_attr) # N: Revealed type is "builtins.str*"
+ skip: True # Need to investigate why this is broken
+
+- case: testAttrsGenericClassmethod
+ main: |
+ from typing import TypeVar, Generic, Optional
+ import attr
+ T = TypeVar('T')
+ @attr.s(auto_attribs=True)
+ class A(Generic[T]):
+ x: Optional[T]
+ @classmethod
+ def clsmeth(cls) -> None:
+ reveal_type(cls) # N: Revealed type is "Type[main.A[T`1]]"
+
+- case: testAttrsForwardReference
+ main: |
+ from typing import Optional
+ import attr
+ @attr.s(auto_attribs=True)
+ class A:
+ parent: 'B'
+
+ @attr.s(auto_attribs=True)
+ class B:
+ parent: Optional[A]
+
+ reveal_type(A) # N: Revealed type is "def (parent: main.B) -> main.A"
+ reveal_type(B) # N: Revealed type is "def (parent: Union[main.A, None]) -> main.B"
+ A(B(None))
+
+- case: testAttrsForwardReferenceInClass
+ main: |
+ from typing import Optional
+ import attr
+ @attr.s(auto_attribs=True)
+ class A:
+ parent: A.B
+
+ @attr.s(auto_attribs=True)
+ class B:
+ parent: Optional[A]
+
+ reveal_type(A) # N: Revealed type is "def (parent: main.A.B) -> main.A"
+ reveal_type(A.B) # N: Revealed type is "def (parent: Union[main.A, None]) -> main.A.B"
+ A(A.B(None))
+
+- case: testAttrsImporting
+ main: |
+ from helper import A
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.str) -> helper.A"
+ files:
+ - path: helper.py
+ content: |
+ import attr
+ @attr.s(auto_attribs=True)
+ class A:
+ a: int
+ b: str = attr.ib()
+
+- case: testAttrsOtherMethods
+ main: |
+ import attr
+ @attr.s(auto_attribs=True)
+ class A:
+ a: int
+ b: str = attr.ib()
+ @classmethod
+ def new(cls) -> A:
+ reveal_type(cls) # N: Revealed type is "Type[main.A]"
+ return cls(6, 'hello')
+ @classmethod
+ def bad(cls) -> A:
+ return cls(17) # E: Missing positional argument "b" in call to "A"
+ def foo(self) -> int:
+ return self.a
+ reveal_type(A) # N: Revealed type is "def (a: builtins.int, b: builtins.str) -> main.A"
+ a = A.new()
+ reveal_type(a.foo) # N: Revealed type is "def () -> builtins.int"
+
+- case: testAttrsOtherOverloads
+ main: |
+ import attr
+ from typing import overload, Union
+
+ @attr.s
+ class A:
+ a = attr.ib()
+ b = attr.ib(default=3)
+
+ @classmethod
+ def other(cls) -> str:
+ return "..."
+
+ @overload
+ @classmethod
+ def foo(cls, x: int) -> int: ...
+
+ @overload
+ @classmethod
+ def foo(cls, x: str) -> str: ...
+
+ @classmethod
+ def foo(cls, x: Union[int, str]) -> Union[int, str]:
+ reveal_type(cls) # N: Revealed type is "Type[main.A]"
+ reveal_type(cls.other()) # N: Revealed type is "builtins.str"
+ return x
+
+ reveal_type(A.foo(3)) # N: Revealed type is "builtins.int"
+ reveal_type(A.foo("foo")) # N: Revealed type is "builtins.str"
+
+- case: testAttrsDefaultDecorator
+ main: |
+ import attr
+ @attr.s
+ class C(object):
+ x: int = attr.ib(default=1)
+ y: int = attr.ib()
+ @y.default
+ def name_does_not_matter(self):
+ return self.x + 1
+ C()
+
+- case: testAttrsValidatorDecorator
+ main: |
+ import attr
+ @attr.s
+ class C(object):
+ x = attr.ib()
+ @x.validator
+ def check(self, attribute, value):
+ if value > 42:
+ raise ValueError("x must be smaller or equal to 42")
+ C(42)
+ C(43)
+
+- case: testAttrsLocalVariablesInClassMethod
+ main: |
+ import attr
+ @attr.s(auto_attribs=True)
+ class A:
+ a: int
+ b: int = attr.ib()
+ @classmethod
+ def new(cls, foo: int) -> A:
+ a = foo
+ b = a
+ return cls(a, b)
+
+- case: testAttrsUnionForward
+ main: |
+ import attr
+ from typing import Union, List
+
+ @attr.s(auto_attribs=True)
+ class A:
+ frob: List['AOrB']
+
+ class B:
+ pass
+
+ AOrB = Union[A, B]
+
+ reveal_type(A) # N: Revealed type is "def (frob: builtins.list[Union[main.A, main.B]]) -> main.A"
+ reveal_type(B) # N: Revealed type is "def () -> main.B"
+
+ A([B()])
+
+- case: testAttrsUsingConverter
+ main: |
+ import attr
+ import helper
+
+ def converter2(s:int) -> str:
+ return 'hello'
+
+ @attr.s
+ class C:
+ x: str = attr.ib(converter=helper.converter)
+ y: str = attr.ib(converter=converter2)
+
+ # Because of the converter the __init__ takes an int, but the variable is a str.
+ reveal_type(C) # N: Revealed type is "def (x: builtins.int, y: builtins.int) -> main.C"
+ reveal_type(C(15, 16).x) # N: Revealed type is "builtins.str"
+ files:
+ - path: helper.py
+ content: |
+ def converter(s:int) -> str:
+ return 'hello'
+
+- case: testAttrsUsingBadConverter
+ mypy_config:
+ strict_optional = False
+ main: |
+ import attr
+ from typing import overload
+ @overload
+ def bad_overloaded_converter(x: int, y: int) -> int:
+ ...
+ @overload
+ def bad_overloaded_converter(x: str, y: str) -> str:
+ ...
+ def bad_overloaded_converter(x, y=7):
+ return x
+ def bad_converter() -> str:
+ return ''
+ @attr.dataclass
+ class A:
+ bad: str = attr.ib(converter=bad_converter)
+ bad_overloaded: int = attr.ib(converter=bad_overloaded_converter)
+ reveal_type(A)
+ out: |
+ main:15: error: Cannot determine __init__ type from converter
+ main:15: error: Argument "converter" has incompatible type "Callable[[], str]"; expected "Callable[[Any], Any]"
+ main:16: error: Cannot determine __init__ type from converter
+ main:16: error: Argument "converter" has incompatible type overloaded function; expected "Callable[[Any], Any]"
+ main:17: note: Revealed type is "def (bad: Any, bad_overloaded: Any) -> main.A"
+
+- case: testAttrsUsingBadConverterReprocess
+ mypy_config:
+ strict_optional = False
+ main: |
+ import attr
+ from typing import overload
+ forward: 'A'
+ @overload
+ def bad_overloaded_converter(x: int, y: int) -> int:
+ ...
+ @overload
+ def bad_overloaded_converter(x: str, y: str) -> str:
+ ...
+ def bad_overloaded_converter(x, y=7):
+ return x
+ def bad_converter() -> str:
+ return ''
+ @attr.dataclass
+ class A:
+ bad: str = attr.ib(converter=bad_converter)
+ bad_overloaded: int = attr.ib(converter=bad_overloaded_converter)
+ reveal_type(A)
+ out: |
+ main:16: error: Cannot determine __init__ type from converter
+ main:16: error: Argument "converter" has incompatible type "Callable[[], str]"; expected "Callable[[Any], Any]"
+ main:17: error: Cannot determine __init__ type from converter
+ main:17: error: Argument "converter" has incompatible type overloaded function; expected "Callable[[Any], Any]"
+ main:18: note: Revealed type is "def (bad: Any, bad_overloaded: Any) -> main.A"
+
+- case: testAttrsUsingUnsupportedConverter
+ main: |
+ import attr
+ class Thing:
+ def do_it(self, int) -> str:
+ ...
+ thing = Thing()
+ def factory(default: int):
+ ...
+ @attr.s
+ class C:
+ x: str = attr.ib(converter=thing.do_it) # E: Unsupported converter, only named functions and types are currently supported
+ y: str = attr.ib(converter=lambda x: x) # E: Unsupported converter, only named functions and types are currently supported
+ z: str = attr.ib(converter=factory(8)) # E: Unsupported converter, only named functions and types are currently supported
+ reveal_type(C) # N: Revealed type is "def (x: Any, y: Any, z: Any) -> main.C"
+
+- case: testAttrsUsingConverterAndSubclass
+ main: |
+ import attr
+
+ def converter(s:int) -> str:
+ return 'hello'
+
+ @attr.s
+ class C:
+ x: str = attr.ib(converter=converter)
+
+ @attr.s
+ class A(C):
+ pass
+
+ # Because of the converter the __init__ takes an int, but the variable is a str.
+ reveal_type(A) # N: Revealed type is "def (x: builtins.int) -> main.A"
+ reveal_type(A(15).x) # N: Revealed type is "builtins.str"
+
+- case: testAttrsUsingConverterWithTypes
+ main: |
+ from typing import overload
+ import attr
+
+ @attr.dataclass
+ class A:
+ x: str
+
+ @attr.s
+ class C:
+ x: complex = attr.ib(converter=complex)
+ y: int = attr.ib(converter=int)
+ z: A = attr.ib(converter=A)
+
+ o = C("1", "2", "3")
+ o = C(1, 2, "3")
+
+- case: testAttrsCmpWithSubclasses
+ main: |
+ import attr
+ @attr.s
+ class A: pass
+ @attr.s
+ class B: pass
+ @attr.s
+ class C(A, B): pass
+ @attr.s
+ class D(A): pass
+
+ reveal_type(A.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+ reveal_type(B.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+ reveal_type(C.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+ reveal_type(D.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+
+ A() < A()
+ B() < B()
+ A() < B() # E: Unsupported operand types for < ("A" and "B")
+
+ C() > A()
+ C() > B()
+ C() > C()
+ C() > D() # E: Unsupported operand types for > ("C" and "D")
+
+ D() >= A()
+ D() >= B() # E: Unsupported operand types for >= ("D" and "B")
+ D() >= C() # E: Unsupported operand types for >= ("D" and "C")
+ D() >= D()
+
+ A() <= 1 # E: Unsupported operand types for <= ("A" and "int")
+ B() <= 1 # E: Unsupported operand types for <= ("B" and "int")
+ C() <= 1 # E: Unsupported operand types for <= ("C" and "int")
+ D() <= 1 # E: Unsupported operand types for <= ("D" and "int")
+
+- case: testAttrsComplexSuperclass
+ main: |
+ import attr
+ @attr.s
+ class C:
+ x: int = attr.ib(default=1)
+ y: int = attr.ib()
+ @y.default
+ def name_does_not_matter(self):
+ return self.x + 1
+ @attr.s
+ class A(C):
+ z: int = attr.ib(default=18)
+ reveal_type(C) # N: Revealed type is "def (x: builtins.int =, y: builtins.int =) -> main.C"
+ reveal_type(A) # N: Revealed type is "def (x: builtins.int =, y: builtins.int =, z: builtins.int =) -> main.A"
+
+- case: testAttrsMultiAssign
+ main: |
+ import attr
+ @attr.s
+ class A:
+ x, y, z = attr.ib(), attr.ib(type=int), attr.ib(default=17)
+ reveal_type(A) # N: Revealed type is "def (x: Any, y: builtins.int, z: Any =) -> main.A"
+
+- case: testAttrsMultiAssign2
+ main: |
+ import attr
+ @attr.s
+ class A:
+ x = y = z = attr.ib() # E: Too many names for one attribute
+
+- case: testAttrsPrivateInit
+ main: |
+ import attr
+ @attr.s
+ class C(object):
+ _x = attr.ib(init=False, default=42)
+ C()
+ C(_x=42) # E: Unexpected keyword argument "_x" for "C"
+
+- case: testAttrsAutoMustBeAll
+ main: |
+ import attr
+ @attr.s(auto_attribs=True)
+ class A:
+ a: int
+ b = 17
+ # The following forms are not allowed with auto_attribs=True
+ c = attr.ib() # E: Need type annotation for "c"
+ d, e = attr.ib(), attr.ib() # E: Need type annotation for "d" # E: Need type annotation for "e"
+ f = g = attr.ib() # E: Need type annotation for "f" # E: Need type annotation for "g"
+
+- case: testAttrsRepeatedName
+ main: |
+ import attr
+ @attr.s
+ class A:
+ a = attr.ib(default=8)
+ b = attr.ib()
+ a = attr.ib()
+ reveal_type(A) # N: Revealed type is "def (b: Any, a: Any) -> main.A"
+ @attr.s
+ class B:
+ a: int = attr.ib(default=8)
+ b: int = attr.ib()
+ a: int = attr.ib() # E: Name "a" already defined on line 10
+ reveal_type(B) # N: Revealed type is "def (b: builtins.int, a: builtins.int) -> main.B"
+ @attr.s(auto_attribs=True)
+ class C:
+ a: int = 8
+ b: int
+ a: int = attr.ib() # E: Name "a" already defined on line 16
+ reveal_type(C) # N: Revealed type is "def (a: builtins.int, b: builtins.int) -> main.C"
+
+- case: testAttrsNewStyleClassPy2
+ mypy_config:
+ python_version = 2.7
+ main: |
+ import attr
+ @attr.s
+ class Good(object):
+ pass
+ @attr.s
+ class Bad: # E: attrs only works with new-style classes
+ pass
+ skip: True # https://github.com/typeddjango/pytest-mypy-plugins/issues/47
+
+- case: testAttrsAutoAttribsPy2
+ mypy_config: |
+ python_version = 2.7
+ main: |
+ import attr
+ @attr.s(auto_attribs=True) # E: auto_attribs is not supported in Python 2
+ class A(object):
+ x = attr.ib()
+ skip: True # https://github.com/typeddjango/pytest-mypy-plugins/issues/47
+
+- case: testAttrsFrozenSubclass
+ main: |
+ import attr
+
+ @attr.dataclass
+ class NonFrozenBase:
+ a: int
+
+ @attr.dataclass(frozen=True)
+ class FrozenBase:
+ a: int
+
+ @attr.dataclass(frozen=True)
+ class FrozenNonFrozen(NonFrozenBase):
+ b: int
+
+ @attr.dataclass(frozen=True)
+ class FrozenFrozen(FrozenBase):
+ b: int
+
+ @attr.dataclass
+ class NonFrozenFrozen(FrozenBase):
+ b: int
+
+ # Make sure these are untouched
+ non_frozen_base = NonFrozenBase(1)
+ non_frozen_base.a = 17
+ frozen_base = FrozenBase(1)
+ frozen_base.a = 17 # E: Property "a" defined in "FrozenBase" is read-only
+
+ a = FrozenNonFrozen(1, 2)
+ a.a = 17 # E: Property "a" defined in "FrozenNonFrozen" is read-only
+ a.b = 17 # E: Property "b" defined in "FrozenNonFrozen" is read-only
+
+ b = FrozenFrozen(1, 2)
+ b.a = 17 # E: Property "a" defined in "FrozenFrozen" is read-only
+ b.b = 17 # E: Property "b" defined in "FrozenFrozen" is read-only
+
+ c = NonFrozenFrozen(1, 2)
+ c.a = 17 # E: Property "a" defined in "NonFrozenFrozen" is read-only
+ c.b = 17 # E: Property "b" defined in "NonFrozenFrozen" is read-only
+
+- case: testAttrsCallableAttributes
+ main: |
+ from typing import Callable
+ import attr
+ def blah(a: int, b: int) -> bool:
+ return True
+
+ @attr.s(auto_attribs=True)
+ class F:
+ _cb: Callable[[int, int], bool] = blah
+ def foo(self) -> bool:
+ return self._cb(5, 6)
+
+ @attr.s
+ class G:
+ _cb: Callable[[int, int], bool] = attr.ib(blah)
+ def foo(self) -> bool:
+ return self._cb(5, 6)
+
+ @attr.s(auto_attribs=True, frozen=True)
+ class FFrozen(F):
+ def bar(self) -> bool:
+ return self._cb(5, 6)
+
+- case: testAttrsWithFactory
+ main: |
+ from typing import List
+ import attr
+ def my_factory() -> int:
+ return 7
+ @attr.s
+ class A:
+ x: List[int] = attr.ib(factory=list)
+ y: int = attr.ib(factory=my_factory)
+ A()
+
+- case: testAttrsFactoryAndDefault
+ main: |
+ import attr
+ @attr.s
+ class A:
+ x: int = attr.ib(factory=int, default=7) # E: Can't pass both "default" and "factory".
+
+- case: testAttrsFactoryBadReturn
+ main: |
+ import attr
+ def my_factory() -> int:
+ return 7
+ @attr.s
+ class A:
+ x: int = attr.ib(factory=list) # E: Incompatible types in assignment (expression has type "List[_T]", variable has type "int")
+ y: str = attr.ib(factory=my_factory) # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+- case: testAttrsDefaultAndInit
+ main: |
+ import attr
+
+ @attr.s
+ class C:
+ a = attr.ib(init=False, default=42)
+ b = attr.ib() # Ok because previous attribute is init=False
+ c = attr.ib(default=44)
+ d = attr.ib(init=False) # Ok because this attribute is init=False
+ e = attr.ib() # E: Non-default attributes not allowed after default attributes.
+
+- case: testAttrsOptionalConverter
+ main: |
+ # flags: --strict-optional
+ import attr
+ from attr.converters import optional
+ from typing import Optional
+
+ def converter(s:int) -> str:
+ return 'hello'
+
+
+ @attr.s
+ class A:
+ y: Optional[int] = attr.ib(converter=optional(int))
+ z: Optional[str] = attr.ib(converter=optional(converter))
+
+
+ A(None, None)
+
+- case: testAttrsTypeVarNoCollision
+ main: |
+ from typing import TypeVar, Generic
+ import attr
+
+ T = TypeVar("T", bytes, str)
+
+ # Make sure the generated __le__ (and friends) don't use T for their arguments.
+ @attr.s(auto_attribs=True)
+ class A(Generic[T]):
+ v: T
+
+- case: testAttrsKwOnlyAttrib
+ main: |
+ import attr
+ @attr.s
+ class A:
+ a = attr.ib(kw_only=True)
+ A() # E: Missing named argument "a" for "A"
+ A(15) # E: Too many positional arguments for "A"
+ A(a=15)
+
+- case: testAttrsKwOnlyClass
+ main: |
+ import attr
+ @attr.s(kw_only=True, auto_attribs=True)
+ class A:
+ a: int
+ b: bool
+ A() # E: Missing named argument "a" for "A" # E: Missing named argument "b" for "A"
+ A(b=True, a=15)
+
+- case: testAttrsKwOnlyClassNoInit
+ main: |
+ import attr
+ @attr.s(kw_only=True)
+ class B:
+ a = attr.ib(init=False)
+ b = attr.ib()
+ B(b=True)
+
+- case: testAttrsKwOnlyWithDefault
+ main: |
+ import attr
+ @attr.s
+ class C:
+ a = attr.ib(0)
+ b = attr.ib(kw_only=True)
+ c = attr.ib(16, kw_only=True)
+ C(b=17)
+
+- case: testAttrsKwOnlyClassWithMixedDefaults
+ main: |
+ import attr
+ @attr.s(kw_only=True)
+ class D:
+ a = attr.ib(10)
+ b = attr.ib()
+ c = attr.ib(15)
+ D(b=17)
+
+- case: testAttrsKwOnlySubclass
+ main: |
+ import attr
+ @attr.s
+ class A2:
+ a = attr.ib(default=0)
+ @attr.s
+ class B2(A2):
+ b = attr.ib(kw_only=True)
+ B2(b=1)
+
+- case: testAttrsNonKwOnlyAfterKwOnly
+ main: |
+ import attr
+ @attr.s(kw_only=True)
+ class A:
+ a = attr.ib(default=0)
+ @attr.s
+ class B(A):
+ b = attr.ib()
+ @attr.s
+ class C:
+ a = attr.ib(kw_only=True)
+ b = attr.ib(15)
+
+- case: testAttrsKwOnlyPy2
+ mypy_config:
+ python_version=2.7
+ main: |
+ import attr
+ @attr.s(kw_only=True) # E: kw_only is not supported in Python 2
+ class A(object):
+ x = attr.ib()
+ @attr.s
+ class B(object):
+ x = attr.ib(kw_only=True) # E: kw_only is not supported in Python 2
+ skip: True # https://github.com/typeddjango/pytest-mypy-plugins/issues/47
+
+- case: testAttrsDisallowUntypedWorksForward
+ main: |
+ # flags: --disallow-untyped-defs
+ import attr
+ from typing import List
+
+ @attr.s
+ class B:
+ x: C = attr.ib()
+
+ class C(List[C]):
+ pass
+
+ reveal_type(B) # N: Revealed type is "def (x: main.C) -> main.B"
+
+- case: testDisallowUntypedWorksForwardBad
+ mypy_config:
+ disallow_untyped_defs = True
+ main: |
+ import attr
+
+ @attr.s
+ class B:
+ x = attr.ib() # E: Need type annotation for "x"
+
+ reveal_type(B) # N: Revealed type is "def (x: Any) -> main.B"
+
+- case: testAttrsDefaultDecoratorDeferred
+ main: |
+ defer: Yes
+
+ import attr
+ @attr.s
+ class C(object):
+ x: int = attr.ib(default=1)
+ y: int = attr.ib()
+ @y.default
+ def inc(self):
+ return self.x + 1
+
+ class Yes: ...
+
+- case: testAttrsValidatorDecoratorDeferred
+ main: |
+ defer: Yes
+
+ import attr
+ @attr.s
+ class C(object):
+ x = attr.ib()
+ @x.validator
+ def check(self, attribute, value):
+ if value > 42:
+ raise ValueError("x must be smaller or equal to 42")
+ C(42)
+ C(43)
+
+ class Yes: ...
+
+- case: testTypeInAttrUndefined
+ main: |
+ import attr
+
+ @attr.s
+ class C:
+ total = attr.ib(type=Bad) # E: Name "Bad" is not defined
+
+- case: testTypeInAttrForwardInRuntime
+ main: |
+ import attr
+
+ @attr.s
+ class C:
+ total = attr.ib(type=Forward)
+
+ reveal_type(C.total) # N: Revealed type is "main.Forward"
+ C('no') # E: Argument 1 to "C" has incompatible type "str"; expected "Forward"
+ class Forward: ...
+
+- case: testDefaultInAttrForward
+ main: |
+ import attr
+
+ @attr.s
+ class C:
+ total = attr.ib(default=func())
+
+ def func() -> int: ...
+
+ C()
+ C(1)
+ C(1, 2) # E: Too many arguments for "C"
+
+- case: testTypeInAttrUndefinedFrozen
+ main: |
+ import attr
+
+ @attr.s(frozen=True)
+ class C:
+ total = attr.ib(type=Bad) # E: Name "Bad" is not defined
+
+ C(0).total = 1 # E: Property "total" defined in "C" is read-only
+
+- case: testTypeInAttrDeferredStar
+ main: |
+ import lib
+ files:
+ - path: lib.py
+ content: |
+ import attr
+ MYPY = False
+ if MYPY: # Force deferral
+ from other import *
+
+ @attr.s
+ class C:
+ total = attr.ib(type=int)
+
+ C() # E: Missing positional argument "total" in call to "C"
+ C('no') # E: Argument 1 to "C" has incompatible type "str"; expected "int"
+ - path: other.py
+ content: |
+ import lib
+
+- case: testAttrsDefaultsMroOtherFile
+ main: |
+ import a
+ files:
+ - path: a.py
+ content: |
+ import attr
+ from b import A1, A2
+
+ @attr.s
+ class Asdf(A1, A2): # E: Non-default attributes not allowed after default attributes.
+ pass
+ - path: b.py
+ content: |
+ import attr
+
+ @attr.s
+ class A1:
+ a: str = attr.ib('test')
+
+ @attr.s
+ class A2:
+ b: int = attr.ib()
+
+- case: testAttrsInheritanceNoAnnotation
+ main: |
+ import attr
+
+ @attr.s
+ class A:
+ foo = attr.ib() # type: int
+
+ x = 0
+ @attr.s
+ class B(A):
+ foo = x
+
+ reveal_type(B) # N: Revealed type is "def (foo: builtins.int) -> main.B"
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_next_gen.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_next_gen.py
new file mode 100644
index 0000000000..8395f9c028
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_next_gen.py
@@ -0,0 +1,440 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Python 3-only integration tests for provisional next generation APIs.
+"""
+
+import re
+
+from functools import partial
+
+import pytest
+
+import attr as _attr # don't use it by accident
+import attrs
+
+
+@attrs.define
+class C:
+ x: str
+ y: int
+
+
+class TestNextGen:
+ def test_simple(self):
+ """
+ Instantiation works.
+ """
+ C("1", 2)
+
+ def test_no_slots(self):
+ """
+ slots can be deactivated.
+ """
+
+ @attrs.define(slots=False)
+ class NoSlots:
+ x: int
+
+ ns = NoSlots(1)
+
+ assert {"x": 1} == getattr(ns, "__dict__")
+
+ def test_validates(self):
+ """
+ Validators at __init__ and __setattr__ work.
+ """
+
+ @attrs.define
+ class Validated:
+ x: int = attrs.field(validator=attrs.validators.instance_of(int))
+
+ v = Validated(1)
+
+ with pytest.raises(TypeError):
+ Validated(None)
+
+ with pytest.raises(TypeError):
+ v.x = "1"
+
+ def test_no_order(self):
+ """
+ Order is off by default but can be added.
+ """
+ with pytest.raises(TypeError):
+ C("1", 2) < C("2", 3)
+
+ @attrs.define(order=True)
+ class Ordered:
+ x: int
+
+ assert Ordered(1) < Ordered(2)
+
+ def test_override_auto_attribs_true(self):
+ """
+ Don't guess if auto_attribs is set explicitly.
+
+ Having an unannotated attrs.ib/attrs.field fails.
+ """
+ with pytest.raises(attrs.exceptions.UnannotatedAttributeError):
+
+ @attrs.define(auto_attribs=True)
+ class ThisFails:
+ x = attrs.field()
+ y: int
+
+ def test_override_auto_attribs_false(self):
+ """
+ Don't guess if auto_attribs is set explicitly.
+
+ Annotated fields that don't carry an attrs.ib are ignored.
+ """
+
+ @attrs.define(auto_attribs=False)
+ class NoFields:
+ x: int
+ y: int
+
+ assert NoFields() == NoFields()
+
+ def test_auto_attribs_detect(self):
+ """
+ define correctly detects if a class lacks type annotations.
+ """
+
+ @attrs.define
+ class OldSchool:
+ x = attrs.field()
+
+ assert OldSchool(1) == OldSchool(1)
+
+ # Test with maybe_cls = None
+ @attrs.define()
+ class OldSchool2:
+ x = attrs.field()
+
+ assert OldSchool2(1) == OldSchool2(1)
+
+ def test_auto_attribs_detect_fields_and_annotations(self):
+ """
+ define infers auto_attribs=True if fields have type annotations.
+ """
+
+ @attrs.define
+ class NewSchool:
+ x: int
+ y: list = attrs.field()
+
+ @y.validator
+ def _validate_y(self, attribute, value):
+ if value < 0:
+ raise ValueError("y must be positive")
+
+ assert NewSchool(1, 1) == NewSchool(1, 1)
+ with pytest.raises(ValueError):
+ NewSchool(1, -1)
+ assert list(attrs.fields_dict(NewSchool).keys()) == ["x", "y"]
+
+ def test_auto_attribs_partially_annotated(self):
+ """
+ define infers auto_attribs=True if any type annotations are found.
+ """
+
+ @attrs.define
+ class NewSchool:
+ x: int
+ y: list
+ z = 10
+
+ # fields are defined for any annotated attributes
+ assert NewSchool(1, []) == NewSchool(1, [])
+ assert list(attrs.fields_dict(NewSchool).keys()) == ["x", "y"]
+
+ # while the unannotated attributes are left as class vars
+ assert NewSchool.z == 10
+ assert "z" in NewSchool.__dict__
+
+ def test_auto_attribs_detect_annotations(self):
+ """
+ define correctly detects if a class has type annotations.
+ """
+
+ @attrs.define
+ class NewSchool:
+ x: int
+
+ assert NewSchool(1) == NewSchool(1)
+
+ # Test with maybe_cls = None
+ @attrs.define()
+ class NewSchool2:
+ x: int
+
+ assert NewSchool2(1) == NewSchool2(1)
+
+ def test_exception(self):
+ """
+ Exceptions are detected and correctly handled.
+ """
+
+ @attrs.define
+ class E(Exception):
+ msg: str
+ other: int
+
+ with pytest.raises(E) as ei:
+ raise E("yolo", 42)
+
+ e = ei.value
+
+ assert ("yolo", 42) == e.args
+ assert "yolo" == e.msg
+ assert 42 == e.other
+
+ def test_frozen(self):
+ """
+ attrs.frozen freezes classes.
+ """
+
+ @attrs.frozen
+ class F:
+ x: str
+
+ f = F(1)
+
+ with pytest.raises(attrs.exceptions.FrozenInstanceError):
+ f.x = 2
+
+ def test_auto_detect_eq(self):
+ """
+ auto_detect=True works for eq.
+
+ Regression test for #670.
+ """
+
+ @attrs.define
+ class C:
+ def __eq__(self, o):
+ raise ValueError()
+
+ with pytest.raises(ValueError):
+ C() == C()
+
+ def test_subclass_frozen(self):
+ """
+ It's possible to subclass an `attrs.frozen` class and the frozen-ness
+ is inherited.
+ """
+
+ @attrs.frozen
+ class A:
+ a: int
+
+ @attrs.frozen
+ class B(A):
+ b: int
+
+ @attrs.define(on_setattr=attrs.setters.NO_OP)
+ class C(B):
+ c: int
+
+ assert B(1, 2) == B(1, 2)
+ assert C(1, 2, 3) == C(1, 2, 3)
+
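+ # Frozen-ness is inherited, so even C, which only swaps in on_setattr=NO_OP, stays immutable.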
+ with pytest.raises(attrs.exceptions.FrozenInstanceError):
+ A(1).a = 1
+
+ with pytest.raises(attrs.exceptions.FrozenInstanceError):
+ B(1, 2).a = 1
+
+ with pytest.raises(attrs.exceptions.FrozenInstanceError):
+ B(1, 2).b = 2
+
+ with pytest.raises(attrs.exceptions.FrozenInstanceError):
+ C(1, 2, 3).c = 3
+
+ def test_catches_frozen_on_setattr(self):
+ """
+ Passing frozen=True and on_setattr hooks is caught, even if the
+ immutability is inherited.
+ """
+
+ @attrs.define(frozen=True)
+ class A:
+ pass
+
+ with pytest.raises(
+ ValueError, match="Frozen classes can't use on_setattr."
+ ):
+
+ @attrs.define(frozen=True, on_setattr=attrs.setters.validate)
+ class B:
+ pass
+
+ with pytest.raises(
+ ValueError,
+ match=re.escape(
+ "Frozen classes can't use on_setattr "
+ "(frozen-ness was inherited)."
+ ),
+ ):
+
+ @attrs.define(on_setattr=attrs.setters.validate)
+ class C(A):
+ pass
+
+ @pytest.mark.parametrize(
+ "decorator",
+ [
+ partial(_attr.s, frozen=True, slots=True, auto_exc=True),
+ attrs.frozen,
+ attrs.define,
+ attrs.mutable,
+ ],
+ )
+ def test_discard_context(self, decorator):
+ """
+ raise from None works.
+
+ Regression test for #703.
+ """
+
+ @decorator
+ class MyException(Exception):
+ x: str = attrs.field()
+
+ with pytest.raises(MyException) as ei:
+ try:
+ raise ValueError()
+ except ValueError:
+ raise MyException("foo") from None
+
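+ # "from None" must suppress exception chaining, leaving __cause__ as None (see #703).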
+ assert "foo" == ei.value.x
+ assert ei.value.__cause__ is None
+
+ def test_converts_and_validates_by_default(self):
+ """
+ If no on_setattr is set, assume setters.convert, setters.validate.
+ """
+
+ @attrs.define
+ class C:
+ x: int = attrs.field(converter=int)
+
+ @x.validator
+ def _v(self, _, value):
+ if value < 10:
+ raise ValueError("must be >=10")
+
+ inst = C(10)
+
+ # Converts
+ inst.x = "11"
+
+ assert 11 == inst.x
+
+ # Validates
+ with pytest.raises(ValueError, match="must be >=10"):
+ inst.x = "9"
+
+ def test_mro_ng(self):
+ """
+ Attributes and methods are looked up the same way in NG by default.
+
+ See #428
+ """
+
+ @attrs.define
+ class A:
+
+ x: int = 10
+
+ def xx(self):
+ return 10
+
+ @attrs.define
+ class B(A):
+ y: int = 20
+
+ @attrs.define
+ class C(A):
+ x: int = 50
+
+ def xx(self):
+ return 50
+
+ @attrs.define
+ class D(B, C):
+ pass
+
+ d = D()
+
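+ # D's MRO is D -> B -> C -> A, so both the x default and the xx() method resolve through C.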
+ assert d.x == d.xx()
+
+
+class TestAsTuple:
+ def test_smoke(self):
+ """
+ `attrs.astuple` only changes defaults, so we just call it and compare.
+ """
+ inst = C("foo", 42)
+
+ assert attrs.astuple(inst) == _attr.astuple(inst)
+
+
+class TestAsDict:
+ def test_smoke(self):
+ """
+ `attrs.asdict` only changes defaults, so we just call it and compare.
+ """
+ inst = C("foo", {(1,): 42})
+
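+ # The tuple dict key relies on retain_collection_types, which attrs.asdict enables by default.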
+ assert attrs.asdict(inst) == _attr.asdict(
+ inst, retain_collection_types=True
+ )
+
+
+class TestImports:
+ """
+ Verify that our re-imports and mirroring work.
+ """
+
+ def test_converters(self):
+ """
+ Importing from attrs.converters works.
+ """
+ from attrs.converters import optional
+
+ assert optional is _attr.converters.optional
+
+ def test_exceptions(self):
+ """
+ Importing from attrs.exceptions works.
+ """
+ from attrs.exceptions import FrozenError
+
+ assert FrozenError is _attr.exceptions.FrozenError
+
+ def test_filters(self):
+ """
+ Importing from attrs.filters works.
+ """
+ from attrs.filters import include
+
+ assert include is _attr.filters.include
+
+ def test_setters(self):
+ """
+ Importing from attrs.setters works.
+ """
+ from attrs.setters import pipe
+
+ assert pipe is _attr.setters.pipe
+
+ def test_validators(self):
+ """
+ Importing from attrs.validators works.
+ """
+ from attrs.validators import and_
+
+ assert and_ is _attr.validators.and_
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_pattern_matching.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_pattern_matching.py
new file mode 100644
index 0000000000..590804a8a7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_pattern_matching.py
@@ -0,0 +1,101 @@
+# SPDX-License-Identifier: MIT
+
+# Keep this file SHORT, until Black can handle it.
+import pytest
+
+import attr
+
+
+class TestPatternMatching:
+ """
+ Pattern matching syntax test cases.
+ """
+
+ @pytest.mark.parametrize("dec", [attr.s, attr.define, attr.frozen])
+ def test_simple_match_case(self, dec):
+ """
+ Simple match case statement works as expected with all class
+ decorators.
+ """
+
+ @dec
+ class C(object):
+ a = attr.ib()
+
+ assert ("a",) == C.__match_args__
+
+ matched = False
+ c = C(a=1)
+ match c:
+ case C(a):
+ matched = True
+
+ assert matched
+ assert 1 == a
+
+ def test_explicit_match_args(self):
+ """
+ Does not overwrite a manually set empty __match_args__.
+ """
+
+ ma = ()
+
+ @attr.define
+ class C:
+ a = attr.field()
+ __match_args__ = ma
+
+ c = C(a=1)
+
+ msg = r"C\(\) accepts 0 positional sub-patterns \(1 given\)"
+ with pytest.raises(TypeError, match=msg):
+ match c:
+ case C(_):
+ pass
+
+ def test_match_args_kw_only(self):
+ """
+ kw_only classes don't generate __match_args__.
+ kw_only fields are not included in __match_args__.
+ """
+
+ @attr.define
+ class C:
+ a = attr.field(kw_only=True)
+ b = attr.field()
+
+ assert ("b",) == C.__match_args__
+
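+ # Only the non-kw_only field "b" is positional, so two positional sub-patterns are too many.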
+ c = C(a=1, b=1)
+ msg = r"C\(\) accepts 1 positional sub-pattern \(2 given\)"
+ with pytest.raises(TypeError, match=msg):
+ match c:
+ case C(a, b):
+ pass
+
+ found = False
+ match c:
+ case C(b, a=a):
+ found = True
+
+ assert found
+
+ @attr.define(kw_only=True)
+ class C:
+ a = attr.field()
+ b = attr.field()
+
+ c = C(a=1, b=1)
+ msg = r"C\(\) accepts 0 positional sub-patterns \(2 given\)"
+ with pytest.raises(TypeError, match=msg):
+ match c:
+ case C(a, b):
+ pass
+
+ found = False
+ match c:
+ case C(a=a, b=b):
+ found = True
+
+ assert found
+ assert (1, 1) == (a, b)
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_pyright.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_pyright.py
new file mode 100644
index 0000000000..c30dcc5cb1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_pyright.py
@@ -0,0 +1,71 @@
+# SPDX-License-Identifier: MIT
+
+import json
+import os.path
+import shutil
+import subprocess
+import sys
+
+import pytest
+
+import attr
+
+
+if sys.version_info < (3, 6):
+ _found_pyright = False
+else:
+ _found_pyright = shutil.which("pyright")
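+ # shutil.which() returns the executable path or None, so this doubles as a truthy "found" flag.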
+
+
+@attr.s(frozen=True)
+class PyrightDiagnostic(object):
+ severity = attr.ib()
+ message = attr.ib()
+
+
+@pytest.mark.skipif(not _found_pyright, reason="Requires pyright.")
+def test_pyright_baseline():
+ """The __dataclass_transform__ decorator allows pyright to determine
+ attrs decorated class types.
+ """
+
+ test_file = os.path.dirname(__file__) + "/dataclass_transform_example.py"
+
+ pyright = subprocess.run(
+ ["pyright", "--outputjson", str(test_file)], capture_output=True
+ )
+ pyright_result = json.loads(pyright.stdout)
+
+ diagnostics = set(
+ PyrightDiagnostic(d["severity"], d["message"])
+ for d in pyright_result["generalDiagnostics"]
+ )
+
+ # Expected diagnostics as per pyright 1.1.135
+ expected_diagnostics = {
+ PyrightDiagnostic(
+ severity="information",
+ message='Type of "Define.__init__" is'
+ ' "(self: Define, a: str, b: int) -> None"',
+ ),
+ PyrightDiagnostic(
+ severity="information",
+ message='Type of "DefineConverter.__init__" is '
+ '"(self: DefineConverter, with_converter: int) -> None"',
+ ),
+ PyrightDiagnostic(
+ severity="information",
+ message='Type of "d.a" is "Literal[\'new\']"',
+ ),
+ PyrightDiagnostic(
+ severity="error",
+ message='Cannot assign member "a" for type '
+ '"FrozenDefine"\n\xa0\xa0"FrozenDefine" is frozen',
+ ),
+ PyrightDiagnostic(
+ severity="information",
+ message='Type of "d2.a" is "Literal[\'new\']"',
+ ),
+ }
+
+ assert diagnostics == expected_diagnostics
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_setattr.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_setattr.py
new file mode 100644
index 0000000000..aaedde5746
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_setattr.py
@@ -0,0 +1,437 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import pickle
+
+import pytest
+
+import attr
+
+from attr import setters
+from attr._compat import PY2
+from attr.exceptions import FrozenAttributeError
+from attr.validators import instance_of, matches_re
+
+
+@attr.s(frozen=True)
+class Frozen(object):
+ x = attr.ib()
+
+
+@attr.s
+class WithOnSetAttrHook(object):
+ x = attr.ib(on_setattr=lambda *args: None)
+
+
+class TestSetAttr(object):
+ def test_change(self):
+ """
+ The return value of a hook overwrites the value, but hooks are not run
+ on __init__.
+ """
+
+ def hook(*a, **kw):
+ return "hooked!"
+
+ @attr.s
+ class Hooked(object):
+ x = attr.ib(on_setattr=hook)
+ y = attr.ib()
+
+ h = Hooked("x", "y")
+
+ assert "x" == h.x
+ assert "y" == h.y
+
+ h.x = "xxx"
+ h.y = "yyy"
+
+ assert "yyy" == h.y
+ assert "hooked!" == h.x
+
+ def test_frozen_attribute(self):
+ """
+ Frozen attributes raise FrozenAttributeError, others are not affected.
+ """
+
+ @attr.s
+ class PartiallyFrozen(object):
+ x = attr.ib(on_setattr=setters.frozen)
+ y = attr.ib()
+
+ pf = PartiallyFrozen("x", "y")
+
+ pf.y = "yyy"
+
+ assert "yyy" == pf.y
+
+ with pytest.raises(FrozenAttributeError):
+ pf.x = "xxx"
+
+ assert "x" == pf.x
+
+ @pytest.mark.parametrize(
+ "on_setattr",
+ [setters.validate, [setters.validate], setters.pipe(setters.validate)],
+ )
+ def test_validator(self, on_setattr):
+ """
+ Validators are run and they don't alter the value.
+ """
+
+ @attr.s(on_setattr=on_setattr)
+ class ValidatedAttribute(object):
+ x = attr.ib()
+ y = attr.ib(validator=[instance_of(str), matches_re("foo.*qux")])
+
+ va = ValidatedAttribute(42, "foobarqux")
+
+ with pytest.raises(TypeError) as ei:
+ va.y = 42
+
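+ # The failed assignment must leave the previous value untouched.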
+ assert "foobarqux" == va.y
+
+ assert ei.value.args[0].startswith("'y' must be <")
+
+ with pytest.raises(ValueError) as ei:
+ va.y = "quxbarfoo"
+
+ assert ei.value.args[0].startswith("'y' must match regex '")
+
+ assert "foobarqux" == va.y
+
+ va.y = "foobazqux"
+
+ assert "foobazqux" == va.y
+
+ def test_pipe(self):
+ """
+ Multiple hooks are possible; in that case the last return value is
+ used. They can be supplied using the pipe function or by passing a
+ list to on_setattr.
+ """
+
+ s = [setters.convert, lambda _, __, nv: nv + 1]
+
+ @attr.s
+ class Piped(object):
+ x1 = attr.ib(converter=int, on_setattr=setters.pipe(*s))
+ x2 = attr.ib(converter=int, on_setattr=s)
+
+ p = Piped("41", "22")
+
+ assert 41 == p.x1
+ assert 22 == p.x2
+
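+ # On assignment the on_setattr hooks run: convert to int first, then add 1.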
+ p.x1 = "41"
+ p.x2 = "22"
+
+ assert 42 == p.x1
+ assert 23 == p.x2
+
+ def test_make_class(self):
+ """
+ on_setattr of make_class gets forwarded.
+ """
+ C = attr.make_class("C", {"x": attr.ib()}, on_setattr=setters.frozen)
+
+ c = C(1)
+
+ with pytest.raises(FrozenAttributeError):
+ c.x = 2
+
+ def test_no_validator_no_converter(self):
+ """
+ validate and convert tolerate missing validators and converters.
+ """
+
+ @attr.s(on_setattr=[setters.convert, setters.validate])
+ class C(object):
+ x = attr.ib()
+
+ c = C(1)
+
+ c.x = 2
+
+ assert 2 == c.x
+
+ def test_validate_respects_run_validators_config(self):
+ """
+ If running validators is switched off, validate doesn't run them.
+ """
+
+ @attr.s(on_setattr=setters.validate)
+ class C(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+
+ c = C(1)
+
+ attr.set_run_validators(False)
+
+ c.x = "1"
+
+ assert "1" == c.x
+
+ attr.set_run_validators(True)
+
+ with pytest.raises(TypeError) as ei:
+ c.x = "1"
+
+ assert ei.value.args[0].startswith("'x' must be <")
+
+ def test_frozen_on_setattr_class_is_caught(self):
+ """
+ @attr.s(on_setattr=X, frozen=True) raises a ValueError.
+ """
+ with pytest.raises(ValueError) as ei:
+
+ @attr.s(frozen=True, on_setattr=setters.validate)
+ class C(object):
+ x = attr.ib()
+
+ assert "Frozen classes can't use on_setattr." == ei.value.args[0]
+
+ def test_frozen_on_setattr_attribute_is_caught(self):
+ """
+ attr.ib(on_setattr=X) on a frozen class raises a ValueError.
+ """
+
+ with pytest.raises(ValueError) as ei:
+
+ @attr.s(frozen=True)
+ class C(object):
+ x = attr.ib(on_setattr=setters.validate)
+
+ assert "Frozen classes can't use on_setattr." == ei.value.args[0]
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_setattr_reset_if_no_custom_setattr(self, slots):
+ """
+ If a class with an active setattr is subclassed and no new setattr
+ is generated, the __setattr__ is set to object.__setattr__.
+
+ We do the double test because of Python 2.
+ """
+
+ def boom(*args):
+ pytest.fail("Must not be called.")
+
+ @attr.s
+ class Hooked(object):
+ x = attr.ib(on_setattr=boom)
+
+ @attr.s(slots=slots)
+ class NoHook(WithOnSetAttrHook):
+ x = attr.ib()
+
+ if not PY2:
+ assert NoHook.__setattr__ == object.__setattr__
+
+ assert 1 == NoHook(1).x
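+ # Only classes that actually need their own attrs-generated __setattr__ keep __attrs_own_setattr__ truthy.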
+ assert Hooked.__attrs_own_setattr__
+ assert not NoHook.__attrs_own_setattr__
+ assert WithOnSetAttrHook.__attrs_own_setattr__
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_setattr_inherited_do_not_reset(self, slots):
+ """
+ If we inherit a __setattr__ that has been written by the user, we must
+ not reset it unless necessary.
+ """
+
+ class A(object):
+ """
+ Not an attrs class on purpose to prevent accidental resets that
+ would render the asserts meaningless.
+ """
+
+ def __setattr__(self, *args):
+ pass
+
+ @attr.s(slots=slots)
+ class B(A):
+ pass
+
+ assert B.__setattr__ == A.__setattr__
+
+ @attr.s(slots=slots)
+ class C(B):
+ pass
+
+ assert C.__setattr__ == A.__setattr__
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_pickling_retains_attrs_own(self, slots):
+ """
+ Pickling/Unpickling does not lose ownership information about
+ __setattr__.
+ """
+ i = WithOnSetAttrHook(1)
+
+ assert True is i.__attrs_own_setattr__
+
+ i2 = pickle.loads(pickle.dumps(i))
+
+ assert True is i2.__attrs_own_setattr__
+
+ WOSAH = pickle.loads(pickle.dumps(WithOnSetAttrHook))
+
+ assert True is WOSAH.__attrs_own_setattr__
+
+ def test_slotted_class_can_have_custom_setattr(self):
+ """
+ A slotted class can define a custom setattr and it doesn't get
+ overwritten.
+
+ Regression test for #680.
+ """
+
+ @attr.s(slots=True)
+ class A(object):
+ def __setattr__(self, key, value):
+ raise SystemError
+
+ with pytest.raises(SystemError):
+ A().x = 1
+
+ @pytest.mark.xfail(raises=attr.exceptions.FrozenAttributeError)
+ def test_slotted_confused(self):
+ """
+ If we have an in-between non-attrs class, setattr reset detection
+ should still work, but currently doesn't.
+
+ It works with dict classes because we can look at the finished class and
+ patch it. With slotted classes we have to deduce it ourselves.
+ """
+
+ @attr.s(slots=True)
+ class A(object):
+ x = attr.ib(on_setattr=setters.frozen)
+
+ class B(A):
+ pass
+
+ @attr.s(slots=True)
+ class C(B):
+ x = attr.ib()
+
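+ # The frozen hook inherited from A still fires here, hence the xfail above.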
+ C(1).x = 2
+
+
+@pytest.mark.skipif(PY2, reason="Python 3-only.")
+class TestSetAttrNoPy2(object):
+ """
+ __setattr__ tests for Py3+ to avoid the skip repetition.
+ """
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_setattr_auto_detect_if_no_custom_setattr(self, slots):
+ """
+ It's possible to remove the on_setattr hook from an attribute and
+ therefore write a custom __setattr__.
+ """
+ assert 1 == WithOnSetAttrHook(1).x
+
+ @attr.s(auto_detect=True, slots=slots)
+ class RemoveNeedForOurSetAttr(WithOnSetAttrHook):
+ x = attr.ib()
+
+ def __setattr__(self, name, val):
+ object.__setattr__(self, name, val * 2)
+
+ i = RemoveNeedForOurSetAttr(1)
+
+ assert not RemoveNeedForOurSetAttr.__attrs_own_setattr__
+ assert 2 == i.x
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_setattr_restore_respects_auto_detect(self, slots):
+ """
+ If __setattr__ should be restored but the user supplied its own and
+ set auto_detect, leave it alone.
+ """
+
+ @attr.s(auto_detect=True, slots=slots)
+ class CustomSetAttr:
+ def __setattr__(self, _, __):
+ pass
+
+ assert CustomSetAttr.__setattr__ != object.__setattr__
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_setattr_auto_detect_frozen(self, slots):
+ """
+ frozen=True together with a detected custom __setattr__ are rejected.
+ """
+ with pytest.raises(
+ ValueError, match="Can't freeze a class with a custom __setattr__."
+ ):
+
+ @attr.s(auto_detect=True, slots=slots, frozen=True)
+ class CustomSetAttr(Frozen):
+ def __setattr__(self, _, __):
+ pass
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_setattr_auto_detect_on_setattr(self, slots):
+ """
+ on_setattr attributes together with a detected custom __setattr__ are
+ rejected.
+ """
+ with pytest.raises(
+ ValueError,
+ match="Can't combine custom __setattr__ with on_setattr hooks.",
+ ):
+
+ @attr.s(auto_detect=True, slots=slots)
+ class HookAndCustomSetAttr(object):
+ x = attr.ib(on_setattr=lambda *args: None)
+
+ def __setattr__(self, _, __):
+ pass
+
+ @pytest.mark.parametrize("a_slots", [True, False])
+ @pytest.mark.parametrize("b_slots", [True, False])
+ @pytest.mark.parametrize("c_slots", [True, False])
+ def test_setattr_inherited_do_not_reset_intermediate(
+ self, a_slots, b_slots, c_slots
+ ):
+ """
+ A user-provided intermediate __setattr__ is not reset to
+ object.__setattr__.
+
+ This can only work on Python 3+ with auto_detect activated, such that
+ attrs can know that there is a user-provided __setattr__.
+ """
+
+ @attr.s(slots=a_slots)
+ class A(object):
+ x = attr.ib(on_setattr=setters.frozen)
+
+ @attr.s(slots=b_slots, auto_detect=True)
+ class B(A):
+ x = attr.ib(on_setattr=setters.NO_OP)
+
+ def __setattr__(self, key, value):
+ raise SystemError
+
+ @attr.s(slots=c_slots)
+ class C(B):
+ pass
+
+ assert getattr(A, "__attrs_own_setattr__", False) is True
+ assert getattr(B, "__attrs_own_setattr__", False) is False
+ assert getattr(C, "__attrs_own_setattr__", False) is False
+
+ with pytest.raises(SystemError):
+ C(1).x = 3
+
+ def test_docstring(self):
+ """
+ Generated __setattr__ has a useful docstring.
+ """
+ assert (
+ "Method generated by attrs for class WithOnSetAttrHook."
+ == WithOnSetAttrHook.__setattr__.__doc__
+ )
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_slots.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_slots.py
new file mode 100644
index 0000000000..baf9a40ddb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_slots.py
@@ -0,0 +1,740 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Unit tests for slots-related functionality.
+"""
+
+import pickle
+import sys
+import types
+import weakref
+
+import pytest
+
+import attr
+
+from attr._compat import PY2, PYPY, just_warn, make_set_closure_cell
+
+
+# Pympler doesn't work on PyPy.
+try:
+ from pympler.asizeof import asizeof
+
+ has_pympler = True
+except BaseException: # Won't be an import error.
+ has_pympler = False
+
+
+@attr.s
+class C1(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+ y = attr.ib()
+
+ def method(self):
+ return self.x
+
+ @classmethod
+ def classmethod(cls):
+ return "clsmethod"
+
+ @staticmethod
+ def staticmethod():
+ return "staticmethod"
+
+ if not PY2:
+
+ def my_class(self):
+ return __class__
+
+ def my_super(self):
+ """Just to test out the no-arg super."""
+ return super().__repr__()
+
+
+@attr.s(slots=True, hash=True)
+class C1Slots(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+ y = attr.ib()
+
+ def method(self):
+ return self.x
+
+ @classmethod
+ def classmethod(cls):
+ return "clsmethod"
+
+ @staticmethod
+ def staticmethod():
+ return "staticmethod"
+
+ if not PY2:
+
+ def my_class(self):
+ return __class__
+
+ def my_super(self):
+ """Just to test out the no-arg super."""
+ return super().__repr__()
+
+
+def test_slots_being_used():
+ """
+ The class is really using __slots__.
+ """
+ non_slot_instance = C1(x=1, y="test")
+ slot_instance = C1Slots(x=1, y="test")
+
+ assert "__dict__" not in dir(slot_instance)
+ assert "__slots__" in dir(slot_instance)
+
+ assert "__dict__" in dir(non_slot_instance)
+ assert "__slots__" not in dir(non_slot_instance)
+
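+ # __weakref__ gets a slot of its own because no base class already provides one.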
+ assert set(["__weakref__", "x", "y"]) == set(slot_instance.__slots__)
+
+ if has_pympler:
+ assert asizeof(slot_instance) < asizeof(non_slot_instance)
+
+ non_slot_instance.t = "test"
+ with pytest.raises(AttributeError):
+ slot_instance.t = "test"
+
+ assert 1 == non_slot_instance.method()
+ assert 1 == slot_instance.method()
+
+ assert attr.fields(C1Slots) == attr.fields(C1)
+ assert attr.asdict(slot_instance) == attr.asdict(non_slot_instance)
+
+
+def test_basic_attr_funcs():
+ """
+ Comparison, `__eq__`, `__hash__`, `__repr__`, `attrs.asdict` work.
+ """
+ a = C1Slots(x=1, y=2)
+ b = C1Slots(x=1, y=3)
+ a_ = C1Slots(x=1, y=2)
+
+ # Comparison.
+ assert b > a
+
+ assert a_ == a
+
+ # Hashing.
+ hash(b) # Just to assert it doesn't raise.
+
+ # Repr.
+ assert "C1Slots(x=1, y=2)" == repr(a)
+
+ assert {"x": 1, "y": 2} == attr.asdict(a)
+
+
+def test_inheritance_from_nonslots():
+ """
+ Inheritance from a non-slotted class works.
+
+ Note that a slotted class inheriting from an ordinary class loses most of
+ the benefits of slotted classes, but it should still work.
+ """
+
+ @attr.s(slots=True, hash=True)
+ class C2Slots(C1):
+ z = attr.ib()
+
+ c2 = C2Slots(x=1, y=2, z="test")
+
+ assert 1 == c2.x
+ assert 2 == c2.y
+ assert "test" == c2.z
+
+ c2.t = "test" # This will work, using the base class.
+
+ assert "test" == c2.t
+
+ assert 1 == c2.method()
+ assert "clsmethod" == c2.classmethod()
+ assert "staticmethod" == c2.staticmethod()
+
+ assert set(["z"]) == set(C2Slots.__slots__)
+
+ c3 = C2Slots(x=1, y=3, z="test")
+
+ assert c3 > c2
+
+ c2_ = C2Slots(x=1, y=2, z="test")
+
+ assert c2 == c2_
+
+ assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
+
+ hash(c2) # Just to assert it doesn't raise.
+
+ assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
+
+
+def test_nonslots_these():
+ """
+ Enhancing a dict class using 'these' works.
+
+ This will actually *replace* the class with another one, using slots.
+ """
+
+ class SimpleOrdinaryClass(object):
+ def __init__(self, x, y, z):
+ self.x = x
+ self.y = y
+ self.z = z
+
+ def method(self):
+ return self.x
+
+ @classmethod
+ def classmethod(cls):
+ return "clsmethod"
+
+ @staticmethod
+ def staticmethod():
+ return "staticmethod"
+
+ C2Slots = attr.s(
+ these={"x": attr.ib(), "y": attr.ib(), "z": attr.ib()},
+ init=False,
+ slots=True,
+ hash=True,
+ )(SimpleOrdinaryClass)
+
+ c2 = C2Slots(x=1, y=2, z="test")
+ assert 1 == c2.x
+ assert 2 == c2.y
+ assert "test" == c2.z
+ with pytest.raises(AttributeError):
+ c2.t = "test" # We have slots now.
+
+ assert 1 == c2.method()
+ assert "clsmethod" == c2.classmethod()
+ assert "staticmethod" == c2.staticmethod()
+
+ assert set(["__weakref__", "x", "y", "z"]) == set(C2Slots.__slots__)
+
+ c3 = C2Slots(x=1, y=3, z="test")
+ assert c3 > c2
+ c2_ = C2Slots(x=1, y=2, z="test")
+ assert c2 == c2_
+
+ assert "SimpleOrdinaryClass(x=1, y=2, z='test')" == repr(c2)
+
+ hash(c2) # Just to assert it doesn't raise.
+
+ assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
+
+
+def test_inheritance_from_slots():
+ """
+ Inheriting from an attrs slotted class works.
+ """
+
+ @attr.s(slots=True, hash=True)
+ class C2Slots(C1Slots):
+ z = attr.ib()
+
+ @attr.s(slots=True, hash=True)
+ class C2(C1):
+ z = attr.ib()
+
+ c2 = C2Slots(x=1, y=2, z="test")
+ assert 1 == c2.x
+ assert 2 == c2.y
+ assert "test" == c2.z
+
+ assert set(["z"]) == set(C2Slots.__slots__)
+
+ assert 1 == c2.method()
+ assert "clsmethod" == c2.classmethod()
+ assert "staticmethod" == c2.staticmethod()
+
+ with pytest.raises(AttributeError):
+ c2.t = "test"
+
+ non_slot_instance = C2(x=1, y=2, z="test")
+ if has_pympler:
+ assert asizeof(c2) < asizeof(non_slot_instance)
+
+ c3 = C2Slots(x=1, y=3, z="test")
+ assert c3 > c2
+ c2_ = C2Slots(x=1, y=2, z="test")
+ assert c2 == c2_
+
+ assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
+
+ hash(c2) # Just to assert it doesn't raise.
+
+ assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
+
+
+def test_inheritance_from_slots_with_attribute_override():
+ """
+ Inheriting from a slotted class doesn't re-create existing slots
+ """
+
+ class HasXSlot(object):
+ __slots__ = ("x",)
+
+ @attr.s(slots=True, hash=True)
+ class C2Slots(C1Slots):
+ # y re-defined here but it shouldn't get a slot
+ y = attr.ib()
+ z = attr.ib()
+
+ @attr.s(slots=True, hash=True)
+ class NonAttrsChild(HasXSlot):
+ # Parent class has slot for "x" already, so we skip it
+ x = attr.ib()
+ y = attr.ib()
+ z = attr.ib()
+
+ c2 = C2Slots(1, 2, "test")
+ assert 1 == c2.x
+ assert 2 == c2.y
+ assert "test" == c2.z
+
+ assert {"z"} == set(C2Slots.__slots__)
+
+ na = NonAttrsChild(1, 2, "test")
+ assert 1 == na.x
+ assert 2 == na.y
+ assert "test" == na.z
+
+ assert {"__weakref__", "y", "z"} == set(NonAttrsChild.__slots__)
+
+
+def test_inherited_slot_reuses_slot_descriptor():
+ """
+ We reuse the slot descriptor for an attr.ib defined in a slotted attr.s class.
+ """
+
+ class HasXSlot(object):
+ __slots__ = ("x",)
+
+ class OverridesX(HasXSlot):
+ @property
+ def x(self):
+ return None
+
+ @attr.s(slots=True)
+ class Child(OverridesX):
+ x = attr.ib()
+
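+ # attrs reuses the slot descriptor inherited from HasXSlot instead of creating a new slot on Child.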
+ assert Child.x is not OverridesX.x
+ assert Child.x is HasXSlot.x
+
+ c = Child(1)
+ assert 1 == c.x
+ assert set() == set(Child.__slots__)
+
+ ox = OverridesX()
+ assert ox.x is None
+
+
+def test_bare_inheritance_from_slots():
+ """
+ Inheriting from a bare attrs slotted class works.
+ """
+
+ @attr.s(
+ init=False, eq=False, order=False, hash=False, repr=False, slots=True
+ )
+ class C1BareSlots(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+ y = attr.ib()
+
+ def method(self):
+ return self.x
+
+ @classmethod
+ def classmethod(cls):
+ return "clsmethod"
+
+ @staticmethod
+ def staticmethod():
+ return "staticmethod"
+
+ @attr.s(init=False, eq=False, order=False, hash=False, repr=False)
+ class C1Bare(object):
+ x = attr.ib(validator=attr.validators.instance_of(int))
+ y = attr.ib()
+
+ def method(self):
+ return self.x
+
+ @classmethod
+ def classmethod(cls):
+ return "clsmethod"
+
+ @staticmethod
+ def staticmethod():
+ return "staticmethod"
+
+ @attr.s(slots=True, hash=True)
+ class C2Slots(C1BareSlots):
+ z = attr.ib()
+
+ @attr.s(slots=True, hash=True)
+ class C2(C1Bare):
+ z = attr.ib()
+
+ c2 = C2Slots(x=1, y=2, z="test")
+ assert 1 == c2.x
+ assert 2 == c2.y
+ assert "test" == c2.z
+
+ assert 1 == c2.method()
+ assert "clsmethod" == c2.classmethod()
+ assert "staticmethod" == c2.staticmethod()
+
+ with pytest.raises(AttributeError):
+ c2.t = "test"
+
+ non_slot_instance = C2(x=1, y=2, z="test")
+ if has_pympler:
+ assert asizeof(c2) < asizeof(non_slot_instance)
+
+ c3 = C2Slots(x=1, y=3, z="test")
+ assert c3 > c2
+ c2_ = C2Slots(x=1, y=2, z="test")
+ assert c2 == c2_
+
+ assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
+
+ hash(c2) # Just to assert it doesn't raise.
+
+ assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
+
+
+@pytest.mark.skipif(PY2, reason="closure cell rewriting is PY3-only.")
+class TestClosureCellRewriting(object):
+ def test_closure_cell_rewriting(self):
+ """
+ Slotted classes support proper closure cell rewriting.
+
+ This affects features like `__class__` and the no-arg super().
+ """
+ non_slot_instance = C1(x=1, y="test")
+ slot_instance = C1Slots(x=1, y="test")
+
+ assert non_slot_instance.my_class() is C1
+ assert slot_instance.my_class() is C1Slots
+
+ # Just assert they return something, and not an exception.
+ assert non_slot_instance.my_super()
+ assert slot_instance.my_super()
+
+ def test_inheritance(self):
+ """
+ Slotted classes support proper closure cell rewriting when inheriting.
+
+ This affects features like `__class__` and the no-arg super().
+ """
+
+ @attr.s
+ class C2(C1):
+ def my_subclass(self):
+ return __class__
+
+ @attr.s
+ class C2Slots(C1Slots):
+ def my_subclass(self):
+ return __class__
+
+ non_slot_instance = C2(x=1, y="test")
+ slot_instance = C2Slots(x=1, y="test")
+
+ assert non_slot_instance.my_class() is C1
+ assert slot_instance.my_class() is C1Slots
+
+ # Just assert they return something, and not an exception.
+ assert non_slot_instance.my_super()
+ assert slot_instance.my_super()
+
+ assert non_slot_instance.my_subclass() is C2
+ assert slot_instance.my_subclass() is C2Slots
+
+ @pytest.mark.parametrize("slots", [True, False])
+ def test_cls_static(self, slots):
+ """
+ Slotted classes support proper closure cell rewriting for class- and
+ static methods.
+ """
+ # Python can reuse closure cells, so we create new classes just for
+ # this test.
+
+ @attr.s(slots=slots)
+ class C:
+ @classmethod
+ def clsmethod(cls):
+ return __class__
+
+ assert C.clsmethod() is C
+
+ @attr.s(slots=slots)
+ class D:
+ @staticmethod
+ def statmethod():
+ return __class__
+
+ assert D.statmethod() is D
+
+ @pytest.mark.skipif(PYPY, reason="set_closure_cell always works on PyPy")
+ @pytest.mark.skipif(
+ sys.version_info >= (3, 8),
+ reason="can't break CodeType.replace() via monkeypatch",
+ )
+ def test_code_hack_failure(self, monkeypatch):
+ """
+ Keeps working if function/code object introspection doesn't work
+ on this (nonstandard) interpreter.
+
+ A warning is emitted that points to the actual code.
+ """
+ # This is a pretty good approximation of the behavior of
+ # the actual types.CodeType on Brython.
+ monkeypatch.setattr(types, "CodeType", lambda: None)
+ func = make_set_closure_cell()
+
+ with pytest.warns(RuntimeWarning) as wr:
+ func()
+
+ w = wr.pop()
+ assert __file__ == w.filename
+ assert (
+ "Running interpreter doesn't sufficiently support code object "
+ "introspection. Some features like bare super() or accessing "
+ "__class__ will not work with slotted classes.",
+ ) == w.message.args
+
+ assert just_warn is func
+
+
+@pytest.mark.skipif(PYPY, reason="__slots__ only block weakref on CPython")
+def test_not_weakrefable():
+ """
+ Instance is not weak-referenceable when `weakref_slot=False` in CPython.
+ """
+
+ @attr.s(slots=True, weakref_slot=False)
+ class C(object):
+ pass
+
+ c = C()
+
+ with pytest.raises(TypeError):
+ weakref.ref(c)
+
+
+@pytest.mark.skipif(
+ not PYPY, reason="slots without weakref_slot should only work on PyPy"
+)
+def test_implicitly_weakrefable():
+ """
+ Instance is weak-referenceable even when `weakref_slot=False` in PyPy.
+ """
+
+ @attr.s(slots=True, weakref_slot=False)
+ class C(object):
+ pass
+
+ c = C()
+ w = weakref.ref(c)
+
+ assert c is w()
+
+
+def test_weakrefable():
+ """
+ Instance is weak-referenceable when `weakref_slot=True`.
+ """
+
+ @attr.s(slots=True, weakref_slot=True)
+ class C(object):
+ pass
+
+ c = C()
+ w = weakref.ref(c)
+
+ assert c is w()
+
+
+def test_weakref_does_not_add_a_field():
+ """
+ `weakref_slot=True` does not add a field to the class.
+ """
+
+ @attr.s(slots=True, weakref_slot=True)
+ class C(object):
+ field = attr.ib()
+
+ assert [f.name for f in attr.fields(C)] == ["field"]
+
+
+def tests_weakref_does_not_add_when_inheriting_with_weakref():
+ """
+ `weakref_slot=True` does not add a new __weakref__ slot when inheriting
+ one.
+ """
+
+ @attr.s(slots=True, weakref_slot=True)
+ class C(object):
+ pass
+
+ @attr.s(slots=True, weakref_slot=True)
+ class D(C):
+ pass
+
+ d = D()
+ w = weakref.ref(d)
+
+ assert d is w()
+
+
+def tests_weakref_does_not_add_with_weakref_attribute():
+ """
+ `weakref_slot=True` does not add a new __weakref__ slot when an attribute
+ of that name exists.
+ """
+
+ @attr.s(slots=True, weakref_slot=True)
+ class C(object):
+ __weakref__ = attr.ib(
+ init=False, hash=False, repr=False, eq=False, order=False
+ )
+
+ c = C()
+ w = weakref.ref(c)
+
+ assert c is w()
+
+
+def test_slots_empty_cell():
+ """
+ Tests that no `ValueError: Cell is empty` exception is raised when
+ closure cells are present with no contents in a `slots=True` class.
+ (issue https://github.com/python-attrs/attrs/issues/589)
+
+ On Python 3, if a method mentions `__class__` or uses the no-arg `super()`,
+ the compiler will bake a reference to the class in the method itself as
+ `method.__closure__`. Since `attrs` replaces the class with a clone,
+ `_ClassBuilder._create_slots_class(self)` will rewrite these references so
+ it keeps working. This method did not properly cover the edge case where
+ the closure cell was empty; that has been fixed, and this is the
+ non-regression test.
+ """
+
+ @attr.s(slots=True)
+ class C(object):
+ field = attr.ib()
+
+ def f(self, a):
+ super(C, self).__init__()
+
+ C(field=1)
+
+
+@attr.s(getstate_setstate=True)
+class C2(object):
+ x = attr.ib()
+
+
+@attr.s(slots=True, getstate_setstate=True)
+class C2Slots(object):
+ x = attr.ib()
+
+
+class TestPickle(object):
+ @pytest.mark.parametrize("protocol", range(pickle.HIGHEST_PROTOCOL))
+ def test_pickleable_by_default(self, protocol):
+ """
+ If nothing else is passed, slotted classes can be pickled and
+ unpickled with all supported protocols.
+ """
+ i1 = C1Slots(1, 2)
+ i2 = pickle.loads(pickle.dumps(i1, protocol))
+
+ assert i1 == i2
+ assert i1 is not i2
+
+ def test_no_getstate_setstate_for_dict_classes(self):
+ """
+ As long as getstate_setstate is None, nothing is done to dict
+ classes.
+ """
+ i = C1(1, 2)
+
+ assert None is getattr(i, "__getstate__", None)
+ assert None is getattr(i, "__setstate__", None)
+
+ def test_no_getstate_setstate_if_option_false(self):
+ """
+ Don't add getstate/setstate if getstate_setstate is False.
+ """
+
+ @attr.s(slots=True, getstate_setstate=False)
+ class C(object):
+ x = attr.ib()
+
+ i = C(42)
+
+ assert None is getattr(i, "__getstate__", None)
+ assert None is getattr(i, "__setstate__", None)
+
+ @pytest.mark.parametrize("cls", [C2(1), C2Slots(1)])
+ def test_getstate_set_state_force_true(self, cls):
+ """
+ If getstate_setstate is True, add them unconditionally.
+ """
+ assert None is not getattr(cls, "__getstate__", None)
+ assert None is not getattr(cls, "__setstate__", None)
+
+
+def test_slots_super_property_get():
+ """
+ On Python 2/3, the explicit two-argument `super(B, self)` form works.
+ """
+
+ @attr.s(slots=True)
+ class A(object):
+ x = attr.ib()
+
+ @property
+ def f(self):
+ return self.x
+
+ @attr.s(slots=True)
+ class B(A):
+ @property
+ def f(self):
+ return super(B, self).f ** 2
+
+ assert B(11).f == 121
+ assert B(17).f == 289
+
+
+@pytest.mark.skipif(PY2, reason="shortcut super() is PY3-only.")
+def test_slots_super_property_get_shurtcut():
+ """
+ On Python 3, the `super()` shortcut is allowed.
+ """
+
+ @attr.s(slots=True)
+ class A(object):
+ x = attr.ib()
+
+ @property
+ def f(self):
+ return self.x
+
+ @attr.s(slots=True)
+ class B(A):
+ @property
+ def f(self):
+ return super().f ** 2
+
+ assert B(11).f == 121
+ assert B(17).f == 289
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_validators.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_validators.py
new file mode 100644
index 0000000000..d7c6de8bad
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_validators.py
@@ -0,0 +1,952 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Tests for `attr.validators`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import re
+
+import pytest
+
+import attr
+
+from attr import _config, fields, has
+from attr import validators as validator_module
+from attr._compat import PY2, TYPE
+from attr.validators import (
+ and_,
+ deep_iterable,
+ deep_mapping,
+ ge,
+ gt,
+ in_,
+ instance_of,
+ is_callable,
+ le,
+ lt,
+ matches_re,
+ max_len,
+ optional,
+ provides,
+)
+
+from .utils import simple_attr
+
+
+@pytest.fixture(scope="module")
+def zope_interface():
+ """Provides ``zope.interface`` if available, skipping the test if not."""
+ try:
+ import zope.interface
+ except ImportError:
+ raise pytest.skip(
+ "zope-related tests skipped when zope.interface is not installed"
+ )
+
+ return zope.interface
+
+
+class TestDisableValidators(object):
+ @pytest.fixture(autouse=True)
+ def reset_default(self):
+ """
+ Make sure validators are always enabled after a test.
+ """
+ yield
+ _config._run_validators = True
+
+ def test_default(self):
+ """
+ Run validators by default.
+ """
+ assert _config._run_validators is True
+
+ @pytest.mark.parametrize("value, expected", [(True, False), (False, True)])
+ def test_set_validators_disabled(self, value, expected):
+ """
+ Sets `_run_validators`.
+ """
+ validator_module.set_disabled(value)
+
+ assert _config._run_validators is expected
+
+ @pytest.mark.parametrize("value, expected", [(True, False), (False, True)])
+ def test_disabled(self, value, expected):
+ """
+ Returns `_run_validators`.
+ """
+ _config._run_validators = value
+
+ assert validator_module.get_disabled() is expected
+
+ def test_disabled_ctx(self):
+ """
+ The `disabled` context manager disables running validators,
+ but only within its context.
+ """
+ assert _config._run_validators is True
+
+ with validator_module.disabled():
+ assert _config._run_validators is False
+
+ assert _config._run_validators is True
+
+ def test_disabled_ctx_with_errors(self):
+ """
+ Running validators is re-enabled even if an error is raised.
+ """
+ assert _config._run_validators is True
+
+ with pytest.raises(ValueError):
+ with validator_module.disabled():
+ assert _config._run_validators is False
+
+ raise ValueError("haha!")
+
+ assert _config._run_validators is True
+
+
+class TestInstanceOf(object):
+ """
+ Tests for `instance_of`.
+ """
+
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert instance_of.__name__ in validator_module.__all__
+
+ def test_success(self):
+ """
+ Nothing happens if types match.
+ """
+ v = instance_of(int)
+ v(None, simple_attr("test"), 42)
+
+ def test_subclass(self):
+ """
+ Subclasses are accepted too.
+ """
+ v = instance_of(int)
+ # yep, bools are a subclass of int :(
+ v(None, simple_attr("test"), True)
+
+ def test_fail(self):
+ """
+ Raises `TypeError` on wrong types.
+ """
+ v = instance_of(int)
+ a = simple_attr("test")
+ with pytest.raises(TypeError) as e:
+ v(None, a, "42")
+ assert (
+ "'test' must be <{type} 'int'> (got '42' that is a <{type} "
+ "'str'>).".format(type=TYPE),
+ a,
+ int,
+ "42",
+ ) == e.value.args
+
+ def test_repr(self):
+ """
+ Returned validator has a useful `__repr__`.
+ """
+ v = instance_of(int)
+ assert (
+ "<instance_of validator for type <{type} 'int'>>".format(type=TYPE)
+ ) == repr(v)
+
+
+class TestMatchesRe(object):
+ """
+ Tests for `matches_re`.
+ """
+
+ def test_in_all(self):
+ """
+ validator is in ``__all__``.
+ """
+ assert matches_re.__name__ in validator_module.__all__
+
+ def test_match(self):
+ """
+ Silent on matches, raises ValueError on mismatches.
+ """
+
+ @attr.s
+ class ReTester(object):
+ str_match = attr.ib(validator=matches_re("a|ab"))
+
+ ReTester("ab") # shouldn't raise exceptions
+ with pytest.raises(TypeError):
+ ReTester(1)
+ with pytest.raises(ValueError):
+ ReTester("1")
+ with pytest.raises(ValueError):
+ ReTester("a1")
+
+ def test_flags(self):
+ """
+ Flags are propagated to the match function.
+ """
+
+ @attr.s
+ class MatchTester(object):
+ val = attr.ib(validator=matches_re("a", re.IGNORECASE, re.match))
+
+ MatchTester("A1") # test flags and using re.match
+
+ def test_precompiled_pattern(self):
+ """
+ Pre-compiled patterns are accepted.
+ """
+ pattern = re.compile("a")
+
+ @attr.s
+ class RePatternTester(object):
+ val = attr.ib(validator=matches_re(pattern))
+
+ RePatternTester("a")
+
+ def test_precompiled_pattern_no_flags(self):
+ """
+ A pre-compiled pattern cannot be combined with a 'flags' argument.
+ """
+ pattern = re.compile("")
+
+ with pytest.raises(
+ TypeError, match="can only be used with a string pattern"
+ ):
+ matches_re(pattern, flags=re.IGNORECASE)
+
+ def test_different_func(self):
+ """
+ Changing the match functions works.
+ """
+
+ @attr.s
+ class SearchTester(object):
+ val = attr.ib(validator=matches_re("a", 0, re.search))
+
+ SearchTester("bab") # re.search will match
+
+ def test_catches_invalid_func(self):
+ """
+ Invalid match functions are caught.
+ """
+ with pytest.raises(ValueError) as ei:
+ matches_re("a", 0, lambda: None)
+
+ if not PY2:
+ assert (
+ "'func' must be one of None, fullmatch, match, search."
+ == ei.value.args[0]
+ )
+ else:
+ assert (
+ "'func' must be one of None, match, search."
+ == ei.value.args[0]
+ )
+
+ @pytest.mark.parametrize(
+ "func", [None, getattr(re, "fullmatch", None), re.match, re.search]
+ )
+ def test_accepts_all_valid_func(self, func):
+ """
+ Every valid match function is accepted.
+ """
+ matches_re("a", func=func)
+
+ def test_repr(self):
+ """
+ __repr__ is meaningful.
+ """
+ assert repr(matches_re("a")).startswith(
+ "<matches_re validator for pattern"
+ )
+
+
+def always_pass(_, __, ___):
+ """
+ Toy validator that always passes.
+ """
+
+
+def always_fail(_, __, ___):
+ """
+ Toy validator that always fails.
+ """
+ 0 / 0
+
+
+class TestAnd(object):
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert and_.__name__ in validator_module.__all__
+
+ def test_success(self):
+ """
+ Succeeds if all wrapped validators succeed.
+ """
+ v = and_(instance_of(int), always_pass)
+
+ v(None, simple_attr("test"), 42)
+
+ def test_fail(self):
+ """
+ Fails if any wrapped validator fails.
+ """
+ v = and_(instance_of(int), always_fail)
+
+ with pytest.raises(ZeroDivisionError):
+ v(None, simple_attr("test"), 42)
+
+ def test_sugar(self):
+ """
+ `and_(v1, v2, v3)` and `[v1, v2, v3]` are equivalent.
+ """
+
+ @attr.s
+ class C(object):
+ a1 = attr.ib("a1", validator=and_(instance_of(int)))
+ a2 = attr.ib("a2", validator=[instance_of(int)])
+
+ assert C.__attrs_attrs__[0].validator == C.__attrs_attrs__[1].validator
+
+
+@pytest.fixture(scope="module")
+def ifoo(zope_interface):
+ """Provides a test ``zope.interface.Interface`` in ``zope`` tests."""
+
+ class IFoo(zope_interface.Interface):
+ """
+ An interface.
+ """
+
+ def f():
+ """
+ A function called f.
+ """
+
+ return IFoo
+
+
+class TestProvides(object):
+ """
+ Tests for `provides`.
+ """
+
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert provides.__name__ in validator_module.__all__
+
+ def test_success(self, zope_interface, ifoo):
+ """
+ Nothing happens if value provides requested interface.
+ """
+
+ @zope_interface.implementer(ifoo)
+ class C(object):
+ def f(self):
+ pass
+
+ v = provides(ifoo)
+ v(None, simple_attr("x"), C())
+
+ def test_fail(self, ifoo):
+ """
+ Raises `TypeError` if the interface isn't provided by the value.
+ """
+ value = object()
+ a = simple_attr("x")
+
+ v = provides(ifoo)
+ with pytest.raises(TypeError) as e:
+ v(None, a, value)
+ assert (
+ "'x' must provide {interface!r} which {value!r} doesn't.".format(
+ interface=ifoo, value=value
+ ),
+ a,
+ ifoo,
+ value,
+ ) == e.value.args
+
+ def test_repr(self, ifoo):
+ """
+ Returned validator has a useful `__repr__`.
+ """
+ v = provides(ifoo)
+ assert (
+ "<provides validator for interface {interface!r}>".format(
+ interface=ifoo
+ )
+ ) == repr(v)
+
+
+@pytest.mark.parametrize(
+ "validator", [instance_of(int), [always_pass, instance_of(int)]]
+)
+class TestOptional(object):
+ """
+ Tests for `optional`.
+ """
+
+ def test_in_all(self, validator):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert optional.__name__ in validator_module.__all__
+
+ def test_success(self, validator):
+ """
+ Nothing happens if validator succeeds.
+ """
+ v = optional(validator)
+ v(None, simple_attr("test"), 42)
+
+ def test_success_with_none(self, validator):
+ """
+ Nothing happens if None.
+ """
+ v = optional(validator)
+ v(None, simple_attr("test"), None)
+
+ def test_fail(self, validator):
+ """
+ Raises `TypeError` on wrong types.
+ """
+ v = optional(validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError) as e:
+ v(None, a, "42")
+ assert (
+ "'test' must be <{type} 'int'> (got '42' that is a <{type} "
+ "'str'>).".format(type=TYPE),
+ a,
+ int,
+ "42",
+ ) == e.value.args
+
+ def test_repr(self, validator):
+ """
+ Returned validator has a useful `__repr__`.
+ """
+ v = optional(validator)
+
+ if isinstance(validator, list):
+ repr_s = (
+ "<optional validator for _AndValidator(_validators=[{func}, "
+ "<instance_of validator for type <{type} 'int'>>]) or None>"
+ ).format(func=repr(always_pass), type=TYPE)
+ else:
+ repr_s = (
+ "<optional validator for <instance_of validator for type "
+ "<{type} 'int'>> or None>"
+ ).format(type=TYPE)
+
+ assert repr_s == repr(v)
+
+
+class TestIn_(object):
+ """
+ Tests for `in_`.
+ """
+
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert in_.__name__ in validator_module.__all__
+
+ def test_success_with_value(self):
+ """
+ If the value is in our options, nothing happens.
+ """
+ v = in_([1, 2, 3])
+ a = simple_attr("test")
+ v(1, a, 3)
+
+ def test_fail(self):
+ """
+ Raise ValueError if the value is outside our options.
+ """
+ v = in_([1, 2, 3])
+ a = simple_attr("test")
+ with pytest.raises(ValueError) as e:
+ v(None, a, None)
+ assert ("'test' must be in [1, 2, 3] (got None)",) == e.value.args
+
+ def test_fail_with_string(self):
+ """
+ Raise ValueError if the value is outside our options when the
+ options are specified as a string and the value is not a string.
+ """
+ v = in_("abc")
+ a = simple_attr("test")
+ with pytest.raises(ValueError) as e:
+ v(None, a, None)
+ assert ("'test' must be in 'abc' (got None)",) == e.value.args
+
+ def test_repr(self):
+ """
+ Returned validator has a useful `__repr__`.
+ """
+ v = in_([3, 4, 5])
+ assert "<in_ validator with options [3, 4, 5]>" == repr(v)
+
+
+class TestDeepIterable(object):
+ """
+ Tests for `deep_iterable`.
+ """
+
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert deep_iterable.__name__ in validator_module.__all__
+
+ def test_success_member_only(self):
+ """
+ If the member validator succeeds and the iterable validator is not set,
+ nothing happens.
+ """
+ member_validator = instance_of(int)
+ v = deep_iterable(member_validator)
+ a = simple_attr("test")
+ v(None, a, [42])
+
+ def test_success_member_and_iterable(self):
+ """
+ If both the member and iterable validators succeed, nothing happens.
+ """
+ member_validator = instance_of(int)
+ iterable_validator = instance_of(list)
+ v = deep_iterable(member_validator, iterable_validator)
+ a = simple_attr("test")
+ v(None, a, [42])
+
+ @pytest.mark.parametrize(
+ "member_validator, iterable_validator",
+ (
+ (instance_of(int), 42),
+ (42, instance_of(list)),
+ (42, 42),
+ (42, None),
+ ),
+ )
+ def test_noncallable_validators(
+ self, member_validator, iterable_validator
+ ):
+ """
+ Raise `TypeError` if any validators are not callable.
+ """
+ with pytest.raises(TypeError) as e:
+ deep_iterable(member_validator, iterable_validator)
+ value = 42
+ message = "must be callable (got {value} that is a {type_}).".format(
+ value=value, type_=value.__class__
+ )
+
+ assert message in e.value.args[0]
+ assert value == e.value.args[1]
+ assert message in e.value.msg
+ assert value == e.value.value
+
+ def test_fail_invalid_member(self):
+ """
+ Raise member validator error if an invalid member is found.
+ """
+ member_validator = instance_of(int)
+ v = deep_iterable(member_validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError):
+ v(None, a, [42, "42"])
+
+ def test_fail_invalid_iterable(self):
+ """
+ Raise iterable validator error if an invalid iterable is found.
+ """
+ member_validator = instance_of(int)
+ iterable_validator = instance_of(tuple)
+ v = deep_iterable(member_validator, iterable_validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError):
+ v(None, a, [42])
+
+ def test_fail_invalid_member_and_iterable(self):
+ """
+ Raise iterable validator error if both the iterable
+ and a member are invalid.
+ """
+ member_validator = instance_of(int)
+ iterable_validator = instance_of(tuple)
+ v = deep_iterable(member_validator, iterable_validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError):
+ v(None, a, [42, "42"])
+
+ def test_repr_member_only(self):
+ """
+ Returned validator has a useful `__repr__`
+ when only member validator is set.
+ """
+ member_validator = instance_of(int)
+ member_repr = "<instance_of validator for type <{type} 'int'>>".format(
+ type=TYPE
+ )
+ v = deep_iterable(member_validator)
+ expected_repr = (
+ "<deep_iterable validator for iterables of {member_repr}>"
+ ).format(member_repr=member_repr)
+ assert expected_repr == repr(v)
+
+ def test_repr_member_and_iterable(self):
+ """
+ Returned validator has a useful `__repr__` when both member
+ and iterable validators are set.
+ """
+ member_validator = instance_of(int)
+ member_repr = "<instance_of validator for type <{type} 'int'>>".format(
+ type=TYPE
+ )
+ iterable_validator = instance_of(list)
+ iterable_repr = (
+ "<instance_of validator for type <{type} 'list'>>"
+ ).format(type=TYPE)
+ v = deep_iterable(member_validator, iterable_validator)
+ expected_repr = (
+ "<deep_iterable validator for"
+ " {iterable_repr} iterables of {member_repr}>"
+ ).format(iterable_repr=iterable_repr, member_repr=member_repr)
+ assert expected_repr == repr(v)
+
+
+class TestDeepMapping(object):
+ """
+ Tests for `deep_mapping`.
+ """
+
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert deep_mapping.__name__ in validator_module.__all__
+
+ def test_success(self):
+ """
+ If both the key and value validators succeed, nothing happens.
+ """
+ key_validator = instance_of(str)
+ value_validator = instance_of(int)
+ v = deep_mapping(key_validator, value_validator)
+ a = simple_attr("test")
+ v(None, a, {"a": 6, "b": 7})
+
+ @pytest.mark.parametrize(
+ "key_validator, value_validator, mapping_validator",
+ (
+ (42, instance_of(int), None),
+ (instance_of(str), 42, None),
+ (instance_of(str), instance_of(int), 42),
+ (42, 42, None),
+ (42, 42, 42),
+ ),
+ )
+ def test_noncallable_validators(
+ self, key_validator, value_validator, mapping_validator
+ ):
+ """
+ Raise `TypeError` if any validators are not callable.
+ """
+ with pytest.raises(TypeError) as e:
+ deep_mapping(key_validator, value_validator, mapping_validator)
+
+ value = 42
+ message = "must be callable (got {value} that is a {type_}).".format(
+ value=value, type_=value.__class__
+ )
+
+ assert message in e.value.args[0]
+ assert value == e.value.args[1]
+ assert message in e.value.msg
+ assert value == e.value.value
+
+ def test_fail_invalid_mapping(self):
+ """
+ Raise `TypeError` if mapping validator fails.
+ """
+ key_validator = instance_of(str)
+ value_validator = instance_of(int)
+ mapping_validator = instance_of(dict)
+ v = deep_mapping(key_validator, value_validator, mapping_validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError):
+ v(None, a, None)
+
+ def test_fail_invalid_key(self):
+ """
+ Raise key validator error if an invalid key is found.
+ """
+ key_validator = instance_of(str)
+ value_validator = instance_of(int)
+ v = deep_mapping(key_validator, value_validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError):
+ v(None, a, {"a": 6, 42: 7})
+
+ def test_fail_invalid_member(self):
+ """
+ Raise value validator error if an invalid member value is found.
+ """
+ key_validator = instance_of(str)
+ value_validator = instance_of(int)
+ v = deep_mapping(key_validator, value_validator)
+ a = simple_attr("test")
+ with pytest.raises(TypeError):
+ v(None, a, {"a": "6", "b": 7})
+
+ def test_repr(self):
+ """
+ Returned validator has a useful `__repr__`.
+ """
+ key_validator = instance_of(str)
+ key_repr = "<instance_of validator for type <{type} 'str'>>".format(
+ type=TYPE
+ )
+ value_validator = instance_of(int)
+ value_repr = "<instance_of validator for type <{type} 'int'>>".format(
+ type=TYPE
+ )
+ v = deep_mapping(key_validator, value_validator)
+ expected_repr = (
+ "<deep_mapping validator for objects mapping "
+ "{key_repr} to {value_repr}>"
+ ).format(key_repr=key_repr, value_repr=value_repr)
+ assert expected_repr == repr(v)
+
+
+class TestIsCallable(object):
+ """
+ Tests for `is_callable`.
+ """
+
+ def test_in_all(self):
+ """
+ Verify that this validator is in ``__all__``.
+ """
+ assert is_callable.__name__ in validator_module.__all__
+
+ def test_success(self):
+ """
+ If the value is callable, nothing happens.
+ """
+ v = is_callable()
+ a = simple_attr("test")
+ v(None, a, isinstance)
+
+ def test_fail(self):
+ """
+ Raise TypeError if the value is not callable.
+ """
+ v = is_callable()
+ a = simple_attr("test")
+ with pytest.raises(TypeError) as e:
+ v(None, a, None)
+
+ value = None
+ message = "'test' must be callable (got {value} that is a {type_})."
+ expected_message = message.format(value=value, type_=value.__class__)
+
+ assert expected_message == e.value.args[0]
+ assert value == e.value.args[1]
+ assert expected_message == e.value.msg
+ assert value == e.value.value
+
+ def test_repr(self):
+ """
+ Returned validator has a useful `__repr__`.
+ """
+ v = is_callable()
+ assert "<is_callable validator>" == repr(v)
+
+ def test_exception_repr(self):
+ """
+ Verify that NotCallableError exception has a useful `__str__`.
+ """
+ from attr.exceptions import NotCallableError
+
+ instance = NotCallableError(msg="Some Message", value=42)
+ assert "Some Message" == str(instance)
+
+
+def test_hashability():
+ """
+ Validator classes are hashable.
+ """
+ for obj_name in dir(validator_module):
+ obj = getattr(validator_module, obj_name)
+ if not has(obj):
+ continue
+ hash_func = getattr(obj, "__hash__", None)
+ assert hash_func is not None
+ assert hash_func is not object.__hash__
+
+
+class TestLtLeGeGt:
+ """
+ Tests for `lt`, `le`, `ge`, `gt`.
+ """
+
+ BOUND = 4
+
+ def test_in_all(self):
+ """
+ validator is in ``__all__``.
+ """
+ assert all(
+ f.__name__ in validator_module.__all__ for f in [lt, le, ge, gt]
+ )
+
+ @pytest.mark.parametrize("v", [lt, le, ge, gt])
+ def test_retrieve_bound(self, v):
+ """
+ The configured bound for the comparison can be extracted from the
+ Attribute.
+ """
+
+ @attr.s
+ class Tester(object):
+ value = attr.ib(validator=v(self.BOUND))
+
+ assert fields(Tester).value.validator.bound == self.BOUND
+
+ @pytest.mark.parametrize(
+ "v, value",
+ [
+ (lt, 3),
+ (le, 3),
+ (le, 4),
+ (ge, 4),
+ (ge, 5),
+ (gt, 5),
+ ],
+ )
+ def test_check_valid(self, v, value):
+ """Silent if value {op} bound."""
+
+ @attr.s
+ class Tester(object):
+ value = attr.ib(validator=v(self.BOUND))
+
+ Tester(value) # shouldn't raise exceptions
+
+ @pytest.mark.parametrize(
+ "v, value",
+ [
+ (lt, 4),
+ (le, 5),
+ (ge, 3),
+ (gt, 4),
+ ],
+ )
+ def test_check_invalid(self, v, value):
+ """Raise ValueError if value {op} bound."""
+
+ @attr.s
+ class Tester(object):
+ value = attr.ib(validator=v(self.BOUND))
+
+ with pytest.raises(ValueError):
+ Tester(value)
+
+ @pytest.mark.parametrize("v", [lt, le, ge, gt])
+ def test_repr(self, v):
+ """
+ __repr__ is meaningful.
+ """
+ nv = v(23)
+ assert repr(nv) == "<Validator for x {op} {bound}>".format(
+ op=nv.compare_op, bound=23
+ )
+
+
+class TestMaxLen:
+ """
+ Tests for `max_len`.
+ """
+
+ MAX_LENGTH = 4
+
+ def test_in_all(self):
+ """
+ validator is in ``__all__``.
+ """
+ assert max_len.__name__ in validator_module.__all__
+
+ def test_retrieve_max_len(self):
+ """
+ The configured maximum length can be extracted from the Attribute.
+ """
+
+ @attr.s
+ class Tester(object):
+ value = attr.ib(validator=max_len(self.MAX_LENGTH))
+
+ assert fields(Tester).value.validator.max_length == self.MAX_LENGTH
+
+ @pytest.mark.parametrize(
+ "value",
+ [
+ "",
+ "foo",
+ "spam",
+ [],
+ list(range(MAX_LENGTH)),
+ {"spam": 3, "eggs": 4},
+ ],
+ )
+ def test_check_valid(self, value):
+ """
+ Silent if len(value) <= max_len.
+ Values can be strings and other iterables.
+ """
+
+ @attr.s
+ class Tester(object):
+ value = attr.ib(validator=max_len(self.MAX_LENGTH))
+
+ Tester(value) # shouldn't raise exceptions
+
+ @pytest.mark.parametrize(
+ "value",
+ [
+ "bacon",
+ list(range(6)),
+ ],
+ )
+ def test_check_invalid(self, value):
+ """
+ Raise ValueError if len(value) > max_len.
+ """
+
+ @attr.s
+ class Tester(object):
+ value = attr.ib(validator=max_len(self.MAX_LENGTH))
+
+ with pytest.raises(ValueError):
+ Tester(value)
+
+ def test_repr(self):
+ """
+ __repr__ is meaningful.
+ """
+ assert repr(max_len(23)) == "<max_len validator for 23>"
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/test_version_info.py b/testing/web-platform/tests/tools/third_party/attrs/tests/test_version_info.py
new file mode 100644
index 0000000000..41f75f47a6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/test_version_info.py
@@ -0,0 +1,62 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import absolute_import, division, print_function
+
+import pytest
+
+from attr import VersionInfo
+from attr._compat import PY2
+
+
+@pytest.fixture(name="vi")
+def fixture_vi():
+ return VersionInfo(19, 2, 0, "final")
+
+
+class TestVersionInfo:
+ def test_from_string_no_releaselevel(self, vi):
+ """
+ If there is no suffix, the releaselevel becomes "final" by default.
+ """
+ assert vi == VersionInfo._from_version_string("19.2.0")
+
+ def test_suffix_is_preserved(self):
+ """
+ If there is a suffix, it's preserved.
+ """
+ assert (
+ "dev0"
+ == VersionInfo._from_version_string("19.2.0.dev0").releaselevel
+ )
+
+ @pytest.mark.skipif(
+ PY2, reason="Python 2 is too YOLO to care about comparability."
+ )
+ @pytest.mark.parametrize("other", [(), (19, 2, 0, "final", "garbage")])
+ def test_wrong_len(self, vi, other):
+ """
+ Comparing with a tuple that has the wrong length raises an error.
+ """
+ assert vi != other
+
+ with pytest.raises(TypeError):
+ vi < other
+
+ @pytest.mark.parametrize("other", [[19, 2, 0, "final"]])
+ def test_wrong_type(self, vi, other):
+ """
+ Only compare to other VersionInfos or tuples.
+ """
+ assert vi != other
+
+ def test_order(self, vi):
+ """
+ Ordering works as expected.
+ """
+ assert vi < (20,)
+ assert vi < (19, 2, 1)
+ assert vi > (0,)
+ assert vi <= (19, 2)
+ assert vi >= (19, 2)
+ assert vi > (19, 2, 0, "dev0")
+ assert vi < (19, 2, 0, "post1")
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/typing_example.py b/testing/web-platform/tests/tools/third_party/attrs/tests/typing_example.py
new file mode 100644
index 0000000000..a85c768c10
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/typing_example.py
@@ -0,0 +1,420 @@
+# SPDX-License-Identifier: MIT
+
+import re
+
+from typing import Any, Dict, List, Tuple, Union
+
+import attr
+import attrs
+
+
+# Typing via "type" Argument ---
+
+
+@attr.s
+class C:
+ a = attr.ib(type=int)
+
+
+c = C(1)
+C(a=1)
+
+
+@attr.s
+class D:
+ x = attr.ib(type=List[int])
+
+
+@attr.s
+class E:
+ y = attr.ib(type="List[int]")
+
+
+@attr.s
+class F:
+ z = attr.ib(type=Any)
+
+
+# Typing via Annotations ---
+
+
+@attr.s
+class CC:
+ a: int = attr.ib()
+
+
+cc = CC(1)
+CC(a=1)
+
+
+@attr.s
+class DD:
+ x: List[int] = attr.ib()
+
+
+@attr.s
+class EE:
+ y: "List[int]" = attr.ib()
+
+
+@attr.s
+class FF:
+ z: Any = attr.ib()
+
+
+@attrs.define
+class FFF:
+ z: int
+
+
+FFF(1)
+
+
+# Inheritance --
+
+
+@attr.s
+class GG(DD):
+ y: str = attr.ib()
+
+
+GG(x=[1], y="foo")
+
+
+@attr.s
+class HH(DD, EE):
+ z: float = attr.ib()
+
+
+HH(x=[1], y=[], z=1.1)
+
+
+# same class
+c == cc
+
+
+# Exceptions
+@attr.s(auto_exc=True)
+class Error(Exception):
+ x: int = attr.ib()
+
+
+try:
+ raise Error(1)
+except Error as e:
+ e.x
+ e.args
+ str(e)
+
+
+@attrs.define
+class Error2(Exception):
+ x: int
+
+
+try:
+ raise Error2(1)
+except Error2 as e:
+ e.x
+ e.args
+ str(e)
+
+
+# Converters
+# XXX: Currently converters can only be functions so none of this works
+# although the stubs should be correct.
+
+# @attr.s
+# class ConvCOptional:
+# x: Optional[int] = attr.ib(converter=attr.converters.optional(int))
+
+
+# ConvCOptional(1)
+# ConvCOptional(None)
+
+
+# @attr.s
+# class ConvCDefaultIfNone:
+# x: int = attr.ib(converter=attr.converters.default_if_none(42))
+
+
+# ConvCDefaultIfNone(1)
+# ConvCDefaultIfNone(None)
+
+
+# @attr.s
+# class ConvCToBool:
+# x: int = attr.ib(converter=attr.converters.to_bool)
+
+
+# ConvCToBool(1)
+# ConvCToBool(True)
+# ConvCToBool("on")
+# ConvCToBool("yes")
+# ConvCToBool(0)
+# ConvCToBool(False)
+# ConvCToBool("n")
+
+
+# Validators
+@attr.s
+class Validated:
+ a = attr.ib(
+ type=List[C],
+ validator=attr.validators.deep_iterable(
+ attr.validators.instance_of(C), attr.validators.instance_of(list)
+ ),
+ )
+ a = attr.ib(
+ type=Tuple[C],
+ validator=attr.validators.deep_iterable(
+ attr.validators.instance_of(C), attr.validators.instance_of(tuple)
+ ),
+ )
+ b = attr.ib(
+ type=List[C],
+ validator=attr.validators.deep_iterable(
+ attr.validators.instance_of(C)
+ ),
+ )
+ c = attr.ib(
+ type=Dict[C, D],
+ validator=attr.validators.deep_mapping(
+ attr.validators.instance_of(C),
+ attr.validators.instance_of(D),
+ attr.validators.instance_of(dict),
+ ),
+ )
+ d = attr.ib(
+ type=Dict[C, D],
+ validator=attr.validators.deep_mapping(
+ attr.validators.instance_of(C), attr.validators.instance_of(D)
+ ),
+ )
+ e: str = attr.ib(validator=attr.validators.matches_re(re.compile(r"foo")))
+ f: str = attr.ib(
+ validator=attr.validators.matches_re(r"foo", flags=42, func=re.search)
+ )
+
+ # Test different forms of instance_of
+ g: int = attr.ib(validator=attr.validators.instance_of(int))
+ h: int = attr.ib(validator=attr.validators.instance_of((int,)))
+ j: Union[int, str] = attr.ib(
+ validator=attr.validators.instance_of((int, str))
+ )
+ k: Union[int, str, C] = attr.ib(
+ validator=attrs.validators.instance_of((int, C, str))
+ )
+
+
+@attr.define
+class Validated2:
+ num: int = attr.field(validator=attr.validators.ge(0))
+
+
+@attrs.define
+class Validated3:
+ num: int = attr.field(validator=attr.validators.ge(0))
+
+
+with attr.validators.disabled():
+ Validated2(num=-1)
+
+with attrs.validators.disabled():
+ Validated3(num=-1)
+
+try:
+ attr.validators.set_disabled(True)
+ Validated2(num=-1)
+finally:
+ attr.validators.set_disabled(False)
+
+
+# Custom repr()
+@attr.s
+class WithCustomRepr:
+ a: int = attr.ib(repr=True)
+ b: str = attr.ib(repr=False)
+ c: str = attr.ib(repr=lambda value: "c is for cookie")
+ d: bool = attr.ib(repr=str)
+
+
+@attrs.define
+class WithCustomRepr2:
+ a: int = attrs.field(repr=True)
+ b: str = attrs.field(repr=False)
+ c: str = attrs.field(repr=lambda value: "c is for cookie")
+ d: bool = attrs.field(repr=str)
+
+
+# Check some of our own types
+@attr.s(eq=True, order=False)
+class OrderFlags:
+ a: int = attr.ib(eq=False, order=False)
+ b: int = attr.ib(eq=True, order=True)
+
+
+# on_setattr hooks
+@attr.s(on_setattr=attr.setters.validate)
+class ValidatedSetter:
+ a: int
+ b: str = attr.ib(on_setattr=attr.setters.NO_OP)
+ c: bool = attr.ib(on_setattr=attr.setters.frozen)
+ d: int = attr.ib(on_setattr=[attr.setters.convert, attr.setters.validate])
+ e: bool = attr.ib(
+ on_setattr=attr.setters.pipe(
+ attr.setters.convert, attr.setters.validate
+ )
+ )
+
+
+@attrs.define(on_setattr=attr.setters.validate)
+class ValidatedSetter2:
+ a: int
+ b: str = attrs.field(on_setattr=attrs.setters.NO_OP)
+ c: bool = attrs.field(on_setattr=attrs.setters.frozen)
+ d: int = attrs.field(
+ on_setattr=[attrs.setters.convert, attrs.setters.validate]
+ )
+ e: bool = attrs.field(
+ on_setattr=attrs.setters.pipe(
+ attrs.setters.convert, attrs.setters.validate
+ )
+ )
+
+
+# field_transformer
+def ft_hook(cls: type, attribs: List[attr.Attribute]) -> List[attr.Attribute]:
+ return attribs
+
+
+# field_transformer
+def ft_hook2(
+ cls: type, attribs: List[attrs.Attribute]
+) -> List[attrs.Attribute]:
+ return attribs
+
+
+@attr.s(field_transformer=ft_hook)
+class TransformedAttrs:
+ x: int
+
+
+@attrs.define(field_transformer=ft_hook2)
+class TransformedAttrs2:
+ x: int
+
+
+# Auto-detect
+@attr.s(auto_detect=True)
+class AutoDetect:
+ x: int
+
+ def __init__(self, x: int):
+ self.x = x
+
+
+# Provisional APIs
+@attr.define(order=True)
+class NGClass:
+ x: int = attr.field(default=42)
+
+
+ngc = NGClass(1)
+
+
+@attr.mutable(slots=False)
+class NGClass2:
+ x: int
+
+
+ngc2 = NGClass2(1)
+
+
+@attr.frozen(str=True)
+class NGFrozen:
+ x: int
+
+
+ngf = NGFrozen(1)
+
+attr.fields(NGFrozen).x.evolve(eq=False)
+a = attr.fields(NGFrozen).x
+a.evolve(repr=False)
+
+
+attrs.fields(NGFrozen).x.evolve(eq=False)
+a = attrs.fields(NGFrozen).x
+a.evolve(repr=False)
+
+
+@attr.s(collect_by_mro=True)
+class MRO:
+ pass
+
+
+@attr.s
+class FactoryTest:
+ a: List[int] = attr.ib(default=attr.Factory(list))
+ b: List[Any] = attr.ib(default=attr.Factory(list, False))
+ c: List[int] = attr.ib(default=attr.Factory((lambda s: s.a), True))
+
+
+@attrs.define
+class FactoryTest2:
+ a: List[int] = attrs.field(default=attrs.Factory(list))
+ b: List[Any] = attrs.field(default=attrs.Factory(list, False))
+ c: List[int] = attrs.field(default=attrs.Factory((lambda s: s.a), True))
+
+
+attrs.asdict(FactoryTest2())
+attr.asdict(FactoryTest(), tuple_keys=True)
+
+
+# Check match_args stub
+@attr.s(match_args=False)
+class MatchArgs:
+ a: int = attr.ib()
+ b: int = attr.ib()
+
+
+attr.asdict(FactoryTest())
+attr.asdict(FactoryTest(), retain_collection_types=False)
+
+
+# Check match_args stub
+@attrs.define(match_args=False)
+class MatchArgs2:
+ a: int
+ b: int
+
+
+# NG versions of asdict/astuple
+attrs.asdict(MatchArgs2(1, 2))
+attrs.astuple(MatchArgs2(1, 2))
+
+
+def importing_from_attr() -> None:
+ """
+ Use a function to keep the ns clean.
+ """
+ from attr.converters import optional
+ from attr.exceptions import FrozenError
+ from attr.filters import include
+ from attr.setters import frozen
+ from attr.validators import and_
+
+ assert optional and FrozenError and include and frozen and and_
+
+
+def importing_from_attrs() -> None:
+ """
+ Use a function to keep the ns clean.
+ """
+ from attrs.converters import optional
+ from attrs.exceptions import FrozenError
+ from attrs.filters import include
+ from attrs.setters import frozen
+ from attrs.validators import and_
+
+ assert optional and FrozenError and include and frozen and and_
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tests/utils.py b/testing/web-platform/tests/tools/third_party/attrs/tests/utils.py
new file mode 100644
index 0000000000..a2fefbd606
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tests/utils.py
@@ -0,0 +1,86 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Common helper functions for tests.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from attr import Attribute
+from attr._make import NOTHING, make_class
+
+
+def simple_class(
+ eq=False,
+ order=False,
+ repr=False,
+ hash=False,
+ str=False,
+ slots=False,
+ frozen=False,
+ cache_hash=False,
+):
+ """
+ Return a new simple class.
+ """
+ return make_class(
+ "C",
+ ["a", "b"],
+ eq=eq or order,
+ order=order,
+ repr=repr,
+ hash=hash,
+ init=True,
+ slots=slots,
+ str=str,
+ frozen=frozen,
+ cache_hash=cache_hash,
+ )
+
+
+def simple_attr(
+ name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ eq=True,
+ hash=None,
+ init=True,
+ converter=None,
+ kw_only=False,
+ inherited=False,
+):
+ """
+ Return an attribute with a name and no other bells and whistles.
+ """
+ return Attribute(
+ name=name,
+ default=default,
+ validator=validator,
+ repr=repr,
+ cmp=None,
+ eq=eq,
+ hash=hash,
+ init=init,
+ converter=converter,
+ kw_only=kw_only,
+ inherited=inherited,
+ )
+
+
+class TestSimpleClass(object):
+ """
+ Tests for the testing helper `simple_class`.
+ """
+
+ def test_returns_class(self):
+ """
+ Returns a class object.
+ """
+ assert type is simple_class().__class__
+
+ def test_returns_distinct_classes(self):
+ """
+ Each call returns a completely new class.
+ """
+ assert simple_class() is not simple_class()
diff --git a/testing/web-platform/tests/tools/third_party/attrs/tox.ini b/testing/web-platform/tests/tools/third_party/attrs/tox.ini
new file mode 100644
index 0000000000..ddcbc4dbbc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/attrs/tox.ini
@@ -0,0 +1,129 @@
+[pytest]
+addopts = -ra
+testpaths = tests
+xfail_strict = true
+filterwarnings =
+ once::Warning
+ ignore:::pympler[.*]
+
+
+# Keep docs in sync with docs env and .readthedocs.yml.
+[gh-actions]
+python =
+ 2.7: py27
+ 3.5: py35
+ 3.6: py36
+ 3.7: py37
+ 3.8: py38, changelog
+ 3.9: py39, pyright
+ 3.10: py310, manifest, typing, docs
+ pypy-2: pypy
+ pypy-3: pypy3
+
+
+[tox]
+envlist = typing,pre-commit,py27,py35,py36,py37,py38,py39,py310,pypy,pypy3,pyright,manifest,docs,pypi-description,changelog,coverage-report
+isolated_build = True
+
+
+[testenv:docs]
+# Keep basepython in sync with gh-actions and .readthedocs.yml.
+basepython = python3.10
+extras = docs
+commands =
+ sphinx-build -n -T -W -b html -d {envtmpdir}/doctrees docs docs/_build/html
+ sphinx-build -n -T -W -b doctest -d {envtmpdir}/doctrees docs docs/_build/html
+ python -m doctest README.rst
+
+
+[testenv]
+extras = tests
+commands = python -m pytest {posargs}
+
+
+[testenv:py27]
+extras = tests
+commands = coverage run -m pytest {posargs}
+
+
+[testenv:py37]
+extras = tests
+commands = coverage run -m pytest {posargs}
+
+
+[testenv:py310]
+# Python 3.6+ has a number of compile-time warnings on invalid string escapes.
+# PYTHONWARNINGS=d and --no-compile below make them visible during the Tox run.
+basepython = python3.10
+install_command = pip install --no-compile {opts} {packages}
+setenv =
+ PYTHONWARNINGS=d
+extras = tests
+commands = coverage run -m pytest {posargs}
+
+
+[testenv:coverage-report]
+basepython = python3.10
+depends = py27,py37,py310
+skip_install = true
+deps = coverage[toml]>=5.4
+commands =
+ coverage combine
+ coverage report
+
+
+[testenv:pre-commit]
+basepython = python3.10
+skip_install = true
+deps =
+ pre-commit
+passenv = HOMEPATH # needed on Windows
+commands =
+ pre-commit run --all-files
+
+
+[testenv:manifest]
+basepython = python3.10
+deps = check-manifest
+skip_install = true
+commands = check-manifest
+
+
+[testenv:pypi-description]
+basepython = python3.8
+skip_install = true
+deps =
+ twine
+ pip >= 18.0.0
+commands =
+ pip wheel -w {envtmpdir}/build --no-deps .
+ twine check {envtmpdir}/build/*
+
+
+[testenv:changelog]
+basepython = python3.8
+deps = towncrier<21.3
+skip_install = true
+commands = towncrier --draft
+
+
+[testenv:typing]
+basepython = python3.10
+deps = mypy>=0.902
+commands =
+ mypy src/attr/__init__.pyi src/attr/_version_info.pyi src/attr/converters.pyi src/attr/exceptions.pyi src/attr/filters.pyi src/attr/setters.pyi src/attr/validators.pyi
+ mypy tests/typing_example.py
+
+
+[testenv:pyright]
+# Install and configure node and pyright
+# This *could* be folded into a custom install_command
+# Use nodeenv to configure node in the running tox virtual environment
+# Seeing errors using "nodeenv -p"
+# Use npm install -g to install "globally" into the virtual environment
+basepython = python3.9
+deps = nodeenv
+commands =
+ nodeenv --prebuilt --node=lts --force {envdir}
+ npm install -g --no-package-lock --no-save pyright
+ pytest tests/test_pyright.py -vv
diff --git a/testing/web-platform/tests/tools/third_party/certifi/LICENSE b/testing/web-platform/tests/tools/third_party/certifi/LICENSE
new file mode 100644
index 0000000000..802b53ff11
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/certifi/LICENSE
@@ -0,0 +1,21 @@
+This package contains a modified version of ca-bundle.crt:
+
+ca-bundle.crt -- Bundle of CA Root Certificates
+
+Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011#
+This is a bundle of X.509 certificates of public Certificate Authorities
+(CA). These were automatically extracted from Mozilla's root certificates
+file (certdata.txt). This file can be found in the mozilla source tree:
+http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1#
+It contains the certificates in PEM format and therefore
+can be directly used with curl / libcurl / php_curl, or with
+an Apache+mod_ssl webserver for SSL client authentication.
+Just configure this file as the SSLCACertificateFile.#
+
+***** BEGIN LICENSE BLOCK *****
+This Source Code Form is subject to the terms of the Mozilla Public License,
+v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
+one at http://mozilla.org/MPL/2.0/.
+
+***** END LICENSE BLOCK *****
+@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
diff --git a/testing/web-platform/tests/tools/third_party/certifi/MANIFEST.in b/testing/web-platform/tests/tools/third_party/certifi/MANIFEST.in
new file mode 100644
index 0000000000..6077b5ff84
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/certifi/MANIFEST.in
@@ -0,0 +1 @@
+include MANIFEST.in README.rst LICENSE certifi/cacert.pem
diff --git a/testing/web-platform/tests/tools/third_party/certifi/PKG-INFO b/testing/web-platform/tests/tools/third_party/certifi/PKG-INFO
new file mode 100644
index 0000000000..73f3643804
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/certifi/PKG-INFO
@@ -0,0 +1,69 @@
+Metadata-Version: 1.1
+Name: certifi
+Version: 2018.4.16
+Summary: Python package for providing Mozilla's CA Bundle.
+Home-page: http://certifi.io/
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.com
+License: MPL-2.0
+Description: Certifi: Python SSL Certificates
+ ================================
+
+ `Certifi`_ is a carefully curated collection of Root Certificates for
+ validating the trustworthiness of SSL certificates while verifying the identity
+ of TLS hosts. It has been extracted from the `Requests`_ project.
+
+ Installation
+ ------------
+
+ ``certifi`` is available on PyPI. Simply install it with ``pip``::
+
+ $ pip install certifi
+
+ Usage
+ -----
+
+ To reference the installed certificate authority (CA) bundle, you can use the
+ built-in function::
+
+ >>> import certifi
+
+ >>> certifi.where()
+ '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem'
+
+ Enjoy!
+
+ 1024-bit Root Certificates
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Browsers and certificate authorities have concluded that 1024-bit keys are
+ unacceptably weak for certificates, particularly root certificates. For this
+ reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its
+ bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key)
+ certificate from the same CA. Because Mozilla removed these certificates from
+ its bundle, ``certifi`` removed them as well.
+
+ In previous versions, ``certifi`` provided the ``certifi.old_where()`` function
+ to intentionally re-add the 1024-bit roots back into your bundle. This was not
+ recommended in production and therefore was removed. To assist in migrating old
+ code, the function ``certifi.old_where()`` continues to exist as an alias of
+ ``certifi.where()``. Please update your code to use ``certifi.where()``
+ instead. ``certifi.old_where()`` will be removed in 2018.
+
+ .. _`Certifi`: http://certifi.io/en/latest/
+ .. _`Requests`: http://docs.python-requests.org/en/latest/
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
diff --git a/testing/web-platform/tests/tools/third_party/certifi/README.rst b/testing/web-platform/tests/tools/third_party/certifi/README.rst
new file mode 100644
index 0000000000..64b3e38e10
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/certifi/README.rst
@@ -0,0 +1,46 @@
+Certifi: Python SSL Certificates
+================================
+
+`Certifi`_ is a carefully curated collection of Root Certificates for
+validating the trustworthiness of SSL certificates while verifying the identity
+of TLS hosts. It has been extracted from the `Requests`_ project.
+
+Installation
+------------
+
+``certifi`` is available on PyPI. Simply install it with ``pip``::
+
+ $ pip install certifi
+
+Usage
+-----
+
+To reference the installed certificate authority (CA) bundle, you can use the
+built-in function::
+
+ >>> import certifi
+
+ >>> certifi.where()
+ '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem'
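+
+The returned path points at a plain PEM bundle, so it can be passed to
+anything that accepts a CA file; for example (a minimal sketch using the
+standard library's ``ssl`` module)::
+
+ >>> import ssl
+ >>> context = ssl.create_default_context(cafile=certifi.where())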
+
+Enjoy!
+
+1024-bit Root Certificates
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Browsers and certificate authorities have concluded that 1024-bit keys are
+unacceptably weak for certificates, particularly root certificates. For this
+reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its
+bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key)
+certificate from the same CA. Because Mozilla removed these certificates from
+its bundle, ``certifi`` removed them as well.
+
+In previous versions, ``certifi`` provided the ``certifi.old_where()`` function
+to intentionally re-add the 1024-bit roots back into your bundle. This was not
+recommended in production and therefore was removed. To assist in migrating old
+code, the function ``certifi.old_where()`` continues to exist as an alias of
+``certifi.where()``. Please update your code to use ``certifi.where()``
+instead. ``certifi.old_where()`` will be removed in 2018.
+
+.. _`Certifi`: http://certifi.io/en/latest/
+.. _`Requests`: http://docs.python-requests.org/en/latest/
diff --git a/testing/web-platform/tests/tools/third_party/certifi/certifi/__init__.py b/testing/web-platform/tests/tools/third_party/certifi/certifi/__init__.py
new file mode 100644
index 0000000000..0c4963ef60
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/certifi/certifi/__init__.py
@@ -0,0 +1,3 @@
+from .core import where, old_where
+
+__version__ = "2018.04.16"
diff --git a/testing/web-platform/tests/tools/third_party/certifi/certifi/__main__.py b/testing/web-platform/tests/tools/third_party/certifi/certifi/__main__.py
new file mode 100644
index 0000000000..5f1da0dd0c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/certifi/certifi/__main__.py
@@ -0,0 +1,2 @@
+from certifi import where
+print(where())
diff --git a/testing/web-platform/tests/tools/third_party/certifi/certifi/cacert.pem b/testing/web-platform/tests/tools/third_party/certifi/certifi/cacert.pem
new file mode 100644
index 0000000000..2713f541c4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/certifi/certifi/cacert.pem
@@ -0,0 +1,4400 @@
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
+# Serial: 206684696279472310254277870180966723415
+# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
+# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
+# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Label: "AddTrust External Root"
+# Serial: 1
+# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
+# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
+# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
+-----BEGIN CERTIFICATE-----
+MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
+IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
+FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
+bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
+dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
+uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
+mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
+a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
+E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
+WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
+Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
+cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
+IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
+AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
+YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
+Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
+c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
+mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
+# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
+# Label: "Visa eCommerce Root"
+# Serial: 25952180776285836048024890241505565794
+# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02
+# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62
+# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22
+-----BEGIN CERTIFICATE-----
+MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr
+MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl
+cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv
+bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw
+CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h
+dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l
+cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h
+2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E
+lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV
+ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq
+299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t
+vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL
+dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF
+AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR
+zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3
+LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd
+7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw
+++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt
+398znM/jra6O1I7mT1GvFpLgXPYHDw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Label: "QuoVadis Root CA"
+# Serial: 985026699
+# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24
+# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9
+# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73
+-----BEGIN CERTIFICATE-----
+MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
+MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
+IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
+dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
+li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
+rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
+WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
+F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
+xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
+Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
+dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
+ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
+IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
+c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
+ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
+Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
+KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
+KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
+y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
+dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
+VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
+MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
+fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
+7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
+cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
+mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
+xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
+SnQ2+Q==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
+# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
+# Label: "Security Communication Root CA"
+# Serial: 0
+# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
+# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
+# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
+-----BEGIN CERTIFICATE-----
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
+MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
+WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
+9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
+Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
+QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
+xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
+A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sonera Class2 CA O=Sonera
+# Subject: CN=Sonera Class2 CA O=Sonera
+# Label: "Sonera Class 2 Root CA"
+# Serial: 29
+# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb
+# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27
+# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
+MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
+MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
+BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
+Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
+5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
+3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
+vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
+8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
+DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
+MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
+zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
+FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
+Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
+ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: O=Government Root Certification Authority
+# Subject: O=Government Root Certification Authority
+# Label: "Taiwan GRCA"
+# Serial: 42023070807708724159991140556527066870
+# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
+# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
+# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
+MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
+PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
+IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
+gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
+yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
+F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
+jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
+ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
+VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
+YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
+EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
+Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
+DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
+MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
+TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
+qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
+ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
+JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
+hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
+EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
+nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
+udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
+ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
+LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
+pYYsfPQS
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=Class 2 Primary CA O=Certplus
+# Subject: CN=Class 2 Primary CA O=Certplus
+# Label: "Certplus Class 2 Primary CA"
+# Serial: 177770208045934040241468760488327595043
+# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b
+# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb
+# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb
+-----BEGIN CERTIFICATE-----
+MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
+PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
+MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
+IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
+ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
+VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
+kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
+EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
+H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
+HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
+DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
+QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
+Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
+AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
+yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
+FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
+ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
+kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
+l7+ijrRU
+-----END CERTIFICATE-----
+
+# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Label: "DST Root CA X3"
+# Serial: 91299735575339953335919266965803778155
+# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5
+# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13
+# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GA CA"
+# Serial: 86718877871133159090080555911823548314
+# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
+# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
+# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
+-----BEGIN CERTIFICATE-----
+MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
+ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
+aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
+ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
+NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
+A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
+VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
+SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
+VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
+w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
+mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
+4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
+4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
+EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
+SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
+ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
+vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
+hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
+Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
+/L7fCg0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Label: "Deutsche Telekom Root CA 2"
+# Serial: 38
+# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08
+# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf
+# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3
+-----BEGIN CERTIFICATE-----
+MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
+MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
+IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
+IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
+RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
+U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
+IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
+ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
+QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
+rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
+NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
+QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
+txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
+BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
+AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
+tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
+IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
+6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
+Cm26OWMohpLzGITY+9HPBVZkVw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G2"
+# Serial: 10000012
+# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
+# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
+# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
+-----BEGIN CERTIFICATE-----
+MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
+DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
+qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
+uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
+Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
+pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
+5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
+UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
+GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
+5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
+6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
+eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
+B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
+L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
+SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
+CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
+5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
+IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
+gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
+vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
+bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
+N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
+Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
+ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Label: "Hongkong Post Root CA 1"
+# Serial: 1000
+# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
+# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
+# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
+-----BEGIN CERTIFICATE-----
+MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
+FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
+Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
+A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
+b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
+jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
+PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
+ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
+nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
+q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
+MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
+mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
+7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
+oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
+EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
+fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
+AmvZWg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 6047274297262753887
+# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
+# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
+# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
+MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
+VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
+ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
+AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
+661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
+am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
+ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
+PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
+3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
+SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
+ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
+StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
+Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
+jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Label: "Chambers of Commerce Root - 2008"
+# Serial: 11806822484801597146
+# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7
+# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c
+# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0
+-----BEGIN CERTIFICATE-----
+MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
+IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
+MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
+dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
+EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
+MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
+28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
+VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
+DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
+5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
+ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
+Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
+UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
+Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
+ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
+hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
+HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
+YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
+L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
+ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
+IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
+HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
+DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
+PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
+5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
+glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
+FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
+pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
+xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
+tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
+jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
+fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
+OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
+d0jQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Label: "Global Chambersign Root - 2008"
+# Serial: 14541511773111788494
+# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3
+# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c
+# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca
+-----BEGIN CERTIFICATE-----
+MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
+aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
+MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
+cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
+A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
+BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
+hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
+KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
+G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
+zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
+ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
+HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
+Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
+yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
+beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
+6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
+wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
+zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
+BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
+ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
+ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
+cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
+YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
+CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
+KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
+hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
+UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
+X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
+fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
+a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
+Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
+SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
+AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
+M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
+v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
+09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2011"
+# Serial: 0
+# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9
+# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d
+# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71
+-----BEGIN CERTIFICATE-----
+MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
+RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
+YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
+NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
+EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
+cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
+dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
+fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
+bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
+75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
+FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
+HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
+5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
+b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
+A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
+6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
+TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
+dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
+Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
+l7WdmplNsDz4SgCbZN2fOUvRJ9e4
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Trustis Limited OU=Trustis FPS Root CA
+# Subject: O=Trustis Limited OU=Trustis FPS Root CA
+# Label: "Trustis FPS Root CA"
+# Serial: 36053640375399034304724988975563710553
+# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d
+# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04
+# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d
+-----BEGIN CERTIFICATE-----
+MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
+ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
+MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
+MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
+iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
+vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
+0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
+OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
+BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
+FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
+GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
+zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
+1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
+f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
+jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
+ZetX2fNXlrtIzYE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Label: "EE Certification Centre Root CA"
+# Serial: 112324828676200291871926431888494945866
+# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
+# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
+# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
+-----BEGIN CERTIFICATE-----
+MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
+MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
+czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
+CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
+MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
+ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
+b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
+AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
+euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
+bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
+MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
+1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
+zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
+BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
+BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
+v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
+E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
+uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
+iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
+GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
+# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
+# Label: "E-Tugra Certification Authority"
+# Serial: 7667447206703254355
+# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
+# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
+# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
+BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
+aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
+BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
+Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
+MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
+em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
+ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
+B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
+D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
+Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
+q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
+k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
+fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
+dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
+ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
+zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
+rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
+U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
+Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
+XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
+Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
+HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
+GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
+77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
+FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
+yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
+AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
+y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
+NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 14367148294922964480859022125800977897474
+# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e
+# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb
+# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c
+-----BEGIN CERTIFICATE-----
+MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ
+FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F
+uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs
+ewv4n4Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G3"
+# Serial: 10003001
+# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37
+# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc
+# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX
+DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP
+cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW
+IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX
+xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy
+KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR
+9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az
+5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8
+6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7
+Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP
+bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt
+BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt
+XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd
+INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD
+U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp
+LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8
+Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp
+gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh
+/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A
+fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq
+4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR
+1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/
+QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM
+94B7IWcnMFk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden EV Root CA"
+# Serial: 10000013
+# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
+# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
+# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
+-----BEGIN CERTIFICATE-----
+MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
+MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
+TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
+b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
+M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
+Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
+rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
+pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
+j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
+KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
+/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
+cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
+1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
+px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
+MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
+eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
+2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
+v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
+wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
+CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
+vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
+Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
+Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
+eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
+FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
+7uzXLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
+# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
+# Label: "Certinomis - Root CA"
+# Serial: 1
+# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f
+# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8
+# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58
+-----BEGIN CERTIFICATE-----
+MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET
+MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb
+BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz
+MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx
+FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g
+Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2
+fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl
+LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV
+WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF
+TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb
+5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc
+CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri
+wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ
+wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG
+m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4
+F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng
+WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0
+2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF
+AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/
+0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw
+F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS
+g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj
+qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN
+h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/
+ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V
+btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj
+Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ
+8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW
+gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
+-----BEGIN CERTIFICATE-----
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certplus Root CA G1 O=Certplus
+# Subject: CN=Certplus Root CA G1 O=Certplus
+# Label: "Certplus Root CA G1"
+# Serial: 1491911565779898356709731176965615564637713
+# MD5 Fingerprint: 7f:09:9c:f7:d9:b9:5c:69:69:56:d5:37:3e:14:0d:42
+# SHA1 Fingerprint: 22:fd:d0:b7:fd:a2:4e:0d:ac:49:2c:a0:ac:a6:7b:6a:1f:e3:f7:66
+# SHA256 Fingerprint: 15:2a:40:2b:fc:df:2c:d5:48:05:4d:22:75:b3:9c:7f:ca:3e:c0:97:80:78:b0:f0:ea:76:e5:61:a6:c7:43:3e
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA
+MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy
+dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa
+MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy
+dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+ANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHNr49a
+iZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt
+6kuJPKNxQv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP
+0FG7Yn2ksYyy/yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f
+6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTvLRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDE
+EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v+WZxcIbekN
+1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc
+h2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCT
+mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV
+4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPO
+WftwenMGE9nTdDckQQoRb5fc5+R+ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud
+DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSowcCbkahDFXxd
+Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq
+hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh
+66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7
+/SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BS
+S7CTKtQ+FjPlnsZlFT5kOwQ/2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j
+2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F6ALEUz65noe8zDUa3qHpimOHZR4R
+Kttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr
+RHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWetUNy
+6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEV
+V/xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5
+g4VCXA9DO2pJNdWY9BW/+mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl
+++O/QmueD6i9a5jc2NvLi6Td11n0bt3+qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certplus Root CA G2 O=Certplus
+# Subject: CN=Certplus Root CA G2 O=Certplus
+# Label: "Certplus Root CA G2"
+# Serial: 1492087096131536844209563509228951875861589
+# MD5 Fingerprint: a7:ee:c4:78:2d:1b:ee:2d:b9:29:ce:d6:a7:96:32:31
+# SHA1 Fingerprint: 4f:65:8e:1f:e9:06:d8:28:02:e9:54:47:41:c9:54:25:5d:69:cc:1a
+# SHA256 Fingerprint: 6c:c0:50:41:e6:44:5e:74:69:6c:4c:fb:c9:f8:0f:54:3b:7e:ab:bb:44:b4:ce:6f:78:7c:6a:99:71:c4:2f:17
+-----BEGIN CERTIFICATE-----
+MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4x
+CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs
+dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x
+CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs
+dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3/BFGtat
+93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uNAm8x
+Ik0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P
+AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj
+FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG
+SM49BAMDA2gAMGUCMHD+sAvZ94OX7PNVHdTcswYO/jOYnYs5kGuUIe22113WTNch
+p+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal
+U5ORGpOucGpnutee5WEaXw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=OpenTrust Root CA G1 O=OpenTrust
+# Subject: CN=OpenTrust Root CA G1 O=OpenTrust
+# Label: "OpenTrust Root CA G1"
+# Serial: 1492036577811947013770400127034825178844775
+# MD5 Fingerprint: 76:00:cc:81:29:cd:55:5e:88:6a:7a:2e:f7:4d:39:da
+# SHA1 Fingerprint: 79:91:e8:34:f7:e2:ee:dd:08:95:01:52:e9:55:2d:14:e9:58:d5:7e
+# SHA256 Fingerprint: 56:c7:71:28:d9:8c:18:d9:1b:4c:fd:ff:bc:25:ee:91:03:d4:75:8e:a2:ab:ad:82:6a:90:f3:45:7d:46:0e:b4
+-----BEGIN CERTIFICATE-----
+MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA
+MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w
+ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw
+MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU
+T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b
+wiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX
+/uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR0
+77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP
+uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx
+p2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO9z0M+Yo0FMT7MzUj8czx
+Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq3ywgsNw2
+TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+W
+G+Oin6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw
+vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY
+EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUl0YhVyE1
+2jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/PxN3DlCPaTKbYw
+DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E
+PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kf
+gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbS
+FXJfLkur1J1juONI5f6ELlgKn0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0
+V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P
+XlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g/4/I
+i+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t
+TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91
+09S5zvE/bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/Ky
+Pu1svf0OnWZzsD2097+o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ
+AwSQiumPv+i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj
+1oxx
+-----END CERTIFICATE-----
+
+# Issuer: CN=OpenTrust Root CA G2 O=OpenTrust
+# Subject: CN=OpenTrust Root CA G2 O=OpenTrust
+# Label: "OpenTrust Root CA G2"
+# Serial: 1492012448042702096986875987676935573415441
+# MD5 Fingerprint: 57:24:b6:59:24:6b:ae:c8:fe:1c:0c:20:f2:c0:4e:eb
+# SHA1 Fingerprint: 79:5f:88:60:c5:ab:7c:3d:92:e6:cb:f4:8d:e1:45:cd:11:ef:60:0b
+# SHA256 Fingerprint: 27:99:58:29:fe:6a:75:15:c1:bf:e8:48:f9:c4:76:1d:b1:6c:22:59:29:25:7b:f4:0d:08:94:f2:9e:a8:ba:f2
+-----BEGIN CERTIFICATE-----
+MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUA
+MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w
+ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw
+MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU
+T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+Ntmh
+/LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e
+CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/6
+1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fE
+FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9/ca1TS
+gSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH/1PCZ1Eb3X
+G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj3CzMpSZy
+YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaH
+vGOz9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4
+t/bQWVyJ98LVtZR00dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/
+gh7PU3+06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUajn6QiL3
+5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w
+DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz
+Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0
+nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qT
+RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpT
+wm+bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G/Tvw/HRwkqWOOAgfZDC2
+t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa
+TkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2
+o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU
+3jg9CcCoSmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA
+iN1nE28daCSLT7d0geX0YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14f
+WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI/n+UL3PIEM
+S1IK
+-----END CERTIFICATE-----
+
+# Issuer: CN=OpenTrust Root CA G3 O=OpenTrust
+# Subject: CN=OpenTrust Root CA G3 O=OpenTrust
+# Label: "OpenTrust Root CA G3"
+# Serial: 1492104908271485653071219941864171170455615
+# MD5 Fingerprint: 21:37:b4:17:16:92:7b:67:46:70:a9:96:d7:a8:13:24
+# SHA1 Fingerprint: 6e:26:64:f3:56:bf:34:55:bf:d1:93:3f:7c:01:de:d8:13:da:8a:a6
+# SHA256 Fingerprint: b7:c3:62:31:70:6e:81:07:8c:36:7c:b8:96:19:8f:1e:32:08:dd:92:69:49:dd:8f:57:09:a4:10:f7:5b:62:92
+-----BEGIN CERTIFICATE-----
+MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAx
+CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U
+cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow
+QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl
+blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm
+3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d
+oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G
+A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5
+DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK
+BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7+BFjNAk2z8+e2AcG+q
+j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74+nBCZx
+4nxp5V2a+EEfOzmTk51V6s2N8fvB
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Label: "LuxTrust Global Root 2"
+# Serial: 59914338225734147123941058376788110305822489521
+# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c
+# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f
+# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5
+-----BEGIN CERTIFICATE-----
+MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL
+BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV
+BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw
+MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B
+LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F
+ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem
+hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1
+EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn
+Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4
+zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ
+96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m
+j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g
+DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+
+8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j
+X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH
+hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB
+KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0
+Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT
++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL
+BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9
+BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO
+jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9
+loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c
+qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+
+2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/
+JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre
+zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf
+LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+
+x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6
+oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-1"
+# Serial: 15752444095811006489
+# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45
+# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a
+# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y
+IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB
+pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h
+IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG
+A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU
+cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid
+RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V
+seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme
+9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV
+EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW
+hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/
+DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I
+/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf
+ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ
+yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts
+L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN
+zl/HHk484IkzlQsPpTLWPFp5LBk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-2"
+# Serial: 2711694510199101698
+# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64
+# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0
+# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65
+-----BEGIN CERTIFICATE-----
+MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig
+Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk
+MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg
+Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD
+VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy
+dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+
+QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq
+1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp
+2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK
+DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape
+az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF
+3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88
+oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM
+g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3
+mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh
+8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd
+BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U
+nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX
+dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+
+MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL
+/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX
+CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa
+ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW
+2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7
+N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3
+Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB
+As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp
+5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu
+1uwJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor ECA-1"
+# Serial: 9548242946988625984
+# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c
+# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd
+# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y
+IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig
+RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb
+3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA
+BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5
+3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou
+owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/
+wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF
+ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf
+BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/
+MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv
+civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2
+AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F
+hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50
+soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI
+WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi
+tJ/X5g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
+-----BEGIN CERTIFICATE-----
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
+-----BEGIN CERTIFICATE-----
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
diff --git a/testing/web-platform/tests/tools/third_party/certifi/certifi/core.py b/testing/web-platform/tests/tools/third_party/certifi/certifi/core.py
new file mode 100644
index 0000000000..eab9d1d178
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/certifi/certifi/core.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+certifi.py
+~~~~~~~~~~
+
+This module returns the installation location of cacert.pem.
+"""
+import os
+import warnings
+
+
+class DeprecatedBundleWarning(DeprecationWarning):
+ """
+ The weak security bundle is being deprecated. Please bother your service
+ provider to get them to stop using cross-signed roots.
+ """
+
+
+def where():
+ f = os.path.dirname(__file__)
+
+ return os.path.join(f, 'cacert.pem')
+
+
+def old_where():
+ warnings.warn(
+ "The weak security bundle has been removed. certifi.old_where() is now an alias "
+ "of certifi.where(). Please update your code to use certifi.where() instead. "
+ "certifi.old_where() will be removed in 2018.",
+ DeprecatedBundleWarning
+ )
+ return where()
+
+if __name__ == '__main__':
+ print(where())
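A minimal usage sketch for the module above (illustrative only, not part of the vendored file or of this patch; it assumes this certifi copy is importable): where() returns the filesystem path of the bundled cacert.pem, and old_where() is only a deprecated alias that warns before delegating to it.

    import ssl

    import certifi

    # Hand the Mozilla bundle shipped above to the standard TLS machinery.
    bundle_path = certifi.where()
    context = ssl.create_default_context(cafile=bundle_path)

    # The deprecated alias resolves to the same file after emitting
    # DeprecatedBundleWarning.
    assert certifi.old_where() == bundle_path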
diff --git a/testing/web-platform/tests/tools/third_party/certifi/setup.cfg b/testing/web-platform/tests/tools/third_party/certifi/setup.cfg
new file mode 100644
index 0000000000..163eba3165
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/certifi/setup.cfg
@@ -0,0 +1,11 @@
+[bdist_wheel]
+universal = 1
+
+[metadata]
+license_file = LICENSE
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/testing/web-platform/tests/tools/third_party/certifi/setup.py b/testing/web-platform/tests/tools/third_party/certifi/setup.py
new file mode 100755
index 0000000000..2c20c269f6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/certifi/setup.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import with_statement
+import re
+import os
+import sys
+
+# While I generally consider it an antipattern to try and support both
+# setuptools and distutils with a single setup.py, in this specific instance
+# where certifi is a dependency of setuptools, it can create a circular
+# dependency when projects attempt to unbundle stuff from setuptools and pip.
+# Though we don't really support that, it makes things easier if we do this and
+# should hopefully cause fewer issues for end users.
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+
+version_regex = r'__version__ = ["\']([^"\']*)["\']'
+with open('certifi/__init__.py', 'r') as f:
+ text = f.read()
+ match = re.search(version_regex, text)
+
+ if match:
+ VERSION = match.group(1)
+ else:
+ raise RuntimeError("No version number found!")
+
+if sys.argv[-1] == 'publish':
+ os.system('python setup.py sdist bdist_wheel upload')
+ sys.exit()
+
+required = []
+setup(
+ name='certifi',
+ version=VERSION,
+ description='Python package for providing Mozilla\'s CA Bundle.',
+ long_description=open('README.rst').read(),
+ author='Kenneth Reitz',
+ author_email='me@kennethreitz.com',
+ url='http://certifi.io/',
+ packages=[
+ 'certifi',
+ ],
+ package_dir={'certifi': 'certifi'},
+ package_data={'certifi': ['*.pem']},
+ # data_files=[('certifi', ['certifi/cacert.pem'])],
+ include_package_data=True,
+ zip_safe=False,
+ license='MPL-2.0',
+ classifiers=(
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
+ 'Natural Language :: English',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ ),
+)
diff --git a/testing/web-platform/tests/tools/third_party/enum/MANIFEST.in b/testing/web-platform/tests/tools/third_party/enum/MANIFEST.in
new file mode 100644
index 0000000000..98fe77f55a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/enum/MANIFEST.in
@@ -0,0 +1,9 @@
+exclude enum/*
+include setup.py
+include README
+include enum/__init__.py
+include enum/test.py
+include enum/LICENSE
+include enum/README
+include enum/doc/enum.pdf
+include enum/doc/enum.rst
diff --git a/testing/web-platform/tests/tools/third_party/enum/PKG-INFO b/testing/web-platform/tests/tools/third_party/enum/PKG-INFO
new file mode 100644
index 0000000000..623171e38f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/enum/PKG-INFO
@@ -0,0 +1,60 @@
+Metadata-Version: 1.1
+Name: enum34
+Version: 1.1.10
+Summary: Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4
+Home-page: https://bitbucket.org/stoneleaf/enum34
+Author: Ethan Furman
+Author-email: ethan@stoneleaf.us
+License: BSD License
+Description: enum --- support for enumerations
+ ========================================
+
+ An enumeration is a set of symbolic names (members) bound to unique, constant
+ values. Within an enumeration, the members can be compared by identity, and
+ the enumeration itself can be iterated over.
+
+ from enum import Enum
+
+ class Fruit(Enum):
+ apple = 1
+ banana = 2
+ orange = 3
+
+ list(Fruit)
+ # [<Fruit.apple: 1>, <Fruit.banana: 2>, <Fruit.orange: 3>]
+
+ len(Fruit)
+ # 3
+
+ Fruit.banana
+ # <Fruit.banana: 2>
+
+ Fruit['banana']
+ # <Fruit.banana: 2>
+
+ Fruit(2)
+ # <Fruit.banana: 2>
+
+ Fruit.banana is Fruit['banana'] is Fruit(2)
+ # True
+
+ Fruit.banana.name
+ # 'banana'
+
+ Fruit.banana.value
+ # 2
+
+ Repository and Issue Tracker at https://bitbucket.org/stoneleaf/enum34.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.3
+Provides: enum
diff --git a/testing/web-platform/tests/tools/third_party/enum/README b/testing/web-platform/tests/tools/third_party/enum/README
new file mode 100644
index 0000000000..aa2333d8df
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/enum/README
@@ -0,0 +1,3 @@
+enum34 is the new Python stdlib enum module available in Python 3.4,
+backported for previous versions of Python from 2.4 to 3.3.
+Tested on 2.6, 2.7, and 3.3+.
diff --git a/testing/web-platform/tests/tools/third_party/enum/enum/LICENSE b/testing/web-platform/tests/tools/third_party/enum/enum/LICENSE
new file mode 100644
index 0000000000..9003b8850e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/enum/enum/LICENSE
@@ -0,0 +1,32 @@
+Copyright (c) 2013, Ethan Furman.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+ Redistributions of source code must retain the above
+ copyright notice, this list of conditions and the
+ following disclaimer.
+
+ Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials
+ provided with the distribution.
+
+ Neither the name Ethan Furman nor the names of any
+ contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/testing/web-platform/tests/tools/third_party/enum/enum/README b/testing/web-platform/tests/tools/third_party/enum/enum/README
new file mode 100644
index 0000000000..aa2333d8df
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/enum/enum/README
@@ -0,0 +1,3 @@
+enum34 is the new Python stdlib enum module available in Python 3.4,
+backported for previous versions of Python from 2.4 to 3.3.
+Tested on 2.6, 2.7, and 3.3+.
diff --git a/testing/web-platform/tests/tools/third_party/enum/enum/__init__.py b/testing/web-platform/tests/tools/third_party/enum/enum/__init__.py
new file mode 100644
index 0000000000..51f3cf2470
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/enum/enum/__init__.py
@@ -0,0 +1,838 @@
+"""Python Enumerations"""
+
+import sys as _sys
+
+__all__ = ['Enum', 'IntEnum', 'unique']
+
+version = 1, 1, 10
+
+pyver = float('%s.%s' % _sys.version_info[:2])
+
+try:
+ any
+except NameError:
+ def any(iterable):
+ for element in iterable:
+ if element:
+ return True
+ return False
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ OrderedDict = None
+
+try:
+ basestring
+except NameError:
+ # In Python 2 basestring is the ancestor of both str and unicode
+ # in Python 3 it's just str, but was missing in 3.1
+ basestring = str
+
+try:
+ unicode
+except NameError:
+ # In Python 3 unicode no longer exists (it's just str)
+ unicode = str
+
+class _RouteClassAttributeToGetattr(object):
+ """Route attribute access on a class to __getattr__.
+
+ This is a descriptor, used to define attributes that act differently when
+ accessed through an instance and through a class. Instance access remains
+ normal, but access to an attribute through a class will be routed to the
+ class's __getattr__ method; this is done by raising AttributeError.
+
+ """
+ def __init__(self, fget=None):
+ self.fget = fget
+
+ def __get__(self, instance, ownerclass=None):
+ if instance is None:
+ raise AttributeError()
+ return self.fget(instance)
+
+ def __set__(self, instance, value):
+ raise AttributeError("can't set attribute")
+
+ def __delete__(self, instance):
+ raise AttributeError("can't delete attribute")
+
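# Illustrative sketch (not part of the vendored file): the descriptor above
# lets instance access run fget, while class-level access raises
# AttributeError so that a metaclass __getattr__ (EnumMeta's, defined below)
# can resolve the same name as an enum member lookup instead.
class _RouteSketch(object):
    @_RouteClassAttributeToGetattr
    def label(self):
        return 'instance-level value'

assert _RouteSketch().label == 'instance-level value'  # instance access -> fget
try:
    _RouteSketch.label       # class access -> AttributeError is raised...
except AttributeError:
    pass                     # ...and would be fielded by a metaclass __getattr__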
+
+def _is_descriptor(obj):
+ """Returns True if obj is a descriptor, False otherwise."""
+ return (
+ hasattr(obj, '__get__') or
+ hasattr(obj, '__set__') or
+ hasattr(obj, '__delete__'))
+
+
+def _is_dunder(name):
+ """Returns True if a __dunder__ name, False otherwise."""
+ return (name[:2] == name[-2:] == '__' and
+ name[2:3] != '_' and
+ name[-3:-2] != '_' and
+ len(name) > 4)
+
+
+def _is_sunder(name):
+ """Returns True if a _sunder_ name, False otherwise."""
+ return (name[0] == name[-1] == '_' and
+ name[1:2] != '_' and
+ name[-2:-1] != '_' and
+ len(name) > 2)
+
+
+def _make_class_unpicklable(cls):
+ """Make the given class un-picklable."""
+ def _break_on_call_reduce(self, protocol=None):
+ raise TypeError('%r cannot be pickled' % self)
+ cls.__reduce_ex__ = _break_on_call_reduce
+ cls.__module__ = '<unknown>'
+
+
+class _EnumDict(dict):
+ """Track enum member order and ensure member names are not reused.
+
+ EnumMeta will use the names found in self._member_names as the
+ enumeration member names.
+
+ """
+ def __init__(self):
+ super(_EnumDict, self).__init__()
+ self._member_names = []
+
+ def __setitem__(self, key, value):
+ """Changes anything not dundered or not a descriptor.
+
+ If a descriptor is added with the same name as an enum member, the name
+ is removed from _member_names (this may leave a hole in the numerical
+ sequence of values).
+
+ If an enum member name is used twice, an error is raised; duplicate
+ values are not checked for.
+
+ Single underscore (sunder) names are reserved.
+
+        Note: in 3.x __order__ is simply discarded as an unnecessary
+        leftover from 2.x
+
+ """
+ if pyver >= 3.0 and key in ('_order_', '__order__'):
+ return
+ elif key == '__order__':
+ key = '_order_'
+ if _is_sunder(key):
+ if key != '_order_':
+ raise ValueError('_names_ are reserved for future Enum use')
+ elif _is_dunder(key):
+ pass
+ elif key in self._member_names:
+ # descriptor overwriting an enum?
+ raise TypeError('Attempted to reuse key: %r' % key)
+ elif not _is_descriptor(value):
+ if key in self:
+ # enum overwriting a descriptor?
+ raise TypeError('Key already defined as: %r' % self[key])
+ self._member_names.append(key)
+ super(_EnumDict, self).__setitem__(key, value)
+
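# Illustrative sketch (not part of the vendored file): _EnumDict records
# would-be member names in definition order and refuses to let a member name
# be reused, which is the behaviour its __setitem__ docstring describes.
_sketch = _EnumDict()
_sketch['red'] = 1
_sketch['green'] = 2
assert _sketch._member_names == ['red', 'green']
try:
    _sketch['red'] = 3           # reusing a member name raises TypeError
except TypeError:
    pass
_sketch['__doc__'] = 'dunder keys are stored but never become members'
assert _sketch._member_names == ['red', 'green']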
+
+# Dummy value for Enum as EnumMeta explicitly checks for it, but of course until
+# EnumMeta finishes running the first time the Enum class doesn't exist. This
+# is also why there are checks in EnumMeta like `if Enum is not None`
+Enum = None
+
+
+class EnumMeta(type):
+ """Metaclass for Enum"""
+ @classmethod
+ def __prepare__(metacls, cls, bases):
+ return _EnumDict()
+
+ def __new__(metacls, cls, bases, classdict):
+ # an Enum class is final once enumeration items have been defined; it
+ # cannot be mixed with other types (int, float, etc.) if it has an
+ # inherited __new__ unless a new __new__ is defined (or the resulting
+ # class will fail).
+ if type(classdict) is dict:
+ original_dict = classdict
+ classdict = _EnumDict()
+ for k, v in original_dict.items():
+ classdict[k] = v
+
+ member_type, first_enum = metacls._get_mixins_(bases)
+ __new__, save_new, use_args = metacls._find_new_(classdict, member_type,
+ first_enum)
+ # save enum items into separate mapping so they don't get baked into
+ # the new class
+ members = dict((k, classdict[k]) for k in classdict._member_names)
+ for name in classdict._member_names:
+ del classdict[name]
+
+ # py2 support for definition order
+ _order_ = classdict.get('_order_')
+ if _order_ is None:
+ if pyver < 3.0:
+ try:
+ _order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])]
+ except TypeError:
+ _order_ = [name for name in sorted(members.keys())]
+ else:
+ _order_ = classdict._member_names
+ else:
+ del classdict['_order_']
+ if pyver < 3.0:
+ if isinstance(_order_, basestring):
+ _order_ = _order_.replace(',', ' ').split()
+ aliases = [name for name in members if name not in _order_]
+ _order_ += aliases
+
+ # check for illegal enum names (any others?)
+ invalid_names = set(members) & set(['mro'])
+ if invalid_names:
+ raise ValueError('Invalid enum member name(s): %s' % (
+ ', '.join(invalid_names), ))
+
+ # save attributes from super classes so we know if we can take
+ # the shortcut of storing members in the class dict
+ base_attributes = set([a for b in bases for a in b.__dict__])
+ # create our new Enum type
+ enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict)
+ enum_class._member_names_ = [] # names in random order
+ if OrderedDict is not None:
+ enum_class._member_map_ = OrderedDict()
+ else:
+ enum_class._member_map_ = {} # name->value map
+ enum_class._member_type_ = member_type
+
+ # Reverse value->name map for hashable values.
+ enum_class._value2member_map_ = {}
+
+ # instantiate them, checking for duplicates as we go
+ # we instantiate first instead of checking for duplicates first in case
+ # a custom __new__ is doing something funky with the values -- such as
+ # auto-numbering ;)
+ if __new__ is None:
+ __new__ = enum_class.__new__
+ for member_name in _order_:
+ value = members[member_name]
+ if not isinstance(value, tuple):
+ args = (value, )
+ else:
+ args = value
+ if member_type is tuple: # special case for tuple enums
+ args = (args, ) # wrap it one more time
+ if not use_args or not args:
+ enum_member = __new__(enum_class)
+ if not hasattr(enum_member, '_value_'):
+ enum_member._value_ = value
+ else:
+ enum_member = __new__(enum_class, *args)
+ if not hasattr(enum_member, '_value_'):
+ enum_member._value_ = member_type(*args)
+ value = enum_member._value_
+ enum_member._name_ = member_name
+ enum_member.__objclass__ = enum_class
+ enum_member.__init__(*args)
+ # If another member with the same value was already defined, the
+ # new member becomes an alias to the existing one.
+ for name, canonical_member in enum_class._member_map_.items():
+ if canonical_member.value == enum_member._value_:
+ enum_member = canonical_member
+ break
+ else:
+ # Aliases don't appear in member names (only in __members__).
+ enum_class._member_names_.append(member_name)
+ # performance boost for any member that would not shadow
+ # a DynamicClassAttribute (aka _RouteClassAttributeToGetattr)
+ if member_name not in base_attributes:
+ setattr(enum_class, member_name, enum_member)
+ # now add to _member_map_
+ enum_class._member_map_[member_name] = enum_member
+ try:
+ # This may fail if value is not hashable. We can't add the value
+ # to the map, and by-value lookups for this value will be
+ # linear.
+ enum_class._value2member_map_[value] = enum_member
+ except TypeError:
+ pass
+
+
+ # If a custom type is mixed into the Enum, and it does not know how
+ # to pickle itself, pickle.dumps will succeed but pickle.loads will
+ # fail. Rather than have the error show up later and possibly far
+ # from the source, sabotage the pickle protocol for this class so
+ # that pickle.dumps also fails.
+ #
+ # However, if the new class implements its own __reduce_ex__, do not
+ # sabotage -- it's on them to make sure it works correctly. We use
+ # __reduce_ex__ instead of any of the others as it is preferred by
+ # pickle over __reduce__, and it handles all pickle protocols.
+ unpicklable = False
+ if '__reduce_ex__' not in classdict:
+ if member_type is not object:
+ methods = ('__getnewargs_ex__', '__getnewargs__',
+ '__reduce_ex__', '__reduce__')
+ if not any(m in member_type.__dict__ for m in methods):
+ _make_class_unpicklable(enum_class)
+ unpicklable = True
+
+
+ # double check that repr and friends are not the mixin's or various
+ # things break (such as pickle)
+ for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
+ class_method = getattr(enum_class, name)
+ obj_method = getattr(member_type, name, None)
+ enum_method = getattr(first_enum, name, None)
+ if name not in classdict and class_method is not enum_method:
+ if name == '__reduce_ex__' and unpicklable:
+ continue
+ setattr(enum_class, name, enum_method)
+
+        # method resolution and ints are not playing nice;
+        # Pythons older than 2.6 use __cmp__
+
+ if pyver < 2.6:
+
+ if issubclass(enum_class, int):
+ setattr(enum_class, '__cmp__', getattr(int, '__cmp__'))
+
+ elif pyver < 3.0:
+
+ if issubclass(enum_class, int):
+ for method in (
+ '__le__',
+ '__lt__',
+ '__gt__',
+ '__ge__',
+ '__eq__',
+ '__ne__',
+ '__hash__',
+ ):
+ setattr(enum_class, method, getattr(int, method))
+
+ # replace any other __new__ with our own (as long as Enum is not None,
+ # anyway) -- again, this is to support pickle
+ if Enum is not None:
+ # if the user defined their own __new__, save it before it gets
+ # clobbered in case they subclass later
+ if save_new:
+ setattr(enum_class, '__member_new__', enum_class.__dict__['__new__'])
+ setattr(enum_class, '__new__', Enum.__dict__['__new__'])
+ return enum_class
+
+ def __bool__(cls):
+ """
+ classes/types should always be True.
+ """
+ return True
+
+ def __call__(cls, value, names=None, module=None, type=None, start=1):
+ """Either returns an existing member, or creates a new enum class.
+
+ This method is used both when an enum class is given a value to match
+ to an enumeration member (i.e. Color(3)) and for the functional API
+ (i.e. Color = Enum('Color', names='red green blue')).
+
+ When used for the functional API: `module`, if set, will be stored in
+ the new class' __module__ attribute; `type`, if set, will be mixed in
+ as the first base class.
+
+ Note: if `module` is not set this routine will attempt to discover the
+ calling module by walking the frame stack; if this is unsuccessful
+ the resulting class will not be pickleable.
+
+ """
+ if names is None: # simple value lookup
+ return cls.__new__(cls, value)
+ # otherwise, functional API: we're creating a new Enum type
+ return cls._create_(value, names, module=module, type=type, start=start)
+
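    # Illustrative sketch (comments only, not part of the vendored file): the
    # two call forms described in the docstring above, with a hypothetical
    # Color enum.
    #
    #     Color = Enum('Color', 'red green blue')   # functional API, values 1..3
    #     Color(2)                                   # value lookup -> <Color.green: 2>
    #     Color['green']                             # name lookup  -> <Color.green: 2>
    #     assert Color(2) is Color['green'] is Color.green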
+ def __contains__(cls, member):
+ return isinstance(member, cls) and member.name in cls._member_map_
+
+ def __delattr__(cls, attr):
+ # nicer error message when someone tries to delete an attribute
+ # (see issue19025).
+ if attr in cls._member_map_:
+ raise AttributeError(
+ "%s: cannot delete Enum member." % cls.__name__)
+ super(EnumMeta, cls).__delattr__(attr)
+
+ def __dir__(self):
+ return (['__class__', '__doc__', '__members__', '__module__'] +
+ self._member_names_)
+
+ @property
+ def __members__(cls):
+ """Returns a mapping of member name->value.
+
+ This mapping lists all enum members, including aliases. Note that this
+ is a copy of the internal mapping.
+
+ """
+ return cls._member_map_.copy()
+
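    # Illustrative sketch (comments only, not part of the vendored file): as
    # noted above, __members__ also exposes aliases, which iterating the class
    # does not.
    #
    #     Shade = Enum('Shade', [('grey', 1), ('gray', 1)])    # equal values
    #     assert Shade.gray is Shade.grey                       # 'gray' is an alias
    #     assert list(Shade) == [Shade.grey]                    # aliases not iterated
    #     assert sorted(Shade.__members__) == ['gray', 'grey']  # but listed here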
+ def __getattr__(cls, name):
+ """Return the enum member matching `name`
+
+ We use __getattr__ instead of descriptors or inserting into the enum
+ class' __dict__ in order to support `name` and `value` being both
+ properties for enum members (which live in the class' __dict__) and
+ enum members themselves.
+
+ """
+ if _is_dunder(name):
+ raise AttributeError(name)
+ try:
+ return cls._member_map_[name]
+ except KeyError:
+ raise AttributeError(name)
+
+ def __getitem__(cls, name):
+ return cls._member_map_[name]
+
+ def __iter__(cls):
+ return (cls._member_map_[name] for name in cls._member_names_)
+
+ def __reversed__(cls):
+ return (cls._member_map_[name] for name in reversed(cls._member_names_))
+
+ def __len__(cls):
+ return len(cls._member_names_)
+
+ __nonzero__ = __bool__
+
+ def __repr__(cls):
+ return "<enum %r>" % cls.__name__
+
+ def __setattr__(cls, name, value):
+ """Block attempts to reassign Enum members.
+
+ A simple assignment to the class namespace only changes one of the
+ several possible ways to get an Enum member from the Enum class,
+ resulting in an inconsistent Enumeration.
+
+ """
+ member_map = cls.__dict__.get('_member_map_', {})
+ if name in member_map:
+ raise AttributeError('Cannot reassign members.')
+ super(EnumMeta, cls).__setattr__(name, value)
+
+ def _create_(cls, class_name, names=None, module=None, type=None, start=1):
+ """Convenience method to create a new Enum class.
+
+ `names` can be:
+
+ * A string containing member names, separated either with spaces or
+ commas. Values are auto-numbered from 1.
+ * An iterable of member names. Values are auto-numbered from 1.
+ * An iterable of (member name, value) pairs.
+ * A mapping of member name -> value.
+
+ """
+ if pyver < 3.0:
+ # if class_name is unicode, attempt a conversion to ASCII
+ if isinstance(class_name, unicode):
+ try:
+ class_name = class_name.encode('ascii')
+ except UnicodeEncodeError:
+ raise TypeError('%r is not representable in ASCII' % class_name)
+ metacls = cls.__class__
+ if type is None:
+ bases = (cls, )
+ else:
+ bases = (type, cls)
+ classdict = metacls.__prepare__(class_name, bases)
+ _order_ = []
+
+ # special processing needed for names?
+ if isinstance(names, basestring):
+ names = names.replace(',', ' ').split()
+ if isinstance(names, (tuple, list)) and isinstance(names[0], basestring):
+ names = [(e, i+start) for (i, e) in enumerate(names)]
+
+ # Here, names is either an iterable of (name, value) or a mapping.
+ item = None # in case names is empty
+ for item in names:
+ if isinstance(item, basestring):
+ member_name, member_value = item, names[item]
+ else:
+ member_name, member_value = item
+ classdict[member_name] = member_value
+ _order_.append(member_name)
+ # only set _order_ in classdict if name/value was not from a mapping
+ if not isinstance(item, basestring):
+ classdict['_order_'] = _order_
+ enum_class = metacls.__new__(metacls, class_name, bases, classdict)
+
+ # TODO: replace the frame hack if a blessed way to know the calling
+ # module is ever developed
+ if module is None:
+ try:
+ module = _sys._getframe(2).f_globals['__name__']
+ except (AttributeError, ValueError):
+ pass
+ if module is None:
+ _make_class_unpicklable(enum_class)
+ else:
+ enum_class.__module__ = module
+
+ return enum_class
+
+ @staticmethod
+ def _get_mixins_(bases):
+ """Returns the type for creating enum members, and the first inherited
+ enum class.
+
+ bases: the tuple of bases that was given to __new__
+
+ """
+ if not bases or Enum is None:
+ return object, Enum
+
+
+ # double check that we are not subclassing a class with existing
+ # enumeration members; while we're at it, see if any other data
+ # type has been mixed in so we can use the correct __new__
+ member_type = first_enum = None
+ for base in bases:
+ if (base is not Enum and
+ issubclass(base, Enum) and
+ base._member_names_):
+ raise TypeError("Cannot extend enumerations")
+ # base is now the last base in bases
+ if not issubclass(base, Enum):
+ raise TypeError("new enumerations must be created as "
+ "`ClassName([mixin_type,] enum_type)`")
+
+ # get correct mix-in type (either mix-in type of Enum subclass, or
+ # first base if last base is Enum)
+ if not issubclass(bases[0], Enum):
+ member_type = bases[0] # first data type
+ first_enum = bases[-1] # enum type
+ else:
+ for base in bases[0].__mro__:
+ # most common: (IntEnum, int, Enum, object)
+ # possible: (<Enum 'AutoIntEnum'>, <Enum 'IntEnum'>,
+ # <class 'int'>, <Enum 'Enum'>,
+ # <class 'object'>)
+ if issubclass(base, Enum):
+ if first_enum is None:
+ first_enum = base
+ else:
+ if member_type is None:
+ member_type = base
+
+ return member_type, first_enum
+
+ if pyver < 3.0:
+ @staticmethod
+ def _find_new_(classdict, member_type, first_enum):
+ """Returns the __new__ to be used for creating the enum members.
+
+ classdict: the class dictionary given to __new__
+ member_type: the data type whose __new__ will be used by default
+ first_enum: enumeration to check for an overriding __new__
+
+ """
+ # now find the correct __new__, checking to see if one was defined
+ # by the user; also check earlier enum classes in case a __new__ was
+ # saved as __member_new__
+ __new__ = classdict.get('__new__', None)
+ if __new__:
+ return None, True, True # __new__, save_new, use_args
+
+ N__new__ = getattr(None, '__new__')
+ O__new__ = getattr(object, '__new__')
+ if Enum is None:
+ E__new__ = N__new__
+ else:
+ E__new__ = Enum.__dict__['__new__']
+ # check all possibles for __member_new__ before falling back to
+ # __new__
+ for method in ('__member_new__', '__new__'):
+ for possible in (member_type, first_enum):
+ try:
+ target = possible.__dict__[method]
+ except (AttributeError, KeyError):
+ target = getattr(possible, method, None)
+ if target not in [
+ None,
+ N__new__,
+ O__new__,
+ E__new__,
+ ]:
+ if method == '__member_new__':
+ classdict['__new__'] = target
+ return None, False, True
+ if isinstance(target, staticmethod):
+ target = target.__get__(member_type)
+ __new__ = target
+ break
+ if __new__ is not None:
+ break
+ else:
+ __new__ = object.__new__
+
+ # if a non-object.__new__ is used then whatever value/tuple was
+ # assigned to the enum member name will be passed to __new__ and to the
+ # new enum member's __init__
+ if __new__ is object.__new__:
+ use_args = False
+ else:
+ use_args = True
+
+ return __new__, False, use_args
+ else:
+ @staticmethod
+ def _find_new_(classdict, member_type, first_enum):
+ """Returns the __new__ to be used for creating the enum members.
+
+ classdict: the class dictionary given to __new__
+ member_type: the data type whose __new__ will be used by default
+ first_enum: enumeration to check for an overriding __new__
+
+ """
+ # now find the correct __new__, checking to see if one was defined
+ # by the user; also check earlier enum classes in case a __new__ was
+ # saved as __member_new__
+ __new__ = classdict.get('__new__', None)
+
+ # should __new__ be saved as __member_new__ later?
+ save_new = __new__ is not None
+
+ if __new__ is None:
+ # check all possibles for __member_new__ before falling back to
+ # __new__
+ for method in ('__member_new__', '__new__'):
+ for possible in (member_type, first_enum):
+ target = getattr(possible, method, None)
+ if target not in (
+ None,
+ None.__new__,
+ object.__new__,
+ Enum.__new__,
+ ):
+ __new__ = target
+ break
+ if __new__ is not None:
+ break
+ else:
+ __new__ = object.__new__
+
+ # if a non-object.__new__ is used then whatever value/tuple was
+ # assigned to the enum member name will be passed to __new__ and to the
+ # new enum member's __init__
+ if __new__ is object.__new__:
+ use_args = False
+ else:
+ use_args = True
+
+ return __new__, save_new, use_args
+
+
+########################################################
+# In order to support Python 2 and 3 with a single
+# codebase we have to create the Enum methods separately
+# and then use the `type(name, bases, dict)` method to
+# create the class.
+########################################################
+temp_enum_dict = {}
+temp_enum_dict['__doc__'] = "Generic enumeration.\n\n Derive from this class to define new enumerations.\n\n"
+
+def __new__(cls, value):
+ # all enum instances are actually created during class construction
+ # without calling this method; this method is called by the metaclass'
+ # __call__ (e.g. Color(3)), and by pickle
+ if type(value) is cls:
+ # For lookups like Color(Color.red)
+ value = value.value
+ #return value
+ # by-value search for a matching enum member
+ # see if it's in the reverse mapping (for hashable values)
+ try:
+ if value in cls._value2member_map_:
+ return cls._value2member_map_[value]
+ except TypeError:
+ # not there, now do long search -- O(n) behavior
+ for member in cls._member_map_.values():
+ if member.value == value:
+ return member
+ raise ValueError("%s is not a valid %s" % (value, cls.__name__))
+temp_enum_dict['__new__'] = __new__
+del __new__
+
+def __repr__(self):
+ return "<%s.%s: %r>" % (
+ self.__class__.__name__, self._name_, self._value_)
+temp_enum_dict['__repr__'] = __repr__
+del __repr__
+
+def __str__(self):
+ return "%s.%s" % (self.__class__.__name__, self._name_)
+temp_enum_dict['__str__'] = __str__
+del __str__
+
+if pyver >= 3.0:
+ def __dir__(self):
+ added_behavior = [
+ m
+ for cls in self.__class__.mro()
+ for m in cls.__dict__
+ if m[0] != '_' and m not in self._member_map_
+ ]
+ return (['__class__', '__doc__', '__module__', ] + added_behavior)
+ temp_enum_dict['__dir__'] = __dir__
+ del __dir__
+
+def __format__(self, format_spec):
+ # mixed-in Enums should use the mixed-in type's __format__, otherwise
+ # we can get strange results with the Enum name showing up instead of
+ # the value
+
+ # pure Enum branch
+ if self._member_type_ is object:
+ cls = str
+ val = str(self)
+ # mix-in branch
+ else:
+ cls = self._member_type_
+ val = self.value
+ return cls.__format__(val, format_spec)
+temp_enum_dict['__format__'] = __format__
+del __format__
+
+
+####################################
+# Python versions less than 2.6 use __cmp__
+
+if pyver < 2.6:
+
+ def __cmp__(self, other):
+ if type(other) is self.__class__:
+ if self is other:
+ return 0
+ return -1
+ return NotImplemented
+ raise TypeError("unorderable types: %s() and %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__cmp__'] = __cmp__
+ del __cmp__
+
+else:
+
+ def __le__(self, other):
+ raise TypeError("unorderable types: %s() <= %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__le__'] = __le__
+ del __le__
+
+ def __lt__(self, other):
+ raise TypeError("unorderable types: %s() < %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__lt__'] = __lt__
+ del __lt__
+
+ def __ge__(self, other):
+ raise TypeError("unorderable types: %s() >= %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__ge__'] = __ge__
+ del __ge__
+
+ def __gt__(self, other):
+ raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__gt__'] = __gt__
+ del __gt__
+
+
+def __eq__(self, other):
+ if type(other) is self.__class__:
+ return self is other
+ return NotImplemented
+temp_enum_dict['__eq__'] = __eq__
+del __eq__
+
+def __ne__(self, other):
+ if type(other) is self.__class__:
+ return self is not other
+ return NotImplemented
+temp_enum_dict['__ne__'] = __ne__
+del __ne__
+
+def __hash__(self):
+ return hash(self._name_)
+temp_enum_dict['__hash__'] = __hash__
+del __hash__
+
+def __reduce_ex__(self, proto):
+ return self.__class__, (self._value_, )
+temp_enum_dict['__reduce_ex__'] = __reduce_ex__
+del __reduce_ex__
+
+# _RouteClassAttributeToGetattr is used to provide access to the `name`
+# and `value` properties of enum members while keeping some measure of
+# protection from modification, while still allowing for an enumeration
+# to have members named `name` and `value`. This works because enumeration
+# members are not set directly on the enum class -- __getattr__ is
+# used to look them up.
+
+@_RouteClassAttributeToGetattr
+def name(self):
+ return self._name_
+temp_enum_dict['name'] = name
+del name
+
+@_RouteClassAttributeToGetattr
+def value(self):
+ return self._value_
+temp_enum_dict['value'] = value
+del value
+
+@classmethod
+def _convert(cls, name, module, filter, source=None):
+ """
+ Create a new Enum subclass that replaces a collection of global constants
+ """
+ # convert all constants from source (or module) that pass filter() to
+ # a new Enum called name, and export the enum and its members back to
+ # module;
+ # also, replace the __reduce_ex__ method so unpickling works in
+ # previous Python versions
+ module_globals = vars(_sys.modules[module])
+ if source:
+ source = vars(source)
+ else:
+ source = module_globals
+ members = dict((name, value) for name, value in source.items() if filter(name))
+ cls = cls(name, members, module=module)
+ cls.__reduce_ex__ = _reduce_ex_by_name
+ module_globals.update(cls.__members__)
+ module_globals[name] = cls
+ return cls
+temp_enum_dict['_convert'] = _convert
+del _convert
+
+Enum = EnumMeta('Enum', (object, ), temp_enum_dict)
+del temp_enum_dict
+
+# Enum has now been created
+###########################
+
+class IntEnum(int, Enum):
+ """Enum where members are also (and must be) ints"""
+
+def _reduce_ex_by_name(self, proto):
+ return self.name
+
+def unique(enumeration):
+ """Class decorator that ensures only unique members exist in an enumeration."""
+ duplicates = []
+ for name, member in enumeration.__members__.items():
+ if name != member.name:
+ duplicates.append((name, member.name))
+ if duplicates:
+ duplicate_names = ', '.join(
+ ["%s -> %s" % (alias, name) for (alias, name) in duplicates]
+ )
+ raise ValueError('duplicate names found in %r: %s' %
+ (enumeration, duplicate_names)
+ )
+ return enumeration
diff --git a/testing/web-platform/tests/tools/third_party/enum/enum/doc/enum.rst b/testing/web-platform/tests/tools/third_party/enum/enum/doc/enum.rst
new file mode 100644
index 0000000000..3afc238210
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/enum/enum/doc/enum.rst
@@ -0,0 +1,735 @@
+``enum`` --- support for enumerations
+========================================
+
+.. :synopsis: enumerations are sets of symbolic names bound to unique, constant
+ values.
+.. :moduleauthor:: Ethan Furman <ethan@stoneleaf.us>
+.. :sectionauthor:: Barry Warsaw <barry@python.org>,
+.. :sectionauthor:: Eli Bendersky <eliben@gmail.com>,
+.. :sectionauthor:: Ethan Furman <ethan@stoneleaf.us>
+
+----------------
+
+An enumeration is a set of symbolic names (members) bound to unique, constant
+values. Within an enumeration, the members can be compared by identity, and
+the enumeration itself can be iterated over.
+
+
+Module Contents
+---------------
+
+This module defines two enumeration classes that can be used to define unique
+sets of names and values: ``Enum`` and ``IntEnum``. It also defines
+one decorator, ``unique``.
+
+``Enum``
+
+Base class for creating enumerated constants. See section `Functional API`_
+for an alternate construction syntax.
+
+``IntEnum``
+
+Base class for creating enumerated constants that are also subclasses of ``int``.
+
+``unique``
+
+Enum class decorator that ensures only one name is bound to any one value.
+
+
+Creating an Enum
+----------------
+
+Enumerations are created using the ``class`` syntax, which makes them
+easy to read and write. An alternative creation method is described in
+`Functional API`_. To define an enumeration, subclass ``Enum`` as
+follows::
+
+ >>> from enum import Enum
+ >>> class Color(Enum):
+ ... red = 1
+ ... green = 2
+ ... blue = 3
+
+Note: Nomenclature
+
+ - The class ``Color`` is an *enumeration* (or *enum*)
+ - The attributes ``Color.red``, ``Color.green``, etc., are
+ *enumeration members* (or *enum members*).
+ - The enum members have *names* and *values* (the name of
+ ``Color.red`` is ``red``, the value of ``Color.blue`` is
+ ``3``, etc.)
+
+Note:
+
+ Even though we use the ``class`` syntax to create Enums, Enums
+ are not normal Python classes. See `How are Enums different?`_ for
+ more details.
+
+Enumeration members have human readable string representations::
+
+ >>> print(Color.red)
+ Color.red
+
+...while their ``repr`` has more information::
+
+ >>> print(repr(Color.red))
+ <Color.red: 1>
+
+The *type* of an enumeration member is the enumeration it belongs to::
+
+ >>> type(Color.red)
+ <enum 'Color'>
+ >>> isinstance(Color.green, Color)
+ True
+ >>>
+
+Enum members also have a property that contains just their item name::
+
+ >>> print(Color.red.name)
+ red
+
+Enumerations support iteration. In Python 3.x definition order is used; in
+Python 2.x the definition order is not available, so the class attribute
+``__order__`` can be used to supply it; otherwise, value order is used::
+
+ >>> class Shake(Enum):
+ ... __order__ = 'vanilla chocolate cookies mint' # only needed in 2.x
+ ... vanilla = 7
+ ... chocolate = 4
+ ... cookies = 9
+ ... mint = 3
+ ...
+ >>> for shake in Shake:
+ ... print(shake)
+ ...
+ Shake.vanilla
+ Shake.chocolate
+ Shake.cookies
+ Shake.mint
+
+The ``__order__`` attribute is always removed, and in 3.x it is also ignored
+(order is definition order); however, in the stdlib version it will be ignored
+but not removed.
+
+Enumeration members are hashable, so they can be used in dictionaries and sets::
+
+ >>> apples = {}
+ >>> apples[Color.red] = 'red delicious'
+ >>> apples[Color.green] = 'granny smith'
+ >>> apples == {Color.red: 'red delicious', Color.green: 'granny smith'}
+ True
+
+
+Programmatic access to enumeration members and their attributes
+---------------------------------------------------------------
+
+Sometimes it's useful to access members in enumerations programmatically (i.e.
+situations where ``Color.red`` won't do because the exact color is not known
+at program-writing time). ``Enum`` allows such access::
+
+ >>> Color(1)
+ <Color.red: 1>
+ >>> Color(3)
+ <Color.blue: 3>
+
+If you want to access enum members by *name*, use item access::
+
+ >>> Color['red']
+ <Color.red: 1>
+ >>> Color['green']
+ <Color.green: 2>
+
+If you have an enum member and need its ``name`` or ``value``::
+
+ >>> member = Color.red
+ >>> member.name
+ 'red'
+ >>> member.value
+ 1
+
+
+Duplicating enum members and values
+-----------------------------------
+
+Having two enum members (or any other attribute) with the same name is invalid;
+in Python 3.x this would raise an error, but in Python 2.x the second member
+simply overwrites the first::
+
+ >>> # python 2.x
+ >>> class Shape(Enum):
+ ... square = 2
+ ... square = 3
+ ...
+ >>> Shape.square
+ <Shape.square: 3>
+
+ >>> # python 3.x
+ >>> class Shape(Enum):
+ ... square = 2
+ ... square = 3
+ Traceback (most recent call last):
+ ...
+ TypeError: Attempted to reuse key: 'square'
+
+However, two enum members are allowed to have the same value. Given two members
+A and B with the same value (and A defined first), B is an alias to A. By-value
+lookup of the value of A and B will return A. By-name lookup of B will also
+return A::
+
+ >>> class Shape(Enum):
+ ... __order__ = 'square diamond circle alias_for_square' # only needed in 2.x
+ ... square = 2
+ ... diamond = 1
+ ... circle = 3
+ ... alias_for_square = 2
+ ...
+ >>> Shape.square
+ <Shape.square: 2>
+ >>> Shape.alias_for_square
+ <Shape.square: 2>
+ >>> Shape(2)
+ <Shape.square: 2>
+
+
+Allowing aliases is not always desirable. ``unique`` can be used to ensure
+that none exist in a particular enumeration::
+
+ >>> from enum import unique
+ >>> @unique
+ ... class Mistake(Enum):
+ ... __order__ = 'one two three four' # only needed in 2.x
+ ... one = 1
+ ... two = 2
+ ... three = 3
+ ... four = 3
+ Traceback (most recent call last):
+ ...
+ ValueError: duplicate names found in <enum 'Mistake'>: four -> three
+
+Iterating over the members of an enum does not provide the aliases::
+
+ >>> list(Shape)
+ [<Shape.square: 2>, <Shape.diamond: 1>, <Shape.circle: 3>]
+
+The special attribute ``__members__`` is a dictionary mapping names to members.
+It includes all names defined in the enumeration, including the aliases::
+
+ >>> for name, member in sorted(Shape.__members__.items()):
+ ... name, member
+ ...
+ ('alias_for_square', <Shape.square: 2>)
+ ('circle', <Shape.circle: 3>)
+ ('diamond', <Shape.diamond: 1>)
+ ('square', <Shape.square: 2>)
+
+The ``__members__`` attribute can be used for detailed programmatic access to
+the enumeration members. For example, finding all the aliases::
+
+ >>> [name for name, member in Shape.__members__.items() if member.name != name]
+ ['alias_for_square']
+
+Comparisons
+-----------
+
+Enumeration members are compared by identity::
+
+ >>> Color.red is Color.red
+ True
+ >>> Color.red is Color.blue
+ False
+ >>> Color.red is not Color.blue
+ True
+
+Ordered comparisons between enumeration values are *not* supported. Enum
+members are not integers (but see `IntEnum`_ below)::
+
+ >>> Color.red < Color.blue
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ TypeError: unorderable types: Color() < Color()
+
+.. warning::
+
+ In Python 2 *everything* is ordered, even though the ordering may not
+ make sense. If you want your enumerations to have a sensible ordering
+ check out the `OrderedEnum`_ recipe below.
+
+
+Equality comparisons are defined though::
+
+ >>> Color.blue == Color.red
+ False
+ >>> Color.blue != Color.red
+ True
+ >>> Color.blue == Color.blue
+ True
+
+Comparisons against non-enumeration values will always compare not equal
+(again, ``IntEnum`` was explicitly designed to behave differently, see
+below)::
+
+ >>> Color.blue == 2
+ False
+
+
+Allowed members and attributes of enumerations
+----------------------------------------------
+
+The examples above use integers for enumeration values. Using integers is
+short and handy (and provided by default by the `Functional API`_), but not
+strictly enforced. In the vast majority of use-cases, one doesn't care what
+the actual value of an enumeration is. But if the value *is* important,
+enumerations can have arbitrary values.
+
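+For example, values may just as well be strings; a minimal sketch (``Answer``
+is a hypothetical enumeration used only for illustration)::
+
+    >>> class Answer(Enum):
+    ...     yes = 'affirmative'
+    ...     no = 'negative'
+    ...
+    >>> Answer.yes.value
+    'affirmative'
+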
+Enumerations are Python classes, and can have methods and special methods as
+usual. If we have this enumeration::
+
+ >>> class Mood(Enum):
+ ... funky = 1
+ ... happy = 3
+ ...
+ ... def describe(self):
+ ... # self is the member here
+ ... return self.name, self.value
+ ...
+ ... def __str__(self):
+ ... return 'my custom str! {0}'.format(self.value)
+ ...
+ ... @classmethod
+ ... def favorite_mood(cls):
+ ... # cls here is the enumeration
+ ... return cls.happy
+
+Then::
+
+ >>> Mood.favorite_mood()
+ <Mood.happy: 3>
+ >>> Mood.happy.describe()
+ ('happy', 3)
+ >>> str(Mood.funky)
+ 'my custom str! 1'
+
+The rules for what is allowed are as follows: _sunder_ names (starting and
+ending with a single underscore) are reserved by enum and cannot be used;
+all other attributes defined within an enumeration will become members of this
+enumeration, with the exception of *__dunder__* names and descriptors (methods
+are also descriptors).
+
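+For example, with the ``Mood`` enumeration above, ``describe``,
+``favorite_mood`` and ``__str__`` do not become members (and a _sunder_ name
+such as ``_reserved_`` would be rejected with a ``ValueError`` at class
+creation)::
+
+    >>> list(Mood)
+    [<Mood.funky: 1>, <Mood.happy: 3>]
+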
+Note:
+
+ If your enumeration defines ``__new__`` and/or ``__init__`` then
+ whatever value(s) were given to the enum member will be passed into
+ those methods. See `Planet`_ for an example.
+
+
+Restricted subclassing of enumerations
+--------------------------------------
+
+Subclassing an enumeration is allowed only if the enumeration does not define
+any members. So this is forbidden::
+
+ >>> class MoreColor(Color):
+ ... pink = 17
+ Traceback (most recent call last):
+ ...
+ TypeError: Cannot extend enumerations
+
+But this is allowed::
+
+ >>> class Foo(Enum):
+ ... def some_behavior(self):
+ ... pass
+ ...
+ >>> class Bar(Foo):
+ ... happy = 1
+ ... sad = 2
+ ...
+
+Allowing subclassing of enums that define members would lead to a violation of
+some important invariants of types and instances. On the other hand, it makes
+sense to allow sharing some common behavior between a group of enumerations.
+(See `OrderedEnum`_ for an example.)
+
+
+Pickling
+--------
+
+Enumerations can be pickled and unpickled::
+
+ >>> from enum.test_enum import Fruit
+ >>> from pickle import dumps, loads
+ >>> Fruit.tomato is loads(dumps(Fruit.tomato, 2))
+ True
+
+The usual restrictions for pickling apply: picklable enums must be defined in
+the top level of a module, since unpickling requires them to be importable
+from that module.
+
+Note:
+
+ With pickle protocol version 4 (introduced in Python 3.4) it is possible
+ to easily pickle enums nested in other classes.
+
+
+
+Functional API
+--------------
+
+The ``Enum`` class is callable, providing the following functional API::
+
+ >>> Animal = Enum('Animal', 'ant bee cat dog')
+ >>> Animal
+ <enum 'Animal'>
+ >>> Animal.ant
+ <Animal.ant: 1>
+ >>> Animal.ant.value
+ 1
+ >>> list(Animal)
+ [<Animal.ant: 1>, <Animal.bee: 2>, <Animal.cat: 3>, <Animal.dog: 4>]
+
+The semantics of this API resemble ``namedtuple``. The first argument
+of the call to ``Enum`` is the name of the enumeration.
+
+The second argument is the *source* of enumeration member names. It can be a
+whitespace-separated string of names, a sequence of names, a sequence of
+2-tuples with key/value pairs, or a mapping (e.g. dictionary) of names to
+values. The last two options enable assigning arbitrary values to
+enumerations; the others auto-assign increasing integers starting with 1. A
+new class derived from ``Enum`` is returned. In other words, the above
+assignment to ``Animal`` is equivalent to::
+
+ >>> class Animals(Enum):
+ ... ant = 1
+ ... bee = 2
+ ... cat = 3
+ ... dog = 4
+
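+The 2-tuple and mapping forms allow the values to be given explicitly; a
+minimal sketch using the 2-tuple form::
+
+    >>> Animals = Enum('Animals', [('ant', 1), ('bee', 2), ('cat', 3), ('dog', 4)])
+    >>> Animals.cat
+    <Animals.cat: 3>
+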
+Pickling enums created with the functional API can be tricky as frame stack
+implementation details are used to try to figure out which module the
+enumeration is being created in (e.g. it will fail if you use a utility
+function in a separate module, and also may not work on IronPython or Jython).
+The solution is to specify the module name explicitly as follows::
+
+ >>> Animals = Enum('Animals', 'ant bee cat dog', module=__name__)
+
+Derived Enumerations
+--------------------
+
+IntEnum
+^^^^^^^
+
+A variation of ``Enum`` is provided which is also a subclass of
+``int``. Members of an ``IntEnum`` can be compared to integers;
+by extension, integer enumerations of different types can also be compared
+to each other::
+
+ >>> from enum import IntEnum
+ >>> class Shape(IntEnum):
+ ... circle = 1
+ ... square = 2
+ ...
+ >>> class Request(IntEnum):
+ ... post = 1
+ ... get = 2
+ ...
+ >>> Shape == 1
+ False
+ >>> Shape.circle == 1
+ True
+ >>> Shape.circle == Request.post
+ True
+
+However, they still can't be compared to standard ``Enum`` enumerations::
+
+ >>> class Shape(IntEnum):
+ ... circle = 1
+ ... square = 2
+ ...
+ >>> class Color(Enum):
+ ... red = 1
+ ... green = 2
+ ...
+ >>> Shape.circle == Color.red
+ False
+
+``IntEnum`` values behave like integers in other ways you'd expect::
+
+ >>> int(Shape.circle)
+ 1
+ >>> ['a', 'b', 'c'][Shape.circle]
+ 'b'
+ >>> [i for i in range(Shape.square)]
+ [0, 1]
+
+For the vast majority of code, ``Enum`` is strongly recommended,
+since ``IntEnum`` breaks some semantic promises of an enumeration (by
+being comparable to integers, and thus by transitivity to other
+unrelated enumerations). It should be used only in special cases where
+there's no other choice; for example, when integer constants are
+replaced with enumerations and backwards compatibility is required with code
+that still expects integers.
+
+
+Others
+^^^^^^
+
+While ``IntEnum`` is part of the ``enum`` module, it would be very
+simple to implement independently::
+
+ class IntEnum(int, Enum):
+ pass
+
+This demonstrates how similar derived enumerations can be defined; for example
+a ``StrEnum`` that mixes in ``str`` instead of ``int``.
+
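+A minimal sketch of such a ``StrEnum`` (hypothetical; it is not shipped by
+this module)::
+
+    >>> class StrEnum(str, Enum):
+    ...     'accepts only string values'
+    ...
+    >>> class Direction(StrEnum):
+    ...     north = 'N'
+    ...     south = 'S'
+    ...
+    >>> Direction.north == 'N'
+    True
+    >>> isinstance(Direction.south, str)
+    True
+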
+Some rules:
+
+1. When subclassing ``Enum``, mix-in types must appear before
+ ``Enum`` itself in the sequence of bases, as in the ``IntEnum``
+ example above.
+2. While ``Enum`` can have members of any type, once you mix in an
+ additional type, all the members must have values of that type, e.g.
+ ``int`` above. This restriction does not apply to mix-ins which only
+ add methods and don't specify another data type such as ``int`` or
+ ``str``.
+3. When another data type is mixed in, the ``value`` attribute is *not the
+ same* as the enum member itself, although it is equivalent and will compare
+ equal (see the sketch after this list).
+4. %-style formatting: ``%s`` and ``%r`` call ``Enum``'s ``__str__`` and
+ ``__repr__`` respectively; other codes (such as ``%i`` or ``%h`` for
+ IntEnum) treat the enum member as its mixed-in type.
+
+ Note: Prior to Python 3.4 there is a bug in ``str``'s %-formatting: ``int``
+ subclasses are printed as strings and not numbers when the ``%d``, ``%i``,
+ or ``%u`` codes are used.
+5. ``str.__format__`` (or ``format``) will use the mixed-in
+ type's ``__format__``. If the ``Enum``'s ``str`` or
+ ``repr`` is desired use the ``!s`` or ``!r`` ``str`` format codes.
+
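+A minimal sketch of rules 3 to 5, using a hypothetical ``Shade`` ``IntEnum``
+(output shown for Python 3.4 and later)::
+
+    >>> class Shade(IntEnum):
+    ...     dark = 1
+    ...     light = 2
+    ...
+    >>> Shade.dark.value is Shade.dark      # equal, but not the same object
+    False
+    >>> Shade.dark.value == Shade.dark
+    True
+    >>> '%s is %d' % (Shade.dark, Shade.dark)
+    'Shade.dark is 1'
+    >>> '{0}'.format(Shade.dark)            # mixed-in int's __format__
+    '1'
+    >>> '{0!s}'.format(Shade.dark)          # !s forces Enum's __str__
+    'Shade.dark'
+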
+
+Decorators
+----------
+
+unique
+^^^^^^
+
+A ``class`` decorator specifically for enumerations. It searches an
+enumeration's ``__members__``, gathering any aliases it finds; if any are
+found, ``ValueError`` is raised with the details::
+
+ >>> @unique
+ ... class NoDupes(Enum):
+ ... first = 'one'
+ ... second = 'two'
+ ... third = 'two'
+ Traceback (most recent call last):
+ ...
+ ValueError: duplicate names found in <enum 'NoDupes'>: third -> second
+
+
+Interesting examples
+--------------------
+
+While ``Enum`` and ``IntEnum`` are expected to cover the majority of
+use-cases, they cannot cover them all. Here are recipes for some different
+types of enumerations that can be used directly, or as examples for creating
+one's own.
+
+
+AutoNumber
+^^^^^^^^^^
+
+Avoids having to specify the value for each enumeration member::
+
+ >>> class AutoNumber(Enum):
+ ... def __new__(cls):
+ ... value = len(cls.__members__) + 1
+ ... obj = object.__new__(cls)
+ ... obj._value_ = value
+ ... return obj
+ ...
+ >>> class Color(AutoNumber):
+ ... __order__ = "red green blue" # only needed in 2.x
+ ... red = ()
+ ... green = ()
+ ... blue = ()
+ ...
+ >>> Color.green.value == 2
+ True
+
+Note:
+
+ The ``__new__`` method, if defined, is used during creation of the Enum
+ members; it is then replaced by Enum's ``__new__``, which is used after
+ class creation for lookup of existing members. Due to the way Enums are
+ supposed to behave, there is no way to customize Enum's ``__new__``.
+
+
+UniqueEnum
+^^^^^^^^^^
+
+Raises an error when a duplicate member value is found, instead of creating
+an alias::
+
+ >>> class UniqueEnum(Enum):
+ ... def __init__(self, *args):
+ ... cls = self.__class__
+ ... if any(self.value == e.value for e in cls):
+ ... a = self.name
+ ... e = cls(self.value).name
+ ... raise ValueError(
+ ... "aliases not allowed in UniqueEnum: %r --> %r"
+ ... % (a, e))
+ ...
+ >>> class Color(UniqueEnum):
+ ... red = 1
+ ... green = 2
+ ... blue = 3
+ ... grene = 2
+ Traceback (most recent call last):
+ ...
+ ValueError: aliases not allowed in UniqueEnum: 'grene' --> 'green'
+
+
+OrderedEnum
+^^^^^^^^^^^
+
+An ordered enumeration that is not based on ``IntEnum`` and so maintains
+the normal ``Enum`` invariants (such as not being comparable to other
+enumerations)::
+
+ >>> class OrderedEnum(Enum):
+ ... def __ge__(self, other):
+ ... if self.__class__ is other.__class__:
+ ... return self._value_ >= other._value_
+ ... return NotImplemented
+ ... def __gt__(self, other):
+ ... if self.__class__ is other.__class__:
+ ... return self._value_ > other._value_
+ ... return NotImplemented
+ ... def __le__(self, other):
+ ... if self.__class__ is other.__class__:
+ ... return self._value_ <= other._value_
+ ... return NotImplemented
+ ... def __lt__(self, other):
+ ... if self.__class__ is other.__class__:
+ ... return self._value_ < other._value_
+ ... return NotImplemented
+ ...
+ >>> class Grade(OrderedEnum):
+ ... __ordered__ = 'A B C D F'
+ ... A = 5
+ ... B = 4
+ ... C = 3
+ ... D = 2
+ ... F = 1
+ ...
+ >>> Grade.C < Grade.A
+ True
+
+
+Planet
+^^^^^^
+
+If ``__new__`` or ``__init__`` is defined, the value of the enum member
+will be passed to those methods::
+
+ >>> class Planet(Enum):
+ ... MERCURY = (3.303e+23, 2.4397e6)
+ ... VENUS = (4.869e+24, 6.0518e6)
+ ... EARTH = (5.976e+24, 6.37814e6)
+ ... MARS = (6.421e+23, 3.3972e6)
+ ... JUPITER = (1.9e+27, 7.1492e7)
+ ... SATURN = (5.688e+26, 6.0268e7)
+ ... URANUS = (8.686e+25, 2.5559e7)
+ ... NEPTUNE = (1.024e+26, 2.4746e7)
+ ... def __init__(self, mass, radius):
+ ... self.mass = mass # in kilograms
+ ... self.radius = radius # in meters
+ ... @property
+ ... def surface_gravity(self):
+ ... # universal gravitational constant (m3 kg-1 s-2)
+ ... G = 6.67300E-11
+ ... return G * self.mass / (self.radius * self.radius)
+ ...
+ >>> Planet.EARTH.value
+ (5.976e+24, 6378140.0)
+ >>> Planet.EARTH.surface_gravity
+ 9.802652743337129
+
+
+How are Enums different?
+------------------------
+
+Enums have a custom metaclass that affects many aspects of both derived Enum
+classes and their instances (members).
+
+
+Enum Classes
+^^^^^^^^^^^^
+
+The ``EnumMeta`` metaclass is responsible for providing the
+``__contains__``, ``__dir__``, ``__iter__`` and other methods that
+allow one to do things with an ``Enum`` class that fail on a typical
+class, such as ``list(Color)`` or ``some_var in Color``. ``EnumMeta`` is
+responsible for ensuring that various other methods on the final ``Enum``
+class are correct (such as ``__new__``, ``__getnewargs__``,
+``__str__`` and ``__repr__``).
+
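+A minimal sketch (``Signal`` is a hypothetical enumeration used only for
+illustration)::
+
+    >>> class Signal(Enum):
+    ...     go = 1
+    ...     caution = 2
+    ...     stop = 3
+    ...
+    >>> list(Signal)
+    [<Signal.go: 1>, <Signal.caution: 2>, <Signal.stop: 3>]
+    >>> Signal.caution in Signal
+    True
+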
+.. note::
+
+ ``__dir__`` is not changed in the Python 2 line as it messes up some
+ of the decorators included in the stdlib.
+
+
+Enum Members (aka instances)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The most interesting thing about Enum members is that they are singletons.
+``EnumMeta`` creates them all while it is creating the ``Enum``
+class itself, and then puts a custom ``__new__`` in place to ensure
+that no new ones are ever instantiated by returning only the existing
+member instances.
+
+
+Finer Points
+^^^^^^^^^^^^
+
+``Enum`` members are instances of an ``Enum`` class, and even though they
+ are accessible as ``EnumClass.member1.member2``, they should not be
+accessed directly from the member as that lookup may fail or, worse,
+return something besides the ``Enum`` member you were looking for
+(changed in version 1.1.1)::
+
+ >>> class FieldTypes(Enum):
+ ... name = 1
+ ... value = 2
+ ... size = 3
+ ...
+ >>> FieldTypes.value.size
+ <FieldTypes.size: 3>
+ >>> FieldTypes.size.value
+ 3
+
+The ``__members__`` attribute is only available on the class.
+
+In Python 3.x ``__members__`` is always an ``OrderedDict``, with the order being
+the definition order. In Python 2.7 ``__members__`` is an ``OrderedDict`` if
+``__order__`` was specified, and a plain ``dict`` otherwise. In all other Python
+2.x versions ``__members__`` is a plain ``dict`` even if ``__order__`` was specified,
+as the ``OrderedDict`` type didn't exist yet.
+
+If you give your ``Enum`` subclass extra methods, like the `Planet`_
+class above, those methods will show up in a ``dir`` of the member,
+but not of the class::
+
+ >>> dir(Planet)
+ ['EARTH', 'JUPITER', 'MARS', 'MERCURY', 'NEPTUNE', 'SATURN', 'URANUS',
+ 'VENUS', '__class__', '__doc__', '__members__', '__module__']
+ >>> dir(Planet.EARTH)
+ ['__class__', '__doc__', '__module__', 'name', 'surface_gravity', 'value']
+
+A ``__new__`` method will only be used for the creation of the
+``Enum`` members -- after that it is replaced. This means if you wish to
+change how ``Enum`` members are looked up you either have to write a
+helper function or a ``classmethod``.
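+
+For example, a minimal sketch of such a ``classmethod`` (``from_lower`` is a
+hypothetical helper, not part of this module)::
+
+    >>> class Color(Enum):
+    ...     red = 1
+    ...     green = 2
+    ...     blue = 3
+    ...     @classmethod
+    ...     def from_lower(cls, name):
+    ...         # case-insensitive lookup by member name
+    ...         return cls[name.lower()]
+    ...
+    >>> Color.from_lower('RED')
+    <Color.red: 1>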
diff --git a/testing/web-platform/tests/tools/third_party/enum/enum/test.py b/testing/web-platform/tests/tools/third_party/enum/enum/test.py
new file mode 100644
index 0000000000..c8c4b96224
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/enum/enum/test.py
@@ -0,0 +1,1841 @@
+from pickle import dumps, loads, PicklingError, HIGHEST_PROTOCOL
+import sys
+import unittest
+pyver = float('%s.%s' % sys.version_info[:2])
+if pyver < 2.5:
+ sys.path.insert(0, '.')
+import enum
+from enum import Enum, IntEnum, unique, EnumMeta
+
+if pyver < 2.6:
+ from __builtin__ import enumerate as bltin_enumerate
+ def enumerate(thing, start=0):
+ result = []
+ for i, item in bltin_enumerate(thing):
+ i = i + start
+ result.append((i, item))
+ return result
+
+try:
+ any
+except NameError:
+ def any(iterable):
+ for element in iterable:
+ if element:
+ return True
+ return False
+
+try:
+ unicode
+except NameError:
+ unicode = str
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ OrderedDict = None
+
+# for pickle tests
+try:
+ class Stooges(Enum):
+ LARRY = 1
+ CURLY = 2
+ MOE = 3
+except Exception:
+ Stooges = sys.exc_info()[1]
+
+try:
+ class IntStooges(int, Enum):
+ LARRY = 1
+ CURLY = 2
+ MOE = 3
+except Exception:
+ IntStooges = sys.exc_info()[1]
+
+try:
+ class FloatStooges(float, Enum):
+ LARRY = 1.39
+ CURLY = 2.72
+ MOE = 3.142596
+except Exception:
+ FloatStooges = sys.exc_info()[1]
+
+# for pickle test and subclass tests
+try:
+ class StrEnum(str, Enum):
+ 'accepts only string values'
+ class Name(StrEnum):
+ BDFL = 'Guido van Rossum'
+ FLUFL = 'Barry Warsaw'
+except Exception:
+ Name = sys.exc_info()[1]
+
+try:
+ Question = Enum('Question', 'who what when where why', module=__name__)
+except Exception:
+ Question = sys.exc_info()[1]
+
+try:
+ Answer = Enum('Answer', 'him this then there because')
+except Exception:
+ Answer = sys.exc_info()[1]
+
+try:
+ Theory = Enum('Theory', 'rule law supposition', qualname='spanish_inquisition')
+except Exception:
+ Theory = sys.exc_info()[1]
+
+# for doctests
+try:
+ class Fruit(Enum):
+ tomato = 1
+ banana = 2
+ cherry = 3
+except Exception:
+ pass
+
+def test_pickle_dump_load(assertion, source, target=None,
+ protocol=(0, HIGHEST_PROTOCOL)):
+ start, stop = protocol
+ failures = []
+ for protocol in range(start, stop+1):
+ try:
+ if target is None:
+ assertion(loads(dumps(source, protocol=protocol)) is source)
+ else:
+ assertion(loads(dumps(source, protocol=protocol)), target)
+ except Exception:
+ exc, tb = sys.exc_info()[1:]
+ failures.append('%2d: %s' %(protocol, exc))
+ if failures:
+ raise ValueError('Failed with protocols: %s' % ', '.join(failures))
+
+def test_pickle_exception(assertion, exception, obj,
+ protocol=(0, HIGHEST_PROTOCOL)):
+ start, stop = protocol
+ failures = []
+ for protocol in range(start, stop+1):
+ try:
+ assertion(exception, dumps, obj, protocol=protocol)
+ except Exception:
+ exc = sys.exc_info()[1]
+ failures.append('%d: %s %s' % (protocol, exc.__class__.__name__, exc))
+ if failures:
+ raise ValueError('Failed with protocols: %s' % ', '.join(failures))
+
+
+class TestHelpers(unittest.TestCase):
+ # _is_descriptor, _is_sunder, _is_dunder
+
+ def test_is_descriptor(self):
+ class foo:
+ pass
+ for attr in ('__get__','__set__','__delete__'):
+ obj = foo()
+ self.assertFalse(enum._is_descriptor(obj))
+ setattr(obj, attr, 1)
+ self.assertTrue(enum._is_descriptor(obj))
+
+ def test_is_sunder(self):
+ for s in ('_a_', '_aa_'):
+ self.assertTrue(enum._is_sunder(s))
+
+ for s in ('a', 'a_', '_a', '__a', 'a__', '__a__', '_a__', '__a_', '_',
+ '__', '___', '____', '_____',):
+ self.assertFalse(enum._is_sunder(s))
+
+ def test_is_dunder(self):
+ for s in ('__a__', '__aa__'):
+ self.assertTrue(enum._is_dunder(s))
+ for s in ('a', 'a_', '_a', '__a', 'a__', '_a_', '_a__', '__a_', '_',
+ '__', '___', '____', '_____',):
+ self.assertFalse(enum._is_dunder(s))
+
+
+class TestEnum(unittest.TestCase):
+ def setUp(self):
+ class Season(Enum):
+ SPRING = 1
+ SUMMER = 2
+ AUTUMN = 3
+ WINTER = 4
+ self.Season = Season
+
+ class Konstants(float, Enum):
+ E = 2.7182818
+ PI = 3.1415926
+ TAU = 2 * PI
+ self.Konstants = Konstants
+
+ class Grades(IntEnum):
+ A = 5
+ B = 4
+ C = 3
+ D = 2
+ F = 0
+ self.Grades = Grades
+
+ class Directional(str, Enum):
+ EAST = 'east'
+ WEST = 'west'
+ NORTH = 'north'
+ SOUTH = 'south'
+ self.Directional = Directional
+
+ from datetime import date
+ class Holiday(date, Enum):
+ NEW_YEAR = 2013, 1, 1
+ IDES_OF_MARCH = 2013, 3, 15
+ self.Holiday = Holiday
+
+ if pyver >= 3.0: # do not specify custom `dir` on previous versions
+ def test_dir_on_class(self):
+ Season = self.Season
+ self.assertEqual(
+ set(dir(Season)),
+ set(['__class__', '__doc__', '__members__', '__module__',
+ 'SPRING', 'SUMMER', 'AUTUMN', 'WINTER']),
+ )
+
+ def test_dir_on_item(self):
+ Season = self.Season
+ self.assertEqual(
+ set(dir(Season.WINTER)),
+ set(['__class__', '__doc__', '__module__', 'name', 'value']),
+ )
+
+ def test_dir_with_added_behavior(self):
+ class Test(Enum):
+ this = 'that'
+ these = 'those'
+ def wowser(self):
+ return ("Wowser! I'm %s!" % self.name)
+ self.assertEqual(
+ set(dir(Test)),
+ set(['__class__', '__doc__', '__members__', '__module__', 'this', 'these']),
+ )
+ self.assertEqual(
+ set(dir(Test.this)),
+ set(['__class__', '__doc__', '__module__', 'name', 'value', 'wowser']),
+ )
+
+ def test_dir_on_sub_with_behavior_on_super(self):
+ # see issue22506
+ class SuperEnum(Enum):
+ def invisible(self):
+ return "did you see me?"
+ class SubEnum(SuperEnum):
+ sample = 5
+ self.assertEqual(
+ set(dir(SubEnum.sample)),
+ set(['__class__', '__doc__', '__module__', 'name', 'value', 'invisible']),
+ )
+
+ if pyver >= 2.7: # OrderedDict first available here
+ def test_members_is_ordereddict_if_ordered(self):
+ class Ordered(Enum):
+ __order__ = 'first second third'
+ first = 'bippity'
+ second = 'boppity'
+ third = 'boo'
+ self.assertTrue(type(Ordered.__members__) is OrderedDict)
+
+ def test_members_is_ordereddict_if_not_ordered(self):
+ class Unordered(Enum):
+ this = 'that'
+ these = 'those'
+ self.assertTrue(type(Unordered.__members__) is OrderedDict)
+
+ if pyver >= 3.0: # all objects are ordered in Python 2.x
+ def test_members_is_always_ordered(self):
+ class AlwaysOrdered(Enum):
+ first = 1
+ second = 2
+ third = 3
+ self.assertTrue(type(AlwaysOrdered.__members__) is OrderedDict)
+
+ def test_comparisons(self):
+ def bad_compare():
+ Season.SPRING > 4
+ Season = self.Season
+ self.assertNotEqual(Season.SPRING, 1)
+ self.assertRaises(TypeError, bad_compare)
+
+ class Part(Enum):
+ SPRING = 1
+ CLIP = 2
+ BARREL = 3
+
+ self.assertNotEqual(Season.SPRING, Part.SPRING)
+ def bad_compare():
+ Season.SPRING < Part.CLIP
+ self.assertRaises(TypeError, bad_compare)
+
+ def test_enum_in_enum_out(self):
+ Season = self.Season
+ self.assertTrue(Season(Season.WINTER) is Season.WINTER)
+
+ def test_enum_value(self):
+ Season = self.Season
+ self.assertEqual(Season.SPRING.value, 1)
+
+ def test_intenum_value(self):
+ self.assertEqual(IntStooges.CURLY.value, 2)
+
+ def test_enum(self):
+ Season = self.Season
+ lst = list(Season)
+ self.assertEqual(len(lst), len(Season))
+ self.assertEqual(len(Season), 4, Season)
+ self.assertEqual(
+ [Season.SPRING, Season.SUMMER, Season.AUTUMN, Season.WINTER], lst)
+
+ for i, season in enumerate('SPRING SUMMER AUTUMN WINTER'.split()):
+ i += 1
+ e = Season(i)
+ self.assertEqual(e, getattr(Season, season))
+ self.assertEqual(e.value, i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, season)
+ self.assertTrue(e in Season)
+ self.assertTrue(type(e) is Season)
+ self.assertTrue(isinstance(e, Season))
+ self.assertEqual(str(e), 'Season.' + season)
+ self.assertEqual(
+ repr(e),
+ '<Season.%s: %s>' % (season, i),
+ )
+
+ def test_value_name(self):
+ Season = self.Season
+ self.assertEqual(Season.SPRING.name, 'SPRING')
+ self.assertEqual(Season.SPRING.value, 1)
+ def set_name(obj, new_value):
+ obj.name = new_value
+ def set_value(obj, new_value):
+ obj.value = new_value
+ self.assertRaises(AttributeError, set_name, Season.SPRING, 'invierno', )
+ self.assertRaises(AttributeError, set_value, Season.SPRING, 2)
+
+ def test_attribute_deletion(self):
+ class Season(Enum):
+ SPRING = 1
+ SUMMER = 2
+ AUTUMN = 3
+ WINTER = 4
+
+ def spam(cls):
+ pass
+
+ self.assertTrue(hasattr(Season, 'spam'))
+ del Season.spam
+ self.assertFalse(hasattr(Season, 'spam'))
+
+ self.assertRaises(AttributeError, delattr, Season, 'SPRING')
+ self.assertRaises(AttributeError, delattr, Season, 'DRY')
+ self.assertRaises(AttributeError, delattr, Season.SPRING, 'name')
+
+ def test_bool_of_class(self):
+ class Empty(Enum):
+ pass
+ self.assertTrue(bool(Empty))
+
+ def test_bool_of_member(self):
+ class Count(Enum):
+ zero = 0
+ one = 1
+ two = 2
+ for member in Count:
+ self.assertTrue(bool(member))
+
+ def test_invalid_names(self):
+ def create_bad_class_1():
+ class Wrong(Enum):
+ mro = 9
+ def create_bad_class_2():
+ class Wrong(Enum):
+ _reserved_ = 3
+ self.assertRaises(ValueError, create_bad_class_1)
+ self.assertRaises(ValueError, create_bad_class_2)
+
+ def test_contains(self):
+ Season = self.Season
+ self.assertTrue(Season.AUTUMN in Season)
+ self.assertTrue(3 not in Season)
+
+ val = Season(3)
+ self.assertTrue(val in Season)
+
+ class OtherEnum(Enum):
+ one = 1; two = 2
+ self.assertTrue(OtherEnum.two not in Season)
+
+ if pyver >= 2.6: # when `format` came into being
+
+ def test_format_enum(self):
+ Season = self.Season
+ self.assertEqual('{0}'.format(Season.SPRING),
+ '{0}'.format(str(Season.SPRING)))
+ self.assertEqual( '{0:}'.format(Season.SPRING),
+ '{0:}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:20}'.format(Season.SPRING),
+ '{0:20}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:^20}'.format(Season.SPRING),
+ '{0:^20}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:>20}'.format(Season.SPRING),
+ '{0:>20}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:<20}'.format(Season.SPRING),
+ '{0:<20}'.format(str(Season.SPRING)))
+
+ def test_format_enum_custom(self):
+ class TestFloat(float, Enum):
+ one = 1.0
+ two = 2.0
+ def __format__(self, spec):
+ return 'TestFloat success!'
+ self.assertEqual('{0}'.format(TestFloat.one), 'TestFloat success!')
+
+ def assertFormatIsValue(self, spec, member):
+ self.assertEqual(spec.format(member), spec.format(member.value))
+
+ def test_format_enum_date(self):
+ Holiday = self.Holiday
+ self.assertFormatIsValue('{0}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:20}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:^20}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:>20}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:<20}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:%Y %m}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:%Y %m %M:00}', Holiday.IDES_OF_MARCH)
+
+ def test_format_enum_float(self):
+ Konstants = self.Konstants
+ self.assertFormatIsValue('{0}', Konstants.TAU)
+ self.assertFormatIsValue('{0:}', Konstants.TAU)
+ self.assertFormatIsValue('{0:20}', Konstants.TAU)
+ self.assertFormatIsValue('{0:^20}', Konstants.TAU)
+ self.assertFormatIsValue('{0:>20}', Konstants.TAU)
+ self.assertFormatIsValue('{0:<20}', Konstants.TAU)
+ self.assertFormatIsValue('{0:n}', Konstants.TAU)
+ self.assertFormatIsValue('{0:5.2}', Konstants.TAU)
+ self.assertFormatIsValue('{0:f}', Konstants.TAU)
+
+ def test_format_enum_int(self):
+ Grades = self.Grades
+ self.assertFormatIsValue('{0}', Grades.C)
+ self.assertFormatIsValue('{0:}', Grades.C)
+ self.assertFormatIsValue('{0:20}', Grades.C)
+ self.assertFormatIsValue('{0:^20}', Grades.C)
+ self.assertFormatIsValue('{0:>20}', Grades.C)
+ self.assertFormatIsValue('{0:<20}', Grades.C)
+ self.assertFormatIsValue('{0:+}', Grades.C)
+ self.assertFormatIsValue('{0:08X}', Grades.C)
+ self.assertFormatIsValue('{0:b}', Grades.C)
+
+ def test_format_enum_str(self):
+ Directional = self.Directional
+ self.assertFormatIsValue('{0}', Directional.WEST)
+ self.assertFormatIsValue('{0:}', Directional.WEST)
+ self.assertFormatIsValue('{0:20}', Directional.WEST)
+ self.assertFormatIsValue('{0:^20}', Directional.WEST)
+ self.assertFormatIsValue('{0:>20}', Directional.WEST)
+ self.assertFormatIsValue('{0:<20}', Directional.WEST)
+
+ def test_hash(self):
+ Season = self.Season
+ dates = {}
+ dates[Season.WINTER] = '1225'
+ dates[Season.SPRING] = '0315'
+ dates[Season.SUMMER] = '0704'
+ dates[Season.AUTUMN] = '1031'
+ self.assertEqual(dates[Season.AUTUMN], '1031')
+
+ def test_enum_duplicates(self):
+ class Season(Enum):
+ _order_ = "SPRING SUMMER AUTUMN WINTER"
+ SPRING = 1
+ SUMMER = 2
+ AUTUMN = FALL = 3
+ WINTER = 4
+ ANOTHER_SPRING = 1
+ lst = list(Season)
+ self.assertEqual(
+ lst,
+ [Season.SPRING, Season.SUMMER,
+ Season.AUTUMN, Season.WINTER,
+ ])
+ self.assertTrue(Season.FALL is Season.AUTUMN)
+ self.assertEqual(Season.FALL.value, 3)
+ self.assertEqual(Season.AUTUMN.value, 3)
+ self.assertTrue(Season(3) is Season.AUTUMN)
+ self.assertTrue(Season(1) is Season.SPRING)
+ self.assertEqual(Season.FALL.name, 'AUTUMN')
+ self.assertEqual(
+ set([k for k,v in Season.__members__.items() if v.name != k]),
+ set(['FALL', 'ANOTHER_SPRING']),
+ )
+
+ if pyver >= 3.0:
+ cls = vars()
+ result = {'Enum':Enum}
+ exec("""def test_duplicate_name(self):
+ with self.assertRaises(TypeError):
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ red = 4
+
+ with self.assertRaises(TypeError):
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ def red(self):
+ return 'red'
+
+ with self.assertRaises(TypeError):
+ class Color(Enum):
+ @property
+
+ def red(self):
+ return 'redder'
+ red = 1
+ green = 2
+ blue = 3""",
+ result)
+ cls['test_duplicate_name'] = result['test_duplicate_name']
+
+ def test_enum_with_value_name(self):
+ class Huh(Enum):
+ name = 1
+ value = 2
+ self.assertEqual(
+ list(Huh),
+ [Huh.name, Huh.value],
+ )
+ self.assertTrue(type(Huh.name) is Huh)
+ self.assertEqual(Huh.name.name, 'name')
+ self.assertEqual(Huh.name.value, 1)
+
+ def test_intenum_from_scratch(self):
+ class phy(int, Enum):
+ pi = 3
+ tau = 2 * pi
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_intenum_inherited(self):
+ class IntEnum(int, Enum):
+ pass
+ class phy(IntEnum):
+ pi = 3
+ tau = 2 * pi
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_floatenum_from_scratch(self):
+ class phy(float, Enum):
+ pi = 3.1415926
+ tau = 2 * pi
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_floatenum_inherited(self):
+ class FloatEnum(float, Enum):
+ pass
+ class phy(FloatEnum):
+ pi = 3.1415926
+ tau = 2 * pi
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_strenum_from_scratch(self):
+ class phy(str, Enum):
+ pi = 'Pi'
+ tau = 'Tau'
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_strenum_inherited(self):
+ class StrEnum(str, Enum):
+ pass
+ class phy(StrEnum):
+ pi = 'Pi'
+ tau = 'Tau'
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_intenum(self):
+ class WeekDay(IntEnum):
+ SUNDAY = 1
+ MONDAY = 2
+ TUESDAY = 3
+ WEDNESDAY = 4
+ THURSDAY = 5
+ FRIDAY = 6
+ SATURDAY = 7
+
+ self.assertEqual(['a', 'b', 'c'][WeekDay.MONDAY], 'c')
+ self.assertEqual([i for i in range(WeekDay.TUESDAY)], [0, 1, 2])
+
+ lst = list(WeekDay)
+ self.assertEqual(len(lst), len(WeekDay))
+ self.assertEqual(len(WeekDay), 7)
+ target = 'SUNDAY MONDAY TUESDAY WEDNESDAY THURSDAY FRIDAY SATURDAY'
+ target = target.split()
+ for i, weekday in enumerate(target):
+ i += 1
+ e = WeekDay(i)
+ self.assertEqual(e, i)
+ self.assertEqual(int(e), i)
+ self.assertEqual(e.name, weekday)
+ self.assertTrue(e in WeekDay)
+ self.assertEqual(lst.index(e)+1, i)
+ self.assertTrue(0 < e < 8)
+ self.assertTrue(type(e) is WeekDay)
+ self.assertTrue(isinstance(e, int))
+ self.assertTrue(isinstance(e, Enum))
+
+ def test_intenum_duplicates(self):
+ class WeekDay(IntEnum):
+ __order__ = 'SUNDAY MONDAY TUESDAY WEDNESDAY THURSDAY FRIDAY SATURDAY'
+ SUNDAY = 1
+ MONDAY = 2
+ TUESDAY = TEUSDAY = 3
+ WEDNESDAY = 4
+ THURSDAY = 5
+ FRIDAY = 6
+ SATURDAY = 7
+ self.assertTrue(WeekDay.TEUSDAY is WeekDay.TUESDAY)
+ self.assertEqual(WeekDay(3).name, 'TUESDAY')
+ self.assertEqual([k for k,v in WeekDay.__members__.items()
+ if v.name != k], ['TEUSDAY', ])
+
+ def test_pickle_enum(self):
+ if isinstance(Stooges, Exception):
+ raise Stooges
+ test_pickle_dump_load(self.assertTrue, Stooges.CURLY)
+ test_pickle_dump_load(self.assertTrue, Stooges)
+
+ def test_pickle_int(self):
+ if isinstance(IntStooges, Exception):
+ raise IntStooges
+ test_pickle_dump_load(self.assertTrue, IntStooges.CURLY)
+ test_pickle_dump_load(self.assertTrue, IntStooges)
+
+ def test_pickle_float(self):
+ if isinstance(FloatStooges, Exception):
+ raise FloatStooges
+ test_pickle_dump_load(self.assertTrue, FloatStooges.CURLY)
+ test_pickle_dump_load(self.assertTrue, FloatStooges)
+
+ def test_pickle_enum_function(self):
+ if isinstance(Answer, Exception):
+ raise Answer
+ test_pickle_dump_load(self.assertTrue, Answer.him)
+ test_pickle_dump_load(self.assertTrue, Answer)
+
+ def test_pickle_enum_function_with_module(self):
+ if isinstance(Question, Exception):
+ raise Question
+ test_pickle_dump_load(self.assertTrue, Question.who)
+ test_pickle_dump_load(self.assertTrue, Question)
+
+ if pyver == 3.4:
+ def test_class_nested_enum_and_pickle_protocol_four(self):
+ # would normally just have this directly in the class namespace
+ class NestedEnum(Enum):
+ twigs = 'common'
+ shiny = 'rare'
+
+ self.__class__.NestedEnum = NestedEnum
+ self.NestedEnum.__qualname__ = '%s.NestedEnum' % self.__class__.__name__
+ test_pickle_exception(
+ self.assertRaises, PicklingError, self.NestedEnum.twigs,
+ protocol=(0, 3))
+ test_pickle_dump_load(self.assertTrue, self.NestedEnum.twigs,
+ protocol=(4, HIGHEST_PROTOCOL))
+
+ elif pyver == 3.5:
+ def test_class_nested_enum_and_pickle_protocol_four(self):
+ # would normally just have this directly in the class namespace
+ class NestedEnum(Enum):
+ twigs = 'common'
+ shiny = 'rare'
+
+ self.__class__.NestedEnum = NestedEnum
+ self.NestedEnum.__qualname__ = '%s.NestedEnum' % self.__class__.__name__
+ test_pickle_dump_load(self.assertTrue, self.NestedEnum.twigs,
+ protocol=(0, HIGHEST_PROTOCOL))
+
+ def test_exploding_pickle(self):
+ BadPickle = Enum('BadPickle', 'dill sweet bread_n_butter')
+ enum._make_class_unpicklable(BadPickle)
+ globals()['BadPickle'] = BadPickle
+ test_pickle_exception(self.assertRaises, TypeError, BadPickle.dill)
+ test_pickle_exception(self.assertRaises, PicklingError, BadPickle)
+
+ def test_string_enum(self):
+ class SkillLevel(str, Enum):
+ master = 'what is the sound of one hand clapping?'
+ journeyman = 'why did the chicken cross the road?'
+ apprentice = 'knock, knock!'
+ self.assertEqual(SkillLevel.apprentice, 'knock, knock!')
+
+ def test_getattr_getitem(self):
+ class Period(Enum):
+ morning = 1
+ noon = 2
+ evening = 3
+ night = 4
+ self.assertTrue(Period(2) is Period.noon)
+ self.assertTrue(getattr(Period, 'night') is Period.night)
+ self.assertTrue(Period['morning'] is Period.morning)
+
+ def test_getattr_dunder(self):
+ Season = self.Season
+ self.assertTrue(getattr(Season, '__hash__'))
+
+ def test_iteration_order(self):
+ class Season(Enum):
+ _order_ = 'SUMMER WINTER AUTUMN SPRING'
+ SUMMER = 2
+ WINTER = 4
+ AUTUMN = 3
+ SPRING = 1
+ self.assertEqual(
+ list(Season),
+ [Season.SUMMER, Season.WINTER, Season.AUTUMN, Season.SPRING],
+ )
+
+ def test_iteration_order_reversed(self):
+ self.assertEqual(
+ list(reversed(self.Season)),
+ [self.Season.WINTER, self.Season.AUTUMN, self.Season.SUMMER,
+ self.Season.SPRING]
+ )
+
+ def test_iteration_order_with_unorderable_values(self):
+ class Complex(Enum):
+ a = complex(7, 9)
+ b = complex(3.14, 2)
+ c = complex(1, -1)
+ d = complex(-77, 32)
+ self.assertEqual(
+ list(Complex),
+ [Complex.a, Complex.b, Complex.c, Complex.d],
+ )
+
+ def test_programatic_function_string(self):
+ SummerMonth = Enum('SummerMonth', 'june july august')
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_string_with_start(self):
+ SummerMonth = Enum('SummerMonth', 'june july august', start=10)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split(), 10):
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_string_list(self):
+ SummerMonth = Enum('SummerMonth', ['june', 'july', 'august'])
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_string_list_with_start(self):
+ SummerMonth = Enum('SummerMonth', ['june', 'july', 'august'], start=20)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split(), 20):
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_iterable(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ (('june', 1), ('july', 2), ('august', 3))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_iterable_with_weird_names(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ (('june', 1), ('july', 2), ('august', 3), ('fabulous september', 4))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 4, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august, SummerMonth['fabulous september']],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split() + ['fabulous september']):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_from_dict(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ dict((('june', 1), ('july', 2), ('august', 3)))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ if pyver < 3.0:
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_type(self):
+ SummerMonth = Enum('SummerMonth', 'june july august', type=int)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_type_with_start(self):
+ SummerMonth = Enum('SummerMonth', 'june july august', type=int, start=30)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split(), 30):
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_type_from_subclass(self):
+ SummerMonth = IntEnum('SummerMonth', 'june july august')
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_type_from_subclass_with_start(self):
+ SummerMonth = IntEnum('SummerMonth', 'june july august', start=40)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split(), 40):
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_unicode(self):
+ SummerMonth = Enum('SummerMonth', unicode('june july august'))
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_unicode_list(self):
+ SummerMonth = Enum('SummerMonth', [unicode('june'), unicode('july'), unicode('august')])
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_unicode_iterable(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ ((unicode('june'), 1), (unicode('july'), 2), (unicode('august'), 3))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_from_unicode_dict(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ dict(((unicode('june'), 1), (unicode('july'), 2), (unicode('august'), 3)))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ if pyver < 3.0:
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_unicode_type(self):
+ SummerMonth = Enum('SummerMonth', unicode('june july august'), type=int)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programatic_function_unicode_type_from_subclass(self):
+ SummerMonth = IntEnum('SummerMonth', unicode('june july august'))
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_unicode_class(self):
+ if pyver < 3.0:
+ class_names = unicode('SummerMonth'), 'S\xfcmm\xe9rM\xf6nth'.decode('latin1')
+ else:
+ class_names = 'SummerMonth', 'S\xfcmm\xe9rM\xf6nth'
+ for i, class_name in enumerate(class_names):
+ if pyver < 3.0 and i == 1:
+ self.assertRaises(TypeError, Enum, class_name, unicode('june july august'))
+ else:
+ SummerMonth = Enum(class_name, unicode('june july august'))
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e.value, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_subclassing(self):
+ if isinstance(Name, Exception):
+ raise Name
+ self.assertEqual(Name.BDFL, 'Guido van Rossum')
+        self.assertTrue(Name.BDFL is Name('Guido van Rossum'))
+ self.assertTrue(Name.BDFL is getattr(Name, 'BDFL'))
+ test_pickle_dump_load(self.assertTrue, Name.BDFL)
+
+ def test_extending(self):
+ def bad_extension():
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ class MoreColor(Color):
+ cyan = 4
+ magenta = 5
+ yellow = 6
+ self.assertRaises(TypeError, bad_extension)
+
+ def test_exclude_methods(self):
+ class whatever(Enum):
+ this = 'that'
+ these = 'those'
+ def really(self):
+ return 'no, not %s' % self.value
+ self.assertFalse(type(whatever.really) is whatever)
+ self.assertEqual(whatever.this.really(), 'no, not that')
+
+ def test_wrong_inheritance_order(self):
+ def wrong_inherit():
+ class Wrong(Enum, str):
+ NotHere = 'error before this point'
+ self.assertRaises(TypeError, wrong_inherit)
+
+ def test_intenum_transitivity(self):
+ class number(IntEnum):
+ one = 1
+ two = 2
+ three = 3
+ class numero(IntEnum):
+ uno = 1
+ dos = 2
+ tres = 3
+ self.assertEqual(number.one, numero.uno)
+ self.assertEqual(number.two, numero.dos)
+ self.assertEqual(number.three, numero.tres)
+
+ def test_introspection(self):
+ class Number(IntEnum):
+ one = 100
+ two = 200
+ self.assertTrue(Number.one._member_type_ is int)
+ self.assertTrue(Number._member_type_ is int)
+ class String(str, Enum):
+ yarn = 'soft'
+ rope = 'rough'
+ wire = 'hard'
+ self.assertTrue(String.yarn._member_type_ is str)
+ self.assertTrue(String._member_type_ is str)
+ class Plain(Enum):
+ vanilla = 'white'
+ one = 1
+ self.assertTrue(Plain.vanilla._member_type_ is object)
+ self.assertTrue(Plain._member_type_ is object)
+
+ def test_wrong_enum_in_call(self):
+ class Monochrome(Enum):
+ black = 0
+ white = 1
+ class Gender(Enum):
+ male = 0
+ female = 1
+ self.assertRaises(ValueError, Monochrome, Gender.male)
+
+ def test_wrong_enum_in_mixed_call(self):
+ class Monochrome(IntEnum):
+ black = 0
+ white = 1
+ class Gender(Enum):
+ male = 0
+ female = 1
+ self.assertRaises(ValueError, Monochrome, Gender.male)
+
+ def test_mixed_enum_in_call_1(self):
+ class Monochrome(IntEnum):
+ black = 0
+ white = 1
+ class Gender(IntEnum):
+ male = 0
+ female = 1
+ self.assertTrue(Monochrome(Gender.female) is Monochrome.white)
+
+ def test_mixed_enum_in_call_2(self):
+ class Monochrome(Enum):
+ black = 0
+ white = 1
+ class Gender(IntEnum):
+ male = 0
+ female = 1
+ self.assertTrue(Monochrome(Gender.male) is Monochrome.black)
+
+ def test_flufl_enum(self):
+ class Fluflnum(Enum):
+ def __int__(self):
+ return int(self.value)
+ class MailManOptions(Fluflnum):
+ option1 = 1
+ option2 = 2
+ option3 = 3
+ self.assertEqual(int(MailManOptions.option1), 1)
+
+ def test_no_such_enum_member(self):
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ self.assertRaises(ValueError, Color, 4)
+ self.assertRaises(KeyError, Color.__getitem__, 'chartreuse')
+
+ def test_new_repr(self):
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ def __repr__(self):
+ return "don't you just love shades of %s?" % self.name
+ self.assertEqual(
+ repr(Color.blue),
+ "don't you just love shades of blue?",
+ )
+
+ def test_inherited_repr(self):
+ class MyEnum(Enum):
+ def __repr__(self):
+ return "My name is %s." % self.name
+ class MyIntEnum(int, MyEnum):
+ this = 1
+ that = 2
+ theother = 3
+ self.assertEqual(repr(MyIntEnum.that), "My name is that.")
+
+ def test_multiple_mixin_mro(self):
+ class auto_enum(EnumMeta):
+ def __new__(metacls, cls, bases, classdict):
+ original_dict = classdict
+ classdict = enum._EnumDict()
+ for k, v in original_dict.items():
+ classdict[k] = v
+ temp = type(classdict)()
+ names = set(classdict._member_names)
+ i = 0
+ for k in classdict._member_names:
+ v = classdict[k]
+ if v == ():
+ v = i
+ else:
+ i = v
+ i += 1
+ temp[k] = v
+ for k, v in classdict.items():
+ if k not in names:
+ temp[k] = v
+ return super(auto_enum, metacls).__new__(
+ metacls, cls, bases, temp)
+
+ AutoNumberedEnum = auto_enum('AutoNumberedEnum', (Enum,), {})
+
+ AutoIntEnum = auto_enum('AutoIntEnum', (IntEnum,), {})
+
+ class TestAutoNumber(AutoNumberedEnum):
+ a = ()
+ b = 3
+ c = ()
+
+ class TestAutoInt(AutoIntEnum):
+ a = ()
+ b = 3
+ c = ()
+
+ def test_subclasses_with_getnewargs(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt' # needed for pickle protocol 4
+ def __new__(cls, *args):
+ _args = args
+ if len(args) < 1:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ def __getnewargs__(self):
+ return self._args
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int( other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp )
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI' # needed for pickle protocol 4
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ test_pickle_dump_load(self.assertTrue, NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ test_pickle_dump_load(self.assertTrue, NEI.y)
+
+ if pyver >= 3.4:
+ def test_subclasses_with_getnewargs_ex(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt' # needed for pickle protocol 4
+ def __new__(cls, *args):
+ _args = args
+ if len(args) < 2:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ def __getnewargs_ex__(self):
+ return self._args, {}
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "{}({!r}, {})".format(type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int( other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '({0} + {1})'.format(self.__name__, other.__name__),
+ temp )
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI' # needed for pickle protocol 4
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+
+ self.assertIs(NEI.__new__, Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ test_pickle_dump_load(self.assertEqual, NI5, 5, protocol=(4, HIGHEST_PROTOCOL))
+ self.assertEqual(NEI.y.value, 2)
+ test_pickle_dump_load(self.assertTrue, NEI.y, protocol=(4, HIGHEST_PROTOCOL))
+
+ def test_subclasses_with_reduce(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt' # needed for pickle protocol 4
+ def __new__(cls, *args):
+ _args = args
+ if len(args) < 1:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ def __reduce__(self):
+ return self.__class__, self._args
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int( other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp )
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI' # needed for pickle protocol 4
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ test_pickle_dump_load(self.assertEqual, NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ test_pickle_dump_load(self.assertTrue, NEI.y)
+
+ def test_subclasses_with_reduce_ex(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt' # needed for pickle protocol 4
+ def __new__(cls, *args):
+ _args = args
+ if len(args) < 1:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ def __reduce_ex__(self, proto):
+ return self.__class__, self._args
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int( other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp )
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI' # needed for pickle protocol 4
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ test_pickle_dump_load(self.assertEqual, NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ test_pickle_dump_load(self.assertTrue, NEI.y)
+
+ def test_subclasses_without_direct_pickle_support(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt'
+ def __new__(cls, *args):
+ _args = args
+                if len(args) < 2:
+                    raise TypeError("name and value must be specified")
+                name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int( other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp )
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI'
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ test_pickle_exception(self.assertRaises, TypeError, NEI.x)
+ test_pickle_exception(self.assertRaises, PicklingError, NEI)
+
+ def test_subclasses_without_direct_pickle_support_using_name(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt'
+ def __new__(cls, *args):
+ _args = args
+                if len(args) < 2:
+                    raise TypeError("name and value must be specified")
+                name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int( other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp )
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI'
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+ def __reduce_ex__(self, proto):
+ return getattr, (self.__class__, self._name_)
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ test_pickle_dump_load(self.assertTrue, NEI.y)
+ test_pickle_dump_load(self.assertTrue, NEI)
+
+ def test_tuple_subclass(self):
+ class SomeTuple(tuple, Enum):
+ __qualname__ = 'SomeTuple'
+ first = (1, 'for the money')
+ second = (2, 'for the show')
+ third = (3, 'for the music')
+ self.assertTrue(type(SomeTuple.first) is SomeTuple)
+ self.assertTrue(isinstance(SomeTuple.second, tuple))
+ self.assertEqual(SomeTuple.third, (3, 'for the music'))
+ globals()['SomeTuple'] = SomeTuple
+ test_pickle_dump_load(self.assertTrue, SomeTuple.first)
+
+ def test_duplicate_values_give_unique_enum_items(self):
+ class AutoNumber(Enum):
+ __order__ = 'enum_m enum_d enum_y'
+ enum_m = ()
+ enum_d = ()
+ enum_y = ()
+ def __new__(cls):
+ value = len(cls.__members__) + 1
+ obj = object.__new__(cls)
+ obj._value_ = value
+ return obj
+ def __int__(self):
+ return int(self._value_)
+ self.assertEqual(int(AutoNumber.enum_d), 2)
+ self.assertEqual(AutoNumber.enum_y.value, 3)
+ self.assertTrue(AutoNumber(1) is AutoNumber.enum_m)
+ self.assertEqual(
+ list(AutoNumber),
+ [AutoNumber.enum_m, AutoNumber.enum_d, AutoNumber.enum_y],
+ )
+
+ def test_inherited_new_from_enhanced_enum(self):
+ class AutoNumber2(Enum):
+ def __new__(cls):
+ value = len(cls.__members__) + 1
+ obj = object.__new__(cls)
+ obj._value_ = value
+ return obj
+ def __int__(self):
+ return int(self._value_)
+ class Color(AutoNumber2):
+ _order_ = 'red green blue'
+ red = ()
+ green = ()
+ blue = ()
+ self.assertEqual(len(Color), 3, "wrong number of elements: %d (should be %d)" % (len(Color), 3))
+ self.assertEqual(list(Color), [Color.red, Color.green, Color.blue])
+ if pyver >= 3.0:
+ self.assertEqual(list(map(int, Color)), [1, 2, 3])
+
+ def test_inherited_new_from_mixed_enum(self):
+ class AutoNumber3(IntEnum):
+ def __new__(cls):
+ value = len(cls.__members__) + 1
+ obj = int.__new__(cls, value)
+ obj._value_ = value
+ return obj
+ class Color(AutoNumber3):
+ red = ()
+ green = ()
+ blue = ()
+ self.assertEqual(len(Color), 3, "wrong number of elements: %d (should be %d)" % (len(Color), 3))
+ Color.red
+ Color.green
+ Color.blue
+
+ def test_equality(self):
+ class AlwaysEqual:
+ def __eq__(self, other):
+ return True
+ class OrdinaryEnum(Enum):
+ a = 1
+ self.assertEqual(AlwaysEqual(), OrdinaryEnum.a)
+ self.assertEqual(OrdinaryEnum.a, AlwaysEqual())
+
+ def test_ordered_mixin(self):
+ class OrderedEnum(Enum):
+ def __ge__(self, other):
+ if self.__class__ is other.__class__:
+ return self._value_ >= other._value_
+ return NotImplemented
+ def __gt__(self, other):
+ if self.__class__ is other.__class__:
+ return self._value_ > other._value_
+ return NotImplemented
+ def __le__(self, other):
+ if self.__class__ is other.__class__:
+ return self._value_ <= other._value_
+ return NotImplemented
+ def __lt__(self, other):
+ if self.__class__ is other.__class__:
+ return self._value_ < other._value_
+ return NotImplemented
+ class Grade(OrderedEnum):
+ __order__ = 'A B C D F'
+ A = 5
+ B = 4
+ C = 3
+ D = 2
+ F = 1
+ self.assertEqual(list(Grade), [Grade.A, Grade.B, Grade.C, Grade.D, Grade.F])
+ self.assertTrue(Grade.A > Grade.B)
+ self.assertTrue(Grade.F <= Grade.C)
+ self.assertTrue(Grade.D < Grade.A)
+ self.assertTrue(Grade.B >= Grade.B)
+
+ def test_extending2(self):
+ def bad_extension():
+ class Shade(Enum):
+ def shade(self):
+ print(self.name)
+ class Color(Shade):
+ red = 1
+ green = 2
+ blue = 3
+ class MoreColor(Color):
+ cyan = 4
+ magenta = 5
+ yellow = 6
+ self.assertRaises(TypeError, bad_extension)
+
+ def test_extending3(self):
+ class Shade(Enum):
+ def shade(self):
+ return self.name
+ class Color(Shade):
+ def hex(self):
+ return '%s hexlified!' % self.value
+ class MoreColor(Color):
+ cyan = 4
+ magenta = 5
+ yellow = 6
+ self.assertEqual(MoreColor.magenta.hex(), '5 hexlified!')
+
+ def test_no_duplicates(self):
+ def bad_duplicates():
+ class UniqueEnum(Enum):
+ def __init__(self, *args):
+ cls = self.__class__
+ if any(self.value == e.value for e in cls):
+ a = self.name
+ e = cls(self.value).name
+ raise ValueError(
+ "aliases not allowed in UniqueEnum: %r --> %r"
+ % (a, e)
+ )
+ class Color(UniqueEnum):
+ red = 1
+ green = 2
+ blue = 3
+ class Color(UniqueEnum):
+ red = 1
+ green = 2
+ blue = 3
+ grene = 2
+ self.assertRaises(ValueError, bad_duplicates)
+
+ def test_init(self):
+ class Planet(Enum):
+ MERCURY = (3.303e+23, 2.4397e6)
+ VENUS = (4.869e+24, 6.0518e6)
+ EARTH = (5.976e+24, 6.37814e6)
+ MARS = (6.421e+23, 3.3972e6)
+ JUPITER = (1.9e+27, 7.1492e7)
+ SATURN = (5.688e+26, 6.0268e7)
+ URANUS = (8.686e+25, 2.5559e7)
+ NEPTUNE = (1.024e+26, 2.4746e7)
+ def __init__(self, mass, radius):
+ self.mass = mass # in kilograms
+ self.radius = radius # in meters
+ @property
+ def surface_gravity(self):
+ # universal gravitational constant (m3 kg-1 s-2)
+ G = 6.67300E-11
+ return G * self.mass / (self.radius * self.radius)
+ self.assertEqual(round(Planet.EARTH.surface_gravity, 2), 9.80)
+ self.assertEqual(Planet.EARTH.value, (5.976e+24, 6.37814e6))
+
+ def test_nonhash_value(self):
+ class AutoNumberInAList(Enum):
+ def __new__(cls):
+ value = [len(cls.__members__) + 1]
+ obj = object.__new__(cls)
+ obj._value_ = value
+ return obj
+ class ColorInAList(AutoNumberInAList):
+ _order_ = 'red green blue'
+ red = ()
+ green = ()
+ blue = ()
+ self.assertEqual(list(ColorInAList), [ColorInAList.red, ColorInAList.green, ColorInAList.blue])
+ self.assertEqual(ColorInAList.red.value, [1])
+ self.assertEqual(ColorInAList([1]), ColorInAList.red)
+
+ def test_conflicting_types_resolved_in_new(self):
+ class LabelledIntEnum(int, Enum):
+ def __new__(cls, *args):
+ value, label = args
+ obj = int.__new__(cls, value)
+ obj.label = label
+ obj._value_ = value
+ return obj
+
+ class LabelledList(LabelledIntEnum):
+ unprocessed = (1, "Unprocessed")
+ payment_complete = (2, "Payment Complete")
+
+ self.assertEqual(list(LabelledList), [LabelledList.unprocessed, LabelledList.payment_complete])
+ self.assertEqual(LabelledList.unprocessed, 1)
+ self.assertEqual(LabelledList(1), LabelledList.unprocessed)
+
+ def test_empty_with_functional_api(self):
+ empty = enum.IntEnum('Foo', {})
+ self.assertEqual(len(empty), 0)
+
+
+class TestUnique(unittest.TestCase):
+ """2.4 doesn't allow class decorators, use function syntax."""
+
+ def test_unique_clean(self):
+ class Clean(Enum):
+ one = 1
+ two = 'dos'
+ tres = 4.0
+ unique(Clean)
+ class Cleaner(IntEnum):
+ single = 1
+ double = 2
+ triple = 3
+ unique(Cleaner)
+
+ def test_unique_dirty(self):
+ try:
+ class Dirty(Enum):
+ __order__ = 'one two tres'
+ one = 1
+ two = 'dos'
+ tres = 1
+ unique(Dirty)
+ except ValueError:
+ exc = sys.exc_info()[1]
+ message = exc.args[0]
+ self.assertTrue('tres -> one' in message)
+
+ try:
+ class Dirtier(IntEnum):
+ _order_ = 'single double triple turkey'
+ single = 1
+ double = 1
+ triple = 3
+ turkey = 3
+ unique(Dirtier)
+ except ValueError:
+ exc = sys.exc_info()[1]
+ message = exc.args[0]
+ self.assertTrue('double -> single' in message)
+ self.assertTrue('turkey -> triple' in message)
+
+
+class TestMe(unittest.TestCase):
+
+ pass
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/web-platform/tests/tools/third_party/enum/setup.cfg b/testing/web-platform/tests/tools/third_party/enum/setup.cfg
new file mode 100644
index 0000000000..8bfd5a12f8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/enum/setup.cfg
@@ -0,0 +1,4 @@
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/testing/web-platform/tests/tools/third_party/enum/setup.py b/testing/web-platform/tests/tools/third_party/enum/setup.py
new file mode 100644
index 0000000000..f54071e7f0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/enum/setup.py
@@ -0,0 +1,105 @@
+import os
+import sys
+import setuptools
+from distutils.core import setup
+
+
+if sys.version_info[:2] < (2, 7):
+ required = ['ordereddict']
+else:
+ required = []
+
+# Don't shadow builtin enum package if we are being installed on a
+# recent Python. This causes conflicts since at least 3.6:
+# https://bitbucket.org/stoneleaf/enum34/issues/19/enum34-isnt-compatible-with-python-36
+if sys.version_info[:2] < (3, 4):
+ packages = ['enum']
+else:
+ packages = []
+
+long_desc = '''\
+enum --- support for enumerations
+========================================
+
+An enumeration is a set of symbolic names (members) bound to unique, constant
+values. Within an enumeration, the members can be compared by identity, and
+the enumeration itself can be iterated over.
+
+ from enum import Enum
+
+ class Fruit(Enum):
+ apple = 1
+ banana = 2
+ orange = 3
+
+ list(Fruit)
+ # [<Fruit.apple: 1>, <Fruit.banana: 2>, <Fruit.orange: 3>]
+
+ len(Fruit)
+ # 3
+
+ Fruit.banana
+ # <Fruit.banana: 2>
+
+ Fruit['banana']
+ # <Fruit.banana: 2>
+
+ Fruit(2)
+ # <Fruit.banana: 2>
+
+ Fruit.banana is Fruit['banana'] is Fruit(2)
+ # True
+
+ Fruit.banana.name
+ # 'banana'
+
+ Fruit.banana.value
+ # 2
+
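+The same members can also be created with the functional API; for example, the
+following is equivalent to the ``Fruit`` class above (an added illustration of
+the same enumeration):
+
+    Fruit = Enum('Fruit', 'apple banana orange')
+
+    Fruit.banana
+    # <Fruit.banana: 2>
+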
+Repository and Issue Tracker at https://bitbucket.org/stoneleaf/enum34.
+'''
+
+py2_only = ()
+py3_only = ()
+make = [
+ # 'rst2pdf enum/doc/enum.rst --output=enum/doc/enum.pdf',
+ ]
+
+
+data = dict(
+ name='enum34',
+ version='1.1.10',
+ url='https://bitbucket.org/stoneleaf/enum34',
+ packages=packages,
+ package_data={
+ 'enum' : [
+ 'LICENSE',
+ 'README',
+ 'doc/enum.rst',
+ 'doc/enum.pdf',
+ 'test.py',
+ ]
+ },
+ license='BSD License',
+ description='Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4',
+ long_description=long_desc,
+ provides=['enum'],
+ install_requires=required,
+ author='Ethan Furman',
+ author_email='ethan@stoneleaf.us',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: BSD License',
+ 'Programming Language :: Python',
+ 'Topic :: Software Development',
+ 'Programming Language :: Python :: 2.4',
+ 'Programming Language :: Python :: 2.5',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.3',
+ ],
+ )
+
+if __name__ == '__main__':
+ setup(**data)
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/.coveragerc b/testing/web-platform/tests/tools/third_party/funcsigs/.coveragerc
new file mode 100644
index 0000000000..d83bfc220b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/.coveragerc
@@ -0,0 +1,6 @@
+[run]
+source=funcsigs
+omit=funcsigs/odict*
+
+[report]
+include=funcsigs*
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/.gitignore b/testing/web-platform/tests/tools/third_party/funcsigs/.gitignore
new file mode 100644
index 0000000000..c8d2af85d3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/.gitignore
@@ -0,0 +1,19 @@
+*~
+*.egg
+*.egg-info
+*.pyc
+*.pyo
+*.swp
+.DS_Store
+.coverage
+.tox/
+MANIFEST
+build/
+docs/.build/
+dist/
+env*/
+htmlcov/
+tmp/
+coverage.xml
+junit.xml
+.eggs/
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/.travis.yml b/testing/web-platform/tests/tools/third_party/funcsigs/.travis.yml
new file mode 100644
index 0000000000..c1e7abe0b4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/.travis.yml
@@ -0,0 +1,18 @@
+language: python
+python:
+ - 2.6
+ - 2.7
+ - 3.3
+ - 3.4
+ - 3.5
+ - nightly
+ - pypy
+# - pypy3
+install:
+ - pip install -U pip setuptools wheel
+ - pip install -r requirements/development.txt .
+script:
+ - coverage run setup.py test
+ - coverage report --show-missing
+after_success:
+ - coveralls
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/CHANGELOG b/testing/web-platform/tests/tools/third_party/funcsigs/CHANGELOG
new file mode 100644
index 0000000000..e1366d2668
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/CHANGELOG
@@ -0,0 +1,24 @@
+Changelog
+---------
+
+0.5
+```
+
+* Fix binding with self as a kwarg. (Robert Collins #14)
+
+0.4 (2013-12-20)
+````````````````
+* Fix unbound methods getting their first parameter curried
+* Publish Python wheel packages
+
+0.3 (2013-05-29)
+````````````````
+* Fix annotation formatting of builtin types on Python 2.x
+
+0.2 (2012-01-07)
+````````````````
+* PyPy compatibility
+
+0.1 (2012-01-06)
+````````````````
+* Initial release
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/LICENSE b/testing/web-platform/tests/tools/third_party/funcsigs/LICENSE
new file mode 100644
index 0000000000..3e563d6fbd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2013 Aaron Iles
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/MANIFEST.in b/testing/web-platform/tests/tools/third_party/funcsigs/MANIFEST.in
new file mode 100644
index 0000000000..f0abb42f04
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/MANIFEST.in
@@ -0,0 +1,7 @@
+recursive-include docs *
+recursive-include tests *.py
+include *.py
+include CHANGELOG
+include LICENSE
+include MANIFEST.in
+include README.rst
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/Makefile b/testing/web-platform/tests/tools/third_party/funcsigs/Makefile
new file mode 100644
index 0000000000..e2329231b5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/Makefile
@@ -0,0 +1,39 @@
+SHELL := /bin/bash
+
+deps:
+ pip install --upgrade \
+ -r requirements/development.txt \
+ -r requirements/production.txt
+
+sdist:
+ python setup.py sdist
+ python setup.py bdist_wheel
+
+register:
+ python setup.py register
+ python setup.py sdist upload
+ python setup.py bdist_wheel upload
+
+site:
+ cd docs; make html
+
+test:
+ coverage run setup.py test
+
+unittest:
+ coverage run -m unittest discover
+
+lint:
+ flake8 --exit-zero funcsigs tests
+
+coverage:
+ coverage report --show-missing
+
+clean:
+ python setup.py clean --all
+ find . -type f -name "*.pyc" -exec rm '{}' +
+ find . -type d -name "__pycache__" -exec rmdir '{}' +
+ rm -rf *.egg-info .coverage
+ cd docs; make clean
+
+docs: site
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/README.rst b/testing/web-platform/tests/tools/third_party/funcsigs/README.rst
new file mode 100644
index 0000000000..5fbca27e6e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/README.rst
@@ -0,0 +1,353 @@
+.. funcsigs documentation master file, created by
+ sphinx-quickstart on Fri Apr 20 20:27:52 2012.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Introducing funcsigs
+====================
+
+The Funcsigs Package
+--------------------
+
+``funcsigs`` is a backport of the `PEP 362`_ function signature features from
+Python 3.3's `inspect`_ module. The backport is compatible with Python 2.6, 2.7
+as well as 3.3 and up. 3.2 was supported by version 0.4, but with setuptools and
+pip no longer supporting 3.2, we cannot make any statement about 3.2
+compatibility.
+
+Compatibility
+`````````````
+
+The ``funcsigs`` backport has been tested against:
+
+* CPython 2.6
+* CPython 2.7
+* CPython 3.3
+* CPython 3.4
+* CPython 3.5
+* CPython nightlies
+* PyPy and PyPy3 (currently failing CI)
+
+Continuous integration testing is provided by `Travis CI`_.
+
+Under Python 2.x there is a compatibility issue when a function is assigned to
+the ``__wrapped__`` property of a class after it has been constructed.
+Similarly, under PyPy, directly passing the ``__call__`` method of a builtin
+is also a compatibility issue. Otherwise the functionality is believed to be
+uniform between Python 2 and Python 3.
+
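+For example, the first caveat concerns the following pattern (``Wrapped`` and
+``wrapped`` here are illustrative names only, not part of funcsigs)::
+
+    import funcsigs
+
+    def wrapped(a, b=1):
+        pass
+
+    class Wrapped(object):
+        pass
+
+    # Assigning to __wrapped__ after the class has been constructed is the
+    # pattern the Python 2.x caveat above refers to; inspecting the class
+    # with funcsigs.signature may then behave differently than on Python 3.
+    Wrapped.__wrapped__ = wrapped
+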
+Issues
+``````
+
+Source code for ``funcsigs`` is hosted on `GitHub`_. Any bug reports or feature
+requests can be made using GitHub's `issues system`_. |build_status| |coverage|
+
+Example
+-------
+
+To obtain a `Signature` object, pass the target function to the
+``funcsigs.signature`` function.
+
+.. code-block:: python
+
+ >>> from funcsigs import signature
+ >>> def foo(a, b=None, *args, **kwargs):
+ ... pass
+ ...
+ >>> sig = signature(foo)
+ >>> sig
+ <funcsigs.Signature object at 0x...>
+ >>> sig.parameters
+ OrderedDict([('a', <Parameter at 0x... 'a'>), ('b', <Parameter at 0x... 'b'>), ('args', <Parameter at 0x... 'args'>), ('kwargs', <Parameter at 0x... 'kwargs'>)])
+ >>> sig.return_annotation
+ <class 'funcsigs._empty'>
+
+Introspecting callables with the Signature object
+-------------------------------------------------
+
+.. note::
+
+ This section of documentation is a direct reproduction of the Python
+ standard library documentation for the inspect module.
+
+The Signature object represents the call signature of a callable object and its
+return annotation. To retrieve a Signature object, use the :func:`signature`
+function.
+
+.. function:: signature(callable)
+
+ Return a :class:`Signature` object for the given ``callable``::
+
+ >>> from funcsigs import signature
+ >>> def foo(a, *, b:int, **kwargs):
+ ... pass
+
+ >>> sig = signature(foo)
+
+ >>> str(sig)
+ '(a, *, b:int, **kwargs)'
+
+ >>> str(sig.parameters['b'])
+ 'b:int'
+
+ >>> sig.parameters['b'].annotation
+ <class 'int'>
+
+   Accepts a wide range of Python callables, from plain functions and classes to
+ :func:`functools.partial` objects.
+
+ .. note::
+
+ Some callables may not be introspectable in certain implementations of
+ Python. For example, in CPython, built-in functions defined in C provide
+ no metadata about their arguments.
+
+
+.. class:: Signature
+
+ A Signature object represents the call signature of a function and its return
+ annotation. For each parameter accepted by the function it stores a
+ :class:`Parameter` object in its :attr:`parameters` collection.
+
+ Signature objects are *immutable*. Use :meth:`Signature.replace` to make a
+ modified copy.
+
+ .. attribute:: Signature.empty
+
+ A special class-level marker to specify absence of a return annotation.
+
+ .. attribute:: Signature.parameters
+
+ An ordered mapping of parameters' names to the corresponding
+ :class:`Parameter` objects.
+
+ .. attribute:: Signature.return_annotation
+
+ The "return" annotation for the callable. If the callable has no "return"
+ annotation, this attribute is set to :attr:`Signature.empty`.
+
+ .. method:: Signature.bind(*args, **kwargs)
+
+ Create a mapping from positional and keyword arguments to parameters.
+ Returns :class:`BoundArguments` if ``*args`` and ``**kwargs`` match the
+ signature, or raises a :exc:`TypeError`.
+
+ .. method:: Signature.bind_partial(*args, **kwargs)
+
+ Works the same way as :meth:`Signature.bind`, but allows the omission of
+ some required arguments (mimics :func:`functools.partial` behavior.)
+ Returns :class:`BoundArguments`, or raises a :exc:`TypeError` if the
+ passed arguments do not match the signature.
+
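+      For example (an added illustration, not part of the reproduced standard
+      library text), ``bind`` requires every argument that has no default,
+      while ``bind_partial`` does not::
+
+         >>> from funcsigs import signature
+         >>> def foo(a, b=10):
+         ...     pass
+         >>> sig = signature(foo)
+         >>> sorted(sig.bind(5).arguments)
+         ['a']
+         >>> sorted(sig.bind_partial().arguments)
+         []
+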
+ .. method:: Signature.replace(*[, parameters][, return_annotation])
+
+ Create a new Signature instance based on the instance replace was invoked
+ on. It is possible to pass different ``parameters`` and/or
+ ``return_annotation`` to override the corresponding properties of the base
+ signature. To remove return_annotation from the copied Signature, pass in
+ :attr:`Signature.empty`.
+
+ ::
+
+ >>> def test(a, b):
+ ... pass
+ >>> sig = signature(test)
+ >>> new_sig = sig.replace(return_annotation="new return anno")
+ >>> str(new_sig)
+ "(a, b) -> 'new return anno'"
+
+
+.. class:: Parameter
+
+ Parameter objects are *immutable*. Instead of modifying a Parameter object,
+ you can use :meth:`Parameter.replace` to create a modified copy.
+
+ .. attribute:: Parameter.empty
+
+ A special class-level marker to specify absence of default values and
+ annotations.
+
+ .. attribute:: Parameter.name
+
+      The name of the parameter as a string. Must be a valid Python identifier
+ name (with the exception of ``POSITIONAL_ONLY`` parameters, which can have
+ it set to ``None``).
+
+ .. attribute:: Parameter.default
+
+ The default value for the parameter. If the parameter has no default
+ value, this attribute is set to :attr:`Parameter.empty`.
+
+ .. attribute:: Parameter.annotation
+
+ The annotation for the parameter. If the parameter has no annotation,
+ this attribute is set to :attr:`Parameter.empty`.
+
+ .. attribute:: Parameter.kind
+
+ Describes how argument values are bound to the parameter. Possible values
+ (accessible via :class:`Parameter`, like ``Parameter.KEYWORD_ONLY``):
+
+ +------------------------+----------------------------------------------+
+ | Name | Meaning |
+ +========================+==============================================+
+ | *POSITIONAL_ONLY* | Value must be supplied as a positional |
+ | | argument. |
+ | | |
+ | | Python has no explicit syntax for defining |
+ | | positional-only parameters, but many built-in|
+ | | and extension module functions (especially |
+ | | those that accept only one or two parameters)|
+ | | accept them. |
+ +------------------------+----------------------------------------------+
+ | *POSITIONAL_OR_KEYWORD*| Value may be supplied as either a keyword or |
+ | | positional argument (this is the standard |
+ | | binding behaviour for functions implemented |
+ | | in Python.) |
+ +------------------------+----------------------------------------------+
+ | *VAR_POSITIONAL* | A tuple of positional arguments that aren't |
+ | | bound to any other parameter. This |
+ | | corresponds to a ``*args`` parameter in a |
+ | | Python function definition. |
+ +------------------------+----------------------------------------------+
+ | *KEYWORD_ONLY* | Value must be supplied as a keyword argument.|
+ | | Keyword only parameters are those which |
+ | | appear after a ``*`` or ``*args`` entry in a |
+ | | Python function definition. |
+ +------------------------+----------------------------------------------+
+ | *VAR_KEYWORD* | A dict of keyword arguments that aren't bound|
+ | | to any other parameter. This corresponds to a|
+ | | ``**kwargs`` parameter in a Python function |
+ | | definition. |
+ +------------------------+----------------------------------------------+
+
+ Example: print all keyword-only arguments without default values::
+
+ >>> def foo(a, b, *, c, d=10):
+ ... pass
+
+ >>> sig = signature(foo)
+ >>> for param in sig.parameters.values():
+ ... if (param.kind == param.KEYWORD_ONLY and
+ ... param.default is param.empty):
+ ... print('Parameter:', param)
+ Parameter: c
+
+ .. method:: Parameter.replace(*[, name][, kind][, default][, annotation])
+
+      Create a new Parameter instance based on the instance replace was invoked
+ on. To override a :class:`Parameter` attribute, pass the corresponding
+ argument. To remove a default value or/and an annotation from a
+ Parameter, pass :attr:`Parameter.empty`.
+
+ ::
+
+ >>> from funcsigs import Parameter
+ >>> param = Parameter('foo', Parameter.KEYWORD_ONLY, default=42)
+ >>> str(param)
+ 'foo=42'
+
+ >>> str(param.replace()) # Will create a shallow copy of 'param'
+ 'foo=42'
+
+ >>> str(param.replace(default=Parameter.empty, annotation='spam'))
+ "foo:'spam'"
+
+
+.. class:: BoundArguments
+
+ Result of a :meth:`Signature.bind` or :meth:`Signature.bind_partial` call.
+ Holds the mapping of arguments to the function's parameters.
+
+ .. attribute:: BoundArguments.arguments
+
+ An ordered, mutable mapping (:class:`collections.OrderedDict`) of
+ parameters' names to arguments' values. Contains only explicitly bound
+ arguments. Changes in :attr:`arguments` will reflect in :attr:`args` and
+ :attr:`kwargs`.
+
+ Should be used in conjunction with :attr:`Signature.parameters` for any
+ argument processing purposes.
+
+ .. note::
+
+ Arguments for which :meth:`Signature.bind` or
+ :meth:`Signature.bind_partial` relied on a default value are skipped.
+ However, if needed, it is easy to include them.
+
+ ::
+
+ >>> def foo(a, b=10):
+ ... pass
+
+ >>> sig = signature(foo)
+ >>> ba = sig.bind(5)
+
+ >>> ba.args, ba.kwargs
+ ((5,), {})
+
+ >>> for param in sig.parameters.values():
+ ... if param.name not in ba.arguments:
+ ... ba.arguments[param.name] = param.default
+
+ >>> ba.args, ba.kwargs
+ ((5, 10), {})
+
+
+ .. attribute:: BoundArguments.args
+
+      A tuple of positional argument values. Dynamically computed from the
+ :attr:`arguments` attribute.
+
+ .. attribute:: BoundArguments.kwargs
+
+      A dict of keyword argument values. Dynamically computed from the
+ :attr:`arguments` attribute.
+
+ The :attr:`args` and :attr:`kwargs` properties can be used to invoke
+ functions::
+
+ def test(a, *, b):
+ ...
+
+ sig = signature(test)
+ ba = sig.bind(10, b=20)
+ test(*ba.args, **ba.kwargs)
+
+
+.. seealso::
+
+ :pep:`362` - Function Signature Object.
+ The detailed specification, implementation details and examples.
+
+Copyright
+---------
+
+*funcsigs* is a derived work of CPython under the terms of the `PSF License
+Agreement`_. The original CPython inspect module, its unit tests and
+documentation are the copyright of the Python Software Foundation. The derived
+work is distributed under the `Apache License Version 2.0`_.
+
+.. _PSF License Agreement: http://docs.python.org/3/license.html#terms-and-conditions-for-accessing-or-otherwise-using-python
+.. _Apache License Version 2.0: http://opensource.org/licenses/Apache-2.0
+.. _GitHub: https://github.com/testing-cabal/funcsigs
+.. _Travis CI: http://travis-ci.org/
+.. _Read The Docs: http://funcsigs.readthedocs.org/
+.. _PEP 362: http://www.python.org/dev/peps/pep-0362/
+.. _inspect: http://docs.python.org/3/library/inspect.html#introspecting-callables-with-the-signature-object
+.. _issues system: https://github.com/testing-cabal/funcsigs/issues
+
+.. |build_status| image:: https://secure.travis-ci.org/aliles/funcsigs.png?branch=master
+ :target: http://travis-ci.org/#!/aliles/funcsigs
+ :alt: Current build status
+
+.. |coverage| image:: https://coveralls.io/repos/aliles/funcsigs/badge.png?branch=master
+ :target: https://coveralls.io/r/aliles/funcsigs?branch=master
+ :alt: Coverage status
+
+.. |pypi_version| image:: https://pypip.in/v/funcsigs/badge.png
+ :target: https://crate.io/packages/funcsigs/
+ :alt: Latest PyPI version
+
+
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/docs/Makefile b/testing/web-platform/tests/tools/third_party/funcsigs/docs/Makefile
new file mode 100644
index 0000000000..f7ab3d16b4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/docs/Makefile
@@ -0,0 +1,153 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/funcsigs.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/funcsigs.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/funcsigs"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/funcsigs"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/docs/_templates/page.html b/testing/web-platform/tests/tools/third_party/funcsigs/docs/_templates/page.html
new file mode 100644
index 0000000000..5e1e00bcaf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/docs/_templates/page.html
@@ -0,0 +1,9 @@
+{% extends "!page.html" %}
+{% block extrahead %}
+ <a href="https://github.com/aliles/funcsigs">
+ <img style="position: absolute; top: 0; right: 0; border: 0;"
+ src="https://s3.amazonaws.com/github/ribbons/forkme_right_red_aa0000.png"
+ alt="Fork me on GitHub">
+ </a>
+ {{ super() }}
+{% endblock %}
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/docs/conf.py b/testing/web-platform/tests/tools/third_party/funcsigs/docs/conf.py
new file mode 100644
index 0000000000..c6e4194cc0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/docs/conf.py
@@ -0,0 +1,251 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# funcsigs documentation build configuration file, created by
+# sphinx-quickstart on Fri Apr 20 20:27:52 2012.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('..'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'funcsigs'
+copyright = '2013, Aaron Iles'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+from funcsigs import __version__
+version = '.'.join(__version__.split('.')[:2])
+# The full version, including alpha/beta/rc tags.
+release = __version__
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'agogo'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'funcsigsdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'funcsigs.tex', 'funcsigs Documentation',
+ 'Aaron Iles', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'funcsigs', 'funcsigs Documentation',
+ ['Aaron Iles'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'funcsigs', 'funcsigs Documentation',
+ 'Aaron Iles', 'funcsigs', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+ 'python3': ('http://docs.python.org/py3k', None),
+ 'python': ('http://docs.python.org/', None)
+}
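
For illustration (not part of the vendored files): conf.py above derives the short and full version strings from the installed package rather than hard-coding them. A minimal sketch of that split, assuming the vendored funcsigs package is importable:

    from funcsigs import __version__   # "1.0.2" per funcsigs/version.py below

    release = __version__                            # full version, e.g. "1.0.2"
    version = '.'.join(__version__.split('.')[:2])   # short X.Y version, e.g. "1.0"
    print(version, release)
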
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/funcsigs/__init__.py b/testing/web-platform/tests/tools/third_party/funcsigs/funcsigs/__init__.py
new file mode 100644
index 0000000000..5f5378b42a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/funcsigs/__init__.py
@@ -0,0 +1,829 @@
+# Copyright 2001-2013 Python Software Foundation; All Rights Reserved
+"""Function signature objects for callables
+
+Backport of Python 3.3's function signature tools from the inspect module,
+modified to be compatible with Python 2.6, 2.7 and 3.3+.
+"""
+from __future__ import absolute_import, division, print_function
+import itertools
+import functools
+import re
+import types
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ from ordereddict import OrderedDict
+
+from funcsigs.version import __version__
+
+__all__ = ['BoundArguments', 'Parameter', 'Signature', 'signature']
+
+
+_WrapperDescriptor = type(type.__call__)
+_MethodWrapper = type(all.__call__)
+
+_NonUserDefinedCallables = (_WrapperDescriptor,
+ _MethodWrapper,
+ types.BuiltinFunctionType)
+
+
+def formatannotation(annotation, base_module=None):
+ if isinstance(annotation, type):
+ if annotation.__module__ in ('builtins', '__builtin__', base_module):
+ return annotation.__name__
+ return annotation.__module__+'.'+annotation.__name__
+ return repr(annotation)
+
+
+def _get_user_defined_method(cls, method_name, *nested):
+ try:
+ if cls is type:
+ return
+ meth = getattr(cls, method_name)
+ for name in nested:
+ meth = getattr(meth, name, meth)
+ except AttributeError:
+ return
+ else:
+ if not isinstance(meth, _NonUserDefinedCallables):
+            # Once '__signature__' is added to 'C'-level
+            # callables, this check won't be necessary
+ return meth
+
+
+def signature(obj):
+ '''Get a signature object for the passed callable.'''
+
+ if not callable(obj):
+ raise TypeError('{0!r} is not a callable object'.format(obj))
+
+ if isinstance(obj, types.MethodType):
+ sig = signature(obj.__func__)
+ if obj.__self__ is None:
+ # Unbound method - preserve as-is.
+ return sig
+ else:
+ # Bound method. Eat self - if we can.
+ params = tuple(sig.parameters.values())
+
+ if not params or params[0].kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
+ raise ValueError('invalid method signature')
+
+ kind = params[0].kind
+ if kind in (_POSITIONAL_OR_KEYWORD, _POSITIONAL_ONLY):
+ # Drop first parameter:
+ # '(p1, p2[, ...])' -> '(p2[, ...])'
+ params = params[1:]
+ else:
+ if kind is not _VAR_POSITIONAL:
+ # Unless we add a new parameter type we never
+ # get here
+ raise ValueError('invalid argument type')
+ # It's a var-positional parameter.
+ # Do nothing. '(*args[, ...])' -> '(*args[, ...])'
+
+ return sig.replace(parameters=params)
+
+ try:
+ sig = obj.__signature__
+ except AttributeError:
+ pass
+ else:
+ if sig is not None:
+ return sig
+
+ try:
+ # Was this function wrapped by a decorator?
+ wrapped = obj.__wrapped__
+ except AttributeError:
+ pass
+ else:
+ return signature(wrapped)
+
+ if isinstance(obj, types.FunctionType):
+ return Signature.from_function(obj)
+
+ if isinstance(obj, functools.partial):
+ sig = signature(obj.func)
+
+ new_params = OrderedDict(sig.parameters.items())
+
+ partial_args = obj.args or ()
+ partial_keywords = obj.keywords or {}
+ try:
+ ba = sig.bind_partial(*partial_args, **partial_keywords)
+ except TypeError as ex:
+ msg = 'partial object {0!r} has incorrect arguments'.format(obj)
+ raise ValueError(msg)
+
+ for arg_name, arg_value in ba.arguments.items():
+ param = new_params[arg_name]
+ if arg_name in partial_keywords:
+ # We set a new default value, because the following code
+ # is correct:
+ #
+ # >>> def foo(a): print(a)
+ # >>> print(partial(partial(foo, a=10), a=20)())
+ # 20
+ # >>> print(partial(partial(foo, a=10), a=20)(a=30))
+ # 30
+ #
+ # So, with 'partial' objects, passing a keyword argument is
+ # like setting a new default value for the corresponding
+ # parameter
+ #
+ # We also mark this parameter with '_partial_kwarg'
+ # flag. Later, in '_bind', the 'default' value of this
+ # parameter will be added to 'kwargs', to simulate
+ # the 'functools.partial' real call.
+ new_params[arg_name] = param.replace(default=arg_value,
+ _partial_kwarg=True)
+
+ elif (param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL) and
+ not param._partial_kwarg):
+ new_params.pop(arg_name)
+
+ return sig.replace(parameters=new_params.values())
+
+ sig = None
+ if isinstance(obj, type):
+ # obj is a class or a metaclass
+
+ # First, let's see if it has an overloaded __call__ defined
+ # in its metaclass
+ call = _get_user_defined_method(type(obj), '__call__')
+ if call is not None:
+ sig = signature(call)
+ else:
+ # Now we check if the 'obj' class has a '__new__' method
+ new = _get_user_defined_method(obj, '__new__')
+ if new is not None:
+ sig = signature(new)
+ else:
+ # Finally, we should have at least __init__ implemented
+ init = _get_user_defined_method(obj, '__init__')
+ if init is not None:
+ sig = signature(init)
+ elif not isinstance(obj, _NonUserDefinedCallables):
+ # An object with __call__
+ # We also check that the 'obj' is not an instance of
+ # _WrapperDescriptor or _MethodWrapper to avoid
+ # infinite recursion (and even potential segfault)
+ call = _get_user_defined_method(type(obj), '__call__', 'im_func')
+ if call is not None:
+ sig = signature(call)
+
+ if sig is not None:
+ # For classes and objects we skip the first parameter of their
+ # __call__, __new__, or __init__ methods
+ return sig.replace(parameters=tuple(sig.parameters.values())[1:])
+
+ if isinstance(obj, types.BuiltinFunctionType):
+ # Raise a nicer error message for builtins
+ msg = 'no signature found for builtin function {0!r}'.format(obj)
+ raise ValueError(msg)
+
+ raise ValueError('callable {0!r} is not supported by signature'.format(obj))
+
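
For illustration (not part of the vendored files): a minimal usage sketch of the signature() entry point defined above, assuming the vendored funcsigs package is on the import path; the function name and parameters below are made up for the example.

    from funcsigs import signature

    def greet(name, punctuation='!'):
        return name + punctuation

    sig = signature(greet)
    print(str(sig))                                # (name, punctuation='!')
    print(list(sig.parameters))                    # ['name', 'punctuation']
    print(sig.parameters['punctuation'].default)   # !
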
+
+class _void(object):
+ '''A private marker - used in Parameter & Signature'''
+
+
+class _empty(object):
+ pass
+
+
+class _ParameterKind(int):
+ def __new__(self, *args, **kwargs):
+ obj = int.__new__(self, *args)
+ obj._name = kwargs['name']
+ return obj
+
+ def __str__(self):
+ return self._name
+
+ def __repr__(self):
+ return '<_ParameterKind: {0!r}>'.format(self._name)
+
+
+_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY')
+_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD')
+_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL')
+_KEYWORD_ONLY = _ParameterKind(3, name='KEYWORD_ONLY')
+_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD')
+
+
+class Parameter(object):
+ '''Represents a parameter in a function signature.
+
+ Has the following public attributes:
+
+ * name : str
+ The name of the parameter as a string.
+ * default : object
+ The default value for the parameter if specified. If the
+ parameter has no default value, this attribute is not set.
+ * annotation
+ The annotation for the parameter if specified. If the
+ parameter has no annotation, this attribute is not set.
+ * kind : str
+ Describes how argument values are bound to the parameter.
+ Possible values: `Parameter.POSITIONAL_ONLY`,
+ `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`,
+ `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`.
+ '''
+
+ __slots__ = ('_name', '_kind', '_default', '_annotation', '_partial_kwarg')
+
+ POSITIONAL_ONLY = _POSITIONAL_ONLY
+ POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD
+ VAR_POSITIONAL = _VAR_POSITIONAL
+ KEYWORD_ONLY = _KEYWORD_ONLY
+ VAR_KEYWORD = _VAR_KEYWORD
+
+ empty = _empty
+
+ def __init__(self, name, kind, default=_empty, annotation=_empty,
+ _partial_kwarg=False):
+
+ if kind not in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD,
+ _VAR_POSITIONAL, _KEYWORD_ONLY, _VAR_KEYWORD):
+ raise ValueError("invalid value for 'Parameter.kind' attribute")
+ self._kind = kind
+
+ if default is not _empty:
+ if kind in (_VAR_POSITIONAL, _VAR_KEYWORD):
+ msg = '{0} parameters cannot have default values'.format(kind)
+ raise ValueError(msg)
+ self._default = default
+ self._annotation = annotation
+
+ if name is None:
+ if kind != _POSITIONAL_ONLY:
+ raise ValueError("None is not a valid name for a "
+ "non-positional-only parameter")
+ self._name = name
+ else:
+ name = str(name)
+ if kind != _POSITIONAL_ONLY and not re.match(r'[a-z_]\w*$', name, re.I):
+ msg = '{0!r} is not a valid parameter name'.format(name)
+ raise ValueError(msg)
+ self._name = name
+
+ self._partial_kwarg = _partial_kwarg
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def default(self):
+ return self._default
+
+ @property
+ def annotation(self):
+ return self._annotation
+
+ @property
+ def kind(self):
+ return self._kind
+
+ def replace(self, name=_void, kind=_void, annotation=_void,
+ default=_void, _partial_kwarg=_void):
+ '''Creates a customized copy of the Parameter.'''
+
+ if name is _void:
+ name = self._name
+
+ if kind is _void:
+ kind = self._kind
+
+ if annotation is _void:
+ annotation = self._annotation
+
+ if default is _void:
+ default = self._default
+
+ if _partial_kwarg is _void:
+ _partial_kwarg = self._partial_kwarg
+
+ return type(self)(name, kind, default=default, annotation=annotation,
+ _partial_kwarg=_partial_kwarg)
+
+ def __str__(self):
+ kind = self.kind
+
+ formatted = self._name
+ if kind == _POSITIONAL_ONLY:
+ if formatted is None:
+ formatted = ''
+ formatted = '<{0}>'.format(formatted)
+
+ # Add annotation and default value
+ if self._annotation is not _empty:
+ formatted = '{0}:{1}'.format(formatted,
+ formatannotation(self._annotation))
+
+ if self._default is not _empty:
+ formatted = '{0}={1}'.format(formatted, repr(self._default))
+
+ if kind == _VAR_POSITIONAL:
+ formatted = '*' + formatted
+ elif kind == _VAR_KEYWORD:
+ formatted = '**' + formatted
+
+ return formatted
+
+ def __repr__(self):
+ return '<{0} at {1:#x} {2!r}>'.format(self.__class__.__name__,
+ id(self), self.name)
+
+ def __hash__(self):
+ msg = "unhashable type: '{0}'".format(self.__class__.__name__)
+ raise TypeError(msg)
+
+ def __eq__(self, other):
+ return (issubclass(other.__class__, Parameter) and
+ self._name == other._name and
+ self._kind == other._kind and
+ self._default == other._default and
+ self._annotation == other._annotation)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
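
For illustration (not part of the vendored files): constructing and copying Parameter objects as described in the docstring above; the parameter name is made up for the example.

    from funcsigs import Parameter

    p = Parameter('timeout', Parameter.POSITIONAL_OR_KEYWORD, default=30)
    print(str(p))       # timeout=30
    print(str(p.kind))  # POSITIONAL_OR_KEYWORD

    # replace() returns a modified copy; the original Parameter is untouched.
    kw_only = p.replace(kind=Parameter.KEYWORD_ONLY, default=Parameter.empty)
    print(str(kw_only)) # timeout
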
+
+class BoundArguments(object):
+ '''Result of `Signature.bind` call. Holds the mapping of arguments
+ to the function's parameters.
+
+ Has the following public attributes:
+
+ * arguments : OrderedDict
+ An ordered mutable mapping of parameters' names to arguments' values.
+ Does not contain arguments' default values.
+ * signature : Signature
+ The Signature object that created this instance.
+ * args : tuple
+ Tuple of positional arguments values.
+ * kwargs : dict
+ Dict of keyword arguments values.
+ '''
+
+ def __init__(self, signature, arguments):
+ self.arguments = arguments
+ self._signature = signature
+
+ @property
+ def signature(self):
+ return self._signature
+
+ @property
+ def args(self):
+ args = []
+ for param_name, param in self._signature.parameters.items():
+ if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
+ param._partial_kwarg):
+ # Keyword arguments mapped by 'functools.partial'
+ # (Parameter._partial_kwarg is True) are mapped
+ # in 'BoundArguments.kwargs', along with VAR_KEYWORD &
+ # KEYWORD_ONLY
+ break
+
+ try:
+ arg = self.arguments[param_name]
+ except KeyError:
+ # We're done here. Other arguments
+ # will be mapped in 'BoundArguments.kwargs'
+ break
+ else:
+ if param.kind == _VAR_POSITIONAL:
+ # *args
+ args.extend(arg)
+ else:
+ # plain argument
+ args.append(arg)
+
+ return tuple(args)
+
+ @property
+ def kwargs(self):
+ kwargs = {}
+ kwargs_started = False
+ for param_name, param in self._signature.parameters.items():
+ if not kwargs_started:
+ if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
+ param._partial_kwarg):
+ kwargs_started = True
+ else:
+ if param_name not in self.arguments:
+ kwargs_started = True
+ continue
+
+ if not kwargs_started:
+ continue
+
+ try:
+ arg = self.arguments[param_name]
+ except KeyError:
+ pass
+ else:
+ if param.kind == _VAR_KEYWORD:
+ # **kwargs
+ kwargs.update(arg)
+ else:
+ # plain keyword argument
+ kwargs[param_name] = arg
+
+ return kwargs
+
+ def __hash__(self):
+ msg = "unhashable type: '{0}'".format(self.__class__.__name__)
+ raise TypeError(msg)
+
+ def __eq__(self, other):
+ return (issubclass(other.__class__, BoundArguments) and
+ self.signature == other.signature and
+ self.arguments == other.arguments)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
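
For illustration (not part of the vendored files): how the args and kwargs properties above split bound values. Note that a value bound to a positional-or-keyword parameter is surfaced through .args even when it was passed by keyword; the function below is made up for the example.

    from funcsigs import signature

    def connect(host, port=80, **options):
        return host, port, options

    ba = signature(connect).bind('example.test', port=8080, timeout=5)
    print(ba.args)     # ('example.test', 8080)
    print(ba.kwargs)   # {'timeout': 5}
    print(connect(*ba.args, **ba.kwargs))   # ('example.test', 8080, {'timeout': 5})
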
+
+class Signature(object):
+ '''A Signature object represents the overall signature of a function.
+ It stores a Parameter object for each parameter accepted by the
+ function, as well as information specific to the function itself.
+
+ A Signature object has the following public attributes and methods:
+
+ * parameters : OrderedDict
+ An ordered mapping of parameters' names to the corresponding
+ Parameter objects (keyword-only arguments are in the same order
+ as listed in `code.co_varnames`).
+ * return_annotation : object
+ The annotation for the return type of the function if specified.
+ If the function has no annotation for its return type, this
+ attribute is not set.
+ * bind(*args, **kwargs) -> BoundArguments
+ Creates a mapping from positional and keyword arguments to
+ parameters.
+ * bind_partial(*args, **kwargs) -> BoundArguments
+ Creates a partial mapping from positional and keyword arguments
+      to parameters (simulating 'functools.partial' behavior).
+ '''
+
+ __slots__ = ('_return_annotation', '_parameters')
+
+ _parameter_cls = Parameter
+ _bound_arguments_cls = BoundArguments
+
+ empty = _empty
+
+ def __init__(self, parameters=None, return_annotation=_empty,
+ __validate_parameters__=True):
+ '''Constructs Signature from the given list of Parameter
+ objects and 'return_annotation'. All arguments are optional.
+ '''
+
+ if parameters is None:
+ params = OrderedDict()
+ else:
+ if __validate_parameters__:
+ params = OrderedDict()
+ top_kind = _POSITIONAL_ONLY
+
+ for idx, param in enumerate(parameters):
+ kind = param.kind
+ if kind < top_kind:
+ msg = 'wrong parameter order: {0} before {1}'
+ msg = msg.format(top_kind, param.kind)
+ raise ValueError(msg)
+ else:
+ top_kind = kind
+
+ name = param.name
+ if name is None:
+ name = str(idx)
+ param = param.replace(name=name)
+
+ if name in params:
+ msg = 'duplicate parameter name: {0!r}'.format(name)
+ raise ValueError(msg)
+ params[name] = param
+ else:
+ params = OrderedDict(((param.name, param)
+ for param in parameters))
+
+ self._parameters = params
+ self._return_annotation = return_annotation
+
+ @classmethod
+ def from_function(cls, func):
+ '''Constructs Signature for the given python function'''
+
+ if not isinstance(func, types.FunctionType):
+ raise TypeError('{0!r} is not a Python function'.format(func))
+
+ Parameter = cls._parameter_cls
+
+ # Parameter information.
+ func_code = func.__code__
+ pos_count = func_code.co_argcount
+ arg_names = func_code.co_varnames
+ positional = tuple(arg_names[:pos_count])
+ keyword_only_count = getattr(func_code, 'co_kwonlyargcount', 0)
+ keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)]
+ annotations = getattr(func, '__annotations__', {})
+ defaults = func.__defaults__
+ kwdefaults = getattr(func, '__kwdefaults__', None)
+
+ if defaults:
+ pos_default_count = len(defaults)
+ else:
+ pos_default_count = 0
+
+ parameters = []
+
+ # Non-keyword-only parameters w/o defaults.
+ non_default_count = pos_count - pos_default_count
+ for name in positional[:non_default_count]:
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_POSITIONAL_OR_KEYWORD))
+
+ # ... w/ defaults.
+ for offset, name in enumerate(positional[non_default_count:]):
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_POSITIONAL_OR_KEYWORD,
+ default=defaults[offset]))
+
+        # *args: present when the code object has the 0x04 (CO_VARARGS) flag
+ if func_code.co_flags & 0x04:
+ name = arg_names[pos_count + keyword_only_count]
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_VAR_POSITIONAL))
+
+ # Keyword-only parameters.
+ for name in keyword_only:
+ default = _empty
+ if kwdefaults is not None:
+ default = kwdefaults.get(name, _empty)
+
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_KEYWORD_ONLY,
+ default=default))
+        # **kwargs: present when the code object has the 0x08 (CO_VARKEYWORDS) flag
+ if func_code.co_flags & 0x08:
+ index = pos_count + keyword_only_count
+ if func_code.co_flags & 0x04:
+ index += 1
+
+ name = arg_names[index]
+ annotation = annotations.get(name, _empty)
+ parameters.append(Parameter(name, annotation=annotation,
+ kind=_VAR_KEYWORD))
+
+ return cls(parameters,
+ return_annotation=annotations.get('return', _empty),
+ __validate_parameters__=False)
+
+ @property
+ def parameters(self):
+ try:
+ return types.MappingProxyType(self._parameters)
+ except AttributeError:
+ return OrderedDict(self._parameters.items())
+
+ @property
+ def return_annotation(self):
+ return self._return_annotation
+
+ def replace(self, parameters=_void, return_annotation=_void):
+ '''Creates a customized copy of the Signature.
+ Pass 'parameters' and/or 'return_annotation' arguments
+ to override them in the new copy.
+ '''
+
+ if parameters is _void:
+ parameters = self.parameters.values()
+
+ if return_annotation is _void:
+ return_annotation = self._return_annotation
+
+ return type(self)(parameters,
+ return_annotation=return_annotation)
+
+ def __hash__(self):
+ msg = "unhashable type: '{0}'".format(self.__class__.__name__)
+ raise TypeError(msg)
+
+ def __eq__(self, other):
+ if (not issubclass(type(other), Signature) or
+ self.return_annotation != other.return_annotation or
+ len(self.parameters) != len(other.parameters)):
+ return False
+
+ other_positions = dict((param, idx)
+ for idx, param in enumerate(other.parameters.keys()))
+
+ for idx, (param_name, param) in enumerate(self.parameters.items()):
+ if param.kind == _KEYWORD_ONLY:
+ try:
+ other_param = other.parameters[param_name]
+ except KeyError:
+ return False
+ else:
+ if param != other_param:
+ return False
+ else:
+ try:
+ other_idx = other_positions[param_name]
+ except KeyError:
+ return False
+ else:
+ if (idx != other_idx or
+ param != other.parameters[param_name]):
+ return False
+
+ return True
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def _bind(self, args, kwargs, partial=False):
+ '''Private method. Don't use directly.'''
+
+ arguments = OrderedDict()
+
+ parameters = iter(self.parameters.values())
+ parameters_ex = ()
+ arg_vals = iter(args)
+
+ if partial:
+ # Support for binding arguments to 'functools.partial' objects.
+ # See 'functools.partial' case in 'signature()' implementation
+ # for details.
+ for param_name, param in self.parameters.items():
+ if (param._partial_kwarg and param_name not in kwargs):
+ # Simulating 'functools.partial' behavior
+ kwargs[param_name] = param.default
+
+ while True:
+ # Let's iterate through the positional arguments and corresponding
+ # parameters
+ try:
+ arg_val = next(arg_vals)
+ except StopIteration:
+ # No more positional arguments
+ try:
+ param = next(parameters)
+ except StopIteration:
+ # No more parameters. That's it. Just need to check that
+ # we have no `kwargs` after this while loop
+ break
+ else:
+ if param.kind == _VAR_POSITIONAL:
+ # That's OK, just empty *args. Let's start parsing
+ # kwargs
+ break
+ elif param.name in kwargs:
+ if param.kind == _POSITIONAL_ONLY:
+ msg = '{arg!r} parameter is positional only, ' \
+ 'but was passed as a keyword'
+ msg = msg.format(arg=param.name)
+ raise TypeError(msg)
+ parameters_ex = (param,)
+ break
+ elif (param.kind == _VAR_KEYWORD or
+ param.default is not _empty):
+ # That's fine too - we have a default value for this
+                    # parameter. So, let's start parsing `kwargs`, starting
+ # with the current parameter
+ parameters_ex = (param,)
+ break
+ else:
+ if partial:
+ parameters_ex = (param,)
+ break
+ else:
+ msg = '{arg!r} parameter lacking default value'
+ msg = msg.format(arg=param.name)
+ raise TypeError(msg)
+ else:
+ # We have a positional argument to process
+ try:
+ param = next(parameters)
+ except StopIteration:
+ raise TypeError('too many positional arguments')
+ else:
+ if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
+ # Looks like we have no parameter for this positional
+ # argument
+ raise TypeError('too many positional arguments')
+
+ if param.kind == _VAR_POSITIONAL:
+ # We have an '*args'-like argument, let's fill it with
+ # all positional arguments we have left and move on to
+ # the next phase
+ values = [arg_val]
+ values.extend(arg_vals)
+ arguments[param.name] = tuple(values)
+ break
+
+ if param.name in kwargs:
+ raise TypeError('multiple values for argument '
+ '{arg!r}'.format(arg=param.name))
+
+ arguments[param.name] = arg_val
+
+ # Now, we iterate through the remaining parameters to process
+ # keyword arguments
+ kwargs_param = None
+ for param in itertools.chain(parameters_ex, parameters):
+ if param.kind == _POSITIONAL_ONLY:
+ # This should never happen in case of a properly built
+ # Signature object (but let's have this check here
+ # to ensure correct behaviour just in case)
+ raise TypeError('{arg!r} parameter is positional only, '
+ 'but was passed as a keyword'. \
+ format(arg=param.name))
+
+ if param.kind == _VAR_KEYWORD:
+ # Memorize that we have a '**kwargs'-like parameter
+ kwargs_param = param
+ continue
+
+ param_name = param.name
+ try:
+ arg_val = kwargs.pop(param_name)
+ except KeyError:
+ # We have no value for this parameter. It's fine though,
+ # if it has a default value, or it is an '*args'-like
+ # parameter, left alone by the processing of positional
+ # arguments.
+ if (not partial and param.kind != _VAR_POSITIONAL and
+ param.default is _empty):
+ raise TypeError('{arg!r} parameter lacking default value'. \
+ format(arg=param_name))
+
+ else:
+ arguments[param_name] = arg_val
+
+ if kwargs:
+ if kwargs_param is not None:
+ # Process our '**kwargs'-like parameter
+ arguments[kwargs_param.name] = kwargs
+ else:
+ raise TypeError('too many keyword arguments %r' % kwargs)
+
+ return self._bound_arguments_cls(self, arguments)
+
+ def bind(*args, **kwargs):
+        '''Get a BoundArguments object that maps the passed `args`
+        and `kwargs` to the function's signature. Raises `TypeError`
+        if the passed arguments cannot be bound.
+ '''
+ return args[0]._bind(args[1:], kwargs)
+
+ def bind_partial(self, *args, **kwargs):
+        '''Get a BoundArguments object that partially maps the
+        passed `args` and `kwargs` to the function's signature.
+        Raises `TypeError` if the passed arguments cannot be bound.
+ '''
+ return self._bind(args, kwargs, partial=True)
+
+ def __str__(self):
+ result = []
+ render_kw_only_separator = True
+ for idx, param in enumerate(self.parameters.values()):
+ formatted = str(param)
+
+ kind = param.kind
+ if kind == _VAR_POSITIONAL:
+ # OK, we have an '*args'-like parameter, so we won't need
+ # a '*' to separate keyword-only arguments
+ render_kw_only_separator = False
+ elif kind == _KEYWORD_ONLY and render_kw_only_separator:
+ # We have a keyword-only parameter to render and we haven't
+ # rendered an '*args'-like parameter before, so add a '*'
+ # separator to the parameters list ("foo(arg1, *, arg2)" case)
+ result.append('*')
+ # This condition should be only triggered once, so
+ # reset the flag
+ render_kw_only_separator = False
+
+ result.append(formatted)
+
+ rendered = '({0})'.format(', '.join(result))
+
+ if self.return_annotation is not _empty:
+ anno = formatannotation(self.return_annotation)
+ rendered += ' -> {0}'.format(anno)
+
+ return rendered
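
For illustration (not part of the vendored files): the remaining pieces of the public API defined in this module, bind_partial() and manual Signature construction, assuming the vendored package imports cleanly; the function and parameter names are made up for the example.

    from funcsigs import Parameter, Signature, signature

    def render(template, context, escape=True):
        return (template, context, escape)

    # bind_partial() tolerates missing arguments (functools.partial semantics).
    partial_ba = signature(render).bind_partial('index.html')
    print(dict(partial_ba.arguments))   # {'template': 'index.html'}

    # Signatures can also be built directly from Parameter objects.
    params = [Parameter('x', Parameter.POSITIONAL_OR_KEYWORD),
              Parameter('y', Parameter.POSITIONAL_OR_KEYWORD, default=0)]
    print(str(Signature(params)))       # (x, y=0)
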
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/funcsigs/version.py b/testing/web-platform/tests/tools/third_party/funcsigs/funcsigs/version.py
new file mode 100644
index 0000000000..7863915fa5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/funcsigs/version.py
@@ -0,0 +1 @@
+__version__ = "1.0.2"
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/requirements/development.txt b/testing/web-platform/tests/tools/third_party/funcsigs/requirements/development.txt
new file mode 100644
index 0000000000..40dedd92bf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/requirements/development.txt
@@ -0,0 +1,5 @@
+coverage
+coveralls
+flake8
+sphinx
+unittest2
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/setup.cfg b/testing/web-platform/tests/tools/third_party/funcsigs/setup.cfg
new file mode 100644
index 0000000000..5e4090017a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/setup.cfg
@@ -0,0 +1,2 @@
+[wheel]
+universal = 1
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/setup.py b/testing/web-platform/tests/tools/third_party/funcsigs/setup.py
new file mode 100644
index 0000000000..f3696888f9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/setup.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+from setuptools import setup
+import re
+import sys
+
+def load_version(filename='funcsigs/version.py'):
+ "Parse a __version__ number from a source file"
+ with open(filename) as source:
+ text = source.read()
+ match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", text)
+ if not match:
+ msg = "Unable to find version number in {}".format(filename)
+ raise RuntimeError(msg)
+ version = match.group(1)
+ return version
+
+
+setup(
+ name="funcsigs",
+ version=load_version(),
+ packages=['funcsigs'],
+ zip_safe=False,
+ author="Testing Cabal",
+ author_email="testing-in-python@lists.idyll.org",
+ url="http://funcsigs.readthedocs.org",
+ description="Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+",
+ long_description=open('README.rst').read(),
+ license="ASL",
+ extras_require = {
+ ':python_version<"2.7"': ['ordereddict'],
+ },
+ setup_requires = ["setuptools>=17.1"],
+ classifiers = [
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ 'Topic :: Software Development :: Libraries :: Python Modules'
+ ],
+ tests_require = ['unittest2'],
+ test_suite = 'unittest2.collector',
+)
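
For illustration (not part of the vendored files): what setup.py's load_version() extracts from the one-line funcsigs/version.py added above; the regex only needs to match a single quoted assignment.

    import re

    text = '__version__ = "1.0.2"\n'   # contents of funcsigs/version.py
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", text)
    print(match.group(1))   # 1.0.2
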
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/tests/__init__.py b/testing/web-platform/tests/tools/third_party/funcsigs/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/tests/test_formatannotation.py b/testing/web-platform/tests/tools/third_party/funcsigs/tests/test_formatannotation.py
new file mode 100644
index 0000000000..4b98e6037d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/tests/test_formatannotation.py
@@ -0,0 +1,17 @@
+import funcsigs
+
+import unittest2 as unittest
+
+class TestFormatAnnotation(unittest.TestCase):
+    def test_string(self):
+ self.assertEqual(funcsigs.formatannotation("annotation"),
+ "'annotation'")
+
+    def test_builtin_type(self):
+ self.assertEqual(funcsigs.formatannotation(int),
+ "int")
+
+    def test_user_type(self):
+        class dummy(object): pass
+ self.assertEqual(funcsigs.formatannotation(dummy),
+ "tests.test_formatannotation.dummy")
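
For illustration (not part of the vendored files): the behaviour these three tests pin down, shown directly against funcsigs.formatannotation; the qualified name reported for a user-defined type depends on where the class is defined, hence the module-prefixed expectation above. The class name below is made up for the example.

    import funcsigs

    class Widget(object):
        pass

    print(funcsigs.formatannotation('hi'))    # 'hi'   (non-types fall back to repr)
    print(funcsigs.formatannotation(int))     # int    (builtins keep the bare name)
    print(funcsigs.formatannotation(Widget))  # __main__.Widget when run as a script
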
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/tests/test_funcsigs.py b/testing/web-platform/tests/tools/third_party/funcsigs/tests/test_funcsigs.py
new file mode 100644
index 0000000000..a7b9cca767
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/tests/test_funcsigs.py
@@ -0,0 +1,91 @@
+import unittest2 as unittest
+
+import doctest
+import sys
+
+import funcsigs as inspect
+
+
+class TestFunctionSignatures(unittest.TestCase):
+
+ @staticmethod
+ def signature(func):
+ sig = inspect.signature(func)
+ return (tuple((param.name,
+ (Ellipsis if param.default is param.empty else param.default),
+ (Ellipsis if param.annotation is param.empty
+ else param.annotation),
+ str(param.kind).lower())
+ for param in sig.parameters.values()),
+ (Ellipsis if sig.return_annotation is sig.empty
+ else sig.return_annotation))
+
+ def test_zero_arguments(self):
+ def test():
+ pass
+ self.assertEqual(self.signature(test),
+ ((), Ellipsis))
+
+ def test_single_positional_argument(self):
+ def test(a):
+ pass
+ self.assertEqual(self.signature(test),
+ (((('a', Ellipsis, Ellipsis, "positional_or_keyword")),), Ellipsis))
+
+ def test_single_keyword_argument(self):
+ def test(a=None):
+ pass
+ self.assertEqual(self.signature(test),
+ (((('a', None, Ellipsis, "positional_or_keyword")),), Ellipsis))
+
+ def test_var_args(self):
+ def test(*args):
+ pass
+ self.assertEqual(self.signature(test),
+ (((('args', Ellipsis, Ellipsis, "var_positional")),), Ellipsis))
+
+ def test_keywords_args(self):
+ def test(**kwargs):
+ pass
+ self.assertEqual(self.signature(test),
+ (((('kwargs', Ellipsis, Ellipsis, "var_keyword")),), Ellipsis))
+
+ def test_multiple_arguments(self):
+ def test(a, b=None, *args, **kwargs):
+ pass
+ self.assertEqual(self.signature(test), ((
+ ('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', None, Ellipsis, "positional_or_keyword"),
+ ('args', Ellipsis, Ellipsis, "var_positional"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword"),
+ ), Ellipsis))
+
+ def test_has_version(self):
+ self.assertTrue(inspect.__version__)
+
+ def test_readme(self):
+        # XXX: This fails but doesn't fail the build (the doctest syntax in
+        # README.rst isn't valid on all Pythons, so it is hard to get right).
+ doctest.testfile('../README.rst')
+
+ def test_unbound_method(self):
+ self_kind = "positional_or_keyword"
+ class Test(object):
+ def method(self):
+ pass
+ def method_with_args(self, a):
+ pass
+ def method_with_varargs(*args):
+ pass
+ self.assertEqual(
+ self.signature(Test.method),
+ (((('self', Ellipsis, Ellipsis, self_kind)),), Ellipsis))
+ self.assertEqual(
+ self.signature(Test.method_with_args),
+ ((('self', Ellipsis, Ellipsis, self_kind),
+ ('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ), Ellipsis))
+ self.assertEqual(
+ self.signature(Test.method_with_varargs),
+ ((('args', Ellipsis, Ellipsis, "var_positional"),), Ellipsis))
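
For illustration (not part of the vendored files): the tuple form produced by the signature() helper at the top of this test class, written out for one concrete function so the expectations above are easier to read; the function below is made up for the example.

    import funcsigs

    def example(a, b=1, *rest, **extra):
        pass

    sig = funcsigs.signature(example)
    summary = tuple((p.name,
                     Ellipsis if p.default is p.empty else p.default,
                     Ellipsis if p.annotation is p.empty else p.annotation,
                     str(p.kind).lower())
                    for p in sig.parameters.values())
    print(summary)
    # (('a', Ellipsis, Ellipsis, 'positional_or_keyword'),
    #  ('b', 1, Ellipsis, 'positional_or_keyword'),
    #  ('rest', Ellipsis, Ellipsis, 'var_positional'),
    #  ('extra', Ellipsis, Ellipsis, 'var_keyword'))
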
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/tests/test_inspect.py b/testing/web-platform/tests/tools/third_party/funcsigs/tests/test_inspect.py
new file mode 100644
index 0000000000..98d6592fcc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/tests/test_inspect.py
@@ -0,0 +1,1002 @@
+# Copyright 2001-2013 Python Software Foundation; All Rights Reserved
+from __future__ import absolute_import, division, print_function
+import collections
+import functools
+import sys
+
+import unittest2 as unittest
+
+import funcsigs as inspect
+
+
+class TestSignatureObject(unittest.TestCase):
+ @staticmethod
+ def signature(func):
+ sig = inspect.signature(func)
+ return (tuple((param.name,
+ (Ellipsis if param.default is param.empty else param.default),
+ (Ellipsis if param.annotation is param.empty
+ else param.annotation),
+ str(param.kind).lower())
+ for param in sig.parameters.values()),
+ (Ellipsis if sig.return_annotation is sig.empty
+ else sig.return_annotation))
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_object(self):
+ S = inspect.Signature
+ P = inspect.Parameter
+
+ self.assertEqual(str(S()), '()')
+
+ def test(po, pk, *args, ko, **kwargs):
+ pass
+ sig = inspect.signature(test)
+ po = sig.parameters['po'].replace(kind=P.POSITIONAL_ONLY)
+ pk = sig.parameters['pk']
+ args = sig.parameters['args']
+ ko = sig.parameters['ko']
+ kwargs = sig.parameters['kwargs']
+
+ S((po, pk, args, ko, kwargs))
+
+ with self.assertRaisesRegex(ValueError, 'wrong parameter order'):
+ S((pk, po, args, ko, kwargs))
+
+ with self.assertRaisesRegex(ValueError, 'wrong parameter order'):
+ S((po, args, pk, ko, kwargs))
+
+ with self.assertRaisesRegex(ValueError, 'wrong parameter order'):
+ S((args, po, pk, ko, kwargs))
+
+ with self.assertRaisesRegex(ValueError, 'wrong parameter order'):
+ S((po, pk, args, kwargs, ko))
+
+ kwargs2 = kwargs.replace(name='args')
+ with self.assertRaisesRegex(ValueError, 'duplicate parameter name'):
+ S((po, pk, args, kwargs2, ko))
+""")
+
+ def test_signature_immutability(self):
+ def test(a):
+ pass
+ sig = inspect.signature(test)
+
+ with self.assertRaises(AttributeError):
+ sig.foo = 'bar'
+
+ # Python2 does not have MappingProxyType class
+ if sys.version_info[:2] < (3, 3):
+ return
+
+ with self.assertRaises(TypeError):
+ sig.parameters['a'] = None
+
+ def test_signature_on_noarg(self):
+ def test():
+ pass
+ self.assertEqual(self.signature(test), ((), Ellipsis))
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_wargs(self):
+ def test(a, b:'foo') -> 123:
+ pass
+ self.assertEqual(self.signature(test),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', Ellipsis, 'foo', "positional_or_keyword")),
+ 123))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_wkwonly(self):
+ def test(*, a:float, b:str) -> int:
+ pass
+ self.assertEqual(self.signature(test),
+ ((('a', Ellipsis, float, "keyword_only"),
+ ('b', Ellipsis, str, "keyword_only")),
+ int))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_complex_args(self):
+ def test(a, b:'foo'=10, *args:'bar', spam:'baz', ham=123, **kwargs:int):
+ pass
+ self.assertEqual(self.signature(test),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', 10, 'foo', "positional_or_keyword"),
+ ('args', Ellipsis, 'bar', "var_positional"),
+ ('spam', Ellipsis, 'baz', "keyword_only"),
+ ('ham', 123, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, int, "var_keyword")),
+ Ellipsis))
+""")
+
+ def test_signature_on_builtin_function(self):
+ with self.assertRaisesRegex(ValueError, 'not supported by signature'):
+ inspect.signature(type)
+ with self.assertRaisesRegex(ValueError, 'not supported by signature'):
+ # support for 'wrapper_descriptor'
+ inspect.signature(type.__call__)
+ if hasattr(sys, 'pypy_version_info'):
+ raise ValueError('not supported by signature')
+ with self.assertRaisesRegex(ValueError, 'not supported by signature'):
+ # support for 'method-wrapper'
+ inspect.signature(min.__call__)
+ if hasattr(sys, 'pypy_version_info'):
+ raise ValueError('not supported by signature')
+ with self.assertRaisesRegex(ValueError,
+ 'no signature found for builtin function'):
+ # support for 'method-wrapper'
+ inspect.signature(min)
+
+ def test_signature_on_non_function(self):
+ with self.assertRaisesRegex(TypeError, 'is not a callable object'):
+ inspect.signature(42)
+
+ with self.assertRaisesRegex(TypeError, 'is not a Python function'):
+ inspect.Signature.from_function(42)
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_method(self):
+ class Test:
+ def foo(self, arg1, arg2=1) -> int:
+ pass
+
+ meth = Test().foo
+
+ self.assertEqual(self.signature(meth),
+ ((('arg1', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('arg2', 1, Ellipsis, "positional_or_keyword")),
+ int))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_classmethod(self):
+ class Test:
+ @classmethod
+ def foo(cls, arg1, *, arg2=1):
+ pass
+
+ meth = Test().foo
+ self.assertEqual(self.signature(meth),
+ ((('arg1', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('arg2', 1, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ meth = Test.foo
+ self.assertEqual(self.signature(meth),
+ ((('arg1', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('arg2', 1, Ellipsis, "keyword_only")),
+ Ellipsis))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_staticmethod(self):
+ class Test:
+ @staticmethod
+ def foo(cls, *, arg):
+ pass
+
+ meth = Test().foo
+ self.assertEqual(self.signature(meth),
+ ((('cls', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('arg', Ellipsis, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ meth = Test.foo
+ self.assertEqual(self.signature(meth),
+ ((('cls', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('arg', Ellipsis, Ellipsis, "keyword_only")),
+ Ellipsis))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_partial(self):
+ from functools import partial
+
+ def test():
+ pass
+
+ self.assertEqual(self.signature(partial(test)), ((), Ellipsis))
+
+ with self.assertRaisesRegex(ValueError, "has incorrect arguments"):
+ inspect.signature(partial(test, 1))
+
+ with self.assertRaisesRegex(ValueError, "has incorrect arguments"):
+ inspect.signature(partial(test, a=1))
+
+ def test(a, b, *, c, d):
+ pass
+
+ self.assertEqual(self.signature(partial(test)),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('c', Ellipsis, Ellipsis, "keyword_only"),
+ ('d', Ellipsis, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, 1)),
+ ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('c', Ellipsis, Ellipsis, "keyword_only"),
+ ('d', Ellipsis, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, 1, c=2)),
+ ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('c', 2, Ellipsis, "keyword_only"),
+ ('d', Ellipsis, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, b=1, c=2)),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', 1, Ellipsis, "positional_or_keyword"),
+ ('c', 2, Ellipsis, "keyword_only"),
+ ('d', Ellipsis, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, 0, b=1, c=2)),
+ ((('b', 1, Ellipsis, "positional_or_keyword"),
+ ('c', 2, Ellipsis, "keyword_only"),
+ ('d', Ellipsis, Ellipsis, "keyword_only"),),
+ Ellipsis))
+
+ def test(a, *args, b, **kwargs):
+ pass
+
+ self.assertEqual(self.signature(partial(test, 1)),
+ ((('args', Ellipsis, Ellipsis, "var_positional"),
+ ('b', Ellipsis, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, 1, 2, 3)),
+ ((('args', Ellipsis, Ellipsis, "var_positional"),
+ ('b', Ellipsis, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword")),
+ Ellipsis))
+
+
+ self.assertEqual(self.signature(partial(test, 1, 2, 3, test=True)),
+ ((('args', Ellipsis, Ellipsis, "var_positional"),
+ ('b', Ellipsis, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, 1, 2, 3, test=1, b=0)),
+ ((('args', Ellipsis, Ellipsis, "var_positional"),
+ ('b', 0, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, b=0)),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('args', Ellipsis, Ellipsis, "var_positional"),
+ ('b', 0, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(partial(test, b=0, test=1)),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('args', Ellipsis, Ellipsis, "var_positional"),
+ ('b', 0, Ellipsis, "keyword_only"),
+ ('kwargs', Ellipsis, Ellipsis, "var_keyword")),
+ Ellipsis))
+
+ def test(a, b, c:int) -> 42:
+ pass
+
+ sig = test.__signature__ = inspect.signature(test)
+
+ self.assertEqual(self.signature(partial(partial(test, 1))),
+ ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('c', Ellipsis, int, "positional_or_keyword")),
+ 42))
+
+ self.assertEqual(self.signature(partial(partial(test, 1), 2)),
+ ((('c', Ellipsis, int, "positional_or_keyword"),),
+ 42))
+
+ psig = inspect.signature(partial(partial(test, 1), 2))
+
+ def foo(a):
+ return a
+ _foo = partial(partial(foo, a=10), a=20)
+ self.assertEqual(self.signature(_foo),
+ ((('a', 20, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+ # check that we don't have any side-effects in signature(),
+ # and the partial object is still functioning
+ self.assertEqual(_foo(), 20)
+
+ def foo(a, b, c):
+ return a, b, c
+ _foo = partial(partial(foo, 1, b=20), b=30)
+ self.assertEqual(self.signature(_foo),
+ ((('b', 30, Ellipsis, "positional_or_keyword"),
+ ('c', Ellipsis, Ellipsis, "positional_or_keyword")),
+ Ellipsis))
+ self.assertEqual(_foo(c=10), (1, 30, 10))
+ _foo = partial(_foo, 2) # now 'b' has two values -
+ # positional and keyword
+ with self.assertRaisesRegex(ValueError, "has incorrect arguments"):
+ inspect.signature(_foo)
+
+ def foo(a, b, c, *, d):
+ return a, b, c, d
+ _foo = partial(partial(foo, d=20, c=20), b=10, d=30)
+ self.assertEqual(self.signature(_foo),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', 10, Ellipsis, "positional_or_keyword"),
+ ('c', 20, Ellipsis, "positional_or_keyword"),
+ ('d', 30, Ellipsis, "keyword_only")),
+ Ellipsis))
+ ba = inspect.signature(_foo).bind(a=200, b=11)
+ self.assertEqual(_foo(*ba.args, **ba.kwargs), (200, 11, 20, 30))
+
+ def foo(a=1, b=2, c=3):
+ return a, b, c
+ _foo = partial(foo, a=10, c=13)
+ ba = inspect.signature(_foo).bind(11)
+ self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 2, 13))
+ ba = inspect.signature(_foo).bind(11, 12)
+ self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 12, 13))
+ ba = inspect.signature(_foo).bind(11, b=12)
+ self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 12, 13))
+ ba = inspect.signature(_foo).bind(b=12)
+ self.assertEqual(_foo(*ba.args, **ba.kwargs), (10, 12, 13))
+ _foo = partial(_foo, b=10)
+ ba = inspect.signature(_foo).bind(12, 14)
+ self.assertEqual(_foo(*ba.args, **ba.kwargs), (12, 14, 13))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_decorated(self):
+ import functools
+
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs) -> int:
+ return func(*args, **kwargs)
+ return wrapper
+
+ class Foo:
+ @decorator
+ def bar(self, a, b):
+ pass
+
+ self.assertEqual(self.signature(Foo.bar),
+ ((('self', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', Ellipsis, Ellipsis, "positional_or_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(Foo().bar),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', Ellipsis, Ellipsis, "positional_or_keyword")),
+ Ellipsis))
+
+ # Test that we handle method wrappers correctly
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs) -> int:
+ return func(42, *args, **kwargs)
+ sig = inspect.signature(func)
+ new_params = tuple(sig.parameters.values())[1:]
+ wrapper.__signature__ = sig.replace(parameters=new_params)
+ return wrapper
+
+ class Foo:
+ @decorator
+ def __call__(self, a, b):
+ pass
+
+ self.assertEqual(self.signature(Foo.__call__),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('b', Ellipsis, Ellipsis, "positional_or_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(Foo().__call__),
+ ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_on_class(self):
+ class C:
+ def __init__(self, a):
+ pass
+
+ self.assertEqual(self.signature(C),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ class CM(type):
+ def __call__(cls, a):
+ pass
+ class C(metaclass=CM):
+ def __init__(self, b):
+ pass
+
+ self.assertEqual(self.signature(C),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ class CM(type):
+ def __new__(mcls, name, bases, dct, *, foo=1):
+ return super().__new__(mcls, name, bases, dct)
+ class C(metaclass=CM):
+ def __init__(self, b):
+ pass
+
+ self.assertEqual(self.signature(C),
+ ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ self.assertEqual(self.signature(CM),
+ ((('name', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('bases', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('dct', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('foo', 1, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ class CMM(type):
+ def __new__(mcls, name, bases, dct, *, foo=1):
+ return super().__new__(mcls, name, bases, dct)
+ def __call__(cls, nm, bs, dt):
+ return type(nm, bs, dt)
+ class CM(type, metaclass=CMM):
+ def __new__(mcls, name, bases, dct, *, bar=2):
+ return super().__new__(mcls, name, bases, dct)
+ class C(metaclass=CM):
+ def __init__(self, b):
+ pass
+
+ self.assertEqual(self.signature(CMM),
+ ((('name', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('bases', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('dct', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('foo', 1, Ellipsis, "keyword_only")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(CM),
+ ((('nm', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('bs', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('dt', Ellipsis, Ellipsis, "positional_or_keyword")),
+ Ellipsis))
+
+ self.assertEqual(self.signature(C),
+ ((('b', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ class CM(type):
+ def __init__(cls, name, bases, dct, *, bar=2):
+ return super().__init__(name, bases, dct)
+ class C(metaclass=CM):
+ def __init__(self, b):
+ pass
+
+ self.assertEqual(self.signature(CM),
+ ((('name', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('bases', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('dct', Ellipsis, Ellipsis, "positional_or_keyword"),
+ ('bar', 2, Ellipsis, "keyword_only")),
+ Ellipsis))
+""")
+
+ def test_signature_on_callable_objects(self):
+ class Foo(object):
+ def __call__(self, a):
+ pass
+
+ self.assertEqual(self.signature(Foo()),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ class Spam(object):
+ pass
+ with self.assertRaisesRegex(TypeError, "is not a callable object"):
+ inspect.signature(Spam())
+
+ class Bar(Spam, Foo):
+ pass
+
+ self.assertEqual(self.signature(Bar()),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ class ToFail(object):
+ __call__ = type
+ with self.assertRaisesRegex(ValueError, "not supported by signature"):
+ inspect.signature(ToFail())
+
+ if sys.version_info[0] < 3:
+ return
+
+ class Wrapped(object):
+ pass
+ Wrapped.__wrapped__ = lambda a: None
+ self.assertEqual(self.signature(Wrapped),
+ ((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ def test_signature_on_lambdas(self):
+ self.assertEqual(self.signature((lambda a=10: a)),
+ ((('a', 10, Ellipsis, "positional_or_keyword"),),
+ Ellipsis))
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_equality(self):
+ def foo(a, *, b:int) -> float: pass
+ self.assertNotEqual(inspect.signature(foo), 42)
+
+ def bar(a, *, b:int) -> float: pass
+ self.assertEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def bar(a, *, b:int) -> int: pass
+ self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def bar(a, *, b:int): pass
+ self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def bar(a, *, b:int=42) -> float: pass
+ self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def bar(a, *, c) -> float: pass
+ self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def bar(a, b:int) -> float: pass
+ self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
+ def spam(b:int, a) -> float: pass
+ self.assertNotEqual(inspect.signature(spam), inspect.signature(bar))
+
+ def foo(*, a, b, c): pass
+ def bar(*, c, b, a): pass
+ self.assertEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def foo(*, a=1, b, c): pass
+ def bar(*, c, b, a=1): pass
+ self.assertEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def foo(pos, *, a=1, b, c): pass
+ def bar(pos, *, c, b, a=1): pass
+ self.assertEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def foo(pos, *, a, b, c): pass
+ def bar(pos, *, c, b, a=1): pass
+ self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
+
+ def foo(pos, *args, a=42, b, c, **kwargs:int): pass
+ def bar(pos, *args, c, b, a=42, **kwargs:int): pass
+ self.assertEqual(inspect.signature(foo), inspect.signature(bar))
+""")
+
+ def test_signature_unhashable(self):
+ def foo(a): pass
+ sig = inspect.signature(foo)
+ with self.assertRaisesRegex(TypeError, 'unhashable type'):
+ hash(sig)
+
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_str(self):
+ def foo(a:int=1, *, b, c=None, **kwargs) -> 42:
+ pass
+ self.assertEqual(str(inspect.signature(foo)),
+ '(a:int=1, *, b, c=None, **kwargs) -> 42')
+
+ def foo(a:int=1, *args, b, c=None, **kwargs) -> 42:
+ pass
+ self.assertEqual(str(inspect.signature(foo)),
+ '(a:int=1, *args, b, c=None, **kwargs) -> 42')
+
+ def foo():
+ pass
+ self.assertEqual(str(inspect.signature(foo)), '()')
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_str_positional_only(self):
+ P = inspect.Parameter
+
+ def test(a_po, *, b, **kwargs):
+ return a_po, kwargs
+
+ sig = inspect.signature(test)
+ new_params = list(sig.parameters.values())
+ new_params[0] = new_params[0].replace(kind=P.POSITIONAL_ONLY)
+ test.__signature__ = sig.replace(parameters=new_params)
+
+ self.assertEqual(str(inspect.signature(test)),
+ '(<a_po>, *, b, **kwargs)')
+
+ sig = inspect.signature(test)
+ new_params = list(sig.parameters.values())
+ new_params[0] = new_params[0].replace(name=None)
+ test.__signature__ = sig.replace(parameters=new_params)
+ self.assertEqual(str(inspect.signature(test)),
+ '(<0>, *, b, **kwargs)')
+""")
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_replace_anno(self):
+ def test() -> 42:
+ pass
+
+ sig = inspect.signature(test)
+ sig = sig.replace(return_annotation=None)
+ self.assertIs(sig.return_annotation, None)
+ sig = sig.replace(return_annotation=sig.empty)
+ self.assertIs(sig.return_annotation, sig.empty)
+ sig = sig.replace(return_annotation=42)
+ self.assertEqual(sig.return_annotation, 42)
+ self.assertEqual(sig, inspect.signature(test))
+""")
+
+
+class TestParameterObject(unittest.TestCase):
+
+ def test_signature_parameter_kinds(self):
+ P = inspect.Parameter
+ self.assertTrue(P.POSITIONAL_ONLY < P.POSITIONAL_OR_KEYWORD < \
+ P.VAR_POSITIONAL < P.KEYWORD_ONLY < P.VAR_KEYWORD)
+
+ self.assertEqual(str(P.POSITIONAL_ONLY), 'POSITIONAL_ONLY')
+ self.assertTrue('POSITIONAL_ONLY' in repr(P.POSITIONAL_ONLY))
+
+ def test_signature_parameter_object(self):
+ p = inspect.Parameter('foo', default=10,
+ kind=inspect.Parameter.POSITIONAL_ONLY)
+ self.assertEqual(p.name, 'foo')
+ self.assertEqual(p.default, 10)
+ self.assertIs(p.annotation, p.empty)
+ self.assertEqual(p.kind, inspect.Parameter.POSITIONAL_ONLY)
+
+ with self.assertRaisesRegex(ValueError, 'invalid value'):
+ inspect.Parameter('foo', default=10, kind='123')
+
+ with self.assertRaisesRegex(ValueError, 'not a valid parameter name'):
+ inspect.Parameter('1', kind=inspect.Parameter.VAR_KEYWORD)
+
+ with self.assertRaisesRegex(ValueError,
+ 'non-positional-only parameter'):
+ inspect.Parameter(None, kind=inspect.Parameter.VAR_KEYWORD)
+
+ with self.assertRaisesRegex(ValueError, 'cannot have default values'):
+ inspect.Parameter('a', default=42,
+ kind=inspect.Parameter.VAR_KEYWORD)
+
+ with self.assertRaisesRegex(ValueError, 'cannot have default values'):
+ inspect.Parameter('a', default=42,
+ kind=inspect.Parameter.VAR_POSITIONAL)
+
+ p = inspect.Parameter('a', default=42,
+ kind=inspect.Parameter.POSITIONAL_OR_KEYWORD)
+ with self.assertRaisesRegex(ValueError, 'cannot have default values'):
+ p.replace(kind=inspect.Parameter.VAR_POSITIONAL)
+
+ self.assertTrue(repr(p).startswith('<Parameter'))
+
+ def test_signature_parameter_equality(self):
+ P = inspect.Parameter
+ p = P('foo', default=42, kind=inspect.Parameter.KEYWORD_ONLY)
+
+ self.assertEqual(p, p)
+ self.assertNotEqual(p, 42)
+
+ self.assertEqual(p, P('foo', default=42,
+ kind=inspect.Parameter.KEYWORD_ONLY))
+
+ def test_signature_parameter_unhashable(self):
+ p = inspect.Parameter('foo', default=42,
+ kind=inspect.Parameter.KEYWORD_ONLY)
+
+ with self.assertRaisesRegex(TypeError, 'unhashable type'):
+ hash(p)
+
+ def test_signature_parameter_replace(self):
+ p = inspect.Parameter('foo', default=42,
+ kind=inspect.Parameter.KEYWORD_ONLY)
+
+ self.assertIsNot(p, p.replace())
+ self.assertEqual(p, p.replace())
+
+ p2 = p.replace(annotation=1)
+ self.assertEqual(p2.annotation, 1)
+ p2 = p2.replace(annotation=p2.empty)
+ self.assertEqual(p, p2)
+
+ p2 = p2.replace(name='bar')
+ self.assertEqual(p2.name, 'bar')
+ self.assertNotEqual(p2, p)
+
+ with self.assertRaisesRegex(ValueError, 'not a valid parameter name'):
+ p2 = p2.replace(name=p2.empty)
+
+ p2 = p2.replace(name='foo', default=None)
+ self.assertIs(p2.default, None)
+ self.assertNotEqual(p2, p)
+
+ p2 = p2.replace(name='foo', default=p2.empty)
+ self.assertIs(p2.default, p2.empty)
+
+
+ p2 = p2.replace(default=42, kind=p2.POSITIONAL_OR_KEYWORD)
+ self.assertEqual(p2.kind, p2.POSITIONAL_OR_KEYWORD)
+ self.assertNotEqual(p2, p)
+
+ with self.assertRaisesRegex(ValueError, 'invalid value for'):
+ p2 = p2.replace(kind=p2.empty)
+
+ p2 = p2.replace(kind=p2.KEYWORD_ONLY)
+ self.assertEqual(p2, p)
+
+ def test_signature_parameter_positional_only(self):
+ p = inspect.Parameter(None, kind=inspect.Parameter.POSITIONAL_ONLY)
+ self.assertEqual(str(p), '<>')
+
+ p = p.replace(name='1')
+ self.assertEqual(str(p), '<1>')
+
+ def test_signature_parameter_immutability(self):
+ p = inspect.Parameter(None, kind=inspect.Parameter.POSITIONAL_ONLY)
+
+ with self.assertRaises(AttributeError):
+ p.foo = 'bar'
+
+ with self.assertRaises(AttributeError):
+ p.kind = 123
+
+
+class TestSignatureBind(unittest.TestCase):
+ @staticmethod
+ def call(func, *args, **kwargs):
+ sig = inspect.signature(func)
+ ba = sig.bind(*args, **kwargs)
+ return func(*ba.args, **ba.kwargs)
+
+ def test_signature_bind_empty(self):
+ def test():
+ return 42
+
+ self.assertEqual(self.call(test), 42)
+ with self.assertRaisesRegex(TypeError, 'too many positional arguments'):
+ self.call(test, 1)
+ with self.assertRaisesRegex(TypeError, 'too many positional arguments'):
+ self.call(test, 1, spam=10)
+ with self.assertRaisesRegex(TypeError, 'too many keyword arguments'):
+ self.call(test, spam=1)
+
+ def test_signature_bind_var(self):
+ def test(*args, **kwargs):
+ return args, kwargs
+
+ self.assertEqual(self.call(test), ((), {}))
+ self.assertEqual(self.call(test, 1), ((1,), {}))
+ self.assertEqual(self.call(test, 1, 2), ((1, 2), {}))
+ self.assertEqual(self.call(test, foo='bar'), ((), {'foo': 'bar'}))
+ self.assertEqual(self.call(test, 1, foo='bar'), ((1,), {'foo': 'bar'}))
+ self.assertEqual(self.call(test, args=10), ((), {'args': 10}))
+ self.assertEqual(self.call(test, 1, 2, foo='bar'),
+ ((1, 2), {'foo': 'bar'}))
+
+ def test_signature_bind_just_args(self):
+ def test(a, b, c):
+ return a, b, c
+
+ self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3))
+
+ with self.assertRaisesRegex(TypeError, 'too many positional arguments'):
+ self.call(test, 1, 2, 3, 4)
+
+ with self.assertRaisesRegex(TypeError, "'b' parameter lacking default"):
+ self.call(test, 1)
+
+ with self.assertRaisesRegex(TypeError, "'a' parameter lacking default"):
+ self.call(test)
+
+ def test(a, b, c=10):
+ return a, b, c
+ self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3))
+ self.assertEqual(self.call(test, 1, 2), (1, 2, 10))
+
+ def test(a=1, b=2, c=3):
+ return a, b, c
+ self.assertEqual(self.call(test, a=10, c=13), (10, 2, 13))
+ self.assertEqual(self.call(test, a=10), (10, 2, 3))
+ self.assertEqual(self.call(test, b=10), (1, 10, 3))
+
+ def test_signature_bind_varargs_order(self):
+ def test(*args):
+ return args
+
+ self.assertEqual(self.call(test), ())
+ self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3))
+
+ def test_signature_bind_args_and_varargs(self):
+ def test(a, b, c=3, *args):
+ return a, b, c, args
+
+ self.assertEqual(self.call(test, 1, 2, 3, 4, 5), (1, 2, 3, (4, 5)))
+ self.assertEqual(self.call(test, 1, 2), (1, 2, 3, ()))
+ self.assertEqual(self.call(test, b=1, a=2), (2, 1, 3, ()))
+ self.assertEqual(self.call(test, 1, b=2), (1, 2, 3, ()))
+
+ with self.assertRaisesRegex(TypeError,
+ "multiple values for argument 'c'"):
+ self.call(test, 1, 2, 3, c=4)
+
+ def test_signature_bind_just_kwargs(self):
+ def test(**kwargs):
+ return kwargs
+
+ self.assertEqual(self.call(test), {})
+ self.assertEqual(self.call(test, foo='bar', spam='ham'),
+ {'foo': 'bar', 'spam': 'ham'})
+
+ def test_signature_bind_args_and_kwargs(self):
+ def test(a, b, c=3, **kwargs):
+ return a, b, c, kwargs
+
+ self.assertEqual(self.call(test, 1, 2), (1, 2, 3, {}))
+ self.assertEqual(self.call(test, 1, 2, foo='bar', spam='ham'),
+ (1, 2, 3, {'foo': 'bar', 'spam': 'ham'}))
+ self.assertEqual(self.call(test, b=2, a=1, foo='bar', spam='ham'),
+ (1, 2, 3, {'foo': 'bar', 'spam': 'ham'}))
+ self.assertEqual(self.call(test, a=1, b=2, foo='bar', spam='ham'),
+ (1, 2, 3, {'foo': 'bar', 'spam': 'ham'}))
+ self.assertEqual(self.call(test, 1, b=2, foo='bar', spam='ham'),
+ (1, 2, 3, {'foo': 'bar', 'spam': 'ham'}))
+ self.assertEqual(self.call(test, 1, b=2, c=4, foo='bar', spam='ham'),
+ (1, 2, 4, {'foo': 'bar', 'spam': 'ham'}))
+ self.assertEqual(self.call(test, 1, 2, 4, foo='bar'),
+ (1, 2, 4, {'foo': 'bar'}))
+ self.assertEqual(self.call(test, c=5, a=4, b=3),
+ (4, 3, 5, {}))
+
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_bind_kwonly(self):
+ def test(*, foo):
+ return foo
+ with self.assertRaisesRegex(TypeError,
+ 'too many positional arguments'):
+ self.call(test, 1)
+ self.assertEqual(self.call(test, foo=1), 1)
+
+ def test(a, *, foo=1, bar):
+ return foo
+ with self.assertRaisesRegex(TypeError,
+ "'bar' parameter lacking default value"):
+ self.call(test, 1)
+
+ def test(foo, *, bar):
+ return foo, bar
+ self.assertEqual(self.call(test, 1, bar=2), (1, 2))
+ self.assertEqual(self.call(test, bar=2, foo=1), (1, 2))
+
+ with self.assertRaisesRegex(TypeError,
+ 'too many keyword arguments'):
+ self.call(test, bar=2, foo=1, spam=10)
+
+ with self.assertRaisesRegex(TypeError,
+ 'too many positional arguments'):
+ self.call(test, 1, 2)
+
+ with self.assertRaisesRegex(TypeError,
+ 'too many positional arguments'):
+ self.call(test, 1, 2, bar=2)
+
+ with self.assertRaisesRegex(TypeError,
+ 'too many keyword arguments'):
+ self.call(test, 1, bar=2, spam='ham')
+
+ with self.assertRaisesRegex(TypeError,
+ "'bar' parameter lacking default value"):
+ self.call(test, 1)
+
+ def test(foo, *, bar, **bin):
+ return foo, bar, bin
+ self.assertEqual(self.call(test, 1, bar=2), (1, 2, {}))
+ self.assertEqual(self.call(test, foo=1, bar=2), (1, 2, {}))
+ self.assertEqual(self.call(test, 1, bar=2, spam='ham'),
+ (1, 2, {'spam': 'ham'}))
+ self.assertEqual(self.call(test, spam='ham', foo=1, bar=2),
+ (1, 2, {'spam': 'ham'}))
+ with self.assertRaisesRegex(TypeError,
+ "'foo' parameter lacking default value"):
+ self.call(test, spam='ham', bar=2)
+ self.assertEqual(self.call(test, 1, bar=2, bin=1, spam=10),
+ (1, 2, {'bin': 1, 'spam': 10}))
+""")
+#
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_bind_arguments(self):
+ def test(a, *args, b, z=100, **kwargs):
+ pass
+ sig = inspect.signature(test)
+ ba = sig.bind(10, 20, b=30, c=40, args=50, kwargs=60)
+ # we won't have a 'z' argument in the bound arguments object, as we didn't
+ # pass it to 'bind'
+ self.assertEqual(tuple(ba.arguments.items()),
+ (('a', 10), ('args', (20,)), ('b', 30),
+ ('kwargs', {'c': 40, 'args': 50, 'kwargs': 60})))
+ self.assertEqual(ba.kwargs,
+ {'b': 30, 'c': 40, 'args': 50, 'kwargs': 60})
+ self.assertEqual(ba.args, (10, 20))
+""")
+#
+ if sys.version_info[0] > 2:
+ exec("""
+def test_signature_bind_positional_only(self):
+ P = inspect.Parameter
+
+ def test(a_po, b_po, c_po=3, foo=42, *, bar=50, **kwargs):
+ return a_po, b_po, c_po, foo, bar, kwargs
+
+ sig = inspect.signature(test)
+ new_params = collections.OrderedDict(tuple(sig.parameters.items()))
+ for name in ('a_po', 'b_po', 'c_po'):
+ new_params[name] = new_params[name].replace(kind=P.POSITIONAL_ONLY)
+ new_sig = sig.replace(parameters=new_params.values())
+ test.__signature__ = new_sig
+
+ self.assertEqual(self.call(test, 1, 2, 4, 5, bar=6),
+ (1, 2, 4, 5, 6, {}))
+
+ with self.assertRaisesRegex(TypeError, "parameter is positional only"):
+ self.call(test, 1, 2, c_po=4)
+
+ with self.assertRaisesRegex(TypeError, "parameter is positional only"):
+ self.call(test, a_po=1, b_po=2)
+""")
+
+ def test_bind_self(self):
+ class F:
+ def f(a, self):
+ return a, self
+ an_f = F()
+ partial_f = functools.partial(F.f, an_f)
+ ba = inspect.signature(partial_f).bind(self=10)
+ self.assertEqual((an_f, 10), partial_f(*ba.args, **ba.kwargs))
+
+
+class TestBoundArguments(unittest.TestCase):
+
+ def test_signature_bound_arguments_unhashable(self):
+ def foo(a): pass
+ ba = inspect.signature(foo).bind(1)
+
+ with self.assertRaisesRegex(TypeError, 'unhashable type'):
+ hash(ba)
+
+ def test_signature_bound_arguments_equality(self):
+ def foo(a): pass
+ ba = inspect.signature(foo).bind(1)
+ self.assertEqual(ba, ba)
+
+ ba2 = inspect.signature(foo).bind(1)
+ self.assertEqual(ba, ba2)
+
+ ba3 = inspect.signature(foo).bind(2)
+ self.assertNotEqual(ba, ba3)
+ ba3.arguments['a'] = 1
+ self.assertEqual(ba, ba3)
+
+ def bar(b): pass
+ ba4 = inspect.signature(bar).bind(1)
+ self.assertNotEqual(ba, ba4)
diff --git a/testing/web-platform/tests/tools/third_party/funcsigs/tox.ini b/testing/web-platform/tests/tools/third_party/funcsigs/tox.ini
new file mode 100644
index 0000000000..1873c744a0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/funcsigs/tox.ini
@@ -0,0 +1,8 @@
+[tox]
+envlist = py26, py27, py33, py34, py35, py36, pypy, pypy3
+
+[testenv]
+deps = -rrequirements/development.txt
+commands =
+ coverage run setup.py test
+ coverage report --show-missing
diff --git a/testing/web-platform/tests/tools/third_party/h2/.coveragerc b/testing/web-platform/tests/tools/third_party/h2/.coveragerc
new file mode 100644
index 0000000000..153e38d3e0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/.coveragerc
@@ -0,0 +1,18 @@
+[run]
+branch = True
+source = h2
+
+[report]
+fail_under = 100
+show_missing = True
+exclude_lines =
+ pragma: no cover
+ .*:.* # Python \d.*
+ assert False, "Should not be reachable"
+ .*:.* # Platform-specific:
+
+[paths]
+source =
+ h2/
+ .tox/*/lib/python*/site-packages/h2
+ .tox/pypy*/site-packages/h2
diff --git a/testing/web-platform/tests/tools/third_party/h2/CONTRIBUTORS.rst b/testing/web-platform/tests/tools/third_party/h2/CONTRIBUTORS.rst
new file mode 100644
index 0000000000..5c4849fef0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/CONTRIBUTORS.rst
@@ -0,0 +1,115 @@
+Hyper-h2 is written and maintained by Cory Benfield and various contributors:
+
+Development Lead
+````````````````
+
+- Cory Benfield <cory@lukasa.co.uk>
+
+Contributors
+````````````
+
+In chronological order:
+
+- Robert Collins (@rbtcollins)
+
+ - Provided invaluable and substantial early input into API design and layout.
+ - Added code preventing ``Proxy-Authorization`` from getting added to HPACK
+ compression contexts.
+
+- Maximilian Hils (@maximilianhils)
+
+ - Added asyncio example.
+
+- Alex Chan (@alexwlchan)
+
+ - Fixed docstring, added URLs to README.
+
+- Glyph Lefkowitz (@glyph)
+
+ - Improved example Twisted server.
+
+- Thomas Kriechbaumer (@Kriechi)
+
+ - Fixed incorrect arguments being passed to ``StreamIDTooLowError``.
+ - Added new arguments to ``close_connection``.
+
+- WeiZheng Xu (@boyxuper)
+
+ - Reported a bug relating to hyper-h2's updating of the connection window in
+ response to SETTINGS_INITIAL_WINDOW_SIZE.
+
+- Evgeny Tataurov (@etataurov)
+
+ - Added the ``additional_data`` field to the ``ConnectionTerminated`` event.
+
+- Brett Cannon (@brettcannon)
+
+ - Changed Travis status icon to SVG.
+ - Documentation improvements.
+
+- Felix Yan (@felixonmars)
+
+ - Widened allowed version numbers of enum34.
+ - Updated test requirements.
+
+- Keith Dart (@kdart)
+
+ - Fixed curio example server flow control problems.
+
+- Gil Gonçalves (@LuRsT)
+
+ - Added code forbidding non-RFC 7540 pseudo-headers.
+
+- Louis Taylor (@kragniz)
+
+ - Cleaned up the README.
+
+- Berker Peksag (@berkerpeksag)
+
+ - Improved the docstring for ``StreamIDTooLowError``.
+
+- Adrian Lewis (@aidylewis)
+
+ - Fixed the broken Twisted HEAD request example.
+ - Added verification logic for ensuring that responses to HEAD requests have
+ no body.
+
+- Lorenzo (@Mec-iS)
+
+ - Changed documentation to stop using dictionaries for header blocks.
+
+- Kracekumar Ramaraj (@kracekumar)
+
+ - Cleaned up Twisted example.
+
+- @mlvnd
+
+ - Cleaned up curio example.
+
+- Tom Offermann (@toffer)
+
+ - Added Tornado example.
+
+- Tarashish Mishra (@sunu)
+
+ - Added code to reject header fields with leading/trailing whitespace.
+ - Added code to remove leading/trailing whitespace from sent header fields.
+
+- Nate Prewitt (@nateprewitt)
+
+ - Added code to validate that trailers do not contain pseudo-header fields.
+
+- Chun-Han, Hsiao (@chhsiao90)
+
+ - Fixed a bug with invalid ``HTTP2-Settings`` header output in plaintext
+ upgrade.
+
+- Bhavishya (@bhavishyagopesh)
+
+ - Added support for equality testing to ``h2.settings.Settings`` objects.
+
+- Fred Thomsen (@fredthomsen)
+
+ - Added logging.
+ - Enhanced equality testing of ``h2.settings.Settings`` objects with
+ ``hypothesis``.
diff --git a/testing/web-platform/tests/tools/third_party/h2/HISTORY.rst b/testing/web-platform/tests/tools/third_party/h2/HISTORY.rst
new file mode 100644
index 0000000000..5244cd8a94
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/HISTORY.rst
@@ -0,0 +1,760 @@
+Release History
+===============
+
+3.2.0 (2020-02-08)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Receiving DATA frames on closed (or reset) streams now properly emits a
+ WINDOW_UPDATE to keep the connection flow window topped up.
+
+API Changes (Backward-Incompatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- ``h2.config.logger`` now uses a ``trace(...)`` function, in addition
+ to ``debug(...)``. If you defined a custom logger object, you need to handle
+ these new function calls.
+
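+A minimal sketch of a conforming logger object (the ``H2Logger`` name here is
+only illustrative; the standard library has no TRACE level, so ``trace`` simply
+reuses ``DEBUG``):
+
+.. code-block:: python
+
+    import logging
+
+    import h2.config
+
+    class H2Logger:
+        """Logger object exposing the debug() and trace() calls h2 makes."""
+
+        def __init__(self, name):
+            self._log = logging.getLogger(name)
+
+        def debug(self, fmtstr, *args):
+            self._log.debug(fmtstr, *args)
+
+        def trace(self, fmtstr, *args):
+            # The standard library defines no TRACE level; reuse DEBUG.
+            self._log.debug(fmtstr, *args)
+
+    config = h2.config.H2Configuration(logger=H2Logger("h2"))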
+
+3.1.1 (2019-08-02)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Ignore WINDOW_UPDATE and RST_STREAM frames received after stream
+ closure.
+
+
+3.1.0 (2019-01-22)
+------------------
+
+API Changes (Backward-Incompatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- ``h2.connection.H2Connection.data_to_send`` first and only argument ``amt``
+ was renamed to ``amount``.
+- Support for Python 3.3 has been removed.
+
+API Changes (Backward-Compatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- ``h2.connection.H2Connection.send_data`` now supports ``data`` parameter
+ being a ``memoryview`` object.
+- Refactor ping-related events: a ``h2.events.PingReceived`` event is fired
+ when a PING frame is received and a ``h2.events.PingAckReceived`` event is
+ fired when a PING frame with an ACK flag is received.
+ ``h2.events.PingAcknowledged`` is deprecated in favour of the identical
+ ``h2.events.PingAckReceived``.
+- Added ``ENABLE_CONNECT_PROTOCOL`` to ``h2.settings.SettingCodes``.
+- Support ``CONNECT`` requests with a ``:protocol`` pseudo header
+ thereby supporting RFC 8441.
+- A limit to the number of closed streams kept in memory by the
+ connection is applied. It can be configured by
+ ``h2.connection.H2Connection.MAX_CLOSED_STREAMS``.
+
+Bugfixes
+~~~~~~~~
+
+- Debug logging when ``stream_id`` is ``None`` no longer raises an error.
+
+3.0.1 (2017-04-03)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- CONTINUATION frames sent on closed streams previously caused stream errors
+ of type STREAM_CLOSED. RFC 7540 § 6.10 requires that these be connection
+ errors of type PROTOCOL_ERROR, and so this release changes to match that
+ behaviour.
+- Remote peers incrementing their inbound connection window beyond the maximum
+ allowed value now cause stream-level errors, rather than connection-level
+ errors, allowing connections to stay up longer.
+- h2 now rejects receiving and sending request header blocks that are missing
+ any of the mandatory pseudo-header fields (:path, :scheme, and :method).
+- h2 now rejects receiving and sending request header blocks that have an empty
+ :path pseudo-header value.
+- h2 now rejects receiving and sending request header blocks that contain
+ response-only pseudo-headers, and vice versa.
+- h2 now correctly respects user-initiated changes to the HEADER_TABLE_SIZE
+ local setting, and ensures that if users shrink or increase the header
+ table size it is policed appropriately.
+
+
+2.6.2 (2017-04-03)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- CONTINUATION frames sent on closed streams previously caused stream errors
+ of type STREAM_CLOSED. RFC 7540 § 6.10 requires that these be connection
+ errors of type PROTOCOL_ERROR, and so this release changes to match that
+ behaviour.
+- Remote peers incrementing their inbound connection window beyond the maximum
+ allowed value now cause stream-level errors, rather than connection-level
+ errors, allowing connections to stay up longer.
+- h2 now rejects receiving and sending request header blocks that are missing
+ any of the mandatory pseudo-header fields (:path, :scheme, and :method).
+- h2 now rejects receiving and sending request header blocks that have an empty
+ :path pseudo-header value.
+- h2 now rejects receiving and sending request header blocks that contain
+ response-only pseudo-headers, and vice versa.
+- h2 now correctly respects user-initiated changes to the HEADER_TABLE_SIZE
+ local setting, and ensures that if users shrink or increase the header
+ table size it is policed appropriately.
+
+
+2.5.4 (2017-04-03)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- CONTINUATION frames sent on closed streams previously caused stream errors
+ of type STREAM_CLOSED. RFC 7540 § 6.10 requires that these be connection
+ errors of type PROTOCOL_ERROR, and so this release changes to match that
+ behaviour.
+- Remote peers incrementing their inbound connection window beyond the maximum
+ allowed value now cause stream-level errors, rather than connection-level
+ errors, allowing connections to stay up longer.
+- h2 now correctly respects user-initiated changes to the HEADER_TABLE_SIZE
+ local setting, and ensures that if users shrink or increase the header
+ table size it is policed appropriately.
+
+
+3.0.0 (2017-03-24)
+------------------
+
+API Changes (Backward-Incompatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- By default, hyper-h2 now joins together received cookie header fields, per
+ RFC 7540 Section 8.1.2.5.
+- Added a ``normalize_inbound_headers`` flag to the ``H2Configuration`` object
+ that defaults to ``True``. Setting this to ``False`` changes the behaviour
+ from the previous point back to the v2 behaviour.
+- Removed deprecated fields from ``h2.errors`` module.
+- Removed deprecated fields from ``h2.settings`` module.
+- Removed deprecated ``client_side`` and ``header_encoding`` arguments from
+ ``H2Connection``.
+- Removed deprecated ``client_side`` and ``header_encoding`` properties from
+ ``H2Connection``.
+- ``dict`` objects are no longer allowed for user-supplied headers.
+- The default header encoding is now ``None``, not ``utf-8``: this means that
+ all events that carry headers now return those headers as byte strings by
+ default. The header encoding can be set back to ``utf-8`` to restore the old
+ behaviour.
+
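+A minimal sketch of opting back into decoded (text) headers:
+
+.. code-block:: python
+
+    import h2.config
+    import h2.connection
+
+    # Decode received header bytes to text with UTF-8, as in the 2.x series.
+    config = h2.config.H2Configuration(header_encoding='utf-8')
+    conn = h2.connection.H2Connection(config=config)
+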
+API Changes (Backward-Compatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Added new ``UnknownFrameReceived`` event that fires when unknown extension
+ frames have been received. This only fires when using hyperframe 5.0 or
+ later: earlier versions of hyperframe cause us to silently ignore extension
+ frames.
+
+Bugfixes
+~~~~~~~~
+
+None
+
+
+2.6.1 (2017-03-16)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Allowed hyperframe v5 support while continuing to ignore unexpected frames.
+
+
+2.5.3 (2017-03-16)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Allowed hyperframe v5 support while continuing to ignore unexpected frames.
+
+
+2.4.4 (2017-03-16)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Allowed hyperframe v5 support while continuing to ignore unexpected frames.
+
+
+2.6.0 (2017-02-28)
+------------------
+
+API Changes (Backward-Compatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Added a new ``h2.events.Event`` class that acts as a base class for all
+ events.
+- Rather than reject outbound Connection-specific headers, h2 will now
+ normalize the header block by removing them.
+- Implement equality for the ``h2.settings.Settings`` class.
+- Added ``h2.settings.SettingCodes``, an enum that is used to store all the
+ HTTP/2 setting codes. This allows us to use a better printed representation of
+ the setting code in most places that it is used.
+- The ``setting`` field in ``ChangedSetting`` for the ``RemoteSettingsChanged``
+ and ``SettingsAcknowledged`` events has been updated to be instances of
+ ``SettingCodes`` whenever they correspond to a known setting code. When they
+ are an unknown setting code, they are instead ``int``. As ``SettingCodes`` is
+ a subclass of ``int``, this is non-breaking.
+- Deprecated the other fields in ``h2.settings``. These will be removed in
+ 3.0.0.
+- Added an optional ``pad_length`` parameter to ``H2Connection.send_data``
+ to allow the user to include padding on a data frame.
+- Added a new parameter to the ``h2.config.H2Configuration`` initializer which
+ takes a logger. Providing a logger object that conforms to the requirements
+ of this module allows logging to be used in different environments.
+
+Bugfixes
+~~~~~~~~
+
+- Correctly reject pushed request header blocks whenever they are malformed.
+- Correctly normalize pushed request header blocks whenever they have
+ normalizable header fields.
+- Remote peers are now allowed to send zero or any positive number as a value
+ for ``SETTINGS_MAX_HEADER_LIST_SIZE``, where previously sending zero would
+ raise an ``InvalidSettingsValueError``.
+- Resolved issue where the ``HTTP2-Settings`` header value for plaintext
+ upgrade that was emitted by ``initiate_upgrade_connection`` included the
+ *entire* ``SETTINGS`` frame, instead of just the payload.
+- Resolved issue where the ``HTTP2-Settings`` header value sent by a client for
+ plaintext upgrade would be ignored by ``initiate_upgrade_connection``, rather
+ than have those settings applied appropriately.
+- Resolved an issue whereby certain frames received from a peer in the CLOSED
+ state would trigger connection errors when RFC 7540 says they should have
+ triggered stream errors instead. Added more detailed stream closure tracking
+ to ensure we don't throw away connections unnecessarily.
+
+
+2.5.2 (2017-01-27)
+------------------
+
+- Resolved issue where the ``HTTP2-Settings`` header value for plaintext
+ upgrade that was emitted by ``initiate_upgrade_connection`` included the
+ *entire* ``SETTINGS`` frame, instead of just the payload.
+- Resolved issue where the ``HTTP2-Settings`` header value sent by a client for
+ plaintext upgrade would be ignored by ``initiate_upgrade_connection``, rather
+ than have those settings applied appropriately.
+
+
+2.4.3 (2017-01-27)
+------------------
+
+- Resolved issue where the ``HTTP2-Settings`` header value for plaintext
+ upgrade that was emitted by ``initiate_upgrade_connection`` included the
+ *entire* ``SETTINGS`` frame, instead of just the payload.
+- Resolved issue where the ``HTTP2-Settings`` header value sent by a client for
+ plaintext upgrade would be ignored by ``initiate_upgrade_connection``, rather
+ than have those settings applied appropriately.
+
+
+2.3.4 (2017-01-27)
+------------------
+
+- Resolved issue where the ``HTTP2-Settings`` header value for plaintext
+ upgrade that was emitted by ``initiate_upgrade_connection`` included the
+ *entire* ``SETTINGS`` frame, instead of just the payload.
+- Resolved issue where the ``HTTP2-Settings`` header value sent by a client for
+ plaintext upgrade would be ignored by ``initiate_upgrade_connection``, rather
+ than have those settings applied appropriately.
+
+
+2.5.1 (2016-12-17)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Remote peers are now allowed to send zero or any positive number as a value
+ for ``SETTINGS_MAX_HEADER_LIST_SIZE``, where previously sending zero would
+ raise an ``InvalidSettingsValueError``.
+
+
+2.5.0 (2016-10-25)
+------------------
+
+API Changes (Backward-Compatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Added a new ``H2Configuration`` object that allows rich configuration of
+ a ``H2Connection``. This object supersedes the prior keyword arguments to the
+ ``H2Connection`` object, which are now deprecated and will be removed in 3.0.
+- Added support for automated window management via the
+ ``acknowledge_received_data`` method. See the documentation for more details.
+- Added a ``DenialOfServiceError`` that is raised whenever a behaviour that
+ looks like a DoS attempt is encountered: for example, an overly large
+ decompressed header list. This is a subclass of ``ProtocolError``.
+- Added support for setting and managing ``SETTINGS_MAX_HEADER_LIST_SIZE``.
+ This setting now defaults to 64kB.
+- Added ``h2.errors.ErrorCodes``, an enum that is used to store all the HTTP/2
+ error codes. This allows us to use a better printed representation of the
+ error code in most places that it is used.
+- The ``error_code`` fields on ``ConnectionTerminated`` and ``StreamReset``
+ events have been updated to be instances of ``ErrorCodes`` whenever they
+ correspond to a known error code. When they are an unknown error code, they
+ are instead ``int``. As ``ErrorCodes`` is a subclass of ``int``, this is
+ non-breaking.
+- Deprecated the other fields in ``h2.errors``. These will be removed in 3.0.0.
+
+Bugfixes
+~~~~~~~~
+
+- Correctly reject request header blocks with neither :authority nor Host
+ headers, or header blocks which contain mismatched :authority and Host
+ headers, per RFC 7540 Section 8.1.2.3.
+- Correctly expect that responses to HEAD requests will have no body regardless
+ of the value of the Content-Length header, and reject those that do.
+- Correctly refuse to send header blocks that contain neither :authority nor
+ Host headers, or header blocks which contain mismatched :authority and Host
+ headers, per RFC 7540 Section 8.1.2.3.
+- Hyper-h2 will now reject header field names and values that contain leading
+ or trailing whitespace.
+- Correctly strip leading/trailing whitespace from header field names and
+ values.
+- Correctly refuse to send header blocks with a TE header whose value is not
+ ``trailers``, per RFC 7540 Section 8.1.2.2.
+- Correctly refuse to send header blocks with connection-specific headers,
+ per RFC 7540 Section 8.1.2.2.
+- Correctly refuse to send header blocks that contain duplicate pseudo-header
+ fields, or with pseudo-header fields that appear after ordinary header fields,
+ per RFC 7540 Section 8.1.2.1.
+
+ This may cause passing a dictionary as the header block to ``send_headers``
+ to throw a ``ProtocolError``, because dictionaries are unordered and so they
+ may trip this check. Passing dictionaries here is deprecated, and callers
+ should change to using a sequence of 2-tuples as their header blocks.
+- Correctly reject trailers that contain HTTP/2 pseudo-header fields, per RFC
+ 7540 Section 8.1.2.1.
+- Correctly refuse to send trailers that contain HTTP/2 pseudo-header fields,
+ per RFC 7540 Section 8.1.2.1.
+- Correctly reject responses that do not contain the ``:status`` header field,
+ per RFC 7540 Section 8.1.2.4.
+- Correctly refuse to send responses that do not contain the ``:status`` header
+ field, per RFC 7540 Section 8.1.2.4.
+- Correctly update the maximum frame size when the user updates the value of
+ that setting. Prior to this release, if the user updated the maximum frame
+ size hyper-h2 would ignore the update, preventing the remote peer from using
+ the higher frame sizes.
+
+2.4.2 (2016-10-25)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Correctly update the maximum frame size when the user updates the value of
+ that setting. Prior to this release, if the user updated the maximum frame
+ size hyper-h2 would ignore the update, preventing the remote peer from using
+ the higher frame sizes.
+
+2.3.3 (2016-10-25)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Correctly update the maximum frame size when the user updates the value of
+ that setting. Prior to this release, if the user updated the maximum frame
+ size hyper-h2 would ignore the update, preventing the remote peer from using
+ the higher frame sizes.
+
+2.2.7 (2016-10-25)
+------------------
+
+*Final 2.2.X release*
+
+Bugfixes
+~~~~~~~~
+
+- Correctly update the maximum frame size when the user updates the value of
+ that setting. Prior to this release, if the user updated the maximum frame
+ size hyper-h2 would ignore the update, preventing the remote peer from using
+ the higher frame sizes.
+
+2.4.1 (2016-08-23)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Correctly expect that responses to HEAD requests will have no body regardless
+ of the value of the Content-Length header, and reject those that do.
+
+2.3.2 (2016-08-23)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Correctly expect that responses to HEAD requests will have no body regardless
+ of the value of the Content-Length header, and reject those that do.
+
+2.4.0 (2016-07-01)
+------------------
+
+API Changes (Backward-Compatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Adds ``additional_data`` to ``H2Connection.close_connection``, allowing the
+ user to send additional debug data on the GOAWAY frame.
+- Adds ``last_stream_id`` to ``H2Connection.close_connection``, allowing the
+ user to manually control what the reported last stream ID is.
+- Add new method: ``prioritize``.
+- Add support for emitting stream priority information when sending headers
+ frames using three new keyword arguments: ``priority_weight``,
+ ``priority_depends_on``, and ``priority_exclusive``.
+- Add support for "related events": events that fire simultaneously on a single
+ frame.
+
+
+2.3.1 (2016-05-12)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Resolved ``AttributeError`` encountered when receiving more than one sequence
+ of CONTINUATION frames on a given connection.
+
+
+2.2.5 (2016-05-12)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Resolved ``AttributeError`` encountered when receiving more than one sequence
+ of CONTINUATION frames on a given connection.
+
+
+2.3.0 (2016-04-26)
+------------------
+
+API Changes (Backward-Compatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Added a new flag to the ``H2Connection`` constructor: ``header_encoding``,
+ that controls what encoding is used (if any) to decode the headers from bytes
+ to unicode. This defaults to UTF-8 for backward compatibility. To disable the
+ decode and use bytes exclusively, set the field to False, None, or the empty
+ string. This affects all headers, including those pushed by servers.
+- Bumped the minimum version of HPACK allowed from 2.0 to 2.2.
+- Added support for advertising RFC 7838 Alternative services.
+- Allowed users to provide ``hpack.HeaderTuple`` and
+ ``hpack.NeverIndexedHeaderTuple`` objects to all methods that send headers.
+- Changed all events that carry headers to emit ``hpack.HeaderTuple`` and
+ ``hpack.NeverIndexedHeaderTuple`` instead of plain tuples. This allows users
+ to maintain header indexing state.
+- Added support for plaintext upgrade with the ``initiate_upgrade_connection``
+ method.
+
+Bugfixes
+~~~~~~~~
+
+- Automatically ensure that all ``Authorization`` and ``Proxy-Authorization``
+ headers, as well as short ``Cookie`` headers, are prevented from being added
+ to encoding contexts.
+
+2.2.4 (2016-04-25)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Correctly forbid pseudo-headers that were not defined in RFC 7540.
+- Ignore AltSvc frames, rather than exploding when receiving them.
+
+2.1.5 (2016-04-25)
+------------------
+
+*Final 2.1.X release*
+
+Bugfixes
+~~~~~~~~
+
+- Correctly forbid pseudo-headers that were not defined in RFC 7540.
+- Ignore AltSvc frames, rather than exploding when receiving them.
+
+2.2.3 (2016-04-13)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Allowed the 4.X series of hyperframe releases as dependencies.
+
+2.1.4 (2016-04-13)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Allowed the 4.X series of hyperframe releases as dependencies.
+
+
+2.2.2 (2016-04-05)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Fixed issue where informational responses were erroneously not allowed to be
+ sent in the ``HALF_CLOSED_REMOTE`` state.
+- Fixed issue where informational responses were erroneously not allowed to be
+ received in the ``HALF_CLOSED_LOCAL`` state.
+- Fixed issue where we allowed informational responses to be sent or received
+ after final responses.
+
+2.2.1 (2016-03-23)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Fixed issue where users using locales that did not default to UTF-8 were
+ unable to install source distributions of the package.
+
+2.2.0 (2016-03-23)
+------------------
+
+API Changes (Backward-Compatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Added support for sending informational responses (responses with 1XX
+ status codes) as part of the standard flow. HTTP/2 allows zero or more
+ informational responses with no upper limit: hyper-h2 does too.
+- Added support for receiving informational responses (responses with 1XX
+ status codes) as part of the standard flow. HTTP/2 allows zero or more
+ informational responses with no upper limit: hyper-h2 does too.
+- Added a new event: ``ReceivedInformationalResponse``. This event is fired
+ when informational responses (those with 1XX status codes) are received.
+- Added an ``additional_data`` field to the ``ConnectionTerminated`` event that
+ carries any additional data sent on the GOAWAY frame. May be ``None`` if no
+ such data was sent.
+- Added the ``initial_values`` optional argument to the ``Settings`` object.
+
+Bugfixes
+~~~~~~~~
+
+- Correctly reject all of the connection-specific headers mentioned in RFC 7540
+ § 8.1.2.2, not just the ``Connection:`` header.
+- Defaulted the value of ``SETTINGS_MAX_CONCURRENT_STREAMS`` to 100, unless
+ explicitly overridden. This is a safe defensive initial value for this
+ setting.
+
+2.1.3 (2016-03-16)
+------------------
+
+Deprecations
+~~~~~~~~~~~~
+
+- Passing dictionaries to ``send_headers`` as the header block is deprecated,
+ and will be removed in 3.0.
+
+2.1.2 (2016-02-17)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Reject attempts to push streams on streams that were themselves pushed:
+ streams can only be pushed on streams that were initiated by the client.
+- Correctly allow CONTINUATION frames to extend the header block started by a
+ PUSH_PROMISE frame.
+- Changed our handling of frames received on streams that were reset by the
+ user.
+
+ Previously these would, at best, cause ProtocolErrors to be raised and the
+ connection to be torn down (rather defeating the point of resetting streams
+ at all) and, at worst, would cause subtle inconsistencies in state between
+ hyper-h2 and the remote peer that could lead to header block decoding errors
+ or flow control blockages.
+
+ Now when the user resets a stream all further frames received on that stream
+ are ignored except where they affect some form of connection-level state,
+ where they have their effect and are then ignored.
+- Fixed a bug whereby receiving a PUSH_PROMISE frame on a stream that was
+ closed would cause a RST_STREAM frame to be emitted on the closed-stream,
+ but not the newly-pushed one. Now this causes a ``ProtocolError``.
+
+2.1.1 (2016-02-05)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Added debug representations for all events.
+- Fixed problems with setup.py that caused trouble on older setuptools/pip
+ installs.
+
+2.1.0 (2016-02-02)
+------------------
+
+API Changes (Backward-Compatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Added new field to ``DataReceived``: ``flow_controlled_length``. This is the
+ length of the frame including padded data, allowing users to correctly track
+ changes to the flow control window.
+- Defined new ``UnsupportedFrameError``, thrown when frames that are known to
+ hyperframe but not supported by hyper-h2 are received. For
+ backward-compatibility reasons, this is a ``ProtocolError`` *and* a
+ ``KeyError``.
+
+Bugfixes
+~~~~~~~~
+
+- Hyper-h2 now correctly accounts for padding when maintaining flow control
+ windows.
+- Resolved a bug where hyper-h2 would mistakenly apply
+ SETTINGS_INITIAL_WINDOW_SIZE to the connection flow control window in
+ addition to the stream-level flow control windows.
+- Invalid Content-Length headers now throw ``ProtocolError`` exceptions and
+ correctly tear the connection down, instead of leaving the connection in an
+ indeterminate state.
+- Invalid header blocks now throw ``ProtocolError``, rather than a grab bag of
+ possible other exceptions.
+
+2.0.0 (2016-01-25)
+------------------
+
+API Changes (Breaking)
+~~~~~~~~~~~~~~~~~~~~~~
+
+- Attempts to open streams with invalid stream IDs, either by the remote peer
+ or by the user, are now rejected as a ``ProtocolError``. Previously these
+ were allowed, and would cause remote peers to error.
+- Receiving frames that have invalid padding now causes the connection to be
+ terminated with a ``ProtocolError`` being raised. Previously these passed
+ undetected.
+- Settings values set by both the user and the remote peer are now validated
+ when they're set. If they're invalid, a new ``InvalidSettingsValueError`` is
+ raised and, if set by the remote peer, a connection error is signaled.
+ Previously, it was possible to set invalid values. These would either be
+ caught when building frames, or would be allowed to stand.
+- Settings changes no longer require user action to be acknowledged: hyper-h2
+ acknowledges them automatically. This moves the location where some
+ exceptions may be thrown, and also causes the ``acknowledge_settings`` method
+ to be removed from the public API.
+- Removed a number of methods on the ``H2Connection`` object from the public,
+ semantically versioned API, by renaming them to have leading underscores.
+ Specifically, removed:
+
+ - ``get_stream_by_id``
+ - ``get_or_create_stream``
+ - ``begin_new_stream``
+ - ``receive_frame``
+ - ``acknowledge_settings``
+
+- Added full support for receiving CONTINUATION frames, including policing
+ logic about when and how they are received. Previously, receiving
+ CONTINUATION frames was not supported and would throw exceptions.
+- All public API functions on ``H2Connection`` except for ``receive_data`` no
+ longer return lists of events, because these lists were always empty. Events
+ are now only raised by ``receive_data``.
+- Calls to ``increment_flow_control_window`` with out of range values now raise
+ ``ValueError`` exceptions. Previously they would be allowed, or would cause
+ errors when serializing frames.
+
+API Changes (Backward-Compatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Added ``PriorityUpdated`` event for signaling priority changes.
+- Added ``get_next_available_stream_id`` function.
+- Receiving DATA frames on streams not in the OPEN or HALF_CLOSED_LOCAL states
+ now causes a stream reset, rather than a connection reset. The error is now
+ also classified as a ``StreamClosedError``, rather than a more generic
+ ``ProtocolError``.
+- Receiving HEADERS or PUSH_PROMISE frames in the HALF_CLOSED_REMOTE state now
+ causes a stream reset, rather than a connection reset.
+- Receiving frames that violate the max frame size now causes connection errors
+ with error code FRAME_SIZE_ERROR, not a generic PROTOCOL_ERROR. This
+ condition now also raises a ``FrameTooLargeError``, a new subclass of
+ ``ProtocolError``.
+- Made ``NoSuchStreamError`` a subclass of ``ProtocolError``.
+- The ``StreamReset`` event is now also fired whenever a protocol error from
+ the remote peer forces a stream to close early. This is only fired once.
+- The ``StreamReset`` event now carries a flag, ``remote_reset``, that is set
+ to ``True`` in all cases where ``StreamReset`` would previously have fired
+ (e.g. when the remote peer sent a RST_STREAM), and is set to ``False`` when
+ it fires because the remote peer made a protocol error.
+- Hyper-h2 now rejects attempts by peers to increment a flow control window by
+ zero bytes.
+- Hyper-h2 now rejects peers sending header blocks that are ill-formed for a
+ number of reasons as set out in RFC 7540 Section 8.1.2.
+- Attempting to send non-PRIORITY frames on closed streams now raises
+ ``StreamClosedError``.
+- Remote peers attempting to increase the flow control window beyond
+ ``2**31 - 1``, either by window increment or by settings frame, are now
+ rejected as ``ProtocolError``.
+- Local attempts to increase the flow control window beyond ``2**31 - 1`` by
+ window increment are now rejected as ``ProtocolError``.
+- The bytes that represent individual settings are now available in
+ ``h2.settings``, instead of needing users to import them from hyperframe.
+
+Bugfixes
+~~~~~~~~
+
+- RFC 7540 requires that a separate minimum stream ID be used for inbound and
+ outbound streams. Hyper-h2 now obeys this requirement.
+- Hyper-h2 now does a better job of reporting the last stream ID it has
+ partially handled when terminating connections.
+- Fixed an error in the arguments of ``StreamIDTooLowError``.
+- Prevent ``ValueError`` leaking from Hyperframe.
+- Prevent ``struct.error`` and ``InvalidFrameError`` leaking from Hyperframe.
+
+1.1.1 (2015-11-17)
+------------------
+
+Bugfixes
+~~~~~~~~
+
+- Forcibly lowercase all header names to improve compatibility with
+ implementations that demand lower-case header names.
+
+1.1.0 (2015-10-28)
+------------------
+
+API Changes (Backward-Compatible)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Added a new ``ConnectionTerminated`` event, which fires when GOAWAY frames
+ are received.
+- Added a subclass of ``NoSuchStreamError``, called ``StreamClosedError``, that
+ fires when actions are taken on a stream that is closed and has had its state
+ flushed from the system.
+- Added ``StreamIDTooLowError``, raised when the user or the remote peer
+ attempts to create a stream with an ID lower than one previously used in the
+ dialog. Inherits from ``ValueError`` for backward-compatibility reasons.
+
+Bugfixes
+~~~~~~~~
+
+- Do not throw ``ProtocolError`` when attempting to send multiple GOAWAY
+ frames on one connection.
+- We no longer forcefully change the decoder table size when settings changes
+ are ACKed, instead waiting for remote acknowledgement of the change.
+- Improve the performance of checking whether a stream is open.
+- We now attempt to lazily garbage collect closed streams, to avoid having the
+ state hang around indefinitely, leaking memory.
+- Avoid further per-stream allocations, leading to substantial performance
+ improvements when many short-lived streams are used.
+
+1.0.0 (2015-10-15)
+------------------
+
+- First production release!
diff --git a/testing/web-platform/tests/tools/third_party/h2/LICENSE b/testing/web-platform/tests/tools/third_party/h2/LICENSE
new file mode 100644
index 0000000000..7bb76c58ec
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015-2016 Cory Benfield and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/h2/MANIFEST.in b/testing/web-platform/tests/tools/third_party/h2/MANIFEST.in
new file mode 100644
index 0000000000..04400de6b7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/MANIFEST.in
@@ -0,0 +1,8 @@
+include README.rst LICENSE CONTRIBUTORS.rst HISTORY.rst tox.ini test_requirements.txt .coveragerc Makefile
+recursive-include test *.py *.sh
+graft docs
+prune docs/build
+graft visualizer
+recursive-include examples *.py *.crt *.key *.pem *.csr
+recursive-include utils *.sh
+recursive-include _travis *.sh
diff --git a/testing/web-platform/tests/tools/third_party/h2/Makefile b/testing/web-platform/tests/tools/third_party/h2/Makefile
new file mode 100644
index 0000000000..689077472f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/Makefile
@@ -0,0 +1,9 @@
+.PHONY: publish test
+
+publish:
+ rm -rf dist/
+ python setup.py sdist bdist_wheel
+ twine upload -s dist/*
+
+test:
+ py.test -n 4 --cov h2 test/
diff --git a/testing/web-platform/tests/tools/third_party/h2/README.rst b/testing/web-platform/tests/tools/third_party/h2/README.rst
new file mode 100644
index 0000000000..7140d37acc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/README.rst
@@ -0,0 +1,65 @@
+===============================
+hyper-h2: HTTP/2 Protocol Stack
+===============================
+
+.. image:: https://raw.github.com/Lukasa/hyper/development/docs/source/images/hyper.png
+
+.. image:: https://travis-ci.org/python-hyper/hyper-h2.svg?branch=master
+ :target: https://travis-ci.org/python-hyper/hyper-h2
+
+This repository contains a pure-Python implementation of an HTTP/2 protocol
+stack. It's written from the ground up to be embeddable in whatever program you
+choose to use, ensuring that you can speak HTTP/2 regardless of your
+programming paradigm.
+
+You use it like this:
+
+.. code-block:: python
+
+ import h2.connection
+
+ conn = h2.connection.H2Connection()
+ conn.send_headers(stream_id=stream_id, headers=headers)
+ conn.send_data(stream_id, data)
+ socket.sendall(conn.data_to_send())
+ events = conn.receive_data(socket_data)
+
+This repository does not provide a parsing layer, a network layer, or any rules
+about concurrency. Instead, it's a purely in-memory solution, defined in terms
+of data actions and HTTP/2 frames. This is one building block of a full Python
+HTTP implementation.
+
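+A rough sketch of driving the connection from your own socket code might look
+like this (TLS and ALPN negotiation, which real HTTP/2 deployments need, are
+omitted for brevity; the host below is just a placeholder):
+
+.. code-block:: python
+
+    import socket
+
+    import h2.connection
+    import h2.events
+
+    sock = socket.create_connection(('www.example.com', 443))
+    conn = h2.connection.H2Connection()
+    conn.initiate_connection()
+    sock.sendall(conn.data_to_send())
+
+    while True:
+        data = sock.recv(65535)
+        if not data:
+            break
+        for event in conn.receive_data(data):
+            if isinstance(event, h2.events.DataReceived):
+                # Replenish the flow-control window so more data can arrive.
+                conn.acknowledge_received_data(
+                    event.flow_controlled_length, event.stream_id)
+        sock.sendall(conn.data_to_send())
+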
+To install it, just run:
+
+.. code-block:: console
+
+ $ pip install h2
+
+Documentation
+=============
+
+Documentation is available at http://python-hyper.org/h2/.
+
+Contributing
+============
+
+``hyper-h2`` welcomes contributions from anyone! Unlike many other projects, we
+are happy to accept cosmetic contributions and small contributions, in addition
+to large feature requests and changes.
+
+Before you contribute (either by opening an issue or filing a pull request),
+please `read the contribution guidelines`_.
+
+.. _read the contribution guidelines: http://python-hyper.org/en/latest/contributing.html
+
+License
+=======
+
+``hyper-h2`` is made available under the MIT License. For more details, see the
+``LICENSE`` file in the repository.
+
+Authors
+=======
+
+``hyper-h2`` is maintained by Cory Benfield, with contributions from others. For
+more details about the contributors, please see ``CONTRIBUTORS.rst``.
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/Makefile b/testing/web-platform/tests/tools/third_party/h2/docs/Makefile
new file mode 100644
index 0000000000..32b233be83
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/hyper-h2.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/hyper-h2.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/hyper-h2"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/hyper-h2"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/make.bat b/testing/web-platform/tests/tools/third_party/h2/docs/make.bat
new file mode 100644
index 0000000000..537686d817
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/make.bat
@@ -0,0 +1,242 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
+set I18NSPHINXOPTS=%SPHINXOPTS% source
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. texinfo to make Texinfo files
+ echo. gettext to make PO message catalogs
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. xml to make Docutils-native XML files
+ echo. pseudoxml to make pseudoxml-XML files for display purposes
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+
+%SPHINXBUILD% 2> nul
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.http://sphinx-doc.org/
+ exit /b 1
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\hyper-h2.qhcp
+ echo.To view the help file:
+	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\hyper-h2.qhc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "latexpdf" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ cd %BUILDDIR%/latex
+ make all-pdf
+ cd %BUILDDIR%/..
+ echo.
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "latexpdfja" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ cd %BUILDDIR%/latex
+ make all-pdf-ja
+ cd %BUILDDIR%/..
+ echo.
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "texinfo" (
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+ goto end
+)
+
+if "%1" == "gettext" (
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+if "%1" == "xml" (
+ %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The XML files are in %BUILDDIR%/xml.
+ goto end
+)
+
+if "%1" == "pseudoxml" (
+ %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
+ goto end
+)
+
+:end
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/_static/.keep b/testing/web-platform/tests/tools/third_party/h2/docs/source/_static/.keep
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/_static/.keep
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/_static/h2.connection.H2ConnectionStateMachine.dot.png b/testing/web-platform/tests/tools/third_party/h2/docs/source/_static/h2.connection.H2ConnectionStateMachine.dot.png
new file mode 100644
index 0000000000..f2c814ec77
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/_static/h2.connection.H2ConnectionStateMachine.dot.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/_static/h2.stream.H2StreamStateMachine.dot.png b/testing/web-platform/tests/tools/third_party/h2/docs/source/_static/h2.stream.H2StreamStateMachine.dot.png
new file mode 100644
index 0000000000..85bcb68321
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/_static/h2.stream.H2StreamStateMachine.dot.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/advanced-usage.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/advanced-usage.rst
new file mode 100644
index 0000000000..40496f0eae
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/advanced-usage.rst
@@ -0,0 +1,325 @@
+Advanced Usage
+==============
+
+Priority
+--------
+
+.. versionadded:: 2.0.0
+
+`RFC 7540`_ has a fairly substantial and complex section describing how to
+build a HTTP/2 priority tree, and the effect that should have on sending data
+from a server.
+
+Hyper-h2 does not enforce any priority logic by default for servers. This is
+because scheduling data sends is outside the scope of this library, as it
+likely requires fairly substantial understanding of the scheduler being used.
+
+However, for servers that *do* want to follow the priority recommendations
+given by clients, the Hyper project provides `an implementation`_ of the
+`RFC 7540`_ priority tree that will be useful to plug into a server. That,
+combined with the :class:`PriorityUpdated <h2.events.PriorityUpdated>` event from
+this library, can be used to build a server that conforms to RFC 7540's
+recommendations for priority handling.
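+
+As a rough illustration, the sketch below shows the general shape of feeding
+that event into a scheduler. The ``scheduler`` object and its ``reprioritize``
+method are hypothetical placeholders for whatever priority implementation you
+choose to plug in; only the event attributes come from hyper-h2 itself.
+
+.. code-block:: python
+
+    import h2.events
+
+    def apply_priority_signals(events, scheduler):
+        """Forward client priority signals to a scheduler of your choosing."""
+        for event in events:
+            if isinstance(event, h2.events.PriorityUpdated):
+                # Mirror the PRIORITY information sent by the client: the
+                # reprioritised stream, its new weight, the stream it depends
+                # on, and whether that dependency is exclusive.
+                scheduler.reprioritize(          # hypothetical scheduler API
+                    stream_id=event.stream_id,
+                    weight=event.weight,
+                    depends_on=event.depends_on,
+                    exclusive=event.exclusive,
+                )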
+
+Related Events
+--------------
+
+.. versionadded:: 2.4.0
+
+In the 2.4.0 release hyper-h2 added support for signaling "related events".
+These are a HTTP/2-only construct that exist because certain HTTP/2 events can
+occur simultaneously: that is, one HTTP/2 frame can cause multiple state
+transitions to occur at the same time. One example of this is a HEADERS frame
+that contains priority information and carries the END_STREAM flag: this would
+cause three events to fire (one of the various request/response received
+events, a :class:`PriorityUpdated <h2.events.PriorityUpdated>` event, and a
+:class:`StreamEnded <h2.events.StreamEnded>` event).
+
+Ordinarily hyper-h2's logic will emit those events to you one at a time. This
+means that you may attempt to process, for example, a
+:class:`DataReceived <h2.events.DataReceived>` event, not knowing that the next
+event out will be a :class:`StreamEnded <h2.events.StreamEnded>` event.
+hyper-h2 *does* know this, however, and so will forbid you from taking certain
+actions that are a violation of the HTTP/2 protocol.
+
+To avoid this asymmetry of information, events that can occur simultaneously
+now carry properties for their "related events". These allow users to find the
+events that can have occurred simultaneously with each other before the event
+is emitted by hyper-h2. The following objects have "related events":
+
+- :class:`RequestReceived <h2.events.RequestReceived>`:
+
+ - :data:`stream_ended <h2.events.RequestReceived.stream_ended>`: any
+ :class:`StreamEnded <h2.events.StreamEnded>` event that occurred at the
+ same time as receiving this request.
+
+ - :data:`priority_updated
+ <h2.events.RequestReceived.priority_updated>`: any
+ :class:`PriorityUpdated <h2.events.PriorityUpdated>` event that occurred
+ at the same time as receiving this request.
+
+- :class:`ResponseReceived <h2.events.ResponseReceived>`:
+
+ - :data:`stream_ended <h2.events.ResponseReceived.stream_ended>`: any
+ :class:`StreamEnded <h2.events.StreamEnded>` event that occurred at the
+ same time as receiving this response.
+
+ - :data:`priority_updated
+ <h2.events.ResponseReceived.priority_updated>`: any
+ :class:`PriorityUpdated <h2.events.PriorityUpdated>` event that occurred
+ at the same time as receiving this response.
+
+- :class:`TrailersReceived <h2.events.TrailersReceived>`:
+
+ - :data:`stream_ended <h2.events.TrailersReceived.stream_ended>`: any
+ :class:`StreamEnded <h2.events.StreamEnded>` event that occurred at the
+ same time as receiving this set of trailers. This will **always** be
+ present for trailers, as they must terminate streams.
+
+ - :data:`priority_updated
+ <h2.events.TrailersReceived.priority_updated>`: any
+ :class:`PriorityUpdated <h2.events.PriorityUpdated>` event that occurred
+ at the same time as receiving this response.
+
+- :class:`InformationalResponseReceived
+ <h2.events.InformationalResponseReceived>`:
+
+ - :data:`priority_updated
+ <h2.events.InformationalResponseReceived.priority_updated>`: any
+ :class:`PriorityUpdated <h2.events.PriorityUpdated>` event that occurred
+ at the same time as receiving this informational response.
+
+- :class:`DataReceived <h2.events.DataReceived>`:
+
+ - :data:`stream_ended <h2.events.DataReceived.stream_ended>`: any
+ :class:`StreamEnded <h2.events.StreamEnded>` event that occurred at the
+ same time as receiving this data.
+
+
+.. warning:: hyper-h2 does not know if you are looking for related events or
+ expecting to find events in the event stream. Therefore, it will
+ always emit "related events" in the event stream. If you are using
+ the "related events" event pattern, you will want to be careful to
+ avoid double-processing related events.
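+
+As a minimal sketch of the point above, the handler below consults the related
+``stream_ended`` property on :class:`DataReceived <h2.events.DataReceived>`
+and remembers which events it has already dealt with, so that the same
+:class:`StreamEnded <h2.events.StreamEnded>` event is not processed twice. The
+``handle_*`` functions are hypothetical application hooks.
+
+.. code-block:: python
+
+    import h2.events
+
+    def process_events(events):
+        """Use related events without double-processing them."""
+        handled = set()
+        for event in events:
+            if id(event) in handled:
+                continue
+            if isinstance(event, h2.events.DataReceived):
+                if event.stream_ended is not None:
+                    # Peek at the simultaneous StreamEnded event, then mark it
+                    # as handled so it is skipped when it appears in the list.
+                    handled.add(id(event.stream_ended))
+                    handle_complete_body(event)   # hypothetical hook
+                else:
+                    handle_partial_body(event)    # hypothetical hook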
+
+.. _h2-connection-advanced:
+
+Connections: Advanced
+---------------------
+
+Thread Safety
+~~~~~~~~~~~~~
+
+``H2Connection`` objects are *not* thread-safe. They cannot safely be accessed
+from multiple threads at once. This is a deliberate design decision: it is not
+trivially possible to design the ``H2Connection`` object in a way that would
+be either lock-free or have the locks at a fine granularity.
+
+Your implementations should bear this in mind, and handle it appropriately. It
+should be simple enough to use locking alongside the ``H2Connection``: simply
+lock around the connection object itself. Because the ``H2Connection`` object
+does no I/O it should be entirely safe to do that. Alternatively, have a single
+thread take ownership of the ``H2Connection`` and use a message-passing
+interface to serialize access to the ``H2Connection``.
+
+If you are using a non-threaded concurrency approach (e.g. Twisted), this
+should not affect you.
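+
+As a minimal sketch of the locking approach, assuming a threaded server where
+several threads may touch the same connection, you might wrap the connection
+and its lock together (the class name and methods here are illustrative, not
+part of hyper-h2):
+
+.. code-block:: python
+
+    import threading
+
+    import h2.config
+    import h2.connection
+
+    class LockedConnection:
+        """Serialise all access to a single H2Connection with one lock."""
+
+        def __init__(self):
+            config = h2.config.H2Configuration(client_side=False)
+            self.conn = h2.connection.H2Connection(config=config)
+            self.lock = threading.Lock()
+
+        def receive_data(self, data):
+            with self.lock:
+                return self.conn.receive_data(data)
+
+        def data_to_send(self):
+            with self.lock:
+                return self.conn.data_to_send()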
+
+Internal Buffers
+~~~~~~~~~~~~~~~~
+
+In order to avoid doing I/O, the ``H2Connection`` employs an internal buffer.
+This buffer is *unbounded* in size: it can potentially grow infinitely. This
+means that, if you are not making sure to regularly empty it, you are at risk
+of exceeding the memory limit of a single process and finding your program
+crashes.
+
+It is highly recommended that you send data at regular intervals, ideally as
+soon as possible.
+
+.. _advanced-sending-data:
+
+Sending Data
+~~~~~~~~~~~~
+
+When sending data on the network, it's important to remember that you may not
+be able to send an unbounded amount of data at once. Particularly when using
+TCP, it is often the case that there are limits on how much data may be in
+flight at any one time. These limits can be very low, and your operating system
+will only buffer so much data in memory before it starts to complain.
+
+For this reason, it is possible to consume only a subset of the data available
+when you call :meth:`data_to_send <h2.connection.H2Connection.data_to_send>`.
+However, once you have pulled the data out of the ``H2Connection`` internal
+buffer, it is *not* possible to put it back on again. For that reason, it is
+advisable that you confirm how much space is available in the OS buffer before
+sending.
+
+Alternatively, use tools made available by your framework. For example, the
+Python standard library :mod:`socket <python:socket>` module provides a
+:meth:`sendall <python:socket.socket.sendall>` method that will automatically
+block until all the data has been sent. This will enable you to always use the
+unbounded form of
+:meth:`data_to_send <h2.connection.H2Connection.data_to_send>`, and will help
+you avoid subtle bugs.
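+
+A minimal sketch of that approach, assuming ``sock`` is a connected, blocking
+``socket.socket``, looks like this:
+
+.. code-block:: python
+
+    def flush(conn, sock):
+        """Drain hyper-h2's internal buffer onto a blocking socket."""
+        data = conn.data_to_send()
+        if data:
+            # sendall blocks until the OS has accepted every byte, so the
+            # unbounded form of data_to_send is safe to use here.
+            sock.sendall(data)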
+
+When To Send
+~~~~~~~~~~~~
+
+In addition to knowing how much data to send (see :ref:`advanced-sending-data`)
+it is important to know when to send data. For hyper-h2, this amounts to
+knowing when to call :meth:`data_to_send
+<h2.connection.H2Connection.data_to_send>`.
+
+Hyper-h2 may write data into its send buffer at two times. The first is
+whenever :meth:`receive_data <h2.connection.H2Connection.receive_data>` is
+called. This data is sent in response to some control frames that require no
+user input: for example, responding to PING frames. The second time is in
+response to user action: whenever a user calls a method like
+:meth:`send_headers <h2.connection.H2Connection.send_headers>`, data may be
+written into the buffer.
+
+In a standard design for a hyper-h2 consumer, then, that means there are two
+places where you'll potentially want to send data. The first is in your
+"receive data" loop. This is where you take the data you receive, pass it into
+:meth:`receive_data <h2.connection.H2Connection.receive_data>`, and then
+dispatch events. For this loop, it is usually best to save sending data until
+the loop is complete: that allows you to empty the buffer only once.
+
+The other place you'll want to send the data is when initiating requests or
+taking any other active, unprompted action on the connection. In this instance,
+you'll want to make all the relevant ``send_*`` calls, and *then* call
+:meth:`data_to_send <h2.connection.H2Connection.data_to_send>`.
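+
+A sketch of the first of those two places, the "receive data" loop, is shown
+below. The ``handle_event`` function stands in for whatever event dispatch
+your application performs; everything else uses the hyper-h2 API described
+above.
+
+.. code-block:: python
+
+    def receive_loop(conn, sock):
+        """Process received data, then flush the send buffer once per pass."""
+        while True:
+            data = sock.recv(65535)
+            if not data:
+                break
+            for event in conn.receive_data(data):
+                handle_event(conn, event)      # hypothetical dispatch
+            # Sending once, after all events are handled, empties the buffer
+            # in a single write rather than many small ones.
+            outbound = conn.data_to_send()
+            if outbound:
+                sock.sendall(outbound)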
+
+Headers
+-------
+
+HTTP/2 defines several "special header fields" which are used to encode data
+that was previously sent in either the request or status line of HTTP/1.1.
+These header fields are distinguished from ordinary header fields because their
+field name begins with a ``:`` character. The special header fields defined in
+`RFC 7540`_ are:
+
+- ``:status``
+- ``:path``
+- ``:method``
+- ``:scheme``
+- ``:authority``
+
+`RFC 7540`_ **mandates** that all of these header fields appear *first* in the
+header block, before the ordinary header fields. This could cause difficulty if
+the :meth:`send_headers <h2.connection.H2Connection.send_headers>` method
+accepted a plain ``dict`` for the ``headers`` argument, because ``dict``
+objects are unordered. For this reason, we require that you provide a list of
+two-tuples.
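+
+For example, a well-formed set of response headers might be built like this
+(assuming ``conn`` is an established ``H2Connection`` and ``stream_id`` refers
+to an open stream):
+
+.. code-block:: python
+
+    # The special fields (starting with ':') come first, followed by the
+    # ordinary header fields, as an ordered list of two-tuples.
+    response_headers = [
+        (':status', '200'),
+        ('content-type', 'text/plain'),
+        ('content-length', '5'),
+    ]
+    conn.send_headers(stream_id=stream_id, headers=response_headers)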
+
+.. _RFC 7540: https://tools.ietf.org/html/rfc7540
+.. _an implementation: http://python-hyper.org/projects/priority/en/latest/
+
+Flow Control
+------------
+
+HTTP/2 defines a complex flow control system that uses a sliding window of
+data on both a per-stream and per-connection basis. Essentially, each
+implementation allows its peer to send a specific amount of data at any time
+(the "flow control window") before it must stop. Each stream has a separate
+window, and the connection as a whole has a window. Each window can be opened
+by an implementation by sending a ``WINDOW_UPDATE`` frame, either on a specific
+stream (causing the window for that stream to be opened), or on stream ``0``,
+which causes the window for the entire connection to be opened.
+
+In HTTP/2, only data in ``DATA`` frames is flow controlled. All other frames
+are exempt from flow control. Each ``DATA`` frame consumes both stream and
+connection flow control window bytes. This means that the maximum amount of
+data that can be sent on any one stream before a ``WINDOW_UPDATE`` frame is
+received is the *lower* of the stream and connection windows. The maximum
+amount of data that can be sent on *all* streams before a ``WINDOW_UPDATE``
+frame is received is the size of the connection flow control window.
+
+Working With Flow Control
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The amount of flow control window a ``DATA`` frame consumes is the sum of both
+its contained application data *and* the amount of padding used. hyper-h2 shows
+this to the user in a :class:`DataReceived <h2.events.DataReceived>` event by
+using the :data:`flow_controlled_length
+<h2.events.DataReceived.flow_controlled_length>` field. When working with flow
+control in hyper-h2, users *must* use this field: simply using
+``len(datareceived.data)`` can eventually lead to deadlock.
+
+When data has been received and given to the user in a :class:`DataReceived
+<h2.events.DataReceived>`, it is the responsibility of the user to re-open the
+flow control window when the user is ready for more data. hyper-h2 does not do
+this automatically to avoid flooding the user with data: if we did, the remote
+peer could send unbounded amounts of data that the user would need to buffer
+before processing.
+
+To re-open the flow control window, then, the user must call
+:meth:`increment_flow_control_window
+<h2.connection.H2Connection.increment_flow_control_window>` with the
+:data:`flow_controlled_length <h2.events.DataReceived.flow_controlled_length>`
+of the received data. hyper-h2 requires that you manage both the connection
+and the stream flow control windows separately, so you may need to increment
+both the stream the data was received on and stream ``0``.
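+
+A minimal sketch of that receive-side bookkeeping is shown below; the
+``process`` function is a hypothetical application hook, and a production
+server would also guard against incrementing the window of a stream that has
+already been closed.
+
+.. code-block:: python
+
+    import h2.events
+
+    def handle_data(conn, event):
+        """Re-open the flow control windows once the data has been handled."""
+        assert isinstance(event, h2.events.DataReceived)
+        process(event.data)                     # hypothetical hook
+        increment = event.flow_controlled_length
+        if increment:
+            # The stream window and the connection window (stream 0) are
+            # tracked separately, so both may need to be incremented.
+            conn.increment_flow_control_window(increment, stream_id=event.stream_id)
+            conn.increment_flow_control_window(increment)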
+
+When sending data, a HTTP/2 implementation must not send more than the flow control
+window available for that stream. As noted above, the maximum amount of data
+that can be sent on the stream is the minimum of the stream and the connection
+flow control windows. You can find out how much data you can send on a given
+stream by using the :meth:`local_flow_control_window
+<h2.connection.H2Connection.local_flow_control_window>` method, which will do
+all of these calculations for you. If you attempt to send more than this amount
+of data on a stream, hyper-h2 will throw a :class:`ProtocolError
+<h2.exceptions.ProtocolError>` and refuse to send the data.
+
+In hyper-h2, receiving a ``WINDOW_UPDATE`` frame causes a :class:`WindowUpdated
+<h2.events.WindowUpdated>` event to fire. This will notify you that there is
+potentially more room in a flow control window. Note that, just because an
+increment of a given size was received *does not* mean that that much more data
+can be sent: remember that both the connection and stream flow control windows
+constrain how much data can be sent.
+
+As a result, when a :class:`WindowUpdated <h2.events.WindowUpdated>` event
+fires with a non-zero stream ID, and the user has more data to send on that
+stream, the user should call :meth:`local_flow_control_window
+<h2.connection.H2Connection.local_flow_control_window>` to check if there
+really is more room to send data on that stream.
+
+When a :class:`WindowUpdated <h2.events.WindowUpdated>` event fires with a
+stream ID of ``0``, that may have unblocked *all* streams that are currently
+blocked. The user should use :meth:`local_flow_control_window
+<h2.connection.H2Connection.local_flow_control_window>` to check all blocked
+streams to see if more data is available.
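+
+The sending side can be sketched as follows. Here ``pending`` is a
+hypothetical application-side ``bytearray`` of unsent body data for the
+stream; a real implementation would typically loop until either the buffer or
+the window is exhausted.
+
+.. code-block:: python
+
+    def send_pending(conn, sock, stream_id, pending):
+        """Send as much buffered data as the current window allows."""
+        window = conn.local_flow_control_window(stream_id)
+        # A single DATA frame may not exceed the peer's maximum frame size.
+        chunk_size = min(window, conn.max_outbound_frame_size, len(pending))
+        if chunk_size:
+            conn.send_data(stream_id, bytes(pending[:chunk_size]))
+            del pending[:chunk_size]
+            sock.sendall(conn.data_to_send())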
+
+Auto Flow Control
+~~~~~~~~~~~~~~~~~
+
+.. versionadded:: 2.5.0
+
+In most cases, there is no advantage for users in managing their own flow
+control strategies. While particular high performance or specific-use-case
+applications may gain value from directly controlling the emission of
+``WINDOW_UPDATE`` frames, the average application can use a
+lowest-common-denominator strategy to emit those frames. As of version 2.5.0,
+hyper-h2 now provides this automatic strategy for users, if they want to use
+it.
+
+This automatic strategy is built around a single method:
+:meth:`acknowledge_received_data
+<h2.connection.H2Connection.acknowledge_received_data>`. This method
+flags to the connection object that your application has dealt with a certain
+number of flow controlled bytes, and that the window should be incremented in
+some way. Whenever your application has "processed" some received bytes, this
+method should be called to signal that they have been processed.
+
+The key difference between this method and :meth:`increment_flow_control_window
+<h2.connection.H2Connection.increment_flow_control_window>` is that the method
+:meth:`acknowledge_received_data
+<h2.connection.H2Connection.acknowledge_received_data>` does not guarantee that
+it will emit a ``WINDOW_UPDATE`` frame, and if it does it will not necessarily
+emit them for *only* that stream or *only* the connection. Instead, the
+``WINDOW_UPDATE`` frames will be *coalesced*: they will be emitted only when
+a certain number of bytes have been freed up.
+
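+A minimal sketch of the automatic strategy, where ``process`` is a
+hypothetical application hook, looks like this:
+
+.. code-block:: python
+
+    import h2.events
+
+    def acknowledge_data(conn, event):
+        """Acknowledge flow controlled bytes as soon as they are processed."""
+        if isinstance(event, h2.events.DataReceived):
+            process(event.data)                 # hypothetical hook
+            # hyper-h2 decides whether and when to emit WINDOW_UPDATE frames,
+            # coalescing them across the stream and the connection.
+            conn.acknowledge_received_data(
+                event.flow_controlled_length, event.stream_id
+            )
+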
+For most applications, this method should be preferred to the manual flow
+control mechanism.
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/api.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/api.rst
new file mode 100644
index 0000000000..a46f8cce7d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/api.rst
@@ -0,0 +1,169 @@
+Hyper-h2 API
+============
+
+This document details the API of Hyper-h2.
+
+Semantic Versioning
+-------------------
+
+Hyper-h2 follows semantic versioning for its public API. Please note that the
+guarantees of semantic versioning apply only to the API that is *documented
+here*. Simply because a method or data field is not prefaced by an underscore
+does not make it part of Hyper-h2's public API. Anything not documented here is
+subject to change at any time.
+
+Connection
+----------
+
+.. autoclass:: h2.connection.H2Connection
+ :members:
+ :exclude-members: inbound_flow_control_window
+
+
+Configuration
+-------------
+
+.. autoclass:: h2.config.H2Configuration
+ :members:
+
+
+.. _h2-events-api:
+
+Events
+------
+
+.. autoclass:: h2.events.RequestReceived
+ :members:
+
+.. autoclass:: h2.events.ResponseReceived
+ :members:
+
+.. autoclass:: h2.events.TrailersReceived
+ :members:
+
+.. autoclass:: h2.events.InformationalResponseReceived
+ :members:
+
+.. autoclass:: h2.events.DataReceived
+ :members:
+
+.. autoclass:: h2.events.WindowUpdated
+ :members:
+
+.. autoclass:: h2.events.RemoteSettingsChanged
+ :members:
+
+.. autoclass:: h2.events.PingReceived
+ :members:
+
+.. autoclass:: h2.events.PingAckReceived
+ :members:
+
+.. autoclass:: h2.events.StreamEnded
+ :members:
+
+.. autoclass:: h2.events.StreamReset
+ :members:
+
+.. autoclass:: h2.events.PushedStreamReceived
+ :members:
+
+.. autoclass:: h2.events.SettingsAcknowledged
+ :members:
+
+.. autoclass:: h2.events.PriorityUpdated
+ :members:
+
+.. autoclass:: h2.events.ConnectionTerminated
+ :members:
+
+.. autoclass:: h2.events.AlternativeServiceAvailable
+ :members:
+
+.. autoclass:: h2.events.UnknownFrameReceived
+ :members:
+
+
+Exceptions
+----------
+
+.. autoclass:: h2.exceptions.H2Error
+ :members:
+
+.. autoclass:: h2.exceptions.NoSuchStreamError
+ :show-inheritance:
+ :members:
+
+.. autoclass:: h2.exceptions.StreamClosedError
+ :show-inheritance:
+ :members:
+
+.. autoclass:: h2.exceptions.RFC1122Error
+ :show-inheritance:
+ :members:
+
+
+Protocol Errors
+~~~~~~~~~~~~~~~
+
+.. autoclass:: h2.exceptions.ProtocolError
+ :show-inheritance:
+ :members:
+
+.. autoclass:: h2.exceptions.FrameTooLargeError
+ :show-inheritance:
+ :members:
+
+.. autoclass:: h2.exceptions.FrameDataMissingError
+ :show-inheritance:
+ :members:
+
+.. autoclass:: h2.exceptions.TooManyStreamsError
+ :show-inheritance:
+ :members:
+
+.. autoclass:: h2.exceptions.FlowControlError
+ :show-inheritance:
+ :members:
+
+.. autoclass:: h2.exceptions.StreamIDTooLowError
+ :show-inheritance:
+ :members:
+
+.. autoclass:: h2.exceptions.InvalidSettingsValueError
+ :members:
+
+.. autoclass:: h2.exceptions.NoAvailableStreamIDError
+ :show-inheritance:
+ :members:
+
+.. autoclass:: h2.exceptions.InvalidBodyLengthError
+ :show-inheritance:
+ :members:
+
+.. autoclass:: h2.exceptions.UnsupportedFrameError
+ :members:
+
+.. autoclass:: h2.exceptions.DenialOfServiceError
+ :show-inheritance:
+ :members:
+
+
+HTTP/2 Error Codes
+------------------
+
+.. automodule:: h2.errors
+ :members:
+
+
+Settings
+--------
+
+.. autoclass:: h2.settings.SettingCodes
+ :members:
+
+.. autoclass:: h2.settings.Settings
+ :inherited-members:
+
+.. autoclass:: h2.settings.ChangedSetting
+ :members:
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/asyncio-example.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/asyncio-example.rst
new file mode 100644
index 0000000000..d3afbfd051
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/asyncio-example.rst
@@ -0,0 +1,17 @@
+Asyncio Example Server
+======================
+
+This example is a basic HTTP/2 server written using `asyncio`_, using some
+functionality that was introduced in Python 3.5. This server represents
+basically just the same JSON-headers-returning server that was built in the
+:doc:`basic-usage` document.
+
+This example demonstrates some basic asyncio techniques.
+
+.. literalinclude:: ../../examples/asyncio/asyncio-server.py
+ :language: python
+ :linenos:
+ :encoding: utf-8
+
+
+.. _asyncio: https://docs.python.org/3/library/asyncio.html
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/basic-usage.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/basic-usage.rst
new file mode 100644
index 0000000000..b9aab6c6cf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/basic-usage.rst
@@ -0,0 +1,746 @@
+Getting Started: Writing Your Own HTTP/2 Server
+===============================================
+
+This document explains how to get started writing fully-fledged HTTP/2
+implementations using Hyper-h2 as the underlying protocol stack. It covers the
+basic concepts you need to understand, and talks you through writing a very
+simple HTTP/2 server.
+
+This document assumes you're moderately familiar with writing Python, and have
+*some* understanding of how computer networks work. If you don't, you'll find
+it a lot easier if you get some understanding of those concepts first and then
+return to this documentation.
+
+
+.. _h2-connection-basic:
+
+Connections
+-----------
+
+Hyper-h2's core object is the
+:class:`H2Connection <h2.connection.H2Connection>` object. This object is an
+abstract representation of the state of a single HTTP/2 connection, and holds
+all the important protocol state. When using Hyper-h2, this object will be the
+first thing you create and the object that does most of the heavy lifting.
+
+The interface to this object is relatively simple. For sending data, you
+call the object with methods indicating what actions you want to perform: for
+example, you may want to send headers (you'd use the
+:meth:`send_headers <h2.connection.H2Connection.send_headers>` method), or
+send data (you'd use the
+:meth:`send_data <h2.connection.H2Connection.send_data>` method). After you've
+decided what actions you want to perform, you get some bytes out of the object
+that represent the HTTP/2-encoded representation of your actions, and send them
+out over the network however you see fit.
+
+When you receive data from the network, you pass that data in to the
+``H2Connection`` object, which returns a list of *events*.
+These events, covered in more detail later in :ref:`h2-events-basic`, define
+the set of actions the remote peer has performed on the connection, as
+represented by the HTTP/2-encoded data you just passed to the object.
+
+Thus, you end up with a simple loop (which you may recognise as a more-specific
+form of an `event loop`_):
+
+ 1. First, you perform some actions.
+ 2. You send the data created by performing those actions to the network.
+ 3. You read data from the network.
+ 4. You decode those into events.
+ 5. The events cause you to trigger some actions: go back to step 1.
+
+Of course, HTTP/2 is more complex than that, but in the very simplest case you
+can write a fairly effective HTTP/2 tool using just that kind of loop. Later in
+this document, we'll do just that.
+
+Some important subtleties of ``H2Connection`` objects are covered in
+:doc:`advanced-usage`: see :ref:`h2-connection-advanced` for more information.
+However, one subtlety should be covered, and that is this: Hyper-h2's
+``H2Connection`` object doesn't do I/O. Let's talk briefly about why.
+
+I/O
+~~~
+
+Any useful HTTP/2 tool eventually needs to do I/O. This is because it's not
+very useful to be able to speak to other computers using a protocol like HTTP/2
+unless you actually *speak* to them sometimes.
+
+However, doing I/O is not a trivial thing: there are lots of different ways to
+do it, and once you choose a way to do it your code usually won't work well
+with the approaches you *didn't* choose.
+
+While there are lots of different ways to do I/O, when it comes down to it
+all HTTP/2 implementations transform bytes received into events, and events
+into bytes to send. So there's no reason to have lots of different versions of
+this core protocol code: one for Twisted, one for gevent, one for threading,
+and one for synchronous code.
+
+This is why we said at the top that Hyper-h2 is a *HTTP/2 Protocol Stack*, not
+a *fully-fledged implementation*. Hyper-h2 knows how to transform bytes into
+events and back, but that's it. The I/O and smarts might be different, but
+the core HTTP/2 logic is the same: that's what Hyper-h2 provides.
+
+Not doing I/O makes Hyper-h2 general, and also relatively simple. It has an
+easy-to-understand performance envelope, it's easy to test (and as a result
+easy to get correct behaviour out of), and it behaves in a reproducible way.
+These are all great traits to have in a library that is doing something quite
+complex.
+
+This document will talk you through how to build a relatively simple HTTP/2
+implementation using Hyper-h2, to give you an understanding of where it fits in
+your software.
+
+
+.. _h2-events-basic:
+
+Events
+------
+
+When writing a HTTP/2 implementation it's important to know what the remote
+peer is doing: if you didn't care, writing networked programs would be a lot
+easier!
+
+Hyper-h2 encodes the actions of the remote peer in the form of *events*. When
+you receive data from the remote peer and pass it into your ``H2Connection``
+object (see :ref:`h2-connection-basic`), the ``H2Connection`` returns a list
+of objects, each one representing a single event that has occurred. Each
+event refers to a single action the remote peer has taken.
+
+Some events are fairly high-level, referring to things that are more general
+than HTTP/2: for example, the
+:class:`RequestReceived <h2.events.RequestReceived>` event is a general HTTP
+concept, not just a HTTP/2 one. Other events are extremely HTTP/2-specific:
+for example, :class:`PushedStreamReceived <h2.events.PushedStreamReceived>`
+refers to Server Push, a very HTTP/2-specific concept.
+
+The reason these events exist is that Hyper-h2 is intended to be very general.
+This means that, in many cases, Hyper-h2 does not know exactly what to do in
+response to an event. Your code will need to handle these events, and make
+decisions about what to do. That's the major role of any HTTP/2 implementation
+built on top of Hyper-h2.
+
+A full list of events is available in :ref:`h2-events-api`. For the purposes
+of this example, we will handle only a small set of events.
+
+
+Writing Your Server
+-------------------
+
+Armed with the knowledge you just obtained, we're going to write a very simple
+HTTP/2 web server. The goal of this server is to write a server that can handle
+a HTTP GET, and that returns the headers sent by the client, encoded in JSON.
+Basically, something a lot like `httpbin.org/get`_. Nothing fancy, but this is
+a good way to get a handle on how you should interact with Hyper-h2.
+
+For the sake of simplicity, we're going to write this using the Python standard
+library, in Python 3. In reality, you'll probably want to use an asynchronous
+framework of some kind: see the `examples directory`_ in the repository for
+some examples of how you'd do that.
+
+Before we start, create a new file called ``h2server.py``: we'll use that as
+our workspace. Additionally, you should install Hyper-h2: follow the
+instructions in :doc:`installation`.
+
+Step 1: Sockets
+~~~~~~~~~~~~~~~
+
+To begin with, we need to make sure we can listen for incoming data and send it
+back. To do that, we need to use the `standard library's socket module`_. For
+now we're going to skip doing TLS: if you want to reach your server from your
+web browser, though, you'll need to add TLS and
+`Application Layer Protocol Negotiation`_ support. Consider looking at our
+examples in our `examples directory`_ instead.
+
+Let's begin. First, open up ``h2server.py``. We need to import the socket
+module and start listening for connections.
+
+This is not a socket tutorial, so we're not going to dive too deeply into how
+this works. If you want more detail about sockets, there are lots of good
+tutorials on the web that you should investigate.
+
+When you want to listen for incoming connections, you need to *bind* an
+address first. So let's do that. Try setting up your file to look like this:
+
+.. code-block:: python
+
+ import socket
+
+ sock = socket.socket()
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ sock.bind(('0.0.0.0', 8080))
+ sock.listen(5)
+
+ while True:
+ print(sock.accept())
+
+In a shell window, execute this program (``python h2server.py``). Then, open
+another shell and run ``curl http://localhost:8080/``. In the first shell, you
+should see something like this:
+
+.. code-block:: console
+
+ $ python h2server.py
+ (<socket.socket fd=4, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=0, laddr=('127.0.0.1', 8080), raddr=('127.0.0.1', 58800)>, ('127.0.0.1', 58800))
+
+Run that ``curl`` command a few more times. You should see a few more similar
+lines appear. Note that the ``curl`` command itself will exit with an error.
+That's fine: it happens because we didn't send any data.
+
+Now go ahead and stop the server running by hitting Ctrl+C in the first shell.
+You should see a ``KeyboardInterrupt`` error take the process down.
+
+What's the program above doing? Well, first it creates a
+:func:`socket <python:socket.socket>` object. This socket is then *bound* to
+a specific address: ``('0.0.0.0', 8080)``. This is a special address: it means
+that this socket should be listening for any traffic to TCP port 8080. Don't
+worry about the call to ``setsockopt``: it just makes sure you can run this
+program repeatedly.
+
+We then loop forever calling the :meth:`accept <python:socket.socket.accept>`
+method on the socket. The accept method blocks until someone attempts to
+connect to our TCP port: when they do, it returns a tuple: the first element is
+a new socket object, the second element is a tuple of the address the new
+connection is from. You can see this in the output from our ``h2server.py``
+script.
+
+At this point, we have a script that can accept inbound connections. This is a
+good start! Let's start getting HTTP/2 involved.
+
+
+Step 2: Add a H2Connection
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Now that we can listen for socket information, we want to prepare our HTTP/2
+connection object and start handing it data. For now, let's just see what
+happens as we feed it data.
+
+To make HTTP/2 connections, we need a tool that knows how to speak HTTP/2.
+Most versions of curl in the wild don't, so let's install a Python tool. In
+your Python environment, run ``pip install hyper``. This will install a Python
+command-line HTTP/2 tool called ``hyper``. To confirm that it works, try
+running this command and verifying that the output looks similar to the one
+shown below:
+
+.. code-block:: console
+
+ $ hyper GET https://nghttp2.org/httpbin/get
+ {'args': {},
+ 'headers': {'Host': 'nghttp2.org'},
+ 'origin': '10.0.0.2',
+ 'url': 'https://nghttp2.org/httpbin/get'}
+
+Assuming it works, you're now ready to start sending HTTP/2 data.
+
+Back in our ``h2server.py`` script, we're going to want to start handling data.
+Let's add a function that takes a socket returned from ``accept``, and reads
+data from it. Let's call that function ``handle``. That function should create
+a :class:`H2Connection <h2.connection.H2Connection>` object and then loop on
+the socket, reading data and passing it to the connection.
+
+To read data from a socket we need to call ``recv``. The ``recv`` function
+takes a number as its argument, which is the *maximum* amount of data to be
+returned from a single call (note that ``recv`` will return as soon as any data
+is available, even if that amount is vastly less than the number you passed to
+it). For the purposes of writing this kind of software the specific value is
+not enormously useful, but should not be overly large. For that reason, when
+you're unsure, a number like 4096 or 65535 is a good bet. We'll use 65535 for
+this example.
+
+The function should look something like this:
+
+.. code-block:: python
+
+ import h2.connection
+ import h2.config
+
+ def handle(sock):
+ config = h2.config.H2Configuration(client_side=False)
+ conn = h2.connection.H2Connection(config=config)
+
+ while True:
+ data = sock.recv(65535)
+ print(conn.receive_data(data))
+
+Let's update our main loop so that it passes data on to our new data handling
+function. Your ``h2server.py`` should end up looking like this:
+
+.. code-block:: python
+
+ import socket
+
+ import h2.connection
+ import h2.config
+
+ def handle(sock):
+ config = h2.config.H2Configuration(client_side=False)
+ conn = h2.connection.H2Connection(config=config)
+
+ while True:
+ data = sock.recv(65535)
+ if not data:
+ break
+
+ print(conn.receive_data(data))
+
+
+ sock = socket.socket()
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ sock.bind(('0.0.0.0', 8080))
+ sock.listen(5)
+
+ while True:
+ handle(sock.accept()[0])
+
+Running that in one shell, in your other shell you can run
+``hyper --h2 GET http://localhost:8080/``. That shell should hang, and you
+should then see the following output from your ``h2server.py`` shell:
+
+.. code-block:: console
+
+ $ python h2server.py
+ [<h2.events.RemoteSettingsChanged object at 0x10c4ee390>]
+
+You'll then need to kill ``hyper`` and ``h2server.py`` with Ctrl+C. Feel free
+to do this a few times, to see how things behave.
+
+So, what did we see here? When the connection was opened, we used the
+:meth:`recv <python:socket.socket.recv>` method to read some data from the
+socket, in a loop. We then passed that data to the connection object, which
+returned us a single event object:
+:class:`RemoteSettingsChanged <h2.events.RemoteSettingsChanged>`.
+
+But what we didn't see was anything else. So it seems like all ``hyper`` did
+was change its settings, but nothing else. If you look at the other ``hyper``
+window, you'll notice that it hangs for a while and then eventually fails with
+a socket timeout. It was waiting for something: what?
+
+Well, it turns out that at the start of a connection, both sides need to send
+a bit of data, called "the HTTP/2 preamble". We don't need to get into too much
+detail here, but basically both sides need to send a single block of HTTP/2
+data that tells the other side what their settings are. ``hyper`` did that,
+but we didn't.
+
+Let's do that next.
+
+
+Step 3: Sending the Preamble
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Hyper-h2 makes doing connection setup really easy. All you need to do is call
+the
+:meth:`initiate_connection <h2.connection.H2Connection.initiate_connection>`
+method, and then send the corresponding data. Let's update our ``handle``
+function to do just that:
+
+.. code-block:: python
+
+ def handle(sock):
+ config = h2.config.H2Configuration(client_side=False)
+ conn = h2.connection.H2Connection(config=config)
+ conn.initiate_connection()
+ sock.sendall(conn.data_to_send())
+
+ while True:
+ data = sock.recv(65535)
+ print(conn.receive_data(data))
+
+
+The big change here is the call to ``initiate_connection``, but there's another
+new method in there:
+:meth:`data_to_send <h2.connection.H2Connection.data_to_send>`.
+
+When you make function calls on your ``H2Connection`` object, these will often
+want to cause HTTP/2 data to be written out to the network. But Hyper-h2
+doesn't do any I/O, so it can't do that itself. Instead, it writes it to an
+internal buffer. You can retrieve data from this buffer using the
+``data_to_send`` method. There are some subtleties about that method, but we
+don't need to worry about them right now: all we need to do is make sure we're
+sending whatever data is outstanding.
+
+Your ``h2server.py`` script should now look like this:
+
+.. code-block:: python
+
+ import socket
+
+ import h2.connection
+ import h2.config
+
+ def handle(sock):
+ config = h2.config.H2Configuration(client_side=False)
+ conn = h2.connection.H2Connection(config=config)
+ conn.initiate_connection()
+ sock.sendall(conn.data_to_send())
+
+ while True:
+ data = sock.recv(65535)
+ if not data:
+ break
+
+ print(conn.receive_data(data))
+
+
+ sock = socket.socket()
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ sock.bind(('0.0.0.0', 8080))
+ sock.listen(5)
+
+ while True:
+ handle(sock.accept()[0])
+
+
+With this change made, rerun your ``h2server.py`` script and hit it with the
+same ``hyper`` command: ``hyper --h2 GET http://localhost:8080/``. The
+``hyper`` command still hangs, but this time we get a bit more output from our
+``h2server.py`` script:
+
+.. code-block:: console
+
+ $ python h2server.py
+ [<h2.events.RemoteSettingsChanged object at 0x10292d390>]
+ [<h2.events.SettingsAcknowledged object at 0x102b3a160>]
+ [<h2.events.RequestReceived object at 0x102b3a3c8>, <h2.events.StreamEnded object at 0x102b3a400>]
+
+So, what's happening?
+
+The first thing to note is that we're going around our loop more than once now.
+First, we receive some data that triggers a
+:class:`RemoteSettingsChanged <h2.events.RemoteSettingsChanged>` event.
+Then, we get some more data that triggers a
+:class:`SettingsAcknowledged <h2.events.SettingsAcknowledged>` event.
+Finally, even more data that triggers *two* events:
+:class:`RequestReceived <h2.events.RequestReceived>` and
+:class:`StreamEnded <h2.events.StreamEnded>`.
+
+So, what's happening is that ``hyper`` is telling us about its settings,
+acknowledging ours, and then sending us a request. Then it ends a *stream*,
+which is a HTTP/2 communications channel that holds a request and response
+pair.
+
+A stream isn't done until it's either *reset* or both sides *close* it:
+in this sense it's bi-directional. So what the ``StreamEnded`` event tells us
+is that ``hyper`` is closing its half of the stream: it won't send us any more
+data on that stream. That means the request is done.
+
+So why is ``hyper`` hanging? Well, we haven't sent a response yet: let's do
+that.
+
+
+Step 4: Handling Events
+~~~~~~~~~~~~~~~~~~~~~~~
+
+What we want to do is send a response when we receive a request. Happily, we
+get an event when we receive a request, so we can use that to be our signal.
+
+Let's define a new function that sends a response. For now, this response can
+just be a little bit of data that prints "it works!".
+
+The function should take the ``H2Connection`` object, and the event that
+signaled the request. Let's define it.
+
+.. code-block:: python
+
+ def send_response(conn, event):
+ stream_id = event.stream_id
+ conn.send_headers(
+ stream_id=stream_id,
+ headers=[
+ (':status', '200'),
+ ('server', 'basic-h2-server/1.0')
+ ],
+ )
+ conn.send_data(
+ stream_id=stream_id,
+ data=b'it works!',
+ end_stream=True
+ )
+
+So while this is only a short function, there's quite a lot going on here we
+need to unpack. Firstly, what's a stream ID? Earlier we discussed streams
+briefly, to say that they're a bi-directional communications channel that holds
+a request and response pair. Part of what makes HTTP/2 great is that there can
+be lots of streams going on at once, sending and receiving different requests
+and responses. To identify each stream, we use a *stream ID*. These are unique
+across the lifetime of a connection, and they go in ascending order.
+
+Most ``H2Connection`` functions take a stream ID: they require you to actively
+tell the connection which one to use. In this case, as a simple server, we will
+never need to choose a stream ID ourselves: the client will always choose one
+for us. That means we'll always be able to get the one we need off the events
+that fire.
+
+Next, we send some *headers*. In HTTP/2, a response is made up of some set of
+headers, and optionally some data. The headers have to come first: if you're a
+client then you'll be sending *request* headers, but in our case these headers
+are our *response* headers.
+
+Mostly these aren't very exciting, but you'll notice one special header in
+there: ``:status``. This is a HTTP/2-specific header, and it's used to hold the
+HTTP status code that used to go at the top of a HTTP response. Here, we're
+saying the response is ``200 OK``, which is successful.
+
+To send headers in Hyper-h2, you use the
+:meth:`send_headers <h2.connection.H2Connection.send_headers>` function.
+
+Next, we want to send the body data. To do that, we use the
+:meth:`send_data <h2.connection.H2Connection.send_data>` function. This also
+takes a stream ID. Note that the data is binary: Hyper-h2 does not work with
+unicode strings, so you *must* pass bytestrings to the ``H2Connection``. The
+one exception is headers: Hyper-h2 will automatically encode those into UTF-8.
+
+The last thing to note is that on our call to ``send_data``, we set
+``end_stream`` to ``True``. This tells Hyper-h2 (and the remote peer) that
+we're done with sending data: the response is over. Because we know that
+``hyper`` will have ended its side of the stream, when we end ours the stream
+will be totally done with.
+
+We're nearly ready to go with this: we just need to plumb this function in.
+Let's amend our ``handle`` function again:
+
+.. code-block:: python
+
+ import h2.events
+ import h2.config
+
+ def handle(sock):
+ config = h2.config.H2Configuration(client_side=False)
+ conn = h2.connection.H2Connection(config=config)
+ conn.initiate_connection()
+ sock.sendall(conn.data_to_send())
+
+ while True:
+ data = sock.recv(65535)
+ if not data:
+ break
+
+ events = conn.receive_data(data)
+ for event in events:
+ if isinstance(event, h2.events.RequestReceived):
+ send_response(conn, event)
+
+ data_to_send = conn.data_to_send()
+ if data_to_send:
+ sock.sendall(data_to_send)
+
+The changes here are all at the end. Now, when we receive some events, we
+look through them for the ``RequestReceived`` event. If we find it, we make
+sure we send a response.
+
+Then, at the bottom of the loop we check whether we have any data to send, and
+if we do, we send it. Then, we repeat again.
+
+With these changes, your ``h2server.py`` file should look like this:
+
+.. code-block:: python
+
+ import socket
+
+ import h2.connection
+ import h2.events
+ import h2.config
+
+ def send_response(conn, event):
+ stream_id = event.stream_id
+ conn.send_headers(
+ stream_id=stream_id,
+ headers=[
+ (':status', '200'),
+ ('server', 'basic-h2-server/1.0')
+ ],
+ )
+ conn.send_data(
+ stream_id=stream_id,
+ data=b'it works!',
+ end_stream=True
+ )
+
+ def handle(sock):
+ config = h2.config.H2Configuration(client_side=False)
+ conn = h2.connection.H2Connection(config=config)
+ conn.initiate_connection()
+ sock.sendall(conn.data_to_send())
+
+ while True:
+ data = sock.recv(65535)
+ if not data:
+ break
+
+ events = conn.receive_data(data)
+ for event in events:
+ if isinstance(event, h2.events.RequestReceived):
+ send_response(conn, event)
+
+ data_to_send = conn.data_to_send()
+ if data_to_send:
+ sock.sendall(data_to_send)
+
+
+ sock = socket.socket()
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ sock.bind(('0.0.0.0', 8080))
+ sock.listen(5)
+
+ while True:
+ handle(sock.accept()[0])
+
+Alright. Let's run this, and then run our ``hyper`` command again.
+
+This time, nothing is printed from our server, and the ``hyper`` side prints
+``it works!``. Success! Try running it a few more times, and we can see that
+not only does it work the first time, it works the other times too!
+
+We can speak HTTP/2! Let's add the final step: returning the JSON-encoded
+request headers.
+
+Step 5: Returning Headers
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If we want to return the request headers in JSON, the first thing we have to do
+is find them. Handily, if you check the documentation for
+:class:`RequestReceived <h2.events.RequestReceived>` you'll find that this
+event carries, in addition to the stream ID, the request headers.
+
+This means we can make a really simple change to our ``send_response``
+function to take those headers and encode them as a JSON object. Let's do that:
+
+.. code-block:: python
+
+ import json
+
+ def send_response(conn, event):
+ stream_id = event.stream_id
+ response_data = json.dumps(dict(event.headers)).encode('utf-8')
+
+ conn.send_headers(
+ stream_id=stream_id,
+ headers=[
+ (':status', '200'),
+ ('server', 'basic-h2-server/1.0'),
+ ('content-length', str(len(response_data))),
+ ('content-type', 'application/json'),
+ ],
+ )
+ conn.send_data(
+ stream_id=stream_id,
+ data=response_data,
+ end_stream=True
+ )
+
+This is a really simple change, but it's all we need to do: a few extra headers
+and the JSON dump, but that's it.
+
+Step 6: Bringing It All Together
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This should be all we need!
+
+Let's take all the work we just did and throw that into our ``h2server.py``
+file, which should now look like this:
+
+.. code-block:: python
+
+ import json
+ import socket
+
+ import h2.connection
+ import h2.events
+ import h2.config
+
+ def send_response(conn, event):
+ stream_id = event.stream_id
+ response_data = json.dumps(dict(event.headers)).encode('utf-8')
+
+ conn.send_headers(
+ stream_id=stream_id,
+ headers=[
+ (':status', '200'),
+ ('server', 'basic-h2-server/1.0'),
+ ('content-length', str(len(response_data))),
+ ('content-type', 'application/json'),
+ ],
+ )
+ conn.send_data(
+ stream_id=stream_id,
+ data=response_data,
+ end_stream=True
+ )
+
+ def handle(sock):
+ config = h2.config.H2Configuration(client_side=False)
+ conn = h2.connection.H2Connection(config=config)
+ conn.initiate_connection()
+ sock.sendall(conn.data_to_send())
+
+ while True:
+ data = sock.recv(65535)
+ if not data:
+ break
+
+ events = conn.receive_data(data)
+ for event in events:
+ if isinstance(event, h2.events.RequestReceived):
+ send_response(conn, event)
+
+ data_to_send = conn.data_to_send()
+ if data_to_send:
+ sock.sendall(data_to_send)
+
+
+ sock = socket.socket()
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ sock.bind(('0.0.0.0', 8080))
+ sock.listen(5)
+
+ while True:
+ handle(sock.accept()[0])
+
+Now, execute ``h2server.py`` and then point ``hyper`` at it again. You should
+see something like the following output from ``hyper``:
+
+.. code-block:: console
+
+ $ hyper --h2 GET http://localhost:8080/
+ {":scheme": "http", ":authority": "localhost", ":method": "GET", ":path": "/"}
+
+Here you can see the HTTP/2 request 'special headers' that ``hyper`` sends.
+These are similar to the ``:status`` header we have to send on our response:
+they encode important parts of the HTTP request in a clearly-defined way. If
+you were writing a client stack using Hyper-h2, you'd need to make sure you
+were sending those headers.
+
+Congratulations!
+~~~~~~~~~~~~~~~~
+
+Congratulations! You've written your first HTTP/2 server! If you want to extend
+it, there are a few directions you could investigate:
+
+- We didn't handle a few events that we saw were being raised: you could add
+ some methods to handle those appropriately.
+- Right now our server is single threaded, so it can only handle one client at
+ a time. Consider rewriting this server to use threads, or writing this
+ server again using your favourite asynchronous programming framework.
+
+  If you plan to use threads, you should know that a ``H2Connection`` object is
+  deliberately not thread-safe. As a possible design pattern, consider creating
+  threads and passing the sockets returned by ``accept`` to those threads, and
+  then letting those threads create their own ``H2Connection`` objects; a
+  minimal sketch of this pattern appears after this list.
+- Take a look at some of our long-form code examples in :doc:`examples`.
+- Alternatively, try playing around with our examples in our repository's
+ `examples directory`_. These examples are a bit more fully-featured, and can
+ be reached from your web browser. Try adjusting what they do, or adding new
+ features to them!
+- You may want to make this server reachable from your web browser. To do that,
+ you'll need to add proper TLS support to your server. This can be tricky, and
+ in many cases requires `PyOpenSSL`_ in addition to the other libraries you
+ have installed. Check the `Eventlet example`_ to see what PyOpenSSL code is
+ required to TLS-ify your server.
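+
+As a minimal sketch of the thread-per-connection pattern mentioned above,
+reusing the ``handle`` function built in this document (each thread creates
+its own ``H2Connection``, so no locking is needed):
+
+.. code-block:: python
+
+    import socket
+    import threading
+
+    sock = socket.socket()
+    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+    sock.bind(('0.0.0.0', 8080))
+    sock.listen(5)
+
+    while True:
+        client_sock, _ = sock.accept()
+        # Each connection gets its own thread and, inside handle(), its own
+        # H2Connection object.
+        thread = threading.Thread(target=handle, args=(client_sock,), daemon=True)
+        thread.start()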
+
+
+
+.. _event loop: https://en.wikipedia.org/wiki/Event_loop
+.. _httpbin.org/get: https://httpbin.org/get
+.. _examples directory: https://github.com/python-hyper/hyper-h2/tree/master/examples
+.. _standard library's socket module: https://docs.python.org/3.5/library/socket.html
+.. _Application Layer Protocol Negotiation: https://en.wikipedia.org/wiki/Application-Layer_Protocol_Negotiation
+.. _get your certificate here: https://raw.githubusercontent.com/python-hyper/hyper-h2/master/examples/twisted/server.crt
+.. _get your private key here: https://raw.githubusercontent.com/python-hyper/hyper-h2/master/examples/twisted/server.key
+.. _PyOpenSSL: http://pyopenssl.readthedocs.org/
+.. _Eventlet example: https://github.com/python-hyper/hyper-h2/blob/master/examples/eventlet/eventlet-server.py
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/conf.py b/testing/web-platform/tests/tools/third_party/h2/docs/source/conf.py
new file mode 100644
index 0000000000..a15a214634
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/conf.py
@@ -0,0 +1,270 @@
+# -*- coding: utf-8 -*-
+#
+# hyper-h2 documentation build configuration file, created by
+# sphinx-quickstart on Thu Sep 17 10:06:02 2015.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('../..'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.intersphinx',
+ 'sphinx.ext.viewcode',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'hyper-h2'
+copyright = u'2015, Cory Benfield'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '3.2.0'
+# The full version, including alpha/beta/rc tags.
+release = '3.2.0'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'hyper-h2doc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ ('index', 'hyper-h2.tex', u'hyper-h2 Documentation',
+ u'Cory Benfield', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'hyper-h2', u'hyper-h2 Documentation',
+ [u'Cory Benfield'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'hyper-h2', u'hyper-h2 Documentation',
+ u'Cory Benfield', 'hyper-h2', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+ 'python': ('https://docs.python.org/3.5/', None),
+ 'hpack': ('https://python-hyper.org/hpack/en/stable/', None),
+ 'pyopenssl': ('https://pyopenssl.readthedocs.org/en/latest/', None),
+}
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/contributors.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/contributors.rst
new file mode 100644
index 0000000000..d84c791f6d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/contributors.rst
@@ -0,0 +1,4 @@
+Contributors
+============
+
+.. include:: ../../CONTRIBUTORS.rst
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/curio-example.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/curio-example.rst
new file mode 100644
index 0000000000..7cdb61608a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/curio-example.rst
@@ -0,0 +1,17 @@
+Curio Example Server
+====================
+
+This example is a basic HTTP/2 server written using `curio`_, David Beazley's
+example of how to build a concurrent networking framework using Python 3.5's
+new ``async``/``await`` syntax.
+
+This example is notable for demonstrating the correct use of HTTP/2 flow
+control with Hyper-h2. It is also a good example of the brand new syntax.
+
+.. literalinclude:: ../../examples/curio/curio-server.py
+ :language: python
+ :linenos:
+ :encoding: utf-8
+
+
+.. _curio: https://curio.readthedocs.org/en/latest/
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/eventlet-example.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/eventlet-example.rst
new file mode 100644
index 0000000000..a23b5e248f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/eventlet-example.rst
@@ -0,0 +1,19 @@
+Eventlet Example Server
+=======================
+
+This example is a basic HTTP/2 server written using the `eventlet`_ concurrent
+networking framework. This example is notable for demonstrating how to
+configure `PyOpenSSL`_, which `eventlet`_ uses for its TLS layer.
+
+In terms of HTTP/2 functionality, this example is very simple: it returns the
+request headers as a JSON document to the caller. It does not obey HTTP/2 flow
+control, which is a flaw, but it is otherwise functional.
+
+.. literalinclude:: ../../examples/eventlet/eventlet-server.py
+ :language: python
+ :linenos:
+ :encoding: utf-8
+
+
+.. _eventlet: http://eventlet.net/
+.. _PyOpenSSL: https://pyopenssl.readthedocs.org/en/stable/
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/examples.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/examples.rst
new file mode 100644
index 0000000000..ed7c5037bb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/examples.rst
@@ -0,0 +1,28 @@
+Code Examples
+=============
+
+This section of the documentation contains long-form code examples. These are
+intended as references for developers who would like to get an understanding
+of how Hyper-h2 fits in with various Python I/O frameworks.
+
+Example Servers
+---------------
+
+.. toctree::
+ :maxdepth: 2
+
+ asyncio-example
+ twisted-example
+ eventlet-example
+ curio-example
+ tornado-example
+ wsgi-example
+
+Example Clients
+---------------
+
+.. toctree::
+ :maxdepth: 2
+
+ twisted-head-example
+ twisted-post-example
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/index.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/index.rst
new file mode 100644
index 0000000000..be85dec7c3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/index.rst
@@ -0,0 +1,41 @@
+.. hyper-h2 documentation master file, created by
+ sphinx-quickstart on Thu Sep 17 10:06:02 2015.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Hyper-h2: A pure-Python HTTP/2 protocol stack
+=============================================
+
+Hyper-h2 is an HTTP/2 protocol stack, written entirely in Python. The goal of
+Hyper-h2 is to be a common HTTP/2 stack for the Python ecosystem,
+usable in all programs regardless of concurrency model or environment.
+
+To achieve this, Hyper-h2 is entirely self-contained: it does no I/O of any
+kind, leaving that up to a wrapper library to control. This ensures that it can
+seamlessly work in all kinds of environments, from single-threaded code to
+Twisted.
+
+Its goal is to be 100% compatible with RFC 7540, implementing a complete HTTP/2
+protocol stack built on a set of finite state machines. Its secondary goals are
+to be fast, clear, and efficient.
+
+For usage examples, see :doc:`basic-usage` or consult the examples in the
+repository.
+
+Contents
+--------
+
+.. toctree::
+ :maxdepth: 2
+
+ installation
+ basic-usage
+ negotiating-http2
+ examples
+ advanced-usage
+ low-level
+ api
+ testimonials
+ release-process
+ release-notes
+ contributors \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/installation.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/installation.rst
new file mode 100644
index 0000000000..683085f97b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/installation.rst
@@ -0,0 +1,18 @@
+Installation
+============
+
+Hyper-h2 is a pure-Python project. This means installing it is extremely
+simple. To get the latest release from PyPI, simply run:
+
+.. code-block:: console
+
+ $ pip install h2
+
+Alternatively, feel free to download one of the release tarballs from
+`our GitHub page`_, extract it to your favourite directory, and then run:
+
+.. code-block:: console
+
+ $ python setup.py install
+
+.. _our GitHub page: https://github.com/python-hyper/hyper-h2
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/low-level.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/low-level.rst
new file mode 100644
index 0000000000..824ba8e6ea
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/low-level.rst
@@ -0,0 +1,159 @@
+Low-Level Details
+=================
+
+.. warning:: This section of the documentation covers low-level implementation
+ details of hyper-h2. This is most likely to be of use to hyper-h2
+ developers and to other HTTP/2 implementers, though it could well
+ be of general interest. Feel free to peruse it, but if you're
+ looking for information about how to *use* hyper-h2 you should
+ consider looking elsewhere.
+
+State Machines
+--------------
+
+hyper-h2 is fundamentally built on top of a pair of interacting Finite State
+Machines. One of these FSMs manages per-connection state, and another manages
+per-stream state. Almost without exception (see :ref:`priority` for more
+details) every single frame is unconditionally translated into events for
+both state machines, and those state machines are then spun.
+
+The advantage of a system such as this is that the finite state machines can
+very densely encode the kinds of things that are allowed at any particular
+moment in an HTTP/2 connection. More importantly, almost all protocols
+are defined *in terms* of finite state machines: that is, protocol descriptions
+can be reduced to a number of states and inputs. That makes FSMs a very natural
+tool for implementing protocol stacks.
+
+Indeed, most protocol implementations that do not explicitly encode a finite
+state machine almost always *implicitly* encode a finite state machine, by
+using classes with a bunch of variables that amount to state-tracking
+variables, or by using the call-stack as an implicit state tracking mechanism.
+While these methods are not immediately problematic, they tend to lack
+*explicitness*, and can lead to subtle bugs of the form "protocol action X is
+incorrectly allowed in state Y".
+
+For these reasons, we have implemented two *explicit* finite state machines.
+These machines aim to encode most of the protocol-specific state, in particular
+regarding what frame is allowed at what time. This goal is sometimes not
+achieved: in particular, as of this writing the *stream* FSM contains a number
+of other state variables that really ought to be rolled into the state machine
+itself in the form of new states, or in the form of a transformation of the
+FSM to use state *vectors* instead of state *scalars*.
+
+The following sections contain some implementers' notes on these FSMs.
+
+Connection State Machine
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+The "outer" state machine, the first one that is encountered when sending or
+receiving data, is the connection state machine. This state machine tracks
+whole-connection state.
+
+This state machine is primarily intended to forbid certain actions on the basis
+of whether the implementation is acting as a client or a server. For example,
+clients are not permitted to send ``PUSH_PROMISE`` frames: this state machine
+forbids that by refusing to define a valid transition from the ``CLIENT_OPEN``
+state for the ``SEND_PUSH_PROMISE`` event.
+
+Otherwise, this particular state machine triggers no side-effects. It has
+very coarse, high-level functionality.
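+
+For example, here is a minimal sketch (not taken from the hyper-h2 test
+suite) of that refusal in action: a client-side connection that attempts to
+push has the ``SEND_PUSH_PROMISE`` input rejected with a
+:class:`ProtocolError <h2.exceptions.ProtocolError>`.
+
+.. code-block:: python
+
+    import h2.connection
+    import h2.exceptions
+
+    conn = h2.connection.H2Connection()  # client-side by default
+    conn.initiate_connection()
+
+    try:
+        # Clients may not send PUSH_PROMISE, so this raises ProtocolError.
+        conn.push_stream(
+            stream_id=1,
+            promised_stream_id=2,
+            request_headers=[(':method', 'GET'), (':path', '/')],
+        )
+    except h2.exceptions.ProtocolError:
+        pass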
+
+A visual representation of this FSM is shown below:
+
+.. image:: _static/h2.connection.H2ConnectionStateMachine.dot.png
+ :alt: A visual representation of the connection FSM.
+ :target: _static/h2.connection.H2ConnectionStateMachine.dot.png
+
+
+.. _stream-state-machine:
+
+Stream State Machine
+~~~~~~~~~~~~~~~~~~~~
+
+Once the connection state machine has been spun, any frame that belongs to a
+stream is passed to the stream state machine for its given stream. Each stream
+has its own instance of the state machine, but all of them share the transition
+table: this is because the table itself is sufficiently large that having it be
+per-instance would be a ridiculous memory overhead.
+
+Unlike the connection state machine, the stream state machine is quite complex.
+This is because it frequently needs to encode some side-effects. The most
+common side-effect is emitting a ``RST_STREAM`` frame when an error is
+encountered: the need to do this means that far more transitions need to be
+encoded than for the connection state machine.
+
+Many of the side-effect functions in this state machine also raise
+:class:`ProtocolError <h2.exceptions.ProtocolError>` exceptions. This is almost
+always done on the basis of an extra state variable, which is an annoying code
+smell: it should always be possible for the state machine itself to police
+these using explicit state management. A future refactor will hopefully address
+this problem by making these additional state variables part of the state
+definitions in the FSM, which will lead to an expansion of the number of states
+but a greater degree of simplicity in understanding and tracking what is going
+on in the state machine.
+
+The other action taken by the side-effect functions defined here is returning
+:ref:`events <h2-events-basic>`. Most of these events are returned directly to
+the user, and reflect the specific state transition that has taken place, but
+some of the events are purely *internal*: they are used to signal to other
+parts of the hyper-h2 codebase what action has been taken.
+
+The major use of the internal events functionality at this time is for
+validating header blocks: there are different rules for request headers than
+there are for response headers, and different rules again for trailers. The
+internal events are used to determine *exactly what* kind of data the user is
+attempting to send, and that information is then used to do the correct kind of
+validation. This approach ensures that the final source of truth about what's
+happening at the protocol level lives inside the FSM, which is an extremely
+important design principle we want to continue to enshrine in hyper-h2.
+
+A visual representation of this FSM is shown below:
+
+.. image:: _static/h2.stream.H2StreamStateMachine.dot.png
+ :alt: A visual representation of the stream FSM.
+ :target: _static/h2.stream.H2StreamStateMachine.dot.png
+
+
+.. _priority:
+
+Priority
+~~~~~~~~
+
+In the :ref:`stream-state-machine` section we said that any frame that belongs
+to a stream is passed to the stream state machine. This turns out to be not
+quite true.
+
+Specifically, while ``PRIORITY`` frames are technically sent on a given stream
+(that is, `RFC 7540 Section 6.3`_ defines them as "always identifying a stream"
+and forbids the use of stream ID ``0`` for them), in practice they are almost
+completely exempt from the usual stream FSM behaviour. Indeed, the RFC
+has this to say:
+
+ The ``PRIORITY`` frame can be sent on a stream in any state, though it
+ cannot be sent between consecutive frames that comprise a single
+ header block (Section 4.3).
+
+Given that the consecutive header block requirement is handled outside of the
+FSMs, this section of the RFC essentially means that there is *never* a
+situation where it is invalid to receive a ``PRIORITY`` frame. This means that
+including it in the stream FSM would require that we allow ``SEND_PRIORITY``
+and ``RECV_PRIORITY`` in all states.
+
+This is not a totally onerous task; however, another key point is that hyper-h2
+uses the *absence* of a stream state machine to flag a closed stream. This is
+primarily for memory conservation reasons: if we needed to keep around an FSM
+for every stream we've ever seen, that would cause long-lived HTTP/2
+connections to consume increasingly large amounts of memory. On top of this,
+it would require us to create a stream FSM each time we received a ``PRIORITY``
+frame for a given stream, giving a malicious peer an easy route to force a
+hyper-h2 user to allocate nearly unbounded amounts of memory.
+
+For this reason, hyper-h2 circumvents the stream FSM entirely for ``PRIORITY``
+frames. Instead, these frames are treated as being connection-level frames that
+*just happen* to identify a specific stream. They do not bring streams into
+being, or in any sense interact with hyper-h2's view of streams. Their stream
+details are treated strictly as metadata that hyper-h2 is not interested in
+beyond being able to parse it out.
+
+
+.. _RFC 7540 Section 6.3: https://tools.ietf.org/html/rfc7540#section-6.3
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/negotiating-http2.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/negotiating-http2.rst
new file mode 100644
index 0000000000..20d58a71f1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/negotiating-http2.rst
@@ -0,0 +1,103 @@
+Negotiating HTTP/2
+==================
+
+`RFC 7540`_ specifies three methods of negotiating HTTP/2 connections. This document outlines how to use Hyper-h2 with each one.
+
+.. _starting-alpn:
+
+HTTPS URLs (ALPN)
+-------------------------
+
+Starting HTTP/2 for HTTPS URLs is outlined in `RFC 7540 Section 3.3`_. In this case, the client and server use a TLS extension to negotiate HTTP/2: `ALPN`_. How to use ALPN is currently not covered in this document: please consult the documentation for either the :mod:`ssl module <python:ssl>` in the standard library, or the :mod:`PyOpenSSL <pyopenssl:OpenSSL.SSL>` third-party modules, for more on this topic.
+
+This method is the simplest to use once the TLS connection is established. To use it with Hyper-h2, after you've established the connection and confirmed that HTTP/2 has been negotiated with `ALPN`_, create a :class:`H2Connection <h2.connection.H2Connection>` object and call :meth:`H2Connection.initiate_connection <h2.connection.H2Connection.initiate_connection>`. This will ensure that the appropriate preamble data is placed in the data buffer. You should then immediately send the data returned by :meth:`H2Connection.data_to_send <h2.connection.H2Connection.data_to_send>` on your TLS connection.
+
+At this point, you're free to use all the HTTP/2 functionality provided by Hyper-h2.
+
+.. note::
+ Although Hyper-h2 is not concerned with negotiating protocol versions, it is important to note that support for `ALPN`_ is not available in the standard library of Python versions < 2.7.9.
+  As a consequence, clients may encounter various errors due to protocol version mismatches.
+
+Server Setup Example
+~~~~~~~~~~~~~~~~~~~~
+
+This example uses the APIs as defined in Python 3.5. If you are using an older version of Python you may not have access to the APIs used here. As noted above, please consult the documentation for the :mod:`ssl module <python:ssl>` to confirm.
+
+.. literalinclude:: ../../examples/fragments/server_https_setup_fragment.py
+ :language: python
+ :linenos:
+ :encoding: utf-8
+
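+In outline, and purely as a sketch (it is not necessarily identical to the
+fragment included above; the certificate paths and the port are
+placeholders), the server-side TLS setup with ALPN looks something like this:
+
+.. code-block:: python
+
+    import socket
+    import ssl
+
+    import h2.config
+    import h2.connection
+
+    ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
+    ssl_context.options |= (
+        ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1 | ssl.OP_NO_COMPRESSION
+    )
+    ssl_context.load_cert_chain(certfile="cert.crt", keyfile="cert.key")
+    ssl_context.set_alpn_protocols(["h2"])
+
+    listener = socket.socket()
+    listener.bind(('0.0.0.0', 8443))
+    listener.listen(5)
+
+    tcp_conn, _ = listener.accept()
+    tls_conn = ssl_context.wrap_socket(tcp_conn, server_side=True)
+
+    if tls_conn.selected_alpn_protocol() == 'h2':
+        conn = h2.connection.H2Connection(
+            config=h2.config.H2Configuration(client_side=False)
+        )
+        conn.initiate_connection()
+        tls_conn.sendall(conn.data_to_send())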
+
+Client Setup Example
+~~~~~~~~~~~~~~~~~~~~
+
+The client example is very similar to the server example above. The :class:`SSLContext <python:ssl.SSLContext>` object requires some minor changes, as does the :class:`H2Connection <h2.connection.H2Connection>`, but the bulk of the code is the same.
+
+.. literalinclude:: ../../examples/fragments/client_https_setup_fragment.py
+ :language: python
+ :linenos:
+ :encoding: utf-8
+
+
+.. _starting-upgrade:
+
+HTTP URLs (Upgrade)
+-------------------
+
+Starting HTTP/2 for HTTP URLs is outlined in `RFC 7540 Section 3.2`_. In this case, the client and server use the HTTP Upgrade mechanism originally described in `RFC 7230 Section 6.7`_. The client sends its initial HTTP/1.1 request with two extra headers. The first is ``Upgrade: h2c``, which requests upgrade to cleartext HTTP/2. The second is an ``HTTP2-Settings`` header, which contains a specially formatted string that encodes an HTTP/2 Settings frame.
+
+To do this with Hyper-h2 you have two slightly different flows: one for clients, one for servers.
+
+Clients
+~~~~~~~
+
+For a client, when sending the first request you should manually add your ``Upgrade`` header. You should then create a :class:`H2Connection <h2.connection.H2Connection>` object and call :meth:`H2Connection.initiate_upgrade_connection <h2.connection.H2Connection.initiate_upgrade_connection>` with no arguments. This method will return a bytestring to use as the value of your ``HTTP2-Settings`` header.
+
+If the server returns a ``101`` status code, it has accepted the upgrade, and you should immediately send the data returned by :meth:`H2Connection.data_to_send <h2.connection.H2Connection.data_to_send>`. Now you should consume the entire ``101`` header block. All data after the ``101`` header block is HTTP/2 data that should be fed directly to :meth:`H2Connection.receive_data <h2.connection.H2Connection.receive_data>` and handled as normal with Hyper-h2.
+
+If the server does not return a ``101`` status code then it is not upgrading. Continue with HTTP/1.1 as normal: you may throw away your :class:`H2Connection <h2.connection.H2Connection>` object, as it is of no further use.
+
+The server will respond to your original request in HTTP/2. Please pay attention to the events received from Hyper-h2, as they will define the server's response.
+
+Client Example
+^^^^^^^^^^^^^^
+
+The code below demonstrates how to handle a plaintext upgrade from the perspective of the client. For the purposes of keeping the example code as simple and generic as possible it uses the synchronous socket API that comes with the Python standard library: if you want to use asynchronous I/O, you will need to translate this code to the appropriate idiom.
+
+.. literalinclude:: ../../examples/fragments/client_upgrade_fragment.py
+ :language: python
+ :linenos:
+ :encoding: utf-8
+
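+In outline, the h2-specific part of this client flow is small. The sketch
+below assumes a connected ``sock`` object and leaves the HTTP/1.1 request and
+response handling to you:
+
+.. code-block:: python
+
+    import h2.connection
+
+    conn = h2.connection.H2Connection()
+    settings_header_value = conn.initiate_upgrade_connection()
+
+    # Send a normal HTTP/1.1 request carrying the upgrade headers, roughly:
+    #   Upgrade: h2c
+    #   HTTP2-Settings: <settings_header_value>
+    #   Connection: Upgrade, HTTP2-Settings
+    # ...then read the HTTP/1.1 response. If the status code is 101:
+    sock.sendall(conn.data_to_send())  # the HTTP/2 client preface
+    # Every byte received after the 101 header block is HTTP/2: feed it to
+    # conn.receive_data() as usual.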
+
+Servers
+~~~~~~~
+
+If the first request you receive on a connection from the client contains an ``Upgrade`` header with the ``h2c`` token in it, and you're willing to upgrade, you should create a :class:`H2Connection <h2.connection.H2Connection>` object and call :meth:`H2Connection.initiate_upgrade_connection <h2.connection.H2Connection.initiate_upgrade_connection>` with the value of the ``HTTP2-Settings`` header (as a bytestring) as the only argument.
+
+Then, you should send back a ``101`` response that contains ``h2c`` in the ``Upgrade`` header. That response will inform the client that you're switching to HTTP/2. Then, you should immediately send the data that is returned to you by :meth:`H2Connection.data_to_send <h2.connection.H2Connection.data_to_send>` on the connection: this is a necessary part of the HTTP/2 upgrade process.
+
+At this point, you may now respond to the original HTTP/1.1 request in HTTP/2 by calling the appropriate methods on the :class:`H2Connection <h2.connection.H2Connection>` object. No further HTTP/1.1 may be sent on this connection: from this point onward, all data sent by you and the client will be HTTP/2 data.
+
+Server Example
+^^^^^^^^^^^^^^
+
+The code below demonstrates how to handle a plaintext upgrade from the perspective of the server. For the purposes of keeping the example code as simple and generic as possible it uses the synchronous socket API that comes with the Python standard library: if you want to use asynchronous I/O, you will need to translate this code to the appropriate idiom.
+
+.. literalinclude:: ../../examples/fragments/server_upgrade_fragment.py
+ :language: python
+ :linenos:
+ :encoding: utf-8
+
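+Again, the h2-specific part of the server flow is small. The sketch below
+assumes ``settings_header`` holds the value of the client's
+``HTTP2-Settings`` header (as a bytestring) and ``sock`` is the accepted
+socket:
+
+.. code-block:: python
+
+    import h2.config
+    import h2.connection
+
+    config = h2.config.H2Configuration(client_side=False)
+    conn = h2.connection.H2Connection(config=config)
+    conn.initiate_upgrade_connection(settings_header)
+
+    # The 101 response itself is plain HTTP/1.1 bytes...
+    sock.sendall(
+        b"HTTP/1.1 101 Switching Protocols\r\n"
+        b"Connection: Upgrade\r\n"
+        b"Upgrade: h2c\r\n"
+        b"\r\n"
+    )
+    # ...followed immediately by the HTTP/2 data from the connection object.
+    sock.sendall(conn.data_to_send())
+
+    # The original HTTP/1.1 request is treated as stream 1: respond to it
+    # with send_headers()/send_data() like any other HTTP/2 stream.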
+
+Prior Knowledge
+---------------
+
+It's possible that you as a client know that a particular server supports HTTP/2, and that you do not need to perform any of the negotiations described above. In that case, you may follow the steps in :ref:`starting-alpn`, ignoring all references to ALPN: there's no need to perform the upgrade dance described in :ref:`starting-upgrade`.
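+
+In that case the connection setup reduces to a few lines. A minimal sketch
+(``sock`` is assumed to be a socket that is already connected to the server):
+
+.. code-block:: python
+
+    import h2.connection
+
+    conn = h2.connection.H2Connection()
+    conn.initiate_connection()
+    sock.sendall(conn.data_to_send())  # connection preface plus SETTINGS
+
+    # From here on, use the connection exactly as in the ALPN case.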
+
+.. _RFC 7540: https://tools.ietf.org/html/rfc7540
+.. _RFC 7540 Section 3.2: https://tools.ietf.org/html/rfc7540#section-3.2
+.. _RFC 7540 Section 3.3: https://tools.ietf.org/html/rfc7540#section-3.3
+.. _ALPN: https://en.wikipedia.org/wiki/Application-Layer_Protocol_Negotiation
+.. _RFC 7230 Section 6.7: https://tools.ietf.org/html/rfc7230#section-6.7
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/release-notes.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/release-notes.rst
new file mode 100644
index 0000000000..fa425f1fef
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/release-notes.rst
@@ -0,0 +1,101 @@
+Release Notes
+=============
+
+This document contains release notes for Hyper-h2. In addition to the
+:ref:`detailed-release-notes` found at the bottom of this document, this
+document also includes a high-level prose overview of each major release after
+1.0.0.
+
+High Level Notes
+----------------
+
+3.0.0: 24 March 2017
+~~~~~~~~~~~~~~~~~~~~
+
+The Hyper-h2 team and the Hyper project are delighted to announce the release
+of Hyper-h2 version 3.0.0! Unlike the really notable 2.0.0 release, this
+release is proportionally quite small: however, it has the effect of removing a
+lot of cruft and complexity that has built up in the codebase over the lifetime
+of the v2 release series.
+
+This release was motivated primarily by discovering that applications that
+attempted to use both HTTP/1.1 and HTTP/2 using hyper-h2 would encounter
+problems with cookies, because hyper-h2 did not join together cookie headers as
+required by RFC 7540. Normally adding such behaviour would be a non-breaking
+change, but we previously had no flags to prevent normalization of received
+HTTP headers.
+
+Because it makes no sense for the cookie to be split *by default*, we needed to
+add a controlling flag and set it to true. The breaking nature of this change
+is very subtle, and it's possible most users would never notice, but
+nevertheless it *is* a breaking change and we need to treat it as such.
+
+Happily, we can take this opportunity to finalise a bunch of deprecations we'd
+made over the past year. The v2 release series was long-lived and successful,
+having had a series of releases across the past year-and-a-bit, and the Hyper
+team are very proud of it. However, it's time to open a new chapter, and remove
+the deprecated code.
+
+The past year has been enormously productive for the Hyper team. A total of 30
+v2 releases were made, an enormous amount of work. A good number of people have
+made their first contribution in this time, more than I can thank reasonably
+without taking up an unreasonable amount of space in this document, so instead
+I invite you to check out `our awesome contributor list`_.
+
+We're looking forward to the next chapter in hyper-h2: it's been a fun ride so
+far, and we hope even more of you come along and join in the fun over the next
+year!
+
+.. _our awesome contributor list: https://github.com/python-hyper/hyper-h2/graphs/contributors
+
+
+2.0.0: 25 January 2016
+~~~~~~~~~~~~~~~~~~~~~~
+
+The Hyper-h2 team and the Hyper project are delighted to announce the release
+of Hyper-h2 version 2.0.0! This is an enormous release that contains a gigantic
+collection of new features and fixes, with the goal of making it easier than
+ever to use Hyper-h2 to build a compliant HTTP/2 server or client.
+
+An enormous chunk of this work has been focused on tighter enforcement of
+restrictions in RFC 7540, ensuring that we correctly police the actions of
+remote peers, and error appropriately when those peers violate the
+specification. Several of these constitute breaking changes, because data that
+was previously received and handled without obvious error now raises
+``ProtocolError`` exceptions and causes the connection to be terminated.
+
+Additionally, the public API was cleaned up: several helper methods that had
+been inadvertently exposed were removed. The team wants to
+stress that while Hyper-h2 follows semantic versioning, the guarantees of
+semver apply only to the public API as documented in :doc:`api`. Reducing the
+surface area of these APIs makes it easier for us to continue to ensure that
+the guarantees of semver are respected on our public API.
+
+We also attempted to clear up some of the warts that had appeared in the API,
+and add features that are helpful for implementing HTTP/2 endpoints. For
+example, the :class:`H2Connection <h2.connection.H2Connection>` object now
+exposes a method for generating the next stream ID that your client or server
+can use to initiate a connection (:meth:`get_next_available_stream_id
+<h2.connection.H2Connection.get_next_available_stream_id>`). We also removed
+some needless return values that were guaranteed to return empty lists, which
+were an attempt to make a forward-looking guarantee that was entirely unneeded.
+
+Altogether, this has been an extremely productive period for Hyper-h2, and a
+lot of great work has been done by the community. To that end, we'd also like
+to extend a great thank-you to those contributors who made their first contribution
+to the project between release 1.0.0 and 2.0.0. Many thanks to:
+`Thomas Kriechbaumer`_, `Alex Chan`_, `Maximilian Hils`_, and `Glyph`_. For a
+full historical list of contributors, see :doc:`contributors`.
+
+We're looking forward to the next few months of Python HTTP/2 work, and hoping
+that you'll find lots of excellent HTTP/2 applications to build with Hyper-h2!
+
+
+.. _Thomas Kriechbaumer: https://github.com/Kriechi
+.. _Alex Chan: https://github.com/alexwlchan
+.. _Maximilian Hils: https://github.com/mhils
+.. _Glyph: https://github.com/glyph
+
+
+.. _detailed-release-notes:
+.. include:: ../../HISTORY.rst
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/release-process.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/release-process.rst
new file mode 100644
index 0000000000..e7b46064d5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/release-process.rst
@@ -0,0 +1,56 @@
+Release Process
+===============
+
+Because of Hyper-h2's place at the bottom of the dependency tree, it is
+extremely important that the project maintains a diligent release schedule.
+This document outlines our process for managing releases.
+
+Versioning
+----------
+
+Hyper-h2 follows `semantic versioning`_ of its public API when it comes to
+numbering releases. The public API of Hyper-h2 is strictly limited to the
+entities listed in the :doc:`api` documentation: anything not mentioned in that
+document is not considered part of the public API and is not covered by the
+versioning guarantees given by semantic versioning.
+
+Maintenance
+-----------
+
+Hyper-h2 has the notion of a "release series", given by a major and minor
+version number: for example, there is the 2.1 release series. When each minor
+release is made and a release series is born, a branch is made off the release
+tag: for example, for the 2.1 release series, the 2.1.X branch.
+
+All changes merged into the master branch will be evaluated for whether they
+can be considered 'bugfixes' only (that is, they do not affect the public API).
+If they can, they will also be cherry-picked back to all active maintenance
+branches that require the bugfix. If the bugfix is not necessary, because the
+branch in question is unaffected by that bug, the bugfix will not be
+backported.
+
+Supported Release Series
+-------------------------
+
+The developers of Hyper-h2 commit to supporting the following release series:
+
+- The most recent, as identified by the first two numbers in the highest
+ version currently released.
+- The immediately prior release series.
+
+The only exception to this policy is that no release series earlier than the
+2.1 series will be supported. In this context, "supported" means that they will
+continue to receive bugfix releases.
+
+For releases other than the ones identified above, no support is guaranteed.
+The developers may *choose* to support such a release series, but they do not
+promise to.
+
+The exception here is for security vulnerabilities. If a security vulnerability
+is identified in an out-of-support release series, the developers will do their
+best to patch it and issue an emergency release. For more information, see
+`our security documentation`_.
+
+
+.. _semantic versioning: http://semver.org/
+.. _our security documentation: http://python-hyper.org/en/latest/security.html
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/testimonials.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/testimonials.rst
new file mode 100644
index 0000000000..ec32fb9572
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/testimonials.rst
@@ -0,0 +1,9 @@
+Testimonials
+============
+
+Glyph Lefkowitz
+~~~~~~~~~~~~~~~
+
+Frankly, Hyper-h2 is almost SURREAL in how well-factored and decoupled the implementation is from I/O. If libraries in the Python ecosystem looked like this generally, Twisted would be a much better platform than it is. (Frankly, most of Twisted's _own_ protocol implementations should aspire to such cleanliness.)
+
+(`Source <https://twistedmatrix.com/pipermail/twisted-python/2015-November/029894.html>`_)
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/tornado-example.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/tornado-example.rst
new file mode 100644
index 0000000000..c7a80713a1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/tornado-example.rst
@@ -0,0 +1,16 @@
+Tornado Example Server
+======================
+
+This example is a basic HTTP/2 server written using the `Tornado`_ asynchronous
+networking library.
+
+The server returns the request headers as a JSON document to the caller, just
+like the example from the :doc:`basic-usage` document.
+
+.. literalinclude:: ../../examples/tornado/tornado-server.py
+ :language: python
+ :linenos:
+ :encoding: utf-8
+
+
+.. _Tornado: http://www.tornadoweb.org/
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/twisted-example.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/twisted-example.rst
new file mode 100644
index 0000000000..10d111628b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/twisted-example.rst
@@ -0,0 +1,18 @@
+Twisted Example Server
+======================
+
+This example is a basic HTTP/2 server written for the `Twisted`_ asynchronous
+networking framework. This is a relatively fleshed out example, and in
+particular it makes sure to obey HTTP/2 flow control rules.
+
+This server differs from some of the other example servers by serving files,
+rather than simply sending JSON responses. This makes the example lengthier,
+but also brings it closer to a real-world use-case.
+
+.. literalinclude:: ../../examples/twisted/twisted-server.py
+ :language: python
+ :linenos:
+ :encoding: utf-8
+
+
+.. _Twisted: https://twistedmatrix.com/ \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/twisted-head-example.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/twisted-head-example.rst
new file mode 100644
index 0000000000..df93b144e7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/twisted-head-example.rst
@@ -0,0 +1,17 @@
+Twisted Example Client: Head Requests
+=====================================
+
+This example is a basic HTTP/2 client written for the `Twisted`_ asynchronous
+networking framework.
+
+This client is fairly simple: it makes a hard-coded HEAD request to
+nghttp2.org/httpbin/ and prints out the response data. Its purpose is to demonstrate
+how to write a very basic HTTP/2 client implementation.
+
+.. literalinclude:: ../../examples/twisted/head_request.py
+ :language: python
+ :linenos:
+ :encoding: utf-8
+
+
+.. _Twisted: https://twistedmatrix.com/
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/twisted-post-example.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/twisted-post-example.rst
new file mode 100644
index 0000000000..7e3aba41a3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/twisted-post-example.rst
@@ -0,0 +1,18 @@
+Twisted Example Client: Post Requests
+=====================================
+
+This example is a basic HTTP/2 client written for the `Twisted`_ asynchronous
+networking framework.
+
+This client is fairly simple: it makes a hard-coded POST request to
+nghttp2.org/httpbin/post and prints out the response data, sending either a
+file provided on the command line or the script itself. Its purpose is to
+demonstrate how to write an HTTP/2 client implementation that handles flow
+control.
+
+.. literalinclude:: ../../examples/twisted/post_request.py
+ :language: python
+ :linenos:
+ :encoding: utf-8
+
+
+.. _Twisted: https://twistedmatrix.com/
diff --git a/testing/web-platform/tests/tools/third_party/h2/docs/source/wsgi-example.rst b/testing/web-platform/tests/tools/third_party/h2/docs/source/wsgi-example.rst
new file mode 100644
index 0000000000..82513899b2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/docs/source/wsgi-example.rst
@@ -0,0 +1,23 @@
+Example HTTP/2-only WSGI Server
+===============================
+
+This example is a more complex HTTP/2 server that acts as a WSGI server,
+passing data to an arbitrary WSGI application. This example is written using
+`asyncio`_. The server supports most of PEP-3333, and so could in principle be
+used as a production WSGI server: however, that's *not recommended* as certain
+shortcuts have been taken to ensure ease of implementation and understanding.
+
+The main advantages of this example are:
+
+1. It properly demonstrates HTTP/2 flow control management.
+2. It demonstrates how to plug hyper-h2 into a larger, more complex
+ application.
+
+
+.. literalinclude:: ../../examples/asyncio/wsgi-server.py
+ :language: python
+ :linenos:
+ :encoding: utf-8
+
+
+.. _asyncio: https://docs.python.org/3/library/asyncio.html
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/asyncio/asyncio-server.py b/testing/web-platform/tests/tools/third_party/h2/examples/asyncio/asyncio-server.py
new file mode 100644
index 0000000000..278774644b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/asyncio/asyncio-server.py
@@ -0,0 +1,210 @@
+# -*- coding: utf-8 -*-
+"""
+asyncio-server.py
+~~~~~~~~~~~~~~~~~
+
+A fully-functional HTTP/2 server using asyncio. Requires Python 3.5+.
+
+This example demonstrates handling requests with bodies, as well as handling
+those without. In particular, it demonstrates the fact that DataReceived may
+be called multiple times, and that applications must handle that possibility.
+"""
+import asyncio
+import io
+import json
+import ssl
+import collections
+from typing import List, Tuple
+
+from h2.config import H2Configuration
+from h2.connection import H2Connection
+from h2.events import (
+ ConnectionTerminated, DataReceived, RemoteSettingsChanged,
+ RequestReceived, StreamEnded, StreamReset, WindowUpdated
+)
+from h2.errors import ErrorCodes
+from h2.exceptions import ProtocolError, StreamClosedError
+from h2.settings import SettingCodes
+
+
+RequestData = collections.namedtuple('RequestData', ['headers', 'data'])
+
+
+class H2Protocol(asyncio.Protocol):
+ def __init__(self):
+ config = H2Configuration(client_side=False, header_encoding='utf-8')
+ self.conn = H2Connection(config=config)
+ self.transport = None
+ self.stream_data = {}
+ self.flow_control_futures = {}
+
+ def connection_made(self, transport: asyncio.Transport):
+ self.transport = transport
+ self.conn.initiate_connection()
+ self.transport.write(self.conn.data_to_send())
+
+ def connection_lost(self, exc):
+ for future in self.flow_control_futures.values():
+ future.cancel()
+ self.flow_control_futures = {}
+
+ def data_received(self, data: bytes):
+ try:
+ events = self.conn.receive_data(data)
+ except ProtocolError as e:
+ self.transport.write(self.conn.data_to_send())
+ self.transport.close()
+ else:
+ self.transport.write(self.conn.data_to_send())
+ for event in events:
+ if isinstance(event, RequestReceived):
+ self.request_received(event.headers, event.stream_id)
+ elif isinstance(event, DataReceived):
+ self.receive_data(event.data, event.stream_id)
+ elif isinstance(event, StreamEnded):
+ self.stream_complete(event.stream_id)
+ elif isinstance(event, ConnectionTerminated):
+ self.transport.close()
+ elif isinstance(event, StreamReset):
+ self.stream_reset(event.stream_id)
+ elif isinstance(event, WindowUpdated):
+ self.window_updated(event.stream_id, event.delta)
+ elif isinstance(event, RemoteSettingsChanged):
+ if SettingCodes.INITIAL_WINDOW_SIZE in event.changed_settings:
+ self.window_updated(None, 0)
+
+ self.transport.write(self.conn.data_to_send())
+
+ def request_received(self, headers: List[Tuple[str, str]], stream_id: int):
+ headers = collections.OrderedDict(headers)
+ method = headers[':method']
+
+ # Store off the request data.
+ request_data = RequestData(headers, io.BytesIO())
+ self.stream_data[stream_id] = request_data
+
+ def stream_complete(self, stream_id: int):
+ """
+ When a stream is complete, we can send our response.
+ """
+ try:
+ request_data = self.stream_data[stream_id]
+ except KeyError:
+ # Just return, we probably 405'd this already
+ return
+
+ headers = request_data.headers
+ body = request_data.data.getvalue().decode('utf-8')
+
+ data = json.dumps(
+ {"headers": headers, "body": body}, indent=4
+ ).encode("utf8")
+
+ response_headers = (
+ (':status', '200'),
+ ('content-type', 'application/json'),
+ ('content-length', str(len(data))),
+ ('server', 'asyncio-h2'),
+ )
+ self.conn.send_headers(stream_id, response_headers)
+ asyncio.ensure_future(self.send_data(data, stream_id))
+
+ def receive_data(self, data: bytes, stream_id: int):
+ """
+ We've received some data on a stream. If that stream is one we're
+ expecting data on, save it off. Otherwise, reset the stream.
+ """
+ try:
+ stream_data = self.stream_data[stream_id]
+ except KeyError:
+ self.conn.reset_stream(
+ stream_id, error_code=ErrorCodes.PROTOCOL_ERROR
+ )
+ else:
+ stream_data.data.write(data)
+
+ def stream_reset(self, stream_id):
+ """
+ A stream reset was sent. Stop sending data.
+ """
+ if stream_id in self.flow_control_futures:
+ future = self.flow_control_futures.pop(stream_id)
+ future.cancel()
+
+ async def send_data(self, data, stream_id):
+ """
+ Send data according to the flow control rules.
+ """
+ while data:
+ while self.conn.local_flow_control_window(stream_id) < 1:
+ try:
+ await self.wait_for_flow_control(stream_id)
+ except asyncio.CancelledError:
+ return
+
+ chunk_size = min(
+ self.conn.local_flow_control_window(stream_id),
+ len(data),
+ self.conn.max_outbound_frame_size,
+ )
+
+ try:
+ self.conn.send_data(
+ stream_id,
+ data[:chunk_size],
+ end_stream=(chunk_size == len(data))
+ )
+ except (StreamClosedError, ProtocolError):
+ # The stream got closed and we didn't get told. We're done
+ # here.
+ break
+
+ self.transport.write(self.conn.data_to_send())
+ data = data[chunk_size:]
+
+ async def wait_for_flow_control(self, stream_id):
+ """
+ Waits for a Future that fires when the flow control window is opened.
+ """
+ f = asyncio.Future()
+ self.flow_control_futures[stream_id] = f
+ await f
+
+ def window_updated(self, stream_id, delta):
+ """
+ A window update frame was received. Unblock some number of flow control
+ Futures.
+ """
+ if stream_id and stream_id in self.flow_control_futures:
+ f = self.flow_control_futures.pop(stream_id)
+ f.set_result(delta)
+ elif not stream_id:
+ for f in self.flow_control_futures.values():
+ f.set_result(delta)
+
+            self.flow_control_futures = {}
+
+
+ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
+ssl_context.options |= (
+ ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1 | ssl.OP_NO_COMPRESSION
+)
+ssl_context.load_cert_chain(certfile="cert.crt", keyfile="cert.key")
+ssl_context.set_alpn_protocols(["h2"])
+
+loop = asyncio.get_event_loop()
+# Each client connection will create a new protocol instance
+coro = loop.create_server(H2Protocol, '127.0.0.1', 8443, ssl=ssl_context)
+server = loop.run_until_complete(coro)
+
+# Serve requests until Ctrl+C is pressed
+print('Serving on {}'.format(server.sockets[0].getsockname()))
+try:
+ loop.run_forever()
+except KeyboardInterrupt:
+ pass
+
+# Close the server
+server.close()
+loop.run_until_complete(server.wait_closed())
+loop.close()
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/asyncio/cert.crt b/testing/web-platform/tests/tools/third_party/h2/examples/asyncio/cert.crt
new file mode 100644
index 0000000000..d6cf7d504d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/asyncio/cert.crt
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDhTCCAm2gAwIBAgIJAOrxh0dOYJLdMA0GCSqGSIb3DQEBCwUAMFkxCzAJBgNV
+BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX
+aWRnaXRzIFB0eSBMdGQxEjAQBgNVBAMMCWxvY2FsaG9zdDAeFw0xNTA5MTkxNDE2
+NDRaFw0xNTEwMTkxNDE2NDRaMFkxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21l
+LVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQxEjAQBgNV
+BAMMCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMqt
+A1iu8EN00FU0eBcBGlLVmNEgV7Jkbukra+kwS8j/U2y50QPGJc/FiIVDfuBqk5dL
+ACTNc6A/FQcXvWmOc5ixmC3QKKasMpuofqKz0V9C6irZdYXZ9rcsW0gHQIr989yd
+R+N1VbIlEVW/T9FJL3B2UD9GVIkUELzm47CSOWZvAxQUlsx8CUNuUCWqyZJoqTFN
+j0LeJDOWGCsug1Pkj0Q1x+jMVL6l6Zf6vMkLNOMsOsWsxUk+0L3tl/OzcTgUOCsw
+UzY59RIi6Rudrp0oaU8NuHr91yiSqPbKFlX10M9KwEEdnIpcxhND3dacrDycj3ux
+eWlqKync2vOFUkhwiaMCAwEAAaNQME4wHQYDVR0OBBYEFA0PN+PGoofZ+QIys2Jy
+1Zz94vBOMB8GA1UdIwQYMBaAFA0PN+PGoofZ+QIys2Jy1Zz94vBOMAwGA1UdEwQF
+MAMBAf8wDQYJKoZIhvcNAQELBQADggEBAEplethBoPpcP3EbR5Rz6snDDIcbtAJu
+Ngd0YZppGT+P0DYnPJva4vRG3bb84ZMSuppz5j67qD6DdWte8UXhK8BzWiHzwmQE
+QmbKyzzTMKQgTNFntpx5cgsSvTtrHpNYoMHzHOmyAOboNeM0DWiRXsYLkWTitLTN
+qbOpstwPubExbT9lPjLclntShT/lCupt+zsbnrR9YiqlYFY/fDzfAybZhrD5GMBY
+XdMPItwAc/sWvH31yztarjkLmld76AGCcO5r8cSR/cX98SicyfjOBbSco8GkjYNY
+582gTPkKGYpStuN7GNT5tZmxvMq935HRa2XZvlAIe8ufp8EHVoYiF3c=
+-----END CERTIFICATE-----
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/asyncio/cert.key b/testing/web-platform/tests/tools/third_party/h2/examples/asyncio/cert.key
new file mode 100644
index 0000000000..bda69e836c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/asyncio/cert.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAyq0DWK7wQ3TQVTR4FwEaUtWY0SBXsmRu6Str6TBLyP9TbLnR
+A8Ylz8WIhUN+4GqTl0sAJM1zoD8VBxe9aY5zmLGYLdAopqwym6h+orPRX0LqKtl1
+hdn2tyxbSAdAiv3z3J1H43VVsiURVb9P0UkvcHZQP0ZUiRQQvObjsJI5Zm8DFBSW
+zHwJQ25QJarJkmipMU2PQt4kM5YYKy6DU+SPRDXH6MxUvqXpl/q8yQs04yw6xazF
+ST7Qve2X87NxOBQ4KzBTNjn1EiLpG52unShpTw24ev3XKJKo9soWVfXQz0rAQR2c
+ilzGE0Pd1pysPJyPe7F5aWorKdza84VSSHCJowIDAQABAoIBACp+nh4BB/VMz8Wd
+q7Q/EfLeQB1Q57JKpoqTBRwueSVai3ZXe4CMEi9/HkG6xiZtkiZ9njkZLq4hq9oB
+2z//kzMnwV2RsIRJxI6ohGy+wR51HD4BvEdlTPpY/Yabpqe92VyfSYxidKZWaU0O
+QMED1EODOw4ZQ+4928iPrJu//PMB4e7TFao0b9Fk/XLWtu5/tQZz9jsrlTi1zthh
+7n+oaGNhfTeIJJL4jrhTrKW1CLHXATtr9SJlfZ3wbMxQVeyj2wUlP1V0M6kBuhNj
+tbGbMpixD5iCNJ49Cm2PHg+wBOfS3ADGIpi3PcGw5mb8nB3N9eGBRPhLShAlq5Hi
+Lv4tyykCgYEA8u3b3xJ04pxWYN25ou/Sc8xzgDCK4XvDNdHVTuZDjLVA+VTVPzql
+lw7VvJArsx47MSPvsaX/+4hQXYtfnR7yJpx6QagvQ+z4ludnIZYrQwdUmb9pFL1s
+8UNj+3j9QFRPenIiIQ8qxxNIQ9w2HsVQ8scvc9CjYop/YYAPaQyHaL8CgYEA1ZSz
+CR4NcpfgRSILdhb1dLcyw5Qus1VOSAx3DYkhDkMiB8XZwgMdJjwehJo9yaqRCLE8
+Sw5znMnkfoZpu7+skrjK0FqmMpXMH9gIszHvFG8wSw/6+2HIWS19/wOu8dh95LuC
+0zurMk8rFqxgWMWF20afhgYrUz42cvUTo10FVB0CgYEAt7mW6W3PArfUSCxIwmb4
+VmXREKkl0ATHDYQl/Cb//YHzot467TgQll883QB4XF5HzBFurX9rSzO7/BN1e6I0
+52i+ubtWC9xD4fUetXMaQvZfUGxIL8xXgVxDWKQXfLiG54c8Mp6C7s6xf8kjEUCP
+yR1F0SSA/Pzb+8RbY0p7eocCgYA+1rs+SXtHZev0KyoYGnUpW+Uxqd17ofOgOxqj
+/t6c5Z+TjeCdtnDTGQkZlo/rT6XQWuUUaDIXxUbW+xEMzj4mBPyXBLS1WWFvVQ5q
+OpzO9E/PJeqAH6rkof/aEelc+oc/zvOU1o9uA+D3kMvgEm1psIOq2RHSMhGvDPA0
+NmAk+QKBgQCwd1681GagdIYSZUCBecnLtevXmIsJyDW2yR1NNcIe/ukcVQREMDvy
+5DDkhnGDgnV1D5gYcXb34g9vYvbfTnBMl/JXmMAAG1kIS+3pvHyN6f1poVe3yJV1
+yHVuvymnJxKnyaV0L3ntepVvV0vVNIkA3oauoUTLto6txBI+b/ImDA==
+-----END RSA PRIVATE KEY-----
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/asyncio/wsgi-server.py b/testing/web-platform/tests/tools/third_party/h2/examples/asyncio/wsgi-server.py
new file mode 100644
index 0000000000..9fdb2fa0fc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/asyncio/wsgi-server.py
@@ -0,0 +1,760 @@
+# -*- coding: utf-8 -*-
+"""
+wsgi-server.py
+~~~~~~~~~~~~~~
+
+A fully-functional WSGI server, written using hyper-h2. Requires asyncio.
+
+To test it, try installing httpbin from pip (``pip install httpbin``) and then
+running the server (``python wsgi-server.py httpbin:app``).
+
+This server does not support HTTP/1.1: it is an HTTP/2-only WSGI server. The
+purpose of this code is to demonstrate how to integrate hyper-h2 into a more
+complex application, and to demonstrate several principles of concurrent
+programming.
+
+The architecture looks like this:
+
++---------------------------------+
+|     1x HTTP/2 Server Thread     |
+|        (running asyncio)        |
++---------------------------------+
++---------------------------------+
+|   N WSGI Application Threads    |
+|          (no asyncio)           |
++---------------------------------+
+
+Essentially, we spin up an asyncio-based event loop in the main thread. This
+launches one HTTP/2 Protocol instance for each inbound connection, all of which
+will read and write data from within the main thread in an asynchronous manner.
+
+When each HTTP request comes in, the server will build the WSGI environment
+dictionary and create a ``Stream`` object. This object will hold the relevant
+state for the request/response pair and will act as the WSGI side of the logic.
+That object will then be passed to a background thread pool, and when a worker
+is available the WSGI logic will begin to be executed. This model ensures that
+the asyncio web server itself is never blocked by the WSGI application.
+
+The WSGI application and the HTTP/2 server communicate via an asyncio queue,
+together with locks and threading events. The locks themselves are implicit in
+asyncio's "call_soon_threadsafe", which allows for a background thread to
+register an action with the main asyncio thread. When the asyncio thread
+eventually takes the action in question, it sets a threading event, signaling
+to the background thread that it is free to continue its work.
+
+To make the WSGI application work with flow control, there is a very important
+invariant that must be observed. Any WSGI action that would cause data to be
+emitted to the network MUST be accompanied by a threading Event that is not
+set until that data has been written to the transport. This ensures that the
+WSGI application *blocks* until the data is actually sent. The reason we
+require this invariant is that the HTTP/2 server may choose to re-order some
+data chunks for flow control reasons: that is, the application for stream X may
+have actually written its data first, but the server may elect to send the data
+for stream Y first. This means that it's vital that there not be *two* writes
+for stream X active at any one point or they may get reordered, which would be
+particularly terrible.
+
+Thus, the server must cooperate to ensure that each threading event only fires
+when the *complete* data for that event has been written to the asyncio
+transport. Any earlier will cause untold craziness.
+"""
+import asyncio
+import importlib
+import queue
+import ssl
+import sys
+import threading
+
+from h2.config import H2Configuration
+from h2.connection import H2Connection
+from h2.events import (
+ DataReceived, RequestReceived, WindowUpdated, StreamEnded, StreamReset
+)
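+
+
+# The cross-thread handoff described in the module docstring reduces to the
+# small pattern sketched below (illustrative only, not used by the server
+# itself): a worker thread asks the asyncio loop to run a callback via
+# call_soon_threadsafe, then blocks on a threading.Event until the loop has
+# actually performed the write. The names ``loop`` and ``write_callback`` are
+# placeholders.
+def _handoff_sketch(loop, write_callback):
+    done = threading.Event()
+
+    def _in_loop():
+        write_callback()  # runs inside the asyncio thread
+        done.set()        # wakes the waiting worker thread
+
+    loop.call_soon_threadsafe(_in_loop)
+    done.wait()           # the worker blocks until the data has been written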
+
+
+# Used to signal that a request has completed.
+#
+# This is a convenient way to do "in-band" signaling of stream completion
+# without doing anything so heavyweight as using a class. Essentially, we can
+# test identity against this empty object. In fact, this is so convenient that
+# we use this object for all streams, for data in both directions: in and out.
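+#
+# For example, the sending loop below treats ``data is END_DATA_SENTINEL`` as
+# "this stream's body is finished", and ``Stream.__next__`` treats it as "no
+# more request data will arrive from the network".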
+END_DATA_SENTINEL = object()
+
+# The WSGI callable. Stored here so that the protocol instances can get hold
+# of the data.
+APPLICATION = None
+
+
+class H2Protocol(asyncio.Protocol):
+ def __init__(self):
+ config = H2Configuration(client_side=False, header_encoding='utf-8')
+
+ # Our server-side state machine.
+ self.conn = H2Connection(config=config)
+
+ # The backing transport.
+ self.transport = None
+
+ # A dictionary of ``Stream`` objects, keyed by their stream ID. This
+ # makes it easy to route data to the correct WSGI application instance.
+ self.streams = {}
+
+ # A queue of data emitted by WSGI applications that has not yet been
+ # sent. Each stream may only have one chunk of data in either this
+ # queue or the flow_controlled_data dictionary at any one time.
+ self._stream_data = asyncio.Queue()
+
+ # Data that has been pulled off the queue that is for a stream blocked
+ # behind flow control limitations. This is used to avoid spinning on
+ # _stream_data queue when a stream cannot have its data sent. Data that
+ # cannot be sent on the connection when it is popped off the queue gets
+ # placed here until the stream flow control window opens up again.
+ self._flow_controlled_data = {}
+
+ # A reference to the loop in which this protocol runs. This is needed
+        # to synchronise with background threads.
+ self._loop = asyncio.get_event_loop()
+
+ # Any streams that have been remotely reset. We keep track of these to
+ # ensure that we don't emit data from a WSGI application whose stream
+ # has been cancelled.
+ self._reset_streams = set()
+
+ # Keep track of the loop sending task so we can kill it when the
+ # connection goes away.
+ self._send_loop_task = None
+
+ def connection_made(self, transport):
+ """
+ The connection has been made. Here we need to save off our transport,
+ do basic HTTP/2 connection setup, and then start our data writing
+ coroutine.
+ """
+ self.transport = transport
+ self.conn.initiate_connection()
+ self.transport.write(self.conn.data_to_send())
+ self._send_loop_task = self._loop.create_task(self.sending_loop())
+
+ def connection_lost(self, exc):
+ """
+ With the end of the connection, we just want to cancel our data sending
+ coroutine.
+ """
+ self._send_loop_task.cancel()
+
+ def data_received(self, data):
+ """
+ Process inbound data.
+ """
+ events = self.conn.receive_data(data)
+
+ for event in events:
+ if isinstance(event, RequestReceived):
+ self.request_received(event)
+ elif isinstance(event, DataReceived):
+ self.data_frame_received(event)
+ elif isinstance(event, WindowUpdated):
+ self.window_opened(event)
+ elif isinstance(event, StreamEnded):
+ self.end_stream(event)
+ elif isinstance(event, StreamReset):
+ self.reset_stream(event)
+
+ outbound_data = self.conn.data_to_send()
+ if outbound_data:
+ self.transport.write(outbound_data)
+
+ def window_opened(self, event):
+ """
+ The flow control window got opened.
+
+ This is important because it's possible that we were unable to send
+ some WSGI data because the flow control window was too small. If that
+ happens, the sending_loop coroutine starts buffering data.
+
+ As the window gets opened, we need to unbuffer the data. We do that by
+ placing the data chunks back on the back of the send queue and letting
+ the sending loop take another shot at sending them.
+
+ This system only works because we require that each stream only have
+ *one* data chunk in the sending queue at any time. The threading events
+ force this invariant to remain true.
+ """
+ if event.stream_id:
+ # This is specific to a single stream.
+ if event.stream_id in self._flow_controlled_data:
+ self._stream_data.put_nowait(
+ self._flow_controlled_data.pop(event.stream_id)
+ )
+ else:
+ # This event is specific to the connection. Free up *all* the
+ # streams. This is a bit tricky, but we *must not* yield the flow
+ # of control here or it all goes wrong.
+ for data in self._flow_controlled_data.values():
+ self._stream_data.put_nowait(data)
+
+ self._flow_controlled_data = {}
+
+ @asyncio.coroutine
+ def sending_loop(self):
+ """
+ A call that loops forever, attempting to send data. This sending loop
+ contains most of the flow-control smarts of this class: it pulls data
+ off of the asyncio queue and then attempts to send it.
+
+ The difficulties here are all around flow control. Specifically, a
+ chunk of data may be too large to send. In this case, what will happen
+ is that this coroutine will attempt to send what it can and will then
+ store the unsent data locally. When a flow control event comes in that
+ data will be freed up and placed back onto the asyncio queue, causing
+ it to pop back up into the sending logic of this coroutine.
+
+ This method explicitly *does not* handle HTTP/2 priority. That adds an
+ extra layer of complexity to what is already a fairly complex method,
+ and we'll look at how to do it another time.
+
+ This coroutine explicitly *does not end*.
+ """
+ while True:
+ stream_id, data, event = yield from self._stream_data.get()
+
+            # If this stream got reset, just drop the data on the floor. Note
+            # that we still need to set the event here to make sure that the
+            # application doesn't lock up, and skip straight to the next chunk.
+            if stream_id in self._reset_streams:
+                event.set()
+                continue
+
+ # Check if the body is done. If it is, this is really easy! Again,
+ # we *must* set the event here or the application will lock up.
+ if data is END_DATA_SENTINEL:
+ self.conn.end_stream(stream_id)
+ self.transport.write(self.conn.data_to_send())
+ event.set()
+ continue
+
+ # We need to send data, but not to exceed the flow control window.
+ # For that reason, grab only the data that fits: we'll buffer the
+ # rest.
+ window_size = self.conn.local_flow_control_window(stream_id)
+ chunk_size = min(window_size, len(data))
+ data_to_send = data[:chunk_size]
+ data_to_buffer = data[chunk_size:]
+
+ if data_to_send:
+ # There's a maximum frame size we have to respect. Because we
+ # aren't paying any attention to priority here, we can quite
+ # safely just split this string up into chunks of max frame
+ # size and blast them out.
+ #
+ # In a *real* application you'd want to consider priority here.
+ max_size = self.conn.max_outbound_frame_size
+ chunks = (
+ data_to_send[x:x+max_size]
+ for x in range(0, len(data_to_send), max_size)
+ )
+ for chunk in chunks:
+ self.conn.send_data(stream_id, chunk)
+ self.transport.write(self.conn.data_to_send())
+
+ # If there's data left to buffer, we should do that. Put it in a
+ # dictionary and *don't set the event*: the app must not generate
+ # any more data until we got rid of all of this data.
+ if data_to_buffer:
+ self._flow_controlled_data[stream_id] = (
+ stream_id, data_to_buffer, event
+ )
+ else:
+ # We sent everything. We can let the WSGI app progress.
+ event.set()
+
+ def request_received(self, event):
+ """
+        An HTTP/2 request has been received. We need to invoke the WSGI
+ application in a background thread to handle it.
+ """
+ # First, we are going to want an object to hold all the relevant state
+ # for this request/response. For that, we have a stream object. We
+ # need to store the stream object somewhere reachable for when data
+ # arrives later.
+ s = Stream(event.stream_id, self)
+ self.streams[event.stream_id] = s
+
+ # Next, we need to build the WSGI environ dictionary.
+ environ = _build_environ_dict(event.headers, s)
+
+ # Finally, we want to throw these arguments out to a threadpool and
+ # let it run.
+ self._loop.run_in_executor(
+ None,
+ s.run_in_threadpool,
+ APPLICATION,
+ environ,
+ )
+
+ def data_frame_received(self, event):
+ """
+        Data has been received by the WSGI server and needs to be dispatched to a
+ running application.
+
+ Note that the flow control window is not modified here. That's
+ deliberate: see Stream.__next__ for a longer discussion of why.
+ """
+ # Grab the stream in question from our dictionary and pass it on.
+ stream = self.streams[event.stream_id]
+ stream.receive_data(event.data, event.flow_controlled_length)
+
+ def end_stream(self, event):
+ """
+ The stream data is complete.
+ """
+ stream = self.streams[event.stream_id]
+ stream.request_complete()
+
+ def reset_stream(self, event):
+ """
+ A stream got forcefully reset.
+
+ This is a tricky thing to deal with because WSGI doesn't really have a
+ good notion for it. Essentially, you have to let the application run
+ until completion, but not actually let it send any data.
+
+ We do that by discarding any data we currently have for it, and then
+ marking the stream as reset to allow us to spot when that stream is
+ trying to send data and drop that data on the floor.
+
+ We then *also* signal the WSGI application that no more data is
+ incoming, to ensure that it does not attempt to do further reads of the
+ data.
+ """
+ if event.stream_id in self._flow_controlled_data:
+            del self._flow_controlled_data[event.stream_id]
+
+ self._reset_streams.add(event.stream_id)
+ self.end_stream(event)
+
+ def data_for_stream(self, stream_id, data):
+ """
+ Thread-safe method called from outside the main asyncio thread in order
+ to send data on behalf of a WSGI application.
+
+ Places data being written by a stream on an asyncio queue. Returns a
+ threading event that will fire when that data is sent.
+ """
+ event = threading.Event()
+ self._loop.call_soon_threadsafe(
+ self._stream_data.put_nowait,
+ (stream_id, data, event)
+ )
+ return event
+
+ def send_response(self, stream_id, headers):
+ """
+ Thread-safe method called from outside the main asyncio thread in order
+ to send the HTTP response headers on behalf of a WSGI application.
+
+ Returns a threading event that will fire when the headers have been
+ emitted to the network.
+ """
+ event = threading.Event()
+
+ def _inner_send(stream_id, headers, event):
+ self.conn.send_headers(stream_id, headers, end_stream=False)
+ self.transport.write(self.conn.data_to_send())
+ event.set()
+
+ self._loop.call_soon_threadsafe(
+ _inner_send,
+ stream_id,
+ headers,
+ event
+ )
+ return event
+
+ def open_flow_control_window(self, stream_id, increment):
+ """
+ Opens a flow control window for the given stream by the given amount.
+ Called from a WSGI thread. Does not return an event because there's no
+ need to block on this action, it may take place at any time.
+ """
+ def _inner_open(stream_id, increment):
+ self.conn.increment_flow_control_window(increment, stream_id)
+ self.conn.increment_flow_control_window(increment, None)
+ self.transport.write(self.conn.data_to_send())
+
+ self._loop.call_soon_threadsafe(
+ _inner_open,
+ stream_id,
+ increment,
+ )
+
+
+class Stream:
+ """
+ This class holds all of the state for a single stream. It also provides
+ several of the callables used by the WSGI application. Finally, it provides
+ the logic for actually interfacing with the WSGI application.
+
+ For these reasons, the object has *strict* requirements on thread-safety.
+ While the object can be initialized in the main WSGI thread, the
+ ``run_in_threadpool`` method *must* be called from outside that thread. At
+ that point, the main WSGI thread may only call specific methods.
+ """
+ def __init__(self, stream_id, protocol):
+ self.stream_id = stream_id
+ self._protocol = protocol
+
+ # Queue for data that has been received from the network. This is a
+ # thread-safe queue, to allow both the WSGI application to block on
+ # receiving more data and to allow the asyncio server to keep sending
+ # more data.
+ #
+ # This queue is unbounded in size, but in practice it cannot contain
+ # too much data because the flow control window doesn't get adjusted
+ # unless data is removed from it.
+ self._received_data = queue.Queue()
+
+ # This buffer is used to hold partial chunks of data from
+ # _received_data that were not returned out of ``read`` and friends.
+ self._temp_buffer = b''
+
+ # Temporary variables that allow us to keep hold of the headers and
+ # response status until such time as the application needs us to send
+ # them.
+ self._response_status = b''
+ self._response_headers = []
+ self._headers_emitted = False
+
+ # Whether the application has received all the data from the network
+ # or not. This allows us to short-circuit some reads.
+ self._complete = False
+
+ def receive_data(self, data, flow_controlled_size):
+ """
+ Called by the H2Protocol when more data has been received from the
+ network.
+
+ Places the data directly on the queue in a thread-safe manner without
+ blocking. Does not introspect or process the data.
+ """
+ self._received_data.put_nowait((data, flow_controlled_size))
+
+ def request_complete(self):
+ """
+ Called by the H2Protocol when all the request data has been received.
+
+ This works by placing the ``END_DATA_SENTINEL`` on the queue. The
+ reading code knows, when it sees the ``END_DATA_SENTINEL``, to expect
+ no more data from the network. This ensures that the state of the
+ application only changes when it has finished processing the data from
+ the network, even though the server may have long-since finished
+ receiving all the data for this request.
+ """
+ self._received_data.put_nowait((END_DATA_SENTINEL, None))
+
+ def run_in_threadpool(self, wsgi_application, environ):
+ """
+ This method should be invoked in a threadpool. At the point this method
+ is invoked, the only safe methods to call from the original thread are
+ ``receive_data`` and ``request_complete``: any other method is unsafe.
+
+ This method handles the WSGI logic. It invokes the application callable
+ in this thread, passing control over to the WSGI application. It then
+ ensures that the data makes it back to the HTTP/2 connection via
+ the thread-safe APIs provided below.
+ """
+ result = wsgi_application(environ, self.start_response)
+
+ try:
+ for data in result:
+ self.write(data)
+ finally:
+            # This signals that we're done with data. The server knows that
+            # it can now clean up its state for this stream: we're done here.
+ self.write(END_DATA_SENTINEL)
+
+ # The next few methods are called by the WSGI application. Firstly, the
+ # three methods provided by the input stream.
+ def read(self, size=None):
+ """
+ Called by the WSGI application to read data.
+
+        This method is one of the two that explicitly pump the input data
+ queue, which means it deals with the ``_complete`` flag and the
+ ``END_DATA_SENTINEL``.
+ """
+ # If we've already seen the END_DATA_SENTINEL, return immediately.
+ if self._complete:
+ return b''
+
+ # If we've been asked to read everything, just iterate over ourselves.
+ if size is None:
+ return b''.join(self)
+
+ # Otherwise, as long as we don't have enough data, spin looking for
+ # another data chunk.
+ data = b''
+ while len(data) < size:
+ try:
+ chunk = next(self)
+ except StopIteration:
+ break
+
+ # Concatenating strings this way is slow, but that's ok, this is
+ # just a demo.
+ data += chunk
+
+ # We have *at least* enough data to return, but we may have too much.
+ # If we do, throw it on a buffer: we'll use it later.
+ to_return = data[:size]
+ self._temp_buffer = data[size:]
+ return to_return
+
+ def readline(self, hint=None):
+ """
+ Called by the WSGI application to read a single line of data.
+
+ This method rigorously observes the ``hint`` parameter: it will only
+ ever read that much data. It then splits the data on a newline
+ character and throws everything it doesn't need into a buffer.
+ """
+ data = self.read(hint)
+ first_newline = data.find(b'\n')
+ if first_newline == -1:
+ # No newline, return all the data
+ return data
+
+ # We want to slice the data so that the head *includes* the first
+ # newline. Then, any data left in this line we don't care about should
+ # be prepended to the internal buffer.
+ head, tail = data[:first_newline + 1], data[first_newline + 1:]
+ self._temp_buffer = tail + self._temp_buffer
+
+ return head
+
+ def readlines(self, hint=None):
+ """
+ Called by the WSGI application to read several lines of data.
+
+ This method is really pretty stupid. It rigorously observes the
+ ``hint`` parameter, and quite happily returns the input split into
+ lines.
+ """
+ # This method is *crazy inefficient*, but it's also a pretty stupid
+ # method to call.
+ data = self.read(hint)
+ lines = data.split(b'\n')
+
+ # Split removes the newline character, but we want it, so put it back.
+ lines = [line + b'\n' for line in lines]
+
+ # Except if the last character was a newline character we now have an
+ # extra line that is just a newline: pull that out.
+ if lines[-1] == b'\n':
+ lines = lines[:-1]
+ return lines
+
+ def start_response(self, status, response_headers, exc_info=None):
+ """
+ This is the PEP-3333 mandated start_response callable.
+
+ All it does is store the headers for later sending, and return our
+        ``write`` callable.
+ """
+ if self._headers_emitted and exc_info is not None:
+ raise exc_info[1].with_traceback(exc_info[2])
+
+ assert not self._response_status or exc_info is not None
+ self._response_status = status
+ self._response_headers = response_headers
+
+ return self.write
+
+ def write(self, data):
+ """
+ Provides some data to write.
+
+ This function *blocks* until such time as the data is allowed by
+ HTTP/2 flow control. This allows a client to slow or pause the response
+ as needed.
+
+        This function is not supposed to be used, according to PEP-3333, but
+        once we have it, it becomes quite convenient, so this app actually
+        runs all writes through this function.
+ """
+ if not self._headers_emitted:
+ self._emit_headers()
+ event = self._protocol.data_for_stream(self.stream_id, data)
+ event.wait()
+ return
+
+ def _emit_headers(self):
+ """
+ Sends the response headers.
+
+ This is only called from the write callable and should only ever be
+ called once. It does some minor processing (converts the status line
+ into a status code because reason phrases are evil) and then passes
+ the headers on to the server. This call explicitly blocks until the
+ server notifies us that the headers have reached the network.
+ """
+ assert self._response_status and self._response_headers
+ assert not self._headers_emitted
+ self._headers_emitted = True
+
+ # We only need the status code
+ status = self._response_status.split(" ", 1)[0]
+ headers = [(":status", status)]
+ headers.extend(self._response_headers)
+ event = self._protocol.send_response(self.stream_id, headers)
+ event.wait()
+ return
+
+ # These two methods implement the iterator protocol. This allows a WSGI
+ # application to iterate over this Stream object to get the data.
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ # If the complete request has been read, abort immediately.
+ if self._complete:
+ raise StopIteration()
+
+ # If we have data stored in a temporary buffer for any reason, return
+ # that and clear the buffer.
+ #
+ # This can actually only happen when the application uses one of the
+ # read* callables, but that's fine.
+ if self._temp_buffer:
+ buffered_data = self._temp_buffer
+ self._temp_buffer = b''
+ return buffered_data
+
+ # Otherwise, pull data off the queue (blocking as needed). If this is
+ # the end of the request, we're done here: mark ourselves as complete
+ # and call it time. Otherwise, open the flow control window an
+ # appropriate amount and hand the chunk off.
+ chunk, chunk_size = self._received_data.get()
+ if chunk is END_DATA_SENTINEL:
+ self._complete = True
+ raise StopIteration()
+
+ # Let's talk a little bit about why we're opening the flow control
+ # window *here*, and not in the server thread.
+ #
+ # The purpose of HTTP/2 flow control is to allow for servers and
+ # clients to avoid needing to buffer data indefinitely because their
+ # peer is producing data faster than they can consume it. As a result,
+ # it's important that the flow control window be opened as late in the
+ # processing as possible. In this case, we open the flow control window
+ # exactly when the server hands the data to the application. This means
+ # that the flow control window essentially signals to the remote peer
+ # how much data hasn't even been *seen* by the application yet.
+ #
+ # If you wanted to be really clever you could consider not opening the
+ # flow control window until the application asks for the *next* chunk
+ # of data. That means that any buffers at the application level are now
+ # included in the flow control window processing. In my opinion, the
+ # advantage of that process does not outweigh the extra logical
+ # complexity involved in doing it, so we don't bother here.
+ #
+ # Another note: you'll notice that we don't include the _temp_buffer in
+ # our flow control considerations. This means you could in principle
+ # lead us to buffer slightly more than one connection flow control
+ # window's worth of data. That risk is considered acceptable for the
+ # much simpler logic available here.
+ #
+ # Finally, this is a pretty dumb flow control window management scheme:
+ # it causes us to emit a *lot* of window updates. A smarter server
+ # would want to use the content-length header to determine whether
+ # flow control window updates need to be emitted at all, and then to be
+ # more efficient about emitting them to avoid firing them off really
+ # frequently. For an example like this, there's very little gained by
+ # worrying about that.
+ self._protocol.open_flow_control_window(self.stream_id, chunk_size)
+
+ return chunk
+
+
+def _build_environ_dict(headers, stream):
+ """
+ Build the WSGI environ dictionary for a given request. To do that, we'll
+ temporarily create a dictionary for the headers. While this isn't actually
+ a valid way to represent headers, we know that the special headers we need
+ can only have one appearance in the block.
+
+ This code is arguably somewhat incautious: the conversion to dictionary
+ should only happen in a way that allows us to correctly join headers that
+ appear multiple times. That's acceptable in a demo app: in a productised
+ version you'd want to fix it.
+ """
+ header_dict = dict(headers)
+ path = header_dict.pop(u':path')
+ try:
+ path, query = path.split(u'?', 1)
+ except ValueError:
+ query = u""
+ server_name = header_dict.pop(u':authority')
+ try:
+ server_name, port = server_name.split(u':', 1)
+    except ValueError:
+ port = "8443"
+
+ environ = {
+ u'REQUEST_METHOD': header_dict.pop(u':method'),
+ u'SCRIPT_NAME': u'',
+ u'PATH_INFO': path,
+ u'QUERY_STRING': query,
+ u'SERVER_NAME': server_name,
+ u'SERVER_PORT': port,
+ u'SERVER_PROTOCOL': u'HTTP/2',
+ u'HTTPS': u"on",
+ u'SSL_PROTOCOL': u'TLSv1.2',
+ u'wsgi.version': (1, 0),
+ u'wsgi.url_scheme': header_dict.pop(u':scheme'),
+ u'wsgi.input': stream,
+ u'wsgi.errors': sys.stderr,
+ u'wsgi.multithread': True,
+ u'wsgi.multiprocess': False,
+ u'wsgi.run_once': False,
+ }
+ if u'content-type' in header_dict:
+ environ[u'CONTENT_TYPE'] = header_dict[u'content-type']
+ if u'content-length' in header_dict:
+ environ[u'CONTENT_LENGTH'] = header_dict[u'content-length']
+    for name, value in header_dict.items():
+        # Per PEP 3333, HTTP_* keys use underscores in place of hyphens.
+        environ[u'HTTP_' + name.upper().replace(u'-', u'_')] = value
+ return environ
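+
+
+# As a rough illustration (the exact header block is hypothetical), a request
+# carrying ``[(':method', 'GET'), (':path', '/status?full=1'),
+# (':authority', 'localhost:8443'), (':scheme', 'https'),
+# ('accept', 'application/json')]`` produces an environ containing
+# ``REQUEST_METHOD='GET'``, ``PATH_INFO='/status'``, ``QUERY_STRING='full=1'``,
+# ``SERVER_NAME='localhost'``, ``SERVER_PORT='8443'`` and
+# ``HTTP_ACCEPT='application/json'``, alongside the fixed wsgi.* keys set
+# above.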
+
+
+# Set up the WSGI app.
+application_string = sys.argv[1]
+path, func = application_string.split(':', 1)
+module = importlib.import_module(path)
+APPLICATION = getattr(module, func)
+
+# Set up TLS
+ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
+ssl_context.options |= (
+ ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1 | ssl.OP_NO_COMPRESSION
+)
+ssl_context.set_ciphers("ECDHE+AESGCM")
+ssl_context.load_cert_chain(certfile="cert.crt", keyfile="cert.key")
+ssl_context.set_alpn_protocols(["h2"])
+
+# Do the asyncio bits
+loop = asyncio.get_event_loop()
+# Each client connection will create a new protocol instance
+coro = loop.create_server(H2Protocol, '127.0.0.1', 8443, ssl=ssl_context)
+server = loop.run_until_complete(coro)
+
+# Serve requests until Ctrl+C is pressed
+print('Serving on {}'.format(server.sockets[0].getsockname()))
+try:
+ loop.run_forever()
+except KeyboardInterrupt:
+ pass
+
+# Close the server
+server.close()
+loop.run_until_complete(server.wait_closed())
+loop.close()
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/curio/curio-server.py b/testing/web-platform/tests/tools/third_party/h2/examples/curio/curio-server.py
new file mode 100644
index 0000000000..f93d4db9d0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/curio/curio-server.py
@@ -0,0 +1,206 @@
+#!/usr/bin/env python3.5
+# -*- coding: utf-8 -*-
+"""
+curio-server.py
+~~~~~~~~~~~~~~~
+
+A fully-functional HTTP/2 server written for curio.
+
+Requires Python 3.5+.
+"""
+import mimetypes
+import os
+import sys
+
+from curio import Event, spawn, socket, ssl, run
+
+import h2.config
+import h2.connection
+import h2.events
+
+
+# The maximum amount of a file we'll send in a single DATA frame.
+READ_CHUNK_SIZE = 8192
+
+
+async def create_listening_ssl_socket(address, certfile, keyfile):
+ """
+ Create and return a listening TLS socket on a given address.
+ """
+ ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
+ ssl_context.options |= (
+ ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1 | ssl.OP_NO_COMPRESSION
+ )
+ ssl_context.set_ciphers("ECDHE+AESGCM")
+ ssl_context.load_cert_chain(certfile=certfile, keyfile=keyfile)
+ ssl_context.set_alpn_protocols(["h2"])
+
+ sock = socket.socket()
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ sock = await ssl_context.wrap_socket(sock)
+ sock.bind(address)
+ sock.listen()
+
+ return sock
+
+
+async def h2_server(address, root, certfile, keyfile):
+ """
+ Create an HTTP/2 server at the given address.
+ """
+ sock = await create_listening_ssl_socket(address, certfile, keyfile)
+ print("Now listening on %s:%d" % address)
+
+ async with sock:
+ while True:
+ client, _ = await sock.accept()
+ server = H2Server(client, root)
+ await spawn(server.run())
+
+
+class H2Server:
+ """
+ A basic HTTP/2 file server. This is essentially very similar to
+ SimpleHTTPServer from the standard library, but uses HTTP/2 instead of
+ HTTP/1.1.
+ """
+ def __init__(self, sock, root):
+ config = h2.config.H2Configuration(
+ client_side=False, header_encoding='utf-8'
+ )
+ self.sock = sock
+ self.conn = h2.connection.H2Connection(config=config)
+ self.root = root
+ self.flow_control_events = {}
+
+ async def run(self):
+ """
+ Loop over the connection, managing it appropriately.
+ """
+ self.conn.initiate_connection()
+ await self.sock.sendall(self.conn.data_to_send())
+
+ while True:
+ # 65535 is basically arbitrary here: this amounts to "give me
+ # whatever data you have".
+ data = await self.sock.recv(65535)
+ if not data:
+ break
+
+ events = self.conn.receive_data(data)
+ for event in events:
+ if isinstance(event, h2.events.RequestReceived):
+ await spawn(
+ self.request_received(event.headers, event.stream_id)
+ )
+ elif isinstance(event, h2.events.DataReceived):
+ self.conn.reset_stream(event.stream_id)
+ elif isinstance(event, h2.events.WindowUpdated):
+ await self.window_updated(event)
+
+ await self.sock.sendall(self.conn.data_to_send())
+
+ async def request_received(self, headers, stream_id):
+ """
+ Handle a request by attempting to serve a suitable file.
+ """
+ headers = dict(headers)
+ assert headers[':method'] == 'GET'
+
+ path = headers[':path'].lstrip('/')
+ full_path = os.path.join(self.root, path)
+
+ if not os.path.exists(full_path):
+ response_headers = (
+ (':status', '404'),
+ ('content-length', '0'),
+ ('server', 'curio-h2'),
+ )
+ self.conn.send_headers(
+ stream_id, response_headers, end_stream=True
+ )
+ await self.sock.sendall(self.conn.data_to_send())
+ else:
+ await self.send_file(full_path, stream_id)
+
+ async def send_file(self, file_path, stream_id):
+ """
+ Send a file, obeying the rules of HTTP/2 flow control.
+ """
+ filesize = os.stat(file_path).st_size
+ content_type, content_encoding = mimetypes.guess_type(file_path)
+ response_headers = [
+ (':status', '200'),
+ ('content-length', str(filesize)),
+ ('server', 'curio-h2'),
+ ]
+ if content_type:
+ response_headers.append(('content-type', content_type))
+ if content_encoding:
+ response_headers.append(('content-encoding', content_encoding))
+
+ self.conn.send_headers(stream_id, response_headers)
+ await self.sock.sendall(self.conn.data_to_send())
+
+ with open(file_path, 'rb', buffering=0) as f:
+ await self._send_file_data(f, stream_id)
+
+ async def _send_file_data(self, fileobj, stream_id):
+ """
+ Send the data portion of a file. Handles flow control rules.
+ """
+ while True:
+ while self.conn.local_flow_control_window(stream_id) < 1:
+ await self.wait_for_flow_control(stream_id)
+
+ chunk_size = min(
+ self.conn.local_flow_control_window(stream_id),
+ READ_CHUNK_SIZE,
+ )
+
+ data = fileobj.read(chunk_size)
+ keep_reading = (len(data) == chunk_size)
+
+ self.conn.send_data(stream_id, data, not keep_reading)
+ await self.sock.sendall(self.conn.data_to_send())
+
+ if not keep_reading:
+ break
+
+ async def wait_for_flow_control(self, stream_id):
+ """
+ Blocks until the flow control window for a given stream is opened.
+ """
+ evt = Event()
+ self.flow_control_events[stream_id] = evt
+ await evt.wait()
+
+ async def window_updated(self, event):
+ """
+ Unblock streams waiting on flow control, if needed.
+ """
+ stream_id = event.stream_id
+
+ if stream_id and stream_id in self.flow_control_events:
+ evt = self.flow_control_events.pop(stream_id)
+ await evt.set()
+ elif not stream_id:
+ # Need to keep a real list here to use only the events present at
+ # this time.
+ blocked_streams = list(self.flow_control_events.keys())
+ for stream_id in blocked_streams:
+ event = self.flow_control_events.pop(stream_id)
+ await event.set()
+ return
+
+
+if __name__ == '__main__':
+ host = sys.argv[2] if len(sys.argv) > 2 else "localhost"
+ print("Try GETting:")
+ print(" On OSX after 'brew install curl --with-c-ares --with-libidn --with-nghttp2 --with-openssl':")
+ print("/usr/local/opt/curl/bin/curl --tlsv1.2 --http2 -k https://localhost:5000/bundle.js")
+ print("Or open a browser to: https://localhost:5000/")
+ print(" (Accept all the warnings)")
+ run(h2_server((host, 5000), sys.argv[1],
+ "{}.crt.pem".format(host),
+ "{}.key".format(host)), with_monitor=True)
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/curio/localhost.crt.pem b/testing/web-platform/tests/tools/third_party/h2/examples/curio/localhost.crt.pem
new file mode 100644
index 0000000000..d6cf7d504d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/curio/localhost.crt.pem
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDhTCCAm2gAwIBAgIJAOrxh0dOYJLdMA0GCSqGSIb3DQEBCwUAMFkxCzAJBgNV
+BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX
+aWRnaXRzIFB0eSBMdGQxEjAQBgNVBAMMCWxvY2FsaG9zdDAeFw0xNTA5MTkxNDE2
+NDRaFw0xNTEwMTkxNDE2NDRaMFkxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21l
+LVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQxEjAQBgNV
+BAMMCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMqt
+A1iu8EN00FU0eBcBGlLVmNEgV7Jkbukra+kwS8j/U2y50QPGJc/FiIVDfuBqk5dL
+ACTNc6A/FQcXvWmOc5ixmC3QKKasMpuofqKz0V9C6irZdYXZ9rcsW0gHQIr989yd
+R+N1VbIlEVW/T9FJL3B2UD9GVIkUELzm47CSOWZvAxQUlsx8CUNuUCWqyZJoqTFN
+j0LeJDOWGCsug1Pkj0Q1x+jMVL6l6Zf6vMkLNOMsOsWsxUk+0L3tl/OzcTgUOCsw
+UzY59RIi6Rudrp0oaU8NuHr91yiSqPbKFlX10M9KwEEdnIpcxhND3dacrDycj3ux
+eWlqKync2vOFUkhwiaMCAwEAAaNQME4wHQYDVR0OBBYEFA0PN+PGoofZ+QIys2Jy
+1Zz94vBOMB8GA1UdIwQYMBaAFA0PN+PGoofZ+QIys2Jy1Zz94vBOMAwGA1UdEwQF
+MAMBAf8wDQYJKoZIhvcNAQELBQADggEBAEplethBoPpcP3EbR5Rz6snDDIcbtAJu
+Ngd0YZppGT+P0DYnPJva4vRG3bb84ZMSuppz5j67qD6DdWte8UXhK8BzWiHzwmQE
+QmbKyzzTMKQgTNFntpx5cgsSvTtrHpNYoMHzHOmyAOboNeM0DWiRXsYLkWTitLTN
+qbOpstwPubExbT9lPjLclntShT/lCupt+zsbnrR9YiqlYFY/fDzfAybZhrD5GMBY
+XdMPItwAc/sWvH31yztarjkLmld76AGCcO5r8cSR/cX98SicyfjOBbSco8GkjYNY
+582gTPkKGYpStuN7GNT5tZmxvMq935HRa2XZvlAIe8ufp8EHVoYiF3c=
+-----END CERTIFICATE-----
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/curio/localhost.key b/testing/web-platform/tests/tools/third_party/h2/examples/curio/localhost.key
new file mode 100644
index 0000000000..bda69e836c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/curio/localhost.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAyq0DWK7wQ3TQVTR4FwEaUtWY0SBXsmRu6Str6TBLyP9TbLnR
+A8Ylz8WIhUN+4GqTl0sAJM1zoD8VBxe9aY5zmLGYLdAopqwym6h+orPRX0LqKtl1
+hdn2tyxbSAdAiv3z3J1H43VVsiURVb9P0UkvcHZQP0ZUiRQQvObjsJI5Zm8DFBSW
+zHwJQ25QJarJkmipMU2PQt4kM5YYKy6DU+SPRDXH6MxUvqXpl/q8yQs04yw6xazF
+ST7Qve2X87NxOBQ4KzBTNjn1EiLpG52unShpTw24ev3XKJKo9soWVfXQz0rAQR2c
+ilzGE0Pd1pysPJyPe7F5aWorKdza84VSSHCJowIDAQABAoIBACp+nh4BB/VMz8Wd
+q7Q/EfLeQB1Q57JKpoqTBRwueSVai3ZXe4CMEi9/HkG6xiZtkiZ9njkZLq4hq9oB
+2z//kzMnwV2RsIRJxI6ohGy+wR51HD4BvEdlTPpY/Yabpqe92VyfSYxidKZWaU0O
+QMED1EODOw4ZQ+4928iPrJu//PMB4e7TFao0b9Fk/XLWtu5/tQZz9jsrlTi1zthh
+7n+oaGNhfTeIJJL4jrhTrKW1CLHXATtr9SJlfZ3wbMxQVeyj2wUlP1V0M6kBuhNj
+tbGbMpixD5iCNJ49Cm2PHg+wBOfS3ADGIpi3PcGw5mb8nB3N9eGBRPhLShAlq5Hi
+Lv4tyykCgYEA8u3b3xJ04pxWYN25ou/Sc8xzgDCK4XvDNdHVTuZDjLVA+VTVPzql
+lw7VvJArsx47MSPvsaX/+4hQXYtfnR7yJpx6QagvQ+z4ludnIZYrQwdUmb9pFL1s
+8UNj+3j9QFRPenIiIQ8qxxNIQ9w2HsVQ8scvc9CjYop/YYAPaQyHaL8CgYEA1ZSz
+CR4NcpfgRSILdhb1dLcyw5Qus1VOSAx3DYkhDkMiB8XZwgMdJjwehJo9yaqRCLE8
+Sw5znMnkfoZpu7+skrjK0FqmMpXMH9gIszHvFG8wSw/6+2HIWS19/wOu8dh95LuC
+0zurMk8rFqxgWMWF20afhgYrUz42cvUTo10FVB0CgYEAt7mW6W3PArfUSCxIwmb4
+VmXREKkl0ATHDYQl/Cb//YHzot467TgQll883QB4XF5HzBFurX9rSzO7/BN1e6I0
+52i+ubtWC9xD4fUetXMaQvZfUGxIL8xXgVxDWKQXfLiG54c8Mp6C7s6xf8kjEUCP
+yR1F0SSA/Pzb+8RbY0p7eocCgYA+1rs+SXtHZev0KyoYGnUpW+Uxqd17ofOgOxqj
+/t6c5Z+TjeCdtnDTGQkZlo/rT6XQWuUUaDIXxUbW+xEMzj4mBPyXBLS1WWFvVQ5q
+OpzO9E/PJeqAH6rkof/aEelc+oc/zvOU1o9uA+D3kMvgEm1psIOq2RHSMhGvDPA0
+NmAk+QKBgQCwd1681GagdIYSZUCBecnLtevXmIsJyDW2yR1NNcIe/ukcVQREMDvy
+5DDkhnGDgnV1D5gYcXb34g9vYvbfTnBMl/JXmMAAG1kIS+3pvHyN6f1poVe3yJV1
+yHVuvymnJxKnyaV0L3ntepVvV0vVNIkA3oauoUTLto6txBI+b/ImDA==
+-----END RSA PRIVATE KEY-----
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/eventlet/eventlet-server.py b/testing/web-platform/tests/tools/third_party/h2/examples/eventlet/eventlet-server.py
new file mode 100644
index 0000000000..a46cfb3d15
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/eventlet/eventlet-server.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+"""
+eventlet-server.py
+~~~~~~~~~~~~~~~~~~
+
+A fully-functional HTTP/2 server written for Eventlet.
+"""
+import collections
+import json
+
+import eventlet
+
+from eventlet.green.OpenSSL import SSL, crypto
+from h2.config import H2Configuration
+from h2.connection import H2Connection
+from h2.events import RequestReceived, DataReceived
+
+
+class ConnectionManager(object):
+ """
+ An object that manages a single HTTP/2 connection.
+ """
+ def __init__(self, sock):
+ config = H2Configuration(client_side=False)
+ self.sock = sock
+ self.conn = H2Connection(config=config)
+
+ def run_forever(self):
+ self.conn.initiate_connection()
+ self.sock.sendall(self.conn.data_to_send())
+
+ while True:
+ data = self.sock.recv(65535)
+ if not data:
+ break
+
+ events = self.conn.receive_data(data)
+
+ for event in events:
+ if isinstance(event, RequestReceived):
+ self.request_received(event.headers, event.stream_id)
+ elif isinstance(event, DataReceived):
+ self.conn.reset_stream(event.stream_id)
+
+ self.sock.sendall(self.conn.data_to_send())
+
+ def request_received(self, headers, stream_id):
+ headers = collections.OrderedDict(headers)
+ data = json.dumps({'headers': headers}, indent=4).encode('utf-8')
+
+ response_headers = (
+ (':status', '200'),
+ ('content-type', 'application/json'),
+ ('content-length', str(len(data))),
+ ('server', 'eventlet-h2'),
+ )
+ self.conn.send_headers(stream_id, response_headers)
+ self.conn.send_data(stream_id, data, end_stream=True)
+
+
+def alpn_callback(conn, protos):
+ if b'h2' in protos:
+ return b'h2'
+
+ raise RuntimeError("No acceptable protocol offered!")
+
+
+def npn_advertise_cb(conn):
+ return [b'h2']
+
+
+# Let's set up SSL. This is a lot of work in PyOpenSSL.
+options = (
+ SSL.OP_NO_COMPRESSION |
+ SSL.OP_NO_SSLv2 |
+ SSL.OP_NO_SSLv3 |
+ SSL.OP_NO_TLSv1 |
+ SSL.OP_NO_TLSv1_1
+)
+context = SSL.Context(SSL.SSLv23_METHOD)
+context.set_options(options)
+context.set_verify(SSL.VERIFY_NONE, lambda *args: True)
+context.use_privatekey_file('server.key')
+context.use_certificate_file('server.crt')
+context.set_npn_advertise_callback(npn_advertise_cb)
+context.set_alpn_select_callback(alpn_callback)
+context.set_cipher_list(
+ "ECDHE+AESGCM"
+)
+context.set_tmp_ecdh(crypto.get_elliptic_curve(u'prime256v1'))
+
+server = eventlet.listen(('0.0.0.0', 443))
+server = SSL.Connection(context, server)
+pool = eventlet.GreenPool()
+
+while True:
+ try:
+ new_sock, _ = server.accept()
+ manager = ConnectionManager(new_sock)
+ pool.spawn_n(manager.run_forever)
+ except (SystemExit, KeyboardInterrupt):
+ break
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/eventlet/server.crt b/testing/web-platform/tests/tools/third_party/h2/examples/eventlet/server.crt
new file mode 100644
index 0000000000..bc8a4c08d2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/eventlet/server.crt
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDUjCCAjoCCQCQmNzzpQTCijANBgkqhkiG9w0BAQUFADBrMQswCQYDVQQGEwJH
+QjEPMA0GA1UECBMGTG9uZG9uMQ8wDQYDVQQHEwZMb25kb24xETAPBgNVBAoTCGh5
+cGVyLWgyMREwDwYDVQQLEwhoeXBleS1oMjEUMBIGA1UEAxMLZXhhbXBsZS5jb20w
+HhcNMTUwOTE2MjAyOTA0WhcNMTYwOTE1MjAyOTA0WjBrMQswCQYDVQQGEwJHQjEP
+MA0GA1UECBMGTG9uZG9uMQ8wDQYDVQQHEwZMb25kb24xETAPBgNVBAoTCGh5cGVy
+LWgyMREwDwYDVQQLEwhoeXBleS1oMjEUMBIGA1UEAxMLZXhhbXBsZS5jb20wggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC74ZeB4Jdb5cnC9KXXLJuzjwTg
+45q5EeShDYQe0TbKgreiUP6clU3BR0fFAVedN1q/LOuQ1HhvrDk1l4TfGF2bpCIq
+K+U9CnzcQknvdpyyVeOLtSsCjOPk4xydHwkQxwJvHVdtJx4CzDDqGbHNHCF/9gpQ
+lsa3JZW+tIZLK0XMEPFQ4XFXgegxTStO7kBBPaVIgG9Ooqc2MG4rjMNUpxa28WF1
+SyqWTICf2N8T/C+fPzbQLKCWrFrKUP7WQlOaqPNQL9bCDhSTPRTwQOc2/MzVZ9gT
+Xr0Z+JMTXwkSMKO52adE1pmKt00jJ1ecZBiJFyjx0X6hH+/59dLbG/7No+PzAgMB
+AAEwDQYJKoZIhvcNAQEFBQADggEBAG3UhOCa0EemL2iY+C+PR6CwEHQ+n7vkBzNz
+gKOG+Q39spyzqU1qJAzBxLTE81bIQbDg0R8kcLWHVH2y4zViRxZ0jHUFKMgjONW+
+Aj4evic/2Y/LxpLxCajECq/jeMHYrmQONszf9pbc0+exrQpgnwd8asfsM3d/FJS2
+5DIWryCKs/61m9vYL8icWx/9cnfPkBoNv1ER+V1L1TH3ARvABh406SBaeqLTm/kG
+MNuKytKWJsQbNlxzWHVgkKzVsBKvYj0uIEJpClIhbe6XNYRDy8T8mKXVWhJuxH4p
+/agmCG3nxO8aCrUK/EVmbWmVIfCH3t7jlwMX1nJ8MsRE7Ydnk8I=
+-----END CERTIFICATE-----
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/eventlet/server.key b/testing/web-platform/tests/tools/third_party/h2/examples/eventlet/server.key
new file mode 100644
index 0000000000..11f9ea094b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/eventlet/server.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEAu+GXgeCXW+XJwvSl1yybs48E4OOauRHkoQ2EHtE2yoK3olD+
+nJVNwUdHxQFXnTdavyzrkNR4b6w5NZeE3xhdm6QiKivlPQp83EJJ73acslXji7Ur
+Aozj5OMcnR8JEMcCbx1XbSceAsww6hmxzRwhf/YKUJbGtyWVvrSGSytFzBDxUOFx
+V4HoMU0rTu5AQT2lSIBvTqKnNjBuK4zDVKcWtvFhdUsqlkyAn9jfE/wvnz820Cyg
+lqxaylD+1kJTmqjzUC/Wwg4Ukz0U8EDnNvzM1WfYE169GfiTE18JEjCjudmnRNaZ
+irdNIydXnGQYiRco8dF+oR/v+fXS2xv+zaPj8wIDAQABAoIBAQCsdq278+0c13d4
+tViSh4k5r1w8D9IUdp9XU2/nVgckqA9nOVAvbkJc3FC+P7gsQgbUHKj0XoVbhU1S
+q461t8kduPH/oiGhAcKR8WurHEdE0OC6ewhLJAeCMRQwCrAorXXHh7icIt9ClCuG
+iSWUcXEy5Cidx3oL3r1xvIbV85fzdDtE9RC1I/kMjAy63S47YGiqh5vYmJkCa8rG
+Dsd1sEMDPr63XJpqJj3uHRcPvySgXTa+ssTmUH8WJlPTjvDB5hnPz+lkk2JKVPNu
+8adzftZ6hSun+tsc4ZJp8XhGu/m/7MjxWh8MeupLHlXcOEsnj4uHQQsOM3zHojr3
+aDCZiC1pAoGBAOAhwe1ujoS2VJ5RXJ9KMs7eBER/02MDgWZjo54Jv/jFxPWGslKk
+QQceuTe+PruRm41nzvk3q4iZXt8pG0bvpgigN2epcVx/O2ouRsUWWBT0JrVlEzha
+TIvWjtZ5tSQExXgHL3VlM9+ka40l+NldLSPn25+prizaqhalWuvTpP23AoGBANaY
+VhEI6yhp0BBUSATEv9lRgkwx3EbcnXNXPQjDMOthsyfq7FxbdOBEK1rwSDyuE6Ij
+zQGcTOfdiur5Ttg0OQilTJIXJAlpoeecOQ9yGma08c5FMXVJJvcZUuWRZWg1ocQj
+/hx0WVE9NwOoKwTBERv8HX7vJOFRZyvgkJwFxoulAoGAe4m/1XoZrga9z2GzNs10
+AdgX7BW00x+MhH4pIiPnn1yK+nYa9jg4647Asnv3IfXZEnEEgRNxReKbi0+iDFBt
+aNW+lDGuHTi37AfD1EBDnpEQgO1MUcRb6rwBkTAWatsCaO00+HUmyX9cFLm4Vz7n
+caILyQ6CxZBlLgRIgDHxADMCgYEAtubsJGTHmZBmSCStpXLUWbOBLNQqfTM398DZ
+QoirP1PsUQ+IGUfSG/u+QCogR6fPEBkXeFHxsoY/Cvsm2lvYaKgK1VFn46Xm2vNq
+JuIH4pZCqp6LAv4weddZslT0a5eaowRSZ4o7PmTAaRuCXvD3VjTSJwhJFMo+90TV
+vEWn7gkCgYEAkk+unX9kYmKoUdLh22/tzQekBa8WqMxXDwzBCECTAs2GlpL/f73i
+zD15TnaNfLP6Q5RNb0N9tb0Gz1wSkwI1+jGAQLnh2K9X9cIVIqJn8Mf/KQa/wUDV
+Tb1j7FoGUEgX7vbsyWuTd8P76kNYyGqCss1XmbttcSolqpbIdlSUcO0=
+-----END RSA PRIVATE KEY-----
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/fragments/client_https_setup_fragment.py b/testing/web-platform/tests/tools/third_party/h2/examples/fragments/client_https_setup_fragment.py
new file mode 100644
index 0000000000..269194d6c7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/fragments/client_https_setup_fragment.py
@@ -0,0 +1,110 @@
+# -*- coding: utf-8 -*-
+"""
+Client HTTPS Setup
+~~~~~~~~~~~~~~~~~~
+
+This example code fragment demonstrates how to set up an HTTP/2 client that
+negotiates HTTP/2 using NPN and ALPN. For the sake of maximum explanatory value
+this code uses the synchronous, low-level sockets API: however, if you're not
+using sockets directly (e.g. because you're using asyncio), you should focus on
+the setup required for the SSLContext object. For other concurrency libraries
+you may need to use other setup (e.g. for Twisted you'll need to use
+IProtocolNegotiationFactory).
+
+This code requires Python 3.5 or later.
+"""
+import h2.connection
+import socket
+import ssl
+
+
+def establish_tcp_connection():
+ """
+ This function establishes a client-side TCP connection. How it works isn't
+ very important to this example. For the purpose of this example we connect
+ to localhost.
+ """
+ return socket.create_connection(('localhost', 443))
+
+
+def get_http2_ssl_context():
+ """
+ This function creates an SSLContext object that is suitably configured for
+ HTTP/2. If you're working with Python TLS directly, you'll want to do the
+ exact same setup as this function does.
+ """
+ # Get the basic context from the standard library.
+ ctx = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH)
+
+ # RFC 7540 Section 9.2: Implementations of HTTP/2 MUST use TLS version 1.2
+ # or higher. Disable TLS 1.1 and lower.
+ ctx.options |= (
+ ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1
+ )
+
+ # RFC 7540 Section 9.2.1: A deployment of HTTP/2 over TLS 1.2 MUST disable
+ # compression.
+ ctx.options |= ssl.OP_NO_COMPRESSION
+
+ # RFC 7540 Section 9.2.2: "deployments of HTTP/2 that use TLS 1.2 MUST
+ # support TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256". In practice, the
+ # blocklist defined in this section allows only the AES GCM and ChaCha20
+ # cipher suites with ephemeral key negotiation.
+ ctx.set_ciphers("ECDHE+AESGCM:ECDHE+CHACHA20:DHE+AESGCM:DHE+CHACHA20")
+
+ # We want to negotiate using NPN and ALPN. ALPN is mandatory, but NPN may
+ # be absent, so allow that. This setup allows for negotiation of HTTP/1.1.
+ ctx.set_alpn_protocols(["h2", "http/1.1"])
+
+ try:
+ ctx.set_npn_protocols(["h2", "http/1.1"])
+ except NotImplementedError:
+ pass
+
+ return ctx
+
+
+def negotiate_tls(tcp_conn, context):
+ """
+    Given an established TCP connection and an HTTP/2-appropriate TLS context,
+ this function:
+
+ 1. wraps TLS around the TCP connection.
+ 2. confirms that HTTP/2 was negotiated and, if it was not, throws an error.
+ """
+ # Note that SNI is mandatory for HTTP/2, so you *must* pass the
+ # server_hostname argument.
+ tls_conn = context.wrap_socket(tcp_conn, server_hostname='localhost')
+
+ # Always prefer the result from ALPN to that from NPN.
+ # You can only check what protocol was negotiated once the handshake is
+ # complete.
+ negotiated_protocol = tls_conn.selected_alpn_protocol()
+ if negotiated_protocol is None:
+ negotiated_protocol = tls_conn.selected_npn_protocol()
+
+ if negotiated_protocol != "h2":
+ raise RuntimeError("Didn't negotiate HTTP/2!")
+
+ return tls_conn
+
+
+def main():
+ # Step 1: Set up your TLS context.
+ context = get_http2_ssl_context()
+
+ # Step 2: Create a TCP connection.
+ connection = establish_tcp_connection()
+
+ # Step 3: Wrap the connection in TLS and validate that we negotiated HTTP/2
+ tls_connection = negotiate_tls(connection, context)
+
+ # Step 4: Create a client-side H2 connection.
+ http2_connection = h2.connection.H2Connection()
+
+ # Step 5: Initiate the connection
+ http2_connection.initiate_connection()
+ tls_connection.sendall(http2_connection.data_to_send())
+
+ # The TCP, TLS, and HTTP/2 handshakes are now complete. You can enter your
+ # main loop now.
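+
+
+# A minimal sketch of how such a main loop might continue (not part of the
+# fragment above): send a single GET request and drain events until the
+# stream ends. ``http2_connection`` and ``tls_connection`` are the objects
+# created in main(); the request target is hypothetical.
+def example_request_loop(http2_connection, tls_connection):
+    import h2.events
+
+    stream_id = http2_connection.get_next_available_stream_id()
+    http2_connection.send_headers(
+        stream_id,
+        [(':method', 'GET'), (':path', '/'),
+         (':authority', 'localhost'), (':scheme', 'https')],
+        end_stream=True,
+    )
+    tls_connection.sendall(http2_connection.data_to_send())
+
+    while True:
+        data = tls_connection.recv(65535)
+        if not data:
+            return
+        for event in http2_connection.receive_data(data):
+            if isinstance(event, h2.events.ResponseReceived):
+                print(event.headers)
+            elif isinstance(event, h2.events.DataReceived):
+                # Keep the flow control window open for further data.
+                http2_connection.acknowledge_received_data(
+                    event.flow_controlled_length, event.stream_id
+                )
+            elif isinstance(event, h2.events.StreamEnded):
+                return
+        tls_connection.sendall(http2_connection.data_to_send())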
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/fragments/client_upgrade_fragment.py b/testing/web-platform/tests/tools/third_party/h2/examples/fragments/client_upgrade_fragment.py
new file mode 100644
index 0000000000..f45c002df7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/fragments/client_upgrade_fragment.py
@@ -0,0 +1,103 @@
+# -*- coding: utf-8 -*-
+"""
+Client Plaintext Upgrade
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+This example code fragment demonstrates how to set up an HTTP/2 client that uses
+the plaintext HTTP Upgrade mechanism to negotiate HTTP/2 connectivity. For
+maximum explanatory value it uses the synchronous socket API that comes with
+the Python standard library. In production code you will want to use an actual
+HTTP/1.1 client if possible.
+
+This code requires Python 3.5 or later.
+"""
+import h2.connection
+import socket
+
+
+def establish_tcp_connection():
+ """
+ This function establishes a client-side TCP connection. How it works isn't
+ very important to this example. For the purpose of this example we connect
+ to localhost.
+ """
+ return socket.create_connection(('localhost', 80))
+
+
+def send_initial_request(connection, settings):
+ """
+ For the sake of this upgrade demonstration, we're going to issue a GET
+ request against the root of the site. In principle the best request to
+ issue for an upgrade is actually ``OPTIONS *``, but this is remarkably
+ poorly supported and can break in weird ways.
+ """
+ # Craft our initial request per RFC 7540 Section 3.2. This requires two
+    # special header fields: the Upgrade header and the HTTP2-Settings header.
+ # The value of the HTTP2-Settings header field comes from h2.
+ request = (
+ b"GET / HTTP/1.1\r\n" +
+ b"Host: localhost\r\n" +
+ b"Upgrade: h2c\r\n" +
+ b"HTTP2-Settings: " + settings + b"\r\n" +
+ b"\r\n"
+ )
+ connection.sendall(request)
+
+
+def get_upgrade_response(connection):
+ """
+ This function reads from the socket until the HTTP/1.1 end-of-headers
+ sequence (CRLFCRLF) is received. It then checks what the status code of the
+ response is.
+
+ This is not a substitute for proper HTTP/1.1 parsing, but it's good enough
+ for example purposes.
+ """
+ data = b''
+ while b'\r\n\r\n' not in data:
+ data += connection.recv(8192)
+
+ headers, rest = data.split(b'\r\n\r\n', 1)
+
+ # An upgrade response begins HTTP/1.1 101 Switching Protocols. Look for the
+ # code. In production code you should also check that the upgrade is to
+ # h2c, but here we know we only offered one upgrade so there's only one
+ # possible upgrade in use.
+ split_headers = headers.split()
+ if split_headers[1] != b'101':
+ raise RuntimeError("Not upgrading!")
+
+ # We don't care about the HTTP/1.1 data anymore, but we do care about
+ # any other data we read from the socket: this is going to be HTTP/2 data
+ # that must be passed to the H2Connection.
+ return rest
+
+
+def main():
+ """
+ The client upgrade flow.
+ """
+    # Step 1: Establish the TCP connection.
+ connection = establish_tcp_connection()
+
+ # Step 2: Create H2 Connection object, put it in upgrade mode, and get the
+ # value of the HTTP2-Settings header we want to use.
+ h2_connection = h2.connection.H2Connection()
+ settings_header_value = h2_connection.initiate_upgrade_connection()
+
+ # Step 3: Send the initial HTTP/1.1 request with the upgrade fields.
+ send_initial_request(connection, settings_header_value)
+
+ # Step 4: Read the HTTP/1.1 response, look for 101 response.
+ extra_data = get_upgrade_response(connection)
+
+ # Step 5: Immediately send the pending HTTP/2 data.
+ connection.sendall(h2_connection.data_to_send())
+
+    # Step 6: Feed any leftover HTTP/2 data to the H2 connection.
+    events = h2_connection.receive_data(extra_data)
+
+ # Now you can enter your main loop, beginning by processing the first set
+ # of events above. These events may include ResponseReceived, which will
+ # contain the response to the request we made in Step 3.
+ main_loop(events)
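+
+
+# A minimal sketch of what ``main_loop`` could look like (the fragment above
+# deliberately leaves it undefined): process the first batch of events, then
+# keep reading from the socket until the upgraded stream ends. It assumes
+# access to the ``connection`` socket and the ``h2_connection`` object from
+# main().
+def main_loop_sketch(events, connection, h2_connection):
+    import h2.events
+
+    while True:
+        for event in events:
+            if isinstance(event, h2.events.ResponseReceived):
+                print(event.headers)
+            elif isinstance(event, h2.events.DataReceived):
+                # Keep the flow control window open for further data.
+                h2_connection.acknowledge_received_data(
+                    event.flow_controlled_length, event.stream_id
+                )
+            elif isinstance(event, h2.events.StreamEnded):
+                return
+        connection.sendall(h2_connection.data_to_send())
+        data = connection.recv(65535)
+        if not data:
+            return
+        events = h2_connection.receive_data(data)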
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/fragments/server_https_setup_fragment.py b/testing/web-platform/tests/tools/third_party/h2/examples/fragments/server_https_setup_fragment.py
new file mode 100644
index 0000000000..9fc361f2c6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/fragments/server_https_setup_fragment.py
@@ -0,0 +1,112 @@
+# -*- coding: utf-8 -*-
+"""
+Server HTTPS Setup
+~~~~~~~~~~~~~~~~~~
+
+This example code fragment demonstrates how to set up an HTTP/2 server that
+negotiates HTTP/2 using NPN and ALPN. For the sake of maximum explanatory value
+this code uses the synchronous, low-level sockets API: however, if you're not
+using sockets directly (e.g. because you're using asyncio), you should focus on
+the setup required for the SSLContext object. For other concurrency libraries
+you may need to use other setup (e.g. for Twisted you'll need to use
+IProtocolNegotiationFactory).
+
+This code requires Python 3.5 or later.
+"""
+import h2.config
+import h2.connection
+import socket
+import ssl
+
+
+def establish_tcp_connection():
+ """
+ This function establishes a server-side TCP connection. How it works isn't
+ very important to this example.
+ """
+ bind_socket = socket.socket()
+ bind_socket.bind(('', 443))
+ bind_socket.listen(5)
+ return bind_socket.accept()[0]
+
+
+def get_http2_ssl_context():
+ """
+ This function creates an SSLContext object that is suitably configured for
+ HTTP/2. If you're working with Python TLS directly, you'll want to do the
+ exact same setup as this function does.
+ """
+ # Get the basic context from the standard library.
+ ctx = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH)
+
+ # RFC 7540 Section 9.2: Implementations of HTTP/2 MUST use TLS version 1.2
+ # or higher. Disable TLS 1.1 and lower.
+ ctx.options |= (
+ ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1
+ )
+
+ # RFC 7540 Section 9.2.1: A deployment of HTTP/2 over TLS 1.2 MUST disable
+ # compression.
+ ctx.options |= ssl.OP_NO_COMPRESSION
+
+ # RFC 7540 Section 9.2.2: "deployments of HTTP/2 that use TLS 1.2 MUST
+ # support TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256". In practice, the
+ # blocklist defined in this section allows only the AES GCM and ChaCha20
+ # cipher suites with ephemeral key negotiation.
+ ctx.set_ciphers("ECDHE+AESGCM:ECDHE+CHACHA20:DHE+AESGCM:DHE+CHACHA20")
+
+ # We want to negotiate using NPN and ALPN. ALPN is mandatory, but NPN may
+ # be absent, so allow that. This setup allows for negotiation of HTTP/1.1.
+ ctx.set_alpn_protocols(["h2", "http/1.1"])
+
+ try:
+ ctx.set_npn_protocols(["h2", "http/1.1"])
+ except NotImplementedError:
+ pass
+
+ return ctx
+
+
+def negotiate_tls(tcp_conn, context):
+ """
+    Given an established TCP connection and an HTTP/2-appropriate TLS context,
+ this function:
+
+ 1. wraps TLS around the TCP connection.
+ 2. confirms that HTTP/2 was negotiated and, if it was not, throws an error.
+ """
+ tls_conn = context.wrap_socket(tcp_conn, server_side=True)
+
+ # Always prefer the result from ALPN to that from NPN.
+ # You can only check what protocol was negotiated once the handshake is
+ # complete.
+ negotiated_protocol = tls_conn.selected_alpn_protocol()
+ if negotiated_protocol is None:
+ negotiated_protocol = tls_conn.selected_npn_protocol()
+
+ if negotiated_protocol != "h2":
+ raise RuntimeError("Didn't negotiate HTTP/2!")
+
+ return tls_conn
+
+
+def main():
+ # Step 1: Set up your TLS context.
+ context = get_http2_ssl_context()
+
+ # Step 2: Receive a TCP connection.
+ connection = establish_tcp_connection()
+
+ # Step 3: Wrap the connection in TLS and validate that we negotiated HTTP/2
+ tls_connection = negotiate_tls(connection, context)
+
+ # Step 4: Create a server-side H2 connection.
+ config = h2.config.H2Configuration(client_side=False)
+ http2_connection = h2.connection.H2Connection(config=config)
+
+ # Step 5: Initiate the connection
+ http2_connection.initiate_connection()
+ tls_connection.sendall(http2_connection.data_to_send())
+
+ # The TCP, TLS, and HTTP/2 handshakes are now complete. You can enter your
+ # main loop now.
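+
+
+# A minimal sketch of a follow-on serving loop (not part of the fragment
+# above): answer every request with an empty 200 response. It uses the
+# ``http2_connection`` and ``tls_connection`` objects created in main().
+def example_serve_loop(http2_connection, tls_connection):
+    import h2.events
+
+    while True:
+        data = tls_connection.recv(65535)
+        if not data:
+            return
+        for event in http2_connection.receive_data(data):
+            if isinstance(event, h2.events.RequestReceived):
+                http2_connection.send_headers(
+                    event.stream_id,
+                    [(':status', '200'), ('content-length', '0')],
+                    end_stream=True,
+                )
+        tls_connection.sendall(http2_connection.data_to_send())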
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/fragments/server_upgrade_fragment.py b/testing/web-platform/tests/tools/third_party/h2/examples/fragments/server_upgrade_fragment.py
new file mode 100644
index 0000000000..7e8b1f0eea
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/fragments/server_upgrade_fragment.py
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+"""
+Server Plaintext Upgrade
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+This example code fragment demonstrates how to set up an HTTP/2 server that uses
+the plaintext HTTP Upgrade mechanism to negotiate HTTP/2 connectivity. For
+maximum explanatory value it uses the synchronous socket API that comes with
+the Python standard library. In production code you will want to use an actual
+HTTP/1.1 server library if possible.
+
+This code requires Python 3.5 or later.
+"""
+import h2.config
+import h2.connection
+import re
+import socket
+
+
+def establish_tcp_connection():
+ """
+ This function establishes a server-side TCP connection. How it works isn't
+ very important to this example.
+ """
+ bind_socket = socket.socket()
+ bind_socket.bind(('', 443))
+ bind_socket.listen(5)
+ return bind_socket.accept()[0]
+
+
+def receive_initial_request(connection):
+ """
+ We're going to receive a request. For the sake of this example, we're going
+ to assume that the first request has no body. If it doesn't have the
+ Upgrade: h2c header field and the HTTP2-Settings header field, we'll throw
+ errors.
+
+ In production code, you should use a proper HTTP/1.1 parser and actually
+ serve HTTP/1.1 requests!
+
+ Returns the value of the HTTP2-Settings header field.
+ """
+ data = b''
+ while not data.endswith(b'\r\n\r\n'):
+ data += connection.recv(8192)
+
+ match = re.search(b'Upgrade: h2c\r\n', data)
+ if match is None:
+ raise RuntimeError("HTTP/2 upgrade not requested!")
+
+ # We need to look for the HTTP2-Settings header field. Again, in production
+ # code you shouldn't use regular expressions for this, but it's good enough
+ # for the example.
+ match = re.search(b'HTTP2-Settings: (\\S+)\r\n', data)
+ if match is None:
+ raise RuntimeError("HTTP2-Settings header field not present!")
+
+ return match.group(1)
+
+
+def send_upgrade_response(connection):
+ """
+ This function writes the 101 Switching Protocols response.
+ """
+ response = (
+ b"HTTP/1.1 101 Switching Protocols\r\n"
+ b"Upgrade: h2c\r\n"
+ b"\r\n"
+ )
+ connection.sendall(response)
+
+
+def main():
+ """
+ The server upgrade flow.
+ """
+ # Step 1: Establish the TCP connection.
+ connection = establish_tcp_connection()
+
+ # Step 2: Read the initial request. We expect it to ask for an upgrade.
+ settings_header_value = receive_initial_request(connection)
+
+ # Step 3: Create a H2Connection object in server mode, and pass it the
+ # value of the HTTP2-Settings header field.
+ config = h2.config.H2Configuration(client_side=False)
+ h2_connection = h2.connection.H2Connection(config=config)
+ h2_connection.initiate_upgrade_connection(
+ settings_header=settings_header_value
+ )
+
+ # Step 4: Send the 101 Switching Protocols response.
+ send_upgrade_response(connection)
+
+ # Step 5: Send pending HTTP/2 data.
+ connection.sendall(h2_connection.data_to_send())
+
+ # At this point, you can enter your main loop. The first step has to be to
+ # send the response to the initial HTTP/1.1 request you received on stream
+ # 1.
+ main_loop()
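
``main_loop()`` is intentionally left undefined in this fragment; it is called with no arguments above, so the exact wiring is left to the reader. One possible shape, written here to take the ``connection`` and ``h2_connection`` objects explicitly, first answers the original HTTP/1.1 request on stream 1 and then keeps serving requests over the upgraded connection; the response bodies are placeholders.

import h2.events


def main_loop(connection, h2_connection):
    # Respond on stream 1 first: it carries the HTTP/1.1 request that
    # triggered the upgrade.
    body = b'upgraded to HTTP/2'
    h2_connection.send_headers(1, [
        (':status', '200'),
        ('content-length', str(len(body))),
    ])
    h2_connection.send_data(1, body, end_stream=True)
    connection.sendall(h2_connection.data_to_send())

    # Then serve any further requests arriving over the h2c connection.
    while True:
        data = connection.recv(65535)
        if not data:
            break
        for event in h2_connection.receive_data(data):
            if isinstance(event, h2.events.RequestReceived):
                h2_connection.send_headers(
                    event.stream_id,
                    [(':status', '200'), ('content-length', '0')],
                    end_stream=True,
                )
        connection.sendall(h2_connection.data_to_send())
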
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/tornado/server.crt b/testing/web-platform/tests/tools/third_party/h2/examples/tornado/server.crt
new file mode 100644
index 0000000000..bc8a4c08d2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/tornado/server.crt
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDUjCCAjoCCQCQmNzzpQTCijANBgkqhkiG9w0BAQUFADBrMQswCQYDVQQGEwJH
+QjEPMA0GA1UECBMGTG9uZG9uMQ8wDQYDVQQHEwZMb25kb24xETAPBgNVBAoTCGh5
+cGVyLWgyMREwDwYDVQQLEwhoeXBleS1oMjEUMBIGA1UEAxMLZXhhbXBsZS5jb20w
+HhcNMTUwOTE2MjAyOTA0WhcNMTYwOTE1MjAyOTA0WjBrMQswCQYDVQQGEwJHQjEP
+MA0GA1UECBMGTG9uZG9uMQ8wDQYDVQQHEwZMb25kb24xETAPBgNVBAoTCGh5cGVy
+LWgyMREwDwYDVQQLEwhoeXBleS1oMjEUMBIGA1UEAxMLZXhhbXBsZS5jb20wggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC74ZeB4Jdb5cnC9KXXLJuzjwTg
+45q5EeShDYQe0TbKgreiUP6clU3BR0fFAVedN1q/LOuQ1HhvrDk1l4TfGF2bpCIq
+K+U9CnzcQknvdpyyVeOLtSsCjOPk4xydHwkQxwJvHVdtJx4CzDDqGbHNHCF/9gpQ
+lsa3JZW+tIZLK0XMEPFQ4XFXgegxTStO7kBBPaVIgG9Ooqc2MG4rjMNUpxa28WF1
+SyqWTICf2N8T/C+fPzbQLKCWrFrKUP7WQlOaqPNQL9bCDhSTPRTwQOc2/MzVZ9gT
+Xr0Z+JMTXwkSMKO52adE1pmKt00jJ1ecZBiJFyjx0X6hH+/59dLbG/7No+PzAgMB
+AAEwDQYJKoZIhvcNAQEFBQADggEBAG3UhOCa0EemL2iY+C+PR6CwEHQ+n7vkBzNz
+gKOG+Q39spyzqU1qJAzBxLTE81bIQbDg0R8kcLWHVH2y4zViRxZ0jHUFKMgjONW+
+Aj4evic/2Y/LxpLxCajECq/jeMHYrmQONszf9pbc0+exrQpgnwd8asfsM3d/FJS2
+5DIWryCKs/61m9vYL8icWx/9cnfPkBoNv1ER+V1L1TH3ARvABh406SBaeqLTm/kG
+MNuKytKWJsQbNlxzWHVgkKzVsBKvYj0uIEJpClIhbe6XNYRDy8T8mKXVWhJuxH4p
+/agmCG3nxO8aCrUK/EVmbWmVIfCH3t7jlwMX1nJ8MsRE7Ydnk8I=
+-----END CERTIFICATE-----
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/tornado/server.key b/testing/web-platform/tests/tools/third_party/h2/examples/tornado/server.key
new file mode 100644
index 0000000000..11f9ea094b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/tornado/server.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEAu+GXgeCXW+XJwvSl1yybs48E4OOauRHkoQ2EHtE2yoK3olD+
+nJVNwUdHxQFXnTdavyzrkNR4b6w5NZeE3xhdm6QiKivlPQp83EJJ73acslXji7Ur
+Aozj5OMcnR8JEMcCbx1XbSceAsww6hmxzRwhf/YKUJbGtyWVvrSGSytFzBDxUOFx
+V4HoMU0rTu5AQT2lSIBvTqKnNjBuK4zDVKcWtvFhdUsqlkyAn9jfE/wvnz820Cyg
+lqxaylD+1kJTmqjzUC/Wwg4Ukz0U8EDnNvzM1WfYE169GfiTE18JEjCjudmnRNaZ
+irdNIydXnGQYiRco8dF+oR/v+fXS2xv+zaPj8wIDAQABAoIBAQCsdq278+0c13d4
+tViSh4k5r1w8D9IUdp9XU2/nVgckqA9nOVAvbkJc3FC+P7gsQgbUHKj0XoVbhU1S
+q461t8kduPH/oiGhAcKR8WurHEdE0OC6ewhLJAeCMRQwCrAorXXHh7icIt9ClCuG
+iSWUcXEy5Cidx3oL3r1xvIbV85fzdDtE9RC1I/kMjAy63S47YGiqh5vYmJkCa8rG
+Dsd1sEMDPr63XJpqJj3uHRcPvySgXTa+ssTmUH8WJlPTjvDB5hnPz+lkk2JKVPNu
+8adzftZ6hSun+tsc4ZJp8XhGu/m/7MjxWh8MeupLHlXcOEsnj4uHQQsOM3zHojr3
+aDCZiC1pAoGBAOAhwe1ujoS2VJ5RXJ9KMs7eBER/02MDgWZjo54Jv/jFxPWGslKk
+QQceuTe+PruRm41nzvk3q4iZXt8pG0bvpgigN2epcVx/O2ouRsUWWBT0JrVlEzha
+TIvWjtZ5tSQExXgHL3VlM9+ka40l+NldLSPn25+prizaqhalWuvTpP23AoGBANaY
+VhEI6yhp0BBUSATEv9lRgkwx3EbcnXNXPQjDMOthsyfq7FxbdOBEK1rwSDyuE6Ij
+zQGcTOfdiur5Ttg0OQilTJIXJAlpoeecOQ9yGma08c5FMXVJJvcZUuWRZWg1ocQj
+/hx0WVE9NwOoKwTBERv8HX7vJOFRZyvgkJwFxoulAoGAe4m/1XoZrga9z2GzNs10
+AdgX7BW00x+MhH4pIiPnn1yK+nYa9jg4647Asnv3IfXZEnEEgRNxReKbi0+iDFBt
+aNW+lDGuHTi37AfD1EBDnpEQgO1MUcRb6rwBkTAWatsCaO00+HUmyX9cFLm4Vz7n
+caILyQ6CxZBlLgRIgDHxADMCgYEAtubsJGTHmZBmSCStpXLUWbOBLNQqfTM398DZ
+QoirP1PsUQ+IGUfSG/u+QCogR6fPEBkXeFHxsoY/Cvsm2lvYaKgK1VFn46Xm2vNq
+JuIH4pZCqp6LAv4weddZslT0a5eaowRSZ4o7PmTAaRuCXvD3VjTSJwhJFMo+90TV
+vEWn7gkCgYEAkk+unX9kYmKoUdLh22/tzQekBa8WqMxXDwzBCECTAs2GlpL/f73i
+zD15TnaNfLP6Q5RNb0N9tb0Gz1wSkwI1+jGAQLnh2K9X9cIVIqJn8Mf/KQa/wUDV
+Tb1j7FoGUEgX7vbsyWuTd8P76kNYyGqCss1XmbttcSolqpbIdlSUcO0=
+-----END RSA PRIVATE KEY-----
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/tornado/tornado-server.py b/testing/web-platform/tests/tools/third_party/h2/examples/tornado/tornado-server.py
new file mode 100755
index 0000000000..e7d08ab191
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/tornado/tornado-server.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+tornado-server.py
+~~~~~~~~~~~~~~~~~
+
+A fully-functional HTTP/2 server written for Tornado.
+"""
+import collections
+import json
+import ssl
+
+import tornado.gen
+import tornado.ioloop
+import tornado.iostream
+import tornado.tcpserver
+
+from h2.config import H2Configuration
+from h2.connection import H2Connection
+from h2.events import RequestReceived, DataReceived
+
+
+def create_ssl_context(certfile, keyfile):
+ ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
+ ssl_context.options |= (
+ ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1 | ssl.OP_NO_COMPRESSION
+ )
+ ssl_context.set_ciphers("ECDHE+AESGCM")
+ ssl_context.load_cert_chain(certfile=certfile, keyfile=keyfile)
+ ssl_context.set_alpn_protocols(["h2"])
+ return ssl_context
+
+
+class H2Server(tornado.tcpserver.TCPServer):
+
+ @tornado.gen.coroutine
+ def handle_stream(self, stream, address):
+ handler = EchoHeadersHandler(stream)
+ yield handler.handle()
+
+
+class EchoHeadersHandler(object):
+
+ def __init__(self, stream):
+ self.stream = stream
+
+ config = H2Configuration(client_side=False)
+ self.conn = H2Connection(config=config)
+
+ @tornado.gen.coroutine
+ def handle(self):
+ self.conn.initiate_connection()
+ yield self.stream.write(self.conn.data_to_send())
+
+ while True:
+ try:
+ data = yield self.stream.read_bytes(65535, partial=True)
+ if not data:
+ break
+
+ events = self.conn.receive_data(data)
+ for event in events:
+ if isinstance(event, RequestReceived):
+ self.request_received(event.headers, event.stream_id)
+ elif isinstance(event, DataReceived):
+ self.conn.reset_stream(event.stream_id)
+
+ yield self.stream.write(self.conn.data_to_send())
+
+ except tornado.iostream.StreamClosedError:
+ break
+
+ def request_received(self, headers, stream_id):
+ headers = collections.OrderedDict(headers)
+ data = json.dumps({'headers': headers}, indent=4).encode('utf-8')
+
+ response_headers = (
+ (':status', '200'),
+ ('content-type', 'application/json'),
+ ('content-length', str(len(data))),
+ ('server', 'tornado-h2'),
+ )
+ self.conn.send_headers(stream_id, response_headers)
+ self.conn.send_data(stream_id, data, end_stream=True)
+
+
+if __name__ == '__main__':
+ ssl_context = create_ssl_context('server.crt', 'server.key')
+ server = H2Server(ssl_options=ssl_context)
+ server.listen(8888)
+ io_loop = tornado.ioloop.IOLoop.current()
+ io_loop.start()
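
One way to exercise this server locally is with a small blocking client built from the same pieces used in the fragments earlier in this patch (``socket``, ``ssl`` and ``h2``). The sketch below assumes the server is listening on ``localhost:8888`` with the self-signed example certificate, which is why certificate verification is switched off.

import socket
import ssl

import h2.connection
import h2.events

# TLS setup: the bundled certificate is self-signed, so skip verification.
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
ctx.set_alpn_protocols(['h2'])

sock = ctx.wrap_socket(socket.create_connection(('localhost', 8888)),
                       server_hostname='localhost')

conn = h2.connection.H2Connection()
conn.initiate_connection()
conn.send_headers(1, [
    (':method', 'GET'),
    (':authority', 'localhost'),
    (':scheme', 'https'),
    (':path', '/'),
], end_stream=True)
sock.sendall(conn.data_to_send())

# Read until the response stream ends, printing any body data received.
done = False
while not done:
    data = sock.recv(65535)
    if not data:
        break
    for event in conn.receive_data(data):
        if isinstance(event, h2.events.DataReceived):
            print(event.data.decode('utf-8'))
            conn.acknowledge_received_data(event.flow_controlled_length,
                                           event.stream_id)
        elif isinstance(event, h2.events.StreamEnded):
            done = True
    sock.sendall(conn.data_to_send())
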
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/twisted/head_request.py b/testing/web-platform/tests/tools/third_party/h2/examples/twisted/head_request.py
new file mode 100644
index 0000000000..4a7538024a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/twisted/head_request.py
@@ -0,0 +1,111 @@
+# -*- coding: utf-8 -*-
+"""
+head_request.py
+~~~~~~~~~~~~~~~
+
+A short example that demonstrates a client that makes HEAD requests to certain
+websites.
+
+This example is intended as a reproduction of nghttp2 issue 396, for the
+purposes of compatibility testing.
+"""
+from __future__ import print_function
+
+from twisted.internet import reactor
+from twisted.internet.endpoints import connectProtocol, SSL4ClientEndpoint
+from twisted.internet.protocol import Protocol
+from twisted.internet.ssl import optionsForClientTLS
+from hyperframe.frame import SettingsFrame
+from h2.connection import H2Connection
+from h2.events import (
+ ResponseReceived, DataReceived, StreamEnded,
+ StreamReset, SettingsAcknowledged,
+)
+
+
+AUTHORITY = u'nghttp2.org'
+PATH = '/httpbin/'
+SIZE = 4096
+
+
+class H2Protocol(Protocol):
+ def __init__(self):
+ self.conn = H2Connection()
+ self.known_proto = None
+ self.request_made = False
+
+ def connectionMade(self):
+ self.conn.initiate_connection()
+
+ # This reproduces the error in #396, by changing the header table size.
+ self.conn.update_settings({SettingsFrame.HEADER_TABLE_SIZE: SIZE})
+
+ self.transport.write(self.conn.data_to_send())
+
+ def dataReceived(self, data):
+ if not self.known_proto:
+ self.known_proto = self.transport.negotiatedProtocol
+ assert self.known_proto == b'h2'
+
+ events = self.conn.receive_data(data)
+
+ for event in events:
+ if isinstance(event, ResponseReceived):
+ self.handleResponse(event.headers, event.stream_id)
+ elif isinstance(event, DataReceived):
+ self.handleData(event.data, event.stream_id)
+ elif isinstance(event, StreamEnded):
+ self.endStream(event.stream_id)
+ elif isinstance(event, SettingsAcknowledged):
+ self.settingsAcked(event)
+ elif isinstance(event, StreamReset):
+ reactor.stop()
+ raise RuntimeError("Stream reset: %d" % event.error_code)
+ else:
+ print(event)
+
+ data = self.conn.data_to_send()
+ if data:
+ self.transport.write(data)
+
+ def settingsAcked(self, event):
+ # Having received the remote settings change, let's send our request.
+ if not self.request_made:
+ self.sendRequest()
+
+ def handleResponse(self, response_headers, stream_id):
+ for name, value in response_headers:
+ print("%s: %s" % (name.decode('utf-8'), value.decode('utf-8')))
+
+ print("")
+
+ def handleData(self, data, stream_id):
+ print(data, end='')
+
+ def endStream(self, stream_id):
+ self.conn.close_connection()
+ self.transport.write(self.conn.data_to_send())
+ self.transport.loseConnection()
+ reactor.stop()
+
+ def sendRequest(self):
+ request_headers = [
+ (':method', 'HEAD'),
+ (':authority', AUTHORITY),
+ (':scheme', 'https'),
+ (':path', PATH),
+ ('user-agent', 'hyper-h2/1.0.0'),
+ ]
+ self.conn.send_headers(1, request_headers, end_stream=True)
+ self.request_made = True
+
+options = optionsForClientTLS(
+ hostname=AUTHORITY,
+ acceptableProtocols=[b'h2'],
+)
+
+connectProtocol(
+ SSL4ClientEndpoint(reactor, AUTHORITY, 443, options),
+ H2Protocol()
+)
+reactor.run()
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/twisted/post_request.py b/testing/web-platform/tests/tools/third_party/h2/examples/twisted/post_request.py
new file mode 100644
index 0000000000..c817bac465
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/twisted/post_request.py
@@ -0,0 +1,249 @@
+# -*- coding: utf-8 -*-
+"""
+post_request.py
+~~~~~~~~~~~~~~~
+
+A short example that demonstrates a client that makes POST requests to certain
+websites.
+
+This example is intended to demonstrate how to handle uploading request bodies.
+In this instance, a file will be uploaded. In order to handle arbitrary files,
+this example also demonstrates how to obey HTTP/2 flow control rules.
+
+Takes one command-line argument: a path to a file in the filesystem to upload.
+If none is present, uploads this file.
+"""
+from __future__ import print_function
+
+import mimetypes
+import os
+import sys
+
+from twisted.internet import reactor, defer
+from twisted.internet.endpoints import connectProtocol, SSL4ClientEndpoint
+from twisted.internet.protocol import Protocol
+from twisted.internet.ssl import optionsForClientTLS
+from h2.connection import H2Connection
+from h2.events import (
+ ResponseReceived, DataReceived, StreamEnded, StreamReset, WindowUpdated,
+ SettingsAcknowledged,
+)
+
+
+AUTHORITY = u'nghttp2.org'
+PATH = '/httpbin/post'
+
+
+class H2Protocol(Protocol):
+ def __init__(self, file_path):
+ self.conn = H2Connection()
+ self.known_proto = None
+ self.request_made = False
+ self.request_complete = False
+ self.file_path = file_path
+ self.flow_control_deferred = None
+ self.fileobj = None
+ self.file_size = None
+
+ def connectionMade(self):
+ """
+ Called by Twisted when the TCP connection is established. We can start
+ sending some data now: we should open with the connection preamble.
+ """
+ self.conn.initiate_connection()
+ self.transport.write(self.conn.data_to_send())
+
+ def dataReceived(self, data):
+ """
+ Called by Twisted when data is received on the connection.
+
+ We need to check a few things here. Firstly, we want to validate that
+ we actually negotiated HTTP/2: if we didn't, we shouldn't proceed!
+
+ Then, we want to pass the data to the protocol stack and check what
+ events occurred.
+ """
+ if not self.known_proto:
+ self.known_proto = self.transport.negotiatedProtocol
+ assert self.known_proto == b'h2'
+
+ events = self.conn.receive_data(data)
+
+ for event in events:
+ if isinstance(event, ResponseReceived):
+ self.handleResponse(event.headers)
+ elif isinstance(event, DataReceived):
+ self.handleData(event.data)
+ elif isinstance(event, StreamEnded):
+ self.endStream()
+ elif isinstance(event, SettingsAcknowledged):
+ self.settingsAcked(event)
+ elif isinstance(event, StreamReset):
+ reactor.stop()
+ raise RuntimeError("Stream reset: %d" % event.error_code)
+ elif isinstance(event, WindowUpdated):
+ self.windowUpdated(event)
+
+ data = self.conn.data_to_send()
+ if data:
+ self.transport.write(data)
+
+ def settingsAcked(self, event):
+ """
+ Called when the remote party ACKs our settings. We send a SETTINGS
+ frame as part of the preamble, so if we want to be very polite we can
+ wait until the ACK for that frame comes before we start sending our
+ request.
+ """
+ if not self.request_made:
+ self.sendRequest()
+
+ def handleResponse(self, response_headers):
+ """
+ Handle the response by printing the response headers.
+ """
+ for name, value in response_headers:
+ print("%s: %s" % (name.decode('utf-8'), value.decode('utf-8')))
+
+ print("")
+
+ def handleData(self, data):
+ """
+ We handle data that's received by just printing it.
+ """
+ print(data, end='')
+
+ def endStream(self):
+ """
+ We call this when the stream is cleanly ended by the remote peer. That
+ means that the response is complete.
+
+ Because this code only makes a single HTTP/2 request, once we receive
+ the complete response we can safely tear the connection down and stop
+ the reactor. We do that as cleanly as possible.
+ """
+ self.request_complete = True
+ self.conn.close_connection()
+ self.transport.write(self.conn.data_to_send())
+ self.transport.loseConnection()
+
+ def windowUpdated(self, event):
+ """
+ We call this when the flow control window for the connection or the
+ stream has been widened. If there's a flow control deferred present
+ (that is, if we're blocked behind the flow control), we fire it.
+ Otherwise, we do nothing.
+ """
+ if self.flow_control_deferred is None:
+ return
+
+ # Make sure we remove the flow control deferred to avoid firing it
+ # more than once.
+ flow_control_deferred = self.flow_control_deferred
+ self.flow_control_deferred = None
+ flow_control_deferred.callback(None)
+
+ def connectionLost(self, reason=None):
+ """
+ Called by Twisted when the connection is gone. Regardless of whether
+ it was clean or not, we want to stop the reactor.
+ """
+ if self.fileobj is not None:
+ self.fileobj.close()
+
+ if reactor.running:
+ reactor.stop()
+
+ def sendRequest(self):
+ """
+ Send the POST request.
+
+ A POST request is made up of one headers frame, and then 0+ data
+ frames. This method begins by sending the headers, and then starts a
+ series of calls to send data.
+ """
+ # First, we need to work out how large the file is.
+ self.file_size = os.stat(self.file_path).st_size
+
+ # Next, we want to guess a content-type and content-encoding.
+ content_type, content_encoding = mimetypes.guess_type(self.file_path)
+
+ # Now we can build a header block.
+ request_headers = [
+ (':method', 'POST'),
+ (':authority', AUTHORITY),
+ (':scheme', 'https'),
+ (':path', PATH),
+ ('user-agent', 'hyper-h2/1.0.0'),
+ ('content-length', str(self.file_size)),
+ ]
+
+ if content_type is not None:
+ request_headers.append(('content-type', content_type))
+
+ if content_encoding is not None:
+ request_headers.append(('content-encoding', content_encoding))
+
+ self.conn.send_headers(1, request_headers)
+ self.request_made = True
+
+ # We can now open the file.
+ self.fileobj = open(self.file_path, 'rb')
+
+ # We now need to send all the relevant data. We do this by checking
+ # what the acceptable amount of data is to send, and sending it. If we
+ # find ourselves blocked behind flow control, we then place a deferred
+ # and wait until that deferred fires.
+ self.sendFileData()
+
+ def sendFileData(self):
+ """
+ Send some file data on the connection.
+ """
+ # Firstly, check what the flow control window is for stream 1.
+ window_size = self.conn.local_flow_control_window(stream_id=1)
+
+ # Next, check what the maximum frame size is.
+ max_frame_size = self.conn.max_outbound_frame_size
+
+ # We will send no more than the window size or the remaining file size
+ # of data in this call, whichever is smaller.
+ bytes_to_send = min(window_size, self.file_size)
+
+ # We now need to send a number of data frames.
+ while bytes_to_send > 0:
+ chunk_size = min(bytes_to_send, max_frame_size)
+ data_chunk = self.fileobj.read(chunk_size)
+ self.conn.send_data(stream_id=1, data=data_chunk)
+
+ bytes_to_send -= chunk_size
+ self.file_size -= chunk_size
+
+ # We've prepared a whole chunk of data to send. If the file is fully
+ # sent, we also want to end the stream: we're done here.
+ if self.file_size == 0:
+ self.conn.end_stream(stream_id=1)
+ else:
+ # We've still got data left to send but the window is closed. Save
+ # a Deferred that will call us when the window gets opened.
+ self.flow_control_deferred = defer.Deferred()
+ self.flow_control_deferred.addCallback(self.sendFileData)
+
+ self.transport.write(self.conn.data_to_send())
+
+
+try:
+ filename = sys.argv[1]
+except IndexError:
+ filename = __file__
+
+options = optionsForClientTLS(
+ hostname=AUTHORITY,
+ acceptableProtocols=[b'h2'],
+)
+
+connectProtocol(
+ SSL4ClientEndpoint(reactor, AUTHORITY, 443, options),
+ H2Protocol(filename)
+)
+reactor.run()
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/twisted/server.crt b/testing/web-platform/tests/tools/third_party/h2/examples/twisted/server.crt
new file mode 100644
index 0000000000..bc8a4c08d2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/twisted/server.crt
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDUjCCAjoCCQCQmNzzpQTCijANBgkqhkiG9w0BAQUFADBrMQswCQYDVQQGEwJH
+QjEPMA0GA1UECBMGTG9uZG9uMQ8wDQYDVQQHEwZMb25kb24xETAPBgNVBAoTCGh5
+cGVyLWgyMREwDwYDVQQLEwhoeXBleS1oMjEUMBIGA1UEAxMLZXhhbXBsZS5jb20w
+HhcNMTUwOTE2MjAyOTA0WhcNMTYwOTE1MjAyOTA0WjBrMQswCQYDVQQGEwJHQjEP
+MA0GA1UECBMGTG9uZG9uMQ8wDQYDVQQHEwZMb25kb24xETAPBgNVBAoTCGh5cGVy
+LWgyMREwDwYDVQQLEwhoeXBleS1oMjEUMBIGA1UEAxMLZXhhbXBsZS5jb20wggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC74ZeB4Jdb5cnC9KXXLJuzjwTg
+45q5EeShDYQe0TbKgreiUP6clU3BR0fFAVedN1q/LOuQ1HhvrDk1l4TfGF2bpCIq
+K+U9CnzcQknvdpyyVeOLtSsCjOPk4xydHwkQxwJvHVdtJx4CzDDqGbHNHCF/9gpQ
+lsa3JZW+tIZLK0XMEPFQ4XFXgegxTStO7kBBPaVIgG9Ooqc2MG4rjMNUpxa28WF1
+SyqWTICf2N8T/C+fPzbQLKCWrFrKUP7WQlOaqPNQL9bCDhSTPRTwQOc2/MzVZ9gT
+Xr0Z+JMTXwkSMKO52adE1pmKt00jJ1ecZBiJFyjx0X6hH+/59dLbG/7No+PzAgMB
+AAEwDQYJKoZIhvcNAQEFBQADggEBAG3UhOCa0EemL2iY+C+PR6CwEHQ+n7vkBzNz
+gKOG+Q39spyzqU1qJAzBxLTE81bIQbDg0R8kcLWHVH2y4zViRxZ0jHUFKMgjONW+
+Aj4evic/2Y/LxpLxCajECq/jeMHYrmQONszf9pbc0+exrQpgnwd8asfsM3d/FJS2
+5DIWryCKs/61m9vYL8icWx/9cnfPkBoNv1ER+V1L1TH3ARvABh406SBaeqLTm/kG
+MNuKytKWJsQbNlxzWHVgkKzVsBKvYj0uIEJpClIhbe6XNYRDy8T8mKXVWhJuxH4p
+/agmCG3nxO8aCrUK/EVmbWmVIfCH3t7jlwMX1nJ8MsRE7Ydnk8I=
+-----END CERTIFICATE-----
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/twisted/server.csr b/testing/web-platform/tests/tools/third_party/h2/examples/twisted/server.csr
new file mode 100644
index 0000000000..cadb53a512
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/twisted/server.csr
@@ -0,0 +1,17 @@
+-----BEGIN CERTIFICATE REQUEST-----
+MIICsDCCAZgCAQAwazELMAkGA1UEBhMCR0IxDzANBgNVBAgTBkxvbmRvbjEPMA0G
+A1UEBxMGTG9uZG9uMREwDwYDVQQKEwhoeXBlci1oMjERMA8GA1UECxMIaHlwZXkt
+aDIxFDASBgNVBAMTC2V4YW1wbGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAu+GXgeCXW+XJwvSl1yybs48E4OOauRHkoQ2EHtE2yoK3olD+nJVN
+wUdHxQFXnTdavyzrkNR4b6w5NZeE3xhdm6QiKivlPQp83EJJ73acslXji7UrAozj
+5OMcnR8JEMcCbx1XbSceAsww6hmxzRwhf/YKUJbGtyWVvrSGSytFzBDxUOFxV4Ho
+MU0rTu5AQT2lSIBvTqKnNjBuK4zDVKcWtvFhdUsqlkyAn9jfE/wvnz820Cyglqxa
+ylD+1kJTmqjzUC/Wwg4Ukz0U8EDnNvzM1WfYE169GfiTE18JEjCjudmnRNaZirdN
+IydXnGQYiRco8dF+oR/v+fXS2xv+zaPj8wIDAQABoAAwDQYJKoZIhvcNAQEFBQAD
+ggEBACZpSoZWxHU5uagpM2Vinh2E7CXiMAlBc6NXhQMD/3fycr9sX4d/+y9Gy3bL
+OfEOHBPlQVGrt05aiTh7m5s3HQfsH8l3RfKpfzCfoqd2ESVwgB092bJwY9fBnkw/
+UzIHvSnlaKc78h+POUoATOb4faQ8P04wzJHzckbCDI8zRzBZTMVGuiWUopq7K5Ce
+VSesbqHHnW9ob/apigKNE0k7et/28NOXNEP90tTsz98yN3TP+Nv9puwvT9JZOOoG
+0PZIQKJIaZ1NZoNQHLN9gXz012XWa99cBE0qNiBUugXlNhXjkIIM8FIhDQOREB18
+0KDxEma+A0quyjnDMwPSoZsMca4=
+-----END CERTIFICATE REQUEST-----
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/twisted/server.key b/testing/web-platform/tests/tools/third_party/h2/examples/twisted/server.key
new file mode 100644
index 0000000000..11f9ea094b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/twisted/server.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEAu+GXgeCXW+XJwvSl1yybs48E4OOauRHkoQ2EHtE2yoK3olD+
+nJVNwUdHxQFXnTdavyzrkNR4b6w5NZeE3xhdm6QiKivlPQp83EJJ73acslXji7Ur
+Aozj5OMcnR8JEMcCbx1XbSceAsww6hmxzRwhf/YKUJbGtyWVvrSGSytFzBDxUOFx
+V4HoMU0rTu5AQT2lSIBvTqKnNjBuK4zDVKcWtvFhdUsqlkyAn9jfE/wvnz820Cyg
+lqxaylD+1kJTmqjzUC/Wwg4Ukz0U8EDnNvzM1WfYE169GfiTE18JEjCjudmnRNaZ
+irdNIydXnGQYiRco8dF+oR/v+fXS2xv+zaPj8wIDAQABAoIBAQCsdq278+0c13d4
+tViSh4k5r1w8D9IUdp9XU2/nVgckqA9nOVAvbkJc3FC+P7gsQgbUHKj0XoVbhU1S
+q461t8kduPH/oiGhAcKR8WurHEdE0OC6ewhLJAeCMRQwCrAorXXHh7icIt9ClCuG
+iSWUcXEy5Cidx3oL3r1xvIbV85fzdDtE9RC1I/kMjAy63S47YGiqh5vYmJkCa8rG
+Dsd1sEMDPr63XJpqJj3uHRcPvySgXTa+ssTmUH8WJlPTjvDB5hnPz+lkk2JKVPNu
+8adzftZ6hSun+tsc4ZJp8XhGu/m/7MjxWh8MeupLHlXcOEsnj4uHQQsOM3zHojr3
+aDCZiC1pAoGBAOAhwe1ujoS2VJ5RXJ9KMs7eBER/02MDgWZjo54Jv/jFxPWGslKk
+QQceuTe+PruRm41nzvk3q4iZXt8pG0bvpgigN2epcVx/O2ouRsUWWBT0JrVlEzha
+TIvWjtZ5tSQExXgHL3VlM9+ka40l+NldLSPn25+prizaqhalWuvTpP23AoGBANaY
+VhEI6yhp0BBUSATEv9lRgkwx3EbcnXNXPQjDMOthsyfq7FxbdOBEK1rwSDyuE6Ij
+zQGcTOfdiur5Ttg0OQilTJIXJAlpoeecOQ9yGma08c5FMXVJJvcZUuWRZWg1ocQj
+/hx0WVE9NwOoKwTBERv8HX7vJOFRZyvgkJwFxoulAoGAe4m/1XoZrga9z2GzNs10
+AdgX7BW00x+MhH4pIiPnn1yK+nYa9jg4647Asnv3IfXZEnEEgRNxReKbi0+iDFBt
+aNW+lDGuHTi37AfD1EBDnpEQgO1MUcRb6rwBkTAWatsCaO00+HUmyX9cFLm4Vz7n
+caILyQ6CxZBlLgRIgDHxADMCgYEAtubsJGTHmZBmSCStpXLUWbOBLNQqfTM398DZ
+QoirP1PsUQ+IGUfSG/u+QCogR6fPEBkXeFHxsoY/Cvsm2lvYaKgK1VFn46Xm2vNq
+JuIH4pZCqp6LAv4weddZslT0a5eaowRSZ4o7PmTAaRuCXvD3VjTSJwhJFMo+90TV
+vEWn7gkCgYEAkk+unX9kYmKoUdLh22/tzQekBa8WqMxXDwzBCECTAs2GlpL/f73i
+zD15TnaNfLP6Q5RNb0N9tb0Gz1wSkwI1+jGAQLnh2K9X9cIVIqJn8Mf/KQa/wUDV
+Tb1j7FoGUEgX7vbsyWuTd8P76kNYyGqCss1XmbttcSolqpbIdlSUcO0=
+-----END RSA PRIVATE KEY-----
diff --git a/testing/web-platform/tests/tools/third_party/h2/examples/twisted/twisted-server.py b/testing/web-platform/tests/tools/third_party/h2/examples/twisted/twisted-server.py
new file mode 100644
index 0000000000..75a271d9b7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/examples/twisted/twisted-server.py
@@ -0,0 +1,192 @@
+# -*- coding: utf-8 -*-
+"""
+twisted-server.py
+~~~~~~~~~~~~~~~~~
+
+A fully-functional HTTP/2 server written for Twisted.
+"""
+import functools
+import mimetypes
+import os
+import os.path
+import sys
+
+from OpenSSL import crypto
+from twisted.internet.defer import Deferred, inlineCallbacks
+from twisted.internet.protocol import Protocol, Factory
+from twisted.internet import endpoints, reactor, ssl
+from h2.config import H2Configuration
+from h2.connection import H2Connection
+from h2.events import (
+ RequestReceived, DataReceived, WindowUpdated
+)
+from h2.exceptions import ProtocolError
+
+
+def close_file(file, d):
+ file.close()
+
+
+READ_CHUNK_SIZE = 8192
+
+
+class H2Protocol(Protocol):
+ def __init__(self, root):
+ config = H2Configuration(client_side=False)
+ self.conn = H2Connection(config=config)
+ self.known_proto = None
+ self.root = root
+
+ self._flow_control_deferreds = {}
+
+ def connectionMade(self):
+ self.conn.initiate_connection()
+ self.transport.write(self.conn.data_to_send())
+
+ def dataReceived(self, data):
+ if not self.known_proto:
+ self.known_proto = True
+
+ try:
+ events = self.conn.receive_data(data)
+ except ProtocolError:
+ if self.conn.data_to_send:
+ self.transport.write(self.conn.data_to_send())
+ self.transport.loseConnection()
+ else:
+ for event in events:
+ if isinstance(event, RequestReceived):
+ self.requestReceived(event.headers, event.stream_id)
+ elif isinstance(event, DataReceived):
+ self.dataFrameReceived(event.stream_id)
+ elif isinstance(event, WindowUpdated):
+ self.windowUpdated(event)
+
+ if self.conn.data_to_send:
+ self.transport.write(self.conn.data_to_send())
+
+ def requestReceived(self, headers, stream_id):
+ headers = dict(headers) # Lossy conversion: duplicate header fields are dropped. Fix later.
+ assert headers[b':method'] == b'GET'
+
+ path = headers[b':path'].lstrip(b'/')
+ full_path = os.path.join(self.root, path)
+
+ if not os.path.exists(full_path):
+ response_headers = (
+ (':status', '404'),
+ ('content-length', '0'),
+ ('server', 'twisted-h2'),
+ )
+ self.conn.send_headers(
+ stream_id, response_headers, end_stream=True
+ )
+ self.transport.write(self.conn.data_to_send())
+ else:
+ self.sendFile(full_path, stream_id)
+
+ return
+
+ def dataFrameReceived(self, stream_id):
+ self.conn.reset_stream(stream_id)
+ self.transport.write(self.conn.data_to_send())
+
+ def sendFile(self, file_path, stream_id):
+ filesize = os.stat(file_path).st_size
+ content_type, content_encoding = mimetypes.guess_type(
+ file_path.decode('utf-8')
+ )
+ response_headers = [
+ (':status', '200'),
+ ('content-length', str(filesize)),
+ ('server', 'twisted-h2'),
+ ]
+ if content_type:
+ response_headers.append(('content-type', content_type))
+ if content_encoding:
+ response_headers.append(('content-encoding', content_encoding))
+
+ self.conn.send_headers(stream_id, response_headers)
+ self.transport.write(self.conn.data_to_send())
+
+ f = open(file_path, 'rb')
+ d = self._send_file(f, stream_id)
+ d.addErrback(functools.partial(close_file, f))
+
+ def windowUpdated(self, event):
+ """
+ Handle a WindowUpdated event by firing any waiting data sending
+ callbacks.
+ """
+ stream_id = event.stream_id
+
+ if stream_id and stream_id in self._flow_control_deferreds:
+ d = self._flow_control_deferreds.pop(stream_id)
+ d.callback(event.delta)
+ elif not stream_id:
+ for d in self._flow_control_deferreds.values():
+ d.callback(event.delta)
+
+ self._flow_control_deferreds = {}
+
+ return
+
+ @inlineCallbacks
+ def _send_file(self, file, stream_id):
+ """
+ This callback sends more data for a given file on the stream.
+ """
+ keep_reading = True
+ while keep_reading:
+ while not self.conn.remote_flow_control_window(stream_id):
+ yield self.wait_for_flow_control(stream_id)
+
+ chunk_size = min(
+ self.conn.remote_flow_control_window(stream_id), READ_CHUNK_SIZE
+ )
+ data = file.read(chunk_size)
+ keep_reading = len(data) == chunk_size
+ self.conn.send_data(stream_id, data, not keep_reading)
+ self.transport.write(self.conn.data_to_send())
+
+ if not keep_reading:
+ break
+
+ file.close()
+
+ def wait_for_flow_control(self, stream_id):
+ """
+ Returns a Deferred that fires when the flow control window is opened.
+ """
+ d = Deferred()
+ self._flow_control_deferreds[stream_id] = d
+ return d
+
+
+class H2Factory(Factory):
+ def __init__(self, root):
+ self.root = root
+
+ def buildProtocol(self, addr):
+ print(H2Protocol)
+ return H2Protocol(self.root)
+
+
+root = sys.argv[1].encode('utf-8')
+
+with open('server.crt', 'r') as f:
+ cert_data = f.read()
+with open('server.key', 'r') as f:
+ key_data = f.read()
+
+cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_data)
+key = crypto.load_privatekey(crypto.FILETYPE_PEM, key_data)
+options = ssl.CertificateOptions(
+ privateKey=key,
+ certificate=cert,
+ acceptableProtocols=[b'h2'],
+)
+
+endpoint = endpoints.SSL4ServerEndpoint(reactor, 8080, options, backlog=128)
+endpoint.listen(H2Factory(root))
+reactor.run()
diff --git a/testing/web-platform/tests/tools/third_party/h2/h2/__init__.py b/testing/web-platform/tests/tools/third_party/h2/h2/__init__.py
new file mode 100644
index 0000000000..6290710e63
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/h2/__init__.py
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+"""
+h2
+~~
+
+An HTTP/2 implementation.
+"""
+__version__ = '3.2.0'
diff --git a/testing/web-platform/tests/tools/third_party/h2/h2/config.py b/testing/web-platform/tests/tools/third_party/h2/h2/config.py
new file mode 100644
index 0000000000..1c437ee24f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/h2/config.py
@@ -0,0 +1,170 @@
+# -*- coding: utf-8 -*-
+"""
+h2/config
+~~~~~~~~~
+
+Objects for controlling the configuration of the HTTP/2 stack.
+"""
+
+
+class _BooleanConfigOption(object):
+ """
+ Descriptor for handling a boolean config option. This will block
+ attempts to set boolean config options to non-bools.
+ """
+ def __init__(self, name):
+ self.name = name
+ self.attr_name = '_%s' % self.name
+
+ def __get__(self, instance, owner):
+ return getattr(instance, self.attr_name)
+
+ def __set__(self, instance, value):
+ if not isinstance(value, bool):
+ raise ValueError("%s must be a bool" % self.name)
+ setattr(instance, self.attr_name, value)
+
+
+class DummyLogger(object):
+ """
+ A Logger object that does no actual logging, hence a DummyLogger.
+
+ For this class the log operations are merely no-ops. The intent is to avoid
+ conditionals being sprinkled throughout the hyper-h2 code for calls to
+ logging functions when no logger is passed into the corresponding object.
+ """
+ def __init__(self, *vargs):
+ pass
+
+ def debug(self, *vargs, **kwargs):
+ """
+ No-op logging. Only level needed for now.
+ """
+ pass
+
+ def trace(self, *vargs, **kwargs):
+ """
+ No-op logging. Only level needed for now.
+ """
+ pass
+
+
+class H2Configuration(object):
+ """
+ An object that controls the way a single HTTP/2 connection behaves.
+
+ This object allows the users to customize behaviour. In particular, it
+ allows users to enable or disable optional features, or to otherwise handle
+ various unusual behaviours.
+
+ This object has very little behaviour of its own: it mostly just ensures
+ that configuration is self-consistent.
+
+ :param client_side: Whether this object is to be used on the client side of
+ a connection, or on the server side. Affects the logic used by the
+ state machine, the default settings values, the allowable stream IDs,
+ and several other properties. Defaults to ``True``.
+ :type client_side: ``bool``
+
+ :param header_encoding: Controls whether the headers emitted by this object
+ in events are transparently decoded to ``unicode`` strings, and what
+ encoding is used to do that decoding. This defaults to ``None``,
+ meaning that headers will be returned as bytes. To automatically
+ decode headers (that is, to return them as unicode strings), this can
+ be set to the string name of any encoding, e.g. ``'utf-8'``.
+
+ .. versionchanged:: 3.0.0
+ Changed default value from ``'utf-8'`` to ``None``
+
+ :type header_encoding: ``str``, ``False``, or ``None``
+
+ :param validate_outbound_headers: Controls whether the headers emitted
+ by this object are validated against the rules in RFC 7540.
+ Disabling this setting will cause outbound header validation to
+ be skipped, and allow the object to emit headers that may be illegal
+ according to RFC 7540. Defaults to ``True``.
+ :type validate_outbound_headers: ``bool``
+
+ :param normalize_outbound_headers: Controls whether the headers emitted
+ by this object are normalized before sending. Disabling this setting
+ will cause outbound header normalization to be skipped, and allow
+ the object to emit headers that may be illegal according to
+ RFC 7540. Defaults to ``True``.
+ :type normalize_outbound_headers: ``bool``
+
+ :param validate_inbound_headers: Controls whether the headers received
+ by this object are validated against the rules in RFC 7540.
+ Disabling this setting will cause inbound header validation to
+ be skipped, and allow the object to receive headers that may be illegal
+ according to RFC 7540. Defaults to ``True``.
+ :type validate_inbound_headers: ``bool``
+
+ :param normalize_inbound_headers: Controls whether the headers received by
+ this object are normalized according to the rules of RFC 7540.
+ Disabling this setting may lead to hyper-h2 emitting header blocks that
+ some RFCs forbid, e.g. with multiple cookie fields.
+
+ .. versionadded:: 3.0.0
+
+ :type normalize_inbound_headers: ``bool``
+
+ :param logger: A logger that conforms to the requirements for this module,
+ those being no I/O and no context switches, which is needed in order
+ to run in asynchronous operation.
+
+ .. versionadded:: 2.6.0
+
+ :type logger: ``logging.Logger``
+ """
+ client_side = _BooleanConfigOption('client_side')
+ validate_outbound_headers = _BooleanConfigOption(
+ 'validate_outbound_headers'
+ )
+ normalize_outbound_headers = _BooleanConfigOption(
+ 'normalize_outbound_headers'
+ )
+ validate_inbound_headers = _BooleanConfigOption(
+ 'validate_inbound_headers'
+ )
+ normalize_inbound_headers = _BooleanConfigOption(
+ 'normalize_inbound_headers'
+ )
+
+ def __init__(self,
+ client_side=True,
+ header_encoding=None,
+ validate_outbound_headers=True,
+ normalize_outbound_headers=True,
+ validate_inbound_headers=True,
+ normalize_inbound_headers=True,
+ logger=None):
+ self.client_side = client_side
+ self.header_encoding = header_encoding
+ self.validate_outbound_headers = validate_outbound_headers
+ self.normalize_outbound_headers = normalize_outbound_headers
+ self.validate_inbound_headers = validate_inbound_headers
+ self.normalize_inbound_headers = normalize_inbound_headers
+ self.logger = logger or DummyLogger(__name__)
+
+ @property
+ def header_encoding(self):
+ """
+ Controls whether the headers emitted by this object in events are
+ transparently decoded to ``unicode`` strings, and what encoding is used
+ to do that decoding. This defaults to ``None``, meaning that headers
+ will be returned as bytes. To automatically decode headers (that is, to
+ return them as unicode strings), this can be set to the string name of
+ any encoding, e.g. ``'utf-8'``.
+ """
+ return self._header_encoding
+
+ @header_encoding.setter
+ def header_encoding(self, value):
+ """
+ Enforces constraints on the value of header encoding.
+ """
+ if not isinstance(value, (bool, str, type(None))):
+ raise ValueError("header_encoding must be bool, string, or None")
+ if value is True:
+ raise ValueError("header_encoding cannot be True")
+ self._header_encoding = value
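
Putting the options documented above together: the usual pattern is to build one configuration object up front and hand it to the connection. A brief sketch, where the ``header_encoding='utf-8'`` choice simply illustrates the decoding behaviour described in the docstring:

from h2.config import H2Configuration
from h2.connection import H2Connection

# Server-side configuration that decodes inbound header names and values.
config = H2Configuration(client_side=False, header_encoding='utf-8')
conn = H2Connection(config=config)

# The boolean options are guarded by _BooleanConfigOption, so assigning
# anything other than a bool raises ValueError.
config.validate_inbound_headers = False
# config.validate_inbound_headers = 'no'  # would raise ValueError
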
diff --git a/testing/web-platform/tests/tools/third_party/h2/h2/connection.py b/testing/web-platform/tests/tools/third_party/h2/h2/connection.py
new file mode 100644
index 0000000000..35e6fd6da8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/h2/connection.py
@@ -0,0 +1,2048 @@
+# -*- coding: utf-8 -*-
+"""
+h2/connection
+~~~~~~~~~~~~~
+
+An implementation of an HTTP/2 connection.
+"""
+import base64
+
+from enum import Enum, IntEnum
+
+from hyperframe.exceptions import InvalidPaddingError
+from hyperframe.frame import (
+ GoAwayFrame, WindowUpdateFrame, HeadersFrame, DataFrame, PingFrame,
+ PushPromiseFrame, SettingsFrame, RstStreamFrame, PriorityFrame,
+ ContinuationFrame, AltSvcFrame, ExtensionFrame
+)
+from hpack.hpack import Encoder, Decoder
+from hpack.exceptions import HPACKError, OversizedHeaderListError
+
+from .config import H2Configuration
+from .errors import ErrorCodes, _error_code_from_int
+from .events import (
+ WindowUpdated, RemoteSettingsChanged, PingReceived, PingAckReceived,
+ SettingsAcknowledged, ConnectionTerminated, PriorityUpdated,
+ AlternativeServiceAvailable, UnknownFrameReceived
+)
+from .exceptions import (
+ ProtocolError, NoSuchStreamError, FlowControlError, FrameTooLargeError,
+ TooManyStreamsError, StreamClosedError, StreamIDTooLowError,
+ NoAvailableStreamIDError, RFC1122Error, DenialOfServiceError
+)
+from .frame_buffer import FrameBuffer
+from .settings import Settings, SettingCodes
+from .stream import H2Stream, StreamClosedBy
+from .utilities import SizeLimitDict, guard_increment_window
+from .windows import WindowManager
+
+
+class ConnectionState(Enum):
+ IDLE = 0
+ CLIENT_OPEN = 1
+ SERVER_OPEN = 2
+ CLOSED = 3
+
+
+class ConnectionInputs(Enum):
+ SEND_HEADERS = 0
+ SEND_PUSH_PROMISE = 1
+ SEND_DATA = 2
+ SEND_GOAWAY = 3
+ SEND_WINDOW_UPDATE = 4
+ SEND_PING = 5
+ SEND_SETTINGS = 6
+ SEND_RST_STREAM = 7
+ SEND_PRIORITY = 8
+ RECV_HEADERS = 9
+ RECV_PUSH_PROMISE = 10
+ RECV_DATA = 11
+ RECV_GOAWAY = 12
+ RECV_WINDOW_UPDATE = 13
+ RECV_PING = 14
+ RECV_SETTINGS = 15
+ RECV_RST_STREAM = 16
+ RECV_PRIORITY = 17
+ SEND_ALTERNATIVE_SERVICE = 18 # Added in 2.3.0
+ RECV_ALTERNATIVE_SERVICE = 19 # Added in 2.3.0
+
+
+class AllowedStreamIDs(IntEnum):
+ EVEN = 0
+ ODD = 1
+
+
+class H2ConnectionStateMachine(object):
+ """
+ A single HTTP/2 connection state machine.
+
+ This state machine, while defined in its own class, is logically part of
+ the H2Connection class also defined in this file. The state machine itself
+ maintains very little state directly, instead focusing entirely on managing
+ state transitions.
+ """
+ # For the purposes of this state machine we treat HEADERS and their
+ # associated CONTINUATION frames as a single jumbo frame. The protocol
+ # allows/requires this by preventing other frames from being interleaved in
+ # between HEADERS/CONTINUATION frames.
+ #
+ # The _transitions dictionary contains a mapping of tuples of
+ # (state, input) to tuples of (side_effect_function, end_state). This map
+ # contains all allowed transitions: anything not in this map is invalid
+ # and immediately causes a transition to ``closed``.
+
+ _transitions = {
+ # State: idle
+ (ConnectionState.IDLE, ConnectionInputs.SEND_HEADERS):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.IDLE, ConnectionInputs.RECV_HEADERS):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.IDLE, ConnectionInputs.SEND_SETTINGS):
+ (None, ConnectionState.IDLE),
+ (ConnectionState.IDLE, ConnectionInputs.RECV_SETTINGS):
+ (None, ConnectionState.IDLE),
+ (ConnectionState.IDLE, ConnectionInputs.SEND_WINDOW_UPDATE):
+ (None, ConnectionState.IDLE),
+ (ConnectionState.IDLE, ConnectionInputs.RECV_WINDOW_UPDATE):
+ (None, ConnectionState.IDLE),
+ (ConnectionState.IDLE, ConnectionInputs.SEND_PING):
+ (None, ConnectionState.IDLE),
+ (ConnectionState.IDLE, ConnectionInputs.RECV_PING):
+ (None, ConnectionState.IDLE),
+ (ConnectionState.IDLE, ConnectionInputs.SEND_GOAWAY):
+ (None, ConnectionState.CLOSED),
+ (ConnectionState.IDLE, ConnectionInputs.RECV_GOAWAY):
+ (None, ConnectionState.CLOSED),
+ (ConnectionState.IDLE, ConnectionInputs.SEND_PRIORITY):
+ (None, ConnectionState.IDLE),
+ (ConnectionState.IDLE, ConnectionInputs.RECV_PRIORITY):
+ (None, ConnectionState.IDLE),
+ (ConnectionState.IDLE, ConnectionInputs.SEND_ALTERNATIVE_SERVICE):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.IDLE, ConnectionInputs.RECV_ALTERNATIVE_SERVICE):
+ (None, ConnectionState.CLIENT_OPEN),
+
+ # State: open, client side.
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_HEADERS):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_DATA):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_GOAWAY):
+ (None, ConnectionState.CLOSED),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_WINDOW_UPDATE):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_PING):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_SETTINGS):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_PRIORITY):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_HEADERS):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_PUSH_PROMISE):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_DATA):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_GOAWAY):
+ (None, ConnectionState.CLOSED),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_WINDOW_UPDATE):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_PING):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_SETTINGS):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_RST_STREAM):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_RST_STREAM):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_PRIORITY):
+ (None, ConnectionState.CLIENT_OPEN),
+ (ConnectionState.CLIENT_OPEN,
+ ConnectionInputs.RECV_ALTERNATIVE_SERVICE):
+ (None, ConnectionState.CLIENT_OPEN),
+
+ # State: open, server side.
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_HEADERS):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_PUSH_PROMISE):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_DATA):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_GOAWAY):
+ (None, ConnectionState.CLOSED),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_WINDOW_UPDATE):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_PING):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_SETTINGS):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_PRIORITY):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_HEADERS):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_DATA):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_GOAWAY):
+ (None, ConnectionState.CLOSED),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_WINDOW_UPDATE):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_PING):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_SETTINGS):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_PRIORITY):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_RST_STREAM):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_RST_STREAM):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN,
+ ConnectionInputs.SEND_ALTERNATIVE_SERVICE):
+ (None, ConnectionState.SERVER_OPEN),
+ (ConnectionState.SERVER_OPEN,
+ ConnectionInputs.RECV_ALTERNATIVE_SERVICE):
+ (None, ConnectionState.SERVER_OPEN),
+
+ # State: closed
+ (ConnectionState.CLOSED, ConnectionInputs.SEND_GOAWAY):
+ (None, ConnectionState.CLOSED),
+ (ConnectionState.CLOSED, ConnectionInputs.RECV_GOAWAY):
+ (None, ConnectionState.CLOSED),
+ }
+
+ def __init__(self):
+ self.state = ConnectionState.IDLE
+
+ def process_input(self, input_):
+ """
+ Process a specific input in the state machine.
+ """
+ if not isinstance(input_, ConnectionInputs):
+ raise ValueError("Input must be an instance of ConnectionInputs")
+
+ try:
+ func, target_state = self._transitions[(self.state, input_)]
+ except KeyError:
+ old_state = self.state
+ self.state = ConnectionState.CLOSED
+ raise ProtocolError(
+ "Invalid input %s in state %s" % (input_, old_state)
+ )
+ else:
+ self.state = target_state
+ if func is not None: # pragma: no cover
+ return func()
+
+ return []
+
+
+class H2Connection(object):
+ """
+ A low-level HTTP/2 connection object. This handles building and receiving
+ frames and maintains both connection and per-stream state for all streams
+ on this connection.
+
+ This wraps an HTTP/2 Connection state machine implementation, ensuring that
+ frames can only be sent/received when the connection is in a valid state.
+ It also builds stream state machines on demand to ensure that the
+ constraints of those state machines are met as well. Attempts to create
+ frames that cannot be sent will raise a ``ProtocolError``.
+
+ .. versionchanged:: 2.3.0
+ Added the ``header_encoding`` keyword argument.
+
+ .. versionchanged:: 2.5.0
+ Added the ``config`` keyword argument. Deprecated the ``client_side``
+ and ``header_encoding`` parameters.
+
+ .. versionchanged:: 3.0.0
+ Removed deprecated parameters and properties.
+
+ :param config: The configuration for the HTTP/2 connection.
+
+ .. versionadded:: 2.5.0
+
+ :type config: :class:`H2Configuration <h2.config.H2Configuration>`
+ """
+ # The initial maximum outbound frame size. This can be changed by receiving
+ # a settings frame.
+ DEFAULT_MAX_OUTBOUND_FRAME_SIZE = 65535
+
+ # The initial maximum inbound frame size. This is somewhat arbitrarily
+ # chosen.
+ DEFAULT_MAX_INBOUND_FRAME_SIZE = 2**24
+
+ # The highest acceptable stream ID.
+ HIGHEST_ALLOWED_STREAM_ID = 2**31 - 1
+
+ # The largest acceptable window increment.
+ MAX_WINDOW_INCREMENT = 2**31 - 1
+
+ # The initial default value of SETTINGS_MAX_HEADER_LIST_SIZE.
+ DEFAULT_MAX_HEADER_LIST_SIZE = 2**16
+
+ # Keep in memory limited amount of results for streams closes
+ MAX_CLOSED_STREAMS = 2**16
+
+ def __init__(self, config=None):
+ self.state_machine = H2ConnectionStateMachine()
+ self.streams = {}
+ self.highest_inbound_stream_id = 0
+ self.highest_outbound_stream_id = 0
+ self.encoder = Encoder()
+ self.decoder = Decoder()
+
+ # This won't always have an effect: for versions of HPACK older
+ # than 2.3.0 it does nothing. However, we have to try!
+ self.decoder.max_header_list_size = self.DEFAULT_MAX_HEADER_LIST_SIZE
+
+ #: The configuration for this HTTP/2 connection object.
+ #:
+ #: .. versionadded:: 2.5.0
+ self.config = config
+ if self.config is None:
+ self.config = H2Configuration(
+ client_side=True,
+ )
+
+ # Objects that store settings, including defaults.
+ #
+ # We set the MAX_CONCURRENT_STREAMS value to 100 because its default is
+ # unbounded, and that's a dangerous default because it allows
+ # essentially unbounded resources to be allocated regardless of how
+ # they will be used. 100 should be suitable for the average
+ # application. This default obviously does not apply to the remote
+ # peer's settings: the remote peer controls them!
+ #
+ # We also set MAX_HEADER_LIST_SIZE to a reasonable value. This is to
+ # advertise our defence against CVE-2016-6581. However, not all
+ # versions of HPACK will let us do it. That's ok: we should at least
+ # suggest that we're not vulnerable.
+ self.local_settings = Settings(
+ client=self.config.client_side,
+ initial_values={
+ SettingCodes.MAX_CONCURRENT_STREAMS: 100,
+ SettingCodes.MAX_HEADER_LIST_SIZE:
+ self.DEFAULT_MAX_HEADER_LIST_SIZE,
+ }
+ )
+ self.remote_settings = Settings(client=not self.config.client_side)
+
+ # The current value of the connection flow control windows on the
+ # connection.
+ self.outbound_flow_control_window = (
+ self.remote_settings.initial_window_size
+ )
+
+ #: The maximum size of a frame that can be emitted by this peer, in
+ #: bytes.
+ self.max_outbound_frame_size = self.remote_settings.max_frame_size
+
+ #: The maximum size of a frame that can be received by this peer, in
+ #: bytes.
+ self.max_inbound_frame_size = self.local_settings.max_frame_size
+
+ # Buffer for incoming data.
+ self.incoming_buffer = FrameBuffer(server=not self.config.client_side)
+
+ # A private variable to store a sequence of received header frames
+ # until completion.
+ self._header_frames = []
+
+ # Data that needs to be sent.
+ self._data_to_send = bytearray()
+
+ # Keeps track of how streams are closed.
+ # Used to ensure that we don't blow up in the face of frames that were
+ # in flight when a RST_STREAM was sent.
+ # Also used to determine whether we should consider a frame received
+ # while a stream is closed as either a stream error or a connection
+ # error.
+ self._closed_streams = SizeLimitDict(
+ size_limit=self.MAX_CLOSED_STREAMS
+ )
+
+ # The flow control window manager for the connection.
+ self._inbound_flow_control_window_manager = WindowManager(
+ max_window_size=self.local_settings.initial_window_size
+ )
+
+ # When in doubt use dict-dispatch.
+ self._frame_dispatch_table = {
+ HeadersFrame: self._receive_headers_frame,
+ PushPromiseFrame: self._receive_push_promise_frame,
+ SettingsFrame: self._receive_settings_frame,
+ DataFrame: self._receive_data_frame,
+ WindowUpdateFrame: self._receive_window_update_frame,
+ PingFrame: self._receive_ping_frame,
+ RstStreamFrame: self._receive_rst_stream_frame,
+ PriorityFrame: self._receive_priority_frame,
+ GoAwayFrame: self._receive_goaway_frame,
+ ContinuationFrame: self._receive_naked_continuation,
+ AltSvcFrame: self._receive_alt_svc_frame,
+ ExtensionFrame: self._receive_unknown_frame
+ }
+
+ def _prepare_for_sending(self, frames):
+ if not frames:
+ return
+ self._data_to_send += b''.join(f.serialize() for f in frames)
+ assert all(f.body_len <= self.max_outbound_frame_size for f in frames)
+
+ def _open_streams(self, remainder):
+ """
+ A common method of counting the number of open streams. Returns the number
+ of streams that are open *and* that have (stream ID % 2) == remainder.
+ While it iterates, it also deletes any closed streams.
+ """
+ count = 0
+ to_delete = []
+
+ for stream_id, stream in self.streams.items():
+ if stream.open and (stream_id % 2 == remainder):
+ count += 1
+ elif stream.closed:
+ to_delete.append(stream_id)
+
+ for stream_id in to_delete:
+ stream = self.streams.pop(stream_id)
+ self._closed_streams[stream_id] = stream.closed_by
+
+ return count
+
+ @property
+ def open_outbound_streams(self):
+ """
+ The current number of open outbound streams.
+ """
+ outbound_numbers = int(self.config.client_side)
+ return self._open_streams(outbound_numbers)
+
+ @property
+ def open_inbound_streams(self):
+ """
+ The current number of open inbound streams.
+ """
+ inbound_numbers = int(not self.config.client_side)
+ return self._open_streams(inbound_numbers)
+
+ @property
+ def inbound_flow_control_window(self):
+ """
+ The size of the inbound flow control window for the connection. This is
+ rarely publicly useful: instead, use :meth:`remote_flow_control_window
+ <h2.connection.H2Connection.remote_flow_control_window>`. This
+ property is largely present to provide a shortcut to this data.
+ """
+ return self._inbound_flow_control_window_manager.current_window_size
+
+ def _begin_new_stream(self, stream_id, allowed_ids):
+ """
+ Initiate a new stream.
+
+ .. versionchanged:: 2.0.0
+ Removed this function from the public API.
+
+ :param stream_id: The ID of the stream to open.
+ :param allowed_ids: What kind of stream ID is allowed.
+ """
+ self.config.logger.debug(
+ "Attempting to initiate stream ID %d", stream_id
+ )
+ outbound = self._stream_id_is_outbound(stream_id)
+ highest_stream_id = (
+ self.highest_outbound_stream_id if outbound else
+ self.highest_inbound_stream_id
+ )
+
+ if stream_id <= highest_stream_id:
+ raise StreamIDTooLowError(stream_id, highest_stream_id)
+
+ if (stream_id % 2) != int(allowed_ids):
+ raise ProtocolError(
+ "Invalid stream ID for peer."
+ )
+
+ s = H2Stream(
+ stream_id,
+ config=self.config,
+ inbound_window_size=self.local_settings.initial_window_size,
+ outbound_window_size=self.remote_settings.initial_window_size
+ )
+ self.config.logger.debug("Stream ID %d created", stream_id)
+ s.max_inbound_frame_size = self.max_inbound_frame_size
+ s.max_outbound_frame_size = self.max_outbound_frame_size
+
+ self.streams[stream_id] = s
+ self.config.logger.debug("Current streams: %s", self.streams.keys())
+
+ if outbound:
+ self.highest_outbound_stream_id = stream_id
+ else:
+ self.highest_inbound_stream_id = stream_id
+
+ return s
+
+ def initiate_connection(self):
+ """
+ Provides any data that needs to be sent at the start of the connection.
+ Must be called for both clients and servers.
+ """
+ self.config.logger.debug("Initializing connection")
+ self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)
+ if self.config.client_side:
+ preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
+ else:
+ preamble = b''
+
+ f = SettingsFrame(0)
+ for setting, value in self.local_settings.items():
+ f.settings[setting] = value
+ self.config.logger.debug(
+ "Send Settings frame: %s", self.local_settings
+ )
+
+ self._data_to_send += preamble + f.serialize()
+
+ def initiate_upgrade_connection(self, settings_header=None):
+ """
+ Call to initialise the connection object for use with an upgraded
+ HTTP/2 connection (i.e. a connection negotiated using the
+ ``Upgrade: h2c`` HTTP header).
+
+ This method differs from :meth:`initiate_connection
+ <h2.connection.H2Connection.initiate_connection>` in several ways.
+ Firstly, it handles the additional SETTINGS frame that is sent in the
+ ``HTTP2-Settings`` header field. When called on a client connection,
+ this method will return a bytestring that the caller can put in the
+ ``HTTP2-Settings`` field they send on their initial request. When
+ called on a server connection, the user **must** provide the value they
+ received from the client in the ``HTTP2-Settings`` header field to the
+ ``settings_header`` argument, which will be used appropriately.
+
+ Additionally, this method sets up stream 1 in a half-closed state
+ appropriate for this side of the connection, to reflect the fact that
+ the request is already complete.
+
+ Finally, this method also prepares the appropriate preamble to be sent
+ after the upgrade.
+
+ .. versionadded:: 2.3.0
+
+ :param settings_header: (optional, server-only): The value of the
+ ``HTTP2-Settings`` header field received from the client.
+ :type settings_header: ``bytes``
+
+ :returns: For clients, a bytestring to put in the ``HTTP2-Settings``.
+ For servers, returns nothing.
+ :rtype: ``bytes`` or ``None``
+ """
+ self.config.logger.debug(
+ "Upgrade connection. Current settings: %s", self.local_settings
+ )
+
+ frame_data = None
+ # Begin by getting the preamble in place.
+ self.initiate_connection()
+
+ if self.config.client_side:
+ f = SettingsFrame(0)
+ for setting, value in self.local_settings.items():
+ f.settings[setting] = value
+
+ frame_data = f.serialize_body()
+ frame_data = base64.urlsafe_b64encode(frame_data)
+ elif settings_header:
+ # We have a settings header from the client. This needs to be
+ # applied, but we want to throw away the ACK. We do this by
+ # inserting the data into a Settings frame and then passing it to
+ # the state machine, but ignoring the return value.
+ settings_header = base64.urlsafe_b64decode(settings_header)
+ f = SettingsFrame(0)
+ f.parse_body(settings_header)
+ self._receive_settings_frame(f)
+
+ # Set up appropriate state. Stream 1 in a half-closed state:
+ # half-closed(local) for clients, half-closed(remote) for servers.
+ # Additionally, we need to set up the Connection state machine.
+ connection_input = (
+ ConnectionInputs.SEND_HEADERS if self.config.client_side
+ else ConnectionInputs.RECV_HEADERS
+ )
+ self.config.logger.debug("Process input %s", connection_input)
+ self.state_machine.process_input(connection_input)
+
+ # Set up stream 1.
+ self._begin_new_stream(stream_id=1, allowed_ids=AllowedStreamIDs.ODD)
+ self.streams[1].upgrade(self.config.client_side)
+ return frame_data
+
+ def _get_or_create_stream(self, stream_id, allowed_ids):
+ """
+ Gets a stream by its stream ID. Will create one if one does not already
+ exist. Use allowed_ids to circumvent the usual stream ID rules for
+ clients and servers.
+
+ .. versionchanged:: 2.0.0
+ Removed this function from the public API.
+ """
+ try:
+ return self.streams[stream_id]
+ except KeyError:
+ return self._begin_new_stream(stream_id, allowed_ids)
+
+ def _get_stream_by_id(self, stream_id):
+ """
+ Gets a stream by its stream ID. Raises NoSuchStreamError if the stream
+ ID does not correspond to a known stream and is higher than the current
+        maximum; raises StreamClosedError if it is at or below the current
+        maximum.
+
+ .. versionchanged:: 2.0.0
+ Removed this function from the public API.
+ """
+ try:
+ return self.streams[stream_id]
+ except KeyError:
+ outbound = self._stream_id_is_outbound(stream_id)
+ highest_stream_id = (
+ self.highest_outbound_stream_id if outbound else
+ self.highest_inbound_stream_id
+ )
+
+ if stream_id > highest_stream_id:
+ raise NoSuchStreamError(stream_id)
+ else:
+ raise StreamClosedError(stream_id)
+
+ def get_next_available_stream_id(self):
+ """
+ Returns an integer suitable for use as the stream ID for the next
+ stream created by this endpoint. For server endpoints, this stream ID
+ will be even. For client endpoints, this stream ID will be odd. If no
+ stream IDs are available, raises :class:`NoAvailableStreamIDError
+ <h2.exceptions.NoAvailableStreamIDError>`.
+
+ .. warning:: The return value from this function does not change until
+ the stream ID has actually been used by sending or pushing
+ headers on that stream. For that reason, it should be
+ called as close as possible to the actual use of the
+ stream ID.
+
+ .. versionadded:: 2.0.0
+
+ :raises: :class:`NoAvailableStreamIDError
+ <h2.exceptions.NoAvailableStreamIDError>`
+ :returns: The next free stream ID this peer can use to initiate a
+ stream.
+ :rtype: ``int``
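+
+        A sketch of the intended usage, allocating the ID immediately before
+        it is used (``headers`` is assumed to be a valid request header
+        list)::
+
+            stream_id = conn.get_next_available_stream_id()
+            conn.send_headers(stream_id, headers, end_stream=True)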
+ """
+ # No streams have been opened yet, so return the lowest allowed stream
+ # ID.
+ if not self.highest_outbound_stream_id:
+ next_stream_id = 1 if self.config.client_side else 2
+ else:
+ next_stream_id = self.highest_outbound_stream_id + 2
+ self.config.logger.debug(
+ "Next available stream ID %d", next_stream_id
+ )
+ if next_stream_id > self.HIGHEST_ALLOWED_STREAM_ID:
+ raise NoAvailableStreamIDError("Exhausted allowed stream IDs")
+
+ return next_stream_id
+
+ def send_headers(self, stream_id, headers, end_stream=False,
+ priority_weight=None, priority_depends_on=None,
+ priority_exclusive=None):
+ """
+ Send headers on a given stream.
+
+        This function can be used to send request or response headers: which
+        kind is sent depends on whether this connection has been opened as a
+ client or server connection, and whether the stream was opened by the
+ remote peer or not.
+
+ If this is a client connection, calling ``send_headers`` will send the
+ headers as a request. It will also implicitly open the stream being
+ used. If this is a client connection and ``send_headers`` has *already*
+ been called, this will send trailers instead.
+
+ If this is a server connection, calling ``send_headers`` will send the
+ headers as a response. It is a protocol error for a server to open a
+ stream by sending headers. If this is a server connection and
+ ``send_headers`` has *already* been called, this will send trailers
+ instead.
+
+ When acting as a server, you may call ``send_headers`` any number of
+ times allowed by the following rules, in this order:
+
+ - zero or more times with ``(':status', '1XX')`` (where ``1XX`` is a
+ placeholder for any 100-level status code).
+ - once with any other status header.
+ - zero or one time for trailers.
+
+ That is, you are allowed to send as many informational responses as you
+ like, followed by one complete response and zero or one HTTP trailer
+ blocks.
+
+ Clients may send one or two header blocks: one request block, and
+ optionally one trailer block.
+
+ If it is important to send HPACK "never indexed" header fields (as
+        defined in `RFC 7541 Section 7.1.3
+ <https://tools.ietf.org/html/rfc7541#section-7.1.3>`_), the user may
+ instead provide headers using the HPACK library's :class:`HeaderTuple
+ <hpack:hpack.HeaderTuple>` and :class:`NeverIndexedHeaderTuple
+ <hpack:hpack.NeverIndexedHeaderTuple>` objects.
+
+ This method also allows users to prioritize the stream immediately,
+ by sending priority information on the HEADERS frame directly. To do
+ this, any one of ``priority_weight``, ``priority_depends_on``, or
+ ``priority_exclusive`` must be set to a value that is not ``None``. For
+ more information on the priority fields, see :meth:`prioritize
+ <h2.connection.H2Connection.prioritize>`.
+
+ .. warning:: In HTTP/2, it is mandatory that all the HTTP/2 special
+ headers (that is, ones whose header keys begin with ``:``) appear
+ at the start of the header block, before any normal headers.
+
+ .. versionchanged:: 2.3.0
+ Added support for using :class:`HeaderTuple
+ <hpack:hpack.HeaderTuple>` objects to store headers.
+
+ .. versionchanged:: 2.4.0
+ Added the ability to provide priority keyword arguments:
+ ``priority_weight``, ``priority_depends_on``, and
+ ``priority_exclusive``.
+
+ :param stream_id: The stream ID to send the headers on. If this stream
+ does not currently exist, it will be created.
+ :type stream_id: ``int``
+
+ :param headers: The request/response headers to send.
+ :type headers: An iterable of two tuples of bytestrings or
+ :class:`HeaderTuple <hpack:hpack.HeaderTuple>` objects.
+
+ :param end_stream: Whether this headers frame should end the stream
+ immediately (that is, whether no more data will be sent after this
+ frame). Defaults to ``False``.
+ :type end_stream: ``bool``
+
+ :param priority_weight: Sets the priority weight of the stream. See
+ :meth:`prioritize <h2.connection.H2Connection.prioritize>` for more
+ about how this field works. Defaults to ``None``, which means that
+ no priority information will be sent.
+ :type priority_weight: ``int`` or ``None``
+
+ :param priority_depends_on: Sets which stream this one depends on for
+ priority purposes. See :meth:`prioritize
+ <h2.connection.H2Connection.prioritize>` for more about how this
+ field works. Defaults to ``None``, which means that no priority
+ information will be sent.
+ :type priority_depends_on: ``int`` or ``None``
+
+ :param priority_exclusive: Sets whether this stream exclusively depends
+ on the stream given in ``priority_depends_on`` for priority
+ purposes. See :meth:`prioritize
+ <h2.connection.H2Connection.prioritize>` for more about how this
+            field works. Defaults to ``None``, which means that no priority
+            information will be sent.
+        :type priority_exclusive: ``bool`` or ``None``
+
+ :returns: Nothing
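+
+        A minimal client-side sketch (the header values shown are illustrative
+        only)::
+
+            conn.send_headers(
+                stream_id=1,
+                headers=[
+                    (':method', 'GET'),
+                    (':path', '/'),
+                    (':scheme', 'https'),
+                    (':authority', 'example.com'),
+                ],
+                end_stream=True,
+            )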
+ """
+ self.config.logger.debug(
+ "Send headers on stream ID %d", stream_id
+ )
+
+ # Check we can open the stream.
+ if stream_id not in self.streams:
+ max_open_streams = self.remote_settings.max_concurrent_streams
+ if (self.open_outbound_streams + 1) > max_open_streams:
+ raise TooManyStreamsError(
+ "Max outbound streams is %d, %d open" %
+ (max_open_streams, self.open_outbound_streams)
+ )
+
+ self.state_machine.process_input(ConnectionInputs.SEND_HEADERS)
+ stream = self._get_or_create_stream(
+ stream_id, AllowedStreamIDs(self.config.client_side)
+ )
+ frames = stream.send_headers(
+ headers, self.encoder, end_stream
+ )
+
+ # We may need to send priority information.
+ priority_present = (
+ (priority_weight is not None) or
+ (priority_depends_on is not None) or
+ (priority_exclusive is not None)
+ )
+
+ if priority_present:
+ if not self.config.client_side:
+ raise RFC1122Error("Servers SHOULD NOT prioritize streams.")
+
+ headers_frame = frames[0]
+ headers_frame.flags.add('PRIORITY')
+ frames[0] = _add_frame_priority(
+ headers_frame,
+ priority_weight,
+ priority_depends_on,
+ priority_exclusive
+ )
+
+ self._prepare_for_sending(frames)
+
+ def send_data(self, stream_id, data, end_stream=False, pad_length=None):
+ """
+ Send data on a given stream.
+
+ This method does no breaking up of data: if the data is larger than the
+ value returned by :meth:`local_flow_control_window
+ <h2.connection.H2Connection.local_flow_control_window>` for this stream
+ then a :class:`FlowControlError <h2.exceptions.FlowControlError>` will
+ be raised. If the data is larger than :data:`max_outbound_frame_size
+ <h2.connection.H2Connection.max_outbound_frame_size>` then a
+ :class:`FrameTooLargeError <h2.exceptions.FrameTooLargeError>` will be
+ raised.
+
+ Hyper-h2 does this to avoid buffering the data internally. If the user
+ has more data to send than hyper-h2 will allow, consider breaking it up
+ and buffering it externally.
+
+ :param stream_id: The ID of the stream on which to send the data.
+ :type stream_id: ``int``
+ :param data: The data to send on the stream.
+ :type data: ``bytes``
+ :param end_stream: (optional) Whether this is the last data to be sent
+ on the stream. Defaults to ``False``.
+ :type end_stream: ``bool``
+ :param pad_length: (optional) Length of the padding to apply to the
+ data frame. Defaults to ``None`` for no use of padding. Note that
+ a value of ``0`` results in padding of length ``0``
+ (with the "padding" flag set on the frame).
+
+ .. versionadded:: 2.6.0
+
+ :type pad_length: ``int``
+ :returns: Nothing
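+
+        A sketch that checks both limits before sending a chunk, assuming
+        ``payload`` holds the application's outstanding bytes::
+
+            chunk_size = min(
+                conn.local_flow_control_window(stream_id),
+                conn.max_outbound_frame_size,
+            )
+            conn.send_data(stream_id, payload[:chunk_size])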
+ """
+ self.config.logger.debug(
+ "Send data on stream ID %d with len %d", stream_id, len(data)
+ )
+ frame_size = len(data)
+ if pad_length is not None:
+ if not isinstance(pad_length, int):
+ raise TypeError("pad_length must be an int")
+ if pad_length < 0 or pad_length > 255:
+ raise ValueError("pad_length must be within range: [0, 255]")
+ # Account for padding bytes plus the 1-byte padding length field.
+ frame_size += pad_length + 1
+ self.config.logger.debug(
+ "Frame size on stream ID %d is %d", stream_id, frame_size
+ )
+
+ if frame_size > self.local_flow_control_window(stream_id):
+ raise FlowControlError(
+ "Cannot send %d bytes, flow control window is %d." %
+ (frame_size, self.local_flow_control_window(stream_id))
+ )
+ elif frame_size > self.max_outbound_frame_size:
+ raise FrameTooLargeError(
+ "Cannot send frame size %d, max frame size is %d" %
+ (frame_size, self.max_outbound_frame_size)
+ )
+
+ self.state_machine.process_input(ConnectionInputs.SEND_DATA)
+ frames = self.streams[stream_id].send_data(
+ data, end_stream, pad_length=pad_length
+ )
+
+ self._prepare_for_sending(frames)
+
+ self.outbound_flow_control_window -= frame_size
+ self.config.logger.debug(
+ "Outbound flow control window size is %d",
+ self.outbound_flow_control_window
+ )
+ assert self.outbound_flow_control_window >= 0
+
+ def end_stream(self, stream_id):
+ """
+ Cleanly end a given stream.
+
+ This method ends a stream by sending an empty DATA frame on that stream
+ with the ``END_STREAM`` flag set.
+
+ :param stream_id: The ID of the stream to end.
+ :type stream_id: ``int``
+ :returns: Nothing
+ """
+ self.config.logger.debug("End stream ID %d", stream_id)
+ self.state_machine.process_input(ConnectionInputs.SEND_DATA)
+ frames = self.streams[stream_id].end_stream()
+ self._prepare_for_sending(frames)
+
+ def increment_flow_control_window(self, increment, stream_id=None):
+ """
+ Increment a flow control window, optionally for a single stream. Allows
+ the remote peer to send more data.
+
+ .. versionchanged:: 2.0.0
+ Rejects attempts to increment the flow control window by out of
+ range values with a ``ValueError``.
+
+ :param increment: The amount to increment the flow control window by.
+ :type increment: ``int``
+ :param stream_id: (optional) The ID of the stream that should have its
+ flow control window opened. If not present or ``None``, the
+ connection flow control window will be opened instead.
+ :type stream_id: ``int`` or ``None``
+ :returns: Nothing
+ :raises: ``ValueError``
+ """
+ if not (1 <= increment <= self.MAX_WINDOW_INCREMENT):
+ raise ValueError(
+ "Flow control increment must be between 1 and %d" %
+ self.MAX_WINDOW_INCREMENT
+ )
+
+ self.state_machine.process_input(ConnectionInputs.SEND_WINDOW_UPDATE)
+
+ if stream_id is not None:
+ stream = self.streams[stream_id]
+ frames = stream.increase_flow_control_window(
+ increment
+ )
+
+ self.config.logger.debug(
+ "Increase stream ID %d flow control window by %d",
+ stream_id, increment
+ )
+ else:
+ self._inbound_flow_control_window_manager.window_opened(increment)
+ f = WindowUpdateFrame(0)
+ f.window_increment = increment
+ frames = [f]
+
+ self.config.logger.debug(
+ "Increase connection flow control window by %d", increment
+ )
+
+ self._prepare_for_sending(frames)
+
+ def push_stream(self, stream_id, promised_stream_id, request_headers):
+ """
+ Push a response to the client by sending a PUSH_PROMISE frame.
+
+ If it is important to send HPACK "never indexed" header fields (as
+        defined in `RFC 7541 Section 7.1.3
+ <https://tools.ietf.org/html/rfc7541#section-7.1.3>`_), the user may
+ instead provide headers using the HPACK library's :class:`HeaderTuple
+ <hpack:hpack.HeaderTuple>` and :class:`NeverIndexedHeaderTuple
+ <hpack:hpack.NeverIndexedHeaderTuple>` objects.
+
+ :param stream_id: The ID of the stream that this push is a response to.
+ :type stream_id: ``int``
+ :param promised_stream_id: The ID of the stream that the pushed
+ response will be sent on.
+ :type promised_stream_id: ``int``
+ :param request_headers: The headers of the request that the pushed
+ response will be responding to.
+ :type request_headers: An iterable of two tuples of bytestrings or
+ :class:`HeaderTuple <hpack:hpack.HeaderTuple>` objects.
+ :returns: Nothing
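+
+        A server-side sketch, assuming ``request_stream_id`` identifies the
+        stream carrying the original request (header values are illustrative
+        only)::
+
+            promised_id = conn.get_next_available_stream_id()
+            conn.push_stream(
+                stream_id=request_stream_id,
+                promised_stream_id=promised_id,
+                request_headers=[
+                    (':method', 'GET'),
+                    (':path', '/style.css'),
+                    (':scheme', 'https'),
+                    (':authority', 'example.com'),
+                ],
+            )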
+ """
+ self.config.logger.debug(
+ "Send Push Promise frame on stream ID %d", stream_id
+ )
+
+ if not self.remote_settings.enable_push:
+ raise ProtocolError("Remote peer has disabled stream push")
+
+ self.state_machine.process_input(ConnectionInputs.SEND_PUSH_PROMISE)
+ stream = self._get_stream_by_id(stream_id)
+
+ # We need to prevent users pushing streams in response to streams that
+ # they themselves have already pushed: see #163 and RFC 7540 § 6.6. The
+ # easiest way to do that is to assert that the stream_id is not even:
+ # this shortcut works because only servers can push and the state
+ # machine will enforce this.
+ if (stream_id % 2) == 0:
+ raise ProtocolError("Cannot recursively push streams.")
+
+ new_stream = self._begin_new_stream(
+ promised_stream_id, AllowedStreamIDs.EVEN
+ )
+ self.streams[promised_stream_id] = new_stream
+
+ frames = stream.push_stream_in_band(
+ promised_stream_id, request_headers, self.encoder
+ )
+ new_frames = new_stream.locally_pushed()
+ self._prepare_for_sending(frames + new_frames)
+
+ def ping(self, opaque_data):
+ """
+ Send a PING frame.
+
+ :param opaque_data: A bytestring of length 8 that will be sent in the
+ PING frame.
+ :returns: Nothing
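+
+        For example, any 8-byte bytestring is acceptable::
+
+            conn.ping(b'12345678')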
+ """
+ self.config.logger.debug("Send Ping frame")
+
+ if not isinstance(opaque_data, bytes) or len(opaque_data) != 8:
+ raise ValueError("Invalid value for ping data: %r" % opaque_data)
+
+ self.state_machine.process_input(ConnectionInputs.SEND_PING)
+ f = PingFrame(0)
+ f.opaque_data = opaque_data
+ self._prepare_for_sending([f])
+
+ def reset_stream(self, stream_id, error_code=0):
+ """
+ Reset a stream.
+
+ This method forcibly closes a stream by sending a RST_STREAM frame for
+ a given stream. This is not a graceful closure. To gracefully end a
+ stream, try the :meth:`end_stream
+ <h2.connection.H2Connection.end_stream>` method.
+
+ :param stream_id: The ID of the stream to reset.
+ :type stream_id: ``int``
+ :param error_code: (optional) The error code to use to reset the
+ stream. Defaults to :data:`ErrorCodes.NO_ERROR
+ <h2.errors.ErrorCodes.NO_ERROR>`.
+ :type error_code: ``int``
+ :returns: Nothing
+ """
+ self.config.logger.debug("Reset stream ID %d", stream_id)
+ self.state_machine.process_input(ConnectionInputs.SEND_RST_STREAM)
+ stream = self._get_stream_by_id(stream_id)
+ frames = stream.reset_stream(error_code)
+ self._prepare_for_sending(frames)
+
+ def close_connection(self, error_code=0, additional_data=None,
+ last_stream_id=None):
+
+ """
+ Close a connection, emitting a GOAWAY frame.
+
+ .. versionchanged:: 2.4.0
+ Added ``additional_data`` and ``last_stream_id`` arguments.
+
+ :param error_code: (optional) The error code to send in the GOAWAY
+ frame.
+ :param additional_data: (optional) Additional debug data indicating
+ a reason for closing the connection. Must be a bytestring.
+ :param last_stream_id: (optional) The last stream which was processed
+ by the sender. Defaults to ``highest_inbound_stream_id``.
+ :returns: Nothing
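+
+        A sketch of a graceful shutdown, assuming a connected socket
+        ``sock``::
+
+            conn.close_connection()
+            sock.sendall(conn.data_to_send())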
+ """
+ self.config.logger.debug("Close connection")
+ self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY)
+
+ # Additional_data must be bytes
+ if additional_data is not None:
+ assert isinstance(additional_data, bytes)
+
+ if last_stream_id is None:
+ last_stream_id = self.highest_inbound_stream_id
+
+ f = GoAwayFrame(
+ stream_id=0,
+ last_stream_id=last_stream_id,
+ error_code=error_code,
+ additional_data=(additional_data or b'')
+ )
+ self._prepare_for_sending([f])
+
+ def update_settings(self, new_settings):
+ """
+ Update the local settings. This will prepare and emit the appropriate
+ SETTINGS frame.
+
+ :param new_settings: A dictionary of {setting: new value}
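+
+        For example, advertising a lower concurrency limit::
+
+            from h2.settings import SettingCodes
+
+            conn.update_settings({SettingCodes.MAX_CONCURRENT_STREAMS: 100})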
+ """
+ self.config.logger.debug(
+ "Update connection settings to %s", new_settings
+ )
+ self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)
+ self.local_settings.update(new_settings)
+ s = SettingsFrame(0)
+ s.settings = new_settings
+ self._prepare_for_sending([s])
+
+ def advertise_alternative_service(self,
+ field_value,
+ origin=None,
+ stream_id=None):
+ """
+ Notify a client about an available Alternative Service.
+
+ An Alternative Service is defined in `RFC 7838
+ <https://tools.ietf.org/html/rfc7838>`_. An Alternative Service
+ notification informs a client that a given origin is also available
+ elsewhere.
+
+ Alternative Services can be advertised in two ways. Firstly, they can
+ be advertised explicitly: that is, a server can say "origin X is also
+ available at Y". To advertise like this, set the ``origin`` argument
+ and not the ``stream_id`` argument. Alternatively, they can be
+ advertised implicitly: that is, a server can say "the origin you're
+ contacting on stream X is also available at Y". To advertise like this,
+ set the ``stream_id`` argument and not the ``origin`` argument.
+
+ The explicit method of advertising can be done as long as the
+ connection is active. The implicit method can only be done after the
+ client has sent the request headers and before the server has sent the
+ response headers: outside of those points, Hyper-h2 will forbid sending
+ the Alternative Service advertisement by raising a ProtocolError.
+
+ The ``field_value`` parameter is specified in RFC 7838. Hyper-h2 does
+ not validate or introspect this argument: the user is required to
+ ensure that it's well-formed. ``field_value`` corresponds to RFC 7838's
+ "Alternative Service Field Value".
+
+ .. note:: It is strongly preferred to use the explicit method of
+ advertising Alternative Services. The implicit method of
+ advertising Alternative Services has a number of subtleties
+ and can lead to inconsistencies between the server and
+ client. Hyper-h2 allows both mechanisms, but caution is
+ strongly advised.
+
+ .. versionadded:: 2.3.0
+
+ :param field_value: The RFC 7838 Alternative Service Field Value. This
+ argument is not introspected by Hyper-h2: the user is responsible
+ for ensuring that it is well-formed.
+ :type field_value: ``bytes``
+
+ :param origin: The origin/authority to which the Alternative Service
+ being advertised applies. Must not be provided at the same time as
+ ``stream_id``.
+ :type origin: ``bytes`` or ``None``
+
+ :param stream_id: The ID of the stream which was sent to the authority
+ for which this Alternative Service advertisement applies. Must not
+ be provided at the same time as ``origin``.
+ :type stream_id: ``int`` or ``None``
+
+ :returns: Nothing.
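+
+        A server-side sketch of the explicit form (the field value and origin
+        shown are illustrative only)::
+
+            conn.advertise_alternative_service(
+                field_value=b'h2=":8000"; ma=60',
+                origin=b'https://example.com',
+            )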
+ """
+ if not isinstance(field_value, bytes):
+ raise ValueError("Field must be bytestring.")
+
+ if origin is not None and stream_id is not None:
+ raise ValueError("Must not provide both origin and stream_id")
+
+ self.state_machine.process_input(
+ ConnectionInputs.SEND_ALTERNATIVE_SERVICE
+ )
+
+ if origin is not None:
+ # This ALTSVC is sent on stream zero.
+ f = AltSvcFrame(stream_id=0)
+ f.origin = origin
+ f.field = field_value
+ frames = [f]
+ else:
+ stream = self._get_stream_by_id(stream_id)
+ frames = stream.advertise_alternative_service(field_value)
+
+ self._prepare_for_sending(frames)
+
+ def prioritize(self, stream_id, weight=None, depends_on=None,
+ exclusive=None):
+ """
+ Notify a server about the priority of a stream.
+
+ Stream priorities are a form of guidance to a remote server: they
+ inform the server about how important a given response is, so that the
+ server may allocate its resources (e.g. bandwidth, CPU time, etc.)
+ accordingly. This exists to allow clients to ensure that the most
+ important data arrives earlier, while less important data does not
+ starve out the more important data.
+
+ Stream priorities are explained in depth in `RFC 7540 Section 5.3
+ <https://tools.ietf.org/html/rfc7540#section-5.3>`_.
+
+ This method updates the priority information of a single stream. It may
+ be called well before a stream is actively in use, or well after a
+ stream is closed.
+
+ .. warning:: RFC 7540 allows for servers to change the priority of
+ streams. However, hyper-h2 **does not** allow server
+ stacks to do this. This is because most clients do not
+ adequately know how to respond when provided conflicting
+ priority information, and relatively little utility is
+ provided by making that functionality available.
+
+ .. note:: hyper-h2 **does not** maintain any information about the
+ RFC 7540 priority tree. That means that hyper-h2 does not
+ prevent incautious users from creating invalid priority
+ trees, particularly by creating priority loops. While some
+ basic error checking is provided by hyper-h2, users are
+ strongly recommended to understand their prioritisation
+ strategies before using the priority tools here.
+
+ .. note:: Priority information is strictly advisory. Servers are
+ allowed to disregard it entirely. Avoid relying on the idea
+ that your priority signaling will definitely be obeyed.
+
+ .. versionadded:: 2.4.0
+
+ :param stream_id: The ID of the stream to prioritize.
+ :type stream_id: ``int``
+
+ :param weight: The weight to give the stream. Defaults to ``16``, the
+ default weight of any stream. May be any value between ``1`` and
+ ``256`` inclusive. The relative weight of a stream indicates what
+ proportion of available resources will be allocated to that
+ stream.
+ :type weight: ``int``
+
+ :param depends_on: The ID of the stream on which this stream depends.
+ This stream will only be progressed if it is impossible to
+ progress the parent stream (the one on which this one depends).
+ Passing the value ``0`` means that this stream does not depend on
+ any other. Defaults to ``0``.
+ :type depends_on: ``int``
+
+ :param exclusive: Whether this stream is an exclusive dependency of its
+ "parent" stream (i.e. the stream given by ``depends_on``). If a
+ stream is an exclusive dependency of another, that means that all
+ previously-set children of the parent are moved to become children
+ of the new exclusively-dependent stream. Defaults to ``False``.
+ :type exclusive: ``bool``
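+
+        A client-side sketch, making stream 5 depend exclusively on stream
+        3::
+
+            conn.prioritize(stream_id=5, weight=32, depends_on=3,
+                            exclusive=True)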
+ """
+ if not self.config.client_side:
+ raise RFC1122Error("Servers SHOULD NOT prioritize streams.")
+
+ self.state_machine.process_input(
+ ConnectionInputs.SEND_PRIORITY
+ )
+
+ frame = PriorityFrame(stream_id)
+ frame = _add_frame_priority(frame, weight, depends_on, exclusive)
+
+ self._prepare_for_sending([frame])
+
+ def local_flow_control_window(self, stream_id):
+ """
+ Returns the maximum amount of data that can be sent on stream
+ ``stream_id``.
+
+ This value will never be larger than the total data that can be sent on
+ the connection: even if the given stream allows more data, the
+ connection window provides a logical maximum to the amount of data that
+ can be sent.
+
+ The maximum data that can be sent in a single data frame on a stream
+ is either this value, or the maximum frame size, whichever is
+ *smaller*.
+
+ :param stream_id: The ID of the stream whose flow control window is
+ being queried.
+ :type stream_id: ``int``
+ :returns: The amount of data in bytes that can be sent on the stream
+ before the flow control window is exhausted.
+ :rtype: ``int``
+ """
+ stream = self._get_stream_by_id(stream_id)
+ return min(
+ self.outbound_flow_control_window,
+ stream.outbound_flow_control_window
+ )
+
+ def remote_flow_control_window(self, stream_id):
+ """
+ Returns the maximum amount of data the remote peer can send on stream
+ ``stream_id``.
+
+        This value will never be larger than the total data that the remote
+        peer can send on the connection: even if the given stream allows more
+        data, the connection window provides a logical maximum to the amount
+        of data that can be sent.
+
+ The maximum data that can be sent in a single data frame on a stream
+ is either this value, or the maximum frame size, whichever is
+ *smaller*.
+
+ :param stream_id: The ID of the stream whose flow control window is
+ being queried.
+ :type stream_id: ``int``
+ :returns: The amount of data in bytes that can be received on the
+ stream before the flow control window is exhausted.
+ :rtype: ``int``
+ """
+ stream = self._get_stream_by_id(stream_id)
+ return min(
+ self.inbound_flow_control_window,
+ stream.inbound_flow_control_window
+ )
+
+ def acknowledge_received_data(self, acknowledged_size, stream_id):
+ """
+ Inform the :class:`H2Connection <h2.connection.H2Connection>` that a
+ certain number of flow-controlled bytes have been processed, and that
+ the space should be handed back to the remote peer at an opportune
+ time.
+
+ .. versionadded:: 2.5.0
+
+ :param acknowledged_size: The total *flow-controlled size* of the data
+ that has been processed. Note that this must include the amount of
+ padding that was sent with that data.
+ :type acknowledged_size: ``int``
+ :param stream_id: The ID of the stream on which this data was received.
+ :type stream_id: ``int``
+ :returns: Nothing
+ :rtype: ``None``
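+
+        A sketch of the usual pattern when handling a ``DataReceived`` event
+        named ``event`` (``process`` stands in for application code)::
+
+            process(event.data)
+            conn.acknowledge_received_data(
+                event.flow_controlled_length, event.stream_id
+            )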
+ """
+ self.config.logger.debug(
+ "Ack received data on stream ID %d with size %d",
+ stream_id, acknowledged_size
+ )
+ if stream_id <= 0:
+ raise ValueError(
+ "Stream ID %d is not valid for acknowledge_received_data" %
+ stream_id
+ )
+ if acknowledged_size < 0:
+ raise ValueError("Cannot acknowledge negative data")
+
+ frames = []
+
+ conn_manager = self._inbound_flow_control_window_manager
+ conn_increment = conn_manager.process_bytes(acknowledged_size)
+ if conn_increment:
+ f = WindowUpdateFrame(0)
+ f.window_increment = conn_increment
+ frames.append(f)
+
+ try:
+ stream = self._get_stream_by_id(stream_id)
+ except StreamClosedError:
+ # The stream is already gone. We're not worried about incrementing
+ # the window in this case.
+ pass
+ else:
+ # No point incrementing the windows of closed streams.
+ if stream.open:
+ frames.extend(
+ stream.acknowledge_received_data(acknowledged_size)
+ )
+
+ self._prepare_for_sending(frames)
+
+ def data_to_send(self, amount=None):
+ """
+ Returns some data for sending out of the internal data buffer.
+
+ This method is analogous to ``read`` on a file-like object, but it
+ doesn't block. Instead, it returns as much data as the user asks for,
+ or less if that much data is not available. It does not perform any
+ I/O, and so uses a different name.
+
+ :param amount: (optional) The maximum amount of data to return. If not
+ set, or set to ``None``, will return as much data as possible.
+ :type amount: ``int``
+ :returns: A bytestring containing the data to send on the wire.
+ :rtype: ``bytes``
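+
+        A sketch of the usual send step, assuming a connected socket
+        ``sock``::
+
+            data = conn.data_to_send()
+            if data:
+                sock.sendall(data)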
+ """
+ if amount is None:
+ data = bytes(self._data_to_send)
+ self._data_to_send = bytearray()
+ return data
+ else:
+ data = bytes(self._data_to_send[:amount])
+ self._data_to_send = self._data_to_send[amount:]
+ return data
+
+ def clear_outbound_data_buffer(self):
+ """
+ Clears the outbound data buffer, such that if this call was immediately
+ followed by a call to
+ :meth:`data_to_send <h2.connection.H2Connection.data_to_send>`, that
+ call would return no data.
+
+ This method should not normally be used, but is made available to avoid
+ exposing implementation details.
+ """
+ self._data_to_send = bytearray()
+
+ def _acknowledge_settings(self):
+ """
+ Acknowledge settings that have been received.
+
+ .. versionchanged:: 2.0.0
+ Removed from public API, removed useless ``event`` parameter, made
+ automatic.
+
+ :returns: Nothing
+ """
+ self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS)
+
+ changes = self.remote_settings.acknowledge()
+
+ if SettingCodes.INITIAL_WINDOW_SIZE in changes:
+ setting = changes[SettingCodes.INITIAL_WINDOW_SIZE]
+ self._flow_control_change_from_settings(
+ setting.original_value,
+ setting.new_value,
+ )
+
+        # HEADER_TABLE_SIZE changes by the remote peer affect our encoder: cf.
+ # RFC 7540 Section 6.5.2.
+ if SettingCodes.HEADER_TABLE_SIZE in changes:
+ setting = changes[SettingCodes.HEADER_TABLE_SIZE]
+ self.encoder.header_table_size = setting.new_value
+
+ if SettingCodes.MAX_FRAME_SIZE in changes:
+ setting = changes[SettingCodes.MAX_FRAME_SIZE]
+ self.max_outbound_frame_size = setting.new_value
+ for stream in self.streams.values():
+ stream.max_outbound_frame_size = setting.new_value
+
+ f = SettingsFrame(0)
+ f.flags.add('ACK')
+ return [f]
+
+ def _flow_control_change_from_settings(self, old_value, new_value):
+ """
+ Update flow control windows in response to a change in the value of
+ SETTINGS_INITIAL_WINDOW_SIZE.
+
+ When this setting is changed, it automatically updates all flow control
+ windows by the delta in the settings values. Note that it does not
+ increment the *connection* flow control window, per section 6.9.2 of
+ RFC 7540.
+ """
+ delta = new_value - old_value
+
+ for stream in self.streams.values():
+ stream.outbound_flow_control_window = guard_increment_window(
+ stream.outbound_flow_control_window,
+ delta
+ )
+
+ def _inbound_flow_control_change_from_settings(self, old_value, new_value):
+ """
+ Update remote flow control windows in response to a change in the value
+ of SETTINGS_INITIAL_WINDOW_SIZE.
+
+ When this setting is changed, it automatically updates all remote flow
+ control windows by the delta in the settings values.
+ """
+ delta = new_value - old_value
+
+ for stream in self.streams.values():
+ stream._inbound_flow_control_change_from_settings(delta)
+
+ def receive_data(self, data):
+ """
+ Pass some received HTTP/2 data to the connection for handling.
+
+ :param data: The data received from the remote peer on the network.
+ :type data: ``bytes``
+ :returns: A list of events that the remote peer triggered by sending
+ this data.
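+
+        A sketch of one iteration of a simple receive loop, assuming a
+        connected socket ``sock`` and an application-defined ``handle``
+        function::
+
+            data = sock.recv(65535)
+            if data:
+                for event in conn.receive_data(data):
+                    handle(event)
+            sock.sendall(conn.data_to_send())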
+ """
+ self.config.logger.trace(
+ "Process received data on connection. Received data: %r", data
+ )
+
+ events = []
+ self.incoming_buffer.add_data(data)
+ self.incoming_buffer.max_frame_size = self.max_inbound_frame_size
+
+ try:
+ for frame in self.incoming_buffer:
+ events.extend(self._receive_frame(frame))
+ except InvalidPaddingError:
+ self._terminate_connection(ErrorCodes.PROTOCOL_ERROR)
+ raise ProtocolError("Received frame with invalid padding.")
+ except ProtocolError as e:
+ # For whatever reason, receiving the frame caused a protocol error.
+ # We should prepare to emit a GoAway frame before throwing the
+ # exception up further. No need for an event: the exception will
+ # do fine.
+ self._terminate_connection(e.error_code)
+ raise
+
+ return events
+
+ def _receive_frame(self, frame):
+ """
+ Handle a frame received on the connection.
+
+ .. versionchanged:: 2.0.0
+ Removed from the public API.
+ """
+ try:
+ # I don't love using __class__ here, maybe reconsider it.
+ frames, events = self._frame_dispatch_table[frame.__class__](frame)
+ except StreamClosedError as e:
+ # If the stream was closed by RST_STREAM, we just send a RST_STREAM
+ # to the remote peer. Otherwise, this is a connection error, and so
+ # we will re-raise to trigger one.
+ if self._stream_is_closed_by_reset(e.stream_id):
+ f = RstStreamFrame(e.stream_id)
+ f.error_code = e.error_code
+ self._prepare_for_sending([f])
+ events = e._events
+ else:
+ raise
+ except StreamIDTooLowError as e:
+ # The stream ID seems invalid. This may happen when the closed
+ # stream has been cleaned up, or when the remote peer has opened a
+ # new stream with a higher stream ID than this one, forcing it
+ # closed implicitly.
+ #
+ # Check how the stream was closed: depending on the mechanism, it
+ # is either a stream error or a connection error.
+ if self._stream_is_closed_by_reset(e.stream_id):
+ # Closed by RST_STREAM is a stream error.
+ f = RstStreamFrame(e.stream_id)
+ f.error_code = ErrorCodes.STREAM_CLOSED
+ self._prepare_for_sending([f])
+ events = []
+ elif self._stream_is_closed_by_end(e.stream_id):
+ # Closed by END_STREAM is a connection error.
+ raise StreamClosedError(e.stream_id)
+ else:
+ # Closed implicitly, also a connection error, but of type
+ # PROTOCOL_ERROR.
+ raise
+ else:
+ self._prepare_for_sending(frames)
+
+ return events
+
+ def _terminate_connection(self, error_code):
+ """
+ Terminate the connection early. Used in error handling blocks to send
+ GOAWAY frames.
+ """
+ f = GoAwayFrame(0)
+ f.last_stream_id = self.highest_inbound_stream_id
+ f.error_code = error_code
+ self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY)
+ self._prepare_for_sending([f])
+
+ def _receive_headers_frame(self, frame):
+ """
+ Receive a headers frame on the connection.
+ """
+ # If necessary, check we can open the stream. Also validate that the
+ # stream ID is valid.
+ if frame.stream_id not in self.streams:
+ max_open_streams = self.local_settings.max_concurrent_streams
+ if (self.open_inbound_streams + 1) > max_open_streams:
+ raise TooManyStreamsError(
+ "Max outbound streams is %d, %d open" %
+ (max_open_streams, self.open_outbound_streams)
+ )
+
+ # Let's decode the headers. We handle headers as bytes internally up
+ # until we hang them off the event, at which point we may optionally
+ # convert them to unicode.
+ headers = _decode_headers(self.decoder, frame.data)
+
+ events = self.state_machine.process_input(
+ ConnectionInputs.RECV_HEADERS
+ )
+ stream = self._get_or_create_stream(
+ frame.stream_id, AllowedStreamIDs(not self.config.client_side)
+ )
+ frames, stream_events = stream.receive_headers(
+ headers,
+ 'END_STREAM' in frame.flags,
+ self.config.header_encoding
+ )
+
+ if 'PRIORITY' in frame.flags:
+ p_frames, p_events = self._receive_priority_frame(frame)
+ stream_events[0].priority_updated = p_events[0]
+ stream_events.extend(p_events)
+ assert not p_frames
+
+ return frames, events + stream_events
+
+ def _receive_push_promise_frame(self, frame):
+ """
+ Receive a push-promise frame on the connection.
+ """
+ if not self.local_settings.enable_push:
+ raise ProtocolError("Received pushed stream")
+
+ pushed_headers = _decode_headers(self.decoder, frame.data)
+
+ events = self.state_machine.process_input(
+ ConnectionInputs.RECV_PUSH_PROMISE
+ )
+
+ try:
+ stream = self._get_stream_by_id(frame.stream_id)
+ except NoSuchStreamError:
+ # We need to check if the parent stream was reset by us. If it was
+ # then we presume that the PUSH_PROMISE was in flight when we reset
+ # the parent stream. Rather than accept the new stream, just reset
+ # it.
+ #
+ # If this was closed naturally, however, we should call this a
+ # PROTOCOL_ERROR: pushing a stream on a naturally closed stream is
+ # a real problem because it creates a brand new stream that the
+ # remote peer now believes exists.
+ if (self._stream_closed_by(frame.stream_id) ==
+ StreamClosedBy.SEND_RST_STREAM):
+ f = RstStreamFrame(frame.promised_stream_id)
+ f.error_code = ErrorCodes.REFUSED_STREAM
+ return [f], events
+
+ raise ProtocolError("Attempted to push on closed stream.")
+
+ # We need to prevent peers pushing streams in response to streams that
+ # they themselves have already pushed: see #163 and RFC 7540 § 6.6. The
+ # easiest way to do that is to assert that the stream_id is not even:
+ # this shortcut works because only servers can push and the state
+ # machine will enforce this.
+ if (frame.stream_id % 2) == 0:
+ raise ProtocolError("Cannot recursively push streams.")
+
+ try:
+ frames, stream_events = stream.receive_push_promise_in_band(
+ frame.promised_stream_id,
+ pushed_headers,
+ self.config.header_encoding,
+ )
+ except StreamClosedError:
+ # The parent stream was reset by us, so we presume that
+ # PUSH_PROMISE was in flight when we reset the parent stream.
+ # So we just reset the new stream.
+ f = RstStreamFrame(frame.promised_stream_id)
+ f.error_code = ErrorCodes.REFUSED_STREAM
+ return [f], events
+
+ new_stream = self._begin_new_stream(
+ frame.promised_stream_id, AllowedStreamIDs.EVEN
+ )
+ self.streams[frame.promised_stream_id] = new_stream
+ new_stream.remotely_pushed(pushed_headers)
+
+ return frames, events + stream_events
+
+ def _handle_data_on_closed_stream(self, events, exc, frame):
+ # This stream is already closed - and yet we received a DATA frame.
+ # The received DATA frame counts towards the connection flow window.
+        # We need to manually acknowledge the DATA frame to update the flow
+        # window of the connection. Otherwise the whole connection stalls due
+        # to the inbound flow window being 0.
+ frames = []
+ conn_manager = self._inbound_flow_control_window_manager
+ conn_increment = conn_manager.process_bytes(
+ frame.flow_controlled_length
+ )
+ if conn_increment:
+ f = WindowUpdateFrame(0)
+ f.window_increment = conn_increment
+ frames.append(f)
+ self.config.logger.debug(
+ "Received DATA frame on closed stream %d - "
+ "auto-emitted a WINDOW_UPDATE by %d",
+ frame.stream_id, conn_increment
+ )
+ f = RstStreamFrame(exc.stream_id)
+ f.error_code = exc.error_code
+ frames.append(f)
+ self.config.logger.debug(
+ "Stream %d already CLOSED or cleaned up - "
+ "auto-emitted a RST_FRAME" % frame.stream_id
+ )
+ return frames, events + exc._events
+
+ def _receive_data_frame(self, frame):
+ """
+ Receive a data frame on the connection.
+ """
+ flow_controlled_length = frame.flow_controlled_length
+
+ events = self.state_machine.process_input(
+ ConnectionInputs.RECV_DATA
+ )
+ self._inbound_flow_control_window_manager.window_consumed(
+ flow_controlled_length
+ )
+
+ try:
+ stream = self._get_stream_by_id(frame.stream_id)
+ frames, stream_events = stream.receive_data(
+ frame.data,
+ 'END_STREAM' in frame.flags,
+ flow_controlled_length
+ )
+ except StreamClosedError as e:
+ # This stream is either marked as CLOSED or already gone from our
+ # internal state.
+ return self._handle_data_on_closed_stream(events, e, frame)
+
+ return frames, events + stream_events
+
+ def _receive_settings_frame(self, frame):
+ """
+ Receive a SETTINGS frame on the connection.
+ """
+ events = self.state_machine.process_input(
+ ConnectionInputs.RECV_SETTINGS
+ )
+
+ # This is an ack of the local settings.
+ if 'ACK' in frame.flags:
+ changed_settings = self._local_settings_acked()
+ ack_event = SettingsAcknowledged()
+ ack_event.changed_settings = changed_settings
+ events.append(ack_event)
+ return [], events
+
+ # Add the new settings.
+ self.remote_settings.update(frame.settings)
+ events.append(
+ RemoteSettingsChanged.from_settings(
+ self.remote_settings, frame.settings
+ )
+ )
+ frames = self._acknowledge_settings()
+
+ return frames, events
+
+ def _receive_window_update_frame(self, frame):
+ """
+ Receive a WINDOW_UPDATE frame on the connection.
+ """
+ # Validate the frame.
+ if not (1 <= frame.window_increment <= self.MAX_WINDOW_INCREMENT):
+ raise ProtocolError(
+ "Flow control increment must be between 1 and %d, received %d"
+ % (self.MAX_WINDOW_INCREMENT, frame.window_increment)
+ )
+
+ events = self.state_machine.process_input(
+ ConnectionInputs.RECV_WINDOW_UPDATE
+ )
+
+ if frame.stream_id:
+ stream = self._get_stream_by_id(frame.stream_id)
+ frames, stream_events = stream.receive_window_update(
+ frame.window_increment
+ )
+ else:
+ # Increment our local flow control window.
+ self.outbound_flow_control_window = guard_increment_window(
+ self.outbound_flow_control_window,
+ frame.window_increment
+ )
+
+ # FIXME: Should we split this into one event per active stream?
+ window_updated_event = WindowUpdated()
+ window_updated_event.stream_id = 0
+ window_updated_event.delta = frame.window_increment
+ stream_events = [window_updated_event]
+ frames = []
+
+ return frames, events + stream_events
+
+ def _receive_ping_frame(self, frame):
+ """
+ Receive a PING frame on the connection.
+ """
+ events = self.state_machine.process_input(
+ ConnectionInputs.RECV_PING
+ )
+ flags = []
+
+ if 'ACK' in frame.flags:
+ evt = PingAckReceived()
+ else:
+ evt = PingReceived()
+
+ # automatically ACK the PING with the same 'opaque data'
+ f = PingFrame(0)
+ f.flags = {'ACK'}
+ f.opaque_data = frame.opaque_data
+ flags.append(f)
+
+ evt.ping_data = frame.opaque_data
+ events.append(evt)
+
+ return flags, events
+
+ def _receive_rst_stream_frame(self, frame):
+ """
+ Receive a RST_STREAM frame on the connection.
+ """
+ events = self.state_machine.process_input(
+ ConnectionInputs.RECV_RST_STREAM
+ )
+ try:
+ stream = self._get_stream_by_id(frame.stream_id)
+ except NoSuchStreamError:
+ # The stream is missing. That's ok, we just do nothing here.
+ stream_frames = []
+ stream_events = []
+ else:
+ stream_frames, stream_events = stream.stream_reset(frame)
+
+ return stream_frames, events + stream_events
+
+ def _receive_priority_frame(self, frame):
+ """
+ Receive a PRIORITY frame on the connection.
+ """
+ events = self.state_machine.process_input(
+ ConnectionInputs.RECV_PRIORITY
+ )
+
+ event = PriorityUpdated()
+ event.stream_id = frame.stream_id
+ event.depends_on = frame.depends_on
+ event.exclusive = frame.exclusive
+
+ # Weight is an integer between 1 and 256, but the byte only allows
+ # 0 to 255: add one.
+ event.weight = frame.stream_weight + 1
+
+ # A stream may not depend on itself.
+ if event.depends_on == frame.stream_id:
+ raise ProtocolError(
+ "Stream %d may not depend on itself" % frame.stream_id
+ )
+ events.append(event)
+
+ return [], events
+
+ def _receive_goaway_frame(self, frame):
+ """
+ Receive a GOAWAY frame on the connection.
+ """
+ events = self.state_machine.process_input(
+ ConnectionInputs.RECV_GOAWAY
+ )
+
+ # Clear the outbound data buffer: we cannot send further data now.
+ self.clear_outbound_data_buffer()
+
+ # Fire an appropriate ConnectionTerminated event.
+ new_event = ConnectionTerminated()
+ new_event.error_code = _error_code_from_int(frame.error_code)
+ new_event.last_stream_id = frame.last_stream_id
+ new_event.additional_data = (frame.additional_data
+ if frame.additional_data else None)
+ events.append(new_event)
+
+ return [], events
+
+ def _receive_naked_continuation(self, frame):
+ """
+ A naked CONTINUATION frame has been received. This is always an error,
+ but the type of error it is depends on the state of the stream and must
+ transition the state of the stream, so we need to pass it to the
+ appropriate stream.
+ """
+ stream = self._get_stream_by_id(frame.stream_id)
+ stream.receive_continuation()
+ assert False, "Should not be reachable"
+
+ def _receive_alt_svc_frame(self, frame):
+ """
+ An ALTSVC frame has been received. This frame, specified in RFC 7838,
+ is used to advertise alternative places where the same service can be
+ reached.
+
+ This frame can optionally be received either on a stream or on stream
+ 0, and its semantics are different in each case.
+ """
+ events = self.state_machine.process_input(
+ ConnectionInputs.RECV_ALTERNATIVE_SERVICE
+ )
+ frames = []
+
+ if frame.stream_id:
+ # Given that it makes no sense to receive ALTSVC on a stream
+ # before that stream has been opened with a HEADERS frame, the
+ # ALTSVC frame cannot create a stream. If the stream is not
+ # present, we simply ignore the frame.
+ try:
+ stream = self._get_stream_by_id(frame.stream_id)
+ except (NoSuchStreamError, StreamClosedError):
+ pass
+ else:
+ stream_frames, stream_events = stream.receive_alt_svc(frame)
+ frames.extend(stream_frames)
+ events.extend(stream_events)
+ else:
+            # This frame is sent on stream 0. The origin field on the frame
+            # must be present; annoyingly, if it isn't this is not a
+            # ProtocolError, so we just need to ignore the frame.
+ if not frame.origin:
+ return frames, events
+
+ # If we're a server, we want to ignore this (RFC 7838 says so).
+ if not self.config.client_side:
+ return frames, events
+
+ event = AlternativeServiceAvailable()
+ event.origin = frame.origin
+ event.field_value = frame.field
+ events.append(event)
+
+ return frames, events
+
+ def _receive_unknown_frame(self, frame):
+ """
+ We have received a frame that we do not understand. This is almost
+ certainly an extension frame, though it's impossible to be entirely
+ sure.
+
+ RFC 7540 § 5.5 says that we MUST ignore unknown frame types: so we
+ do. We do notify the user that we received one, however.
+ """
+ # All we do here is log.
+ self.config.logger.debug(
+ "Received unknown extension frame (ID %d)", frame.stream_id
+ )
+ event = UnknownFrameReceived()
+ event.frame = frame
+ return [], [event]
+
+ def _local_settings_acked(self):
+ """
+ Handle the local settings being ACKed, update internal state.
+ """
+ changes = self.local_settings.acknowledge()
+
+ if SettingCodes.INITIAL_WINDOW_SIZE in changes:
+ setting = changes[SettingCodes.INITIAL_WINDOW_SIZE]
+ self._inbound_flow_control_change_from_settings(
+ setting.original_value,
+ setting.new_value,
+ )
+
+ if SettingCodes.MAX_HEADER_LIST_SIZE in changes:
+ setting = changes[SettingCodes.MAX_HEADER_LIST_SIZE]
+ self.decoder.max_header_list_size = setting.new_value
+
+ if SettingCodes.MAX_FRAME_SIZE in changes:
+ setting = changes[SettingCodes.MAX_FRAME_SIZE]
+ self.max_inbound_frame_size = setting.new_value
+
+ if SettingCodes.HEADER_TABLE_SIZE in changes:
+ setting = changes[SettingCodes.HEADER_TABLE_SIZE]
+ # This is safe across all hpack versions: some versions just won't
+ # respect it.
+ self.decoder.max_allowed_table_size = setting.new_value
+
+ return changes
+
+ def _stream_id_is_outbound(self, stream_id):
+ """
+ Returns ``True`` if the stream ID corresponds to an outbound stream
+ (one initiated by this peer), returns ``False`` otherwise.
+ """
+ return (stream_id % 2 == int(self.config.client_side))
+
+ def _stream_closed_by(self, stream_id):
+ """
+ Returns how the stream was closed.
+
+ The return value will be either a member of
+ ``h2.stream.StreamClosedBy`` or ``None``. If ``None``, the stream was
+ closed implicitly by the peer opening a stream with a higher stream ID
+ before opening this one.
+ """
+ if stream_id in self.streams:
+ return self.streams[stream_id].closed_by
+ if stream_id in self._closed_streams:
+ return self._closed_streams[stream_id]
+ return None
+
+ def _stream_is_closed_by_reset(self, stream_id):
+ """
+ Returns ``True`` if the stream was closed by sending or receiving a
+ RST_STREAM frame. Returns ``False`` otherwise.
+ """
+ return self._stream_closed_by(stream_id) in (
+ StreamClosedBy.RECV_RST_STREAM, StreamClosedBy.SEND_RST_STREAM
+ )
+
+ def _stream_is_closed_by_end(self, stream_id):
+ """
+ Returns ``True`` if the stream was closed by sending or receiving an
+ END_STREAM flag in a HEADERS or DATA frame. Returns ``False``
+ otherwise.
+ """
+ return self._stream_closed_by(stream_id) in (
+ StreamClosedBy.RECV_END_STREAM, StreamClosedBy.SEND_END_STREAM
+ )
+
+
+def _add_frame_priority(frame, weight=None, depends_on=None, exclusive=None):
+ """
+ Adds priority data to a given frame. Does not change any flags set on that
+ frame: if the caller is adding priority information to a HEADERS frame they
+ must set that themselves.
+
+ This method also deliberately sets defaults for anything missing.
+
+ This method validates the input values.
+ """
+ # A stream may not depend on itself.
+ if depends_on == frame.stream_id:
+ raise ProtocolError(
+ "Stream %d may not depend on itself" % frame.stream_id
+ )
+
+ # Weight must be between 1 and 256.
+ if weight is not None:
+ if weight > 256 or weight < 1:
+ raise ProtocolError(
+ "Weight must be between 1 and 256, not %d" % weight
+ )
+ else:
+ # Weight is an integer between 1 and 256, but the byte only allows
+ # 0 to 255: subtract one.
+ weight -= 1
+
+ # Set defaults for anything not provided.
+ weight = weight if weight is not None else 15
+ depends_on = depends_on if depends_on is not None else 0
+ exclusive = exclusive if exclusive is not None else False
+
+ frame.stream_weight = weight
+ frame.depends_on = depends_on
+ frame.exclusive = exclusive
+
+ return frame
+
+
+def _decode_headers(decoder, encoded_header_block):
+ """
+ Decode a HPACK-encoded header block, translating HPACK exceptions into
+ sensible hyper-h2 errors.
+
+ This only ever returns bytestring headers: hyper-h2 may emit them as
+ unicode later, but internally it processes them as bytestrings only.
+ """
+ try:
+ return decoder.decode(encoded_header_block, raw=True)
+ except OversizedHeaderListError as e:
+ # This is a symptom of a HPACK bomb attack: the user has
+ # disregarded our requirements on how large a header block we'll
+ # accept.
+ raise DenialOfServiceError("Oversized header block: %s" % e)
+ except (HPACKError, IndexError, TypeError, UnicodeDecodeError) as e:
+ # We should only need HPACKError here, but versions of HPACK older
+ # than 2.1.0 throw all three others as well. For maximum
+ # compatibility, catch all of them.
+ raise ProtocolError("Error decoding header block: %s" % e)
diff --git a/testing/web-platform/tests/tools/third_party/h2/h2/errors.py b/testing/web-platform/tests/tools/third_party/h2/h2/errors.py
new file mode 100644
index 0000000000..baef2001cd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/h2/errors.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+"""
+h2/errors
+~~~~~~~~~~~~~~~~~~~
+
+Global error code registry containing the established HTTP/2 error codes.
+
+The current registry is available at:
+https://tools.ietf.org/html/rfc7540#section-11.4
+"""
+import enum
+
+
+class ErrorCodes(enum.IntEnum):
+ """
+ All known HTTP/2 error codes.
+
+ .. versionadded:: 2.5.0
+ """
+ #: Graceful shutdown.
+ NO_ERROR = 0x0
+
+ #: Protocol error detected.
+ PROTOCOL_ERROR = 0x1
+
+ #: Implementation fault.
+ INTERNAL_ERROR = 0x2
+
+ #: Flow-control limits exceeded.
+ FLOW_CONTROL_ERROR = 0x3
+
+ #: Settings not acknowledged.
+ SETTINGS_TIMEOUT = 0x4
+
+ #: Frame received for closed stream.
+ STREAM_CLOSED = 0x5
+
+ #: Frame size incorrect.
+ FRAME_SIZE_ERROR = 0x6
+
+ #: Stream not processed.
+ REFUSED_STREAM = 0x7
+
+ #: Stream cancelled.
+ CANCEL = 0x8
+
+ #: Compression state not updated.
+ COMPRESSION_ERROR = 0x9
+
+ #: TCP connection error for CONNECT method.
+ CONNECT_ERROR = 0xa
+
+ #: Processing capacity exceeded.
+ ENHANCE_YOUR_CALM = 0xb
+
+ #: Negotiated TLS parameters not acceptable.
+ INADEQUATE_SECURITY = 0xc
+
+ #: Use HTTP/1.1 for the request.
+ HTTP_1_1_REQUIRED = 0xd
+
+
+def _error_code_from_int(code):
+ """
+ Given an integer error code, returns either one of :class:`ErrorCodes
+ <h2.errors.ErrorCodes>` or, if not present in the known set of codes,
+ returns the integer directly.
+ """
+ try:
+ return ErrorCodes(code)
+ except ValueError:
+ return code
+
+
+__all__ = ['ErrorCodes']
diff --git a/testing/web-platform/tests/tools/third_party/h2/h2/events.py b/testing/web-platform/tests/tools/third_party/h2/h2/events.py
new file mode 100644
index 0000000000..a06c9903d5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/h2/events.py
@@ -0,0 +1,648 @@
+# -*- coding: utf-8 -*-
+"""
+h2/events
+~~~~~~~~~
+
+Defines Event types for HTTP/2.
+
+Events are returned by the H2 state machine to allow implementations to keep
+track of events triggered by receiving data. Each time data is provided to the
+H2 state machine it processes the data and returns a list of Event objects.
+"""
+import binascii
+
+from .settings import ChangedSetting, _setting_code_from_int
+
+
+class Event(object):
+ """
+ Base class for h2 events.
+ """
+ pass
+
+
+class RequestReceived(Event):
+ """
+ The RequestReceived event is fired whenever request headers are received.
+ This event carries the HTTP headers for the given request and the stream ID
+ of the new stream.
+
+ .. versionchanged:: 2.3.0
+ Changed the type of ``headers`` to :class:`HeaderTuple
+ <hpack:hpack.HeaderTuple>`. This has no effect on current users.
+
+ .. versionchanged:: 2.4.0
+ Added ``stream_ended`` and ``priority_updated`` properties.
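+
+    A typical server-side sketch when this event is seen, assuming ``conn``
+    is the server's :class:`H2Connection <h2.connection.H2Connection>`::
+
+        if isinstance(event, RequestReceived):
+            conn.send_headers(
+                event.stream_id,
+                [(':status', '200'), ('content-length', '0')],
+                end_stream=True,
+            )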
+ """
+ def __init__(self):
+ #: The Stream ID for the stream this request was made on.
+ self.stream_id = None
+
+ #: The request headers.
+ self.headers = None
+
+ #: If this request also ended the stream, the associated
+ #: :class:`StreamEnded <h2.events.StreamEnded>` event will be available
+ #: here.
+ #:
+ #: .. versionadded:: 2.4.0
+ self.stream_ended = None
+
+ #: If this request also had associated priority information, the
+ #: associated :class:`PriorityUpdated <h2.events.PriorityUpdated>`
+ #: event will be available here.
+ #:
+ #: .. versionadded:: 2.4.0
+ self.priority_updated = None
+
+ def __repr__(self):
+ return "<RequestReceived stream_id:%s, headers:%s>" % (
+ self.stream_id, self.headers
+ )
+
+
+class ResponseReceived(Event):
+ """
+ The ResponseReceived event is fired whenever response headers are received.
+ This event carries the HTTP headers for the given response and the stream
+ ID of the new stream.
+
+ .. versionchanged:: 2.3.0
+ Changed the type of ``headers`` to :class:`HeaderTuple
+ <hpack:hpack.HeaderTuple>`. This has no effect on current users.
+
+ .. versionchanged:: 2.4.0
+ Added ``stream_ended`` and ``priority_updated`` properties.
+ """
+ def __init__(self):
+ #: The Stream ID for the stream this response was made on.
+ self.stream_id = None
+
+ #: The response headers.
+ self.headers = None
+
+ #: If this response also ended the stream, the associated
+ #: :class:`StreamEnded <h2.events.StreamEnded>` event will be available
+ #: here.
+ #:
+ #: .. versionadded:: 2.4.0
+ self.stream_ended = None
+
+ #: If this response also had associated priority information, the
+ #: associated :class:`PriorityUpdated <h2.events.PriorityUpdated>`
+ #: event will be available here.
+ #:
+ #: .. versionadded:: 2.4.0
+ self.priority_updated = None
+
+ def __repr__(self):
+ return "<ResponseReceived stream_id:%s, headers:%s>" % (
+ self.stream_id, self.headers
+ )
+
+
+class TrailersReceived(Event):
+ """
+ The TrailersReceived event is fired whenever trailers are received on a
+ stream. Trailers are a set of headers sent after the body of the
+ request/response, and are used to provide information that wasn't known
+ ahead of time (e.g. content-length). This event carries the HTTP header
+ fields that form the trailers and the stream ID of the stream on which they
+ were received.
+
+ .. versionchanged:: 2.3.0
+ Changed the type of ``headers`` to :class:`HeaderTuple
+ <hpack:hpack.HeaderTuple>`. This has no effect on current users.
+
+ .. versionchanged:: 2.4.0
+ Added ``stream_ended`` and ``priority_updated`` properties.
+ """
+ def __init__(self):
+ #: The Stream ID for the stream on which these trailers were received.
+ self.stream_id = None
+
+ #: The trailers themselves.
+ self.headers = None
+
+ #: Trailers always end streams. This property has the associated
+ #: :class:`StreamEnded <h2.events.StreamEnded>` in it.
+ #:
+ #: .. versionadded:: 2.4.0
+ self.stream_ended = None
+
+ #: If the trailers also set associated priority information, the
+ #: associated :class:`PriorityUpdated <h2.events.PriorityUpdated>`
+ #: event will be available here.
+ #:
+ #: .. versionadded:: 2.4.0
+ self.priority_updated = None
+
+ def __repr__(self):
+ return "<TrailersReceived stream_id:%s, headers:%s>" % (
+ self.stream_id, self.headers
+ )
+
+
+class _HeadersSent(Event):
+ """
+ The _HeadersSent event is fired whenever headers are sent.
+
+ This is an internal event, used to determine validation steps on
+ outgoing header blocks.
+ """
+ pass
+
+
+class _ResponseSent(_HeadersSent):
+ """
+ The _ResponseSent event is fired whenever response headers are sent
+ on a stream.
+
+ This is an internal event, used to determine validation steps on
+ outgoing header blocks.
+ """
+ pass
+
+
+class _RequestSent(_HeadersSent):
+ """
+ The _RequestSent event is fired whenever request headers are sent
+ on a stream.
+
+ This is an internal event, used to determine validation steps on
+ outgoing header blocks.
+ """
+ pass
+
+
+class _TrailersSent(_HeadersSent):
+ """
+ The _TrailersSent event is fired whenever trailers are sent on a
+ stream. Trailers are a set of headers sent after the body of the
+ request/response, and are used to provide information that wasn't known
+ ahead of time (e.g. content-length).
+
+ This is an internal event, used to determine validation steps on
+ outgoing header blocks.
+ """
+ pass
+
+
+class _PushedRequestSent(_HeadersSent):
+ """
+ The _PushedRequestSent event is fired whenever pushed request headers are
+ sent.
+
+ This is an internal event, used to determine validation steps on outgoing
+ header blocks.
+ """
+ pass
+
+
+class InformationalResponseReceived(Event):
+ """
+ The InformationalResponseReceived event is fired when an informational
+ response (that is, one whose status code is a 1XX code) is received from
+ the remote peer.
+
+ The remote peer may send any number of these, from zero upwards. These
+ responses are most commonly sent in response to requests that have the
+ ``expect: 100-continue`` header field present. Most users can safely
+ ignore this event unless you are intending to use the
+ ``expect: 100-continue`` flow, or are for any reason expecting a different
+ 1XX status code.
+
+ .. versionadded:: 2.2.0
+
+ .. versionchanged:: 2.3.0
+ Changed the type of ``headers`` to :class:`HeaderTuple
+ <hpack:hpack.HeaderTuple>`. This has no effect on current users.
+
+ .. versionchanged:: 2.4.0
+ Added ``priority_updated`` property.
+ """
+ def __init__(self):
+ #: The Stream ID for the stream this informational response was made
+ #: on.
+ self.stream_id = None
+
+ #: The headers for this informational response.
+ self.headers = None
+
+ #: If this response also had associated priority information, the
+ #: associated :class:`PriorityUpdated <h2.events.PriorityUpdated>`
+ #: event will be available here.
+ #:
+ #: .. versionadded:: 2.4.0
+ self.priority_updated = None
+
+ def __repr__(self):
+ return "<InformationalResponseReceived stream_id:%s, headers:%s>" % (
+ self.stream_id, self.headers
+ )
+
+
+class DataReceived(Event):
+ """
+ The DataReceived event is fired whenever data is received on a stream from
+ the remote peer. The event carries the data itself, and the stream ID on
+ which the data was received.
+
+ .. versionchanged:: 2.4.0
+ Added ``stream_ended`` property.
+ """
+ def __init__(self):
+ #: The Stream ID for the stream this data was received on.
+ self.stream_id = None
+
+ #: The data itself.
+ self.data = None
+
+ #: The amount of data received that counts against the flow control
+ #: window. Note that padding counts against the flow control window, so
+ #: when adjusting flow control you should always use this field rather
+ #: than ``len(data)``.
+ self.flow_controlled_length = None
+
+ #: If this data chunk also completed the stream, the associated
+ #: :class:`StreamEnded <h2.events.StreamEnded>` event will be available
+ #: here.
+ #:
+ #: .. versionadded:: 2.4.0
+ self.stream_ended = None
+
+ def __repr__(self):
+ return (
+ "<DataReceived stream_id:%s, "
+ "flow_controlled_length:%s, "
+ "data:%s>" % (
+ self.stream_id,
+ self.flow_controlled_length,
+ _bytes_representation(self.data[:20]),
+ )
+ )
+
+
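+# A minimal illustrative sketch, not part of the h2 API: when handling the
+# DataReceived event above, the flow control window should be replenished
+# using ``flow_controlled_length`` rather than ``len(event.data)``, because
+# padding also counts against the window.  ``conn`` is assumed to be an
+# ``h2.connection.H2Connection`` and ``_example_handle_data`` is hypothetical.
+def _example_handle_data(conn, event):
+    if event.flow_controlled_length:
+        # Return the flow-controlled length, padding included, to the peer.
+        conn.acknowledge_received_data(
+            event.flow_controlled_length, event.stream_id
+        )
+    return event.data
+
+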
+class WindowUpdated(Event):
+ """
+ The WindowUpdated event is fired whenever a flow control window changes
+ size. HTTP/2 defines flow control windows for connections and streams: this
+ event fires for both connections and streams. The event carries the ID of
+ the stream to which it applies (set to zero if the window update applies to
+ the connection), and the delta in the window size.
+ """
+ def __init__(self):
+ #: The Stream ID of the stream whose flow control window was changed.
+ #: May be ``0`` if the connection window was changed.
+ self.stream_id = None
+
+ #: The window delta.
+ self.delta = None
+
+ def __repr__(self):
+ return "<WindowUpdated stream_id:%s, delta:%s>" % (
+ self.stream_id, self.delta
+ )
+
+
+class RemoteSettingsChanged(Event):
+ """
+ The RemoteSettingsChanged event is fired whenever the remote peer changes
+ its settings. It contains a complete inventory of changed settings,
+ including their previous values.
+
+ In HTTP/2, settings changes need to be acknowledged. hyper-h2 automatically
+ acknowledges settings changes for efficiency. However, it is possible that
+ the caller may not be happy with the changed setting.
+
+ When this event is received, the caller should confirm that the new
+ settings are acceptable. If they are not acceptable, the user should close
+ the connection with the error code :data:`PROTOCOL_ERROR
+ <h2.errors.ErrorCodes.PROTOCOL_ERROR>`.
+
+ .. versionchanged:: 2.0.0
+ Prior to this version the user needed to acknowledge settings changes.
+ This is no longer the case: hyper-h2 now automatically acknowledges
+ them.
+ """
+ def __init__(self):
+ #: A dictionary of setting byte to
+ #: :class:`ChangedSetting <h2.settings.ChangedSetting>`, representing
+ #: the changed settings.
+ self.changed_settings = {}
+
+ @classmethod
+ def from_settings(cls, old_settings, new_settings):
+ """
+ Build a RemoteSettingsChanged event from a set of changed settings.
+
+ :param old_settings: A complete collection of old settings, in the form
+ of a dictionary of ``{setting: value}``.
+ :param new_settings: All the changed settings and their new values, in
+ the form of a dictionary of ``{setting: value}``.
+ """
+ e = cls()
+ for setting, new_value in new_settings.items():
+ setting = _setting_code_from_int(setting)
+ original_value = old_settings.get(setting)
+ change = ChangedSetting(setting, original_value, new_value)
+ e.changed_settings[setting] = change
+
+ return e
+
+ def __repr__(self):
+ return "<RemoteSettingsChanged changed_settings:{%s}>" % (
+ ", ".join(repr(cs) for cs in self.changed_settings.values()),
+ )
+
+
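+# A minimal illustrative sketch, not part of the h2 API, showing one way to
+# act on the RemoteSettingsChanged event above.  ``conn`` is assumed to be an
+# ``h2.connection.H2Connection``; the helper name and the 256 KiB cap are
+# hypothetical application policy, not anything mandated by h2 or RFC 7540.
+def _example_check_remote_settings(conn, event):
+    from .errors import ErrorCodes        # local imports: this module does
+    from .settings import SettingCodes    # not otherwise need these names
+    change = event.changed_settings.get(SettingCodes.MAX_FRAME_SIZE)
+    if change is not None and change.new_value > 256 * 1024:
+        # The docstring above recommends tearing the connection down with
+        # PROTOCOL_ERROR if the new settings are unacceptable.
+        conn.close_connection(error_code=ErrorCodes.PROTOCOL_ERROR)
+
+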
+class PingReceived(Event):
+ """
+ The PingReceived event is fired whenever a PING is received. It contains
+ the 'opaque data' of the PING frame. A ping acknowledgment with the same
+ 'opaque data' is automatically emitted after receiving a ping.
+
+ .. versionadded:: 3.1.0
+ """
+ def __init__(self):
+ #: The data included on the ping.
+ self.ping_data = None
+
+ def __repr__(self):
+ return "<PingReceived ping_data:%s>" % (
+ _bytes_representation(self.ping_data),
+ )
+
+
+class PingAcknowledged(Event):
+ """
+ Same as PingAckReceived.
+
+ .. deprecated:: 3.1.0
+ """
+ def __init__(self):
+ #: The data included on the ping.
+ self.ping_data = None
+
+ def __repr__(self):
+ return "<PingAckReceived ping_data:%s>" % (
+ _bytes_representation(self.ping_data),
+ )
+
+
+class PingAckReceived(PingAcknowledged):
+ """
+ The PingAckReceived event is fired whenever a PING acknowledgment is
+ received. It contains the 'opaque data' of the PING+ACK frame, allowing the
+ user to correlate PINGs and calculate RTT.
+
+ .. versionadded:: 3.1.0
+ """
+ pass
+
+
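+# A minimal illustrative sketch, not part of the h2 API: correlating PINGs
+# with their ACKs to estimate round-trip time.  ``sent_at`` is assumed to be
+# an application-maintained dict keyed by the 8 bytes of opaque data passed
+# to ``H2Connection.ping()``, mapping to ``time.monotonic()`` timestamps
+# (Python 3) recorded when each ping was sent; the helper is hypothetical.
+def _example_round_trip_time(sent_at, event):
+    import time
+    if isinstance(event, PingAckReceived) and event.ping_data in sent_at:
+        return time.monotonic() - sent_at.pop(event.ping_data)
+    return None
+
+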
+class StreamEnded(Event):
+ """
+ The StreamEnded event is fired whenever a stream is ended by a remote
+ party. The stream may not be fully closed if it has not been closed
+ locally, but no further data or headers should be expected on that stream.
+ """
+ def __init__(self):
+ #: The Stream ID of the stream that was closed.
+ self.stream_id = None
+
+ def __repr__(self):
+ return "<StreamEnded stream_id:%s>" % self.stream_id
+
+
+class StreamReset(Event):
+ """
+ The StreamReset event is fired in two situations. The first is when the
+ remote party forcefully resets the stream. The second is when the remote
+ party has made a protocol error which only affects a single stream. In this
+ case, Hyper-h2 will terminate the stream early and return this event.
+
+ .. versionchanged:: 2.0.0
+ This event is now fired when Hyper-h2 automatically resets a stream.
+ """
+ def __init__(self):
+ #: The Stream ID of the stream that was reset.
+ self.stream_id = None
+
+ #: The error code given. Either one of :class:`ErrorCodes
+ #: <h2.errors.ErrorCodes>` or ``int``
+ self.error_code = None
+
+ #: Whether the remote peer sent a RST_STREAM or we did.
+ self.remote_reset = True
+
+ def __repr__(self):
+ return "<StreamReset stream_id:%s, error_code:%s, remote_reset:%s>" % (
+ self.stream_id, self.error_code, self.remote_reset
+ )
+
+
+class PushedStreamReceived(Event):
+ """
+ The PushedStreamReceived event is fired whenever a pushed stream has been
+ received from a remote peer. The event carries on it the new stream ID, the
+ ID of the parent stream, and the request headers pushed by the remote peer.
+ """
+ def __init__(self):
+ #: The Stream ID of the stream created by the push.
+ self.pushed_stream_id = None
+
+ #: The Stream ID of the stream that the push is related to.
+ self.parent_stream_id = None
+
+ #: The request headers, sent by the remote party in the push.
+ self.headers = None
+
+ def __repr__(self):
+ return (
+ "<PushedStreamReceived pushed_stream_id:%s, parent_stream_id:%s, "
+ "headers:%s>" % (
+ self.pushed_stream_id,
+ self.parent_stream_id,
+ self.headers,
+ )
+ )
+
+
+class SettingsAcknowledged(Event):
+ """
+ The SettingsAcknowledged event is fired whenever a settings ACK is received
+ from the remote peer. The event carries on it the settings that were
+ acknowledged, in the same format as
+ :class:`h2.events.RemoteSettingsChanged`.
+ """
+ def __init__(self):
+ #: A dictionary of setting byte to
+ #: :class:`ChangedSetting <h2.settings.ChangedSetting>`, representing
+ #: the changed settings.
+ self.changed_settings = {}
+
+ def __repr__(self):
+ return "<SettingsAcknowledged changed_settings:{%s}>" % (
+ ", ".join(repr(cs) for cs in self.changed_settings.values()),
+ )
+
+
+class PriorityUpdated(Event):
+ """
+ The PriorityUpdated event is fired whenever a stream sends updated priority
+ information. This can occur when the stream is opened, or at any time
+ during the stream lifetime.
+
+ This event is purely advisory, and does not need to be acted on.
+
+ .. versionadded:: 2.0.0
+ """
+ def __init__(self):
+ #: The ID of the stream whose priority information is being updated.
+ self.stream_id = None
+
+ #: The new stream weight. May be the same as the original stream
+ #: weight. An integer between 1 and 256.
+ self.weight = None
+
+ #: The stream ID this stream now depends on. May be ``0``.
+ self.depends_on = None
+
+ #: Whether the stream *exclusively* depends on the parent stream. If it
+ #: does, this stream should inherit the current children of its new
+ #: parent.
+ self.exclusive = None
+
+ def __repr__(self):
+ return (
+ "<PriorityUpdated stream_id:%s, weight:%s, depends_on:%s, "
+ "exclusive:%s>" % (
+ self.stream_id,
+ self.weight,
+ self.depends_on,
+ self.exclusive
+ )
+ )
+
+
+class ConnectionTerminated(Event):
+ """
+ The ConnectionTerminated event is fired when a connection is torn down by
+ the remote peer using a GOAWAY frame. Once received, no further action may
+ be taken on the connection: a new connection must be established.
+ """
+ def __init__(self):
+ #: The error code cited when tearing down the connection. Should be
+ #: one of :class:`ErrorCodes <h2.errors.ErrorCodes>`, but may not be if
+ #: unknown HTTP/2 extensions are being used.
+ self.error_code = None
+
+ #: The stream ID of the last stream the remote peer saw. This can
+ #: provide an indication of what data, if any, never reached the remote
+ #: peer and so can safely be resent.
+ self.last_stream_id = None
+
+ #: Additional debug data that can be appended to the GOAWAY frame.
+ self.additional_data = None
+
+ def __repr__(self):
+ return (
+ "<ConnectionTerminated error_code:%s, last_stream_id:%s, "
+ "additional_data:%s>" % (
+ self.error_code,
+ self.last_stream_id,
+ _bytes_representation(
+ self.additional_data[:20]
+ if self.additional_data else None)
+ )
+ )
+
+
+class AlternativeServiceAvailable(Event):
+ """
+ The AlternativeServiceAvailable event is fired when the remote peer
+ advertises an `RFC 7838 <https://tools.ietf.org/html/rfc7838>`_ Alternative
+ Service using an ALTSVC frame.
+
+ This event always carries the origin to which the ALTSVC information
+ applies. That origin is either supplied by the server directly, or inferred
+ by hyper-h2 from the ``:authority`` pseudo-header field that was sent by
+ the user when initiating a given stream.
+
+ This event also carries what RFC 7838 calls the "Alternative Service Field
+ Value", which is formatted like a HTTP header field and contains the
+ relevant alternative service information. Hyper-h2 does not parse or in any
+ way modify that information: the user is required to do that.
+
+ This event can only be fired on the client end of a connection.
+
+ .. versionadded:: 2.3.0
+ """
+ def __init__(self):
+ #: The origin to which the alternative service field value applies.
+ #: This field is either supplied by the server directly, or inferred by
+ #: hyper-h2 from the ``:authority`` pseudo-header field that was sent
+ #: by the user when initiating the stream on which the frame was
+ #: received.
+ self.origin = None
+
+ #: The ALTSVC field value. This contains information about the HTTP
+ #: alternative service being advertised by the server. Hyper-h2 does
+ #: not parse this field: it is left exactly as sent by the server. The
+ #: structure of the data in this field is given by `RFC 7838 Section 3
+ #: <https://tools.ietf.org/html/rfc7838#section-3>`_.
+ self.field_value = None
+
+ def __repr__(self):
+ return (
+ "<AlternativeServiceAvailable origin:%s, field_value:%s>" % (
+ self.origin.decode('utf-8', 'ignore'),
+ self.field_value.decode('utf-8', 'ignore'),
+ )
+ )
+
+
+class UnknownFrameReceived(Event):
+ """
+ The UnknownFrameReceived event is fired when the remote peer sends a frame
+ that hyper-h2 does not understand. This occurs primarily when the remote
+ peer is employing HTTP/2 extensions that hyper-h2 doesn't know anything
+ about.
+
+ RFC 7540 requires that HTTP/2 implementations ignore these frames. hyper-h2
+ does so. However, this event is fired to allow implementations to perform
+ special processing on those frames if needed (e.g. if the implementation
+ is capable of handling the frame itself).
+
+ .. versionadded:: 2.7.0
+ """
+ def __init__(self):
+ #: The hyperframe Frame object that encapsulates the received frame.
+ self.frame = None
+
+ def __repr__(self):
+ return "<UnknownFrameReceived>"
+
+
+def _bytes_representation(data):
+ """
+ Converts a bytestring into something that is safe to print on all Python
+ platforms.
+
+ This function is relatively expensive, so it should not be called on the
+ mainline of the code. It's safe to use in things like object repr methods
+ though.
+ """
+ if data is None:
+ return None
+
+ hex = binascii.hexlify(data)
+
+ # This is moderately clever: on all Python versions hexlify returns a byte
+ # string. On Python 3 we want an actual string, so we just check whether
+ # that's what we have.
+ if not isinstance(hex, str): # pragma: no cover
+ hex = hex.decode('ascii')
+
+ return hex
diff --git a/testing/web-platform/tests/tools/third_party/h2/h2/exceptions.py b/testing/web-platform/tests/tools/third_party/h2/h2/exceptions.py
new file mode 100644
index 0000000000..388f9e9a38
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/h2/exceptions.py
@@ -0,0 +1,186 @@
+# -*- coding: utf-8 -*-
+"""
+h2/exceptions
+~~~~~~~~~~~~~
+
+Exceptions for the HTTP/2 module.
+"""
+import h2.errors
+
+
+class H2Error(Exception):
+ """
+ The base class for all exceptions for the HTTP/2 module.
+ """
+
+
+class ProtocolError(H2Error):
+ """
+ An action was attempted in violation of the HTTP/2 protocol.
+ """
+ #: The error code that corresponds to this kind of Protocol Error.
+ error_code = h2.errors.ErrorCodes.PROTOCOL_ERROR
+
+
+class FrameTooLargeError(ProtocolError):
+ """
+ The frame that we tried to send or that we received was too large.
+ """
+ #: The error code that corresponds to this kind of Protocol Error.
+ error_code = h2.errors.ErrorCodes.FRAME_SIZE_ERROR
+
+
+class FrameDataMissingError(ProtocolError):
+ """
+ The frame that we received is missing some data.
+
+ .. versionadded:: 2.0.0
+ """
+ #: The error code that corresponds to this kind of Protocol Error
+ error_code = h2.errors.ErrorCodes.FRAME_SIZE_ERROR
+
+
+class TooManyStreamsError(ProtocolError):
+ """
+ An attempt was made to open a stream that would lead to too many concurrent
+ streams.
+ """
+ pass
+
+
+class FlowControlError(ProtocolError):
+ """
+ An attempted action violates flow control constraints.
+ """
+ #: The error code that corresponds to this kind of
+ #: :class:`ProtocolError <h2.exceptions.ProtocolError>`
+ error_code = h2.errors.ErrorCodes.FLOW_CONTROL_ERROR
+
+
+class StreamIDTooLowError(ProtocolError):
+ """
+ An attempt was made to open a stream that had an ID that is lower than the
+ highest ID we have seen on this connection.
+ """
+ def __init__(self, stream_id, max_stream_id):
+ #: The ID of the stream that we attempted to open.
+ self.stream_id = stream_id
+
+ #: The current highest-seen stream ID.
+ self.max_stream_id = max_stream_id
+
+ def __str__(self):
+ return "StreamIDTooLowError: %d is lower than %d" % (
+ self.stream_id, self.max_stream_id
+ )
+
+
+class NoAvailableStreamIDError(ProtocolError):
+ """
+ There are no available stream IDs left to the connection. All stream IDs
+ have been exhausted.
+
+ .. versionadded:: 2.0.0
+ """
+ pass
+
+
+class NoSuchStreamError(ProtocolError):
+ """
+ A stream-specific action referenced a stream that does not exist.
+
+ .. versionchanged:: 2.0.0
+ Became a subclass of :class:`ProtocolError
+ <h2.exceptions.ProtocolError>`
+ """
+ def __init__(self, stream_id):
+ #: The stream ID that corresponds to the non-existent stream.
+ self.stream_id = stream_id
+
+
+class StreamClosedError(NoSuchStreamError):
+ """
+ A more specific form of
+ :class:`NoSuchStreamError <h2.exceptions.NoSuchStreamError>`. Indicates
+ that the stream has since been closed, and that all state relating to that
+ stream has been removed.
+ """
+ def __init__(self, stream_id):
+ #: The stream ID that corresponds to the nonexistent stream.
+ self.stream_id = stream_id
+
+ #: The relevant HTTP/2 error code.
+ self.error_code = h2.errors.ErrorCodes.STREAM_CLOSED
+
+ # Any events that internal code may need to fire. Not relevant to
+ # external users that may receive a StreamClosedError.
+ self._events = []
+
+
+class InvalidSettingsValueError(ProtocolError, ValueError):
+ """
+ An attempt was made to set an invalid Settings value.
+
+ .. versionadded:: 2.0.0
+ """
+ def __init__(self, msg, error_code):
+ super(InvalidSettingsValueError, self).__init__(msg)
+ self.error_code = error_code
+
+
+class InvalidBodyLengthError(ProtocolError):
+ """
+ The remote peer sent more or less data than the Content-Length header
+ indicated.
+
+ .. versionadded:: 2.0.0
+ """
+ def __init__(self, expected, actual):
+ self.expected_length = expected
+ self.actual_length = actual
+
+ def __str__(self):
+ return "InvalidBodyLengthError: Expected %d bytes, received %d" % (
+ self.expected_length, self.actual_length
+ )
+
+
+class UnsupportedFrameError(ProtocolError, KeyError):
+ """
+ The remote peer sent a frame that is unsupported in this context.
+
+ .. versionadded:: 2.1.0
+ """
+ # TODO: Remove the KeyError in 3.0.0
+ pass
+
+
+class RFC1122Error(H2Error):
+ """
+ Emitted when users attempt to do something that is literally allowed by the
+ relevant RFC, but is sufficiently ill-defined that it's unwise to allow
+ users to actually do it.
+
+ While there is some disagreement about whether or not we should be liberal
+ in what we accept, it is a truth universally acknowledged that we should be
+ conservative in what we emit.
+
+ .. versionadded:: 2.4.0
+ """
+ # shazow says I'm going to regret naming the exception this way. If that
+ # turns out to be true, TELL HIM NOTHING.
+ pass
+
+
+class DenialOfServiceError(ProtocolError):
+ """
+ Emitted when the remote peer exhibits a behaviour that is likely to be an
+ attempt to perform a Denial of Service attack on the implementation. This
+ is a form of ProtocolError that carries a different error code, and allows
+ more easy detection of this kind of behaviour.
+
+ .. versionadded:: 2.5.0
+ """
+ #: The error code that corresponds to this kind of
+ #: :class:`ProtocolError <h2.exceptions.ProtocolError>`
+ error_code = h2.errors.ErrorCodes.ENHANCE_YOUR_CALM
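+
+
+# A minimal illustrative sketch, not part of the h2 API: the ``error_code``
+# class attribute defined on the ProtocolError subclasses above is the code a
+# connection would typically cite (for example in a GOAWAY or RST_STREAM
+# frame) when the corresponding error is raised.  The helper is hypothetical.
+def _example_error_code_for(exc):
+    # Fall back to a plain PROTOCOL_ERROR for exceptions that do not carry a
+    # more specific code of their own.
+    return getattr(exc, "error_code", h2.errors.ErrorCodes.PROTOCOL_ERROR)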
diff --git a/testing/web-platform/tests/tools/third_party/h2/h2/frame_buffer.py b/testing/web-platform/tests/tools/third_party/h2/h2/frame_buffer.py
new file mode 100644
index 0000000000..e79f6ec2de
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/h2/frame_buffer.py
@@ -0,0 +1,175 @@
+# -*- coding: utf-8 -*-
+"""
+h2/frame_buffer
+~~~~~~~~~~~~~~~
+
+A data structure that provides a way to iterate over a byte buffer in terms of
+frames.
+"""
+from hyperframe.exceptions import InvalidFrameError
+from hyperframe.frame import (
+ Frame, HeadersFrame, ContinuationFrame, PushPromiseFrame
+)
+
+from .exceptions import (
+ ProtocolError, FrameTooLargeError, FrameDataMissingError
+)
+
+# To avoid a DOS attack based on sending loads of continuation frames, we limit
+# the maximum number we're prepared to receive. In this case, we'll set the
+# limit to 64, which means the largest encoded header block we can receive by
+# default is 262144 bytes long, and the largest possible *at all* is 1073741760
+# bytes long.
+#
+# This value seems reasonable for now, but in future we may want to evaluate
+# making it configurable.
+CONTINUATION_BACKLOG = 64
+
+
+class FrameBuffer(object):
+ """
+ This is a data structure that expects to act as a buffer for HTTP/2 data
+ that allows iteration in terms of H2 frames.
+ """
+ def __init__(self, server=False):
+ self.data = b''
+ self.max_frame_size = 0
+ self._preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n' if server else b''
+ self._preamble_len = len(self._preamble)
+ self._headers_buffer = []
+
+ def add_data(self, data):
+ """
+ Add more data to the frame buffer.
+
+ :param data: A bytestring containing the byte buffer.
+ """
+ if self._preamble_len:
+ data_len = len(data)
+ of_which_preamble = min(self._preamble_len, data_len)
+
+ if self._preamble[:of_which_preamble] != data[:of_which_preamble]:
+ raise ProtocolError("Invalid HTTP/2 preamble.")
+
+ data = data[of_which_preamble:]
+ self._preamble_len -= of_which_preamble
+ self._preamble = self._preamble[of_which_preamble:]
+
+ self.data += data
+
+ def _parse_frame_header(self, data):
+ """
+ Parses the frame header from the data. Either returns a tuple of
+ (frame, length), or throws an exception. The returned frame may be None
+ if the frame is of unknown type.
+ """
+ try:
+ frame, length = Frame.parse_frame_header(data[:9])
+ except ValueError as e:
+ # The frame header is invalid. This is a ProtocolError
+ raise ProtocolError("Invalid frame header received: %s" % str(e))
+
+ return frame, length
+
+ def _validate_frame_length(self, length):
+ """
+ Confirm that the frame is an appropriate length.
+ """
+ if length > self.max_frame_size:
+ raise FrameTooLargeError(
+ "Received overlong frame: length %d, max %d" %
+ (length, self.max_frame_size)
+ )
+
+ def _update_header_buffer(self, f):
+ """
+ Updates the internal header buffer. Returns a frame that should replace
+ the current one. May throw exceptions if this frame is invalid.
+ """
+ # Check if we're in the middle of a headers block. If we are, this
+ # frame *must* be a CONTINUATION frame with the same stream ID as the
+ # leading HEADERS or PUSH_PROMISE frame. Anything else is a
+ # ProtocolError. If the frame *is* valid, append it to the header
+ # buffer.
+ if self._headers_buffer:
+ stream_id = self._headers_buffer[0].stream_id
+ valid_frame = (
+ f is not None and
+ isinstance(f, ContinuationFrame) and
+ f.stream_id == stream_id
+ )
+ if not valid_frame:
+ raise ProtocolError("Invalid frame during header block.")
+
+ # Append the frame to the buffer.
+ self._headers_buffer.append(f)
+ if len(self._headers_buffer) > CONTINUATION_BACKLOG:
+ raise ProtocolError("Too many continuation frames received.")
+
+ # If this is the end of the header block, then we want to build a
+ # mutant HEADERS frame that's massive. Use the original one we got,
+ # then set END_HEADERS and set its data appropriately. If it's not
+ # the end of the block, lose the current frame: we can't yield it.
+ if 'END_HEADERS' in f.flags:
+ f = self._headers_buffer[0]
+ f.flags.add('END_HEADERS')
+ f.data = b''.join(x.data for x in self._headers_buffer)
+ self._headers_buffer = []
+ else:
+ f = None
+ elif (isinstance(f, (HeadersFrame, PushPromiseFrame)) and
+ 'END_HEADERS' not in f.flags):
+ # This is the start of a headers block! Save the frame off and then
+ # act like we didn't receive one.
+ self._headers_buffer.append(f)
+ f = None
+
+ return f
+
+ # The methods below support the iterator protocol.
+ def __iter__(self):
+ return self
+
+ def next(self): # Python 2
+ # First, check that we have enough data to successfully parse the
+ # next frame header. If not, bail. Otherwise, parse it.
+ if len(self.data) < 9:
+ raise StopIteration()
+
+ try:
+ f, length = self._parse_frame_header(self.data)
+ except InvalidFrameError: # pragma: no cover
+ raise ProtocolError("Received frame with invalid frame header.")
+
+ # Next, check that we have enough length to parse the frame body. If
+ # not, bail, leaving the frame header data in the buffer for next time.
+ if len(self.data) < length + 9:
+ raise StopIteration()
+
+ # Confirm the frame has an appropriate length.
+ self._validate_frame_length(length)
+
+ # Don't try to parse the body if we didn't get a frame we know about:
+ # there's nothing we can do with it anyway.
+ if f is not None:
+ try:
+ f.parse_body(memoryview(self.data[9:9+length]))
+ except InvalidFrameError:
+ raise FrameDataMissingError("Frame data missing or invalid")
+
+ # At this point, as we know we'll use or discard the entire frame, we
+ # can update the data.
+ self.data = self.data[9+length:]
+
+ # Pass the frame through the header buffer.
+ f = self._update_header_buffer(f)
+
+ # If we got a frame we didn't understand or shouldn't yield, rather
+ # than return None it'd be better if we just tried to get the next
+ # frame in the sequence instead. Recurse back into ourselves to do
+ # that. This is safe because the amount of work we have to do here is
+ # strictly bounded by the length of the buffer.
+ return f if f is not None else self.next()
+
+ def __next__(self): # Python 3
+ return self.next()
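+
+
+# A minimal illustrative sketch, not part of the h2 API: feeding raw bytes
+# into a FrameBuffer and collecting the complete frames it yields.  Note that
+# ``max_frame_size`` must be set before iterating (h2's connection layer
+# normally copies it from the local SETTINGS); 16384 octets is simply the
+# RFC 7540 default.  The helper name is hypothetical.
+def _example_parse_frames(raw_bytes):
+    buffer = FrameBuffer(server=False)
+    buffer.max_frame_size = 16384
+    buffer.add_data(raw_bytes)
+    # Iteration stops as soon as only a partial frame remains in the buffer.
+    return list(buffer)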
diff --git a/testing/web-platform/tests/tools/third_party/h2/h2/settings.py b/testing/web-platform/tests/tools/third_party/h2/h2/settings.py
new file mode 100644
index 0000000000..bf87c94f4e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/h2/settings.py
@@ -0,0 +1,339 @@
+# -*- coding: utf-8 -*-
+"""
+h2/settings
+~~~~~~~~~~~
+
+This module contains a HTTP/2 settings object. This object provides a simple
+API for manipulating HTTP/2 settings, keeping track of both the current active
+state of the settings and the unacknowledged future values of the settings.
+"""
+import collections
+import enum
+
+from hyperframe.frame import SettingsFrame
+
+from h2.errors import ErrorCodes
+from h2.exceptions import InvalidSettingsValueError
+
+try:
+ from collections.abc import MutableMapping
+except ImportError: # pragma: no cover
+ # Python 2.7 compatibility
+ from collections import MutableMapping
+
+
+class SettingCodes(enum.IntEnum):
+ """
+ All known HTTP/2 setting codes.
+
+ .. versionadded:: 2.6.0
+ """
+
+ #: Allows the sender to inform the remote endpoint of the maximum size of
+ #: the header compression table used to decode header blocks, in octets.
+ HEADER_TABLE_SIZE = SettingsFrame.HEADER_TABLE_SIZE
+
+ #: This setting can be used to disable server push. To disable server push
+ #: on a client, set this to 0.
+ ENABLE_PUSH = SettingsFrame.ENABLE_PUSH
+
+ #: Indicates the maximum number of concurrent streams that the sender will
+ #: allow.
+ MAX_CONCURRENT_STREAMS = SettingsFrame.MAX_CONCURRENT_STREAMS
+
+ #: Indicates the sender's initial window size (in octets) for stream-level
+ #: flow control.
+ INITIAL_WINDOW_SIZE = SettingsFrame.INITIAL_WINDOW_SIZE
+
+ #: Indicates the size of the largest frame payload that the sender is
+ #: willing to receive, in octets.
+ MAX_FRAME_SIZE = SettingsFrame.MAX_FRAME_SIZE
+
+ #: This advisory setting informs a peer of the maximum size of header list
+ #: that the sender is prepared to accept, in octets. The value is based on
+ #: the uncompressed size of header fields, including the length of the name
+ #: and value in octets plus an overhead of 32 octets for each header field.
+ MAX_HEADER_LIST_SIZE = SettingsFrame.MAX_HEADER_LIST_SIZE
+
+ #: This setting can be used to enable the connect protocol. To enable on a
+ #: client set this to 1.
+ ENABLE_CONNECT_PROTOCOL = SettingsFrame.ENABLE_CONNECT_PROTOCOL
+
+
+def _setting_code_from_int(code):
+ """
+ Given an integer setting code, returns either one of :class:`SettingCodes
+ <h2.settings.SettingCodes>` or, if not present in the known set of codes,
+ returns the integer directly.
+ """
+ try:
+ return SettingCodes(code)
+ except ValueError:
+ return code
+
+
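+# A minimal illustrative sketch, not part of the h2 API, of the helper above:
+# known codes are normalised to SettingCodes members, while codes from
+# unknown extensions pass through unchanged as plain integers.
+def _example_setting_code_lookup():
+    assert _setting_code_from_int(0x4) is SettingCodes.INITIAL_WINDOW_SIZE
+    assert _setting_code_from_int(0xf0) == 0xf0
+
+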
+class ChangedSetting:
+
+ def __init__(self, setting, original_value, new_value):
+ #: The setting code given. Either one of :class:`SettingCodes
+ #: <h2.settings.SettingCodes>` or ``int``
+ #:
+ #: .. versionchanged:: 2.6.0
+ self.setting = setting
+
+ #: The original value before being changed.
+ self.original_value = original_value
+
+ #: The new value after being changed.
+ self.new_value = new_value
+
+ def __repr__(self):
+ return (
+ "ChangedSetting(setting=%s, original_value=%s, "
+ "new_value=%s)"
+ ) % (
+ self.setting,
+ self.original_value,
+ self.new_value
+ )
+
+
+class Settings(MutableMapping):
+ """
+ An object that encapsulates HTTP/2 settings state.
+
+ HTTP/2 Settings are a complex beast. Each party, remote and local, has its
+ own settings and a view of the other party's settings. When a settings
+ frame is emitted by a peer it cannot assume that the new settings values
+ frame is emitted by a peer, it cannot assume that the new settings values
+ multiple settings changes can be "in flight" at the same time, all with
+ different values.
+
+ This object encapsulates this mess. It provides a dict-like interface to
+ settings, which returns the *current* values of the settings in question.
+ Additionally, it keeps track of the stack of proposed values: each time an
+ acknowledgement is sent/received, it updates the current values with the
+ stack of proposed values. On top of all that, it validates the values to
+ make sure they're allowed, and raises :class:`InvalidSettingsValueError
+ <h2.exceptions.InvalidSettingsValueError>` if they are not.
+
+ Finally, this object understands what the default values of the HTTP/2
+ settings are, and sets those defaults appropriately.
+
+ .. versionchanged:: 2.2.0
+ Added the ``initial_values`` parameter.
+
+ .. versionchanged:: 2.5.0
+ Added the ``max_header_list_size`` property.
+
+ :param client: (optional) Whether these settings should be defaulted for a
+ client implementation or a server implementation. Defaults to ``True``.
+ :type client: ``bool``
+ :param initial_values: (optional) Any initial values the user would like
+ set, rather than RFC 7540's defaults.
+ :type initial_values: ``MutableMapping``
+ """
+ def __init__(self, client=True, initial_values=None):
+ # Backing object for the settings. This is a dictionary of
+ # (setting: [list of values]), where the first value in the list is the
+ # current value of the setting. Strictly this doesn't use lists but
+ # instead uses collections.deque to avoid repeated memory allocations.
+ #
+ # This contains the default values for HTTP/2.
+ self._settings = {
+ SettingCodes.HEADER_TABLE_SIZE: collections.deque([4096]),
+ SettingCodes.ENABLE_PUSH: collections.deque([int(client)]),
+ SettingCodes.INITIAL_WINDOW_SIZE: collections.deque([65535]),
+ SettingCodes.MAX_FRAME_SIZE: collections.deque([16384]),
+ SettingCodes.ENABLE_CONNECT_PROTOCOL: collections.deque([0]),
+ }
+ if initial_values is not None:
+ for key, value in initial_values.items():
+ invalid = _validate_setting(key, value)
+ if invalid:
+ raise InvalidSettingsValueError(
+ "Setting %d has invalid value %d" % (key, value),
+ error_code=invalid
+ )
+ self._settings[key] = collections.deque([value])
+
+ def acknowledge(self):
+ """
+ The settings have been acknowledged, either by the user (remote
+ settings) or by the remote peer (local settings).
+
+ :returns: A dict of {setting: ChangedSetting} that were applied.
+ """
+ changed_settings = {}
+
+ # If there is more than one setting in the list, we have a setting
+ # value outstanding. Update them.
+ for k, v in self._settings.items():
+ if len(v) > 1:
+ old_setting = v.popleft()
+ new_setting = v[0]
+ changed_settings[k] = ChangedSetting(
+ k, old_setting, new_setting
+ )
+
+ return changed_settings
+
+ # Provide easy-access to well known settings.
+ @property
+ def header_table_size(self):
+ """
+ The current value of the :data:`HEADER_TABLE_SIZE
+ <h2.settings.SettingCodes.HEADER_TABLE_SIZE>` setting.
+ """
+ return self[SettingCodes.HEADER_TABLE_SIZE]
+
+ @header_table_size.setter
+ def header_table_size(self, value):
+ self[SettingCodes.HEADER_TABLE_SIZE] = value
+
+ @property
+ def enable_push(self):
+ """
+ The current value of the :data:`ENABLE_PUSH
+ <h2.settings.SettingCodes.ENABLE_PUSH>` setting.
+ """
+ return self[SettingCodes.ENABLE_PUSH]
+
+ @enable_push.setter
+ def enable_push(self, value):
+ self[SettingCodes.ENABLE_PUSH] = value
+
+ @property
+ def initial_window_size(self):
+ """
+ The current value of the :data:`INITIAL_WINDOW_SIZE
+ <h2.settings.SettingCodes.INITIAL_WINDOW_SIZE>` setting.
+ """
+ return self[SettingCodes.INITIAL_WINDOW_SIZE]
+
+ @initial_window_size.setter
+ def initial_window_size(self, value):
+ self[SettingCodes.INITIAL_WINDOW_SIZE] = value
+
+ @property
+ def max_frame_size(self):
+ """
+ The current value of the :data:`MAX_FRAME_SIZE
+ <h2.settings.SettingCodes.MAX_FRAME_SIZE>` setting.
+ """
+ return self[SettingCodes.MAX_FRAME_SIZE]
+
+ @max_frame_size.setter
+ def max_frame_size(self, value):
+ self[SettingCodes.MAX_FRAME_SIZE] = value
+
+ @property
+ def max_concurrent_streams(self):
+ """
+ The current value of the :data:`MAX_CONCURRENT_STREAMS
+ <h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS>` setting.
+ """
+ return self.get(SettingCodes.MAX_CONCURRENT_STREAMS, 2**32+1)
+
+ @max_concurrent_streams.setter
+ def max_concurrent_streams(self, value):
+ self[SettingCodes.MAX_CONCURRENT_STREAMS] = value
+
+ @property
+ def max_header_list_size(self):
+ """
+ The current value of the :data:`MAX_HEADER_LIST_SIZE
+ <h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE>` setting. If not set,
+ returns ``None``, which means unlimited.
+
+ .. versionadded:: 2.5.0
+ """
+ return self.get(SettingCodes.MAX_HEADER_LIST_SIZE, None)
+
+ @max_header_list_size.setter
+ def max_header_list_size(self, value):
+ self[SettingCodes.MAX_HEADER_LIST_SIZE] = value
+
+ @property
+ def enable_connect_protocol(self):
+ """
+ The current value of the :data:`ENABLE_CONNECT_PROTOCOL
+ <h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL>` setting.
+ """
+ return self[SettingCodes.ENABLE_CONNECT_PROTOCOL]
+
+ @enable_connect_protocol.setter
+ def enable_connect_protocol(self, value):
+ self[SettingCodes.ENABLE_CONNECT_PROTOCOL] = value
+
+ # Implement the MutableMapping API.
+ def __getitem__(self, key):
+ val = self._settings[key][0]
+
+ # Things that were created when a setting was received should stay
+ # KeyError'd.
+ if val is None:
+ raise KeyError
+
+ return val
+
+ def __setitem__(self, key, value):
+ invalid = _validate_setting(key, value)
+ if invalid:
+ raise InvalidSettingsValueError(
+ "Setting %d has invalid value %d" % (key, value),
+ error_code=invalid
+ )
+
+ try:
+ items = self._settings[key]
+ except KeyError:
+ items = collections.deque([None])
+ self._settings[key] = items
+
+ items.append(value)
+
+ def __delitem__(self, key):
+ del self._settings[key]
+
+ def __iter__(self):
+ return self._settings.__iter__()
+
+ def __len__(self):
+ return len(self._settings)
+
+ def __eq__(self, other):
+ if isinstance(other, Settings):
+ return self._settings == other._settings
+ else:
+ return NotImplemented
+
+ def __ne__(self, other):
+ if isinstance(other, Settings):
+ return not self == other
+ else:
+ return NotImplemented
+
+
+def _validate_setting(setting, value): # noqa: C901
+ """
+ Confirms that a specific setting has a well-formed value. If the setting is
+ invalid, returns an error code. Otherwise, returns 0 (NO_ERROR).
+ """
+ if setting == SettingCodes.ENABLE_PUSH:
+ if value not in (0, 1):
+ return ErrorCodes.PROTOCOL_ERROR
+ elif setting == SettingCodes.INITIAL_WINDOW_SIZE:
+ if not 0 <= value <= 2147483647: # 2^31 - 1
+ return ErrorCodes.FLOW_CONTROL_ERROR
+ elif setting == SettingCodes.MAX_FRAME_SIZE:
+ if not 16384 <= value <= 16777215: # 2^14 and 2^24 - 1
+ return ErrorCodes.PROTOCOL_ERROR
+ elif setting == SettingCodes.MAX_HEADER_LIST_SIZE:
+ if value < 0:
+ return ErrorCodes.PROTOCOL_ERROR
+ elif setting == SettingCodes.ENABLE_CONNECT_PROTOCOL:
+ if value not in (0, 1):
+ return ErrorCodes.PROTOCOL_ERROR
+
+ return 0
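+
+
+# A minimal illustrative sketch, not part of the h2 API, of the two-phase
+# behaviour documented on the Settings class above: assigning a setting only
+# *proposes* a new value, and the current value changes once acknowledge()
+# is called.  The helper name is hypothetical.
+def _example_settings_lifecycle():
+    settings = Settings(client=True)
+    assert settings.max_frame_size == 16384       # RFC 7540 default
+    settings.max_frame_size = 32768               # proposed, not yet active
+    assert settings.max_frame_size == 16384
+    changed = settings.acknowledge()              # {setting: ChangedSetting}
+    assert settings.max_frame_size == 32768
+    return changed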
diff --git a/testing/web-platform/tests/tools/third_party/h2/h2/stream.py b/testing/web-platform/tests/tools/third_party/h2/h2/stream.py
new file mode 100644
index 0000000000..1cb91786d4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/h2/stream.py
@@ -0,0 +1,1369 @@
+# -*- coding: utf-8 -*-
+"""
+h2/stream
+~~~~~~~~~
+
+An implementation of a HTTP/2 stream.
+"""
+from enum import Enum, IntEnum
+from hpack import HeaderTuple
+from hyperframe.frame import (
+ HeadersFrame, ContinuationFrame, DataFrame, WindowUpdateFrame,
+ RstStreamFrame, PushPromiseFrame, AltSvcFrame
+)
+
+from .errors import ErrorCodes, _error_code_from_int
+from .events import (
+ RequestReceived, ResponseReceived, DataReceived, WindowUpdated,
+ StreamEnded, PushedStreamReceived, StreamReset, TrailersReceived,
+ InformationalResponseReceived, AlternativeServiceAvailable,
+ _ResponseSent, _RequestSent, _TrailersSent, _PushedRequestSent
+)
+from .exceptions import (
+ ProtocolError, StreamClosedError, InvalidBodyLengthError, FlowControlError
+)
+from .utilities import (
+ guard_increment_window, is_informational_response, authority_from_headers,
+ validate_headers, validate_outbound_headers, normalize_outbound_headers,
+ HeaderValidationFlags, extract_method_header, normalize_inbound_headers
+)
+from .windows import WindowManager
+
+
+class StreamState(IntEnum):
+ IDLE = 0
+ RESERVED_REMOTE = 1
+ RESERVED_LOCAL = 2
+ OPEN = 3
+ HALF_CLOSED_REMOTE = 4
+ HALF_CLOSED_LOCAL = 5
+ CLOSED = 6
+
+
+class StreamInputs(Enum):
+ SEND_HEADERS = 0
+ SEND_PUSH_PROMISE = 1
+ SEND_RST_STREAM = 2
+ SEND_DATA = 3
+ SEND_WINDOW_UPDATE = 4
+ SEND_END_STREAM = 5
+ RECV_HEADERS = 6
+ RECV_PUSH_PROMISE = 7
+ RECV_RST_STREAM = 8
+ RECV_DATA = 9
+ RECV_WINDOW_UPDATE = 10
+ RECV_END_STREAM = 11
+ RECV_CONTINUATION = 12 # Added in 2.0.0
+ SEND_INFORMATIONAL_HEADERS = 13 # Added in 2.2.0
+ RECV_INFORMATIONAL_HEADERS = 14 # Added in 2.2.0
+ SEND_ALTERNATIVE_SERVICE = 15 # Added in 2.3.0
+ RECV_ALTERNATIVE_SERVICE = 16 # Added in 2.3.0
+ UPGRADE_CLIENT = 17 # Added 2.3.0
+ UPGRADE_SERVER = 18 # Added 2.3.0
+
+
+class StreamClosedBy(Enum):
+ SEND_END_STREAM = 0
+ RECV_END_STREAM = 1
+ SEND_RST_STREAM = 2
+ RECV_RST_STREAM = 3
+
+
+# This array is initialized once, and is indexed by the stream states above.
+# It indicates whether a stream in the given state is open. The reason we do
+# this is that we potentially check whether a stream in a given state is open
+# quite frequently: given that we check so often, we should do so in the
+# fastest and most performant way possible.
+STREAM_OPEN = [False for _ in range(0, len(StreamState))]
+STREAM_OPEN[StreamState.OPEN] = True
+STREAM_OPEN[StreamState.HALF_CLOSED_LOCAL] = True
+STREAM_OPEN[StreamState.HALF_CLOSED_REMOTE] = True
+
+
+class H2StreamStateMachine(object):
+ """
+ A single HTTP/2 stream state machine.
+
+ This stream object implements basically the state machine described in
+ RFC 7540 section 5.1.
+
+ :param stream_id: The stream ID of this stream. This is stored primarily
+ for logging purposes.
+ """
+ def __init__(self, stream_id):
+ self.state = StreamState.IDLE
+ self.stream_id = stream_id
+
+ #: Whether this peer is the client side of this stream.
+ self.client = None
+
+ # Whether trailers have been sent/received on this stream or not.
+ self.headers_sent = None
+ self.trailers_sent = None
+ self.headers_received = None
+ self.trailers_received = None
+
+ # How the stream was closed. One of StreamClosedBy.
+ self.stream_closed_by = None
+
+ def process_input(self, input_):
+ """
+ Process a specific input in the state machine.
+ """
+ if not isinstance(input_, StreamInputs):
+ raise ValueError("Input must be an instance of StreamInputs")
+
+ try:
+ func, target_state = _transitions[(self.state, input_)]
+ except KeyError:
+ old_state = self.state
+ self.state = StreamState.CLOSED
+ raise ProtocolError(
+ "Invalid input %s in state %s" % (input_, old_state)
+ )
+ else:
+ previous_state = self.state
+ self.state = target_state
+ if func is not None:
+ try:
+ return func(self, previous_state)
+ except ProtocolError:
+ self.state = StreamState.CLOSED
+ raise
+ except AssertionError as e: # pragma: no cover
+ self.state = StreamState.CLOSED
+ raise ProtocolError(e)
+
+ return []
+
+ def request_sent(self, previous_state):
+ """
+ Fires when a request is sent.
+ """
+ self.client = True
+ self.headers_sent = True
+ event = _RequestSent()
+
+ return [event]
+
+ def response_sent(self, previous_state):
+ """
+ Fires when something that should be a response is sent. This 'response'
+ may actually be trailers.
+ """
+ if not self.headers_sent:
+ if self.client is True or self.client is None:
+ raise ProtocolError("Client cannot send responses.")
+ self.headers_sent = True
+ event = _ResponseSent()
+ else:
+ assert not self.trailers_sent
+ self.trailers_sent = True
+ event = _TrailersSent()
+
+ return [event]
+
+ def request_received(self, previous_state):
+ """
+ Fires when a request is received.
+ """
+ assert not self.headers_received
+ assert not self.trailers_received
+
+ self.client = False
+ self.headers_received = True
+ event = RequestReceived()
+
+ event.stream_id = self.stream_id
+ return [event]
+
+ def response_received(self, previous_state):
+ """
+ Fires when a response is received. Also disambiguates between responses
+ and trailers.
+ """
+ if not self.headers_received:
+ assert self.client is True
+ self.headers_received = True
+ event = ResponseReceived()
+ else:
+ assert not self.trailers_received
+ self.trailers_received = True
+ event = TrailersReceived()
+
+ event.stream_id = self.stream_id
+ return [event]
+
+ def data_received(self, previous_state):
+ """
+ Fires when data is received.
+ """
+ event = DataReceived()
+ event.stream_id = self.stream_id
+ return [event]
+
+ def window_updated(self, previous_state):
+ """
+ Fires when a window update frame is received.
+ """
+ event = WindowUpdated()
+ event.stream_id = self.stream_id
+ return [event]
+
+ def stream_half_closed(self, previous_state):
+ """
+ Fires when an END_STREAM flag is received in the OPEN state,
+ transitioning this stream to a HALF_CLOSED_REMOTE state.
+ """
+ event = StreamEnded()
+ event.stream_id = self.stream_id
+ return [event]
+
+ def stream_ended(self, previous_state):
+ """
+ Fires when a stream is cleanly ended.
+ """
+ self.stream_closed_by = StreamClosedBy.RECV_END_STREAM
+ event = StreamEnded()
+ event.stream_id = self.stream_id
+ return [event]
+
+ def stream_reset(self, previous_state):
+ """
+ Fired when a stream is forcefully reset.
+ """
+ self.stream_closed_by = StreamClosedBy.RECV_RST_STREAM
+ event = StreamReset()
+ event.stream_id = self.stream_id
+ return [event]
+
+ def send_new_pushed_stream(self, previous_state):
+ """
+ Fires on the newly pushed stream, when pushed by the local peer.
+
+ No event here, but definitionally this peer must be a server.
+ """
+ assert self.client is None
+ self.client = False
+ self.headers_received = True
+ return []
+
+ def recv_new_pushed_stream(self, previous_state):
+ """
+ Fires on the newly pushed stream, when pushed by the remote peer.
+
+ No event here, but definitionally this peer must be a client.
+ """
+ assert self.client is None
+ self.client = True
+ self.headers_sent = True
+ return []
+
+ def send_push_promise(self, previous_state):
+ """
+ Fires on the already-existing stream when a PUSH_PROMISE frame is sent.
+ We may only send PUSH_PROMISE frames if we're a server.
+ """
+ if self.client is True:
+ raise ProtocolError("Cannot push streams from client peers.")
+
+ event = _PushedRequestSent()
+ return [event]
+
+ def recv_push_promise(self, previous_state):
+ """
+ Fires on the already-existing stream when a PUSH_PROMISE frame is
+ received. We may only receive PUSH_PROMISE frames if we're a client.
+
+ Fires a PushedStreamReceived event.
+ """
+ if not self.client:
+ if self.client is None: # pragma: no cover
+ msg = "Idle streams cannot receive pushes"
+ else: # pragma: no cover
+ msg = "Cannot receive pushed streams as a server"
+ raise ProtocolError(msg)
+
+ event = PushedStreamReceived()
+ event.parent_stream_id = self.stream_id
+ return [event]
+
+ def send_end_stream(self, previous_state):
+ """
+ Called when an attempt is made to send END_STREAM in the
+ HALF_CLOSED_REMOTE state.
+ """
+ self.stream_closed_by = StreamClosedBy.SEND_END_STREAM
+
+ def send_reset_stream(self, previous_state):
+ """
+ Called when an attempt is made to send RST_STREAM in a non-closed
+ stream state.
+ """
+ self.stream_closed_by = StreamClosedBy.SEND_RST_STREAM
+
+ def reset_stream_on_error(self, previous_state):
+ """
+ Called when we need to forcefully emit another RST_STREAM frame on
+ behalf of the state machine.
+
+ If this is the first time we've done this, we should also hang an event
+ off the StreamClosedError so that the user can be informed. We know
+ it's the first time we've done this if the stream is currently in a
+ state other than CLOSED.
+ """
+ self.stream_closed_by = StreamClosedBy.SEND_RST_STREAM
+
+ error = StreamClosedError(self.stream_id)
+
+ event = StreamReset()
+ event.stream_id = self.stream_id
+ event.error_code = ErrorCodes.STREAM_CLOSED
+ event.remote_reset = False
+ error._events = [event]
+ raise error
+
+ def recv_on_closed_stream(self, previous_state):
+ """
+ Called when an unexpected frame is received on an already-closed
+ stream.
+
+ An endpoint that receives an unexpected frame should treat it as
+ a stream error or connection error with type STREAM_CLOSED, depending
+ on the specific frame. The error handling is done at a higher level:
+ this just raises the appropriate error.
+ """
+ raise StreamClosedError(self.stream_id)
+
+ def send_on_closed_stream(self, previous_state):
+ """
+ Called when an attempt is made to send data on an already-closed
+ stream.
+
+ This essentially overrides the standard logic by throwing a
+ more-specific error: StreamClosedError. This is a ProtocolError, so it
+ matches the standard API of the state machine, but provides more detail
+ to the user.
+ """
+ raise StreamClosedError(self.stream_id)
+
+ def recv_push_on_closed_stream(self, previous_state):
+ """
+ Called when a PUSH_PROMISE frame is received on a fully closed
+ stream.
+
+ If the stream was closed by us sending a RST_STREAM frame, then we
+ presume that the PUSH_PROMISE was in flight when we reset the parent
+ stream. Rather than accept the new stream, we just reset it.
+ Otherwise, we should call this a PROTOCOL_ERROR: pushing a stream on a
+ naturally closed stream is a real problem because it creates a brand
+ new stream that the remote peer now believes exists.
+ """
+ assert self.stream_closed_by is not None
+
+ if self.stream_closed_by == StreamClosedBy.SEND_RST_STREAM:
+ raise StreamClosedError(self.stream_id)
+ else:
+ raise ProtocolError("Attempted to push on closed stream.")
+
+ def send_push_on_closed_stream(self, previous_state):
+ """
+ Called when an attempt is made to push on an already-closed stream.
+
+ This essentially overrides the standard logic by providing a more
+ useful error message. It's necessary because simply indicating that the
+ stream is closed is not enough: there is now a new stream that is not
+ allowed to be there. The only recourse is to tear the whole connection
+ down.
+ """
+ raise ProtocolError("Attempted to push on closed stream.")
+
+ def send_informational_response(self, previous_state):
+ """
+ Called when an informational header block is sent (that is, a block
+ where the :status header has a 1XX value).
+
+ Only enforces that these are sent *before* final headers are sent.
+ """
+ if self.headers_sent:
+ raise ProtocolError("Informational response after final response")
+
+ event = _ResponseSent()
+ return [event]
+
+ def recv_informational_response(self, previous_state):
+ """
+ Called when an informational header block is received (that is, a block
+ where the :status header has a 1XX value).
+ """
+ if self.headers_received:
+ raise ProtocolError("Informational response after final response")
+
+ event = InformationalResponseReceived()
+ event.stream_id = self.stream_id
+ return [event]
+
+ def recv_alt_svc(self, previous_state):
+ """
+ Called when receiving an ALTSVC frame.
+
+ RFC 7838 allows us to receive ALTSVC frames at any stream state, which
+ is really absurdly overzealous. For that reason, we want to limit the
+ states in which we can actually receive it. It's really only sensible
+ to receive it after we've sent our own headers and before the server
+ has sent its header block: the server can't guarantee that we have any
+ state around after it completes its header block, and the server
+ doesn't know what origin we're talking about before we've sent ours.
+
+ For that reason, this function applies a few extra checks on both state
+ and some of the little state variables we keep around. If those suggest
+ an unreasonable situation for the ALTSVC frame to have been sent in,
+ we quietly ignore it (as RFC 7838 suggests).
+
+ This function is also *not* always called by the state machine. In some
+ states (IDLE, RESERVED_LOCAL, CLOSED) we don't bother to call it,
+ because we know the frame cannot be valid in that state (IDLE because
+ the server cannot know what origin the stream applies to, CLOSED
+ because the server cannot assume we still have state around,
+ RESERVED_LOCAL because by definition if we're in the RESERVED_LOCAL
+ state then *we* are the server).
+ """
+ # Servers can't receive ALTSVC frames, but RFC 7838 tells us to ignore
+ # them.
+ if self.client is False:
+ return []
+
+ # If we've received the response headers from the server they can't
+ # guarantee we still have any state around. Other implementations
+ # (like nghttp2) ignore ALTSVC in this state, so we will too.
+ if self.headers_received:
+ return []
+
+ # Otherwise, this is a sensible enough frame to have received. Return
+ # the event and let it get populated.
+ return [AlternativeServiceAvailable()]
+
+ def send_alt_svc(self, previous_state):
+ """
+ Called when sending an ALTSVC frame on this stream.
+
+ For consistency with the restrictions we apply on receiving ALTSVC
+ frames in ``recv_alt_svc``, we want to restrict when users can send
+ ALTSVC frames to the situations when we ourselves would accept them.
+
+ That means: when we are a server, when we have received the request
+ headers, and when we have not yet sent our own response headers.
+ """
+ # We should not send ALTSVC after we've sent response headers, as the
+ # client may have disposed of its state.
+ if self.headers_sent:
+ raise ProtocolError(
+ "Cannot send ALTSVC after sending response headers."
+ )
+
+ return
+
+
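+# A minimal illustrative sketch, not part of the h2 API: driving the state
+# machine above directly.  Sending headers on an idle stream runs the
+# ``request_sent`` side effect and moves the stream to OPEN, per the
+# ``_transitions`` table defined below.  The helper name is hypothetical.
+def _example_drive_stream_state_machine():
+    machine = H2StreamStateMachine(stream_id=1)
+    events = machine.process_input(StreamInputs.SEND_HEADERS)
+    assert machine.state == StreamState.OPEN
+    return events   # a single internal _RequestSent event
+
+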
+# STATE MACHINE
+#
+# The stream state machine is defined here to avoid the need to allocate it
+# repeatedly for each stream. It cannot be defined in the stream class because
+# it needs to be able to reference the callbacks defined on the class, but
+# because Python's scoping rules are weird the class object is not actually in
+# scope during the body of the class object.
+#
+# For the sake of clarity, we reproduce the RFC 7540 state machine here:
+#
+#                          +--------+
+#                  send PP |        | recv PP
+#                 ,--------|  idle  |--------.
+#                /         |        |         \
+#               v          +--------+          v
+#        +----------+          |           +----------+
+#        |          |          | send H /  |          |
+# ,------| reserved |          | recv H    | reserved |------.
+# |      | (local)  |          |           | (remote) |      |
+# |      +----------+          v           +----------+      |
+# |          |             +--------+             |          |
+# |          |     recv ES |        | send ES     |          |
+# |   send H |     ,-------|  open  |-------.     | recv H   |
+# |          |    /        |        |        \    |          |
+# |          v   v         +--------+         v   v          |
+# |      +----------+          |           +----------+      |
+# |      |   half   |          |           |   half   |      |
+# |      |  closed  |          | send R /  |  closed  |      |
+# |      | (remote) |          | recv R    | (local)  |      |
+# |      +----------+          |           +----------+      |
+# |           |                |                 |           |
+# |           | send ES /      |       recv ES / |           |
+# |           | send R /       v        send R / |           |
+# |           | recv R     +--------+   recv R   |           |
+# | send R /  `----------->|        |<-----------'  send R / |
+# | recv R                 | closed |               recv R   |
+# `----------------------->|        |<----------------------'
+#                          +--------+
+#
+# send: endpoint sends this frame
+# recv: endpoint receives this frame
+#
+# H: HEADERS frame (with implied CONTINUATIONs)
+# PP: PUSH_PROMISE frame (with implied CONTINUATIONs)
+# ES: END_STREAM flag
+# R: RST_STREAM frame
+#
+# For the purposes of this state machine we treat HEADERS and their
+# associated CONTINUATION frames as a single jumbo frame. The protocol
+# allows/requires this by preventing other frames from being interleaved in
+# between HEADERS/CONTINUATION frames. However, if a CONTINUATION frame is
+# received without a prior HEADERS frame, it *will* be passed to this state
+# machine. The state machine should always reject that frame, either as an
+# invalid transition or because the stream is closed.
+#
+# There is a confusing relationship around PUSH_PROMISE frames. The state
+# machine above considers them to be frames belonging to the new stream,
+# which is *somewhat* true. However, they are sent with the stream ID of
+# their related stream, and are only sendable in some cases.
+# For this reason, our state machine implementation below allows for
+# PUSH_PROMISE frames both in the IDLE state (as in the diagram), but also
+# in the OPEN, HALF_CLOSED_LOCAL, and HALF_CLOSED_REMOTE states.
+# Essentially, for hyper-h2, PUSH_PROMISE frames are effectively sent on
+# two streams.
+#
+# The _transitions dictionary contains a mapping of tuples of
+# (state, input) to tuples of (side_effect_function, end_state). This
+# map contains all allowed transitions: anything not in this map is
+# invalid and immediately causes a transition to ``closed``.
+_transitions = {
+ # State: idle
+ (StreamState.IDLE, StreamInputs.SEND_HEADERS):
+ (H2StreamStateMachine.request_sent, StreamState.OPEN),
+ (StreamState.IDLE, StreamInputs.RECV_HEADERS):
+ (H2StreamStateMachine.request_received, StreamState.OPEN),
+ (StreamState.IDLE, StreamInputs.RECV_DATA):
+ (H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED),
+ (StreamState.IDLE, StreamInputs.SEND_PUSH_PROMISE):
+ (H2StreamStateMachine.send_new_pushed_stream,
+ StreamState.RESERVED_LOCAL),
+ (StreamState.IDLE, StreamInputs.RECV_PUSH_PROMISE):
+ (H2StreamStateMachine.recv_new_pushed_stream,
+ StreamState.RESERVED_REMOTE),
+ (StreamState.IDLE, StreamInputs.RECV_ALTERNATIVE_SERVICE):
+ (None, StreamState.IDLE),
+ (StreamState.IDLE, StreamInputs.UPGRADE_CLIENT):
+ (H2StreamStateMachine.request_sent, StreamState.HALF_CLOSED_LOCAL),
+ (StreamState.IDLE, StreamInputs.UPGRADE_SERVER):
+ (H2StreamStateMachine.request_received,
+ StreamState.HALF_CLOSED_REMOTE),
+
+ # State: reserved local
+ (StreamState.RESERVED_LOCAL, StreamInputs.SEND_HEADERS):
+ (H2StreamStateMachine.response_sent, StreamState.HALF_CLOSED_REMOTE),
+ (StreamState.RESERVED_LOCAL, StreamInputs.RECV_DATA):
+ (H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED),
+ (StreamState.RESERVED_LOCAL, StreamInputs.SEND_WINDOW_UPDATE):
+ (None, StreamState.RESERVED_LOCAL),
+ (StreamState.RESERVED_LOCAL, StreamInputs.RECV_WINDOW_UPDATE):
+ (H2StreamStateMachine.window_updated, StreamState.RESERVED_LOCAL),
+ (StreamState.RESERVED_LOCAL, StreamInputs.SEND_RST_STREAM):
+ (H2StreamStateMachine.send_reset_stream, StreamState.CLOSED),
+ (StreamState.RESERVED_LOCAL, StreamInputs.RECV_RST_STREAM):
+ (H2StreamStateMachine.stream_reset, StreamState.CLOSED),
+ (StreamState.RESERVED_LOCAL, StreamInputs.SEND_ALTERNATIVE_SERVICE):
+ (H2StreamStateMachine.send_alt_svc, StreamState.RESERVED_LOCAL),
+ (StreamState.RESERVED_LOCAL, StreamInputs.RECV_ALTERNATIVE_SERVICE):
+ (None, StreamState.RESERVED_LOCAL),
+
+ # State: reserved remote
+ (StreamState.RESERVED_REMOTE, StreamInputs.RECV_HEADERS):
+ (H2StreamStateMachine.response_received,
+ StreamState.HALF_CLOSED_LOCAL),
+ (StreamState.RESERVED_REMOTE, StreamInputs.RECV_DATA):
+ (H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED),
+ (StreamState.RESERVED_REMOTE, StreamInputs.SEND_WINDOW_UPDATE):
+ (None, StreamState.RESERVED_REMOTE),
+ (StreamState.RESERVED_REMOTE, StreamInputs.RECV_WINDOW_UPDATE):
+ (H2StreamStateMachine.window_updated, StreamState.RESERVED_REMOTE),
+ (StreamState.RESERVED_REMOTE, StreamInputs.SEND_RST_STREAM):
+ (H2StreamStateMachine.send_reset_stream, StreamState.CLOSED),
+ (StreamState.RESERVED_REMOTE, StreamInputs.RECV_RST_STREAM):
+ (H2StreamStateMachine.stream_reset, StreamState.CLOSED),
+ (StreamState.RESERVED_REMOTE, StreamInputs.RECV_ALTERNATIVE_SERVICE):
+ (H2StreamStateMachine.recv_alt_svc, StreamState.RESERVED_REMOTE),
+
+ # State: open
+ (StreamState.OPEN, StreamInputs.SEND_HEADERS):
+ (H2StreamStateMachine.response_sent, StreamState.OPEN),
+ (StreamState.OPEN, StreamInputs.RECV_HEADERS):
+ (H2StreamStateMachine.response_received, StreamState.OPEN),
+ (StreamState.OPEN, StreamInputs.SEND_DATA):
+ (None, StreamState.OPEN),
+ (StreamState.OPEN, StreamInputs.RECV_DATA):
+ (H2StreamStateMachine.data_received, StreamState.OPEN),
+ (StreamState.OPEN, StreamInputs.SEND_END_STREAM):
+ (None, StreamState.HALF_CLOSED_LOCAL),
+ (StreamState.OPEN, StreamInputs.RECV_END_STREAM):
+ (H2StreamStateMachine.stream_half_closed,
+ StreamState.HALF_CLOSED_REMOTE),
+ (StreamState.OPEN, StreamInputs.SEND_WINDOW_UPDATE):
+ (None, StreamState.OPEN),
+ (StreamState.OPEN, StreamInputs.RECV_WINDOW_UPDATE):
+ (H2StreamStateMachine.window_updated, StreamState.OPEN),
+ (StreamState.OPEN, StreamInputs.SEND_RST_STREAM):
+ (H2StreamStateMachine.send_reset_stream, StreamState.CLOSED),
+ (StreamState.OPEN, StreamInputs.RECV_RST_STREAM):
+ (H2StreamStateMachine.stream_reset, StreamState.CLOSED),
+ (StreamState.OPEN, StreamInputs.SEND_PUSH_PROMISE):
+ (H2StreamStateMachine.send_push_promise, StreamState.OPEN),
+ (StreamState.OPEN, StreamInputs.RECV_PUSH_PROMISE):
+ (H2StreamStateMachine.recv_push_promise, StreamState.OPEN),
+ (StreamState.OPEN, StreamInputs.SEND_INFORMATIONAL_HEADERS):
+ (H2StreamStateMachine.send_informational_response, StreamState.OPEN),
+ (StreamState.OPEN, StreamInputs.RECV_INFORMATIONAL_HEADERS):
+ (H2StreamStateMachine.recv_informational_response, StreamState.OPEN),
+ (StreamState.OPEN, StreamInputs.SEND_ALTERNATIVE_SERVICE):
+ (H2StreamStateMachine.send_alt_svc, StreamState.OPEN),
+ (StreamState.OPEN, StreamInputs.RECV_ALTERNATIVE_SERVICE):
+ (H2StreamStateMachine.recv_alt_svc, StreamState.OPEN),
+
+ # State: half-closed remote
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_HEADERS):
+ (H2StreamStateMachine.response_sent, StreamState.HALF_CLOSED_REMOTE),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_HEADERS):
+ (H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_DATA):
+ (None, StreamState.HALF_CLOSED_REMOTE),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_DATA):
+ (H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_END_STREAM):
+ (H2StreamStateMachine.send_end_stream, StreamState.CLOSED),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_WINDOW_UPDATE):
+ (None, StreamState.HALF_CLOSED_REMOTE),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_WINDOW_UPDATE):
+ (H2StreamStateMachine.window_updated, StreamState.HALF_CLOSED_REMOTE),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_RST_STREAM):
+ (H2StreamStateMachine.send_reset_stream, StreamState.CLOSED),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_RST_STREAM):
+ (H2StreamStateMachine.stream_reset, StreamState.CLOSED),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_PUSH_PROMISE):
+ (H2StreamStateMachine.send_push_promise,
+ StreamState.HALF_CLOSED_REMOTE),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_PUSH_PROMISE):
+ (H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_INFORMATIONAL_HEADERS):
+ (H2StreamStateMachine.send_informational_response,
+ StreamState.HALF_CLOSED_REMOTE),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_ALTERNATIVE_SERVICE):
+ (H2StreamStateMachine.send_alt_svc, StreamState.HALF_CLOSED_REMOTE),
+ (StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_ALTERNATIVE_SERVICE):
+ (H2StreamStateMachine.recv_alt_svc, StreamState.HALF_CLOSED_REMOTE),
+
+ # State: half-closed local
+ (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_HEADERS):
+ (H2StreamStateMachine.response_received,
+ StreamState.HALF_CLOSED_LOCAL),
+ (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_DATA):
+ (H2StreamStateMachine.data_received, StreamState.HALF_CLOSED_LOCAL),
+ (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_END_STREAM):
+ (H2StreamStateMachine.stream_ended, StreamState.CLOSED),
+ (StreamState.HALF_CLOSED_LOCAL, StreamInputs.SEND_WINDOW_UPDATE):
+ (None, StreamState.HALF_CLOSED_LOCAL),
+ (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_WINDOW_UPDATE):
+ (H2StreamStateMachine.window_updated, StreamState.HALF_CLOSED_LOCAL),
+ (StreamState.HALF_CLOSED_LOCAL, StreamInputs.SEND_RST_STREAM):
+ (H2StreamStateMachine.send_reset_stream, StreamState.CLOSED),
+ (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_RST_STREAM):
+ (H2StreamStateMachine.stream_reset, StreamState.CLOSED),
+ (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_PUSH_PROMISE):
+ (H2StreamStateMachine.recv_push_promise,
+ StreamState.HALF_CLOSED_LOCAL),
+ (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_INFORMATIONAL_HEADERS):
+ (H2StreamStateMachine.recv_informational_response,
+ StreamState.HALF_CLOSED_LOCAL),
+ (StreamState.HALF_CLOSED_LOCAL, StreamInputs.SEND_ALTERNATIVE_SERVICE):
+ (H2StreamStateMachine.send_alt_svc, StreamState.HALF_CLOSED_LOCAL),
+ (StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_ALTERNATIVE_SERVICE):
+ (H2StreamStateMachine.recv_alt_svc, StreamState.HALF_CLOSED_LOCAL),
+
+ # State: closed
+ (StreamState.CLOSED, StreamInputs.RECV_END_STREAM):
+ (None, StreamState.CLOSED),
+ (StreamState.CLOSED, StreamInputs.RECV_ALTERNATIVE_SERVICE):
+ (None, StreamState.CLOSED),
+
+ # RFC 7540 Section 5.1 defines how the end point should react when
+ # receiving a frame on a closed stream with the following statements:
+ #
+ # > An endpoint that receives any frame other than PRIORITY after receiving
+ # > a RST_STREAM MUST treat that as a stream error of type STREAM_CLOSED.
+ # > An endpoint that receives any frames after receiving a frame with the
+ # > END_STREAM flag set MUST treat that as a connection error of type
+ # > STREAM_CLOSED.
+ (StreamState.CLOSED, StreamInputs.RECV_HEADERS):
+ (H2StreamStateMachine.recv_on_closed_stream, StreamState.CLOSED),
+ (StreamState.CLOSED, StreamInputs.RECV_DATA):
+ (H2StreamStateMachine.recv_on_closed_stream, StreamState.CLOSED),
+
+ # > WINDOW_UPDATE or RST_STREAM frames can be received in this state
+# > for a short period after a DATA or HEADERS frame containing an
+ # > END_STREAM flag is sent, as instructed in RFC 7540 Section 5.1. But we
+ # > don't have access to a clock so we just always allow it.
+ (StreamState.CLOSED, StreamInputs.RECV_WINDOW_UPDATE):
+ (None, StreamState.CLOSED),
+ (StreamState.CLOSED, StreamInputs.RECV_RST_STREAM):
+ (None, StreamState.CLOSED),
+
+ # > A receiver MUST treat the receipt of a PUSH_PROMISE on a stream that is
+ # > neither "open" nor "half-closed (local)" as a connection error of type
+ # > PROTOCOL_ERROR.
+ (StreamState.CLOSED, StreamInputs.RECV_PUSH_PROMISE):
+ (H2StreamStateMachine.recv_push_on_closed_stream, StreamState.CLOSED),
+
+ # Also, users should be forbidden from sending on closed streams.
+ (StreamState.CLOSED, StreamInputs.SEND_HEADERS):
+ (H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED),
+ (StreamState.CLOSED, StreamInputs.SEND_PUSH_PROMISE):
+ (H2StreamStateMachine.send_push_on_closed_stream, StreamState.CLOSED),
+ (StreamState.CLOSED, StreamInputs.SEND_RST_STREAM):
+ (H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED),
+ (StreamState.CLOSED, StreamInputs.SEND_DATA):
+ (H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED),
+ (StreamState.CLOSED, StreamInputs.SEND_WINDOW_UPDATE):
+ (H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED),
+ (StreamState.CLOSED, StreamInputs.SEND_END_STREAM):
+ (H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED),
+}
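+
+# An illustrative lookup (a sketch for exposition, not part of upstream
+# hyper-h2): this is roughly how the state machine consults the table above
+# when processing an input. A missing key means the input is invalid and the
+# stream is forcibly closed.
+#
+#     >>> func, target = _transitions[
+#     ...     (StreamState.IDLE, StreamInputs.SEND_HEADERS)]
+#     >>> func is H2StreamStateMachine.request_sent
+#     True
+#     >>> target is StreamState.OPEN
+#     True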
+
+
+class H2Stream(object):
+ """
+ A low-level HTTP/2 stream object. This handles building and receiving
+ frames and maintains per-stream state.
+
+    This wraps an HTTP/2 stream state machine implementation, ensuring that
+ frames can only be sent/received when the stream is in a valid state.
+ Attempts to create frames that cannot be sent will raise a
+ ``ProtocolError``.
+ """
+ def __init__(self,
+ stream_id,
+ config,
+ inbound_window_size,
+ outbound_window_size):
+ self.state_machine = H2StreamStateMachine(stream_id)
+ self.stream_id = stream_id
+ self.max_outbound_frame_size = None
+ self.request_method = None
+
+ # The current value of the outbound stream flow control window
+ self.outbound_flow_control_window = outbound_window_size
+
+ # The flow control manager.
+ self._inbound_window_manager = WindowManager(inbound_window_size)
+
+ # The expected content length, if any.
+ self._expected_content_length = None
+
+ # The actual received content length. Always tracked.
+ self._actual_content_length = 0
+
+ # The authority we believe this stream belongs to.
+ self._authority = None
+
+ # The configuration for this stream.
+ self.config = config
+
+ def __repr__(self):
+ return "<%s id:%d state:%r>" % (
+ type(self).__name__,
+ self.stream_id,
+ self.state_machine.state
+ )
+
+ @property
+ def inbound_flow_control_window(self):
+ """
+ The size of the inbound flow control window for the stream. This is
+ rarely publicly useful: instead, use :meth:`remote_flow_control_window
+        <h2.stream.H2Stream.remote_flow_control_window>`. This property
+        exists largely to provide convenient access to this data.
+ """
+ return self._inbound_window_manager.current_window_size
+
+ @property
+ def open(self):
+ """
+ Whether the stream is 'open' in any sense: that is, whether it counts
+ against the number of concurrent streams.
+ """
+ # RFC 7540 Section 5.1.2 defines 'open' for this purpose to mean either
+ # the OPEN state or either of the HALF_CLOSED states. Perplexingly,
+ # this excludes the reserved states.
+ # For more detail on why we're doing this in this slightly weird way,
+ # see the comment on ``STREAM_OPEN`` at the top of the file.
+ return STREAM_OPEN[self.state_machine.state]
+
+ @property
+ def closed(self):
+ """
+ Whether the stream is closed.
+ """
+ return self.state_machine.state == StreamState.CLOSED
+
+ @property
+ def closed_by(self):
+ """
+ Returns how the stream was closed, as one of StreamClosedBy.
+ """
+ return self.state_machine.stream_closed_by
+
+ def upgrade(self, client_side):
+ """
+ Called by the connection to indicate that this stream is the initial
+ request/response of an upgraded connection. Places the stream into an
+ appropriate state.
+ """
+ self.config.logger.debug("Upgrading %r", self)
+
+ assert self.stream_id == 1
+ input_ = (
+ StreamInputs.UPGRADE_CLIENT if client_side
+ else StreamInputs.UPGRADE_SERVER
+ )
+
+ # This may return events, we deliberately don't want them.
+ self.state_machine.process_input(input_)
+ return
+
+ def send_headers(self, headers, encoder, end_stream=False):
+ """
+ Returns a list of HEADERS/CONTINUATION frames to emit as either headers
+ or trailers.
+ """
+ self.config.logger.debug("Send headers %s on %r", headers, self)
+
+ # Because encoding headers makes an irreversible change to the header
+ # compression context, we make the state transition before we encode
+ # them.
+
+ # First, check if we're a client. If we are, no problem: if we aren't,
+ # we need to scan the header block to see if this is an informational
+ # response.
+ input_ = StreamInputs.SEND_HEADERS
+ if ((not self.state_machine.client) and
+ is_informational_response(headers)):
+ if end_stream:
+ raise ProtocolError(
+ "Cannot set END_STREAM on informational responses."
+ )
+
+ input_ = StreamInputs.SEND_INFORMATIONAL_HEADERS
+
+ events = self.state_machine.process_input(input_)
+
+ hf = HeadersFrame(self.stream_id)
+ hdr_validation_flags = self._build_hdr_validation_flags(events)
+ frames = self._build_headers_frames(
+ headers, encoder, hf, hdr_validation_flags
+ )
+
+ if end_stream:
+ # Not a bug: the END_STREAM flag is valid on the initial HEADERS
+ # frame, not the CONTINUATION frames that follow.
+ self.state_machine.process_input(StreamInputs.SEND_END_STREAM)
+ frames[0].flags.add('END_STREAM')
+
+ if self.state_machine.trailers_sent and not end_stream:
+ raise ProtocolError("Trailers must have END_STREAM set.")
+
+ if self.state_machine.client and self._authority is None:
+ self._authority = authority_from_headers(headers)
+
+ # store request method for _initialize_content_length
+ self.request_method = extract_method_header(headers)
+
+ return frames
+
+ def push_stream_in_band(self, related_stream_id, headers, encoder):
+ """
+ Returns a list of PUSH_PROMISE/CONTINUATION frames to emit as a pushed
+ stream header. Called on the stream that has the PUSH_PROMISE frame
+ sent on it.
+ """
+ self.config.logger.debug("Push stream %r", self)
+
+ # Because encoding headers makes an irreversible change to the header
+ # compression context, we make the state transition *first*.
+
+ events = self.state_machine.process_input(
+ StreamInputs.SEND_PUSH_PROMISE
+ )
+
+ ppf = PushPromiseFrame(self.stream_id)
+ ppf.promised_stream_id = related_stream_id
+ hdr_validation_flags = self._build_hdr_validation_flags(events)
+ frames = self._build_headers_frames(
+ headers, encoder, ppf, hdr_validation_flags
+ )
+
+ return frames
+
+ def locally_pushed(self):
+ """
+ Mark this stream as one that was pushed by this peer. Must be called
+ immediately after initialization. Sends no frames, simply updates the
+ state machine.
+ """
+ # This does not trigger any events.
+ events = self.state_machine.process_input(
+ StreamInputs.SEND_PUSH_PROMISE
+ )
+ assert not events
+ return []
+
+ def send_data(self, data, end_stream=False, pad_length=None):
+ """
+ Prepare some data frames. Optionally end the stream.
+
+ .. warning:: Does not perform flow control checks.
+ """
+ self.config.logger.debug(
+ "Send data on %r with end stream set to %s", self, end_stream
+ )
+
+ self.state_machine.process_input(StreamInputs.SEND_DATA)
+
+ df = DataFrame(self.stream_id)
+ df.data = data
+ if end_stream:
+ self.state_machine.process_input(StreamInputs.SEND_END_STREAM)
+ df.flags.add('END_STREAM')
+ if pad_length is not None:
+ df.flags.add('PADDED')
+ df.pad_length = pad_length
+
+ # Subtract flow_controlled_length to account for possible padding
+ self.outbound_flow_control_window -= df.flow_controlled_length
+ assert self.outbound_flow_control_window >= 0
+
+ return [df]
+
+ def end_stream(self):
+ """
+ End a stream without sending data.
+ """
+ self.config.logger.debug("End stream %r", self)
+
+ self.state_machine.process_input(StreamInputs.SEND_END_STREAM)
+ df = DataFrame(self.stream_id)
+ df.flags.add('END_STREAM')
+ return [df]
+
+ def advertise_alternative_service(self, field_value):
+ """
+ Advertise an RFC 7838 alternative service. The semantics of this are
+ better documented in the ``H2Connection`` class.
+ """
+ self.config.logger.debug(
+ "Advertise alternative service of %r for %r", field_value, self
+ )
+ self.state_machine.process_input(StreamInputs.SEND_ALTERNATIVE_SERVICE)
+ asf = AltSvcFrame(self.stream_id)
+ asf.field = field_value
+ return [asf]
+
+ def increase_flow_control_window(self, increment):
+ """
+ Increase the size of the flow control window for the remote side.
+ """
+ self.config.logger.debug(
+ "Increase flow control window for %r by %d",
+ self, increment
+ )
+ self.state_machine.process_input(StreamInputs.SEND_WINDOW_UPDATE)
+ self._inbound_window_manager.window_opened(increment)
+
+ wuf = WindowUpdateFrame(self.stream_id)
+ wuf.window_increment = increment
+ return [wuf]
+
+ def receive_push_promise_in_band(self,
+ promised_stream_id,
+ headers,
+ header_encoding):
+ """
+ Receives a push promise frame sent on this stream, pushing a remote
+ stream. This is called on the stream that has the PUSH_PROMISE sent
+ on it.
+ """
+ self.config.logger.debug(
+ "Receive Push Promise on %r for remote stream %d",
+ self, promised_stream_id
+ )
+ events = self.state_machine.process_input(
+ StreamInputs.RECV_PUSH_PROMISE
+ )
+ events[0].pushed_stream_id = promised_stream_id
+
+ hdr_validation_flags = self._build_hdr_validation_flags(events)
+ events[0].headers = self._process_received_headers(
+ headers, hdr_validation_flags, header_encoding
+ )
+ return [], events
+
+ def remotely_pushed(self, pushed_headers):
+ """
+ Mark this stream as one that was pushed by the remote peer. Must be
+ called immediately after initialization. Sends no frames, simply
+ updates the state machine.
+ """
+ self.config.logger.debug("%r pushed by remote peer", self)
+ events = self.state_machine.process_input(
+ StreamInputs.RECV_PUSH_PROMISE
+ )
+ self._authority = authority_from_headers(pushed_headers)
+ return [], events
+
+ def receive_headers(self, headers, end_stream, header_encoding):
+ """
+ Receive a set of headers (or trailers).
+ """
+ if is_informational_response(headers):
+ if end_stream:
+ raise ProtocolError(
+ "Cannot set END_STREAM on informational responses"
+ )
+ input_ = StreamInputs.RECV_INFORMATIONAL_HEADERS
+ else:
+ input_ = StreamInputs.RECV_HEADERS
+
+ events = self.state_machine.process_input(input_)
+
+ if end_stream:
+ es_events = self.state_machine.process_input(
+ StreamInputs.RECV_END_STREAM
+ )
+ events[0].stream_ended = es_events[0]
+ events += es_events
+
+ self._initialize_content_length(headers)
+
+ if isinstance(events[0], TrailersReceived):
+ if not end_stream:
+ raise ProtocolError("Trailers must have END_STREAM set")
+
+ hdr_validation_flags = self._build_hdr_validation_flags(events)
+ events[0].headers = self._process_received_headers(
+ headers, hdr_validation_flags, header_encoding
+ )
+ return [], events
+
+ def receive_data(self, data, end_stream, flow_control_len):
+ """
+ Receive some data.
+ """
+ self.config.logger.debug(
+ "Receive data on %r with end stream %s and flow control length "
+ "set to %d", self, end_stream, flow_control_len
+ )
+ events = self.state_machine.process_input(StreamInputs.RECV_DATA)
+ self._inbound_window_manager.window_consumed(flow_control_len)
+ self._track_content_length(len(data), end_stream)
+
+ if end_stream:
+ es_events = self.state_machine.process_input(
+ StreamInputs.RECV_END_STREAM
+ )
+ events[0].stream_ended = es_events[0]
+ events.extend(es_events)
+
+ events[0].data = data
+ events[0].flow_controlled_length = flow_control_len
+ return [], events
+
+ def receive_window_update(self, increment):
+ """
+ Handle a WINDOW_UPDATE increment.
+ """
+ self.config.logger.debug(
+ "Receive Window Update on %r for increment of %d",
+ self, increment
+ )
+ events = self.state_machine.process_input(
+ StreamInputs.RECV_WINDOW_UPDATE
+ )
+ frames = []
+
+ # If we encounter a problem with incrementing the flow control window,
+ # this should be treated as a *stream* error, not a *connection* error.
+ # That means we need to catch the error and forcibly close the stream.
+ if events:
+ events[0].delta = increment
+ try:
+ self.outbound_flow_control_window = guard_increment_window(
+ self.outbound_flow_control_window,
+ increment
+ )
+ except FlowControlError:
+ # Ok, this is bad. We're going to need to perform a local
+ # reset.
+ event = StreamReset()
+ event.stream_id = self.stream_id
+ event.error_code = ErrorCodes.FLOW_CONTROL_ERROR
+ event.remote_reset = False
+
+ events = [event]
+ frames = self.reset_stream(event.error_code)
+
+ return frames, events
+
+ def receive_continuation(self):
+ """
+ A naked CONTINUATION frame has been received. This is always an error,
+        but which error it is depends on the state of the stream, and the input
+        must still be run through the state machine, so we need to handle it.
+ """
+ self.config.logger.debug("Receive Continuation frame on %r", self)
+ self.state_machine.process_input(
+ StreamInputs.RECV_CONTINUATION
+ )
+ assert False, "Should not be reachable"
+
+ def receive_alt_svc(self, frame):
+ """
+ An Alternative Service frame was received on the stream. This frame
+ inherits the origin associated with this stream.
+ """
+ self.config.logger.debug(
+ "Receive Alternative Service frame on stream %r", self
+ )
+
+ # If the origin is present, RFC 7838 says we have to ignore it.
+ if frame.origin:
+ return [], []
+
+ events = self.state_machine.process_input(
+ StreamInputs.RECV_ALTERNATIVE_SERVICE
+ )
+
+ # There are lots of situations where we want to ignore the ALTSVC
+ # frame. If we need to pay attention, we'll have an event and should
+ # fill it out.
+ if events:
+ assert isinstance(events[0], AlternativeServiceAvailable)
+ events[0].origin = self._authority
+ events[0].field_value = frame.field
+
+ return [], events
+
+ def reset_stream(self, error_code=0):
+ """
+ Close the stream locally. Reset the stream with an error code.
+ """
+ self.config.logger.debug(
+ "Local reset %r with error code: %d", self, error_code
+ )
+ self.state_machine.process_input(StreamInputs.SEND_RST_STREAM)
+
+ rsf = RstStreamFrame(self.stream_id)
+ rsf.error_code = error_code
+ return [rsf]
+
+ def stream_reset(self, frame):
+ """
+ Handle a stream being reset remotely.
+ """
+ self.config.logger.debug(
+ "Remote reset %r with error code: %d", self, frame.error_code
+ )
+ events = self.state_machine.process_input(StreamInputs.RECV_RST_STREAM)
+
+ if events:
+ # We don't fire an event if this stream is already closed.
+ events[0].error_code = _error_code_from_int(frame.error_code)
+
+ return [], events
+
+ def acknowledge_received_data(self, acknowledged_size):
+ """
+ The user has informed us that they've processed some amount of data
+ that was received on this stream. Pass that to the window manager and
+ potentially return some WindowUpdate frames.
+ """
+ self.config.logger.debug(
+ "Acknowledge received data with size %d on %r",
+ acknowledged_size, self
+ )
+ increment = self._inbound_window_manager.process_bytes(
+ acknowledged_size
+ )
+ if increment:
+ f = WindowUpdateFrame(self.stream_id)
+ f.window_increment = increment
+ return [f]
+
+ return []
+
+ def _build_hdr_validation_flags(self, events):
+ """
+ Constructs a set of header validation flags for use when normalizing
+ and validating header blocks.
+ """
+ is_trailer = isinstance(
+ events[0], (_TrailersSent, TrailersReceived)
+ )
+ is_response_header = isinstance(
+ events[0],
+ (
+ _ResponseSent,
+ ResponseReceived,
+ InformationalResponseReceived
+ )
+ )
+ is_push_promise = isinstance(
+ events[0], (PushedStreamReceived, _PushedRequestSent)
+ )
+
+ return HeaderValidationFlags(
+ is_client=self.state_machine.client,
+ is_trailer=is_trailer,
+ is_response_header=is_response_header,
+ is_push_promise=is_push_promise,
+ )
+
+ def _build_headers_frames(self,
+ headers,
+ encoder,
+ first_frame,
+ hdr_validation_flags):
+ """
+ Helper method to build headers or push promise frames.
+ """
+ # We need to lowercase the header names, and to ensure that secure
+ # header fields are kept out of compression contexts.
+ if self.config.normalize_outbound_headers:
+ headers = normalize_outbound_headers(
+ headers, hdr_validation_flags
+ )
+ if self.config.validate_outbound_headers:
+ headers = validate_outbound_headers(
+ headers, hdr_validation_flags
+ )
+
+ encoded_headers = encoder.encode(headers)
+
+ # Slice into blocks of max_outbound_frame_size. Be careful with this:
+ # it only works right because we never send padded frames or priority
+ # information on the frames. Revisit this if we do.
+ header_blocks = [
+ encoded_headers[i:i+self.max_outbound_frame_size]
+ for i in range(
+ 0, len(encoded_headers), self.max_outbound_frame_size
+ )
+ ]
+
+ frames = []
+ first_frame.data = header_blocks[0]
+ frames.append(first_frame)
+
+ for block in header_blocks[1:]:
+ cf = ContinuationFrame(self.stream_id)
+ cf.data = block
+ frames.append(cf)
+
+ frames[-1].flags.add('END_HEADERS')
+ return frames
+
+ def _process_received_headers(self,
+ headers,
+ header_validation_flags,
+ header_encoding):
+ """
+ When headers have been received from the remote peer, run a processing
+ pipeline on them to transform them into the appropriate form for
+ attaching to an event.
+ """
+ if self.config.normalize_inbound_headers:
+ headers = normalize_inbound_headers(
+ headers, header_validation_flags
+ )
+
+ if self.config.validate_inbound_headers:
+ headers = validate_headers(headers, header_validation_flags)
+
+ if header_encoding:
+ headers = _decode_headers(headers, header_encoding)
+
+ # The above steps are all generators, so we need to concretize the
+ # headers now.
+ return list(headers)
+
+ def _initialize_content_length(self, headers):
+ """
+ Checks the headers for a content-length header and initializes the
+ _expected_content_length field from it. It's not an error for no
+ Content-Length header to be present.
+ """
+ if self.request_method == b'HEAD':
+ self._expected_content_length = 0
+ return
+
+ for n, v in headers:
+ if n == b'content-length':
+ try:
+ self._expected_content_length = int(v, 10)
+ except ValueError:
+ raise ProtocolError(
+ "Invalid content-length header: %s" % v
+ )
+
+ return
+
+ def _track_content_length(self, length, end_stream):
+ """
+ Update the expected content length in response to data being received.
+ Validates that the appropriate amount of data is sent. Always updates
+ the received data, but only validates the length against the
+ content-length header if one was sent.
+
+ :param length: The length of the body chunk received.
+ :param end_stream: If this is the last body chunk received.
+ """
+ self._actual_content_length += length
+ actual = self._actual_content_length
+ expected = self._expected_content_length
+
+ if expected is not None:
+ if expected < actual:
+ raise InvalidBodyLengthError(expected, actual)
+
+ if end_stream and expected != actual:
+ raise InvalidBodyLengthError(expected, actual)
+
+ def _inbound_flow_control_change_from_settings(self, delta):
+ """
+ We changed SETTINGS_INITIAL_WINDOW_SIZE, which means we need to
+ update the target window size for flow control. For our flow control
+ strategy, this means we need to do two things: we need to adjust the
+ current window size, but we also need to set the target maximum window
+ size to the new value.
+ """
+ new_max_size = self._inbound_window_manager.max_window_size + delta
+ self._inbound_window_manager.window_opened(delta)
+ self._inbound_window_manager.max_window_size = new_max_size
+
+
+def _decode_headers(headers, encoding):
+ """
+ Given an iterable of header two-tuples and an encoding, decodes those
+ headers using that encoding while preserving the type of the header tuple.
+ This ensures that the use of ``HeaderTuple`` is preserved.
+ """
+ for header in headers:
+ # This function expects to work on decoded headers, which are always
+ # HeaderTuple objects.
+ assert isinstance(header, HeaderTuple)
+
+ name, value = header
+ name = name.decode(encoding)
+ value = value.decode(encoding)
+ yield header.__class__(name, value)
diff --git a/testing/web-platform/tests/tools/third_party/h2/h2/utilities.py b/testing/web-platform/tests/tools/third_party/h2/h2/utilities.py
new file mode 100644
index 0000000000..06c916eea7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/h2/utilities.py
@@ -0,0 +1,660 @@
+# -*- coding: utf-8 -*-
+"""
+h2/utilities
+~~~~~~~~~~~~
+
+Utility functions that do not belong in a separate module.
+"""
+import collections
+import re
+from string import whitespace
+import sys
+
+from hpack import HeaderTuple, NeverIndexedHeaderTuple
+
+from .exceptions import ProtocolError, FlowControlError
+
+UPPER_RE = re.compile(b"[A-Z]")
+
+# A set of headers that are hop-by-hop or connection-specific and thus
+# forbidden in HTTP/2. This list comes from RFC 7540 § 8.1.2.2.
+CONNECTION_HEADERS = frozenset([
+ b'connection', u'connection',
+ b'proxy-connection', u'proxy-connection',
+ b'keep-alive', u'keep-alive',
+ b'transfer-encoding', u'transfer-encoding',
+ b'upgrade', u'upgrade',
+])
+
+
+_ALLOWED_PSEUDO_HEADER_FIELDS = frozenset([
+ b':method', u':method',
+ b':scheme', u':scheme',
+ b':authority', u':authority',
+ b':path', u':path',
+ b':status', u':status',
+ b':protocol', u':protocol',
+])
+
+
+_SECURE_HEADERS = frozenset([
+ # May have basic credentials which are vulnerable to dictionary attacks.
+ b'authorization', u'authorization',
+ b'proxy-authorization', u'proxy-authorization',
+])
+
+
+_REQUEST_ONLY_HEADERS = frozenset([
+ b':scheme', u':scheme',
+ b':path', u':path',
+ b':authority', u':authority',
+ b':method', u':method',
+ b':protocol', u':protocol',
+])
+
+
+_RESPONSE_ONLY_HEADERS = frozenset([b':status', u':status'])
+
+
+# A Set of pseudo headers that are only valid if the method is
+# CONNECT, see RFC 8441 § 5
+_CONNECT_REQUEST_ONLY_HEADERS = frozenset([b':protocol', u':protocol'])
+
+
+if sys.version_info[0] == 2: # Python 2.X
+ _WHITESPACE = frozenset(whitespace)
+else: # Python 3.3+
+ _WHITESPACE = frozenset(map(ord, whitespace))
+
+
+def _secure_headers(headers, hdr_validation_flags):
+ """
+ Certain headers are at risk of being attacked during the header compression
+ phase, and so need to be kept out of header compression contexts. This
+ function automatically transforms certain specific headers into HPACK
+ never-indexed fields to ensure they don't get added to header compression
+ contexts.
+
+ This function currently implements two rules:
+
+ - 'authorization' and 'proxy-authorization' fields are automatically made
+ never-indexed.
+    - Any 'cookie' header field shorter than 20 bytes is made
+ never-indexed.
+
+ These fields are the most at-risk. These rules are inspired by Firefox
+ and nghttp2.
+ """
+ for header in headers:
+ if header[0] in _SECURE_HEADERS:
+ yield NeverIndexedHeaderTuple(*header)
+ elif header[0] in (b'cookie', u'cookie') and len(header[1]) < 20:
+ yield NeverIndexedHeaderTuple(*header)
+ else:
+ yield header
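+
+# A small illustration (sketch only, not upstream code; the header values are
+# made up): a short cookie and an authorization header both come out
+# never-indexed, while everything else passes through unchanged. The flags
+# argument is unused here, so ``None`` suffices.
+#
+#     >>> out = list(_secure_headers(
+#     ...     [(b'authorization', b'Basic dXNlcjpwdw=='),
+#     ...      (b'cookie', b'sid=1234'),
+#     ...      (b'accept', b'*/*')], None))
+#     >>> [isinstance(h, NeverIndexedHeaderTuple) for h in out]
+#     [True, True, False]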
+
+
+def extract_method_header(headers):
+ """
+ Extracts the request method from the headers list.
+ """
+ for k, v in headers:
+ if k in (b':method', u':method'):
+ if not isinstance(v, bytes):
+ return v.encode('utf-8')
+ else:
+ return v
+
+
+def is_informational_response(headers):
+ """
+ Searches a header block for a :status header to confirm that a given
+ collection of headers are an informational response. Assumes the header
+ block is well formed: that is, that the HTTP/2 special headers are first
+ in the block, and so that it can stop looking when it finds the first
+ header field whose name does not begin with a colon.
+
+ :param headers: The HTTP/2 header block.
+ :returns: A boolean indicating if this is an informational response.
+ """
+ for n, v in headers:
+ if isinstance(n, bytes):
+ sigil = b':'
+ status = b':status'
+ informational_start = b'1'
+ else:
+ sigil = u':'
+ status = u':status'
+ informational_start = u'1'
+
+ # If we find a non-special header, we're done here: stop looping.
+ if not n.startswith(sigil):
+ return False
+
+ # This isn't the status header, bail.
+ if n != status:
+ continue
+
+ # If the first digit is a 1, we've got informational headers.
+ return v.startswith(informational_start)
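+
+# A quick illustration (sketch only, not upstream code; the header values are
+# made up): only a 1xx :status marks the block as informational.
+#
+#     >>> is_informational_response([(b':status', b'103'), (b'link', b'...')])
+#     True
+#     >>> is_informational_response([(b':status', b'200'), (b'server', b'h2')])
+#     False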
+
+
+def guard_increment_window(current, increment):
+ """
+ Increments a flow control window, guarding against that window becoming too
+ large.
+
+ :param current: The current value of the flow control window.
+ :param increment: The increment to apply to that window.
+ :returns: The new value of the window.
+ :raises: ``FlowControlError``
+ """
+ # The largest value the flow control window may take.
+ LARGEST_FLOW_CONTROL_WINDOW = 2**31 - 1
+
+ new_size = current + increment
+
+ if new_size > LARGEST_FLOW_CONTROL_WINDOW:
+ raise FlowControlError(
+ "May not increment flow control window past %d" %
+ LARGEST_FLOW_CONTROL_WINDOW
+ )
+
+ return new_size
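+
+# A worked example (sketch only, not upstream code): the guard simply adds the
+# increment and enforces the 2**31 - 1 ceiling RFC 7540 places on
+# flow-control windows.
+#
+#     >>> guard_increment_window(65535, 1000)
+#     66535
+#
+# while ``guard_increment_window(2**31 - 10, 100)`` raises
+# ``FlowControlError``, because the result would exceed 2147483647.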
+
+
+def authority_from_headers(headers):
+ """
+ Given a header set, searches for the authority header and returns the
+ value.
+
+    Note that if no :authority header is present this will scan the entire
+    header set, which is potentially unwise, so it should only be called when
+    the headers belong to a client request.
+
+ :param headers: The HTTP header set.
+ :returns: The value of the authority header, or ``None``.
+ :rtype: ``bytes`` or ``None``.
+ """
+ for n, v in headers:
+ # This gets run against headers that come both from HPACK and from the
+ # user, so we may have unicode floating around in here. We only want
+ # bytes.
+ if n in (b':authority', u':authority'):
+ return v.encode('utf-8') if not isinstance(v, bytes) else v
+
+ return None
+
+
+# Flags used by the validate_headers pipeline to determine which checks
+# should be applied to a given set of headers.
+HeaderValidationFlags = collections.namedtuple(
+ 'HeaderValidationFlags',
+ ['is_client', 'is_trailer', 'is_response_header', 'is_push_promise']
+)
+
+
+def validate_headers(headers, hdr_validation_flags):
+ """
+ Validates a header sequence against a set of constraints from RFC 7540.
+
+ :param headers: The HTTP header set.
+ :param hdr_validation_flags: An instance of HeaderValidationFlags.
+ """
+ # This validation logic is built on a sequence of generators that are
+ # iterated over to provide the final header list. This reduces some of the
+ # overhead of doing this checking. However, it's worth noting that this
+ # checking remains somewhat expensive, and attempts should be made wherever
+ # possible to reduce the time spent doing them.
+ #
+    # For example, we avoid tuple unpacking in loops because it represents a
+ # fixed cost that we don't want to spend, instead indexing into the header
+ # tuples.
+ headers = _reject_uppercase_header_fields(
+ headers, hdr_validation_flags
+ )
+ headers = _reject_surrounding_whitespace(
+ headers, hdr_validation_flags
+ )
+ headers = _reject_te(
+ headers, hdr_validation_flags
+ )
+ headers = _reject_connection_header(
+ headers, hdr_validation_flags
+ )
+ headers = _reject_pseudo_header_fields(
+ headers, hdr_validation_flags
+ )
+ headers = _check_host_authority_header(
+ headers, hdr_validation_flags
+ )
+ headers = _check_path_header(headers, hdr_validation_flags)
+
+ return headers
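+
+# A usage note with a sketch (illustrative only, not upstream code; the header
+# block is made up): the pipeline is built from generators, so nothing is
+# checked until the result is iterated, e.g. by the ``list()`` call in
+# ``H2Stream._process_received_headers``.
+#
+#     >>> flags = HeaderValidationFlags(
+#     ...     is_client=False, is_trailer=False,
+#     ...     is_response_header=False, is_push_promise=False)
+#     >>> checked = validate_headers([(b'Bad-Name', b'x')], flags)  # no error yet
+#     >>> list(checked)
+#     Traceback (most recent call last):
+#       ...
+#     h2.exceptions.ProtocolError: Received uppercase header name b'Bad-Name'.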
+
+
+def _reject_uppercase_header_fields(headers, hdr_validation_flags):
+ """
+ Raises a ProtocolError if any uppercase character is found in a header
+ block.
+ """
+ for header in headers:
+ if UPPER_RE.search(header[0]):
+ raise ProtocolError(
+ "Received uppercase header name %s." % header[0])
+ yield header
+
+
+def _reject_surrounding_whitespace(headers, hdr_validation_flags):
+ """
+ Raises a ProtocolError if any header name or value is surrounded by
+ whitespace characters.
+ """
+ # For compatibility with RFC 7230 header fields, we need to allow the field
+ # value to be an empty string. This is ludicrous, but technically allowed.
+ # The field name may not be empty, though, so we can safely assume that it
+ # must have at least one character in it and throw exceptions if it
+ # doesn't.
+ for header in headers:
+ if header[0][0] in _WHITESPACE or header[0][-1] in _WHITESPACE:
+ raise ProtocolError(
+ "Received header name surrounded by whitespace %r" % header[0])
+ if header[1] and ((header[1][0] in _WHITESPACE) or
+ (header[1][-1] in _WHITESPACE)):
+ raise ProtocolError(
+ "Received header value surrounded by whitespace %r" % header[1]
+ )
+ yield header
+
+
+def _reject_te(headers, hdr_validation_flags):
+ """
+ Raises a ProtocolError if the TE header is present in a header block and
+ its value is anything other than "trailers".
+ """
+ for header in headers:
+ if header[0] in (b'te', u'te'):
+ if header[1].lower() not in (b'trailers', u'trailers'):
+ raise ProtocolError(
+ "Invalid value for Transfer-Encoding header: %s" %
+ header[1]
+ )
+
+ yield header
+
+
+def _reject_connection_header(headers, hdr_validation_flags):
+ """
+ Raises a ProtocolError if the Connection header is present in a header
+ block.
+ """
+ for header in headers:
+ if header[0] in CONNECTION_HEADERS:
+ raise ProtocolError(
+ "Connection-specific header field present: %s." % header[0]
+ )
+
+ yield header
+
+
+def _custom_startswith(test_string, bytes_prefix, unicode_prefix):
+ """
+ Given a string that might be a bytestring or a Unicode string,
+ return True if it starts with the appropriate prefix.
+ """
+ if isinstance(test_string, bytes):
+ return test_string.startswith(bytes_prefix)
+ else:
+ return test_string.startswith(unicode_prefix)
+
+
+def _assert_header_in_set(string_header, bytes_header, header_set):
+ """
+ Given a set of header names, checks whether the string or byte version of
+    the header name is present. Raises a ProtocolError with an appropriate
+    message if it's missing.
+ """
+ if not (string_header in header_set or bytes_header in header_set):
+ raise ProtocolError(
+ "Header block missing mandatory %s header" % string_header
+ )
+
+
+def _reject_pseudo_header_fields(headers, hdr_validation_flags):
+ """
+ Raises a ProtocolError if duplicate pseudo-header fields are found in a
+ header block or if a pseudo-header field appears in a block after an
+ ordinary header field.
+
+ Raises a ProtocolError if pseudo-header fields are found in trailers.
+ """
+ seen_pseudo_header_fields = set()
+ seen_regular_header = False
+ method = None
+
+ for header in headers:
+ if _custom_startswith(header[0], b':', u':'):
+ if header[0] in seen_pseudo_header_fields:
+ raise ProtocolError(
+ "Received duplicate pseudo-header field %s" % header[0]
+ )
+
+ seen_pseudo_header_fields.add(header[0])
+
+ if seen_regular_header:
+ raise ProtocolError(
+ "Received pseudo-header field out of sequence: %s" %
+ header[0]
+ )
+
+ if header[0] not in _ALLOWED_PSEUDO_HEADER_FIELDS:
+ raise ProtocolError(
+ "Received custom pseudo-header field %s" % header[0]
+ )
+
+ if header[0] in (b':method', u':method'):
+ if not isinstance(header[1], bytes):
+ method = header[1].encode('utf-8')
+ else:
+ method = header[1]
+
+ else:
+ seen_regular_header = True
+
+ yield header
+
+ # Check the pseudo-headers we got to confirm they're acceptable.
+ _check_pseudo_header_field_acceptability(
+ seen_pseudo_header_fields, method, hdr_validation_flags
+ )
+
+
+def _check_pseudo_header_field_acceptability(pseudo_headers,
+ method,
+ hdr_validation_flags):
+ """
+ Given the set of pseudo-headers present in a header block and the
+ validation flags, confirms that RFC 7540 allows them.
+ """
+ # Pseudo-header fields MUST NOT appear in trailers - RFC 7540 § 8.1.2.1
+ if hdr_validation_flags.is_trailer and pseudo_headers:
+ raise ProtocolError(
+ "Received pseudo-header in trailer %s" % pseudo_headers
+ )
+
+ # If ':status' pseudo-header is not there in a response header, reject it.
+ # Similarly, if ':path', ':method', or ':scheme' are not there in a request
+ # header, reject it. Additionally, if a response contains any request-only
+ # headers or vice-versa, reject it.
+ # Relevant RFC section: RFC 7540 § 8.1.2.4
+ # https://tools.ietf.org/html/rfc7540#section-8.1.2.4
+ if hdr_validation_flags.is_response_header:
+ _assert_header_in_set(u':status', b':status', pseudo_headers)
+ invalid_response_headers = pseudo_headers & _REQUEST_ONLY_HEADERS
+ if invalid_response_headers:
+ raise ProtocolError(
+ "Encountered request-only headers %s" %
+ invalid_response_headers
+ )
+ elif (not hdr_validation_flags.is_response_header and
+ not hdr_validation_flags.is_trailer):
+ # This is a request, so we need to have seen :path, :method, and
+ # :scheme.
+ _assert_header_in_set(u':path', b':path', pseudo_headers)
+ _assert_header_in_set(u':method', b':method', pseudo_headers)
+ _assert_header_in_set(u':scheme', b':scheme', pseudo_headers)
+ invalid_request_headers = pseudo_headers & _RESPONSE_ONLY_HEADERS
+ if invalid_request_headers:
+ raise ProtocolError(
+ "Encountered response-only headers %s" %
+ invalid_request_headers
+ )
+ if method != b'CONNECT':
+ invalid_headers = pseudo_headers & _CONNECT_REQUEST_ONLY_HEADERS
+ if invalid_headers:
+ raise ProtocolError(
+ "Encountered connect-request-only headers %s" %
+ invalid_headers
+ )
+
+
+def _validate_host_authority_header(headers):
+ """
+ Given the :authority and Host headers from a request block that isn't
+ a trailer, check that:
+ 1. At least one of these headers is set.
+ 2. If both headers are set, they match.
+
+ :param headers: The HTTP header set.
+ :raises: ``ProtocolError``
+ """
+ # We use None as a sentinel value. Iterate over the list of headers,
+ # and record the value of these headers (if present). We don't need
+ # to worry about receiving duplicate :authority headers, as this is
+ # enforced by the _reject_pseudo_header_fields() pipeline.
+ #
+ # TODO: We should also guard against receiving duplicate Host headers,
+ # and against sending duplicate headers.
+ authority_header_val = None
+ host_header_val = None
+
+ for header in headers:
+ if header[0] in (b':authority', u':authority'):
+ authority_header_val = header[1]
+ elif header[0] in (b'host', u'host'):
+ host_header_val = header[1]
+
+ yield header
+
+ # If we have not-None values for these variables, then we know we saw
+ # the corresponding header.
+ authority_present = (authority_header_val is not None)
+ host_present = (host_header_val is not None)
+
+ # It is an error for a request header block to contain neither
+ # an :authority header nor a Host header.
+ if not authority_present and not host_present:
+ raise ProtocolError(
+ "Request header block does not have an :authority or Host header."
+ )
+
+ # If we receive both headers, they should definitely match.
+ if authority_present and host_present:
+ if authority_header_val != host_header_val:
+ raise ProtocolError(
+ "Request header block has mismatched :authority and "
+ "Host headers: %r / %r"
+ % (authority_header_val, host_header_val)
+ )
+
+
+def _check_host_authority_header(headers, hdr_validation_flags):
+ """
+ Raises a ProtocolError if a header block arrives that does not contain an
+ :authority or a Host header, or if a header block contains both fields,
+ but their values do not match.
+ """
+ # We only expect to see :authority and Host headers on request header
+ # blocks that aren't trailers, so skip this validation if this is a
+ # response header or we're looking at trailer blocks.
+ skip_validation = (
+ hdr_validation_flags.is_response_header or
+ hdr_validation_flags.is_trailer
+ )
+ if skip_validation:
+ return headers
+
+ return _validate_host_authority_header(headers)
+
+
+def _check_path_header(headers, hdr_validation_flags):
+ """
+ Raise a ProtocolError if a header block arrives or is sent that contains an
+ empty :path header.
+ """
+ def inner():
+ for header in headers:
+ if header[0] in (b':path', u':path'):
+ if not header[1]:
+ raise ProtocolError("An empty :path header is forbidden")
+
+ yield header
+
+ # We only expect to see :authority and Host headers on request header
+ # blocks that aren't trailers, so skip this validation if this is a
+ # response header or we're looking at trailer blocks.
+ skip_validation = (
+ hdr_validation_flags.is_response_header or
+ hdr_validation_flags.is_trailer
+ )
+ if skip_validation:
+ return headers
+ else:
+ return inner()
+
+
+def _lowercase_header_names(headers, hdr_validation_flags):
+ """
+ Given an iterable of header two-tuples, rebuilds that iterable with the
+ header names lowercased. This generator produces tuples that preserve the
+ original type of the header tuple for tuple and any ``HeaderTuple``.
+ """
+ for header in headers:
+ if isinstance(header, HeaderTuple):
+ yield header.__class__(header[0].lower(), header[1])
+ else:
+ yield (header[0].lower(), header[1])
+
+
+def _strip_surrounding_whitespace(headers, hdr_validation_flags):
+ """
+ Given an iterable of header two-tuples, strip both leading and trailing
+ whitespace from both header names and header values. This generator
+ produces tuples that preserve the original type of the header tuple for
+ tuple and any ``HeaderTuple``.
+ """
+ for header in headers:
+ if isinstance(header, HeaderTuple):
+ yield header.__class__(header[0].strip(), header[1].strip())
+ else:
+ yield (header[0].strip(), header[1].strip())
+
+
+def _strip_connection_headers(headers, hdr_validation_flags):
+ """
+    Strip any connection headers as per RFC 7540 § 8.1.2.2.
+ """
+ for header in headers:
+ if header[0] not in CONNECTION_HEADERS:
+ yield header
+
+
+def _check_sent_host_authority_header(headers, hdr_validation_flags):
+ """
+ Raises an InvalidHeaderBlockError if we try to send a header block
+ that does not contain an :authority or a Host header, or if
+ the header block contains both fields, but their values do not match.
+ """
+ # We only expect to see :authority and Host headers on request header
+ # blocks that aren't trailers, so skip this validation if this is a
+ # response header or we're looking at trailer blocks.
+ skip_validation = (
+ hdr_validation_flags.is_response_header or
+ hdr_validation_flags.is_trailer
+ )
+ if skip_validation:
+ return headers
+
+ return _validate_host_authority_header(headers)
+
+
+def _combine_cookie_fields(headers, hdr_validation_flags):
+ """
+ RFC 7540 § 8.1.2.5 allows HTTP/2 clients to split the Cookie header field,
+ which must normally appear only once, into multiple fields for better
+ compression. However, they MUST be joined back up again when received.
+ This normalization step applies that transform. The side-effect is that
+ all cookie fields now appear *last* in the header block.
+ """
+ # There is a problem here about header indexing. Specifically, it's
+ # possible that all these cookies are sent with different header indexing
+ # values. At this point it shouldn't matter too much, so we apply our own
+ # logic and make them never-indexed.
+ cookies = []
+ for header in headers:
+ if header[0] == b'cookie':
+ cookies.append(header[1])
+ else:
+ yield header
+ if cookies:
+ cookie_val = b'; '.join(cookies)
+ yield NeverIndexedHeaderTuple(b'cookie', cookie_val)
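+
+# A small illustration (sketch only, not upstream code; the crumbs are made
+# up): two cookie crumbs are re-joined into one never-indexed field, which
+# ends up last in the block; the flags argument is unused here.
+#
+#     >>> merged = list(_combine_cookie_fields(
+#     ...     [(b'cookie', b'a=1'), (b'x-other', b'y'), (b'cookie', b'b=2')],
+#     ...     None))
+#     >>> merged[0]
+#     (b'x-other', b'y')
+#     >>> merged[1] == (b'cookie', b'a=1; b=2')
+#     True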
+
+
+def normalize_outbound_headers(headers, hdr_validation_flags):
+ """
+ Normalizes a header sequence that we are about to send.
+
+ :param headers: The HTTP header set.
+ :param hdr_validation_flags: An instance of HeaderValidationFlags.
+ """
+ headers = _lowercase_header_names(headers, hdr_validation_flags)
+ headers = _strip_surrounding_whitespace(headers, hdr_validation_flags)
+ headers = _strip_connection_headers(headers, hdr_validation_flags)
+ headers = _secure_headers(headers, hdr_validation_flags)
+
+ return headers
+
+
+def normalize_inbound_headers(headers, hdr_validation_flags):
+ """
+ Normalizes a header sequence that we have received.
+
+ :param headers: The HTTP header set.
+ :param hdr_validation_flags: An instance of HeaderValidationFlags
+ """
+ headers = _combine_cookie_fields(headers, hdr_validation_flags)
+ return headers
+
+
+def validate_outbound_headers(headers, hdr_validation_flags):
+ """
+ Validates and normalizes a header sequence that we are about to send.
+
+ :param headers: The HTTP header set.
+ :param hdr_validation_flags: An instance of HeaderValidationFlags.
+ """
+ headers = _reject_te(
+ headers, hdr_validation_flags
+ )
+ headers = _reject_connection_header(
+ headers, hdr_validation_flags
+ )
+ headers = _reject_pseudo_header_fields(
+ headers, hdr_validation_flags
+ )
+ headers = _check_sent_host_authority_header(
+ headers, hdr_validation_flags
+ )
+ headers = _check_path_header(headers, hdr_validation_flags)
+
+ return headers
+
+
+class SizeLimitDict(collections.OrderedDict):
+
+ def __init__(self, *args, **kwargs):
+ self._size_limit = kwargs.pop("size_limit", None)
+ super(SizeLimitDict, self).__init__(*args, **kwargs)
+
+ self._check_size_limit()
+
+ def __setitem__(self, key, value):
+ super(SizeLimitDict, self).__setitem__(key, value)
+
+ self._check_size_limit()
+
+ def _check_size_limit(self):
+ if self._size_limit is not None:
+ while len(self) > self._size_limit:
+ self.popitem(last=False)
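+
+
+# A usage sketch (illustrative only, not upstream code): SizeLimitDict is an
+# OrderedDict that evicts its oldest entries once the optional ``size_limit``
+# is exceeded.
+#
+#     >>> d = SizeLimitDict(size_limit=2)
+#     >>> d["a"], d["b"], d["c"] = 1, 2, 3
+#     >>> list(d.items())
+#     [('b', 2), ('c', 3)]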
diff --git a/testing/web-platform/tests/tools/third_party/h2/h2/windows.py b/testing/web-platform/tests/tools/third_party/h2/h2/windows.py
new file mode 100644
index 0000000000..6656975f48
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/h2/windows.py
@@ -0,0 +1,139 @@
+# -*- coding: utf-8 -*-
+"""
+h2/windows
+~~~~~~~~~~
+
+Defines tools for managing HTTP/2 flow control windows.
+
+The objects defined in this module are used to automatically manage HTTP/2
+flow control windows. Specifically, they keep track of what the size of the
+window is, how much data has been consumed from that window, and how much data
+the user has already used. It then implements a basic algorithm that attempts
+to manage the flow control window without user input, trying to ensure that it
+does not emit too many WINDOW_UPDATE frames.
+"""
+from __future__ import division
+
+from .exceptions import FlowControlError
+
+
+# The largest acceptable value for a HTTP/2 flow control window.
+LARGEST_FLOW_CONTROL_WINDOW = 2**31 - 1
+
+
+class WindowManager(object):
+ """
+ A basic HTTP/2 window manager.
+
+ :param max_window_size: The maximum size of the flow control window.
+ :type max_window_size: ``int``
+ """
+ def __init__(self, max_window_size):
+ assert max_window_size <= LARGEST_FLOW_CONTROL_WINDOW
+ self.max_window_size = max_window_size
+ self.current_window_size = max_window_size
+ self._bytes_processed = 0
+
+ def window_consumed(self, size):
+ """
+ We have received a certain number of bytes from the remote peer. This
+ necessarily shrinks the flow control window!
+
+ :param size: The number of flow controlled bytes we received from the
+ remote peer.
+ :type size: ``int``
+ :returns: Nothing.
+ :rtype: ``None``
+ """
+ self.current_window_size -= size
+ if self.current_window_size < 0:
+ raise FlowControlError("Flow control window shrunk below 0")
+
+ def window_opened(self, size):
+ """
+ The flow control window has been incremented, either because of manual
+ flow control management or because of the user changing the flow
+ control settings. This can have the effect of increasing what we
+ consider to be the "maximum" flow control window size.
+
+ This does not increase our view of how many bytes have been processed,
+ only of how much space is in the window.
+
+ :param size: The increment to the flow control window we received.
+ :type size: ``int``
+ :returns: Nothing
+ :rtype: ``None``
+ """
+ self.current_window_size += size
+
+ if self.current_window_size > LARGEST_FLOW_CONTROL_WINDOW:
+ raise FlowControlError(
+ "Flow control window mustn't exceed %d" %
+ LARGEST_FLOW_CONTROL_WINDOW
+ )
+
+ if self.current_window_size > self.max_window_size:
+ self.max_window_size = self.current_window_size
+
+ def process_bytes(self, size):
+ """
+ The application has informed us that it has processed a certain number
+ of bytes. This may cause us to want to emit a window update frame. If
+ we do want to emit a window update frame, this method will return the
+ number of bytes that we should increment the window by.
+
+ :param size: The number of flow controlled bytes that the application
+ has processed.
+ :type size: ``int``
+ :returns: The number of bytes to increment the flow control window by,
+ or ``None``.
+ :rtype: ``int`` or ``None``
+ """
+ self._bytes_processed += size
+ return self._maybe_update_window()
+
+ def _maybe_update_window(self):
+ """
+ Run the algorithm.
+
+ Our current algorithm can be described like this.
+
+        1. If no bytes have been processed, we immediately return ``None``.
+           There is no meaningful way for us to hand space in the window back
+           to the remote peer, so let's not even try.
+ 2. If there is no space in the flow control window, and we have
+ processed at least 1024 bytes (or 1/4 of the window, if the window
+ is smaller), we will emit a window update frame. This is to avoid
+ the risk of blocking a stream altogether.
+ 3. If there is space in the flow control window, and we have processed
+ at least 1/2 of the window worth of bytes, we will emit a window
+ update frame. This is to minimise the number of window update frames
+ we have to emit.
+
+ In a healthy system with large flow control windows, this will
+ irregularly emit WINDOW_UPDATE frames. This prevents us starving the
+ connection by emitting eleventy bajillion WINDOW_UPDATE frames,
+ especially in situations where the remote peer is sending a lot of very
+ small DATA frames.
+ """
+ # TODO: Can the window be smaller than 1024 bytes? If not, we can
+ # streamline this algorithm.
+ if not self._bytes_processed:
+ return None
+
+ max_increment = (self.max_window_size - self.current_window_size)
+ increment = 0
+
+ # Note that, even though we may increment less than _bytes_processed,
+ # we still want to set it to zero whenever we emit an increment. This
+ # is because we'll always increment up to the maximum we can.
+ if (self.current_window_size == 0) and (
+ self._bytes_processed > min(1024, self.max_window_size // 4)):
+ increment = min(self._bytes_processed, max_increment)
+ self._bytes_processed = 0
+ elif self._bytes_processed >= (self.max_window_size // 2):
+ increment = min(self._bytes_processed, max_increment)
+ self._bytes_processed = 0
+
+ self.current_window_size += increment
+ return increment
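+
+
+# A usage sketch (illustrative only, not upstream code; the sizes are made
+# up): with a 64 KiB window, no WINDOW_UPDATE is suggested until at least
+# half of the window's worth of bytes has been processed.
+#
+#     >>> m = WindowManager(65535)
+#     >>> m.window_consumed(40000)
+#     >>> m.process_bytes(10000)    # below 65535 // 2: nothing to emit yet
+#     0
+#     >>> m.process_bytes(25000)    # 35000 >= 65535 // 2: emit an update
+#     35000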
diff --git a/testing/web-platform/tests/tools/third_party/h2/setup.cfg b/testing/web-platform/tests/tools/third_party/h2/setup.cfg
new file mode 100644
index 0000000000..3670507ff1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/setup.cfg
@@ -0,0 +1,10 @@
+[tool:pytest]
+testpaths = test
+
+[wheel]
+universal = 1
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/testing/web-platform/tests/tools/third_party/h2/setup.py b/testing/web-platform/tests/tools/third_party/h2/setup.py
new file mode 100644
index 0000000000..1ce95d5796
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/setup.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import codecs
+import os
+import re
+import sys
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+# Get the version
+version_regex = r'__version__ = ["\']([^"\']*)["\']'
+with open('h2/__init__.py', 'r') as f:
+ text = f.read()
+ match = re.search(version_regex, text)
+
+ if match:
+ version = match.group(1)
+ else:
+ raise RuntimeError("No version number found!")
+
+# Stealing this from Kenneth Reitz
+if sys.argv[-1] == 'publish':
+ os.system('python setup.py sdist upload')
+ sys.exit()
+
+packages = [
+ 'h2',
+]
+
+readme = codecs.open('README.rst', encoding='utf-8').read()
+history = codecs.open('HISTORY.rst', encoding='utf-8').read()
+
+setup(
+ name='h2',
+ version=version,
+ description='HTTP/2 State-Machine based protocol implementation',
+ long_description=u'\n\n'.join([readme, history]),
+ author='Cory Benfield',
+ author_email='cory@lukasa.co.uk',
+ url='https://github.com/python-hyper/hyper-h2',
+ project_urls={
+ 'Documentation': 'https://python-hyper.org/projects/h2',
+ 'Source': 'https://github.com/python-hyper/hyper-h2',
+ },
+ packages=packages,
+ package_data={'': ['LICENSE', 'README.rst', 'CONTRIBUTORS.rst', 'HISTORY.rst', 'NOTICES']},
+ package_dir={'h2': 'h2'},
+ include_package_data=True,
+ license='MIT License',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ ],
+ install_requires=[
+ 'hyperframe>=5.2.0, <6',
+ 'hpack>=3.0,<4',
+ ],
+ extras_require={
+ ':python_version == "2.7"': ['enum34>=1.1.6, <2'],
+ }
+)
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/conftest.py b/testing/web-platform/tests/tools/third_party/h2/test/conftest.py
new file mode 100644
index 0000000000..c646ad361c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/conftest.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+from hypothesis import settings, HealthCheck
+
+import pytest
+import helpers
+
+# Set up a CI profile that allows slow example generation.
+settings.register_profile(
+ "travis",
+ settings(suppress_health_check=[HealthCheck.too_slow])
+)
+
+
+@pytest.fixture
+def frame_factory():
+ return helpers.FrameFactory()
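register_profile only defines the lenient CI profile above; it still has to be selected before it takes effect. A minimal sketch of how such a profile would typically be activated, assuming the standard Hypothesis API (the project's actual CI wiring is outside this diff):

    from hypothesis import settings, HealthCheck

    # Define the lenient profile as in conftest.py above, then activate it.
    settings.register_profile(
        "travis",
        settings(suppress_health_check=[HealthCheck.too_slow])
    )
    settings.load_profile("travis")
    # Alternatively, pytest can select it via --hypothesis-profile=travis.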
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/coroutine_tests.py b/testing/web-platform/tests/tools/third_party/h2/test/coroutine_tests.py
new file mode 100644
index 0000000000..0f48c02d99
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/coroutine_tests.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+"""
+coroutine_tests
+~~~~~~~~~~~~~~~
+
+This file gives access to a coroutine-based test class. This allows each test
+case to be defined as a pair of interacting coroutines, sending data to each
+other by yielding the flow of control.
+
+The advantage of this method is that we avoid the difficulty of using threads
+in Python, as well as the pain of using sockets and events to communicate and
+organise the communication. This makes the tests entirely deterministic and
+makes them behave identically on all platforms, as well as ensuring that they
+succeed or fail quickly.
+"""
+import itertools
+import functools
+
+import pytest
+
+
+class CoroutineTestCase(object):
+ """
+ A base class for tests that use interacting coroutines.
+
+ The run_until_complete method takes a number of coroutines as arguments.
+ Each one is, in order, passed the output of the previous coroutine until
+ one is exhausted. If a coroutine does not initially yield data (that is,
+ its first action is to receive data), the calling code should prime it by
+ using the 'server' decorator on this class.
+ """
+ def run_until_complete(self, *coroutines):
+ """
+ Executes a set of coroutines that communicate between each other. Each
+ one is, in order, passed the output of the previous coroutine until
+ one is exhausted. If a coroutine does not initially yield data (that
+ is, its first action is to receive data), the calling code should prime
+ it by using the 'server' decorator on this class.
+
+ Once a coroutine is exhausted, the method performs a final check to
+ ensure that all other coroutines are exhausted. This ensures that all
+ assertions in those coroutines got executed.
+ """
+ looping_coroutines = itertools.cycle(coroutines)
+ data = None
+
+ for coro in looping_coroutines:
+ try:
+ data = coro.send(data)
+ except StopIteration:
+ break
+
+ for coro in coroutines:
+ try:
+ next(coro)
+ except StopIteration:
+ continue
+ else:
+ pytest.fail("Coroutine %s not exhausted" % coro)
+
+ def server(self, func):
+ """
+ A decorator that marks a test coroutine as a 'server' coroutine: that
+ is, one whose first action is to consume data, rather than one that
+ initially emits data. The effect of this decorator is simply to prime
+ the coroutine.
+ """
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ c = func(*args, **kwargs)
+ next(c)
+ return c
+
+ return wrapper
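To make the driver above concrete, here is a hedged sketch of what a test built on CoroutineTestCase looks like: a client coroutine that emits first, a server coroutine that receives first (and so is primed with the 'server' decorator), and run_until_complete checking that both run to completion. The names are illustrative, and the sketch is assumed to live next to coroutine_tests.py so the import resolves.

    # Illustrative only: a toy pair of interacting coroutines.
    from coroutine_tests import CoroutineTestCase

    class TestEcho(CoroutineTestCase):
        def test_ping_pong(self):
            def client():
                # Emits first, so it needs no priming.
                reply = yield b'ping'
                assert reply == b'pong'

            def server():
                # Receives first, so it must be primed before use.
                request = yield
                assert request == b'ping'
                yield b'pong'

            primed_server = self.server(server)
            self.run_until_complete(client(), primed_server())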
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/helpers.py b/testing/web-platform/tests/tools/third_party/h2/test/helpers.py
new file mode 100644
index 0000000000..2a4e909321
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/helpers.py
@@ -0,0 +1,176 @@
+# -*- coding: utf-8 -*-
+"""
+helpers
+~~~~~~~
+
+This module contains helpers for the h2 tests.
+"""
+from hyperframe.frame import (
+ HeadersFrame, DataFrame, SettingsFrame, WindowUpdateFrame, PingFrame,
+ GoAwayFrame, RstStreamFrame, PushPromiseFrame, PriorityFrame,
+ ContinuationFrame, AltSvcFrame
+)
+from hpack.hpack import Encoder
+
+
+SAMPLE_SETTINGS = {
+ SettingsFrame.HEADER_TABLE_SIZE: 4096,
+ SettingsFrame.ENABLE_PUSH: 1,
+ SettingsFrame.MAX_CONCURRENT_STREAMS: 2,
+}
+
+
+class FrameFactory(object):
+ """
+ A class containing lots of helper methods and state to build frames. This
+ allows test cases to easily build correct HTTP/2 frames to feed to
+ hyper-h2.
+ """
+ def __init__(self):
+ self.encoder = Encoder()
+
+ def refresh_encoder(self):
+ self.encoder = Encoder()
+
+ def preamble(self):
+ return b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
+
+ def build_headers_frame(self,
+ headers,
+ flags=[],
+ stream_id=1,
+ **priority_kwargs):
+ """
+ Builds a single valid headers frame out of the contained headers.
+ """
+ f = HeadersFrame(stream_id)
+ f.data = self.encoder.encode(headers)
+ f.flags.add('END_HEADERS')
+ for flag in flags:
+ f.flags.add(flag)
+
+ for k, v in priority_kwargs.items():
+ setattr(f, k, v)
+
+ return f
+
+ def build_continuation_frame(self, header_block, flags=[], stream_id=1):
+ """
+ Builds a single continuation frame out of the binary header block.
+ """
+ f = ContinuationFrame(stream_id)
+ f.data = header_block
+ f.flags = set(flags)
+
+ return f
+
+ def build_data_frame(self, data, flags=None, stream_id=1, padding_len=0):
+ """
+ Builds a single data frame out of a chunk of data.
+ """
+ flags = set(flags) if flags is not None else set()
+ f = DataFrame(stream_id)
+ f.data = data
+ f.flags = flags
+
+ if padding_len:
+ flags.add('PADDED')
+ f.pad_length = padding_len
+
+ return f
+
+ def build_settings_frame(self, settings, ack=False):
+ """
+ Builds a single settings frame.
+ """
+ f = SettingsFrame(0)
+ if ack:
+ f.flags.add('ACK')
+
+ f.settings = settings
+ return f
+
+ def build_window_update_frame(self, stream_id, increment):
+ """
+ Builds a single WindowUpdate frame.
+ """
+ f = WindowUpdateFrame(stream_id)
+ f.window_increment = increment
+ return f
+
+ def build_ping_frame(self, ping_data, flags=None):
+ """
+ Builds a single Ping frame.
+ """
+ f = PingFrame(0)
+ f.opaque_data = ping_data
+ if flags:
+ f.flags = set(flags)
+
+ return f
+
+ def build_goaway_frame(self,
+ last_stream_id,
+ error_code=0,
+ additional_data=b''):
+ """
+ Builds a single GOAWAY frame.
+ """
+ f = GoAwayFrame(0)
+ f.error_code = error_code
+ f.last_stream_id = last_stream_id
+ f.additional_data = additional_data
+ return f
+
+ def build_rst_stream_frame(self, stream_id, error_code=0):
+ """
+ Builds a single RST_STREAM frame.
+ """
+ f = RstStreamFrame(stream_id)
+ f.error_code = error_code
+ return f
+
+ def build_push_promise_frame(self,
+ stream_id,
+ promised_stream_id,
+ headers,
+ flags=[]):
+ """
+ Builds a single PUSH_PROMISE frame.
+ """
+ f = PushPromiseFrame(stream_id)
+ f.promised_stream_id = promised_stream_id
+ f.data = self.encoder.encode(headers)
+ f.flags = set(flags)
+ f.flags.add('END_HEADERS')
+ return f
+
+ def build_priority_frame(self,
+ stream_id,
+ weight,
+ depends_on=0,
+ exclusive=False):
+ """
+ Builds a single priority frame.
+ """
+ f = PriorityFrame(stream_id)
+ f.depends_on = depends_on
+ f.stream_weight = weight
+ f.exclusive = exclusive
+ return f
+
+ def build_alt_svc_frame(self, stream_id, origin, field):
+ """
+ Builds a single ALTSVC frame.
+ """
+ f = AltSvcFrame(stream_id)
+ f.origin = origin
+ f.field = field
+ return f
+
+ def change_table_size(self, new_size):
+ """
+ Causes the encoder to send a dynamic size update in the next header
+ block it sends.
+ """
+ self.encoder.header_table_size = new_size
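The tests in the rest of this patch lean on FrameFactory to build wire-level frames and feed their serialized bytes into an H2Connection. A minimal sketch of that call pattern, assuming it is run from the test directory so that helpers is importable:

    import h2.config
    import h2.connection
    from helpers import FrameFactory

    factory = FrameFactory()
    config = h2.config.H2Configuration(client_side=False, header_encoding='utf-8')
    conn = h2.connection.H2Connection(config=config)

    # A server-side connection consumes the client preamble first, then a
    # HEADERS frame carrying a simple GET request.
    conn.receive_data(factory.preamble())
    headers = factory.build_headers_frame([
        (':authority', 'example.com'),
        (':path', '/'),
        (':scheme', 'https'),
        (':method', 'GET'),
    ])
    events = conn.receive_data(headers.serialize())
    print(events)  # expect a single RequestReceived event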
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_basic_logic.py b/testing/web-platform/tests/tools/third_party/h2/test/test_basic_logic.py
new file mode 100644
index 0000000000..7df99a6a57
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_basic_logic.py
@@ -0,0 +1,1877 @@
+# -*- coding: utf-8 -*-
+"""
+test_basic_logic
+~~~~~~~~~~~~~~~~
+
+Test the basic logic of the h2 state machines.
+"""
+import random
+import sys
+
+import hyperframe
+import pytest
+
+import h2.config
+import h2.connection
+import h2.errors
+import h2.events
+import h2.exceptions
+import h2.frame_buffer
+import h2.settings
+import h2.stream
+
+import helpers
+
+from hypothesis import given
+from hypothesis.strategies import integers
+
+
+IS_PYTHON3 = sys.version_info >= (3, 0)
+
+
+class TestBasicClient(object):
+ """
+ Basic client-side tests.
+ """
+ example_request_headers = [
+ (u':authority', u'example.com'),
+ (u':path', u'/'),
+ (u':scheme', u'https'),
+ (u':method', u'GET'),
+ ]
+ bytes_example_request_headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ ]
+ example_response_headers = [
+ (u':status', u'200'),
+ (u'server', u'fake-serv/0.1.0')
+ ]
+ bytes_example_response_headers = [
+ (b':status', b'200'),
+ (b'server', b'fake-serv/0.1.0')
+ ]
+
+ def test_begin_connection(self, frame_factory):
+ """
+ Client connections emit the HTTP/2 preamble.
+ """
+ c = h2.connection.H2Connection()
+ expected_settings = frame_factory.build_settings_frame(
+ c.local_settings
+ )
+ expected_data = (
+ b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n' + expected_settings.serialize()
+ )
+
+ events = c.initiate_connection()
+ assert not events
+ assert c.data_to_send() == expected_data
+
+ def test_sending_headers(self):
+ """
+ Single headers frames are correctly encoded.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # Clear the data, then send headers.
+ c.clear_outbound_data_buffer()
+ events = c.send_headers(1, self.example_request_headers)
+ assert not events
+ assert c.data_to_send() == (
+ b'\x00\x00\r\x01\x04\x00\x00\x00\x01'
+ b'A\x88/\x91\xd3]\x05\\\x87\xa7\x84\x87\x82'
+ )
+
+ def test_sending_data(self):
+ """
+ Single data frames are encoded correctly.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+
+ # Clear the data, then send some data.
+ c.clear_outbound_data_buffer()
+ events = c.send_data(1, b'some data')
+ assert not events
+ data_to_send = c.data_to_send()
+ assert (
+ data_to_send == b'\x00\x00\t\x00\x00\x00\x00\x00\x01some data'
+ )
+
+ buffer = h2.frame_buffer.FrameBuffer(server=False)
+ buffer.max_frame_size = 65535
+ buffer.add_data(data_to_send)
+ data_frame = list(buffer)[0]
+ sanity_check_data_frame(
+ data_frame=data_frame,
+ expected_flow_controlled_length=len(b'some data'),
+ expect_padded_flag=False,
+ expected_data_frame_pad_length=0
+ )
+
+ def test_sending_data_in_memoryview(self):
+ """
+ Support memoryview for sending data.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+
+ # Clear the data, then send some data.
+ c.clear_outbound_data_buffer()
+ events = c.send_data(1, memoryview(b'some data'))
+ assert not events
+ data_to_send = c.data_to_send()
+ assert (
+ data_to_send == b'\x00\x00\t\x00\x00\x00\x00\x00\x01some data'
+ )
+
+ def test_sending_data_with_padding(self):
+ """
+ Single data frames with padding are encoded correctly.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+
+ # Clear the data, then send some data.
+ c.clear_outbound_data_buffer()
+ events = c.send_data(1, b'some data', pad_length=5)
+ assert not events
+ data_to_send = c.data_to_send()
+ assert data_to_send == (
+ b'\x00\x00\x0f\x00\x08\x00\x00\x00\x01'
+ b'\x05some data\x00\x00\x00\x00\x00'
+ )
+
+ buffer = h2.frame_buffer.FrameBuffer(server=False)
+ buffer.max_frame_size = 65535
+ buffer.add_data(data_to_send)
+ data_frame = list(buffer)[0]
+ sanity_check_data_frame(
+ data_frame=data_frame,
+ expected_flow_controlled_length=len(b'some data') + 1 + 5,
+ expect_padded_flag=True,
+ expected_data_frame_pad_length=5
+ )
+
+ def test_sending_data_with_zero_length_padding(self):
+ """
+ Single data frames with zero-length padding are encoded
+ correctly.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+
+ # Clear the data, then send some data.
+ c.clear_outbound_data_buffer()
+ events = c.send_data(1, b'some data', pad_length=0)
+ assert not events
+ data_to_send = c.data_to_send()
+ assert data_to_send == (
+ b'\x00\x00\x0a\x00\x08\x00\x00\x00\x01'
+ b'\x00some data'
+ )
+
+ buffer = h2.frame_buffer.FrameBuffer(server=False)
+ buffer.max_frame_size = 65535
+ buffer.add_data(data_to_send)
+ data_frame = list(buffer)[0]
+ sanity_check_data_frame(
+ data_frame=data_frame,
+ expected_flow_controlled_length=len(b'some data') + 1,
+ expect_padded_flag=True,
+ expected_data_frame_pad_length=0
+ )
+
+ @pytest.mark.parametrize("expected_error,pad_length", [
+ (None, 0),
+ (None, 255),
+ (None, None),
+ (ValueError, -1),
+ (ValueError, 256),
+ (TypeError, 'invalid'),
+ (TypeError, ''),
+ (TypeError, '10'),
+ (TypeError, {}),
+ (TypeError, ['1', '2', '3']),
+ (TypeError, []),
+ (TypeError, 1.5),
+ (TypeError, 1.0),
+ (TypeError, -1.0),
+ ])
+ def test_sending_data_with_invalid_padding_length(self,
+ expected_error,
+ pad_length):
+ """
+ ``send_data`` with a ``pad_length`` parameter that is an integer
+ outside the range of [0, 255] throws a ``ValueError``, and a
+ ``pad_length`` parameter which is not an ``integer`` type
+ throws a ``TypeError``.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+
+ c.clear_outbound_data_buffer()
+ if expected_error is not None:
+ with pytest.raises(expected_error):
+ c.send_data(1, b'some data', pad_length=pad_length)
+ else:
+ c.send_data(1, b'some data', pad_length=pad_length)
+
+ def test_closing_stream_sending_data(self, frame_factory):
+ """
+ We can close a stream with a data frame.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+
+ f = frame_factory.build_data_frame(
+ data=b'some data',
+ flags=['END_STREAM'],
+ )
+
+ # Clear the data, then send some data.
+ c.clear_outbound_data_buffer()
+ events = c.send_data(1, b'some data', end_stream=True)
+ assert not events
+ assert c.data_to_send() == f.serialize()
+
+ def test_receiving_a_response(self, frame_factory):
+ """
+ When receiving a response, the ResponseReceived event fires.
+ """
+ config = h2.config.H2Configuration(header_encoding='utf-8')
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+
+ # Build and receive the response headers.
+ f = frame_factory.build_headers_frame(
+ self.example_response_headers
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.ResponseReceived)
+ assert event.stream_id == 1
+ assert event.headers == self.example_response_headers
+
+ def test_receiving_a_response_bytes(self, frame_factory):
+ """
+ When receiving a response, the ResponseReceived event fires with bytes
+ headers if the encoding is set appropriately.
+ """
+ config = h2.config.H2Configuration(header_encoding=False)
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+
+ # Build and receive the response headers.
+ f = frame_factory.build_headers_frame(
+ self.example_response_headers
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.ResponseReceived)
+ assert event.stream_id == 1
+ assert event.headers == self.bytes_example_response_headers
+
+ def test_receiving_a_response_change_encoding(self, frame_factory):
+ """
+ When receiving a response, the ResponseReceived event fires with bytes
+ headers if the encoding is set appropriately; if the encoding is changed
+ later, subsequent events reflect the new setting.
+ """
+ config = h2.config.H2Configuration(header_encoding=False)
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+
+ f = frame_factory.build_headers_frame(
+ self.example_response_headers
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.ResponseReceived)
+ assert event.stream_id == 1
+ assert event.headers == self.bytes_example_response_headers
+
+ c.send_headers(3, self.example_request_headers, end_stream=True)
+ c.config.header_encoding = 'utf-8'
+ f = frame_factory.build_headers_frame(
+ self.example_response_headers,
+ stream_id=3,
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.ResponseReceived)
+ assert event.stream_id == 3
+ assert event.headers == self.example_response_headers
+
+ def test_end_stream_without_data(self, frame_factory):
+ """
+ Ending a stream without data emits a zero-length DATA frame with
+ END_STREAM set.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=False)
+
+ # Clear the data
+ c.clear_outbound_data_buffer()
+ f = frame_factory.build_data_frame(b'', flags=['END_STREAM'])
+ events = c.end_stream(1)
+
+ assert not events
+ assert c.data_to_send() == f.serialize()
+
+ def test_cannot_send_headers_on_lower_stream_id(self):
+ """
+ Once stream ID x has been used, a stream ID y where y < x cannot be used.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(3, self.example_request_headers, end_stream=False)
+
+ with pytest.raises(h2.exceptions.StreamIDTooLowError) as e:
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+
+ assert e.value.stream_id == 1
+ assert e.value.max_stream_id == 3
+
+ def test_receiving_pushed_stream(self, frame_factory):
+ """
+ Pushed streams fire a PushedStreamReceived event, followed by
+ ResponseReceived when the response headers are received.
+ """
+ config = h2.config.H2Configuration(header_encoding='utf-8')
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=False)
+
+ f1 = frame_factory.build_headers_frame(
+ self.example_response_headers
+ )
+ f2 = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers,
+ flags=['END_HEADERS'],
+ )
+ f3 = frame_factory.build_headers_frame(
+ self.example_response_headers,
+ stream_id=2,
+ )
+ data = b''.join(x.serialize() for x in [f1, f2, f3])
+
+ events = c.receive_data(data)
+
+ assert len(events) == 3
+ stream_push_event = events[1]
+ response_event = events[2]
+ assert isinstance(stream_push_event, h2.events.PushedStreamReceived)
+ assert isinstance(response_event, h2.events.ResponseReceived)
+
+ assert stream_push_event.pushed_stream_id == 2
+ assert stream_push_event.parent_stream_id == 1
+ assert (
+ stream_push_event.headers == self.example_request_headers
+ )
+ assert response_event.stream_id == 2
+ assert response_event.headers == self.example_response_headers
+
+ def test_receiving_pushed_stream_bytes(self, frame_factory):
+ """
+ Pushed headers are not decoded if the header encoding is set to False.
+ """
+ config = h2.config.H2Configuration(header_encoding=False)
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=False)
+
+ f1 = frame_factory.build_headers_frame(
+ self.example_response_headers
+ )
+ f2 = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers,
+ flags=['END_HEADERS'],
+ )
+ f3 = frame_factory.build_headers_frame(
+ self.example_response_headers,
+ stream_id=2,
+ )
+ data = b''.join(x.serialize() for x in [f1, f2, f3])
+
+ events = c.receive_data(data)
+
+ assert len(events) == 3
+ stream_push_event = events[1]
+ response_event = events[2]
+ assert isinstance(stream_push_event, h2.events.PushedStreamReceived)
+ assert isinstance(response_event, h2.events.ResponseReceived)
+
+ assert stream_push_event.pushed_stream_id == 2
+ assert stream_push_event.parent_stream_id == 1
+ assert (
+ stream_push_event.headers == self.bytes_example_request_headers
+ )
+ assert response_event.stream_id == 2
+ assert response_event.headers == self.bytes_example_response_headers
+
+ def test_cannot_receive_pushed_stream_when_enable_push_is_0(self,
+ frame_factory):
+ """
+ If we have set SETTINGS_ENABLE_PUSH to 0, receiving PUSH_PROMISE frames
+ triggers the connection to be closed.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.local_settings.enable_push = 0
+ c.send_headers(1, self.example_request_headers, end_stream=False)
+
+ f1 = frame_factory.build_settings_frame({}, ack=True)
+ f2 = frame_factory.build_headers_frame(
+ self.example_response_headers
+ )
+ f3 = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers,
+ flags=['END_HEADERS'],
+ )
+ c.receive_data(f1.serialize())
+ c.receive_data(f2.serialize())
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f3.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ 0, h2.errors.ErrorCodes.PROTOCOL_ERROR
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_receiving_response_no_body(self, frame_factory):
+ """
+ Receiving a response without a body fires two events, ResponseReceived
+ and StreamEnded.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+
+ f = frame_factory.build_headers_frame(
+ self.example_response_headers,
+ flags=['END_STREAM']
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 2
+ response_event = events[0]
+ end_stream = events[1]
+
+ assert isinstance(response_event, h2.events.ResponseReceived)
+ assert isinstance(end_stream, h2.events.StreamEnded)
+
+ def test_oversize_headers(self):
+ """
+ Sending headers that are oversized generates a stream of CONTINUATION
+ frames.
+ """
+ all_bytes = [chr(x) for x in range(0, 256)]
+ if IS_PYTHON3:
+ all_bytes = [x.encode('latin1') for x in all_bytes]
+
+ large_binary_string = b''.join(
+ random.choice(all_bytes) for _ in range(0, 256)
+ )
+ test_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':method', 'GET'),
+ (':scheme', 'https'),
+ ('key', large_binary_string)
+ ]
+ c = h2.connection.H2Connection()
+
+ # Greatly shrink the max frame size to force us over.
+ c.max_outbound_frame_size = 48
+ c.initiate_connection()
+ c.send_headers(1, test_headers, end_stream=True)
+
+ # Use the frame buffer here, because we don't care about decoding
+ # the headers. Don't send all the data in because that will force the
+ # frame buffer to stop caching the CONTINUATION frames, so instead
+ # send all but one byte.
+ buffer = h2.frame_buffer.FrameBuffer(server=True)
+ buffer.max_frame_size = 65535
+ data = c.data_to_send()
+ buffer.add_data(data[:-1])
+
+ # Drain the buffer, confirming that it only provides a single frame
+ # (the settings frame)
+ assert len(list(buffer)) == 1
+
+ # Get the cached frames.
+ frames = buffer._headers_buffer
+
+ # Split the frames up.
+ headers_frame = frames[0]
+ continuation_frames = frames[1:]
+
+ assert isinstance(headers_frame, hyperframe.frame.HeadersFrame)
+ assert all(
+ map(
+ lambda f: isinstance(f, hyperframe.frame.ContinuationFrame),
+ continuation_frames)
+ )
+ assert all(
+ map(lambda f: len(f.data) <= c.max_outbound_frame_size, frames)
+ )
+
+ assert frames[0].flags == {'END_STREAM'}
+
+ buffer.add_data(data[-1:])
+ headers = list(buffer)[0]
+ assert isinstance(headers, hyperframe.frame.HeadersFrame)
+
+ def test_handle_stream_reset(self, frame_factory):
+ """
+ A stream being remotely reset fires a StreamReset event.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_rst_stream_frame(
+ stream_id=1,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.StreamReset)
+ assert event.stream_id == 1
+ assert event.error_code is h2.errors.ErrorCodes.STREAM_CLOSED
+ assert isinstance(event.error_code, h2.errors.ErrorCodes)
+ assert event.remote_reset
+
+ def test_handle_stream_reset_with_unknown_error_code(self, frame_factory):
+ """
+ Streams being remotely reset with unknown error codes behave exactly as
+ they do with known error codes, but the error code on the event is an
+ int, instead of being an ErrorCodes.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_rst_stream_frame(stream_id=1, error_code=0xFA)
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.StreamReset)
+ assert event.stream_id == 1
+ assert event.error_code == 250
+ assert not isinstance(event.error_code, h2.errors.ErrorCodes)
+ assert event.remote_reset
+
+ def test_can_consume_partial_data_from_connection(self):
+ """
+ We can do partial reads from the connection.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ assert len(c.data_to_send(2)) == 2
+ assert len(c.data_to_send(3)) == 3
+ assert 0 < len(c.data_to_send(500)) < 500
+ assert len(c.data_to_send(10)) == 0
+ assert len(c.data_to_send()) == 0
+
+ def test_we_can_update_settings(self, frame_factory):
+ """
+ Updating the settings emits a SETTINGS frame.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.clear_outbound_data_buffer()
+
+ new_settings = {
+ h2.settings.SettingCodes.HEADER_TABLE_SIZE: 52,
+ h2.settings.SettingCodes.ENABLE_PUSH: 0,
+ }
+ events = c.update_settings(new_settings)
+ assert not events
+
+ f = frame_factory.build_settings_frame(new_settings)
+ assert c.data_to_send() == f.serialize()
+
+ def test_settings_get_acked_correctly(self, frame_factory):
+ """
+ When settings changes are ACKed, they contain the changed settings.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ new_settings = {
+ h2.settings.SettingCodes.HEADER_TABLE_SIZE: 52,
+ h2.settings.SettingCodes.ENABLE_PUSH: 0,
+ }
+ c.update_settings(new_settings)
+
+ f = frame_factory.build_settings_frame({}, ack=True)
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.SettingsAcknowledged)
+ assert len(event.changed_settings) == len(new_settings)
+ for setting, value in new_settings.items():
+ assert event.changed_settings[setting].new_value == value
+
+ def test_cannot_create_new_outbound_stream_over_limit(self, frame_factory):
+ """
+ When the number of outbound streams exceeds the remote peer's
+ MAX_CONCURRENT_STREAMS setting, attempting to open new streams fails.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ f = frame_factory.build_settings_frame(
+ {h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 1}
+ )
+ c.receive_data(f.serialize())[0]
+
+ c.send_headers(1, self.example_request_headers)
+
+ with pytest.raises(h2.exceptions.TooManyStreamsError):
+ c.send_headers(3, self.example_request_headers)
+
+ def test_can_receive_trailers(self, frame_factory):
+ """
+ When two HEADERS blocks are received in the same stream from a
+ server, the second set are trailers.
+ """
+ config = h2.config.H2Configuration(header_encoding='utf-8')
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+ f = frame_factory.build_headers_frame(self.example_response_headers)
+ c.receive_data(f.serialize())
+
+ # Send in trailers.
+ trailers = [('content-length', '0')]
+ f = frame_factory.build_headers_frame(
+ trailers,
+ flags=['END_STREAM'],
+ )
+ events = c.receive_data(f.serialize())
+ assert len(events) == 2
+
+ event = events[0]
+ assert isinstance(event, h2.events.TrailersReceived)
+ assert event.headers == trailers
+ assert event.stream_id == 1
+
+ def test_reject_trailers_not_ending_stream(self, frame_factory):
+ """
+ When trailers are received without the END_STREAM flag being present,
+ this is a ProtocolError.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+ f = frame_factory.build_headers_frame(self.example_response_headers)
+ c.receive_data(f.serialize())
+
+ # Send in trailers.
+ c.clear_outbound_data_buffer()
+ trailers = [('content-length', '0')]
+ f = frame_factory.build_headers_frame(
+ trailers,
+ flags=[],
+ )
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=0, error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_can_send_trailers(self, frame_factory):
+ """
+ When a second set of headers is sent, it is properly treated as trailers.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.clear_outbound_data_buffer()
+ c.send_headers(1, self.example_request_headers)
+
+ # Now send trailers.
+ trailers = [('content-length', '0')]
+ c.send_headers(1, trailers, end_stream=True)
+
+ frame_factory.refresh_encoder()
+ f1 = frame_factory.build_headers_frame(
+ self.example_request_headers,
+ )
+ f2 = frame_factory.build_headers_frame(
+ trailers,
+ flags=['END_STREAM'],
+ )
+ assert c.data_to_send() == f1.serialize() + f2.serialize()
+
+ def test_trailers_must_have_end_stream(self, frame_factory):
+ """
+ A set of trailers must carry the END_STREAM flag.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # Send headers.
+ c.send_headers(1, self.example_request_headers)
+
+ # Now send trailers.
+ trailers = [('content-length', '0')]
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.send_headers(1, trailers)
+
+ def test_headers_are_lowercase(self, frame_factory):
+ """
+ When headers are sent, they are forced to lower-case.
+ """
+ weird_headers = self.example_request_headers + [
+ ('ChAnGiNg-CaSe', 'AlsoHere'),
+ ('alllowercase', 'alllowercase'),
+ ('ALLCAPS', 'ALLCAPS'),
+ ]
+ expected_headers = self.example_request_headers + [
+ ('changing-case', 'AlsoHere'),
+ ('alllowercase', 'alllowercase'),
+ ('allcaps', 'ALLCAPS'),
+ ]
+
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.clear_outbound_data_buffer()
+
+ c.send_headers(1, weird_headers)
+ expected_frame = frame_factory.build_headers_frame(
+ headers=expected_headers
+ )
+
+ assert c.data_to_send() == expected_frame.serialize()
+
+ @given(frame_size=integers(min_value=2**14, max_value=(2**24 - 1)))
+ def test_changing_max_frame_size(self, frame_factory, frame_size):
+ """
+ When the user changes the max frame size and the change is ACKed, the
+ remote peer is now bound by the new frame size.
+ """
+ # We need to refresh the encoder because hypothesis has a problem
+ # integrating with py.test, meaning that we use the same frame factory
+ # for all tests.
+ # See https://github.com/HypothesisWorks/hypothesis-python/issues/377
+ frame_factory.refresh_encoder()
+
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # Set up the stream.
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+ headers_frame = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ )
+ c.receive_data(headers_frame.serialize())
+
+ # Change the max frame size.
+ c.update_settings(
+ {h2.settings.SettingCodes.MAX_FRAME_SIZE: frame_size}
+ )
+ settings_ack = frame_factory.build_settings_frame({}, ack=True)
+ c.receive_data(settings_ack.serialize())
+
+ # Greatly increase the flow control windows: we're not here to test
+ # flow control today.
+ c.increment_flow_control_window(increment=(2 * frame_size) + 1)
+ c.increment_flow_control_window(
+ increment=(2 * frame_size) + 1, stream_id=1
+ )
+
+ # Send one DATA frame that is exactly the max frame size: confirm it's
+ # fine.
+ data = frame_factory.build_data_frame(
+ data=(b'\x00' * frame_size),
+ )
+ events = c.receive_data(data.serialize())
+ assert len(events) == 1
+ assert isinstance(events[0], h2.events.DataReceived)
+ assert events[0].flow_controlled_length == frame_size
+
+ # Send one that is one byte too large: confirm a protocol error is
+ # raised.
+ data.data += b'\x00'
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(data.serialize())
+
+ def test_cookies_are_joined_on_push(self, frame_factory):
+ """
+ RFC 7540 Section 8.1.2.5 requires that we join multiple Cookie headers
+ in a header block together when they're received on a push.
+ """
+ # This is a moderately varied set of cookie headers: some combined,
+ # some split.
+ cookie_headers = [
+ ('cookie',
+ 'username=John Doe; expires=Thu, 18 Dec 2013 12:00:00 UTC'),
+ ('cookie', 'path=1'),
+ ('cookie', 'test1=val1; test2=val2')
+ ]
+ expected = (
+ 'username=John Doe; expires=Thu, 18 Dec 2013 12:00:00 UTC; '
+ 'path=1; test1=val1; test2=val2'
+ )
+
+ config = h2.config.H2Configuration(header_encoding='utf-8')
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers + cookie_headers
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ e = events[0]
+
+ cookie_fields = [(n, v) for n, v in e.headers if n == 'cookie']
+ assert len(cookie_fields) == 1
+
+ _, v = cookie_fields[0]
+ assert v == expected
+
+ def test_cookies_arent_joined_without_normalization(self, frame_factory):
+ """
+ If inbound header normalization is disabled, cookie headers aren't
+ joined.
+ """
+ # This is a moderately varied set of cookie headers: some combined,
+ # some split.
+ cookie_headers = [
+ ('cookie',
+ 'username=John Doe; expires=Thu, 18 Dec 2013 12:00:00 UTC'),
+ ('cookie', 'path=1'),
+ ('cookie', 'test1=val1; test2=val2')
+ ]
+
+ config = h2.config.H2Configuration(
+ client_side=True,
+ normalize_inbound_headers=False,
+ header_encoding='utf-8'
+ )
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers + cookie_headers
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ e = events[0]
+
+ received_cookies = [(n, v) for n, v in e.headers if n == 'cookie']
+ assert len(received_cookies) == 3
+ assert cookie_headers == received_cookies
+
+
+class TestBasicServer(object):
+ """
+ Basic server-side tests.
+ """
+ example_request_headers = [
+ (u':authority', u'example.com'),
+ (u':path', u'/'),
+ (u':scheme', u'https'),
+ (u':method', u'GET'),
+ ]
+ bytes_example_request_headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ ]
+ example_response_headers = [
+ (':status', '200'),
+ ('server', 'hyper-h2/0.1.0')
+ ]
+ server_config = h2.config.H2Configuration(
+ client_side=False, header_encoding='utf-8'
+ )
+
+ def test_ignores_preamble(self):
+ """
+ The preamble does not cause any events or frames to be written.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
+
+ events = c.receive_data(preamble)
+ assert not events
+ assert not c.data_to_send()
+
+ @pytest.mark.parametrize("chunk_size", range(1, 24))
+ def test_drip_feed_preamble(self, chunk_size):
+ """
+ The preamble can be sent in chunks smaller than a single buffer.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
+ events = []
+
+ for i in range(0, len(preamble), chunk_size):
+ events += c.receive_data(preamble[i:i+chunk_size])
+
+ assert not events
+ assert not c.data_to_send()
+
+ def test_initiate_connection_sends_server_preamble(self, frame_factory):
+ """
+ For server-side connections, initiate_connection sends a server
+ preamble.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ expected_settings = frame_factory.build_settings_frame(
+ c.local_settings
+ )
+ expected_data = expected_settings.serialize()
+
+ events = c.initiate_connection()
+ assert not events
+ assert c.data_to_send() == expected_data
+
+ def test_headers_event(self, frame_factory):
+ """
+ When a headers frame is received a RequestReceived event fires.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(self.example_request_headers)
+ data = f.serialize()
+ events = c.receive_data(data)
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.RequestReceived)
+ assert event.stream_id == 1
+ assert event.headers == self.example_request_headers
+
+ def test_headers_event_bytes(self, frame_factory):
+ """
+ When a headers frame is received a RequestReceived event fires with
+ bytes headers if the encoding is set appropriately.
+ """
+ config = h2.config.H2Configuration(
+ client_side=False, header_encoding=False
+ )
+ c = h2.connection.H2Connection(config=config)
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(self.example_request_headers)
+ data = f.serialize()
+ events = c.receive_data(data)
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.RequestReceived)
+ assert event.stream_id == 1
+ assert event.headers == self.bytes_example_request_headers
+
+ def test_data_event(self, frame_factory):
+ """
+ Test that data received on a stream fires a DataReceived event.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ f1 = frame_factory.build_headers_frame(
+ self.example_request_headers, stream_id=3
+ )
+ f2 = frame_factory.build_data_frame(
+ b'some request data',
+ stream_id=3,
+ )
+ data = b''.join(map(lambda f: f.serialize(), [f1, f2]))
+ events = c.receive_data(data)
+
+ assert len(events) == 2
+ event = events[1]
+
+ assert isinstance(event, h2.events.DataReceived)
+ assert event.stream_id == 3
+ assert event.data == b'some request data'
+ assert event.flow_controlled_length == 17
+
+ def test_data_event_with_padding(self, frame_factory):
+ """
+ Test that data received on a stream fires a DataReceived event that
+ accounts for padding.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ f1 = frame_factory.build_headers_frame(
+ self.example_request_headers, stream_id=3
+ )
+ f2 = frame_factory.build_data_frame(
+ b'some request data',
+ stream_id=3,
+ padding_len=20
+ )
+ data = b''.join(map(lambda f: f.serialize(), [f1, f2]))
+ events = c.receive_data(data)
+
+ assert len(events) == 2
+ event = events[1]
+
+ assert isinstance(event, h2.events.DataReceived)
+ assert event.stream_id == 3
+ assert event.data == b'some request data'
+ assert event.flow_controlled_length == 17 + 20 + 1
+
+ def test_receiving_ping_frame(self, frame_factory):
+ """
+ Ping frames should be immediately ACKed.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ ping_data = b'\x01' * 8
+ sent_frame = frame_factory.build_ping_frame(ping_data)
+ expected_frame = frame_factory.build_ping_frame(
+ ping_data, flags=["ACK"]
+ )
+ expected_data = expected_frame.serialize()
+
+ c.clear_outbound_data_buffer()
+ events = c.receive_data(sent_frame.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.PingReceived)
+ assert event.ping_data == ping_data
+
+ assert c.data_to_send() == expected_data
+
+ def test_receiving_settings_frame_event(self, frame_factory):
+ """
+ Settings frames should cause a RemoteSettingsChanged event to fire.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_settings_frame(
+ settings=helpers.SAMPLE_SETTINGS
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.RemoteSettingsChanged)
+ assert len(event.changed_settings) == len(helpers.SAMPLE_SETTINGS)
+
+ def test_acknowledging_settings(self, frame_factory):
+ """
+ Acknowledging settings causes an appropriate SETTINGS frame to be emitted.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ received_frame = frame_factory.build_settings_frame(
+ settings=helpers.SAMPLE_SETTINGS
+ )
+ expected_frame = frame_factory.build_settings_frame(
+ settings={}, ack=True
+ )
+ expected_data = expected_frame.serialize()
+
+ c.clear_outbound_data_buffer()
+ events = c.receive_data(received_frame.serialize())
+
+ assert len(events) == 1
+ assert c.data_to_send() == expected_data
+
+ def test_close_connection(self, frame_factory):
+ """
+ Closing the connection with no error code emits a GOAWAY frame with
+ error code 0.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_goaway_frame(last_stream_id=0)
+ expected_data = f.serialize()
+
+ c.clear_outbound_data_buffer()
+ events = c.close_connection()
+
+ assert not events
+ assert c.data_to_send() == expected_data
+
+ @pytest.mark.parametrize("error_code", h2.errors.ErrorCodes)
+ def test_close_connection_with_error_code(self, frame_factory, error_code):
+ """
+ Closing the connection with an error code emits a GOAWAY frame with
+ that error code.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_goaway_frame(
+ error_code=error_code, last_stream_id=0
+ )
+ expected_data = f.serialize()
+
+ c.clear_outbound_data_buffer()
+ events = c.close_connection(error_code)
+
+ assert not events
+ assert c.data_to_send() == expected_data
+
+ @pytest.mark.parametrize("last_stream_id,output", [
+ (None, 23),
+ (0, 0),
+ (42, 42)
+ ])
+ def test_close_connection_with_last_stream_id(self, frame_factory,
+ last_stream_id, output):
+ """
+ Closing the connection with last_stream_id set emits a GOAWAY frame
+ with that value.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ headers_frame = frame_factory.build_headers_frame(
+ [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ],
+ stream_id=23)
+ c.receive_data(headers_frame.serialize())
+
+ f = frame_factory.build_goaway_frame(
+ last_stream_id=output
+ )
+ expected_data = f.serialize()
+
+ c.clear_outbound_data_buffer()
+ events = c.close_connection(last_stream_id=last_stream_id)
+
+ assert not events
+ assert c.data_to_send() == expected_data
+
+ @pytest.mark.parametrize("additional_data,output", [
+ (None, b''),
+ (b'', b''),
+ (b'foobar', b'foobar')
+ ])
+ def test_close_connection_with_additional_data(self, frame_factory,
+ additional_data, output):
+ """
+ Closing the connection with additional debug data emits a GOAWAY frame
+ with that data attached.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_goaway_frame(
+ last_stream_id=0, additional_data=output
+ )
+ expected_data = f.serialize()
+
+ c.clear_outbound_data_buffer()
+ events = c.close_connection(additional_data=additional_data)
+
+ assert not events
+ assert c.data_to_send() == expected_data
+
+ def test_reset_stream(self, frame_factory):
+ """
+ Resetting a stream with no error code emits a RST_STREAM frame with
+ error code 0.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_headers_frame(self.example_request_headers)
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ expected_frame = frame_factory.build_rst_stream_frame(stream_id=1)
+ expected_data = expected_frame.serialize()
+
+ events = c.reset_stream(stream_id=1)
+
+ assert not events
+ assert c.data_to_send() == expected_data
+
+ @pytest.mark.parametrize("error_code", h2.errors.ErrorCodes)
+ def test_reset_stream_with_error_code(self, frame_factory, error_code):
+ """
+ Resetting a stream with an error code emits a RST_STREAM frame with
+ that error code.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers,
+ stream_id=3
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ expected_frame = frame_factory.build_rst_stream_frame(
+ stream_id=3, error_code=error_code
+ )
+ expected_data = expected_frame.serialize()
+
+ events = c.reset_stream(stream_id=3, error_code=error_code)
+
+ assert not events
+ assert c.data_to_send() == expected_data
+
+ def test_cannot_reset_nonexistent_stream(self, frame_factory):
+ """
+ Resetting nonexistent streams raises NoSuchStreamError.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers,
+ stream_id=3
+ )
+ c.receive_data(f.serialize())
+
+ with pytest.raises(h2.exceptions.NoSuchStreamError) as e:
+ c.reset_stream(stream_id=1)
+
+ assert e.value.stream_id == 1
+
+ with pytest.raises(h2.exceptions.NoSuchStreamError) as e:
+ c.reset_stream(stream_id=5)
+
+ assert e.value.stream_id == 5
+
+ def test_basic_sending_ping_frame_logic(self, frame_factory):
+ """
+ Sending ping frames serializes a ping frame on stream 0 with
+ appropriate opaque data.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ ping_data = b'\x01\x02\x03\x04\x05\x06\x07\x08'
+
+ expected_frame = frame_factory.build_ping_frame(ping_data)
+ expected_data = expected_frame.serialize()
+
+ events = c.ping(ping_data)
+
+ assert not events
+ assert c.data_to_send() == expected_data
+
+ @pytest.mark.parametrize(
+ 'opaque_data',
+ [
+ b'',
+ b'\x01\x02\x03\x04\x05\x06\x07',
+ u'abcdefgh',
+ b'too many bytes',
+ ]
+ )
+ def test_ping_frame_opaque_data_must_be_length_8_bytestring(self,
+ frame_factory,
+ opaque_data):
+ """
+ Sending a ping frame only works with 8-byte bytestrings.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ with pytest.raises(ValueError):
+ c.ping(opaque_data)
+
+ def test_receiving_ping_acknowledgement(self, frame_factory):
+ """
+ Receiving a PING acknowledgement fires a PingAckReceived event.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ ping_data = b'\x01\x02\x03\x04\x05\x06\x07\x08'
+
+ f = frame_factory.build_ping_frame(
+ ping_data, flags=['ACK']
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.PingAckReceived)
+ assert isinstance(event, h2.events.PingAcknowledged) # deprecated
+ assert event.ping_data == ping_data
+
+ def test_stream_ended_remotely(self, frame_factory):
+ """
+ When the remote stream ends with a non-empty data frame a DataReceived
+ event and a StreamEnded event are fired.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ f1 = frame_factory.build_headers_frame(
+ self.example_request_headers, stream_id=3
+ )
+ f2 = frame_factory.build_data_frame(
+ b'some request data',
+ flags=['END_STREAM'],
+ stream_id=3,
+ )
+ data = b''.join(map(lambda f: f.serialize(), [f1, f2]))
+ events = c.receive_data(data)
+
+ assert len(events) == 3
+ data_event = events[1]
+ stream_ended_event = events[2]
+
+ assert isinstance(data_event, h2.events.DataReceived)
+ assert isinstance(stream_ended_event, h2.events.StreamEnded)
+ assert stream_ended_event.stream_id == 3
+
+ def test_can_push_stream(self, frame_factory):
+ """
+ Pushing a stream causes a PUSH_PROMISE frame to be emitted.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+
+ frame_factory.refresh_encoder()
+ expected_frame = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers,
+ flags=['END_HEADERS'],
+ )
+
+ c.clear_outbound_data_buffer()
+ c.push_stream(
+ stream_id=1,
+ promised_stream_id=2,
+ request_headers=self.example_request_headers
+ )
+
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_cannot_push_streams_when_disabled(self, frame_factory):
+ """
+ When the remote peer has disabled stream pushing, we should fail.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_settings_frame(
+ {h2.settings.SettingCodes.ENABLE_PUSH: 0}
+ )
+ c.receive_data(f.serialize())
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.push_stream(
+ stream_id=1,
+ promised_stream_id=2,
+ request_headers=self.example_request_headers
+ )
+
+ def test_settings_remote_change_header_table_size(self, frame_factory):
+ """
+ Acknowledging a remote HEADER_TABLE_SIZE settings change causes us to
+ change the header table size of our encoder.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ assert c.encoder.header_table_size == 4096
+
+ received_frame = frame_factory.build_settings_frame(
+ {h2.settings.SettingCodes.HEADER_TABLE_SIZE: 80}
+ )
+ c.receive_data(received_frame.serialize())[0]
+
+ assert c.encoder.header_table_size == 80
+
+ def test_settings_local_change_header_table_size(self, frame_factory):
+ """
+ The remote peer acknowledging a local HEADER_TABLE_SIZE settings change
+ does not cause us to change the header table size of our decoder.
+
+ For an explanation of why this test is this way around, see issue #37.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ assert c.decoder.header_table_size == 4096
+
+ expected_frame = frame_factory.build_settings_frame({}, ack=True)
+ c.update_settings(
+ {h2.settings.SettingCodes.HEADER_TABLE_SIZE: 80}
+ )
+ c.receive_data(expected_frame.serialize())
+ c.clear_outbound_data_buffer()
+
+ assert c.decoder.header_table_size == 4096
+
+ def test_restricting_outbound_frame_size_by_settings(self, frame_factory):
+ """
+ The remote peer can shrink the maximum outbound frame size using
+ settings.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(self.example_request_headers)
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.FrameTooLargeError):
+ c.send_data(1, b'\x01' * 17000)
+
+ received_frame = frame_factory.build_settings_frame(
+ {h2.settings.SettingCodes.MAX_FRAME_SIZE: 17001}
+ )
+ c.receive_data(received_frame.serialize())
+
+ c.send_data(1, b'\x01' * 17000)
+ assert c.data_to_send()
+
+ def test_restricting_inbound_frame_size_by_settings(self, frame_factory):
+ """
+ We throw ProtocolErrors and tear down connections if oversize frames
+ are received.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ h = frame_factory.build_headers_frame(self.example_request_headers)
+ c.receive_data(h.serialize())
+ c.clear_outbound_data_buffer()
+
+ data_frame = frame_factory.build_data_frame(b'\x01' * 17000)
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(data_frame.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1, error_code=h2.errors.ErrorCodes.FRAME_SIZE_ERROR
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_cannot_receive_new_streams_over_limit(self, frame_factory):
+ """
+ When the number of inbound streams exceeds our MAX_CONCURRENT_STREAMS
+ setting, the remote peer's attempts to open new streams fail.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.update_settings(
+ {h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 1}
+ )
+ f = frame_factory.build_settings_frame({}, ack=True)
+ c.receive_data(f.serialize())
+
+ f = frame_factory.build_headers_frame(
+ stream_id=1,
+ headers=self.example_request_headers,
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ stream_id=3,
+ headers=self.example_request_headers,
+ )
+ with pytest.raises(h2.exceptions.TooManyStreamsError):
+ c.receive_data(f.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1, error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_can_receive_trailers(self, frame_factory):
+ """
+ When two HEADERS blocks are received in the same stream from a
+ client, the second set are trailers.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_headers_frame(self.example_request_headers)
+ c.receive_data(f.serialize())
+
+ # Send in trailers.
+ trailers = [('content-length', '0')]
+ f = frame_factory.build_headers_frame(
+ trailers,
+ flags=['END_STREAM'],
+ )
+ events = c.receive_data(f.serialize())
+ assert len(events) == 2
+
+ event = events[0]
+ assert isinstance(event, h2.events.TrailersReceived)
+ assert event.headers == trailers
+ assert event.stream_id == 1
+
+ def test_reject_trailers_not_ending_stream(self, frame_factory):
+ """
+ When trailers are received without the END_STREAM flag being present,
+ this is a ProtocolError.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_headers_frame(self.example_request_headers)
+ c.receive_data(f.serialize())
+
+ # Send in trailers.
+ c.clear_outbound_data_buffer()
+ trailers = [('content-length', '0')]
+ f = frame_factory.build_headers_frame(
+ trailers,
+ flags=[],
+ )
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1, error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_can_send_trailers(self, frame_factory):
+ """
+ When a second set of headers is sent, it is properly treated as trailers.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_headers_frame(self.example_request_headers)
+ c.receive_data(f.serialize())
+
+ # Send headers.
+ c.clear_outbound_data_buffer()
+ c.send_headers(1, self.example_response_headers)
+
+ # Now send trailers.
+ trailers = [('content-length', '0')]
+ c.send_headers(1, trailers, end_stream=True)
+
+ frame_factory.refresh_encoder()
+ f1 = frame_factory.build_headers_frame(
+ self.example_response_headers,
+ )
+ f2 = frame_factory.build_headers_frame(
+ trailers,
+ flags=['END_STREAM'],
+ )
+ assert c.data_to_send() == f1.serialize() + f2.serialize()
+
+ def test_trailers_must_have_end_stream(self, frame_factory):
+ """
+ A set of trailers must carry the END_STREAM flag.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_headers_frame(self.example_request_headers)
+ c.receive_data(f.serialize())
+
+ # Send headers.
+ c.send_headers(1, self.example_response_headers)
+
+ # Now send trailers.
+ trailers = [('content-length', '0')]
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.send_headers(1, trailers)
+
+ @pytest.mark.parametrize("frame_id", range(12, 256))
+ def test_unknown_frames_are_ignored(self, frame_factory, frame_id):
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_data_frame(data=b'abcdefghtdst')
+ f.type = frame_id
+
+ events = c.receive_data(f.serialize())
+ assert not c.data_to_send()
+ assert len(events) == 1
+ assert isinstance(events[0], h2.events.UnknownFrameReceived)
+ assert isinstance(events[0].frame, hyperframe.frame.ExtensionFrame)
+
+ def test_can_send_goaway_repeatedly(self, frame_factory):
+ """
+ We can send a GOAWAY frame as many times as we like.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ c.close_connection()
+ c.close_connection()
+ c.close_connection()
+
+ f = frame_factory.build_goaway_frame(last_stream_id=0)
+
+ assert c.data_to_send() == (f.serialize() * 3)
+
+ def test_receiving_goaway_frame(self, frame_factory):
+ """
+        Receiving a GOAWAY frame fires a ConnectionTerminated event,
+        transitions the connection to the CLOSED state, and clears the
+        outbound data buffer.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_goaway_frame(
+ last_stream_id=5, error_code=h2.errors.ErrorCodes.SETTINGS_TIMEOUT
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.ConnectionTerminated)
+ assert event.error_code == h2.errors.ErrorCodes.SETTINGS_TIMEOUT
+ assert isinstance(event.error_code, h2.errors.ErrorCodes)
+ assert event.last_stream_id == 5
+ assert event.additional_data is None
+ assert c.state_machine.state == h2.connection.ConnectionState.CLOSED
+
+ assert not c.data_to_send()
+
+ def test_receiving_multiple_goaway_frames(self, frame_factory):
+ """
+ Multiple GOAWAY frames can be received at once, and are allowed. Each
+ one fires a ConnectionTerminated event.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_goaway_frame(last_stream_id=0)
+ events = c.receive_data(f.serialize() * 3)
+
+ assert len(events) == 3
+ assert all(
+ isinstance(event, h2.events.ConnectionTerminated)
+ for event in events
+ )
+
+ def test_receiving_goaway_frame_with_additional_data(self, frame_factory):
+ """
+        A GOAWAY frame can contain additional debug data, which should be
+        exposed via the ConnectionTerminated event.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ additional_data = b'debug data'
+ f = frame_factory.build_goaway_frame(last_stream_id=0,
+ additional_data=additional_data)
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.ConnectionTerminated)
+ assert event.additional_data == additional_data
+
+ def test_receiving_goaway_frame_with_unknown_error(self, frame_factory):
+ """
+ Receiving a GOAWAY frame with an unknown error code behaves exactly the
+ same as receiving one we know about, but the code is reported as an
+ integer instead of as an ErrorCodes.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_goaway_frame(
+ last_stream_id=5, error_code=0xFA
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.ConnectionTerminated)
+ assert event.error_code == 250
+ assert not isinstance(event.error_code, h2.errors.ErrorCodes)
+ assert event.last_stream_id == 5
+ assert event.additional_data is None
+ assert c.state_machine.state == h2.connection.ConnectionState.CLOSED
+
+ assert not c.data_to_send()
+
+ def test_cookies_are_joined(self, frame_factory):
+ """
+ RFC 7540 Section 8.1.2.5 requires that we join multiple Cookie headers
+ in a header block together.
+ """
+ # This is a moderately varied set of cookie headers: some combined,
+ # some split.
+ cookie_headers = [
+ ('cookie',
+ 'username=John Doe; expires=Thu, 18 Dec 2013 12:00:00 UTC'),
+ ('cookie', 'path=1'),
+ ('cookie', 'test1=val1; test2=val2')
+ ]
+ expected = (
+ 'username=John Doe; expires=Thu, 18 Dec 2013 12:00:00 UTC; '
+ 'path=1; test1=val1; test2=val2'
+ )
+
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers + cookie_headers
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ e = events[0]
+
+ cookie_fields = [(n, v) for n, v in e.headers if n == 'cookie']
+ assert len(cookie_fields) == 1
+
+ _, v = cookie_fields[0]
+ assert v == expected
+
+ def test_cookies_arent_joined_without_normalization(self, frame_factory):
+ """
+ If inbound header normalization is disabled, cookie headers aren't
+ joined.
+ """
+ # This is a moderately varied set of cookie headers: some combined,
+ # some split.
+ cookie_headers = [
+ ('cookie',
+ 'username=John Doe; expires=Thu, 18 Dec 2013 12:00:00 UTC'),
+ ('cookie', 'path=1'),
+ ('cookie', 'test1=val1; test2=val2')
+ ]
+
+ config = h2.config.H2Configuration(
+ client_side=False,
+ normalize_inbound_headers=False,
+ header_encoding='utf-8'
+ )
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers + cookie_headers
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ e = events[0]
+
+ received_cookies = [(n, v) for n, v in e.headers if n == 'cookie']
+ assert len(received_cookies) == 3
+ assert cookie_headers == received_cookies
+
+ def test_stream_repr(self):
+ """
+ Ensure stream string representation is appropriate.
+ """
+ s = h2.stream.H2Stream(4, None, 12, 14)
+ assert repr(s) == "<H2Stream id:4 state:<StreamState.IDLE: 0>>"
+
+
+def sanity_check_data_frame(data_frame,
+ expected_flow_controlled_length,
+ expect_padded_flag,
+ expected_data_frame_pad_length):
+ """
+    Assert that ``data_frame`` is a ``hyperframe.frame.DataFrame`` and that
+    its ``flags``, ``flow_controlled_length`` and ``pad_length`` match
+    expectations.
+ """
+
+ assert isinstance(data_frame, hyperframe.frame.DataFrame)
+
+ assert data_frame.flow_controlled_length == expected_flow_controlled_length
+
+ if expect_padded_flag:
+ assert 'PADDED' in data_frame.flags
+ else:
+ assert 'PADDED' not in data_frame.flags
+
+ assert data_frame.pad_length == expected_data_frame_pad_length
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_closed_streams.py b/testing/web-platform/tests/tools/third_party/h2/test/test_closed_streams.py
new file mode 100644
index 0000000000..631a66787a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_closed_streams.py
@@ -0,0 +1,555 @@
+# -*- coding: utf-8 -*-
+"""
+test_closed_streams
+~~~~~~~~~~~~~~~~~~~
+
+Tests that we handle closed streams correctly.
+"""
+import pytest
+
+import h2.config
+import h2.connection
+import h2.errors
+import h2.events
+import h2.exceptions
+
+
+class TestClosedStreams(object):
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+ example_response_headers = [
+ (':status', '200'),
+ ('server', 'fake-serv/0.1.0')
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ def test_can_receive_multiple_rst_stream_frames(self, frame_factory):
+ """
+ Multiple RST_STREAM frames can be received, either at once or well
+ after one another. Only the first fires an event.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+
+ f = frame_factory.build_rst_stream_frame(stream_id=1)
+ events = c.receive_data(f.serialize() * 3)
+
+ # Force an iteration over all the streams to remove them.
+ c.open_outbound_streams
+
+ # Receive more data.
+ events += c.receive_data(f.serialize() * 3)
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.StreamReset)
+
+ def test_receiving_low_stream_id_causes_goaway(self, frame_factory):
+ """
+ The remote peer creating a stream with a lower ID than one we've seen
+ causes a GOAWAY frame.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.initiate_connection()
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers,
+ stream_id=3,
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers,
+ stream_id=1,
+ )
+
+ with pytest.raises(h2.exceptions.StreamIDTooLowError) as e:
+ c.receive_data(f.serialize())
+
+ assert e.value.stream_id == 1
+ assert e.value.max_stream_id == 3
+
+ f = frame_factory.build_goaway_frame(
+ last_stream_id=3,
+ error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
+ )
+ assert c.data_to_send() == f.serialize()
+
+ def test_closed_stream_not_present_in_streams_dict(self, frame_factory):
+ """
+ When streams have been closed, they get removed from the streams
+ dictionary the next time we count the open streams.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.initiate_connection()
+
+ f = frame_factory.build_headers_frame(self.example_request_headers)
+ c.receive_data(f.serialize())
+ c.push_stream(1, 2, self.example_request_headers)
+ c.reset_stream(1)
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_rst_stream_frame(stream_id=2)
+ c.receive_data(f.serialize())
+
+ # Force a count of the streams.
+ assert not c.open_outbound_streams
+
+ # The streams dictionary should be empty.
+ assert not c.streams
+
+ def test_receive_rst_stream_on_closed_stream(self, frame_factory):
+ """
+        An RST_STREAM frame should be ignored if the stream is in the closed
+        state. See RFC 7540 Section 5.1 (closed state).
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # Client sends request
+ c.send_headers(1, self.example_request_headers)
+
+        # Some time passes and the client sends a DATA frame and closes the
+        # stream, so it is now in a half-closed state.
+ c.send_data(1, b'some data', end_stream=True)
+
+        # The server received the HEADERS frame but the DATA frame is still
+        # on the way, so the stream is in the open state on the server side.
+        # In this state the server may end the stream and then reset it -
+        # this trick lets the server close the stream immediately.
+ headers_frame = frame_factory.build_headers_frame(
+ [(':status', '200')],
+ flags=['END_STREAM'],
+ stream_id=1,
+ )
+ events = c.receive_data(headers_frame.serialize())
+ assert len(events) == 2
+ response_received, stream_ended = events
+ assert isinstance(response_received, h2.events.ResponseReceived)
+ assert isinstance(stream_ended, h2.events.StreamEnded)
+
+ rst_stream_frame = frame_factory.build_rst_stream_frame(stream_id=1)
+ events = c.receive_data(rst_stream_frame.serialize())
+ assert not events
+
+ def test_receive_window_update_on_closed_stream(self, frame_factory):
+ """
+        A WINDOW_UPDATE frame should be ignored if the stream is in the
+        closed state. See RFC 7540 Section 5.1 (closed state).
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # Client sends request
+ c.send_headers(1, self.example_request_headers)
+
+        # Some time passes and the client sends a DATA frame and closes the
+        # stream, so it is now in a half-closed state.
+ c.send_data(1, b'some data', end_stream=True)
+
+        # The server received the HEADERS frame but the DATA frame is still
+        # on the way, so the stream is in the open state on the server side.
+        # In this state the server may end the stream and then acknowledge
+        # the received data by sending WINDOW_UPDATE frames.
+ headers_frame = frame_factory.build_headers_frame(
+ [(':status', '200')],
+ flags=['END_STREAM'],
+ stream_id=1,
+ )
+ events = c.receive_data(headers_frame.serialize())
+ assert len(events) == 2
+ response_received, stream_ended = events
+ assert isinstance(response_received, h2.events.ResponseReceived)
+ assert isinstance(stream_ended, h2.events.StreamEnded)
+
+ window_update_frame = frame_factory.build_window_update_frame(
+ stream_id=1,
+ increment=1,
+ )
+ events = c.receive_data(window_update_frame.serialize())
+ assert not events
+
+
+class TestStreamsClosedByEndStream(object):
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+ example_response_headers = [
+ (':status', '200'),
+ ('server', 'fake-serv/0.1.0')
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ @pytest.mark.parametrize(
+ "frame",
+ [
+ lambda self, ff: ff.build_headers_frame(
+ self.example_request_headers, flags=['END_STREAM']),
+ lambda self, ff: ff.build_headers_frame(
+ self.example_request_headers),
+ ]
+ )
+ @pytest.mark.parametrize("clear_streams", [True, False])
+ def test_frames_after_recv_end_will_error(self,
+ frame_factory,
+ frame,
+ clear_streams):
+ """
+ A stream that is closed by receiving END_STREAM raises
+ ProtocolError when it receives an unexpected frame.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.initiate_connection()
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers, flags=['END_STREAM']
+ )
+ c.receive_data(f.serialize())
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_response_headers,
+ end_stream=True
+ )
+
+ if clear_streams:
+ # Call open_inbound_streams to force the connection to clean
+ # closed streams.
+ c.open_inbound_streams
+
+ c.clear_outbound_data_buffer()
+
+ f = frame(self, frame_factory)
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ f = frame_factory.build_goaway_frame(
+ last_stream_id=1,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ )
+ assert c.data_to_send() == f.serialize()
+
+ @pytest.mark.parametrize(
+ "frame",
+ [
+ lambda self, ff: ff.build_headers_frame(
+ self.example_response_headers, flags=['END_STREAM']),
+ lambda self, ff: ff.build_headers_frame(
+ self.example_response_headers),
+ ]
+ )
+ @pytest.mark.parametrize("clear_streams", [True, False])
+ def test_frames_after_send_end_will_error(self,
+ frame_factory,
+ frame,
+ clear_streams):
+ """
+ A stream that is closed by sending END_STREAM raises
+ ProtocolError when it receives an unexpected frame.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers,
+ end_stream=True)
+
+ f = frame_factory.build_headers_frame(
+ self.example_response_headers, flags=['END_STREAM']
+ )
+ c.receive_data(f.serialize())
+
+ if clear_streams:
+ # Call open_outbound_streams to force the connection to clean
+ # closed streams.
+ c.open_outbound_streams
+
+ c.clear_outbound_data_buffer()
+
+ f = frame(self, frame_factory)
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ f = frame_factory.build_goaway_frame(
+ last_stream_id=0,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ )
+ assert c.data_to_send() == f.serialize()
+
+ @pytest.mark.parametrize(
+ "frame",
+ [
+ lambda self, ff: ff.build_window_update_frame(1, 1),
+ lambda self, ff: ff.build_rst_stream_frame(1)
+ ]
+ )
+ def test_frames_after_send_end_will_be_ignored(self,
+ frame_factory,
+ frame):
+ """
+        WINDOW_UPDATE and RST_STREAM frames received on a stream that was
+        closed by sending END_STREAM are simply ignored.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.initiate_connection()
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers, flags=['END_STREAM']
+ )
+ c.receive_data(f.serialize())
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_response_headers,
+ end_stream=True
+ )
+
+ c.clear_outbound_data_buffer()
+
+ f = frame(self, frame_factory)
+ events = c.receive_data(f.serialize())
+
+ assert not events
+
+
+class TestStreamsClosedByRstStream(object):
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+ example_response_headers = [
+ (':status', '200'),
+ ('server', 'fake-serv/0.1.0')
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ @pytest.mark.parametrize(
+ "frame",
+ [
+ lambda self, ff: ff.build_headers_frame(
+ self.example_request_headers),
+ lambda self, ff: ff.build_headers_frame(
+ self.example_request_headers, flags=['END_STREAM']),
+ ]
+ )
+ def test_resets_further_frames_after_recv_reset(self,
+ frame_factory,
+ frame):
+ """
+        A stream that was closed by receiving RST_STREAM can receive further
+        frames: it simply sends RST_STREAM for them, and additionally
+ WINDOW_UPDATE for DATA frames.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.initiate_connection()
+
+ header_frame = frame_factory.build_headers_frame(
+ self.example_request_headers, flags=['END_STREAM']
+ )
+ c.receive_data(header_frame.serialize())
+
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_response_headers,
+ end_stream=False
+ )
+
+ rst_frame = frame_factory.build_rst_stream_frame(
+ 1, h2.errors.ErrorCodes.STREAM_CLOSED
+ )
+ c.receive_data(rst_frame.serialize())
+ c.clear_outbound_data_buffer()
+
+ f = frame(self, frame_factory)
+ events = c.receive_data(f.serialize())
+
+ rst_frame = frame_factory.build_rst_stream_frame(
+ 1, h2.errors.ErrorCodes.STREAM_CLOSED
+ )
+ assert not events
+ assert c.data_to_send() == rst_frame.serialize()
+
+ events = c.receive_data(f.serialize() * 3)
+ assert not events
+ assert c.data_to_send() == rst_frame.serialize() * 3
+
+ # Iterate over the streams to make sure it's gone, then confirm the
+ # behaviour is unchanged.
+ c.open_outbound_streams
+
+ events = c.receive_data(f.serialize() * 3)
+ assert not events
+ assert c.data_to_send() == rst_frame.serialize() * 3
+
+ def test_resets_further_data_frames_after_recv_reset(self,
+ frame_factory):
+ """
+        A stream that was closed by receiving RST_STREAM can receive further
+ DATA frames: it simply sends WINDOW_UPDATE for the connection flow
+ window, and RST_STREAM for the stream.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.initiate_connection()
+
+ header_frame = frame_factory.build_headers_frame(
+ self.example_request_headers, flags=['END_STREAM']
+ )
+ c.receive_data(header_frame.serialize())
+
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_response_headers,
+ end_stream=False
+ )
+
+ rst_frame = frame_factory.build_rst_stream_frame(
+ 1, h2.errors.ErrorCodes.STREAM_CLOSED
+ )
+ c.receive_data(rst_frame.serialize())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_data_frame(
+ data=b'some data'
+ )
+
+ events = c.receive_data(f.serialize())
+ assert not events
+
+ expected = frame_factory.build_rst_stream_frame(
+ stream_id=1,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ ).serialize()
+ assert c.data_to_send() == expected
+
+ events = c.receive_data(f.serialize() * 3)
+ assert not events
+ assert c.data_to_send() == expected * 3
+
+ # Iterate over the streams to make sure it's gone, then confirm the
+ # behaviour is unchanged.
+ c.open_outbound_streams
+
+ events = c.receive_data(f.serialize() * 3)
+ assert not events
+ assert c.data_to_send() == expected * 3
+
+ @pytest.mark.parametrize(
+ "frame",
+ [
+ lambda self, ff: ff.build_headers_frame(
+ self.example_request_headers),
+ lambda self, ff: ff.build_headers_frame(
+ self.example_request_headers, flags=['END_STREAM']),
+ ]
+ )
+ def test_resets_further_frames_after_send_reset(self,
+ frame_factory,
+ frame):
+ """
+        A stream that was closed by sending RST_STREAM can receive further
+        frames: it simply sends RST_STREAM for them.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.initiate_connection()
+
+ header_frame = frame_factory.build_headers_frame(
+ self.example_request_headers, flags=['END_STREAM']
+ )
+ c.receive_data(header_frame.serialize())
+
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_response_headers,
+ end_stream=False
+ )
+
+ c.reset_stream(1, h2.errors.ErrorCodes.INTERNAL_ERROR)
+
+ rst_frame = frame_factory.build_rst_stream_frame(
+ 1, h2.errors.ErrorCodes.STREAM_CLOSED
+ )
+ c.clear_outbound_data_buffer()
+
+ f = frame(self, frame_factory)
+ events = c.receive_data(f.serialize())
+
+ rst_frame = frame_factory.build_rst_stream_frame(
+ 1, h2.errors.ErrorCodes.STREAM_CLOSED
+ )
+ assert not events
+ assert c.data_to_send() == rst_frame.serialize()
+
+ events = c.receive_data(f.serialize() * 3)
+ assert not events
+ assert c.data_to_send() == rst_frame.serialize() * 3
+
+ # Iterate over the streams to make sure it's gone, then confirm the
+ # behaviour is unchanged.
+ c.open_outbound_streams
+
+ events = c.receive_data(f.serialize() * 3)
+ assert not events
+ assert c.data_to_send() == rst_frame.serialize() * 3
+
+ def test_resets_further_data_frames_after_send_reset(self,
+ frame_factory):
+ """
+        A stream that was closed by sending RST_STREAM can receive further
+        DATA frames: it simply sends WINDOW_UPDATE and RST_STREAM for them.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.initiate_connection()
+
+ header_frame = frame_factory.build_headers_frame(
+ self.example_request_headers, flags=['END_STREAM']
+ )
+ c.receive_data(header_frame.serialize())
+
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_response_headers,
+ end_stream=False
+ )
+
+ c.reset_stream(1, h2.errors.ErrorCodes.INTERNAL_ERROR)
+
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_data_frame(
+ data=b'some data'
+ )
+ events = c.receive_data(f.serialize())
+ assert not events
+ expected = frame_factory.build_rst_stream_frame(
+ stream_id=1,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ ).serialize()
+ assert c.data_to_send() == expected
+
+ events = c.receive_data(f.serialize() * 3)
+ assert not events
+ assert c.data_to_send() == expected * 3
+
+ # Iterate over the streams to make sure it's gone, then confirm the
+ # behaviour is unchanged.
+ c.open_outbound_streams
+
+ events = c.receive_data(f.serialize() * 3)
+ assert not events
+ assert c.data_to_send() == expected * 3
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_complex_logic.py b/testing/web-platform/tests/tools/third_party/h2/test/test_complex_logic.py
new file mode 100644
index 0000000000..ff90bb8bf2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_complex_logic.py
@@ -0,0 +1,586 @@
+# -*- coding: utf-8 -*-
+"""
+test_complex_logic
+~~~~~~~~~~~~~~~~~~
+
+More complex tests that try to do more.
+
+Certain tests don't really eliminate incorrect behaviour unless they do quite
+a bit. These tests should live here, to keep the pain in one place rather
+than hiding it in other parts of the test suite.
+"""
+import pytest
+
+import h2
+import h2.config
+import h2.connection
+
+
+class TestComplexClient(object):
+ """
+ Complex tests for client-side stacks.
+ """
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+ example_response_headers = [
+ (':status', '200'),
+ ('server', 'fake-serv/0.1.0')
+ ]
+
+ def test_correctly_count_server_streams(self, frame_factory):
+ """
+ We correctly count the number of server streams, both inbound and
+ outbound.
+ """
+ # This test makes no sense unless you do both inbound and outbound,
+ # because it's important to confirm that we count them correctly.
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ expected_inbound_streams = expected_outbound_streams = 0
+
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ for stream_id in range(1, 15, 2):
+ # Open an outbound stream
+ c.send_headers(stream_id, self.example_request_headers)
+ expected_outbound_streams += 1
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ # Receive a pushed stream (to create an inbound one). This doesn't
+ # open until we also receive headers.
+ f = frame_factory.build_push_promise_frame(
+ stream_id=stream_id,
+ promised_stream_id=stream_id+1,
+ headers=self.example_request_headers,
+ )
+ c.receive_data(f.serialize())
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ f = frame_factory.build_headers_frame(
+ stream_id=stream_id+1,
+ headers=self.example_response_headers,
+ )
+ c.receive_data(f.serialize())
+ expected_inbound_streams += 1
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ for stream_id in range(13, 0, -2):
+ # Close an outbound stream.
+ c.end_stream(stream_id)
+
+ # Stream doesn't close until both sides close it.
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ f = frame_factory.build_headers_frame(
+ stream_id=stream_id,
+ headers=self.example_response_headers,
+ flags=['END_STREAM'],
+ )
+ c.receive_data(f.serialize())
+ expected_outbound_streams -= 1
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ # Pushed streams can only be closed remotely.
+ f = frame_factory.build_data_frame(
+ stream_id=stream_id+1,
+ data=b'the content',
+ flags=['END_STREAM'],
+ )
+ c.receive_data(f.serialize())
+ expected_inbound_streams -= 1
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ assert c.open_inbound_streams == 0
+ assert c.open_outbound_streams == 0
+
+
+class TestComplexServer(object):
+ """
+ Complex tests for server-side stacks.
+ """
+ example_request_headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ ]
+ example_response_headers = [
+ (b':status', b'200'),
+ (b'server', b'fake-serv/0.1.0')
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ def test_correctly_count_server_streams(self, frame_factory):
+ """
+ We correctly count the number of server streams, both inbound and
+ outbound.
+ """
+ # This test makes no sense unless you do both inbound and outbound,
+ # because it's important to confirm that we count them correctly.
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ expected_inbound_streams = expected_outbound_streams = 0
+
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ for stream_id in range(1, 15, 2):
+ # Receive an inbound stream.
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ stream_id=stream_id,
+ )
+ c.receive_data(f.serialize())
+ expected_inbound_streams += 1
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+            # Push a stream (to create an outbound one). This doesn't open
+ # until we send our response headers.
+ c.push_stream(stream_id, stream_id+1, self.example_request_headers)
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ c.send_headers(stream_id+1, self.example_response_headers)
+ expected_outbound_streams += 1
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ for stream_id in range(13, 0, -2):
+ # Close an inbound stream.
+ f = frame_factory.build_data_frame(
+ data=b'',
+ flags=['END_STREAM'],
+ stream_id=stream_id,
+ )
+ c.receive_data(f.serialize())
+
+ # Stream doesn't close until both sides close it.
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ c.send_data(stream_id, b'', end_stream=True)
+ expected_inbound_streams -= 1
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ # Pushed streams, however, we can close ourselves.
+ c.send_data(
+ stream_id=stream_id+1,
+ data=b'',
+ end_stream=True,
+ )
+ expected_outbound_streams -= 1
+ assert c.open_inbound_streams == expected_inbound_streams
+ assert c.open_outbound_streams == expected_outbound_streams
+
+ assert c.open_inbound_streams == 0
+ assert c.open_outbound_streams == 0
+
+
+class TestContinuationFrames(object):
+ """
+ Tests for the relatively complex CONTINUATION frame logic.
+ """
+ example_request_headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ def _build_continuation_sequence(self, headers, block_size, frame_factory):
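+        # Split a single encoded header block into a HEADERS frame followed
+        # by CONTINUATION frames of ``block_size`` bytes; END_HEADERS is set
+        # only on the final frame of the sequence.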
+ f = frame_factory.build_headers_frame(headers)
+ header_data = f.data
+ chunks = [
+ header_data[x:x+block_size]
+ for x in range(0, len(header_data), block_size)
+ ]
+ f.data = chunks.pop(0)
+ frames = [
+ frame_factory.build_continuation_frame(c) for c in chunks
+ ]
+ f.flags = {'END_STREAM'}
+ frames[-1].flags.add('END_HEADERS')
+ frames.insert(0, f)
+ return frames
+
+ def test_continuation_frame_basic(self, frame_factory):
+ """
+ Test that we correctly decode a header block split across continuation
+ frames.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ frames = self._build_continuation_sequence(
+ headers=self.example_request_headers,
+ block_size=5,
+ frame_factory=frame_factory,
+ )
+ data = b''.join(f.serialize() for f in frames)
+ events = c.receive_data(data)
+
+ assert len(events) == 2
+ first_event, second_event = events
+
+ assert isinstance(first_event, h2.events.RequestReceived)
+ assert first_event.headers == self.example_request_headers
+ assert first_event.stream_id == 1
+
+ assert isinstance(second_event, h2.events.StreamEnded)
+ assert second_event.stream_id == 1
+
+ @pytest.mark.parametrize('stream_id', [3, 1])
+ def test_continuation_cannot_interleave_headers(self,
+ frame_factory,
+ stream_id):
+ """
+ We cannot interleave a new headers block with a CONTINUATION sequence.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ frames = self._build_continuation_sequence(
+ headers=self.example_request_headers,
+ block_size=5,
+ frame_factory=frame_factory,
+ )
+ assert len(frames) > 2 # This is mostly defensive.
+
+ bogus_frame = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ stream_id=stream_id,
+ flags=['END_STREAM'],
+ )
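+        # Insert the bogus frame into the middle of the CONTINUATION
+        # sequence.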
+ frames.insert(len(frames) - 2, bogus_frame)
+ data = b''.join(f.serialize() for f in frames)
+
+ with pytest.raises(h2.exceptions.ProtocolError) as e:
+ c.receive_data(data)
+
+ assert "invalid frame" in str(e.value).lower()
+
+ def test_continuation_cannot_interleave_data(self, frame_factory):
+ """
+ We cannot interleave a data frame with a CONTINUATION sequence.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ frames = self._build_continuation_sequence(
+ headers=self.example_request_headers,
+ block_size=5,
+ frame_factory=frame_factory,
+ )
+ assert len(frames) > 2 # This is mostly defensive.
+
+ bogus_frame = frame_factory.build_data_frame(
+ data=b'hello',
+ stream_id=1,
+ )
+ frames.insert(len(frames) - 2, bogus_frame)
+ data = b''.join(f.serialize() for f in frames)
+
+ with pytest.raises(h2.exceptions.ProtocolError) as e:
+ c.receive_data(data)
+
+ assert "invalid frame" in str(e.value).lower()
+
+ def test_continuation_cannot_interleave_unknown_frame(self, frame_factory):
+ """
+ We cannot interleave an unknown frame with a CONTINUATION sequence.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ frames = self._build_continuation_sequence(
+ headers=self.example_request_headers,
+ block_size=5,
+ frame_factory=frame_factory,
+ )
+ assert len(frames) > 2 # This is mostly defensive.
+
+ bogus_frame = frame_factory.build_data_frame(
+ data=b'hello',
+ stream_id=1,
+ )
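+        # Frame type 88 is not registered, so this becomes an unknown frame.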
+ bogus_frame.type = 88
+ frames.insert(len(frames) - 2, bogus_frame)
+ data = b''.join(f.serialize() for f in frames)
+
+ with pytest.raises(h2.exceptions.ProtocolError) as e:
+ c.receive_data(data)
+
+ assert "invalid frame" in str(e.value).lower()
+
+ def test_continuation_frame_multiple_blocks(self, frame_factory):
+ """
+ Test that we correctly decode several header blocks split across
+ continuation frames.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ for stream_id in range(1, 7, 2):
+ frames = self._build_continuation_sequence(
+ headers=self.example_request_headers,
+ block_size=2,
+ frame_factory=frame_factory,
+ )
+ for frame in frames:
+ frame.stream_id = stream_id
+
+ data = b''.join(f.serialize() for f in frames)
+ events = c.receive_data(data)
+
+ assert len(events) == 2
+ first_event, second_event = events
+
+ assert isinstance(first_event, h2.events.RequestReceived)
+ assert first_event.headers == self.example_request_headers
+ assert first_event.stream_id == stream_id
+
+ assert isinstance(second_event, h2.events.StreamEnded)
+ assert second_event.stream_id == stream_id
+
+
+class TestContinuationFramesPushPromise(object):
+ """
+ Tests for the relatively complex CONTINUATION frame logic working with
+ PUSH_PROMISE frames.
+ """
+ example_request_headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ ]
+ example_response_headers = [
+ (b':status', b'200'),
+ (b'server', b'fake-serv/0.1.0')
+ ]
+
+ def _build_continuation_sequence(self, headers, block_size, frame_factory):
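+        # Split a single encoded header block into a PUSH_PROMISE frame
+        # followed by CONTINUATION frames of ``block_size`` bytes;
+        # END_HEADERS is set only on the final frame of the sequence.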
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1, promised_stream_id=2, headers=headers
+ )
+ header_data = f.data
+ chunks = [
+ header_data[x:x+block_size]
+ for x in range(0, len(header_data), block_size)
+ ]
+ f.data = chunks.pop(0)
+ frames = [
+ frame_factory.build_continuation_frame(c) for c in chunks
+ ]
+ f.flags = {'END_STREAM'}
+ frames[-1].flags.add('END_HEADERS')
+ frames.insert(0, f)
+ return frames
+
+ def test_continuation_frame_basic_push_promise(self, frame_factory):
+ """
+ Test that we correctly decode a header block split across continuation
+ frames when that header block is initiated with a PUSH_PROMISE.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ frames = self._build_continuation_sequence(
+ headers=self.example_request_headers,
+ block_size=5,
+ frame_factory=frame_factory,
+ )
+ data = b''.join(f.serialize() for f in frames)
+ events = c.receive_data(data)
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.PushedStreamReceived)
+ assert event.headers == self.example_request_headers
+ assert event.parent_stream_id == 1
+ assert event.pushed_stream_id == 2
+
+ @pytest.mark.parametrize('stream_id', [3, 1, 2])
+ def test_continuation_cannot_interleave_headers_pp(self,
+ frame_factory,
+ stream_id):
+ """
+ We cannot interleave a new headers block with a CONTINUATION sequence
+ when the headers block is based on a PUSH_PROMISE frame.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ frames = self._build_continuation_sequence(
+ headers=self.example_request_headers,
+ block_size=5,
+ frame_factory=frame_factory,
+ )
+ assert len(frames) > 2 # This is mostly defensive.
+
+ bogus_frame = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ stream_id=stream_id,
+ flags=['END_STREAM'],
+ )
+ frames.insert(len(frames) - 2, bogus_frame)
+ data = b''.join(f.serialize() for f in frames)
+
+ with pytest.raises(h2.exceptions.ProtocolError) as e:
+ c.receive_data(data)
+
+ assert "invalid frame" in str(e.value).lower()
+
+ def test_continuation_cannot_interleave_data(self, frame_factory):
+ """
+ We cannot interleave a data frame with a CONTINUATION sequence when
+ that sequence began with a PUSH_PROMISE frame.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ frames = self._build_continuation_sequence(
+ headers=self.example_request_headers,
+ block_size=5,
+ frame_factory=frame_factory,
+ )
+ assert len(frames) > 2 # This is mostly defensive.
+
+ bogus_frame = frame_factory.build_data_frame(
+ data=b'hello',
+ stream_id=1,
+ )
+ frames.insert(len(frames) - 2, bogus_frame)
+ data = b''.join(f.serialize() for f in frames)
+
+ with pytest.raises(h2.exceptions.ProtocolError) as e:
+ c.receive_data(data)
+
+ assert "invalid frame" in str(e.value).lower()
+
+ def test_continuation_cannot_interleave_unknown_frame(self, frame_factory):
+ """
+ We cannot interleave an unknown frame with a CONTINUATION sequence when
+ that sequence began with a PUSH_PROMISE frame.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ frames = self._build_continuation_sequence(
+ headers=self.example_request_headers,
+ block_size=5,
+ frame_factory=frame_factory,
+ )
+ assert len(frames) > 2 # This is mostly defensive.
+
+ bogus_frame = frame_factory.build_data_frame(
+ data=b'hello',
+ stream_id=1,
+ )
+ bogus_frame.type = 88
+ frames.insert(len(frames) - 2, bogus_frame)
+ data = b''.join(f.serialize() for f in frames)
+
+ with pytest.raises(h2.exceptions.ProtocolError) as e:
+ c.receive_data(data)
+
+ assert "invalid frame" in str(e.value).lower()
+
+ @pytest.mark.parametrize('evict', [True, False])
+ def test_stream_remotely_closed_disallows_push_promise(self,
+ evict,
+ frame_factory):
+ """
+        PUSH_PROMISE frames are not allowed on streams closed normally by
+        the remote peer; receiving one causes a GOAWAY.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_request_headers,
+ end_stream=True
+ )
+
+ f = frame_factory.build_headers_frame(
+ stream_id=1,
+ headers=self.example_response_headers,
+ flags=['END_STREAM']
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ if evict:
+ # This is annoyingly stateful, but enumerating the list of open
+ # streams will force us to flush state.
+ assert not c.open_outbound_streams
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers,
+ )
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ f = frame_factory.build_goaway_frame(
+ last_stream_id=0,
+ error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
+ )
+ assert c.data_to_send() == f.serialize()
+
+ def test_continuation_frame_multiple_push_promise(self, frame_factory):
+ """
+ Test that we correctly decode header blocks split across continuation
+        frames when those header blocks are initiated with a PUSH_PROMISE,
+        for
+ more than one pushed stream.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ for promised_stream_id in range(2, 8, 2):
+ frames = self._build_continuation_sequence(
+ headers=self.example_request_headers,
+ block_size=2,
+ frame_factory=frame_factory,
+ )
+ frames[0].promised_stream_id = promised_stream_id
+ data = b''.join(f.serialize() for f in frames)
+ events = c.receive_data(data)
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.PushedStreamReceived)
+ assert event.headers == self.example_request_headers
+ assert event.parent_stream_id == 1
+ assert event.pushed_stream_id == promised_stream_id
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_config.py b/testing/web-platform/tests/tools/third_party/h2/test/test_config.py
new file mode 100644
index 0000000000..8eb7fdc862
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_config.py
@@ -0,0 +1,130 @@
+# -*- coding: utf-8 -*-
+"""
+test_config
+~~~~~~~~~~~
+
+Test the configuration object.
+"""
+import logging
+import pytest
+
+import h2.config
+
+
+class TestH2Config(object):
+ """
+ Tests of the H2 config object.
+ """
+ def test_defaults(self):
+ """
+ The default values of the HTTP/2 config object are sensible.
+ """
+ config = h2.config.H2Configuration()
+ assert config.client_side
+ assert config.header_encoding is None
+ assert isinstance(config.logger, h2.config.DummyLogger)
+
+ boolean_config_options = [
+ 'client_side',
+ 'validate_outbound_headers',
+ 'normalize_outbound_headers',
+ 'validate_inbound_headers',
+ 'normalize_inbound_headers',
+ ]
+
+ @pytest.mark.parametrize('option_name', boolean_config_options)
+ @pytest.mark.parametrize('value', [None, 'False', 1])
+ def test_boolean_config_options_reject_non_bools_init(
+ self, option_name, value
+ ):
+ """
+ The boolean config options raise an error if you try to set a value
+ that isn't a boolean via the initializer.
+ """
+ with pytest.raises(ValueError):
+ h2.config.H2Configuration(**{option_name: value})
+
+ @pytest.mark.parametrize('option_name', boolean_config_options)
+ @pytest.mark.parametrize('value', [None, 'False', 1])
+ def test_boolean_config_options_reject_non_bools_attr(
+ self, option_name, value
+ ):
+ """
+ The boolean config options raise an error if you try to set a value
+ that isn't a boolean via attribute setter.
+ """
+ config = h2.config.H2Configuration()
+ with pytest.raises(ValueError):
+ setattr(config, option_name, value)
+
+ @pytest.mark.parametrize('option_name', boolean_config_options)
+ @pytest.mark.parametrize('value', [True, False])
+ def test_boolean_config_option_is_reflected_init(self, option_name, value):
+ """
+        The value of a boolean config option, when set via the initializer,
+        is reflected by the config object.
+ """
+ config = h2.config.H2Configuration(**{option_name: value})
+ assert getattr(config, option_name) == value
+
+ @pytest.mark.parametrize('option_name', boolean_config_options)
+ @pytest.mark.parametrize('value', [True, False])
+ def test_boolean_config_option_is_reflected_attr(self, option_name, value):
+ """
+        The value of a boolean config option, when set via the attribute
+        setter, is reflected by the config object.
+ """
+ config = h2.config.H2Configuration()
+ setattr(config, option_name, value)
+ assert getattr(config, option_name) == value
+
+ @pytest.mark.parametrize('header_encoding', [True, 1, object()])
+ def test_header_encoding_must_be_false_str_none_init(
+ self, header_encoding
+ ):
+ """
+ The value of the ``header_encoding`` setting must be False, a string,
+ or None via the initializer.
+ """
+ with pytest.raises(ValueError):
+ h2.config.H2Configuration(header_encoding=header_encoding)
+
+ @pytest.mark.parametrize('header_encoding', [True, 1, object()])
+ def test_header_encoding_must_be_false_str_none_attr(
+ self, header_encoding
+ ):
+ """
+ The value of the ``header_encoding`` setting must be False, a string,
+ or None via attribute setter.
+ """
+ config = h2.config.H2Configuration()
+ with pytest.raises(ValueError):
+ config.header_encoding = header_encoding
+
+ @pytest.mark.parametrize('header_encoding', [False, 'ascii', None])
+ def test_header_encoding_is_reflected_init(self, header_encoding):
+ """
+        The value of ``header_encoding``, when set via the initializer, is
+        reflected by the config object.
+ """
+ config = h2.config.H2Configuration(header_encoding=header_encoding)
+ assert config.header_encoding == header_encoding
+
+ @pytest.mark.parametrize('header_encoding', [False, 'ascii', None])
+ def test_header_encoding_is_reflected_attr(self, header_encoding):
+ """
+        The value of ``header_encoding``, when set via the attribute setter,
+        is reflected by the config object.
+ """
+ config = h2.config.H2Configuration()
+ config.header_encoding = header_encoding
+ assert config.header_encoding == header_encoding
+
+ def test_logger_instance_is_reflected(self):
+ """
+        The value of ``logger``, when set, is reflected by the config object.
+ """
+ logger = logging.Logger('hyper-h2.test')
+ config = h2.config.H2Configuration()
+ config.logger = logger
+ assert config.logger is logger
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_events.py b/testing/web-platform/tests/tools/third_party/h2/test/test_events.py
new file mode 100644
index 0000000000..a6e8d83790
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_events.py
@@ -0,0 +1,367 @@
+# -*- coding: utf-8 -*-
+"""
+test_events.py
+~~~~~~~~~~~~~~
+
+Specific tests for any function that is logically self-contained as part of
+events.py.
+"""
+import inspect
+import sys
+
+from hypothesis import given
+from hypothesis.strategies import (
+ integers, lists, tuples
+)
+import pytest
+
+import h2.errors
+import h2.events
+import h2.settings
+
+
+# We define a fairly complex Hypothesis strategy here. We want to build a list
+# of two-tuples of (Setting, value). For Setting we want to make sure we can
+# handle settings that the rest of hyper knows nothing about, so we want to
+# use integers from 0 to (2**16-1). For values, they're from 0 to (2**32-1).
+# Define that strategy here for clarity.
+SETTINGS_STRATEGY = lists(
+ tuples(
+ integers(min_value=0, max_value=2**16-1),
+ integers(min_value=0, max_value=2**32-1),
+ )
+)
+
+
+class TestRemoteSettingsChanged(object):
+ """
+ Validate the function of the RemoteSettingsChanged event.
+ """
+ @given(SETTINGS_STRATEGY)
+ def test_building_settings_from_scratch(self, settings_list):
+ """
+ Missing old settings are defaulted to None.
+ """
+ settings_dict = dict(settings_list)
+ e = h2.events.RemoteSettingsChanged.from_settings(
+ old_settings={},
+ new_settings=settings_dict,
+ )
+
+ for setting, new_value in settings_dict.items():
+ assert e.changed_settings[setting].setting == setting
+ assert e.changed_settings[setting].original_value is None
+ assert e.changed_settings[setting].new_value == new_value
+
+ @given(SETTINGS_STRATEGY, SETTINGS_STRATEGY)
+ def test_only_reports_changed_settings(self,
+ old_settings_list,
+ new_settings_list):
+ """
+ Settings that were not changed are not reported.
+ """
+ old_settings_dict = dict(old_settings_list)
+ new_settings_dict = dict(new_settings_list)
+ e = h2.events.RemoteSettingsChanged.from_settings(
+ old_settings=old_settings_dict,
+ new_settings=new_settings_dict,
+ )
+
+ assert len(e.changed_settings) == len(new_settings_dict)
+ assert (
+ sorted(list(e.changed_settings.keys())) ==
+ sorted(list(new_settings_dict.keys()))
+ )
+
+ @given(SETTINGS_STRATEGY, SETTINGS_STRATEGY)
+ def test_correctly_reports_changed_settings(self,
+ old_settings_list,
+ new_settings_list):
+ """
+ Settings that are changed are correctly reported.
+ """
+ old_settings_dict = dict(old_settings_list)
+ new_settings_dict = dict(new_settings_list)
+ e = h2.events.RemoteSettingsChanged.from_settings(
+ old_settings=old_settings_dict,
+ new_settings=new_settings_dict,
+ )
+
+ for setting, new_value in new_settings_dict.items():
+ original_value = old_settings_dict.get(setting)
+ assert e.changed_settings[setting].setting == setting
+ assert e.changed_settings[setting].original_value == original_value
+ assert e.changed_settings[setting].new_value == new_value
+
+
+class TestEventReprs(object):
+ """
+ Events have useful representations.
+ """
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+ example_informational_headers = [
+ (':status', '100'),
+ ('server', 'fake-serv/0.1.0')
+ ]
+ example_response_headers = [
+ (':status', '200'),
+ ('server', 'fake-serv/0.1.0')
+ ]
+
+ def test_requestreceived_repr(self):
+ """
+ RequestReceived has a useful debug representation.
+ """
+ e = h2.events.RequestReceived()
+ e.stream_id = 5
+ e.headers = self.example_request_headers
+
+ assert repr(e) == (
+ "<RequestReceived stream_id:5, headers:["
+ "(':authority', 'example.com'), "
+ "(':path', '/'), "
+ "(':scheme', 'https'), "
+ "(':method', 'GET')]>"
+ )
+
+ def test_responsereceived_repr(self):
+ """
+ ResponseReceived has a useful debug representation.
+ """
+ e = h2.events.ResponseReceived()
+ e.stream_id = 500
+ e.headers = self.example_response_headers
+
+ assert repr(e) == (
+ "<ResponseReceived stream_id:500, headers:["
+ "(':status', '200'), "
+ "('server', 'fake-serv/0.1.0')]>"
+ )
+
+ def test_trailersreceived_repr(self):
+ """
+ TrailersReceived has a useful debug representation.
+ """
+ e = h2.events.TrailersReceived()
+ e.stream_id = 62
+ e.headers = self.example_response_headers
+
+ assert repr(e) == (
+ "<TrailersReceived stream_id:62, headers:["
+ "(':status', '200'), "
+ "('server', 'fake-serv/0.1.0')]>"
+ )
+
+ def test_informationalresponsereceived_repr(self):
+ """
+ InformationalResponseReceived has a useful debug representation.
+ """
+ e = h2.events.InformationalResponseReceived()
+ e.stream_id = 62
+ e.headers = self.example_informational_headers
+
+ assert repr(e) == (
+ "<InformationalResponseReceived stream_id:62, headers:["
+ "(':status', '100'), "
+ "('server', 'fake-serv/0.1.0')]>"
+ )
+
+ def test_datareceived_repr(self):
+ """
+ DataReceived has a useful debug representation.
+ """
+ e = h2.events.DataReceived()
+ e.stream_id = 888
+ e.data = b"abcdefghijklmnopqrstuvwxyz"
+ e.flow_controlled_length = 88
+
+ assert repr(e) == (
+ "<DataReceived stream_id:888, flow_controlled_length:88, "
+ "data:6162636465666768696a6b6c6d6e6f7071727374>"
+ )
+
+ def test_windowupdated_repr(self):
+ """
+ WindowUpdated has a useful debug representation.
+ """
+ e = h2.events.WindowUpdated()
+ e.stream_id = 0
+ e.delta = 2**16
+
+ assert repr(e) == "<WindowUpdated stream_id:0, delta:65536>"
+
+ def test_remotesettingschanged_repr(self):
+ """
+ RemoteSettingsChanged has a useful debug representation.
+ """
+ e = h2.events.RemoteSettingsChanged()
+ e.changed_settings = {
+ h2.settings.SettingCodes.INITIAL_WINDOW_SIZE:
+ h2.settings.ChangedSetting(
+ h2.settings.SettingCodes.INITIAL_WINDOW_SIZE, 2**16, 2**15
+ ),
+ }
+
+ assert repr(e) == (
+ "<RemoteSettingsChanged changed_settings:{ChangedSetting("
+ "setting=SettingCodes.INITIAL_WINDOW_SIZE, original_value=65536, "
+ "new_value=32768)}>"
+ )
+
+ def test_pingreceived_repr(self):
+ """
+ PingReceived has a useful debug representation.
+ """
+ e = h2.events.PingReceived()
+ e.ping_data = b'abcdefgh'
+
+ assert repr(e) == "<PingReceived ping_data:6162636465666768>"
+
+ def test_pingackreceived_repr(self):
+ """
+ PingAckReceived has a useful debug representation.
+ """
+ e = h2.events.PingAckReceived()
+ e.ping_data = b'abcdefgh'
+
+ assert repr(e) == "<PingAckReceived ping_data:6162636465666768>"
+
+ def test_streamended_repr(self):
+ """
+ StreamEnded has a useful debug representation.
+ """
+ e = h2.events.StreamEnded()
+ e.stream_id = 99
+
+ assert repr(e) == "<StreamEnded stream_id:99>"
+
+ def test_streamreset_repr(self):
+ """
+        StreamReset has a useful debug representation.
+ """
+ e = h2.events.StreamReset()
+ e.stream_id = 919
+ e.error_code = h2.errors.ErrorCodes.ENHANCE_YOUR_CALM
+ e.remote_reset = False
+
+ assert repr(e) == (
+ "<StreamReset stream_id:919, "
+ "error_code:ErrorCodes.ENHANCE_YOUR_CALM, remote_reset:False>"
+ )
+
+ def test_pushedstreamreceived_repr(self):
+ """
+ PushedStreamReceived has a useful debug representation.
+ """
+ e = h2.events.PushedStreamReceived()
+ e.pushed_stream_id = 50
+ e.parent_stream_id = 11
+ e.headers = self.example_request_headers
+
+ assert repr(e) == (
+ "<PushedStreamReceived pushed_stream_id:50, parent_stream_id:11, "
+ "headers:["
+ "(':authority', 'example.com'), "
+ "(':path', '/'), "
+ "(':scheme', 'https'), "
+ "(':method', 'GET')]>"
+ )
+
+ def test_settingsacknowledged_repr(self):
+ """
+ SettingsAcknowledged has a useful debug representation.
+ """
+ e = h2.events.SettingsAcknowledged()
+ e.changed_settings = {
+ h2.settings.SettingCodes.INITIAL_WINDOW_SIZE:
+ h2.settings.ChangedSetting(
+ h2.settings.SettingCodes.INITIAL_WINDOW_SIZE, 2**16, 2**15
+ ),
+ }
+
+ assert repr(e) == (
+ "<SettingsAcknowledged changed_settings:{ChangedSetting("
+ "setting=SettingCodes.INITIAL_WINDOW_SIZE, original_value=65536, "
+ "new_value=32768)}>"
+ )
+
+ def test_priorityupdated_repr(self):
+ """
+ PriorityUpdated has a useful debug representation.
+ """
+ e = h2.events.PriorityUpdated()
+ e.stream_id = 87
+ e.weight = 32
+ e.depends_on = 8
+ e.exclusive = True
+
+ assert repr(e) == (
+ "<PriorityUpdated stream_id:87, weight:32, depends_on:8, "
+ "exclusive:True>"
+ )
+
+ @pytest.mark.parametrize("additional_data,data_repr", [
+ (None, "None"),
+ (b'some data', "736f6d652064617461")
+ ])
+ def test_connectionterminated_repr(self, additional_data, data_repr):
+ """
+ ConnectionTerminated has a useful debug representation.
+ """
+ e = h2.events.ConnectionTerminated()
+ e.error_code = h2.errors.ErrorCodes.INADEQUATE_SECURITY
+ e.last_stream_id = 33
+ e.additional_data = additional_data
+
+ assert repr(e) == (
+ "<ConnectionTerminated error_code:ErrorCodes.INADEQUATE_SECURITY, "
+ "last_stream_id:33, additional_data:%s>" % data_repr
+ )
+
+ def test_alternativeserviceavailable_repr(self):
+ """
+ AlternativeServiceAvailable has a useful debug representation.
+ """
+ e = h2.events.AlternativeServiceAvailable()
+ e.origin = b"example.com"
+ e.field_value = b'h2=":8000"; ma=60'
+
+ assert repr(e) == (
+ '<AlternativeServiceAvailable origin:example.com, '
+ 'field_value:h2=":8000"; ma=60>'
+ )
+
+ def test_unknownframereceived_repr(self):
+ """
+ UnknownFrameReceived has a useful debug representation.
+ """
+ e = h2.events.UnknownFrameReceived()
+ assert repr(e) == '<UnknownFrameReceived>'
+
+
+def all_events():
+ """
+ Generates all the classes (i.e., events) defined in h2.events.
+ """
+ for _, obj in inspect.getmembers(sys.modules['h2.events']):
+
+ # We are only interested in objects that are defined in h2.events;
+ # objects that are imported from other modules are not of interest.
+ if hasattr(obj, '__module__') and (obj.__module__ != 'h2.events'):
+ continue
+
+ if inspect.isclass(obj):
+ yield obj
+
+
+@pytest.mark.parametrize('event', all_events())
+def test_all_events_subclass_from_event(event):
+ """
+ Every event defined in h2.events subclasses from h2.events.Event.
+ """
+ assert (event is h2.events.Event) or issubclass(event, h2.events.Event)
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_exceptions.py b/testing/web-platform/tests/tools/third_party/h2/test/test_exceptions.py
new file mode 100644
index 0000000000..1890459978
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_exceptions.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+"""
+test_exceptions
+~~~~~~~~~~~~~~~
+
+Tests that verify logic local to exceptions.
+"""
+import h2.exceptions
+
+
+class TestExceptions(object):
+ def test_stream_id_too_low_prints_properly(self):
+ x = h2.exceptions.StreamIDTooLowError(5, 10)
+
+ assert "StreamIDTooLowError: 5 is lower than 10" == str(x)
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_flow_control_window.py b/testing/web-platform/tests/tools/third_party/h2/test/test_flow_control_window.py
new file mode 100644
index 0000000000..24b345aac3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_flow_control_window.py
@@ -0,0 +1,952 @@
+# -*- coding: utf-8 -*-
+"""
+test_flow_control
+~~~~~~~~~~~~~~~~~
+
+Tests of the flow control management in h2
+"""
+import pytest
+
+from hypothesis import given
+from hypothesis.strategies import integers
+
+import h2.config
+import h2.connection
+import h2.errors
+import h2.events
+import h2.exceptions
+import h2.settings
+
+
+class TestFlowControl(object):
+ """
+ Tests of the flow control management in the connection objects.
+ """
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
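+    # RFC 7540 default initial flow-control window size (2**16 - 1).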
+ DEFAULT_FLOW_WINDOW = 65535
+
+ def test_flow_control_initializes_properly(self):
+ """
+ The flow control window for a stream should initially be the default
+ flow control value.
+ """
+ c = h2.connection.H2Connection()
+ c.send_headers(1, self.example_request_headers)
+
+ assert c.local_flow_control_window(1) == self.DEFAULT_FLOW_WINDOW
+ assert c.remote_flow_control_window(1) == self.DEFAULT_FLOW_WINDOW
+
+ def test_flow_control_decreases_with_sent_data(self):
+ """
+ When data is sent on a stream, the flow control window should drop.
+ """
+ c = h2.connection.H2Connection()
+ c.send_headers(1, self.example_request_headers)
+ c.send_data(1, b'some data')
+
+ remaining_length = self.DEFAULT_FLOW_WINDOW - len(b'some data')
+ assert (c.local_flow_control_window(1) == remaining_length)
+
+ @pytest.mark.parametrize("pad_length", [5, 0])
+ def test_flow_control_decreases_with_sent_data_with_padding(self,
+ pad_length):
+ """
+ When padded data is sent on a stream, the flow control window drops
+ by the length of the padding plus 1 for the 1-byte padding length
+ field.
+ """
+ c = h2.connection.H2Connection()
+ c.send_headers(1, self.example_request_headers)
+
+ c.send_data(1, b'some data', pad_length=pad_length)
+ remaining_length = (
+ self.DEFAULT_FLOW_WINDOW - len(b'some data') - pad_length - 1
+ )
+ assert c.local_flow_control_window(1) == remaining_length
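+
+        # For reference (illustrative arithmetic, not part of the upstream
+        # test suite): RFC 7540 counts padding against flow control, so a
+        # padded DATA frame consumes len(data) + pad_length + 1 bytes, the
+        # extra byte being the Pad Length field itself. With the parameters
+        # above:
+        #
+        #     len(b'some data') + 5 + 1 == 15   # pad_length == 5
+        #     len(b'some data') + 0 + 1 == 10   # pad_length == 0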
+
+ def test_flow_control_decreases_with_received_data(self, frame_factory):
+ """
+ When data is received on a stream, the remote flow control window
+ should drop.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f1 = frame_factory.build_headers_frame(self.example_request_headers)
+ f2 = frame_factory.build_data_frame(b'some data')
+
+ c.receive_data(f1.serialize() + f2.serialize())
+
+ remaining_length = self.DEFAULT_FLOW_WINDOW - len(b'some data')
+ assert (c.remote_flow_control_window(1) == remaining_length)
+
+ def test_flow_control_decreases_with_padded_data(self, frame_factory):
+ """
+ When padded data is received on a stream, the remote flow control
+ window drops by an amount that includes the padding.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f1 = frame_factory.build_headers_frame(self.example_request_headers)
+ f2 = frame_factory.build_data_frame(b'some data', padding_len=10)
+
+ c.receive_data(f1.serialize() + f2.serialize())
+
+ remaining_length = (
+ self.DEFAULT_FLOW_WINDOW - len(b'some data') - 10 - 1
+ )
+ assert (c.remote_flow_control_window(1) == remaining_length)
+
+ def test_flow_control_is_limited_by_connection(self):
+ """
+ The flow control window is limited by the flow control of the
+ connection.
+ """
+ c = h2.connection.H2Connection()
+ c.send_headers(1, self.example_request_headers)
+ c.send_data(1, b'some data')
+ c.send_headers(3, self.example_request_headers)
+
+ remaining_length = self.DEFAULT_FLOW_WINDOW - len(b'some data')
+ assert (c.local_flow_control_window(3) == remaining_length)
+
+ def test_remote_flow_control_is_limited_by_connection(self, frame_factory):
+ """
+ The remote flow control window is limited by the flow control of the
+ connection.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ f1 = frame_factory.build_headers_frame(self.example_request_headers)
+ f2 = frame_factory.build_data_frame(b'some data')
+ f3 = frame_factory.build_headers_frame(
+ self.example_request_headers,
+ stream_id=3,
+ )
+ c.receive_data(f1.serialize() + f2.serialize() + f3.serialize())
+
+ remaining_length = self.DEFAULT_FLOW_WINDOW - len(b'some data')
+ assert (c.remote_flow_control_window(3) == remaining_length)
+
+ def test_cannot_send_more_data_than_window(self):
+ """
+ Sending more data than the remaining flow control window raises a
+ FlowControlError.
+ """
+ c = h2.connection.H2Connection()
+ c.send_headers(1, self.example_request_headers)
+ c.outbound_flow_control_window = 5
+
+ with pytest.raises(h2.exceptions.FlowControlError):
+ c.send_data(1, b'some data')
+
+ def test_increasing_connection_window_allows_sending(self, frame_factory):
+ """
+ Confirm that receiving a WINDOW_UPDATE frame for the connection frees
+ up space to send further data.
+ """
+ c = h2.connection.H2Connection()
+ c.send_headers(1, self.example_request_headers)
+ c.outbound_flow_control_window = 5
+
+ with pytest.raises(h2.exceptions.FlowControlError):
+ c.send_data(1, b'some data')
+
+ f = frame_factory.build_window_update_frame(
+ stream_id=0,
+ increment=5,
+ )
+ c.receive_data(f.serialize())
+
+ c.clear_outbound_data_buffer()
+ c.send_data(1, b'some data')
+ assert c.data_to_send()
+
+ def test_increasing_stream_window_allows_sending(self, frame_factory):
+ """
+ Confirm that receiving a WINDOW_UPDATE frame for the stream frees up
+ space to send further data on that stream.
+ """
+ c = h2.connection.H2Connection()
+ c.send_headers(1, self.example_request_headers)
+ c._get_stream_by_id(1).outbound_flow_control_window = 5
+
+ with pytest.raises(h2.exceptions.FlowControlError):
+ c.send_data(1, b'some data')
+
+ f = frame_factory.build_window_update_frame(
+ stream_id=1,
+ increment=5,
+ )
+ c.receive_data(f.serialize())
+
+ c.clear_outbound_data_buffer()
+ c.send_data(1, b'some data')
+ assert c.data_to_send()
+
+ def test_flow_control_shrinks_in_response_to_settings(self, frame_factory):
+ """
+ Acknowledging SETTINGS_INITIAL_WINDOW_SIZE shrinks the flow control
+ window.
+ """
+ c = h2.connection.H2Connection()
+ c.send_headers(1, self.example_request_headers)
+
+ assert c.local_flow_control_window(1) == 65535
+
+ f = frame_factory.build_settings_frame(
+ settings={h2.settings.SettingCodes.INITIAL_WINDOW_SIZE: 1280}
+ )
+ c.receive_data(f.serialize())
+
+ assert c.local_flow_control_window(1) == 1280
+
+ def test_flow_control_grows_in_response_to_settings(self, frame_factory):
+ """
+ Acknowledging SETTINGS_INITIAL_WINDOW_SIZE grows the flow control
+ window.
+ """
+ c = h2.connection.H2Connection()
+ c.send_headers(1, self.example_request_headers)
+
+ # Greatly increase the connection flow control window.
+ f = frame_factory.build_window_update_frame(
+ stream_id=0, increment=128000
+ )
+ c.receive_data(f.serialize())
+
+ # The stream flow control window is the bottleneck here.
+ assert c.local_flow_control_window(1) == 65535
+
+ f = frame_factory.build_settings_frame(
+ settings={h2.settings.SettingCodes.INITIAL_WINDOW_SIZE: 128000}
+ )
+ c.receive_data(f.serialize())
+
+ # The stream window is still the bottleneck, but larger now.
+ assert c.local_flow_control_window(1) == 128000
+
+ def test_flow_control_settings_blocked_by_conn_window(self, frame_factory):
+ """
+ Changing SETTINGS_INITIAL_WINDOW_SIZE does not affect the effective
+ flow control window if the connection window isn't changed.
+ """
+ c = h2.connection.H2Connection()
+ c.send_headers(1, self.example_request_headers)
+
+ assert c.local_flow_control_window(1) == 65535
+
+ f = frame_factory.build_settings_frame(
+ settings={h2.settings.SettingCodes.INITIAL_WINDOW_SIZE: 128000}
+ )
+ c.receive_data(f.serialize())
+
+ assert c.local_flow_control_window(1) == 65535
+
+ def test_new_streams_have_flow_control_per_settings(self, frame_factory):
+ """
+ After a SETTINGS_INITIAL_WINDOW_SIZE change is received, new streams
+ have appropriate new flow control windows.
+ """
+ c = h2.connection.H2Connection()
+
+ f = frame_factory.build_settings_frame(
+ settings={h2.settings.SettingCodes.INITIAL_WINDOW_SIZE: 128000}
+ )
+ c.receive_data(f.serialize())
+
+ # Greatly increase the connection flow control window.
+ f = frame_factory.build_window_update_frame(
+ stream_id=0, increment=128000
+ )
+ c.receive_data(f.serialize())
+
+ c.send_headers(1, self.example_request_headers)
+ assert c.local_flow_control_window(1) == 128000
+
+ def test_window_update_no_stream(self, frame_factory):
+ """
+ WindowUpdate frames received without streams fire an appropriate
+ WindowUpdated event.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_window_update_frame(
+ stream_id=0,
+ increment=5
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.WindowUpdated)
+ assert event.stream_id == 0
+ assert event.delta == 5
+
+ def test_window_update_with_stream(self, frame_factory):
+ """
+ WindowUpdate frames received with streams fire an appropriate
+ WindowUpdated event.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ f1 = frame_factory.build_headers_frame(self.example_request_headers)
+ f2 = frame_factory.build_window_update_frame(
+ stream_id=1,
+ increment=66
+ )
+ data = b''.join(map(lambda f: f.serialize(), [f1, f2]))
+ events = c.receive_data(data)
+
+ assert len(events) == 2
+ event = events[1]
+
+ assert isinstance(event, h2.events.WindowUpdated)
+ assert event.stream_id == 1
+ assert event.delta == 66
+
+ def test_we_can_increment_stream_flow_control(self, frame_factory):
+ """
+ It is possible for the user to increase the flow control window for
+ streams.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+ c.clear_outbound_data_buffer()
+
+ expected_frame = frame_factory.build_window_update_frame(
+ stream_id=1,
+ increment=5
+ )
+
+ events = c.increment_flow_control_window(increment=5, stream_id=1)
+ assert not events
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_we_can_increment_connection_flow_control(self, frame_factory):
+ """
+ It is possible for the user to increase the flow control window for
+ the entire connection.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+ c.clear_outbound_data_buffer()
+
+ expected_frame = frame_factory.build_window_update_frame(
+ stream_id=0,
+ increment=5
+ )
+
+ events = c.increment_flow_control_window(increment=5)
+ assert not events
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_we_enforce_our_flow_control_window(self, frame_factory):
+ """
+ The user can set a low flow control window, which leads to connection
+ teardown if violated.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ # Change the flow control window to 80 bytes.
+ c.update_settings(
+ {h2.settings.SettingCodes.INITIAL_WINDOW_SIZE: 80}
+ )
+ f = frame_factory.build_settings_frame({}, ack=True)
+ c.receive_data(f.serialize())
+
+ # Receive a new stream.
+ f = frame_factory.build_headers_frame(self.example_request_headers)
+ c.receive_data(f.serialize())
+
+ # Attempt to violate the flow control window.
+ c.clear_outbound_data_buffer()
+ f = frame_factory.build_data_frame(b'\x01' * 100)
+
+ with pytest.raises(h2.exceptions.FlowControlError):
+ c.receive_data(f.serialize())
+
+ # Verify we tear down appropriately.
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1,
+ error_code=h2.errors.ErrorCodes.FLOW_CONTROL_ERROR,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_shrink_remote_flow_control_settings(self, frame_factory):
+ """
+ The remote peer acknowledging our SETTINGS_INITIAL_WINDOW_SIZE shrinks
+ the flow control window.
+ """
+ c = h2.connection.H2Connection()
+ c.send_headers(1, self.example_request_headers)
+
+ assert c.remote_flow_control_window(1) == 65535
+
+ c.update_settings({h2.settings.SettingCodes.INITIAL_WINDOW_SIZE: 1280})
+
+ f = frame_factory.build_settings_frame({}, ack=True)
+ c.receive_data(f.serialize())
+
+ assert c.remote_flow_control_window(1) == 1280
+
+ def test_grow_remote_flow_control_settings(self, frame_factory):
+ """
+ The remote peer acknowledging our SETTINGS_INITIAL_WINDOW_SIZE grows
+ the flow control window.
+ """
+ c = h2.connection.H2Connection()
+ c.send_headers(1, self.example_request_headers)
+
+ # Increase the connection flow control window greatly.
+ c.increment_flow_control_window(increment=128000)
+
+ assert c.remote_flow_control_window(1) == 65535
+
+ c.update_settings(
+ {h2.settings.SettingCodes.INITIAL_WINDOW_SIZE: 128000}
+ )
+ f = frame_factory.build_settings_frame({}, ack=True)
+ c.receive_data(f.serialize())
+
+ assert c.remote_flow_control_window(1) == 128000
+
+ def test_new_streams_have_remote_flow_control(self, frame_factory):
+ """
+ After a SETTINGS_INITIAL_WINDOW_SIZE change is acknowledged by the
+ remote peer, new streams have appropriate new flow control windows.
+ """
+ c = h2.connection.H2Connection()
+
+ c.update_settings(
+ {h2.settings.SettingCodes.INITIAL_WINDOW_SIZE: 128000}
+ )
+ f = frame_factory.build_settings_frame({}, ack=True)
+ c.receive_data(f.serialize())
+
+ # Increase the connection flow control window greatly.
+ c.increment_flow_control_window(increment=128000)
+
+ c.send_headers(1, self.example_request_headers)
+ assert c.remote_flow_control_window(1) == 128000
+
+ @pytest.mark.parametrize(
+ 'increment', [0, -15, 2**31]
+ )
+ def test_reject_bad_attempts_to_increment_flow_control(self, increment):
+ """
+ Attempting to increment the flow control window by an amount outside
+ the valid range raises a ValueError.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+ c.clear_outbound_data_buffer()
+
+ # Fails both on and off streams.
+ with pytest.raises(ValueError):
+ c.increment_flow_control_window(increment=increment, stream_id=1)
+
+ with pytest.raises(ValueError):
+ c.increment_flow_control_window(increment=increment)
+
+ @pytest.mark.parametrize('stream_id', [0, 1])
+ def test_reject_bad_remote_increments(self, frame_factory, stream_id):
+ """
+ Remote peers attempting to increment flow control outside the valid
+ range cause connection errors of type PROTOCOL_ERROR.
+ """
+ # The only number that can be encoded in a WINDOW_UPDATE frame but
+ # isn't valid is 0.
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_window_update_frame(
+ stream_id=stream_id, increment=0
+ )
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=0,
+ error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
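+
+        # Editorial aside, not part of the upstream suite: WINDOW_UPDATE
+        # carries a 31-bit unsigned increment, so the only wire-encodable
+        # value that is invalid is zero. Locally, h2 enforces the same
+        # 1..2**31-1 range and raises ValueError for anything outside it, as
+        # the previous test demonstrates.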
+
+ def test_reject_increasing_connection_window_too_far(self, frame_factory):
+ """
+ Attempts by the remote peer to increase the connection flow control
+ window beyond 2**31 - 1 are rejected.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.clear_outbound_data_buffer()
+
+ increment = 2**31 - c.outbound_flow_control_window
+
+ f = frame_factory.build_window_update_frame(
+ stream_id=0, increment=increment
+ )
+
+ with pytest.raises(h2.exceptions.FlowControlError):
+ c.receive_data(f.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=0,
+ error_code=h2.errors.ErrorCodes.FLOW_CONTROL_ERROR,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_reject_increasing_stream_window_too_far(self, frame_factory):
+ """
+ Attempts by the remote peer to increase the stream flow control window
+ beyond 2**31 - 1 are rejected.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+ c.clear_outbound_data_buffer()
+
+ increment = 2**31 - c.outbound_flow_control_window
+
+ f = frame_factory.build_window_update_frame(
+ stream_id=1, increment=increment
+ )
+
+ events = c.receive_data(f.serialize())
+ assert len(events) == 1
+
+ event = events[0]
+ assert isinstance(event, h2.events.StreamReset)
+ assert event.stream_id == 1
+ assert event.error_code == h2.errors.ErrorCodes.FLOW_CONTROL_ERROR
+ assert not event.remote_reset
+
+ expected_frame = frame_factory.build_rst_stream_frame(
+ stream_id=1,
+ error_code=h2.errors.ErrorCodes.FLOW_CONTROL_ERROR,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_reject_overlarge_conn_window_settings(self, frame_factory):
+ """
+ SETTINGS frames cannot change the size of the connection flow control
+ window.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # Go one byte smaller than the limit.
+ increment = 2**31 - 1 - c.outbound_flow_control_window
+
+ f = frame_factory.build_window_update_frame(
+ stream_id=0, increment=increment
+ )
+ c.receive_data(f.serialize())
+
+ # Receive an increment to the initial window size.
+ f = frame_factory.build_settings_frame(
+ settings={
+ h2.settings.SettingCodes.INITIAL_WINDOW_SIZE:
+ self.DEFAULT_FLOW_WINDOW + 1
+ }
+ )
+ c.clear_outbound_data_buffer()
+
+ # No error is encountered.
+ events = c.receive_data(f.serialize())
+ assert len(events) == 1
+ assert isinstance(events[0], h2.events.RemoteSettingsChanged)
+
+ expected_frame = frame_factory.build_settings_frame(
+ settings={},
+ ack=True
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_reject_overlarge_stream_window_settings(self, frame_factory):
+ """
+ Remote attempts to create overlarge stream windows via SETTINGS frames
+ are rejected.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+
+ # Go one byte smaller than the limit.
+ increment = 2**31 - 1 - c.outbound_flow_control_window
+
+ f = frame_factory.build_window_update_frame(
+ stream_id=1, increment=increment
+ )
+ c.receive_data(f.serialize())
+
+ # Receive an increment to the initial window size.
+ f = frame_factory.build_settings_frame(
+ settings={
+ h2.settings.SettingCodes.INITIAL_WINDOW_SIZE:
+ self.DEFAULT_FLOW_WINDOW + 1
+ }
+ )
+ c.clear_outbound_data_buffer()
+ with pytest.raises(h2.exceptions.FlowControlError):
+ c.receive_data(f.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=0,
+ error_code=h2.errors.ErrorCodes.FLOW_CONTROL_ERROR,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_reject_local_overlarge_increase_connection_window(self):
+ """
+ Local attempts to increase the connection window too far are rejected.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ increment = 2**31 - c.inbound_flow_control_window
+
+ with pytest.raises(h2.exceptions.FlowControlError):
+ c.increment_flow_control_window(increment=increment)
+
+ def test_reject_local_overlarge_increase_stream_window(self):
+ """
+ Local attempts to increase the stream window too far are rejected.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+
+ increment = 2**31 - c.inbound_flow_control_window
+
+ with pytest.raises(h2.exceptions.FlowControlError):
+ c.increment_flow_control_window(increment=increment, stream_id=1)
+
+ def test_send_update_on_closed_streams(self, frame_factory):
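+ """
+ Data received on a stream that has already been reset locally is
+ answered with RST_STREAM frames, while the flow-controlled bytes are
+ still returned to the connection window via WINDOW_UPDATE.
+ """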
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+ c.reset_stream(1)
+
+ c.clear_outbound_data_buffer()
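+ # Accessing these properties nudges h2 to clean up the closed stream
+ # (compare the force_cleanup case in TestAutomaticFlowControl below).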
+ c.open_outbound_streams
+ c.open_inbound_streams
+
+ f = frame_factory.build_data_frame(b'some data'*1500)
+ events = c.receive_data(f.serialize()*3)
+ assert not events
+
+ expected = frame_factory.build_rst_stream_frame(
+ stream_id=1,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ ).serialize() * 2 + frame_factory.build_window_update_frame(
+ stream_id=0,
+ increment=40500,
+ ).serialize() + frame_factory.build_rst_stream_frame(
+ stream_id=1,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ ).serialize()
+ assert c.data_to_send() == expected
+
+ f = frame_factory.build_data_frame(b'')
+ events = c.receive_data(f.serialize())
+ assert not events
+
+ expected = frame_factory.build_rst_stream_frame(
+ stream_id=1,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ ).serialize()
+ assert c.data_to_send() == expected
+
+
+class TestAutomaticFlowControl(object):
+ """
+ Tests for the automatic flow control logic.
+ """
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ DEFAULT_FLOW_WINDOW = 65535
+
+ def _setup_connection_and_send_headers(self, frame_factory):
+ """
+ Set up a server-side H2Connection, receive a request headers frame and
+ then clear the outbound data buffer. Also increase the maximum frame
+ size.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ c.update_settings(
+ {h2.settings.SettingCodes.MAX_FRAME_SIZE: self.DEFAULT_FLOW_WINDOW}
+ )
+ settings_frame = frame_factory.build_settings_frame(
+ settings={}, ack=True
+ )
+ c.receive_data(settings_frame.serialize())
+ c.clear_outbound_data_buffer()
+
+ headers_frame = frame_factory.build_headers_frame(
+ headers=self.example_request_headers
+ )
+ c.receive_data(headers_frame.serialize())
+ c.clear_outbound_data_buffer()
+ return c
+
+ @given(stream_id=integers(max_value=0))
+ def test_must_acknowledge_for_stream(self, frame_factory, stream_id):
+ """
+ Flow control acknowledgements must be done on a stream ID that is
+ greater than zero.
+ """
+ # We need to refresh the encoder because hypothesis has a problem with
+ # integrating with py.test, meaning that we use the same frame factory
+ # for all tests.
+ # See https://github.com/HypothesisWorks/hypothesis-python/issues/377
+ frame_factory.refresh_encoder()
+
+ # Create a connection in a state that might actually accept
+ # data acknowledgement.
+ c = self._setup_connection_and_send_headers(frame_factory)
+ data_frame = frame_factory.build_data_frame(
+ b'some data', flags=['END_STREAM']
+ )
+ c.receive_data(data_frame.serialize())
+
+ with pytest.raises(ValueError):
+ c.acknowledge_received_data(
+ acknowledged_size=5, stream_id=stream_id
+ )
+
+ @given(size=integers(max_value=-1))
+ def test_cannot_acknowledge_less_than_zero(self, frame_factory, size):
+ """
+ The user must acknowledge at least 0 bytes.
+ """
+ # We need to refresh the encoder because hypothesis has a problem with
+ # integrating with py.test, meaning that we use the same frame factory
+ # for all tests.
+ # See https://github.com/HypothesisWorks/hypothesis-python/issues/377
+ frame_factory.refresh_encoder()
+
+ # Create a connection in a state that might actually accept
+ # data acknowledgement.
+ c = self._setup_connection_and_send_headers(frame_factory)
+ data_frame = frame_factory.build_data_frame(
+ b'some data', flags=['END_STREAM']
+ )
+ c.receive_data(data_frame.serialize())
+
+ with pytest.raises(ValueError):
+ c.acknowledge_received_data(acknowledged_size=size, stream_id=1)
+
+ def test_acknowledging_small_chunks_does_nothing(self, frame_factory):
+ """
+ When a small amount of data is received and acknowledged, no window
+ update is emitted.
+ """
+ c = self._setup_connection_and_send_headers(frame_factory)
+
+ data_frame = frame_factory.build_data_frame(
+ b'some data', flags=['END_STREAM']
+ )
+ data_event = c.receive_data(data_frame.serialize())[0]
+
+ c.acknowledge_received_data(
+ data_event.flow_controlled_length, stream_id=1
+ )
+
+ assert not c.data_to_send()
+
+ def test_acknowledging_no_data_does_nothing(self, frame_factory):
+ """
+ If a user accidentally acknowledges no data, nothing happens.
+ """
+ c = self._setup_connection_and_send_headers(frame_factory)
+
+ # Send an empty data frame, just to give the user impetus to ack the
+ # data.
+ data_frame = frame_factory.build_data_frame(b'')
+ c.receive_data(data_frame.serialize())
+
+ c.acknowledge_received_data(0, stream_id=1)
+ assert not c.data_to_send()
+
+ @pytest.mark.parametrize('force_cleanup', (True, False))
+ def test_acknowledging_data_on_closed_stream(self,
+ frame_factory,
+ force_cleanup):
+ """
+ When acknowledging data on a stream that has just been closed, no
+ acknowledgement is given for that stream, only for the connection.
+ """
+ c = self._setup_connection_and_send_headers(frame_factory)
+
+ data_to_send = b'\x00' * self.DEFAULT_FLOW_WINDOW
+ data_frame = frame_factory.build_data_frame(data_to_send)
+ c.receive_data(data_frame.serialize())
+
+ rst_frame = frame_factory.build_rst_stream_frame(
+ stream_id=1
+ )
+ c.receive_data(rst_frame.serialize())
+ c.clear_outbound_data_buffer()
+
+ if force_cleanup:
+ # Check how many streams are open to force the old one to be
+ # cleaned up.
+ assert c.open_outbound_streams == 0
+
+ c.acknowledge_received_data(2048, stream_id=1)
+
+ expected = frame_factory.build_window_update_frame(
+ stream_id=0, increment=2048
+ )
+ assert c.data_to_send() == expected.serialize()
+
+ def test_acknowledging_streams_we_never_saw(self, frame_factory):
+ """
+ If the user acknowledges a stream ID we've never seen, that raises a
+ NoSuchStreamError.
+ """
+ c = self._setup_connection_and_send_headers(frame_factory)
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.NoSuchStreamError):
+ c.acknowledge_received_data(2048, stream_id=101)
+
+ @given(integers(min_value=1025, max_value=DEFAULT_FLOW_WINDOW))
+ def test_acknowledging_1024_bytes_when_empty_increments(self,
+ frame_factory,
+ increment):
+ """
+ If the flow control window is empty and we acknowledge 1024 bytes or
+ more, we will emit a WINDOW_UPDATE frame just to move the connection
+ forward.
+ """
+ # We need to refresh the encoder because hypothesis has a problem with
+ # integrating with py.test, meaning that we use the same frame factory
+ # for all tests.
+ # See https://github.com/HypothesisWorks/hypothesis-python/issues/377
+ frame_factory.refresh_encoder()
+
+ c = self._setup_connection_and_send_headers(frame_factory)
+
+ data_to_send = b'\x00' * self.DEFAULT_FLOW_WINDOW
+ data_frame = frame_factory.build_data_frame(data_to_send)
+ c.receive_data(data_frame.serialize())
+
+ c.acknowledge_received_data(increment, stream_id=1)
+
+ first_expected = frame_factory.build_window_update_frame(
+ stream_id=0, increment=increment
+ )
+ second_expected = frame_factory.build_window_update_frame(
+ stream_id=1, increment=increment
+ )
+ expected_data = b''.join(
+ [first_expected.serialize(), second_expected.serialize()]
+ )
+ assert c.data_to_send() == expected_data
+
+ # This test needs to use a lower cap, because otherwise the algo will
+ # increment the stream window anyway.
+ @given(integers(min_value=1025, max_value=(DEFAULT_FLOW_WINDOW // 4) - 1))
+ def test_connection_only_empty(self, frame_factory, increment):
+ """
+ If the connection flow control window is empty, but the stream flow
+ control windows aren't, and 1024 bytes or more are acknowledged by the
+ user, we increment the connection window only.
+ """
+ # We need to refresh the encoder because hypothesis has a problem with
+ # integrating with py.test, meaning that we use the same frame factory
+ # for all tests.
+ # See https://github.com/HypothesisWorks/hypothesis-python/issues/377
+ frame_factory.refresh_encoder()
+
+ # Here we'll use 4 streams. Set them up.
+ c = self._setup_connection_and_send_headers(frame_factory)
+
+ for stream_id in [3, 5, 7]:
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers, stream_id=stream_id
+ )
+ c.receive_data(f.serialize())
+
+ # Now we send 1/4 of the connection window per stream. Annoyingly,
+ # that's an odd number, so we need to round the last frame up.
+ data_to_send = b'\x00' * (self.DEFAULT_FLOW_WINDOW // 4)
+ for stream_id in [1, 3, 5]:
+ f = frame_factory.build_data_frame(
+ data_to_send, stream_id=stream_id
+ )
+ c.receive_data(f.serialize())
+
+ data_to_send = b'\x00' * c.remote_flow_control_window(7)
+ data_frame = frame_factory.build_data_frame(data_to_send, stream_id=7)
+ c.receive_data(data_frame.serialize())
+
+ # Ok, now the actual test.
+ c.acknowledge_received_data(increment, stream_id=1)
+
+ expected_data = frame_factory.build_window_update_frame(
+ stream_id=0, increment=increment
+ ).serialize()
+ assert c.data_to_send() == expected_data
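+
+        # Illustrative arithmetic for the split above (not part of the
+        # upstream suite): DEFAULT_FLOW_WINDOW // 4 == 16383, so streams 1, 3
+        # and 5 consume 3 * 16383 == 49149 bytes and stream 7 takes the
+        # remaining 65535 - 49149 == 16386 bytes, emptying the connection
+        # window exactly.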
+
+ @given(integers(min_value=1025, max_value=DEFAULT_FLOW_WINDOW))
+ def test_mixing_update_forms(self, frame_factory, increment):
+ """
+ If the user mixes acknowledging data with manually incrementing windows,
+ we still keep track of what's going on.
+ """
+ # We need to refresh the encoder because hypothesis has a problem with
+ # integrating with py.test, meaning that we use the same frame factory
+ # for all tests.
+ # See https://github.com/HypothesisWorks/hypothesis-python/issues/377
+ frame_factory.refresh_encoder()
+
+ # Empty the flow control window.
+ c = self._setup_connection_and_send_headers(frame_factory)
+ data_to_send = b'\x00' * self.DEFAULT_FLOW_WINDOW
+ data_frame = frame_factory.build_data_frame(data_to_send)
+ c.receive_data(data_frame.serialize())
+
+ # Manually increment the connection flow control window back to fully
+ # open, but leave the stream window closed.
+ c.increment_flow_control_window(
+ stream_id=None, increment=self.DEFAULT_FLOW_WINDOW
+ )
+ c.clear_outbound_data_buffer()
+
+ # Now, acknowledge the receipt of that data. This should cause the
+ # stream window to be widened, but not the connection window, because
+ # it is already open.
+ c.acknowledge_received_data(increment, stream_id=1)
+
+ # We expect to see one window update frame only, for the stream.
+ expected_data = frame_factory.build_window_update_frame(
+ stream_id=1, increment=increment
+ ).serialize()
+ assert c.data_to_send() == expected_data
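+
+
+# The function below is an editorial sketch and not part of the upstream test
+# suite. It shows the manual flow-control pattern exercised above: growing the
+# inbound window by hand with increment_flow_control_window(), which queues
+# WINDOW_UPDATE frames. (The automatic alternative, shown in
+# TestAutomaticFlowControl, is to call acknowledge_received_data() with each
+# DataReceived event's flow_controlled_length once the data is processed.)
+def _illustrative_manual_window_updates():
+    conn = h2.connection.H2Connection()
+    conn.initiate_connection()
+    conn.send_headers(1, [
+        (':authority', 'example.com'),
+        (':path', '/'),
+        (':scheme', 'https'),
+        (':method', 'GET'),
+    ])
+    # Reopen the receive window for the whole connection and for stream 1;
+    # both increments sit well inside the 1..2**31-1 range h2 enforces.
+    conn.increment_flow_control_window(increment=2 ** 16)
+    conn.increment_flow_control_window(increment=2 ** 16, stream_id=1)
+    # The preamble, the request HEADERS and both WINDOW_UPDATE frames are now
+    # queued for the transport.
+    return conn.data_to_send()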
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_h2_upgrade.py b/testing/web-platform/tests/tools/third_party/h2/test/test_h2_upgrade.py
new file mode 100644
index 0000000000..d63d44f3f7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_h2_upgrade.py
@@ -0,0 +1,302 @@
+# -*- coding: utf-8 -*-
+"""
+test_h2_upgrade.py
+~~~~~~~~~~~~~~~~~~
+
+This module contains tests that exercise the HTTP Upgrade functionality of
+hyper-h2, ensuring that clients and servers can upgrade their plaintext
+HTTP/1.1 connections to HTTP/2.
+"""
+import base64
+
+import pytest
+
+import h2.config
+import h2.connection
+import h2.errors
+import h2.events
+import h2.exceptions
+
+
+class TestClientUpgrade(object):
+ """
+ Tests of the client-side of the HTTP/2 upgrade dance.
+ """
+ example_request_headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ ]
+ example_response_headers = [
+ (b':status', b'200'),
+ (b'server', b'fake-serv/0.1.0')
+ ]
+
+ def test_returns_http2_settings(self, frame_factory):
+ """
+ Calling initiate_upgrade_connection returns the base64url-encoded body
+ of a SETTINGS frame carrying the settings used by the connection.
+ """
+ conn = h2.connection.H2Connection()
+ data = conn.initiate_upgrade_connection()
+
+ # The base64 encoding must not be padded.
+ assert not data.endswith(b'=')
+
+ # Each setting is a 6-byte entry, so the body length is a multiple of
+ # three and its base64url encoding never needs padding to decode.
+ decoded_frame = base64.urlsafe_b64decode(data)
+ expected_frame = frame_factory.build_settings_frame(
+ settings=conn.local_settings
+ )
+ assert decoded_frame == expected_frame.serialize_body()
+
+ def test_emits_preamble(self, frame_factory):
+ """
+ Calling initiate_upgrade_connection emits the connection preamble.
+ """
+ conn = h2.connection.H2Connection()
+ conn.initiate_upgrade_connection()
+
+ data = conn.data_to_send()
+ assert data.startswith(frame_factory.preamble())
+
+ data = data[len(frame_factory.preamble()):]
+ expected_frame = frame_factory.build_settings_frame(
+ settings=conn.local_settings
+ )
+ assert data == expected_frame.serialize()
+
+ def test_can_receive_response(self, frame_factory):
+ """
+ After upgrading, we can safely receive a response.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_upgrade_connection()
+ c.clear_outbound_data_buffer()
+
+ f1 = frame_factory.build_headers_frame(
+ stream_id=1,
+ headers=self.example_response_headers,
+ )
+ f2 = frame_factory.build_data_frame(
+ stream_id=1,
+ data=b'some data',
+ flags=['END_STREAM']
+ )
+ events = c.receive_data(f1.serialize() + f2.serialize())
+ assert len(events) == 3
+
+ assert isinstance(events[0], h2.events.ResponseReceived)
+ assert isinstance(events[1], h2.events.DataReceived)
+ assert isinstance(events[2], h2.events.StreamEnded)
+
+ assert events[0].headers == self.example_response_headers
+ assert events[1].data == b'some data'
+ assert all(e.stream_id == 1 for e in events)
+
+ assert not c.data_to_send()
+
+ def test_can_receive_pushed_stream(self, frame_factory):
+ """
+ After upgrading, we can safely receive a pushed stream.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_upgrade_connection()
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers,
+ )
+ events = c.receive_data(f.serialize())
+ assert len(events) == 1
+
+ assert isinstance(events[0], h2.events.PushedStreamReceived)
+ assert events[0].headers == self.example_request_headers
+ assert events[0].parent_stream_id == 1
+ assert events[0].pushed_stream_id == 2
+
+ def test_cannot_send_headers_stream_1(self, frame_factory):
+ """
+ After upgrading, we cannot send headers on stream 1.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_upgrade_connection()
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ def test_cannot_send_data_stream_1(self, frame_factory):
+ """
+ After upgrading, we cannot send data on stream 1.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_upgrade_connection()
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.send_data(stream_id=1, data=b'some data')
+
+
+class TestServerUpgrade(object):
+ """
+ Tests of the server-side of the HTTP/2 upgrade dance.
+ """
+ example_request_headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ ]
+ example_response_headers = [
+ (b':status', b'200'),
+ (b'server', b'fake-serv/0.1.0')
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ def test_returns_nothing(self, frame_factory):
+ """
+ Calling initiate_upgrade_connection returns nothing.
+ """
+ conn = h2.connection.H2Connection(config=self.server_config)
+ curl_header = b"AAMAAABkAAQAAP__"
+ data = conn.initiate_upgrade_connection(curl_header)
+ assert data is None
+
+ def test_emits_preamble(self, frame_factory):
+ """
+ Calling initiate_upgrade_connection emits the connection preamble.
+ """
+ conn = h2.connection.H2Connection(config=self.server_config)
+ conn.initiate_upgrade_connection()
+
+ data = conn.data_to_send()
+ expected_frame = frame_factory.build_settings_frame(
+ settings=conn.local_settings
+ )
+ assert data == expected_frame.serialize()
+
+ def test_can_send_response(self, frame_factory):
+ """
+ After upgrading, we can safely send a response.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_upgrade_connection()
+ c.clear_outbound_data_buffer()
+
+ c.send_headers(stream_id=1, headers=self.example_response_headers)
+ c.send_data(stream_id=1, data=b'some data', end_stream=True)
+
+ f1 = frame_factory.build_headers_frame(
+ stream_id=1,
+ headers=self.example_response_headers,
+ )
+ f2 = frame_factory.build_data_frame(
+ stream_id=1,
+ data=b'some data',
+ flags=['END_STREAM']
+ )
+
+ expected_data = f1.serialize() + f2.serialize()
+ assert c.data_to_send() == expected_data
+
+ def test_can_push_stream(self, frame_factory):
+ """
+ After upgrading, we can safely push a stream.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_upgrade_connection()
+ c.clear_outbound_data_buffer()
+
+ c.push_stream(
+ stream_id=1,
+ promised_stream_id=2,
+ request_headers=self.example_request_headers
+ )
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers,
+ )
+ assert c.data_to_send() == f.serialize()
+
+ def test_cannot_receive_headers_stream_1(self, frame_factory):
+ """
+ After upgrading, we cannot receive headers on stream 1.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_upgrade_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ stream_id=1,
+ headers=self.example_request_headers,
+ )
+ c.receive_data(f.serialize())
+
+ expected_frame = frame_factory.build_rst_stream_frame(
+ stream_id=1,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_cannot_receive_data_stream_1(self, frame_factory):
+ """
+ After upgrading, we cannot receive data on stream 1.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_upgrade_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_data_frame(
+ stream_id=1,
+ data=b'some data',
+ )
+ c.receive_data(f.serialize())
+
+ expected = frame_factory.build_rst_stream_frame(
+ stream_id=1,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ ).serialize()
+ assert c.data_to_send() == expected
+
+ def test_client_settings_are_applied(self, frame_factory):
+ """
+ The settings provided by the client in the HTTP2-Settings header are
+ applied, without the server emitting an explicit SETTINGS ACK.
+ """
+ server = h2.connection.H2Connection(config=self.server_config)
+ client = h2.connection.H2Connection()
+
+ # As a precaution, let's confirm that the server and client, at the
+ # start of the connection, do not agree on their initial settings
+ # state.
+ assert (
+ client.local_settings != server.remote_settings
+ )
+
+ # Get the client header data and pass it to the server.
+ header_data = client.initiate_upgrade_connection()
+ server.initiate_upgrade_connection(header_data)
+
+ # This gets complex, but here we go.
+ # RFC 7540 § 3.2.1 says that "explicit acknowledgement" of the settings
+ # in the header is "not necessary". That's annoyingly vague, but we
+ # interpret that to mean "should not be sent". So to test that this
+ # worked we need to test that the server has only sent the preamble,
+ # and has not sent a SETTINGS ack, and also that the server has the
+ # correct settings.
+ expected_frame = frame_factory.build_settings_frame(
+ server.local_settings
+ )
+ assert server.data_to_send() == expected_frame.serialize()
+
+ assert (
+ client.local_settings == server.remote_settings
+ )
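+
+
+# Editorial sketch, not part of the upstream suite: the minimal server-side
+# h2c upgrade dance that TestServerUpgrade exercises. `settings_header` stands
+# in for the value of the client's HTTP2-Settings header (the default is the
+# curl-generated value used in test_returns_nothing above), and stream 1 is
+# the half-closed stream left behind by the upgraded HTTP/1.1 request.
+def _illustrative_server_upgrade(settings_header=b"AAMAAABkAAQAAP__"):
+    config = h2.config.H2Configuration(client_side=False)
+    conn = h2.connection.H2Connection(config=config)
+    conn.initiate_upgrade_connection(settings_header)
+    # Answer the original HTTP/1.1 request on stream 1.
+    conn.send_headers(stream_id=1, headers=[(b':status', b'200')])
+    conn.send_data(stream_id=1, data=b'hello', end_stream=True)
+    # The server's own SETTINGS frame plus the response frames are now queued.
+    return conn.data_to_send()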
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_head_request.py b/testing/web-platform/tests/tools/third_party/h2/test/test_head_request.py
new file mode 100644
index 0000000000..ef73007254
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_head_request.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+"""
+test_head_request
+~~~~~~~~~~~~~~~~~
+"""
+import pytest
+
+import h2.connection
+import h2.events
+import h2.exceptions
+
+
+class TestHeadRequest(object):
+ example_request_headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'HEAD'),
+ ]
+
+ example_response_headers = [
+ (b':status', b'200'),
+ (b'server', b'fake-serv/0.1.0'),
+ (b'content-length', b'1'),
+ ]
+
+ def test_non_zero_content_and_no_body(self, frame_factory):
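+ """
+ A response to a HEAD request may carry a non-zero content length as
+ long as no body follows; it is surfaced as a normal ResponseReceived
+ event.
+ """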
+
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+
+ f = frame_factory.build_headers_frame(
+ self.example_response_headers,
+ flags=['END_STREAM']
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 2
+ event = events[0]
+
+ assert isinstance(event, h2.events.ResponseReceived)
+ assert event.stream_id == 1
+ assert event.headers == self.example_response_headers
+
+ def test_reject_non_zero_content_and_body(self, frame_factory):
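+ """
+ A response to a HEAD request that actually carries a body is rejected
+ with an InvalidBodyLengthError.
+ """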
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers)
+
+ headers = frame_factory.build_headers_frame(
+ self.example_response_headers
+ )
+ data = frame_factory.build_data_frame(data=b'\x01')
+
+ c.receive_data(headers.serialize())
+ with pytest.raises(h2.exceptions.InvalidBodyLengthError):
+ c.receive_data(data.serialize())
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_header_indexing.py b/testing/web-platform/tests/tools/third_party/h2/test/test_header_indexing.py
new file mode 100644
index 0000000000..23fd06f15b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_header_indexing.py
@@ -0,0 +1,637 @@
+# -*- coding: utf-8 -*-
+"""
+test_header_indexing.py
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This module contains tests that use HPACK header tuples that provide additional
+metadata to the hpack module about how to encode the headers.
+"""
+import pytest
+
+from hpack import HeaderTuple, NeverIndexedHeaderTuple
+
+import h2.config
+import h2.connection
+
+
+def assert_header_blocks_actually_equal(block_a, block_b):
+ """
+ Asserts that two header blocks are really, truly equal, down to the types
+ of their tuples. Doesn't return anything.
+ """
+ assert len(block_a) == len(block_b)
+
+ for a, b in zip(block_a, block_b):
+ assert a == b
+ assert a.__class__ is b.__class__
+
+
+class TestHeaderIndexing(object):
+ """
+ Test that Hyper-h2 can correctly handle never indexed header fields using
+ the appropriate hpack data structures.
+ """
+ example_request_headers = [
+ HeaderTuple(u':authority', u'example.com'),
+ HeaderTuple(u':path', u'/'),
+ HeaderTuple(u':scheme', u'https'),
+ HeaderTuple(u':method', u'GET'),
+ ]
+ bytes_example_request_headers = [
+ HeaderTuple(b':authority', b'example.com'),
+ HeaderTuple(b':path', b'/'),
+ HeaderTuple(b':scheme', b'https'),
+ HeaderTuple(b':method', b'GET'),
+ ]
+
+ extended_request_headers = [
+ HeaderTuple(u':authority', u'example.com'),
+ HeaderTuple(u':path', u'/'),
+ HeaderTuple(u':scheme', u'https'),
+ HeaderTuple(u':method', u'GET'),
+ NeverIndexedHeaderTuple(u'authorization', u'realpassword'),
+ ]
+ bytes_extended_request_headers = [
+ HeaderTuple(b':authority', b'example.com'),
+ HeaderTuple(b':path', b'/'),
+ HeaderTuple(b':scheme', b'https'),
+ HeaderTuple(b':method', b'GET'),
+ NeverIndexedHeaderTuple(b'authorization', b'realpassword'),
+ ]
+
+ example_response_headers = [
+ HeaderTuple(u':status', u'200'),
+ HeaderTuple(u'server', u'fake-serv/0.1.0')
+ ]
+ bytes_example_response_headers = [
+ HeaderTuple(b':status', b'200'),
+ HeaderTuple(b'server', b'fake-serv/0.1.0')
+ ]
+
+ extended_response_headers = [
+ HeaderTuple(u':status', u'200'),
+ HeaderTuple(u'server', u'fake-serv/0.1.0'),
+ NeverIndexedHeaderTuple(u'secure', u'you-bet'),
+ ]
+ bytes_extended_response_headers = [
+ HeaderTuple(b':status', b'200'),
+ HeaderTuple(b'server', b'fake-serv/0.1.0'),
+ NeverIndexedHeaderTuple(b'secure', b'you-bet'),
+ ]
+
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ @pytest.mark.parametrize(
+ 'headers', (
+ example_request_headers,
+ bytes_example_request_headers,
+ extended_request_headers,
+ bytes_extended_request_headers,
+ )
+ )
+ def test_sending_header_tuples(self, headers, frame_factory):
+ """
+ Providing HeaderTuple and HeaderTuple subclasses preserves the metadata
+ about indexing.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # Clear the data, then send headers.
+ c.clear_outbound_data_buffer()
+ c.send_headers(1, headers)
+
+ f = frame_factory.build_headers_frame(headers=headers)
+ assert c.data_to_send() == f.serialize()
+
+ @pytest.mark.parametrize(
+ 'headers', (
+ example_request_headers,
+ bytes_example_request_headers,
+ extended_request_headers,
+ bytes_extended_request_headers,
+ )
+ )
+ def test_header_tuples_in_pushes(self, headers, frame_factory):
+ """
+ Providing HeaderTuple and HeaderTuple subclasses to push promises
+ preserves metadata about indexing.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ # We can use normal headers for the request.
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+
+ frame_factory.refresh_encoder()
+ expected_frame = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=headers,
+ flags=['END_HEADERS'],
+ )
+
+ c.clear_outbound_data_buffer()
+ c.push_stream(
+ stream_id=1,
+ promised_stream_id=2,
+ request_headers=headers
+ )
+
+ assert c.data_to_send() == expected_frame.serialize()
+
+ @pytest.mark.parametrize(
+ 'headers,encoding', (
+ (example_request_headers, 'utf-8'),
+ (bytes_example_request_headers, None),
+ (extended_request_headers, 'utf-8'),
+ (bytes_extended_request_headers, None),
+ )
+ )
+ def test_header_tuples_are_decoded_request(self,
+ headers,
+ encoding,
+ frame_factory):
+ """
+ The indexing status of the header is preserved when emitting
+ RequestReceived events.
+ """
+ config = h2.config.H2Configuration(
+ client_side=False, header_encoding=encoding
+ )
+ c = h2.connection.H2Connection(config=config)
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(headers)
+ data = f.serialize()
+ events = c.receive_data(data)
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.RequestReceived)
+ assert_header_blocks_actually_equal(headers, event.headers)
+
+ @pytest.mark.parametrize(
+ 'headers,encoding', (
+ (example_response_headers, 'utf-8'),
+ (bytes_example_response_headers, None),
+ (extended_response_headers, 'utf-8'),
+ (bytes_extended_response_headers, None),
+ )
+ )
+ def test_header_tuples_are_decoded_response(self,
+ headers,
+ encoding,
+ frame_factory):
+ """
+ The indexing status of the header is preserved when emitting
+ ResponseReceived events.
+ """
+ config = h2.config.H2Configuration(
+ header_encoding=encoding
+ )
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ f = frame_factory.build_headers_frame(headers)
+ data = f.serialize()
+ events = c.receive_data(data)
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.ResponseReceived)
+ assert_header_blocks_actually_equal(headers, event.headers)
+
+ @pytest.mark.parametrize(
+ 'headers,encoding', (
+ (example_response_headers, 'utf-8'),
+ (bytes_example_response_headers, None),
+ (extended_response_headers, 'utf-8'),
+ (bytes_extended_response_headers, None),
+ )
+ )
+ def test_header_tuples_are_decoded_info_response(self,
+ headers,
+ encoding,
+ frame_factory):
+ """
+ The indexing status of the header is preserved when emitting
+ InformationalResponseReceived events.
+ """
+ # Manipulate the headers to send 100 Continue. We need to copy the list
+ # to avoid breaking the example headers.
+ headers = headers[:]
+ if encoding:
+ headers[0] = HeaderTuple(u':status', u'100')
+ else:
+ headers[0] = HeaderTuple(b':status', b'100')
+
+ config = h2.config.H2Configuration(
+ header_encoding=encoding
+ )
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ f = frame_factory.build_headers_frame(headers)
+ data = f.serialize()
+ events = c.receive_data(data)
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.InformationalResponseReceived)
+ assert_header_blocks_actually_equal(headers, event.headers)
+
+ @pytest.mark.parametrize(
+ 'headers,encoding', (
+ (example_response_headers, 'utf-8'),
+ (bytes_example_response_headers, None),
+ (extended_response_headers, 'utf-8'),
+ (bytes_extended_response_headers, None),
+ )
+ )
+ def test_header_tuples_are_decoded_trailers(self,
+ headers,
+ encoding,
+ frame_factory):
+ """
+ The indexing status of the header is preserved when emitting
+ TrailersReceived events.
+ """
+ # Manipulate the headers to remove the status, which shouldn't be in
+ # the trailers. We need to copy the list to avoid breaking the example
+ # headers.
+ headers = headers[1:]
+
+ config = h2.config.H2Configuration(
+ header_encoding=encoding
+ )
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+ f = frame_factory.build_headers_frame(self.example_response_headers)
+ data = f.serialize()
+ c.receive_data(data)
+
+ f = frame_factory.build_headers_frame(headers, flags=['END_STREAM'])
+ data = f.serialize()
+ events = c.receive_data(data)
+
+ assert len(events) == 2
+ event = events[0]
+
+ assert isinstance(event, h2.events.TrailersReceived)
+ assert_header_blocks_actually_equal(headers, event.headers)
+
+ @pytest.mark.parametrize(
+ 'headers,encoding', (
+ (example_request_headers, 'utf-8'),
+ (bytes_example_request_headers, None),
+ (extended_request_headers, 'utf-8'),
+ (bytes_extended_request_headers, None),
+ )
+ )
+ def test_header_tuples_are_decoded_push_promise(self,
+ headers,
+ encoding,
+ frame_factory):
+ """
+ The indexing status of the header is preserved when emitting
+ PushedStreamReceived events.
+ """
+ config = h2.config.H2Configuration(
+ header_encoding=encoding
+ )
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=headers,
+ flags=['END_HEADERS'],
+ )
+ data = f.serialize()
+ events = c.receive_data(data)
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.PushedStreamReceived)
+ assert_header_blocks_actually_equal(headers, event.headers)
+
+
+class TestSecureHeaders(object):
+ """
+ Certain headers should always be transformed to their never-indexed form.
+ """
+ example_request_headers = [
+ (u':authority', u'example.com'),
+ (u':path', u'/'),
+ (u':scheme', u'https'),
+ (u':method', u'GET'),
+ ]
+ bytes_example_request_headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ ]
+ possible_auth_headers = [
+ (u'authorization', u'test'),
+ (u'Authorization', u'test'),
+ (u'authorization', u'really long test'),
+ HeaderTuple(u'authorization', u'test'),
+ HeaderTuple(u'Authorization', u'test'),
+ HeaderTuple(u'authorization', u'really long test'),
+ NeverIndexedHeaderTuple(u'authorization', u'test'),
+ NeverIndexedHeaderTuple(u'Authorization', u'test'),
+ NeverIndexedHeaderTuple(u'authorization', u'really long test'),
+ (b'authorization', b'test'),
+ (b'Authorization', b'test'),
+ (b'authorization', b'really long test'),
+ HeaderTuple(b'authorization', b'test'),
+ HeaderTuple(b'Authorization', b'test'),
+ HeaderTuple(b'authorization', b'really long test'),
+ NeverIndexedHeaderTuple(b'authorization', b'test'),
+ NeverIndexedHeaderTuple(b'Authorization', b'test'),
+ NeverIndexedHeaderTuple(b'authorization', b'really long test'),
+ (u'proxy-authorization', u'test'),
+ (u'Proxy-Authorization', u'test'),
+ (u'proxy-authorization', u'really long test'),
+ HeaderTuple(u'proxy-authorization', u'test'),
+ HeaderTuple(u'Proxy-Authorization', u'test'),
+ HeaderTuple(u'proxy-authorization', u'really long test'),
+ NeverIndexedHeaderTuple(u'proxy-authorization', u'test'),
+ NeverIndexedHeaderTuple(u'Proxy-Authorization', u'test'),
+ NeverIndexedHeaderTuple(u'proxy-authorization', u'really long test'),
+ (b'proxy-authorization', b'test'),
+ (b'Proxy-Authorization', b'test'),
+ (b'proxy-authorization', b'really long test'),
+ HeaderTuple(b'proxy-authorization', b'test'),
+ HeaderTuple(b'Proxy-Authorization', b'test'),
+ HeaderTuple(b'proxy-authorization', b'really long test'),
+ NeverIndexedHeaderTuple(b'proxy-authorization', b'test'),
+ NeverIndexedHeaderTuple(b'Proxy-Authorization', b'test'),
+ NeverIndexedHeaderTuple(b'proxy-authorization', b'really long test'),
+ ]
+ secured_cookie_headers = [
+ (u'cookie', u'short'),
+ (u'Cookie', u'short'),
+ (u'cookie', u'nineteen byte cooki'),
+ HeaderTuple(u'cookie', u'short'),
+ HeaderTuple(u'Cookie', u'short'),
+ HeaderTuple(u'cookie', u'nineteen byte cooki'),
+ NeverIndexedHeaderTuple(u'cookie', u'short'),
+ NeverIndexedHeaderTuple(u'Cookie', u'short'),
+ NeverIndexedHeaderTuple(u'cookie', u'nineteen byte cooki'),
+ NeverIndexedHeaderTuple(u'cookie', u'longer manually secured cookie'),
+ (b'cookie', b'short'),
+ (b'Cookie', b'short'),
+ (b'cookie', b'nineteen byte cooki'),
+ HeaderTuple(b'cookie', b'short'),
+ HeaderTuple(b'Cookie', b'short'),
+ HeaderTuple(b'cookie', b'nineteen byte cooki'),
+ NeverIndexedHeaderTuple(b'cookie', b'short'),
+ NeverIndexedHeaderTuple(b'Cookie', b'short'),
+ NeverIndexedHeaderTuple(b'cookie', b'nineteen byte cooki'),
+ NeverIndexedHeaderTuple(b'cookie', b'longer manually secured cookie'),
+ ]
+ unsecured_cookie_headers = [
+ (u'cookie', u'twenty byte cookie!!'),
+ (u'Cookie', u'twenty byte cookie!!'),
+ (u'cookie', u'substantially longer than 20 byte cookie'),
+ HeaderTuple(u'cookie', u'twenty byte cookie!!'),
+ HeaderTuple(u'cookie', u'twenty byte cookie!!'),
+ HeaderTuple(u'Cookie', u'twenty byte cookie!!'),
+ (b'cookie', b'twenty byte cookie!!'),
+ (b'Cookie', b'twenty byte cookie!!'),
+ (b'cookie', b'substantially longer than 20 byte cookie'),
+ HeaderTuple(b'cookie', b'twenty byte cookie!!'),
+ HeaderTuple(b'cookie', b'twenty byte cookie!!'),
+ HeaderTuple(b'Cookie', b'twenty byte cookie!!'),
+ ]
+
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ @pytest.mark.parametrize(
+ 'headers', (example_request_headers, bytes_example_request_headers)
+ )
+ @pytest.mark.parametrize('auth_header', possible_auth_headers)
+ def test_authorization_headers_never_indexed(self,
+ headers,
+ auth_header,
+ frame_factory):
+ """
+ Authorization and Proxy-Authorization headers are always forced to be
+ never-indexed, regardless of their form.
+ """
+ # Regardless of what we send, we expect it to be never indexed.
+ send_headers = headers + [auth_header]
+ expected_headers = headers + [
+ NeverIndexedHeaderTuple(auth_header[0].lower(), auth_header[1])
+ ]
+
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # Clear the data, then send headers.
+ c.clear_outbound_data_buffer()
+ c.send_headers(1, send_headers)
+
+ f = frame_factory.build_headers_frame(headers=expected_headers)
+ assert c.data_to_send() == f.serialize()
+
+ @pytest.mark.parametrize(
+ 'headers', (example_request_headers, bytes_example_request_headers)
+ )
+ @pytest.mark.parametrize('auth_header', possible_auth_headers)
+ def test_authorization_headers_never_indexed_push(self,
+ headers,
+ auth_header,
+ frame_factory):
+ """
+ Authorization and Proxy-Authorization headers are always forced to be
+ never-indexed, regardless of their form, when pushed by a server.
+ """
+ # Regardless of what we send, we expect it to be never indexed.
+ send_headers = headers + [auth_header]
+ expected_headers = headers + [
+ NeverIndexedHeaderTuple(auth_header[0].lower(), auth_header[1])
+ ]
+
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ # We can use normal headers for the request.
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+
+ frame_factory.refresh_encoder()
+ expected_frame = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=expected_headers,
+ flags=['END_HEADERS'],
+ )
+
+ c.clear_outbound_data_buffer()
+ c.push_stream(
+ stream_id=1,
+ promised_stream_id=2,
+ request_headers=send_headers
+ )
+
+ assert c.data_to_send() == expected_frame.serialize()
+
+ @pytest.mark.parametrize(
+ 'headers', (example_request_headers, bytes_example_request_headers)
+ )
+ @pytest.mark.parametrize('cookie_header', secured_cookie_headers)
+ def test_short_cookie_headers_never_indexed(self,
+ headers,
+ cookie_header,
+ frame_factory):
+ """
+ Short cookie headers, and cookies provided as NeverIndexedHeaderTuple,
+ are never indexed.
+ """
+ # Regardless of what we send, we expect it to be never indexed.
+ send_headers = headers + [cookie_header]
+ expected_headers = headers + [
+ NeverIndexedHeaderTuple(cookie_header[0].lower(), cookie_header[1])
+ ]
+
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # Clear the data, then send headers.
+ c.clear_outbound_data_buffer()
+ c.send_headers(1, send_headers)
+
+ f = frame_factory.build_headers_frame(headers=expected_headers)
+ assert c.data_to_send() == f.serialize()
+
+ @pytest.mark.parametrize(
+ 'headers', (example_request_headers, bytes_example_request_headers)
+ )
+ @pytest.mark.parametrize('cookie_header', secured_cookie_headers)
+ def test_short_cookie_headers_never_indexed_push(self,
+ headers,
+ cookie_header,
+ frame_factory):
+ """
+ Short cookie headers, and cookies provided as NeverIndexedHeaderTuple,
+ are never indexed when pushed by servers.
+ """
+ # Regardless of what we send, we expect it to be never indexed.
+ send_headers = headers + [cookie_header]
+ expected_headers = headers + [
+ NeverIndexedHeaderTuple(cookie_header[0].lower(), cookie_header[1])
+ ]
+
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ # We can use normal headers for the request.
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+
+ frame_factory.refresh_encoder()
+ expected_frame = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=expected_headers,
+ flags=['END_HEADERS'],
+ )
+
+ c.clear_outbound_data_buffer()
+ c.push_stream(
+ stream_id=1,
+ promised_stream_id=2,
+ request_headers=send_headers
+ )
+
+ assert c.data_to_send() == expected_frame.serialize()
+
+ @pytest.mark.parametrize(
+ 'headers', (example_request_headers, bytes_example_request_headers)
+ )
+ @pytest.mark.parametrize('cookie_header', unsecured_cookie_headers)
+ def test_long_cookie_headers_can_be_indexed(self,
+ headers,
+ cookie_header,
+ frame_factory):
+ """
+ Longer cookie headers can be indexed.
+ """
+ # Regardless of what we send, we expect it to be indexed.
+ send_headers = headers + [cookie_header]
+ expected_headers = headers + [
+ HeaderTuple(cookie_header[0].lower(), cookie_header[1])
+ ]
+
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # Clear the data, then send headers.
+ c.clear_outbound_data_buffer()
+ c.send_headers(1, send_headers)
+
+ f = frame_factory.build_headers_frame(headers=expected_headers)
+ assert c.data_to_send() == f.serialize()
+
+ @pytest.mark.parametrize(
+ 'headers', (example_request_headers, bytes_example_request_headers)
+ )
+ @pytest.mark.parametrize('cookie_header', unsecured_cookie_headers)
+ def test_long_cookie_headers_can_be_indexed_push(self,
+ headers,
+ cookie_header,
+ frame_factory):
+ """
+ Longer cookie headers can be indexed.
+ """
+ # Regardless of what we send, we expect it to be indexed.
+ send_headers = headers + [cookie_header]
+ expected_headers = headers + [
+ HeaderTuple(cookie_header[0].lower(), cookie_header[1])
+ ]
+
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ # We can use normal headers for the request.
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+
+ frame_factory.refresh_encoder()
+ expected_frame = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=expected_headers,
+ flags=['END_HEADERS'],
+ )
+
+ c.clear_outbound_data_buffer()
+ c.push_stream(
+ stream_id=1,
+ promised_stream_id=2,
+ request_headers=send_headers
+ )
+
+ assert c.data_to_send() == expected_frame.serialize()
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_informational_responses.py b/testing/web-platform/tests/tools/third_party/h2/test/test_informational_responses.py
new file mode 100644
index 0000000000..e18c44bcb4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_informational_responses.py
@@ -0,0 +1,444 @@
+# -*- coding: utf-8 -*-
+"""
+test_informational_responses
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Tests that validate that hyper-h2 correctly handles informational (1XX)
+responses in its state machine.
+"""
+import pytest
+
+import h2.config
+import h2.connection
+import h2.events
+import h2.exceptions
+
+
+class TestReceivingInformationalResponses(object):
+ """
+ Tests for receiving informational responses.
+ """
+ example_request_headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ (b'expect', b'100-continue'),
+ ]
+ example_informational_headers = [
+ (b':status', b'100'),
+ (b'server', b'fake-serv/0.1.0')
+ ]
+ example_response_headers = [
+ (b':status', b'200'),
+ (b'server', b'fake-serv/0.1.0')
+ ]
+ example_trailers = [
+ (b'trailer', b'you-bet'),
+ ]
+
+ @pytest.mark.parametrize('end_stream', (True, False))
+ def test_single_informational_response(self, frame_factory, end_stream):
+ """
+ When receiving an informational response, the appropriate event is
+ signaled.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_request_headers,
+ end_stream=end_stream
+ )
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_informational_headers,
+ stream_id=1,
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.InformationalResponseReceived)
+ assert event.headers == self.example_informational_headers
+ assert event.stream_id == 1
+
+ @pytest.mark.parametrize('end_stream', (True, False))
+ def test_receiving_multiple_header_blocks(self, frame_factory, end_stream):
+ """
+ At least three header blocks can be received: informational, headers,
+ trailers.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_request_headers,
+ end_stream=end_stream
+ )
+
+ f1 = frame_factory.build_headers_frame(
+ headers=self.example_informational_headers,
+ stream_id=1,
+ )
+ f2 = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ stream_id=1,
+ )
+ f3 = frame_factory.build_headers_frame(
+ headers=self.example_trailers,
+ stream_id=1,
+ flags=['END_STREAM'],
+ )
+ events = c.receive_data(
+ f1.serialize() + f2.serialize() + f3.serialize()
+ )
+
+ assert len(events) == 4
+
+ assert isinstance(events[0], h2.events.InformationalResponseReceived)
+ assert events[0].headers == self.example_informational_headers
+ assert events[0].stream_id == 1
+
+ assert isinstance(events[1], h2.events.ResponseReceived)
+ assert events[1].headers == self.example_response_headers
+ assert events[1].stream_id == 1
+
+ assert isinstance(events[2], h2.events.TrailersReceived)
+ assert events[2].headers == self.example_trailers
+ assert events[2].stream_id == 1
+
+ @pytest.mark.parametrize('end_stream', (True, False))
+ def test_receiving_multiple_informational_responses(self,
+ frame_factory,
+ end_stream):
+ """
+ More than one informational response is allowed.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_request_headers,
+ end_stream=end_stream
+ )
+
+ f1 = frame_factory.build_headers_frame(
+ headers=self.example_informational_headers,
+ stream_id=1,
+ )
+ f2 = frame_factory.build_headers_frame(
+ headers=[(':status', '101')],
+ stream_id=1,
+ )
+ events = c.receive_data(f1.serialize() + f2.serialize())
+
+ assert len(events) == 2
+
+ assert isinstance(events[0], h2.events.InformationalResponseReceived)
+ assert events[0].headers == self.example_informational_headers
+ assert events[0].stream_id == 1
+
+ assert isinstance(events[1], h2.events.InformationalResponseReceived)
+ assert events[1].headers == [(b':status', b'101')]
+ assert events[1].stream_id == 1
+
+ @pytest.mark.parametrize('end_stream', (True, False))
+ def test_receive_provisional_response_with_end_stream(self,
+ frame_factory,
+ end_stream):
+ """
+ Receiving provisional responses with END_STREAM set causes
+ ProtocolErrors.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_request_headers,
+ end_stream=end_stream
+ )
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_informational_headers,
+ stream_id=1,
+ flags=['END_STREAM']
+ )
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ expected = frame_factory.build_goaway_frame(
+ last_stream_id=0,
+ error_code=1,
+ )
+ assert c.data_to_send() == expected.serialize()
+
+ @pytest.mark.parametrize('end_stream', (True, False))
+ def test_receiving_out_of_order_headers(self, frame_factory, end_stream):
+ """
+ When receiving an informational response after the actual response
+ headers, we consider it a ProtocolError and raise it.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_request_headers,
+ end_stream=end_stream
+ )
+
+ f1 = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ stream_id=1,
+ )
+ f2 = frame_factory.build_headers_frame(
+ headers=self.example_informational_headers,
+ stream_id=1,
+ )
+ c.receive_data(f1.serialize())
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f2.serialize())
+
+ expected = frame_factory.build_goaway_frame(
+ last_stream_id=0,
+ error_code=1,
+ )
+ assert c.data_to_send() == expected.serialize()
+
+
+class TestSendingInformationalResponses(object):
+ """
+ Tests for sending informational responses.
+ """
+ example_request_headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ (b'expect', b'100-continue'),
+ ]
+ unicode_informational_headers = [
+ (u':status', u'100'),
+ (u'server', u'fake-serv/0.1.0')
+ ]
+ bytes_informational_headers = [
+ (b':status', b'100'),
+ (b'server', b'fake-serv/0.1.0')
+ ]
+ example_response_headers = [
+ (b':status', b'200'),
+ (b'server', b'fake-serv/0.1.0')
+ ]
+ example_trailers = [
+ (b'trailer', b'you-bet'),
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ @pytest.mark.parametrize(
+ 'hdrs', (unicode_informational_headers, bytes_informational_headers),
+ )
+ @pytest.mark.parametrize('end_stream', (True, False))
+ def test_single_informational_response(self,
+ frame_factory,
+ hdrs,
+ end_stream):
+ """
+ When sending an informational response, the appropriate frames are
+ emitted.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ flags = ['END_STREAM'] if end_stream else []
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ stream_id=1,
+ flags=flags,
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+ frame_factory.refresh_encoder()
+
+ c.send_headers(
+ stream_id=1,
+ headers=hdrs
+ )
+
+ f = frame_factory.build_headers_frame(
+ headers=hdrs,
+ stream_id=1,
+ )
+ assert c.data_to_send() == f.serialize()
+
+ @pytest.mark.parametrize(
+ 'hdrs', (unicode_informational_headers, bytes_informational_headers),
+ )
+ @pytest.mark.parametrize('end_stream', (True, False))
+ def test_sending_multiple_header_blocks(self,
+ frame_factory,
+ hdrs,
+ end_stream):
+ """
+ At least three header blocks can be sent: informational, headers,
+ trailers.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ flags = ['END_STREAM'] if end_stream else []
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ stream_id=1,
+ flags=flags,
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+ frame_factory.refresh_encoder()
+
+ # Send the three header blocks.
+ c.send_headers(
+ stream_id=1,
+ headers=hdrs
+ )
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_response_headers
+ )
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_trailers,
+ end_stream=True
+ )
+
+ # Check that we sent them properly.
+ f1 = frame_factory.build_headers_frame(
+ headers=hdrs,
+ stream_id=1,
+ )
+ f2 = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ stream_id=1,
+ )
+ f3 = frame_factory.build_headers_frame(
+ headers=self.example_trailers,
+ stream_id=1,
+ flags=['END_STREAM']
+ )
+ assert (
+ c.data_to_send() ==
+ f1.serialize() + f2.serialize() + f3.serialize()
+ )
+
+ @pytest.mark.parametrize(
+ 'hdrs', (unicode_informational_headers, bytes_informational_headers),
+ )
+ @pytest.mark.parametrize('end_stream', (True, False))
+ def test_sending_multiple_informational_responses(self,
+ frame_factory,
+ hdrs,
+ end_stream):
+ """
+ More than one informational response is allowed.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ flags = ['END_STREAM'] if end_stream else []
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ stream_id=1,
+ flags=flags,
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+ frame_factory.refresh_encoder()
+
+ # Send two informational responses.
+ c.send_headers(
+ stream_id=1,
+ headers=hdrs,
+ )
+ c.send_headers(
+ stream_id=1,
+ headers=[(':status', '101')]
+ )
+
+ # Check we sent them both.
+ f1 = frame_factory.build_headers_frame(
+ headers=hdrs,
+ stream_id=1,
+ )
+ f2 = frame_factory.build_headers_frame(
+ headers=[(':status', '101')],
+ stream_id=1,
+ )
+ assert c.data_to_send() == f1.serialize() + f2.serialize()
+
+ @pytest.mark.parametrize(
+ 'hdrs', (unicode_informational_headers, bytes_informational_headers),
+ )
+ @pytest.mark.parametrize('end_stream', (True, False))
+ def test_send_provisional_response_with_end_stream(self,
+ frame_factory,
+ hdrs,
+ end_stream):
+ """
+ Sending provisional responses with END_STREAM set causes
+ ProtocolErrors.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ flags = ['END_STREAM'] if end_stream else []
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ stream_id=1,
+ flags=flags,
+ )
+ c.receive_data(f.serialize())
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.send_headers(
+ stream_id=1,
+ headers=hdrs,
+ end_stream=True,
+ )
+
+ @pytest.mark.parametrize(
+ 'hdrs', (unicode_informational_headers, bytes_informational_headers),
+ )
+ @pytest.mark.parametrize('end_stream', (True, False))
+ def test_reject_sending_out_of_order_headers(self,
+ frame_factory,
+ hdrs,
+ end_stream):
+ """
+ When sending an informational response after the actual response
+ headers, we consider it a ProtocolError and raise it.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ flags = ['END_STREAM'] if end_stream else []
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ stream_id=1,
+ flags=flags,
+ )
+ c.receive_data(f.serialize())
+
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_response_headers
+ )
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.send_headers(
+ stream_id=1,
+ headers=hdrs
+ )
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_interacting_stacks.py b/testing/web-platform/tests/tools/third_party/h2/test/test_interacting_stacks.py
new file mode 100644
index 0000000000..90776829c8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_interacting_stacks.py
@@ -0,0 +1,120 @@
+# -*- coding: utf-8 -*-
+"""
+test_interacting_stacks
+~~~~~~~~~~~~~~~~~~~~~~~
+
+These tests run two entities, a client and a server, in parallel threads. These
+two entities talk to each other, running what amounts to a number of carefully
+controlled simulations of real flows.
+
+This is to ensure that the stack as a whole behaves intelligently in both
+client and server cases.
+
+These tests are long, complex, and somewhat brittle, so they aren't generally
+recommended for writing the majority of test cases. Their purpose is primarily
+to validate that the top-level API of the library behaves as described.
+
+We should also consider writing helper functions to reduce the complexity of
+these tests, so that they can be written more easily, as they are remarkably
+useful.
+"""
+import coroutine_tests
+
+import h2.config
+import h2.connection
+import h2.events
+import h2.settings
+
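+# A rough sketch of the coroutine pattern used below (assuming the helpers in
+# coroutine_tests.py behave the way this test uses them): each peer is a plain
+# generator that yields the bytes it wants to send and is resumed with the
+# bytes the other side produced, e.g.
+#
+#     def client():
+#         data = yield c.data_to_send()   # hand our bytes over, then wait
+#         events = c.receive_data(data)   # `data` is what the peer yielded
+#
+# run_until_complete() then shuttles the yielded byte strings back and forth
+# until both generators finish.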
+
+class TestCommunication(coroutine_tests.CoroutineTestCase):
+ """
+ Test that two communicating state machines can work together.
+ """
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ def test_basic_request_response(self):
+ """
+ A request issued by hyper-h2 can be responded to by hyper-h2.
+ """
+ request_headers = [
+ (b':method', b'GET'),
+ (b':path', b'/'),
+ (b':authority', b'example.com'),
+ (b':scheme', b'https'),
+ (b'user-agent', b'test-client/0.1.0'),
+ ]
+ response_headers = [
+ (b':status', b'204'),
+ (b'server', b'test-server/0.1.0'),
+ (b'content-length', b'0'),
+ ]
+
+ def client():
+ c = h2.connection.H2Connection()
+
+ # Do the handshake. First send the preamble.
+ c.initiate_connection()
+ data = yield c.data_to_send()
+
+ # Next, handle the remote preamble.
+ events = c.receive_data(data)
+ assert len(events) == 2
+ assert isinstance(events[0], h2.events.SettingsAcknowledged)
+ assert isinstance(events[1], h2.events.RemoteSettingsChanged)
+ changed = events[1].changed_settings
+ assert (
+ changed[
+ h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS
+ ].new_value == 100
+ )
+
+ # Send a request.
+ events = c.send_headers(1, request_headers, end_stream=True)
+ assert not events
+ data = yield c.data_to_send()
+
+ # Validate the response.
+ events = c.receive_data(data)
+ assert len(events) == 2
+ assert isinstance(events[0], h2.events.ResponseReceived)
+ assert events[0].stream_id == 1
+ assert events[0].headers == response_headers
+ assert isinstance(events[1], h2.events.StreamEnded)
+ assert events[1].stream_id == 1
+
+ @self.server
+ def server():
+ c = h2.connection.H2Connection(config=self.server_config)
+
+ # First, read for the preamble.
+ data = yield
+ events = c.receive_data(data)
+ assert len(events) == 1
+ assert isinstance(events[0], h2.events.RemoteSettingsChanged)
+ changed = events[0].changed_settings
+ assert (
+ changed[
+ h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS
+ ].new_value == 100
+ )
+
+ # Send our preamble back.
+ c.initiate_connection()
+ data = yield c.data_to_send()
+
+ # Listen for the request.
+ events = c.receive_data(data)
+ assert len(events) == 3
+ assert isinstance(events[0], h2.events.SettingsAcknowledged)
+ assert isinstance(events[1], h2.events.RequestReceived)
+ assert events[1].stream_id == 1
+ assert events[1].headers == request_headers
+ assert isinstance(events[2], h2.events.StreamEnded)
+ assert events[2].stream_id == 1
+
+ # Send our response.
+ events = c.send_headers(1, response_headers, end_stream=True)
+ assert not events
+ yield c.data_to_send()
+
+ self.run_until_complete(client(), server())
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_invalid_content_lengths.py b/testing/web-platform/tests/tools/third_party/h2/test/test_invalid_content_lengths.py
new file mode 100644
index 0000000000..fe682fcc27
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_invalid_content_lengths.py
@@ -0,0 +1,136 @@
+# -*- coding: utf-8 -*-
+"""
+test_invalid_content_lengths.py
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This module contains tests that use invalid content lengths, and validates that
+they fail appropriately.
+"""
+import pytest
+
+import h2.config
+import h2.connection
+import h2.errors
+import h2.events
+import h2.exceptions
+
+
+class TestInvalidContentLengths(object):
+ """
+ Hyper-h2 raises Protocol Errors when the content-length sent by a remote
+ peer is not valid.
+ """
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'POST'),
+ ('content-length', '15'),
+ ]
+ example_response_headers = [
+ (':status', '200'),
+ ('server', 'fake-serv/0.1.0')
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ def test_too_much_data(self, frame_factory):
+ """
+ Remote peers sending data in excess of content-length causes Protocol
+ Errors.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ headers = frame_factory.build_headers_frame(
+ headers=self.example_request_headers
+ )
+ first_data = frame_factory.build_data_frame(data=b'\x01'*15)
+ c.receive_data(headers.serialize() + first_data.serialize())
+ c.clear_outbound_data_buffer()
+
+ second_data = frame_factory.build_data_frame(data=b'\x01')
+ with pytest.raises(h2.exceptions.InvalidBodyLengthError) as exp:
+ c.receive_data(second_data.serialize())
+
+ assert exp.value.expected_length == 15
+ assert exp.value.actual_length == 16
+ assert str(exp.value) == (
+ "InvalidBodyLengthError: Expected 15 bytes, received 16"
+ )
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1,
+ error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_insufficient_data(self, frame_factory):
+ """
+ Remote peers sending less data than content-length causes Protocol
+ Errors.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ headers = frame_factory.build_headers_frame(
+ headers=self.example_request_headers
+ )
+ first_data = frame_factory.build_data_frame(data=b'\x01'*13)
+ c.receive_data(headers.serialize() + first_data.serialize())
+ c.clear_outbound_data_buffer()
+
+ second_data = frame_factory.build_data_frame(
+ data=b'\x01',
+ flags=['END_STREAM'],
+ )
+ with pytest.raises(h2.exceptions.InvalidBodyLengthError) as exp:
+ c.receive_data(second_data.serialize())
+
+ assert exp.value.expected_length == 15
+ assert exp.value.actual_length == 14
+ assert str(exp.value) == (
+ "InvalidBodyLengthError: Expected 15 bytes, received 14"
+ )
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1,
+ error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_insufficient_data_empty_frame(self, frame_factory):
+ """
+ Remote peers sending less data than content-length where the last data
+ frame is empty causes Protocol Errors.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ headers = frame_factory.build_headers_frame(
+ headers=self.example_request_headers
+ )
+ first_data = frame_factory.build_data_frame(data=b'\x01'*14)
+ c.receive_data(headers.serialize() + first_data.serialize())
+ c.clear_outbound_data_buffer()
+
+ second_data = frame_factory.build_data_frame(
+ data=b'',
+ flags=['END_STREAM'],
+ )
+ with pytest.raises(h2.exceptions.InvalidBodyLengthError) as exp:
+ c.receive_data(second_data.serialize())
+
+ assert exp.value.expected_length == 15
+ assert exp.value.actual_length == 14
+ assert str(exp.value) == (
+ "InvalidBodyLengthError: Expected 15 bytes, received 14"
+ )
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1,
+ error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_invalid_frame_sequences.py b/testing/web-platform/tests/tools/third_party/h2/test/test_invalid_frame_sequences.py
new file mode 100644
index 0000000000..12b70c4a6b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_invalid_frame_sequences.py
@@ -0,0 +1,488 @@
+# -*- coding: utf-8 -*-
+"""
+test_invalid_frame_sequences.py
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This module contains tests that use invalid frame sequences, and validates that
+they fail appropriately.
+"""
+import pytest
+
+import h2.config
+import h2.connection
+import h2.errors
+import h2.events
+import h2.exceptions
+
+
+class TestInvalidFrameSequences(object):
+ """
+ Invalid frame sequences, either sent or received, cause ProtocolErrors to
+ be thrown.
+ """
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+ example_response_headers = [
+ (':status', '200'),
+ ('server', 'fake-serv/0.1.0')
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ def test_cannot_send_on_closed_stream(self):
+ """
+ When we've closed a stream locally, we cannot send further data.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.send_data(1, b'some data')
+
+ def test_missing_preamble_errors(self):
+ """
+ Server side connections require the preamble.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ encoded_headers_frame = (
+ b'\x00\x00\r\x01\x04\x00\x00\x00\x01'
+ b'A\x88/\x91\xd3]\x05\\\x87\xa7\x84\x87\x82'
+ )
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(encoded_headers_frame)
+
+ def test_server_connections_reject_even_streams(self, frame_factory):
+ """
+ Servers do not allow clients to initiate even-numbered streams.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers, stream_id=2
+ )
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ def test_clients_reject_odd_stream_pushes(self, frame_factory):
+ """
+ Clients do not allow servers to push odd numbered streams.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(1, self.example_request_headers, end_stream=True)
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ headers=self.example_request_headers,
+ promised_stream_id=3
+ )
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ def test_can_handle_frames_with_invalid_padding(self, frame_factory):
+ """
+ Frames with invalid padding cause connection teardown.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(self.example_request_headers)
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ invalid_data_frame = (
+ b'\x00\x00\x05\x00\x0b\x00\x00\x00\x01\x06\x54\x65\x73\x74'
+ )
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(invalid_data_frame)
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1, error_code=1
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_receiving_frames_with_insufficient_size(self, frame_factory):
+ """
+ Frames with not enough data cause connection teardown.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ invalid_window_update_frame = (
+ b'\x00\x00\x03\x08\x00\x00\x00\x00\x00\x00\x00\x02'
+ )
+
+ with pytest.raises(h2.exceptions.FrameDataMissingError):
+ c.receive_data(invalid_window_update_frame)
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=0, error_code=h2.errors.ErrorCodes.FRAME_SIZE_ERROR
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_reject_data_on_closed_streams(self, frame_factory):
+ """
+ When a stream is not open to the remote peer, we reject receiving data
+ frames from them.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers,
+ flags=['END_STREAM']
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ bad_frame = frame_factory.build_data_frame(
+ data=b'some data'
+ )
+ c.receive_data(bad_frame.serialize())
+
+ expected = frame_factory.build_rst_stream_frame(
+ stream_id=1,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ ).serialize()
+ assert c.data_to_send() == expected
+
+ def test_unexpected_continuation_on_closed_stream(self, frame_factory):
+ """
+ CONTINUATION frames received on closed streams cause connection errors
+ of type PROTOCOL_ERROR.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers,
+ flags=['END_STREAM']
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ bad_frame = frame_factory.build_continuation_frame(
+ header_block=b'hello'
+ )
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(bad_frame.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
+ last_stream_id=1
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_prevent_continuation_dos(self, frame_factory):
+ """
+ Receiving too many CONTINUATION frames in one block causes a protocol
+ error.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers,
+ )
+ f.flags = {'END_STREAM'}
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ # Send 63 additional frames.
+ for _ in range(0, 63):
+ extra_frame = frame_factory.build_continuation_frame(
+ header_block=b'hello'
+ )
+ c.receive_data(extra_frame.serialize())
+
+ # The final continuation frame should cause a protocol error.
+ extra_frame = frame_factory.build_continuation_frame(
+ header_block=b'hello'
+ )
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(extra_frame.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=0,
+ error_code=0x1,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ # These settings use raw setting codes: 0x2 is ENABLE_PUSH (only 0 or 1 is
+ # valid), 0x4 is INITIAL_WINDOW_SIZE (which must stay below 2**31) and 0x5
+ # is MAX_FRAME_SIZE (which must lie between 2**14 and 2**24 - 1), so every
+ # value below is invalid.
+ @pytest.mark.parametrize(
+ "settings",
+ [
+ {0x2: 5},
+ {0x4: 2**31},
+ {0x5: 5},
+ {0x5: 2**24},
+ ]
+ )
+ def test_reject_invalid_settings_values(self, frame_factory, settings):
+ """
+ When a SETTINGS frame is received with invalid settings values it
+ causes connection teardown with the appropriate error code.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_settings_frame(settings=settings)
+
+ with pytest.raises(h2.exceptions.InvalidSettingsValueError) as e:
+ c.receive_data(f.serialize())
+
+ assert e.value.error_code == (
+ h2.errors.ErrorCodes.FLOW_CONTROL_ERROR if 0x4 in settings else
+ h2.errors.ErrorCodes.PROTOCOL_ERROR
+ )
+
+ def test_invalid_frame_headers_are_protocol_errors(self, frame_factory):
+ """
+ When invalid frame headers are received, they cause ProtocolErrors to be
+ raised.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers
+ )
+
+ # Do some annoying bit twiddling here: the stream ID is currently set
+ # to '1', change it to '0'. Grab the first 9 bytes (the frame header),
+ # replace any instances of the byte '\x01' with '\x00', and then graft
+ # the result onto the remaining bytes.
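+ # (For reference, the 9-byte frame header laid out in RFC 7540 section 4.1
+ # is a 24-bit length, an 8-bit type, an 8-bit flags field and a 31-bit
+ # stream identifier, so clearing the 0x01 bytes zeroes the stream
+ # identifier and produces the forbidden stream 0.)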
+ frame_data = f.serialize()
+ frame_data = frame_data[:9].replace(b'\x01', b'\x00') + frame_data[9:]
+
+ with pytest.raises(h2.exceptions.ProtocolError) as e:
+ c.receive_data(frame_data)
+
+ assert "Stream ID must be non-zero" in str(e.value)
+
+ def test_get_stream_reset_event_on_auto_reset(self, frame_factory):
+ """
+ When hyper-h2 resets a stream automatically, a StreamReset event fires.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers,
+ flags=['END_STREAM']
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ bad_frame = frame_factory.build_data_frame(
+ data=b'some data'
+ )
+ events = c.receive_data(bad_frame.serialize())
+
+ expected = frame_factory.build_rst_stream_frame(
+ stream_id=1,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ ).serialize()
+ assert c.data_to_send() == expected
+
+ assert len(events) == 1
+ event = events[0]
+ assert isinstance(event, h2.events.StreamReset)
+ assert event.stream_id == 1
+ assert event.error_code == h2.errors.ErrorCodes.STREAM_CLOSED
+ assert not event.remote_reset
+
+ def test_one_one_stream_reset(self, frame_factory):
+ """
+ When hyper-h2 resets a stream automatically, a StreamReset event fires,
+ but only for the first reset: the others are silent.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(
+ self.example_request_headers,
+ flags=['END_STREAM']
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ bad_frame = frame_factory.build_data_frame(
+ data=b'some data'
+ )
+ # Receive 5 frames.
+ events = c.receive_data(bad_frame.serialize() * 5)
+
+ expected = frame_factory.build_rst_stream_frame(
+ stream_id=1,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ ).serialize()
+ assert c.data_to_send() == expected * 5
+
+ assert len(events) == 1
+ event = events[0]
+ assert isinstance(event, h2.events.StreamReset)
+ assert event.stream_id == 1
+ assert event.error_code == h2.errors.ErrorCodes.STREAM_CLOSED
+ assert not event.remote_reset
+
+ @pytest.mark.parametrize('value', ['', 'twelve'])
+ def test_error_on_invalid_content_length(self, frame_factory, value):
+ """
+ When an invalid content-length is received, a ProtocolError is thrown.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ stream_id=1,
+ headers=self.example_request_headers + [('content-length', value)]
+ )
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1,
+ error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_invalid_header_data_protocol_error(self, frame_factory):
+ """
+ If an invalid header block is received, we raise a ProtocolError.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ stream_id=1,
+ headers=self.example_request_headers
+ )
+ f.data = b'\x00\x00\x00\x00'
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=0,
+ error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_invalid_push_promise_data_protocol_error(self, frame_factory):
+ """
+ If an invalid header block is received on a PUSH_PROMISE, we raise a
+ ProtocolError.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers
+ )
+ f.data = b'\x00\x00\x00\x00'
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=0,
+ error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_cannot_receive_push_on_pushed_stream(self, frame_factory):
+ """
+ If a PUSH_PROMISE frame is received with the parent stream ID being a
+ pushed stream, this is rejected with a PROTOCOL_ERROR.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_request_headers,
+ end_stream=True
+ )
+
+ f1 = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers,
+ )
+ f2 = frame_factory.build_headers_frame(
+ stream_id=2,
+ headers=self.example_response_headers,
+ )
+ c.receive_data(f1.serialize() + f2.serialize())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=2,
+ promised_stream_id=4,
+ headers=self.example_request_headers,
+ )
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=2,
+ error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_cannot_send_push_on_pushed_stream(self, frame_factory):
+ """
+ If a user tries to send a PUSH_PROMISE frame with the parent stream ID
+ being a pushed stream, this is rejected with a PROTOCOL_ERROR.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_headers_frame(
+ stream_id=1, headers=self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+
+ c.push_stream(
+ stream_id=1,
+ promised_stream_id=2,
+ request_headers=self.example_request_headers
+ )
+ c.send_headers(stream_id=2, headers=self.example_response_headers)
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.push_stream(
+ stream_id=2,
+ promised_stream_id=4,
+ request_headers=self.example_request_headers
+ )
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_invalid_headers.py b/testing/web-platform/tests/tools/third_party/h2/test/test_invalid_headers.py
new file mode 100644
index 0000000000..a379950733
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_invalid_headers.py
@@ -0,0 +1,952 @@
+# -*- coding: utf-8 -*-
+"""
+test_invalid_headers.py
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This module contains tests that use invalid header blocks, and validates that
+they fail appropriately.
+"""
+import itertools
+
+import pytest
+
+import h2.config
+import h2.connection
+import h2.errors
+import h2.events
+import h2.exceptions
+import h2.settings
+import h2.utilities
+
+import hyperframe.frame
+
+from hypothesis import given
+from hypothesis.strategies import binary, lists, tuples
+
+HEADERS_STRATEGY = lists(tuples(binary(min_size=1), binary()))
+
+
+class TestInvalidFrameSequences(object):
+ """
+ Invalid header sequences cause ProtocolErrors to be thrown when received.
+ """
+ base_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ('user-agent', 'someua/0.0.1'),
+ ]
+ invalid_header_blocks = [
+ base_request_headers + [('Uppercase', 'name')],
+ base_request_headers + [(':late', 'pseudo-header')],
+ [(':path', 'duplicate-pseudo-header')] + base_request_headers,
+ base_request_headers + [('connection', 'close')],
+ base_request_headers + [('proxy-connection', 'close')],
+ base_request_headers + [('keep-alive', 'close')],
+ base_request_headers + [('transfer-encoding', 'gzip')],
+ base_request_headers + [('upgrade', 'super-protocol/1.1')],
+ base_request_headers + [('te', 'chunked')],
+ base_request_headers + [('host', 'notexample.com')],
+ base_request_headers + [(' name', 'name with leading space')],
+ base_request_headers + [('name ', 'name with trailing space')],
+ base_request_headers + [('name', ' value with leading space')],
+ base_request_headers + [('name', 'value with trailing space ')],
+ [header for header in base_request_headers
+ if header[0] != ':authority'],
+ [(':protocol', 'websocket')] + base_request_headers,
+ ]
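+ # Roughly, the blocks above cover: uppercase field names, a pseudo-header
+ # placed after regular fields, a duplicated pseudo-header,
+ # connection-specific headers (connection, proxy-connection, keep-alive,
+ # transfer-encoding, upgrade), `te` with a value other than "trailers",
+ # a Host that disagrees with :authority, leading or trailing whitespace in
+ # names and values, a request with no :authority, and :protocol outside an
+ # extended CONNECT request.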
+ server_config = h2.config.H2Configuration(
+ client_side=False, header_encoding='utf-8'
+ )
+
+ @pytest.mark.parametrize('headers', invalid_header_blocks)
+ def test_headers_event(self, frame_factory, headers):
+ """
+ Test invalid headers are rejected with PROTOCOL_ERROR.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(headers)
+ data = f.serialize()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(data)
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1, error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ @pytest.mark.parametrize('headers', invalid_header_blocks)
+ def test_push_promise_event(self, frame_factory, headers):
+ """
+ If a PUSH_PROMISE header frame is received with an invalid header block,
+ it is rejected with a PROTOCOL_ERROR.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(
+ stream_id=1, headers=self.base_request_headers, end_stream=True
+ )
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=headers
+ )
+ data = f.serialize()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(data)
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=0, error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ @pytest.mark.parametrize('headers', invalid_header_blocks)
+ def test_push_promise_skipping_validation(self, frame_factory, headers):
+ """
+ If we have ``validate_inbound_headers`` disabled, then invalid header
+ blocks in push promise frames are allowed to pass.
+ """
+ config = h2.config.H2Configuration(
+ client_side=True,
+ validate_inbound_headers=False,
+ header_encoding='utf-8'
+ )
+
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.send_headers(
+ stream_id=1, headers=self.base_request_headers, end_stream=True
+ )
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=headers
+ )
+ data = f.serialize()
+
+ events = c.receive_data(data)
+ assert len(events) == 1
+ pp_event = events[0]
+ assert pp_event.headers == headers
+
+ @pytest.mark.parametrize('headers', invalid_header_blocks)
+ def test_headers_event_skipping_validation(self, frame_factory, headers):
+ """
+ If we have ``validate_inbound_headers`` disabled, then all of these
+ invalid header blocks are allowed to pass.
+ """
+ config = h2.config.H2Configuration(
+ client_side=False,
+ validate_inbound_headers=False,
+ header_encoding='utf-8'
+ )
+
+ c = h2.connection.H2Connection(config=config)
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(headers)
+ data = f.serialize()
+
+ events = c.receive_data(data)
+ assert len(events) == 1
+ request_event = events[0]
+ assert request_event.headers == headers
+
+ def test_transfer_encoding_trailers_is_valid(self, frame_factory):
+ """
+ The ``te: trailers`` header is allowed by the filter.
+ """
+ headers = (
+ self.base_request_headers + [('te', 'trailers')]
+ )
+
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(headers)
+ data = f.serialize()
+
+ events = c.receive_data(data)
+ assert len(events) == 1
+ request_event = events[0]
+ assert request_event.headers == headers
+
+ def test_pseudo_headers_rejected_in_trailer(self, frame_factory):
+ """
+ Ensure we reject pseudo headers included in trailers
+ """
+ trailers = [(':path', '/'), ('extra', 'value')]
+
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ header_frame = frame_factory.build_headers_frame(
+ self.base_request_headers
+ )
+ trailer_frame = frame_factory.build_headers_frame(
+ trailers, flags=["END_STREAM"]
+ )
+ head = header_frame.serialize()
+ trailer = trailer_frame.serialize()
+
+ c.receive_data(head)
+ # Raise exception if pseudo header in trailer
+ with pytest.raises(h2.exceptions.ProtocolError) as e:
+ c.receive_data(trailer)
+ assert "pseudo-header in trailer" in str(e.value)
+
+ # Test appropriate response frame is generated
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1, error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+
+class TestSendingInvalidFrameSequences(object):
+ """
+ Trying to send invalid header sequences causes ProtocolErrors to
+ be thrown.
+ """
+ base_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ('user-agent', 'someua/0.0.1'),
+ ]
+ invalid_header_blocks = [
+ base_request_headers + [(':late', 'pseudo-header')],
+ [(':path', 'duplicate-pseudo-header')] + base_request_headers,
+ base_request_headers + [('te', 'chunked')],
+ base_request_headers + [('host', 'notexample.com')],
+ [header for header in base_request_headers
+ if header[0] != ':authority'],
+ ]
+ strippable_header_blocks = [
+ base_request_headers + [('connection', 'close')],
+ base_request_headers + [('proxy-connection', 'close')],
+ base_request_headers + [('keep-alive', 'close')],
+ base_request_headers + [('transfer-encoding', 'gzip')],
+ base_request_headers + [('upgrade', 'super-protocol/1.1')]
+ ]
+ all_header_blocks = invalid_header_blocks + strippable_header_blocks
+
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ @pytest.mark.parametrize('headers', invalid_header_blocks)
+ def test_headers_event(self, frame_factory, headers):
+ """
+ Test that sending invalid headers raises a ProtocolError.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # Clear the data, then try to send headers.
+ c.clear_outbound_data_buffer()
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.send_headers(1, headers)
+
+ @pytest.mark.parametrize('headers', invalid_header_blocks)
+ def test_send_push_promise(self, frame_factory, headers):
+ """
+ Sending invalid headers in a push promise raises a ProtocolError.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ header_frame = frame_factory.build_headers_frame(
+ self.base_request_headers
+ )
+ c.receive_data(header_frame.serialize())
+
+ # Clear the data, then try to send a push promise.
+ c.clear_outbound_data_buffer()
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.push_stream(
+ stream_id=1, promised_stream_id=2, request_headers=headers
+ )
+
+ @pytest.mark.parametrize('headers', all_header_blocks)
+ def test_headers_event_skipping_validation(self, frame_factory, headers):
+ """
+ If we have ``validate_outbound_headers`` disabled, then all of these
+ invalid header blocks are allowed to pass.
+ """
+ config = h2.config.H2Configuration(
+ validate_outbound_headers=False
+ )
+
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+
+ # Clear the data, then send headers.
+ c.clear_outbound_data_buffer()
+ c.send_headers(1, headers)
+
+ # Ensure headers are still normalized.
+ norm_headers = h2.utilities.normalize_outbound_headers(headers, None)
+ f = frame_factory.build_headers_frame(norm_headers)
+ assert c.data_to_send() == f.serialize()
+
+ @pytest.mark.parametrize('headers', all_header_blocks)
+ def test_push_promise_skipping_validation(self, frame_factory, headers):
+ """
+ If we have ``validate_outbound_headers`` disabled, then all of these
+ invalid header blocks are allowed to pass.
+ """
+ config = h2.config.H2Configuration(
+ client_side=False,
+ validate_outbound_headers=False,
+ )
+
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ header_frame = frame_factory.build_headers_frame(
+ self.base_request_headers
+ )
+ c.receive_data(header_frame.serialize())
+
+ # Create push promise frame with normalized headers.
+ frame_factory.refresh_encoder()
+ norm_headers = h2.utilities.normalize_outbound_headers(headers, None)
+ pp_frame = frame_factory.build_push_promise_frame(
+ stream_id=1, promised_stream_id=2, headers=norm_headers
+ )
+
+ # Clear the data, then send a push promise.
+ c.clear_outbound_data_buffer()
+ c.push_stream(
+ stream_id=1, promised_stream_id=2, request_headers=headers
+ )
+ assert c.data_to_send() == pp_frame.serialize()
+
+ @pytest.mark.parametrize('headers', all_header_blocks)
+ def test_headers_event_skip_normalization(self, frame_factory, headers):
+ """
+ If we have ``normalize_outbound_headers`` disabled, then all of these
+ invalid header blocks are sent through unmodified.
+ """
+ config = h2.config.H2Configuration(
+ validate_outbound_headers=False,
+ normalize_outbound_headers=False
+ )
+
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+
+ f = frame_factory.build_headers_frame(
+ headers,
+ stream_id=1,
+ )
+
+ # Clear the data, then send headers.
+ c.clear_outbound_data_buffer()
+ c.send_headers(1, headers)
+ assert c.data_to_send() == f.serialize()
+
+ @pytest.mark.parametrize('headers', all_header_blocks)
+ def test_push_promise_skip_normalization(self, frame_factory, headers):
+ """
+ If we have ``normalize_outbound_headers`` disabled, then all of these
+ invalid header blocks are allowed to pass unmodified.
+ """
+ config = h2.config.H2Configuration(
+ client_side=False,
+ validate_outbound_headers=False,
+ normalize_outbound_headers=False,
+ )
+
+ c = h2.connection.H2Connection(config=config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ header_frame = frame_factory.build_headers_frame(
+ self.base_request_headers
+ )
+ c.receive_data(header_frame.serialize())
+
+ frame_factory.refresh_encoder()
+ pp_frame = frame_factory.build_push_promise_frame(
+ stream_id=1, promised_stream_id=2, headers=headers
+ )
+
+ # Clear the data, then send a push promise.
+ c.clear_outbound_data_buffer()
+ c.push_stream(
+ stream_id=1, promised_stream_id=2, request_headers=headers
+ )
+ assert c.data_to_send() == pp_frame.serialize()
+
+ @pytest.mark.parametrize('headers', strippable_header_blocks)
+ def test_strippable_headers(self, frame_factory, headers):
+ """
+ Test that connection-related headers are removed before sending.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # Clear the data, then try to send headers.
+ c.clear_outbound_data_buffer()
+ c.send_headers(1, headers)
+
+ f = frame_factory.build_headers_frame(self.base_request_headers)
+ assert c.data_to_send() == f.serialize()
+
+
+class TestFilter(object):
+ """
+ Test the filter function directly.
+
+ These tests exist to confirm the behaviour of the filter function in a
+ wide range of scenarios. Many of these scenarios may not be legal for
+ HTTP/2 and so may never hit the function, but it's worth validating that it
+ behaves as expected anyway.
+ """
+ validation_functions = [
+ h2.utilities.validate_headers,
+ h2.utilities.validate_outbound_headers
+ ]
+
+ hdr_validation_combos = [
+ h2.utilities.HeaderValidationFlags(
+ is_client, is_trailer, is_response_header, is_push_promise
+ )
+ for is_client, is_trailer, is_response_header, is_push_promise in (
+ itertools.product([True, False], repeat=4)
+ )
+ ]
+
+ hdr_validation_response_headers = [
+ flags for flags in hdr_validation_combos
+ if flags.is_response_header
+ ]
+
+ hdr_validation_request_headers_no_trailer = [
+ flags for flags in hdr_validation_combos
+ if not (flags.is_trailer or flags.is_response_header)
+ ]
+
+ invalid_request_header_blocks_bytes = (
+ # First, missing :method
+ (
+ (b':authority', b'google.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ ),
+ # Next, missing :path
+ (
+ (b':authority', b'google.com'),
+ (b':method', b'GET'),
+ (b':scheme', b'https'),
+ ),
+ # Next, missing :scheme
+ (
+ (b':authority', b'google.com'),
+ (b':method', b'GET'),
+ (b':path', b'/'),
+ ),
+ # Finally, path present but empty.
+ (
+ (b':authority', b'google.com'),
+ (b':method', b'GET'),
+ (b':scheme', b'https'),
+ (b':path', b''),
+ ),
+ )
+ invalid_request_header_blocks_unicode = (
+ # First, missing :method
+ (
+ (u':authority', u'google.com'),
+ (u':path', u'/'),
+ (u':scheme', u'https'),
+ ),
+ # Next, missing :path
+ (
+ (u':authority', u'google.com'),
+ (u':method', u'GET'),
+ (u':scheme', u'https'),
+ ),
+ # Next, missing :scheme
+ (
+ (u':authority', u'google.com'),
+ (u':method', u'GET'),
+ (u':path', u'/'),
+ ),
+ # Finally, path present but empty.
+ (
+ (u':authority', u'google.com'),
+ (u':method', u'GET'),
+ (u':scheme', u'https'),
+ (u':path', u''),
+ ),
+ )
+
+ # All headers that are forbidden from either request or response blocks.
+ forbidden_request_headers_bytes = (b':status',)
+ forbidden_request_headers_unicode = (u':status',)
+ forbidden_response_headers_bytes = (
+ b':path', b':scheme', b':authority', b':method'
+ )
+ forbidden_response_headers_unicode = (
+ u':path', u':scheme', u':authority', u':method'
+ )
+
+ @pytest.mark.parametrize('validation_function', validation_functions)
+ @pytest.mark.parametrize('hdr_validation_flags', hdr_validation_combos)
+ @given(headers=HEADERS_STRATEGY)
+ def test_range_of_acceptable_outputs(self,
+ headers,
+ validation_function,
+ hdr_validation_flags):
+ """
+ The header validation functions either return the data unchanged
+ or throw a ProtocolError.
+ """
+ try:
+ assert headers == list(validation_function(
+ headers, hdr_validation_flags))
+ except h2.exceptions.ProtocolError:
+ assert True
+
+ @pytest.mark.parametrize('hdr_validation_flags', hdr_validation_combos)
+ def test_invalid_pseudo_headers(self, hdr_validation_flags):
+ headers = [(b':custom', b'value')]
+ with pytest.raises(h2.exceptions.ProtocolError):
+ list(h2.utilities.validate_headers(headers, hdr_validation_flags))
+
+ @pytest.mark.parametrize('validation_function', validation_functions)
+ @pytest.mark.parametrize(
+ 'hdr_validation_flags', hdr_validation_request_headers_no_trailer
+ )
+ def test_matching_authority_host_headers(self,
+ validation_function,
+ hdr_validation_flags):
+ """
+ If a header block has :authority and Host headers and they match,
+ the headers should pass through unchanged.
+ """
+ headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ (b'host', b'example.com'),
+ ]
+ assert headers == list(h2.utilities.validate_headers(
+ headers, hdr_validation_flags
+ ))
+
+ @pytest.mark.parametrize(
+ 'hdr_validation_flags', hdr_validation_response_headers
+ )
+ def test_response_header_without_status(self, hdr_validation_flags):
+ headers = [(b'content-length', b'42')]
+ with pytest.raises(h2.exceptions.ProtocolError):
+ list(h2.utilities.validate_headers(headers, hdr_validation_flags))
+
+ @pytest.mark.parametrize(
+ 'hdr_validation_flags', hdr_validation_request_headers_no_trailer
+ )
+ @pytest.mark.parametrize(
+ 'header_block',
+ (
+ invalid_request_header_blocks_bytes +
+ invalid_request_header_blocks_unicode
+ )
+ )
+ def test_outbound_req_header_missing_pseudo_headers(self,
+ hdr_validation_flags,
+ header_block):
+ with pytest.raises(h2.exceptions.ProtocolError):
+ list(
+ h2.utilities.validate_outbound_headers(
+ header_block, hdr_validation_flags
+ )
+ )
+
+ @pytest.mark.parametrize(
+ 'hdr_validation_flags', hdr_validation_request_headers_no_trailer
+ )
+ @pytest.mark.parametrize(
+ 'header_block', invalid_request_header_blocks_bytes
+ )
+ def test_inbound_req_header_missing_pseudo_headers(self,
+ hdr_validation_flags,
+ header_block):
+ with pytest.raises(h2.exceptions.ProtocolError):
+ list(
+ h2.utilities.validate_headers(
+ header_block, hdr_validation_flags
+ )
+ )
+
+ @pytest.mark.parametrize(
+ 'hdr_validation_flags', hdr_validation_request_headers_no_trailer
+ )
+ @pytest.mark.parametrize(
+ 'invalid_header',
+ forbidden_request_headers_bytes + forbidden_request_headers_unicode
+ )
+ def test_outbound_req_header_extra_pseudo_headers(self,
+ hdr_validation_flags,
+ invalid_header):
+ """
+ Outbound request header blocks containing the forbidden request headers
+ fail validation.
+ """
+ headers = [
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':authority', b'google.com'),
+ (b':method', b'GET'),
+ ]
+ headers.append((invalid_header, b'some value'))
+ with pytest.raises(h2.exceptions.ProtocolError):
+ list(
+ h2.utilities.validate_outbound_headers(
+ headers, hdr_validation_flags
+ )
+ )
+
+ @pytest.mark.parametrize(
+ 'hdr_validation_flags', hdr_validation_request_headers_no_trailer
+ )
+ @pytest.mark.parametrize(
+ 'invalid_header',
+ forbidden_request_headers_bytes
+ )
+ def test_inbound_req_header_extra_pseudo_headers(self,
+ hdr_validation_flags,
+ invalid_header):
+ """
+ Inbound request header blocks containing the forbidden request headers
+ fail validation.
+ """
+ headers = [
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':authority', b'google.com'),
+ (b':method', b'GET'),
+ ]
+ headers.append((invalid_header, b'some value'))
+ with pytest.raises(h2.exceptions.ProtocolError):
+ list(h2.utilities.validate_headers(headers, hdr_validation_flags))
+
+ @pytest.mark.parametrize(
+ 'hdr_validation_flags', hdr_validation_response_headers
+ )
+ @pytest.mark.parametrize(
+ 'invalid_header',
+ forbidden_response_headers_bytes + forbidden_response_headers_unicode
+ )
+ def test_outbound_resp_header_extra_pseudo_headers(self,
+ hdr_validation_flags,
+ invalid_header):
+ """
+ Outbound response header blocks containing the forbidden response
+ headers fail validation.
+ """
+ headers = [(b':status', b'200')]
+ headers.append((invalid_header, b'some value'))
+ with pytest.raises(h2.exceptions.ProtocolError):
+ list(
+ h2.utilities.validate_outbound_headers(
+ headers, hdr_validation_flags
+ )
+ )
+
+ @pytest.mark.parametrize(
+ 'hdr_validation_flags', hdr_validation_response_headers
+ )
+ @pytest.mark.parametrize(
+ 'invalid_header',
+ forbidden_response_headers_bytes
+ )
+ def test_inbound_resp_header_extra_pseudo_headers(self,
+ hdr_validation_flags,
+ invalid_header):
+ """
+ Inbound response header blocks containing the forbidden response
+ headers fail validation.
+ """
+ headers = [(b':status', b'200')]
+ headers.append((invalid_header, b'some value'))
+ with pytest.raises(h2.exceptions.ProtocolError):
+ list(h2.utilities.validate_headers(headers, hdr_validation_flags))
+
+
+class TestOversizedHeaders(object):
+ """
+ Tests that oversized header blocks are correctly rejected. This replicates
+ the "HPACK Bomb" attack, and confirms that we're resistant against it.
+ """
+ request_header_block = [
+ (b':method', b'GET'),
+ (b':authority', b'example.com'),
+ (b':scheme', b'https'),
+ (b':path', b'/'),
+ ]
+
+ response_header_block = [
+ (b':status', b'200'),
+ ]
+
+ # The first header block contains a single header that fills the header
+ # table. To do that, we'll give it a single-character header name and a
+ # 4063 byte header value. This will make it exactly the size of the header
+ # table. It must come last, so that it evicts all other headers.
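+    # (Each HPACK table entry costs name length + value length + 32 bytes of
+    # overhead per RFC 7541 Section 4.1, so 1 + 4063 + 32 = 4096.)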
+ # This block must be appended to either a request or response block.
+ first_header_block = [
+ (b'a', b'a' * 4063),
+ ]
+
+ # The second header "block" is actually a custom HEADERS frame body that
+ # simply repeatedly refers to the first entry for 16kB. Each byte has the
+ # high bit set (0x80), and then uses the remaining 7 bits to encode the
+ # number 62 (0x3e), leading to a repeat of the byte 0xbe.
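+    # (Index 62 is the first dynamic table entry, because the HPACK static
+    # table occupies indices 1 through 61.)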
+ second_header_block = b'\xbe' * 2**14
+
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ def test_hpack_bomb_request(self, frame_factory):
+ """
+ A HPACK bomb request causes the connection to be torn down with the
+ error code ENHANCE_YOUR_CALM.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ self.request_header_block + self.first_header_block
+ )
+ data = f.serialize()
+ c.receive_data(data)
+
+ # Build the attack payload.
+ attack_frame = hyperframe.frame.HeadersFrame(stream_id=3)
+ attack_frame.data = self.second_header_block
+ attack_frame.flags.add('END_HEADERS')
+ data = attack_frame.serialize()
+
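+        # Decoding this block would expand roughly 16 kB of HPACK input into
+        # about 64 MB of header data, so the connection refuses to process it.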
+ with pytest.raises(h2.exceptions.DenialOfServiceError):
+ c.receive_data(data)
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1, error_code=h2.errors.ErrorCodes.ENHANCE_YOUR_CALM
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_hpack_bomb_response(self, frame_factory):
+ """
+ A HPACK bomb response causes the connection to be torn down with the
+ error code ENHANCE_YOUR_CALM.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(
+ stream_id=1, headers=self.request_header_block
+ )
+ c.send_headers(
+ stream_id=3, headers=self.request_header_block
+ )
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ self.response_header_block + self.first_header_block
+ )
+ data = f.serialize()
+ c.receive_data(data)
+
+ # Build the attack payload.
+ attack_frame = hyperframe.frame.HeadersFrame(stream_id=3)
+ attack_frame.data = self.second_header_block
+ attack_frame.flags.add('END_HEADERS')
+ data = attack_frame.serialize()
+
+ with pytest.raises(h2.exceptions.DenialOfServiceError):
+ c.receive_data(data)
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=0, error_code=h2.errors.ErrorCodes.ENHANCE_YOUR_CALM
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_hpack_bomb_push(self, frame_factory):
+ """
+ A HPACK bomb push causes the connection to be torn down with the
+ error code ENHANCE_YOUR_CALM.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(
+ stream_id=1, headers=self.request_header_block
+ )
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ self.response_header_block + self.first_header_block
+ )
+ data = f.serialize()
+ c.receive_data(data)
+
+ # Build the attack payload. We need to shrink it by four bytes because
+ # the promised_stream_id consumes four bytes of body.
+ attack_frame = hyperframe.frame.PushPromiseFrame(stream_id=3)
+ attack_frame.promised_stream_id = 2
+ attack_frame.data = self.second_header_block[:-4]
+ attack_frame.flags.add('END_HEADERS')
+ data = attack_frame.serialize()
+
+ with pytest.raises(h2.exceptions.DenialOfServiceError):
+ c.receive_data(data)
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=0, error_code=h2.errors.ErrorCodes.ENHANCE_YOUR_CALM
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_reject_headers_when_list_size_shrunk(self, frame_factory):
+ """
+ When we've shrunk the header list size, we reject new header blocks
+ that violate the new size.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ # Receive the first request, which causes no problem.
+ f = frame_factory.build_headers_frame(
+ stream_id=1,
+ headers=self.request_header_block
+ )
+ data = f.serialize()
+ c.receive_data(data)
+
+ # Now, send a settings change. It's un-ACKed at this time. A new
+ # request arrives, also without incident.
+ c.update_settings({h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 50})
+ c.clear_outbound_data_buffer()
+ f = frame_factory.build_headers_frame(
+ stream_id=3,
+ headers=self.request_header_block
+ )
+ data = f.serialize()
+ c.receive_data(data)
+
+ # We get a SETTINGS ACK.
+ f = frame_factory.build_settings_frame({}, ack=True)
+ data = f.serialize()
+ c.receive_data(data)
+
+ # Now a third request comes in. This explodes.
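+        # Once the ACK has been received, the 50-byte MAX_HEADER_LIST_SIZE is
+        # enforced, and this header block is comfortably larger than that.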
+ f = frame_factory.build_headers_frame(
+ stream_id=5,
+ headers=self.request_header_block
+ )
+ data = f.serialize()
+
+ with pytest.raises(h2.exceptions.DenialOfServiceError):
+ c.receive_data(data)
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=3, error_code=h2.errors.ErrorCodes.ENHANCE_YOUR_CALM
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_reject_headers_when_table_size_shrunk(self, frame_factory):
+ """
+ When we've shrunk the header table size, we reject header blocks that
+ do not respect the change.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ # Receive the first request, which causes no problem.
+ f = frame_factory.build_headers_frame(
+ stream_id=1,
+ headers=self.request_header_block
+ )
+ data = f.serialize()
+ c.receive_data(data)
+
+ # Now, send a settings change. It's un-ACKed at this time. A new
+ # request arrives, also without incident.
+ c.update_settings({h2.settings.SettingCodes.HEADER_TABLE_SIZE: 128})
+ c.clear_outbound_data_buffer()
+ f = frame_factory.build_headers_frame(
+ stream_id=3,
+ headers=self.request_header_block
+ )
+ data = f.serialize()
+ c.receive_data(data)
+
+ # We get a SETTINGS ACK.
+ f = frame_factory.build_settings_frame({}, ack=True)
+ data = f.serialize()
+ c.receive_data(data)
+
+ # Now a third request comes in. This explodes, as it does not contain
+ # a dynamic table size update.
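+        # (RFC 7541 Section 4.2 requires the encoder to emit such an update at
+        # the start of the first header block after the size change.)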
+ f = frame_factory.build_headers_frame(
+ stream_id=5,
+ headers=self.request_header_block
+ )
+ data = f.serialize()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(data)
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=3, error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ def test_reject_headers_exceeding_table_size(self, frame_factory):
+ """
+ When the remote peer sends a dynamic table size update that exceeds our
+ setting, we reject it.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ # Receive the first request, which causes no problem.
+ f = frame_factory.build_headers_frame(
+ stream_id=1,
+ headers=self.request_header_block
+ )
+ data = f.serialize()
+ c.receive_data(data)
+
+ # Now a second request comes in that sets the table size too high.
+ # This explodes.
+ frame_factory.change_table_size(c.local_settings.header_table_size + 1)
+ f = frame_factory.build_headers_frame(
+ stream_id=5,
+ headers=self.request_header_block
+ )
+ data = f.serialize()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(data)
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=1, error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR
+ )
+ assert c.data_to_send() == expected_frame.serialize()
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_priority.py b/testing/web-platform/tests/tools/third_party/h2/test/test_priority.py
new file mode 100644
index 0000000000..cbc7332253
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_priority.py
@@ -0,0 +1,358 @@
+# -*- coding: utf-8 -*-
+"""
+test_priority
+~~~~~~~~~~~~~
+
+Test the priority logic of Hyper-h2.
+"""
+import pytest
+
+import h2.config
+import h2.connection
+import h2.errors
+import h2.events
+import h2.exceptions
+import h2.stream
+
+
+class TestPriority(object):
+ """
+ Basic priority tests.
+ """
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+ example_response_headers = [
+ (':status', '200'),
+ ('server', 'pytest-h2'),
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ def test_receiving_priority_emits_priority_update(self, frame_factory):
+ """
+ Receiving a priority frame emits a PriorityUpdated event.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_priority_frame(
+ stream_id=1,
+ weight=255,
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ assert not c.data_to_send()
+
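+        # A wire weight of 255 surfaces as 256: RFC 7540 stores weight - 1 in
+        # the frame, so h2 adds one back when reporting it.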
+ event = events[0]
+ assert isinstance(event, h2.events.PriorityUpdated)
+ assert event.stream_id == 1
+ assert event.depends_on == 0
+ assert event.weight == 256
+ assert event.exclusive is False
+
+ def test_headers_with_priority_info(self, frame_factory):
+ """
+ Receiving a HEADERS frame with priority information on it emits a
+ PriorityUpdated event.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ stream_id=3,
+ flags=['PRIORITY'],
+ stream_weight=15,
+ depends_on=1,
+ exclusive=True,
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 2
+ assert not c.data_to_send()
+
+ event = events[1]
+ assert isinstance(event, h2.events.PriorityUpdated)
+ assert event.stream_id == 3
+ assert event.depends_on == 1
+ assert event.weight == 16
+ assert event.exclusive is True
+
+ def test_streams_may_not_depend_on_themselves(self, frame_factory):
+ """
+ A stream adjusted to depend on itself causes a Protocol Error.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ stream_id=3,
+ flags=['PRIORITY'],
+ stream_weight=15,
+ depends_on=1,
+ exclusive=True,
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_priority_frame(
+ stream_id=3,
+ depends_on=3,
+ weight=15
+ )
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.receive_data(f.serialize())
+
+ expected_frame = frame_factory.build_goaway_frame(
+ last_stream_id=3,
+ error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
+ )
+ assert c.data_to_send() == expected_frame.serialize()
+
+ @pytest.mark.parametrize(
+ 'depends_on,weight,exclusive',
+ [
+ (0, 256, False),
+ (3, 128, False),
+ (3, 128, True),
+ ]
+ )
+ def test_can_prioritize_stream(self, depends_on, weight, exclusive,
+ frame_factory):
+ """
+ hyper-h2 can emit priority frames.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ c.send_headers(headers=self.example_request_headers, stream_id=1)
+ c.send_headers(headers=self.example_request_headers, stream_id=3)
+ c.clear_outbound_data_buffer()
+
+ c.prioritize(
+ stream_id=1,
+ depends_on=depends_on,
+ weight=weight,
+ exclusive=exclusive
+ )
+
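+        # The frame is expected to carry weight - 1, since the wire format
+        # encodes weights 1-256 in the range 0-255.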
+ f = frame_factory.build_priority_frame(
+ stream_id=1,
+ weight=weight - 1,
+ depends_on=depends_on,
+ exclusive=exclusive,
+ )
+ assert c.data_to_send() == f.serialize()
+
+ @pytest.mark.parametrize(
+ 'depends_on,weight,exclusive',
+ [
+ (0, 256, False),
+ (1, 128, False),
+ (1, 128, True),
+ ]
+ )
+ def test_emit_headers_with_priority_info(self, depends_on, weight,
+ exclusive, frame_factory):
+ """
+ It is possible to send a headers frame with priority information on
+ it.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.clear_outbound_data_buffer()
+
+ c.send_headers(
+ headers=self.example_request_headers,
+ stream_id=3,
+ priority_weight=weight,
+ priority_depends_on=depends_on,
+ priority_exclusive=exclusive,
+ )
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ stream_id=3,
+ flags=['PRIORITY'],
+ stream_weight=weight - 1,
+ depends_on=depends_on,
+ exclusive=exclusive,
+ )
+ assert c.data_to_send() == f.serialize()
+
+ def test_may_not_prioritize_stream_to_depend_on_self(self, frame_factory):
+ """
+ A stream adjusted to depend on itself causes a Protocol Error.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.send_headers(
+ headers=self.example_request_headers,
+ stream_id=3,
+ priority_weight=255,
+ priority_depends_on=0,
+ priority_exclusive=False,
+ )
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.prioritize(
+ stream_id=3,
+ depends_on=3,
+ )
+
+ assert not c.data_to_send()
+
+ def test_may_not_initially_set_stream_depend_on_self(self, frame_factory):
+ """
+ A stream that starts by depending on itself causes a Protocol Error.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.send_headers(
+ headers=self.example_request_headers,
+ stream_id=3,
+ priority_depends_on=3,
+ )
+
+ assert not c.data_to_send()
+
+ @pytest.mark.parametrize('weight', [0, -15, 257])
+ def test_prioritize_requires_valid_weight(self, weight):
+ """
+ A call to prioritize with an invalid weight causes a ProtocolError.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.prioritize(stream_id=1, weight=weight)
+
+ assert not c.data_to_send()
+
+ @pytest.mark.parametrize('weight', [0, -15, 257])
+ def test_send_headers_requires_valid_weight(self, weight):
+ """
+ A call to send_headers with an invalid weight causes a ProtocolError.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_request_headers,
+ priority_weight=weight
+ )
+
+ assert not c.data_to_send()
+
+ def test_prioritize_defaults(self, frame_factory):
+ """
+ When prioritize() is called with no explicit arguments, it emits a
+ weight of 16, depending on stream zero non-exclusively.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.clear_outbound_data_buffer()
+
+ c.prioritize(stream_id=1)
+
+ f = frame_factory.build_priority_frame(
+ stream_id=1,
+ weight=15,
+ depends_on=0,
+ exclusive=False,
+ )
+ assert c.data_to_send() == f.serialize()
+
+ @pytest.mark.parametrize(
+ 'priority_kwargs',
+ [
+ {'priority_weight': 16},
+ {'priority_depends_on': 0},
+ {'priority_exclusive': False},
+ ]
+ )
+ def test_send_headers_defaults(self, priority_kwargs, frame_factory):
+ """
+ When send_headers() is called with only one explicit argument, it emits
+ default values for everything else.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.clear_outbound_data_buffer()
+
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_request_headers,
+ **priority_kwargs
+ )
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ stream_id=1,
+ flags=['PRIORITY'],
+ stream_weight=15,
+ depends_on=0,
+ exclusive=False,
+ )
+ assert c.data_to_send() == f.serialize()
+
+ def test_servers_cannot_prioritize(self, frame_factory):
+ """
+ Server stacks are not allowed to call ``prioritize()``.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ stream_id=1,
+ headers=self.example_request_headers,
+ )
+ c.receive_data(f.serialize())
+
+ with pytest.raises(h2.exceptions.RFC1122Error):
+ c.prioritize(stream_id=1)
+
+ def test_servers_cannot_prioritize_with_headers(self, frame_factory):
+ """
+ Server stacks are not allowed to prioritize on headers either.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ stream_id=1,
+ headers=self.example_request_headers,
+ )
+ c.receive_data(f.serialize())
+
+ with pytest.raises(h2.exceptions.RFC1122Error):
+ c.send_headers(
+ stream_id=1,
+ headers=self.example_response_headers,
+ priority_weight=16,
+ )
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_related_events.py b/testing/web-platform/tests/tools/third_party/h2/test/test_related_events.py
new file mode 100644
index 0000000000..eb6b878905
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_related_events.py
@@ -0,0 +1,370 @@
+# -*- coding: utf-8 -*-
+"""
+test_related_events.py
+~~~~~~~~~~~~~~~~~~~~~~
+
+Specific tests to validate the "related events" logic used by certain events
+inside hyper-h2.
+"""
+import h2.config
+import h2.connection
+import h2.events
+
+
+class TestRelatedEvents(object):
+ """
+ Related events correlate all those events that happen on a single frame.
+ """
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+
+ example_response_headers = [
+ (':status', '200'),
+ ('server', 'fake-serv/0.1.0')
+ ]
+
+ informational_response_headers = [
+ (':status', '100'),
+ ('server', 'fake-serv/0.1.0')
+ ]
+
+ example_trailers = [
+ ('another', 'field'),
+ ]
+
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ def test_request_received_related_all(self, frame_factory):
+ """
+ RequestReceived has two possible related events: PriorityUpdated and
+        StreamEnded, both fired when a single HEADERS frame is received.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ input_frame = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ flags=['END_STREAM', 'PRIORITY'],
+ stream_weight=15,
+ depends_on=0,
+ exclusive=False,
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 3
+ base_event = events[0]
+ other_events = events[1:]
+
+ assert base_event.stream_ended in other_events
+ assert isinstance(base_event.stream_ended, h2.events.StreamEnded)
+ assert base_event.priority_updated in other_events
+ assert isinstance(
+ base_event.priority_updated, h2.events.PriorityUpdated
+ )
+
+ def test_request_received_related_priority(self, frame_factory):
+ """
+ RequestReceived can be related to PriorityUpdated.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ input_frame = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ flags=['PRIORITY'],
+ stream_weight=15,
+ depends_on=0,
+ exclusive=False,
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 2
+ base_event = events[0]
+ priority_updated_event = events[1]
+
+ assert base_event.priority_updated is priority_updated_event
+ assert base_event.stream_ended is None
+ assert isinstance(
+ base_event.priority_updated, h2.events.PriorityUpdated
+ )
+
+ def test_request_received_related_stream_ended(self, frame_factory):
+ """
+ RequestReceived can be related to StreamEnded.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ input_frame = frame_factory.build_headers_frame(
+ headers=self.example_request_headers,
+ flags=['END_STREAM'],
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 2
+ base_event = events[0]
+ stream_ended_event = events[1]
+
+ assert base_event.stream_ended is stream_ended_event
+ assert base_event.priority_updated is None
+ assert isinstance(base_event.stream_ended, h2.events.StreamEnded)
+
+ def test_response_received_related_nothing(self, frame_factory):
+ """
+ ResponseReceived is ordinarily related to no events.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ input_frame = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 1
+ base_event = events[0]
+
+ assert base_event.stream_ended is None
+ assert base_event.priority_updated is None
+
+ def test_response_received_related_all(self, frame_factory):
+ """
+ ResponseReceived has two possible related events: PriorityUpdated and
+        StreamEnded, both fired when a single HEADERS frame is received.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ input_frame = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ flags=['END_STREAM', 'PRIORITY'],
+ stream_weight=15,
+ depends_on=0,
+ exclusive=False,
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 3
+ base_event = events[0]
+ other_events = events[1:]
+
+ assert base_event.stream_ended in other_events
+ assert isinstance(base_event.stream_ended, h2.events.StreamEnded)
+ assert base_event.priority_updated in other_events
+ assert isinstance(
+ base_event.priority_updated, h2.events.PriorityUpdated
+ )
+
+ def test_response_received_related_priority(self, frame_factory):
+ """
+ ResponseReceived can be related to PriorityUpdated.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ input_frame = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ flags=['PRIORITY'],
+ stream_weight=15,
+ depends_on=0,
+ exclusive=False,
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 2
+ base_event = events[0]
+ priority_updated_event = events[1]
+
+ assert base_event.priority_updated is priority_updated_event
+ assert base_event.stream_ended is None
+ assert isinstance(
+ base_event.priority_updated, h2.events.PriorityUpdated
+ )
+
+ def test_response_received_related_stream_ended(self, frame_factory):
+ """
+ ResponseReceived can be related to StreamEnded.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ input_frame = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ flags=['END_STREAM'],
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 2
+ base_event = events[0]
+ stream_ended_event = events[1]
+
+ assert base_event.stream_ended is stream_ended_event
+ assert base_event.priority_updated is None
+ assert isinstance(base_event.stream_ended, h2.events.StreamEnded)
+
+ def test_trailers_received_related_all(self, frame_factory):
+ """
+ TrailersReceived has two possible related events: PriorityUpdated and
+        StreamEnded, both fired when a single HEADERS frame is received.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ )
+ c.receive_data(f.serialize())
+
+ input_frame = frame_factory.build_headers_frame(
+ headers=self.example_trailers,
+ flags=['END_STREAM', 'PRIORITY'],
+ stream_weight=15,
+ depends_on=0,
+ exclusive=False,
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 3
+ base_event = events[0]
+ other_events = events[1:]
+
+ assert base_event.stream_ended in other_events
+ assert isinstance(base_event.stream_ended, h2.events.StreamEnded)
+ assert base_event.priority_updated in other_events
+ assert isinstance(
+ base_event.priority_updated, h2.events.PriorityUpdated
+ )
+
+ def test_trailers_received_related_stream_ended(self, frame_factory):
+ """
+ TrailersReceived can be related to StreamEnded by itself.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ )
+ c.receive_data(f.serialize())
+
+ input_frame = frame_factory.build_headers_frame(
+ headers=self.example_trailers,
+ flags=['END_STREAM'],
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 2
+ base_event = events[0]
+ stream_ended_event = events[1]
+
+ assert base_event.stream_ended is stream_ended_event
+ assert base_event.priority_updated is None
+ assert isinstance(base_event.stream_ended, h2.events.StreamEnded)
+
+ def test_informational_response_related_nothing(self, frame_factory):
+ """
+ InformationalResponseReceived in the standard case is related to
+ nothing.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ input_frame = frame_factory.build_headers_frame(
+ headers=self.informational_response_headers,
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 1
+ base_event = events[0]
+
+ assert base_event.priority_updated is None
+
+ def test_informational_response_received_related_all(self, frame_factory):
+ """
+ InformationalResponseReceived has one possible related event:
+ PriorityUpdated, fired when a single HEADERS frame is received.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ input_frame = frame_factory.build_headers_frame(
+ headers=self.informational_response_headers,
+ flags=['PRIORITY'],
+ stream_weight=15,
+ depends_on=0,
+ exclusive=False,
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 2
+ base_event = events[0]
+ priority_updated_event = events[1]
+
+ assert base_event.priority_updated is priority_updated_event
+ assert isinstance(
+ base_event.priority_updated, h2.events.PriorityUpdated
+ )
+
+ def test_data_received_normally_relates_to_nothing(self, frame_factory):
+ """
+        A plain DATA frame leads to DataReceived with no related events.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ )
+ c.receive_data(f.serialize())
+
+ input_frame = frame_factory.build_data_frame(
+ data=b'some data',
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 1
+ base_event = events[0]
+
+ assert base_event.stream_ended is None
+
+ def test_data_received_related_stream_ended(self, frame_factory):
+ """
+ DataReceived can be related to StreamEnded by itself.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ )
+ c.receive_data(f.serialize())
+
+ input_frame = frame_factory.build_data_frame(
+ data=b'some data',
+ flags=['END_STREAM'],
+ )
+ events = c.receive_data(input_frame.serialize())
+
+ assert len(events) == 2
+ base_event = events[0]
+ stream_ended_event = events[1]
+
+ assert base_event.stream_ended is stream_ended_event
+ assert isinstance(base_event.stream_ended, h2.events.StreamEnded)
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_rfc7838.py b/testing/web-platform/tests/tools/third_party/h2/test/test_rfc7838.py
new file mode 100644
index 0000000000..d7704e2345
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_rfc7838.py
@@ -0,0 +1,447 @@
+# -*- coding: utf-8 -*-
+"""
+test_rfc7838
+~~~~~~~~~~~~
+
+Test the RFC 7838 ALTSVC support.
+"""
+import pytest
+
+import h2.config
+import h2.connection
+import h2.events
+import h2.exceptions
+
+
+class TestRFC7838Client(object):
+ """
+ Tests that the client supports receiving the RFC 7838 AltSvc frame.
+ """
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+ example_response_headers = [
+ (u':status', u'200'),
+ (u'server', u'fake-serv/0.1.0')
+ ]
+
+ def test_receiving_altsvc_stream_zero(self, frame_factory):
+ """
+ An ALTSVC frame received on stream zero correctly transposes all the
+        fields from the frame.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_alt_svc_frame(
+ stream_id=0, origin=b"example.com", field=b'h2=":8000"; ma=60'
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.AlternativeServiceAvailable)
+ assert event.origin == b"example.com"
+ assert event.field_value == b'h2=":8000"; ma=60'
+
+ # No data gets sent.
+ assert not c.data_to_send()
+
+ def test_receiving_altsvc_stream_zero_no_origin(self, frame_factory):
+ """
+ An ALTSVC frame received on stream zero without an origin field is
+ ignored.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_alt_svc_frame(
+ stream_id=0, origin=b"", field=b'h2=":8000"; ma=60'
+ )
+ events = c.receive_data(f.serialize())
+
+ assert not events
+ assert not c.data_to_send()
+
+ def test_receiving_altsvc_on_stream(self, frame_factory):
+ """
+ An ALTSVC frame received on a stream correctly transposes all the
+ fields from the frame and attaches the expected origin.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_alt_svc_frame(
+ stream_id=1, origin=b"", field=b'h2=":8000"; ma=60'
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
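+        # The frame itself carried an empty origin, so the event reports the
+        # origin taken from the request sent on this stream.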
+ assert isinstance(event, h2.events.AlternativeServiceAvailable)
+ assert event.origin == b"example.com"
+ assert event.field_value == b'h2=":8000"; ma=60'
+
+ # No data gets sent.
+ assert not c.data_to_send()
+
+ def test_receiving_altsvc_on_stream_with_origin(self, frame_factory):
+ """
+ An ALTSVC frame received on a stream with an origin field present gets
+ ignored.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_alt_svc_frame(
+ stream_id=1, origin=b"example.com", field=b'h2=":8000"; ma=60'
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 0
+ assert not c.data_to_send()
+
+ def test_receiving_altsvc_on_stream_not_yet_opened(self, frame_factory):
+ """
+ When an ALTSVC frame is received on a stream the client hasn't yet
+ opened, the frame is ignored.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.clear_outbound_data_buffer()
+
+ # We'll test this twice, once on a client-initiated stream ID and once
+        # on a server-initiated one.
+ f1 = frame_factory.build_alt_svc_frame(
+ stream_id=1, origin=b"", field=b'h2=":8000"; ma=60'
+ )
+ f2 = frame_factory.build_alt_svc_frame(
+ stream_id=2, origin=b"", field=b'h2=":8000"; ma=60'
+ )
+ events = c.receive_data(f1.serialize() + f2.serialize())
+
+ assert len(events) == 0
+ assert not c.data_to_send()
+
+ def test_receiving_altsvc_before_sending_headers(self, frame_factory):
+ """
+        When an ALTSVC frame is received but the client hasn't sent headers yet,
+ it gets ignored.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ # We need to create the idle stream. We have to do it by calling
+ # a private API. While this can't naturally happen in hyper-h2 (we
+ # don't currently have a mechanism by which this could occur), it could
+ # happen in the future and we defend against it.
+ c._begin_new_stream(
+ stream_id=1, allowed_ids=h2.connection.AllowedStreamIDs.ODD
+ )
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_alt_svc_frame(
+ stream_id=1, origin=b"", field=b'h2=":8000"; ma=60'
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 0
+ assert not c.data_to_send()
+
+ def test_receiving_altsvc_after_receiving_headers(self, frame_factory):
+ """
+ When an ALTSVC frame is received but the server has already sent
+        headers, it gets ignored.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_response_headers
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_alt_svc_frame(
+ stream_id=1, origin=b"", field=b'h2=":8000"; ma=60'
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 0
+ assert not c.data_to_send()
+
+ def test_receiving_altsvc_on_closed_stream(self, frame_factory):
+ """
+ When an ALTSVC frame is received on a closed stream, we ignore it.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(
+ stream_id=1, headers=self.example_request_headers, end_stream=True
+ )
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ flags=['END_STREAM'],
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_alt_svc_frame(
+ stream_id=1, origin=b"", field=b'h2=":8000"; ma=60'
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 0
+ assert not c.data_to_send()
+
+ def test_receiving_altsvc_on_pushed_stream(self, frame_factory):
+ """
+ When an ALTSVC frame is received on a stream that the server pushed,
+ the frame is accepted.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_alt_svc_frame(
+ stream_id=2, origin=b"", field=b'h2=":8000"; ma=60'
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 1
+ event = events[0]
+
+ assert isinstance(event, h2.events.AlternativeServiceAvailable)
+ assert event.origin == b"example.com"
+ assert event.field_value == b'h2=":8000"; ma=60'
+
+ # No data gets sent.
+ assert not c.data_to_send()
+
+ def test_cannot_send_explicit_alternative_service(self, frame_factory):
+ """
+ A client cannot send an explicit alternative service.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.advertise_alternative_service(
+ field_value=b'h2=":8000"; ma=60',
+ origin=b"example.com",
+ )
+
+ def test_cannot_send_implicit_alternative_service(self, frame_factory):
+ """
+ A client cannot send an implicit alternative service.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.advertise_alternative_service(
+ field_value=b'h2=":8000"; ma=60',
+ stream_id=1,
+ )
+
+
+class TestRFC7838Server(object):
+ """
+ Tests that the server supports sending the RFC 7838 AltSvc frame.
+ """
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+ example_response_headers = [
+ (u':status', u'200'),
+ (u'server', u'fake-serv/0.1.0')
+ ]
+
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ def test_receiving_altsvc_as_server_stream_zero(self, frame_factory):
+ """
+ When an ALTSVC frame is received on stream zero and we are a server,
+ we ignore it.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_alt_svc_frame(
+ stream_id=0, origin=b"example.com", field=b'h2=":8000"; ma=60'
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 0
+ assert not c.data_to_send()
+
+ def test_receiving_altsvc_as_server_on_stream(self, frame_factory):
+ """
+ When an ALTSVC frame is received on a stream and we are a server, we
+ ignore it.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_alt_svc_frame(
+ stream_id=1, origin=b"", field=b'h2=":8000"; ma=60'
+ )
+ events = c.receive_data(f.serialize())
+
+ assert len(events) == 0
+ assert not c.data_to_send()
+
+ def test_sending_explicit_alternative_service(self, frame_factory):
+ """
+ A server can send an explicit alternative service.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ c.advertise_alternative_service(
+ field_value=b'h2=":8000"; ma=60',
+ origin=b"example.com",
+ )
+
+ f = frame_factory.build_alt_svc_frame(
+ stream_id=0, origin=b"example.com", field=b'h2=":8000"; ma=60'
+ )
+ assert c.data_to_send() == f.serialize()
+
+ def test_sending_implicit_alternative_service(self, frame_factory):
+ """
+ A server can send an implicit alternative service.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ c.advertise_alternative_service(
+ field_value=b'h2=":8000"; ma=60',
+ stream_id=1,
+ )
+
+ f = frame_factory.build_alt_svc_frame(
+ stream_id=1, origin=b"", field=b'h2=":8000"; ma=60'
+ )
+ assert c.data_to_send() == f.serialize()
+
+ def test_no_implicit_alternative_service_before_headers(self,
+ frame_factory):
+ """
+ If headers haven't been received yet, the server forbids sending an
+ implicit alternative service.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.advertise_alternative_service(
+ field_value=b'h2=":8000"; ma=60',
+ stream_id=1,
+ )
+
+ def test_no_implicit_alternative_service_after_response(self,
+ frame_factory):
+ """
+ If the server has sent response headers, hyper-h2 forbids sending an
+ implicit alternative service.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+ c.send_headers(stream_id=1, headers=self.example_response_headers)
+ c.clear_outbound_data_buffer()
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ c.advertise_alternative_service(
+ field_value=b'h2=":8000"; ma=60',
+ stream_id=1,
+ )
+
+ def test_cannot_provide_origin_and_stream_id(self, frame_factory):
+ """
+ The user cannot provide both the origin and stream_id arguments when
+ advertising alternative services.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+
+ with pytest.raises(ValueError):
+ c.advertise_alternative_service(
+ field_value=b'h2=":8000"; ma=60',
+ origin=b"example.com",
+ stream_id=1,
+ )
+
+ def test_cannot_provide_unicode_altsvc_field(self, frame_factory):
+ """
+ The user cannot provide the field value for alternative services as a
+ unicode string.
+ """
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+
+ with pytest.raises(ValueError):
+ c.advertise_alternative_service(
+ field_value=u'h2=":8000"; ma=60',
+ origin=b"example.com",
+ )
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_settings.py b/testing/web-platform/tests/tools/third_party/h2/test/test_settings.py
new file mode 100644
index 0000000000..d19f93a7c2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_settings.py
@@ -0,0 +1,470 @@
+# -*- coding: utf-8 -*-
+"""
+test_settings
+~~~~~~~~~~~~~
+
+Test the Settings object.
+"""
+import pytest
+
+import h2.errors
+import h2.exceptions
+import h2.settings
+
+from hypothesis import given, assume
+from hypothesis.strategies import (
+ integers, booleans, fixed_dictionaries, builds
+)
+
+
+class TestSettings(object):
+ """
+ Test the Settings object behaves as expected.
+ """
+ def test_settings_defaults_client(self):
+ """
+ The Settings object begins with the appropriate defaults for clients.
+ """
+ s = h2.settings.Settings(client=True)
+
+ assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 4096
+ assert s[h2.settings.SettingCodes.ENABLE_PUSH] == 1
+ assert s[h2.settings.SettingCodes.INITIAL_WINDOW_SIZE] == 65535
+ assert s[h2.settings.SettingCodes.MAX_FRAME_SIZE] == 16384
+ assert s[h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL] == 0
+
+ def test_settings_defaults_server(self):
+ """
+ The Settings object begins with the appropriate defaults for servers.
+ """
+ s = h2.settings.Settings(client=False)
+
+ assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 4096
+ assert s[h2.settings.SettingCodes.ENABLE_PUSH] == 0
+ assert s[h2.settings.SettingCodes.INITIAL_WINDOW_SIZE] == 65535
+ assert s[h2.settings.SettingCodes.MAX_FRAME_SIZE] == 16384
+ assert s[h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL] == 0
+
+ @pytest.mark.parametrize('client', [True, False])
+ def test_can_set_initial_values(self, client):
+ """
+ The Settings object can be provided initial values that override the
+ defaults.
+ """
+ overrides = {
+ h2.settings.SettingCodes.HEADER_TABLE_SIZE: 8080,
+ h2.settings.SettingCodes.MAX_FRAME_SIZE: 16388,
+ h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100,
+ h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 2**16,
+ h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL: 1,
+ }
+ s = h2.settings.Settings(client=client, initial_values=overrides)
+
+ assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 8080
+ assert s[h2.settings.SettingCodes.ENABLE_PUSH] == bool(client)
+ assert s[h2.settings.SettingCodes.INITIAL_WINDOW_SIZE] == 65535
+ assert s[h2.settings.SettingCodes.MAX_FRAME_SIZE] == 16388
+ assert s[h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS] == 100
+ assert s[h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE] == 2**16
+ assert s[h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL] == 1
+
+ @pytest.mark.parametrize(
+ 'setting,value',
+ [
+ (h2.settings.SettingCodes.ENABLE_PUSH, 2),
+ (h2.settings.SettingCodes.ENABLE_PUSH, -1),
+ (h2.settings.SettingCodes.INITIAL_WINDOW_SIZE, -1),
+ (h2.settings.SettingCodes.INITIAL_WINDOW_SIZE, 2**34),
+ (h2.settings.SettingCodes.MAX_FRAME_SIZE, 1),
+ (h2.settings.SettingCodes.MAX_FRAME_SIZE, 2**30),
+ (h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE, -1),
+ (h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL, -1),
+ ]
+ )
+ def test_cannot_set_invalid_initial_values(self, setting, value):
+ """
+        Invalid initial values are rejected: constructing a Settings object
+        with them raises an InvalidSettingsValueError.
+ """
+ overrides = {setting: value}
+
+ with pytest.raises(h2.exceptions.InvalidSettingsValueError):
+ h2.settings.Settings(initial_values=overrides)
+
+ def test_applying_value_doesnt_take_effect_immediately(self):
+ """
+ When a value is applied to the settings object, it doesn't immediately
+ take effect.
+ """
+ s = h2.settings.Settings(client=True)
+        s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] = 8000
+
+ assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 4096
+
+ def test_acknowledging_values(self):
+ """
+ When we acknowledge settings, the values change.
+ """
+ s = h2.settings.Settings(client=True)
+ old_settings = dict(s)
+
+ new_settings = {
+ h2.settings.SettingCodes.HEADER_TABLE_SIZE: 4000,
+ h2.settings.SettingCodes.ENABLE_PUSH: 0,
+ h2.settings.SettingCodes.INITIAL_WINDOW_SIZE: 60,
+ h2.settings.SettingCodes.MAX_FRAME_SIZE: 16385,
+ h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL: 1,
+ }
+ s.update(new_settings)
+
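+        # update() only stages the new values; they become visible once
+        # acknowledge() is called, mirroring the SETTINGS / SETTINGS ACK flow.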
+ assert dict(s) == old_settings
+ s.acknowledge()
+ assert dict(s) == new_settings
+
+ def test_acknowledging_returns_the_changed_settings(self):
+ """
+ Acknowledging settings returns the changes.
+ """
+ s = h2.settings.Settings(client=True)
+ s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] = 8000
+ s[h2.settings.SettingCodes.ENABLE_PUSH] = 0
+
+ changes = s.acknowledge()
+ assert len(changes) == 2
+
+ table_size_change = (
+ changes[h2.settings.SettingCodes.HEADER_TABLE_SIZE]
+ )
+ push_change = changes[h2.settings.SettingCodes.ENABLE_PUSH]
+
+ assert table_size_change.setting == (
+ h2.settings.SettingCodes.HEADER_TABLE_SIZE
+ )
+ assert table_size_change.original_value == 4096
+ assert table_size_change.new_value == 8000
+
+ assert push_change.setting == h2.settings.SettingCodes.ENABLE_PUSH
+ assert push_change.original_value == 1
+ assert push_change.new_value == 0
+
+ def test_acknowledging_only_returns_changed_settings(self):
+ """
+ Acknowledging settings does not return unchanged settings.
+ """
+ s = h2.settings.Settings(client=True)
+ s[h2.settings.SettingCodes.INITIAL_WINDOW_SIZE] = 70
+
+ changes = s.acknowledge()
+ assert len(changes) == 1
+ assert list(changes.keys()) == [
+ h2.settings.SettingCodes.INITIAL_WINDOW_SIZE
+ ]
+
+ def test_deleting_values_deletes_all_of_them(self):
+ """
+ When we delete a key we lose all state about it.
+ """
+ s = h2.settings.Settings(client=True)
+        s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] = 8000
+
+ del s[h2.settings.SettingCodes.HEADER_TABLE_SIZE]
+
+ with pytest.raises(KeyError):
+ s[h2.settings.SettingCodes.HEADER_TABLE_SIZE]
+
+ def test_length_correctly_reported(self):
+ """
+ Length is related only to the number of keys.
+ """
+ s = h2.settings.Settings(client=True)
+ assert len(s) == 5
+
+        s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] = 8000
+ assert len(s) == 5
+
+ s.acknowledge()
+ assert len(s) == 5
+
+ del s[h2.settings.SettingCodes.HEADER_TABLE_SIZE]
+ assert len(s) == 4
+
+ def test_new_values_work(self):
+ """
+        New values initially don't appear.
+ """
+ s = h2.settings.Settings(client=True)
+ s[80] = 81
+
+ with pytest.raises(KeyError):
+ s[80]
+
+ def test_new_values_follow_basic_acknowledgement_rules(self):
+ """
+ A new value properly appears when acknowledged.
+ """
+ s = h2.settings.Settings(client=True)
+ s[80] = 81
+ changed_settings = s.acknowledge()
+
+ assert s[80] == 81
+ assert len(changed_settings) == 1
+
+ changed = changed_settings[80]
+ assert changed.setting == 80
+ assert changed.original_value is None
+ assert changed.new_value == 81
+
+ def test_single_values_arent_affected_by_acknowledgement(self):
+ """
+ When acknowledged, unchanged settings remain unchanged.
+ """
+ s = h2.settings.Settings(client=True)
+ assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 4096
+
+ s.acknowledge()
+ assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 4096
+
+ def test_settings_getters(self):
+ """
+ Getters exist for well-known settings.
+ """
+ s = h2.settings.Settings(client=True)
+
+ assert s.header_table_size == (
+ s[h2.settings.SettingCodes.HEADER_TABLE_SIZE]
+ )
+ assert s.enable_push == s[h2.settings.SettingCodes.ENABLE_PUSH]
+ assert s.initial_window_size == (
+ s[h2.settings.SettingCodes.INITIAL_WINDOW_SIZE]
+ )
+ assert s.max_frame_size == s[h2.settings.SettingCodes.MAX_FRAME_SIZE]
+ assert s.max_concurrent_streams == 2**32 + 1 # A sensible default.
+ assert s.max_header_list_size is None
+ assert s.enable_connect_protocol == s[
+ h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL
+ ]
+
+ def test_settings_setters(self):
+ """
+ Setters exist for well-known settings.
+ """
+ s = h2.settings.Settings(client=True)
+
+ s.header_table_size = 0
+ s.enable_push = 1
+ s.initial_window_size = 2
+ s.max_frame_size = 16385
+ s.max_concurrent_streams = 4
+ s.max_header_list_size = 2**16
+ s.enable_connect_protocol = 1
+
+ s.acknowledge()
+ assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 0
+ assert s[h2.settings.SettingCodes.ENABLE_PUSH] == 1
+ assert s[h2.settings.SettingCodes.INITIAL_WINDOW_SIZE] == 2
+ assert s[h2.settings.SettingCodes.MAX_FRAME_SIZE] == 16385
+ assert s[h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS] == 4
+ assert s[h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE] == 2**16
+ assert s[h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL] == 1
+
+ @given(integers())
+ def test_cannot_set_invalid_values_for_enable_push(self, val):
+ """
+ SETTINGS_ENABLE_PUSH only allows two values: 0, 1.
+ """
+ assume(val not in (0, 1))
+ s = h2.settings.Settings()
+
+ with pytest.raises(h2.exceptions.InvalidSettingsValueError) as e:
+ s.enable_push = val
+
+ s.acknowledge()
+ assert e.value.error_code == h2.errors.ErrorCodes.PROTOCOL_ERROR
+ assert s.enable_push == 1
+
+ with pytest.raises(h2.exceptions.InvalidSettingsValueError) as e:
+ s[h2.settings.SettingCodes.ENABLE_PUSH] = val
+
+ s.acknowledge()
+ assert e.value.error_code == h2.errors.ErrorCodes.PROTOCOL_ERROR
+ assert s[h2.settings.SettingCodes.ENABLE_PUSH] == 1
+
+ @given(integers())
+ def test_cannot_set_invalid_vals_for_initial_window_size(self, val):
+ """
+        SETTINGS_INITIAL_WINDOW_SIZE only allows values between 0 and 2**31 - 1
+ inclusive.
+ """
+ s = h2.settings.Settings()
+
+ if 0 <= val <= 2**31 - 1:
+ s.initial_window_size = val
+ s.acknowledge()
+ assert s.initial_window_size == val
+ else:
+ with pytest.raises(h2.exceptions.InvalidSettingsValueError) as e:
+ s.initial_window_size = val
+
+ s.acknowledge()
+ assert (
+ e.value.error_code == h2.errors.ErrorCodes.FLOW_CONTROL_ERROR
+ )
+ assert s.initial_window_size == 65535
+
+ with pytest.raises(h2.exceptions.InvalidSettingsValueError) as e:
+ s[h2.settings.SettingCodes.INITIAL_WINDOW_SIZE] = val
+
+ s.acknowledge()
+ assert (
+ e.value.error_code == h2.errors.ErrorCodes.FLOW_CONTROL_ERROR
+ )
+ assert s[h2.settings.SettingCodes.INITIAL_WINDOW_SIZE] == 65535
+
+ @given(integers())
+ def test_cannot_set_invalid_values_for_max_frame_size(self, val):
+ """
+ SETTINGS_MAX_FRAME_SIZE only allows values between 2**14 and 2**24 - 1.
+ """
+ s = h2.settings.Settings()
+
+ if 2**14 <= val <= 2**24 - 1:
+ s.max_frame_size = val
+ s.acknowledge()
+ assert s.max_frame_size == val
+ else:
+ with pytest.raises(h2.exceptions.InvalidSettingsValueError) as e:
+ s.max_frame_size = val
+
+ s.acknowledge()
+ assert e.value.error_code == h2.errors.ErrorCodes.PROTOCOL_ERROR
+ assert s.max_frame_size == 16384
+
+ with pytest.raises(h2.exceptions.InvalidSettingsValueError) as e:
+ s[h2.settings.SettingCodes.MAX_FRAME_SIZE] = val
+
+ s.acknowledge()
+ assert e.value.error_code == h2.errors.ErrorCodes.PROTOCOL_ERROR
+ assert s[h2.settings.SettingCodes.MAX_FRAME_SIZE] == 16384
+
+ @given(integers())
+ def test_cannot_set_invalid_values_for_max_header_list_size(self, val):
+ """
+ SETTINGS_MAX_HEADER_LIST_SIZE only allows non-negative values.
+ """
+ s = h2.settings.Settings()
+
+ if val >= 0:
+ s.max_header_list_size = val
+ s.acknowledge()
+ assert s.max_header_list_size == val
+ else:
+ with pytest.raises(h2.exceptions.InvalidSettingsValueError) as e:
+ s.max_header_list_size = val
+
+ s.acknowledge()
+ assert e.value.error_code == h2.errors.ErrorCodes.PROTOCOL_ERROR
+ assert s.max_header_list_size is None
+
+ with pytest.raises(h2.exceptions.InvalidSettingsValueError) as e:
+ s[h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE] = val
+
+ s.acknowledge()
+ assert e.value.error_code == h2.errors.ErrorCodes.PROTOCOL_ERROR
+
+ with pytest.raises(KeyError):
+ s[h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE]
+
+ @given(integers())
+ def test_cannot_set_invalid_values_for_enable_connect_protocol(self, val):
+ """
+ SETTINGS_ENABLE_CONNECT_PROTOCOL only allows two values: 0, 1.
+ """
+ assume(val not in (0, 1))
+ s = h2.settings.Settings()
+
+ with pytest.raises(h2.exceptions.InvalidSettingsValueError) as e:
+ s.enable_connect_protocol = val
+
+ s.acknowledge()
+ assert e.value.error_code == h2.errors.ErrorCodes.PROTOCOL_ERROR
+ assert s.enable_connect_protocol == 0
+
+ with pytest.raises(h2.exceptions.InvalidSettingsValueError) as e:
+ s[h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL] = val
+
+ s.acknowledge()
+ assert e.value.error_code == h2.errors.ErrorCodes.PROTOCOL_ERROR
+ assert s[h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL] == 0
+
+
+class TestSettingsEquality(object):
+ """
+    A class defining tests for the standard implementation of == and !=.
+ """
+
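+    # A Hypothesis strategy that builds Settings objects whose initial
+    # values are random but always within the ranges the spec allows.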
+ SettingsStrategy = builds(
+ h2.settings.Settings,
+ client=booleans(),
+ initial_values=fixed_dictionaries({
+ h2.settings.SettingCodes.HEADER_TABLE_SIZE:
+ integers(0, 2**32 - 1),
+ h2.settings.SettingCodes.ENABLE_PUSH: integers(0, 1),
+ h2.settings.SettingCodes.INITIAL_WINDOW_SIZE:
+ integers(0, 2**31 - 1),
+ h2.settings.SettingCodes.MAX_FRAME_SIZE:
+ integers(2**14, 2**24 - 1),
+ h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS:
+ integers(0, 2**32 - 1),
+ h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE:
+ integers(0, 2**32 - 1),
+ })
+ )
+
+ @given(settings=SettingsStrategy)
+ def test_equality_reflexive(self, settings):
+ """
+ An object compares equal to itself using the == operator and the !=
+ operator.
+ """
+ assert (settings == settings)
+ assert not (settings != settings)
+
+ @given(settings=SettingsStrategy, o_settings=SettingsStrategy)
+ def test_equality_multiple(self, settings, o_settings):
+ """
+        Two objects compare consistently with each other using the == and !=
+        operators.
+ """
+ if settings == o_settings:
+ assert settings == o_settings
+ assert not (settings != o_settings)
+ else:
+ assert settings != o_settings
+ assert not (settings == o_settings)
+
+ @given(settings=SettingsStrategy)
+ def test_another_type_equality(self, settings):
+ """
+ The object does not compare equal to an object of an unrelated type
+ (which does not implement the comparison) using the == operator.
+ """
+ obj = object()
+ assert (settings != obj)
+ assert not (settings == obj)
+
+ @given(settings=SettingsStrategy)
+ def test_delegated_eq(self, settings):
+ """
+ The result of comparison is delegated to the right-hand operand if
+ it is of an unrelated type.
+ """
+ class Delegate(object):
+ def __eq__(self, other):
+ return [self]
+
+ def __ne__(self, other):
+ return [self]
+
+ delg = Delegate()
+ assert (settings == delg) == [delg]
+ assert (settings != delg) == [delg]
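
The SettingsStrategy above combines Hypothesis's builds() and fixed_dictionaries() so that every generated Settings object starts from values the protocol actually allows. As a minimal, self-contained sketch of the same pattern (an illustration, not part of the vendored suite; it assumes only that hypothesis and h2 are installed), a smaller strategy can drive a property such as the reflexivity check above:

    # A minimal sketch, not part of the vendored test suite.
    from hypothesis import given
    from hypothesis.strategies import booleans, builds, fixed_dictionaries, integers

    import h2.settings

    small_settings = builds(
        h2.settings.Settings,
        client=booleans(),
        initial_values=fixed_dictionaries({
            h2.settings.SettingCodes.ENABLE_PUSH: integers(0, 1),
            h2.settings.SettingCodes.MAX_FRAME_SIZE: integers(2**14, 2**24 - 1),
        }),
    )

    @given(settings=small_settings)
    def test_equality_is_reflexive(settings):
        # Mirrors the reflexivity property exercised by the class above.
        assert settings == settings
        assert not (settings != settings)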
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_state_machines.py b/testing/web-platform/tests/tools/third_party/h2/test/test_state_machines.py
new file mode 100644
index 0000000000..034ae909d2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_state_machines.py
@@ -0,0 +1,163 @@
+# -*- coding: utf-8 -*-
+"""
+test_state_machines
+~~~~~~~~~~~~~~~~~~~
+
+These tests validate the state machines directly. Writing meaningful tests for
+this case can be tricky, so the majority of these tests use Hypothesis to try
+to talk about general behaviours rather than specific cases.
+"""
+import pytest
+
+import h2.connection
+import h2.exceptions
+import h2.stream
+
+from hypothesis import given
+from hypothesis.strategies import sampled_from
+
+
+class TestConnectionStateMachine(object):
+ """
+ Tests of the connection state machine.
+ """
+ @given(state=sampled_from(h2.connection.ConnectionState),
+ input_=sampled_from(h2.connection.ConnectionInputs))
+ def test_state_transitions(self, state, input_):
+ c = h2.connection.H2ConnectionStateMachine()
+ c.state = state
+
+ try:
+ c.process_input(input_)
+ except h2.exceptions.ProtocolError:
+ assert c.state == h2.connection.ConnectionState.CLOSED
+ else:
+ assert c.state in h2.connection.ConnectionState
+
+ def test_state_machine_only_allows_connection_states(self):
+ """
+ The Connection state machine only allows ConnectionState inputs.
+ """
+ c = h2.connection.H2ConnectionStateMachine()
+
+ with pytest.raises(ValueError):
+ c.process_input(1)
+
+ @pytest.mark.parametrize(
+ "state",
+ (
+ s for s in h2.connection.ConnectionState
+ if s != h2.connection.ConnectionState.CLOSED
+ ),
+ )
+ @pytest.mark.parametrize(
+ "input_",
+ [
+ h2.connection.ConnectionInputs.RECV_PRIORITY,
+ h2.connection.ConnectionInputs.SEND_PRIORITY
+ ]
+ )
+ def test_priority_frames_allowed_in_all_states(self, state, input_):
+ """
+ Priority frames can be sent/received in all connection states except
+ closed.
+ """
+ c = h2.connection.H2ConnectionStateMachine()
+ c.state = state
+
+ c.process_input(input_)
+
+
+class TestStreamStateMachine(object):
+ """
+ Tests of the stream state machine.
+ """
+ @given(state=sampled_from(h2.stream.StreamState),
+ input_=sampled_from(h2.stream.StreamInputs))
+ def test_state_transitions(self, state, input_):
+ s = h2.stream.H2StreamStateMachine(stream_id=1)
+ s.state = state
+
+ try:
+ s.process_input(input_)
+ except h2.exceptions.StreamClosedError:
+ # This can only happen for streams that started in the closed
+ # state OR where the input was RECV_DATA and the state was not
+ # OPEN or HALF_CLOSED_LOCAL OR where the state was
+ # HALF_CLOSED_REMOTE and a frame was received.
+ if state == h2.stream.StreamState.CLOSED:
+ assert s.state == h2.stream.StreamState.CLOSED
+ elif input_ == h2.stream.StreamInputs.RECV_DATA:
+ assert s.state == h2.stream.StreamState.CLOSED
+ assert state not in (
+ h2.stream.StreamState.OPEN,
+ h2.stream.StreamState.HALF_CLOSED_LOCAL,
+ )
+ elif state == h2.stream.StreamState.HALF_CLOSED_REMOTE:
+ assert input_ in (
+ h2.stream.StreamInputs.RECV_HEADERS,
+ h2.stream.StreamInputs.RECV_PUSH_PROMISE,
+ h2.stream.StreamInputs.RECV_DATA,
+ h2.stream.StreamInputs.RECV_CONTINUATION,
+ )
+ except h2.exceptions.ProtocolError:
+ assert s.state == h2.stream.StreamState.CLOSED
+ else:
+ assert s.state in h2.stream.StreamState
+
+ def test_state_machine_only_allows_stream_states(self):
+ """
+ The Stream state machine only allows StreamState inputs.
+ """
+ s = h2.stream.H2StreamStateMachine(stream_id=1)
+
+ with pytest.raises(ValueError):
+ s.process_input(1)
+
+ def test_stream_state_machine_forbids_pushes_on_server_streams(self):
+ """
+ Streams where this peer is a server do not allow receiving pushed
+ frames.
+ """
+ s = h2.stream.H2StreamStateMachine(stream_id=1)
+ s.process_input(h2.stream.StreamInputs.RECV_HEADERS)
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ s.process_input(h2.stream.StreamInputs.RECV_PUSH_PROMISE)
+
+ def test_stream_state_machine_forbids_sending_pushes_from_clients(self):
+ """
+ Streams where this peer is a client do not allow sending pushed frames.
+ """
+ s = h2.stream.H2StreamStateMachine(stream_id=1)
+ s.process_input(h2.stream.StreamInputs.SEND_HEADERS)
+
+ with pytest.raises(h2.exceptions.ProtocolError):
+ s.process_input(h2.stream.StreamInputs.SEND_PUSH_PROMISE)
+
+ @pytest.mark.parametrize(
+ "input_",
+ [
+ h2.stream.StreamInputs.SEND_HEADERS,
+ h2.stream.StreamInputs.SEND_PUSH_PROMISE,
+ h2.stream.StreamInputs.SEND_RST_STREAM,
+ h2.stream.StreamInputs.SEND_DATA,
+ h2.stream.StreamInputs.SEND_WINDOW_UPDATE,
+ h2.stream.StreamInputs.SEND_END_STREAM,
+ ]
+ )
+ def test_cannot_send_on_closed_streams(self, input_):
+ """
+ Sending anything but a PRIORITY frame is forbidden on closed streams.
+ """
+ c = h2.stream.H2StreamStateMachine(stream_id=1)
+ c.state = h2.stream.StreamState.CLOSED
+
+ expected_error = (
+ h2.exceptions.ProtocolError
+ if input_ == h2.stream.StreamInputs.SEND_PUSH_PROMISE
+ else h2.exceptions.StreamClosedError
+ )
+
+ with pytest.raises(expected_error):
+ c.process_input(input_)
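
As the module docstring notes, the point of the Hypothesis-driven tests is to state general invariants rather than enumerate transitions by hand. A rough, self-contained sketch of the same idea, extended from single inputs to short input sequences (it relies only on the per-step property validated above and is not part of the vendored suite), might look like this:

    from hypothesis import given
    from hypothesis.strategies import lists, sampled_from

    import h2.connection
    import h2.exceptions

    @given(lists(sampled_from(h2.connection.ConnectionInputs), max_size=20))
    def test_input_sequences_never_corrupt_state(inputs):
        machine = h2.connection.H2ConnectionStateMachine()
        try:
            for input_ in inputs:
                machine.process_input(input_)
        except h2.exceptions.ProtocolError:
            # A bad input closes the connection state machine.
            assert machine.state == h2.connection.ConnectionState.CLOSED
        else:
            # Otherwise every step landed in a known state.
            assert machine.state in h2.connection.ConnectionState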
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_stream_reset.py b/testing/web-platform/tests/tools/third_party/h2/test/test_stream_reset.py
new file mode 100644
index 0000000000..778445515f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_stream_reset.py
@@ -0,0 +1,137 @@
+# -*- coding: utf-8 -*-
+"""
+test_stream_reset
+~~~~~~~~~~~~~~~~~
+
+More complex tests that exercise stream resetting functionality to validate
+that connection state is appropriately maintained.
+
+Specifically, these tests validate that streams that have been reset accurately
+keep track of connection-level state.
+"""
+import pytest
+
+import h2.connection
+import h2.errors
+import h2.events
+
+
+class TestStreamReset(object):
+ """
+ Tests for resetting streams.
+ """
+ example_request_headers = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ ]
+ example_response_headers = [
+ (b':status', b'200'),
+ (b'server', b'fake-serv/0.1.0'),
+ (b'content-length', b'0')
+ ]
+
+ def test_reset_stream_keeps_header_state_correct(self, frame_factory):
+ """
+ A stream that has been reset still affects the header decoder.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+ c.reset_stream(stream_id=1)
+ c.send_headers(stream_id=3, headers=self.example_request_headers)
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_response_headers, stream_id=1
+ )
+ rst_frame = frame_factory.build_rst_stream_frame(
+ 1, h2.errors.ErrorCodes.STREAM_CLOSED
+ )
+ events = c.receive_data(f.serialize())
+ assert not events
+ assert c.data_to_send() == rst_frame.serialize()
+
+ # This works because the header state should be intact from the headers
+ # frame that was sent on stream 1, so they should decode cleanly.
+ f = frame_factory.build_headers_frame(
+ headers=self.example_response_headers, stream_id=3
+ )
+ event = c.receive_data(f.serialize())[0]
+
+ assert isinstance(event, h2.events.ResponseReceived)
+ assert event.stream_id == 3
+ assert event.headers == self.example_response_headers
+
+ @pytest.mark.parametrize('close_id,other_id', [(1, 3), (3, 1)])
+ def test_reset_stream_keeps_flow_control_correct(self,
+ close_id,
+ other_id,
+ frame_factory):
+ """
+ A stream that has been reset does not affect the connection flow
+ control window.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+ c.send_headers(stream_id=3, headers=self.example_request_headers)
+
+ # Record the initial window size.
+ initial_window = c.remote_flow_control_window(stream_id=other_id)
+
+ f = frame_factory.build_headers_frame(
+ headers=self.example_response_headers, stream_id=close_id
+ )
+ c.receive_data(f.serialize())
+ c.reset_stream(stream_id=close_id)
+ c.clear_outbound_data_buffer()
+
+ f = frame_factory.build_data_frame(
+ data=b'some data',
+ stream_id=close_id
+ )
+ c.receive_data(f.serialize())
+
+ expected = frame_factory.build_rst_stream_frame(
+ stream_id=close_id,
+ error_code=h2.errors.ErrorCodes.STREAM_CLOSED,
+ ).serialize()
+ assert c.data_to_send() == expected
+
+ new_window = c.remote_flow_control_window(stream_id=other_id)
+ assert initial_window - len(b'some data') == new_window
+
+ @pytest.mark.parametrize('clear_streams', [True, False])
+ def test_reset_stream_automatically_resets_pushed_streams(self,
+ frame_factory,
+ clear_streams):
+ """
+ Resetting a stream causes RST_STREAM frames to be automatically emitted
+ to close any streams pushed after the reset.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ c.send_headers(stream_id=1, headers=self.example_request_headers)
+ c.reset_stream(stream_id=1)
+ c.clear_outbound_data_buffer()
+
+ if clear_streams:
+ # Call open_outbound_streams to force the connection to clean
+ # closed streams.
+ c.open_outbound_streams
+
+ f = frame_factory.build_push_promise_frame(
+ stream_id=1,
+ promised_stream_id=2,
+ headers=self.example_request_headers,
+ )
+ events = c.receive_data(f.serialize())
+ assert not events
+
+ f = frame_factory.build_rst_stream_frame(
+ stream_id=2,
+ error_code=h2.errors.ErrorCodes.REFUSED_STREAM,
+ )
+ assert c.data_to_send() == f.serialize()
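
For orientation, the client-side reset flow these tests exercise uses only a handful of public calls. Roughly (a sketch with placeholder headers, not taken from the test suite):

    import h2.connection
    import h2.errors

    conn = h2.connection.H2Connection()
    conn.initiate_connection()

    # Open stream 1 with an ordinary GET request.
    conn.send_headers(
        stream_id=1,
        headers=[
            (':authority', 'example.com'),
            (':path', '/'),
            (':scheme', 'https'),
            (':method', 'GET'),
        ],
    )

    # Abandon the request. This queues an RST_STREAM frame for stream 1
    # while leaving connection-level flow control and HPACK state intact,
    # which is what the tests above assert.
    conn.reset_stream(stream_id=1, error_code=h2.errors.ErrorCodes.CANCEL)

    wire_bytes = conn.data_to_send()  # preamble + HEADERS + RST_STREAM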
diff --git a/testing/web-platform/tests/tools/third_party/h2/test/test_utility_functions.py b/testing/web-platform/tests/tools/third_party/h2/test/test_utility_functions.py
new file mode 100644
index 0000000000..4cb0b2ae60
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test/test_utility_functions.py
@@ -0,0 +1,226 @@
+# -*- coding: utf-8 -*-
+"""
+test_utility_functions
+~~~~~~~~~~~~~~~~~~~~~~
+
+Tests for the various utility functions provided by hyper-h2.
+"""
+import pytest
+
+import h2.config
+import h2.connection
+import h2.errors
+import h2.events
+import h2.exceptions
+from h2.utilities import SizeLimitDict, extract_method_header
+
+# These tests require a non-list-returning range function.
+try:
+ range = xrange
+except NameError:
+ range = range
+
+
+class TestGetNextAvailableStreamID(object):
+ """
+ Tests for the ``H2Connection.get_next_available_stream_id`` method.
+ """
+ example_request_headers = [
+ (':authority', 'example.com'),
+ (':path', '/'),
+ (':scheme', 'https'),
+ (':method', 'GET'),
+ ]
+ example_response_headers = [
+ (':status', '200'),
+ ('server', 'fake-serv/0.1.0')
+ ]
+ server_config = h2.config.H2Configuration(client_side=False)
+
+ def test_returns_correct_sequence_for_clients(self, frame_factory):
+ """
+ For a client connection, the correct sequence of stream IDs is
+ returned.
+ """
+ # Running the exhaustive version of this test (all 1 billion available
+ # stream IDs) is too painful. For that reason, we validate that the
+ # original sequence is right for the first few thousand, and then just
+ # check that it terminates properly.
+ #
+ # Make sure that the streams get cleaned up: 8k streams floating
+ # around would make this test memory-hard, and it's not supposed to be
+ # a test of how much RAM your machine has.
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+ initial_sequence = range(1, 2**13, 2)
+
+ for expected_stream_id in initial_sequence:
+ stream_id = c.get_next_available_stream_id()
+ assert stream_id == expected_stream_id
+
+ c.send_headers(
+ stream_id=stream_id,
+ headers=self.example_request_headers,
+ end_stream=True
+ )
+ f = frame_factory.build_headers_frame(
+ headers=self.example_response_headers,
+ stream_id=stream_id,
+ flags=['END_STREAM'],
+ )
+ c.receive_data(f.serialize())
+ c.clear_outbound_data_buffer()
+
+ # Jump up to the last available stream ID. Don't clean up the stream
+ # here because who cares about one stream.
+ last_client_id = 2**31 - 1
+ c.send_headers(
+ stream_id=last_client_id,
+ headers=self.example_request_headers,
+ end_stream=True
+ )
+
+ with pytest.raises(h2.exceptions.NoAvailableStreamIDError):
+ c.get_next_available_stream_id()
+
+ def test_returns_correct_sequence_for_servers(self, frame_factory):
+ """
+ For a server connection, the correct sequence of stream IDs is
+ returned.
+ """
+ # Running the exhaustive version of this test (all 1 billion available
+ # stream IDs) is too painful. For that reason, we validate that the
+ # original sequence is right for the first few thousand, and then just
+ # check that it terminates properly.
+ #
+ # Make sure that the streams get cleaned up: 8k streams floating
+ # around would make this test memory-hard, and it's not supposed to be
+ # a test of how much RAM your machine has.
+ c = h2.connection.H2Connection(config=self.server_config)
+ c.initiate_connection()
+ c.receive_data(frame_factory.preamble())
+ f = frame_factory.build_headers_frame(
+ headers=self.example_request_headers
+ )
+ c.receive_data(f.serialize())
+
+ initial_sequence = range(2, 2**13, 2)
+
+ for expected_stream_id in initial_sequence:
+ stream_id = c.get_next_available_stream_id()
+ assert stream_id == expected_stream_id
+
+ c.push_stream(
+ stream_id=1,
+ promised_stream_id=stream_id,
+ request_headers=self.example_request_headers
+ )
+ c.send_headers(
+ stream_id=stream_id,
+ headers=self.example_response_headers,
+ end_stream=True
+ )
+ c.clear_outbound_data_buffer()
+
+ # Jump up to the last available stream ID. Don't clean up the stream
+ # here because who cares about one stream.
+ last_server_id = 2**31 - 2
+ c.push_stream(
+ stream_id=1,
+ promised_stream_id=last_server_id,
+ request_headers=self.example_request_headers,
+ )
+
+ with pytest.raises(h2.exceptions.NoAvailableStreamIDError):
+ c.get_next_available_stream_id()
+
+ def test_does_not_increment_without_stream_send(self):
+ """
+ If a new stream isn't actually created, the next stream ID doesn't
+ change.
+ """
+ c = h2.connection.H2Connection()
+ c.initiate_connection()
+
+ first_stream_id = c.get_next_available_stream_id()
+ second_stream_id = c.get_next_available_stream_id()
+
+ assert first_stream_id == second_stream_id
+
+ c.send_headers(
+ stream_id=first_stream_id,
+ headers=self.example_request_headers
+ )
+
+ third_stream_id = c.get_next_available_stream_id()
+ assert third_stream_id == (first_stream_id + 2)
+
+
+class TestExtractHeader(object):
+
+ example_request_headers = [
+ (u':authority', u'example.com'),
+ (u':path', u'/'),
+ (u':scheme', u'https'),
+ (u':method', u'GET'),
+ ]
+ example_headers_with_bytes = [
+ (b':authority', b'example.com'),
+ (b':path', b'/'),
+ (b':scheme', b'https'),
+ (b':method', b'GET'),
+ ]
+
+ @pytest.mark.parametrize(
+ 'headers', [example_request_headers, example_headers_with_bytes]
+ )
+ def test_extract_header_method(self, headers):
+ assert extract_method_header(headers) == b'GET'
+
+
+def test_size_limit_dict_limit():
+ dct = SizeLimitDict(size_limit=2)
+
+ dct[1] = 1
+ dct[2] = 2
+
+ assert len(dct) == 2
+ assert dct[1] == 1
+ assert dct[2] == 2
+
+ dct[3] = 3
+
+ assert len(dct) == 2
+ assert dct[2] == 2
+ assert dct[3] == 3
+ assert 1 not in dct
+
+
+def test_size_limit_dict_limit_init():
+ initial_dct = {
+ 1: 1,
+ 2: 2,
+ 3: 3,
+ }
+
+ dct = SizeLimitDict(initial_dct, size_limit=2)
+
+ assert len(dct) == 2
+
+
+def test_size_limit_dict_no_limit():
+ dct = SizeLimitDict(size_limit=None)
+
+ dct[1] = 1
+ dct[2] = 2
+
+ assert len(dct) == 2
+ assert dct[1] == 1
+ assert dct[2] == 2
+
+ dct[3] = 3
+
+ assert len(dct) == 3
+ assert dct[1] == 1
+ assert dct[2] == 2
+ assert dct[3] == 3
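
In application code the stream-ID helper tested above is typically used in a simple loop: ask the connection for the next free client ID, then open the stream. A short sketch (with placeholder headers and request count):

    import h2.connection

    conn = h2.connection.H2Connection()
    conn.initiate_connection()

    request_headers = [
        (':authority', 'example.com'),
        (':path', '/'),
        (':scheme', 'https'),
        (':method', 'GET'),
    ]

    # Client stream IDs are odd (1, 3, 5, ...) and only advance once the
    # stream is actually opened by send_headers().
    for _ in range(3):
        stream_id = conn.get_next_available_stream_id()
        conn.send_headers(
            stream_id=stream_id,
            headers=request_headers,
            end_stream=True,
        )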
diff --git a/testing/web-platform/tests/tools/third_party/h2/test_requirements.txt b/testing/web-platform/tests/tools/third_party/h2/test_requirements.txt
new file mode 100644
index 0000000000..e0eefa28ac
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/test_requirements.txt
@@ -0,0 +1,5 @@
+pytest==4.6.5 # rq.filter: < 5
+pytest-cov==2.8.1
+pytest-xdist==1.31.0
+coverage==4.5.4
+hypothesis
diff --git a/testing/web-platform/tests/tools/third_party/h2/tox.ini b/testing/web-platform/tests/tools/third_party/h2/tox.ini
new file mode 100644
index 0000000000..971f9a07e9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/tox.ini
@@ -0,0 +1,48 @@
+[tox]
+envlist = py27, py34, py35, py36, py37, py38, pypy, lint, packaging, docs
+
+[testenv]
+deps= -r{toxinidir}/test_requirements.txt
+commands=
+ coverage run -m py.test {posargs}
+ coverage report
+
+[testenv:pypy]
+# temporarily disable coverage testing on PyPy due to performance problems
+commands= py.test {posargs}
+
+[testenv:lint]
+basepython=python3.7
+deps = flake8==3.7.8
+commands = flake8 --max-complexity 10 h2 test
+
+[testenv:docs]
+basepython=python3.7
+deps = sphinx==2.2.0
+changedir = {toxinidir}/docs
+whitelist_externals = rm
+commands =
+ rm -rf build
+ sphinx-build -nW -b html -d build/doctrees source build/html
+
+[testenv:graphs]
+basepython=python2.7
+deps = graphviz==0.13
+commands =
+ python visualizer/visualize.py -i docs/source/_static
+
+[testenv:packaging]
+basepython=python3.7
+deps =
+ check-manifest==0.39
+ readme-renderer==24.0
+commands =
+ check-manifest
+ python setup.py check --metadata --restructuredtext --strict
+
+[testenv:h2spec]
+basepython=python3.6
+deps = twisted[tls]==19.7.0
+whitelist_externals = {toxinidir}/test/h2spectest.sh
+commands =
+ {toxinidir}/test/h2spectest.sh
diff --git a/testing/web-platform/tests/tools/third_party/h2/utils/backport.sh b/testing/web-platform/tests/tools/third_party/h2/utils/backport.sh
new file mode 100755
index 0000000000..273c1479b2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/utils/backport.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+
+# This script is invoked as follows: the first argument is the target branch
+# for the backport. All following arguments are considered the "commit spec",
+# and will be passed to cherry-pick.
+
+TARGET_BRANCH="$1"
+PR_BRANCH="backport-${TARGET_BRANCH}"
+COMMIT_SPEC="${@:2}"
+
+if ! git checkout "$TARGET_BRANCH"; then
+ echo "Failed to checkout $TARGET_BRANCH"
+ exit 1
+fi
+
+if ! git pull --ff-only; then
+ echo "Unable to update $TARGET_BRANCH"
+ exit 2
+fi
+
+if ! git checkout -b "$PR_BRANCH"; then
+ echo "Failed to open new branch $PR_BRANCH"
+ exit 3
+fi
+
+if ! git cherry-pick -x $COMMIT_SPEC; then
+ echo "Cherry-pick failed. Please fix up manually."
+else
+ echo "Clean backport. Add changelog and open PR."
+fi
+
diff --git a/testing/web-platform/tests/tools/third_party/h2/visualizer/NOTICES.visualizer b/testing/web-platform/tests/tools/third_party/h2/visualizer/NOTICES.visualizer
new file mode 100644
index 0000000000..202ca64e17
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/visualizer/NOTICES.visualizer
@@ -0,0 +1,24 @@
+This module contains code inspired by and borrowed from Automat. That code was
+made available under the following license:
+
+Copyright (c) 2014
+Rackspace
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/h2/visualizer/visualize.py b/testing/web-platform/tests/tools/third_party/h2/visualizer/visualize.py
new file mode 100644
index 0000000000..1fd3f179cb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/h2/visualizer/visualize.py
@@ -0,0 +1,252 @@
+# -*- coding: utf-8 -*-
+"""
+State Machine Visualizer
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+This code provides a module that can use graphviz to visualise the state
+machines included in hyper-h2. These visualisations can be used as part of the
+documentation of hyper-h2, and as a reference material to understand how the
+state machines function.
+
+The code in this module is heavily inspired by code in Automat, which can be
+found here: https://github.com/glyph/automat. For details on the licensing of
+Automat, please see the NOTICES.visualizer file in this folder.
+
+This module is very deliberately not shipped with the rest of hyper-h2. This is
+because it is of minimal value to users who are installing hyper-h2: its use
+is only really for the developers of hyper-h2.
+"""
+from __future__ import print_function
+import argparse
+import collections
+import sys
+
+import graphviz
+import graphviz.files
+
+import h2.connection
+import h2.stream
+
+
+StateMachine = collections.namedtuple(
+ 'StateMachine', ['fqdn', 'machine', 'states', 'inputs', 'transitions']
+)
+
+
+# This is all the state machines we currently know about and will render.
+# If any new state machines are added, they should be inserted here.
+STATE_MACHINES = [
+ StateMachine(
+ fqdn='h2.connection.H2ConnectionStateMachine',
+ machine=h2.connection.H2ConnectionStateMachine,
+ states=h2.connection.ConnectionState,
+ inputs=h2.connection.ConnectionInputs,
+ transitions=h2.connection.H2ConnectionStateMachine._transitions,
+ ),
+ StateMachine(
+ fqdn='h2.stream.H2StreamStateMachine',
+ machine=h2.stream.H2StreamStateMachine,
+ states=h2.stream.StreamState,
+ inputs=h2.stream.StreamInputs,
+ transitions=h2.stream._transitions,
+ ),
+]
+
+
+def quote(s):
+ return '"{}"'.format(s.replace('"', r'\"'))
+
+
+def html(s):
+ return '<{}>'.format(s)
+
+
+def element(name, *children, **attrs):
+ """
+ Construct a string from the HTML element description.
+ """
+ formatted_attributes = ' '.join(
+ '{}={}'.format(key, quote(str(value)))
+ for key, value in sorted(attrs.items())
+ )
+ formatted_children = ''.join(children)
+ return u'<{name} {attrs}>{children}</{name}>'.format(
+ name=name,
+ attrs=formatted_attributes,
+ children=formatted_children
+ )
+
+
+def row_for_output(event, side_effect):
+ """
+ Given an output tuple (an event and its side effect), generates a table row
+ from it.
+ """
+ point_size = {'point-size': '9'}
+ event_cell = element(
+ "td",
+ element("font", enum_member_name(event), **point_size)
+ )
+ side_effect_name = (
+ function_name(side_effect) if side_effect is not None else "None"
+ )
+ side_effect_cell = element(
+ "td",
+ element("font", side_effect_name, **point_size)
+ )
+ return element("tr", event_cell, side_effect_cell)
+
+
+def table_maker(initial_state, final_state, outputs, port):
+ """
+ Construct an HTML table to label a state transition.
+ """
+ header = "{} -&gt; {}".format(
+ enum_member_name(initial_state), enum_member_name(final_state)
+ )
+ header_row = element(
+ "tr",
+ element(
+ "td",
+ element(
+ "font",
+ header,
+ face="menlo-italic"
+ ),
+ port=port,
+ colspan="2",
+ )
+ )
+ rows = [header_row]
+ rows.extend(row_for_output(*output) for output in outputs)
+ return element("table", *rows)
+
+
+def enum_member_name(state):
+ """
+ All enum member names have the form <EnumClassName>.<EnumMemberName>. For
+ our rendering we only want the member name, so we take the string representation
+ and split it.
+ """
+ return str(state).split('.', 1)[1]
+
+
+def function_name(func):
+ """
+ Given a side-effect function, return its string name.
+ """
+ return func.__name__
+
+
+def build_digraph(state_machine):
+ """
+ Produce a L{graphviz.Digraph} object from a state machine.
+ """
+ digraph = graphviz.Digraph(node_attr={'fontname': 'Menlo'},
+ edge_attr={'fontname': 'Menlo'},
+ graph_attr={'dpi': '200'})
+
+ # First, add the states as nodes.
+ seen_first_state = False
+ for state in state_machine.states:
+ if not seen_first_state:
+ state_shape = "bold"
+ font_name = "Menlo-Bold"
+ else:
+ state_shape = ""
+ font_name = "Menlo"
+ digraph.node(enum_member_name(state),
+ fontname=font_name,
+ shape="ellipse",
+ style=state_shape,
+ color="blue")
+ seen_first_state = True
+
+ # We frequently have very many inputs that all trigger the same state
+ # transition, and only differ in terms of their input and side-effect. It
+ # would be polite to say that graphviz does not handle this very well. So
+ # instead we *collapse* the state transitions all into the one edge, and
+ # then provide a label that displays a table of all the inputs and their
+ # associated side effects.
+ transitions = collections.defaultdict(list)
+ for transition in state_machine.transitions.items():
+ initial_state, event = transition[0]
+ side_effect, final_state = transition[1]
+ transition_key = (initial_state, final_state)
+ transitions[transition_key].append((event, side_effect))
+
+ for n, (transition_key, outputs) in enumerate(transitions.items()):
+ this_transition = "t{}".format(n)
+ initial_state, final_state = transition_key
+
+ port = "tableport"
+ table = table_maker(
+ initial_state=initial_state,
+ final_state=final_state,
+ outputs=outputs,
+ port=port
+ )
+
+ digraph.node(this_transition,
+ label=html(table), margin="0.2", shape="none")
+
+ digraph.edge(enum_member_name(initial_state),
+ '{}:{}:w'.format(this_transition, port),
+ arrowhead="none")
+ digraph.edge('{}:{}:e'.format(this_transition, port),
+ enum_member_name(final_state))
+
+ return digraph
+
+
+def main():
+ """
+ Renders all the state machines in hyper-h2 into images.
+ """
+ program_name = sys.argv[0]
+ argv = sys.argv[1:]
+
+ description = """
+ Visualize hyper-h2 state machines as graphs.
+ """
+ epilog = """
+ You must have the graphviz tool suite installed. Please visit
+ http://www.graphviz.org for more information.
+ """
+
+ argument_parser = argparse.ArgumentParser(
+ prog=program_name,
+ description=description,
+ epilog=epilog
+ )
+ argument_parser.add_argument(
+ '--image-directory',
+ '-i',
+ help="Where to write out image files.",
+ default=".h2_visualize"
+ )
+ argument_parser.add_argument(
+ '--view',
+ '-v',
+ help="View rendered graphs with default image viewer",
+ default=False,
+ action="store_true"
+ )
+ args = argument_parser.parse_args(argv)
+
+ for state_machine in STATE_MACHINES:
+ print(state_machine.fqdn, '...discovered')
+
+ digraph = build_digraph(state_machine)
+
+ if args.image_directory:
+ digraph.format = "png"
+ digraph.render(filename="{}.dot".format(state_machine.fqdn),
+ directory=args.image_directory,
+ view=args.view,
+ cleanup=True)
+ print(state_machine.fqdn, "...wrote image into", args.image_directory)
+
+
+if __name__ == '__main__':
+ main()
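
To make the label-building helpers concrete, the sketch below re-declares quote() and element() locally (so it runs on its own) and shows the kind of HTML-like string that build_digraph() ends up handing to graphviz as an edge label:

    def quote(s):
        return '"{}"'.format(s.replace('"', r'\"'))

    def element(name, *children, **attrs):
        # '<name attr="value">children</name>', attributes sorted by key.
        formatted_attributes = ' '.join(
            '{}={}'.format(key, quote(str(value)))
            for key, value in sorted(attrs.items())
        )
        formatted_children = ''.join(children)
        return '<{name} {attrs}>{children}</{name}>'.format(
            name=name, attrs=formatted_attributes, children=formatted_children
        )

    cell = element("td", "SEND_HEADERS", colspan="2")
    print(element("tr", cell))
    # <tr ><td colspan="2">SEND_HEADERS</td></tr>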
diff --git a/testing/web-platform/tests/tools/third_party/hpack/CONTRIBUTORS.rst b/testing/web-platform/tests/tools/third_party/hpack/CONTRIBUTORS.rst
new file mode 100644
index 0000000000..f56d156eea
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/CONTRIBUTORS.rst
@@ -0,0 +1,62 @@
+Hyper is written and maintained by Cory Benfield and various contributors:
+
+Development Lead
+````````````````
+
+- Cory Benfield <cory@lukasa.co.uk>
+
+Contributors (hpack)
+````````````````````
+In chronological order:
+
+- Sriram Ganesan (@elricL)
+
+ - Implemented the Huffman encoding/decoding logic.
+
+- Tatsuhiro Tsujikawa (@tatsuhiro-t)
+
+ - Improved compression efficiency.
+
+- Jim Carreer (@jimcarreer)
+
+ - Support for 'never indexed' header fields.
+ - Refactor of header table code.
+ - Add support for returning bytestring headers instead of UTF-8 decoded ones.
+
+- Eugene Obukhov (@irvind)
+
+ - Improved decoding efficiency.
+
+- Ian Foote (@Ian-Foote)
+
+ - 25% performance improvement to integer decode.
+
+- Davey Shafik (@dshafik)
+
+ - More testing.
+
+- Seth Michael Larson (@SethMichaelLarson)
+
+ - Code cleanups.
+
+Contributors (hyper)
+````````````````````
+
+In chronological order:
+
+- Alek Storm (@alekstorm)
+
+ - Implemented Python 2.7 support.
+ - Implemented HTTP/2 draft 10 support.
+ - Implemented server push.
+
+- Tetsuya Morimoto (@t2y)
+
+ - Fixed a bug where large or incomplete frames were not handled correctly.
+ - Added hyper command-line tool.
+ - General code cleanups.
+
+- Jerome De Cuyper (@jdecuyper)
+
+ - Updated documentation and tests.
+
diff --git a/testing/web-platform/tests/tools/third_party/hpack/HISTORY.rst b/testing/web-platform/tests/tools/third_party/hpack/HISTORY.rst
new file mode 100644
index 0000000000..37b2d9c009
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/HISTORY.rst
@@ -0,0 +1,134 @@
+Release History
+===============
+
+3.0.0 (2017-03-29)
+------------------
+
+**API Changes (Backward Incompatible)**
+
+- Removed nghttp2 support. This support had rotted and was essentially
+ non-functional, so it has now been removed until someone has time to re-add
+ the support in a functional form.
+- Attempts by the encoder to exceed the maximum allowed header table size via
+ dynamic table size updates (or the absence thereof) are now forbidden.
+
+**API Changes (Backward Compatible)**
+
+- Added a new ``InvalidTableSizeError`` thrown when the encoder does not
+ respect the maximum table size set by the user.
+- Added a ``Decoder.max_allowed_table_size`` field that sets the maximum
+ allowed size of the decoder header table. See the documentation for an
+ indication of how this should be used.
+
+**Bugfixes**
+
+- Up to 25% performance improvement decoding HPACK-packed integers, depending
+ on the platform.
+- HPACK now tolerates receiving multiple header table size changes in sequence,
+ rather than only one.
+- HPACK now forbids header table size changes anywhere but first in a header
+ block, as required by RFC 7541 § 4.2.
+- Other miscellaneous performance improvements.
+
+2.3.0 (2016-08-04)
+------------------
+
+**Security Fixes**
+
+- CVE-2016-6581: HPACK Bomb. This release now enforces a maximum value of the
+ decompressed size of the header list. This is to avoid the so-called "HPACK
+ Bomb" vulnerability, which is caused when a malicious peer sends a compressed
+ HPACK body that decompresses to a gigantic header list size.
+
+ This also adds a ``OversizedHeaderListError``, which is thrown by the
+ ``decode`` method if the maximum header list size is being violated. This
+ places the HPACK decoder into a broken state: it must not be used after this
+ exception is thrown.
+
+ This also adds a ``max_header_list_size`` to the ``Decoder`` object. This
+ controls the maximum allowable decompressed size of the header list. By
+ default this is set to 64kB.
+
+2.2.0 (2016-04-20)
+------------------
+
+**API Changes (Backward Compatible)**
+
+- Added ``HeaderTuple`` and ``NeverIndexedHeaderTuple`` classes that signal
+ whether a given header field may ever be indexed in HTTP/2 header
+ compression.
+- Changed ``Decoder.decode()`` to return the newly added ``HeaderTuple`` class
+ and subclass. These objects behave like two-tuples, so this change does not
+ break working code.
+
+**Bugfixes**
+
+- Improve Huffman decoding speed by 4x using an approach borrowed from nghttp2.
+- Improve HPACK decoding speed by 10% by caching header table sizes.
+
+2.1.1 (2016-03-16)
+------------------
+
+**Bugfixes**
+
+- When passing a dictionary or dictionary subclass to ``Encoder.encode``, HPACK
+ now ensures that HTTP/2 special headers (headers whose names begin with
+ ``:`` characters) appear first in the header block.
+
+2.1.0 (2016-02-02)
+------------------
+
+**API Changes (Backward Compatible)**
+
+- Added new ``InvalidTableIndex`` exception, a subclass of
+ ``HPACKDecodingError``.
+- Instead of throwing ``IndexError`` when encountering invalid encoded integers
+ HPACK now throws ``HPACKDecodingError``.
+- Instead of throwing ``UnicodeDecodeError`` when encountering headers that are
+ not UTF-8 encoded, HPACK now throws ``HPACKDecodingError``.
+- Instead of throwing ``IndexError`` when encountering invalid table offsets,
+ HPACK now throws ``InvalidTableIndex``.
+- Added ``raw`` flag to ``decode``, allowing ``decode`` to return bytes instead
+ of attempting to decode the headers as UTF-8.
+
+**Bugfixes**
+
+- ``memoryview`` objects are now used when decoding HPACK, improving the
+ performance by avoiding unnecessary data copies.
+
+2.0.1 (2015-11-09)
+------------------
+
+- Fixed a bug where the Python HPACK implementation would only emit header
+ table size changes for the total change between one header block and another,
+ rather than for the entire sequence of changes.
+
+2.0.0 (2015-10-12)
+------------------
+
+- Remove unused ``HPACKEncodingError``.
+- Add the shortcut ability to import the public API (``Encoder``, ``Decoder``,
+ ``HPACKError``, ``HPACKDecodingError``) directly, rather than from
+ ``hpack.hpack``.
+
+1.1.0 (2015-07-07)
+------------------
+
+- Add support for emitting 'never indexed' header fields, by using an optional
+ third element in the header tuple. With thanks to @jimcarreer!
+
+1.0.1 (2015-04-19)
+------------------
+
+- Header fields that have names matching header table entries are now added to
+ the header table. This improves compression efficiency at the cost of
+ slightly more table operations. With thanks to `Tatsuhiro Tsujikawa`_.
+
+.. _Tatsuhiro Tsujikawa: https://github.com/tatsuhiro-t
+
+1.0.0 (2015-04-13)
+------------------
+
+- Initial fork of the code from `hyper`_.
+
+.. _hyper: https://hyper.readthedocs.org/
diff --git a/testing/web-platform/tests/tools/third_party/hpack/LICENSE b/testing/web-platform/tests/tools/third_party/hpack/LICENSE
new file mode 100644
index 0000000000..d24c351e18
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Cory Benfield
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/hpack/MANIFEST.in b/testing/web-platform/tests/tools/third_party/hpack/MANIFEST.in
new file mode 100644
index 0000000000..2f464676cb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/MANIFEST.in
@@ -0,0 +1,2 @@
+include README.rst LICENSE CONTRIBUTORS.rst HISTORY.rst
+
diff --git a/testing/web-platform/tests/tools/third_party/hpack/PKG-INFO b/testing/web-platform/tests/tools/third_party/hpack/PKG-INFO
new file mode 100644
index 0000000000..c2a3a1a7f5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/PKG-INFO
@@ -0,0 +1,199 @@
+Metadata-Version: 1.1
+Name: hpack
+Version: 3.0.0
+Summary: Pure-Python HPACK header compression
+Home-page: http://hyper.rtfd.org
+Author: Cory Benfield
+Author-email: cory@lukasa.co.uk
+License: MIT License
+Description: ========================================
+ hpack: HTTP/2 Header Encoding for Python
+ ========================================
+
+ .. image:: https://raw.github.com/Lukasa/hyper/development/docs/source/images/hyper.png
+
+ .. image:: https://travis-ci.org/python-hyper/hpack.png?branch=master
+ :target: https://travis-ci.org/python-hyper/hpack
+
+ This module contains a pure-Python HTTP/2 header encoding (HPACK) logic for use
+ in Python programs that implement HTTP/2. It also contains a compatibility
+ layer that automatically enables the use of ``nghttp2`` if it's available.
+
+ Documentation
+ =============
+
+ Documentation is available at http://python-hyper.org/hpack/.
+
+ Contributing
+ ============
+
+ ``hpack`` welcomes contributions from anyone! Unlike many other projects we are
+ happy to accept cosmetic contributions and small contributions, in addition to
+ large feature requests and changes.
+
+ Before you contribute (either by opening an issue or filing a pull request),
+ please `read the contribution guidelines`_.
+
+ .. _read the contribution guidelines: http://hyper.readthedocs.org/en/development/contributing.html
+
+ License
+ =======
+
+ ``hpack`` is made available under the MIT License. For more details, see the
+ ``LICENSE`` file in the repository.
+
+ Authors
+ =======
+
+ ``hpack`` is maintained by Cory Benfield, with contributions from others. For
+ more details about the contributors, please see ``CONTRIBUTORS.rst``.
+
+
+ Release History
+ ===============
+
+ 3.0.0 (2017-03-29)
+ ------------------
+
+ **API Changes (Backward Incompatible)**
+
+ - Removed nghttp2 support. This support had rotted and was essentially
+ non-functional, so it has now been removed until someone has time to re-add
+ the support in a functional form.
+ - Attempts by the encoder to exceed the maximum allowed header table size via
+ dynamic table size updates (or the absence thereof) are now forbidden.
+
+ **API Changes (Backward Compatible)**
+
+ - Added a new ``InvalidTableSizeError`` thrown when the encoder does not
+ respect the maximum table size set by the user.
+ - Added a ``Decoder.max_allowed_table_size`` field that sets the maximum
+ allowed size of the decoder header table. See the documentation for an
+ indication of how this should be used.
+
+ **Bugfixes**
+
+ - Up to 25% performance improvement decoding HPACK-packed integers, depending
+ on the platform.
+ - HPACK now tolerates receiving multiple header table size changes in sequence,
+ rather than only one.
+ - HPACK now forbids header table size changes anywhere but first in a header
+ block, as required by RFC 7541 § 4.2.
+ - Other miscellaneous performance improvements.
+
+ 2.3.0 (2016-08-04)
+ ------------------
+
+ **Security Fixes**
+
+ - CVE-2016-6581: HPACK Bomb. This release now enforces a maximum value of the
+ decompressed size of the header list. This is to avoid the so-called "HPACK
+ Bomb" vulnerability, which is caused when a malicious peer sends a compressed
+ HPACK body that decompresses to a gigantic header list size.
+
+ This also adds a ``OversizedHeaderListError``, which is thrown by the
+ ``decode`` method if the maximum header list size is being violated. This
+ places the HPACK decoder into a broken state: it must not be used after this
+ exception is thrown.
+
+ This also adds a ``max_header_list_size`` to the ``Decoder`` object. This
+ controls the maximum allowable decompressed size of the header list. By
+ default this is set to 64kB.
+
+ 2.2.0 (2016-04-20)
+ ------------------
+
+ **API Changes (Backward Compatible)**
+
+ - Added ``HeaderTuple`` and ``NeverIndexedHeaderTuple`` classes that signal
+ whether a given header field may ever be indexed in HTTP/2 header
+ compression.
+ - Changed ``Decoder.decode()`` to return the newly added ``HeaderTuple`` class
+ and subclass. These objects behave like two-tuples, so this change does not
+ break working code.
+
+ **Bugfixes**
+
+ - Improve Huffman decoding speed by 4x using an approach borrowed from nghttp2.
+ - Improve HPACK decoding speed by 10% by caching header table sizes.
+
+ 2.1.1 (2016-03-16)
+ ------------------
+
+ **Bugfixes**
+
+ - When passing a dictionary or dictionary subclass to ``Encoder.encode``, HPACK
+ now ensures that HTTP/2 special headers (headers whose names begin with
+ ``:`` characters) appear first in the header block.
+
+ 2.1.0 (2016-02-02)
+ ------------------
+
+ **API Changes (Backward Compatible)**
+
+ - Added new ``InvalidTableIndex`` exception, a subclass of
+ ``HPACKDecodingError``.
+ - Instead of throwing ``IndexError`` when encountering invalid encoded integers
+ HPACK now throws ``HPACKDecodingError``.
+ - Instead of throwing ``UnicodeDecodeError`` when encountering headers that are
+ not UTF-8 encoded, HPACK now throws ``HPACKDecodingError``.
+ - Instead of throwing ``IndexError`` when encountering invalid table offsets,
+ HPACK now throws ``InvalidTableIndex``.
+ - Added ``raw`` flag to ``decode``, allowing ``decode`` to return bytes instead
+ of attempting to decode the headers as UTF-8.
+
+ **Bugfixes**
+
+ - ``memoryview`` objects are now used when decoding HPACK, improving the
+ performance by avoiding unnecessary data copies.
+
+ 2.0.1 (2015-11-09)
+ ------------------
+
+ - Fixed a bug where the Python HPACK implementation would only emit header
+ table size changes for the total change between one header block and another,
+ rather than for the entire sequence of changes.
+
+ 2.0.0 (2015-10-12)
+ ------------------
+
+ - Remove unused ``HPACKEncodingError``.
+ - Add the shortcut ability to import the public API (``Encoder``, ``Decoder``,
+ ``HPACKError``, ``HPACKDecodingError``) directly, rather than from
+ ``hpack.hpack``.
+
+ 1.1.0 (2015-07-07)
+ ------------------
+
+ - Add support for emitting 'never indexed' header fields, by using an optional
+ third element in the header tuple. With thanks to @jimcarreer!
+
+ 1.0.1 (2015-04-19)
+ ------------------
+
+ - Header fields that have names matching header table entries are now added to
+ the header table. This improves compression efficiency at the cost of
+ slightly more table operations. With thanks to `Tatsuhiro Tsujikawa`_.
+
+ .. _Tatsuhiro Tsujikawa: https://github.com/tatsuhiro-t
+
+ 1.0.0 (2015-04-13)
+ ------------------
+
+ - Initial fork of the code from `hyper`_.
+
+ .. _hyper: https://hyper.readthedocs.org/
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: Implementation :: CPython
diff --git a/testing/web-platform/tests/tools/third_party/hpack/README.rst b/testing/web-platform/tests/tools/third_party/hpack/README.rst
new file mode 100644
index 0000000000..1a04397b94
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/README.rst
@@ -0,0 +1,41 @@
+========================================
+hpack: HTTP/2 Header Encoding for Python
+========================================
+
+.. image:: https://raw.github.com/Lukasa/hyper/development/docs/source/images/hyper.png
+
+.. image:: https://travis-ci.org/python-hyper/hpack.png?branch=master
+ :target: https://travis-ci.org/python-hyper/hpack
+
+This module contains pure-Python HTTP/2 header encoding (HPACK) logic for use
+in Python programs that implement HTTP/2. It also contains a compatibility
+layer that automatically enables the use of ``nghttp2`` if it's available.
+
+Documentation
+=============
+
+Documentation is available at http://python-hyper.org/hpack/.
+
+Contributing
+============
+
+``hpack`` welcomes contributions from anyone! Unlike many other projects we are
+happy to accept cosmetic contributions and small contributions, in addition to
+large feature requests and changes.
+
+Before you contribute (either by opening an issue or filing a pull request),
+please `read the contribution guidelines`_.
+
+.. _read the contribution guidelines: http://hyper.readthedocs.org/en/development/contributing.html
+
+License
+=======
+
+``hpack`` is made available under the MIT License. For more details, see the
+``LICENSE`` file in the repository.
+
+Authors
+=======
+
+``hpack`` is maintained by Cory Benfield, with contributions from others. For
+more details about the contributors, please see ``CONTRIBUTORS.rst``.
diff --git a/testing/web-platform/tests/tools/third_party/hpack/hpack/__init__.py b/testing/web-platform/tests/tools/third_party/hpack/hpack/__init__.py
new file mode 100644
index 0000000000..22edde2ce3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/hpack/__init__.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+"""
+hpack
+~~~~~
+
+HTTP/2 header encoding for Python.
+"""
+from .hpack import Encoder, Decoder
+from .struct import HeaderTuple, NeverIndexedHeaderTuple
+from .exceptions import (
+ HPACKError, HPACKDecodingError, InvalidTableIndex, OversizedHeaderListError
+)
+
+__all__ = [
+ 'Encoder', 'Decoder', 'HPACKError', 'HPACKDecodingError',
+ 'InvalidTableIndex', 'HeaderTuple', 'NeverIndexedHeaderTuple',
+ 'OversizedHeaderListError'
+]
+
+__version__ = '3.0.0'
diff --git a/testing/web-platform/tests/tools/third_party/hpack/hpack/compat.py b/testing/web-platform/tests/tools/third_party/hpack/hpack/compat.py
new file mode 100644
index 0000000000..4fcaad439f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/hpack/compat.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+"""
+hpack/compat
+~~~~~~~~~~~~
+
+Normalizes the Python 2/3 API for internal use.
+"""
+import sys
+
+
+_ver = sys.version_info
+is_py2 = _ver[0] == 2
+is_py3 = _ver[0] == 3
+
+if is_py2:
+ def to_byte(char):
+ return ord(char)
+
+ def decode_hex(b):
+ return b.decode('hex')
+
+ def to_bytes(b):
+ if isinstance(b, memoryview):
+ return b.tobytes()
+ else:
+ return bytes(b)
+
+ unicode = unicode # noqa
+ bytes = str
+
+elif is_py3:
+ def to_byte(char):
+ return char
+
+ def decode_hex(b):
+ return bytes.fromhex(b)
+
+ def to_bytes(b):
+ return bytes(b)
+
+ unicode = str
+ bytes = bytes
diff --git a/testing/web-platform/tests/tools/third_party/hpack/hpack/exceptions.py b/testing/web-platform/tests/tools/third_party/hpack/hpack/exceptions.py
new file mode 100644
index 0000000000..571ba98f2c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/hpack/exceptions.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+"""
+hyper/http20/exceptions
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This defines exceptions used in the HTTP/2 portion of hyper.
+"""
+
+
+class HPACKError(Exception):
+ """
+ The base class for all ``hpack`` exceptions.
+ """
+ pass
+
+
+class HPACKDecodingError(HPACKError):
+ """
+ An error has been encountered while performing HPACK decoding.
+ """
+ pass
+
+
+class InvalidTableIndex(HPACKDecodingError):
+ """
+ An invalid table index was received.
+ """
+ pass
+
+
+class OversizedHeaderListError(HPACKDecodingError):
+ """
+ A header list that was larger than we allow has been received. This may be
+ a DoS attack.
+
+ .. versionadded:: 2.3.0
+ """
+ pass
+
+
+class InvalidTableSizeError(HPACKDecodingError):
+ """
+ An attempt was made to change the decoder table size to a value larger than
+ allowed, or the list was shrunk and the remote peer didn't shrink their
+ table size.
+
+ .. versionadded:: 3.0.0
+ """
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/hpack/hpack/hpack.py b/testing/web-platform/tests/tools/third_party/hpack/hpack/hpack.py
new file mode 100644
index 0000000000..f8e808bec9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/hpack/hpack.py
@@ -0,0 +1,629 @@
+# -*- coding: utf-8 -*-
+"""
+hpack/hpack
+~~~~~~~~~~~
+
+Implements the HPACK header compression algorithm as detailed by the IETF.
+"""
+import logging
+
+from .table import HeaderTable, table_entry_size
+from .compat import to_byte, to_bytes
+from .exceptions import (
+ HPACKDecodingError, OversizedHeaderListError, InvalidTableSizeError
+)
+from .huffman import HuffmanEncoder
+from .huffman_constants import (
+ REQUEST_CODES, REQUEST_CODES_LENGTH
+)
+from .huffman_table import decode_huffman
+from .struct import HeaderTuple, NeverIndexedHeaderTuple
+
+log = logging.getLogger(__name__)
+
+INDEX_NONE = b'\x00'
+INDEX_NEVER = b'\x10'
+INDEX_INCREMENTAL = b'\x40'
+
+# Precompute 2^i for 1-8 for use in prefix calcs.
+# Zero index is not used but there to save a subtraction
+# as prefix numbers are not zero indexed.
+_PREFIX_BIT_MAX_NUMBERS = [(2 ** i) - 1 for i in range(9)]
+
+try: # pragma: no cover
+ basestring = basestring
+except NameError: # pragma: no cover
+ basestring = (str, bytes)
+
+
+# We default the maximum header list we're willing to accept to 64kB. That's a
+# lot of headers, but if applications want to raise it they can do so.
+DEFAULT_MAX_HEADER_LIST_SIZE = 2 ** 16
+
+
+def _unicode_if_needed(header, raw):
+ """
+ Provides a header as a unicode string if raw is False, otherwise returns
+ it as a bytestring.
+ """
+ name = to_bytes(header[0])
+ value = to_bytes(header[1])
+ if not raw:
+ name = name.decode('utf-8')
+ value = value.decode('utf-8')
+ return header.__class__(name, value)
+
+
+def encode_integer(integer, prefix_bits):
+ """
+ This encodes an integer according to the wacky integer encoding rules
+ defined in the HPACK spec.
+ """
+ log.debug("Encoding %d with %d bits", integer, prefix_bits)
+
+ if integer < 0:
+ raise ValueError(
+ "Can only encode positive integers, got %s" % integer
+ )
+
+ if prefix_bits < 1 or prefix_bits > 8:
+ raise ValueError(
+ "Prefix bits must be between 1 and 8, got %s" % prefix_bits
+ )
+
+ max_number = _PREFIX_BIT_MAX_NUMBERS[prefix_bits]
+
+ if integer < max_number:
+ return bytearray([integer]) # Seriously?
+ else:
+ elements = [max_number]
+ integer -= max_number
+
+ while integer >= 128:
+ elements.append((integer & 127) + 128)
+ integer >>= 7
+
+ elements.append(integer)
+
+ return bytearray(elements)
+
+
+def decode_integer(data, prefix_bits):
+ """
+ This decodes an integer according to the wacky integer encoding rules
+ defined in the HPACK spec. Returns a tuple of the decoded integer and the
+ number of bytes that were consumed from ``data`` in order to get that
+ integer.
+ """
+ if prefix_bits < 1 or prefix_bits > 8:
+ raise ValueError(
+ "Prefix bits must be between 1 and 8, got %s" % prefix_bits
+ )
+
+ max_number = _PREFIX_BIT_MAX_NUMBERS[prefix_bits]
+ index = 1
+ shift = 0
+ mask = (0xFF >> (8 - prefix_bits))
+
+ try:
+ number = to_byte(data[0]) & mask
+ if number == max_number:
+ while True:
+ next_byte = to_byte(data[index])
+ index += 1
+
+ if next_byte >= 128:
+ number += (next_byte - 128) << shift
+ else:
+ number += next_byte << shift
+ break
+ shift += 7
+
+ except IndexError:
+ raise HPACKDecodingError(
+ "Unable to decode HPACK integer representation from %r" % data
+ )
+
+ log.debug("Decoded %d, consumed %d bytes", number, index)
+
+ return number, index
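
As a concrete check of the prefixed-integer coding implemented by these two functions, the example from RFC 7541, Appendix C.1.2 (1337 with a 5-bit prefix) round-trips as follows; the sketch assumes the hpack package is importable:

    from hpack.hpack import encode_integer, decode_integer

    # 1337 does not fit in 5 bits, so the prefix is filled with 31 and the
    # remainder (1306) is emitted as 7-bit continuation chunks.
    encoded = encode_integer(1337, prefix_bits=5)
    assert encoded == bytearray([31, 154, 10])        # 0x1f 0x9a 0x0a

    # Decoding returns the value and the number of bytes consumed.
    assert decode_integer(bytes(encoded), prefix_bits=5) == (1337, 3)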
+
+
+def _dict_to_iterable(header_dict):
+ """
+ This converts a dictionary to an iterable of two-tuples. This is a
+ HPACK-specific function because it pulls "special-headers" out first and
+ then emits them.
+ """
+ assert isinstance(header_dict, dict)
+ keys = sorted(
+ header_dict.keys(),
+ key=lambda k: not _to_bytes(k).startswith(b':')
+ )
+ for key in keys:
+ yield key, header_dict[key]
+
+
+def _to_bytes(string):
+ """
+ Convert string to bytes.
+ """
+ if not isinstance(string, basestring): # pragma: no cover
+ string = str(string)
+
+ return string if isinstance(string, bytes) else string.encode('utf-8')
+
+
+class Encoder(object):
+ """
+ An HPACK encoder object. This object takes HTTP headers and emits encoded
+ HTTP/2 header blocks.
+ """
+
+ def __init__(self):
+ self.header_table = HeaderTable()
+ self.huffman_coder = HuffmanEncoder(
+ REQUEST_CODES, REQUEST_CODES_LENGTH
+ )
+ self.table_size_changes = []
+
+ @property
+ def header_table_size(self):
+ """
+ Controls the size of the HPACK header table.
+ """
+ return self.header_table.maxsize
+
+ @header_table_size.setter
+ def header_table_size(self, value):
+ self.header_table.maxsize = value
+ if self.header_table.resized:
+ self.table_size_changes.append(value)
+
+ def encode(self, headers, huffman=True):
+ """
+ Takes a set of headers and encodes them into a HPACK-encoded header
+ block.
+
+ :param headers: The headers to encode. Must be either an iterable of
+ tuples, an iterable of :class:`HeaderTuple
+ <hpack.struct.HeaderTuple>`, or a ``dict``.
+
+ If an iterable of tuples, the tuples may be either
+ two-tuples or three-tuples. If they are two-tuples, the
+ tuples must be of the format ``(name, value)``. If they
+ are three-tuples, they must be of the format
+ ``(name, value, sensitive)``, where ``sensitive`` is a
+ boolean value indicating whether the header should be
+ added to header tables anywhere. If not present,
+ ``sensitive`` defaults to ``False``.
+
+ If an iterable of :class:`HeaderTuple
+ <hpack.struct.HeaderTuple>`, the tuples must always be
+ two-tuples. Instead of using ``sensitive`` as a third
+ tuple entry, use :class:`NeverIndexedHeaderTuple
+ <hpack.struct.NeverIndexedHeaderTuple>` to request that
+ the field never be indexed.
+
+ .. warning:: HTTP/2 requires that all special headers
+ (headers whose names begin with ``:`` characters)
+ appear at the *start* of the header block. While
+ this method will ensure that happens for ``dict``
+ subclasses, callers using any other iterable of
+ tuples **must** ensure they place their special
+ headers at the start of the iterable.
+
+ For efficiency reasons users should prefer to use
+ iterables of two-tuples: fixing the ordering of
+ dictionary headers is an expensive operation that
+ should be avoided if possible.
+
+ :param huffman: (optional) Whether to Huffman-encode any header sent as
+ a literal value. Except for use when debugging, it is
+ recommended that this be left enabled.
+
+ :returns: A bytestring containing the HPACK-encoded header block.
+ """
+ # Transforming the headers into a header block is a procedure that can
+ # be modeled as a chain or pipe. First, the headers are encoded. This
+ # encoding can be done in a number of ways. If the header name-value pair
+ # is already in the header table we can represent it using the
+ # indexed representation: the same is true if it is in the static
+ # table. Otherwise, a literal representation will be used.
+ header_block = []
+
+ # Turn the headers into a list of tuples if possible. This is the
+ # natural way to interact with them in HPACK. Because dictionaries are
+ # un-ordered, we need to make sure we grab the "special" headers first.
+ if isinstance(headers, dict):
+ headers = _dict_to_iterable(headers)
+
+ # Before we begin, if the header table size has been changed we need
+ # to signal all changes since last emission appropriately.
+ if self.header_table.resized:
+ header_block.append(self._encode_table_size_change())
+ self.header_table.resized = False
+
+ # Add each header to the header block
+ for header in headers:
+ sensitive = False
+ if isinstance(header, HeaderTuple):
+ sensitive = not header.indexable
+ elif len(header) > 2:
+ sensitive = header[2]
+
+ header = (_to_bytes(header[0]), _to_bytes(header[1]))
+ header_block.append(self.add(header, sensitive, huffman))
+
+ header_block = b''.join(header_block)
+
+ log.debug("Encoded header block to %s", header_block)
+
+ return header_block
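
In practice the whole pipeline described above is driven through encode() alone; a minimal round trip with the public API looks roughly like this (a sketch with placeholder header values):

    from hpack import Decoder, Encoder

    encoder = Encoder()
    decoder = Decoder()

    # Special ':' headers come first; a third tuple element marks a header
    # as sensitive so it is emitted with the never-indexed representation.
    block = encoder.encode([
        (':method', 'GET'),
        (':path', '/'),
        ('cookie', 'placeholder-session-token', True),
    ])

    decoded = decoder.decode(block)
    # decoded is a list of HeaderTuple objects that behave like two-tuples;
    # the sensitive cookie comes back as a NeverIndexedHeaderTuple.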
+
+ def add(self, to_add, sensitive, huffman=False):
+ """
+ This function takes a header key-value tuple and serializes it.
+ """
+ log.debug("Adding %s to the header table", to_add)
+
+ name, value = to_add
+
+ # Set our indexing mode
+ indexbit = INDEX_INCREMENTAL if not sensitive else INDEX_NEVER
+
+ # Search for a matching header in the header table.
+ match = self.header_table.search(name, value)
+
+ if match is None:
+ # Not in the header table. Encode using the literal syntax,
+ # and add it to the header table.
+ encoded = self._encode_literal(name, value, indexbit, huffman)
+ if not sensitive:
+ self.header_table.add(name, value)
+ return encoded
+
+        # The header is in the table; break out the values. If we matched
+ # perfectly, we can use the indexed representation: otherwise we
+ # can use the indexed literal.
+ index, name, perfect = match
+
+ if perfect:
+ # Indexed representation.
+ encoded = self._encode_indexed(index)
+ else:
+            # Indexed literal. We are going to add this header to the
+            # header table unconditionally. A future improvement would be to
+            # filter out headers that are known to be ineffective for
+            # indexing, since they just take space in the table and push out
+            # other valuable headers.
+ encoded = self._encode_indexed_literal(
+ index, value, indexbit, huffman
+ )
+ if not sensitive:
+ self.header_table.add(name, value)
+
+ return encoded
+
+ def _encode_indexed(self, index):
+ """
+ Encodes a header using the indexed representation.
+ """
+ field = encode_integer(index, 7)
+ field[0] |= 0x80 # we set the top bit
+ return bytes(field)
+
+ def _encode_literal(self, name, value, indexbit, huffman=False):
+ """
+        Encodes a header with a literal name and literal value. The
+        ``indexbit`` prefix signals whether the decoder should add the header
+        to its dynamic table.
+ """
+ if huffman:
+ name = self.huffman_coder.encode(name)
+ value = self.huffman_coder.encode(value)
+
+ name_len = encode_integer(len(name), 7)
+ value_len = encode_integer(len(value), 7)
+
+ if huffman:
+ name_len[0] |= 0x80
+ value_len[0] |= 0x80
+
+ return b''.join(
+ [indexbit, bytes(name_len), name, bytes(value_len), value]
+ )
+
+ def _encode_indexed_literal(self, index, value, indexbit, huffman=False):
+ """
+        Encodes a header with an indexed name and a literal value. The
+        ``indexbit`` prefix selects either incremental indexing or the
+        never-indexed representation.
+ """
+ if indexbit != INDEX_INCREMENTAL:
+ prefix = encode_integer(index, 4)
+ else:
+ prefix = encode_integer(index, 6)
+
+ prefix[0] |= ord(indexbit)
+
+ if huffman:
+ value = self.huffman_coder.encode(value)
+
+ value_len = encode_integer(len(value), 7)
+
+ if huffman:
+ value_len[0] |= 0x80
+
+ return b''.join([bytes(prefix), bytes(value_len), value])
+
+ def _encode_table_size_change(self):
+ """
+ Produces the encoded form of all header table size change context
+ updates.
+ """
+ block = b''
+ for size_bytes in self.table_size_changes:
+ size_bytes = encode_integer(size_bytes, 5)
+ size_bytes[0] |= 0x20
+ block += bytes(size_bytes)
+ self.table_size_changes = []
+ return block
+
+
+class Decoder(object):
+ """
+ An HPACK decoder object.
+
+ .. versionchanged:: 2.3.0
+ Added ``max_header_list_size`` argument.
+
+ :param max_header_list_size: The maximum decompressed size we will allow
+ for any single header block. This is a protection against DoS attacks
+ that attempt to force the application to expand a relatively small
+ amount of data into a really large header list, allowing enormous
+ amounts of memory to be allocated.
+
+        If this amount of data is exceeded, an `OversizedHeaderListError
+        <hpack.OversizedHeaderListError>` exception will be raised. At this
+        point the connection should be shut down, as the HPACK state will no
+        longer be usable.
+
+ Defaults to 64kB.
+ :type max_header_list_size: ``int``
+ """
+ def __init__(self, max_header_list_size=DEFAULT_MAX_HEADER_LIST_SIZE):
+ self.header_table = HeaderTable()
+
+ #: The maximum decompressed size we will allow for any single header
+ #: block. This is a protection against DoS attacks that attempt to
+ #: force the application to expand a relatively small amount of data
+ #: into a really large header list, allowing enormous amounts of memory
+ #: to be allocated.
+ #:
+        #: If this amount of data is exceeded, an `OversizedHeaderListError
+ #: <hpack.OversizedHeaderListError>` exception will be raised. At this
+ #: point the connection should be shut down, as the HPACK state will no
+ #: longer be usable.
+ #:
+ #: Defaults to 64kB.
+ #:
+ #: .. versionadded:: 2.3.0
+ self.max_header_list_size = max_header_list_size
+
+ #: Maximum allowed header table size.
+ #:
+        #: An HTTP/2 implementation should set this to the most recent value of
+ #: SETTINGS_HEADER_TABLE_SIZE that it sent *and has received an ACK
+ #: for*. Once this setting is set, the actual header table size will be
+ #: checked at the end of each decoding run and whenever it is changed,
+ #: to confirm that it fits in this size.
+ self.max_allowed_table_size = self.header_table.maxsize
+
+ @property
+ def header_table_size(self):
+ """
+ Controls the size of the HPACK header table.
+ """
+ return self.header_table.maxsize
+
+ @header_table_size.setter
+ def header_table_size(self, value):
+ self.header_table.maxsize = value
+
+ def decode(self, data, raw=False):
+ """
+ Takes an HPACK-encoded header block and decodes it into a header set.
+
+ :param data: A bytestring representing a complete HPACK-encoded header
+ block.
+ :param raw: (optional) Whether to return the headers as tuples of raw
+ byte strings or to decode them as UTF-8 before returning
+ them. The default value is False, which returns tuples of
+                    Unicode strings.
+ :returns: A list of two-tuples of ``(name, value)`` representing the
+ HPACK-encoded headers, in the order they were decoded.
+ :raises HPACKDecodingError: If an error is encountered while decoding
+ the header block.
+ """
+ log.debug("Decoding %s", data)
+
+ data_mem = memoryview(data)
+ headers = []
+ data_len = len(data)
+ inflated_size = 0
+ current_index = 0
+
+ while current_index < data_len:
+ # Work out what kind of header we're decoding.
+ # If the high bit is 1, it's an indexed field.
+ current = to_byte(data[current_index])
+ indexed = True if current & 0x80 else False
+
+ # Otherwise, if the second-highest bit is 1 it's a field that does
+ # alter the header table.
+ literal_index = True if current & 0x40 else False
+
+ # Otherwise, if the third-highest bit is 1 it's an encoding context
+ # update.
+ encoding_update = True if current & 0x20 else False
+
+ if indexed:
+ header, consumed = self._decode_indexed(
+ data_mem[current_index:]
+ )
+ elif literal_index:
+ # It's a literal header that does affect the header table.
+ header, consumed = self._decode_literal_index(
+ data_mem[current_index:]
+ )
+ elif encoding_update:
+ # It's an update to the encoding context. These are forbidden
+ # in a header block after any actual header.
+ if headers:
+ raise HPACKDecodingError(
+ "Table size update not at the start of the block"
+ )
+ consumed = self._update_encoding_context(
+ data_mem[current_index:]
+ )
+ header = None
+ else:
+ # It's a literal header that does not affect the header table.
+ header, consumed = self._decode_literal_no_index(
+ data_mem[current_index:]
+ )
+
+ if header:
+ headers.append(header)
+ inflated_size += table_entry_size(*header)
+
+ if inflated_size > self.max_header_list_size:
+ raise OversizedHeaderListError(
+ "A header list larger than %d has been received" %
+ self.max_header_list_size
+ )
+
+ current_index += consumed
+
+ # Confirm that the table size is lower than the maximum. We do this
+ # here to ensure that we catch when the max has been *shrunk* and the
+ # remote peer hasn't actually done that.
+ self._assert_valid_table_size()
+
+ try:
+ return [_unicode_if_needed(h, raw) for h in headers]
+ except UnicodeDecodeError:
+ raise HPACKDecodingError("Unable to decode headers as UTF-8.")
+
+ def _assert_valid_table_size(self):
+ """
+ Check that the table size set by the encoder is lower than the maximum
+ we expect to have.
+ """
+ if self.header_table_size > self.max_allowed_table_size:
+ raise InvalidTableSizeError(
+ "Encoder did not shrink table size to within the max"
+ )
+
+ def _update_encoding_context(self, data):
+ """
+ Handles a byte that updates the encoding context.
+ """
+ # We've been asked to resize the header table.
+ new_size, consumed = decode_integer(data, 5)
+ if new_size > self.max_allowed_table_size:
+ raise InvalidTableSizeError(
+ "Encoder exceeded max allowable table size"
+ )
+ self.header_table_size = new_size
+ return consumed
+
+ def _decode_indexed(self, data):
+ """
+ Decodes a header represented using the indexed representation.
+ """
+ index, consumed = decode_integer(data, 7)
+ header = HeaderTuple(*self.header_table.get_by_index(index))
+ log.debug("Decoded %s, consumed %d", header, consumed)
+ return header, consumed
+
+ def _decode_literal_no_index(self, data):
+ return self._decode_literal(data, False)
+
+ def _decode_literal_index(self, data):
+ return self._decode_literal(data, True)
+
+ def _decode_literal(self, data, should_index):
+ """
+ Decodes a header represented with a literal.
+ """
+ total_consumed = 0
+
+ # When should_index is true, if the low six bits of the first byte are
+ # nonzero, the header name is indexed.
+ # When should_index is false, if the low four bits of the first byte
+ # are nonzero the header name is indexed.
+ if should_index:
+ indexed_name = to_byte(data[0]) & 0x3F
+ name_len = 6
+ not_indexable = False
+ else:
+ high_byte = to_byte(data[0])
+ indexed_name = high_byte & 0x0F
+ name_len = 4
+ not_indexable = high_byte & 0x10
+
+ if indexed_name:
+ # Indexed header name.
+ index, consumed = decode_integer(data, name_len)
+ name = self.header_table.get_by_index(index)[0]
+
+ total_consumed = consumed
+ length = 0
+ else:
+ # Literal header name. The first byte was consumed, so we need to
+ # move forward.
+ data = data[1:]
+
+ length, consumed = decode_integer(data, 7)
+ name = data[consumed:consumed + length]
+ if len(name) != length:
+ raise HPACKDecodingError("Truncated header block")
+
+ if to_byte(data[0]) & 0x80:
+ name = decode_huffman(name)
+ total_consumed = consumed + length + 1 # Since we moved forward 1.
+
+ data = data[consumed + length:]
+
+ # The header value is definitely length-based.
+ length, consumed = decode_integer(data, 7)
+ value = data[consumed:consumed + length]
+ if len(value) != length:
+ raise HPACKDecodingError("Truncated header block")
+
+ if to_byte(data[0]) & 0x80:
+ value = decode_huffman(value)
+
+        # Update the total consumed length.
+ total_consumed += length + consumed
+
+ # If we have been told never to index the header field, encode that in
+ # the tuple we use.
+ if not_indexable:
+ header = NeverIndexedHeaderTuple(name, value)
+ else:
+ header = HeaderTuple(name, value)
+
+ # If we've been asked to index this, add it to the header table.
+ if should_index:
+ self.header_table.add(name, value)
+
+ log.debug(
+ "Decoded %s, total consumed %d bytes, indexed %s",
+ header,
+ total_consumed,
+ should_index
+ )
+
+ return header, total_consumed
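
For orientation, here is a minimal round-trip sketch of the Encoder/Decoder API added above. It assumes the vendored package is importable as ``hpack``; the header names and values are purely illustrative.

    from hpack import Decoder, Encoder

    encoder = Encoder()
    decoder = Decoder()

    # Pseudo-headers (names starting with ':') must come first. A three-tuple
    # marks a value as sensitive, so it is emitted as a never-indexed literal
    # and kept out of the header tables.
    headers = [
        (':method', 'GET'),
        (':path', '/'),
        ('cookie', 'secret=1', True),
    ]

    block = encoder.encode(headers, huffman=True)

    # decode() returns two-tuples of (name, value) as text by default;
    # raw=True would return the undecoded byte strings instead.
    print(decoder.decode(block))
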
diff --git a/testing/web-platform/tests/tools/third_party/hpack/hpack/huffman.py b/testing/web-platform/tests/tools/third_party/hpack/hpack/huffman.py
new file mode 100644
index 0000000000..159569cf63
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/hpack/huffman.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+"""
+hpack/huffman
+~~~~~~~~~~~~~
+
+An implementation of the Huffman encoder used for HPACK. The matching decoder
+lives in ``huffman_table.py``.
+"""
+from .compat import to_byte, decode_hex
+
+
+class HuffmanEncoder(object):
+ """
+ Encodes a string according to the Huffman encoding table defined in the
+ HPACK specification.
+ """
+ def __init__(self, huffman_code_list, huffman_code_list_lengths):
+ self.huffman_code_list = huffman_code_list
+ self.huffman_code_list_lengths = huffman_code_list_lengths
+
+ def encode(self, bytes_to_encode):
+ """
+ Given a string of bytes, encodes them according to the HPACK Huffman
+ specification.
+ """
+ # If handed the empty string, just immediately return.
+ if not bytes_to_encode:
+ return b''
+
+ final_num = 0
+ final_int_len = 0
+
+ # Turn each byte into its huffman code. These codes aren't necessarily
+ # octet aligned, so keep track of how far through an octet we are. To
+ # handle this cleanly, just use a single giant integer.
+ for char in bytes_to_encode:
+ byte = to_byte(char)
+ bin_int_len = self.huffman_code_list_lengths[byte]
+ bin_int = self.huffman_code_list[byte] & (
+ 2 ** (bin_int_len + 1) - 1
+ )
+ final_num <<= bin_int_len
+ final_num |= bin_int
+ final_int_len += bin_int_len
+
+ # Pad out to an octet with ones.
+ bits_to_be_padded = (8 - (final_int_len % 8)) % 8
+ final_num <<= bits_to_be_padded
+ final_num |= (1 << bits_to_be_padded) - 1
+
+ # Convert the number to hex and strip off the leading '0x' and the
+ # trailing 'L', if present.
+ final_num = hex(final_num)[2:].rstrip('L')
+
+ # If this is odd, prepend a zero.
+ final_num = '0' + final_num if len(final_num) % 2 != 0 else final_num
+
+ # This number should have twice as many digits as bytes. If not, we're
+ # missing some leading zeroes. Work out how many bytes we want and how
+ # many digits we have, then add the missing zero digits to the front.
+ total_bytes = (final_int_len + bits_to_be_padded) // 8
+ expected_digits = total_bytes * 2
+
+ if len(final_num) != expected_digits:
+ missing_digits = expected_digits - len(final_num)
+ final_num = ('0' * missing_digits) + final_num
+
+ return decode_hex(final_num)
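
As a quick sanity check of the encoder above, the sketch below wires ``HuffmanEncoder`` up to the code tables from ``huffman_constants.py`` (added in the next hunk) and encodes a value taken from the RFC 7541 examples; the expected bytes come from that RFC, not from anything in this patch.

    from binascii import unhexlify

    from hpack.huffman import HuffmanEncoder
    from hpack.huffman_constants import REQUEST_CODES, REQUEST_CODES_LENGTH

    encoder = HuffmanEncoder(REQUEST_CODES, REQUEST_CODES_LENGTH)
    encoded = encoder.encode(b'www.example.com')

    # RFC 7541, Appendix C.4.1 lists this 12-byte Huffman encoding.
    assert encoded == unhexlify('f1e3c2e5f23a6ba0ab90f4ff')
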
diff --git a/testing/web-platform/tests/tools/third_party/hpack/hpack/huffman_constants.py b/testing/web-platform/tests/tools/third_party/hpack/hpack/huffman_constants.py
new file mode 100644
index 0000000000..c2b3bb283e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/hpack/huffman_constants.py
@@ -0,0 +1,288 @@
+# -*- coding: utf-8 -*-
+"""
+hpack/huffman_constants
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Defines the constant Huffman table. This takes up an upsetting amount of space,
+but c'est la vie.
+"""
+
+REQUEST_CODES = [
+ 0x1ff8,
+ 0x7fffd8,
+ 0xfffffe2,
+ 0xfffffe3,
+ 0xfffffe4,
+ 0xfffffe5,
+ 0xfffffe6,
+ 0xfffffe7,
+ 0xfffffe8,
+ 0xffffea,
+ 0x3ffffffc,
+ 0xfffffe9,
+ 0xfffffea,
+ 0x3ffffffd,
+ 0xfffffeb,
+ 0xfffffec,
+ 0xfffffed,
+ 0xfffffee,
+ 0xfffffef,
+ 0xffffff0,
+ 0xffffff1,
+ 0xffffff2,
+ 0x3ffffffe,
+ 0xffffff3,
+ 0xffffff4,
+ 0xffffff5,
+ 0xffffff6,
+ 0xffffff7,
+ 0xffffff8,
+ 0xffffff9,
+ 0xffffffa,
+ 0xffffffb,
+ 0x14,
+ 0x3f8,
+ 0x3f9,
+ 0xffa,
+ 0x1ff9,
+ 0x15,
+ 0xf8,
+ 0x7fa,
+ 0x3fa,
+ 0x3fb,
+ 0xf9,
+ 0x7fb,
+ 0xfa,
+ 0x16,
+ 0x17,
+ 0x18,
+ 0x0,
+ 0x1,
+ 0x2,
+ 0x19,
+ 0x1a,
+ 0x1b,
+ 0x1c,
+ 0x1d,
+ 0x1e,
+ 0x1f,
+ 0x5c,
+ 0xfb,
+ 0x7ffc,
+ 0x20,
+ 0xffb,
+ 0x3fc,
+ 0x1ffa,
+ 0x21,
+ 0x5d,
+ 0x5e,
+ 0x5f,
+ 0x60,
+ 0x61,
+ 0x62,
+ 0x63,
+ 0x64,
+ 0x65,
+ 0x66,
+ 0x67,
+ 0x68,
+ 0x69,
+ 0x6a,
+ 0x6b,
+ 0x6c,
+ 0x6d,
+ 0x6e,
+ 0x6f,
+ 0x70,
+ 0x71,
+ 0x72,
+ 0xfc,
+ 0x73,
+ 0xfd,
+ 0x1ffb,
+ 0x7fff0,
+ 0x1ffc,
+ 0x3ffc,
+ 0x22,
+ 0x7ffd,
+ 0x3,
+ 0x23,
+ 0x4,
+ 0x24,
+ 0x5,
+ 0x25,
+ 0x26,
+ 0x27,
+ 0x6,
+ 0x74,
+ 0x75,
+ 0x28,
+ 0x29,
+ 0x2a,
+ 0x7,
+ 0x2b,
+ 0x76,
+ 0x2c,
+ 0x8,
+ 0x9,
+ 0x2d,
+ 0x77,
+ 0x78,
+ 0x79,
+ 0x7a,
+ 0x7b,
+ 0x7ffe,
+ 0x7fc,
+ 0x3ffd,
+ 0x1ffd,
+ 0xffffffc,
+ 0xfffe6,
+ 0x3fffd2,
+ 0xfffe7,
+ 0xfffe8,
+ 0x3fffd3,
+ 0x3fffd4,
+ 0x3fffd5,
+ 0x7fffd9,
+ 0x3fffd6,
+ 0x7fffda,
+ 0x7fffdb,
+ 0x7fffdc,
+ 0x7fffdd,
+ 0x7fffde,
+ 0xffffeb,
+ 0x7fffdf,
+ 0xffffec,
+ 0xffffed,
+ 0x3fffd7,
+ 0x7fffe0,
+ 0xffffee,
+ 0x7fffe1,
+ 0x7fffe2,
+ 0x7fffe3,
+ 0x7fffe4,
+ 0x1fffdc,
+ 0x3fffd8,
+ 0x7fffe5,
+ 0x3fffd9,
+ 0x7fffe6,
+ 0x7fffe7,
+ 0xffffef,
+ 0x3fffda,
+ 0x1fffdd,
+ 0xfffe9,
+ 0x3fffdb,
+ 0x3fffdc,
+ 0x7fffe8,
+ 0x7fffe9,
+ 0x1fffde,
+ 0x7fffea,
+ 0x3fffdd,
+ 0x3fffde,
+ 0xfffff0,
+ 0x1fffdf,
+ 0x3fffdf,
+ 0x7fffeb,
+ 0x7fffec,
+ 0x1fffe0,
+ 0x1fffe1,
+ 0x3fffe0,
+ 0x1fffe2,
+ 0x7fffed,
+ 0x3fffe1,
+ 0x7fffee,
+ 0x7fffef,
+ 0xfffea,
+ 0x3fffe2,
+ 0x3fffe3,
+ 0x3fffe4,
+ 0x7ffff0,
+ 0x3fffe5,
+ 0x3fffe6,
+ 0x7ffff1,
+ 0x3ffffe0,
+ 0x3ffffe1,
+ 0xfffeb,
+ 0x7fff1,
+ 0x3fffe7,
+ 0x7ffff2,
+ 0x3fffe8,
+ 0x1ffffec,
+ 0x3ffffe2,
+ 0x3ffffe3,
+ 0x3ffffe4,
+ 0x7ffffde,
+ 0x7ffffdf,
+ 0x3ffffe5,
+ 0xfffff1,
+ 0x1ffffed,
+ 0x7fff2,
+ 0x1fffe3,
+ 0x3ffffe6,
+ 0x7ffffe0,
+ 0x7ffffe1,
+ 0x3ffffe7,
+ 0x7ffffe2,
+ 0xfffff2,
+ 0x1fffe4,
+ 0x1fffe5,
+ 0x3ffffe8,
+ 0x3ffffe9,
+ 0xffffffd,
+ 0x7ffffe3,
+ 0x7ffffe4,
+ 0x7ffffe5,
+ 0xfffec,
+ 0xfffff3,
+ 0xfffed,
+ 0x1fffe6,
+ 0x3fffe9,
+ 0x1fffe7,
+ 0x1fffe8,
+ 0x7ffff3,
+ 0x3fffea,
+ 0x3fffeb,
+ 0x1ffffee,
+ 0x1ffffef,
+ 0xfffff4,
+ 0xfffff5,
+ 0x3ffffea,
+ 0x7ffff4,
+ 0x3ffffeb,
+ 0x7ffffe6,
+ 0x3ffffec,
+ 0x3ffffed,
+ 0x7ffffe7,
+ 0x7ffffe8,
+ 0x7ffffe9,
+ 0x7ffffea,
+ 0x7ffffeb,
+ 0xffffffe,
+ 0x7ffffec,
+ 0x7ffffed,
+ 0x7ffffee,
+ 0x7ffffef,
+ 0x7fffff0,
+ 0x3ffffee,
+ 0x3fffffff,
+]
+
+REQUEST_CODES_LENGTH = [
+ 13, 23, 28, 28, 28, 28, 28, 28, 28, 24, 30, 28, 28, 30, 28, 28,
+ 28, 28, 28, 28, 28, 28, 30, 28, 28, 28, 28, 28, 28, 28, 28, 28,
+ 6, 10, 10, 12, 13, 6, 8, 11, 10, 10, 8, 11, 8, 6, 6, 6,
+ 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 7, 8, 15, 6, 12, 10,
+ 13, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 8, 7, 8, 13, 19, 13, 14, 6,
+ 15, 5, 6, 5, 6, 5, 6, 6, 6, 5, 7, 7, 6, 6, 6, 5,
+ 6, 7, 6, 5, 5, 6, 7, 7, 7, 7, 7, 15, 11, 14, 13, 28,
+ 20, 22, 20, 20, 22, 22, 22, 23, 22, 23, 23, 23, 23, 23, 24, 23,
+ 24, 24, 22, 23, 24, 23, 23, 23, 23, 21, 22, 23, 22, 23, 23, 24,
+ 22, 21, 20, 22, 22, 23, 23, 21, 23, 22, 22, 24, 21, 22, 23, 23,
+ 21, 21, 22, 21, 23, 22, 23, 23, 20, 22, 22, 22, 23, 22, 22, 23,
+ 26, 26, 20, 19, 22, 23, 22, 25, 26, 26, 26, 27, 27, 26, 24, 25,
+ 19, 21, 26, 27, 27, 26, 27, 24, 21, 21, 26, 26, 28, 27, 27, 27,
+ 20, 24, 20, 21, 22, 21, 21, 23, 22, 22, 25, 25, 24, 24, 26, 23,
+ 26, 27, 26, 26, 27, 27, 27, 27, 27, 28, 27, 27, 27, 27, 27, 26,
+ 30,
+]
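
The two lists are parallel: ``REQUEST_CODES_LENGTH[i]`` is the bit length of the code stored in ``REQUEST_CODES[i]``, which is what ``HuffmanEncoder`` relies on when it shifts and masks each code. A throwaway consistency check, purely illustrative:

    from hpack.huffman_constants import REQUEST_CODES, REQUEST_CODES_LENGTH

    # 256 symbol codes plus the EOS code, and every code fits in its length.
    assert len(REQUEST_CODES) == len(REQUEST_CODES_LENGTH) == 257
    assert all(
        code < (1 << length)
        for code, length in zip(REQUEST_CODES, REQUEST_CODES_LENGTH)
    )
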
diff --git a/testing/web-platform/tests/tools/third_party/hpack/hpack/huffman_table.py b/testing/web-platform/tests/tools/third_party/hpack/hpack/huffman_table.py
new file mode 100644
index 0000000000..c199ef5a3f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/hpack/huffman_table.py
@@ -0,0 +1,4739 @@
+# -*- coding: utf-8 -*-
+"""
+hpack/huffman_table
+~~~~~~~~~~~~~~~~~~~
+
+This implementation of a Huffman decoding table for HTTP/2 is essentially a
+Python port of the work originally done for nghttp2's Huffman decoding. For
+this reason, while this file is made available under the MIT license as is the
+rest of this module, this file is undoubtedly a derivative work of the nghttp2
+file ``nghttp2_hd_huffman_data.c``, obtained from
+https://github.com/tatsuhiro-t/nghttp2/ at commit
+d2b55ad1a245e1d1964579fa3fac36ebf3939e72. That work is made available under
+the Apache 2.0 license under the following terms:
+
+ Copyright (c) 2013 Tatsuhiro Tsujikawa
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+The essence of this approach is that it builds a finite state machine out of
+4-bit nibbles of Huffman coded data. The input function passes 4 bits worth of
+data to the state machine each time, which uses those 4 bits of data along with
+the current accumulated state data to process the data given.
+
+For the sake of efficiency, the states, transitions, and result values of the
+state machine are represented in memory as a long list of three-tuples. This
+list is enormously long, and viewing it raw is not very illuminating, but it
+is laid out here in a way that is intended to be *somewhat* clearer.
+
+Essentially, the list is structured as 256 collections of 16 entries (one for
+each nibble) of three-tuples. Each collection is called a "node", and the
+zeroth collection is called the "root node". The state machine tracks one
+value: the "state" byte.
+
+For each nibble passed to the state machine, it first multiplies the "state"
+byte by 16 and adds the numerical value of the nibble. This number is the index
+into the large flat list.
+
+The three-tuple that is found by looking up that index consists of three
+values:
+
+- a new state value, used for subsequent decoding
+- a collection of flags, used to determine whether data is emitted or whether
+ the state machine is complete.
+- the byte value to emit, assuming that emitting a byte is required.
+
+The flags are consulted, if necessary a byte is emitted, and then the next
+nibble is used. This continues until the state machine believes it has
+completely Huffman-decoded the data.
+
+This approach has relatively little indirection, and therefore performs
+relatively well, particularly on implementations like PyPy where the cost of
+Python-level loops is not too high. The total number of loop
+iterations is 4x the number of bytes passed to the decoder.
+"""
+from .exceptions import HPACKDecodingError
+
+
+# This defines the state machine "class" at the top of the file. The reason we
+# do this is to keep the terrifying monster state table at the *bottom* of the
+# file so you don't have to actually *look* at the damn thing.
+def decode_huffman(huffman_string):
+ """
+ Given a bytestring of Huffman-encoded data for HPACK, returns a bytestring
+ of the decompressed data.
+ """
+ if not huffman_string:
+ return b''
+
+ state = 0
+ flags = 0
+ decoded_bytes = bytearray()
+
+ # Perversely, bytearrays are a lot more convenient across Python 2 and
+ # Python 3 because they behave *the same way* on both platforms. Given that
+ # we really do want numerical bytes when we iterate here, let's use a
+ # bytearray.
+ huffman_string = bytearray(huffman_string)
+
+ # This loop is unrolled somewhat. Because we use a nibble, not a byte, we
+ # need to handle each nibble twice. We unroll that: it makes the loop body
+ # a bit longer, but that's ok.
+ for input_byte in huffman_string:
+ index = (state * 16) + (input_byte >> 4)
+ state, flags, output_byte = HUFFMAN_TABLE[index]
+
+ if flags & HUFFMAN_FAIL:
+ raise HPACKDecodingError("Invalid Huffman String")
+
+ if flags & HUFFMAN_EMIT_SYMBOL:
+ decoded_bytes.append(output_byte)
+
+ index = (state * 16) + (input_byte & 0x0F)
+ state, flags, output_byte = HUFFMAN_TABLE[index]
+
+ if flags & HUFFMAN_FAIL:
+ raise HPACKDecodingError("Invalid Huffman String")
+
+ if flags & HUFFMAN_EMIT_SYMBOL:
+ decoded_bytes.append(output_byte)
+
+ if not (flags & HUFFMAN_COMPLETE):
+ raise HPACKDecodingError("Incomplete Huffman string")
+
+ return bytes(decoded_bytes)
+
+
+# Some decoder flags to control state transitions.
+HUFFMAN_COMPLETE = 1
+HUFFMAN_EMIT_SYMBOL = (1 << 1)
+HUFFMAN_FAIL = (1 << 2)
+
+# This is the monster table. Avert your eyes, children.
+HUFFMAN_TABLE = [
+ # Node 0 (Root Node, never emits symbols.)
+ (4, 0, 0),
+ (5, 0, 0),
+ (7, 0, 0),
+ (8, 0, 0),
+ (11, 0, 0),
+ (12, 0, 0),
+ (16, 0, 0),
+ (19, 0, 0),
+ (25, 0, 0),
+ (28, 0, 0),
+ (32, 0, 0),
+ (35, 0, 0),
+ (42, 0, 0),
+ (49, 0, 0),
+ (57, 0, 0),
+ (64, HUFFMAN_COMPLETE, 0),
+
+ # Node 1
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 48),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 49),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 50),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 97),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 99),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 101),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 105),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 111),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 115),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 116),
+ (13, 0, 0),
+ (14, 0, 0),
+ (17, 0, 0),
+ (18, 0, 0),
+ (20, 0, 0),
+ (21, 0, 0),
+
+ # Node 2
+ (1, HUFFMAN_EMIT_SYMBOL, 48),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 48),
+ (1, HUFFMAN_EMIT_SYMBOL, 49),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 49),
+ (1, HUFFMAN_EMIT_SYMBOL, 50),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 50),
+ (1, HUFFMAN_EMIT_SYMBOL, 97),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 97),
+ (1, HUFFMAN_EMIT_SYMBOL, 99),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 99),
+ (1, HUFFMAN_EMIT_SYMBOL, 101),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 101),
+ (1, HUFFMAN_EMIT_SYMBOL, 105),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 105),
+ (1, HUFFMAN_EMIT_SYMBOL, 111),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 111),
+
+ # Node 3
+ (2, HUFFMAN_EMIT_SYMBOL, 48),
+ (9, HUFFMAN_EMIT_SYMBOL, 48),
+ (23, HUFFMAN_EMIT_SYMBOL, 48),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 48),
+ (2, HUFFMAN_EMIT_SYMBOL, 49),
+ (9, HUFFMAN_EMIT_SYMBOL, 49),
+ (23, HUFFMAN_EMIT_SYMBOL, 49),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 49),
+ (2, HUFFMAN_EMIT_SYMBOL, 50),
+ (9, HUFFMAN_EMIT_SYMBOL, 50),
+ (23, HUFFMAN_EMIT_SYMBOL, 50),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 50),
+ (2, HUFFMAN_EMIT_SYMBOL, 97),
+ (9, HUFFMAN_EMIT_SYMBOL, 97),
+ (23, HUFFMAN_EMIT_SYMBOL, 97),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 97),
+
+ # Node 4
+ (3, HUFFMAN_EMIT_SYMBOL, 48),
+ (6, HUFFMAN_EMIT_SYMBOL, 48),
+ (10, HUFFMAN_EMIT_SYMBOL, 48),
+ (15, HUFFMAN_EMIT_SYMBOL, 48),
+ (24, HUFFMAN_EMIT_SYMBOL, 48),
+ (31, HUFFMAN_EMIT_SYMBOL, 48),
+ (41, HUFFMAN_EMIT_SYMBOL, 48),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 48),
+ (3, HUFFMAN_EMIT_SYMBOL, 49),
+ (6, HUFFMAN_EMIT_SYMBOL, 49),
+ (10, HUFFMAN_EMIT_SYMBOL, 49),
+ (15, HUFFMAN_EMIT_SYMBOL, 49),
+ (24, HUFFMAN_EMIT_SYMBOL, 49),
+ (31, HUFFMAN_EMIT_SYMBOL, 49),
+ (41, HUFFMAN_EMIT_SYMBOL, 49),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 49),
+
+ # Node 5
+ (3, HUFFMAN_EMIT_SYMBOL, 50),
+ (6, HUFFMAN_EMIT_SYMBOL, 50),
+ (10, HUFFMAN_EMIT_SYMBOL, 50),
+ (15, HUFFMAN_EMIT_SYMBOL, 50),
+ (24, HUFFMAN_EMIT_SYMBOL, 50),
+ (31, HUFFMAN_EMIT_SYMBOL, 50),
+ (41, HUFFMAN_EMIT_SYMBOL, 50),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 50),
+ (3, HUFFMAN_EMIT_SYMBOL, 97),
+ (6, HUFFMAN_EMIT_SYMBOL, 97),
+ (10, HUFFMAN_EMIT_SYMBOL, 97),
+ (15, HUFFMAN_EMIT_SYMBOL, 97),
+ (24, HUFFMAN_EMIT_SYMBOL, 97),
+ (31, HUFFMAN_EMIT_SYMBOL, 97),
+ (41, HUFFMAN_EMIT_SYMBOL, 97),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 97),
+
+ # Node 6
+ (2, HUFFMAN_EMIT_SYMBOL, 99),
+ (9, HUFFMAN_EMIT_SYMBOL, 99),
+ (23, HUFFMAN_EMIT_SYMBOL, 99),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 99),
+ (2, HUFFMAN_EMIT_SYMBOL, 101),
+ (9, HUFFMAN_EMIT_SYMBOL, 101),
+ (23, HUFFMAN_EMIT_SYMBOL, 101),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 101),
+ (2, HUFFMAN_EMIT_SYMBOL, 105),
+ (9, HUFFMAN_EMIT_SYMBOL, 105),
+ (23, HUFFMAN_EMIT_SYMBOL, 105),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 105),
+ (2, HUFFMAN_EMIT_SYMBOL, 111),
+ (9, HUFFMAN_EMIT_SYMBOL, 111),
+ (23, HUFFMAN_EMIT_SYMBOL, 111),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 111),
+
+ # Node 7
+ (3, HUFFMAN_EMIT_SYMBOL, 99),
+ (6, HUFFMAN_EMIT_SYMBOL, 99),
+ (10, HUFFMAN_EMIT_SYMBOL, 99),
+ (15, HUFFMAN_EMIT_SYMBOL, 99),
+ (24, HUFFMAN_EMIT_SYMBOL, 99),
+ (31, HUFFMAN_EMIT_SYMBOL, 99),
+ (41, HUFFMAN_EMIT_SYMBOL, 99),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 99),
+ (3, HUFFMAN_EMIT_SYMBOL, 101),
+ (6, HUFFMAN_EMIT_SYMBOL, 101),
+ (10, HUFFMAN_EMIT_SYMBOL, 101),
+ (15, HUFFMAN_EMIT_SYMBOL, 101),
+ (24, HUFFMAN_EMIT_SYMBOL, 101),
+ (31, HUFFMAN_EMIT_SYMBOL, 101),
+ (41, HUFFMAN_EMIT_SYMBOL, 101),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 101),
+
+ # Node 8
+ (3, HUFFMAN_EMIT_SYMBOL, 105),
+ (6, HUFFMAN_EMIT_SYMBOL, 105),
+ (10, HUFFMAN_EMIT_SYMBOL, 105),
+ (15, HUFFMAN_EMIT_SYMBOL, 105),
+ (24, HUFFMAN_EMIT_SYMBOL, 105),
+ (31, HUFFMAN_EMIT_SYMBOL, 105),
+ (41, HUFFMAN_EMIT_SYMBOL, 105),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 105),
+ (3, HUFFMAN_EMIT_SYMBOL, 111),
+ (6, HUFFMAN_EMIT_SYMBOL, 111),
+ (10, HUFFMAN_EMIT_SYMBOL, 111),
+ (15, HUFFMAN_EMIT_SYMBOL, 111),
+ (24, HUFFMAN_EMIT_SYMBOL, 111),
+ (31, HUFFMAN_EMIT_SYMBOL, 111),
+ (41, HUFFMAN_EMIT_SYMBOL, 111),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 111),
+
+ # Node 9
+ (1, HUFFMAN_EMIT_SYMBOL, 115),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 115),
+ (1, HUFFMAN_EMIT_SYMBOL, 116),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 116),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 32),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 37),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 45),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 46),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 47),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 51),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 52),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 53),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 54),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 55),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 56),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 57),
+
+ # Node 10
+ (2, HUFFMAN_EMIT_SYMBOL, 115),
+ (9, HUFFMAN_EMIT_SYMBOL, 115),
+ (23, HUFFMAN_EMIT_SYMBOL, 115),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 115),
+ (2, HUFFMAN_EMIT_SYMBOL, 116),
+ (9, HUFFMAN_EMIT_SYMBOL, 116),
+ (23, HUFFMAN_EMIT_SYMBOL, 116),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 116),
+ (1, HUFFMAN_EMIT_SYMBOL, 32),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 32),
+ (1, HUFFMAN_EMIT_SYMBOL, 37),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 37),
+ (1, HUFFMAN_EMIT_SYMBOL, 45),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 45),
+ (1, HUFFMAN_EMIT_SYMBOL, 46),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 46),
+
+ # Node 11
+ (3, HUFFMAN_EMIT_SYMBOL, 115),
+ (6, HUFFMAN_EMIT_SYMBOL, 115),
+ (10, HUFFMAN_EMIT_SYMBOL, 115),
+ (15, HUFFMAN_EMIT_SYMBOL, 115),
+ (24, HUFFMAN_EMIT_SYMBOL, 115),
+ (31, HUFFMAN_EMIT_SYMBOL, 115),
+ (41, HUFFMAN_EMIT_SYMBOL, 115),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 115),
+ (3, HUFFMAN_EMIT_SYMBOL, 116),
+ (6, HUFFMAN_EMIT_SYMBOL, 116),
+ (10, HUFFMAN_EMIT_SYMBOL, 116),
+ (15, HUFFMAN_EMIT_SYMBOL, 116),
+ (24, HUFFMAN_EMIT_SYMBOL, 116),
+ (31, HUFFMAN_EMIT_SYMBOL, 116),
+ (41, HUFFMAN_EMIT_SYMBOL, 116),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 116),
+
+ # Node 12
+ (2, HUFFMAN_EMIT_SYMBOL, 32),
+ (9, HUFFMAN_EMIT_SYMBOL, 32),
+ (23, HUFFMAN_EMIT_SYMBOL, 32),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 32),
+ (2, HUFFMAN_EMIT_SYMBOL, 37),
+ (9, HUFFMAN_EMIT_SYMBOL, 37),
+ (23, HUFFMAN_EMIT_SYMBOL, 37),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 37),
+ (2, HUFFMAN_EMIT_SYMBOL, 45),
+ (9, HUFFMAN_EMIT_SYMBOL, 45),
+ (23, HUFFMAN_EMIT_SYMBOL, 45),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 45),
+ (2, HUFFMAN_EMIT_SYMBOL, 46),
+ (9, HUFFMAN_EMIT_SYMBOL, 46),
+ (23, HUFFMAN_EMIT_SYMBOL, 46),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 46),
+
+ # Node 13
+ (3, HUFFMAN_EMIT_SYMBOL, 32),
+ (6, HUFFMAN_EMIT_SYMBOL, 32),
+ (10, HUFFMAN_EMIT_SYMBOL, 32),
+ (15, HUFFMAN_EMIT_SYMBOL, 32),
+ (24, HUFFMAN_EMIT_SYMBOL, 32),
+ (31, HUFFMAN_EMIT_SYMBOL, 32),
+ (41, HUFFMAN_EMIT_SYMBOL, 32),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 32),
+ (3, HUFFMAN_EMIT_SYMBOL, 37),
+ (6, HUFFMAN_EMIT_SYMBOL, 37),
+ (10, HUFFMAN_EMIT_SYMBOL, 37),
+ (15, HUFFMAN_EMIT_SYMBOL, 37),
+ (24, HUFFMAN_EMIT_SYMBOL, 37),
+ (31, HUFFMAN_EMIT_SYMBOL, 37),
+ (41, HUFFMAN_EMIT_SYMBOL, 37),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 37),
+
+ # Node 14
+ (3, HUFFMAN_EMIT_SYMBOL, 45),
+ (6, HUFFMAN_EMIT_SYMBOL, 45),
+ (10, HUFFMAN_EMIT_SYMBOL, 45),
+ (15, HUFFMAN_EMIT_SYMBOL, 45),
+ (24, HUFFMAN_EMIT_SYMBOL, 45),
+ (31, HUFFMAN_EMIT_SYMBOL, 45),
+ (41, HUFFMAN_EMIT_SYMBOL, 45),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 45),
+ (3, HUFFMAN_EMIT_SYMBOL, 46),
+ (6, HUFFMAN_EMIT_SYMBOL, 46),
+ (10, HUFFMAN_EMIT_SYMBOL, 46),
+ (15, HUFFMAN_EMIT_SYMBOL, 46),
+ (24, HUFFMAN_EMIT_SYMBOL, 46),
+ (31, HUFFMAN_EMIT_SYMBOL, 46),
+ (41, HUFFMAN_EMIT_SYMBOL, 46),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 46),
+
+ # Node 15
+ (1, HUFFMAN_EMIT_SYMBOL, 47),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 47),
+ (1, HUFFMAN_EMIT_SYMBOL, 51),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 51),
+ (1, HUFFMAN_EMIT_SYMBOL, 52),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 52),
+ (1, HUFFMAN_EMIT_SYMBOL, 53),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 53),
+ (1, HUFFMAN_EMIT_SYMBOL, 54),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 54),
+ (1, HUFFMAN_EMIT_SYMBOL, 55),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 55),
+ (1, HUFFMAN_EMIT_SYMBOL, 56),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 56),
+ (1, HUFFMAN_EMIT_SYMBOL, 57),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 57),
+
+ # Node 16
+ (2, HUFFMAN_EMIT_SYMBOL, 47),
+ (9, HUFFMAN_EMIT_SYMBOL, 47),
+ (23, HUFFMAN_EMIT_SYMBOL, 47),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 47),
+ (2, HUFFMAN_EMIT_SYMBOL, 51),
+ (9, HUFFMAN_EMIT_SYMBOL, 51),
+ (23, HUFFMAN_EMIT_SYMBOL, 51),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 51),
+ (2, HUFFMAN_EMIT_SYMBOL, 52),
+ (9, HUFFMAN_EMIT_SYMBOL, 52),
+ (23, HUFFMAN_EMIT_SYMBOL, 52),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 52),
+ (2, HUFFMAN_EMIT_SYMBOL, 53),
+ (9, HUFFMAN_EMIT_SYMBOL, 53),
+ (23, HUFFMAN_EMIT_SYMBOL, 53),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 53),
+
+ # Node 17
+ (3, HUFFMAN_EMIT_SYMBOL, 47),
+ (6, HUFFMAN_EMIT_SYMBOL, 47),
+ (10, HUFFMAN_EMIT_SYMBOL, 47),
+ (15, HUFFMAN_EMIT_SYMBOL, 47),
+ (24, HUFFMAN_EMIT_SYMBOL, 47),
+ (31, HUFFMAN_EMIT_SYMBOL, 47),
+ (41, HUFFMAN_EMIT_SYMBOL, 47),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 47),
+ (3, HUFFMAN_EMIT_SYMBOL, 51),
+ (6, HUFFMAN_EMIT_SYMBOL, 51),
+ (10, HUFFMAN_EMIT_SYMBOL, 51),
+ (15, HUFFMAN_EMIT_SYMBOL, 51),
+ (24, HUFFMAN_EMIT_SYMBOL, 51),
+ (31, HUFFMAN_EMIT_SYMBOL, 51),
+ (41, HUFFMAN_EMIT_SYMBOL, 51),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 51),
+
+ # Node 18
+ (3, HUFFMAN_EMIT_SYMBOL, 52),
+ (6, HUFFMAN_EMIT_SYMBOL, 52),
+ (10, HUFFMAN_EMIT_SYMBOL, 52),
+ (15, HUFFMAN_EMIT_SYMBOL, 52),
+ (24, HUFFMAN_EMIT_SYMBOL, 52),
+ (31, HUFFMAN_EMIT_SYMBOL, 52),
+ (41, HUFFMAN_EMIT_SYMBOL, 52),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 52),
+ (3, HUFFMAN_EMIT_SYMBOL, 53),
+ (6, HUFFMAN_EMIT_SYMBOL, 53),
+ (10, HUFFMAN_EMIT_SYMBOL, 53),
+ (15, HUFFMAN_EMIT_SYMBOL, 53),
+ (24, HUFFMAN_EMIT_SYMBOL, 53),
+ (31, HUFFMAN_EMIT_SYMBOL, 53),
+ (41, HUFFMAN_EMIT_SYMBOL, 53),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 53),
+
+ # Node 19
+ (2, HUFFMAN_EMIT_SYMBOL, 54),
+ (9, HUFFMAN_EMIT_SYMBOL, 54),
+ (23, HUFFMAN_EMIT_SYMBOL, 54),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 54),
+ (2, HUFFMAN_EMIT_SYMBOL, 55),
+ (9, HUFFMAN_EMIT_SYMBOL, 55),
+ (23, HUFFMAN_EMIT_SYMBOL, 55),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 55),
+ (2, HUFFMAN_EMIT_SYMBOL, 56),
+ (9, HUFFMAN_EMIT_SYMBOL, 56),
+ (23, HUFFMAN_EMIT_SYMBOL, 56),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 56),
+ (2, HUFFMAN_EMIT_SYMBOL, 57),
+ (9, HUFFMAN_EMIT_SYMBOL, 57),
+ (23, HUFFMAN_EMIT_SYMBOL, 57),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 57),
+
+ # Node 20
+ (3, HUFFMAN_EMIT_SYMBOL, 54),
+ (6, HUFFMAN_EMIT_SYMBOL, 54),
+ (10, HUFFMAN_EMIT_SYMBOL, 54),
+ (15, HUFFMAN_EMIT_SYMBOL, 54),
+ (24, HUFFMAN_EMIT_SYMBOL, 54),
+ (31, HUFFMAN_EMIT_SYMBOL, 54),
+ (41, HUFFMAN_EMIT_SYMBOL, 54),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 54),
+ (3, HUFFMAN_EMIT_SYMBOL, 55),
+ (6, HUFFMAN_EMIT_SYMBOL, 55),
+ (10, HUFFMAN_EMIT_SYMBOL, 55),
+ (15, HUFFMAN_EMIT_SYMBOL, 55),
+ (24, HUFFMAN_EMIT_SYMBOL, 55),
+ (31, HUFFMAN_EMIT_SYMBOL, 55),
+ (41, HUFFMAN_EMIT_SYMBOL, 55),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 55),
+
+ # Node 21
+ (3, HUFFMAN_EMIT_SYMBOL, 56),
+ (6, HUFFMAN_EMIT_SYMBOL, 56),
+ (10, HUFFMAN_EMIT_SYMBOL, 56),
+ (15, HUFFMAN_EMIT_SYMBOL, 56),
+ (24, HUFFMAN_EMIT_SYMBOL, 56),
+ (31, HUFFMAN_EMIT_SYMBOL, 56),
+ (41, HUFFMAN_EMIT_SYMBOL, 56),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 56),
+ (3, HUFFMAN_EMIT_SYMBOL, 57),
+ (6, HUFFMAN_EMIT_SYMBOL, 57),
+ (10, HUFFMAN_EMIT_SYMBOL, 57),
+ (15, HUFFMAN_EMIT_SYMBOL, 57),
+ (24, HUFFMAN_EMIT_SYMBOL, 57),
+ (31, HUFFMAN_EMIT_SYMBOL, 57),
+ (41, HUFFMAN_EMIT_SYMBOL, 57),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 57),
+
+ # Node 22
+ (26, 0, 0),
+ (27, 0, 0),
+ (29, 0, 0),
+ (30, 0, 0),
+ (33, 0, 0),
+ (34, 0, 0),
+ (36, 0, 0),
+ (37, 0, 0),
+ (43, 0, 0),
+ (46, 0, 0),
+ (50, 0, 0),
+ (53, 0, 0),
+ (58, 0, 0),
+ (61, 0, 0),
+ (65, 0, 0),
+ (68, HUFFMAN_COMPLETE, 0),
+
+ # Node 23
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 61),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 65),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 95),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 98),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 100),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 102),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 103),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 104),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 108),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 109),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 110),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 112),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 114),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 117),
+ (38, 0, 0),
+ (39, 0, 0),
+
+ # Node 24
+ (1, HUFFMAN_EMIT_SYMBOL, 61),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 61),
+ (1, HUFFMAN_EMIT_SYMBOL, 65),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 65),
+ (1, HUFFMAN_EMIT_SYMBOL, 95),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 95),
+ (1, HUFFMAN_EMIT_SYMBOL, 98),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 98),
+ (1, HUFFMAN_EMIT_SYMBOL, 100),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 100),
+ (1, HUFFMAN_EMIT_SYMBOL, 102),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 102),
+ (1, HUFFMAN_EMIT_SYMBOL, 103),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 103),
+ (1, HUFFMAN_EMIT_SYMBOL, 104),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 104),
+
+ # Node 25
+ (2, HUFFMAN_EMIT_SYMBOL, 61),
+ (9, HUFFMAN_EMIT_SYMBOL, 61),
+ (23, HUFFMAN_EMIT_SYMBOL, 61),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 61),
+ (2, HUFFMAN_EMIT_SYMBOL, 65),
+ (9, HUFFMAN_EMIT_SYMBOL, 65),
+ (23, HUFFMAN_EMIT_SYMBOL, 65),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 65),
+ (2, HUFFMAN_EMIT_SYMBOL, 95),
+ (9, HUFFMAN_EMIT_SYMBOL, 95),
+ (23, HUFFMAN_EMIT_SYMBOL, 95),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 95),
+ (2, HUFFMAN_EMIT_SYMBOL, 98),
+ (9, HUFFMAN_EMIT_SYMBOL, 98),
+ (23, HUFFMAN_EMIT_SYMBOL, 98),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 98),
+
+ # Node 26
+ (3, HUFFMAN_EMIT_SYMBOL, 61),
+ (6, HUFFMAN_EMIT_SYMBOL, 61),
+ (10, HUFFMAN_EMIT_SYMBOL, 61),
+ (15, HUFFMAN_EMIT_SYMBOL, 61),
+ (24, HUFFMAN_EMIT_SYMBOL, 61),
+ (31, HUFFMAN_EMIT_SYMBOL, 61),
+ (41, HUFFMAN_EMIT_SYMBOL, 61),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 61),
+ (3, HUFFMAN_EMIT_SYMBOL, 65),
+ (6, HUFFMAN_EMIT_SYMBOL, 65),
+ (10, HUFFMAN_EMIT_SYMBOL, 65),
+ (15, HUFFMAN_EMIT_SYMBOL, 65),
+ (24, HUFFMAN_EMIT_SYMBOL, 65),
+ (31, HUFFMAN_EMIT_SYMBOL, 65),
+ (41, HUFFMAN_EMIT_SYMBOL, 65),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 65),
+
+ # Node 27
+ (3, HUFFMAN_EMIT_SYMBOL, 95),
+ (6, HUFFMAN_EMIT_SYMBOL, 95),
+ (10, HUFFMAN_EMIT_SYMBOL, 95),
+ (15, HUFFMAN_EMIT_SYMBOL, 95),
+ (24, HUFFMAN_EMIT_SYMBOL, 95),
+ (31, HUFFMAN_EMIT_SYMBOL, 95),
+ (41, HUFFMAN_EMIT_SYMBOL, 95),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 95),
+ (3, HUFFMAN_EMIT_SYMBOL, 98),
+ (6, HUFFMAN_EMIT_SYMBOL, 98),
+ (10, HUFFMAN_EMIT_SYMBOL, 98),
+ (15, HUFFMAN_EMIT_SYMBOL, 98),
+ (24, HUFFMAN_EMIT_SYMBOL, 98),
+ (31, HUFFMAN_EMIT_SYMBOL, 98),
+ (41, HUFFMAN_EMIT_SYMBOL, 98),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 98),
+
+ # Node 28
+ (2, HUFFMAN_EMIT_SYMBOL, 100),
+ (9, HUFFMAN_EMIT_SYMBOL, 100),
+ (23, HUFFMAN_EMIT_SYMBOL, 100),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 100),
+ (2, HUFFMAN_EMIT_SYMBOL, 102),
+ (9, HUFFMAN_EMIT_SYMBOL, 102),
+ (23, HUFFMAN_EMIT_SYMBOL, 102),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 102),
+ (2, HUFFMAN_EMIT_SYMBOL, 103),
+ (9, HUFFMAN_EMIT_SYMBOL, 103),
+ (23, HUFFMAN_EMIT_SYMBOL, 103),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 103),
+ (2, HUFFMAN_EMIT_SYMBOL, 104),
+ (9, HUFFMAN_EMIT_SYMBOL, 104),
+ (23, HUFFMAN_EMIT_SYMBOL, 104),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 104),
+
+ # Node 29
+ (3, HUFFMAN_EMIT_SYMBOL, 100),
+ (6, HUFFMAN_EMIT_SYMBOL, 100),
+ (10, HUFFMAN_EMIT_SYMBOL, 100),
+ (15, HUFFMAN_EMIT_SYMBOL, 100),
+ (24, HUFFMAN_EMIT_SYMBOL, 100),
+ (31, HUFFMAN_EMIT_SYMBOL, 100),
+ (41, HUFFMAN_EMIT_SYMBOL, 100),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 100),
+ (3, HUFFMAN_EMIT_SYMBOL, 102),
+ (6, HUFFMAN_EMIT_SYMBOL, 102),
+ (10, HUFFMAN_EMIT_SYMBOL, 102),
+ (15, HUFFMAN_EMIT_SYMBOL, 102),
+ (24, HUFFMAN_EMIT_SYMBOL, 102),
+ (31, HUFFMAN_EMIT_SYMBOL, 102),
+ (41, HUFFMAN_EMIT_SYMBOL, 102),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 102),
+
+ # Node 30
+ (3, HUFFMAN_EMIT_SYMBOL, 103),
+ (6, HUFFMAN_EMIT_SYMBOL, 103),
+ (10, HUFFMAN_EMIT_SYMBOL, 103),
+ (15, HUFFMAN_EMIT_SYMBOL, 103),
+ (24, HUFFMAN_EMIT_SYMBOL, 103),
+ (31, HUFFMAN_EMIT_SYMBOL, 103),
+ (41, HUFFMAN_EMIT_SYMBOL, 103),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 103),
+ (3, HUFFMAN_EMIT_SYMBOL, 104),
+ (6, HUFFMAN_EMIT_SYMBOL, 104),
+ (10, HUFFMAN_EMIT_SYMBOL, 104),
+ (15, HUFFMAN_EMIT_SYMBOL, 104),
+ (24, HUFFMAN_EMIT_SYMBOL, 104),
+ (31, HUFFMAN_EMIT_SYMBOL, 104),
+ (41, HUFFMAN_EMIT_SYMBOL, 104),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 104),
+
+ # Node 31
+ (1, HUFFMAN_EMIT_SYMBOL, 108),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 108),
+ (1, HUFFMAN_EMIT_SYMBOL, 109),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 109),
+ (1, HUFFMAN_EMIT_SYMBOL, 110),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 110),
+ (1, HUFFMAN_EMIT_SYMBOL, 112),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 112),
+ (1, HUFFMAN_EMIT_SYMBOL, 114),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 114),
+ (1, HUFFMAN_EMIT_SYMBOL, 117),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 117),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 58),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 66),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 67),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 68),
+
+ # Node 32
+ (2, HUFFMAN_EMIT_SYMBOL, 108),
+ (9, HUFFMAN_EMIT_SYMBOL, 108),
+ (23, HUFFMAN_EMIT_SYMBOL, 108),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 108),
+ (2, HUFFMAN_EMIT_SYMBOL, 109),
+ (9, HUFFMAN_EMIT_SYMBOL, 109),
+ (23, HUFFMAN_EMIT_SYMBOL, 109),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 109),
+ (2, HUFFMAN_EMIT_SYMBOL, 110),
+ (9, HUFFMAN_EMIT_SYMBOL, 110),
+ (23, HUFFMAN_EMIT_SYMBOL, 110),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 110),
+ (2, HUFFMAN_EMIT_SYMBOL, 112),
+ (9, HUFFMAN_EMIT_SYMBOL, 112),
+ (23, HUFFMAN_EMIT_SYMBOL, 112),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 112),
+
+ # Node 33
+ (3, HUFFMAN_EMIT_SYMBOL, 108),
+ (6, HUFFMAN_EMIT_SYMBOL, 108),
+ (10, HUFFMAN_EMIT_SYMBOL, 108),
+ (15, HUFFMAN_EMIT_SYMBOL, 108),
+ (24, HUFFMAN_EMIT_SYMBOL, 108),
+ (31, HUFFMAN_EMIT_SYMBOL, 108),
+ (41, HUFFMAN_EMIT_SYMBOL, 108),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 108),
+ (3, HUFFMAN_EMIT_SYMBOL, 109),
+ (6, HUFFMAN_EMIT_SYMBOL, 109),
+ (10, HUFFMAN_EMIT_SYMBOL, 109),
+ (15, HUFFMAN_EMIT_SYMBOL, 109),
+ (24, HUFFMAN_EMIT_SYMBOL, 109),
+ (31, HUFFMAN_EMIT_SYMBOL, 109),
+ (41, HUFFMAN_EMIT_SYMBOL, 109),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 109),
+
+ # Node 34
+ (3, HUFFMAN_EMIT_SYMBOL, 110),
+ (6, HUFFMAN_EMIT_SYMBOL, 110),
+ (10, HUFFMAN_EMIT_SYMBOL, 110),
+ (15, HUFFMAN_EMIT_SYMBOL, 110),
+ (24, HUFFMAN_EMIT_SYMBOL, 110),
+ (31, HUFFMAN_EMIT_SYMBOL, 110),
+ (41, HUFFMAN_EMIT_SYMBOL, 110),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 110),
+ (3, HUFFMAN_EMIT_SYMBOL, 112),
+ (6, HUFFMAN_EMIT_SYMBOL, 112),
+ (10, HUFFMAN_EMIT_SYMBOL, 112),
+ (15, HUFFMAN_EMIT_SYMBOL, 112),
+ (24, HUFFMAN_EMIT_SYMBOL, 112),
+ (31, HUFFMAN_EMIT_SYMBOL, 112),
+ (41, HUFFMAN_EMIT_SYMBOL, 112),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 112),
+
+ # Node 35
+ (2, HUFFMAN_EMIT_SYMBOL, 114),
+ (9, HUFFMAN_EMIT_SYMBOL, 114),
+ (23, HUFFMAN_EMIT_SYMBOL, 114),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 114),
+ (2, HUFFMAN_EMIT_SYMBOL, 117),
+ (9, HUFFMAN_EMIT_SYMBOL, 117),
+ (23, HUFFMAN_EMIT_SYMBOL, 117),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 117),
+ (1, HUFFMAN_EMIT_SYMBOL, 58),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 58),
+ (1, HUFFMAN_EMIT_SYMBOL, 66),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 66),
+ (1, HUFFMAN_EMIT_SYMBOL, 67),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 67),
+ (1, HUFFMAN_EMIT_SYMBOL, 68),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 68),
+
+ # Node 36
+ (3, HUFFMAN_EMIT_SYMBOL, 114),
+ (6, HUFFMAN_EMIT_SYMBOL, 114),
+ (10, HUFFMAN_EMIT_SYMBOL, 114),
+ (15, HUFFMAN_EMIT_SYMBOL, 114),
+ (24, HUFFMAN_EMIT_SYMBOL, 114),
+ (31, HUFFMAN_EMIT_SYMBOL, 114),
+ (41, HUFFMAN_EMIT_SYMBOL, 114),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 114),
+ (3, HUFFMAN_EMIT_SYMBOL, 117),
+ (6, HUFFMAN_EMIT_SYMBOL, 117),
+ (10, HUFFMAN_EMIT_SYMBOL, 117),
+ (15, HUFFMAN_EMIT_SYMBOL, 117),
+ (24, HUFFMAN_EMIT_SYMBOL, 117),
+ (31, HUFFMAN_EMIT_SYMBOL, 117),
+ (41, HUFFMAN_EMIT_SYMBOL, 117),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 117),
+
+ # Node 37
+ (2, HUFFMAN_EMIT_SYMBOL, 58),
+ (9, HUFFMAN_EMIT_SYMBOL, 58),
+ (23, HUFFMAN_EMIT_SYMBOL, 58),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 58),
+ (2, HUFFMAN_EMIT_SYMBOL, 66),
+ (9, HUFFMAN_EMIT_SYMBOL, 66),
+ (23, HUFFMAN_EMIT_SYMBOL, 66),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 66),
+ (2, HUFFMAN_EMIT_SYMBOL, 67),
+ (9, HUFFMAN_EMIT_SYMBOL, 67),
+ (23, HUFFMAN_EMIT_SYMBOL, 67),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 67),
+ (2, HUFFMAN_EMIT_SYMBOL, 68),
+ (9, HUFFMAN_EMIT_SYMBOL, 68),
+ (23, HUFFMAN_EMIT_SYMBOL, 68),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 68),
+
+ # Node 38
+ (3, HUFFMAN_EMIT_SYMBOL, 58),
+ (6, HUFFMAN_EMIT_SYMBOL, 58),
+ (10, HUFFMAN_EMIT_SYMBOL, 58),
+ (15, HUFFMAN_EMIT_SYMBOL, 58),
+ (24, HUFFMAN_EMIT_SYMBOL, 58),
+ (31, HUFFMAN_EMIT_SYMBOL, 58),
+ (41, HUFFMAN_EMIT_SYMBOL, 58),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 58),
+ (3, HUFFMAN_EMIT_SYMBOL, 66),
+ (6, HUFFMAN_EMIT_SYMBOL, 66),
+ (10, HUFFMAN_EMIT_SYMBOL, 66),
+ (15, HUFFMAN_EMIT_SYMBOL, 66),
+ (24, HUFFMAN_EMIT_SYMBOL, 66),
+ (31, HUFFMAN_EMIT_SYMBOL, 66),
+ (41, HUFFMAN_EMIT_SYMBOL, 66),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 66),
+
+ # Node 39
+ (3, HUFFMAN_EMIT_SYMBOL, 67),
+ (6, HUFFMAN_EMIT_SYMBOL, 67),
+ (10, HUFFMAN_EMIT_SYMBOL, 67),
+ (15, HUFFMAN_EMIT_SYMBOL, 67),
+ (24, HUFFMAN_EMIT_SYMBOL, 67),
+ (31, HUFFMAN_EMIT_SYMBOL, 67),
+ (41, HUFFMAN_EMIT_SYMBOL, 67),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 67),
+ (3, HUFFMAN_EMIT_SYMBOL, 68),
+ (6, HUFFMAN_EMIT_SYMBOL, 68),
+ (10, HUFFMAN_EMIT_SYMBOL, 68),
+ (15, HUFFMAN_EMIT_SYMBOL, 68),
+ (24, HUFFMAN_EMIT_SYMBOL, 68),
+ (31, HUFFMAN_EMIT_SYMBOL, 68),
+ (41, HUFFMAN_EMIT_SYMBOL, 68),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 68),
+
+ # Node 40
+ (44, 0, 0),
+ (45, 0, 0),
+ (47, 0, 0),
+ (48, 0, 0),
+ (51, 0, 0),
+ (52, 0, 0),
+ (54, 0, 0),
+ (55, 0, 0),
+ (59, 0, 0),
+ (60, 0, 0),
+ (62, 0, 0),
+ (63, 0, 0),
+ (66, 0, 0),
+ (67, 0, 0),
+ (69, 0, 0),
+ (72, HUFFMAN_COMPLETE, 0),
+
+ # Node 41
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 69),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 70),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 71),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 72),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 73),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 74),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 75),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 76),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 77),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 78),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 79),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 80),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 81),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 82),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 83),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 84),
+
+ # Node 42
+ (1, HUFFMAN_EMIT_SYMBOL, 69),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 69),
+ (1, HUFFMAN_EMIT_SYMBOL, 70),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 70),
+ (1, HUFFMAN_EMIT_SYMBOL, 71),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 71),
+ (1, HUFFMAN_EMIT_SYMBOL, 72),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 72),
+ (1, HUFFMAN_EMIT_SYMBOL, 73),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 73),
+ (1, HUFFMAN_EMIT_SYMBOL, 74),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 74),
+ (1, HUFFMAN_EMIT_SYMBOL, 75),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 75),
+ (1, HUFFMAN_EMIT_SYMBOL, 76),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 76),
+
+ # Node 43
+ (2, HUFFMAN_EMIT_SYMBOL, 69),
+ (9, HUFFMAN_EMIT_SYMBOL, 69),
+ (23, HUFFMAN_EMIT_SYMBOL, 69),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 69),
+ (2, HUFFMAN_EMIT_SYMBOL, 70),
+ (9, HUFFMAN_EMIT_SYMBOL, 70),
+ (23, HUFFMAN_EMIT_SYMBOL, 70),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 70),
+ (2, HUFFMAN_EMIT_SYMBOL, 71),
+ (9, HUFFMAN_EMIT_SYMBOL, 71),
+ (23, HUFFMAN_EMIT_SYMBOL, 71),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 71),
+ (2, HUFFMAN_EMIT_SYMBOL, 72),
+ (9, HUFFMAN_EMIT_SYMBOL, 72),
+ (23, HUFFMAN_EMIT_SYMBOL, 72),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 72),
+
+ # Node 44
+ (3, HUFFMAN_EMIT_SYMBOL, 69),
+ (6, HUFFMAN_EMIT_SYMBOL, 69),
+ (10, HUFFMAN_EMIT_SYMBOL, 69),
+ (15, HUFFMAN_EMIT_SYMBOL, 69),
+ (24, HUFFMAN_EMIT_SYMBOL, 69),
+ (31, HUFFMAN_EMIT_SYMBOL, 69),
+ (41, HUFFMAN_EMIT_SYMBOL, 69),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 69),
+ (3, HUFFMAN_EMIT_SYMBOL, 70),
+ (6, HUFFMAN_EMIT_SYMBOL, 70),
+ (10, HUFFMAN_EMIT_SYMBOL, 70),
+ (15, HUFFMAN_EMIT_SYMBOL, 70),
+ (24, HUFFMAN_EMIT_SYMBOL, 70),
+ (31, HUFFMAN_EMIT_SYMBOL, 70),
+ (41, HUFFMAN_EMIT_SYMBOL, 70),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 70),
+
+ # Node 45
+ (3, HUFFMAN_EMIT_SYMBOL, 71),
+ (6, HUFFMAN_EMIT_SYMBOL, 71),
+ (10, HUFFMAN_EMIT_SYMBOL, 71),
+ (15, HUFFMAN_EMIT_SYMBOL, 71),
+ (24, HUFFMAN_EMIT_SYMBOL, 71),
+ (31, HUFFMAN_EMIT_SYMBOL, 71),
+ (41, HUFFMAN_EMIT_SYMBOL, 71),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 71),
+ (3, HUFFMAN_EMIT_SYMBOL, 72),
+ (6, HUFFMAN_EMIT_SYMBOL, 72),
+ (10, HUFFMAN_EMIT_SYMBOL, 72),
+ (15, HUFFMAN_EMIT_SYMBOL, 72),
+ (24, HUFFMAN_EMIT_SYMBOL, 72),
+ (31, HUFFMAN_EMIT_SYMBOL, 72),
+ (41, HUFFMAN_EMIT_SYMBOL, 72),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 72),
+
+ # Node 46
+ (2, HUFFMAN_EMIT_SYMBOL, 73),
+ (9, HUFFMAN_EMIT_SYMBOL, 73),
+ (23, HUFFMAN_EMIT_SYMBOL, 73),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 73),
+ (2, HUFFMAN_EMIT_SYMBOL, 74),
+ (9, HUFFMAN_EMIT_SYMBOL, 74),
+ (23, HUFFMAN_EMIT_SYMBOL, 74),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 74),
+ (2, HUFFMAN_EMIT_SYMBOL, 75),
+ (9, HUFFMAN_EMIT_SYMBOL, 75),
+ (23, HUFFMAN_EMIT_SYMBOL, 75),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 75),
+ (2, HUFFMAN_EMIT_SYMBOL, 76),
+ (9, HUFFMAN_EMIT_SYMBOL, 76),
+ (23, HUFFMAN_EMIT_SYMBOL, 76),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 76),
+
+ # Node 47
+ (3, HUFFMAN_EMIT_SYMBOL, 73),
+ (6, HUFFMAN_EMIT_SYMBOL, 73),
+ (10, HUFFMAN_EMIT_SYMBOL, 73),
+ (15, HUFFMAN_EMIT_SYMBOL, 73),
+ (24, HUFFMAN_EMIT_SYMBOL, 73),
+ (31, HUFFMAN_EMIT_SYMBOL, 73),
+ (41, HUFFMAN_EMIT_SYMBOL, 73),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 73),
+ (3, HUFFMAN_EMIT_SYMBOL, 74),
+ (6, HUFFMAN_EMIT_SYMBOL, 74),
+ (10, HUFFMAN_EMIT_SYMBOL, 74),
+ (15, HUFFMAN_EMIT_SYMBOL, 74),
+ (24, HUFFMAN_EMIT_SYMBOL, 74),
+ (31, HUFFMAN_EMIT_SYMBOL, 74),
+ (41, HUFFMAN_EMIT_SYMBOL, 74),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 74),
+
+ # Node 48
+ (3, HUFFMAN_EMIT_SYMBOL, 75),
+ (6, HUFFMAN_EMIT_SYMBOL, 75),
+ (10, HUFFMAN_EMIT_SYMBOL, 75),
+ (15, HUFFMAN_EMIT_SYMBOL, 75),
+ (24, HUFFMAN_EMIT_SYMBOL, 75),
+ (31, HUFFMAN_EMIT_SYMBOL, 75),
+ (41, HUFFMAN_EMIT_SYMBOL, 75),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 75),
+ (3, HUFFMAN_EMIT_SYMBOL, 76),
+ (6, HUFFMAN_EMIT_SYMBOL, 76),
+ (10, HUFFMAN_EMIT_SYMBOL, 76),
+ (15, HUFFMAN_EMIT_SYMBOL, 76),
+ (24, HUFFMAN_EMIT_SYMBOL, 76),
+ (31, HUFFMAN_EMIT_SYMBOL, 76),
+ (41, HUFFMAN_EMIT_SYMBOL, 76),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 76),
+
+ # Node 49
+ (1, HUFFMAN_EMIT_SYMBOL, 77),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 77),
+ (1, HUFFMAN_EMIT_SYMBOL, 78),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 78),
+ (1, HUFFMAN_EMIT_SYMBOL, 79),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 79),
+ (1, HUFFMAN_EMIT_SYMBOL, 80),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 80),
+ (1, HUFFMAN_EMIT_SYMBOL, 81),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 81),
+ (1, HUFFMAN_EMIT_SYMBOL, 82),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 82),
+ (1, HUFFMAN_EMIT_SYMBOL, 83),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 83),
+ (1, HUFFMAN_EMIT_SYMBOL, 84),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 84),
+
+ # Node 50
+ (2, HUFFMAN_EMIT_SYMBOL, 77),
+ (9, HUFFMAN_EMIT_SYMBOL, 77),
+ (23, HUFFMAN_EMIT_SYMBOL, 77),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 77),
+ (2, HUFFMAN_EMIT_SYMBOL, 78),
+ (9, HUFFMAN_EMIT_SYMBOL, 78),
+ (23, HUFFMAN_EMIT_SYMBOL, 78),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 78),
+ (2, HUFFMAN_EMIT_SYMBOL, 79),
+ (9, HUFFMAN_EMIT_SYMBOL, 79),
+ (23, HUFFMAN_EMIT_SYMBOL, 79),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 79),
+ (2, HUFFMAN_EMIT_SYMBOL, 80),
+ (9, HUFFMAN_EMIT_SYMBOL, 80),
+ (23, HUFFMAN_EMIT_SYMBOL, 80),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 80),
+
+ # Node 51
+ (3, HUFFMAN_EMIT_SYMBOL, 77),
+ (6, HUFFMAN_EMIT_SYMBOL, 77),
+ (10, HUFFMAN_EMIT_SYMBOL, 77),
+ (15, HUFFMAN_EMIT_SYMBOL, 77),
+ (24, HUFFMAN_EMIT_SYMBOL, 77),
+ (31, HUFFMAN_EMIT_SYMBOL, 77),
+ (41, HUFFMAN_EMIT_SYMBOL, 77),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 77),
+ (3, HUFFMAN_EMIT_SYMBOL, 78),
+ (6, HUFFMAN_EMIT_SYMBOL, 78),
+ (10, HUFFMAN_EMIT_SYMBOL, 78),
+ (15, HUFFMAN_EMIT_SYMBOL, 78),
+ (24, HUFFMAN_EMIT_SYMBOL, 78),
+ (31, HUFFMAN_EMIT_SYMBOL, 78),
+ (41, HUFFMAN_EMIT_SYMBOL, 78),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 78),
+
+ # Node 52
+ (3, HUFFMAN_EMIT_SYMBOL, 79),
+ (6, HUFFMAN_EMIT_SYMBOL, 79),
+ (10, HUFFMAN_EMIT_SYMBOL, 79),
+ (15, HUFFMAN_EMIT_SYMBOL, 79),
+ (24, HUFFMAN_EMIT_SYMBOL, 79),
+ (31, HUFFMAN_EMIT_SYMBOL, 79),
+ (41, HUFFMAN_EMIT_SYMBOL, 79),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 79),
+ (3, HUFFMAN_EMIT_SYMBOL, 80),
+ (6, HUFFMAN_EMIT_SYMBOL, 80),
+ (10, HUFFMAN_EMIT_SYMBOL, 80),
+ (15, HUFFMAN_EMIT_SYMBOL, 80),
+ (24, HUFFMAN_EMIT_SYMBOL, 80),
+ (31, HUFFMAN_EMIT_SYMBOL, 80),
+ (41, HUFFMAN_EMIT_SYMBOL, 80),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 80),
+
+ # Node 53
+ (2, HUFFMAN_EMIT_SYMBOL, 81),
+ (9, HUFFMAN_EMIT_SYMBOL, 81),
+ (23, HUFFMAN_EMIT_SYMBOL, 81),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 81),
+ (2, HUFFMAN_EMIT_SYMBOL, 82),
+ (9, HUFFMAN_EMIT_SYMBOL, 82),
+ (23, HUFFMAN_EMIT_SYMBOL, 82),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 82),
+ (2, HUFFMAN_EMIT_SYMBOL, 83),
+ (9, HUFFMAN_EMIT_SYMBOL, 83),
+ (23, HUFFMAN_EMIT_SYMBOL, 83),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 83),
+ (2, HUFFMAN_EMIT_SYMBOL, 84),
+ (9, HUFFMAN_EMIT_SYMBOL, 84),
+ (23, HUFFMAN_EMIT_SYMBOL, 84),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 84),
+
+ # Node 54
+ (3, HUFFMAN_EMIT_SYMBOL, 81),
+ (6, HUFFMAN_EMIT_SYMBOL, 81),
+ (10, HUFFMAN_EMIT_SYMBOL, 81),
+ (15, HUFFMAN_EMIT_SYMBOL, 81),
+ (24, HUFFMAN_EMIT_SYMBOL, 81),
+ (31, HUFFMAN_EMIT_SYMBOL, 81),
+ (41, HUFFMAN_EMIT_SYMBOL, 81),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 81),
+ (3, HUFFMAN_EMIT_SYMBOL, 82),
+ (6, HUFFMAN_EMIT_SYMBOL, 82),
+ (10, HUFFMAN_EMIT_SYMBOL, 82),
+ (15, HUFFMAN_EMIT_SYMBOL, 82),
+ (24, HUFFMAN_EMIT_SYMBOL, 82),
+ (31, HUFFMAN_EMIT_SYMBOL, 82),
+ (41, HUFFMAN_EMIT_SYMBOL, 82),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 82),
+
+ # Node 55
+ (3, HUFFMAN_EMIT_SYMBOL, 83),
+ (6, HUFFMAN_EMIT_SYMBOL, 83),
+ (10, HUFFMAN_EMIT_SYMBOL, 83),
+ (15, HUFFMAN_EMIT_SYMBOL, 83),
+ (24, HUFFMAN_EMIT_SYMBOL, 83),
+ (31, HUFFMAN_EMIT_SYMBOL, 83),
+ (41, HUFFMAN_EMIT_SYMBOL, 83),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 83),
+ (3, HUFFMAN_EMIT_SYMBOL, 84),
+ (6, HUFFMAN_EMIT_SYMBOL, 84),
+ (10, HUFFMAN_EMIT_SYMBOL, 84),
+ (15, HUFFMAN_EMIT_SYMBOL, 84),
+ (24, HUFFMAN_EMIT_SYMBOL, 84),
+ (31, HUFFMAN_EMIT_SYMBOL, 84),
+ (41, HUFFMAN_EMIT_SYMBOL, 84),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 84),
+
+ # Node 56
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 85),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 86),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 87),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 89),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 106),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 107),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 113),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 118),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 119),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 120),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 121),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 122),
+ (70, 0, 0),
+ (71, 0, 0),
+ (73, 0, 0),
+ (74, HUFFMAN_COMPLETE, 0),
+
+ # Node 57
+ (1, HUFFMAN_EMIT_SYMBOL, 85),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 85),
+ (1, HUFFMAN_EMIT_SYMBOL, 86),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 86),
+ (1, HUFFMAN_EMIT_SYMBOL, 87),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 87),
+ (1, HUFFMAN_EMIT_SYMBOL, 89),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 89),
+ (1, HUFFMAN_EMIT_SYMBOL, 106),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 106),
+ (1, HUFFMAN_EMIT_SYMBOL, 107),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 107),
+ (1, HUFFMAN_EMIT_SYMBOL, 113),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 113),
+ (1, HUFFMAN_EMIT_SYMBOL, 118),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 118),
+
+ # Node 58
+ (2, HUFFMAN_EMIT_SYMBOL, 85),
+ (9, HUFFMAN_EMIT_SYMBOL, 85),
+ (23, HUFFMAN_EMIT_SYMBOL, 85),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 85),
+ (2, HUFFMAN_EMIT_SYMBOL, 86),
+ (9, HUFFMAN_EMIT_SYMBOL, 86),
+ (23, HUFFMAN_EMIT_SYMBOL, 86),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 86),
+ (2, HUFFMAN_EMIT_SYMBOL, 87),
+ (9, HUFFMAN_EMIT_SYMBOL, 87),
+ (23, HUFFMAN_EMIT_SYMBOL, 87),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 87),
+ (2, HUFFMAN_EMIT_SYMBOL, 89),
+ (9, HUFFMAN_EMIT_SYMBOL, 89),
+ (23, HUFFMAN_EMIT_SYMBOL, 89),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 89),
+
+ # Node 59
+ (3, HUFFMAN_EMIT_SYMBOL, 85),
+ (6, HUFFMAN_EMIT_SYMBOL, 85),
+ (10, HUFFMAN_EMIT_SYMBOL, 85),
+ (15, HUFFMAN_EMIT_SYMBOL, 85),
+ (24, HUFFMAN_EMIT_SYMBOL, 85),
+ (31, HUFFMAN_EMIT_SYMBOL, 85),
+ (41, HUFFMAN_EMIT_SYMBOL, 85),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 85),
+ (3, HUFFMAN_EMIT_SYMBOL, 86),
+ (6, HUFFMAN_EMIT_SYMBOL, 86),
+ (10, HUFFMAN_EMIT_SYMBOL, 86),
+ (15, HUFFMAN_EMIT_SYMBOL, 86),
+ (24, HUFFMAN_EMIT_SYMBOL, 86),
+ (31, HUFFMAN_EMIT_SYMBOL, 86),
+ (41, HUFFMAN_EMIT_SYMBOL, 86),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 86),
+
+ # Node 60
+ (3, HUFFMAN_EMIT_SYMBOL, 87),
+ (6, HUFFMAN_EMIT_SYMBOL, 87),
+ (10, HUFFMAN_EMIT_SYMBOL, 87),
+ (15, HUFFMAN_EMIT_SYMBOL, 87),
+ (24, HUFFMAN_EMIT_SYMBOL, 87),
+ (31, HUFFMAN_EMIT_SYMBOL, 87),
+ (41, HUFFMAN_EMIT_SYMBOL, 87),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 87),
+ (3, HUFFMAN_EMIT_SYMBOL, 89),
+ (6, HUFFMAN_EMIT_SYMBOL, 89),
+ (10, HUFFMAN_EMIT_SYMBOL, 89),
+ (15, HUFFMAN_EMIT_SYMBOL, 89),
+ (24, HUFFMAN_EMIT_SYMBOL, 89),
+ (31, HUFFMAN_EMIT_SYMBOL, 89),
+ (41, HUFFMAN_EMIT_SYMBOL, 89),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 89),
+
+ # Node 61
+ (2, HUFFMAN_EMIT_SYMBOL, 106),
+ (9, HUFFMAN_EMIT_SYMBOL, 106),
+ (23, HUFFMAN_EMIT_SYMBOL, 106),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 106),
+ (2, HUFFMAN_EMIT_SYMBOL, 107),
+ (9, HUFFMAN_EMIT_SYMBOL, 107),
+ (23, HUFFMAN_EMIT_SYMBOL, 107),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 107),
+ (2, HUFFMAN_EMIT_SYMBOL, 113),
+ (9, HUFFMAN_EMIT_SYMBOL, 113),
+ (23, HUFFMAN_EMIT_SYMBOL, 113),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 113),
+ (2, HUFFMAN_EMIT_SYMBOL, 118),
+ (9, HUFFMAN_EMIT_SYMBOL, 118),
+ (23, HUFFMAN_EMIT_SYMBOL, 118),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 118),
+
+ # Node 62
+ (3, HUFFMAN_EMIT_SYMBOL, 106),
+ (6, HUFFMAN_EMIT_SYMBOL, 106),
+ (10, HUFFMAN_EMIT_SYMBOL, 106),
+ (15, HUFFMAN_EMIT_SYMBOL, 106),
+ (24, HUFFMAN_EMIT_SYMBOL, 106),
+ (31, HUFFMAN_EMIT_SYMBOL, 106),
+ (41, HUFFMAN_EMIT_SYMBOL, 106),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 106),
+ (3, HUFFMAN_EMIT_SYMBOL, 107),
+ (6, HUFFMAN_EMIT_SYMBOL, 107),
+ (10, HUFFMAN_EMIT_SYMBOL, 107),
+ (15, HUFFMAN_EMIT_SYMBOL, 107),
+ (24, HUFFMAN_EMIT_SYMBOL, 107),
+ (31, HUFFMAN_EMIT_SYMBOL, 107),
+ (41, HUFFMAN_EMIT_SYMBOL, 107),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 107),
+
+ # Node 63
+ (3, HUFFMAN_EMIT_SYMBOL, 113),
+ (6, HUFFMAN_EMIT_SYMBOL, 113),
+ (10, HUFFMAN_EMIT_SYMBOL, 113),
+ (15, HUFFMAN_EMIT_SYMBOL, 113),
+ (24, HUFFMAN_EMIT_SYMBOL, 113),
+ (31, HUFFMAN_EMIT_SYMBOL, 113),
+ (41, HUFFMAN_EMIT_SYMBOL, 113),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 113),
+ (3, HUFFMAN_EMIT_SYMBOL, 118),
+ (6, HUFFMAN_EMIT_SYMBOL, 118),
+ (10, HUFFMAN_EMIT_SYMBOL, 118),
+ (15, HUFFMAN_EMIT_SYMBOL, 118),
+ (24, HUFFMAN_EMIT_SYMBOL, 118),
+ (31, HUFFMAN_EMIT_SYMBOL, 118),
+ (41, HUFFMAN_EMIT_SYMBOL, 118),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 118),
+
+ # Node 64
+ (1, HUFFMAN_EMIT_SYMBOL, 119),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 119),
+ (1, HUFFMAN_EMIT_SYMBOL, 120),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 120),
+ (1, HUFFMAN_EMIT_SYMBOL, 121),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 121),
+ (1, HUFFMAN_EMIT_SYMBOL, 122),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 122),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 38),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 42),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 44),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 59),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 88),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 90),
+ (75, 0, 0),
+ (78, 0, 0),
+
+ # Node 65
+ (2, HUFFMAN_EMIT_SYMBOL, 119),
+ (9, HUFFMAN_EMIT_SYMBOL, 119),
+ (23, HUFFMAN_EMIT_SYMBOL, 119),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 119),
+ (2, HUFFMAN_EMIT_SYMBOL, 120),
+ (9, HUFFMAN_EMIT_SYMBOL, 120),
+ (23, HUFFMAN_EMIT_SYMBOL, 120),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 120),
+ (2, HUFFMAN_EMIT_SYMBOL, 121),
+ (9, HUFFMAN_EMIT_SYMBOL, 121),
+ (23, HUFFMAN_EMIT_SYMBOL, 121),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 121),
+ (2, HUFFMAN_EMIT_SYMBOL, 122),
+ (9, HUFFMAN_EMIT_SYMBOL, 122),
+ (23, HUFFMAN_EMIT_SYMBOL, 122),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 122),
+
+ # Node 66
+ (3, HUFFMAN_EMIT_SYMBOL, 119),
+ (6, HUFFMAN_EMIT_SYMBOL, 119),
+ (10, HUFFMAN_EMIT_SYMBOL, 119),
+ (15, HUFFMAN_EMIT_SYMBOL, 119),
+ (24, HUFFMAN_EMIT_SYMBOL, 119),
+ (31, HUFFMAN_EMIT_SYMBOL, 119),
+ (41, HUFFMAN_EMIT_SYMBOL, 119),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 119),
+ (3, HUFFMAN_EMIT_SYMBOL, 120),
+ (6, HUFFMAN_EMIT_SYMBOL, 120),
+ (10, HUFFMAN_EMIT_SYMBOL, 120),
+ (15, HUFFMAN_EMIT_SYMBOL, 120),
+ (24, HUFFMAN_EMIT_SYMBOL, 120),
+ (31, HUFFMAN_EMIT_SYMBOL, 120),
+ (41, HUFFMAN_EMIT_SYMBOL, 120),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 120),
+
+ # Node 67
+ (3, HUFFMAN_EMIT_SYMBOL, 121),
+ (6, HUFFMAN_EMIT_SYMBOL, 121),
+ (10, HUFFMAN_EMIT_SYMBOL, 121),
+ (15, HUFFMAN_EMIT_SYMBOL, 121),
+ (24, HUFFMAN_EMIT_SYMBOL, 121),
+ (31, HUFFMAN_EMIT_SYMBOL, 121),
+ (41, HUFFMAN_EMIT_SYMBOL, 121),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 121),
+ (3, HUFFMAN_EMIT_SYMBOL, 122),
+ (6, HUFFMAN_EMIT_SYMBOL, 122),
+ (10, HUFFMAN_EMIT_SYMBOL, 122),
+ (15, HUFFMAN_EMIT_SYMBOL, 122),
+ (24, HUFFMAN_EMIT_SYMBOL, 122),
+ (31, HUFFMAN_EMIT_SYMBOL, 122),
+ (41, HUFFMAN_EMIT_SYMBOL, 122),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 122),
+
+ # Node 68
+ (1, HUFFMAN_EMIT_SYMBOL, 38),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 38),
+ (1, HUFFMAN_EMIT_SYMBOL, 42),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 42),
+ (1, HUFFMAN_EMIT_SYMBOL, 44),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 44),
+ (1, HUFFMAN_EMIT_SYMBOL, 59),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 59),
+ (1, HUFFMAN_EMIT_SYMBOL, 88),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 88),
+ (1, HUFFMAN_EMIT_SYMBOL, 90),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 90),
+ (76, 0, 0),
+ (77, 0, 0),
+ (79, 0, 0),
+ (81, 0, 0),
+
+ # Node 69
+ (2, HUFFMAN_EMIT_SYMBOL, 38),
+ (9, HUFFMAN_EMIT_SYMBOL, 38),
+ (23, HUFFMAN_EMIT_SYMBOL, 38),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 38),
+ (2, HUFFMAN_EMIT_SYMBOL, 42),
+ (9, HUFFMAN_EMIT_SYMBOL, 42),
+ (23, HUFFMAN_EMIT_SYMBOL, 42),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 42),
+ (2, HUFFMAN_EMIT_SYMBOL, 44),
+ (9, HUFFMAN_EMIT_SYMBOL, 44),
+ (23, HUFFMAN_EMIT_SYMBOL, 44),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 44),
+ (2, HUFFMAN_EMIT_SYMBOL, 59),
+ (9, HUFFMAN_EMIT_SYMBOL, 59),
+ (23, HUFFMAN_EMIT_SYMBOL, 59),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 59),
+
+ # Node 70
+ (3, HUFFMAN_EMIT_SYMBOL, 38),
+ (6, HUFFMAN_EMIT_SYMBOL, 38),
+ (10, HUFFMAN_EMIT_SYMBOL, 38),
+ (15, HUFFMAN_EMIT_SYMBOL, 38),
+ (24, HUFFMAN_EMIT_SYMBOL, 38),
+ (31, HUFFMAN_EMIT_SYMBOL, 38),
+ (41, HUFFMAN_EMIT_SYMBOL, 38),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 38),
+ (3, HUFFMAN_EMIT_SYMBOL, 42),
+ (6, HUFFMAN_EMIT_SYMBOL, 42),
+ (10, HUFFMAN_EMIT_SYMBOL, 42),
+ (15, HUFFMAN_EMIT_SYMBOL, 42),
+ (24, HUFFMAN_EMIT_SYMBOL, 42),
+ (31, HUFFMAN_EMIT_SYMBOL, 42),
+ (41, HUFFMAN_EMIT_SYMBOL, 42),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 42),
+
+ # Node 71
+ (3, HUFFMAN_EMIT_SYMBOL, 44),
+ (6, HUFFMAN_EMIT_SYMBOL, 44),
+ (10, HUFFMAN_EMIT_SYMBOL, 44),
+ (15, HUFFMAN_EMIT_SYMBOL, 44),
+ (24, HUFFMAN_EMIT_SYMBOL, 44),
+ (31, HUFFMAN_EMIT_SYMBOL, 44),
+ (41, HUFFMAN_EMIT_SYMBOL, 44),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 44),
+ (3, HUFFMAN_EMIT_SYMBOL, 59),
+ (6, HUFFMAN_EMIT_SYMBOL, 59),
+ (10, HUFFMAN_EMIT_SYMBOL, 59),
+ (15, HUFFMAN_EMIT_SYMBOL, 59),
+ (24, HUFFMAN_EMIT_SYMBOL, 59),
+ (31, HUFFMAN_EMIT_SYMBOL, 59),
+ (41, HUFFMAN_EMIT_SYMBOL, 59),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 59),
+
+ # Node 72
+ (2, HUFFMAN_EMIT_SYMBOL, 88),
+ (9, HUFFMAN_EMIT_SYMBOL, 88),
+ (23, HUFFMAN_EMIT_SYMBOL, 88),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 88),
+ (2, HUFFMAN_EMIT_SYMBOL, 90),
+ (9, HUFFMAN_EMIT_SYMBOL, 90),
+ (23, HUFFMAN_EMIT_SYMBOL, 90),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 90),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 33),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 34),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 40),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 41),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 63),
+ (80, 0, 0),
+ (82, 0, 0),
+ (84, 0, 0),
+
+ # Node 73
+ (3, HUFFMAN_EMIT_SYMBOL, 88),
+ (6, HUFFMAN_EMIT_SYMBOL, 88),
+ (10, HUFFMAN_EMIT_SYMBOL, 88),
+ (15, HUFFMAN_EMIT_SYMBOL, 88),
+ (24, HUFFMAN_EMIT_SYMBOL, 88),
+ (31, HUFFMAN_EMIT_SYMBOL, 88),
+ (41, HUFFMAN_EMIT_SYMBOL, 88),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 88),
+ (3, HUFFMAN_EMIT_SYMBOL, 90),
+ (6, HUFFMAN_EMIT_SYMBOL, 90),
+ (10, HUFFMAN_EMIT_SYMBOL, 90),
+ (15, HUFFMAN_EMIT_SYMBOL, 90),
+ (24, HUFFMAN_EMIT_SYMBOL, 90),
+ (31, HUFFMAN_EMIT_SYMBOL, 90),
+ (41, HUFFMAN_EMIT_SYMBOL, 90),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 90),
+
+ # Node 74
+ (1, HUFFMAN_EMIT_SYMBOL, 33),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 33),
+ (1, HUFFMAN_EMIT_SYMBOL, 34),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 34),
+ (1, HUFFMAN_EMIT_SYMBOL, 40),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 40),
+ (1, HUFFMAN_EMIT_SYMBOL, 41),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 41),
+ (1, HUFFMAN_EMIT_SYMBOL, 63),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 63),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 39),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 43),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 124),
+ (83, 0, 0),
+ (85, 0, 0),
+ (88, 0, 0),
+
+ # Node 75
+ (2, HUFFMAN_EMIT_SYMBOL, 33),
+ (9, HUFFMAN_EMIT_SYMBOL, 33),
+ (23, HUFFMAN_EMIT_SYMBOL, 33),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 33),
+ (2, HUFFMAN_EMIT_SYMBOL, 34),
+ (9, HUFFMAN_EMIT_SYMBOL, 34),
+ (23, HUFFMAN_EMIT_SYMBOL, 34),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 34),
+ (2, HUFFMAN_EMIT_SYMBOL, 40),
+ (9, HUFFMAN_EMIT_SYMBOL, 40),
+ (23, HUFFMAN_EMIT_SYMBOL, 40),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 40),
+ (2, HUFFMAN_EMIT_SYMBOL, 41),
+ (9, HUFFMAN_EMIT_SYMBOL, 41),
+ (23, HUFFMAN_EMIT_SYMBOL, 41),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 41),
+
+ # Node 76
+ (3, HUFFMAN_EMIT_SYMBOL, 33),
+ (6, HUFFMAN_EMIT_SYMBOL, 33),
+ (10, HUFFMAN_EMIT_SYMBOL, 33),
+ (15, HUFFMAN_EMIT_SYMBOL, 33),
+ (24, HUFFMAN_EMIT_SYMBOL, 33),
+ (31, HUFFMAN_EMIT_SYMBOL, 33),
+ (41, HUFFMAN_EMIT_SYMBOL, 33),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 33),
+ (3, HUFFMAN_EMIT_SYMBOL, 34),
+ (6, HUFFMAN_EMIT_SYMBOL, 34),
+ (10, HUFFMAN_EMIT_SYMBOL, 34),
+ (15, HUFFMAN_EMIT_SYMBOL, 34),
+ (24, HUFFMAN_EMIT_SYMBOL, 34),
+ (31, HUFFMAN_EMIT_SYMBOL, 34),
+ (41, HUFFMAN_EMIT_SYMBOL, 34),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 34),
+
+ # Node 77
+ (3, HUFFMAN_EMIT_SYMBOL, 40),
+ (6, HUFFMAN_EMIT_SYMBOL, 40),
+ (10, HUFFMAN_EMIT_SYMBOL, 40),
+ (15, HUFFMAN_EMIT_SYMBOL, 40),
+ (24, HUFFMAN_EMIT_SYMBOL, 40),
+ (31, HUFFMAN_EMIT_SYMBOL, 40),
+ (41, HUFFMAN_EMIT_SYMBOL, 40),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 40),
+ (3, HUFFMAN_EMIT_SYMBOL, 41),
+ (6, HUFFMAN_EMIT_SYMBOL, 41),
+ (10, HUFFMAN_EMIT_SYMBOL, 41),
+ (15, HUFFMAN_EMIT_SYMBOL, 41),
+ (24, HUFFMAN_EMIT_SYMBOL, 41),
+ (31, HUFFMAN_EMIT_SYMBOL, 41),
+ (41, HUFFMAN_EMIT_SYMBOL, 41),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 41),
+
+ # Node 78
+ (2, HUFFMAN_EMIT_SYMBOL, 63),
+ (9, HUFFMAN_EMIT_SYMBOL, 63),
+ (23, HUFFMAN_EMIT_SYMBOL, 63),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 63),
+ (1, HUFFMAN_EMIT_SYMBOL, 39),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 39),
+ (1, HUFFMAN_EMIT_SYMBOL, 43),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 43),
+ (1, HUFFMAN_EMIT_SYMBOL, 124),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 124),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 35),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 62),
+ (86, 0, 0),
+ (87, 0, 0),
+ (89, 0, 0),
+ (90, 0, 0),
+
+ # Node 79
+ (3, HUFFMAN_EMIT_SYMBOL, 63),
+ (6, HUFFMAN_EMIT_SYMBOL, 63),
+ (10, HUFFMAN_EMIT_SYMBOL, 63),
+ (15, HUFFMAN_EMIT_SYMBOL, 63),
+ (24, HUFFMAN_EMIT_SYMBOL, 63),
+ (31, HUFFMAN_EMIT_SYMBOL, 63),
+ (41, HUFFMAN_EMIT_SYMBOL, 63),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 63),
+ (2, HUFFMAN_EMIT_SYMBOL, 39),
+ (9, HUFFMAN_EMIT_SYMBOL, 39),
+ (23, HUFFMAN_EMIT_SYMBOL, 39),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 39),
+ (2, HUFFMAN_EMIT_SYMBOL, 43),
+ (9, HUFFMAN_EMIT_SYMBOL, 43),
+ (23, HUFFMAN_EMIT_SYMBOL, 43),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 43),
+
+ # Node 80
+ (3, HUFFMAN_EMIT_SYMBOL, 39),
+ (6, HUFFMAN_EMIT_SYMBOL, 39),
+ (10, HUFFMAN_EMIT_SYMBOL, 39),
+ (15, HUFFMAN_EMIT_SYMBOL, 39),
+ (24, HUFFMAN_EMIT_SYMBOL, 39),
+ (31, HUFFMAN_EMIT_SYMBOL, 39),
+ (41, HUFFMAN_EMIT_SYMBOL, 39),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 39),
+ (3, HUFFMAN_EMIT_SYMBOL, 43),
+ (6, HUFFMAN_EMIT_SYMBOL, 43),
+ (10, HUFFMAN_EMIT_SYMBOL, 43),
+ (15, HUFFMAN_EMIT_SYMBOL, 43),
+ (24, HUFFMAN_EMIT_SYMBOL, 43),
+ (31, HUFFMAN_EMIT_SYMBOL, 43),
+ (41, HUFFMAN_EMIT_SYMBOL, 43),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 43),
+
+ # Node 81
+ (2, HUFFMAN_EMIT_SYMBOL, 124),
+ (9, HUFFMAN_EMIT_SYMBOL, 124),
+ (23, HUFFMAN_EMIT_SYMBOL, 124),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 124),
+ (1, HUFFMAN_EMIT_SYMBOL, 35),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 35),
+ (1, HUFFMAN_EMIT_SYMBOL, 62),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 62),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 0),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 36),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 64),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 91),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 93),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 126),
+ (91, 0, 0),
+ (92, 0, 0),
+
+ # Node 82
+ (3, HUFFMAN_EMIT_SYMBOL, 124),
+ (6, HUFFMAN_EMIT_SYMBOL, 124),
+ (10, HUFFMAN_EMIT_SYMBOL, 124),
+ (15, HUFFMAN_EMIT_SYMBOL, 124),
+ (24, HUFFMAN_EMIT_SYMBOL, 124),
+ (31, HUFFMAN_EMIT_SYMBOL, 124),
+ (41, HUFFMAN_EMIT_SYMBOL, 124),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 124),
+ (2, HUFFMAN_EMIT_SYMBOL, 35),
+ (9, HUFFMAN_EMIT_SYMBOL, 35),
+ (23, HUFFMAN_EMIT_SYMBOL, 35),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 35),
+ (2, HUFFMAN_EMIT_SYMBOL, 62),
+ (9, HUFFMAN_EMIT_SYMBOL, 62),
+ (23, HUFFMAN_EMIT_SYMBOL, 62),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 62),
+
+ # Node 83
+ (3, HUFFMAN_EMIT_SYMBOL, 35),
+ (6, HUFFMAN_EMIT_SYMBOL, 35),
+ (10, HUFFMAN_EMIT_SYMBOL, 35),
+ (15, HUFFMAN_EMIT_SYMBOL, 35),
+ (24, HUFFMAN_EMIT_SYMBOL, 35),
+ (31, HUFFMAN_EMIT_SYMBOL, 35),
+ (41, HUFFMAN_EMIT_SYMBOL, 35),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 35),
+ (3, HUFFMAN_EMIT_SYMBOL, 62),
+ (6, HUFFMAN_EMIT_SYMBOL, 62),
+ (10, HUFFMAN_EMIT_SYMBOL, 62),
+ (15, HUFFMAN_EMIT_SYMBOL, 62),
+ (24, HUFFMAN_EMIT_SYMBOL, 62),
+ (31, HUFFMAN_EMIT_SYMBOL, 62),
+ (41, HUFFMAN_EMIT_SYMBOL, 62),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 62),
+
+ # Node 84
+ (1, HUFFMAN_EMIT_SYMBOL, 0),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 0),
+ (1, HUFFMAN_EMIT_SYMBOL, 36),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 36),
+ (1, HUFFMAN_EMIT_SYMBOL, 64),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 64),
+ (1, HUFFMAN_EMIT_SYMBOL, 91),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 91),
+ (1, HUFFMAN_EMIT_SYMBOL, 93),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 93),
+ (1, HUFFMAN_EMIT_SYMBOL, 126),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 126),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 94),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 125),
+ (93, 0, 0),
+ (94, 0, 0),
+
+ # Node 85
+ (2, HUFFMAN_EMIT_SYMBOL, 0),
+ (9, HUFFMAN_EMIT_SYMBOL, 0),
+ (23, HUFFMAN_EMIT_SYMBOL, 0),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 0),
+ (2, HUFFMAN_EMIT_SYMBOL, 36),
+ (9, HUFFMAN_EMIT_SYMBOL, 36),
+ (23, HUFFMAN_EMIT_SYMBOL, 36),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 36),
+ (2, HUFFMAN_EMIT_SYMBOL, 64),
+ (9, HUFFMAN_EMIT_SYMBOL, 64),
+ (23, HUFFMAN_EMIT_SYMBOL, 64),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 64),
+ (2, HUFFMAN_EMIT_SYMBOL, 91),
+ (9, HUFFMAN_EMIT_SYMBOL, 91),
+ (23, HUFFMAN_EMIT_SYMBOL, 91),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 91),
+
+ # Node 86
+ (3, HUFFMAN_EMIT_SYMBOL, 0),
+ (6, HUFFMAN_EMIT_SYMBOL, 0),
+ (10, HUFFMAN_EMIT_SYMBOL, 0),
+ (15, HUFFMAN_EMIT_SYMBOL, 0),
+ (24, HUFFMAN_EMIT_SYMBOL, 0),
+ (31, HUFFMAN_EMIT_SYMBOL, 0),
+ (41, HUFFMAN_EMIT_SYMBOL, 0),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 0),
+ (3, HUFFMAN_EMIT_SYMBOL, 36),
+ (6, HUFFMAN_EMIT_SYMBOL, 36),
+ (10, HUFFMAN_EMIT_SYMBOL, 36),
+ (15, HUFFMAN_EMIT_SYMBOL, 36),
+ (24, HUFFMAN_EMIT_SYMBOL, 36),
+ (31, HUFFMAN_EMIT_SYMBOL, 36),
+ (41, HUFFMAN_EMIT_SYMBOL, 36),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 36),
+
+ # Node 87
+ (3, HUFFMAN_EMIT_SYMBOL, 64),
+ (6, HUFFMAN_EMIT_SYMBOL, 64),
+ (10, HUFFMAN_EMIT_SYMBOL, 64),
+ (15, HUFFMAN_EMIT_SYMBOL, 64),
+ (24, HUFFMAN_EMIT_SYMBOL, 64),
+ (31, HUFFMAN_EMIT_SYMBOL, 64),
+ (41, HUFFMAN_EMIT_SYMBOL, 64),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 64),
+ (3, HUFFMAN_EMIT_SYMBOL, 91),
+ (6, HUFFMAN_EMIT_SYMBOL, 91),
+ (10, HUFFMAN_EMIT_SYMBOL, 91),
+ (15, HUFFMAN_EMIT_SYMBOL, 91),
+ (24, HUFFMAN_EMIT_SYMBOL, 91),
+ (31, HUFFMAN_EMIT_SYMBOL, 91),
+ (41, HUFFMAN_EMIT_SYMBOL, 91),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 91),
+
+ # Node 88
+ (2, HUFFMAN_EMIT_SYMBOL, 93),
+ (9, HUFFMAN_EMIT_SYMBOL, 93),
+ (23, HUFFMAN_EMIT_SYMBOL, 93),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 93),
+ (2, HUFFMAN_EMIT_SYMBOL, 126),
+ (9, HUFFMAN_EMIT_SYMBOL, 126),
+ (23, HUFFMAN_EMIT_SYMBOL, 126),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 126),
+ (1, HUFFMAN_EMIT_SYMBOL, 94),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 94),
+ (1, HUFFMAN_EMIT_SYMBOL, 125),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 125),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 60),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 96),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 123),
+ (95, 0, 0),
+
+ # Node 89
+ (3, HUFFMAN_EMIT_SYMBOL, 93),
+ (6, HUFFMAN_EMIT_SYMBOL, 93),
+ (10, HUFFMAN_EMIT_SYMBOL, 93),
+ (15, HUFFMAN_EMIT_SYMBOL, 93),
+ (24, HUFFMAN_EMIT_SYMBOL, 93),
+ (31, HUFFMAN_EMIT_SYMBOL, 93),
+ (41, HUFFMAN_EMIT_SYMBOL, 93),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 93),
+ (3, HUFFMAN_EMIT_SYMBOL, 126),
+ (6, HUFFMAN_EMIT_SYMBOL, 126),
+ (10, HUFFMAN_EMIT_SYMBOL, 126),
+ (15, HUFFMAN_EMIT_SYMBOL, 126),
+ (24, HUFFMAN_EMIT_SYMBOL, 126),
+ (31, HUFFMAN_EMIT_SYMBOL, 126),
+ (41, HUFFMAN_EMIT_SYMBOL, 126),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 126),
+
+ # Node 90
+ (2, HUFFMAN_EMIT_SYMBOL, 94),
+ (9, HUFFMAN_EMIT_SYMBOL, 94),
+ (23, HUFFMAN_EMIT_SYMBOL, 94),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 94),
+ (2, HUFFMAN_EMIT_SYMBOL, 125),
+ (9, HUFFMAN_EMIT_SYMBOL, 125),
+ (23, HUFFMAN_EMIT_SYMBOL, 125),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 125),
+ (1, HUFFMAN_EMIT_SYMBOL, 60),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 60),
+ (1, HUFFMAN_EMIT_SYMBOL, 96),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 96),
+ (1, HUFFMAN_EMIT_SYMBOL, 123),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 123),
+ (96, 0, 0),
+ (110, 0, 0),
+
+ # Node 91
+ (3, HUFFMAN_EMIT_SYMBOL, 94),
+ (6, HUFFMAN_EMIT_SYMBOL, 94),
+ (10, HUFFMAN_EMIT_SYMBOL, 94),
+ (15, HUFFMAN_EMIT_SYMBOL, 94),
+ (24, HUFFMAN_EMIT_SYMBOL, 94),
+ (31, HUFFMAN_EMIT_SYMBOL, 94),
+ (41, HUFFMAN_EMIT_SYMBOL, 94),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 94),
+ (3, HUFFMAN_EMIT_SYMBOL, 125),
+ (6, HUFFMAN_EMIT_SYMBOL, 125),
+ (10, HUFFMAN_EMIT_SYMBOL, 125),
+ (15, HUFFMAN_EMIT_SYMBOL, 125),
+ (24, HUFFMAN_EMIT_SYMBOL, 125),
+ (31, HUFFMAN_EMIT_SYMBOL, 125),
+ (41, HUFFMAN_EMIT_SYMBOL, 125),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 125),
+
+ # Node 92
+ (2, HUFFMAN_EMIT_SYMBOL, 60),
+ (9, HUFFMAN_EMIT_SYMBOL, 60),
+ (23, HUFFMAN_EMIT_SYMBOL, 60),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 60),
+ (2, HUFFMAN_EMIT_SYMBOL, 96),
+ (9, HUFFMAN_EMIT_SYMBOL, 96),
+ (23, HUFFMAN_EMIT_SYMBOL, 96),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 96),
+ (2, HUFFMAN_EMIT_SYMBOL, 123),
+ (9, HUFFMAN_EMIT_SYMBOL, 123),
+ (23, HUFFMAN_EMIT_SYMBOL, 123),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 123),
+ (97, 0, 0),
+ (101, 0, 0),
+ (111, 0, 0),
+ (133, 0, 0),
+
+ # Node 93
+ (3, HUFFMAN_EMIT_SYMBOL, 60),
+ (6, HUFFMAN_EMIT_SYMBOL, 60),
+ (10, HUFFMAN_EMIT_SYMBOL, 60),
+ (15, HUFFMAN_EMIT_SYMBOL, 60),
+ (24, HUFFMAN_EMIT_SYMBOL, 60),
+ (31, HUFFMAN_EMIT_SYMBOL, 60),
+ (41, HUFFMAN_EMIT_SYMBOL, 60),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 60),
+ (3, HUFFMAN_EMIT_SYMBOL, 96),
+ (6, HUFFMAN_EMIT_SYMBOL, 96),
+ (10, HUFFMAN_EMIT_SYMBOL, 96),
+ (15, HUFFMAN_EMIT_SYMBOL, 96),
+ (24, HUFFMAN_EMIT_SYMBOL, 96),
+ (31, HUFFMAN_EMIT_SYMBOL, 96),
+ (41, HUFFMAN_EMIT_SYMBOL, 96),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 96),
+
+ # Node 94
+ (3, HUFFMAN_EMIT_SYMBOL, 123),
+ (6, HUFFMAN_EMIT_SYMBOL, 123),
+ (10, HUFFMAN_EMIT_SYMBOL, 123),
+ (15, HUFFMAN_EMIT_SYMBOL, 123),
+ (24, HUFFMAN_EMIT_SYMBOL, 123),
+ (31, HUFFMAN_EMIT_SYMBOL, 123),
+ (41, HUFFMAN_EMIT_SYMBOL, 123),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 123),
+ (98, 0, 0),
+ (99, 0, 0),
+ (102, 0, 0),
+ (105, 0, 0),
+ (112, 0, 0),
+ (119, 0, 0),
+ (134, 0, 0),
+ (153, 0, 0),
+
+ # Node 95
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 92),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 195),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 208),
+ (100, 0, 0),
+ (103, 0, 0),
+ (104, 0, 0),
+ (106, 0, 0),
+ (107, 0, 0),
+ (113, 0, 0),
+ (116, 0, 0),
+ (120, 0, 0),
+ (126, 0, 0),
+ (135, 0, 0),
+ (142, 0, 0),
+ (154, 0, 0),
+ (169, 0, 0),
+
+ # Node 96
+ (1, HUFFMAN_EMIT_SYMBOL, 92),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 92),
+ (1, HUFFMAN_EMIT_SYMBOL, 195),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 195),
+ (1, HUFFMAN_EMIT_SYMBOL, 208),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 208),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 128),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 130),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 131),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 162),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 184),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 194),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 224),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 226),
+ (108, 0, 0),
+ (109, 0, 0),
+
+ # Node 97
+ (2, HUFFMAN_EMIT_SYMBOL, 92),
+ (9, HUFFMAN_EMIT_SYMBOL, 92),
+ (23, HUFFMAN_EMIT_SYMBOL, 92),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 92),
+ (2, HUFFMAN_EMIT_SYMBOL, 195),
+ (9, HUFFMAN_EMIT_SYMBOL, 195),
+ (23, HUFFMAN_EMIT_SYMBOL, 195),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 195),
+ (2, HUFFMAN_EMIT_SYMBOL, 208),
+ (9, HUFFMAN_EMIT_SYMBOL, 208),
+ (23, HUFFMAN_EMIT_SYMBOL, 208),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 208),
+ (1, HUFFMAN_EMIT_SYMBOL, 128),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 128),
+ (1, HUFFMAN_EMIT_SYMBOL, 130),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 130),
+
+ # Node 98
+ (3, HUFFMAN_EMIT_SYMBOL, 92),
+ (6, HUFFMAN_EMIT_SYMBOL, 92),
+ (10, HUFFMAN_EMIT_SYMBOL, 92),
+ (15, HUFFMAN_EMIT_SYMBOL, 92),
+ (24, HUFFMAN_EMIT_SYMBOL, 92),
+ (31, HUFFMAN_EMIT_SYMBOL, 92),
+ (41, HUFFMAN_EMIT_SYMBOL, 92),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 92),
+ (3, HUFFMAN_EMIT_SYMBOL, 195),
+ (6, HUFFMAN_EMIT_SYMBOL, 195),
+ (10, HUFFMAN_EMIT_SYMBOL, 195),
+ (15, HUFFMAN_EMIT_SYMBOL, 195),
+ (24, HUFFMAN_EMIT_SYMBOL, 195),
+ (31, HUFFMAN_EMIT_SYMBOL, 195),
+ (41, HUFFMAN_EMIT_SYMBOL, 195),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 195),
+
+ # Node 99
+ (3, HUFFMAN_EMIT_SYMBOL, 208),
+ (6, HUFFMAN_EMIT_SYMBOL, 208),
+ (10, HUFFMAN_EMIT_SYMBOL, 208),
+ (15, HUFFMAN_EMIT_SYMBOL, 208),
+ (24, HUFFMAN_EMIT_SYMBOL, 208),
+ (31, HUFFMAN_EMIT_SYMBOL, 208),
+ (41, HUFFMAN_EMIT_SYMBOL, 208),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 208),
+ (2, HUFFMAN_EMIT_SYMBOL, 128),
+ (9, HUFFMAN_EMIT_SYMBOL, 128),
+ (23, HUFFMAN_EMIT_SYMBOL, 128),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 128),
+ (2, HUFFMAN_EMIT_SYMBOL, 130),
+ (9, HUFFMAN_EMIT_SYMBOL, 130),
+ (23, HUFFMAN_EMIT_SYMBOL, 130),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 130),
+
+ # Node 100
+ (3, HUFFMAN_EMIT_SYMBOL, 128),
+ (6, HUFFMAN_EMIT_SYMBOL, 128),
+ (10, HUFFMAN_EMIT_SYMBOL, 128),
+ (15, HUFFMAN_EMIT_SYMBOL, 128),
+ (24, HUFFMAN_EMIT_SYMBOL, 128),
+ (31, HUFFMAN_EMIT_SYMBOL, 128),
+ (41, HUFFMAN_EMIT_SYMBOL, 128),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 128),
+ (3, HUFFMAN_EMIT_SYMBOL, 130),
+ (6, HUFFMAN_EMIT_SYMBOL, 130),
+ (10, HUFFMAN_EMIT_SYMBOL, 130),
+ (15, HUFFMAN_EMIT_SYMBOL, 130),
+ (24, HUFFMAN_EMIT_SYMBOL, 130),
+ (31, HUFFMAN_EMIT_SYMBOL, 130),
+ (41, HUFFMAN_EMIT_SYMBOL, 130),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 130),
+
+ # Node 101
+ (1, HUFFMAN_EMIT_SYMBOL, 131),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 131),
+ (1, HUFFMAN_EMIT_SYMBOL, 162),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 162),
+ (1, HUFFMAN_EMIT_SYMBOL, 184),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 184),
+ (1, HUFFMAN_EMIT_SYMBOL, 194),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 194),
+ (1, HUFFMAN_EMIT_SYMBOL, 224),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 224),
+ (1, HUFFMAN_EMIT_SYMBOL, 226),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 226),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 153),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 161),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 167),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 172),
+
+ # Node 102
+ (2, HUFFMAN_EMIT_SYMBOL, 131),
+ (9, HUFFMAN_EMIT_SYMBOL, 131),
+ (23, HUFFMAN_EMIT_SYMBOL, 131),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 131),
+ (2, HUFFMAN_EMIT_SYMBOL, 162),
+ (9, HUFFMAN_EMIT_SYMBOL, 162),
+ (23, HUFFMAN_EMIT_SYMBOL, 162),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 162),
+ (2, HUFFMAN_EMIT_SYMBOL, 184),
+ (9, HUFFMAN_EMIT_SYMBOL, 184),
+ (23, HUFFMAN_EMIT_SYMBOL, 184),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 184),
+ (2, HUFFMAN_EMIT_SYMBOL, 194),
+ (9, HUFFMAN_EMIT_SYMBOL, 194),
+ (23, HUFFMAN_EMIT_SYMBOL, 194),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 194),
+
+ # Node 103
+ (3, HUFFMAN_EMIT_SYMBOL, 131),
+ (6, HUFFMAN_EMIT_SYMBOL, 131),
+ (10, HUFFMAN_EMIT_SYMBOL, 131),
+ (15, HUFFMAN_EMIT_SYMBOL, 131),
+ (24, HUFFMAN_EMIT_SYMBOL, 131),
+ (31, HUFFMAN_EMIT_SYMBOL, 131),
+ (41, HUFFMAN_EMIT_SYMBOL, 131),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 131),
+ (3, HUFFMAN_EMIT_SYMBOL, 162),
+ (6, HUFFMAN_EMIT_SYMBOL, 162),
+ (10, HUFFMAN_EMIT_SYMBOL, 162),
+ (15, HUFFMAN_EMIT_SYMBOL, 162),
+ (24, HUFFMAN_EMIT_SYMBOL, 162),
+ (31, HUFFMAN_EMIT_SYMBOL, 162),
+ (41, HUFFMAN_EMIT_SYMBOL, 162),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 162),
+
+ # Node 104
+ (3, HUFFMAN_EMIT_SYMBOL, 184),
+ (6, HUFFMAN_EMIT_SYMBOL, 184),
+ (10, HUFFMAN_EMIT_SYMBOL, 184),
+ (15, HUFFMAN_EMIT_SYMBOL, 184),
+ (24, HUFFMAN_EMIT_SYMBOL, 184),
+ (31, HUFFMAN_EMIT_SYMBOL, 184),
+ (41, HUFFMAN_EMIT_SYMBOL, 184),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 184),
+ (3, HUFFMAN_EMIT_SYMBOL, 194),
+ (6, HUFFMAN_EMIT_SYMBOL, 194),
+ (10, HUFFMAN_EMIT_SYMBOL, 194),
+ (15, HUFFMAN_EMIT_SYMBOL, 194),
+ (24, HUFFMAN_EMIT_SYMBOL, 194),
+ (31, HUFFMAN_EMIT_SYMBOL, 194),
+ (41, HUFFMAN_EMIT_SYMBOL, 194),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 194),
+
+ # Node 105
+ (2, HUFFMAN_EMIT_SYMBOL, 224),
+ (9, HUFFMAN_EMIT_SYMBOL, 224),
+ (23, HUFFMAN_EMIT_SYMBOL, 224),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 224),
+ (2, HUFFMAN_EMIT_SYMBOL, 226),
+ (9, HUFFMAN_EMIT_SYMBOL, 226),
+ (23, HUFFMAN_EMIT_SYMBOL, 226),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 226),
+ (1, HUFFMAN_EMIT_SYMBOL, 153),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 153),
+ (1, HUFFMAN_EMIT_SYMBOL, 161),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 161),
+ (1, HUFFMAN_EMIT_SYMBOL, 167),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 167),
+ (1, HUFFMAN_EMIT_SYMBOL, 172),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 172),
+
+ # Node 106
+ (3, HUFFMAN_EMIT_SYMBOL, 224),
+ (6, HUFFMAN_EMIT_SYMBOL, 224),
+ (10, HUFFMAN_EMIT_SYMBOL, 224),
+ (15, HUFFMAN_EMIT_SYMBOL, 224),
+ (24, HUFFMAN_EMIT_SYMBOL, 224),
+ (31, HUFFMAN_EMIT_SYMBOL, 224),
+ (41, HUFFMAN_EMIT_SYMBOL, 224),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 224),
+ (3, HUFFMAN_EMIT_SYMBOL, 226),
+ (6, HUFFMAN_EMIT_SYMBOL, 226),
+ (10, HUFFMAN_EMIT_SYMBOL, 226),
+ (15, HUFFMAN_EMIT_SYMBOL, 226),
+ (24, HUFFMAN_EMIT_SYMBOL, 226),
+ (31, HUFFMAN_EMIT_SYMBOL, 226),
+ (41, HUFFMAN_EMIT_SYMBOL, 226),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 226),
+
+ # Node 107
+ (2, HUFFMAN_EMIT_SYMBOL, 153),
+ (9, HUFFMAN_EMIT_SYMBOL, 153),
+ (23, HUFFMAN_EMIT_SYMBOL, 153),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 153),
+ (2, HUFFMAN_EMIT_SYMBOL, 161),
+ (9, HUFFMAN_EMIT_SYMBOL, 161),
+ (23, HUFFMAN_EMIT_SYMBOL, 161),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 161),
+ (2, HUFFMAN_EMIT_SYMBOL, 167),
+ (9, HUFFMAN_EMIT_SYMBOL, 167),
+ (23, HUFFMAN_EMIT_SYMBOL, 167),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 167),
+ (2, HUFFMAN_EMIT_SYMBOL, 172),
+ (9, HUFFMAN_EMIT_SYMBOL, 172),
+ (23, HUFFMAN_EMIT_SYMBOL, 172),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 172),
+
+ # Node 108
+ (3, HUFFMAN_EMIT_SYMBOL, 153),
+ (6, HUFFMAN_EMIT_SYMBOL, 153),
+ (10, HUFFMAN_EMIT_SYMBOL, 153),
+ (15, HUFFMAN_EMIT_SYMBOL, 153),
+ (24, HUFFMAN_EMIT_SYMBOL, 153),
+ (31, HUFFMAN_EMIT_SYMBOL, 153),
+ (41, HUFFMAN_EMIT_SYMBOL, 153),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 153),
+ (3, HUFFMAN_EMIT_SYMBOL, 161),
+ (6, HUFFMAN_EMIT_SYMBOL, 161),
+ (10, HUFFMAN_EMIT_SYMBOL, 161),
+ (15, HUFFMAN_EMIT_SYMBOL, 161),
+ (24, HUFFMAN_EMIT_SYMBOL, 161),
+ (31, HUFFMAN_EMIT_SYMBOL, 161),
+ (41, HUFFMAN_EMIT_SYMBOL, 161),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 161),
+
+ # Node 109
+ (3, HUFFMAN_EMIT_SYMBOL, 167),
+ (6, HUFFMAN_EMIT_SYMBOL, 167),
+ (10, HUFFMAN_EMIT_SYMBOL, 167),
+ (15, HUFFMAN_EMIT_SYMBOL, 167),
+ (24, HUFFMAN_EMIT_SYMBOL, 167),
+ (31, HUFFMAN_EMIT_SYMBOL, 167),
+ (41, HUFFMAN_EMIT_SYMBOL, 167),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 167),
+ (3, HUFFMAN_EMIT_SYMBOL, 172),
+ (6, HUFFMAN_EMIT_SYMBOL, 172),
+ (10, HUFFMAN_EMIT_SYMBOL, 172),
+ (15, HUFFMAN_EMIT_SYMBOL, 172),
+ (24, HUFFMAN_EMIT_SYMBOL, 172),
+ (31, HUFFMAN_EMIT_SYMBOL, 172),
+ (41, HUFFMAN_EMIT_SYMBOL, 172),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 172),
+
+ # Node 110
+ (114, 0, 0),
+ (115, 0, 0),
+ (117, 0, 0),
+ (118, 0, 0),
+ (121, 0, 0),
+ (123, 0, 0),
+ (127, 0, 0),
+ (130, 0, 0),
+ (136, 0, 0),
+ (139, 0, 0),
+ (143, 0, 0),
+ (146, 0, 0),
+ (155, 0, 0),
+ (162, 0, 0),
+ (170, 0, 0),
+ (180, 0, 0),
+
+ # Node 111
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 176),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 177),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 179),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 209),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 216),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 217),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 227),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 229),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 230),
+ (122, 0, 0),
+ (124, 0, 0),
+ (125, 0, 0),
+ (128, 0, 0),
+ (129, 0, 0),
+ (131, 0, 0),
+ (132, 0, 0),
+
+ # Node 112
+ (1, HUFFMAN_EMIT_SYMBOL, 176),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 176),
+ (1, HUFFMAN_EMIT_SYMBOL, 177),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 177),
+ (1, HUFFMAN_EMIT_SYMBOL, 179),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 179),
+ (1, HUFFMAN_EMIT_SYMBOL, 209),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 209),
+ (1, HUFFMAN_EMIT_SYMBOL, 216),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 216),
+ (1, HUFFMAN_EMIT_SYMBOL, 217),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 217),
+ (1, HUFFMAN_EMIT_SYMBOL, 227),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 227),
+ (1, HUFFMAN_EMIT_SYMBOL, 229),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 229),
+
+ # Node 113
+ (2, HUFFMAN_EMIT_SYMBOL, 176),
+ (9, HUFFMAN_EMIT_SYMBOL, 176),
+ (23, HUFFMAN_EMIT_SYMBOL, 176),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 176),
+ (2, HUFFMAN_EMIT_SYMBOL, 177),
+ (9, HUFFMAN_EMIT_SYMBOL, 177),
+ (23, HUFFMAN_EMIT_SYMBOL, 177),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 177),
+ (2, HUFFMAN_EMIT_SYMBOL, 179),
+ (9, HUFFMAN_EMIT_SYMBOL, 179),
+ (23, HUFFMAN_EMIT_SYMBOL, 179),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 179),
+ (2, HUFFMAN_EMIT_SYMBOL, 209),
+ (9, HUFFMAN_EMIT_SYMBOL, 209),
+ (23, HUFFMAN_EMIT_SYMBOL, 209),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 209),
+
+ # Node 114
+ (3, HUFFMAN_EMIT_SYMBOL, 176),
+ (6, HUFFMAN_EMIT_SYMBOL, 176),
+ (10, HUFFMAN_EMIT_SYMBOL, 176),
+ (15, HUFFMAN_EMIT_SYMBOL, 176),
+ (24, HUFFMAN_EMIT_SYMBOL, 176),
+ (31, HUFFMAN_EMIT_SYMBOL, 176),
+ (41, HUFFMAN_EMIT_SYMBOL, 176),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 176),
+ (3, HUFFMAN_EMIT_SYMBOL, 177),
+ (6, HUFFMAN_EMIT_SYMBOL, 177),
+ (10, HUFFMAN_EMIT_SYMBOL, 177),
+ (15, HUFFMAN_EMIT_SYMBOL, 177),
+ (24, HUFFMAN_EMIT_SYMBOL, 177),
+ (31, HUFFMAN_EMIT_SYMBOL, 177),
+ (41, HUFFMAN_EMIT_SYMBOL, 177),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 177),
+
+ # Node 115
+ (3, HUFFMAN_EMIT_SYMBOL, 179),
+ (6, HUFFMAN_EMIT_SYMBOL, 179),
+ (10, HUFFMAN_EMIT_SYMBOL, 179),
+ (15, HUFFMAN_EMIT_SYMBOL, 179),
+ (24, HUFFMAN_EMIT_SYMBOL, 179),
+ (31, HUFFMAN_EMIT_SYMBOL, 179),
+ (41, HUFFMAN_EMIT_SYMBOL, 179),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 179),
+ (3, HUFFMAN_EMIT_SYMBOL, 209),
+ (6, HUFFMAN_EMIT_SYMBOL, 209),
+ (10, HUFFMAN_EMIT_SYMBOL, 209),
+ (15, HUFFMAN_EMIT_SYMBOL, 209),
+ (24, HUFFMAN_EMIT_SYMBOL, 209),
+ (31, HUFFMAN_EMIT_SYMBOL, 209),
+ (41, HUFFMAN_EMIT_SYMBOL, 209),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 209),
+
+ # Node 116
+ (2, HUFFMAN_EMIT_SYMBOL, 216),
+ (9, HUFFMAN_EMIT_SYMBOL, 216),
+ (23, HUFFMAN_EMIT_SYMBOL, 216),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 216),
+ (2, HUFFMAN_EMIT_SYMBOL, 217),
+ (9, HUFFMAN_EMIT_SYMBOL, 217),
+ (23, HUFFMAN_EMIT_SYMBOL, 217),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 217),
+ (2, HUFFMAN_EMIT_SYMBOL, 227),
+ (9, HUFFMAN_EMIT_SYMBOL, 227),
+ (23, HUFFMAN_EMIT_SYMBOL, 227),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 227),
+ (2, HUFFMAN_EMIT_SYMBOL, 229),
+ (9, HUFFMAN_EMIT_SYMBOL, 229),
+ (23, HUFFMAN_EMIT_SYMBOL, 229),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 229),
+
+ # Node 117
+ (3, HUFFMAN_EMIT_SYMBOL, 216),
+ (6, HUFFMAN_EMIT_SYMBOL, 216),
+ (10, HUFFMAN_EMIT_SYMBOL, 216),
+ (15, HUFFMAN_EMIT_SYMBOL, 216),
+ (24, HUFFMAN_EMIT_SYMBOL, 216),
+ (31, HUFFMAN_EMIT_SYMBOL, 216),
+ (41, HUFFMAN_EMIT_SYMBOL, 216),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 216),
+ (3, HUFFMAN_EMIT_SYMBOL, 217),
+ (6, HUFFMAN_EMIT_SYMBOL, 217),
+ (10, HUFFMAN_EMIT_SYMBOL, 217),
+ (15, HUFFMAN_EMIT_SYMBOL, 217),
+ (24, HUFFMAN_EMIT_SYMBOL, 217),
+ (31, HUFFMAN_EMIT_SYMBOL, 217),
+ (41, HUFFMAN_EMIT_SYMBOL, 217),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 217),
+
+ # Node 118
+ (3, HUFFMAN_EMIT_SYMBOL, 227),
+ (6, HUFFMAN_EMIT_SYMBOL, 227),
+ (10, HUFFMAN_EMIT_SYMBOL, 227),
+ (15, HUFFMAN_EMIT_SYMBOL, 227),
+ (24, HUFFMAN_EMIT_SYMBOL, 227),
+ (31, HUFFMAN_EMIT_SYMBOL, 227),
+ (41, HUFFMAN_EMIT_SYMBOL, 227),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 227),
+ (3, HUFFMAN_EMIT_SYMBOL, 229),
+ (6, HUFFMAN_EMIT_SYMBOL, 229),
+ (10, HUFFMAN_EMIT_SYMBOL, 229),
+ (15, HUFFMAN_EMIT_SYMBOL, 229),
+ (24, HUFFMAN_EMIT_SYMBOL, 229),
+ (31, HUFFMAN_EMIT_SYMBOL, 229),
+ (41, HUFFMAN_EMIT_SYMBOL, 229),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 229),
+
+ # Node 119
+ (1, HUFFMAN_EMIT_SYMBOL, 230),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 230),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 129),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 132),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 133),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 134),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 136),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 146),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 154),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 156),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 160),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 163),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 164),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 169),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 170),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 173),
+
+ # Node 120
+ (2, HUFFMAN_EMIT_SYMBOL, 230),
+ (9, HUFFMAN_EMIT_SYMBOL, 230),
+ (23, HUFFMAN_EMIT_SYMBOL, 230),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 230),
+ (1, HUFFMAN_EMIT_SYMBOL, 129),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 129),
+ (1, HUFFMAN_EMIT_SYMBOL, 132),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 132),
+ (1, HUFFMAN_EMIT_SYMBOL, 133),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 133),
+ (1, HUFFMAN_EMIT_SYMBOL, 134),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 134),
+ (1, HUFFMAN_EMIT_SYMBOL, 136),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 136),
+ (1, HUFFMAN_EMIT_SYMBOL, 146),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 146),
+
+ # Node 121
+ (3, HUFFMAN_EMIT_SYMBOL, 230),
+ (6, HUFFMAN_EMIT_SYMBOL, 230),
+ (10, HUFFMAN_EMIT_SYMBOL, 230),
+ (15, HUFFMAN_EMIT_SYMBOL, 230),
+ (24, HUFFMAN_EMIT_SYMBOL, 230),
+ (31, HUFFMAN_EMIT_SYMBOL, 230),
+ (41, HUFFMAN_EMIT_SYMBOL, 230),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 230),
+ (2, HUFFMAN_EMIT_SYMBOL, 129),
+ (9, HUFFMAN_EMIT_SYMBOL, 129),
+ (23, HUFFMAN_EMIT_SYMBOL, 129),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 129),
+ (2, HUFFMAN_EMIT_SYMBOL, 132),
+ (9, HUFFMAN_EMIT_SYMBOL, 132),
+ (23, HUFFMAN_EMIT_SYMBOL, 132),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 132),
+
+ # Node 122
+ (3, HUFFMAN_EMIT_SYMBOL, 129),
+ (6, HUFFMAN_EMIT_SYMBOL, 129),
+ (10, HUFFMAN_EMIT_SYMBOL, 129),
+ (15, HUFFMAN_EMIT_SYMBOL, 129),
+ (24, HUFFMAN_EMIT_SYMBOL, 129),
+ (31, HUFFMAN_EMIT_SYMBOL, 129),
+ (41, HUFFMAN_EMIT_SYMBOL, 129),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 129),
+ (3, HUFFMAN_EMIT_SYMBOL, 132),
+ (6, HUFFMAN_EMIT_SYMBOL, 132),
+ (10, HUFFMAN_EMIT_SYMBOL, 132),
+ (15, HUFFMAN_EMIT_SYMBOL, 132),
+ (24, HUFFMAN_EMIT_SYMBOL, 132),
+ (31, HUFFMAN_EMIT_SYMBOL, 132),
+ (41, HUFFMAN_EMIT_SYMBOL, 132),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 132),
+
+ # Node 123
+ (2, HUFFMAN_EMIT_SYMBOL, 133),
+ (9, HUFFMAN_EMIT_SYMBOL, 133),
+ (23, HUFFMAN_EMIT_SYMBOL, 133),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 133),
+ (2, HUFFMAN_EMIT_SYMBOL, 134),
+ (9, HUFFMAN_EMIT_SYMBOL, 134),
+ (23, HUFFMAN_EMIT_SYMBOL, 134),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 134),
+ (2, HUFFMAN_EMIT_SYMBOL, 136),
+ (9, HUFFMAN_EMIT_SYMBOL, 136),
+ (23, HUFFMAN_EMIT_SYMBOL, 136),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 136),
+ (2, HUFFMAN_EMIT_SYMBOL, 146),
+ (9, HUFFMAN_EMIT_SYMBOL, 146),
+ (23, HUFFMAN_EMIT_SYMBOL, 146),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 146),
+
+ # Node 124
+ (3, HUFFMAN_EMIT_SYMBOL, 133),
+ (6, HUFFMAN_EMIT_SYMBOL, 133),
+ (10, HUFFMAN_EMIT_SYMBOL, 133),
+ (15, HUFFMAN_EMIT_SYMBOL, 133),
+ (24, HUFFMAN_EMIT_SYMBOL, 133),
+ (31, HUFFMAN_EMIT_SYMBOL, 133),
+ (41, HUFFMAN_EMIT_SYMBOL, 133),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 133),
+ (3, HUFFMAN_EMIT_SYMBOL, 134),
+ (6, HUFFMAN_EMIT_SYMBOL, 134),
+ (10, HUFFMAN_EMIT_SYMBOL, 134),
+ (15, HUFFMAN_EMIT_SYMBOL, 134),
+ (24, HUFFMAN_EMIT_SYMBOL, 134),
+ (31, HUFFMAN_EMIT_SYMBOL, 134),
+ (41, HUFFMAN_EMIT_SYMBOL, 134),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 134),
+
+ # Node 125
+ (3, HUFFMAN_EMIT_SYMBOL, 136),
+ (6, HUFFMAN_EMIT_SYMBOL, 136),
+ (10, HUFFMAN_EMIT_SYMBOL, 136),
+ (15, HUFFMAN_EMIT_SYMBOL, 136),
+ (24, HUFFMAN_EMIT_SYMBOL, 136),
+ (31, HUFFMAN_EMIT_SYMBOL, 136),
+ (41, HUFFMAN_EMIT_SYMBOL, 136),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 136),
+ (3, HUFFMAN_EMIT_SYMBOL, 146),
+ (6, HUFFMAN_EMIT_SYMBOL, 146),
+ (10, HUFFMAN_EMIT_SYMBOL, 146),
+ (15, HUFFMAN_EMIT_SYMBOL, 146),
+ (24, HUFFMAN_EMIT_SYMBOL, 146),
+ (31, HUFFMAN_EMIT_SYMBOL, 146),
+ (41, HUFFMAN_EMIT_SYMBOL, 146),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 146),
+
+ # Node 126
+ (1, HUFFMAN_EMIT_SYMBOL, 154),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 154),
+ (1, HUFFMAN_EMIT_SYMBOL, 156),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 156),
+ (1, HUFFMAN_EMIT_SYMBOL, 160),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 160),
+ (1, HUFFMAN_EMIT_SYMBOL, 163),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 163),
+ (1, HUFFMAN_EMIT_SYMBOL, 164),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 164),
+ (1, HUFFMAN_EMIT_SYMBOL, 169),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 169),
+ (1, HUFFMAN_EMIT_SYMBOL, 170),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 170),
+ (1, HUFFMAN_EMIT_SYMBOL, 173),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 173),
+
+ # Node 127
+ (2, HUFFMAN_EMIT_SYMBOL, 154),
+ (9, HUFFMAN_EMIT_SYMBOL, 154),
+ (23, HUFFMAN_EMIT_SYMBOL, 154),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 154),
+ (2, HUFFMAN_EMIT_SYMBOL, 156),
+ (9, HUFFMAN_EMIT_SYMBOL, 156),
+ (23, HUFFMAN_EMIT_SYMBOL, 156),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 156),
+ (2, HUFFMAN_EMIT_SYMBOL, 160),
+ (9, HUFFMAN_EMIT_SYMBOL, 160),
+ (23, HUFFMAN_EMIT_SYMBOL, 160),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 160),
+ (2, HUFFMAN_EMIT_SYMBOL, 163),
+ (9, HUFFMAN_EMIT_SYMBOL, 163),
+ (23, HUFFMAN_EMIT_SYMBOL, 163),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 163),
+
+ # Node 128
+ (3, HUFFMAN_EMIT_SYMBOL, 154),
+ (6, HUFFMAN_EMIT_SYMBOL, 154),
+ (10, HUFFMAN_EMIT_SYMBOL, 154),
+ (15, HUFFMAN_EMIT_SYMBOL, 154),
+ (24, HUFFMAN_EMIT_SYMBOL, 154),
+ (31, HUFFMAN_EMIT_SYMBOL, 154),
+ (41, HUFFMAN_EMIT_SYMBOL, 154),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 154),
+ (3, HUFFMAN_EMIT_SYMBOL, 156),
+ (6, HUFFMAN_EMIT_SYMBOL, 156),
+ (10, HUFFMAN_EMIT_SYMBOL, 156),
+ (15, HUFFMAN_EMIT_SYMBOL, 156),
+ (24, HUFFMAN_EMIT_SYMBOL, 156),
+ (31, HUFFMAN_EMIT_SYMBOL, 156),
+ (41, HUFFMAN_EMIT_SYMBOL, 156),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 156),
+
+ # Node 129
+ (3, HUFFMAN_EMIT_SYMBOL, 160),
+ (6, HUFFMAN_EMIT_SYMBOL, 160),
+ (10, HUFFMAN_EMIT_SYMBOL, 160),
+ (15, HUFFMAN_EMIT_SYMBOL, 160),
+ (24, HUFFMAN_EMIT_SYMBOL, 160),
+ (31, HUFFMAN_EMIT_SYMBOL, 160),
+ (41, HUFFMAN_EMIT_SYMBOL, 160),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 160),
+ (3, HUFFMAN_EMIT_SYMBOL, 163),
+ (6, HUFFMAN_EMIT_SYMBOL, 163),
+ (10, HUFFMAN_EMIT_SYMBOL, 163),
+ (15, HUFFMAN_EMIT_SYMBOL, 163),
+ (24, HUFFMAN_EMIT_SYMBOL, 163),
+ (31, HUFFMAN_EMIT_SYMBOL, 163),
+ (41, HUFFMAN_EMIT_SYMBOL, 163),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 163),
+
+ # Node 130
+ (2, HUFFMAN_EMIT_SYMBOL, 164),
+ (9, HUFFMAN_EMIT_SYMBOL, 164),
+ (23, HUFFMAN_EMIT_SYMBOL, 164),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 164),
+ (2, HUFFMAN_EMIT_SYMBOL, 169),
+ (9, HUFFMAN_EMIT_SYMBOL, 169),
+ (23, HUFFMAN_EMIT_SYMBOL, 169),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 169),
+ (2, HUFFMAN_EMIT_SYMBOL, 170),
+ (9, HUFFMAN_EMIT_SYMBOL, 170),
+ (23, HUFFMAN_EMIT_SYMBOL, 170),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 170),
+ (2, HUFFMAN_EMIT_SYMBOL, 173),
+ (9, HUFFMAN_EMIT_SYMBOL, 173),
+ (23, HUFFMAN_EMIT_SYMBOL, 173),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 173),
+
+ # Node 131
+ (3, HUFFMAN_EMIT_SYMBOL, 164),
+ (6, HUFFMAN_EMIT_SYMBOL, 164),
+ (10, HUFFMAN_EMIT_SYMBOL, 164),
+ (15, HUFFMAN_EMIT_SYMBOL, 164),
+ (24, HUFFMAN_EMIT_SYMBOL, 164),
+ (31, HUFFMAN_EMIT_SYMBOL, 164),
+ (41, HUFFMAN_EMIT_SYMBOL, 164),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 164),
+ (3, HUFFMAN_EMIT_SYMBOL, 169),
+ (6, HUFFMAN_EMIT_SYMBOL, 169),
+ (10, HUFFMAN_EMIT_SYMBOL, 169),
+ (15, HUFFMAN_EMIT_SYMBOL, 169),
+ (24, HUFFMAN_EMIT_SYMBOL, 169),
+ (31, HUFFMAN_EMIT_SYMBOL, 169),
+ (41, HUFFMAN_EMIT_SYMBOL, 169),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 169),
+
+ # Node 132
+ (3, HUFFMAN_EMIT_SYMBOL, 170),
+ (6, HUFFMAN_EMIT_SYMBOL, 170),
+ (10, HUFFMAN_EMIT_SYMBOL, 170),
+ (15, HUFFMAN_EMIT_SYMBOL, 170),
+ (24, HUFFMAN_EMIT_SYMBOL, 170),
+ (31, HUFFMAN_EMIT_SYMBOL, 170),
+ (41, HUFFMAN_EMIT_SYMBOL, 170),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 170),
+ (3, HUFFMAN_EMIT_SYMBOL, 173),
+ (6, HUFFMAN_EMIT_SYMBOL, 173),
+ (10, HUFFMAN_EMIT_SYMBOL, 173),
+ (15, HUFFMAN_EMIT_SYMBOL, 173),
+ (24, HUFFMAN_EMIT_SYMBOL, 173),
+ (31, HUFFMAN_EMIT_SYMBOL, 173),
+ (41, HUFFMAN_EMIT_SYMBOL, 173),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 173),
+
+ # Node 133
+ (137, 0, 0),
+ (138, 0, 0),
+ (140, 0, 0),
+ (141, 0, 0),
+ (144, 0, 0),
+ (145, 0, 0),
+ (147, 0, 0),
+ (150, 0, 0),
+ (156, 0, 0),
+ (159, 0, 0),
+ (163, 0, 0),
+ (166, 0, 0),
+ (171, 0, 0),
+ (174, 0, 0),
+ (181, 0, 0),
+ (190, 0, 0),
+
+ # Node 134
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 178),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 181),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 185),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 186),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 187),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 189),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 190),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 196),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 198),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 228),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 232),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 233),
+ (148, 0, 0),
+ (149, 0, 0),
+ (151, 0, 0),
+ (152, 0, 0),
+
+ # Node 135
+ (1, HUFFMAN_EMIT_SYMBOL, 178),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 178),
+ (1, HUFFMAN_EMIT_SYMBOL, 181),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 181),
+ (1, HUFFMAN_EMIT_SYMBOL, 185),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 185),
+ (1, HUFFMAN_EMIT_SYMBOL, 186),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 186),
+ (1, HUFFMAN_EMIT_SYMBOL, 187),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 187),
+ (1, HUFFMAN_EMIT_SYMBOL, 189),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 189),
+ (1, HUFFMAN_EMIT_SYMBOL, 190),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 190),
+ (1, HUFFMAN_EMIT_SYMBOL, 196),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 196),
+
+ # Node 136
+ (2, HUFFMAN_EMIT_SYMBOL, 178),
+ (9, HUFFMAN_EMIT_SYMBOL, 178),
+ (23, HUFFMAN_EMIT_SYMBOL, 178),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 178),
+ (2, HUFFMAN_EMIT_SYMBOL, 181),
+ (9, HUFFMAN_EMIT_SYMBOL, 181),
+ (23, HUFFMAN_EMIT_SYMBOL, 181),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 181),
+ (2, HUFFMAN_EMIT_SYMBOL, 185),
+ (9, HUFFMAN_EMIT_SYMBOL, 185),
+ (23, HUFFMAN_EMIT_SYMBOL, 185),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 185),
+ (2, HUFFMAN_EMIT_SYMBOL, 186),
+ (9, HUFFMAN_EMIT_SYMBOL, 186),
+ (23, HUFFMAN_EMIT_SYMBOL, 186),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 186),
+
+ # Node 137
+ (3, HUFFMAN_EMIT_SYMBOL, 178),
+ (6, HUFFMAN_EMIT_SYMBOL, 178),
+ (10, HUFFMAN_EMIT_SYMBOL, 178),
+ (15, HUFFMAN_EMIT_SYMBOL, 178),
+ (24, HUFFMAN_EMIT_SYMBOL, 178),
+ (31, HUFFMAN_EMIT_SYMBOL, 178),
+ (41, HUFFMAN_EMIT_SYMBOL, 178),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 178),
+ (3, HUFFMAN_EMIT_SYMBOL, 181),
+ (6, HUFFMAN_EMIT_SYMBOL, 181),
+ (10, HUFFMAN_EMIT_SYMBOL, 181),
+ (15, HUFFMAN_EMIT_SYMBOL, 181),
+ (24, HUFFMAN_EMIT_SYMBOL, 181),
+ (31, HUFFMAN_EMIT_SYMBOL, 181),
+ (41, HUFFMAN_EMIT_SYMBOL, 181),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 181),
+
+ # Node 138
+ (3, HUFFMAN_EMIT_SYMBOL, 185),
+ (6, HUFFMAN_EMIT_SYMBOL, 185),
+ (10, HUFFMAN_EMIT_SYMBOL, 185),
+ (15, HUFFMAN_EMIT_SYMBOL, 185),
+ (24, HUFFMAN_EMIT_SYMBOL, 185),
+ (31, HUFFMAN_EMIT_SYMBOL, 185),
+ (41, HUFFMAN_EMIT_SYMBOL, 185),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 185),
+ (3, HUFFMAN_EMIT_SYMBOL, 186),
+ (6, HUFFMAN_EMIT_SYMBOL, 186),
+ (10, HUFFMAN_EMIT_SYMBOL, 186),
+ (15, HUFFMAN_EMIT_SYMBOL, 186),
+ (24, HUFFMAN_EMIT_SYMBOL, 186),
+ (31, HUFFMAN_EMIT_SYMBOL, 186),
+ (41, HUFFMAN_EMIT_SYMBOL, 186),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 186),
+
+ # Node 139
+ (2, HUFFMAN_EMIT_SYMBOL, 187),
+ (9, HUFFMAN_EMIT_SYMBOL, 187),
+ (23, HUFFMAN_EMIT_SYMBOL, 187),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 187),
+ (2, HUFFMAN_EMIT_SYMBOL, 189),
+ (9, HUFFMAN_EMIT_SYMBOL, 189),
+ (23, HUFFMAN_EMIT_SYMBOL, 189),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 189),
+ (2, HUFFMAN_EMIT_SYMBOL, 190),
+ (9, HUFFMAN_EMIT_SYMBOL, 190),
+ (23, HUFFMAN_EMIT_SYMBOL, 190),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 190),
+ (2, HUFFMAN_EMIT_SYMBOL, 196),
+ (9, HUFFMAN_EMIT_SYMBOL, 196),
+ (23, HUFFMAN_EMIT_SYMBOL, 196),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 196),
+
+ # Node 140
+ (3, HUFFMAN_EMIT_SYMBOL, 187),
+ (6, HUFFMAN_EMIT_SYMBOL, 187),
+ (10, HUFFMAN_EMIT_SYMBOL, 187),
+ (15, HUFFMAN_EMIT_SYMBOL, 187),
+ (24, HUFFMAN_EMIT_SYMBOL, 187),
+ (31, HUFFMAN_EMIT_SYMBOL, 187),
+ (41, HUFFMAN_EMIT_SYMBOL, 187),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 187),
+ (3, HUFFMAN_EMIT_SYMBOL, 189),
+ (6, HUFFMAN_EMIT_SYMBOL, 189),
+ (10, HUFFMAN_EMIT_SYMBOL, 189),
+ (15, HUFFMAN_EMIT_SYMBOL, 189),
+ (24, HUFFMAN_EMIT_SYMBOL, 189),
+ (31, HUFFMAN_EMIT_SYMBOL, 189),
+ (41, HUFFMAN_EMIT_SYMBOL, 189),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 189),
+
+ # Node 141
+ (3, HUFFMAN_EMIT_SYMBOL, 190),
+ (6, HUFFMAN_EMIT_SYMBOL, 190),
+ (10, HUFFMAN_EMIT_SYMBOL, 190),
+ (15, HUFFMAN_EMIT_SYMBOL, 190),
+ (24, HUFFMAN_EMIT_SYMBOL, 190),
+ (31, HUFFMAN_EMIT_SYMBOL, 190),
+ (41, HUFFMAN_EMIT_SYMBOL, 190),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 190),
+ (3, HUFFMAN_EMIT_SYMBOL, 196),
+ (6, HUFFMAN_EMIT_SYMBOL, 196),
+ (10, HUFFMAN_EMIT_SYMBOL, 196),
+ (15, HUFFMAN_EMIT_SYMBOL, 196),
+ (24, HUFFMAN_EMIT_SYMBOL, 196),
+ (31, HUFFMAN_EMIT_SYMBOL, 196),
+ (41, HUFFMAN_EMIT_SYMBOL, 196),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 196),
+
+ # Node 142
+ (1, HUFFMAN_EMIT_SYMBOL, 198),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 198),
+ (1, HUFFMAN_EMIT_SYMBOL, 228),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 228),
+ (1, HUFFMAN_EMIT_SYMBOL, 232),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 232),
+ (1, HUFFMAN_EMIT_SYMBOL, 233),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 233),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 1),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 135),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 137),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 138),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 139),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 140),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 141),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 143),
+
+ # Node 143
+ (2, HUFFMAN_EMIT_SYMBOL, 198),
+ (9, HUFFMAN_EMIT_SYMBOL, 198),
+ (23, HUFFMAN_EMIT_SYMBOL, 198),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 198),
+ (2, HUFFMAN_EMIT_SYMBOL, 228),
+ (9, HUFFMAN_EMIT_SYMBOL, 228),
+ (23, HUFFMAN_EMIT_SYMBOL, 228),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 228),
+ (2, HUFFMAN_EMIT_SYMBOL, 232),
+ (9, HUFFMAN_EMIT_SYMBOL, 232),
+ (23, HUFFMAN_EMIT_SYMBOL, 232),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 232),
+ (2, HUFFMAN_EMIT_SYMBOL, 233),
+ (9, HUFFMAN_EMIT_SYMBOL, 233),
+ (23, HUFFMAN_EMIT_SYMBOL, 233),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 233),
+
+ # Node 144
+ (3, HUFFMAN_EMIT_SYMBOL, 198),
+ (6, HUFFMAN_EMIT_SYMBOL, 198),
+ (10, HUFFMAN_EMIT_SYMBOL, 198),
+ (15, HUFFMAN_EMIT_SYMBOL, 198),
+ (24, HUFFMAN_EMIT_SYMBOL, 198),
+ (31, HUFFMAN_EMIT_SYMBOL, 198),
+ (41, HUFFMAN_EMIT_SYMBOL, 198),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 198),
+ (3, HUFFMAN_EMIT_SYMBOL, 228),
+ (6, HUFFMAN_EMIT_SYMBOL, 228),
+ (10, HUFFMAN_EMIT_SYMBOL, 228),
+ (15, HUFFMAN_EMIT_SYMBOL, 228),
+ (24, HUFFMAN_EMIT_SYMBOL, 228),
+ (31, HUFFMAN_EMIT_SYMBOL, 228),
+ (41, HUFFMAN_EMIT_SYMBOL, 228),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 228),
+
+ # Node 145
+ (3, HUFFMAN_EMIT_SYMBOL, 232),
+ (6, HUFFMAN_EMIT_SYMBOL, 232),
+ (10, HUFFMAN_EMIT_SYMBOL, 232),
+ (15, HUFFMAN_EMIT_SYMBOL, 232),
+ (24, HUFFMAN_EMIT_SYMBOL, 232),
+ (31, HUFFMAN_EMIT_SYMBOL, 232),
+ (41, HUFFMAN_EMIT_SYMBOL, 232),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 232),
+ (3, HUFFMAN_EMIT_SYMBOL, 233),
+ (6, HUFFMAN_EMIT_SYMBOL, 233),
+ (10, HUFFMAN_EMIT_SYMBOL, 233),
+ (15, HUFFMAN_EMIT_SYMBOL, 233),
+ (24, HUFFMAN_EMIT_SYMBOL, 233),
+ (31, HUFFMAN_EMIT_SYMBOL, 233),
+ (41, HUFFMAN_EMIT_SYMBOL, 233),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 233),
+
+ # Node 146
+ (1, HUFFMAN_EMIT_SYMBOL, 1),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 1),
+ (1, HUFFMAN_EMIT_SYMBOL, 135),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 135),
+ (1, HUFFMAN_EMIT_SYMBOL, 137),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 137),
+ (1, HUFFMAN_EMIT_SYMBOL, 138),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 138),
+ (1, HUFFMAN_EMIT_SYMBOL, 139),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 139),
+ (1, HUFFMAN_EMIT_SYMBOL, 140),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 140),
+ (1, HUFFMAN_EMIT_SYMBOL, 141),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 141),
+ (1, HUFFMAN_EMIT_SYMBOL, 143),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 143),
+
+ # Node 147
+ (2, HUFFMAN_EMIT_SYMBOL, 1),
+ (9, HUFFMAN_EMIT_SYMBOL, 1),
+ (23, HUFFMAN_EMIT_SYMBOL, 1),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 1),
+ (2, HUFFMAN_EMIT_SYMBOL, 135),
+ (9, HUFFMAN_EMIT_SYMBOL, 135),
+ (23, HUFFMAN_EMIT_SYMBOL, 135),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 135),
+ (2, HUFFMAN_EMIT_SYMBOL, 137),
+ (9, HUFFMAN_EMIT_SYMBOL, 137),
+ (23, HUFFMAN_EMIT_SYMBOL, 137),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 137),
+ (2, HUFFMAN_EMIT_SYMBOL, 138),
+ (9, HUFFMAN_EMIT_SYMBOL, 138),
+ (23, HUFFMAN_EMIT_SYMBOL, 138),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 138),
+
+ # Node 148
+ (3, HUFFMAN_EMIT_SYMBOL, 1),
+ (6, HUFFMAN_EMIT_SYMBOL, 1),
+ (10, HUFFMAN_EMIT_SYMBOL, 1),
+ (15, HUFFMAN_EMIT_SYMBOL, 1),
+ (24, HUFFMAN_EMIT_SYMBOL, 1),
+ (31, HUFFMAN_EMIT_SYMBOL, 1),
+ (41, HUFFMAN_EMIT_SYMBOL, 1),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 1),
+ (3, HUFFMAN_EMIT_SYMBOL, 135),
+ (6, HUFFMAN_EMIT_SYMBOL, 135),
+ (10, HUFFMAN_EMIT_SYMBOL, 135),
+ (15, HUFFMAN_EMIT_SYMBOL, 135),
+ (24, HUFFMAN_EMIT_SYMBOL, 135),
+ (31, HUFFMAN_EMIT_SYMBOL, 135),
+ (41, HUFFMAN_EMIT_SYMBOL, 135),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 135),
+
+ # Node 149
+ (3, HUFFMAN_EMIT_SYMBOL, 137),
+ (6, HUFFMAN_EMIT_SYMBOL, 137),
+ (10, HUFFMAN_EMIT_SYMBOL, 137),
+ (15, HUFFMAN_EMIT_SYMBOL, 137),
+ (24, HUFFMAN_EMIT_SYMBOL, 137),
+ (31, HUFFMAN_EMIT_SYMBOL, 137),
+ (41, HUFFMAN_EMIT_SYMBOL, 137),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 137),
+ (3, HUFFMAN_EMIT_SYMBOL, 138),
+ (6, HUFFMAN_EMIT_SYMBOL, 138),
+ (10, HUFFMAN_EMIT_SYMBOL, 138),
+ (15, HUFFMAN_EMIT_SYMBOL, 138),
+ (24, HUFFMAN_EMIT_SYMBOL, 138),
+ (31, HUFFMAN_EMIT_SYMBOL, 138),
+ (41, HUFFMAN_EMIT_SYMBOL, 138),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 138),
+
+ # Node 150
+ (2, HUFFMAN_EMIT_SYMBOL, 139),
+ (9, HUFFMAN_EMIT_SYMBOL, 139),
+ (23, HUFFMAN_EMIT_SYMBOL, 139),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 139),
+ (2, HUFFMAN_EMIT_SYMBOL, 140),
+ (9, HUFFMAN_EMIT_SYMBOL, 140),
+ (23, HUFFMAN_EMIT_SYMBOL, 140),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 140),
+ (2, HUFFMAN_EMIT_SYMBOL, 141),
+ (9, HUFFMAN_EMIT_SYMBOL, 141),
+ (23, HUFFMAN_EMIT_SYMBOL, 141),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 141),
+ (2, HUFFMAN_EMIT_SYMBOL, 143),
+ (9, HUFFMAN_EMIT_SYMBOL, 143),
+ (23, HUFFMAN_EMIT_SYMBOL, 143),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 143),
+
+ # Node 151
+ (3, HUFFMAN_EMIT_SYMBOL, 139),
+ (6, HUFFMAN_EMIT_SYMBOL, 139),
+ (10, HUFFMAN_EMIT_SYMBOL, 139),
+ (15, HUFFMAN_EMIT_SYMBOL, 139),
+ (24, HUFFMAN_EMIT_SYMBOL, 139),
+ (31, HUFFMAN_EMIT_SYMBOL, 139),
+ (41, HUFFMAN_EMIT_SYMBOL, 139),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 139),
+ (3, HUFFMAN_EMIT_SYMBOL, 140),
+ (6, HUFFMAN_EMIT_SYMBOL, 140),
+ (10, HUFFMAN_EMIT_SYMBOL, 140),
+ (15, HUFFMAN_EMIT_SYMBOL, 140),
+ (24, HUFFMAN_EMIT_SYMBOL, 140),
+ (31, HUFFMAN_EMIT_SYMBOL, 140),
+ (41, HUFFMAN_EMIT_SYMBOL, 140),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 140),
+
+ # Node 152
+ (3, HUFFMAN_EMIT_SYMBOL, 141),
+ (6, HUFFMAN_EMIT_SYMBOL, 141),
+ (10, HUFFMAN_EMIT_SYMBOL, 141),
+ (15, HUFFMAN_EMIT_SYMBOL, 141),
+ (24, HUFFMAN_EMIT_SYMBOL, 141),
+ (31, HUFFMAN_EMIT_SYMBOL, 141),
+ (41, HUFFMAN_EMIT_SYMBOL, 141),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 141),
+ (3, HUFFMAN_EMIT_SYMBOL, 143),
+ (6, HUFFMAN_EMIT_SYMBOL, 143),
+ (10, HUFFMAN_EMIT_SYMBOL, 143),
+ (15, HUFFMAN_EMIT_SYMBOL, 143),
+ (24, HUFFMAN_EMIT_SYMBOL, 143),
+ (31, HUFFMAN_EMIT_SYMBOL, 143),
+ (41, HUFFMAN_EMIT_SYMBOL, 143),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 143),
+
+ # Node 153
+ (157, 0, 0),
+ (158, 0, 0),
+ (160, 0, 0),
+ (161, 0, 0),
+ (164, 0, 0),
+ (165, 0, 0),
+ (167, 0, 0),
+ (168, 0, 0),
+ (172, 0, 0),
+ (173, 0, 0),
+ (175, 0, 0),
+ (177, 0, 0),
+ (182, 0, 0),
+ (185, 0, 0),
+ (191, 0, 0),
+ (207, 0, 0),
+
+ # Node 154
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 147),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 149),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 150),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 151),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 152),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 155),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 157),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 158),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 165),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 166),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 168),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 174),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 175),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 180),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 182),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 183),
+
+ # Node 155
+ (1, HUFFMAN_EMIT_SYMBOL, 147),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 147),
+ (1, HUFFMAN_EMIT_SYMBOL, 149),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 149),
+ (1, HUFFMAN_EMIT_SYMBOL, 150),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 150),
+ (1, HUFFMAN_EMIT_SYMBOL, 151),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 151),
+ (1, HUFFMAN_EMIT_SYMBOL, 152),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 152),
+ (1, HUFFMAN_EMIT_SYMBOL, 155),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 155),
+ (1, HUFFMAN_EMIT_SYMBOL, 157),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 157),
+ (1, HUFFMAN_EMIT_SYMBOL, 158),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 158),
+
+ # Node 156
+ (2, HUFFMAN_EMIT_SYMBOL, 147),
+ (9, HUFFMAN_EMIT_SYMBOL, 147),
+ (23, HUFFMAN_EMIT_SYMBOL, 147),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 147),
+ (2, HUFFMAN_EMIT_SYMBOL, 149),
+ (9, HUFFMAN_EMIT_SYMBOL, 149),
+ (23, HUFFMAN_EMIT_SYMBOL, 149),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 149),
+ (2, HUFFMAN_EMIT_SYMBOL, 150),
+ (9, HUFFMAN_EMIT_SYMBOL, 150),
+ (23, HUFFMAN_EMIT_SYMBOL, 150),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 150),
+ (2, HUFFMAN_EMIT_SYMBOL, 151),
+ (9, HUFFMAN_EMIT_SYMBOL, 151),
+ (23, HUFFMAN_EMIT_SYMBOL, 151),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 151),
+
+ # Node 157
+ (3, HUFFMAN_EMIT_SYMBOL, 147),
+ (6, HUFFMAN_EMIT_SYMBOL, 147),
+ (10, HUFFMAN_EMIT_SYMBOL, 147),
+ (15, HUFFMAN_EMIT_SYMBOL, 147),
+ (24, HUFFMAN_EMIT_SYMBOL, 147),
+ (31, HUFFMAN_EMIT_SYMBOL, 147),
+ (41, HUFFMAN_EMIT_SYMBOL, 147),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 147),
+ (3, HUFFMAN_EMIT_SYMBOL, 149),
+ (6, HUFFMAN_EMIT_SYMBOL, 149),
+ (10, HUFFMAN_EMIT_SYMBOL, 149),
+ (15, HUFFMAN_EMIT_SYMBOL, 149),
+ (24, HUFFMAN_EMIT_SYMBOL, 149),
+ (31, HUFFMAN_EMIT_SYMBOL, 149),
+ (41, HUFFMAN_EMIT_SYMBOL, 149),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 149),
+
+ # Node 158
+ (3, HUFFMAN_EMIT_SYMBOL, 150),
+ (6, HUFFMAN_EMIT_SYMBOL, 150),
+ (10, HUFFMAN_EMIT_SYMBOL, 150),
+ (15, HUFFMAN_EMIT_SYMBOL, 150),
+ (24, HUFFMAN_EMIT_SYMBOL, 150),
+ (31, HUFFMAN_EMIT_SYMBOL, 150),
+ (41, HUFFMAN_EMIT_SYMBOL, 150),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 150),
+ (3, HUFFMAN_EMIT_SYMBOL, 151),
+ (6, HUFFMAN_EMIT_SYMBOL, 151),
+ (10, HUFFMAN_EMIT_SYMBOL, 151),
+ (15, HUFFMAN_EMIT_SYMBOL, 151),
+ (24, HUFFMAN_EMIT_SYMBOL, 151),
+ (31, HUFFMAN_EMIT_SYMBOL, 151),
+ (41, HUFFMAN_EMIT_SYMBOL, 151),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 151),
+
+ # Node 159
+ (2, HUFFMAN_EMIT_SYMBOL, 152),
+ (9, HUFFMAN_EMIT_SYMBOL, 152),
+ (23, HUFFMAN_EMIT_SYMBOL, 152),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 152),
+ (2, HUFFMAN_EMIT_SYMBOL, 155),
+ (9, HUFFMAN_EMIT_SYMBOL, 155),
+ (23, HUFFMAN_EMIT_SYMBOL, 155),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 155),
+ (2, HUFFMAN_EMIT_SYMBOL, 157),
+ (9, HUFFMAN_EMIT_SYMBOL, 157),
+ (23, HUFFMAN_EMIT_SYMBOL, 157),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 157),
+ (2, HUFFMAN_EMIT_SYMBOL, 158),
+ (9, HUFFMAN_EMIT_SYMBOL, 158),
+ (23, HUFFMAN_EMIT_SYMBOL, 158),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 158),
+
+ # Node 160
+ (3, HUFFMAN_EMIT_SYMBOL, 152),
+ (6, HUFFMAN_EMIT_SYMBOL, 152),
+ (10, HUFFMAN_EMIT_SYMBOL, 152),
+ (15, HUFFMAN_EMIT_SYMBOL, 152),
+ (24, HUFFMAN_EMIT_SYMBOL, 152),
+ (31, HUFFMAN_EMIT_SYMBOL, 152),
+ (41, HUFFMAN_EMIT_SYMBOL, 152),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 152),
+ (3, HUFFMAN_EMIT_SYMBOL, 155),
+ (6, HUFFMAN_EMIT_SYMBOL, 155),
+ (10, HUFFMAN_EMIT_SYMBOL, 155),
+ (15, HUFFMAN_EMIT_SYMBOL, 155),
+ (24, HUFFMAN_EMIT_SYMBOL, 155),
+ (31, HUFFMAN_EMIT_SYMBOL, 155),
+ (41, HUFFMAN_EMIT_SYMBOL, 155),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 155),
+
+ # Node 161
+ (3, HUFFMAN_EMIT_SYMBOL, 157),
+ (6, HUFFMAN_EMIT_SYMBOL, 157),
+ (10, HUFFMAN_EMIT_SYMBOL, 157),
+ (15, HUFFMAN_EMIT_SYMBOL, 157),
+ (24, HUFFMAN_EMIT_SYMBOL, 157),
+ (31, HUFFMAN_EMIT_SYMBOL, 157),
+ (41, HUFFMAN_EMIT_SYMBOL, 157),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 157),
+ (3, HUFFMAN_EMIT_SYMBOL, 158),
+ (6, HUFFMAN_EMIT_SYMBOL, 158),
+ (10, HUFFMAN_EMIT_SYMBOL, 158),
+ (15, HUFFMAN_EMIT_SYMBOL, 158),
+ (24, HUFFMAN_EMIT_SYMBOL, 158),
+ (31, HUFFMAN_EMIT_SYMBOL, 158),
+ (41, HUFFMAN_EMIT_SYMBOL, 158),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 158),
+
+ # Node 162
+ (1, HUFFMAN_EMIT_SYMBOL, 165),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 165),
+ (1, HUFFMAN_EMIT_SYMBOL, 166),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 166),
+ (1, HUFFMAN_EMIT_SYMBOL, 168),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 168),
+ (1, HUFFMAN_EMIT_SYMBOL, 174),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 174),
+ (1, HUFFMAN_EMIT_SYMBOL, 175),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 175),
+ (1, HUFFMAN_EMIT_SYMBOL, 180),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 180),
+ (1, HUFFMAN_EMIT_SYMBOL, 182),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 182),
+ (1, HUFFMAN_EMIT_SYMBOL, 183),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 183),
+
+ # Node 163
+ (2, HUFFMAN_EMIT_SYMBOL, 165),
+ (9, HUFFMAN_EMIT_SYMBOL, 165),
+ (23, HUFFMAN_EMIT_SYMBOL, 165),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 165),
+ (2, HUFFMAN_EMIT_SYMBOL, 166),
+ (9, HUFFMAN_EMIT_SYMBOL, 166),
+ (23, HUFFMAN_EMIT_SYMBOL, 166),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 166),
+ (2, HUFFMAN_EMIT_SYMBOL, 168),
+ (9, HUFFMAN_EMIT_SYMBOL, 168),
+ (23, HUFFMAN_EMIT_SYMBOL, 168),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 168),
+ (2, HUFFMAN_EMIT_SYMBOL, 174),
+ (9, HUFFMAN_EMIT_SYMBOL, 174),
+ (23, HUFFMAN_EMIT_SYMBOL, 174),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 174),
+
+ # Node 164
+ (3, HUFFMAN_EMIT_SYMBOL, 165),
+ (6, HUFFMAN_EMIT_SYMBOL, 165),
+ (10, HUFFMAN_EMIT_SYMBOL, 165),
+ (15, HUFFMAN_EMIT_SYMBOL, 165),
+ (24, HUFFMAN_EMIT_SYMBOL, 165),
+ (31, HUFFMAN_EMIT_SYMBOL, 165),
+ (41, HUFFMAN_EMIT_SYMBOL, 165),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 165),
+ (3, HUFFMAN_EMIT_SYMBOL, 166),
+ (6, HUFFMAN_EMIT_SYMBOL, 166),
+ (10, HUFFMAN_EMIT_SYMBOL, 166),
+ (15, HUFFMAN_EMIT_SYMBOL, 166),
+ (24, HUFFMAN_EMIT_SYMBOL, 166),
+ (31, HUFFMAN_EMIT_SYMBOL, 166),
+ (41, HUFFMAN_EMIT_SYMBOL, 166),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 166),
+
+ # Node 165
+ (3, HUFFMAN_EMIT_SYMBOL, 168),
+ (6, HUFFMAN_EMIT_SYMBOL, 168),
+ (10, HUFFMAN_EMIT_SYMBOL, 168),
+ (15, HUFFMAN_EMIT_SYMBOL, 168),
+ (24, HUFFMAN_EMIT_SYMBOL, 168),
+ (31, HUFFMAN_EMIT_SYMBOL, 168),
+ (41, HUFFMAN_EMIT_SYMBOL, 168),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 168),
+ (3, HUFFMAN_EMIT_SYMBOL, 174),
+ (6, HUFFMAN_EMIT_SYMBOL, 174),
+ (10, HUFFMAN_EMIT_SYMBOL, 174),
+ (15, HUFFMAN_EMIT_SYMBOL, 174),
+ (24, HUFFMAN_EMIT_SYMBOL, 174),
+ (31, HUFFMAN_EMIT_SYMBOL, 174),
+ (41, HUFFMAN_EMIT_SYMBOL, 174),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 174),
+
+ # Node 166
+ (2, HUFFMAN_EMIT_SYMBOL, 175),
+ (9, HUFFMAN_EMIT_SYMBOL, 175),
+ (23, HUFFMAN_EMIT_SYMBOL, 175),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 175),
+ (2, HUFFMAN_EMIT_SYMBOL, 180),
+ (9, HUFFMAN_EMIT_SYMBOL, 180),
+ (23, HUFFMAN_EMIT_SYMBOL, 180),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 180),
+ (2, HUFFMAN_EMIT_SYMBOL, 182),
+ (9, HUFFMAN_EMIT_SYMBOL, 182),
+ (23, HUFFMAN_EMIT_SYMBOL, 182),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 182),
+ (2, HUFFMAN_EMIT_SYMBOL, 183),
+ (9, HUFFMAN_EMIT_SYMBOL, 183),
+ (23, HUFFMAN_EMIT_SYMBOL, 183),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 183),
+
+ # Node 167
+ (3, HUFFMAN_EMIT_SYMBOL, 175),
+ (6, HUFFMAN_EMIT_SYMBOL, 175),
+ (10, HUFFMAN_EMIT_SYMBOL, 175),
+ (15, HUFFMAN_EMIT_SYMBOL, 175),
+ (24, HUFFMAN_EMIT_SYMBOL, 175),
+ (31, HUFFMAN_EMIT_SYMBOL, 175),
+ (41, HUFFMAN_EMIT_SYMBOL, 175),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 175),
+ (3, HUFFMAN_EMIT_SYMBOL, 180),
+ (6, HUFFMAN_EMIT_SYMBOL, 180),
+ (10, HUFFMAN_EMIT_SYMBOL, 180),
+ (15, HUFFMAN_EMIT_SYMBOL, 180),
+ (24, HUFFMAN_EMIT_SYMBOL, 180),
+ (31, HUFFMAN_EMIT_SYMBOL, 180),
+ (41, HUFFMAN_EMIT_SYMBOL, 180),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 180),
+
+ # Node 168
+ (3, HUFFMAN_EMIT_SYMBOL, 182),
+ (6, HUFFMAN_EMIT_SYMBOL, 182),
+ (10, HUFFMAN_EMIT_SYMBOL, 182),
+ (15, HUFFMAN_EMIT_SYMBOL, 182),
+ (24, HUFFMAN_EMIT_SYMBOL, 182),
+ (31, HUFFMAN_EMIT_SYMBOL, 182),
+ (41, HUFFMAN_EMIT_SYMBOL, 182),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 182),
+ (3, HUFFMAN_EMIT_SYMBOL, 183),
+ (6, HUFFMAN_EMIT_SYMBOL, 183),
+ (10, HUFFMAN_EMIT_SYMBOL, 183),
+ (15, HUFFMAN_EMIT_SYMBOL, 183),
+ (24, HUFFMAN_EMIT_SYMBOL, 183),
+ (31, HUFFMAN_EMIT_SYMBOL, 183),
+ (41, HUFFMAN_EMIT_SYMBOL, 183),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 183),
+
+ # Node 169
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 188),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 191),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 197),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 231),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 239),
+ (176, 0, 0),
+ (178, 0, 0),
+ (179, 0, 0),
+ (183, 0, 0),
+ (184, 0, 0),
+ (186, 0, 0),
+ (187, 0, 0),
+ (192, 0, 0),
+ (199, 0, 0),
+ (208, 0, 0),
+ (223, 0, 0),
+
+ # Node 170
+ (1, HUFFMAN_EMIT_SYMBOL, 188),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 188),
+ (1, HUFFMAN_EMIT_SYMBOL, 191),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 191),
+ (1, HUFFMAN_EMIT_SYMBOL, 197),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 197),
+ (1, HUFFMAN_EMIT_SYMBOL, 231),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 231),
+ (1, HUFFMAN_EMIT_SYMBOL, 239),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 239),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 9),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 142),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 144),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 145),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 148),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 159),
+
+ # Node 171
+ (2, HUFFMAN_EMIT_SYMBOL, 188),
+ (9, HUFFMAN_EMIT_SYMBOL, 188),
+ (23, HUFFMAN_EMIT_SYMBOL, 188),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 188),
+ (2, HUFFMAN_EMIT_SYMBOL, 191),
+ (9, HUFFMAN_EMIT_SYMBOL, 191),
+ (23, HUFFMAN_EMIT_SYMBOL, 191),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 191),
+ (2, HUFFMAN_EMIT_SYMBOL, 197),
+ (9, HUFFMAN_EMIT_SYMBOL, 197),
+ (23, HUFFMAN_EMIT_SYMBOL, 197),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 197),
+ (2, HUFFMAN_EMIT_SYMBOL, 231),
+ (9, HUFFMAN_EMIT_SYMBOL, 231),
+ (23, HUFFMAN_EMIT_SYMBOL, 231),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 231),
+
+ # Node 172
+ (3, HUFFMAN_EMIT_SYMBOL, 188),
+ (6, HUFFMAN_EMIT_SYMBOL, 188),
+ (10, HUFFMAN_EMIT_SYMBOL, 188),
+ (15, HUFFMAN_EMIT_SYMBOL, 188),
+ (24, HUFFMAN_EMIT_SYMBOL, 188),
+ (31, HUFFMAN_EMIT_SYMBOL, 188),
+ (41, HUFFMAN_EMIT_SYMBOL, 188),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 188),
+ (3, HUFFMAN_EMIT_SYMBOL, 191),
+ (6, HUFFMAN_EMIT_SYMBOL, 191),
+ (10, HUFFMAN_EMIT_SYMBOL, 191),
+ (15, HUFFMAN_EMIT_SYMBOL, 191),
+ (24, HUFFMAN_EMIT_SYMBOL, 191),
+ (31, HUFFMAN_EMIT_SYMBOL, 191),
+ (41, HUFFMAN_EMIT_SYMBOL, 191),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 191),
+
+ # Node 173
+ (3, HUFFMAN_EMIT_SYMBOL, 197),
+ (6, HUFFMAN_EMIT_SYMBOL, 197),
+ (10, HUFFMAN_EMIT_SYMBOL, 197),
+ (15, HUFFMAN_EMIT_SYMBOL, 197),
+ (24, HUFFMAN_EMIT_SYMBOL, 197),
+ (31, HUFFMAN_EMIT_SYMBOL, 197),
+ (41, HUFFMAN_EMIT_SYMBOL, 197),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 197),
+ (3, HUFFMAN_EMIT_SYMBOL, 231),
+ (6, HUFFMAN_EMIT_SYMBOL, 231),
+ (10, HUFFMAN_EMIT_SYMBOL, 231),
+ (15, HUFFMAN_EMIT_SYMBOL, 231),
+ (24, HUFFMAN_EMIT_SYMBOL, 231),
+ (31, HUFFMAN_EMIT_SYMBOL, 231),
+ (41, HUFFMAN_EMIT_SYMBOL, 231),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 231),
+
+ # Node 174
+ (2, HUFFMAN_EMIT_SYMBOL, 239),
+ (9, HUFFMAN_EMIT_SYMBOL, 239),
+ (23, HUFFMAN_EMIT_SYMBOL, 239),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 239),
+ (1, HUFFMAN_EMIT_SYMBOL, 9),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 9),
+ (1, HUFFMAN_EMIT_SYMBOL, 142),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 142),
+ (1, HUFFMAN_EMIT_SYMBOL, 144),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 144),
+ (1, HUFFMAN_EMIT_SYMBOL, 145),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 145),
+ (1, HUFFMAN_EMIT_SYMBOL, 148),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 148),
+ (1, HUFFMAN_EMIT_SYMBOL, 159),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 159),
+
+ # Node 175
+ (3, HUFFMAN_EMIT_SYMBOL, 239),
+ (6, HUFFMAN_EMIT_SYMBOL, 239),
+ (10, HUFFMAN_EMIT_SYMBOL, 239),
+ (15, HUFFMAN_EMIT_SYMBOL, 239),
+ (24, HUFFMAN_EMIT_SYMBOL, 239),
+ (31, HUFFMAN_EMIT_SYMBOL, 239),
+ (41, HUFFMAN_EMIT_SYMBOL, 239),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 239),
+ (2, HUFFMAN_EMIT_SYMBOL, 9),
+ (9, HUFFMAN_EMIT_SYMBOL, 9),
+ (23, HUFFMAN_EMIT_SYMBOL, 9),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 9),
+ (2, HUFFMAN_EMIT_SYMBOL, 142),
+ (9, HUFFMAN_EMIT_SYMBOL, 142),
+ (23, HUFFMAN_EMIT_SYMBOL, 142),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 142),
+
+ # Node 176
+ (3, HUFFMAN_EMIT_SYMBOL, 9),
+ (6, HUFFMAN_EMIT_SYMBOL, 9),
+ (10, HUFFMAN_EMIT_SYMBOL, 9),
+ (15, HUFFMAN_EMIT_SYMBOL, 9),
+ (24, HUFFMAN_EMIT_SYMBOL, 9),
+ (31, HUFFMAN_EMIT_SYMBOL, 9),
+ (41, HUFFMAN_EMIT_SYMBOL, 9),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 9),
+ (3, HUFFMAN_EMIT_SYMBOL, 142),
+ (6, HUFFMAN_EMIT_SYMBOL, 142),
+ (10, HUFFMAN_EMIT_SYMBOL, 142),
+ (15, HUFFMAN_EMIT_SYMBOL, 142),
+ (24, HUFFMAN_EMIT_SYMBOL, 142),
+ (31, HUFFMAN_EMIT_SYMBOL, 142),
+ (41, HUFFMAN_EMIT_SYMBOL, 142),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 142),
+
+ # Node 177
+ (2, HUFFMAN_EMIT_SYMBOL, 144),
+ (9, HUFFMAN_EMIT_SYMBOL, 144),
+ (23, HUFFMAN_EMIT_SYMBOL, 144),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 144),
+ (2, HUFFMAN_EMIT_SYMBOL, 145),
+ (9, HUFFMAN_EMIT_SYMBOL, 145),
+ (23, HUFFMAN_EMIT_SYMBOL, 145),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 145),
+ (2, HUFFMAN_EMIT_SYMBOL, 148),
+ (9, HUFFMAN_EMIT_SYMBOL, 148),
+ (23, HUFFMAN_EMIT_SYMBOL, 148),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 148),
+ (2, HUFFMAN_EMIT_SYMBOL, 159),
+ (9, HUFFMAN_EMIT_SYMBOL, 159),
+ (23, HUFFMAN_EMIT_SYMBOL, 159),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 159),
+
+ # Node 178
+ (3, HUFFMAN_EMIT_SYMBOL, 144),
+ (6, HUFFMAN_EMIT_SYMBOL, 144),
+ (10, HUFFMAN_EMIT_SYMBOL, 144),
+ (15, HUFFMAN_EMIT_SYMBOL, 144),
+ (24, HUFFMAN_EMIT_SYMBOL, 144),
+ (31, HUFFMAN_EMIT_SYMBOL, 144),
+ (41, HUFFMAN_EMIT_SYMBOL, 144),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 144),
+ (3, HUFFMAN_EMIT_SYMBOL, 145),
+ (6, HUFFMAN_EMIT_SYMBOL, 145),
+ (10, HUFFMAN_EMIT_SYMBOL, 145),
+ (15, HUFFMAN_EMIT_SYMBOL, 145),
+ (24, HUFFMAN_EMIT_SYMBOL, 145),
+ (31, HUFFMAN_EMIT_SYMBOL, 145),
+ (41, HUFFMAN_EMIT_SYMBOL, 145),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 145),
+
+ # Node 179
+ (3, HUFFMAN_EMIT_SYMBOL, 148),
+ (6, HUFFMAN_EMIT_SYMBOL, 148),
+ (10, HUFFMAN_EMIT_SYMBOL, 148),
+ (15, HUFFMAN_EMIT_SYMBOL, 148),
+ (24, HUFFMAN_EMIT_SYMBOL, 148),
+ (31, HUFFMAN_EMIT_SYMBOL, 148),
+ (41, HUFFMAN_EMIT_SYMBOL, 148),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 148),
+ (3, HUFFMAN_EMIT_SYMBOL, 159),
+ (6, HUFFMAN_EMIT_SYMBOL, 159),
+ (10, HUFFMAN_EMIT_SYMBOL, 159),
+ (15, HUFFMAN_EMIT_SYMBOL, 159),
+ (24, HUFFMAN_EMIT_SYMBOL, 159),
+ (31, HUFFMAN_EMIT_SYMBOL, 159),
+ (41, HUFFMAN_EMIT_SYMBOL, 159),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 159),
+
+ # Node 180
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 171),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 206),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 215),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 225),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 236),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 237),
+ (188, 0, 0),
+ (189, 0, 0),
+ (193, 0, 0),
+ (196, 0, 0),
+ (200, 0, 0),
+ (203, 0, 0),
+ (209, 0, 0),
+ (216, 0, 0),
+ (224, 0, 0),
+ (238, 0, 0),
+
+ # Node 181
+ (1, HUFFMAN_EMIT_SYMBOL, 171),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 171),
+ (1, HUFFMAN_EMIT_SYMBOL, 206),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 206),
+ (1, HUFFMAN_EMIT_SYMBOL, 215),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 215),
+ (1, HUFFMAN_EMIT_SYMBOL, 225),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 225),
+ (1, HUFFMAN_EMIT_SYMBOL, 236),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 236),
+ (1, HUFFMAN_EMIT_SYMBOL, 237),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 237),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 199),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 207),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 234),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 235),
+
+ # Node 182
+ (2, HUFFMAN_EMIT_SYMBOL, 171),
+ (9, HUFFMAN_EMIT_SYMBOL, 171),
+ (23, HUFFMAN_EMIT_SYMBOL, 171),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 171),
+ (2, HUFFMAN_EMIT_SYMBOL, 206),
+ (9, HUFFMAN_EMIT_SYMBOL, 206),
+ (23, HUFFMAN_EMIT_SYMBOL, 206),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 206),
+ (2, HUFFMAN_EMIT_SYMBOL, 215),
+ (9, HUFFMAN_EMIT_SYMBOL, 215),
+ (23, HUFFMAN_EMIT_SYMBOL, 215),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 215),
+ (2, HUFFMAN_EMIT_SYMBOL, 225),
+ (9, HUFFMAN_EMIT_SYMBOL, 225),
+ (23, HUFFMAN_EMIT_SYMBOL, 225),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 225),
+
+ # Node 183
+ (3, HUFFMAN_EMIT_SYMBOL, 171),
+ (6, HUFFMAN_EMIT_SYMBOL, 171),
+ (10, HUFFMAN_EMIT_SYMBOL, 171),
+ (15, HUFFMAN_EMIT_SYMBOL, 171),
+ (24, HUFFMAN_EMIT_SYMBOL, 171),
+ (31, HUFFMAN_EMIT_SYMBOL, 171),
+ (41, HUFFMAN_EMIT_SYMBOL, 171),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 171),
+ (3, HUFFMAN_EMIT_SYMBOL, 206),
+ (6, HUFFMAN_EMIT_SYMBOL, 206),
+ (10, HUFFMAN_EMIT_SYMBOL, 206),
+ (15, HUFFMAN_EMIT_SYMBOL, 206),
+ (24, HUFFMAN_EMIT_SYMBOL, 206),
+ (31, HUFFMAN_EMIT_SYMBOL, 206),
+ (41, HUFFMAN_EMIT_SYMBOL, 206),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 206),
+
+ # Node 184
+ (3, HUFFMAN_EMIT_SYMBOL, 215),
+ (6, HUFFMAN_EMIT_SYMBOL, 215),
+ (10, HUFFMAN_EMIT_SYMBOL, 215),
+ (15, HUFFMAN_EMIT_SYMBOL, 215),
+ (24, HUFFMAN_EMIT_SYMBOL, 215),
+ (31, HUFFMAN_EMIT_SYMBOL, 215),
+ (41, HUFFMAN_EMIT_SYMBOL, 215),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 215),
+ (3, HUFFMAN_EMIT_SYMBOL, 225),
+ (6, HUFFMAN_EMIT_SYMBOL, 225),
+ (10, HUFFMAN_EMIT_SYMBOL, 225),
+ (15, HUFFMAN_EMIT_SYMBOL, 225),
+ (24, HUFFMAN_EMIT_SYMBOL, 225),
+ (31, HUFFMAN_EMIT_SYMBOL, 225),
+ (41, HUFFMAN_EMIT_SYMBOL, 225),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 225),
+
+ # Node 185
+ (2, HUFFMAN_EMIT_SYMBOL, 236),
+ (9, HUFFMAN_EMIT_SYMBOL, 236),
+ (23, HUFFMAN_EMIT_SYMBOL, 236),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 236),
+ (2, HUFFMAN_EMIT_SYMBOL, 237),
+ (9, HUFFMAN_EMIT_SYMBOL, 237),
+ (23, HUFFMAN_EMIT_SYMBOL, 237),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 237),
+ (1, HUFFMAN_EMIT_SYMBOL, 199),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 199),
+ (1, HUFFMAN_EMIT_SYMBOL, 207),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 207),
+ (1, HUFFMAN_EMIT_SYMBOL, 234),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 234),
+ (1, HUFFMAN_EMIT_SYMBOL, 235),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 235),
+
+ # Node 186
+ (3, HUFFMAN_EMIT_SYMBOL, 236),
+ (6, HUFFMAN_EMIT_SYMBOL, 236),
+ (10, HUFFMAN_EMIT_SYMBOL, 236),
+ (15, HUFFMAN_EMIT_SYMBOL, 236),
+ (24, HUFFMAN_EMIT_SYMBOL, 236),
+ (31, HUFFMAN_EMIT_SYMBOL, 236),
+ (41, HUFFMAN_EMIT_SYMBOL, 236),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 236),
+ (3, HUFFMAN_EMIT_SYMBOL, 237),
+ (6, HUFFMAN_EMIT_SYMBOL, 237),
+ (10, HUFFMAN_EMIT_SYMBOL, 237),
+ (15, HUFFMAN_EMIT_SYMBOL, 237),
+ (24, HUFFMAN_EMIT_SYMBOL, 237),
+ (31, HUFFMAN_EMIT_SYMBOL, 237),
+ (41, HUFFMAN_EMIT_SYMBOL, 237),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 237),
+
+ # Node 187
+ (2, HUFFMAN_EMIT_SYMBOL, 199),
+ (9, HUFFMAN_EMIT_SYMBOL, 199),
+ (23, HUFFMAN_EMIT_SYMBOL, 199),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 199),
+ (2, HUFFMAN_EMIT_SYMBOL, 207),
+ (9, HUFFMAN_EMIT_SYMBOL, 207),
+ (23, HUFFMAN_EMIT_SYMBOL, 207),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 207),
+ (2, HUFFMAN_EMIT_SYMBOL, 234),
+ (9, HUFFMAN_EMIT_SYMBOL, 234),
+ (23, HUFFMAN_EMIT_SYMBOL, 234),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 234),
+ (2, HUFFMAN_EMIT_SYMBOL, 235),
+ (9, HUFFMAN_EMIT_SYMBOL, 235),
+ (23, HUFFMAN_EMIT_SYMBOL, 235),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 235),
+
+ # Node 188
+ (3, HUFFMAN_EMIT_SYMBOL, 199),
+ (6, HUFFMAN_EMIT_SYMBOL, 199),
+ (10, HUFFMAN_EMIT_SYMBOL, 199),
+ (15, HUFFMAN_EMIT_SYMBOL, 199),
+ (24, HUFFMAN_EMIT_SYMBOL, 199),
+ (31, HUFFMAN_EMIT_SYMBOL, 199),
+ (41, HUFFMAN_EMIT_SYMBOL, 199),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 199),
+ (3, HUFFMAN_EMIT_SYMBOL, 207),
+ (6, HUFFMAN_EMIT_SYMBOL, 207),
+ (10, HUFFMAN_EMIT_SYMBOL, 207),
+ (15, HUFFMAN_EMIT_SYMBOL, 207),
+ (24, HUFFMAN_EMIT_SYMBOL, 207),
+ (31, HUFFMAN_EMIT_SYMBOL, 207),
+ (41, HUFFMAN_EMIT_SYMBOL, 207),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 207),
+
+ # Node 189
+ (3, HUFFMAN_EMIT_SYMBOL, 234),
+ (6, HUFFMAN_EMIT_SYMBOL, 234),
+ (10, HUFFMAN_EMIT_SYMBOL, 234),
+ (15, HUFFMAN_EMIT_SYMBOL, 234),
+ (24, HUFFMAN_EMIT_SYMBOL, 234),
+ (31, HUFFMAN_EMIT_SYMBOL, 234),
+ (41, HUFFMAN_EMIT_SYMBOL, 234),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 234),
+ (3, HUFFMAN_EMIT_SYMBOL, 235),
+ (6, HUFFMAN_EMIT_SYMBOL, 235),
+ (10, HUFFMAN_EMIT_SYMBOL, 235),
+ (15, HUFFMAN_EMIT_SYMBOL, 235),
+ (24, HUFFMAN_EMIT_SYMBOL, 235),
+ (31, HUFFMAN_EMIT_SYMBOL, 235),
+ (41, HUFFMAN_EMIT_SYMBOL, 235),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 235),
+
+ # Node 190
+ (194, 0, 0),
+ (195, 0, 0),
+ (197, 0, 0),
+ (198, 0, 0),
+ (201, 0, 0),
+ (202, 0, 0),
+ (204, 0, 0),
+ (205, 0, 0),
+ (210, 0, 0),
+ (213, 0, 0),
+ (217, 0, 0),
+ (220, 0, 0),
+ (225, 0, 0),
+ (231, 0, 0),
+ (239, 0, 0),
+ (246, 0, 0),
+
+ # Node 191
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 192),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 193),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 200),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 201),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 202),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 205),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 210),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 213),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 218),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 219),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 238),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 240),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 242),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 243),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 255),
+ (206, 0, 0),
+
+ # Node 192
+ (1, HUFFMAN_EMIT_SYMBOL, 192),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 192),
+ (1, HUFFMAN_EMIT_SYMBOL, 193),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 193),
+ (1, HUFFMAN_EMIT_SYMBOL, 200),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 200),
+ (1, HUFFMAN_EMIT_SYMBOL, 201),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 201),
+ (1, HUFFMAN_EMIT_SYMBOL, 202),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 202),
+ (1, HUFFMAN_EMIT_SYMBOL, 205),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 205),
+ (1, HUFFMAN_EMIT_SYMBOL, 210),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 210),
+ (1, HUFFMAN_EMIT_SYMBOL, 213),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 213),
+
+ # Node 193
+ (2, HUFFMAN_EMIT_SYMBOL, 192),
+ (9, HUFFMAN_EMIT_SYMBOL, 192),
+ (23, HUFFMAN_EMIT_SYMBOL, 192),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 192),
+ (2, HUFFMAN_EMIT_SYMBOL, 193),
+ (9, HUFFMAN_EMIT_SYMBOL, 193),
+ (23, HUFFMAN_EMIT_SYMBOL, 193),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 193),
+ (2, HUFFMAN_EMIT_SYMBOL, 200),
+ (9, HUFFMAN_EMIT_SYMBOL, 200),
+ (23, HUFFMAN_EMIT_SYMBOL, 200),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 200),
+ (2, HUFFMAN_EMIT_SYMBOL, 201),
+ (9, HUFFMAN_EMIT_SYMBOL, 201),
+ (23, HUFFMAN_EMIT_SYMBOL, 201),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 201),
+
+ # Node 194
+ (3, HUFFMAN_EMIT_SYMBOL, 192),
+ (6, HUFFMAN_EMIT_SYMBOL, 192),
+ (10, HUFFMAN_EMIT_SYMBOL, 192),
+ (15, HUFFMAN_EMIT_SYMBOL, 192),
+ (24, HUFFMAN_EMIT_SYMBOL, 192),
+ (31, HUFFMAN_EMIT_SYMBOL, 192),
+ (41, HUFFMAN_EMIT_SYMBOL, 192),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 192),
+ (3, HUFFMAN_EMIT_SYMBOL, 193),
+ (6, HUFFMAN_EMIT_SYMBOL, 193),
+ (10, HUFFMAN_EMIT_SYMBOL, 193),
+ (15, HUFFMAN_EMIT_SYMBOL, 193),
+ (24, HUFFMAN_EMIT_SYMBOL, 193),
+ (31, HUFFMAN_EMIT_SYMBOL, 193),
+ (41, HUFFMAN_EMIT_SYMBOL, 193),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 193),
+
+ # Node 195
+ (3, HUFFMAN_EMIT_SYMBOL, 200),
+ (6, HUFFMAN_EMIT_SYMBOL, 200),
+ (10, HUFFMAN_EMIT_SYMBOL, 200),
+ (15, HUFFMAN_EMIT_SYMBOL, 200),
+ (24, HUFFMAN_EMIT_SYMBOL, 200),
+ (31, HUFFMAN_EMIT_SYMBOL, 200),
+ (41, HUFFMAN_EMIT_SYMBOL, 200),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 200),
+ (3, HUFFMAN_EMIT_SYMBOL, 201),
+ (6, HUFFMAN_EMIT_SYMBOL, 201),
+ (10, HUFFMAN_EMIT_SYMBOL, 201),
+ (15, HUFFMAN_EMIT_SYMBOL, 201),
+ (24, HUFFMAN_EMIT_SYMBOL, 201),
+ (31, HUFFMAN_EMIT_SYMBOL, 201),
+ (41, HUFFMAN_EMIT_SYMBOL, 201),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 201),
+
+ # Node 196
+ (2, HUFFMAN_EMIT_SYMBOL, 202),
+ (9, HUFFMAN_EMIT_SYMBOL, 202),
+ (23, HUFFMAN_EMIT_SYMBOL, 202),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 202),
+ (2, HUFFMAN_EMIT_SYMBOL, 205),
+ (9, HUFFMAN_EMIT_SYMBOL, 205),
+ (23, HUFFMAN_EMIT_SYMBOL, 205),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 205),
+ (2, HUFFMAN_EMIT_SYMBOL, 210),
+ (9, HUFFMAN_EMIT_SYMBOL, 210),
+ (23, HUFFMAN_EMIT_SYMBOL, 210),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 210),
+ (2, HUFFMAN_EMIT_SYMBOL, 213),
+ (9, HUFFMAN_EMIT_SYMBOL, 213),
+ (23, HUFFMAN_EMIT_SYMBOL, 213),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 213),
+
+ # Node 197
+ (3, HUFFMAN_EMIT_SYMBOL, 202),
+ (6, HUFFMAN_EMIT_SYMBOL, 202),
+ (10, HUFFMAN_EMIT_SYMBOL, 202),
+ (15, HUFFMAN_EMIT_SYMBOL, 202),
+ (24, HUFFMAN_EMIT_SYMBOL, 202),
+ (31, HUFFMAN_EMIT_SYMBOL, 202),
+ (41, HUFFMAN_EMIT_SYMBOL, 202),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 202),
+ (3, HUFFMAN_EMIT_SYMBOL, 205),
+ (6, HUFFMAN_EMIT_SYMBOL, 205),
+ (10, HUFFMAN_EMIT_SYMBOL, 205),
+ (15, HUFFMAN_EMIT_SYMBOL, 205),
+ (24, HUFFMAN_EMIT_SYMBOL, 205),
+ (31, HUFFMAN_EMIT_SYMBOL, 205),
+ (41, HUFFMAN_EMIT_SYMBOL, 205),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 205),
+
+ # Node 198
+ (3, HUFFMAN_EMIT_SYMBOL, 210),
+ (6, HUFFMAN_EMIT_SYMBOL, 210),
+ (10, HUFFMAN_EMIT_SYMBOL, 210),
+ (15, HUFFMAN_EMIT_SYMBOL, 210),
+ (24, HUFFMAN_EMIT_SYMBOL, 210),
+ (31, HUFFMAN_EMIT_SYMBOL, 210),
+ (41, HUFFMAN_EMIT_SYMBOL, 210),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 210),
+ (3, HUFFMAN_EMIT_SYMBOL, 213),
+ (6, HUFFMAN_EMIT_SYMBOL, 213),
+ (10, HUFFMAN_EMIT_SYMBOL, 213),
+ (15, HUFFMAN_EMIT_SYMBOL, 213),
+ (24, HUFFMAN_EMIT_SYMBOL, 213),
+ (31, HUFFMAN_EMIT_SYMBOL, 213),
+ (41, HUFFMAN_EMIT_SYMBOL, 213),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 213),
+
+ # Node 199
+ (1, HUFFMAN_EMIT_SYMBOL, 218),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 218),
+ (1, HUFFMAN_EMIT_SYMBOL, 219),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 219),
+ (1, HUFFMAN_EMIT_SYMBOL, 238),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 238),
+ (1, HUFFMAN_EMIT_SYMBOL, 240),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 240),
+ (1, HUFFMAN_EMIT_SYMBOL, 242),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 242),
+ (1, HUFFMAN_EMIT_SYMBOL, 243),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 243),
+ (1, HUFFMAN_EMIT_SYMBOL, 255),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 255),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 203),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 204),
+
+ # Node 200
+ (2, HUFFMAN_EMIT_SYMBOL, 218),
+ (9, HUFFMAN_EMIT_SYMBOL, 218),
+ (23, HUFFMAN_EMIT_SYMBOL, 218),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 218),
+ (2, HUFFMAN_EMIT_SYMBOL, 219),
+ (9, HUFFMAN_EMIT_SYMBOL, 219),
+ (23, HUFFMAN_EMIT_SYMBOL, 219),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 219),
+ (2, HUFFMAN_EMIT_SYMBOL, 238),
+ (9, HUFFMAN_EMIT_SYMBOL, 238),
+ (23, HUFFMAN_EMIT_SYMBOL, 238),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 238),
+ (2, HUFFMAN_EMIT_SYMBOL, 240),
+ (9, HUFFMAN_EMIT_SYMBOL, 240),
+ (23, HUFFMAN_EMIT_SYMBOL, 240),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 240),
+
+ # Node 201
+ (3, HUFFMAN_EMIT_SYMBOL, 218),
+ (6, HUFFMAN_EMIT_SYMBOL, 218),
+ (10, HUFFMAN_EMIT_SYMBOL, 218),
+ (15, HUFFMAN_EMIT_SYMBOL, 218),
+ (24, HUFFMAN_EMIT_SYMBOL, 218),
+ (31, HUFFMAN_EMIT_SYMBOL, 218),
+ (41, HUFFMAN_EMIT_SYMBOL, 218),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 218),
+ (3, HUFFMAN_EMIT_SYMBOL, 219),
+ (6, HUFFMAN_EMIT_SYMBOL, 219),
+ (10, HUFFMAN_EMIT_SYMBOL, 219),
+ (15, HUFFMAN_EMIT_SYMBOL, 219),
+ (24, HUFFMAN_EMIT_SYMBOL, 219),
+ (31, HUFFMAN_EMIT_SYMBOL, 219),
+ (41, HUFFMAN_EMIT_SYMBOL, 219),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 219),
+
+ # Node 202
+ (3, HUFFMAN_EMIT_SYMBOL, 238),
+ (6, HUFFMAN_EMIT_SYMBOL, 238),
+ (10, HUFFMAN_EMIT_SYMBOL, 238),
+ (15, HUFFMAN_EMIT_SYMBOL, 238),
+ (24, HUFFMAN_EMIT_SYMBOL, 238),
+ (31, HUFFMAN_EMIT_SYMBOL, 238),
+ (41, HUFFMAN_EMIT_SYMBOL, 238),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 238),
+ (3, HUFFMAN_EMIT_SYMBOL, 240),
+ (6, HUFFMAN_EMIT_SYMBOL, 240),
+ (10, HUFFMAN_EMIT_SYMBOL, 240),
+ (15, HUFFMAN_EMIT_SYMBOL, 240),
+ (24, HUFFMAN_EMIT_SYMBOL, 240),
+ (31, HUFFMAN_EMIT_SYMBOL, 240),
+ (41, HUFFMAN_EMIT_SYMBOL, 240),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 240),
+
+ # Node 203
+ (2, HUFFMAN_EMIT_SYMBOL, 242),
+ (9, HUFFMAN_EMIT_SYMBOL, 242),
+ (23, HUFFMAN_EMIT_SYMBOL, 242),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 242),
+ (2, HUFFMAN_EMIT_SYMBOL, 243),
+ (9, HUFFMAN_EMIT_SYMBOL, 243),
+ (23, HUFFMAN_EMIT_SYMBOL, 243),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 243),
+ (2, HUFFMAN_EMIT_SYMBOL, 255),
+ (9, HUFFMAN_EMIT_SYMBOL, 255),
+ (23, HUFFMAN_EMIT_SYMBOL, 255),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 255),
+ (1, HUFFMAN_EMIT_SYMBOL, 203),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 203),
+ (1, HUFFMAN_EMIT_SYMBOL, 204),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 204),
+
+ # Node 204
+ (3, HUFFMAN_EMIT_SYMBOL, 242),
+ (6, HUFFMAN_EMIT_SYMBOL, 242),
+ (10, HUFFMAN_EMIT_SYMBOL, 242),
+ (15, HUFFMAN_EMIT_SYMBOL, 242),
+ (24, HUFFMAN_EMIT_SYMBOL, 242),
+ (31, HUFFMAN_EMIT_SYMBOL, 242),
+ (41, HUFFMAN_EMIT_SYMBOL, 242),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 242),
+ (3, HUFFMAN_EMIT_SYMBOL, 243),
+ (6, HUFFMAN_EMIT_SYMBOL, 243),
+ (10, HUFFMAN_EMIT_SYMBOL, 243),
+ (15, HUFFMAN_EMIT_SYMBOL, 243),
+ (24, HUFFMAN_EMIT_SYMBOL, 243),
+ (31, HUFFMAN_EMIT_SYMBOL, 243),
+ (41, HUFFMAN_EMIT_SYMBOL, 243),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 243),
+
+ # Node 205
+ (3, HUFFMAN_EMIT_SYMBOL, 255),
+ (6, HUFFMAN_EMIT_SYMBOL, 255),
+ (10, HUFFMAN_EMIT_SYMBOL, 255),
+ (15, HUFFMAN_EMIT_SYMBOL, 255),
+ (24, HUFFMAN_EMIT_SYMBOL, 255),
+ (31, HUFFMAN_EMIT_SYMBOL, 255),
+ (41, HUFFMAN_EMIT_SYMBOL, 255),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 255),
+ (2, HUFFMAN_EMIT_SYMBOL, 203),
+ (9, HUFFMAN_EMIT_SYMBOL, 203),
+ (23, HUFFMAN_EMIT_SYMBOL, 203),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 203),
+ (2, HUFFMAN_EMIT_SYMBOL, 204),
+ (9, HUFFMAN_EMIT_SYMBOL, 204),
+ (23, HUFFMAN_EMIT_SYMBOL, 204),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 204),
+
+ # Node 206
+ (3, HUFFMAN_EMIT_SYMBOL, 203),
+ (6, HUFFMAN_EMIT_SYMBOL, 203),
+ (10, HUFFMAN_EMIT_SYMBOL, 203),
+ (15, HUFFMAN_EMIT_SYMBOL, 203),
+ (24, HUFFMAN_EMIT_SYMBOL, 203),
+ (31, HUFFMAN_EMIT_SYMBOL, 203),
+ (41, HUFFMAN_EMIT_SYMBOL, 203),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 203),
+ (3, HUFFMAN_EMIT_SYMBOL, 204),
+ (6, HUFFMAN_EMIT_SYMBOL, 204),
+ (10, HUFFMAN_EMIT_SYMBOL, 204),
+ (15, HUFFMAN_EMIT_SYMBOL, 204),
+ (24, HUFFMAN_EMIT_SYMBOL, 204),
+ (31, HUFFMAN_EMIT_SYMBOL, 204),
+ (41, HUFFMAN_EMIT_SYMBOL, 204),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 204),
+
+ # Node 207
+ (211, 0, 0),
+ (212, 0, 0),
+ (214, 0, 0),
+ (215, 0, 0),
+ (218, 0, 0),
+ (219, 0, 0),
+ (221, 0, 0),
+ (222, 0, 0),
+ (226, 0, 0),
+ (228, 0, 0),
+ (232, 0, 0),
+ (235, 0, 0),
+ (240, 0, 0),
+ (243, 0, 0),
+ (247, 0, 0),
+ (250, 0, 0),
+
+ # Node 208
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 211),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 212),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 214),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 221),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 222),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 223),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 241),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 244),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 245),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 246),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 247),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 248),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 250),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 251),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 252),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 253),
+
+ # Node 209
+ (1, HUFFMAN_EMIT_SYMBOL, 211),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 211),
+ (1, HUFFMAN_EMIT_SYMBOL, 212),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 212),
+ (1, HUFFMAN_EMIT_SYMBOL, 214),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 214),
+ (1, HUFFMAN_EMIT_SYMBOL, 221),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 221),
+ (1, HUFFMAN_EMIT_SYMBOL, 222),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 222),
+ (1, HUFFMAN_EMIT_SYMBOL, 223),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 223),
+ (1, HUFFMAN_EMIT_SYMBOL, 241),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 241),
+ (1, HUFFMAN_EMIT_SYMBOL, 244),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 244),
+
+ # Node 210
+ (2, HUFFMAN_EMIT_SYMBOL, 211),
+ (9, HUFFMAN_EMIT_SYMBOL, 211),
+ (23, HUFFMAN_EMIT_SYMBOL, 211),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 211),
+ (2, HUFFMAN_EMIT_SYMBOL, 212),
+ (9, HUFFMAN_EMIT_SYMBOL, 212),
+ (23, HUFFMAN_EMIT_SYMBOL, 212),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 212),
+ (2, HUFFMAN_EMIT_SYMBOL, 214),
+ (9, HUFFMAN_EMIT_SYMBOL, 214),
+ (23, HUFFMAN_EMIT_SYMBOL, 214),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 214),
+ (2, HUFFMAN_EMIT_SYMBOL, 221),
+ (9, HUFFMAN_EMIT_SYMBOL, 221),
+ (23, HUFFMAN_EMIT_SYMBOL, 221),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 221),
+
+ # Node 211
+ (3, HUFFMAN_EMIT_SYMBOL, 211),
+ (6, HUFFMAN_EMIT_SYMBOL, 211),
+ (10, HUFFMAN_EMIT_SYMBOL, 211),
+ (15, HUFFMAN_EMIT_SYMBOL, 211),
+ (24, HUFFMAN_EMIT_SYMBOL, 211),
+ (31, HUFFMAN_EMIT_SYMBOL, 211),
+ (41, HUFFMAN_EMIT_SYMBOL, 211),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 211),
+ (3, HUFFMAN_EMIT_SYMBOL, 212),
+ (6, HUFFMAN_EMIT_SYMBOL, 212),
+ (10, HUFFMAN_EMIT_SYMBOL, 212),
+ (15, HUFFMAN_EMIT_SYMBOL, 212),
+ (24, HUFFMAN_EMIT_SYMBOL, 212),
+ (31, HUFFMAN_EMIT_SYMBOL, 212),
+ (41, HUFFMAN_EMIT_SYMBOL, 212),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 212),
+
+ # Node 212
+ (3, HUFFMAN_EMIT_SYMBOL, 214),
+ (6, HUFFMAN_EMIT_SYMBOL, 214),
+ (10, HUFFMAN_EMIT_SYMBOL, 214),
+ (15, HUFFMAN_EMIT_SYMBOL, 214),
+ (24, HUFFMAN_EMIT_SYMBOL, 214),
+ (31, HUFFMAN_EMIT_SYMBOL, 214),
+ (41, HUFFMAN_EMIT_SYMBOL, 214),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 214),
+ (3, HUFFMAN_EMIT_SYMBOL, 221),
+ (6, HUFFMAN_EMIT_SYMBOL, 221),
+ (10, HUFFMAN_EMIT_SYMBOL, 221),
+ (15, HUFFMAN_EMIT_SYMBOL, 221),
+ (24, HUFFMAN_EMIT_SYMBOL, 221),
+ (31, HUFFMAN_EMIT_SYMBOL, 221),
+ (41, HUFFMAN_EMIT_SYMBOL, 221),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 221),
+
+ # Node 213
+ (2, HUFFMAN_EMIT_SYMBOL, 222),
+ (9, HUFFMAN_EMIT_SYMBOL, 222),
+ (23, HUFFMAN_EMIT_SYMBOL, 222),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 222),
+ (2, HUFFMAN_EMIT_SYMBOL, 223),
+ (9, HUFFMAN_EMIT_SYMBOL, 223),
+ (23, HUFFMAN_EMIT_SYMBOL, 223),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 223),
+ (2, HUFFMAN_EMIT_SYMBOL, 241),
+ (9, HUFFMAN_EMIT_SYMBOL, 241),
+ (23, HUFFMAN_EMIT_SYMBOL, 241),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 241),
+ (2, HUFFMAN_EMIT_SYMBOL, 244),
+ (9, HUFFMAN_EMIT_SYMBOL, 244),
+ (23, HUFFMAN_EMIT_SYMBOL, 244),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 244),
+
+ # Node 214
+ (3, HUFFMAN_EMIT_SYMBOL, 222),
+ (6, HUFFMAN_EMIT_SYMBOL, 222),
+ (10, HUFFMAN_EMIT_SYMBOL, 222),
+ (15, HUFFMAN_EMIT_SYMBOL, 222),
+ (24, HUFFMAN_EMIT_SYMBOL, 222),
+ (31, HUFFMAN_EMIT_SYMBOL, 222),
+ (41, HUFFMAN_EMIT_SYMBOL, 222),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 222),
+ (3, HUFFMAN_EMIT_SYMBOL, 223),
+ (6, HUFFMAN_EMIT_SYMBOL, 223),
+ (10, HUFFMAN_EMIT_SYMBOL, 223),
+ (15, HUFFMAN_EMIT_SYMBOL, 223),
+ (24, HUFFMAN_EMIT_SYMBOL, 223),
+ (31, HUFFMAN_EMIT_SYMBOL, 223),
+ (41, HUFFMAN_EMIT_SYMBOL, 223),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 223),
+
+ # Node 215
+ (3, HUFFMAN_EMIT_SYMBOL, 241),
+ (6, HUFFMAN_EMIT_SYMBOL, 241),
+ (10, HUFFMAN_EMIT_SYMBOL, 241),
+ (15, HUFFMAN_EMIT_SYMBOL, 241),
+ (24, HUFFMAN_EMIT_SYMBOL, 241),
+ (31, HUFFMAN_EMIT_SYMBOL, 241),
+ (41, HUFFMAN_EMIT_SYMBOL, 241),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 241),
+ (3, HUFFMAN_EMIT_SYMBOL, 244),
+ (6, HUFFMAN_EMIT_SYMBOL, 244),
+ (10, HUFFMAN_EMIT_SYMBOL, 244),
+ (15, HUFFMAN_EMIT_SYMBOL, 244),
+ (24, HUFFMAN_EMIT_SYMBOL, 244),
+ (31, HUFFMAN_EMIT_SYMBOL, 244),
+ (41, HUFFMAN_EMIT_SYMBOL, 244),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 244),
+
+ # Node 216
+ (1, HUFFMAN_EMIT_SYMBOL, 245),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 245),
+ (1, HUFFMAN_EMIT_SYMBOL, 246),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 246),
+ (1, HUFFMAN_EMIT_SYMBOL, 247),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 247),
+ (1, HUFFMAN_EMIT_SYMBOL, 248),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 248),
+ (1, HUFFMAN_EMIT_SYMBOL, 250),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 250),
+ (1, HUFFMAN_EMIT_SYMBOL, 251),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 251),
+ (1, HUFFMAN_EMIT_SYMBOL, 252),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 252),
+ (1, HUFFMAN_EMIT_SYMBOL, 253),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 253),
+
+ # Node 217
+ (2, HUFFMAN_EMIT_SYMBOL, 245),
+ (9, HUFFMAN_EMIT_SYMBOL, 245),
+ (23, HUFFMAN_EMIT_SYMBOL, 245),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 245),
+ (2, HUFFMAN_EMIT_SYMBOL, 246),
+ (9, HUFFMAN_EMIT_SYMBOL, 246),
+ (23, HUFFMAN_EMIT_SYMBOL, 246),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 246),
+ (2, HUFFMAN_EMIT_SYMBOL, 247),
+ (9, HUFFMAN_EMIT_SYMBOL, 247),
+ (23, HUFFMAN_EMIT_SYMBOL, 247),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 247),
+ (2, HUFFMAN_EMIT_SYMBOL, 248),
+ (9, HUFFMAN_EMIT_SYMBOL, 248),
+ (23, HUFFMAN_EMIT_SYMBOL, 248),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 248),
+
+ # Node 218
+ (3, HUFFMAN_EMIT_SYMBOL, 245),
+ (6, HUFFMAN_EMIT_SYMBOL, 245),
+ (10, HUFFMAN_EMIT_SYMBOL, 245),
+ (15, HUFFMAN_EMIT_SYMBOL, 245),
+ (24, HUFFMAN_EMIT_SYMBOL, 245),
+ (31, HUFFMAN_EMIT_SYMBOL, 245),
+ (41, HUFFMAN_EMIT_SYMBOL, 245),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 245),
+ (3, HUFFMAN_EMIT_SYMBOL, 246),
+ (6, HUFFMAN_EMIT_SYMBOL, 246),
+ (10, HUFFMAN_EMIT_SYMBOL, 246),
+ (15, HUFFMAN_EMIT_SYMBOL, 246),
+ (24, HUFFMAN_EMIT_SYMBOL, 246),
+ (31, HUFFMAN_EMIT_SYMBOL, 246),
+ (41, HUFFMAN_EMIT_SYMBOL, 246),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 246),
+
+ # Node 219
+ (3, HUFFMAN_EMIT_SYMBOL, 247),
+ (6, HUFFMAN_EMIT_SYMBOL, 247),
+ (10, HUFFMAN_EMIT_SYMBOL, 247),
+ (15, HUFFMAN_EMIT_SYMBOL, 247),
+ (24, HUFFMAN_EMIT_SYMBOL, 247),
+ (31, HUFFMAN_EMIT_SYMBOL, 247),
+ (41, HUFFMAN_EMIT_SYMBOL, 247),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 247),
+ (3, HUFFMAN_EMIT_SYMBOL, 248),
+ (6, HUFFMAN_EMIT_SYMBOL, 248),
+ (10, HUFFMAN_EMIT_SYMBOL, 248),
+ (15, HUFFMAN_EMIT_SYMBOL, 248),
+ (24, HUFFMAN_EMIT_SYMBOL, 248),
+ (31, HUFFMAN_EMIT_SYMBOL, 248),
+ (41, HUFFMAN_EMIT_SYMBOL, 248),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 248),
+
+ # Node 220
+ (2, HUFFMAN_EMIT_SYMBOL, 250),
+ (9, HUFFMAN_EMIT_SYMBOL, 250),
+ (23, HUFFMAN_EMIT_SYMBOL, 250),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 250),
+ (2, HUFFMAN_EMIT_SYMBOL, 251),
+ (9, HUFFMAN_EMIT_SYMBOL, 251),
+ (23, HUFFMAN_EMIT_SYMBOL, 251),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 251),
+ (2, HUFFMAN_EMIT_SYMBOL, 252),
+ (9, HUFFMAN_EMIT_SYMBOL, 252),
+ (23, HUFFMAN_EMIT_SYMBOL, 252),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 252),
+ (2, HUFFMAN_EMIT_SYMBOL, 253),
+ (9, HUFFMAN_EMIT_SYMBOL, 253),
+ (23, HUFFMAN_EMIT_SYMBOL, 253),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 253),
+
+ # Node 221
+ (3, HUFFMAN_EMIT_SYMBOL, 250),
+ (6, HUFFMAN_EMIT_SYMBOL, 250),
+ (10, HUFFMAN_EMIT_SYMBOL, 250),
+ (15, HUFFMAN_EMIT_SYMBOL, 250),
+ (24, HUFFMAN_EMIT_SYMBOL, 250),
+ (31, HUFFMAN_EMIT_SYMBOL, 250),
+ (41, HUFFMAN_EMIT_SYMBOL, 250),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 250),
+ (3, HUFFMAN_EMIT_SYMBOL, 251),
+ (6, HUFFMAN_EMIT_SYMBOL, 251),
+ (10, HUFFMAN_EMIT_SYMBOL, 251),
+ (15, HUFFMAN_EMIT_SYMBOL, 251),
+ (24, HUFFMAN_EMIT_SYMBOL, 251),
+ (31, HUFFMAN_EMIT_SYMBOL, 251),
+ (41, HUFFMAN_EMIT_SYMBOL, 251),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 251),
+
+ # Node 222
+ (3, HUFFMAN_EMIT_SYMBOL, 252),
+ (6, HUFFMAN_EMIT_SYMBOL, 252),
+ (10, HUFFMAN_EMIT_SYMBOL, 252),
+ (15, HUFFMAN_EMIT_SYMBOL, 252),
+ (24, HUFFMAN_EMIT_SYMBOL, 252),
+ (31, HUFFMAN_EMIT_SYMBOL, 252),
+ (41, HUFFMAN_EMIT_SYMBOL, 252),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 252),
+ (3, HUFFMAN_EMIT_SYMBOL, 253),
+ (6, HUFFMAN_EMIT_SYMBOL, 253),
+ (10, HUFFMAN_EMIT_SYMBOL, 253),
+ (15, HUFFMAN_EMIT_SYMBOL, 253),
+ (24, HUFFMAN_EMIT_SYMBOL, 253),
+ (31, HUFFMAN_EMIT_SYMBOL, 253),
+ (41, HUFFMAN_EMIT_SYMBOL, 253),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 253),
+
+ # Node 223
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 254),
+ (227, 0, 0),
+ (229, 0, 0),
+ (230, 0, 0),
+ (233, 0, 0),
+ (234, 0, 0),
+ (236, 0, 0),
+ (237, 0, 0),
+ (241, 0, 0),
+ (242, 0, 0),
+ (244, 0, 0),
+ (245, 0, 0),
+ (248, 0, 0),
+ (249, 0, 0),
+ (251, 0, 0),
+ (252, 0, 0),
+
+ # Node 224
+ (1, HUFFMAN_EMIT_SYMBOL, 254),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 254),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 2),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 3),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 4),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 5),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 6),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 7),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 8),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 11),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 12),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 14),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 15),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 16),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 17),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 18),
+
+ # Node 225
+ (2, HUFFMAN_EMIT_SYMBOL, 254),
+ (9, HUFFMAN_EMIT_SYMBOL, 254),
+ (23, HUFFMAN_EMIT_SYMBOL, 254),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 254),
+ (1, HUFFMAN_EMIT_SYMBOL, 2),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 2),
+ (1, HUFFMAN_EMIT_SYMBOL, 3),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 3),
+ (1, HUFFMAN_EMIT_SYMBOL, 4),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 4),
+ (1, HUFFMAN_EMIT_SYMBOL, 5),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 5),
+ (1, HUFFMAN_EMIT_SYMBOL, 6),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 6),
+ (1, HUFFMAN_EMIT_SYMBOL, 7),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 7),
+
+ # Node 226
+ (3, HUFFMAN_EMIT_SYMBOL, 254),
+ (6, HUFFMAN_EMIT_SYMBOL, 254),
+ (10, HUFFMAN_EMIT_SYMBOL, 254),
+ (15, HUFFMAN_EMIT_SYMBOL, 254),
+ (24, HUFFMAN_EMIT_SYMBOL, 254),
+ (31, HUFFMAN_EMIT_SYMBOL, 254),
+ (41, HUFFMAN_EMIT_SYMBOL, 254),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 254),
+ (2, HUFFMAN_EMIT_SYMBOL, 2),
+ (9, HUFFMAN_EMIT_SYMBOL, 2),
+ (23, HUFFMAN_EMIT_SYMBOL, 2),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 2),
+ (2, HUFFMAN_EMIT_SYMBOL, 3),
+ (9, HUFFMAN_EMIT_SYMBOL, 3),
+ (23, HUFFMAN_EMIT_SYMBOL, 3),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 3),
+
+ # Node 227
+ (3, HUFFMAN_EMIT_SYMBOL, 2),
+ (6, HUFFMAN_EMIT_SYMBOL, 2),
+ (10, HUFFMAN_EMIT_SYMBOL, 2),
+ (15, HUFFMAN_EMIT_SYMBOL, 2),
+ (24, HUFFMAN_EMIT_SYMBOL, 2),
+ (31, HUFFMAN_EMIT_SYMBOL, 2),
+ (41, HUFFMAN_EMIT_SYMBOL, 2),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 2),
+ (3, HUFFMAN_EMIT_SYMBOL, 3),
+ (6, HUFFMAN_EMIT_SYMBOL, 3),
+ (10, HUFFMAN_EMIT_SYMBOL, 3),
+ (15, HUFFMAN_EMIT_SYMBOL, 3),
+ (24, HUFFMAN_EMIT_SYMBOL, 3),
+ (31, HUFFMAN_EMIT_SYMBOL, 3),
+ (41, HUFFMAN_EMIT_SYMBOL, 3),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 3),
+
+ # Node 228
+ (2, HUFFMAN_EMIT_SYMBOL, 4),
+ (9, HUFFMAN_EMIT_SYMBOL, 4),
+ (23, HUFFMAN_EMIT_SYMBOL, 4),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 4),
+ (2, HUFFMAN_EMIT_SYMBOL, 5),
+ (9, HUFFMAN_EMIT_SYMBOL, 5),
+ (23, HUFFMAN_EMIT_SYMBOL, 5),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 5),
+ (2, HUFFMAN_EMIT_SYMBOL, 6),
+ (9, HUFFMAN_EMIT_SYMBOL, 6),
+ (23, HUFFMAN_EMIT_SYMBOL, 6),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 6),
+ (2, HUFFMAN_EMIT_SYMBOL, 7),
+ (9, HUFFMAN_EMIT_SYMBOL, 7),
+ (23, HUFFMAN_EMIT_SYMBOL, 7),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 7),
+
+ # Node 229
+ (3, HUFFMAN_EMIT_SYMBOL, 4),
+ (6, HUFFMAN_EMIT_SYMBOL, 4),
+ (10, HUFFMAN_EMIT_SYMBOL, 4),
+ (15, HUFFMAN_EMIT_SYMBOL, 4),
+ (24, HUFFMAN_EMIT_SYMBOL, 4),
+ (31, HUFFMAN_EMIT_SYMBOL, 4),
+ (41, HUFFMAN_EMIT_SYMBOL, 4),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 4),
+ (3, HUFFMAN_EMIT_SYMBOL, 5),
+ (6, HUFFMAN_EMIT_SYMBOL, 5),
+ (10, HUFFMAN_EMIT_SYMBOL, 5),
+ (15, HUFFMAN_EMIT_SYMBOL, 5),
+ (24, HUFFMAN_EMIT_SYMBOL, 5),
+ (31, HUFFMAN_EMIT_SYMBOL, 5),
+ (41, HUFFMAN_EMIT_SYMBOL, 5),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 5),
+
+ # Node 230
+ (3, HUFFMAN_EMIT_SYMBOL, 6),
+ (6, HUFFMAN_EMIT_SYMBOL, 6),
+ (10, HUFFMAN_EMIT_SYMBOL, 6),
+ (15, HUFFMAN_EMIT_SYMBOL, 6),
+ (24, HUFFMAN_EMIT_SYMBOL, 6),
+ (31, HUFFMAN_EMIT_SYMBOL, 6),
+ (41, HUFFMAN_EMIT_SYMBOL, 6),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 6),
+ (3, HUFFMAN_EMIT_SYMBOL, 7),
+ (6, HUFFMAN_EMIT_SYMBOL, 7),
+ (10, HUFFMAN_EMIT_SYMBOL, 7),
+ (15, HUFFMAN_EMIT_SYMBOL, 7),
+ (24, HUFFMAN_EMIT_SYMBOL, 7),
+ (31, HUFFMAN_EMIT_SYMBOL, 7),
+ (41, HUFFMAN_EMIT_SYMBOL, 7),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 7),
+
+ # Node 231
+ (1, HUFFMAN_EMIT_SYMBOL, 8),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 8),
+ (1, HUFFMAN_EMIT_SYMBOL, 11),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 11),
+ (1, HUFFMAN_EMIT_SYMBOL, 12),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 12),
+ (1, HUFFMAN_EMIT_SYMBOL, 14),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 14),
+ (1, HUFFMAN_EMIT_SYMBOL, 15),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 15),
+ (1, HUFFMAN_EMIT_SYMBOL, 16),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 16),
+ (1, HUFFMAN_EMIT_SYMBOL, 17),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 17),
+ (1, HUFFMAN_EMIT_SYMBOL, 18),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 18),
+
+ # Node 232
+ (2, HUFFMAN_EMIT_SYMBOL, 8),
+ (9, HUFFMAN_EMIT_SYMBOL, 8),
+ (23, HUFFMAN_EMIT_SYMBOL, 8),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 8),
+ (2, HUFFMAN_EMIT_SYMBOL, 11),
+ (9, HUFFMAN_EMIT_SYMBOL, 11),
+ (23, HUFFMAN_EMIT_SYMBOL, 11),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 11),
+ (2, HUFFMAN_EMIT_SYMBOL, 12),
+ (9, HUFFMAN_EMIT_SYMBOL, 12),
+ (23, HUFFMAN_EMIT_SYMBOL, 12),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 12),
+ (2, HUFFMAN_EMIT_SYMBOL, 14),
+ (9, HUFFMAN_EMIT_SYMBOL, 14),
+ (23, HUFFMAN_EMIT_SYMBOL, 14),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 14),
+
+ # Node 233
+ (3, HUFFMAN_EMIT_SYMBOL, 8),
+ (6, HUFFMAN_EMIT_SYMBOL, 8),
+ (10, HUFFMAN_EMIT_SYMBOL, 8),
+ (15, HUFFMAN_EMIT_SYMBOL, 8),
+ (24, HUFFMAN_EMIT_SYMBOL, 8),
+ (31, HUFFMAN_EMIT_SYMBOL, 8),
+ (41, HUFFMAN_EMIT_SYMBOL, 8),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 8),
+ (3, HUFFMAN_EMIT_SYMBOL, 11),
+ (6, HUFFMAN_EMIT_SYMBOL, 11),
+ (10, HUFFMAN_EMIT_SYMBOL, 11),
+ (15, HUFFMAN_EMIT_SYMBOL, 11),
+ (24, HUFFMAN_EMIT_SYMBOL, 11),
+ (31, HUFFMAN_EMIT_SYMBOL, 11),
+ (41, HUFFMAN_EMIT_SYMBOL, 11),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 11),
+
+ # Node 234
+ (3, HUFFMAN_EMIT_SYMBOL, 12),
+ (6, HUFFMAN_EMIT_SYMBOL, 12),
+ (10, HUFFMAN_EMIT_SYMBOL, 12),
+ (15, HUFFMAN_EMIT_SYMBOL, 12),
+ (24, HUFFMAN_EMIT_SYMBOL, 12),
+ (31, HUFFMAN_EMIT_SYMBOL, 12),
+ (41, HUFFMAN_EMIT_SYMBOL, 12),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 12),
+ (3, HUFFMAN_EMIT_SYMBOL, 14),
+ (6, HUFFMAN_EMIT_SYMBOL, 14),
+ (10, HUFFMAN_EMIT_SYMBOL, 14),
+ (15, HUFFMAN_EMIT_SYMBOL, 14),
+ (24, HUFFMAN_EMIT_SYMBOL, 14),
+ (31, HUFFMAN_EMIT_SYMBOL, 14),
+ (41, HUFFMAN_EMIT_SYMBOL, 14),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 14),
+
+ # Node 235
+ (2, HUFFMAN_EMIT_SYMBOL, 15),
+ (9, HUFFMAN_EMIT_SYMBOL, 15),
+ (23, HUFFMAN_EMIT_SYMBOL, 15),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 15),
+ (2, HUFFMAN_EMIT_SYMBOL, 16),
+ (9, HUFFMAN_EMIT_SYMBOL, 16),
+ (23, HUFFMAN_EMIT_SYMBOL, 16),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 16),
+ (2, HUFFMAN_EMIT_SYMBOL, 17),
+ (9, HUFFMAN_EMIT_SYMBOL, 17),
+ (23, HUFFMAN_EMIT_SYMBOL, 17),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 17),
+ (2, HUFFMAN_EMIT_SYMBOL, 18),
+ (9, HUFFMAN_EMIT_SYMBOL, 18),
+ (23, HUFFMAN_EMIT_SYMBOL, 18),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 18),
+
+ # Node 236
+ (3, HUFFMAN_EMIT_SYMBOL, 15),
+ (6, HUFFMAN_EMIT_SYMBOL, 15),
+ (10, HUFFMAN_EMIT_SYMBOL, 15),
+ (15, HUFFMAN_EMIT_SYMBOL, 15),
+ (24, HUFFMAN_EMIT_SYMBOL, 15),
+ (31, HUFFMAN_EMIT_SYMBOL, 15),
+ (41, HUFFMAN_EMIT_SYMBOL, 15),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 15),
+ (3, HUFFMAN_EMIT_SYMBOL, 16),
+ (6, HUFFMAN_EMIT_SYMBOL, 16),
+ (10, HUFFMAN_EMIT_SYMBOL, 16),
+ (15, HUFFMAN_EMIT_SYMBOL, 16),
+ (24, HUFFMAN_EMIT_SYMBOL, 16),
+ (31, HUFFMAN_EMIT_SYMBOL, 16),
+ (41, HUFFMAN_EMIT_SYMBOL, 16),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 16),
+
+ # Node 237
+ (3, HUFFMAN_EMIT_SYMBOL, 17),
+ (6, HUFFMAN_EMIT_SYMBOL, 17),
+ (10, HUFFMAN_EMIT_SYMBOL, 17),
+ (15, HUFFMAN_EMIT_SYMBOL, 17),
+ (24, HUFFMAN_EMIT_SYMBOL, 17),
+ (31, HUFFMAN_EMIT_SYMBOL, 17),
+ (41, HUFFMAN_EMIT_SYMBOL, 17),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 17),
+ (3, HUFFMAN_EMIT_SYMBOL, 18),
+ (6, HUFFMAN_EMIT_SYMBOL, 18),
+ (10, HUFFMAN_EMIT_SYMBOL, 18),
+ (15, HUFFMAN_EMIT_SYMBOL, 18),
+ (24, HUFFMAN_EMIT_SYMBOL, 18),
+ (31, HUFFMAN_EMIT_SYMBOL, 18),
+ (41, HUFFMAN_EMIT_SYMBOL, 18),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 18),
+
+ # Node 238
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 19),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 20),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 21),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 23),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 24),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 25),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 26),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 27),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 28),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 29),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 30),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 31),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 127),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 220),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 249),
+ (253, 0, 0),
+
+ # Node 239
+ (1, HUFFMAN_EMIT_SYMBOL, 19),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 19),
+ (1, HUFFMAN_EMIT_SYMBOL, 20),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 20),
+ (1, HUFFMAN_EMIT_SYMBOL, 21),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 21),
+ (1, HUFFMAN_EMIT_SYMBOL, 23),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 23),
+ (1, HUFFMAN_EMIT_SYMBOL, 24),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 24),
+ (1, HUFFMAN_EMIT_SYMBOL, 25),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 25),
+ (1, HUFFMAN_EMIT_SYMBOL, 26),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 26),
+ (1, HUFFMAN_EMIT_SYMBOL, 27),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 27),
+
+ # Node 240
+ (2, HUFFMAN_EMIT_SYMBOL, 19),
+ (9, HUFFMAN_EMIT_SYMBOL, 19),
+ (23, HUFFMAN_EMIT_SYMBOL, 19),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 19),
+ (2, HUFFMAN_EMIT_SYMBOL, 20),
+ (9, HUFFMAN_EMIT_SYMBOL, 20),
+ (23, HUFFMAN_EMIT_SYMBOL, 20),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 20),
+ (2, HUFFMAN_EMIT_SYMBOL, 21),
+ (9, HUFFMAN_EMIT_SYMBOL, 21),
+ (23, HUFFMAN_EMIT_SYMBOL, 21),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 21),
+ (2, HUFFMAN_EMIT_SYMBOL, 23),
+ (9, HUFFMAN_EMIT_SYMBOL, 23),
+ (23, HUFFMAN_EMIT_SYMBOL, 23),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 23),
+
+ # Node 241
+ (3, HUFFMAN_EMIT_SYMBOL, 19),
+ (6, HUFFMAN_EMIT_SYMBOL, 19),
+ (10, HUFFMAN_EMIT_SYMBOL, 19),
+ (15, HUFFMAN_EMIT_SYMBOL, 19),
+ (24, HUFFMAN_EMIT_SYMBOL, 19),
+ (31, HUFFMAN_EMIT_SYMBOL, 19),
+ (41, HUFFMAN_EMIT_SYMBOL, 19),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 19),
+ (3, HUFFMAN_EMIT_SYMBOL, 20),
+ (6, HUFFMAN_EMIT_SYMBOL, 20),
+ (10, HUFFMAN_EMIT_SYMBOL, 20),
+ (15, HUFFMAN_EMIT_SYMBOL, 20),
+ (24, HUFFMAN_EMIT_SYMBOL, 20),
+ (31, HUFFMAN_EMIT_SYMBOL, 20),
+ (41, HUFFMAN_EMIT_SYMBOL, 20),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 20),
+
+ # Node 242
+ (3, HUFFMAN_EMIT_SYMBOL, 21),
+ (6, HUFFMAN_EMIT_SYMBOL, 21),
+ (10, HUFFMAN_EMIT_SYMBOL, 21),
+ (15, HUFFMAN_EMIT_SYMBOL, 21),
+ (24, HUFFMAN_EMIT_SYMBOL, 21),
+ (31, HUFFMAN_EMIT_SYMBOL, 21),
+ (41, HUFFMAN_EMIT_SYMBOL, 21),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 21),
+ (3, HUFFMAN_EMIT_SYMBOL, 23),
+ (6, HUFFMAN_EMIT_SYMBOL, 23),
+ (10, HUFFMAN_EMIT_SYMBOL, 23),
+ (15, HUFFMAN_EMIT_SYMBOL, 23),
+ (24, HUFFMAN_EMIT_SYMBOL, 23),
+ (31, HUFFMAN_EMIT_SYMBOL, 23),
+ (41, HUFFMAN_EMIT_SYMBOL, 23),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 23),
+
+ # Node 243
+ (2, HUFFMAN_EMIT_SYMBOL, 24),
+ (9, HUFFMAN_EMIT_SYMBOL, 24),
+ (23, HUFFMAN_EMIT_SYMBOL, 24),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 24),
+ (2, HUFFMAN_EMIT_SYMBOL, 25),
+ (9, HUFFMAN_EMIT_SYMBOL, 25),
+ (23, HUFFMAN_EMIT_SYMBOL, 25),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 25),
+ (2, HUFFMAN_EMIT_SYMBOL, 26),
+ (9, HUFFMAN_EMIT_SYMBOL, 26),
+ (23, HUFFMAN_EMIT_SYMBOL, 26),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 26),
+ (2, HUFFMAN_EMIT_SYMBOL, 27),
+ (9, HUFFMAN_EMIT_SYMBOL, 27),
+ (23, HUFFMAN_EMIT_SYMBOL, 27),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 27),
+
+ # Node 244
+ (3, HUFFMAN_EMIT_SYMBOL, 24),
+ (6, HUFFMAN_EMIT_SYMBOL, 24),
+ (10, HUFFMAN_EMIT_SYMBOL, 24),
+ (15, HUFFMAN_EMIT_SYMBOL, 24),
+ (24, HUFFMAN_EMIT_SYMBOL, 24),
+ (31, HUFFMAN_EMIT_SYMBOL, 24),
+ (41, HUFFMAN_EMIT_SYMBOL, 24),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 24),
+ (3, HUFFMAN_EMIT_SYMBOL, 25),
+ (6, HUFFMAN_EMIT_SYMBOL, 25),
+ (10, HUFFMAN_EMIT_SYMBOL, 25),
+ (15, HUFFMAN_EMIT_SYMBOL, 25),
+ (24, HUFFMAN_EMIT_SYMBOL, 25),
+ (31, HUFFMAN_EMIT_SYMBOL, 25),
+ (41, HUFFMAN_EMIT_SYMBOL, 25),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 25),
+
+ # Node 245
+ (3, HUFFMAN_EMIT_SYMBOL, 26),
+ (6, HUFFMAN_EMIT_SYMBOL, 26),
+ (10, HUFFMAN_EMIT_SYMBOL, 26),
+ (15, HUFFMAN_EMIT_SYMBOL, 26),
+ (24, HUFFMAN_EMIT_SYMBOL, 26),
+ (31, HUFFMAN_EMIT_SYMBOL, 26),
+ (41, HUFFMAN_EMIT_SYMBOL, 26),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 26),
+ (3, HUFFMAN_EMIT_SYMBOL, 27),
+ (6, HUFFMAN_EMIT_SYMBOL, 27),
+ (10, HUFFMAN_EMIT_SYMBOL, 27),
+ (15, HUFFMAN_EMIT_SYMBOL, 27),
+ (24, HUFFMAN_EMIT_SYMBOL, 27),
+ (31, HUFFMAN_EMIT_SYMBOL, 27),
+ (41, HUFFMAN_EMIT_SYMBOL, 27),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 27),
+
+ # Node 246
+ (1, HUFFMAN_EMIT_SYMBOL, 28),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 28),
+ (1, HUFFMAN_EMIT_SYMBOL, 29),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 29),
+ (1, HUFFMAN_EMIT_SYMBOL, 30),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 30),
+ (1, HUFFMAN_EMIT_SYMBOL, 31),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 31),
+ (1, HUFFMAN_EMIT_SYMBOL, 127),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 127),
+ (1, HUFFMAN_EMIT_SYMBOL, 220),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 220),
+ (1, HUFFMAN_EMIT_SYMBOL, 249),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 249),
+ (254, 0, 0),
+ (255, 0, 0),
+
+ # Node 247
+ (2, HUFFMAN_EMIT_SYMBOL, 28),
+ (9, HUFFMAN_EMIT_SYMBOL, 28),
+ (23, HUFFMAN_EMIT_SYMBOL, 28),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 28),
+ (2, HUFFMAN_EMIT_SYMBOL, 29),
+ (9, HUFFMAN_EMIT_SYMBOL, 29),
+ (23, HUFFMAN_EMIT_SYMBOL, 29),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 29),
+ (2, HUFFMAN_EMIT_SYMBOL, 30),
+ (9, HUFFMAN_EMIT_SYMBOL, 30),
+ (23, HUFFMAN_EMIT_SYMBOL, 30),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 30),
+ (2, HUFFMAN_EMIT_SYMBOL, 31),
+ (9, HUFFMAN_EMIT_SYMBOL, 31),
+ (23, HUFFMAN_EMIT_SYMBOL, 31),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 31),
+
+ # Node 248
+ (3, HUFFMAN_EMIT_SYMBOL, 28),
+ (6, HUFFMAN_EMIT_SYMBOL, 28),
+ (10, HUFFMAN_EMIT_SYMBOL, 28),
+ (15, HUFFMAN_EMIT_SYMBOL, 28),
+ (24, HUFFMAN_EMIT_SYMBOL, 28),
+ (31, HUFFMAN_EMIT_SYMBOL, 28),
+ (41, HUFFMAN_EMIT_SYMBOL, 28),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 28),
+ (3, HUFFMAN_EMIT_SYMBOL, 29),
+ (6, HUFFMAN_EMIT_SYMBOL, 29),
+ (10, HUFFMAN_EMIT_SYMBOL, 29),
+ (15, HUFFMAN_EMIT_SYMBOL, 29),
+ (24, HUFFMAN_EMIT_SYMBOL, 29),
+ (31, HUFFMAN_EMIT_SYMBOL, 29),
+ (41, HUFFMAN_EMIT_SYMBOL, 29),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 29),
+
+ # Node 249
+ (3, HUFFMAN_EMIT_SYMBOL, 30),
+ (6, HUFFMAN_EMIT_SYMBOL, 30),
+ (10, HUFFMAN_EMIT_SYMBOL, 30),
+ (15, HUFFMAN_EMIT_SYMBOL, 30),
+ (24, HUFFMAN_EMIT_SYMBOL, 30),
+ (31, HUFFMAN_EMIT_SYMBOL, 30),
+ (41, HUFFMAN_EMIT_SYMBOL, 30),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 30),
+ (3, HUFFMAN_EMIT_SYMBOL, 31),
+ (6, HUFFMAN_EMIT_SYMBOL, 31),
+ (10, HUFFMAN_EMIT_SYMBOL, 31),
+ (15, HUFFMAN_EMIT_SYMBOL, 31),
+ (24, HUFFMAN_EMIT_SYMBOL, 31),
+ (31, HUFFMAN_EMIT_SYMBOL, 31),
+ (41, HUFFMAN_EMIT_SYMBOL, 31),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 31),
+
+ # Node 250
+ (2, HUFFMAN_EMIT_SYMBOL, 127),
+ (9, HUFFMAN_EMIT_SYMBOL, 127),
+ (23, HUFFMAN_EMIT_SYMBOL, 127),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 127),
+ (2, HUFFMAN_EMIT_SYMBOL, 220),
+ (9, HUFFMAN_EMIT_SYMBOL, 220),
+ (23, HUFFMAN_EMIT_SYMBOL, 220),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 220),
+ (2, HUFFMAN_EMIT_SYMBOL, 249),
+ (9, HUFFMAN_EMIT_SYMBOL, 249),
+ (23, HUFFMAN_EMIT_SYMBOL, 249),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 249),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 10),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 13),
+ (0, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 22),
+ (0, HUFFMAN_FAIL, 0),
+
+ # Node 251
+ (3, HUFFMAN_EMIT_SYMBOL, 127),
+ (6, HUFFMAN_EMIT_SYMBOL, 127),
+ (10, HUFFMAN_EMIT_SYMBOL, 127),
+ (15, HUFFMAN_EMIT_SYMBOL, 127),
+ (24, HUFFMAN_EMIT_SYMBOL, 127),
+ (31, HUFFMAN_EMIT_SYMBOL, 127),
+ (41, HUFFMAN_EMIT_SYMBOL, 127),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 127),
+ (3, HUFFMAN_EMIT_SYMBOL, 220),
+ (6, HUFFMAN_EMIT_SYMBOL, 220),
+ (10, HUFFMAN_EMIT_SYMBOL, 220),
+ (15, HUFFMAN_EMIT_SYMBOL, 220),
+ (24, HUFFMAN_EMIT_SYMBOL, 220),
+ (31, HUFFMAN_EMIT_SYMBOL, 220),
+ (41, HUFFMAN_EMIT_SYMBOL, 220),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 220),
+
+ # Node 252
+ (3, HUFFMAN_EMIT_SYMBOL, 249),
+ (6, HUFFMAN_EMIT_SYMBOL, 249),
+ (10, HUFFMAN_EMIT_SYMBOL, 249),
+ (15, HUFFMAN_EMIT_SYMBOL, 249),
+ (24, HUFFMAN_EMIT_SYMBOL, 249),
+ (31, HUFFMAN_EMIT_SYMBOL, 249),
+ (41, HUFFMAN_EMIT_SYMBOL, 249),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 249),
+ (1, HUFFMAN_EMIT_SYMBOL, 10),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 10),
+ (1, HUFFMAN_EMIT_SYMBOL, 13),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 13),
+ (1, HUFFMAN_EMIT_SYMBOL, 22),
+ (22, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 22),
+ (0, HUFFMAN_FAIL, 0),
+ (0, HUFFMAN_FAIL, 0),
+
+ # Node 253
+ (2, HUFFMAN_EMIT_SYMBOL, 10),
+ (9, HUFFMAN_EMIT_SYMBOL, 10),
+ (23, HUFFMAN_EMIT_SYMBOL, 10),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 10),
+ (2, HUFFMAN_EMIT_SYMBOL, 13),
+ (9, HUFFMAN_EMIT_SYMBOL, 13),
+ (23, HUFFMAN_EMIT_SYMBOL, 13),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 13),
+ (2, HUFFMAN_EMIT_SYMBOL, 22),
+ (9, HUFFMAN_EMIT_SYMBOL, 22),
+ (23, HUFFMAN_EMIT_SYMBOL, 22),
+ (40, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 22),
+ (0, HUFFMAN_FAIL, 0),
+ (0, HUFFMAN_FAIL, 0),
+ (0, HUFFMAN_FAIL, 0),
+ (0, HUFFMAN_FAIL, 0),
+
+ # Node 254
+ (3, HUFFMAN_EMIT_SYMBOL, 10),
+ (6, HUFFMAN_EMIT_SYMBOL, 10),
+ (10, HUFFMAN_EMIT_SYMBOL, 10),
+ (15, HUFFMAN_EMIT_SYMBOL, 10),
+ (24, HUFFMAN_EMIT_SYMBOL, 10),
+ (31, HUFFMAN_EMIT_SYMBOL, 10),
+ (41, HUFFMAN_EMIT_SYMBOL, 10),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 10),
+ (3, HUFFMAN_EMIT_SYMBOL, 13),
+ (6, HUFFMAN_EMIT_SYMBOL, 13),
+ (10, HUFFMAN_EMIT_SYMBOL, 13),
+ (15, HUFFMAN_EMIT_SYMBOL, 13),
+ (24, HUFFMAN_EMIT_SYMBOL, 13),
+ (31, HUFFMAN_EMIT_SYMBOL, 13),
+ (41, HUFFMAN_EMIT_SYMBOL, 13),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 13),
+
+ # Node 255
+ (3, HUFFMAN_EMIT_SYMBOL, 22),
+ (6, HUFFMAN_EMIT_SYMBOL, 22),
+ (10, HUFFMAN_EMIT_SYMBOL, 22),
+ (15, HUFFMAN_EMIT_SYMBOL, 22),
+ (24, HUFFMAN_EMIT_SYMBOL, 22),
+ (31, HUFFMAN_EMIT_SYMBOL, 22),
+ (41, HUFFMAN_EMIT_SYMBOL, 22),
+ (56, HUFFMAN_COMPLETE | HUFFMAN_EMIT_SYMBOL, 22),
+ (0, HUFFMAN_FAIL, 0),
+ (0, HUFFMAN_FAIL, 0),
+ (0, HUFFMAN_FAIL, 0),
+ (0, HUFFMAN_FAIL, 0),
+ (0, HUFFMAN_FAIL, 0),
+ (0, HUFFMAN_FAIL, 0),
+ (0, HUFFMAN_FAIL, 0),
+ (0, HUFFMAN_FAIL, 0),
+]
diff --git a/testing/web-platform/tests/tools/third_party/hpack/hpack/struct.py b/testing/web-platform/tests/tools/third_party/hpack/hpack/struct.py
new file mode 100644
index 0000000000..e860cd756e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/hpack/struct.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+"""
+hpack/struct
+~~~~~~~~~~~~
+
+Contains structures for representing header fields with associated metadata.
+"""
+
+
+class HeaderTuple(tuple):
+ """
+ A data structure that stores a single header field.
+
+ HTTP headers can be thought of as tuples of ``(field name, field value)``.
+ A single header block is a sequence of such tuples.
+
+ In HTTP/2, however, certain bits of additional information are required for
+ compressing these headers: in particular, whether the header field can be
+ safely added to the HPACK compression context.
+
+ This class stores a header that can be added to the compression context. In
+ all other ways it behaves exactly like a tuple.
+ """
+ __slots__ = ()
+
+ indexable = True
+
+ def __new__(_cls, *args):
+ return tuple.__new__(_cls, args)
+
+
+class NeverIndexedHeaderTuple(HeaderTuple):
+ """
+ A data structure that stores a single header field that cannot be added to
+ an HTTP/2 header compression context.
+ """
+ __slots__ = ()
+
+ indexable = False
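+
+
+ # A minimal usage sketch (editor's addition, not part of upstream hpack):
+ # both classes behave exactly like plain tuples and differ only in the
+ # ``indexable`` flag that the encoder consults.
+ if __name__ == '__main__':
+ ok = HeaderTuple('custom-key', 'custom-value')
+ secret = NeverIndexedHeaderTuple('authorization', 'secret-token')
+ assert ok == ('custom-key', 'custom-value')
+ assert ok.indexable and not secret.indexable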
diff --git a/testing/web-platform/tests/tools/third_party/hpack/hpack/table.py b/testing/web-platform/tests/tools/third_party/hpack/hpack/table.py
new file mode 100644
index 0000000000..9a89c72118
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/hpack/table.py
@@ -0,0 +1,215 @@
+# -*- coding: utf-8 -*-
+# flake8: noqa
+from collections import deque
+import logging
+
+from .exceptions import InvalidTableIndex
+
+log = logging.getLogger(__name__)
+
+
+def table_entry_size(name, value):
+ """
+ Calculates the size of a single entry
+
+ This size is mostly irrelevant to us and is defined
+ specifically to accommodate memory management for
+ lower-level implementations. The 32 extra bytes are
+ considered the "maximum" overhead that would be
+ required to represent each entry in the table.
+
+ See RFC7541 Section 4.1
+ """
+ return 32 + len(name) + len(value)
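+
+ # Worked example (editor's note): the entry (b':method', b'GET') occupies
+ # 32 + len(b':method') + len(b'GET') = 32 + 7 + 3 = 42 octets.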
+
+
+class HeaderTable(object):
+ """
+ Implements the combined static and dynamic header table
+
+ The name and value arguments for all the functions
+ should ONLY be byte strings (b''); however, this is
+ not strictly enforced in the interface.
+
+ See RFC7541 Section 2.3
+ """
+ #: Default maximum size of the dynamic table. See
+ #: RFC7540 Section 6.5.2.
+ DEFAULT_SIZE = 4096
+
+ #: Constant list of static headers. See RFC7541 Section
+ #: 2.3.1 and Appendix A
+ STATIC_TABLE = (
+ (b':authority' , b'' ), # noqa
+ (b':method' , b'GET' ), # noqa
+ (b':method' , b'POST' ), # noqa
+ (b':path' , b'/' ), # noqa
+ (b':path' , b'/index.html' ), # noqa
+ (b':scheme' , b'http' ), # noqa
+ (b':scheme' , b'https' ), # noqa
+ (b':status' , b'200' ), # noqa
+ (b':status' , b'204' ), # noqa
+ (b':status' , b'206' ), # noqa
+ (b':status' , b'304' ), # noqa
+ (b':status' , b'400' ), # noqa
+ (b':status' , b'404' ), # noqa
+ (b':status' , b'500' ), # noqa
+ (b'accept-charset' , b'' ), # noqa
+ (b'accept-encoding' , b'gzip, deflate'), # noqa
+ (b'accept-language' , b'' ), # noqa
+ (b'accept-ranges' , b'' ), # noqa
+ (b'accept' , b'' ), # noqa
+ (b'access-control-allow-origin' , b'' ), # noqa
+ (b'age' , b'' ), # noqa
+ (b'allow' , b'' ), # noqa
+ (b'authorization' , b'' ), # noqa
+ (b'cache-control' , b'' ), # noqa
+ (b'content-disposition' , b'' ), # noqa
+ (b'content-encoding' , b'' ), # noqa
+ (b'content-language' , b'' ), # noqa
+ (b'content-length' , b'' ), # noqa
+ (b'content-location' , b'' ), # noqa
+ (b'content-range' , b'' ), # noqa
+ (b'content-type' , b'' ), # noqa
+ (b'cookie' , b'' ), # noqa
+ (b'date' , b'' ), # noqa
+ (b'etag' , b'' ), # noqa
+ (b'expect' , b'' ), # noqa
+ (b'expires' , b'' ), # noqa
+ (b'from' , b'' ), # noqa
+ (b'host' , b'' ), # noqa
+ (b'if-match' , b'' ), # noqa
+ (b'if-modified-since' , b'' ), # noqa
+ (b'if-none-match' , b'' ), # noqa
+ (b'if-range' , b'' ), # noqa
+ (b'if-unmodified-since' , b'' ), # noqa
+ (b'last-modified' , b'' ), # noqa
+ (b'link' , b'' ), # noqa
+ (b'location' , b'' ), # noqa
+ (b'max-forwards' , b'' ), # noqa
+ (b'proxy-authenticate' , b'' ), # noqa
+ (b'proxy-authorization' , b'' ), # noqa
+ (b'range' , b'' ), # noqa
+ (b'referer' , b'' ), # noqa
+ (b'refresh' , b'' ), # noqa
+ (b'retry-after' , b'' ), # noqa
+ (b'server' , b'' ), # noqa
+ (b'set-cookie' , b'' ), # noqa
+ (b'strict-transport-security' , b'' ), # noqa
+ (b'transfer-encoding' , b'' ), # noqa
+ (b'user-agent' , b'' ), # noqa
+ (b'vary' , b'' ), # noqa
+ (b'via' , b'' ), # noqa
+ (b'www-authenticate' , b'' ), # noqa
+ ) # noqa
+
+ STATIC_TABLE_LENGTH = len(STATIC_TABLE)
+
+ def __init__(self):
+ self._maxsize = HeaderTable.DEFAULT_SIZE
+ self._current_size = 0
+ self.resized = False
+ self.dynamic_entries = deque()
+
+ def get_by_index(self, index):
+ """
+ Returns the entry specified by index
+
+ Note that the table is 1-based, i.e. an index of 0 is
+ invalid. This is because a zero-valued index signals
+ that a completely unindexed header follows.
+
+ The entry will either be from the static table or
+ the dynamic table depending on the value of index.
+ """
+ original_index = index
+ index -= 1
+ if 0 <= index:
+ if index < HeaderTable.STATIC_TABLE_LENGTH:
+ return HeaderTable.STATIC_TABLE[index]
+
+ index -= HeaderTable.STATIC_TABLE_LENGTH
+ if index < len(self.dynamic_entries):
+ return self.dynamic_entries[index]
+
+ raise InvalidTableIndex("Invalid table index %d" % original_index)
+
+ def __repr__(self):
+ return "HeaderTable(%d, %s, %r)" % (
+ self._maxsize,
+ self.resized,
+ self.dynamic_entries
+ )
+
+ def add(self, name, value):
+ """
+ Adds a new entry to the table
+
+ We reduce the table size if the entry will make the
+ table size greater than maxsize.
+ """
+ # We just clear the table if the entry is too big
+ size = table_entry_size(name, value)
+ if size > self._maxsize:
+ self.dynamic_entries.clear()
+ self._current_size = 0
+ else:
+ # Add new entry
+ self.dynamic_entries.appendleft((name, value))
+ self._current_size += size
+ self._shrink()
+
+ def search(self, name, value):
+ """
+ Searches the table for the entry specified by name
+ and value
+
+ Returns one of the following:
+ - ``None`` for no match at all.
+ - ``(index, name, None)`` for partial matches on name only.
+ - ``(index, name, value)`` for perfect matches.
+ """
+ offset = HeaderTable.STATIC_TABLE_LENGTH + 1
+ partial = None
+ for (i, (n, v)) in enumerate(HeaderTable.STATIC_TABLE):
+ if n == name:
+ if v == value:
+ return i + 1, n, v
+ elif partial is None:
+ partial = (i + 1, n, None)
+ for (i, (n, v)) in enumerate(self.dynamic_entries):
+ if n == name:
+ if v == value:
+ return i + offset, n, v
+ elif partial is None:
+ partial = (i + offset, n, None)
+ return partial
+
+ @property
+ def maxsize(self):
+ return self._maxsize
+
+ @maxsize.setter
+ def maxsize(self, newmax):
+ newmax = int(newmax)
+ log.debug("Resizing header table to %d from %d", newmax, self._maxsize)
+ oldmax = self._maxsize
+ self._maxsize = newmax
+ self.resized = (newmax != oldmax)
+ if newmax <= 0:
+ self.dynamic_entries.clear()
+ self._current_size = 0
+ elif oldmax > newmax:
+ self._shrink()
+
+ def _shrink(self):
+ """
+ Shrinks the dynamic table to be at or below maxsize
+ """
+ cursize = self._current_size
+ while cursize > self._maxsize:
+ name, value = self.dynamic_entries.pop()
+ cursize -= table_entry_size(name, value)
+ log.debug("Evicting %s: %s from the header table", name, value)
+ self._current_size = cursize
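+
+
+ # A minimal usage sketch (editor's addition, not part of upstream hpack):
+ # indices are 1-based, the static table occupies the low indices, and
+ # dynamic entries follow it, newest first.
+ if __name__ == '__main__':
+ tbl = HeaderTable()
+ assert tbl.get_by_index(2) == (b':method', b'GET')
+ tbl.add(b'x-custom', b'value')
+ first_dynamic = HeaderTable.STATIC_TABLE_LENGTH + 1
+ assert tbl.get_by_index(first_dynamic) == (b'x-custom', b'value')
+ assert tbl.search(b'x-custom', b'value') == (first_dynamic, b'x-custom', b'value')
+ assert tbl.search(b':authority', b'nope') == (1, b':authority', None)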
diff --git a/testing/web-platform/tests/tools/third_party/hpack/setup.cfg b/testing/web-platform/tests/tools/third_party/hpack/setup.cfg
new file mode 100644
index 0000000000..b1d2b88c0a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/setup.cfg
@@ -0,0 +1,12 @@
+[wheel]
+universal = 1
+
+[flake8]
+max-complexity = 10
+exclude =
+ hpack/huffman_constants.py
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/testing/web-platform/tests/tools/third_party/hpack/setup.py b/testing/web-platform/tests/tools/third_party/hpack/setup.py
new file mode 100644
index 0000000000..7ffc4beb3d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/setup.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import os
+import re
+import sys
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+# Get the version
+version_regex = r'__version__ = ["\']([^"\']*)["\']'
+with open('hpack/__init__.py', 'r') as f:
+ text = f.read()
+ match = re.search(version_regex, text)
+
+ if match:
+ version = match.group(1)
+ else:
+ raise RuntimeError("No version number found!")
+
+# Stealing this from Kenneth Reitz
+if sys.argv[-1] == 'publish':
+ os.system('python setup.py sdist upload')
+ sys.exit()
+
+packages = ['hpack']
+
+setup(
+ name='hpack',
+ version=version,
+ description='Pure-Python HPACK header compression',
+ long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
+ author='Cory Benfield',
+ author_email='cory@lukasa.co.uk',
+ url='http://hyper.rtfd.org',
+ packages=packages,
+ package_data={'': ['LICENSE', 'README.rst', 'CONTRIBUTORS.rst', 'HISTORY.rst', 'NOTICES']},
+ package_dir={'hpack': 'hpack'},
+ include_package_data=True,
+ license='MIT License',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ ],
+)
diff --git a/testing/web-platform/tests/tools/third_party/hpack/test/test_encode_decode.py b/testing/web-platform/tests/tools/third_party/hpack/test/test_encode_decode.py
new file mode 100644
index 0000000000..94820f2e9e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/test/test_encode_decode.py
@@ -0,0 +1,141 @@
+# -*- coding: utf-8 -*-
+"""
+Test for the integer encoding/decoding functionality in the HPACK library.
+"""
+import pytest
+
+from hypothesis import given
+from hypothesis.strategies import integers, binary, one_of
+
+from hpack.hpack import encode_integer, decode_integer
+from hpack.exceptions import HPACKDecodingError
+
+
+class TestIntegerEncoding(object):
+ # These tests are stolen from the HPACK spec.
+ def test_encoding_10_with_5_bit_prefix(self):
+ val = encode_integer(10, 5)
+ assert len(val) == 1
+ assert val == bytearray(b'\x0a')
+
+ def test_encoding_1337_with_5_bit_prefix(self):
+ val = encode_integer(1337, 5)
+ assert len(val) == 3
+ assert val == bytearray(b'\x1f\x9a\x0a')
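+
+ # Worked arithmetic for the 1337 case (editor's note): the 5-bit prefix
+ # holds at most 2**5 - 1 = 31, so the first byte is 0x1f and the
+ # remainder 1337 - 31 = 1306 follows in 7-bit groups, least significant
+ # first: 1306 % 128 = 26 with the continuation bit set gives 0x9a, and
+ # 1306 // 128 = 10 gives the final byte 0x0a.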
+
+ def test_encoding_42_with_8_bit_prefix(self):
+ val = encode_integer(42, 8)
+ assert len(val) == 1
+ assert val == bytearray(b'\x2a')
+
+
+class TestIntegerDecoding(object):
+ # These tests are stolen from the HPACK spec.
+ def test_decoding_10_with_5_bit_prefix(self):
+ val = decode_integer(b'\x0a', 5)
+ assert val == (10, 1)
+
+ def test_encoding_1337_with_5_bit_prefix(self):
+ val = decode_integer(b'\x1f\x9a\x0a', 5)
+ assert val == (1337, 3)
+
+ def test_encoding_42_with_8_bit_prefix(self):
+ val = decode_integer(b'\x2a', 8)
+ assert val == (42, 1)
+
+ def test_decode_empty_string_fails(self):
+ with pytest.raises(HPACKDecodingError):
+ decode_integer(b'', 8)
+
+ def test_decode_insufficient_data_fails(self):
+ with pytest.raises(HPACKDecodingError):
+ decode_integer(b'\x1f', 5)
+
+
+class TestEncodingProperties(object):
+ """
+ Property-based tests for our integer encoder and decoder.
+ """
+ @given(
+ integer=integers(min_value=0),
+ prefix_bits=integers(min_value=1, max_value=8)
+ )
+ def test_encode_positive_integer_always_valid(self, integer, prefix_bits):
+ """
+ So long as the prefix bits are between 1 and 8, any non-negative
+ integer can be represented.
+ """
+ result = encode_integer(integer, prefix_bits)
+ assert isinstance(result, bytearray)
+ assert len(result) > 0
+
+ @given(
+ integer=integers(max_value=-1),
+ prefix_bits=integers(min_value=1, max_value=8)
+ )
+ def test_encode_fails_for_negative_integers(self, integer, prefix_bits):
+ """
+ If the integer to encode is negative, the encoder fails.
+ """
+ with pytest.raises(ValueError):
+ encode_integer(integer, prefix_bits)
+
+ @given(
+ integer=integers(min_value=0),
+ prefix_bits=one_of(
+ integers(max_value=0),
+ integers(min_value=9)
+ )
+ )
+ def test_encode_fails_for_invalid_prefixes(self, integer, prefix_bits):
+ """
+ If the prefix is out of the range [1,8], the encoder fails.
+ """
+ with pytest.raises(ValueError):
+ encode_integer(integer, prefix_bits)
+
+ @given(
+ prefix_bits=one_of(
+ integers(max_value=0),
+ integers(min_value=9)
+ )
+ )
+ def test_decode_fails_for_invalid_prefixes(self, prefix_bits):
+ """
+ If the prefix is out of the range [1,8], the decoder fails.
+ """
+ with pytest.raises(ValueError):
+ decode_integer(b'\x00', prefix_bits)
+
+ @given(
+ data=binary(),
+ prefix_bits=integers(min_value=1, max_value=8)
+ )
+ def test_decode_either_succeeds_or_raises_error(self, data, prefix_bits):
+ """
+ Attempting to decode data either returns a non-negative integer or
+ raises an HPACKDecodingError.
+ """
+ try:
+ result, consumed = decode_integer(data, prefix_bits)
+ except HPACKDecodingError:
+ pass
+ else:
+ assert isinstance(result, int)
+ assert result >= 0
+ assert consumed > 0
+
+ @given(
+ integer=integers(min_value=0),
+ prefix_bits=integers(min_value=1, max_value=8)
+ )
+ def test_encode_decode_round_trips(self, integer, prefix_bits):
+ """
+ Given valid data, the encoder and decoder can round trip.
+ """
+ encoded_result = encode_integer(integer, prefix_bits)
+ decoded_integer, consumed = decode_integer(
+ bytes(encoded_result), prefix_bits
+ )
+ assert integer == decoded_integer
+ assert consumed > 0
diff --git a/testing/web-platform/tests/tools/third_party/hpack/test/test_hpack.py b/testing/web-platform/tests/tools/third_party/hpack/test/test_hpack.py
new file mode 100644
index 0000000000..c3333b4144
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/test/test_hpack.py
@@ -0,0 +1,828 @@
+# -*- coding: utf-8 -*-
+from hpack.hpack import Encoder, Decoder, _dict_to_iterable, _to_bytes
+from hpack.exceptions import (
+ HPACKDecodingError, InvalidTableIndex, OversizedHeaderListError,
+ InvalidTableSizeError
+)
+from hpack.struct import HeaderTuple, NeverIndexedHeaderTuple
+import itertools
+import pytest
+
+from hypothesis import given
+from hypothesis.strategies import text, binary, sets, one_of
+
+try:
+ unicode = unicode
+except NameError:
+ unicode = str
+
+
+class TestHPACKEncoder(object):
+ # These tests are stolen entirely from the IETF specification examples.
+ def test_literal_header_field_with_indexing(self):
+ """
+ The header field representation uses a literal name and a literal
+ value.
+ """
+ e = Encoder()
+ header_set = {'custom-key': 'custom-header'}
+ result = b'\x40\x0acustom-key\x0dcustom-header'
+
+ assert e.encode(header_set, huffman=False) == result
+ assert list(e.header_table.dynamic_entries) == [
+ (n.encode('utf-8'), v.encode('utf-8'))
+ for n, v in header_set.items()
+ ]
+
+ def test_sensitive_headers(self):
+ """
+ Sensitive header fields are encoded without adding anything to the
+ compression context.
+ """
+ e = Encoder()
+ result = (b'\x82\x14\x88\x63\xa1\xa9' +
+ b'\x32\x08\x73\xd0\xc7\x10' +
+ b'\x87\x25\xa8\x49\xe9\xea' +
+ b'\x5f\x5f\x89\x41\x6a\x41' +
+ b'\x92\x6e\xe5\x35\x52\x9f')
+ header_set = [
+ (':method', 'GET', True),
+ (':path', '/jimiscool/', True),
+ ('customkey', 'sensitiveinfo', True),
+ ]
+ assert e.encode(header_set, huffman=True) == result
+
+ def test_non_sensitive_headers_with_header_tuples(self):
+ """
+ A header field stored in a HeaderTuple emits a representation that
+ allows indexing.
+ """
+ e = Encoder()
+ result = (b'\x82\x44\x88\x63\xa1\xa9' +
+ b'\x32\x08\x73\xd0\xc7\x40' +
+ b'\x87\x25\xa8\x49\xe9\xea' +
+ b'\x5f\x5f\x89\x41\x6a\x41' +
+ b'\x92\x6e\xe5\x35\x52\x9f')
+ header_set = [
+ HeaderTuple(':method', 'GET'),
+ HeaderTuple(':path', '/jimiscool/'),
+ HeaderTuple('customkey', 'sensitiveinfo'),
+ ]
+ assert e.encode(header_set, huffman=True) == result
+
+ def test_sensitive_headers_with_header_tuples(self):
+ """
+ A header field stored in a NeverIndexedHeaderTuple emits a
+ representation that forbids indexing.
+ """
+ e = Encoder()
+ result = (b'\x82\x14\x88\x63\xa1\xa9' +
+ b'\x32\x08\x73\xd0\xc7\x10' +
+ b'\x87\x25\xa8\x49\xe9\xea' +
+ b'\x5f\x5f\x89\x41\x6a\x41' +
+ b'\x92\x6e\xe5\x35\x52\x9f')
+ header_set = [
+ NeverIndexedHeaderTuple(':method', 'GET'),
+ NeverIndexedHeaderTuple(':path', '/jimiscool/'),
+ NeverIndexedHeaderTuple('customkey', 'sensitiveinfo'),
+ ]
+ assert e.encode(header_set, huffman=True) == result
+
+ def test_header_table_size_getter(self):
+ e = Encoder()
+ assert e.header_table_size == 4096
+
+ def test_indexed_literal_header_field_with_indexing(self):
+ """
+ The header field representation uses an indexed name and a literal
+ value and performs incremental indexing.
+ """
+ e = Encoder()
+ header_set = {':path': '/sample/path'}
+ result = b'\x44\x0c/sample/path'
+
+ assert e.encode(header_set, huffman=False) == result
+ assert list(e.header_table.dynamic_entries) == [
+ (n.encode('utf-8'), v.encode('utf-8'))
+ for n, v in header_set.items()
+ ]
+
+ def test_indexed_header_field(self):
+ """
+ The header field representation uses an indexed header field, from
+ the static table.
+ """
+ e = Encoder()
+ header_set = {':method': 'GET'}
+ result = b'\x82'
+
+ assert e.encode(header_set, huffman=False) == result
+ assert list(e.header_table.dynamic_entries) == []
+
+ def test_indexed_header_field_from_static_table(self):
+ e = Encoder()
+ e.header_table_size = 0
+ header_set = {':method': 'GET'}
+ result = b'\x82'
+
+ # Make sure we don't emit an encoding context update.
+ e.header_table.resized = False
+
+ assert e.encode(header_set, huffman=False) == result
+ assert list(e.header_table.dynamic_entries) == []
+
+ def test_request_examples_without_huffman(self):
+ """
+ This section shows several consecutive header sets, corresponding to
+ HTTP requests, on the same connection.
+ """
+ e = Encoder()
+ first_header_set = [
+ (':method', 'GET',),
+ (':scheme', 'http',),
+ (':path', '/',),
+ (':authority', 'www.example.com'),
+ ]
+ # We should have :authority in first_header_table since we index it
+ first_header_table = [(':authority', 'www.example.com')]
+ first_result = b'\x82\x86\x84\x41\x0fwww.example.com'
+
+ assert e.encode(first_header_set, huffman=False) == first_result
+ assert list(e.header_table.dynamic_entries) == [
+ (n.encode('utf-8'), v.encode('utf-8'))
+ for n, v in first_header_table
+ ]
+
+ second_header_set = [
+ (':method', 'GET',),
+ (':scheme', 'http',),
+ (':path', '/',),
+ (':authority', 'www.example.com',),
+ ('cache-control', 'no-cache'),
+ ]
+ second_header_table = [
+ ('cache-control', 'no-cache'),
+ (':authority', 'www.example.com')
+ ]
+ second_result = b'\x82\x86\x84\xbeX\x08no-cache'
+
+ assert e.encode(second_header_set, huffman=False) == second_result
+ assert list(e.header_table.dynamic_entries) == [
+ (n.encode('utf-8'), v.encode('utf-8'))
+ for n, v in second_header_table
+ ]
+
+ third_header_set = [
+ (':method', 'GET',),
+ (':scheme', 'https',),
+ (':path', '/index.html',),
+ (':authority', 'www.example.com',),
+ ('custom-key', 'custom-value'),
+ ]
+ third_result = (
+ b'\x82\x87\x85\xbf@\ncustom-key\x0ccustom-value'
+ )
+
+ assert e.encode(third_header_set, huffman=False) == third_result
+ # Don't check the header table here; it's just too complex to be
+ # reliable. Check its length, though.
+ assert len(e.header_table.dynamic_entries) == 3
+
+ def test_request_examples_with_huffman(self):
+ """
+ This section shows the same examples as the previous section, but
+ using Huffman encoding for the literal values.
+ """
+ e = Encoder()
+ first_header_set = [
+ (':method', 'GET',),
+ (':scheme', 'http',),
+ (':path', '/',),
+ (':authority', 'www.example.com'),
+ ]
+ first_header_table = [(':authority', 'www.example.com')]
+ first_result = (
+ b'\x82\x86\x84\x41\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0\xab\x90\xf4\xff'
+ )
+
+ assert e.encode(first_header_set, huffman=True) == first_result
+ assert list(e.header_table.dynamic_entries) == [
+ (n.encode('utf-8'), v.encode('utf-8'))
+ for n, v in first_header_table
+ ]
+
+ second_header_table = [
+ ('cache-control', 'no-cache'),
+ (':authority', 'www.example.com')
+ ]
+ second_header_set = [
+ (':method', 'GET',),
+ (':scheme', 'http',),
+ (':path', '/',),
+ (':authority', 'www.example.com',),
+ ('cache-control', 'no-cache'),
+ ]
+ second_result = b'\x82\x86\x84\xbeX\x86\xa8\xeb\x10d\x9c\xbf'
+
+ assert e.encode(second_header_set, huffman=True) == second_result
+ assert list(e.header_table.dynamic_entries) == [
+ (n.encode('utf-8'), v.encode('utf-8'))
+ for n, v in second_header_table
+ ]
+
+ third_header_set = [
+ (':method', 'GET',),
+ (':scheme', 'https',),
+ (':path', '/index.html',),
+ (':authority', 'www.example.com',),
+ ('custom-key', 'custom-value'),
+ ]
+ third_result = (
+ b'\x82\x87\x85\xbf'
+ b'@\x88%\xa8I\xe9[\xa9}\x7f\x89%\xa8I\xe9[\xb8\xe8\xb4\xbf'
+ )
+
+ assert e.encode(third_header_set, huffman=True) == third_result
+ assert len(e.header_table.dynamic_entries) == 3
+
+ # These tests are custom, for hyper.
+ def test_resizing_header_table(self):
+ # We need to encode a substantial number of headers, to populate the
+ # header table.
+ e = Encoder()
+ header_set = [
+ (':method', 'GET'),
+ (':scheme', 'https'),
+ (':path', '/some/path'),
+ (':authority', 'www.example.com'),
+ ('custom-key', 'custom-value'),
+ (
+ "user-agent",
+ "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:16.0) "
+ "Gecko/20100101 Firefox/16.0",
+ ),
+ (
+ "accept",
+ "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;"
+ "q=0.8",
+ ),
+ ('X-Lukasa-Test', '88989'),
+ ]
+ e.encode(header_set, huffman=True)
+
+ # Resize the header table to a size so small that nothing can be in it.
+ e.header_table_size = 40
+ assert len(e.header_table.dynamic_entries) == 0
+
+ def test_resizing_header_table_sends_multiple_updates(self):
+ e = Encoder()
+
+ e.header_table_size = 40
+ e.header_table_size = 100
+ e.header_table_size = 40
+
+ header_set = [(':method', 'GET')]
+ out = e.encode(header_set, huffman=True)
+ assert out == b'\x3F\x09\x3F\x45\x3F\x09\x82'
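+
+ # Byte-level reading of the expected output (editor's note): 0x3f 0x09,
+ # 0x3f 0x45 and 0x3f 0x09 are dynamic table size updates to 31 + 9 = 40,
+ # 31 + 69 = 100 and 40 again, and the trailing 0x82 is the indexed
+ # static-table entry for ':method: GET'.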
+
+ def test_resizing_header_table_to_same_size_ignored(self):
+ e = Encoder()
+
+ # These size changes should be ignored
+ e.header_table_size = 4096
+ e.header_table_size = 4096
+ e.header_table_size = 4096
+
+ # These size changes should be encoded
+ e.header_table_size = 40
+ e.header_table_size = 100
+ e.header_table_size = 40
+
+ header_set = [(':method', 'GET')]
+ out = e.encode(header_set, huffman=True)
+ assert out == b'\x3F\x09\x3F\x45\x3F\x09\x82'
+
+ def test_resizing_header_table_sends_context_update(self):
+ e = Encoder()
+
+ # Resize the header table to a size so small that nothing can be in it.
+ e.header_table_size = 40
+
+ # Now, encode a header set. Just a small one, with a well-defined
+ # output.
+ header_set = [(':method', 'GET')]
+ out = e.encode(header_set, huffman=True)
+
+ assert out == b'?\t\x82'
+
+ def test_setting_table_size_to_the_same_does_nothing(self):
+ e = Encoder()
+
+ # Set the header table size to the default.
+ e.header_table_size = 4096
+
+ # Now encode a header set. Just a small one, with a well-defined
+ # output.
+ header_set = [(':method', 'GET')]
+ out = e.encode(header_set, huffman=True)
+
+ assert out == b'\x82'
+
+ def test_evicting_header_table_objects(self):
+ e = Encoder()
+
+ # Set the header table size large enough to include one header.
+ e.header_table_size = 66
+ header_set = [('a', 'b'), ('long-custom-header', 'longish value')]
+ e.encode(header_set)
+
+ assert len(e.header_table.dynamic_entries) == 1
+
+
+class TestHPACKDecoder(object):
+ # These tests are stolen entirely from the IETF specification examples.
+ def test_literal_header_field_with_indexing(self):
+ """
+ The header field representation uses a literal name and a literal
+ value.
+ """
+ d = Decoder()
+ header_set = [('custom-key', 'custom-header')]
+ data = b'\x40\x0acustom-key\x0dcustom-header'
+
+ assert d.decode(data) == header_set
+ assert list(d.header_table.dynamic_entries) == [
+ (n.encode('utf-8'), v.encode('utf-8')) for n, v in header_set
+ ]
+
+ def test_raw_decoding(self):
+ """
+ The header field representation is decoded as a raw byte string instead
+ of UTF-8.
+ """
+ d = Decoder()
+ header_set = [
+ (b'\x00\x01\x99\x30\x11\x22\x55\x21\x89\x14', b'custom-header')
+ ]
+ data = (
+ b'\x40\x0a\x00\x01\x99\x30\x11\x22\x55\x21\x89\x14\x0d'
+ b'custom-header'
+ )
+
+ assert d.decode(data, raw=True) == header_set
+
+ def test_literal_header_field_without_indexing(self):
+ """
+ The header field representation uses an indexed name and a literal
+ value.
+ """
+ d = Decoder()
+ header_set = [(':path', '/sample/path')]
+ data = b'\x04\x0c/sample/path'
+
+ assert d.decode(data) == header_set
+ assert list(d.header_table.dynamic_entries) == []
+
+ def test_header_table_size_getter(self):
+ d = Decoder()
+ assert d.header_table_size
+
+ def test_indexed_header_field(self):
+ """
+ The header field representation uses an indexed header field, from
+ the static table.
+ """
+ d = Decoder()
+ header_set = [(':method', 'GET')]
+ data = b'\x82'
+
+ assert d.decode(data) == header_set
+ assert list(d.header_table.dynamic_entries) == []
+
+ def test_request_examples_without_huffman(self):
+ """
+ This section shows several consecutive header sets, corresponding to
+ HTTP requests, on the same connection.
+ """
+ d = Decoder()
+ first_header_set = [
+ (':method', 'GET',),
+ (':scheme', 'http',),
+ (':path', '/',),
+ (':authority', 'www.example.com'),
+ ]
+ # The first_header_table doesn't contain 'authority'
+ first_data = b'\x82\x86\x84\x01\x0fwww.example.com'
+
+ assert d.decode(first_data) == first_header_set
+ assert list(d.header_table.dynamic_entries) == []
+
+ # This request takes advantage of the differential encoding of header
+ # sets.
+ second_header_set = [
+ (':method', 'GET',),
+ (':scheme', 'http',),
+ (':path', '/',),
+ (':authority', 'www.example.com',),
+ ('cache-control', 'no-cache'),
+ ]
+ second_data = (
+ b'\x82\x86\x84\x01\x0fwww.example.com\x0f\t\x08no-cache'
+ )
+
+ assert d.decode(second_data) == second_header_set
+ assert list(d.header_table.dynamic_entries) == []
+
+ third_header_set = [
+ (':method', 'GET',),
+ (':scheme', 'https',),
+ (':path', '/index.html',),
+ (':authority', 'www.example.com',),
+ ('custom-key', 'custom-value'),
+ ]
+ third_data = (
+ b'\x82\x87\x85\x01\x0fwww.example.com@\ncustom-key\x0ccustom-value'
+ )
+
+ assert d.decode(third_data) == third_header_set
+ # Don't check the header table here; it's just too complex to be
+ # reliable. Check its length, though.
+ assert len(d.header_table.dynamic_entries) == 1
+
+ def test_request_examples_with_huffman(self):
+ """
+ This section shows the same examples as the previous section, but
+ using Huffman encoding for the literal values.
+ """
+ d = Decoder()
+
+ first_header_set = [
+ (':method', 'GET',),
+ (':scheme', 'http',),
+ (':path', '/',),
+ (':authority', 'www.example.com'),
+ ]
+ first_data = (
+ b'\x82\x86\x84\x01\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0\xab\x90\xf4\xff'
+ )
+
+ assert d.decode(first_data) == first_header_set
+ assert list(d.header_table.dynamic_entries) == []
+
+ second_header_set = [
+ (':method', 'GET',),
+ (':scheme', 'http',),
+ (':path', '/',),
+ (':authority', 'www.example.com',),
+ ('cache-control', 'no-cache'),
+ ]
+ second_data = (
+ b'\x82\x86\x84\x01\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0\xab\x90\xf4\xff'
+ b'\x0f\t\x86\xa8\xeb\x10d\x9c\xbf'
+ )
+
+ assert d.decode(second_data) == second_header_set
+ assert list(d.header_table.dynamic_entries) == []
+
+ third_header_set = [
+ (':method', 'GET',),
+ (':scheme', 'https',),
+ (':path', '/index.html',),
+ (':authority', 'www.example.com',),
+ ('custom-key', 'custom-value'),
+ ]
+ third_data = (
+ b'\x82\x87\x85\x01\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0\xab\x90\xf4\xff@'
+ b'\x88%\xa8I\xe9[\xa9}\x7f\x89%\xa8I\xe9[\xb8\xe8\xb4\xbf'
+ )
+
+ assert d.decode(third_data) == third_header_set
+ assert len(d.header_table.dynamic_entries) == 1
+
+ # These tests are custom, for hyper.
+ def test_resizing_header_table(self):
+ # We need to decode a substantial number of headers, to populate the
+ # header table. This string isn't magic: it's the output from the
+ # equivalent test for the Encoder.
+ d = Decoder()
+ data = (
+ b'\x82\x87D\x87a\x07\xa4\xacV4\xcfA\x8c\xf1\xe3\xc2\xe5\xf2:k\xa0'
+ b'\xab\x90\xf4\xff@\x88%\xa8I\xe9[\xa9}\x7f\x89%\xa8I\xe9[\xb8\xe8'
+ b'\xb4\xbfz\xbc\xd0\x7ff\xa2\x81\xb0\xda\xe0S\xfa\xd02\x1a\xa4\x9d'
+ b'\x13\xfd\xa9\x92\xa4\x96\x854\x0c\x8aj\xdc\xa7\xe2\x81\x02\xef}'
+ b'\xa9g{\x81qp\x7fjb):\x9d\x81\x00 \x00@\x150\x9a\xc2\xca\x7f,\x05'
+ b'\xc5\xc1S\xb0I|\xa5\x89\xd3M\x1fC\xae\xba\x0cA\xa4\xc7\xa9\x8f3'
+ b'\xa6\x9a?\xdf\x9ah\xfa\x1du\xd0b\r&=Ly\xa6\x8f\xbe\xd0\x01w\xfe'
+ b'\xbeX\xf9\xfb\xed\x00\x17{@\x8a\xfc[=\xbdF\x81\xad\xbc\xa8O\x84y'
+ b'\xe7\xde\x7f'
+ )
+ d.decode(data)
+
+ # Resize the header table to a size so small that nothing can be in it.
+ d.header_table_size = 40
+ assert len(d.header_table.dynamic_entries) == 0
+
+ def test_apache_trafficserver(self):
+ # This test reproduces the bug in #110, using exactly the same header
+ # data.
+ d = Decoder()
+ data = (
+ b'\x10\x07:status\x03200@\x06server\tATS/6.0.0'
+ b'@\x04date\x1dTue, 31 Mar 2015 08:09:51 GMT'
+ b'@\x0ccontent-type\ttext/html@\x0econtent-length\x0542468'
+ b'@\rlast-modified\x1dTue, 31 Mar 2015 01:55:51 GMT'
+ b'@\x04vary\x0fAccept-Encoding@\x04etag\x0f"5519fea7-a5e4"'
+ b'@\x08x-served\x05Nginx@\x14x-subdomain-tryfiles\x04True'
+ b'@\x07x-deity\thydra-lts@\raccept-ranges\x05bytes@\x03age\x010'
+ b'@\x19strict-transport-security\rmax-age=86400'
+ b'@\x03via2https/1.1 ATS (ApacheTrafficServer/6.0.0 [cSsNfU])'
+ )
+ expect = [
+ (':status', '200'),
+ ('server', 'ATS/6.0.0'),
+ ('date', 'Tue, 31 Mar 2015 08:09:51 GMT'),
+ ('content-type', 'text/html'),
+ ('content-length', '42468'),
+ ('last-modified', 'Tue, 31 Mar 2015 01:55:51 GMT'),
+ ('vary', 'Accept-Encoding'),
+ ('etag', '"5519fea7-a5e4"'),
+ ('x-served', 'Nginx'),
+ ('x-subdomain-tryfiles', 'True'),
+ ('x-deity', 'hydra-lts'),
+ ('accept-ranges', 'bytes'),
+ ('age', '0'),
+ ('strict-transport-security', 'max-age=86400'),
+ ('via', 'https/1.1 ATS (ApacheTrafficServer/6.0.0 [cSsNfU])'),
+ ]
+
+ result = d.decode(data)
+
+ assert result == expect
+ # The status header shouldn't be indexed.
+ assert len(d.header_table.dynamic_entries) == len(expect) - 1
+
+ def test_utf8_errors_raise_hpack_decoding_error(self):
+ d = Decoder()
+
+ # Invalid UTF-8 data.
+ data = b'\x82\x86\x84\x01\x10www.\x07\xaa\xd7\x95\xd7\xa8\xd7\x94.com'
+
+ with pytest.raises(HPACKDecodingError):
+ d.decode(data)
+
+ def test_invalid_indexed_literal(self):
+ d = Decoder()
+
+ # Refer to an index that is too large.
+ data = b'\x82\x86\x84\x7f\x0a\x0fwww.example.com'
+ with pytest.raises(InvalidTableIndex):
+ d.decode(data)
+
+ def test_invalid_indexed_header(self):
+ d = Decoder()
+
+ # Refer to an indexed header that is too large.
+ data = b'\xBE\x86\x84\x01\x0fwww.example.com'
+ with pytest.raises(InvalidTableIndex):
+ d.decode(data)
+
+ def test_literal_header_field_with_indexing_emits_headertuple(self):
+ """
+ A header field with indexing emits a HeaderTuple.
+ """
+ d = Decoder()
+ data = b'\x00\x0acustom-key\x0dcustom-header'
+
+ headers = d.decode(data)
+ assert len(headers) == 1
+
+ header = headers[0]
+ assert isinstance(header, HeaderTuple)
+ assert not isinstance(header, NeverIndexedHeaderTuple)
+
+ def test_literal_never_indexed_emits_neverindexedheadertuple(self):
+ """
+ A literal header field that must never be indexed emits a
+ NeverIndexedHeaderTuple.
+ """
+ d = Decoder()
+ data = b'\x10\x0acustom-key\x0dcustom-header'
+
+ headers = d.decode(data)
+ assert len(headers) == 1
+
+ header = headers[0]
+ assert isinstance(header, NeverIndexedHeaderTuple)
+
+ def test_indexed_never_indexed_emits_neverindexedheadertuple(self):
+ """
+ A header field with an indexed name that must never be indexed emits a
+ NeverIndexedHeaderTuple.
+ """
+ d = Decoder()
+ data = b'\x14\x0c/sample/path'
+
+ headers = d.decode(data)
+ assert len(headers) == 1
+
+ header = headers[0]
+ assert isinstance(header, NeverIndexedHeaderTuple)
+
+ def test_max_header_list_size(self):
+ """
+ If the header block is larger than the max_header_list_size, the HPACK
+ decoder throws an OversizedHeaderListError.
+ """
+ d = Decoder(max_header_list_size=44)
+ data = b'\x14\x0c/sample/path'
+
+ with pytest.raises(OversizedHeaderListError):
+ d.decode(data)
+
+ def test_can_decode_multiple_header_table_size_changes(self):
+ """
+ If multiple header table size changes are sent in at once, they are
+ successfully decoded.
+ """
+ d = Decoder()
+ data = b'?a?\xe1\x1f\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'
+ expect = [
+ (':method', 'GET'),
+ (':scheme', 'https'),
+ (':path', '/'),
+ (':authority', '127.0.0.1:8443')
+ ]
+
+ assert d.decode(data) == expect
+
+ def test_header_table_size_change_above_maximum(self):
+ """
+ If a header table size change is received that exceeds the maximum
+ allowed table size, it is rejected.
+ """
+ d = Decoder()
+ d.max_allowed_table_size = 127
+ data = b'?a\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'
+
+ with pytest.raises(InvalidTableSizeError):
+ d.decode(data)
+
+ def test_table_size_not_adjusting(self):
+ """
+ If the maximum allowed table size is reduced and the remote peer never
+ acknowledges the change with a table size update, an error is raised.
+ """
+ d = Decoder()
+ d.max_allowed_table_size = 128
+ data = b'\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'
+
+ with pytest.raises(InvalidTableSizeError):
+ d.decode(data)
+
+ def test_table_size_last_rejected(self):
+ """
+ If a header table size change comes last in the header block, it is
+ forbidden.
+ """
+ d = Decoder()
+ data = b'\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99?a'
+
+ with pytest.raises(HPACKDecodingError):
+ d.decode(data)
+
+ def test_table_size_middle_rejected(self):
+ """
+ If a header table size change comes anywhere but first in the header
+ block, it is forbidden.
+ """
+ d = Decoder()
+ data = b'\x82?a\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'
+
+ with pytest.raises(HPACKDecodingError):
+ d.decode(data)
+
+ def test_truncated_header_name(self):
+ """
+ If a header name is truncated an error is raised.
+ """
+ d = Decoder()
+ # This is a simple header block that has a bad ending. The interesting
+ # part begins on the second line. It indicates a string with a literal
+ # name and value. The name is a 5-character huffman-encoded string,
+ # but only three bytes of it are present.
+ data = (
+ b'\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'
+ b'\x00\x85\xf2\xb2J'
+ )
+
+ with pytest.raises(HPACKDecodingError):
+ d.decode(data)
+
+ def test_truncated_header_value(self):
+ """
+ If a header value is truncated an error is raised.
+ """
+ d = Decoder()
+ # This is a simple header block that has a bad ending. The interesting
+ # part begins on the second line. It indicates a string with a literal
+ # name and value. The name is a 5-character huffman-encoded string,
+ # but the entire EOS character has been written over the end. This
+ # causes hpack to read the header value's length as 622462 bytes,
+ # which it clearly is not, and so decoding must fail.
+ data = (
+ b'\x82\x87\x84A\x8a\x08\x9d\\\x0b\x81p\xdcy\xa6\x99'
+ b'\x00\x85\xf2\xb2J\x87\xff\xff\xff\xfd%B\x7f'
+ )
+
+ with pytest.raises(HPACKDecodingError):
+ d.decode(data)
+
+
+class TestDictToIterable(object):
+ """
+ The _dict_to_iterable function has some subtle requirements; this suite
+ validates that everything behaves as expected.
+
+ As much as possible this tries to be exhaustive.
+ """
+ keys = one_of(
+ text().filter(lambda k: k and not k.startswith(u':')),
+ binary().filter(lambda k: k and not k.startswith(b':'))
+ )
+
+ @given(
+ special_keys=sets(keys),
+ boring_keys=sets(keys),
+ )
+ def test_ordering(self, special_keys, boring_keys):
+ """
+ _dict_to_iterable produces an iterable where all the keys beginning
+ with a colon are emitted first.
+ """
+ def _prepend_colon(k):
+ if isinstance(k, unicode):
+ return u':' + k
+ else:
+ return b':' + k
+
+ special_keys = set(map(_prepend_colon, special_keys))
+ input_dict = {
+ k: b'testval' for k in itertools.chain(
+ special_keys,
+ boring_keys
+ )
+ }
+ filtered = _dict_to_iterable(input_dict)
+
+ received_special = set()
+ received_boring = set()
+
+ for _ in special_keys:
+ k, _ = next(filtered)
+ received_special.add(k)
+ for _ in boring_keys:
+ k, _ = next(filtered)
+ received_boring.add(k)
+
+ assert special_keys == received_special
+ assert boring_keys == received_boring
+
+ @given(
+ special_keys=sets(keys),
+ boring_keys=sets(keys),
+ )
+ def test_ordering_applies_to_encoding(self, special_keys, boring_keys):
+ """
+ When encoding a dictionary the special keys all appear first.
+ """
+ def _prepend_colon(k):
+ if isinstance(k, unicode):
+ return u':' + k
+ else:
+ return b':' + k
+
+ special_keys = set(map(_prepend_colon, special_keys))
+ input_dict = {
+ k: b'testval' for k in itertools.chain(
+ special_keys,
+ boring_keys
+ )
+ }
+ e = Encoder()
+ d = Decoder()
+ encoded = e.encode(input_dict)
+ decoded = iter(d.decode(encoded, raw=True))
+
+ received_special = set()
+ received_boring = set()
+ expected_special = set(map(_to_bytes, special_keys))
+ expected_boring = set(map(_to_bytes, boring_keys))
+
+ for _ in special_keys:
+ k, _ = next(decoded)
+ received_special.add(k)
+ for _ in boring_keys:
+ k, _ = next(decoded)
+ received_boring.add(k)
+
+ assert expected_special == received_special
+ assert expected_boring == received_boring
diff --git a/testing/web-platform/tests/tools/third_party/hpack/test/test_hpack_integration.py b/testing/web-platform/tests/tools/third_party/hpack/test/test_hpack_integration.py
new file mode 100644
index 0000000000..8b8de650d2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/test/test_hpack_integration.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+"""
+This module defines substantial HPACK integration tests. These can take a very
+long time to run, so they're outside the main test suite, but they need to be
+run before every change to HPACK.
+"""
+from hpack.hpack import Decoder, Encoder
+from hpack.struct import HeaderTuple
+from binascii import unhexlify
+from pytest import skip
+
+
+class TestHPACKDecoderIntegration(object):
+ def test_can_decode_a_story(self, story):
+ d = Decoder()
+
+ # We test against draft 9 of the HPACK spec.
+ if story['draft'] != 9:
+ skip("We test against draft 9, not draft %d" % story['draft'])
+
+ for case in story['cases']:
+ try:
+ d.header_table_size = case['header_table_size']
+ except KeyError:
+ pass
+ decoded_headers = d.decode(unhexlify(case['wire']))
+
+ # The correct headers are a list of dicts, which is annoying.
+ correct_headers = [
+ (item[0], item[1])
+ for header in case['headers']
+ for item in header.items()
+ ]
+ correct_headers = correct_headers
+ assert correct_headers == decoded_headers
+ assert all(
+ isinstance(header, HeaderTuple) for header in decoded_headers
+ )
+
+ def test_can_encode_a_story_no_huffman(self, raw_story):
+ d = Decoder()
+ e = Encoder()
+
+ for case in raw_story['cases']:
+ # The input headers are a list of dicts, which is annoying.
+ input_headers = [
+ (item[0], item[1])
+ for header in case['headers']
+ for item in header.items()
+ ]
+
+ encoded = e.encode(input_headers, huffman=False)
+ decoded_headers = d.decode(encoded)
+
+ assert input_headers == decoded_headers
+ assert all(
+ isinstance(header, HeaderTuple) for header in decoded_headers
+ )
+
+ def test_can_encode_a_story_with_huffman(self, raw_story):
+ d = Decoder()
+ e = Encoder()
+
+ for case in raw_story['cases']:
+ # The input headers are a list of dicts, which is annoying.
+ input_headers = [
+ (item[0], item[1])
+ for header in case['headers']
+ for item in header.items()
+ ]
+
+ encoded = e.encode(input_headers, huffman=True)
+ decoded_headers = d.decode(encoded)
+
+ assert input_headers == decoded_headers
diff --git a/testing/web-platform/tests/tools/third_party/hpack/test/test_huffman.py b/testing/web-platform/tests/tools/third_party/hpack/test/test_huffman.py
new file mode 100644
index 0000000000..1b8c2f1238
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/test/test_huffman.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+from hpack.exceptions import HPACKDecodingError
+from hpack.huffman_table import decode_huffman
+from hpack.huffman import HuffmanEncoder
+from hpack.huffman_constants import REQUEST_CODES, REQUEST_CODES_LENGTH
+
+from hypothesis import given, example
+from hypothesis.strategies import binary
+
+
+class TestHuffman(object):
+
+ def test_request_huffman_decoder(self):
+ assert (
+ decode_huffman(b'\xf1\xe3\xc2\xe5\xf2:k\xa0\xab\x90\xf4\xff') ==
+ b"www.example.com"
+ )
+ assert decode_huffman(b'\xa8\xeb\x10d\x9c\xbf') == b"no-cache"
+ assert decode_huffman(b'%\xa8I\xe9[\xa9}\x7f') == b"custom-key"
+ assert (
+ decode_huffman(b'%\xa8I\xe9[\xb8\xe8\xb4\xbf') == b"custom-value"
+ )
+
+ def test_request_huffman_encode(self):
+ encoder = HuffmanEncoder(REQUEST_CODES, REQUEST_CODES_LENGTH)
+ assert (
+ encoder.encode(b"www.example.com") ==
+ b'\xf1\xe3\xc2\xe5\xf2:k\xa0\xab\x90\xf4\xff'
+ )
+ assert encoder.encode(b"no-cache") == b'\xa8\xeb\x10d\x9c\xbf'
+ assert encoder.encode(b"custom-key") == b'%\xa8I\xe9[\xa9}\x7f'
+ assert (
+ encoder.encode(b"custom-value") == b'%\xa8I\xe9[\xb8\xe8\xb4\xbf'
+ )
+
+
+class TestHuffmanDecoder(object):
+ @given(data=binary())
+ @example(b'\xff')
+ @example(b'\x5f\xff\xff\xff\xff')
+ @example(b'\x00\x3f\xff\xff\xff')
+ def test_huffman_decoder_properly_handles_all_bytestrings(self, data):
+ """
+ When given random bytestrings, either we get HPACKDecodingError or we
+ get a bytestring back.
+ """
+ # The examples aren't special; they're just known to hit specific error
+ # paths through the state machine. Basically, they are strings that are
+ # definitely invalid.
+ try:
+ result = decode_huffman(data)
+ except HPACKDecodingError:
+ result = b''
+
+ assert isinstance(result, bytes)
diff --git a/testing/web-platform/tests/tools/third_party/hpack/test/test_struct.py b/testing/web-platform/tests/tools/third_party/hpack/test/test_struct.py
new file mode 100644
index 0000000000..613b8c6bae
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/test/test_struct.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+"""
+test_struct
+~~~~~~~~~~~
+
+Tests for the Header tuples.
+"""
+import pytest
+
+from hpack.struct import HeaderTuple, NeverIndexedHeaderTuple
+
+
+class TestHeaderTuple(object):
+ def test_is_tuple(self):
+ """
+ HeaderTuple objects are tuples.
+ """
+ h = HeaderTuple('name', 'value')
+ assert isinstance(h, tuple)
+
+ def test_unpacks_properly(self):
+ """
+ HeaderTuple objects unpack like tuples.
+ """
+ h = HeaderTuple('name', 'value')
+ k, v = h
+
+ assert k == 'name'
+ assert v == 'value'
+
+ def test_header_tuples_are_indexable(self):
+ """
+ HeaderTuple objects can be indexed.
+ """
+ h = HeaderTuple('name', 'value')
+ assert h.indexable
+
+ def test_never_indexed_tuples_are_not_indexable(self):
+ """
+ NeverIndexedHeaderTuple objects cannot be indexed.
+ """
+ h = NeverIndexedHeaderTuple('name', 'value')
+ assert not h.indexable
+
+ @pytest.mark.parametrize('cls', (HeaderTuple, NeverIndexedHeaderTuple))
+ def test_equal_to_tuples(self, cls):
+ """
+ HeaderTuples and NeverIndexedHeaderTuples are equal to equivalent
+ tuples.
+ """
+ t1 = ('name', 'value')
+ t2 = cls('name', 'value')
+
+ assert t1 == t2
+ assert t1 is not t2
+
+ @pytest.mark.parametrize('cls', (HeaderTuple, NeverIndexedHeaderTuple))
+ def test_equal_to_self(self, cls):
+ """
+ HeaderTuples and NeverIndexedHeaderTuples are always equal when
+ compared to the same class.
+ """
+ t1 = cls('name', 'value')
+ t2 = cls('name', 'value')
+
+ assert t1 == t2
+ assert t1 is not t2
+
+ def test_equal_for_different_indexes(self):
+ """
+ HeaderTuples compare equal to equivalent NeverIndexedHeaderTuples.
+ """
+ t1 = HeaderTuple('name', 'value')
+ t2 = NeverIndexedHeaderTuple('name', 'value')
+
+ assert t1 == t2
+ assert t1 is not t2
diff --git a/testing/web-platform/tests/tools/third_party/hpack/test/test_table.py b/testing/web-platform/tests/tools/third_party/hpack/test/test_table.py
new file mode 100644
index 0000000000..d77c30a2fb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hpack/test/test_table.py
@@ -0,0 +1,158 @@
+# -*- coding: utf-8 -*-
+from hpack.table import HeaderTable, table_entry_size
+from hpack.exceptions import InvalidTableIndex
+import pytest
+import sys
+_ver = sys.version_info
+is_py2 = _ver[0] == 2
+is_py3 = _ver[0] == 3
+
+
+class TestPackageFunctions(object):
+ def test_table_entry_size(self):
+ res = table_entry_size(b'TestName', b'TestValue')
+ assert res == 49
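+ # (editor's note) 32 + len(b'TestName') + len(b'TestValue')
+ # = 32 + 8 + 9 = 49 octets.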
+
+
+class TestHeaderTable(object):
+ def test_get_by_index_dynamic_table(self):
+ tbl = HeaderTable()
+ off = len(HeaderTable.STATIC_TABLE)
+ val = (b'TestName', b'TestValue')
+ tbl.add(*val)
+ res = tbl.get_by_index(off + 1)
+ assert res == val
+
+ def test_get_by_index_static_table(self):
+ tbl = HeaderTable()
+ exp = (b':authority', b'')
+ res = tbl.get_by_index(1)
+ assert res == exp
+ idx = len(HeaderTable.STATIC_TABLE)
+ exp = (b'www-authenticate', b'')
+ res = tbl.get_by_index(idx)
+ assert res == exp
+
+ def test_get_by_index_zero_index(self):
+ tbl = HeaderTable()
+ with pytest.raises(InvalidTableIndex):
+ tbl.get_by_index(0)
+
+ def test_get_by_index_out_of_range(self):
+ tbl = HeaderTable()
+ off = len(HeaderTable.STATIC_TABLE)
+ tbl.add(b'TestName', b'TestValue')
+ with pytest.raises(InvalidTableIndex) as e:
+ tbl.get_by_index(off + 2)
+
+ assert (
+ "InvalidTableIndex: Invalid table index %d" % (off + 2) in str(e)
+ )
+
+ def test_repr(self):
+ tbl = HeaderTable()
+ tbl.add(b'TestName1', b'TestValue1')
+ tbl.add(b'TestName2', b'TestValue2')
+ tbl.add(b'TestName2', b'TestValue2')
+ # Meh, I hate that I have to do this to test
+ # repr
+ if is_py3:
+ exp = (
+ "HeaderTable(4096, False, deque(["
+ "(b'TestName2', b'TestValue2'), "
+ "(b'TestName2', b'TestValue2'), "
+ "(b'TestName1', b'TestValue1')"
+ "]))"
+ )
+ else:
+ exp = (
+ "HeaderTable(4096, False, deque(["
+ "('TestName2', 'TestValue2'), "
+ "('TestName2', 'TestValue2'), "
+ "('TestName1', 'TestValue1')"
+ "]))"
+ )
+ res = repr(tbl)
+ assert res == exp
+
+ def test_add_to_large(self):
+ tbl = HeaderTable()
+ # Max size too small to hold the value we specify
+ tbl.maxsize = 1
+ tbl.add(b'TestName', b'TestValue')
+ # Table length should be 0
+ assert len(tbl.dynamic_entries) == 0
+
+ def test_search_in_static_full(self):
+ tbl = HeaderTable()
+ itm = (b':authority', b'')
+ exp = (1, itm[0], itm[1])
+ res = tbl.search(itm[0], itm[1])
+ assert res == exp
+
+ def test_search_in_static_partial(self):
+ tbl = HeaderTable()
+ exp = (1, b':authority', None)
+ res = tbl.search(b':authority', b'NotInTable')
+ assert res == exp
+
+ def test_search_in_dynamic_full(self):
+ tbl = HeaderTable()
+ idx = len(HeaderTable.STATIC_TABLE) + 1
+ tbl.add(b'TestName', b'TestValue')
+ exp = (idx, b'TestName', b'TestValue')
+ res = tbl.search(b'TestName', b'TestValue')
+ assert res == exp
+
+ def test_search_in_dynamic_partial(self):
+ tbl = HeaderTable()
+ idx = len(HeaderTable.STATIC_TABLE) + 1
+ tbl.add(b'TestName', b'TestValue')
+ exp = (idx, b'TestName', None)
+ res = tbl.search(b'TestName', b'NotInTable')
+ assert res == exp
+
+ def test_search_no_match(self):
+ tbl = HeaderTable()
+ tbl.add(b'TestName', b'TestValue')
+ res = tbl.search(b'NotInTable', b'NotInTable')
+ assert res is None
+
+ def test_maxsize_prop_getter(self):
+ tbl = HeaderTable()
+ assert tbl.maxsize == HeaderTable.DEFAULT_SIZE
+
+ def test_maxsize_prop_setter(self):
+ tbl = HeaderTable()
+ exp = int(HeaderTable.DEFAULT_SIZE / 2)
+ tbl.maxsize = exp
+ assert tbl.resized is True
+ assert tbl.maxsize == exp
+ tbl.resized = False
+ tbl.maxsize = exp
+ assert tbl.resized is False
+ assert tbl.maxsize == exp
+
+ def test_size(self):
+ tbl = HeaderTable()
+ for i in range(3):
+ tbl.add(b'TestName', b'TestValue')
+ res = tbl._current_size
+ assert res == 147
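+ # (editor's note) three identical 49-octet entries: 3 * 49 = 147.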
+
+ def test_shrink_maxsize_is_zero(self):
+ tbl = HeaderTable()
+ tbl.add(b'TestName', b'TestValue')
+ assert len(tbl.dynamic_entries) == 1
+ tbl.maxsize = 0
+ assert len(tbl.dynamic_entries) == 0
+
+ def test_shrink_maxsize(self):
+ tbl = HeaderTable()
+ for i in range(3):
+ tbl.add(b'TestName', b'TestValue')
+
+ assert tbl._current_size == 147
+ tbl.maxsize = 146
+ assert len(tbl.dynamic_entries) == 2
+ assert tbl._current_size == 98
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/.appveyor.yml b/testing/web-platform/tests/tools/third_party/html5lib/.appveyor.yml
new file mode 100644
index 0000000000..984e2b7fa5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/.appveyor.yml
@@ -0,0 +1,31 @@
+# To activate, change the Appveyor settings to use `.appveyor.yml`.
+environment:
+ global:
+ PATH: "C:\\Python27\\Scripts\\;%PATH%"
+ PYTEST_COMMAND: "coverage run -m pytest"
+ matrix:
+ - TOXENV: py27-base
+ - TOXENV: py27-optional
+ - TOXENV: py33-base
+ - TOXENV: py33-optional
+ - TOXENV: py34-base
+ - TOXENV: py34-optional
+ - TOXENV: py35-base
+ - TOXENV: py35-optional
+ - TOXENV: py36-base
+ - TOXENV: py36-optional
+
+install:
+ - git submodule update --init --recursive
+ - python -m pip install tox codecov
+
+build: off
+
+test_script:
+ - tox
+
+after_test:
+ - python debug-info.py
+
+on_success:
+ - codecov
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/.coveragerc b/testing/web-platform/tests/tools/third_party/html5lib/.coveragerc
new file mode 100644
index 0000000000..6facf35239
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/.coveragerc
@@ -0,0 +1,8 @@
+[run]
+branch = True
+source = html5lib
+
+[paths]
+source =
+ html5lib
+ .tox/*/lib/python*/site-packages/html5lib
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/.gitignore b/testing/web-platform/tests/tools/third_party/html5lib/.gitignore
new file mode 100644
index 0000000000..ecd62df31b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/.gitignore
@@ -0,0 +1,85 @@
+# Copyright (c) 2014 GitHub, Inc.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+doc/_build/
+
+# PyBuilder
+target/
+
+# Generated by parse.py -p
+stats.prof
+
+# IDE
+.idea
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/.prospector.yaml b/testing/web-platform/tests/tools/third_party/html5lib/.prospector.yaml
new file mode 100644
index 0000000000..7e8efe1a62
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/.prospector.yaml
@@ -0,0 +1,21 @@
+strictness: veryhigh
+doc-warnings: false
+test-warnings: false
+
+max-line-length: 139
+
+requirements:
+ - requirements.txt
+ - requirements-test.txt
+ - requirements-optional.txt
+
+ignore-paths:
+ - parse.py
+ - utils/
+
+python-targets:
+ - 2
+ - 3
+
+mccabe:
+ run: false
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/.pylintrc b/testing/web-platform/tests/tools/third_party/html5lib/.pylintrc
new file mode 100644
index 0000000000..ea74d5db3f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/.pylintrc
@@ -0,0 +1,10 @@
+[MASTER]
+ignore=tests
+
+[MESSAGES CONTROL]
+# messages up to fixme should probably be fixed somehow
+disable = redefined-builtin,attribute-defined-outside-init,anomalous-backslash-in-string,no-self-use,redefined-outer-name,bad-continuation,wrong-import-order,superfluous-parens,no-member,duplicate-code,super-init-not-called,abstract-method,property-on-old-class,wrong-import-position,no-name-in-module,no-init,bad-mcs-classmethod-argument,bad-classmethod-argument,fixme,invalid-name,import-error,too-few-public-methods,too-many-ancestors,too-many-arguments,too-many-boolean-expressions,too-many-branches,too-many-instance-attributes,too-many-locals,too-many-lines,too-many-public-methods,too-many-return-statements,too-many-statements,missing-docstring,line-too-long,locally-disabled,locally-enabled,bad-builtin,deprecated-lambda
+
+[FORMAT]
+max-line-length=139
+single-line-if-stmt=no
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/.pytest.expect b/testing/web-platform/tests/tools/third_party/html5lib/.pytest.expect
new file mode 100644
index 0000000000..0fa326f035
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/.pytest.expect
@@ -0,0 +1,1322 @@
+pytest-expect file v1
+(2, 7, 11, 'final', 0)
+b'html5lib/tests/test_encoding.py::test_encoding::[110]': FAIL
+b'html5lib/tests/test_encoding.py::test_encoding::[111]': FAIL
+u'html5lib/tests/testdata/tokenizer/test2.test::0::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::228::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::231::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::232::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::234::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::235::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::237::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::240::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::241::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::243::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::244::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::246::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::258::dataState': FAIL
+u'html5lib/tests/testdata/tokenizer/test3.test::656::dataState': FAIL
+u'html5lib/tests/testdata/tree-construction/adoption01.dat::17::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/adoption01.dat::17::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/adoption01.dat::17::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/adoption01.dat::17::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/adoption01.dat::17::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/adoption01.dat::17::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/adoption01.dat::17::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/adoption01.dat::17::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::0::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::0::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::0::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::0::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::0::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::0::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::0::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::0::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::18::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::18::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::18::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::18::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::18::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::18::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::18::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::18::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::19::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::19::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::19::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::19::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::19::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::19::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::19::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::19::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::1::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::1::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::1::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::1::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::1::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::1::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::1::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::1::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::22::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::22::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::22::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::22::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::22::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::22::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::22::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::22::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::23::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::23::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::23::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::23::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::23::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::23::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::23::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::23::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::26::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::26::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::26::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::26::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::26::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::26::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::26::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::26::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::27::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::27::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::27::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::27::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::27::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::27::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::27::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::27::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::2::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::2::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::2::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::2::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::2::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::2::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::2::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::2::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::30::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::30::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::30::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::30::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::30::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::30::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::30::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::30::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::31::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::31::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::31::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::31::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::31::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::31::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::31::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::31::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::34::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::34::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::34::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::34::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::34::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::34::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::34::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::34::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::35::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::35::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::35::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::35::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::35::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::35::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::35::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::35::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::38::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::38::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::38::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::38::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::38::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::38::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::38::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::38::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::39::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::39::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::39::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::39::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::39::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::39::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::39::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::39::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::3::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::3::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::3::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::3::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::3::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::3::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::3::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::3::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::40::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::40::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::40::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::40::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::40::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::40::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::40::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::40::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::41::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::41::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::41::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::41::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::41::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::41::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::41::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::41::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::47::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::47::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::47::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::47::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::47::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::47::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::47::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::47::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::48::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::48::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::48::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::48::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::48::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::48::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::48::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/foreign-fragment.dat::48::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::0::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::0::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::0::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::0::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::0::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::0::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::0::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::0::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::1::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::1::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::1::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::1::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::1::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::1::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::1::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::1::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::2::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::2::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::2::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::2::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::2::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::2::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::2::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::2::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::3::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::3::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::3::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::3::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::3::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::3::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::3::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/isindex.dat::3::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::3::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::3::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::3::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::3::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::3::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::3::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::3::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::3::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::4::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::4::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::4::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::4::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::4::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::4::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::4::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::4::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::5::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::5::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::5::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::5::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::5::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::5::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::5::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/menuitem-element.dat::5::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/namespace-sensitivity.dat::0::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/namespace-sensitivity.dat::0::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/namespace-sensitivity.dat::0::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/namespace-sensitivity.dat::0::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/namespace-sensitivity.dat::0::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/namespace-sensitivity.dat::0::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/namespace-sensitivity.dat::0::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/namespace-sensitivity.dat::0::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::0::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::0::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::0::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::0::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::0::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::0::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::0::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::0::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::10::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::10::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::10::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::10::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::10::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::10::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::10::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::10::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::12::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::12::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::12::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::12::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::12::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::12::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::12::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::12::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::15::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::15::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::15::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::15::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::15::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::15::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::15::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::15::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::17::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::17::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::17::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::17::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::17::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::17::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::17::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::17::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::1::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::1::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::1::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::1::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::1::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::1::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::1::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::1::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::20::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::20::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::20::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::20::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::20::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::20::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::20::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::20::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::2::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::2::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::2::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::2::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::2::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::2::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::2::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::2::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::3::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::3::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::3::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::3::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::3::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::3::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::3::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::3::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::5::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::5::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::5::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::5::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::5::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::5::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::5::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::5::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::7::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::7::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::7::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::7::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::7::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::7::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::7::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/ruby.dat::7::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/adoption01.dat::0::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/adoption01.dat::0::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/adoption01.dat::0::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/adoption01.dat::0::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/adoption01.dat::0::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/adoption01.dat::0::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/adoption01.dat::0::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/adoption01.dat::0::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/ark.dat::0::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/ark.dat::0::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/ark.dat::0::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/ark.dat::0::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/ark.dat::0::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/ark.dat::0::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/ark.dat::0::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/ark.dat::0::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::0::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::0::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::0::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::0::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::0::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::0::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::0::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::0::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::1::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::1::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::1::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::1::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::1::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::1::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::1::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/scripted/webkit01.dat::1::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::0::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::0::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::0::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::0::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::0::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::0::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::0::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::0::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::100::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::100::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::100::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::100::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::100::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::100::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::100::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::100::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::101::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::101::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::101::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::101::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::101::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::101::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::101::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::101::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::102::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::102::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::102::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::102::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::102::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::102::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::102::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::102::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::103::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::103::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::103::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::103::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::103::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::103::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::103::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::103::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::104::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::104::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::104::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::104::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::104::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::104::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::104::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::104::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::105::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::105::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::105::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::105::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::105::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::105::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::105::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::105::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::106::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::106::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::106::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::106::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::106::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::106::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::106::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::106::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::107::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::107::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::107::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::107::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::107::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::107::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::107::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::107::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::10::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::10::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::10::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::10::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::10::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::10::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::10::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::10::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::11::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::11::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::11::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::11::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::11::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::11::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::11::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::11::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::12::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::12::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::12::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::12::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::12::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::12::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::12::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::12::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::13::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::13::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::13::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::13::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::13::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::13::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::13::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::13::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::14::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::14::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::14::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::14::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::14::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::14::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::14::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::14::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::15::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::15::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::15::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::15::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::15::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::15::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::15::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::15::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::16::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::16::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::16::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::16::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::16::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::16::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::16::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::16::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::17::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::17::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::17::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::17::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::17::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::17::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::17::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::17::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::18::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::18::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::18::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::18::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::18::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::18::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::18::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::18::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::19::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::19::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::19::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::19::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::19::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::19::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::19::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::19::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::1::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::1::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::1::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::1::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::1::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::1::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::1::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::1::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::20::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::20::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::20::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::20::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::20::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::20::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::20::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::20::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::21::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::21::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::21::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::21::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::21::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::21::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::21::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::21::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::22::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::22::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::22::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::22::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::22::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::22::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::22::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::22::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::23::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::23::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::23::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::23::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::23::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::23::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::23::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::23::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::24::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::24::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::24::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::24::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::24::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::24::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::24::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::24::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::25::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::25::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::25::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::25::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::25::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::25::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::25::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::25::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::26::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::26::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::26::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::26::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::26::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::26::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::26::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::26::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::27::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::27::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::27::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::27::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::27::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::27::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::27::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::27::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::28::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::28::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::28::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::28::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::28::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::28::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::28::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::28::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::29::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::29::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::29::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::29::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::29::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::29::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::29::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::29::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::2::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::2::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::2::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::2::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::2::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::2::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::2::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::2::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::30::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::30::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::30::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::30::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::30::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::30::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::30::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::30::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::31::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::31::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::31::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::31::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::31::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::31::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::31::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::31::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::32::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::32::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::32::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::32::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::32::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::32::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::32::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::32::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::33::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::33::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::33::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::33::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::33::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::33::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::33::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::33::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::34::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::34::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::34::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::34::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::34::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::34::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::34::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::34::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::35::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::35::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::35::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::35::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::35::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::35::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::35::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::35::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::36::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::36::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::36::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::36::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::36::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::36::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::36::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::36::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::37::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::37::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::37::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::37::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::37::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::37::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::37::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::37::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::38::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::38::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::38::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::38::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::38::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::38::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::38::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::38::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::3::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::3::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::3::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::3::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::3::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::3::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::3::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::3::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::40::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::40::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::40::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::40::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::40::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::40::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::40::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::40::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::41::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::41::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::41::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::41::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::41::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::41::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::41::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::41::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::42::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::42::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::42::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::42::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::42::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::42::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::42::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::42::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::43::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::43::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::43::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::43::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::43::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::43::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::43::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::43::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::44::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::44::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::44::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::44::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::44::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::44::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::44::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::44::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::45::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::45::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::45::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::45::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::45::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::45::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::45::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::45::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::46::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::46::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::46::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::46::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::46::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::46::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::46::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::46::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::47::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::47::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::47::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::47::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::47::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::47::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::47::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::47::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::48::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::48::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::48::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::48::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::48::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::48::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::48::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::48::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::49::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::49::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::49::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::49::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::49::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::49::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::49::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::49::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::4::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::4::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::4::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::4::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::4::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::4::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::4::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::4::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::50::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::50::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::50::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::50::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::50::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::50::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::50::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::50::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::51::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::51::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::51::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::51::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::51::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::51::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::51::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::51::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::52::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::52::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::52::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::52::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::52::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::52::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::52::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::52::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::53::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::53::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::53::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::53::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::53::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::53::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::53::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::53::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::54::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::54::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::54::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::54::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::54::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::54::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::54::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::54::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::55::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::55::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::55::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::55::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::55::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::55::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::55::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::55::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::56::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::56::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::56::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::56::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::56::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::56::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::56::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::56::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::57::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::57::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::57::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::57::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::57::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::57::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::57::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::57::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::58::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::58::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::58::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::58::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::58::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::58::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::58::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::58::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::59::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::59::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::59::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::59::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::59::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::59::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::59::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::59::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::5::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::5::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::5::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::5::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::5::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::5::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::5::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::5::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::60::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::60::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::60::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::60::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::60::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::60::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::60::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::60::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::61::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::61::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::61::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::61::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::61::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::61::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::61::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::61::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::62::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::62::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::62::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::62::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::62::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::62::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::62::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::62::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::63::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::63::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::63::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::63::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::63::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::63::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::63::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::63::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::64::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::64::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::64::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::64::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::64::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::64::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::64::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::64::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::65::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::65::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::65::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::65::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::65::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::65::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::65::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::65::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::66::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::66::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::66::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::66::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::66::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::66::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::66::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::66::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::67::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::67::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::67::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::67::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::67::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::67::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::67::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::67::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::68::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::68::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::68::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::68::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::68::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::68::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::68::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::68::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::69::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::69::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::69::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::69::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::69::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::69::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::69::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::69::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::6::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::6::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::6::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::6::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::6::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::6::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::6::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::6::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::70::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::70::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::70::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::70::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::70::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::70::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::70::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::70::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::71::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::71::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::71::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::71::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::71::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::71::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::71::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::71::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::72::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::72::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::72::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::72::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::72::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::72::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::72::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::72::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::73::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::73::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::73::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::73::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::73::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::73::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::73::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::73::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::74::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::74::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::74::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::74::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::74::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::74::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::74::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::74::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::75::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::75::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::75::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::75::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::75::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::75::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::75::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::75::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::76::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::76::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::76::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::76::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::76::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::76::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::76::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::76::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::77::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::77::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::77::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::77::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::77::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::77::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::77::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::77::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::78::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::78::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::78::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::78::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::78::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::78::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::78::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::78::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::79::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::79::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::79::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::79::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::79::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::79::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::79::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::79::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::80::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::80::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::80::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::80::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::80::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::80::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::80::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::80::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::81::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::81::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::81::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::81::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::81::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::81::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::81::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::81::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::82::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::82::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::82::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::82::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::82::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::82::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::82::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::82::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::83::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::83::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::83::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::83::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::83::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::83::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::83::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::83::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::84::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::84::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::84::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::84::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::84::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::84::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::84::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::84::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::85::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::85::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::85::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::85::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::85::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::85::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::85::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::85::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::86::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::86::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::86::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::86::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::86::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::86::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::86::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::86::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::87::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::87::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::87::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::87::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::87::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::87::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::87::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::87::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::88::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::88::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::88::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::88::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::88::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::88::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::88::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::88::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::89::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::89::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::89::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::89::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::89::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::89::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::89::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::89::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::8::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::8::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::8::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::8::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::8::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::8::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::8::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::8::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::90::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::90::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::90::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::90::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::90::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::90::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::90::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::90::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::91::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::91::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::91::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::91::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::91::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::91::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::91::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::91::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::92::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::92::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::92::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::92::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::92::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::92::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::92::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::92::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::93::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::93::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::93::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::93::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::93::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::93::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::93::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::93::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::94::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::94::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::94::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::94::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::94::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::94::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::94::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::94::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::95::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::95::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::95::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::95::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::95::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::95::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::95::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::95::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::96::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::96::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::96::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::96::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::96::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::96::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::96::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::96::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::97::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::97::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::97::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::97::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::97::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::97::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::97::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::97::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::98::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::98::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::98::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::98::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::98::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::98::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::98::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::98::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::99::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::99::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::99::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::99::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::99::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::99::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::99::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::99::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::9::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::9::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::9::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::9::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::9::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::9::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::9::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/template.dat::9::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::2::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::2::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::2::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::2::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::2::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::2::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::2::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::2::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::4::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::4::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::4::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::4::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::4::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::4::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::4::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::4::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::5::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::5::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::5::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::5::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::5::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::5::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::5::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::5::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::6::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::6::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::6::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::6::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::6::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::6::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::6::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests11.dat::6::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::14::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::14::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::14::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::14::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::14::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::14::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::14::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::14::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::17::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::17::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::17::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::17::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::17::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::17::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::17::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::17::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::7::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::7::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::7::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::7::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::7::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::7::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::7::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests19.dat::7::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::6::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::6::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::6::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::6::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::6::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::6::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::6::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::6::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::7::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::7::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::7::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::7::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::7::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::7::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::7::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests2.dat::7::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests25.dat::7::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests25.dat::7::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests25.dat::7::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests25.dat::7::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests25.dat::7::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests25.dat::7::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/tests25.dat::7::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/tests25.dat::7::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::14::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::14::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::14::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::14::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::14::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::14::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::14::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::14::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::15::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::15::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::15::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::15::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::15::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::15::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::15::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::15::lxml::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::16::DOM::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::16::DOM::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::16::ElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::16::ElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::16::cElementTree::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::16::cElementTree::parser::void-namespace': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::16::lxml::parser::namespaced': FAIL
+u'html5lib/tests/testdata/tree-construction/webkit02.dat::16::lxml::parser::void-namespace': FAIL
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/.travis.yml b/testing/web-platform/tests/tools/third_party/html5lib/.travis.yml
new file mode 100644
index 0000000000..5727e0947e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/.travis.yml
@@ -0,0 +1,32 @@
+language: python
+python:
+ - "pypy"
+ - "3.6"
+ - "3.5"
+ - "3.4"
+ - "3.3"
+ - "2.7"
+
+sudo: false
+
+cache: pip
+
+env:
+ global:
+ - PYTEST_COMMAND="coverage run -m pytest"
+ matrix:
+ - TOXENV=optional
+ - TOXENV=base
+ - TOXENV=six19-optional
+
+install:
+ - pip install tox codecov
+
+script:
+ - tox
+
+after_script:
+ - python debug-info.py
+
+after_success:
+ - codecov
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/AUTHORS.rst b/testing/web-platform/tests/tools/third_party/html5lib/AUTHORS.rst
new file mode 100644
index 0000000000..904013908d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/AUTHORS.rst
@@ -0,0 +1,66 @@
+Credits
+=======
+
+``html5lib`` is written and maintained by:
+
+- James Graham
+- Sam Sneddon
+- Łukasz Langa
+- Will Kahn-Greene
+
+
+Patches and suggestions
+-----------------------
+(In chronological order, by first commit:)
+
+- Anne van Kesteren
+- Lachlan Hunt
+- lantis63
+- Sam Ruby
+- Thomas Broyer
+- Tim Fletcher
+- Mark Pilgrim
+- Ryan King
+- Philip Taylor
+- Edward Z. Yang
+- fantasai
+- Philip Jägenstedt
+- Ms2ger
+- Mohammad Taha Jahangir
+- Andy Wingo
+- Andreas Madsack
+- Karim Valiev
+- Juan Carlos Garcia Segovia
+- Mike West
+- Marc DM
+- Simon Sapin
+- Michael[tm] Smith
+- Ritwik Gupta
+- Marc Abramowitz
+- Tony Lopes
+- lilbludevil
+- Kevin
+- Drew Hubl
+- Austin Kumbera
+- Jim Baker
+- Jon Dufresne
+- Donald Stufft
+- Alex Gaynor
+- Nik Nyby
+- Jakub Wilk
+- Sigmund Cherem
+- Gabi Davar
+- Florian Mounier
+- neumond
+- Vitalik Verhovodov
+- Kovid Goyal
+- Adam Chainz
+- John Vandenberg
+- Eric Amorde
+- Benedikt Morbach
+- Jonathan Vanasco
+- Tom Most
+- Ville Skyttä
+- Hugo van Kemenade
+- Mark Vasilkov
+
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/CHANGES.rst b/testing/web-platform/tests/tools/third_party/html5lib/CHANGES.rst
new file mode 100644
index 0000000000..fcb22475cd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/CHANGES.rst
@@ -0,0 +1,359 @@
+Change Log
+----------
+
+1.1
+~~~
+
+UNRELEASED
+
+Breaking changes:
+
+* Drop support for Python 3.3. (#358)
+* Drop support for Python 3.4. (#421)
+
+Deprecations:
+
+* Deprecate the ``html5lib`` sanitizer (``html5lib.serialize(sanitize=True)`` and
+ ``html5lib.filters.sanitizer``). We recommend users migrate to `Bleach
+  <https://github.com/mozilla/bleach>`_. Please let us know if Bleach doesn't suffice for your
+ use. (#443)
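
For reference, a minimal sketch of the usage this entry deprecates (the input string is illustrative; ``html5lib.serialize(sanitize=True)`` is quoted from the entry itself):

.. code-block:: python

  import html5lib

  # Deprecated path named above: serializing with sanitize=True runs the
  # built-in sanitizer filter over the tree-walker output.
  doc = html5lib.parse("<p onclick=alert(1)>hello</p>")
  cleaned = html5lib.serialize(doc, sanitize=True)
  print(cleaned)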
+
+Other changes:
+
+* Try to import from ``collections.abc`` to remove DeprecationWarning and ensure
+ ``html5lib`` keeps working in future Python versions. (#403)
+* Drop optional ``datrie`` dependency. (#442)
+
+
+1.0.1
+~~~~~
+
+Released on December 7, 2017
+
+Breaking changes:
+
+* Drop support for Python 2.6. (#330) (Thank you, Hugo, Will Kahn-Greene!)
+* Remove ``utils/spider.py`` (#353) (Thank you, Jon Dufresne!)
+
+Features:
+
+* Improve documentation. (#300, #307) (Thank you, Jon Dufresne, Tom Most,
+ Will Kahn-Greene!)
+* Add iframe seamless boolean attribute. (Thank you, Ritwik Gupta!)
+* Add itemscope as a boolean attribute. (#194) (Thank you, Jonathan Vanasco!)
+* Support Python 3.6. (#333) (Thank you, Jon Dufresne!)
+* Add CI support for Windows using AppVeyor. (Thank you, John Vandenberg!)
+* Improve testing and CI and add code coverage (#323, #334), (Thank you, Jon
+ Dufresne, John Vandenberg, Sam Sneddon, Will Kahn-Greene!)
+* Semver-compliant version number.
+
+Bug fixes:
+
+* Add support for setuptools < 18.5 to support environment markers. (Thank you,
+ John Vandenberg!)
+* Add explicit dependency for six >= 1.9. (Thank you, Eric Amorde!)
+* Fix regexes to work with Python 3.7 regex adjustments. (#318, #379) (Thank
+ you, Benedikt Morbach, Ville Skyttä, Mark Vasilkov!)
+* Fix alphabeticalattributes filter namespace bug. (#324) (Thank you, Will
+ Kahn-Greene!)
+* Include license file in generated wheel package. (#350) (Thank you, Jon
+ Dufresne!)
+* Fix annotation-xml typo. (#339) (Thank you, Will Kahn-Greene!)
+* Allow uppercase hex characters in CSS colour check. (#377) (Thank you,
+ Komal Dembla, Hugo!)
+
+
+1.0
+~~~
+
+Released and unreleased on December 7, 2017. Badly packaged release.
+
+
+0.999999999/1.0b10
+~~~~~~~~~~~~~~~~~~
+
+Released on July 15, 2016
+
+* Fix attribute order going to the tree builder to be document order
+ instead of reverse document order(!).
+
+
+0.99999999/1.0b9
+~~~~~~~~~~~~~~~~
+
+Released on July 14, 2016
+
+* **Added ordereddict as a mandatory dependency on Python 2.6.**
+
+* Added ``lxml``, ``genshi``, ``datrie``, ``charade``, and ``all``
+ extras that will do the right thing based on the specific
+ interpreter implementation.
+
+* Now requires the ``mock`` package for the testsuite.
+
+* Cease supporting DATrie under PyPy.
+
+* **Remove PullDOM support, as this hasn't ever been properly
+ tested, doesn't entirely work, and as far as I can tell is
+ completely unused by anyone.**
+
+* Move testsuite to ``py.test``.
+
+* **Fix #124: move to webencodings for decoding the input byte stream;
+ this makes html5lib compliant with the Encoding Standard, and
+ introduces a required dependency on webencodings.**
+
+* **Cease supporting Python 3.2 (in both CPython and PyPy forms).**
+
+* **Fix comments containing double-dash with lxml 3.5 and above.**
+
+* **Use scripting disabled by default (as we don't implement
+ scripting).**
+
+* **Fix #11, avoiding the XSS bug potentially caused by serializer
+ allowing attribute values to be escaped out of in old browser versions,
+ changing the quote_attr_values option on serializer to take one of
+ three values, "always" (the old True value), "legacy" (the new option,
+ and the new default), and "spec" (the old False value, and the old
+ default).**
+
+* **Fix #72 by rewriting the sanitizer to apply only to treewalkers
+ (instead of the tokenizer); as such, this will require amending all
+ callers of it to use it via the treewalker API.**
+
+* **Drop support of charade, now that chardet is supported once more.**
+
+* **Replace the charset keyword argument on parse and related methods
+ with a set of keyword arguments: override_encoding, transport_encoding,
+ same_origin_parent_encoding, likely_encoding, and default_encoding.**
+
+* **Move filters._base, treebuilder._base, and treewalkers._base to .base
+ to clarify their status as public.**
+
+* **Get rid of the sanitizer package. Merge sanitizer.sanitize into the
+ sanitizer.htmlsanitizer module and move that to sanitizer. This means
+ anyone who used sanitizer.sanitize or sanitizer.HTMLSanitizer needs no
+ code changes.**
+
+* **Rename treewalkers.lxmletree to .etree_lxml and
+ treewalkers.genshistream to .genshi to have a consistent API.**
+
+* Move a whole load of stuff (inputstream, ihatexml, trie, tokenizer,
+ utils) to be underscore prefixed to clarify their status as private.
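
A minimal sketch of the two changes above that most callers will notice, the encoding keyword arguments that replace ``charset`` and the reworked ``quote_attr_values`` serializer option (the file name and values are illustrative):

.. code-block:: python

  import html5lib

  # The old charset= argument is replaced by explicit encoding hints such as
  # transport_encoding.
  with open("mydocument.html", "rb") as f:
      doc = html5lib.parse(f, transport_encoding="utf-8")

  # quote_attr_values now takes "always", "legacy" (the new default) or "spec".
  html = html5lib.serialize(doc, quote_attr_values="always")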
+
+
+0.9999999/1.0b8
+~~~~~~~~~~~~~~~
+
+Released on September 10, 2015
+
+* Fix #195: fix the sanitizer to drop broken URLs (it threw an
+ exception between 0.9999 and 0.999999).
+
+
+0.999999/1.0b7
+~~~~~~~~~~~~~~
+
+Released on July 7, 2015
+
+* Fix #189: fix the sanitizer to allow relative URLs again (as it did
+ prior to 0.9999/1.0b5).
+
+
+0.99999/1.0b6
+~~~~~~~~~~~~~
+
+Released on April 30, 2015
+
+* Fix #188: fix the sanitizer to not throw an exception when sanitizing
+ bogus data URLs.
+
+
+0.9999/1.0b5
+~~~~~~~~~~~~
+
+Released on April 29, 2015
+
+* Fix #153: Sanitizer fails to treat some attributes as URLs. Despite how
+ this sounds, this has no known security implications. No known version
+ of IE (5.5 to current), Firefox (3 to current), Safari (6 to current),
+ Chrome (1 to current), or Opera (12 to current) will run any script
+ provided in these attributes.
+
+* Pass error message to the ParseError exception in strict parsing mode.
+
+* Allow data URIs in the sanitizer, with a whitelist of content-types.
+
+* Add support for Python implementations that don't support lone
+ surrogates (read: Jython). Fixes #2.
+
+* Remove localization of error messages. This functionality was totally
+ unused (and untested that everything was localizable), so we may as
+ well follow numerous browsers in not supporting translating technical
+ strings.
+
+* Expose treewalkers.pprint as a public API.
+
+* Add a documentEncoding property to HTML5Parser, fix #121.
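
A minimal sketch combining two of the items above, strict-mode ``ParseError`` exceptions (now carrying the error message) and the new ``documentEncoding`` property (the input markup is illustrative):

.. code-block:: python

  import html5lib
  from html5lib.html5parser import ParseError

  parser = html5lib.HTMLParser(strict=True)
  try:
      # The missing doctype is a parse error, so strict mode raises.
      parser.parse("<p>Hello")
  except ParseError as err:
      print("parse error:", err)

  print(parser.documentEncoding)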
+
+
+0.999
+~~~~~
+
+Released on December 23, 2013
+
+* Fix #127: add work-around for CPython issue #20007: .read(0) on
+ http.client.HTTPResponse drops the rest of the content.
+
+* Fix #115: lxml treewalker can now deal with fragments containing, at
+ their root level, text nodes with non-ASCII characters on Python 2.
+
+
+0.99
+~~~~
+
+Released on September 10, 2013
+
+* No library changes from 1.0b3; released as 0.99 as pip has changed
+ behaviour from 1.4 to avoid installing pre-release versions per
+ PEP 440.
+
+
+1.0b3
+~~~~~
+
+Released on July 24, 2013
+
+* Removed ``RecursiveTreeWalker`` from ``treewalkers._base``. Any
+ implementation using it should be moved to
+ ``NonRecursiveTreeWalker``, as everything bundled with html5lib has
+ for years.
+
+* Fix #67 so that ``BufferedStream`` correctly returns a bytes
+ object, thereby fixing any case where html5lib is passed a
+ non-seekable RawIOBase-like object.
+
+
+1.0b2
+~~~~~
+
+Released on June 27, 2013
+
+* Removed reordering of attributes within the serializer. There is now
+ an ``alphabetical_attributes`` option which preserves the previous
+ behaviour through a new filter. This allows attribute order to be
+ preserved through html5lib if the tree builder preserves order.
+
+* Removed ``dom2sax`` from DOM treebuilders. It has been replaced by
+ ``treeadapters.sax.to_sax`` which is generic and supports any
+ treewalker; it also resolves all known bugs with ``dom2sax``.
+
+* Fix treewalker assertions on hitting bytes strings on
+ Python 2. Previous to 1.0b1, treewalkers coped with mixed
+ bytes/unicode data on Python 2; this reintroduces this prior
+ behaviour on Python 2. Behaviour is unchanged on Python 3.
+
+
+1.0b1
+~~~~~
+
+Released on May 17, 2013
+
+* Implementation updated to implement the `HTML specification
+ <http://www.whatwg.org/specs/web-apps/current-work/>`_ as of 5th May
+ 2013 (`SVN <http://svn.whatwg.org/webapps/>`_ revision r7867).
+
+* Python 3.2+ supported in a single codebase using the ``six`` library.
+
+* Removed support for Python 2.5 and older.
+
+* Removed the deprecated Beautiful Soup 3 treebuilder.
+ ``beautifulsoup4`` can use ``html5lib`` as a parser instead. Note that
+ since it doesn't support namespaces, foreign content like SVG and
+ MathML is parsed incorrectly.
+
+* Removed ``simpletree`` from the package. The default tree builder is
+ now ``etree`` (using the ``xml.etree.cElementTree`` implementation if
+ available, and ``xml.etree.ElementTree`` otherwise).
+
+* Removed the ``XHTMLSerializer`` as it never actually guaranteed its
+ output was well-formed XML, and hence provided little of use.
+
+* Removed default DOM treebuilder, so ``html5lib.treebuilders.dom`` is no
+ longer supported. ``html5lib.treebuilders.getTreeBuilder("dom")`` will
+ return the default DOM treebuilder, which uses ``xml.dom.minidom``.
+
+* Optional heuristic character encoding detection now based on
+ ``charade`` for Python 2.6 - 3.3 compatibility.
+
+* Optional ``Genshi`` treewalker support fixed.
+
+* Many bugfixes, including:
+
+ * #33: null in attribute value breaks XML AttValue;
+
+ * #4: nested, indirect descendant, <button> causes infinite loop;
+
+ * `Google Code 215
+ <http://code.google.com/p/html5lib/issues/detail?id=215>`_: Properly
+ detect seekable streams;
+
+ * `Google Code 206
+ <http://code.google.com/p/html5lib/issues/detail?id=206>`_: add
+ support for <video preload=...>, <audio preload=...>;
+
+ * `Google Code 205
+ <http://code.google.com/p/html5lib/issues/detail?id=205>`_: add
+ support for <video poster=...>;
+
+ * `Google Code 202
+ <http://code.google.com/p/html5lib/issues/detail?id=202>`_: Unicode
+ file breaks InputStream.
+
+* Source code is now mostly PEP 8 compliant.
+
+* Test harness has been improved and now depends on ``nose``.
+
+* Documentation updated and moved to https://html5lib.readthedocs.io/.
+
+
+0.95
+~~~~
+
+Released on February 11, 2012
+
+
+0.90
+~~~~
+
+Released on January 17, 2010
+
+
+0.11.1
+~~~~~~
+
+Released on June 12, 2008
+
+
+0.11
+~~~~
+
+Released on June 10, 2008
+
+
+0.10
+~~~~
+
+Released on October 7, 2007
+
+
+0.9
+~~~
+
+Released on March 11, 2007
+
+
+0.2
+~~~
+
+Released on January 8, 2007
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/CONTRIBUTING.rst b/testing/web-platform/tests/tools/third_party/html5lib/CONTRIBUTING.rst
new file mode 100644
index 0000000000..8c5e198535
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/CONTRIBUTING.rst
@@ -0,0 +1,60 @@
+Contributing
+============
+
+Pull requests are more than welcome — both to the library and to the
+documentation. Some useful information:
+
+- We aim to follow PEP 8 in the library, but ignore the
+  79-character-per-line limit in favour of a soft limit of 99,
+  allowing longer lines where that is the more readable choice.
+
+- We aim to follow PEP 257 for all docstrings, and make them properly
+ parseable by Sphinx while generating API documentation.
+
+- We keep ``pyflakes`` reporting no errors or warnings at all times.
+
+- We keep the master branch passing all tests at all times on all
+ supported versions.
+
+`Travis CI <https://travis-ci.org/html5lib/html5lib-python/>`_ is run
+against all pull requests and should enforce all of the above.
+
+We use `Opera Critic <https://critic.hoppipolla.co.uk/>`_ as an external
+code-review tool, which uses your GitHub login to authenticate. You'll
+get email notifications for issues raised in the review.
+
+
+Patch submission guidelines
+---------------------------
+
+- **Create a new Git branch specific to your change.** Do not put
+ multiple fixes/features in the same pull request. If you find an
+ unrelated bug, create a distinct branch and submit a separate pull
+ request for the bugfix. This makes life much easier for maintainers
+ and will speed up merging your patches.
+
+- **Write a test** whenever possible. Following existing tests is often
+ easiest, and a good way to tell whether the feature you're modifying
+ is easily testable.
+
+- **Make sure documentation is updated.** Keep docstrings current, and
+ if necessary, update the Sphinx documentation in ``doc/``.
+
+- **Add a changelog entry** at the top of ``CHANGES.rst`` following
+ existing entries' styles.
+
+- **Run tests with tox** if possible, to make sure your changes are
+ compatible with all supported Python versions.
+
+- **Squash commits** before submitting the pull request so that a single
+ commit contains the entire change, and only that change (see the first
+ bullet).
+
+- **Don't rebase after creating the pull request.** Merge with upstream,
+ if necessary, and use ``git commit --fixup`` for fixing issues raised
+ in a Critic review or by a failing Travis build. The reviewer will
+ squash and rebase your pull request while accepting it. Even though
+ GitHub won't recognize the pull request as accepted, the squashed
+ commits will properly specify you as the author.
+
+- **Attribute yourself** in ``AUTHORS.rst``.
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/LICENSE b/testing/web-platform/tests/tools/third_party/html5lib/LICENSE
new file mode 100644
index 0000000000..c87fa7a000
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) 2006-2013 James Graham and other contributors
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/MANIFEST.in b/testing/web-platform/tests/tools/third_party/html5lib/MANIFEST.in
new file mode 100644
index 0000000000..4b3ffe3ed9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/MANIFEST.in
@@ -0,0 +1,10 @@
+include LICENSE
+include AUTHORS.rst
+include CHANGES.rst
+include README.rst
+include requirements*.txt
+include .pytest.expect
+include tox.ini
+include pytest.ini
+graft html5lib/tests/testdata
+recursive-include html5lib/tests *.py
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/README.rst b/testing/web-platform/tests/tools/third_party/html5lib/README.rst
new file mode 100644
index 0000000000..d367905da0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/README.rst
@@ -0,0 +1,151 @@
+html5lib
+========
+
+.. image:: https://travis-ci.org/html5lib/html5lib-python.svg?branch=master
+ :target: https://travis-ci.org/html5lib/html5lib-python
+
+
+html5lib is a pure-python library for parsing HTML. It is designed to
+conform to the WHATWG HTML specification, as is implemented by all major
+web browsers.
+
+
+Usage
+-----
+
+Simple usage follows this pattern:
+
+.. code-block:: python
+
+ import html5lib
+ with open("mydocument.html", "rb") as f:
+ document = html5lib.parse(f)
+
+or:
+
+.. code-block:: python
+
+ import html5lib
+ document = html5lib.parse("<p>Hello World!")
+
+By default, the ``document`` will be an ``xml.etree`` element instance.
+Whenever possible, html5lib chooses the accelerated ``ElementTree``
+implementation (i.e. ``xml.etree.cElementTree`` on Python 2.x).
+
+Two other tree types are supported: ``xml.dom.minidom`` and
+``lxml.etree``. To use an alternative format, specify the name of
+a treebuilder:
+
+.. code-block:: python
+
+ import html5lib
+ with open("mydocument.html", "rb") as f:
+ lxml_etree_document = html5lib.parse(f, treebuilder="lxml")
+
+When using ``urllib2`` (Python 2), the charset from HTTP should be
+passed to html5lib as follows:
+
+.. code-block:: python
+
+ from contextlib import closing
+ from urllib2 import urlopen
+ import html5lib
+
+ with closing(urlopen("http://example.com/")) as f:
+ document = html5lib.parse(f, transport_encoding=f.info().getparam("charset"))
+
+When using ``urllib.request`` (Python 3), the charset from HTTP
+should be passed to html5lib as follows:
+
+.. code-block:: python
+
+ from urllib.request import urlopen
+ import html5lib
+
+ with urlopen("http://example.com/") as f:
+ document = html5lib.parse(f, transport_encoding=f.info().get_content_charset())
+
+To have more control over the parser, create a parser object explicitly.
+For instance, to make the parser raise exceptions on parse errors, use:
+
+.. code-block:: python
+
+ import html5lib
+ with open("mydocument.html", "rb") as f:
+ parser = html5lib.HTMLParser(strict=True)
+ document = parser.parse(f)
+
+When you're instantiating parser objects explicitly, pass a treebuilder
+class as the ``tree`` keyword argument to use an alternative document
+format:
+
+.. code-block:: python
+
+ import html5lib
+ parser = html5lib.HTMLParser(tree=html5lib.getTreeBuilder("dom"))
+ minidom_document = parser.parse("<p>Hello World!")
+
+More documentation is available at https://html5lib.readthedocs.io/.
+
+
+Installation
+------------
+
+html5lib works on CPython 2.7+, CPython 3.5+ and PyPy. To install:
+
+.. code-block:: bash
+
+ $ pip install html5lib
+
+The goal is to support a (non-strict) superset of the versions that `pip
+supports
+<https://pip.pypa.io/en/stable/installing/#python-and-os-compatibility>`_.
+
+Optional Dependencies
+---------------------
+
+The following third-party libraries may be used for additional
+functionality:
+
+- ``lxml`` is supported as a tree format (for both building and
+ walking) under CPython (but *not* PyPy where it is known to cause
+ segfaults);
+
+- ``genshi`` has a treewalker (but not builder); and
+
+- ``chardet`` can be used as a fallback when character encoding cannot
+ be determined.
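
A minimal sketch of the first point, using ``lxml`` both for building and for walking (assuming ``lxml`` is installed under CPython; the file name is illustrative):

.. code-block:: python

  import html5lib

  # Build with the lxml treebuilder, then walk the resulting tree back out
  # via the matching treewalker when serializing.
  with open("mydocument.html", "rb") as f:
      doc = html5lib.parse(f, treebuilder="lxml")

  html = html5lib.serialize(doc, tree="lxml")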
+
+
+Bugs
+----
+
+Please report any bugs on the `issue tracker
+<https://github.com/html5lib/html5lib-python/issues>`_.
+
+
+Tests
+-----
+
+Unit tests require the ``pytest`` and ``mock`` libraries and can be
+run using the ``py.test`` command in the root directory.
+
+Test data are contained in a separate `html5lib-tests
+<https://github.com/html5lib/html5lib-tests>`_ repository and included
+as a submodule, so for git checkouts the submodule must be initialized::
+
+ $ git submodule init
+ $ git submodule update
+
+If you have all compatible Python implementations available on your
+system, you can run tests on all of them using the ``tox`` utility,
+which can be found on PyPI.
+
+
+Questions?
+----------
+
+There's a mailing list available for support on Google Groups,
+`html5lib-discuss <http://groups.google.com/group/html5lib-discuss>`_,
+though you may get a quicker response asking on IRC in `#whatwg on
+irc.freenode.net <http://wiki.whatwg.org/wiki/IRC>`_.
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/bench_html.py b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/bench_html.py
new file mode 100644
index 0000000000..cfe53c6733
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/bench_html.py
@@ -0,0 +1,57 @@
+import io
+import os
+import sys
+
+import pyperf
+
+sys.path[0:0] = [os.path.join(os.path.dirname(__file__), "..")]
+import html5lib # noqa: E402
+
+
+def bench_parse(fh, treebuilder):
+ fh.seek(0)
+ html5lib.parse(fh, treebuilder=treebuilder, useChardet=False)
+
+
+def bench_serialize(loops, fh, treebuilder):
+ fh.seek(0)
+ doc = html5lib.parse(fh, treebuilder=treebuilder, useChardet=False)
+
+ range_it = range(loops)
+ t0 = pyperf.perf_counter()
+
+ for loops in range_it:
+ html5lib.serialize(doc, tree=treebuilder, encoding="ascii", inject_meta_charset=False)
+
+ return pyperf.perf_counter() - t0
+
+
+BENCHMARKS = ["parse", "serialize"]
+
+
+def add_cmdline_args(cmd, args):
+ if args.benchmark:
+ cmd.append(args.benchmark)
+
+
+if __name__ == "__main__":
+ runner = pyperf.Runner(add_cmdline_args=add_cmdline_args)
+ runner.metadata["description"] = "Run benchmarks based on Anolis"
+ runner.argparser.add_argument("benchmark", nargs="?", choices=BENCHMARKS)
+
+ args = runner.parse_args()
+ if args.benchmark:
+ benchmarks = (args.benchmark,)
+ else:
+ benchmarks = BENCHMARKS
+
+ with open(os.path.join(os.path.dirname(__file__), "data", "html.html"), "rb") as fh:
+ source = io.BytesIO(fh.read())
+
+ if "parse" in benchmarks:
+ for tb in ("etree", "dom", "lxml"):
+ runner.bench_func("html_parse_%s" % tb, bench_parse, source, tb)
+
+ if "serialize" in benchmarks:
+ for tb in ("etree", "dom", "lxml"):
+ runner.bench_time_func("html_serialize_%s" % tb, bench_serialize, source, tb)
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/bench_wpt.py b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/bench_wpt.py
new file mode 100644
index 0000000000..d5da006984
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/bench_wpt.py
@@ -0,0 +1,45 @@
+import io
+import os
+import sys
+
+import pyperf
+
+sys.path[0:0] = [os.path.join(os.path.dirname(__file__), "..")]
+import html5lib # noqa: E402
+
+
+def bench_html5lib(fh):
+ fh.seek(0)
+ html5lib.parse(fh, treebuilder="etree", useChardet=False)
+
+
+def add_cmdline_args(cmd, args):
+ if args.benchmark:
+ cmd.append(args.benchmark)
+
+
+BENCHMARKS = {}
+for root, dirs, files in os.walk(os.path.join(os.path.dirname(os.path.abspath(__file__)), "data", "wpt")):
+ for f in files:
+ if f.endswith(".html"):
+ BENCHMARKS[f[: -len(".html")]] = os.path.join(root, f)
+
+
+if __name__ == "__main__":
+ runner = pyperf.Runner(add_cmdline_args=add_cmdline_args)
+ runner.metadata["description"] = "Run parser benchmarks from WPT"
+ runner.argparser.add_argument("benchmark", nargs="?", choices=sorted(BENCHMARKS))
+
+ args = runner.parse_args()
+ if args.benchmark:
+ benchmarks = (args.benchmark,)
+ else:
+ benchmarks = sorted(BENCHMARKS)
+
+ for bench in benchmarks:
+ name = "wpt_%s" % bench
+ path = BENCHMARKS[bench]
+ with open(path, "rb") as fh:
+ fh2 = io.BytesIO(fh.read())
+
+ runner.bench_func(name, bench_html5lib, fh2)
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/README.md b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/README.md
new file mode 100644
index 0000000000..5b896cbb7c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/README.md
@@ -0,0 +1,8 @@
+The files in this data are derived from:
+
+ * `html.html`: from [html](http://github.com/whatwg/html), revision
+ 77db356a293f2b152b648c836b6989d17afe42bb. This is the first 5000 lines of `source`. (This is
+ representative of the input to [Anolis](https://bitbucket.org/ms2ger/anolis/); first 5000 lines
+ chosen to make it parse in a reasonable time.)
+
+ * `wpt`: see `wpt/README.md`.
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/html.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/html.html
new file mode 100644
index 0000000000..d2bb1be745
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/html.html
@@ -0,0 +1,5000 @@
+<!-- EDITOR NOTES -*- mode: Text; fill-column: 100 -*-
+ !
+ ! Adding a new element involves editing the following sections:
+ ! - section for the element itself
+ ! - descriptions of the element's categories
+ ! - images/content-venn.svg
+ ! - syntax, if it's void or otherwise special
+ ! - parser, if it's not phrasing-level
+ ! - rendering
+ ! - obsolete section
+ ! - element, attribute, content model, and interface indexes
+ ! - adding it to the section with ARIA mappings
+ !
+ !-->
+
+<!--
+ ! http://lists.w3.org/Archives/Public/www-archive/2014Apr/0034.html
+ !-->
+
+<!--START complete--><!--START dev-html-->
+<!DOCTYPE html>
+<!--SET FINGERPRINT=<span title="fingerprinting vector" class="fingerprint"><img src="images/fingerprint.png" alt="(This is a fingerprinting vector.)" width=46 height=64></span>-->
+<html lang="en-GB-x-hixie" class="big">
+ <head>
+ <title>HTML Standard</title>
+ <script>
+ var loadTimer = new Date();
+ var current_revision = "r" + "$Revision: 1 $".substr(11);
+ current_revision = current_revision.substr(0, current_revision.length - 2);
+ var last_known_revision = current_revision;
+ function F( /* varargs... */) {
+ var fragment = document.createDocumentFragment();
+ for (var index = 0; index < arguments.length; index += 1) {
+ if (arguments[index] instanceof Array) {
+ fragment.appendChild(F.apply(this, arguments[index]));
+ } else if (typeof arguments[index] == 'string') {
+ fragment.appendChild(document.createTextNode(arguments[index]));
+ } else {
+ fragment.appendChild(arguments[index]);
+ }
+ }
+ return fragment;
+ }
+ function E(name, /* optional */ attributes /*, varargs... */) {
+ var element = document.createElement(name);
+ var index = 1;
+ if ((arguments.length > 1) && (typeof attributes != 'string') &&
+ (!(attributes instanceof Node)) && (!(attributes instanceof Array))) {
+ for (var attName in attributes) {
+ if (typeof attributes[attName] == 'boolean') {
+ if (attributes[attName])
+ element.setAttribute(attName, '');
+ } else if (typeof attributes[attName] == 'function') {
+ element[attName] = attributes[attName];
+ } else {
+ element.setAttribute(attName, attributes[attName]);
+ }
+ }
+ index = 2;
+ }
+ for (; index < arguments.length; index += 1) {
+ if (arguments[index] instanceof Array) {
+ element.appendChild(F.apply(this, arguments[index]));
+ } else if (typeof arguments[index] == 'string') {
+ element.appendChild(document.createTextNode(arguments[index]));
+ } else {
+ element.appendChild(arguments[index]);
+ }
+ }
+ return element;
+ }
+ function getCookie(name) {
+ var params = location.search.substr(1).split("&");
+ for (var index = 0; index < params.length; index++) {
+ if (params[index] == name)
+ return "1";
+ var data = params[index].split("=");
+ if (data[0] == name)
+ return unescape(data[1]);
+ }
+ var cookies = document.cookie.split("; ");
+ for (var index = 0; index < cookies.length; index++) {
+ var data = cookies[index].split("=");
+ if (data[0] == name)
+ return unescape(data[1]);
+ }
+ return null;
+ }
+ var currentAlert;
+ var currentAlertTimeout;
+ function showAlert(s, href) {
+ if (!currentAlert) {
+ currentAlert = document.createElement('div');
+ currentAlert.id = 'alert';
+ var x = document.createElement('button');
+ x.textContent = '\u2573';
+ x.onclick = closeAlert2;
+ currentAlert.appendChild(x);
+ currentAlert.appendChild(document.createElement('span'));
+ currentAlert.onmousemove = function () {
+ clearTimeout(currentAlertTimeout);
+ currentAlert.className = '';
+ currentAlertTimeout = setTimeout(closeAlert, 10000);
+ }
+ document.body.appendChild(currentAlert);
+ } else {
+ clearTimeout(currentAlertTimeout);
+ currentAlert.className = '';
+ }
+ currentAlert.lastChild.textContent = '';
+ currentAlert.lastChild.appendChild(F(s));
+ if (href) {
+ var link = document.createElement('a');
+ link.href = href;
+ link.textContent = href;
+ currentAlert.lastChild.appendChild(F(' ', link));
+ }
+ currentAlertTimeout = setTimeout(closeAlert, 10000);
+ }
+ function closeAlert() {
+ clearTimeout(currentAlertTimeout);
+ if (currentAlert) {
+ currentAlert.className = 'closed';
+ currentAlertTimeout = setTimeout(closeAlert2, 3000);
+ }
+ }
+ function closeAlert2() {
+ clearTimeout(currentAlertTimeout);
+ if (currentAlert) {
+ currentAlert.parentNode.removeChild(currentAlert);
+ currentAlert = null;
+ }
+ }
+ window.addEventListener('keydown', function (event) {
+ if (event.keyCode == 27) {
+ if (currentAlert)
+ closeAlert2();
+ } else {
+ closeAlert();
+ }
+ }, false);
+ window.addEventListener('scroll', function (event) {
+ closeAlert();
+ }, false);
+ function load(script) {
+ var e = document.createElement('script');
+ e.setAttribute('src', '//www.whatwg.org/specs/web-apps/current-work/' + script);
+ document.body.appendChild(e);
+ }
+
+ var startedInit = 0;
+ function init() {
+ startedInit = 1;
+ if (location.search == '?slow-browser')
+ return;
+ load('reviewer.js');
+ if (document.documentElement.className == "big" || document.documentElement.className == "big split index")
+ load('toc.js');
+ load('updater.js');
+ load('dfn.js');
+ load('status.js');
+ if (getCookie('profile') == '1')
+ document.getElementsByTagName('h2')[0].textContent += '; load: ' + (new Date() - loadTimer) + 'ms';
+ }
+ if (document.documentElement.className == "")
+ setTimeout(function () {
+ if (!startedInit)
+ showAlert("Too slow? Try reading the multipage copy of the spec instead:", "http://whatwg.org/html");
+ }, 6000);
+
+ window.addEventListener('keypress', function (event) {
+ if ((event.which == 114) && (event.metaKey)) {
+ if (!confirm('Are you sure you want to reload this page?'))
+ event.preventDefault();
+ }
+ }, false);
+
+ </script>
+ <link rel="stylesheet" href="//www.whatwg.org/style/specification">
+ <link rel="icon" href="//www.whatwg.org/images/icon">
+ <style>
+ .proposal { border: blue solid; padding: 1em; }
+ .bad, .bad *:not(.XXX) { color: gray; border-color: gray; background: transparent; }
+ #updatesStatus { display: none; z-index: 10; }
+ #updatesStatus.relevant { display: block; position: fixed; right: 1em; top: 1em; padding: 0.5em; font: bold small sans-serif; min-width: 25em; width: 30%; max-width: 40em; height: auto; border: ridge 4px gray; background: #EEEEEE; color: black; }
+ div.head .logo { width: 11em; margin-bottom: 20em; }
+
+ #configUI { position: absolute; z-index: 20; top: auto; right: 0; width: 11em; padding: 0 0.5em 0 0.5em; font-size: small; background: gray; background: rgba(32,32,32,0.9); color: white; border-radius: 1em 0 0 1em; -moz-border-radius: 1em 0 0 1em; }
+ #configUI p { margin: 0.75em 0; padding: 0.3em; }
+ #configUI p label { display: block; }
+ #configUI #updateUI, #configUI .loginUI { text-align: center; }
+ #configUI input[type=button] { display: block; margin: auto; }
+ #configUI :link, #configUI :visited { color: white; }
+ #configUI :link:hover, #configUI :visited:hover { background: transparent; }
+
+ #alert { position: fixed; top: 20%; left: 20%; right: 20%; font-size: 2em; padding: 0.5em; z-index: 40; background: gray; background: rgba(32,32,32,0.9); color: white; border-radius: 1em; -moz-border-radius: 1em; -webkit-transition: opacity 1s linear; }
+ #alert.closed { opacity: 0; }
+ #alert button { position: absolute; top: -1em; right: 2em; border-radius: 1em 1em 0 0; border: none; line-height: 0.9; color: white; background: rgb(64,64,64); font-size: 0.6em; font-weight: 900; cursor: pointer; }
+ #alert :link, #alert :visited { color: white; }
+ #alert :link:hover, #alert :visited:hover { background: transparent; }
+ @media print { #configUI { display: none; } }
+
+ .rfc2119 { font-variant: small-caps; text-shadow: 0 0 0.5em yellow; position: static; }
+ .rfc2119::after { position: absolute; left: 0; width: 25px; text-align: center; color: yellow; text-shadow: 0.075em 0.075em 0.2em black; }
+ .rfc2119.m\ust::after { content: '\2605'; }
+ .rfc2119.s\hould::after { content: '\2606'; }
+ [hidden] { display: none; }
+
+ .fingerprint { float: right; }
+
+ .applies thead th > * { display: block; }
+ .applies thead code { display: block; }
+ .applies td { text-align: center; }
+ .applies .yes { background: yellow; }
+
+ .matrix, .matrix td { border: hidden; text-align: right; }
+ .matrix { margin-left: 2em; }
+
+ .vertical-summary-table tr > th[rowspan="2"]:first-child + th,
+ .vertical-summary-table tr > td[rowspan="2"]:first-child + td { border-bottom: hidden; }
+
+ .dice-example { border-collapse: collapse; border-style: hidden solid solid hidden; border-width: thin; margin-left: 3em; }
+ .dice-example caption { width: 30em; font-size: smaller; font-style: italic; padding: 0.75em 0; text-align: left; }
+ .dice-example td, .dice-example th { border: solid thin; width: 1.35em; height: 1.05em; text-align: center; padding: 0; }
+
+ td.eg { border-width: thin; text-align: center; }
+
+ #table-example-1 { border: solid thin; border-collapse: collapse; margin-left: 3em; }
+ #table-example-1 * { font-family: "Essays1743", serif; line-height: 1.01em; }
+ #table-example-1 caption { padding-bottom: 0.5em; }
+ #table-example-1 thead, #table-example-1 tbody { border: none; }
+ #table-example-1 th, #table-example-1 td { border: solid thin; }
+ #table-example-1 th { font-weight: normal; }
+ #table-example-1 td { border-style: none solid; vertical-align: top; }
+ #table-example-1 th { padding: 0.5em; vertical-align: middle; text-align: center; }
+ #table-example-1 tbody tr:first-child td { padding-top: 0.5em; }
+ #table-example-1 tbody tr:last-child td { padding-bottom: 1.5em; }
+ #table-example-1 tbody td:first-child { padding-left: 2.5em; padding-right: 0; width: 9em; }
+ #table-example-1 tbody td:first-child::after { content: leader(". "); }
+ #table-example-1 tbody td { padding-left: 2em; padding-right: 2em; }
+ #table-example-1 tbody td:first-child + td { width: 10em; }
+ #table-example-1 tbody td:first-child + td ~ td { width: 2.5em; }
+ #table-example-1 tbody td:first-child + td + td + td ~ td { width: 1.25em; }
+
+ .apple-table-examples { border: none; border-collapse: separate; border-spacing: 1.5em 0em; width: 40em; margin-left: 3em; }
+ .apple-table-examples * { font-family: "Times", serif; }
+ .apple-table-examples td, .apple-table-examples th { border: none; white-space: nowrap; padding-top: 0; padding-bottom: 0; }
+ .apple-table-examples tbody th:first-child { border-left: none; width: 100%; }
+ .apple-table-examples thead th:first-child ~ th { font-size: smaller; font-weight: bolder; border-bottom: solid 2px; text-align: center; }
+ .apple-table-examples tbody th::after, .apple-table-examples tfoot th::after { content: leader(". ") }
+ .apple-table-examples tbody th, .apple-table-examples tfoot th { font: inherit; text-align: left; }
+ .apple-table-examples td { text-align: right; vertical-align: top; }
+ .apple-table-examples.e1 tbody tr:last-child td { border-bottom: solid 1px; }
+ .apple-table-examples.e1 tbody + tbody tr:last-child td { border-bottom: double 3px; }
+ .apple-table-examples.e2 th[scope=row] { padding-left: 1em; }
+ .apple-table-examples sup { line-height: 0; }
+
+ .three-column-nowrap tr > td:first-child,
+ .three-column-nowrap tr > td:first-child + td,
+ .three-column-nowrap tr > td:first-child + td + td { white-space: nowrap; }
+
+ .details-example img { vertical-align: top; }
+
+ #base64-table {
+ white-space: nowrap;
+ font-size: 0.6em;
+ column-width: 6em;
+ column-count: 5;
+ column-gap: 1em;
+ -moz-column-width: 6em;
+ -moz-column-count: 5;
+ -moz-column-gap: 1em;
+ -webkit-column-width: 6em;
+ -webkit-column-count: 5;
+ -webkit-column-gap: 1em;
+ }
+ #base64-table thead { display: none; }
+ #base64-table * { border: none; }
+ #base64-table tbody td:first-child:after { content: ':'; }
+ #base64-table tbody td:last-child { text-align: right; }
+
+ #named-character-references-table {
+ white-space: nowrap;
+ font-size: 0.6em;
+ column-width: 30em;
+ column-gap: 1em;
+ -moz-column-width: 30em;
+ -moz-column-gap: 1em;
+ -webkit-column-width: 30em;
+ -webkit-column-gap: 1em;
+ }
+ #named-character-references-table > table > tbody > tr > td:first-child + td,
+ #named-character-references-table > table > tbody > tr > td:last-child { text-align: center; }
+ #named-character-references-table > table > tbody > tr > td:last-child:hover > span { position: absolute; top: auto; left: auto; margin-left: 0.5em; line-height: 1.2; font-size: 5em; border: outset; padding: 0.25em 0.5em; background: white; width: 1.25em; height: auto; text-align: center; }
+ #named-character-references-table > table > tbody > tr#entity-CounterClockwiseContourIntegral > td:first-child { font-size: 0.5em; }
+
+ .glyph.control { color: red; }
+
+ @font-face {
+ font-family: 'Essays1743';
+ src: url('//www.whatwg.org/specs/web-apps/current-work/fonts/Essays1743.ttf');
+ }
+ @font-face {
+ font-family: 'Essays1743';
+ font-weight: bold;
+ src: url('//www.whatwg.org/specs/web-apps/current-work/fonts/Essays1743-Bold.ttf');
+ }
+ @font-face {
+ font-family: 'Essays1743';
+ font-style: italic;
+ src: url('//www.whatwg.org/specs/web-apps/current-work/fonts/Essays1743-Italic.ttf');
+ }
+ @font-face {
+ font-family: 'Essays1743';
+ font-style: italic;
+ font-weight: bold;
+ src: url('//www.whatwg.org/specs/web-apps/current-work/fonts/Essays1743-BoldItalic.ttf');
+ }
+ </style>
+ <link rel="stylesheet" href="status.css">
+ </head>
+ <body onload="init()">
+ <header class="head with-buttons" id="head">
+ <p><a href="//www.whatwg.org/" class="logo"><img width="101" height="101" alt="WHATWG" src="/images/logo"></a></p>
+ <hgroup>
+ <h1 class="allcaps">HTML</h1>
+ <h2 class="no-num no-toc">Living Standard &mdash; Last Updated <span class="pubdate">[DATE: 01 Jan 1901]</span></h2>
+ </hgroup>
+ <div>
+ <div>
+ <a href="//whatwg.org/html"><span><strong>Multipage Version</strong> <code>whatwg.org/html</code></span></a>
+ <a href="//whatwg.org/c"><span><strong>One-Page Version</strong> <code>whatwg.org/c</code></span></a>
+ <a href="//whatwg.org/pdf"><span><strong>PDF Version</strong> <code>whatwg.org/pdf</code></span></a>
+ <a href="http://developers.whatwg.org/"><span><strong>Developer Version</strong> <code>developers.whatwg.org</code></span></a>
+ </div>
+ <div>
+ <a class="misc" href="//whatwg.org/faq"><span><strong>FAQ</strong> <code>whatwg.org/faq</code></span></a>
+ <a class="misc" href="http://validator.whatwg.org/"><span><strong>Validators</strong> <code>validator.whatwg.org</code></span></a>
+ </div>
+ <div>
+ <a class="comms" href="//www.whatwg.org/mailing-list"><span><strong>Join our Mailing List</strong> <code>whatwg@whatwg.org</code></span></a>
+ <a class="comms" href="http://wiki.whatwg.org/wiki/IRC"><span><strong>Join us on IRC</strong> <code>#whatwg on Freenode</code></span></a>
+ <a class="comms" href="http://forums.whatwg.org/"><span><strong>Join our Forums</strong> <code>forums.whatwg.org</code></span></a>
+ </div>
+ <div>
+ <!--<a class="changes" href="http://svn.whatwg.org/webapps"><span><strong>SVN Repository</strong> <code>svn.whatwg.org/webapps</code></span></a>-->
+ <a class="changes" href="http://html5.org/tools/web-apps-tracker"><span><strong>Change Log</strong> <code>html5.org's tracker</code></span></a>
+ <a class="changes" href="http://twitter.com/WHATWG"><span><strong>Twitter Updates</strong> <code>@WHATWG</code></span></a>
+ </div>
+ <div>
+ <a class="feedback" href="https://www.w3.org/Bugs/Public/buglist.cgi?bug_status=UNCONFIRMED&amp;bug_status=NEW&amp;bug_status=ASSIGNED&amp;bug_status=REOPENED&amp;component=HTML&amp;product=WHATWG"><span><strong>View Open Bugs</strong> <code>filed in Bugzilla</code></span></a>
+ <a class="feedback" href="//www.whatwg.org/newbug"><span><strong>File a Bug</strong> <code>whatwg.org/newbug</code></span></a>
+ <a class="feedback" href="http://ian.hixie.ch/+"><span><strong>E-mail the Editor</strong> <code>ian@hixie.ch</code></span></a>
+ </div>
+ </div>
+ </header>
+
+ <hr>
+
+ <div id="configUI"></div>
+
+ <h2 class="no-num no-toc" id="contents">Table of contents</h2>
+ <!--toc-->
+
+ <hr>
+
+<!--
+ <pre class="idl">
+ interface Screen { }; // CSSOM
+ interface URL { }; // URL API
+ interface Blob { }; // File API
+ interface File : Blob { }; // File API
+ interface FileList { }; // File API
+ interface WebGLRenderingContext { }; // WebGL
+ interface XMLDocument { }; // DOM
+ interface HTMLCollection { }; // DOM
+ interface DOMTokenList { }; // DOM
+ interface DOMSettableTokenList { attribute any value; }; // DOM
+ interface SVGMatrix { }; // SVG
+ // fake interfaces that map to JS object types:
+ interface ArrayBuffer { };
+ interface Int8Array { };
+ interface Uint8Array { };
+ interface Uint8ClampedArray { };
+ interface Int16Array { };
+ interface Uint16Array { };
+ interface Int32Array { };
+ interface Uint32Array { };
+ interface Float32Array { };
+ interface Float64Array { };
+ </pre>
+-->
+
+ <h2 id="introduction">Introduction</h2>
+
+ <div class="nodev">
+
+ <h3 id="abstract">Where does this specification fit?</h3>
+
+ <p>This specification defines a big part of the Web platform, in lots of detail. Its place in the
+ Web platform specification stack relative to other specifications can be best summed up as
+ follows:</p>
+
+ <p><img src="images/abstract.png" width="398" height="359" alt="It consists of everything else, above such core technologies as HTTP, URI/IRIs, DOM, XML, Unicode, and ECMAScript; below presentation-layer technologies like CSS and the NPAPI; and to the side of technologies like Geolocation, SVG, MathML, and XHR."></p>
+
+ </div>
+
+
+ <h3 id="is-this-html5?">Is this HTML5?</h3>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>In short: Yes.</p>
+
+ <p>In more length: The term "HTML5" is widely used as a buzzword to refer to modern Web
+ technologies, many of which (though by no means all) are developed at the WHATWG. This document is
+ one such; others are available from <a href="http://www.whatwg.org/specs/">the WHATWG
+ specification index</a>.</p>
+
+ <p class="note">Although we have asked them to stop doing so, the W3C also republishes some parts
+ of this specification as separate documents. There are numerous differences between this
+ specification and the W3C forks; some minor, some major. Unfortunately these are not currently
+ accurately documented anywhere, so there is no way to know which are intentional and which are
+ not.</p>
+
+
+ <h3>Background</h3>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>HTML is the World Wide Web's core markup language. Originally, HTML was primarily designed as a
+ language for semantically describing scientific documents. Its general design, however, has
+ enabled it to be adapted, over the subsequent years, to describe a number of other types of
+ documents and even applications.</p>
+
+
+ <h3>Audience</h3>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>This specification is intended for authors of documents and scripts that use the features
+ defined in this specification<span class="nodev">, implementors of tools that operate on pages that
+ use the features defined in this specification, and individuals wishing to establish the
+ correctness of documents or implementations with respect to the requirements of this
+ specification</span>.</p>
+
+ <p>This document is probably not suited to readers who do not already have at least a passing
+ familiarity with Web technologies, as in places it sacrifices clarity for precision, and brevity
+ for completeness. More approachable tutorials and authoring guides can provide a gentler
+ introduction to the topic.</p>
+
+ <p>In particular, familiarity with the basics of DOM is necessary for a complete understanding of
+ some of the more technical parts of this specification. An understanding of Web IDL, HTTP, XML,
+ Unicode, character encodings, JavaScript, and CSS will also be helpful in places but is not
+ essential.</p>
+
+
+ <h3>Scope</h3>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>This specification is limited to providing a semantic-level markup language and associated
+ semantic-level scripting APIs for authoring accessible pages on the Web ranging from static
+ documents to dynamic applications.</p>
+
+ <p>The scope of this specification does not include providing mechanisms for media-specific
+ customization of presentation (although default rendering rules for Web browsers are included at
+ the end of this specification, and several mechanisms for hooking into CSS are provided as part of
+ the language).</p>
+
+ <p>The scope of this specification is not to describe an entire operating system. In particular,
+ hardware configuration software, image manipulation tools, and applications that users would be
+ expected to use with high-end workstations on a daily basis are out of scope. In terms of
+ applications, this specification is targeted specifically at applications that would be expected
+ to be used by users on an occasional basis, or regularly but from disparate locations, with low
+ CPU requirements. Examples of such applications include online purchasing systems, searching
+ systems, games (especially multiplayer online games), public telephone books or address books,
+ communications software (e-mail clients, instant messaging clients, discussion software), document
+ editing software, etc.</p>
+
+
+ <h3>History</h3>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>For its first five years (1990-1995), HTML went through a number of revisions and experienced a
+ number of extensions, primarily hosted first at CERN, and then at the IETF.</p>
+
+ <p>With the creation of the W3C, HTML's development changed venue again. A first abortive attempt
+ at extending HTML in 1995, known as HTML 3.0, then gave way to a more pragmatic approach known as
+ HTML 3.2, which was completed in 1997. HTML4 quickly followed later that same year.</p>
+
+ <p>The following year, the W3C membership decided to stop evolving HTML and instead begin work on
+ an XML-based equivalent, called XHTML. <!-- http://www.w3.org/MarkUp/future/#summary --> This
+ effort started with a reformulation of HTML4 in XML, known as XHTML 1.0, which added no new
+ features except the new serialisation, and which was completed in 2000. After XHTML 1.0, the W3C's
+ focus turned to making it easier for other working groups to extend XHTML, under the banner of
+ XHTML Modularization. In parallel with this, the W3C also worked on a new language that was not
+ compatible with the earlier HTML and XHTML languages, calling it XHTML2.</p>
+
+ <p>Around the time that HTML's evolution was stopped in 1998, parts of the API for HTML developed
+ by browser vendors were specified and published under the name DOM Level 1 (in 1998) and DOM Level
+ 2 Core and DOM Level 2 HTML (starting in 2000 and culminating in 2003). These efforts then petered
+ out, with some DOM Level 3 specifications published in 2004 but the working group being closed
+ before all the Level 3 drafts were completed.</p>
+
+ <p>In 2003, the publication of XForms, a technology which was positioned as the next generation of
+ Web forms, sparked a renewed interest in evolving HTML itself, rather than finding replacements
+ for it. This interest was born of the realization that XML's deployment as a Web technology was
+ limited to entirely new technologies (like RSS and later Atom), rather than as a replacement for
+ existing deployed technologies (like HTML).</p>
+
+ <p>A proof of concept to show that it was possible to extend HTML4's forms to provide many of the
+ features that XForms 1.0 introduced, without requiring browsers to implement rendering engines
+ that were incompatible with existing HTML Web pages, was the first result of this renewed
+ interest. At this early stage, while the draft was already publicly available, and input was
+ already being solicited from all sources, the specification was only under Opera Software's
+ copyright.</p>
+
+ <p>The idea that HTML's evolution should be reopened was tested at a W3C workshop in 2004, where
+ some of the principles that underlie the HTML5 work (described below), as well as the
+ aforementioned early draft proposal covering just forms-related features, were presented to the
+ W3C jointly by Mozilla and Opera. The proposal was rejected on the grounds that it
+ conflicted with the previously chosen direction for the Web's evolution; the W3C staff and
+ membership voted to continue developing XML-based replacements instead.</p>
+
+ <p>Shortly thereafter, Apple, Mozilla, and Opera jointly announced their intent to continue
+ working on the effort under the umbrella of a new venue called the WHATWG. A public mailing list
+ was created, and the draft was moved to the WHATWG site. The copyright was subsequently amended to
+ be jointly owned by all three vendors, and to allow reuse of the specification.</p>
+
+ <p>The WHATWG was based on several core principles, in particular that technologies need to be
+ backwards compatible, that specifications and implementations need to match even if this means
+ changing the specification rather than the implementations, and that specifications need to be
+ detailed enough that implementations can achieve complete interoperability without
+ reverse-engineering each other.</p>
+
+ <p>The latter requirement in particular required that the scope of the HTML5 specification include
+ what had previously been specified in three separate documents: HTML4, XHTML1, and DOM2 HTML. It
+ also meant including significantly more detail than had previously been considered the norm.</p>
+
+ <p>In 2006, the W3C indicated an interest in participating in the development of HTML5 after all,
+ and in 2007 formed a working group chartered to work with the WHATWG on the development of the
+ HTML5 specification. Apple, Mozilla, and Opera allowed the W3C to publish the specification under
+ the W3C copyright, while keeping a version with the less restrictive license on the WHATWG
+ site.</p>
+
+ <p>For a number of years, both groups then worked together. In 2011, however, the groups came to
+ the conclusion that they had different goals: the W3C wanted to publish a "finished" version of
+ "HTML5", while the WHATWG wanted to continue working on a Living Standard for HTML, continuously
+ maintaining the specification rather than freezing it in a state with known problems, and adding
+ new features as needed to evolve the platform.</p>
+
+ <p>Since then, the WHATWG has been working on this specification (amongst others), and the W3C has
+ been copying fixes made by the WHATWG into their fork of the document, as well as making other
+ changes, some intentional and some not, with no documentation listing or explaining the
+ differences.</p>
+
+
+
+ <h3>Design notes</h3>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>It must be admitted that many aspects of HTML appear at first glance to be nonsensical and
+ inconsistent.</p>
+
+ <p>HTML, its supporting DOM APIs, as well as many of its supporting technologies, have been
+ developed over a period of several decades by a wide array of people with different priorities
+ who, in many cases, did not know of each other's existence.</p>
+
+ <p>Features have thus arisen from many sources, and have not always been designed in especially
+ consistent ways. Furthermore, because of the unique characteristics of the Web, implementation
+ bugs have often become de facto, and now de jure, standards, as content is often unintentionally
+ written in ways that rely on them before they can be fixed.</p>
+
+ <p>Despite all this, efforts have been made to adhere to certain design goals. These are described
+ in the next few subsections.</p>
+
+
+
+ <h4>Serializability of script execution</h4>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>To avoid exposing Web authors to the complexities of multithreading, the HTML and DOM APIs are
+ designed such that no script can ever detect the simultaneous execution of other scripts. Even
+ with <span data-x="Worker">workers</span>, the intent is that the behavior of implementations can
+ be thought of as completely serializing the execution of all scripts in all <span data-x="browsing
+ context">browsing contexts</span>.</p>
+
+ <p class="note">The <code
+ data-x="dom-navigator-yieldForStorageUpdates">navigator.yieldForStorageUpdates()</code> method, in
+ this model, is equivalent to allowing other scripts to run while the calling script is
+ blocked.</p>
+
+
+
+ <h4>Compliance with other specifications</h4>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>This specification interacts with and relies on a wide variety of other specifications. In
+ certain circumstances, unfortunately, conflicting needs have led to this specification violating
+ the requirements of these other specifications. Whenever this has occurred, the transgressions
+ have each been noted as a "<dfn>willful violation</dfn>", and the reason for the violation has
+ been noted.</p>
+
+
+
+ <h4>Extensibility</h4>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>HTML has a wide array of extensibility mechanisms that can be used for adding semantics in a
+ safe manner:</p>
+
+ <ul>
+
+ <li><p>Authors can use the <code data-x="attr-class">class</code> attribute to extend elements,
+ effectively creating their own elements, while using the most applicable existing "real" HTML
+ element, so that browsers and other tools that don't know of the extension can still support it
+ somewhat well. This is the tack used by microformats, for example.</p></li>
+
+ <li><p>Authors can include data for inline client-side scripts or server-side site-wide scripts
+ to process using the <code data-x="attr-data-*">data-*=""</code> attributes. These are guaranteed
+ to never be touched by browsers, and allow scripts to include data on HTML elements that scripts
+ can then look for and process (see the combined example after this list).</p></li>
+
+ <li><p>Authors can use the <code data-x="meta">&lt;meta name="" content=""></code> mechanism to
+ include page-wide metadata by registering <span data-x="concept-meta-extensions">extensions to
+ the predefined set of metadata names</span>.</p></li>
+
+ <li><p>Authors can use the <code data-x="attr-hyperlink-rel">rel=""</code> mechanism to annotate
+ links with specific meanings by registering <span data-x="concept-rel-extensions">extensions to
+ the predefined set of link types</span>. This is also used by microformats.</p></li>
+
+ <li><p>Authors can embed raw data using the <code data-x="script">&lt;script type=""></code>
+ mechanism with a custom type, for further handling by inline or server-side scripts.</p></li>
+
+ <li><p>Authors can create <span data-x="plugin">plugins</span> and invoke them using the
+ <code>embed</code> element. This is how Flash works.</p></li>
+
+ <li><p>Authors can extend APIs using the JavaScript prototyping mechanism. This is widely used by
+ script libraries, for instance.</p></li>
+
+ <li><p>Authors can use the microdata feature (the <code
+ data-x="attr-itemscope">itemscope=""</code> and <code data-x="attr-itemprop">itemprop=""</code>
+ attributes) to embed nested name-value pairs of data to be shared with other applications and
+ sites.</p></li>
+
+ </ul>
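+
+ <div class="example">
+
+ <p>As an illustrative sketch (the class name, data-* attribute, and vocabulary URL below are
+ invented for this example), several of these mechanisms can be combined on ordinary elements:</p>
+
+ <pre>&lt;article class="review" itemscope itemtype="http://example.com/vocab/review" data-review-id="42">
+ &lt;h1 itemprop="title">A fine specification&lt;/h1>
+ &lt;p>Rating: &lt;span itemprop="rating">5&lt;/span> out of 5&lt;/p>
+&lt;/article>
+
+&lt;script>
+ // Scripts can read the data-* attribute back through the dataset API.
+ var review = document.querySelector('.review');
+ console.log(review.dataset.reviewId); // "42"
+&lt;/script></pre>
+
+ </div>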
+
+
+
+
+ <h3>HTML vs XHTML</h3>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>This specification defines an abstract language for describing documents and applications, and
+ some APIs for interacting with in-memory representations of resources that use this language.</p>
+
+ <p>The in-memory representation is known as "DOM HTML", or "the DOM" for short.</p>
+
+ <p>There are various concrete syntaxes that can be used to transmit resources that use this
+ abstract language, two of which are defined in this specification.</p>
+
+ <p>The first such concrete syntax is the HTML syntax. This is the format suggested for most
+ authors. It is compatible with most legacy Web browsers. If a document is transmitted with the
+ <code>text/html</code> <span>MIME type</span>, then it will be processed as an HTML document by
+ Web browsers. This specification defines the latest HTML syntax, known simply as "HTML".</p>
+
+ <p>The second concrete syntax is the XHTML syntax, which is an application of XML. When a document
+ is transmitted with an <span>XML MIME type</span>, such as <code>application/xhtml+xml</code>,
+ then it is treated as an XML document by Web browsers, to be parsed by an XML processor. Authors
+ are reminded that the processing for XML and HTML differs; in particular, even minor syntax errors
+ will prevent a document labeled as XML from being rendered fully, whereas they would be ignored in
+ the HTML syntax. This specification defines the latest XHTML syntax, known simply as "XHTML".</p>
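+
+ <div class="example">
+
+ <p>As a brief sketch, a minimal document using the XHTML syntax, intended to be served with the
+ <code>application/xhtml+xml</code> MIME type, could look like this (note the mandatory
+ <code data-x="">xmlns</code> attribute and the explicitly closed elements):</p>
+
+ <pre>&lt;?xml version="1.0" encoding="utf-8"?>
+&lt;html xmlns="http://www.w3.org/1999/xhtml">
+ &lt;head>&lt;title>Sample XHTML page&lt;/title>&lt;/head>
+ &lt;body>
+  &lt;p>Every element must be explicitly closed, including void elements such as &lt;br/>.&lt;/p>
+ &lt;/body>
+&lt;/html></pre>
+
+ </div>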
+
+ <p>The DOM, the HTML syntax, and the XHTML syntax cannot all represent the same content. For
+ example, namespaces cannot be represented using the HTML syntax, but they are supported in the DOM
+ and in the XHTML syntax. Similarly, documents that use the <code>noscript</code> feature can be
+ represented using the HTML syntax, but cannot be represented with the DOM or in the XHTML syntax.
+ Comments that contain the string "<code data-x="">--&gt;</code>" can only be represented in the
+ DOM, not in the HTML and XHTML syntaxes.</p>
+
+
+ <h3>Structure of this specification</h3>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>This specification is divided into the following major sections:</p>
+
+ <dl>
+
+
+ <dt><a href="#introduction">Introduction</a></dt>
+
+ <dd>Non-normative materials providing a context for the HTML standard.</dd>
+
+
+ <dt><a href="#infrastructure">Common infrastructure</a></dt>
+
+ <dd>The conformance classes, algorithms, definitions, and the common underpinnings of the rest of
+ the specification.</dd>
+
+
+ <dt><a href="#dom">Semantics, structure, and APIs of HTML documents</a></dt>
+
+ <dd>Documents are built from elements. These elements form a tree using the DOM. This section
+ defines the features of this DOM, as well as introducing the features common to all elements, and
+ the concepts used in defining elements.</dd>
+
+
+ <dt><a href="#semantics">The elements of HTML</a></dt>
+
+ <dd>Each element has a predefined meaning, which is explained in this section. Rules for authors
+ on how to use the element<span class="nodev">, along with user agent requirements for how to
+ handle each element,</span> are also given. This includes large signature features of HTML such
+ as video playback and subtitles, form controls and form submission, and a 2D graphics API known
+ as the HTML canvas.</dd>
+
+
+ <dt><a href="#microdata">Microdata</a></dt>
+
+ <dd>This specification introduces a mechanism for adding machine-readable annotations to
+ documents, so that tools can extract trees of name-value pairs from the document. This section
+ describes this mechanism<span class="nodev"> and some algorithms that can be used to convert HTML
+ documents into other formats</span>. This section also defines some sample Microdata vocabularies
+ for contact information, calendar events, and licensing works.</dd>
+
+
+ <dt><a href="#editing">User interaction</a></dt>
+
+ <dd>HTML documents can provide a number of mechanisms for users to interact with and modify
+ content, which are described in this section, such as how focus works, and drag-and-drop.</dd>
+
+
+ <dt><a href="#browsers">Loading Web pages</a></dt>
+
+ <dd>HTML documents do not exist in a vacuum &mdash; this section defines many of the features
+ that affect environments that deal with multiple pages, such as Web browsers and offline
+ caching of Web applications.</dd>
+
+
+ <dt><a href="#webappapis">Web application APIs</a></dt>
+
+ <dd>This section introduces basic features for scripting of applications in HTML.</dd>
+
+
+ <dt><a href="#workers">Web workers</a></dt>
+
+ <dd>This section defines an API for background threads in JavaScript.</dd>
+
+
+ <dt><a href="#comms">The communication APIs</a></dt>
+
+ <dd>This section describes some mechanisms that applications written in HTML can use to
+ communicate with other applications from different domains running on the same client. It also
+ introduces a server-push event stream mechanism known as Server Sent Events or
+ <code>EventSource</code>, and a two-way full-duplex socket protocol for scripts known as Web
+ Sockets.
+
+ </dd>
+
+
+ <dt><a href="#webstorage">Web storage</a></dt>
+
+ <dd>This section defines a client-side storage mechanism based on name-value pairs.</dd>
+
+
+ <dt><a href="#syntax">The HTML syntax</a></dt>
+ <dt><a href="#xhtml">The XHTML syntax</a></dt>
+
+ <dd>All of these features would be for naught if they couldn't be represented in a serialized
+ form and sent to other people, and so these sections define the syntaxes of HTML and XHTML<span
+ class="nodev">, along with rules for how to parse content using those syntaxes</span>.</dd>
+
+
+ <dt><a href="#rendering">Rendering</a></dt>
+
+ <dd>This section defines the default rendering rules for Web browsers.</dd>
+
+
+ </dl>
+
+ <p>There are also some appendices, listing <a href="#obsolete">obsolete features</a> and <a
+ href="#iana">IANA considerations</a>, and several indices.</p>
+
+
+
+ <h4>How to read this specification</h4>
+
+ <p>This specification should be read like all other specifications. First, it should be read
+ cover-to-cover, multiple times. Then, it should be read backwards at least once. Then it should be
+ read by picking random sections from the contents list and following all the cross-references.</p>
+
+ <p>As described in the conformance requirements section below, this specification describes
+ conformance criteria for a variety of conformance classes. In particular, there are conformance
+ requirements that apply to <em>producers</em>, for example authors and the documents they create,
+ and there are conformance requirements that apply to <em>consumers</em>, for example Web browsers.
+ They can be distinguished by what they are requiring: a requirement on a producer states what is
+ allowed, while a requirement on a consumer states how software is to act.</p>
+
+ <div class="example">
+
+ <p>For example, "the <code data-x="">foo</code> attribute's value must be a <span>valid
+ integer</span>" is a requirement on producers, as it lays out the allowed values; in contrast,
+ the requirement "the <code data-x="">foo</code> attribute's value must be parsed using the
+ <span>rules for parsing integers</span>" is a requirement on consumers, as it describes how to
+ process the content.</p>
+
+ </div>
+
+ <p><strong>Requirements on producers have no bearing whatsoever on consumers.</strong></p>
+
+ <div class="example">
+
+ <p>Continuing the above example, a requirement stating that a particular attribute's value is
+ constrained to being a <span>valid integer</span> emphatically does <em>not</em> imply anything
+ about the requirements on consumers. It might be that the consumers are in fact required to treat
+ the attribute as an opaque string, completely unaffected by whether the value conforms to the
+ requirements or not. It might be (as in the previous example) that the consumers are required to
+ parse the value using specific rules that define how invalid (non-numeric in this case) values
+ are to be processed.</p>
+
+ </div>
+
+
+
+ <h4>Typographic conventions</h4>
+
+ <p>This is a definition, requirement, or explanation.</p>
+
+ <p class="note">This is a note.</p>
+
+ <p class="example">This is an example.</p>
+
+ <p class="&#x0058;&#x0058;&#x0058;">This is an open issue.</p>
+
+ <p class="warning">This is a warning.</p>
+
+ <pre class="idl extract">interface <dfn data-x="">Example</dfn> {
+ // this is an IDL definition
+};</pre>
+
+ <dl class="domintro">
+
+ <dt><var data-x="">variable</var> = <var data-x="">object</var> . <code data-x="">method</code>( [ <var data-x="">optionalArgument</var> ] )</dt>
+
+ <dd>
+
+ <p>This is a note to authors describing the usage of an interface.</p>
+
+ </dd>
+
+ </dl>
+
+ <pre class="css">/* this is a CSS fragment */</pre>
+
+ <p>The defining instance of a term is marked up like <dfn data-x="x-this">this</dfn>. Uses of that
+ term are marked up like <span data-x="x-this">this</span> or like <i data-x="x-this">this</i>.</p>
+
+ <p>The defining instance of an element, attribute, or API is marked up like <dfn
+ data-x="x-that"><code>this</code></dfn>. References to that element, attribute, or API are marked
+ up like <code data-x="x-that">this</code>.</p>
+
+ <p>Other code fragments are marked up <code data-x="">like this</code>.</p>
+
+ <p>Variables are marked up like <var data-x="">this</var>.</p>
+
+ <p>In an algorithm, steps in <span data-x="synchronous section">synchronous sections</span> are
+ marked with &#x231B;.</p>
+
+ <p>In some cases, requirements are given in the form of lists with conditions and corresponding
+ requirements. In such cases, the requirements that apply to a condition are always the first set
+ of requirements that follow the condition, even in the case of there being multiple sets of
+ conditions for those requirements. Such cases are presented as follows:</p>
+
+ <dl class="switch">
+
+ <dt>This is a condition
+ <dt>This is another condition
+ <dd>This is the requirement that applies to the conditions above.
+
+ <dt>This is a third condition
+ <dd>This is the requirement that applies to the third condition.
+
+ </dl>
+
+
+
+ <h3 id="fingerprint">Privacy concerns</h3>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>Some features of HTML trade user convenience for a measure of user privacy.</p>
+
+ <p>In general, due to the Internet's architecture, a user can be distinguished from another by the
+ user's IP address. IP addresses do not perfectly match to a user; as a user moves from device to
+ device, or from network to network, their IP address will change; similarly, NAT routing, proxy
+ servers, and shared computers enable packets that appear to all come from a single IP address to
+ actually map to multiple users. Technologies such as onion routing can be used to further
+ anonymise requests so that requests from a single user at one node on the Internet appear to come
+ from many disparate parts of the network.</p>
+
+ <p>However, the IP address used for a user's requests is not the only mechanism by which a user's
+ requests could be related to each other. Cookies, for example, are designed specifically to enable
+ this, and are the basis of most of the Web's session features that enable you to log into a site
+ with which you have an account.</p>
+
+ <p>There are other mechanisms that are more subtle. Certain characteristics of a user's system can
+ be used to distinguish groups of users from each other; by collecting enough such information, an
+ individual user's browser's "digital fingerprint" can be computed, which can be as good as, if
+ not better than, an IP address in ascertaining which requests are from the same user.</p>
+
+ <p>Grouping requests in this manner, especially across multiple sites, can be used for both benign
+ (and even arguably positive) purposes, as well as for malevolent purposes. An example of a
+ reasonably benign purpose would be determining whether a particular person seems to prefer sites
+ with dog illustrations as opposed to sites with cat illustrations (based on how often they visit
+ the sites in question) and then automatically using the preferred illustrations on subsequent
+ visits to participating sites. Malevolent purposes, however, could include governments combining
+ information such as the person's home address (determined from the addresses they use when getting
+ driving directions on one site) with their apparent political affiliations (determined by
+ examining the forum sites that they participate in) to determine whether the person should be
+ prevented from voting in an election.</p>
+
+ <p>Since the malevolent purposes can be remarkably evil, user agent implementors are encouraged to
+ consider how to provide their users with tools to minimise leaking information that could be used
+ to fingerprint a user.</p>
+
+ <p>Unfortunately, as the first paragraph in this section implies, sometimes there is great benefit
+ to be derived from exposing the very information that can also be used for fingerprinting
+ purposes, so it's not as easy as simply blocking all possible leaks. For instance, the ability to
+ log into a site to post under a specific identity requires that the user's requests be
+ identifiable as all being from the same user, more or less by definition. More subtly, though,
+ information such as how wide text is, which is necessary for many effects that involve drawing
+ text onto a canvas (e.g. any effect that involves drawing a border around the text) also leaks
+ information that can be used to group a user's requests. (In this case, by potentially exposing,
+ via a brute force search, which fonts a user has installed, information which can vary
+ considerably from user to user.)</p>
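+
+ <div class="example">
+
+ <p>As a rough sketch of how such a leak can arise (the font name and test string below are
+ arbitrary choices for this example), a script can compare the measured width of a string rendered
+ in a candidate font against its width in a generic fallback font:</p>
+
+ <pre>&lt;script>
+ var context = document.createElement('canvas').getContext('2d');
+ function seemsInstalled(fontName) {
+   context.font = '16px monospace';
+   var fallbackWidth = context.measureText('mmmmmmmmmmlli').width;
+   context.font = '16px "' + fontName + '", monospace';
+   // A different width means the candidate font, not the fallback, was used.
+   return context.measureText('mmmmmmmmmmlli').width != fallbackWidth;
+ }
+ seemsInstalled('Some Uncommon Font'); // varies from user to user
+&lt;/script></pre>
+
+ </div>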
+
+ <p>Features in this specification which can be <dfn data-x="fingerprinting vector">used to
+ fingerprint the user</dfn> are marked as this paragraph is.
+ <!--INSERT FINGERPRINT-->
+ </p>
+
+ <p>Other features in the platform can be used for the same purpose, including but not
+ limited to:</p>
+
+ <ul>
+
+ <li>The exact list of which features a user agent supports.</li>
+
+ <li>The maximum allowed stack depth for recursion in script.</li>
+
+ <li>Features that describe the user's environment, like Media Queries and the <code>Screen</code>
+ object. <a href="#refsMQ">[MQ]</a> <a href="#refsCSSOMVIEW">[CSSOMVIEW]</a></li>
+
+ <li>The user's time zone.</li>
+
+ </ul>
+
+
+
+ <h3>A quick introduction to HTML</h3>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>A basic HTML document looks like this:</p>
+
+ <pre id="intro-early-example">&lt;!DOCTYPE html>
+&lt;html>
+ &lt;head>
+ &lt;title>Sample page&lt;/title>
+ &lt;/head>
+ &lt;body>
+ &lt;h1>Sample page&lt;/h1>
+ &lt;p>This is a &lt;a href="demo.html">simple&lt;/a> sample.&lt;/p>
+ &lt;!-- this is a comment -->
+ &lt;/body>
+&lt;/html></pre>
+
+ <p>HTML documents consist of a tree of elements and text. Each element is denoted in the source by
+ a <span data-x="syntax-start-tag">start tag</span>, such as "<code data-x="">&lt;body></code>", and
+ an <span data-x="syntax-end-tag">end tag</span>, such as "<code data-x="">&lt;/body></code>".
+ (Certain start tags and end tags can in certain cases be <span
+ data-x="syntax-tag-omission">omitted</span> and are implied by other tags.)</p>
+
+ <p>Tags have to be nested such that elements are all completely within each other, without
+ overlapping:</p>
+
+ <pre class="bad">&lt;p>This is &lt;em>very &lt;strong>wrong&lt;/em>!&lt;/strong>&lt;/p></pre>
+ <pre>&lt;p>This &lt;em>is &lt;strong>correct&lt;/strong>.&lt;/em>&lt;/p></pre>
+
+ <p>This specification defines a set of elements that can be used in HTML, along with rules about
+ the ways in which the elements can be nested.</p>
+
+ <p>Elements can have attributes, which control how the elements work. In the example below, there
+ is a <span>hyperlink</span>, formed using the <code>a</code> element and its <code
+ data-x="attr-hyperlink-href">href</code> attribute:</p>
+
+ <pre>&lt;a href="demo.html">simple&lt;/a></pre>
+
+ <p><span data-x="syntax-attributes">Attributes</span> are placed inside the start tag, and consist
+ of a <span data-x="syntax-attribute-name">name</span> and a <span
+ data-x="syntax-attribute-value">value</span>, separated by an "<code data-x="">=</code>" character.
+ The attribute value can remain <a href="#unquoted">unquoted</a> if it doesn't contain <span
+ data-x="space character">space characters</span> or any of <code data-x="">"</code> <code
+ data-x="">'</code> <code data-x="">`</code> <code data-x="">=</code> <code data-x="">&lt;</code> or
+ <code data-x="">&gt;</code>. Otherwise, it has to be quoted using either single or double quotes.
+ The value, along with the "<code data-x="">=</code>" character, can be omitted altogether if the
+ value is the empty string.</p>
+
+ <pre>&lt;!-- empty attributes -->
+&lt;input name=address disabled>
+&lt;input name=address disabled="">
+
+&lt;!-- attributes with a value -->
+&lt;input name=address maxlength=200>
+&lt;input name=address maxlength='200'>
+&lt;input name=address maxlength="200"></pre>
+
+ <p>HTML user agents (e.g. Web browsers) then <i>parse</i> this markup, turning it into a DOM
+ (Document Object Model) tree. A DOM tree is an in-memory representation of a document.</p>
+
+ <p>DOM trees contain several kinds of nodes, in particular a <code>DocumentType</code> node,
+ <code>Element</code> nodes, <code>Text</code> nodes, <code>Comment</code> nodes, and in some cases
+ <code>ProcessingInstruction</code> nodes.</p>
+
+ <p>The <a href="#intro-early-example">markup snippet at the top of this section</a> would be
+ turned into the following DOM tree:</p>
+
+ <ul class="domTree"><li class="t10">DOCTYPE: <code data-x="">html</code></li><li class="t1"><code>html</code><ul><li class="t1"><code>head</code><ul><li class="t3"><code>#text</code>: <span data-x="">&#x23CE;&#x2423;&#x2423;</span></li><li class="t1"><code>title</code><ul><li class="t3"><code>#text</code>: <span data-x="">Sample page</span></li></ul></li><li class="t3"><code>#text</code>: <span data-x="">&#x23CE;&#x2423;</span></li></ul></li><li class="t3"><code>#text</code>: <span data-x="">&#x23CE;&#x2423;</span></li><li class="t1"><code>body</code><ul><li class="t3"><code>#text</code>: <span data-x="">&#x23CE;&#x2423;&#x2423;</span></li><li class="t1"><code>h1</code><ul><li class="t3"><code>#text</code>: <span data-x="">Sample page</span></li></ul></li><li class="t3"><code>#text</code>: <span data-x="">&#x23CE;&#x2423;&#x2423;</span></li><li class="t1"><code>p</code><ul><li class="t3"><code>#text</code>: <span data-x="">This is a <!--grammar-check-override--></span></li><li class="t1"><code>a</code> <span data-x="" class="t2"><code class="attribute name">href</code>="<code class="attribute value">demo.html</code>"</span><ul><li class="t3"><code>#text</code>: <span data-x="">simple</span></li></ul></li><li class="t3"><code>#text</code>: <span data-x=""> sample.</span></li></ul></li><li class="t3"><code>#text</code>: <span data-x="">&#x23CE;&#x2423;&#x2423;</span></li><li class="t8"><code>#comment</code>: <span data-x=""> this is a comment </span></li><li class="t3"><code>#text</code>: <span data-x="">&#x23CE;&#x2423;&#x23CE;</span></li></ul></li></ul></li></ul>
+
+ <p>The <span>root element</span> of this tree is the <code>html</code> element, which is the
+ element always found at the root of HTML documents. It contains two elements, <code>head</code>
+ and <code>body</code>, as well as a <code>Text</code> node between them.</p>
+
+ <p>There are many more <code>Text</code> nodes in the DOM tree than one would initially expect,
+ because the source contains a number of spaces (represented here by "&#x2423;") and line breaks
+ ("&#x23CE;") that all end up as <code>Text</code> nodes in the DOM. However, for historical
+ reasons not all of the spaces and line breaks in the original markup appear in the DOM. In
+ particular, all the whitespace before the <code>head</code> start tag ends up being dropped silently,
+ and all the whitespace after the <code>body</code> end tag ends up placed at the end of the
+ <code>body</code>.</p>
+
+ <p>The <code>head</code> element contains a <code>title</code> element, which itself contains a
+ <code>Text</code> node with the text "Sample page". Similarly, the <code>body</code> element
+ contains an <code>h1</code> element, a <code>p</code> element, and a comment.</p>
+
+ <hr>
+
+ <p>This DOM tree can be manipulated from scripts in the page. Scripts (typically in JavaScript)
+ are small programs that can be embedded using the <code>script</code> element or using <span>event
+ handler content attributes</span>. For example, here is a form with a script that sets the value
+ of the form's <code>output</code> element to say "Hello World":</p>
+
+ <pre>&lt;<span>form</span> <span data-x="attr-form-name">name</span>="main">
+ Result: &lt;<span>output</span> <span data-x="attr-fe-name">name</span>="result">&lt;/output>
+ &lt;<span>script</span>>
+ <span data-x="Document">document</span>.<span data-x="dom-document-forms">forms</span>.main.<span data-x="dom-form-elements">elements</span>.result.<span data-x="dom-output-value">value</span> = 'Hello World';
+ &lt;/script>
+&lt;/form></pre>
+
+ <p>Each element in the DOM tree is represented by an object, and these objects have APIs so that
+ they can be manipulated. For instance, a link (e.g. the <code>a</code> element in the tree above)
+ can have its "<code data-x="attr-hyperlink-href">href</code>" attribute changed in several
+ ways:</p>
+
+ <pre>var a = <span data-x="Document">document</span>.<span data-x="dom-document-links">links</span>[0]; // obtain the first link in the document
+a.<span data-x="dom-url-href">href</span> = 'sample.html'; // change the destination URL of the link
+a.<span data-x="dom-url-protocol">protocol</span> = 'https'; // change just the scheme part of the URL
+a.setAttribute('href', 'http://example.com/'); // change the content attribute directly</pre>
+
+ <p>Since DOM trees are used as the way to represent HTML documents when they are processed and
+ presented by implementations (especially interactive implementations like Web browsers), this
+ specification is mostly phrased in terms of DOM trees, instead of the markup described above.</p>
+
+ <hr>
+
+ <p>HTML documents represent a media-independent description of interactive content. HTML documents
+ might be rendered to a screen, or through a speech synthesiser, or on a braille display. To
+ influence exactly how such rendering takes place, authors can use a styling language such as
+ CSS.</p>
+
+ <p>In the following example, the page has been made yellow-on-blue using CSS.</p>
+
+ <pre>&lt;!DOCTYPE html>
+&lt;html>
+ &lt;head>
+ &lt;title>Sample styled page&lt;/title>
+ &lt;style>
+ body { background: navy; color: yellow; }
+ &lt;/style>
+ &lt;/head>
+ &lt;body>
+ &lt;h1>Sample styled page&lt;/h1>
+ &lt;p>This page is just a demo.&lt;/p>
+ &lt;/body>
+&lt;/html></pre>
+
+ <p>For more details on how to use HTML, authors are encouraged to consult tutorials and guides.
+ Some of the examples included in this specification might also be of use, but the novice author is
+ cautioned that this specification, by necessity, defines the language with a level of detail that
+ might be difficult to understand at first.</p>
+
+
+
+<!--ADD-TOPIC:Security-->
+ <h4>Writing secure applications with HTML</h4>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>When HTML is used to create interactive sites, care needs to be taken to avoid introducing
+ vulnerabilities through which attackers can compromise the integrity of the site itself or of the
+ site's users.</p>
+
+ <p>A comprehensive study of this matter is beyond the scope of this document, and authors are
+ strongly encouraged to study the matter in more detail. However, this section attempts to provide
+ a quick introduction to some common pitfalls in HTML application development.</p>
+
+ <p>The security model of the Web is based on the concept of "origins", and correspondingly many of
+ the potential attacks on the Web involve cross-origin actions. <a
+ href="#refsORIGIN">[ORIGIN]</a></p>
+
+ <dl>
+
+ <dt>Not validating user input</dt>
+ <dt>Cross-site scripting (XSS)</dt>
+ <dt>SQL injection</dt>
+
+ <dd>
+
+ <p>When accepting untrusted input, e.g. user-generated content such as text comments, values in
+ URL parameters, messages from third-party sites, etc., it is imperative that the data be
+ validated before use, and properly escaped when displayed. Failing to do this can allow a
+ hostile user to perform a variety of attacks, ranging from the potentially benign, such as
+ providing bogus user information like a negative age, to the serious, such as running scripts
+ every time a user looks at a page that includes the information, potentially propagating the
+ attack in the process, to the catastrophic, such as deleting all data in the server.</p>
+
+ <p>When writing filters to validate user input, it is imperative that filters always be
+ whitelist-based, allowing known-safe constructs and disallowing all other input. Blacklist-based
+ filters that disallow known-bad inputs and allow everything else are not secure, as not
+ everything that is bad is yet known (for example, because it might be invented in the
+ future).</p>
+
+ <div class="example">
+
+ <p>For example, suppose a page looked at its URL's query string to determine what to display,
+ and the site then redirected the user to that page to display a message, as in:</p>
+
+ <pre>&lt;ul>
+ &lt;li>&lt;a href="message.cgi?say=Hello">Say Hello&lt;/a>
+ &lt;li>&lt;a href="message.cgi?say=Welcome">Say Welcome&lt;/a>
+ &lt;li>&lt;a href="message.cgi?say=Kittens">Say Kittens&lt;/a>
+&lt;/ul></pre>
+
+ <p>If the message was just displayed to the user without escaping, a hostile attacker could
+ then craft a URL that contained a script element:</p>
+
+ <pre>http://example.com/message.cgi?say=%3Cscript%3Ealert%28%27Oh%20no%21%27%29%3C/script%3E</pre>
+
+ <p>If the attacker then convinced a victim user to visit this page, a script of the attacker's
+ choosing would run on the page. Such a script could do any number of hostile actions, limited
+ only by what the site offers: if the site is an e-commerce shop, for instance, such a script
+ could cause the user to unknowingly make arbitrarily many unwanted purchases.</p>
+
+ <p>This is called a cross-site scripting attack.</p>
+
+ </div>
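+
+ <div class="example">
+
+ <p>As a minimal sketch of escaping such output on the client side (the element ID below is
+ invented for this example), untrusted text can be assigned to <code data-x="">textContent</code>
+ rather than to <code data-x="">innerHTML</code>, so that it is treated as data rather than as
+ markup:</p>
+
+ <pre>&lt;script>
+ var untrusted = new URLSearchParams(location.search).get('say') || '';
+ // Unsafe: parsed as HTML, so markup such as &lt;img onerror="..."> in the string would run script.
+ // document.getElementById('message').innerHTML = untrusted;
+ // Safe: inserted as a single Text node and rendered literally.
+ document.getElementById('message').textContent = untrusted;
+&lt;/script></pre>
+
+ </div>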
+
+ <p>There are many constructs that can be used to try to trick a site into executing code. Here
+ are some that authors are encouraged to consider when writing whitelist filters:</p>
+
+ <ul>
+
+ <li>When allowing harmless-seeming elements like <code>img</code>, it is important to whitelist
+ any provided attributes as well. If one allowed all attributes then an attacker could, for
+ instance, use the <code data-x="handler-onload">onload</code> attribute to run arbitrary
+ script.</li>
+
+ <li>When allowing URLs to be provided (e.g. for links), the scheme of each URL also needs to be
+ explicitly whitelisted, as there are many schemes that can be abused. The most prominent
+ example is "<code data-x="javascript-protocol">javascript:</code>", but user agents can
+ implement (and indeed, have historically implemented) others.</li> <!-- IE had vbscript:,
+ Netscape had livescript:, etc. -->
+
+ <li>Allowing a <code>base</code> element to be inserted means any <code>script</code> elements
+ in the page with relative links can be hijacked, and similarly that any form submissions can
+ get redirected to a hostile site.</li>
+
+ </ul>
+
+ </dd>
+
+
+ <dt>Cross-site request forgery (CSRF)</dt>
+
+ <dd>
+
+ <p>If a site allows a user to make form submissions with user-specific side-effects, for example
+ posting messages on a forum under the user's name, making purchases, or applying for a passport,
+ it is important to verify that the request was made by the user intentionally, rather than by
+ another site tricking the user into making the request unknowingly.</p>
+
+ <p>This problem exists because HTML forms can be submitted to other origins.</p>
+
+ <p>Sites can prevent such attacks by populating forms with user-specific hidden tokens, or by
+ checking <code data-x="http-origin">Origin</code> headers on all requests.</p>
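+
+ <div class="example">
+
+ <p>A brief sketch of the hidden-token approach (the form action, field name, and token value are
+ placeholders; the server would generate a fresh token for each user session and verify it on
+ every submission):</p>
+
+ <pre>&lt;form action="/purchase" method="post">
+ &lt;input type="hidden" name="csrf-token" value="d4c74594d841139328695756648b6bd6">
+ &lt;input type="submit" value="Buy">
+&lt;/form></pre>
+
+ </div>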
+
+ </dd>
+
+
+
+ <dt>Clickjacking</dt>
+
+ <dd>
+
+ <p>A page that provides users with an interface to perform actions that the user might not wish
+ to perform needs to be designed so as to avoid the possibility that users can be tricked into
+ activating the interface.</p>
+
+ <p>One way that a user could be so tricked is if a hostile site places the victim site in a
+ small <code>iframe</code> and then convinces the user to click, for instance by having the user
+ play a reaction game. Once the user is playing the game, the hostile site can quickly position
+ the iframe under the mouse cursor just as the user is about to click, thus tricking the user
+ into clicking the victim site's interface.</p>
+
+ <p>To avoid this, sites that do not expect to be used in frames are encouraged to only enable
+ their interface if they detect that they are not in a frame (e.g. by comparing the <code
+ data-x="dom-window">window</code> object to the value of the <code data-x="dom-top">top</code>
+ attribute).</p>
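+
+ <div class="example">
+
+ <p>A minimal sketch of such a check (<code data-x="">enableUI()</code> is a placeholder for
+ whatever code reveals or enables the page's controls):</p>
+
+ <pre>&lt;script>
+ if (window.top == window.self) {
+   // The page is not being framed, so it is safe to enable the interface.
+   enableUI();
+ }
+&lt;/script></pre>
+
+ </div>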
+
+ </dd>
+
+ </dl>
+<!--REMOVE-TOPIC:Security-->
+
+
+ <h4>Common pitfalls to avoid when using the scripting APIs</h4>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>Scripts in HTML have "run-to-completion" semantics, meaning that the browser will generally run
+ the script uninterrupted before doing anything else, such as firing further events or continuing
+ to parse the document.</p>
+
+ <p>On the other hand, parsing of HTML files happens asynchronously and incrementally, meaning that
+ the parser can pause at any point to let scripts run. This is generally a good thing, but it does
+ mean that authors need to be careful to avoid hooking event handlers after the events could have
+ possibly fired.</p>
+
+ <p>There are two techniques for doing this reliably: use <span>event handler content
+ attributes</span>, or create the element and add the event handlers in the same script. The latter
+ is safe because, as mentioned earlier, scripts are run to completion before further events can
+ fire.</p>
+
+ <div class="example">
+
+ <p>One way this could manifest itself is with <code>img</code> elements and the <code
+ data-x="event-load">load</code> event. The event could fire as soon as the element has been
+ parsed, especially if the image has already been cached (which is common).</p>
+
+ <p>Here, the author uses the <code data-x="handler-onload">onload</code> handler on an
+ <code>img</code> element to catch the <code data-x="event-load">load</code> event:</p>
+
+ <pre>&lt;img src="games.png" alt="Games" onload="gamesLogoHasLoaded(event)"></pre>
+
+ <p>If the element is being added by script, then so long as the event handlers are added in the
+ same script, the event will still not be missed:</p>
+
+ <pre>&lt;script>
+ var img = new Image();
+ img.src = 'games.png';
+ img.alt = 'Games';
+ img.onload = gamesLogoHasLoaded;
+ // img.addEventListener('load', gamesLogoHasLoaded, false); // would work also
+&lt;/script></pre>
+
+ <p>However, if the author first created the <code>img</code> element and then in a separate
+ script added the event listeners, there's a chance that the <code data-x="event-load">load</code>
+ event would be fired in between, leading it to be missed:</p>
+
+ <pre class="bad">&lt;!-- Do not use this style, it has a race condition! -->
+ &lt;img id="games" src="games.png" alt="Games">
+ &lt;!-- the 'load' event might fire here while the parser is taking a
+ break, in which case you will not see it! -->
+ &lt;script>
+ var img = document.getElementById('games');
+ img.onload = gamesLogoHasLoaded; // might never fire!
+ &lt;/script></pre>
+
+ </div>
+
+
+
+ <h4>How to catch mistakes when writing HTML: validators and conformance checkers</h4>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>Authors are encouraged to make use of conformance checkers (also known as <i>validators</i>) to
+ catch common mistakes. The WHATWG maintains a list of such tools at: <a
+ href="http://validator.whatwg.org/">http://validator.whatwg.org/</a></p>
+
+
+
+ <h3>Conformance requirements for authors</h3>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>Unlike previous versions of the HTML specification, this specification defines in some detail
+ the required processing for invalid documents as well as valid documents.</p> <!-- This has led to
+ some questioning the purpose of conformance criteria: if there is no ambiguity in how something
+ will be processed, why disallow it? -->
+
+ <p>However, even though the processing of invalid content is in most cases well-defined,
+ conformance requirements for documents are still important: in practice, interoperability (the
+ situation in which all implementations process particular content in a reliable and identical or
+ equivalent way) is not the only goal of document conformance requirements. This section details
+ some of the more common reasons for still distinguishing between a conforming document and one
+ with errors.</p>
+
+
+ <h4>Presentational markup</h4>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>The majority of presentational features from previous versions of HTML are no longer allowed.
+ Presentational markup in general has been found to have a number of problems:</p>
+
+ <dl>
+
+ <dt>The use of presentational elements leads to poorer accessibility</dt>
+
+ <dd>
+
+  <p>While it is possible to use presentational markup in a way that provides users of assistive
+  technologies (ATs) with an acceptable experience (e.g. using ARIA), doing so is significantly
+  more difficult than when using semantically-appropriate markup. Furthermore, even using such
+  techniques doesn't help make pages accessible for non-AT non-graphical users, such as users of
+  text-mode browsers.</p>
+
+ <p>Using media-independent markup, on the other hand, provides an easy way for documents to be
+ authored in such a way that they work for more users (e.g. text browsers).</p>
+
+ </dd>
+
+
+ <dt>Higher cost of maintenance</dt>
+
+ <dd>
+
+ <p>It is significantly easier to maintain a site written in such a way that the markup is
+ style-independent. For example, changing the colour of a site that uses
+ <code>&lt;font&nbsp;color=""></code> throughout requires changes across the entire site, whereas
+ a similar change to a site based on CSS can be done by changing a single file.</p>
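+
+  <div class="example">
+
+   <p>As a purely illustrative comparison, the colour in this (non-conforming) fragment has to be
+   repeated in the markup of every page:</p>
+
+   <pre class="bad">&lt;p>&lt;font color="#006600">Welcome to our site!&lt;/font>&lt;/p></pre>
+
+   <p>...whereas the equivalent rule in a shared style sheet can be changed in one place:</p>
+
+   <pre>p { color: #006600; }</pre>
+
+  </div>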
+
+ </dd>
+
+
+ <dt>Larger document sizes</dt>
+
+ <dd>
+
+ <p>Presentational markup tends to be much more redundant, and thus results in larger document
+ sizes.</p>
+
+ </dd>
+
+ </dl>
+
+ <p>For those reasons, presentational markup has been removed from HTML in this version. This
+ change should not come as a surprise; HTML4 deprecated presentational markup many years ago and
+ provided a mode (HTML4 Transitional) to help authors move away from presentational markup; later,
+ XHTML 1.1 went further and obsoleted those features altogether.</p>
+
+ <p>The only remaining presentational markup features in HTML are the <code
+ data-x="attr-style">style</code> attribute and the <code>style</code> element. Use of the <code
+ data-x="attr-style">style</code> attribute is somewhat discouraged in production environments, but
+ it can be useful for rapid prototyping (where its rules can be directly moved into a separate
+ style sheet later) and for providing specific styles in unusual cases where a separate style sheet
+ would be inconvenient. Similarly, the <code>style</code> element can be useful in syndication or
+ for page-specific styles, but in general an external style sheet is likely to be more convenient
+ when the styles apply to multiple pages.</p>
+
+ <p>It is also worth noting that some elements that were previously presentational have been
+ redefined in this specification to be media-independent: <code>b</code>, <code>i</code>,
+ <code>hr</code>, <code>s</code>, <code>small</code>, and <code>u</code>.</p>
+
+
+ <h4>Syntax errors</h4>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>The syntax of HTML is constrained to avoid a wide variety of problems.</p>
+
+ <dl>
+
+ <dt>Unintuitive error-handling behavior</dt>
+
+ <dd>
+
+ <p>Certain invalid syntax constructs, when parsed, result in DOM trees that are highly
+ unintuitive.</p>
+
+ <div class="example">
+
+ <p>For example, the following markup fragment results in a DOM with an <code>hr</code> element
+ that is an <em>earlier</em> sibling of the corresponding <code>table</code> element:</p>
+
+ <pre class="bad">&lt;table>&lt;hr>...</pre>
+
+ </div>
+
+ </dd>
+
+
+ <dt>Errors with optional error recovery</dt>
+
+ <dd>
+
+ <p>To allow user agents to be used in controlled environments without having to implement the
+ more bizarre and convoluted error handling rules, user agents are permitted to fail whenever
+ encountering a <span>parse error</span>.</p>
+
+ </dd>
+
+
+ <dt>Errors where the error-handling behavior is not compatible with streaming user agents</dt>
+
+ <dd>
+
+ <p>Some error-handling behavior, such as the behavior for the <code
+  data-x="">&lt;table>&lt;hr>...</code> example mentioned above, is incompatible with streaming
+ user agents (user agents that process HTML files in one pass, without storing state). To avoid
+ interoperability problems with such user agents, any syntax resulting in such behavior is
+ considered invalid.</p>
+
+ </dd>
+
+
+ <dt>Errors that can result in infoset coercion</dt>
+
+ <dd>
+
+ <p>When a user agent based on XML is connected to an HTML parser, it is possible that certain
+ invariants that XML enforces, such as comments never containing two consecutive hyphens, will be
+ violated by an HTML file. Handling this can require that the parser coerce the HTML DOM into an
+ XML-compatible infoset. Most syntax constructs that require such handling are considered
+ invalid.</p>
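+
+  <div class="example">
+
+   <p>For instance, an HTML parser will happily produce a comment node whose data contains two
+   consecutive hyphens, as in the following illustrative fragment, even though an XML
+   serialisation cannot express such a comment verbatim:</p>
+
+   <pre class="bad">&lt;!-- see the notes -- especially section 3 --></pre>
+
+  </div>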
+
+ </dd>
+
+
+ <dt>Errors that result in disproportionally poor performance</dt>
+
+ <dd>
+
+ <p>Certain syntax constructs can result in disproportionally poor performance. To discourage the
+ use of such constructs, they are typically made non-conforming.</p>
+
+ <div class="example">
+
+ <p>For example, the following markup results in poor performance, since all the unclosed
+ <code>i</code> elements have to be reconstructed in each paragraph, resulting in progressively
+ more elements in each paragraph:</p>
+
+ <pre class="bad">&lt;p>&lt;i>He dreamt.
+&lt;p>&lt;i>He dreamt that he ate breakfast.
+&lt;p>&lt;i>Then lunch.
+&lt;p>&lt;i>And finally dinner.</pre>
+
+ <p>The resulting DOM for this fragment would be:</p>
+
+ <ul class="domTree"><li class="t1"><code>p</code><ul><li class="t1"><code>i</code><ul><li class="t3"><code>#text</code>: <span data-x="">He dreamt.</span></li></ul></li></ul></li><li class="t1"><code>p</code><ul><li class="t1"><code>i</code><ul><li class="t1"><code>i</code><ul><li class="t3"><code>#text</code>: <span data-x="">He dreamt that he ate breakfast.</span></li></ul></li></ul></li></ul></li><li class="t1"><code>p</code><ul><li class="t1"><code>i</code><ul><li class="t1"><code>i</code><ul><li class="t1"><code>i</code><ul><li class="t3"><code>#text</code>: <span data-x="">Then lunch.</span></li></ul></li></ul></li></ul></li></ul></li><li class="t1"><code>p</code><ul><li class="t1"><code>i</code><ul><li class="t1"><code>i</code><ul><li class="t1"><code>i</code><ul><li class="t1"><code>i</code><ul><li class="t3"><code>#text</code>: <span data-x="">And finally dinner.</span></li></ul></li></ul></li></ul></li></ul></li></ul></li></ul>
+
+ </div>
+
+ </dd>
+
+
+ <dt>Errors involving fragile syntax constructs</dt>
+
+ <dd>
+
+ <p>There are syntax constructs that, for historical reasons, are relatively fragile. To help
+ reduce the number of users who accidentally run into such problems, they are made
+ non-conforming.</p>
+
+ <div class="example">
+
+ <p>For example, the parsing of certain named character references in attributes happens even
+ with the closing semicolon being omitted. It is safe to include an ampersand followed by
+ letters that do not form a named character reference, but if the letters are changed to a
+ string that <em>does</em> form a named character reference, they will be interpreted as that
+ character instead.</p>
+
+ <p>In this fragment, the attribute's value is "<code data-x="">?bill&amp;ted</code>":</p>
+
+ <pre class="bad">&lt;a href="?bill&amp;ted">Bill and Ted&lt;/a></pre>
+
+ <p>In the following fragment, however, the attribute's value is actually "<code
+ data-x="">?art&copy;</code>", <em>not</em> the intended "<code data-x="">?art&amp;copy</code>",
+ because even without the final semicolon, "<code data-x="">&amp;copy</code>" is handled the same
+ as "<code data-x="">&amp;copy;</code>" and thus gets interpreted as "<code
+ data-x="">&copy;</code>":</p>
+
+ <pre class="bad">&lt;a href="?art&amp;copy">Art and Copy&lt;/a></pre>
+
+ <p>To avoid this problem, all named character references are required to end with a semicolon,
+ and uses of named character references without a semicolon are flagged as errors.</p>
+
+ <p>Thus, the correct way to express the above cases is as
+ follows:</p>
+
+ <pre>&lt;a href="?bill&amp;ted">Bill and Ted&lt;/a> &lt;!-- &amp;ted is ok, since it's not a named character reference --></pre>
+ <pre>&lt;a href="?art&amp;amp;copy">Art and Copy&lt;/a> &lt;!-- the &amp; has to be escaped, since &amp;copy <em>is</em> a named character reference --></pre>
+
+ </div>
+
+ </dd>
+
+
+ <dt>Errors involving known interoperability problems in legacy user agents</dt>
+
+ <dd>
+
+ <p>Certain syntax constructs are known to cause especially subtle or serious problems in legacy
+ user agents, and are therefore marked as non-conforming to help authors avoid them.</p>
+
+ <div class="example">
+
+ <p>For example, this is why the U+0060 GRAVE ACCENT character (`) is not allowed in unquoted
+ attributes. In certain legacy user agents, <!-- namely IE --> it is sometimes treated as a
+ quote character.</p>
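+
+   <p>An illustrative fragment: a conforming parser includes the grave accents in the attribute
+   value, whereas such legacy user agents could treat them as quote characters, yielding a
+   different value:</p>
+
+   <pre class="bad">&lt;img src=`logo.png` alt="Logo"></pre>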
+
+ </div>
+
+ <div class="example">
+
+ <p>Another example of this is the DOCTYPE, which is required to trigger <span>no-quirks
+ mode</span>, because the behavior of legacy user agents in <span>quirks mode</span> is often
+ largely undocumented.</p>
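+
+   <p>The DOCTYPE that triggers <span>no-quirks mode</span> is simply the following line, placed
+   before the root element:</p>
+
+   <pre>&lt;!DOCTYPE html></pre>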
+
+ </div>
+
+ </dd>
+
+
+<!--ADD-TOPIC:Security-->
+ <dt>Errors that risk exposing authors to security attacks</dt>
+
+ <dd>
+
+ <p>Certain restrictions exist purely to avoid known security problems.</p>
+
+ <div class="example">
+
+ <p>For example, the restriction on using UTF-7 exists purely to avoid authors falling prey to a
+ known cross-site-scripting attack using UTF-7. <a href="#refsUTF7">[UTF7]</a></p>
+
+ </div>
+
+ </dd>
+<!--REMOVE-TOPIC:Security-->
+
+
+ <dt>Cases where the author's intent is unclear</dt>
+
+ <dd>
+
+ <p>Markup where the author's intent is very unclear is often made non-conforming. Correcting
+ these errors early makes later maintenance easier.</p>
+
+ <div class="example">
+
+ <p>For example, it is unclear whether the author intended the following to be an
+ <code>h1</code> heading or an <code>h2</code> heading:</p>
+
+ <pre class="bad">&lt;h1>Contact details&lt;/h2></pre>
+
+ </div>
+
+ </dd>
+
+
+ <dt>Cases that are likely to be typos</dt>
+
+ <dd>
+
+ <p>When a user makes a simple typo, it is helpful if the error can be caught early, as this can
+ save the author a lot of debugging time. This specification therefore usually considers it an
+ error to use element names, attribute names, and so forth, that do not match the names defined
+ in this specification.</p>
+
+ <div class="example">
+
+ <p>For example, if the author typed <code>&lt;capton></code> instead of
+ <code>&lt;caption></code>, this would be flagged as an error and the author could correct the
+ typo immediately.</p>
+
+ </div>
+
+ </dd>
+
+
+ <dt>Errors that could interfere with new syntax in the future</dt>
+
+ <dd>
+
+ <p>In order to allow the language syntax to be extended in the future, certain otherwise
+ harmless features are disallowed.</p>
+
+ <div class="example">
+
+ <p>For example, "attributes" in end tags are ignored currently, but they are invalid, in case a
+ future change to the language makes use of that syntax feature without conflicting with
+ already-deployed (and valid!) content.</p>
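+
+   <p>An illustrative (invalid) case: the attribute on the end tag below is simply discarded by
+   today's parsers, but the fragment is nonetheless non-conforming:</p>
+
+   <pre class="bad">&lt;p>Some text&lt;/p class="note"></pre>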
+
+ </div>
+
+ </dd>
+
+
+ </dl>
+
+ <p>Some authors find it helpful to be in the practice of always quoting all attributes and always
+ including all optional tags, preferring the consistency derived from such custom over the minor
+ benefits of terseness afforded by making use of the flexibility of the HTML syntax. To aid such
+ authors, conformance checkers can provide modes of operation wherein such conventions are
+ enforced.</p>
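+
+  <div class="example">
+
+  <p>As a purely illustrative comparison, both of the following fragments are conforming, but an
+  author following such a convention would write only the second, with the attribute value quoted
+  and the optional end tags included:</p>
+
+  <pre>&lt;ul class=navigation>
+ &lt;li>Home
+ &lt;li>Contact
+&lt;/ul></pre>
+
+  <pre>&lt;ul class="navigation">
+ &lt;li>Home&lt;/li>
+ &lt;li>Contact&lt;/li>
+&lt;/ul></pre>
+
+  </div>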
+
+
+
+ <h4>Restrictions on content models and on attribute values</h4>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>Beyond the syntax of the language, this specification also places restrictions on how elements
+ and attributes can be specified. These restrictions are present for similar reasons:</p>
+
+ <dl>
+
+
+ <dt>Errors involving content with dubious semantics</dt>
+
+ <dd>
+
+ <p>To avoid misuse of elements with defined meanings, content models are defined that restrict
+ how elements can be nested when such nestings would be of dubious value.</p>
+
+ <p class="example">For example, this specification disallows nesting a <code>section</code>
+ element inside a <code>kbd</code> element, since it is highly unlikely for an author to indicate
+ that an entire section should be keyed in.</p>
+
+ </dd>
+
+
+ <dt>Errors that involve a conflict in expressed semantics</dt>
+
+ <dd>
+
+ <p>Similarly, to draw the author's attention to mistakes in the use of elements, clear
+ contradictions in the semantics expressed are also considered conformance errors.</p>
+
+ <div class="example">
+
+ <p>In the fragments below, for example, the semantics are nonsensical: a separator cannot
+ simultaneously be a cell, nor can a radio button be a progress bar.</p>
+
+ <pre class="bad">&lt;hr role="cell"></pre>
+ <pre class="bad">&lt;input type=radio role=progressbar></pre>
+
+ </div>
+
+ <p class="example">Another example is the restrictions on the content models of the
+ <code>ul</code> element, which only allows <code>li</code> element children. Lists by definition
+ consist just of zero or more list items, so if a <code>ul</code> element contains something
+ other than an <code>li</code> element, it's not clear what was meant.</p>
+
+ </dd>
+
+
+ <dt>Cases where the default styles are likely to lead to confusion</dt>
+
+ <dd>
+
+ <p>Certain elements have default styles or behaviors that make certain combinations likely to
+ lead to confusion. Where these have equivalent alternatives without this problem, the confusing
+ combinations are disallowed.</p>
+
+ <p class="example">For example, <code>div</code> elements are rendered as block boxes, and
+ <code>span</code> elements as inline boxes. Putting a block box in an inline box is
+ unnecessarily confusing; since either nesting just <code>div</code> elements, or nesting just
+ <code>span</code> elements, or nesting <code>span</code> elements inside <code>div</code>
+ elements all serve the same purpose as nesting a <code>div</code> element in a <code>span</code>
+ element, but only the latter involves a block box in an inline box, the latter combination is
+ disallowed.</p>
+
+ <p class="example">Another example would be the way <span>interactive content</span> cannot be
+ nested. For example, a <code>button</code> element cannot contain a <code>textarea</code>
+  element. This is because the default behavior of such nested interactive elements would be
+ highly confusing to users. Instead of nesting these elements, they can be placed side by
+ side.</p>
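+
+  <div class="example">
+
+   <p>An illustrative (non-conforming) nesting, followed by a side-by-side arrangement that
+   expresses the same controls without the problem:</p>
+
+   <pre class="bad">&lt;button>Send &lt;textarea name="message">&lt;/textarea>&lt;/button></pre>
+
+   <pre>&lt;textarea name="message">&lt;/textarea> &lt;button>Send&lt;/button></pre>
+
+  </div>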
+
+ </dd>
+
+
+ <dt>Errors that indicate a likely misunderstanding of the specification</dt>
+
+ <dd>
+
+ <p>Sometimes, something is disallowed because allowing it would likely cause author
+ confusion.</p>
+
+ <p class="example">For example, setting the <code data-x="attr-fe-disabled">disabled</code>
+ attribute to the value "<code data-x="">false</code>" is disallowed, because despite the
+ appearance of meaning that the element is enabled, it in fact means that the element is
+ <em>disabled</em> (what matters for implementations is the presence of the attribute, not its
+ value).</p>
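+
+  <div class="example">
+
+   <p>That is, despite appearances, the checkbox in the following (non-conforming) fragment is
+   disabled:</p>
+
+   <pre class="bad">&lt;input type="checkbox" disabled="false"></pre>
+
+  </div>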
+
+ </dd>
+
+
+ <dt>Errors involving limits that have been imposed merely to simplify the language</dt>
+
+ <dd>
+
+ <p>Some conformance errors simplify the language that authors need to learn.</p>
+
+ <p class="example">For example, the <code>area</code> element's <code
+ data-x="attr-area-shape">shape</code> attribute, despite accepting both <code
+ data-x="attr-area-shape-keyword-circ">circ</code> and <code
+ data-x="attr-area-shape-keyword-circle">circle</code> values in practice as synonyms, disallows
+ the use of the <code data-x="attr-area-shape-keyword-circ">circ</code> value, so as to simplify
+ tutorials and other learning aids. There would be no benefit to allowing both, but it would
+ cause extra confusion when teaching the language.</p>
+
+ </dd>
+
+
+ <dt>Errors that involve peculiarities of the parser</dt>
+
+ <dd>
+
+ <p>Certain elements are parsed in somewhat eccentric ways (typically for historical reasons),
+ and their content model restrictions are intended to avoid exposing the author to these
+ issues.</p>
+
+ <div class="example">
+
+ <p>For example, a <code>form</code> element isn't allowed inside <span>phrasing content</span>,
+ because when parsed as HTML, a <code>form</code> element's start tag will imply a
+ <code>p</code> element's end tag. Thus, the following markup results in two <span
+ data-x="paragraph">paragraphs</span>, not one:</p>
+
+ <pre>&lt;p>Welcome. &lt;form>&lt;label>Name:&lt;/label> &lt;input>&lt;/form></pre>
+
+ <p>It is parsed exactly like the following:</p>
+
+ <pre>&lt;p>Welcome. &lt;/p>&lt;form>&lt;label>Name:&lt;/label> &lt;input>&lt;/form></pre>
+
+ </div>
+
+ </dd>
+
+
+ <dt>Errors that would likely result in scripts failing in hard-to-debug ways</dt>
+
+ <dd>
+
+ <p>Some errors are intended to help prevent script problems that would be hard to debug.</p>
+
+ <p class="example">This is why, for instance, it is non-conforming to have two <code
+ data-x="attr-id">id</code> attributes with the same value. Duplicate IDs lead to the wrong
+ element being selected, with sometimes disastrous effects whose cause is hard to determine.</p>
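+
+  <div class="example">
+
+   <p>In the following illustrative fragment, only one of the two paragraphs is found by the
+   lookup, so only one of them is updated by the script:</p>
+
+   <pre class="bad">&lt;p id="status">Saving your changes.&lt;/p>
+&lt;p id="status">An error occurred.&lt;/p>
+&lt;script>
+ // only one of the two elements with this ID is returned here
+ document.getElementById('status').textContent = 'Saved.';
+&lt;/script></pre>
+
+  </div>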
+
+ </dd>
+
+
+ <dt>Errors that waste authoring time</dt>
+
+ <dd>
+
+ <p>Some constructs are disallowed because historically they have been the cause of a lot of
+ wasted authoring time, and by encouraging authors to avoid making them, authors can save time in
+ future efforts.</p>
+
+ <p class="example">For example, a <code>script</code> element's <code
+ data-x="attr-script-src">src</code> attribute causes the element's contents to be ignored.
+ However, this isn't obvious, especially if the element's contents appear to be executable script
+ &mdash; which can lead to authors spending a lot of time trying to debug the inline script
+ without realizing that it is not executing. To reduce this problem, this specification makes it
+ non-conforming to have executable script in a <code>script</code> element when the <code
+ data-x="attr-script-src">src</code> attribute is present. This means that authors who are
+ validating their documents are less likely to waste time with this kind of mistake.</p>
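+
+  <div class="example">
+
+   <p>For instance, in the following illustrative fragment (the script name and function are
+   placeholders), the inline call is never executed, because the
+   <code data-x="attr-script-src">src</code> attribute is present:</p>
+
+   <pre class="bad">&lt;script src="widgets.js">
+ setUpWidgets(); // never runs
+&lt;/script></pre>
+
+  </div>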
+
+ </dd>
+
+
+ <dt>Errors that involve areas that affect authors migrating to and from XHTML</dt>
+
+ <dd>
+
+ <p>Some authors like to write files that can be interpreted as both XML and HTML with similar
+ results. Though this practice is discouraged in general due to the myriad of subtle
+ complications involved (especially when involving scripting, styling, or any kind of automated
+ serialisation), this specification has a few restrictions intended to at least somewhat mitigate
+  the difficulties. This makes it easier for authors to use this as a transitional step when
+ migrating between HTML and XHTML.</p>
+
+ <p class="example">For example, there are somewhat complicated rules surrounding the <code
+ data-x="attr-lang">lang</code> and <code data-x="attr-xml-lang">xml:lang</code> attributes
+ intended to keep the two synchronized.</p>
+
+ <p class="example">Another example would be the restrictions on the values of <code
+ data-x="">xmlns</code> attributes in the HTML serialisation, which are intended to ensure that
+ elements in conforming documents end up in the same namespaces whether processed as HTML or
+ XML.</p>
+
+ </dd>
+
+
+ <dt>Errors that involve areas reserved for future expansion</dt>
+
+ <dd>
+
+ <p>As with the restrictions on the syntax intended to allow for new syntax in future revisions
+ of the language, some restrictions on the content models of elements and values of attributes
+ are intended to allow for future expansion of the HTML vocabulary.</p>
+
+ <p class="example">For example, limiting the values of the <code
+ data-x="attr-hyperlink-target">target</code> attribute that start with an U+005F LOW LINE
+ character (_) to only specific predefined values allows new predefined values to be introduced
+ at a future time without conflicting with author-defined values.</p>
+
+ </dd>
+
+
+ <dt>Errors that indicate a mis-use of other specifications</dt>
+
+ <dd>
+
+ <p>Certain restrictions are intended to support the restrictions made by other
+ specifications.</p>
+
+ <p class="example">For example, requiring that attributes that take media queries use only
+ <em>valid</em> media queries reinforces the importance of following the conformance rules of
+ that specification.</p>
+
+ </dd>
+
+ </dl>
+
+
+
+ <h3>Suggested reading</h3>
+
+ <!-- NON-NORMATIVE SECTION -->
+
+ <p>The following documents might be of interest to readers of this specification.</p>
+
+ <dl>
+
+ <dt><cite>Character Model for the World Wide Web 1.0: Fundamentals</cite> <a href="#refsCHARMOD">[CHARMOD]</a></dt>
+
+ <dd><blockquote><p>This Architectural Specification provides authors of specifications, software
+ developers, and content developers with a common reference for interoperable text manipulation on
+ the World Wide Web, building on the Universal Character Set, defined jointly by the Unicode
+ Standard and ISO/IEC 10646. Topics addressed include use of the terms 'character', 'encoding' and
+ 'string', a reference processing model, choice and identification of character encodings,
+ character escaping, and string indexing.</p></blockquote></dd>
+
+ <dt><cite>Unicode Security Considerations</cite> <a href="#refsUTR36">[UTR36]</a></dt>
+
+ <dd><blockquote><p>Because Unicode contains such a large number of characters and incorporates
+ the varied writing systems of the world, incorrect usage can expose programs or systems to
+ possible security attacks. This is especially important as more and more products are
+ internationalized. This document describes some of the security considerations that programmers,
+ system analysts, standards developers, and users should take into account, and provides specific
+ recommendations to reduce the risk of problems.</p></blockquote></dd>
+
+ <dt><cite>Web Content Accessibility Guidelines (WCAG) 2.0</cite> <a href="#refsWCAG">[WCAG]</a></dt>
+
+ <dd><blockquote><p>Web Content Accessibility Guidelines (WCAG) 2.0 covers a wide range of
+ recommendations for making Web content more accessible. Following these guidelines will make
+ content accessible to a wider range of people with disabilities, including blindness and low
+ vision, deafness and hearing loss, learning disabilities, cognitive limitations, limited
+ movement, speech disabilities, photosensitivity and combinations of these. Following these
+ guidelines will also often make your Web content more usable to users in
+ general.</p></blockquote></dd>
+
+ <dt class="nodev"><cite>Authoring Tool Accessibility Guidelines (ATAG) 2.0</cite> <a href="#refsATAG">[ATAG]</a></dt>
+
+ <dd class="nodev"><blockquote><p>This specification provides guidelines for designing Web content
+ authoring tools that are more accessible for people with disabilities. An authoring tool that
+ conforms to these guidelines will promote accessibility by providing an accessible user interface
+ to authors with disabilities as well as by enabling, supporting, and promoting the production of
+ accessible Web content by all authors.</p></blockquote></dd>
+
+ <dt class="nodev"><cite>User Agent Accessibility Guidelines (UAAG) 2.0</cite> <a href="#refsUAAG">[UAAG]</a></dt>
+
+ <dd class="nodev"><blockquote><p>This document provides guidelines for designing user agents that
+ lower barriers to Web accessibility for people with disabilities. User agents include browsers
+ and other types of software that retrieve and render Web content. A user agent that conforms to
+ these guidelines will promote accessibility through its own user interface and through other
+ internal facilities, including its ability to communicate with other technologies (especially
+ assistive technologies). Furthermore, all users, not just users with disabilities, should find
+ conforming user agents to be more usable.</p></blockquote></dd>
+
+ </dl>
+
+
+
+ <h2 id="infrastructure">Common infrastructure</h2>
+
+ <h3>Terminology</h3>
+
+ <p>This specification refers to both HTML and XML attributes and IDL attributes, often in the same
+ context. When it is not clear which is being referred to, they are referred to as <dfn
+ data-x="">content attributes</dfn> for HTML and XML attributes, and <dfn data-x="">IDL
+ attributes</dfn> for those defined on IDL interfaces. Similarly, the term "properties" is used for
+ both JavaScript object properties and CSS properties. When these are ambiguous they are qualified
+ as <dfn data-x="">object properties</dfn> and <dfn data-x="">CSS properties</dfn> respectively.</p>
+
+ <p>Generally, when the specification states that a feature applies to <span>the HTML syntax</span>
+ or <span>the XHTML syntax</span>, it also includes the other. When a feature specifically only
+ applies to one of the two languages, it is called out by explicitly stating that it does not apply
+ to the other format, as in "for HTML, ... (this does not apply to XHTML)".</p>
+
+ <p>This specification uses the term <dfn data-x="">document</dfn> to refer to any use of HTML,
+ ranging from short static documents to long essays or reports with rich multimedia, as well as to
+ fully-fledged interactive applications. The term is used to refer both to <code>Document</code>
+ objects and their descendant DOM trees, and to serialised byte streams using the <span data-x="the
+ HTML syntax">HTML syntax</span> or <span data-x="the XHTML syntax">XHTML syntax</span>, depending
+ on context.</p>
+
+ <p>In the context of the DOM structures, the terms <span data-x="HTML documents">HTML
+ document</span> and <span data-x="XML documents">XML document</span> are used as defined in the DOM
+ specification, and refer specifically to two different modes that <code>Document</code> objects
+ can find themselves in. <a href="#refsDOM">[DOM]</a> (Such uses are always hyperlinked to their
+ definition.)</p>
+
+ <p>In the context of byte streams, the term HTML document refers to resources labeled as
+ <code>text/html</code>, and the term XML document refers to resources labeled with an <span>XML
+ MIME type</span>.</p>
+
+ <p>The term <dfn>XHTML document</dfn> is used to refer to both <code>Document</code>s in the <span
+  data-x="XML documents">XML document</span> mode that contain element nodes in the <span>HTML
+ namespace</span>, and byte streams labeled with an <span>XML MIME type</span> that contain
+ elements from the <span>HTML namespace</span>, depending on context.</p>
+
+ <hr>
+
+ <p>For simplicity, terms such as <dfn data-x="">shown</dfn>, <dfn data-x="">displayed</dfn>, and
+ <dfn data-x="">visible</dfn> might sometimes be used when referring to the way a document is
+ rendered to the user. These terms are not meant to imply a visual medium; they must be considered
+ to apply to other media in equivalent ways.</p>
+
+ <div class="nodev">
+
+ <p>When an algorithm B says to return to another algorithm A, it implies that A called B. Upon
+ returning to A, the implementation must continue from where it left off in calling B.</p>
+
+ </div>
+
+ <!-- should find somewhere more appropriate to put this -->
+ <p>The term "transparent black" refers to the colour with red, green, blue, and alpha channels all
+ set to zero.</p>
+
+
+ <h4>Resources</h4>
+
+ <p>The specification uses the term <dfn data-x="">supported</dfn> when referring to whether a user
+ agent has an implementation capable of decoding the semantics of an external resource. A format or
+ type is said to be <i>supported</i> if the implementation can process an external resource of that
+ format or type without critical aspects of the resource being ignored. Whether a specific resource
+ is <i>supported</i> can depend on what features of the resource's format are in use.</p>
+
+ <p class="example">For example, a PNG image would be considered to be in a supported format if its
+ pixel data could be decoded and rendered, even if, unbeknownst to the implementation, the image
+ also contained animation data.</p>
+
+ <p class="example">An MPEG-4 video file would not be considered to be in a supported format if the
+ compression format used was not supported, even if the implementation could determine the
+ dimensions of the movie from the file's metadata.</p>
+
+ <p>What some specifications, in particular the HTTP specification, refer to as a
+ <i>representation</i> is referred to in this specification as a <dfn data-x="">resource</dfn>. <a
+ href="#refsHTTP">[HTTP]</a></p>
+
+ <p>The term <dfn>MIME type</dfn> is used to refer to what is sometimes called an <i>Internet media
+ type</i> in protocol literature. The term <i>media type</i> in this specification is used to refer
+ to the type of media intended for presentation, as used by the CSS specifications. <a
+ href="#refsRFC2046">[RFC2046]</a> <a href="#refsMQ">[MQ]</a></p>
+
+ <p>A string is a <dfn>valid MIME type</dfn> if it matches the <code data-x="">media-type</code>
+ rule defined in section 3.7 "Media Types" of RFC 2616. In particular, a <span>valid MIME
+ type</span> may include MIME type parameters. <a href="#refsHTTP">[HTTP]</a></p>
+
+ <p>A string is a <dfn>valid MIME type with no parameters</dfn> if it matches the <code
+ data-x="">media-type</code> rule defined in section 3.7 "Media Types" of RFC 2616, but does not
+  contain any U+003B SEMICOLON characters (;). In other words, it consists only of a type and
+  subtype, with no MIME type parameters. <a href="#refsHTTP">[HTTP]</a></p>
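+
+  <p class="example">For instance, "<code data-x="">text/html;charset=utf-8</code>" is a
+  <span>valid MIME type</span>, but it is not a <span>valid MIME type with no parameters</span>;
+  "<code data-x="">text/html</code>" is both.</p>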
+
+ <p>The term <dfn>HTML MIME type</dfn> is used to refer to the <span>MIME type</span>
+ <code>text/html</code>.</p>
+
+ <p>A resource's <dfn>critical subresources</dfn> are those that the resource needs to have
+ available to be correctly processed. Which resources are considered critical or not is defined by
+ the specification that defines the resource's format.</p>
+
+ <p>The term <dfn data-x="data protocol"><code data-x="">data:</code> URL</dfn> refers to <span
+ data-x="URL">URLs</span> that use the <code data-x="">data:</code> scheme. <a
+ href="#refsRFC2397">[RFC2397]</a></p>
+
+
+ <h4>XML</h4>
+
+ <p id="html-namespace">To ease migration from HTML to XHTML, UAs conforming to this specification
+ will place elements in HTML in the <code>http://www.w3.org/1999/xhtml</code> namespace, at least
+ for the purposes of the DOM and CSS. The term "<dfn>HTML elements</dfn>", when used in this
+ specification, refers to any element in that namespace, and thus refers to both HTML and XHTML
+ elements.</p>
+
+ <p>Except where otherwise stated, all elements defined or mentioned in this specification are in
+ the <span>HTML namespace</span> ("<code>http://www.w3.org/1999/xhtml</code>"), and all attributes
+ defined or mentioned in this specification have no namespace.</p>
+
+ <p>The term <dfn>element type</dfn> is used to refer to the set of elements that have a given
+ local name and namespace. For example, <code>button</code> elements are elements with the element
+ type <code>button</code>, meaning they have the local name "<code data-x="">button</code>" and
+ (implicitly as defined above) the <span>HTML namespace</span>.</p>
+
+ <p>Attribute names are said to be <dfn>XML-compatible</dfn> if they match the <a
+ href="http://www.w3.org/TR/xml/#NT-Name"><code data-x="">Name</code></a> production defined in XML
+ and they contain no U+003A COLON characters (:). <a href="#refsXML">[XML]</a></p>
+
+ <p>The term <dfn>XML MIME type</dfn> is used to refer to the <span data-x="MIME type">MIME
+ types</span> <code data-x="">text/xml</code>, <code data-x="">application/xml</code>, and any
+ <span>MIME type</span> whose subtype ends with the four characters "<code data-x="">+xml</code>".
+ <a href="#refsRFC3023">[RFC3023]</a></p>
+
+
+ <h4>DOM trees</h4>
+
+ <p>The <dfn>root element of a <code>Document</code> object</dfn> is that <code>Document</code>'s
+ first element child, if any. If it does not have one then the <code>Document</code> has no root
+ element.</p>
+
+ <p>The term <dfn>root element</dfn>, when not referring to a <code>Document</code> object's root
+ element, means the furthest ancestor element node of whatever node is being discussed, or the node
+ itself if it has no ancestors. When the node is a part of the document, then the node's <span>root
+ element</span> is indeed the document's root element; however, if the node is not currently part
+ of the document tree, the root element will be an orphaned node.</p>
+
+ <p>When an element's <span>root element</span> is the <span>root element of a
+ <code>Document</code> object</span>, it is said to be <dfn>in a <code>Document</code></dfn>. An
+ element is said to have been <dfn data-x="insert an element into a document">inserted into a
+ document</dfn> when its <span>root element</span> changes and is now the document's <span>root
+ element</span>. Analogously, an element is said to have been <dfn data-x="remove an element from a
+ document">removed from a document</dfn> when its <span>root element</span> changes from being the
+ document's <span>root element</span> to being another element.</p>
+
+ <p>A node's <dfn>home subtree</dfn> is the subtree rooted at that node's <span>root
+ element</span>. When a node is <span>in a <code>Document</code></span>, its <span>home
+ subtree</span> is that <code>Document</code>'s tree.</p>
+
+ <p>The <code>Document</code> of a <code>Node</code> (such as an element) is the
+ <code>Document</code> that the <code>Node</code>'s <code
+ data-x="dom-Node-ownerDocument">ownerDocument</code> IDL attribute returns. When a
+ <code>Node</code> is <span>in a <code>Document</code></span> then that <code>Document</code> is
+ always the <code>Node</code>'s <code>Document</code>, and the <code>Node</code>'s <code
+ data-x="dom-Node-ownerDocument">ownerDocument</code> IDL attribute thus always returns that
+ <code>Document</code>.</p>
+
+ <p>The <code>Document</code> of a content attribute is the <code>Document</code> of the
+ attribute's element.</p>
+
+ <p>The term <dfn>tree order</dfn> means a pre-order, depth-first traversal of DOM nodes involved
+ (through the <code data-x="dom-Node-parentNode">parentNode</code>/<code
+ data-x="dom-Node-childNodes">childNodes</code> relationship).</p>
+
+ <p>When it is stated that some element or attribute is <dfn data-x="ignore">ignored</dfn>, or
+ treated as some other value, or handled as if it was something else, this refers only to the
+ processing of the node after it is in the DOM. <span class="nodev">A user agent must not mutate the
+ DOM in such situations.</span></p>
+
+ <p>A content attribute is said to <dfn data-x="">change</dfn> value only if its new value is
+ different than its previous value; setting an attribute to a value it already has does not change
+ it.</p>
+
+ <p>The term <dfn data-x="">empty</dfn>, when used of an attribute value, <code>Text</code> node, or
+ string, means that the length of the text is zero (i.e. not even containing spaces or <span>control
+ characters</span>).</p>
+
+
+ <h4>Scripting</h4>
+
+ <p>The construction "a <code>Foo</code> object", where <code>Foo</code> is actually an interface,
+ is sometimes used instead of the more accurate "an object implementing the interface
+ <code>Foo</code>".</p>
+
+ <p>An IDL attribute is said to be <dfn data-x="">getting</dfn> when its value is being retrieved
+ (e.g. by author script), and is said to be <dfn data-x="">setting</dfn> when a new value is
+ assigned to it.</p>
+
+ <p>If a DOM object is said to be <dfn>live</dfn>, then the attributes and methods on that object
+ <span class="nodev">must</span> operate on the actual underlying data, not a snapshot of the
+ data.</p>
+
+ <p>In the contexts of events, the terms <i>fire</i> and <i>dispatch</i> are used as defined in the
+ DOM specification: <dfn data-x="concept-event-fire">firing</dfn> an event means to create and <span
+ data-x="concept-event-dispatch">dispatch</span> it, and <dfn
+ data-x="concept-event-dispatch">dispatching</dfn> an event means to follow the steps that propagate
+ the event through the tree. The term <dfn data-x="concept-events-trusted">trusted event</dfn> is
+ used to refer to events whose <code data-x="dom-event-isTrusted">isTrusted</code> attribute is
+ initialised to true. <a href="#refsDOM">[DOM]</a></p>
+
+
+ <h4>Plugins</h4>
+
+ <p>The term <dfn>plugin</dfn> refers to a user-agent defined set of content handlers used by the
+ user agent that can take part in the user agent's rendering of a <code>Document</code> object, but
+ that neither act as <span data-x="child browsing context">child browsing contexts</span> of the
+ <code>Document</code> nor introduce any <code>Node</code> objects to the <code>Document</code>'s
+ DOM.</p>
+
+ <p>Typically such content handlers are provided by third parties, though a user agent can also
+ designate built-in content handlers as plugins.</p>
+
+ <div class="nodev">
+
+ <p>A user agent must not consider the types <code>text/plain</code> and
+ <code>application/octet-stream</code> as having a registered <span>plugin</span>.</p> <!-- because
+  of the way <object> elements handle those types, if nothing else (it also doesn't make any sense
+ to have a plugin registered for those types, of course) -->
+
+ </div>
+
+ <p class="example">One example of a plugin would be a PDF viewer that is instantiated in a
+ <span>browsing context</span> when the user navigates to a PDF file. This would count as a plugin
+ regardless of whether the party that implemented the PDF viewer component was the same as that
+ which implemented the user agent itself. However, a PDF viewer application that launches separate
+ from the user agent (as opposed to using the same interface) is not a plugin by this
+ definition.</p>
+
+ <p class="note">This specification does not define a mechanism for interacting with plugins, as it
+ is expected to be user-agent- and platform-specific. Some UAs might opt to support a plugin
+ mechanism such as the Netscape Plugin API; others might use remote content converters or have
+ built-in support for certain types. Indeed, this specification doesn't require user agents to
+ support plugins at all. <a href="#refsNPAPI">[NPAPI]</a></p>
+
+ <p>A plugin can be <dfn data-x="concept-plugin-secure">secured</dfn> if it honors the semantics of
+ the <code data-x="attr-iframe-sandbox">sandbox</code> attribute.</p>
+
+ <p class="example">For example, a secured plugin would prevent its contents from creating pop-up
+ windows when the plugin is instantiated inside a sandboxed <code>iframe</code>.</p>
+
+ <div class="nodev">
+
+ <p class="warning">Browsers should take extreme care when interacting with external content
+ intended for <span data-x="plugin">plugins</span>. When third-party software is run with the same
+ privileges as the user agent itself, vulnerabilities in the third-party software become as
+ dangerous as those in the user agent.</p>
+
+  <p>Since different users having different sets of <span data-x="plugin">plugins</span> provides a
+ fingerprinting vector that increases the chances of users being uniquely identified, user agents
+ are encouraged to support the exact same set of <span data-x="plugin">plugins</span> for each
+ user.
+ <!--INSERT FINGERPRINT-->
+ </p>
+
+ </div>
+
+
+
+ <h4 id="encoding-terminology">Character encodings</h4>
+
+ <p>A <dfn data-x="encoding">character encoding</dfn>, or just <i>encoding</i> where that is not
+ ambiguous, is a defined way to convert between byte streams and Unicode strings, as defined in the
+ WHATWG Encoding standard. An <span>encoding</span> has an <dfn>encoding name</dfn> and one or more
+ <dfn data-x="encoding label">encoding labels</dfn>, referred to as the encoding's <i>name</i> and
+ <i>labels</i> in the Encoding standard. <a href="#refsENCODING">[ENCODING]</a></p>
+
+ <p>An <dfn>ASCII-compatible character encoding</dfn> is a single-byte or variable-length
+ <span>encoding</span> in which the bytes 0x09, 0x0A, 0x0C, 0x0D, 0x20 - 0x22, 0x26, 0x27, 0x2C -
+ 0x3F, 0x41 - 0x5A, and 0x61 - 0x7A<!-- is that list ok? do any character sets we want to support
+ do things outside that range? -->, ignoring bytes that are the second and later bytes of multibyte
+ sequences, all correspond to single-byte sequences that map to the same Unicode characters as
+ those bytes in Windows-1252<!--ANSI_X3.4-1968 (US-ASCII)-->. <a href="#refsENCODING">[ENCODING]</a></p>
+
+ <p class="note">This includes such encodings as Shift_JIS, HZ-GB-2312, and variants of ISO-2022,
+ even though it is possible in these encodings for bytes like 0x70 to be part of longer sequences
+ that are unrelated to their interpretation as ASCII. It excludes UTF-16 variants, as well as
+ obsolete legacy encodings such as UTF-7, GSM03.38, and EBCDIC variants.</p>
+
+ <!--
+ We'll have to change that if anyone comes up with a way to have a document that is valid as two
+ different encodings at once, with different <meta charset> elements applying in each case.
+ -->
+
+ <p>The term <dfn>a UTF-16 encoding</dfn> refers to any variant of UTF-16: UTF-16LE or UTF-16BE,
+ regardless of the presence or absence of a BOM. <a href="#refsENCODING">[ENCODING]</a></p>
+
+ <p>The term <dfn>code unit</dfn> is used as defined in the Web IDL specification: a 16 bit
+ unsigned integer, the smallest atomic component of a <code>DOMString</code>. (This is a narrower
+ definition than the one used in Unicode, and is not the same as a <i>code point</i>.) <a
+ href="#refsWEBIDL">[WEBIDL]</a></p>
+
+ <p>The term <dfn>Unicode code point</dfn> means a <i data-x="">Unicode scalar value</i> where
+ possible, and an isolated surrogate code point when not. When a conformance requirement is defined
+ in terms of characters or Unicode code points, a pair of <span data-x="code unit">code units</span>
+ consisting of a high surrogate followed by a low surrogate must be treated as the single code
+ point represented by the surrogate pair, but isolated surrogates must each be treated as the
+ single code point with the value of the surrogate. <a href="#refsUNICODE">[UNICODE]</a></p>
+
+ <p>In this specification, the term <dfn>character</dfn>, when not qualified as <em>Unicode</em>
+ character, is synonymous with the term <span>Unicode code point</span>.</p>
+
+ <p>The term <dfn>Unicode character</dfn> is used to mean a <i data-x="">Unicode scalar value</i>
+ (i.e. any Unicode code point that is not a surrogate code point). <a
+ href="#refsUNICODE">[UNICODE]</a></p>
+
+ <p>The <dfn>code-unit length</dfn> of a string is the number of <span data-x="code unit">code
+ units</span> in that string.</p>
+
+ <p class="note">This complexity results from the historical decision to define the DOM API in
+ terms of 16 bit (UTF-16) <span data-x="code unit">code units</span>, rather than in terms of <span
+ data-x="Unicode character">Unicode characters</span>.</p>
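+
+  <div class="example">
+
+  <p>As a brief illustration, a string containing a single character outside the Basic
+  Multilingual Plane, such as U+1D11E MUSICAL SYMBOL G CLEF, has a <span>code-unit length</span>
+  of two, even though it consists of only one <span data-x="Unicode character">Unicode
+  character</span>:</p>
+
+  <pre>&lt;script>
+ var s = "\uD834\uDD1E"; // U+1D11E expressed as a surrogate pair
+ // s.length evaluates to 2, because DOMString lengths count code units, not code points
+&lt;/script></pre>
+
+  </div>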
+
+
+
+ <div class="nodev">
+
+ <h3>Conformance requirements</h3>
+
+ <p>All diagrams, examples, and notes in this specification are non-normative, as are all sections
+ explicitly marked non-normative. Everything else in this specification is normative.</p>
+
+ <p>The key words "MUST", "MUST NOT", <!--"REQUIRED",--> <!--"SHALL", "SHALL NOT",--> "SHOULD", "SHOULD
+ NOT", <!--"RECOMMENDED", "NOT RECOMMENDED",--> "MAY", and "OPTIONAL" in the normative parts of
+ this document are to be interpreted as described in RFC2119. The key word "OPTIONALLY" in the
+ normative parts of this document is to be interpreted with the same normative meaning as "MAY" and
+ "OPTIONAL". For readability, these words do not appear in all uppercase letters in this
+ specification. <a href="#refsRFC2119">[RFC2119]</a></p>
+
+ <p>Requirements phrased in the imperative as part of algorithms (such as "strip any leading space
+ characters" or "return false and abort these steps") are to be interpreted with the meaning of the
+ key word ("must", "should", "may", etc) used in introducing the algorithm.</p>
+
+ <div class="example">
+
+ <p>For example, were the spec to say:</p>
+
+ <pre>To eat an orange, the user must:
+1. Peel the orange.
+2. Separate each slice of the orange.
+3. Eat the orange slices.</pre>
+
+ <p>...it would be equivalent to the following:</p>
+
+ <pre>To eat an orange:
+1. The user must peel the orange.
+2. The user must separate each slice of the orange.
+3. The user must eat the orange slices.</pre>
+
+ <p>Here the key word is "must".</p>
+
+ <p>The former (imperative) style is generally preferred in this specification for stylistic
+ reasons.</p>
+
+ </div>
+
+ <p>Conformance requirements phrased as algorithms or specific steps may be implemented in any
+ manner, so long as the end result is equivalent. (In particular, the algorithms defined in this
+ specification are intended to be easy to follow, and not intended to be performant.)</p>
+
+ </div>
+
+
+
+ <div class="nodev">
+
+ <h4>Conformance classes</h4>
+
+ <p>This specification describes the conformance criteria for <span class="nodev">user agents
+ (relevant to implementors) and</span> documents<span class="nodev"> (relevant to authors and
+ authoring tool implementors)</span>.</p>
+
+ <p><dfn>Conforming documents</dfn> are those that comply with all the conformance criteria for
+ documents. For readability, some of these conformance requirements are phrased as conformance
+ requirements on authors; such requirements are implicitly requirements on documents: by
+ definition, all documents are assumed to have had an author. (In some cases, that author may
+ itself be a user agent &mdash; such user agents are subject to additional rules, as explained
+ below.)</p>
+
+ <p class="example">For example, if a requirement states that "authors must not use the <code
+ data-x="">foobar</code> element", it would imply that documents are not allowed to contain elements
+ named <code data-x="">foobar</code>.</p>
+
+ <p class="note impl">There is no implied relationship between document conformance requirements
+ and implementation conformance requirements. User agents are not free to handle non-conformant
+ documents as they please; the processing model described in this specification applies to
+ implementations regardless of the conformity of the input documents.</p>
+
+ <p>User agents fall into several (overlapping) categories with different conformance
+ requirements.</p>
+
+ <dl>
+
+ <dt id="interactive">Web browsers and other interactive user agents</dt>
+
+ <dd>
+
+ <p>Web browsers that support <span>the XHTML syntax</span> must process elements and attributes
+ from the <span>HTML namespace</span> found in XML documents as described in this specification,
+ so that users can interact with them, unless the semantics of those elements have been
+ overridden by other specifications.</p>
+
+ <p class="example">A conforming XHTML processor would, upon finding an XHTML <code>script</code>
+ element in an XML document, execute the script contained in that element. However, if the
+ element is found within a transformation expressed in XSLT (assuming the user agent also
+ supports XSLT), then the processor would instead treat the <code>script</code> element as an
+ opaque element that forms part of the transform.</p>
+
+ <p>Web browsers that support <span>the HTML syntax</span> must process documents labeled with an
+ <span>HTML MIME type</span> as described in this specification, so that users can interact with
+ them.</p>
+
+ <p>User agents that support scripting must also be conforming implementations of the IDL
+ fragments in this specification, as described in the Web IDL specification. <a
+ href="#refsWEBIDL">[WEBIDL]</a></p>
+
+ <p class="note">Unless explicitly stated, specifications that override the semantics of HTML
+ elements do not override the requirements on DOM objects representing those elements. For
+ example, the <code>script</code> element in the example above would still implement the
+ <code>HTMLScriptElement</code> interface.</p>
+
+ </dd>
+
+ <dt id="non-interactive">Non-interactive presentation user agents</dt>
+
+ <dd>
+
+ <p>User agents that process HTML and XHTML documents purely to render non-interactive versions
+   of them must comply with the same conformance criteria as Web browsers, except that they are
+ exempt from requirements regarding user interaction.</p>
+
+ <p class="note">Typical examples of non-interactive presentation user agents are printers
+ (static UAs) and overhead displays (dynamic UAs). It is expected that most static
+ non-interactive presentation user agents will also opt to <a href="#non-scripted">lack scripting
+ support</a>.</p>
+
+ <p class="example">A non-interactive but dynamic presentation UA would still execute scripts,
+ allowing forms to be dynamically submitted, and so forth. However, since the concept of "focus"
+ is irrelevant when the user cannot interact with the document, the UA would not need to support
+ any of the focus-related DOM APIs.</p>
+
+ </dd>
+
+ <dt id="renderingUA">Visual user agents that support the suggested default rendering</dt>
+
+ <dd>
+
+ <p>User agents, whether interactive or not, may be designated (possibly as a user option) as
+ supporting the suggested default rendering defined by this specification.</p>
+
+ <p>This is not required. In particular, even user agents that do implement the suggested default
+ rendering are encouraged to offer settings that override this default to improve the experience
+ for the user, e.g. changing the colour contrast, using different focus styles, or otherwise
+ making the experience more accessible and usable to the user.</p>
+
+ <p>User agents that are designated as supporting the suggested default rendering must, while so
+ designated, implement the rules in <a href="#rendering">the rendering section</a> that that
+ section defines as the behavior that user agents are <em>expected</em> to implement.</p>
+
+ </dd>
+
+ <dt id="non-scripted">User agents with no scripting support</dt>
+
+ <dd>
+
+ <p>Implementations that do not support scripting (or which have their scripting features
+ disabled entirely) are exempt from supporting the events and DOM interfaces mentioned in this
+ specification. For the parts of this specification that are defined in terms of an events model
+ or in terms of the DOM, such user agents must still act as if events and the DOM were
+ supported.</p>
+
+ <p class="note">Scripting can form an integral part of an application. Web browsers that do not
+ support scripting, or that have scripting disabled, might be unable to fully convey the author's
+ intent.</p>
+
+ </dd>
+
+
+ <dt>Conformance checkers</dt>
+
+ <dd id="conformance-checkers">
+
+ <p>Conformance checkers must verify that a document conforms to the applicable conformance
+ criteria described in this specification. Automated conformance checkers are exempt from
+ detecting errors that require interpretation of the author's intent (for example, while a
+ document is non-conforming if the content of a <code>blockquote</code> element is not a quote,
+ conformance checkers running without the input of human judgement do not have to check that
+ <code>blockquote</code> elements only contain quoted material).</p>
+
+ <p>Conformance checkers must check that the input document conforms when parsed without a
+ <span>browsing context</span> (meaning that no scripts are run, and that the parser's
+ <span>scripting flag</span> is disabled), and should also check that the input document conforms
+ when parsed with a <span>browsing context</span> in which scripts execute, and that the scripts
+ never cause non-conforming states to occur other than transiently during script execution
+ itself. (This is only a "SHOULD" and not a "MUST" requirement because it has been proven to be
+ impossible. <a href="#refsCOMPUTABLE">[COMPUTABLE]</a>)</p>
+
+ <p>The term "HTML validator" can be used to refer to a conformance checker that itself conforms
+ to the applicable requirements of this specification.</p>
+
+ <div class="note">
+
+ <p>XML DTDs cannot express all the conformance requirements of this specification. Therefore, a
+ validating XML processor and a DTD cannot constitute a conformance checker. Also, since neither
+   of the two authoring formats defined in this specification is an application of SGML, a
+ validating SGML system cannot constitute a conformance checker either.</p>
+
+ <p>To put it another way, there are three types of conformance criteria:</p>
+
+ <ol>
+
+ <li>Criteria that can be expressed in a DTD.</li>
+
+ <li>Criteria that cannot be expressed by a DTD, but can still be checked by a machine.</li>
+
+ <li>Criteria that can only be checked by a human.</li>
+
+ </ol>
+
+ <p>A conformance checker must check for the first two. A simple DTD-based validator only checks
+ for the first class of errors and is therefore not a conforming conformance checker according
+ to this specification.</p>
+
+ </div>
+ </dd>
+
+
+ <dt>Data mining tools</dt>
+
+ <dd id="data-mining">
+
+ <p>Applications and tools that process HTML and XHTML documents for reasons other than to either
+ render the documents or check them for conformance should act in accordance with the semantics
+ of the documents that they process.</p>
+
+ <p class="example">A tool that generates <span data-x="outline">document outlines</span> but
+ increases the nesting level for each paragraph and does not increase the nesting level for each
+ section would not be conforming.</p>
+
+ </dd>
+
+
+ <dt id="editors">Authoring tools and markup generators</dt>
+
+ <dd>
+
+ <p>Authoring tools and markup generators must generate <span>conforming documents</span>.
+ Conformance criteria that apply to authors also apply to authoring tools, where appropriate.</p>
+
+ <p>Authoring tools are exempt from the strict requirements of using elements only for their
+ specified purpose, but only to the extent that authoring tools are not yet able to determine
+ author intent. However, authoring tools must not automatically misuse elements or encourage
+ their users to do so.</p>
+
+ <p class="example">For example, it is not conforming to use an <code>address</code> element for
+ arbitrary contact information; that element can only be used for marking up contact information
+ for the author of the document or section. However, since an authoring tool is likely unable to
+ determine the difference, an authoring tool is exempt from that requirement. This does not mean,
+ though, that authoring tools can use <code>address</code> elements for any block of italics text
+   (for instance); it just means that the authoring tool doesn't have to verify that, when the user
+   uses a tool for inserting contact information for a section, the user really is doing that
+   and not inserting something else instead.</p>
+
+ <p class="note">In terms of conformance checking, an editor has to output documents that conform
+ to the same extent that a conformance checker will verify.</p>
+
+ <p>When an authoring tool is used to edit a non-conforming document, it may preserve the
+ conformance errors in sections of the document that were not edited during the editing session
+ (i.e. an editing tool is allowed to round-trip erroneous content). However, an authoring tool
+ must not claim that the output is conformant if errors have been so preserved.</p>
+
+ <p>Authoring tools are expected to come in two broad varieties: tools that work from structure
+ or semantic data, and tools that work on a What-You-See-Is-What-You-Get media-specific editing
+ basis (WYSIWYG).</p>
+
+ <p>The former is the preferred mechanism for tools that author HTML, since the structure in the
+ source information can be used to make informed choices regarding which HTML elements and
+ attributes are most appropriate.</p>
+
+ <p>However, WYSIWYG tools are legitimate. WYSIWYG tools should use elements they know are
+ appropriate, and should not use elements that they do not know to be appropriate. This might in
+ certain extreme cases mean limiting the use of flow elements to just a few elements, like
+ <code>div</code>, <code>b</code>, <code>i</code>, and <code>span</code>, and making liberal use
+ of the <code data-x="attr-style">style</code> attribute.</p>
+
+ <p>All authoring tools, whether WYSIWYG or not, should make a best effort attempt at enabling
+ users to create well-structured, semantically rich, media-independent content.</p>
+
+ </dd>
+
+ </dl>
+
+ <p id="hardwareLimitations">User agents may impose implementation-specific limits on otherwise
+ unconstrained inputs, e.g. to prevent denial of service attacks, to guard against running out of
+ memory, or to work around platform-specific limitations.
+ <!--INSERT FINGERPRINT-->
+ </p>
+
+ <p>For compatibility with existing content and prior specifications, this specification describes
+ two authoring formats: one based on XML (referred to as <span>the XHTML syntax</span>), and one
+ using a <a href="#writing">custom format</a> inspired by SGML (referred to as <span>the HTML
+ syntax</span>). Implementations must support at least one of these two formats, although
+ supporting both is encouraged.</p>
+
+ <p>Some conformance requirements are phrased as requirements on elements, attributes, methods or
+ objects. Such requirements fall into two categories: those describing content model restrictions,
+ and those describing implementation behavior. Those in the former category are requirements on
+ documents and authoring tools. Those in the second category are requirements on user agents.
+ Similarly, some conformance requirements are phrased as requirements on authors; such requirements
+ are to be interpreted as conformance requirements on the documents that authors produce. (In other
+ words, this specification does not distinguish between conformance criteria on authors and
+ conformance criteria on documents.)</p>
+
+ </div>
+
+
+ <div class="nodev">
+
+ <h4>Dependencies</h4>
+
+ <p>This specification relies on several other underlying specifications.</p>
+
+ <dl>
+
+ <dt>Unicode and Encoding</dt>
+
+ <dd>
+
+ <p>The Unicode character set is used to represent textual data, and the WHATWG Encoding standard
+ defines requirements around <span data-x="encoding">character encodings</span>. <a
+ href="#refsUNICODE">[UNICODE]</a></p>
+
+ <p class="note">This specification <a href="#encoding-terminology">introduces terminology</a>
+ based on the terms defined in those specifications, as described earlier.</p>
+
+ <p>The following terms are used as defined in the WHATWG Encoding standard: <a
+ href="#refsENCODING">[ENCODING]</a></p>
+
+ <ul class="brief">
+
+ <li><dfn>Getting an encoding</dfn>
+
+ <li>The <dfn>encoder</dfn> and <dfn>decoder</dfn> algorithms for various encodings, including
+ the <dfn>UTF-8 encoder</dfn> and <dfn>UTF-8 decoder</dfn>
+
+ <li>The generic <dfn>decode</dfn> algorithm which takes a byte stream and an encoding and
+ returns a character stream
+
+ <li>The <dfn>UTF-8 decode</dfn> algorithm which takes a byte stream and returns a character
+ stream, additionally stripping one leading UTF-8 Byte Order Mark (BOM), if any
+
+ </ul>
+
+ <p class="note">The <span>UTF-8 decoder</span> is distinct from the <i>UTF-8 decode
+ algorithm</i>. The latter first strips a Byte Order Mark (BOM), if any, and then invokes the
+ former.</p>
+
+ <p>For readability, character encodings are sometimes referenced in this specification with a
+ case that differs from the canonical case given in the WHATWG Encoding standard. (For example,
+ "UTF-16LE" instead of "utf-16le".)</p>
+
+ </dd>
+
+
+ <dt>XML</dt>
+
+ <dd>
+
+ <p>Implementations that support <span>the XHTML syntax</span> must support some version of XML,
+ as well as its corresponding namespaces specification, because that syntax uses an XML
+ serialisation with namespaces. <a href="#refsXML">[XML]</a> <a href="#refsXMLNS">[XMLNS]</a></p>
+
+ </dd>
+
+
+ <dt>URLs</dt>
+
+ <dd>
+
+ <p>The following terms are defined in the WHATWG URL standard: <a href="#refsURL">[URL]</a></p>
+
+ <ul class="brief">
+ <li><dfn>URL</dfn>
+ <li><dfn>Absolute URL</dfn>
+ <li><dfn>Relative URL</dfn>
+ <li><dfn data-x="concept-url-scheme-relative">Relative schemes</dfn>
+ <li>The <dfn>URL parser</dfn>
+ <li><dfn>Parsed URL</dfn>
+ <li>The <dfn data-x="concept-url-scheme">scheme</dfn> component of a <span>parsed URL</span>
+ <li>The <dfn data-x="concept-url-scheme-data">scheme data</dfn> component of a <span>parsed URL</span>
+ <li>The <dfn data-x="concept-url-username">username</dfn> component of a <span>parsed URL</span>
+ <li>The <dfn data-x="concept-url-password">password</dfn> component of a <span>parsed URL</span>
+ <li>The <dfn data-x="concept-url-host">host</dfn> component of a <span>parsed URL</span>
+ <li>The <dfn data-x="concept-url-port">port</dfn> component of a <span>parsed URL</span>
+ <li>The <dfn data-x="concept-url-path">path</dfn> component of a <span>parsed URL</span>
+ <li>The <dfn data-x="concept-url-query">query</dfn> component of a <span>parsed URL</span>
+ <li>The <dfn data-x="concept-url-fragment">fragment</dfn> component of a <span>parsed URL</span>
+ <li><dfn data-x="concept-url-parse-error">Parse errors</dfn> from the <span>URL parser</span>
+ <li>The <dfn data-x="concept-url-serializer">URL serializer</dfn>
+ <li><dfn>Default encode set</dfn>
+ <li><dfn>Percent encode</dfn>
+ <li><dfn>UTF-8 percent encode</dfn>
+ <li><dfn>Percent decode</dfn>
+ <li><dfn>Decoder error</dfn>
+ <li>The <dfn>domain label to ASCII</dfn> algorithm</li>
+ <li>The <dfn>domain label to Unicode</dfn> algorithm</li>
+ <li><dfn><code>URLUtils</code></dfn> interface
+ <li><dfn><code>URLUtilsReadOnly</code></dfn> interface
+ <li><dfn data-x="dom-url-href"><code>href</code> attribute</dfn>
+ <li><dfn data-x="dom-url-protocol"><code>protocol</code> attribute</dfn>
+ <li>The <dfn data-x="concept-uu-get-the-base">get the base</dfn> hook for <code>URLUtils</code>
+ <li>The <dfn data-x="concept-uu-update">update steps</dfn> hook for <code>URLUtils</code>
+ <li>The <dfn data-x="concept-uu-set-the-input">set the input</dfn> algorithm for <code>URLUtils</code>
+ <li>The <dfn data-x="concept-uu-query-encoding">query encoding</dfn> of an <code>URLUtils</code> object
+ <li>The <dfn data-x="concept-uu-input">input</dfn> of an <code>URLUtils</code> object
+ <li>The <dfn data-x="concept-uu-url">url</dfn> of an <code>URLUtils</code> object
+ </ul>
+
+ </dd>
+
+
+ <dt>Cookies</dt>
+
+ <dd>
+
+ <p>The following terms are defined in the Cookie specification: <a
+ href="#refsCOOKIES">[COOKIES]</a></p>
+
+ <ul class="brief">
+ <li><dfn>cookie-string</dfn>
+ <li><dfn>receives a set-cookie-string</dfn>
+ </ul>
+
+ </dd>
+
+
+ <dt>Fetch</dt>
+
+ <dd>
+
+ <p>The following terms are defined in the WHATWG Fetch specification: <a href="#refsFETCH">[FETCH]</a></p>
+
+ <ul class="brief">
+ <li><dfn>cross-origin request</dfn>
+ <li><dfn>cross-origin request status</dfn>
+ <li><dfn>custom request headers</dfn>
+ <li><dfn>simple cross-origin request</dfn>
+ <li><dfn>redirect steps</dfn>
+ <li><dfn>omit credentials flag</dfn>
+ <li><dfn>resource sharing check</dfn>
+ </ul>
+
+ <p class="note">This specification does not yet use the "fetch" algorithm from the WHATWG Fetch
+ specification. It will be updated to do so in due course.</p>
+
+ </dd>
+
+
+<!--TOPIC:DOM APIs-->
+
+ <dt>Web IDL</dt>
+
+ <dd>
+
+ <p>The IDL fragments in this specification must be interpreted as required for conforming IDL
+ fragments, as described in the Web IDL specification. <a href="#refsWEBIDL">[WEBIDL]</a></p>
+
+ <p>The terms <dfn>supported property indices</dfn>, <dfn>determine the value of an indexed
+ property</dfn>, <dfn>support named properties</dfn>, <dfn>supported property names</dfn>,
+ <dfn>unenumerable</dfn>, <dfn>determine the value of a named property</dfn>, <dfn>platform array
+ objects</dfn>, and <dfn data-x="dfn-read-only-array">read only</dfn> (when applied to arrays)
+ are used as defined in the Web IDL specification. The algorithm to <dfn>convert a DOMString to a
+ sequence of Unicode characters</dfn> is similarly that defined in the Web IDL specification.</p>
+
+ <p>When this specification requires a user agent to <dfn>create a <code>Date</code> object</dfn>
+ representing a particular time (which could be the special value Not-a-Number), the milliseconds
+ component of that time, if any, must be truncated to an integer, and the time value of the newly
+ created <code>Date</code> object must represent the resulting truncated time.</p>
+
+ <p class="example">For instance, given the time 23045 millionths of a second after 01:00 UTC on
+ January 1st 2000, i.e. the time 2000-01-01T00:00:00.023045Z, then the <code>Date</code> object
+ created representing that time would represent the same time as that created representing the
+ time 2000-01-01T00:00:00.023Z, 45 millionths earlier. If the given time is NaN, then the result
+ is a <code>Date</code> object that represents a time value NaN (indicating that the object does
+ not represent a specific instant of time).</p>
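+
+ <div class="example">
+
+ <p>The following JavaScript sketch is non-normative and merely illustrates the truncation
+ described above; the helper name <code data-x="">createDateObject</code> is invented for this
+ example, and the time is assumed to be given as a (possibly fractional) number of milliseconds
+ since the epoch:</p>
+
+ <pre>function createDateObject(timeInMilliseconds) {
+  // A time value of NaN yields a Date object that does not represent a specific instant.
+  if (Number.isNaN(timeInMilliseconds))
+    return new Date(NaN);
+  // Drop any sub-millisecond component by truncating to an integer.
+  return new Date(Math.trunc(timeInMilliseconds));
+}
+
+// 2000-01-01T00:00:00.023045Z becomes 2000-01-01T00:00:00.023Z.
+createDateObject(Date.UTC(2000, 0, 1) + 23.045);</pre>
+
+ </div>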
+
+ </dd>
+
+
+ <dt>JavaScript</dt>
+
+ <dd>
+
+ <p>Some parts of the language described by this specification only support JavaScript as the
+ underlying scripting language. <a href="#refsECMA262">[ECMA262]</a></p>
+
+ <p class="note">The term "JavaScript" is used to refer to ECMA262, rather than the official term
+ ECMAScript, since the term JavaScript is more widely known. Similarly, the <span>MIME
+ type</span> used to refer to JavaScript in this specification is <code
+ data-x="">text/javascript</code>, since that is the most commonly used type, <span data-x="willful
+ violation">despite it being an officially obsoleted type</span> according to RFC 4329. <a
+ href="#refsRFC4329">[RFC4329]</a></p>
+
+ <p>The term <dfn>JavaScript global environment</dfn> refers to the <i data-x="">global
+ environment</i> concept defined in the ECMAScript specification.</p>
+
+ <p>The ECMAScript <dfn data-x="js-SyntaxError"><code>SyntaxError</code></dfn> exception is also
+ defined in the ECMAScript specification. <a href="#refsECMA262">[ECMA262]</a></p>
+
+ <p>The <dfn>ArrayBuffer</dfn> and related object types and underlying concepts from the
+ ECMAScript Specification are used for several features in this specification. <a
+ href="#refsECMA262">[ECMA262]</a></p>
+
+ <p>The following helper IDL is used for referring to <code>ArrayBuffer</code>-related types:</p>
+
+ <pre class="idl">typedef (<dfn>Int8Array</dfn> or <dfn>Uint8Array</dfn> or <dfn>Uint8ClampedArray</dfn> or
+ <dfn>Int16Array</dfn> or <dfn>Uint16Array</dfn> or
+ <dfn>Int32Array</dfn> or <dfn>Uint32Array</dfn> or
+ <dfn>Float32Array</dfn> or <dfn>Float64Array</dfn> or
+ <dfn>DataView</dfn>) <dfn>ArrayBufferView</dfn>;</pre>
+
+ <p class="note">In particular, the <code>Uint8ClampedArray</code> type is used by some <span
+ data-x="ImageData">2D canvas APIs</span>, and the <a href="#network"><code>WebSocket</code>
+ API</a> uses <code>ArrayBuffer</code> objects for handling binary frames.</p>
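+
+ <div class="example">
+
+ <p>As a non-normative illustration only, the following JavaScript creates an
+ <code>ArrayBuffer</code> and two <code>ArrayBufferView</code> objects over the same storage:</p>
+
+ <pre>var buffer = new ArrayBuffer(16);          // sixteen bytes of raw storage
+var bytes = new Uint8Array(buffer);        // an ArrayBufferView over all sixteen bytes
+var words = new Uint32Array(buffer, 0, 4); // the same storage viewed as four 32-bit integers
+
+bytes[0] = 255;
+// words[0] now reflects the byte written through the other view.</pre>
+
+ </div>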
+
+ </dd>
+
+
+ <dt>DOM</dt>
+
+ <dd>
+
+ <p>The Document Object Model (DOM) is a representation &mdash; a model &mdash; of a document and
+ its content. The DOM is not just an API; the conformance criteria of HTML implementations are
+ defined, in this specification, in terms of operations on the DOM. <a
+ href="#refsDOM">[DOM]</a></p>
+
+ <p>Implementations must support DOM and the events defined in DOM Events, because this
+ specification is defined in terms of the DOM, and some of the features are defined as extensions
+ to the DOM interfaces. <a href="#refsDOM">[DOM]</a> <a href="#refsDOMEVENTS">[DOMEVENTS]</a></p>
+
+ <p>In particular, the following features are defined in the DOM specification: <a
+ href="#refsDOM">[DOM]</a></p> <!-- aka DOM Core or DOMCORE -->
+
+ <ul class="brief">
+
+ <li><dfn><code>Attr</code></dfn> interface</li>
+ <li><dfn><code>Comment</code></dfn> interface</li>
+ <li><dfn><code>DOMImplementation</code></dfn> interface</li>
+ <li><dfn data-x="DOM Document"><code>Document</code></dfn> interface</li>
+ <li><dfn><code>XMLDocument</code></dfn> interface</li>
+ <li><dfn><code>DocumentFragment</code></dfn> interface</li>
+ <li><dfn><code>DocumentType</code></dfn> interface</li>
+ <li><dfn><code>DOMException</code></dfn> interface</li>
+ <li><dfn><code>ChildNode</code></dfn> interface</li>
+ <li><dfn><code>Element</code></dfn> interface</li>
+ <li><dfn><code>Node</code></dfn> interface</li>
+ <li><dfn><code>NodeList</code></dfn> interface</li>
+ <li><dfn><code>ProcessingInstruction</code></dfn> interface</li>
+ <li><dfn><code>Text</code></dfn> interface</li>
+
+ <li><dfn><code>HTMLCollection</code></dfn> interface</li>
+ <li><dfn data-x="dom-HTMLCollection-item"><code>item()</code></dfn> method</li>
+ <li>The terms <dfn>collections</dfn> and <dfn>represented by the collection</dfn></li>
+
+ <li><dfn><code>DOMTokenList</code></dfn> interface</li>
+ <li><dfn><code>DOMSettableTokenList</code></dfn> interface</li>
+
+ <li><dfn data-x="dom-DOMImplementation-createDocument"><code>createDocument()</code></dfn> method</li>
+ <li><dfn data-x="dom-DOMImplementation-createHTMLDocument"><code>createHTMLDocument()</code></dfn> method</li>
+ <li><dfn data-x="dom-Document-createElement"><code>createElement()</code></dfn> method</li>
+ <li><dfn data-x="dom-Document-createElementNS"><code>createElementNS()</code></dfn> method</li>
+ <li><dfn data-x="dom-Document-getElementById"><code>getElementById()</code></dfn> method</li>
+ <li><dfn data-x="dom-Node-insertBefore"><code>insertBefore()</code></dfn> method</li>
+
+ <li><dfn data-x="dom-Node-ownerDocument"><code>ownerDocument</code></dfn> attribute</li>
+ <li><dfn data-x="dom-Node-childNodes"><code>childNodes</code></dfn> attribute</li>
+ <li><dfn data-x="dom-Node-localName"><code>localName</code></dfn> attribute</li>
+ <li><dfn data-x="dom-Node-parentNode"><code>parentNode</code></dfn> attribute</li>
+ <li><dfn data-x="dom-Node-namespaceURI"><code>namespaceURI</code></dfn> attribute</li>
+ <li><dfn data-x="dom-Element-tagName"><code>tagName</code></dfn> attribute</li>
+ <li><dfn data-x="dom-Element-id"><code>id</code></dfn> attribute</li>
+ <li><dfn><code>textContent</code></dfn> attribute</li>
+
+ <li>The <dfn data-x="concept-node-insert">insert</dfn>, <dfn data-x="concept-node-append">append</dfn>, <dfn data-x="concept-node-remove">remove</dfn>, <dfn data-x="concept-node-replace">replace</dfn>, and <dfn data-x="concept-node-adopt">adopt</dfn> algorithms for nodes</li>
+ <li>The <dfn>nodes are inserted</dfn> and <dfn>nodes are removed</dfn> concepts</li>
+ <li>An element's <dfn data-x="concept-node-adopt-ext">adopting steps</dfn></li>
+ <li>The <dfn>attribute list</dfn> concept.</li>
+ <li>The <dfn data-x="concept-cd-data">data</dfn> of a text node.</li>
+
+ <li><dfn><code>Event</code></dfn> interface</li>
+ <li><dfn><code>EventTarget</code></dfn> interface</li>
+ <li><dfn><code>EventInit</code></dfn> dictionary type</li>
+ <li><dfn data-x="dom-Event-target"><code>target</code></dfn> attribute</li>
+ <li><dfn data-x="dom-Event-isTrusted"><code>isTrusted</code></dfn> attribute</li>
+ <li>The <dfn data-x="concept-event-type">type</dfn> of an event</li>
+ <li>The concept of an <dfn data-x=concept-event-listener>event listener</dfn> and the <span data-x=concept-event-listener>event listeners</span> associated with an <code>EventTarget</code></li>
+ <li>The concept of a <dfn>target override</dfn></li>
+ <li>The concept of a regular <dfn>event parent</dfn> and a <dfn>cross-boundary event parent</dfn></li> <!-- see bug 18780 -->
+
+ <li>The <dfn data-x="document's character encoding">encoding</dfn> (herein the <i>character encoding</i>) and <dfn data-x="concept-document-content-type">content type</dfn> of a <code>Document</code></li>
+ <li>The distinction between <dfn>XML documents</dfn> and <dfn>HTML documents</dfn></li>
+ <li>The terms <dfn>quirks mode</dfn>, <dfn>limited-quirks mode</dfn>, and <dfn>no-quirks mode</dfn></li>
+ <li>The algorithm to <dfn data-x="concept-node-clone">clone</dfn> a <code>Node</code>, and the concept of <dfn data-x="concept-node-clone-ext">cloning steps</dfn> used by that algorithm</li>
+ <li>The concept of <dfn>base URL change steps</dfn> and the definition of what happens when an element is <dfn>affected by a base URL change</dfn></li>
+ <li>The concept of an element's <dfn data-x="concept-id">unique identifier (ID)</dfn></li>
+
+ <li>The concept of a DOM <dfn data-x="concept-range">range</dfn>, and the terms <dfn data-x="concept-range-start">start</dfn>, <dfn data-x="concept-range-end">end</dfn>, and <dfn data-x="concept-range-bp">boundary point</dfn> as applied to ranges.</li>
+
+ <li><dfn><code>MutationObserver</code></dfn> interface</li>
+ <li>The <dfn data-x="concept-mo-invoke">invoke <code>MutationObserver</code> objects</dfn> algorithm</li>
+
+ <li><dfn>Promise</dfn> interface</li>
+ <li>The <dfn data-x="concept-resolver">resolver</dfn> concept</li>
+ <li>The <dfn data-x="concept-resolver-fulfill">fulfill</dfn> and <dfn data-x="concept-resolver-reject">reject</dfn> algorithms</li>
+
+ </ul>
+
+ <p>The term <dfn>throw</dfn> in this specification is used as defined in the DOM specification.
+ The following <code>DOMException</code> types are defined in the DOM specification: <a
+ href="#refsDOM">[DOM]</a></p>
+
+ <ol class="brief">
+ <li value="1"><dfn><code>IndexSizeError</code></dfn></li>
+ <li value="3"><dfn><code>HierarchyRequestError</code></dfn></li>
+ <li value="4"><dfn><code>WrongDocumentError</code></dfn></li>
+ <li value="5"><dfn><code>InvalidCharacterError</code></dfn></li>
+ <li value="7"><dfn><code>NoModificationAllowedError</code></dfn></li>
+ <li value="8"><dfn><code>NotFoundError</code></dfn></li>
+ <li value="9"><dfn><code>NotSupportedError</code></dfn></li>
+ <li value="11"><dfn><code>InvalidStateError</code></dfn></li>
+ <li value="12"><dfn><code>SyntaxError</code></dfn></li>
+ <li value="13"><dfn><code>InvalidModificationError</code></dfn></li>
+ <li value="14"><dfn><code>NamespaceError</code></dfn></li>
+ <li value="15"><dfn><code>InvalidAccessError</code></dfn></li>
+ <li value="18"><dfn><code>SecurityError</code></dfn></li>
+ <li value="19"><dfn><code>NetworkError</code></dfn></li>
+ <li value="20"><dfn><code>AbortError</code></dfn></li>
+ <li value="21"><dfn><code>URLMismatchError</code></dfn></li>
+ <li value="22"><dfn><code>QuotaExceededError</code></dfn></li>
+ <li value="23"><dfn><code>TimeoutError</code></dfn></li>
+ <li value="24"><dfn><code>InvalidNodeTypeError</code></dfn></li>
+ <li value="25"><dfn><code>DataCloneError</code></dfn></li>
+ </ol>
+
+ <p class="example">For example, to <i>throw a <code>TimeoutError</code> exception</i>, a user
+ agent would construct a <code>DOMException</code> object whose type was the string "<code
+ data-x="">TimeoutError</code>" (and whose code was the number 23, for legacy reasons) and
+ actually throw that object as an exception.</p>
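+
+ <div class="example">
+
+ <p>As a non-normative JavaScript sketch of the above, assuming a user agent that exposes a
+ <code>DOMException</code> constructor:</p>
+
+ <pre>var ex = new DOMException("The operation timed out.", "TimeoutError");
+ex.name; // "TimeoutError"
+ex.code; // 23, for legacy reasons
+throw ex;</pre>
+
+ </div>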
+
+ <p>The following features are defined in the DOM Events specification: <a
+ href="#refsDOMEVENTS">[DOMEVENTS]</a></p>
+
+ <ul class="brief">
+
+ <li><dfn><code>MouseEvent</code></dfn> interface</li>
+ <li><dfn><code>MouseEventInit</code></dfn> dictionary type</li>
+
+ <li>The <dfn><code>FocusEvent</code></dfn> interface and its <dfn data-x="dom-FocusEvent-relatedTarget"><code>relatedTarget</code></dfn> attribute</li>
+
+ <li>The <dfn><code>UIEvent</code></dfn> interface's <dfn data-x="dom-UIEvent-detail"><code>detail</code></dfn> attribute</li>
+
+ <li><dfn data-x="event-click"><code>click</code></dfn> event</li>
+ <li><dfn data-x="event-dblclick"><code>dblclick</code></dfn> event</li>
+ <li><dfn data-x="event-mousedown"><code>mousedown</code></dfn> event</li>
+ <li><dfn data-x="event-mouseenter"><code>mouseenter</code></dfn> event</li>
+ <li><dfn data-x="event-mouseleave"><code>mouseleave</code></dfn> event</li>
+ <li><dfn data-x="event-mousemove"><code>mousemove</code></dfn> event</li>
+ <li><dfn data-x="event-mouseout"><code>mouseout</code></dfn> event</li>
+ <li><dfn data-x="event-mouseover"><code>mouseover</code></dfn> event</li>
+ <li><dfn data-x="event-mouseup"><code>mouseup</code></dfn> event</li>
+ <li><dfn data-x="event-mousewheel"><code>mousewheel</code></dfn> event</li>
+
+ <li><dfn data-x="event-keydown"><code>keydown</code></dfn> event</li>
+ <li><dfn data-x="event-keyup"><code>keyup</code></dfn> event</li>
+ <li><dfn data-x="event-keypress"><code>keypress</code></dfn> event</li>
+
+ </ul>
+
+ <p>The following features are defined in the Touch Events specification: <a
+ href="#refsTOUCH">[TOUCH]</a></p>
+
+ <ul class="brief">
+
+ <li><dfn><code>Touch</code></dfn> interface</li>
+
+ <li><dfn>Touch point</dfn> concept</li>
+
+ </ul>
+
+ <p>This specification sometimes uses the term <dfn data-x="">name</dfn> to refer to the event's
+ <code data-x="dom-event-type">type</code>; as in, "an event named <code data-x="">click</code>"
+ or "if the event name is <code data-x="">keypress</code>". The terms "name" and "type" for
+ events are synonymous.</p>
+
+ <p>The following features are defined in the DOM Parsing and Serialization specification: <a
+ href="#refsDOMPARSING">[DOMPARSING]</a></p>
+
+ <ul class="brief">
+ <li><dfn data-x="dom-innerHTML"><code>innerHTML</code></dfn></li>
+ <li><dfn data-x="dom-outerHTML"><code>outerHTML</code></dfn></li>
+ </ul>
+
+ <p class="note">User agents are also encouraged to implement the features described in the
+ <cite>HTML Editing APIs</cite> and <cite><code>UndoManager</code> and DOM Transaction</cite>
+ specifications.
+ <a href="#refsEDITING">[EDITING]</a>
+ <a href="#refsUNDO">[UNDO]</a>
+ </p>
+
+ <p>The following parts of the Fullscreen specification are referenced from this specification,
+ in part to define the rendering of <code>dialog</code> elements, and also to define how the
+ Fullscreen API interacts with the sandboxing features in HTML: <a
+ href="#refsFULLSCREEN">[FULLSCREEN]</a></p>
+
+ <ul class="brief">
+ <li>The <dfn>top layer</dfn> concept</li>
+ <li><dfn data-x="dom-element-requestFullscreen"><code>requestFullscreen()</code></dfn>
+ <li>The <dfn>fullscreen enabled flag</dfn></li>
+ <li>The <dfn>fully exit fullscreen</dfn> algorithm</li>
+ </ul>
+
+ </dd>
+
+
+
+ <dt>File API</dt>
+
+ <dd>
+
+ <p>This specification uses the following features defined in the File API specification: <a
+ href="#refsFILEAPI">[FILEAPI]</a></p>
+
+ <ul class="brief">
+
+ <li><dfn><code>Blob</code></dfn></li>
+ <li><dfn><code>File</code></dfn></li>
+ <li><dfn><code>FileList</code></dfn></li>
+ <li><dfn data-x="dom-Blob-close"><code>Blob.close()</code></dfn></li>
+ <li><dfn data-x="dom-Blob-type"><code>Blob.type</code></dfn></li>
+ <li>The concept of <dfn data-x="file-error-read">read errors</dfn></li>
+ </ul>
+
+ </dd>
+
+
+ <dt>XMLHttpRequest</dt>
+
+ <dd>
+
+ <p>This specification references the XMLHttpRequest specification to describe how the two
+ specifications interact and to use its <code>ProgressEvent</code> features. The following
+ features and terms are defined in the XMLHttpRequest specification: <a
+ href="#refsXHR">[XHR]</a></p>
+
+ <ul class="brief">
+
+ <li><dfn><code>XMLHttpRequest</code></dfn>
+ <li><dfn><code>ProgressEvent</code></dfn>
+ <li><dfn data-x="fire a progress event">Fire a progress event named <var data-x="">e</var></dfn>
+
+ </ul>
+
+ </dd>
+
+
+<!--TOPIC:HTML-->
+
+ <dt>Media Queries</dt>
+
+ <dd>
+
+ <p>Implementations must support the Media Queries language. <a href="#refsMQ">[MQ]</a></p>
+
+ </dd>
+
+
+ <dt>CSS modules</dt>
+
+ <dd>
+
+ <p>While support for CSS as a whole is not required of implementations of this specification
+ (though it is encouraged, at least for Web browsers), some features are defined in terms of
+ specific CSS requirements.</p>
+
+ <p>In particular, some features require that a string be <dfn>parsed as a CSS &lt;color&gt;
+ value</dfn>. When parsing a CSS value, user agents are required by the CSS specifications to
+ apply some error handling rules. These apply to this specification also. <a
+ href="#refsCSSCOLOR">[CSSCOLOR]</a> <a href="#refsCSS">[CSS]</a></p>
+
+ <p class="example">For example, user agents are required to close all open constructs upon
+ finding the end of a style sheet unexpectedly. Thus, when parsing the string "<code
+ data-x="">rgb(0,0,0</code>" (with a missing close-parenthesis) for a colour value, the close
+ parenthesis is implied by this error handling rule, and a value is obtained (the colour 'black').
+ However, the similar construct "<code data-x="">rgb(0,0,</code>" (with both a missing parenthesis
+ and a missing "blue" value) cannot be parsed, as closing the open construct does not result in a
+ viable value.</p>
+
+ <p>The term <dfn>CSS element reference identifier</dfn> is used as defined in the <cite>CSS
+ Image Values and Replaced Content</cite> specification to define the API that declares
+ identifiers for use with the CSS 'element()' function. <a
+ href="#refsCSSIMAGES">[CSSIMAGES]</a></p>
+
+ <p>Similarly, the term <dfn>provides a paint source</dfn> is used as defined in the <cite>CSS
+ Image Values and Replaced Content</cite> specification to define the interaction of certain HTML
+ elements with the CSS 'element()' function. <a href="#refsCSSIMAGES">[CSSIMAGES]</a></p>
+
+ <p>The term <dfn>default object size</dfn> is also defined in the <cite>CSS Image Values and
+ Replaced Content</cite> specification. <a href="#refsCSSIMAGES">[CSSIMAGES]</a></p>
+
+ <p>Implementations that support scripting must support the CSS Object Model. The following
+ features and terms are defined in the CSSOM specifications: <a href="#refsCSSOM">[CSSOM]</a> <a
+ href="#refsCSSOMVIEW">[CSSOMVIEW]</a>
+
+ <ul class="brief">
+ <li><dfn><code>Screen</code></dfn></li>
+ <li><dfn><code>LinkStyle</code></dfn></li>
+ <li><dfn><code>CSSStyleDeclaration</code></dfn></li>
+ <li><dfn data-x="dom-CSSStyleDeclaration-cssText"><code>cssText</code></dfn> attribute of <code>CSSStyleDeclaration</code></li>
+ <li><dfn><code>StyleSheet</code></dfn></li>
+ <li>The terms <dfn>create a CSS style sheet</dfn>, <dfn>remove a CSS style sheet</dfn>, and <dfn>associated CSS style sheet</dfn></li>
+ <li><dfn data-x="CSS style sheet">CSS style sheets</dfn> and their properties:
+ <dfn data-x="concept-css-style-sheet-type">type</dfn>,
+ <dfn data-x="concept-css-style-sheet-location">location</dfn>,
+ <dfn data-x="concept-css-style-sheet-parent-CSS-style-sheet">parent CSS style sheet</dfn>,
+ <dfn data-x="concept-css-style-sheet-owner-node">owner node</dfn>,
+ <dfn data-x="concept-css-style-sheet-owner-CSS-rule">owner CSS rule</dfn>,
+ <dfn data-x="concept-css-style-sheet-media">media</dfn>,
+ <dfn data-x="concept-css-style-sheet-title">title</dfn>,
+ <dfn data-x="concept-css-style-sheet-alternate-flag">alternate flag</dfn>,
+ <dfn data-x="concept-css-style-sheet-disabeld-flag">disabled flag</dfn>,
+ <dfn data-x="concept-css-style-sheet-CSS-rules">CSS rules</dfn>,
+ <dfn data-x="concept-css-style-sheet-origin-clean-flag">origin-clean flag</dfn>
+ </li>
+ <li><dfn>Alternative style sheet sets</dfn> and the <dfn>preferred style sheet set</dfn></li>
+ <li><dfn>Serializing a CSS value</dfn></li>
+ <li><dfn>Scroll an element into view</dfn></li>
+ <li><dfn>Scroll to the beginning of the document</dfn></li>
+ <li>The <dfn data-x="event-resize"><code>resize</code></dfn> event</li>
+ <li>The <dfn data-x="event-scroll"><code>scroll</code></dfn> event</li>
+ </ul>
+
+ <p>The term <dfn>environment encoding</dfn> is defined in the <cite>CSS Syntax</cite>
+ specification. <a href="#refsCSSSYNTAX">[CSSSYNTAX]</a></p>
+
+ <p>The term <dfn>CSS styling attribute</dfn> is defined in the <cite>CSS Style Attributes</cite>
+ specification. <a href="#refsCSSATTR">[CSSATTR]</a></p>
+
+ <p>The <code>CanvasRenderingContext2D</code> object's use of fonts depends on the features
+ described in the CSS <cite>Fonts</cite> and <cite>Font Load Events</cite> specifications, including in particular
+ <dfn><code>FontLoader</code></dfn>. <a href="#refsCSSFONTS">[CSSFONTS]</a> <a
+ href="#refsCSSFONTLOAD">[CSSFONTLOAD]</a></p>
+
+ </dd>
+
+
+<!--TOPIC:Canvas-->
+
+ <dt>SVG</dt>
+
+ <dd>
+
+ <p>The following interface is defined in the SVG specification: <a href="#refsSVG">[SVG]</a></p>
+
+ <ul class="brief">
+ <li><dfn><code>SVGMatrix</code></dfn>
+ </ul>
+
+ <!-- mention that the parser supports it? -->
+
+ </dd>
+
+
+ <dt>WebGL</dt>
+
+ <dd>
+
+ <p>The following interface is defined in the WebGL specification: <a
+ href="#refsWEBGL">[WEBGL]</a></p>
+
+ <ul class="brief">
+ <li><dfn><code>WebGLRenderingContext</code></dfn>
+ </ul>
+
+ </dd>
+
+
+<!--TOPIC:HTML-->
+
+ <!-- mention that the parser supports mathml? -->
+
+
+<!--TOPIC:Video Text Tracks-->
+
+ <dt>WebVTT</dt>
+
+ <dd>
+
+ <p>Implementations may support <dfn>WebVTT</dfn> as a text track format for subtitles, captions,
+ chapter titles, metadata, etc, for media resources. <a href="#refsWEBVTT">[WEBVTT]</a></p>
+
+ <p>The following terms, used in this specification, are defined in the WebVTT specification:</p>
+
+ <ul class="brief">
+ <li><dfn>WebVTT file</dfn>
+ <li><dfn>WebVTT file using cue text</dfn>
+ <li><dfn>WebVTT file using chapter title text</dfn>
+ <li><dfn>WebVTT file using only nested cues</dfn>
+ <li><dfn>WebVTT parser</dfn>
+ <li>The <dfn>rules for updating the display of WebVTT text tracks</dfn>
+ <li>The <dfn>rules for interpreting WebVTT cue text</dfn>
+ <li>The WebVTT <dfn>text track cue writing direction</dfn>
+ </ul>
+
+ </dd>
+
+
+<!--TOPIC:WebSocket API-->
+
+ <dt>The WebSocket protocol</dt>
+
+ <dd>
+
+ <p>The following terms are defined in the WebSocket protocol specification: <a
+ href="#refsWSP">[WSP]</a></p>
+
+ <ul class="brief">
+
+ <li><dfn>establish a WebSocket connection</dfn>
+ <li><dfn>the WebSocket connection is established</dfn>
+ <li><dfn>validate the server's response</dfn>
+ <li><dfn>extensions in use</dfn>
+ <li><dfn>subprotocol in use</dfn>
+ <li><dfn>headers to send appropriate cookies</dfn>
+ <li><dfn>cookies set during the server's opening handshake</dfn>
+ <li><dfn>a WebSocket message has been received</dfn>
+ <li><dfn>send a WebSocket Message</dfn>
+ <li><dfn>fail the WebSocket connection</dfn>
+ <li><dfn>close the WebSocket connection</dfn>
+ <li><dfn>start the WebSocket closing handshake</dfn>
+ <li><dfn>the WebSocket closing handshake is started</dfn>
+ <li><dfn>the WebSocket connection is closed</dfn> (possibly <i data-x="">cleanly</i>)
+ <li><dfn>the WebSocket connection close code</dfn>
+ <li><dfn>the WebSocket connection close reason</dfn>
+
+ </ul>
+
+ </dd>
+
+
+<!--TOPIC:HTML-->
+
+ <dt>ARIA</dt>
+
+ <dd>
+
+ <p>The term <dfn>strong native semantics</dfn> is used as defined in the ARIA specification.
+ The term <dfn>default implicit ARIA semantics</dfn> has the same meaning as the term <i>implicit
+ WAI-ARIA semantics</i> as used in the ARIA specification. <a href="#refsARIA">[ARIA]</a></p>
+
+ <p>The <dfn data-x="attr-aria-role"><code>role</code></dfn> and <code data-x="">aria-*</code>
+ attributes are defined in the ARIA specification. <a href="#refsARIA">[ARIA]</a></p>
+
+
+ </dd>
+
+
+ </dl>
+
+ <p>This specification does not <em>require</em> support of any particular network protocol, style
+ sheet language, scripting language, or any of the DOM specifications beyond those required in the
+ list above. However, the language described by this specification is biased towards CSS as the
+ styling language, JavaScript as the scripting language, and HTTP as the network protocol, and
+ several features assume that those languages and protocols are in use.</p>
+
+ <p>A user agent that implements the HTTP protocol must implement the Web Origin Concept
+ specification and the HTTP State Management Mechanism specification (Cookies) as well. <a
+ href="#refsHTTP">[HTTP]</a> <a href="#refsORIGIN">[ORIGIN]</a> <a
+ href="#refsCOOKIES">[COOKIES]</a></p>
+
+ <p class="note">This specification might have certain additional requirements on character
+ encodings, image formats, audio formats, and video formats in the respective sections.</p>
+
+ </div>
+
+ </div>
+
+
+ <h4>Extensibility</h4>
+
+ <p>Vendor-specific proprietary user agent extensions to this specification are strongly
+ discouraged. Documents must not use such extensions, as doing so reduces interoperability and
+ fragments the user base, allowing only users of specific user agents to access the content in
+ question.</p>
+
+ <div class="nodev">
+
+ <p>If such extensions are nonetheless needed, e.g. for experimental purposes, then vendors are
+ strongly urged to use one of the following extension mechanisms:</p>
+
+ <ul>
+
+ <li><p>For markup-level features that can be limited to the XML serialisation and need not be
+ supported in the HTML serialisation, vendors should use the namespace mechanism to define custom
+ namespaces in which the non-standard elements and attributes are supported.</p>
+
+ <li>
+
+ <p>For markup-level features that are intended for use with <span>the HTML syntax</span>,
+ extensions should be limited to new attributes of the form "<code data-x="">x-<var
+ data-x="">vendor</var>-<var data-x="">feature</var></code>", where <var data-x="">vendor</var> is a
+ short string that identifies the vendor responsible for the extension, and <var
+ data-x="">feature</var> is the name of the feature. New element names should not be created.
+ Using attributes for such extensions exclusively allows extensions from multiple vendors to
+ co-exist on the same element, which would not be possible with elements. Using the "<code
+ data-x="">x-<var data-x="">vendor</var>-<var data-x="">feature</var></code>" form allows extensions
+ to be made without risk of conflicting with future additions to the specification.</p>
+
+ <div class="example">
+
+ <p>For instance, a browser named "FerretBrowser" could use "ferret" as a vendor prefix, while a
+ browser named "Mellblom Browser" could use "mb". If both of these browsers invented extensions
+ that turned elements into scratch-and-sniff areas, an author experimenting with these features
+ could write:</p>
+
+ <pre>&lt;p>This smells of lemons!
+&lt;span x-ferret-smellovision x-ferret-smellcode="LEM01"
+ x-mb-outputsmell x-mb-smell="lemon juice">&lt;/span>&lt;/p></pre>
+
+ </div>
+
+ </li>
+
+ </ul>
+
+ <p>Attribute names beginning with the two characters "<code data-x="">x-</code>" are reserved for
+ user agent use and are guaranteed to never be formally added to the HTML language. For
+ flexibility, attribute names containing underscores (the U+005F LOW LINE character) are also
+ reserved for experimental purposes and are guaranteed to never be formally added to the HTML
+ language.</p>
+
+ <p class="note">Pages that use such attributes are by definition non-conforming.</p>
+
+ <p>For DOM extensions, e.g. new methods and IDL attributes, the new members should be prefixed by
+ vendor-specific strings to prevent clashes with future versions of this specification.</p>
+
+ <p>For events, experimental event types should be prefixed with vendor-specific strings.</p>
+
+ <div class="example">
+
+ <p>For example, if a user agent called "Pleas<!--e h-->old" were to add an event to indicate when
+ the user is going up in an elevator, it could use the prefix "<code data-x="">pleasold</code>" and
+ thus name the event "<code data-x="">pleasoldgoingup</code>", possibly with an event handler
+ attribute named "<code data-x="">onpleasoldgoingup</code>".</p>
+
+ </div>
+
+ <p>All extensions must be defined so that the use of extensions neither contradicts nor causes the
+ non-conformance of functionality defined in the specification.</p> <!-- thanks to QA Framework -->
+
+ <div class="example">
+
+ <p>For example, while strongly discouraged from doing so, an implementation "Foo Browser" could
+ add a new IDL attribute "<code data-x="">fooTypeTime</code>" to a control's DOM interface that
+ returned the time it took the user to select the current value of a control (say). On the other
+ hand, defining a new control that appears in a form's <code
+ data-x="dom-form-elements">elements</code> array would be in violation of the above requirement,
+ as it would violate the definition of <code data-x="dom-form-elements">elements</code> given in
+ this specification.</p>
+
+ </div>
+
+ <p>When adding new <span data-x="reflect">reflecting</span> IDL attributes corresponding to content
+ attributes of the form "<code data-x="">x-<var data-x="">vendor</var>-<var
+ data-x="">feature</var></code>", the IDL attribute should be named "<code data-x=""><var
+ data-x="">vendor</var><var data-x="">Feature</var></code>" (i.e. the "<code data-x="">x</code>" is
+ dropped from the IDL attribute's name).</p>
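+
+ <div class="example">
+
+ <p>Continuing the hypothetical "FerretBrowser" example above, and purely as a non-normative
+ sketch, the content attribute "<code data-x="">x-ferret-smellcode</code>" would be reflected by
+ an IDL attribute named "<code data-x="">ferretSmellcode</code>", which only that hypothetical
+ browser would expose:</p>
+
+ <pre>var span = document.querySelector("span[x-ferret-smellcode]");
+span.ferretSmellcode;                    // reflects the x-ferret-smellcode content attribute
+span.ferretSmellcode = "LEM02";          // setting it updates the content attribute in turn
+span.getAttribute("x-ferret-smellcode"); // "LEM02"</pre>
+
+ </div>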
+
+ </div>
+
+ <hr>
+
+ <p>When vendor-neutral extensions to this specification are needed, either this specification can
+ be updated accordingly, or an extension specification can be written that overrides the
+ requirements in this specification. When someone applying this specification to their activities
+ decides that they will recognise the requirements of such an extension specification, it becomes
+ an <dfn data-x="other applicable specifications">applicable specification</dfn> for the purposes of
+ conformance requirements in this specification.</p>
+
+ <p class="note">Someone could write a specification that defines any arbitrary byte stream as
+ conforming, and then claim that their random junk is conforming. However, that does not mean that
+ their random junk actually is conforming for everyone's purposes: if someone else decides that
+ that specification does not apply to their work, then they can quite legitimately say that the
+ aforementioned random junk is just that, junk, and not conforming at all. As far as conformance
+ goes, what matters in a particular community is what that community <em>agrees</em> is
+ applicable.</p>
+
+ <div class="nodev">
+
+ <hr>
+
+ <p>User agents must treat elements and attributes that they do not understand as semantically
+ neutral; leaving them in the DOM (for DOM processors), and styling them according to CSS (for CSS
+ processors), but not inferring any meaning from them.</p>
+
+<!--ADD-TOPIC:Security-->
+ <p>When support for a feature is disabled (e.g. as an emergency measure to mitigate a security
+ problem, or to aid in development, or for performance reasons), user agents must act as if they
+ had no support for the feature whatsoever, and as if the feature was not mentioned in this
+ specification. For example, if a particular feature is accessed via an attribute in a Web IDL
+ interface, the attribute itself would be omitted from the objects that implement that interface
+ &mdash; leaving the attribute on the object but making it return null or throw an exception is
+ insufficient.</p>
+<!--REMOVE-TOPIC:Security-->
+
+ </div>
+
+
+ <div class="nodev">
+
+ <h4>Interactions with XPath and XSLT</h4>
+
+ <p id="xpath-1.0-processors">Implementations of XPath 1.0 that operate on <span>HTML
+ documents</span> parsed or created in the manners described in this specification (e.g. as part of
+ the <code data-x="">document.evaluate()</code> API) must act as if the following edit was applied
+ to the XPath 1.0 specification.</p>
+
+ <p>First, remove this paragraph:</p>
+
+ <blockquote cite="http://www.w3.org/TR/1999/REC-xpath-19991116#node-tests">
+
+ <p>A <a href="http://www.w3.org/TR/REC-xml-names#NT-QName">QName</a> in the node test is expanded
+ into an <a href="http://www.w3.org/TR/1999/REC-xpath-19991116#dt-expanded-name">expanded-name</a>
+ using the namespace declarations from the expression context. This is the same way expansion is
+ done for element type names in start and end-tags except that the default namespace declared with
+ <code data-x="">xmlns</code> is not used: if the <a
+ href="http://www.w3.org/TR/REC-xml-names#NT-QName">QName</a> does not have a prefix, then the
+ namespace URI is null (this is the same way attribute names are expanded). It is an error if the
+ <a href="http://www.w3.org/TR/REC-xml-names#NT-QName">QName</a> has a prefix for which there is
+ no namespace declaration in the expression context.</p>
+
+ </blockquote>
+
+ <p>Then, insert in its place the following:</p>
+
+ <blockquote cite="http://www.w3.org/Bugs/Public/show_bug.cgi?id=7059#c37">
+
+ <p>A QName in the node test is expanded into an expanded-name using the namespace declarations
+ from the expression context. If the QName has a prefix, then there must be a<!-- added 2009-10-27
+ - http://www.w3.org/Bugs/Public/show_bug.cgi?id=8062 --> namespace declaration for this prefix in
+ the expression context, and the corresponding<!-- typo fixed 2009-10-27 -
+ http://www.w3.org/Bugs/Public/show_bug.cgi?id=8063 --> namespace URI is the one that is
+ associated with this prefix. It is an error if the QName has a prefix for which there is no
+ namespace declaration in the expression context. </p>
+
+ <p>If the QName has no prefix and the principal node type of the axis is element, then the
+ default element namespace is used. Otherwise if the QName has no prefix, the namespace URI is
+ null. The default element namespace is a member of the context for the XPath expression. The
+ value of the default element namespace when executing an XPath expression through the DOM3 XPath
+ API is determined in the following way:</p>
+
+ <ol>
+
+ <li>If the context node is from an HTML DOM, the default element namespace is
+ "http://www.w3.org/1999/xhtml".</li>
+
+ <li>Otherwise, the default element namespace URI is null.</li>
+
+ </ol>
+
+ <p class="note">This is equivalent to adding the default element namespace feature of XPath 2.0
+ to XPath 1.0, and using the HTML namespace as the default element namespace for HTML documents.
+ It is motivated by the desire to have implementations be compatible with legacy HTML content
+ while still supporting the changes that this specification introduces to HTML regarding the
+ namespace used for HTML elements, and by the desire to use XPath 1.0 rather than XPath 2.0.</p>
+
+ </blockquote>
+
+ <p class="note">This change is a <span>willful violation</span> of the XPath 1.0 specification,
+ motivated by desire to have implementations be compatible with legacy content while still
+ supporting the changes that this specification introduces to HTML regarding which namespace is
+ used for HTML elements. <a href="#refsXPATH10">[XPATH10]</a></p> <!-- note: version matters for
+ this ref -->
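+
+ <div class="example">
+
+ <p>As a non-normative illustration, consider the following JavaScript run against a document
+ parsed as HTML. The <code>p</code> elements are in the HTML namespace, yet the unprefixed QName
+ "<code data-x="">p</code>" still matches them, because the default element namespace for the
+ expression is "<code data-x="">http://www.w3.org/1999/xhtml</code>" as described above:</p>
+
+ <pre>var result = document.evaluate("//p", document, null,
+                               XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null);
+result.snapshotLength; // the number of p elements in the document</pre>
+
+ </div>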
+
+ <hr>
+
+ <p id="dom-based-xslt-1.0-processors">XSLT 1.0 processors outputting to a DOM when the output
+ method is "html" (either explicitly or via the defaulting rule in XSLT 1.0) are affected as
+ follows:</p>
+
+ <p>If the transformation program outputs an element in no namespace, the processor must, prior to
+ constructing the corresponding DOM element node, change the namespace of the element to the
+ <span>HTML namespace</span>, <span data-x="converted to ASCII lowercase">ASCII-lowercase</span> the
+ element's local name, and <span data-x="converted to ASCII lowercase">ASCII-lowercase</span> the
+ names of any non-namespaced attributes on the element.</p>
+
+ <p class="note">This requirement is a <span>willful violation</span> of the XSLT 1.0
+ specification, required because this specification changes the namespaces and case-sensitivity
+ rules of HTML in a manner that would otherwise be incompatible with DOM-based XSLT
+ transformations. (Processors that serialise the output are unaffected.) <a
+ href="#refsXSLT10">[XSLT10]</a></p> <!-- note: version matters for this ref -->
+
+ <hr>
+
+ <p>This specification does not specify precisely how XSLT processing interacts with the <span>HTML
+ parser</span> infrastructure (for example, whether an XSLT processor acts as if it puts any
+ elements into a <span>stack of open elements</span>). However, XSLT processors must <span>stop
+ parsing</span> if they successfully complete, and must set the <span>current document
+ readiness</span> first to "<code data-x="">interactive</code>"<!-- this synchronously fires an
+ event --> and then to "<code data-x="">complete</code>"<!-- this also synchronously fires an event
+ --> if they are aborted.</p>
+
+ <hr>
+
+ <p>This specification does not specify how XSLT interacts with the <span
+ data-x="navigate">navigation</span> algorithm, how it fits in with the <span>event loop</span>, nor
+ how error pages are to be handled (e.g. whether XSLT errors are to replace an incremental XSLT
+ output, or are rendered inline, etc).</p>
+
+ <p class="note">There are also additional non-normative comments regarding the interaction of XSLT
+ and HTML <a href="#scriptTagXSLT">in the <code>script</code> element section</a>, and of
+ XSLT, XPath, and HTML <a href="#template-XSLT-XPath">in the <code>template</code> element
+ section</a>.</p>
+
+ </div>
+
+
+
+
+ <h3>Case-sensitivity and string comparison</h3>
+
+ <p>Comparing two strings in a <dfn>case-sensitive</dfn> manner means comparing them exactly, code
+ point for code point.</p>
+
+ <p>Comparing two strings in an <dfn>ASCII case-insensitive</dfn> manner means comparing them
+ exactly, code point for code point, except that the characters in the range U+0041 to U+005A (i.e.
+ LATIN CAPITAL LETTER A to LATIN CAPITAL LETTER Z) and the corresponding characters in the range
+ U+0061 to U+007A (i.e. LATIN SMALL LETTER A to LATIN SMALL LETTER Z) are considered to also
+ match.</p>
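+
+ <div class="example">
+
+ <p>The following JavaScript function is a non-normative sketch of an <span>ASCII
+ case-insensitive</span> comparison; the name <code data-x="">asciiCaseInsensitiveMatch</code> is
+ invented for this example. Only the two ASCII letter ranges are folded together; other code
+ points must match exactly:</p>
+
+ <pre>function asciiCaseInsensitiveMatch(a, b) {
+  if (a.length !== b.length)
+    return false;
+  for (var i = 0; i &lt; a.length; i++) {
+    var x = a.charCodeAt(i);
+    var y = b.charCodeAt(i);
+    if (x >= 0x41 &amp;&amp; x &lt;= 0x5A) x += 0x20; // fold LATIN CAPITAL LETTER A..Z only
+    if (y >= 0x41 &amp;&amp; y &lt;= 0x5A) y += 0x20;
+    if (x !== y)
+      return false;
+  }
+  return true;
+}
+
+asciiCaseInsensitiveMatch("UTF-8", "utf-8");   // true
+asciiCaseInsensitiveMatch("résumé", "RÉSUMÉ"); // false: É is outside the ASCII range</pre>
+
+ </div>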
+
+ <p>Comparing two strings in a <dfn>compatibility caseless</dfn> manner means using the Unicode
+ <i>compatibility caseless match</i> operation to compare the two strings, with no language-specific tailorings. <a
+ href="#refsUNICODE">[UNICODE]</a></p>
+
+ <p>Except where otherwise stated, string comparisons must be performed in a
+ <span>case-sensitive</span> manner.</p>
+
+
+ <div class="nodev">
+
+ <p><dfn data-x="converted to ASCII uppercase">Converting a string to ASCII uppercase</dfn> means
+ replacing all characters in the range U+0061 to U+007A (i.e. LATIN SMALL LETTER A to LATIN SMALL
+ LETTER Z) with the corresponding characters in the range U+0041 to U+005A (i.e. LATIN CAPITAL
+ LETTER A to LATIN CAPITAL LETTER Z).</p>
+
+ <p><dfn data-x="converted to ASCII lowercase">Converting a string to ASCII lowercase</dfn> means
+ replacing all characters in the range U+0041 to U+005A (i.e. LATIN CAPITAL LETTER A to LATIN
+ CAPITAL LETTER Z) with the corresponding characters in the range U+0061 to U+007A (i.e. LATIN
+ SMALL LETTER A to LATIN SMALL LETTER Z).</p>
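+
+ <div class="example">
+
+ <p>A non-normative JavaScript sketch of <span data-x="converted to ASCII lowercase">converting a string to ASCII lowercase</span>;
+ the function name is invented for this example. Unlike <code data-x="">toLowerCase()</code>,
+ which applies the full Unicode case mappings, only the characters U+0041 to U+005A are
+ affected:</p>
+
+ <pre>function convertToASCIILowercase(s) {
+  return s.replace(/[A-Z]/g, function (c) {
+    return String.fromCharCode(c.charCodeAt(0) + 0x20);
+  });
+}
+
+convertToASCIILowercase("UTF-8 Ünicöde"); // "utf-8 Ünicöde" (the Ü is left unchanged)</pre>
+
+ </div>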
+
+ </div>
+
+
+ <p>A string <var data-x="">pattern</var> is a <dfn>prefix match</dfn> for a string <var
+ data-x="">s</var> when <var data-x="">pattern</var> is not longer than <var data-x="">s</var> and
+ truncating <var data-x="">s</var> to <var data-x="">pattern</var>'s length leaves the two strings as
+ matches of each other.</p>
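+
+ <div class="example">
+
+ <p>A non-normative JavaScript sketch of the <span>prefix match</span> definition above; the
+ function name is invented for this example, and the comparison is <span>case-sensitive</span>:</p>
+
+ <pre>function isPrefixMatch(pattern, s) {
+  return s.length >= pattern.length &amp;&amp; s.substring(0, pattern.length) === pattern;
+}
+
+isPrefixMatch("image/", "image/png"); // true
+isPrefixMatch("image/png", "image/"); // false: pattern is longer than s</pre>
+
+ </div>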
+
+
+
+ <h3>Common microsyntaxes</h3>
+
+ <p>There are various places in HTML that accept particular data types, such as dates or numbers.
+ This section describes what the conformance criteria for content in those formats are, and how
+ to parse them.</p>
+
+ <div class="nodev">
+
+ <p class="note">Implementors are strongly urged to carefully examine any third-party libraries
+ they might consider using to implement the parsing of syntaxes described below. For example, date
+ libraries are likely to implement error handling behavior that differs from what is required in
+ this specification, since error-handling behavior is often not defined in specifications that
+ describe date syntaxes similar to those used in this specification, and thus implementations tend
+ to vary greatly in how they handle errors.</p>
+
+ </div>
+
+
+ <div class="nodev">
+
+ <h4>Common parser idioms</h4>
+
+ </div>
+
+ <p>The <dfn data-x="space character">space characters</dfn>, for the purposes of this
+ specification, are U+0020 SPACE, U+0009 CHARACTER TABULATION (tab), U+000A LINE FEED (LF), U+000C
+ FORM FEED (FF), and U+000D CARRIAGE RETURN (CR).</p>
+
+ <p>The <dfn data-x="White_Space">White_Space characters</dfn> are those that have the Unicode
+ property "White_Space" in the Unicode <code data-x="">PropList.txt</code> data file. <a
+ href="#refsUNICODE">[UNICODE]</a></p>
+
+ <p class="note">This should not be confused with the "White_Space" value (abbreviated "WS") of the
+ "Bidi_Class" property in the <code data-x="">Unicode.txt</code> data file.</p>
+
+ <p>The <dfn>control characters</dfn> are those whose Unicode "General_Category" property has the
+ value "Cc" in the Unicode <code data-x="">UnicodeData.txt</code> data file. <a
+ href="#refsUNICODE">[UNICODE]</a></p>
+
+ <p>The <dfn>uppercase ASCII letters</dfn> are the characters in the range U+0041 LATIN CAPITAL
+ LETTER A to U+005A LATIN CAPITAL LETTER Z.</p>
+
+ <p>The <dfn>lowercase ASCII letters</dfn> are the characters in the range U+0061 LATIN SMALL
+ LETTER A to U+007A LATIN SMALL LETTER Z.</p>
+
+ <p>The <dfn>ASCII digits</dfn> are the characters in the range U+0030 DIGIT ZERO (0) to U+0039
+ DIGIT NINE (9).</p>
+
+ <p>The <dfn>alphanumeric ASCII characters</dfn> are those that are either <span>uppercase ASCII
+ letters</span>, <span>lowercase ASCII letters</span>, or <span>ASCII digits</span>.</p>
+
+ <p>The <dfn>ASCII hex digits</dfn> are the characters in the ranges U+0030 DIGIT ZERO (0) to
+ U+0039 DIGIT NINE (9), U+0041 LATIN CAPITAL LETTER A to U+0046 LATIN CAPITAL LETTER F, and U+0061
+ LATIN SMALL LETTER A to U+0066 LATIN SMALL LETTER F.</p>
+
+ <p>The <dfn>uppercase ASCII hex digits</dfn> are the characters in the ranges U+0030 DIGIT ZERO (0) to
+ U+0039 DIGIT NINE (9) and U+0041 LATIN CAPITAL LETTER A to U+0046 LATIN CAPITAL LETTER F only.</p>
+
+ <p>The <dfn>lowercase ASCII hex digits</dfn> are the characters in the ranges U+0030 DIGIT ZERO
+ (0) to U+0039 DIGIT NINE (9) and U+0061 LATIN SMALL LETTER A to U+0066 LATIN SMALL LETTER F
+ only.</p>
+
+ <div class="nodev">
+
+ <p>Some of the micro-parsers described below follow the pattern of having an <var
+ data-x="">input</var> variable that holds the string being parsed, and having a <var
+ data-x="">position</var> variable pointing at the next character to parse in <var
+ data-x="">input</var>.</p>
+
+ <p>For parsers based on this pattern, a step that requires the user agent to <dfn>collect a
+ sequence of characters</dfn> means that the following algorithm must be run, with <var
+ data-x="">characters</var> being the set of characters that can be collected:</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">input</var> and <var data-x="">position</var> be the same variables as
+ those of the same name in the algorithm that invoked these steps.</p></li>
+
+ <li><p>Let <var data-x="">result</var> be the empty string.</p></li>
+
+ <li><p>While <var data-x="">position</var> doesn't point past the end of <var data-x="">input</var>
+ and the character at <var data-x="">position</var> is one of the <var data-x="">characters</var>,
+ append that character to the end of <var data-x="">result</var> and advance <var
+ data-x="">position</var> to the next character in <var data-x="">input</var>.</p></li>
+
+ <li><p>Return <var data-x="">result</var>.</p></li>
+
+ </ol>
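+
+ <div class="example">
+
+ <p>A non-normative JavaScript sketch of the algorithm above; the names used are invented for
+ this example. Since <var data-x="">position</var> is shared with the calling algorithm, it is
+ modelled here as a one-element array:</p>
+
+ <pre>function collectSequenceOfCharacters(input, positionRef, isCollectable) {
+  var result = "";
+  while (positionRef[0] &lt; input.length &amp;&amp; isCollectable(input.charAt(positionRef[0]))) {
+    result += input.charAt(positionRef[0]);
+    positionRef[0] += 1;
+  }
+  return result;
+}
+
+var position = [0];
+collectSequenceOfCharacters("42px", position,
+  function (c) { return /[0-9]/.test(c); }); // returns "42"
+position[0];                                 // 2, now pointing at the "p"</pre>
+
+ </div>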
+
+ <p>The step <dfn>skip whitespace</dfn> means that the user agent must <span>collect a sequence of
+ characters</span> that are <span data-x="space character">space characters</span>. The step
+ <dfn>skip White_Space characters</dfn> means that the user agent must <span>collect a sequence of
+ characters</span> that are <span>White_Space</span> characters. In both cases, the collected
+ characters are not used. <a href="#refsUNICODE">[UNICODE]</a></p>
+
+ <p>When a user agent is to <dfn>strip line breaks</dfn> from a string, the user agent must remove
+ any U+000A LINE FEED (LF) and U+000D CARRIAGE RETURN (CR) characters from that string.</p>
+
+ <p>When a user agent is to <dfn>strip leading and trailing whitespace</dfn> from a string, the
+ user agent must remove all <span data-x="space character">space characters</span> that are at the
+ start or end of the string.</p>
+
+ <p>When a user agent is to <dfn>strip and collapse whitespace</dfn> in a string, it must replace
+ any sequence of one or more consecutive <span data-x="space character">space characters</span> in
+ that string with a single U+0020 SPACE character, and then <span>strip leading and trailing
+ whitespace</span> from that string.</p>
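+
+ <div class="example">
+
+ <p>Non-normative JavaScript sketches of the three operations above; the function names are
+ invented for this example, and the character classes match the <span data-x="space character">space characters</span>
+ rather than JavaScript's <code data-x="">\s</code> class:</p>
+
+ <pre>function stripLineBreaks(s) {
+  return s.replace(/[\n\r]/g, "");
+}
+
+function stripLeadingAndTrailingWhitespace(s) {
+  return s.replace(/^[ \t\n\f\r]+/, "").replace(/[ \t\n\f\r]+$/, "");
+}
+
+function stripAndCollapseWhitespace(s) {
+  return stripLeadingAndTrailingWhitespace(s.replace(/[ \t\n\f\r]+/g, " "));
+}
+
+stripAndCollapseWhitespace("  a \t b\n"); // "a b"</pre>
+
+ </div>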
+
+ <p>When a user agent has to <dfn>strictly split a string</dfn> on a particular delimiter character
+ <var data-x="">delimiter</var>, it must use the following algorithm:</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">input</var> be the string being parsed.</p></li>
+
+ <li><p>Let <var data-x="">position</var> be a pointer into <var data-x="">input</var>, initially
+ pointing at the start of the string.</p></li>
+
+ <li><p>Let <var data-x="">tokens</var> be an ordered list of tokens, initially empty.</p></li>
+
+ <li><p>While <var data-x="">position</var> is not past the end of <var data-x="">input</var>:</p>
+
+ <ol>
+
+ <li><p><span>Collect a sequence of characters</span> that are not the <var
+ data-x="">delimiter</var> character.</p></li>
+
+ <li><p>Append the string collected in the previous step to <var data-x="">tokens</var>.</p></li>
+
+ <li><p>Advance <var data-x="">position</var> to the next character in <var
+ data-x="">input</var>.</p></li> <!-- skips past the delimiter -->
+
+ </ol>
+
+ </li>
+
+ <li><p>Return <var data-x="">tokens</var>.</p></li>
+
+ </ol>
+
+ <p class="note">For the special cases of splitting a string <span data-x="split a string on
+ spaces">on spaces</span> and <span data-x="split a string on commas">on commas</span>, this
+ algorithm does not apply (those algorithms also perform <span data-x="strip leading and trailing
+ whitespace">whitespace trimming</span>).</p>
+
+ </div>
+
+
+
+ <h4>Boolean attributes</h4>
+
+ <p>A number of attributes are <dfn data-x="boolean attribute">boolean attributes</dfn>. The
+ presence of a boolean attribute on an element represents the true value, and the absence of the
+ attribute represents the false value.</p>
+
+ <p>If the attribute is present, its value must either be the empty string or a value that is an
+ <span>ASCII case-insensitive</span> match for the attribute's canonical name, with no leading or
+ trailing whitespace.</p>
+
+ <p class="note">The values "true" and "false" are not allowed on boolean attributes. To represent
+ a false value, the attribute has to be omitted altogether.</p>
+
+ <div class="example">
+
+ <p>Here is an example of a checkbox that is checked and disabled. The <code
+ data-x="attr-input-checked">checked</code> and <code data-x="attr-fe-disabled">disabled</code>
+ attributes are the boolean attributes.</p>
+
+ <pre>&lt;label>&lt;input type=checkbox checked name=cheese disabled> Cheese&lt;/label></pre>
+
+ <p>This could be equivalently written as follows:</p>
+
+ <pre>&lt;label>&lt;input type=checkbox checked=checked name=cheese disabled=disabled> Cheese&lt;/label></pre>
+
+ <p>You can also mix styles; the following is still equivalent:</p>
+
+ <pre>&lt;label>&lt;input type='checkbox' checked name=cheese disabled=""> Cheese&lt;/label></pre>
+
+ </div>
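+
+ <div class="example">
+
+ <p>As a further non-normative illustration, and assuming the first of the fragments above is in
+ the document, script sees the same state for all three spellings: the attribute's presence is
+ what matters, not its value.</p>
+
+ <pre>var input = document.querySelector("input[name=cheese]");
+input.hasAttribute("disabled"); // true for all three spellings above
+input.getAttribute("disabled"); // "" or "disabled", depending on the spelling used
+
+// The reflecting disabled IDL attribute also reports true; setting it to false
+// removes the content attribute altogether.
+input.disabled = false;
+input.hasAttribute("disabled"); // false</pre>
+
+ </div>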
+
+
+
+ <h4>Keywords and enumerated attributes</h4>
+
+ <p>Some attributes are defined as taking one of a finite set of keywords. Such attributes are
+ called <dfn data-x="enumerated attribute">enumerated attributes</dfn>. The keywords are each
+ defined to map to a particular <em>state</em> (several keywords might map to the same state, in
+ which case some of the keywords are synonyms of each other; additionally, some of the keywords can
+ be said to be non-conforming, and are only in the specification for historical reasons). In
+  addition, two default states can be given. The first is the <i>invalid value default</i>; the
+  second is the <i>missing value default</i>.</p>
+
+ <p>If an enumerated attribute is specified, the attribute's value must be an <span>ASCII
+ case-insensitive</span> match for one of the given keywords that are not said to be
+ non-conforming, with no leading or trailing whitespace.</p>
+
+ <p>When the attribute is specified, if its value is an <span>ASCII case-insensitive</span> match
+ for one of the given keywords then that keyword's state is the state that the attribute
+ represents. If the attribute value matches none of the given keywords, but the attribute has an
+ <i>invalid value default</i>, then the attribute represents that state. Otherwise, if the
+ attribute value matches none of the keywords but there is a <i>missing value default</i> state
+ defined, then <em>that</em> is the state represented by the attribute. Otherwise, there is no
+ default, and invalid values mean that there is no state represented.</p>
+
+ <p>When the attribute is <em>not</em> specified, if there is a <i>missing value default</i> state
+ defined, then that is the state represented by the (missing) attribute. Otherwise, the absence of
+ the attribute means that there is no state represented.</p>
+
+ <p class="note">The empty string can be a valid keyword.</p>
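+
+  The keyword-to-state lookup described above can be summarised in a short, non-normative Python
+  sketch. The keyword table, state names, and defaults below are hypothetical, and str.lower() is
+  used as a close stand-in for an ASCII case-insensitive comparison.
+
+  ```python
+  def enumerated_attribute_state(value, keywords,
+                                 invalid_value_default=None,
+                                 missing_value_default=None):
+      """Return the state an enumerated attribute represents, or None if no
+      state is represented. `value` is the attribute's value, or None if the
+      attribute is absent; `keywords` maps lowercase keywords to states."""
+      if value is None:                    # the attribute is not specified
+          return missing_value_default
+      state = keywords.get(value.lower())  # case-insensitive keyword match
+      if state is not None:
+          return state
+      if invalid_value_default is not None:
+          return invalid_value_default
+      return missing_value_default
+
+  # Hypothetical keyword table; several keywords may map to the same state.
+  KEYWORDS = {"on": "enabled", "yes": "enabled", "off": "disabled"}
+  assert enumerated_attribute_state("YES", KEYWORDS) == "enabled"
+  assert enumerated_attribute_state(None, KEYWORDS) is None
+  assert enumerated_attribute_state("bogus", KEYWORDS,
+                                    invalid_value_default="enabled") == "enabled"
+  ```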
+
+
+ <h4>Numbers</h4>
+
+ <h5>Signed integers</h5>
+
+ <p>A string is a <dfn>valid integer</dfn> if it consists of one or more <span>ASCII digits</span>,
+ optionally prefixed with a U+002D HYPHEN-MINUS character (-).</p>
+
+ <p>A <span>valid integer</span> without a U+002D HYPHEN-MINUS (-) prefix represents the number
+ that is represented in base ten by that string of digits. A <span>valid integer</span>
+ <em>with</em> a U+002D HYPHEN-MINUS (-) prefix represents the number represented in base ten by
+ the string of digits that follows the U+002D HYPHEN-MINUS, subtracted from zero.</p>
+
+ <div class="nodev">
+
+ <p>The <dfn>rules for parsing integers</dfn> are as given in the following algorithm. When
+ invoked, the steps must be followed in the order given, aborting at the first step that returns a
+ value. This algorithm will return either an integer or an error.</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">input</var> be the string being parsed.</p></li>
+
+ <li><p>Let <var data-x="">position</var> be a pointer into <var data-x="">input</var>, initially
+ pointing at the start of the string.</p></li>
+
+ <li><p>Let <var data-x="">sign</var> have the value "positive".</p></li>
+
+ <li><p><span>Skip whitespace</span>.</p></li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, return an
+ error.</p></li>
+
+ <li>
+
+ <p>If the character indicated by <var data-x="">position</var> (the first character) is a U+002D
+ HYPHEN-MINUS character (-):</p>
+
+ <ol>
+
+ <li>Let <var data-x="">sign</var> be "negative".</li>
+
+ <li>Advance <var data-x="">position</var> to the next character.</li>
+
+ <li>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, return an
+ error.</li>
+
+ </ol>
+
+ <p>Otherwise, if the character indicated by <var data-x="">position</var> (the first character)
+ is a U+002B PLUS SIGN character (+):</p>
+
+ <ol>
+
+ <li>Advance <var data-x="">position</var> to the next character. (The "<code data-x="">+</code>"
+ is ignored, but it is not conforming.)</li>
+
+ <li>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, return an
+ error.</li>
+
+ </ol>
+
+ </li>
+
+ <li><p>If the character indicated by <var data-x="">position</var> is not an <span data-x="ASCII
+ digits">ASCII digit</span>, then return an error.</p></li>
+
+ <!-- Ok. At this point we know we have a number. It might have
+ trailing garbage which we'll ignore, but it's a number, and we
+ won't return an error. -->
+
+ <li><p><span>Collect a sequence of characters</span> that are <span>ASCII digits</span>, and
+ interpret the resulting sequence as a base-ten integer. Let <var data-x="">value</var> be that
+ integer.</p></li>
+
+ <li><p>If <var data-x="">sign</var> is "positive", return <var
+ data-x="">value</var>, otherwise return the result of subtracting
+ <var data-x="">value</var> from zero.</p></li>
+
+ </ol>
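+
+  A non-normative Python sketch of these rules, with None standing in for "an error", might look
+  like this:
+
+  ```python
+  ASCII_DIGITS = "0123456789"
+  SPACE_CHARACTERS = "\t\n\f\r "
+
+  def parse_integer(input):
+      position, end = 0, len(input)
+      sign = 1
+      while position < end and input[position] in SPACE_CHARACTERS:
+          position += 1                      # skip whitespace
+      if position == end:
+          return None
+      if input[position] == "-":
+          sign = -1
+          position += 1
+      elif input[position] == "+":
+          position += 1                      # ignored, but non-conforming
+      if position == end or input[position] not in ASCII_DIGITS:
+          return None
+      start = position
+      while position < end and input[position] in ASCII_DIGITS:
+          position += 1                      # collect the digits
+      return sign * int(input[start:position])  # trailing garbage is ignored
+
+  assert parse_integer("  -42px") == -42
+  assert parse_integer("+7") == 7
+  assert parse_integer("abc") is None
+  ```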
+
+ </div>
+
+
+ <h5>Non-negative integers</h5>
+
+ <p>A string is a <dfn>valid non-negative integer</dfn> if it consists of one or more <span>ASCII
+ digits</span>.</p>
+
+ <p>A <span>valid non-negative integer</span> represents the number that is represented in base ten
+ by that string of digits.</p>
+
+ <div class="nodev">
+
+ <p>The <dfn>rules for parsing non-negative integers</dfn> are as given in the following algorithm.
+ When invoked, the steps must be followed in the order given, aborting at the first step that
+ returns a value. This algorithm will return either zero, a positive integer, or an error.</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">input</var> be the string being parsed.</p></li>
+
+ <li><p>Let <var data-x="">value</var> be the result of parsing <var data-x="">input</var> using the
+ <span>rules for parsing integers</span>.</p></li>
+
+ <li><p>If <var data-x="">value</var> is an error, return an error.</p></li>
+
+ <li><p>If <var data-x="">value</var> is less than zero, return an error.</p></li>
+
+ <li><p>Return <var data-x="">value</var>.</p></li>
+
+ </ol>
+
+ <!-- Implications: A leading + is ignored. A leading - is ignored if the value is zero. -->
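+
+  A compact, non-normative sketch that folds these rules into a regular expression; as the comment
+  above notes, a leading "+" is accepted (and ignored), and a leading "-" is accepted when the
+  value is zero:
+
+  ```python
+  import re
+
+  def parse_non_negative_integer(input):
+      # Optional whitespace and sign, then at least one ASCII digit;
+      # anything after the digits is ignored.
+      match = re.match("[ \t\n\f\r]*([+-]?)([0-9]+)", input)
+      if match is None:
+          return None
+      value = int(match.group(2)) * (-1 if match.group(1) == "-" else 1)
+      return None if value < 0 else value
+
+  assert parse_non_negative_integer(" -0") == 0
+  assert parse_non_negative_integer("-5") is None
+  assert parse_non_negative_integer("12cm") == 12
+  ```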
+
+ </div>
+
+
+ <h5>Floating-point numbers</h5>
+
+ <p>A string is a <dfn>valid floating-point number</dfn> if it consists of:</p>
+
+ <ol class="brief">
+
+ <li>Optionally, a U+002D HYPHEN-MINUS character (-).</li>
+
+ <li>One or both of the following, in the given order:
+
+ <ol>
+
+ <li>A series of one or more <span>ASCII digits</span>.</li>
+
+ <li>
+
+ <ol>
+
+ <li>A single U+002E FULL STOP character (.).</li>
+
+ <li>A series of one or more <span>ASCII digits</span>.</li>
+
+ </ol>
+
+ </li>
+
+ </ol>
+
+ </li>
+
+ <li>Optionally:
+
+ <ol>
+
+ <li>Either a U+0065 LATIN SMALL LETTER E character (e) or a U+0045 LATIN CAPITAL LETTER E
+ character (E).</li>
+
+ <li>Optionally, a U+002D HYPHEN-MINUS character (-) or U+002B PLUS SIGN character (+).</li>
+
+ <li>A series of one or more <span>ASCII digits</span>.</li>
+
+ </ol>
+
+ </li>
+
+ </ol>
+
+ <p>A <span>valid floating-point number</span> represents the number obtained by multiplying the
+ significand by ten raised to the power of the exponent, where the significand is the first number,
+ interpreted as base ten (including the decimal point and the number after the decimal point, if
+ any, and interpreting the significand as a negative number if the whole string starts with a
+ U+002D HYPHEN-MINUS character (-) and the number is not zero), and where the exponent is the
+ number after the E, if any (interpreted as a negative number if there is a U+002D HYPHEN-MINUS
+ character (-) between the E and the number and the number is not zero, or else ignoring a U+002B
+ PLUS SIGN character (+) between the E and the number if there is one). If there is no E, then the
+ exponent is treated as zero.</p>
+
+ <p class="note">The Infinity and Not-a-Number (NaN) values are not <span data-x="valid
+ floating-point number">valid floating-point numbers</span>.</p>
+
+ <div class="nodev">
+
+ <p>The <dfn data-x="best representation of the number as a floating-point number">best
+ representation of the number <var data-x="">n</var> as a floating-point number</dfn> is the string
+ obtained from applying the JavaScript operator ToString to <var data-x="">n</var>. The JavaScript
+ operator ToString is not uniquely determined. When there are multiple possible strings that could
+ be obtained from the JavaScript operator ToString for a particular value, the user agent must
+ always return the same string for that value (though it may differ from the value used by other
+ user agents).</p>
+
+ <p>The <dfn>rules for parsing floating-point number values</dfn> are as given in the following
+ algorithm. This algorithm must be aborted at the first step that returns something. This algorithm
+ will return either a number or an error.</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">input</var> be the string being parsed.</p></li>
+
+ <li><p>Let <var data-x="">position</var> be a pointer into <var data-x="">input</var>, initially
+ pointing at the start of the string.</p></li>
+
+ <li><p>Let <var data-x="">value</var> have the value 1.</li>
+
+ <li><p>Let <var data-x="">divisor</var> have the value 1.</p></li>
+
+ <li><p>Let <var data-x="">exponent</var> have the value 1.</p></li>
+
+ <li><p><span>Skip whitespace</span>.</p></li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, return an
+ error.</p></li>
+
+ <li>
+
+ <p>If the character indicated by <var data-x="">position</var> is a U+002D HYPHEN-MINUS character
+ (-):</p>
+
+ <ol>
+
+ <li>Change <var data-x="">value</var> and <var data-x="">divisor</var> to &#x2212;1.</li>
+
+ <li>Advance <var data-x="">position</var> to the next character.</li>
+
+ <li>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, return an
+ error.</li>
+
+ </ol>
+
+ <p>Otherwise, if the character indicated by <var data-x="">position</var> (the first character)
+ is a U+002B PLUS SIGN character (+):</p>
+
+ <ol>
+
+ <li>Advance <var data-x="">position</var> to the next character. (The "<code data-x="">+</code>"
+ is ignored, but it is not conforming.)</li>
+
+ <li>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, return an
+ error.</li>
+
+ </ol>
+
+ </li>
+
+ <li><p>If the character indicated by <var data-x="">position</var> is a U+002E FULL STOP (.), and
+ that is not the last character in <var data-x="">input</var>, and the character after the
+ character indicated by <var data-x="">position</var> is an <span data-x="ASCII digits">ASCII
+ digit</span>, then set <var data-x="">value</var> to zero and jump to the step labeled
+ <i>fraction</i>.</p> <!-- we have to check there's a number so that ".e1" fails to parse but ".0"
+ does not -->
+
+ <li><p>If the character indicated by <var data-x="">position</var> is not an <span data-x="ASCII
+ digits">ASCII digit</span>, then return an error.</p></li>
+
+ <li><p><span>Collect a sequence of characters</span> that are <span>ASCII digits</span>, and
+ interpret the resulting sequence as a base-ten integer. Multiply <var data-x="">value</var> by
+ that integer.</p></li>
+
+ <li>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, jump to the
+ step labeled <i>conversion</i>.</li>
+
+ <li><p><i>Fraction</i>: If the character indicated by <var data-x="">position</var> is a U+002E
+ FULL STOP (.), run these substeps:</p>
+
+ <ol>
+
+ <li><p>Advance <var data-x="">position</var> to the next character.</p></li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, or if the
+ character indicated by <var data-x="">position</var> is not an <span data-x="ASCII digits">ASCII
+ digit</span>, U+0065 LATIN SMALL LETTER E (e), or U+0045 LATIN CAPITAL LETTER E (E), then jump
+ to the step labeled <i>conversion</i>.</li>
+
+ <li><p>If the character indicated by <var data-x="">position</var> is a U+0065 LATIN SMALL
+ LETTER E character (e) or a U+0045 LATIN CAPITAL LETTER E character (E), skip the remainder of
+ these substeps.</p>
+
+ <li><p><i>Fraction loop</i>: Multiply <var data-x="">divisor</var> by ten.</p></li>
+
+ <li>Add the value of the character indicated by <var data-x="">position</var>, interpreted as a
+ base-ten digit (0..9) and divided by <var data-x="">divisor</var>, to <var
+ data-x="">value</var>.</li>
+
+ <li><p>Advance <var data-x="">position</var> to the next character.</p></li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, then jump
+ to the step labeled <i>conversion</i>.</li>
+
+ <li><p>If the character indicated by <var data-x="">position</var> is an <span data-x="ASCII
+ digits">ASCII digit</span>, jump back to the step labeled <i>fraction loop</i> in these
+ substeps.</p></li>
+
+ </ol>
+
+ </li>
+
+ <li><p>If the character indicated by <var data-x="">position</var> is a U+0065 LATIN SMALL LETTER
+ E character (e) or a U+0045 LATIN CAPITAL LETTER E character (E), run these substeps:</p>
+
+ <ol>
+
+ <li><p>Advance <var data-x="">position</var> to the next character.</p></li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, then jump
+ to the step labeled <i>conversion</i>.</li>
+
+ <li>
+
+ <p>If the character indicated by <var data-x="">position</var> is a U+002D HYPHEN-MINUS
+ character (-):</p>
+
+ <ol>
+
+ <li>Change <var data-x="">exponent</var> to &#x2212;1.</li>
+
+ <li>Advance <var data-x="">position</var> to the next character.</li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, then
+ jump to the step labeled <i>conversion</i>.</li>
+
+ </ol>
+
+ <p>Otherwise, if the character indicated by <var data-x="">position</var> is a U+002B PLUS SIGN
+ character (+):</p>
+
+ <ol>
+
+ <li>Advance <var data-x="">position</var> to the next character.</li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, then
+ jump to the step labeled <i>conversion</i>.</li>
+
+ </ol>
+
+ </li>
+
+ <li><p>If the character indicated by <var data-x="">position</var> is not an <span data-x="ASCII
+ digits">ASCII digit</span>, then jump to the step labeled <i>conversion</i>.</li>
+
+ <li><p><span>Collect a sequence of characters</span> that are <span>ASCII digits</span>, and
+ interpret the resulting sequence as a base-ten integer. Multiply <var data-x="">exponent</var>
+ by that integer.</p></li>
+
+ <li><p>Multiply <var data-x="">value</var> by ten raised to the <var data-x="">exponent</var>th
+ power.</p></li>
+
+ </ol>
+
+ </li>
+
+ <li><p><i>Conversion</i>: Let <var data-x="">S</var> be the set of finite IEEE 754
+ double-precision floating-point values except &#x2212;0, but with two special values added: 2<sup
+ data-x="">1024</sup> and &#x2212;2<sup data-x="">1024</sup>.</p></li>
+
+ <li><p>Let <var data-x="">rounded-value</var> be the number in <var data-x="">S</var> that is
+ closest to <var data-x="">value</var>, selecting the number with an even significand if there are
+ two equally close values. (The two special values 2<sup data-x="">1024</sup> and &#x2212;2<sup
+ data-x="">1024</sup> are considered to have even significands for this purpose.)</p></li>
+
+ <li><p>If <var data-x="">rounded-value</var> is 2<sup data-x="">1024</sup> or &#x2212;2<sup
+ data-x="">1024</sup>, return an error.</p></li>
+
+ <li><p>Return <var data-x="">rounded-value</var>.</p></li>
+
+ </ol>
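+
+  A non-normative Python sketch: the regular expression below accepts the same prefixes as the
+  algorithm above, and the "conversion" step is delegated to float(), which rounds a decimal
+  string to the nearest IEEE 754 double (ties to even); the overflow error case then shows up as
+  an infinity.
+
+  ```python
+  import math
+  import re
+
+  FLOAT_PREFIX = re.compile(
+      "[ \t\n\f\r]*"                  # skip whitespace
+      r"([+-]?)"                      # optional sign ("+" is non-conforming)
+      r"(?=[0-9]|\.[0-9])"            # must start with a digit or ".digit"
+      r"([0-9]*)"                     # integer part (possibly empty)
+      r"(\.[0-9]*)?"                  # fraction part ("1." is allowed)
+      r"([eE][+-]?[0-9]+)?")          # exponent part
+
+  def parse_floating_point_number(input):
+      m = FLOAT_PREFIX.match(input)   # trailing garbage is simply ignored
+      if m is None:
+          return None
+      sign, integer, fraction, exponent = m.groups()
+      number = float(sign + (integer or "0") + (fraction or "") + (exponent or ""))
+      if math.isinf(number):
+          return None                 # the 2**1024 overflow error case
+      if number == 0.0:
+          return 0.0                  # the set S excludes negative zero
+      return number
+
+  assert parse_floating_point_number(" -1.5e2px") == -150.0
+  assert parse_floating_point_number(".5") == 0.5
+  assert parse_floating_point_number("1e") == 1.0
+  assert parse_floating_point_number("3e999999") is None
+  ```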
+
+ </div>
+
+
+  <div class="nodev">
+
+  <h5 id="percentages-and-dimensions">Percentages and lengths</h5>
+
+ <p>The <dfn>rules for parsing dimension values</dfn> are as given in the following algorithm. When
+ invoked, the steps must be followed in the order given, aborting at the first step that returns a
+ value. This algorithm will return either a number greater than or equal to 1.0, or an error; if a
+  number is returned, then it is further categorized as either a percentage or a length.</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">input</var> be the string being parsed.</p></li>
+
+ <li><p>Let <var data-x="">position</var> be a pointer into <var data-x="">input</var>, initially
+ pointing at the start of the string.</p></li>
+
+ <li><p><span>Skip whitespace</span>.</p></li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, return an
+ error.</p></li>
+
+ <li><p>If the character indicated by <var data-x="">position</var> is a U+002B PLUS SIGN character
+ (+), advance <var data-x="">position</var> to the next character.</li>
+
+ <li><p><span>Collect a sequence of characters</span> that are U+0030 DIGIT ZERO (0) characters,
+ and discard them.</p></li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, return an
+ error.</p></li>
+
+ <li><p>If the character indicated by <var data-x="">position</var> is not one of U+0031 DIGIT ONE
+ (1) to U+0039 DIGIT NINE (9), then return an error.</p></li>
+
+ <!-- Ok. At this point we know we have a number. It might have trailing garbage which we'll
+ ignore, but it's a number, and we won't return an error. -->
+
+ <li><p><span>Collect a sequence of characters</span> that are <span>ASCII digits</span>, and
+ interpret the resulting sequence as a base-ten integer. Let <var data-x="">value</var> be that
+ number.</li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, return <var
+ data-x="">value</var> as a length.</p></li>
+
+ <li>
+
+ <p>If the character indicated by <var data-x="">position</var> is a U+002E FULL STOP character
+ (.):</p>
+
+ <ol>
+
+ <li><p>Advance <var data-x="">position</var> to the next character.</p></li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, or if the
+ character indicated by <var data-x="">position</var> is not an <span data-x="ASCII digits">ASCII
+ digit</span>, then return <var data-x="">value</var> as a length.</li>
+
+ <li><p>Let <var data-x="">divisor</var> have the value 1.</p></li>
+
+ <li><p><i>Fraction loop</i>: Multiply <var data-x="">divisor</var> by ten.</p></li>
+
+ <li>Add the value of the character indicated by <var data-x="">position</var>, interpreted as a
+ base-ten digit (0..9) and divided by <var data-x="">divisor</var>, to <var
+ data-x="">value</var>.</li>
+
+ <li><p>Advance <var data-x="">position</var> to the next character.</p></li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, then
+ return <var data-x="">value</var> as a length.</li>
+
+ <li><p>If the character indicated by <var data-x="">position</var> is an <span data-x="ASCII
+ digits">ASCII digit</span>, return to the step labeled <i>fraction loop</i> in these
+ substeps.</p></li>
+
+ </ol>
+
+ </li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, return <var
+ data-x="">value</var> as a length.</p></li>
+
+ <li><p>If the character indicated by <var data-x="">position</var> is a U+0025 PERCENT SIGN
+ character (%), return <var data-x="">value</var> as a percentage.</p></li>
+
+ <li><p>Return <var data-x="">value</var> as a length.</p></li>
+
+ </ol>
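+
+  A non-normative Python sketch (None stands in for the error case; the function name is ours):
+
+  ```python
+  ASCII_DIGITS = "0123456789"
+  SPACE_CHARACTERS = "\t\n\f\r "
+
+  def parse_dimension_value(input):
+      """Returns (number, kind) with kind "length" or "percentage", or None.
+      Note that values below 1 (such as "0" or "0.5") are errors here."""
+      pos, end = 0, len(input)
+      while pos < end and input[pos] in SPACE_CHARACTERS:
+          pos += 1
+      if pos == end:
+          return None
+      if input[pos] == "+":
+          pos += 1
+      while pos < end and input[pos] == "0":   # discard leading zeros
+          pos += 1
+      if pos == end or input[pos] not in "123456789":
+          return None
+      start = pos
+      while pos < end and input[pos] in ASCII_DIGITS:
+          pos += 1
+      value = float(input[start:pos])
+      if pos < end and input[pos] == ".":
+          pos += 1
+          if pos == end or input[pos] not in ASCII_DIGITS:
+              return (value, "length")
+          divisor = 1
+          while pos < end and input[pos] in ASCII_DIGITS:
+              divisor *= 10
+              value += int(input[pos]) / divisor
+              pos += 1
+      if pos < end and input[pos] == "%":
+          return (value, "percentage")
+      return (value, "length")
+
+  assert parse_dimension_value(" 075%") == (75.0, "percentage")
+  assert parse_dimension_value("12.5em") == (12.5, "length")
+  assert parse_dimension_value("0.5") is None
+  ```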
+
+ </div>
+
+
+ <h5>Lists of integers</h5>
+
+ <p>A <dfn>valid list of integers</dfn> is a number of <span data-x="valid integer">valid
+ integers</span> separated by U+002C COMMA characters, with no other characters (e.g. no <span
+ data-x="space character">space characters</span>). In addition, there might be restrictions on the
+ number of integers that can be given, or on the range of values allowed.</p>
+
+ <div class="nodev">
+
+ <p>The <dfn>rules for parsing a list of integers</dfn> are as follows:</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">input</var> be the string being parsed.</p></li>
+
+ <li><p>Let <var data-x="">position</var> be a pointer into <var data-x="">input</var>, initially
+ pointing at the start of the string.</p></li>
+
+ <li><p>Let <var data-x="">numbers</var> be an initially empty list of integers. This list will be
+ the result of this algorithm.</p></li>
+
+ <li><p>If there is a character in the string <var data-x="">input</var> at position <var
+ data-x="">position</var>, and it is either a U+0020 SPACE, U+002C COMMA, or U+003B SEMICOLON
+ character, then advance <var data-x="">position</var> to the next character in <var
+ data-x="">input</var>, or to beyond the end of the string if there are no more
+ characters.</p></li>
+
+ <li><p>If <var data-x="">position</var> points to beyond the end of <var data-x="">input</var>,
+ return <var data-x="">numbers</var> and abort.</p></li>
+
+ <li><p>If the character in the string <var data-x="">input</var> at position <var
+ data-x="">position</var> is a U+0020 SPACE, U+002C COMMA, or U+003B SEMICOLON character, then
+ return to step 4.</li>
+
+  <li><p>Let <var data-x="">negated</var> be false.</p></li>
+
+  <li><p>Let <var data-x="">value</var> be 0.</p></li>
+
+ <li><p>Let <var data-x="">started</var> be false. This variable is set to true when the parser
+ sees a number or a U+002D HYPHEN-MINUS character (-).</p></li>
+
+ <li><p>Let <var data-x="">got number</var> be false. This variable is set to true when the parser
+ sees a number.</p></li>
+
+ <li><p>Let <var data-x="">finished</var> be false. This variable is set to true to switch parser
+ into a mode where it ignores characters until the next separator.</p></li>
+
+ <li><p>Let <var data-x="">bogus</var> be false.</p></li>
+
+ <li><p><i>Parser</i>: If the character in the string <var data-x="">input</var> at position <var
+ data-x="">position</var> is:</p>
+
+ <dl class="switch">
+
+ <dt>A U+002D HYPHEN-MINUS character</dt>
+
+ <dd>
+
+ <p>Follow these substeps:</p>
+
+ <ol>
+
+ <li>If <var data-x="">got number</var> is true, let <var data-x="">finished</var> be true.</li>
+
+ <li>If <var data-x="">finished</var> is true, skip to the next step in the overall set of
+ steps.</li>
+
+ <li>If <var data-x="">started</var> is true, let <var data-x="">negated</var> be false.</li>
+
+ <li>Otherwise, if <var data-x="">started</var> is false and if <var data-x="">bogus</var> is
+ false, let <var data-x="">negated</var> be true.</li>
+
+ <li>Let <var data-x="">started</var> be true.</li>
+
+ </ol>
+
+ </dd>
+
+ <dt>An <span data-x="ASCII digits">ASCII digit</span></dt>
+
+ <dd>
+
+ <p>Follow these substeps:</p>
+
+ <ol>
+
+ <li>If <var data-x="">finished</var> is true, skip to the next step in the overall set of
+ steps.</li>
+
+ <li>Multiply <var data-x="">value</var> by ten.</li>
+
+ <li>Add the value of the digit, interpreted in base ten, to <var data-x="">value</var>.</li>
+
+ <li>Let <var data-x="">started</var> be true.</li>
+
+ <li>Let <var data-x="">got number</var> be true.</li>
+
+ </ol>
+
+ </dd>
+
+
+ <dt>A U+0020 SPACE character</dt>
+ <dt>A U+002C COMMA character</dt>
+ <dt>A U+003B SEMICOLON character</dt>
+
+ <dd>
+
+ <p>Follow these substeps:</p>
+
+ <ol>
+
+ <li>If <var data-x="">got number</var> is false, return the <var data-x="">numbers</var> list
+ and abort. This happens if an entry in the list has no digits, as in "<code
+ data-x="">1,2,x,4</code>".</li>
+
+ <li>If <var data-x="">negated</var> is true, then negate <var data-x="">value</var>.</li>
+
+ <li>Append <var data-x="">value</var> to the <var data-x="">numbers</var> list.</li>
+
+ <li>Jump to step 4 in the overall set of steps.</li>
+
+ </ol>
+
+ </dd>
+
+
+ <!-- <dt>A U+002E FULL STOP character</dt> -->
+ <dt>A character in the range U+0001 to U+001F, <!-- space --> U+0021 to U+002B, <!-- comma --> U+002D to U+002F, <!-- digits --> U+003A, <!-- semicolon --> U+003C to U+0040, <!-- a-z --> U+005B to U+0060, <!-- A-Z --> U+007b to U+007F
+ (i.e. any other non-alphabetic ASCII character)</dt>
+
+ <!--
+ Test: http://www.hixie.ch/tests/adhoc/html/flow/image-maps/004-demo.html
+ IE6 on Wine treats the following characters like this also: U+1-U+1f, U+21-U+2b, U+2d-U+2f, U+3a,
+ U+3c-U+40, U+5b-U+60, U+7b-U+82, U+84-U+89, U+8b, U+8d, U+8f-U+99, U+9b, U+9d, U+a0-U+bf, U+d7,
+ U+f7, U+1f6-U+1f9, U+218-U+24f, U+2a9-U+385, U+387, U+38b, U+38d, U+3a2, U+3cf, U+3d7-U+3d9, U+3db,
+ U+3dd, U+3df, U+3e1, U+3f4-U+400, U+40d, U+450, U+45d, U+482-U+48f, U+4c5-U+4c6, U+4c9-U+4ca,
+ U+4cd-U+4cf, U+4ec-U+4ed, U+4f6-U+4f7, U+4fa-U+530, U+557-U+560, U+588-U+5cf, U+5eb-U+5ef,
+ U+5f3-U+620, U+63b-U+640, U+64b-U+670, U+6b8-U+6b9, U+6bf, U+6cf, U+6d4, U+6d6-U+904, U+93a-U+957,
+ U+962-U+984, U+98d-U+98e, U+991-U+992, U+9a9, U+9b1, U+9b3-U+9b5, U+9ba-U+9db, U+9de, U+9e2-U+9ef,
+ U+9f2-U+a04, U+a0b-U+a0e, U+a11-U+a12, U+a29, U+a31, U+a34, U+a37, U+a3a-U+a58, U+a5d, U+a5f-U+a84,
+ U+a8c, U+a8e, U+a92, U+aa9, U+ab1, U+ab4, U+aba-U+adf, U+ae1-U+b04, U+b0d-U+b0e, U+b11-U+b12,
+ U+b29, U+b31, U+b34-U+b35, U+b3a-U+b5b, U+b5e, U+b62-U+b84, U+b8b-U+b8d, U+b91, U+b96-U+b98, U+b9b,
+ U+b9d, U+ba0-U+ba2, U+ba5-U+ba7, U+bab-U+bad, U+bb6, U+bba-U+c04, U+c0d, U+c11, U+c29, U+c34,
+ U+c3a-U+c5f, U+c62-U+c84, U+c8d, U+c91, U+ca9, U+cb4, U+cba-U+cdd, U+cdf, U+ce2-U+d04, U+d0d,
+ U+d11, U+d29, U+d3a-U+d5f, U+d62-U+e00, U+e2f, U+e31, U+e34-U+e3f, U+e46-U+e80, U+e83, U+e85-U+e86,
+ U+e89, U+e8b-U+e8c, U+e8e-U+e93, U+e98, U+ea0, U+ea4, U+ea6, U+ea8-U+ea9, U+eac, U+eaf-U+edb,
+ U+ede-U+109f, U+10c6-U+10cf, U+10f7-U+10ff, U+115a-U+115e, U+11a3-U+11a7, U+11fa-U+1dff,
+ U+1e9b-U+1e9f, U+1efa-U+1eff, U+1f16-U+1f17, U+1f1e-U+1f1f, U+1f46-U+1f47, U+1f4e-U+1f4f, U+1f58,
+ U+1f5a, U+1f5c, U+1f5e, U+1f7e-U+1f7f, U+1fb5, U+1fbd-U+1fc1, U+1fc5, U+1fcd-U+1fcf, U+1fd4-U+1fd5,
+ U+1fdc-U+1fdf, U+1fed-U+1ff1, U+1ff5, U+1ffd-U+249b, U+24ea-U+3004, U+3006-U+3040, U+3095-U+309a,
+ U+309f-U+30a0, U+30fb, U+30ff-U+3104, U+312d-U+3130, U+318f-U+4dff, U+9fa6-U+abff, U+d7a4-U+d7ff,
+ U+e000-U+f8ff, U+fa2e-U+faff, U+fb07-U+fb12, U+fb18-U+fb1e, U+fb37, U+fb3d, U+fb3f, U+fb42, U+fb45,
+ U+fbb2-U+fbd2, U+fbe9, U+fce1, U+fd3e-U+fd4f, U+fd90-U+fd91, U+fdc8-U+fdef, U+fdfc-U+fe7f,
+ U+fefd-U+ff20, U+ff3b-U+ff40, U+ff5b-U+ff65, U+ffa0, U+ffbf-U+ffc1, U+ffc8-U+ffc9, U+ffd0-U+ffd1,
+ U+ffd8-U+ffd9, U+ffdd-U+ffff
+ IE7 on Win2003 treats the following characters like this also instead: U+1-U+1f, U+21-U+2b,
+ U+2d-U+2f, U+3a, U+3c-U+40, U+5b-U+60, U+7b-U+82, U+84-U+89, U+8b, U+8d, U+8f-U+99, U+9b, U+9d,
+ U+a0-U+a9, U+ab-U+b4, U+b6-U+b9, U+bb-U+bf, U+d7, U+f7, U+220-U+221, U+234-U+24f, U+2ae-U+2af,
+ U+2b9-U+2ba, U+2c2-U+2df, U+2e5-U+2ed, U+2ef-U+344, U+346-U+379, U+37b-U+385, U+387, U+38b, U+38d,
+ U+3a2, U+3cf, U+3d8-U+3d9, U+3f4-U+3ff, U+482-U+48b, U+4c5-U+4c6, U+4c9-U+4ca, U+4cd-U+4cf,
+ U+4f6-U+4f7, U+4fa-U+530, U+557-U+558, U+55a-U+560, U+588-U+5cf, U+5eb-U+5ef, U+5f3-U+620,
+ U+63b-U+640, U+656-U+66f, U+6d4, U+6dd-U+6e0, U+6e9-U+6ec, U+6ee-U+6f9, U+6fd-U+70f, U+72d-U+72f,
+ U+740-U+77f, U+7b1-U+900, U+904, U+93a-U+93c, U+94d - U+94f, U+951-U+957, U+964-U+980, U+984,
+ U+98d-U+98e, U+991-U+992, U+9a9, U+9b1, U+9b3-U+9b5, U+9ba-U+9bd, U+9c5-U+9c6, U+9c9-U+9ca,
+ U+9cd-U+9d6, U+9d8-U+9db, U+9de, U+9e4-U+9ef, U+9f2-U+a01, U+a03-U+a04, U+a0b-U+a0e, U+a11-U+a12,
+ U+a29, U+a31, U+a34, U+a37, U+a3a-U+a3d, U+a43-U+a46, U+a49-U+a4a, U+a4d-U+a58, U+a5d, U+a5f-U+a6f,
+ U+a75-U+a80, U+a84, U+a8c, U+a8e, U+a92, U+aa9, U+ab1, U+ab4, U+aba-U+abc, U+ac6, U+aca,
+ U+acd-U+acf, U+ad1-U+adf, U+ae1-U+b00, U+b04, U+b0d-U+b0e, U+b11-U+b12, U+b29, U+b31, U+b34-U+b35,
+ U+b3a-U+b3c, U+b44-U+b46, U+b49 - U+b4a, U+b4d-U+b55, U+b58-U+b5b, U+b5e, U+b62-U+b81, U+b84,
+ U+b8b-U+b8d, U+b91, U+b96-U+b98, U+b9b, U+b9d, U+ba0 - U+ba2, U+ba5-U+ba7, U+bab-U+bad, U+bb6,
+ U+bba-U+bbd, U+bc3-U+bc5, U+bc9, U+bcd-U+bd6, U+bd8-U+c00, U+c04, U+c0d, U+c11, U+c29, U+c34,
+ U+c3a-U+c3d, U+c45, U+c49, U+c4d-U+c54, U+c57-U+c5f, U+c62-U+c81, U+c84, U+c8d, U+c91, U+ca9,
+ U+cb4, U+cba-U+cbd, U+cc5, U+cc9, U+ccd-U+cd4, U+cd7-U+cdd, U+cdf, U+ce2-U+d01, U+d04, U+d0d,
+ U+d11, U+d29, U+d3a-U+d3d, U+d44-U+d45, U+d49, U+d4d-U+d56, U+d58-U+d5f, U+d62-U+d81, U+d84,
+ U+d97-U+d99, U+db2, U+dbc, U+dbe - U+dbf, U+dc7-U+dce, U+dd5, U+dd7, U+de0-U+df1, U+df4-U+e00,
+ U+e3b-U+e3f, U+e4f-U+e80, U+e83, U+e85-U+e86, U+e89, U+e8b-U+e8c, U+e8e-U+e93, U+e98, U+ea0, U+ea4,
+ U+ea6, U+ea8-U+ea9, U+eac, U+eba, U+ebe-U+ebf, U+ec5-U+ecc, U+ece-U+edb, U+ede-U+eff, U+f01-U+f3f,
+ U+f48, U+f6b-U+f70, U+f82-U+f87, U+f8c-U+f8f, U+f98, U+fbd-U+fff, U+1022, U+1028, U+102b,
+ U+1033-U+1035, U+1037, U+1039-U+104f, U+105a-U+109f, U+10c6-U+10cf, U+10f7-U+10ff, U+115a - U+115e,
+ U+11a3-U+11a7, U+11fa-U+11ff, U+1207, U+1247, U+1249, U+124e-U+124f, U+1257, U+1259, U+125e-U+125f,
+ U+1287, U+1289, U+128e-U+128f, U+12af, U+12b1, U+12b6-U+12b7, U+12bf, U+12c1, U+12c6-U+12c7,
+ U+12cf, U+12d7, U+12ef, U+130f, U+1311, U+1316-U+1317, U+131f, U+1347, U+135b-U+139f,
+ U+13f5-U+1400, U+166d-U+166e, U+1677-U+1680, U+169b - U+169f, U+16eb-U+177f, U+17c9-U+181f, U+1843,
+ U+1878-U+187f, U+18aa-U+1dff, U+1e9c-U+1e9f, U+1efa-U+1eff, U+1f16-U+1f17, U+1f1e-U+1f1f,
+ U+1f46-U+1f47, U+1f4e-U+1f4f, U+1f58, U+1f5a, U+1f5c, U+1f5e, U+1f7e-U+1f7f, U+1fb5, U+1fbd,
+ U+1fbf-U+1fc1, U+1fc5, U+1fcd-U+1fcf, U+1fd4-U+1fd5, U+1fdc-U+1fdf, U+1fed-U+1ff1, U+1ff5,
+ U+1ffd-U+207e, U+2080-U+2101, U+2103-U+2106, U+2108-U+2109, U+2114, U+2116-U+2118, U+211e-U+2123,
+ U+2125, U+2127, U+2129, U+212e, U+2132, U+213a-U+215f, U+2184-U+3005, U+3008-U+3020, U+302a-U+3037,
+ U+303b-U+3104, U+312d-U+3130, U+318f - U+319f, U+31b8-U+33ff, U+4db6-U+4dff, U+9fa6-U+9fff,
+ U+a48d-U+abff, U+d7a4-U+d7ff, U+e000-U+f8ff, U+fa2e-U+faff, U+fb07-U+fb12, U+fb18-U+fb1c, U+fb1e,
+ U+fb29, U+fb37, U+fb3d, U+fb3f, U+fb42, U+fb45, U+fbb2-U+fbd2, U+fd3e-U+fd4f, U+fd90-U+fd91,
+ U+fdc8-U+fdef, U+fdfc-U+fe6f, U+fe73, U+fe75, U+fefd-U+ff20, U+ff3b-U+ff40, U+ff5b-U+ff9f,
+ U+ffbf-U+ffc1, U+ffc8-U+ffc9, U+ffd0-U+ffd1, U+ffd8-U+ffd9, U+ffdd-U+ffff
+-->
+
+ <dd>
+
+ <p>Follow these substeps:</p>
+
+ <ol>
+
+ <li>If <var data-x="">got number</var> is true, let <var data-x="">finished</var> be true.</li>
+
+ <li>If <var data-x="">finished</var> is true, skip to the next step in the overall set of
+ steps.</li>
+
+ <li>Let <var data-x="">negated</var> be false.</li>
+
+ </ol>
+
+ </dd>
+
+
+ <dt>Any other character</dt>
+ <!-- alphabetic a-z A-Z, and non-ASCII -->
+
+ <dd>
+
+ <p>Follow these substeps:</p>
+
+ <ol>
+
+ <li>If <var data-x="">finished</var> is true, skip to the next step in the overall set of
+ steps.</li>
+
+ <li>Let <var data-x="">negated</var> be false.</li>
+
+ <li>Let <var data-x="">bogus</var> be true.</li>
+
+ <li>If <var data-x="">started</var> is true, then return the <var data-x="">numbers</var> list,
+ and abort. (The value in <var data-x="">value</var> is not appended to the list first; it is
+ dropped.)</li>
+
+ </ol>
+
+ </dd>
+
+ </dl>
+
+ </li>
+
+ <li><p>Advance <var data-x="">position</var> to the next character in <var data-x="">input</var>,
+ or to beyond the end of the string if there are no more characters.</p></li>
+
+ <li><p>If <var data-x="">position</var> points to a character (and not to beyond the end of <var
+ data-x="">input</var>), jump to the big <i>Parser</i> step above.</p></li>
+
+ <li><p>If <var data-x="">negated</var> is true, then negate <var data-x="">value</var>.</li>
+
+ <li><p>If <var data-x="">got number</var> is true, then append <var data-x="">value</var> to the
+ <var data-x="">numbers</var> list.</li>
+
+ <li><p>Return the <var data-x="">numbers</var> list and abort.</p></li>
+
+ </ol>
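+
+  The switch above is easier to follow alongside a non-normative Python sketch; the structure
+  below (a plain list return value instead of "return and abort") is ours:
+
+  ```python
+  ASCII_DIGITS = "0123456789"
+  SEPARATORS = " ,;"
+
+  def parse_list_of_integers(input):
+      numbers = []
+      pos, end = 0, len(input)
+      while True:
+          while pos < end and input[pos] in SEPARATORS:      # steps 4-6
+              pos += 1
+          if pos == end:
+              return numbers
+          # Steps 7-11: fresh per-number state.
+          negated = started = got_number = finished = bogus = False
+          value = 0
+          appended = False
+          while pos < end:                                   # the "Parser" switch
+              c = input[pos]
+              if c in SEPARATORS:
+                  if not got_number:
+                      return numbers                         # e.g. "1,2,x,4"
+                  numbers.append(-value if negated else value)
+                  appended = True
+                  break                                      # back to step 4
+              elif c == "-":
+                  if got_number:
+                      finished = True
+                  if not finished:
+                      if started:
+                          negated = False
+                      elif not bogus:
+                          negated = True
+                      started = True
+              elif c in ASCII_DIGITS:
+                  if not finished:
+                      value = value * 10 + int(c)
+                      started = got_number = True
+              elif c.isalpha() or ord(c) > 0x7F:
+                  # "Any other character": ASCII letters and non-ASCII.
+                  if not finished:
+                      negated = False
+                      bogus = True
+                      if started:
+                          return numbers                     # pending value dropped
+              else:
+                  # Any other non-alphabetic ASCII character.
+                  if got_number:
+                      finished = True
+                  if not finished:
+                      negated = False
+              pos += 1
+          if not appended:                                   # steps 15-17
+              if got_number:
+                  numbers.append(-value if negated else value)
+              return numbers
+
+  assert parse_list_of_integers("1,2,x,4") == [1, 2]
+  assert parse_list_of_integers(" 3;-4  5x") == [3, -4]
+  ```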
+
+ </div>
+
+
+ <div class="nodev">
+
+ <h5>Lists of dimensions</h5>
+
+ <!-- no definition of a type since no conforming feature uses this syntax (it's only used in
+ cols="" and rows="" on <frameset> elements -->
+
+ <p>The <dfn>rules for parsing a list of dimensions</dfn> are as follows. These rules return a list
+ of zero or more pairs consisting of a number and a unit, the unit being one of <i>percentage</i>,
+ <i>relative</i>, and <i>absolute</i>.</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">raw input</var> be the string being parsed.</p></li>
+
+ <li><p>If the last character in <var data-x="">raw input</var> is a U+002C COMMA character (,),
+ then remove that character from <var data-x="">raw input</var>.</p></li>
+
+ <li><p><span data-x="split a string on commas">Split the string <var data-x="">raw input</var> on
+ commas</span>. Let <var data-x="">raw tokens</var> be the resulting list of tokens.</p></li>
+
+ <li><p>Let <var data-x="">result</var> be an empty list of number/unit pairs.</p></li>
+
+ <li>
+
+ <p>For each token in <var data-x="">raw tokens</var>, run the following substeps:</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">input</var> be the token.</p></li>
+
+ <li><p>Let <var data-x="">position</var> be a pointer into <var data-x="">input</var>,
+ initially pointing at the start of the string.</p></li>
+
+ <li><p>Let <var data-x="">value</var> be the number 0.</p></li>
+
+ <li><p>Let <var data-x="">unit</var> be <i>absolute</i>.</p></li>
+
+ <li><p>If <var data-x="">position</var> is past the end of <var data-x="">input</var>, set <var
+ data-x="">unit</var> to <i>relative</i> and jump to the last substep.</p></li>
+
+ <li><p>If the character at <var data-x="">position</var> is an <span data-x="ASCII
+ digits">ASCII digit</span>, <span>collect a sequence of characters</span> that are <span>ASCII
+ digits</span>, interpret the resulting sequence as an integer in base ten, and increment <var
+ data-x="">value</var> by that integer.</p></li>
+
+ <li>
+
+ <p>If the character at <var data-x="">position</var> is a U+002E FULL STOP character (.), run
+ these substeps:</p>
+
+ <ol>
+
+    <li><p>Advance <var data-x="">position</var> to the next character.</p></li>
+
+    <li><p><span>Collect a sequence of characters</span> consisting of <span data-x="space
+    character">space characters</span> and <span>ASCII digits</span>. Let <var data-x="">s</var>
+    be the resulting sequence.</p></li>
+
+ <li><p>Remove all <span data-x="space character">space characters</span> in <var
+ data-x="">s</var>.</p></li>
+
+ <li>
+
+ <p>If <var data-x="">s</var> is not the empty string, run these subsubsteps:</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">length</var> be the number of characters in <var
+ data-x="">s</var> (after the spaces were removed).</p></li>
+
+ <li><p>Let <var data-x="">fraction</var> be the result of interpreting <var
+ data-x="">s</var> as a base-ten integer, and then dividing that number by <span
+ data-x="">10<sup data-x=""><var data-x="">length</var></sup></span>.</li>
+
+ <li><p>Increment <var data-x="">value</var> by <var data-x="">fraction</var>.</p></li>
+
+ </ol>
+
+ </li>
+
+ </ol>
+
+ </li>
+
+ <li><p><span>Skip whitespace</span>.</p></li>
+
+ <li>
+
+ <p>If the character at <var data-x="">position</var> is a U+0025 PERCENT SIGN character (%),
+ then set <var data-x="">unit</var> to <i>percentage</i>.</p>
+
+ <p>Otherwise, if the character at <var data-x="">position</var> is a U+002A ASTERISK character
+ (*), then set <var data-x="">unit</var> to <i>relative</i>.</p>
+
+ </li>
+
+ <!-- the remaining characters in /input/ are ignored -->
+
+ <li><p>Add an entry to <var data-x="">result</var> consisting of the number given by <var
+ data-x="">value</var> and the unit given by <var data-x="">unit</var>.</p></li>
+
+ </ol>
+
+ </li>
+
+ <li><p>Return the list <var data-x="">result</var>.</p></li>
+
+ </ol>
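+
+  A non-normative Python sketch, using str.split plus a strip of space characters as a stand-in
+  for the "split a string on commas" algorithm:
+
+  ```python
+  ASCII_DIGITS = "0123456789"
+  SPACE_CHARACTERS = "\t\n\f\r "
+
+  def parse_list_of_dimensions(raw_input):
+      """Returns a list of (number, unit) pairs; unit is "percentage",
+      "relative", or "absolute"."""
+      if raw_input.endswith(","):
+          raw_input = raw_input[:-1]
+      result = []
+      for token in (t.strip(SPACE_CHARACTERS) for t in raw_input.split(",")):
+          pos, end = 0, len(token)
+          value, unit = 0.0, "absolute"
+          if pos == end:
+              result.append((value, "relative"))
+              continue
+          if token[pos] in ASCII_DIGITS:
+              start = pos
+              while pos < end and token[pos] in ASCII_DIGITS:
+                  pos += 1
+              value += int(token[start:pos])
+          if pos < end and token[pos] == ".":
+              pos += 1
+              start = pos
+              while pos < end and token[pos] in SPACE_CHARACTERS + ASCII_DIGITS:
+                  pos += 1
+              s = "".join(ch for ch in token[start:pos] if ch in ASCII_DIGITS)
+              if s:
+                  value += int(s) / 10 ** len(s)
+          while pos < end and token[pos] in SPACE_CHARACTERS:
+              pos += 1
+          if pos < end and token[pos] == "%":
+              unit = "percentage"
+          elif pos < end and token[pos] == "*":
+              unit = "relative"
+          result.append((value, unit))
+      return result
+
+  assert parse_list_of_dimensions("1*,3,50%") == [
+      (1.0, "relative"), (3.0, "absolute"), (50.0, "percentage")]
+  ```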
+
+ </div>
+
+
+ <h4>Dates and times</h4>
+
+ <p>In the algorithms below, the <dfn>number of days in month <var data-x="">month</var> of year
+ <var data-x="">year</var></dfn> is: <em>31</em> if <var data-x="">month</var> is 1, 3, 5, 7, 8,
+ 10, or 12; <em>30</em> if <var data-x="">month</var> is 4, 6, 9, or 11; <em>29</em> if <var
+ data-x="">month</var> is 2 and <var data-x="">year</var> is a number divisible by 400, or if <var
+ data-x="">year</var> is a number divisible by 4 but not by 100; and <em>28</em> otherwise. This
+ takes into account leap years in the Gregorian calendar. <a
+ href="#refsGREGORIAN">[GREGORIAN]</a></p>
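+
+  As a non-normative illustration, this is just the usual Gregorian leap-year rule (for positive
+  years it should agree with Python's calendar.monthrange):
+
+  ```python
+  def days_in_month(month, year):
+      if month in (1, 3, 5, 7, 8, 10, 12):
+          return 31
+      if month in (4, 6, 9, 11):
+          return 30
+      # February: 29 days in leap years.
+      if year % 400 == 0 or (year % 4 == 0 and year % 100 != 0):
+          return 29
+      return 28
+
+  assert days_in_month(2, 2000) == 29
+  assert days_in_month(2, 1900) == 28
+  assert days_in_month(11, 2024) == 30
+  ```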
+
+ <p>When <span>ASCII digits</span> are used in the date and time syntaxes defined in this section,
+ they express numbers in base ten.</p>
+
+ <div class="nodev">
+
+ <p class="note">While the formats described here are intended to be subsets of the corresponding
+ ISO8601 formats, this specification defines parsing rules in much more detail than ISO8601.
+ Implementors are therefore encouraged to carefully examine any date parsing libraries before using
+ them to implement the parsing rules described below; ISO8601 libraries might not parse dates and
+ times in exactly the same manner. <a href="#refsISO8601">[ISO8601]</a></p>
+
+ </div>
+
+ <p>Where this specification refers to the <dfn>proleptic Gregorian calendar</dfn>, it means the
+ modern Gregorian calendar, extrapolated backwards to year 1. A date in the <span>proleptic
+ Gregorian calendar</span>, sometimes explicitly referred to as a <dfn>proleptic-Gregorian
+ date</dfn>, is one that is described using that calendar even if that calendar was not in use at
+ the time (or place) in question. <a href="#refsGREGORIAN">[GREGORIAN]</a></p>
+
+ <p class="note">The use of the Gregorian calendar as the wire format in this specification is an
+ arbitrary choice resulting from the cultural biases of those involved in the decision. See also
+ the section discussing <a href="#input-author-notes">date, time, and number formats</a> in forms
+  <span class="nodev">(for authors), <a href="#input-impl-notes">implementation notes regarding
+ localization of form controls</a>,</span> and the <code>time</code> element.</p>
+
+
+ <h5>Months</h5>
+
+ <p>A <dfn data-x="concept-month">month</dfn> consists of a specific <span>proleptic-Gregorian
+ date</span> with no time-zone information and no date information beyond a year and a month. <a
+ href="#refsGREGORIAN">[GREGORIAN]</a></p>
+
+ <p>A string is a <dfn>valid month string</dfn> representing a year <var data-x="">year</var> and
+ month <var data-x="">month</var> if it consists of the following components in the given order:</p>
+
+ <ol>
+
+ <li>Four or more <span>ASCII digits</span>, representing <var data-x="">year</var>, where <var
+ data-x="">year</var>&nbsp;&gt;&nbsp;0</li>
+
+ <li>A U+002D HYPHEN-MINUS character (-)</li>
+
+ <li>Two <span>ASCII digits</span>, representing the month <var data-x="">month</var>, in the range
+ 1&nbsp;&le;&nbsp;<var data-x="">month</var>&nbsp;&le;&nbsp;12</li>
+
+ </ol>
+
+ <div class="nodev">
+
+ <p>The rules to <dfn>parse a month string</dfn> are as follows. This will return either a year and
+ month, or nothing. If at any point the algorithm says that it "fails", this means that it is
+ aborted at that point and returns nothing.</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">input</var> be the string being parsed.</p></li>
+
+ <li><p>Let <var data-x="">position</var> be a pointer into <var data-x="">input</var>, initially
+ pointing at the start of the string.</p></li>
+
+ <li><p><span>Parse a month component</span> to obtain <var data-x="">year</var> and <var
+ data-x="">month</var>. If this returns nothing, then fail.</p>
+
+ <li><p>If <var data-x="">position</var> is <em>not</em> beyond the
+ end of <var data-x="">input</var>, then fail.</p></li>
+
+ <li><p>Return <var data-x="">year</var> and <var data-x="">month</var>.</p></li>
+
+ </ol>
+
+ <p>The rules to <dfn>parse a month component</dfn>, given an <var data-x="">input</var> string and
+ a <var data-x="">position</var>, are as follows. This will return either a year and a month, or
+ nothing. If at any point the algorithm says that it "fails", this means that it is aborted at that
+ point and returns nothing.</p>
+
+ <ol>
+
+ <li><p><span>Collect a sequence of characters</span> that are <span>ASCII digits</span>. If the
+ collected sequence is not at least four characters long, then fail. Otherwise, interpret the
+ resulting sequence as a base-ten integer. Let that number be the <var
+ data-x="">year</var>.</p></li>
+
+ <li><p>If <var data-x="">year</var> is not a number greater than zero, then fail.</p></li>
+
+ <li><p>If <var data-x="">position</var> is beyond the end of <var data-x="">input</var> or if the
+ character at <var data-x="">position</var> is not a U+002D HYPHEN-MINUS character, then fail.
+ Otherwise, move <var data-x="">position</var> forwards one character.</p></li>
+
+ <li><p><span>Collect a sequence of characters</span> that are <span>ASCII digits</span>. If the
+ collected sequence is not exactly two characters long, then fail. Otherwise, interpret the
+ resulting sequence as a base-ten integer. Let that number be the <var
+ data-x="">month</var>.</p></li>
+
+ <li><p>If <var data-x="">month</var> is not a number in the range 1&nbsp;&le;&nbsp;<var
+ data-x="">month</var>&nbsp;&le;&nbsp;12, then fail.</p></li>
+
+ <li><p>Return <var data-x="">year</var> and <var data-x="">month</var>.</p></li>
+
+ </ol>
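+
+  A non-normative Python sketch of the two algorithms above, with None standing in for "fails" and
+  the final position returned explicitly so that the component form can be reused by the caller:
+
+  ```python
+  ASCII_DIGITS = "0123456789"
+
+  def collect_digits(input, position):
+      start = position
+      while position < len(input) and input[position] in ASCII_DIGITS:
+          position += 1
+      return input[start:position], position
+
+  def parse_month_component(input, position):
+      digits, position = collect_digits(input, position)      # year: four or more digits
+      if len(digits) < 4 or int(digits) == 0:
+          return None
+      year = int(digits)
+      if position >= len(input) or input[position] != "-":
+          return None
+      digits, position = collect_digits(input, position + 1)  # month: exactly two digits
+      if len(digits) != 2 or not 1 <= int(digits) <= 12:
+          return None
+      return year, int(digits), position
+
+  def parse_month_string(input):
+      parsed = parse_month_component(input, 0)
+      if parsed is None or parsed[2] != len(input):   # the whole string must be consumed
+          return None
+      return parsed[0], parsed[1]
+
+  assert parse_month_string("2024-02") == (2024, 2)
+  assert parse_month_string("2024-13") is None
+  ```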
+
+ </div>
+
+
+ <h5>Dates</h5>
+
+ <p>A <dfn data-x="concept-date">date</dfn> consists of a specific <span>proleptic-Gregorian
+ date</span> with no time-zone information, consisting of a year, a month, and a day. <a
+ href="#refsGREGORIAN">[GREGORIAN]</a></p>
+
+ <p>A string is a <dfn>valid date string</dfn> representing a year <var data-x="">year</var>, month
+ <var data-x="">month</var>, and day <var data-x="">day</var> if it consists of the following
+ components in the given order:</p>
+
+ <ol>
+
+ <li>A <span>valid month string</span>, representing <var data-x="">year</var> and <var
+ data-x="">month</var></li>
+
+ <li>A U+002D HYPHEN-MINUS character (-)</li>
+
+ <li>Two <span>ASCII digits</span>, representing <var data-x="">day</var>, in the range
+ 1&nbsp;&le;&nbsp;<var data-x="">day</var>&nbsp;&le;&nbsp;<var data-x="">maxday</var> where <var
+ data-x="">maxday</var> is the <span data-x="number of days in month month of year year">number of
+ days in the month <var data-x="">month</var> and year <var data-x="">year</var></span></li>
+
+ </ol>
+
+ <div class="nodev">
+
+ <p>The rules to <dfn>parse a date string</dfn> are as follows. This will return either a date, or
+ nothing. If at any point the algorithm says that it "fails", this means that it is aborted at that
+ point and returns nothing.</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">input</var> be the string being parsed.</p></li>
+
+ <li><p>Let <var data-x="">position</var> be a pointer into <var data-x="">input</var>, initially
+ pointing at the start of the string.</p></li>
+
+ <li><p><span>Parse a date component</span> to obtain <var data-x="">year</var>, <var
+ data-x="">month</var>, and <var data-x="">day</var>. If this returns nothing, then fail.</p>
+
+ <li><p>If <var data-x="">position</var> is <em>not</em> beyond the end of <var
+ data-x="">input</var>, then fail.</p></li>
+
+ <li><p>Let <var data-x="">date</var> be the date with year <var data-x="">year</var>, month <var
+ data-x="">month</var>, and day <var data-x="">day</var>.</p></li>
+
+ <li><p>Return <var data-x="">date</var>.</p></li>
+
+ </ol>
+
+ <p>The rules to <dfn>parse a date component</dfn>, given an <var data-x="">input</var> string and a
+ <var data-x="">position</var>, are as follows. This will return either a year, a month, and a day,
+ or nothing. If at any point the algorithm says that it "fails", this means that it is aborted at
+ that point and returns nothing.</p>
+
+ <ol>
+
+ <li><p><span>Parse a month component</span> to obtain <var data-x="">year</var> and <var
+ data-x="">month</var>. If this returns nothing, then fail.</li>
+
+ <li><p>Let <var data-x="">maxday</var> be the <span>number of days in month <var
+ data-x="">month</var> of year <var data-x="">year</var></span>.</p></li>
+
+ <li><p>If <var data-x="">position</var> is beyond the end of <var data-x="">input</var> or if the
+ character at <var data-x="">position</var> is not a U+002D HYPHEN-MINUS character, then fail.
+ Otherwise, move <var data-x="">position</var> forwards one character.</p></li>
+
+ <li><p><span>Collect a sequence of characters</span> that are <span>ASCII digits</span>. If the
+ collected sequence is not exactly two characters long, then fail. Otherwise, interpret the
+ resulting sequence as a base-ten integer. Let that number be the <var
+ data-x="">day</var>.</p></li>
+
+ <li><p>If <var data-x="">day</var> is not a number in the range 1&nbsp;&le;&nbsp;<var
+ data-x="">day</var>&nbsp;&le;&nbsp;<var data-x="">maxday</var>, then fail.</li>
+
+ <li><p>Return <var data-x="">year</var>, <var data-x="">month</var>, and <var
+ data-x="">day</var>.</p></li>
+
+ </ol>
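+
+  Since the date component only appends a day to the month component, a compact, non-normative
+  sketch of the string-level rules (a regular expression for the syntax plus the leap-year rule
+  above for maxday) is:
+
+  ```python
+  import re
+
+  def parse_date_string(input):
+      m = re.fullmatch(r"([0-9]{4,})-([0-9]{2})-([0-9]{2})", input)
+      if m is None:
+          return None
+      year, month, day = (int(g) for g in m.groups())
+      if year == 0 or not 1 <= month <= 12:
+          return None
+      if month in (1, 3, 5, 7, 8, 10, 12):
+          maxday = 31
+      elif month in (4, 6, 9, 11):
+          maxday = 30
+      elif year % 400 == 0 or (year % 4 == 0 and year % 100 != 0):
+          maxday = 29
+      else:
+          maxday = 28
+      if not 1 <= day <= maxday:
+          return None
+      return year, month, day
+
+  assert parse_date_string("2019-02-28") == (2019, 2, 28)
+  assert parse_date_string("2019-02-29") is None
+  ```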
+
+ </div>
+
+
+ <h5>Yearless dates</h5>
+
+ <p>A <dfn data-x="concept-yearless-date">yearless date</dfn> consists of a Gregorian month and a
+ day within that month, but with no associated year. <a href="#refsGREGORIAN">[GREGORIAN]</a></p>
+
+ <p>A string is a <dfn>valid yearless date string</dfn> representing a month <var
+ data-x="">month</var> and a day <var data-x="">day</var> if it consists of the following components
+ in the given order:</p>
+
+ <ol>
+
+ <li>Optionally, two U+002D HYPHEN-MINUS characters (-)</li>
+
+ <li>Two <span>ASCII digits</span>, representing the month <var data-x="">month</var>, in the range
+ 1&nbsp;&le;&nbsp;<var data-x="">month</var>&nbsp;&le;&nbsp;12</li>
+
+ <li>A U+002D HYPHEN-MINUS character (-)</li>
+
+ <li>Two <span>ASCII digits</span>, representing <var data-x="">day</var>, in the range
+ 1&nbsp;&le;&nbsp;<var data-x="">day</var>&nbsp;&le;&nbsp;<var data-x="">maxday</var> where <var
+ data-x="">maxday</var> is the <span data-x="number of days in month month of year year">number of
+ days</span> in the month <var data-x="">month</var> and any arbitrary leap year (e.g. 4 or
+ 2000)</li>
+
+ </ol>
+
+ <p class="note">In other words, if the <var data-x="">month</var> is "<code data-x="">02</code>",
+ meaning February, then the day can be 29, as if the year was a leap year.</p>
+
+ <div class="nodev">
+
+ <p>The rules to <dfn>parse a yearless date string</dfn> are as follows. This will return either a
+ month and a day, or nothing. If at any point the algorithm says that it "fails", this means that
+ it is aborted at that point and returns nothing.</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">input</var> be the string being parsed.</p></li>
+
+ <li><p>Let <var data-x="">position</var> be a pointer into <var data-x="">input</var>, initially
+ pointing at the start of the string.</p></li>
+
+ <li><p><span>Parse a yearless date component</span> to obtain <var data-x="">month</var> and <var
+ data-x="">day</var>. If this returns nothing, then fail.</p>
+
+ <li><p>If <var data-x="">position</var> is <em>not</em> beyond the end of <var
+ data-x="">input</var>, then fail.</p></li>
+
+ <li><p>Return <var data-x="">month</var> and <var data-x="">day</var>.</p></li>
+
+ </ol>
+
+ <p>The rules to <dfn>parse a yearless date component</dfn>, given an <var data-x="">input</var>
+ string and a <var data-x="">position</var>, are as follows. This will return either a month and a
+ day, or nothing. If at any point the algorithm says that it "fails", this means that it is aborted
+ at that point and returns nothing.</p>
+
+ <ol>
+
+ <li><p><span>Collect a sequence of characters</span> that are U+002D HYPHEN-MINUS characters (-).
+ If the collected sequence is not exactly zero or two characters long, then fail.</p></li>
+
+ <li><p><span>Collect a sequence of characters</span> that are <span>ASCII digits</span>. If the
+ collected sequence is not exactly two characters long, then fail. Otherwise, interpret the
+ resulting sequence as a base-ten integer. Let that number be the <var
+ data-x="">month</var>.</p></li>
+
+ <li><p>If <var data-x="">month</var> is not a number in the range 1&nbsp;&le;&nbsp;<var
+ data-x="">month</var>&nbsp;&le;&nbsp;12, then fail.</p></li>
+
+ <li><p>Let <var data-x="">maxday</var> be the <span data-x="number of days in month month of year
+ year">number of days</span> in month <var data-x="">month</var> of any arbitrary leap year (e.g. 4
+ or 2000).</p></li>
+
+ <li><p>If <var data-x="">position</var> is beyond the end of <var data-x="">input</var> or if the
+ character at <var data-x="">position</var> is not a U+002D HYPHEN-MINUS character, then fail.
+ Otherwise, move <var data-x="">position</var> forwards one character.</p></li>
+
+ <li><p><span>Collect a sequence of characters</span> that are <span>ASCII digits</span>. If the
+ collected sequence is not exactly two characters long, then fail. Otherwise, interpret the
+ resulting sequence as a base-ten integer. Let that number be the <var
+ data-x="">day</var>.</p></li>
+
+ <li><p>If <var data-x="">day</var> is not a number in the range 1&nbsp;&le;&nbsp;<var
+ data-x="">day</var>&nbsp;&le;&nbsp;<var data-x="">maxday</var>, then fail.</li>
+
+ <li><p>Return <var data-x="">month</var> and <var data-x="">day</var>.</p></li>
+
+ </ol>
+
+ </div>
+
+
+ <h5>Times</h5>
+
+ <p>A <dfn data-x="concept-time">time</dfn> consists of a specific time with no time-zone
+ information, consisting of an hour, a minute, a second, and a fraction of a second.</p>
+
+ <p>A string is a <dfn>valid time string</dfn> representing an hour <var data-x="">hour</var>, a
+ minute <var data-x="">minute</var>, and a second <var data-x="">second</var> if it consists of the
+ following components in the given order:</p>
+
+ <ol>
+
+ <li>Two <span>ASCII digits</span>, representing <var data-x="">hour</var>, in the range
+ 0&nbsp;&le;&nbsp;<var data-x="">hour</var>&nbsp;&le;&nbsp;23</li>
+
+ <li>A U+003A COLON character (:)</li>
+
+ <li>Two <span>ASCII digits</span>, representing <var data-x="">minute</var>, in the range
+ 0&nbsp;&le;&nbsp;<var data-x="">minute</var>&nbsp;&le;&nbsp;59</li>
+
+ <li>If <var data-x="">second</var> is non-zero, or optionally if <var data-x="">second</var> is
+ zero:
+
+ <ol>
+
+ <li>A U+003A COLON character (:)</li>
+
+ <li>Two <span>ASCII digits</span>, representing the integer part of <var data-x="">second</var>,
+ in the range 0&nbsp;&le;&nbsp;<var data-x="">s</var>&nbsp;&le;&nbsp;59</li>
+
+ <li>If <var data-x="">second</var> is not an integer, or optionally if <var
+ data-x="">second</var> is an integer:
+
+ <ol>
+
+    <li>A U+002E FULL STOP character (.)</li>
+
+ <li>One, two, or three <span>ASCII digits</span>, representing the fractional part of <var
+ data-x="">second</var></li>
+
+ </ol>
+
+ </li>
+
+ </ol>
+
+ </li>
+
+ </ol>
+
+ <p class="note">The <var data-x="">second</var> component cannot be 60 or 61; leap seconds cannot
+ be represented.</p>
+
+ <div class="nodev">
+
+ <p>The rules to <dfn>parse a time string</dfn> are as follows. This will return either a time, or
+ nothing. If at any point the algorithm says that it "fails", this means that it is aborted at that
+ point and returns nothing.</p>
+
+ <ol>
+
+ <li><p>Let <var data-x="">input</var> be the string being parsed.</p></li>
+
+ <li><p>Let <var data-x="">position</var> be a pointer into <var data-x="">input</var>, initially
+ pointing at the start of the string.</p></li>
+
+ <li><p><span>Parse a time component</span> to obtain <var data-x="">hour</var>, <var
+ data-x="">minute</var>, and <var data-x="">second</var>. If this returns nothing, then fail.</p>
+
+ <li><p>If <var data-x="">position</var> is <em>not</em> beyond the end of <var
+ data-x="">input</var>, then fail.</p></li>
+
+ <li><p>Let <var data-x="">time</var> be the time with hour <var data-x="">hour</var>, minute <var
+ data-x="">minute</var>, and second <var data-x="">second</var>.</p></li>
+
+ <li><p>Return <var data-x="">time</var>.</p></li>
+
+ </ol>
+
+ <p>The rules to <dfn>parse a time component</dfn>, given an <var data-x="">input</var> string and a
+ <var data-x="">position</var>, are as follows. This will return either an hour, a minute, and a
+ second, or nothing. If at any point the algorithm says that it "fails", this means that it is
+ aborted at that point and returns nothing.</p>
+
+ <ol>
+
+ <li><p><span>Collect a sequence of characters</span> that are <span>ASCII digits</span>. If the
+ collected sequence is not exactly two characters long, then fail. Otherwise, interpret the
+ resulting sequence as a base-ten integer. Let that number be the <var
+ data-x="">hour</var>.</p></li>
+
+ <li>If <var data-x="">hour</var> is not a number in the range 0&nbsp;&le;&nbsp;<var
+ data-x="">hour</var>&nbsp;&le;&nbsp;23, then fail.</li>
+
+ <li><p>If <var data-x="">position</var> is beyond the end of <var data-x="">input</var> or if the
+ character at <var data-x="">position</var> is not a U+003A COLON character, then fail. Otherwise,
+ move <var data-x="">position</var> forwards one character.</p></li>
+
+ <li><p><span>Collect a sequence of characters</span> that are <span>ASCII digits</span>. If the
+ collected sequence is not exactly two characters long, then fail. Otherwise, interpret the
+ resulting sequence as a base-ten integer. Let that number be the <var
+ data-x="">minute</var>.</p></li>
+
+ <li>If <var data-x="">minute</var> is not a number in the range 0&nbsp;&le;&nbsp;<var
+ data-x="">minute</var>&nbsp;&le;&nbsp;59, then fail.</li>
+
+ <li><p>Let <var data-x="">second</var> be a string with the value "0".</p></li>
+
+ <li>
+
+ <p>If <var data-x="">position</var> is not beyond the end of <var data-x="">input</var> and the
+ character at <var data-x="">position</var> is a U+003A COLON, then run these substeps:</p>
+
+ <ol>
+
+ <li><p>Advance <var data-x="">position</var> to the next character in <var
+ data-x="">input</var>.</p></li>
+
+ <li><p>If <var data-x="">position</var> is beyond the end of <var data-x="">input</var>, or at
+ the last character in <var data-x="">input</var>, or if the next <em>two</em> characters in <var
+ data-x="">input</var> starting at <var data-x="">position</var> are not both <span>ASCII
+ digits</span>, then fail.</p></li>
+
+ <li><p><span>Collect a sequence of characters</span> that are either <span>ASCII digits</span>
+ or U+002E FULL STOP characters. If the collected sequence is three characters long, or if it is
+ longer than three characters long and the third character is not a U+002E FULL STOP character,
+ or if it has more than one U+002E FULL STOP character, then fail. Otherwise, let the collected
+ string be <var data-x="">second</var> instead of its previous value.</p></li>
+
+ </ol>
+
+ </li>
+
+ <li><p>Interpret <var data-x="">second</var> as a base-ten number (possibly with a fractional
+ part). Let <var data-x="">second</var> be that number instead of the string version.</p></li>
+
+ <li><p>If <var data-x="">second</var> is not a number in the range 0&nbsp;&le;&nbsp;<var
+ data-x="">second</var>&nbsp;&lt;&nbsp;60, then fail.</p></li>
+
+ <li><p>Return <var data-x="">hour</var>, <var data-x="">minute</var>, and <var
+ data-x="">second</var>.</p></li>
+
+ </ol>
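+
+  A non-normative Python sketch of the two algorithms above, with None standing in for "fails":
+
+  ```python
+  ASCII_DIGITS = "0123456789"
+
+  def parse_time_component(input, position):
+      def collect(chars):
+          nonlocal position
+          start = position
+          while position < len(input) and input[position] in chars:
+              position += 1
+          return input[start:position]
+
+      hour = collect(ASCII_DIGITS)
+      if len(hour) != 2 or not 0 <= int(hour) <= 23:
+          return None
+      if position == len(input) or input[position] != ":":
+          return None
+      position += 1
+      minute = collect(ASCII_DIGITS)
+      if len(minute) != 2 or not 0 <= int(minute) <= 59:
+          return None
+      second = "0"
+      if position < len(input) and input[position] == ":":
+          position += 1
+          nxt = input[position:position + 2]       # the next two characters
+          if len(nxt) != 2 or not all(c in ASCII_DIGITS for c in nxt):
+              return None
+          second = collect(ASCII_DIGITS + ".")
+          if (len(second) == 3
+                  or (len(second) > 3 and second[2] != ".")
+                  or second.count(".") > 1):
+              return None
+      second = float(second)
+      if not 0 <= second < 60:
+          return None
+      return int(hour), int(minute), second, position
+
+  def parse_time_string(input):
+      parsed = parse_time_component(input, 0)
+      if parsed is None or parsed[3] != len(input):
+          return None
+      return parsed[:3]
+
+  assert parse_time_string("14:05") == (14, 5, 0.0)
+  assert parse_time_string("14:05:30.5") == (14, 5, 30.5)
+  assert parse_time_string("14:05:60") is None
+  ```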
+
+ </div>
+
+
+ <h5>Local dates and times</h5>
+
+ <p>A <dfn data-x="concept-datetime-local">local date and time</dfn> consists of a specific
+ <span>proleptic-Gregorian date</span>, consisting of a year, a month, and a day, and a time,
+ consisting of an hour, a minute, a second, and a fraction of a second, but expressed without a
+ time zone. <a href="#refsGREGORIAN">[GREGORIAN]</a></p>
+
+ <p>A string is a <dfn>valid local date and time string</dfn> representing a date and time if it
+ consists of the following components in the given order:</p>
+
+ <ol>
+
+ <li>A <span>valid date string</span> representing the date</li>
+
+ <li>A U+0054 LATIN CAPITAL LETTER T character (T) or a U+0020 SPACE character</li>
+
+ <li>A <span>valid time string</span> representing the time</li>
+
+ </ol>
+
+ <p>A string is a <dfn>valid normalised local date and time string</dfn> representing a date and
+ time if it consists of the following components in the given order:</p>
+
+ <ol>
+
+ <li>A <span>valid date string</span> representing the date</li>
+
+ <li>A U+0054 LATIN CAPITAL LETTER T character (T)</li>
+
+ <li>A <span>valid time string</span> representing the time, expressed as the shortest possible
+ string for the given time (e.g. omitting the seconds component entirely if the given time is zero
+ seconds past the minute)</li>
+
+ </ol>
+
+ <div class="nodev">
+
+ <p>The rules to <dfn>parse a local date and time string</dfn> are as follows. This will return
+ either a date and time, or nothing. If at any point the algorithm says that it "fails", this means
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/LICENSE.md b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/LICENSE.md
new file mode 100644
index 0000000000..ad4858c874
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/LICENSE.md
@@ -0,0 +1,11 @@
+# The 3-Clause BSD License
+
+Copyright 2019 web-platform-tests contributors
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/README.md b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/README.md
new file mode 100644
index 0000000000..61b656941b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/README.md
@@ -0,0 +1,52 @@
+This directory contains a number of tests from
+[web-platform-tests](https://github.com/web-platform-tests/wpt) at
+77585330fd7da01392aec01cf5fed7aa22597180, chosen from the files processed by the manifest script.
+
+These files are split into two directories:
+
+ * `weighted`, a set of 15 tests curated from a random weighted sample of 30, weighted by parse
+   time as of html5lib 1.0.1. The curation was performed primarily because many of the slowest
+   files are very similar and therefore provide little extra coverage, while it is relatively
+   probable that both will be chosen. This provides a set of files which significantly contribute
+   to the manifest generation time.
+
+ * `random`, a further set of 15 tests, this time a random unweighted sample of 15. This provides a
+ set of files much closer to the average file in WPT.
+
+The files are sourced from the following:
+
+`weighted`:
+
+ * `css/compositing/test-plan/test-plan.src.html`
+ * `css/css-flexbox/align-content-wrap-002.html`
+ * `css/css-grid/grid-definition/grid-auto-fill-rows-001.html`
+ * `css/css-grid/masonry.tentative/masonry-item-placement-006.html`
+ * `css/css-images/image-orientation/reference/image-orientation-from-image-content-images-ref.html`
+ * `css/css-position/position-sticky-table-th-bottom-ref.html`
+ * `css/css-text/white-space/pre-float-001.html`
+ * `css/css-ui/resize-004.html`
+ * `css/css-will-change/will-change-abspos-cb-001.html`
+ * `css/filter-effects/filter-turbulence-invalid-001.html`
+ * `css/vendor-imports/mozilla/mozilla-central-reftests/css21/pagination/moz-css21-table-page-break-inside-avoid-2.html`
+ * `encoding/legacy-mb-tchinese/big5/big5_chars_extra.html`
+ * `html/canvas/element/compositing/2d.composite.image.destination-over.html`
+ * `html/semantics/embedded-content/the-canvas-element/toBlob.png.html`
+ * `referrer-policy/4K-1/gen/top.http-rp/unsafe-url/fetch.http.html`
+
+`random`:
+
+ * `content-security-policy/frame-ancestors/frame-ancestors-self-allow.html`
+ * `css/css-backgrounds/reference/background-origin-007-ref.html`
+ * `css/css-fonts/idlharness.html`
+ * `css/css-position/static-position/htb-ltr-ltr.html`
+ * `css/vendor-imports/mozilla/mozilla-central-reftests/css21/pagination/moz-css21-float-page-break-inside-avoid-6.html`
+ * `css/vendor-imports/mozilla/mozilla-central-reftests/shapes1/shape-outside-content-box-002.html`
+ * `encoding/legacy-mb-korean/euc-kr/euckr-encode-form.html`
+ * `html/browsers/browsing-the-web/unloading-documents/beforeunload-on-history-back-1.html`
+ * `html/browsers/the-window-object/apis-for-creating-and-navigating-browsing-contexts-by-name/non_automated/001.html`
+ * `html/editing/dnd/overlay/heavy-styling-005.html`
+ * `html/rendering/non-replaced-elements/lists/li-type-unsupported-ref.html`
+ * `html/semantics/grouping-content/the-dl-element/grouping-dl.html`
+ * `trusted-types/worker-constructor.https.html`
+ * `webvtt/rendering/cues-with-video/processing-model/selectors/cue/background_shorthand_css_relative_url.html`
+ * `IndexedDB/idbindex_get8.htm`
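
(Aside, not part of the patch: the sampling procedure the README describes can be sketched as follows. The parse times and the value of `k` here are made up for illustration; the real sample used timings of html5lib 1.0.1 over the full checkout, with k=30 for the weighted draw and k=15 for the unweighted one.)

```python
import random

# Hypothetical parse times (seconds) for a few of the files listed above; the
# real weights came from timing html5lib 1.0.1 over the whole WPT checkout.
parse_times = {
    "css/compositing/test-plan/test-plan.src.html": 4.2,
    "css/css-flexbox/align-content-wrap-002.html": 1.1,
    "css/css-ui/resize-004.html": 0.4,
    "IndexedDB/idbindex_get8.htm": 0.2,
}

def weighted_sample(times, k):
    """Draw k distinct paths, each with probability proportional to its parse time."""
    pool = dict(times)
    chosen = []
    for _ in range(k):
        paths = list(pool)
        pick = random.choices(paths, weights=[pool[p] for p in paths], k=1)[0]
        chosen.append(pick)
        del pool[pick]  # without replacement
    return chosen

# The README used k=30 for the weighted draw (then curated down to 15) and an
# unweighted k=15 for the `random` set; k=2 here just keeps the demo small.
weighted = weighted_sample(parse_times, k=2)
unweighted = random.sample(list(parse_times), k=2)
print(weighted, unweighted)
```
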
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/001.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/001.html
new file mode 100644
index 0000000000..7b0f21ec04
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/001.html
@@ -0,0 +1,3 @@
+<!doctype html>
+<title>Accessing named windows from outside the unit of related browsing contexts</title>
+<a href="001-1.html" target="test_name">Click here</a>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/background-origin-007-ref.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/background-origin-007-ref.html
new file mode 100644
index 0000000000..d3a1d05328
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/background-origin-007-ref.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<title>CSS Backgrounds and Borders Reference</title>
+<link rel="author" title="Intel" href="http://www.intel.com">
+<style>
+ div {
+ background-color: green;
+ height: 55px;
+ left: 5px;
+ position: relative;
+ top: 5px;
+ width: 55px;
+ }
+</style>
+<body>
+ <p>Test passes if there is a filled green square and <strong>no red</strong>.</p>
+ <div></div>
+</body>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/background_shorthand_css_relative_url.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/background_shorthand_css_relative_url.html
new file mode 100644
index 0000000000..2397fec005
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/background_shorthand_css_relative_url.html
@@ -0,0 +1,24 @@
+<!DOCTYPE html>
+<html class="reftest-wait">
+<title>WebVTT rendering, ::cue, background shorthand, background image URL with relative path from CSS file</title>
+<link rel="match" href="background_shorthand_css_relative_url-ref.html">
+<link rel="stylesheet" type="text/css" href="/fonts/ahem.css" />
+<style>
+html { overflow:hidden }
+body { margin:0 }
+::cue {
+ font-family: Ahem, sans-serif;
+ background: #0f0 url('../../media/background.gif') repeat-x top left;
+ color: green;
+}
+</style>
+<script src="/common/reftest-wait.js"></script>
+<video width="320" height="180" autoplay onplaying="this.onplaying = null; this.pause(); takeScreenshot();">
+ <source src="/media/white.webm" type="video/webm">
+ <source src="/media/white.mp4" type="video/mp4">
+ <track src="../../support/test.vtt">
+ <script>
+ document.getElementsByTagName('track')[0].track.mode = 'showing';
+ </script>
+</video>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/beforeunload-on-history-back-1.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/beforeunload-on-history-back-1.html
new file mode 100644
index 0000000000..4403cfa8e9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/beforeunload-on-history-back-1.html
@@ -0,0 +1,5 @@
+<!doctype html>
+001-1
+<script>
+addEventListener("beforeunload", function() {top.t.step(function() {top.beforeunload_fired = true})}, false);
+</script>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/euckr-encode-form.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/euckr-encode-form.html
new file mode 100644
index 0000000000..545f8ac93f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/euckr-encode-form.html
@@ -0,0 +1,52 @@
+<!DOCTYPE html>
+<html>
+<head>
+<meta charset="euc-kr"> <!-- test breaks if the server overrides this -->
+<title>EUC-KR encoding (form)</title>
+<meta name="timeout" content="long">
+<meta name="variant" content="?1-1000">
+<meta name="variant" content="?1001-2000">
+<meta name="variant" content="?2001-3000">
+<meta name="variant" content="?3001-4000">
+<meta name="variant" content="?4001-5000">
+<meta name="variant" content="?5001-6000">
+<meta name="variant" content="?6001-7000">
+<meta name="variant" content="?7001-8000">
+<meta name="variant" content="?8001-9000">
+<meta name="variant" content="?9001-10000">
+<meta name="variant" content="?10001-11000">
+<meta name="variant" content="?11001-12000">
+<meta name="variant" content="?12001-13000">
+<meta name="variant" content="?13001-14000">
+<meta name="variant" content="?14001-15000">
+<meta name="variant" content="?15001-16000">
+<meta name="variant" content="?16001-17000">
+<meta name="variant" content="?17001-last">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/common/subset-tests.js"></script>
+<script src="euckr_index.js"></script>
+<script src="euckr-encoder.js"></script>
+<link rel="author" title="Richard Ishida" href="mailto:ishida@w3.org">
+<link rel="help" href="https://encoding.spec.whatwg.org/#euc-kr">
+<meta name="assert" content="The browser produces the expected byte sequences for all characters in the euc-kr encoding after 0x9F when encoding bytes for a URL produced by a form, using the encoder steps in the specification.">
+<style>
+ iframe { display:none }
+ form { display:none }
+</style>
+</head>
+<body>
+<div id="log"></div>
+<script src="../../resources/ranges.js"></script>
+<script>
+var errors = false;
+var encoder = euckrEncoder;
+var ranges = rangesAll;
+var separator = ",";
+function expect(result, codepoint) {
+ return "%" + result.replace(/ /g, "%");
+}
+</script>
+<script src="../../resources/encode-form-common.js"></script>
+</body>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/frame-ancestors-self-allow.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/frame-ancestors-self-allow.html
new file mode 100644
index 0000000000..a8a295dfc4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/frame-ancestors-self-allow.html
@@ -0,0 +1,16 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="support/frame-ancestors-test.sub.js"></script>
+</head>
+<body>
+ <script>
+ test = async_test("A 'frame-ancestors' CSP directive with a value 'self' should allow rendering.");
+
+ sameOriginFrameShouldBeAllowed("'self'");
+ </script>
+</body>
+</html>
+
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/grouping-dl.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/grouping-dl.html
new file mode 100644
index 0000000000..2394d6a929
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/grouping-dl.html
@@ -0,0 +1,30 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>the dl element</title>
+ <link rel="author" title="dzenana" href="mailto:dzenana.trenutak@gmail.com">
+ <link rel="help" href="https://html.spec.whatwg.org/multipage/#the-dl-element">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script>
+ "use strict";
+
+ // check that prototype matches spec's DOM interface
+ test(function () {
+ var testElement = document.createElement("dl");
+ assert_equals(Object.getPrototypeOf(testElement), HTMLDListElement.prototype, "HTMLDListElement.prototype should be used for dl");
+ }, "The prototype for dl is HTMLDListElement.prototype");
+
+ // Not checking: effects of markup on defining groups and the name-pair values within those groups
+
+ </script>
+</head>
+<body>
+ <h1>Description</h1>
+ <p>This test validates the dl element.</p>
+
+ <div id="log"></div>
+
+</body>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/heavy-styling-005.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/heavy-styling-005.html
new file mode 100644
index 0000000000..2bbdb3cf73
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/heavy-styling-005.html
@@ -0,0 +1,15 @@
+<!DOCTYPe html>
+<meta charset='utf-8'>
+<title>drag and drop – feedback overlay for heavily styled elements – 005</title>
+<style>
+a {
+ display: block;
+ height: 200px;
+ width: 200px;
+ background-color: rgba(0,0,255,0.5);
+}
+</style>
+
+<p>Drag the blue box below downwards. The drag placeholder should resemble the blue box, including the text within it.</p>
+
+<a draggable="true" ondragstart="event.dataTransfer.effectAllowed ='copy'">TEST</a>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/htb-ltr-ltr.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/htb-ltr-ltr.html
new file mode 100644
index 0000000000..5a19c0e9cc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/htb-ltr-ltr.html
@@ -0,0 +1,74 @@
+<!DOCTYPE html>
+<link rel="help" href="https://drafts.csswg.org/css2/visudet.html#abs-non-replaced-width" />
+<link rel="match" href="htb-ref.html">
+<meta name="assert" content="This test checks the static position of an out of flow absolute positioned element, under various conditions." />
+<link rel="stylesheet" type="text/css" href="/fonts/ahem.css" />
+<style>
+.container {
+ position: relative;
+ background: green;
+ color: green;
+ font: 16px/1 Ahem;
+ border: solid black 3px;
+ width: 400px;
+ margin: 16px 0;
+}
+.red { color: red; }
+.cb { position: relative; }
+.rtl { direction: rtl; }
+.ltr { direction: ltr; }
+.inline { display: inline; }
+.abs { position: absolute; }
+
+.indent { text-indent: 20px; }
+* { text-indent: initial; }
+</style>
+
+There should be no red.
+<div class="container ltr">
+ XXX<span class="ltr">XX<div class="abs inline">XXXXX</div><span class="red">XXXXX</span></span>
+</div>
+
+<div class="container ltr indent">
+ XXX<span class="ltr">XX<div class="abs inline">XXXXX</div><span class="red">XXXXX</span></span>
+</div>
+
+<div class="container ltr">
+ XXX<span class="ltr">XX<div class="abs block">XXXXX</div><br><span class="red">XXXXX</span></span>
+</div>
+
+<div class="container ltr indent">
+ XXX<span class="ltr">XX<div class="abs block">XXXXX</div><br><span class="red">XXXXX</span></span>
+</div>
+
+<div class="container ltr">
+ XXX<span class="ltr cb">XX<div class="abs inline">XXXXX</div><span class="red">XXXXX</span></span>
+</div>
+
+<div class="container ltr indent">
+ XXX<span class="ltr cb">XX<div class="abs inline">XXXXX</div><span class="red">XXXXX</span></span>
+</div>
+
+<div class="container ltr">
+ XXX<span class="ltr cb">XX<div class="abs block">XXXXX</div><br><span class="red">XXXXX</span></span>
+</div>
+
+<div class="container ltr indent">
+ XXX<span class="ltr cb">XX<div class="abs block">XXXXX</div><br><span class="red">XXXXX</span></span>
+</div>
+
+<div class="container ltr">
+ <span class="cb">XXX<span class="ltr">XX<div class="abs inline">XXXXX</div><span class="red">XXXXX</span></span></span>
+</div>
+
+<div class="container ltr indent">
+ <span class="cb">XXX<span class="ltr">XX<div class="abs inline">XXXXX</div><span class="red">XXXXX</span></span></span>
+</div>
+
+<div class="container ltr">
+ <span class="cb">XXX<span class="ltr">XX<div class="abs block">XXXXX</div><br><span class="red">XXXXX</span></span></span>
+</div>
+
+<div class="container ltr indent">
+ <span class="cb">XXX<span class="ltr">XX<div class="abs block">XXXXX</div><br><span class="red">XXXXX</span></span></span>
+</div>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/idbindex_get8.htm b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/idbindex_get8.htm
new file mode 100644
index 0000000000..9bfc48422f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/idbindex_get8.htm
@@ -0,0 +1,27 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<title>IDBIndex.get() - throw InvalidStateError on index deleted by aborted upgrade</title>
+<link rel="help" href="https://w3c.github.io/IndexedDB/#dom-idb">
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script src=support.js></script>
+<div id="log"></div>
+<script>
+ var db,
+ t = async_test();
+
+ var open_rq = createdb(t);
+ open_rq.onupgradeneeded = function(e) {
+ db = e.target.result;
+ var store = db.createObjectStore("store", { keyPath: "key" });
+ var index = store.createIndex("index", "indexedProperty");
+ store.add({ key: 1, indexedProperty: "data" });
+
+ e.target.transaction.abort();
+
+ assert_throws_dom("InvalidStateError", function(){
+ index.get("data");
+ });
+ t.done();
+ }
+</script>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/idlharness.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/idlharness.html
new file mode 100644
index 0000000000..ecc601bcf6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/idlharness.html
@@ -0,0 +1,34 @@
+<!doctype html>
+<title>CSS Fonts IDL tests</title>
+<link rel="help" href="https://drafts.csswg.org/css-fonts-4/">
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/WebIDLParser.js"></script>
+<script src="/resources/idlharness.js"></script>
+
+<style>
+ div { display: block; }
+</style>
+<style>
+ @font-face {
+ font-family: fwf;
+ src: url(support/fonts/FontWithFancyFeatures.otf);
+ }
+</style>
+
+<script>
+ "use strict";
+
+ idl_test(
+ ["css-fonts"],
+ ["cssom"],
+ idl_array => {
+ idl_array.add_objects({
+ CSSRule: ['cssRule'],
+ CSSFontFaceRule: ['cssFontFaceRule'],
+ });
+ self.cssRule = document.styleSheets[0].cssRules[0];
+ self.cssFontFaceRule = document.styleSheets[1].cssRules[0];
+ }
+ );
+</script>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/li-type-unsupported-ref.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/li-type-unsupported-ref.html
new file mode 100644
index 0000000000..4fbc5aca97
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/li-type-unsupported-ref.html
@@ -0,0 +1,13 @@
+<!doctype html>
+<meta charset=utf-8>
+<title>li@type: unsupported types</title>
+<li>first item</li>
+<li>second item</li>
+<ol>
+ <li>first ordered item</li>
+ <li>second ordered item</li>
+</ol>
+<ul>
+ <li>first unordered item</li>
+ <li>second unordered item</li>
+</ul>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/moz-css21-float-page-break-inside-avoid-6.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/moz-css21-float-page-break-inside-avoid-6.html
new file mode 100644
index 0000000000..3cd0a5fb1c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/moz-css21-float-page-break-inside-avoid-6.html
@@ -0,0 +1,19 @@
+<!DOCTYPE html>
+<html lang="en-US" class="reftest-paged">
+<head>
+ <title>CSS Test: CSS 2.1 page-break-inside:avoid</title>
+ <link rel="author" title="Mats Palmgren" href="https://bugzilla.mozilla.org/show_bug.cgi?id=685012">
+ <link rel="help" href="http://www.w3.org/TR/CSS21/page.html#propdef-page-break-inside">
+ <link rel="match" href="moz-css21-float-page-break-inside-avoid-6-ref.html">
+ <meta name="flags" content="paged">
+<style type="text/css">
+@page { size:5in 3in; margin:0.5in; }
+html,body {
+ color:black; background-color:white; font-size:16px; padding:0; margin:0; height:100%;
+}
+p { height:60%; width:90%; margin:0; background-color:blue; border:1px solid black; }
+.test { page-break-inside:avoid; float:left; }
+</style>
+</head>
+<body><p>1</p><p class="test">2</p></body>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/shape-outside-content-box-002.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/shape-outside-content-box-002.html
new file mode 100644
index 0000000000..e2040763df
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/shape-outside-content-box-002.html
@@ -0,0 +1,66 @@
+<!DOCTYPE html>
+<!-- Any copyright is dedicated to the Public Domain.
+ - http://creativecommons.org/publicdomain/zero/1.0/ -->
+
+<html>
+ <title>CSS Shape Test: float right, content-box</title>
+ <link rel="author" title="Ting-Yu Lin" href="mailto:tlin@mozilla.com">
+ <link rel="author" title="Mozilla" href="http://www.mozilla.org/">
+ <link rel="help" href="https://drafts.csswg.org/css-shapes-1/#shapes-from-box-values">
+ <link rel="match" href="shape-outside-content-box-002-ref.html">
+ <meta name="flags" content="">
+ <meta name="assert" content="Test the boxes are wrapping around the right float shape defined by the content-box value.">
+ <style>
+ .container {
+ direction: rtl;
+ width: 175px;
+ line-height: 0;
+ }
+
+ .shape {
+ float: right;
+ shape-outside: content-box;
+ box-sizing: content-box;
+ height: 25px;
+ width: 25px;
+ padding: 25px;
+ border: 25px solid lightgreen;
+ margin: 25px;
+ background-color: orange;
+ }
+
+ .box {
+ display: inline-block;
+ width: 50px;
+ height: 25px;
+ background-color: blue;
+ }
+
+ .longbox {
+ display: inline-block;
+ width: 175px;
+ height: 25px;
+ background-color: blue;
+ }
+ </style>
+
+ <main class="container">
+ <div class="shape"></div>
+ <div class="shape"></div>
+ <div class="longbox"></div> <!-- Saturate the margin space -->
+ <div class="longbox"></div> <!-- Saturate the border space -->
+ <div class="longbox"></div> <!-- Saturate the padding space -->
+ <div class="box"></div>
+ <div class="longbox"></div> <!-- Saturate the padding space -->
+ <div class="longbox"></div> <!-- Saturate the border space -->
+ <div class="longbox"></div> <!-- Saturate the margin space -->
+
+ <div class="longbox"></div> <!-- Saturate the margin space -->
+ <div class="longbox"></div> <!-- Saturate the border space -->
+ <div class="longbox"></div> <!-- Saturate the padding space -->
+ <div class="box"></div>
+ <div class="longbox"></div> <!-- Saturate the padding space -->
+ <div class="longbox"></div> <!-- Saturate the border space -->
+ <div class="longbox"></div> <!-- Saturate the margin space -->
+ </main>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/worker-constructor.https.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/worker-constructor.https.html
new file mode 100644
index 0000000000..6e127b11a5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/random/worker-constructor.https.html
@@ -0,0 +1,86 @@
+<!doctype html>
+<html>
+<head>
+ <meta http-equiv="Content-Security-Policy" content="require-trusted-types-for 'script';">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<script>
+
+const test_url = "support/WorkerGlobalScope-importScripts.https.js"
+const trusted_url = trustedTypes.createPolicy("anythinggoes", {
+ createScriptURL: x => x}).createScriptURL(test_url);
+const default_url = "support/WorkerGlobalScope-importScripts.potato.js"
+
+async function service_worker(url) {
+ if (!('serviceWorker' in navigator)) return Promise.resolve();
+
+ const scope = 'support/some/scope/for/this/test';
+ const reg = await navigator.serviceWorker.getRegistration(scope);
+ if (reg) await reg.unregister();
+ return await navigator.serviceWorker.register(url, {scope});
+}
+
+// Most tests below don't need promises, but the ones related to
+// ServiceWorkers do. Since we can't mix promise and non-promise tests,
+// we'll just run the non-promise tests in the main function and return
+// an empty-resolved promise for those.
+// Since an active default policy will affect all subsequent DOM operations,
+// we're wrapping policy creation in a promise_test. Together, this will
+// force proper serialization of all tests.
+//
+// Generally, we don't actually care what the workers here do, we'll merely
+// check whether creation succeeds.
+
+promise_test(t => {
+ new Worker(trusted_url);
+ return Promise.resolve();
+}, "Create Worker via ScriptTestUrl");
+
+promise_test(t => {
+ new SharedWorker(trusted_url);
+ return Promise.resolve();
+}, "Create SharedWorker via ScriptTestUrl");
+
+promise_test(t => {
+ return service_worker(trusted_url);
+}, "Create ServiceWorker via ScriptTestUrl");
+
+promise_test(t => {
+ assert_throws_js(TypeError, () => new Worker(test_url));
+ return Promise.resolve();
+}, "Block Worker creation via string");
+
+promise_test(t => {
+ assert_throws_js(TypeError, () => new SharedWorker(test_url));
+ return Promise.resolve();
+}, "Block SharedWorker creation via string");
+
+promise_test(t => {
+ return promise_rejects_js(t, TypeError, service_worker(test_url));
+}, "Block ServiceWorker creation via String");
+
+// Tests with default policy.
+promise_test(t => {
+ trustedTypes.createPolicy("default", {
+ createScriptURL: s => s.replace("potato", "https") });
+ return Promise.resolve();
+}, "Setup default policy.");
+
+promise_test(t => {
+ new Worker(default_url);
+ return Promise.resolve();
+}, "Create Worker via string with default policy.");
+
+promise_test(t => {
+ new SharedWorker(default_url);
+ return Promise.resolve();
+}, "Create SharedWorker via string with default policy.");
+
+promise_test(t => {
+ return service_worker(default_url);
+}, "Create ServiceWorker via string with default policy.");
+
+</script>
+</body>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/2d.composite.image.destination-over.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/2d.composite.image.destination-over.html
new file mode 100644
index 0000000000..d742f84dfb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/2d.composite.image.destination-over.html
@@ -0,0 +1,33 @@
+<!DOCTYPE html>
+<!-- DO NOT EDIT! This test has been generated by /html/canvas/tools/gentest.py. -->
+<title>Canvas test: 2d.composite.image.destination-over</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/html/canvas/resources/canvas-tests.js"></script>
+<link rel="stylesheet" href="/html/canvas/resources/canvas-tests.css">
+<body class="show_output">
+
+<h1>2d.composite.image.destination-over</h1>
+<p class="desc"></p>
+
+
+<p class="output">Actual output:</p>
+<canvas id="c" class="output" width="100" height="50"><p class="fallback">FAIL (fallback content)</p></canvas>
+<p class="output expectedtext">Expected output:<p><img src="2d.composite.image.destination-over.png" class="output expected" id="expected" alt="">
+<ul id="d"></ul>
+<script>
+var t = async_test("");
+_addTest(function(canvas, ctx) {
+
+
+ctx.fillStyle = 'rgba(0, 255, 255, 0.5)';
+ctx.fillRect(0, 0, 100, 50);
+ctx.globalCompositeOperation = 'destination-over';
+ctx.drawImage(document.getElementById('yellow75.png'), 0, 0);
+_assertPixelApprox(canvas, 50,25, 109,255,146,223, "50,25", "109,255,146,223", 5);
+
+
+});
+</script>
+<img src="/images/yellow75.png" id="yellow75.png" class="resource">
+
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/align-content-wrap-002.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/align-content-wrap-002.html
new file mode 100644
index 0000000000..a15f7ea844
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/align-content-wrap-002.html
@@ -0,0 +1,108 @@
+<!DOCTYPE html>
+<link rel="help" href="https://drafts.csswg.org/css-flexbox/#propdef-align-content" />
+<title>css-flexbox: Tests align-content with flex-wrap: wrap</title>
+<style>
+.flex-horizontal {
+ width:600px;
+ display:flex;
+ height:100px;
+ background:gray;
+ margin-bottom:100px;
+}
+.flex-vertical {
+ width:100px;
+ display:flex;
+ flex-direction: column;
+ height:600px;
+ background:gray;
+ margin-top:200px;
+ margin-bottom:100px;
+}
+.item-horizontal {
+ width:150px;
+ background:yellow;
+ margin:10px;
+ flex:none;
+}
+.item-vertical {
+ height:150px;
+ background:yellow;
+ margin:10px;
+ flex:none;
+}
+.content1-horizontal {
+ width:100px;
+ height:150px;
+ background:red;
+}
+.content2-horizontal {
+ width:100px;
+ height:100px;
+ background:red;
+}
+.content3-horizontal {
+ width:100px;
+ height:50px;
+ background:red;
+}
+.content1-vertical {
+ width:150px;
+ height:100px;
+ background:red;
+}
+.content2-vertical {
+ width:100px;
+ height:100px;
+ background:red;
+}
+.content3-vertical {
+ width:50px;
+ height:100px;
+ background:red;
+}
+</style>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/check-layout-th.js"></script>
+<body onload="checkLayout('.flex-horizontal, .flex-vertical');">
+<div id=log></div>
+<p>Test for crbug.com/362848: Flex box word-wrap is not adhering to spec</p>
+<div class="flex-horizontal">
+ <div class="item-horizontal" data-expected-height="80"><div class="content1-horizontal"></div></div>
+ <div class="item-horizontal" data-expected-height="80"><div class="content2-horizontal"></div></div>
+ <div class="item-horizontal" data-expected-height="80"><div class="content3-horizontal"></div></div>
+</div>
+
+<div class="flex-horizontal" style="flex-wrap:wrap;">
+ <div class="item-horizontal" data-expected-height="150"><div class="content1-horizontal"></div></div>
+ <div class="item-horizontal" data-expected-height="150"><div class="content2-horizontal"></div></div>
+ <div class="item-horizontal" data-expected-height="150"><div class="content3-horizontal"></div></div>
+</div>
+
+<div class="flex-horizontal" style="flex-wrap:wrap;">
+ <div class="item-horizontal" data-expected-height="150"><div class="content1-horizontal"></div></div>
+ <div class="item-horizontal" data-expected-height="150"><div class="content2-horizontal"></div></div>
+ <div class="item-horizontal" data-expected-height="150"><div class="content3-horizontal"></div></div>
+ <div class="item-horizontal" data-expected-height="150"><div class="content1-horizontal"></div></div>
+ <div class="item-horizontal" data-expected-height="150"><div class="content2-horizontal"></div></div>
+</div>
+
+<div class="flex-vertical">
+ <div class="item-vertical" data-expected-width="80"><div class="content1-vertical"></div></div>
+ <div class="item-vertical" data-expected-width="80"><div class="content2-vertical"></div></div>
+ <div class="item-vertical" data-expected-width="80"><div class="content3-vertical"></div></div>
+</div>
+
+<div class="flex-vertical" style="flex-wrap:wrap;">
+ <div class="item-vertical" data-expected-width="150"><div class="content1-vertical"></div></div>
+ <div class="item-vertical" data-expected-width="150"><div class="content2-vertical"></div></div>
+ <div class="item-vertical" data-expected-width="150"><div class="content3-vertical"></div></div>
+</div>
+
+<div class="flex-vertical" style="flex-wrap:wrap;">
+ <div class="item-vertical" data-expected-width="150"><div class="content1-vertical"></div></div>
+ <div class="item-vertical" data-expected-width="150"><div class="content2-vertical"></div></div>
+ <div class="item-vertical" data-expected-width="150"><div class="content3-vertical"></div></div>
+ <div class="item-vertical" data-expected-width="150"><div class="content1-vertical"></div></div>
+ <div class="item-vertical" data-expected-width="150"><div class="content2-vertical"></div></div>
+</div>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/big5_chars_extra.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/big5_chars_extra.html
new file mode 100644
index 0000000000..5ea8e5740d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/big5_chars_extra.html
@@ -0,0 +1 @@
+<!doctype html><html><head><meta charset="big5"><title>big5 characters</title></head><body><span data-cp="C0" data-bytes="88 59">ˆY</span> <span data-cp="C1" data-bytes="88 57">ˆW</span> <span data-cp="C8" data-bytes="88 5D">ˆ]</span> <span data-cp="C9" data-bytes="88 5B">ˆ[</span> <span data-cp="CA" data-bytes="88 66">ˆf</span> <span data-cp="D2" data-bytes="88 61">ˆa</span> <span data-cp="D3" data-bytes="88 5F">ˆ_</span> <span data-cp="E0" data-bytes="88 6A">ˆj</span> <span data-cp="E1" data-bytes="88 68">ˆh</span> <span data-cp="E8" data-bytes="88 6F">ˆo</span> <span data-cp="E9" data-bytes="88 6D">ˆm</span> <span data-cp="EA" data-bytes="88 A7">ˆ§</span> <span data-cp="EC" data-bytes="88 73">ˆs</span> <span data-cp="ED" data-bytes="88 71">ˆq</span> <span data-cp="F2" data-bytes="88 77">ˆw</span> <span data-cp="F3" data-bytes="88 75">ˆu</span> <span data-cp="F9" data-bytes="88 7B">ˆ{</span> <span data-cp="FA" data-bytes="88 79">ˆy</span> <span data-cp="FC" data-bytes="88 A2">ˆ¢</span> <span data-cp="100" data-bytes="88 56">ˆV</span> <span data-cp="101" data-bytes="88 67">ˆg</span> <span data-cp="112" data-bytes="88 5A">ˆZ</span> <span data-cp="113" data-bytes="88 6C">ˆl</span> <span data-cp="11A" data-bytes="88 5C">ˆ\</span> <span data-cp="11B" data-bytes="88 6E">ˆn</span> <span data-cp="12B" data-bytes="88 70">ˆp</span> <span data-cp="14C" data-bytes="88 5E">ˆ^</span> <span data-cp="14D" data-bytes="88 74">ˆt</span> <span data-cp="16B" data-bytes="88 78">ˆx</span> <span data-cp="1CD" data-bytes="88 58">ˆX</span> <span data-cp="1CE" data-bytes="88 69">ˆi</span> <span data-cp="1D0" data-bytes="88 72">ˆr</span> <span data-cp="1D1" data-bytes="88 60">ˆ`</span> <span data-cp="1D2" data-bytes="88 76">ˆv</span> <span data-cp="1D4" data-bytes="88 7A">ˆz</span> <span data-cp="1D6" data-bytes="88 7C">ˆ|</span> <span data-cp="1D8" data-bytes="88 7D">ˆ}</span> <span data-cp="1DA" data-bytes="88 7E">ˆ~</span> <span data-cp="1DC" data-bytes="88 A1">ˆ¡</span> <span data-cp="251" data-bytes="88 6B">ˆk</span> <span data-cp="261" data-bytes="88 A8">ˆ¨</span> <span data-cp="1EBE" data-bytes="88 63">ˆc</span> <span data-cp="1EBF" data-bytes="88 A4">ˆ¤</span> <span data-cp="1EC0" data-bytes="88 65">ˆe</span> <span data-cp="1EC1" data-bytes="88 A6">ˆ¦</span> <span data-cp="23DA" data-bytes="88 A9">ˆ©</span> <span data-cp="23DB" data-bytes="88 AA">ˆª</span> <span data-cp="31C0" data-bytes="88 40">ˆ@</span> <span data-cp="31C1" data-bytes="88 41">ˆA</span> <span data-cp="31C2" data-bytes="88 42">ˆB</span> <span data-cp="31C3" data-bytes="88 43">ˆC</span> <span data-cp="31C4" data-bytes="88 44">ˆD</span> <span data-cp="31C5" data-bytes="88 46">ˆF</span> <span data-cp="31C6" data-bytes="88 49">ˆI</span> <span data-cp="31C7" data-bytes="88 4A">ˆJ</span> <span data-cp="31C8" data-bytes="88 4D">ˆM</span> <span data-cp="31C9" data-bytes="88 4F">ˆO</span> <span data-cp="31CA" data-bytes="88 50">ˆP</span> <span data-cp="31CB" data-bytes="88 51">ˆQ</span> <span data-cp="31CC" data-bytes="88 52">ˆR</span> <span data-cp="31CD" data-bytes="88 54">ˆT</span> <span data-cp="31CE" data-bytes="88 55">ˆU</span> <span data-cp="3435" data-bytes="92 77">’w</span> <span data-cp="3440" data-bytes="96 DF">–ß</span> <span data-cp="344A" data-bytes="8C F4">Œô</span> <span data-cp="344C" data-bytes="89 D5">‰Õ</span> <span data-cp="3464" data-bytes="93 CD">“Í</span> <span data-cp="3473" data-bytes="9B DF">›ß</span> <span data-cp="347D" data-bytes="89 DA">‰Ú</span> <span data-cp="347E" data-bytes="8F 59">Y</span> <span data-cp="3493" 
data-bytes="89 DB">‰Û</span> <span data-cp="3496" data-bytes="8F 5D">]</span> <span data-cp="34A5" data-bytes="89 DC">‰Ü</span> <span data-cp="34AF" data-bytes="96 F7">–÷</span> <span data-cp="34BC" data-bytes="8A DA">ŠÚ</span> <span data-cp="34C1" data-bytes="8B DC">‹Ü</span> <span data-cp="34C8" data-bytes="97 DB">—Û</span> <span data-cp="34DF" data-bytes="9E 53">žS</span> <span data-cp="34E4" data-bytes="9D AA">ª</span> <span data-cp="34E6" data-bytes="87 BE">‡¾</span> <span data-cp="34FB" data-bytes="9B EA">›ê</span> <span data-cp="3506" data-bytes="8A 6E">Šn</span> <span data-cp="353E" data-bytes="8B C8">‹È</span> <span data-cp="3551" data-bytes="89 E8">‰è</span> <span data-cp="3553" data-bytes="89 EA">‰ê</span> <span data-cp="3559" data-bytes="8C 4B">ŒK</span> <span data-cp="356D" data-bytes="89 ED">‰í</span> <span data-cp="3570" data-bytes="94 DD">”Ý</span> <span data-cp="3572" data-bytes="89 EE">‰î</span> <span data-cp="3577" data-bytes="9E B4">ž´</span> <span data-cp="3578" data-bytes="8A D3">ŠÓ</span> <span data-cp="3584" data-bytes="92 DB">’Û</span> <span data-cp="3597" data-bytes="94 DB">”Û</span> <span data-cp="3598" data-bytes="89 F9">‰ù</span> <span data-cp="35A5" data-bytes="89 FB">‰û</span> <span data-cp="35AD" data-bytes="9E FC">žü</span> <span data-cp="35BF" data-bytes="89 FC">‰ü</span> <span data-cp="35C1" data-bytes="89 BF">‰¿</span> <span data-cp="35C5" data-bytes="89 FE">‰þ</span> <span data-cp="35C7" data-bytes="89 E6">‰æ</span> <span data-cp="35CA" data-bytes="9D 46">F</span> <span data-cp="35CE" data-bytes="9D EE">î</span> <span data-cp="35D2" data-bytes="A0 7E"> ~</span> <span data-cp="35D6" data-bytes="A0 68"> h</span> <span data-cp="35DB" data-bytes="98 E9">˜é</span> <span data-cp="35DD" data-bytes="8B 68">‹h</span> <span data-cp="35F1" data-bytes="8D FD">ý</span> <span data-cp="35F2" data-bytes="8B BE">‹¾</span> <span data-cp="35F3" data-bytes="9F D9">ŸÙ</span> <span data-cp="35FB" data-bytes="8A EB">Šë</span> <span data-cp="35FE" data-bytes="9F D7">Ÿ×</span> <span data-cp="3609" data-bytes="8B 6A">‹j</span> <span data-cp="3618" data-bytes="9C 5C">œ\</span> <span data-cp="361A" data-bytes="8B B1">‹±</span> <span data-cp="3625" data-bytes="87 70">‡p</span> <span data-cp="362D" data-bytes="9D F3">ó</span> <span data-cp="3635" data-bytes="A0 D0"> Ð</span> <span data-cp="363E" data-bytes="92 E9">’é</span> <span data-cp="3647" data-bytes="9A EC">šì</span> <span data-cp="3648" data-bytes="8F AB">«</span> <span data-cp="364E" data-bytes="8E 45">ŽE</span> <span data-cp="365F" data-bytes="9C 6F">œo</span> <span data-cp="3661" data-bytes="8D 5C">\</span> <span data-cp="367A" data-bytes="9E DE">žÞ</span> <span data-cp="3681" data-bytes="89 EF">‰ï</span> <span data-cp="369A" data-bytes="96 E9">–é</span> <span data-cp="36A5" data-bytes="9E BB">ž»</span> <span data-cp="36AA" data-bytes="94 DE">”Þ</span> <span data-cp="36AC" data-bytes="9E B8">ž¸</span> <span data-cp="36B0" data-bytes="97 BA">—º</span> <span data-cp="36B5" data-bytes="95 D6">•Ö</span> <span data-cp="36B9" data-bytes="9C BB">œ»</span> <span data-cp="36BC" data-bytes="97 DA">—Ú</span> <span data-cp="36C1" data-bytes="8F 45">E</span> <span data-cp="36C4" data-bytes="91 58">‘X</span> <span data-cp="36C7" data-bytes="98 56">˜V</span> <span data-cp="36C8" data-bytes="9B 4D">›M</span> <span data-cp="36D3" data-bytes="93 5B">“[</span> <span data-cp="36D4" data-bytes="95 C7">•Ç</span> <span data-cp="36D6" data-bytes="97 E7">—ç</span> <span data-cp="36DD" data-bytes="93 59">“Y</span> <span data-cp="36E1" 
data-bytes="91 F5">‘õ</span> <span data-cp="36E2" data-bytes="97 B8">—¸</span> <span data-cp="36F5" data-bytes="92 FA">’ú</span> <span data-cp="3701" data-bytes="93 57">“W</span> <span data-cp="3703" data-bytes="8B A6">‹¦</span> <span data-cp="370A" data-bytes="97 B0">—°</span> <span data-cp="371C" data-bytes="9C A1">œ¡</span> <span data-cp="3722" data-bytes="91 F2">‘ò</span> <span data-cp="3723" data-bytes="91 F9">‘ù</span> <span data-cp="3725" data-bytes="8F F1">ñ</span> <span data-cp="372C" data-bytes="97 45">—E</span> <span data-cp="372D" data-bytes="98 53">˜S</span> <span data-cp="3733" data-bytes="92 51">’Q</span> <span data-cp="373A" data-bytes="9D AD">­</span> <span data-cp="3762" data-bytes="9B C2">›Â</span> <span data-cp="376F" data-bytes="9A 7B">š{</span> <span data-cp="3797" data-bytes="8B 60">‹`</span> <span data-cp="37A0" data-bytes="93 4B">“K</span> <span data-cp="37B9" data-bytes="9A BD">š½</span> <span data-cp="37BE" data-bytes="91 B7">‘·</span> <span data-cp="37D6" data-bytes="8D 4B">K</span> <span data-cp="37F2" data-bytes="95 B4">•´</span> <span data-cp="37FB" data-bytes="9E F0">žð</span> <span data-cp="380F" data-bytes="8D 64">d</span> <span data-cp="3819" data-bytes="92 69">’i</span> <span data-cp="3820" data-bytes="8D 67">g</span> <span data-cp="3838" data-bytes="8D 68">h</span> <span data-cp="3863" data-bytes="93 EB">“ë</span> <span data-cp="3875" data-bytes="87 7A">‡z</span> <span data-cp="38C3" data-bytes="91 66">‘f</span> <span data-cp="38D1" data-bytes="93 DD">“Ý</span> <span data-cp="38D4" data-bytes="8D 52">R</span> <span data-cp="38FA" data-bytes="8B CC">‹Ì</span> <span data-cp="3908" data-bytes="8D 6D">m</span> <span data-cp="3914" data-bytes="8D 6E">n</span> <span data-cp="3927" data-bytes="96 A8">–¨</span> <span data-cp="393F" data-bytes="8D 6F">o</span> <span data-cp="394D" data-bytes="8D 70">p</span> <span data-cp="3978" data-bytes="8C F3">Œó</span> <span data-cp="3980" data-bytes="90 60">`</span> <span data-cp="3989" data-bytes="8D 74">t</span> <span data-cp="398A" data-bytes="97 C3">—Ã</span> <span data-cp="3992" data-bytes="8A D0">ŠÐ</span> <span data-cp="3999" data-bytes="92 74">’t</span> <span data-cp="399B" data-bytes="9B BE">›¾</span> <span data-cp="39A1" data-bytes="9C C8">œÈ</span> <span data-cp="39A4" data-bytes="9C BA">œº</span> <span data-cp="39B8" data-bytes="8D 78">x</span> <span data-cp="39DC" data-bytes="9E B9">ž¹</span> <span data-cp="39E2" data-bytes="95 5A">•Z</span> <span data-cp="39E5" data-bytes="91 B4">‘´</span> <span data-cp="39EC" data-bytes="8A 48">ŠH</span> <span data-cp="39F8" data-bytes="8D 7D">}</span> <span data-cp="39FB" data-bytes="8A 7D">Š}</span> <span data-cp="39FE" data-bytes="8A C2">ŠÂ</span> <span data-cp="3A03" data-bytes="8D A1">¡</span> <span data-cp="3A06" data-bytes="8A D1">ŠÑ</span> <span data-cp="3A18" data-bytes="8B 47">‹G</span> <span data-cp="3A29" data-bytes="93 A4">“¤</span> <span data-cp="3A2A" data-bytes="9E DA">žÚ</span> <span data-cp="3A34" data-bytes="8A 51">ŠQ</span> <span data-cp="3A4B" data-bytes="8D A6">¦</span> <span data-cp="3A52" data-bytes="9E C5">žÅ</span> <span data-cp="3A5C" data-bytes="A0 78"> x</span> <span data-cp="3A5E" data-bytes="94 B5">”µ</span> <span data-cp="3A67" data-bytes="8A 6B">Šk</span> <span data-cp="3A97" data-bytes="8D AB">«</span> <span data-cp="3ABD" data-bytes="8D AD">­</span> <span data-cp="3AE0" data-bytes="93 C1">“Á</span> <span data-cp="3AF0" data-bytes="90 6F">o</span> <span data-cp="3AF2" data-bytes="8D B0">°</span> <span data-cp="3AF5" data-bytes="87 
A2">‡¢</span> <span data-cp="3AFB" data-bytes="94 7E">”~</span> <span data-cp="3B0E" data-bytes="90 FA">ú</span> <span data-cp="3B19" data-bytes="94 79">”y</span> <span data-cp="3B22" data-bytes="8D B2">²</span> <span data-cp="3B39" data-bytes="99 7B">™{</span> <span data-cp="3B42" data-bytes="8D B4">´</span> <span data-cp="3B58" data-bytes="8D B7">·</span> <span data-cp="3B60" data-bytes="91 B3">‘³</span> <span data-cp="3B71" data-bytes="8D BB">»</span> <span data-cp="3B72" data-bytes="8D BA">º</span> <span data-cp="3B7B" data-bytes="8D BC">¼</span> <span data-cp="3B7C" data-bytes="90 44">D</span> <span data-cp="3B95" data-bytes="87 4B">‡K</span> <span data-cp="3B96" data-bytes="93 E4">“ä</span> <span data-cp="3B99" data-bytes="93 E0">“à</span> <span data-cp="3BBC" data-bytes="8D C3">Ã</span> <span data-cp="3BBE" data-bytes="9B B8">›¸</span> <span data-cp="3BC4" data-bytes="93 E9">“é</span> <span data-cp="3BD7" data-bytes="93 F6">“ö</span> <span data-cp="3BDD" data-bytes="8D C5">Å</span> <span data-cp="3BEC" data-bytes="8D CA">Ê</span> <span data-cp="3BF2" data-bytes="8D CC">Ì</span> <span data-cp="3BF4" data-bytes="93 B5">“µ</span> <span data-cp="3C11" data-bytes="9C F8">œø</span> <span data-cp="3C15" data-bytes="92 52">’R</span> <span data-cp="3C18" data-bytes="A0 E8"> è</span> <span data-cp="3C54" data-bytes="9C A5">œ¥</span> <span data-cp="3C8B" data-bytes="8C 56">ŒV</span> <span data-cp="3CCB" data-bytes="8D D6">Ö</span> <span data-cp="3CCD" data-bytes="97 C0">—À</span> <span data-cp="3CD1" data-bytes="A0 DE"> Þ</span> <span data-cp="3CD6" data-bytes="97 D2">—Ò</span> <span data-cp="3CEF" data-bytes="8D DB">Û</span> <span data-cp="3D12" data-bytes="8C EA">Œê</span> <span data-cp="3D13" data-bytes="8E AF">Ž¯</span> <span data-cp="3D1D" data-bytes="91 B5">‘µ</span> <span data-cp="3D46" data-bytes="8D EB">ë</span> <span data-cp="3D4C" data-bytes="97 C6">—Æ</span> <span data-cp="3D51" data-bytes="90 FC">ü</span> <span data-cp="3D62" data-bytes="96 D6">–Ö</span> <span data-cp="3D69" data-bytes="97 C5">—Å</span> <span data-cp="3D6A" data-bytes="8D EF">ï</span> <span data-cp="3D6F" data-bytes="97 D7">—×</span> <span data-cp="3D75" data-bytes="8D F0">ð</span> <span data-cp="3D7D" data-bytes="96 A6">–¦</span> <span data-cp="3D88" data-bytes="8C DF">Œß</span> <span data-cp="3D8A" data-bytes="8D F3">ó</span> <span data-cp="3D8F" data-bytes="94 49">”I</span> <span data-cp="3D91" data-bytes="8D F5">õ</span> <span data-cp="3DA5" data-bytes="98 72">˜r</span> <span data-cp="3DAD" data-bytes="8E 6B">Žk</span> <span data-cp="3DBF" data-bytes="8F 50">P</span> <span data-cp="3DC6" data-bytes="9D CC">Ì</span> <span data-cp="3DC9" data-bytes="8C 44">ŒD</span> <span data-cp="3DCC" data-bytes="99 6E">™n</span> <span data-cp="3DCD" data-bytes="94 A1">”¡</span> <span data-cp="3DD3" data-bytes="8F 63">c</span> <span data-cp="3DDB" data-bytes="A0 DA"> Ú</span> <span data-cp="3DE7" data-bytes="92 53">’S</span> <span data-cp="3DEB" data-bytes="9D B5">µ</span> <span data-cp="3DF3" data-bytes="98 79">˜y</span> <span data-cp="3DF4" data-bytes="87 6A">‡j</span> <span data-cp="3DF7" data-bytes="9D 5D">]</span> <span data-cp="3DFC" data-bytes="8D 63">c</span> <span data-cp="3DFD" data-bytes="96 69">–i</span> <span data-cp="3E06" data-bytes="9F 70">Ÿp</span> <span data-cp="3E43" data-bytes="8A C7">ŠÇ</span> <span data-cp="3E48" data-bytes="89 D7">‰×</span> <span data-cp="3E74" data-bytes="9E DD">žÝ</span> <span data-cp="3EA9" data-bytes="98 BC">˜¼</span> <span data-cp="3EAD" data-bytes="95 B0">•°</span> <span 
data-cp="3EB1" data-bytes="94 64">”d</span> <span data-cp="3EB8" data-bytes="93 6F">“o</span> <span data-cp="3EBF" data-bytes="94 B9">”¹</span> <span data-cp="3EC2" data-bytes="95 EC">•ì</span> <span data-cp="3EC7" data-bytes="91 EE">‘î</span> <span data-cp="3ECA" data-bytes="98 C3">˜Ã</span> <span data-cp="3ECC" data-bytes="95 F6">•ö</span> <span data-cp="3ED0" data-bytes="8F FD">ý</span> <span data-cp="3ED1" data-bytes="98 C5">˜Å</span> <span data-cp="3ED6" data-bytes="97 66">—f</span> <span data-cp="3EDA" data-bytes="97 DD">—Ý</span> <span data-cp="3EDB" data-bytes="8C AA">Œª</span> <span data-cp="3EDE" data-bytes="92 D2">’Ò</span> <span data-cp="3EE1" data-bytes="97 61">—a</span> <span data-cp="3EE2" data-bytes="98 CB">˜Ë</span> <span data-cp="3EE7" data-bytes="95 F0">•ð</span> <span data-cp="3EE9" data-bytes="97 5D">—]</span> <span data-cp="3EEB" data-bytes="91 E3">‘ã</span> <span data-cp="3EEC" data-bytes="87 7E">‡~</span> <span data-cp="3EF0" data-bytes="98 CC">˜Ì</span> <span data-cp="3EF3" data-bytes="94 69">”i</span> <span data-cp="3EF4" data-bytes="98 CD">˜Í</span> <span data-cp="3EFA" data-bytes="98 CE">˜Î</span> <span data-cp="3EFC" data-bytes="95 FC">•ü</span> <span data-cp="3EFF" data-bytes="94 A3">”£</span> <span data-cp="3F00" data-bytes="96 62">–b</span> <span data-cp="3F06" data-bytes="94 63">”c</span> <span data-cp="3F07" data-bytes="8D 47">G</span> <span data-cp="3F0E" data-bytes="98 D0">˜Ð</span> <span data-cp="3F53" data-bytes="98 D1">˜Ñ</span> <span data-cp="3F58" data-bytes="94 75">”u</span> <span data-cp="3F63" data-bytes="94 72">”r</span> <span data-cp="3F7C" data-bytes="98 D6">˜Ö</span> <span data-cp="3F93" data-bytes="8A F0">Šð</span> <span data-cp="3FC0" data-bytes="98 D9">˜Ù</span> <span data-cp="3FC8" data-bytes="8D 5A">Z</span> <span data-cp="3FD7" data-bytes="98 DB">˜Û</span> <span data-cp="3FDC" data-bytes="98 DD">˜Ý</span> <span data-cp="3FE5" data-bytes="98 A8">˜¨</span> <span data-cp="3FED" data-bytes="8A 6D">Šm</span> <span data-cp="3FF9" data-bytes="8A FB">Šû</span> <span data-cp="3FFA" data-bytes="8A AE">Š®</span> <span data-cp="4009" data-bytes="8C 5D">Œ]</span> <span data-cp="401D" data-bytes="98 E4">˜ä</span> <span data-cp="4039" data-bytes="98 E6">˜æ</span> <span data-cp="4045" data-bytes="98 E8">˜è</span> <span data-cp="4053" data-bytes="8A 4D">ŠM</span> <span data-cp="4057" data-bytes="92 57">’W</span> <span data-cp="4062" data-bytes="95 DF">•ß</span> <span data-cp="4065" data-bytes="A0 AC"> ¬</span> <span data-cp="406A" data-bytes="98 EB">˜ë</span> <span data-cp="406F" data-bytes="98 EC">˜ì</span> <span data-cp="4071" data-bytes="8C C3">ŒÃ</span> <span data-cp="40A8" data-bytes="98 F4">˜ô</span> <span data-cp="40B4" data-bytes="87 D9">‡Ù</span> <span data-cp="40BB" data-bytes="8A B8">Š¸</span> <span data-cp="40BF" data-bytes="9E E7">žç</span> <span data-cp="40C8" data-bytes="94 BC">”¼</span> <span data-cp="40DF" data-bytes="9C C6">œÆ</span> <span data-cp="40F8" data-bytes="8D 4A">J</span> <span data-cp="40FA" data-bytes="9E 7E">ž~</span> <span data-cp="4102" data-bytes="8D 44">D</span> <span data-cp="4103" data-bytes="98 FE">˜þ</span> <span data-cp="4109" data-bytes="99 40">™@</span> <span data-cp="410E" data-bytes="94 C9">”É</span> <span data-cp="4131" data-bytes="87 C6">‡Æ</span> <span data-cp="4132" data-bytes="94 D3">”Ó</span> <span data-cp="4167" data-bytes="99 46">™F</span> <span data-cp="416C" data-bytes="90 C0">À</span> <span data-cp="416E" data-bytes="94 D1">”Ñ</span> <span data-cp="417C" data-bytes="8D 4E">N</span> <span 
data-cp="417F" data-bytes="95 73">•s</span> <span data-cp="4181" data-bytes="87 CE">‡Î</span> <span data-cp="4190" data-bytes="93 C2">“Â</span> <span data-cp="41B2" data-bytes="99 48">™H</span> <span data-cp="41C4" data-bytes="99 4B">™K</span> <span data-cp="41CA" data-bytes="8E 55">ŽU</span> <span data-cp="41CF" data-bytes="99 4E">™N</span> <span data-cp="41DB" data-bytes="8E FE">Žþ</span> <span data-cp="41ED" data-bytes="8D 5F">_</span> <span data-cp="41EF" data-bytes="8E 59">ŽY</span> <span data-cp="41F9" data-bytes="94 EC">”ì</span> <span data-cp="4211" data-bytes="94 EF">”ï</span> <span data-cp="4223" data-bytes="8C 60">Œ`</span> <span data-cp="4240" data-bytes="8F 74">t</span> <span data-cp="4260" data-bytes="99 55">™U</span> <span data-cp="426A" data-bytes="95 44">•D</span> <span data-cp="4276" data-bytes="8C CB">ŒË</span> <span data-cp="427A" data-bytes="99 56">™V</span> <span data-cp="428C" data-bytes="99 59">™Y</span> <span data-cp="4294" data-bytes="99 5B">™[</span> <span data-cp="42A2" data-bytes="8C C4">ŒÄ</span> <span data-cp="42B9" data-bytes="90 B7">·</span> <span data-cp="42BC" data-bytes="97 43">—C</span> <span data-cp="42F4" data-bytes="95 CD">•Í</span> <span data-cp="42FB" data-bytes="97 C9">—É</span> <span data-cp="430A" data-bytes="87 AA">‡ª</span> <span data-cp="432B" data-bytes="8E B9">Ž¹</span> <span data-cp="436E" data-bytes="95 C6">•Æ</span> <span data-cp="4397" data-bytes="99 67">™g</span> <span data-cp="439A" data-bytes="8C E3">Œã</span> <span data-cp="43BA" data-bytes="8A B9">Š¹</span> <span data-cp="43C1" data-bytes="8D FC">ü</span> <span data-cp="43D9" data-bytes="8A 76">Šv</span> <span data-cp="43DF" data-bytes="9D 51">Q</span> <span data-cp="43ED" data-bytes="99 73">™s</span> <span data-cp="43F0" data-bytes="87 40">‡@</span> <span data-cp="43F2" data-bytes="9D 4F">O</span> <span data-cp="4401" data-bytes="99 7A">™z</span> <span data-cp="4402" data-bytes="95 64">•d</span> <span data-cp="4413" data-bytes="99 A1">™¡</span> <span data-cp="4425" data-bytes="99 A5">™¥</span> <span data-cp="442D" data-bytes="99 A7">™§</span> <span data-cp="447A" data-bytes="8E ED">Ží</span> <span data-cp="448F" data-bytes="99 AD">™­</span> <span data-cp="449F" data-bytes="94 6E">”n</span> <span data-cp="44A0" data-bytes="8F 70">p</span> <span data-cp="44B0" data-bytes="99 B3">™³</span> <span data-cp="44B7" data-bytes="A0 53"> S</span> <span data-cp="44BD" data-bytes="8D 5E">^</span> <span data-cp="44C0" data-bytes="96 5C">–\</span> <span data-cp="44C3" data-bytes="8C E0">Œà</span> <span data-cp="44CE" data-bytes="97 FE">—þ</span> <span data-cp="44DD" data-bytes="92 BD">’½</span> <span data-cp="44DE" data-bytes="8D 5D">]</span> <span data-cp="44DF" data-bytes="97 FD">—ý</span> <span data-cp="44E1" data-bytes="87 DB">‡Û</span> <span data-cp="44E4" data-bytes="8F 64">d</span> <span data-cp="44EA" data-bytes="95 62">•b</span> <span data-cp="44EB" data-bytes="97 CD">—Í</span> <span data-cp="44EC" data-bytes="9E 64">žd</span> <span data-cp="44F4" data-bytes="92 4C">’L</span> <span data-cp="4503" data-bytes="8E C9">ŽÉ</span> <span data-cp="4504" data-bytes="99 BC">™¼</span> <span data-cp="4509" data-bytes="9D A5">¥</span> <span data-cp="450B" data-bytes="8F 54">T</span> <span data-cp="4516" data-bytes="8F 7C">|</span> <span data-cp="451B" data-bytes="8D 55">U</span> <span data-cp="451D" data-bytes="8E A2">Ž¢</span> <span data-cp="4527" data-bytes="8F 7A">z</span> <span data-cp="452E" data-bytes="97 AE">—®</span> <span data-cp="4533" data-bytes="96 C8">–È</span> <span data-cp="4536" 
data-bytes="8C E4">Œä</span> <span data-cp="453B" data-bytes="99 C3">™Ã</span> <span data-cp="453D" data-bytes="90 D6">Ö</span> <span data-cp="453F" data-bytes="9C BE">œ¾</span> <span data-cp="4543" data-bytes="8F 76">v</span> <span data-cp="4551" data-bytes="94 70">”p</span> <span data-cp="4558" data-bytes="8C EF">Œï</span> <span data-cp="455C" data-bytes="8E C7">ŽÇ</span> <span data-cp="4561" data-bytes="8D 54">T</span> <span data-cp="4562" data-bytes="A0 F9"> ù</span> <span data-cp="456A" data-bytes="8F A9">©</span> <span data-cp="456D" data-bytes="8D 51">Q</span> <span data-cp="4577" data-bytes="99 C7">™Ç</span> <span data-cp="4578" data-bytes="87 44">‡D</span> <span data-cp="4585" data-bytes="90 D7">×</span> <span data-cp="45A6" data-bytes="87 43">‡C</span> <span data-cp="45B3" data-bytes="87 47">‡G</span> <span data-cp="45DA" data-bytes="87 58">‡X</span> <span data-cp="45E9" data-bytes="9E DF">žß</span> <span data-cp="45EA" data-bytes="8D 59">Y</span> <span data-cp="4603" data-bytes="87 42">‡B</span> <span data-cp="4606" data-bytes="99 CE">™Î</span> <span data-cp="460F" data-bytes="8F BA">º</span> <span data-cp="4615" data-bytes="8F EB">ë</span> <span data-cp="4617" data-bytes="99 CF">™Ï</span> <span data-cp="465B" data-bytes="8F C2">Â</span> <span data-cp="467A" data-bytes="92 C9">’É</span> <span data-cp="4680" data-bytes="97 DC">—Ü</span> <span data-cp="46A1" data-bytes="87 5D">‡]</span> <span data-cp="46AE" data-bytes="87 CC">‡Ì</span> <span data-cp="46BB" data-bytes="8D 45">E</span> <span data-cp="46CF" data-bytes="95 B3">•³</span> <span data-cp="46D0" data-bytes="9C 79">œy</span> <span data-cp="46F5" data-bytes="95 B2">•²</span> <span data-cp="46F7" data-bytes="8D 4C">L</span> <span data-cp="4713" data-bytes="8F DB">Û</span> <span data-cp="4718" data-bytes="9B E3">›ã</span> <span data-cp="4736" data-bytes="87 4C">‡L</span> <span data-cp="4744" data-bytes="87 4D">‡M</span> <span data-cp="474E" data-bytes="9E 7A">žz</span> <span data-cp="474F" data-bytes="87 57">‡W</span> <span data-cp="477C" data-bytes="9B EE">›î</span> <span data-cp="4798" data-bytes="99 DE">™Þ</span> <span data-cp="47D5" data-bytes="8A 52">ŠR</span> <span data-cp="47ED" data-bytes="99 E1">™á</span> <span data-cp="47F4" data-bytes="8A 67">Šg</span> <span data-cp="4800" data-bytes="8B B5">‹µ</span> <span data-cp="480B" data-bytes="8A AC">Š¬</span> <span data-cp="4837" data-bytes="99 E9">™é</span> <span data-cp="4871" data-bytes="97 DE">—Þ</span> <span data-cp="489B" data-bytes="95 D1">•Ñ</span> <span data-cp="48AD" data-bytes="99 F5">™õ</span> <span data-cp="48D0" data-bytes="9B A9">›©</span> <span data-cp="48F3" data-bytes="9E A4">ž¤</span> <span data-cp="48FA" data-bytes="9D 49">I</span> <span data-cp="4906" data-bytes="95 DB">•Û</span> <span data-cp="4911" data-bytes="89 C5">‰Å</span> <span data-cp="491E" data-bytes="99 F8">™ø</span> <span data-cp="4925" data-bytes="96 64">–d</span> <span data-cp="492A" data-bytes="90 55">U</span> <span data-cp="492D" data-bytes="96 D4">–Ô</span> <span data-cp="492F" data-bytes="87 C4">‡Ä</span> <span data-cp="4930" data-bytes="87 AE">‡®</span> <span data-cp="4935" data-bytes="97 7C">—|</span> <span data-cp="493C" data-bytes="96 4D">–M</span> <span data-cp="493E" data-bytes="97 E1">—á</span> <span data-cp="4945" data-bytes="9A 48">šH</span> <span data-cp="4951" data-bytes="9A 49">šI</span> <span data-cp="4965" data-bytes="90 AA">ª</span> <span data-cp="496A" data-bytes="9A 50">šP</span> <span data-cp="4972" data-bytes="93 47">“G</span> <span data-cp="4989" data-bytes="8E 
D8">ŽØ</span> <span data-cp="49A1" data-bytes="90 C9">É</span> <span data-cp="49A7" data-bytes="9A 55">šU</span> <span data-cp="49DF" data-bytes="90 BC">¼</span> <span data-cp="49E5" data-bytes="9A 58">šX</span> <span data-cp="49E7" data-bytes="8B B8">‹¸</span> <span data-cp="4A0F" data-bytes="90 D5">Õ</span> <span data-cp="4A1D" data-bytes="96 41">–A</span> <span data-cp="4A24" data-bytes="9A 5A">šZ</span> <span data-cp="4A35" data-bytes="9A 5C">š\</span> <span data-cp="4A96" data-bytes="97 C2">—Â</span> <span data-cp="4AA4" data-bytes="87 5C">‡\</span> <span data-cp="4AB4" data-bytes="8A BB">Š»</span> <span data-cp="4AB8" data-bytes="9B AA">›ª</span> <span data-cp="4AD1" data-bytes="90 F5">õ</span> <span data-cp="4AE4" data-bytes="9A 60">š`</span> <span data-cp="4AFF" data-bytes="91 45">‘E</span> <span data-cp="4B10" data-bytes="8C 58">ŒX</span> <span data-cp="4B19" data-bytes="9A 63">šc</span> <span data-cp="4B20" data-bytes="8C 49">ŒI</span> <span data-cp="4B2C" data-bytes="8B B6">‹¶</span> <span data-cp="4B6F" data-bytes="96 6B">–k</span> <span data-cp="4B70" data-bytes="9A 6E">šn</span> <span data-cp="4B72" data-bytes="91 4F">‘O</span> <span data-cp="4B7B" data-bytes="97 46">—F</span> <span data-cp="4B7E" data-bytes="A0 E6"> æ</span> <span data-cp="4B8E" data-bytes="92 D7">’×</span> <span data-cp="4B90" data-bytes="96 75">–u</span> <span data-cp="4B93" data-bytes="93 D4">“Ô</span> <span data-cp="4B96" data-bytes="91 BB">‘»</span> <span data-cp="4B97" data-bytes="96 79">–y</span> <span data-cp="4B9D" data-bytes="9A 70">šp</span> <span data-cp="4BBD" data-bytes="96 78">–x</span> <span data-cp="4BBE" data-bytes="91 CD">‘Í</span> <span data-cp="4BC0" data-bytes="9C 4A">œJ</span> <span data-cp="4C04" data-bytes="A0 6F"> o</span> <span data-cp="4C07" data-bytes="A0 6A"> j</span> <span data-cp="4C0E" data-bytes="91 5F">‘_</span> <span data-cp="4C32" data-bytes="87 41">‡A</span> <span data-cp="4C3B" data-bytes="9F A5">Ÿ¥</span> <span data-cp="4C3E" data-bytes="89 BA">‰º</span> <span data-cp="4C40" data-bytes="87 4F">‡O</span> <span data-cp="4C47" data-bytes="87 4E">‡N</span> <span data-cp="4C57" data-bytes="87 55">‡U</span> <span data-cp="4C5B" data-bytes="9E CD">žÍ</span> <span data-cp="4C6D" data-bytes="9A 79">šy</span> <span data-cp="4C77" data-bytes="8C F2">Œò</span> <span data-cp="4C7B" data-bytes="8D 57">W</span> <span data-cp="4C7D" data-bytes="9D CE">Î</span> <span data-cp="4C81" data-bytes="8C D2">ŒÒ</span> <span data-cp="4C85" data-bytes="87 59">‡Y</span> <span data-cp="4CA4" data-bytes="9D 73">s</span> <span data-cp="4CAE" data-bytes="96 B9">–¹</span> <span data-cp="4CB0" data-bytes="96 BC">–¼</span> <span data-cp="4CB7" data-bytes="9C D1">œÑ</span> <span data-cp="4CCD" data-bytes="89 B7">‰·</span> <span data-cp="4CE1" data-bytes="9E EE">žî</span> <span data-cp="4CE2" data-bytes="87 49">‡I</span> <span data-cp="4D07" data-bytes="87 5B">‡[</span> <span data-cp="4D09" data-bytes="9E C9">žÉ</span> <span data-cp="4D34" data-bytes="91 AE">‘®</span> <span data-cp="4D76" data-bytes="8D 58">X</span> <span data-cp="4D77" data-bytes="87 46">‡F</span> <span data-cp="4D89" data-bytes="8D 56">V</span> <span data-cp="4D91" data-bytes="9D 78">x</span> <span data-cp="4D9C" data-bytes="9D 7B">{</span> <span data-cp="4E04" data-bytes="9E B3">ž³</span> <span data-cp="4E1A" data-bytes="9E B2">ž²</span> <span data-cp="4E1C" data-bytes="9D D6">Ö</span> <span data-cp="4E21" data-bytes="99 4F">™O</span> <span data-cp="4E24" data-bytes="89 CE">‰Î</span> <span data-cp="4E28" data-bytes="8B C0">‹À</span> 
<span data-cp="4E2A" data-bytes="9F C4">ŸÄ</span> <span data-cp="4E2C" data-bytes="8B D4">‹Ô</span> <span data-cp="4E2F" data-bytes="8C 72">Œr</span> <span data-cp="4E37" data-bytes="8B F9">‹ù</span> <span data-cp="4E3D" data-bytes="89 46">‰F</span> <span data-cp="4E5B" data-bytes="8B C6">‹Æ</span> <span data-cp="4E6A" data-bytes="9C 57">œW</span> <span data-cp="4E78" data-bytes="9A FB">šû</span> <span data-cp="4E80" data-bytes="89 D0">‰Ð</span> <span data-cp="4E81" data-bytes="89 CF">‰Ï</span> <span data-cp="4E87" data-bytes="89 D1">‰Ñ</span> <span data-cp="4E89" data-bytes="89 E2">‰â</span> <span data-cp="4E98" data-bytes="92 7E">’~</span> <span data-cp="4E9A" data-bytes="9D BA">º</span> <span data-cp="4EA3" data-bytes="8C 6F">Œo</span> <span data-cp="4EBB" data-bytes="8B C7">‹Ç</span> <span data-cp="4EBC" data-bytes="92 6B">’k</span> <span data-cp="4EBF" data-bytes="89 D2">‰Ò</span> <span data-cp="4ECE" data-bytes="9F CF">ŸÏ</span> <span data-cp="4EEA" data-bytes="9D A9">©</span> <span data-cp="4EEB" data-bytes="89 D3">‰Ó</span> <span data-cp="4EEE" data-bytes="99 E2">™â</span> <span data-cp="4EF8" data-bytes="92 67">’g</span> <span data-cp="4F03" data-bytes="92 A4">’¤</span> <span data-cp="4F17" data-bytes="8C 73">Œs</span> <span data-cp="4F1A" data-bytes="89 4E">‰N</span> <span data-cp="4F28" data-bytes="89 4F">‰O</span> <span data-cp="4F29" data-bytes="92 78">’x</span> <span data-cp="4F32" data-bytes="91 B6">‘¶</span> <span data-cp="4F37" data-bytes="89 D4">‰Ô</span> <span data-cp="4F39" data-bytes="9F D2">ŸÒ</span> <span data-cp="4F42" data-bytes="92 A7">’§</span> <span data-cp="4F45" data-bytes="95 A2">•¢</span> <span data-cp="4F4B" data-bytes="92 6E">’n</span> <span data-cp="4F72" data-bytes="96 EA">–ê</span> <span data-cp="4F8A" data-bytes="92 6F">’o</span> <span data-cp="4FA2" data-bytes="92 A3">’£</span> <span data-cp="4FA8" data-bytes="89 50">‰P</span> <span data-cp="4FB0" data-bytes="98 66">˜f</span> <span data-cp="4FB4" data-bytes="8C F8">Œø</span> <span data-cp="4FBB" data-bytes="9C 53">œS</span> <span data-cp="4FBD" data-bytes="89 D6">‰Ö</span> <span data-cp="4FC8" data-bytes="98 B2">˜²</span> <span data-cp="4FCC" data-bytes="92 AB">’«</span> <span data-cp="4FE4" data-bytes="96 DE">–Þ</span> <span data-cp="4FE5" data-bytes="92 AC">’¬</span> <span data-cp="4FF0" data-bytes="8C 70">Œp</span> <span data-cp="4FF2" data-bytes="9F 6E">Ÿn</span> <span data-cp="4FF9" data-bytes="8E F2">Žò</span> <span data-cp="4FFD" data-bytes="9F 6C">Ÿl</span> <span data-cp="5003" data-bytes="89 D8">‰Ø</span> <span data-cp="502E" data-bytes="92 A8">’¨</span> <span data-cp="5034" data-bytes="91 63">‘c</span> <span data-cp="503B" data-bytes="8C 40">Œ@</span> <span data-cp="5056" data-bytes="9F 73">Ÿs</span> <span data-cp="5058" data-bytes="92 AD">’­</span> <span data-cp="5066" data-bytes="9B E9">›é</span> <span data-cp="506C" data-bytes="92 A9">’©</span> <span data-cp="5081" data-bytes="92 AA">’ª</span> <span data-cp="5088" data-bytes="89 D9">‰Ù</span> <span data-cp="50A6" data-bytes="9F A8">Ÿ¨</span> <span data-cp="50BC" data-bytes="8C 71">Œq</span> <span data-cp="50CD" data-bytes="92 A1">’¡</span> <span data-cp="50D0" data-bytes="90 E3">ã</span> <span data-cp="50D9" data-bytes="A0 A6"> ¦</span> <span data-cp="50DF" data-bytes="94 AB">”«</span> <span data-cp="50ED" data-bytes="9F CB">ŸË</span> <span data-cp="50F4" data-bytes="97 C4">—Ä</span> <span data-cp="50FC" data-bytes="92 AE">’®</span> <span data-cp="510D" data-bytes="92 A2">’¢</span> <span data-cp="512B" data-bytes="92 68">’h</span> <span 
data-cp="5156" data-bytes="89 51">‰Q</span> <span data-cp="5159" data-bytes="92 AF">’¯</span> <span data-cp="515B" data-bytes="92 B0">’°</span> <span data-cp="515D" data-bytes="92 B1">’±</span> <span data-cp="515E" data-bytes="92 B2">’²</span> <span data-cp="5174" data-bytes="89 52">‰R</span> <span data-cp="5179" data-bytes="94 5A">”Z</span> <span data-cp="5186" data-bytes="89 DD">‰Ý</span> <span data-cp="519A" data-bytes="9E 52">žR</span> <span data-cp="519C" data-bytes="89 53">‰S</span> <span data-cp="51A7" data-bytes="9E 55">žU</span> <span data-cp="51A8" data-bytes="92 BA">’º</span> <span data-cp="51AE" data-bytes="8C 5B">Œ[</span> <span data-cp="51B4" data-bytes="9A 68">šh</span> <span data-cp="51C3" data-bytes="92 BB">’»</span> <span data-cp="51D2" data-bytes="9B B4">›´</span> <span data-cp="51DB" data-bytes="89 DF">‰ß</span> <span data-cp="51E4" data-bytes="89 54">‰T</span> <span data-cp="51FC" data-bytes="89 E0">‰à</span> <span data-cp="51FE" data-bytes="9F 4F">ŸO</span> <span data-cp="5205" data-bytes="89 E1">‰á</span> <span data-cp="521F" data-bytes="9F CD">ŸÍ</span> <span data-cp="5220" data-bytes="A0 E7"> ç</span> <span data-cp="5227" data-bytes="89 A6">‰¦</span> <span data-cp="5234" data-bytes="9E FA">žú</span> <span data-cp="524F" data-bytes="87 BC">‡¼</span> <span data-cp="5259" data-bytes="92 C4">’Ä</span> <span data-cp="5260" data-bytes="9F 6F">Ÿo</span> <span data-cp="5268" data-bytes="8B B0">‹°</span> <span data-cp="5273" data-bytes="9F AC">Ÿ¬</span> <span data-cp="5279" data-bytes="89 E3">‰ã</span> <span data-cp="528F" data-bytes="9B D3">›Ó</span> <span data-cp="5290" data-bytes="89 E4">‰ä</span> <span data-cp="529A" data-bytes="9F D5">ŸÕ</span> <span data-cp="52A1" data-bytes="89 55">‰U</span> <span data-cp="52A4" data-bytes="92 C5">’Å</span> <span data-cp="52A8" data-bytes="89 56">‰V</span> <span data-cp="52CC" data-bytes="9E DC">žÜ</span> <span data-cp="52D1" data-bytes="9F 71">Ÿq</span> <span data-cp="52E1" data-bytes="92 C7">’Ç</span> <span data-cp="5301" data-bytes="9A 4C">šL</span> <span data-cp="5324" data-bytes="8C 68">Œh</span> <span data-cp="5327" data-bytes="89 E5">‰å</span> <span data-cp="532C" data-bytes="9F 7D">Ÿ}</span> <span data-cp="5332" data-bytes="A0 A9"> ©</span> <span data-cp="533B" data-bytes="89 57">‰W</span> <span data-cp="534E" data-bytes="89 58">‰X</span> <span data-cp="535D" data-bytes="8B E3">‹ã</span> <span data-cp="535F" data-bytes="8B 61">‹a</span> <span data-cp="5364" data-bytes="9A F1">šñ</span> <span data-cp="5367" data-bytes="9E B7">ž·</span> <span data-cp="537D" data-bytes="9E BA">žº</span> <span data-cp="53A2" data-bytes="9C E0">œà</span> <span data-cp="53A9" data-bytes="89 E7">‰ç</span> <span data-cp="53AA" data-bytes="A0 7A"> z</span> <span data-cp="53B0" data-bytes="89 E9">‰é</span> <span data-cp="53C2" data-bytes="89 EB">‰ë</span> <span data-cp="53CC" data-bytes="90 C8">È</span> <span data-cp="53D0" data-bytes="92 DA">’Ú</span> <span data-cp="53D1" data-bytes="89 59">‰Y</span> <span data-cp="53D2" data-bytes="9C F5">œõ</span> <span data-cp="53D8" data-bytes="89 5A">‰Z</span> <span data-cp="53DA" data-bytes="9F A2">Ÿ¢</span> <span data-cp="53F7" data-bytes="8F AD">­</span> <span data-cp="5414" data-bytes="96 EF">–ï</span> <span data-cp="5416" data-bytes="9D EC">ì</span> <span data-cp="541A" data-bytes="9D CA">Ê</span> <span data-cp="5423" data-bytes="89 EC">‰ì</span> <span data-cp="5432" data-bytes="9D E2">â</span> <span data-cp="5434" data-bytes="8C 75">Œu</span> <span data-cp="544B" data-bytes="9E C0">žÀ</span> <span 
data-cp="544C" data-bytes="87 C5">‡Å</span> <span data-cp="544D" data-bytes="9E 56">žV</span> <span data-cp="5469" data-bytes="9F 79">Ÿy</span> <span data-cp="546A" data-bytes="9A C7">šÇ</span> <span data-cp="5485" data-bytes="98 A1">˜¡</span> <span data-cp="5493" data-bytes="89 F0">‰ð</span> <span data-cp="5494" data-bytes="9E 47">žG</span> <span data-cp="5497" data-bytes="9D F7">÷</span> <span data-cp="549C" data-bytes="9F D3">ŸÓ</span> <span data-cp="549E" data-bytes="9A CA">šÊ</span> <span data-cp="54A3" data-bytes="89 F1">‰ñ</span> <span data-cp="54B2" data-bytes="8E 5A">ŽZ</span> <span data-cp="54B4" data-bytes="89 F2">‰ò</span> <span data-cp="54B9" data-bytes="89 F3">‰ó</span> <span data-cp="54CB" data-bytes="92 5D">’]</span> <span data-cp="54CC" data-bytes="8B 51">‹Q</span> <span data-cp="54CD" data-bytes="92 E0">’à</span> <span data-cp="54D0" data-bytes="89 F4">‰ô</span> <span data-cp="54DA" data-bytes="9F D4">ŸÔ</span> <span data-cp="54E3" data-bytes="8A 79">Šy</span> <span data-cp="54EF" data-bytes="89 F5">‰õ</span> <span data-cp="5502" data-bytes="97 A7">—§</span> <span data-cp="550D" data-bytes="93 BA">“º</span> <span data-cp="5513" data-bytes="9E 58">žX</span> <span data-cp="5518" data-bytes="89 F6">‰ö</span> <span data-cp="551E" data-bytes="9E 57">žW</span> <span data-cp="5523" data-bytes="89 F7">‰÷</span> <span data-cp="5525" data-bytes="8A 41">ŠA</span> <span data-cp="5528" data-bytes="89 F8">‰ø</span> <span data-cp="553F" data-bytes="89 FA">‰ú</span> <span data-cp="5569" data-bytes="9E 4E">žN</span> <span data-cp="556B" data-bytes="94 DC">”Ü</span> <span data-cp="5571" data-bytes="95 DA">•Ú</span> <span data-cp="5572" data-bytes="9D F8">ø</span> <span data-cp="5573" data-bytes="9F 6A">Ÿj</span> <span data-cp="5579" data-bytes="8A B7">Š·</span> <span data-cp="5590" data-bytes="8A 46">ŠF</span> <span data-cp="55B0" data-bytes="91 48">‘H</span> <span data-cp="55B4" data-bytes="92 DE">’Þ</span> <span data-cp="55B9" data-bytes="8B 53">‹S</span> <span data-cp="55BA" data-bytes="9D F6">ö</span> <span data-cp="55BC" data-bytes="9B DA">›Ú</span> <span data-cp="55C1" data-bytes="9D 7E">~</span> <span data-cp="55D7" data-bytes="89 FD">‰ý</span> <span data-cp="55D8" data-bytes="99 E4">™ä</span> <span data-cp="55DE" data-bytes="9E 43">žC</span> <span data-cp="55EA" data-bytes="9D E9">é</span> <span data-cp="55EC" data-bytes="8F 52">R</span> <span data-cp="55F0" data-bytes="9D F5">õ</span> <span data-cp="55F1" data-bytes="9D F0">ð</span> <span data-cp="55F5" data-bytes="99 E7">™ç</span> <span data-cp="55FB" data-bytes="8B BD">‹½</span> <span data-cp="5605" data-bytes="9D EF">ï</span> <span data-cp="5611" data-bytes="9F B7">Ÿ·</span> <span data-cp="561E" data-bytes="9D D0">Ð</span> <span data-cp="5620" data-bytes="9F EB">Ÿë</span> <span data-cp="5621" data-bytes="8D A9">©</span> <span data-cp="5622" data-bytes="9D CF">Ï</span> <span data-cp="5623" data-bytes="98 E1">˜á</span> <span data-cp="5625" data-bytes="9D E5">å</span> <span data-cp="562D" data-bytes="9D C8">È</span> <span data-cp="5643" data-bytes="9D EB">ë</span> <span data-cp="5650" data-bytes="9A A2">š¢</span> <span data-cp="5652" data-bytes="8A D6">ŠÖ</span> <span data-cp="5654" data-bytes="9A 5F">š_</span> <span data-cp="565D" data-bytes="9E F5">žõ</span> <span data-cp="5661" data-bytes="8F B7">·</span> <span data-cp="567A" data-bytes="9A D2">šÒ</span> <span data-cp="567B" data-bytes="9E 6A">žj</span> <span data-cp="567C" data-bytes="9E E8">žè</span> <span data-cp="5689" data-bytes="8B BF">‹¿</span> <span data-cp="568A" 
data-bytes="91 C2">‘Â</span> <span data-cp="568B" data-bytes="9D 62">b</span> <span data-cp="5692" data-bytes="92 60">’`</span> <span data-cp="569E" data-bytes="92 5E">’^</span> <span data-cp="569F" data-bytes="91 C1">‘Á</span> <span data-cp="56A1" data-bytes="8A C5">ŠÅ</span> <span data-cp="56A4" data-bytes="97 A3">—£</span> <span data-cp="56AF" data-bytes="8B 6C">‹l</span> <span data-cp="56B1" data-bytes="8D 7E">~</span> <span data-cp="56B9" data-bytes="9C 54">œT</span> <span data-cp="56BF" data-bytes="9D BD">½</span> <span data-cp="56D6" data-bytes="9C C5">œÅ</span> <span data-cp="56E2" data-bytes="89 5B">‰[</span> <span data-cp="56FB" data-bytes="87 65">‡e</span> <span data-cp="56FD" data-bytes="98 C7">˜Ç</span> <span data-cp="5715" data-bytes="9C EE">œî</span> <span data-cp="571D" data-bytes="92 E2">’â</span> <span data-cp="5732" data-bytes="94 A7">”§</span> <span data-cp="573D" data-bytes="8C CC">ŒÌ</span> <span data-cp="573F" data-bytes="9B D4">›Ô</span> <span data-cp="5754" data-bytes="99 E5">™å</span> <span data-cp="5757" data-bytes="9A C2">šÂ</span> <span data-cp="575B" data-bytes="91 FB">‘û</span> <span data-cp="575F" data-bytes="A0 73"> s</span> <span data-cp="5767" data-bytes="9F 72">Ÿr</span> <span data-cp="577A" data-bytes="9F CC">ŸÌ</span> <span data-cp="577E" data-bytes="98 A5">˜¥</span> <span data-cp="577F" data-bytes="92 E8">’è</span> <span data-cp="5788" data-bytes="9B BC">›¼</span> <span data-cp="578A" data-bytes="96 F3">–ó</span> <span data-cp="578D" data-bytes="92 E7">’ç</span> <span data-cp="579C" data-bytes="8B 7D">‹}</span> <span data-cp="57A1" data-bytes="9B F4">›ô</span> <span data-cp="57A7" data-bytes="9E F7">ž÷</span> <span data-cp="57AA" data-bytes="9E C1">žÁ</span> <span data-cp="57B3" data-bytes="87 C3">‡Ã</span> <span data-cp="57B4" data-bytes="99 6F">™o</span> <span data-cp="57BB" data-bytes="96 F1">–ñ</span> <span data-cp="57BE" data-bytes="8E 41">ŽA</span> <span data-cp="57C4" data-bytes="95 4A">•J</span> <span data-cp="57C8" data-bytes="97 E6">—æ</span> <span data-cp="57D7" data-bytes="96 F5">–õ</span> <span data-cp="57DD" data-bytes="92 E6">’æ</span> <span data-cp="57DE" data-bytes="9F 42">ŸB</span> <span data-cp="57EF" data-bytes="99 A9">™©</span> <span data-cp="5812" data-bytes="97 E5">—å</span> <span data-cp="5818" data-bytes="87 C8">‡È</span> <span data-cp="5822" data-bytes="96 7D">–}</span> <span data-cp="583A" data-bytes="99 A2">™¢</span> <span data-cp="5840" data-bytes="9A BB">š»</span> <span data-cp="5844" data-bytes="9A 65">še</span> <span data-cp="5847" data-bytes="94 4E">”N</span> <span data-cp="585F" data-bytes="99 DF">™ß</span> <span data-cp="5869" data-bytes="98 E3">˜ã</span> <span data-cp="586C" data-bytes="92 54">’T</span> <span data-cp="5872" data-bytes="96 7B">–{</span> <span data-cp="5873" data-bytes="8A AF">Š¯</span> <span data-cp="5892" data-bytes="8C 77">Œw</span> <span data-cp="5896" data-bytes="87 B0">‡°</span> <span data-cp="5899" data-bytes="8B AF">‹¯</span> <span data-cp="589A" data-bytes="9E BD">ž½</span> <span data-cp="58A7" data-bytes="9E E6">žæ</span> <span data-cp="58B0" data-bytes="8E E1">Žá</span> <span data-cp="58B5" data-bytes="9B 7D">›}</span> <span data-cp="58B6" data-bytes="9C 7E">œ~</span> <span data-cp="58CB" data-bytes="92 EA">’ê</span> <span data-cp="58D0" data-bytes="8C 78">Œx</span> <span data-cp="58F0" data-bytes="89 5C">‰\</span> <span data-cp="58F2" data-bytes="98 F0">˜ð</span> <span data-cp="58F3" data-bytes="96 F2">–ò</span> <span data-cp="5902" data-bytes="8B C1">‹Á</span> <span data-cp="5904" 
data-bytes="89 5D">‰]</span> <span data-cp="5905" data-bytes="89 DE">‰Þ</span> <span data-cp="5907" data-bytes="89 5E">‰^</span> <span data-cp="591D" data-bytes="87 68">‡h</span> <span data-cp="5932" data-bytes="89 5F">‰_</span> <span data-cp="5934" data-bytes="89 60">‰`</span> <span data-cp="5965" data-bytes="9B CD">›Í</span> <span data-cp="5975" data-bytes="9D D3">Ó</span> <span data-cp="5989" data-bytes="98 4C">˜L</span> <span data-cp="5994" data-bytes="97 52">—R</span> <span data-cp="599A" data-bytes="95 C3">•Ã</span> <span data-cp="599F" data-bytes="9B B6">›¶</span> <span data-cp="59AC" data-bytes="9A B9">š¹</span> <span data-cp="59B0" data-bytes="97 B3">—³</span> <span data-cp="59B7" data-bytes="9F 74">Ÿt</span> <span data-cp="59B8" data-bytes="92 F1">’ñ</span> <span data-cp="59BF" data-bytes="8C FA">Œú</span> <span data-cp="59C4" data-bytes="97 DF">—ß</span> <span data-cp="59EB" data-bytes="98 77">˜w</span> <span data-cp="59EF" data-bytes="98 54">˜T</span> <span data-cp="59F0" data-bytes="95 C5">•Å</span> <span data-cp="59F8" data-bytes="9D 55">U</span> <span data-cp="5A02" data-bytes="95 7E">•~</span> <span data-cp="5A0B" data-bytes="97 42">—B</span> <span data-cp="5A0D" data-bytes="94 E6">”æ</span> <span data-cp="5A12" data-bytes="92 F5">’õ</span> <span data-cp="5A1A" data-bytes="8C C5">ŒÅ</span> <span data-cp="5A21" data-bytes="92 FD">’ý</span> <span data-cp="5A27" data-bytes="9C 51">œQ</span> <span data-cp="5A2A" data-bytes="94 E9">”é</span> <span data-cp="5A2B" data-bytes="98 5C">˜\</span> <span data-cp="5A2C" data-bytes="92 F0">’ð</span> <span data-cp="5A3D" data-bytes="94 4C">”L</span> <span data-cp="5A45" data-bytes="91 6B">‘k</span> <span data-cp="5A54" data-bytes="8B 78">‹x</span> <span data-cp="5A59" data-bytes="94 E2">”â</span> <span data-cp="5A61" data-bytes="98 4F">˜O</span> <span data-cp="5A67" data-bytes="9C D0">œÐ</span> <span data-cp="5A68" data-bytes="92 71">’q</span> <span data-cp="5A6B" data-bytes="93 65">“e</span> <span data-cp="5A6E" data-bytes="98 5B">˜[</span> <span data-cp="5A71" data-bytes="98 50">˜P</span> <span data-cp="5A79" data-bytes="97 BC">—¼</span> <span data-cp="5A7E" data-bytes="92 F3">’ó</span> <span data-cp="5A81" data-bytes="93 40">“@</span> <span data-cp="5A82" data-bytes="98 4D">˜M</span> <span data-cp="5A86" data-bytes="95 72">•r</span> <span data-cp="5A99" data-bytes="92 EB">’ë</span> <span data-cp="5AA1" data-bytes="97 B7">—·</span> <span data-cp="5AA4" data-bytes="87 6F">‡o</span> <span data-cp="5AC3" data-bytes="90 A7">§</span> <span data-cp="5ACE" data-bytes="97 41">—A</span> <span data-cp="5ACF" data-bytes="92 F4">’ô</span> <span data-cp="5AD1" data-bytes="87 72">‡r</span> <span data-cp="5AE4" data-bytes="95 77">•w</span> <span data-cp="5AF0" data-bytes="9E E2">žâ</span> <span data-cp="5AF2" data-bytes="8F 78">x</span> <span data-cp="5AFE" data-bytes="96 72">–r</span> <span data-cp="5B0D" data-bytes="9E B5">žµ</span> <span data-cp="5B11" data-bytes="96 4B">–K</span> <span data-cp="5B15" data-bytes="8C AC">Œ¬</span> <span data-cp="5B1F" data-bytes="A0 FA"> ú</span> <span data-cp="5B28" data-bytes="96 FC">–ü</span> <span data-cp="5B2B" data-bytes="95 75">•u</span> <span data-cp="5B41" data-bytes="90 DA">Ú</span> <span data-cp="5B44" data-bytes="93 67">“g</span> <span data-cp="5B4A" data-bytes="90 DF">ß</span> <span data-cp="5B4F" data-bytes="93 54">“T</span> <span data-cp="5B66" data-bytes="89 61">‰a</span> <span data-cp="5B68" data-bytes="8B B4">‹´</span> <span data-cp="5B6D" data-bytes="9D C0">À</span> <span data-cp="5B74" 
data-bytes="8E 48">ŽH</span> <span data-cp="5B90" data-bytes="9E 67">žg</span> <span data-cp="5B96" data-bytes="8C D9">ŒÙ</span> <span data-cp="5B9E" data-bytes="89 62">‰b</span> <span data-cp="5B9F" data-bytes="89 63">‰c</span> <span data-cp="5BB7" data-bytes="87 73">‡s</span> <span data-cp="5BC3" data-bytes="9F 6B">Ÿk</span> <span data-cp="5BDB" data-bytes="87 6D">‡m</span> <span data-cp="5C10" data-bytes="9C BC">œ¼</span> <span data-cp="5C1C" data-bytes="8B 5D">‹]</span> <span data-cp="5C1E" data-bytes="93 4C">“L</span> <span data-cp="5C20" data-bytes="9A E2">šâ</span> <span data-cp="5C23" data-bytes="8B C9">‹É</span> <span data-cp="5C4A" data-bytes="9F C9">ŸÉ</span> <span data-cp="5C53" data-bytes="9F 44">ŸD</span> <span data-cp="5C5E" data-bytes="98 ED">˜í</span> <span data-cp="5C78" data-bytes="8C E9">Œé</span> <span data-cp="5C99" data-bytes="8D F2">ò</span> <span data-cp="5C9A" data-bytes="89 64">‰d</span> <span data-cp="5C9E" data-bytes="93 4D">“M</span> <span data-cp="5CC1" data-bytes="A0 F2"> ò</span> <span data-cp="5CC2" data-bytes="98 68">˜h</span> <span data-cp="5CD1" data-bytes="9F 58">ŸX</span> <span data-cp="5CD5" data-bytes="8C E6">Œæ</span> <span data-cp="5CE5" data-bytes="8D 73">s</span> <span data-cp="5CF5" data-bytes="8C 48">ŒH</span> <span data-cp="5CFC" data-bytes="87 74">‡t</span> <span data-cp="5D15" data-bytes="8D A8">¨</span> <span data-cp="5D2C" data-bytes="9C 75">œu</span> <span data-cp="5D2F" data-bytes="98 78">˜x</span> <span data-cp="5D3E" data-bytes="8D 60">`</span> <span data-cp="5D48" data-bytes="8D 61">a</span> <span data-cp="5D56" data-bytes="8D 62">b</span> <span data-cp="5D57" data-bytes="A0 A1"> ¡</span> <span data-cp="5D5B" data-bytes="9C 40">œ@</span> <span data-cp="5D70" data-bytes="98 AD">˜­</span> <span data-cp="5D74" data-bytes="9E EA">žê</span> <span data-cp="5D78" data-bytes="8C EC">Œì</span> <span data-cp="5D7B" data-bytes="8C D4">ŒÔ</span> <span data-cp="5D85" data-bytes="9C EB">œë</span> <span data-cp="5D8E" data-bytes="9F 51">ŸQ</span> <span data-cp="5DA4" data-bytes="8D 65">e</span> <span data-cp="5DAB" data-bytes="9C F1">œñ</span> <span data-cp="5DB9" data-bytes="8D 66">f</span> <span data-cp="5DC1" data-bytes="96 54">–T</span> <span data-cp="5DF5" data-bytes="9F CE">ŸÎ</span> <span data-cp="5E0B" data-bytes="9A E4">šä</span> <span data-cp="5E12" data-bytes="9F 75">Ÿu</span> <span data-cp="5E42" data-bytes="8D 69">i</span> <span data-cp="5E48" data-bytes="93 4F">“O</span> <span data-cp="5E5E" data-bytes="93 4E">“N</span> <span data-cp="5E86" data-bytes="89 65">‰e</span> <span data-cp="5E92" data-bytes="8C 7A">Œz</span> <span data-cp="5E99" data-bytes="8C 7B">Œ{</span> <span data-cp="5EBD" data-bytes="8D 6A">j</span> <span data-cp="5ECD" data-bytes="93 53">“S</span> <span data-cp="5ED0" data-bytes="9D FB">û</span> <span data-cp="5EF8" data-bytes="90 59">Y</span> <span data-cp="5F0C" data-bytes="93 61">“a</span> <span data-cp="5F0E" data-bytes="93 62">“b</span> <span data-cp="5F25" data-bytes="8D 6B">k</span> <span data-cp="5F3B" data-bytes="8C FE">Œþ</span> <span data-cp="5F4D" data-bytes="95 B8">•¸</span> <span data-cp="5F51" data-bytes="8B CA">‹Ê</span> <span data-cp="5F5C" data-bytes="98 7A">˜z</span> <span data-cp="5F83" data-bytes="8D 6C">l</span> <span data-cp="5FB1" data-bytes="9B 70">›p</span> <span data-cp="5FBA" data-bytes="A0 51"> Q</span> <span data-cp="5FC2" data-bytes="8C 7C">Œ|</span> <span data-cp="5FC4" data-bytes="8B CB">‹Ë</span> <span data-cp="5FDB" data-bytes="93 6E">“n</span> <span data-cp="603B" data-bytes="89 
66">‰f</span> <span data-cp="6062" data-bytes="9E A9">ž©</span> <span data-cp="6075" data-bytes="93 7A">“z</span> <span data-cp="6077" data-bytes="A0 E0"> à</span> <span data-cp="607E" data-bytes="93 6B">“k</span> <span data-cp="60A4" data-bytes="A0 DC"> Ü</span> <span data-cp="60A7" data-bytes="94 68">”h</span> <span data-cp="60D7" data-bytes="8D 71">q</span> <span data-cp="60DE" data-bytes="9B EC">›ì</span> <span data-cp="60E3" data-bytes="99 BA">™º</span> <span data-cp="60E7" data-bytes="9A D0">šÐ</span> <span data-cp="60E8" data-bytes="9A 61">ša</span> <span data-cp="60E9" data-bytes="A0 E5"> å</span> <span data-cp="60FD" data-bytes="A0 5B"> [</span> <span data-cp="6107" data-bytes="96 AC">–¬</span> <span data-cp="610C" data-bytes="97 40">—@</span> <span data-cp="6119" data-bytes="9E F1">žñ</span> <span data-cp="6122" data-bytes="8C 4D">ŒM</span> <span data-cp="6130" data-bytes="9F 7E">Ÿ~</span> <span data-cp="613D" data-bytes="8D 72">r</span> <span data-cp="6150" data-bytes="96 A9">–©</span> <span data-cp="6159" data-bytes="A0 6E"> n</span> <span data-cp="616F" data-bytes="A0 74"> t</span> <span data-cp="617D" data-bytes="A0 71"> q</span> <span data-cp="6195" data-bytes="9C 50">œP</span> <span data-cp="6198" data-bytes="93 79">“y</span> <span data-cp="6199" data-bytes="93 78">“x</span> <span data-cp="619C" data-bytes="A0 DD"> Ý</span> <span data-cp="61B7" data-bytes="8D 75">u</span> <span data-cp="61B9" data-bytes="8D 76">v</span> <span data-cp="61C0" data-bytes="93 74">“t</span> <span data-cp="61CF" data-bytes="8D 77">w</span> <span data-cp="61DA" data-bytes="90 C3">Ã</span> <span data-cp="61E2" data-bytes="A0 79"> y</span> <span data-cp="622C" data-bytes="8D 79">y</span> <span data-cp="6237" data-bytes="8B FC">‹ü</span> <span data-cp="6239" data-bytes="A0 76"> v</span> <span data-cp="624C" data-bytes="8B CD">‹Í</span> <span data-cp="6268" data-bytes="9F 5A">ŸZ</span> <span data-cp="6282" data-bytes="9F F4">Ÿô</span> <span data-cp="6285" data-bytes="9F BA">Ÿº</span> <span data-cp="6290" data-bytes="8D 7A">z</span> <span data-cp="629D" data-bytes="9E 45">žE</span> <span data-cp="62A4" data-bytes="93 B0">“°</span> <span data-cp="62A6" data-bytes="A0 75"> u</span> <span data-cp="62C1" data-bytes="87 DD">‡Ý</span> <span data-cp="62C3" data-bytes="9B 46">›F</span> <span data-cp="62CE" data-bytes="A0 77"> w</span> <span data-cp="62D0" data-bytes="9D C4">Ä</span> <span data-cp="62E5" data-bytes="8D 7B">{</span> <span data-cp="6318" data-bytes="8D 7C">|</span> <span data-cp="632E" data-bytes="9E D6">žÖ</span> <span data-cp="6331" data-bytes="93 AC">“¬</span> <span data-cp="6335" data-bytes="9F 5B">Ÿ[</span> <span data-cp="6337" data-bytes="93 A9">“©</span> <span data-cp="6364" data-bytes="A0 7C"> |</span> <span data-cp="6379" data-bytes="8A C1">ŠÁ</span> <span data-cp="637F" data-bytes="9F B4">Ÿ´</span> <span data-cp="63B9" data-bytes="9E 4C">žL</span> <span data-cp="63C1" data-bytes="8F C5">Å</span> <span data-cp="63D1" data-bytes="93 AD">“­</span> <span data-cp="63DE" data-bytes="9D C3">Ã</span> <span data-cp="63E2" data-bytes="8D A2">¢</span> <span data-cp="63E6" data-bytes="9D 4A">J</span> <span data-cp="63FB" data-bytes="8D A3">£</span> <span data-cp="63FC" data-bytes="9E 4B">žK</span> <span data-cp="63FE" data-bytes="9E 4D">žM</span> <span data-cp="6407" data-bytes="8D A4">¤</span> <span data-cp="6432" data-bytes="8A FD">Šý</span> <span data-cp="643B" data-bytes="93 B2">“²</span> <span data-cp="645A" data-bytes="8D A5">¥</span> <span data-cp="6471" data-bytes="93 A1">“¡</span> <span 
data-cp="647C" data-bytes="8A C6">ŠÆ</span> <span data-cp="648D" data-bytes="8A 5B">Š[</span> <span data-cp="6491" data-bytes="89 4D">‰M</span> <span data-cp="64B4" data-bytes="8A 78">Šx</span> <span data-cp="64B6" data-bytes="93 AB">“«</span> <span data-cp="64C0" data-bytes="8D A7">§</span> <span data-cp="64D3" data-bytes="9F 45">ŸE</span> <span data-cp="64DD" data-bytes="8A 56">ŠV</span> <span data-cp="64E7" data-bytes="8E E6">Žæ</span> <span data-cp="64EA" data-bytes="8A A4">Š¤</span> <span data-cp="650A" data-bytes="89 43">‰C</span> <span data-cp="6511" data-bytes="93 F3">“ó</span> <span data-cp="651F" data-bytes="9E A2">ž¢</span> <span data-cp="6530" data-bytes="9D C7">Ç</span> <span data-cp="6535" data-bytes="8B CE">‹Î</span> <span data-cp="656B" data-bytes="93 B3">“³</span> <span data-cp="6586" data-bytes="8D AC">¬</span> <span data-cp="6589" data-bytes="89 67">‰g</span> <span data-cp="658B" data-bytes="8C 7E">Œ~</span> <span data-cp="65BE" data-bytes="9C F3">œó</span> <span data-cp="65D4" data-bytes="95 BB">•»</span> <span data-cp="65FF" data-bytes="8D AE">®</span> <span data-cp="661E" data-bytes="93 DB">“Û</span> <span data-cp="6630" data-bytes="93 D5">“Õ</span> <span data-cp="6648" data-bytes="9B 71">›q</span> <span data-cp="664D" data-bytes="87 64">‡d</span> <span data-cp="6653" data-bytes="8D AF">¯</span> <span data-cp="6660" data-bytes="87 B5">‡µ</span> <span data-cp="6663" data-bytes="93 D8">“Ø</span> <span data-cp="666B" data-bytes="93 D3">“Ó</span> <span data-cp="667D" data-bytes="8E 76">Žv</span> <span data-cp="668E" data-bytes="93 D1">“Ñ</span> <span data-cp="6692" data-bytes="8D B1">±</span> <span data-cp="669A" data-bytes="98 59">˜Y</span> <span data-cp="66B6" data-bytes="9C BF">œ¿</span> <span data-cp="66BF" data-bytes="9B 72">›r</span> <span data-cp="66CE" data-bytes="93 BE">“¾</span> <span data-cp="66E7" data-bytes="8C DB">ŒÛ</span> <span data-cp="66F1" data-bytes="9D F1">ñ</span> <span data-cp="670C" data-bytes="A0 BB"> »</span> <span data-cp="670E" data-bytes="9B 7E">›~</span> <span data-cp="6716" data-bytes="8D B3">³</span> <span data-cp="6719" data-bytes="8C 52">ŒR</span> <span data-cp="671E" data-bytes="9A E8">šè</span> <span data-cp="6725" data-bytes="8E DC">ŽÜ</span> <span data-cp="6736" data-bytes="9C F9">œù</span> <span data-cp="6761" data-bytes="98 E7">˜ç</span> <span data-cp="676B" data-bytes="8C CA">ŒÊ</span> <span data-cp="676E" data-bytes="87 75">‡u</span> <span data-cp="6782" data-bytes="87 BA">‡º</span> <span data-cp="678F" data-bytes="93 E5">“å</span> <span data-cp="67A0" data-bytes="9A 59">šY</span> <span data-cp="67A4" data-bytes="8D B5">µ</span> <span data-cp="67BF" data-bytes="8F 7D">}</span> <span data-cp="67D6" data-bytes="95 47">•G</span> <span data-cp="67F9" data-bytes="92 50">’P</span> <span data-cp="67FE" data-bytes="89 68">‰h</span> <span data-cp="6800" data-bytes="8D B6">¶</span> <span data-cp="6802" data-bytes="A0 7D"> }</span> <span data-cp="6803" data-bytes="98 FC">˜ü</span> <span data-cp="6804" data-bytes="89 69">‰i</span> <span data-cp="6810" data-bytes="92 56">’V</span> <span data-cp="681E" data-bytes="93 E8">“è</span> <span data-cp="6836" data-bytes="9C E3">œã</span> <span data-cp="6847" data-bytes="96 40">–@</span> <span data-cp="684A" data-bytes="8D B8">¸</span> <span data-cp="6855" data-bytes="9B 4A">›J</span> <span data-cp="6856" data-bytes="8F B9">¹</span> <span data-cp="6865" data-bytes="89 6A">‰j</span> <span data-cp="6884" data-bytes="8D B9">¹</span> <span data-cp="6888" data-bytes="91 7E">‘~</span> <span data-cp="6898" 
data-bytes="93 F4">“ô</span> <span data-cp="68B6" data-bytes="93 E7">“ç</span> <span data-cp="68B9" data-bytes="97 EF">—ï</span> <span data-cp="68C5" data-bytes="96 A5">–¥</span> <span data-cp="6909" data-bytes="8D BD">½</span> <span data-cp="6918" data-bytes="9B A1">›¡</span> <span data-cp="6919" data-bytes="8C A2">Œ¢</span> <span data-cp="691A" data-bytes="9A B7">š·</span> <span data-cp="691B" data-bytes="8E FC">Žü</span> <span data-cp="692C" data-bytes="9F A1">Ÿ¡</span> <span data-cp="6943" data-bytes="8D BE">¾</span> <span data-cp="6946" data-bytes="89 A4">‰¤</span> <span data-cp="6955" data-bytes="9A D9">šÙ</span> <span data-cp="6964" data-bytes="8D C0">À</span> <span data-cp="6967" data-bytes="97 F0">—ð</span> <span data-cp="6972" data-bytes="93 B4">“´</span> <span data-cp="6980" data-bytes="9F A7">Ÿ§</span> <span data-cp="6985" data-bytes="8D C2">Â</span> <span data-cp="698A" data-bytes="99 B6">™¶</span> <span data-cp="699F" data-bytes="8D C1">Á</span> <span data-cp="69A2" data-bytes="8E 46">ŽF</span> <span data-cp="69B2" data-bytes="A0 D1"> Ñ</span> <span data-cp="69C0" data-bytes="9F CA">ŸÊ</span> <span data-cp="69D1" data-bytes="92 CF">’Ï</span> <span data-cp="69D5" data-bytes="9C F4">œô</span> <span data-cp="69D6" data-bytes="8D C4">Ä</span> <span data-cp="69E9" data-bytes="9B 4C">›L</span> <span data-cp="6A03" data-bytes="9C DE">œÞ</span> <span data-cp="6A0C" data-bytes="98 6C">˜l</span> <span data-cp="6A1A" data-bytes="97 F9">—ù</span> <span data-cp="6A1C" data-bytes="95 58">•X</span> <span data-cp="6A29" data-bytes="87 B6">‡¶</span> <span data-cp="6A2B" data-bytes="98 5E">˜^</span> <span data-cp="6A2D" data-bytes="94 CD">”Í</span> <span data-cp="6A33" data-bytes="93 EE">“î</span> <span data-cp="6A43" data-bytes="8C A3">Œ£</span> <span data-cp="6A4C" data-bytes="93 F5">“õ</span> <span data-cp="6A52" data-bytes="93 EF">“ï</span> <span data-cp="6A53" data-bytes="8E EA">Žê</span> <span data-cp="6A57" data-bytes="8F 5B">[</span> <span data-cp="6A63" data-bytes="8C 5E">Œ^</span> <span data-cp="6A65" data-bytes="8D C6">Æ</span> <span data-cp="6A71" data-bytes="8D C8">È</span> <span data-cp="6A74" data-bytes="8D C7">Ç</span> <span data-cp="6A7A" data-bytes="93 F7">“÷</span> <span data-cp="6A82" data-bytes="8D C9">É</span> <span data-cp="6A8F" data-bytes="96 70">–p</span> <span data-cp="6A99" data-bytes="8D CB">Ë</span> <span data-cp="6AA7" data-bytes="8F 65">e</span> <span data-cp="6AAB" data-bytes="8D CD">Í</span> <span data-cp="6AB1" data-bytes="9D A8">¨</span> <span data-cp="6AB2" data-bytes="94 F9">”ù</span> <span data-cp="6AB5" data-bytes="8D CE">Î</span> <span data-cp="6ABE" data-bytes="93 EA">“ê</span> <span data-cp="6AC9" data-bytes="93 F0">“ð</span> <span data-cp="6ACA" data-bytes="9F B6">Ÿ¶</span> <span data-cp="6AD4" data-bytes="8D CF">Ï</span> <span data-cp="6AD8" data-bytes="97 63">—c</span> <span data-cp="6AF6" data-bytes="8D D0">Ð</span> <span data-cp="6B05" data-bytes="93 F1">“ñ</span> <span data-cp="6B52" data-bytes="9F DB">ŸÛ</span> <span data-cp="6B57" data-bytes="93 F8">“ø</span> <span data-cp="6B6F" data-bytes="8B F7">‹÷</span> <span data-cp="6B7A" data-bytes="8B CF">‹Ï</span> <span data-cp="6B81" data-bytes="8D D1">Ñ</span> <span data-cp="6BC1" data-bytes="8D D2">Ò</span> <span data-cp="6BEA" data-bytes="8D D3">Ó</span> <span data-cp="6BFA" data-bytes="9F E7">Ÿç</span> <span data-cp="6C1C" data-bytes="90 BD">½</span> <span data-cp="6C31" data-bytes="9F D0">ŸÐ</span> <span data-cp="6C35" data-bytes="8B D0">‹Ð</span> <span data-cp="6C39" data-bytes="9C 
AE">œ®</span> <span data-cp="6C3A" data-bytes="8B D1">‹Ñ</span> <span data-cp="6C3D" data-bytes="8A DB">ŠÛ</span> <span data-cp="6C4A" data-bytes="9E FD">žý</span> <span data-cp="6C58" data-bytes="95 CE">•Î</span> <span data-cp="6C75" data-bytes="8D D4">Ô</span> <span data-cp="6C7F" data-bytes="8E E3">Žã</span> <span data-cp="6C9F" data-bytes="90 76">v</span> <span data-cp="6CA2" data-bytes="98 C6">˜Æ</span> <span data-cp="6CAA" data-bytes="8D D5">Õ</span> <span data-cp="6CAF" data-bytes="97 D1">—Ñ</span> <span data-cp="6CB2" data-bytes="9E B6">ž¶</span> <span data-cp="6CCE" data-bytes="A0 42"> B</span> <span data-cp="6CDF" data-bytes="98 73">˜s</span> <span data-cp="6CEA" data-bytes="9F FC">Ÿü</span> <span data-cp="6CFF" data-bytes="8C A5">Œ¥</span> <span data-cp="6D02" data-bytes="8D D7">×</span> <span data-cp="6D05" data-bytes="92 FB">’û</span> <span data-cp="6D06" data-bytes="8D D8">Ø</span> <span data-cp="6D24" data-bytes="94 4F">”O</span> <span data-cp="6D26" data-bytes="8D D9">Ù</span> <span data-cp="6D4E" data-bytes="89 6B">‰k</span> <span data-cp="6D57" data-bytes="97 CE">—Î</span> <span data-cp="6D67" data-bytes="94 47">”G</span> <span data-cp="6D72" data-bytes="92 B7">’·</span> <span data-cp="6D81" data-bytes="8D DA">Ú</span> <span data-cp="6D8F" data-bytes="9C 5A">œZ</span> <span data-cp="6DA4" data-bytes="8D DC">Ü</span> <span data-cp="6DA5" data-bytes="94 44">”D</span> <span data-cp="6DB1" data-bytes="8D DD">Ý</span> <span data-cp="6DB9" data-bytes="A0 D6"> Ö</span> <span data-cp="6DFE" data-bytes="8C 41">ŒA</span> <span data-cp="6E02" data-bytes="97 D5">—Õ</span> <span data-cp="6E04" data-bytes="94 4A">”J</span> <span data-cp="6E0A" data-bytes="94 4D">”M</span> <span data-cp="6E0F" data-bytes="97 CB">—Ë</span> <span data-cp="6E15" data-bytes="8D DE">Þ</span> <span data-cp="6E18" data-bytes="8D DF">ß</span> <span data-cp="6E29" data-bytes="8D E0">à</span> <span data-cp="6E57" data-bytes="8C DD">ŒÝ</span> <span data-cp="6E76" data-bytes="92 B3">’³</span> <span data-cp="6E86" data-bytes="8D E1">á</span> <span data-cp="6E8B" data-bytes="95 D3">•Ó</span> <span data-cp="6E9A" data-bytes="89 C1">‰Á</span> <span data-cp="6EB8" data-bytes="9C B7">œ·</span> <span data-cp="6EBB" data-bytes="8D E3">ã</span> <span data-cp="6EDA" data-bytes="8D E5">å</span> <span data-cp="6EDD" data-bytes="89 47">‰G</span> <span data-cp="6EE2" data-bytes="8D E4">ä</span> <span data-cp="6EE8" data-bytes="8D E7">ç</span> <span data-cp="6EE9" data-bytes="8D E8">è</span> <span data-cp="6F0B" data-bytes="94 45">”E</span> <span data-cp="6F0C" data-bytes="97 D6">—Ö</span> <span data-cp="6F17" data-bytes="98 44">˜D</span> <span data-cp="6F24" data-bytes="8D E9">é</span> <span data-cp="6F34" data-bytes="8D EA">ê</span> <span data-cp="6F56" data-bytes="9D A7">§</span> <span data-cp="6F79" data-bytes="95 D2">•Ò</span> <span data-cp="6F81" data-bytes="8D ED">í</span> <span data-cp="6FB5" data-bytes="9C DC">œÜ</span> <span data-cp="6FB6" data-bytes="9B F6">›ö</span> <span data-cp="6FBB" data-bytes="95 CF">•Ï</span> <span data-cp="6FBE" data-bytes="8D EE">î</span> <span data-cp="6FD9" data-bytes="96 EC">–ì</span> <span data-cp="6FDA" data-bytes="96 EB">–ë</span> <span data-cp="6FF6" data-bytes="90 B6">¶</span> <span data-cp="7003" data-bytes="98 AB">˜«</span> <span data-cp="701E" data-bytes="96 ED">–í</span> <span data-cp="702C" data-bytes="8D F4">ô</span> <span data-cp="704D" data-bytes="8C 67">Œg</span> <span data-cp="7050" data-bytes="8D F6">ö</span> <span data-cp="7054" data-bytes="8D F7">÷</span> <span 
data-cp="705C" data-bytes="8F FA">ú</span> <span data-cp="7067" data-bytes="97 D0">—Ð</span> <span data-cp="706C" data-bytes="8B D2">‹Ò</span> <span data-cp="706E" data-bytes="87 DE">‡Þ</span> <span data-cp="706F" data-bytes="8D F8">ø</span> <span data-cp="7075" data-bytes="90 D9">Ù</span> <span data-cp="7077" data-bytes="8C 47">ŒG</span> <span data-cp="707F" data-bytes="8D F9">ù</span> <span data-cp="7089" data-bytes="8D FA">ú</span> <span data-cp="708F" data-bytes="90 A6">¦</span> <span data-cp="70A0" data-bytes="99 70">™p</span> <span data-cp="70A3" data-bytes="91 EB">‘ë</span> <span data-cp="70A5" data-bytes="97 70">—p</span> <span data-cp="70A6" data-bytes="98 6F">˜o</span> <span data-cp="70B9" data-bytes="98 F2">˜ò</span> <span data-cp="70BB" data-bytes="9A FC">šü</span> <span data-cp="70BC" data-bytes="89 6C">‰l</span> <span data-cp="70C0" data-bytes="99 5E">™^</span> <span data-cp="70C4" data-bytes="95 BD">•½</span> <span data-cp="70D0" data-bytes="91 E6">‘æ</span> <span data-cp="70F1" data-bytes="94 54">”T</span> <span data-cp="70F5" data-bytes="99 B8">™¸</span> <span data-cp="70FE" data-bytes="97 E9">—é</span> <span data-cp="7105" data-bytes="93 46">“F</span> <span data-cp="711D" data-bytes="98 63">˜c</span> <span data-cp="7129" data-bytes="95 BC">•¼</span> <span data-cp="7133" data-bytes="98 70">˜p</span> <span data-cp="7134" data-bytes="96 F6">–ö</span> <span data-cp="7135" data-bytes="8E A9">Ž©</span> <span data-cp="713B" data-bytes="94 51">”Q</span> <span data-cp="713E" data-bytes="8E 43">ŽC</span> <span data-cp="7140" data-bytes="8B 5A">‹Z</span> <span data-cp="7151" data-bytes="9B F5">›õ</span> <span data-cp="7157" data-bytes="8C EE">Œî</span> <span data-cp="7162" data-bytes="A0 DF"> ß</span> <span data-cp="716B" data-bytes="97 7E">—~</span> <span data-cp="7171" data-bytes="9B D5">›Õ</span> <span data-cp="7173" data-bytes="9A C3">šÃ</span> <span data-cp="7175" data-bytes="97 C8">—È</span> <span data-cp="7176" data-bytes="A0 DB"> Û</span> <span data-cp="7177" data-bytes="91 D0">‘Ð</span> <span data-cp="717A" data-bytes="9F E4">Ÿä</span> <span data-cp="717C" data-bytes="8F DD">Ý</span> <span data-cp="717E" data-bytes="91 E9">‘é</span> <span data-cp="7188" data-bytes="98 E0">˜à</span> <span data-cp="718C" data-bytes="92 CA">’Ê</span> <span data-cp="718E" data-bytes="98 57">˜W</span> <span data-cp="7191" data-bytes="8C 51">ŒQ</span> <span data-cp="7198" data-bytes="9B 49">›I</span> <span data-cp="71A2" data-bytes="9D 76">v</span> <span data-cp="71A3" data-bytes="9E AF">ž¯</span> <span data-cp="71AD" data-bytes="9C CC">œÌ</span> <span data-cp="71B7" data-bytes="8D F1">ñ</span> <span data-cp="71D1" data-bytes="8E 53">ŽS</span> <span data-cp="71DF" data-bytes="9C 62">œb</span> <span data-cp="71EB" data-bytes="96 F9">–ù</span> <span data-cp="71F5" data-bytes="98 BF">˜¿</span> <span data-cp="71F6" data-bytes="9E 49">žI</span> <span data-cp="7200" data-bytes="8C A7">Œ§</span> <span data-cp="7201" data-bytes="9B 76">›v</span> <span data-cp="7209" data-bytes="9B CA">›Ê</span> <span data-cp="720F" data-bytes="92 DC">’Ü</span> <span data-cp="7216" data-bytes="91 CC">‘Ì</span> <span data-cp="7217" data-bytes="91 E2">‘â</span> <span data-cp="7225" data-bytes="87 5F">‡_</span> <span data-cp="722B" data-bytes="8B D3">‹Ó</span> <span data-cp="7250" data-bytes="94 55">”U</span> <span data-cp="725C" data-bytes="8D BF">¿</span> <span data-cp="7266" data-bytes="9E 78">žx</span> <span data-cp="7287" data-bytes="94 56">”V</span> <span data-cp="728F" data-bytes="9D 61">a</span> <span data-cp="7294" 
data-bytes="94 57">”W</span> <span data-cp="729F" data-bytes="99 66">™f</span> <span data-cp="72AD" data-bytes="8B D5">‹Õ</span> <span data-cp="72B2" data-bytes="A0 69"> i</span> <span data-cp="72CD" data-bytes="98 B4">˜´</span> <span data-cp="72E2" data-bytes="A0 49"> I</span> <span data-cp="7302" data-bytes="A0 4C"> L</span> <span data-cp="7304" data-bytes="9E 65">že</span> <span data-cp="7310" data-bytes="98 B5">˜µ</span> <span data-cp="732A" data-bytes="99 75">™u</span> <span data-cp="732C" data-bytes="A0 65"> e</span> <span data-cp="7338" data-bytes="98 B7">˜·</span> <span data-cp="7339" data-bytes="98 B8">˜¸</span> <span data-cp="7341" data-bytes="98 BA">˜º</span> <span data-cp="7348" data-bytes="98 BB">˜»</span> <span data-cp="734F" data-bytes="9F BC">Ÿ¼</span> <span data-cp="7371" data-bytes="A0 4A"> J</span> <span data-cp="7374" data-bytes="9E C7">žÇ</span> <span data-cp="738C" data-bytes="8C A9">Œ©</span> <span data-cp="738F" data-bytes="98 AE">˜®</span> <span data-cp="7398" data-bytes="92 D6">’Ö</span> <span data-cp="739E" data-bytes="91 D4">‘Ô</span> <span data-cp="73BA" data-bytes="8C 53">ŒS</span> <span data-cp="73C4" data-bytes="87 BF">‡¿</span> <span data-cp="73D0" data-bytes="94 C5">”Å</span> <span data-cp="73E1" data-bytes="98 C1">˜Á</span> <span data-cp="73E2" data-bytes="97 5C">—\</span> <span data-cp="73E6" data-bytes="97 73">—s</span> <span data-cp="73F3" data-bytes="97 64">—d</span> <span data-cp="73F9" data-bytes="96 4E">–N</span> <span data-cp="73FB" data-bytes="97 65">—e</span> <span data-cp="7402" data-bytes="8C 5A">ŒZ</span> <span data-cp="7411" data-bytes="89 A1">‰¡</span> <span data-cp="7412" data-bytes="95 FA">•ú</span> <span data-cp="7414" data-bytes="92 D4">’Ô</span> <span data-cp="7419" data-bytes="98 C8">˜È</span> <span data-cp="741C" data-bytes="90 EF">ï</span> <span data-cp="741E" data-bytes="98 C9">˜É</span> <span data-cp="741F" data-bytes="98 CA">˜Ê</span> <span data-cp="7437" data-bytes="94 6D">”m</span> <span data-cp="7438" data-bytes="94 B7">”·</span> <span data-cp="743C" data-bytes="94 6B">”k</span> <span data-cp="7443" data-bytes="92 FC">’ü</span> <span data-cp="7445" data-bytes="95 EB">•ë</span> <span data-cp="7448" data-bytes="97 6E">—n</span> <span data-cp="744C" data-bytes="87 B8">‡¸</span> <span data-cp="7456" data-bytes="92 D5">’Õ</span> <span data-cp="7461" data-bytes="87 78">‡x</span> <span data-cp="7468" data-bytes="94 7A">”z</span> <span data-cp="746B" data-bytes="95 FB">•û</span> <span data-cp="7479" data-bytes="92 D1">’Ñ</span> <span data-cp="747A" data-bytes="94 5D">”]</span> <span data-cp="748C" data-bytes="93 44">“D</span> <span data-cp="748D" data-bytes="8E A6">Ž¦</span> <span data-cp="7499" data-bytes="92 D3">’Ó</span> <span data-cp="749B" data-bytes="94 B8">”¸</span> <span data-cp="749D" data-bytes="87 79">‡y</span> <span data-cp="74B4" data-bytes="97 5E">—^</span> <span data-cp="74B9" data-bytes="8C AD">Œ­</span> <span data-cp="74C6" data-bytes="87 C1">‡Á</span> <span data-cp="74CC" data-bytes="94 6A">”j</span> <span data-cp="74D0" data-bytes="93 E3">“ã</span> <span data-cp="74D3" data-bytes="98 CF">˜Ï</span> <span data-cp="74E7" data-bytes="A0 D9"> Ù</span> <span data-cp="74F0" data-bytes="A0 BF"> ¿</span> <span data-cp="74F1" data-bytes="A0 4D"> M</span> <span data-cp="74F2" data-bytes="A0 B8"> ¸</span> <span data-cp="74F8" data-bytes="A0 CE"> Î</span> <span data-cp="7505" data-bytes="A0 B7"> ·</span> <span data-cp="7519" data-bytes="89 C3">‰Ã</span> <span data-cp="7534" data-bytes="9D F4">ô</span> <span data-cp="7535" 
data-bytes="89 6D">‰m</span> <span data-cp="753B" data-bytes="9C 7B">œ{</span> <span data-cp="7542" data-bytes="98 D2">˜Ò</span> <span data-cp="7546" data-bytes="9F A9">Ÿ©</span> <span data-cp="7551" data-bytes="97 D9">—Ù</span> <span data-cp="7553" data-bytes="A0 C4"> Ä</span> <span data-cp="7555" data-bytes="94 76">”v</span> <span data-cp="7560" data-bytes="99 78">™x</span> <span data-cp="756D" data-bytes="98 D3">˜Ó</span> <span data-cp="7572" data-bytes="98 D4">˜Ô</span> <span data-cp="757A" data-bytes="9F B9">Ÿ¹</span> <span data-cp="7583" data-bytes="94 71">”q</span> <span data-cp="758D" data-bytes="98 D5">˜Õ</span> <span data-cp="75B1" data-bytes="9E 5C">ž\</span> <span data-cp="75C3" data-bytes="A0 44"> D</span> <span data-cp="75C8" data-bytes="98 D7">˜×</span> <span data-cp="75DC" data-bytes="98 D8">˜Ø</span> <span data-cp="75F9" data-bytes="9E EF">žï</span> <span data-cp="7607" data-bytes="9F FE">Ÿþ</span> <span data-cp="763B" data-bytes="9D DD">Ý</span> <span data-cp="7640" data-bytes="9E E1">žá</span> <span data-cp="764D" data-bytes="98 DA">˜Ú</span> <span data-cp="764E" data-bytes="9D DF">ß</span> <span data-cp="7654" data-bytes="9E EB">žë</span> <span data-cp="7666" data-bytes="9E 59">žY</span> <span data-cp="7667" data-bytes="A0 5C"> \</span> <span data-cp="7673" data-bytes="94 77">”w</span> <span data-cp="7674" data-bytes="98 DC">˜Ü</span> <span data-cp="767A" data-bytes="98 DE">˜Þ</span> <span data-cp="76D6" data-bytes="9F C2">ŸÂ</span> <span data-cp="76D9" data-bytes="8C 6B">Œk</span> <span data-cp="770C" data-bytes="98 C4">˜Ä</span> <span data-cp="770E" data-bytes="94 B0">”°</span> <span data-cp="770F" data-bytes="94 B1">”±</span> <span data-cp="7724" data-bytes="A0 C1"> Á</span> <span data-cp="772B" data-bytes="A0 CD"> Í</span> <span data-cp="7743" data-bytes="98 E5">˜å</span> <span data-cp="7772" data-bytes="91 E4">‘ä</span> <span data-cp="7777" data-bytes="8F C7">Ç</span> <span data-cp="7778" data-bytes="94 AE">”®</span> <span data-cp="777A" data-bytes="8A 4F">ŠO</span> <span data-cp="777B" data-bytes="94 B2">”²</span> <span data-cp="7793" data-bytes="8F D4">Ô</span> <span data-cp="7798" data-bytes="98 EA">˜ê</span> <span data-cp="77B9" data-bytes="9D E0">à</span> <span data-cp="77BE" data-bytes="98 EE">˜î</span> <span data-cp="77C3" data-bytes="95 C4">•Ä</span> <span data-cp="77CB" data-bytes="98 EF">˜ï</span> <span data-cp="77D7" data-bytes="9B 78">›x</span> <span data-cp="77DD" data-bytes="8C 6E">Œn</span> <span data-cp="77FE" data-bytes="A0 AE"> ®</span> <span data-cp="7808" data-bytes="9D 4C">L</span> <span data-cp="7818" data-bytes="98 F1">˜ñ</span> <span data-cp="781C" data-bytes="98 F3">˜ó</span> <span data-cp="781E" data-bytes="94 C1">”Á</span> <span data-cp="7839" data-bytes="98 F5">˜õ</span> <span data-cp="783D" data-bytes="96 E2">–â</span> <span data-cp="7842" data-bytes="94 50">”P</span> <span data-cp="7844" data-bytes="96 A2">–¢</span> <span data-cp="7847" data-bytes="98 F6">˜ö</span> <span data-cp="784B" data-bytes="96 E5">–å</span> <span data-cp="7851" data-bytes="98 F7">˜÷</span> <span data-cp="7853" data-bytes="A0 46"> F</span> <span data-cp="7854" data-bytes="96 E3">–ã</span> <span data-cp="7866" data-bytes="98 F8">˜ø</span> <span data-cp="787A" data-bytes="9E E4">žä</span> <span data-cp="7888" data-bytes="94 C3">”Ã</span> <span data-cp="788D" data-bytes="94 C2">”Â</span> <span data-cp="78B6" data-bytes="96 E4">–ä</span> <span data-cp="78B8" data-bytes="89 AC">‰¬</span> <span data-cp="78B9" data-bytes="96 DB">–Û</span> <span data-cp="78D2" 
data-bytes="94 C4">”Ä</span> <span data-cp="78D8" data-bytes="9F FB">Ÿû</span> <span data-cp="78E4" data-bytes="8C 59">ŒY</span> <span data-cp="78EE" data-bytes="93 C9">“É</span> <span data-cp="78F0" data-bytes="94 E8">”è</span> <span data-cp="78F5" data-bytes="90 C5">Å</span> <span data-cp="7906" data-bytes="A0 A8"> ¨</span> <span data-cp="7932" data-bytes="98 FD">˜ý</span> <span data-cp="7933" data-bytes="98 FB">˜û</span> <span data-cp="7936" data-bytes="8E BF">Ž¿</span> <span data-cp="793B" data-bytes="8B D8">‹Ø</span> <span data-cp="7958" data-bytes="8F 68">h</span> <span data-cp="7959" data-bytes="94 C6">”Æ</span> <span data-cp="7962" data-bytes="9D EA">ê</span> <span data-cp="797E" data-bytes="9C DA">œÚ</span> <span data-cp="7983" data-bytes="9C 72">œr</span> <span data-cp="7987" data-bytes="89 C9">‰É</span> <span data-cp="7991" data-bytes="99 41">™A</span> <span data-cp="7999" data-bytes="99 42">™B</span> <span data-cp="799B" data-bytes="94 CA">”Ê</span> <span data-cp="799F" data-bytes="91 D7">‘×</span> <span data-cp="79A5" data-bytes="94 CC">”Ì</span> <span data-cp="79C4" data-bytes="97 A8">—¨</span> <span data-cp="79CA" data-bytes="8C DE">ŒÞ</span> <span data-cp="79D0" data-bytes="87 B3">‡³</span> <span data-cp="79E2" data-bytes="96 D1">–Ñ</span> <span data-cp="79E3" data-bytes="9C BD">œ½</span> <span data-cp="79F1" data-bytes="94 D5">”Õ</span> <span data-cp="79F4" data-bytes="94 D0">”Ð</span> <span data-cp="7A06" data-bytes="99 44">™D</span> <span data-cp="7A2A" data-bytes="8C 63">Œc</span> <span data-cp="7A2C" data-bytes="87 BB">‡»</span> <span data-cp="7A2D" data-bytes="A0 B3"> ³</span> <span data-cp="7A32" data-bytes="87 B4">‡´</span> <span data-cp="7A3A" data-bytes="94 CF">”Ï</span> <span data-cp="7A3E" data-bytes="9F FA">Ÿú</span> <span data-cp="7A43" data-bytes="91 E5">‘å</span> <span data-cp="7A45" data-bytes="9C 6A">œj</span> <span data-cp="7A49" data-bytes="8E 49">ŽI</span> <span data-cp="7A65" data-bytes="8E 4C">ŽL</span> <span data-cp="7A72" data-bytes="87 C9">‡É</span> <span data-cp="7A7D" data-bytes="8E 4D">ŽM</span> <span data-cp="7A83" data-bytes="9A 73">šs</span> <span data-cp="7A91" data-bytes="99 47">™G</span> <span data-cp="7A93" data-bytes="8C B1">Œ±</span> <span data-cp="7AB0" data-bytes="8E 50">ŽP</span> <span data-cp="7ABB" data-bytes="8E 4F">ŽO</span> <span data-cp="7ABC" data-bytes="99 49">™I</span> <span data-cp="7AC2" data-bytes="8E 51">ŽQ</span> <span data-cp="7AC3" data-bytes="8E 52">ŽR</span> <span data-cp="7AC8" data-bytes="9A B2">š²</span> <span data-cp="7AC9" data-bytes="89 A5">‰¥</span> <span data-cp="7ACF" data-bytes="99 4C">™L</span> <span data-cp="7AD3" data-bytes="9F F8">Ÿø</span> <span data-cp="7ADA" data-bytes="8E 56">ŽV</span> <span data-cp="7ADB" data-bytes="99 4D">™M</span> <span data-cp="7ADC" data-bytes="91 CA">‘Ê</span> <span data-cp="7ADD" data-bytes="8E 57">ŽW</span> <span data-cp="7AE2" data-bytes="94 E1">”á</span> <span data-cp="7AE7" data-bytes="90 47">G</span> <span data-cp="7AE9" data-bytes="8F D8">Ø</span> <span data-cp="7AEA" data-bytes="8E 58">ŽX</span> <span data-cp="7AFC" data-bytes="87 A3">‡£</span> <span data-cp="7AFE" data-bytes="94 EB">”ë</span> <span data-cp="7B0B" data-bytes="8E 5C">Ž\</span> <span data-cp="7B0C" data-bytes="95 53">•S</span> <span data-cp="7B14" data-bytes="9F E5">Ÿå</span> <span data-cp="7B1F" data-bytes="9F 56">ŸV</span> <span data-cp="7B27" data-bytes="95 4F">•O</span> <span data-cp="7B29" data-bytes="8E 5E">Ž^</span> <span data-cp="7B39" data-bytes="99 6A">™j</span> <span data-cp="7B42" 
data-bytes="9C 64">œd</span> <span data-cp="7B43" data-bytes="9C D9">œÙ</span> <span data-cp="7B51" data-bytes="9D 5A">Z</span> <span data-cp="7B55" data-bytes="8E 5D">Ž]</span> <span data-cp="7B62" data-bytes="99 50">™P</span> <span data-cp="7B6C" data-bytes="99 51">™Q</span> <span data-cp="7B6F" data-bytes="8E 62">Žb</span> <span data-cp="7B7B" data-bytes="99 52">™R</span> <span data-cp="7B92" data-bytes="8E 68">Žh</span> <span data-cp="7BA2" data-bytes="8E 61">Ža</span> <span data-cp="7BA3" data-bytes="9F 59">ŸY</span> <span data-cp="7BAE" data-bytes="87 D0">‡Ð</span> <span data-cp="7BB2" data-bytes="8B B3">‹³</span> <span data-cp="7BB8" data-bytes="8E 69">Ži</span> <span data-cp="7BC5" data-bytes="87 B9">‡¹</span> <span data-cp="7BCF" data-bytes="9F 5D">Ÿ]</span> <span data-cp="7BD0" data-bytes="8E 66">Žf</span> <span data-cp="7BEC" data-bytes="8C B2">Œ²</span> <span data-cp="7BFA" data-bytes="8E 6E">Žn</span> <span data-cp="7BFC" data-bytes="9F 64">Ÿd</span> <span data-cp="7C06" data-bytes="8E 6F">Žo</span> <span data-cp="7C12" data-bytes="99 53">™S</span> <span data-cp="7C1B" data-bytes="99 54">™T</span> <span data-cp="7C35" data-bytes="8E 70">Žp</span> <span data-cp="7C42" data-bytes="9F 61">Ÿa</span> <span data-cp="7C44" data-bytes="8E 72">Žr</span> <span data-cp="7C51" data-bytes="A0 6B"> k</span> <span data-cp="7C56" data-bytes="9F 40">Ÿ@</span> <span data-cp="7C5D" data-bytes="94 ED">”í</span> <span data-cp="7C6D" data-bytes="94 EE">”î</span> <span data-cp="7C70" data-bytes="9F BD">Ÿ½</span> <span data-cp="7C74" data-bytes="8E 7B">Ž{</span> <span data-cp="7C7B" data-bytes="99 57">™W</span> <span data-cp="7C7C" data-bytes="94 F7">”÷</span> <span data-cp="7C7E" data-bytes="9F 5F">Ÿ_</span> <span data-cp="7C83" data-bytes="8E 73">Žs</span> <span data-cp="7C86" data-bytes="9F 62">Ÿb</span> <span data-cp="7C8E" data-bytes="94 F6">”ö</span> <span data-cp="7C9C" data-bytes="99 58">™X</span> <span data-cp="7CA6" data-bytes="8E 75">Žu</span> <span data-cp="7CAC" data-bytes="90 72">r</span> <span data-cp="7CAE" data-bytes="94 F8">”ø</span> <span data-cp="7CB8" data-bytes="99 5A">™Z</span> <span data-cp="7CC2" data-bytes="A0 B0"> °</span> <span data-cp="7CC3" data-bytes="8C B3">Œ³</span> <span data-cp="7CC7" data-bytes="8E 79">Žy</span> <span data-cp="7CC9" data-bytes="8E 78">Žx</span> <span data-cp="7CCD" data-bytes="94 F3">”ó</span> <span data-cp="7CCE" data-bytes="8E 7E">Ž~</span> <span data-cp="7CD3" data-bytes="98 AF">˜¯</span> <span data-cp="7CDA" data-bytes="A0 B2"> ²</span> <span data-cp="7CE6" data-bytes="8E 7A">Žz</span> <span data-cp="7CED" data-bytes="99 5C">™\</span> <span data-cp="7CF3" data-bytes="8E 7C">Ž|</span> <span data-cp="7CF5" data-bytes="8E 7D">Ž}</span> <span data-cp="7CF9" data-bytes="8B D9">‹Ù</span> <span data-cp="7CFC" data-bytes="89 A2">‰¢</span> <span data-cp="7D25" data-bytes="9E D7">ž×</span> <span data-cp="7D4D" data-bytes="A0 B6"> ¶</span> <span data-cp="7D5A" data-bytes="9E 42">žB</span> <span data-cp="7D5D" data-bytes="8E A4">Ž¤</span> <span data-cp="7D89" data-bytes="8E A7">Ž§</span> <span data-cp="7D8B" data-bytes="8C 76">Œv</span> <span data-cp="7D95" data-bytes="87 67">‡g</span> <span data-cp="7D97" data-bytes="95 42">•B</span> <span data-cp="7DA4" data-bytes="98 7D">˜}</span> <span data-cp="7DA8" data-bytes="97 55">—U</span> <span data-cp="7DAB" data-bytes="8E A8">Ž¨</span> <span data-cp="7DB3" data-bytes="8E AA">Žª</span> <span data-cp="7DCD" data-bytes="89 A3">‰£</span> <span data-cp="7DCF" data-bytes="99 60">™`</span> <span data-cp="7DD0" 
data-bytes="99 62">™b</span> <span data-cp="7DD2" data-bytes="8E AB">Ž«</span> <span data-cp="7DD3" data-bytes="94 FC">”ü</span> <span data-cp="7DD4" data-bytes="99 61">™a</span> <span data-cp="7DDC" data-bytes="94 FA">”ú</span> <span data-cp="7DE4" data-bytes="8E AE">Ž®</span> <span data-cp="7DE5" data-bytes="8E B2">Ž²</span> <span data-cp="7DF5" data-bytes="8E B0">Ž°</span> <span data-cp="7DFD" data-bytes="99 63">™c</span> <span data-cp="7DFE" data-bytes="97 AA">—ª</span> <span data-cp="7E07" data-bytes="94 FB">”û</span> <span data-cp="7E1D" data-bytes="8E B4">Ž´</span> <span data-cp="7E27" data-bytes="8E BB">Ž»</span> <span data-cp="7E5B" data-bytes="8C DC">ŒÜ</span> <span data-cp="7E65" data-bytes="98 76">˜v</span> <span data-cp="7E67" data-bytes="8E A1">Ž¡</span> <span data-cp="7E6C" data-bytes="8C B4">Œ´</span> <span data-cp="7E6E" data-bytes="8E B7">Ž·</span> <span data-cp="7E7F" data-bytes="9D A6">¦</span> <span data-cp="7E87" data-bytes="9B 7B">›{</span> <span data-cp="7E8E" data-bytes="9E B0">ž°</span> <span data-cp="7E92" data-bytes="8E B8">Ž¸</span> <span data-cp="7E9F" data-bytes="9D 70">p</span> <span data-cp="7EA4" data-bytes="89 6E">‰n</span> <span data-cp="7EAC" data-bytes="89 6F">‰o</span> <span data-cp="7EBA" data-bytes="89 70">‰p</span> <span data-cp="7EC7" data-bytes="89 71">‰q</span> <span data-cp="7ECF" data-bytes="89 72">‰r</span> <span data-cp="7EDF" data-bytes="89 73">‰s</span> <span data-cp="7F06" data-bytes="89 74">‰t</span> <span data-cp="7F37" data-bytes="89 75">‰u</span> <span data-cp="7F40" data-bytes="8E BC">Ž¼</span> <span data-cp="7F41" data-bytes="8E BD">Ž½</span> <span data-cp="7F47" data-bytes="8E BE">Ž¾</span> <span data-cp="7F49" data-bytes="9D D1">Ñ</span> <span data-cp="7F4E" data-bytes="94 FD">”ý</span> <span data-cp="7F52" data-bytes="8B D7">‹×</span> <span data-cp="7F53" data-bytes="8B DA">‹Ú</span> <span data-cp="7F71" data-bytes="A0 E2"> â</span> <span data-cp="7F78" data-bytes="9F E9">Ÿé</span> <span data-cp="7F93" data-bytes="8A E7">Šç</span> <span data-cp="7F97" data-bytes="8E C2">ŽÂ</span> <span data-cp="7FA3" data-bytes="8E C4">ŽÄ</span> <span data-cp="7FAE" data-bytes="99 64">™d</span> <span data-cp="7FB4" data-bytes="99 65">™e</span> <span data-cp="7FDD" data-bytes="95 4E">•N</span> <span data-cp="7FE7" data-bytes="98 B3">˜³</span> <span data-cp="7FFA" data-bytes="8E CB">ŽË</span> <span data-cp="8002" data-bytes="8B DF">‹ß</span> <span data-cp="8005" data-bytes="8E CD">ŽÍ</span> <span data-cp="8008" data-bytes="8E CE">ŽÎ</span> <span data-cp="801D" data-bytes="8E CF">ŽÏ</span> <span data-cp="8020" data-bytes="99 68">™h</span> <span data-cp="8025" data-bytes="99 69">™i</span> <span data-cp="8028" data-bytes="8E D0">ŽÐ</span> <span data-cp="802E" data-bytes="99 6B">™k</span> <span data-cp="802F" data-bytes="8E D1">ŽÑ</span> <span data-cp="8031" data-bytes="99 6C">™l</span> <span data-cp="803B" data-bytes="8E D4">ŽÔ</span> <span data-cp="803C" data-bytes="8E D5">ŽÕ</span> <span data-cp="8054" data-bytes="99 6D">™m</span> <span data-cp="805B" data-bytes="A0 BE"> ¾</span> <span data-cp="8061" data-bytes="8E D6">ŽÖ</span> <span data-cp="8062" data-bytes="A0 BC"> ¼</span> <span data-cp="8063" data-bytes="A0 B5"> µ</span> <span data-cp="8066" data-bytes="A0 B4"> ´</span> <span data-cp="8080" data-bytes="8B E0">‹à</span> <span data-cp="809F" data-bytes="89 B5">‰µ</span> <span data-cp="80A7" data-bytes="8E DD">ŽÝ</span> <span data-cp="80B6" data-bytes="9E 5D">ž]</span> <span data-cp="80B7" data-bytes="99 71">™q</span> <span data-cp="80BC" 
data-bytes="89 AE">‰®</span> <span data-cp="80BD" data-bytes="9D E8">è</span> <span data-cp="80C6" data-bytes="95 65">•e</span> <span data-cp="80E9" data-bytes="99 72">™r</span> <span data-cp="80EC" data-bytes="8B 5C">‹\</span> <span data-cp="80F6" data-bytes="89 B1">‰±</span> <span data-cp="8103" data-bytes="A0 C0"> À</span> <span data-cp="8107" data-bytes="8E DF">Žß</span> <span data-cp="8109" data-bytes="95 66">•f</span> <span data-cp="810C" data-bytes="99 74">™t</span> <span data-cp="810E" data-bytes="99 76">™v</span> <span data-cp="8112" data-bytes="99 77">™w</span> <span data-cp="8114" data-bytes="99 79">™y</span> <span data-cp="8117" data-bytes="9D DA">Ú</span> <span data-cp="811A" data-bytes="8E E0">Žà</span> <span data-cp="812A" data-bytes="93 5C">“\</span> <span data-cp="8132" data-bytes="9D E6">æ</span> <span data-cp="8134" data-bytes="8B 5F">‹_</span> <span data-cp="8137" data-bytes="95 63">•c</span> <span data-cp="8142" data-bytes="95 67">•g</span> <span data-cp="8148" data-bytes="9D E3">ã</span> <span data-cp="8156" data-bytes="99 7C">™|</span> <span data-cp="8159" data-bytes="99 7D">™}</span> <span data-cp="815A" data-bytes="99 7E">™~</span> <span data-cp="816C" data-bytes="8C FB">Œû</span> <span data-cp="816D" data-bytes="8B 5B">‹[</span> <span data-cp="817C" data-bytes="99 A3">™£</span> <span data-cp="8184" data-bytes="99 A4">™¤</span> <span data-cp="8193" data-bytes="99 A6">™¦</span> <span data-cp="81A5" data-bytes="99 A8">™¨</span> <span data-cp="81AA" data-bytes="8A BE">Š¾</span> <span data-cp="81B6" data-bytes="9E 61">ža</span> <span data-cp="81C1" data-bytes="99 AA">™ª</span> <span data-cp="81C8" data-bytes="A0 C8"> È</span> <span data-cp="81E4" data-bytes="99 AB">™«</span> <span data-cp="81F6" data-bytes="98 C2">˜Â</span> <span data-cp="8218" data-bytes="8E E8">Žè</span> <span data-cp="821A" data-bytes="A0 BA"> º</span> <span data-cp="8229" data-bytes="8E EE">Žî</span> <span data-cp="822D" data-bytes="9E BF">ž¿</span> <span data-cp="823E" data-bytes="89 C2">‰Â</span> <span data-cp="8254" data-bytes="99 AC">™¬</span> <span data-cp="8262" data-bytes="95 6B">•k</span> <span data-cp="8265" data-bytes="95 6C">•l</span> <span data-cp="8276" data-bytes="99 AF">™¯</span> <span data-cp="8279" data-bytes="99 4A">™J</span> <span data-cp="827A" data-bytes="89 76">‰v</span> <span data-cp="827B" data-bytes="8F 48">H</span> <span data-cp="82A6" data-bytes="99 AE">™®</span> <span data-cp="82AA" data-bytes="8E FB">Žû</span> <span data-cp="82BF" data-bytes="8C D0">ŒÐ</span> <span data-cp="82C4" data-bytes="8B 52">‹R</span> <span data-cp="82CA" data-bytes="99 B0">™°</span> <span data-cp="82CF" data-bytes="89 77">‰w</span> <span data-cp="82D0" data-bytes="8F 41">A</span> <span data-cp="82D8" data-bytes="99 B1">™±</span> <span data-cp="82E2" data-bytes="8F 49">I</span> <span data-cp="82F7" data-bytes="9D E4">ä</span> <span data-cp="82F8" data-bytes="8C B5">Œµ</span> <span data-cp="82FD" data-bytes="9B 54">›T</span> <span data-cp="82FF" data-bytes="99 B2">™²</span> <span data-cp="830B" data-bytes="9E 68">žh</span> <span data-cp="8318" data-bytes="8F 4A">J</span> <span data-cp="831A" data-bytes="8F 42">B</span> <span data-cp="831D" data-bytes="8F 51">Q</span> <span data-cp="833D" data-bytes="98 46">˜F</span> <span data-cp="8357" data-bytes="99 B4">™´</span> <span data-cp="8362" data-bytes="8E F5">Žõ</span> <span data-cp="8366" data-bytes="9C CD">œÍ</span> <span data-cp="836F" data-bytes="89 78">‰x</span> <span data-cp="8385" data-bytes="8F 53">S</span> <span data-cp="8391" data-bytes="8F 
6F">o</span> <span data-cp="839C" data-bytes="8E 63">Žc</span> <span data-cp="83AC" data-bytes="8F 56">V</span> <span data-cp="83BE" data-bytes="9F C6">ŸÆ</span> <span data-cp="83C1" data-bytes="8F 57">W</span> <span data-cp="83CF" data-bytes="9C 77">œw</span> <span data-cp="83D3" data-bytes="8F 58">X</span> <span data-cp="83ED" data-bytes="98 48">˜H</span> <span data-cp="8405" data-bytes="99 B7">™·</span> <span data-cp="840F" data-bytes="8F 6E">n</span> <span data-cp="8414" data-bytes="96 65">–e</span> <span data-cp="8418" data-bytes="9D E7">ç</span> <span data-cp="841C" data-bytes="9E 62">žb</span> <span data-cp="8420" data-bytes="96 CC">–Ì</span> <span data-cp="8421" data-bytes="8E 67">Žg</span> <span data-cp="8426" data-bytes="98 7E">˜~</span> <span data-cp="843E" data-bytes="97 FC">—ü</span> <span data-cp="8448" data-bytes="98 F9">˜ù</span> <span data-cp="844A" data-bytes="8F 66">f</span> <span data-cp="8453" data-bytes="95 6E">•n</span> <span data-cp="8455" data-bytes="92 45">’E</span> <span data-cp="8458" data-bytes="8F 60">`</span> <span data-cp="845C" data-bytes="9E D1">žÑ</span> <span data-cp="8464" data-bytes="99 B9">™¹</span> <span data-cp="8471" data-bytes="8F 62">b</span> <span data-cp="8472" data-bytes="97 4C">—L</span> <span data-cp="847F" data-bytes="91 C7">‘Ç</span> <span data-cp="8480" data-bytes="95 5F">•_</span> <span data-cp="8484" data-bytes="87 AB">‡«</span> <span data-cp="8488" data-bytes="99 BB">™»</span> <span data-cp="8492" data-bytes="8E 6D">Žm</span> <span data-cp="8493" data-bytes="8F 71">q</span> <span data-cp="8496" data-bytes="94 CB">”Ë</span> <span data-cp="84A3" data-bytes="95 B1">•±</span> <span data-cp="84A8" data-bytes="8F 69">i</span> <span data-cp="84AD" data-bytes="9A F2">šò</span> <span data-cp="84BD" data-bytes="96 C3">–Ã</span> <span data-cp="84BE" data-bytes="99 BD">™½</span> <span data-cp="84DA" data-bytes="A0 CF"> Ï</span> <span data-cp="84DE" data-bytes="8F 6D">m</span> <span data-cp="84E1" data-bytes="99 BE">™¾</span> <span data-cp="84E2" data-bytes="8E F4">Žô</span> <span data-cp="84E4" data-bytes="8F 72">r</span> <span data-cp="84E5" data-bytes="95 E4">•ä</span> <span data-cp="84F8" data-bytes="99 BF">™¿</span> <span data-cp="8503" data-bytes="92 42">’B</span> <span data-cp="8504" data-bytes="87 D7">‡×</span> <span data-cp="8510" data-bytes="99 C0">™À</span> <span data-cp="8534" data-bytes="8F 77">w</span> <span data-cp="8538" data-bytes="99 C1">™Á</span> <span data-cp="854B" data-bytes="8F 40">@</span> <span data-cp="8552" data-bytes="99 C2">™Â</span> <span data-cp="855A" data-bytes="8F 5C">\</span> <span data-cp="855F" data-bytes="8C BD">Œ½</span> <span data-cp="856F" data-bytes="99 C4">™Ä</span> <span data-cp="8570" data-bytes="99 C5">™Å</span> <span data-cp="8573" data-bytes="8F 7B">{</span> <span data-cp="8593" data-bytes="87 76">‡v</span> <span data-cp="8597" data-bytes="8C B6">Œ¶</span> <span data-cp="85C1" data-bytes="8F A3">£</span> <span data-cp="85D6" data-bytes="8C CE">ŒÎ</span> <span data-cp="85E0" data-bytes="99 C6">™Æ</span> <span data-cp="85EE" data-bytes="96 CD">–Í</span> <span data-cp="85FC" data-bytes="96 C7">–Ç</span> <span data-cp="8602" data-bytes="8F A5">¥</span> <span data-cp="860F" data-bytes="8C 61">Œa</span> <span data-cp="8610" data-bytes="95 70">•p</span> <span data-cp="8613" data-bytes="87 AF">‡¯</span> <span data-cp="8614" data-bytes="93 68">“h</span> <span data-cp="8616" data-bytes="8F 7E">~</span> <span data-cp="8628" data-bytes="8F AA">ª</span> <span data-cp="862F" data-bytes="A0 50"> P</span> <span 
data-cp="8642" data-bytes="90 D3">Ó</span> <span data-cp="8645" data-bytes="95 56">•V</span> <span data-cp="866C" data-bytes="8F B8">¸</span> <span data-cp="8672" data-bytes="99 C8">™È</span> <span data-cp="867E" data-bytes="8F AF">¯</span> <span data-cp="8692" data-bytes="99 C9">™É</span> <span data-cp="86A0" data-bytes="95 79">•y</span> <span data-cp="86AD" data-bytes="9F 49">ŸI</span> <span data-cp="86B2" data-bytes="99 CA">™Ê</span> <span data-cp="86EF" data-bytes="99 CB">™Ë</span> <span data-cp="8770" data-bytes="9D D5">Õ</span> <span data-cp="8771" data-bytes="8F B0">°</span> <span data-cp="8786" data-bytes="9E 5F">ž_</span> <span data-cp="878B" data-bytes="99 CD">™Í</span> <span data-cp="878C" data-bytes="A0 C9"> É</span> <span data-cp="87A5" data-bytes="9A DB">šÛ</span> <span data-cp="87A9" data-bytes="A0 C6"> Æ</span> <span data-cp="87B1" data-bytes="8F B4">´</span> <span data-cp="87C1" data-bytes="A0 D7"> ×</span> <span data-cp="87CE" data-bytes="A0 C7"> Ç</span> <span data-cp="87D6" data-bytes="A0 43"> C</span> <span data-cp="87DA" data-bytes="8F B5">µ</span> <span data-cp="87EE" data-bytes="8F B2">²</span> <span data-cp="87F5" data-bytes="A0 61"> a</span> <span data-cp="8804" data-bytes="9E 5E">ž^</span> <span data-cp="880F" data-bytes="8F B6">¶</span> <span data-cp="8818" data-bytes="9F E8">Ÿè</span> <span data-cp="8827" data-bytes="9C B2">œ²</span> <span data-cp="882D" data-bytes="95 7C">•|</span> <span data-cp="8842" data-bytes="9F C7">ŸÇ</span> <span data-cp="8845" data-bytes="8F BB">»</span> <span data-cp="8846" data-bytes="8F BC">¼</span> <span data-cp="884F" data-bytes="8F EC">ì</span> <span data-cp="885E" data-bytes="8F C0">À</span> <span data-cp="8860" data-bytes="93 6A">“j</span> <span data-cp="8864" data-bytes="8B E4">‹ä</span> <span data-cp="8865" data-bytes="9C 7C">œ|</span> <span data-cp="886E" data-bytes="95 A1">•¡</span> <span data-cp="8887" data-bytes="95 A3">•£</span> <span data-cp="888F" data-bytes="8C 45">ŒE</span> <span data-cp="8890" data-bytes="8C B8">Œ¸</span> <span data-cp="889C" data-bytes="8F C1">Á</span> <span data-cp="889D" data-bytes="87 B7">‡·</span> <span data-cp="88A0" data-bytes="A0 52"> R</span> <span data-cp="88AE" data-bytes="99 D0">™Ð</span> <span data-cp="88B4" data-bytes="8F C3">Ã</span> <span data-cp="88B5" data-bytes="8F C4">Ä</span> <span data-cp="88BF" data-bytes="95 A4">•¤</span> <span data-cp="88C5" data-bytes="8F C6">Æ</span> <span data-cp="88C7" data-bytes="9E 60">ž`</span> <span data-cp="88E6" data-bytes="95 A5">•¥</span> <span data-cp="88F5" data-bytes="9C B3">œ³</span> <span data-cp="88FF" data-bytes="99 D1">™Ñ</span> <span data-cp="8924" data-bytes="99 D2">™Ò</span> <span data-cp="8943" data-bytes="9C C2">œÂ</span> <span data-cp="8947" data-bytes="99 D3">™Ó</span> <span data-cp="894D" data-bytes="95 A7">•§</span> <span data-cp="8954" data-bytes="95 A9">•©</span> <span data-cp="8965" data-bytes="95 A6">•¦</span> <span data-cp="8977" data-bytes="9C 5D">œ]</span> <span data-cp="8980" data-bytes="98 E2">˜â</span> <span data-cp="8987" data-bytes="8F C9">É</span> <span data-cp="8989" data-bytes="A0 C2"> Â</span> <span data-cp="898A" data-bytes="8F CA">Ê</span> <span data-cp="8991" data-bytes="99 D4">™Ô</span> <span data-cp="8994" data-bytes="A0 B9"> ¹</span> <span data-cp="89A5" data-bytes="9B 58">›X</span> <span data-cp="89A6" data-bytes="8F CB">Ë</span> <span data-cp="89A7" data-bytes="8F CD">Í</span> <span data-cp="89A9" data-bytes="8F CC">Ì</span> <span data-cp="89BC" data-bytes="8F CE">Î</span> <span data-cp="89C1" 
data-bytes="8B E5">‹å</span> <span data-cp="89C6" data-bytes="89 79">‰y</span> <span data-cp="89E7" data-bytes="8F D0">Ð</span> <span data-cp="8A1C" data-bytes="95 B6">•¶</span> <span data-cp="8A29" data-bytes="99 D6">™Ö</span> <span data-cp="8A2B" data-bytes="95 E5">•å</span> <span data-cp="8A38" data-bytes="99 D7">™×</span> <span data-cp="8A3D" data-bytes="95 B5">•µ</span> <span data-cp="8A49" data-bytes="A0 CA"> Ê</span> <span data-cp="8A67" data-bytes="9F FD">Ÿý</span> <span data-cp="8A7E" data-bytes="A0 58"> X</span> <span data-cp="8A90" data-bytes="8F D6">Ö</span> <span data-cp="8A94" data-bytes="99 D8">™Ø</span> <span data-cp="8A9C" data-bytes="8F D3">Ó</span> <span data-cp="8AA9" data-bytes="8F E5">å</span> <span data-cp="8AAF" data-bytes="8F E9">é</span> <span data-cp="8AB4" data-bytes="99 D9">™Ù</span> <span data-cp="8ACC" data-bytes="8C F7">Œ÷</span> <span data-cp="8ADA" data-bytes="92 7C">’|</span> <span data-cp="8AEA" data-bytes="9C 45">œE</span> <span data-cp="8AF9" data-bytes="8C E8">Œè</span> <span data-cp="8B0C" data-bytes="8F DE">Þ</span> <span data-cp="8B1F" data-bytes="8F DF">ß</span> <span data-cp="8B2D" data-bytes="A0 4B"> K</span> <span data-cp="8B3F" data-bytes="8F E2">â</span> <span data-cp="8B43" data-bytes="A0 CC"> Ì</span> <span data-cp="8B4C" data-bytes="8F E3">ã</span> <span data-cp="8B4D" data-bytes="8F E4">ä</span> <span data-cp="8B5E" data-bytes="9B C4">›Ä</span> <span data-cp="8B62" data-bytes="9B FC">›ü</span> <span data-cp="8B69" data-bytes="96 4C">–L</span> <span data-cp="8B81" data-bytes="9A F6">šö</span> <span data-cp="8B83" data-bytes="8C AE">Œ®</span> <span data-cp="8B8F" data-bytes="87 CB">‡Ë</span> <span data-cp="8B90" data-bytes="8F E7">ç</span> <span data-cp="8B9B" data-bytes="8F E8">è</span> <span data-cp="8BA0" data-bytes="8B E7">‹ç</span> <span data-cp="8BBE" data-bytes="89 7A">‰z</span> <span data-cp="8BE2" data-bytes="89 7B">‰{</span> <span data-cp="8C51" data-bytes="99 DA">™Ú</span> <span data-cp="8C9B" data-bytes="8F ED">í</span> <span data-cp="8C9F" data-bytes="95 C0">•À</span> <span data-cp="8CAD" data-bytes="A0 CB"> Ë</span> <span data-cp="8CCD" data-bytes="9E 48">žH</span> <span data-cp="8CD4" data-bytes="99 DB">™Û</span> <span data-cp="8CD6" data-bytes="8F F3">ó</span> <span data-cp="8CDB" data-bytes="8F F9">ù</span> <span data-cp="8CE9" data-bytes="95 C1">•Á</span> <span data-cp="8CEB" data-bytes="A0 4E"> N</span> <span data-cp="8CF2" data-bytes="99 DC">™Ü</span> <span data-cp="8CF7" data-bytes="A0 64"> d</span> <span data-cp="8D03" data-bytes="8F F7">÷</span> <span data-cp="8D0B" data-bytes="89 B0">‰°</span> <span data-cp="8D0C" data-bytes="A0 48"> H</span> <span data-cp="8D11" data-bytes="8F FB">û</span> <span data-cp="8D12" data-bytes="8F F6">ö</span> <span data-cp="8D18" data-bytes="9D DC">Ü</span> <span data-cp="8D1C" data-bytes="99 DD">™Ý</span> <span data-cp="8D1D" data-bytes="8B E8">‹è</span> <span data-cp="8D77" data-bytes="8F FE">þ</span> <span data-cp="8D7A" data-bytes="92 C1">’Á</span> <span data-cp="8D82" data-bytes="9F D6">ŸÖ</span> <span data-cp="8DA6" data-bytes="A0 D2"> Ò</span> <span data-cp="8DA9" data-bytes="90 40">@</span> <span data-cp="8DC0" data-bytes="8A C4">ŠÄ</span> <span data-cp="8DC3" data-bytes="99 E0">™à</span> <span data-cp="8DD4" data-bytes="9F F0">Ÿð</span> <span data-cp="8E01" data-bytes="9F F3">Ÿó</span> <span data-cp="8E0E" data-bytes="9D BF">¿</span> <span data-cp="8E28" data-bytes="9F F6">Ÿö</span> <span data-cp="8E2A" data-bytes="95 C8">•È</span> <span data-cp="8E2D" data-bytes="9E 
5A">žZ</span> <span data-cp="8E3A" data-bytes="99 E3">™ã</span> <span data-cp="8E46" data-bytes="8A 4A">ŠJ</span> <span data-cp="8E4F" data-bytes="9F F1">Ÿñ</span> <span data-cp="8E68" data-bytes="8A A7">Š§</span> <span data-cp="8E71" data-bytes="99 E6">™æ</span> <span data-cp="8E75" data-bytes="9F F7">Ÿ÷</span> <span data-cp="8E77" data-bytes="9F ED">Ÿí</span> <span data-cp="8E7E" data-bytes="8A 5C">Š\</span> <span data-cp="8E80" data-bytes="9D AE">®</span> <span data-cp="8EA7" data-bytes="95 C9">•É</span> <span data-cp="8EAD" data-bytes="90 48">H</span> <span data-cp="8EB0" data-bytes="99 E8">™è</span> <span data-cp="8EB6" data-bytes="90 49">I</span> <span data-cp="8EB9" data-bytes="8C BA">Œº</span> <span data-cp="8EBC" data-bytes="90 B1">±</span> <span data-cp="8EC3" data-bytes="90 4A">J</span> <span data-cp="8ECE" data-bytes="99 EA">™ê</span> <span data-cp="8EDA" data-bytes="9B D1">›Ñ</span> <span data-cp="8EE2" data-bytes="99 EB">™ë</span> <span data-cp="8EE4" data-bytes="99 EC">™ì</span> <span data-cp="8EED" data-bytes="99 ED">™í</span> <span data-cp="8EF2" data-bytes="99 EE">™î</span> <span data-cp="8F0B" data-bytes="9D 57">W</span> <span data-cp="8F19" data-bytes="90 4C">L</span> <span data-cp="8F2D" data-bytes="90 4D">M</span> <span data-cp="8F30" data-bytes="95 CB">•Ë</span> <span data-cp="8F36" data-bytes="9C 42">œB</span> <span data-cp="8F41" data-bytes="97 E2">—â</span> <span data-cp="8F4A" data-bytes="95 CC">•Ì</span> <span data-cp="8F5C" data-bytes="9F 78">Ÿx</span> <span data-cp="8F66" data-bytes="89 7C">‰|</span> <span data-cp="8F67" data-bytes="89 7D">‰}</span> <span data-cp="8F6E" data-bytes="89 7E">‰~</span> <span data-cp="8F93" data-bytes="99 5D">™]</span> <span data-cp="8FA0" data-bytes="9B 5A">›Z</span> <span data-cp="8FA5" data-bytes="90 50">P</span> <span data-cp="8FA7" data-bytes="8C 4F">ŒO</span> <span data-cp="8FB3" data-bytes="90 54">T</span> <span data-cp="8FB6" data-bytes="9A A8">š¨</span> <span data-cp="8FB7" data-bytes="99 EF">™ï</span> <span data-cp="8FB9" data-bytes="9D A3">£</span> <span data-cp="8FBA" data-bytes="9D A1">¡</span> <span data-cp="8FBB" data-bytes="99 43">™C</span> <span data-cp="8FBC" data-bytes="99 45">™E</span> <span data-cp="8FBE" data-bytes="9D 7D">}</span> <span data-cp="8FC1" data-bytes="99 F0">™ð</span> <span data-cp="8FCA" data-bytes="99 F1">™ñ</span> <span data-cp="8FCC" data-bytes="99 F2">™ò</span> <span data-cp="8FCF" data-bytes="8C BC">Œ¼</span> <span data-cp="8FD0" data-bytes="9D 60">`</span> <span data-cp="8FDA" data-bytes="A0 A3"> £</span> <span data-cp="8FF9" data-bytes="90 5B">[</span> <span data-cp="9008" data-bytes="9E DB">žÛ</span> <span data-cp="9012" data-bytes="9D 79">y</span> <span data-cp="9033" data-bytes="99 F3">™ó</span> <span data-cp="9037" data-bytes="90 62">b</span> <span data-cp="9046" data-bytes="87 BD">‡½</span> <span data-cp="904C" data-bytes="9F 55">ŸU</span> <span data-cp="9056" data-bytes="9B F9">›ù</span> <span data-cp="9061" data-bytes="90 65">e</span> <span data-cp="9064" data-bytes="96 E0">–à</span> <span data-cp="906C" data-bytes="98 BE">˜¾</span> <span data-cp="9097" data-bytes="95 D9">•Ù</span> <span data-cp="90A8" data-bytes="90 68">h</span> <span data-cp="90AE" data-bytes="90 6C">l</span> <span data-cp="90BB" data-bytes="95 D8">•Ø</span> <span data-cp="90C4" data-bytes="90 6A">j</span> <span data-cp="90FD" data-bytes="90 6D">m</span> <span data-cp="9104" data-bytes="9C 68">œh</span> <span data-cp="9151" data-bytes="9F B2">Ÿ²</span> <span data-cp="9159" data-bytes="9F AE">Ÿ®</span> <span 
data-cp="915C" data-bytes="9F B0">Ÿ°</span> <span data-cp="915E" data-bytes="89 AD">‰­</span> <span data-cp="9167" data-bytes="90 6E">n</span> <span data-cp="9170" data-bytes="9E 71">žq</span> <span data-cp="9176" data-bytes="9E 4A">žJ</span> <span data-cp="917C" data-bytes="9F DC">ŸÜ</span> <span data-cp="918C" data-bytes="89 AB">‰«</span> <span data-cp="918E" data-bytes="9F B8">Ÿ¸</span> <span data-cp="91A9" data-bytes="90 70">p</span> <span data-cp="91B6" data-bytes="8B 63">‹c</span> <span data-cp="91BB" data-bytes="95 DC">•Ü</span> <span data-cp="91C4" data-bytes="90 71">q</span> <span data-cp="91D4" data-bytes="9B DE">›Þ</span> <span data-cp="91DF" data-bytes="89 49">‰I</span> <span data-cp="91E5" data-bytes="96 5B">–[</span> <span data-cp="91F6" data-bytes="8C 50">ŒP</span> <span data-cp="91FA" data-bytes="94 A6">”¦</span> <span data-cp="91FE" data-bytes="8F D5">Õ</span> <span data-cp="9208" data-bytes="9E 73">žs</span> <span data-cp="920E" data-bytes="90 75">u</span> <span data-cp="9213" data-bytes="99 F7">™÷</span> <span data-cp="9218" data-bytes="87 B2">‡²</span> <span data-cp="9221" data-bytes="8C BF">Œ¿</span> <span data-cp="9228" data-bytes="99 F9">™ù</span> <span data-cp="922A" data-bytes="96 63">–c</span> <span data-cp="922B" data-bytes="95 B9">•¹</span> <span data-cp="9235" data-bytes="94 D4">”Ô</span> <span data-cp="9241" data-bytes="90 77">w</span> <span data-cp="9244" data-bytes="90 AB">«</span> <span data-cp="9255" data-bytes="9D 4D">M</span> <span data-cp="9258" data-bytes="99 FA">™ú</span> <span data-cp="925D" data-bytes="92 E3">’ã</span> <span data-cp="925F" data-bytes="97 BB">—»</span> <span data-cp="9262" data-bytes="90 78">x</span> <span data-cp="926B" data-bytes="99 FB">™û</span> <span data-cp="926E" data-bytes="97 E0">—à</span> <span data-cp="9277" data-bytes="96 DC">–Ü</span> <span data-cp="9281" data-bytes="9C A8">œ¨</span> <span data-cp="9284" data-bytes="97 72">—r</span> <span data-cp="9289" data-bytes="94 40">”@</span> <span data-cp="928F" data-bytes="92 F2">’ò</span> <span data-cp="92AE" data-bytes="99 FD">™ý</span> <span data-cp="92B1" data-bytes="99 FC">™ü</span> <span data-cp="92B9" data-bytes="90 7A">z</span> <span data-cp="92BA" data-bytes="96 4A">–J</span> <span data-cp="92BE" data-bytes="96 D8">–Ø</span> <span data-cp="92BF" data-bytes="99 FE">™þ</span> <span data-cp="92D4" data-bytes="90 4B">K</span> <span data-cp="92E3" data-bytes="9A 40">š@</span> <span data-cp="92E5" data-bytes="97 5B">—[</span> <span data-cp="92EB" data-bytes="9A 41">šA</span> <span data-cp="92EC" data-bytes="91 DD">‘Ý</span> <span data-cp="92F2" data-bytes="93 FC">“ü</span> <span data-cp="92F3" data-bytes="9A 42">šB</span> <span data-cp="92F4" data-bytes="9A 43">šC</span> <span data-cp="92F6" data-bytes="96 59">–Y</span> <span data-cp="92FD" data-bytes="9A 44">šD</span> <span data-cp="9303" data-bytes="90 51">Q</span> <span data-cp="9307" data-bytes="94 BF">”¿</span> <span data-cp="932C" data-bytes="90 A2">¢</span> <span data-cp="9330" data-bytes="9C AB">œ«</span> <span data-cp="9331" data-bytes="97 76">—v</span> <span data-cp="9342" data-bytes="94 A8">”¨</span> <span data-cp="9343" data-bytes="9A 45">šE</span> <span data-cp="9345" data-bytes="9D E1">á</span> <span data-cp="9348" data-bytes="96 D9">–Ù</span> <span data-cp="935F" data-bytes="97 74">—t</span> <span data-cp="9366" data-bytes="92 E5">’å</span> <span data-cp="9368" data-bytes="96 45">–E</span> <span data-cp="9369" data-bytes="91 DA">‘Ú</span> <span data-cp="936B" data-bytes="90 A3">£</span> <span data-cp="936E" 
data-bytes="92 C8">’È</span> <span data-cp="9373" data-bytes="90 AF">¯</span> <span data-cp="9374" data-bytes="97 BF">—¿</span> <span data-cp="9378" data-bytes="91 4C">‘L</span> <span data-cp="937D" data-bytes="96 7A">–z</span> <span data-cp="9381" data-bytes="91 DE">‘Þ</span> <span data-cp="9384" data-bytes="9A 46">šF</span> <span data-cp="9386" data-bytes="97 79">—y</span> <span data-cp="9387" data-bytes="94 6C">”l</span> <span data-cp="9390" data-bytes="98 58">˜X</span> <span data-cp="939C" data-bytes="92 66">’f</span> <span data-cp="93A0" data-bytes="93 FB">“û</span> <span data-cp="93AD" data-bytes="9A 47">šG</span> <span data-cp="93B8" data-bytes="97 49">—I</span> <span data-cp="93BB" data-bytes="97 48">—H</span> <span data-cp="93BD" data-bytes="93 4A">“J</span> <span data-cp="93BF" data-bytes="9C E2">œâ</span> <span data-cp="93C6" data-bytes="92 64">’d</span> <span data-cp="93CB" data-bytes="91 DF">‘ß</span> <span data-cp="93DB" data-bytes="96 D7">–×</span> <span data-cp="93E0" data-bytes="93 43">“C</span> <span data-cp="93F3" data-bytes="91 DB">‘Û</span> <span data-cp="93F4" data-bytes="8C 6A">Œj</span> <span data-cp="9401" data-bytes="97 AF">—¯</span> <span data-cp="9404" data-bytes="95 DD">•Ý</span> <span data-cp="9408" data-bytes="93 48">“H</span> <span data-cp="9417" data-bytes="9A 4B">šK</span> <span data-cp="941D" data-bytes="9A 4D">šM</span> <span data-cp="9424" data-bytes="91 BC">‘¼</span> <span data-cp="9425" data-bytes="90 E2">â</span> <span data-cp="9426" data-bytes="90 B4">´</span> <span data-cp="9427" data-bytes="95 E1">•á</span> <span data-cp="942D" data-bytes="9A 4E">šN</span> <span data-cp="942F" data-bytes="87 AD">‡­</span> <span data-cp="943E" data-bytes="9A 4F">šO</span> <span data-cp="944D" data-bytes="96 DD">–Ý</span> <span data-cp="9454" data-bytes="9A 51">šQ</span> <span data-cp="9458" data-bytes="96 A7">–§</span> <span data-cp="945B" data-bytes="90 B0">°</span> <span data-cp="9465" data-bytes="9C 4E">œN</span> <span data-cp="9467" data-bytes="94 43">”C</span> <span data-cp="946C" data-bytes="8E BA">Žº</span> <span data-cp="9479" data-bytes="9A 52">šR</span> <span data-cp="9485" data-bytes="8B E9">‹é</span> <span data-cp="949F" data-bytes="9C AF">œ¯</span> <span data-cp="94A2" data-bytes="8B FD">‹ý</span> <span data-cp="94C1" data-bytes="9A BC">š¼</span> <span data-cp="94C3" data-bytes="9A B8">š¸</span> <span data-cp="94DC" data-bytes="9A AE">š®</span> <span data-cp="94F6" data-bytes="9A A7">š§</span> <span data-cp="952D" data-bytes="9A 53">šS</span> <span data-cp="9547" data-bytes="9D 74">t</span> <span data-cp="9578" data-bytes="8B EA">‹ê</span> <span data-cp="957F" data-bytes="8B EB">‹ë</span> <span data-cp="9585" data-bytes="90 B2">²</span> <span data-cp="9596" data-bytes="95 E9">•é</span> <span data-cp="9597" data-bytes="95 E8">•è</span> <span data-cp="9599" data-bytes="95 E6">•æ</span> <span data-cp="95A0" data-bytes="90 B5">µ</span> <span data-cp="95A2" data-bytes="9A 54">šT</span> <span data-cp="95A6" data-bytes="90 B3">³</span> <span data-cp="95A7" data-bytes="95 E7">•ç</span> <span data-cp="95AA" data-bytes="8B 50">‹P</span> <span data-cp="95E8" data-bytes="8B EC">‹ì</span> <span data-cp="95F4" data-bytes="9A 56">šV</span> <span data-cp="961D" data-bytes="8B FB">‹û</span> <span data-cp="9633" data-bytes="9A 57">šW</span> <span data-cp="9638" data-bytes="A0 AA"> ª</span> <span data-cp="9641" data-bytes="9F A6">Ÿ¦</span> <span data-cp="9645" data-bytes="99 CC">™Ì</span> <span data-cp="9656" data-bytes="9C 59">œY</span> <span data-cp="9669" 
data-bytes="99 B5">™µ</span> <span data-cp="967B" data-bytes="90 BE">¾</span> <span data-cp="9681" data-bytes="9F AF">Ÿ¯</span> <span data-cp="968F" data-bytes="95 F2">•ò</span> <span data-cp="9696" data-bytes="90 BF">¿</span> <span data-cp="96A3" data-bytes="90 C1">Á</span> <span data-cp="96B6" data-bytes="90 C4">Ä</span> <span data-cp="96BD" data-bytes="90 C7">Ç</span> <span data-cp="96F4" data-bytes="92 E4">’ä</span> <span data-cp="9703" data-bytes="9F 52">ŸR</span> <span data-cp="971B" data-bytes="90 DB">Û</span> <span data-cp="9721" data-bytes="A0 66"> f</span> <span data-cp="9731" data-bytes="90 D2">Ò</span> <span data-cp="9734" data-bytes="87 6B">‡k</span> <span data-cp="9736" data-bytes="90 D4">Ô</span> <span data-cp="9740" data-bytes="9A 5B">š[</span> <span data-cp="9741" data-bytes="95 FD">•ý</span> <span data-cp="974A" data-bytes="87 B1">‡±</span> <span data-cp="9751" data-bytes="8B C4">‹Ä</span> <span data-cp="9755" data-bytes="8C 66">Œf</span> <span data-cp="9757" data-bytes="90 DE">Þ</span> <span data-cp="975C" data-bytes="90 DC">Ü</span> <span data-cp="975D" data-bytes="96 44">–D</span> <span data-cp="975F" data-bytes="90 E1">á</span> <span data-cp="976D" data-bytes="9E 46">žF</span> <span data-cp="9771" data-bytes="96 51">–Q</span> <span data-cp="9789" data-bytes="90 E6">æ</span> <span data-cp="979B" data-bytes="96 50">–P</span> <span data-cp="979F" data-bytes="90 E7">ç</span> <span data-cp="97B1" data-bytes="90 E8">è</span> <span data-cp="97B2" data-bytes="9A 5D">š]</span> <span data-cp="97B4" data-bytes="9F 7A">Ÿz</span> <span data-cp="97B8" data-bytes="9B 5C">›\</span> <span data-cp="97BA" data-bytes="9F 7C">Ÿ|</span> <span data-cp="97BE" data-bytes="90 E9">é</span> <span data-cp="97C0" data-bytes="90 EA">ê</span> <span data-cp="97C2" data-bytes="9A 5E">š^</span> <span data-cp="97C8" data-bytes="9F 76">Ÿv</span> <span data-cp="97D2" data-bytes="90 EB">ë</span> <span data-cp="97E0" data-bytes="90 EC">ì</span> <span data-cp="97E6" data-bytes="8B EE">‹î</span> <span data-cp="97EE" data-bytes="90 EE">î</span> <span data-cp="97F2" data-bytes="91 C6">‘Æ</span> <span data-cp="97F5" data-bytes="90 F2">ò</span> <span data-cp="97FF" data-bytes="90 F1">ñ</span> <span data-cp="9815" data-bytes="8A 74">Št</span> <span data-cp="981F" data-bytes="96 57">–W</span> <span data-cp="9823" data-bytes="9C EF">œï</span> <span data-cp="982E" data-bytes="9F DF">Ÿß</span> <span data-cp="9833" data-bytes="90 F7">÷</span> <span data-cp="9834" data-bytes="90 F6">ö</span> <span data-cp="9847" data-bytes="9B 5E">›^</span> <span data-cp="984B" data-bytes="90 F8">ø</span> <span data-cp="9856" data-bytes="8C FC">Œü</span> <span data-cp="9866" data-bytes="90 F9">ù</span> <span data-cp="9868" data-bytes="8C C9">ŒÉ</span> <span data-cp="9875" data-bytes="8B EF">‹ï</span> <span data-cp="98B4" data-bytes="9F E0">Ÿà</span> <span data-cp="98B7" data-bytes="91 42">‘B</span> <span data-cp="98B9" data-bytes="9A 62">šb</span> <span data-cp="98C3" data-bytes="95 69">•i</span> <span data-cp="98C7" data-bytes="91 44">‘D</span> <span data-cp="98C8" data-bytes="91 43">‘C</span> <span data-cp="98CA" data-bytes="91 41">‘A</span> <span data-cp="98CE" data-bytes="8B F0">‹ð</span> <span data-cp="98DC" data-bytes="96 60">–`</span> <span data-cp="98DE" data-bytes="8B F1">‹ñ</span> <span data-cp="98E0" data-bytes="99 F6">™ö</span> <span data-cp="98E1" data-bytes="91 49">‘I</span> <span data-cp="98E6" data-bytes="91 4A">‘J</span> <span data-cp="98EC" data-bytes="91 4B">‘K</span> <span data-cp="98F1" data-bytes="9A 64">šd</span> 
<span data-cp="98F5" data-bytes="8A BF">Š¿</span> <span data-cp="990E" data-bytes="9A 66">šf</span> <span data-cp="9919" data-bytes="9A 67">šg</span> <span data-cp="991C" data-bytes="9A 69">ši</span> <span data-cp="9937" data-bytes="9A 6A">šj</span> <span data-cp="9938" data-bytes="96 52">–R</span> <span data-cp="9939" data-bytes="91 4D">‘M</span> <span data-cp="993B" data-bytes="96 66">–f</span> <span data-cp="9940" data-bytes="9F 7B">Ÿ{</span> <span data-cp="9942" data-bytes="9A 6B">šk</span> <span data-cp="994A" data-bytes="A0 6C"> l</span> <span data-cp="994D" data-bytes="96 67">–g</span> <span data-cp="995D" data-bytes="9A 6C">šl</span> <span data-cp="9962" data-bytes="9A 6D">šm</span> <span data-cp="9963" data-bytes="8B F2">‹ò</span> <span data-cp="999B" data-bytes="96 6A">–j</span> <span data-cp="99AA" data-bytes="96 6C">–l</span> <span data-cp="99B8" data-bytes="91 C4">‘Ä</span> <span data-cp="99BC" data-bytes="96 77">–w</span> <span data-cp="99C4" data-bytes="99 F4">™ô</span> <span data-cp="99C5" data-bytes="9A 6F">šo</span> <span data-cp="99D6" data-bytes="9B C6">›Æ</span> <span data-cp="99DA" data-bytes="9F AB">Ÿ«</span> <span data-cp="99E0" data-bytes="8C BE">Œ¾</span> <span data-cp="99E1" data-bytes="8E C1">ŽÁ</span> <span data-cp="99E6" data-bytes="95 55">•U</span> <span data-cp="99F5" data-bytes="91 52">‘R</span> <span data-cp="9A0C" data-bytes="91 53">‘S</span> <span data-cp="9A10" data-bytes="91 55">‘U</span> <span data-cp="9A1F" data-bytes="95 5D">•]</span> <span data-cp="9A21" data-bytes="96 71">–q</span> <span data-cp="9A26" data-bytes="9C 6D">œm</span> <span data-cp="9A2F" data-bytes="96 73">–s</span> <span data-cp="9A3B" data-bytes="91 54">‘T</span> <span data-cp="9A3C" data-bytes="9A 71">šq</span> <span data-cp="9A58" data-bytes="91 56">‘V</span> <span data-cp="9A5C" data-bytes="96 6D">–m</span> <span data-cp="9A63" data-bytes="95 57">•W</span> <span data-cp="9A6C" data-bytes="89 C6">‰Æ</span> <span data-cp="9A8F" data-bytes="89 C7">‰Ç</span> <span data-cp="9AB2" data-bytes="8A 6A">Šj</span> <span data-cp="9AB6" data-bytes="8B 57">‹W</span> <span data-cp="9ABA" data-bytes="9F E1">Ÿá</span> <span data-cp="9ABD" data-bytes="9B 5F">›_</span> <span data-cp="9AD7" data-bytes="A0 5D"> ]</span> <span data-cp="9AE0" data-bytes="91 5B">‘[</span> <span data-cp="9AE2" data-bytes="91 5C">‘\</span> <span data-cp="9AF4" data-bytes="91 5E">‘^</span> <span data-cp="9AFF" data-bytes="9F 5C">Ÿ\</span> <span data-cp="9B02" data-bytes="9F 57">ŸW</span> <span data-cp="9B09" data-bytes="9F 65">Ÿe</span> <span data-cp="9B0F" data-bytes="9A 72">šr</span> <span data-cp="9B14" data-bytes="91 60">‘`</span> <span data-cp="9B2A" data-bytes="9F 5E">Ÿ^</span> <span data-cp="9B2D" data-bytes="91 61">‘a</span> <span data-cp="9B2E" data-bytes="9F 60">Ÿ`</span> <span data-cp="9B34" data-bytes="91 64">‘d</span> <span data-cp="9B39" data-bytes="9F 41">ŸA</span> <span data-cp="9B40" data-bytes="91 69">‘i</span> <span data-cp="9B50" data-bytes="91 68">‘h</span> <span data-cp="9B69" data-bytes="9A 74">št</span> <span data-cp="9B7F" data-bytes="96 B2">–²</span> <span data-cp="9B81" data-bytes="9A 75">šu</span> <span data-cp="9B8B" data-bytes="9E E9">žé</span> <span data-cp="9B8D" data-bytes="8B BA">‹º</span> <span data-cp="9B8E" data-bytes="91 6D">‘m</span> <span data-cp="9B8F" data-bytes="A0 60"> `</span> <span data-cp="9B97" data-bytes="9F DE">ŸÞ</span> <span data-cp="9B9D" data-bytes="9F C3">ŸÃ</span> <span data-cp="9B9F" data-bytes="96 B5">–µ</span> <span data-cp="9BB0" data-bytes="A0 67"> g</span> 
<span data-cp="9BCF" data-bytes="96 B3">–³</span> <span data-cp="9BDD" data-bytes="9A 76">šv</span> <span data-cp="9BE9" data-bytes="95 D5">•Õ</span> <span data-cp="9BED" data-bytes="9E CA">žÊ</span> <span data-cp="9BF1" data-bytes="9A 77">šw</span> <span data-cp="9BF4" data-bytes="9A 78">šx</span> <span data-cp="9BFF" data-bytes="91 70">‘p</span> <span data-cp="9C02" data-bytes="91 6F">‘o</span> <span data-cp="9C0A" data-bytes="9F A3">Ÿ£</span> <span data-cp="9C0C" data-bytes="91 71">‘q</span> <span data-cp="9C10" data-bytes="96 B1">–±</span> <span data-cp="9C15" data-bytes="9F 63">Ÿc</span> <span data-cp="9C1B" data-bytes="9F 67">Ÿg</span> <span data-cp="9C1F" data-bytes="8B B9">‹¹</span> <span data-cp="9C20" data-bytes="9A 7A">šz</span> <span data-cp="9C26" data-bytes="8B 56">‹V</span> <span data-cp="9C2F" data-bytes="9A DA">šÚ</span> <span data-cp="9C35" data-bytes="96 B0">–°</span> <span data-cp="9C3A" data-bytes="9A 7E">š~</span> <span data-cp="9C45" data-bytes="9D DE">Þ</span> <span data-cp="9C4F" data-bytes="96 AD">–­</span> <span data-cp="9C53" data-bytes="96 AE">–®</span> <span data-cp="9C5D" data-bytes="9E A1">ž¡</span> <span data-cp="9C72" data-bytes="9E 50">žP</span> <span data-cp="9C7B" data-bytes="96 AF">–¯</span> <span data-cp="9C7C" data-bytes="8B F4">‹ô</span> <span data-cp="9D02" data-bytes="9F A4">Ÿ¤</span> <span data-cp="9D0C" data-bytes="96 BD">–½</span> <span data-cp="9D16" data-bytes="96 F4">–ô</span> <span data-cp="9D21" data-bytes="96 B8">–¸</span> <span data-cp="9D39" data-bytes="91 A7">‘§</span> <span data-cp="9D44" data-bytes="A0 5E"> ^</span> <span data-cp="9D49" data-bytes="9A 7D">š}</span> <span data-cp="9D4E" data-bytes="89 48">‰H</span> <span data-cp="9D50" data-bytes="9E B1">ž±</span> <span data-cp="9D5E" data-bytes="9D DB">Û</span> <span data-cp="9D6D" data-bytes="95 BF">•¿</span> <span data-cp="9D6E" data-bytes="8A 73">Šs</span> <span data-cp="9D7C" data-bytes="9E FE">žþ</span> <span data-cp="9D7E" data-bytes="91 7A">‘z</span> <span data-cp="9D83" data-bytes="91 7B">‘{</span> <span data-cp="9D93" data-bytes="9A A3">š£</span> <span data-cp="9DA5" data-bytes="96 C2">–Â</span> <span data-cp="9DAB" data-bytes="9F 77">Ÿw</span> <span data-cp="9DBD" data-bytes="9A A4">š¤</span> <span data-cp="9DC0" data-bytes="9A A5">š¥</span> <span data-cp="9DC4" data-bytes="91 A1">‘¡</span> <span data-cp="9DC9" data-bytes="89 B8">‰¸</span> <span data-cp="9DD4" data-bytes="91 73">‘s</span> <span data-cp="9DF0" data-bytes="9C 6B">œk</span> <span data-cp="9DFC" data-bytes="9A A6">š¦</span> <span data-cp="9E0A" data-bytes="89 BD">‰½</span> <span data-cp="9E0C" data-bytes="89 B9">‰¹</span> <span data-cp="9E0E" data-bytes="91 7D">‘}</span> <span data-cp="9E18" data-bytes="96 BB">–»</span> <span data-cp="9E1C" data-bytes="9F F2">Ÿò</span> <span data-cp="9E1F" data-bytes="8B F5">‹õ</span> <span data-cp="9E7B" data-bytes="9A A9">š©</span> <span data-cp="9E81" data-bytes="9F 54">ŸT</span> <span data-cp="9E84" data-bytes="9F E3">Ÿã</span> <span data-cp="9E85" data-bytes="9E ED">ží</span> <span data-cp="9E90" data-bytes="91 AA">‘ª</span> <span data-cp="9E95" data-bytes="91 AB">‘«</span> <span data-cp="9E96" data-bytes="A0 70"> p</span> <span data-cp="9E98" data-bytes="9F 6D">Ÿm</span> <span data-cp="9E9E" data-bytes="91 AC">‘¬</span> <span data-cp="9EA2" data-bytes="91 AD">‘­</span> <span data-cp="9EA6" data-bytes="A0 FD"> ý</span> <span data-cp="9EA8" data-bytes="9F E2">Ÿâ</span> <span data-cp="9EAA" data-bytes="91 AF">‘¯</span> <span data-cp="9EAB" data-bytes="9E 41">žA</span> <span 
data-cp="9EAC" data-bytes="9A AA">šª</span> <span data-cp="9EAF" data-bytes="91 B0">‘°</span> <span data-cp="9EB1" data-bytes="9A AB">š«</span> <span data-cp="9EBD" data-bytes="9A AC">š¬</span> <span data-cp="9EBF" data-bytes="9A 4A">šJ</span> <span data-cp="9EC1" data-bytes="91 B2">‘²</span> <span data-cp="9EC4" data-bytes="8B F6">‹ö</span> <span data-cp="9EC6" data-bytes="9A AD">š­</span> <span data-cp="9EC7" data-bytes="89 B6">‰¶</span> <span data-cp="9EE2" data-bytes="9A AF">š¯</span> <span data-cp="9EF1" data-bytes="9A B0">š°</span> <span data-cp="9EF8" data-bytes="9A B1">š±</span> <span data-cp="9EFE" data-bytes="9A A1">š¡</span> <span data-cp="9F02" data-bytes="91 B9">‘¹</span> <span data-cp="9F08" data-bytes="91 BA">‘º</span> <span data-cp="9F16" data-bytes="91 BF">‘¿</span> <span data-cp="9F17" data-bytes="91 BE">‘¾</span> <span data-cp="9F26" data-bytes="A0 41"> A</span> <span data-cp="9F27" data-bytes="8B B7">‹·</span> <span data-cp="9F39" data-bytes="91 C0">‘À</span> <span data-cp="9F44" data-bytes="9A B3">š³</span> <span data-cp="9F45" data-bytes="91 C3">‘Ã</span> <span data-cp="9F50" data-bytes="A0 FC"> ü</span> <span data-cp="9F53" data-bytes="9F EE">Ÿî</span> <span data-cp="9F5A" data-bytes="9F 69">Ÿi</span> <span data-cp="9F62" data-bytes="91 C8">‘È</span> <span data-cp="9F69" data-bytes="91 C9">‘É</span> <span data-cp="9F7F" data-bytes="8D E6">æ</span> <span data-cp="9F8E" data-bytes="91 CB">‘Ë</span> <span data-cp="9F96" data-bytes="87 AC">‡¬</span> <span data-cp="9F97" data-bytes="87 A4">‡¤</span> <span data-cp="9F99" data-bytes="89 C8">‰È</span> <span data-cp="9F9F" data-bytes="8D AA">ª</span> <span data-cp="9FA5" data-bytes="9F DD">ŸÝ</span> <span data-cp="9FA6" data-bytes="8C 43">ŒC</span> <span data-cp="9FA7" data-bytes="8C 6D">Œm</span> <span data-cp="9FA8" data-bytes="8C 74">Œt</span> <span data-cp="9FA9" data-bytes="8C B7">Œ·</span> <span data-cp="9FAA" data-bytes="8C B9">Œ¹</span> <span data-cp="9FAB" data-bytes="8C BB">Œ»</span> <span data-cp="9FAC" data-bytes="8C C0">ŒÀ</span> <span data-cp="9FAD" data-bytes="8C D7">Œ×</span> <span data-cp="9FAE" data-bytes="8C D8">ŒØ</span> <span data-cp="9FAF" data-bytes="8C DA">ŒÚ</span> <span data-cp="9FB2" data-bytes="8C ED">Œí</span> <span data-cp="9FB3" data-bytes="8D 48">H</span> <span data-cp="9FC7" data-bytes="87 C2">‡Â</span> <span data-cp="9FC8" data-bytes="87 D2">‡Ò</span> <span data-cp="9FC9" data-bytes="87 D6">‡Ö</span> <span data-cp="9FCA" data-bytes="87 DA">‡Ú</span> <span data-cp="9FCB" data-bytes="87 DF">‡ß</span> <span data-cp="F907" data-bytes="8B F8">‹ø</span> </body></html>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/fetch.http.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/fetch.http.html
new file mode 100644
index 0000000000..d0cb7206f8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/fetch.http.html
@@ -0,0 +1,143 @@
+<!DOCTYPE html>
+<!-- DO NOT EDIT! Generated by `common/security-features/tools/generate.py --spec referrer-policy/4K-1/` -->
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="timeout" content="long">
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="/common/security-features/resources/common.sub.js"></script>
+ <script src="../../../../generic/test-case.sub.js"></script>
+ <script src="../../../generic/test-case.sub.js"></script>
+ </head>
+ <body>
+ <script>
+ TestCase(
+ [
+ {
+ "expectation": "stripped-referrer",
+ "origin": "cross-http",
+ "redirection": "keep-origin",
+ "source_context_list": [],
+ "source_scheme": "http",
+ "subresource": "fetch",
+ "subresource_policy_deliveries": [],
+ "test_description": "Referrer Policy: Expects stripped-referrer for fetch to cross-http origin and keep-origin redirection from http context."
+ },
+ {
+ "expectation": "stripped-referrer",
+ "origin": "cross-http",
+ "redirection": "no-redirect",
+ "source_context_list": [],
+ "source_scheme": "http",
+ "subresource": "fetch",
+ "subresource_policy_deliveries": [],
+ "test_description": "Referrer Policy: Expects stripped-referrer for fetch to cross-http origin and no-redirect redirection from http context."
+ },
+ {
+ "expectation": "stripped-referrer",
+ "origin": "cross-http",
+ "redirection": "swap-origin",
+ "source_context_list": [],
+ "source_scheme": "http",
+ "subresource": "fetch",
+ "subresource_policy_deliveries": [],
+ "test_description": "Referrer Policy: Expects stripped-referrer for fetch to cross-http origin and swap-origin redirection from http context."
+ },
+ {
+ "expectation": "stripped-referrer",
+ "origin": "cross-https",
+ "redirection": "keep-origin",
+ "source_context_list": [],
+ "source_scheme": "http",
+ "subresource": "fetch",
+ "subresource_policy_deliveries": [],
+ "test_description": "Referrer Policy: Expects stripped-referrer for fetch to cross-https origin and keep-origin redirection from http context."
+ },
+ {
+ "expectation": "stripped-referrer",
+ "origin": "cross-https",
+ "redirection": "no-redirect",
+ "source_context_list": [],
+ "source_scheme": "http",
+ "subresource": "fetch",
+ "subresource_policy_deliveries": [],
+ "test_description": "Referrer Policy: Expects stripped-referrer for fetch to cross-https origin and no-redirect redirection from http context."
+ },
+ {
+ "expectation": "stripped-referrer",
+ "origin": "cross-https",
+ "redirection": "swap-origin",
+ "source_context_list": [],
+ "source_scheme": "http",
+ "subresource": "fetch",
+ "subresource_policy_deliveries": [],
+ "test_description": "Referrer Policy: Expects stripped-referrer for fetch to cross-https origin and swap-origin redirection from http context."
+ },
+ {
+ "expectation": "stripped-referrer",
+ "origin": "same-http",
+ "redirection": "keep-origin",
+ "source_context_list": [],
+ "source_scheme": "http",
+ "subresource": "fetch",
+ "subresource_policy_deliveries": [],
+ "test_description": "Referrer Policy: Expects stripped-referrer for fetch to same-http origin and keep-origin redirection from http context."
+ },
+ {
+ "expectation": "stripped-referrer",
+ "origin": "same-http",
+ "redirection": "no-redirect",
+ "source_context_list": [],
+ "source_scheme": "http",
+ "subresource": "fetch",
+ "subresource_policy_deliveries": [],
+ "test_description": "Referrer Policy: Expects stripped-referrer for fetch to same-http origin and no-redirect redirection from http context."
+ },
+ {
+ "expectation": "stripped-referrer",
+ "origin": "same-http",
+ "redirection": "swap-origin",
+ "source_context_list": [],
+ "source_scheme": "http",
+ "subresource": "fetch",
+ "subresource_policy_deliveries": [],
+ "test_description": "Referrer Policy: Expects stripped-referrer for fetch to same-http origin and swap-origin redirection from http context."
+ },
+ {
+ "expectation": "stripped-referrer",
+ "origin": "same-https",
+ "redirection": "keep-origin",
+ "source_context_list": [],
+ "source_scheme": "http",
+ "subresource": "fetch",
+ "subresource_policy_deliveries": [],
+ "test_description": "Referrer Policy: Expects stripped-referrer for fetch to same-https origin and keep-origin redirection from http context."
+ },
+ {
+ "expectation": "stripped-referrer",
+ "origin": "same-https",
+ "redirection": "no-redirect",
+ "source_context_list": [],
+ "source_scheme": "http",
+ "subresource": "fetch",
+ "subresource_policy_deliveries": [],
+ "test_description": "Referrer Policy: Expects stripped-referrer for fetch to same-https origin and no-redirect redirection from http context."
+ },
+ {
+ "expectation": "stripped-referrer",
+ "origin": "same-https",
+ "redirection": "swap-origin",
+ "source_context_list": [],
+ "source_scheme": "http",
+ "subresource": "fetch",
+ "subresource_policy_deliveries": [],
+ "test_description": "Referrer Policy: Expects stripped-referrer for fetch to same-https origin and swap-origin redirection from http context."
+ }
+ ],
+ new SanityChecker()
+ ).start();
+ </script>
+ <div id="log"></div>
+ </body>
+</html>
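
The generated test above enumerates every combination of origin (cross-http, cross-https, same-http, same-https) and redirection (keep-origin, no-redirect, swap-origin) for a fetch subresource issued from an http context, always expecting stripped-referrer. As a rough sketch only — the real descriptors come from common/security-features/tools/generate.py, and makeDescriptors below is a hypothetical helper, not part of that tool — the same 12-entry list could be built as a simple cross product:

    // Sketch only: reproduces the cross product visible in the file above,
    // not the actual generate.py logic.
    function makeDescriptors() {
      const origins = ["cross-http", "cross-https", "same-http", "same-https"];
      const redirections = ["keep-origin", "no-redirect", "swap-origin"];
      const descriptors = [];
      for (const origin of origins) {
        for (const redirection of redirections) {
          descriptors.push({
            expectation: "stripped-referrer",
            origin: origin,
            redirection: redirection,
            source_context_list: [],
            source_scheme: "http",
            subresource: "fetch",
            subresource_policy_deliveries: [],
            test_description: `Referrer Policy: Expects stripped-referrer for fetch to ${origin} origin and ${redirection} redirection from http context.`
          });
        }
      }
      return descriptors; // 4 x 3 = 12 entries, matching the TestCase input above
    }
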
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/filter-turbulence-invalid-001.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/filter-turbulence-invalid-001.html
new file mode 100644
index 0000000000..7400c8b379
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/filter-turbulence-invalid-001.html
@@ -0,0 +1,51 @@
+<!DOCTYPE html>
+<title>CSS Filter Effects: feTurbulence with negative values from baseFrequency</title>
+<link rel="help" href="https://drafts.fxtf.org/filter-effects/#element-attrdef-feturbulence-basefrequency">
+<link rel="help" href="https://crbug.com/1068863"/>
+<link rel="match" href="reference/filter-turbulence-invalid-001-ref.html">
+<meta name="assert" content="This test checks that negative baseFrequency values are unsupported for feTurbulence.">
+<style>
+.target {
+ display: inline-block;
+ width: 100px;
+ height: 100px;
+ background-color: red;
+}
+</style>
+<div class="target" style="filter: url(#fn1)"></div>
+<div class="target" style="filter: url(#fn2)"></div>
+<div class="target" style="filter: url(#tb1)"></div>
+<div class="target" style="filter: url(#tb2)"></div>
+<svg height="0" color-interpolation-filters="sRGB">
+ <!-- type=fractalNoise -->
+ <filter id="fn1" x="0" y="0" width="1" height="1">
+ <feTurbulence type="fractalNoise" baseFrequency="-1 1"/>
+ <feComponentTransfer>
+ <feFuncR type="discrete" tableValues="1 0 1"/>
+ <feFuncG type="discrete" tableValues="0 0.502 0"/> <!-- map [0..1/3] -> 0; [1/3...2/3] -> 0.502; [2/3..1] -> 0 -->
+ <feFuncB type="discrete" tableValues="0"/>
+ <feFuncA type="discrete" tableValues="0 1 0"/>
+ </feComponentTransfer>
+ </filter>
+
+ <filter id="fn2" x="0" y="0" width="1" height="1">
+ <feTurbulence type="fractalNoise" baseFrequency="1 -1"/>
+ <feComponentTransfer>
+ <feFuncR type="discrete" tableValues="1 0 1"/>
+ <feFuncG type="discrete" tableValues="0 0.502 0"/> <!-- map [0..1/3] -> 0; [1/3...2/3] -> 0.502; [2/3..1] -> 0 -->
+ <feFuncB type="discrete" tableValues="0"/>
+ <feFuncA type="discrete" tableValues="0 1 0"/>
+ </feComponentTransfer>
+ </filter>
+
+ <!-- type=turbulence -->
+ <filter id="tb1" x="0" y="0" width="1" height="1">
+ <feTurbulence type="turbulence" baseFrequency="-1 1"/>
+ <feColorMatrix values="1 0 0 0 0, 0 1 0 0 0.502, 0 0 1 0 0, 0 0 0 1 1"/>
+ </filter>
+
+ <filter id="tb2" x="0" y="0" width="1" height="1">
+ <feTurbulence type="turbulence" baseFrequency="1 -1"/>
+ <feColorMatrix values="1 0 0 0 0, 0 1 0 0 0.502, 0 0 1 0 0, 0 0 0 1 1"/>
+ </filter>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/grid-auto-fill-rows-001.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/grid-auto-fill-rows-001.html
new file mode 100644
index 0000000000..afce3f5fa9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/grid-auto-fill-rows-001.html
@@ -0,0 +1,184 @@
+<!DOCTYPE html>
+<title>CSS Grid: auto-fill rows</title>
+<link rel="author" title="Sergio Villar" href="mailto: svillar@igalia.com">
+<link rel="help" href="https://drafts.csswg.org/css-grid-1/#valdef-repeat-auto-fill">
+<link rel="help" href="https://drafts.csswg.org/css-grid-1/#propdef-grid-auto-columns">
+<link rel="help" href="https://drafts.csswg.org/css-grid-1/#propdef-grid-auto-rows">
+<link rel="help" href="https://drafts.csswg.org/css-grid-1/#propdef-grid-template-rows">
+<link rel="help" href="https://drafts.csswg.org/css-grid-1/#propdef-grid-row">
+<link rel="help" href="https://drafts.csswg.org/css-align-3/#propdef-grid-row-gap">
+<link rel="help" href="https://crbug.com/619930">
+<link rel="help" href="https://crbug.com/589460">
+<link rel="help" href="https://crbug.com/648814">
+<meta name="assert" content="Check that auto-fill rows are properly computed in a grid container"/>
+<link href="/css/support/grid.css" rel="stylesheet">
+<style>
+
+.grid {
+ border: 2px solid magenta;
+ height: 200px;
+ width: 25px;
+ align-content: start;
+ grid-auto-rows: 157px;
+ grid-auto-columns: 25px;
+
+ float: left;
+ position: relative;
+ margin-right: 2px;
+}
+
+.gridOnlyAutoRepeat { grid-template-rows: repeat(auto-fill, 30px [autobar]); }
+.gridAutoRepeatAndFixedBefore { grid-template-rows: 10px [foo] 20% [bar] repeat(auto-fill, [autofoo] 35px); }
+.gridAutoRepeatAndFixedAfter { grid-template-rows: repeat(auto-fill, [first] 30px [last]) [foo] minmax(60px, 80px) [bar] minmax(45px, max-content); }
+.gridAutoRepeatAndFixed { grid-template-rows: [start] repeat(2, 50px [a]) [middle] repeat(auto-fill, [autofoo] 15px [autobar]) minmax(5%, 10%) [end]; }
+.gridMultipleNames { grid-template-rows: [start] 20px [foo] 50% repeat(auto-fill, [bar] 20px [start foo]) [foo] 10% [end bar]; }
+.gridMultipleTracks { grid-template-rows: [start] 20px repeat(auto-fill, [a] 2em [b c] 10% [d]) [e] minmax(75px, 1fr) [last]; }
+
+.item { background-color: blue; }
+.item:nth-child(2) { background: green; }
+.item:nth-child(3) { background: orange; }
+
+.gap { grid-row-gap: 20px; }
+
+</style>
+
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/resources/check-layout-th.js"></script>
+
+<body onload="checkLayout('.grid')">
+<div id="log"></div>
+
+<p>This test checks that repeat(auto-fill, ) syntax works as expected.</p>
+
+<div class="grid gridOnlyAutoRepeat">
+ <div class="item" style="grid-row: 1 / span 6" data-offset-y="0" data-offset-x="0" data-expected-height="180" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridOnlyAutoRepeat">
+ <div class="item" style="grid-row: 1 / span 6 autobar" data-offset-y="0" data-offset-x="0" data-expected-height="180" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridOnlyAutoRepeat gap">
+ <div class="item" style="grid-row: 1 / span 5" data-offset-y="0" data-offset-x="0" data-expected-height="357" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridOnlyAutoRepeat gap">
+ <div class="item" style="grid-row: autobar 2 / span 3" data-offset-y="100" data-offset-x="0" data-expected-height="257" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridOnlyAutoRepeat gap" style="height: auto; max-height: 90px;" data-expected-height="94" data-expected-width="29">
+ <div class="item" data-offset-y="0" data-offset-x="0" data-expected-height="30" data-expected-width="25"></div>
+ <div class="item" data-offset-y="50" data-offset-x="0" data-expected-height="30" data-expected-width="25"></div>
+ <div class="item" data-offset-y="100" data-offset-x="0" data-expected-height="157" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridOnlyAutoRepeat gap" style="height: auto; max-height: 90px; min-height: 130px;" data-expected-height="134" data-expected-width="29">
+ <div class="item" data-offset-y="0" data-offset-x="0" data-expected-height="30" data-expected-width="25"></div>
+ <div class="item" data-offset-y="50" data-offset-x="0" data-expected-height="30" data-expected-width="25"></div>
+ <div class="item" data-offset-y="100" data-offset-x="0" data-expected-height="30" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixedBefore">
+ <div class="item" style="grid-row: 1 / span 6" data-offset-y="0" data-offset-x="0" data-expected-height="190" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixedBefore">
+ <div class="item" style="grid-row: foo / autofoo" data-offset-y="10" data-offset-x="0" data-expected-height="40" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixedBefore">
+ <div class="item" style="grid-row: bar / 5 autofoo" data-offset-y="50" data-offset-x="0" data-expected-height="297" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixedBefore gap">
+ <div class="item" style="grid-row: 1 / span 4" data-offset-y="0" data-offset-x="0" data-expected-height="180" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixedBefore gap">
+ <div class="item" style="grid-row: span 3 / 2 autofoo" data-offset-y="0" data-offset-x="0" data-expected-height="125" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixedBefore gap">
+ <div class="item" style="grid-row: notPresent / 3 autofoo" data-offset-y="377" data-offset-x="0" data-expected-height="157" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixedAfter">
+ <div class="item" style="grid-row: 1 / span 4" data-offset-y="0" data-offset-x="0" data-expected-height="185" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixedAfter">
+ <div class="item" style="grid-row: first / last 2" data-offset-y="0" data-offset-x="0" data-expected-height="60" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixedAfter">
+ <div class="item" style="grid-row: last 2 / foo" data-offset-y="60" data-offset-x="0" data-expected-height="80" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixedAfter gap">
+ <div class="item" style="grid-row: 1 / span 3" data-offset-y="0" data-offset-x="0" data-expected-height="195" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixedAfter gap">
+ <div class="item" style="grid-row: 3 / span 1 bar" data-offset-y="130" data-offset-x="0" data-expected-height="222" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixedAfter gap">
+ <div class="item" style="grid-row: first / foo" data-offset-y="0" data-offset-x="0" data-expected-height="30" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixed">
+ <div class="item" style="grid-row: 1 / span 8" data-offset-y="0" data-offset-x="0" data-expected-height="195" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixed">
+ <div class="item" style="grid-row: a / autobar 2" data-offset-y="50" data-offset-x="0" data-expected-height="80" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixed">
+ <div class="item" style="grid-row: autofoo / end" data-offset-y="100" data-offset-x="0" data-expected-height="95" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixed gap">
+ <div class="item" style="grid-row: 1 / span 4" data-offset-y="0" data-offset-x="0" data-expected-height="195" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridAutoRepeatAndFixed gap">
+ <div class="item" style="grid-row: autobar / -1" data-offset-y="175" data-offset-x="0" data-expected-height="20" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridMultipleNames">
+ <div class="item" style="grid-row: 1 / -1" data-offset-y="0" data-offset-x="0" data-expected-height="200" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridMultipleNames">
+ <div class="item" style="grid-row: foo 3 / 4 bar" data-offset-y="160" data-offset-x="0" data-expected-height="40" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridMultipleNames">
+ <div class="item" style="grid-row: -6 / span 2 start" data-offset-y="20" data-offset-x="0" data-expected-height="140" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridMultipleNames gap">
+ <div class="item" style="grid-row: -4 / -2" data-offset-y="40" data-offset-x="0" data-expected-height="140" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridMultipleNames gap">
+ <div class="item" style="grid-row: bar / foo 2" data-offset-y="160" data-offset-x="0" data-expected-height="20" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridMultipleNames gap">
+ <div class="item" style="grid-row: foo / bar 2" data-offset-y="40" data-offset-x="0" data-expected-height="180" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridMultipleTracks">
+ <div class="item" style="grid-row: a / 2 c" data-offset-y="20" data-offset-x="0" data-expected-height="84" data-expected-width="25"></div>
+ <div class="item" style="grid-row: 3 / e; grid-column: 2;" data-offset-y="52" data-offset-x="25" data-expected-height="72" data-expected-width="25"></div>
+</div>
+
+<div class="grid gridMultipleTracks gap">
+ <div class="item" style="grid-row: a / c" data-offset-y="40" data-offset-x="0" data-expected-height="32" data-expected-width="25"></div>
+ <div class="item" style="grid-row: 3 / last; grid-column: 2;" data-offset-y="92" data-offset-x="25" data-expected-height="115" data-expected-width="25"></div>
+</div>
+
+</body>
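
The expected heights in the grid test above follow from how many 30px rows repeat(auto-fill, 30px) can fit in the 200px container: 6 without a row gap (6 × 30 = 180) and 4 with the 20px grid-row-gap (4 × 30 + 3 × 20 = 180), with any further rows taking the 157px grid-auto-rows size (for example 4 × 30 + 157 + 4 × 20 = 357 for the item spanning five rows). A minimal sketch of that count, assuming the css-grid-1 "largest count that does not overflow the container" rule and a hypothetical autoFillCount helper rather than any engine code:

    // Sketch only: auto-fill repetition count for a definite container size.
    // n tracks occupy n*trackSize + (n-1)*gap, and at least one repetition
    // is always generated.
    function autoFillCount(containerSize, trackSize, gap) {
      return Math.max(1, Math.floor((containerSize + gap) / (trackSize + gap)));
    }

    autoFillCount(200, 30, 0);  // 6 -> 6*30 = 180px, as asserted above
    autoFillCount(200, 30, 20); // 4 -> 4*30 + 3*20 = 180px, as asserted above
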
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/image-orientation-from-image-content-images-ref.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/image-orientation-from-image-content-images-ref.html
new file mode 100644
index 0000000000..c0d29909f9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/image-orientation-from-image-content-images-ref.html
@@ -0,0 +1,86 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>CSS Images Module Level 3: image-orientation: from-image for content images</title>
+<link rel="author" title="Stephen Chenney" href="mailto:schenney@chromium.org">
+<link rel="help" href="https://drafts.csswg.org/css-images-3/#propdef-image-orientation">
+<style>
+ div.image {
+ display: inline-block;
+ }
+ div.container {
+ display: inline-block;
+ width: 100px;
+ vertical-align: top;
+ }
+ img {
+ width: 100px;
+ height: 100px;
+ background-repeat: no-repeat;
+ }
+ body {
+ overflow: hidden;
+ }
+</style>
+</head>
+<body >
+ <p>The images should rotate respecting their EXIF orientation because
+ image-orientation: from-image is specified.
+ </p>
+ <div class="container">
+ <div class="image" style="content: url(../support/exif-orientation-1-ul-pre-rotated.jpg)"></div>
+ <br>Normal
+ </div>
+ <div class="container">
+ <div class="image" style="content: url(../support/exif-orientation-2-ur-pre-rotated.jpg)"></div>
+ <br>Flipped horizontally
+ </div>
+ <div class="container">
+ <div class="image" style="content: url(../support/exif-orientation-3-lr-pre-rotated.jpg)"></div>
+ <br>Rotated 180&deg;
+ </div>
+ <div class="container">
+ <div class="image" style="content: url(../support/exif-orientation-4-lol-pre-rotated.jpg)"></div>
+ <br>Flipped vertically
+ </div>
+ <br>
+ <div class="container">
+ <div class="image" style="content: url(../support/exif-orientation-5-lu-pre-rotated.jpg)"></div>
+ <br>Rotated 90&deg; CCW and flipped vertically
+ </div>
+ <div class="container">
+ <div class="image" style="content: url(../support/exif-orientation-6-ru-pre-rotated.jpg)"></div>
+ <br>Rotated 90&deg; CW
+ </div>
+ <div class="container">
+ <div class="image" style="content: url(../support/exif-orientation-7-rl-pre-rotated.jpg)"></div>
+ <br>Rotated 90&deg; CW and flipped vertically
+ </div>
+ <div class="container">
+ <div class="image" style="content: url(../support/exif-orientation-8-llo-pre-rotated.jpg)"></div>
+ <br>Rotated 90&deg; CCW
+ </div>
+ <br>
+ <div class="container">
+ <img style="background-image: url(../support/exif-orientation-5-lu-pre-rotated.jpg)"></img>
+ <br>Rotated 90&deg; CCW and flipped vertically
+ </div>
+ <div class="container">
+ <img style="background-image: url(../support/exif-orientation-6-ru-pre-rotated.jpg)"></img>
+ <br>Rotated 90&deg; CW
+ </div>
+ <div class="container">
+ <img style="background-image: url(../support/exif-orientation-7-rl-pre-rotated.jpg)"></img>
+ <br>Rotated 90&deg; CW and flipped vertically
+ </div>
+ <div class="container">
+ <img style="background-image: url(../support/exif-orientation-8-llo-pre-rotated.jpg)"></img>
+ <br>Rotated 90&deg; CCW
+ </div>
+ <br>
+ <div class="container">
+ <div class="image" style="content: url(../support/exif-orientation-9-u-pre-rotated.jpg)"></div>
+ <br>Undefined (invalid value)
+ </div>
+</body>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/masonry-item-placement-006.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/masonry-item-placement-006.html
new file mode 100644
index 0000000000..0082d72df2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/masonry-item-placement-006.html
@@ -0,0 +1,149 @@
+<!DOCTYPE HTML>
+<!--
+ Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<html><head>
+ <meta charset="utf-8">
+ <title>CSS Grid Test: Masonry item placement</title>
+ <link rel="author" title="Mats Palmgren" href="mailto:mats@mozilla.com">
+ <link rel="help" href="https://drafts.csswg.org/css-grid-2">
+ <link rel="match" href="masonry-item-placement-006-ref.html">
+ <style>
+html,body {
+ color:black; background-color:white; font:15px/1 monospace; padding:0; margin:0;
+}
+
+grid {
+ display: inline-grid;
+ gap: 1px 2px;
+ grid-template-columns: repeat(4,20px);
+ grid-template-rows: masonry;
+ color: #444;
+ border: 1px solid;
+ padding: 2px;
+}
+
+item {
+ background-color: #444;
+ color: #fff;
+}
+.next > grid {
+ masonry-auto-flow: next;
+}
+</style>
+</head>
+<body>
+
+<grid>
+ <item style="padding-top:30px">1</item>
+ <item>2</item>
+ <item>3</item>
+ <item>4</item>
+ <item>5</item>
+ <item>6</item>
+</grid>
+
+<grid>
+ <item>1</item>
+ <item style="padding-top:30px">2</item>
+ <item>3</item>
+ <item>4</item>
+ <item>5</item>
+ <item>6</item>
+</grid>
+
+<grid>
+ <item style="padding-top:30px">1</item>
+ <item style="padding-top:30px">2</item>
+ <item style="padding-top:10px">3</item>
+ <item>4</item>
+ <item>5</item>
+ <item>6</item>
+</grid>
+
+<grid>
+ <item>1</item>
+ <item style="padding-top:30px">2</item>
+ <item style="padding-top:10px">3</item>
+ <item style="grid-column:span 2">4</item>
+ <item>5</item>
+ <item>6</item>
+</grid>
+
+<grid>
+ <item>1</item>
+ <item style="padding-top:30px">2</item>
+ <item style="padding-top:10px">3</item>
+ <item style="grid-column:span 3">4</item>
+ <item>5</item>
+ <item>6</item>
+</grid>
+
+<grid>
+ <item>1</item>
+ <item style="padding-top:30px">2</item>
+ <item style="padding-top:10px">3</item>
+ <item style="grid-column:span 4">4</item>
+ <item>5</item>
+ <item>6</item>
+</grid>
+
+<span class="next">
+<grid>
+ <item style="padding-top:30px">1</item>
+ <item>2</item>
+ <item>3</item>
+ <item>4</item>
+ <item>5</item>
+ <item>6</item>
+</grid>
+
+<grid>
+ <item>1</item>
+ <item style="padding-top:30px">2</item>
+ <item>3</item>
+ <item>4</item>
+ <item>5</item>
+ <item>6</item>
+</grid>
+
+<grid>
+ <item style="padding-top:30px">1</item>
+ <item style="padding-top:30px">2</item>
+ <item style="padding-top:10px">3</item>
+ <item>4</item>
+ <item>5</item>
+ <item>6</item>
+</grid>
+
+<grid>
+ <item>1</item>
+ <item style="padding-top:30px">2</item>
+ <item style="padding-top:10px">3</item>
+ <item style="grid-column:span 2">4</item>
+ <item>5</item>
+ <item>6</item>
+</grid>
+
+<grid>
+ <item>1</item>
+ <item style="padding-top:30px">2</item>
+ <item style="padding-top:10px">3</item>
+ <item style="grid-column:span 3">4</item>
+ <item>5</item>
+ <item>6</item>
+</grid>
+
+<grid>
+ <item>1</item>
+ <item style="padding-top:30px">2</item>
+ <item style="padding-top:10px">3</item>
+ <item style="grid-column:span 4">4</item>
+ <item>5</item>
+ <item>6</item>
+</grid>
+</span>
+
+</body>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/moz-css21-table-page-break-inside-avoid-2.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/moz-css21-table-page-break-inside-avoid-2.html
new file mode 100644
index 0000000000..cc6a55933f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/moz-css21-table-page-break-inside-avoid-2.html
@@ -0,0 +1,29 @@
+<!DOCTYPE html>
+<html lang="en-US" class="reftest-paged">
+<head>
+ <title>CSS Test: CSS 2.1 page-break-inside:avoid</title>
+ <link rel="author" title="Mats Palmgren" href="https://bugzilla.mozilla.org/show_bug.cgi?id=685012">
+ <link rel="help" href="http://www.w3.org/TR/CSS21/page.html#propdef-page-break-inside">
+ <link rel="match" href="moz-css21-table-page-break-inside-avoid-2-ref.html">
+ <meta name="flags" content="paged">
+<style type="text/css">
+@page { size:5in 3in; margin:0.5in; }
+p { height: 1in; width: 1in; margin:0; background-color:blue; }
+.test { page-break-inside:avoid; }
+</style>
+</head>
+<body>
+<table border="1">
+<tbody>
+<tr><td><p>1</p></td></tr>
+</tbody>
+</table>
+<div style= "page-break-after: always"></div>
+<table border="1" class="test">
+<tbody></tbody>
+<tbody>
+<tr><td><p>2</p><p>3</p></td></tr>
+</tbody>
+</table>
+</body>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/position-sticky-table-th-bottom-ref.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/position-sticky-table-th-bottom-ref.html
new file mode 100644
index 0000000000..2aa5c08a55
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/position-sticky-table-th-bottom-ref.html
@@ -0,0 +1,62 @@
+<!DOCTYPE html>
+<title>Reference for position:sticky bottom constraint should behave correctly for &lt;th&gt; elements</title>
+
+<style>
+.group {
+ display: inline-block;
+ position: relative;
+ width: 150px;
+ height: 200px;
+}
+
+.scroller {
+ position: relative;
+ width: 100px;
+ height: 150px;
+ overflow-x: hidden;
+ overflow-y: auto;
+}
+
+.contents {
+ height: 550px;
+}
+
+.indicator {
+ position: absolute;
+ background-color: green;
+ left: 0;
+ height: 50px;
+ width: 50px;
+}
+</style>
+
+<script>
+window.addEventListener('load', function() {
+ document.getElementById('scroller1').scrollTop = 0;
+ document.getElementById('scroller2').scrollTop = 75;
+ document.getElementById('scroller3').scrollTop = 200;
+});
+</script>
+
+<div class="group">
+ <div id="scroller1" class="scroller">
+ <div class="indicator" style="top: 100px;"></div>
+ <div class="contents"></div>
+ </div>
+</div>
+
+<div class="group">
+ <div id="scroller2" class="scroller">
+ <div class="indicator" style="top: 150px;"></div>
+ <div class="contents"></div>
+ </div>
+</div>
+
+<div class="group">
+ <div id="scroller3" class="scroller">
+ <div class="indicator" style="top: 250px;"></div>
+ <div class="contents"></div>
+ </div>
+</div>
+
+<div>You should see three green boxes above. No red should be visible.</div>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/pre-float-001.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/pre-float-001.html
new file mode 100644
index 0000000000..8dd08d8099
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/pre-float-001.html
@@ -0,0 +1,36 @@
+<!DOCTYPE html>
+<title>CSS test preserved spaces and floats interaction</title>
+<link rel="author" title="Koji Ishii" href="kojii@chromium.org">
+<link rel="match" href="reference/pre-float-001-ref.html">
+<link rel="help" href="https://drafts.csswg.org/css-text-3/#white-space-property">
+<link rel="stylesheet" type="text/css" href="/fonts/ahem.css" />
+<style>
+html {
+ font-family: Ahem;
+ font-size: 20px;
+ line-height: 1;
+}
+.container {
+ white-space: pre;
+ width: 10ch;
+ margin-bottom: 1em;
+}
+.float {
+ float: left;
+ width: 3ch;
+ height: 2em;
+ background: orange;
+}
+</style>
+<body>
+ <div class="float"></div>
+ <div class="container">123456 <br>123456</div>
+ <div class="float"></div>
+ <div class="container">1234567 <br>1234567</div>
+ <div class="float"></div>
+ <div class="container">1234567 <br>1234567</div>
+ <div class="float"></div>
+ <div class="container">1234567 <br>1234567</div>
+ <div class="float"></div>
+ <div class="container">12345678 <br>12345678</div>
+</body>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/resize-004.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/resize-004.html
new file mode 100644
index 0000000000..3a1f561749
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/resize-004.html
@@ -0,0 +1,20 @@
+<!DOCTYPE html>
+<meta charset="utf-8">
+<title>CSS Basic User Interface Test: resize initial value - none</title>
+<link rel="author" title="Intel" href="http://www.intel.com/">
+<link rel="author" title="Shiyou Tan" href="mailto:shiyoux.tan@intel.com">
+<link rel="help" title="8.1. 'resize' property" href="http://www.w3.org/TR/css3-ui/#resize">
+<meta name="flags" content="interact">
+<meta name="assert" content="Test checks that the resize property initial value is none">
+<style>
+ #test {
+ border: 2px solid blue;
+ height: 100px;
+ overflow: auto;
+ width: 100px;
+ }
+</style>
+<body>
+  <p>Test passes if <strong>neither</strong> the height <strong>nor</strong> the width of the blue border square can be adjusted (for instance by dragging the bottom-right corner).</p>
+ <div id="test"></div>
+</body>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/test-plan.src.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/test-plan.src.html
new file mode 100644
index 0000000000..c29f268837
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/test-plan.src.html
@@ -0,0 +1,1616 @@
+
+<!DOCTYPE html>
+<html>
+ <head>
+ <title>Compositing and Blending Test Plan</title>
+ <meta charset='utf-8'>
+ <script src='http://www.w3.org/Tools/respec/respec-w3c-common'
+ async class='remove'></script>
+ <script class='remove'>
+ var respecConfig = {
+ specStatus: "unofficial",
+ shortName: "compositing-1-test-plan",
+ editors: [
+ {
+ name: "Mirela Budaes",
+ company: "Adobe Systems, Inc.",
+ companyURL: "http://www.adobe.com/"
+ },
+ {
+ name: "Horia Olaru",
+ company: "Adobe Systems, Inc.",
+ companyURL: "http://www.adobe.com/"
+ },
+ {
+ name: "Mihai Tica",
+ company: "Adobe Systems, Inc.",
+ companyURL: "http://www.adobe.com/"
+ },
+
+ ]
+ };
+ </script>
+ <style>
+ table
+ {
+ border-collapse:collapse;
+ }
+ table, td, th
+ {
+ border:1px solid black;
+ padding: 13px;
+ }
+ table
+ {
+ width: 100%;
+ }
+ img
+ {
+ width: 400px;
+ }
+ </style>
+ </head>
+ <body>
+ <section id='abstract'>
+ <p>
+ This document is intended to be used as a guideline for the testing
+ activities related to the Compositing and Blending spec [[!compositing-1]]. Its main
+ goal is to provide an overview of the general testing areas and an informative
+ description of possible test cases.
+ </p>
+ <p>
+ This document is not meant to replace the spec in determining the
+ normative and non-normative assertions to be tested, but rather
+ complement it.
+ </p>
+ </section>
+ <section>
+ <h2>Goals</h2>
+ <section>
+ <h3>Providing guidance on testing</h3>
+ <p>
+          In order to increase the quality of test contributions, this
+          document offers a set of test case descriptions to guide testing (see
+          <a href="#test-cases-description" class="sectionRef"></a>).
+ </p>
+ </section>
+ <section>
+ <h3>Creating automation-friendly tests</h3>
+ <p>
+          In terms of actual tests produced for the CSS Compositing and Blending spec, the main goal
+          is to ensure that most tests are automatable (i.e. they're either
+          reftests or use <code>testharness.js</code>). Even where manual tests
+          are absolutely necessary, they should be written so that they can be
+          easily automated &ndash; there are on-going efforts to make
+          WebDriver [[webdriver]] automated tests a first-class citizen in W3C
+          testing. This means that even if a manual test requires user
+          interaction, the validation or PASS/FAIL conditions should still be
+          clear enough to allow automatic validation if that interaction is
+          later automated.
+ </p>
+ </section>
+ </section>
+ <section>
+ <h2>Approach</h2>
+ <p>
+        Since CSS blending introduces only three new CSS properties,
+        the approach is to dive as deeply as possible into every aspect of the spec.
+
+        Tests will be created for the testing areas listed in <a href="#testing-areas" class="sectionRef"></a>,
+        using the test case descriptions from <a href="#test-cases-description" class="sectionRef"></a> as guidance.
+ </p>
+ </section>
+ <section>
+ <h2>Testing areas</h2>
+ <section>
+ <h3>Explicit testing areas</h3>
+ <p>
+ These testing areas cover things explicitly defined in the normative sections of the Blending and Compositing spec. Please note that while detailed, this list is not necessarily
+ exhaustive and some normative behaviors may not be contained in it.
+ When in doubt, consult the Blending and Compositing spec or ask a question on the
+ <a href="http://lists.w3.org/Archives/Public/www-style/">mailing
+ list</a>.
+ </p>
+ <p>Below is the list of explicit testing areas:</p>
+ <ol>
+          <li>Proper parsing of the CSS properties and rendering according to the spec
+            <ul>
+              <li><code>mix-blend-mode</code></li>
+              <li><code>isolation</code></li>
+              <li><code>background-blend-mode</code></li>
+            </ul>
+          </li>
+ <li>SVG blending</li>
+ <li>Canvas 2D blending</li>
+ </ol>
+ </section>
+ <section>
+ <h3>Implicit testing areas</h3>
+ <p>
+ These are testing areas either normatively defined in other specs
+ that explicitly refer to the Blending and Compositing spec (e.g. [[!css3-transforms]])
+ or simply not explicitly defined, but implied by various aspects of
+ the spec (e.g. processing model, CSS 2.1 compliance, etc.).
+ Please note that while detailed, this list is not necessarily
+ exhaustive and some normative behaviors may not be contained in it.
+ When in doubt, consult the Blending and Compositing spec or ask a question on the
+ <a href="http://lists.w3.org/Archives/Public/www-style/">mailing
+ list</a>.
+ </p>
+ <p>Below is the list of implicit testing areas:</p>
+ <ol>
+ <li>Blending different types of elements
+ <ul>
+ <li><code>&lt;video&gt;</code></li>
+ <li><code>&lt;canvas&gt;</code></li>
+ <li><code>&lt;table&gt;</code></li>
+ </ul>
+ </li>
+ <li>Blending elements with specific style rules applied
+ <ul>
+ <li><code>transforms</code></li>
+ <li><code>transitions</code> </li>
+ <li><code>animations</code> </li>
+ </ul>
+ </li>
+ </ol>
+ </section>
+ </section>
+ <section>
+ <h2>Test cases description</h2>
+ <section>
+ <h3>Test cases for <code>mix-blend-mode</code></h3>
+ <p>
+          The following diagram describes a list of notations used later in the document, as well as the general document structure the test cases will follow. It is intended as a wireframe: test cases should not be limited to this structure, and authors are encouraged to come up with more complex test cases as well.
+ </p>
+ <p>
+ <img id="test_outline" src="test_template.png" alt="Mix-blend-mode sample elements">
+ </p>
+ <p>The intended structure of the document is the following:</p>
+ <pre>
+&lt;body&gt;
+ &lt;div id="[P]"&gt;
+ &lt;div id="[IN-S]"&gt;&lt;/div&gt;
+ &lt;div id="[IN-P]"&gt;
+ &lt;div id="[B]"&gt;
+ &lt;div id="[CB]"&gt;&lt;/div&gt;
+ &lt;/div&gt;
+ &lt;/div&gt;
+ &lt;/div&gt;
+&lt;/body&gt;
+ </pre>
+ <p> Unless otherwise stated, test cases assume the following properties for the elements: <br>
+ <ul>
+ <li> default value for the <code>background-color</code> of the <code>body</code></li>
+ <li> <code>background-color</code> set to a fully opaque color for all the other elements </li>
+ </ul>
+ </p>
+        <p>The CSS associated with the elements used in the tests shouldn't use properties that create a stacking context, except the ones specified in the test case descriptions.</p>
+        <p>Every test case has a description of the elements used. The notation from the image is also used in the test case descriptions (e.g. the notation for the parent element is [P]). Each test case uses only a subset of the elements; the unused elements should simply be removed.
+        </p>
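+        <p>As a purely illustrative sketch (not part of the plan itself), a minimal two-element test
+        following this wireframe could look as follows; the element ids, sizes, colors and the
+        <code>multiply</code> blend mode are arbitrary choices:</p>
+        <pre>
+&lt;div id="P" style="position: fixed; width: 100px; height: 100px; background-color: yellow"&gt;
+  &lt;div id="B" style="mix-blend-mode: multiply; width: 100px; height: 100px; background-color: cyan"&gt;&lt;/div&gt;
+&lt;/div&gt;
+        </pre>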
+ <section>
+ <h4>An element with <code>mix-blend-mode</code> other than normal creates a stacking context</h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#mix-blend-mode" >spec</a>: <q>Applying a blendmode other than ‘normal’ to the element must establish a new stacking context [CSS21].</q></p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+              <td>Simple <code>&lt;div&gt;</code></td>
+ <td>1 element required: <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal
+ </td>
+ <td>The element [B] creates a stacking context</td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4>An element with <code>mix-blend-mode</code> blends with the content within the current stacking context</h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#csscompositingrules_CSS">spec</a>: <q>An element that has blending applied, must blend with all the underlying content of the stacking context [CSS21] that that element belongs to.</q> </p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Blending simple elements </td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal
+ </td>
+ <td>The color of the parent element [P] mixes with the color of the child element [B].</td>
+ </tr>
+ <tr>
+ <td>Blending <code>&lt;video&gt;</code></td>
+ <td>2 elements required: <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="Sibling of the element [B]">[IN-S]</a> <br>
+ [B] - <code>&lt;video&gt;</code> element with <code>mix-blend-mode</code> other than normal <br>
+                  [IN-S] - sibling (of the element [B]) visually overlapping the <code>&lt;video&gt;</code> element <br>
+ [IN-S] has some text inside
+ </td>
+ <td>The content of the <code>video</code> element [B] mixes with the colors of the sibling element and the text from [IN-S].</td>
+ </tr>
+ <tr>
+ <td>Blending with a sibling</td>
+ <td>3 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>, <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="Sibling of the element [B]">[IN-S]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal <br>
+ [IN-S] - sibling of the element [B] <br>
+ The [IN-S] element visually overlaps the [B] element
+ </td>
+                <td>The colors of the parent element [P] and the sibling element [IN-S] mix with the color of the blended element [B].</td>
+ </tr>
+ <tr>
+ <td>Blending with two levels of ascendants</td>
+ <td>3 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>, <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="child of the element [P]">[IN-P]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal <br>
+ [IN-P] - Intermediate child element between the parent [P] and the child [B]
+ </td>
+                <td>The colors of the parent element [P] and the child element [IN-P] mix with the color of the blended element [B].</td>
+ </tr>
+ </table>
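+            <p>As a non-normative illustration of how the "Expected result" column maps onto a reftest
+            reference: if [P] is solid yellow and [B] is solid cyan with <code>mix-blend-mode: multiply</code>,
+            each channel of the blended area is the product of the backdrop and source channels, giving
+            rgb(0, 255, 0); the matching reference page can therefore be a plain green square of the same size.</p>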
+ </section>
+ <section>
+ <h4>An element with <code>mix-blend-mode</code> doesn't blend with anything outside the current stacking context</h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#csscompositingrules_CSS">spec</a>: <q> An element that has blending applied, must blend with all the underlying content of the stacking context [CSS21] that that element belongs to.</q></p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Blending child overflows the parent</td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal <br>
+ The blending element [B] has content that lies outside the parent element. <br>
+ Set the <code>background-color</code> of the <code>body</code> to a value other than default</td>
+ <td>The color of the parent element mixes with the color of the child element. <br>
+ The area of the child element outside of the parent element doesn't mix with the color of the <code>body</code></td>
+ </tr>
+ <tr>
+ <td>Parent with transparent pixels</td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ The element has some text inside and default value for <code>background-color</code> <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal <br>
+ The <code>background-color</code> of the <code>body</code> has a value other than default</td>
+ <td>The color of the text from the parent element [P] mixes with the color of the child element [B]. <br>
+ No blending between the color of the <code>body</code> and the color of the blending element [B].
+ </td>
+ </tr>
+ <tr>
+ <td>Parent with <code>border-radius</code></td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+                [P] has <code>border-radius</code> specified (e.g. 50%). <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal <br>
+ [B] has content that lies outside the parent element, over a rounded corner. <br>
+ The <code>background-color</code> of the <code>body</code> has a value other than default. </td>
+ <td>The color of the parent element mixes with the color of the child element. <br>
+ The area of the child element which draws over the rounded corner doesn't mix with the color of the <code>body</code></td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4>An element with <code>mix-blend-mode</code> other than normal must cause a group to be isolated</h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#isolation" >spec</a>: <q>operations that cause the creation of stacking context [CSS21] must cause a group to be isolated.</q> </p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Child of the blended element has opacity</td>
+              <td>3 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>, <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="Child of the element [B]">[CB]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal <br>
+ [CB] - child of the element [B] with <code>opacity</code> less than one. </td>
+              <td>The group created by the two child elements ([B] and [CB]) is blended with the parent element [P]. <br>
+ No blending between [B] and [CB]</td>
+ </tr>
+ <tr>
+ <td>Overflowed child of the blended element</td>
+              <td>3 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>, <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="Child of the element [B]">[CB]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal <br>
+ [CB] - child of the element [B] with content that lies outside the parent element [B].
+ </td>
+              <td>The group created by the two child elements ([B] and [CB]) is blended with the parent element [P]. <br>
+ No blending between [B] and [CB]. There is only one color for the entire element [CB] </td>
+ </tr>
+ <tr>
+ <td>Blended element with transparent pixels</td>
+              <td>3 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>, <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="Child of the element [B]">[CB]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal and transparent <code>background-color</code> <br>
+ [CB] - child of the element [B]
+ </td>
+              <td>The group created by the two child elements ([B] and [CB]) is blended with the parent element [P]. <br>
+ No blending between [B] and [CB]. </td>
+ </tr>
+ </table>
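+            <p>A minimal, purely illustrative sketch of the first row above ([CB] is a child of [B] with
+            <code>opacity</code> less than one); per the expected result, the [B]/[CB] group blends with [P]
+            while [B] and [CB] do not blend with each other. All ids, sizes, colors and values are arbitrary:</p>
+            <pre>
+&lt;div id="P" style="position: fixed; width: 100px; height: 100px; background-color: yellow"&gt;
+  &lt;div id="B" style="mix-blend-mode: multiply; width: 100px; height: 100px; background-color: cyan"&gt;
+    &lt;div id="CB" style="opacity: 0.5; width: 50px; height: 50px; background-color: magenta"&gt;&lt;/div&gt;
+  &lt;/div&gt;
+&lt;/div&gt;
+            </pre>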
+ </section>
+ <section>
+ <h4>An element with <code>mix-blend-mode</code> must work properly with css transforms</h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Parent with 3D transform</td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with <code>3D transform</code> <br>
+              [B] - element with <code>mix-blend-mode</code> other than normal
+              </td>
+              <td>The color of the parent element [P] mixes with the color of the child element [B] <br>
+              The element [P] and its content are properly transformed
+ </td>
+ </tr>
+ <tr>
+ <td>Blended element with 3D transform</td>
+              <td>3 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>, <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="Child of the element [B]">[CB]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal and <code>3D transform</code> <br>
+ [CB] - child of the element [B] </td>
+              <td> The color of the parent element [P] mixes with the color of the child element [B] <br>
+              The element [B] and its content are properly transformed </td>
+ </tr>
+ <tr>
+ <td>Both parent and blended element with 3D transform</td>
+ <td> 2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with <code>3D transform</code> <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal and <code>3D transform</code>
+ </td>
+ <td>The color of the parent element [P] mixes with the color of the child element [B] <br>
+              The elements [P] and [B], together with their content, are properly transformed</td>
+ </tr>
+ <tr>
+ <td>Blended element with transform and preserve-3d</td>
+              <td>3 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>, <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="Child of the element [B]">[CB]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal and transform with <code>transform-style:preserve-3d</code> <br>
+ [CB] - child of the element [B]. It has 3D transform property</td>
+              <td> The child element [CB] will NOT preserve its 3D position. <br>
+              <code>mix-blend-mode</code> overrides the behavior of <code>transform-style:preserve-3d</code>
+              and creates a flattened representation of the descendant elements <br>
+              The color of the group created by the child elements ([B] and [CB]) will blend with the color of the parent element [P] </td>
+ </tr>
+ <tr>
+ <td>Blended element with transform and perspective</td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal and transform with <code>perspective</code> set to positive length </td>
+              <td>The colors of the parent and the child ([P] and [B]) are mixed <br>
+              The element [B] and its content are properly transformed
+ </td>
+ </tr>
+ <tr>
+ <td>Sibling with 3D transform between the parent and the blended element</td>
+ <td>3 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>, <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="Sibling of the element [B]">[IN-S]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal <br>
+              [IN-S] - Sibling (of the element [B]) with <code>3D transform</code> between the parent [P] and the child [B]
+ </td>
+              <td>The colors of the parent element [P] and the transformed sibling element [IN-S] mix with the color of the blended element [B].<br>
+              The element [IN-S] and its content are properly transformed
+ </td>
+ </tr>
+ <tr>
+ <td>Parent with 3D transform and transition</td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with <code>3D transform</code> and transition <br>
+              [B] - element with <code>mix-blend-mode</code> other than normal
+              </td>
+              <td>The color of the parent element [P] mixes with the color of the child element [B] <br>
+              The element [P] and its content are properly transformed
+ </td>
+ </tr>
+ <tr>
+ <td>Sibling with 3D transform(and transition) between the parent and the blended element</td>
+ <td>3 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>, <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="Sibling of the element [B]">[IN-S]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal <br>
+              [IN-S] - sibling (of the element [B]) with <code>3D transform</code> and transition between the parent [P] and the child [B]
+ </td>
+              <td>The colors of the parent element [P] and the transformed sibling element [IN-S] mix with the color of the blended element [B].<br>
+              The element [IN-S] and its content are properly transformed
+ </td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4>An element with <code>mix-blend-mode</code> must work properly with elements with <code>overflow</code> property</h4>
+          <table>
+            <tr>
+              <th>Test name</th>
+              <th>Elements and styles</th>
+              <th>Expected result</th>
+            </tr>
+            <tr>
+ <td>Parent element with <code>overflow:scroll</code> </td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [P] has <code>overflow:scroll</code> <br>
+              [B] - element with <code>mix-blend-mode</code> other than normal that overflows the dimensions of the parent [P] so that it creates scrolling for the parent
+              </td>
+ <td>The color of the parent element [P] mixes with the color of the child element [B]. <br>
+ The scrolling mechanism is not affected.
+ </td>
+ </tr>
+ <tr>
+ <td>Blended element with <code>overflow:scroll</code></td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal, <code>overflow:scroll</code> and a child element that creates overflow for [B]</td>
+ <td>The color of the parent element [P] mixes with the color of the child element [B] <br>
+ The scrolling mechanism is not affected.
+ </td>
+ </tr>
+ <tr>
+ <td>Parent element with <code>overflow:scroll</code> and blended with <code>position:fixed</code> </td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [P] has <code>overflow:scroll</code> <br>
+              [B] - element with <code>mix-blend-mode</code> other than normal and <code>position:fixed</code>; it should overflow the dimensions of the parent [P] so that it creates scrolling for the parent</td>
+ <td>The color of the parent element [P] mixes with the color of the child element [B] <br>
+ The blending happens when scrolling the content of the parent element [P] too. <br>
+ The scrolling mechanism is not affected.
+ </td>
+ </tr>
+ <tr>
+ <td>Parent with <code>overflow:hidden</code> and <code>border-radius</code></td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+              [P] has <code>overflow:hidden</code> and <code>border-radius</code> specified (e.g. 50%) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal with content that lies outside the parent element, over a rounded corner <br>
+ Set the <code>background-color</code> of the <code>body</code> to a value other than default.</td>
+ <td>The color of the parent element mixes with the color of the child element. <br>
+              The area of the child element which draws over the rounded corner is properly clipped </td>
+ </tr>
+ <tr>
+ <td>Blended element with <code>overflow:hidden</code> and <code>border-radius</code></td>
+              <td>3 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>, <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="Child of the element [B]">[CB]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+              [B] - element with <code>mix-blend-mode</code> other than normal, <code>overflow:hidden</code> and <code>border-radius</code> specified (e.g. 50%). <br>
+ [CB] - child of the element [B], with content that lies outside the parent element, over a rounded corner. <br> </td>
+              <td>The group created by the two child elements ([B] and [CB]) is blended with the parent element [P]. <br>
+ No blending between [B] and [CB]. <br>
+ [CB] is properly clipped so no overflow is visible.</td>
+ </tr>
+ <tr>
+ <td>Intermediate child with <code>overflow:hidden</code> and <code>border-radius</code> between the parent and the blended element</td>
+ <td>3 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>, <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="child of the element [P]">[IN-P]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+              [B] - element with <code>mix-blend-mode</code> other than normal that overflows the dimensions of its parent [IN-P] <br>
+              [IN-P] - child (of the element [P]) with <code>overflow:hidden</code> and <code>border-radius</code> specified (e.g. 50%)
+              </td>
+              <td>The colors of the parent element [P] and the child element [IN-P] mix with the color of the blended element [B]. <br>
+              [B] is properly clipped so no overflow is visible
+ </td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4>Other test cases</h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Blended element with <code>border-image</code> </td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal and <code>border-image</code> specified as a png file
+ </td>
+ <td>The color of the parent element [P] mixes with the color of the child element. <br>
+ The color of the <code>border-image</code> mixes with the color of the parent element [P].
+ </td>
+ </tr>
+ <tr>
+ <td>Blending with <code>&lt;canvas&gt;</code> </td>
+ <td>2 elements required: <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="Sibling of the element [B]">[IN-S]</a> <br>
+ [B] - <code>&lt;canvas&gt;</code> element with <code>mix-blend-mode</code> other than normal <br>
+ [IN-S] - Sibling of the <code>&lt;canvas&gt;</code> element with some text <br>
+ The [IN-S] element overlaps the <code>&lt;canvas&gt;</code> element
+ </td>
+ <td>The content of the <code>&lt;canvas&gt;</code> element mixes with the color of the sibling element and the text [IN-S].</td>
+ </tr>
+ <tr>
+ <td>Blended <code>&lt;canvas&gt;</code></td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - Child <code>&lt;canvas&gt;</code> element with <code>mix-blend-mode</code> other than normal
+ </td>
+ <td>The color of the <code>&lt;canvas&gt;</code> element [B] mixes with the color of the parent element [P] .</td>
+ </tr>
+ <tr>
+ <td>Blended <code>&lt;video&gt;</code></td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - <code>&lt;video&gt;</code> element with <code>mix-blend-mode</code> other than normal
+ </td>
+ <td>The color of the <code>&lt;video&gt;</code> element mixes with the color of the parent element [P] .</td>
+ </tr>
+ <tr>
+ <td>Blending with <code>&lt;iframe&gt;</code> </td>
+ <td>2 elements required: <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> and <a href="#test_outline" title="Sibling of the element [B]">[IN-S]</a> <br>
+ [B] - <code>&lt;iframe&gt;</code> element with <code>mix-blend-mode</code> other than normal <br>
+              [IN-S] - sibling (of the element [B]) with some text <br>
+ The [IN-S] element visually overlaps the <code>&lt;iframe&gt;</code> element
+ </td>
+ <td>The color of the <code>&lt;iframe&gt;</code> element mixes with the color of the sibling element and the text [IN-S].</td>
+ </tr>
+ <tr>
+ <td>Blended <code>&lt;iframe&gt;</code></td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - <code>&lt;iframe&gt;</code> element with <code>mix-blend-mode</code> other than normal
+ </td>
+ <td>The color of the <code>&lt;iframe&gt;</code> element [B] mixes with the color of the parent element [P]. </td>
+ </tr>
+ <tr>
+ <td>Blended element with <code>mask</code> property</td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal and <code>mask</code> property specified to an SVG image (e.g. circle)</td>
+ <td>The colors of the parent and the masked child are mixed ([P] and [B])</td>
+ </tr>
+ <tr>
+ <td>Blended element with <code>clip-path</code> property </td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal and <code>clip-path</code> property specified to a basic shape (e.g. ellipse)</td>
+ <td>The colors of the parent and the clipped child are mixed ([P] and [B])</td>
+ </tr>
+ <tr>
+ <td>Blended element with <code>filter</code> property</td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal and <code>filter</code> property value other than none </td>
+ <td>The filter is applied and the result is mixed with the parent element</td>
+ </tr>
+ <tr>
+ <td>Blended element with <code>transition</code></td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal and <code>transition-property</code> for <code>opacity</code> </td>
+ <td>The transition is applied and the result is mixed with the parent element</td>
+ </tr>
+ <tr>
+ <td>Blended element with <code>animation</code></td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - element with <code>mix-blend-mode</code> other than normal and <code>animation</code> specified</td>
+ <td>The animation is applied to the child element and the result is mixed with the parent element</td>
+ </tr>
+ <tr>
+ <td>Image element</td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - <code>&lt;img&gt;</code> element (.jpeg or .gif image) with <code>mix-blend-mode</code> other than normal</td>
+ <td>The color of the <code>&lt;img&gt;</code> is mixed with the color of the <code>&lt;div&gt;</code>.</td>
+ </tr>
+ <tr>
+ <td>SVG element</td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - SVG element with <code>mix-blend-mode</code> other than normal</td>
+ <td>The color of the SVG is mixed with the color of the <code>&lt;div&gt;</code>.</td>
+ </tr>
+ <tr>
+ <td>Paragraph element</td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - paragraph element with <code>mix-blend-mode</code> other than normal</td>
+ <td>The color of the text from the paragraph element is mixed with the color of the <code>&lt;div&gt;</code></td>
+ </tr>
+ <tr>
+ <td>Paragraph element and background-image</td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ and <code>background-image</code> <br>
+ [B] - Child <code>p</code> element with some text and <code>mix-blend-mode</code> other than normal</td>
+ <td>The color of the text from the <code>p</code> element is mixed with the background image of the <code>&lt;div&gt;</code>.</td>
+ </tr>
+ <tr>
+ <td>Set blending from JavaScript</td>
+ <td>2 elements required: <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [B] - Child <code>&lt;div&gt;</code> element with no <code>mix-blend-mode</code> specified<br>
+ From JavaScript, set the <code>mix-blend-mode</code> property for the child <code>&lt;div&gt;</code> to a value other than normal</td>
+ <td>The colors of the <code>&lt;div&gt;</code> elements are mixed.</td>
+ </tr>
+ </table>
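+          <p>For the "Set blending from JavaScript" case, a minimal illustrative snippet could be the
+          following (the id "B" and the <code>multiply</code> value are arbitrary placeholders):</p>
+          <pre>
+&lt;script&gt;
+  // [B] starts with no mix-blend-mode specified; set it dynamically.
+  document.getElementById("B").style.mixBlendMode = "multiply";
+&lt;/script&gt;
+          </pre>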
+ </section>
+ </section>
+ <section>
+      <h3>Test cases for SVG elements with <code>mix-blend-mode</code></h3>
+ <section>
+ <h4><code>mix-blend-mode</code> with simple SVG graphical elements</h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#mix-blend-mode" >spec</a> : <q><code>mix-blend-mode</code> applies to svg, g, use, image, path, rect, circle, ellipse, line, polyline, polygon, text, tspan, and marker.</q></p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Circle with SVG background</td>
+ <td>Set a background color for the SVG.<br>
+                Create 16 <code>circle</code> elements and fill them with a solid color.
+                <br>Apply a different <code>mix-blend-mode</code> value to each of them.</td>
+ <td>The color of the <code>circle</code> is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Ellipse with SVG background</td>
+ <td>Set a background color for the SVG.<br>
+ Create an <code>ellipse</code> element and fill it with a solid color.
+ <br>Apply a <code>mix-blend-mode</code> on it other than <code>normal</code>.</td>
+ <td>The color of the <code>ellipse</code> is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Image with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create an <code>image</code> element and apply a <code>mix-blend-mode</code> other than <code>normal</code>.</td>
+ <td>The <code>image</code> is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Line with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>line</code> element and fill it with a solid color.
+ <br>Apply a <code>mix-blend-mode</code> on it other than <code>normal</code>.</td>
+ <td>The color of the <code>line</code> is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Path with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>path</code> element and fill it with a solid color.
+ <br>Apply a <code>mix-blend-mode</code> on it other than <code>normal</code>.</td>
+ <td>The color of the <code>path</code> is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Polygon with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>polygon</code> element and fill it with a solid color.
+ <br>Apply a <code>mix-blend-mode</code> on it other than <code>normal</code>.</td>
+ <td>The color of the <code>polygon</code> is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Polyline with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>polyline</code> element and fill it with a solid color.
+ <br>Apply a <code>mix-blend-mode</code> on it other than <code>normal</code>.</td>
+ <td>The color of the <code>polyline</code> is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Rect with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>rect</code> element and fill it with a solid color.
+ <br>Apply a <code>mix-blend-mode</code> on it other than <code>normal</code>.</td>
+ <td>The color of the <code>rect</code> is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Text with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>text</code> element and apply a <code>mix-blend-mode</code> other than <code>normal</code>.</td>
+ <td>The text is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Text having tspan with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>text</code> element and a <code>tspan</code> inside it.
+ <br>Apply a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>tspan</code>.</td>
+ <td>The text is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Gradient with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>rect</code> element and fill it with a <code>gradient</code>.
+ <br>Apply a <code>mix-blend-mode</code> on it other than normal.</td>
+ <td>The gradient is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Pattern with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>rect</code> element and fill it with a <code>pattern</code>.
+ <br>Apply a <code>mix-blend-mode</code> on it other than normal.</td>
+ <td>The pattern is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Set blending on an element from JavaScript</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>rect</code> element and fill it with a solid color.
+ <br>Apply a <code>mix-blend-mode</code> (other than <code>normal</code>) on it from JavaScript.</td>
+ <td>The color of the <code>rect</code> is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Marker with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>line</code> element containing a marker.
+ <br>Apply a <code>mix-blend-mode</code> other than <code>normal</code> on the marker.</td>
+ <td>The marker color is mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>Metadata with SVG background</td>
+ <td>Set a background color for the SVG.
+                <br>Create a <code>metadata</code> element containing an embedded PDF.
+                <br>Apply a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>metadata</code> element.</td>
+ <td>The metadata content is not mixed with the color of the background.</td>
+ </tr>
+ <tr>
+ <td>ForeignObject with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>foreignObject</code> element containing a simple XHTML document.
+ <br>Apply a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>foreignObject</code>.</td>
+ <td>The foreignObject content is not mixed with the color of the background.</td>
+ </tr>
+ </table>
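+ <p>A minimal sketch of the <q>Rect with SVG background</q> case above (the colors, sizes and blend mode are illustrative):</p>
+ <pre>
+&lt;svg width="200" height="200" xmlns="http://www.w3.org/2000/svg"&gt;
+  &lt;!-- background color for the SVG --&gt;
+  &lt;rect width="200" height="200" fill="orange"/&gt;
+  &lt;!-- blended element: expected to mix with the orange background --&gt;
+  &lt;rect x="50" y="50" width="100" height="100" fill="blue" style="mix-blend-mode: multiply"/&gt;
+&lt;/svg&gt;
+ </pre>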
+ </section>
+ <section>
+ <h4><code>mix-blend-mode</code> with SVG groups</h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Group of overlapping elements with SVG background</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>group</code> element containing two overlapping <code>rect</code> elements, each filled with a different solid color.
+ <br>Apply a <code>mix-blend-mode</code> other than <code>normal</code> on the group.</td>
+ <td>The <code>group</code> is mixed as a whole with the color of the background.</td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4><code>mix-blend-mode</code> with isolated groups</h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#mix-blend-mode" >spec</a>:
+ <br><q>By default, every element must create a non-isolated group.<br>
+ However, certain operations in SVG will create isolated groups.<br>
+ If one of the following features is used, the group must become isolated:
+ <ul>
+ <li>opacity</li>
+ <li>filters</li>
+ <li>3D transforms (2D transforms must NOT cause isolation)</li>
+ <li>blending</li>
+ <li>masking</li>
+ </ul>
+ </q>
+ </p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Blending two elements in an isolated group</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>group</code> element containing two overlapping <code>rect</code> elements, each filled with a different solid color.<br>
+ Apply <code>opacity</code> less than 1 on the group and a <code>mix-blend-mode</code> other than <code>normal</code> on the second rect.</td>
+ <td>Only the intersection of the <code>rect</code> elements should mix.</td>
+ </tr>
+ <tr>
+ <td>Blending in a group with opacity</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>group</code> element containing a <code>rect</code> element filled with a different solid color.<br>
+ Apply <code>opacity</code> less than 1 on the group and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will not mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blending in a group with filter</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>group</code> element containing a <code>rect</code> element filled with a different solid color.<br>
+ Apply a <code>filter</code> on the group and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will not mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blending in a group with 2D transform</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>group</code> element containing a <code>rect</code> element filled with a different solid color.<br>
+ Apply a <code>transform</code> on the group and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blending in a group with 3D transform</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>group</code> element containing a <code>rect</code> element filled with a different solid color.<br>
+ Apply a 3d transform on the group and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will not mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blending in a group with a mask</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>group</code> element containing a <code>rect</code> element filled with a different solid color.<br>
+ Apply a <code>mask</code> on the group and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will not mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blending in a group with mix-blend-mode</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>group</code> element containing a <code>rect</code> element filled with a different solid color.<br>
+ Apply a <code>mix-blend-mode</code> other than <code>normal</code> on the group and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will not mix with the content behind it.</td>
+ </tr>
+ </table>
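+ <p>A minimal sketch of the <q>Blending in a group with opacity</q> case above (values are illustrative): the group's <code>opacity</code> makes it an isolated group, so the inner <code>rect</code> does not mix with the background behind the group.</p>
+ <pre>
+&lt;svg width="200" height="200" xmlns="http://www.w3.org/2000/svg"&gt;
+  &lt;rect width="200" height="200" fill="orange"/&gt;
+  &lt;g style="opacity: 0.9"&gt;
+    &lt;!-- isolated by the group's opacity: does not mix with the orange background --&gt;
+    &lt;rect x="50" y="50" width="100" height="100" fill="blue" style="mix-blend-mode: multiply"/&gt;
+  &lt;/g&gt;
+&lt;/svg&gt;
+ </pre>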
+ </section>
+ <section>
+ <h4>Other test cases for SVG</h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Blend with element having opacity</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>rect</code> element filled with a different solid color.<br>
+ Apply <code>opacity</code> less than 1 and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blend with element having stroke</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>rect</code> element filled with a different solid color.<br>
+ Apply a <code>stroke</code> and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blend with element having stroke-opacity</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>rect</code> element filled with a different solid color.<br>
+ Apply a <code>stroke</code>, <code>stroke-opacity</code> less than 1 and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blend with element having stroke-dasharray</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>rect</code> element filled with a different solid color.<br>
+ Apply a <code>stroke-dasharray</code> and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blend with element having transform</td>
+ <td>Set a background color for the SVG.<br>
+ Create an <code>image</code> element. Apply a <code>transform</code> (any combination of <code>translate</code>, <code>rotate</code>, <code>scale</code>, <code>skew</code>) and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>image</code>.</td>
+ <td>The <code>image</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blend with SVG having viewBox and preserveAspectRatio set</td>
+ <td>Set a background color for the SVG, as well as <code>viewBox</code> and <code>preserveAspectRatio</code>.<br>
+ Create a <code>rect</code> element filled with a different solid color and apply a <code>mix-blend-mode</code> other than <code>normal</code> on it.</td>
+ <td>The <code>rect</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blend with an element having color-profile set</td>
+ <td>Set a background color for the SVG.<br>
+ Create an <code>image</code> element. Apply a <code>color-profile</code> (<code>sRGB</code>, for example) and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>image</code>.</td>
+ <td>The <code>image</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blend with an element having overflow</td>
+ <td>Set a background color for the SVG.<br>
+ Create an <code>image</code> larger than the SVG.<br>
+ Apply <code>overflow</code> (<code>visible</code>, <code>hidden</code>, <code>scroll</code>) and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>image</code>.</td>
+ <td>The <code>image</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blend with an element having clip-path</td>
+ <td>Set a background color for the SVG.<br>
+ Create an <code>image</code> element. Apply a <code>clip-path</code> and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>image</code>.</td>
+ <td>The <code>image</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blend with an element having a mask</td>
+ <td>Set a background color for the SVG.<br>
+ Create an <code>image</code> element.<br>
+ Apply a <code>mask</code> and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>image</code>.</td>
+ <td>The <code>image</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blend with an element having a filter</td>
+ <td>Set a background color for the SVG.<br>
+ Create an <code>image</code> element.<br>
+ Apply a <code>filter</code> and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>image</code>.</td>
+ <td>The <code>image</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blend with an animated element</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>rect</code> element filled with a different solid color.<br>
+ Apply an <code>animateTransform</code> and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Set blending from an SVG script element</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>rect</code> element and fill it with a solid color.<br>
+ Apply a <code>mix-blend-mode</code> (other than <code>normal</code>) on it from an svg <code>script</code> element.</td>
+ <td>The <code>rect</code> will mix with the content behind it.</td>
+ </tr>
+ </table>
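+ <p>A minimal sketch of the <q>Set blending from an SVG script element</q> case above (the id and blend mode are illustrative):</p>
+ <pre>
+&lt;svg width="200" height="200" xmlns="http://www.w3.org/2000/svg"&gt;
+  &lt;rect width="200" height="200" fill="orange"/&gt;
+  &lt;rect id="target" x="50" y="50" width="100" height="100" fill="blue"/&gt;
+  &lt;script&gt;
+    document.getElementById("target").style.mixBlendMode = "multiply";
+  &lt;/script&gt;
+&lt;/svg&gt;
+ </pre>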
+ </section>
+ </section>
+ <section>
+ <h3>Test cases for <code>background-blend-mode</code></h3>
+ <section>
+ <h4>Blending between the background layers and the background color for an element with <code>background-blend-mode</code> </h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#background-blend-mode">spec</a>: <q>Each background layer must blend with the element's background layer that are below it and the element's background color.</q></p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Images with different formats</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ Tests should be created for <code>&lt;image&gt;</code> with different formats such as PNG, JPEG or SVG
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code></td>
+ </tr>
+ <tr>
+ <td>Gradient and background color</td>
+ <td>
+ Element with
+ <ul>
+ <li><code>background-image</code> set to a <code>&lt;gradient&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code></td>
+ </tr>
+ <tr>
+ <td>Image and gradient</td>
+ <td>
+ Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code> on top of a <code>&lt;gradient&gt;</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>&lt;image&gt;</code> is mixed with the content of the <code>&lt;gradient&gt;</code>
+ </td>
+ </tr>
+ <tr>
+ <td>Gradient and image</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to a <code>&lt;gradient&gt;</code> on top of an <code>&lt;image&gt;</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>&lt;image&gt;</code> is mixed with the content of the <code>&lt;gradient&gt;</code></td>
+ </tr>
+ <tr>
+ <td>Two gradients</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to a <code>&lt;gradient&gt;</code> on top of another <code>&lt;gradient&gt;</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul></td>
+ <td>The content of the two gradients is mixed</td>
+ </tr>
+ <tr>
+ <td>Two images</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code> on top of another <code>&lt;image&gt;</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul></td>
+ <td>The content of the two images is mixed</td>
+ </tr>
+ <tr>
+ <td>Image and background color with transparency</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code> with transparency (e.g. PNG images)</li>
+ <li><code>background-color</code> set to a transparent color</li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code></td>
+ </tr>
+ <tr>
+ <td>Cross-fade image and gradient</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to a <code>cross-fade()</code> image on top of a <code>&lt;gradient&gt;</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the cross-faded image is mixed with the content of the <code>&lt;gradient&gt;</code></td>
+ </tr>
+ <tr>
+ <td>SVG image and background color</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to a data URI for an SVG image </li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the image is mixed with the color of the background</td>
+ </tr>
+ <tr>
+ <td>Animated gif image and background color</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an animated gif image</li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the image is mixed with the color of the background</td>
+ </tr>
+ <tr>
+ <td>Set <code>background-blend-mode</code> from JavaScript</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to a <code>gradient</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li>no <code>background-blend-mode</code> explicitly specified</li>
+ </ul>
+ From JavaScript, set the <code>background-blend-mode</code> property to a value other than normal.
+ </td>
+ <td>The content of the gradient is mixed with the color of the background</td>
+ </tr>
+ <tr>
+ <td><code>background-blend-mode</code> on element with 3D transform</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ <li><code>transform</code> set to a 3D function like rotateX, rotateY or translateZ</li>
+ </ul>
+ </td>
+ <td>The content of the image is mixed with the color of the background</td>
+ </tr>
+ </table>
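+ <p>A minimal sketch of the <q>Images with different formats</q> case above (the image URL, color and blend mode are illustrative):</p>
+ <pre>
+&lt;style&gt;
+  .blended {
+    width: 200px;
+    height: 200px;
+    background-image: url(image.png);  /* any supported format: PNG, JPEG, SVG, ... */
+    background-color: orange;          /* fully opaque color */
+    background-blend-mode: multiply;   /* other than normal */
+  }
+&lt;/style&gt;
+&lt;div class="blended"&gt;&lt;/div&gt;
+ </pre>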
+ </section>
+ <section>
+ <h4>Background layers do not blend with content outside the background (or behind the element)</h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#background-blend-mode">spec</a>: <q>Background layer must not blend with the content that is behind the element instead they must act as if they are rendered into an isolated group.</q>
+ </p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>One background layer</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The <code>background-image</code> is not mixed with anything outside the element</td>
+ </tr>
+ <tr>
+ <td>Two elements</td>
+ <td>2 elements required: a parent element with a child. <br>
+ Each one with the following properties:
+ <ul>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ <td>No blending between the background colors of the two elements</td>
+ </tr>
+ <tr>
+ <td>Parent and child with <code>background-blend-mode</code></td>
+ <td>2 elements required: a parent element with a child <br>
+ Parent properties: <br>
+ <ul>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ Child properties: <br>
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ <td>The content of the image from the child element does not mix with the background color from the parent element</td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4> <code>background-blend-mode</code> list values apply to the corresponding background layer</h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#background-blend-mode">spec</a>: <q>The ‘background-blend-mode’ list must be applied in the same order as ‘background-image’[CSS3BG]. This means that the first element in the list will apply to the layer that is on top.</q>
+ </p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Different blend modes applied between layers</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image-list&gt;</code> containing three images (e.g. I1, I2 and I3)</li>
+ <li><code>background-blend-mode</code> set to a different blend mode for every image (e.g. multiply, difference, screen)</li>
+ </ul></td>
+ <td>The content of the three images is correctly mixed <br>
+ (multiply for I1, difference for I2 and screen for I3)
+ </td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4><code>background-blend-mode</code> list values are repeated if the list is shorter than the background layer list</h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#background-blend-mode">spec</a>: <q>If a property doesn't have enough comma-separated values to match the number of layers, the UA must calculate its used value by repeating the list of values until there are enough.</q>
+ </p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Blend mode list repeat</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image-list&gt;</code> containing three images</li>
+ <li><code>background-blend-mode</code> set to two different blend mode values</li>
+ </ul></td>
+ <td>The unspecified blend modes should be obtained by repeating the blend mode list from the beginning</td>
+ </tr>
+ </table>
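+ <p>A minimal sketch of the case above (the image URLs and blend modes are illustrative); with three layers and only two list values, the list is repeated, so the third layer reuses <code>multiply</code>:</p>
+ <pre>
+.repeat {
+  background-image: url(i1.png), url(i2.png), url(i3.png);
+  background-blend-mode: multiply, screen;  /* used value: multiply, screen, multiply */
+}
+ </pre>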
+ </section>
+ <section>
+ <h4>The default <code>background-blend-mode</code> value for the <code>background</code> shorthand is 'normal' </h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#background-blend-mode">spec</a>: <q>If the ‘background’ [CSS3BG] shorthand is used, the ‘background-blend-mode’ property for that element must be reset to its initial value.</q>
+ </p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Default blend mode for 'background' shorthand</td>
+ <td>Element with
+ <ul>
+ <li><code>background</code> property set to an image and a color</li>
+ <li>No value explicitly set for <code>background-blend-mode</code> </li>
+ </ul></td>
+ <td> The computed value of <code>background-blend-mode</code> is 'normal'
+ </td>
+ </tr>
+ </table>
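+ <p>A minimal sketch of the case above (the image URL and color are illustrative); no <code>background-blend-mode</code> is declared, so its computed value stays at the initial value <code>normal</code>:</p>
+ <pre>
+.shorthand {
+  background: url(image.png) orange;
+  /* getComputedStyle(element).backgroundBlendMode === "normal" */
+}
+ </pre>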
+ </section>
+ <section>
+ <h4><code>background-blend-mode</code> for an element with <code>background-position</code></h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td><code>background-position</code> percentage</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-position</code> specified in percentage, such as 50% 50%</li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ The <code>background-image</code> is correctly positioned
+ </td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4><code>background-blend-mode</code> for an element with <code>background-size</code></h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Background size defined in pixels</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-size</code> specified in pixels</li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ The <code>background-image</code> has the correct size
+ </td>
+ </tr>
+ <tr>
+ <td>Background size defined in percentage (second phase)</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-size</code> specified in percentage</li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ The <code>background-image</code> has the correct size
+ </td>
+ </tr>
+ <tr>
+ <td>Background size cover</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-size</code> set to <code>cover</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ The <code>background-image</code> has the correct size
+ </td>
+ </tr>
+ <tr>
+ <td>Background size contain</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-size</code> set to <code>contain</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ The <code>background-image</code> has the correct size
+ </td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4><code>background-blend-mode</code> for an element with <code>background-repeat</code></h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td><code>background-repeat</code> set to no-repeat</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-repeat</code> set to <code>no-repeat</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ The <code>background-image</code> is not repeated
+ </td>
+ </tr>
+ <tr>
+ <td><code>background-repeat</code> set to space</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-repeat</code> set to <code>space</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ The <code>background-image</code> is repeated without clipping, with the remaining area distributed as space between the tiles
+ </td>
+ </tr>
+ <tr>
+ <td><code>background-repeat</code> set to round</td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-repeat</code> set to <code>round</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ The <code>background-image</code> is repeated and rescaled so that a whole number of tiles fills the background area
+ </td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4><code>background-blend-mode</code> for an element with <code>background-clip</code></h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td><code>background-clip</code> set to <code>padding-box</code></td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-clip</code> set to <code>padding-box</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ No background is drawn below the border (background extends to the outside edge of the padding)
+ </td>
+ </tr>
+ <tr>
+ <td><code>background-clip</code> set to <code>content-box</code></td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-clip</code> set to <code>content-box</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ The background is painted within (clipped to) the content box
+ </td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4><code>background-blend-mode</code> for an element with <code>background-origin</code></h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td><code>background-origin</code> set to <code>border-box</code></td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-origin</code> set to <code>border-box</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ The background extends to the outside edge of the border (but underneath the border in z-ordering)
+ </td>
+ </tr>
+ <tr>
+ <td><code>background-origin</code> set to <code>content-box</code></td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-origin</code> set to <code>content-box</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ The background is painted within (clipped to) the content box
+ </td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4><code>background-blend-mode</code> for an element with <code>background-attachment</code></h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td><code>background-attachment</code> set to <code>fixed</code></td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to an <code>&lt;image&gt;</code></li>
+ <li><code>background-color</code> set to a fully opaque color</li>
+ <li><code>background-attachment</code> set to <code>fixed</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The content of the <code>background-image</code> is mixed with the color of the <code>background-color</code> <br>
+ The background image will not scroll with its containing element, instead remaining stationary within the viewport
+ </td>
+ </tr>
+ <tr>
+ <td>2 background images with <code>background-attachment</code> set to <code>fixed, scroll</code></td>
+ <td>Element with
+ <ul>
+ <li><code>background-image</code> set to 2 <code>&lt;image&gt;</code>(s)</li>
+ <li><code>background-attachment</code> set to <code>fixed, scroll</code></li>
+ <li><code>background-blend-mode</code> other than normal</li>
+ </ul>
+ </td>
+ <td>The background images will be mixed when they overlap while scrolling
+ </td>
+ </tr>
+ </table>
+ </section>
+ </section>
+ <section>
+ <h3>Test cases for <code>isolation</code></h3>
+ <section>
+ <h4>An element with <code>isolation:isolate</code> creates a stacking context</h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#csscompositingrules_CSS">spec</a>: <q>For CSS, setting ‘isolation’ to ‘isolate’ will turn the element into a stacking context [CSS21].</q></p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Isolation isolate</td>
+ <td>Have an element with <code>isolation</code> set to <code>isolate</code></td>
+ <td>The element creates a stacking context.</td>
+ </tr>
+ </table>
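+ <p>A minimal sketch of the case above (the class name is illustrative):</p>
+ <pre>
+.isolated {
+  isolation: isolate;
+  /* the element now establishes a stacking context, e.g. positioned
+     descendants with negative z-index cannot paint behind it */
+}
+ </pre>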
+ </section>
+ <section>
+ <h4>An element with <code>isolation:isolate</code> creates an isolated group for blended children</h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Isolation of blended child which overflows</td>
+ <td>3 elements required:
+ <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>,
+ <a href="#test_outline" title="child of the element [P]">[IN-P]</a> and
+ <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>) <br>
+ [IN-P] - Intermediate child element between the parent [P] and the child [B]<br>
+ This element has <code>isolation:isolate</code> set.<br>
+ [B] - element with <code>mix-blend-mode</code> other than <code>normal</code> <br>
+ The blending element [B] has content that lies outside the parent element. <br>
+ </td>
+ <td>
+ The color of the child element [B] mixes with the color of the intermediate element [IN-P], where they overlap.<br>
+ The area of the child element outside of the intermediate parent element does not mix with the color of the parent element [P], or of the <code>body</code>.
+ </td>
+ </tr>
+ <tr>
+ <td>Isolation on intermediate element with transparent pixels</td>
+ <td>3 elements required:
+ <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>,
+ <a href="#test_outline" title="child of the element [P]">[IN-P]</a> and
+ <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>); the element's <code>background-color</code> is other than <code>transparent</code><br>
+ [IN-P] - Intermediate child element between the parent [P] and the child [B]<br>
+ The intermediate element has text content, the default value for <code>background-color</code>, and <code>isolation:isolate</code> set<br>
+ [B] - element with <code>mix-blend-mode</code> other than <code>normal</code><br>
+ </td>
+ <td>
+ The color of the child element [B] mixes with the color of the intermediate element [IN-P], where they overlap.<br>
+ There is no blending between the color of the parent element [P] and the color of the blended element [B].
+ </td>
+ </tr>
+ <tr>
+ <td>Isolate inside a stacking context created by a 3d transform</td>
+ <td>
+ 3 elements required:
+ <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a>,
+ <a href="#test_outline" title="child of the element [P]">[IN-P]</a> and
+ <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a <code>3D transform</code> applied<br>
+ [IN-P] - Intermediate child element between the parent [P] and the child [B]<br>
+ The intermediate element has <code>isolation:isolate</code> set<br>
+ [B] - element with <code>mix-blend-mode</code> other than <code>normal</code><br>
+ </td>
+ <td>
+ The color of the child element [B] mixes with the color of the intermediate element [IN-P], where they overlap.<br>
+ There is no blending between the color of the parent element [P] and the color of the blended element [B].
+ </td>
+ </tr>
+ </table>
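+ <p>A minimal sketch of the first case above (class names, colors and the blend mode are illustrative): [P] creates a stacking context, [IN-P] is isolated, and [B] overflows [IN-P]:</p>
+ <pre>
+&lt;style&gt;
+  .p   { position: fixed; background: green; width: 200px; height: 200px; }
+  .inp { isolation: isolate; background: orange; width: 150px; height: 150px; }
+  .b   { mix-blend-mode: multiply; background: blue; width: 300px; height: 100px; }
+&lt;/style&gt;
+&lt;div class="p"&gt;
+  &lt;div class="inp"&gt;
+    &lt;!-- mixes with .inp where they overlap, but not with .p or the body outside .inp --&gt;
+    &lt;div class="b"&gt;&lt;/div&gt;
+  &lt;/div&gt;
+&lt;/div&gt;
+ </pre>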
+ </section>
+ <section>
+ <h4>An element with <code>isolation:auto</code> set does not change the element's existing stacking context behavior</h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Isolation auto</td>
+ <td>Have an element with <code>isolation</code> explicitly set to <code>auto</code>, and no other style that would create a stacking context</td>
+ <td>The element does not create a stacking context - the computed value of its <code>z-index</code> is <code>auto</code></td>
+ </tr>
+ <tr>
+ <td>Stacking context not affected by isolation</td>
+ <td>2 elements required:
+ <a href="#test_outline" title="parent element with a property that creates stacking context">[P]</a> and
+ <a href="#test_outline" title="Element with mix-blend-mode property other than normal">[B]</a> <br>
+ [P] - parent element with a property that creates a stacking context (e.g. <code>position:fixed</code>); This element has <code>isolation</code> explicitly set to <code>auto</code> <br>
+ [B] - element with <code>mix-blend-mode</code> other than <code>normal</code> <br>
+ The blending element [B] has content that lies outside the parent element. <br>
+ Set the <code>background-color</code> of the <code>body</code> to a value other than default
+ </td>
+ <td>The color of the parent element mixes with the color of the child element. <br>
+ The area of the child element outside of the parent element doesn't mix with the color of the <code>body</code>.<br>
+ In other words, setting the <code>isolation</code> to <code>auto</code> does not affect the creation of a stacking context by other properties.
+ </td>
+ </tr>
+ </table>
+ </section>
+ </section>
+ <section>
+ <h3>Test cases for <code>isolation</code> in SVG</h3>
+ <section>
+ <h4>In SVG, an element with <code>isolation:isolate</code> creates an isolated group for blended children</h4>
+ <p>Refers to the following assertion in the <a href="https://drafts.fxtf.org/compositing-1/#isolation">spec</a>: <q>In SVG, this defines whether an element is isolated or not.</q></p>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Blending in an isolated group</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>group</code> element containing a <code>rect</code> element filled with a different solid color.<br>
+ Apply <code>isolation:isolate</code> on the group and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will not mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blending two elements in an isolated group</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>group</code> element containing two overlapping <code>rect</code> elements, each filled with a different solid color.<br>
+ Apply <code>isolation:isolate</code> on the group and a <code>mix-blend-mode</code> other than <code>normal</code> on the second rect.</td>
+ <td>Only the intersection of the <code>rect</code> elements should mix.</td>
+ </tr>
+ <tr>
+ <td>Blending in an isolated group with 2D transform</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>group</code> element containing a <code>rect</code> element filled with a different solid color.<br>
+ Apply <code>isolation:isolate</code> and 2D transform on the group and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will not mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Set isolation on an element from JavaScript</td>
+ <td>Set a background color for the SVG.
+ <br>Create a <code>rect</code> element, fill it with a solid color and apply a <code>mix-blend-mode</code> other than <code>normal</code>.
+ <br>Apply <code>isolation:isolate</code> on it from JavaScript.</td>
+ <td>The <code>rect</code> will not mix with the content behind it.</td>
+ </tr>
+ </table>
+ </section>
+ <section>
+ <h4>In SVG, an element with <code>isolation:auto</code> set does not change the rendering behavior</h4>
+ <table>
+ <tr>
+ <th>Test name</th>
+ <th>Elements and styles</th>
+ <th>Expected result</th>
+ </tr>
+ <tr>
+ <td>Blending a group with <code>isolation:auto</code></td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>group</code> element containing a <code>rect</code> element filled with a different solid color.<br>
+ Apply <code>isolation:auto</code> on the group and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The element will mix with the content behind it.</td>
+ </tr>
+ <tr>
+ <td>Blending in a group with opacity</td>
+ <td>Set a background color for the SVG.<br>
+ Create a <code>group</code> element containing a <code>rect</code> element filled with a different solid color.<br>
+ Apply <code>opacity</code> less than 1 and <code>isolation:auto</code> on the group and a <code>mix-blend-mode</code> other than <code>normal</code> on the <code>rect</code>.</td>
+ <td>The <code>rect</code> will not mix with the content behind it.</td>
+ </tr>
+ </table>
+ </section>
+ </section>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/toBlob.png.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/toBlob.png.html
new file mode 100644
index 0000000000..1533bfdb6c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/toBlob.png.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<title>Canvas test: toBlob.png</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<div id="log"></div>
+<canvas id="c"></canvas>
+<script>
+async_test(function() {
+ on_event(window, "load", this.step_func(function() {
+ var canvas = document.getElementById('c');
+ var ctx = canvas.getContext('2d');
+ canvas.toBlob(this.step_func_done(function(data) {
+ assert_equals(data.type, "image/png");
+ }), 'image/png');
+ }));
+}, "toBlob with image/png returns a PNG Blob");
+</script>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/will-change-abspos-cb-001.html b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/will-change-abspos-cb-001.html
new file mode 100644
index 0000000000..d59e443310
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/benchmarks/data/wpt/weighted/will-change-abspos-cb-001.html
@@ -0,0 +1,30 @@
+<!doctype html>
+<meta charset="utf-8">
+<title>CSS Test: will-change: position turns an element into an abspos containing block.</title>
+<link rel="author" title="Emilio Cobos Álvarez" href="mailto:emilio@crisal.io">
+<link rel="author" title="Boris Zbarsky" href="mailto:bzbarsky@mit.edu">
+<link rel="author" title="Mozilla" href="https://mozilla.org">
+<link rel="help" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1498873">
+<link rel="help" href="https://drafts.csswg.org/css-will-change/#will-change">
+<link rel="match" href="will-change-abspos-cb-001-ref.html">
+<style>
+ .container {
+ border: 1px solid green;
+ width: 100px;
+ height: 100px;
+ margin-top: 100px;
+ display: flex;
+ will-change: position;
+ }
+ .abspos {
+ position: absolute;
+ top: 0;
+ left: 0;
+ background: orange;
+ height: 20px;
+ width: 20px;
+ }
+</style>
+<div class="container">
+ <div class="abspos"></div>
+</div>
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/debug-info.py b/testing/web-platform/tests/tools/third_party/html5lib/debug-info.py
new file mode 100644
index 0000000000..b47b8ebfa2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/debug-info.py
@@ -0,0 +1,37 @@
+from __future__ import print_function, unicode_literals
+
+import platform
+import sys
+
+
+info = {
+ "impl": platform.python_implementation(),
+ "version": platform.python_version(),
+ "revision": platform.python_revision(),
+ "maxunicode": sys.maxunicode,
+ "maxsize": sys.maxsize
+}
+
+search_modules = ["chardet", "genshi", "html5lib", "lxml", "six"]
+found_modules = []
+
+for m in search_modules:
+ try:
+ __import__(m)
+ except ImportError:
+ pass
+ else:
+ found_modules.append(m)
+
+info["modules"] = ", ".join(found_modules)
+
+
+print("""html5lib debug info:
+
+Python %(version)s (revision: %(revision)s)
+Implementation: %(impl)s
+
+sys.maxunicode: %(maxunicode)X
+sys.maxsize: %(maxsize)X
+
+Installed modules: %(modules)s""" % info)
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/Makefile b/testing/web-platform/tests/tools/third_party/html5lib/doc/Makefile
new file mode 100644
index 0000000000..e0e58667e7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/html5lib.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/html5lib.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/html5lib"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/html5lib"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/changes.rst b/testing/web-platform/tests/tools/third_party/html5lib/doc/changes.rst
new file mode 100644
index 0000000000..ded3b705d0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/changes.rst
@@ -0,0 +1,3 @@
+.. :changelog:
+
+.. include:: ../CHANGES.rst
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/conf.py b/testing/web-platform/tests/tools/third_party/html5lib/doc/conf.py
new file mode 100644
index 0000000000..22ebab4faa
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/conf.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# html5lib documentation build configuration file, created by
+# sphinx-quickstart on Wed May 8 00:04:49 2013.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# -- General configuration -----------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.viewcode']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'html5lib'
+copyright = '2006 - 2013, James Graham, Sam Sneddon, and contributors'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '1.0'
+# The full version, including alpha/beta/rc tags.
+sys.path.append(os.path.abspath('..'))
+from html5lib import __version__ # noqa
+release = __version__
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build', 'theme']
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'default'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'html5libdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'html5lib.tex', 'html5lib Documentation',
+ 'James Graham, Sam Sneddon, and contributors', 'manual'),
+]
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'html5lib', 'html5lib Documentation',
+ ['James Graham, Sam Sneddon, and contributors'], 1)
+]
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'html5lib', 'html5lib Documentation',
+ 'James Graham, Sam Sneddon, and contributors', 'html5lib', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+
+class CExtMock(object):
+ """Required for autodoc on readthedocs.org where you cannot build C extensions."""
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def __call__(self, *args, **kwargs):
+ return CExtMock()
+
+ @classmethod
+ def __getattr__(cls, name):
+ if name in ('__file__', '__path__'):
+ return '/dev/null'
+ else:
+ return CExtMock()
+
+
+try:
+ import lxml # noqa
+except ImportError:
+ sys.modules['lxml'] = CExtMock()
+ sys.modules['lxml.etree'] = CExtMock()
+ print("warning: lxml modules mocked.")
+
+try:
+ import genshi # noqa
+except ImportError:
+ sys.modules['genshi'] = CExtMock()
+ sys.modules['genshi.core'] = CExtMock()
+ print("warning: genshi modules mocked.")
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.filters.rst b/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.filters.rst
new file mode 100644
index 0000000000..d70e4552f0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.filters.rst
@@ -0,0 +1,58 @@
+filters Package
+===============
+
+:mod:`base` Module
+-------------------
+
+.. automodule:: html5lib.filters.base
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`alphabeticalattributes` Module
+------------------------------------
+
+.. automodule:: html5lib.filters.alphabeticalattributes
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`inject_meta_charset` Module
+---------------------------------
+
+.. automodule:: html5lib.filters.inject_meta_charset
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`lint` Module
+------------------
+
+.. automodule:: html5lib.filters.lint
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`optionaltags` Module
+--------------------------
+
+.. automodule:: html5lib.filters.optionaltags
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`sanitizer` Module
+-----------------------
+
+.. automodule:: html5lib.filters.sanitizer
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`whitespace` Module
+------------------------
+
+.. automodule:: html5lib.filters.whitespace
+ :members:
+ :show-inheritance:
+ :special-members: __init__
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.rst b/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.rst
new file mode 100644
index 0000000000..d7c75c5842
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.rst
@@ -0,0 +1,38 @@
+html5lib Package
+================
+
+.. automodule:: html5lib
+ :members: __version__
+
+:mod:`constants` Module
+-----------------------
+
+.. automodule:: html5lib.constants
+ :members:
+ :show-inheritance:
+
+:mod:`html5parser` Module
+-------------------------
+
+.. automodule:: html5lib.html5parser
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`serializer` Module
+------------------------
+
+.. automodule:: html5lib.serializer
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+Subpackages
+-----------
+
+.. toctree::
+
+ html5lib.filters
+ html5lib.treebuilders
+ html5lib.treewalkers
+ html5lib.treeadapters
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.treeadapters.rst b/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.treeadapters.rst
new file mode 100644
index 0000000000..1d3a9fba2f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.treeadapters.rst
@@ -0,0 +1,20 @@
+treeadapters Package
+====================
+
+:mod:`~html5lib.treeadapters` Package
+-------------------------------------
+
+.. automodule:: html5lib.treeadapters
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+.. automodule:: html5lib.treeadapters.genshi
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+.. automodule:: html5lib.treeadapters.sax
+ :members:
+ :show-inheritance:
+ :special-members: __init__
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.treebuilders.rst b/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.treebuilders.rst
new file mode 100644
index 0000000000..1a051e50bd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.treebuilders.rst
@@ -0,0 +1,42 @@
+treebuilders Package
+====================
+
+:mod:`treebuilders` Package
+---------------------------
+
+.. automodule:: html5lib.treebuilders
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`base` Module
+-------------------
+
+.. automodule:: html5lib.treebuilders.base
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`dom` Module
+-----------------
+
+.. automodule:: html5lib.treebuilders.dom
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`etree` Module
+-------------------
+
+.. automodule:: html5lib.treebuilders.etree
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`etree_lxml` Module
+------------------------
+
+.. automodule:: html5lib.treebuilders.etree_lxml
+ :members:
+ :show-inheritance:
+ :special-members: __init__
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.treewalkers.rst b/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.treewalkers.rst
new file mode 100644
index 0000000000..4afef47609
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/html5lib.treewalkers.rst
@@ -0,0 +1,50 @@
+treewalkers Package
+===================
+
+:mod:`treewalkers` Package
+--------------------------
+
+.. automodule:: html5lib.treewalkers
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`base` Module
+------------------
+
+.. automodule:: html5lib.treewalkers.base
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`dom` Module
+-----------------
+
+.. automodule:: html5lib.treewalkers.dom
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`etree` Module
+-------------------
+
+.. automodule:: html5lib.treewalkers.etree
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`etree_lxml` Module
+------------------------
+
+.. automodule:: html5lib.treewalkers.etree_lxml
+ :members:
+ :show-inheritance:
+ :special-members: __init__
+
+:mod:`genshi` Module
+--------------------
+
+.. automodule:: html5lib.treewalkers.genshi
+ :members:
+ :show-inheritance:
+ :special-members: __init__
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/index.rst b/testing/web-platform/tests/tools/third_party/html5lib/doc/index.rst
new file mode 100644
index 0000000000..27104b1469
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/index.rst
@@ -0,0 +1,22 @@
+Overview
+========
+
+.. include:: ../README.rst
+ :start-line: 6
+
+.. toctree::
+ :maxdepth: 2
+
+ movingparts
+ modules
+ changes
+ License <license>
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/license.rst b/testing/web-platform/tests/tools/third_party/html5lib/doc/license.rst
new file mode 100644
index 0000000000..7e6291f3b9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/license.rst
@@ -0,0 +1,4 @@
+License
+=======
+
+.. include:: ../LICENSE
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/make.bat b/testing/web-platform/tests/tools/third_party/html5lib/doc/make.bat
new file mode 100644
index 0000000000..e88c769ce3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/make.bat
@@ -0,0 +1,242 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and an HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. texinfo to make Texinfo files
+ echo. gettext to make PO message catalogs
+ echo. changes to make an overview of all changed/added/deprecated items
+ echo. xml to make Docutils-native XML files
+ echo. pseudoxml to make pseudoxml-XML files for display purposes
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+
+%SPHINXBUILD% 2> nul
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.http://sphinx-doc.org/
+ exit /b 1
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\html5lib.qhcp
+ echo.To view the help file:
+ echo.^> assistant -collectionFile %BUILDDIR%\qthelp\html5lib.qhc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "latexpdf" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ cd %BUILDDIR%/latex
+ make all-pdf
+ cd %BUILDDIR%/..
+ echo.
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "latexpdfja" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ cd %BUILDDIR%/latex
+ make all-pdf-ja
+ cd %BUILDDIR%/..
+ echo.
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "texinfo" (
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+ goto end
+)
+
+if "%1" == "gettext" (
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+if "%1" == "xml" (
+ %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The XML files are in %BUILDDIR%/xml.
+ goto end
+)
+
+if "%1" == "pseudoxml" (
+ %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
+ goto end
+)
+
+:end
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/modules.rst b/testing/web-platform/tests/tools/third_party/html5lib/doc/modules.rst
new file mode 100644
index 0000000000..59fbcc86bc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/modules.rst
@@ -0,0 +1,7 @@
+html5lib
+========
+
+.. toctree::
+ :maxdepth: 4
+
+ html5lib
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/doc/movingparts.rst b/testing/web-platform/tests/tools/third_party/html5lib/doc/movingparts.rst
new file mode 100644
index 0000000000..6ba367a27a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/doc/movingparts.rst
@@ -0,0 +1,165 @@
+The moving parts
+================
+
+html5lib consists of a number of components that cooperate to parse,
+transform and serialize HTML.
+
+Parsing uses a *tree builder* to generate a *tree*, the in-memory representation of the document.
+Several tree representations are supported, as are translations to other formats via *tree adapters*.
+The tree may be translated to a token stream with a *tree walker*, from which :class:`~html5lib.serializer.HTMLSerializer` produces a stream of bytes.
+The token stream may also be transformed by use of *filters* to accomplish tasks like sanitization.
+
+Tree builders
+-------------
+
+The parser reads HTML by tokenizing the content and building a tree that
+the user can later access. html5lib can build three types of trees:
+
+* ``etree`` - this is the default; builds a tree based on :mod:`xml.etree`,
+ which can be found in the standard library. Whenever possible, the
+ accelerated ``ElementTree`` implementation (i.e.
+ ``xml.etree.cElementTree`` on Python 2.x) is used.
+
+* ``dom`` - builds a tree based on :mod:`xml.dom.minidom`.
+
+* ``lxml`` - uses the :mod:`lxml.etree` implementation of the ``ElementTree``
+ API. The performance gains are relatively small compared to using the
+ accelerated ``ElementTree`` module.
+
+You can specify the builder by name when using the shorthand API:
+
+.. code-block:: python
+
+ import html5lib
+ with open("mydocument.html", "rb") as f:
+ lxml_etree_document = html5lib.parse(f, treebuilder="lxml")
+
+To get a builder class by name, use the :func:`~html5lib.treebuilders.getTreeBuilder` function.
+
+When instantiating a :class:`~html5lib.html5parser.HTMLParser` object, you must pass a tree builder class via the ``tree`` keyword argument:
+
+.. code-block:: python
+
+ import html5lib
+ TreeBuilder = html5lib.getTreeBuilder("dom")
+ parser = html5lib.HTMLParser(tree=TreeBuilder)
+ minidom_document = parser.parse("<p>Hello World!")
+
+The implementation of builders can be found in `html5lib/treebuilders/
+<https://github.com/html5lib/html5lib-python/tree/master/html5lib/treebuilders>`_.
+
+
+Tree walkers
+------------
+
+In addition to manipulating a tree directly, you can use a tree walker to generate a streaming view of it.
+html5lib provides walkers for ``etree``, ``dom``, and ``lxml`` trees, as well as ``genshi`` `markup streams <https://genshi.edgewall.org/wiki/Documentation/streams.html>`_.
+
+The implementation of walkers can be found in `html5lib/treewalkers/
+<https://github.com/html5lib/html5lib-python/tree/master/html5lib/treewalkers>`_.
+
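+For example, the token stream produced by a walker can be inspected
+directly; each token is a dict carrying at least a ``type`` key (a minimal
+sketch):
+
+.. code-block:: python
+
+ import html5lib
+
+ tree = html5lib.parse("<p>Hello <em>world</em>")
+ walker = html5lib.getTreeWalker("etree")
+ for token in walker(tree):
+     # token["type"] is e.g. "StartTag", "Characters" or "EndTag"
+     print(token["type"], token.get("name", ""))
+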
+html5lib provides :class:`~html5lib.serializer.HTMLSerializer` for generating a stream of bytes from a token stream, and several filters which manipulate the stream.
+
+HTMLSerializer
+~~~~~~~~~~~~~~
+
+The serializer lets you write HTML back as a stream of bytes.
+
+.. code-block:: pycon
+
+ >>> import html5lib
+ >>> element = html5lib.parse('<p xml:lang="pl">Witam wszystkich')
+ >>> walker = html5lib.getTreeWalker("etree")
+ >>> stream = walker(element)
+ >>> s = html5lib.serializer.HTMLSerializer()
+ >>> output = s.serialize(stream)
+ >>> for item in output:
+ ... print("%r" % item)
+ '<p'
+ ' '
+ 'xml:lang'
+ '='
+ 'pl'
+ '>'
+ 'Witam wszystkich'
+
+You can customize the serializer behaviour in a variety of ways. Consult
+the :class:`~html5lib.serializer.HTMLSerializer` documentation.
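+
+For instance, here is a minimal sketch of a serializer configured to always
+quote attribute values and to keep optional end tags (both are keyword
+arguments of the :class:`~html5lib.serializer.HTMLSerializer` constructor):
+
+.. code-block:: python
+
+ from html5lib.serializer import HTMLSerializer
+
+ # Quote every attribute value and do not drop optional tags such as </p>.
+ s = HTMLSerializer(quote_attr_values="always", omit_optional_tags=False)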
+
+
+Filters
+~~~~~~~
+
+html5lib provides several filters:
+
+* :class:`alphabeticalattributes.Filter
+ <html5lib.filters.alphabeticalattributes.Filter>` sorts attributes on
+ tags to be in alphabetical order
+
+* :class:`inject_meta_charset.Filter
+ <html5lib.filters.inject_meta_charset.Filter>` sets a user-specified
+ encoding in the correct ``<meta>`` tag in the ``<head>`` section of
+ the document
+
+* :class:`lint.Filter <html5lib.filters.lint.Filter>` raises
+ :exc:`AssertionError` exceptions on invalid tag and attribute names, invalid
+ PCDATA, etc.
+
+* :class:`optionaltags.Filter <html5lib.filters.optionaltags.Filter>`
+ removes tags from the token stream which are not necessary to produce valid
+ HTML
+
+* :class:`sanitizer.Filter <html5lib.filters.sanitizer.Filter>` removes
+ unsafe markup and CSS. Elements that are known to be safe are passed
+ through and the rest is converted to visible text. The default
+ configuration of the sanitizer follows the `WHATWG Sanitization Rules
+ <http://wiki.whatwg.org/wiki/Sanitization_rules>`_.
+
+* :class:`whitespace.Filter <html5lib.filters.whitespace.Filter>`
+ collapses all whitespace characters to single spaces unless they're in
+ ``<pre/>`` or ``<textarea/>`` tags.
+
+To use a filter, simply wrap it around a token stream:
+
+.. code-block:: pycon
+
+ >>> import html5lib
+ >>> from html5lib.filters import sanitizer
+ >>> dom = html5lib.parse("<p><script>alert('Boo!')", treebuilder="dom")
+ >>> walker = html5lib.getTreeWalker("dom")
+ >>> stream = walker(dom)
+ >>> clean_stream = sanitizer.Filter(stream)
+
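+The filtered stream can then be handed straight to the serializer from the
+previous section (a sketch continuing the example above):
+
+.. code-block:: pycon
+
+ >>> from html5lib.serializer import HTMLSerializer
+ >>> safe_html = HTMLSerializer().render(clean_stream)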
+
+Tree adapters
+-------------
+
+Tree adapters can be used to translate between tree formats.
+Two adapters are provided by html5lib:
+
+* :func:`html5lib.treeadapters.genshi.to_genshi()` generates a `Genshi markup stream <https://genshi.edgewall.org/wiki/Documentation/streams.html>`_.
+* :func:`html5lib.treeadapters.sax.to_sax()` calls a SAX handler based on the tree.
+
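+For example, a sketch of driving a standard-library SAX ``ContentHandler``
+from a parsed tree (``to_sax`` expects an already-walked token stream plus
+the handler):
+
+.. code-block:: python
+
+ import html5lib
+ from html5lib.treeadapters import sax
+ from xml.sax.handler import ContentHandler
+
+ tree = html5lib.parse("<p>Hello")
+ walker = html5lib.getTreeWalker("etree")
+ # ContentHandler() is a do-nothing handler; substitute your own subclass.
+ sax.to_sax(walker(tree), ContentHandler())
+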
+Encoding discovery
+------------------
+
+Parsed trees are always Unicode. However, a large variety of input
+encodings are supported. The encoding of the document is determined in
+the following way (a short example follows the list):
+
+* The encoding may be explicitly specified by passing the name of the
+ encoding as the ``override_encoding`` keyword argument to the
+ :meth:`~html5lib.html5parser.HTMLParser.parse` method on
+ :class:`~html5lib.html5parser.HTMLParser` objects.
+
+* If no encoding is specified, the parser will attempt to detect the
+ encoding from a ``<meta>`` element in the first 1024 bytes of the
+ document (this is only a partial implementation of the current HTML
+ specification).
+
+* If no encoding can be found and the :mod:`chardet` library is available, an
+ attempt will be made to sniff the encoding from the byte pattern.
+
+* If all else fails, the default encoding will be used. This is usually
+ `Windows-1252 <http://en.wikipedia.org/wiki/Windows-1252>`_, which is
+ a common fallback used by Web browsers.
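+
+A short sketch of these options in use: the ``transport_encoding`` keyword
+argument is passed through :meth:`~html5lib.html5parser.HTMLParser.parse` to
+the input stream, and ``documentEncoding`` reports the encoding that was
+finally used:
+
+.. code-block:: python
+
+ import html5lib
+
+ parser = html5lib.HTMLParser()
+
+ # Detected from the <meta> element within the first 1024 bytes.
+ parser.parse(b"<meta charset='utf-8'><p>caf\xc3\xa9")
+ print(parser.documentEncoding)  # "utf-8"
+
+ # Supplied by the transport layer, e.g. an HTTP Content-Type header.
+ parser.parse(b"<p>caf\xe9", transport_encoding="windows-1252")
+ print(parser.documentEncoding)  # "windows-1252"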
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/flake8-run.sh b/testing/web-platform/tests/tools/third_party/html5lib/flake8-run.sh
new file mode 100755
index 0000000000..d926494699
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/flake8-run.sh
@@ -0,0 +1,9 @@
+#!/bin/bash -e
+
+if [[ ! -x $(which flake8) ]]; then
+ echo "fatal: flake8 not found on $PATH. Exiting."
+ exit 1
+fi
+
+flake8 `dirname $0`
+exit $?
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/__init__.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/__init__.py
new file mode 100644
index 0000000000..7b854f9900
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/__init__.py
@@ -0,0 +1,35 @@
+"""
+HTML parsing library based on the `WHATWG HTML specification
+<https://whatwg.org/html>`_. The parser is designed to be compatible with
+existing HTML found in the wild and implements well-defined error recovery that
+is largely compatible with modern desktop web browsers.
+
+Example usage::
+
+ import html5lib
+ with open("my_document.html", "rb") as f:
+ tree = html5lib.parse(f)
+
+For convenience, this module re-exports the following names:
+
+* :func:`~.html5parser.parse`
+* :func:`~.html5parser.parseFragment`
+* :class:`~.html5parser.HTMLParser`
+* :func:`~.treebuilders.getTreeBuilder`
+* :func:`~.treewalkers.getTreeWalker`
+* :func:`~.serializer.serialize`
+"""
+
+from __future__ import absolute_import, division, unicode_literals
+
+from .html5parser import HTMLParser, parse, parseFragment
+from .treebuilders import getTreeBuilder
+from .treewalkers import getTreeWalker
+from .serializer import serialize
+
+__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
+ "getTreeWalker", "serialize"]
+
+# this has to be at the top level, see how setup.py parses this
+#: Distribution version number.
+__version__ = "1.2-dev"
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_ihatexml.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_ihatexml.py
new file mode 100644
index 0000000000..3ff803c195
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_ihatexml.py
@@ -0,0 +1,289 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import re
+import warnings
+
+from .constants import DataLossWarning
+
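+# The character-class strings below are transcribed from the XML 1.0
+# specification. charStringToList() and the helper functions that follow were
+# used to generate the hard-coded regular expressions marked "output from the
+# above" further down in this module.
+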
+baseChar = """
+[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] |
+[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] |
+[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] |
+[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 |
+[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] |
+[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] |
+[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] |
+[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] |
+[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 |
+[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] |
+[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] |
+[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D |
+[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] |
+[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] |
+[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] |
+[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] |
+[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] |
+[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] |
+[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 |
+[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] |
+[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] |
+[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] |
+[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] |
+[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] |
+[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] |
+[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] |
+[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] |
+[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] |
+[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] |
+[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A |
+#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 |
+#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] |
+#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] |
+[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] |
+[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C |
+#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 |
+[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] |
+[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] |
+[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 |
+[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] |
+[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B |
+#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE |
+[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] |
+[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 |
+[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] |
+[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]"""
+
+ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]"""
+
+combiningCharacter = """
+[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] |
+[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 |
+[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] |
+[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] |
+#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] |
+[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] |
+[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 |
+#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] |
+[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC |
+[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] |
+#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] |
+[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] |
+[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] |
+[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] |
+[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] |
+[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] |
+#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 |
+[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] |
+#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] |
+[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] |
+[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] |
+#x3099 | #x309A"""
+
+digit = """
+[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] |
+[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] |
+[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] |
+[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]"""
+
+extender = """
+#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 |
+[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]"""
+
+letter = " | ".join([baseChar, ideographic])
+
+# The XML NameChar / name-start productions, without the ":"
+name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter,
+ extender])
+nameFirst = " | ".join([letter, "_"])
+
+reChar = re.compile(r"#x([\dA-F]{4})")
+reCharRange = re.compile(r"\[#x([\dA-F]{4})-#x([\dA-F]{4})\]")
+
+
+def charStringToList(chars):
+ charRanges = [item.strip() for item in chars.split(" | ")]
+ rv = []
+ for item in charRanges:
+ foundMatch = False
+ for regexp in (reChar, reCharRange):
+ match = regexp.match(item)
+ if match is not None:
+ rv.append([hexToInt(item) for item in match.groups()])
+ if len(rv[-1]) == 1:
+ rv[-1] = rv[-1] * 2
+ foundMatch = True
+ break
+ if not foundMatch:
+ assert len(item) == 1
+
+ rv.append([ord(item)] * 2)
+ rv = normaliseCharList(rv)
+ return rv
+
+
+def normaliseCharList(charList):
+ charList = sorted(charList)
+ for item in charList:
+ assert item[1] >= item[0]
+ rv = []
+ i = 0
+ while i < len(charList):
+ j = 1
+ rv.append(charList[i])
+ while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1:
+ rv[-1][1] = charList[i + j][1]
+ j += 1
+ i += j
+ return rv
+
+
+# We don't really support characters above the BMP :(
+max_unicode = int("FFFF", 16)
+
+
+def missingRanges(charList):
+ rv = []
+ if charList[0][0] != 0:
+ rv.append([0, charList[0][0] - 1])
+ for i, item in enumerate(charList[:-1]):
+ rv.append([item[1] + 1, charList[i + 1][0] - 1])
+ if charList[-1][1] != max_unicode:
+ rv.append([charList[-1][1] + 1, max_unicode])
+ return rv
+
+
+def listToRegexpStr(charList):
+ rv = []
+ for item in charList:
+ if item[0] == item[1]:
+ rv.append(escapeRegexp(chr(item[0])))
+ else:
+ rv.append(escapeRegexp(chr(item[0])) + "-" +
+ escapeRegexp(chr(item[1])))
+ return "[%s]" % "".join(rv)
+
+
+def hexToInt(hex_str):
+ return int(hex_str, 16)
+
+
+def escapeRegexp(string):
+ specialCharacters = (".", "^", "$", "*", "+", "?", "{", "}",
+ "[", "]", "|", "(", ")", "-")
+ for char in specialCharacters:
+ string = string.replace(char, "\\" + char)
+
+ return string
+
+# output from the above
+nonXmlNameBMPRegexp = re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa
+
+nonXmlNameFirstBMPRegexp = re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa
+
+# Simpler things
+nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\\-'()+,./:=?;!*#@$_%]")
+
+
+class InfosetFilter(object):
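+ """Coerce names and data that are legal in HTML but not in the XML infoset.
+
+ Characters that cannot appear in XML names are replaced by "UXXXXX"
+ escape sequences (see toXmlName/fromXmlName), and a DataLossWarning is
+ emitted whenever the input had to be altered.
+ """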
+ replacementRegexp = re.compile(r"U[\dA-F]{5,5}")
+
+ def __init__(self,
+ dropXmlnsLocalName=False,
+ dropXmlnsAttrNs=False,
+ preventDoubleDashComments=False,
+ preventDashAtCommentEnd=False,
+ replaceFormFeedCharacters=True,
+ preventSingleQuotePubid=False):
+
+ self.dropXmlnsLocalName = dropXmlnsLocalName
+ self.dropXmlnsAttrNs = dropXmlnsAttrNs
+
+ self.preventDoubleDashComments = preventDoubleDashComments
+ self.preventDashAtCommentEnd = preventDashAtCommentEnd
+
+ self.replaceFormFeedCharacters = replaceFormFeedCharacters
+
+ self.preventSingleQuotePubid = preventSingleQuotePubid
+
+ self.replaceCache = {}
+
+ def coerceAttribute(self, name, namespace=None):
+ if self.dropXmlnsLocalName and name.startswith("xmlns:"):
+ warnings.warn("Attributes cannot begin with xmlns", DataLossWarning)
+ return None
+ elif (self.dropXmlnsAttrNs and
+ namespace == "http://www.w3.org/2000/xmlns/"):
+ warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning)
+ return None
+ else:
+ return self.toXmlName(name)
+
+ def coerceElement(self, name):
+ return self.toXmlName(name)
+
+ def coerceComment(self, data):
+ if self.preventDoubleDashComments:
+ while "--" in data:
+ warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning)
+ data = data.replace("--", "- -")
+ if data.endswith("-"):
+ warnings.warn("Comments cannot end in a dash", DataLossWarning)
+ data += " "
+ return data
+
+ def coerceCharacters(self, data):
+ if self.replaceFormFeedCharacters:
+ for _ in range(data.count("\x0C")):
+ warnings.warn("Text cannot contain U+000C", DataLossWarning)
+ data = data.replace("\x0C", " ")
+ # Other non-xml characters
+ return data
+
+ def coercePubid(self, data):
+ dataOutput = data
+ for char in nonPubidCharRegexp.findall(data):
+ warnings.warn("Coercing non-XML pubid", DataLossWarning)
+ replacement = self.getReplacementCharacter(char)
+ dataOutput = dataOutput.replace(char, replacement)
+ if self.preventSingleQuotePubid and dataOutput.find("'") >= 0:
+ warnings.warn("Pubid cannot contain single quote", DataLossWarning)
+ dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'"))
+ return dataOutput
+
+ def toXmlName(self, name):
+ nameFirst = name[0]
+ nameRest = name[1:]
+ m = nonXmlNameFirstBMPRegexp.match(nameFirst)
+ if m:
+ warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning)
+ nameFirstOutput = self.getReplacementCharacter(nameFirst)
+ else:
+ nameFirstOutput = nameFirst
+
+ nameRestOutput = nameRest
+ replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest))
+ for char in replaceChars:
+ warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning)
+ replacement = self.getReplacementCharacter(char)
+ nameRestOutput = nameRestOutput.replace(char, replacement)
+ return nameFirstOutput + nameRestOutput
+
+ def getReplacementCharacter(self, char):
+ if char in self.replaceCache:
+ replacement = self.replaceCache[char]
+ else:
+ replacement = self.escapeChar(char)
+ return replacement
+
+ def fromXmlName(self, name):
+ for item in set(self.replacementRegexp.findall(name)):
+ name = name.replace(item, self.unescapeChar(item))
+ return name
+
+ def escapeChar(self, char):
+ replacement = "U%05X" % ord(char)
+ self.replaceCache[char] = replacement
+ return replacement
+
+ def unescapeChar(self, charcode):
+ return chr(int(charcode[1:], 16))
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_inputstream.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_inputstream.py
new file mode 100644
index 0000000000..0207dd211b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_inputstream.py
@@ -0,0 +1,918 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from six import text_type
+from six.moves import http_client, urllib
+
+import codecs
+import re
+from io import BytesIO, StringIO
+
+import webencodings
+
+from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase
+from .constants import _ReparseException
+from . import _utils
+
+# Non-unicode versions of constants for use in the pre-parser
+spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters])
+asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters])
+asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase])
+spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"])
+
+
+invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]" # noqa
+
+if _utils.supports_lone_surrogates:
+ # Use one extra step of indirection and create surrogates with
+ # eval. Not using this indirection would introduce an illegal
+ # unicode literal on platforms not supporting such lone
+ # surrogates.
+ assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1
+ invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] +
+ eval('"\\uD800-\\uDFFF"') + # pylint:disable=eval-used
+ "]")
+else:
+ invalid_unicode_re = re.compile(invalid_unicode_no_surrogate)
+
+non_bmp_invalid_codepoints = {0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
+ 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF,
+ 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE,
+ 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF,
+ 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
+ 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF,
+ 0x10FFFE, 0x10FFFF}
+
+ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005C\u005B-\u0060\u007B-\u007E]")
+
+# Cache for charsUntil()
+charsUntilRegEx = {}
+
+
+class BufferedStream(object):
+ """Buffering for streams that do not have buffering of their own
+
+ The buffer is implemented as a list of chunks on the assumption that
+ joining many strings will be slow since it is O(n**2)
+ """
+
+ def __init__(self, stream):
+ self.stream = stream
+ self.buffer = []
+ self.position = [-1, 0] # chunk number, offset
+
+ def tell(self):
+ pos = 0
+ for chunk in self.buffer[:self.position[0]]:
+ pos += len(chunk)
+ pos += self.position[1]
+ return pos
+
+ def seek(self, pos):
+ assert pos <= self._bufferedBytes()
+ offset = pos
+ i = 0
+ while len(self.buffer[i]) < offset:
+ offset -= len(self.buffer[i])
+ i += 1
+ self.position = [i, offset]
+
+ def read(self, bytes):
+ if not self.buffer:
+ return self._readStream(bytes)
+ elif (self.position[0] == len(self.buffer) and
+ self.position[1] == len(self.buffer[-1])):
+ return self._readStream(bytes)
+ else:
+ return self._readFromBuffer(bytes)
+
+ def _bufferedBytes(self):
+ return sum([len(item) for item in self.buffer])
+
+ def _readStream(self, bytes):
+ data = self.stream.read(bytes)
+ self.buffer.append(data)
+ self.position[0] += 1
+ self.position[1] = len(data)
+ return data
+
+ def _readFromBuffer(self, bytes):
+ remainingBytes = bytes
+ rv = []
+ bufferIndex = self.position[0]
+ bufferOffset = self.position[1]
+ while bufferIndex < len(self.buffer) and remainingBytes != 0:
+ assert remainingBytes > 0
+ bufferedData = self.buffer[bufferIndex]
+
+ if remainingBytes <= len(bufferedData) - bufferOffset:
+ bytesToRead = remainingBytes
+ self.position = [bufferIndex, bufferOffset + bytesToRead]
+ else:
+ bytesToRead = len(bufferedData) - bufferOffset
+ self.position = [bufferIndex, len(bufferedData)]
+ bufferIndex += 1
+ rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead])
+ remainingBytes -= bytesToRead
+
+ bufferOffset = 0
+
+ if remainingBytes:
+ rv.append(self._readStream(remainingBytes))
+
+ return b"".join(rv)
+
+
+def HTMLInputStream(source, **kwargs):
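+ """Return an appropriate input stream for source.
+
+ Text (unicode) sources are wrapped in HTMLUnicodeInputStream; byte
+ sources get HTMLBinaryInputStream, which additionally performs
+ character-encoding detection.
+ """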
+ # Work around Python bug #20007: read(0) closes the connection.
+ # http://bugs.python.org/issue20007
+ if (isinstance(source, http_client.HTTPResponse) or
+ # Also check for addinfourl wrapping HTTPResponse
+ (isinstance(source, urllib.response.addbase) and
+ isinstance(source.fp, http_client.HTTPResponse))):
+ isUnicode = False
+ elif hasattr(source, "read"):
+ isUnicode = isinstance(source.read(0), text_type)
+ else:
+ isUnicode = isinstance(source, text_type)
+
+ if isUnicode:
+ encodings = [x for x in kwargs if x.endswith("_encoding")]
+ if encodings:
+ raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings)
+
+ return HTMLUnicodeInputStream(source, **kwargs)
+ else:
+ return HTMLBinaryInputStream(source, **kwargs)
+
+
+class HTMLUnicodeInputStream(object):
+ """Provides a unicode stream of characters to the HTMLTokenizer.
+
+ This class takes care of character encoding and removing or replacing
+ incorrect byte-sequences and also provides column and line tracking.
+
+ """
+
+ _defaultChunkSize = 10240
+
+ def __init__(self, source):
+ """Initialises the HTMLInputStream.
+
+ HTMLInputStream(source, [encoding]) -> Normalized stream from source
+ for use by html5lib.
+
+ source can be either a file-object, local filename or a string.
+
+ The optional encoding parameter must be a string that indicates
+ the encoding. If specified, that encoding will be used,
+ regardless of any BOM or later declaration (such as in a meta
+ element)
+
+ """
+
+ if not _utils.supports_lone_surrogates:
+ # Such platforms will have already checked for such
+ # surrogate errors, so no need to do this checking.
+ self.reportCharacterErrors = None
+ elif len("\U0010FFFF") == 1:
+ self.reportCharacterErrors = self.characterErrorsUCS4
+ else:
+ self.reportCharacterErrors = self.characterErrorsUCS2
+
+ # List of where new lines occur
+ self.newLines = [0]
+
+ self.charEncoding = (lookupEncoding("utf-8"), "certain")
+ self.dataStream = self.openStream(source)
+
+ self.reset()
+
+ def reset(self):
+ self.chunk = ""
+ self.chunkSize = 0
+ self.chunkOffset = 0
+ self.errors = []
+
+ # number of (complete) lines in previous chunks
+ self.prevNumLines = 0
+ # number of columns in the last line of the previous chunk
+ self.prevNumCols = 0
+
+ # Deal with CR LF and surrogates split over chunk boundaries
+ self._bufferedCharacter = None
+
+ def openStream(self, source):
+ """Produces a file object from source.
+
+ source can be either a file object, local filename or a string.
+
+ """
+ # Already a file object
+ if hasattr(source, 'read'):
+ stream = source
+ else:
+ stream = StringIO(source)
+
+ return stream
+
+ def _position(self, offset):
+ chunk = self.chunk
+ nLines = chunk.count('\n', 0, offset)
+ positionLine = self.prevNumLines + nLines
+ lastLinePos = chunk.rfind('\n', 0, offset)
+ if lastLinePos == -1:
+ positionColumn = self.prevNumCols + offset
+ else:
+ positionColumn = offset - (lastLinePos + 1)
+ return (positionLine, positionColumn)
+
+ def position(self):
+ """Returns (line, col) of the current position in the stream."""
+ line, col = self._position(self.chunkOffset)
+ return (line + 1, col)
+
+ def char(self):
+ """ Read one character from the stream or queue if available. Return
+ EOF when EOF is reached.
+ """
+ # Read a new chunk from the input stream if necessary
+ if self.chunkOffset >= self.chunkSize:
+ if not self.readChunk():
+ return EOF
+
+ chunkOffset = self.chunkOffset
+ char = self.chunk[chunkOffset]
+ self.chunkOffset = chunkOffset + 1
+
+ return char
+
+ def readChunk(self, chunkSize=None):
+ if chunkSize is None:
+ chunkSize = self._defaultChunkSize
+
+ self.prevNumLines, self.prevNumCols = self._position(self.chunkSize)
+
+ self.chunk = ""
+ self.chunkSize = 0
+ self.chunkOffset = 0
+
+ data = self.dataStream.read(chunkSize)
+
+ # Deal with CR LF and surrogates broken across chunks
+ if self._bufferedCharacter:
+ data = self._bufferedCharacter + data
+ self._bufferedCharacter = None
+ elif not data:
+ # We have no more data, bye-bye stream
+ return False
+
+ if len(data) > 1:
+ lastv = ord(data[-1])
+ if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF:
+ self._bufferedCharacter = data[-1]
+ data = data[:-1]
+
+ if self.reportCharacterErrors:
+ self.reportCharacterErrors(data)
+
+ # Replace invalid characters
+ data = data.replace("\r\n", "\n")
+ data = data.replace("\r", "\n")
+
+ self.chunk = data
+ self.chunkSize = len(data)
+
+ return True
+
+ def characterErrorsUCS4(self, data):
+ for _ in range(len(invalid_unicode_re.findall(data))):
+ self.errors.append("invalid-codepoint")
+
+ def characterErrorsUCS2(self, data):
+ # Someone picked the wrong compile option
+ # You lose
+ skip = False
+ for match in invalid_unicode_re.finditer(data):
+ if skip:
+ continue
+ codepoint = ord(match.group())
+ pos = match.start()
+ # Pretty sure there should be endianness issues here
+ if _utils.isSurrogatePair(data[pos:pos + 2]):
+ # We have a surrogate pair!
+ char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2])
+ if char_val in non_bmp_invalid_codepoints:
+ self.errors.append("invalid-codepoint")
+ skip = True
+ elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and
+ pos == len(data) - 1):
+ self.errors.append("invalid-codepoint")
+ else:
+ skip = False
+ self.errors.append("invalid-codepoint")
+
+ def charsUntil(self, characters, opposite=False):
+ """ Returns a string of characters from the stream up to but not
+ including any character in 'characters' or EOF. 'characters' must be
+ a container that supports the 'in' method and iteration over its
+ characters.
+ """
+
+ # Use a cache of regexps to find the required characters
+ try:
+ chars = charsUntilRegEx[(characters, opposite)]
+ except KeyError:
+ if __debug__:
+ for c in characters:
+ assert(ord(c) < 128)
+ regex = "".join(["\\x%02x" % ord(c) for c in characters])
+ if not opposite:
+ regex = "^%s" % regex
+ chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex)
+
+ rv = []
+
+ while True:
+ # Find the longest matching prefix
+ m = chars.match(self.chunk, self.chunkOffset)
+ if m is None:
+ # If nothing matched, and it wasn't because we ran out of chunk,
+ # then stop
+ if self.chunkOffset != self.chunkSize:
+ break
+ else:
+ end = m.end()
+ # If not the whole chunk matched, return everything
+ # up to the part that didn't match
+ if end != self.chunkSize:
+ rv.append(self.chunk[self.chunkOffset:end])
+ self.chunkOffset = end
+ break
+ # If the whole remainder of the chunk matched,
+ # use it all and read the next chunk
+ rv.append(self.chunk[self.chunkOffset:])
+ if not self.readChunk():
+ # Reached EOF
+ break
+
+ r = "".join(rv)
+ return r
+
+ def unget(self, char):
+ # Only one character is allowed to be ungotten at once - it must
+ # be consumed again before any further call to unget
+ if char is not EOF:
+ if self.chunkOffset == 0:
+ # unget is called quite rarely, so it's a good idea to do
+ # more work here if it saves a bit of work in the frequently
+ # called char and charsUntil.
+ # So, just prepend the ungotten character onto the current
+ # chunk:
+ self.chunk = char + self.chunk
+ self.chunkSize += 1
+ else:
+ self.chunkOffset -= 1
+ assert self.chunk[self.chunkOffset] == char
+
+
+class HTMLBinaryInputStream(HTMLUnicodeInputStream):
+ """Provides a unicode stream of characters to the HTMLTokenizer.
+
+ This class takes care of character encoding and removing or replacing
+ incorrect byte-sequences and also provides column and line tracking.
+
+ """
+
+ def __init__(self, source, override_encoding=None, transport_encoding=None,
+ same_origin_parent_encoding=None, likely_encoding=None,
+ default_encoding="windows-1252", useChardet=True):
+ """Initialises the HTMLInputStream.
+
+ HTMLInputStream(source, [encoding]) -> Normalized stream from source
+ for use by html5lib.
+
+ source can be either a file-object, local filename or a string.
+
+ The optional encoding parameter must be a string that indicates
+ the encoding. If specified, that encoding will be used,
+ regardless of any BOM or later declaration (such as in a meta
+ element)
+
+ """
+ # Raw byte stream; either a file-like object or bytes wrapped in BytesIO.
+ self.rawStream = self.openStream(source)
+
+ HTMLUnicodeInputStream.__init__(self, self.rawStream)
+
+ # Encoding Information
+ # Number of bytes to use when looking for a meta element with
+ # encoding information
+ self.numBytesMeta = 1024
+ # Number of bytes to use when detecting the encoding with chardet
+ self.numBytesChardet = 100
+ # Things from args
+ self.override_encoding = override_encoding
+ self.transport_encoding = transport_encoding
+ self.same_origin_parent_encoding = same_origin_parent_encoding
+ self.likely_encoding = likely_encoding
+ self.default_encoding = default_encoding
+
+ # Determine encoding
+ self.charEncoding = self.determineEncoding(useChardet)
+ assert self.charEncoding[0] is not None
+
+ # Call superclass
+ self.reset()
+
+ def reset(self):
+ self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace')
+ HTMLUnicodeInputStream.reset(self)
+
+ def openStream(self, source):
+ """Produces a file object from source.
+
+ source can be either a file object, local filename or a string.
+
+ """
+ # Already a file object
+ if hasattr(source, 'read'):
+ stream = source
+ else:
+ stream = BytesIO(source)
+
+ try:
+ stream.seek(stream.tell())
+ except Exception:
+ stream = BufferedStream(stream)
+
+ return stream
+
+ def determineEncoding(self, chardet=True):
+ # BOMs take precedence over everything
+ # This will also read past the BOM if present
+ charEncoding = self.detectBOM(), "certain"
+ if charEncoding[0] is not None:
+ return charEncoding
+
+ # If we've been overridden, we've been overridden
+ charEncoding = lookupEncoding(self.override_encoding), "certain"
+ if charEncoding[0] is not None:
+ return charEncoding
+
+ # Now check the transport layer
+ charEncoding = lookupEncoding(self.transport_encoding), "certain"
+ if charEncoding[0] is not None:
+ return charEncoding
+
+ # Look for meta elements with encoding information
+ charEncoding = self.detectEncodingMeta(), "tentative"
+ if charEncoding[0] is not None:
+ return charEncoding
+
+ # Parent document encoding
+ charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative"
+ if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"):
+ return charEncoding
+
+ # "likely" encoding
+ charEncoding = lookupEncoding(self.likely_encoding), "tentative"
+ if charEncoding[0] is not None:
+ return charEncoding
+
+ # Guess with chardet, if available
+ if chardet:
+ try:
+ from chardet.universaldetector import UniversalDetector
+ except ImportError:
+ pass
+ else:
+ buffers = []
+ detector = UniversalDetector()
+ while not detector.done:
+ buffer = self.rawStream.read(self.numBytesChardet)
+ assert isinstance(buffer, bytes)
+ if not buffer:
+ break
+ buffers.append(buffer)
+ detector.feed(buffer)
+ detector.close()
+ encoding = lookupEncoding(detector.result['encoding'])
+ self.rawStream.seek(0)
+ if encoding is not None:
+ return encoding, "tentative"
+
+ # Try the default encoding
+ charEncoding = lookupEncoding(self.default_encoding), "tentative"
+ if charEncoding[0] is not None:
+ return charEncoding
+
+ # Fallback to html5lib's default if even that hasn't worked
+ return lookupEncoding("windows-1252"), "tentative"
+
+ def changeEncoding(self, newEncoding):
+ assert self.charEncoding[1] != "certain"
+ newEncoding = lookupEncoding(newEncoding)
+ if newEncoding is None:
+ return
+ if newEncoding.name in ("utf-16be", "utf-16le"):
+ newEncoding = lookupEncoding("utf-8")
+ assert newEncoding is not None
+ elif newEncoding == self.charEncoding[0]:
+ self.charEncoding = (self.charEncoding[0], "certain")
+ else:
+ self.rawStream.seek(0)
+ self.charEncoding = (newEncoding, "certain")
+ self.reset()
+ raise _ReparseException("Encoding changed from %s to %s" % (self.charEncoding[0], newEncoding))
+
+ def detectBOM(self):
+ """Attempts to detect at BOM at the start of the stream. If
+ an encoding can be determined from the BOM return the name of the
+ encoding otherwise return None"""
+ bomDict = {
+ codecs.BOM_UTF8: 'utf-8',
+ codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be',
+ codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be'
+ }
+
+ # Go to beginning of file and read in 4 bytes
+ string = self.rawStream.read(4)
+ assert isinstance(string, bytes)
+
+ # Try detecting the BOM using bytes from the string
+ encoding = bomDict.get(string[:3]) # UTF-8
+ seek = 3
+ if not encoding:
+ # Need to detect UTF-32 before UTF-16
+ encoding = bomDict.get(string) # UTF-32
+ seek = 4
+ if not encoding:
+ encoding = bomDict.get(string[:2]) # UTF-16
+ seek = 2
+
+ # Set the read position past the BOM if one was found, otherwise
+ # set it to the start of the stream
+ if encoding:
+ self.rawStream.seek(seek)
+ return lookupEncoding(encoding)
+ else:
+ self.rawStream.seek(0)
+ return None
+
+ def detectEncodingMeta(self):
+ """Report the encoding declared by the meta element
+ """
+ buffer = self.rawStream.read(self.numBytesMeta)
+ assert isinstance(buffer, bytes)
+ parser = EncodingParser(buffer)
+ self.rawStream.seek(0)
+ encoding = parser.getEncoding()
+
+ if encoding is not None and encoding.name in ("utf-16be", "utf-16le"):
+ encoding = lookupEncoding("utf-8")
+
+ return encoding
+
+
+class EncodingBytes(bytes):
+ """String-like object with an associated position and various extra methods
+ If the position is ever greater than the string length then an exception is
+ raised"""
+ def __new__(self, value):
+ assert isinstance(value, bytes)
+ return bytes.__new__(self, value.lower())
+
+ def __init__(self, value):
+ # pylint:disable=unused-argument
+ self._position = -1
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ p = self._position = self._position + 1
+ if p >= len(self):
+ raise StopIteration
+ elif p < 0:
+ raise TypeError
+ return self[p:p + 1]
+
+ def next(self):
+ # Py2 compat
+ return self.__next__()
+
+ def previous(self):
+ p = self._position
+ if p >= len(self):
+ raise StopIteration
+ elif p < 0:
+ raise TypeError
+ self._position = p = p - 1
+ return self[p:p + 1]
+
+ def setPosition(self, position):
+ if self._position >= len(self):
+ raise StopIteration
+ self._position = position
+
+ def getPosition(self):
+ if self._position >= len(self):
+ raise StopIteration
+ if self._position >= 0:
+ return self._position
+ else:
+ return None
+
+ position = property(getPosition, setPosition)
+
+ def getCurrentByte(self):
+ return self[self.position:self.position + 1]
+
+ currentByte = property(getCurrentByte)
+
+ def skip(self, chars=spaceCharactersBytes):
+ """Skip past a list of characters"""
+ p = self.position # use property for the error-checking
+ while p < len(self):
+ c = self[p:p + 1]
+ if c not in chars:
+ self._position = p
+ return c
+ p += 1
+ self._position = p
+ return None
+
+ def skipUntil(self, chars):
+ p = self.position
+ while p < len(self):
+ c = self[p:p + 1]
+ if c in chars:
+ self._position = p
+ return c
+ p += 1
+ self._position = p
+ return None
+
+ def matchBytes(self, bytes):
+ """Look for a sequence of bytes at the start of a string. If the bytes
+ are found return True and advance the position to the byte after the
+ match. Otherwise return False and leave the position alone"""
+ rv = self.startswith(bytes, self.position)
+ if rv:
+ self.position += len(bytes)
+ return rv
+
+ def jumpTo(self, bytes):
+ """Look for the next sequence of bytes matching a given sequence. If
+ a match is found advance the position to the last byte of the match"""
+ try:
+ self._position = self.index(bytes, self.position) + len(bytes) - 1
+ except ValueError:
+ raise StopIteration
+ return True
+
+
+class EncodingParser(object):
+ """Mini parser for detecting character encoding from meta elements"""
+
+ def __init__(self, data):
+ """string - the data to work on for encoding detection"""
+ self.data = EncodingBytes(data)
+ self.encoding = None
+
+ def getEncoding(self):
+ if b"<meta" not in self.data:
+ return None
+
+ methodDispatch = (
+ (b"<!--", self.handleComment),
+ (b"<meta", self.handleMeta),
+ (b"</", self.handlePossibleEndTag),
+ (b"<!", self.handleOther),
+ (b"<?", self.handleOther),
+ (b"<", self.handlePossibleStartTag))
+ for _ in self.data:
+ keepParsing = True
+ try:
+ self.data.jumpTo(b"<")
+ except StopIteration:
+ break
+ for key, method in methodDispatch:
+ if self.data.matchBytes(key):
+ try:
+ keepParsing = method()
+ break
+ except StopIteration:
+ keepParsing = False
+ break
+ if not keepParsing:
+ break
+
+ return self.encoding
+
+ def handleComment(self):
+ """Skip over comments"""
+ return self.data.jumpTo(b"-->")
+
+ def handleMeta(self):
+ if self.data.currentByte not in spaceCharactersBytes:
+ # <meta is not followed by a space, so just keep going
+ return True
+ # We have a valid meta element we want to search for attributes
+ hasPragma = False
+ pendingEncoding = None
+ while True:
+ # Try to find the next attribute after the current position
+ attr = self.getAttribute()
+ if attr is None:
+ return True
+ else:
+ if attr[0] == b"http-equiv":
+ hasPragma = attr[1] == b"content-type"
+ if hasPragma and pendingEncoding is not None:
+ self.encoding = pendingEncoding
+ return False
+ elif attr[0] == b"charset":
+ tentativeEncoding = attr[1]
+ codec = lookupEncoding(tentativeEncoding)
+ if codec is not None:
+ self.encoding = codec
+ return False
+ elif attr[0] == b"content":
+ contentParser = ContentAttrParser(EncodingBytes(attr[1]))
+ tentativeEncoding = contentParser.parse()
+ if tentativeEncoding is not None:
+ codec = lookupEncoding(tentativeEncoding)
+ if codec is not None:
+ if hasPragma:
+ self.encoding = codec
+ return False
+ else:
+ pendingEncoding = codec
+
+ def handlePossibleStartTag(self):
+ return self.handlePossibleTag(False)
+
+ def handlePossibleEndTag(self):
+ next(self.data)
+ return self.handlePossibleTag(True)
+
+ def handlePossibleTag(self, endTag):
+ data = self.data
+ if data.currentByte not in asciiLettersBytes:
+ # If the next byte is not an ascii letter either ignore this
+ # fragment (possible start tag case) or treat it according to
+ # handleOther
+ if endTag:
+ data.previous()
+ self.handleOther()
+ return True
+
+ c = data.skipUntil(spacesAngleBrackets)
+ if c == b"<":
+ # return to the first step in the overall "two step" algorithm
+ # reprocessing the < byte
+ data.previous()
+ else:
+ # Read all attributes
+ attr = self.getAttribute()
+ while attr is not None:
+ attr = self.getAttribute()
+ return True
+
+ def handleOther(self):
+ return self.data.jumpTo(b">")
+
+ def getAttribute(self):
+ """Return a (name, value) pair for the next attribute in the stream,
+ if one is found, or None"""
+ data = self.data
+ # Step 1 (skip chars)
+ c = data.skip(spaceCharactersBytes | frozenset([b"/"]))
+ assert c is None or len(c) == 1
+ # Step 2
+ if c in (b">", None):
+ return None
+ # Step 3
+ attrName = []
+ attrValue = []
+ # Step 4 attribute name
+ while True:
+ if c == b"=" and attrName:
+ break
+ elif c in spaceCharactersBytes:
+ # Step 6!
+ c = data.skip()
+ break
+ elif c in (b"/", b">"):
+ return b"".join(attrName), b""
+ elif c in asciiUppercaseBytes:
+ attrName.append(c.lower())
+ elif c is None:
+ return None
+ else:
+ attrName.append(c)
+ # Step 5
+ c = next(data)
+ # Step 7
+ if c != b"=":
+ data.previous()
+ return b"".join(attrName), b""
+ # Step 8
+ next(data)
+ # Step 9
+ c = data.skip()
+ # Step 10
+ if c in (b"'", b'"'):
+ # 10.1
+ quoteChar = c
+ while True:
+ # 10.2
+ c = next(data)
+ # 10.3
+ if c == quoteChar:
+ next(data)
+ return b"".join(attrName), b"".join(attrValue)
+ # 10.4
+ elif c in asciiUppercaseBytes:
+ attrValue.append(c.lower())
+ # 10.5
+ else:
+ attrValue.append(c)
+ elif c == b">":
+ return b"".join(attrName), b""
+ elif c in asciiUppercaseBytes:
+ attrValue.append(c.lower())
+ elif c is None:
+ return None
+ else:
+ attrValue.append(c)
+ # Step 11
+ while True:
+ c = next(data)
+ if c in spacesAngleBrackets:
+ return b"".join(attrName), b"".join(attrValue)
+ elif c in asciiUppercaseBytes:
+ attrValue.append(c.lower())
+ elif c is None:
+ return None
+ else:
+ attrValue.append(c)
+
+
+class ContentAttrParser(object):
+ def __init__(self, data):
+ assert isinstance(data, bytes)
+ self.data = data
+
+ def parse(self):
+ try:
+ # Check if the attr name is charset
+ # otherwise return
+ self.data.jumpTo(b"charset")
+ self.data.position += 1
+ self.data.skip()
+ if not self.data.currentByte == b"=":
+ # If there is no = sign keep looking for attrs
+ return None
+ self.data.position += 1
+ self.data.skip()
+ # Look for an encoding between matching quote marks
+ if self.data.currentByte in (b'"', b"'"):
+ quoteMark = self.data.currentByte
+ self.data.position += 1
+ oldPosition = self.data.position
+ if self.data.jumpTo(quoteMark):
+ return self.data[oldPosition:self.data.position]
+ else:
+ return None
+ else:
+ # Unquoted value
+ oldPosition = self.data.position
+ try:
+ self.data.skipUntil(spaceCharactersBytes)
+ return self.data[oldPosition:self.data.position]
+ except StopIteration:
+ # Return the whole remaining value
+ return self.data[oldPosition:]
+ except StopIteration:
+ return None
+
+
+def lookupEncoding(encoding):
+ """Return the python codec name corresponding to an encoding or None if the
+ string doesn't correspond to a valid encoding."""
+ if isinstance(encoding, bytes):
+ try:
+ encoding = encoding.decode("ascii")
+ except UnicodeDecodeError:
+ return None
+
+ if encoding is not None:
+ try:
+ return webencodings.lookup(encoding)
+ except AttributeError:
+ return None
+ else:
+ return None
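
(A minimal sketch of the meta prescan implemented above, with EncodingParser, ContentAttrParser and lookupEncoding working together; it assumes the html5lib package these files belong to is importable.)

    from html5lib._inputstream import EncodingParser

    # Direct charset attribute.
    print(EncodingParser(b'<meta charset="windows-1252">').getEncoding().name)
    # -> windows-1252

    # http-equiv pragma: the charset is pulled out of content= by ContentAttrParser.
    print(EncodingParser(
        b'<meta http-equiv="content-type" content="text/html; charset=utf-8">'
    ).getEncoding().name)
    # -> utf-8
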
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_tokenizer.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_tokenizer.py
new file mode 100644
index 0000000000..4748a19795
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_tokenizer.py
@@ -0,0 +1,1735 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from six import unichr as chr
+
+from collections import deque, OrderedDict
+from sys import version_info
+
+from .constants import spaceCharacters
+from .constants import entities
+from .constants import asciiLetters, asciiUpper2Lower
+from .constants import digits, hexDigits, EOF
+from .constants import tokenTypes, tagTokenTypes
+from .constants import replacementCharacters
+
+from ._inputstream import HTMLInputStream
+
+from ._trie import Trie
+
+entitiesTrie = Trie(entities)
+
+if version_info >= (3, 7):
+ attributeMap = dict
+else:
+ attributeMap = OrderedDict
+
+
+class HTMLTokenizer(object):
+ """ This class takes care of tokenizing HTML.
+
+ * self.currentToken
+ Holds the token that is currently being processed.
+
+ * self.state
+ Holds a reference to the method to be invoked... XXX
+
+ * self.stream
+ Points to the HTMLInputStream object.
+ """
+
+ def __init__(self, stream, parser=None, **kwargs):
+
+ self.stream = HTMLInputStream(stream, **kwargs)
+ self.parser = parser
+
+ # Setup the initial tokenizer state
+ self.escapeFlag = False
+ self.lastFourChars = []
+ self.state = self.dataState
+ self.escape = False
+
+ # The current token being created
+ self.currentToken = None
+ super(HTMLTokenizer, self).__init__()
+
+ def __iter__(self):
+ """ This is where the magic happens.
+
+ We do our usual processing through the states and when we have a token
+ to return we yield the token, which pauses processing until the next token
+ is requested.
+ """
+ self.tokenQueue = deque([])
+ # Start processing. When EOF is reached self.state will return False
+ # instead of True and the loop will terminate.
+ while self.state():
+ while self.stream.errors:
+ yield {"type": tokenTypes["ParseError"], "data": self.stream.errors.pop(0)}
+ while self.tokenQueue:
+ yield self.tokenQueue.popleft()
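+
(A minimal usage sketch for the iterator protocol described above, assuming the html5lib package these files belong to is importable; tokens come out as dicts keyed by the tokenTypes constants.)

    from html5lib._tokenizer import HTMLTokenizer
    from html5lib.constants import tokenTypes

    names = {value: name for name, value in tokenTypes.items()}
    for token in HTMLTokenizer('<p class="intro">Hi</p>'):
        print(names[token["type"]], token.get("name", token.get("data")))
    # StartTag p
    # Characters Hi
    # EndTag p
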
+
+ def consumeNumberEntity(self, isHex):
+ """This function returns either U+FFFD or the character based on the
+ decimal or hexadecimal representation. It also discards ";" if present.
+ If it is not present, a "numeric-entity-without-semicolon" ParseError token is appended to self.tokenQueue.
+ """
+
+ allowed = digits
+ radix = 10
+ if isHex:
+ allowed = hexDigits
+ radix = 16
+
+ charStack = []
+
+ # Consume all the characters that are in range while making sure we
+ # don't hit an EOF.
+ c = self.stream.char()
+ while c in allowed and c is not EOF:
+ charStack.append(c)
+ c = self.stream.char()
+
+ # Convert the set of characters consumed to an int.
+ charAsInt = int("".join(charStack), radix)
+
+ # Certain characters get replaced with others
+ if charAsInt in replacementCharacters:
+ char = replacementCharacters[charAsInt]
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "illegal-codepoint-for-numeric-entity",
+ "datavars": {"charAsInt": charAsInt}})
+ elif ((0xD800 <= charAsInt <= 0xDFFF) or
+ (charAsInt > 0x10FFFF)):
+ char = "\uFFFD"
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "illegal-codepoint-for-numeric-entity",
+ "datavars": {"charAsInt": charAsInt}})
+ else:
+ # Should speed up this check somehow (e.g. move the set to a constant)
+ if ((0x0001 <= charAsInt <= 0x0008) or
+ (0x000E <= charAsInt <= 0x001F) or
+ (0x007F <= charAsInt <= 0x009F) or
+ (0xFDD0 <= charAsInt <= 0xFDEF) or
+ charAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE,
+ 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
+ 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE,
+ 0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE,
+ 0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE,
+ 0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE,
+ 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
+ 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE,
+ 0xFFFFF, 0x10FFFE, 0x10FFFF])):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data":
+ "illegal-codepoint-for-numeric-entity",
+ "datavars": {"charAsInt": charAsInt}})
+ try:
+ # Try/except needed as UCS-2 Python builds' unichr only works
+ # within the BMP.
+ char = chr(charAsInt)
+ except ValueError:
+ v = charAsInt - 0x10000
+ char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF))
+
+ # Discard the ; if present. Otherwise, put it back on the queue and
+ # invoke parseError on parser.
+ if c != ";":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "numeric-entity-without-semicolon"})
+ self.stream.unget(c)
+
+ return char
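+
(Sketch of consumeNumberEntity's behaviour via the public iterator, assuming the package is importable: decimal and hex references are decoded, and code points listed in replacementCharacters, such as 0x80, are remapped while a ParseError token is queued alongside.)

    from html5lib._tokenizer import HTMLTokenizer
    from html5lib.constants import tokenTypes

    chars = "".join(t["data"] for t in HTMLTokenizer("&#65;&#x80;")
                    if t["type"] == tokenTypes["Characters"])
    print(chars)   # "A\u20ac" -- 0x80 becomes U+20AC per replacementCharacters
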
+
+ def consumeEntity(self, allowedChar=None, fromAttribute=False):
+ # Initialise to the default output for when no entity is matched
+ output = "&"
+
+ charStack = [self.stream.char()]
+ if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or
+ (allowedChar is not None and allowedChar == charStack[0])):
+ self.stream.unget(charStack[0])
+
+ elif charStack[0] == "#":
+ # Read the next character to see if it's hex or decimal
+ hex = False
+ charStack.append(self.stream.char())
+ if charStack[-1] in ("x", "X"):
+ hex = True
+ charStack.append(self.stream.char())
+
+ # charStack[-1] should be the first digit
+ if (hex and charStack[-1] in hexDigits) \
+ or (not hex and charStack[-1] in digits):
+ # At least one digit found, so consume the whole number
+ self.stream.unget(charStack[-1])
+ output = self.consumeNumberEntity(hex)
+ else:
+ # No digits found
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "expected-numeric-entity"})
+ self.stream.unget(charStack.pop())
+ output = "&" + "".join(charStack)
+
+ else:
+ # At this point in the process we might have a named entity. Entities
+ # are stored in the global variable "entities".
+ #
+ # Consume characters and compare these to a substring of the
+ # entity names in the list until the substring no longer matches.
+ while (charStack[-1] is not EOF):
+ if not entitiesTrie.has_keys_with_prefix("".join(charStack)):
+ break
+ charStack.append(self.stream.char())
+
+ # At this point we have a string that starts with some characters
+ # that may match an entity.
+ # Try to find the longest entity the string will match, to take care
+ # of &noti for instance.
+ try:
+ entityName = entitiesTrie.longest_prefix("".join(charStack[:-1]))
+ entityLength = len(entityName)
+ except KeyError:
+ entityName = None
+
+ if entityName is not None:
+ if entityName[-1] != ";":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "named-entity-without-semicolon"})
+ if (entityName[-1] != ";" and fromAttribute and
+ (charStack[entityLength] in asciiLetters or
+ charStack[entityLength] in digits or
+ charStack[entityLength] == "=")):
+ self.stream.unget(charStack.pop())
+ output = "&" + "".join(charStack)
+ else:
+ output = entities[entityName]
+ self.stream.unget(charStack.pop())
+ output += "".join(charStack[entityLength:])
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-named-entity"})
+ self.stream.unget(charStack.pop())
+ output = "&" + "".join(charStack)
+
+ if fromAttribute:
+ self.currentToken["data"][-1][1] += output
+ else:
+ if output in spaceCharacters:
+ tokenType = "SpaceCharacters"
+ else:
+ tokenType = "Characters"
+ self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output})
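+
(Sketch of the longest-prefix matching described in the comments above, assuming the package is importable: a named reference without ";" still resolves, with a "named-entity-without-semicolon" ParseError token emitted alongside.)

    from html5lib._tokenizer import HTMLTokenizer
    from html5lib.constants import tokenTypes

    text = "".join(t["data"] for t in HTMLTokenizer("I'm &notit; sure")
                   if t["type"] == tokenTypes["Characters"])
    print(text)   # I'm ¬it; sure
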
+
+ def processEntityInAttribute(self, allowedChar):
+ """This method replaces the need for "entityInAttributeValueState".
+ """
+ self.consumeEntity(allowedChar=allowedChar, fromAttribute=True)
+
+ def emitCurrentToken(self):
+ """This method is a generic handler for emitting the tags. It also sets
+ the state to "data" because that's what's needed after a token has been
+ emitted.
+ """
+ token = self.currentToken
+ # Add token to the queue to be yielded
+ if (token["type"] in tagTokenTypes):
+ token["name"] = token["name"].translate(asciiUpper2Lower)
+ if token["type"] == tokenTypes["StartTag"]:
+ raw = token["data"]
+ data = attributeMap(raw)
+ if len(raw) > len(data):
+ # we had some duplicated attributes; fix so the first one wins
+ data.update(raw[::-1])
+ token["data"] = data
+
+ if token["type"] == tokenTypes["EndTag"]:
+ if token["data"]:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "attributes-in-end-tag"})
+ if token["selfClosing"]:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "self-closing-flag-on-end-tag"})
+ self.tokenQueue.append(token)
+ self.state = self.dataState
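+
(Sketch of the duplicate-attribute handling in emitCurrentToken, assuming the package is importable: rebuilding the dict from the reversed raw list makes the first occurrence win.)

    from html5lib._tokenizer import HTMLTokenizer
    from html5lib.constants import tokenTypes

    start = next(t for t in HTMLTokenizer('<a href="first" href="second">')
                 if t["type"] == tokenTypes["StartTag"])
    print(start["data"])   # first occurrence wins: {'href': 'first'}
                           # (a duplicate-attribute ParseError is also queued)
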
+
+ # Below are the various tokenizer states worked out.
+ def dataState(self):
+ data = self.stream.char()
+ if data == "&":
+ self.state = self.entityDataState
+ elif data == "<":
+ self.state = self.tagOpenState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\u0000"})
+ elif data is EOF:
+ # Tokenization ends.
+ return False
+ elif data in spaceCharacters:
+ # Directly after emitting a token you switch back to the "data
+ # state". At that point spaceCharacters are important so they are
+ # emitted separately.
+ self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data":
+ data + self.stream.charsUntil(spaceCharacters, True)})
+ # No need to update lastFourChars here, since the first space will
+ # have already been appended to lastFourChars and will have broken
+ # any <!-- or --> sequences
+ else:
+ chars = self.stream.charsUntil(("&", "<", "\u0000"))
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+ data + chars})
+ return True
+
+ def entityDataState(self):
+ self.consumeEntity()
+ self.state = self.dataState
+ return True
+
+ def rcdataState(self):
+ data = self.stream.char()
+ if data == "&":
+ self.state = self.characterReferenceInRcdata
+ elif data == "<":
+ self.state = self.rcdataLessThanSignState
+ elif data == EOF:
+ # Tokenization ends.
+ return False
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ elif data in spaceCharacters:
+ # Directly after emitting a token you switch back to the "data
+ # state". At that point spaceCharacters are important so they are
+ # emitted separately.
+ self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data":
+ data + self.stream.charsUntil(spaceCharacters, True)})
+ # No need to update lastFourChars here, since the first space will
+ # have already been appended to lastFourChars and will have broken
+ # any <!-- or --> sequences
+ else:
+ chars = self.stream.charsUntil(("&", "<", "\u0000"))
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+ data + chars})
+ return True
+
+ def characterReferenceInRcdata(self):
+ self.consumeEntity()
+ self.state = self.rcdataState
+ return True
+
+ def rawtextState(self):
+ data = self.stream.char()
+ if data == "<":
+ self.state = self.rawtextLessThanSignState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ elif data == EOF:
+ # Tokenization ends.
+ return False
+ else:
+ chars = self.stream.charsUntil(("<", "\u0000"))
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+ data + chars})
+ return True
+
+ def scriptDataState(self):
+ data = self.stream.char()
+ if data == "<":
+ self.state = self.scriptDataLessThanSignState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ elif data == EOF:
+ # Tokenization ends.
+ return False
+ else:
+ chars = self.stream.charsUntil(("<", "\u0000"))
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+ data + chars})
+ return True
+
+ def plaintextState(self):
+ data = self.stream.char()
+ if data == EOF:
+ # Tokenization ends.
+ return False
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+ data + self.stream.charsUntil("\u0000")})
+ return True
+
+ def tagOpenState(self):
+ data = self.stream.char()
+ if data == "!":
+ self.state = self.markupDeclarationOpenState
+ elif data == "/":
+ self.state = self.closeTagOpenState
+ elif data in asciiLetters:
+ self.currentToken = {"type": tokenTypes["StartTag"],
+ "name": data, "data": [],
+ "selfClosing": False,
+ "selfClosingAcknowledged": False}
+ self.state = self.tagNameState
+ elif data == ">":
+ # XXX In theory it could be something besides a tag name. But
+ # do we really care?
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-tag-name-but-got-right-bracket"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"})
+ self.state = self.dataState
+ elif data == "?":
+ # XXX In theory it could be something besides a tag name. But
+ # do we really care?
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-tag-name-but-got-question-mark"})
+ self.stream.unget(data)
+ self.state = self.bogusCommentState
+ else:
+ # XXX
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-tag-name"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.stream.unget(data)
+ self.state = self.dataState
+ return True
+
+ def closeTagOpenState(self):
+ data = self.stream.char()
+ if data in asciiLetters:
+ self.currentToken = {"type": tokenTypes["EndTag"], "name": data,
+ "data": [], "selfClosing": False}
+ self.state = self.tagNameState
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-closing-tag-but-got-right-bracket"})
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-closing-tag-but-got-eof"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+ self.state = self.dataState
+ else:
+ # XXX data can be _'_...
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-closing-tag-but-got-char",
+ "datavars": {"data": data}})
+ self.stream.unget(data)
+ self.state = self.bogusCommentState
+ return True
+
+ def tagNameState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.beforeAttributeNameState
+ elif data == ">":
+ self.emitCurrentToken()
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-tag-name"})
+ self.state = self.dataState
+ elif data == "/":
+ self.state = self.selfClosingStartTagState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["name"] += "\uFFFD"
+ else:
+ self.currentToken["name"] += data
+ # (Don't use charsUntil here, because tag names are
+ # very short and it's faster to not do anything fancy)
+ return True
+
+ def rcdataLessThanSignState(self):
+ data = self.stream.char()
+ if data == "/":
+ self.temporaryBuffer = ""
+ self.state = self.rcdataEndTagOpenState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.stream.unget(data)
+ self.state = self.rcdataState
+ return True
+
+ def rcdataEndTagOpenState(self):
+ data = self.stream.char()
+ if data in asciiLetters:
+ self.temporaryBuffer += data
+ self.state = self.rcdataEndTagNameState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+ self.stream.unget(data)
+ self.state = self.rcdataState
+ return True
+
+ def rcdataEndTagNameState(self):
+ appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
+ data = self.stream.char()
+ if data in spaceCharacters and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.beforeAttributeNameState
+ elif data == "/" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.selfClosingStartTagState
+ elif data == ">" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.emitCurrentToken()
+ self.state = self.dataState
+ elif data in asciiLetters:
+ self.temporaryBuffer += data
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "</" + self.temporaryBuffer})
+ self.stream.unget(data)
+ self.state = self.rcdataState
+ return True
+
+ def rawtextLessThanSignState(self):
+ data = self.stream.char()
+ if data == "/":
+ self.temporaryBuffer = ""
+ self.state = self.rawtextEndTagOpenState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.stream.unget(data)
+ self.state = self.rawtextState
+ return True
+
+ def rawtextEndTagOpenState(self):
+ data = self.stream.char()
+ if data in asciiLetters:
+ self.temporaryBuffer += data
+ self.state = self.rawtextEndTagNameState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+ self.stream.unget(data)
+ self.state = self.rawtextState
+ return True
+
+ def rawtextEndTagNameState(self):
+ appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
+ data = self.stream.char()
+ if data in spaceCharacters and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.beforeAttributeNameState
+ elif data == "/" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.selfClosingStartTagState
+ elif data == ">" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.emitCurrentToken()
+ self.state = self.dataState
+ elif data in asciiLetters:
+ self.temporaryBuffer += data
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "</" + self.temporaryBuffer})
+ self.stream.unget(data)
+ self.state = self.rawtextState
+ return True
+
+ def scriptDataLessThanSignState(self):
+ data = self.stream.char()
+ if data == "/":
+ self.temporaryBuffer = ""
+ self.state = self.scriptDataEndTagOpenState
+ elif data == "!":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<!"})
+ self.state = self.scriptDataEscapeStartState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.stream.unget(data)
+ self.state = self.scriptDataState
+ return True
+
+ def scriptDataEndTagOpenState(self):
+ data = self.stream.char()
+ if data in asciiLetters:
+ self.temporaryBuffer += data
+ self.state = self.scriptDataEndTagNameState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+ self.stream.unget(data)
+ self.state = self.scriptDataState
+ return True
+
+ def scriptDataEndTagNameState(self):
+ appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
+ data = self.stream.char()
+ if data in spaceCharacters and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.beforeAttributeNameState
+ elif data == "/" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.selfClosingStartTagState
+ elif data == ">" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.emitCurrentToken()
+ self.state = self.dataState
+ elif data in asciiLetters:
+ self.temporaryBuffer += data
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "</" + self.temporaryBuffer})
+ self.stream.unget(data)
+ self.state = self.scriptDataState
+ return True
+
+ def scriptDataEscapeStartState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ self.state = self.scriptDataEscapeStartDashState
+ else:
+ self.stream.unget(data)
+ self.state = self.scriptDataState
+ return True
+
+ def scriptDataEscapeStartDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ self.state = self.scriptDataEscapedDashDashState
+ else:
+ self.stream.unget(data)
+ self.state = self.scriptDataState
+ return True
+
+ def scriptDataEscapedState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ self.state = self.scriptDataEscapedDashState
+ elif data == "<":
+ self.state = self.scriptDataEscapedLessThanSignState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ elif data == EOF:
+ self.state = self.dataState
+ else:
+ chars = self.stream.charsUntil(("<", "-", "\u0000"))
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+ data + chars})
+ return True
+
+ def scriptDataEscapedDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ self.state = self.scriptDataEscapedDashDashState
+ elif data == "<":
+ self.state = self.scriptDataEscapedLessThanSignState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ self.state = self.scriptDataEscapedState
+ elif data == EOF:
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ self.state = self.scriptDataEscapedState
+ return True
+
+ def scriptDataEscapedDashDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ elif data == "<":
+ self.state = self.scriptDataEscapedLessThanSignState
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"})
+ self.state = self.scriptDataState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ self.state = self.scriptDataEscapedState
+ elif data == EOF:
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ self.state = self.scriptDataEscapedState
+ return True
+
+ def scriptDataEscapedLessThanSignState(self):
+ data = self.stream.char()
+ if data == "/":
+ self.temporaryBuffer = ""
+ self.state = self.scriptDataEscapedEndTagOpenState
+ elif data in asciiLetters:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data})
+ self.temporaryBuffer = data
+ self.state = self.scriptDataDoubleEscapeStartState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.stream.unget(data)
+ self.state = self.scriptDataEscapedState
+ return True
+
+ def scriptDataEscapedEndTagOpenState(self):
+ data = self.stream.char()
+ if data in asciiLetters:
+ self.temporaryBuffer = data
+ self.state = self.scriptDataEscapedEndTagNameState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+ self.stream.unget(data)
+ self.state = self.scriptDataEscapedState
+ return True
+
+ def scriptDataEscapedEndTagNameState(self):
+ appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
+ data = self.stream.char()
+ if data in spaceCharacters and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.beforeAttributeNameState
+ elif data == "/" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.selfClosingStartTagState
+ elif data == ">" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.emitCurrentToken()
+ self.state = self.dataState
+ elif data in asciiLetters:
+ self.temporaryBuffer += data
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "</" + self.temporaryBuffer})
+ self.stream.unget(data)
+ self.state = self.scriptDataEscapedState
+ return True
+
+ def scriptDataDoubleEscapeStartState(self):
+ data = self.stream.char()
+ if data in (spaceCharacters | frozenset(("/", ">"))):
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ if self.temporaryBuffer.lower() == "script":
+ self.state = self.scriptDataDoubleEscapedState
+ else:
+ self.state = self.scriptDataEscapedState
+ elif data in asciiLetters:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ self.temporaryBuffer += data
+ else:
+ self.stream.unget(data)
+ self.state = self.scriptDataEscapedState
+ return True
+
+ def scriptDataDoubleEscapedState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ self.state = self.scriptDataDoubleEscapedDashState
+ elif data == "<":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.state = self.scriptDataDoubleEscapedLessThanSignState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ elif data == EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-script-in-script"})
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ return True
+
+ def scriptDataDoubleEscapedDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ self.state = self.scriptDataDoubleEscapedDashDashState
+ elif data == "<":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.state = self.scriptDataDoubleEscapedLessThanSignState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ self.state = self.scriptDataDoubleEscapedState
+ elif data == EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-script-in-script"})
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ self.state = self.scriptDataDoubleEscapedState
+ return True
+
+ def scriptDataDoubleEscapedDashDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ elif data == "<":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.state = self.scriptDataDoubleEscapedLessThanSignState
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"})
+ self.state = self.scriptDataState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ self.state = self.scriptDataDoubleEscapedState
+ elif data == EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-script-in-script"})
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ self.state = self.scriptDataDoubleEscapedState
+ return True
+
+ def scriptDataDoubleEscapedLessThanSignState(self):
+ data = self.stream.char()
+ if data == "/":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"})
+ self.temporaryBuffer = ""
+ self.state = self.scriptDataDoubleEscapeEndState
+ else:
+ self.stream.unget(data)
+ self.state = self.scriptDataDoubleEscapedState
+ return True
+
+ def scriptDataDoubleEscapeEndState(self):
+ data = self.stream.char()
+ if data in (spaceCharacters | frozenset(("/", ">"))):
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ if self.temporaryBuffer.lower() == "script":
+ self.state = self.scriptDataEscapedState
+ else:
+ self.state = self.scriptDataDoubleEscapedState
+ elif data in asciiLetters:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ self.temporaryBuffer += data
+ else:
+ self.stream.unget(data)
+ self.state = self.scriptDataDoubleEscapedState
+ return True
+
+ def beforeAttributeNameState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.stream.charsUntil(spaceCharacters, True)
+ elif data in asciiLetters:
+ self.currentToken["data"].append([data, ""])
+ self.state = self.attributeNameState
+ elif data == ">":
+ self.emitCurrentToken()
+ elif data == "/":
+ self.state = self.selfClosingStartTagState
+ elif data in ("'", '"', "=", "<"):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "invalid-character-in-attribute-name"})
+ self.currentToken["data"].append([data, ""])
+ self.state = self.attributeNameState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"].append(["\uFFFD", ""])
+ self.state = self.attributeNameState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-attribute-name-but-got-eof"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"].append([data, ""])
+ self.state = self.attributeNameState
+ return True
+
+ def attributeNameState(self):
+ data = self.stream.char()
+ leavingThisState = True
+ emitToken = False
+ if data == "=":
+ self.state = self.beforeAttributeValueState
+ elif data in asciiLetters:
+ self.currentToken["data"][-1][0] += data +\
+ self.stream.charsUntil(asciiLetters, True)
+ leavingThisState = False
+ elif data == ">":
+ # XXX If we emit here, the attributes are converted to a dict
+ # without being checked, and when the code below runs we error
+ # because data is a dict, not a list
+ emitToken = True
+ elif data in spaceCharacters:
+ self.state = self.afterAttributeNameState
+ elif data == "/":
+ self.state = self.selfClosingStartTagState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"][-1][0] += "\uFFFD"
+ leavingThisState = False
+ elif data in ("'", '"', "<"):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data":
+ "invalid-character-in-attribute-name"})
+ self.currentToken["data"][-1][0] += data
+ leavingThisState = False
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "eof-in-attribute-name"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"][-1][0] += data
+ leavingThisState = False
+
+ if leavingThisState:
+ # Attributes are not dropped at this stage. That happens when the
+ # start tag token is emitted so values can still be safely appended
+ # to attributes, but we do want to report the parse error in time.
+ self.currentToken["data"][-1][0] = (
+ self.currentToken["data"][-1][0].translate(asciiUpper2Lower))
+ for name, _ in self.currentToken["data"][:-1]:
+ if self.currentToken["data"][-1][0] == name:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "duplicate-attribute"})
+ break
+ # XXX Fix for above XXX
+ if emitToken:
+ self.emitCurrentToken()
+ return True
+
+ def afterAttributeNameState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.stream.charsUntil(spaceCharacters, True)
+ elif data == "=":
+ self.state = self.beforeAttributeValueState
+ elif data == ">":
+ self.emitCurrentToken()
+ elif data in asciiLetters:
+ self.currentToken["data"].append([data, ""])
+ self.state = self.attributeNameState
+ elif data == "/":
+ self.state = self.selfClosingStartTagState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"].append(["\uFFFD", ""])
+ self.state = self.attributeNameState
+ elif data in ("'", '"', "<"):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "invalid-character-after-attribute-name"})
+ self.currentToken["data"].append([data, ""])
+ self.state = self.attributeNameState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-end-of-tag-but-got-eof"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"].append([data, ""])
+ self.state = self.attributeNameState
+ return True
+
+ def beforeAttributeValueState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.stream.charsUntil(spaceCharacters, True)
+ elif data == "\"":
+ self.state = self.attributeValueDoubleQuotedState
+ elif data == "&":
+ self.state = self.attributeValueUnQuotedState
+ self.stream.unget(data)
+ elif data == "'":
+ self.state = self.attributeValueSingleQuotedState
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-attribute-value-but-got-right-bracket"})
+ self.emitCurrentToken()
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"][-1][1] += "\uFFFD"
+ self.state = self.attributeValueUnQuotedState
+ elif data in ("=", "<", "`"):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "equals-in-unquoted-attribute-value"})
+ self.currentToken["data"][-1][1] += data
+ self.state = self.attributeValueUnQuotedState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-attribute-value-but-got-eof"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"][-1][1] += data
+ self.state = self.attributeValueUnQuotedState
+ return True
+
+ def attributeValueDoubleQuotedState(self):
+ data = self.stream.char()
+ if data == "\"":
+ self.state = self.afterAttributeValueState
+ elif data == "&":
+ self.processEntityInAttribute('"')
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"][-1][1] += "\uFFFD"
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-attribute-value-double-quote"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"][-1][1] += data +\
+ self.stream.charsUntil(("\"", "&", "\u0000"))
+ return True
+
+ def attributeValueSingleQuotedState(self):
+ data = self.stream.char()
+ if data == "'":
+ self.state = self.afterAttributeValueState
+ elif data == "&":
+ self.processEntityInAttribute("'")
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"][-1][1] += "\uFFFD"
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-attribute-value-single-quote"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"][-1][1] += data +\
+ self.stream.charsUntil(("'", "&", "\u0000"))
+ return True
+
+ def attributeValueUnQuotedState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.beforeAttributeNameState
+ elif data == "&":
+ self.processEntityInAttribute(">")
+ elif data == ">":
+ self.emitCurrentToken()
+ elif data in ('"', "'", "=", "<", "`"):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-character-in-unquoted-attribute-value"})
+ self.currentToken["data"][-1][1] += data
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"][-1][1] += "\uFFFD"
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-attribute-value-no-quotes"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"][-1][1] += data + self.stream.charsUntil(
+ frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000")) | spaceCharacters)
+ return True
+
+ def afterAttributeValueState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.beforeAttributeNameState
+ elif data == ">":
+ self.emitCurrentToken()
+ elif data == "/":
+ self.state = self.selfClosingStartTagState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-EOF-after-attribute-value"})
+ self.stream.unget(data)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-character-after-attribute-value"})
+ self.stream.unget(data)
+ self.state = self.beforeAttributeNameState
+ return True
+
+ def selfClosingStartTagState(self):
+ data = self.stream.char()
+ if data == ">":
+ self.currentToken["selfClosing"] = True
+ self.emitCurrentToken()
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data":
+ "unexpected-EOF-after-solidus-in-tag"})
+ self.stream.unget(data)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-character-after-solidus-in-tag"})
+ self.stream.unget(data)
+ self.state = self.beforeAttributeNameState
+ return True
+
+ def bogusCommentState(self):
+ # Make a new comment token and give it as value all the characters
+ # until the first > or EOF (charsUntil checks for EOF automatically)
+ # and emit it.
+ data = self.stream.charsUntil(">")
+ data = data.replace("\u0000", "\uFFFD")
+ self.tokenQueue.append(
+ {"type": tokenTypes["Comment"], "data": data})
+
+ # Eat the character directly after the bogus comment which is either a
+ # ">" or an EOF.
+ self.stream.char()
+ self.state = self.dataState
+ return True
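+
(Sketch of bogusCommentState, assuming the package is importable: "<?" and similar bogus markup is swallowed up to the next ">" and emitted as a Comment token.)

    from html5lib._tokenizer import HTMLTokenizer
    from html5lib.constants import tokenTypes

    comments = [t["data"] for t in HTMLTokenizer("<?php echo 1 ?>")
                if t["type"] == tokenTypes["Comment"]]
    print(comments)   # ['?php echo 1 ?']
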
+
+ def markupDeclarationOpenState(self):
+ charStack = [self.stream.char()]
+ if charStack[-1] == "-":
+ charStack.append(self.stream.char())
+ if charStack[-1] == "-":
+ self.currentToken = {"type": tokenTypes["Comment"], "data": ""}
+ self.state = self.commentStartState
+ return True
+ elif charStack[-1] in ('d', 'D'):
+ matched = True
+ for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'),
+ ('y', 'Y'), ('p', 'P'), ('e', 'E')):
+ charStack.append(self.stream.char())
+ if charStack[-1] not in expected:
+ matched = False
+ break
+ if matched:
+ self.currentToken = {"type": tokenTypes["Doctype"],
+ "name": "",
+ "publicId": None, "systemId": None,
+ "correct": True}
+ self.state = self.doctypeState
+ return True
+ elif (charStack[-1] == "[" and
+ self.parser is not None and
+ self.parser.tree.openElements and
+ self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace):
+ matched = True
+ for expected in ["C", "D", "A", "T", "A", "["]:
+ charStack.append(self.stream.char())
+ if charStack[-1] != expected:
+ matched = False
+ break
+ if matched:
+ self.state = self.cdataSectionState
+ return True
+
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-dashes-or-doctype"})
+
+ while charStack:
+ self.stream.unget(charStack.pop())
+ self.state = self.bogusCommentState
+ return True
+
+ def commentStartState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.state = self.commentStartDashState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"] += "\uFFFD"
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "incorrect-comment"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-comment"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["data"] += data
+ self.state = self.commentState
+ return True
+
+ def commentStartDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.state = self.commentEndState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"] += "-\uFFFD"
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "incorrect-comment"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-comment"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["data"] += "-" + data
+ self.state = self.commentState
+ return True
+
+ def commentState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.state = self.commentEndDashState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"] += "\uFFFD"
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "eof-in-comment"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["data"] += data + \
+ self.stream.charsUntil(("-", "\u0000"))
+ return True
+
+ def commentEndDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.state = self.commentEndState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"] += "-\uFFFD"
+ self.state = self.commentState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-comment-end-dash"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["data"] += "-" + data
+ self.state = self.commentState
+ return True
+
+ def commentEndState(self):
+ data = self.stream.char()
+ if data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"] += "--\uFFFD"
+ self.state = self.commentState
+ elif data == "!":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-bang-after-double-dash-in-comment"})
+ self.state = self.commentEndBangState
+ elif data == "-":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-dash-after-double-dash-in-comment"})
+ self.currentToken["data"] += data
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-comment-double-dash"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ # XXX
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-comment"})
+ self.currentToken["data"] += "--" + data
+ self.state = self.commentState
+ return True
+
+ def commentEndBangState(self):
+ data = self.stream.char()
+ if data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data == "-":
+ self.currentToken["data"] += "--!"
+ self.state = self.commentEndDashState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"] += "--!\uFFFD"
+ self.state = self.commentState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-comment-end-bang-state"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["data"] += "--!" + data
+ self.state = self.commentState
+ return True
+
+ def doctypeState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.beforeDoctypeNameState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-doctype-name-but-got-eof"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "need-space-after-doctype"})
+ self.stream.unget(data)
+ self.state = self.beforeDoctypeNameState
+ return True
+
+ def beforeDoctypeNameState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ pass
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-doctype-name-but-got-right-bracket"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["name"] = "\uFFFD"
+ self.state = self.doctypeNameState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-doctype-name-but-got-eof"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["name"] = data
+ self.state = self.doctypeNameState
+ return True
+
+ def doctypeNameState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)
+ self.state = self.afterDoctypeNameState
+ elif data == ">":
+ self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["name"] += "\uFFFD"
+ self.state = self.doctypeNameState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype-name"})
+ self.currentToken["correct"] = False
+ self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["name"] += data
+ return True
+
+ def afterDoctypeNameState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ pass
+ elif data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.currentToken["correct"] = False
+ self.stream.unget(data)
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ if data in ("p", "P"):
+ matched = True
+ for expected in (("u", "U"), ("b", "B"), ("l", "L"),
+ ("i", "I"), ("c", "C")):
+ data = self.stream.char()
+ if data not in expected:
+ matched = False
+ break
+ if matched:
+ self.state = self.afterDoctypePublicKeywordState
+ return True
+ elif data in ("s", "S"):
+ matched = True
+ for expected in (("y", "Y"), ("s", "S"), ("t", "T"),
+ ("e", "E"), ("m", "M")):
+ data = self.stream.char()
+ if data not in expected:
+ matched = False
+ break
+ if matched:
+ self.state = self.afterDoctypeSystemKeywordState
+ return True
+
+ # All the characters read before the current 'data' will be
+ # [a-zA-Z], so they're garbage in the bogus doctype and can be
+ # discarded; only the latest character might be '>' or EOF
+ # and needs to be ungetted
+ self.stream.unget(data)
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-space-or-right-bracket-in-doctype", "datavars":
+ {"data": data}})
+ self.currentToken["correct"] = False
+ self.state = self.bogusDoctypeState
+
+ return True
+
+ def afterDoctypePublicKeywordState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.beforeDoctypePublicIdentifierState
+ elif data in ("'", '"'):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.stream.unget(data)
+ self.state = self.beforeDoctypePublicIdentifierState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.stream.unget(data)
+ self.state = self.beforeDoctypePublicIdentifierState
+ return True
+
+ def beforeDoctypePublicIdentifierState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ pass
+ elif data == "\"":
+ self.currentToken["publicId"] = ""
+ self.state = self.doctypePublicIdentifierDoubleQuotedState
+ elif data == "'":
+ self.currentToken["publicId"] = ""
+ self.state = self.doctypePublicIdentifierSingleQuotedState
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-end-of-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["correct"] = False
+ self.state = self.bogusDoctypeState
+ return True
+
+ def doctypePublicIdentifierDoubleQuotedState(self):
+ data = self.stream.char()
+ if data == "\"":
+ self.state = self.afterDoctypePublicIdentifierState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["publicId"] += "\uFFFD"
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-end-of-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["publicId"] += data
+ return True
+
+ def doctypePublicIdentifierSingleQuotedState(self):
+ data = self.stream.char()
+ if data == "'":
+ self.state = self.afterDoctypePublicIdentifierState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["publicId"] += "\uFFFD"
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-end-of-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["publicId"] += data
+ return True
+
+ def afterDoctypePublicIdentifierState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.betweenDoctypePublicAndSystemIdentifiersState
+ elif data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data == '"':
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["systemId"] = ""
+ self.state = self.doctypeSystemIdentifierDoubleQuotedState
+ elif data == "'":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["systemId"] = ""
+ self.state = self.doctypeSystemIdentifierSingleQuotedState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["correct"] = False
+ self.state = self.bogusDoctypeState
+ return True
+
+ def betweenDoctypePublicAndSystemIdentifiersState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ pass
+ elif data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data == '"':
+ self.currentToken["systemId"] = ""
+ self.state = self.doctypeSystemIdentifierDoubleQuotedState
+ elif data == "'":
+ self.currentToken["systemId"] = ""
+ self.state = self.doctypeSystemIdentifierSingleQuotedState
+        elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["correct"] = False
+ self.state = self.bogusDoctypeState
+ return True
+
+ def afterDoctypeSystemKeywordState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.beforeDoctypeSystemIdentifierState
+ elif data in ("'", '"'):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.stream.unget(data)
+ self.state = self.beforeDoctypeSystemIdentifierState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.stream.unget(data)
+ self.state = self.beforeDoctypeSystemIdentifierState
+ return True
+
+ def beforeDoctypeSystemIdentifierState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ pass
+ elif data == "\"":
+ self.currentToken["systemId"] = ""
+ self.state = self.doctypeSystemIdentifierDoubleQuotedState
+ elif data == "'":
+ self.currentToken["systemId"] = ""
+ self.state = self.doctypeSystemIdentifierSingleQuotedState
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["correct"] = False
+ self.state = self.bogusDoctypeState
+ return True
+
+ def doctypeSystemIdentifierDoubleQuotedState(self):
+ data = self.stream.char()
+ if data == "\"":
+ self.state = self.afterDoctypeSystemIdentifierState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["systemId"] += "\uFFFD"
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-end-of-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["systemId"] += data
+ return True
+
+ def doctypeSystemIdentifierSingleQuotedState(self):
+ data = self.stream.char()
+ if data == "'":
+ self.state = self.afterDoctypeSystemIdentifierState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["systemId"] += "\uFFFD"
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-end-of-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["systemId"] += data
+ return True
+
+ def afterDoctypeSystemIdentifierState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ pass
+ elif data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.state = self.bogusDoctypeState
+ return True
+
+ def bogusDoctypeState(self):
+ data = self.stream.char()
+ if data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+            # EOF in a bogus doctype: emit the token and let the data state handle EOF
+ self.stream.unget(data)
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ pass
+ return True
+
+ def cdataSectionState(self):
+ data = []
+ while True:
+ data.append(self.stream.charsUntil("]"))
+ data.append(self.stream.charsUntil(">"))
+ char = self.stream.char()
+            if char is EOF:
+ break
+ else:
+ assert char == ">"
+ if data[-1][-2:] == "]]":
+ data[-1] = data[-1][:-2]
+ break
+ else:
+ data.append(char)
+
+ data = "".join(data) # pylint:disable=redefined-variable-type
+ # Deal with null here rather than in the parser
+ nullCount = data.count("\u0000")
+ if nullCount > 0:
+ for _ in range(nullCount):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ data = data.replace("\u0000", "\uFFFD")
+ if data:
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": data})
+ self.state = self.dataState
+ return True
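
The doctype states above form the DOCTYPE branch of the HTML5 tokenizer state machine: after the doctype name, a case-insensitive PUBLIC or SYSTEM keyword routes into the public/system identifier states, and anything malformed falls through to bogusDoctypeState with the token's "correct" flag set to False. A minimal sketch of exercising this path, assuming the HTMLTokenizer entry point defined earlier in this file (not visible in this hunk) and the tokenTypes table from constants.py:

    # Illustrative only; the import paths are assumptions based on the
    # vendored html5lib layout added by this patch.
    from html5lib._tokenizer import HTMLTokenizer
    from html5lib.constants import tokenTypes

    tokens = list(HTMLTokenizer('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">'))
    doctype = tokens[0]
    assert doctype["type"] == tokenTypes["Doctype"]
    assert doctype["name"] == "html"
    assert doctype["publicId"] == "-//W3C//DTD HTML 4.01//EN"
    assert doctype["correct"] is True
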
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_trie/__init__.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_trie/__init__.py
new file mode 100644
index 0000000000..07bad5d31c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_trie/__init__.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from .py import Trie
+
+__all__ = ["Trie"]
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_trie/_base.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_trie/_base.py
new file mode 100644
index 0000000000..6b71975f08
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_trie/_base.py
@@ -0,0 +1,40 @@
+from __future__ import absolute_import, division, unicode_literals
+
+try:
+ from collections.abc import Mapping
+except ImportError: # Python 2.7
+ from collections import Mapping
+
+
+class Trie(Mapping):
+ """Abstract base class for tries"""
+
+ def keys(self, prefix=None):
+ # pylint:disable=arguments-differ
+ keys = super(Trie, self).keys()
+
+ if prefix is None:
+ return set(keys)
+
+ return {x for x in keys if x.startswith(prefix)}
+
+ def has_keys_with_prefix(self, prefix):
+ for key in self.keys():
+ if key.startswith(prefix):
+ return True
+
+ return False
+
+ def longest_prefix(self, prefix):
+ if prefix in self:
+ return prefix
+
+ for i in range(1, len(prefix) + 1):
+ if prefix[:-i] in self:
+ return prefix[:-i]
+
+ raise KeyError(prefix)
+
+ def longest_prefix_item(self, prefix):
+ lprefix = self.longest_prefix(prefix)
+ return (lprefix, self[lprefix])
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_trie/py.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_trie/py.py
new file mode 100644
index 0000000000..c2ba3da757
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_trie/py.py
@@ -0,0 +1,67 @@
+from __future__ import absolute_import, division, unicode_literals
+from six import text_type
+
+from bisect import bisect_left
+
+from ._base import Trie as ABCTrie
+
+
+class Trie(ABCTrie):
+ def __init__(self, data):
+ if not all(isinstance(x, text_type) for x in data.keys()):
+ raise TypeError("All keys must be strings")
+
+ self._data = data
+ self._keys = sorted(data.keys())
+ self._cachestr = ""
+ self._cachepoints = (0, len(data))
+
+ def __contains__(self, key):
+ return key in self._data
+
+ def __len__(self):
+ return len(self._data)
+
+ def __iter__(self):
+ return iter(self._data)
+
+ def __getitem__(self, key):
+ return self._data[key]
+
+ def keys(self, prefix=None):
+ if prefix is None or prefix == "" or not self._keys:
+ return set(self._keys)
+
+ if prefix.startswith(self._cachestr):
+ lo, hi = self._cachepoints
+ start = i = bisect_left(self._keys, prefix, lo, hi)
+ else:
+ start = i = bisect_left(self._keys, prefix)
+
+ keys = set()
+ if start == len(self._keys):
+ return keys
+
+ while self._keys[i].startswith(prefix):
+ keys.add(self._keys[i])
+ i += 1
+
+ self._cachestr = prefix
+ self._cachepoints = (start, i)
+
+ return keys
+
+ def has_keys_with_prefix(self, prefix):
+ if prefix in self._data:
+ return True
+
+ if prefix.startswith(self._cachestr):
+ lo, hi = self._cachepoints
+ i = bisect_left(self._keys, prefix, lo, hi)
+ else:
+ i = bisect_left(self._keys, prefix)
+
+ if i == len(self._keys):
+ return False
+
+ return self._keys[i].startswith(prefix)
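
The pure-Python Trie above keeps its keys in a sorted list and answers prefix queries with bisect_left plus a small (prefix, slice) cache, which is what keeps prefix matching against the large entities table in constants.py cheap. A short usage sketch, illustrative only, using just the classes added above:

    from html5lib._trie import Trie

    t = Trie({"amp": "&", "amp;": "&", "aacute;": "\xe1"})
    assert t.has_keys_with_prefix("am")                   # bisect lands on "amp"
    assert t.keys("aa") == {"aacute;"}                    # only keys starting with "aa"
    assert t.longest_prefix("amp;x") == "amp;"            # longest key that prefixes the input
    assert t.longest_prefix_item("ampx") == ("amp", "&")
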
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_utils.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_utils.py
new file mode 100644
index 0000000000..9ea5794214
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/_utils.py
@@ -0,0 +1,159 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from types import ModuleType
+
+try:
+ from collections.abc import Mapping
+except ImportError:
+ from collections import Mapping
+
+from six import text_type, PY3
+
+if PY3:
+ import xml.etree.ElementTree as default_etree
+else:
+ try:
+ import xml.etree.cElementTree as default_etree
+ except ImportError:
+ import xml.etree.ElementTree as default_etree
+
+
+__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair",
+ "surrogatePairToCodepoint", "moduleFactoryFactory",
+ "supports_lone_surrogates"]
+
+
+# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be
+# caught by the below test. In general this would be any platform
+# using UTF-16 as its encoding of unicode strings, such as
+# Jython. This is because UTF-16 itself is based on the use of such
+# surrogates, and there is no way to represent a lone surrogate
+# separately from the surrogates used by the encoding itself.
+try:
+ _x = eval('"\\uD800"') # pylint:disable=eval-used
+ if not isinstance(_x, text_type):
+ # We need this with u"" because of http://bugs.jython.org/issue2039
+ _x = eval('u"\\uD800"') # pylint:disable=eval-used
+ assert isinstance(_x, text_type)
+except Exception:
+ supports_lone_surrogates = False
+else:
+ supports_lone_surrogates = True
+
+
+class MethodDispatcher(dict):
+ """Dict with 2 special properties:
+
+    On initialization, keys that are lists, sets or tuples are expanded into
+    multiple keys, so accessing any one of the items in the original
+    list-like object returns the matching value:
+
+ md = MethodDispatcher({("foo", "bar"):"baz"})
+ md["foo"] == "baz"
+
+ A default value which can be set through the default attribute.
+ """
+
+ def __init__(self, items=()):
+ _dictEntries = []
+ for name, value in items:
+ if isinstance(name, (list, tuple, frozenset, set)):
+ for item in name:
+ _dictEntries.append((item, value))
+ else:
+ _dictEntries.append((name, value))
+ dict.__init__(self, _dictEntries)
+ assert len(self) == len(_dictEntries)
+ self.default = None
+
+ def __getitem__(self, key):
+ return dict.get(self, key, self.default)
+
+ def __get__(self, instance, owner=None):
+ return BoundMethodDispatcher(instance, self)
+
+
+class BoundMethodDispatcher(Mapping):
+ """Wraps a MethodDispatcher, binding its return values to `instance`"""
+ def __init__(self, instance, dispatcher):
+ self.instance = instance
+ self.dispatcher = dispatcher
+
+ def __getitem__(self, key):
+ # see https://docs.python.org/3/reference/datamodel.html#object.__get__
+ # on a function, __get__ is used to bind a function to an instance as a bound method
+ return self.dispatcher[key].__get__(self.instance)
+
+ def get(self, key, default):
+ if key in self.dispatcher:
+ return self[key]
+ else:
+ return default
+
+ def __iter__(self):
+ return iter(self.dispatcher)
+
+ def __len__(self):
+ return len(self.dispatcher)
+
+ def __contains__(self, key):
+ return key in self.dispatcher
+
+
+# Some utility functions to deal with weirdness around UCS2 vs UCS4
+# python builds
+
+def isSurrogatePair(data):
+ return (len(data) == 2 and
+ ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and
+ ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF)
+
+
+def surrogatePairToCodepoint(data):
+ char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 +
+ (ord(data[1]) - 0xDC00))
+ return char_val
+
+# Module Factory Factory (no, this isn't Java, I know)
+# Here to stop this being duplicated all over the place.
+
+
+def moduleFactoryFactory(factory):
+ moduleCache = {}
+
+ def moduleFactory(baseModule, *args, **kwargs):
+ if isinstance(ModuleType.__name__, type("")):
+ name = "_%s_factory" % baseModule.__name__
+ else:
+ name = b"_%s_factory" % baseModule.__name__
+
+ kwargs_tuple = tuple(kwargs.items())
+
+ try:
+ return moduleCache[name][args][kwargs_tuple]
+ except KeyError:
+ mod = ModuleType(name)
+ objs = factory(baseModule, *args, **kwargs)
+ mod.__dict__.update(objs)
+ if "name" not in moduleCache:
+ moduleCache[name] = {}
+ if "args" not in moduleCache[name]:
+ moduleCache[name][args] = {}
+ if "kwargs" not in moduleCache[name][args]:
+ moduleCache[name][args][kwargs_tuple] = {}
+ moduleCache[name][args][kwargs_tuple] = mod
+ return mod
+
+ return moduleFactory
+
+
+def memoize(func):
+ cache = {}
+
+ def wrapped(*args, **kwargs):
+ key = (tuple(args), tuple(kwargs.items()))
+ if key not in cache:
+ cache[key] = func(*args, **kwargs)
+ return cache[key]
+
+ return wrapped
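
MethodDispatcher and BoundMethodDispatcher above let a table that maps many tag names to one handler live as a class attribute and still return bound methods when looked up through an instance (the descriptor __get__ does the binding). A sketch of the pattern using only the classes defined in this file; the Phase class and its handlers are hypothetical, for illustration:

    from html5lib._utils import MethodDispatcher

    class Phase(object):
        def startTagHeading(self, token):
            return "heading: %s" % token["name"]

        def startTagOther(self, token):
            return "other: %s" % token["name"]

        # Tuple keys are expanded, so every heading tag maps to one handler;
        # attribute access through an instance yields a BoundMethodDispatcher.
        startTagHandler = MethodDispatcher([
            (("h1", "h2", "h3", "h4", "h5", "h6"), startTagHeading),
        ])
        startTagHandler.default = startTagOther

    phase = Phase()
    assert phase.startTagHandler["h2"]({"name": "h2"}) == "heading: h2"
    assert phase.startTagHandler["div"]({"name": "div"}) == "other: div"
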
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/constants.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/constants.py
new file mode 100644
index 0000000000..fe3e237cd8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/constants.py
@@ -0,0 +1,2946 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import string
+
+EOF = None
+
+E = {
+ "null-character":
+ "Null character in input stream, replaced with U+FFFD.",
+ "invalid-codepoint":
+ "Invalid codepoint in stream.",
+ "incorrectly-placed-solidus":
+ "Solidus (/) incorrectly placed in tag.",
+ "incorrect-cr-newline-entity":
+ "Incorrect CR newline entity, replaced with LF.",
+ "illegal-windows-1252-entity":
+ "Entity used with illegal number (windows-1252 reference).",
+ "cant-convert-numeric-entity":
+ "Numeric entity couldn't be converted to character "
+ "(codepoint U+%(charAsInt)08x).",
+ "illegal-codepoint-for-numeric-entity":
+ "Numeric entity represents an illegal codepoint: "
+ "U+%(charAsInt)08x.",
+ "numeric-entity-without-semicolon":
+ "Numeric entity didn't end with ';'.",
+ "expected-numeric-entity-but-got-eof":
+ "Numeric entity expected. Got end of file instead.",
+ "expected-numeric-entity":
+ "Numeric entity expected but none found.",
+ "named-entity-without-semicolon":
+ "Named entity didn't end with ';'.",
+ "expected-named-entity":
+ "Named entity expected. Got none.",
+ "attributes-in-end-tag":
+ "End tag contains unexpected attributes.",
+ 'self-closing-flag-on-end-tag':
+ "End tag contains unexpected self-closing flag.",
+ "expected-tag-name-but-got-right-bracket":
+ "Expected tag name. Got '>' instead.",
+ "expected-tag-name-but-got-question-mark":
+ "Expected tag name. Got '?' instead. (HTML doesn't "
+ "support processing instructions.)",
+ "expected-tag-name":
+ "Expected tag name. Got something else instead",
+ "expected-closing-tag-but-got-right-bracket":
+ "Expected closing tag. Got '>' instead. Ignoring '</>'.",
+ "expected-closing-tag-but-got-eof":
+ "Expected closing tag. Unexpected end of file.",
+ "expected-closing-tag-but-got-char":
+ "Expected closing tag. Unexpected character '%(data)s' found.",
+ "eof-in-tag-name":
+ "Unexpected end of file in the tag name.",
+ "expected-attribute-name-but-got-eof":
+ "Unexpected end of file. Expected attribute name instead.",
+ "eof-in-attribute-name":
+ "Unexpected end of file in attribute name.",
+ "invalid-character-in-attribute-name":
+ "Invalid character in attribute name",
+ "duplicate-attribute":
+ "Dropped duplicate attribute on tag.",
+ "expected-end-of-tag-name-but-got-eof":
+ "Unexpected end of file. Expected = or end of tag.",
+ "expected-attribute-value-but-got-eof":
+ "Unexpected end of file. Expected attribute value.",
+ "expected-attribute-value-but-got-right-bracket":
+ "Expected attribute value. Got '>' instead.",
+ 'equals-in-unquoted-attribute-value':
+ "Unexpected = in unquoted attribute",
+ 'unexpected-character-in-unquoted-attribute-value':
+ "Unexpected character in unquoted attribute",
+ "invalid-character-after-attribute-name":
+ "Unexpected character after attribute name.",
+ "unexpected-character-after-attribute-value":
+ "Unexpected character after attribute value.",
+ "eof-in-attribute-value-double-quote":
+ "Unexpected end of file in attribute value (\").",
+ "eof-in-attribute-value-single-quote":
+ "Unexpected end of file in attribute value (').",
+ "eof-in-attribute-value-no-quotes":
+ "Unexpected end of file in attribute value.",
+ "unexpected-EOF-after-solidus-in-tag":
+ "Unexpected end of file in tag. Expected >",
+ "unexpected-character-after-solidus-in-tag":
+ "Unexpected character after / in tag. Expected >",
+ "expected-dashes-or-doctype":
+ "Expected '--' or 'DOCTYPE'. Not found.",
+ "unexpected-bang-after-double-dash-in-comment":
+ "Unexpected ! after -- in comment",
+ "unexpected-space-after-double-dash-in-comment":
+ "Unexpected space after -- in comment",
+ "incorrect-comment":
+ "Incorrect comment.",
+ "eof-in-comment":
+ "Unexpected end of file in comment.",
+ "eof-in-comment-end-dash":
+ "Unexpected end of file in comment (-)",
+ "unexpected-dash-after-double-dash-in-comment":
+ "Unexpected '-' after '--' found in comment.",
+ "eof-in-comment-double-dash":
+ "Unexpected end of file in comment (--).",
+ "eof-in-comment-end-space-state":
+ "Unexpected end of file in comment.",
+ "eof-in-comment-end-bang-state":
+ "Unexpected end of file in comment.",
+ "unexpected-char-in-comment":
+ "Unexpected character in comment found.",
+ "need-space-after-doctype":
+ "No space after literal string 'DOCTYPE'.",
+ "expected-doctype-name-but-got-right-bracket":
+ "Unexpected > character. Expected DOCTYPE name.",
+ "expected-doctype-name-but-got-eof":
+ "Unexpected end of file. Expected DOCTYPE name.",
+ "eof-in-doctype-name":
+ "Unexpected end of file in DOCTYPE name.",
+ "eof-in-doctype":
+ "Unexpected end of file in DOCTYPE.",
+ "expected-space-or-right-bracket-in-doctype":
+ "Expected space or '>'. Got '%(data)s'",
+ "unexpected-end-of-doctype":
+ "Unexpected end of DOCTYPE.",
+ "unexpected-char-in-doctype":
+ "Unexpected character in DOCTYPE.",
+ "eof-in-innerhtml":
+ "XXX innerHTML EOF",
+ "unexpected-doctype":
+ "Unexpected DOCTYPE. Ignored.",
+ "non-html-root":
+ "html needs to be the first start tag.",
+ "expected-doctype-but-got-eof":
+ "Unexpected End of file. Expected DOCTYPE.",
+ "unknown-doctype":
+ "Erroneous DOCTYPE.",
+ "expected-doctype-but-got-chars":
+ "Unexpected non-space characters. Expected DOCTYPE.",
+ "expected-doctype-but-got-start-tag":
+ "Unexpected start tag (%(name)s). Expected DOCTYPE.",
+ "expected-doctype-but-got-end-tag":
+ "Unexpected end tag (%(name)s). Expected DOCTYPE.",
+ "end-tag-after-implied-root":
+ "Unexpected end tag (%(name)s) after the (implied) root element.",
+ "expected-named-closing-tag-but-got-eof":
+ "Unexpected end of file. Expected end tag (%(name)s).",
+ "two-heads-are-not-better-than-one":
+ "Unexpected start tag head in existing head. Ignored.",
+ "unexpected-end-tag":
+ "Unexpected end tag (%(name)s). Ignored.",
+ "unexpected-start-tag-out-of-my-head":
+ "Unexpected start tag (%(name)s) that can be in head. Moved.",
+ "unexpected-start-tag":
+ "Unexpected start tag (%(name)s).",
+ "missing-end-tag":
+ "Missing end tag (%(name)s).",
+ "missing-end-tags":
+ "Missing end tags (%(name)s).",
+ "unexpected-start-tag-implies-end-tag":
+ "Unexpected start tag (%(startName)s) "
+ "implies end tag (%(endName)s).",
+ "unexpected-start-tag-treated-as":
+ "Unexpected start tag (%(originalName)s). Treated as %(newName)s.",
+ "deprecated-tag":
+ "Unexpected start tag %(name)s. Don't use it!",
+ "unexpected-start-tag-ignored":
+ "Unexpected start tag %(name)s. Ignored.",
+ "expected-one-end-tag-but-got-another":
+ "Unexpected end tag (%(gotName)s). "
+ "Missing end tag (%(expectedName)s).",
+ "end-tag-too-early":
+ "End tag (%(name)s) seen too early. Expected other end tag.",
+ "end-tag-too-early-named":
+ "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).",
+ "end-tag-too-early-ignored":
+ "End tag (%(name)s) seen too early. Ignored.",
+ "adoption-agency-1.1":
+ "End tag (%(name)s) violates step 1, "
+ "paragraph 1 of the adoption agency algorithm.",
+ "adoption-agency-1.2":
+ "End tag (%(name)s) violates step 1, "
+ "paragraph 2 of the adoption agency algorithm.",
+ "adoption-agency-1.3":
+ "End tag (%(name)s) violates step 1, "
+ "paragraph 3 of the adoption agency algorithm.",
+ "adoption-agency-4.4":
+ "End tag (%(name)s) violates step 4, "
+ "paragraph 4 of the adoption agency algorithm.",
+ "unexpected-end-tag-treated-as":
+ "Unexpected end tag (%(originalName)s). Treated as %(newName)s.",
+ "no-end-tag":
+ "This element (%(name)s) has no end tag.",
+ "unexpected-implied-end-tag-in-table":
+ "Unexpected implied end tag (%(name)s) in the table phase.",
+ "unexpected-implied-end-tag-in-table-body":
+ "Unexpected implied end tag (%(name)s) in the table body phase.",
+ "unexpected-char-implies-table-voodoo":
+ "Unexpected non-space characters in "
+ "table context caused voodoo mode.",
+ "unexpected-hidden-input-in-table":
+ "Unexpected input with type hidden in table context.",
+ "unexpected-form-in-table":
+ "Unexpected form in table context.",
+ "unexpected-start-tag-implies-table-voodoo":
+ "Unexpected start tag (%(name)s) in "
+ "table context caused voodoo mode.",
+ "unexpected-end-tag-implies-table-voodoo":
+ "Unexpected end tag (%(name)s) in "
+ "table context caused voodoo mode.",
+ "unexpected-cell-in-table-body":
+ "Unexpected table cell start tag (%(name)s) "
+ "in the table body phase.",
+ "unexpected-cell-end-tag":
+ "Got table cell end tag (%(name)s) "
+ "while required end tags are missing.",
+ "unexpected-end-tag-in-table-body":
+ "Unexpected end tag (%(name)s) in the table body phase. Ignored.",
+ "unexpected-implied-end-tag-in-table-row":
+ "Unexpected implied end tag (%(name)s) in the table row phase.",
+ "unexpected-end-tag-in-table-row":
+ "Unexpected end tag (%(name)s) in the table row phase. Ignored.",
+ "unexpected-select-in-select":
+ "Unexpected select start tag in the select phase "
+ "treated as select end tag.",
+ "unexpected-input-in-select":
+ "Unexpected input start tag in the select phase.",
+ "unexpected-start-tag-in-select":
+ "Unexpected start tag token (%(name)s in the select phase. "
+ "Ignored.",
+ "unexpected-end-tag-in-select":
+ "Unexpected end tag (%(name)s) in the select phase. Ignored.",
+ "unexpected-table-element-start-tag-in-select-in-table":
+ "Unexpected table element start tag (%(name)s) in the select in table phase.",
+ "unexpected-table-element-end-tag-in-select-in-table":
+ "Unexpected table element end tag (%(name)s) in the select in table phase.",
+ "unexpected-char-after-body":
+ "Unexpected non-space characters in the after body phase.",
+ "unexpected-start-tag-after-body":
+ "Unexpected start tag token (%(name)s)"
+ " in the after body phase.",
+ "unexpected-end-tag-after-body":
+ "Unexpected end tag token (%(name)s)"
+ " in the after body phase.",
+ "unexpected-char-in-frameset":
+ "Unexpected characters in the frameset phase. Characters ignored.",
+ "unexpected-start-tag-in-frameset":
+ "Unexpected start tag token (%(name)s)"
+ " in the frameset phase. Ignored.",
+ "unexpected-frameset-in-frameset-innerhtml":
+ "Unexpected end tag token (frameset) "
+ "in the frameset phase (innerHTML).",
+ "unexpected-end-tag-in-frameset":
+ "Unexpected end tag token (%(name)s)"
+ " in the frameset phase. Ignored.",
+ "unexpected-char-after-frameset":
+ "Unexpected non-space characters in the "
+ "after frameset phase. Ignored.",
+ "unexpected-start-tag-after-frameset":
+ "Unexpected start tag (%(name)s)"
+ " in the after frameset phase. Ignored.",
+ "unexpected-end-tag-after-frameset":
+ "Unexpected end tag (%(name)s)"
+ " in the after frameset phase. Ignored.",
+ "unexpected-end-tag-after-body-innerhtml":
+ "Unexpected end tag after body(innerHtml)",
+ "expected-eof-but-got-char":
+ "Unexpected non-space characters. Expected end of file.",
+ "expected-eof-but-got-start-tag":
+ "Unexpected start tag (%(name)s)"
+ ". Expected end of file.",
+ "expected-eof-but-got-end-tag":
+ "Unexpected end tag (%(name)s)"
+ ". Expected end of file.",
+ "eof-in-table":
+ "Unexpected end of file. Expected table content.",
+ "eof-in-select":
+ "Unexpected end of file. Expected select content.",
+ "eof-in-frameset":
+ "Unexpected end of file. Expected frameset content.",
+ "eof-in-script-in-script":
+ "Unexpected end of file. Expected script content.",
+ "eof-in-foreign-lands":
+ "Unexpected end of file. Expected foreign content",
+ "non-void-element-with-trailing-solidus":
+ "Trailing solidus not allowed on element %(name)s",
+ "unexpected-html-element-in-foreign-content":
+ "Element %(name)s not allowed in a non-html context",
+ "unexpected-end-tag-before-html":
+ "Unexpected end tag (%(name)s) before html.",
+ "unexpected-inhead-noscript-tag":
+ "Element %(name)s not allowed in a inhead-noscript context",
+ "eof-in-head-noscript":
+ "Unexpected end of file. Expected inhead-noscript content",
+ "char-in-head-noscript":
+ "Unexpected non-space character. Expected inhead-noscript content",
+ "XXX-undefined-error":
+ "Undefined error (this sucks and should be fixed)",
+}
+
+namespaces = {
+ "html": "http://www.w3.org/1999/xhtml",
+ "mathml": "http://www.w3.org/1998/Math/MathML",
+ "svg": "http://www.w3.org/2000/svg",
+ "xlink": "http://www.w3.org/1999/xlink",
+ "xml": "http://www.w3.org/XML/1998/namespace",
+ "xmlns": "http://www.w3.org/2000/xmlns/"
+}
+
+scopingElements = frozenset([
+ (namespaces["html"], "applet"),
+ (namespaces["html"], "caption"),
+ (namespaces["html"], "html"),
+ (namespaces["html"], "marquee"),
+ (namespaces["html"], "object"),
+ (namespaces["html"], "table"),
+ (namespaces["html"], "td"),
+ (namespaces["html"], "th"),
+ (namespaces["mathml"], "mi"),
+ (namespaces["mathml"], "mo"),
+ (namespaces["mathml"], "mn"),
+ (namespaces["mathml"], "ms"),
+ (namespaces["mathml"], "mtext"),
+ (namespaces["mathml"], "annotation-xml"),
+ (namespaces["svg"], "foreignObject"),
+ (namespaces["svg"], "desc"),
+ (namespaces["svg"], "title"),
+])
+
+formattingElements = frozenset([
+ (namespaces["html"], "a"),
+ (namespaces["html"], "b"),
+ (namespaces["html"], "big"),
+ (namespaces["html"], "code"),
+ (namespaces["html"], "em"),
+ (namespaces["html"], "font"),
+ (namespaces["html"], "i"),
+ (namespaces["html"], "nobr"),
+ (namespaces["html"], "s"),
+ (namespaces["html"], "small"),
+ (namespaces["html"], "strike"),
+ (namespaces["html"], "strong"),
+ (namespaces["html"], "tt"),
+ (namespaces["html"], "u")
+])
+
+specialElements = frozenset([
+ (namespaces["html"], "address"),
+ (namespaces["html"], "applet"),
+ (namespaces["html"], "area"),
+ (namespaces["html"], "article"),
+ (namespaces["html"], "aside"),
+ (namespaces["html"], "base"),
+ (namespaces["html"], "basefont"),
+ (namespaces["html"], "bgsound"),
+ (namespaces["html"], "blockquote"),
+ (namespaces["html"], "body"),
+ (namespaces["html"], "br"),
+ (namespaces["html"], "button"),
+ (namespaces["html"], "caption"),
+ (namespaces["html"], "center"),
+ (namespaces["html"], "col"),
+ (namespaces["html"], "colgroup"),
+ (namespaces["html"], "command"),
+ (namespaces["html"], "dd"),
+ (namespaces["html"], "details"),
+ (namespaces["html"], "dir"),
+ (namespaces["html"], "div"),
+ (namespaces["html"], "dl"),
+ (namespaces["html"], "dt"),
+ (namespaces["html"], "embed"),
+ (namespaces["html"], "fieldset"),
+ (namespaces["html"], "figure"),
+ (namespaces["html"], "footer"),
+ (namespaces["html"], "form"),
+ (namespaces["html"], "frame"),
+ (namespaces["html"], "frameset"),
+ (namespaces["html"], "h1"),
+ (namespaces["html"], "h2"),
+ (namespaces["html"], "h3"),
+ (namespaces["html"], "h4"),
+ (namespaces["html"], "h5"),
+ (namespaces["html"], "h6"),
+ (namespaces["html"], "head"),
+ (namespaces["html"], "header"),
+ (namespaces["html"], "hr"),
+ (namespaces["html"], "html"),
+ (namespaces["html"], "iframe"),
+ # Note that image is commented out in the spec as "this isn't an
+ # element that can end up on the stack, so it doesn't matter,"
+ (namespaces["html"], "image"),
+ (namespaces["html"], "img"),
+ (namespaces["html"], "input"),
+ (namespaces["html"], "isindex"),
+ (namespaces["html"], "li"),
+ (namespaces["html"], "link"),
+ (namespaces["html"], "listing"),
+ (namespaces["html"], "marquee"),
+ (namespaces["html"], "menu"),
+ (namespaces["html"], "meta"),
+ (namespaces["html"], "nav"),
+ (namespaces["html"], "noembed"),
+ (namespaces["html"], "noframes"),
+ (namespaces["html"], "noscript"),
+ (namespaces["html"], "object"),
+ (namespaces["html"], "ol"),
+ (namespaces["html"], "p"),
+ (namespaces["html"], "param"),
+ (namespaces["html"], "plaintext"),
+ (namespaces["html"], "pre"),
+ (namespaces["html"], "script"),
+ (namespaces["html"], "section"),
+ (namespaces["html"], "select"),
+ (namespaces["html"], "style"),
+ (namespaces["html"], "table"),
+ (namespaces["html"], "tbody"),
+ (namespaces["html"], "td"),
+ (namespaces["html"], "textarea"),
+ (namespaces["html"], "tfoot"),
+ (namespaces["html"], "th"),
+ (namespaces["html"], "thead"),
+ (namespaces["html"], "title"),
+ (namespaces["html"], "tr"),
+ (namespaces["html"], "ul"),
+ (namespaces["html"], "wbr"),
+ (namespaces["html"], "xmp"),
+ (namespaces["svg"], "foreignObject")
+])
+
+htmlIntegrationPointElements = frozenset([
+ (namespaces["mathml"], "annotation-xml"),
+ (namespaces["svg"], "foreignObject"),
+ (namespaces["svg"], "desc"),
+ (namespaces["svg"], "title")
+])
+
+mathmlTextIntegrationPointElements = frozenset([
+ (namespaces["mathml"], "mi"),
+ (namespaces["mathml"], "mo"),
+ (namespaces["mathml"], "mn"),
+ (namespaces["mathml"], "ms"),
+ (namespaces["mathml"], "mtext")
+])
+
+adjustSVGAttributes = {
+ "attributename": "attributeName",
+ "attributetype": "attributeType",
+ "basefrequency": "baseFrequency",
+ "baseprofile": "baseProfile",
+ "calcmode": "calcMode",
+ "clippathunits": "clipPathUnits",
+ "contentscripttype": "contentScriptType",
+ "contentstyletype": "contentStyleType",
+ "diffuseconstant": "diffuseConstant",
+ "edgemode": "edgeMode",
+ "externalresourcesrequired": "externalResourcesRequired",
+ "filterres": "filterRes",
+ "filterunits": "filterUnits",
+ "glyphref": "glyphRef",
+ "gradienttransform": "gradientTransform",
+ "gradientunits": "gradientUnits",
+ "kernelmatrix": "kernelMatrix",
+ "kernelunitlength": "kernelUnitLength",
+ "keypoints": "keyPoints",
+ "keysplines": "keySplines",
+ "keytimes": "keyTimes",
+ "lengthadjust": "lengthAdjust",
+ "limitingconeangle": "limitingConeAngle",
+ "markerheight": "markerHeight",
+ "markerunits": "markerUnits",
+ "markerwidth": "markerWidth",
+ "maskcontentunits": "maskContentUnits",
+ "maskunits": "maskUnits",
+ "numoctaves": "numOctaves",
+ "pathlength": "pathLength",
+ "patterncontentunits": "patternContentUnits",
+ "patterntransform": "patternTransform",
+ "patternunits": "patternUnits",
+ "pointsatx": "pointsAtX",
+ "pointsaty": "pointsAtY",
+ "pointsatz": "pointsAtZ",
+ "preservealpha": "preserveAlpha",
+ "preserveaspectratio": "preserveAspectRatio",
+ "primitiveunits": "primitiveUnits",
+ "refx": "refX",
+ "refy": "refY",
+ "repeatcount": "repeatCount",
+ "repeatdur": "repeatDur",
+ "requiredextensions": "requiredExtensions",
+ "requiredfeatures": "requiredFeatures",
+ "specularconstant": "specularConstant",
+ "specularexponent": "specularExponent",
+ "spreadmethod": "spreadMethod",
+ "startoffset": "startOffset",
+ "stddeviation": "stdDeviation",
+ "stitchtiles": "stitchTiles",
+ "surfacescale": "surfaceScale",
+ "systemlanguage": "systemLanguage",
+ "tablevalues": "tableValues",
+ "targetx": "targetX",
+ "targety": "targetY",
+ "textlength": "textLength",
+ "viewbox": "viewBox",
+ "viewtarget": "viewTarget",
+ "xchannelselector": "xChannelSelector",
+ "ychannelselector": "yChannelSelector",
+ "zoomandpan": "zoomAndPan"
+}
+
+adjustMathMLAttributes = {"definitionurl": "definitionURL"}
+
+adjustForeignAttributes = {
+ "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]),
+ "xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]),
+ "xlink:href": ("xlink", "href", namespaces["xlink"]),
+ "xlink:role": ("xlink", "role", namespaces["xlink"]),
+ "xlink:show": ("xlink", "show", namespaces["xlink"]),
+ "xlink:title": ("xlink", "title", namespaces["xlink"]),
+ "xlink:type": ("xlink", "type", namespaces["xlink"]),
+ "xml:base": ("xml", "base", namespaces["xml"]),
+ "xml:lang": ("xml", "lang", namespaces["xml"]),
+ "xml:space": ("xml", "space", namespaces["xml"]),
+ "xmlns": (None, "xmlns", namespaces["xmlns"]),
+ "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"])
+}
+
+unadjustForeignAttributes = {(ns, local): qname for qname, (prefix, local, ns) in
+ adjustForeignAttributes.items()}
+
+spaceCharacters = frozenset([
+ "\t",
+ "\n",
+ "\u000C",
+ " ",
+ "\r"
+])
+
+tableInsertModeElements = frozenset([
+ "table",
+ "tbody",
+ "tfoot",
+ "thead",
+ "tr"
+])
+
+asciiLowercase = frozenset(string.ascii_lowercase)
+asciiUppercase = frozenset(string.ascii_uppercase)
+asciiLetters = frozenset(string.ascii_letters)
+digits = frozenset(string.digits)
+hexDigits = frozenset(string.hexdigits)
+
+asciiUpper2Lower = {ord(c): ord(c.lower()) for c in string.ascii_uppercase}
+
+# Heading elements need to be ordered
+headingElements = (
+ "h1",
+ "h2",
+ "h3",
+ "h4",
+ "h5",
+ "h6"
+)
+
+voidElements = frozenset([
+ "base",
+ "command",
+ "event-source",
+ "link",
+ "meta",
+ "hr",
+ "br",
+ "img",
+ "embed",
+ "param",
+ "area",
+ "col",
+ "input",
+ "source",
+ "track"
+])
+
+cdataElements = frozenset(['title', 'textarea'])
+
+rcdataElements = frozenset([
+ 'style',
+ 'script',
+ 'xmp',
+ 'iframe',
+ 'noembed',
+ 'noframes',
+ 'noscript'
+])
+
+booleanAttributes = {
+ "": frozenset(["irrelevant", "itemscope"]),
+ "style": frozenset(["scoped"]),
+ "img": frozenset(["ismap"]),
+ "audio": frozenset(["autoplay", "controls"]),
+ "video": frozenset(["autoplay", "controls"]),
+ "script": frozenset(["defer", "async"]),
+ "details": frozenset(["open"]),
+ "datagrid": frozenset(["multiple", "disabled"]),
+ "command": frozenset(["hidden", "disabled", "checked", "default"]),
+ "hr": frozenset(["noshade"]),
+ "menu": frozenset(["autosubmit"]),
+ "fieldset": frozenset(["disabled", "readonly"]),
+ "option": frozenset(["disabled", "readonly", "selected"]),
+ "optgroup": frozenset(["disabled", "readonly"]),
+ "button": frozenset(["disabled", "autofocus"]),
+ "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]),
+ "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]),
+ "output": frozenset(["disabled", "readonly"]),
+ "iframe": frozenset(["seamless"]),
+}
+
+# entitiesWindows1252 has to be _ordered_ and needs to have an index. It
+# therefore can't be a frozenset.
+entitiesWindows1252 = (
+ 8364, # 0x80 0x20AC EURO SIGN
+ 65533, # 0x81 UNDEFINED
+ 8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK
+ 402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK
+ 8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK
+ 8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS
+ 8224, # 0x86 0x2020 DAGGER
+ 8225, # 0x87 0x2021 DOUBLE DAGGER
+ 710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT
+ 8240, # 0x89 0x2030 PER MILLE SIGN
+ 352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON
+ 8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+ 338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE
+ 65533, # 0x8D UNDEFINED
+ 381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON
+ 65533, # 0x8F UNDEFINED
+ 65533, # 0x90 UNDEFINED
+ 8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK
+ 8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK
+ 8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK
+ 8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK
+ 8226, # 0x95 0x2022 BULLET
+ 8211, # 0x96 0x2013 EN DASH
+ 8212, # 0x97 0x2014 EM DASH
+ 732, # 0x98 0x02DC SMALL TILDE
+ 8482, # 0x99 0x2122 TRADE MARK SIGN
+ 353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON
+ 8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+ 339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE
+ 65533, # 0x9D UNDEFINED
+ 382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON
+ 376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS
+)
+
+xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;'])
+
+entities = {
+ "AElig": "\xc6",
+ "AElig;": "\xc6",
+ "AMP": "&",
+ "AMP;": "&",
+ "Aacute": "\xc1",
+ "Aacute;": "\xc1",
+ "Abreve;": "\u0102",
+ "Acirc": "\xc2",
+ "Acirc;": "\xc2",
+ "Acy;": "\u0410",
+ "Afr;": "\U0001d504",
+ "Agrave": "\xc0",
+ "Agrave;": "\xc0",
+ "Alpha;": "\u0391",
+ "Amacr;": "\u0100",
+ "And;": "\u2a53",
+ "Aogon;": "\u0104",
+ "Aopf;": "\U0001d538",
+ "ApplyFunction;": "\u2061",
+ "Aring": "\xc5",
+ "Aring;": "\xc5",
+ "Ascr;": "\U0001d49c",
+ "Assign;": "\u2254",
+ "Atilde": "\xc3",
+ "Atilde;": "\xc3",
+ "Auml": "\xc4",
+ "Auml;": "\xc4",
+ "Backslash;": "\u2216",
+ "Barv;": "\u2ae7",
+ "Barwed;": "\u2306",
+ "Bcy;": "\u0411",
+ "Because;": "\u2235",
+ "Bernoullis;": "\u212c",
+ "Beta;": "\u0392",
+ "Bfr;": "\U0001d505",
+ "Bopf;": "\U0001d539",
+ "Breve;": "\u02d8",
+ "Bscr;": "\u212c",
+ "Bumpeq;": "\u224e",
+ "CHcy;": "\u0427",
+ "COPY": "\xa9",
+ "COPY;": "\xa9",
+ "Cacute;": "\u0106",
+ "Cap;": "\u22d2",
+ "CapitalDifferentialD;": "\u2145",
+ "Cayleys;": "\u212d",
+ "Ccaron;": "\u010c",
+ "Ccedil": "\xc7",
+ "Ccedil;": "\xc7",
+ "Ccirc;": "\u0108",
+ "Cconint;": "\u2230",
+ "Cdot;": "\u010a",
+ "Cedilla;": "\xb8",
+ "CenterDot;": "\xb7",
+ "Cfr;": "\u212d",
+ "Chi;": "\u03a7",
+ "CircleDot;": "\u2299",
+ "CircleMinus;": "\u2296",
+ "CirclePlus;": "\u2295",
+ "CircleTimes;": "\u2297",
+ "ClockwiseContourIntegral;": "\u2232",
+ "CloseCurlyDoubleQuote;": "\u201d",
+ "CloseCurlyQuote;": "\u2019",
+ "Colon;": "\u2237",
+ "Colone;": "\u2a74",
+ "Congruent;": "\u2261",
+ "Conint;": "\u222f",
+ "ContourIntegral;": "\u222e",
+ "Copf;": "\u2102",
+ "Coproduct;": "\u2210",
+ "CounterClockwiseContourIntegral;": "\u2233",
+ "Cross;": "\u2a2f",
+ "Cscr;": "\U0001d49e",
+ "Cup;": "\u22d3",
+ "CupCap;": "\u224d",
+ "DD;": "\u2145",
+ "DDotrahd;": "\u2911",
+ "DJcy;": "\u0402",
+ "DScy;": "\u0405",
+ "DZcy;": "\u040f",
+ "Dagger;": "\u2021",
+ "Darr;": "\u21a1",
+ "Dashv;": "\u2ae4",
+ "Dcaron;": "\u010e",
+ "Dcy;": "\u0414",
+ "Del;": "\u2207",
+ "Delta;": "\u0394",
+ "Dfr;": "\U0001d507",
+ "DiacriticalAcute;": "\xb4",
+ "DiacriticalDot;": "\u02d9",
+ "DiacriticalDoubleAcute;": "\u02dd",
+ "DiacriticalGrave;": "`",
+ "DiacriticalTilde;": "\u02dc",
+ "Diamond;": "\u22c4",
+ "DifferentialD;": "\u2146",
+ "Dopf;": "\U0001d53b",
+ "Dot;": "\xa8",
+ "DotDot;": "\u20dc",
+ "DotEqual;": "\u2250",
+ "DoubleContourIntegral;": "\u222f",
+ "DoubleDot;": "\xa8",
+ "DoubleDownArrow;": "\u21d3",
+ "DoubleLeftArrow;": "\u21d0",
+ "DoubleLeftRightArrow;": "\u21d4",
+ "DoubleLeftTee;": "\u2ae4",
+ "DoubleLongLeftArrow;": "\u27f8",
+ "DoubleLongLeftRightArrow;": "\u27fa",
+ "DoubleLongRightArrow;": "\u27f9",
+ "DoubleRightArrow;": "\u21d2",
+ "DoubleRightTee;": "\u22a8",
+ "DoubleUpArrow;": "\u21d1",
+ "DoubleUpDownArrow;": "\u21d5",
+ "DoubleVerticalBar;": "\u2225",
+ "DownArrow;": "\u2193",
+ "DownArrowBar;": "\u2913",
+ "DownArrowUpArrow;": "\u21f5",
+ "DownBreve;": "\u0311",
+ "DownLeftRightVector;": "\u2950",
+ "DownLeftTeeVector;": "\u295e",
+ "DownLeftVector;": "\u21bd",
+ "DownLeftVectorBar;": "\u2956",
+ "DownRightTeeVector;": "\u295f",
+ "DownRightVector;": "\u21c1",
+ "DownRightVectorBar;": "\u2957",
+ "DownTee;": "\u22a4",
+ "DownTeeArrow;": "\u21a7",
+ "Downarrow;": "\u21d3",
+ "Dscr;": "\U0001d49f",
+ "Dstrok;": "\u0110",
+ "ENG;": "\u014a",
+ "ETH": "\xd0",
+ "ETH;": "\xd0",
+ "Eacute": "\xc9",
+ "Eacute;": "\xc9",
+ "Ecaron;": "\u011a",
+ "Ecirc": "\xca",
+ "Ecirc;": "\xca",
+ "Ecy;": "\u042d",
+ "Edot;": "\u0116",
+ "Efr;": "\U0001d508",
+ "Egrave": "\xc8",
+ "Egrave;": "\xc8",
+ "Element;": "\u2208",
+ "Emacr;": "\u0112",
+ "EmptySmallSquare;": "\u25fb",
+ "EmptyVerySmallSquare;": "\u25ab",
+ "Eogon;": "\u0118",
+ "Eopf;": "\U0001d53c",
+ "Epsilon;": "\u0395",
+ "Equal;": "\u2a75",
+ "EqualTilde;": "\u2242",
+ "Equilibrium;": "\u21cc",
+ "Escr;": "\u2130",
+ "Esim;": "\u2a73",
+ "Eta;": "\u0397",
+ "Euml": "\xcb",
+ "Euml;": "\xcb",
+ "Exists;": "\u2203",
+ "ExponentialE;": "\u2147",
+ "Fcy;": "\u0424",
+ "Ffr;": "\U0001d509",
+ "FilledSmallSquare;": "\u25fc",
+ "FilledVerySmallSquare;": "\u25aa",
+ "Fopf;": "\U0001d53d",
+ "ForAll;": "\u2200",
+ "Fouriertrf;": "\u2131",
+ "Fscr;": "\u2131",
+ "GJcy;": "\u0403",
+ "GT": ">",
+ "GT;": ">",
+ "Gamma;": "\u0393",
+ "Gammad;": "\u03dc",
+ "Gbreve;": "\u011e",
+ "Gcedil;": "\u0122",
+ "Gcirc;": "\u011c",
+ "Gcy;": "\u0413",
+ "Gdot;": "\u0120",
+ "Gfr;": "\U0001d50a",
+ "Gg;": "\u22d9",
+ "Gopf;": "\U0001d53e",
+ "GreaterEqual;": "\u2265",
+ "GreaterEqualLess;": "\u22db",
+ "GreaterFullEqual;": "\u2267",
+ "GreaterGreater;": "\u2aa2",
+ "GreaterLess;": "\u2277",
+ "GreaterSlantEqual;": "\u2a7e",
+ "GreaterTilde;": "\u2273",
+ "Gscr;": "\U0001d4a2",
+ "Gt;": "\u226b",
+ "HARDcy;": "\u042a",
+ "Hacek;": "\u02c7",
+ "Hat;": "^",
+ "Hcirc;": "\u0124",
+ "Hfr;": "\u210c",
+ "HilbertSpace;": "\u210b",
+ "Hopf;": "\u210d",
+ "HorizontalLine;": "\u2500",
+ "Hscr;": "\u210b",
+ "Hstrok;": "\u0126",
+ "HumpDownHump;": "\u224e",
+ "HumpEqual;": "\u224f",
+ "IEcy;": "\u0415",
+ "IJlig;": "\u0132",
+ "IOcy;": "\u0401",
+ "Iacute": "\xcd",
+ "Iacute;": "\xcd",
+ "Icirc": "\xce",
+ "Icirc;": "\xce",
+ "Icy;": "\u0418",
+ "Idot;": "\u0130",
+ "Ifr;": "\u2111",
+ "Igrave": "\xcc",
+ "Igrave;": "\xcc",
+ "Im;": "\u2111",
+ "Imacr;": "\u012a",
+ "ImaginaryI;": "\u2148",
+ "Implies;": "\u21d2",
+ "Int;": "\u222c",
+ "Integral;": "\u222b",
+ "Intersection;": "\u22c2",
+ "InvisibleComma;": "\u2063",
+ "InvisibleTimes;": "\u2062",
+ "Iogon;": "\u012e",
+ "Iopf;": "\U0001d540",
+ "Iota;": "\u0399",
+ "Iscr;": "\u2110",
+ "Itilde;": "\u0128",
+ "Iukcy;": "\u0406",
+ "Iuml": "\xcf",
+ "Iuml;": "\xcf",
+ "Jcirc;": "\u0134",
+ "Jcy;": "\u0419",
+ "Jfr;": "\U0001d50d",
+ "Jopf;": "\U0001d541",
+ "Jscr;": "\U0001d4a5",
+ "Jsercy;": "\u0408",
+ "Jukcy;": "\u0404",
+ "KHcy;": "\u0425",
+ "KJcy;": "\u040c",
+ "Kappa;": "\u039a",
+ "Kcedil;": "\u0136",
+ "Kcy;": "\u041a",
+ "Kfr;": "\U0001d50e",
+ "Kopf;": "\U0001d542",
+ "Kscr;": "\U0001d4a6",
+ "LJcy;": "\u0409",
+ "LT": "<",
+ "LT;": "<",
+ "Lacute;": "\u0139",
+ "Lambda;": "\u039b",
+ "Lang;": "\u27ea",
+ "Laplacetrf;": "\u2112",
+ "Larr;": "\u219e",
+ "Lcaron;": "\u013d",
+ "Lcedil;": "\u013b",
+ "Lcy;": "\u041b",
+ "LeftAngleBracket;": "\u27e8",
+ "LeftArrow;": "\u2190",
+ "LeftArrowBar;": "\u21e4",
+ "LeftArrowRightArrow;": "\u21c6",
+ "LeftCeiling;": "\u2308",
+ "LeftDoubleBracket;": "\u27e6",
+ "LeftDownTeeVector;": "\u2961",
+ "LeftDownVector;": "\u21c3",
+ "LeftDownVectorBar;": "\u2959",
+ "LeftFloor;": "\u230a",
+ "LeftRightArrow;": "\u2194",
+ "LeftRightVector;": "\u294e",
+ "LeftTee;": "\u22a3",
+ "LeftTeeArrow;": "\u21a4",
+ "LeftTeeVector;": "\u295a",
+ "LeftTriangle;": "\u22b2",
+ "LeftTriangleBar;": "\u29cf",
+ "LeftTriangleEqual;": "\u22b4",
+ "LeftUpDownVector;": "\u2951",
+ "LeftUpTeeVector;": "\u2960",
+ "LeftUpVector;": "\u21bf",
+ "LeftUpVectorBar;": "\u2958",
+ "LeftVector;": "\u21bc",
+ "LeftVectorBar;": "\u2952",
+ "Leftarrow;": "\u21d0",
+ "Leftrightarrow;": "\u21d4",
+ "LessEqualGreater;": "\u22da",
+ "LessFullEqual;": "\u2266",
+ "LessGreater;": "\u2276",
+ "LessLess;": "\u2aa1",
+ "LessSlantEqual;": "\u2a7d",
+ "LessTilde;": "\u2272",
+ "Lfr;": "\U0001d50f",
+ "Ll;": "\u22d8",
+ "Lleftarrow;": "\u21da",
+ "Lmidot;": "\u013f",
+ "LongLeftArrow;": "\u27f5",
+ "LongLeftRightArrow;": "\u27f7",
+ "LongRightArrow;": "\u27f6",
+ "Longleftarrow;": "\u27f8",
+ "Longleftrightarrow;": "\u27fa",
+ "Longrightarrow;": "\u27f9",
+ "Lopf;": "\U0001d543",
+ "LowerLeftArrow;": "\u2199",
+ "LowerRightArrow;": "\u2198",
+ "Lscr;": "\u2112",
+ "Lsh;": "\u21b0",
+ "Lstrok;": "\u0141",
+ "Lt;": "\u226a",
+ "Map;": "\u2905",
+ "Mcy;": "\u041c",
+ "MediumSpace;": "\u205f",
+ "Mellintrf;": "\u2133",
+ "Mfr;": "\U0001d510",
+ "MinusPlus;": "\u2213",
+ "Mopf;": "\U0001d544",
+ "Mscr;": "\u2133",
+ "Mu;": "\u039c",
+ "NJcy;": "\u040a",
+ "Nacute;": "\u0143",
+ "Ncaron;": "\u0147",
+ "Ncedil;": "\u0145",
+ "Ncy;": "\u041d",
+ "NegativeMediumSpace;": "\u200b",
+ "NegativeThickSpace;": "\u200b",
+ "NegativeThinSpace;": "\u200b",
+ "NegativeVeryThinSpace;": "\u200b",
+ "NestedGreaterGreater;": "\u226b",
+ "NestedLessLess;": "\u226a",
+ "NewLine;": "\n",
+ "Nfr;": "\U0001d511",
+ "NoBreak;": "\u2060",
+ "NonBreakingSpace;": "\xa0",
+ "Nopf;": "\u2115",
+ "Not;": "\u2aec",
+ "NotCongruent;": "\u2262",
+ "NotCupCap;": "\u226d",
+ "NotDoubleVerticalBar;": "\u2226",
+ "NotElement;": "\u2209",
+ "NotEqual;": "\u2260",
+ "NotEqualTilde;": "\u2242\u0338",
+ "NotExists;": "\u2204",
+ "NotGreater;": "\u226f",
+ "NotGreaterEqual;": "\u2271",
+ "NotGreaterFullEqual;": "\u2267\u0338",
+ "NotGreaterGreater;": "\u226b\u0338",
+ "NotGreaterLess;": "\u2279",
+ "NotGreaterSlantEqual;": "\u2a7e\u0338",
+ "NotGreaterTilde;": "\u2275",
+ "NotHumpDownHump;": "\u224e\u0338",
+ "NotHumpEqual;": "\u224f\u0338",
+ "NotLeftTriangle;": "\u22ea",
+ "NotLeftTriangleBar;": "\u29cf\u0338",
+ "NotLeftTriangleEqual;": "\u22ec",
+ "NotLess;": "\u226e",
+ "NotLessEqual;": "\u2270",
+ "NotLessGreater;": "\u2278",
+ "NotLessLess;": "\u226a\u0338",
+ "NotLessSlantEqual;": "\u2a7d\u0338",
+ "NotLessTilde;": "\u2274",
+ "NotNestedGreaterGreater;": "\u2aa2\u0338",
+ "NotNestedLessLess;": "\u2aa1\u0338",
+ "NotPrecedes;": "\u2280",
+ "NotPrecedesEqual;": "\u2aaf\u0338",
+ "NotPrecedesSlantEqual;": "\u22e0",
+ "NotReverseElement;": "\u220c",
+ "NotRightTriangle;": "\u22eb",
+ "NotRightTriangleBar;": "\u29d0\u0338",
+ "NotRightTriangleEqual;": "\u22ed",
+ "NotSquareSubset;": "\u228f\u0338",
+ "NotSquareSubsetEqual;": "\u22e2",
+ "NotSquareSuperset;": "\u2290\u0338",
+ "NotSquareSupersetEqual;": "\u22e3",
+ "NotSubset;": "\u2282\u20d2",
+ "NotSubsetEqual;": "\u2288",
+ "NotSucceeds;": "\u2281",
+ "NotSucceedsEqual;": "\u2ab0\u0338",
+ "NotSucceedsSlantEqual;": "\u22e1",
+ "NotSucceedsTilde;": "\u227f\u0338",
+ "NotSuperset;": "\u2283\u20d2",
+ "NotSupersetEqual;": "\u2289",
+ "NotTilde;": "\u2241",
+ "NotTildeEqual;": "\u2244",
+ "NotTildeFullEqual;": "\u2247",
+ "NotTildeTilde;": "\u2249",
+ "NotVerticalBar;": "\u2224",
+ "Nscr;": "\U0001d4a9",
+ "Ntilde": "\xd1",
+ "Ntilde;": "\xd1",
+ "Nu;": "\u039d",
+ "OElig;": "\u0152",
+ "Oacute": "\xd3",
+ "Oacute;": "\xd3",
+ "Ocirc": "\xd4",
+ "Ocirc;": "\xd4",
+ "Ocy;": "\u041e",
+ "Odblac;": "\u0150",
+ "Ofr;": "\U0001d512",
+ "Ograve": "\xd2",
+ "Ograve;": "\xd2",
+ "Omacr;": "\u014c",
+ "Omega;": "\u03a9",
+ "Omicron;": "\u039f",
+ "Oopf;": "\U0001d546",
+ "OpenCurlyDoubleQuote;": "\u201c",
+ "OpenCurlyQuote;": "\u2018",
+ "Or;": "\u2a54",
+ "Oscr;": "\U0001d4aa",
+ "Oslash": "\xd8",
+ "Oslash;": "\xd8",
+ "Otilde": "\xd5",
+ "Otilde;": "\xd5",
+ "Otimes;": "\u2a37",
+ "Ouml": "\xd6",
+ "Ouml;": "\xd6",
+ "OverBar;": "\u203e",
+ "OverBrace;": "\u23de",
+ "OverBracket;": "\u23b4",
+ "OverParenthesis;": "\u23dc",
+ "PartialD;": "\u2202",
+ "Pcy;": "\u041f",
+ "Pfr;": "\U0001d513",
+ "Phi;": "\u03a6",
+ "Pi;": "\u03a0",
+ "PlusMinus;": "\xb1",
+ "Poincareplane;": "\u210c",
+ "Popf;": "\u2119",
+ "Pr;": "\u2abb",
+ "Precedes;": "\u227a",
+ "PrecedesEqual;": "\u2aaf",
+ "PrecedesSlantEqual;": "\u227c",
+ "PrecedesTilde;": "\u227e",
+ "Prime;": "\u2033",
+ "Product;": "\u220f",
+ "Proportion;": "\u2237",
+ "Proportional;": "\u221d",
+ "Pscr;": "\U0001d4ab",
+ "Psi;": "\u03a8",
+ "QUOT": "\"",
+ "QUOT;": "\"",
+ "Qfr;": "\U0001d514",
+ "Qopf;": "\u211a",
+ "Qscr;": "\U0001d4ac",
+ "RBarr;": "\u2910",
+ "REG": "\xae",
+ "REG;": "\xae",
+ "Racute;": "\u0154",
+ "Rang;": "\u27eb",
+ "Rarr;": "\u21a0",
+ "Rarrtl;": "\u2916",
+ "Rcaron;": "\u0158",
+ "Rcedil;": "\u0156",
+ "Rcy;": "\u0420",
+ "Re;": "\u211c",
+ "ReverseElement;": "\u220b",
+ "ReverseEquilibrium;": "\u21cb",
+ "ReverseUpEquilibrium;": "\u296f",
+ "Rfr;": "\u211c",
+ "Rho;": "\u03a1",
+ "RightAngleBracket;": "\u27e9",
+ "RightArrow;": "\u2192",
+ "RightArrowBar;": "\u21e5",
+ "RightArrowLeftArrow;": "\u21c4",
+ "RightCeiling;": "\u2309",
+ "RightDoubleBracket;": "\u27e7",
+ "RightDownTeeVector;": "\u295d",
+ "RightDownVector;": "\u21c2",
+ "RightDownVectorBar;": "\u2955",
+ "RightFloor;": "\u230b",
+ "RightTee;": "\u22a2",
+ "RightTeeArrow;": "\u21a6",
+ "RightTeeVector;": "\u295b",
+ "RightTriangle;": "\u22b3",
+ "RightTriangleBar;": "\u29d0",
+ "RightTriangleEqual;": "\u22b5",
+ "RightUpDownVector;": "\u294f",
+ "RightUpTeeVector;": "\u295c",
+ "RightUpVector;": "\u21be",
+ "RightUpVectorBar;": "\u2954",
+ "RightVector;": "\u21c0",
+ "RightVectorBar;": "\u2953",
+ "Rightarrow;": "\u21d2",
+ "Ropf;": "\u211d",
+ "RoundImplies;": "\u2970",
+ "Rrightarrow;": "\u21db",
+ "Rscr;": "\u211b",
+ "Rsh;": "\u21b1",
+ "RuleDelayed;": "\u29f4",
+ "SHCHcy;": "\u0429",
+ "SHcy;": "\u0428",
+ "SOFTcy;": "\u042c",
+ "Sacute;": "\u015a",
+ "Sc;": "\u2abc",
+ "Scaron;": "\u0160",
+ "Scedil;": "\u015e",
+ "Scirc;": "\u015c",
+ "Scy;": "\u0421",
+ "Sfr;": "\U0001d516",
+ "ShortDownArrow;": "\u2193",
+ "ShortLeftArrow;": "\u2190",
+ "ShortRightArrow;": "\u2192",
+ "ShortUpArrow;": "\u2191",
+ "Sigma;": "\u03a3",
+ "SmallCircle;": "\u2218",
+ "Sopf;": "\U0001d54a",
+ "Sqrt;": "\u221a",
+ "Square;": "\u25a1",
+ "SquareIntersection;": "\u2293",
+ "SquareSubset;": "\u228f",
+ "SquareSubsetEqual;": "\u2291",
+ "SquareSuperset;": "\u2290",
+ "SquareSupersetEqual;": "\u2292",
+ "SquareUnion;": "\u2294",
+ "Sscr;": "\U0001d4ae",
+ "Star;": "\u22c6",
+ "Sub;": "\u22d0",
+ "Subset;": "\u22d0",
+ "SubsetEqual;": "\u2286",
+ "Succeeds;": "\u227b",
+ "SucceedsEqual;": "\u2ab0",
+ "SucceedsSlantEqual;": "\u227d",
+ "SucceedsTilde;": "\u227f",
+ "SuchThat;": "\u220b",
+ "Sum;": "\u2211",
+ "Sup;": "\u22d1",
+ "Superset;": "\u2283",
+ "SupersetEqual;": "\u2287",
+ "Supset;": "\u22d1",
+ "THORN": "\xde",
+ "THORN;": "\xde",
+ "TRADE;": "\u2122",
+ "TSHcy;": "\u040b",
+ "TScy;": "\u0426",
+ "Tab;": "\t",
+ "Tau;": "\u03a4",
+ "Tcaron;": "\u0164",
+ "Tcedil;": "\u0162",
+ "Tcy;": "\u0422",
+ "Tfr;": "\U0001d517",
+ "Therefore;": "\u2234",
+ "Theta;": "\u0398",
+ "ThickSpace;": "\u205f\u200a",
+ "ThinSpace;": "\u2009",
+ "Tilde;": "\u223c",
+ "TildeEqual;": "\u2243",
+ "TildeFullEqual;": "\u2245",
+ "TildeTilde;": "\u2248",
+ "Topf;": "\U0001d54b",
+ "TripleDot;": "\u20db",
+ "Tscr;": "\U0001d4af",
+ "Tstrok;": "\u0166",
+ "Uacute": "\xda",
+ "Uacute;": "\xda",
+ "Uarr;": "\u219f",
+ "Uarrocir;": "\u2949",
+ "Ubrcy;": "\u040e",
+ "Ubreve;": "\u016c",
+ "Ucirc": "\xdb",
+ "Ucirc;": "\xdb",
+ "Ucy;": "\u0423",
+ "Udblac;": "\u0170",
+ "Ufr;": "\U0001d518",
+ "Ugrave": "\xd9",
+ "Ugrave;": "\xd9",
+ "Umacr;": "\u016a",
+ "UnderBar;": "_",
+ "UnderBrace;": "\u23df",
+ "UnderBracket;": "\u23b5",
+ "UnderParenthesis;": "\u23dd",
+ "Union;": "\u22c3",
+ "UnionPlus;": "\u228e",
+ "Uogon;": "\u0172",
+ "Uopf;": "\U0001d54c",
+ "UpArrow;": "\u2191",
+ "UpArrowBar;": "\u2912",
+ "UpArrowDownArrow;": "\u21c5",
+ "UpDownArrow;": "\u2195",
+ "UpEquilibrium;": "\u296e",
+ "UpTee;": "\u22a5",
+ "UpTeeArrow;": "\u21a5",
+ "Uparrow;": "\u21d1",
+ "Updownarrow;": "\u21d5",
+ "UpperLeftArrow;": "\u2196",
+ "UpperRightArrow;": "\u2197",
+ "Upsi;": "\u03d2",
+ "Upsilon;": "\u03a5",
+ "Uring;": "\u016e",
+ "Uscr;": "\U0001d4b0",
+ "Utilde;": "\u0168",
+ "Uuml": "\xdc",
+ "Uuml;": "\xdc",
+ "VDash;": "\u22ab",
+ "Vbar;": "\u2aeb",
+ "Vcy;": "\u0412",
+ "Vdash;": "\u22a9",
+ "Vdashl;": "\u2ae6",
+ "Vee;": "\u22c1",
+ "Verbar;": "\u2016",
+ "Vert;": "\u2016",
+ "VerticalBar;": "\u2223",
+ "VerticalLine;": "|",
+ "VerticalSeparator;": "\u2758",
+ "VerticalTilde;": "\u2240",
+ "VeryThinSpace;": "\u200a",
+ "Vfr;": "\U0001d519",
+ "Vopf;": "\U0001d54d",
+ "Vscr;": "\U0001d4b1",
+ "Vvdash;": "\u22aa",
+ "Wcirc;": "\u0174",
+ "Wedge;": "\u22c0",
+ "Wfr;": "\U0001d51a",
+ "Wopf;": "\U0001d54e",
+ "Wscr;": "\U0001d4b2",
+ "Xfr;": "\U0001d51b",
+ "Xi;": "\u039e",
+ "Xopf;": "\U0001d54f",
+ "Xscr;": "\U0001d4b3",
+ "YAcy;": "\u042f",
+ "YIcy;": "\u0407",
+ "YUcy;": "\u042e",
+ "Yacute": "\xdd",
+ "Yacute;": "\xdd",
+ "Ycirc;": "\u0176",
+ "Ycy;": "\u042b",
+ "Yfr;": "\U0001d51c",
+ "Yopf;": "\U0001d550",
+ "Yscr;": "\U0001d4b4",
+ "Yuml;": "\u0178",
+ "ZHcy;": "\u0416",
+ "Zacute;": "\u0179",
+ "Zcaron;": "\u017d",
+ "Zcy;": "\u0417",
+ "Zdot;": "\u017b",
+ "ZeroWidthSpace;": "\u200b",
+ "Zeta;": "\u0396",
+ "Zfr;": "\u2128",
+ "Zopf;": "\u2124",
+ "Zscr;": "\U0001d4b5",
+ "aacute": "\xe1",
+ "aacute;": "\xe1",
+ "abreve;": "\u0103",
+ "ac;": "\u223e",
+ "acE;": "\u223e\u0333",
+ "acd;": "\u223f",
+ "acirc": "\xe2",
+ "acirc;": "\xe2",
+ "acute": "\xb4",
+ "acute;": "\xb4",
+ "acy;": "\u0430",
+ "aelig": "\xe6",
+ "aelig;": "\xe6",
+ "af;": "\u2061",
+ "afr;": "\U0001d51e",
+ "agrave": "\xe0",
+ "agrave;": "\xe0",
+ "alefsym;": "\u2135",
+ "aleph;": "\u2135",
+ "alpha;": "\u03b1",
+ "amacr;": "\u0101",
+ "amalg;": "\u2a3f",
+ "amp": "&",
+ "amp;": "&",
+ "and;": "\u2227",
+ "andand;": "\u2a55",
+ "andd;": "\u2a5c",
+ "andslope;": "\u2a58",
+ "andv;": "\u2a5a",
+ "ang;": "\u2220",
+ "ange;": "\u29a4",
+ "angle;": "\u2220",
+ "angmsd;": "\u2221",
+ "angmsdaa;": "\u29a8",
+ "angmsdab;": "\u29a9",
+ "angmsdac;": "\u29aa",
+ "angmsdad;": "\u29ab",
+ "angmsdae;": "\u29ac",
+ "angmsdaf;": "\u29ad",
+ "angmsdag;": "\u29ae",
+ "angmsdah;": "\u29af",
+ "angrt;": "\u221f",
+ "angrtvb;": "\u22be",
+ "angrtvbd;": "\u299d",
+ "angsph;": "\u2222",
+ "angst;": "\xc5",
+ "angzarr;": "\u237c",
+ "aogon;": "\u0105",
+ "aopf;": "\U0001d552",
+ "ap;": "\u2248",
+ "apE;": "\u2a70",
+ "apacir;": "\u2a6f",
+ "ape;": "\u224a",
+ "apid;": "\u224b",
+ "apos;": "'",
+ "approx;": "\u2248",
+ "approxeq;": "\u224a",
+ "aring": "\xe5",
+ "aring;": "\xe5",
+ "ascr;": "\U0001d4b6",
+ "ast;": "*",
+ "asymp;": "\u2248",
+ "asympeq;": "\u224d",
+ "atilde": "\xe3",
+ "atilde;": "\xe3",
+ "auml": "\xe4",
+ "auml;": "\xe4",
+ "awconint;": "\u2233",
+ "awint;": "\u2a11",
+ "bNot;": "\u2aed",
+ "backcong;": "\u224c",
+ "backepsilon;": "\u03f6",
+ "backprime;": "\u2035",
+ "backsim;": "\u223d",
+ "backsimeq;": "\u22cd",
+ "barvee;": "\u22bd",
+ "barwed;": "\u2305",
+ "barwedge;": "\u2305",
+ "bbrk;": "\u23b5",
+ "bbrktbrk;": "\u23b6",
+ "bcong;": "\u224c",
+ "bcy;": "\u0431",
+ "bdquo;": "\u201e",
+ "becaus;": "\u2235",
+ "because;": "\u2235",
+ "bemptyv;": "\u29b0",
+ "bepsi;": "\u03f6",
+ "bernou;": "\u212c",
+ "beta;": "\u03b2",
+ "beth;": "\u2136",
+ "between;": "\u226c",
+ "bfr;": "\U0001d51f",
+ "bigcap;": "\u22c2",
+ "bigcirc;": "\u25ef",
+ "bigcup;": "\u22c3",
+ "bigodot;": "\u2a00",
+ "bigoplus;": "\u2a01",
+ "bigotimes;": "\u2a02",
+ "bigsqcup;": "\u2a06",
+ "bigstar;": "\u2605",
+ "bigtriangledown;": "\u25bd",
+ "bigtriangleup;": "\u25b3",
+ "biguplus;": "\u2a04",
+ "bigvee;": "\u22c1",
+ "bigwedge;": "\u22c0",
+ "bkarow;": "\u290d",
+ "blacklozenge;": "\u29eb",
+ "blacksquare;": "\u25aa",
+ "blacktriangle;": "\u25b4",
+ "blacktriangledown;": "\u25be",
+ "blacktriangleleft;": "\u25c2",
+ "blacktriangleright;": "\u25b8",
+ "blank;": "\u2423",
+ "blk12;": "\u2592",
+ "blk14;": "\u2591",
+ "blk34;": "\u2593",
+ "block;": "\u2588",
+ "bne;": "=\u20e5",
+ "bnequiv;": "\u2261\u20e5",
+ "bnot;": "\u2310",
+ "bopf;": "\U0001d553",
+ "bot;": "\u22a5",
+ "bottom;": "\u22a5",
+ "bowtie;": "\u22c8",
+ "boxDL;": "\u2557",
+ "boxDR;": "\u2554",
+ "boxDl;": "\u2556",
+ "boxDr;": "\u2553",
+ "boxH;": "\u2550",
+ "boxHD;": "\u2566",
+ "boxHU;": "\u2569",
+ "boxHd;": "\u2564",
+ "boxHu;": "\u2567",
+ "boxUL;": "\u255d",
+ "boxUR;": "\u255a",
+ "boxUl;": "\u255c",
+ "boxUr;": "\u2559",
+ "boxV;": "\u2551",
+ "boxVH;": "\u256c",
+ "boxVL;": "\u2563",
+ "boxVR;": "\u2560",
+ "boxVh;": "\u256b",
+ "boxVl;": "\u2562",
+ "boxVr;": "\u255f",
+ "boxbox;": "\u29c9",
+ "boxdL;": "\u2555",
+ "boxdR;": "\u2552",
+ "boxdl;": "\u2510",
+ "boxdr;": "\u250c",
+ "boxh;": "\u2500",
+ "boxhD;": "\u2565",
+ "boxhU;": "\u2568",
+ "boxhd;": "\u252c",
+ "boxhu;": "\u2534",
+ "boxminus;": "\u229f",
+ "boxplus;": "\u229e",
+ "boxtimes;": "\u22a0",
+ "boxuL;": "\u255b",
+ "boxuR;": "\u2558",
+ "boxul;": "\u2518",
+ "boxur;": "\u2514",
+ "boxv;": "\u2502",
+ "boxvH;": "\u256a",
+ "boxvL;": "\u2561",
+ "boxvR;": "\u255e",
+ "boxvh;": "\u253c",
+ "boxvl;": "\u2524",
+ "boxvr;": "\u251c",
+ "bprime;": "\u2035",
+ "breve;": "\u02d8",
+ "brvbar": "\xa6",
+ "brvbar;": "\xa6",
+ "bscr;": "\U0001d4b7",
+ "bsemi;": "\u204f",
+ "bsim;": "\u223d",
+ "bsime;": "\u22cd",
+ "bsol;": "\\",
+ "bsolb;": "\u29c5",
+ "bsolhsub;": "\u27c8",
+ "bull;": "\u2022",
+ "bullet;": "\u2022",
+ "bump;": "\u224e",
+ "bumpE;": "\u2aae",
+ "bumpe;": "\u224f",
+ "bumpeq;": "\u224f",
+ "cacute;": "\u0107",
+ "cap;": "\u2229",
+ "capand;": "\u2a44",
+ "capbrcup;": "\u2a49",
+ "capcap;": "\u2a4b",
+ "capcup;": "\u2a47",
+ "capdot;": "\u2a40",
+ "caps;": "\u2229\ufe00",
+ "caret;": "\u2041",
+ "caron;": "\u02c7",
+ "ccaps;": "\u2a4d",
+ "ccaron;": "\u010d",
+ "ccedil": "\xe7",
+ "ccedil;": "\xe7",
+ "ccirc;": "\u0109",
+ "ccups;": "\u2a4c",
+ "ccupssm;": "\u2a50",
+ "cdot;": "\u010b",
+ "cedil": "\xb8",
+ "cedil;": "\xb8",
+ "cemptyv;": "\u29b2",
+ "cent": "\xa2",
+ "cent;": "\xa2",
+ "centerdot;": "\xb7",
+ "cfr;": "\U0001d520",
+ "chcy;": "\u0447",
+ "check;": "\u2713",
+ "checkmark;": "\u2713",
+ "chi;": "\u03c7",
+ "cir;": "\u25cb",
+ "cirE;": "\u29c3",
+ "circ;": "\u02c6",
+ "circeq;": "\u2257",
+ "circlearrowleft;": "\u21ba",
+ "circlearrowright;": "\u21bb",
+ "circledR;": "\xae",
+ "circledS;": "\u24c8",
+ "circledast;": "\u229b",
+ "circledcirc;": "\u229a",
+ "circleddash;": "\u229d",
+ "cire;": "\u2257",
+ "cirfnint;": "\u2a10",
+ "cirmid;": "\u2aef",
+ "cirscir;": "\u29c2",
+ "clubs;": "\u2663",
+ "clubsuit;": "\u2663",
+ "colon;": ":",
+ "colone;": "\u2254",
+ "coloneq;": "\u2254",
+ "comma;": ",",
+ "commat;": "@",
+ "comp;": "\u2201",
+ "compfn;": "\u2218",
+ "complement;": "\u2201",
+ "complexes;": "\u2102",
+ "cong;": "\u2245",
+ "congdot;": "\u2a6d",
+ "conint;": "\u222e",
+ "copf;": "\U0001d554",
+ "coprod;": "\u2210",
+ "copy": "\xa9",
+ "copy;": "\xa9",
+ "copysr;": "\u2117",
+ "crarr;": "\u21b5",
+ "cross;": "\u2717",
+ "cscr;": "\U0001d4b8",
+ "csub;": "\u2acf",
+ "csube;": "\u2ad1",
+ "csup;": "\u2ad0",
+ "csupe;": "\u2ad2",
+ "ctdot;": "\u22ef",
+ "cudarrl;": "\u2938",
+ "cudarrr;": "\u2935",
+ "cuepr;": "\u22de",
+ "cuesc;": "\u22df",
+ "cularr;": "\u21b6",
+ "cularrp;": "\u293d",
+ "cup;": "\u222a",
+ "cupbrcap;": "\u2a48",
+ "cupcap;": "\u2a46",
+ "cupcup;": "\u2a4a",
+ "cupdot;": "\u228d",
+ "cupor;": "\u2a45",
+ "cups;": "\u222a\ufe00",
+ "curarr;": "\u21b7",
+ "curarrm;": "\u293c",
+ "curlyeqprec;": "\u22de",
+ "curlyeqsucc;": "\u22df",
+ "curlyvee;": "\u22ce",
+ "curlywedge;": "\u22cf",
+ "curren": "\xa4",
+ "curren;": "\xa4",
+ "curvearrowleft;": "\u21b6",
+ "curvearrowright;": "\u21b7",
+ "cuvee;": "\u22ce",
+ "cuwed;": "\u22cf",
+ "cwconint;": "\u2232",
+ "cwint;": "\u2231",
+ "cylcty;": "\u232d",
+ "dArr;": "\u21d3",
+ "dHar;": "\u2965",
+ "dagger;": "\u2020",
+ "daleth;": "\u2138",
+ "darr;": "\u2193",
+ "dash;": "\u2010",
+ "dashv;": "\u22a3",
+ "dbkarow;": "\u290f",
+ "dblac;": "\u02dd",
+ "dcaron;": "\u010f",
+ "dcy;": "\u0434",
+ "dd;": "\u2146",
+ "ddagger;": "\u2021",
+ "ddarr;": "\u21ca",
+ "ddotseq;": "\u2a77",
+ "deg": "\xb0",
+ "deg;": "\xb0",
+ "delta;": "\u03b4",
+ "demptyv;": "\u29b1",
+ "dfisht;": "\u297f",
+ "dfr;": "\U0001d521",
+ "dharl;": "\u21c3",
+ "dharr;": "\u21c2",
+ "diam;": "\u22c4",
+ "diamond;": "\u22c4",
+ "diamondsuit;": "\u2666",
+ "diams;": "\u2666",
+ "die;": "\xa8",
+ "digamma;": "\u03dd",
+ "disin;": "\u22f2",
+ "div;": "\xf7",
+ "divide": "\xf7",
+ "divide;": "\xf7",
+ "divideontimes;": "\u22c7",
+ "divonx;": "\u22c7",
+ "djcy;": "\u0452",
+ "dlcorn;": "\u231e",
+ "dlcrop;": "\u230d",
+ "dollar;": "$",
+ "dopf;": "\U0001d555",
+ "dot;": "\u02d9",
+ "doteq;": "\u2250",
+ "doteqdot;": "\u2251",
+ "dotminus;": "\u2238",
+ "dotplus;": "\u2214",
+ "dotsquare;": "\u22a1",
+ "doublebarwedge;": "\u2306",
+ "downarrow;": "\u2193",
+ "downdownarrows;": "\u21ca",
+ "downharpoonleft;": "\u21c3",
+ "downharpoonright;": "\u21c2",
+ "drbkarow;": "\u2910",
+ "drcorn;": "\u231f",
+ "drcrop;": "\u230c",
+ "dscr;": "\U0001d4b9",
+ "dscy;": "\u0455",
+ "dsol;": "\u29f6",
+ "dstrok;": "\u0111",
+ "dtdot;": "\u22f1",
+ "dtri;": "\u25bf",
+ "dtrif;": "\u25be",
+ "duarr;": "\u21f5",
+ "duhar;": "\u296f",
+ "dwangle;": "\u29a6",
+ "dzcy;": "\u045f",
+ "dzigrarr;": "\u27ff",
+ "eDDot;": "\u2a77",
+ "eDot;": "\u2251",
+ "eacute": "\xe9",
+ "eacute;": "\xe9",
+ "easter;": "\u2a6e",
+ "ecaron;": "\u011b",
+ "ecir;": "\u2256",
+ "ecirc": "\xea",
+ "ecirc;": "\xea",
+ "ecolon;": "\u2255",
+ "ecy;": "\u044d",
+ "edot;": "\u0117",
+ "ee;": "\u2147",
+ "efDot;": "\u2252",
+ "efr;": "\U0001d522",
+ "eg;": "\u2a9a",
+ "egrave": "\xe8",
+ "egrave;": "\xe8",
+ "egs;": "\u2a96",
+ "egsdot;": "\u2a98",
+ "el;": "\u2a99",
+ "elinters;": "\u23e7",
+ "ell;": "\u2113",
+ "els;": "\u2a95",
+ "elsdot;": "\u2a97",
+ "emacr;": "\u0113",
+ "empty;": "\u2205",
+ "emptyset;": "\u2205",
+ "emptyv;": "\u2205",
+ "emsp13;": "\u2004",
+ "emsp14;": "\u2005",
+ "emsp;": "\u2003",
+ "eng;": "\u014b",
+ "ensp;": "\u2002",
+ "eogon;": "\u0119",
+ "eopf;": "\U0001d556",
+ "epar;": "\u22d5",
+ "eparsl;": "\u29e3",
+ "eplus;": "\u2a71",
+ "epsi;": "\u03b5",
+ "epsilon;": "\u03b5",
+ "epsiv;": "\u03f5",
+ "eqcirc;": "\u2256",
+ "eqcolon;": "\u2255",
+ "eqsim;": "\u2242",
+ "eqslantgtr;": "\u2a96",
+ "eqslantless;": "\u2a95",
+ "equals;": "=",
+ "equest;": "\u225f",
+ "equiv;": "\u2261",
+ "equivDD;": "\u2a78",
+ "eqvparsl;": "\u29e5",
+ "erDot;": "\u2253",
+ "erarr;": "\u2971",
+ "escr;": "\u212f",
+ "esdot;": "\u2250",
+ "esim;": "\u2242",
+ "eta;": "\u03b7",
+ "eth": "\xf0",
+ "eth;": "\xf0",
+ "euml": "\xeb",
+ "euml;": "\xeb",
+ "euro;": "\u20ac",
+ "excl;": "!",
+ "exist;": "\u2203",
+ "expectation;": "\u2130",
+ "exponentiale;": "\u2147",
+ "fallingdotseq;": "\u2252",
+ "fcy;": "\u0444",
+ "female;": "\u2640",
+ "ffilig;": "\ufb03",
+ "fflig;": "\ufb00",
+ "ffllig;": "\ufb04",
+ "ffr;": "\U0001d523",
+ "filig;": "\ufb01",
+ "fjlig;": "fj",
+ "flat;": "\u266d",
+ "fllig;": "\ufb02",
+ "fltns;": "\u25b1",
+ "fnof;": "\u0192",
+ "fopf;": "\U0001d557",
+ "forall;": "\u2200",
+ "fork;": "\u22d4",
+ "forkv;": "\u2ad9",
+ "fpartint;": "\u2a0d",
+ "frac12": "\xbd",
+ "frac12;": "\xbd",
+ "frac13;": "\u2153",
+ "frac14": "\xbc",
+ "frac14;": "\xbc",
+ "frac15;": "\u2155",
+ "frac16;": "\u2159",
+ "frac18;": "\u215b",
+ "frac23;": "\u2154",
+ "frac25;": "\u2156",
+ "frac34": "\xbe",
+ "frac34;": "\xbe",
+ "frac35;": "\u2157",
+ "frac38;": "\u215c",
+ "frac45;": "\u2158",
+ "frac56;": "\u215a",
+ "frac58;": "\u215d",
+ "frac78;": "\u215e",
+ "frasl;": "\u2044",
+ "frown;": "\u2322",
+ "fscr;": "\U0001d4bb",
+ "gE;": "\u2267",
+ "gEl;": "\u2a8c",
+ "gacute;": "\u01f5",
+ "gamma;": "\u03b3",
+ "gammad;": "\u03dd",
+ "gap;": "\u2a86",
+ "gbreve;": "\u011f",
+ "gcirc;": "\u011d",
+ "gcy;": "\u0433",
+ "gdot;": "\u0121",
+ "ge;": "\u2265",
+ "gel;": "\u22db",
+ "geq;": "\u2265",
+ "geqq;": "\u2267",
+ "geqslant;": "\u2a7e",
+ "ges;": "\u2a7e",
+ "gescc;": "\u2aa9",
+ "gesdot;": "\u2a80",
+ "gesdoto;": "\u2a82",
+ "gesdotol;": "\u2a84",
+ "gesl;": "\u22db\ufe00",
+ "gesles;": "\u2a94",
+ "gfr;": "\U0001d524",
+ "gg;": "\u226b",
+ "ggg;": "\u22d9",
+ "gimel;": "\u2137",
+ "gjcy;": "\u0453",
+ "gl;": "\u2277",
+ "glE;": "\u2a92",
+ "gla;": "\u2aa5",
+ "glj;": "\u2aa4",
+ "gnE;": "\u2269",
+ "gnap;": "\u2a8a",
+ "gnapprox;": "\u2a8a",
+ "gne;": "\u2a88",
+ "gneq;": "\u2a88",
+ "gneqq;": "\u2269",
+ "gnsim;": "\u22e7",
+ "gopf;": "\U0001d558",
+ "grave;": "`",
+ "gscr;": "\u210a",
+ "gsim;": "\u2273",
+ "gsime;": "\u2a8e",
+ "gsiml;": "\u2a90",
+ "gt": ">",
+ "gt;": ">",
+ "gtcc;": "\u2aa7",
+ "gtcir;": "\u2a7a",
+ "gtdot;": "\u22d7",
+ "gtlPar;": "\u2995",
+ "gtquest;": "\u2a7c",
+ "gtrapprox;": "\u2a86",
+ "gtrarr;": "\u2978",
+ "gtrdot;": "\u22d7",
+ "gtreqless;": "\u22db",
+ "gtreqqless;": "\u2a8c",
+ "gtrless;": "\u2277",
+ "gtrsim;": "\u2273",
+ "gvertneqq;": "\u2269\ufe00",
+ "gvnE;": "\u2269\ufe00",
+ "hArr;": "\u21d4",
+ "hairsp;": "\u200a",
+ "half;": "\xbd",
+ "hamilt;": "\u210b",
+ "hardcy;": "\u044a",
+ "harr;": "\u2194",
+ "harrcir;": "\u2948",
+ "harrw;": "\u21ad",
+ "hbar;": "\u210f",
+ "hcirc;": "\u0125",
+ "hearts;": "\u2665",
+ "heartsuit;": "\u2665",
+ "hellip;": "\u2026",
+ "hercon;": "\u22b9",
+ "hfr;": "\U0001d525",
+ "hksearow;": "\u2925",
+ "hkswarow;": "\u2926",
+ "hoarr;": "\u21ff",
+ "homtht;": "\u223b",
+ "hookleftarrow;": "\u21a9",
+ "hookrightarrow;": "\u21aa",
+ "hopf;": "\U0001d559",
+ "horbar;": "\u2015",
+ "hscr;": "\U0001d4bd",
+ "hslash;": "\u210f",
+ "hstrok;": "\u0127",
+ "hybull;": "\u2043",
+ "hyphen;": "\u2010",
+ "iacute": "\xed",
+ "iacute;": "\xed",
+ "ic;": "\u2063",
+ "icirc": "\xee",
+ "icirc;": "\xee",
+ "icy;": "\u0438",
+ "iecy;": "\u0435",
+ "iexcl": "\xa1",
+ "iexcl;": "\xa1",
+ "iff;": "\u21d4",
+ "ifr;": "\U0001d526",
+ "igrave": "\xec",
+ "igrave;": "\xec",
+ "ii;": "\u2148",
+ "iiiint;": "\u2a0c",
+ "iiint;": "\u222d",
+ "iinfin;": "\u29dc",
+ "iiota;": "\u2129",
+ "ijlig;": "\u0133",
+ "imacr;": "\u012b",
+ "image;": "\u2111",
+ "imagline;": "\u2110",
+ "imagpart;": "\u2111",
+ "imath;": "\u0131",
+ "imof;": "\u22b7",
+ "imped;": "\u01b5",
+ "in;": "\u2208",
+ "incare;": "\u2105",
+ "infin;": "\u221e",
+ "infintie;": "\u29dd",
+ "inodot;": "\u0131",
+ "int;": "\u222b",
+ "intcal;": "\u22ba",
+ "integers;": "\u2124",
+ "intercal;": "\u22ba",
+ "intlarhk;": "\u2a17",
+ "intprod;": "\u2a3c",
+ "iocy;": "\u0451",
+ "iogon;": "\u012f",
+ "iopf;": "\U0001d55a",
+ "iota;": "\u03b9",
+ "iprod;": "\u2a3c",
+ "iquest": "\xbf",
+ "iquest;": "\xbf",
+ "iscr;": "\U0001d4be",
+ "isin;": "\u2208",
+ "isinE;": "\u22f9",
+ "isindot;": "\u22f5",
+ "isins;": "\u22f4",
+ "isinsv;": "\u22f3",
+ "isinv;": "\u2208",
+ "it;": "\u2062",
+ "itilde;": "\u0129",
+ "iukcy;": "\u0456",
+ "iuml": "\xef",
+ "iuml;": "\xef",
+ "jcirc;": "\u0135",
+ "jcy;": "\u0439",
+ "jfr;": "\U0001d527",
+ "jmath;": "\u0237",
+ "jopf;": "\U0001d55b",
+ "jscr;": "\U0001d4bf",
+ "jsercy;": "\u0458",
+ "jukcy;": "\u0454",
+ "kappa;": "\u03ba",
+ "kappav;": "\u03f0",
+ "kcedil;": "\u0137",
+ "kcy;": "\u043a",
+ "kfr;": "\U0001d528",
+ "kgreen;": "\u0138",
+ "khcy;": "\u0445",
+ "kjcy;": "\u045c",
+ "kopf;": "\U0001d55c",
+ "kscr;": "\U0001d4c0",
+ "lAarr;": "\u21da",
+ "lArr;": "\u21d0",
+ "lAtail;": "\u291b",
+ "lBarr;": "\u290e",
+ "lE;": "\u2266",
+ "lEg;": "\u2a8b",
+ "lHar;": "\u2962",
+ "lacute;": "\u013a",
+ "laemptyv;": "\u29b4",
+ "lagran;": "\u2112",
+ "lambda;": "\u03bb",
+ "lang;": "\u27e8",
+ "langd;": "\u2991",
+ "langle;": "\u27e8",
+ "lap;": "\u2a85",
+ "laquo": "\xab",
+ "laquo;": "\xab",
+ "larr;": "\u2190",
+ "larrb;": "\u21e4",
+ "larrbfs;": "\u291f",
+ "larrfs;": "\u291d",
+ "larrhk;": "\u21a9",
+ "larrlp;": "\u21ab",
+ "larrpl;": "\u2939",
+ "larrsim;": "\u2973",
+ "larrtl;": "\u21a2",
+ "lat;": "\u2aab",
+ "latail;": "\u2919",
+ "late;": "\u2aad",
+ "lates;": "\u2aad\ufe00",
+ "lbarr;": "\u290c",
+ "lbbrk;": "\u2772",
+ "lbrace;": "{",
+ "lbrack;": "[",
+ "lbrke;": "\u298b",
+ "lbrksld;": "\u298f",
+ "lbrkslu;": "\u298d",
+ "lcaron;": "\u013e",
+ "lcedil;": "\u013c",
+ "lceil;": "\u2308",
+ "lcub;": "{",
+ "lcy;": "\u043b",
+ "ldca;": "\u2936",
+ "ldquo;": "\u201c",
+ "ldquor;": "\u201e",
+ "ldrdhar;": "\u2967",
+ "ldrushar;": "\u294b",
+ "ldsh;": "\u21b2",
+ "le;": "\u2264",
+ "leftarrow;": "\u2190",
+ "leftarrowtail;": "\u21a2",
+ "leftharpoondown;": "\u21bd",
+ "leftharpoonup;": "\u21bc",
+ "leftleftarrows;": "\u21c7",
+ "leftrightarrow;": "\u2194",
+ "leftrightarrows;": "\u21c6",
+ "leftrightharpoons;": "\u21cb",
+ "leftrightsquigarrow;": "\u21ad",
+ "leftthreetimes;": "\u22cb",
+ "leg;": "\u22da",
+ "leq;": "\u2264",
+ "leqq;": "\u2266",
+ "leqslant;": "\u2a7d",
+ "les;": "\u2a7d",
+ "lescc;": "\u2aa8",
+ "lesdot;": "\u2a7f",
+ "lesdoto;": "\u2a81",
+ "lesdotor;": "\u2a83",
+ "lesg;": "\u22da\ufe00",
+ "lesges;": "\u2a93",
+ "lessapprox;": "\u2a85",
+ "lessdot;": "\u22d6",
+ "lesseqgtr;": "\u22da",
+ "lesseqqgtr;": "\u2a8b",
+ "lessgtr;": "\u2276",
+ "lesssim;": "\u2272",
+ "lfisht;": "\u297c",
+ "lfloor;": "\u230a",
+ "lfr;": "\U0001d529",
+ "lg;": "\u2276",
+ "lgE;": "\u2a91",
+ "lhard;": "\u21bd",
+ "lharu;": "\u21bc",
+ "lharul;": "\u296a",
+ "lhblk;": "\u2584",
+ "ljcy;": "\u0459",
+ "ll;": "\u226a",
+ "llarr;": "\u21c7",
+ "llcorner;": "\u231e",
+ "llhard;": "\u296b",
+ "lltri;": "\u25fa",
+ "lmidot;": "\u0140",
+ "lmoust;": "\u23b0",
+ "lmoustache;": "\u23b0",
+ "lnE;": "\u2268",
+ "lnap;": "\u2a89",
+ "lnapprox;": "\u2a89",
+ "lne;": "\u2a87",
+ "lneq;": "\u2a87",
+ "lneqq;": "\u2268",
+ "lnsim;": "\u22e6",
+ "loang;": "\u27ec",
+ "loarr;": "\u21fd",
+ "lobrk;": "\u27e6",
+ "longleftarrow;": "\u27f5",
+ "longleftrightarrow;": "\u27f7",
+ "longmapsto;": "\u27fc",
+ "longrightarrow;": "\u27f6",
+ "looparrowleft;": "\u21ab",
+ "looparrowright;": "\u21ac",
+ "lopar;": "\u2985",
+ "lopf;": "\U0001d55d",
+ "loplus;": "\u2a2d",
+ "lotimes;": "\u2a34",
+ "lowast;": "\u2217",
+ "lowbar;": "_",
+ "loz;": "\u25ca",
+ "lozenge;": "\u25ca",
+ "lozf;": "\u29eb",
+ "lpar;": "(",
+ "lparlt;": "\u2993",
+ "lrarr;": "\u21c6",
+ "lrcorner;": "\u231f",
+ "lrhar;": "\u21cb",
+ "lrhard;": "\u296d",
+ "lrm;": "\u200e",
+ "lrtri;": "\u22bf",
+ "lsaquo;": "\u2039",
+ "lscr;": "\U0001d4c1",
+ "lsh;": "\u21b0",
+ "lsim;": "\u2272",
+ "lsime;": "\u2a8d",
+ "lsimg;": "\u2a8f",
+ "lsqb;": "[",
+ "lsquo;": "\u2018",
+ "lsquor;": "\u201a",
+ "lstrok;": "\u0142",
+ "lt": "<",
+ "lt;": "<",
+ "ltcc;": "\u2aa6",
+ "ltcir;": "\u2a79",
+ "ltdot;": "\u22d6",
+ "lthree;": "\u22cb",
+ "ltimes;": "\u22c9",
+ "ltlarr;": "\u2976",
+ "ltquest;": "\u2a7b",
+ "ltrPar;": "\u2996",
+ "ltri;": "\u25c3",
+ "ltrie;": "\u22b4",
+ "ltrif;": "\u25c2",
+ "lurdshar;": "\u294a",
+ "luruhar;": "\u2966",
+ "lvertneqq;": "\u2268\ufe00",
+ "lvnE;": "\u2268\ufe00",
+ "mDDot;": "\u223a",
+ "macr": "\xaf",
+ "macr;": "\xaf",
+ "male;": "\u2642",
+ "malt;": "\u2720",
+ "maltese;": "\u2720",
+ "map;": "\u21a6",
+ "mapsto;": "\u21a6",
+ "mapstodown;": "\u21a7",
+ "mapstoleft;": "\u21a4",
+ "mapstoup;": "\u21a5",
+ "marker;": "\u25ae",
+ "mcomma;": "\u2a29",
+ "mcy;": "\u043c",
+ "mdash;": "\u2014",
+ "measuredangle;": "\u2221",
+ "mfr;": "\U0001d52a",
+ "mho;": "\u2127",
+ "micro": "\xb5",
+ "micro;": "\xb5",
+ "mid;": "\u2223",
+ "midast;": "*",
+ "midcir;": "\u2af0",
+ "middot": "\xb7",
+ "middot;": "\xb7",
+ "minus;": "\u2212",
+ "minusb;": "\u229f",
+ "minusd;": "\u2238",
+ "minusdu;": "\u2a2a",
+ "mlcp;": "\u2adb",
+ "mldr;": "\u2026",
+ "mnplus;": "\u2213",
+ "models;": "\u22a7",
+ "mopf;": "\U0001d55e",
+ "mp;": "\u2213",
+ "mscr;": "\U0001d4c2",
+ "mstpos;": "\u223e",
+ "mu;": "\u03bc",
+ "multimap;": "\u22b8",
+ "mumap;": "\u22b8",
+ "nGg;": "\u22d9\u0338",
+ "nGt;": "\u226b\u20d2",
+ "nGtv;": "\u226b\u0338",
+ "nLeftarrow;": "\u21cd",
+ "nLeftrightarrow;": "\u21ce",
+ "nLl;": "\u22d8\u0338",
+ "nLt;": "\u226a\u20d2",
+ "nLtv;": "\u226a\u0338",
+ "nRightarrow;": "\u21cf",
+ "nVDash;": "\u22af",
+ "nVdash;": "\u22ae",
+ "nabla;": "\u2207",
+ "nacute;": "\u0144",
+ "nang;": "\u2220\u20d2",
+ "nap;": "\u2249",
+ "napE;": "\u2a70\u0338",
+ "napid;": "\u224b\u0338",
+ "napos;": "\u0149",
+ "napprox;": "\u2249",
+ "natur;": "\u266e",
+ "natural;": "\u266e",
+ "naturals;": "\u2115",
+ "nbsp": "\xa0",
+ "nbsp;": "\xa0",
+ "nbump;": "\u224e\u0338",
+ "nbumpe;": "\u224f\u0338",
+ "ncap;": "\u2a43",
+ "ncaron;": "\u0148",
+ "ncedil;": "\u0146",
+ "ncong;": "\u2247",
+ "ncongdot;": "\u2a6d\u0338",
+ "ncup;": "\u2a42",
+ "ncy;": "\u043d",
+ "ndash;": "\u2013",
+ "ne;": "\u2260",
+ "neArr;": "\u21d7",
+ "nearhk;": "\u2924",
+ "nearr;": "\u2197",
+ "nearrow;": "\u2197",
+ "nedot;": "\u2250\u0338",
+ "nequiv;": "\u2262",
+ "nesear;": "\u2928",
+ "nesim;": "\u2242\u0338",
+ "nexist;": "\u2204",
+ "nexists;": "\u2204",
+ "nfr;": "\U0001d52b",
+ "ngE;": "\u2267\u0338",
+ "nge;": "\u2271",
+ "ngeq;": "\u2271",
+ "ngeqq;": "\u2267\u0338",
+ "ngeqslant;": "\u2a7e\u0338",
+ "nges;": "\u2a7e\u0338",
+ "ngsim;": "\u2275",
+ "ngt;": "\u226f",
+ "ngtr;": "\u226f",
+ "nhArr;": "\u21ce",
+ "nharr;": "\u21ae",
+ "nhpar;": "\u2af2",
+ "ni;": "\u220b",
+ "nis;": "\u22fc",
+ "nisd;": "\u22fa",
+ "niv;": "\u220b",
+ "njcy;": "\u045a",
+ "nlArr;": "\u21cd",
+ "nlE;": "\u2266\u0338",
+ "nlarr;": "\u219a",
+ "nldr;": "\u2025",
+ "nle;": "\u2270",
+ "nleftarrow;": "\u219a",
+ "nleftrightarrow;": "\u21ae",
+ "nleq;": "\u2270",
+ "nleqq;": "\u2266\u0338",
+ "nleqslant;": "\u2a7d\u0338",
+ "nles;": "\u2a7d\u0338",
+ "nless;": "\u226e",
+ "nlsim;": "\u2274",
+ "nlt;": "\u226e",
+ "nltri;": "\u22ea",
+ "nltrie;": "\u22ec",
+ "nmid;": "\u2224",
+ "nopf;": "\U0001d55f",
+ "not": "\xac",
+ "not;": "\xac",
+ "notin;": "\u2209",
+ "notinE;": "\u22f9\u0338",
+ "notindot;": "\u22f5\u0338",
+ "notinva;": "\u2209",
+ "notinvb;": "\u22f7",
+ "notinvc;": "\u22f6",
+ "notni;": "\u220c",
+ "notniva;": "\u220c",
+ "notnivb;": "\u22fe",
+ "notnivc;": "\u22fd",
+ "npar;": "\u2226",
+ "nparallel;": "\u2226",
+ "nparsl;": "\u2afd\u20e5",
+ "npart;": "\u2202\u0338",
+ "npolint;": "\u2a14",
+ "npr;": "\u2280",
+ "nprcue;": "\u22e0",
+ "npre;": "\u2aaf\u0338",
+ "nprec;": "\u2280",
+ "npreceq;": "\u2aaf\u0338",
+ "nrArr;": "\u21cf",
+ "nrarr;": "\u219b",
+ "nrarrc;": "\u2933\u0338",
+ "nrarrw;": "\u219d\u0338",
+ "nrightarrow;": "\u219b",
+ "nrtri;": "\u22eb",
+ "nrtrie;": "\u22ed",
+ "nsc;": "\u2281",
+ "nsccue;": "\u22e1",
+ "nsce;": "\u2ab0\u0338",
+ "nscr;": "\U0001d4c3",
+ "nshortmid;": "\u2224",
+ "nshortparallel;": "\u2226",
+ "nsim;": "\u2241",
+ "nsime;": "\u2244",
+ "nsimeq;": "\u2244",
+ "nsmid;": "\u2224",
+ "nspar;": "\u2226",
+ "nsqsube;": "\u22e2",
+ "nsqsupe;": "\u22e3",
+ "nsub;": "\u2284",
+ "nsubE;": "\u2ac5\u0338",
+ "nsube;": "\u2288",
+ "nsubset;": "\u2282\u20d2",
+ "nsubseteq;": "\u2288",
+ "nsubseteqq;": "\u2ac5\u0338",
+ "nsucc;": "\u2281",
+ "nsucceq;": "\u2ab0\u0338",
+ "nsup;": "\u2285",
+ "nsupE;": "\u2ac6\u0338",
+ "nsupe;": "\u2289",
+ "nsupset;": "\u2283\u20d2",
+ "nsupseteq;": "\u2289",
+ "nsupseteqq;": "\u2ac6\u0338",
+ "ntgl;": "\u2279",
+ "ntilde": "\xf1",
+ "ntilde;": "\xf1",
+ "ntlg;": "\u2278",
+ "ntriangleleft;": "\u22ea",
+ "ntrianglelefteq;": "\u22ec",
+ "ntriangleright;": "\u22eb",
+ "ntrianglerighteq;": "\u22ed",
+ "nu;": "\u03bd",
+ "num;": "#",
+ "numero;": "\u2116",
+ "numsp;": "\u2007",
+ "nvDash;": "\u22ad",
+ "nvHarr;": "\u2904",
+ "nvap;": "\u224d\u20d2",
+ "nvdash;": "\u22ac",
+ "nvge;": "\u2265\u20d2",
+ "nvgt;": ">\u20d2",
+ "nvinfin;": "\u29de",
+ "nvlArr;": "\u2902",
+ "nvle;": "\u2264\u20d2",
+ "nvlt;": "<\u20d2",
+ "nvltrie;": "\u22b4\u20d2",
+ "nvrArr;": "\u2903",
+ "nvrtrie;": "\u22b5\u20d2",
+ "nvsim;": "\u223c\u20d2",
+ "nwArr;": "\u21d6",
+ "nwarhk;": "\u2923",
+ "nwarr;": "\u2196",
+ "nwarrow;": "\u2196",
+ "nwnear;": "\u2927",
+ "oS;": "\u24c8",
+ "oacute": "\xf3",
+ "oacute;": "\xf3",
+ "oast;": "\u229b",
+ "ocir;": "\u229a",
+ "ocirc": "\xf4",
+ "ocirc;": "\xf4",
+ "ocy;": "\u043e",
+ "odash;": "\u229d",
+ "odblac;": "\u0151",
+ "odiv;": "\u2a38",
+ "odot;": "\u2299",
+ "odsold;": "\u29bc",
+ "oelig;": "\u0153",
+ "ofcir;": "\u29bf",
+ "ofr;": "\U0001d52c",
+ "ogon;": "\u02db",
+ "ograve": "\xf2",
+ "ograve;": "\xf2",
+ "ogt;": "\u29c1",
+ "ohbar;": "\u29b5",
+ "ohm;": "\u03a9",
+ "oint;": "\u222e",
+ "olarr;": "\u21ba",
+ "olcir;": "\u29be",
+ "olcross;": "\u29bb",
+ "oline;": "\u203e",
+ "olt;": "\u29c0",
+ "omacr;": "\u014d",
+ "omega;": "\u03c9",
+ "omicron;": "\u03bf",
+ "omid;": "\u29b6",
+ "ominus;": "\u2296",
+ "oopf;": "\U0001d560",
+ "opar;": "\u29b7",
+ "operp;": "\u29b9",
+ "oplus;": "\u2295",
+ "or;": "\u2228",
+ "orarr;": "\u21bb",
+ "ord;": "\u2a5d",
+ "order;": "\u2134",
+ "orderof;": "\u2134",
+ "ordf": "\xaa",
+ "ordf;": "\xaa",
+ "ordm": "\xba",
+ "ordm;": "\xba",
+ "origof;": "\u22b6",
+ "oror;": "\u2a56",
+ "orslope;": "\u2a57",
+ "orv;": "\u2a5b",
+ "oscr;": "\u2134",
+ "oslash": "\xf8",
+ "oslash;": "\xf8",
+ "osol;": "\u2298",
+ "otilde": "\xf5",
+ "otilde;": "\xf5",
+ "otimes;": "\u2297",
+ "otimesas;": "\u2a36",
+ "ouml": "\xf6",
+ "ouml;": "\xf6",
+ "ovbar;": "\u233d",
+ "par;": "\u2225",
+ "para": "\xb6",
+ "para;": "\xb6",
+ "parallel;": "\u2225",
+ "parsim;": "\u2af3",
+ "parsl;": "\u2afd",
+ "part;": "\u2202",
+ "pcy;": "\u043f",
+ "percnt;": "%",
+ "period;": ".",
+ "permil;": "\u2030",
+ "perp;": "\u22a5",
+ "pertenk;": "\u2031",
+ "pfr;": "\U0001d52d",
+ "phi;": "\u03c6",
+ "phiv;": "\u03d5",
+ "phmmat;": "\u2133",
+ "phone;": "\u260e",
+ "pi;": "\u03c0",
+ "pitchfork;": "\u22d4",
+ "piv;": "\u03d6",
+ "planck;": "\u210f",
+ "planckh;": "\u210e",
+ "plankv;": "\u210f",
+ "plus;": "+",
+ "plusacir;": "\u2a23",
+ "plusb;": "\u229e",
+ "pluscir;": "\u2a22",
+ "plusdo;": "\u2214",
+ "plusdu;": "\u2a25",
+ "pluse;": "\u2a72",
+ "plusmn": "\xb1",
+ "plusmn;": "\xb1",
+ "plussim;": "\u2a26",
+ "plustwo;": "\u2a27",
+ "pm;": "\xb1",
+ "pointint;": "\u2a15",
+ "popf;": "\U0001d561",
+ "pound": "\xa3",
+ "pound;": "\xa3",
+ "pr;": "\u227a",
+ "prE;": "\u2ab3",
+ "prap;": "\u2ab7",
+ "prcue;": "\u227c",
+ "pre;": "\u2aaf",
+ "prec;": "\u227a",
+ "precapprox;": "\u2ab7",
+ "preccurlyeq;": "\u227c",
+ "preceq;": "\u2aaf",
+ "precnapprox;": "\u2ab9",
+ "precneqq;": "\u2ab5",
+ "precnsim;": "\u22e8",
+ "precsim;": "\u227e",
+ "prime;": "\u2032",
+ "primes;": "\u2119",
+ "prnE;": "\u2ab5",
+ "prnap;": "\u2ab9",
+ "prnsim;": "\u22e8",
+ "prod;": "\u220f",
+ "profalar;": "\u232e",
+ "profline;": "\u2312",
+ "profsurf;": "\u2313",
+ "prop;": "\u221d",
+ "propto;": "\u221d",
+ "prsim;": "\u227e",
+ "prurel;": "\u22b0",
+ "pscr;": "\U0001d4c5",
+ "psi;": "\u03c8",
+ "puncsp;": "\u2008",
+ "qfr;": "\U0001d52e",
+ "qint;": "\u2a0c",
+ "qopf;": "\U0001d562",
+ "qprime;": "\u2057",
+ "qscr;": "\U0001d4c6",
+ "quaternions;": "\u210d",
+ "quatint;": "\u2a16",
+ "quest;": "?",
+ "questeq;": "\u225f",
+ "quot": "\"",
+ "quot;": "\"",
+ "rAarr;": "\u21db",
+ "rArr;": "\u21d2",
+ "rAtail;": "\u291c",
+ "rBarr;": "\u290f",
+ "rHar;": "\u2964",
+ "race;": "\u223d\u0331",
+ "racute;": "\u0155",
+ "radic;": "\u221a",
+ "raemptyv;": "\u29b3",
+ "rang;": "\u27e9",
+ "rangd;": "\u2992",
+ "range;": "\u29a5",
+ "rangle;": "\u27e9",
+ "raquo": "\xbb",
+ "raquo;": "\xbb",
+ "rarr;": "\u2192",
+ "rarrap;": "\u2975",
+ "rarrb;": "\u21e5",
+ "rarrbfs;": "\u2920",
+ "rarrc;": "\u2933",
+ "rarrfs;": "\u291e",
+ "rarrhk;": "\u21aa",
+ "rarrlp;": "\u21ac",
+ "rarrpl;": "\u2945",
+ "rarrsim;": "\u2974",
+ "rarrtl;": "\u21a3",
+ "rarrw;": "\u219d",
+ "ratail;": "\u291a",
+ "ratio;": "\u2236",
+ "rationals;": "\u211a",
+ "rbarr;": "\u290d",
+ "rbbrk;": "\u2773",
+ "rbrace;": "}",
+ "rbrack;": "]",
+ "rbrke;": "\u298c",
+ "rbrksld;": "\u298e",
+ "rbrkslu;": "\u2990",
+ "rcaron;": "\u0159",
+ "rcedil;": "\u0157",
+ "rceil;": "\u2309",
+ "rcub;": "}",
+ "rcy;": "\u0440",
+ "rdca;": "\u2937",
+ "rdldhar;": "\u2969",
+ "rdquo;": "\u201d",
+ "rdquor;": "\u201d",
+ "rdsh;": "\u21b3",
+ "real;": "\u211c",
+ "realine;": "\u211b",
+ "realpart;": "\u211c",
+ "reals;": "\u211d",
+ "rect;": "\u25ad",
+ "reg": "\xae",
+ "reg;": "\xae",
+ "rfisht;": "\u297d",
+ "rfloor;": "\u230b",
+ "rfr;": "\U0001d52f",
+ "rhard;": "\u21c1",
+ "rharu;": "\u21c0",
+ "rharul;": "\u296c",
+ "rho;": "\u03c1",
+ "rhov;": "\u03f1",
+ "rightarrow;": "\u2192",
+ "rightarrowtail;": "\u21a3",
+ "rightharpoondown;": "\u21c1",
+ "rightharpoonup;": "\u21c0",
+ "rightleftarrows;": "\u21c4",
+ "rightleftharpoons;": "\u21cc",
+ "rightrightarrows;": "\u21c9",
+ "rightsquigarrow;": "\u219d",
+ "rightthreetimes;": "\u22cc",
+ "ring;": "\u02da",
+ "risingdotseq;": "\u2253",
+ "rlarr;": "\u21c4",
+ "rlhar;": "\u21cc",
+ "rlm;": "\u200f",
+ "rmoust;": "\u23b1",
+ "rmoustache;": "\u23b1",
+ "rnmid;": "\u2aee",
+ "roang;": "\u27ed",
+ "roarr;": "\u21fe",
+ "robrk;": "\u27e7",
+ "ropar;": "\u2986",
+ "ropf;": "\U0001d563",
+ "roplus;": "\u2a2e",
+ "rotimes;": "\u2a35",
+ "rpar;": ")",
+ "rpargt;": "\u2994",
+ "rppolint;": "\u2a12",
+ "rrarr;": "\u21c9",
+ "rsaquo;": "\u203a",
+ "rscr;": "\U0001d4c7",
+ "rsh;": "\u21b1",
+ "rsqb;": "]",
+ "rsquo;": "\u2019",
+ "rsquor;": "\u2019",
+ "rthree;": "\u22cc",
+ "rtimes;": "\u22ca",
+ "rtri;": "\u25b9",
+ "rtrie;": "\u22b5",
+ "rtrif;": "\u25b8",
+ "rtriltri;": "\u29ce",
+ "ruluhar;": "\u2968",
+ "rx;": "\u211e",
+ "sacute;": "\u015b",
+ "sbquo;": "\u201a",
+ "sc;": "\u227b",
+ "scE;": "\u2ab4",
+ "scap;": "\u2ab8",
+ "scaron;": "\u0161",
+ "sccue;": "\u227d",
+ "sce;": "\u2ab0",
+ "scedil;": "\u015f",
+ "scirc;": "\u015d",
+ "scnE;": "\u2ab6",
+ "scnap;": "\u2aba",
+ "scnsim;": "\u22e9",
+ "scpolint;": "\u2a13",
+ "scsim;": "\u227f",
+ "scy;": "\u0441",
+ "sdot;": "\u22c5",
+ "sdotb;": "\u22a1",
+ "sdote;": "\u2a66",
+ "seArr;": "\u21d8",
+ "searhk;": "\u2925",
+ "searr;": "\u2198",
+ "searrow;": "\u2198",
+ "sect": "\xa7",
+ "sect;": "\xa7",
+ "semi;": ";",
+ "seswar;": "\u2929",
+ "setminus;": "\u2216",
+ "setmn;": "\u2216",
+ "sext;": "\u2736",
+ "sfr;": "\U0001d530",
+ "sfrown;": "\u2322",
+ "sharp;": "\u266f",
+ "shchcy;": "\u0449",
+ "shcy;": "\u0448",
+ "shortmid;": "\u2223",
+ "shortparallel;": "\u2225",
+ "shy": "\xad",
+ "shy;": "\xad",
+ "sigma;": "\u03c3",
+ "sigmaf;": "\u03c2",
+ "sigmav;": "\u03c2",
+ "sim;": "\u223c",
+ "simdot;": "\u2a6a",
+ "sime;": "\u2243",
+ "simeq;": "\u2243",
+ "simg;": "\u2a9e",
+ "simgE;": "\u2aa0",
+ "siml;": "\u2a9d",
+ "simlE;": "\u2a9f",
+ "simne;": "\u2246",
+ "simplus;": "\u2a24",
+ "simrarr;": "\u2972",
+ "slarr;": "\u2190",
+ "smallsetminus;": "\u2216",
+ "smashp;": "\u2a33",
+ "smeparsl;": "\u29e4",
+ "smid;": "\u2223",
+ "smile;": "\u2323",
+ "smt;": "\u2aaa",
+ "smte;": "\u2aac",
+ "smtes;": "\u2aac\ufe00",
+ "softcy;": "\u044c",
+ "sol;": "/",
+ "solb;": "\u29c4",
+ "solbar;": "\u233f",
+ "sopf;": "\U0001d564",
+ "spades;": "\u2660",
+ "spadesuit;": "\u2660",
+ "spar;": "\u2225",
+ "sqcap;": "\u2293",
+ "sqcaps;": "\u2293\ufe00",
+ "sqcup;": "\u2294",
+ "sqcups;": "\u2294\ufe00",
+ "sqsub;": "\u228f",
+ "sqsube;": "\u2291",
+ "sqsubset;": "\u228f",
+ "sqsubseteq;": "\u2291",
+ "sqsup;": "\u2290",
+ "sqsupe;": "\u2292",
+ "sqsupset;": "\u2290",
+ "sqsupseteq;": "\u2292",
+ "squ;": "\u25a1",
+ "square;": "\u25a1",
+ "squarf;": "\u25aa",
+ "squf;": "\u25aa",
+ "srarr;": "\u2192",
+ "sscr;": "\U0001d4c8",
+ "ssetmn;": "\u2216",
+ "ssmile;": "\u2323",
+ "sstarf;": "\u22c6",
+ "star;": "\u2606",
+ "starf;": "\u2605",
+ "straightepsilon;": "\u03f5",
+ "straightphi;": "\u03d5",
+ "strns;": "\xaf",
+ "sub;": "\u2282",
+ "subE;": "\u2ac5",
+ "subdot;": "\u2abd",
+ "sube;": "\u2286",
+ "subedot;": "\u2ac3",
+ "submult;": "\u2ac1",
+ "subnE;": "\u2acb",
+ "subne;": "\u228a",
+ "subplus;": "\u2abf",
+ "subrarr;": "\u2979",
+ "subset;": "\u2282",
+ "subseteq;": "\u2286",
+ "subseteqq;": "\u2ac5",
+ "subsetneq;": "\u228a",
+ "subsetneqq;": "\u2acb",
+ "subsim;": "\u2ac7",
+ "subsub;": "\u2ad5",
+ "subsup;": "\u2ad3",
+ "succ;": "\u227b",
+ "succapprox;": "\u2ab8",
+ "succcurlyeq;": "\u227d",
+ "succeq;": "\u2ab0",
+ "succnapprox;": "\u2aba",
+ "succneqq;": "\u2ab6",
+ "succnsim;": "\u22e9",
+ "succsim;": "\u227f",
+ "sum;": "\u2211",
+ "sung;": "\u266a",
+ "sup1": "\xb9",
+ "sup1;": "\xb9",
+ "sup2": "\xb2",
+ "sup2;": "\xb2",
+ "sup3": "\xb3",
+ "sup3;": "\xb3",
+ "sup;": "\u2283",
+ "supE;": "\u2ac6",
+ "supdot;": "\u2abe",
+ "supdsub;": "\u2ad8",
+ "supe;": "\u2287",
+ "supedot;": "\u2ac4",
+ "suphsol;": "\u27c9",
+ "suphsub;": "\u2ad7",
+ "suplarr;": "\u297b",
+ "supmult;": "\u2ac2",
+ "supnE;": "\u2acc",
+ "supne;": "\u228b",
+ "supplus;": "\u2ac0",
+ "supset;": "\u2283",
+ "supseteq;": "\u2287",
+ "supseteqq;": "\u2ac6",
+ "supsetneq;": "\u228b",
+ "supsetneqq;": "\u2acc",
+ "supsim;": "\u2ac8",
+ "supsub;": "\u2ad4",
+ "supsup;": "\u2ad6",
+ "swArr;": "\u21d9",
+ "swarhk;": "\u2926",
+ "swarr;": "\u2199",
+ "swarrow;": "\u2199",
+ "swnwar;": "\u292a",
+ "szlig": "\xdf",
+ "szlig;": "\xdf",
+ "target;": "\u2316",
+ "tau;": "\u03c4",
+ "tbrk;": "\u23b4",
+ "tcaron;": "\u0165",
+ "tcedil;": "\u0163",
+ "tcy;": "\u0442",
+ "tdot;": "\u20db",
+ "telrec;": "\u2315",
+ "tfr;": "\U0001d531",
+ "there4;": "\u2234",
+ "therefore;": "\u2234",
+ "theta;": "\u03b8",
+ "thetasym;": "\u03d1",
+ "thetav;": "\u03d1",
+ "thickapprox;": "\u2248",
+ "thicksim;": "\u223c",
+ "thinsp;": "\u2009",
+ "thkap;": "\u2248",
+ "thksim;": "\u223c",
+ "thorn": "\xfe",
+ "thorn;": "\xfe",
+ "tilde;": "\u02dc",
+ "times": "\xd7",
+ "times;": "\xd7",
+ "timesb;": "\u22a0",
+ "timesbar;": "\u2a31",
+ "timesd;": "\u2a30",
+ "tint;": "\u222d",
+ "toea;": "\u2928",
+ "top;": "\u22a4",
+ "topbot;": "\u2336",
+ "topcir;": "\u2af1",
+ "topf;": "\U0001d565",
+ "topfork;": "\u2ada",
+ "tosa;": "\u2929",
+ "tprime;": "\u2034",
+ "trade;": "\u2122",
+ "triangle;": "\u25b5",
+ "triangledown;": "\u25bf",
+ "triangleleft;": "\u25c3",
+ "trianglelefteq;": "\u22b4",
+ "triangleq;": "\u225c",
+ "triangleright;": "\u25b9",
+ "trianglerighteq;": "\u22b5",
+ "tridot;": "\u25ec",
+ "trie;": "\u225c",
+ "triminus;": "\u2a3a",
+ "triplus;": "\u2a39",
+ "trisb;": "\u29cd",
+ "tritime;": "\u2a3b",
+ "trpezium;": "\u23e2",
+ "tscr;": "\U0001d4c9",
+ "tscy;": "\u0446",
+ "tshcy;": "\u045b",
+ "tstrok;": "\u0167",
+ "twixt;": "\u226c",
+ "twoheadleftarrow;": "\u219e",
+ "twoheadrightarrow;": "\u21a0",
+ "uArr;": "\u21d1",
+ "uHar;": "\u2963",
+ "uacute": "\xfa",
+ "uacute;": "\xfa",
+ "uarr;": "\u2191",
+ "ubrcy;": "\u045e",
+ "ubreve;": "\u016d",
+ "ucirc": "\xfb",
+ "ucirc;": "\xfb",
+ "ucy;": "\u0443",
+ "udarr;": "\u21c5",
+ "udblac;": "\u0171",
+ "udhar;": "\u296e",
+ "ufisht;": "\u297e",
+ "ufr;": "\U0001d532",
+ "ugrave": "\xf9",
+ "ugrave;": "\xf9",
+ "uharl;": "\u21bf",
+ "uharr;": "\u21be",
+ "uhblk;": "\u2580",
+ "ulcorn;": "\u231c",
+ "ulcorner;": "\u231c",
+ "ulcrop;": "\u230f",
+ "ultri;": "\u25f8",
+ "umacr;": "\u016b",
+ "uml": "\xa8",
+ "uml;": "\xa8",
+ "uogon;": "\u0173",
+ "uopf;": "\U0001d566",
+ "uparrow;": "\u2191",
+ "updownarrow;": "\u2195",
+ "upharpoonleft;": "\u21bf",
+ "upharpoonright;": "\u21be",
+ "uplus;": "\u228e",
+ "upsi;": "\u03c5",
+ "upsih;": "\u03d2",
+ "upsilon;": "\u03c5",
+ "upuparrows;": "\u21c8",
+ "urcorn;": "\u231d",
+ "urcorner;": "\u231d",
+ "urcrop;": "\u230e",
+ "uring;": "\u016f",
+ "urtri;": "\u25f9",
+ "uscr;": "\U0001d4ca",
+ "utdot;": "\u22f0",
+ "utilde;": "\u0169",
+ "utri;": "\u25b5",
+ "utrif;": "\u25b4",
+ "uuarr;": "\u21c8",
+ "uuml": "\xfc",
+ "uuml;": "\xfc",
+ "uwangle;": "\u29a7",
+ "vArr;": "\u21d5",
+ "vBar;": "\u2ae8",
+ "vBarv;": "\u2ae9",
+ "vDash;": "\u22a8",
+ "vangrt;": "\u299c",
+ "varepsilon;": "\u03f5",
+ "varkappa;": "\u03f0",
+ "varnothing;": "\u2205",
+ "varphi;": "\u03d5",
+ "varpi;": "\u03d6",
+ "varpropto;": "\u221d",
+ "varr;": "\u2195",
+ "varrho;": "\u03f1",
+ "varsigma;": "\u03c2",
+ "varsubsetneq;": "\u228a\ufe00",
+ "varsubsetneqq;": "\u2acb\ufe00",
+ "varsupsetneq;": "\u228b\ufe00",
+ "varsupsetneqq;": "\u2acc\ufe00",
+ "vartheta;": "\u03d1",
+ "vartriangleleft;": "\u22b2",
+ "vartriangleright;": "\u22b3",
+ "vcy;": "\u0432",
+ "vdash;": "\u22a2",
+ "vee;": "\u2228",
+ "veebar;": "\u22bb",
+ "veeeq;": "\u225a",
+ "vellip;": "\u22ee",
+ "verbar;": "|",
+ "vert;": "|",
+ "vfr;": "\U0001d533",
+ "vltri;": "\u22b2",
+ "vnsub;": "\u2282\u20d2",
+ "vnsup;": "\u2283\u20d2",
+ "vopf;": "\U0001d567",
+ "vprop;": "\u221d",
+ "vrtri;": "\u22b3",
+ "vscr;": "\U0001d4cb",
+ "vsubnE;": "\u2acb\ufe00",
+ "vsubne;": "\u228a\ufe00",
+ "vsupnE;": "\u2acc\ufe00",
+ "vsupne;": "\u228b\ufe00",
+ "vzigzag;": "\u299a",
+ "wcirc;": "\u0175",
+ "wedbar;": "\u2a5f",
+ "wedge;": "\u2227",
+ "wedgeq;": "\u2259",
+ "weierp;": "\u2118",
+ "wfr;": "\U0001d534",
+ "wopf;": "\U0001d568",
+ "wp;": "\u2118",
+ "wr;": "\u2240",
+ "wreath;": "\u2240",
+ "wscr;": "\U0001d4cc",
+ "xcap;": "\u22c2",
+ "xcirc;": "\u25ef",
+ "xcup;": "\u22c3",
+ "xdtri;": "\u25bd",
+ "xfr;": "\U0001d535",
+ "xhArr;": "\u27fa",
+ "xharr;": "\u27f7",
+ "xi;": "\u03be",
+ "xlArr;": "\u27f8",
+ "xlarr;": "\u27f5",
+ "xmap;": "\u27fc",
+ "xnis;": "\u22fb",
+ "xodot;": "\u2a00",
+ "xopf;": "\U0001d569",
+ "xoplus;": "\u2a01",
+ "xotime;": "\u2a02",
+ "xrArr;": "\u27f9",
+ "xrarr;": "\u27f6",
+ "xscr;": "\U0001d4cd",
+ "xsqcup;": "\u2a06",
+ "xuplus;": "\u2a04",
+ "xutri;": "\u25b3",
+ "xvee;": "\u22c1",
+ "xwedge;": "\u22c0",
+ "yacute": "\xfd",
+ "yacute;": "\xfd",
+ "yacy;": "\u044f",
+ "ycirc;": "\u0177",
+ "ycy;": "\u044b",
+ "yen": "\xa5",
+ "yen;": "\xa5",
+ "yfr;": "\U0001d536",
+ "yicy;": "\u0457",
+ "yopf;": "\U0001d56a",
+ "yscr;": "\U0001d4ce",
+ "yucy;": "\u044e",
+ "yuml": "\xff",
+ "yuml;": "\xff",
+ "zacute;": "\u017a",
+ "zcaron;": "\u017e",
+ "zcy;": "\u0437",
+ "zdot;": "\u017c",
+ "zeetrf;": "\u2128",
+ "zeta;": "\u03b6",
+ "zfr;": "\U0001d537",
+ "zhcy;": "\u0436",
+ "zigrarr;": "\u21dd",
+ "zopf;": "\U0001d56b",
+ "zscr;": "\U0001d4cf",
+ "zwj;": "\u200d",
+ "zwnj;": "\u200c",
+}
+
+replacementCharacters = {
+ 0x0: "\uFFFD",
+ 0x0d: "\u000D",
+ 0x80: "\u20AC",
+ 0x81: "\u0081",
+ 0x82: "\u201A",
+ 0x83: "\u0192",
+ 0x84: "\u201E",
+ 0x85: "\u2026",
+ 0x86: "\u2020",
+ 0x87: "\u2021",
+ 0x88: "\u02C6",
+ 0x89: "\u2030",
+ 0x8A: "\u0160",
+ 0x8B: "\u2039",
+ 0x8C: "\u0152",
+ 0x8D: "\u008D",
+ 0x8E: "\u017D",
+ 0x8F: "\u008F",
+ 0x90: "\u0090",
+ 0x91: "\u2018",
+ 0x92: "\u2019",
+ 0x93: "\u201C",
+ 0x94: "\u201D",
+ 0x95: "\u2022",
+ 0x96: "\u2013",
+ 0x97: "\u2014",
+ 0x98: "\u02DC",
+ 0x99: "\u2122",
+ 0x9A: "\u0161",
+ 0x9B: "\u203A",
+ 0x9C: "\u0153",
+ 0x9D: "\u009D",
+ 0x9E: "\u017E",
+ 0x9F: "\u0178",
+}
+
+tokenTypes = {
+ "Doctype": 0,
+ "Characters": 1,
+ "SpaceCharacters": 2,
+ "StartTag": 3,
+ "EndTag": 4,
+ "EmptyTag": 5,
+ "Comment": 6,
+ "ParseError": 7
+}
+
+tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"],
+ tokenTypes["EmptyTag"]])
+
+
+prefixes = {v: k for k, v in namespaces.items()}
+prefixes["http://www.w3.org/1998/Math/MathML"] = "math"
+
+
+class DataLossWarning(UserWarning):
+ """Raised when the current tree is unable to represent the input data"""
+ pass
+
+
+class _ReparseException(Exception):
+ pass
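
The constants added above are lookup tables for the tokenizer: `entities` maps named character references (including the legacy, semicolon-less forms) to their replacement text, `replacementCharacters` remaps numeric references for 0x00, 0x0D and the 0x80–0x9F Windows-1252 range per the HTML spec, and `tokenTypes`/`tagTokenTypes` are the token codes the rest of the library passes around. A minimal sketch of how they read, assuming the vendored `html5lib` package is importable:

    from html5lib.constants import (entities, replacementCharacters,
                                    tokenTypes, tagTokenTypes)

    # Named references resolve to their replacement text; the legacy set
    # also has entries without the trailing semicolon.
    assert entities["amp;"] == "&" and entities["amp"] == "&"
    assert entities["TildeTilde;"] == "\u2248"

    # Numeric references in the Windows-1252 range are remapped,
    # e.g. &#x80; decodes to the euro sign.
    assert replacementCharacters[0x80] == "\u20ac"

    # Tag tokens are grouped for quick membership tests.
    assert tokenTypes["StartTag"] in tagTokenTypes
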
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/__init__.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/alphabeticalattributes.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/alphabeticalattributes.py
new file mode 100644
index 0000000000..5ba926e3b0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/alphabeticalattributes.py
@@ -0,0 +1,29 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from . import base
+
+from collections import OrderedDict
+
+
+def _attr_key(attr):
+ """Return an appropriate key for an attribute for sorting
+
+ Attributes have a namespace that can be either ``None`` or a string. We
+ can't compare the two because they're different types, so we convert
+ ``None`` to an empty string first.
+
+ """
+ return (attr[0][0] or ''), attr[0][1]
+
+
+class Filter(base.Filter):
+ """Alphabetizes attributes for elements"""
+ def __iter__(self):
+ for token in base.Filter.__iter__(self):
+ if token["type"] in ("StartTag", "EmptyTag"):
+ attrs = OrderedDict()
+ for name, value in sorted(token["data"].items(),
+ key=_attr_key):
+ attrs[name] = value
+ token["data"] = attrs
+ yield token
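
The filter rewrites each start/empty tag's attribute dict into an `OrderedDict` sorted by (namespace, name), which gives deterministic attribute order when serializing. A usage sketch, assuming the vendored `html5lib` package is importable:

    import html5lib
    from html5lib import getTreeWalker
    from html5lib.serializer import HTMLSerializer
    from html5lib.filters.alphabeticalattributes import Filter as Alphabetize

    doc = html5lib.parse('<p title="z" id="x" class="y">hi</p>')
    stream = Alphabetize(getTreeWalker("etree")(doc))
    # Attributes now serialize in alphabetical order: class, id, title.
    print(HTMLSerializer(quote_attr_values="always").render(stream))

The serializer can also apply this filter on its own via its `alphabetical_attributes` option.
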
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/base.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/base.py
new file mode 100644
index 0000000000..c7dbaed0fa
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/base.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import, division, unicode_literals
+
+
+class Filter(object):
+ def __init__(self, source):
+ self.source = source
+
+ def __iter__(self):
+ return iter(self.source)
+
+ def __getattr__(self, name):
+ return getattr(self.source, name)
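
`base.Filter` is a pass-through wrapper: it stores the source token stream, iterates it unchanged, and proxies unknown attribute lookups to the wrapped source so chained filters still expose the underlying tree walker's API. Concrete filters subclass it and override `__iter__`. A hypothetical custom filter (not part of html5lib itself) might look like:

    from html5lib.filters import base

    class DropComments(base.Filter):
        """Example filter that removes Comment tokens from the stream."""
        def __iter__(self):
            for token in base.Filter.__iter__(self):
                if token["type"] != "Comment":
                    yield token
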
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/inject_meta_charset.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/inject_meta_charset.py
new file mode 100644
index 0000000000..aefb5c842c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/inject_meta_charset.py
@@ -0,0 +1,73 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from . import base
+
+
+class Filter(base.Filter):
+ """Injects ``<meta charset=ENCODING>`` tag into head of document"""
+ def __init__(self, source, encoding):
+ """Creates a Filter
+
+ :arg source: the source token stream
+
+ :arg encoding: the encoding to set
+
+ """
+ base.Filter.__init__(self, source)
+ self.encoding = encoding
+
+ def __iter__(self):
+ state = "pre_head"
+ meta_found = (self.encoding is None)
+ pending = []
+
+ for token in base.Filter.__iter__(self):
+ type = token["type"]
+ if type == "StartTag":
+ if token["name"].lower() == "head":
+ state = "in_head"
+
+ elif type == "EmptyTag":
+ if token["name"].lower() == "meta":
+ # replace charset with actual encoding
+ has_http_equiv_content_type = False
+ for (namespace, name), value in token["data"].items():
+ if namespace is not None:
+ continue
+ elif name.lower() == 'charset':
+ token["data"][(namespace, name)] = self.encoding
+ meta_found = True
+ break
+ elif name == 'http-equiv' and value.lower() == 'content-type':
+ has_http_equiv_content_type = True
+ else:
+ if has_http_equiv_content_type and (None, "content") in token["data"]:
+ token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding
+ meta_found = True
+
+ elif token["name"].lower() == "head" and not meta_found:
+ # insert meta into empty head
+ yield {"type": "StartTag", "name": "head",
+ "data": token["data"]}
+ yield {"type": "EmptyTag", "name": "meta",
+ "data": {(None, "charset"): self.encoding}}
+ yield {"type": "EndTag", "name": "head"}
+ meta_found = True
+ continue
+
+ elif type == "EndTag":
+ if token["name"].lower() == "head" and pending:
+ # insert meta into head (if necessary) and flush pending queue
+ yield pending.pop(0)
+ if not meta_found:
+ yield {"type": "EmptyTag", "name": "meta",
+ "data": {(None, "charset"): self.encoding}}
+ while pending:
+ yield pending.pop(0)
+ meta_found = True
+ state = "post_head"
+
+ if state == "in_head":
+ pending.append(token)
+ else:
+ yield token
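
The filter scans the token stream for an existing `<meta charset>` or `http-equiv=content-type` declaration to rewrite with the requested encoding; if none is seen by the time `</head>` arrives, it flushes the buffered head content with a fresh `<meta charset>` token inserted right after the head start tag. A usage sketch, assuming the vendored `html5lib` package is importable:

    import html5lib
    from html5lib import getTreeWalker
    from html5lib.serializer import HTMLSerializer
    from html5lib.filters.inject_meta_charset import Filter as MetaCharset

    doc = html5lib.parse("<title>t</title><p>hi</p>")
    stream = MetaCharset(getTreeWalker("etree")(doc), "utf-8")
    # The serialized head now carries a meta charset=utf-8 declaration.
    print(HTMLSerializer(omit_optional_tags=False).render(stream))

The serializer also applies this filter itself when an encoding is passed to `render()` and its `inject_meta_charset` option is left enabled.
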
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/lint.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/lint.py
new file mode 100644
index 0000000000..acd4d7a2af
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/lint.py
@@ -0,0 +1,93 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from six import text_type
+
+from . import base
+from ..constants import namespaces, voidElements
+
+from ..constants import spaceCharacters
+spaceCharacters = "".join(spaceCharacters)
+
+
+class Filter(base.Filter):
+ """Lints the token stream for errors
+
+ If it finds any errors, it'll raise an ``AssertionError``.
+
+ """
+ def __init__(self, source, require_matching_tags=True):
+ """Creates a Filter
+
+ :arg source: the source token stream
+
+ :arg require_matching_tags: whether or not to require matching tags
+
+ """
+ super(Filter, self).__init__(source)
+ self.require_matching_tags = require_matching_tags
+
+ def __iter__(self):
+ open_elements = []
+ for token in base.Filter.__iter__(self):
+ type = token["type"]
+ if type in ("StartTag", "EmptyTag"):
+ namespace = token["namespace"]
+ name = token["name"]
+ assert namespace is None or isinstance(namespace, text_type)
+ assert namespace != ""
+ assert isinstance(name, text_type)
+ assert name != ""
+ assert isinstance(token["data"], dict)
+ if (not namespace or namespace == namespaces["html"]) and name in voidElements:
+ assert type == "EmptyTag"
+ else:
+ assert type == "StartTag"
+ if type == "StartTag" and self.require_matching_tags:
+ open_elements.append((namespace, name))
+ for (namespace, name), value in token["data"].items():
+ assert namespace is None or isinstance(namespace, text_type)
+ assert namespace != ""
+ assert isinstance(name, text_type)
+ assert name != ""
+ assert isinstance(value, text_type)
+
+ elif type == "EndTag":
+ namespace = token["namespace"]
+ name = token["name"]
+ assert namespace is None or isinstance(namespace, text_type)
+ assert namespace != ""
+ assert isinstance(name, text_type)
+ assert name != ""
+ if (not namespace or namespace == namespaces["html"]) and name in voidElements:
+ assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name}
+ elif self.require_matching_tags:
+ start = open_elements.pop()
+ assert start == (namespace, name)
+
+ elif type == "Comment":
+ data = token["data"]
+ assert isinstance(data, text_type)
+
+ elif type in ("Characters", "SpaceCharacters"):
+ data = token["data"]
+ assert isinstance(data, text_type)
+ assert data != ""
+ if type == "SpaceCharacters":
+ assert data.strip(spaceCharacters) == ""
+
+ elif type == "Doctype":
+ name = token["name"]
+ assert name is None or isinstance(name, text_type)
+ assert token["publicId"] is None or isinstance(name, text_type)
+ assert token["systemId"] is None or isinstance(name, text_type)
+
+ elif type == "Entity":
+ assert isinstance(token["name"], text_type)
+
+ elif type == "SerializerError":
+ assert isinstance(token["data"], text_type)
+
+ else:
+ assert False, "Unknown token type: %(type)s" % {"type": type}
+
+ yield token
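
The lint filter is a development aid: it asserts invariants on every token (well-formed names, correct use of EmptyTag for void elements, balanced start/end tags when `require_matching_tags` is set) and re-yields the stream untouched. A sketch of dropping it into a pipeline, assuming the vendored `html5lib` package is importable:

    import html5lib
    from html5lib import getTreeWalker
    from html5lib.filters.lint import Filter as Lint

    doc = html5lib.parse("<p>hello</p>")
    # Any malformed token raises AssertionError while iterating.
    for token in Lint(getTreeWalker("etree")(doc)):
        pass
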
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/optionaltags.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/optionaltags.py
new file mode 100644
index 0000000000..4a865012c1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/optionaltags.py
@@ -0,0 +1,207 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from . import base
+
+
+class Filter(base.Filter):
+ """Removes optional tags from the token stream"""
+ def slider(self):
+ previous1 = previous2 = None
+ for token in self.source:
+ if previous1 is not None:
+ yield previous2, previous1, token
+ previous2 = previous1
+ previous1 = token
+ if previous1 is not None:
+ yield previous2, previous1, None
+
+ def __iter__(self):
+ for previous, token, next in self.slider():
+ type = token["type"]
+ if type == "StartTag":
+ if (token["data"] or
+ not self.is_optional_start(token["name"], previous, next)):
+ yield token
+ elif type == "EndTag":
+ if not self.is_optional_end(token["name"], next):
+ yield token
+ else:
+ yield token
+
+ def is_optional_start(self, tagname, previous, next):
+ type = next and next["type"] or None
+ if tagname in 'html':
+ # An html element's start tag may be omitted if the first thing
+ # inside the html element is not a space character or a comment.
+ return type not in ("Comment", "SpaceCharacters")
+ elif tagname == 'head':
+ # A head element's start tag may be omitted if the first thing
+ # inside the head element is an element.
+ # XXX: we also omit the start tag if the head element is empty
+ if type in ("StartTag", "EmptyTag"):
+ return True
+ elif type == "EndTag":
+ return next["name"] == "head"
+ elif tagname == 'body':
+ # A body element's start tag may be omitted if the first thing
+ # inside the body element is not a space character or a comment,
+ # except if the first thing inside the body element is a script
+ # or style element and the node immediately preceding the body
+ # element is a head element whose end tag has been omitted.
+ if type in ("Comment", "SpaceCharacters"):
+ return False
+ elif type == "StartTag":
+ # XXX: we do not look at the preceding event, so we never omit
+ # the body element's start tag if it's followed by a script or
+ # a style element.
+ return next["name"] not in ('script', 'style')
+ else:
+ return True
+ elif tagname == 'colgroup':
+ # A colgroup element's start tag may be omitted if the first thing
+ # inside the colgroup element is a col element, and if the element
+ # is not immediately preceded by another colgroup element whose
+ # end tag has been omitted.
+ if type in ("StartTag", "EmptyTag"):
+ # XXX: we do not look at the preceding event, so instead we never
+ # omit the colgroup element's end tag when it is immediately
+ # followed by another colgroup element. See is_optional_end.
+ return next["name"] == "col"
+ else:
+ return False
+ elif tagname == 'tbody':
+ # A tbody element's start tag may be omitted if the first thing
+ # inside the tbody element is a tr element, and if the element is
+ # not immediately preceded by a tbody, thead, or tfoot element
+ # whose end tag has been omitted.
+ if type == "StartTag":
+ # omit the thead and tfoot elements' end tag when they are
+ # immediately followed by a tbody element. See is_optional_end.
+ if previous and previous['type'] == 'EndTag' and \
+ previous['name'] in ('tbody', 'thead', 'tfoot'):
+ return False
+ return next["name"] == 'tr'
+ else:
+ return False
+ return False
+
+ def is_optional_end(self, tagname, next):
+ type = next and next["type"] or None
+ if tagname in ('html', 'head', 'body'):
+ # An html element's end tag may be omitted if the html element
+ # is not immediately followed by a space character or a comment.
+ return type not in ("Comment", "SpaceCharacters")
+ elif tagname in ('li', 'optgroup', 'tr'):
+ # A li element's end tag may be omitted if the li element is
+ # immediately followed by another li element or if there is
+ # no more content in the parent element.
+ # An optgroup element's end tag may be omitted if the optgroup
+ # element is immediately followed by another optgroup element,
+ # or if there is no more content in the parent element.
+ # A tr element's end tag may be omitted if the tr element is
+ # immediately followed by another tr element, or if there is
+ # no more content in the parent element.
+ if type == "StartTag":
+ return next["name"] == tagname
+ else:
+ return type == "EndTag" or type is None
+ elif tagname in ('dt', 'dd'):
+ # A dt element's end tag may be omitted if the dt element is
+ # immediately followed by another dt element or a dd element.
+ # A dd element's end tag may be omitted if the dd element is
+ # immediately followed by another dd element or a dt element,
+ # or if there is no more content in the parent element.
+ if type == "StartTag":
+ return next["name"] in ('dt', 'dd')
+ elif tagname == 'dd':
+ return type == "EndTag" or type is None
+ else:
+ return False
+ elif tagname == 'p':
+ # A p element's end tag may be omitted if the p element is
+ # immediately followed by an address, article, aside,
+ # blockquote, datagrid, dialog, dir, div, dl, fieldset,
+ # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu,
+ # nav, ol, p, pre, section, table, or ul, element, or if
+ # there is no more content in the parent element.
+ if type in ("StartTag", "EmptyTag"):
+ return next["name"] in ('address', 'article', 'aside',
+ 'blockquote', 'datagrid', 'dialog',
+ 'dir', 'div', 'dl', 'fieldset', 'footer',
+ 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
+ 'header', 'hr', 'menu', 'nav', 'ol',
+ 'p', 'pre', 'section', 'table', 'ul')
+ else:
+ return type == "EndTag" or type is None
+ elif tagname == 'option':
+ # An option element's end tag may be omitted if the option
+ # element is immediately followed by another option element,
+ # or if it is immediately followed by an <code>optgroup</code>
+ # element, or if there is no more content in the parent
+ # element.
+ if type == "StartTag":
+ return next["name"] in ('option', 'optgroup')
+ else:
+ return type == "EndTag" or type is None
+ elif tagname in ('rt', 'rp'):
+ # An rt element's end tag may be omitted if the rt element is
+ # immediately followed by an rt or rp element, or if there is
+ # no more content in the parent element.
+ # An rp element's end tag may be omitted if the rp element is
+ # immediately followed by an rt or rp element, or if there is
+ # no more content in the parent element.
+ if type == "StartTag":
+ return next["name"] in ('rt', 'rp')
+ else:
+ return type == "EndTag" or type is None
+ elif tagname == 'colgroup':
+ # A colgroup element's end tag may be omitted if the colgroup
+ # element is not immediately followed by a space character or
+ # a comment.
+ if type in ("Comment", "SpaceCharacters"):
+ return False
+ elif type == "StartTag":
+ # XXX: we also look for an immediately following colgroup
+ # element. See is_optional_start.
+ return next["name"] != 'colgroup'
+ else:
+ return True
+ elif tagname in ('thead', 'tbody'):
+ # A thead element's end tag may be omitted if the thead element
+ # is immediately followed by a tbody or tfoot element.
+ # A tbody element's end tag may be omitted if the tbody element
+ # is immediately followed by a tbody or tfoot element, or if
+ # there is no more content in the parent element.
+ # A tfoot element's end tag may be omitted if the tfoot element
+ # is immediately followed by a tbody element, or if there is no
+ # more content in the parent element.
+ # XXX: we never omit the end tag when the following element is
+ # a tbody. See is_optional_start.
+ if type == "StartTag":
+ return next["name"] in ['tbody', 'tfoot']
+ elif tagname == 'tbody':
+ return type == "EndTag" or type is None
+ else:
+ return False
+ elif tagname == 'tfoot':
+ # A tfoot element's end tag may be omitted if the tfoot element
+ # is immediately followed by a tbody element, or if there is no
+ # more content in the parent element.
+ # XXX: we never omit the end tag when the following element is
+ # a tbody. See is_optional_start.
+ if type == "StartTag":
+ return next["name"] == 'tbody'
+ else:
+ return type == "EndTag" or type is None
+ elif tagname in ('td', 'th'):
+ # A td element's end tag may be omitted if the td element is
+ # immediately followed by a td or th element, or if there is
+ # no more content in the parent element.
+ # A th element's end tag may be omitted if the th element is
+ # immediately followed by a td or th element, or if there is
+ # no more content in the parent element.
+ if type == "StartTag":
+ return next["name"] in ('td', 'th')
+ else:
+ return type == "EndTag" or type is None
+ return False
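
Each branch above encodes one of the tag-omission rules from the HTML spec, approximated using only the previous and next tokens supplied by `slider()`; the XXX comments flag the places where a single-token lookahead cannot see far enough. A usage sketch, assuming the vendored `html5lib` package is importable:

    import html5lib
    from html5lib import getTreeWalker
    from html5lib.serializer import HTMLSerializer
    from html5lib.filters.optionaltags import Filter as OptionalTags

    doc = html5lib.parse("<table><tbody><tr><td>1<td>2</table>")
    stream = OptionalTags(getTreeWalker("etree")(doc))
    # Omissible tags (e.g. </td>, </tr>, the <tbody> start tag) are gone
    # before the serializer even sees them.
    print(HTMLSerializer(omit_optional_tags=False).render(stream))

This is the same filter the serializer applies when its default `omit_optional_tags=True` is in effect.
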
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/sanitizer.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/sanitizer.py
new file mode 100644
index 0000000000..70ef90665e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/sanitizer.py
@@ -0,0 +1,916 @@
+"""Deprecated from html5lib 1.1.
+
+See `here <https://github.com/html5lib/html5lib-python/issues/443>`_ for
+information about its deprecation; `Bleach <https://github.com/mozilla/bleach>`_
+is recommended as a replacement. Please let us know in the aforementioned issue
+if Bleach is unsuitable for your needs.
+
+"""
+from __future__ import absolute_import, division, unicode_literals
+
+import re
+import warnings
+from xml.sax.saxutils import escape, unescape
+
+from six.moves import urllib_parse as urlparse
+
+from . import base
+from ..constants import namespaces, prefixes
+
+__all__ = ["Filter"]
+
+
+_deprecation_msg = (
+ "html5lib's sanitizer is deprecated; see " +
+ "https://github.com/html5lib/html5lib-python/issues/443 and please let " +
+ "us know if Bleach is unsuitable for your needs"
+)
+
+warnings.warn(_deprecation_msg, DeprecationWarning)
+
+allowed_elements = frozenset((
+ (namespaces['html'], 'a'),
+ (namespaces['html'], 'abbr'),
+ (namespaces['html'], 'acronym'),
+ (namespaces['html'], 'address'),
+ (namespaces['html'], 'area'),
+ (namespaces['html'], 'article'),
+ (namespaces['html'], 'aside'),
+ (namespaces['html'], 'audio'),
+ (namespaces['html'], 'b'),
+ (namespaces['html'], 'big'),
+ (namespaces['html'], 'blockquote'),
+ (namespaces['html'], 'br'),
+ (namespaces['html'], 'button'),
+ (namespaces['html'], 'canvas'),
+ (namespaces['html'], 'caption'),
+ (namespaces['html'], 'center'),
+ (namespaces['html'], 'cite'),
+ (namespaces['html'], 'code'),
+ (namespaces['html'], 'col'),
+ (namespaces['html'], 'colgroup'),
+ (namespaces['html'], 'command'),
+ (namespaces['html'], 'datagrid'),
+ (namespaces['html'], 'datalist'),
+ (namespaces['html'], 'dd'),
+ (namespaces['html'], 'del'),
+ (namespaces['html'], 'details'),
+ (namespaces['html'], 'dfn'),
+ (namespaces['html'], 'dialog'),
+ (namespaces['html'], 'dir'),
+ (namespaces['html'], 'div'),
+ (namespaces['html'], 'dl'),
+ (namespaces['html'], 'dt'),
+ (namespaces['html'], 'em'),
+ (namespaces['html'], 'event-source'),
+ (namespaces['html'], 'fieldset'),
+ (namespaces['html'], 'figcaption'),
+ (namespaces['html'], 'figure'),
+ (namespaces['html'], 'footer'),
+ (namespaces['html'], 'font'),
+ (namespaces['html'], 'form'),
+ (namespaces['html'], 'header'),
+ (namespaces['html'], 'h1'),
+ (namespaces['html'], 'h2'),
+ (namespaces['html'], 'h3'),
+ (namespaces['html'], 'h4'),
+ (namespaces['html'], 'h5'),
+ (namespaces['html'], 'h6'),
+ (namespaces['html'], 'hr'),
+ (namespaces['html'], 'i'),
+ (namespaces['html'], 'img'),
+ (namespaces['html'], 'input'),
+ (namespaces['html'], 'ins'),
+ (namespaces['html'], 'keygen'),
+ (namespaces['html'], 'kbd'),
+ (namespaces['html'], 'label'),
+ (namespaces['html'], 'legend'),
+ (namespaces['html'], 'li'),
+ (namespaces['html'], 'm'),
+ (namespaces['html'], 'map'),
+ (namespaces['html'], 'menu'),
+ (namespaces['html'], 'meter'),
+ (namespaces['html'], 'multicol'),
+ (namespaces['html'], 'nav'),
+ (namespaces['html'], 'nextid'),
+ (namespaces['html'], 'ol'),
+ (namespaces['html'], 'output'),
+ (namespaces['html'], 'optgroup'),
+ (namespaces['html'], 'option'),
+ (namespaces['html'], 'p'),
+ (namespaces['html'], 'pre'),
+ (namespaces['html'], 'progress'),
+ (namespaces['html'], 'q'),
+ (namespaces['html'], 's'),
+ (namespaces['html'], 'samp'),
+ (namespaces['html'], 'section'),
+ (namespaces['html'], 'select'),
+ (namespaces['html'], 'small'),
+ (namespaces['html'], 'sound'),
+ (namespaces['html'], 'source'),
+ (namespaces['html'], 'spacer'),
+ (namespaces['html'], 'span'),
+ (namespaces['html'], 'strike'),
+ (namespaces['html'], 'strong'),
+ (namespaces['html'], 'sub'),
+ (namespaces['html'], 'sup'),
+ (namespaces['html'], 'table'),
+ (namespaces['html'], 'tbody'),
+ (namespaces['html'], 'td'),
+ (namespaces['html'], 'textarea'),
+ (namespaces['html'], 'time'),
+ (namespaces['html'], 'tfoot'),
+ (namespaces['html'], 'th'),
+ (namespaces['html'], 'thead'),
+ (namespaces['html'], 'tr'),
+ (namespaces['html'], 'tt'),
+ (namespaces['html'], 'u'),
+ (namespaces['html'], 'ul'),
+ (namespaces['html'], 'var'),
+ (namespaces['html'], 'video'),
+ (namespaces['mathml'], 'maction'),
+ (namespaces['mathml'], 'math'),
+ (namespaces['mathml'], 'merror'),
+ (namespaces['mathml'], 'mfrac'),
+ (namespaces['mathml'], 'mi'),
+ (namespaces['mathml'], 'mmultiscripts'),
+ (namespaces['mathml'], 'mn'),
+ (namespaces['mathml'], 'mo'),
+ (namespaces['mathml'], 'mover'),
+ (namespaces['mathml'], 'mpadded'),
+ (namespaces['mathml'], 'mphantom'),
+ (namespaces['mathml'], 'mprescripts'),
+ (namespaces['mathml'], 'mroot'),
+ (namespaces['mathml'], 'mrow'),
+ (namespaces['mathml'], 'mspace'),
+ (namespaces['mathml'], 'msqrt'),
+ (namespaces['mathml'], 'mstyle'),
+ (namespaces['mathml'], 'msub'),
+ (namespaces['mathml'], 'msubsup'),
+ (namespaces['mathml'], 'msup'),
+ (namespaces['mathml'], 'mtable'),
+ (namespaces['mathml'], 'mtd'),
+ (namespaces['mathml'], 'mtext'),
+ (namespaces['mathml'], 'mtr'),
+ (namespaces['mathml'], 'munder'),
+ (namespaces['mathml'], 'munderover'),
+ (namespaces['mathml'], 'none'),
+ (namespaces['svg'], 'a'),
+ (namespaces['svg'], 'animate'),
+ (namespaces['svg'], 'animateColor'),
+ (namespaces['svg'], 'animateMotion'),
+ (namespaces['svg'], 'animateTransform'),
+ (namespaces['svg'], 'clipPath'),
+ (namespaces['svg'], 'circle'),
+ (namespaces['svg'], 'defs'),
+ (namespaces['svg'], 'desc'),
+ (namespaces['svg'], 'ellipse'),
+ (namespaces['svg'], 'font-face'),
+ (namespaces['svg'], 'font-face-name'),
+ (namespaces['svg'], 'font-face-src'),
+ (namespaces['svg'], 'g'),
+ (namespaces['svg'], 'glyph'),
+ (namespaces['svg'], 'hkern'),
+ (namespaces['svg'], 'linearGradient'),
+ (namespaces['svg'], 'line'),
+ (namespaces['svg'], 'marker'),
+ (namespaces['svg'], 'metadata'),
+ (namespaces['svg'], 'missing-glyph'),
+ (namespaces['svg'], 'mpath'),
+ (namespaces['svg'], 'path'),
+ (namespaces['svg'], 'polygon'),
+ (namespaces['svg'], 'polyline'),
+ (namespaces['svg'], 'radialGradient'),
+ (namespaces['svg'], 'rect'),
+ (namespaces['svg'], 'set'),
+ (namespaces['svg'], 'stop'),
+ (namespaces['svg'], 'svg'),
+ (namespaces['svg'], 'switch'),
+ (namespaces['svg'], 'text'),
+ (namespaces['svg'], 'title'),
+ (namespaces['svg'], 'tspan'),
+ (namespaces['svg'], 'use'),
+))
+
+allowed_attributes = frozenset((
+ # HTML attributes
+ (None, 'abbr'),
+ (None, 'accept'),
+ (None, 'accept-charset'),
+ (None, 'accesskey'),
+ (None, 'action'),
+ (None, 'align'),
+ (None, 'alt'),
+ (None, 'autocomplete'),
+ (None, 'autofocus'),
+ (None, 'axis'),
+ (None, 'background'),
+ (None, 'balance'),
+ (None, 'bgcolor'),
+ (None, 'bgproperties'),
+ (None, 'border'),
+ (None, 'bordercolor'),
+ (None, 'bordercolordark'),
+ (None, 'bordercolorlight'),
+ (None, 'bottompadding'),
+ (None, 'cellpadding'),
+ (None, 'cellspacing'),
+ (None, 'ch'),
+ (None, 'challenge'),
+ (None, 'char'),
+ (None, 'charoff'),
+ (None, 'choff'),
+ (None, 'charset'),
+ (None, 'checked'),
+ (None, 'cite'),
+ (None, 'class'),
+ (None, 'clear'),
+ (None, 'color'),
+ (None, 'cols'),
+ (None, 'colspan'),
+ (None, 'compact'),
+ (None, 'contenteditable'),
+ (None, 'controls'),
+ (None, 'coords'),
+ (None, 'data'),
+ (None, 'datafld'),
+ (None, 'datapagesize'),
+ (None, 'datasrc'),
+ (None, 'datetime'),
+ (None, 'default'),
+ (None, 'delay'),
+ (None, 'dir'),
+ (None, 'disabled'),
+ (None, 'draggable'),
+ (None, 'dynsrc'),
+ (None, 'enctype'),
+ (None, 'end'),
+ (None, 'face'),
+ (None, 'for'),
+ (None, 'form'),
+ (None, 'frame'),
+ (None, 'galleryimg'),
+ (None, 'gutter'),
+ (None, 'headers'),
+ (None, 'height'),
+ (None, 'hidefocus'),
+ (None, 'hidden'),
+ (None, 'high'),
+ (None, 'href'),
+ (None, 'hreflang'),
+ (None, 'hspace'),
+ (None, 'icon'),
+ (None, 'id'),
+ (None, 'inputmode'),
+ (None, 'ismap'),
+ (None, 'keytype'),
+ (None, 'label'),
+ (None, 'leftspacing'),
+ (None, 'lang'),
+ (None, 'list'),
+ (None, 'longdesc'),
+ (None, 'loop'),
+ (None, 'loopcount'),
+ (None, 'loopend'),
+ (None, 'loopstart'),
+ (None, 'low'),
+ (None, 'lowsrc'),
+ (None, 'max'),
+ (None, 'maxlength'),
+ (None, 'media'),
+ (None, 'method'),
+ (None, 'min'),
+ (None, 'multiple'),
+ (None, 'name'),
+ (None, 'nohref'),
+ (None, 'noshade'),
+ (None, 'nowrap'),
+ (None, 'open'),
+ (None, 'optimum'),
+ (None, 'pattern'),
+ (None, 'ping'),
+ (None, 'point-size'),
+ (None, 'poster'),
+ (None, 'pqg'),
+ (None, 'preload'),
+ (None, 'prompt'),
+ (None, 'radiogroup'),
+ (None, 'readonly'),
+ (None, 'rel'),
+ (None, 'repeat-max'),
+ (None, 'repeat-min'),
+ (None, 'replace'),
+ (None, 'required'),
+ (None, 'rev'),
+ (None, 'rightspacing'),
+ (None, 'rows'),
+ (None, 'rowspan'),
+ (None, 'rules'),
+ (None, 'scope'),
+ (None, 'selected'),
+ (None, 'shape'),
+ (None, 'size'),
+ (None, 'span'),
+ (None, 'src'),
+ (None, 'start'),
+ (None, 'step'),
+ (None, 'style'),
+ (None, 'summary'),
+ (None, 'suppress'),
+ (None, 'tabindex'),
+ (None, 'target'),
+ (None, 'template'),
+ (None, 'title'),
+ (None, 'toppadding'),
+ (None, 'type'),
+ (None, 'unselectable'),
+ (None, 'usemap'),
+ (None, 'urn'),
+ (None, 'valign'),
+ (None, 'value'),
+ (None, 'variable'),
+ (None, 'volume'),
+ (None, 'vspace'),
+ (None, 'vrml'),
+ (None, 'width'),
+ (None, 'wrap'),
+ (namespaces['xml'], 'lang'),
+ # MathML attributes
+ (None, 'actiontype'),
+ (None, 'align'),
+    (None, 'columnalign'),
+ (None, 'columnlines'),
+ (None, 'columnspacing'),
+ (None, 'columnspan'),
+ (None, 'depth'),
+ (None, 'display'),
+ (None, 'displaystyle'),
+ (None, 'equalcolumns'),
+ (None, 'equalrows'),
+ (None, 'fence'),
+ (None, 'fontstyle'),
+ (None, 'fontweight'),
+ (None, 'frame'),
+ (None, 'height'),
+ (None, 'linethickness'),
+ (None, 'lspace'),
+ (None, 'mathbackground'),
+ (None, 'mathcolor'),
+    (None, 'mathvariant'),
+ (None, 'maxsize'),
+ (None, 'minsize'),
+ (None, 'other'),
+    (None, 'rowalign'),
+ (None, 'rowlines'),
+ (None, 'rowspacing'),
+ (None, 'rowspan'),
+ (None, 'rspace'),
+ (None, 'scriptlevel'),
+ (None, 'selection'),
+ (None, 'separator'),
+ (None, 'stretchy'),
+    (None, 'width'),
+ (namespaces['xlink'], 'href'),
+ (namespaces['xlink'], 'show'),
+ (namespaces['xlink'], 'type'),
+ # SVG attributes
+ (None, 'accent-height'),
+ (None, 'accumulate'),
+ (None, 'additive'),
+ (None, 'alphabetic'),
+ (None, 'arabic-form'),
+ (None, 'ascent'),
+ (None, 'attributeName'),
+ (None, 'attributeType'),
+ (None, 'baseProfile'),
+ (None, 'bbox'),
+ (None, 'begin'),
+ (None, 'by'),
+ (None, 'calcMode'),
+ (None, 'cap-height'),
+ (None, 'class'),
+ (None, 'clip-path'),
+ (None, 'color'),
+ (None, 'color-rendering'),
+ (None, 'content'),
+ (None, 'cx'),
+ (None, 'cy'),
+ (None, 'd'),
+ (None, 'dx'),
+ (None, 'dy'),
+ (None, 'descent'),
+ (None, 'display'),
+ (None, 'dur'),
+ (None, 'end'),
+ (None, 'fill'),
+ (None, 'fill-opacity'),
+ (None, 'fill-rule'),
+ (None, 'font-family'),
+ (None, 'font-size'),
+ (None, 'font-stretch'),
+ (None, 'font-style'),
+ (None, 'font-variant'),
+ (None, 'font-weight'),
+ (None, 'from'),
+ (None, 'fx'),
+ (None, 'fy'),
+ (None, 'g1'),
+ (None, 'g2'),
+ (None, 'glyph-name'),
+ (None, 'gradientUnits'),
+ (None, 'hanging'),
+ (None, 'height'),
+ (None, 'horiz-adv-x'),
+ (None, 'horiz-origin-x'),
+ (None, 'id'),
+ (None, 'ideographic'),
+ (None, 'k'),
+ (None, 'keyPoints'),
+ (None, 'keySplines'),
+ (None, 'keyTimes'),
+ (None, 'lang'),
+ (None, 'marker-end'),
+ (None, 'marker-mid'),
+ (None, 'marker-start'),
+ (None, 'markerHeight'),
+ (None, 'markerUnits'),
+ (None, 'markerWidth'),
+ (None, 'mathematical'),
+ (None, 'max'),
+ (None, 'min'),
+ (None, 'name'),
+ (None, 'offset'),
+ (None, 'opacity'),
+ (None, 'orient'),
+ (None, 'origin'),
+ (None, 'overline-position'),
+ (None, 'overline-thickness'),
+ (None, 'panose-1'),
+ (None, 'path'),
+ (None, 'pathLength'),
+ (None, 'points'),
+ (None, 'preserveAspectRatio'),
+ (None, 'r'),
+ (None, 'refX'),
+ (None, 'refY'),
+ (None, 'repeatCount'),
+ (None, 'repeatDur'),
+ (None, 'requiredExtensions'),
+ (None, 'requiredFeatures'),
+ (None, 'restart'),
+ (None, 'rotate'),
+ (None, 'rx'),
+ (None, 'ry'),
+ (None, 'slope'),
+ (None, 'stemh'),
+ (None, 'stemv'),
+ (None, 'stop-color'),
+ (None, 'stop-opacity'),
+ (None, 'strikethrough-position'),
+ (None, 'strikethrough-thickness'),
+ (None, 'stroke'),
+ (None, 'stroke-dasharray'),
+ (None, 'stroke-dashoffset'),
+ (None, 'stroke-linecap'),
+ (None, 'stroke-linejoin'),
+ (None, 'stroke-miterlimit'),
+ (None, 'stroke-opacity'),
+ (None, 'stroke-width'),
+ (None, 'systemLanguage'),
+ (None, 'target'),
+ (None, 'text-anchor'),
+ (None, 'to'),
+ (None, 'transform'),
+ (None, 'type'),
+ (None, 'u1'),
+ (None, 'u2'),
+ (None, 'underline-position'),
+ (None, 'underline-thickness'),
+ (None, 'unicode'),
+ (None, 'unicode-range'),
+ (None, 'units-per-em'),
+ (None, 'values'),
+ (None, 'version'),
+ (None, 'viewBox'),
+ (None, 'visibility'),
+ (None, 'width'),
+ (None, 'widths'),
+ (None, 'x'),
+ (None, 'x-height'),
+ (None, 'x1'),
+ (None, 'x2'),
+ (namespaces['xlink'], 'actuate'),
+ (namespaces['xlink'], 'arcrole'),
+ (namespaces['xlink'], 'href'),
+ (namespaces['xlink'], 'role'),
+ (namespaces['xlink'], 'show'),
+ (namespaces['xlink'], 'title'),
+ (namespaces['xlink'], 'type'),
+ (namespaces['xml'], 'base'),
+ (namespaces['xml'], 'lang'),
+ (namespaces['xml'], 'space'),
+ (None, 'y'),
+ (None, 'y1'),
+ (None, 'y2'),
+ (None, 'zoomAndPan'),
+))
+
+attr_val_is_uri = frozenset((
+ (None, 'href'),
+ (None, 'src'),
+ (None, 'cite'),
+ (None, 'action'),
+ (None, 'longdesc'),
+ (None, 'poster'),
+ (None, 'background'),
+ (None, 'datasrc'),
+ (None, 'dynsrc'),
+ (None, 'lowsrc'),
+ (None, 'ping'),
+ (namespaces['xlink'], 'href'),
+ (namespaces['xml'], 'base'),
+))
+
+svg_attr_val_allows_ref = frozenset((
+ (None, 'clip-path'),
+ (None, 'color-profile'),
+ (None, 'cursor'),
+ (None, 'fill'),
+ (None, 'filter'),
+ (None, 'marker'),
+ (None, 'marker-start'),
+ (None, 'marker-mid'),
+ (None, 'marker-end'),
+ (None, 'mask'),
+ (None, 'stroke'),
+))
+
+svg_allow_local_href = frozenset((
+ (None, 'altGlyph'),
+ (None, 'animate'),
+ (None, 'animateColor'),
+ (None, 'animateMotion'),
+ (None, 'animateTransform'),
+ (None, 'cursor'),
+ (None, 'feImage'),
+ (None, 'filter'),
+ (None, 'linearGradient'),
+ (None, 'pattern'),
+ (None, 'radialGradient'),
+ (None, 'textpath'),
+ (None, 'tref'),
+ (None, 'set'),
+ (None, 'use')
+))
+
+allowed_css_properties = frozenset((
+ 'azimuth',
+ 'background-color',
+ 'border-bottom-color',
+ 'border-collapse',
+ 'border-color',
+ 'border-left-color',
+ 'border-right-color',
+ 'border-top-color',
+ 'clear',
+ 'color',
+ 'cursor',
+ 'direction',
+ 'display',
+ 'elevation',
+ 'float',
+ 'font',
+ 'font-family',
+ 'font-size',
+ 'font-style',
+ 'font-variant',
+ 'font-weight',
+ 'height',
+ 'letter-spacing',
+ 'line-height',
+ 'overflow',
+ 'pause',
+ 'pause-after',
+ 'pause-before',
+ 'pitch',
+ 'pitch-range',
+ 'richness',
+ 'speak',
+ 'speak-header',
+ 'speak-numeral',
+ 'speak-punctuation',
+ 'speech-rate',
+ 'stress',
+ 'text-align',
+ 'text-decoration',
+ 'text-indent',
+ 'unicode-bidi',
+ 'vertical-align',
+ 'voice-family',
+ 'volume',
+ 'white-space',
+ 'width',
+))
+
+allowed_css_keywords = frozenset((
+ 'auto',
+ 'aqua',
+ 'black',
+ 'block',
+ 'blue',
+ 'bold',
+ 'both',
+ 'bottom',
+ 'brown',
+ 'center',
+ 'collapse',
+ 'dashed',
+ 'dotted',
+ 'fuchsia',
+ 'gray',
+ 'green',
+ '!important',
+ 'italic',
+ 'left',
+ 'lime',
+ 'maroon',
+ 'medium',
+ 'none',
+ 'navy',
+ 'normal',
+ 'nowrap',
+ 'olive',
+ 'pointer',
+ 'purple',
+ 'red',
+ 'right',
+ 'solid',
+ 'silver',
+ 'teal',
+ 'top',
+ 'transparent',
+ 'underline',
+ 'white',
+ 'yellow',
+))
+
+allowed_svg_properties = frozenset((
+ 'fill',
+ 'fill-opacity',
+ 'fill-rule',
+ 'stroke',
+ 'stroke-width',
+ 'stroke-linecap',
+ 'stroke-linejoin',
+ 'stroke-opacity',
+))
+
+allowed_protocols = frozenset((
+ 'ed2k',
+ 'ftp',
+ 'http',
+ 'https',
+ 'irc',
+ 'mailto',
+ 'news',
+ 'gopher',
+ 'nntp',
+ 'telnet',
+ 'webcal',
+ 'xmpp',
+ 'callto',
+ 'feed',
+ 'urn',
+ 'aim',
+ 'rsync',
+ 'tag',
+ 'ssh',
+ 'sftp',
+ 'rtsp',
+ 'afs',
+ 'data',
+))
+
+allowed_content_types = frozenset((
+ 'image/png',
+ 'image/jpeg',
+ 'image/gif',
+ 'image/webp',
+ 'image/bmp',
+ 'text/plain',
+))
+
+
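+# Parses the body of a data: URI (e.g. "image/png;base64,....") and captures
+# its content type; allowed_token() below uses this to restrict data: URLs to
+# the types listed in allowed_content_types.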
+data_content_type = re.compile(r'''
+ ^
+    # Match a content type <maintype>/<subtype>, e.g. image/png
+ (?P<content_type>[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+)
+ # Match any character set and encoding
+ (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?)
+ |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?)
+ # Assume the rest is data
+ ,.*
+ $
+ ''',
+ re.VERBOSE)
+
+
+class Filter(base.Filter):
+ """Sanitizes token stream of XHTML+MathML+SVG and of inline style attributes"""
+ def __init__(self,
+ source,
+ allowed_elements=allowed_elements,
+ allowed_attributes=allowed_attributes,
+ allowed_css_properties=allowed_css_properties,
+ allowed_css_keywords=allowed_css_keywords,
+ allowed_svg_properties=allowed_svg_properties,
+ allowed_protocols=allowed_protocols,
+ allowed_content_types=allowed_content_types,
+ attr_val_is_uri=attr_val_is_uri,
+ svg_attr_val_allows_ref=svg_attr_val_allows_ref,
+ svg_allow_local_href=svg_allow_local_href):
+ """Creates a Filter
+
+ :arg allowed_elements: set of elements to allow--everything else will
+ be escaped
+
+ :arg allowed_attributes: set of attributes to allow in
+ elements--everything else will be stripped
+
+ :arg allowed_css_properties: set of CSS properties to allow--everything
+ else will be stripped
+
+ :arg allowed_css_keywords: set of CSS keywords to allow--everything
+ else will be stripped
+
+ :arg allowed_svg_properties: set of SVG properties to allow--everything
+ else will be removed
+
+ :arg allowed_protocols: set of allowed protocols for URIs
+
+ :arg allowed_content_types: set of allowed content types for ``data`` URIs.
+
+ :arg attr_val_is_uri: set of attributes that have URI values--values
+ that have a scheme not listed in ``allowed_protocols`` are removed
+
+ :arg svg_attr_val_allows_ref: set of SVG attributes that can have
+ references
+
+        :arg svg_allow_local_href: set of SVG elements whose ``xlink:href``
+            must be a local (fragment) reference--non-local hrefs on these
+            elements are removed
+
+ """
+ super(Filter, self).__init__(source)
+
+ warnings.warn(_deprecation_msg, DeprecationWarning)
+
+ self.allowed_elements = allowed_elements
+ self.allowed_attributes = allowed_attributes
+ self.allowed_css_properties = allowed_css_properties
+ self.allowed_css_keywords = allowed_css_keywords
+ self.allowed_svg_properties = allowed_svg_properties
+ self.allowed_protocols = allowed_protocols
+ self.allowed_content_types = allowed_content_types
+ self.attr_val_is_uri = attr_val_is_uri
+ self.svg_attr_val_allows_ref = svg_attr_val_allows_ref
+ self.svg_allow_local_href = svg_allow_local_href
+
+ def __iter__(self):
+ for token in base.Filter.__iter__(self):
+ token = self.sanitize_token(token)
+ if token:
+ yield token
+
+    # Sanitize the token stream, escaping all elements not in allowed_elements
+    # and stripping out all attributes not in allowed_attributes. Style
+    # attributes are parsed, and a restricted set, specified by
+    # allowed_css_properties and allowed_css_keywords, is allowed through.
+    # Attributes in attr_val_is_uri are scanned, and only URI schemes listed
+    # in allowed_protocols are allowed.
+ #
+ # sanitize_html('<script> do_nasty_stuff() </script>')
+ # => &lt;script> do_nasty_stuff() &lt;/script>
+ # sanitize_html('<a href="javascript: sucker();">Click here for $100</a>')
+ # => <a>Click here for $100</a>
+ def sanitize_token(self, token):
+
+ # accommodate filters which use token_type differently
+ token_type = token["type"]
+ if token_type in ("StartTag", "EndTag", "EmptyTag"):
+ name = token["name"]
+ namespace = token["namespace"]
+ if ((namespace, name) in self.allowed_elements or
+ (namespace is None and
+ (namespaces["html"], name) in self.allowed_elements)):
+ return self.allowed_token(token)
+ else:
+ return self.disallowed_token(token)
+ elif token_type == "Comment":
+ pass
+ else:
+ return token
+
+ def allowed_token(self, token):
+ if "data" in token:
+ attrs = token["data"]
+ attr_names = set(attrs.keys())
+
+ # Remove forbidden attributes
+ for to_remove in (attr_names - self.allowed_attributes):
+ del token["data"][to_remove]
+ attr_names.remove(to_remove)
+
+ # Remove attributes with disallowed URL values
+ for attr in (attr_names & self.attr_val_is_uri):
+ assert attr in attrs
+ # I don't have a clue where this regexp comes from or why it matches those
+ # characters, nor why we call unescape. I just know it's always been here.
+ # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all
+ # this will do is remove *more* than it otherwise would.
+ val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '',
+ unescape(attrs[attr])).lower()
+ # remove replacement characters from unescaped characters
+ val_unescaped = val_unescaped.replace("\ufffd", "")
+ try:
+ uri = urlparse.urlparse(val_unescaped)
+ except ValueError:
+ uri = None
+ del attrs[attr]
+ if uri and uri.scheme:
+ if uri.scheme not in self.allowed_protocols:
+ del attrs[attr]
+ if uri.scheme == 'data':
+ m = data_content_type.match(uri.path)
+ if not m:
+ del attrs[attr]
+ elif m.group('content_type') not in self.allowed_content_types:
+ del attrs[attr]
+
+ for attr in self.svg_attr_val_allows_ref:
+ if attr in attrs:
+ attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)',
+ ' ',
+ unescape(attrs[attr]))
+ if (token["name"] in self.svg_allow_local_href and
+ (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*',
+ attrs[(namespaces['xlink'], 'href')])):
+ del attrs[(namespaces['xlink'], 'href')]
+ if (None, 'style') in attrs:
+ attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')])
+ token["data"] = attrs
+ return token
+
+ def disallowed_token(self, token):
+ token_type = token["type"]
+ if token_type == "EndTag":
+ token["data"] = "</%s>" % token["name"]
+ elif token["data"]:
+ assert token_type in ("StartTag", "EmptyTag")
+ attrs = []
+ for (ns, name), v in token["data"].items():
+ attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v)))
+ token["data"] = "<%s%s>" % (token["name"], ''.join(attrs))
+ else:
+ token["data"] = "<%s>" % token["name"]
+ if token.get("selfClosing"):
+ token["data"] = token["data"][:-1] + "/>"
+
+ token["type"] = "Characters"
+
+ del token["name"]
+ return token
+
+ def sanitize_css(self, style):
+ # disallow urls
+ style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)
+
+ # gauntlet
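+        # The first regex whitelists a conservative character set (plus quoted
+        # strings, hyphenated words and parenthesised numeric lists); the second
+        # requires an overall "property: value;" structure. Anything that fails
+        # either check is rejected outright.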
+ if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
+ return ''
+ if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):
+ return ''
+
+ clean = []
+ for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style):
+ if not value:
+ continue
+ if prop.lower() in self.allowed_css_properties:
+ clean.append(prop + ': ' + value + ';')
+ elif prop.split('-')[0].lower() in ['background', 'border', 'margin',
+ 'padding']:
+ for keyword in value.split():
+ if keyword not in self.allowed_css_keywords and \
+ not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa
+ break
+ else:
+ clean.append(prop + ': ' + value + ';')
+ elif prop.lower() in self.allowed_svg_properties:
+ clean.append(prop + ': ' + value + ';')
+
+ return ' '.join(clean)
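For reference, the filter above is designed to wrap a tree-walker token stream and feed a serializer. A minimal usage sketch, assuming the standard html5lib entry points (html5lib.parse, html5lib.getTreeWalker and html5lib.serializer.HTMLSerializer, none of which are part of this patch); note that importing the module already emits the DeprecationWarning shown above:

    import html5lib
    from html5lib.filters.sanitizer import Filter as SanitizerFilter
    from html5lib.serializer import HTMLSerializer

    # Parse untrusted markup, walk the tree, sanitize the token stream, reserialize.
    dom = html5lib.parse('<p onclick="evil()">hi</p>')
    walker = html5lib.getTreeWalker("etree")
    clean = HTMLSerializer().render(SanitizerFilter(walker(dom)))
    # Attributes outside allowed_attributes (e.g. onclick) are stripped by
    # allowed_token(); disallowed elements would be escaped, not dropped, by
    # disallowed_token().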
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/whitespace.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/whitespace.py
new file mode 100644
index 0000000000..0d12584b45
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/filters/whitespace.py
@@ -0,0 +1,38 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import re
+
+from . import base
+from ..constants import rcdataElements, spaceCharacters
+spaceCharacters = "".join(spaceCharacters)
+
+SPACES_REGEX = re.compile("[%s]+" % spaceCharacters)
+
+
+class Filter(base.Filter):
+ """Collapses whitespace except in pre, textarea, and script elements"""
+ spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements))
+
+ def __iter__(self):
+ preserve = 0
+ for token in base.Filter.__iter__(self):
+ type = token["type"]
+ if type == "StartTag" \
+ and (preserve or token["name"] in self.spacePreserveElements):
+ preserve += 1
+
+ elif type == "EndTag" and preserve:
+ preserve -= 1
+
+ elif not preserve and type == "SpaceCharacters" and token["data"]:
+ # Test on token["data"] above to not introduce spaces where there were not
+ token["data"] = " "
+
+ elif not preserve and type == "Characters":
+ token["data"] = collapse_spaces(token["data"])
+
+ yield token
+
+
+def collapse_spaces(text):
+ return SPACES_REGEX.sub(' ', text)
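The whitespace filter is wired up the same way; a minimal sketch under the same assumptions as the sanitizer example above:

    import html5lib
    from html5lib.filters.whitespace import Filter as WhitespaceFilter
    from html5lib.serializer import HTMLSerializer

    frag = html5lib.parseFragment("<p>a    b</p><pre>a    b</pre>")
    walker = html5lib.getTreeWalker("etree")
    out = HTMLSerializer().render(WhitespaceFilter(walker(frag)))
    # Runs of whitespace inside <p> collapse to a single space, while <pre>
    # (one of the spacePreserveElements) is preserved verbatim.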
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/html5parser.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/html5parser.py
new file mode 100644
index 0000000000..74d829d984
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/html5parser.py
@@ -0,0 +1,2795 @@
+from __future__ import absolute_import, division, unicode_literals
+from six import with_metaclass, viewkeys
+
+import types
+
+from . import _inputstream
+from . import _tokenizer
+
+from . import treebuilders
+from .treebuilders.base import Marker
+
+from . import _utils
+from .constants import (
+ spaceCharacters, asciiUpper2Lower,
+ specialElements, headingElements, cdataElements, rcdataElements,
+ tokenTypes, tagTokenTypes,
+ namespaces,
+ htmlIntegrationPointElements, mathmlTextIntegrationPointElements,
+ adjustForeignAttributes as adjustForeignAttributesMap,
+ adjustMathMLAttributes, adjustSVGAttributes,
+ E,
+ _ReparseException
+)
+
+
+def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs):
+ """Parse an HTML document as a string or file-like object into a tree
+
+ :arg doc: the document to parse as a string or file-like object
+
+ :arg treebuilder: the treebuilder to use when parsing
+
+ :arg namespaceHTMLElements: whether or not to namespace HTML elements
+
+ :returns: parsed tree
+
+ Example:
+
+ >>> from html5lib.html5parser import parse
+ >>> parse('<html><body><p>This is a doc</p></body></html>')
+ <Element u'{http://www.w3.org/1999/xhtml}html' at 0x7feac4909db0>
+
+ """
+ tb = treebuilders.getTreeBuilder(treebuilder)
+ p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
+ return p.parse(doc, **kwargs)
+
+
+def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs):
+ """Parse an HTML fragment as a string or file-like object into a tree
+
+ :arg doc: the fragment to parse as a string or file-like object
+
+ :arg container: the container context to parse the fragment in
+
+ :arg treebuilder: the treebuilder to use when parsing
+
+ :arg namespaceHTMLElements: whether or not to namespace HTML elements
+
+ :returns: parsed tree
+
+ Example:
+
+    >>> from html5lib.html5parser import parseFragment
+ >>> parseFragment('<b>this is a fragment</b>')
+ <Element u'DOCUMENT_FRAGMENT' at 0x7feac484b090>
+
+ """
+ tb = treebuilders.getTreeBuilder(treebuilder)
+ p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
+ return p.parseFragment(doc, container=container, **kwargs)
+
+
+def method_decorator_metaclass(function):
+ class Decorated(type):
+ def __new__(meta, classname, bases, classDict):
+ for attributeName, attribute in classDict.items():
+ if isinstance(attribute, types.FunctionType):
+ attribute = function(attribute)
+
+ classDict[attributeName] = attribute
+ return type.__new__(meta, classname, bases, classDict)
+ return Decorated
+
+
+class HTMLParser(object):
+ """HTML parser
+
+ Generates a tree structure from a stream of (possibly malformed) HTML.
+
+ """
+
+ def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False):
+ """
+ :arg tree: a treebuilder class controlling the type of tree that will be
+ returned. Built in treebuilders can be accessed through
+ html5lib.treebuilders.getTreeBuilder(treeType)
+
+ :arg strict: raise an exception when a parse error is encountered
+
+ :arg namespaceHTMLElements: whether or not to namespace HTML elements
+
+        :arg debug: whether or not to enable debug mode, which records how
+            each token is handled in the parser's ``log`` attribute
+
+ Example:
+
+ >>> from html5lib.html5parser import HTMLParser
+ >>> parser = HTMLParser() # generates parser with etree builder
+        >>> from html5lib import treebuilders
+        >>> parser = HTMLParser(treebuilders.getTreeBuilder('lxml'), strict=True)  # strict, lxml builder
+
+ """
+
+ # Raise an exception on the first error encountered
+ self.strict = strict
+
+ if tree is None:
+ tree = treebuilders.getTreeBuilder("etree")
+ self.tree = tree(namespaceHTMLElements)
+ self.errors = []
+
+ self.phases = {name: cls(self, self.tree) for name, cls in
+ getPhases(debug).items()}
+
+ def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs):
+
+ self.innerHTMLMode = innerHTML
+ self.container = container
+ self.scripting = scripting
+ self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs)
+ self.reset()
+
+ try:
+ self.mainLoop()
+ except _ReparseException:
+ self.reset()
+ self.mainLoop()
+
+ def reset(self):
+ self.tree.reset()
+ self.firstStartTag = False
+ self.errors = []
+ self.log = [] # only used with debug mode
+ # "quirks" / "limited quirks" / "no quirks"
+ self.compatMode = "no quirks"
+
+ if self.innerHTMLMode:
+ self.innerHTML = self.container.lower()
+
+ if self.innerHTML in cdataElements:
+ self.tokenizer.state = self.tokenizer.rcdataState
+ elif self.innerHTML in rcdataElements:
+ self.tokenizer.state = self.tokenizer.rawtextState
+ elif self.innerHTML == 'plaintext':
+ self.tokenizer.state = self.tokenizer.plaintextState
+ else:
+ # state already is data state
+ # self.tokenizer.state = self.tokenizer.dataState
+ pass
+ self.phase = self.phases["beforeHtml"]
+ self.phase.insertHtmlElement()
+ self.resetInsertionMode()
+ else:
+ self.innerHTML = False # pylint:disable=redefined-variable-type
+ self.phase = self.phases["initial"]
+
+ self.lastPhase = None
+
+ self.beforeRCDataPhase = None
+
+ self.framesetOK = True
+
+ @property
+ def documentEncoding(self):
+ """Name of the character encoding that was used to decode the input stream, or
+ :obj:`None` if that is not determined yet
+
+ """
+ if not hasattr(self, 'tokenizer'):
+ return None
+ return self.tokenizer.stream.charEncoding[0].name
+
+ def isHTMLIntegrationPoint(self, element):
+ if (element.name == "annotation-xml" and
+ element.namespace == namespaces["mathml"]):
+ return ("encoding" in element.attributes and
+ element.attributes["encoding"].translate(
+ asciiUpper2Lower) in
+ ("text/html", "application/xhtml+xml"))
+ else:
+ return (element.namespace, element.name) in htmlIntegrationPointElements
+
+ def isMathMLTextIntegrationPoint(self, element):
+ return (element.namespace, element.name) in mathmlTextIntegrationPointElements
+
+ def mainLoop(self):
+ CharactersToken = tokenTypes["Characters"]
+ SpaceCharactersToken = tokenTypes["SpaceCharacters"]
+ StartTagToken = tokenTypes["StartTag"]
+ EndTagToken = tokenTypes["EndTag"]
+ CommentToken = tokenTypes["Comment"]
+ DoctypeToken = tokenTypes["Doctype"]
+ ParseErrorToken = tokenTypes["ParseError"]
+
+ for token in self.tokenizer:
+ prev_token = None
+ new_token = token
+ while new_token is not None:
+ prev_token = new_token
+ currentNode = self.tree.openElements[-1] if self.tree.openElements else None
+ currentNodeNamespace = currentNode.namespace if currentNode else None
+ currentNodeName = currentNode.name if currentNode else None
+
+ type = new_token["type"]
+
+ if type == ParseErrorToken:
+ self.parseError(new_token["data"], new_token.get("datavars", {}))
+ new_token = None
+ else:
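+                    # Tree construction dispatcher: tokens are handled by the current
+                    # phase when the current node is in the HTML namespace or at a
+                    # MathML/HTML integration point; otherwise they are foreign
+                    # (SVG/MathML) content and go to the inForeignContent phase.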
+ if (len(self.tree.openElements) == 0 or
+ currentNodeNamespace == self.tree.defaultNamespace or
+ (self.isMathMLTextIntegrationPoint(currentNode) and
+ ((type == StartTagToken and
+ token["name"] not in frozenset(["mglyph", "malignmark"])) or
+ type in (CharactersToken, SpaceCharactersToken))) or
+ (currentNodeNamespace == namespaces["mathml"] and
+ currentNodeName == "annotation-xml" and
+ type == StartTagToken and
+ token["name"] == "svg") or
+ (self.isHTMLIntegrationPoint(currentNode) and
+ type in (StartTagToken, CharactersToken, SpaceCharactersToken))):
+ phase = self.phase
+ else:
+ phase = self.phases["inForeignContent"]
+
+ if type == CharactersToken:
+ new_token = phase.processCharacters(new_token)
+ elif type == SpaceCharactersToken:
+ new_token = phase.processSpaceCharacters(new_token)
+ elif type == StartTagToken:
+ new_token = phase.processStartTag(new_token)
+ elif type == EndTagToken:
+ new_token = phase.processEndTag(new_token)
+ elif type == CommentToken:
+ new_token = phase.processComment(new_token)
+ elif type == DoctypeToken:
+ new_token = phase.processDoctype(new_token)
+
+ if (type == StartTagToken and prev_token["selfClosing"] and
+ not prev_token["selfClosingAcknowledged"]):
+ self.parseError("non-void-element-with-trailing-solidus",
+ {"name": prev_token["name"]})
+
+ # When the loop finishes it's EOF
+ reprocess = True
+ phases = []
+ while reprocess:
+ phases.append(self.phase)
+ reprocess = self.phase.processEOF()
+ if reprocess:
+ assert self.phase not in phases
+
+ def parse(self, stream, *args, **kwargs):
+ """Parse a HTML document into a well-formed tree
+
+ :arg stream: a file-like object or string containing the HTML to be parsed
+
+ The optional encoding parameter must be a string that indicates
+ the encoding. If specified, that encoding will be used,
+ regardless of any BOM or later declaration (such as in a meta
+ element).
+
+ :arg scripting: treat noscript elements as if JavaScript was turned on
+
+ :returns: parsed tree
+
+ Example:
+
+ >>> from html5lib.html5parser import HTMLParser
+ >>> parser = HTMLParser()
+ >>> parser.parse('<html><body><p>This is a doc</p></body></html>')
+ <Element u'{http://www.w3.org/1999/xhtml}html' at 0x7feac4909db0>
+
+ """
+ self._parse(stream, False, None, *args, **kwargs)
+ return self.tree.getDocument()
+
+ def parseFragment(self, stream, *args, **kwargs):
+ """Parse a HTML fragment into a well-formed tree fragment
+
+        :arg container: name of the element whose innerHTML is being set;
+            if None, defaults to 'div'
+
+ :arg stream: a file-like object or string containing the HTML to be parsed
+
+ The optional encoding parameter must be a string that indicates
+ the encoding. If specified, that encoding will be used,
+ regardless of any BOM or later declaration (such as in a meta
+ element)
+
+ :arg scripting: treat noscript elements as if JavaScript was turned on
+
+ :returns: parsed tree
+
+ Example:
+
+        >>> from html5lib.html5parser import HTMLParser
+ >>> parser = HTMLParser()
+ >>> parser.parseFragment('<b>this is a fragment</b>')
+ <Element u'DOCUMENT_FRAGMENT' at 0x7feac484b090>
+
+ """
+ self._parse(stream, True, *args, **kwargs)
+ return self.tree.getFragment()
+
+ def parseError(self, errorcode="XXX-undefined-error", datavars=None):
+ # XXX The idea is to make errorcode mandatory.
+ if datavars is None:
+ datavars = {}
+ self.errors.append((self.tokenizer.stream.position(), errorcode, datavars))
+ if self.strict:
+ raise ParseError(E[errorcode] % datavars)
+
+ def adjustMathMLAttributes(self, token):
+ adjust_attributes(token, adjustMathMLAttributes)
+
+ def adjustSVGAttributes(self, token):
+ adjust_attributes(token, adjustSVGAttributes)
+
+ def adjustForeignAttributes(self, token):
+ adjust_attributes(token, adjustForeignAttributesMap)
+
+ def reparseTokenNormal(self, token):
+ # pylint:disable=unused-argument
+ self.parser.phase()
+
+ def resetInsertionMode(self):
+ # The name of this method is mostly historical. (It's also used in the
+ # specification.)
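+        # Walk the stack of open elements from the innermost node outwards and
+        # switch to the phase matching the first recognised element, falling
+        # back to "inBody" for the root node in the fragment (innerHTML) case.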
+ last = False
+ newModes = {
+ "select": "inSelect",
+ "td": "inCell",
+ "th": "inCell",
+ "tr": "inRow",
+ "tbody": "inTableBody",
+ "thead": "inTableBody",
+ "tfoot": "inTableBody",
+ "caption": "inCaption",
+ "colgroup": "inColumnGroup",
+ "table": "inTable",
+ "head": "inBody",
+ "body": "inBody",
+ "frameset": "inFrameset",
+ "html": "beforeHead"
+ }
+ for node in self.tree.openElements[::-1]:
+ nodeName = node.name
+ new_phase = None
+ if node == self.tree.openElements[0]:
+ assert self.innerHTML
+ last = True
+ nodeName = self.innerHTML
+ # Check for conditions that should only happen in the innerHTML
+ # case
+ if nodeName in ("select", "colgroup", "head", "html"):
+ assert self.innerHTML
+
+ if not last and node.namespace != self.tree.defaultNamespace:
+ continue
+
+ if nodeName in newModes:
+ new_phase = self.phases[newModes[nodeName]]
+ break
+ elif last:
+ new_phase = self.phases["inBody"]
+ break
+
+ self.phase = new_phase
+
+ def parseRCDataRawtext(self, token, contentType):
+ # Generic RCDATA/RAWTEXT Parsing algorithm
+ assert contentType in ("RAWTEXT", "RCDATA")
+
+ self.tree.insertElement(token)
+
+ if contentType == "RAWTEXT":
+ self.tokenizer.state = self.tokenizer.rawtextState
+ else:
+ self.tokenizer.state = self.tokenizer.rcdataState
+
+ self.originalPhase = self.phase
+
+ self.phase = self.phases["text"]
+
+
+@_utils.memoize
+def getPhases(debug):
+ def log(function):
+ """Logger that records which phase processes each token"""
+ type_names = {value: key for key, value in tokenTypes.items()}
+
+ def wrapped(self, *args, **kwargs):
+ if function.__name__.startswith("process") and len(args) > 0:
+ token = args[0]
+ info = {"type": type_names[token['type']]}
+ if token['type'] in tagTokenTypes:
+ info["name"] = token['name']
+
+ self.parser.log.append((self.parser.tokenizer.state.__name__,
+ self.parser.phase.__class__.__name__,
+ self.__class__.__name__,
+ function.__name__,
+ info))
+ return function(self, *args, **kwargs)
+ else:
+ return function(self, *args, **kwargs)
+ return wrapped
+
+ def getMetaclass(use_metaclass, metaclass_func):
+ if use_metaclass:
+ return method_decorator_metaclass(metaclass_func)
+ else:
+ return type
+
+ # pylint:disable=unused-argument
+ class Phase(with_metaclass(getMetaclass(debug, log))):
+ """Base class for helper object that implements each phase of processing
+ """
+ __slots__ = ("parser", "tree", "__startTagCache", "__endTagCache")
+
+ def __init__(self, parser, tree):
+ self.parser = parser
+ self.tree = tree
+ self.__startTagCache = {}
+ self.__endTagCache = {}
+
+ def processEOF(self):
+ raise NotImplementedError
+
+ def processComment(self, token):
+ # For most phases the following is correct. Where it's not it will be
+ # overridden.
+ self.tree.insertComment(token, self.tree.openElements[-1])
+
+ def processDoctype(self, token):
+ self.parser.parseError("unexpected-doctype")
+
+ def processCharacters(self, token):
+ self.tree.insertText(token["data"])
+
+ def processSpaceCharacters(self, token):
+ self.tree.insertText(token["data"])
+
+ def processStartTag(self, token):
+ # Note the caching is done here rather than BoundMethodDispatcher as doing it there
+ # requires a circular reference to the Phase, and this ends up with a significant
+ # (CPython 2.7, 3.8) GC cost when parsing many short inputs
+ name = token["name"]
+ # In Py2, using `in` is quicker in general than try/except KeyError
+ # In Py3, `in` is quicker when there are few cache hits (typically short inputs)
+ if name in self.__startTagCache:
+ func = self.__startTagCache[name]
+ else:
+ func = self.__startTagCache[name] = self.startTagHandler[name]
+ # bound the cache size in case we get loads of unknown tags
+ while len(self.__startTagCache) > len(self.startTagHandler) * 1.1:
+ # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7
+ self.__startTagCache.pop(next(iter(self.__startTagCache)))
+ return func(token)
+
+ def startTagHtml(self, token):
+ if not self.parser.firstStartTag and token["name"] == "html":
+ self.parser.parseError("non-html-root")
+ # XXX Need a check here to see if the first start tag token emitted is
+ # this token... If it's not, invoke self.parser.parseError().
+ for attr, value in token["data"].items():
+ if attr not in self.tree.openElements[0].attributes:
+ self.tree.openElements[0].attributes[attr] = value
+ self.parser.firstStartTag = False
+
+ def processEndTag(self, token):
+ # Note the caching is done here rather than BoundMethodDispatcher as doing it there
+ # requires a circular reference to the Phase, and this ends up with a significant
+ # (CPython 2.7, 3.8) GC cost when parsing many short inputs
+ name = token["name"]
+ # In Py2, using `in` is quicker in general than try/except KeyError
+ # In Py3, `in` is quicker when there are few cache hits (typically short inputs)
+ if name in self.__endTagCache:
+ func = self.__endTagCache[name]
+ else:
+ func = self.__endTagCache[name] = self.endTagHandler[name]
+ # bound the cache size in case we get loads of unknown tags
+ while len(self.__endTagCache) > len(self.endTagHandler) * 1.1:
+ # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7
+ self.__endTagCache.pop(next(iter(self.__endTagCache)))
+ return func(token)
+
+ class InitialPhase(Phase):
+ __slots__ = tuple()
+
+ def processSpaceCharacters(self, token):
+ pass
+
+ def processComment(self, token):
+ self.tree.insertComment(token, self.tree.document)
+
+ def processDoctype(self, token):
+ name = token["name"]
+ publicId = token["publicId"]
+ systemId = token["systemId"]
+ correct = token["correct"]
+
+ if (name != "html" or publicId is not None or
+ systemId is not None and systemId != "about:legacy-compat"):
+ self.parser.parseError("unknown-doctype")
+
+ if publicId is None:
+ publicId = ""
+
+ self.tree.insertDoctype(token)
+
+ if publicId != "":
+ publicId = publicId.translate(asciiUpper2Lower)
+
+ if (not correct or token["name"] != "html" or
+ publicId.startswith(
+ ("+//silmaril//dtd html pro v0r11 19970101//",
+ "-//advasoft ltd//dtd html 3.0 aswedit + extensions//",
+ "-//as//dtd html 3.0 aswedit + extensions//",
+ "-//ietf//dtd html 2.0 level 1//",
+ "-//ietf//dtd html 2.0 level 2//",
+ "-//ietf//dtd html 2.0 strict level 1//",
+ "-//ietf//dtd html 2.0 strict level 2//",
+ "-//ietf//dtd html 2.0 strict//",
+ "-//ietf//dtd html 2.0//",
+ "-//ietf//dtd html 2.1e//",
+ "-//ietf//dtd html 3.0//",
+ "-//ietf//dtd html 3.2 final//",
+ "-//ietf//dtd html 3.2//",
+ "-//ietf//dtd html 3//",
+ "-//ietf//dtd html level 0//",
+ "-//ietf//dtd html level 1//",
+ "-//ietf//dtd html level 2//",
+ "-//ietf//dtd html level 3//",
+ "-//ietf//dtd html strict level 0//",
+ "-//ietf//dtd html strict level 1//",
+ "-//ietf//dtd html strict level 2//",
+ "-//ietf//dtd html strict level 3//",
+ "-//ietf//dtd html strict//",
+ "-//ietf//dtd html//",
+ "-//metrius//dtd metrius presentational//",
+ "-//microsoft//dtd internet explorer 2.0 html strict//",
+ "-//microsoft//dtd internet explorer 2.0 html//",
+ "-//microsoft//dtd internet explorer 2.0 tables//",
+ "-//microsoft//dtd internet explorer 3.0 html strict//",
+ "-//microsoft//dtd internet explorer 3.0 html//",
+ "-//microsoft//dtd internet explorer 3.0 tables//",
+ "-//netscape comm. corp.//dtd html//",
+ "-//netscape comm. corp.//dtd strict html//",
+ "-//o'reilly and associates//dtd html 2.0//",
+ "-//o'reilly and associates//dtd html extended 1.0//",
+ "-//o'reilly and associates//dtd html extended relaxed 1.0//",
+ "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//",
+ "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//",
+ "-//spyglass//dtd html 2.0 extended//",
+ "-//sq//dtd html 2.0 hotmetal + extensions//",
+ "-//sun microsystems corp.//dtd hotjava html//",
+ "-//sun microsystems corp.//dtd hotjava strict html//",
+ "-//w3c//dtd html 3 1995-03-24//",
+ "-//w3c//dtd html 3.2 draft//",
+ "-//w3c//dtd html 3.2 final//",
+ "-//w3c//dtd html 3.2//",
+ "-//w3c//dtd html 3.2s draft//",
+ "-//w3c//dtd html 4.0 frameset//",
+ "-//w3c//dtd html 4.0 transitional//",
+ "-//w3c//dtd html experimental 19960712//",
+ "-//w3c//dtd html experimental 970421//",
+ "-//w3c//dtd w3 html//",
+ "-//w3o//dtd w3 html 3.0//",
+ "-//webtechs//dtd mozilla html 2.0//",
+ "-//webtechs//dtd mozilla html//")) or
+ publicId in ("-//w3o//dtd w3 html strict 3.0//en//",
+ "-/w3c/dtd html 4.0 transitional/en",
+ "html") or
+ publicId.startswith(
+ ("-//w3c//dtd html 4.01 frameset//",
+ "-//w3c//dtd html 4.01 transitional//")) and
+ systemId is None or
+ systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"):
+ self.parser.compatMode = "quirks"
+ elif (publicId.startswith(
+ ("-//w3c//dtd xhtml 1.0 frameset//",
+ "-//w3c//dtd xhtml 1.0 transitional//")) or
+ publicId.startswith(
+ ("-//w3c//dtd html 4.01 frameset//",
+ "-//w3c//dtd html 4.01 transitional//")) and
+ systemId is not None):
+ self.parser.compatMode = "limited quirks"
+
+ self.parser.phase = self.parser.phases["beforeHtml"]
+
+ def anythingElse(self):
+ self.parser.compatMode = "quirks"
+ self.parser.phase = self.parser.phases["beforeHtml"]
+
+ def processCharacters(self, token):
+ self.parser.parseError("expected-doctype-but-got-chars")
+ self.anythingElse()
+ return token
+
+ def processStartTag(self, token):
+ self.parser.parseError("expected-doctype-but-got-start-tag",
+ {"name": token["name"]})
+ self.anythingElse()
+ return token
+
+ def processEndTag(self, token):
+ self.parser.parseError("expected-doctype-but-got-end-tag",
+ {"name": token["name"]})
+ self.anythingElse()
+ return token
+
+ def processEOF(self):
+ self.parser.parseError("expected-doctype-but-got-eof")
+ self.anythingElse()
+ return True
+
+ class BeforeHtmlPhase(Phase):
+ __slots__ = tuple()
+
+ # helper methods
+ def insertHtmlElement(self):
+ self.tree.insertRoot(impliedTagToken("html", "StartTag"))
+ self.parser.phase = self.parser.phases["beforeHead"]
+
+ # other
+ def processEOF(self):
+ self.insertHtmlElement()
+ return True
+
+ def processComment(self, token):
+ self.tree.insertComment(token, self.tree.document)
+
+ def processSpaceCharacters(self, token):
+ pass
+
+ def processCharacters(self, token):
+ self.insertHtmlElement()
+ return token
+
+ def processStartTag(self, token):
+ if token["name"] == "html":
+ self.parser.firstStartTag = True
+ self.insertHtmlElement()
+ return token
+
+ def processEndTag(self, token):
+ if token["name"] not in ("head", "body", "html", "br"):
+ self.parser.parseError("unexpected-end-tag-before-html",
+ {"name": token["name"]})
+ else:
+ self.insertHtmlElement()
+ return token
+
+ class BeforeHeadPhase(Phase):
+ __slots__ = tuple()
+
+ def processEOF(self):
+ self.startTagHead(impliedTagToken("head", "StartTag"))
+ return True
+
+ def processSpaceCharacters(self, token):
+ pass
+
+ def processCharacters(self, token):
+ self.startTagHead(impliedTagToken("head", "StartTag"))
+ return token
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagHead(self, token):
+ self.tree.insertElement(token)
+ self.tree.headPointer = self.tree.openElements[-1]
+ self.parser.phase = self.parser.phases["inHead"]
+
+ def startTagOther(self, token):
+ self.startTagHead(impliedTagToken("head", "StartTag"))
+ return token
+
+ def endTagImplyHead(self, token):
+ self.startTagHead(impliedTagToken("head", "StartTag"))
+ return token
+
+ def endTagOther(self, token):
+ self.parser.parseError("end-tag-after-implied-root",
+ {"name": token["name"]})
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", startTagHtml),
+ ("head", startTagHead)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ (("head", "body", "html", "br"), endTagImplyHead)
+ ])
+ endTagHandler.default = endTagOther
+
+ class InHeadPhase(Phase):
+ __slots__ = tuple()
+
+ # the real thing
+ def processEOF(self):
+ self.anythingElse()
+ return True
+
+ def processCharacters(self, token):
+ self.anythingElse()
+ return token
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagHead(self, token):
+ self.parser.parseError("two-heads-are-not-better-than-one")
+
+ def startTagBaseLinkCommand(self, token):
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ def startTagMeta(self, token):
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ attributes = token["data"]
+ if self.parser.tokenizer.stream.charEncoding[1] == "tentative":
+ if "charset" in attributes:
+ self.parser.tokenizer.stream.changeEncoding(attributes["charset"])
+ elif ("content" in attributes and
+ "http-equiv" in attributes and
+ attributes["http-equiv"].lower() == "content-type"):
+                    # Encoding it as UTF-8 here is a hack: really we should
+                    # pass the abstract Unicode string and run the
+                    # ContentAttrParser on that, but UTF-8 can encode every
+                    # character and, being an ASCII superset, still works.
+ data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8"))
+ parser = _inputstream.ContentAttrParser(data)
+ codec = parser.parse()
+ self.parser.tokenizer.stream.changeEncoding(codec)
+
+ def startTagTitle(self, token):
+ self.parser.parseRCDataRawtext(token, "RCDATA")
+
+ def startTagNoFramesStyle(self, token):
+ # Need to decide whether to implement the scripting-disabled case
+ self.parser.parseRCDataRawtext(token, "RAWTEXT")
+
+ def startTagNoscript(self, token):
+ if self.parser.scripting:
+ self.parser.parseRCDataRawtext(token, "RAWTEXT")
+ else:
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inHeadNoscript"]
+
+ def startTagScript(self, token):
+ self.tree.insertElement(token)
+ self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState
+ self.parser.originalPhase = self.parser.phase
+ self.parser.phase = self.parser.phases["text"]
+
+ def startTagOther(self, token):
+ self.anythingElse()
+ return token
+
+ def endTagHead(self, token):
+ node = self.parser.tree.openElements.pop()
+ assert node.name == "head", "Expected head got %s" % node.name
+ self.parser.phase = self.parser.phases["afterHead"]
+
+ def endTagHtmlBodyBr(self, token):
+ self.anythingElse()
+ return token
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def anythingElse(self):
+ self.endTagHead(impliedTagToken("head"))
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", startTagHtml),
+ ("title", startTagTitle),
+ (("noframes", "style"), startTagNoFramesStyle),
+ ("noscript", startTagNoscript),
+ ("script", startTagScript),
+ (("base", "basefont", "bgsound", "command", "link"),
+ startTagBaseLinkCommand),
+ ("meta", startTagMeta),
+ ("head", startTagHead)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ ("head", endTagHead),
+ (("br", "html", "body"), endTagHtmlBodyBr)
+ ])
+ endTagHandler.default = endTagOther
+
+ class InHeadNoscriptPhase(Phase):
+ __slots__ = tuple()
+
+ def processEOF(self):
+ self.parser.parseError("eof-in-head-noscript")
+ self.anythingElse()
+ return True
+
+ def processComment(self, token):
+ return self.parser.phases["inHead"].processComment(token)
+
+ def processCharacters(self, token):
+ self.parser.parseError("char-in-head-noscript")
+ self.anythingElse()
+ return token
+
+ def processSpaceCharacters(self, token):
+ return self.parser.phases["inHead"].processSpaceCharacters(token)
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagBaseLinkCommand(self, token):
+ return self.parser.phases["inHead"].processStartTag(token)
+
+ def startTagHeadNoscript(self, token):
+ self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
+
+ def startTagOther(self, token):
+ self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]})
+ self.anythingElse()
+ return token
+
+ def endTagNoscript(self, token):
+ node = self.parser.tree.openElements.pop()
+ assert node.name == "noscript", "Expected noscript got %s" % node.name
+ self.parser.phase = self.parser.phases["inHead"]
+
+ def endTagBr(self, token):
+ self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]})
+ self.anythingElse()
+ return token
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def anythingElse(self):
+ # Caller must raise parse error first!
+ self.endTagNoscript(impliedTagToken("noscript"))
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", startTagHtml),
+ (("basefont", "bgsound", "link", "meta", "noframes", "style"), startTagBaseLinkCommand),
+ (("head", "noscript"), startTagHeadNoscript),
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ ("noscript", endTagNoscript),
+ ("br", endTagBr),
+ ])
+ endTagHandler.default = endTagOther
+
+ class AfterHeadPhase(Phase):
+ __slots__ = tuple()
+
+ def processEOF(self):
+ self.anythingElse()
+ return True
+
+ def processCharacters(self, token):
+ self.anythingElse()
+ return token
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagBody(self, token):
+ self.parser.framesetOK = False
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inBody"]
+
+ def startTagFrameset(self, token):
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inFrameset"]
+
+ def startTagFromHead(self, token):
+ self.parser.parseError("unexpected-start-tag-out-of-my-head",
+ {"name": token["name"]})
+ self.tree.openElements.append(self.tree.headPointer)
+ self.parser.phases["inHead"].processStartTag(token)
+ for node in self.tree.openElements[::-1]:
+ if node.name == "head":
+ self.tree.openElements.remove(node)
+ break
+
+ def startTagHead(self, token):
+ self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
+
+ def startTagOther(self, token):
+ self.anythingElse()
+ return token
+
+ def endTagHtmlBodyBr(self, token):
+ self.anythingElse()
+ return token
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def anythingElse(self):
+ self.tree.insertElement(impliedTagToken("body", "StartTag"))
+ self.parser.phase = self.parser.phases["inBody"]
+ self.parser.framesetOK = True
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", startTagHtml),
+ ("body", startTagBody),
+ ("frameset", startTagFrameset),
+ (("base", "basefont", "bgsound", "link", "meta", "noframes", "script",
+ "style", "title"),
+ startTagFromHead),
+ ("head", startTagHead)
+ ])
+ startTagHandler.default = startTagOther
+ endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"),
+ endTagHtmlBodyBr)])
+ endTagHandler.default = endTagOther
+
+ class InBodyPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody
+ # the really-really-really-very crazy mode
+ __slots__ = ("processSpaceCharacters",)
+
+ def __init__(self, *args, **kwargs):
+ super(InBodyPhase, self).__init__(*args, **kwargs)
+ # Set this to the default handler
+ self.processSpaceCharacters = self.processSpaceCharactersNonPre
+
+ def isMatchingFormattingElement(self, node1, node2):
+ return (node1.name == node2.name and
+ node1.namespace == node2.namespace and
+ node1.attributes == node2.attributes)
+
+ # helper
+ def addFormattingElement(self, token):
+ self.tree.insertElement(token)
+ element = self.tree.openElements[-1]
+
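+            # "Noah's Ark" clause: keep at most three identical entries (same
+            # name, namespace and attributes) among the active formatting elements.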
+ matchingElements = []
+ for node in self.tree.activeFormattingElements[::-1]:
+ if node is Marker:
+ break
+ elif self.isMatchingFormattingElement(node, element):
+ matchingElements.append(node)
+
+ assert len(matchingElements) <= 3
+ if len(matchingElements) == 3:
+ self.tree.activeFormattingElements.remove(matchingElements[-1])
+ self.tree.activeFormattingElements.append(element)
+
+ # the real deal
+ def processEOF(self):
+ allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td",
+ "tfoot", "th", "thead", "tr", "body",
+ "html"))
+ for node in self.tree.openElements[::-1]:
+ if node.name not in allowed_elements:
+ self.parser.parseError("expected-closing-tag-but-got-eof")
+ break
+ # Stop parsing
+
+ def processSpaceCharactersDropNewline(self, token):
+ # Sometimes (start of <pre>, <listing>, and <textarea> blocks) we
+ # want to drop leading newlines
+ data = token["data"]
+ self.processSpaceCharacters = self.processSpaceCharactersNonPre
+ if (data.startswith("\n") and
+ self.tree.openElements[-1].name in ("pre", "listing", "textarea") and
+ not self.tree.openElements[-1].hasContent()):
+ data = data[1:]
+ if data:
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertText(data)
+
+ def processCharacters(self, token):
+ if token["data"] == "\u0000":
+ # The tokenizer should always emit null on its own
+ return
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertText(token["data"])
+ # This must be bad for performance
+ if (self.parser.framesetOK and
+ any([char not in spaceCharacters
+ for char in token["data"]])):
+ self.parser.framesetOK = False
+
+ def processSpaceCharactersNonPre(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertText(token["data"])
+
+ def startTagProcessInHead(self, token):
+ return self.parser.phases["inHead"].processStartTag(token)
+
+ def startTagBody(self, token):
+ self.parser.parseError("unexpected-start-tag", {"name": "body"})
+ if (len(self.tree.openElements) == 1 or
+ self.tree.openElements[1].name != "body"):
+ assert self.parser.innerHTML
+ else:
+ self.parser.framesetOK = False
+ for attr, value in token["data"].items():
+ if attr not in self.tree.openElements[1].attributes:
+ self.tree.openElements[1].attributes[attr] = value
+
+ def startTagFrameset(self, token):
+ self.parser.parseError("unexpected-start-tag", {"name": "frameset"})
+ if (len(self.tree.openElements) == 1 or self.tree.openElements[1].name != "body"):
+ assert self.parser.innerHTML
+ elif not self.parser.framesetOK:
+ pass
+ else:
+ if self.tree.openElements[1].parent:
+ self.tree.openElements[1].parent.removeChild(self.tree.openElements[1])
+ while self.tree.openElements[-1].name != "html":
+ self.tree.openElements.pop()
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inFrameset"]
+
+ def startTagCloseP(self, token):
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ self.tree.insertElement(token)
+
+ def startTagPreListing(self, token):
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ self.tree.insertElement(token)
+ self.parser.framesetOK = False
+ self.processSpaceCharacters = self.processSpaceCharactersDropNewline
+
+ def startTagForm(self, token):
+ if self.tree.formPointer:
+ self.parser.parseError("unexpected-start-tag", {"name": "form"})
+ else:
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ self.tree.insertElement(token)
+ self.tree.formPointer = self.tree.openElements[-1]
+
+ def startTagListItem(self, token):
+ self.parser.framesetOK = False
+
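+            # Close any open li/dt/dd of the matching kind before inserting the
+            # new list item, stopping early if a special element other than
+            # address, div or p is found on the way up the stack.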
+ stopNamesMap = {"li": ["li"],
+ "dt": ["dt", "dd"],
+ "dd": ["dt", "dd"]}
+ stopNames = stopNamesMap[token["name"]]
+ for node in reversed(self.tree.openElements):
+ if node.name in stopNames:
+ self.parser.phase.processEndTag(
+ impliedTagToken(node.name, "EndTag"))
+ break
+ if (node.nameTuple in specialElements and
+ node.name not in ("address", "div", "p")):
+ break
+
+ if self.tree.elementInScope("p", variant="button"):
+ self.parser.phase.processEndTag(
+ impliedTagToken("p", "EndTag"))
+
+ self.tree.insertElement(token)
+
+ def startTagPlaintext(self, token):
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ self.tree.insertElement(token)
+ self.parser.tokenizer.state = self.parser.tokenizer.plaintextState
+
+ def startTagHeading(self, token):
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ if self.tree.openElements[-1].name in headingElements:
+ self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
+ self.tree.openElements.pop()
+ self.tree.insertElement(token)
+
+ def startTagA(self, token):
+ afeAElement = self.tree.elementInActiveFormattingElements("a")
+ if afeAElement:
+ self.parser.parseError("unexpected-start-tag-implies-end-tag",
+ {"startName": "a", "endName": "a"})
+ self.endTagFormatting(impliedTagToken("a"))
+ if afeAElement in self.tree.openElements:
+ self.tree.openElements.remove(afeAElement)
+ if afeAElement in self.tree.activeFormattingElements:
+ self.tree.activeFormattingElements.remove(afeAElement)
+ self.tree.reconstructActiveFormattingElements()
+ self.addFormattingElement(token)
+
+ def startTagFormatting(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.addFormattingElement(token)
+
+ def startTagNobr(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ if self.tree.elementInScope("nobr"):
+ self.parser.parseError("unexpected-start-tag-implies-end-tag",
+ {"startName": "nobr", "endName": "nobr"})
+ self.processEndTag(impliedTagToken("nobr"))
+ # XXX Need tests that trigger the following
+ self.tree.reconstructActiveFormattingElements()
+ self.addFormattingElement(token)
+
+ def startTagButton(self, token):
+ if self.tree.elementInScope("button"):
+ self.parser.parseError("unexpected-start-tag-implies-end-tag",
+ {"startName": "button", "endName": "button"})
+ self.processEndTag(impliedTagToken("button"))
+ return token
+ else:
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertElement(token)
+ self.parser.framesetOK = False
+
+ def startTagAppletMarqueeObject(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertElement(token)
+ self.tree.activeFormattingElements.append(Marker)
+ self.parser.framesetOK = False
+
+ def startTagXmp(self, token):
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ self.tree.reconstructActiveFormattingElements()
+ self.parser.framesetOK = False
+ self.parser.parseRCDataRawtext(token, "RAWTEXT")
+
+ def startTagTable(self, token):
+ if self.parser.compatMode != "quirks":
+ if self.tree.elementInScope("p", variant="button"):
+ self.processEndTag(impliedTagToken("p"))
+ self.tree.insertElement(token)
+ self.parser.framesetOK = False
+ self.parser.phase = self.parser.phases["inTable"]
+
+ def startTagVoidFormatting(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+ self.parser.framesetOK = False
+
+ def startTagInput(self, token):
+ framesetOK = self.parser.framesetOK
+ self.startTagVoidFormatting(token)
+ if ("type" in token["data"] and
+ token["data"]["type"].translate(asciiUpper2Lower) == "hidden"):
+ # input type=hidden doesn't change framesetOK
+ self.parser.framesetOK = framesetOK
+
+ def startTagParamSource(self, token):
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ def startTagHr(self, token):
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+ self.parser.framesetOK = False
+
+ def startTagImage(self, token):
+            # Yes, the spec really does treat <image> as <img>
+ self.parser.parseError("unexpected-start-tag-treated-as",
+ {"originalName": "image", "newName": "img"})
+ self.processStartTag(impliedTagToken("img", "StartTag",
+ attributes=token["data"],
+ selfClosing=token["selfClosing"]))
+
+ def startTagIsIndex(self, token):
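+            # <isindex> is obsolete; the steps below rewrite it into roughly
+            # <form><hr><label>prompt<input name=isindex ...></label><hr></form>
+            # by feeding synthesized tokens back through the normal handlers.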
+ self.parser.parseError("deprecated-tag", {"name": "isindex"})
+ if self.tree.formPointer:
+ return
+ form_attrs = {}
+ if "action" in token["data"]:
+ form_attrs["action"] = token["data"]["action"]
+ self.processStartTag(impliedTagToken("form", "StartTag",
+ attributes=form_attrs))
+ self.processStartTag(impliedTagToken("hr", "StartTag"))
+ self.processStartTag(impliedTagToken("label", "StartTag"))
+ # XXX Localization ...
+ if "prompt" in token["data"]:
+ prompt = token["data"]["prompt"]
+ else:
+ prompt = "This is a searchable index. Enter search keywords: "
+ self.processCharacters(
+ {"type": tokenTypes["Characters"], "data": prompt})
+ attributes = token["data"].copy()
+ if "action" in attributes:
+ del attributes["action"]
+ if "prompt" in attributes:
+ del attributes["prompt"]
+ attributes["name"] = "isindex"
+ self.processStartTag(impliedTagToken("input", "StartTag",
+ attributes=attributes,
+ selfClosing=token["selfClosing"]))
+ self.processEndTag(impliedTagToken("label"))
+ self.processStartTag(impliedTagToken("hr", "StartTag"))
+ self.processEndTag(impliedTagToken("form"))
+
+ def startTagTextarea(self, token):
+ self.tree.insertElement(token)
+ self.parser.tokenizer.state = self.parser.tokenizer.rcdataState
+ self.processSpaceCharacters = self.processSpaceCharactersDropNewline
+ self.parser.framesetOK = False
+
+ def startTagIFrame(self, token):
+ self.parser.framesetOK = False
+ self.startTagRawtext(token)
+
+ def startTagNoscript(self, token):
+ if self.parser.scripting:
+ self.startTagRawtext(token)
+ else:
+ self.startTagOther(token)
+
+ def startTagRawtext(self, token):
+            """iframe, noembed, noframes, noscript (if scripting enabled)"""
+ self.parser.parseRCDataRawtext(token, "RAWTEXT")
+
+ def startTagOpt(self, token):
+ if self.tree.openElements[-1].name == "option":
+ self.parser.phase.processEndTag(impliedTagToken("option"))
+ self.tree.reconstructActiveFormattingElements()
+ self.parser.tree.insertElement(token)
+
+ def startTagSelect(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertElement(token)
+ self.parser.framesetOK = False
+ if self.parser.phase in (self.parser.phases["inTable"],
+ self.parser.phases["inCaption"],
+ self.parser.phases["inColumnGroup"],
+ self.parser.phases["inTableBody"],
+ self.parser.phases["inRow"],
+ self.parser.phases["inCell"]):
+ self.parser.phase = self.parser.phases["inSelectInTable"]
+ else:
+ self.parser.phase = self.parser.phases["inSelect"]
+
+ def startTagRpRt(self, token):
+ if self.tree.elementInScope("ruby"):
+ self.tree.generateImpliedEndTags()
+ if self.tree.openElements[-1].name != "ruby":
+ self.parser.parseError()
+ self.tree.insertElement(token)
+
+ def startTagMath(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.parser.adjustMathMLAttributes(token)
+ self.parser.adjustForeignAttributes(token)
+ token["namespace"] = namespaces["mathml"]
+ self.tree.insertElement(token)
+ # Need to get the parse error right for the case where the token
+ # has a namespace not equal to the xmlns attribute
+ if token["selfClosing"]:
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ def startTagSvg(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.parser.adjustSVGAttributes(token)
+ self.parser.adjustForeignAttributes(token)
+ token["namespace"] = namespaces["svg"]
+ self.tree.insertElement(token)
+ # Need to get the parse error right for the case where the token
+ # has a namespace not equal to the xmlns attribute
+ if token["selfClosing"]:
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ def startTagMisplaced(self, token):
+            """Elements that should be children of other elements that have a
+            different insertion mode; here they are ignored:
+            "caption", "col", "colgroup", "frame", "frameset", "head",
+            "option", "optgroup", "tbody", "td", "tfoot", "th", "thead",
+            "tr", "noscript"
+            """
+ self.parser.parseError("unexpected-start-tag-ignored", {"name": token["name"]})
+
+ def startTagOther(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertElement(token)
+
+ def endTagP(self, token):
+ if not self.tree.elementInScope("p", variant="button"):
+ self.startTagCloseP(impliedTagToken("p", "StartTag"))
+ self.parser.parseError("unexpected-end-tag", {"name": "p"})
+ self.endTagP(impliedTagToken("p", "EndTag"))
+ else:
+ self.tree.generateImpliedEndTags("p")
+ if self.tree.openElements[-1].name != "p":
+ self.parser.parseError("unexpected-end-tag", {"name": "p"})
+ node = self.tree.openElements.pop()
+ while node.name != "p":
+ node = self.tree.openElements.pop()
+
+ def endTagBody(self, token):
+ if not self.tree.elementInScope("body"):
+ self.parser.parseError()
+ return
+ elif self.tree.openElements[-1].name != "body":
+ for node in self.tree.openElements[2:]:
+ if node.name not in frozenset(("dd", "dt", "li", "optgroup",
+ "option", "p", "rp", "rt",
+ "tbody", "td", "tfoot",
+ "th", "thead", "tr", "body",
+ "html")):
+ # Not sure this is the correct name for the parse error
+ self.parser.parseError(
+ "expected-one-end-tag-but-got-another",
+ {"gotName": "body", "expectedName": node.name})
+ break
+ self.parser.phase = self.parser.phases["afterBody"]
+
+ def endTagHtml(self, token):
+ # We repeat the test for the body end tag token being ignored here
+ if self.tree.elementInScope("body"):
+ self.endTagBody(impliedTagToken("body"))
+ return token
+
+ def endTagBlock(self, token):
+ # Put us back in the right whitespace handling mode
+ if token["name"] == "pre":
+ self.processSpaceCharacters = self.processSpaceCharactersNonPre
+ inScope = self.tree.elementInScope(token["name"])
+ if inScope:
+ self.tree.generateImpliedEndTags()
+ if self.tree.openElements[-1].name != token["name"]:
+ self.parser.parseError("end-tag-too-early", {"name": token["name"]})
+ if inScope:
+ node = self.tree.openElements.pop()
+ while node.name != token["name"]:
+ node = self.tree.openElements.pop()
+
+ def endTagForm(self, token):
+ node = self.tree.formPointer
+ self.tree.formPointer = None
+ if node is None or not self.tree.elementInScope(node):
+ self.parser.parseError("unexpected-end-tag",
+ {"name": "form"})
+ else:
+ self.tree.generateImpliedEndTags()
+ if self.tree.openElements[-1] != node:
+ self.parser.parseError("end-tag-too-early-ignored",
+ {"name": "form"})
+ self.tree.openElements.remove(node)
+
+ def endTagListItem(self, token):
+ if token["name"] == "li":
+ variant = "list"
+ else:
+ variant = None
+ if not self.tree.elementInScope(token["name"], variant=variant):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+ else:
+ self.tree.generateImpliedEndTags(exclude=token["name"])
+ if self.tree.openElements[-1].name != token["name"]:
+ self.parser.parseError(
+ "end-tag-too-early",
+ {"name": token["name"]})
+ node = self.tree.openElements.pop()
+ while node.name != token["name"]:
+ node = self.tree.openElements.pop()
+
+ def endTagHeading(self, token):
+ for item in headingElements:
+ if self.tree.elementInScope(item):
+ self.tree.generateImpliedEndTags()
+ break
+ if self.tree.openElements[-1].name != token["name"]:
+ self.parser.parseError("end-tag-too-early", {"name": token["name"]})
+
+ for item in headingElements:
+ if self.tree.elementInScope(item):
+ item = self.tree.openElements.pop()
+ while item.name not in headingElements:
+ item = self.tree.openElements.pop()
+ break
+
+ def endTagFormatting(self, token):
+ """The much-feared adoption agency algorithm"""
+ # http://svn.whatwg.org/webapps/complete.html#adoptionAgency revision 7867
+ # XXX Better parseError messages appreciated.
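+            # For illustration: given misnested markup such as
+            # <a>1<p>2</a>3</p>, the </a> end tag reaches this algorithm with
+            # <a> as the formatting element and <p> as the furthest block, and
+            # the steps below reparent the content so the resulting tree is
+            # roughly <a>1</a><p><a>2</a>3</p>.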
+
+ # Step 1
+ outerLoopCounter = 0
+
+ # Step 2
+ while outerLoopCounter < 8:
+
+ # Step 3
+ outerLoopCounter += 1
+
+ # Step 4:
+
+ # Let the formatting element be the last element in
+ # the list of active formatting elements that:
+ # - is between the end of the list and the last scope
+ # marker in the list, if any, or the start of the list
+ # otherwise, and
+ # - has the same tag name as the token.
+ formattingElement = self.tree.elementInActiveFormattingElements(
+ token["name"])
+ if (not formattingElement or
+ (formattingElement in self.tree.openElements and
+ not self.tree.elementInScope(formattingElement.name))):
+ # If there is no such node, then abort these steps
+ # and instead act as described in the "any other
+ # end tag" entry below.
+ self.endTagOther(token)
+ return
+
+ # Otherwise, if there is such a node, but that node is
+ # not in the stack of open elements, then this is a
+ # parse error; remove the element from the list, and
+ # abort these steps.
+ elif formattingElement not in self.tree.openElements:
+ self.parser.parseError("adoption-agency-1.2", {"name": token["name"]})
+ self.tree.activeFormattingElements.remove(formattingElement)
+ return
+
+ # Otherwise, if there is such a node, and that node is
+ # also in the stack of open elements, but the element
+ # is not in scope, then this is a parse error; ignore
+ # the token, and abort these steps.
+ elif not self.tree.elementInScope(formattingElement.name):
+ self.parser.parseError("adoption-agency-4.4", {"name": token["name"]})
+ return
+
+ # Otherwise, there is a formatting element and that
+ # element is in the stack and is in scope. If the
+ # element is not the current node, this is a parse
+ # error. In any case, proceed with the algorithm as
+ # written in the following steps.
+ else:
+ if formattingElement != self.tree.openElements[-1]:
+ self.parser.parseError("adoption-agency-1.3", {"name": token["name"]})
+
+ # Step 5:
+
+ # Let the furthest block be the topmost node in the
+ # stack of open elements that is lower in the stack
+ # than the formatting element, and is an element in
+ # the special category. There might not be one.
+ afeIndex = self.tree.openElements.index(formattingElement)
+ furthestBlock = None
+ for element in self.tree.openElements[afeIndex:]:
+ if element.nameTuple in specialElements:
+ furthestBlock = element
+ break
+
+ # Step 6:
+
+ # If there is no furthest block, then the UA must
+ # first pop all the nodes from the bottom of the stack
+ # of open elements, from the current node up to and
+ # including the formatting element, then remove the
+ # formatting element from the list of active
+ # formatting elements, and finally abort these steps.
+ if furthestBlock is None:
+ element = self.tree.openElements.pop()
+ while element != formattingElement:
+ element = self.tree.openElements.pop()
+ self.tree.activeFormattingElements.remove(element)
+ return
+
+ # Step 7
+ commonAncestor = self.tree.openElements[afeIndex - 1]
+
+ # Step 8:
+ # The bookmark is supposed to help us identify where to reinsert
+ # nodes in step 15. We have to ensure that we reinsert nodes after
+ # the node before the active formatting element. Note the bookmark
+ # can move in step 9.7
+ bookmark = self.tree.activeFormattingElements.index(formattingElement)
+
+ # Step 9
+ lastNode = node = furthestBlock
+ innerLoopCounter = 0
+
+ index = self.tree.openElements.index(node)
+ while innerLoopCounter < 3:
+ innerLoopCounter += 1
+ # Node is element before node in open elements
+ index -= 1
+ node = self.tree.openElements[index]
+ if node not in self.tree.activeFormattingElements:
+ self.tree.openElements.remove(node)
+ continue
+ # Step 9.6
+ if node == formattingElement:
+ break
+ # Step 9.7
+ if lastNode == furthestBlock:
+ bookmark = self.tree.activeFormattingElements.index(node) + 1
+ # Step 9.8
+ clone = node.cloneNode()
+ # Replace node with clone
+ self.tree.activeFormattingElements[
+ self.tree.activeFormattingElements.index(node)] = clone
+ self.tree.openElements[
+ self.tree.openElements.index(node)] = clone
+ node = clone
+ # Step 9.9
+ # Remove lastNode from its parents, if any
+ if lastNode.parent:
+ lastNode.parent.removeChild(lastNode)
+ node.appendChild(lastNode)
+ # Step 9.10
+ lastNode = node
+
+ # Step 10
+ # Foster parent lastNode if commonAncestor is a
+ # table, tbody, tfoot, thead, or tr we need to foster
+ # parent the lastNode
+ if lastNode.parent:
+ lastNode.parent.removeChild(lastNode)
+
+ if commonAncestor.name in frozenset(("table", "tbody", "tfoot", "thead", "tr")):
+ parent, insertBefore = self.tree.getTableMisnestedNodePosition()
+ parent.insertBefore(lastNode, insertBefore)
+ else:
+ commonAncestor.appendChild(lastNode)
+
+ # Step 11
+ clone = formattingElement.cloneNode()
+
+ # Step 12
+ furthestBlock.reparentChildren(clone)
+
+ # Step 13
+ furthestBlock.appendChild(clone)
+
+ # Step 14
+ self.tree.activeFormattingElements.remove(formattingElement)
+ self.tree.activeFormattingElements.insert(bookmark, clone)
+
+ # Step 15
+ self.tree.openElements.remove(formattingElement)
+ self.tree.openElements.insert(
+ self.tree.openElements.index(furthestBlock) + 1, clone)
+
+ def endTagAppletMarqueeObject(self, token):
+ if self.tree.elementInScope(token["name"]):
+ self.tree.generateImpliedEndTags()
+ if self.tree.openElements[-1].name != token["name"]:
+ self.parser.parseError("end-tag-too-early", {"name": token["name"]})
+
+ if self.tree.elementInScope(token["name"]):
+ element = self.tree.openElements.pop()
+ while element.name != token["name"]:
+ element = self.tree.openElements.pop()
+ self.tree.clearActiveFormattingElements()
+
+ def endTagBr(self, token):
+ self.parser.parseError("unexpected-end-tag-treated-as",
+ {"originalName": "br", "newName": "br element"})
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertElement(impliedTagToken("br", "StartTag"))
+ self.tree.openElements.pop()
+
+ def endTagOther(self, token):
+ for node in self.tree.openElements[::-1]:
+ if node.name == token["name"]:
+ self.tree.generateImpliedEndTags(exclude=token["name"])
+ if self.tree.openElements[-1].name != token["name"]:
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+ while self.tree.openElements.pop() != node:
+ pass
+ break
+ else:
+ if node.nameTuple in specialElements:
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+ break
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", Phase.startTagHtml),
+ (("base", "basefont", "bgsound", "command", "link", "meta",
+ "script", "style", "title"),
+ startTagProcessInHead),
+ ("body", startTagBody),
+ ("frameset", startTagFrameset),
+ (("address", "article", "aside", "blockquote", "center", "details",
+ "dir", "div", "dl", "fieldset", "figcaption", "figure",
+ "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p",
+ "section", "summary", "ul"),
+ startTagCloseP),
+ (headingElements, startTagHeading),
+ (("pre", "listing"), startTagPreListing),
+ ("form", startTagForm),
+ (("li", "dd", "dt"), startTagListItem),
+ ("plaintext", startTagPlaintext),
+ ("a", startTagA),
+ (("b", "big", "code", "em", "font", "i", "s", "small", "strike",
+ "strong", "tt", "u"), startTagFormatting),
+ ("nobr", startTagNobr),
+ ("button", startTagButton),
+ (("applet", "marquee", "object"), startTagAppletMarqueeObject),
+ ("xmp", startTagXmp),
+ ("table", startTagTable),
+ (("area", "br", "embed", "img", "keygen", "wbr"),
+ startTagVoidFormatting),
+ (("param", "source", "track"), startTagParamSource),
+ ("input", startTagInput),
+ ("hr", startTagHr),
+ ("image", startTagImage),
+ ("isindex", startTagIsIndex),
+ ("textarea", startTagTextarea),
+ ("iframe", startTagIFrame),
+ ("noscript", startTagNoscript),
+ (("noembed", "noframes"), startTagRawtext),
+ ("select", startTagSelect),
+ (("rp", "rt"), startTagRpRt),
+ (("option", "optgroup"), startTagOpt),
+ (("math"), startTagMath),
+ (("svg"), startTagSvg),
+ (("caption", "col", "colgroup", "frame", "head",
+ "tbody", "td", "tfoot", "th", "thead",
+ "tr"), startTagMisplaced)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ ("body", endTagBody),
+ ("html", endTagHtml),
+ (("address", "article", "aside", "blockquote", "button", "center",
+ "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure",
+ "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre",
+ "section", "summary", "ul"), endTagBlock),
+ ("form", endTagForm),
+ ("p", endTagP),
+ (("dd", "dt", "li"), endTagListItem),
+ (headingElements, endTagHeading),
+ (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small",
+ "strike", "strong", "tt", "u"), endTagFormatting),
+ (("applet", "marquee", "object"), endTagAppletMarqueeObject),
+ ("br", endTagBr),
+ ])
+ endTagHandler.default = endTagOther
+
+ class TextPhase(Phase):
+ __slots__ = tuple()
+
+ def processCharacters(self, token):
+ self.tree.insertText(token["data"])
+
+ def processEOF(self):
+ self.parser.parseError("expected-named-closing-tag-but-got-eof",
+ {"name": self.tree.openElements[-1].name})
+ self.tree.openElements.pop()
+ self.parser.phase = self.parser.originalPhase
+ return True
+
+ def startTagOther(self, token):
+ assert False, "Tried to process start tag %s in RCDATA/RAWTEXT mode" % token['name']
+
+ def endTagScript(self, token):
+ node = self.tree.openElements.pop()
+ assert node.name == "script"
+ self.parser.phase = self.parser.originalPhase
+ # The rest of this method is all stuff that only happens if
+ # document.write works
+
+ def endTagOther(self, token):
+ self.tree.openElements.pop()
+ self.parser.phase = self.parser.originalPhase
+
+ startTagHandler = _utils.MethodDispatcher([])
+ startTagHandler.default = startTagOther
+ endTagHandler = _utils.MethodDispatcher([
+ ("script", endTagScript)])
+ endTagHandler.default = endTagOther
+
+ class InTablePhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-table
+ __slots__ = tuple()
+
+ # helper methods
+ def clearStackToTableContext(self):
+ # "clear the stack back to a table context"
+ while self.tree.openElements[-1].name not in ("table", "html"):
+ # self.parser.parseError("unexpected-implied-end-tag-in-table",
+ # {"name": self.tree.openElements[-1].name})
+ self.tree.openElements.pop()
+ # When the current node is <html> it's an innerHTML case
+
+ # processing methods
+ def processEOF(self):
+ if self.tree.openElements[-1].name != "html":
+ self.parser.parseError("eof-in-table")
+ else:
+ assert self.parser.innerHTML
+ # Stop parsing
+
+ def processSpaceCharacters(self, token):
+ originalPhase = self.parser.phase
+ self.parser.phase = self.parser.phases["inTableText"]
+ self.parser.phase.originalPhase = originalPhase
+ self.parser.phase.processSpaceCharacters(token)
+
+ def processCharacters(self, token):
+ originalPhase = self.parser.phase
+ self.parser.phase = self.parser.phases["inTableText"]
+ self.parser.phase.originalPhase = originalPhase
+ self.parser.phase.processCharacters(token)
+
+ def insertText(self, token):
+ # If we get here there must be at least one non-whitespace character
+ # Do the table magic!
+ self.tree.insertFromTable = True
+ self.parser.phases["inBody"].processCharacters(token)
+ self.tree.insertFromTable = False
+
+ def startTagCaption(self, token):
+ self.clearStackToTableContext()
+ self.tree.activeFormattingElements.append(Marker)
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inCaption"]
+
+ def startTagColgroup(self, token):
+ self.clearStackToTableContext()
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inColumnGroup"]
+
+ def startTagCol(self, token):
+ self.startTagColgroup(impliedTagToken("colgroup", "StartTag"))
+ return token
+
+ def startTagRowGroup(self, token):
+ self.clearStackToTableContext()
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inTableBody"]
+
+ def startTagImplyTbody(self, token):
+ self.startTagRowGroup(impliedTagToken("tbody", "StartTag"))
+ return token
+
+ def startTagTable(self, token):
+ self.parser.parseError("unexpected-start-tag-implies-end-tag",
+ {"startName": "table", "endName": "table"})
+ self.parser.phase.processEndTag(impliedTagToken("table"))
+ if not self.parser.innerHTML:
+ return token
+
+ def startTagStyleScript(self, token):
+ return self.parser.phases["inHead"].processStartTag(token)
+
+ def startTagInput(self, token):
+ if ("type" in token["data"] and
+ token["data"]["type"].translate(asciiUpper2Lower) == "hidden"):
+ self.parser.parseError("unexpected-hidden-input-in-table")
+ self.tree.insertElement(token)
+ # XXX associate with form
+ self.tree.openElements.pop()
+ else:
+ self.startTagOther(token)
+
+ def startTagForm(self, token):
+ self.parser.parseError("unexpected-form-in-table")
+ if self.tree.formPointer is None:
+ self.tree.insertElement(token)
+ self.tree.formPointer = self.tree.openElements[-1]
+ self.tree.openElements.pop()
+
+ def startTagOther(self, token):
+ self.parser.parseError("unexpected-start-tag-implies-table-voodoo", {"name": token["name"]})
+ # Do the table magic!
+ self.tree.insertFromTable = True
+ self.parser.phases["inBody"].processStartTag(token)
+ self.tree.insertFromTable = False
+
+ def endTagTable(self, token):
+ if self.tree.elementInScope("table", variant="table"):
+ self.tree.generateImpliedEndTags()
+ if self.tree.openElements[-1].name != "table":
+ self.parser.parseError("end-tag-too-early-named",
+ {"gotName": "table",
+ "expectedName": self.tree.openElements[-1].name})
+ while self.tree.openElements[-1].name != "table":
+ self.tree.openElements.pop()
+ self.tree.openElements.pop()
+ self.parser.resetInsertionMode()
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def endTagIgnore(self, token):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag-implies-table-voodoo", {"name": token["name"]})
+ # Do the table magic!
+ self.tree.insertFromTable = True
+ self.parser.phases["inBody"].processEndTag(token)
+ self.tree.insertFromTable = False
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", Phase.startTagHtml),
+ ("caption", startTagCaption),
+ ("colgroup", startTagColgroup),
+ ("col", startTagCol),
+ (("tbody", "tfoot", "thead"), startTagRowGroup),
+ (("td", "th", "tr"), startTagImplyTbody),
+ ("table", startTagTable),
+ (("style", "script"), startTagStyleScript),
+ ("input", startTagInput),
+ ("form", startTagForm)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ ("table", endTagTable),
+ (("body", "caption", "col", "colgroup", "html", "tbody", "td",
+ "tfoot", "th", "thead", "tr"), endTagIgnore)
+ ])
+ endTagHandler.default = endTagOther
+
+ class InTableTextPhase(Phase):
+ __slots__ = ("originalPhase", "characterTokens")
+
+ def __init__(self, *args, **kwargs):
+ super(InTableTextPhase, self).__init__(*args, **kwargs)
+ self.originalPhase = None
+ self.characterTokens = []
+
+ def flushCharacters(self):
+ data = "".join([item["data"] for item in self.characterTokens])
+ if any([item not in spaceCharacters for item in data]):
+ token = {"type": tokenTypes["Characters"], "data": data}
+ self.parser.phases["inTable"].insertText(token)
+ elif data:
+ self.tree.insertText(data)
+ self.characterTokens = []
+
+ def processComment(self, token):
+ self.flushCharacters()
+ self.parser.phase = self.originalPhase
+ return token
+
+ def processEOF(self):
+ self.flushCharacters()
+ self.parser.phase = self.originalPhase
+ return True
+
+ def processCharacters(self, token):
+ if token["data"] == "\u0000":
+ return
+ self.characterTokens.append(token)
+
+ def processSpaceCharacters(self, token):
+ # pretty sure we should never reach here
+ self.characterTokens.append(token)
+ # assert False
+
+ def processStartTag(self, token):
+ self.flushCharacters()
+ self.parser.phase = self.originalPhase
+ return token
+
+ def processEndTag(self, token):
+ self.flushCharacters()
+ self.parser.phase = self.originalPhase
+ return token
+
+ class InCaptionPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-caption
+ __slots__ = tuple()
+
+ def ignoreEndTagCaption(self):
+ return not self.tree.elementInScope("caption", variant="table")
+
+ def processEOF(self):
+ self.parser.phases["inBody"].processEOF()
+
+ def processCharacters(self, token):
+ return self.parser.phases["inBody"].processCharacters(token)
+
+ def startTagTableElement(self, token):
+ self.parser.parseError()
+ # XXX Have to duplicate logic here to find out if the tag is ignored
+ ignoreEndTag = self.ignoreEndTagCaption()
+ self.parser.phase.processEndTag(impliedTagToken("caption"))
+ if not ignoreEndTag:
+ return token
+
+ def startTagOther(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def endTagCaption(self, token):
+ if not self.ignoreEndTagCaption():
+                # Note: this code is quite similar to endTagTable in "InTable"
+ self.tree.generateImpliedEndTags()
+ if self.tree.openElements[-1].name != "caption":
+ self.parser.parseError("expected-one-end-tag-but-got-another",
+ {"gotName": "caption",
+ "expectedName": self.tree.openElements[-1].name})
+ while self.tree.openElements[-1].name != "caption":
+ self.tree.openElements.pop()
+ self.tree.openElements.pop()
+ self.tree.clearActiveFormattingElements()
+ self.parser.phase = self.parser.phases["inTable"]
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def endTagTable(self, token):
+ self.parser.parseError()
+ ignoreEndTag = self.ignoreEndTagCaption()
+ self.parser.phase.processEndTag(impliedTagToken("caption"))
+ if not ignoreEndTag:
+ return token
+
+ def endTagIgnore(self, token):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def endTagOther(self, token):
+ return self.parser.phases["inBody"].processEndTag(token)
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", Phase.startTagHtml),
+ (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
+ "thead", "tr"), startTagTableElement)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ ("caption", endTagCaption),
+ ("table", endTagTable),
+ (("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th",
+ "thead", "tr"), endTagIgnore)
+ ])
+ endTagHandler.default = endTagOther
+
+ class InColumnGroupPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-column
+ __slots__ = tuple()
+
+ def ignoreEndTagColgroup(self):
+ return self.tree.openElements[-1].name == "html"
+
+ def processEOF(self):
+ if self.tree.openElements[-1].name == "html":
+ assert self.parser.innerHTML
+ return
+ else:
+ ignoreEndTag = self.ignoreEndTagColgroup()
+ self.endTagColgroup(impliedTagToken("colgroup"))
+ if not ignoreEndTag:
+ return True
+
+ def processCharacters(self, token):
+ ignoreEndTag = self.ignoreEndTagColgroup()
+ self.endTagColgroup(impliedTagToken("colgroup"))
+ if not ignoreEndTag:
+ return token
+
+ def startTagCol(self, token):
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ def startTagOther(self, token):
+ ignoreEndTag = self.ignoreEndTagColgroup()
+ self.endTagColgroup(impliedTagToken("colgroup"))
+ if not ignoreEndTag:
+ return token
+
+ def endTagColgroup(self, token):
+ if self.ignoreEndTagColgroup():
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+ else:
+ self.tree.openElements.pop()
+ self.parser.phase = self.parser.phases["inTable"]
+
+ def endTagCol(self, token):
+ self.parser.parseError("no-end-tag", {"name": "col"})
+
+ def endTagOther(self, token):
+ ignoreEndTag = self.ignoreEndTagColgroup()
+ self.endTagColgroup(impliedTagToken("colgroup"))
+ if not ignoreEndTag:
+ return token
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", Phase.startTagHtml),
+ ("col", startTagCol)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ ("colgroup", endTagColgroup),
+ ("col", endTagCol)
+ ])
+ endTagHandler.default = endTagOther
+
+ class InTableBodyPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-table0
+ __slots__ = tuple()
+
+ # helper methods
+ def clearStackToTableBodyContext(self):
+ while self.tree.openElements[-1].name not in ("tbody", "tfoot",
+ "thead", "html"):
+ # self.parser.parseError("unexpected-implied-end-tag-in-table",
+ # {"name": self.tree.openElements[-1].name})
+ self.tree.openElements.pop()
+ if self.tree.openElements[-1].name == "html":
+ assert self.parser.innerHTML
+
+ # the rest
+ def processEOF(self):
+ self.parser.phases["inTable"].processEOF()
+
+ def processSpaceCharacters(self, token):
+ return self.parser.phases["inTable"].processSpaceCharacters(token)
+
+ def processCharacters(self, token):
+ return self.parser.phases["inTable"].processCharacters(token)
+
+ def startTagTr(self, token):
+ self.clearStackToTableBodyContext()
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inRow"]
+
+ def startTagTableCell(self, token):
+ self.parser.parseError("unexpected-cell-in-table-body",
+ {"name": token["name"]})
+ self.startTagTr(impliedTagToken("tr", "StartTag"))
+ return token
+
+ def startTagTableOther(self, token):
+ # XXX AT Any ideas on how to share this with endTagTable?
+ if (self.tree.elementInScope("tbody", variant="table") or
+ self.tree.elementInScope("thead", variant="table") or
+ self.tree.elementInScope("tfoot", variant="table")):
+ self.clearStackToTableBodyContext()
+ self.endTagTableRowGroup(
+ impliedTagToken(self.tree.openElements[-1].name))
+ return token
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def startTagOther(self, token):
+ return self.parser.phases["inTable"].processStartTag(token)
+
+ def endTagTableRowGroup(self, token):
+ if self.tree.elementInScope(token["name"], variant="table"):
+ self.clearStackToTableBodyContext()
+ self.tree.openElements.pop()
+ self.parser.phase = self.parser.phases["inTable"]
+ else:
+ self.parser.parseError("unexpected-end-tag-in-table-body",
+ {"name": token["name"]})
+
+ def endTagTable(self, token):
+ if (self.tree.elementInScope("tbody", variant="table") or
+ self.tree.elementInScope("thead", variant="table") or
+ self.tree.elementInScope("tfoot", variant="table")):
+ self.clearStackToTableBodyContext()
+ self.endTagTableRowGroup(
+ impliedTagToken(self.tree.openElements[-1].name))
+ return token
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def endTagIgnore(self, token):
+ self.parser.parseError("unexpected-end-tag-in-table-body",
+ {"name": token["name"]})
+
+ def endTagOther(self, token):
+ return self.parser.phases["inTable"].processEndTag(token)
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", Phase.startTagHtml),
+ ("tr", startTagTr),
+ (("td", "th"), startTagTableCell),
+ (("caption", "col", "colgroup", "tbody", "tfoot", "thead"),
+ startTagTableOther)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ (("tbody", "tfoot", "thead"), endTagTableRowGroup),
+ ("table", endTagTable),
+ (("body", "caption", "col", "colgroup", "html", "td", "th",
+ "tr"), endTagIgnore)
+ ])
+ endTagHandler.default = endTagOther
+
+ class InRowPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-row
+ __slots__ = tuple()
+
+ # helper methods (XXX unify this with other table helper methods)
+ def clearStackToTableRowContext(self):
+ while self.tree.openElements[-1].name not in ("tr", "html"):
+ self.parser.parseError("unexpected-implied-end-tag-in-table-row",
+ {"name": self.tree.openElements[-1].name})
+ self.tree.openElements.pop()
+
+ def ignoreEndTagTr(self):
+ return not self.tree.elementInScope("tr", variant="table")
+
+ # the rest
+ def processEOF(self):
+ self.parser.phases["inTable"].processEOF()
+
+ def processSpaceCharacters(self, token):
+ return self.parser.phases["inTable"].processSpaceCharacters(token)
+
+ def processCharacters(self, token):
+ return self.parser.phases["inTable"].processCharacters(token)
+
+ def startTagTableCell(self, token):
+ self.clearStackToTableRowContext()
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inCell"]
+ self.tree.activeFormattingElements.append(Marker)
+
+ def startTagTableOther(self, token):
+ ignoreEndTag = self.ignoreEndTagTr()
+ self.endTagTr(impliedTagToken("tr"))
+ # XXX how are we sure it's always ignored in the innerHTML case?
+ if not ignoreEndTag:
+ return token
+
+ def startTagOther(self, token):
+ return self.parser.phases["inTable"].processStartTag(token)
+
+ def endTagTr(self, token):
+ if not self.ignoreEndTagTr():
+ self.clearStackToTableRowContext()
+ self.tree.openElements.pop()
+ self.parser.phase = self.parser.phases["inTableBody"]
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def endTagTable(self, token):
+ ignoreEndTag = self.ignoreEndTagTr()
+ self.endTagTr(impliedTagToken("tr"))
+ # Reprocess the current tag if the tr end tag was not ignored
+ # XXX how are we sure it's always ignored in the innerHTML case?
+ if not ignoreEndTag:
+ return token
+
+ def endTagTableRowGroup(self, token):
+ if self.tree.elementInScope(token["name"], variant="table"):
+ self.endTagTr(impliedTagToken("tr"))
+ return token
+ else:
+ self.parser.parseError()
+
+ def endTagIgnore(self, token):
+ self.parser.parseError("unexpected-end-tag-in-table-row",
+ {"name": token["name"]})
+
+ def endTagOther(self, token):
+ return self.parser.phases["inTable"].processEndTag(token)
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", Phase.startTagHtml),
+ (("td", "th"), startTagTableCell),
+ (("caption", "col", "colgroup", "tbody", "tfoot", "thead",
+ "tr"), startTagTableOther)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ ("tr", endTagTr),
+ ("table", endTagTable),
+ (("tbody", "tfoot", "thead"), endTagTableRowGroup),
+ (("body", "caption", "col", "colgroup", "html", "td", "th"),
+ endTagIgnore)
+ ])
+ endTagHandler.default = endTagOther
+
+ class InCellPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-cell
+ __slots__ = tuple()
+
+ # helper
+ def closeCell(self):
+ if self.tree.elementInScope("td", variant="table"):
+ self.endTagTableCell(impliedTagToken("td"))
+ elif self.tree.elementInScope("th", variant="table"):
+ self.endTagTableCell(impliedTagToken("th"))
+
+ # the rest
+ def processEOF(self):
+ self.parser.phases["inBody"].processEOF()
+
+ def processCharacters(self, token):
+ return self.parser.phases["inBody"].processCharacters(token)
+
+ def startTagTableOther(self, token):
+ if (self.tree.elementInScope("td", variant="table") or
+ self.tree.elementInScope("th", variant="table")):
+ self.closeCell()
+ return token
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def startTagOther(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def endTagTableCell(self, token):
+ if self.tree.elementInScope(token["name"], variant="table"):
+ self.tree.generateImpliedEndTags(token["name"])
+ if self.tree.openElements[-1].name != token["name"]:
+ self.parser.parseError("unexpected-cell-end-tag",
+ {"name": token["name"]})
+ while True:
+ node = self.tree.openElements.pop()
+ if node.name == token["name"]:
+ break
+ else:
+ self.tree.openElements.pop()
+ self.tree.clearActiveFormattingElements()
+ self.parser.phase = self.parser.phases["inRow"]
+ else:
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def endTagIgnore(self, token):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def endTagImply(self, token):
+ if self.tree.elementInScope(token["name"], variant="table"):
+ self.closeCell()
+ return token
+ else:
+ # sometimes innerHTML case
+ self.parser.parseError()
+
+ def endTagOther(self, token):
+ return self.parser.phases["inBody"].processEndTag(token)
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", Phase.startTagHtml),
+ (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
+ "thead", "tr"), startTagTableOther)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ (("td", "th"), endTagTableCell),
+ (("body", "caption", "col", "colgroup", "html"), endTagIgnore),
+ (("table", "tbody", "tfoot", "thead", "tr"), endTagImply)
+ ])
+ endTagHandler.default = endTagOther
+
+ class InSelectPhase(Phase):
+ __slots__ = tuple()
+
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-select
+ def processEOF(self):
+ if self.tree.openElements[-1].name != "html":
+ self.parser.parseError("eof-in-select")
+ else:
+ assert self.parser.innerHTML
+
+ def processCharacters(self, token):
+ if token["data"] == "\u0000":
+ return
+ self.tree.insertText(token["data"])
+
+ def startTagOption(self, token):
+ # We need to imply </option> if <option> is the current node.
+ if self.tree.openElements[-1].name == "option":
+ self.tree.openElements.pop()
+ self.tree.insertElement(token)
+
+ def startTagOptgroup(self, token):
+ if self.tree.openElements[-1].name == "option":
+ self.tree.openElements.pop()
+ if self.tree.openElements[-1].name == "optgroup":
+ self.tree.openElements.pop()
+ self.tree.insertElement(token)
+
+ def startTagSelect(self, token):
+ self.parser.parseError("unexpected-select-in-select")
+ self.endTagSelect(impliedTagToken("select"))
+
+ def startTagInput(self, token):
+ self.parser.parseError("unexpected-input-in-select")
+ if self.tree.elementInScope("select", variant="select"):
+ self.endTagSelect(impliedTagToken("select"))
+ return token
+ else:
+ assert self.parser.innerHTML
+
+ def startTagScript(self, token):
+ return self.parser.phases["inHead"].processStartTag(token)
+
+ def startTagOther(self, token):
+ self.parser.parseError("unexpected-start-tag-in-select",
+ {"name": token["name"]})
+
+ def endTagOption(self, token):
+ if self.tree.openElements[-1].name == "option":
+ self.tree.openElements.pop()
+ else:
+ self.parser.parseError("unexpected-end-tag-in-select",
+ {"name": "option"})
+
+ def endTagOptgroup(self, token):
+ # </optgroup> implicitly closes <option>
+ if (self.tree.openElements[-1].name == "option" and
+ self.tree.openElements[-2].name == "optgroup"):
+ self.tree.openElements.pop()
+            # It also closes the <optgroup> element
+ if self.tree.openElements[-1].name == "optgroup":
+ self.tree.openElements.pop()
+ # But nothing else
+ else:
+ self.parser.parseError("unexpected-end-tag-in-select",
+ {"name": "optgroup"})
+
+ def endTagSelect(self, token):
+ if self.tree.elementInScope("select", variant="select"):
+ node = self.tree.openElements.pop()
+ while node.name != "select":
+ node = self.tree.openElements.pop()
+ self.parser.resetInsertionMode()
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag-in-select",
+ {"name": token["name"]})
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", Phase.startTagHtml),
+ ("option", startTagOption),
+ ("optgroup", startTagOptgroup),
+ ("select", startTagSelect),
+ (("input", "keygen", "textarea"), startTagInput),
+ ("script", startTagScript)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ ("option", endTagOption),
+ ("optgroup", endTagOptgroup),
+ ("select", endTagSelect)
+ ])
+ endTagHandler.default = endTagOther
+
+ class InSelectInTablePhase(Phase):
+ __slots__ = tuple()
+
+ def processEOF(self):
+ self.parser.phases["inSelect"].processEOF()
+
+ def processCharacters(self, token):
+ return self.parser.phases["inSelect"].processCharacters(token)
+
+ def startTagTable(self, token):
+ self.parser.parseError("unexpected-table-element-start-tag-in-select-in-table", {"name": token["name"]})
+ self.endTagOther(impliedTagToken("select"))
+ return token
+
+ def startTagOther(self, token):
+ return self.parser.phases["inSelect"].processStartTag(token)
+
+ def endTagTable(self, token):
+ self.parser.parseError("unexpected-table-element-end-tag-in-select-in-table", {"name": token["name"]})
+ if self.tree.elementInScope(token["name"], variant="table"):
+ self.endTagOther(impliedTagToken("select"))
+ return token
+
+ def endTagOther(self, token):
+ return self.parser.phases["inSelect"].processEndTag(token)
+
+ startTagHandler = _utils.MethodDispatcher([
+ (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"),
+ startTagTable)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"),
+ endTagTable)
+ ])
+ endTagHandler.default = endTagOther
+
+ class InForeignContentPhase(Phase):
+ __slots__ = tuple()
+
+ breakoutElements = frozenset(["b", "big", "blockquote", "body", "br",
+ "center", "code", "dd", "div", "dl", "dt",
+ "em", "embed", "h1", "h2", "h3",
+ "h4", "h5", "h6", "head", "hr", "i", "img",
+ "li", "listing", "menu", "meta", "nobr",
+ "ol", "p", "pre", "ruby", "s", "small",
+ "span", "strong", "strike", "sub", "sup",
+ "table", "tt", "u", "ul", "var"])
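+        # Start tags for any of these HTML elements (or for <font> carrying a
+        # color, face or size attribute) "break out" of foreign SVG/MathML
+        # content: processStartTag below pops foreign elements until an HTML
+        # node or an integration point is current, then reprocesses the token.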
+
+ def adjustSVGTagNames(self, token):
+ replacements = {"altglyph": "altGlyph",
+ "altglyphdef": "altGlyphDef",
+ "altglyphitem": "altGlyphItem",
+ "animatecolor": "animateColor",
+ "animatemotion": "animateMotion",
+ "animatetransform": "animateTransform",
+ "clippath": "clipPath",
+ "feblend": "feBlend",
+ "fecolormatrix": "feColorMatrix",
+ "fecomponenttransfer": "feComponentTransfer",
+ "fecomposite": "feComposite",
+ "feconvolvematrix": "feConvolveMatrix",
+ "fediffuselighting": "feDiffuseLighting",
+ "fedisplacementmap": "feDisplacementMap",
+ "fedistantlight": "feDistantLight",
+ "feflood": "feFlood",
+ "fefunca": "feFuncA",
+ "fefuncb": "feFuncB",
+ "fefuncg": "feFuncG",
+ "fefuncr": "feFuncR",
+ "fegaussianblur": "feGaussianBlur",
+ "feimage": "feImage",
+ "femerge": "feMerge",
+ "femergenode": "feMergeNode",
+ "femorphology": "feMorphology",
+ "feoffset": "feOffset",
+ "fepointlight": "fePointLight",
+ "fespecularlighting": "feSpecularLighting",
+ "fespotlight": "feSpotLight",
+ "fetile": "feTile",
+ "feturbulence": "feTurbulence",
+ "foreignobject": "foreignObject",
+ "glyphref": "glyphRef",
+ "lineargradient": "linearGradient",
+ "radialgradient": "radialGradient",
+ "textpath": "textPath"}
+
+ if token["name"] in replacements:
+ token["name"] = replacements[token["name"]]
+
+ def processCharacters(self, token):
+ if token["data"] == "\u0000":
+ token["data"] = "\uFFFD"
+ elif (self.parser.framesetOK and
+ any(char not in spaceCharacters for char in token["data"])):
+ self.parser.framesetOK = False
+ Phase.processCharacters(self, token)
+
+ def processStartTag(self, token):
+ currentNode = self.tree.openElements[-1]
+ if (token["name"] in self.breakoutElements or
+ (token["name"] == "font" and
+ set(token["data"].keys()) & {"color", "face", "size"})):
+ self.parser.parseError("unexpected-html-element-in-foreign-content",
+ {"name": token["name"]})
+ while (self.tree.openElements[-1].namespace !=
+ self.tree.defaultNamespace and
+ not self.parser.isHTMLIntegrationPoint(self.tree.openElements[-1]) and
+ not self.parser.isMathMLTextIntegrationPoint(self.tree.openElements[-1])):
+ self.tree.openElements.pop()
+ return token
+
+ else:
+ if currentNode.namespace == namespaces["mathml"]:
+ self.parser.adjustMathMLAttributes(token)
+ elif currentNode.namespace == namespaces["svg"]:
+ self.adjustSVGTagNames(token)
+ self.parser.adjustSVGAttributes(token)
+ self.parser.adjustForeignAttributes(token)
+ token["namespace"] = currentNode.namespace
+ self.tree.insertElement(token)
+ if token["selfClosing"]:
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ def processEndTag(self, token):
+ nodeIndex = len(self.tree.openElements) - 1
+ node = self.tree.openElements[-1]
+ if node.name.translate(asciiUpper2Lower) != token["name"]:
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ while True:
+ if node.name.translate(asciiUpper2Lower) == token["name"]:
+ # XXX this isn't in the spec but it seems necessary
+ if self.parser.phase == self.parser.phases["inTableText"]:
+ self.parser.phase.flushCharacters()
+ self.parser.phase = self.parser.phase.originalPhase
+ while self.tree.openElements.pop() != node:
+ assert self.tree.openElements
+ new_token = None
+ break
+ nodeIndex -= 1
+
+ node = self.tree.openElements[nodeIndex]
+ if node.namespace != self.tree.defaultNamespace:
+ continue
+ else:
+ new_token = self.parser.phase.processEndTag(token)
+ break
+ return new_token
+
+ class AfterBodyPhase(Phase):
+ __slots__ = tuple()
+
+ def processEOF(self):
+ # Stop parsing
+ pass
+
+ def processComment(self, token):
+ # This is needed because data is to be appended to the <html> element
+ # here and not to whatever is currently open.
+ self.tree.insertComment(token, self.tree.openElements[0])
+
+ def processCharacters(self, token):
+ self.parser.parseError("unexpected-char-after-body")
+ self.parser.phase = self.parser.phases["inBody"]
+ return token
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagOther(self, token):
+ self.parser.parseError("unexpected-start-tag-after-body",
+ {"name": token["name"]})
+ self.parser.phase = self.parser.phases["inBody"]
+ return token
+
+ def endTagHtml(self, name):
+ if self.parser.innerHTML:
+ self.parser.parseError("unexpected-end-tag-after-body-innerhtml")
+ else:
+ self.parser.phase = self.parser.phases["afterAfterBody"]
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag-after-body",
+ {"name": token["name"]})
+ self.parser.phase = self.parser.phases["inBody"]
+ return token
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", startTagHtml)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([("html", endTagHtml)])
+ endTagHandler.default = endTagOther
+
+ class InFramesetPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-frameset
+ __slots__ = tuple()
+
+ def processEOF(self):
+ if self.tree.openElements[-1].name != "html":
+ self.parser.parseError("eof-in-frameset")
+ else:
+ assert self.parser.innerHTML
+
+ def processCharacters(self, token):
+ self.parser.parseError("unexpected-char-in-frameset")
+
+ def startTagFrameset(self, token):
+ self.tree.insertElement(token)
+
+ def startTagFrame(self, token):
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+
+ def startTagNoframes(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagOther(self, token):
+ self.parser.parseError("unexpected-start-tag-in-frameset",
+ {"name": token["name"]})
+
+ def endTagFrameset(self, token):
+ if self.tree.openElements[-1].name == "html":
+ # innerHTML case
+ self.parser.parseError("unexpected-frameset-in-frameset-innerhtml")
+ else:
+ self.tree.openElements.pop()
+ if (not self.parser.innerHTML and
+ self.tree.openElements[-1].name != "frameset"):
+ # If we're not in innerHTML mode and the current node is not a
+ # "frameset" element (anymore) then switch.
+ self.parser.phase = self.parser.phases["afterFrameset"]
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag-in-frameset",
+ {"name": token["name"]})
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", Phase.startTagHtml),
+ ("frameset", startTagFrameset),
+ ("frame", startTagFrame),
+ ("noframes", startTagNoframes)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ ("frameset", endTagFrameset)
+ ])
+ endTagHandler.default = endTagOther
+
+ class AfterFramesetPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#after3
+ __slots__ = tuple()
+
+ def processEOF(self):
+ # Stop parsing
+ pass
+
+ def processCharacters(self, token):
+ self.parser.parseError("unexpected-char-after-frameset")
+
+ def startTagNoframes(self, token):
+ return self.parser.phases["inHead"].processStartTag(token)
+
+ def startTagOther(self, token):
+ self.parser.parseError("unexpected-start-tag-after-frameset",
+ {"name": token["name"]})
+
+ def endTagHtml(self, token):
+ self.parser.phase = self.parser.phases["afterAfterFrameset"]
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag-after-frameset",
+ {"name": token["name"]})
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", Phase.startTagHtml),
+ ("noframes", startTagNoframes)
+ ])
+ startTagHandler.default = startTagOther
+
+ endTagHandler = _utils.MethodDispatcher([
+ ("html", endTagHtml)
+ ])
+ endTagHandler.default = endTagOther
+
+ class AfterAfterBodyPhase(Phase):
+ __slots__ = tuple()
+
+ def processEOF(self):
+ pass
+
+ def processComment(self, token):
+ self.tree.insertComment(token, self.tree.document)
+
+ def processSpaceCharacters(self, token):
+ return self.parser.phases["inBody"].processSpaceCharacters(token)
+
+ def processCharacters(self, token):
+ self.parser.parseError("expected-eof-but-got-char")
+ self.parser.phase = self.parser.phases["inBody"]
+ return token
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagOther(self, token):
+ self.parser.parseError("expected-eof-but-got-start-tag",
+ {"name": token["name"]})
+ self.parser.phase = self.parser.phases["inBody"]
+ return token
+
+ def processEndTag(self, token):
+ self.parser.parseError("expected-eof-but-got-end-tag",
+ {"name": token["name"]})
+ self.parser.phase = self.parser.phases["inBody"]
+ return token
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", startTagHtml)
+ ])
+ startTagHandler.default = startTagOther
+
+ class AfterAfterFramesetPhase(Phase):
+ __slots__ = tuple()
+
+ def processEOF(self):
+ pass
+
+ def processComment(self, token):
+ self.tree.insertComment(token, self.tree.document)
+
+ def processSpaceCharacters(self, token):
+ return self.parser.phases["inBody"].processSpaceCharacters(token)
+
+ def processCharacters(self, token):
+ self.parser.parseError("expected-eof-but-got-char")
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagNoFrames(self, token):
+ return self.parser.phases["inHead"].processStartTag(token)
+
+ def startTagOther(self, token):
+ self.parser.parseError("expected-eof-but-got-start-tag",
+ {"name": token["name"]})
+
+ def processEndTag(self, token):
+ self.parser.parseError("expected-eof-but-got-end-tag",
+ {"name": token["name"]})
+
+ startTagHandler = _utils.MethodDispatcher([
+ ("html", startTagHtml),
+ ("noframes", startTagNoFrames)
+ ])
+ startTagHandler.default = startTagOther
+
+ # pylint:enable=unused-argument
+
+ return {
+ "initial": InitialPhase,
+ "beforeHtml": BeforeHtmlPhase,
+ "beforeHead": BeforeHeadPhase,
+ "inHead": InHeadPhase,
+ "inHeadNoscript": InHeadNoscriptPhase,
+ "afterHead": AfterHeadPhase,
+ "inBody": InBodyPhase,
+ "text": TextPhase,
+ "inTable": InTablePhase,
+ "inTableText": InTableTextPhase,
+ "inCaption": InCaptionPhase,
+ "inColumnGroup": InColumnGroupPhase,
+ "inTableBody": InTableBodyPhase,
+ "inRow": InRowPhase,
+ "inCell": InCellPhase,
+ "inSelect": InSelectPhase,
+ "inSelectInTable": InSelectInTablePhase,
+ "inForeignContent": InForeignContentPhase,
+ "afterBody": AfterBodyPhase,
+ "inFrameset": InFramesetPhase,
+ "afterFrameset": AfterFramesetPhase,
+ "afterAfterBody": AfterAfterBodyPhase,
+ "afterAfterFrameset": AfterAfterFramesetPhase,
+ # XXX after after frameset
+ }
+
+
+def adjust_attributes(token, replacements):
+ needs_adjustment = viewkeys(token['data']) & viewkeys(replacements)
+ if needs_adjustment:
+ token['data'] = type(token['data'])((replacements.get(k, k), v)
+ for k, v in token['data'].items())
+
+
+def impliedTagToken(name, type="EndTag", attributes=None,
+ selfClosing=False):
+ if attributes is None:
+ attributes = {}
+ return {"type": tokenTypes[type], "name": name, "data": attributes,
+ "selfClosing": selfClosing}
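+# For example, impliedTagToken("p") yields
+# {"type": tokenTypes["EndTag"], "name": "p", "data": {}, "selfClosing": False},
+# which the phases above feed back into their processStartTag/processEndTag
+# handlers to synthesise tags implied by the spec.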
+
+
+class ParseError(Exception):
+ """Error in parsed document"""
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/serializer.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/serializer.py
new file mode 100644
index 0000000000..c66df68392
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/serializer.py
@@ -0,0 +1,409 @@
+from __future__ import absolute_import, division, unicode_literals
+from six import text_type
+
+import re
+
+from codecs import register_error, xmlcharrefreplace_errors
+
+from .constants import voidElements, booleanAttributes, spaceCharacters
+from .constants import rcdataElements, entities, xmlEntities
+from . import treewalkers, _utils
+from xml.sax.saxutils import escape
+
+_quoteAttributeSpecChars = "".join(spaceCharacters) + "\"'=<>`"
+_quoteAttributeSpec = re.compile("[" + _quoteAttributeSpecChars + "]")
+_quoteAttributeLegacy = re.compile("[" + _quoteAttributeSpecChars +
+ "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n"
+ "\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15"
+ "\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x2f\x60\xa0\u1680\u180e\u180f\u2000"
+ "\u2001\u2002\u2003\u2004\u2005\u2006\u2007"
+ "\u2008\u2009\u200a\u2028\u2029\u202f\u205f"
+ "\u3000]")
+
+
+_encode_entity_map = {}
+_is_ucs4 = len("\U0010FFFF") == 1
+for k, v in list(entities.items()):
+ # skip multi-character entities
+ if ((_is_ucs4 and len(v) > 1) or
+ (not _is_ucs4 and len(v) > 2)):
+ continue
+ if v != "&":
+ if len(v) == 2:
+ v = _utils.surrogatePairToCodepoint(v)
+ else:
+ v = ord(v)
+ if v not in _encode_entity_map or k.islower():
+ # prefer &lt; over &LT; and similarly for &amp;, &gt;, etc.
+ _encode_entity_map[v] = k
+
+
+def htmlentityreplace_errors(exc):
+ if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):
+ res = []
+ codepoints = []
+ skip = False
+ for i, c in enumerate(exc.object[exc.start:exc.end]):
+ if skip:
+ skip = False
+ continue
+ index = i + exc.start
+ if _utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]):
+ codepoint = _utils.surrogatePairToCodepoint(exc.object[index:index + 2])
+ skip = True
+ else:
+ codepoint = ord(c)
+ codepoints.append(codepoint)
+ for cp in codepoints:
+ e = _encode_entity_map.get(cp)
+ if e:
+ res.append("&")
+ res.append(e)
+ if not e.endswith(";"):
+ res.append(";")
+ else:
+ res.append("&#x%s;" % (hex(cp)[2:]))
+ return ("".join(res), exc.end)
+ else:
+ return xmlcharrefreplace_errors(exc)
+
+
+register_error("htmlentityreplace", htmlentityreplace_errors)
+
+
+def serialize(input, tree="etree", encoding=None, **serializer_opts):
+ """Serializes the input token stream using the specified treewalker
+
+ :arg input: the token stream to serialize
+
+ :arg tree: the treewalker to use
+
+ :arg encoding: the encoding to use
+
+ :arg serializer_opts: any options to pass to the
+ :py:class:`html5lib.serializer.HTMLSerializer` that gets created
+
+ :returns: the tree serialized as a string
+
+ Example:
+
+ >>> from html5lib.html5parser import parse
+ >>> from html5lib.serializer import serialize
+ >>> token_stream = parse('<html><body><p>Hi!</p></body></html>')
+ >>> serialize(token_stream, omit_optional_tags=False)
+ '<html><head></head><body><p>Hi!</p></body></html>'
+
+ """
+ # XXX: Should we cache this?
+ walker = treewalkers.getTreeWalker(tree)
+ s = HTMLSerializer(**serializer_opts)
+ return s.render(walker(input), encoding)
+
+
+class HTMLSerializer(object):
+
+ # attribute quoting options
+ quote_attr_values = "legacy" # be secure by default
+ quote_char = '"'
+ use_best_quote_char = True
+
+ # tag syntax options
+ omit_optional_tags = True
+ minimize_boolean_attributes = True
+ use_trailing_solidus = False
+ space_before_trailing_solidus = True
+
+ # escaping options
+ escape_lt_in_attrs = False
+ escape_rcdata = False
+ resolve_entities = True
+
+ # miscellaneous options
+ alphabetical_attributes = False
+ inject_meta_charset = True
+ strip_whitespace = False
+ sanitize = False
+
+ options = ("quote_attr_values", "quote_char", "use_best_quote_char",
+ "omit_optional_tags", "minimize_boolean_attributes",
+ "use_trailing_solidus", "space_before_trailing_solidus",
+ "escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
+ "alphabetical_attributes", "inject_meta_charset",
+ "strip_whitespace", "sanitize")
+
+ def __init__(self, **kwargs):
+ """Initialize HTMLSerializer
+
+ :arg inject_meta_charset: Whether or not to inject the meta charset.
+
+ Defaults to ``True``.
+
+ :arg quote_attr_values: Whether to quote attribute values that don't
+ require quoting per legacy browser behavior (``"legacy"``), when
+ required by the standard (``"spec"``), or always (``"always"``).
+
+ Defaults to ``"legacy"``.
+
+ :arg quote_char: Use given quote character for attribute quoting.
+
+ Defaults to ``"``, which uses double quotes unless the attribute
+ value contains a double quote, in which case single quotes are
+ used.
+
+ :arg escape_lt_in_attrs: Whether or not to escape ``<`` in attribute
+ values.
+
+ Defaults to ``False``.
+
+ :arg escape_rcdata: Whether to escape, inside rcdata elements such as
+ ``style``, the characters that would need escaping in normal
+ elements.
+
+ Defaults to ``False``.
+
+ :arg resolve_entities: Whether to resolve named character entities that
+ appear in the source tree. The XML predefined entities &lt; &gt;
+ &amp; &quot; &apos; are unaffected by this setting.
+
+ Defaults to ``True``.
+
+ :arg strip_whitespace: Whether to remove semantically meaningless
+ whitespace. (This compresses all whitespace to a single space
+ except within ``pre``.)
+
+ Defaults to ``False``.
+
+ :arg minimize_boolean_attributes: Shortens boolean attributes to give
+ just the attribute value, for example::
+
+ <input disabled="disabled">
+
+ becomes::
+
+ <input disabled>
+
+ Defaults to ``True``.
+
+ :arg use_trailing_solidus: Includes a close-tag slash at the end of the
+ start tag of void elements (empty elements whose end tag is
+ forbidden). E.g. ``<hr/>``.
+
+ Defaults to ``False``.
+
+ :arg space_before_trailing_solidus: Places a space immediately before
+ the closing slash in a tag using a trailing solidus. E.g.
+ ``<hr />``. Requires ``use_trailing_solidus=True``.
+
+ Defaults to ``True``.
+
+ :arg sanitize: Strip all unsafe or unknown constructs from output.
+ See :py:class:`html5lib.filters.sanitizer.Filter`.
+
+ Defaults to ``False``.
+
+ :arg omit_optional_tags: Omit start/end tags that are optional.
+
+ Defaults to ``True``.
+
+ :arg alphabetical_attributes: Reorder attributes to be in alphabetical order.
+
+ Defaults to ``False``.
+
+ """
+ unexpected_args = frozenset(kwargs) - frozenset(self.options)
+ if len(unexpected_args) > 0:
+ raise TypeError("__init__() got an unexpected keyword argument '%s'" % next(iter(unexpected_args)))
+ if 'quote_char' in kwargs:
+ self.use_best_quote_char = False
+ for attr in self.options:
+ setattr(self, attr, kwargs.get(attr, getattr(self, attr)))
+ self.errors = []
+ self.strict = False
+
+ def encode(self, string):
+ assert(isinstance(string, text_type))
+ if self.encoding:
+ return string.encode(self.encoding, "htmlentityreplace")
+ else:
+ return string
+
+ def encodeStrict(self, string):
+ assert(isinstance(string, text_type))
+ if self.encoding:
+ return string.encode(self.encoding, "strict")
+ else:
+ return string
+
+ def serialize(self, treewalker, encoding=None):
+ # pylint:disable=too-many-nested-blocks
+ self.encoding = encoding
+ in_cdata = False
+ self.errors = []
+
+ if encoding and self.inject_meta_charset:
+ from .filters.inject_meta_charset import Filter
+ treewalker = Filter(treewalker, encoding)
+ # The alphabetical-attributes filter goes here on the assumption that
+ # none of the later filters add attributes or change their order; it
+ # needs to run before the sanitizer so escaped elements come out correctly
+ if self.alphabetical_attributes:
+ from .filters.alphabeticalattributes import Filter
+ treewalker = Filter(treewalker)
+ # WhitespaceFilter should be used before OptionalTagFilter
+ # for maximum efficiency of the latter filter
+ if self.strip_whitespace:
+ from .filters.whitespace import Filter
+ treewalker = Filter(treewalker)
+ if self.sanitize:
+ from .filters.sanitizer import Filter
+ treewalker = Filter(treewalker)
+ if self.omit_optional_tags:
+ from .filters.optionaltags import Filter
+ treewalker = Filter(treewalker)
+
+ for token in treewalker:
+ type = token["type"]
+ if type == "Doctype":
+ doctype = "<!DOCTYPE %s" % token["name"]
+
+ if token["publicId"]:
+ doctype += ' PUBLIC "%s"' % token["publicId"]
+ elif token["systemId"]:
+ doctype += " SYSTEM"
+ if token["systemId"]:
+ if token["systemId"].find('"') >= 0:
+ if token["systemId"].find("'") >= 0:
+ self.serializeError("System identifier contains both single and double quote characters")
+ quote_char = "'"
+ else:
+ quote_char = '"'
+ doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char)
+
+ doctype += ">"
+ yield self.encodeStrict(doctype)
+
+ elif type in ("Characters", "SpaceCharacters"):
+ if type == "SpaceCharacters" or in_cdata:
+ if in_cdata and token["data"].find("</") >= 0:
+ self.serializeError("Unexpected </ in CDATA")
+ yield self.encode(token["data"])
+ else:
+ yield self.encode(escape(token["data"]))
+
+ elif type in ("StartTag", "EmptyTag"):
+ name = token["name"]
+ yield self.encodeStrict("<%s" % name)
+ if name in rcdataElements and not self.escape_rcdata:
+ in_cdata = True
+ elif in_cdata:
+ self.serializeError("Unexpected child element of a CDATA element")
+ for (_, attr_name), attr_value in token["data"].items():
+ # TODO: Add namespace support here
+ k = attr_name
+ v = attr_value
+ yield self.encodeStrict(' ')
+
+ yield self.encodeStrict(k)
+ if not self.minimize_boolean_attributes or \
+ (k not in booleanAttributes.get(name, tuple()) and
+ k not in booleanAttributes.get("", tuple())):
+ yield self.encodeStrict("=")
+ if self.quote_attr_values == "always" or len(v) == 0:
+ quote_attr = True
+ elif self.quote_attr_values == "spec":
+ quote_attr = _quoteAttributeSpec.search(v) is not None
+ elif self.quote_attr_values == "legacy":
+ quote_attr = _quoteAttributeLegacy.search(v) is not None
+ else:
+ raise ValueError("quote_attr_values must be one of: "
+ "'always', 'spec', or 'legacy'")
+ v = v.replace("&", "&amp;")
+ if self.escape_lt_in_attrs:
+ v = v.replace("<", "&lt;")
+ if quote_attr:
+ quote_char = self.quote_char
+ if self.use_best_quote_char:
+ if "'" in v and '"' not in v:
+ quote_char = '"'
+ elif '"' in v and "'" not in v:
+ quote_char = "'"
+ if quote_char == "'":
+ v = v.replace("'", "&#39;")
+ else:
+ v = v.replace('"', "&quot;")
+ yield self.encodeStrict(quote_char)
+ yield self.encode(v)
+ yield self.encodeStrict(quote_char)
+ else:
+ yield self.encode(v)
+ if name in voidElements and self.use_trailing_solidus:
+ if self.space_before_trailing_solidus:
+ yield self.encodeStrict(" /")
+ else:
+ yield self.encodeStrict("/")
+ yield self.encode(">")
+
+ elif type == "EndTag":
+ name = token["name"]
+ if name in rcdataElements:
+ in_cdata = False
+ elif in_cdata:
+ self.serializeError("Unexpected child element of a CDATA element")
+ yield self.encodeStrict("</%s>" % name)
+
+ elif type == "Comment":
+ data = token["data"]
+ if data.find("--") >= 0:
+ self.serializeError("Comment contains --")
+ yield self.encodeStrict("<!--%s-->" % token["data"])
+
+ elif type == "Entity":
+ name = token["name"]
+ key = name + ";"
+ if key not in entities:
+ self.serializeError("Entity %s not recognized" % name)
+ if self.resolve_entities and key not in xmlEntities:
+ data = entities[key]
+ else:
+ data = "&%s;" % name
+ yield self.encodeStrict(data)
+
+ else:
+ self.serializeError(token["data"])
+
+ def render(self, treewalker, encoding=None):
+ """Serializes the stream from the treewalker into a string
+
+ :arg treewalker: the treewalker to serialize
+
+ :arg encoding: the string encoding to use
+
+ :returns: the serialized tree
+
+ Example:
+
+ >>> from html5lib import parse, getTreeWalker
+ >>> from html5lib.serializer import HTMLSerializer
+ >>> token_stream = parse('<html><body>Hi!</body></html>')
+ >>> walker = getTreeWalker('etree')
+ >>> serializer = HTMLSerializer(omit_optional_tags=False)
+ >>> serializer.render(walker(token_stream))
+ '<html><head></head><body>Hi!</body></html>'
+
+ """
+ if encoding:
+ return b"".join(list(self.serialize(treewalker, encoding)))
+ else:
+ return "".join(list(self.serialize(treewalker)))
+
+ def serializeError(self, data="XXX ERROR MESSAGE NEEDED"):
+ # XXX The idea is to make data mandatory.
+ self.errors.append(data)
+ if self.strict:
+ raise SerializeError
+
+
+class SerializeError(Exception):
+ """Error in serialized tree"""
+ pass
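Reviewer note (not part of the patch): a quick sketch of the option handling documented in HTMLSerializer.__init__, using the module-level serialize() helper above, which forwards **serializer_opts straight to HTMLSerializer:

    import html5lib
    from html5lib.serializer import serialize

    doc = html5lib.parse("<p class=note id=x1>a &amp; b</p>")

    # "legacy" (the default) quotes anything a legacy parser might misread,
    # "spec" quotes only what the HTML spec requires, "always" quotes everything.
    print(serialize(doc, quote_attr_values="spec"))
    print(serialize(doc, quote_attr_values="always", quote_char="'"))
    print(serialize(doc, omit_optional_tags=False))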
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/__init__.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/__init__.py
new file mode 100644
index 0000000000..b8ce2de32e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/__init__.py
@@ -0,0 +1 @@
+from __future__ import absolute_import, division, unicode_literals
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/conftest.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/conftest.py
new file mode 100644
index 0000000000..dad167c583
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/conftest.py
@@ -0,0 +1,108 @@
+from __future__ import print_function
+import os.path
+import sys
+
+import pkg_resources
+import pytest
+
+from .tree_construction import TreeConstructionFile
+from .tokenizer import TokenizerFile
+from .sanitizer import SanitizerFile
+
+_dir = os.path.abspath(os.path.dirname(__file__))
+_root = os.path.join(_dir, "..", "..")
+_testdata = os.path.join(_dir, "testdata")
+_tree_construction = os.path.join(_testdata, "tree-construction")
+_tokenizer = os.path.join(_testdata, "tokenizer")
+_sanitizer_testdata = os.path.join(_dir, "sanitizer-testdata")
+
+
+def fail_if_missing_pytest_expect():
+ """Throws an exception halting pytest if pytest-expect isn't working"""
+ try:
+ from pytest_expect import expect # noqa
+ except ImportError:
+ header = '*' * 78
+ print(
+ '\n' +
+ header + '\n' +
+ 'ERROR: Either pytest-expect or its dependency u-msgpack-python is not\n' +
+ 'installed. Please install them both before running pytest.\n' +
+ header + '\n',
+ file=sys.stderr
+ )
+ raise
+
+
+fail_if_missing_pytest_expect()
+
+
+def pytest_configure(config):
+ msgs = []
+
+ if not os.path.exists(_testdata):
+ msg = "testdata not available! "
+ if os.path.exists(os.path.join(_root, ".git")):
+ msg += ("Please run git submodule update --init --recursive " +
+ "and then run tests again.")
+ else:
+ msg += ("The testdata doesn't appear to be included with this package, " +
+ "so finding the right version will be hard. :(")
+ msgs.append(msg)
+
+ if config.option.update_xfail:
+ # Check for optional requirements
+ req_file = os.path.join(_root, "requirements-optional.txt")
+ if os.path.exists(req_file):
+ with open(req_file, "r") as fp:
+ for line in fp:
+ if (line.strip() and
+ not (line.startswith("-r") or
+ line.startswith("#"))):
+ if ";" in line:
+ spec, marker = line.strip().split(";", 1)
+ else:
+ spec, marker = line.strip(), None
+ req = pkg_resources.Requirement.parse(spec)
+ if marker and not pkg_resources.evaluate_marker(marker):
+ msgs.append("%s not available in this environment" % spec)
+ else:
+ try:
+ installed = pkg_resources.working_set.find(req)
+ except pkg_resources.VersionConflict:
+ msgs.append("Outdated version of %s installed, need %s" % (req.name, spec))
+ else:
+ if not installed:
+ msgs.append("Need %s" % spec)
+
+ # Check cElementTree
+ import xml.etree.ElementTree as ElementTree
+
+ try:
+ import xml.etree.cElementTree as cElementTree
+ except ImportError:
+ msgs.append("cElementTree unable to be imported")
+ else:
+ if cElementTree.Element is ElementTree.Element:
+ msgs.append("cElementTree is just an alias for ElementTree")
+
+ if msgs:
+ pytest.exit("\n".join(msgs))
+
+
+def pytest_collect_file(path, parent):
+ dir = os.path.abspath(path.dirname)
+ dir_and_parents = set()
+ while dir not in dir_and_parents:
+ dir_and_parents.add(dir)
+ dir = os.path.dirname(dir)
+
+ if _tree_construction in dir_and_parents:
+ if path.ext == ".dat":
+ return TreeConstructionFile(path, parent)
+ elif _tokenizer in dir_and_parents:
+ if path.ext == ".test":
+ return TokenizerFile(path, parent)
+ elif _sanitizer_testdata in dir_and_parents:
+ if path.ext == ".dat":
+ return SanitizerFile(path, parent)
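Reviewer note: the directory walk in pytest_collect_file above terminates because os.path.dirname() of the filesystem root is the root itself, so the loop stops once it revisits a directory. A standalone sketch of the same idea (the helper name here is hypothetical):

    import os.path

    def dir_and_parents(path):
        d = os.path.abspath(os.path.dirname(path))
        seen = set()
        while d not in seen:      # dirname() of the root returns the root
            seen.add(d)
            d = os.path.dirname(d)
        return seen

    print(dir_and_parents("html5lib/tests/testdata/tokenizer/test1.test"))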
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/sanitizer-testdata/tests1.dat b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/sanitizer-testdata/tests1.dat
new file mode 100644
index 0000000000..74e8833686
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/sanitizer-testdata/tests1.dat
@@ -0,0 +1,433 @@
+[
+ {
+ "name": "IE_Comments",
+ "input": "<!--[if gte IE 4]><script>alert('XSS');</script><![endif]-->",
+ "output": ""
+ },
+
+ {
+ "name": "IE_Comments_2",
+ "input": "<![if !IE 5]><script>alert('XSS');</script><![endif]>",
+ "output": "&lt;script&gt;alert('XSS');&lt;/script&gt;"
+ },
+
+ {
+ "name": "allow_colons_in_path_component",
+ "input": "<a href=\"./this:that\">foo</a>",
+ "output": "<a href='./this:that'>foo</a>"
+ },
+
+ {
+ "name": "background_attribute",
+ "input": "<div background=\"javascript:alert('XSS')\"></div>",
+ "output": "<div></div>"
+ },
+
+ {
+ "name": "bgsound",
+ "input": "<bgsound src=\"javascript:alert('XSS');\" />",
+ "output": "&lt;bgsound src=\"javascript:alert('XSS');\"&gt;&lt;/bgsound&gt;"
+ },
+
+ {
+ "name": "div_background_image_unicode_encoded",
+ "input": "<div style=\"background-image:\u00a5\u00a2\u006C\u0028'\u006a\u0061\u00a6\u0061\u00a3\u0063\u00a2\u0069\u00a0\u00a4\u003a\u0061\u006c\u0065\u00a2\u00a4\u0028.1027\u0058.1053\u0053\u0027\u0029'\u0029\">foo</div>",
+ "output": "<div style=''>foo</div>"
+ },
+
+ {
+ "name": "div_expression",
+ "input": "<div style=\"width: expression(alert('XSS'));\">foo</div>",
+ "output": "<div style=''>foo</div>"
+ },
+
+ {
+ "name": "double_open_angle_brackets",
+ "input": "<img src=http://ha.ckers.org/scriptlet.html <",
+ "output": ""
+ },
+
+ {
+ "name": "double_open_angle_brackets_2",
+ "input": "<script src=http://ha.ckers.org/scriptlet.html <",
+ "output": ""
+ },
+
+ {
+ "name": "grave_accents",
+ "input": "<img src=`javascript:alert('XSS')` />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "img_dynsrc_lowsrc",
+ "input": "<img dynsrc=\"javascript:alert('XSS')\" />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "img_vbscript",
+ "input": "<img src='vbscript:msgbox(\"XSS\")' />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "input_image",
+ "input": "<input type=\"image\" src=\"javascript:alert('XSS');\" />",
+ "output": "<input type='image'/>"
+ },
+
+ {
+ "name": "link_stylesheets",
+ "input": "<link rel=\"stylesheet\" href=\"javascript:alert('XSS');\" />",
+ "output": "&lt;link href=\"javascript:alert('XSS');\" rel=\"stylesheet\"&gt;"
+ },
+
+ {
+ "name": "link_stylesheets_2",
+ "input": "<link rel=\"stylesheet\" href=\"http://ha.ckers.org/xss.css\" />",
+ "output": "&lt;link href=\"http://ha.ckers.org/xss.css\" rel=\"stylesheet\"&gt;"
+ },
+
+ {
+ "name": "list_style_image",
+ "input": "<li style=\"list-style-image: url(javascript:alert('XSS'))\">foo</li>",
+ "output": "<li style=''>foo</li>"
+ },
+
+ {
+ "name": "no_closing_script_tags",
+ "input": "<script src=http://ha.ckers.org/xss.js?<b>",
+ "output": "&lt;script src=\"http://ha.ckers.org/xss.js?&amp;lt;b\"&gt;&lt;/script&gt;"
+ },
+
+ {
+ "name": "non_alpha_non_digit",
+ "input": "<script/XSS src=\"http://ha.ckers.org/xss.js\"></script>",
+ "output": "&lt;script src=\"http://ha.ckers.org/xss.js\" xss=\"\"&gt;&lt;/script&gt;"
+ },
+
+ {
+ "name": "non_alpha_non_digit_2",
+ "input": "<a onclick!\\#$%&()*~+-_.,:;?@[/|\\]^`=alert(\"XSS\")>foo</a>",
+ "output": "<a>foo</a>"
+ },
+
+ {
+ "name": "non_alpha_non_digit_3",
+ "input": "<img/src=\"http://ha.ckers.org/xss.js\"/>",
+ "output": "<img src='http://ha.ckers.org/xss.js'/>"
+ },
+
+ {
+ "name": "non_alpha_non_digit_II",
+ "input": "<a href!\\#$%&()*~+-_.,:;?@[/|]^`=alert('XSS')>foo</a>",
+ "output": "<a>foo</a>"
+ },
+
+ {
+ "name": "non_alpha_non_digit_III",
+ "input": "<a/href=\"javascript:alert('XSS');\">foo</a>",
+ "output": "<a>foo</a>"
+ },
+
+ {
+ "name": "platypus",
+ "input": "<a href=\"http://www.ragingplatypus.com/\" style=\"display:block; position:absolute; left:0; top:0; width:100%; height:100%; z-index:1; background-color:black; background-image:url(http://www.ragingplatypus.com/i/cam-full.jpg); background-x:center; background-y:center; background-repeat:repeat;\">never trust your upstream platypus</a>",
+ "output": "<a href='http://www.ragingplatypus.com/' style='display: block; width: 100%; height: 100%; background-color: black; background-x: center; background-y: center;'>never trust your upstream platypus</a>"
+ },
+
+ {
+ "name": "protocol_resolution_in_script_tag",
+ "input": "<script src=//ha.ckers.org/.j></script>",
+ "output": "&lt;script src=\"//ha.ckers.org/.j\"&gt;&lt;/script&gt;"
+ },
+
+ {
+ "name": "should_allow_anchors",
+ "input": "<a href='foo' onclick='bar'><script>baz</script></a>",
+ "output": "<a href='foo'>&lt;script&gt;baz&lt;/script&gt;</a>"
+ },
+
+ {
+ "name": "should_allow_image_alt_attribute",
+ "input": "<img alt='foo' onclick='bar' />",
+ "output": "<img alt='foo'/>"
+ },
+
+ {
+ "name": "should_allow_image_height_attribute",
+ "input": "<img height='foo' onclick='bar' />",
+ "output": "<img height='foo'/>"
+ },
+
+ {
+ "name": "should_allow_image_src_attribute",
+ "input": "<img src='foo' onclick='bar' />",
+ "output": "<img src='foo'/>"
+ },
+
+ {
+ "name": "should_allow_image_width_attribute",
+ "input": "<img width='foo' onclick='bar' />",
+ "output": "<img width='foo'/>"
+ },
+
+ {
+ "name": "should_handle_blank_text",
+ "input": "",
+ "output": ""
+ },
+
+ {
+ "name": "should_handle_malformed_image_tags",
+ "input": "<img \"\"\"><script>alert(\"XSS\")</script>\">",
+ "output": "<img/>&lt;script&gt;alert(\"XSS\")&lt;/script&gt;\"&gt;"
+ },
+
+ {
+ "name": "should_handle_non_html",
+ "input": "abc",
+ "output": "abc"
+ },
+
+ {
+ "name": "should_not_fall_for_ridiculous_hack",
+ "input": "<img\nsrc\n=\n\"\nj\na\nv\na\ns\nc\nr\ni\np\nt\n:\na\nl\ne\nr\nt\n(\n'\nX\nS\nS\n'\n)\n\"\n />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_0",
+ "input": "<img src=\"javascript:alert('XSS');\" />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_1",
+ "input": "<img src=javascript:alert('XSS') />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_10",
+ "input": "<img src=\"jav&#x0A;ascript:alert('XSS');\" />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_11",
+ "input": "<img src=\"jav&#x0D;ascript:alert('XSS');\" />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_12",
+ "input": "<img src=\" &#14; javascript:alert('XSS');\" />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_13",
+ "input": "<img src=\"&#x20;javascript:alert('XSS');\" />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_14",
+ "input": "<img src=\"&#xA0;javascript:alert('XSS');\" />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_2",
+ "input": "<img src=\"JaVaScRiPt:alert('XSS')\" />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_3",
+ "input": "<img src='javascript:alert(&quot;XSS&quot;)' />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_4",
+ "input": "<img src='javascript:alert(String.fromCharCode(88,83,83))' />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_5",
+ "input": "<img src='&#106;&#97;&#118;&#97;&#115;&#99;&#114;&#105;&#112;&#116;&#58;&#97;&#108;&#101;&#114;&#116;&#40;&#39;&#88;&#83;&#83;&#39;&#41;' />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_6",
+ "input": "<img src='&#0000106;&#0000097;&#0000118;&#0000097;&#0000115;&#0000099;&#0000114;&#0000105;&#0000112;&#0000116;&#0000058;&#0000097;&#0000108;&#0000101;&#0000114;&#0000116;&#0000040;&#0000039;&#0000088;&#0000083;&#0000083;&#0000039;&#0000041' />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_7",
+ "input": "<img src='&#x6A;&#x61;&#x76;&#x61;&#x73;&#x63;&#x72;&#x69;&#x70;&#x74;&#x3A;&#x61;&#x6C;&#x65;&#x72;&#x74;&#x28;&#x27;&#x58;&#x53;&#x53;&#x27;&#x29' />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_8",
+ "input": "<img src=\"jav\tascript:alert('XSS');\" />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_not_fall_for_xss_image_hack_9",
+ "input": "<img src=\"jav&#x09;ascript:alert('XSS');\" />",
+ "output": "<img/>"
+ },
+
+ {
+ "name": "should_sanitize_half_open_scripts",
+ "input": "<img src=\"javascript:alert('XSS')\"",
+ "output": ""
+ },
+
+ {
+ "name": "should_sanitize_invalid_script_tag",
+ "input": "<script/XSS SRC=\"http://ha.ckers.org/xss.js\"></script>",
+ "output": "&lt;script src=\"http://ha.ckers.org/xss.js\" xss=\"\"&gt;&lt;/script&gt;"
+ },
+
+ {
+ "name": "should_sanitize_script_tag_with_multiple_open_brackets",
+ "input": "<<script>alert(\"XSS\");//<</script>",
+ "output": "&lt;&lt;script&gt;alert(\"XSS\");//&lt;&lt;/script&gt;"
+ },
+
+ {
+ "name": "should_sanitize_script_tag_with_multiple_open_brackets_2",
+ "input": "<iframe src=http://ha.ckers.org/scriptlet.html\n<",
+ "output": ""
+ },
+
+ {
+ "name": "should_sanitize_tag_broken_up_by_null",
+ "input": "<scr\u0000ipt>alert(\"XSS\")</scr\u0000ipt>",
+ "output": "&lt;scr\ufffdipt&gt;alert(\"XSS\")&lt;/scr\ufffdipt&gt;"
+ },
+
+ {
+ "name": "should_sanitize_unclosed_script",
+ "input": "<script src=http://ha.ckers.org/xss.js?<b>",
+ "output": "&lt;script src=\"http://ha.ckers.org/xss.js?&amp;lt;b\"&gt;&lt;/script&gt;"
+ },
+
+ {
+ "name": "should_strip_href_attribute_in_a_with_bad_protocols",
+ "input": "<a href=\"javascript:XSS\" title=\"1\">boo</a>",
+ "output": "<a title='1'>boo</a>"
+ },
+
+ {
+ "name": "should_strip_href_attribute_in_a_with_bad_protocols_and_whitespace",
+ "input": "<a href=\" javascript:XSS\" title=\"1\">boo</a>",
+ "output": "<a title='1'>boo</a>"
+ },
+
+ {
+ "name": "should_strip_src_attribute_in_img_with_bad_protocols",
+ "input": "<img src=\"javascript:XSS\" title=\"1\">boo</img>",
+ "output": "<img title='1'/>boo"
+ },
+
+ {
+ "name": "should_strip_src_attribute_in_img_with_bad_protocols_and_whitespace",
+ "input": "<img src=\" javascript:XSS\" title=\"1\">boo</img>",
+ "output": "<img title='1'/>boo"
+ },
+
+ {
+ "name": "xml_base",
+ "input": "<div xml:base=\"javascript:alert('XSS');//\">foo</div>",
+ "output": "<div>foo</div>"
+ },
+
+ {
+ "name": "xul",
+ "input": "<p style=\"-moz-binding:url('http://ha.ckers.org/xssmoz.xml#xss')\">fubar</p>",
+ "output": "<p style=''>fubar</p>"
+ },
+
+ {
+ "name": "quotes_in_attributes",
+ "input": "<img src='foo' title='\"foo\" bar' />",
+ "output": "<img src='foo' title='\"foo\" bar'/>"
+ },
+
+ {
+ "name": "uri_refs_in_svg_attributes",
+ "input": "<svg><rect fill='url(#foo)' />",
+ "output": "<svg><rect fill='url(#foo)'></rect></svg>"
+ },
+
+ {
+ "name": "absolute_uri_refs_in_svg_attributes",
+ "input": "<svg><rect fill='url(http://bad.com/) #fff' />",
+ "output": "<svg><rect fill=' #fff'></rect></svg>"
+ },
+
+ {
+ "name": "uri_ref_with_space_in svg_attribute",
+ "input": "<svg><rect fill='url(\n#foo)' />",
+ "output": "<svg><rect fill='url(\n#foo)'></rect></svg>"
+ },
+
+ {
+ "name": "absolute_uri_ref_with_space_in svg_attribute",
+ "input": "<svg><rect fill=\"url(\nhttp://bad.com/)\" />",
+ "output": "<svg><rect fill=' '></rect></svg>"
+ },
+
+ {
+ "name": "allow_html5_image_tag",
+ "input": "<image src='foo' />",
+ "output": "<img src='foo'/>"
+ },
+
+ {
+ "name": "style_attr_end_with_nothing",
+ "input": "<div style=\"color: blue\" />",
+ "output": "<div style='color: blue;'></div>"
+ },
+
+ {
+ "name": "style_attr_end_with_space",
+ "input": "<div style=\"color: blue \" />",
+ "output": "<div style='color: blue ;'></div>"
+ },
+
+ {
+ "name": "style_attr_end_with_semicolon",
+ "input": "<div style=\"color: blue;\" />",
+ "output": "<div style='color: blue;'></div>"
+ },
+
+ {
+ "name": "style_attr_end_with_semicolon_space",
+ "input": "<div style=\"color: blue; \" />",
+ "output": "<div style='color: blue;'></div>"
+ },
+
+ {
+ "name": "attributes_with_embedded_quotes",
+ "input": "<img src=doesntexist.jpg\"'onerror=\"alert(1) />",
+ "output": "<img src='doesntexist.jpg\"&#39;onerror=\"alert(1)'/>"
+ },
+
+ {
+ "name": "attributes_with_embedded_quotes_II",
+ "input": "<img src=notthere.jpg\"\"onerror=\"alert(2) />",
+ "output": "<img src='notthere.jpg\"\"onerror=\"alert(2)'/>"
+ }
+]
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/sanitizer.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/sanitizer.py
new file mode 100644
index 0000000000..bb4834214f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/sanitizer.py
@@ -0,0 +1,51 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import codecs
+import json
+
+import pytest
+
+from html5lib import parseFragment, serialize
+
+
+class SanitizerFile(pytest.File):
+ def collect(self):
+ with codecs.open(str(self.fspath), "r", encoding="utf-8") as fp:
+ tests = json.load(fp)
+ for i, test in enumerate(tests):
+ yield SanitizerTest(str(i), self, test=test)
+
+
+class SanitizerTest(pytest.Item):
+ def __init__(self, name, parent, test):
+ super(SanitizerTest, self).__init__(name, parent)
+ self.obj = lambda: 1 # this is to hack around skipif needing a function!
+ self.test = test
+
+ def runtest(self):
+ input = self.test["input"]
+ expected = self.test["output"]
+
+ parsed = parseFragment(input)
+ with pytest.deprecated_call():
+ serialized = serialize(parsed,
+ sanitize=True,
+ omit_optional_tags=False,
+ use_trailing_solidus=True,
+ space_before_trailing_solidus=False,
+ quote_attr_values="always",
+ quote_char="'",
+ alphabetical_attributes=True)
+ errorMsg = "\n".join(["\n\nInput:", input,
+ "\nExpected:", expected,
+ "\nReceived:", serialized])
+ assert expected == serialized, errorMsg
+
+ def repr_failure(self, excinfo):
+ traceback = excinfo.traceback
+ ntraceback = traceback.cut(path=__file__)
+ excinfo.traceback = ntraceback.filter()
+
+ return excinfo.getrepr(funcargs=True,
+ showlocals=False,
+ style="short", tbfilter=False)
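Reviewer note: a hedged, standalone replay of what SanitizerTest.runtest does for one tests1.dat entry (the "background_attribute" case above); the serialize() options mirror runtest, and sanitize=True goes through the deprecated sanitizer filter, which is why runtest wraps the call in pytest.deprecated_call():

    from html5lib import parseFragment, serialize

    case = {"input": "<div background=\"javascript:alert('XSS')\"></div>",
            "output": "<div></div>"}

    fragment = parseFragment(case["input"])
    result = serialize(fragment,
                       sanitize=True,
                       omit_optional_tags=False,
                       use_trailing_solidus=True,
                       space_before_trailing_solidus=False,
                       quote_attr_values="always",
                       quote_char="'",
                       alphabetical_attributes=True)
    assert result == case["output"], result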
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/core.test b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/core.test
new file mode 100644
index 0000000000..55294b6831
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/core.test
@@ -0,0 +1,395 @@
+{
+ "tests": [
+ {
+ "expected": [
+ "<span title='test \"with\" &amp;quot;'>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "test \"with\" &quot;"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value escaping"
+ },
+ {
+ "expected": [
+ "<span title=foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value non-quoting"
+ },
+ {
+ "expected": [
+ "<span title=\"foo<bar\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo<bar"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value non-quoting (with <)"
+ },
+ {
+ "expected": [
+ "<span title=\"foo=bar\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo=bar"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value quoting (with =)"
+ },
+ {
+ "expected": [
+ "<span title=\"foo>bar\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo>bar"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value quoting (with >)"
+ },
+ {
+ "expected": [
+ "<span title='foo\"bar'>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo\"bar"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value quoting (with \")"
+ },
+ {
+ "expected": [
+ "<span title=\"foo'bar\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo'bar"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value quoting (with ')"
+ },
+ {
+ "expected": [
+ "<span title=\"foo'bar&quot;baz\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo'bar\"baz"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value quoting (with both \" and ')"
+ },
+ {
+ "expected": [
+ "<span title=\"foo bar\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo bar"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value quoting (with space)"
+ },
+ {
+ "expected": [
+ "<span title=\"foo\tbar\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo\tbar"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value quoting (with tab)"
+ },
+ {
+ "expected": [
+ "<span title=\"foo\nbar\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo\nbar"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value quoting (with LF)"
+ },
+ {
+ "expected": [
+ "<span title=\"foo\rbar\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo\rbar"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value quoting (with CR)"
+ },
+ {
+ "expected": [
+ "<span title=\"foo\u000bbar\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo\u000bbar"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value non-quoting (with linetab)"
+ },
+ {
+ "expected": [
+ "<span title=\"foo\fbar\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "foo\fbar"
+ }
+ ]
+ ]
+ ],
+ "description": "proper attribute value quoting (with form feed)"
+ },
+ {
+ "expected": [
+ "<img>"
+ ],
+ "input": [
+ [
+ "EmptyTag",
+ "img",
+ {}
+ ]
+ ],
+ "description": "void element (as EmptyTag token)"
+ },
+ {
+ "expected": [
+ "<!DOCTYPE foo>"
+ ],
+ "input": [
+ [
+ "Doctype",
+ "foo"
+ ]
+ ],
+ "description": "doctype in error"
+ },
+ {
+ "expected": [
+ "a&lt;b&gt;c&amp;d"
+ ],
+ "input": [
+ [
+ "Characters",
+ "a<b>c&d"
+ ]
+ ],
+ "description": "character data",
+ "options": {
+ "encoding": "utf-8"
+ }
+ },
+ {
+ "expected": [
+ "<script>a<b>c&d"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "script",
+ {}
+ ],
+ [
+ "Characters",
+ "a<b>c&d"
+ ]
+ ],
+ "description": "rcdata"
+ },
+ {
+ "expected": [
+ "<!DOCTYPE HTML>"
+ ],
+ "input": [
+ [
+ "Doctype",
+ "HTML"
+ ]
+ ],
+ "description": "doctype"
+ },
+ {
+ "expected": [
+ "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">"
+ ],
+ "input": [
+ [
+ "Doctype",
+ "HTML",
+ "-//W3C//DTD HTML 4.01//EN",
+ "http://www.w3.org/TR/html4/strict.dtd"
+ ]
+ ],
+ "description": "HTML 4.01 DOCTYPE"
+ },
+ {
+ "expected": [
+ "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\">"
+ ],
+ "input": [
+ [
+ "Doctype",
+ "HTML",
+ "-//W3C//DTD HTML 4.01//EN"
+ ]
+ ],
+ "description": "HTML 4.01 DOCTYPE without system identifier"
+ },
+ {
+ "expected": [
+ "<!DOCTYPE html SYSTEM \"http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd\">"
+ ],
+ "input": [
+ [
+ "Doctype",
+ "html",
+ "",
+ "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"
+ ]
+ ],
+ "description": "IBM DOCTYPE without public identifier"
+ }
+ ]
+}
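Reviewer note: a rough, non-authoritative illustration of the first core.test case above ("proper attribute value escaping"). Feeding an equivalent span through HTMLSerializer reproduces both the &amp; escaping and the best-quote-char choice of single quotes:

    import html5lib
    from html5lib.serializer import HTMLSerializer

    frag = html5lib.parseFragment('<span title=\'test "with" &amp;quot;\'></span>')
    walker = html5lib.getTreeWalker("etree")
    out = HTMLSerializer(omit_optional_tags=False).render(walker(frag))
    print(out)   # <span title='test "with" &amp;quot;'></span>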
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/injectmeta.test b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/injectmeta.test
new file mode 100644
index 0000000000..399590c3f3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/injectmeta.test
@@ -0,0 +1,350 @@
+{
+ "tests": [
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ]
+ ],
+ "description": "no encoding",
+ "options": {
+ "inject_meta_charset": true
+ }
+ },
+ {
+ "expected": [
+ "<meta charset=utf-8>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ]
+ ],
+ "description": "empytag head",
+ "options": {
+ "encoding": "utf-8",
+ "inject_meta_charset": true
+ }
+ },
+ {
+ "expected": [
+ "<meta charset=utf-8><title>foo</title>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "title",
+ {}
+ ],
+ [
+ "Characters",
+ "foo"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "title"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ]
+ ],
+ "description": "head w/title",
+ "options": {
+ "encoding": "utf-8",
+ "inject_meta_charset": true
+ }
+ },
+ {
+ "expected": [
+ "<meta charset=utf-8>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "EmptyTag",
+ "meta",
+ [
+ {
+ "namespace": null,
+ "name": "charset",
+ "value": "ascii"
+ }
+ ]
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ]
+ ],
+ "description": "head w/meta-charset",
+ "options": {
+ "encoding": "utf-8",
+ "inject_meta_charset": true
+ }
+ },
+ {
+ "expected": [
+ "<meta charset=utf-8><meta charset=utf-8>",
+ "<head><meta charset=utf-8><meta charset=ascii>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "EmptyTag",
+ "meta",
+ [
+ {
+ "namespace": null,
+ "name": "charset",
+ "value": "ascii"
+ }
+ ]
+ ],
+ [
+ "EmptyTag",
+ "meta",
+ [
+ {
+ "namespace": null,
+ "name": "charset",
+ "value": "ascii"
+ }
+ ]
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ]
+ ],
+ "description": "head w/ two meta-charset",
+ "options": {
+ "encoding": "utf-8",
+ "inject_meta_charset": true
+ }
+ },
+ {
+ "expected": [
+ "<meta charset=utf-8><meta content=noindex name=robots>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "EmptyTag",
+ "meta",
+ [
+ {
+ "namespace": null,
+ "name": "name",
+ "value": "robots"
+ },
+ {
+ "namespace": null,
+ "name": "content",
+ "value": "noindex"
+ }
+ ]
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ]
+ ],
+ "description": "head w/robots",
+ "options": {
+ "encoding": "utf-8",
+ "inject_meta_charset": true
+ }
+ },
+ {
+ "expected": [
+ "<meta content=noindex name=robots><meta charset=utf-8>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "EmptyTag",
+ "meta",
+ [
+ {
+ "namespace": null,
+ "name": "name",
+ "value": "robots"
+ },
+ {
+ "namespace": null,
+ "name": "content",
+ "value": "noindex"
+ }
+ ]
+ ],
+ [
+ "EmptyTag",
+ "meta",
+ [
+ {
+ "namespace": null,
+ "name": "charset",
+ "value": "ascii"
+ }
+ ]
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ]
+ ],
+ "description": "head w/robots & charset",
+ "options": {
+ "encoding": "utf-8",
+ "inject_meta_charset": true
+ }
+ },
+ {
+ "expected": [
+ "<meta content=\"text/html; charset=utf-8\" http-equiv=content-type>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "EmptyTag",
+ "meta",
+ [
+ {
+ "namespace": null,
+ "name": "http-equiv",
+ "value": "content-type"
+ },
+ {
+ "namespace": null,
+ "name": "content",
+ "value": "text/html; charset=ascii"
+ }
+ ]
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ]
+ ],
+ "description": "head w/ charset in http-equiv content-type",
+ "options": {
+ "encoding": "utf-8",
+ "inject_meta_charset": true
+ }
+ },
+ {
+ "expected": [
+ "<meta content=noindex name=robots><meta content=\"text/html; charset=utf-8\" http-equiv=content-type>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "EmptyTag",
+ "meta",
+ [
+ {
+ "namespace": null,
+ "name": "name",
+ "value": "robots"
+ },
+ {
+ "namespace": null,
+ "name": "content",
+ "value": "noindex"
+ }
+ ]
+ ],
+ [
+ "EmptyTag",
+ "meta",
+ [
+ {
+ "namespace": null,
+ "name": "http-equiv",
+ "value": "content-type"
+ },
+ {
+ "namespace": null,
+ "name": "content",
+ "value": "text/html; charset=ascii"
+ }
+ ]
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ]
+ ],
+ "description": "head w/robots & charset in http-equiv content-type",
+ "options": {
+ "encoding": "utf-8",
+ "inject_meta_charset": true
+ }
+ }
+ ]
+} \ No newline at end of file
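Reviewer note: a small sketch of the behaviour the injectmeta cases exercise. When serialize() is given an encoding and inject_meta_charset is left at its default of True, the inject_meta_charset filter inserts or rewrites the charset <meta> in head, so an existing charset=ascii comes back as charset=utf-8 (bytes output, since an encoding was supplied):

    import html5lib
    from html5lib.serializer import serialize

    doc = html5lib.parse("<!DOCTYPE html><head><meta charset=ascii><title>t</title></head>")
    print(serialize(doc, omit_optional_tags=False, encoding="utf-8"))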
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/optionaltags.test b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/optionaltags.test
new file mode 100644
index 0000000000..e67725ca26
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/optionaltags.test
@@ -0,0 +1,3254 @@
+{
+ "tests": [
+ {
+ "expected": [
+ "<html lang=en>foo"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "html",
+ [
+ {
+ "namespace": null,
+ "name": "lang",
+ "value": "en"
+ }
+ ]
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "html start-tag followed by text, with attributes"
+ },
+ {
+ "expected": [
+ "<html><!--foo-->"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "html",
+ {}
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "html start-tag followed by comment"
+ },
+ {
+ "expected": [
+ "<html> foo"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "html",
+ {}
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "html start-tag followed by space character"
+ },
+ {
+ "expected": [
+ "foo"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "html",
+ {}
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "html start-tag followed by text"
+ },
+ {
+ "expected": [
+ "<foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "html",
+ {}
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "html start-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "html",
+ {}
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "html start-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "html",
+ {}
+ ]
+ ],
+ "description": "html start-tag at EOF (shouldn't ever happen?!)"
+ },
+ {
+ "expected": [
+ "</html><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "html"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "html end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</html> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "html"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "html end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "html"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "html end-tag followed by text"
+ },
+ {
+ "expected": [
+ "<foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "html"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "html end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "html"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "html end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "html"
+ ]
+ ],
+ "description": "html end-tag at EOF"
+ },
+ {
+ "expected": [
+ "<head><!--foo-->"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "head start-tag followed by comment"
+ },
+ {
+ "expected": [
+ "<head> foo"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "head start-tag followed by space character"
+ },
+ {
+ "expected": [
+ "<head>foo"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "head start-tag followed by text"
+ },
+ {
+ "expected": [
+ "<foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "head start-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<head></foo>",
+ "</foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "head start-tag followed by end-tag (shouldn't ever happen?!)"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ]
+ ],
+ "description": "empty head element"
+ },
+ {
+ "expected": [
+ "<meta>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ],
+ [
+ "EmptyTag",
+ "meta",
+ {}
+ ]
+ ],
+ "description": "head start-tag followed by empty-tag"
+ },
+ {
+ "expected": [
+ "<head>",
+ ""
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "head",
+ {}
+ ]
+ ],
+ "description": "head start-tag at EOF (shouldn't ever happen?!)"
+ },
+ {
+ "expected": [
+ "</head><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "head end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</head> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "head end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "head end-tag followed by text"
+ },
+ {
+ "expected": [
+ "<foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "head end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "head end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "head"
+ ]
+ ],
+ "description": "head end-tag at EOF"
+ },
+ {
+ "expected": [
+ "<body><!--foo-->"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "body",
+ {}
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "body start-tag followed by comment"
+ },
+ {
+ "expected": [
+ "<body> foo"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "body",
+ {}
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "body start-tag followed by space character"
+ },
+ {
+ "expected": [
+ "foo"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "body",
+ {}
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "body start-tag followed by text"
+ },
+ {
+ "expected": [
+ "<foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "body",
+ {}
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "body start-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "body",
+ {}
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "body start-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "body",
+ {}
+ ]
+ ],
+ "description": "body start-tag at EOF (shouldn't ever happen?!)"
+ },
+ {
+ "expected": [
+ "</body><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "body"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "body end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</body> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "body"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "body end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "body"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "body end-tag followed by text"
+ },
+ {
+ "expected": [
+ "<foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "body"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "body end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "body"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "body end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "body"
+ ]
+ ],
+ "description": "body end-tag at EOF"
+ },
+ {
+ "expected": [
+ "</li><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "li"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "li end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</li> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "li"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "li end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "</li>foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "li"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "li end-tag followed by text"
+ },
+ {
+ "expected": [
+ "</li><foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "li"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "li end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<li>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "li"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "li",
+ {}
+ ]
+ ],
+ "description": "li end-tag followed by li start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "li"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "li end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "li"
+ ]
+ ],
+ "description": "li end-tag at EOF"
+ },
+ {
+ "expected": [
+ "</dt><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dt"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "dt end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</dt> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dt"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "dt end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "</dt>foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dt"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "dt end-tag followed by text"
+ },
+ {
+ "expected": [
+ "</dt><foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dt"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "dt end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<dt>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dt"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "dt",
+ {}
+ ]
+ ],
+ "description": "dt end-tag followed by dt start-tag"
+ },
+ {
+ "expected": [
+ "<dd>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dt"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "dd",
+ {}
+ ]
+ ],
+ "description": "dt end-tag followed by dd start-tag"
+ },
+ {
+ "expected": [
+ "</dt></foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dt"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "dt end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ "</dt>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dt"
+ ]
+ ],
+ "description": "dt end-tag at EOF"
+ },
+ {
+ "expected": [
+ "</dd><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dd"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "dd end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</dd> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dd"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "dd end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "</dd>foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dd"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "dd end-tag followed by text"
+ },
+ {
+ "expected": [
+ "</dd><foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dd"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "dd end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<dd>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dd"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "dd",
+ {}
+ ]
+ ],
+ "description": "dd end-tag followed by dd start-tag"
+ },
+ {
+ "expected": [
+ "<dt>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dd"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "dt",
+ {}
+ ]
+ ],
+ "description": "dd end-tag followed by dt start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dd"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "dd end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "dd"
+ ]
+ ],
+ "description": "dd end-tag at EOF"
+ },
+ {
+ "expected": [
+ "</p><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "p end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</p> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "p end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "</p>foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "p end-tag followed by text"
+ },
+ {
+ "expected": [
+ "</p><foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<address>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "address",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by address start-tag"
+ },
+ {
+ "expected": [
+ "<article>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "article",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by article start-tag"
+ },
+ {
+ "expected": [
+ "<aside>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "aside",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by aside start-tag"
+ },
+ {
+ "expected": [
+ "<blockquote>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "blockquote",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by blockquote start-tag"
+ },
+ {
+ "expected": [
+ "<datagrid>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "datagrid",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by datagrid start-tag"
+ },
+ {
+ "expected": [
+ "<dialog>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "dialog",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by dialog start-tag"
+ },
+ {
+ "expected": [
+ "<dir>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "dir",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by dir start-tag"
+ },
+ {
+ "expected": [
+ "<div>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "div",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by div start-tag"
+ },
+ {
+ "expected": [
+ "<dl>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "dl",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by dl start-tag"
+ },
+ {
+ "expected": [
+ "<fieldset>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "fieldset",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by fieldset start-tag"
+ },
+ {
+ "expected": [
+ "<footer>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "footer",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by footer start-tag"
+ },
+ {
+ "expected": [
+ "<form>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "form",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by form start-tag"
+ },
+ {
+ "expected": [
+ "<h1>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "h1",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by h1 start-tag"
+ },
+ {
+ "expected": [
+ "<h2>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "h2",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by h2 start-tag"
+ },
+ {
+ "expected": [
+ "<h3>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "h3",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by h3 start-tag"
+ },
+ {
+ "expected": [
+ "<h4>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "h4",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by h4 start-tag"
+ },
+ {
+ "expected": [
+ "<h5>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "h5",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by h5 start-tag"
+ },
+ {
+ "expected": [
+ "<h6>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "h6",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by h6 start-tag"
+ },
+ {
+ "expected": [
+ "<header>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "header",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by header start-tag"
+ },
+ {
+ "expected": [
+ "<hr>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "EmptyTag",
+ "hr",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by hr empty-tag"
+ },
+ {
+ "expected": [
+ "<menu>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "menu",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by menu start-tag"
+ },
+ {
+ "expected": [
+ "<nav>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "nav",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by nav start-tag"
+ },
+ {
+ "expected": [
+ "<ol>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "ol",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by ol start-tag"
+ },
+ {
+ "expected": [
+ "<p>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "p",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by p start-tag"
+ },
+ {
+ "expected": [
+ "<pre>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "pre",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by pre start-tag"
+ },
+ {
+ "expected": [
+ "<section>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "section",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by section start-tag"
+ },
+ {
+ "expected": [
+ "<table>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "table",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by table start-tag"
+ },
+ {
+ "expected": [
+ "<ul>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "ul",
+ {}
+ ]
+ ],
+ "description": "p end-tag followed by ul start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "p end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "p"
+ ]
+ ],
+ "description": "p end-tag at EOF"
+ },
+ {
+ "expected": [
+ "</optgroup><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "optgroup"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "optgroup end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</optgroup> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "optgroup"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "optgroup end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "</optgroup>foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "optgroup"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "optgroup end-tag followed by text"
+ },
+ {
+ "expected": [
+ "</optgroup><foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "optgroup"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "optgroup end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<optgroup>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "optgroup"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "optgroup",
+ {}
+ ]
+ ],
+ "description": "optgroup end-tag followed by optgroup start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "optgroup"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "optgroup end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "optgroup"
+ ]
+ ],
+ "description": "optgroup end-tag at EOF"
+ },
+ {
+ "expected": [
+ "</option><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "option"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "option end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</option> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "option"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "option end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "</option>foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "option"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "option end-tag followed by text"
+ },
+ {
+ "expected": [
+ "<optgroup>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "option"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "optgroup",
+ {}
+ ]
+ ],
+ "description": "option end-tag followed by optgroup start-tag"
+ },
+ {
+ "expected": [
+ "</option><foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "option"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "option end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<option>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "option"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "option",
+ {}
+ ]
+ ],
+ "description": "option end-tag followed by option start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "option"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "option end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "option"
+ ]
+ ],
+ "description": "option end-tag at EOF"
+ },
+ {
+ "expected": [
+ "<colgroup><!--foo-->"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup",
+ {}
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "colgroup start-tag followed by comment"
+ },
+ {
+ "expected": [
+ "<colgroup> foo"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup",
+ {}
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "colgroup start-tag followed by space character"
+ },
+ {
+ "expected": [
+ "<colgroup>foo"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup",
+ {}
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "colgroup start-tag followed by text"
+ },
+ {
+ "expected": [
+ "<colgroup><foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup",
+ {}
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "colgroup start-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<table><col>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "table",
+ {}
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup",
+ {}
+ ],
+ [
+ "EmptyTag",
+ "col",
+ {}
+ ]
+ ],
+ "description": "first colgroup in a table with a col child"
+ },
+ {
+ "expected": [
+ "</colgroup><col>",
+ "<colgroup><col>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup",
+ {}
+ ],
+ [
+ "EmptyTag",
+ "http://www.w3.org/1999/xhtml",
+ "col",
+ {}
+ ]
+ ],
+ "description": "colgroup with a col child, following another colgroup"
+ },
+ {
+ "expected": [
+ "<colgroup></foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup",
+ {}
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "colgroup start-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ "<colgroup>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup",
+ {}
+ ]
+ ],
+ "description": "colgroup start-tag at EOF"
+ },
+ {
+ "expected": [
+ "</colgroup><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "colgroup end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</colgroup> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "colgroup end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "colgroup end-tag followed by text"
+ },
+ {
+ "expected": [
+ "<foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "colgroup end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "colgroup end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "colgroup"
+ ]
+ ],
+ "description": "colgroup end-tag at EOF"
+ },
+ {
+ "expected": [
+ "</thead><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "thead"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "thead end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</thead> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "thead"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "thead end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "</thead>foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "thead"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "thead end-tag followed by text"
+ },
+ {
+ "expected": [
+ "</thead><foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "thead"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "thead end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<tbody>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "thead"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ]
+ ],
+ "description": "thead end-tag followed by tbody start-tag"
+ },
+ {
+ "expected": [
+ "<tfoot>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "thead"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tfoot",
+ {}
+ ]
+ ],
+ "description": "thead end-tag followed by tfoot start-tag"
+ },
+ {
+ "expected": [
+ "</thead></foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "thead"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "thead end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ "</thead>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "thead"
+ ]
+ ],
+ "description": "thead end-tag at EOF"
+ },
+ {
+ "expected": [
+ "<tbody><!--foo-->"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "tbody start-tag followed by comment"
+ },
+ {
+ "expected": [
+ "<tbody> foo"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "tbody start-tag followed by space character"
+ },
+ {
+ "expected": [
+ "<tbody>foo"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "tbody start-tag followed by text"
+ },
+ {
+ "expected": [
+ "<tbody><foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "tbody start-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<table><tr>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "table",
+ {}
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tr",
+ {}
+ ]
+ ],
+ "description": "first tbody in a table with a tr child"
+ },
+ {
+ "expected": [
+ "<tbody><tr>",
+ "</tbody><tr>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tr",
+ {}
+ ]
+ ],
+ "description": "tbody with a tr child, following another tbody"
+ },
+ {
+ "expected": [
+ "<tbody><tr>",
+ "</thead><tr>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "thead"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tr",
+ {}
+ ]
+ ],
+ "description": "tbody with a tr child, following a thead"
+ },
+ {
+ "expected": [
+ "<tbody><tr>",
+ "</tfoot><tr>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tfoot"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tr",
+ {}
+ ]
+ ],
+ "description": "tbody with a tr child, following a tfoot"
+ },
+ {
+ "expected": [
+ "<tbody></foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "tbody start-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ "<tbody>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ]
+ ],
+ "description": "tbody start-tag at EOF"
+ },
+ {
+ "expected": [
+ "</tbody><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "tbody end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</tbody> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "tbody end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "</tbody>foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "tbody end-tag followed by text"
+ },
+ {
+ "expected": [
+ "</tbody><foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "tbody end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<tbody>",
+ "</tbody>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ]
+ ],
+ "description": "tbody end-tag followed by tbody start-tag"
+ },
+ {
+ "expected": [
+ "<tfoot>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tfoot",
+ {}
+ ]
+ ],
+ "description": "tbody end-tag followed by tfoot start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "tbody end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody"
+ ]
+ ],
+ "description": "tbody end-tag at EOF"
+ },
+ {
+ "expected": [
+ "</tfoot><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tfoot"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "tfoot end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</tfoot> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tfoot"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "tfoot end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "</tfoot>foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tfoot"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "tfoot end-tag followed by text"
+ },
+ {
+ "expected": [
+ "</tfoot><foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tfoot"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "tfoot end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<tbody>",
+ "</tfoot>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tfoot"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tbody",
+ {}
+ ]
+ ],
+ "description": "tfoot end-tag followed by tbody start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tfoot"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "tfoot end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tfoot"
+ ]
+ ],
+ "description": "tfoot end-tag at EOF"
+ },
+ {
+ "expected": [
+ "</tr><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tr"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "tr end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</tr> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tr"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "tr end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "</tr>foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tr"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "tr end-tag followed by text"
+ },
+ {
+ "expected": [
+ "</tr><foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tr"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "tr end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<tr>",
+ "</tr>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tr"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "tr",
+ {}
+ ]
+ ],
+ "description": "tr end-tag followed by tr start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tr"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "tr end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "tr"
+ ]
+ ],
+ "description": "tr end-tag at EOF"
+ },
+ {
+ "expected": [
+ "</td><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "td"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "td end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</td> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "td"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "td end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "</td>foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "td"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "td end-tag followed by text"
+ },
+ {
+ "expected": [
+ "</td><foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "td"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "td end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<td>",
+ "</td>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "td"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "td",
+ {}
+ ]
+ ],
+ "description": "td end-tag followed by td start-tag"
+ },
+ {
+ "expected": [
+ "<th>",
+ "</td>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "td"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "th",
+ {}
+ ]
+ ],
+ "description": "td end-tag followed by th start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "td"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "td end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "td"
+ ]
+ ],
+ "description": "td end-tag at EOF"
+ },
+ {
+ "expected": [
+ "</th><!--foo-->"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "th"
+ ],
+ [
+ "Comment",
+ "foo"
+ ]
+ ],
+ "description": "th end-tag followed by comment"
+ },
+ {
+ "expected": [
+ "</th> foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "th"
+ ],
+ [
+ "Characters",
+ " foo"
+ ]
+ ],
+ "description": "th end-tag followed by space character"
+ },
+ {
+ "expected": [
+ "</th>foo"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "th"
+ ],
+ [
+ "Characters",
+ "foo"
+ ]
+ ],
+ "description": "th end-tag followed by text"
+ },
+ {
+ "expected": [
+ "</th><foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "th"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo",
+ {}
+ ]
+ ],
+ "description": "th end-tag followed by start-tag"
+ },
+ {
+ "expected": [
+ "<th>",
+ "</th>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "th"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "th",
+ {}
+ ]
+ ],
+ "description": "th end-tag followed by th start-tag"
+ },
+ {
+ "expected": [
+ "<td>",
+ "</th>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "th"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "td",
+ {}
+ ]
+ ],
+ "description": "th end-tag followed by td start-tag"
+ },
+ {
+ "expected": [
+ "</foo>"
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "th"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "foo"
+ ]
+ ],
+ "description": "th end-tag followed by end-tag"
+ },
+ {
+ "expected": [
+ ""
+ ],
+ "input": [
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "th"
+ ]
+ ],
+ "description": "th end-tag at EOF"
+ }
+ ]
+} \ No newline at end of file
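
Each case above pairs a treewalker-style token stream ("input") with one or more acceptable serializations ("expected"); the optional end tags (</dd>, </p>, </colgroup>, and so on) are dropped exactly where the optional-tags rules allow. A runner only has to serialize the tokens with omit_optional_tags enabled and check membership in "expected". The sketch below is illustrative only; the real runner is the JsonWalker-based harness in test_serializer.py later in this patch, which also handles attribute lists and per-case options. It assumes html5lib is importable and that the JSON above is saved locally under the hypothetical name optionaltags.test. Attributes in these cases are empty dicts, so no attribute conversion is needed here.

    import json
    import re

    from html5lib.serializer import HTMLSerializer

    SPACE_PREFIX = re.compile(r"^[ \t\r\n\f]+")

    def as_token_dicts(raw_tokens):
        # Convert the JSON token tuples into the dict-shaped tokens that the
        # serializer and its optionaltags filter expect.
        for token in raw_tokens:
            kind = token[0]
            if kind in ("StartTag", "EndTag"):
                yield {"type": kind, "namespace": token[1], "name": token[2],
                       "data": token[3] if len(token) > 3 else {}}
            elif kind == "EmptyTag":
                yield {"type": kind, "name": token[-2], "data": token[-1]}
            elif kind == "Comment":
                yield {"type": kind, "data": token[1]}
            else:
                # Characters: split a leading space run off as SpaceCharacters,
                # the way html5lib tree walkers do, so that omission rules which
                # distinguish whitespace (e.g. </colgroup>) see the right type.
                text = token[1]
                match = SPACE_PREFIX.match(text)
                if match:
                    yield {"type": "SpaceCharacters", "data": match.group()}
                    text = text[match.end():]
                if text:
                    yield {"type": "Characters", "data": text}

    with open("optionaltags.test") as fp:   # hypothetical local copy of the file above
        for case in json.load(fp)["tests"]:
            rendered = HTMLSerializer(omit_optional_tags=True).render(
                as_token_dicts(case["input"]))
            assert rendered in case["expected"], case["description"]
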
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/options.test b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/options.test
new file mode 100644
index 0000000000..a22eebfcf3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/options.test
@@ -0,0 +1,334 @@
+{
+ "tests": [
+ {
+ "expected": [
+ "<span title='test &#39;with&#39; quote_char'>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "test 'with' quote_char"
+ }
+ ]
+ ]
+ ],
+ "description": "quote_char=\"'\"",
+ "options": {
+ "quote_char": "'"
+ }
+ },
+ {
+ "expected": [
+ "<button disabled>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "button",
+ [
+ {
+ "namespace": null,
+ "name": "disabled",
+ "value": "disabled"
+ }
+ ]
+ ]
+ ],
+ "description": "quote_attr_values='always'",
+ "options": {
+ "quote_attr_values": "always"
+ }
+ },
+ {
+ "expected": [
+ "<div itemscope>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "div",
+ [
+ {
+ "namespace": null,
+ "name": "itemscope",
+ "value": "itemscope"
+ }
+ ]
+ ]
+ ],
+ "description": "quote_attr_values='always' with itemscope",
+ "options": {
+ "quote_attr_values": "always"
+ }
+ },
+ {
+ "expected": [
+ "<div irrelevant>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "div",
+ [
+ {
+ "namespace": null,
+ "name": "irrelevant",
+ "value": "irrelevant"
+ }
+ ]
+ ]
+ ],
+ "description": "quote_attr_values='always' with irrelevant",
+ "options": {
+ "quote_attr_values": "always"
+ }
+ },
+ {
+ "expected": [
+ "<div class=\"foo\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "div",
+ [
+ {
+ "namespace": null,
+ "name": "class",
+ "value": "foo"
+ }
+ ]
+ ]
+ ],
+ "description": "non-minimized quote_attr_values='always'",
+ "options": {
+ "quote_attr_values": "always"
+ }
+ },
+ {
+ "expected": [
+ "<div class=foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "div",
+ [
+ {
+ "namespace": null,
+ "name": "class",
+ "value": "foo"
+ }
+ ]
+ ]
+ ],
+ "description": "non-minimized quote_attr_values='legacy'",
+ "options": {
+ "quote_attr_values": "legacy"
+ }
+ },
+ {
+ "expected": [
+ "<div class=foo>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "div",
+ [
+ {
+ "namespace": null,
+ "name": "class",
+ "value": "foo"
+ }
+ ]
+ ]
+ ],
+ "description": "non-minimized quote_attr_values='spec'",
+ "options": {
+ "quote_attr_values": "spec"
+ }
+ },
+ {
+ "expected": [
+ "<img />"
+ ],
+ "input": [
+ [
+ "EmptyTag",
+ "img",
+ {}
+ ]
+ ],
+ "description": "use_trailing_solidus=true with void element",
+ "options": {
+ "use_trailing_solidus": true
+ }
+ },
+ {
+ "expected": [
+ "<div>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "div",
+ {}
+ ]
+ ],
+ "description": "use_trailing_solidus=true with non-void element",
+ "options": {
+ "use_trailing_solidus": true
+ }
+ },
+ {
+ "expected": [
+ "<div itemscope=itemscope>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "div",
+ [
+ {
+ "namespace": null,
+ "name": "itemscope",
+ "value": "itemscope"
+ }
+ ]
+ ]
+ ],
+ "description": "minimize_boolean_attributes=false",
+ "options": {
+ "minimize_boolean_attributes": false
+ }
+ },
+ {
+ "expected": [
+ "<div irrelevant=irrelevant>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "div",
+ [
+ {
+ "namespace": null,
+ "name": "irrelevant",
+ "value": "irrelevant"
+ }
+ ]
+ ]
+ ],
+ "description": "minimize_boolean_attributes=false",
+ "options": {
+ "minimize_boolean_attributes": false
+ }
+ },
+ {
+ "expected": [
+ "<div itemscope=\"\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "div",
+ [
+ {
+ "namespace": null,
+ "name": "itemscope",
+ "value": ""
+ }
+ ]
+ ]
+ ],
+ "description": "minimize_boolean_attributes=false with empty value",
+ "options": {
+ "minimize_boolean_attributes": false
+ }
+ },
+ {
+ "expected": [
+ "<div irrelevant=\"\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "div",
+ [
+ {
+ "namespace": null,
+ "name": "irrelevant",
+ "value": ""
+ }
+ ]
+ ]
+ ],
+ "description": "minimize_boolean_attributes=false with empty value",
+ "options": {
+ "minimize_boolean_attributes": false
+ }
+ },
+ {
+ "expected": [
+ "<a title=\"a&lt;b>c&amp;d\">"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "a",
+ [
+ {
+ "namespace": null,
+ "name": "title",
+ "value": "a<b>c&d"
+ }
+ ]
+ ]
+ ],
+ "description": "escape less than signs in attribute values",
+ "options": {
+ "escape_lt_in_attrs": true
+ }
+ },
+ {
+ "expected": [
+ "<script>a&lt;b&gt;c&amp;d"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "script",
+ {}
+ ],
+ [
+ "Characters",
+ "a<b>c&d"
+ ]
+ ],
+ "description": "rcdata",
+ "options": {
+ "escape_rcdata": true
+ }
+ }
+ ]
+} \ No newline at end of file
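
The "options" object carried by each case above maps directly onto HTMLSerializer keyword arguments (quote_char, quote_attr_values, use_trailing_solidus, minimize_boolean_attributes, escape_lt_in_attrs, escape_rcdata). Below is a small, illustrative sketch of the first case, assuming html5lib is importable; note that the serializer itself expects attributes as a {(namespace, name): value} mapping, whereas this JSON stores them as a list of {namespace, name, value} objects.

    from html5lib.serializer import HTMLSerializer

    serializer = HTMLSerializer(quote_attr_values="always", quote_char="'")
    tokens = [{
        "type": "StartTag",
        "namespace": "http://www.w3.org/1999/xhtml",
        "name": "span",
        # Attribute converted by hand from the JSON list-of-dicts form.
        "data": {(None, "title"): "test 'with' quote_char"},
    }]
    print(serializer.render(tokens))
    # <span title='test &#39;with&#39; quote_char'>
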
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/whitespace.test b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/whitespace.test
new file mode 100644
index 0000000000..dac3a69e27
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/serializer-testdata/whitespace.test
@@ -0,0 +1,198 @@
+{
+ "tests": [
+ {
+ "expected": [
+ " foo"
+ ],
+ "input": [
+ [
+ "Characters",
+ "\t\r\n\f foo"
+ ]
+ ],
+ "description": "bare text with leading spaces",
+ "options": {
+ "strip_whitespace": true
+ }
+ },
+ {
+ "expected": [
+ "foo "
+ ],
+ "input": [
+ [
+ "Characters",
+ "foo \t\r\n\f"
+ ]
+ ],
+ "description": "bare text with trailing spaces",
+ "options": {
+ "strip_whitespace": true
+ }
+ },
+ {
+ "expected": [
+ "foo bar"
+ ],
+ "input": [
+ [
+ "Characters",
+ "foo \t\r\n\f bar"
+ ]
+ ],
+ "description": "bare text with inner spaces",
+ "options": {
+ "strip_whitespace": true
+ }
+ },
+ {
+ "expected": [
+ "<pre>\t\r\n\f foo \t\r\n\f bar \t\r\n\f</pre>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "pre",
+ {}
+ ],
+ [
+ "Characters",
+ "\t\r\n\f foo \t\r\n\f bar \t\r\n\f"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "pre"
+ ]
+ ],
+ "description": "text within <pre>",
+ "options": {
+ "strip_whitespace": true
+ }
+ },
+ {
+ "expected": [
+ "<pre>\t\r\n\f fo<span>o \t\r\n\f b</span>ar \t\r\n\f</pre>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "pre",
+ {}
+ ],
+ [
+ "Characters",
+ "\t\r\n\f fo"
+ ],
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "span",
+ {}
+ ],
+ [
+ "Characters",
+ "o \t\r\n\f b"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "span"
+ ],
+ [
+ "Characters",
+ "ar \t\r\n\f"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "pre"
+ ]
+ ],
+ "description": "text within <pre>, with inner markup",
+ "options": {
+ "strip_whitespace": true
+ }
+ },
+ {
+ "expected": [
+ "<textarea>\t\r\n\f foo \t\r\n\f bar \t\r\n\f</textarea>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "textarea",
+ {}
+ ],
+ [
+ "Characters",
+ "\t\r\n\f foo \t\r\n\f bar \t\r\n\f"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "textarea"
+ ]
+ ],
+ "description": "text within <textarea>",
+ "options": {
+ "strip_whitespace": true
+ }
+ },
+ {
+ "expected": [
+ "<script>\t\r\n\f foo \t\r\n\f bar \t\r\n\f</script>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "script",
+ {}
+ ],
+ [
+ "Characters",
+ "\t\r\n\f foo \t\r\n\f bar \t\r\n\f"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "script"
+ ]
+ ],
+ "description": "text within <script>",
+ "options": {
+ "strip_whitespace": true
+ }
+ },
+ {
+ "expected": [
+ "<style>\t\r\n\f foo \t\r\n\f bar \t\r\n\f</style>"
+ ],
+ "input": [
+ [
+ "StartTag",
+ "http://www.w3.org/1999/xhtml",
+ "style",
+ {}
+ ],
+ [
+ "Characters",
+ "\t\r\n\f foo \t\r\n\f bar \t\r\n\f"
+ ],
+ [
+ "EndTag",
+ "http://www.w3.org/1999/xhtml",
+ "style"
+ ]
+ ],
+ "description": "text within <style>",
+ "options": {
+ "strip_whitespace": true
+ }
+ }
+ ]
+} \ No newline at end of file
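
These cases all set strip_whitespace, which makes the serializer run its whitespace filter: runs of space characters are collapsed to a single space, except inside whitespace-preserving elements (pre, textarea, and rcdata elements such as script and style), which pass through untouched. A brief sketch of the filter on its own, mirroring the "bare text with inner spaces" case above and assuming html5lib is importable:

    from html5lib.filters.whitespace import Filter as WhitespaceFilter

    tokens = [{"type": "Characters", "data": "foo \t\r\n\f bar"}]
    # Outside the whitespace-preserving elements, each run of space
    # characters in a Characters token is collapsed to one space.
    print([token["data"] for token in WhitespaceFilter(tokens)])
    # ['foo bar']
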
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/support.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/support.py
new file mode 100644
index 0000000000..9cd5afbe69
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/support.py
@@ -0,0 +1,199 @@
+from __future__ import absolute_import, division, unicode_literals
+
+# pylint:disable=wrong-import-position
+
+import os
+import sys
+import codecs
+import glob
+import xml.sax.handler
+
+base_path = os.path.split(__file__)[0]
+
+test_dir = os.path.join(base_path, 'testdata')
+sys.path.insert(0, os.path.abspath(os.path.join(base_path,
+ os.path.pardir,
+ os.path.pardir)))
+
+from html5lib import treebuilders, treewalkers, treeadapters # noqa
+del base_path
+
+# Build a dict of available trees
+treeTypes = {}
+
+# DOM impls
+treeTypes["DOM"] = {
+ "builder": treebuilders.getTreeBuilder("dom"),
+ "walker": treewalkers.getTreeWalker("dom")
+}
+
+# ElementTree impls
+import xml.etree.ElementTree as ElementTree # noqa
+treeTypes['ElementTree'] = {
+ "builder": treebuilders.getTreeBuilder("etree", ElementTree, fullTree=True),
+ "walker": treewalkers.getTreeWalker("etree", ElementTree)
+}
+
+try:
+ import xml.etree.cElementTree as cElementTree # noqa
+except ImportError:
+ treeTypes['cElementTree'] = None
+else:
+ # On Python 3.3 and above cElementTree is an alias, don't run them twice.
+ if cElementTree.Element is ElementTree.Element:
+ treeTypes['cElementTree'] = None
+ else:
+ treeTypes['cElementTree'] = {
+ "builder": treebuilders.getTreeBuilder("etree", cElementTree, fullTree=True),
+ "walker": treewalkers.getTreeWalker("etree", cElementTree)
+ }
+
+try:
+ import lxml.etree as lxml # noqa
+except ImportError:
+ treeTypes['lxml'] = None
+else:
+ treeTypes['lxml'] = {
+ "builder": treebuilders.getTreeBuilder("lxml"),
+ "walker": treewalkers.getTreeWalker("lxml")
+ }
+
+# Genshi impls
+try:
+ import genshi # noqa
+except ImportError:
+ treeTypes["genshi"] = None
+else:
+ treeTypes["genshi"] = {
+ "builder": treebuilders.getTreeBuilder("dom"),
+ "adapter": lambda tree: treeadapters.genshi.to_genshi(treewalkers.getTreeWalker("dom")(tree)),
+ "walker": treewalkers.getTreeWalker("genshi")
+ }
+
+# pylint:enable=wrong-import-position
+
+
+def get_data_files(subdirectory, files='*.dat', search_dir=test_dir):
+ return sorted(glob.glob(os.path.join(search_dir, subdirectory, files)))
+
+
+class DefaultDict(dict):
+ def __init__(self, default, *args, **kwargs):
+ self.default = default
+ dict.__init__(self, *args, **kwargs)
+
+ def __getitem__(self, key):
+ return dict.get(self, key, self.default)
+
+
+class TestData(object):
+ def __init__(self, filename, newTestHeading="data", encoding="utf8"):
+ if encoding is None:
+ self.f = open(filename, mode="rb")
+ else:
+ self.f = codecs.open(filename, encoding=encoding)
+ self.encoding = encoding
+ self.newTestHeading = newTestHeading
+
+ def __iter__(self):
+ data = DefaultDict(None)
+ key = None
+ for line in self.f:
+ heading = self.isSectionHeading(line)
+ if heading:
+ if data and heading == self.newTestHeading:
+ # Remove trailing newline
+ data[key] = data[key][:-1]
+ yield self.normaliseOutput(data)
+ data = DefaultDict(None)
+ key = heading
+ data[key] = "" if self.encoding else b""
+ elif key is not None:
+ data[key] += line
+ if data:
+ yield self.normaliseOutput(data)
+
+ def isSectionHeading(self, line):
+ """If the current heading is a test section heading return the heading,
+ otherwise return False"""
+ # print(line)
+ if line.startswith("#" if self.encoding else b"#"):
+ return line[1:].strip()
+ else:
+ return False
+
+ def normaliseOutput(self, data):
+ # Remove trailing newlines
+ for key, value in data.items():
+ if value.endswith("\n" if self.encoding else b"\n"):
+ data[key] = value[:-1]
+ return data
+
+
+def convert(stripChars):
+ def convertData(data):
+ """convert the output of str(document) to the format used in the testcases"""
+ data = data.split("\n")
+ rv = []
+ for line in data:
+ if line.startswith("|"):
+ rv.append(line[stripChars:])
+ else:
+ rv.append(line)
+ return "\n".join(rv)
+ return convertData
+
+
+convertExpected = convert(2)
+
+
+def errorMessage(input, expected, actual):
+ msg = ("Input:\n%s\nExpected:\n%s\nReceived\n%s\n" %
+ (repr(input), repr(expected), repr(actual)))
+ if sys.version_info[0] == 2:
+ msg = msg.encode("ascii", "backslashreplace")
+ return msg
+
+
+class TracingSaxHandler(xml.sax.handler.ContentHandler):
+ def __init__(self):
+ xml.sax.handler.ContentHandler.__init__(self)
+ self.visited = []
+
+ def startDocument(self):
+ self.visited.append('startDocument')
+
+ def endDocument(self):
+ self.visited.append('endDocument')
+
+ def startPrefixMapping(self, prefix, uri):
+ # These are ignored as their order is not guaranteed
+ pass
+
+ def endPrefixMapping(self, prefix):
+ # These are ignored as their order is not guaranteed
+ pass
+
+ def startElement(self, name, attrs):
+ self.visited.append(('startElement', name, attrs))
+
+ def endElement(self, name):
+ self.visited.append(('endElement', name))
+
+ def startElementNS(self, name, qname, attrs):
+ self.visited.append(('startElementNS', name, qname, dict(attrs)))
+
+ def endElementNS(self, name, qname):
+ self.visited.append(('endElementNS', name, qname))
+
+ def characters(self, content):
+ self.visited.append(('characters', content))
+
+ def ignorableWhitespace(self, whitespace):
+ self.visited.append(('ignorableWhitespace', whitespace))
+
+ def processingInstruction(self, target, data):
+ self.visited.append(('processingInstruction', target, data))
+
+ def skippedEntity(self, name):
+ self.visited.append(('skippedEntity', name))
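
support.py's TestData iterator reads the "#"-delimited .dat format used throughout the html5lib-tests data: every line starting with "#" opens a named section, a new occurrence of the heading passed as newTestHeading (usually "data") starts the next test, and trailing newlines are stripped from each section. The sketch below is illustrative only; it assumes the bundled html5lib.tests package (the package these files live in) is importable, and it writes a hypothetical one-test sample to a temporary file.

    import os
    import tempfile

    from html5lib.tests.support import TestData, convertExpected

    sample = (
        "#data\n"
        "<p>One\n"
        "#errors\n"
        "#document\n"
        "| <html>\n"
        "|   <head>\n"
        "|   <body>\n"
        "|     <p>\n"
        "|       \"One\"\n"
    )

    with tempfile.NamedTemporaryFile("w", suffix=".dat", delete=False) as fp:
        fp.write(sample)
    try:
        for test in TestData(fp.name, "data"):
            print(sorted(test.keys()))   # ['data', 'document', 'errors']
            print(test["data"])          # <p>One
            # convertExpected strips the leading "| " tree-dump prefix.
            print(convertExpected(test["document"]).splitlines()[0])   # <html>
    finally:
        os.unlink(fp.name)
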
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_alphabeticalattributes.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_alphabeticalattributes.py
new file mode 100644
index 0000000000..7d5b8e0f65
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_alphabeticalattributes.py
@@ -0,0 +1,78 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from collections import OrderedDict
+
+import pytest
+
+import html5lib
+from html5lib.filters.alphabeticalattributes import Filter
+from html5lib.serializer import HTMLSerializer
+
+
+@pytest.mark.parametrize('msg, attrs, expected_attrs', [
+ (
+ 'no attrs',
+ {},
+ {}
+ ),
+ (
+ 'one attr',
+ {(None, 'alt'): 'image'},
+ OrderedDict([((None, 'alt'), 'image')])
+ ),
+ (
+ 'multiple attrs',
+ {
+ (None, 'src'): 'foo',
+ (None, 'alt'): 'image',
+ (None, 'style'): 'border: 1px solid black;'
+ },
+ OrderedDict([
+ ((None, 'alt'), 'image'),
+ ((None, 'src'), 'foo'),
+ ((None, 'style'), 'border: 1px solid black;')
+ ])
+ ),
+])
+def test_alphabetizing(msg, attrs, expected_attrs):
+ tokens = [{'type': 'StartTag', 'name': 'img', 'data': attrs}]
+ output_tokens = list(Filter(tokens))
+
+ attrs = output_tokens[0]['data']
+ assert attrs == expected_attrs
+
+
+def test_with_different_namespaces():
+ tokens = [{
+ 'type': 'StartTag',
+ 'name': 'pattern',
+ 'data': {
+ (None, 'id'): 'patt1',
+ ('http://www.w3.org/1999/xlink', 'href'): '#patt2'
+ }
+ }]
+ output_tokens = list(Filter(tokens))
+
+ attrs = output_tokens[0]['data']
+ assert attrs == OrderedDict([
+ ((None, 'id'), 'patt1'),
+ (('http://www.w3.org/1999/xlink', 'href'), '#patt2')
+ ])
+
+
+def test_with_serializer():
+ """Verify filter works in the context of everything else"""
+ parser = html5lib.HTMLParser()
+ dom = parser.parseFragment('<svg><pattern xlink:href="#patt2" id="patt1"></svg>')
+ walker = html5lib.getTreeWalker('etree')
+ ser = HTMLSerializer(
+ alphabetical_attributes=True,
+ quote_attr_values='always'
+ )
+
+ # FIXME(willkg): The "xlink" namespace gets dropped by the serializer. When
+ # that gets fixed, we can fix this expected result.
+ assert (
+ ser.render(walker(dom)) ==
+ '<svg><pattern id="patt1" href="#patt2"></pattern></svg>'
+ )
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_encoding.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_encoding.py
new file mode 100644
index 0000000000..47c4814a47
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_encoding.py
@@ -0,0 +1,117 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import os
+
+import pytest
+
+from .support import get_data_files, test_dir, errorMessage, TestData as _TestData
+from html5lib import HTMLParser, _inputstream
+
+
+def test_basic_prescan_length():
+ data = "<title>Caf\u00E9</title><!--a--><meta charset='utf-8'>".encode('utf-8')
+ pad = 1024 - len(data) + 1
+ data = data.replace(b"-a-", b"-" + (b"a" * pad) + b"-")
+ assert len(data) == 1024 # Sanity
+ stream = _inputstream.HTMLBinaryInputStream(data, useChardet=False)
+ assert 'utf-8' == stream.charEncoding[0].name
+
+
+def test_parser_reparse():
+ data = "<title>Caf\u00E9</title><!--a--><meta charset='utf-8'>".encode('utf-8')
+ pad = 10240 - len(data) + 1
+ data = data.replace(b"-a-", b"-" + (b"a" * pad) + b"-")
+ assert len(data) == 10240 # Sanity
+ stream = _inputstream.HTMLBinaryInputStream(data, useChardet=False)
+ assert 'windows-1252' == stream.charEncoding[0].name
+ p = HTMLParser(namespaceHTMLElements=False)
+ doc = p.parse(data, useChardet=False)
+ assert 'utf-8' == p.documentEncoding
+ assert doc.find(".//title").text == "Caf\u00E9"
+
+
+@pytest.mark.parametrize("expected,data,kwargs", [
+ ("utf-16le", b"\xFF\xFE", {"override_encoding": "iso-8859-2"}),
+ ("utf-16be", b"\xFE\xFF", {"override_encoding": "iso-8859-2"}),
+ ("utf-8", b"\xEF\xBB\xBF", {"override_encoding": "iso-8859-2"}),
+ ("iso-8859-2", b"", {"override_encoding": "iso-8859-2", "transport_encoding": "iso-8859-3"}),
+ ("iso-8859-2", b"<meta charset=iso-8859-3>", {"transport_encoding": "iso-8859-2"}),
+ ("iso-8859-2", b"<meta charset=iso-8859-2>", {"same_origin_parent_encoding": "iso-8859-3"}),
+ ("iso-8859-2", b"", {"same_origin_parent_encoding": "iso-8859-2", "likely_encoding": "iso-8859-3"}),
+ ("iso-8859-2", b"", {"same_origin_parent_encoding": "utf-16", "likely_encoding": "iso-8859-2"}),
+ ("iso-8859-2", b"", {"same_origin_parent_encoding": "utf-16be", "likely_encoding": "iso-8859-2"}),
+ ("iso-8859-2", b"", {"same_origin_parent_encoding": "utf-16le", "likely_encoding": "iso-8859-2"}),
+ ("iso-8859-2", b"", {"likely_encoding": "iso-8859-2", "default_encoding": "iso-8859-3"}),
+ ("iso-8859-2", b"", {"default_encoding": "iso-8859-2"}),
+ ("windows-1252", b"", {"default_encoding": "totally-bogus-string"}),
+ ("windows-1252", b"", {}),
+])
+def test_parser_args(expected, data, kwargs):
+ stream = _inputstream.HTMLBinaryInputStream(data, useChardet=False, **kwargs)
+ assert expected == stream.charEncoding[0].name
+ p = HTMLParser()
+ p.parse(data, useChardet=False, **kwargs)
+ assert expected == p.documentEncoding
+
+
+@pytest.mark.parametrize("kwargs", [
+ {"override_encoding": "iso-8859-2"},
+ {"override_encoding": None},
+ {"transport_encoding": "iso-8859-2"},
+ {"transport_encoding": None},
+ {"same_origin_parent_encoding": "iso-8859-2"},
+ {"same_origin_parent_encoding": None},
+ {"likely_encoding": "iso-8859-2"},
+ {"likely_encoding": None},
+ {"default_encoding": "iso-8859-2"},
+ {"default_encoding": None},
+ {"foo_encoding": "iso-8859-2"},
+ {"foo_encoding": None},
+])
+def test_parser_args_raises(kwargs):
+ with pytest.raises(TypeError) as exc_info:
+ p = HTMLParser()
+ p.parse("", useChardet=False, **kwargs)
+ assert exc_info.value.args[0].startswith("Cannot set an encoding with a unicode input")
+
+
+def param_encoding():
+ for filename in get_data_files("encoding"):
+ tests = _TestData(filename, b"data", encoding=None)
+ for test in tests:
+ yield test[b'data'], test[b'encoding']
+
+
+@pytest.mark.parametrize("data, encoding", param_encoding())
+def test_parser_encoding(data, encoding):
+ p = HTMLParser()
+ assert p.documentEncoding is None
+ p.parse(data, useChardet=False)
+ encoding = encoding.lower().decode("ascii")
+
+ assert encoding == p.documentEncoding, errorMessage(data, encoding, p.documentEncoding)
+
+
+@pytest.mark.parametrize("data, encoding", param_encoding())
+def test_prescan_encoding(data, encoding):
+ stream = _inputstream.HTMLBinaryInputStream(data, useChardet=False)
+ encoding = encoding.lower().decode("ascii")
+
+ # Very crude way to ignore irrelevant tests
+ if len(data) > stream.numBytesMeta:
+ return
+
+ assert encoding == stream.charEncoding[0].name, errorMessage(data, encoding, stream.charEncoding[0].name)
+
+
+# pylint:disable=wrong-import-position
+try:
+ import chardet # noqa
+except ImportError:
+ print("chardet not found, skipping chardet tests")
+else:
+ def test_chardet():
+ with open(os.path.join(test_dir, "encoding", "chardet", "test_big5.txt"), "rb") as fp:
+ encoding = _inputstream.HTMLInputStream(fp.read()).charEncoding
+ assert encoding[0].name == "big5"
+# pylint:enable=wrong-import-position
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_meta.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_meta.py
new file mode 100644
index 0000000000..dd02dd7fb7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_meta.py
@@ -0,0 +1,41 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import six
+from mock import Mock
+
+from . import support
+
+
+def _createReprMock(r):
+ """Creates a mock with a __repr__ returning r
+
+ Also provides __str__ mock with default mock behaviour"""
+ mock = Mock()
+ mock.__repr__ = Mock()
+ mock.__repr__.return_value = r
+ mock.__str__ = Mock(wraps=mock.__str__)
+ return mock
+
+
+def test_errorMessage():
+ # Create mock objects to take repr of
+ input = _createReprMock("1")
+ expected = _createReprMock("2")
+ actual = _createReprMock("3")
+
+ # Run the actual test
+ r = support.errorMessage(input, expected, actual)
+
+ # Assertions!
+ if six.PY2:
+ assert b"Input:\n1\nExpected:\n2\nReceived\n3\n" == r
+ else:
+ assert six.PY3
+ assert "Input:\n1\nExpected:\n2\nReceived\n3\n" == r
+
+ assert input.__repr__.call_count == 1
+ assert expected.__repr__.call_count == 1
+ assert actual.__repr__.call_count == 1
+ assert not input.__str__.called
+ assert not expected.__str__.called
+ assert not actual.__str__.called
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_optionaltags_filter.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_optionaltags_filter.py
new file mode 100644
index 0000000000..cd2821497f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_optionaltags_filter.py
@@ -0,0 +1,7 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from html5lib.filters.optionaltags import Filter
+
+
+def test_empty():
+ assert list(Filter([])) == []
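
This only covers the empty stream; the filter's actual omission behaviour is exercised through the serializer by the data-driven cases in optionaltags.test above. For a sense of the filter in isolation, here is a brief sketch mirroring the "dd end-tag followed by dd start-tag" case from that file, assuming html5lib is importable:

    from html5lib.filters.optionaltags import Filter

    ns = "http://www.w3.org/1999/xhtml"
    tokens = [
        {"type": "EndTag", "namespace": ns, "name": "dd"},
        {"type": "StartTag", "namespace": ns, "name": "dd", "data": {}},
    ]
    # The </dd> is dropped because it is immediately followed by another
    # dd start-tag, so only the StartTag token survives.
    print([(token["type"], token["name"]) for token in Filter(tokens)])
    # [('StartTag', 'dd')]
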
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_parser2.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_parser2.py
new file mode 100644
index 0000000000..879d2447df
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_parser2.py
@@ -0,0 +1,94 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from six import PY2, text_type
+
+import io
+
+from . import support # noqa
+
+from html5lib.constants import namespaces
+from html5lib import parse, parseFragment, HTMLParser
+
+
+# tests that aren't autogenerated from text files
+def test_assertDoctypeCloneable():
+ doc = parse('<!DOCTYPE HTML>', treebuilder="dom")
+ assert doc.cloneNode(True) is not None
+
+
+def test_line_counter():
+ # http://groups.google.com/group/html5lib-discuss/browse_frm/thread/f4f00e4a2f26d5c0
+ assert parse("<pre>\nx\n&gt;\n</pre>") is not None
+
+
+def test_namespace_html_elements_0_dom():
+ doc = parse("<html></html>",
+ treebuilder="dom",
+ namespaceHTMLElements=True)
+ assert doc.childNodes[0].namespaceURI == namespaces["html"]
+
+
+def test_namespace_html_elements_1_dom():
+ doc = parse("<html></html>",
+ treebuilder="dom",
+ namespaceHTMLElements=False)
+ assert doc.childNodes[0].namespaceURI is None
+
+
+def test_namespace_html_elements_0_etree():
+ doc = parse("<html></html>",
+ treebuilder="etree",
+ namespaceHTMLElements=True)
+ assert doc.tag == "{%s}html" % (namespaces["html"],)
+
+
+def test_namespace_html_elements_1_etree():
+ doc = parse("<html></html>",
+ treebuilder="etree",
+ namespaceHTMLElements=False)
+ assert doc.tag == "html"
+
+
+def test_unicode_file():
+ assert parse(io.StringIO("a")) is not None
+
+
+def test_debug_log():
+ parser = HTMLParser(debug=True)
+ parser.parse("<!doctype html><title>a</title><p>b<script>c</script>d</p>e")
+
+ expected = [('dataState', 'InitialPhase', 'InitialPhase', 'processDoctype', {'type': 'Doctype'}),
+ ('dataState', 'BeforeHtmlPhase', 'BeforeHtmlPhase', 'processStartTag', {'name': 'title', 'type': 'StartTag'}),
+ ('dataState', 'BeforeHeadPhase', 'BeforeHeadPhase', 'processStartTag', {'name': 'title', 'type': 'StartTag'}),
+ ('dataState', 'InHeadPhase', 'InHeadPhase', 'processStartTag', {'name': 'title', 'type': 'StartTag'}),
+ ('rcdataState', 'TextPhase', 'TextPhase', 'processCharacters', {'type': 'Characters'}),
+ ('dataState', 'TextPhase', 'TextPhase', 'processEndTag', {'name': 'title', 'type': 'EndTag'}),
+ ('dataState', 'InHeadPhase', 'InHeadPhase', 'processStartTag', {'name': 'p', 'type': 'StartTag'}),
+ ('dataState', 'AfterHeadPhase', 'AfterHeadPhase', 'processStartTag', {'name': 'p', 'type': 'StartTag'}),
+ ('dataState', 'InBodyPhase', 'InBodyPhase', 'processStartTag', {'name': 'p', 'type': 'StartTag'}),
+ ('dataState', 'InBodyPhase', 'InBodyPhase', 'processCharacters', {'type': 'Characters'}),
+ ('dataState', 'InBodyPhase', 'InBodyPhase', 'processStartTag', {'name': 'script', 'type': 'StartTag'}),
+ ('dataState', 'InBodyPhase', 'InHeadPhase', 'processStartTag', {'name': 'script', 'type': 'StartTag'}),
+ ('scriptDataState', 'TextPhase', 'TextPhase', 'processCharacters', {'type': 'Characters'}),
+ ('dataState', 'TextPhase', 'TextPhase', 'processEndTag', {'name': 'script', 'type': 'EndTag'}),
+ ('dataState', 'InBodyPhase', 'InBodyPhase', 'processCharacters', {'type': 'Characters'}),
+ ('dataState', 'InBodyPhase', 'InBodyPhase', 'processEndTag', {'name': 'p', 'type': 'EndTag'}),
+ ('dataState', 'InBodyPhase', 'InBodyPhase', 'processCharacters', {'type': 'Characters'})]
+
+ if PY2:
+ for i, log in enumerate(expected):
+ log = [x.encode("ascii") if isinstance(x, text_type) else x for x in log]
+ expected[i] = tuple(log)
+
+ assert parser.log == expected
+
+
+def test_no_duplicate_clone():
+ frag = parseFragment("<b><em><foo><foob><fooc><aside></b></em>")
+ assert len(frag) == 2
+
+
+def test_self_closing_col():
+ parser = HTMLParser()
+ parser.parseFragment('<table><colgroup><col /></colgroup></table>')
+ assert not parser.errors
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_sanitizer.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_sanitizer.py
new file mode 100644
index 0000000000..f3faeb8050
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_sanitizer.py
@@ -0,0 +1,133 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import pytest
+
+from html5lib import constants, parseFragment, serialize
+from html5lib.filters import sanitizer
+
+
+def sanitize_html(stream):
+ parsed = parseFragment(stream)
+ with pytest.deprecated_call():
+ serialized = serialize(parsed,
+ sanitize=True,
+ omit_optional_tags=False,
+ use_trailing_solidus=True,
+ space_before_trailing_solidus=False,
+ quote_attr_values="always",
+ quote_char='"',
+ alphabetical_attributes=True)
+ return serialized
+
+
+def test_should_handle_astral_plane_characters():
+ sanitized = sanitize_html("<p>&#x1d4b5; &#x1d538;</p>")
+ expected = '<p>\U0001d4b5 \U0001d538</p>'
+ assert expected == sanitized
+
+
+def test_should_allow_relative_uris():
+ sanitized = sanitize_html('<p><a href="/example.com"></a></p>')
+ expected = '<p><a href="/example.com"></a></p>'
+ assert expected == sanitized
+
+
+def test_invalid_data_uri():
+ sanitized = sanitize_html('<audio controls="" src="data:foobar"></audio>')
+ expected = '<audio controls></audio>'
+ assert expected == sanitized
+
+
+def test_invalid_ipv6_url():
+ sanitized = sanitize_html('<a href="h://]">')
+ expected = "<a></a>"
+ assert expected == sanitized
+
+
+def test_data_uri_disallowed_type():
+ sanitized = sanitize_html('<audio controls="" src="data:text/html,<html>"></audio>')
+ expected = "<audio controls></audio>"
+ assert expected == sanitized
+
+
+def param_sanitizer():
+ for ns, tag_name in sanitizer.allowed_elements:
+ if ns != constants.namespaces["html"]:
+ continue
+ if tag_name in ['caption', 'col', 'colgroup', 'optgroup', 'option', 'table', 'tbody', 'td',
+ 'tfoot', 'th', 'thead', 'tr', 'select']:
+ continue # TODO
+ if tag_name == 'image':
+ yield ("test_should_allow_%s_tag" % tag_name,
+ "<img title=\"1\"/>foo &lt;bad&gt;bar&lt;/bad&gt; baz",
+ "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name, tag_name))
+ elif tag_name == 'br':
+ yield ("test_should_allow_%s_tag" % tag_name,
+ "<br title=\"1\"/>foo &lt;bad&gt;bar&lt;/bad&gt; baz<br/>",
+ "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name, tag_name))
+ elif tag_name in constants.voidElements:
+ yield ("test_should_allow_%s_tag" % tag_name,
+ "<%s title=\"1\"/>foo &lt;bad&gt;bar&lt;/bad&gt; baz" % tag_name,
+ "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name, tag_name))
+ else:
+ yield ("test_should_allow_%s_tag" % tag_name,
+ "<%s title=\"1\">foo &lt;bad&gt;bar&lt;/bad&gt; baz</%s>" % (tag_name, tag_name),
+ "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name, tag_name))
+
+ for ns, attribute_name in sanitizer.allowed_attributes:
+ if ns is not None:
+ continue
+ if attribute_name != attribute_name.lower():
+ continue # TODO
+ if attribute_name == 'style':
+ continue
+ attribute_value = 'foo'
+ if attribute_name in sanitizer.attr_val_is_uri:
+ attribute_value = '%s://sub.domain.tld/path/object.ext' % sanitizer.allowed_protocols[0]
+ yield ("test_should_allow_%s_attribute" % attribute_name,
+ "<p %s=\"%s\">foo &lt;bad&gt;bar&lt;/bad&gt; baz</p>" % (attribute_name, attribute_value),
+ "<p %s='%s'>foo <bad>bar</bad> baz</p>" % (attribute_name, attribute_value))
+
+ for protocol in sanitizer.allowed_protocols:
+ rest_of_uri = '//sub.domain.tld/path/object.ext'
+ if protocol == 'data':
+ rest_of_uri = 'image/png;base64,aGVsbG8gd29ybGQ='
+ yield ("test_should_allow_uppercase_%s_uris" % protocol,
+ "<img src=\"%s:%s\">foo</a>" % (protocol, rest_of_uri),
+ """<img src="%s:%s">foo</a>""" % (protocol, rest_of_uri))
+
+ for protocol in sanitizer.allowed_protocols:
+ rest_of_uri = '//sub.domain.tld/path/object.ext'
+ if protocol == 'data':
+ rest_of_uri = 'image/png;base64,aGVsbG8gd29ybGQ='
+ protocol = protocol.upper()
+ yield ("test_should_allow_uppercase_%s_uris" % protocol,
+ "<img src=\"%s:%s\">foo</a>" % (protocol, rest_of_uri),
+ """<img src="%s:%s">foo</a>""" % (protocol, rest_of_uri))
+
+
+@pytest.mark.parametrize("expected, input",
+ (pytest.param(expected, input, id=id)
+ for id, expected, input in param_sanitizer()))
+def test_sanitizer(expected, input):
+ parsed = parseFragment(expected)
+ expected = serialize(parsed,
+ omit_optional_tags=False,
+ use_trailing_solidus=True,
+ space_before_trailing_solidus=False,
+ quote_attr_values="always",
+ quote_char='"',
+ alphabetical_attributes=True)
+ assert expected == sanitize_html(input)
+
+
+def test_lowercase_color_codes_in_style():
+ sanitized = sanitize_html("<p style=\"border: 1px solid #a2a2a2;\"></p>")
+ expected = '<p style=\"border: 1px solid #a2a2a2;\"></p>'
+ assert expected == sanitized
+
+
+def test_uppercase_color_codes_in_style():
+ sanitized = sanitize_html("<p style=\"border: 1px solid #A2A2A2;\"></p>")
+ expected = '<p style=\"border: 1px solid #A2A2A2;\"></p>'
+ assert expected == sanitized
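
The sanitize_html helper above drives the sanitizer through serialize(..., sanitize=True) and wraps the call in pytest.deprecated_call() because that path emits a deprecation warning. For comparison, a minimal sketch of applying the same sanitizer.Filter to a tree walker directly, assuming only public html5lib APIs already used in these tests:

    import html5lib
    from html5lib.filters.sanitizer import Filter as SanitizerFilter
    from html5lib.serializer import HTMLSerializer

    fragment = html5lib.parseFragment('<p onclick="evil()"><a href="/ok">ok</a></p>')
    walker = html5lib.getTreeWalker("etree")
    clean = SanitizerFilter(walker(fragment))  # drops disallowed tags and attributes
    # onclick is removed; the relative href survives (cf. test_should_allow_relative_uris).
    print(HTMLSerializer(omit_optional_tags=False).render(clean))
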
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_serializer.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_serializer.py
new file mode 100644
index 0000000000..bce6245905
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_serializer.py
@@ -0,0 +1,226 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import os
+import json
+
+import pytest
+
+from .support import get_data_files
+
+from html5lib import constants
+from html5lib.filters.lint import Filter as Lint
+from html5lib.serializer import HTMLSerializer, serialize
+from html5lib.treewalkers.base import TreeWalker
+
+# pylint:disable=wrong-import-position
+optionals_loaded = []
+
+try:
+ from lxml import etree
+ optionals_loaded.append("lxml")
+except ImportError:
+ pass
+# pylint:enable=wrong-import-position
+
+default_namespace = constants.namespaces["html"]
+
+
+class JsonWalker(TreeWalker):
+ def __iter__(self):
+ for token in self.tree:
+ type = token[0]
+ if type == "StartTag":
+ if len(token) == 4:
+ namespace, name, attrib = token[1:4]
+ else:
+ namespace = default_namespace
+ name, attrib = token[1:3]
+ yield self.startTag(namespace, name, self._convertAttrib(attrib))
+ elif type == "EndTag":
+ if len(token) == 3:
+ namespace, name = token[1:3]
+ else:
+ namespace = default_namespace
+ name = token[1]
+ yield self.endTag(namespace, name)
+ elif type == "EmptyTag":
+ if len(token) == 4:
+ namespace, name, attrib = token[1:]
+ else:
+ namespace = default_namespace
+ name, attrib = token[1:]
+ for token in self.emptyTag(namespace, name, self._convertAttrib(attrib)):
+ yield token
+ elif type == "Comment":
+ yield self.comment(token[1])
+ elif type in ("Characters", "SpaceCharacters"):
+ for token in self.text(token[1]):
+ yield token
+ elif type == "Doctype":
+ if len(token) == 4:
+ yield self.doctype(token[1], token[2], token[3])
+ elif len(token) == 3:
+ yield self.doctype(token[1], token[2])
+ else:
+ yield self.doctype(token[1])
+ else:
+ raise ValueError("Unknown token type: " + type)
+
+ def _convertAttrib(self, attribs):
+ """html5lib tree-walkers use a dict of (namespace, name): value for
+ attributes, but JSON cannot represent this. Convert from the format
+ in the serializer tests (a list of dicts with "namespace", "name",
+ and "value" as keys) to html5lib's tree-walker format."""
+ attrs = {}
+ for attrib in attribs:
+ name = (attrib["namespace"], attrib["name"])
+ assert(name not in attrs)
+ attrs[name] = attrib["value"]
+ return attrs
+
+
+def serialize_html(input, options):
+ options = {str(k): v for k, v in options.items()}
+ encoding = options.get("encoding", None)
+ if "encoding" in options:
+ del options["encoding"]
+ stream = Lint(JsonWalker(input), False)
+ serializer = HTMLSerializer(alphabetical_attributes=True, **options)
+ return serializer.render(stream, encoding)
+
+
+def throwsWithLatin1(input):
+ with pytest.raises(UnicodeEncodeError):
+ serialize_html(input, {"encoding": "iso-8859-1"})
+
+
+def testDoctypeName():
+ throwsWithLatin1([["Doctype", "\u0101"]])
+
+
+def testDoctypePublicId():
+ throwsWithLatin1([["Doctype", "potato", "\u0101"]])
+
+
+def testDoctypeSystemId():
+ throwsWithLatin1([["Doctype", "potato", "potato", "\u0101"]])
+
+
+def testCdataCharacters():
+ test_serializer([["StartTag", "http://www.w3.org/1999/xhtml", "style", {}], ["Characters", "\u0101"]],
+ ["<style>&amacr;"], {"encoding": "iso-8859-1"})
+
+
+def testCharacters():
+ test_serializer([["Characters", "\u0101"]],
+ ["&amacr;"], {"encoding": "iso-8859-1"})
+
+
+def testStartTagName():
+ throwsWithLatin1([["StartTag", "http://www.w3.org/1999/xhtml", "\u0101", []]])
+
+
+def testAttributeName():
+ throwsWithLatin1([["StartTag", "http://www.w3.org/1999/xhtml", "span", [{"namespace": None, "name": "\u0101", "value": "potato"}]]])
+
+
+def testAttributeValue():
+ test_serializer([["StartTag", "http://www.w3.org/1999/xhtml", "span",
+ [{"namespace": None, "name": "potato", "value": "\u0101"}]]],
+ ["<span potato=&amacr;>"], {"encoding": "iso-8859-1"})
+
+
+def testEndTagName():
+ throwsWithLatin1([["EndTag", "http://www.w3.org/1999/xhtml", "\u0101"]])
+
+
+def testComment():
+ throwsWithLatin1([["Comment", "\u0101"]])
+
+
+def testThrowsUnknownOption():
+ with pytest.raises(TypeError):
+ HTMLSerializer(foobar=None)
+
+
+@pytest.mark.parametrize("c", list("\t\n\u000C\x20\r\"'=<>`"))
+def testSpecQuoteAttribute(c):
+ input_ = [["StartTag", "http://www.w3.org/1999/xhtml", "span",
+ [{"namespace": None, "name": "foo", "value": c}]]]
+ if c == '"':
+ output_ = ["<span foo='%s'>" % c]
+ else:
+ output_ = ['<span foo="%s">' % c]
+ options_ = {"quote_attr_values": "spec"}
+ test_serializer(input_, output_, options_)
+
+
+@pytest.mark.parametrize("c", list("\t\n\u000C\x20\r\"'=<>`"
+ "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n"
+ "\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15"
+ "\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x2f\x60\xa0\u1680\u180e\u180f\u2000"
+ "\u2001\u2002\u2003\u2004\u2005\u2006\u2007"
+ "\u2008\u2009\u200a\u2028\u2029\u202f\u205f"
+ "\u3000"))
+def testLegacyQuoteAttribute(c):
+ input_ = [["StartTag", "http://www.w3.org/1999/xhtml", "span",
+ [{"namespace": None, "name": "foo", "value": c}]]]
+ if c == '"':
+ output_ = ["<span foo='%s'>" % c]
+ else:
+ output_ = ['<span foo="%s">' % c]
+ options_ = {"quote_attr_values": "legacy"}
+ test_serializer(input_, output_, options_)
+
+
+@pytest.fixture
+def lxml_parser():
+ return etree.XMLParser(resolve_entities=False)
+
+
+@pytest.mark.skipif("lxml" not in optionals_loaded, reason="lxml not importable")
+def testEntityReplacement(lxml_parser):
+ doc = '<!DOCTYPE html SYSTEM "about:legacy-compat"><html>&beta;</html>'
+ tree = etree.fromstring(doc, parser=lxml_parser).getroottree()
+ result = serialize(tree, tree="lxml", omit_optional_tags=False)
+ assert result == '<!DOCTYPE html SYSTEM "about:legacy-compat"><html>\u03B2</html>'
+
+
+@pytest.mark.skipif("lxml" not in optionals_loaded, reason="lxml not importable")
+def testEntityXML(lxml_parser):
+ doc = '<!DOCTYPE html SYSTEM "about:legacy-compat"><html>&gt;</html>'
+ tree = etree.fromstring(doc, parser=lxml_parser).getroottree()
+ result = serialize(tree, tree="lxml", omit_optional_tags=False)
+ assert result == '<!DOCTYPE html SYSTEM "about:legacy-compat"><html>&gt;</html>'
+
+
+@pytest.mark.skipif("lxml" not in optionals_loaded, reason="lxml not importable")
+def testEntityNoResolve(lxml_parser):
+ doc = '<!DOCTYPE html SYSTEM "about:legacy-compat"><html>&beta;</html>'
+ tree = etree.fromstring(doc, parser=lxml_parser).getroottree()
+ result = serialize(tree, tree="lxml", omit_optional_tags=False,
+ resolve_entities=False)
+ assert result == '<!DOCTYPE html SYSTEM "about:legacy-compat"><html>&beta;</html>'
+
+
+def param_serializer():
+ for filename in get_data_files('serializer-testdata', '*.test', os.path.dirname(__file__)):
+ with open(filename) as fp:
+ tests = json.load(fp)
+ for test in tests['tests']:
+ yield test["input"], test["expected"], test.get("options", {})
+
+
+@pytest.mark.parametrize("input, expected, options", param_serializer())
+def test_serializer(input, expected, options):
+ encoding = options.get("encoding", None)
+
+ if encoding:
+ expected = list(map(lambda x: x.encode(encoding), expected))
+
+ result = serialize_html(input, options)
+ if len(expected) == 1:
+ assert expected[0] == result, "Expected:\n%s\nActual:\n%s\nOptions:\n%s" % (expected[0], result, str(options))
+ elif result not in expected:
+ assert False, "Expected: %s, Received: %s" % (expected, result)
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_stream.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_stream.py
new file mode 100644
index 0000000000..efe9b472f5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_stream.py
@@ -0,0 +1,325 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from . import support # noqa
+
+import codecs
+import sys
+from io import BytesIO, StringIO
+
+import pytest
+
+import six
+from six.moves import http_client, urllib
+
+from html5lib._inputstream import (BufferedStream, HTMLInputStream,
+ HTMLUnicodeInputStream, HTMLBinaryInputStream)
+from html5lib._utils import supports_lone_surrogates
+
+
+def test_basic():
+ s = b"abc"
+ fp = BufferedStream(BytesIO(s))
+ read = fp.read(10)
+ assert read == s
+
+
+def test_read_length():
+ fp = BufferedStream(BytesIO(b"abcdef"))
+ read1 = fp.read(1)
+ assert read1 == b"a"
+ read2 = fp.read(2)
+ assert read2 == b"bc"
+ read3 = fp.read(3)
+ assert read3 == b"def"
+ read4 = fp.read(4)
+ assert read4 == b""
+
+
+def test_tell():
+ fp = BufferedStream(BytesIO(b"abcdef"))
+ read1 = fp.read(1)
+ assert read1 == b"a"
+ assert fp.tell() == 1
+ read2 = fp.read(2)
+ assert read2 == b"bc"
+ assert fp.tell() == 3
+ read3 = fp.read(3)
+ assert read3 == b"def"
+ assert fp.tell() == 6
+ read4 = fp.read(4)
+ assert read4 == b""
+ assert fp.tell() == 6
+
+
+def test_seek():
+ fp = BufferedStream(BytesIO(b"abcdef"))
+ read1 = fp.read(1)
+ assert read1 == b"a"
+ fp.seek(0)
+ read2 = fp.read(1)
+ assert read2 == b"a"
+ read3 = fp.read(2)
+ assert read3 == b"bc"
+ fp.seek(2)
+ read4 = fp.read(2)
+ assert read4 == b"cd"
+ fp.seek(4)
+ read5 = fp.read(2)
+ assert read5 == b"ef"
+
+
+def test_seek_tell():
+ fp = BufferedStream(BytesIO(b"abcdef"))
+ read1 = fp.read(1)
+ assert read1 == b"a"
+ assert fp.tell() == 1
+ fp.seek(0)
+ read2 = fp.read(1)
+ assert read2 == b"a"
+ assert fp.tell() == 1
+ read3 = fp.read(2)
+ assert read3 == b"bc"
+ assert fp.tell() == 3
+ fp.seek(2)
+ read4 = fp.read(2)
+ assert read4 == b"cd"
+ assert fp.tell() == 4
+ fp.seek(4)
+ read5 = fp.read(2)
+ assert read5 == b"ef"
+ assert fp.tell() == 6
+
+
+class HTMLUnicodeInputStreamShortChunk(HTMLUnicodeInputStream):
+ _defaultChunkSize = 2
+
+
+class HTMLBinaryInputStreamShortChunk(HTMLBinaryInputStream):
+ _defaultChunkSize = 2
+
+
+def test_char_ascii():
+ stream = HTMLInputStream(b"'", override_encoding='ascii')
+ assert stream.charEncoding[0].name == 'windows-1252'
+ assert stream.char() == "'"
+
+
+def test_char_utf8():
+ stream = HTMLInputStream('\u2018'.encode('utf-8'), override_encoding='utf-8')
+ assert stream.charEncoding[0].name == 'utf-8'
+ assert stream.char() == '\u2018'
+
+
+def test_char_win1252():
+ stream = HTMLInputStream("\xa9\xf1\u2019".encode('windows-1252'))
+ assert stream.charEncoding[0].name == 'windows-1252'
+ assert stream.char() == "\xa9"
+ assert stream.char() == "\xf1"
+ assert stream.char() == "\u2019"
+
+
+def test_bom():
+ stream = HTMLInputStream(codecs.BOM_UTF8 + b"'")
+ assert stream.charEncoding[0].name == 'utf-8'
+ assert stream.char() == "'"
+
+
+def test_utf_16():
+ stream = HTMLInputStream((' ' * 1025).encode('utf-16'))
+ assert stream.charEncoding[0].name in ['utf-16le', 'utf-16be']
+ assert len(stream.charsUntil(' ', True)) == 1025
+
+
+def test_newlines():
+ stream = HTMLBinaryInputStreamShortChunk(codecs.BOM_UTF8 + b"a\nbb\r\nccc\rddddxe")
+ assert stream.position() == (1, 0)
+ assert stream.charsUntil('c') == "a\nbb\n"
+ assert stream.position() == (3, 0)
+ assert stream.charsUntil('x') == "ccc\ndddd"
+ assert stream.position() == (4, 4)
+ assert stream.charsUntil('e') == "x"
+ assert stream.position() == (4, 5)
+
+
+def test_newlines2():
+ size = HTMLUnicodeInputStream._defaultChunkSize
+ stream = HTMLInputStream("\r" * size + "\n")
+ assert stream.charsUntil('x') == "\n" * size
+
+
+def test_position():
+ stream = HTMLBinaryInputStreamShortChunk(codecs.BOM_UTF8 + b"a\nbb\nccc\nddde\nf\ngh")
+ assert stream.position() == (1, 0)
+ assert stream.charsUntil('c') == "a\nbb\n"
+ assert stream.position() == (3, 0)
+ stream.unget("\n")
+ assert stream.position() == (2, 2)
+ assert stream.charsUntil('c') == "\n"
+ assert stream.position() == (3, 0)
+ stream.unget("\n")
+ assert stream.position() == (2, 2)
+ assert stream.char() == "\n"
+ assert stream.position() == (3, 0)
+ assert stream.charsUntil('e') == "ccc\nddd"
+ assert stream.position() == (4, 3)
+ assert stream.charsUntil('h') == "e\nf\ng"
+ assert stream.position() == (6, 1)
+
+
+def test_position2():
+ stream = HTMLUnicodeInputStreamShortChunk("abc\nd")
+ assert stream.position() == (1, 0)
+ assert stream.char() == "a"
+ assert stream.position() == (1, 1)
+ assert stream.char() == "b"
+ assert stream.position() == (1, 2)
+ assert stream.char() == "c"
+ assert stream.position() == (1, 3)
+ assert stream.char() == "\n"
+ assert stream.position() == (2, 0)
+ assert stream.char() == "d"
+ assert stream.position() == (2, 1)
+
+
+def test_python_issue_20007():
+ """
+ Make sure we have a work-around for Python bug #20007
+ http://bugs.python.org/issue20007
+ """
+ class FakeSocket(object):
+ def makefile(self, _mode, _bufsize=None):
+ # pylint:disable=unused-argument
+ return BytesIO(b"HTTP/1.1 200 Ok\r\n\r\nText")
+
+ source = http_client.HTTPResponse(FakeSocket())
+ source.begin()
+ stream = HTMLInputStream(source)
+ assert stream.charsUntil(" ") == "Text"
+
+
+def test_python_issue_20007_b():
+ """
+ Make sure we have a work-around for Python bug #20007
+ http://bugs.python.org/issue20007
+ """
+ if six.PY2:
+ return
+
+ class FakeSocket(object):
+ def makefile(self, _mode, _bufsize=None):
+ # pylint:disable=unused-argument
+ return BytesIO(b"HTTP/1.1 200 Ok\r\n\r\nText")
+
+ source = http_client.HTTPResponse(FakeSocket())
+ source.begin()
+ wrapped = urllib.response.addinfourl(source, source.msg, "http://example.com")
+ stream = HTMLInputStream(wrapped)
+ assert stream.charsUntil(" ") == "Text"
+
+
+@pytest.mark.parametrize("inp,num",
+ [("\u0000", 0),
+ ("\u0001", 1),
+ ("\u0008", 1),
+ ("\u0009", 0),
+ ("\u000A", 0),
+ ("\u000B", 1),
+ ("\u000C", 0),
+ ("\u000D", 0),
+ ("\u000E", 1),
+ ("\u001F", 1),
+ ("\u0020", 0),
+ ("\u007E", 0),
+ ("\u007F", 1),
+ ("\u009F", 1),
+ ("\u00A0", 0),
+ ("\uFDCF", 0),
+ ("\uFDD0", 1),
+ ("\uFDEF", 1),
+ ("\uFDF0", 0),
+ ("\uFFFD", 0),
+ ("\uFFFE", 1),
+ ("\uFFFF", 1),
+ ("\U0001FFFD", 0),
+ ("\U0001FFFE", 1),
+ ("\U0001FFFF", 1),
+ ("\U0002FFFD", 0),
+ ("\U0002FFFE", 1),
+ ("\U0002FFFF", 1),
+ ("\U0003FFFD", 0),
+ ("\U0003FFFE", 1),
+ ("\U0003FFFF", 1),
+ ("\U0004FFFD", 0),
+ ("\U0004FFFE", 1),
+ ("\U0004FFFF", 1),
+ ("\U0005FFFD", 0),
+ ("\U0005FFFE", 1),
+ ("\U0005FFFF", 1),
+ ("\U0006FFFD", 0),
+ ("\U0006FFFE", 1),
+ ("\U0006FFFF", 1),
+ ("\U0007FFFD", 0),
+ ("\U0007FFFE", 1),
+ ("\U0007FFFF", 1),
+ ("\U0008FFFD", 0),
+ ("\U0008FFFE", 1),
+ ("\U0008FFFF", 1),
+ ("\U0009FFFD", 0),
+ ("\U0009FFFE", 1),
+ ("\U0009FFFF", 1),
+ ("\U000AFFFD", 0),
+ ("\U000AFFFE", 1),
+ ("\U000AFFFF", 1),
+ ("\U000BFFFD", 0),
+ ("\U000BFFFE", 1),
+ ("\U000BFFFF", 1),
+ ("\U000CFFFD", 0),
+ ("\U000CFFFE", 1),
+ ("\U000CFFFF", 1),
+ ("\U000DFFFD", 0),
+ ("\U000DFFFE", 1),
+ ("\U000DFFFF", 1),
+ ("\U000EFFFD", 0),
+ ("\U000EFFFE", 1),
+ ("\U000EFFFF", 1),
+ ("\U000FFFFD", 0),
+ ("\U000FFFFE", 1),
+ ("\U000FFFFF", 1),
+ ("\U0010FFFD", 0),
+ ("\U0010FFFE", 1),
+ ("\U0010FFFF", 1),
+ ("\x01\x01\x01", 3),
+ ("a\x01a\x01a\x01a", 3)])
+def test_invalid_codepoints(inp, num):
+ stream = HTMLUnicodeInputStream(StringIO(inp))
+ for _i in range(len(inp)):
+ stream.char()
+ assert len(stream.errors) == num
+
+
+@pytest.mark.skipif(not supports_lone_surrogates, reason="doesn't support lone surrogates")
+@pytest.mark.parametrize("inp,num",
+ [("'\\uD7FF'", 0),
+ ("'\\uD800'", 1),
+ ("'\\uDBFF'", 1),
+ ("'\\uDC00'", 1),
+ ("'\\uDFFF'", 1),
+ ("'\\uE000'", 0),
+ ("'\\uD800\\uD800\\uD800'", 3),
+ ("'a\\uD800a\\uD800a\\uD800a'", 3),
+ ("'\\uDFFF\\uDBFF'", 2),
+ pytest.param(
+ "'\\uDBFF\\uDFFF'", 2,
+ marks=pytest.mark.skipif(
+ sys.maxunicode == 0xFFFF,
+ reason="narrow Python"))])
+def test_invalid_codepoints_surrogates(inp, num):
+ inp = eval(inp) # pylint:disable=eval-used
+ fp = StringIO(inp)
+ if ord(max(fp.read())) > 0xFFFF:
+ pytest.skip("StringIO altered string")
+ fp.seek(0)
+ stream = HTMLUnicodeInputStream(fp)
+ for _i in range(len(inp)):
+ stream.char()
+ assert len(stream.errors) == num
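
Taken together, the encoding tests above pin down how HTMLInputStream picks its character encoding: a UTF-8 byte-order mark fixes the encoding, and an override of "ascii" is looked up as its WHATWG superset windows-1252. A short sketch of the same behaviour outside the test harness (the byte strings are arbitrary examples):

    import codecs
    from html5lib._inputstream import HTMLInputStream

    # A UTF-8 BOM is consumed and fixes the encoding (cf. test_bom).
    stream = HTMLInputStream(codecs.BOM_UTF8 + b"hi")
    print(stream.charEncoding[0].name)    # utf-8
    print(stream.char() + stream.char())  # hi

    # An override of "ascii" is looked up as windows-1252 (cf. test_char_ascii).
    stream = HTMLInputStream(b"hi", override_encoding="ascii")
    print(stream.charEncoding[0].name)    # windows-1252
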
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_tokenizer2.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_tokenizer2.py
new file mode 100644
index 0000000000..158d847a26
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_tokenizer2.py
@@ -0,0 +1,66 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import io
+
+from six import unichr, text_type
+
+from html5lib._tokenizer import HTMLTokenizer
+from html5lib.constants import tokenTypes
+
+
+def ignore_parse_errors(toks):
+ for tok in toks:
+ if tok['type'] != tokenTypes['ParseError']:
+ yield tok
+
+
+def test_maintain_attribute_order():
+ # generate lots of attributes to maximize the chance a hash-based mutation will occur
+ attrs = [(unichr(x), text_type(i)) for i, x in enumerate(range(ord('a'), ord('z')))]
+ stream = io.StringIO("<span " + " ".join("%s='%s'" % (x, i) for x, i in attrs) + ">")
+
+ toks = HTMLTokenizer(stream)
+ out = list(ignore_parse_errors(toks))
+
+ assert len(out) == 1
+ assert out[0]['type'] == tokenTypes['StartTag']
+
+ attrs_tok = out[0]['data']
+ assert len(attrs_tok) == len(attrs)
+
+ for (in_name, in_value), (out_name, out_value) in zip(attrs, attrs_tok.items()):
+ assert in_name == out_name
+ assert in_value == out_value
+
+
+def test_duplicate_attribute():
+ stream = io.StringIO("<span a=1 a=2 a=3>")
+
+ toks = HTMLTokenizer(stream)
+ out = list(ignore_parse_errors(toks))
+
+ assert len(out) == 1
+ assert out[0]['type'] == tokenTypes['StartTag']
+
+ attrs_tok = out[0]['data']
+ assert len(attrs_tok) == 1
+ assert list(attrs_tok.items()) == [('a', '1')]
+
+
+def test_maintain_duplicate_attribute_order():
+ # generate lots of attributes to maximize the chance a hash-based mutation will occur
+ attrs = [(unichr(x), text_type(i)) for i, x in enumerate(range(ord('a'), ord('z')))]
+ stream = io.StringIO("<span " + " ".join("%s='%s'" % (x, i) for x, i in attrs) + " a=100>")
+
+ toks = HTMLTokenizer(stream)
+ out = list(ignore_parse_errors(toks))
+
+ assert len(out) == 1
+ assert out[0]['type'] == tokenTypes['StartTag']
+
+ attrs_tok = out[0]['data']
+ assert len(attrs_tok) == len(attrs)
+
+ for (in_name, in_value), (out_name, out_value) in zip(attrs, attrs_tok.items()):
+ assert in_name == out_name
+ assert in_value == out_value
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_treeadapters.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_treeadapters.py
new file mode 100644
index 0000000000..95e56c00c9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_treeadapters.py
@@ -0,0 +1,40 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from . import support # noqa
+
+import html5lib
+from html5lib.treeadapters import sax
+from html5lib.treewalkers import getTreeWalker
+
+
+def test_to_sax():
+ handler = support.TracingSaxHandler()
+ tree = html5lib.parse("""<html xml:lang="en">
+ <title>Directory Listing</title>
+ <a href="/"><b/></p>
+ """, treebuilder="etree")
+ walker = getTreeWalker("etree")
+ sax.to_sax(walker(tree), handler)
+ expected = [
+ 'startDocument',
+ ('startElementNS', ('http://www.w3.org/1999/xhtml', 'html'),
+ 'html', {(None, 'xml:lang'): 'en'}),
+ ('startElementNS', ('http://www.w3.org/1999/xhtml', 'head'), 'head', {}),
+ ('startElementNS', ('http://www.w3.org/1999/xhtml', 'title'), 'title', {}),
+ ('characters', 'Directory Listing'),
+ ('endElementNS', ('http://www.w3.org/1999/xhtml', 'title'), 'title'),
+ ('characters', '\n '),
+ ('endElementNS', ('http://www.w3.org/1999/xhtml', 'head'), 'head'),
+ ('startElementNS', ('http://www.w3.org/1999/xhtml', 'body'), 'body', {}),
+ ('startElementNS', ('http://www.w3.org/1999/xhtml', 'a'), 'a', {(None, 'href'): '/'}),
+ ('startElementNS', ('http://www.w3.org/1999/xhtml', 'b'), 'b', {}),
+ ('startElementNS', ('http://www.w3.org/1999/xhtml', 'p'), 'p', {}),
+ ('endElementNS', ('http://www.w3.org/1999/xhtml', 'p'), 'p'),
+ ('characters', '\n '),
+ ('endElementNS', ('http://www.w3.org/1999/xhtml', 'b'), 'b'),
+ ('endElementNS', ('http://www.w3.org/1999/xhtml', 'a'), 'a'),
+ ('endElementNS', ('http://www.w3.org/1999/xhtml', 'body'), 'body'),
+ ('endElementNS', ('http://www.w3.org/1999/xhtml', 'html'), 'html'),
+ 'endDocument',
+ ]
+ assert expected == handler.visited
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_treewalkers.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_treewalkers.py
new file mode 100644
index 0000000000..780ca964ba
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_treewalkers.py
@@ -0,0 +1,205 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import itertools
+import sys
+
+from six import unichr, text_type
+import pytest
+
+try:
+ import lxml.etree
+except ImportError:
+ pass
+
+from .support import treeTypes
+
+from html5lib import html5parser, treewalkers
+from html5lib.filters.lint import Filter as Lint
+
+import re
+attrlist = re.compile(r"^(\s+)\w+=.*(\n\1\w+=.*)+", re.M)
+
+
+def sortattrs(x):
+ lines = x.group(0).split("\n")
+ lines.sort()
+ return "\n".join(lines)
+
+
+def test_all_tokens():
+ expected = [
+ {'data': {}, 'type': 'StartTag', 'namespace': 'http://www.w3.org/1999/xhtml', 'name': 'html'},
+ {'data': {}, 'type': 'StartTag', 'namespace': 'http://www.w3.org/1999/xhtml', 'name': 'head'},
+ {'type': 'EndTag', 'namespace': 'http://www.w3.org/1999/xhtml', 'name': 'head'},
+ {'data': {}, 'type': 'StartTag', 'namespace': 'http://www.w3.org/1999/xhtml', 'name': 'body'},
+ {'data': 'a', 'type': 'Characters'},
+ {'data': {}, 'type': 'StartTag', 'namespace': 'http://www.w3.org/1999/xhtml', 'name': 'div'},
+ {'data': 'b', 'type': 'Characters'},
+ {'type': 'EndTag', 'namespace': 'http://www.w3.org/1999/xhtml', 'name': 'div'},
+ {'data': 'c', 'type': 'Characters'},
+ {'type': 'EndTag', 'namespace': 'http://www.w3.org/1999/xhtml', 'name': 'body'},
+ {'type': 'EndTag', 'namespace': 'http://www.w3.org/1999/xhtml', 'name': 'html'}
+ ]
+ for _, treeCls in sorted(treeTypes.items()):
+ if treeCls is None:
+ continue
+ p = html5parser.HTMLParser(tree=treeCls["builder"])
+ document = p.parse("<html><head></head><body>a<div>b</div>c</body></html>")
+ document = treeCls.get("adapter", lambda x: x)(document)
+ output = Lint(treeCls["walker"](document))
+ for expectedToken, outputToken in zip(expected, output):
+ assert expectedToken == outputToken
+
+
+def set_attribute_on_first_child(docfrag, name, value, treeName):
+ """naively sets an attribute on the first child of the document
+ fragment passed in"""
+ setter = {'ElementTree': lambda d: d[0].set,
+ 'DOM': lambda d: d.firstChild.setAttribute}
+ setter['cElementTree'] = setter['ElementTree']
+ try:
+ setter.get(treeName, setter['DOM'])(docfrag)(name, value)
+ except AttributeError:
+ setter['ElementTree'](docfrag)(name, value)
+
+
+def param_treewalker_six_mix():
+ """Str/Unicode mix. If str attrs added to tree"""
+
+ # On Python 2.x string literals are of type str. Unless, like this
+ # file, the programmer imports unicode_literals from __future__.
+ # In that case, string literals become objects of type unicode.
+
+ # This test simulates a Py2 user, modifying attributes on a document
+ # fragment but not using the u'' syntax nor importing unicode_literals
+ sm_tests = [
+ ('<a href="http://example.com">Example</a>',
+ [(str('class'), str('test123'))],
+ '<a>\n class="test123"\n href="http://example.com"\n "Example"'),
+
+ ('<link href="http://example.com/cow">',
+ [(str('rel'), str('alternate'))],
+ '<link>\n href="http://example.com/cow"\n rel="alternate"\n "Example"')
+ ]
+
+ for tree in sorted(treeTypes.items()):
+ for intext, attrs, expected in sm_tests:
+ yield intext, expected, attrs, tree
+
+
+@pytest.mark.parametrize("intext, expected, attrs_to_add, tree", param_treewalker_six_mix())
+def test_treewalker_six_mix(intext, expected, attrs_to_add, tree):
+ """tests what happens when we add attributes to the intext"""
+ treeName, treeClass = tree
+ if treeClass is None:
+ pytest.skip("Treebuilder not loaded")
+ parser = html5parser.HTMLParser(tree=treeClass["builder"])
+ document = parser.parseFragment(intext)
+ for nom, val in attrs_to_add:
+ set_attribute_on_first_child(document, nom, val, treeName)
+
+ document = treeClass.get("adapter", lambda x: x)(document)
+ output = treewalkers.pprint(treeClass["walker"](document))
+ output = attrlist.sub(sortattrs, output)
+ if output not in expected:
+ raise AssertionError("TreewalkerEditTest: %s\nExpected:\n%s\nReceived:\n%s" % (treeName, expected, output))
+
+
+@pytest.mark.parametrize("tree,char", itertools.product(sorted(treeTypes.items()), ["x", "\u1234"]))
+def test_fragment_single_char(tree, char):
+ expected = [
+ {'data': char, 'type': 'Characters'}
+ ]
+
+ treeName, treeClass = tree
+ if treeClass is None:
+ pytest.skip("Treebuilder not loaded")
+
+ parser = html5parser.HTMLParser(tree=treeClass["builder"])
+ document = parser.parseFragment(char)
+ document = treeClass.get("adapter", lambda x: x)(document)
+ output = Lint(treeClass["walker"](document))
+
+ assert list(output) == expected
+
+
+@pytest.mark.skipif(treeTypes["lxml"] is None, reason="lxml not importable")
+def test_lxml_xml():
+ expected = [
+ {'data': {}, 'name': 'div', 'namespace': None, 'type': 'StartTag'},
+ {'data': {}, 'name': 'div', 'namespace': None, 'type': 'StartTag'},
+ {'name': 'div', 'namespace': None, 'type': 'EndTag'},
+ {'name': 'div', 'namespace': None, 'type': 'EndTag'}
+ ]
+
+ lxmltree = lxml.etree.fromstring('<div><div></div></div>')
+ walker = treewalkers.getTreeWalker('lxml')
+ output = Lint(walker(lxmltree))
+
+ assert list(output) == expected
+
+
+@pytest.mark.parametrize("treeName",
+ [pytest.param(treeName, marks=[getattr(pytest.mark, treeName),
+ pytest.mark.skipif(
+ treeName != "lxml" or
+ sys.version_info < (3, 7), reason="dict order undef")])
+ for treeName in sorted(treeTypes.keys())])
+def test_maintain_attribute_order(treeName):
+ treeAPIs = treeTypes[treeName]
+ if treeAPIs is None:
+ pytest.skip("Treebuilder not loaded")
+
+ # generate lots of attributes to maximize the chance a hash-based mutation will occur
+ attrs = [(unichr(x), text_type(i)) for i, x in enumerate(range(ord('a'), ord('z')))]
+ data = "<span " + " ".join("%s='%s'" % (x, i) for x, i in attrs) + ">"
+
+ parser = html5parser.HTMLParser(tree=treeAPIs["builder"])
+ document = parser.parseFragment(data)
+
+ document = treeAPIs.get("adapter", lambda x: x)(document)
+ output = list(Lint(treeAPIs["walker"](document)))
+
+ assert len(output) == 2
+ assert output[0]['type'] == 'StartTag'
+ assert output[1]['type'] == "EndTag"
+
+ attrs_out = output[0]['data']
+ assert len(attrs) == len(attrs_out)
+
+ for (in_name, in_value), (out_name, out_value) in zip(attrs, attrs_out.items()):
+ assert (None, in_name) == out_name
+ assert in_value == out_value
+
+
+@pytest.mark.parametrize("treeName",
+ [pytest.param(treeName, marks=[getattr(pytest.mark, treeName),
+ pytest.mark.skipif(
+ treeName != "lxml" or
+ sys.version_info < (3, 7), reason="dict order undef")])
+ for treeName in sorted(treeTypes.keys())])
+def test_maintain_attribute_order_adjusted(treeName):
+ treeAPIs = treeTypes[treeName]
+ if treeAPIs is None:
+ pytest.skip("Treebuilder not loaded")
+
+ # use several attributes to maximize the chance a hash-based mutation will occur
+ data = "<svg a=1 refx=2 b=3 xml:lang=4 c=5>"
+
+ parser = html5parser.HTMLParser(tree=treeAPIs["builder"])
+ document = parser.parseFragment(data)
+
+ document = treeAPIs.get("adapter", lambda x: x)(document)
+ output = list(Lint(treeAPIs["walker"](document)))
+
+ assert len(output) == 2
+ assert output[0]['type'] == 'StartTag'
+ assert output[1]['type'] == "EndTag"
+
+ attrs_out = output[0]['data']
+
+ assert list(attrs_out.items()) == [((None, 'a'), '1'),
+ ((None, 'refX'), '2'),
+ ((None, 'b'), '3'),
+ (('http://www.w3.org/XML/1998/namespace', 'lang'), '4'),
+ ((None, 'c'), '5')]
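
Several assertions above compare against treewalkers.pprint output after normalising attribute order with the attrlist regular expression and sortattrs; a minimal standalone sketch of that normalisation, reusing the same pattern:

    import re

    import html5lib
    from html5lib import treewalkers

    attrlist = re.compile(r"^(\s+)\w+=.*(\n\1\w+=.*)+", re.M)

    def sortattrs(match):
        lines = match.group(0).split("\n")
        lines.sort()
        return "\n".join(lines)

    fragment = html5lib.parseFragment('<a href="http://example.com" class="test123">Example</a>')
    walker = treewalkers.getTreeWalker("etree")
    print(attrlist.sub(sortattrs, treewalkers.pprint(walker(fragment))))
    # Attribute lines come out alphabetically, matching the expected strings above:
    #   <a>
    #     class="test123"
    #     href="http://example.com"
    #     "Example"
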
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_whitespace_filter.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_whitespace_filter.py
new file mode 100644
index 0000000000..e9da6140a9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/test_whitespace_filter.py
@@ -0,0 +1,125 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from html5lib.filters.whitespace import Filter
+from html5lib.constants import spaceCharacters
+spaceCharacters = "".join(spaceCharacters)
+
+
+def runTest(input, expected):
+ output = list(Filter(input))
+ errorMsg = "\n".join(["\n\nInput:", str(input),
+ "\nExpected:", str(expected),
+ "\nReceived:", str(output)])
+ assert expected == output, errorMsg
+
+
+def runTestUnmodifiedOutput(input):
+ runTest(input, input)
+
+
+def testPhrasingElements():
+ runTestUnmodifiedOutput(
+ [{"type": "Characters", "data": "This is a "},
+ {"type": "StartTag", "name": "span", "data": []},
+ {"type": "Characters", "data": "phrase"},
+ {"type": "EndTag", "name": "span", "data": []},
+ {"type": "SpaceCharacters", "data": " "},
+ {"type": "Characters", "data": "with"},
+ {"type": "SpaceCharacters", "data": " "},
+ {"type": "StartTag", "name": "em", "data": []},
+ {"type": "Characters", "data": "emphasised text"},
+ {"type": "EndTag", "name": "em", "data": []},
+ {"type": "Characters", "data": " and an "},
+ {"type": "StartTag", "name": "img", "data": [["alt", "image"]]},
+ {"type": "Characters", "data": "."}])
+
+
+def testLeadingWhitespace():
+ runTest(
+ [{"type": "StartTag", "name": "p", "data": []},
+ {"type": "SpaceCharacters", "data": spaceCharacters},
+ {"type": "Characters", "data": "foo"},
+ {"type": "EndTag", "name": "p", "data": []}],
+ [{"type": "StartTag", "name": "p", "data": []},
+ {"type": "SpaceCharacters", "data": " "},
+ {"type": "Characters", "data": "foo"},
+ {"type": "EndTag", "name": "p", "data": []}])
+
+
+def testLeadingWhitespaceAsCharacters():
+ runTest(
+ [{"type": "StartTag", "name": "p", "data": []},
+ {"type": "Characters", "data": spaceCharacters + "foo"},
+ {"type": "EndTag", "name": "p", "data": []}],
+ [{"type": "StartTag", "name": "p", "data": []},
+ {"type": "Characters", "data": " foo"},
+ {"type": "EndTag", "name": "p", "data": []}])
+
+
+def testTrailingWhitespace():
+ runTest(
+ [{"type": "StartTag", "name": "p", "data": []},
+ {"type": "Characters", "data": "foo"},
+ {"type": "SpaceCharacters", "data": spaceCharacters},
+ {"type": "EndTag", "name": "p", "data": []}],
+ [{"type": "StartTag", "name": "p", "data": []},
+ {"type": "Characters", "data": "foo"},
+ {"type": "SpaceCharacters", "data": " "},
+ {"type": "EndTag", "name": "p", "data": []}])
+
+
+def testTrailingWhitespaceAsCharacters():
+ runTest(
+ [{"type": "StartTag", "name": "p", "data": []},
+ {"type": "Characters", "data": "foo" + spaceCharacters},
+ {"type": "EndTag", "name": "p", "data": []}],
+ [{"type": "StartTag", "name": "p", "data": []},
+ {"type": "Characters", "data": "foo "},
+ {"type": "EndTag", "name": "p", "data": []}])
+
+
+def testWhitespace():
+ runTest(
+ [{"type": "StartTag", "name": "p", "data": []},
+ {"type": "Characters", "data": "foo" + spaceCharacters + "bar"},
+ {"type": "EndTag", "name": "p", "data": []}],
+ [{"type": "StartTag", "name": "p", "data": []},
+ {"type": "Characters", "data": "foo bar"},
+ {"type": "EndTag", "name": "p", "data": []}])
+
+
+def testLeadingWhitespaceInPre():
+ runTestUnmodifiedOutput(
+ [{"type": "StartTag", "name": "pre", "data": []},
+ {"type": "SpaceCharacters", "data": spaceCharacters},
+ {"type": "Characters", "data": "foo"},
+ {"type": "EndTag", "name": "pre", "data": []}])
+
+
+def testLeadingWhitespaceAsCharactersInPre():
+ runTestUnmodifiedOutput(
+ [{"type": "StartTag", "name": "pre", "data": []},
+ {"type": "Characters", "data": spaceCharacters + "foo"},
+ {"type": "EndTag", "name": "pre", "data": []}])
+
+
+def testTrailingWhitespaceInPre():
+ runTestUnmodifiedOutput(
+ [{"type": "StartTag", "name": "pre", "data": []},
+ {"type": "Characters", "data": "foo"},
+ {"type": "SpaceCharacters", "data": spaceCharacters},
+ {"type": "EndTag", "name": "pre", "data": []}])
+
+
+def testTrailingWhitespaceAsCharactersInPre():
+ runTestUnmodifiedOutput(
+ [{"type": "StartTag", "name": "pre", "data": []},
+ {"type": "Characters", "data": "foo" + spaceCharacters},
+ {"type": "EndTag", "name": "pre", "data": []}])
+
+
+def testWhitespaceInPre():
+ runTestUnmodifiedOutput(
+ [{"type": "StartTag", "name": "pre", "data": []},
+ {"type": "Characters", "data": "foo" + spaceCharacters + "bar"},
+ {"type": "EndTag", "name": "pre", "data": []}])
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/tokenizer.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/tokenizer.py
new file mode 100644
index 0000000000..47264cc325
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/tokenizer.py
@@ -0,0 +1,253 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import codecs
+import json
+import warnings
+import re
+
+import pytest
+from six import unichr
+
+from html5lib._tokenizer import HTMLTokenizer
+from html5lib import constants, _utils
+
+
+class TokenizerTestParser(object):
+ def __init__(self, initialState, lastStartTag=None):
+ self.tokenizer = HTMLTokenizer
+ self._state = initialState
+ self._lastStartTag = lastStartTag
+
+ def parse(self, stream, encoding=None, innerHTML=False):
+ # pylint:disable=unused-argument
+ tokenizer = self.tokenizer(stream, encoding)
+ self.outputTokens = []
+
+ tokenizer.state = getattr(tokenizer, self._state)
+ if self._lastStartTag is not None:
+ tokenizer.currentToken = {"type": "startTag",
+ "name": self._lastStartTag}
+
+ types = {v: k for k, v in constants.tokenTypes.items()}
+ for token in tokenizer:
+ getattr(self, 'process%s' % types[token["type"]])(token)
+
+ return self.outputTokens
+
+ def processDoctype(self, token):
+ self.outputTokens.append(["DOCTYPE", token["name"], token["publicId"],
+ token["systemId"], token["correct"]])
+
+ def processStartTag(self, token):
+ self.outputTokens.append(["StartTag", token["name"],
+ token["data"], token["selfClosing"]])
+
+ def processEmptyTag(self, token):
+ if token["name"] not in constants.voidElements:
+ self.outputTokens.append("ParseError")
+ self.outputTokens.append(["StartTag", token["name"], dict(token["data"][::-1])])
+
+ def processEndTag(self, token):
+ self.outputTokens.append(["EndTag", token["name"],
+ token["selfClosing"]])
+
+ def processComment(self, token):
+ self.outputTokens.append(["Comment", token["data"]])
+
+ def processSpaceCharacters(self, token):
+ self.outputTokens.append(["Character", token["data"]])
+ self.processSpaceCharacters = self.processCharacters
+
+ def processCharacters(self, token):
+ self.outputTokens.append(["Character", token["data"]])
+
+ def processEOF(self, token):
+ pass
+
+ def processParseError(self, token):
+ self.outputTokens.append(["ParseError", token["data"]])
+
+
+def concatenateCharacterTokens(tokens):
+ outputTokens = []
+ for token in tokens:
+ if "ParseError" not in token and token[0] == "Character":
+ if (outputTokens and "ParseError" not in outputTokens[-1] and
+ outputTokens[-1][0] == "Character"):
+ outputTokens[-1][1] += token[1]
+ else:
+ outputTokens.append(token)
+ else:
+ outputTokens.append(token)
+ return outputTokens
+
+
+def normalizeTokens(tokens):
+ # TODO: convert tests to reflect arrays
+ for i, token in enumerate(tokens):
+ if token[0] == 'ParseError':
+ tokens[i] = token[0]
+ return tokens
+
+
+def tokensMatch(expectedTokens, receivedTokens, ignoreErrorOrder,
+ ignoreErrors=False):
+ """Test whether the test has passed or failed
+
+ If the ignoreErrorOrder flag is set to true, we don't test the relative
+ positions of parse errors and non-parse errors.
+ """
+ checkSelfClosing = False
+ for token in expectedTokens:
+ if (token[0] == "StartTag" and len(token) == 4 or
+ token[0] == "EndTag" and len(token) == 3):
+ checkSelfClosing = True
+ break
+
+ if not checkSelfClosing:
+ for token in receivedTokens:
+ if token[0] == "StartTag" or token[0] == "EndTag":
+ token.pop()
+
+ if not ignoreErrorOrder and not ignoreErrors:
+ expectedTokens = concatenateCharacterTokens(expectedTokens)
+ return expectedTokens == receivedTokens
+ else:
+ # Sort the tokens into two groups; non-parse errors and parse errors
+ tokens = {"expected": [[], []], "received": [[], []]}
+ for tokenType, tokenList in zip(list(tokens.keys()),
+ (expectedTokens, receivedTokens)):
+ for token in tokenList:
+ if token != "ParseError":
+ tokens[tokenType][0].append(token)
+ else:
+ if not ignoreErrors:
+ tokens[tokenType][1].append(token)
+ tokens[tokenType][0] = concatenateCharacterTokens(tokens[tokenType][0])
+ return tokens["expected"] == tokens["received"]
+
+
+_surrogateRe = re.compile(r"\\u([0-9A-Fa-f]{4})(?:\\u([0-9A-Fa-f]{4}))?")
+
+
+def unescape(test):
+ def decode(inp):
+ """Decode \\uXXXX escapes
+
+ This decodes \\uXXXX escapes, possibly into non-BMP characters when
+ two surrogate character escapes are adjacent to each other.
+ """
+ # This cannot be implemented using the unicode_escape codec
+ # because that requires its input be ISO-8859-1, and we need
+ # arbitrary unicode as input.
+ def repl(m):
+ if m.group(2) is not None:
+ high = int(m.group(1), 16)
+ low = int(m.group(2), 16)
+ if 0xD800 <= high <= 0xDBFF and 0xDC00 <= low <= 0xDFFF:
+ cp = ((high - 0xD800) << 10) + (low - 0xDC00) + 0x10000
+ return unichr(cp)
+ else:
+ return unichr(high) + unichr(low)
+ else:
+ return unichr(int(m.group(1), 16))
+ try:
+ return _surrogateRe.sub(repl, inp)
+ except ValueError:
+ # This occurs when unichr throws ValueError, which should
+ # only be for a lone-surrogate.
+ if _utils.supports_lone_surrogates:
+ raise
+ return None
+
+ test["input"] = decode(test["input"])
+ for token in test["output"]:
+ if token == "ParseError":
+ continue
+ else:
+ token[1] = decode(token[1])
+ if len(token) > 2:
+ for key, value in token[2]:
+ del token[2][key]
+ token[2][decode(key)] = decode(value)
+ return test
+
+
+def _doCapitalize(match):
+ return match.group(1).upper()
+
+
+_capitalizeRe = re.compile(r"\W+(\w)").sub
+
+
+def capitalize(s):
+ s = s.lower()
+ s = _capitalizeRe(_doCapitalize, s)
+ return s
+
+
+class TokenizerFile(pytest.File):
+ def collect(self):
+ with codecs.open(str(self.fspath), "r", encoding="utf-8") as fp:
+ tests = json.load(fp)
+ if 'tests' in tests:
+ for i, test in enumerate(tests['tests']):
+ yield TokenizerTestCollector(str(i), self, testdata=test)
+
+
+class TokenizerTestCollector(pytest.Collector):
+ def __init__(self, name, parent=None, config=None, session=None, testdata=None):
+ super(TokenizerTestCollector, self).__init__(name, parent, config, session)
+ if 'initialStates' not in testdata:
+ testdata["initialStates"] = ["Data state"]
+ if 'doubleEscaped' in testdata:
+ testdata = unescape(testdata)
+ self.testdata = testdata
+
+ def collect(self):
+ for initialState in self.testdata["initialStates"]:
+ initialState = capitalize(initialState)
+ item = TokenizerTest(initialState,
+ self,
+ self.testdata,
+ initialState)
+ if self.testdata["input"] is None:
+ item.add_marker(pytest.mark.skipif(True, reason="Relies on lone surrogates"))
+ yield item
+
+
+class TokenizerTest(pytest.Item):
+ def __init__(self, name, parent, test, initialState):
+ super(TokenizerTest, self).__init__(name, parent)
+ self.obj = lambda: 1 # this is to hack around skipif needing a function!
+ self.test = test
+ self.initialState = initialState
+
+ def runtest(self):
+ warnings.resetwarnings()
+ warnings.simplefilter("error")
+
+ expected = self.test['output']
+ if 'lastStartTag' not in self.test:
+ self.test['lastStartTag'] = None
+ parser = TokenizerTestParser(self.initialState,
+ self.test['lastStartTag'])
+ tokens = parser.parse(self.test['input'])
+ received = normalizeTokens(tokens)
+ errorMsg = "\n".join(["\n\nInitial state:",
+ self.initialState,
+ "\nInput:", self.test['input'],
+ "\nExpected:", repr(expected),
+ "\nreceived:", repr(tokens)])
+ ignoreErrorOrder = self.test.get('ignoreErrorOrder', False)
+ assert tokensMatch(expected, received, ignoreErrorOrder, True), errorMsg
+
+ def repr_failure(self, excinfo):
+ traceback = excinfo.traceback
+ ntraceback = traceback.cut(path=__file__)
+ excinfo.traceback = ntraceback.filter()
+
+ return excinfo.getrepr(funcargs=True,
+ showlocals=False,
+ style="short", tbfilter=False)
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/tokenizertotree.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/tokenizertotree.py
new file mode 100644
index 0000000000..8528e8766a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/tokenizertotree.py
@@ -0,0 +1,69 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import sys
+import os
+import json
+import re
+
+import html5lib
+from . import support
+from . import test_tokenizer
+
+p = html5lib.HTMLParser()
+
+unnamespaceExpected = re.compile(r"^(\|\s*)<html ([^>]+)>", re.M).sub
+
+
+def main(out_path):
+ if not os.path.exists(out_path):
+ sys.stderr.write("Path %s does not exist" % out_path)
+ sys.exit(1)
+
+ for filename in support.get_data_files('tokenizer', '*.test'):
+ run_file(filename, out_path)
+
+
+def run_file(filename, out_path):
+ try:
+ tests_data = json.load(open(filename, "r"))
+ except ValueError:
+ sys.stderr.write("Failed to load %s\n" % filename)
+ return
+ name = os.path.splitext(os.path.split(filename)[1])[0]
+ output_file = open(os.path.join(out_path, "tokenizer_%s.dat" % name), "w")
+
+ if 'tests' in tests_data:
+ for test_data in tests_data['tests']:
+ if 'initialStates' not in test_data:
+ test_data["initialStates"] = ["Data state"]
+
+ for initial_state in test_data["initialStates"]:
+ if initial_state != "Data state":
+ # don't support this yet
+ continue
+ test = make_test(test_data)
+ output_file.write(test)
+
+ output_file.close()
+
+
+def make_test(test_data):
+ if 'doubleEscaped' in test_data:
+ test_data = test_tokenizer.unescape_test(test_data)
+
+ rv = []
+ rv.append("#data")
+ rv.append(test_data["input"].encode("utf8"))
+ rv.append("#errors")
+ tree = p.parse(test_data["input"])
+ output = p.tree.testSerializer(tree)
+ output = "\n".join(("| " + line[3:]) if line.startswith("| ") else line
+ for line in output.split("\n"))
+ output = unnamespaceExpected(r"\1<\2>", output)
+ rv.append(output.encode("utf8"))
+ rv.append("")
+ return "\n".join(rv)
+
+
+if __name__ == "__main__":
+ main(sys.argv[1])
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/tree_construction.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/tree_construction.py
new file mode 100644
index 0000000000..1ef6e7250c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/tree_construction.py
@@ -0,0 +1,205 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import itertools
+import re
+import warnings
+from difflib import unified_diff
+
+import pytest
+
+from .support import TestData, convert, convertExpected, treeTypes
+from html5lib import html5parser, constants, treewalkers
+from html5lib.filters.lint import Filter as Lint
+
+_attrlist_re = re.compile(r"^(\s+)\w+=.*(\n\1\w+=.*)+", re.M)
+
+
+def sortattrs(s):
+ def replace(m):
+ lines = m.group(0).split("\n")
+ lines.sort()
+ return "\n".join(lines)
+ return _attrlist_re.sub(replace, s)
+
+
+class TreeConstructionFile(pytest.File):
+ def collect(self):
+ tests = TestData(str(self.fspath), "data")
+ for i, test in enumerate(tests):
+ yield TreeConstructionTest(str(i), self, testdata=test)
+
+
+class TreeConstructionTest(pytest.Collector):
+ def __init__(self, name, parent=None, config=None, session=None, testdata=None):
+ super(TreeConstructionTest, self).__init__(name, parent, config, session)
+ self.testdata = testdata
+
+ def collect(self):
+ for treeName, treeAPIs in sorted(treeTypes.items()):
+ for x in itertools.chain(self._getParserTests(treeName, treeAPIs),
+ self._getTreeWalkerTests(treeName, treeAPIs)):
+ yield x
+
+ def _getParserTests(self, treeName, treeAPIs):
+ if treeAPIs is not None and "adapter" in treeAPIs:
+ return
+ for namespaceHTMLElements in (True, False):
+ if namespaceHTMLElements:
+ nodeid = "%s::parser::namespaced" % treeName
+ else:
+ nodeid = "%s::parser::void-namespace" % treeName
+ item = ParserTest(nodeid,
+ self,
+ self.testdata,
+ treeAPIs["builder"] if treeAPIs is not None else None,
+ namespaceHTMLElements)
+ item.add_marker(getattr(pytest.mark, treeName))
+ item.add_marker(pytest.mark.parser)
+ if namespaceHTMLElements:
+ item.add_marker(pytest.mark.namespaced)
+ yield item
+
+ def _getTreeWalkerTests(self, treeName, treeAPIs):
+ nodeid = "%s::treewalker" % treeName
+ item = TreeWalkerTest(nodeid,
+ self,
+ self.testdata,
+ treeAPIs)
+ item.add_marker(getattr(pytest.mark, treeName))
+ item.add_marker(pytest.mark.treewalker)
+ yield item
+
+
+def convertTreeDump(data):
+ return "\n".join(convert(3)(data).split("\n")[1:])
+
+
+namespaceExpected = re.compile(r"^(\s*)<(\S+)>", re.M).sub
+
+
+class ParserTest(pytest.Item):
+ def __init__(self, name, parent, test, treeClass, namespaceHTMLElements):
+ super(ParserTest, self).__init__(name, parent)
+ self.test = test
+ self.treeClass = treeClass
+ self.namespaceHTMLElements = namespaceHTMLElements
+
+ def runtest(self):
+ if self.treeClass is None:
+ pytest.skip("Treebuilder not loaded")
+
+ p = html5parser.HTMLParser(tree=self.treeClass,
+ namespaceHTMLElements=self.namespaceHTMLElements)
+
+ input = self.test['data']
+ fragmentContainer = self.test['document-fragment']
+ expected = convertExpected(self.test['document'])
+ expectedErrors = self.test['errors'].split("\n") if self.test['errors'] else []
+
+ scripting = False
+ if 'script-on' in self.test:
+ scripting = True
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ try:
+ if fragmentContainer:
+ document = p.parseFragment(input, fragmentContainer, scripting=scripting)
+ else:
+ document = p.parse(input, scripting=scripting)
+ except constants.DataLossWarning:
+ pytest.skip("data loss warning")
+
+ output = convertTreeDump(p.tree.testSerializer(document))
+
+ expected = expected
+ if self.namespaceHTMLElements:
+ expected = namespaceExpected(r"\1<html \2>", expected)
+
+ errorMsg = "\n".join(["\n\nInput:", input, "\nExpected:", expected,
+ "\nReceived:", output])
+ assert expected == output, errorMsg
+
+ errStr = []
+ for (line, col), errorcode, datavars in p.errors:
+ assert isinstance(datavars, dict), "%s, %s" % (errorcode, repr(datavars))
+ errStr.append("Line: %i Col: %i %s" % (line, col,
+ constants.E[errorcode] % datavars))
+
+ errorMsg2 = "\n".join(["\n\nInput:", input,
+ "\nExpected errors (" + str(len(expectedErrors)) + "):\n" + "\n".join(expectedErrors),
+ "\nActual errors (" + str(len(p.errors)) + "):\n" + "\n".join(errStr)])
+ if False: # we're currently not testing parse errors
+ assert len(p.errors) == len(expectedErrors), errorMsg2
+
+ def repr_failure(self, excinfo):
+ traceback = excinfo.traceback
+ ntraceback = traceback.cut(path=__file__)
+ excinfo.traceback = ntraceback.filter()
+
+ return excinfo.getrepr(funcargs=True,
+ showlocals=False,
+ style="short", tbfilter=False)
+
+
+class TreeWalkerTest(pytest.Item):
+ def __init__(self, name, parent, test, treeAPIs):
+ super(TreeWalkerTest, self).__init__(name, parent)
+ self.test = test
+ self.treeAPIs = treeAPIs
+
+ def runtest(self):
+ if self.treeAPIs is None:
+ pytest.skip("Treebuilder not loaded")
+
+ p = html5parser.HTMLParser(tree=self.treeAPIs["builder"])
+
+ input = self.test['data']
+ fragmentContainer = self.test['document-fragment']
+ expected = convertExpected(self.test['document'])
+
+ scripting = False
+ if 'script-on' in self.test:
+ scripting = True
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ try:
+ if fragmentContainer:
+ document = p.parseFragment(input, fragmentContainer, scripting=scripting)
+ else:
+ document = p.parse(input, scripting=scripting)
+ except constants.DataLossWarning:
+ pytest.skip("data loss warning")
+
+ poutput = convertTreeDump(p.tree.testSerializer(document))
+ namespace_expected = namespaceExpected(r"\1<html \2>", expected)
+ if poutput != namespace_expected:
+ pytest.skip("parser output incorrect")
+
+ document = self.treeAPIs.get("adapter", lambda x: x)(document)
+
+ try:
+ output = treewalkers.pprint(Lint(self.treeAPIs["walker"](document)))
+ output = sortattrs(output)
+ expected = sortattrs(expected)
+ diff = "".join(unified_diff([line + "\n" for line in expected.splitlines()],
+ [line + "\n" for line in output.splitlines()],
+ "Expected", "Received"))
+ assert expected == output, "\n".join([
+ "", "Input:", input,
+ "", "Expected:", expected,
+ "", "Received:", output,
+ "", "Diff:", diff,
+ ])
+ except NotImplementedError:
+ pytest.skip("tree walker NotImplementedError")
+
+ def repr_failure(self, excinfo):
+ traceback = excinfo.traceback
+ ntraceback = traceback.cut(path=__file__)
+ excinfo.traceback = ntraceback.filter()
+
+ return excinfo.getrepr(funcargs=True,
+ showlocals=False,
+ style="short", tbfilter=False)
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/us-ascii.html b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/us-ascii.html
new file mode 100644
index 0000000000..728cb6baf9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/us-ascii.html
@@ -0,0 +1,3 @@
+<!doctype html>
+<title>Test</title>
+<p>Hello World! \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/utf-8-bom.html b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/utf-8-bom.html
new file mode 100644
index 0000000000..6ac5efcedf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/tests/utf-8-bom.html
@@ -0,0 +1,3 @@
+<!doctype html>
+<title>Test</title>
+<p>Hello World! © \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treeadapters/__init__.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treeadapters/__init__.py
new file mode 100644
index 0000000000..dfeb0ba5e1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treeadapters/__init__.py
@@ -0,0 +1,30 @@
+"""Tree adapters let you convert from one tree structure to another
+
+Example:
+
+.. code-block:: python
+
+ import html5lib
+ from html5lib.treeadapters import genshi
+
+ doc = '<html><body>Hi!</body></html>'
+ treebuilder = html5lib.getTreeBuilder('etree')
+ parser = html5lib.HTMLParser(tree=treebuilder)
+ tree = parser.parse(doc)
+ TreeWalker = html5lib.getTreeWalker('etree')
+
+ genshi_tree = genshi.to_genshi(TreeWalker(tree))
+
+"""
+from __future__ import absolute_import, division, unicode_literals
+
+from . import sax
+
+__all__ = ["sax"]
+
+try:
+ from . import genshi # noqa
+except ImportError:
+ pass
+else:
+ __all__.append("genshi")
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treeadapters/genshi.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treeadapters/genshi.py
new file mode 100644
index 0000000000..61d5fb6ac4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treeadapters/genshi.py
@@ -0,0 +1,54 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from genshi.core import QName, Attrs
+from genshi.core import START, END, TEXT, COMMENT, DOCTYPE
+
+
+def to_genshi(walker):
+ """Convert a tree to a genshi tree
+
+ :arg walker: the treewalker to use to walk the tree to convert it
+
+ :returns: generator of genshi nodes
+
+ """
+ text = []
+ for token in walker:
+ type = token["type"]
+ if type in ("Characters", "SpaceCharacters"):
+ text.append(token["data"])
+ elif text:
+ yield TEXT, "".join(text), (None, -1, -1)
+ text = []
+
+ if type in ("StartTag", "EmptyTag"):
+ if token["namespace"]:
+ name = "{%s}%s" % (token["namespace"], token["name"])
+ else:
+ name = token["name"]
+ attrs = Attrs([(QName("{%s}%s" % attr if attr[0] is not None else attr[1]), value)
+ for attr, value in token["data"].items()])
+ yield (START, (QName(name), attrs), (None, -1, -1))
+ if type == "EmptyTag":
+ type = "EndTag"
+
+ if type == "EndTag":
+ if token["namespace"]:
+ name = "{%s}%s" % (token["namespace"], token["name"])
+ else:
+ name = token["name"]
+
+ yield END, QName(name), (None, -1, -1)
+
+ elif type == "Comment":
+ yield COMMENT, token["data"], (None, -1, -1)
+
+ elif type == "Doctype":
+ yield DOCTYPE, (token["name"], token["publicId"],
+ token["systemId"]), (None, -1, -1)
+
+ else:
+ pass # FIXME: What to do?
+
+ if text:
+ yield TEXT, "".join(text), (None, -1, -1)
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treeadapters/sax.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treeadapters/sax.py
new file mode 100644
index 0000000000..f4ccea5a25
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treeadapters/sax.py
@@ -0,0 +1,50 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from xml.sax.xmlreader import AttributesNSImpl
+
+from ..constants import adjustForeignAttributes, unadjustForeignAttributes
+
+prefix_mapping = {}
+for prefix, localName, namespace in adjustForeignAttributes.values():
+ if prefix is not None:
+ prefix_mapping[prefix] = namespace
+
+
+def to_sax(walker, handler):
+ """Call SAX-like content handler based on treewalker walker
+
+ :arg walker: the treewalker to use to walk the tree to convert it
+
+ :arg handler: SAX handler to use
+
+ """
+ handler.startDocument()
+ for prefix, namespace in prefix_mapping.items():
+ handler.startPrefixMapping(prefix, namespace)
+
+ for token in walker:
+ type = token["type"]
+ if type == "Doctype":
+ continue
+ elif type in ("StartTag", "EmptyTag"):
+ attrs = AttributesNSImpl(token["data"],
+ unadjustForeignAttributes)
+ handler.startElementNS((token["namespace"], token["name"]),
+ token["name"],
+ attrs)
+ if type == "EmptyTag":
+ handler.endElementNS((token["namespace"], token["name"]),
+ token["name"])
+ elif type == "EndTag":
+ handler.endElementNS((token["namespace"], token["name"]),
+ token["name"])
+ elif type in ("Characters", "SpaceCharacters"):
+ handler.characters(token["data"])
+ elif type == "Comment":
+ pass
+ else:
+ assert False, "Unknown token type"
+
+ for prefix, namespace in prefix_mapping.items():
+ handler.endPrefixMapping(prefix)
+ handler.endDocument()
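
to_sax only emits events; what happens to them is entirely up to the handler. A minimal sketch with a throwaway handler (TextCollector is a hypothetical helper written for this example, not part of html5lib) that keeps just the character data:

    from xml.sax.handler import ContentHandler

    import html5lib
    from html5lib.treeadapters import sax

    class TextCollector(ContentHandler):
        """Ignore everything except character data."""
        def __init__(self):
            ContentHandler.__init__(self)
            self.chunks = []

        def characters(self, data):
            self.chunks.append(data)

    tree = html5lib.parse("<p>Hello, <em>SAX</em>!</p>")
    handler = TextCollector()
    sax.to_sax(html5lib.getTreeWalker("etree")(tree), handler)
    assert "".join(handler.chunks) == "Hello, SAX!"
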
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/__init__.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/__init__.py
new file mode 100644
index 0000000000..d44447eaf5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/__init__.py
@@ -0,0 +1,88 @@
+"""A collection of modules for building different kinds of trees from HTML
+documents.
+
+To create a treebuilder for a new type of tree, you need to
+implement several things:
+
+1. A set of classes for various types of elements: Document, Doctype, Comment,
+   Element. These must implement the interface of ``treebuilders.base.Node``
+ (although comment nodes have a different signature for their constructor,
+ see ``treebuilders.etree.Comment``) Textual content may also be implemented
+ as another node type, or not, as your tree implementation requires.
+
+2. A treebuilder object (called ``TreeBuilder`` by convention) that inherits
+ from ``treebuilders.base.TreeBuilder``. This has 4 required attributes:
+
+ * ``documentClass`` - the class to use for the bottommost node of a document
+ * ``elementClass`` - the class to use for HTML Elements
+ * ``commentClass`` - the class to use for comments
+ * ``doctypeClass`` - the class to use for doctypes
+
+ It also has one required method:
+
+ * ``getDocument`` - Returns the root node of the complete document tree
+
+3. If you wish to run the unit tests, you must also create a ``testSerializer``
+ method on your treebuilder which accepts a node and returns a string
+ containing Node and its children serialized according to the format used in
+ the unittests
+
+"""
+
+from __future__ import absolute_import, division, unicode_literals
+
+from .._utils import default_etree
+
+treeBuilderCache = {}
+
+
+def getTreeBuilder(treeType, implementation=None, **kwargs):
+ """Get a TreeBuilder class for various types of trees with built-in support
+
+ :arg treeType: the name of the tree type required (case-insensitive). Supported
+ values are:
+
+ * "dom" - A generic builder for DOM implementations, defaulting to a
+ xml.dom.minidom based implementation.
+ * "etree" - A generic builder for tree implementations exposing an
+ ElementTree-like interface, defaulting to xml.etree.cElementTree if
+ available and xml.etree.ElementTree if not.
+ * "lxml" - A etree-based builder for lxml.etree, handling limitations
+ of lxml's implementation.
+
+ :arg implementation: (Currently applies to the "etree" and "dom" tree
+ types). A module implementing the tree type e.g. xml.etree.ElementTree
+ or xml.etree.cElementTree.
+
+ :arg kwargs: Any additional options to pass to the TreeBuilder when
+ creating it.
+
+ Example:
+
+ >>> from html5lib.treebuilders import getTreeBuilder
+ >>> builder = getTreeBuilder('etree')
+
+ """
+
+ treeType = treeType.lower()
+ if treeType not in treeBuilderCache:
+ if treeType == "dom":
+ from . import dom
+ # Come up with a sane default (pref. from the stdlib)
+ if implementation is None:
+ from xml.dom import minidom
+ implementation = minidom
+ # NEVER cache here, caching is done in the dom submodule
+ return dom.getDomModule(implementation, **kwargs).TreeBuilder
+ elif treeType == "lxml":
+ from . import etree_lxml
+ treeBuilderCache[treeType] = etree_lxml.TreeBuilder
+ elif treeType == "etree":
+ from . import etree
+ if implementation is None:
+ implementation = default_etree
+ # NEVER cache here, caching is done in the etree submodule
+ return etree.getETreeModule(implementation, **kwargs).TreeBuilder
+ else:
+ raise ValueError("""Unrecognised treebuilder "%s" """ % treeType)
+ return treeBuilderCache.get(treeType)
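Illustrative sketch of feeding a builder returned by getTreeBuilder to the parser (not part of the patch; markup is made up):

    import html5lib
    from html5lib.treebuilders import getTreeBuilder

    TreeBuilder = getTreeBuilder("etree")
    parser = html5lib.HTMLParser(tree=TreeBuilder)
    # The default (non-fullTree) etree builder returns the <html> element.
    root = parser.parse("<p>Hello</p>")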
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/base.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/base.py
new file mode 100644
index 0000000000..e4a3d710d9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/base.py
@@ -0,0 +1,417 @@
+from __future__ import absolute_import, division, unicode_literals
+from six import text_type
+
+from ..constants import scopingElements, tableInsertModeElements, namespaces
+
+# The scope markers are inserted when entering object elements,
+# marquees, table cells, and table captions, and are used to prevent formatting
+# from "leaking" into tables, object elements, and marquees.
+Marker = None
+
+listElementsMap = {
+ None: (frozenset(scopingElements), False),
+ "button": (frozenset(scopingElements | {(namespaces["html"], "button")}), False),
+ "list": (frozenset(scopingElements | {(namespaces["html"], "ol"),
+ (namespaces["html"], "ul")}), False),
+ "table": (frozenset([(namespaces["html"], "html"),
+ (namespaces["html"], "table")]), False),
+ "select": (frozenset([(namespaces["html"], "optgroup"),
+ (namespaces["html"], "option")]), True)
+}
+
+
+class Node(object):
+ """Represents an item in the tree"""
+ def __init__(self, name):
+ """Creates a Node
+
+ :arg name: The tag name associated with the node
+
+ """
+ # The tag name associated with the node
+ self.name = name
+ # The parent of the current node (or None for the document node)
+ self.parent = None
+ # The value of the current node (applies to text nodes and comments)
+ self.value = None
+ # A dict holding name -> value pairs for attributes of the node
+ self.attributes = {}
+ # A list of child nodes of the current node. This must include all
+ # elements but not necessarily other node types.
+ self.childNodes = []
+ # A list of miscellaneous flags that can be set on the node.
+ self._flags = []
+
+ def __str__(self):
+ attributesStr = " ".join(["%s=\"%s\"" % (name, value)
+ for name, value in
+ self.attributes.items()])
+ if attributesStr:
+ return "<%s %s>" % (self.name, attributesStr)
+ else:
+ return "<%s>" % (self.name)
+
+ def __repr__(self):
+ return "<%s>" % (self.name)
+
+ def appendChild(self, node):
+ """Insert node as a child of the current node
+
+ :arg node: the node to insert
+
+ """
+ raise NotImplementedError
+
+ def insertText(self, data, insertBefore=None):
+ """Insert data as text in the current node, positioned before the
+ start of node insertBefore or to the end of the node's text.
+
+ :arg data: the data to insert
+
+ :arg insertBefore: True if you want to insert the text before the node
+ and False if you want to insert it after the node
+
+ """
+ raise NotImplementedError
+
+ def insertBefore(self, node, refNode):
+ """Insert node as a child of the current node, before refNode in the
+ list of child nodes. Raises ValueError if refNode is not a child of
+ the current node
+
+ :arg node: the node to insert
+
+ :arg refNode: the child node to insert the node before
+
+ """
+ raise NotImplementedError
+
+ def removeChild(self, node):
+ """Remove node from the children of the current node
+
+ :arg node: the child node to remove
+
+ """
+ raise NotImplementedError
+
+ def reparentChildren(self, newParent):
+ """Move all the children of the current node to newParent.
+ This is needed so that trees that don't store text as nodes move the
+ text in the correct way
+
+ :arg newParent: the node to move all this node's children to
+
+ """
+ # XXX - should this method be made more general?
+ for child in self.childNodes:
+ newParent.appendChild(child)
+ self.childNodes = []
+
+ def cloneNode(self):
+ """Return a shallow copy of the current node i.e. a node with the same
+ name and attributes but with no parent or child nodes
+ """
+ raise NotImplementedError
+
+ def hasContent(self):
+ """Return true if the node has children or text, false otherwise
+ """
+ raise NotImplementedError
+
+
+class ActiveFormattingElements(list):
+ def append(self, node):
+ equalCount = 0
+ if node != Marker:
+ for element in self[::-1]:
+ if element == Marker:
+ break
+ if self.nodesEqual(element, node):
+ equalCount += 1
+ if equalCount == 3:
+ self.remove(element)
+ break
+ list.append(self, node)
+
+ def nodesEqual(self, node1, node2):
+ if not node1.nameTuple == node2.nameTuple:
+ return False
+
+ if not node1.attributes == node2.attributes:
+ return False
+
+ return True
+
+
+class TreeBuilder(object):
+ """Base treebuilder implementation
+
+ * documentClass - the class to use for the bottommost node of a document
+ * elementClass - the class to use for HTML Elements
+ * commentClass - the class to use for comments
+ * doctypeClass - the class to use for doctypes
+
+ """
+ # pylint:disable=not-callable
+
+ # Document class
+ documentClass = None
+
+ # The class to use for creating a node
+ elementClass = None
+
+ # The class to use for creating comments
+ commentClass = None
+
+ # The class to use for creating doctypes
+ doctypeClass = None
+
+ # Fragment class
+ fragmentClass = None
+
+ def __init__(self, namespaceHTMLElements):
+ """Create a TreeBuilder
+
+ :arg namespaceHTMLElements: whether or not to namespace HTML elements
+
+ """
+ if namespaceHTMLElements:
+ self.defaultNamespace = "http://www.w3.org/1999/xhtml"
+ else:
+ self.defaultNamespace = None
+ self.reset()
+
+ def reset(self):
+ self.openElements = []
+ self.activeFormattingElements = ActiveFormattingElements()
+
+ # XXX - rename these to headElement, formElement
+ self.headPointer = None
+ self.formPointer = None
+
+ self.insertFromTable = False
+
+ self.document = self.documentClass()
+
+ def elementInScope(self, target, variant=None):
+
+ # If we pass a node in we match that. if we pass a string
+ # match any node with that name
+ exactNode = hasattr(target, "nameTuple")
+ if not exactNode:
+ if isinstance(target, text_type):
+ target = (namespaces["html"], target)
+ assert isinstance(target, tuple)
+
+ listElements, invert = listElementsMap[variant]
+
+ for node in reversed(self.openElements):
+ if exactNode and node == target:
+ return True
+ elif not exactNode and node.nameTuple == target:
+ return True
+ elif (invert ^ (node.nameTuple in listElements)):
+ return False
+
+ assert False # We should never reach this point
+
+ def reconstructActiveFormattingElements(self):
+ # Within this algorithm the order of steps described in the
+ # specification is not quite the same as the order of steps in the
+ # code. It should still do the same though.
+
+ # Step 1: stop the algorithm when there's nothing to do.
+ if not self.activeFormattingElements:
+ return
+
+ # Step 2 and step 3: we start with the last element. So i is -1.
+ i = len(self.activeFormattingElements) - 1
+ entry = self.activeFormattingElements[i]
+ if entry == Marker or entry in self.openElements:
+ return
+
+ # Step 6
+ while entry != Marker and entry not in self.openElements:
+ if i == 0:
+ # This will be reset to 0 below
+ i = -1
+ break
+ i -= 1
+ # Step 5: let entry be one earlier in the list.
+ entry = self.activeFormattingElements[i]
+
+ while True:
+ # Step 7
+ i += 1
+
+ # Step 8
+ entry = self.activeFormattingElements[i]
+ clone = entry.cloneNode() # Mainly to get a new copy of the attributes
+
+ # Step 9
+ element = self.insertElement({"type": "StartTag",
+ "name": clone.name,
+ "namespace": clone.namespace,
+ "data": clone.attributes})
+
+ # Step 10
+ self.activeFormattingElements[i] = element
+
+ # Step 11
+ if element == self.activeFormattingElements[-1]:
+ break
+
+ def clearActiveFormattingElements(self):
+ entry = self.activeFormattingElements.pop()
+ while self.activeFormattingElements and entry != Marker:
+ entry = self.activeFormattingElements.pop()
+
+ def elementInActiveFormattingElements(self, name):
+ """Check if an element exists between the end of the active
+ formatting elements and the last marker. If it does, return it, else
+ return false"""
+
+ for item in self.activeFormattingElements[::-1]:
+ # Check for Marker first because if it's a Marker it doesn't have a
+ # name attribute.
+ if item == Marker:
+ break
+ elif item.name == name:
+ return item
+ return False
+
+ def insertRoot(self, token):
+ element = self.createElement(token)
+ self.openElements.append(element)
+ self.document.appendChild(element)
+
+ def insertDoctype(self, token):
+ name = token["name"]
+ publicId = token["publicId"]
+ systemId = token["systemId"]
+
+ doctype = self.doctypeClass(name, publicId, systemId)
+ self.document.appendChild(doctype)
+
+ def insertComment(self, token, parent=None):
+ if parent is None:
+ parent = self.openElements[-1]
+ parent.appendChild(self.commentClass(token["data"]))
+
+ def createElement(self, token):
+ """Create an element but don't insert it anywhere"""
+ name = token["name"]
+ namespace = token.get("namespace", self.defaultNamespace)
+ element = self.elementClass(name, namespace)
+ element.attributes = token["data"]
+ return element
+
+ def _getInsertFromTable(self):
+ return self._insertFromTable
+
+ def _setInsertFromTable(self, value):
+ """Switch the function used to insert an element from the
+ normal one to the misnested table one and back again"""
+ self._insertFromTable = value
+ if value:
+ self.insertElement = self.insertElementTable
+ else:
+ self.insertElement = self.insertElementNormal
+
+ insertFromTable = property(_getInsertFromTable, _setInsertFromTable)
+
+ def insertElementNormal(self, token):
+ name = token["name"]
+ assert isinstance(name, text_type), "Element %s not unicode" % name
+ namespace = token.get("namespace", self.defaultNamespace)
+ element = self.elementClass(name, namespace)
+ element.attributes = token["data"]
+ self.openElements[-1].appendChild(element)
+ self.openElements.append(element)
+ return element
+
+ def insertElementTable(self, token):
+ """Create an element and insert it into the tree"""
+ element = self.createElement(token)
+ if self.openElements[-1].name not in tableInsertModeElements:
+ return self.insertElementNormal(token)
+ else:
+ # We should be in the InTable mode. This means we want to do
+ # special magic element rearranging
+ parent, insertBefore = self.getTableMisnestedNodePosition()
+ if insertBefore is None:
+ parent.appendChild(element)
+ else:
+ parent.insertBefore(element, insertBefore)
+ self.openElements.append(element)
+ return element
+
+ def insertText(self, data, parent=None):
+ """Insert text data."""
+ if parent is None:
+ parent = self.openElements[-1]
+
+ if (not self.insertFromTable or (self.insertFromTable and
+ self.openElements[-1].name
+ not in tableInsertModeElements)):
+ parent.insertText(data)
+ else:
+ # We should be in the InTable mode. This means we want to do
+ # special magic element rearranging
+ parent, insertBefore = self.getTableMisnestedNodePosition()
+ parent.insertText(data, insertBefore)
+
+ def getTableMisnestedNodePosition(self):
+ """Get the foster parent element, and sibling to insert before
+ (or None) when inserting a misnested table node"""
+ # The foster parent element is the one which comes before the most
+ # recently opened table element
+ # XXX - this is really inelegant
+ lastTable = None
+ fosterParent = None
+ insertBefore = None
+ for elm in self.openElements[::-1]:
+ if elm.name == "table":
+ lastTable = elm
+ break
+ if lastTable:
+ # XXX - we should really check that this parent is actually a
+ # node here
+ if lastTable.parent:
+ fosterParent = lastTable.parent
+ insertBefore = lastTable
+ else:
+ fosterParent = self.openElements[
+ self.openElements.index(lastTable) - 1]
+ else:
+ fosterParent = self.openElements[0]
+ return fosterParent, insertBefore
+
+ def generateImpliedEndTags(self, exclude=None):
+ name = self.openElements[-1].name
+ # XXX td, th and tr are not actually needed
+ if (name in frozenset(("dd", "dt", "li", "option", "optgroup", "p", "rp", "rt")) and
+ name != exclude):
+ self.openElements.pop()
+ # XXX This is not entirely what the specification says. We should
+ # investigate it more closely.
+ self.generateImpliedEndTags(exclude)
+
+ def getDocument(self):
+ """Return the final tree"""
+ return self.document
+
+ def getFragment(self):
+ """Return the final fragment"""
+ # assert self.innerHTML
+ fragment = self.fragmentClass()
+ self.openElements[0].reparentChildren(fragment)
+ return fragment
+
+ def testSerializer(self, node):
+ """Serialize the subtree of node in the format required by unit tests
+
+ :arg node: the node from which to start serializing
+
+ """
+ raise NotImplementedError
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/dom.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/dom.py
new file mode 100644
index 0000000000..d8b5300465
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/dom.py
@@ -0,0 +1,239 @@
+from __future__ import absolute_import, division, unicode_literals
+
+
+try:
+ from collections.abc import MutableMapping
+except ImportError: # Python 2.7
+ from collections import MutableMapping
+from xml.dom import minidom, Node
+import weakref
+
+from . import base
+from .. import constants
+from ..constants import namespaces
+from .._utils import moduleFactoryFactory
+
+
+def getDomBuilder(DomImplementation):
+ Dom = DomImplementation
+
+ class AttrList(MutableMapping):
+ def __init__(self, element):
+ self.element = element
+
+ def __iter__(self):
+ return iter(self.element.attributes.keys())
+
+ def __setitem__(self, name, value):
+ if isinstance(name, tuple):
+ raise NotImplementedError
+ else:
+ attr = self.element.ownerDocument.createAttribute(name)
+ attr.value = value
+ self.element.attributes[name] = attr
+
+ def __len__(self):
+ return len(self.element.attributes)
+
+ def items(self):
+ return list(self.element.attributes.items())
+
+ def values(self):
+ return list(self.element.attributes.values())
+
+ def __getitem__(self, name):
+ if isinstance(name, tuple):
+ raise NotImplementedError
+ else:
+ return self.element.attributes[name].value
+
+ def __delitem__(self, name):
+ if isinstance(name, tuple):
+ raise NotImplementedError
+ else:
+ del self.element.attributes[name]
+
+ class NodeBuilder(base.Node):
+ def __init__(self, element):
+ base.Node.__init__(self, element.nodeName)
+ self.element = element
+
+ namespace = property(lambda self: hasattr(self.element, "namespaceURI") and
+ self.element.namespaceURI or None)
+
+ def appendChild(self, node):
+ node.parent = self
+ self.element.appendChild(node.element)
+
+ def insertText(self, data, insertBefore=None):
+ text = self.element.ownerDocument.createTextNode(data)
+ if insertBefore:
+ self.element.insertBefore(text, insertBefore.element)
+ else:
+ self.element.appendChild(text)
+
+ def insertBefore(self, node, refNode):
+ self.element.insertBefore(node.element, refNode.element)
+ node.parent = self
+
+ def removeChild(self, node):
+ if node.element.parentNode == self.element:
+ self.element.removeChild(node.element)
+ node.parent = None
+
+ def reparentChildren(self, newParent):
+ while self.element.hasChildNodes():
+ child = self.element.firstChild
+ self.element.removeChild(child)
+ newParent.element.appendChild(child)
+ self.childNodes = []
+
+ def getAttributes(self):
+ return AttrList(self.element)
+
+ def setAttributes(self, attributes):
+ if attributes:
+ for name, value in list(attributes.items()):
+ if isinstance(name, tuple):
+ if name[0] is not None:
+ qualifiedName = (name[0] + ":" + name[1])
+ else:
+ qualifiedName = name[1]
+ self.element.setAttributeNS(name[2], qualifiedName,
+ value)
+ else:
+ self.element.setAttribute(
+ name, value)
+ attributes = property(getAttributes, setAttributes)
+
+ def cloneNode(self):
+ return NodeBuilder(self.element.cloneNode(False))
+
+ def hasContent(self):
+ return self.element.hasChildNodes()
+
+ def getNameTuple(self):
+ if self.namespace is None:
+ return namespaces["html"], self.name
+ else:
+ return self.namespace, self.name
+
+ nameTuple = property(getNameTuple)
+
+ class TreeBuilder(base.TreeBuilder): # pylint:disable=unused-variable
+ def documentClass(self):
+ self.dom = Dom.getDOMImplementation().createDocument(None, None, None)
+ return weakref.proxy(self)
+
+ def insertDoctype(self, token):
+ name = token["name"]
+ publicId = token["publicId"]
+ systemId = token["systemId"]
+
+ domimpl = Dom.getDOMImplementation()
+ doctype = domimpl.createDocumentType(name, publicId, systemId)
+ self.document.appendChild(NodeBuilder(doctype))
+ if Dom == minidom:
+ doctype.ownerDocument = self.dom
+
+ def elementClass(self, name, namespace=None):
+ if namespace is None and self.defaultNamespace is None:
+ node = self.dom.createElement(name)
+ else:
+ node = self.dom.createElementNS(namespace, name)
+
+ return NodeBuilder(node)
+
+ def commentClass(self, data):
+ return NodeBuilder(self.dom.createComment(data))
+
+ def fragmentClass(self):
+ return NodeBuilder(self.dom.createDocumentFragment())
+
+ def appendChild(self, node):
+ self.dom.appendChild(node.element)
+
+ def testSerializer(self, element):
+ return testSerializer(element)
+
+ def getDocument(self):
+ return self.dom
+
+ def getFragment(self):
+ return base.TreeBuilder.getFragment(self).element
+
+ def insertText(self, data, parent=None):
+ data = data
+ if parent != self:
+ base.TreeBuilder.insertText(self, data, parent)
+ else:
+ # HACK: allow text nodes as children of the document node
+ if hasattr(self.dom, '_child_node_types'):
+ # pylint:disable=protected-access
+ if Node.TEXT_NODE not in self.dom._child_node_types:
+ self.dom._child_node_types = list(self.dom._child_node_types)
+ self.dom._child_node_types.append(Node.TEXT_NODE)
+ self.dom.appendChild(self.dom.createTextNode(data))
+
+ implementation = DomImplementation
+ name = None
+
+ def testSerializer(element):
+ element.normalize()
+ rv = []
+
+ def serializeElement(element, indent=0):
+ if element.nodeType == Node.DOCUMENT_TYPE_NODE:
+ if element.name:
+ if element.publicId or element.systemId:
+ publicId = element.publicId or ""
+ systemId = element.systemId or ""
+ rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" %
+ (' ' * indent, element.name, publicId, systemId))
+ else:
+ rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, element.name))
+ else:
+ rv.append("|%s<!DOCTYPE >" % (' ' * indent,))
+ elif element.nodeType == Node.DOCUMENT_NODE:
+ rv.append("#document")
+ elif element.nodeType == Node.DOCUMENT_FRAGMENT_NODE:
+ rv.append("#document-fragment")
+ elif element.nodeType == Node.COMMENT_NODE:
+ rv.append("|%s<!-- %s -->" % (' ' * indent, element.nodeValue))
+ elif element.nodeType == Node.TEXT_NODE:
+ rv.append("|%s\"%s\"" % (' ' * indent, element.nodeValue))
+ else:
+ if (hasattr(element, "namespaceURI") and
+ element.namespaceURI is not None):
+ name = "%s %s" % (constants.prefixes[element.namespaceURI],
+ element.nodeName)
+ else:
+ name = element.nodeName
+ rv.append("|%s<%s>" % (' ' * indent, name))
+ if element.hasAttributes():
+ attributes = []
+ for i in range(len(element.attributes)):
+ attr = element.attributes.item(i)
+ name = attr.nodeName
+ value = attr.value
+ ns = attr.namespaceURI
+ if ns:
+ name = "%s %s" % (constants.prefixes[ns], attr.localName)
+ else:
+ name = attr.nodeName
+ attributes.append((name, value))
+
+ for name, value in sorted(attributes):
+ rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
+ indent += 2
+ for child in element.childNodes:
+ serializeElement(child, indent)
+ serializeElement(element, 0)
+
+ return "\n".join(rv)
+
+ return locals()
+
+
+# The actual means to get a module!
+getDomModule = moduleFactoryFactory(getDomBuilder)
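Illustrative sketch of the "dom" builder in use (not part of the patch):

    import html5lib

    # The "dom" tree builder produces an xml.dom.minidom Document by default.
    document = html5lib.parse("<title>Hi</title>", treebuilder="dom")
    print(document.toxml())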
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/etree.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/etree.py
new file mode 100644
index 0000000000..086bed4eed
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/etree.py
@@ -0,0 +1,343 @@
+from __future__ import absolute_import, division, unicode_literals
+# pylint:disable=protected-access
+
+from six import text_type
+
+import re
+
+from copy import copy
+
+from . import base
+from .. import _ihatexml
+from .. import constants
+from ..constants import namespaces
+from .._utils import moduleFactoryFactory
+
+tag_regexp = re.compile("{([^}]*)}(.*)")
+
+
+def getETreeBuilder(ElementTreeImplementation, fullTree=False):
+ ElementTree = ElementTreeImplementation
+ ElementTreeCommentType = ElementTree.Comment("asd").tag
+
+ class Element(base.Node):
+ def __init__(self, name, namespace=None):
+ self._name = name
+ self._namespace = namespace
+ self._element = ElementTree.Element(self._getETreeTag(name,
+ namespace))
+ if namespace is None:
+ self.nameTuple = namespaces["html"], self._name
+ else:
+ self.nameTuple = self._namespace, self._name
+ self.parent = None
+ self._childNodes = []
+ self._flags = []
+
+ def _getETreeTag(self, name, namespace):
+ if namespace is None:
+ etree_tag = name
+ else:
+ etree_tag = "{%s}%s" % (namespace, name)
+ return etree_tag
+
+ def _setName(self, name):
+ self._name = name
+ self._element.tag = self._getETreeTag(self._name, self._namespace)
+
+ def _getName(self):
+ return self._name
+
+ name = property(_getName, _setName)
+
+ def _setNamespace(self, namespace):
+ self._namespace = namespace
+ self._element.tag = self._getETreeTag(self._name, self._namespace)
+
+ def _getNamespace(self):
+ return self._namespace
+
+ namespace = property(_getNamespace, _setNamespace)
+
+ def _getAttributes(self):
+ return self._element.attrib
+
+ def _setAttributes(self, attributes):
+ el_attrib = self._element.attrib
+ el_attrib.clear()
+ if attributes:
+ # calling .items _always_ allocates, and the above truthy check is cheaper than the
+ # allocation on average
+ for key, value in attributes.items():
+ if isinstance(key, tuple):
+ name = "{%s}%s" % (key[2], key[1])
+ else:
+ name = key
+ el_attrib[name] = value
+
+ attributes = property(_getAttributes, _setAttributes)
+
+ def _getChildNodes(self):
+ return self._childNodes
+
+ def _setChildNodes(self, value):
+ del self._element[:]
+ self._childNodes = []
+ for element in value:
+ self.insertChild(element)
+
+ childNodes = property(_getChildNodes, _setChildNodes)
+
+ def hasContent(self):
+ """Return true if the node has children or text"""
+ return bool(self._element.text or len(self._element))
+
+ def appendChild(self, node):
+ self._childNodes.append(node)
+ self._element.append(node._element)
+ node.parent = self
+
+ def insertBefore(self, node, refNode):
+ index = list(self._element).index(refNode._element)
+ self._element.insert(index, node._element)
+ node.parent = self
+
+ def removeChild(self, node):
+ self._childNodes.remove(node)
+ self._element.remove(node._element)
+ node.parent = None
+
+ def insertText(self, data, insertBefore=None):
+ if not(len(self._element)):
+ if not self._element.text:
+ self._element.text = ""
+ self._element.text += data
+ elif insertBefore is None:
+ # Insert the text as the tail of the last child element
+ if not self._element[-1].tail:
+ self._element[-1].tail = ""
+ self._element[-1].tail += data
+ else:
+ # Insert the text before the specified node
+ children = list(self._element)
+ index = children.index(insertBefore._element)
+ if index > 0:
+ if not self._element[index - 1].tail:
+ self._element[index - 1].tail = ""
+ self._element[index - 1].tail += data
+ else:
+ if not self._element.text:
+ self._element.text = ""
+ self._element.text += data
+
+ def cloneNode(self):
+ element = type(self)(self.name, self.namespace)
+ if self._element.attrib:
+ element._element.attrib = copy(self._element.attrib)
+ return element
+
+ def reparentChildren(self, newParent):
+ if newParent.childNodes:
+ newParent.childNodes[-1]._element.tail += self._element.text
+ else:
+ if not newParent._element.text:
+ newParent._element.text = ""
+ if self._element.text is not None:
+ newParent._element.text += self._element.text
+ self._element.text = ""
+ base.Node.reparentChildren(self, newParent)
+
+ class Comment(Element):
+ def __init__(self, data):
+ # Use the superclass constructor to set all properties on the
+ # wrapper element
+ self._element = ElementTree.Comment(data)
+ self.parent = None
+ self._childNodes = []
+ self._flags = []
+
+ def _getData(self):
+ return self._element.text
+
+ def _setData(self, value):
+ self._element.text = value
+
+ data = property(_getData, _setData)
+
+ class DocumentType(Element):
+ def __init__(self, name, publicId, systemId):
+ Element.__init__(self, "<!DOCTYPE>")
+ self._element.text = name
+ self.publicId = publicId
+ self.systemId = systemId
+
+ def _getPublicId(self):
+ return self._element.get("publicId", "")
+
+ def _setPublicId(self, value):
+ if value is not None:
+ self._element.set("publicId", value)
+
+ publicId = property(_getPublicId, _setPublicId)
+
+ def _getSystemId(self):
+ return self._element.get("systemId", "")
+
+ def _setSystemId(self, value):
+ if value is not None:
+ self._element.set("systemId", value)
+
+ systemId = property(_getSystemId, _setSystemId)
+
+ class Document(Element):
+ def __init__(self):
+ Element.__init__(self, "DOCUMENT_ROOT")
+
+ class DocumentFragment(Element):
+ def __init__(self):
+ Element.__init__(self, "DOCUMENT_FRAGMENT")
+
+ def testSerializer(element):
+ rv = []
+
+ def serializeElement(element, indent=0):
+ if not(hasattr(element, "tag")):
+ element = element.getroot()
+ if element.tag == "<!DOCTYPE>":
+ if element.get("publicId") or element.get("systemId"):
+ publicId = element.get("publicId") or ""
+ systemId = element.get("systemId") or ""
+ rv.append("""<!DOCTYPE %s "%s" "%s">""" %
+ (element.text, publicId, systemId))
+ else:
+ rv.append("<!DOCTYPE %s>" % (element.text,))
+ elif element.tag == "DOCUMENT_ROOT":
+ rv.append("#document")
+ if element.text is not None:
+ rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
+ if element.tail is not None:
+ raise TypeError("Document node cannot have tail")
+ if hasattr(element, "attrib") and len(element.attrib):
+ raise TypeError("Document node cannot have attributes")
+ elif element.tag == ElementTreeCommentType:
+ rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
+ else:
+ assert isinstance(element.tag, text_type), \
+ "Expected unicode, got %s, %s" % (type(element.tag), element.tag)
+ nsmatch = tag_regexp.match(element.tag)
+
+ if nsmatch is None:
+ name = element.tag
+ else:
+ ns, name = nsmatch.groups()
+ prefix = constants.prefixes[ns]
+ name = "%s %s" % (prefix, name)
+ rv.append("|%s<%s>" % (' ' * indent, name))
+
+ if hasattr(element, "attrib"):
+ attributes = []
+ for name, value in element.attrib.items():
+ nsmatch = tag_regexp.match(name)
+ if nsmatch is not None:
+ ns, name = nsmatch.groups()
+ prefix = constants.prefixes[ns]
+ attr_string = "%s %s" % (prefix, name)
+ else:
+ attr_string = name
+ attributes.append((attr_string, value))
+
+ for name, value in sorted(attributes):
+ rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
+ if element.text:
+ rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
+ indent += 2
+ for child in element:
+ serializeElement(child, indent)
+ if element.tail:
+ rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
+ serializeElement(element, 0)
+
+ return "\n".join(rv)
+
+ def tostring(element): # pylint:disable=unused-variable
+ """Serialize an element and its child nodes to a string"""
+ rv = []
+ filter = _ihatexml.InfosetFilter()
+
+ def serializeElement(element):
+ if isinstance(element, ElementTree.ElementTree):
+ element = element.getroot()
+
+ if element.tag == "<!DOCTYPE>":
+ if element.get("publicId") or element.get("systemId"):
+ publicId = element.get("publicId") or ""
+ systemId = element.get("systemId") or ""
+ rv.append("""<!DOCTYPE %s PUBLIC "%s" "%s">""" %
+ (element.text, publicId, systemId))
+ else:
+ rv.append("<!DOCTYPE %s>" % (element.text,))
+ elif element.tag == "DOCUMENT_ROOT":
+ if element.text is not None:
+ rv.append(element.text)
+ if element.tail is not None:
+ raise TypeError("Document node cannot have tail")
+ if hasattr(element, "attrib") and len(element.attrib):
+ raise TypeError("Document node cannot have attributes")
+
+ for child in element:
+ serializeElement(child)
+
+ elif element.tag == ElementTreeCommentType:
+ rv.append("<!--%s-->" % (element.text,))
+ else:
+ # This is assumed to be an ordinary element
+ if not element.attrib:
+ rv.append("<%s>" % (filter.fromXmlName(element.tag),))
+ else:
+ attr = " ".join(["%s=\"%s\"" % (
+ filter.fromXmlName(name), value)
+ for name, value in element.attrib.items()])
+ rv.append("<%s %s>" % (element.tag, attr))
+ if element.text:
+ rv.append(element.text)
+
+ for child in element:
+ serializeElement(child)
+
+ rv.append("</%s>" % (element.tag,))
+
+ if element.tail:
+ rv.append(element.tail)
+
+ serializeElement(element)
+
+ return "".join(rv)
+
+ class TreeBuilder(base.TreeBuilder): # pylint:disable=unused-variable
+ documentClass = Document
+ doctypeClass = DocumentType
+ elementClass = Element
+ commentClass = Comment
+ fragmentClass = DocumentFragment
+ implementation = ElementTreeImplementation
+
+ def testSerializer(self, element):
+ return testSerializer(element)
+
+ def getDocument(self):
+ if fullTree:
+ return self.document._element
+ else:
+ if self.defaultNamespace is not None:
+ return self.document._element.find(
+ "{%s}html" % self.defaultNamespace)
+ else:
+ return self.document._element.find("html")
+
+ def getFragment(self):
+ return base.TreeBuilder.getFragment(self)._element
+
+ return locals()
+
+
+getETreeModule = moduleFactoryFactory(getETreeBuilder)
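Illustrative sketch of the "etree" builder with an explicit implementation module (not part of the patch):

    import xml.etree.ElementTree as ElementTree

    import html5lib
    from html5lib.treebuilders import getTreeBuilder

    # Pass the stdlib module explicitly; the default (non-fullTree) builder
    # returns the <html> element rather than the DOCUMENT_ROOT wrapper.
    TreeBuilder = getTreeBuilder("etree", implementation=ElementTree)
    parser = html5lib.HTMLParser(tree=TreeBuilder)
    root = parser.parse("<p>Hello</p>")
    print(ElementTree.tostring(root))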
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/etree_lxml.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/etree_lxml.py
new file mode 100644
index 0000000000..e73de61a85
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treebuilders/etree_lxml.py
@@ -0,0 +1,392 @@
+"""Module for supporting the lxml.etree library. The idea here is to use as much
+of the native library as possible, without using fragile hacks like custom element
+names that break between releases. The downside of this is that we cannot represent
+all possible trees; specifically the following are known to cause problems:
+
+Text or comments as siblings of the root element
+Doctypes with no name
+
+When any of these things occur, we emit a DataLossWarning
+"""
+
+from __future__ import absolute_import, division, unicode_literals
+# pylint:disable=protected-access
+
+import warnings
+import re
+import sys
+
+try:
+ from collections.abc import MutableMapping
+except ImportError:
+ from collections import MutableMapping
+
+from . import base
+from ..constants import DataLossWarning
+from .. import constants
+from . import etree as etree_builders
+from .. import _ihatexml
+
+import lxml.etree as etree
+from six import PY3, binary_type
+
+
+fullTree = True
+tag_regexp = re.compile("{([^}]*)}(.*)")
+
+comment_type = etree.Comment("asd").tag
+
+
+class DocumentType(object):
+ def __init__(self, name, publicId, systemId):
+ self.name = name
+ self.publicId = publicId
+ self.systemId = systemId
+
+
+class Document(object):
+ def __init__(self):
+ self._elementTree = None
+ self._childNodes = []
+
+ def appendChild(self, element):
+ last = self._elementTree.getroot()
+ for last in self._elementTree.getroot().itersiblings():
+ pass
+
+ last.addnext(element._element)
+
+ def _getChildNodes(self):
+ return self._childNodes
+
+ childNodes = property(_getChildNodes)
+
+
+def testSerializer(element):
+ rv = []
+ infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True)
+
+ def serializeElement(element, indent=0):
+ if not hasattr(element, "tag"):
+ if hasattr(element, "getroot"):
+ # Full tree case
+ rv.append("#document")
+ if element.docinfo.internalDTD:
+ if not (element.docinfo.public_id or
+ element.docinfo.system_url):
+ dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name
+ else:
+ dtd_str = """<!DOCTYPE %s "%s" "%s">""" % (
+ element.docinfo.root_name,
+ element.docinfo.public_id,
+ element.docinfo.system_url)
+ rv.append("|%s%s" % (' ' * (indent + 2), dtd_str))
+ next_element = element.getroot()
+ while next_element.getprevious() is not None:
+ next_element = next_element.getprevious()
+ while next_element is not None:
+ serializeElement(next_element, indent + 2)
+ next_element = next_element.getnext()
+ elif isinstance(element, str) or isinstance(element, bytes):
+ # Text in a fragment
+ assert isinstance(element, str) or sys.version_info[0] == 2
+ rv.append("|%s\"%s\"" % (' ' * indent, element))
+ else:
+ # Fragment case
+ rv.append("#document-fragment")
+ for next_element in element:
+ serializeElement(next_element, indent + 2)
+ elif element.tag == comment_type:
+ rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
+ if hasattr(element, "tail") and element.tail:
+ rv.append("|%s\"%s\"" % (' ' * indent, element.tail))
+ else:
+ assert isinstance(element, etree._Element)
+ nsmatch = etree_builders.tag_regexp.match(element.tag)
+ if nsmatch is not None:
+ ns = nsmatch.group(1)
+ tag = nsmatch.group(2)
+ prefix = constants.prefixes[ns]
+ rv.append("|%s<%s %s>" % (' ' * indent, prefix,
+ infosetFilter.fromXmlName(tag)))
+ else:
+ rv.append("|%s<%s>" % (' ' * indent,
+ infosetFilter.fromXmlName(element.tag)))
+
+ if hasattr(element, "attrib"):
+ attributes = []
+ for name, value in element.attrib.items():
+ nsmatch = tag_regexp.match(name)
+ if nsmatch is not None:
+ ns, name = nsmatch.groups()
+ name = infosetFilter.fromXmlName(name)
+ prefix = constants.prefixes[ns]
+ attr_string = "%s %s" % (prefix, name)
+ else:
+ attr_string = infosetFilter.fromXmlName(name)
+ attributes.append((attr_string, value))
+
+ for name, value in sorted(attributes):
+ rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
+
+ if element.text:
+ rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
+ indent += 2
+ for child in element:
+ serializeElement(child, indent)
+ if hasattr(element, "tail") and element.tail:
+ rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
+ serializeElement(element, 0)
+
+ return "\n".join(rv)
+
+
+def tostring(element):
+ """Serialize an element and its child nodes to a string"""
+ rv = []
+
+ def serializeElement(element):
+ if not hasattr(element, "tag"):
+ if element.docinfo.internalDTD:
+ if element.docinfo.doctype:
+ dtd_str = element.docinfo.doctype
+ else:
+ dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name
+ rv.append(dtd_str)
+ serializeElement(element.getroot())
+
+ elif element.tag == comment_type:
+ rv.append("<!--%s-->" % (element.text,))
+
+ else:
+ # This is assumed to be an ordinary element
+ if not element.attrib:
+ rv.append("<%s>" % (element.tag,))
+ else:
+ attr = " ".join(["%s=\"%s\"" % (name, value)
+ for name, value in element.attrib.items()])
+ rv.append("<%s %s>" % (element.tag, attr))
+ if element.text:
+ rv.append(element.text)
+
+ for child in element:
+ serializeElement(child)
+
+ rv.append("</%s>" % (element.tag,))
+
+ if hasattr(element, "tail") and element.tail:
+ rv.append(element.tail)
+
+ serializeElement(element)
+
+ return "".join(rv)
+
+
+class TreeBuilder(base.TreeBuilder):
+ documentClass = Document
+ doctypeClass = DocumentType
+ elementClass = None
+ commentClass = None
+ fragmentClass = Document
+ implementation = etree
+
+ def __init__(self, namespaceHTMLElements, fullTree=False):
+ builder = etree_builders.getETreeModule(etree, fullTree=fullTree)
+ infosetFilter = self.infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True)
+ self.namespaceHTMLElements = namespaceHTMLElements
+
+ class Attributes(MutableMapping):
+ def __init__(self, element):
+ self._element = element
+
+ def _coerceKey(self, key):
+ if isinstance(key, tuple):
+ name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1]))
+ else:
+ name = infosetFilter.coerceAttribute(key)
+ return name
+
+ def __getitem__(self, key):
+ value = self._element._element.attrib[self._coerceKey(key)]
+ if not PY3 and isinstance(value, binary_type):
+ value = value.decode("ascii")
+ return value
+
+ def __setitem__(self, key, value):
+ self._element._element.attrib[self._coerceKey(key)] = value
+
+ def __delitem__(self, key):
+ del self._element._element.attrib[self._coerceKey(key)]
+
+ def __iter__(self):
+ return iter(self._element._element.attrib)
+
+ def __len__(self):
+ return len(self._element._element.attrib)
+
+ def clear(self):
+ return self._element._element.attrib.clear()
+
+ class Element(builder.Element):
+ def __init__(self, name, namespace):
+ name = infosetFilter.coerceElement(name)
+ builder.Element.__init__(self, name, namespace=namespace)
+ self._attributes = Attributes(self)
+
+ def _setName(self, name):
+ self._name = infosetFilter.coerceElement(name)
+ self._element.tag = self._getETreeTag(
+ self._name, self._namespace)
+
+ def _getName(self):
+ return infosetFilter.fromXmlName(self._name)
+
+ name = property(_getName, _setName)
+
+ def _getAttributes(self):
+ return self._attributes
+
+ def _setAttributes(self, value):
+ attributes = self.attributes
+ attributes.clear()
+ attributes.update(value)
+
+ attributes = property(_getAttributes, _setAttributes)
+
+ def insertText(self, data, insertBefore=None):
+ data = infosetFilter.coerceCharacters(data)
+ builder.Element.insertText(self, data, insertBefore)
+
+ def cloneNode(self):
+ element = type(self)(self.name, self.namespace)
+ if self._element.attrib:
+ element._element.attrib.update(self._element.attrib)
+ return element
+
+ class Comment(builder.Comment):
+ def __init__(self, data):
+ data = infosetFilter.coerceComment(data)
+ builder.Comment.__init__(self, data)
+
+ def _setData(self, data):
+ data = infosetFilter.coerceComment(data)
+ self._element.text = data
+
+ def _getData(self):
+ return self._element.text
+
+ data = property(_getData, _setData)
+
+ self.elementClass = Element
+ self.commentClass = Comment
+ # self.fragmentClass = builder.DocumentFragment
+ base.TreeBuilder.__init__(self, namespaceHTMLElements)
+
+ def reset(self):
+ base.TreeBuilder.reset(self)
+ self.insertComment = self.insertCommentInitial
+ self.initial_comments = []
+ self.doctype = None
+
+ def testSerializer(self, element):
+ return testSerializer(element)
+
+ def getDocument(self):
+ if fullTree:
+ return self.document._elementTree
+ else:
+ return self.document._elementTree.getroot()
+
+ def getFragment(self):
+ fragment = []
+ element = self.openElements[0]._element
+ if element.text:
+ fragment.append(element.text)
+ fragment.extend(list(element))
+ if element.tail:
+ fragment.append(element.tail)
+ return fragment
+
+ def insertDoctype(self, token):
+ name = token["name"]
+ publicId = token["publicId"]
+ systemId = token["systemId"]
+
+ if not name:
+ warnings.warn("lxml cannot represent empty doctype", DataLossWarning)
+ self.doctype = None
+ else:
+ coercedName = self.infosetFilter.coerceElement(name)
+ if coercedName != name:
+ warnings.warn("lxml cannot represent non-xml doctype", DataLossWarning)
+
+ doctype = self.doctypeClass(coercedName, publicId, systemId)
+ self.doctype = doctype
+
+ def insertCommentInitial(self, data, parent=None):
+ assert parent is None or parent is self.document
+ assert self.document._elementTree is None
+ self.initial_comments.append(data)
+
+ def insertCommentMain(self, data, parent=None):
+ if (parent == self.document and
+ self.document._elementTree.getroot()[-1].tag == comment_type):
+ warnings.warn("lxml cannot represent adjacent comments beyond the root elements", DataLossWarning)
+ super(TreeBuilder, self).insertComment(data, parent)
+
+ def insertRoot(self, token):
+ # Because of the way libxml2 works, it doesn't seem to be possible to
+ # alter information like the doctype after the tree has been parsed.
+ # Therefore we need to use the built-in parser to create our initial
+ # tree, after which we can add elements like normal
+ docStr = ""
+ if self.doctype:
+ assert self.doctype.name
+ docStr += "<!DOCTYPE %s" % self.doctype.name
+ if (self.doctype.publicId is not None or
+ self.doctype.systemId is not None):
+ docStr += (' PUBLIC "%s" ' %
+ (self.infosetFilter.coercePubid(self.doctype.publicId or "")))
+ if self.doctype.systemId:
+ sysid = self.doctype.systemId
+ if sysid.find("'") >= 0 and sysid.find('"') >= 0:
+ warnings.warn("DOCTYPE system cannot contain single and double quotes", DataLossWarning)
+ sysid = sysid.replace("'", 'U00027')
+ if sysid.find("'") >= 0:
+ docStr += '"%s"' % sysid
+ else:
+ docStr += "'%s'" % sysid
+ else:
+ docStr += "''"
+ docStr += ">"
+ if self.doctype.name != token["name"]:
+ warnings.warn("lxml cannot represent doctype with a different name to the root element", DataLossWarning)
+ docStr += "<THIS_SHOULD_NEVER_APPEAR_PUBLICLY/>"
+ root = etree.fromstring(docStr)
+
+ # Append the initial comments:
+ for comment_token in self.initial_comments:
+ comment = self.commentClass(comment_token["data"])
+ root.addprevious(comment._element)
+
+ # Create the root document and add the ElementTree to it
+ self.document = self.documentClass()
+ self.document._elementTree = root.getroottree()
+
+ # Give the root element the right name
+ name = token["name"]
+ namespace = token.get("namespace", self.defaultNamespace)
+ if namespace is None:
+ etree_tag = name
+ else:
+ etree_tag = "{%s}%s" % (namespace, name)
+ root.tag = etree_tag
+
+ # Add the root element to the internal child/open data structures
+ root_element = self.elementClass(name, namespace)
+ root_element._element = root
+ self.document._childNodes.append(root_element)
+ self.openElements.append(root_element)
+
+ # Reset to the default insert comment function
+ self.insertComment = self.insertCommentMain
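Illustrative sketch of the lxml builder (not part of the patch; assumes lxml is installed):

    import html5lib

    # The "lxml" builder hands back an lxml.etree._ElementTree for a full
    # document parse; the root element carries the XHTML namespace.
    tree = html5lib.parse("<p>Hello</p>", treebuilder="lxml")
    print(tree.getroot().tag)  # {http://www.w3.org/1999/xhtml}html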
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/__init__.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/__init__.py
new file mode 100644
index 0000000000..b2d3aac313
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/__init__.py
@@ -0,0 +1,154 @@
+"""A collection of modules for iterating through different kinds of
+tree, generating tokens identical to those produced by the tokenizer
+module.
+
+To create a tree walker for a new type of tree, you need to
+implement a tree walker object (called TreeWalker by convention) that
+takes a tree as its sole constructor argument and implements ``__iter__``,
+returning an iterator which generates tokens.
+"""
+
+from __future__ import absolute_import, division, unicode_literals
+
+from .. import constants
+from .._utils import default_etree
+
+__all__ = ["getTreeWalker", "pprint"]
+
+treeWalkerCache = {}
+
+
+def getTreeWalker(treeType, implementation=None, **kwargs):
+ """Get a TreeWalker class for various types of tree with built-in support
+
+ :arg str treeType: the name of the tree type required (case-insensitive).
+ Supported values are:
+
+ * "dom": The xml.dom.minidom DOM implementation
+ * "etree": A generic walker for tree implementations exposing an
+ elementtree-like interface (known to work with ElementTree,
+ cElementTree and lxml.etree).
+ * "lxml": Optimized walker for lxml.etree
+ * "genshi": a Genshi stream
+
+ :arg implementation: A module implementing the tree type e.g.
+ xml.etree.ElementTree or cElementTree (Currently applies to the "etree"
+ tree type only).
+
+ :arg kwargs: keyword arguments passed to the etree walker--for other
+ walkers, this has no effect
+
+ :returns: a TreeWalker class
+
+ """
+
+ treeType = treeType.lower()
+ if treeType not in treeWalkerCache:
+ if treeType == "dom":
+ from . import dom
+ treeWalkerCache[treeType] = dom.TreeWalker
+ elif treeType == "genshi":
+ from . import genshi
+ treeWalkerCache[treeType] = genshi.TreeWalker
+ elif treeType == "lxml":
+ from . import etree_lxml
+ treeWalkerCache[treeType] = etree_lxml.TreeWalker
+ elif treeType == "etree":
+ from . import etree
+ if implementation is None:
+ implementation = default_etree
+ # XXX: NEVER cache here, caching is done in the etree submodule
+ return etree.getETreeModule(implementation, **kwargs).TreeWalker
+ return treeWalkerCache.get(treeType)
+
+
+def concatenateCharacterTokens(tokens):
+ pendingCharacters = []
+ for token in tokens:
+ type = token["type"]
+ if type in ("Characters", "SpaceCharacters"):
+ pendingCharacters.append(token["data"])
+ else:
+ if pendingCharacters:
+ yield {"type": "Characters", "data": "".join(pendingCharacters)}
+ pendingCharacters = []
+ yield token
+ if pendingCharacters:
+ yield {"type": "Characters", "data": "".join(pendingCharacters)}
+
+
+def pprint(walker):
+ """Pretty printer for tree walkers
+
+ Takes a TreeWalker instance and pretty prints the output of walking the tree.
+
+ :arg walker: a TreeWalker instance
+
+ """
+ output = []
+ indent = 0
+ for token in concatenateCharacterTokens(walker):
+ type = token["type"]
+ if type in ("StartTag", "EmptyTag"):
+ # tag name
+ if token["namespace"] and token["namespace"] != constants.namespaces["html"]:
+ if token["namespace"] in constants.prefixes:
+ ns = constants.prefixes[token["namespace"]]
+ else:
+ ns = token["namespace"]
+ name = "%s %s" % (ns, token["name"])
+ else:
+ name = token["name"]
+ output.append("%s<%s>" % (" " * indent, name))
+ indent += 2
+ # attributes (sorted for consistent ordering)
+ attrs = token["data"]
+ for (namespace, localname), value in sorted(attrs.items()):
+ if namespace:
+ if namespace in constants.prefixes:
+ ns = constants.prefixes[namespace]
+ else:
+ ns = namespace
+ name = "%s %s" % (ns, localname)
+ else:
+ name = localname
+ output.append("%s%s=\"%s\"" % (" " * indent, name, value))
+ # self-closing
+ if type == "EmptyTag":
+ indent -= 2
+
+ elif type == "EndTag":
+ indent -= 2
+
+ elif type == "Comment":
+ output.append("%s<!-- %s -->" % (" " * indent, token["data"]))
+
+ elif type == "Doctype":
+ if token["name"]:
+ if token["publicId"]:
+ output.append("""%s<!DOCTYPE %s "%s" "%s">""" %
+ (" " * indent,
+ token["name"],
+ token["publicId"],
+ token["systemId"] if token["systemId"] else ""))
+ elif token["systemId"]:
+ output.append("""%s<!DOCTYPE %s "" "%s">""" %
+ (" " * indent,
+ token["name"],
+ token["systemId"]))
+ else:
+ output.append("%s<!DOCTYPE %s>" % (" " * indent,
+ token["name"]))
+ else:
+ output.append("%s<!DOCTYPE >" % (" " * indent,))
+
+ elif type == "Characters":
+ output.append("%s\"%s\"" % (" " * indent, token["data"]))
+
+ elif type == "SpaceCharacters":
+ assert False, "concatenateCharacterTokens should have got rid of all Space tokens"
+
+ else:
+ raise ValueError("Unknown token type, %s" % type)
+
+ return "\n".join(output)
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/base.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/base.py
new file mode 100644
index 0000000000..80c474c4e9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/base.py
@@ -0,0 +1,252 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from xml.dom import Node
+from ..constants import namespaces, voidElements, spaceCharacters
+
+__all__ = ["DOCUMENT", "DOCTYPE", "TEXT", "ELEMENT", "COMMENT", "ENTITY", "UNKNOWN",
+ "TreeWalker", "NonRecursiveTreeWalker"]
+
+DOCUMENT = Node.DOCUMENT_NODE
+DOCTYPE = Node.DOCUMENT_TYPE_NODE
+TEXT = Node.TEXT_NODE
+ELEMENT = Node.ELEMENT_NODE
+COMMENT = Node.COMMENT_NODE
+ENTITY = Node.ENTITY_NODE
+UNKNOWN = "<#UNKNOWN#>"
+
+spaceCharacters = "".join(spaceCharacters)
+
+
+class TreeWalker(object):
+ """Walks a tree yielding tokens
+
+ Tokens are dicts that all have a ``type`` field specifying the type of the
+ token.
+
+ """
+ def __init__(self, tree):
+ """Creates a TreeWalker
+
+ :arg tree: the tree to walk
+
+ """
+ self.tree = tree
+
+ def __iter__(self):
+ raise NotImplementedError
+
+ def error(self, msg):
+ """Generates an error token with the given message
+
+ :arg msg: the error message
+
+ :returns: SerializeError token
+
+ """
+ return {"type": "SerializeError", "data": msg}
+
+ def emptyTag(self, namespace, name, attrs, hasChildren=False):
+ """Generates an EmptyTag token
+
+ :arg namespace: the namespace of the token--can be ``None``
+
+ :arg name: the name of the element
+
+ :arg attrs: the attributes of the element as a dict
+
+        :arg hasChildren: whether or not to yield a SerializeError because
+            this tag shouldn't have children
+
+ :returns: EmptyTag token
+
+ """
+ yield {"type": "EmptyTag", "name": name,
+ "namespace": namespace,
+ "data": attrs}
+ if hasChildren:
+ yield self.error("Void element has children")
+
+ def startTag(self, namespace, name, attrs):
+ """Generates a StartTag token
+
+ :arg namespace: the namespace of the token--can be ``None``
+
+ :arg name: the name of the element
+
+ :arg attrs: the attributes of the element as a dict
+
+ :returns: StartTag token
+
+ """
+ return {"type": "StartTag",
+ "name": name,
+ "namespace": namespace,
+ "data": attrs}
+
+ def endTag(self, namespace, name):
+ """Generates an EndTag token
+
+ :arg namespace: the namespace of the token--can be ``None``
+
+ :arg name: the name of the element
+
+ :returns: EndTag token
+
+ """
+ return {"type": "EndTag",
+ "name": name,
+ "namespace": namespace}
+
+ def text(self, data):
+ """Generates SpaceCharacters and Characters tokens
+
+ Depending on what's in the data, this generates one or more
+ ``SpaceCharacters`` and ``Characters`` tokens.
+
+ For example:
+
+ >>> from html5lib.treewalkers.base import TreeWalker
+ >>> # Give it an empty tree just so it instantiates
+ >>> walker = TreeWalker([])
+ >>> list(walker.text(''))
+ []
+ >>> list(walker.text(' '))
+ [{u'data': ' ', u'type': u'SpaceCharacters'}]
+ >>> list(walker.text(' abc ')) # doctest: +NORMALIZE_WHITESPACE
+ [{u'data': ' ', u'type': u'SpaceCharacters'},
+ {u'data': u'abc', u'type': u'Characters'},
+ {u'data': u' ', u'type': u'SpaceCharacters'}]
+
+ :arg data: the text data
+
+ :returns: one or more ``SpaceCharacters`` and ``Characters`` tokens
+
+ """
+ data = data
+ middle = data.lstrip(spaceCharacters)
+ left = data[:len(data) - len(middle)]
+ if left:
+ yield {"type": "SpaceCharacters", "data": left}
+ data = middle
+ middle = data.rstrip(spaceCharacters)
+ right = data[len(middle):]
+ if middle:
+ yield {"type": "Characters", "data": middle}
+ if right:
+ yield {"type": "SpaceCharacters", "data": right}
+
+ def comment(self, data):
+ """Generates a Comment token
+
+ :arg data: the comment
+
+ :returns: Comment token
+
+ """
+ return {"type": "Comment", "data": data}
+
+ def doctype(self, name, publicId=None, systemId=None):
+ """Generates a Doctype token
+
+ :arg name:
+
+ :arg publicId:
+
+ :arg systemId:
+
+ :returns: the Doctype token
+
+ """
+ return {"type": "Doctype",
+ "name": name,
+ "publicId": publicId,
+ "systemId": systemId}
+
+ def entity(self, name):
+ """Generates an Entity token
+
+ :arg name: the entity name
+
+ :returns: an Entity token
+
+ """
+ return {"type": "Entity", "name": name}
+
+ def unknown(self, nodeType):
+ """Handles unknown node types"""
+ return self.error("Unknown node type: " + nodeType)
+
+
+class NonRecursiveTreeWalker(TreeWalker):
+ def getNodeDetails(self, node):
+ raise NotImplementedError
+
+ def getFirstChild(self, node):
+ raise NotImplementedError
+
+ def getNextSibling(self, node):
+ raise NotImplementedError
+
+ def getParentNode(self, node):
+ raise NotImplementedError
+
+ def __iter__(self):
+ currentNode = self.tree
+ while currentNode is not None:
+ details = self.getNodeDetails(currentNode)
+ type, details = details[0], details[1:]
+ hasChildren = False
+
+ if type == DOCTYPE:
+ yield self.doctype(*details)
+
+ elif type == TEXT:
+ for token in self.text(*details):
+ yield token
+
+ elif type == ELEMENT:
+ namespace, name, attributes, hasChildren = details
+ if (not namespace or namespace == namespaces["html"]) and name in voidElements:
+ for token in self.emptyTag(namespace, name, attributes,
+ hasChildren):
+ yield token
+ hasChildren = False
+ else:
+ yield self.startTag(namespace, name, attributes)
+
+ elif type == COMMENT:
+ yield self.comment(details[0])
+
+ elif type == ENTITY:
+ yield self.entity(details[0])
+
+ elif type == DOCUMENT:
+ hasChildren = True
+
+ else:
+ yield self.unknown(details[0])
+
+ if hasChildren:
+ firstChild = self.getFirstChild(currentNode)
+ else:
+ firstChild = None
+
+ if firstChild is not None:
+ currentNode = firstChild
+ else:
+ while currentNode is not None:
+ details = self.getNodeDetails(currentNode)
+ type, details = details[0], details[1:]
+ if type == ELEMENT:
+ namespace, name, attributes, hasChildren = details
+ if (namespace and namespace != namespaces["html"]) or name not in voidElements:
+ yield self.endTag(namespace, name)
+ if self.tree is currentNode:
+ currentNode = None
+ break
+ nextSibling = self.getNextSibling(currentNode)
+ if nextSibling is not None:
+ currentNode = nextSibling
+ break
+ else:
+ currentNode = self.getParentNode(currentNode)
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/dom.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/dom.py
new file mode 100644
index 0000000000..b0c89b001f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/dom.py
@@ -0,0 +1,43 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from xml.dom import Node
+
+from . import base
+
+
+class TreeWalker(base.NonRecursiveTreeWalker):
+ def getNodeDetails(self, node):
+ if node.nodeType == Node.DOCUMENT_TYPE_NODE:
+ return base.DOCTYPE, node.name, node.publicId, node.systemId
+
+ elif node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
+ return base.TEXT, node.nodeValue
+
+ elif node.nodeType == Node.ELEMENT_NODE:
+ attrs = {}
+ for attr in list(node.attributes.keys()):
+ attr = node.getAttributeNode(attr)
+ if attr.namespaceURI:
+ attrs[(attr.namespaceURI, attr.localName)] = attr.value
+ else:
+ attrs[(None, attr.name)] = attr.value
+ return (base.ELEMENT, node.namespaceURI, node.nodeName,
+ attrs, node.hasChildNodes())
+
+ elif node.nodeType == Node.COMMENT_NODE:
+ return base.COMMENT, node.nodeValue
+
+ elif node.nodeType in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE):
+ return (base.DOCUMENT,)
+
+ else:
+ return base.UNKNOWN, node.nodeType
+
+ def getFirstChild(self, node):
+ return node.firstChild
+
+ def getNextSibling(self, node):
+ return node.nextSibling
+
+ def getParentNode(self, node):
+ return node.parentNode
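A short usage sketch of the DOM walker above, assuming html5lib and its "dom" treebuilder are importable; it walks an xml.dom.minidom tree:

    # Build a minidom tree with the "dom" treebuilder and walk it with the
    # matching "dom" treewalker defined above.
    import html5lib
    from html5lib import treewalkers

    dom_tree = html5lib.parse("<!-- note --><p class='x'>hi</p>", treebuilder="dom")
    walker = treewalkers.getTreeWalker("dom")

    for token in walker(dom_tree):
        # Comments arrive as {"type": "Comment", ...}; element attributes are
        # keyed by (namespace, local name) tuples, e.g. {(None, "class"): "x"}.
        print(token)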
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/etree.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/etree.py
new file mode 100644
index 0000000000..44653372d6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/etree.py
@@ -0,0 +1,131 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from collections import OrderedDict
+import re
+
+from six import string_types
+
+from . import base
+from .._utils import moduleFactoryFactory
+
+tag_regexp = re.compile("{([^}]*)}(.*)")
+
+
+def getETreeBuilder(ElementTreeImplementation):
+ ElementTree = ElementTreeImplementation
+ ElementTreeCommentType = ElementTree.Comment("asd").tag
+
+ class TreeWalker(base.NonRecursiveTreeWalker): # pylint:disable=unused-variable
+ """Given the particular ElementTree representation, this implementation,
+ to avoid using recursion, returns "nodes" as tuples with the following
+ content:
+
+ 1. The current element
+
+ 2. The index of the element relative to its parent
+
+ 3. A stack of ancestor elements
+
+ 4. A flag "text", "tail" or None to indicate if the current node is a
+ text node; either the text or tail of the current element (1)
+ """
+ def getNodeDetails(self, node):
+ if isinstance(node, tuple): # It might be the root Element
+ elt, _, _, flag = node
+ if flag in ("text", "tail"):
+ return base.TEXT, getattr(elt, flag)
+ else:
+ node = elt
+
+ if not(hasattr(node, "tag")):
+ node = node.getroot()
+
+ if node.tag in ("DOCUMENT_ROOT", "DOCUMENT_FRAGMENT"):
+ return (base.DOCUMENT,)
+
+ elif node.tag == "<!DOCTYPE>":
+ return (base.DOCTYPE, node.text,
+ node.get("publicId"), node.get("systemId"))
+
+ elif node.tag == ElementTreeCommentType:
+ return base.COMMENT, node.text
+
+ else:
+ assert isinstance(node.tag, string_types), type(node.tag)
+ # This is assumed to be an ordinary element
+ match = tag_regexp.match(node.tag)
+ if match:
+ namespace, tag = match.groups()
+ else:
+ namespace = None
+ tag = node.tag
+ attrs = OrderedDict()
+ for name, value in list(node.attrib.items()):
+ match = tag_regexp.match(name)
+ if match:
+ attrs[(match.group(1), match.group(2))] = value
+ else:
+ attrs[(None, name)] = value
+ return (base.ELEMENT, namespace, tag,
+ attrs, len(node) or node.text)
+
+ def getFirstChild(self, node):
+ if isinstance(node, tuple):
+ element, key, parents, flag = node
+ else:
+ element, key, parents, flag = node, None, [], None
+
+ if flag in ("text", "tail"):
+ return None
+ else:
+ if element.text:
+ return element, key, parents, "text"
+ elif len(element):
+ parents.append(element)
+ return element[0], 0, parents, None
+ else:
+ return None
+
+ def getNextSibling(self, node):
+ if isinstance(node, tuple):
+ element, key, parents, flag = node
+ else:
+ return None
+
+ if flag == "text":
+ if len(element):
+ parents.append(element)
+ return element[0], 0, parents, None
+ else:
+ return None
+ else:
+ if element.tail and flag != "tail":
+ return element, key, parents, "tail"
+ elif key < len(parents[-1]) - 1:
+ return parents[-1][key + 1], key + 1, parents, None
+ else:
+ return None
+
+ def getParentNode(self, node):
+ if isinstance(node, tuple):
+ element, key, parents, flag = node
+ else:
+ return None
+
+ if flag == "text":
+ if not parents:
+ return element
+ else:
+ return element, key, parents, None
+ else:
+ parent = parents.pop()
+ if not parents:
+ return parent
+ else:
+ assert list(parents[-1]).count(parent) == 1
+ return parent, list(parents[-1]).index(parent), parents, None
+
+ return locals()
+
+
+getETreeModule = moduleFactoryFactory(getETreeBuilder)
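From the outside, the tuple bookkeeping described in the docstring above is invisible; iterating the walker still yields a flat token stream, with both .text and .tail emitted as Characters tokens. A small sketch, assuming html5lib is importable:

    import html5lib
    from html5lib import treewalkers

    tree = html5lib.parse("<p>a<b>b</b>c</p>")   # "c" is stored as the tail of <b>
    walker = treewalkers.getTreeWalker("etree")

    chars = [t["data"] for t in walker(tree) if t["type"] == "Characters"]
    print(chars)   # expected: ['a', 'b', 'c']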
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/etree_lxml.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/etree_lxml.py
new file mode 100644
index 0000000000..a614ac5b3f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/etree_lxml.py
@@ -0,0 +1,215 @@
+from __future__ import absolute_import, division, unicode_literals
+from six import text_type
+
+from collections import OrderedDict
+
+from lxml import etree
+from ..treebuilders.etree import tag_regexp
+
+from . import base
+
+from .. import _ihatexml
+
+
+def ensure_str(s):
+ if s is None:
+ return None
+ elif isinstance(s, text_type):
+ return s
+ else:
+ return s.decode("ascii", "strict")
+
+
+class Root(object):
+ def __init__(self, et):
+ self.elementtree = et
+ self.children = []
+
+ try:
+ if et.docinfo.internalDTD:
+ self.children.append(Doctype(self,
+ ensure_str(et.docinfo.root_name),
+ ensure_str(et.docinfo.public_id),
+ ensure_str(et.docinfo.system_url)))
+ except AttributeError:
+ pass
+
+ try:
+ node = et.getroot()
+ except AttributeError:
+ node = et
+
+ while node.getprevious() is not None:
+ node = node.getprevious()
+ while node is not None:
+ self.children.append(node)
+ node = node.getnext()
+
+ self.text = None
+ self.tail = None
+
+ def __getitem__(self, key):
+ return self.children[key]
+
+ def getnext(self):
+ return None
+
+ def __len__(self):
+ return 1
+
+
+class Doctype(object):
+ def __init__(self, root_node, name, public_id, system_id):
+ self.root_node = root_node
+ self.name = name
+ self.public_id = public_id
+ self.system_id = system_id
+
+ self.text = None
+ self.tail = None
+
+ def getnext(self):
+ return self.root_node.children[1]
+
+
+class FragmentRoot(Root):
+ def __init__(self, children):
+ self.children = [FragmentWrapper(self, child) for child in children]
+ self.text = self.tail = None
+
+ def getnext(self):
+ return None
+
+
+class FragmentWrapper(object):
+ def __init__(self, fragment_root, obj):
+ self.root_node = fragment_root
+ self.obj = obj
+ if hasattr(self.obj, 'text'):
+ self.text = ensure_str(self.obj.text)
+ else:
+ self.text = None
+ if hasattr(self.obj, 'tail'):
+ self.tail = ensure_str(self.obj.tail)
+ else:
+ self.tail = None
+
+ def __getattr__(self, name):
+ return getattr(self.obj, name)
+
+ def getnext(self):
+ siblings = self.root_node.children
+ idx = siblings.index(self)
+ if idx < len(siblings) - 1:
+ return siblings[idx + 1]
+ else:
+ return None
+
+ def __getitem__(self, key):
+ return self.obj[key]
+
+ def __bool__(self):
+ return bool(self.obj)
+
+ def getparent(self):
+ return None
+
+ def __str__(self):
+ return str(self.obj)
+
+ def __unicode__(self):
+ return str(self.obj)
+
+ def __len__(self):
+ return len(self.obj)
+
+
+class TreeWalker(base.NonRecursiveTreeWalker):
+ def __init__(self, tree):
+ # pylint:disable=redefined-variable-type
+ if isinstance(tree, list):
+ self.fragmentChildren = set(tree)
+ tree = FragmentRoot(tree)
+ else:
+ self.fragmentChildren = set()
+ tree = Root(tree)
+ base.NonRecursiveTreeWalker.__init__(self, tree)
+ self.filter = _ihatexml.InfosetFilter()
+
+ def getNodeDetails(self, node):
+ if isinstance(node, tuple): # Text node
+ node, key = node
+ assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
+ return base.TEXT, ensure_str(getattr(node, key))
+
+ elif isinstance(node, Root):
+ return (base.DOCUMENT,)
+
+ elif isinstance(node, Doctype):
+ return base.DOCTYPE, node.name, node.public_id, node.system_id
+
+ elif isinstance(node, FragmentWrapper) and not hasattr(node, "tag"):
+ return base.TEXT, ensure_str(node.obj)
+
+ elif node.tag == etree.Comment:
+ return base.COMMENT, ensure_str(node.text)
+
+ elif node.tag == etree.Entity:
+ return base.ENTITY, ensure_str(node.text)[1:-1] # strip &;
+
+ else:
+ # This is assumed to be an ordinary element
+ match = tag_regexp.match(ensure_str(node.tag))
+ if match:
+ namespace, tag = match.groups()
+ else:
+ namespace = None
+ tag = ensure_str(node.tag)
+ attrs = OrderedDict()
+ for name, value in list(node.attrib.items()):
+ name = ensure_str(name)
+ value = ensure_str(value)
+ match = tag_regexp.match(name)
+ if match:
+ attrs[(match.group(1), match.group(2))] = value
+ else:
+ attrs[(None, name)] = value
+ return (base.ELEMENT, namespace, self.filter.fromXmlName(tag),
+ attrs, len(node) > 0 or node.text)
+
+ def getFirstChild(self, node):
+ assert not isinstance(node, tuple), "Text nodes have no children"
+
+ assert len(node) or node.text, "Node has no children"
+ if node.text:
+ return (node, "text")
+ else:
+ return node[0]
+
+ def getNextSibling(self, node):
+ if isinstance(node, tuple): # Text node
+ node, key = node
+ assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
+ if key == "text":
+ # XXX: we cannot use a "bool(node) and node[0] or None" construct here
+ # because node[0] might evaluate to False if it has no child element
+ if len(node):
+ return node[0]
+ else:
+ return None
+ else: # tail
+ return node.getnext()
+
+ return (node, "tail") if node.tail else node.getnext()
+
+ def getParentNode(self, node):
+ if isinstance(node, tuple): # Text node
+ node, key = node
+ assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
+ if key == "text":
+ return node
+ # else: fallback to "normal" processing
+ elif node in self.fragmentChildren:
+ return None
+
+ return node.getparent()
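A brief sketch of the lxml walker, assuming both html5lib and lxml are installed; the Root and Doctype wrappers above are what make the doctype recorded in lxml's docinfo show up as an ordinary token:

    import html5lib
    from html5lib import treewalkers

    tree = html5lib.parse("<!DOCTYPE html><p>hi</p>", treebuilder="lxml")
    walker = treewalkers.getTreeWalker("lxml")

    print([t["type"] for t in walker(tree)])
    # e.g. ['Doctype', 'StartTag', 'StartTag', 'EndTag', 'StartTag',
    #       'StartTag', 'Characters', 'EndTag', 'EndTag', 'EndTag']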
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/genshi.py b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/genshi.py
new file mode 100644
index 0000000000..7483be27d4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/html5lib/treewalkers/genshi.py
@@ -0,0 +1,69 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from genshi.core import QName
+from genshi.core import START, END, XML_NAMESPACE, DOCTYPE, TEXT
+from genshi.core import START_NS, END_NS, START_CDATA, END_CDATA, PI, COMMENT
+
+from . import base
+
+from ..constants import voidElements, namespaces
+
+
+class TreeWalker(base.TreeWalker):
+ def __iter__(self):
+ # Buffer the events so we can pass in the following one
+ previous = None
+ for event in self.tree:
+ if previous is not None:
+ for token in self.tokens(previous, event):
+ yield token
+ previous = event
+
+ # Don't forget the final event!
+ if previous is not None:
+ for token in self.tokens(previous, None):
+ yield token
+
+ def tokens(self, event, next):
+ kind, data, _ = event
+ if kind == START:
+ tag, attribs = data
+ name = tag.localname
+ namespace = tag.namespace
+ converted_attribs = {}
+ for k, v in attribs:
+ if isinstance(k, QName):
+ converted_attribs[(k.namespace, k.localname)] = v
+ else:
+ converted_attribs[(None, k)] = v
+
+ if namespace == namespaces["html"] and name in voidElements:
+ for token in self.emptyTag(namespace, name, converted_attribs,
+ not next or next[0] != END or
+ next[1] != tag):
+ yield token
+ else:
+ yield self.startTag(namespace, name, converted_attribs)
+
+ elif kind == END:
+ name = data.localname
+ namespace = data.namespace
+ if namespace != namespaces["html"] or name not in voidElements:
+ yield self.endTag(namespace, name)
+
+ elif kind == COMMENT:
+ yield self.comment(data)
+
+ elif kind == TEXT:
+ for token in self.text(data):
+ yield token
+
+ elif kind == DOCTYPE:
+ yield self.doctype(*data)
+
+ elif kind in (XML_NAMESPACE, DOCTYPE, START_NS, END_NS,
+ START_CDATA, END_CDATA, PI):
+ pass
+
+ else:
+ yield self.unknown(kind)
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/parse.py b/testing/web-platform/tests/tools/third_party/html5lib/parse.py
new file mode 100755
index 0000000000..e6806b4607
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/parse.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python
+"""
+Parse a document to a tree, with optional profiling
+"""
+
+import argparse
+import sys
+import traceback
+
+from html5lib import html5parser
+from html5lib import treebuilders, serializer, treewalkers
+from html5lib import constants
+from html5lib import _utils
+
+
+def parse():
+ parser = get_parser()
+ opts = parser.parse_args()
+ encoding = "utf8"
+
+ try:
+ f = opts.filename
+ # Try opening from the internet
+ if f.startswith('http://'):
+ try:
+ import urllib.request
+ import urllib.parse
+ import urllib.error
+ import cgi
+ f = urllib.request.urlopen(f)
+ contentType = f.headers.get('content-type')
+ if contentType:
+ (mediaType, params) = cgi.parse_header(contentType)
+ encoding = params.get('charset')
+ except Exception:
+ pass
+ elif f == '-':
+ f = sys.stdin
+ if sys.version_info[0] >= 3:
+ encoding = None
+ else:
+ try:
+ # Try opening from file system
+ f = open(f, "rb")
+ except IOError as e:
+ sys.stderr.write("Unable to open file: %s\n" % e)
+ sys.exit(1)
+ except IndexError:
+ sys.stderr.write("No filename provided. Use -h for help\n")
+ sys.exit(1)
+
+ treebuilder = treebuilders.getTreeBuilder(opts.treebuilder)
+
+ p = html5parser.HTMLParser(tree=treebuilder, debug=opts.log)
+
+ if opts.fragment:
+ parseMethod = p.parseFragment
+ else:
+ parseMethod = p.parse
+
+ if opts.profile:
+ import cProfile
+ import pstats
+ cProfile.runctx("run(parseMethod, f, encoding, scripting)", None,
+ {"run": run,
+ "parseMethod": parseMethod,
+ "f": f,
+ "encoding": encoding,
+ "scripting": opts.scripting},
+ "stats.prof")
+ # XXX - We should use a temp file here
+ stats = pstats.Stats('stats.prof')
+ stats.strip_dirs()
+ stats.sort_stats('time')
+ stats.print_stats()
+ elif opts.time:
+ import time
+ t0 = time.time()
+ document = run(parseMethod, f, encoding, opts.scripting)
+ t1 = time.time()
+ if document:
+ printOutput(p, document, opts)
+ t2 = time.time()
+ sys.stderr.write("\n\nRun took: %fs (plus %fs to print the output)" % (t1 - t0, t2 - t1))
+ else:
+ sys.stderr.write("\n\nRun took: %fs" % (t1 - t0))
+ else:
+ document = run(parseMethod, f, encoding, opts.scripting)
+ if document:
+ printOutput(p, document, opts)
+
+
+def run(parseMethod, f, encoding, scripting):
+ try:
+ document = parseMethod(f, override_encoding=encoding, scripting=scripting)
+ except Exception:
+ document = None
+ traceback.print_exc()
+ return document
+
+
+def printOutput(parser, document, opts):
+ if opts.encoding:
+ print("Encoding:", parser.tokenizer.stream.charEncoding)
+
+ for item in parser.log:
+ print(item)
+
+ if document is not None:
+ if opts.xml:
+ tb = opts.treebuilder.lower()
+ if tb == "dom":
+ document.writexml(sys.stdout, encoding="utf-8")
+ elif tb == "lxml":
+ import lxml.etree
+ sys.stdout.write(lxml.etree.tostring(document, encoding="unicode"))
+ elif tb == "etree":
+ sys.stdout.write(_utils.default_etree.tostring(document, encoding="unicode"))
+ elif opts.tree:
+ if not hasattr(document, '__getitem__'):
+ document = [document]
+ for fragment in document:
+ print(parser.tree.testSerializer(fragment))
+ elif opts.html:
+ kwargs = {}
+ for opt in serializer.HTMLSerializer.options:
+ try:
+ kwargs[opt] = getattr(opts, opt)
+ except Exception:
+ pass
+ if not kwargs['quote_char']:
+ del kwargs['quote_char']
+
+ if opts.sanitize:
+ kwargs["sanitize"] = True
+
+ tokens = treewalkers.getTreeWalker(opts.treebuilder)(document)
+ if sys.version_info[0] >= 3:
+ encoding = None
+ else:
+ encoding = "utf-8"
+ for text in serializer.HTMLSerializer(**kwargs).serialize(tokens, encoding=encoding):
+ sys.stdout.write(text)
+ if not text.endswith('\n'):
+ sys.stdout.write('\n')
+ if opts.error:
+ errList = []
+ for pos, errorcode, datavars in parser.errors:
+ errList.append("Line %i Col %i" % pos + " " + constants.E.get(errorcode, 'Unknown error "%s"' % errorcode) % datavars)
+ sys.stdout.write("\nParse errors:\n" + "\n".join(errList) + "\n")
+
+
+def get_parser():
+ parser = argparse.ArgumentParser(description=__doc__)
+
+ parser.add_argument("-p", "--profile", action="store_true",
+ help="Use the hotshot profiler to "
+ "produce a detailed log of the run")
+
+ parser.add_argument("-t", "--time",
+ action="store_true",
+ help="Time the run using time.time (may not be accurate on all platforms, especially for short runs)")
+
+ parser.add_argument("-b", "--treebuilder",
+ default="etree")
+
+ parser.add_argument("-e", "--error", action="store_true",
+ help="Print a list of parse errors")
+
+ parser.add_argument("-f", "--fragment", action="store_true",
+ help="Parse as a fragment")
+
+ parser.add_argument("-s", "--scripting", action="store_true",
+ help="Handle noscript tags as if scripting was enabled")
+
+ parser.add_argument("--tree", action="store_true",
+ help="Output as debug tree")
+
+ parser.add_argument("-x", "--xml", action="store_true",
+ help="Output as xml")
+
+ parser.add_argument("--no-html", action="store_false",
+ dest="html", help="Don't output html")
+
+ parser.add_argument("-c", "--encoding", action="store_true",
+ help="Print character encoding used")
+
+ parser.add_argument("--inject-meta-charset", action="store_true",
+ help="inject <meta charset>")
+
+ parser.add_argument("--strip-whitespace", action="store_true",
+ help="strip whitespace")
+
+ parser.add_argument("--omit-optional-tags", action="store_true",
+ help="omit optional tags")
+
+ parser.add_argument("--quote-attr-values", action="store_true",
+ help="quote attribute values")
+
+ parser.add_argument("--use-best-quote-char", action="store_true",
+ help="use best quote character")
+
+ parser.add_argument("--quote-char",
+ help="quote character")
+
+ parser.add_argument("--no-minimize-boolean-attributes",
+ action="store_false",
+ dest="minimize_boolean_attributes",
+ help="minimize boolean attributes")
+
+ parser.add_argument("--use-trailing-solidus", action="store_true",
+ help="use trailing solidus")
+
+ parser.add_argument("--space-before-trailing-solidus",
+ action="store_true",
+ help="add space before trailing solidus")
+
+ parser.add_argument("--escape-lt-in-attrs", action="store_true",
+ help="escape less than signs in attribute values")
+
+ parser.add_argument("--escape-rcdata", action="store_true",
+ help="escape rcdata element values")
+
+ parser.add_argument("--sanitize", action="store_true",
+ help="sanitize")
+
+ parser.add_argument("-l", "--log", action="store_true",
+ help="log state transitions")
+
+ parser.add_argument("filename")
+
+ return parser
+
+
+if __name__ == "__main__":
+ parse()
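The --html output path above chains a treewalker into serializer.HTMLSerializer. The same pipeline can be used programmatically; a hedged sketch, with option values chosen for illustration:

    # Programmatic equivalent of parse.py's --html path: parse, walk the
    # resulting tree, and re-serialize it as HTML.
    import html5lib
    from html5lib import serializer, treewalkers

    doc = html5lib.parse("<p>Hello <b>world")        # the parser repairs the markup
    walker = treewalkers.getTreeWalker("etree")
    ser = serializer.HTMLSerializer(omit_optional_tags=False,
                                    quote_attr_values="always")

    print("".join(ser.serialize(walker(doc))))
    # e.g. <html><head></head><body><p>Hello <b>world</b></p></body></html>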
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/pytest.ini b/testing/web-platform/tests/tools/third_party/html5lib/pytest.ini
new file mode 100644
index 0000000000..8824977a8f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/pytest.ini
@@ -0,0 +1,17 @@
+[pytest]
+# Report failures, errors, unexpected passes and warnings; disable doctest collection; treat unregistered markers as errors
+addopts = -rfEXw -p no:doctest --strict
+
+# Treat unexpected passes (xpass) as failures
+xfail_strict = true
+
+# Document our markers
+markers =
+ DOM: mark a test as a DOM tree test
+    ElementTree: mark a test as an ElementTree tree test
+    cElementTree: mark a test as a cElementTree tree test
+    lxml: mark a test as an lxml tree test
+ genshi: mark a test as a genshi tree test
+ parser: mark a test as a parser test
+ namespaced: mark a test as a namespaced parser test
+ treewalker: mark a test as a treewalker test
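The markers registered above are attached to tests for each tree implementation; a hypothetical example of how such a marker is used (the test name and body are illustrative, not taken from the suite):

    import pytest


    @pytest.mark.treewalker
    @pytest.mark.lxml
    def test_lxml_walker_emits_characters():
        # With --strict in addopts, only marker names declared in pytest.ini
        # may be applied to tests.
        pytest.importorskip("lxml.etree")
        import html5lib
        from html5lib import treewalkers

        tree = html5lib.parse("<p>hi</p>", treebuilder="lxml")
        tokens = list(treewalkers.getTreeWalker("lxml")(tree))
        assert any(t["type"] == "Characters" and t["data"] == "hi" for t in tokens)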
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/requirements-install.sh b/testing/web-platform/tests/tools/third_party/html5lib/requirements-install.sh
new file mode 100755
index 0000000000..b7a8d96dd6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/requirements-install.sh
@@ -0,0 +1,15 @@
+#!/bin/bash -ex
+
+if [[ $SIX_VERSION ]]; then
+ pip install six==$SIX_VERSION
+fi
+
+pip install -r requirements-test.txt
+
+if [[ $USE_OPTIONAL == "true" ]]; then
+ pip install -r requirements-optional.txt
+fi
+
+if [[ $CI == "true" ]]; then
+ pip install codecov
+fi
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/requirements-optional.txt b/testing/web-platform/tests/tools/third_party/html5lib/requirements-optional.txt
new file mode 100644
index 0000000000..2e78c952c1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/requirements-optional.txt
@@ -0,0 +1,13 @@
+-r requirements.txt
+
+# We support a Genshi treewalker that can be used to serialize Genshi
+# streams.
+genshi
+
+# chardet can be used as a fallback in case we are unable to determine
+# the encoding of a document.
+chardet>=2.2
+
+# lxml is supported with its own treebuilder ("lxml") and otherwise
+# uses the standard ElementTree support
+lxml ; platform_python_implementation == 'CPython'
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/requirements-test.txt b/testing/web-platform/tests/tools/third_party/html5lib/requirements-test.txt
new file mode 100644
index 0000000000..703d0e690f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/requirements-test.txt
@@ -0,0 +1,10 @@
+-r requirements.txt
+
+tox>=3.15.1,<4
+flake8>=3.8.1,<3.9
+pytest>=4.6.10,<5 ; python_version < '3'
+pytest>=5.4.2,<6 ; python_version >= '3'
+coverage>=5.1,<6
+pytest-expect>=1.1.0,<2
+mock>=3.0.5,<4 ; python_version < '3.6'
+mock>=4.0.2,<5 ; python_version >= '3.6'
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/requirements.txt b/testing/web-platform/tests/tools/third_party/html5lib/requirements.txt
new file mode 100644
index 0000000000..ae7ec3d08b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/requirements.txt
@@ -0,0 +1,2 @@
+six>=1.9
+webencodings
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/setup.cfg b/testing/web-platform/tests/tools/third_party/html5lib/setup.cfg
new file mode 100644
index 0000000000..0b2bb9c79b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/setup.cfg
@@ -0,0 +1,11 @@
+[bdist_wheel]
+universal = 1
+
+[pep8]
+ignore = N
+max-line-length = 139
+exclude = .git,__pycache__,.tox,doc
+
+[flake8]
+ignore = N, W504
+max-line-length = 139
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/setup.py b/testing/web-platform/tests/tools/third_party/html5lib/setup.py
new file mode 100644
index 0000000000..f84c128496
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/setup.py
@@ -0,0 +1,127 @@
+from __future__ import print_function
+
+import ast
+import codecs
+import sys
+
+from os.path import join, dirname
+from setuptools import setup, find_packages, __version__ as setuptools_version
+from pkg_resources import parse_version
+
+import pkg_resources
+
+try:
+ import _markerlib.markers
+except ImportError:
+ _markerlib = None
+
+
+# _markerlib.default_environment() obtains its data from _VARS
+# and wraps it in another dict, but _markerlib_evaluate writes
+# to the dict while it is iterating the keys, causing an error
+# on Python 3 only.
+# Replace _markerlib.default_environment to return a custom dict
+# that has all the necessary markers, and ignores any writes.
+
+class Python3MarkerDict(dict):
+
+ def __setitem__(self, key, value):
+ pass
+
+ def pop(self, i=-1):
+ return self[i]
+
+
+if _markerlib and sys.version_info[0] == 3:
+ env = _markerlib.markers._VARS
+ for key in list(env.keys()):
+ new_key = key.replace('.', '_')
+ if new_key != key:
+ env[new_key] = env[key]
+
+ _markerlib.markers._VARS = Python3MarkerDict(env)
+
+ def default_environment():
+ return _markerlib.markers._VARS
+
+ _markerlib.default_environment = default_environment
+
+# Avoid the very buggy pkg_resources.parser, which doesn't consistently
+# recognise the markers needed by this setup.py.
+# Bump the version check below to setuptools 20.10.0 to support all markers.
+if pkg_resources:
+ if parse_version(setuptools_version) < parse_version('18.5'):
+ MarkerEvaluation = pkg_resources.MarkerEvaluation
+
+ del pkg_resources.parser
+ pkg_resources.evaluate_marker = MarkerEvaluation._markerlib_evaluate
+ MarkerEvaluation.evaluate_marker = MarkerEvaluation._markerlib_evaluate
+
+classifiers = [
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ 'Topic :: Software Development :: Libraries :: Python Modules',
+ 'Topic :: Text Processing :: Markup :: HTML'
+]
+
+here = dirname(__file__)
+with codecs.open(join(here, 'README.rst'), 'r', 'utf8') as readme_file:
+ with codecs.open(join(here, 'CHANGES.rst'), 'r', 'utf8') as changes_file:
+ long_description = readme_file.read() + '\n' + changes_file.read()
+
+version = None
+with open(join(here, "html5lib", "__init__.py"), "rb") as init_file:
+ t = ast.parse(init_file.read(), filename="__init__.py", mode="exec")
+ assert isinstance(t, ast.Module)
+ assignments = filter(lambda x: isinstance(x, ast.Assign), t.body)
+ for a in assignments:
+ if (len(a.targets) == 1 and
+ isinstance(a.targets[0], ast.Name) and
+ a.targets[0].id == "__version__" and
+ isinstance(a.value, ast.Str)):
+ version = a.value.s
+
+setup(name='html5lib',
+ version=version,
+ url='https://github.com/html5lib/html5lib-python',
+ license="MIT License",
+ description='HTML parser based on the WHATWG HTML specification',
+ long_description=long_description,
+ classifiers=classifiers,
+ maintainer='James Graham',
+ maintainer_email='james@hoppipolla.co.uk',
+ packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
+ install_requires=[
+ 'six>=1.9',
+ 'webencodings',
+ ],
+ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
+ extras_require={
+ # A conditional extra will only install these items when the extra is
+ # requested and the condition matches.
+ "lxml:platform_python_implementation == 'CPython'": ["lxml"],
+
+ # Standard extras, will be installed when the extra is requested.
+ "genshi": ["genshi"],
+ "chardet": ["chardet>=2.2"],
+
+ # The all extra combines a standard extra which will be used anytime
+ # the all extra is requested, and it extends it with a conditional
+ # extra that will be installed whenever the condition matches and the
+ # all extra is requested.
+ "all": ["genshi", "chardet>=2.2"],
+ "all:platform_python_implementation == 'CPython'": ["lxml"],
+ },
+ )
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/tox.ini b/testing/web-platform/tests/tools/third_party/html5lib/tox.ini
new file mode 100644
index 0000000000..58758cea13
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/tox.ini
@@ -0,0 +1,20 @@
+[tox]
+envlist = py{27,35,36,37,38,py,py3}-{base,six19,optional}
+
+[testenv]
+deps =
+ optional: -r{toxinidir}/requirements-optional.txt
+ -r{toxinidir}/requirements-test.txt
+ doc: Sphinx
+
+passenv =
+ PYTEST_COMMAND
+ COVERAGE_RUN_OPTIONS
+commands =
+ six19: pip install six==1.9
+ {env:PYTEST_COMMAND:{envbindir}/py.test} {posargs}
+ flake8 {toxinidir}
+
+[testenv:doc]
+changedir = doc
+commands = sphinx-build -b html . _build
diff --git a/testing/web-platform/tests/tools/third_party/html5lib/utils/entities.py b/testing/web-platform/tests/tools/third_party/html5lib/utils/entities.py
new file mode 100644
index 0000000000..6e8ca45806
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/html5lib/utils/entities.py
@@ -0,0 +1,101 @@
+import json
+
+import html5lib
+
+
+def parse(path="html5ents.xml"):
+ return html5lib.parse(open(path), treebuilder="lxml")
+
+
+def entity_table(tree):
+ return {entity_name("".join(tr[0].xpath(".//text()"))):
+ entity_characters(tr[1].text)
+ for tr in tree.xpath("//h:tbody/h:tr",
+ namespaces={"h": "http://www.w3.org/1999/xhtml"})}
+
+
+def entity_name(inp):
+ return inp.strip()
+
+
+def entity_characters(inp):
+ return "".join(codepoint_to_character(item)
+ for item in inp.split()
+ if item)
+
+
+def codepoint_to_character(inp):
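+    # Note: this relies on Python 2 semantics (str.decode), so the helper
+    # script will not run unchanged on Python 3.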
+ return ("\\U000" + inp[2:]).decode("unicode-escape")
+
+
+def make_tests_json(entities):
+ test_list = make_test_list(entities)
+ tests_json = {"tests":
+ [make_test(*item) for item in test_list]
+ }
+ return tests_json
+
+
+def make_test(name, characters, good):
+ return {
+ "description": test_description(name, good),
+ "input": "&%s" % name,
+ "output": test_expected(name, characters, good)
+ }
+
+
+def test_description(name, good):
+ with_semicolon = name.endswith(";")
+ semicolon_text = {True: "with a semi-colon",
+ False: "without a semi-colon"}[with_semicolon]
+ if good:
+ text = "Named entity: %s %s" % (name, semicolon_text)
+ else:
+ text = "Bad named entity: %s %s" % (name, semicolon_text)
+ return text
+
+
+def test_expected(name, characters, good):
+ rv = []
+ if not good or not name.endswith(";"):
+ rv.append("ParseError")
+ rv.append(["Character", characters])
+ return rv
+
+
+def make_test_list(entities):
+ tests = []
+ for entity_name, characters in entities.items():
+ if entity_name.endswith(";") and not subentity_exists(entity_name, entities):
+ tests.append((entity_name[:-1], "&" + entity_name[:-1], False))
+ tests.append((entity_name, characters, True))
+ return sorted(tests)
+
+
+def subentity_exists(entity_name, entities):
+ for i in range(1, len(entity_name)):
+ if entity_name[:-i] in entities:
+ return True
+ return False
+
+
+def make_entities_code(entities):
+ entities_text = "\n".join(" \"%s\": u\"%s\"," % (
+ name, entities[name].encode(
+ "unicode-escape").replace("\"", "\\\""))
+ for name in sorted(entities.keys()))
+ return """entities = {
+%s
+}""" % entities_text
+
+
+def main():
+ entities = entity_table(parse())
+ tests_json = make_tests_json(entities)
+ json.dump(tests_json, open("namedEntities.test", "w"), indent=4)
+ code = make_entities_code(entities)
+ open("entities_constants.py", "w").write(code)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/CONTRIBUTORS.rst b/testing/web-platform/tests/tools/third_party/hyperframe/CONTRIBUTORS.rst
new file mode 100644
index 0000000000..aa7ab8b637
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/CONTRIBUTORS.rst
@@ -0,0 +1,56 @@
+Hyper is written and maintained by Cory Benfield and various contributors:
+
+Development Lead
+````````````````
+
+- Cory Benfield <cory@lukasa.co.uk>
+
+Contributors
+````````````
+
+In chronological order:
+
+- Sriram Ganesan (@elricL)
+
+ - Implemented the Huffman encoding/decoding logic.
+
+- Alek Storm (@alekstorm)
+
+ - Implemented Python 2.7 support.
+ - Implemented HTTP/2 draft 10 support.
+ - Implemented server push.
+
+- Tetsuya Morimoto (@t2y)
+
+ - Fixed a bug where large or incomplete frames were not handled correctly.
+ - Added hyper command-line tool.
+ - General code cleanups.
+
+- Jerome De Cuyper (@jdecuyper)
+
+ - Updated documentation and tests.
+
+- Maximilian Hils (@mhils)
+
+ - Added repr for frames.
+ - Improved frame initialization code.
+ - Added flag validation.
+
+- Thomas Kriechbaumer (@Kriechi)
+
+ - Improved initialization code.
+ - Fixed bugs in frame initialization code.
+ - Improved frame repr for frames with non-printable bodies.
+
+- Davey Shafik (@dshafik)
+
+ - Fixed Alt Svc frame stream association.
+
+- Seth Michael Larson (@SethMichaelLarson)
+
+ - Performance improvements to serialization and parsing.
+
+- Fred Thomsen (@fredthomsen)
+
+ - Support for memoryview in DataFrames.
+
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/HISTORY.rst b/testing/web-platform/tests/tools/third_party/hyperframe/HISTORY.rst
new file mode 100644
index 0000000000..172b2b91ba
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/HISTORY.rst
@@ -0,0 +1,179 @@
+Release History
+===============
+
+6.0.0dev0
+---------
+
+5.2.0 (2019-01-18)
+------------------
+
+**API Changes (Backward-compatible)**
+
+- Add a new ENABLE_CONNECT_PROTOCOL settings parameter.
+
+**Other Changes**
+
+- Fix collections.abc deprecation.
+- Drop support for Python 3.3 and support 3.7.
+
+5.1.0 (2017-04-24)
+------------------
+
+**API Changes (Backward-compatible)**
+
+- Added support for ``DataFrame.data`` being a ``memoryview`` object.
+
+5.0.0 (2017-03-07)
+------------------
+
+**Backwards Incompatible API Changes**
+
+- Added support for unknown extension frames. These will be returned in the new
+ ``ExtensionFrame`` object. The flag information for these frames is persisted
+ in ``flag_byte`` if needed.
+
+4.0.2 (2017-02-20)
+------------------
+
+**Bugfixes**
+
+- Fixed AltSvc stream association, which was incorrectly set to ``'both'``:
+ should have been ``'either'``.
+- Fixed a bug where stream IDs on received frames were allowed to be 32-bit,
+ instead of 31-bit.
+- Fixed a bug with frames that had the ``PADDING`` flag set but zero-length
+ padding, whose flow-controlled length was calculated wrongly.
+- Miscellaneous performance improvements to serialization and parsing logic.
+
+4.0.1 (2016-03-13)
+------------------
+
+**Bugfixes**
+
+- Fixed bug with the repr of ``AltSvcFrame``, where building it could throw
+ exceptions if the frame had been received from the network.
+
+4.0.0 (2016-03-13)
+------------------
+
+**Backwards Incompatible API Changes**
+
+- Updated old ALTSVC frame definition to match the newly specified RFC 7838.
+- Remove BLOCKED frame, which was never actually specified.
+- Removed previously deprecated ``SettingsFrame.SETTINGS_MAX_FRAME_SIZE`` and
+ ``SettingsFrame.SETTINGS_MAX_HEADER_LIST_SIZE``.
+
+3.2.0 (2016-02-02)
+------------------
+
+**API Changes (Backward-compatible)**
+
+- Invalid PING frame bodies now raise ``InvalidFrameError``, not
+ ``ValueError``. Note that ``InvalidFrameError`` is a ``ValueError`` subclass.
+- Invalid RST_STREAM frame bodies now raise ``InvalidFrameError``, not
+ ``ValueError``. Note that ``InvalidFrameError`` is a ``ValueError`` subclass.
+- Canonicalized the names of ``SettingsFrame.SETTINGS_MAX_FRAME_SIZE`` and
+ ``SettingsFrame.SETTINGS_MAX_HEADER_LIST_SIZE`` to match their peers, by
+ adding new properties ``SettingsFrame.MAX_FRAME_SIZE`` and
+  ``SettingsFrame.MAX_HEADER_LIST_SIZE``. The old names are still
+ present, but will be deprecated in 4.0.0.
+
+**Bugfixes**
+
+- The change in ``3.1.0`` that ensured that ``InvalidFrameError`` would be
+ thrown did not affect certain invalid values in ALT_SVC frames. This has been
+ fixed: ``ValueError`` will no longer be thrown from invalid ALT_SVC bodies.
+
+3.1.1 (2016-01-18)
+------------------
+
+**Bugfixes**
+
+- Correctly error when receiving Ping frames that have insufficient data.
+
+3.1.0 (2016-01-13)
+------------------
+
+**API Changes**
+
+- Added new ``InvalidFrameError`` that is thrown instead of ``struct.error``
+ when parsing a frame.
+
+**Bugfixes**
+
+- Fixed error when trying to serialize frames that use Priority information
+ with the defaults for that information.
+- Fixed errors when displaying the repr of frames with non-printable bodies.
+
+3.0.1 (2016-01-08)
+------------------
+
+**Bugfixes**
+
+- Fix issue where unpadded DATA, PUSH_PROMISE and HEADERS frames that had empty
+ bodies would raise ``InvalidPaddingError`` exceptions when parsed.
+
+3.0.0 (2016-01-08)
+------------------
+
+**Backwards Incompatible API Changes**
+
+- Parsing padded frames that have invalid padding sizes now throws an
+ ``InvalidPaddingError``.
+
+2.2.0 (2015-10-15)
+------------------
+
+**API Changes**
+
+- When an unknown frame is encountered, ``parse_frame_header`` now throws a
+ ``ValueError`` subclass: ``UnknownFrameError``. This subclass contains the
+ frame type and the length of the frame body.
+
+2.1.0 (2015-10-06)
+------------------
+
+**API Changes**
+
+- Frames parsed from binary data now carry a ``body_len`` attribute that
+ matches the frame length (minus the frame header).
+
+2.0.0 (2015-09-21)
+------------------
+
+**API Changes**
+
+- Attempting to parse unrecognised frames now throws ``ValueError`` instead of
+ ``KeyError``. Thanks to @Kriechi!
+- Flags are now validated for correctness, preventing setting flags that
+ ``hyperframe`` does not recognise and that would not serialize. Thanks to
+ @mhils!
+- Frame properties can now be initialized in the constructors. Thanks to @mhils
+ and @Kriechi!
+- Frames that cannot be sent on a stream now have their stream ID defaulted
+ to ``0``. Thanks to @Kriechi!
+
+**Other Changes**
+
+- Frames have a more useful repr. Thanks to @mhils!
+
+1.1.1 (2015-07-20)
+------------------
+
+- Fix a bug where ``FRAME_MAX_LEN`` was one byte too small.
+
+1.1.0 (2015-06-28)
+------------------
+
+- Add ``body_len`` property to frames to enable introspection of the actual
+ frame length. Thanks to @jdecuyper!
+
+1.0.1 (2015-06-27)
+------------------
+
+- Fix bug where the frame header would have an incorrect length added to it.
+
+1.0.0 (2015-04-12)
+------------------
+
+- Initial extraction from hyper.
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/LICENSE b/testing/web-platform/tests/tools/third_party/hyperframe/LICENSE
new file mode 100644
index 0000000000..d24c351e18
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Cory Benfield
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/MANIFEST.in b/testing/web-platform/tests/tools/third_party/hyperframe/MANIFEST.in
new file mode 100644
index 0000000000..2f464676cb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/MANIFEST.in
@@ -0,0 +1,2 @@
+include README.rst LICENSE CONTRIBUTORS.rst HISTORY.rst
+
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/PKG-INFO b/testing/web-platform/tests/tools/third_party/hyperframe/PKG-INFO
new file mode 100644
index 0000000000..cfd53f7e93
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/PKG-INFO
@@ -0,0 +1,242 @@
+Metadata-Version: 1.1
+Name: hyperframe
+Version: 5.2.0
+Summary: HTTP/2 framing layer for Python
+Home-page: https://python-hyper.org/hyperframe/en/latest/
+Author: Cory Benfield
+Author-email: cory@lukasa.co.uk
+License: MIT License
+Description: ======================================
+ hyperframe: Pure-Python HTTP/2 framing
+ ======================================
+
+ .. image:: https://travis-ci.org/python-hyper/hyperframe.png?branch=master
+ :target: https://travis-ci.org/python-hyper/hyperframe
+
+ This library contains the HTTP/2 framing code used in the `hyper`_ project. It
+ provides a pure-Python codebase that is capable of decoding a binary stream
+ into HTTP/2 frames.
+
+ This library is used directly by `hyper`_ and a number of other projects to
+ provide HTTP/2 frame decoding logic.
+
+ Contributing
+ ============
+
+ hyperframe welcomes contributions from anyone! Unlike many other projects we
+ are happy to accept cosmetic contributions and small contributions, in addition
+ to large feature requests and changes.
+
+ Before you contribute (either by opening an issue or filing a pull request),
+ please `read the contribution guidelines`_.
+
+ .. _read the contribution guidelines: http://hyper.readthedocs.org/en/development/contributing.html
+
+ License
+ =======
+
+ hyperframe is made available under the MIT License. For more details, see the
+ ``LICENSE`` file in the repository.
+
+ Authors
+ =======
+
+ hyperframe is maintained by Cory Benfield, with contributions from others. For
+ more details about the contributors, please see ``CONTRIBUTORS.rst``.
+
+ .. _hyper: http://python-hyper.org/
+
+
+ Release History
+ ===============
+
+ 6.0.0dev0
+ ---------
+
+ 5.2.0 (2019-01-18)
+ ------------------
+
+ **API Changes (Backward-compatible)**
+
+        - Add a new ENABLE_CONNECT_PROTOCOL settings parameter.
+
+ **Other Changes**
+
+ - Fix collections.abc deprecation.
+ - Drop support for Python 3.3 and support 3.7.
+
+ 5.1.0 (2017-04-24)
+ ------------------
+
+ **API Changes (Backward-compatible)**
+
+ - Added support for ``DataFrame.data`` being a ``memoryview`` object.
+
+ 5.0.0 (2017-03-07)
+ ------------------
+
+ **Backwards Incompatible API Changes**
+
+ - Added support for unknown extension frames. These will be returned in the new
+ ``ExtensionFrame`` object. The flag information for these frames is persisted
+ in ``flag_byte`` if needed.
+
+ 4.0.2 (2017-02-20)
+ ------------------
+
+ **Bugfixes**
+
+ - Fixed AltSvc stream association, which was incorrectly set to ``'both'``:
+ should have been ``'either'``.
+ - Fixed a bug where stream IDs on received frames were allowed to be 32-bit,
+ instead of 31-bit.
+ - Fixed a bug with frames that had the ``PADDING`` flag set but zero-length
+ padding, whose flow-controlled length was calculated wrongly.
+ - Miscellaneous performance improvements to serialization and parsing logic.
+
+ 4.0.1 (2016-03-13)
+ ------------------
+
+ **Bugfixes**
+
+ - Fixed bug with the repr of ``AltSvcFrame``, where building it could throw
+ exceptions if the frame had been received from the network.
+
+ 4.0.0 (2016-03-13)
+ ------------------
+
+ **Backwards Incompatible API Changes**
+
+ - Updated old ALTSVC frame definition to match the newly specified RFC 7838.
+ - Remove BLOCKED frame, which was never actually specified.
+ - Removed previously deprecated ``SettingsFrame.SETTINGS_MAX_FRAME_SIZE`` and
+ ``SettingsFrame.SETTINGS_MAX_HEADER_LIST_SIZE``.
+
+ 3.2.0 (2016-02-02)
+ ------------------
+
+ **API Changes (Backward-compatible)**
+
+ - Invalid PING frame bodies now raise ``InvalidFrameError``, not
+ ``ValueError``. Note that ``InvalidFrameError`` is a ``ValueError`` subclass.
+        - Invalid RST_STREAM frame bodies now raise ``InvalidFrameError``, not
+ ``ValueError``. Note that ``InvalidFrameError`` is a ``ValueError`` subclass.
+ - Canonicalized the names of ``SettingsFrame.SETTINGS_MAX_FRAME_SIZE`` and
+ ``SettingsFrame.SETTINGS_MAX_HEADER_LIST_SIZE`` to match their peers, by
+ adding new properties ``SettingsFrame.MAX_FRAME_SIZE`` and
+          ``SettingsFrame.MAX_HEADER_LIST_SIZE``. The old names are still
+ present, but will be deprecated in 4.0.0.
+
+ **Bugfixes**
+
+ - The change in ``3.1.0`` that ensured that ``InvalidFrameError`` would be
+ thrown did not affect certain invalid values in ALT_SVC frames. This has been
+ fixed: ``ValueError`` will no longer be thrown from invalid ALT_SVC bodies.
+
+ 3.1.1 (2016-01-18)
+ ------------------
+
+ **Bugfixes**
+
+ - Correctly error when receiving Ping frames that have insufficient data.
+
+ 3.1.0 (2016-01-13)
+ ------------------
+
+ **API Changes**
+
+ - Added new ``InvalidFrameError`` that is thrown instead of ``struct.error``
+ when parsing a frame.
+
+ **Bugfixes**
+
+ - Fixed error when trying to serialize frames that use Priority information
+ with the defaults for that information.
+ - Fixed errors when displaying the repr of frames with non-printable bodies.
+
+ 3.0.1 (2016-01-08)
+ ------------------
+
+ **Bugfixes**
+
+ - Fix issue where unpadded DATA, PUSH_PROMISE and HEADERS frames that had empty
+ bodies would raise ``InvalidPaddingError`` exceptions when parsed.
+
+ 3.0.0 (2016-01-08)
+ ------------------
+
+ **Backwards Incompatible API Changes**
+
+ - Parsing padded frames that have invalid padding sizes now throws an
+ ``InvalidPaddingError``.
+
+ 2.2.0 (2015-10-15)
+ ------------------
+
+ **API Changes**
+
+ - When an unknown frame is encountered, ``parse_frame_header`` now throws a
+ ``ValueError`` subclass: ``UnknownFrameError``. This subclass contains the
+ frame type and the length of the frame body.
+
+ 2.1.0 (2015-10-06)
+ ------------------
+
+ **API Changes**
+
+ - Frames parsed from binary data now carry a ``body_len`` attribute that
+ matches the frame length (minus the frame header).
+
+ 2.0.0 (2015-09-21)
+ ------------------
+
+ **API Changes**
+
+ - Attempting to parse unrecognised frames now throws ``ValueError`` instead of
+ ``KeyError``. Thanks to @Kriechi!
+ - Flags are now validated for correctness, preventing setting flags that
+ ``hyperframe`` does not recognise and that would not serialize. Thanks to
+ @mhils!
+ - Frame properties can now be initialized in the constructors. Thanks to @mhils
+ and @Kriechi!
+ - Frames that cannot be sent on a stream now have their stream ID defaulted
+ to ``0``. Thanks to @Kriechi!
+
+ **Other Changes**
+
+ - Frames have a more useful repr. Thanks to @mhils!
+
+ 1.1.1 (2015-07-20)
+ ------------------
+
+ - Fix a bug where ``FRAME_MAX_LEN`` was one byte too small.
+
+ 1.1.0 (2015-06-28)
+ ------------------
+
+ - Add ``body_len`` property to frames to enable introspection of the actual
+ frame length. Thanks to @jdecuyper!
+
+ 1.0.1 (2015-06-27)
+ ------------------
+
+ - Fix bug where the frame header would have an incorrect length added to it.
+
+ 1.0.0 (2015-04-12)
+ ------------------
+
+ - Initial extraction from hyper.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: Implementation :: CPython
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/README.rst b/testing/web-platform/tests/tools/third_party/hyperframe/README.rst
new file mode 100644
index 0000000000..385b39af9b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/README.rst
@@ -0,0 +1,39 @@
+======================================
+hyperframe: Pure-Python HTTP/2 framing
+======================================
+
+.. image:: https://travis-ci.org/python-hyper/hyperframe.png?branch=master
+ :target: https://travis-ci.org/python-hyper/hyperframe
+
+This library contains the HTTP/2 framing code used in the `hyper`_ project. It
+provides a pure-Python codebase that is capable of decoding a binary stream
+into HTTP/2 frames.
+
+This library is used directly by `hyper`_ and a number of other projects to
+provide HTTP/2 frame decoding logic.
+
+Contributing
+============
+
+hyperframe welcomes contributions from anyone! Unlike many other projects we
+are happy to accept cosmetic contributions and small contributions, in addition
+to large feature requests and changes.
+
+Before you contribute (either by opening an issue or filing a pull request),
+please `read the contribution guidelines`_.
+
+.. _read the contribution guidelines: http://hyper.readthedocs.org/en/development/contributing.html
+
+License
+=======
+
+hyperframe is made available under the MIT License. For more details, see the
+``LICENSE`` file in the repository.
+
+Authors
+=======
+
+hyperframe is maintained by Cory Benfield, with contributions from others. For
+more details about the contributors, please see ``CONTRIBUTORS.rst``.
+
+.. _hyper: http://python-hyper.org/
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/__init__.py b/testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/__init__.py
new file mode 100644
index 0000000000..7620b4bdf7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/__init__.py
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+"""
+hyperframe
+~~~~~~~~~~
+
+A module for providing a pure-Python HTTP/2 framing layer.
+"""
+__version__ = '5.2.0'
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/exceptions.py b/testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/exceptions.py
new file mode 100644
index 0000000000..dd30369c70
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/exceptions.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+"""
+hyperframe/exceptions
+~~~~~~~~~~~~~~~~~~~~~
+
+Defines the exceptions that can be thrown by hyperframe.
+"""
+
+
+class UnknownFrameError(ValueError):
+ """
+    A frame of unknown type was received.
+ """
+ def __init__(self, frame_type, length):
+ #: The type byte of the unknown frame that was received.
+ self.frame_type = frame_type
+
+ #: The length of the data portion of the unknown frame.
+ self.length = length
+
+ def __str__(self):
+ return (
+ "UnknownFrameError: Unknown frame type 0x%X received, "
+ "length %d bytes" % (self.frame_type, self.length)
+ )
+
+
+class InvalidPaddingError(ValueError):
+ """
+ A frame with invalid padding was received.
+ """
+ pass
+
+
+class InvalidFrameError(ValueError):
+ """
+ Parsing a frame failed because the data was not laid out appropriately.
+
+ .. versionadded:: 3.0.2
+ """
+ pass
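A short sketch of how these exceptions surface from Frame.parse_frame_header (defined in hyperframe/frame.py later in this patch), assuming the package is importable:

    from hyperframe.frame import Frame
    from hyperframe.exceptions import InvalidFrameError, UnknownFrameError

    # A header shorter than nine bytes cannot be unpacked.
    try:
        Frame.parse_frame_header(b"short")
    except InvalidFrameError as exc:
        print("invalid header:", exc)

    # A nine-byte header advertising unassigned frame type 0xFF on stream 1;
    # this only raises in strict mode, otherwise it becomes an ExtensionFrame.
    header = b"\x00\x00\x00" + b"\xff" + b"\x00" + b"\x00\x00\x00\x01"
    try:
        Frame.parse_frame_header(header, strict=True)
    except UnknownFrameError as exc:
        print("unknown frame type:", hex(exc.frame_type), "body length:", exc.length)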
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/flags.py b/testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/flags.py
new file mode 100644
index 0000000000..1660bd1800
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/flags.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+"""
+hyperframe/flags
+~~~~~~~~~~~~~~~~
+
+Defines basic Flag and Flags data structures.
+"""
+import collections
+
+try:
+ from collections.abc import MutableSet
+except ImportError: # pragma: no cover
+ # Python 2.7 compatibility
+ from collections import MutableSet
+
+Flag = collections.namedtuple("Flag", ["name", "bit"])
+
+
+class Flags(MutableSet):
+ """
+ A simple MutableSet implementation that will only accept known flags as
+ elements.
+
+ Will behave like a regular set(), except that a ValueError will be thrown
+ when .add()ing unexpected flags.
+ """
+ def __init__(self, defined_flags):
+ self._valid_flags = set(flag.name for flag in defined_flags)
+ self._flags = set()
+
+ def __contains__(self, x):
+ return self._flags.__contains__(x)
+
+ def __iter__(self):
+ return self._flags.__iter__()
+
+ def __len__(self):
+ return self._flags.__len__()
+
+ def discard(self, value):
+ return self._flags.discard(value)
+
+ def add(self, value):
+ if value not in self._valid_flags:
+ raise ValueError(
+ "Unexpected flag: {}. Valid flags are: {}".format(
+ value, self._valid_flags
+ )
+ )
+ return self._flags.add(value)
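A minimal sketch of the behaviour documented above, assuming hyperframe is importable:

    # Flags only accepts names that were declared up front via Flag definitions.
    from hyperframe.flags import Flag, Flags

    flags = Flags([Flag("END_STREAM", 0x01), Flag("PADDED", 0x08)])
    flags.add("END_STREAM")
    print("END_STREAM" in flags)    # True

    try:
        flags.add("NOT_A_FLAG")
    except ValueError as exc:
        print(exc)                  # Unexpected flag: NOT_A_FLAG. Valid flags are: ...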
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/frame.py b/testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/frame.py
new file mode 100644
index 0000000000..795057279b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/hyperframe/frame.py
@@ -0,0 +1,822 @@
+# -*- coding: utf-8 -*-
+"""
+hyperframe/frame
+~~~~~~~~~~~~~~~~
+
+Defines framing logic for HTTP/2. Provides both classes to represent framed
+data and logic for aiding the connection when it comes to reading from the
+socket.
+"""
+import struct
+import binascii
+
+from .exceptions import (
+ UnknownFrameError, InvalidPaddingError, InvalidFrameError
+)
+from .flags import Flag, Flags
+
+
+# The maximum initial length of a frame. Some frames have shorter maximum
+# lengths.
+FRAME_MAX_LEN = (2 ** 14)
+
+# The maximum allowed length of a frame.
+FRAME_MAX_ALLOWED_LEN = (2 ** 24) - 1
+
+# Stream association enumerations.
+_STREAM_ASSOC_HAS_STREAM = "has-stream"
+_STREAM_ASSOC_NO_STREAM = "no-stream"
+_STREAM_ASSOC_EITHER = "either"
+
+# Structs for packing and unpacking
+_STRUCT_HBBBL = struct.Struct(">HBBBL")
+_STRUCT_LL = struct.Struct(">LL")
+_STRUCT_HL = struct.Struct(">HL")
+_STRUCT_LB = struct.Struct(">LB")
+_STRUCT_L = struct.Struct(">L")
+_STRUCT_H = struct.Struct(">H")
+_STRUCT_B = struct.Struct(">B")
+
+
+class Frame(object):
+ """
+ The base class for all HTTP/2 frames.
+ """
+ #: The flags defined on this type of frame.
+ defined_flags = []
+
+ #: The byte used to define the type of the frame.
+ type = None
+
+ # If 'has-stream', the frame's stream_id must be non-zero. If 'no-stream',
+ # it must be zero. If 'either', it's not checked.
+ stream_association = None
+
+ def __init__(self, stream_id, flags=()):
+ #: The stream identifier for the stream this frame was received on.
+ #: Set to 0 for frames sent on the connection (stream-id 0).
+ self.stream_id = stream_id
+
+ #: The flags set for this frame.
+ self.flags = Flags(self.defined_flags)
+
+ #: The frame length, excluding the nine-byte header.
+ self.body_len = 0
+
+ for flag in flags:
+ self.flags.add(flag)
+
+ if (not self.stream_id and
+ self.stream_association == _STREAM_ASSOC_HAS_STREAM):
+ raise ValueError('Stream ID must be non-zero')
+ if (self.stream_id and
+ self.stream_association == _STREAM_ASSOC_NO_STREAM):
+ raise ValueError('Stream ID must be zero')
+
+ def __repr__(self):
+ flags = ", ".join(self.flags) or "None"
+ body = binascii.hexlify(self.serialize_body()).decode('ascii')
+ if len(body) > 20:
+ body = body[:20] + "..."
+ return (
+ "{type}(Stream: {stream}; Flags: {flags}): {body}"
+ ).format(
+ type=type(self).__name__,
+ stream=self.stream_id,
+ flags=flags,
+ body=body
+ )
+
+ @staticmethod
+ def parse_frame_header(header, strict=False):
+ """
+ Takes a 9-byte frame header and returns a tuple of the appropriate
+ Frame object and the length that needs to be read from the socket.
+
+ This populates the flags field, and determines how long the body is.
+
+ :param strict: Whether to raise an exception when encountering a frame
+ not defined by spec and implemented by hyperframe.
+
+ :raises hyperframe.exceptions.UnknownFrameError: If a frame of unknown
+ type is received.
+
+ .. versionchanged:: 5.0.0
+ Added :param:`strict` to accommodate :class:`ExtensionFrame`
+ """
+ try:
+ fields = _STRUCT_HBBBL.unpack(header)
+ except struct.error:
+ raise InvalidFrameError("Invalid frame header")
+
+ # First 24 bits are frame length.
+ length = (fields[0] << 8) + fields[1]
+ type = fields[2]
+ flags = fields[3]
+ stream_id = fields[4] & 0x7FFFFFFF
+
+ try:
+ frame = FRAMES[type](stream_id)
+ except KeyError:
+ if strict:
+ raise UnknownFrameError(type, length)
+ frame = ExtensionFrame(type=type, stream_id=stream_id)
+
+ frame.parse_flags(flags)
+ return (frame, length)
+
+ def parse_flags(self, flag_byte):
+ for flag, flag_bit in self.defined_flags:
+ if flag_byte & flag_bit:
+ self.flags.add(flag)
+
+ return self.flags
+
+ def serialize(self):
+ """
+ Convert a frame into a bytestring, representing the serialized form of
+ the frame.
+ """
+ body = self.serialize_body()
+ self.body_len = len(body)
+
+ # Build the common frame header.
+ # First, get the flags.
+ flags = 0
+
+ for flag, flag_bit in self.defined_flags:
+ if flag in self.flags:
+ flags |= flag_bit
+
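+ # For example, a 300-byte body is written as 0x00 0x01 0x2C across the
+ # first three header bytes: the H field carries (300 >> 8) & 0xFFFF == 1
+ # and the B field carries 300 & 0xFF == 0x2C (see test_long_data_frame).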
+ header = _STRUCT_HBBBL.pack(
+ (self.body_len >> 8) & 0xFFFF, # Length spread over top 24 bits
+ self.body_len & 0xFF,
+ self.type,
+ flags,
+ self.stream_id & 0x7FFFFFFF # Stream ID is 32 bits.
+ )
+
+ return header + body
+
+ def serialize_body(self):
+ raise NotImplementedError()
+
+ def parse_body(self, data):
+ """
+ Given the body of a frame, parses it into frame data. This populates
+ the non-header parts of the frame: that is, it does not populate the
+ stream ID or flags.
+
+ :param data: A memoryview object containing the body data of the frame.
+ Must not contain *more* data than the length returned by
+ :meth:`parse_frame_header
+ <hyperframe.frame.Frame.parse_frame_header>`.
+ """
+ raise NotImplementedError()
+
+
+class Padding(object):
+ """
+ Mixin for frames that contain padding. Defines extra fields that can be
+ used and set by frames that can be padded.
+ """
+ def __init__(self, stream_id, pad_length=0, **kwargs):
+ super(Padding, self).__init__(stream_id, **kwargs)
+
+ #: The length of the padding to use.
+ self.pad_length = pad_length
+
+ def serialize_padding_data(self):
+ if 'PADDED' in self.flags:
+ return _STRUCT_B.pack(self.pad_length)
+ return b''
+
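+ # Returns the number of bytes consumed from the body: 1 when the PADDED
+ # flag is set (the Pad Length field), otherwise 0, so callers can offset
+ # into the remaining data.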
+ def parse_padding_data(self, data):
+ if 'PADDED' in self.flags:
+ try:
+ self.pad_length = struct.unpack('!B', data[:1])[0]
+ except struct.error:
+ raise InvalidFrameError("Invalid Padding data")
+ return 1
+ return 0
+
+ @property
+ def total_padding(self):
+ return self.pad_length
+
+
+class Priority(object):
+ """
+ Mixin for frames that contain priority data. Defines extra fields that can
+ be used and set by frames that contain priority data.
+ """
+ def __init__(self,
+ stream_id,
+ depends_on=0x0,
+ stream_weight=0x0,
+ exclusive=False,
+ **kwargs):
+ super(Priority, self).__init__(stream_id, **kwargs)
+
+ #: The stream ID of the stream on which this stream depends.
+ self.depends_on = depends_on
+
+ #: The weight of the stream, as carried on the wire: an 8-bit integer
+ #: between 0 and 255 (RFC 7540 interprets this as a weight of 1 to 256).
+ self.stream_weight = stream_weight
+
+ #: Whether the exclusive bit was set.
+ self.exclusive = exclusive
+
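+ # The priority block is 5 bytes on the wire: a 31-bit stream dependency
+ # with the exclusive flag in the top bit, followed by a 1-byte weight.
+ # For example, depends_on=0x04 with exclusive=True and stream_weight=64
+ # serializes to b'\x80\x00\x00\x04\x40'.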
+ def serialize_priority_data(self):
+ return _STRUCT_LB.pack(
+ self.depends_on + (0x80000000 if self.exclusive else 0),
+ self.stream_weight
+ )
+
+ def parse_priority_data(self, data):
+ try:
+ self.depends_on, self.stream_weight = _STRUCT_LB.unpack(data[:5])
+ except struct.error:
+ raise InvalidFrameError("Invalid Priority data")
+
+ self.exclusive = True if self.depends_on >> 31 else False
+ self.depends_on &= 0x7FFFFFFF
+ return 5
+
+
+class DataFrame(Padding, Frame):
+ """
+ DATA frames convey arbitrary, variable-length sequences of octets
+ associated with a stream. One or more DATA frames are used, for instance,
+ to carry HTTP request or response payloads.
+ """
+ #: The flags defined for DATA frames.
+ defined_flags = [
+ Flag('END_STREAM', 0x01),
+ Flag('PADDED', 0x08),
+ ]
+
+ #: The type byte for data frames.
+ type = 0x0
+
+ stream_association = _STREAM_ASSOC_HAS_STREAM
+
+ def __init__(self, stream_id, data=b'', **kwargs):
+ super(DataFrame, self).__init__(stream_id, **kwargs)
+
+ #: The data contained on this frame.
+ self.data = data
+
+ def serialize_body(self):
+ padding_data = self.serialize_padding_data()
+ padding = b'\0' * self.total_padding
+ if isinstance(self.data, memoryview):
+ self.data = self.data.tobytes()
+ return b''.join([padding_data, self.data, padding])
+
+ def parse_body(self, data):
+ padding_data_length = self.parse_padding_data(data)
+ self.data = (
+ data[padding_data_length:len(data)-self.total_padding].tobytes()
+ )
+ self.body_len = len(data)
+
+ if self.total_padding and self.total_padding >= self.body_len:
+ raise InvalidPaddingError("Padding is too long.")
+
+ @property
+ def flow_controlled_length(self):
+ """
+ The length of the frame that needs to be accounted for when considering
+ flow control.
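+
+ For example, 8 bytes of data carried with a 10-byte padding block has a
+ flow-controlled length of 8 + 10 + 1 = 19 bytes; the extra byte is the
+ Pad Length field.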
+ """
+ padding_len = 0
+ if 'PADDED' in self.flags:
+ # Account for the extra 1-byte Pad Length field, which is present
+ # even when the padding itself is zero bytes long.
+ padding_len = self.total_padding + 1
+ return len(self.data) + padding_len
+
+
+class PriorityFrame(Priority, Frame):
+ """
+ The PRIORITY frame specifies the sender-advised priority of a stream. It
+ can be sent at any time for an existing stream. This enables
+ reprioritisation of existing streams.
+ """
+ #: The flags defined for PRIORITY frames.
+ defined_flags = []
+
+ #: The type byte defined for PRIORITY frames.
+ type = 0x02
+
+ stream_association = _STREAM_ASSOC_HAS_STREAM
+
+ def serialize_body(self):
+ return self.serialize_priority_data()
+
+ def parse_body(self, data):
+ self.parse_priority_data(data)
+ self.body_len = len(data)
+
+
+class RstStreamFrame(Frame):
+ """
+ The RST_STREAM frame allows for abnormal termination of a stream. When sent
+ by the initiator of a stream, it indicates that they wish to cancel the
+ stream or that an error condition has occurred. When sent by the receiver
+ of a stream, it indicates that the receiver is rejecting the stream,
+ requesting that the stream be cancelled, or signalling that an error
+ condition has occurred.
+ """
+ #: The flags defined for RST_STREAM frames.
+ defined_flags = []
+
+ #: The type byte defined for RST_STREAM frames.
+ type = 0x03
+
+ stream_association = _STREAM_ASSOC_HAS_STREAM
+
+ def __init__(self, stream_id, error_code=0, **kwargs):
+ super(RstStreamFrame, self).__init__(stream_id, **kwargs)
+
+ #: The error code used when resetting the stream.
+ self.error_code = error_code
+
+ def serialize_body(self):
+ return _STRUCT_L.pack(self.error_code)
+
+ def parse_body(self, data):
+ if len(data) != 4:
+ raise InvalidFrameError(
+ "RST_STREAM must have 4 byte body: actual length %s." %
+ len(data)
+ )
+
+ try:
+ self.error_code = _STRUCT_L.unpack(data)[0]
+ except struct.error: # pragma: no cover
+ raise InvalidFrameError("Invalid RST_STREAM body")
+
+ self.body_len = 4
+
+
+class SettingsFrame(Frame):
+ """
+ The SETTINGS frame conveys configuration parameters that affect how
+ endpoints communicate. The parameters are either constraints on peer
+ behavior or preferences.
+
+ Settings are not negotiated. Settings describe characteristics of the
+ sending peer, which are used by the receiving peer. Different values for
+ the same setting can be advertised by each peer. For example, a client
+ might set a high initial flow control window, whereas a server might set a
+ lower value to conserve resources.
+ """
+ #: The flags defined for SETTINGS frames.
+ defined_flags = [Flag('ACK', 0x01)]
+
+ #: The type byte defined for SETTINGS frames.
+ type = 0x04
+
+ stream_association = _STREAM_ASSOC_NO_STREAM
+
+ # We need to define the known settings; they may as well be class
+ # attributes.
+ #: The byte that signals the SETTINGS_HEADER_TABLE_SIZE setting.
+ HEADER_TABLE_SIZE = 0x01
+ #: The byte that signals the SETTINGS_ENABLE_PUSH setting.
+ ENABLE_PUSH = 0x02
+ #: The byte that signals the SETTINGS_MAX_CONCURRENT_STREAMS setting.
+ MAX_CONCURRENT_STREAMS = 0x03
+ #: The byte that signals the SETTINGS_INITIAL_WINDOW_SIZE setting.
+ INITIAL_WINDOW_SIZE = 0x04
+ #: The byte that signals the SETTINGS_MAX_FRAME_SIZE setting.
+ MAX_FRAME_SIZE = 0x05
+ #: The byte that signals the SETTINGS_MAX_HEADER_LIST_SIZE setting.
+ MAX_HEADER_LIST_SIZE = 0x06
+ #: The byte that signals SETTINGS_ENABLE_CONNECT_PROTOCOL setting.
+ ENABLE_CONNECT_PROTOCOL = 0x08
+
+ def __init__(self, stream_id=0, settings=None, **kwargs):
+ super(SettingsFrame, self).__init__(stream_id, **kwargs)
+
+ if settings and "ACK" in kwargs.get("flags", ()):
+ raise ValueError("Settings must be empty if ACK flag is set.")
+
+ #: A dictionary of the setting type byte to the value of the setting.
+ self.settings = settings or {}
+
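+ # Each setting occupies 6 bytes on the wire: a 16-bit identifier followed
+ # by a 32-bit value. For example, HEADER_TABLE_SIZE (0x01) set to 4096
+ # serializes to b'\x00\x01\x00\x00\x10\x00'.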
+ def serialize_body(self):
+ return b''.join([_STRUCT_HL.pack(setting & 0xFF, value)
+ for setting, value in self.settings.items()])
+
+ def parse_body(self, data):
+ body_len = 0
+ for i in range(0, len(data), 6):
+ try:
+ name, value = _STRUCT_HL.unpack(data[i:i+6])
+ except struct.error:
+ raise InvalidFrameError("Invalid SETTINGS body")
+
+ self.settings[name] = value
+ body_len += 6
+
+ self.body_len = body_len
+
+
+class PushPromiseFrame(Padding, Frame):
+ """
+ The PUSH_PROMISE frame is used to notify the peer endpoint in advance of
+ streams the sender intends to initiate.
+ """
+ #: The flags defined for PUSH_PROMISE frames.
+ defined_flags = [
+ Flag('END_HEADERS', 0x04),
+ Flag('PADDED', 0x08)
+ ]
+
+ #: The type byte defined for PUSH_PROMISE frames.
+ type = 0x05
+
+ stream_association = _STREAM_ASSOC_HAS_STREAM
+
+ def __init__(self, stream_id, promised_stream_id=0, data=b'', **kwargs):
+ super(PushPromiseFrame, self).__init__(stream_id, **kwargs)
+
+ #: The stream ID that is promised by this frame.
+ self.promised_stream_id = promised_stream_id
+
+ #: The HPACK-encoded header block for the simulated request on the new
+ #: stream.
+ self.data = data
+
+ def serialize_body(self):
+ padding_data = self.serialize_padding_data()
+ padding = b'\0' * self.total_padding
+ data = _STRUCT_L.pack(self.promised_stream_id)
+ return b''.join([padding_data, data, self.data, padding])
+
+ def parse_body(self, data):
+ padding_data_length = self.parse_padding_data(data)
+
+ try:
+ self.promised_stream_id = _STRUCT_L.unpack(
+ data[padding_data_length:padding_data_length + 4]
+ )[0]
+ except struct.error:
+ raise InvalidFrameError("Invalid PUSH_PROMISE body")
+
+ self.data = data[padding_data_length + 4:].tobytes()
+ self.body_len = len(data)
+
+ if self.total_padding and self.total_padding >= self.body_len:
+ raise InvalidPaddingError("Padding is too long.")
+
+
+class PingFrame(Frame):
+ """
+ The PING frame is a mechanism for measuring a minimal round-trip time from
+ the sender, as well as determining whether an idle connection is still
+ functional. PING frames can be sent from any endpoint.
+ """
+ #: The flags defined for PING frames.
+ defined_flags = [Flag('ACK', 0x01)]
+
+ #: The type byte defined for PING frames.
+ type = 0x06
+
+ stream_association = _STREAM_ASSOC_NO_STREAM
+
+ def __init__(self, stream_id=0, opaque_data=b'', **kwargs):
+ super(PingFrame, self).__init__(stream_id, **kwargs)
+
+ #: The opaque data sent in this PING frame, as a bytestring.
+ self.opaque_data = opaque_data
+
+ def serialize_body(self):
+ if len(self.opaque_data) > 8:
+ raise InvalidFrameError(
+ "PING frame may not have more than 8 bytes of data, got %s" %
+ self.opaque_data
+ )
+
+ data = self.opaque_data
+ data += b'\x00' * (8 - len(self.opaque_data))
+ return data
+
+ def parse_body(self, data):
+ if len(data) != 8:
+ raise InvalidFrameError(
+ "PING frame must have 8 byte length: got %s" % len(data)
+ )
+
+ self.opaque_data = data.tobytes()
+ self.body_len = 8
+
+
+class GoAwayFrame(Frame):
+ """
+ The GOAWAY frame informs the remote peer to stop creating streams on this
+ connection. It can be sent from the client or the server. Once sent, the
+ sender will ignore frames sent on new streams for the remainder of the
+ connection.
+ """
+ #: The flags defined for GOAWAY frames.
+ defined_flags = []
+
+ #: The type byte defined for GOAWAY frames.
+ type = 0x07
+
+ stream_association = _STREAM_ASSOC_NO_STREAM
+
+ def __init__(self,
+ stream_id=0,
+ last_stream_id=0,
+ error_code=0,
+ additional_data=b'',
+ **kwargs):
+ super(GoAwayFrame, self).__init__(stream_id, **kwargs)
+
+ #: The last stream ID definitely seen by the remote peer.
+ self.last_stream_id = last_stream_id
+
+ #: The error code for connection teardown.
+ self.error_code = error_code
+
+ #: Any additional data sent in the GOAWAY.
+ self.additional_data = additional_data
+
+ def serialize_body(self):
+ data = _STRUCT_LL.pack(
+ self.last_stream_id & 0x7FFFFFFF,
+ self.error_code
+ )
+ data += self.additional_data
+
+ return data
+
+ def parse_body(self, data):
+ try:
+ self.last_stream_id, self.error_code = _STRUCT_LL.unpack(
+ data[:8]
+ )
+ except struct.error:
+ raise InvalidFrameError("Invalid GOAWAY body.")
+
+ self.body_len = len(data)
+
+ if len(data) > 8:
+ self.additional_data = data[8:].tobytes()
+
+
+class WindowUpdateFrame(Frame):
+ """
+ The WINDOW_UPDATE frame is used to implement flow control.
+
+ Flow control operates at two levels: on each individual stream and on the
+ entire connection.
+
+ Both types of flow control are hop by hop; that is, only between the two
+ endpoints. Intermediaries do not forward WINDOW_UPDATE frames between
+ dependent connections. However, throttling of data transfer by any receiver
+ can indirectly cause the propagation of flow control information toward the
+ original sender.
+ """
+ #: The flags defined for WINDOW_UPDATE frames.
+ defined_flags = []
+
+ #: The type byte defined for WINDOW_UPDATE frames.
+ type = 0x08
+
+ stream_association = _STREAM_ASSOC_EITHER
+
+ def __init__(self, stream_id, window_increment=0, **kwargs):
+ super(WindowUpdateFrame, self).__init__(stream_id, **kwargs)
+
+ #: The amount the flow control window is to be incremented.
+ self.window_increment = window_increment
+
+ def serialize_body(self):
+ return _STRUCT_L.pack(self.window_increment & 0x7FFFFFFF)
+
+ def parse_body(self, data):
+ try:
+ self.window_increment = _STRUCT_L.unpack(data)[0]
+ except struct.error:
+ raise InvalidFrameError("Invalid WINDOW_UPDATE body")
+
+ self.body_len = 4
+
+
+class HeadersFrame(Padding, Priority, Frame):
+ """
+ The HEADERS frame carries name-value pairs. It is used to open a stream.
+ HEADERS frames can be sent on a stream in the "open" or "half closed
+ (remote)" states.
+
+ In this implementation the HeadersFrame class is essentially a data frame,
+ because of the requirement to control the sizes of frames. A header block
+ fragment that doesn't fit in a single HEADERS frame needs to be followed
+ by CONTINUATION frames. From the perspective of the frame-building code
+ the header block is an opaque data segment.
+ """
+ #: The flags defined for HEADERS frames.
+ defined_flags = [
+ Flag('END_STREAM', 0x01),
+ Flag('END_HEADERS', 0x04),
+ Flag('PADDED', 0x08),
+ Flag('PRIORITY', 0x20),
+ ]
+
+ #: The type byte defined for HEADERS frames.
+ type = 0x01
+
+ stream_association = _STREAM_ASSOC_HAS_STREAM
+
+ def __init__(self, stream_id, data=b'', **kwargs):
+ super(HeadersFrame, self).__init__(stream_id, **kwargs)
+
+ #: The HPACK-encoded header block.
+ self.data = data
+
+ def serialize_body(self):
+ padding_data = self.serialize_padding_data()
+ padding = b'\0' * self.total_padding
+
+ if 'PRIORITY' in self.flags:
+ priority_data = self.serialize_priority_data()
+ else:
+ priority_data = b''
+
+ return b''.join([padding_data, priority_data, self.data, padding])
+
+ def parse_body(self, data):
+ padding_data_length = self.parse_padding_data(data)
+ data = data[padding_data_length:]
+
+ if 'PRIORITY' in self.flags:
+ priority_data_length = self.parse_priority_data(data)
+ else:
+ priority_data_length = 0
+
+ self.body_len = len(data)
+ self.data = (
+ data[priority_data_length:len(data)-self.total_padding].tobytes()
+ )
+
+ if self.total_padding and self.total_padding >= self.body_len:
+ raise InvalidPaddingError("Padding is too long.")
+
+
+class ContinuationFrame(Frame):
+ """
+ The CONTINUATION frame is used to continue a sequence of header block
+ fragments. Any number of CONTINUATION frames can be sent on an existing
+ stream, as long as the preceding frame on the same stream is one of
+ HEADERS, PUSH_PROMISE or CONTINUATION without the END_HEADERS flag set.
+
+ Much like the HEADERS frame, hyper treats this as an opaque data frame with
+ different flags and a different type.
+ """
+ #: The flags defined for CONTINUATION frames.
+ defined_flags = [Flag('END_HEADERS', 0x04)]
+
+ #: The type byte defined for CONTINUATION frames.
+ type = 0x09
+
+ stream_association = _STREAM_ASSOC_HAS_STREAM
+
+ def __init__(self, stream_id, data=b'', **kwargs):
+ super(ContinuationFrame, self).__init__(stream_id, **kwargs)
+
+ #: The HPACK-encoded header block.
+ self.data = data
+
+ def serialize_body(self):
+ return self.data
+
+ def parse_body(self, data):
+ self.data = data.tobytes()
+ self.body_len = len(data)
+
+
+class AltSvcFrame(Frame):
+ """
+ The ALTSVC frame is used to advertise alternate services that the current
+ host, or a different one, can understand. This frame is standardised as
+ part of RFC 7838.
+
+ This frame does no work to validate that the ALTSVC field parameter is
+ acceptable per the rules of RFC 7838.
+
+ .. note:: If the ``stream_id`` of this frame is nonzero, the origin field
+ must have zero length. Conversely, if the ``stream_id`` of this
+ frame is zero, the origin field must have nonzero length. Put
+ another way, a valid ALTSVC frame has ``stream_id != 0`` XOR
+ ``len(origin) != 0``.
+ """
+ type = 0xA
+
+ stream_association = _STREAM_ASSOC_EITHER
+
+ def __init__(self, stream_id, origin=b'', field=b'', **kwargs):
+ super(AltSvcFrame, self).__init__(stream_id, **kwargs)
+
+ if not isinstance(origin, bytes):
+ raise ValueError("AltSvc origin must be bytestring.")
+ if not isinstance(field, bytes):
+ raise ValueError("AltSvc field must be a bytestring.")
+ self.origin = origin
+ self.field = field
+
+ def serialize_body(self):
+ origin_len = _STRUCT_H.pack(len(self.origin))
+ return b''.join([origin_len, self.origin, self.field])
+
+ def parse_body(self, data):
+ try:
+ origin_len = _STRUCT_H.unpack(data[0:2])[0]
+ self.origin = data[2:2+origin_len].tobytes()
+
+ if len(self.origin) != origin_len:
+ raise InvalidFrameError("Invalid ALTSVC frame body.")
+
+ self.field = data[2+origin_len:].tobytes()
+ except (struct.error, ValueError):
+ raise InvalidFrameError("Invalid ALTSVC frame body.")
+
+ self.body_len = len(data)
+
+
+class ExtensionFrame(Frame):
+ """
+ ExtensionFrame is used to wrap frames which are not natively interpretable
+ by hyperframe.
+
+ Although certain byte prefixes are ordained by specification to have
+ certain contextual meanings, frames with other prefixes are not prohibited,
+ and may be used to communicate arbitrary meaning between HTTP/2 peers.
+
+ Thus, hyperframe, rather than raising an exception when such a frame is
+ encountered, wraps it in a generic frame to be properly acted upon by
+ upstream consumers which might have additional context on how to use it.
+
+ .. versionadded:: 5.0.0
+ """
+
+ stream_association = _STREAM_ASSOC_EITHER
+
+ def __init__(self, type, stream_id, **kwargs):
+ super(ExtensionFrame, self).__init__(stream_id, **kwargs)
+ self.type = type
+ self.flag_byte = None
+
+ def parse_flags(self, flag_byte):
+ """
+ For extension frames, we parse the flags by just storing a flag byte.
+ """
+ self.flag_byte = flag_byte
+
+ def parse_body(self, data):
+ self.body = data.tobytes()
+ self.body_len = len(data)
+
+ def serialize(self):
+ """
+ A broad override of the serialize method that ensures that the data
+ comes back out exactly as it came in. This should not be used in most
+ user code: it exists only as a helper method if frames need to be
+ reconstituted.
+ """
+ # Build the frame header.
+ # First, get the flags.
+ flags = self.flag_byte
+
+ header = _STRUCT_HBBBL.pack(
+ (self.body_len >> 8) & 0xFFFF, # Length spread over top 24 bits
+ self.body_len & 0xFF,
+ self.type,
+ flags,
+ self.stream_id & 0x7FFFFFFF # Stream ID is 32 bits.
+ )
+
+ return header + self.body
+
+
+_FRAME_CLASSES = [
+ DataFrame,
+ HeadersFrame,
+ PriorityFrame,
+ RstStreamFrame,
+ SettingsFrame,
+ PushPromiseFrame,
+ PingFrame,
+ GoAwayFrame,
+ WindowUpdateFrame,
+ ContinuationFrame,
+ AltSvcFrame,
+]
+#: FRAMES maps the type byte for each frame to the class used to represent that
+#: frame.
+FRAMES = {cls.type: cls for cls in _FRAME_CLASSES}
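+
+# A minimal usage sketch, mirroring the decode_frame helper in the test suite:
+# parse the 9-byte header first, then feed exactly `length` further bytes to
+# parse_body as a memoryview, e.g.
+#
+#   frame, length = Frame.parse_frame_header(data[:9])
+#   frame.parse_body(memoryview(data[9:9 + length]))
+#
+# frame.serialize() performs the inverse, returning the header plus the body.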
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/setup.cfg b/testing/web-platform/tests/tools/third_party/hyperframe/setup.cfg
new file mode 100644
index 0000000000..50220e135b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/setup.cfg
@@ -0,0 +1,10 @@
+[wheel]
+universal = 1
+
+[tool:pytest]
+testpaths = test
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/setup.py b/testing/web-platform/tests/tools/third_party/hyperframe/setup.py
new file mode 100644
index 0000000000..1ab7212141
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/setup.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import itertools
+import os
+import re
+import sys
+
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+# Get the version
+version_regex = r'__version__ = ["\']([^"\']*)["\']'
+with open('hyperframe/__init__.py', 'r') as f:
+ text = f.read()
+ match = re.search(version_regex, text)
+
+ if match:
+ version = match.group(1)
+ else:
+ raise RuntimeError("No version number found!")
+
+# Stealing this from Kenneth Reitz
+if sys.argv[-1] == 'publish':
+ os.system('python setup.py sdist upload')
+ sys.exit()
+
+
+packages = ['hyperframe']
+
+setup(
+ name='hyperframe',
+ version=version,
+ description='HTTP/2 framing layer for Python',
+ long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
+ author='Cory Benfield',
+ author_email='cory@lukasa.co.uk',
+ url='https://python-hyper.org/hyperframe/en/latest/',
+ packages=packages,
+ package_data={'': ['LICENSE', 'README.rst', 'CONTRIBUTORS.rst', 'HISTORY.rst']},
+ package_dir={'hyperframe': 'hyperframe'},
+ include_package_data=True,
+ license='MIT License',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ ],
+)
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/test/test_flags.py b/testing/web-platform/tests/tools/third_party/hyperframe/test/test_flags.py
new file mode 100644
index 0000000000..62a6a30f67
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/test/test_flags.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+from hyperframe.frame import (
+ Flags, Flag,
+)
+import pytest
+
+
+class TestFlags(object):
+ def test_add(self):
+ flags = Flags([Flag("VALID_FLAG", 0x00)])
+ assert not flags
+
+ flags.add("VALID_FLAG")
+ flags.add("VALID_FLAG")
+ assert "VALID_FLAG" in flags
+ assert list(flags) == ["VALID_FLAG"]
+ assert len(flags) == 1
+
+ def test_remove(self):
+ flags = Flags([Flag("VALID_FLAG", 0x00)])
+ flags.add("VALID_FLAG")
+
+ flags.discard("VALID_FLAG")
+ assert "VALID_FLAG" not in flags
+ assert list(flags) == []
+ assert len(flags) == 0
+
+ # discarding elements not in the set should not throw an exception
+ flags.discard("END_STREAM")
+
+ def test_validation(self):
+ flags = Flags([Flag("VALID_FLAG", 0x00)])
+ flags.add("VALID_FLAG")
+ with pytest.raises(ValueError):
+ flags.add("INVALID_FLAG")
diff --git a/testing/web-platform/tests/tools/third_party/hyperframe/test/test_frames.py b/testing/web-platform/tests/tools/third_party/hyperframe/test/test_frames.py
new file mode 100644
index 0000000000..abfecb692d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/hyperframe/test/test_frames.py
@@ -0,0 +1,791 @@
+# -*- coding: utf-8 -*-
+from hyperframe.frame import (
+ Frame, Flags, DataFrame, PriorityFrame, RstStreamFrame, SettingsFrame,
+ PushPromiseFrame, PingFrame, GoAwayFrame, WindowUpdateFrame, HeadersFrame,
+ ContinuationFrame, AltSvcFrame, ExtensionFrame
+)
+from hyperframe.exceptions import (
+ UnknownFrameError, InvalidPaddingError, InvalidFrameError
+)
+import pytest
+
+
+def decode_frame(frame_data):
+ f, length = Frame.parse_frame_header(frame_data[:9])
+ f.parse_body(memoryview(frame_data[9:9 + length]))
+ assert 9 + length == len(frame_data)
+ return f
+
+
+class TestGeneralFrameBehaviour(object):
+ def test_base_frame_ignores_flags(self):
+ f = Frame(stream_id=0)
+ flags = f.parse_flags(0xFF)
+ assert not flags
+ assert isinstance(flags, Flags)
+
+ def test_base_frame_cant_serialize(self):
+ f = Frame(stream_id=0)
+ with pytest.raises(NotImplementedError):
+ f.serialize()
+
+ def test_base_frame_cant_parse_body(self):
+ data = b''
+ f = Frame(stream_id=0)
+ with pytest.raises(NotImplementedError):
+ f.parse_body(data)
+
+ def test_parse_frame_header_unknown_type_strict(self):
+ with pytest.raises(UnknownFrameError) as excinfo:
+ Frame.parse_frame_header(
+ b'\x00\x00\x59\xFF\x00\x00\x00\x00\x01',
+ strict=True
+ )
+ exception = excinfo.value
+ assert exception.frame_type == 0xFF
+ assert exception.length == 0x59
+ assert str(exception) == (
+ "UnknownFrameError: Unknown frame type 0xFF received, "
+ "length 89 bytes"
+ )
+
+ def test_parse_frame_header_ignore_first_bit_of_stream_id(self):
+ s = b'\x00\x00\x00\x06\x01\x80\x00\x00\x00'
+ f, _ = Frame.parse_frame_header(s)
+
+ assert f.stream_id == 0
+
+ def test_parse_frame_header_unknown_type(self):
+ frame, length = Frame.parse_frame_header(
+ b'\x00\x00\x59\xFF\x00\x00\x00\x00\x01'
+ )
+ assert frame.type == 0xFF
+ assert length == 0x59
+ assert isinstance(frame, ExtensionFrame)
+ assert frame.stream_id == 1
+
+ def test_flags_are_persisted(self):
+ frame, length = Frame.parse_frame_header(
+ b'\x00\x00\x59\xFF\x09\x00\x00\x00\x01'
+ )
+ assert frame.type == 0xFF
+ assert length == 0x59
+ assert frame.flag_byte == 0x09
+
+ def test_parse_body_unknown_type(self):
+ frame = decode_frame(
+ b'\x00\x00\x0C\xFF\x00\x00\x00\x00\x01hello world!'
+ )
+ assert frame.body == b'hello world!'
+ assert frame.body_len == 12
+ assert frame.stream_id == 1
+
+ def test_can_round_trip_unknown_frames(self):
+ frame_data = b'\x00\x00\x0C\xFF\x00\x00\x00\x00\x01hello world!'
+ f = decode_frame(frame_data)
+ assert f.serialize() == frame_data
+
+ def test_repr(self, monkeypatch):
+ f = Frame(stream_id=0)
+ monkeypatch.setattr(Frame, "serialize_body", lambda _: b"body")
+ assert repr(f) == "Frame(Stream: 0; Flags: None): 626f6479"
+
+ monkeypatch.setattr(Frame, "serialize_body", lambda _: b"A"*25)
+ assert repr(f) == (
+ "Frame(Stream: 0; Flags: None): {}...".format("41"*10)
+ )
+
+ def test_cannot_parse_invalid_frame_header(self):
+ with pytest.raises(InvalidFrameError):
+ Frame.parse_frame_header(b'\x00\x00\x08\x00\x01\x00\x00\x00')
+
+
+class TestDataFrame(object):
+ payload = b'\x00\x00\x08\x00\x01\x00\x00\x00\x01testdata'
+ payload_with_padding = (
+ b'\x00\x00\x13\x00\x09\x00\x00\x00\x01\x0Atestdata' + b'\0' * 10
+ )
+
+ def test_data_frame_has_correct_flags(self):
+ f = DataFrame(1)
+ flags = f.parse_flags(0xFF)
+ assert flags == set([
+ 'END_STREAM', 'PADDED'
+ ])
+
+ @pytest.mark.parametrize('data', [
+ b'testdata',
+ memoryview(b'testdata')
+ ])
+ def test_data_frame_serializes_properly(self, data):
+ f = DataFrame(1)
+ f.flags = set(['END_STREAM'])
+ f.data = data
+
+ s = f.serialize()
+ assert s == self.payload
+
+ def test_data_frame_with_padding_serializes_properly(self):
+ f = DataFrame(1)
+ f.flags = set(['END_STREAM', 'PADDED'])
+ f.data = b'testdata'
+ f.pad_length = 10
+
+ s = f.serialize()
+ assert s == self.payload_with_padding
+
+ def test_data_frame_parses_properly(self):
+ f = decode_frame(self.payload)
+
+ assert isinstance(f, DataFrame)
+ assert f.flags == set(['END_STREAM'])
+ assert f.pad_length == 0
+ assert f.data == b'testdata'
+ assert f.body_len == 8
+
+ def test_data_frame_with_padding_parses_properly(self):
+ f = decode_frame(self.payload_with_padding)
+
+ assert isinstance(f, DataFrame)
+ assert f.flags == set(['END_STREAM', 'PADDED'])
+ assert f.pad_length == 10
+ assert f.data == b'testdata'
+ assert f.body_len == 19
+
+ def test_data_frame_with_invalid_padding_errors(self):
+ with pytest.raises(InvalidFrameError):
+ decode_frame(self.payload_with_padding[:9])
+
+ def test_data_frame_with_padding_calculates_flow_control_len(self):
+ f = DataFrame(1)
+ f.flags = set(['PADDED'])
+ f.data = b'testdata'
+ f.pad_length = 10
+
+ assert f.flow_controlled_length == 19
+
+ def test_data_frame_zero_length_padding_calculates_flow_control_len(self):
+ f = DataFrame(1)
+ f.flags = set(['PADDED'])
+ f.data = b'testdata'
+ f.pad_length = 0
+
+ assert f.flow_controlled_length == len(b'testdata') + 1
+
+ def test_data_frame_without_padding_calculates_flow_control_len(self):
+ f = DataFrame(1)
+ f.data = b'testdata'
+
+ assert f.flow_controlled_length == 8
+
+ def test_data_frame_comes_on_a_stream(self):
+ with pytest.raises(ValueError):
+ DataFrame(0)
+
+ def test_long_data_frame(self):
+ f = DataFrame(1)
+
+ # Use more than 256 bytes of data to force setting higher bits.
+ f.data = b'\x01' * 300
+ data = f.serialize()
+
+ # The top three bytes should be numerically equal to 300. That means
+ # they should read 00 01 2C.
+ # The weird double index trick is to ensure this test behaves equally
+ # on Python 2 and Python 3.
+ assert data[0] == b'\x00'[0]
+ assert data[1] == b'\x01'[0]
+ assert data[2] == b'\x2C'[0]
+
+ def test_body_length_behaves_correctly(self):
+ f = DataFrame(1)
+
+ f.data = b'\x01' * 300
+
+ # Initially the body length is zero. For now this is incidental, but
+ # I'm going to test it to ensure that the behaviour is codified. We
+ # should change this test if we change that.
+ assert f.body_len == 0
+
+ f.serialize()
+ assert f.body_len == 300
+
+ def test_data_frame_with_invalid_padding_fails_to_parse(self):
+ # This frame has a padding length of 6 bytes, but a total length of
+ # only 5.
+ data = b'\x00\x00\x05\x00\x0b\x00\x00\x00\x01\x06\x54\x65\x73\x74'
+
+ with pytest.raises(InvalidPaddingError):
+ decode_frame(data)
+
+ def test_data_frame_with_no_length_parses(self):
+ # Fixes issue with empty data frames raising InvalidPaddingError.
+ f = DataFrame(1)
+ f.data = b''
+ data = f.serialize()
+
+ new_frame = decode_frame(data)
+ assert new_frame.data == b''
+
+
+class TestPriorityFrame(object):
+ payload = b'\x00\x00\x05\x02\x00\x00\x00\x00\x01\x80\x00\x00\x04\x40'
+
+ def test_priority_frame_has_no_flags(self):
+ f = PriorityFrame(1)
+ flags = f.parse_flags(0xFF)
+ assert flags == set()
+ assert isinstance(flags, Flags)
+
+ def test_priority_frame_default_serializes_properly(self):
+ f = PriorityFrame(1)
+
+ assert f.serialize() == (
+ b'\x00\x00\x05\x02\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00'
+ )
+
+ def test_priority_frame_with_all_data_serializes_properly(self):
+ f = PriorityFrame(1)
+ f.depends_on = 0x04
+ f.stream_weight = 64
+ f.exclusive = True
+
+ assert f.serialize() == self.payload
+
+ def test_priority_frame_with_all_data_parses_properly(self):
+ f = decode_frame(self.payload)
+
+ assert isinstance(f, PriorityFrame)
+ assert f.flags == set()
+ assert f.depends_on == 4
+ assert f.stream_weight == 64
+ assert f.exclusive is True
+ assert f.body_len == 5
+
+ def test_priority_frame_comes_on_a_stream(self):
+ with pytest.raises(ValueError):
+ PriorityFrame(0)
+
+ def test_short_priority_frame_errors(self):
+ with pytest.raises(InvalidFrameError):
+ decode_frame(self.payload[:-2])
+
+
+class TestRstStreamFrame(object):
+ def test_rst_stream_frame_has_no_flags(self):
+ f = RstStreamFrame(1)
+ flags = f.parse_flags(0xFF)
+ assert not flags
+ assert isinstance(flags, Flags)
+
+ def test_rst_stream_frame_serializes_properly(self):
+ f = RstStreamFrame(1)
+ f.error_code = 420
+
+ s = f.serialize()
+ assert s == b'\x00\x00\x04\x03\x00\x00\x00\x00\x01\x00\x00\x01\xa4'
+
+ def test_rst_stream_frame_parses_properly(self):
+ s = b'\x00\x00\x04\x03\x00\x00\x00\x00\x01\x00\x00\x01\xa4'
+ f = decode_frame(s)
+
+ assert isinstance(f, RstStreamFrame)
+ assert f.flags == set()
+ assert f.error_code == 420
+ assert f.body_len == 4
+
+ def test_rst_stream_frame_comes_on_a_stream(self):
+ with pytest.raises(ValueError):
+ RstStreamFrame(0)
+
+ def test_rst_stream_frame_must_have_body_length_four(self):
+ f = RstStreamFrame(1)
+ with pytest.raises(ValueError):
+ f.parse_body(b'\x01')
+
+
+class TestSettingsFrame(object):
+ serialized = (
+ b'\x00\x00\x2A\x04\x01\x00\x00\x00\x00' + # Frame header
+ b'\x00\x01\x00\x00\x10\x00' + # HEADER_TABLE_SIZE
+ b'\x00\x02\x00\x00\x00\x00' + # ENABLE_PUSH
+ b'\x00\x03\x00\x00\x00\x64' + # MAX_CONCURRENT_STREAMS
+ b'\x00\x04\x00\x00\xFF\xFF' + # INITIAL_WINDOW_SIZE
+ b'\x00\x05\x00\x00\x40\x00' + # MAX_FRAME_SIZE
+ b'\x00\x06\x00\x00\xFF\xFF' + # MAX_HEADER_LIST_SIZE
+ b'\x00\x08\x00\x00\x00\x01' # ENABLE_CONNECT_PROTOCOL
+ )
+
+ settings = {
+ SettingsFrame.HEADER_TABLE_SIZE: 4096,
+ SettingsFrame.ENABLE_PUSH: 0,
+ SettingsFrame.MAX_CONCURRENT_STREAMS: 100,
+ SettingsFrame.INITIAL_WINDOW_SIZE: 65535,
+ SettingsFrame.MAX_FRAME_SIZE: 16384,
+ SettingsFrame.MAX_HEADER_LIST_SIZE: 65535,
+ SettingsFrame.ENABLE_CONNECT_PROTOCOL: 1,
+ }
+
+ def test_settings_frame_has_only_one_flag(self):
+ f = SettingsFrame()
+ flags = f.parse_flags(0xFF)
+ assert flags == set(['ACK'])
+
+ def test_settings_frame_serializes_properly(self):
+ f = SettingsFrame()
+ f.parse_flags(0xFF)
+ f.settings = self.settings
+
+ s = f.serialize()
+ assert s == self.serialized
+
+ def test_settings_frame_with_settings(self):
+ f = SettingsFrame(settings=self.settings)
+ assert f.settings == self.settings
+
+ def test_settings_frame_without_settings(self):
+ f = SettingsFrame()
+ assert f.settings == {}
+
+ def test_settings_frame_with_ack(self):
+ f = SettingsFrame(flags=('ACK',))
+ assert 'ACK' in f.flags
+
+ def test_settings_frame_ack_and_settings(self):
+ with pytest.raises(ValueError):
+ SettingsFrame(settings=self.settings, flags=('ACK',))
+
+ def test_settings_frame_parses_properly(self):
+ f = decode_frame(self.serialized)
+
+ assert isinstance(f, SettingsFrame)
+ assert f.flags == set(['ACK'])
+ assert f.settings == self.settings
+ assert f.body_len == 42
+
+ def test_settings_frames_never_have_streams(self):
+ with pytest.raises(ValueError):
+ SettingsFrame(stream_id=1)
+
+ def test_short_settings_frame_errors(self):
+ with pytest.raises(InvalidFrameError):
+ decode_frame(self.serialized[:-2])
+
+
+class TestPushPromiseFrame(object):
+ def test_push_promise_frame_flags(self):
+ f = PushPromiseFrame(1)
+ flags = f.parse_flags(0xFF)
+
+ assert flags == set(['END_HEADERS', 'PADDED'])
+
+ def test_push_promise_frame_serializes_properly(self):
+ f = PushPromiseFrame(1)
+ f.flags = set(['END_HEADERS'])
+ f.promised_stream_id = 4
+ f.data = b'hello world'
+
+ s = f.serialize()
+ assert s == (
+ b'\x00\x00\x0F\x05\x04\x00\x00\x00\x01' +
+ b'\x00\x00\x00\x04' +
+ b'hello world'
+ )
+
+ def test_push_promise_frame_parses_properly(self):
+ s = (
+ b'\x00\x00\x0F\x05\x04\x00\x00\x00\x01' +
+ b'\x00\x00\x00\x04' +
+ b'hello world'
+ )
+ f = decode_frame(s)
+
+ assert isinstance(f, PushPromiseFrame)
+ assert f.flags == set(['END_HEADERS'])
+ assert f.promised_stream_id == 4
+ assert f.data == b'hello world'
+ assert f.body_len == 15
+
+ def test_push_promise_frame_with_invalid_padding_fails_to_parse(self):
+ # This frame has a padding length of 6 bytes, but a total length of
+ # only 5.
+ data = b'\x00\x00\x05\x05\x08\x00\x00\x00\x01\x06\x54\x65\x73\x74'
+
+ with pytest.raises(InvalidPaddingError):
+ decode_frame(data)
+
+ def test_push_promise_frame_with_no_length_parses(self):
+ # Fixes issue with empty data frames raising InvalidPaddingError.
+ f = PushPromiseFrame(1)
+ f.data = b''
+ data = f.serialize()
+
+ new_frame = decode_frame(data)
+ assert new_frame.data == b''
+
+ def test_short_push_promise_errors(self):
+ s = (
+ b'\x00\x00\x0F\x05\x04\x00\x00\x00\x01' +
+ b'\x00\x00\x00' # One byte short
+ )
+
+ with pytest.raises(InvalidFrameError):
+ decode_frame(s)
+
+
+class TestPingFrame(object):
+ def test_ping_frame_has_only_one_flag(self):
+ f = PingFrame()
+ flags = f.parse_flags(0xFF)
+
+ assert flags == set(['ACK'])
+
+ def test_ping_frame_serializes_properly(self):
+ f = PingFrame()
+ f.parse_flags(0xFF)
+ f.opaque_data = b'\x01\x02'
+
+ s = f.serialize()
+ assert s == (
+ b'\x00\x00\x08\x06\x01\x00\x00\x00\x00\x01\x02\x00\x00\x00\x00\x00'
+ b'\x00'
+ )
+
+ def test_no_more_than_8_octets(self):
+ f = PingFrame()
+ f.opaque_data = b'\x01\x02\x03\x04\x05\x06\x07\x08\x09'
+
+ with pytest.raises(ValueError):
+ f.serialize()
+
+ def test_ping_frame_parses_properly(self):
+ s = (
+ b'\x00\x00\x08\x06\x01\x00\x00\x00\x00\x01\x02\x00\x00\x00\x00\x00'
+ b'\x00'
+ )
+ f = decode_frame(s)
+
+ assert isinstance(f, PingFrame)
+ assert f.flags == set(['ACK'])
+ assert f.opaque_data == b'\x01\x02\x00\x00\x00\x00\x00\x00'
+ assert f.body_len == 8
+
+ def test_ping_frame_never_has_a_stream(self):
+ with pytest.raises(ValueError):
+ PingFrame(stream_id=1)
+
+ def test_ping_frame_has_no_more_than_body_length_8(self):
+ f = PingFrame()
+ with pytest.raises(ValueError):
+ f.parse_body(b'\x01\x02\x03\x04\x05\x06\x07\x08\x09')
+
+ def test_ping_frame_has_no_less_than_body_length_8(self):
+ f = PingFrame()
+ with pytest.raises(ValueError):
+ f.parse_body(b'\x01\x02\x03\x04\x05\x06\x07')
+
+
+class TestGoAwayFrame(object):
+ def test_go_away_has_no_flags(self):
+ f = GoAwayFrame()
+ flags = f.parse_flags(0xFF)
+
+ assert not flags
+ assert isinstance(flags, Flags)
+
+ def test_goaway_serializes_properly(self):
+ f = GoAwayFrame()
+ f.last_stream_id = 64
+ f.error_code = 32
+ f.additional_data = b'hello'
+
+ s = f.serialize()
+ assert s == (
+ b'\x00\x00\x0D\x07\x00\x00\x00\x00\x00' + # Frame header
+ b'\x00\x00\x00\x40' + # Last Stream ID
+ b'\x00\x00\x00\x20' + # Error Code
+ b'hello' # Additional data
+ )
+
+ def test_goaway_frame_parses_properly(self):
+ s = (
+ b'\x00\x00\x0D\x07\x00\x00\x00\x00\x00' + # Frame header
+ b'\x00\x00\x00\x40' + # Last Stream ID
+ b'\x00\x00\x00\x20' + # Error Code
+ b'hello' # Additional data
+ )
+ f = decode_frame(s)
+
+ assert isinstance(f, GoAwayFrame)
+ assert f.flags == set()
+ assert f.additional_data == b'hello'
+ assert f.body_len == 13
+
+ s = (
+ b'\x00\x00\x08\x07\x00\x00\x00\x00\x00' + # Frame header
+ b'\x00\x00\x00\x40' + # Last Stream ID
+ b'\x00\x00\x00\x20' + # Error Code
+ b'' # Additional data
+ )
+ f = decode_frame(s)
+
+ assert isinstance(f, GoAwayFrame)
+ assert f.flags == set()
+ assert f.additional_data == b''
+ assert f.body_len == 8
+
+ def test_goaway_frame_never_has_a_stream(self):
+ with pytest.raises(ValueError):
+ GoAwayFrame(stream_id=1)
+
+ def test_short_goaway_frame_errors(self):
+ s = (
+ b'\x00\x00\x0D\x07\x00\x00\x00\x00\x00' + # Frame header
+ b'\x00\x00\x00\x40' + # Last Stream ID
+ b'\x00\x00\x00' # short Error Code
+ )
+ with pytest.raises(InvalidFrameError):
+ decode_frame(s)
+
+
+class TestWindowUpdateFrame(object):
+ def test_window_update_has_no_flags(self):
+ f = WindowUpdateFrame(0)
+ flags = f.parse_flags(0xFF)
+
+ assert not flags
+ assert isinstance(flags, Flags)
+
+ def test_window_update_serializes_properly(self):
+ f = WindowUpdateFrame(0)
+ f.window_increment = 512
+
+ s = f.serialize()
+ assert s == b'\x00\x00\x04\x08\x00\x00\x00\x00\x00\x00\x00\x02\x00'
+
+ def test_windowupdate_frame_parses_properly(self):
+ s = b'\x00\x00\x04\x08\x00\x00\x00\x00\x00\x00\x00\x02\x00'
+ f = decode_frame(s)
+
+ assert isinstance(f, WindowUpdateFrame)
+ assert f.flags == set()
+ assert f.window_increment == 512
+ assert f.body_len == 4
+
+ def test_short_windowupdate_frame_errors(self):
+ s = b'\x00\x00\x04\x08\x00\x00\x00\x00\x00\x00\x00\x02' # -1 byte
+
+ with pytest.raises(InvalidFrameError):
+ decode_frame(s)
+
+
+class TestHeadersFrame(object):
+ def test_headers_frame_flags(self):
+ f = HeadersFrame(1)
+ flags = f.parse_flags(0xFF)
+
+ assert flags == set(['END_STREAM', 'END_HEADERS',
+ 'PADDED', 'PRIORITY'])
+
+ def test_headers_frame_serializes_properly(self):
+ f = HeadersFrame(1)
+ f.flags = set(['END_STREAM', 'END_HEADERS'])
+ f.data = b'hello world'
+
+ s = f.serialize()
+ assert s == (
+ b'\x00\x00\x0B\x01\x05\x00\x00\x00\x01' +
+ b'hello world'
+ )
+
+ def test_headers_frame_parses_properly(self):
+ s = (
+ b'\x00\x00\x0B\x01\x05\x00\x00\x00\x01' +
+ b'hello world'
+ )
+ f = decode_frame(s)
+
+ assert isinstance(f, HeadersFrame)
+ assert f.flags == set(['END_STREAM', 'END_HEADERS'])
+ assert f.data == b'hello world'
+ assert f.body_len == 11
+
+ def test_headers_frame_with_priority_parses_properly(self):
+ # This test also tests that we can receive a HEADERS frame with no
+ # actual headers on it. This is technically possible.
+ s = (
+ b'\x00\x00\x05\x01\x20\x00\x00\x00\x01' +
+ b'\x80\x00\x00\x04\x40'
+ )
+ f = decode_frame(s)
+
+ assert isinstance(f, HeadersFrame)
+ assert f.flags == set(['PRIORITY'])
+ assert f.data == b''
+ assert f.depends_on == 4
+ assert f.stream_weight == 64
+ assert f.exclusive is True
+ assert f.body_len == 5
+
+ def test_headers_frame_with_priority_serializes_properly(self):
+ # This test also tests that we can send a HEADERS frame with no
+ # actual headers on it. This is technically possible.
+ s = (
+ b'\x00\x00\x05\x01\x20\x00\x00\x00\x01' +
+ b'\x80\x00\x00\x04\x40'
+ )
+ f = HeadersFrame(1)
+ f.flags = set(['PRIORITY'])
+ f.data = b''
+ f.depends_on = 4
+ f.stream_weight = 64
+ f.exclusive = True
+
+ assert f.serialize() == s
+
+ def test_headers_frame_with_invalid_padding_fails_to_parse(self):
+ # This frame has a padding length of 6 bytes, but a total length of
+ # only 5.
+ data = b'\x00\x00\x05\x01\x08\x00\x00\x00\x01\x06\x54\x65\x73\x74'
+
+ with pytest.raises(InvalidPaddingError):
+ decode_frame(data)
+
+ def test_headers_frame_with_no_length_parses(self):
+ # Fixes issue with empty data frames raising InvalidPaddingError.
+ f = HeadersFrame(1)
+ f.data = b''
+ data = f.serialize()
+
+ new_frame = decode_frame(data)
+ assert new_frame.data == b''
+
+
+class TestContinuationFrame(object):
+ def test_continuation_frame_flags(self):
+ f = ContinuationFrame(1)
+ flags = f.parse_flags(0xFF)
+
+ assert flags == set(['END_HEADERS'])
+
+ def test_continuation_frame_serializes(self):
+ f = ContinuationFrame(1)
+ f.parse_flags(0x04)
+ f.data = b'hello world'
+
+ s = f.serialize()
+ assert s == (
+ b'\x00\x00\x0B\x09\x04\x00\x00\x00\x01' +
+ b'hello world'
+ )
+
+ def test_continuation_frame_parses_properly(self):
+ s = b'\x00\x00\x0B\x09\x04\x00\x00\x00\x01hello world'
+ f = decode_frame(s)
+
+ assert isinstance(f, ContinuationFrame)
+ assert f.flags == set(['END_HEADERS'])
+ assert f.data == b'hello world'
+ assert f.body_len == 11
+
+
+class TestAltSvcFrame(object):
+ payload_with_origin = (
+ b'\x00\x00\x31' # Length
+ b'\x0A' # Type
+ b'\x00' # Flags
+ b'\x00\x00\x00\x00' # Stream ID
+ b'\x00\x0B' # Origin len
+ b'example.com' # Origin
+ b'h2="alt.example.com:8000", h2=":443"' # Field Value
+ )
+ payload_without_origin = (
+ b'\x00\x00\x13' # Length
+ b'\x0A' # Type
+ b'\x00' # Flags
+ b'\x00\x00\x00\x01' # Stream ID
+ b'\x00\x00' # Origin len
+ b'' # Origin
+ b'h2=":8000"; ma=60' # Field Value
+ )
+ payload_with_origin_and_stream = (
+ b'\x00\x00\x36' # Length
+ b'\x0A' # Type
+ b'\x00' # Flags
+ b'\x00\x00\x00\x01' # Stream ID
+ b'\x00\x0B' # Origin len
+ b'example.com' # Origin
+ b'Alt-Svc: h2=":443"; ma=2592000; persist=1' # Field Value
+ )
+
+ def test_altsvc_frame_flags(self):
+ f = AltSvcFrame(stream_id=0)
+ flags = f.parse_flags(0xFF)
+
+ assert flags == set()
+
+ def test_altsvc_frame_with_origin_serializes_properly(self):
+ f = AltSvcFrame(stream_id=0)
+ f.origin = b'example.com'
+ f.field = b'h2="alt.example.com:8000", h2=":443"'
+
+ s = f.serialize()
+ assert s == self.payload_with_origin
+
+ def test_altsvc_frame_with_origin_parses_properly(self):
+ f = decode_frame(self.payload_with_origin)
+
+ assert isinstance(f, AltSvcFrame)
+ assert f.origin == b'example.com'
+ assert f.field == b'h2="alt.example.com:8000", h2=":443"'
+ assert f.body_len == 49
+ assert f.stream_id == 0
+
+ def test_altsvc_frame_without_origin_serializes_properly(self):
+ f = AltSvcFrame(stream_id=1, origin=b'', field=b'h2=":8000"; ma=60')
+ s = f.serialize()
+ assert s == self.payload_without_origin
+
+ def test_altsvc_frame_without_origin_parses_properly(self):
+ f = decode_frame(self.payload_without_origin)
+
+ assert isinstance(f, AltSvcFrame)
+ assert f.origin == b''
+ assert f.field == b'h2=":8000"; ma=60'
+ assert f.body_len == 19
+ assert f.stream_id == 1
+
+ def test_altsvc_frame_without_origin_parses_with_good_repr(self):
+ f = decode_frame(self.payload_without_origin)
+
+ assert repr(f) == (
+ "AltSvcFrame(Stream: 1; Flags: None): 000068323d223a383030..."
+ )
+
+ def test_altsvc_frame_with_origin_and_stream_serializes_properly(self):
+ # This frame is not valid, but we allow it to be serialized anyway.
+ f = AltSvcFrame(stream_id=1)
+ f.origin = b'example.com'
+ f.field = b'Alt-Svc: h2=":443"; ma=2592000; persist=1'
+
+ assert f.serialize() == self.payload_with_origin_and_stream
+
+ def test_short_altsvc_frame_errors(self):
+ with pytest.raises(InvalidFrameError):
+ decode_frame(self.payload_with_origin[:12])
+
+ with pytest.raises(InvalidFrameError):
+ decode_frame(self.payload_with_origin[:10])
+
+ def test_altsvc_with_unicode_origin_fails(self):
+ with pytest.raises(ValueError):
+ AltSvcFrame(
+ stream_id=0, origin=u'hello', field=b'h2=":8000"; ma=60'
+ )
+
+ def test_altsvc_with_unicode_field_fails(self):
+ with pytest.raises(ValueError):
+ AltSvcFrame(
+ stream_id=0, origin=b'hello', field=u'h2=":8000"; ma=60'
+ )
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/.github/workflows/main.yml b/testing/web-platform/tests/tools/third_party/importlib_metadata/.github/workflows/main.yml
new file mode 100644
index 0000000000..b56320fd00
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/.github/workflows/main.yml
@@ -0,0 +1,126 @@
+name: Automated Tests
+
+on: [push, pull_request]
+
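+# Every job below delegates to tox with a specific TOXENV; the same checks can
+# be run locally with, e.g., `tox -e docs` or `TOXENV=docs tox`, assuming tox
+# is installed.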
+jobs:
+ test:
+ strategy:
+ matrix:
+ python: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]
+ platform: [ubuntu-latest, macos-latest, windows-latest]
+ runs-on: ${{ matrix.platform }}
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python }}
+ - name: Install tox
+ run: |
+ python -m pip install tox
+ - name: Run tests
+ run: tox
+ env:
+ TOXENV: python
+
+ qa:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: Install tox
+ run: |
+ python -m pip install tox
+ - name: Run checks
+ run: tox
+ env:
+ TOXENV: qa
+
+ coverage:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: Install tox
+ run: |
+ python -m pip install tox
+ - name: Evaluate coverage
+ run: tox
+ env:
+ TOXENV: cov
+
+ benchmark:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: Install tox
+ run: |
+ python -m pip install tox
+ - name: Run benchmarks
+ run: tox
+ env:
+ TOXENV: perf
+
+ diffcov:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+ - name: Setup Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: Install tox
+ run: |
+ python -m pip install tox
+ - name: Evaluate coverage
+ run: tox
+ env:
+ TOXENV: diffcov
+
+ docs:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: Install tox
+ run: |
+ python -m pip install tox
+ - name: Build docs
+ run: tox
+ env:
+ TOXENV: docs
+
+ release:
+ needs: test
+ if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: Install tox
+ run: |
+ python -m pip install tox
+ - name: Release
+ run: tox -e release
+ env:
+ TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/.gitignore b/testing/web-platform/tests/tools/third_party/importlib_metadata/.gitignore
new file mode 100644
index 0000000000..ae864d6125
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/.gitignore
@@ -0,0 +1,13 @@
+build
+/coverage.xml
+/diffcov.html
+htmlcov
+importlib_metadata.egg-info
+.mypy_cache
+/.coverage
+/.DS_Store
+artifacts
+.eggs
+.doctrees
+dist
+pip-wheel-metadata
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/.readthedocs.yml b/testing/web-platform/tests/tools/third_party/importlib_metadata/.readthedocs.yml
new file mode 100644
index 0000000000..8ae4468428
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/.readthedocs.yml
@@ -0,0 +1,5 @@
+python:
+ version: 3
+ extra_requirements:
+ - docs
+ pip_install: true
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/LICENSE b/testing/web-platform/tests/tools/third_party/importlib_metadata/LICENSE
new file mode 100644
index 0000000000..be7e092b0b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2017-2019 Jason R. Coombs, Barry Warsaw
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/MANIFEST.in b/testing/web-platform/tests/tools/third_party/importlib_metadata/MANIFEST.in
new file mode 100644
index 0000000000..3fcf6d633a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/MANIFEST.in
@@ -0,0 +1,5 @@
+include *.py MANIFEST.in LICENSE README.rst
+global-include *.txt *.rst *.ini *.cfg *.toml *.whl *.egg
+exclude .gitignore
+prune build
+prune .tox
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/README.rst b/testing/web-platform/tests/tools/third_party/importlib_metadata/README.rst
new file mode 100644
index 0000000000..5655d9ab98
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/README.rst
@@ -0,0 +1,42 @@
+=========================
+ ``importlib_metadata``
+=========================
+
+``importlib_metadata`` is a library to access the metadata for a
+Python package.
+
+As of Python 3.8, this functionality has been added to the
+`Python standard library
+<https://docs.python.org/3/library/importlib.metadata.html>`_.
+This package supplies backports of that functionality including
+improvements added to subsequent Python versions.
+
+
+Usage
+=====
+
+See the `online documentation <https://importlib_metadata.readthedocs.io/>`_
+for usage details.
+
+`Finder authors
+<https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
+also add support for custom package installers. See the above documentation
+for details.
+
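+A short example, assuming the package is installed (the documentation linked
+above covers the full API)::
+
+    from importlib_metadata import version, requires
+
+    version('importlib-metadata')   # -> a version string, e.g. '2.1.0'
+    requires('importlib-metadata')  # -> a list of requirement strings, or None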
+
+Caveats
+=======
+
+This project primarily supports third-party packages installed by PyPA
+tools (or other conforming packages). It does not support:
+
+- Packages in the stdlib.
+- Packages installed without metadata.
+
+Project details
+===============
+
+ * Project home: https://github.com/python/importlib_metadata
+ * Report bugs at: https://github.com/python/importlib_metadata/issues
+ * Code hosting: https://github.com/python/importlib_metadata
+ * Documentation: https://importlib_metadata.readthedocs.io/
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/codecov.yml b/testing/web-platform/tests/tools/third_party/importlib_metadata/codecov.yml
new file mode 100644
index 0000000000..66c7f4bd19
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/codecov.yml
@@ -0,0 +1,2 @@
+codecov:
+ token: 5eb1bc45-1b7f-43e6-8bc1-f2b02833dba9
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/coverage.ini b/testing/web-platform/tests/tools/third_party/importlib_metadata/coverage.ini
new file mode 100644
index 0000000000..b4d3102f42
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/coverage.ini
@@ -0,0 +1,24 @@
+[run]
+branch = true
+parallel = true
+omit =
+ setup*
+ .tox/*/lib/python*
+ */tests/*.py
+ */testing/*.py
+ /usr/local/*
+ */mod.py
+plugins =
+ coverplug
+
+[report]
+exclude_lines =
+ pragma: nocover
+ raise NotImplementedError
+ raise AssertionError
+ assert\s
+ nocoverpy${PYV}
+
+[paths]
+source =
+ importlib_metadata
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/coverplug.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/coverplug.py
new file mode 100644
index 0000000000..0b0c7cb549
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/coverplug.py
@@ -0,0 +1,21 @@
+"""Coverage plugin to add exclude lines based on the Python version."""
+
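+# Enabled through the ``plugins = coverplug`` entry in coverage.ini;
+# coverage.py imports this module and calls coverage_init() to register the
+# configurer below.
+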
+import sys
+
+from coverage import CoveragePlugin
+
+
+class MyConfigPlugin(CoveragePlugin):
+ def configure(self, config):
+ opt_name = 'report:exclude_lines'
+ exclude_lines = config.get_option(opt_name)
+ # Python >= 3.6 has os.PathLike.
+ if sys.version_info >= (3, 6):
+ exclude_lines.append('pragma: >=36')
+ else:
+ exclude_lines.append('pragma: <=35')
+ config.set_option(opt_name, exclude_lines)
+
+
+def coverage_init(reg, options):
+ reg.add_configurer(MyConfigPlugin())
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/__init__.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/changelog.rst b/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/changelog.rst
new file mode 100644
index 0000000000..396535744a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/changelog.rst
@@ -0,0 +1,314 @@
+=========================
+ importlib_metadata NEWS
+=========================
+
+v2.1.0
+======
+
+* #253: When querying for package metadata, the lookup
+ now honors
+ `package normalization rules <https://packaging.python.org/specifications/recording-installed-packages/>`_.
+
+v2.0.0
+======
+
+* ``importlib_metadata`` no longer presents a
+ ``__version__`` attribute. Consumers wishing to
+ resolve the version of the package should query it
+ directly with
+ ``importlib_metadata.version('importlib-metadata')``.
+ Closes #71.
+
+v1.7.0
+======
+
+* ``PackageNotFoundError`` now has a custom ``__str__``
+ mentioning "package metadata" being missing to help
+ guide users to the cause when the package is installed
+ but no metadata is present. Closes #124.
+
+v1.6.1
+======
+
+* Added ``Distribution._local()`` as a provisional
+ demonstration of how to load metadata for a local
+ package. Implicitly requires that
+ `pep517 <https://pypi.org/project/pep517>`_ is
+ installed. Ref #42.
+* Ensure inputs to FastPath are Unicode. Closes #121.
+* Tests now rely on ``importlib.resources.files`` (and
+ backport) instead of the older ``path`` function.
+* Support any iterable from ``find_distributions``.
+ Closes #122.
+
+v1.6.0
+======
+
+* Added ``module`` and ``attr`` attributes to ``EntryPoint``
+
+v1.5.2
+======
+
+* Fix redundant entries from ``FastPath.zip_children``.
+ Closes #117.
+
+v1.5.1
+======
+
+* Improve reliability and consistency of compatibility
+ imports for contextlib and pathlib when running tests.
+ Closes #116.
+
+v1.5.0
+======
+
+* Additional performance optimizations in FastPath now
+ save an additional 20% on a typical call.
+* Correct for issue where PyOxidizer finder has no
+ ``__module__`` attribute. Closes #110.
+
+v1.4.0
+======
+
+* Through careful optimization, ``distribution()`` is
+ 3-4x faster. Thanks to Antony Lee for the
+ contribution. Closes #95.
+
+* When searching through ``sys.path``, if any error
+ occurs attempting to list a path entry, that entry
+ is skipped, making the system much more lenient
+ to errors. Closes #94.
+
+v1.3.0
+======
+
+* Improve custom finders documentation. Closes #105.
+
+v1.2.0
+======
+
+* Once again, drop support for Python 3.4. Ref #104.
+
+v1.1.3
+======
+
+* Restored support for Python 3.4 due to improper version
+ compatibility declarations in the v1.1.0 and v1.1.1
+ releases. Closes #104.
+
+v1.1.2
+======
+
+* Repaired project metadata to correctly declare the
+ ``python_requires`` directive. Closes #103.
+
+v1.1.1
+======
+
+* Fixed ``repr(EntryPoint)`` on PyPy 3 also. Closes #102.
+
+v1.1.0
+======
+
+* Dropped support for Python 3.4.
+* EntryPoints are now pickleable. Closes #96.
+* Fixed ``repr(EntryPoint)`` on PyPy 2. Closes #97.
+
+v1.0.0
+======
+
+* Project adopts semver for versioning.
+
+* Removed compatibility shim introduced in 0.23.
+
+* For better compatibility with the stdlib implementation and to
+ avoid the same distributions being discovered by the stdlib and
+ backport implementations, the backport now disables the
+ stdlib DistributionFinder during initialization (import time).
+ Closes #91 and closes #100.
+
+0.23
+====
+* Added a compatibility shim to prevent failures on beta releases
+ of Python before the signature changed to accept the
+ "context" parameter on find_distributions. This workaround
+ will have a limited lifespan, not to extend beyond release of
+ Python 3.8 final.
+
+0.22
+====
+* Renamed ``package`` parameter to ``distribution_name``
+ as `recommended <https://bugs.python.org/issue34632#msg349423>`_
+ in the following functions: ``distribution``, ``metadata``,
+ ``version``, ``files``, and ``requires``. This
+ backward-incompatible change is expected to have little impact
+ as these functions are assumed to be primarily used with
+ positional parameters.
+
+0.21
+====
+* ``importlib.metadata`` now exposes the ``DistributionFinder``
+ metaclass and references it in the docs for extending the
+ search algorithm.
+* Add ``Distribution.at`` for constructing a Distribution object
+ from a known metadata directory on the file system. Closes #80.
+* Distribution finders now receive a context object that
+ supplies ``.path`` and ``.name`` properties. This change
+ introduces a fundamental backward incompatibility for
+ any projects implementing a ``find_distributions`` method
+ on a ``MetaPathFinder``. This new layer of abstraction
+ allows this context to be supplied directly or constructed
+ on demand and opens the opportunity for a
+ ``find_distributions`` method to solicit additional
+ context from the caller. Closes #85.
+
+0.20
+====
+* Clarify in the docs that calls to ``.files`` could return
+ ``None`` when the metadata is not present. Closes #69.
+* Return all requirements and not just the first for dist-info
+ packages. Closes #67.
+
+0.19
+====
+* Restrain over-eager egg metadata resolution.
+* Add support for entry points with colons in the name. Closes #75.
+
+0.18
+====
+* Parse entry points case sensitively. Closes #68
+* Add a version constraint on the backport configparser package. Closes #66
+
+0.17
+====
+* Fix a permission problem in the tests on Windows.
+
+0.16
+====
+* Don't crash if there exists an EGG-INFO directory on sys.path.
+
+0.15
+====
+* Fix documentation.
+
+0.14
+====
+* Removed ``local_distribution`` function from the API.
+ **This backward-incompatible change removes this
+ behavior summarily**. Projects should remove their
+ reliance on this behavior. A replacement behavior is
+ under review in the `pep517 project
+ <https://github.com/pypa/pep517>`_. Closes #42.
+
+0.13
+====
+* Update docstrings to match PEP 8. Closes #63.
+* Merged modules into one module. Closes #62.
+
+0.12
+====
+* Add support for eggs. !65; Closes #19.
+
+0.11
+====
+* Support generic zip files (not just wheels). Closes #59
+* Support zip files with multiple distributions in them. Closes #60
+* Fully expose the public API in ``importlib_metadata.__all__``.
+
+0.10
+====
+* The ``Distribution`` ABC is now officially part of the public API.
+ Closes #37.
+* Fixed support for older single file egg-info formats. Closes #43.
+* Fixed a testing bug when ``$CWD`` has spaces in the path. Closes #50.
+* Add Python 3.8 to the ``tox`` testing matrix.
+
+0.9
+===
+* Fixed issue where entry points without an attribute would raise an
+ Exception. Closes #40.
+* Removed unused ``name`` parameter from ``entry_points()``. Closes #44.
+* ``DistributionFinder`` classes must now be instantiated before
+ being placed on ``sys.meta_path``.
+
+0.8
+===
+* This library can now discover/enumerate all installed packages. **This
+ backward-incompatible change alters the protocol finders must
+ implement to support distribution package discovery.** Closes #24.
+* The signature of ``find_distributions()`` on custom installer finders
+  should now accept two parameters, ``name`` and ``path``, and
+ these parameters must supply defaults.
+* The ``entry_points()`` method no longer accepts a package name
+ but instead returns all entry points in a dictionary keyed by the
+ ``EntryPoint.group``. The ``resolve`` method has been removed. Instead,
+ call ``EntryPoint.load()``, which has the same semantics as
+ ``pkg_resources`` and ``entrypoints``. **This is a backward incompatible
+ change.**
+* Metadata is now always returned as Unicode text regardless of
+ Python version. Closes #29.
+* This library can now discover metadata for a 'local' package (found
+ in the current-working directory). Closes #27.
+* Added ``files()`` function for resolving files from a distribution.
+* Added a new ``requires()`` function, which returns the requirements
+ for a package suitable for parsing by
+ ``packaging.requirements.Requirement``. Closes #18.
+* The top-level ``read_text()`` function has been removed. Use
+ ``PackagePath.read_text()`` on instances returned by the ``files()``
+ function. **This is a backward incompatible change.**
+* Release dates are now automatically injected into the changelog
+ based on SCM tags.
+
+0.7
+===
+* Fixed issue where packages with dashes in their names would
+ not be discovered. Closes #21.
+* Distribution lookup is now case-insensitive. Closes #20.
+* Wheel distributions can no longer be discovered by their module
+ name. Like Path distributions, they must be indicated by their
+ distribution package name.
+
+0.6
+===
+* Removed ``importlib_metadata.distribution`` function. Now
+ the public interface is primarily the utility functions exposed
+ in ``importlib_metadata.__all__``. Closes #14.
+* Added two new utility functions ``read_text`` and
+ ``metadata``.
+
+0.5
+===
+* Updated README and removed details about Distribution
+ class, now considered private. Closes #15.
+* Added test suite support for Python 3.4+.
+* Fixed SyntaxErrors on Python 3.4 and 3.5. !12
+* Fixed errors on Windows joining Path elements. !15
+
+0.4
+===
+* Housekeeping.
+
+0.3
+===
+* Added usage documentation. Closes #8
+* Add support for getting metadata from wheels on ``sys.path``. Closes #9
+
+0.2
+===
+* Added ``importlib_metadata.entry_points()``. Closes #1
+* Added ``importlib_metadata.resolve()``. Closes #12
+* Add support for Python 2.7. Closes #4
+
+0.1
+===
+* Initial release.
+
+
+..
+ Local Variables:
+ mode: change-log-mode
+ indent-tabs-mode: nil
+ sentence-end-double-space: t
+ fill-column: 78
+ coding: utf-8
+ End:
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/conf.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/conf.py
new file mode 100644
index 0000000000..129a7a4eae
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/conf.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# importlib_metadata documentation build configuration file, created by
+# sphinx-quickstart on Thu Nov 30 10:21:00 2017.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+# import os
+# import sys
+# sys.path.insert(0, os.path.abspath('.'))
+
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'rst.linker',
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.coverage',
+ 'sphinx.ext.doctest',
+ 'sphinx.ext.intersphinx',
+ 'sphinx.ext.viewcode',
+ ]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'importlib_metadata'
+copyright = '2017-2019, Jason R. Coombs, Barry Warsaw'
+author = 'Jason R. Coombs, Barry Warsaw'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '0.1'
+# The full version, including alpha/beta/rc tags.
+release = '0.1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This patterns also effect to html_static_path and html_extra_path
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'default'
+
+# Custom sidebar templates, must be a dictionary that maps document names
+# to template names.
+#
+# This is required for the alabaster theme
+# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
+html_sidebars = {
+ '**': [
+ 'relations.html', # needs 'show_related': True theme option to display
+ 'searchbox.html',
+ ]
+ }
+
+
+# -- Options for HTMLHelp output ------------------------------------------
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'importlib_metadatadoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+ #
+ # 'papersize': 'letterpaper',
+
+ # The font size ('10pt', '11pt' or '12pt').
+ #
+ # 'pointsize': '10pt',
+
+ # Additional stuff for the LaTeX preamble.
+ #
+ # 'preamble': '',
+
+ # Latex figure (float) alignment
+ #
+ # 'figure_align': 'htbp',
+ }
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (master_doc, 'importlib_metadata.tex',
+ 'importlib\\_metadata Documentation',
+ 'Brett Cannon, Barry Warsaw', 'manual'),
+ ]
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (master_doc, 'importlib_metadata', 'importlib_metadata Documentation',
+ [author], 1)
+ ]
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (master_doc, 'importlib_metadata', 'importlib_metadata Documentation',
+ author, 'importlib_metadata', 'One line description of project.',
+ 'Miscellaneous'),
+ ]
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+ 'python': ('https://docs.python.org/3', None),
+ 'importlib_resources': (
+ 'https://importlib-resources.readthedocs.io/en/latest/', None
+ ),
+ }
+
+
+# For rst.linker, inject release dates into changelog.rst
+link_files = {
+ 'changelog.rst': dict(
+ replace=[
+ dict(
+ pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
+ with_scm='{text}\n{rev[timestamp]:%Y-%m-%d}\n\n',
+ ),
+ ],
+ ),
+ }
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/index.rst b/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/index.rst
new file mode 100644
index 0000000000..57332f5e8d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/index.rst
@@ -0,0 +1,52 @@
+===============================
+ Welcome to importlib_metadata
+===============================
+
+``importlib_metadata`` is a library which provides an API for accessing an
+installed package's metadata (see :pep:`566`), such as its entry points or its top-level
+name. This functionality intends to replace most uses of ``pkg_resources``
+`entry point API`_ and `metadata API`_. Along with :mod:`importlib.resources` in
+Python 3.7 and newer (backported as :doc:`importlib_resources <importlib_resources:index>` for older
+versions of Python), this can eliminate the need to use the older and less
+efficient ``pkg_resources`` package.
+
+``importlib_metadata`` supplies a backport of
+:doc:`importlib.metadata <library/importlib.metadata>` as found in
+Python 3.8 and later for earlier Python releases. Users of
+Python 3.8 and beyond are encouraged to use the standard library module
+when possible and fall back to ``importlib_metadata`` when necessary.
+When imported on Python 3.8 and later, ``importlib_metadata`` replaces the
+DistributionFinder behavior from the stdlib, but leaves the API intact.
+Developers looking for detailed API descriptions should refer to the Python
+3.8 standard library documentation.
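+
+For projects that support both current and older Pythons, a common pattern
+(shown here as a minimal sketch, not an official recipe) is to prefer the
+standard library module and fall back to this backport::
+
+    try:
+        from importlib.metadata import version  # Python 3.8 and later
+    except ImportError:
+        from importlib_metadata import version  # backport for older Pythons
+
+    print(version('wheel'))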
+
+The documentation here includes a general :ref:`usage <using>` guide.
+
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Contents:
+
+ using.rst
+ changelog (links).rst
+
+
+Project details
+===============
+
+ * Project home: https://github.com/python/importlib_metadata
+ * Report bugs at: https://github.com/python/importlib_metadata/issues
+ * Code hosting: https://github.com/python/importlib_metadata
+ * Documentation: https://importlib_metadata.readthedocs.io/
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
+
+.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
+.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/using.rst b/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/using.rst
new file mode 100644
index 0000000000..11965147f4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/docs/using.rst
@@ -0,0 +1,260 @@
+.. _using:
+
+=================================
+ Using :mod:`!importlib_metadata`
+=================================
+
+``importlib_metadata`` is a library that provides for access to installed
+package metadata. Built in part on Python's import system, this library
+intends to replace similar functionality in the `entry point
+API`_ and `metadata API`_ of ``pkg_resources``. Along with
+:mod:`importlib.resources` in Python 3.7
+and newer (backported as :doc:`importlib_resources <importlib_resources:index>` for older versions of
+Python), this can eliminate the need to use the older and less efficient
+``pkg_resources`` package.
+
+By "installed package" we generally mean a third-party package installed into
+Python's ``site-packages`` directory via tools such as `pip
+<https://pypi.org/project/pip/>`_. Specifically,
+it means a package with either a discoverable ``dist-info`` or ``egg-info``
+directory, and metadata defined by :pep:`566` or its older specifications.
+By default, package metadata can live on the file system or in zip archives on
+:data:`sys.path`. Through an extension mechanism, the metadata can live almost
+anywhere.
+
+
+Overview
+========
+
+Let's say you wanted to get the version string for a package you've installed
+using ``pip``. We start by creating a virtual environment and installing
+something into it::
+
+ $ python3 -m venv example
+ $ source example/bin/activate
+ (example) $ pip install importlib_metadata
+ (example) $ pip install wheel
+
+You can get the version string for ``wheel`` by running the following::
+
+ (example) $ python
+ >>> from importlib_metadata import version
+ >>> version('wheel')
+ '0.32.3'
+
+You can also get the set of entry points keyed by group, such as
+``console_scripts``, ``distutils.commands`` and others. Each group contains a
+sequence of :ref:`EntryPoint <entry-points>` objects.
+
+You can get the :ref:`metadata for a distribution <metadata>`::
+
+ >>> list(metadata('wheel'))
+ ['Metadata-Version', 'Name', 'Version', 'Summary', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', 'Project-URL', 'Project-URL', 'Project-URL', 'Keywords', 'Platform', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Requires-Python', 'Provides-Extra', 'Requires-Dist', 'Requires-Dist']
+
+You can also get a :ref:`distribution's version number <version>`, list its
+:ref:`constituent files <files>`, and get a list of the distribution's
+:ref:`requirements`.
+
+
+Functional API
+==============
+
+This package provides the following functionality via its public API.
+
+
+.. _entry-points:
+
+Entry points
+------------
+
+The ``entry_points()`` function returns a dictionary of all entry points,
+keyed by group. Entry points are represented by ``EntryPoint`` instances;
+each ``EntryPoint`` has ``.name``, ``.group``, and ``.value`` attributes and
+a ``.load()`` method to resolve the value. There are also ``.module``,
+``.attr``, and ``.extras`` attributes for getting the components of the
+``.value`` attribute::
+
+ >>> eps = entry_points()
+ >>> list(eps)
+ ['console_scripts', 'distutils.commands', 'distutils.setup_keywords', 'egg_info.writers', 'setuptools.installation']
+ >>> scripts = eps['console_scripts']
+ >>> wheel = [ep for ep in scripts if ep.name == 'wheel'][0]
+ >>> wheel
+ EntryPoint(name='wheel', value='wheel.cli:main', group='console_scripts')
+ >>> wheel.module
+ 'wheel.cli'
+ >>> wheel.attr
+ 'main'
+ >>> wheel.extras
+ []
+ >>> main = wheel.load()
+ >>> main
+ <function main at 0x103528488>
+
+The ``group`` and ``name`` are arbitrary values defined by the package author
+and usually a client will wish to resolve all entry points for a particular
+group. Read `the setuptools docs
+<https://setuptools.readthedocs.io/en/latest/setuptools.html#dynamic-discovery-of-services-and-plugins>`_
+for more information on entry points, their definition, and usage.
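+
+As a small sketch of that common pattern (``console_scripts`` is used here
+purely as an example group), a client can iterate over one group and load
+each entry point::
+
+    from importlib_metadata import entry_points
+
+    for ep in entry_points().get('console_scripts', ()):
+        # load() imports the module and resolves the object named by ep.value
+        command = ep.load()
+        print(ep.name, command)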
+
+
+.. _metadata:
+
+Distribution metadata
+---------------------
+
+Every distribution includes some metadata, which you can extract using the
+``metadata()`` function::
+
+ >>> wheel_metadata = metadata('wheel')
+
+The keys of the returned data structure [#f1]_ name the metadata keywords, and
+their values are returned unparsed from the distribution metadata::
+
+ >>> wheel_metadata['Requires-Python']
+ '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'
+
+
+.. _version:
+
+Distribution versions
+---------------------
+
+The ``version()`` function is the quickest way to get a distribution's version
+number, as a string::
+
+ >>> version('wheel')
+ '0.32.3'
+
+
+.. _files:
+
+Distribution files
+------------------
+
+You can also get the full set of files contained within a distribution. The
+``files()`` function takes a distribution package name and returns all of the
+files installed by this distribution. Each file object returned is a
+``PackagePath``, a :class:`pathlib.Path` derived object with additional ``dist``,
+``size``, and ``hash`` properties as indicated by the metadata. For example::
+
+ >>> util = [p for p in files('wheel') if 'util.py' in str(p)][0]
+ >>> util
+ PackagePath('wheel/util.py')
+ >>> util.size
+ 859
+ >>> util.dist
+ <importlib_metadata._hooks.PathDistribution object at 0x101e0cef0>
+ >>> util.hash
+ <FileHash mode: sha256 value: bYkw5oMccfazVCoYQwKkkemoVyMAFoR34mmKBx8R1NI>
+
+Once you have the file, you can also read its contents::
+
+ >>> print(util.read_text())
+ import base64
+ import sys
+ ...
+ def as_bytes(s):
+ if isinstance(s, text_type):
+ return s.encode('utf-8')
+ return s
+
+In the case where the metadata file listing files
+(RECORD or SOURCES.txt) is missing, ``files()`` will
+return ``None``. The caller may wish to wrap calls to
+``files()`` in `always_iterable
+<https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_
+or otherwise guard against this condition if the target
+distribution is not known to have the metadata present.
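+
+For example, a minimal guard (using a plain empty-tuple fallback rather than
+``always_iterable``) might look like::
+
+    from importlib_metadata import files
+
+    # files() returns None when the RECORD/SOURCES.txt metadata is missing,
+    # so the "or ()" fallback keeps the loop safe in that case.
+    for package_path in files('wheel') or ():
+        print(package_path, package_path.size)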
+
+.. _requirements:
+
+Distribution requirements
+-------------------------
+
+To get the full set of requirements for a distribution, use the ``requires()``
+function::
+
+ >>> requires('wheel')
+ ["pytest (>=3.0.0) ; extra == 'test'", "pytest-cov ; extra == 'test'"]
+
+
+Distributions
+=============
+
+While the above API is the most common and convenient usage, you can get all
+of that information from the ``Distribution`` class. A ``Distribution`` is an
+abstract object that represents the metadata for a Python package. You can
+get the ``Distribution`` instance::
+
+ >>> from importlib_metadata import distribution
+ >>> dist = distribution('wheel')
+
+Thus, an alternative way to get the version number is through the
+``Distribution`` instance::
+
+ >>> dist.version
+ '0.32.3'
+
+There are all kinds of additional metadata available on the ``Distribution``
+instance::
+
+ >>> dist.metadata['Requires-Python']
+ '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'
+ >>> dist.metadata['License']
+ 'MIT'
+
+The full set of available metadata is not described here. See :pep:`566`
+for additional details.
+
+
+Extending the search algorithm
+==============================
+
+Because package metadata is not available through :data:`sys.path` searches, or
+package loaders directly, the metadata for a package is found through import
+system `finders`_. To find a distribution package's metadata,
+``importlib.metadata`` queries the list of :term:`meta path finders <meta path finder>` on
+:data:`sys.meta_path`.
+
+By default ``importlib_metadata`` installs a finder for distribution packages
+found on the file system. This finder doesn't actually find any *packages*,
+but it can find the packages' metadata.
+
+The abstract class :py:class:`importlib.abc.MetaPathFinder` defines the
+interface expected of finders by Python's import system.
+``importlib_metadata`` extends this protocol by looking for an optional
+``find_distributions`` callable on the finders from
+:data:`sys.meta_path` and presents this extended interface as the
+``DistributionFinder`` abstract base class, which defines this abstract
+method::
+
+ @abc.abstractmethod
+ def find_distributions(context=DistributionFinder.Context()):
+ """Return an iterable of all Distribution instances capable of
+ loading the metadata for packages for the indicated ``context``.
+ """
+
+The ``DistributionFinder.Context`` object provides ``.path`` and ``.name``
+properties indicating the path to search and name to match and may
+supply other relevant context.
+
+What this means in practice is that to support finding distribution package
+metadata in locations other than the file system, subclass
+``Distribution`` and implement the abstract methods. Then from
+a custom finder, return instances of this derived ``Distribution`` in the
+``find_distributions()`` method.
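+
+A minimal sketch of that approach follows; the class names, the ``demo``
+package, and its metadata are purely illustrative, not part of this library::
+
+    import sys
+
+    from importlib_metadata import Distribution
+
+    class InMemoryDistribution(Distribution):
+        """Serve metadata for one package from an in-memory mapping."""
+
+        def __init__(self, metadata_files):
+            # e.g. {'METADATA': 'Name: demo\nVersion: 1.0\n'}
+            self._files = metadata_files
+
+        def read_text(self, filename):
+            # Returning None signals "this metadata file is not present".
+            return self._files.get(filename)
+
+        def locate_file(self, path):
+            # Nothing exists on disk; return the path unchanged.
+            return path
+
+    class InMemoryFinder:
+        def find_spec(self, name, path=None, target=None):
+            return None  # never finds modules, only distribution metadata
+
+        def find_distributions(self, context=None):
+            if context is None or context.name in (None, 'demo'):
+                yield InMemoryDistribution(
+                    {'METADATA': 'Name: demo\nVersion: 1.0\n'})
+
+    sys.meta_path.append(InMemoryFinder())
+
+After the finder is appended to :data:`sys.meta_path`, ``version('demo')``
+resolves to ``'1.0'`` through this in-memory metadata.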
+
+
+.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
+.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api
+.. _`finders`: https://docs.python.org/3/reference/import.html#finders-and-loaders
+
+
+.. rubric:: Footnotes
+
+.. [#f1] Technically, the returned distribution metadata object is an
+ :class:`email.message.EmailMessage`
+ instance, but this is an implementation detail, and not part of the
+ stable API. You should only use dictionary-like methods and syntax
+ to access the metadata contents.
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/importlib_metadata/__init__.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/importlib_metadata/__init__.py
new file mode 100644
index 0000000000..d5cbc2d03c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/importlib_metadata/__init__.py
@@ -0,0 +1,627 @@
+from __future__ import unicode_literals, absolute_import
+
+import io
+import os
+import re
+import abc
+import csv
+import sys
+import zipp
+import operator
+import functools
+import itertools
+import posixpath
+import collections
+
+from ._compat import (
+ install,
+ NullFinder,
+ ConfigParser,
+ suppress,
+ map,
+ FileNotFoundError,
+ IsADirectoryError,
+ NotADirectoryError,
+ PermissionError,
+ pathlib,
+ ModuleNotFoundError,
+ MetaPathFinder,
+ email_message_from_string,
+ PyPy_repr,
+ unique_ordered,
+ str,
+ )
+from importlib import import_module
+from itertools import starmap
+
+
+__metaclass__ = type
+
+
+__all__ = [
+ 'Distribution',
+ 'DistributionFinder',
+ 'PackageNotFoundError',
+ 'distribution',
+ 'distributions',
+ 'entry_points',
+ 'files',
+ 'metadata',
+ 'requires',
+ 'version',
+ ]
+
+
+class PackageNotFoundError(ModuleNotFoundError):
+ """The package was not found."""
+
+ def __str__(self):
+ tmpl = "No package metadata was found for {self.name}"
+ return tmpl.format(**locals())
+
+ @property
+ def name(self):
+ name, = self.args
+ return name
+
+
+class EntryPoint(
+ PyPy_repr,
+ collections.namedtuple('EntryPointBase', 'name value group')):
+ """An entry point as defined by Python packaging conventions.
+
+ See `the packaging docs on entry points
+ <https://packaging.python.org/specifications/entry-points/>`_
+ for more information.
+ """
+
+ pattern = re.compile(
+ r'(?P<module>[\w.]+)\s*'
+ r'(:\s*(?P<attr>[\w.]+))?\s*'
+ r'(?P<extras>\[.*\])?\s*$'
+ )
+ """
+ A regular expression describing the syntax for an entry point,
+ which might look like:
+
+ - module
+ - package.module
+ - package.module:attribute
+ - package.module:object.attribute
+ - package.module:attr [extra1, extra2]
+
+ Other combinations are possible as well.
+
+ The expression is lenient about whitespace around the ':',
+ following the attr, and following any extras.
+ """
+
+ def load(self):
+ """Load the entry point from its definition. If only a module
+ is indicated by the value, return that module. Otherwise,
+ return the named object.
+ """
+ match = self.pattern.match(self.value)
+ module = import_module(match.group('module'))
+ attrs = filter(None, (match.group('attr') or '').split('.'))
+ return functools.reduce(getattr, attrs, module)
+
+ @property
+ def module(self):
+ match = self.pattern.match(self.value)
+ return match.group('module')
+
+ @property
+ def attr(self):
+ match = self.pattern.match(self.value)
+ return match.group('attr')
+
+ @property
+ def extras(self):
+ match = self.pattern.match(self.value)
+ return list(re.finditer(r'\w+', match.group('extras') or ''))
+
+ @classmethod
+ def _from_config(cls, config):
+ return [
+ cls(name, value, group)
+ for group in config.sections()
+ for name, value in config.items(group)
+ ]
+
+ @classmethod
+ def _from_text(cls, text):
+ config = ConfigParser(delimiters='=')
+ # case sensitive: https://stackoverflow.com/q/1611799/812183
+ config.optionxform = str
+ try:
+ config.read_string(text)
+ except AttributeError: # pragma: nocover
+ # Python 2 has no read_string
+ config.readfp(io.StringIO(text))
+ return EntryPoint._from_config(config)
+
+ def __iter__(self):
+ """
+ Supply iter so one may construct dicts of EntryPoints easily.
+ """
+ return iter((self.name, self))
+
+ def __reduce__(self):
+ return (
+ self.__class__,
+ (self.name, self.value, self.group),
+ )
+
+
+class PackagePath(pathlib.PurePosixPath):
+ """A reference to a path in a package"""
+
+ def read_text(self, encoding='utf-8'):
+ with self.locate().open(encoding=encoding) as stream:
+ return stream.read()
+
+ def read_binary(self):
+ with self.locate().open('rb') as stream:
+ return stream.read()
+
+ def locate(self):
+ """Return a path-like object for this path"""
+ return self.dist.locate_file(self)
+
+
+class FileHash:
+ def __init__(self, spec):
+ self.mode, _, self.value = spec.partition('=')
+
+ def __repr__(self):
+ return '<FileHash mode: {} value: {}>'.format(self.mode, self.value)
+
+
+class Distribution:
+ """A Python distribution package."""
+
+ @abc.abstractmethod
+ def read_text(self, filename):
+ """Attempt to load metadata file given by the name.
+
+ :param filename: The name of the file in the distribution info.
+ :return: The text if found, otherwise None.
+ """
+
+ @abc.abstractmethod
+ def locate_file(self, path):
+ """
+ Given a path to a file in this distribution, return a path
+ to it.
+ """
+
+ @classmethod
+ def from_name(cls, name):
+ """Return the Distribution for the given package name.
+
+ :param name: The name of the distribution package to search for.
+ :return: The Distribution instance (or subclass thereof) for the named
+ package, if found.
+ :raises PackageNotFoundError: When the named package's distribution
+ metadata cannot be found.
+ """
+ for resolver in cls._discover_resolvers():
+ dists = resolver(DistributionFinder.Context(name=name))
+ dist = next(iter(dists), None)
+ if dist is not None:
+ return dist
+ else:
+ raise PackageNotFoundError(name)
+
+ @classmethod
+ def discover(cls, **kwargs):
+ """Return an iterable of Distribution objects for all packages.
+
+ Pass a ``context`` or pass keyword arguments for constructing
+ a context.
+
+ :context: A ``DistributionFinder.Context`` object.
+ :return: Iterable of Distribution objects for all packages.
+ """
+ context = kwargs.pop('context', None)
+ if context and kwargs:
+ raise ValueError("cannot accept context and kwargs")
+ context = context or DistributionFinder.Context(**kwargs)
+ return itertools.chain.from_iterable(
+ resolver(context)
+ for resolver in cls._discover_resolvers()
+ )
+
+ @staticmethod
+ def at(path):
+ """Return a Distribution for the indicated metadata path
+
+ :param path: a string or path-like object
+ :return: a concrete Distribution instance for the path
+ """
+ return PathDistribution(pathlib.Path(path))
+
+ @staticmethod
+ def _discover_resolvers():
+ """Search the meta_path for resolvers."""
+ declared = (
+ getattr(finder, 'find_distributions', None)
+ for finder in sys.meta_path
+ )
+ return filter(None, declared)
+
+ @classmethod
+ def _local(cls, root='.'):
+ from pep517 import build, meta
+ system = build.compat_system(root)
+ builder = functools.partial(
+ meta.build,
+ source_dir=root,
+ system=system,
+ )
+ return PathDistribution(zipp.Path(meta.build_as_zip(builder)))
+
+ @property
+ def metadata(self):
+ """Return the parsed metadata for this Distribution.
+
+ The returned object will have keys that name the various bits of
+ metadata. See PEP 566 for details.
+ """
+ text = (
+ self.read_text('METADATA')
+ or self.read_text('PKG-INFO')
+ # This last clause is here to support old egg-info files. Its
+ # effect is to just end up using the PathDistribution's self._path
+ # (which points to the egg-info file) attribute unchanged.
+ or self.read_text('')
+ )
+ return email_message_from_string(text)
+
+ @property
+ def version(self):
+ """Return the 'Version' metadata for the distribution package."""
+ return self.metadata['Version']
+
+ @property
+ def entry_points(self):
+ return EntryPoint._from_text(self.read_text('entry_points.txt'))
+
+ @property
+ def files(self):
+ """Files in this distribution.
+
+ :return: List of PackagePath for this distribution or None
+
+ Result is `None` if the metadata file that enumerates files
+ (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
+ missing.
+ Result may be empty if the metadata exists but is empty.
+ """
+ file_lines = self._read_files_distinfo() or self._read_files_egginfo()
+
+ def make_file(name, hash=None, size_str=None):
+ result = PackagePath(name)
+ result.hash = FileHash(hash) if hash else None
+ result.size = int(size_str) if size_str else None
+ result.dist = self
+ return result
+
+ return file_lines and list(starmap(make_file, csv.reader(file_lines)))
+
+ def _read_files_distinfo(self):
+ """
+ Read the lines of RECORD
+ """
+ text = self.read_text('RECORD')
+ return text and text.splitlines()
+
+ def _read_files_egginfo(self):
+ """
+ SOURCES.txt might contain literal commas, so wrap each line
+ in quotes.
+ """
+ text = self.read_text('SOURCES.txt')
+ return text and map('"{}"'.format, text.splitlines())
+
+ @property
+ def requires(self):
+ """Generated requirements specified for this Distribution"""
+ reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
+ return reqs and list(reqs)
+
+ def _read_dist_info_reqs(self):
+ return self.metadata.get_all('Requires-Dist')
+
+ def _read_egg_info_reqs(self):
+ source = self.read_text('requires.txt')
+ return source and self._deps_from_requires_text(source)
+
+ @classmethod
+ def _deps_from_requires_text(cls, source):
+ section_pairs = cls._read_sections(source.splitlines())
+ sections = {
+ section: list(map(operator.itemgetter('line'), results))
+ for section, results in
+ itertools.groupby(section_pairs, operator.itemgetter('section'))
+ }
+ return cls._convert_egg_info_reqs_to_simple_reqs(sections)
+
+ @staticmethod
+ def _read_sections(lines):
+ section = None
+ for line in filter(None, lines):
+ section_match = re.match(r'\[(.*)\]$', line)
+ if section_match:
+ section = section_match.group(1)
+ continue
+ yield locals()
+
+ @staticmethod
+ def _convert_egg_info_reqs_to_simple_reqs(sections):
+ """
+ Historically, setuptools would solicit and store 'extra'
+ requirements, including those with environment markers,
+ in separate sections. More modern tools expect each
+ dependency to be defined separately, with any relevant
+ extras and environment markers attached directly to that
+ requirement. This method converts the former to the
+ latter. See _test_deps_from_requires_text for an example.
+ """
+ def make_condition(name):
+ return name and 'extra == "{name}"'.format(name=name)
+
+ def parse_condition(section):
+ section = section or ''
+ extra, sep, markers = section.partition(':')
+ if extra and markers:
+ markers = '({markers})'.format(markers=markers)
+ conditions = list(filter(None, [markers, make_condition(extra)]))
+ return '; ' + ' and '.join(conditions) if conditions else ''
+
+ for section, deps in sections.items():
+ for dep in deps:
+ yield dep + parse_condition(section)
+
+
+class DistributionFinder(MetaPathFinder):
+ """
+ A MetaPathFinder capable of discovering installed distributions.
+ """
+
+ class Context:
+ """
+ Keyword arguments presented by the caller to
+ ``distributions()`` or ``Distribution.discover()``
+ to narrow the scope of a search for distributions
+ in all DistributionFinders.
+
+ Each DistributionFinder may expect any parameters
+ and should attempt to honor the canonical
+ parameters defined below when appropriate.
+ """
+
+ name = None
+ """
+ Specific name for which a distribution finder should match.
+ A name of ``None`` matches all distributions.
+ """
+
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ @property
+ def path(self):
+ """
+ The path that a distribution finder should search.
+
+ Typically refers to Python package paths and defaults
+ to ``sys.path``.
+ """
+ return vars(self).get('path', sys.path)
+
+ @abc.abstractmethod
+ def find_distributions(self, context=Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching the ``context``,
+ a DistributionFinder.Context instance.
+ """
+
+
+class FastPath:
+ """
+ Micro-optimized class for searching a path for
+ children.
+ """
+
+ def __init__(self, root):
+ self.root = str(root)
+ self.base = os.path.basename(self.root).lower()
+
+ def joinpath(self, child):
+ return pathlib.Path(self.root, child)
+
+ def children(self):
+ with suppress(Exception):
+ return os.listdir(self.root or '')
+ with suppress(Exception):
+ return self.zip_children()
+ return []
+
+ def zip_children(self):
+ zip_path = zipp.Path(self.root)
+ names = zip_path.root.namelist()
+ self.joinpath = zip_path.joinpath
+
+ return unique_ordered(
+ child.split(posixpath.sep, 1)[0]
+ for child in names
+ )
+
+ def is_egg(self, search):
+ base = self.base
+ return (
+ base == search.versionless_egg_name
+ or base.startswith(search.prefix)
+ and base.endswith('.egg'))
+
+ def search(self, name):
+ for child in self.children():
+ n_low = child.lower()
+ if (n_low in name.exact_matches
+ or n_low.replace('.', '_').startswith(name.prefix)
+ and n_low.endswith(name.suffixes)
+ # legacy case:
+ or self.is_egg(name) and n_low == 'egg-info'):
+ yield self.joinpath(child)
+
+
+class Prepared:
+ """
+ A prepared search for metadata on a possibly-named package.
+ """
+ normalized = ''
+ prefix = ''
+ suffixes = '.dist-info', '.egg-info'
+ exact_matches = [''][:0]
+ versionless_egg_name = ''
+
+ def __init__(self, name):
+ self.name = name
+ if name is None:
+ return
+ self.normalized = self.normalize(name)
+ self.prefix = self.normalized + '-'
+ self.exact_matches = [
+ self.normalized + suffix for suffix in self.suffixes]
+ self.versionless_egg_name = self.normalized + '.egg'
+
+ @staticmethod
+ def normalize(name):
+ """
+ PEP 503 normalization plus dashes as underscores.
+ """
+ return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_')
+
+
+@install
+class MetadataPathFinder(NullFinder, DistributionFinder):
+ """A degenerate finder for distribution packages on the file system.
+
+ This finder supplies only a find_distributions() method for versions
+ of Python that do not have a PathFinder find_distributions().
+ """
+
+ def find_distributions(self, context=DistributionFinder.Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching ``context.name``
+ (or all names if ``None`` indicated) along the paths in the list
+ of directories ``context.path``.
+ """
+ found = self._search_paths(context.name, context.path)
+ return map(PathDistribution, found)
+
+ @classmethod
+ def _search_paths(cls, name, paths):
+ """Find metadata directories in paths heuristically."""
+ return itertools.chain.from_iterable(
+ path.search(Prepared(name))
+ for path in map(FastPath, paths)
+ )
+
+
+class PathDistribution(Distribution):
+ def __init__(self, path):
+ """Construct a distribution from a path to the metadata directory.
+
+ :param path: A pathlib.Path or similar object supporting
+ .joinpath(), __div__, .parent, and .read_text().
+ """
+ self._path = path
+
+ def read_text(self, filename):
+ with suppress(FileNotFoundError, IsADirectoryError, KeyError,
+ NotADirectoryError, PermissionError):
+ return self._path.joinpath(filename).read_text(encoding='utf-8')
+ read_text.__doc__ = Distribution.read_text.__doc__
+
+ def locate_file(self, path):
+ return self._path.parent / path
+
+
+def distribution(distribution_name):
+ """Get the ``Distribution`` instance for the named package.
+
+ :param distribution_name: The name of the distribution package as a string.
+ :return: A ``Distribution`` instance (or subclass thereof).
+ """
+ return Distribution.from_name(distribution_name)
+
+
+def distributions(**kwargs):
+ """Get all ``Distribution`` instances in the current environment.
+
+ :return: An iterable of ``Distribution`` instances.
+ """
+ return Distribution.discover(**kwargs)
+
+
+def metadata(distribution_name):
+ """Get the metadata for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: An email.Message containing the parsed metadata.
+ """
+ return Distribution.from_name(distribution_name).metadata
+
+
+def version(distribution_name):
+ """Get the version string for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: The version string for the package as defined in the package's
+ "Version" metadata key.
+ """
+ return distribution(distribution_name).version
+
+
+def entry_points():
+ """Return EntryPoint objects for all installed packages.
+
+ :return: EntryPoint objects for all installed packages.
+ """
+ eps = itertools.chain.from_iterable(
+ dist.entry_points for dist in distributions())
+ by_group = operator.attrgetter('group')
+ ordered = sorted(eps, key=by_group)
+ grouped = itertools.groupby(ordered, by_group)
+ return {
+ group: tuple(eps)
+ for group, eps in grouped
+ }
+
+
+def files(distribution_name):
+ """Return a list of files for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: List of files composing the distribution.
+ """
+ return distribution(distribution_name).files
+
+
+def requires(distribution_name):
+ """
+ Return a list of requirements for the named package.
+
+ :return: An iterator of requirements, suitable for
+ packaging.requirement.Requirement.
+ """
+ return distribution(distribution_name).requires
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/importlib_metadata/_compat.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/importlib_metadata/_compat.py
new file mode 100644
index 0000000000..303d4a22e8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/importlib_metadata/_compat.py
@@ -0,0 +1,152 @@
+from __future__ import absolute_import, unicode_literals
+
+import io
+import abc
+import sys
+import email
+
+
+if sys.version_info > (3,): # pragma: nocover
+ import builtins
+ from configparser import ConfigParser
+ import contextlib
+ FileNotFoundError = builtins.FileNotFoundError
+ IsADirectoryError = builtins.IsADirectoryError
+ NotADirectoryError = builtins.NotADirectoryError
+ PermissionError = builtins.PermissionError
+ map = builtins.map
+ from itertools import filterfalse
+else: # pragma: nocover
+ from backports.configparser import ConfigParser
+ from itertools import imap as map # type: ignore
+ from itertools import ifilterfalse as filterfalse
+ import contextlib2 as contextlib
+ FileNotFoundError = IOError, OSError
+ IsADirectoryError = IOError, OSError
+ NotADirectoryError = IOError, OSError
+ PermissionError = IOError, OSError
+
+str = type('')
+
+suppress = contextlib.suppress
+
+if sys.version_info > (3, 5): # pragma: nocover
+ import pathlib
+else: # pragma: nocover
+ import pathlib2 as pathlib
+
+try:
+ ModuleNotFoundError = builtins.ModuleNotFoundError
+except (NameError, AttributeError): # pragma: nocover
+ ModuleNotFoundError = ImportError # type: ignore
+
+
+if sys.version_info >= (3,): # pragma: nocover
+ from importlib.abc import MetaPathFinder
+else: # pragma: nocover
+ class MetaPathFinder(object):
+ __metaclass__ = abc.ABCMeta
+
+
+__metaclass__ = type
+__all__ = [
+ 'install', 'NullFinder', 'MetaPathFinder', 'ModuleNotFoundError',
+ 'pathlib', 'ConfigParser', 'map', 'suppress', 'FileNotFoundError',
+ 'NotADirectoryError', 'email_message_from_string',
+ ]
+
+
+def install(cls):
+ """
+ Class decorator for installation on sys.meta_path.
+
+ Adds the backport DistributionFinder to sys.meta_path and
+ attempts to disable the finder functionality of the stdlib
+ DistributionFinder.
+ """
+ sys.meta_path.append(cls())
+ disable_stdlib_finder()
+ return cls
+
+
+def disable_stdlib_finder():
+ """
+ Give the backport primacy for discovering path-based distributions
+ by monkey-patching the stdlib O_O.
+
+ See #91 for more background on the rationale for this sketchy
+ behavior.
+ """
+ def matches(finder):
+ return (
+ getattr(finder, '__module__', None) == '_frozen_importlib_external'
+ and hasattr(finder, 'find_distributions')
+ )
+ for finder in filter(matches, sys.meta_path): # pragma: nocover
+ del finder.find_distributions
+
+
+class NullFinder:
+ """
+ A "Finder" (aka "MetaClassFinder") that never finds any modules,
+ but may find distributions.
+ """
+ @staticmethod
+ def find_spec(*args, **kwargs):
+ return None
+
+ # In Python 2, the import system requires finders
+ # to have a find_module() method, but this usage
+ # is deprecated in Python 3 in favor of find_spec().
+ # For the purposes of this finder (i.e. being present
+ # on sys.meta_path but having no other import
+ # system functionality), the two methods are identical.
+ find_module = find_spec
+
+
+def py2_message_from_string(text): # nocoverpy3
+ # Work around https://bugs.python.org/issue25545 where
+ # email.message_from_string cannot handle Unicode on Python 2.
+ io_buffer = io.StringIO(text)
+ return email.message_from_file(io_buffer)
+
+
+email_message_from_string = (
+ py2_message_from_string
+ if sys.version_info < (3,) else
+ email.message_from_string
+ )
+
+
+class PyPy_repr:
+ """
+ Override repr for EntryPoint objects on PyPy to avoid __iter__ access.
+ Ref #97, #102.
+ """
+ affected = hasattr(sys, 'pypy_version_info')
+
+ def __compat_repr__(self): # pragma: nocover
+ def make_param(name):
+ value = getattr(self, name)
+ return '{name}={value!r}'.format(**locals())
+ params = ', '.join(map(make_param, self._fields))
+ return 'EntryPoint({params})'.format(**locals())
+
+ if affected: # pragma: nocover
+ __repr__ = __compat_repr__
+ del affected
+
+
+# from itertools recipes
+def unique_everseen(iterable): # pragma: nocover
+ "List unique elements, preserving order. Remember all elements ever seen."
+ seen = set()
+ seen_add = seen.add
+
+ for element in filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+
+
+unique_ordered = (
+ unique_everseen if sys.version_info < (3, 7) else dict.fromkeys)
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/prepare/example/example/__init__.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/prepare/example/example/__init__.py
new file mode 100644
index 0000000000..ba73b74339
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/prepare/example/example/__init__.py
@@ -0,0 +1,2 @@
+def main():
+ return 'example'
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/prepare/example/setup.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/prepare/example/setup.py
new file mode 100644
index 0000000000..8663ad389a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/prepare/example/setup.py
@@ -0,0 +1,10 @@
+from setuptools import setup
+setup(
+ name='example',
+ version='21.12',
+ license='Apache Software License',
+ packages=['example'],
+ entry_points={
+ 'console_scripts': ['example = example:main', 'Example=example:main'],
+ },
+ )
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/pyproject.toml b/testing/web-platform/tests/tools/third_party/importlib_metadata/pyproject.toml
new file mode 100644
index 0000000000..e5c3a6a455
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/pyproject.toml
@@ -0,0 +1,2 @@
+[build-system]
+requires = ["setuptools>=30.3", "wheel", "setuptools_scm"]
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/setup.cfg b/testing/web-platform/tests/tools/third_party/importlib_metadata/setup.cfg
new file mode 100644
index 0000000000..fa10c8d358
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/setup.cfg
@@ -0,0 +1,47 @@
+[metadata]
+name = importlib_metadata
+author = Jason R. Coombs
+author_email = jaraco@jaraco.com
+url = http://importlib-metadata.readthedocs.io/
+description = Read metadata from Python packages
+long_description = file: README.rst
+license = Apache Software License
+classifiers =
+ Development Status :: 3 - Alpha
+ Intended Audience :: Developers
+ License :: OSI Approved :: Apache Software License
+ Topic :: Software Development :: Libraries
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 2
+
+[options]
+python_requires = >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*
+setup_requires = setuptools-scm
+install_requires =
+ zipp>=0.5
+ pathlib2; python_version < '3'
+ contextlib2; python_version < '3'
+ configparser>=3.5; python_version < '3'
+packages = importlib_metadata
+
+[mypy]
+ignore_missing_imports = True
+# XXX We really should use the default `True` value here, but it causes too
+# many warnings, so for now just disable it. E.g. a package's __spec__ is
+# defined as Optional[ModuleSpec] so we can't just blindly pull attributes off
+# of that attribute. The real fix is to add conditionals or asserts proving
+# that package.__spec__ is not None.
+strict_optional = False
+
+[wheel]
+universal=1
+
+[options.extras_require]
+testing =
+ importlib_resources>=1.3; python_version < "3.9"
+ packaging
+ pep517
+ unittest2; python_version < "3"
+docs =
+ sphinx
+ rst.linker
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/setup.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/setup.py
new file mode 100644
index 0000000000..d5d43d7c93
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/setup.py
@@ -0,0 +1,3 @@
+from setuptools import setup
+
+setup(use_scm_version=True)
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/__init__.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/data/__init__.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/data/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/data/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/data/example-21.12-py3-none-any.whl b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/data/example-21.12-py3-none-any.whl
new file mode 100644
index 0000000000..641ab07f7a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/data/example-21.12-py3-none-any.whl
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/data/example-21.12-py3.6.egg b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/data/example-21.12-py3.6.egg
new file mode 100644
index 0000000000..cdb298a19b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/data/example-21.12-py3.6.egg
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/fixtures.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/fixtures.py
new file mode 100644
index 0000000000..0d834c6580
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/fixtures.py
@@ -0,0 +1,263 @@
+from __future__ import unicode_literals
+
+import os
+import sys
+import shutil
+import tempfile
+import textwrap
+
+from .py39compat import FS_NONASCII
+
+from importlib_metadata._compat import pathlib, contextlib
+
+
+__metaclass__ = type
+
+
+@contextlib.contextmanager
+def tempdir():
+ tmpdir = tempfile.mkdtemp()
+ try:
+ yield pathlib.Path(tmpdir)
+ finally:
+ shutil.rmtree(tmpdir)
+
+
+@contextlib.contextmanager
+def save_cwd():
+ orig = os.getcwd()
+ try:
+ yield
+ finally:
+ os.chdir(orig)
+
+
+@contextlib.contextmanager
+def tempdir_as_cwd():
+ with tempdir() as tmp:
+ with save_cwd():
+ os.chdir(str(tmp))
+ yield tmp
+
+
+@contextlib.contextmanager
+def install_finder(finder):
+ sys.meta_path.append(finder)
+ try:
+ yield
+ finally:
+ sys.meta_path.remove(finder)
+
+
+class Fixtures:
+ def setUp(self):
+ self.fixtures = contextlib.ExitStack()
+ self.addCleanup(self.fixtures.close)
+
+
+class SiteDir(Fixtures):
+ def setUp(self):
+ super(SiteDir, self).setUp()
+ self.site_dir = self.fixtures.enter_context(tempdir())
+
+
+class OnSysPath(Fixtures):
+ @staticmethod
+ @contextlib.contextmanager
+ def add_sys_path(dir):
+ sys.path[:0] = [str(dir)]
+ try:
+ yield
+ finally:
+ sys.path.remove(str(dir))
+
+ def setUp(self):
+ super(OnSysPath, self).setUp()
+ self.fixtures.enter_context(self.add_sys_path(self.site_dir))
+
+
+class DistInfoPkg(OnSysPath, SiteDir):
+ files = {
+ "distinfo_pkg-1.0.0.dist-info": {
+ "METADATA": """
+ Name: distinfo-pkg
+ Author: Steven Ma
+ Version: 1.0.0
+ Requires-Dist: wheel >= 1.0
+ Requires-Dist: pytest; extra == 'test'
+ """,
+ "RECORD": "mod.py,sha256=abc,20\n",
+ "entry_points.txt": """
+ [entries]
+ main = mod:main
+ ns:sub = mod:main
+ """
+ },
+ "mod.py": """
+ def main():
+ print("hello world")
+ """,
+ }
+
+ def setUp(self):
+ super(DistInfoPkg, self).setUp()
+ build_files(DistInfoPkg.files, self.site_dir)
+
+
+class DistInfoPkgWithDot(OnSysPath, SiteDir):
+ files = {
+ "pkg_dot-1.0.0.dist-info": {
+ "METADATA": """
+ Name: pkg.dot
+ Version: 1.0.0
+ """,
+ },
+ }
+
+ def setUp(self):
+ super(DistInfoPkgWithDot, self).setUp()
+ build_files(DistInfoPkgWithDot.files, self.site_dir)
+
+
+class DistInfoPkgWithDotLegacy(OnSysPath, SiteDir):
+ files = {
+ "pkg.dot-1.0.0.dist-info": {
+ "METADATA": """
+ Name: pkg.dot
+ Version: 1.0.0
+ """,
+ },
+ }
+
+ def setUp(self):
+ super(DistInfoPkgWithDotLegacy, self).setUp()
+ build_files(DistInfoPkgWithDotLegacy.files, self.site_dir)
+
+
+class DistInfoPkgOffPath(SiteDir):
+ def setUp(self):
+ super(DistInfoPkgOffPath, self).setUp()
+ build_files(DistInfoPkg.files, self.site_dir)
+
+
+class EggInfoPkg(OnSysPath, SiteDir):
+ files = {
+ "egginfo_pkg.egg-info": {
+ "PKG-INFO": """
+ Name: egginfo-pkg
+ Author: Steven Ma
+ License: Unknown
+ Version: 1.0.0
+ Classifier: Intended Audience :: Developers
+ Classifier: Topic :: Software Development :: Libraries
+ """,
+ "SOURCES.txt": """
+ mod.py
+ egginfo_pkg.egg-info/top_level.txt
+ """,
+ "entry_points.txt": """
+ [entries]
+ main = mod:main
+ """,
+ "requires.txt": """
+ wheel >= 1.0; python_version >= "2.7"
+ [test]
+ pytest
+ """,
+ "top_level.txt": "mod\n"
+ },
+ "mod.py": """
+ def main():
+ print("hello world")
+ """,
+ }
+
+ def setUp(self):
+ super(EggInfoPkg, self).setUp()
+ build_files(EggInfoPkg.files, prefix=self.site_dir)
+
+
+class EggInfoFile(OnSysPath, SiteDir):
+ files = {
+ "egginfo_file.egg-info": """
+ Metadata-Version: 1.0
+ Name: egginfo_file
+ Version: 0.1
+ Summary: An example package
+ Home-page: www.example.com
+ Author: Eric Haffa-Vee
+ Author-email: eric@example.coms
+ License: UNKNOWN
+ Description: UNKNOWN
+ Platform: UNKNOWN
+ """,
+ }
+
+ def setUp(self):
+ super(EggInfoFile, self).setUp()
+ build_files(EggInfoFile.files, prefix=self.site_dir)
+
+
+class LocalPackage:
+ files = {
+ "setup.py": """
+ import setuptools
+ setuptools.setup(name="local-pkg", version="2.0.1")
+ """,
+ }
+
+ def setUp(self):
+ self.fixtures = contextlib.ExitStack()
+ self.addCleanup(self.fixtures.close)
+ self.fixtures.enter_context(tempdir_as_cwd())
+ build_files(self.files)
+
+
+def build_files(file_defs, prefix=pathlib.Path()):
+ """Build a set of files/directories, as described by the
+
+ file_defs dictionary. Each key/value pair in the dictionary is
+ interpreted as a filename/contents pair. If the contents value is a
+ dictionary, a directory is created, and the dictionary interpreted
+ as the files within it, recursively.
+
+ For example:
+
+ {"README.txt": "A README file",
+ "foo": {
+ "__init__.py": "",
+ "bar": {
+ "__init__.py": "",
+ },
+ "baz.py": "# Some code",
+ }
+ }
+ """
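+    # Illustrative call (a sketch mirroring the fixture classes above, not
+    # exercised here directly):
+    #   build_files(DistInfoPkg.files, prefix=site_dir)
+    # creates distinfo_pkg-1.0.0.dist-info/{METADATA,RECORD,entry_points.txt}
+    # and mod.py under the given directory.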
+ for name, contents in file_defs.items():
+ full_name = prefix / name
+ if isinstance(contents, dict):
+ full_name.mkdir()
+ build_files(contents, prefix=full_name)
+ else:
+ if isinstance(contents, bytes):
+ with full_name.open('wb') as f:
+ f.write(contents)
+ else:
+ with full_name.open('w') as f:
+ f.write(DALS(contents))
+
+
+class FileBuilder:
+ def unicode_filename(self):
+ return FS_NONASCII or \
+ self.skip("File system does not support non-ascii.")
+
+
+def DALS(str):
+ "Dedent and left-strip"
+ return textwrap.dedent(str).lstrip()
+
+
+class NullFinder:
+ def find_module(self, name):
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/py39compat.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/py39compat.py
new file mode 100644
index 0000000000..a175d4c355
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/py39compat.py
@@ -0,0 +1,4 @@
+try:
+ from test.support.os_helpers import FS_NONASCII
+except ImportError:
+ from test.support import FS_NONASCII # noqa
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_api.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_api.py
new file mode 100644
index 0000000000..efa9799642
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_api.py
@@ -0,0 +1,196 @@
+import re
+import textwrap
+
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
+from . import fixtures
+from importlib_metadata import (
+ Distribution, PackageNotFoundError, distribution,
+ entry_points, files, metadata, requires, version,
+ )
+
+try:
+ from collections.abc import Iterator
+except ImportError:
+ from collections import Iterator # noqa: F401
+
+try:
+ from builtins import str as text
+except ImportError:
+ from __builtin__ import unicode as text
+
+
+class APITests(
+ fixtures.EggInfoPkg,
+ fixtures.DistInfoPkg,
+ fixtures.DistInfoPkgWithDot,
+ fixtures.EggInfoFile,
+ unittest.TestCase):
+
+ version_pattern = r'\d+\.\d+(\.\d)?'
+
+ def test_retrieves_version_of_self(self):
+ pkg_version = version('egginfo-pkg')
+ assert isinstance(pkg_version, text)
+ assert re.match(self.version_pattern, pkg_version)
+
+ def test_retrieves_version_of_distinfo_pkg(self):
+ pkg_version = version('distinfo-pkg')
+ assert isinstance(pkg_version, text)
+ assert re.match(self.version_pattern, pkg_version)
+
+ def test_for_name_does_not_exist(self):
+ with self.assertRaises(PackageNotFoundError):
+ distribution('does-not-exist')
+
+ def test_name_normalization(self):
+ names = 'pkg.dot', 'pkg_dot', 'pkg-dot', 'pkg..dot', 'Pkg.Dot'
+ for name in names:
+ with self.subTest(name):
+ assert distribution(name).metadata['Name'] == 'pkg.dot'
+
+ def test_for_top_level(self):
+ self.assertEqual(
+ distribution('egginfo-pkg').read_text('top_level.txt').strip(),
+ 'mod')
+
+ def test_read_text(self):
+ top_level = [
+ path for path in files('egginfo-pkg')
+ if path.name == 'top_level.txt'
+ ][0]
+ self.assertEqual(top_level.read_text(), 'mod\n')
+
+ def test_entry_points(self):
+ entries = dict(entry_points()['entries'])
+ ep = entries['main']
+ self.assertEqual(ep.value, 'mod:main')
+ self.assertEqual(ep.extras, [])
+
+ def test_metadata_for_this_package(self):
+ md = metadata('egginfo-pkg')
+ assert md['author'] == 'Steven Ma'
+ assert md['LICENSE'] == 'Unknown'
+ assert md['Name'] == 'egginfo-pkg'
+ classifiers = md.get_all('Classifier')
+ assert 'Topic :: Software Development :: Libraries' in classifiers
+
+ def test_importlib_metadata_version(self):
+ resolved = version('importlib-metadata')
+ assert re.match(self.version_pattern, resolved)
+
+ @staticmethod
+ def _test_files(files):
+ root = files[0].root
+ for file in files:
+ assert file.root == root
+ assert not file.hash or file.hash.value
+ assert not file.hash or file.hash.mode == 'sha256'
+ assert not file.size or file.size >= 0
+ assert file.locate().exists()
+ assert isinstance(file.read_binary(), bytes)
+ if file.name.endswith('.py'):
+ file.read_text()
+
+ def test_file_hash_repr(self):
+ try:
+ assertRegex = self.assertRegex
+ except AttributeError:
+ # Python 2
+ assertRegex = self.assertRegexpMatches
+
+ util = [
+ p for p in files('distinfo-pkg')
+ if p.name == 'mod.py'
+ ][0]
+ assertRegex(
+ repr(util.hash),
+ '<FileHash mode: sha256 value: .*>')
+
+ def test_files_dist_info(self):
+ self._test_files(files('distinfo-pkg'))
+
+ def test_files_egg_info(self):
+ self._test_files(files('egginfo-pkg'))
+
+ def test_version_egg_info_file(self):
+ self.assertEqual(version('egginfo-file'), '0.1')
+
+ def test_requires_egg_info_file(self):
+ requirements = requires('egginfo-file')
+ self.assertIsNone(requirements)
+
+ def test_requires_egg_info(self):
+ deps = requires('egginfo-pkg')
+ assert len(deps) == 2
+ assert any(
+ dep == 'wheel >= 1.0; python_version >= "2.7"'
+ for dep in deps
+ )
+
+ def test_requires_dist_info(self):
+ deps = requires('distinfo-pkg')
+ assert len(deps) == 2
+ assert all(deps)
+ assert 'wheel >= 1.0' in deps
+ assert "pytest; extra == 'test'" in deps
+
+ def test_more_complex_deps_requires_text(self):
+ requires = textwrap.dedent("""
+ dep1
+ dep2
+
+ [:python_version < "3"]
+ dep3
+
+ [extra1]
+ dep4
+
+ [extra2:python_version < "3"]
+ dep5
+ """)
+ deps = sorted(Distribution._deps_from_requires_text(requires))
+ expected = [
+ 'dep1',
+ 'dep2',
+ 'dep3; python_version < "3"',
+ 'dep4; extra == "extra1"',
+ 'dep5; (python_version < "3") and extra == "extra2"',
+ ]
+ # It's important that the environment marker expression be
+ # wrapped in parentheses to avoid the following 'and' binding more
+ # tightly than some other part of the environment expression.
+
+ assert deps == expected
+
+
+class LegacyDots(fixtures.DistInfoPkgWithDotLegacy, unittest.TestCase):
+ def test_name_normalization(self):
+ names = 'pkg.dot', 'pkg_dot', 'pkg-dot', 'pkg..dot', 'Pkg.Dot'
+ for name in names:
+ with self.subTest(name):
+ assert distribution(name).metadata['Name'] == 'pkg.dot'
+
+
+class OffSysPathTests(fixtures.DistInfoPkgOffPath, unittest.TestCase):
+ def test_find_distributions_specified_path(self):
+ dists = Distribution.discover(path=[str(self.site_dir)])
+ assert any(
+ dist.metadata['Name'] == 'distinfo-pkg'
+ for dist in dists
+ )
+
+ def test_distribution_at_pathlib(self):
+ """Demonstrate how to load metadata direct from a directory.
+ """
+ dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info'
+ dist = Distribution.at(dist_info_path)
+ assert dist.version == '1.0.0'
+
+ def test_distribution_at_str(self):
+ dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info'
+ dist = Distribution.at(str(dist_info_path))
+ assert dist.version == '1.0.0'
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_integration.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_integration.py
new file mode 100644
index 0000000000..377574c448
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_integration.py
@@ -0,0 +1,54 @@
+# coding: utf-8
+
+from __future__ import unicode_literals
+
+import unittest
+import packaging.requirements
+import packaging.version
+
+from . import fixtures
+from importlib_metadata import (
+ Distribution,
+ _compat,
+ version,
+ )
+
+
+class IntegrationTests(fixtures.DistInfoPkg, unittest.TestCase):
+
+ def test_package_spec_installed(self):
+ """
+ Illustrate the recommended procedure to determine if
+ a specified version of a package is installed.
+ """
+ def is_installed(package_spec):
+ req = packaging.requirements.Requirement(package_spec)
+ return version(req.name) in req.specifier
+
+ assert is_installed('distinfo-pkg==1.0')
+ assert is_installed('distinfo-pkg>=1.0,<2.0')
+ assert not is_installed('distinfo-pkg<1.0')
+
+
+class FinderTests(fixtures.Fixtures, unittest.TestCase):
+
+ def test_finder_without_module(self):
+ class ModuleFreeFinder(fixtures.NullFinder):
+ """
+ A finder without an __module__ attribute
+ """
+ def __getattribute__(self, name):
+ if name == '__module__':
+ raise AttributeError(name)
+ return super().__getattribute__(name)
+
+ self.fixtures.enter_context(
+ fixtures.install_finder(ModuleFreeFinder()))
+ _compat.disable_stdlib_finder()
+
+
+class LocalProjectTests(fixtures.LocalPackage, unittest.TestCase):
+ def test_find_local(self):
+ dist = Distribution._local()
+ assert dist.metadata['Name'] == 'local-pkg'
+ assert dist.version == '2.0.1'
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_main.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_main.py
new file mode 100644
index 0000000000..847750bc30
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_main.py
@@ -0,0 +1,285 @@
+# coding: utf-8
+from __future__ import unicode_literals
+
+import re
+import json
+import pickle
+import textwrap
+import unittest
+import importlib
+import importlib_metadata
+import pyfakefs.fake_filesystem_unittest as ffs
+
+from . import fixtures
+from importlib_metadata import (
+ Distribution, EntryPoint, MetadataPathFinder,
+ PackageNotFoundError, distributions,
+ entry_points, metadata, version,
+ )
+
+try:
+ from builtins import str as text
+except ImportError:
+ from __builtin__ import unicode as text
+
+
+class BasicTests(fixtures.DistInfoPkg, unittest.TestCase):
+ version_pattern = r'\d+\.\d+(\.\d)?'
+
+ def test_retrieves_version_of_self(self):
+ dist = Distribution.from_name('distinfo-pkg')
+ assert isinstance(dist.version, text)
+ assert re.match(self.version_pattern, dist.version)
+
+ def test_for_name_does_not_exist(self):
+ with self.assertRaises(PackageNotFoundError):
+ Distribution.from_name('does-not-exist')
+
+ def test_package_not_found_mentions_metadata(self):
+ """
+ When a package is not found, that could indicate that the
+        package is not installed or that it is installed without
+ metadata. Ensure the exception mentions metadata to help
+ guide users toward the cause. See #124.
+ """
+ with self.assertRaises(PackageNotFoundError) as ctx:
+ Distribution.from_name('does-not-exist')
+
+ assert "metadata" in str(ctx.exception)
+
+ def test_new_style_classes(self):
+ self.assertIsInstance(Distribution, type)
+ self.assertIsInstance(MetadataPathFinder, type)
+
+
+class ImportTests(fixtures.DistInfoPkg, unittest.TestCase):
+ def test_import_nonexistent_module(self):
+ # Ensure that the MetadataPathFinder does not crash an import of a
+ # non-existent module.
+ with self.assertRaises(ImportError):
+ importlib.import_module('does_not_exist')
+
+ def test_resolve(self):
+ entries = dict(entry_points()['entries'])
+ ep = entries['main']
+ self.assertEqual(ep.load().__name__, "main")
+
+ def test_entrypoint_with_colon_in_name(self):
+ entries = dict(entry_points()['entries'])
+ ep = entries['ns:sub']
+ self.assertEqual(ep.value, 'mod:main')
+
+ def test_resolve_without_attr(self):
+ ep = EntryPoint(
+ name='ep',
+ value='importlib_metadata',
+ group='grp',
+ )
+ assert ep.load() is importlib_metadata
+
+
+class NameNormalizationTests(
+ fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
+ @staticmethod
+ def pkg_with_dashes(site_dir):
+ """
+ Create minimal metadata for a package with dashes
+ in the name (and thus underscores in the filename).
+ """
+ metadata_dir = site_dir / 'my_pkg.dist-info'
+ metadata_dir.mkdir()
+ metadata = metadata_dir / 'METADATA'
+ with metadata.open('w') as strm:
+ strm.write('Version: 1.0\n')
+ return 'my-pkg'
+
+ def test_dashes_in_dist_name_found_as_underscores(self):
+ """
+ For a package with a dash in the name, the dist-info metadata
+ uses underscores in the name. Ensure the metadata loads.
+ """
+ pkg_name = self.pkg_with_dashes(self.site_dir)
+ assert version(pkg_name) == '1.0'
+
+ @staticmethod
+ def pkg_with_mixed_case(site_dir):
+ """
+ Create minimal metadata for a package with mixed case
+ in the name.
+ """
+ metadata_dir = site_dir / 'CherryPy.dist-info'
+ metadata_dir.mkdir()
+ metadata = metadata_dir / 'METADATA'
+ with metadata.open('w') as strm:
+ strm.write('Version: 1.0\n')
+ return 'CherryPy'
+
+ def test_dist_name_found_as_any_case(self):
+ """
+ Ensure the metadata loads when queried with any case.
+ """
+ pkg_name = self.pkg_with_mixed_case(self.site_dir)
+ assert version(pkg_name) == '1.0'
+ assert version(pkg_name.lower()) == '1.0'
+ assert version(pkg_name.upper()) == '1.0'
+
+
+class NonASCIITests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
+ @staticmethod
+ def pkg_with_non_ascii_description(site_dir):
+ """
+ Create minimal metadata for a package with non-ASCII in
+ the description.
+ """
+ metadata_dir = site_dir / 'portend.dist-info'
+ metadata_dir.mkdir()
+ metadata = metadata_dir / 'METADATA'
+ with metadata.open('w', encoding='utf-8') as fp:
+ fp.write('Description: pôrˈtend\n')
+ return 'portend'
+
+ @staticmethod
+ def pkg_with_non_ascii_description_egg_info(site_dir):
+ """
+ Create minimal metadata for an egg-info package with
+ non-ASCII in the description.
+ """
+ metadata_dir = site_dir / 'portend.dist-info'
+ metadata_dir.mkdir()
+ metadata = metadata_dir / 'METADATA'
+ with metadata.open('w', encoding='utf-8') as fp:
+ fp.write(textwrap.dedent("""
+ Name: portend
+
+ pôrˈtend
+ """).lstrip())
+ return 'portend'
+
+ def test_metadata_loads(self):
+ pkg_name = self.pkg_with_non_ascii_description(self.site_dir)
+ meta = metadata(pkg_name)
+ assert meta['Description'] == 'pôrˈtend'
+
+ def test_metadata_loads_egg_info(self):
+ pkg_name = self.pkg_with_non_ascii_description_egg_info(self.site_dir)
+ meta = metadata(pkg_name)
+ assert meta.get_payload() == 'pôrˈtend\n'
+
+
+class DiscoveryTests(fixtures.EggInfoPkg,
+ fixtures.DistInfoPkg,
+ unittest.TestCase):
+
+ def test_package_discovery(self):
+ dists = list(distributions())
+ assert all(
+ isinstance(dist, Distribution)
+ for dist in dists
+ )
+ assert any(
+ dist.metadata['Name'] == 'egginfo-pkg'
+ for dist in dists
+ )
+ assert any(
+ dist.metadata['Name'] == 'distinfo-pkg'
+ for dist in dists
+ )
+
+ def test_invalid_usage(self):
+ with self.assertRaises(ValueError):
+ list(distributions(context='something', name='else'))
+
+
+class DirectoryTest(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase):
+ def test_egg_info(self):
+ # make an `EGG-INFO` directory that's unrelated
+ self.site_dir.joinpath('EGG-INFO').mkdir()
+ # used to crash with `IsADirectoryError`
+ with self.assertRaises(PackageNotFoundError):
+ version('unknown-package')
+
+ def test_egg(self):
+ egg = self.site_dir.joinpath('foo-3.6.egg')
+ egg.mkdir()
+ with self.add_sys_path(egg):
+ with self.assertRaises(PackageNotFoundError):
+ version('foo')
+
+
+class MissingSysPath(fixtures.OnSysPath, unittest.TestCase):
+ site_dir = '/does-not-exist'
+
+ def test_discovery(self):
+ """
+ Discovering distributions should succeed even if
+ there is an invalid path on sys.path.
+ """
+ importlib_metadata.distributions()
+
+
+class InaccessibleSysPath(fixtures.OnSysPath, ffs.TestCase):
+ site_dir = '/access-denied'
+
+ def setUp(self):
+ super(InaccessibleSysPath, self).setUp()
+ self.setUpPyfakefs()
+ self.fs.create_dir(self.site_dir, perm_bits=000)
+
+ def test_discovery(self):
+ """
+ Discovering distributions should succeed even if
+ there is an invalid path on sys.path.
+ """
+ list(importlib_metadata.distributions())
+
+
+class TestEntryPoints(unittest.TestCase):
+ def __init__(self, *args):
+ super(TestEntryPoints, self).__init__(*args)
+ self.ep = importlib_metadata.EntryPoint('name', 'value', 'group')
+
+ def test_entry_point_pickleable(self):
+ revived = pickle.loads(pickle.dumps(self.ep))
+ assert revived == self.ep
+
+ def test_immutable(self):
+ """EntryPoints should be immutable"""
+ with self.assertRaises(AttributeError):
+ self.ep.name = 'badactor'
+
+ def test_repr(self):
+ assert 'EntryPoint' in repr(self.ep)
+ assert 'name=' in repr(self.ep)
+ assert "'name'" in repr(self.ep)
+
+ def test_hashable(self):
+ """EntryPoints should be hashable"""
+ hash(self.ep)
+
+ def test_json_dump(self):
+ """
+ json should not expect to be able to dump an EntryPoint
+ """
+ with self.assertRaises(Exception):
+ json.dumps(self.ep)
+
+ def test_module(self):
+ assert self.ep.module == 'value'
+
+ def test_attr(self):
+ assert self.ep.attr is None
+
+
+class FileSystem(
+ fixtures.OnSysPath, fixtures.SiteDir, fixtures.FileBuilder,
+ unittest.TestCase):
+ def test_unicode_dir_on_sys_path(self):
+ """
+ Ensure a Unicode subdirectory of a directory on sys.path
+ does not crash.
+ """
+ fixtures.build_files(
+ {self.unicode_filename(): {}},
+ prefix=self.site_dir,
+ )
+ list(distributions())
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_zip.py b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_zip.py
new file mode 100644
index 0000000000..5cebcd02f7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/tests/test_zip.py
@@ -0,0 +1,80 @@
+import sys
+import unittest
+
+from importlib_metadata import (
+ distribution, entry_points, files, PackageNotFoundError,
+ version, distributions,
+ )
+
+try:
+ from importlib import resources
+ getattr(resources, 'files')
+ getattr(resources, 'as_file')
+except (ImportError, AttributeError):
+ import importlib_resources as resources
+
+try:
+ from contextlib import ExitStack
+except ImportError:
+ from contextlib2 import ExitStack
+
+
+class TestZip(unittest.TestCase):
+ root = 'tests.data'
+
+ def _fixture_on_path(self, filename):
+ pkg_file = resources.files(self.root).joinpath(filename)
+ file = self.resources.enter_context(resources.as_file(pkg_file))
+ assert file.name.startswith('example-'), file.name
+ sys.path.insert(0, str(file))
+ self.resources.callback(sys.path.pop, 0)
+
+ def setUp(self):
+ # Find the path to the example-*.whl so we can add it to the front of
+ # sys.path, where we'll then try to find the metadata thereof.
+ self.resources = ExitStack()
+ self.addCleanup(self.resources.close)
+ self._fixture_on_path('example-21.12-py3-none-any.whl')
+
+ def test_zip_version(self):
+ self.assertEqual(version('example'), '21.12')
+
+ def test_zip_version_does_not_match(self):
+ with self.assertRaises(PackageNotFoundError):
+ version('definitely-not-installed')
+
+ def test_zip_entry_points(self):
+ scripts = dict(entry_points()['console_scripts'])
+ entry_point = scripts['example']
+ self.assertEqual(entry_point.value, 'example:main')
+ entry_point = scripts['Example']
+ self.assertEqual(entry_point.value, 'example:main')
+
+ def test_missing_metadata(self):
+ self.assertIsNone(distribution('example').read_text('does not exist'))
+
+ def test_case_insensitive(self):
+ self.assertEqual(version('Example'), '21.12')
+
+ def test_files(self):
+ for file in files('example'):
+ path = str(file.dist.locate_file(file))
+ assert '.whl/' in path, path
+
+ def test_one_distribution(self):
+ dists = list(distributions(path=sys.path[:1]))
+ assert len(dists) == 1
+
+
+class TestEgg(TestZip):
+ def setUp(self):
+ # Find the path to the example-*.egg so we can add it to the front of
+ # sys.path, where we'll then try to find the metadata thereof.
+ self.resources = ExitStack()
+ self.addCleanup(self.resources.close)
+ self._fixture_on_path('example-21.12-py3.6.egg')
+
+ def test_files(self):
+ for file in files('example'):
+ path = str(file.dist.locate_file(file))
+ assert '.egg/' in path, path
diff --git a/testing/web-platform/tests/tools/third_party/importlib_metadata/tox.ini b/testing/web-platform/tests/tools/third_party/importlib_metadata/tox.ini
new file mode 100644
index 0000000000..1f0e975783
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/importlib_metadata/tox.ini
@@ -0,0 +1,97 @@
+[tox]
+envlist = {py27,py35,py36,py37,py38}{,-cov,-diffcov},qa,docs,perf
+skip_missing_interpreters = True
+minversion = 3.2
+# Ensure that a late version of pip is used even on tox-venv.
+requires =
+ tox-pip-version>=0.0.6
+
+[testenv]
+pip_version = pip
+commands =
+ !cov,!diffcov: python -m unittest discover {posargs}
+ cov,diffcov: python -m coverage run {[coverage]rc} -m unittest discover {posargs}
+ cov,diffcov: python -m coverage combine {[coverage]rc}
+ cov: python -m coverage html {[coverage]rc}
+ cov: python -m coverage xml {[coverage]rc}
+ cov: python -m coverage report -m {[coverage]rc} --fail-under=100
+ diffcov: python -m coverage xml {[coverage]rc}
+ diffcov: diff-cover coverage.xml --html-report diffcov.html
+ diffcov: diff-cover coverage.xml --fail-under=100
+usedevelop = True
+passenv =
+ PYTHON*
+ LANG*
+ LC_*
+ PYV
+deps =
+ cov,diffcov: coverage>=4.5
+ diffcov: diff_cover
+ pyfakefs
+setenv =
+ cov: COVERAGE_PROCESS_START={[coverage]rcfile}
+ cov: COVERAGE_OPTIONS="-p"
+ cov: COVERAGE_FILE={toxinidir}/.coverage
+ py27: PYV=2
+ py35,py36,py37,py38: PYV=3
+ # workaround deprecation warnings in pip's vendored packages
+ PYTHONWARNINGS=ignore:Using or importing the ABCs:DeprecationWarning:pip._vendor
+extras =
+ testing
+
+
+[testenv:qa]
+basepython = python3.7
+commands =
+ python -m flake8 importlib_metadata
+ mypy importlib_metadata
+deps =
+ mypy
+ flake8
+ flufl.flake8
+extras =
+
+
+[testenv:docs]
+basepython = python3
+commands =
+ sphinx-build docs build/sphinx/html
+extras =
+ docs
+
+
+[testenv:perf]
+use_develop = False
+deps =
+ ipython
+commands =
+ python -m timeit -s 'import importlib_metadata' -- 'importlib_metadata.distribution("ipython")'
+
+
+[testenv:release]
+basepython = python3
+deps =
+ twine
+ wheel
+ setuptools
+ keyring
+ setuptools_scm
+passenv =
+ TWINE_PASSWORD
+setenv =
+ TWINE_USERNAME = {env:TWINE_USERNAME:__token__}
+commands =
+ python setup.py sdist bdist_wheel
+ python -m twine {posargs} upload dist/*
+
+
+[coverage]
+rcfile = {toxinidir}/coverage.ini
+rc = --rcfile="{[coverage]rcfile}"
+
+
+[flake8]
+hang-closing = True
+jobs = 1
+max-line-length = 79
+enable-extensions = U4
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/.gitignore b/testing/web-platform/tests/tools/third_party/iniconfig/.gitignore
new file mode 100644
index 0000000000..89e6234380
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/.gitignore
@@ -0,0 +1,8 @@
+*.egg-info
+*.pyc
+.cache/
+.eggs/
+build/
+dist/
+__pycache__
+.tox/
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/.landscape.yml b/testing/web-platform/tests/tools/third_party/iniconfig/.landscape.yml
new file mode 100644
index 0000000000..5212ddea41
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/.landscape.yml
@@ -0,0 +1,5 @@
+pep8:
+ full: true
+python-targets:
+ - 2
+ - 3
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/.travis.yml b/testing/web-platform/tests/tools/third_party/iniconfig/.travis.yml
new file mode 100644
index 0000000000..e3fee06c9f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/.travis.yml
@@ -0,0 +1,18 @@
+language: python
+python:
+- '2.7'
+- '3.4'
+- '3.5'
+- nightly
+- pypy
+install: pip install setuptools_scm tox
+script: tox -e py
+deploy:
+ provider: pypi
+ user: ronny
+ password:
+ secure: DsRVX99HA6+3JoXOVP/nPXeabJy2P73ws7Ager/e4rx3p3jS74bId09XsBU46bAT9ANmRWPR8y5DRi5Zlq0WQ2uXoR55wmsdu2KUegk6bDIS4Iop8DFxY8Kjou9s8RZbDTP27LfuYXKMO1rDW/xa6EhiotYRodekeZUz3P3MYjIi6rBV2Rz3vwmInpkKOti7AFwAsCGmCCK13irmPJEp5nwl3RgeKu2AGaolw9eypJXeNLUcNDVQ88ZUUXQCkwgq7a1BkK6NMeQLMrWAE1bD3amCbVXHCR9TaVx1ZH1dnha5Jcfj3gEFucTmInWWes5u9rypvsCkSxKtSqdiUA7BMJq7XykV7nGNplGLm2sq4+KSYlf3gZXg4XNXQkNOi4EBtRvathfFziD2SZgdtjiQX2neh0dMjf9czc/uCYkKYCFLeozdw2oQQ+BsxhQfsmU2ILGCFHyFikmDbBqZOWfQE5TN3itQqV3TFK8sOHQ8iy3MDShs+lBk9AUwbCA5YbRh8hJKhgXyEsDpisC417Pj22+TbutTj7v3Rmpe/st4hoL740grWc3PSVUBaypG0RsoafSDZWnYnTC+0aakd6QEb5S9wnMkP94kijYjjF6yUInuT05wdbQv5XcSXqAdGzBqB5jNNdfwgWVCOlwGfjnvzKllhF3PmWPW/nfmQpGOQh4=
+ on:
+ tags: true
+ distributions: sdist bdist_wheel
+ repo: RonnyPfannschmidt/iniconfig
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/CHANGELOG b/testing/web-platform/tests/tools/third_party/iniconfig/CHANGELOG
new file mode 100644
index 0000000000..679919fcd2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/CHANGELOG
@@ -0,0 +1,32 @@
+1.1.1
+=========
+
+* fix version determination (thanks @florimondmanca)
+
+1.1.0
+=====
+
+- typing stubs (thanks @bluetech)
+- ci fixes
+
+1.0.1
+======
+
+pytest 5+ support
+
+1.0
+====
+
+- re-sync with pylib codebase
+
+0.2
+==================
+
+- added ability to ask "name in iniconfig", i.e. to check
+ if a section is contained.
+
+- fix bug in "name=value" parsing where value was "x=3"
+
+- allow for ': ' to delimit name=value pairs, so that e.g. .pypirc files
+ like http://docs.python.org/distutils/packageindex.html
+ can be successfully parsed
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/LICENSE b/testing/web-platform/tests/tools/third_party/iniconfig/LICENSE
new file mode 100644
index 0000000000..31ecdfb1db
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/LICENSE
@@ -0,0 +1,19 @@
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/MANIFEST.in b/testing/web-platform/tests/tools/third_party/iniconfig/MANIFEST.in
new file mode 100644
index 0000000000..06be514ae5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/MANIFEST.in
@@ -0,0 +1,5 @@
+include LICENSE
+include example.ini
+include tox.ini
+include src/iniconfig/py.typed
+recursive-include src *.pyi
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/README.txt b/testing/web-platform/tests/tools/third_party/iniconfig/README.txt
new file mode 100644
index 0000000000..6bbad9a8d9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/README.txt
@@ -0,0 +1,51 @@
+iniconfig: brain-dead simple parsing of ini files
+=======================================================
+
+iniconfig is a small and simple INI-file parser module
+having a unique set of features:
+
+* tested against Python 2.4 through Python 3.2, Jython and PyPy
+* maintains order of sections and entries
+* supports multi-line values with or without line-continuations
+* supports "#" comments everywhere
+* raises errors with proper line-numbers
+* no bells and whistles like automatic substitutions
+* iniconfig raises an Error if two sections have the same name.
+
+If you encounter issues or have feature wishes please report them to:
+
+ http://github.com/RonnyPfannschmidt/iniconfig/issues
+
+Basic Example
+===================================
+
+If you have an ini file like this::
+
+ # content of example.ini
+ [section1] # comment
+ name1=value1 # comment
+ name1b=value1,value2 # comment
+
+ [section2]
+ name2=
+ line1
+ line2
+
+then you can do::
+
+ >>> import iniconfig
+ >>> ini = iniconfig.IniConfig("example.ini")
+    >>> ini['section1']['name1'] # raises KeyError if it does not exist
+ 'value1'
+ >>> ini.get('section1', 'name1b', [], lambda x: x.split(","))
+ ['value1', 'value2']
+ >>> ini.get('section1', 'notexist', [], lambda x: x.split(","))
+ []
+ >>> [x.name for x in list(ini)]
+ ['section1', 'section2']
+ >>> list(list(ini)[0].items())
+ [('name1', 'value1'), ('name1b', 'value1,value2')]
+ >>> 'section1' in ini
+ True
+    >>> 'nonexistent_section' in ini
+ False
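+
+A default can also be supplied when looking up a key on a section wrapper
+(``'missing'`` below is just an illustrative key that is not present in
+example.ini)::
+
+    >>> ini['section1'].get('missing', 'default-value')
+    'default-value'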
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/example.ini b/testing/web-platform/tests/tools/third_party/iniconfig/example.ini
new file mode 100644
index 0000000000..65481d2074
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/example.ini
@@ -0,0 +1,10 @@
+
+# content of example.ini
+[section1] # comment
+name1=value1 # comment
+name1b=value1,value2 # comment
+
+[section2]
+name2=
+ line1
+ line2
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/pyproject.toml b/testing/web-platform/tests/tools/third_party/iniconfig/pyproject.toml
new file mode 100644
index 0000000000..b2725d8f65
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/pyproject.toml
@@ -0,0 +1,5 @@
+[build-system]
+requires = ["setuptools>=41.2.0", "wheel", "setuptools_scm>3"]
+
+
+[tool.setuptools_scm]
\ No newline at end of file
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/setup.cfg b/testing/web-platform/tests/tools/third_party/iniconfig/setup.cfg
new file mode 100644
index 0000000000..3c6e79cf31
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal=1
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/setup.py b/testing/web-platform/tests/tools/third_party/iniconfig/setup.py
new file mode 100644
index 0000000000..f46f3214de
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/setup.py
@@ -0,0 +1,46 @@
+"""
+iniconfig: brain-dead simple config-ini parsing.
+
+compatible CPython 2.3 through to CPython 3.2, Jython, PyPy
+
+(c) 2010 Ronny Pfannschmidt, Holger Krekel
+"""
+
+from setuptools import setup
+
+
+def main():
+ with open('README.txt') as fp:
+ readme = fp.read()
+ setup(
+ name='iniconfig',
+ packages=['iniconfig'],
+ package_dir={'': 'src'},
+ description='iniconfig: brain-dead simple config-ini parsing',
+ long_description=readme,
+ use_scm_version=True,
+ url='http://github.com/RonnyPfannschmidt/iniconfig',
+ license='MIT License',
+ platforms=['unix', 'linux', 'osx', 'cygwin', 'win32'],
+ author='Ronny Pfannschmidt, Holger Krekel',
+ author_email=(
+ 'opensource@ronnypfannschmidt.de, holger.krekel@gmail.com'),
+ classifiers=[
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: POSIX',
+ 'Operating System :: Microsoft :: Windows',
+ 'Operating System :: MacOS :: MacOS X',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: Utilities',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 3',
+ ],
+ include_package_data=True,
+ zip_safe=False,
+ )
+
+if __name__ == '__main__':
+ main()
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/src/iniconfig/__init__.py b/testing/web-platform/tests/tools/third_party/iniconfig/src/iniconfig/__init__.py
new file mode 100644
index 0000000000..6ad9eaf868
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/src/iniconfig/__init__.py
@@ -0,0 +1,165 @@
+""" brain-dead simple parser for ini-style files.
+(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed
+"""
+__all__ = ['IniConfig', 'ParseError']
+
+COMMENTCHARS = "#;"
+
+
+class ParseError(Exception):
+ def __init__(self, path, lineno, msg):
+ Exception.__init__(self, path, lineno, msg)
+ self.path = path
+ self.lineno = lineno
+ self.msg = msg
+
+ def __str__(self):
+ return "%s:%s: %s" % (self.path, self.lineno+1, self.msg)
+
+
+class SectionWrapper(object):
+ def __init__(self, config, name):
+ self.config = config
+ self.name = name
+
+ def lineof(self, name):
+ return self.config.lineof(self.name, name)
+
+ def get(self, key, default=None, convert=str):
+ return self.config.get(self.name, key,
+ convert=convert, default=default)
+
+ def __getitem__(self, key):
+ return self.config.sections[self.name][key]
+
+ def __iter__(self):
+ section = self.config.sections.get(self.name, [])
+
+ def lineof(key):
+ return self.config.lineof(self.name, key)
+ for name in sorted(section, key=lineof):
+ yield name
+
+ def items(self):
+ for name in self:
+ yield name, self[name]
+
+
+class IniConfig(object):
+ def __init__(self, path, data=None):
+ self.path = str(path) # convenience
+ if data is None:
+ f = open(self.path)
+ try:
+ tokens = self._parse(iter(f))
+ finally:
+ f.close()
+ else:
+ tokens = self._parse(data.splitlines(True))
+
+ self._sources = {}
+ self.sections = {}
+
+ for lineno, section, name, value in tokens:
+ if section is None:
+ self._raise(lineno, 'no section header defined')
+ self._sources[section, name] = lineno
+ if name is None:
+ if section in self.sections:
+ self._raise(lineno, 'duplicate section %r' % (section, ))
+ self.sections[section] = {}
+ else:
+ if name in self.sections[section]:
+ self._raise(lineno, 'duplicate name %r' % (name, ))
+ self.sections[section][name] = value
+
+ def _raise(self, lineno, msg):
+ raise ParseError(self.path, lineno, msg)
+
+ def _parse(self, line_iter):
+ result = []
+ section = None
+ for lineno, line in enumerate(line_iter):
+ name, data = self._parseline(line, lineno)
+ # new value
+ if name is not None and data is not None:
+ result.append((lineno, section, name, data))
+ # new section
+ elif name is not None and data is None:
+ if not name:
+ self._raise(lineno, 'empty section name')
+ section = name
+ result.append((lineno, section, None, None))
+ # continuation
+ elif name is None and data is not None:
+ if not result:
+ self._raise(lineno, 'unexpected value continuation')
+ last = result.pop()
+ last_name, last_data = last[-2:]
+ if last_name is None:
+ self._raise(lineno, 'unexpected value continuation')
+
+ if last_data:
+ data = '%s\n%s' % (last_data, data)
+ result.append(last[:-1] + (data,))
+ return result
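+
+    # Illustrative shape of the list returned by _parse above: parsing the
+    # text "[s]\nx = 1\n  2" yields
+    # [(0, 's', None, None), (1, 's', 'x', '1\n2')].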
+
+ def _parseline(self, line, lineno):
+ # blank lines
+ if iscommentline(line):
+ line = ""
+ else:
+ line = line.rstrip()
+ if not line:
+ return None, None
+ # section
+ if line[0] == '[':
+ realline = line
+ for c in COMMENTCHARS:
+ line = line.split(c)[0].rstrip()
+ if line[-1] == "]":
+ return line[1:-1], None
+ return None, realline.strip()
+ # value
+ elif not line[0].isspace():
+ try:
+ name, value = line.split('=', 1)
+ if ":" in name:
+ raise ValueError()
+ except ValueError:
+ try:
+ name, value = line.split(":", 1)
+ except ValueError:
+ self._raise(lineno, 'unexpected line: %r' % line)
+ return name.strip(), value.strip()
+ # continuation
+ else:
+ return None, line.strip()
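+
+    # Return conventions of _parseline above:
+    #   (name, None)  -> section header
+    #   (name, value) -> name/value assignment
+    #   (None, value) -> continuation of the previous value
+    #   (None, None)  -> blank line or comment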
+
+ def lineof(self, section, name=None):
+ lineno = self._sources.get((section, name))
+ if lineno is not None:
+ return lineno + 1
+
+ def get(self, section, name, default=None, convert=str):
+ try:
+ return convert(self.sections[section][name])
+ except KeyError:
+ return default
+
+ def __getitem__(self, name):
+ if name not in self.sections:
+ raise KeyError(name)
+ return SectionWrapper(self, name)
+
+ def __iter__(self):
+ for name in sorted(self.sections, key=self.lineof):
+ yield SectionWrapper(self, name)
+
+ def __contains__(self, arg):
+ return arg in self.sections
+
+
+def iscommentline(line):
+ c = line.lstrip()[:1]
+ return c in COMMENTCHARS
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/src/iniconfig/__init__.pyi b/testing/web-platform/tests/tools/third_party/iniconfig/src/iniconfig/__init__.pyi
new file mode 100644
index 0000000000..b6284bec3f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/src/iniconfig/__init__.pyi
@@ -0,0 +1,31 @@
+from typing import Callable, Iterator, Mapping, Optional, Tuple, TypeVar, Union
+from typing_extensions import Final
+
+_D = TypeVar('_D')
+_T = TypeVar('_T')
+
+class ParseError(Exception):
+ # Private __init__.
+ path: Final[str]
+ lineno: Final[int]
+ msg: Final[str]
+
+class SectionWrapper:
+ # Private __init__.
+ config: Final[IniConfig]
+ name: Final[str]
+ def __getitem__(self, key: str) -> str: ...
+ def __iter__(self) -> Iterator[str]: ...
+ def get(self, key: str, default: _D = ..., convert: Callable[[str], _T] = ...) -> Union[_T, _D]: ...
+ def items(self) -> Iterator[Tuple[str, str]]: ...
+ def lineof(self, name: str) -> Optional[int]: ...
+
+class IniConfig:
+ path: Final[str]
+ sections: Final[Mapping[str, Mapping[str, str]]]
+ def __init__(self, path: str, data: Optional[str] = None): ...
+ def __contains__(self, arg: str) -> bool: ...
+ def __getitem__(self, name: str) -> SectionWrapper: ...
+ def __iter__(self) -> Iterator[SectionWrapper]: ...
+ def get(self, section: str, name: str, default: _D = ..., convert: Callable[[str], _T] = ...) -> Union[_T, _D]: ...
+ def lineof(self, section: str, name: Optional[str] = ...) -> Optional[int]: ...
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/src/iniconfig/py.typed b/testing/web-platform/tests/tools/third_party/iniconfig/src/iniconfig/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/src/iniconfig/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/testing/conftest.py b/testing/web-platform/tests/tools/third_party/iniconfig/testing/conftest.py
new file mode 100644
index 0000000000..d265a29f86
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/testing/conftest.py
@@ -0,0 +1,2 @@
+
+option_doctestglob = "README.txt"
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/testing/test_iniconfig.py b/testing/web-platform/tests/tools/third_party/iniconfig/testing/test_iniconfig.py
new file mode 100644
index 0000000000..fe12421e5a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/testing/test_iniconfig.py
@@ -0,0 +1,314 @@
+import py
+import pytest
+from iniconfig import IniConfig, ParseError, __all__ as ALL
+from iniconfig import iscommentline
+from textwrap import dedent
+
+
+check_tokens = {
+ 'section': (
+ '[section]',
+ [(0, 'section', None, None)]
+ ),
+ 'value': (
+ 'value = 1',
+ [(0, None, 'value', '1')]
+ ),
+ 'value in section': (
+ '[section]\nvalue=1',
+ [(0, 'section', None, None), (1, 'section', 'value', '1')]
+ ),
+ 'value with continuation': (
+ 'names =\n Alice\n Bob',
+ [(0, None, 'names', 'Alice\nBob')]
+ ),
+ 'value with aligned continuation': (
+ 'names = Alice\n'
+ ' Bob',
+ [(0, None, 'names', 'Alice\nBob')]
+ ),
+ 'blank line': (
+ '[section]\n\nvalue=1',
+ [(0, 'section', None, None), (2, 'section', 'value', '1')]
+ ),
+ 'comment': (
+ '# comment',
+ []
+ ),
+ 'comment on value': (
+ 'value = 1',
+ [(0, None, 'value', '1')]
+ ),
+
+ 'comment on section': (
+ '[section] #comment',
+ [(0, 'section', None, None)]
+ ),
+ 'comment2': (
+ '; comment',
+ []
+ ),
+
+ 'comment2 on section': (
+ '[section] ;comment',
+ [(0, 'section', None, None)]
+ ),
+ 'pseudo section syntax in value': (
+ 'name = value []',
+ [(0, None, 'name', 'value []')]
+ ),
+ 'assignment in value': (
+ 'value = x = 3',
+ [(0, None, 'value', 'x = 3')]
+ ),
+ 'use of colon for name-values': (
+ 'name: y',
+ [(0, None, 'name', 'y')]
+ ),
+ 'use of colon without space': (
+ 'value:y=5',
+ [(0, None, 'value', 'y=5')]
+ ),
+ 'equality gets precedence': (
+ 'value=xyz:5',
+ [(0, None, 'value', 'xyz:5')]
+ ),
+
+}
+
+
+@pytest.fixture(params=sorted(check_tokens))
+def input_expected(request):
+ return check_tokens[request.param]
+
+
+@pytest.fixture
+def input(input_expected):
+ return input_expected[0]
+
+
+@pytest.fixture
+def expected(input_expected):
+ return input_expected[1]
+
+
+def parse(input):
+ # only for testing purposes - _parse() does not use state except path
+ ini = object.__new__(IniConfig)
+ ini.path = "sample"
+ return ini._parse(input.splitlines(True))
+
+
+def parse_a_error(input):
+ return py.test.raises(ParseError, parse, input)
+
+
+def test_tokenize(input, expected):
+ parsed = parse(input)
+ assert parsed == expected
+
+
+def test_parse_empty():
+ parsed = parse("")
+ assert not parsed
+ ini = IniConfig("sample", "")
+ assert not ini.sections
+
+
+def test_ParseError():
+ e = ParseError("filename", 0, "hello")
+ assert str(e) == "filename:1: hello"
+
+
+def test_continuation_needs_preceding_token():
+ excinfo = parse_a_error(' Foo')
+ assert excinfo.value.lineno == 0
+
+
+def test_continuation_cant_be_after_section():
+ excinfo = parse_a_error('[section]\n Foo')
+ assert excinfo.value.lineno == 1
+
+
+def test_section_cant_be_empty():
+ excinfo = parse_a_error('[]')
+ assert excinfo.value.lineno == 0
+
+
+@py.test.mark.parametrize('line', [
+ '!!',
+ ])
+def test_error_on_weird_lines(line):
+ parse_a_error(line)
+
+
+def test_iniconfig_from_file(tmpdir):
+ path = tmpdir/'test.txt'
+ path.write('[metadata]\nname=1')
+
+ config = IniConfig(path=path)
+ assert list(config.sections) == ['metadata']
+ config = IniConfig(path, "[diff]")
+ assert list(config.sections) == ['diff']
+ with pytest.raises(TypeError):
+ IniConfig(data=path.read())
+
+
+def test_iniconfig_section_first(tmpdir):
+ with pytest.raises(ParseError) as excinfo:
+ IniConfig("x", data='name=1')
+ assert excinfo.value.msg == "no section header defined"
+
+
+def test_iniconfig_section_duplicate_fails():
+ with pytest.raises(ParseError) as excinfo:
+ IniConfig("x", data='[section]\n[section]')
+ assert 'duplicate section' in str(excinfo.value)
+
+
+def test_iniconfig_duplicate_key_fails():
+ with pytest.raises(ParseError) as excinfo:
+ IniConfig("x", data='[section]\nname = Alice\nname = bob')
+
+ assert 'duplicate name' in str(excinfo.value)
+
+
+def test_iniconfig_lineof():
+ config = IniConfig("x.ini", data=(
+ '[section]\n'
+ 'value = 1\n'
+ '[section2]\n'
+ '# comment\n'
+ 'value =2'
+ ))
+
+ assert config.lineof('missing') is None
+ assert config.lineof('section') == 1
+ assert config.lineof('section2') == 3
+ assert config.lineof('section', 'value') == 2
+ assert config.lineof('section2', 'value') == 5
+
+ assert config['section'].lineof('value') == 2
+ assert config['section2'].lineof('value') == 5
+
+
+def test_iniconfig_get_convert():
+ config = IniConfig("x", data='[section]\nint = 1\nfloat = 1.1')
+ assert config.get('section', 'int') == '1'
+ assert config.get('section', 'int', convert=int) == 1
+
+
+def test_iniconfig_get_missing():
+ config = IniConfig("x", data='[section]\nint = 1\nfloat = 1.1')
+ assert config.get('section', 'missing', default=1) == 1
+ assert config.get('section', 'missing') is None
+
+
+def test_section_get():
+ config = IniConfig("x", data='[section]\nvalue=1')
+ section = config['section']
+ assert section.get('value', convert=int) == 1
+ assert section.get('value', 1) == "1"
+ assert section.get('missing', 2) == 2
+
+
+def test_missing_section():
+ config = IniConfig("x", data='[section]\nvalue=1')
+ with pytest.raises(KeyError):
+ config["other"]
+
+
+def test_section_getitem():
+ config = IniConfig("x", data='[section]\nvalue=1')
+ assert config['section']['value'] == '1'
+ assert config['section']['value'] == '1'
+
+
+def test_section_iter():
+ config = IniConfig("x", data='[section]\nvalue=1')
+ names = list(config['section'])
+ assert names == ['value']
+ items = list(config['section'].items())
+ assert items == [('value', '1')]
+
+
+def test_config_iter():
+ config = IniConfig("x.ini", data=dedent('''
+ [section1]
+ value=1
+ [section2]
+ value=2
+ '''))
+ l = list(config)
+ assert len(l) == 2
+ assert l[0].name == 'section1'
+ assert l[0]['value'] == '1'
+ assert l[1].name == 'section2'
+ assert l[1]['value'] == '2'
+
+
+def test_config_contains():
+ config = IniConfig("x.ini", data=dedent('''
+ [section1]
+ value=1
+ [section2]
+ value=2
+ '''))
+ assert 'xyz' not in config
+ assert 'section1' in config
+ assert 'section2' in config
+
+
+def test_iter_file_order():
+ config = IniConfig("x.ini", data="""
+[section2] #cpython dict ordered before section
+value = 1
+value2 = 2 # dict ordered before value
+[section]
+a = 1
+b = 2
+""")
+ l = list(config)
+ secnames = [x.name for x in l]
+ assert secnames == ['section2', 'section']
+ assert list(config['section2']) == ['value', 'value2']
+ assert list(config['section']) == ['a', 'b']
+
+
+def test_example_pypirc():
+ config = IniConfig("pypirc", data=dedent('''
+ [distutils]
+ index-servers =
+ pypi
+ other
+
+ [pypi]
+ repository: <repository-url>
+ username: <username>
+ password: <password>
+
+ [other]
+ repository: http://example.com/pypi
+ username: <username>
+ password: <password>
+ '''))
+ distutils, pypi, other = list(config)
+ assert distutils["index-servers"] == "pypi\nother"
+ assert pypi['repository'] == '<repository-url>'
+ assert pypi['username'] == '<username>'
+ assert pypi['password'] == '<password>'
+ assert ['repository', 'username', 'password'] == list(other)
+
+
+def test_api_import():
+ assert ALL == ['IniConfig', 'ParseError']
+
+
+@pytest.mark.parametrize("line", [
+ "#qwe",
+ " #qwe",
+ ";qwe",
+ " ;qwe",
+])
+def test_iscommentline_true(line):
+ assert iscommentline(line)
diff --git a/testing/web-platform/tests/tools/third_party/iniconfig/tox.ini b/testing/web-platform/tests/tools/third_party/iniconfig/tox.ini
new file mode 100644
index 0000000000..298838bee0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/iniconfig/tox.ini
@@ -0,0 +1,14 @@
+[tox]
+envlist=py27,py26,py33,py34,py35
+
+
+[testenv]
+commands=
+ pytest {posargs}
+deps=
+ pytest
+
+
+[pytest]
+testpaths=
+ testing
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/.gitignore b/testing/web-platform/tests/tools/third_party/more-itertools/.gitignore
new file mode 100644
index 0000000000..229891fb43
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/.gitignore
@@ -0,0 +1,34 @@
+*.py[co]
+
+# Packages
+*.egg
+*.eggs
+*.egg-info
+dist
+build
+eggs
+parts
+bin
+var
+sdist
+develop-eggs
+.installed.cfg
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage
+.tox
+.noseids
+
+# Docs by Sphinx
+_build
+
+# Environment
+.env
+
+# IDE files
+.idea
+.vscode
+.DS_Store
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/.travis.yml b/testing/web-platform/tests/tools/third_party/more-itertools/.travis.yml
new file mode 100644
index 0000000000..008fb0c67e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/.travis.yml
@@ -0,0 +1,26 @@
+sudo: false
+
+language: "python"
+
+python:
+ - "2.7"
+ - "3.4"
+ - "3.5"
+ - "3.6"
+ - "3.7-dev"
+ - "pypy-5.4.1"
+ - "pypy3"
+
+install:
+ - "pip install ."
+ - "pip install -U coveralls flake8"
+
+script:
+ - "coverage run --include='more_itertools/*.py' --omit='more_itertools/tests/*' setup.py test"
+ - "flake8 ."
+
+notifications:
+ email: false
+
+after_success:
+ - "coveralls"
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/LICENSE b/testing/web-platform/tests/tools/third_party/more-itertools/LICENSE
new file mode 100644
index 0000000000..0a523bece3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2012 Erik Rose
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/MANIFEST.in b/testing/web-platform/tests/tools/third_party/more-itertools/MANIFEST.in
new file mode 100644
index 0000000000..ec800e3e02
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/MANIFEST.in
@@ -0,0 +1,8 @@
+include README.rst
+include LICENSE
+include docs/*.rst
+include docs/Makefile
+include docs/make.bat
+include docs/conf.py
+include fabfile.py
+include tox.ini
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/README.rst b/testing/web-platform/tests/tools/third_party/more-itertools/README.rst
new file mode 100644
index 0000000000..252b394737
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/README.rst
@@ -0,0 +1,59 @@
+==============
+More Itertools
+==============
+
+.. image:: https://coveralls.io/repos/github/erikrose/more-itertools/badge.svg?branch=master
+ :target: https://coveralls.io/github/erikrose/more-itertools?branch=master
+
+Python's ``itertools`` library is a gem - you can compose elegant solutions
+for a variety of problems with the functions it provides. In ``more-itertools``
+we collect additional building blocks, recipes, and routines for working with
+Python iterables.
+
+Getting started
+===============
+
+To get started, install the library with `pip <https://pip.pypa.io/en/stable/>`_:
+
+.. code-block:: shell
+
+ pip install more-itertools
+
+The recipes from the `itertools docs <https://docs.python.org/3/library/itertools.html#itertools-recipes>`_
+are included in the top-level package:
+
+.. code-block:: python
+
+ >>> from more_itertools import flatten
+ >>> iterable = [(0, 1), (2, 3)]
+ >>> list(flatten(iterable))
+ [0, 1, 2, 3]
+
+Several new recipes are available as well:
+
+.. code-block:: python
+
+ >>> from more_itertools import chunked
+ >>> iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8]
+ >>> list(chunked(iterable, 3))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ >>> from more_itertools import spy
+ >>> iterable = (x * x for x in range(1, 6))
+ >>> head, iterable = spy(iterable, n=3)
+ >>> list(head)
+ [1, 4, 9]
+ >>> list(iterable)
+ [1, 4, 9, 16, 25]
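+
+As one more illustration (``windowed`` is covered in the API reference linked
+below), fixed-size windows can be taken over an iterable:
+
+.. code-block:: python
+
+    >>> from more_itertools import windowed
+    >>> list(windowed([1, 2, 3, 4], 3))
+    [(1, 2, 3), (2, 3, 4)]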
+
+
+
+For the full listing of functions, see the `API documentation <https://more-itertools.readthedocs.io/en/latest/api.html>`_.
+
+Development
+===========
+
+``more-itertools`` is maintained by `@erikrose <https://github.com/erikrose>`_
+and `@bbayles <https://github.com/bbayles>`_, with help from `many others <https://github.com/erikrose/more-itertools/graphs/contributors>`_.
+If you have a problem or suggestion, please file a bug or pull request in this
+repository. Thanks for contributing!
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/docs/Makefile b/testing/web-platform/tests/tools/third_party/more-itertools/docs/Makefile
new file mode 100644
index 0000000000..47888da7b7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/docs/Makefile
@@ -0,0 +1,153 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/more-itertools.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/more-itertools.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/more-itertools"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/more-itertools"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/docs/api.rst b/testing/web-platform/tests/tools/third_party/more-itertools/docs/api.rst
new file mode 100644
index 0000000000..63e5d7f450
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/docs/api.rst
@@ -0,0 +1,234 @@
+=============
+API Reference
+=============
+
+.. automodule:: more_itertools
+
+Grouping
+========
+
+These tools yield groups of items from a source iterable.
+
+----
+
+**New itertools**
+
+.. autofunction:: chunked
+.. autofunction:: sliced
+.. autofunction:: distribute
+.. autofunction:: divide
+.. autofunction:: split_at
+.. autofunction:: split_before
+.. autofunction:: split_after
+.. autofunction:: bucket
+
+----
+
+**Itertools recipes**
+
+.. autofunction:: grouper
+.. autofunction:: partition
+
+
+Lookahead and lookback
+======================
+
+These tools peek at an iterable's values without advancing it.
+
+----
+
+**New itertools**
+
+
+.. autofunction:: spy
+.. autoclass:: peekable
+.. autoclass:: seekable
+
+
+Windowing
+=========
+
+These tools yield windows of items from an iterable.
+
+----
+
+**New itertools**
+
+.. autofunction:: windowed
+.. autofunction:: stagger
+
+----
+
+**Itertools recipes**
+
+.. autofunction:: pairwise
+
+
+Augmenting
+==========
+
+These tools yield items from an iterable, plus additional data.
+
+----
+
+**New itertools**
+
+.. autofunction:: count_cycle
+.. autofunction:: intersperse
+.. autofunction:: padded
+.. autofunction:: adjacent
+.. autofunction:: groupby_transform
+
+----
+
+**Itertools recipes**
+
+.. autofunction:: padnone
+.. autofunction:: ncycles
+
+
+Combining
+=========
+
+These tools combine multiple iterables.
+
+----
+
+**New itertools**
+
+.. autofunction:: collapse
+.. autofunction:: sort_together
+.. autofunction:: interleave
+.. autofunction:: interleave_longest
+.. autofunction:: collate(*iterables, key=lambda a: a, reverse=False)
+.. autofunction:: zip_offset(*iterables, offsets, longest=False, fillvalue=None)
+
+----
+
+**Itertools recipes**
+
+.. autofunction:: dotproduct
+.. autofunction:: flatten
+.. autofunction:: roundrobin
+.. autofunction:: prepend
+
+
+Summarizing
+===========
+
+These tools return summarized or aggregated data from an iterable.
+
+----
+
+**New itertools**
+
+.. autofunction:: ilen
+.. autofunction:: first(iterable[, default])
+.. autofunction:: one
+.. autofunction:: unique_to_each
+.. autofunction:: locate(iterable, pred=bool)
+.. autofunction:: consecutive_groups(iterable, ordering=lambda x: x)
+.. autofunction:: exactly_n(iterable, n, predicate=bool)
+.. autoclass:: run_length
+.. autofunction:: map_reduce
+
+----
+
+**Itertools recipes**
+
+.. autofunction:: all_equal
+.. autofunction:: first_true
+.. autofunction:: nth
+.. autofunction:: quantify(iterable, pred=bool)
+
+
+Selecting
+=========
+
+These tools yield certain items from an iterable.
+
+----
+
+**New itertools**
+
+.. autofunction:: islice_extended(start, stop, step)
+.. autofunction:: strip
+.. autofunction:: lstrip
+.. autofunction:: rstrip
+
+----
+
+**Itertools recipes**
+
+.. autofunction:: take
+.. autofunction:: tail
+.. autofunction:: unique_everseen
+.. autofunction:: unique_justseen
+
+
+Combinatorics
+=============
+
+These tools yield combinatorial arrangements of items from iterables.
+
+----
+
+**New itertools**
+
+.. autofunction:: distinct_permutations
+.. autofunction:: circular_shifts
+
+----
+
+**Itertools recipes**
+
+.. autofunction:: powerset
+.. autofunction:: random_product
+.. autofunction:: random_permutation
+.. autofunction:: random_combination
+.. autofunction:: random_combination_with_replacement
+.. autofunction:: nth_combination
+
+
+Wrapping
+========
+
+These tools provide wrappers to smooth working with objects that produce or
+consume iterables.
+
+----
+
+**New itertools**
+
+.. autofunction:: always_iterable
+.. autofunction:: consumer
+.. autofunction:: with_iter
+
+----
+
+**Itertools recipes**
+
+.. autofunction:: iter_except
+
+
+Others
+======
+
+**New itertools**
+
+.. autofunction:: numeric_range(start, stop, step)
+.. autofunction:: always_reversible
+.. autofunction:: side_effect
+.. autofunction:: iterate
+.. autofunction:: difference(iterable, func=operator.sub)
+.. autofunction:: make_decorator
+.. autoclass:: SequenceView
+
+----
+
+**Itertools recipes**
+
+.. autofunction:: consume
+.. autofunction:: accumulate(iterable, func=operator.add)
+.. autofunction:: tabulate
+.. autofunction:: repeatfunc
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/docs/conf.py b/testing/web-platform/tests/tools/third_party/more-itertools/docs/conf.py
new file mode 100644
index 0000000000..e38c71aeaa
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/docs/conf.py
@@ -0,0 +1,244 @@
+# -*- coding: utf-8 -*-
+#
+# more-itertools documentation build configuration file, created by
+# sphinx-quickstart on Mon Jun 25 20:42:39 2012.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+import sphinx_rtd_theme
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('..'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'more-itertools'
+copyright = u'2012, Erik Rose'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '4.2.0'
+# The full version, including alpha/beta/rc tags.
+release = version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'sphinx_rtd_theme'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'more-itertoolsdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'more-itertools.tex', u'more-itertools Documentation',
+ u'Erik Rose', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'more-itertools', u'more-itertools Documentation',
+ [u'Erik Rose'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'more-itertools', u'more-itertools Documentation',
+ u'Erik Rose', 'more-itertools', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/docs/index.rst b/testing/web-platform/tests/tools/third_party/more-itertools/docs/index.rst
new file mode 100644
index 0000000000..091461ff7d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/docs/index.rst
@@ -0,0 +1,16 @@
+.. include:: ../README.rst
+
+Contents
+========
+
+.. toctree::
+ :maxdepth: 2
+
+ api
+
+.. toctree::
+ :maxdepth: 1
+
+ license
+ testing
+ versions
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/docs/license.rst b/testing/web-platform/tests/tools/third_party/more-itertools/docs/license.rst
new file mode 100644
index 0000000000..123c0f54dc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/docs/license.rst
@@ -0,0 +1,16 @@
+=======
+License
+=======
+
+more-itertools is under the MIT License. See the LICENSE file.
+
+Conditions for Contributors
+===========================
+
+By contributing to this software project, you are agreeing to the following
+terms and conditions for your contributions: First, you agree your
+contributions are submitted under the MIT license. Second, you represent you
+are authorized to make the contributions and grant the license. If your
+employer has rights to intellectual property that includes your contributions,
+you represent that you have received permission to make contributions and grant
+the required license on behalf of that employer.
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/docs/make.bat b/testing/web-platform/tests/tools/third_party/more-itertools/docs/make.bat
new file mode 100644
index 0000000000..8023c0aa67
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/docs/make.bat
@@ -0,0 +1,190 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. texinfo to make Texinfo files
+ echo. gettext to make PO message catalogs
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\more-itertools.qhcp
+ echo.To view the help file:
+	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\more-itertools.qhc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "texinfo" (
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+ goto end
+)
+
+if "%1" == "gettext" (
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+:end
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/docs/testing.rst b/testing/web-platform/tests/tools/third_party/more-itertools/docs/testing.rst
new file mode 100644
index 0000000000..bdd4219951
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/docs/testing.rst
@@ -0,0 +1,19 @@
+=======
+Testing
+=======
+
+To install dependencies and run tests, use this command::
+
+ python setup.py test
+
+Multiple Python Versions
+========================
+
+To run the tests on all the versions of Python more-itertools supports, install
+tox::
+
+ pip install tox
+
+Then, run the tests::
+
+ tox
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/docs/versions.rst b/testing/web-platform/tests/tools/third_party/more-itertools/docs/versions.rst
new file mode 100644
index 0000000000..e50ac4393d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/docs/versions.rst
@@ -0,0 +1,237 @@
+===============
+Version History
+===============
+
+.. automodule:: more_itertools
+
+4.2.0
+-----
+
+* New itertools:
+ * :func:`map_reduce` (thanks to pylang)
+ * :func:`prepend` (from the `Python 3.7 docs <https://docs.python.org/3.7/library/itertools.html#itertools-recipes>`_)
+
+* Improvements to existing itertools:
+ * :func:`bucket` now complies with PEP 479 (thanks to irmen)
+
+* Other changes:
+ * Python 3.7 is now supported (thanks to irmen)
+ * Python 3.3 is no longer supported
+ * The test suite no longer requires third-party modules to run
+ * The API docs now include links to source code
+
+4.1.0
+-----
+
+* New itertools:
+ * :func:`split_at` (thanks to michael-celani)
+ * :func:`circular_shifts` (thanks to hiqua)
+ * :func:`make_decorator` - see the blog post `Yo, I heard you like decorators <https://sites.google.com/site/bbayles/index/decorator_factory>`_
+ for a tour (thanks to pylang)
+ * :func:`always_reversible` (thanks to michael-celani)
+ * :func:`nth_combination` (from the `Python 3.7 docs <https://docs.python.org/3.7/library/itertools.html#itertools-recipes>`_)
+
+* Improvements to existing itertools:
+ * :func:`seekable` now has an ``elements`` method to return cached items.
+ * The performance tradeoffs between :func:`roundrobin` and
+ :func:`interleave_longest` are now documented (thanks michael-celani,
+ pylang, and MSeifert04)
+
+4.0.1
+-----
+
+* No code changes - this release fixes how the docs display on PyPI.
+
+4.0.0
+-----
+
+* New itertools:
+ * :func:`consecutive_groups` (Based on the example in the `Python 2.4 docs <https://docs.python.org/release/2.4.4/lib/itertools-example.html>`_)
+ * :func:`seekable` (If you're looking for how to "reset" an iterator,
+ you're in luck!)
+ * :func:`exactly_n` (thanks to michael-celani)
+ * :func:`run_length.encode` and :func:`run_length.decode`
+ * :func:`difference`
+
+* Improvements to existing itertools:
+ * The number of items between filler elements in :func:`intersperse` can
+ now be specified (thanks to pylang)
+ * :func:`distinct_permutations` and :func:`peekable` got some minor
+ adjustments (thanks to MSeifert04)
+ * :func:`always_iterable` now returns an iterator object. It also now
+ allows different types to be considered iterable (thanks to jaraco)
+ * :func:`bucket` can now limit the keys it stores in memory
+ * :func:`one` now allows for custom exceptions (thanks to kalekundert)
+
+* Other changes:
+ * A few typos were fixed (thanks to EdwardBetts)
+ * All tests can now be run with ``python setup.py test``
+
+The major version update is due to the change in the return value of :func:`always_iterable`.
+It now always returns iterator objects:
+
+.. code-block:: python
+
+ >>> from more_itertools import always_iterable
+ # Non-iterable objects are wrapped with iter(tuple(obj))
+ >>> always_iterable(12345)
+ <tuple_iterator object at 0x7fb24c9488d0>
+ >>> list(always_iterable(12345))
+ [12345]
+ # Iterable objects are wrapped with iter()
+ >>> always_iterable([1, 2, 3, 4, 5])
+ <list_iterator object at 0x7fb24c948c50>
+
+3.2.0
+-----
+
+* New itertools:
+ * :func:`lstrip`, :func:`rstrip`, and :func:`strip`
+ (thanks to MSeifert04 and pylang)
+ * :func:`islice_extended`
+* Improvements to existing itertools:
+ * Some bugs with slicing :func:`peekable`-wrapped iterables were fixed
+
+3.1.0
+-----
+
+* New itertools:
+ * :func:`numeric_range` (Thanks to BebeSparkelSparkel and MSeifert04)
+ * :func:`count_cycle` (Thanks to BebeSparkelSparkel)
+ * :func:`locate` (Thanks to pylang and MSeifert04)
+* Improvements to existing itertools:
+ * A few itertools are now slightly faster due to some function
+ optimizations. (Thanks to MSeifert04)
+* The docs have been substantially revised with installation notes,
+ categories for library functions, links, and more. (Thanks to pylang)
+
+
+3.0.0
+-----
+
+* Removed itertools:
+ * ``context`` has been removed due to a design flaw - see below for
+ replacement options. (thanks to NeilGirdhar)
+* Improvements to existing itertools:
+ * ``side_effect`` now supports ``before`` and ``after`` keyword
+ arguments. (Thanks to yardsale8)
+* PyPy and PyPy3 are now supported.
+
+The major version change is due to the removal of the ``context`` function.
+Replace it with standard ``with`` statement context management:
+
+.. code-block:: python
+
+ # Don't use context() anymore
+ file_obj = StringIO()
+ consume(print(x, file=f) for f in context(file_obj) for x in u'123')
+
+ # Use a with statement instead
+ file_obj = StringIO()
+ with file_obj as f:
+ consume(print(x, file=f) for x in u'123')
+
+2.6.0
+-----
+
+* New itertools:
+ * ``adjacent`` and ``groupby_transform`` (Thanks to diazona)
+ * ``always_iterable`` (Thanks to jaraco)
+ * (Removed in 3.0.0) ``context`` (Thanks to yardsale8)
+ * ``divide`` (Thanks to mozbhearsum)
+* Improvements to existing itertools:
+ * ``ilen`` is now slightly faster. (Thanks to wbolster)
+ * ``peekable`` can now prepend items to an iterable. (Thanks to diazona)
+
+2.5.0
+-----
+
+* New itertools:
+ * ``distribute`` (Thanks to mozbhearsum and coady)
+ * ``sort_together`` (Thanks to clintval)
+ * ``stagger`` and ``zip_offset`` (Thanks to joshbode)
+ * ``padded``
+* Improvements to existing itertools:
+ * ``peekable`` now handles negative indexes and slices with negative
+ components properly.
+ * ``intersperse`` is now slightly faster. (Thanks to pylang)
+ * ``windowed`` now accepts a ``step`` keyword argument.
+ (Thanks to pylang)
+* Python 3.6 is now supported.
+
+2.4.1
+-----
+
+* Move docs 100% to readthedocs.io.
+
+2.4
+-----
+
+* New itertools:
+ * ``accumulate``, ``all_equal``, ``first_true``, ``partition``, and
+ ``tail`` from the itertools documentation.
+ * ``bucket`` (Thanks to Rosuav and cvrebert)
+ * ``collapse`` (Thanks to abarnet)
+ * ``interleave`` and ``interleave_longest`` (Thanks to abarnet)
+ * ``side_effect`` (Thanks to nvie)
+ * ``sliced`` (Thanks to j4mie and coady)
+ * ``split_before`` and ``split_after`` (Thanks to astronouth7303)
+ * ``spy`` (Thanks to themiurgo and mathieulongtin)
+* Improvements to existing itertools:
+ * ``chunked`` is now simpler and more friendly to garbage collection.
+ (Contributed by coady, with thanks to piskvorky)
+ * ``collate`` now delegates to ``heapq.merge`` when possible.
+ (Thanks to kmike and julianpistorius)
+ * ``peekable``-wrapped iterables are now indexable and sliceable.
+ Iterating through ``peekable``-wrapped iterables is also faster.
+ * ``one`` and ``unique_to_each`` have been simplified.
+ (Thanks to coady)
+
+
+2.3
+-----
+
+* Added ``one`` from ``jaraco.util.itertools``. (Thanks, jaraco!)
+* Added ``distinct_permutations`` and ``unique_to_each``. (Contributed by
+ bbayles)
+* Added ``windowed``. (Contributed by bbayles, with thanks to buchanae,
+ jaraco, and abarnert)
+* Simplified the implementation of ``chunked``. (Thanks, nvie!)
+* Python 3.5 is now supported. Python 2.6 is no longer supported.
+* Python 3 is now supported directly; there is no 2to3 step.
+
+2.2
+-----
+
+* Added ``iterate`` and ``with_iter``. (Thanks, abarnert!)
+
+2.1
+-----
+
+* Added (tested!) implementations of the recipes from the itertools
+ documentation. (Thanks, Chris Lonnen!)
+* Added ``ilen``. (Thanks for the inspiration, Matt Basta!)
+
+2.0
+-----
+
+* ``chunked`` now returns lists rather than tuples. After all, they're
+ homogeneous. This slightly backward-incompatible change is the reason for
+ the major version bump.
+* Added ``@consumer``.
+* Improved test machinery.
+
+1.1
+-----
+
+* Added ``first`` function.
+* Added Python 3 support.
+* Added a default arg to ``peekable.peek()``.
+* Noted how to easily test whether a peekable iterator is exhausted.
+* Rewrote documentation.
+
+1.0
+-----
+
+* Initial release, with ``collate``, ``peekable``, and ``chunked``. Could
+ really use better docs.
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/__init__.py b/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/__init__.py
new file mode 100644
index 0000000000..bba462c3db
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/__init__.py
@@ -0,0 +1,2 @@
+from more_itertools.more import * # noqa
+from more_itertools.recipes import * # noqa
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/more.py b/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/more.py
new file mode 100644
index 0000000000..d517250242
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/more.py
@@ -0,0 +1,2068 @@
+from __future__ import print_function
+
+from collections import Counter, defaultdict, deque
+from functools import partial, wraps
+from heapq import merge
+from itertools import (
+ chain,
+ compress,
+ count,
+ cycle,
+ dropwhile,
+ groupby,
+ islice,
+ repeat,
+ takewhile,
+ tee
+)
+from operator import itemgetter, lt, gt, sub
+from sys import maxsize, version_info
+try:
+ from collections.abc import Sequence
+except ImportError:
+ from collections import Sequence
+
+from six import binary_type, string_types, text_type
+from six.moves import filter, map, range, zip, zip_longest
+
+from .recipes import consume, flatten, take
+
+__all__ = [
+ 'adjacent',
+ 'always_iterable',
+ 'always_reversible',
+ 'bucket',
+ 'chunked',
+ 'circular_shifts',
+ 'collapse',
+ 'collate',
+ 'consecutive_groups',
+ 'consumer',
+ 'count_cycle',
+ 'difference',
+ 'distinct_permutations',
+ 'distribute',
+ 'divide',
+ 'exactly_n',
+ 'first',
+ 'groupby_transform',
+ 'ilen',
+ 'interleave_longest',
+ 'interleave',
+ 'intersperse',
+ 'islice_extended',
+ 'iterate',
+ 'locate',
+ 'lstrip',
+ 'make_decorator',
+ 'map_reduce',
+ 'numeric_range',
+ 'one',
+ 'padded',
+ 'peekable',
+ 'rstrip',
+ 'run_length',
+ 'seekable',
+ 'SequenceView',
+ 'side_effect',
+ 'sliced',
+ 'sort_together',
+ 'split_at',
+ 'split_after',
+ 'split_before',
+ 'spy',
+ 'stagger',
+ 'strip',
+ 'unique_to_each',
+ 'windowed',
+ 'with_iter',
+ 'zip_offset',
+]
+
+_marker = object()
+
+
+def chunked(iterable, n):
+ """Break *iterable* into lists of length *n*:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6], 3))
+ [[1, 2, 3], [4, 5, 6]]
+
+ If the length of *iterable* is not evenly divisible by *n*, the last
+ returned list will be shorter:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6, 7, 8], 3))
+ [[1, 2, 3], [4, 5, 6], [7, 8]]
+
+ To use a fill-in value instead, see the :func:`grouper` recipe.
+
+ :func:`chunked` is useful for splitting up a computation on a large number
+ of keys into batches, to be pickled and sent off to worker processes. One
+ example is operations on rows in MySQL, which does not implement
+ server-side cursors properly and would otherwise load the entire dataset
+ into RAM on the client.
+
+ """
+ return iter(partial(take, n, iter(iterable)), [])
+
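As an illustrative aside, the one-liner above relies on the two-argument form of ``iter``: ``take(n, it)`` is called repeatedly until it returns the empty-list sentinel. A rough generator equivalent (``chunked_sketch`` is just a hypothetical name for this sketch):

    from itertools import islice

    def chunked_sketch(iterable, n):
        # Rough re-spelling of chunked(); take(n, it) is list(islice(it, n)).
        it = iter(iterable)
        while True:
            chunk = list(islice(it, n))
            if not chunk:          # the empty list acts as the stop sentinel
                return
            yield chunk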
+
+def first(iterable, default=_marker):
+ """Return the first item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> first([0, 1, 2, 3])
+ 0
+ >>> first([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+
+ :func:`first` is useful when you have a generator of expensive-to-retrieve
+ values and want any arbitrary one. It is marginally shorter than
+ ``next(iter(iterable), default)``.
+
+ """
+ try:
+ return next(iter(iterable))
+ except StopIteration:
+ # I'm on the edge about raising ValueError instead of StopIteration. At
+ # the moment, ValueError wins, because the caller could conceivably
+ # want to do something different with flow control when I raise the
+ # exception, and it's weird to explicitly catch StopIteration.
+ if default is _marker:
+ raise ValueError('first() was called on an empty iterable, and no '
+ 'default value was provided.')
+ return default
+
+
+class peekable(object):
+ """Wrap an iterator to allow lookahead and prepending elements.
+
+ Call :meth:`peek` on the result to get the value that will be returned
+ by :func:`next`. This won't advance the iterator:
+
+ >>> p = peekable(['a', 'b'])
+ >>> p.peek()
+ 'a'
+ >>> next(p)
+ 'a'
+
+ Pass :meth:`peek` a default value to return that instead of raising
+ ``StopIteration`` when the iterator is exhausted.
+
+ >>> p = peekable([])
+ >>> p.peek('hi')
+ 'hi'
+
+ peekables also offer a :meth:`prepend` method, which "inserts" items
+ at the head of the iterable:
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> p.peek()
+ 11
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ peekables can be indexed. Index 0 is the item that will be returned by
+    :func:`next`, index 1 is the item after that, and so on.
+ The values up to the given index will be cached.
+
+ >>> p = peekable(['a', 'b', 'c', 'd'])
+ >>> p[0]
+ 'a'
+ >>> p[1]
+ 'b'
+ >>> next(p)
+ 'a'
+
+ Negative indexes are supported, but be aware that they will cache the
+ remaining items in the source iterator, which may require significant
+ storage.
+
+ To check whether a peekable is exhausted, check its truth value:
+
+ >>> p = peekable(['a', 'b'])
+ >>> if p: # peekable has items
+ ... list(p)
+ ['a', 'b']
+        >>> if not p:  # peekable is exhausted
+ ... list(p)
+ []
+
+ """
+ def __init__(self, iterable):
+ self._it = iter(iterable)
+ self._cache = deque()
+
+ def __iter__(self):
+ return self
+
+ def __bool__(self):
+ try:
+ self.peek()
+ except StopIteration:
+ return False
+ return True
+
+ def __nonzero__(self):
+ # For Python 2 compatibility
+ return self.__bool__()
+
+ def peek(self, default=_marker):
+ """Return the item that will be next returned from ``next()``.
+
+ Return ``default`` if there are no items left. If ``default`` is not
+ provided, raise ``StopIteration``.
+
+ """
+ if not self._cache:
+ try:
+ self._cache.append(next(self._it))
+ except StopIteration:
+ if default is _marker:
+ raise
+ return default
+ return self._cache[0]
+
+ def prepend(self, *items):
+ """Stack up items to be the next ones returned from ``next()`` or
+ ``self.peek()``. The items will be returned in
+ first in, first out order::
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ It is possible, by prepending items, to "resurrect" a peekable that
+ previously raised ``StopIteration``.
+
+ >>> p = peekable([])
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+ >>> p.prepend(1)
+ >>> next(p)
+ 1
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+
+ """
+ self._cache.extendleft(reversed(items))
+
+ def __next__(self):
+ if self._cache:
+ return self._cache.popleft()
+
+ return next(self._it)
+
+ next = __next__ # For Python 2 compatibility
+
+ def _get_slice(self, index):
+ # Normalize the slice's arguments
+ step = 1 if (index.step is None) else index.step
+ if step > 0:
+ start = 0 if (index.start is None) else index.start
+ stop = maxsize if (index.stop is None) else index.stop
+ elif step < 0:
+ start = -1 if (index.start is None) else index.start
+ stop = (-maxsize - 1) if (index.stop is None) else index.stop
+ else:
+ raise ValueError('slice step cannot be zero')
+
+ # If either the start or stop index is negative, we'll need to cache
+ # the rest of the iterable in order to slice from the right side.
+ if (start < 0) or (stop < 0):
+ self._cache.extend(self._it)
+ # Otherwise we'll need to find the rightmost index and cache to that
+ # point.
+ else:
+ n = min(max(start, stop) + 1, maxsize)
+ cache_len = len(self._cache)
+ if n >= cache_len:
+ self._cache.extend(islice(self._it, n - cache_len))
+
+ return list(self._cache)[index]
+
+ def __getitem__(self, index):
+ if isinstance(index, slice):
+ return self._get_slice(index)
+
+ cache_len = len(self._cache)
+ if index < 0:
+ self._cache.extend(self._it)
+ elif index >= cache_len:
+ self._cache.extend(islice(self._it, index + 1 - cache_len))
+
+ return self._cache[index]
+
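A short illustration of the caching behaviour described in the class docstring: indexing and slicing pull only as much of the source as they need, and cached items are never re-fetched.

    from more_itertools import peekable

    p = peekable(iter('abcdef'))
    assert p[2] == 'c'           # caches 'a', 'b', 'c' without consuming them
    assert next(p) == 'a'        # cached items still come out in order
    assert p[:2] == ['b', 'c']   # slices are served from the cache first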
+
+def _collate(*iterables, **kwargs):
+ """Helper for ``collate()``, called when the user is using the ``reverse``
+ or ``key`` keyword arguments on Python versions below 3.5.
+
+ """
+ key = kwargs.pop('key', lambda a: a)
+ reverse = kwargs.pop('reverse', False)
+
+ min_or_max = partial(max if reverse else min, key=itemgetter(0))
+ peekables = [peekable(it) for it in iterables]
+ peekables = [p for p in peekables if p] # Kill empties.
+ while peekables:
+ _, p = min_or_max((key(p.peek()), p) for p in peekables)
+ yield next(p)
+ peekables = [x for x in peekables if x]
+
+
+def collate(*iterables, **kwargs):
+ """Return a sorted merge of the items from each of several already-sorted
+ *iterables*.
+
+ >>> list(collate('ACDZ', 'AZ', 'JKL'))
+ ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z']
+
+ Works lazily, keeping only the next value from each iterable in memory. Use
+    :func:`collate` to, for example, perform an n-way mergesort of items that
+ don't fit in memory.
+
+ If a *key* function is specified, the iterables will be sorted according
+ to its result:
+
+ >>> key = lambda s: int(s) # Sort by numeric value, not by string
+ >>> list(collate(['1', '10'], ['2', '11'], key=key))
+ ['1', '2', '10', '11']
+
+
+ If the *iterables* are sorted in descending order, set *reverse* to
+ ``True``:
+
+ >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True))
+ [5, 4, 3, 2, 1, 0]
+
+ If the elements of the passed-in iterables are out of order, you might get
+ unexpected results.
+
+ On Python 2.7, this function delegates to :func:`heapq.merge` if neither
+    of the keyword arguments is specified. On Python 3.5+, this function
+ is an alias for :func:`heapq.merge`.
+
+ """
+ if not kwargs:
+ return merge(*iterables)
+
+ return _collate(*iterables, **kwargs)
+
+
+# If using Python version 3.5 or greater, heapq.merge() will be faster than
+# collate - use that instead.
+if version_info >= (3, 5, 0):
+ _collate_docstring = collate.__doc__
+ collate = partial(merge)
+ collate.__doc__ = _collate_docstring
+
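The swap works because ``heapq.merge`` gained ``key`` and ``reverse`` parameters in Python 3.5, covering the keyword cases that ``_collate`` handles by hand; a quick illustrative check:

    from heapq import merge

    # Same result as the collate() doctest above, straight from the stdlib.
    assert list(merge(['1', '10'], ['2', '11'], key=int)) == ['1', '2', '10', '11']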
+
+def consumer(func):
+ """Decorator that automatically advances a PEP-342-style "reverse iterator"
+ to its first yield point so you don't have to call ``next()`` on it
+ manually.
+
+ >>> @consumer
+ ... def tally():
+ ... i = 0
+ ... while True:
+ ... print('Thing number %s is %s.' % (i, (yield)))
+ ... i += 1
+ ...
+ >>> t = tally()
+ >>> t.send('red')
+ Thing number 0 is red.
+ >>> t.send('fish')
+ Thing number 1 is fish.
+
+ Without the decorator, you would have to call ``next(t)`` before
+ ``t.send()`` could be used.
+
+ """
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ gen = func(*args, **kwargs)
+ next(gen)
+ return gen
+ return wrapper
+
+
+def ilen(iterable):
+ """Return the number of items in *iterable*.
+
+ >>> ilen(x for x in range(1000000) if x % 3 == 0)
+ 333334
+
+ This consumes the iterable, so handle with care.
+
+ """
+ # maxlen=1 only stores the last item in the deque
+ d = deque(enumerate(iterable, 1), maxlen=1)
+ # since we started enumerate at 1,
+ # the first item of the last pair will be the length of the iterable
+ # (assuming there were items)
+ return d[0][0] if d else 0
+
+
+def iterate(func, start):
+ """Return ``start``, ``func(start)``, ``func(func(start))``, ...
+
+ >>> from itertools import islice
+ >>> list(islice(iterate(lambda x: 2*x, 1), 10))
+ [1, 2, 4, 8, 16, 32, 64, 128, 256, 512]
+
+ """
+ while True:
+ yield start
+ start = func(start)
+
+
+def with_iter(context_manager):
+ """Wrap an iterable in a ``with`` statement, so it closes once exhausted.
+
+ For example, this will close the file when the iterator is exhausted::
+
+ upper_lines = (line.upper() for line in with_iter(open('foo')))
+
+ Any context manager which returns an iterable is a candidate for
+ ``with_iter``.
+
+ """
+ with context_manager as iterable:
+ for item in iterable:
+ yield item
+
+
+def one(iterable, too_short=None, too_long=None):
+ """Return the first item from *iterable*, which is expected to contain only
+ that item. Raise an exception if *iterable* is empty or has more than one
+ item.
+
+ :func:`one` is useful for ensuring that an iterable contains only one item.
+ For example, it can be used to retrieve the result of a database query
+ that is expected to return a single row.
+
+ If *iterable* is empty, ``ValueError`` will be raised. You may specify a
+ different exception with the *too_short* keyword:
+
+ >>> it = []
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+        ValueError: too few items in iterable (expected 1)
+ >>> too_short = IndexError('too few items')
+ >>> one(it, too_short=too_short) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ IndexError: too few items
+
+ Similarly, if *iterable* contains more than one item, ``ValueError`` will
+ be raised. You may specify a different exception with the *too_long*
+ keyword:
+
+ >>> it = ['too', 'many']
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+        ValueError: too many items in iterable (expected 1)
+ >>> too_long = RuntimeError
+ >>> one(it, too_long=too_long) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+
+ Note that :func:`one` attempts to advance *iterable* twice to ensure there
+ is only one item. If there is more than one, both items will be discarded.
+ See :func:`spy` or :func:`peekable` to check iterable contents less
+ destructively.
+
+ """
+ it = iter(iterable)
+
+ try:
+ value = next(it)
+ except StopIteration:
+ raise too_short or ValueError('too few items in iterable (expected 1)')
+
+ try:
+ next(it)
+ except StopIteration:
+ pass
+ else:
+ raise too_long or ValueError('too many items in iterable (expected 1)')
+
+ return value
+
+
+def distinct_permutations(iterable):
+ """Yield successive distinct permutations of the elements in *iterable*.
+
+ >>> sorted(distinct_permutations([1, 0, 1]))
+ [(0, 1, 1), (1, 0, 1), (1, 1, 0)]
+
+ Equivalent to ``set(permutations(iterable))``, except duplicates are not
+ generated and thrown away. For larger input sequences this is much more
+ efficient.
+
+ Duplicate permutations arise when there are duplicated elements in the
+ input iterable. The number of items returned is
+ `n! / (x_1! * x_2! * ... * x_n!)`, where `n` is the total number of
+ items input, and each `x_i` is the count of a distinct item in the input
+ sequence.
+
+ """
+ def perm_unique_helper(item_counts, perm, i):
+ """Internal helper function
+
+ :arg item_counts: Stores the unique items in ``iterable`` and how many
+ times they are repeated
+ :arg perm: The permutation that is being built for output
+ :arg i: The index of the permutation being modified
+
+ The output permutations are built up recursively; the distinct items
+ are placed until their repetitions are exhausted.
+ """
+ if i < 0:
+ yield tuple(perm)
+ else:
+ for item in item_counts:
+ if item_counts[item] <= 0:
+ continue
+ perm[i] = item
+ item_counts[item] -= 1
+ for x in perm_unique_helper(item_counts, perm, i - 1):
+ yield x
+ item_counts[item] += 1
+
+ item_counts = Counter(iterable)
+ length = sum(item_counts.values())
+
+ return perm_unique_helper(item_counts, [None] * length, length - 1)
+
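Plugging the doctest input into the count formula: ``[1, 0, 1]`` has ``n = 3`` items with counts 2 (for ``1``) and 1 (for ``0``), so ``3! / (2! * 1!) = 3`` distinct permutations, which a quick illustrative check confirms:

    from math import factorial

    from more_itertools import distinct_permutations

    perms = list(distinct_permutations([1, 0, 1]))
    assert len(perms) == factorial(3) // (factorial(2) * factorial(1)) == 3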
+
+def intersperse(e, iterable, n=1):
+ """Intersperse filler element *e* among the items in *iterable*, leaving
+ *n* items between each filler element.
+
+ >>> list(intersperse('!', [1, 2, 3, 4, 5]))
+ [1, '!', 2, '!', 3, '!', 4, '!', 5]
+
+ >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2))
+ [1, 2, None, 3, 4, None, 5]
+
+ """
+ if n == 0:
+ raise ValueError('n must be > 0')
+ elif n == 1:
+ # interleave(repeat(e), iterable) -> e, x_0, e, e, x_1, e, x_2...
+ # islice(..., 1, None) -> x_0, e, e, x_1, e, x_2...
+ return islice(interleave(repeat(e), iterable), 1, None)
+ else:
+ # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]...
+ # islice(..., 1, None) -> [x_0, x_1], [e], [x_2, x_3]...
+ # flatten(...) -> x_0, x_1, e, x_2, x_3...
+ filler = repeat([e])
+ chunks = chunked(iterable, n)
+ return flatten(islice(interleave(filler, chunks), 1, None))
+
+
+def unique_to_each(*iterables):
+ """Return the elements from each of the input iterables that aren't in the
+ other input iterables.
+
+ For example, suppose you have a set of packages, each with a set of
+ dependencies::
+
+ {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}}
+
+ If you remove one package, which dependencies can also be removed?
+
+ If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not
+ associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for
+ ``pkg_2``, and ``D`` is only needed for ``pkg_3``::
+
+ >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'})
+ [['A'], ['C'], ['D']]
+
+ If there are duplicates in one input iterable that aren't in the others
+ they will be duplicated in the output. Input order is preserved::
+
+ >>> unique_to_each("mississippi", "missouri")
+ [['p', 'p'], ['o', 'u', 'r']]
+
+ It is assumed that the elements of each iterable are hashable.
+
+ """
+ pool = [list(it) for it in iterables]
+ counts = Counter(chain.from_iterable(map(set, pool)))
+ uniques = {element for element in counts if counts[element] == 1}
+ return [list(filter(uniques.__contains__, it)) for it in pool]
+
+
+def windowed(seq, n, fillvalue=None, step=1):
+ """Return a sliding window of width *n* over the given iterable.
+
+ >>> all_windows = windowed([1, 2, 3, 4, 5], 3)
+ >>> list(all_windows)
+ [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
+
+ When the window is larger than the iterable, *fillvalue* is used in place
+ of missing values::
+
+ >>> list(windowed([1, 2, 3], 4))
+ [(1, 2, 3, None)]
+
+ Each window will advance in increments of *step*:
+
+ >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2))
+ [(1, 2, 3), (3, 4, 5), (5, 6, '!')]
+
+ """
+ if n < 0:
+ raise ValueError('n must be >= 0')
+ if n == 0:
+ yield tuple()
+ return
+ if step < 1:
+ raise ValueError('step must be >= 1')
+
+ it = iter(seq)
+ window = deque([], n)
+ append = window.append
+
+ # Initial deque fill
+ for _ in range(n):
+ append(next(it, fillvalue))
+ yield tuple(window)
+
+ # Appending new items to the right causes old items to fall off the left
+ i = 0
+ for item in it:
+ append(item)
+ i = (i + 1) % step
+ if i % step == 0:
+ yield tuple(window)
+
+ # If there are items from the iterable in the window, pad with the given
+ # value and emit them.
+ if (i % step) and (step - i < n):
+ for _ in range(step - i):
+ append(fillvalue)
+ yield tuple(window)
+
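Tracing the docstring's ``step=2`` example shows how the final branch pads the last window: the initial fill yields ``(1, 2, 3)``; items 4 through 6 then cycle ``i`` through 1, 0 (yielding ``(3, 4, 5)``) and 1; the loop ends with ``i == 1`` and ``step - i == 1 < n``, so one fill value is appended to emit ``(5, 6, '!')``. As a check:

    from more_itertools import windowed

    assert list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2)) == [
        (1, 2, 3), (3, 4, 5), (5, 6, '!'),
    ]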
+
+class bucket(object):
+ """Wrap *iterable* and return an object that buckets it iterable into
+ child iterables based on a *key* function.
+
+ >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
+ >>> s = bucket(iterable, key=lambda x: x[0])
+ >>> a_iterable = s['a']
+ >>> next(a_iterable)
+ 'a1'
+ >>> next(a_iterable)
+ 'a2'
+ >>> list(s['b'])
+ ['b1', 'b2', 'b3']
+
+ The original iterable will be advanced and its items will be cached until
+ they are used by the child iterables. This may require significant storage.
+
+ By default, attempting to select a bucket to which no items belong will
+ exhaust the iterable and cache all values.
+ If you specify a *validator* function, selected buckets will instead be
+ checked against it.
+
+ >>> from itertools import count
+ >>> it = count(1, 2) # Infinite sequence of odd numbers
+ >>> key = lambda x: x % 10 # Bucket by last digit
+ >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only
+ >>> s = bucket(it, key=key, validator=validator)
+ >>> 2 in s
+ False
+ >>> list(s[2])
+ []
+
+ """
+ def __init__(self, iterable, key, validator=None):
+ self._it = iter(iterable)
+ self._key = key
+ self._cache = defaultdict(deque)
+ self._validator = validator or (lambda x: True)
+
+ def __contains__(self, value):
+ if not self._validator(value):
+ return False
+
+ try:
+ item = next(self[value])
+ except StopIteration:
+ return False
+ else:
+ self._cache[value].appendleft(item)
+
+ return True
+
+ def _get_values(self, value):
+ """
+ Helper to yield items from the parent iterator that match *value*.
+ Items that don't match are stored in the local cache as they
+ are encountered.
+ """
+ while True:
+ # If we've cached some items that match the target value, emit
+ # the first one and evict it from the cache.
+ if self._cache[value]:
+ yield self._cache[value].popleft()
+ # Otherwise we need to advance the parent iterator to search for
+ # a matching item, caching the rest.
+ else:
+ while True:
+ try:
+ item = next(self._it)
+ except StopIteration:
+ return
+ item_value = self._key(item)
+ if item_value == value:
+ yield item
+ break
+ elif self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ def __getitem__(self, value):
+ if not self._validator(value):
+ return iter(())
+
+ return self._get_values(value)
+
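Because non-matching items are parked in the per-key cache rather than thrown away, draining one bucket leaves the others intact; a small illustration:

    from more_itertools import bucket

    s = bucket(['a1', 'b1', 'a2', 'b2'], key=lambda x: x[0])
    assert list(s['b']) == ['b1', 'b2']   # scans the source, caching the 'a' items
    assert list(s['a']) == ['a1', 'a2']   # served entirely from the cache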
+
+def spy(iterable, n=1):
+ """Return a 2-tuple with a list containing the first *n* elements of
+ *iterable*, and an iterator with the same items as *iterable*.
+ This allows you to "look ahead" at the items in the iterable without
+ advancing it.
+
+ There is one item in the list by default:
+
+ >>> iterable = 'abcdefg'
+ >>> head, iterable = spy(iterable)
+ >>> head
+ ['a']
+ >>> list(iterable)
+ ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+
+ You may use unpacking to retrieve items instead of lists:
+
+ >>> (head,), iterable = spy('abcdefg')
+ >>> head
+ 'a'
+ >>> (first, second), iterable = spy('abcdefg', 2)
+ >>> first
+ 'a'
+ >>> second
+ 'b'
+
+ The number of items requested can be larger than the number of items in
+ the iterable:
+
+ >>> iterable = [1, 2, 3, 4, 5]
+ >>> head, iterable = spy(iterable, 10)
+ >>> head
+ [1, 2, 3, 4, 5]
+ >>> list(iterable)
+ [1, 2, 3, 4, 5]
+
+ """
+ it = iter(iterable)
+ head = take(n, it)
+
+ return head, chain(head, it)
+
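Note that the second value returned is not the original iterator somehow rewound; it is a fresh ``chain`` that replays the captured ``head`` and then continues with the untouched remainder, which is why looking ahead loses nothing:

    from more_itertools import spy

    head, it = spy(iter('abc'), 2)
    assert head == ['a', 'b']
    assert list(it) == ['a', 'b', 'c']   # 'a' and 'b' are replayed by chain()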
+
+def interleave(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ until the shortest is exhausted.
+
+ >>> list(interleave([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7]
+
+ For a version that doesn't terminate after the shortest iterable is
+ exhausted, see :func:`interleave_longest`.
+
+ """
+ return chain.from_iterable(zip(*iterables))
+
+
+def interleave_longest(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ skipping any that are exhausted.
+
+ >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7, 3, 8]
+
+ This function produces the same output as :func:`roundrobin`, but may
+ perform better for some inputs (in particular when the number of iterables
+ is large).
+
+ """
+ i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker))
+ return (x for x in i if x is not _marker)
+
+
+def collapse(iterable, base_type=None, levels=None):
+ """Flatten an iterable with multiple levels of nesting (e.g., a list of
+ lists of tuples) into non-iterable types.
+
+ >>> iterable = [(1, 2), ([3, 4], [[5], [6]])]
+ >>> list(collapse(iterable))
+ [1, 2, 3, 4, 5, 6]
+
+ String types are not considered iterable and will not be collapsed.
+ To avoid collapsing other types, specify *base_type*:
+
+ >>> iterable = ['ab', ('cd', 'ef'), ['gh', 'ij']]
+ >>> list(collapse(iterable, base_type=tuple))
+ ['ab', ('cd', 'ef'), 'gh', 'ij']
+
+ Specify *levels* to stop flattening after a certain level:
+
+ >>> iterable = [('a', ['b']), ('c', ['d'])]
+ >>> list(collapse(iterable)) # Fully flattened
+ ['a', 'b', 'c', 'd']
+ >>> list(collapse(iterable, levels=1)) # Only one level flattened
+ ['a', ['b'], 'c', ['d']]
+
+ """
+ def walk(node, level):
+ if (
+ ((levels is not None) and (level > levels)) or
+ isinstance(node, string_types) or
+ ((base_type is not None) and isinstance(node, base_type))
+ ):
+ yield node
+ return
+
+ try:
+ tree = iter(node)
+ except TypeError:
+ yield node
+ return
+ else:
+ for child in tree:
+ for x in walk(child, level + 1):
+ yield x
+
+ for x in walk(iterable, 0):
+ yield x
+
+
+def side_effect(func, iterable, chunk_size=None, before=None, after=None):
+ """Invoke *func* on each item in *iterable* (or on each *chunk_size* group
+ of items) before yielding the item.
+
+ `func` must be a function that takes a single argument. Its return value
+ will be discarded.
+
+ *before* and *after* are optional functions that take no arguments. They
+ will be executed before iteration starts and after it ends, respectively.
+
+ `side_effect` can be used for logging, updating progress bars, or anything
+ that is not functionally "pure."
+
+ Emitting a status message:
+
+ >>> from more_itertools import consume
+ >>> func = lambda item: print('Received {}'.format(item))
+ >>> consume(side_effect(func, range(2)))
+ Received 0
+ Received 1
+
+ Operating on chunks of items:
+
+ >>> pair_sums = []
+ >>> func = lambda chunk: pair_sums.append(sum(chunk))
+ >>> list(side_effect(func, [0, 1, 2, 3, 4, 5], 2))
+ [0, 1, 2, 3, 4, 5]
+ >>> list(pair_sums)
+ [1, 5, 9]
+
+ Writing to a file-like object:
+
+ >>> from io import StringIO
+ >>> from more_itertools import consume
+ >>> f = StringIO()
+ >>> func = lambda x: print(x, file=f)
+ >>> before = lambda: print(u'HEADER', file=f)
+ >>> after = f.close
+ >>> it = [u'a', u'b', u'c']
+ >>> consume(side_effect(func, it, before=before, after=after))
+ >>> f.closed
+ True
+
+ """
+ try:
+ if before is not None:
+ before()
+
+ if chunk_size is None:
+ for item in iterable:
+ func(item)
+ yield item
+ else:
+ for chunk in chunked(iterable, chunk_size):
+ func(chunk)
+ for item in chunk:
+ yield item
+ finally:
+ if after is not None:
+ after()
+
+
+def sliced(seq, n):
+ """Yield slices of length *n* from the sequence *seq*.
+
+ >>> list(sliced((1, 2, 3, 4, 5, 6), 3))
+ [(1, 2, 3), (4, 5, 6)]
+
+ If the length of the sequence is not divisible by the requested slice
+ length, the last slice will be shorter.
+
+ >>> list(sliced((1, 2, 3, 4, 5, 6, 7, 8), 3))
+ [(1, 2, 3), (4, 5, 6), (7, 8)]
+
+ This function will only work for iterables that support slicing.
+ For non-sliceable iterables, see :func:`chunked`.
+
+ """
+ return takewhile(bool, (seq[i: i + n] for i in count(0, n)))
+
+
+def split_at(iterable, pred):
+ """Yield lists of items from *iterable*, where each list is delimited by
+ an item where callable *pred* returns ``True``. The lists do not include
+ the delimiting items.
+
+ >>> list(split_at('abcdcba', lambda x: x == 'b'))
+ [['a'], ['c', 'd', 'c'], ['a']]
+
+ >>> list(split_at(range(10), lambda n: n % 2 == 1))
+ [[0], [2], [4], [6], [8], []]
+ """
+ buf = []
+ for item in iterable:
+ if pred(item):
+ yield buf
+ buf = []
+ else:
+ buf.append(item)
+ yield buf
+
+
+def split_before(iterable, pred):
+ """Yield lists of items from *iterable*, where each list starts with an
+ item where callable *pred* returns ``True``:
+
+ >>> list(split_before('OneTwo', lambda s: s.isupper()))
+ [['O', 'n', 'e'], ['T', 'w', 'o']]
+
+ >>> list(split_before(range(10), lambda n: n % 3 == 0))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
+
+ """
+ buf = []
+ for item in iterable:
+ if pred(item) and buf:
+ yield buf
+ buf = []
+ buf.append(item)
+ yield buf
+
+
+def split_after(iterable, pred):
+ """Yield lists of items from *iterable*, where each list ends with an
+ item where callable *pred* returns ``True``:
+
+ >>> list(split_after('one1two2', lambda s: s.isdigit()))
+ [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']]
+
+ >>> list(split_after(range(10), lambda n: n % 3 == 0))
+ [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]]
+
+ """
+ buf = []
+ for item in iterable:
+ buf.append(item)
+ if pred(item) and buf:
+ yield buf
+ buf = []
+ if buf:
+ yield buf
+
+
+def padded(iterable, fillvalue=None, n=None, next_multiple=False):
+ """Yield the elements from *iterable*, followed by *fillvalue*, such that
+ at least *n* items are emitted.
+
+ >>> list(padded([1, 2, 3], '?', 5))
+ [1, 2, 3, '?', '?']
+
+ If *next_multiple* is ``True``, *fillvalue* will be emitted until the
+ number of items emitted is a multiple of *n*::
+
+ >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True))
+ [1, 2, 3, 4, None, None]
+
+ If *n* is ``None``, *fillvalue* will be emitted indefinitely.
+
+ """
+ it = iter(iterable)
+ if n is None:
+ for item in chain(it, repeat(fillvalue)):
+ yield item
+ elif n < 1:
+ raise ValueError('n must be at least 1')
+ else:
+ item_count = 0
+ for item in it:
+ yield item
+ item_count += 1
+
+ remaining = (n - item_count) % n if next_multiple else n - item_count
+ for _ in range(remaining):
+ yield fillvalue
+
+
+def distribute(n, iterable):
+ """Distribute the items from *iterable* among *n* smaller iterables.
+
+ >>> group_1, group_2 = distribute(2, [1, 2, 3, 4, 5, 6])
+ >>> list(group_1)
+ [1, 3, 5]
+ >>> list(group_2)
+ [2, 4, 6]
+
+ If the length of *iterable* is not evenly divisible by *n*, then the
+ length of the returned iterables will not be identical:
+
+ >>> children = distribute(3, [1, 2, 3, 4, 5, 6, 7])
+ >>> [list(c) for c in children]
+ [[1, 4, 7], [2, 5], [3, 6]]
+
+ If the length of *iterable* is smaller than *n*, then the last returned
+ iterables will be empty:
+
+ >>> children = distribute(5, [1, 2, 3])
+ >>> [list(c) for c in children]
+ [[1], [2], [3], [], []]
+
+ This function uses :func:`itertools.tee` and may require significant
+    storage. If you need the order of items in the smaller iterables to match
+    the original iterable, see :func:`divide`.
+
+ """
+ if n < 1:
+ raise ValueError('n must be at least 1')
+
+ children = tee(iterable, n)
+ return [islice(it, index, None, n) for index, it in enumerate(children)]
+
+
+def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None):
+ """Yield tuples whose elements are offset from *iterable*.
+ The amount by which the `i`-th item in each tuple is offset is given by
+ the `i`-th item in *offsets*.
+
+ >>> list(stagger([0, 1, 2, 3]))
+ [(None, 0, 1), (0, 1, 2), (1, 2, 3)]
+ >>> list(stagger(range(8), offsets=(0, 2, 4)))
+ [(0, 2, 4), (1, 3, 5), (2, 4, 6), (3, 5, 7)]
+
+ By default, the sequence will end when the final element of a tuple is the
+ last item in the iterable. To continue until the first element of a tuple
+ is the last item in the iterable, set *longest* to ``True``::
+
+ >>> list(stagger([0, 1, 2, 3], longest=True))
+ [(None, 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, None), (3, None, None)]
+
+ By default, ``None`` will be used to replace offsets beyond the end of the
+ sequence. Specify *fillvalue* to use some other value.
+
+ """
+ children = tee(iterable, len(offsets))
+
+ return zip_offset(
+ *children, offsets=offsets, longest=longest, fillvalue=fillvalue
+ )
+
+
+def zip_offset(*iterables, **kwargs):
+ """``zip`` the input *iterables* together, but offset the `i`-th iterable
+ by the `i`-th item in *offsets*.
+
+ >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1)))
+ [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')]
+
+ This can be used as a lightweight alternative to SciPy or pandas to analyze
+    data sets in which some series have a lead or lag relationship.
+
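+    For example, pairing each value of one series with the next value of a
+    second series (a small illustrative sketch of such a lag analysis; the
+    ``days`` and ``sales`` data are made up):
+
+    >>> days = ['Mon', 'Tue', 'Wed', 'Thu']
+    >>> sales = [10, 11, 12, 13]
+    >>> list(zip_offset(days, sales, offsets=(0, 1)))
+    [('Mon', 11), ('Tue', 12), ('Wed', 13)]
+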
+ By default, the sequence will end when the shortest iterable is exhausted.
+ To continue until the longest iterable is exhausted, set *longest* to
+ ``True``.
+
+ >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1), longest=True))
+ [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')]
+
+ By default, ``None`` will be used to replace offsets beyond the end of the
+ sequence. Specify *fillvalue* to use some other value.
+
+ """
+ offsets = kwargs['offsets']
+ longest = kwargs.get('longest', False)
+ fillvalue = kwargs.get('fillvalue', None)
+
+ if len(iterables) != len(offsets):
+ raise ValueError("Number of iterables and offsets didn't match")
+
+ staggered = []
+ for it, n in zip(iterables, offsets):
+ if n < 0:
+ staggered.append(chain(repeat(fillvalue, -n), it))
+ elif n > 0:
+ staggered.append(islice(it, n, None))
+ else:
+ staggered.append(it)
+
+ if longest:
+ return zip_longest(*staggered, fillvalue=fillvalue)
+
+ return zip(*staggered)
+
+
+def sort_together(iterables, key_list=(0,), reverse=False):
+ """Return the input iterables sorted together, with *key_list* as the
+ priority for sorting. All iterables are trimmed to the length of the
+ shortest one.
+
+ This can be used like the sorting function in a spreadsheet. If each
+ iterable represents a column of data, the key list determines which
+ columns are used for sorting.
+
+ By default, all iterables are sorted using the ``0``-th iterable::
+
+ >>> iterables = [(4, 3, 2, 1), ('a', 'b', 'c', 'd')]
+ >>> sort_together(iterables)
+ [(1, 2, 3, 4), ('d', 'c', 'b', 'a')]
+
+ Set a different key list to sort according to another iterable.
+    Specifying multiple keys dictates how ties are broken::
+
+ >>> iterables = [(3, 1, 2), (0, 1, 0), ('c', 'b', 'a')]
+ >>> sort_together(iterables, key_list=(1, 2))
+ [(2, 3, 1), (0, 0, 1), ('a', 'c', 'b')]
+
+ Set *reverse* to ``True`` to sort in descending order.
+
+ >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True)
+ [(3, 2, 1), ('a', 'b', 'c')]
+
+ """
+ return list(zip(*sorted(zip(*iterables),
+ key=itemgetter(*key_list),
+ reverse=reverse)))
+
+
+def divide(n, iterable):
+ """Divide the elements from *iterable* into *n* parts, maintaining
+ order.
+
+ >>> group_1, group_2 = divide(2, [1, 2, 3, 4, 5, 6])
+ >>> list(group_1)
+ [1, 2, 3]
+ >>> list(group_2)
+ [4, 5, 6]
+
+ If the length of *iterable* is not evenly divisible by *n*, then the
+ length of the returned iterables will not be identical:
+
+ >>> children = divide(3, [1, 2, 3, 4, 5, 6, 7])
+ >>> [list(c) for c in children]
+ [[1, 2, 3], [4, 5], [6, 7]]
+
+    If the length of the iterable is smaller than *n*, then the last returned
+ iterables will be empty:
+
+ >>> children = divide(5, [1, 2, 3])
+ >>> [list(c) for c in children]
+ [[1], [2], [3], [], []]
+
+ This function will exhaust the iterable before returning and may require
+ significant storage. If order is not important, see :func:`distribute`,
+ which does not first pull the iterable into memory.
+
+ """
+ if n < 1:
+ raise ValueError('n must be at least 1')
+
+ seq = tuple(iterable)
+ q, r = divmod(len(seq), n)
+
+ ret = []
+ for i in range(n):
+ start = (i * q) + (i if i < r else r)
+ stop = ((i + 1) * q) + (i + 1 if i + 1 < r else r)
+ ret.append(iter(seq[start:stop]))
+
+ return ret
+
+
+def always_iterable(obj, base_type=(text_type, binary_type)):
+ """If *obj* is iterable, return an iterator over its items::
+
+ >>> obj = (1, 2, 3)
+ >>> list(always_iterable(obj))
+ [1, 2, 3]
+
+ If *obj* is not iterable, return a one-item iterable containing *obj*::
+
+ >>> obj = 1
+ >>> list(always_iterable(obj))
+ [1]
+
+ If *obj* is ``None``, return an empty iterable:
+
+ >>> obj = None
+ >>> list(always_iterable(None))
+ []
+
+ By default, binary and text strings are not considered iterable::
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj))
+ ['foo']
+
+ If *base_type* is set, objects for which ``isinstance(obj, base_type)``
+ returns ``True`` won't be considered iterable.
+
+ >>> obj = {'a': 1}
+ >>> list(always_iterable(obj)) # Iterate over the dict's keys
+ ['a']
+ >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit
+ [{'a': 1}]
+
+    Set *base_type* to ``None`` to avoid any special handling; anything that
+    Python considers iterable will then be treated as iterable:
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj, base_type=None))
+ ['f', 'o', 'o']
+ """
+ if obj is None:
+ return iter(())
+
+ if (base_type is not None) and isinstance(obj, base_type):
+ return iter((obj,))
+
+ try:
+ return iter(obj)
+ except TypeError:
+ return iter((obj,))
+
+
+def adjacent(predicate, iterable, distance=1):
+ """Return an iterable over `(bool, item)` tuples where the `item` is
+ drawn from *iterable* and the `bool` indicates whether
+ that item satisfies the *predicate* or is adjacent to an item that does.
+
+ For example, to find whether items are adjacent to a ``3``::
+
+ >>> list(adjacent(lambda x: x == 3, range(6)))
+ [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)]
+
+ Set *distance* to change what counts as adjacent. For example, to find
+ whether items are two places away from a ``3``:
+
+ >>> list(adjacent(lambda x: x == 3, range(6), distance=2))
+ [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)]
+
+ This is useful for contextualizing the results of a search function.
+ For example, a code comparison tool might want to identify lines that
+ have changed, but also surrounding lines to give the viewer of the diff
+ context.
+
+ The predicate function will only be called once for each item in the
+ iterable.
+
+ See also :func:`groupby_transform`, which can be used with this function
+ to group ranges of items with the same `bool` value.
+
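+    For example, the runs from the first example above can be grouped like
+    this (an illustrative combination of the two functions):
+
+    >>> from operator import itemgetter
+    >>> pairs = adjacent(lambda x: x == 3, range(6))
+    >>> grouped = groupby_transform(pairs, itemgetter(0), itemgetter(1))
+    >>> [(k, list(g)) for k, g in grouped]
+    [(False, [0, 1]), (True, [2, 3, 4]), (False, [5])]
+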
+ """
+ # Allow distance=0 mainly for testing that it reproduces results with map()
+ if distance < 0:
+ raise ValueError('distance must be at least 0')
+
+ i1, i2 = tee(iterable)
+ padding = [False] * distance
+ selected = chain(padding, map(predicate, i1), padding)
+ adjacent_to_selected = map(any, windowed(selected, 2 * distance + 1))
+ return zip(adjacent_to_selected, i2)
+
+
+def groupby_transform(iterable, keyfunc=None, valuefunc=None):
+ """An extension of :func:`itertools.groupby` that transforms the values of
+ *iterable* after grouping them.
+ *keyfunc* is a function used to compute a grouping key for each item.
+ *valuefunc* is a function for transforming the items after grouping.
+
+ >>> iterable = 'AaaABbBCcA'
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: x.lower()
+ >>> grouper = groupby_transform(iterable, keyfunc, valuefunc)
+ >>> [(k, ''.join(g)) for k, g in grouper]
+ [('A', 'aaaa'), ('B', 'bbb'), ('C', 'cc'), ('A', 'a')]
+
+ *keyfunc* and *valuefunc* default to identity functions if they are not
+ specified.
+
+ :func:`groupby_transform` is useful when grouping elements of an iterable
+ using a separate iterable as the key. To do this, :func:`zip` the iterables
+ and pass a *keyfunc* that extracts the first element and a *valuefunc*
+ that extracts the second element::
+
+ >>> from operator import itemgetter
+ >>> keys = [0, 0, 1, 1, 1, 2, 2, 2, 3]
+ >>> values = 'abcdefghi'
+ >>> iterable = zip(keys, values)
+ >>> grouper = groupby_transform(iterable, itemgetter(0), itemgetter(1))
+ >>> [(k, ''.join(g)) for k, g in grouper]
+ [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')]
+
+ Note that the order of items in the iterable is significant.
+ Only adjacent items are grouped together, so if you don't want any
+ duplicate groups, you should sort the iterable by the key function.
+
+ """
+ valuefunc = (lambda x: x) if valuefunc is None else valuefunc
+ return ((k, map(valuefunc, g)) for k, g in groupby(iterable, keyfunc))
+
+
+def numeric_range(*args):
+ """An extension of the built-in ``range()`` function whose arguments can
+ be any orderable numeric type.
+
+ With only *stop* specified, *start* defaults to ``0`` and *step*
+ defaults to ``1``. The output items will match the type of *stop*:
+
+ >>> list(numeric_range(3.5))
+ [0.0, 1.0, 2.0, 3.0]
+
+ With only *start* and *stop* specified, *step* defaults to ``1``. The
+ output items will match the type of *start*:
+
+ >>> from decimal import Decimal
+ >>> start = Decimal('2.1')
+ >>> stop = Decimal('5.1')
+ >>> list(numeric_range(start, stop))
+ [Decimal('2.1'), Decimal('3.1'), Decimal('4.1')]
+
+ With *start*, *stop*, and *step* specified the output items will match
+ the type of ``start + step``:
+
+ >>> from fractions import Fraction
+ >>> start = Fraction(1, 2) # Start at 1/2
+ >>> stop = Fraction(5, 2) # End at 5/2
+ >>> step = Fraction(1, 2) # Count by 1/2
+ >>> list(numeric_range(start, stop, step))
+ [Fraction(1, 2), Fraction(1, 1), Fraction(3, 2), Fraction(2, 1)]
+
+ If *step* is zero, ``ValueError`` is raised. Negative steps are supported:
+
+ >>> list(numeric_range(3, -1, -1.0))
+ [3.0, 2.0, 1.0, 0.0]
+
+ Be aware of the limitations of floating point numbers; the representation
+ of the yielded numbers may be surprising.
+
+ """
+ argc = len(args)
+ if argc == 1:
+ stop, = args
+ start = type(stop)(0)
+ step = 1
+ elif argc == 2:
+ start, stop = args
+ step = 1
+ elif argc == 3:
+ start, stop, step = args
+ else:
+ err_msg = 'numeric_range takes at most 3 arguments, got {}'
+ raise TypeError(err_msg.format(argc))
+
+ values = (start + (step * n) for n in count())
+ if step > 0:
+ return takewhile(partial(gt, stop), values)
+ elif step < 0:
+ return takewhile(partial(lt, stop), values)
+ else:
+ raise ValueError('numeric_range arg 3 must not be zero')
+
+
+def count_cycle(iterable, n=None):
+ """Cycle through the items from *iterable* up to *n* times, yielding
+ the number of completed cycles along with each item. If *n* is omitted the
+ process repeats indefinitely.
+
+ >>> list(count_cycle('AB', 3))
+ [(0, 'A'), (0, 'B'), (1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')]
+
+ """
+ iterable = tuple(iterable)
+ if not iterable:
+ return iter(())
+ counter = count() if n is None else range(n)
+ return ((i, item) for i in counter for item in iterable)
+
+
+def locate(iterable, pred=bool):
+ """Yield the index of each item in *iterable* for which *pred* returns
+ ``True``.
+
+ *pred* defaults to :func:`bool`, which will select truthy items:
+
+ >>> list(locate([0, 1, 1, 0, 1, 0, 0]))
+ [1, 2, 4]
+
+ Set *pred* to a custom function to, e.g., find the indexes for a particular
+ item:
+
+ >>> list(locate(['a', 'b', 'c', 'b'], lambda x: x == 'b'))
+ [1, 3]
+
+ Use with :func:`windowed` to find the indexes of a sub-sequence:
+
+ >>> from more_itertools import windowed
+ >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
+ >>> sub = [1, 2, 3]
+ >>> pred = lambda w: w == tuple(sub) # windowed() returns tuples
+ >>> list(locate(windowed(iterable, len(sub)), pred=pred))
+ [1, 5, 9]
+
+ Use with :func:`seekable` to find indexes and then retrieve the associated
+ items:
+
+ >>> from itertools import count
+ >>> from more_itertools import seekable
+ >>> source = (3 * n + 1 if (n % 2) else n // 2 for n in count())
+ >>> it = seekable(source)
+ >>> pred = lambda x: x > 100
+ >>> indexes = locate(it, pred=pred)
+ >>> i = next(indexes)
+ >>> it.seek(i)
+ >>> next(it)
+ 106
+
+ """
+ return compress(count(), map(pred, iterable))
+
+
+def lstrip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the beginning
+ for which *pred* returns ``True``.
+
+ For example, to remove a set of items from the start of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(lstrip(iterable, pred))
+ [1, 2, None, 3, False, None]
+
+    This function is analogous to :func:`str.lstrip`, and is essentially
+    a wrapper for :func:`itertools.dropwhile`.
+
+ """
+ return dropwhile(pred, iterable)
+
+
+def rstrip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the end
+ for which *pred* returns ``True``.
+
+ For example, to remove a set of items from the end of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(rstrip(iterable, pred))
+ [None, False, None, 1, 2, None, 3]
+
+ This function is analogous to :func:`str.rstrip`.
+
+ """
+ cache = []
+ cache_append = cache.append
+ for x in iterable:
+ if pred(x):
+ cache_append(x)
+ else:
+ for y in cache:
+ yield y
+ del cache[:]
+ yield x
+
+
+def strip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the
+ beginning and end for which *pred* returns ``True``.
+
+ For example, to remove a set of items from both ends of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(strip(iterable, pred))
+ [1, 2, None, 3]
+
+ This function is analogous to :func:`str.strip`.
+
+ """
+ return rstrip(lstrip(iterable, pred), pred)
+
+
+def islice_extended(iterable, *args):
+ """An extension of :func:`itertools.islice` that supports negative values
+ for *stop*, *start*, and *step*.
+
+ >>> iterable = iter('abcdefgh')
+ >>> list(islice_extended(iterable, -4, -1))
+ ['e', 'f', 'g']
+
+ Slices with negative values require some caching of *iterable*, but this
+ function takes care to minimize the amount of memory required.
+
+ For example, you can use a negative step with an infinite iterator:
+
+ >>> from itertools import count
+ >>> list(islice_extended(count(), 110, 99, -2))
+ [110, 108, 106, 104, 102, 100]
+
+ """
+ s = slice(*args)
+ start = s.start
+ stop = s.stop
+ if s.step == 0:
+ raise ValueError('step argument must be a non-zero integer or None.')
+ step = s.step or 1
+
+ it = iter(iterable)
+
+ if step > 0:
+ start = 0 if (start is None) else start
+
+ if (start < 0):
+ # Consume all but the last -start items
+ cache = deque(enumerate(it, 1), maxlen=-start)
+ len_iter = cache[-1][0] if cache else 0
+
+ # Adjust start to be positive
+ i = max(len_iter + start, 0)
+
+ # Adjust stop to be positive
+ if stop is None:
+ j = len_iter
+ elif stop >= 0:
+ j = min(stop, len_iter)
+ else:
+ j = max(len_iter + stop, 0)
+
+ # Slice the cache
+ n = j - i
+ if n <= 0:
+ return
+
+ for index, item in islice(cache, 0, n, step):
+ yield item
+ elif (stop is not None) and (stop < 0):
+ # Advance to the start position
+ next(islice(it, start, start), None)
+
+ # When stop is negative, we have to carry -stop items while
+ # iterating
+ cache = deque(islice(it, -stop), maxlen=-stop)
+
+ for index, item in enumerate(it):
+ cached_item = cache.popleft()
+ if index % step == 0:
+ yield cached_item
+ cache.append(item)
+ else:
+ # When both start and stop are positive we have the normal case
+ for item in islice(it, start, stop, step):
+ yield item
+ else:
+ start = -1 if (start is None) else start
+
+ if (stop is not None) and (stop < 0):
+ # Consume all but the last items
+ n = -stop - 1
+ cache = deque(enumerate(it, 1), maxlen=n)
+ len_iter = cache[-1][0] if cache else 0
+
+ # If start and stop are both negative they are comparable and
+ # we can just slice. Otherwise we can adjust start to be negative
+ # and then slice.
+ if start < 0:
+ i, j = start, stop
+ else:
+ i, j = min(start - len_iter, -1), None
+
+ for index, item in list(cache)[i:j:step]:
+ yield item
+ else:
+ # Advance to the stop position
+ if stop is not None:
+ m = stop + 1
+ next(islice(it, m, m), None)
+
+ # stop is positive, so if start is negative they are not comparable
+ # and we need the rest of the items.
+ if start < 0:
+ i = start
+ n = None
+ # stop is None and start is positive, so we just need items up to
+ # the start index.
+ elif stop is None:
+ i = None
+ n = start + 1
+ # Both stop and start are positive, so they are comparable.
+ else:
+ i = None
+ n = start - stop
+ if n <= 0:
+ return
+
+ cache = list(islice(it, n))
+
+ for item in cache[i::step]:
+ yield item
+
+
+def always_reversible(iterable):
+ """An extension of :func:`reversed` that supports all iterables, not
+ just those which implement the ``Reversible`` or ``Sequence`` protocols.
+
+ >>> print(*always_reversible(x for x in range(3)))
+ 2 1 0
+
+ If the iterable is already reversible, this function returns the
+ result of :func:`reversed()`. If the iterable is not reversible,
+ this function will cache the remaining items in the iterable and
+ yield them in reverse order, which may require significant storage.
+ """
+ try:
+ return reversed(iterable)
+ except TypeError:
+ return reversed(list(iterable))
+
+
+def consecutive_groups(iterable, ordering=lambda x: x):
+ """Yield groups of consecutive items using :func:`itertools.groupby`.
+ The *ordering* function determines whether two items are adjacent by
+ returning their position.
+
+ By default, the ordering function is the identity function. This is
+ suitable for finding runs of numbers:
+
+ >>> iterable = [1, 10, 11, 12, 20, 30, 31, 32, 33, 40]
+ >>> for group in consecutive_groups(iterable):
+ ... print(list(group))
+ [1]
+ [10, 11, 12]
+ [20]
+ [30, 31, 32, 33]
+ [40]
+
+ For finding runs of adjacent letters, try using the :meth:`index` method
+ of a string of letters:
+
+ >>> from string import ascii_lowercase
+ >>> iterable = 'abcdfgilmnop'
+ >>> ordering = ascii_lowercase.index
+ >>> for group in consecutive_groups(iterable, ordering):
+ ... print(list(group))
+ ['a', 'b', 'c', 'd']
+ ['f', 'g']
+ ['i']
+ ['l', 'm', 'n', 'o', 'p']
+
+ """
+ for k, g in groupby(
+ enumerate(iterable), key=lambda x: x[0] - ordering(x[1])
+ ):
+ yield map(itemgetter(1), g)
+
+
+def difference(iterable, func=sub):
+ """By default, compute the first difference of *iterable* using
+ :func:`operator.sub`.
+
+ >>> iterable = [0, 1, 3, 6, 10]
+ >>> list(difference(iterable))
+ [0, 1, 2, 3, 4]
+
+ This is the opposite of :func:`accumulate`'s default behavior:
+
+ >>> from more_itertools import accumulate
+ >>> iterable = [0, 1, 2, 3, 4]
+ >>> list(accumulate(iterable))
+ [0, 1, 3, 6, 10]
+ >>> list(difference(accumulate(iterable)))
+ [0, 1, 2, 3, 4]
+
+ By default *func* is :func:`operator.sub`, but other functions can be
+ specified. They will be applied as follows::
+
+ A, B, C, D, ... --> A, func(B, A), func(C, B), func(D, C), ...
+
+ For example, to do progressive division:
+
+ >>> iterable = [1, 2, 6, 24, 120] # Factorial sequence
+ >>> func = lambda x, y: x // y
+ >>> list(difference(iterable, func))
+ [1, 2, 3, 4, 5]
+
+ """
+ a, b = tee(iterable)
+ try:
+ item = next(b)
+ except StopIteration:
+ return iter([])
+ return chain([item], map(lambda x: func(x[1], x[0]), zip(a, b)))
+
+
+class SequenceView(Sequence):
+ """Return a read-only view of the sequence object *target*.
+
+    :class:`SequenceView` objects are analogous to Python's built-in
+ "dictionary view" types. They provide a dynamic view of a sequence's items,
+ meaning that when the sequence updates, so does the view.
+
+ >>> seq = ['0', '1', '2']
+ >>> view = SequenceView(seq)
+ >>> view
+ SequenceView(['0', '1', '2'])
+ >>> seq.append('3')
+ >>> view
+ SequenceView(['0', '1', '2', '3'])
+
+ Sequence views support indexing, slicing, and length queries. They act
+ like the underlying sequence, except they don't allow assignment:
+
+ >>> view[1]
+ '1'
+ >>> view[1:-1]
+ ['1', '2']
+ >>> len(view)
+ 4
+
+ Sequence views are useful as an alternative to copying, as they don't
+ require (much) extra storage.
+
+ """
+ def __init__(self, target):
+ if not isinstance(target, Sequence):
+ raise TypeError
+ self._target = target
+
+ def __getitem__(self, index):
+ return self._target[index]
+
+ def __len__(self):
+ return len(self._target)
+
+ def __repr__(self):
+ return '{}({})'.format(self.__class__.__name__, repr(self._target))
+
+
+class seekable(object):
+ """Wrap an iterator to allow for seeking backward and forward. This
+ progressively caches the items in the source iterable so they can be
+ re-visited.
+
+ Call :meth:`seek` with an index to seek to that position in the source
+ iterable.
+
+ To "reset" an iterator, seek to ``0``:
+
+ >>> from itertools import count
+ >>> it = seekable((str(n) for n in count()))
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> it.seek(0)
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> next(it)
+ '3'
+
+ You can also seek forward:
+
+ >>> it = seekable((str(n) for n in range(20)))
+ >>> it.seek(10)
+ >>> next(it)
+ '10'
+ >>> it.seek(20) # Seeking past the end of the source isn't a problem
+ >>> list(it)
+ []
+ >>> it.seek(0) # Resetting works even after hitting the end
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+
+ The cache grows as the source iterable progresses, so beware of wrapping
+ very large or infinite iterables.
+
+ You may view the contents of the cache with the :meth:`elements` method.
+ That returns a :class:`SequenceView`, a view that updates automatically:
+
+ >>> it = seekable((str(n) for n in range(10)))
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> elements = it.elements()
+ >>> elements
+ SequenceView(['0', '1', '2'])
+ >>> next(it)
+ '3'
+ >>> elements
+ SequenceView(['0', '1', '2', '3'])
+
+ """
+
+ def __init__(self, iterable):
+ self._source = iter(iterable)
+ self._cache = []
+ self._index = None
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if self._index is not None:
+ try:
+ item = self._cache[self._index]
+ except IndexError:
+ self._index = None
+ else:
+ self._index += 1
+ return item
+
+ item = next(self._source)
+ self._cache.append(item)
+ return item
+
+ next = __next__
+
+ def elements(self):
+ return SequenceView(self._cache)
+
+ def seek(self, index):
+ self._index = index
+ remainder = index - len(self._cache)
+ if remainder > 0:
+ consume(self, remainder)
+
+
+class run_length(object):
+ """
+ :func:`run_length.encode` compresses an iterable with run-length encoding.
+ It yields groups of repeated items with the count of how many times they
+ were repeated:
+
+ >>> uncompressed = 'abbcccdddd'
+ >>> list(run_length.encode(uncompressed))
+ [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+
+ :func:`run_length.decode` decompresses an iterable that was previously
+ compressed with run-length encoding. It yields the items of the
+ decompressed iterable:
+
+ >>> compressed = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+ >>> list(run_length.decode(compressed))
+ ['a', 'b', 'b', 'c', 'c', 'c', 'd', 'd', 'd', 'd']
+
+ """
+
+ @staticmethod
+ def encode(iterable):
+ return ((k, ilen(g)) for k, g in groupby(iterable))
+
+ @staticmethod
+ def decode(iterable):
+ return chain.from_iterable(repeat(k, n) for k, n in iterable)
+
+
+def exactly_n(iterable, n, predicate=bool):
+ """Return ``True`` if exactly ``n`` items in the iterable are ``True``
+ according to the *predicate* function.
+
+ >>> exactly_n([True, True, False], 2)
+ True
+ >>> exactly_n([True, True, False], 1)
+ False
+ >>> exactly_n([0, 1, 2, 3, 4, 5], 3, lambda x: x < 3)
+ True
+
+ The iterable will be advanced until ``n + 1`` truthy items are encountered,
+ so avoid calling it on infinite iterables.
+
+ """
+ return len(take(n + 1, filter(predicate, iterable))) == n
+
+
+def circular_shifts(iterable):
+ """Return a list of circular shifts of *iterable*.
+
+ >>> circular_shifts(range(4))
+ [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]
+ """
+ lst = list(iterable)
+ return take(len(lst), windowed(cycle(lst), len(lst)))
+
+
+def make_decorator(wrapping_func, result_index=0):
+ """Return a decorator version of *wrapping_func*, which is a function that
+ modifies an iterable. *result_index* is the position in that function's
+ signature where the iterable goes.
+
+ This lets you use itertools on the "production end," i.e. at function
+ definition. This can augment what the function returns without changing the
+ function's code.
+
+ For example, to produce a decorator version of :func:`chunked`:
+
+ >>> from more_itertools import chunked
+ >>> chunker = make_decorator(chunked, result_index=0)
+ >>> @chunker(3)
+ ... def iter_range(n):
+ ... return iter(range(n))
+ ...
+ >>> list(iter_range(9))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ To only allow truthy items to be returned:
+
+ >>> truth_serum = make_decorator(filter, result_index=1)
+ >>> @truth_serum(bool)
+ ... def boolean_test():
+ ... return [0, 1, '', ' ', False, True]
+ ...
+ >>> list(boolean_test())
+ [1, ' ', True]
+
+ The :func:`peekable` and :func:`seekable` wrappers make for practical
+ decorators:
+
+ >>> from more_itertools import peekable
+ >>> peekable_function = make_decorator(peekable)
+ >>> @peekable_function()
+ ... def str_range(*args):
+ ... return (str(x) for x in range(*args))
+ ...
+ >>> it = str_range(1, 20, 2)
+ >>> next(it), next(it), next(it)
+ ('1', '3', '5')
+ >>> it.peek()
+ '7'
+ >>> next(it)
+ '7'
+
+ """
+ # See https://sites.google.com/site/bbayles/index/decorator_factory for
+ # notes on how this works.
+ def decorator(*wrapping_args, **wrapping_kwargs):
+ def outer_wrapper(f):
+ def inner_wrapper(*args, **kwargs):
+ result = f(*args, **kwargs)
+ wrapping_args_ = list(wrapping_args)
+ wrapping_args_.insert(result_index, result)
+ return wrapping_func(*wrapping_args_, **wrapping_kwargs)
+
+ return inner_wrapper
+
+ return outer_wrapper
+
+ return decorator
+
+
+def map_reduce(iterable, keyfunc, valuefunc=None, reducefunc=None):
+ """Return a dictionary that maps the items in *iterable* to categories
+ defined by *keyfunc*, transforms them with *valuefunc*, and
+ then summarizes them by category with *reducefunc*.
+
+ *valuefunc* defaults to the identity function if it is unspecified.
+ If *reducefunc* is unspecified, no summarization takes place:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> result = map_reduce('abbccc', keyfunc)
+ >>> sorted(result.items())
+ [('A', ['a']), ('B', ['b', 'b']), ('C', ['c', 'c', 'c'])]
+
+ Specifying *valuefunc* transforms the categorized items:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: 1
+ >>> result = map_reduce('abbccc', keyfunc, valuefunc)
+ >>> sorted(result.items())
+ [('A', [1]), ('B', [1, 1]), ('C', [1, 1, 1])]
+
+ Specifying *reducefunc* summarizes the categorized items:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: 1
+ >>> reducefunc = sum
+ >>> result = map_reduce('abbccc', keyfunc, valuefunc, reducefunc)
+ >>> sorted(result.items())
+ [('A', 1), ('B', 2), ('C', 3)]
+
+ You may want to filter the input iterable before applying the map/reduce
+    procedure:
+
+ >>> all_items = range(30)
+ >>> items = [x for x in all_items if 10 <= x <= 20] # Filter
+ >>> keyfunc = lambda x: x % 2 # Evens map to 0; odds to 1
+ >>> categories = map_reduce(items, keyfunc=keyfunc)
+ >>> sorted(categories.items())
+ [(0, [10, 12, 14, 16, 18, 20]), (1, [11, 13, 15, 17, 19])]
+ >>> summaries = map_reduce(items, keyfunc=keyfunc, reducefunc=sum)
+ >>> sorted(summaries.items())
+ [(0, 90), (1, 75)]
+
+ Note that all items in the iterable are gathered into a list before the
+ summarization step, which may require significant storage.
+
+ The returned object is a :obj:`collections.defaultdict` with the
+ ``default_factory`` set to ``None``, such that it behaves like a normal
+ dictionary.
+
+ """
+ valuefunc = (lambda x: x) if (valuefunc is None) else valuefunc
+
+ ret = defaultdict(list)
+ for item in iterable:
+ key = keyfunc(item)
+ value = valuefunc(item)
+ ret[key].append(value)
+
+ if reducefunc is not None:
+ for key, value_list in ret.items():
+ ret[key] = reducefunc(value_list)
+
+ ret.default_factory = None
+ return ret
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/recipes.py b/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/recipes.py
new file mode 100644
index 0000000000..3a7706cb91
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/recipes.py
@@ -0,0 +1,565 @@
+"""Imported from the recipes section of the itertools documentation.
+
+All functions taken from the recipes section of the itertools library docs
+[1]_.
+Some backward-compatible usability improvements have been made.
+
+.. [1] http://docs.python.org/library/itertools.html#recipes
+
+"""
+from collections import deque
+from itertools import (
+ chain, combinations, count, cycle, groupby, islice, repeat, starmap, tee
+)
+import operator
+from random import randrange, sample, choice
+
+from six import PY2
+from six.moves import filter, filterfalse, map, range, zip, zip_longest
+
+__all__ = [
+ 'accumulate',
+ 'all_equal',
+ 'consume',
+ 'dotproduct',
+ 'first_true',
+ 'flatten',
+ 'grouper',
+ 'iter_except',
+ 'ncycles',
+ 'nth',
+ 'nth_combination',
+ 'padnone',
+ 'pairwise',
+ 'partition',
+ 'powerset',
+ 'prepend',
+ 'quantify',
+ 'random_combination_with_replacement',
+ 'random_combination',
+ 'random_permutation',
+ 'random_product',
+ 'repeatfunc',
+ 'roundrobin',
+ 'tabulate',
+ 'tail',
+ 'take',
+ 'unique_everseen',
+ 'unique_justseen',
+]
+
+
+def accumulate(iterable, func=operator.add):
+ """
+ Return an iterator whose items are the accumulated results of a function
+ (specified by the optional *func* argument) that takes two arguments.
+ By default, returns accumulated sums with :func:`operator.add`.
+
+ >>> list(accumulate([1, 2, 3, 4, 5])) # Running sum
+ [1, 3, 6, 10, 15]
+ >>> list(accumulate([1, 2, 3], func=operator.mul)) # Running product
+ [1, 2, 6]
+ >>> list(accumulate([0, 1, -1, 2, 3, 2], func=max)) # Running maximum
+ [0, 1, 1, 2, 3, 3]
+
+ This function is available in the ``itertools`` module for Python 3.2 and
+ greater.
+
+ """
+ it = iter(iterable)
+ try:
+ total = next(it)
+ except StopIteration:
+ return
+ else:
+ yield total
+
+ for element in it:
+ total = func(total, element)
+ yield total
+
+
+def take(n, iterable):
+ """Return first *n* items of the iterable as a list.
+
+ >>> take(3, range(10))
+ [0, 1, 2]
+ >>> take(5, range(3))
+ [0, 1, 2]
+
+ Effectively a short replacement for ``next`` based iterator consumption
+ when you want more than one item, but less than the whole iterator.
+
+ """
+ return list(islice(iterable, n))
+
+
+def tabulate(function, start=0):
+    """Return an iterator over the results of ``function(start)``,
+    ``function(start + 1)``, ``function(start + 2)``...
+
+    *function* should be a function that accepts one integer argument.
+
+ If *start* is not specified it defaults to 0. It will be incremented each
+ time the iterator is advanced.
+
+ >>> square = lambda x: x ** 2
+ >>> iterator = tabulate(square, -3)
+ >>> take(4, iterator)
+ [9, 4, 1, 0]
+
+ """
+ return map(function, count(start))
+
+
+def tail(n, iterable):
+ """Return an iterator over the last *n* items of *iterable*.
+
+ >>> t = tail(3, 'ABCDEFG')
+ >>> list(t)
+ ['E', 'F', 'G']
+
+ """
+ return iter(deque(iterable, maxlen=n))
+
+
+def consume(iterator, n=None):
+    """Advance *iterator* by *n* steps. If *n* is ``None``, consume it
+ entirely.
+
+ Efficiently exhausts an iterator without returning values. Defaults to
+ consuming the whole iterator, but an optional second argument may be
+ provided to limit consumption.
+
+ >>> i = (x for x in range(10))
+ >>> next(i)
+ 0
+ >>> consume(i, 3)
+ >>> next(i)
+ 4
+ >>> consume(i)
+ >>> next(i)
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ StopIteration
+
+ If the iterator has fewer items remaining than the provided limit, the
+ whole iterator will be consumed.
+
+ >>> i = (x for x in range(3))
+ >>> consume(i, 5)
+ >>> next(i)
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ StopIteration
+
+ """
+ # Use functions that consume iterators at C speed.
+ if n is None:
+ # feed the entire iterator into a zero-length deque
+ deque(iterator, maxlen=0)
+ else:
+ # advance to the empty slice starting at position n
+ next(islice(iterator, n, n), None)
+
+
+def nth(iterable, n, default=None):
+ """Returns the nth item or a default value.
+
+ >>> l = range(10)
+ >>> nth(l, 3)
+ 3
+ >>> nth(l, 20, "zebra")
+ 'zebra'
+
+ """
+ return next(islice(iterable, n, None), default)
+
+
+def all_equal(iterable):
+ """
+ Returns ``True`` if all the elements are equal to each other.
+
+ >>> all_equal('aaaa')
+ True
+ >>> all_equal('aaab')
+ False
+
+ """
+ g = groupby(iterable)
+ return next(g, True) and not next(g, False)
+
+
+def quantify(iterable, pred=bool):
+    """Return how many times the predicate is true.
+
+ >>> quantify([True, False, True])
+ 2
+
+ """
+ return sum(map(pred, iterable))
+
+
+def padnone(iterable):
+ """Returns the sequence of elements and then returns ``None`` indefinitely.
+
+ >>> take(5, padnone(range(3)))
+ [0, 1, 2, None, None]
+
+ Useful for emulating the behavior of the built-in :func:`map` function.
+
+ See also :func:`padded`.
+
+ """
+ return chain(iterable, repeat(None))
+
+
+def ncycles(iterable, n):
+    """Returns the sequence elements *n* times.
+
+ >>> list(ncycles(["a", "b"], 3))
+ ['a', 'b', 'a', 'b', 'a', 'b']
+
+ """
+ return chain.from_iterable(repeat(tuple(iterable), n))
+
+
+def dotproduct(vec1, vec2):
+ """Returns the dot product of the two iterables.
+
+ >>> dotproduct([10, 10], [20, 20])
+ 400
+
+ """
+ return sum(map(operator.mul, vec1, vec2))
+
+
+def flatten(listOfLists):
+ """Return an iterator flattening one level of nesting in a list of lists.
+
+ >>> list(flatten([[0, 1], [2, 3]]))
+ [0, 1, 2, 3]
+
+ See also :func:`collapse`, which can flatten multiple levels of nesting.
+
+ """
+ return chain.from_iterable(listOfLists)
+
+
+def repeatfunc(func, times=None, *args):
+ """Call *func* with *args* repeatedly, returning an iterable over the
+ results.
+
+ If *times* is specified, the iterable will terminate after that many
+ repetitions:
+
+ >>> from operator import add
+ >>> times = 4
+ >>> args = 3, 5
+ >>> list(repeatfunc(add, times, *args))
+ [8, 8, 8, 8]
+
+ If *times* is ``None`` the iterable will not terminate:
+
+ >>> from random import randrange
+ >>> times = None
+ >>> args = 1, 11
+ >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP
+ [2, 4, 8, 1, 8, 4]
+
+ """
+ if times is None:
+ return starmap(func, repeat(args))
+ return starmap(func, repeat(args, times))
+
+
+def pairwise(iterable):
+    """Returns an iterator of paired items, overlapping, from the original
+    iterable.
+
+ >>> take(4, pairwise(count()))
+ [(0, 1), (1, 2), (2, 3), (3, 4)]
+
+ """
+ a, b = tee(iterable)
+ next(b, None)
+ return zip(a, b)
+
+
+def grouper(n, iterable, fillvalue=None):
+ """Collect data into fixed-length chunks or blocks.
+
+ >>> list(grouper(3, 'ABCDEFG', 'x'))
+ [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
+
+ """
+ args = [iter(iterable)] * n
+ return zip_longest(fillvalue=fillvalue, *args)
+
+
+def roundrobin(*iterables):
+ """Yields an item from each iterable, alternating between them.
+
+ >>> list(roundrobin('ABC', 'D', 'EF'))
+ ['A', 'D', 'E', 'B', 'F', 'C']
+
+ This function produces the same output as :func:`interleave_longest`, but
+ may perform better for some inputs (in particular when the number of
+ iterables is small).
+
+ """
+ # Recipe credited to George Sakkis
+ pending = len(iterables)
+ if PY2:
+ nexts = cycle(iter(it).next for it in iterables)
+ else:
+ nexts = cycle(iter(it).__next__ for it in iterables)
+ while pending:
+ try:
+ for next in nexts:
+ yield next()
+ except StopIteration:
+ pending -= 1
+ nexts = cycle(islice(nexts, pending))
+
+
+def partition(pred, iterable):
+ """
+ Returns a 2-tuple of iterables derived from the input iterable.
+ The first yields the items that have ``pred(item) == False``.
+ The second yields the items that have ``pred(item) == True``.
+
+ >>> is_odd = lambda x: x % 2 != 0
+ >>> iterable = range(10)
+ >>> even_items, odd_items = partition(is_odd, iterable)
+ >>> list(even_items), list(odd_items)
+ ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
+
+ """
+ # partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9
+ t1, t2 = tee(iterable)
+ return filterfalse(pred, t1), filter(pred, t2)
+
+
+def powerset(iterable):
+ """Yields all possible subsets of the iterable.
+
+ >>> list(powerset([1,2,3]))
+ [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
+
+ """
+ s = list(iterable)
+ return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
+
+
+def unique_everseen(iterable, key=None):
+ """
+ Yield unique elements, preserving order.
+
+ >>> list(unique_everseen('AAAABBBCCDAABBB'))
+ ['A', 'B', 'C', 'D']
+ >>> list(unique_everseen('ABBCcAD', str.lower))
+ ['A', 'B', 'C', 'D']
+
+ Sequences with a mix of hashable and unhashable items can be used.
+ The function will be slower (i.e., `O(n^2)`) for unhashable items.
+
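+    For example (lists are unhashable, so the slower path handles them):
+
+    >>> iterable = ([1, 2], [2, 3], [1, 2])
+    >>> list(unique_everseen(iterable))
+    [[1, 2], [2, 3]]
+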
+ """
+ seenset = set()
+ seenset_add = seenset.add
+ seenlist = []
+ seenlist_add = seenlist.append
+ if key is None:
+ for element in iterable:
+ try:
+ if element not in seenset:
+ seenset_add(element)
+ yield element
+ except TypeError:
+ if element not in seenlist:
+ seenlist_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ try:
+ if k not in seenset:
+ seenset_add(k)
+ yield element
+ except TypeError:
+ if k not in seenlist:
+ seenlist_add(k)
+ yield element
+
+
+def unique_justseen(iterable, key=None):
+    """Yields elements in order, ignoring serial duplicates.
+
+ >>> list(unique_justseen('AAAABBBCCDAABBB'))
+ ['A', 'B', 'C', 'D', 'A', 'B']
+ >>> list(unique_justseen('ABBCcAD', str.lower))
+ ['A', 'B', 'C', 'A', 'D']
+
+ """
+ return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
+
+
+def iter_except(func, exception, first=None):
+ """Yields results from a function repeatedly until an exception is raised.
+
+ Converts a call-until-exception interface to an iterator interface.
+ Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
+ to end the loop.
+
+ >>> l = [0, 1, 2]
+ >>> list(iter_except(l.pop, IndexError))
+ [2, 1, 0]
+
+ """
+ try:
+ if first is not None:
+ yield first()
+ while 1:
+ yield func()
+ except exception:
+ pass
+
+
+def first_true(iterable, default=False, pred=None):
+ """
+ Returns the first true value in the iterable.
+
+    If no true value is found, returns *default*.
+
+    If *pred* is not None, returns the first item for which
+    ``pred(item) == True``.
+
+ >>> first_true(range(10))
+ 1
+ >>> first_true(range(10), pred=lambda x: x > 5)
+ 6
+ >>> first_true(range(10), default='missing', pred=lambda x: x > 9)
+ 'missing'
+
+ """
+ return next(filter(pred, iterable), default)
+
+
+def random_product(*args, **kwds):
+ """Draw an item at random from each of the input iterables.
+
+ >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP
+ ('c', 3, 'Z')
+
+ If *repeat* is provided as a keyword argument, that many items will be
+ drawn from each iterable.
+
+ >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP
+ ('a', 2, 'd', 3)
+
+    This is equivalent to taking a random selection from
+    ``itertools.product(*args, **kwds)``.
+
+ """
+ pools = [tuple(pool) for pool in args] * kwds.get('repeat', 1)
+ return tuple(choice(pool) for pool in pools)
+
+
+def random_permutation(iterable, r=None):
+ """Return a random *r* length permutation of the elements in *iterable*.
+
+ If *r* is not specified or is ``None``, then *r* defaults to the length of
+ *iterable*.
+
+ >>> random_permutation(range(5)) # doctest:+SKIP
+ (3, 4, 0, 1, 2)
+
+    This is equivalent to taking a random selection from
+ ``itertools.permutations(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ r = len(pool) if r is None else r
+ return tuple(sample(pool, r))
+
+
+def random_combination(iterable, r):
+ """Return a random *r* length subsequence of the elements in *iterable*.
+
+ >>> random_combination(range(5), 3) # doctest:+SKIP
+ (2, 3, 4)
+
+    This is equivalent to taking a random selection from
+ ``itertools.combinations(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ indices = sorted(sample(range(n), r))
+ return tuple(pool[i] for i in indices)
+
+
+def random_combination_with_replacement(iterable, r):
+ """Return a random *r* length subsequence of elements in *iterable*,
+ allowing individual elements to be repeated.
+
+ >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
+ (0, 0, 1, 2, 2)
+
+    This is equivalent to taking a random selection from
+ ``itertools.combinations_with_replacement(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ indices = sorted(randrange(n) for i in range(r))
+ return tuple(pool[i] for i in indices)
+
+
+def nth_combination(iterable, r, index):
+ """Equivalent to ``list(combinations(iterable, r))[index]``.
+
+ The subsequences of *iterable* that are of length *r* can be ordered
+ lexicographically. :func:`nth_combination` computes the subsequence at
+ sort position *index* directly, without computing the previous
+ subsequences.
+
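+    For example (small enough to verify against ``combinations`` directly):
+
+    >>> nth_combination(range(5), 3, 5)
+    (0, 3, 4)
+    >>> list(combinations(range(5), 3))[5]
+    (0, 3, 4)
+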
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ if (r < 0) or (r > n):
+ raise ValueError
+
+ c = 1
+ k = min(r, n - r)
+ for i in range(1, k + 1):
+ c = c * (n - k + i) // i
+
+ if index < 0:
+ index += c
+
+ if (index < 0) or (index >= c):
+ raise IndexError
+
+ result = []
+ while r:
+ c, n, r = c * r // n, n - 1, r - 1
+ while index >= c:
+ index -= c
+ c, n = c * (n - r) // n, n - 1
+ result.append(pool[-1 - n])
+
+ return tuple(result)
+
+
+def prepend(value, iterator):
+ """Yield *value*, followed by the elements in *iterator*.
+
+ >>> value = '0'
+ >>> iterator = ['1', '2', '3']
+ >>> list(prepend(value, iterator))
+ ['0', '1', '2', '3']
+
+ To prepend multiple values, see :func:`itertools.chain`.
+
+ """
+ return chain([value], iterator)
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/tests/__init__.py b/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/tests/test_more.py b/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/tests/test_more.py
new file mode 100644
index 0000000000..2023ba6a4c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/tests/test_more.py
@@ -0,0 +1,1848 @@
+from __future__ import division, print_function, unicode_literals
+
+from decimal import Decimal
+from doctest import DocTestSuite
+from fractions import Fraction
+from functools import partial, reduce
+from heapq import merge
+from io import StringIO
+from itertools import (
+ chain,
+ count,
+ groupby,
+ islice,
+ permutations,
+ product,
+ repeat,
+)
+from operator import add, mul, itemgetter
+from unittest import TestCase
+
+from six.moves import filter, map, range, zip
+
+import more_itertools as mi
+
+
+def load_tests(loader, tests, ignore):
+ # Add the doctests
+ tests.addTests(DocTestSuite('more_itertools.more'))
+ return tests
+
+
+class CollateTests(TestCase):
+ """Unit tests for ``collate()``"""
+ # Also accidentally tests peekable, though that could use its own tests
+
+ def test_default(self):
+ """Test with the default `key` function."""
+ iterables = [range(4), range(7), range(3, 6)]
+ self.assertEqual(
+ sorted(reduce(list.__add__, [list(it) for it in iterables])),
+ list(mi.collate(*iterables))
+ )
+
+ def test_key(self):
+ """Test using a custom `key` function."""
+ iterables = [range(5, 0, -1), range(4, 0, -1)]
+ actual = sorted(
+ reduce(list.__add__, [list(it) for it in iterables]), reverse=True
+ )
+ expected = list(mi.collate(*iterables, key=lambda x: -x))
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ """Be nice if passed an empty list of iterables."""
+ self.assertEqual([], list(mi.collate()))
+
+ def test_one(self):
+ """Work when only 1 iterable is passed."""
+ self.assertEqual([0, 1], list(mi.collate(range(2))))
+
+ def test_reverse(self):
+ """Test the `reverse` kwarg."""
+ iterables = [range(4, 0, -1), range(7, 0, -1), range(3, 6, -1)]
+
+ actual = sorted(
+ reduce(list.__add__, [list(it) for it in iterables]), reverse=True
+ )
+ expected = list(mi.collate(*iterables, reverse=True))
+ self.assertEqual(actual, expected)
+
+ def test_alias(self):
+ self.assertNotEqual(merge.__doc__, mi.collate.__doc__)
+ self.assertNotEqual(partial.__doc__, mi.collate.__doc__)
+
+
+class ChunkedTests(TestCase):
+ """Tests for ``chunked()``"""
+
+ def test_even(self):
+ """Test when ``n`` divides evenly into the length of the iterable."""
+ self.assertEqual(
+ list(mi.chunked('ABCDEF', 3)), [['A', 'B', 'C'], ['D', 'E', 'F']]
+ )
+
+ def test_odd(self):
+ """Test when ``n`` does not divide evenly into the length of the
+ iterable.
+
+ """
+ self.assertEqual(
+ list(mi.chunked('ABCDE', 3)), [['A', 'B', 'C'], ['D', 'E']]
+ )
+
+
+class FirstTests(TestCase):
+ """Tests for ``first()``"""
+
+ def test_many(self):
+ """Test that it works on many-item iterables."""
+ # Also try it on a generator expression to make sure it works on
+ # whatever those return, across Python versions.
+ self.assertEqual(mi.first(x for x in range(4)), 0)
+
+ def test_one(self):
+ """Test that it doesn't raise StopIteration prematurely."""
+ self.assertEqual(mi.first([3]), 3)
+
+ def test_empty_stop_iteration(self):
+        """It should raise ValueError for empty iterables."""
+ self.assertRaises(ValueError, lambda: mi.first([]))
+
+ def test_default(self):
+ """It should return the provided default arg for empty iterables."""
+ self.assertEqual(mi.first([], 'boo'), 'boo')
+
+
+class PeekableTests(TestCase):
+    """Tests for ``peekable()`` behavior not incidentally covered by testing
+ ``collate()``
+
+ """
+ def test_peek_default(self):
+ """Make sure passing a default into ``peek()`` works."""
+ p = mi.peekable([])
+ self.assertEqual(p.peek(7), 7)
+
+ def test_truthiness(self):
+ """Make sure a ``peekable`` tests true iff there are items remaining in
+ the iterable.
+
+ """
+ p = mi.peekable([])
+ self.assertFalse(p)
+
+ p = mi.peekable(range(3))
+ self.assertTrue(p)
+
+ def test_simple_peeking(self):
+ """Make sure ``next`` and ``peek`` advance and don't advance the
+ iterator, respectively.
+
+ """
+ p = mi.peekable(range(10))
+ self.assertEqual(next(p), 0)
+ self.assertEqual(p.peek(), 1)
+ self.assertEqual(next(p), 1)
+
+ def test_indexing(self):
+ """
+ Indexing into the peekable shouldn't advance the iterator.
+ """
+ p = mi.peekable('abcdefghijkl')
+
+ # The 0th index is what ``next()`` will return
+ self.assertEqual(p[0], 'a')
+ self.assertEqual(next(p), 'a')
+
+        # Indexing further into the peekable shouldn't advance the iterator
+ self.assertEqual(p[2], 'd')
+ self.assertEqual(next(p), 'b')
+
+ # The 0th index moves up with the iterator; the last index follows
+ self.assertEqual(p[0], 'c')
+ self.assertEqual(p[9], 'l')
+
+ self.assertEqual(next(p), 'c')
+ self.assertEqual(p[8], 'l')
+
+ # Negative indexing should work too
+ self.assertEqual(p[-2], 'k')
+ self.assertEqual(p[-9], 'd')
+ self.assertRaises(IndexError, lambda: p[-10])
+
+ def test_slicing(self):
+ """Slicing the peekable shouldn't advance the iterator."""
+ seq = list('abcdefghijkl')
+ p = mi.peekable(seq)
+
+ # Slicing the peekable should just be like slicing a re-iterable
+ self.assertEqual(p[1:4], seq[1:4])
+
+ # Advancing the iterator moves the slices up also
+ self.assertEqual(next(p), 'a')
+ self.assertEqual(p[1:4], seq[1:][1:4])
+
+ # Implicit starts and stop should work
+ self.assertEqual(p[:5], seq[1:][:5])
+ self.assertEqual(p[:], seq[1:][:])
+
+ # Indexing past the end should work
+ self.assertEqual(p[:100], seq[1:][:100])
+
+ # Steps should work, including negative
+ self.assertEqual(p[::2], seq[1:][::2])
+ self.assertEqual(p[::-1], seq[1:][::-1])
+
+ def test_slicing_reset(self):
+ """Test slicing on a fresh iterable each time"""
+ iterable = ['0', '1', '2', '3', '4', '5']
+ indexes = list(range(-4, len(iterable) + 4)) + [None]
+ steps = [1, 2, 3, 4, -1, -2, -3, 4]
+ for slice_args in product(indexes, indexes, steps):
+ it = iter(iterable)
+ p = mi.peekable(it)
+ next(p)
+ index = slice(*slice_args)
+ actual = p[index]
+ expected = iterable[1:][index]
+ self.assertEqual(actual, expected, slice_args)
+
+ def test_slicing_error(self):
+ iterable = '01234567'
+ p = mi.peekable(iter(iterable))
+
+ # Prime the cache
+ p.peek()
+ old_cache = list(p._cache)
+
+ # Illegal slice
+ with self.assertRaises(ValueError):
+ p[1:-1:0]
+
+ # Neither the cache nor the iteration should be affected
+ self.assertEqual(old_cache, list(p._cache))
+ self.assertEqual(list(p), list(iterable))
+
+ def test_passthrough(self):
+ """Iterating a peekable without using ``peek()`` or ``prepend()``
+ should just give the underlying iterable's elements (a trivial test but
+ useful to set a baseline in case something goes wrong)"""
+ expected = [1, 2, 3, 4, 5]
+ actual = list(mi.peekable(expected))
+ self.assertEqual(actual, expected)
+
+ # prepend() behavior tests
+
+ def test_prepend(self):
+        """Tests interspersed ``prepend()`` and ``next()`` calls"""
+ it = mi.peekable(range(2))
+ actual = []
+
+ # Test prepend() before next()
+ it.prepend(10)
+ actual += [next(it), next(it)]
+
+ # Test prepend() between next()s
+ it.prepend(11)
+ actual += [next(it), next(it)]
+
+ # Test prepend() after source iterable is consumed
+ it.prepend(12)
+ actual += [next(it)]
+
+ expected = [10, 0, 11, 1, 12]
+ self.assertEqual(actual, expected)
+
+ def test_multi_prepend(self):
+ """Tests prepending multiple items and getting them in proper order"""
+ it = mi.peekable(range(5))
+ actual = [next(it), next(it)]
+ it.prepend(10, 11, 12)
+ it.prepend(20, 21)
+ actual += list(it)
+ expected = [0, 1, 20, 21, 10, 11, 12, 2, 3, 4]
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ """Tests prepending in front of an empty iterable"""
+ it = mi.peekable([])
+ it.prepend(10)
+ actual = list(it)
+ expected = [10]
+ self.assertEqual(actual, expected)
+
+ def test_prepend_truthiness(self):
+ """Tests that ``__bool__()`` or ``__nonzero__()`` works properly
+ with ``prepend()``"""
+ it = mi.peekable(range(5))
+ self.assertTrue(it)
+ actual = list(it)
+ self.assertFalse(it)
+ it.prepend(10)
+ self.assertTrue(it)
+ actual += [next(it)]
+ self.assertFalse(it)
+ expected = [0, 1, 2, 3, 4, 10]
+ self.assertEqual(actual, expected)
+
+ def test_multi_prepend_peek(self):
+ """Tests prepending multiple elements and getting them in reverse order
+ while peeking"""
+ it = mi.peekable(range(5))
+ actual = [next(it), next(it)]
+ self.assertEqual(it.peek(), 2)
+ it.prepend(10, 11, 12)
+ self.assertEqual(it.peek(), 10)
+ it.prepend(20, 21)
+ self.assertEqual(it.peek(), 20)
+ actual += list(it)
+ self.assertFalse(it)
+ expected = [0, 1, 20, 21, 10, 11, 12, 2, 3, 4]
+ self.assertEqual(actual, expected)
+
+ def test_prepend_after_stop(self):
+ """Test resuming iteration after a previous exhaustion"""
+ it = mi.peekable(range(3))
+ self.assertEqual(list(it), [0, 1, 2])
+ self.assertRaises(StopIteration, lambda: next(it))
+ it.prepend(10)
+ self.assertEqual(next(it), 10)
+ self.assertRaises(StopIteration, lambda: next(it))
+
+ def test_prepend_slicing(self):
+ """Tests interaction between prepending and slicing"""
+ seq = list(range(20))
+ p = mi.peekable(seq)
+
+ p.prepend(30, 40, 50)
+ pseq = [30, 40, 50] + seq # pseq for prepended_seq
+
+ # adapt the specific tests from test_slicing
+ self.assertEqual(p[0], 30)
+ self.assertEqual(p[1:8], pseq[1:8])
+ self.assertEqual(p[1:], pseq[1:])
+ self.assertEqual(p[:5], pseq[:5])
+ self.assertEqual(p[:], pseq[:])
+ self.assertEqual(p[:100], pseq[:100])
+ self.assertEqual(p[::2], pseq[::2])
+ self.assertEqual(p[::-1], pseq[::-1])
+
+ def test_prepend_indexing(self):
+ """Tests interaction between prepending and indexing"""
+ seq = list(range(20))
+ p = mi.peekable(seq)
+
+ p.prepend(30, 40, 50)
+
+ self.assertEqual(p[0], 30)
+ self.assertEqual(next(p), 30)
+ self.assertEqual(p[2], 0)
+ self.assertEqual(next(p), 40)
+ self.assertEqual(p[0], 50)
+ self.assertEqual(p[9], 8)
+ self.assertEqual(next(p), 50)
+ self.assertEqual(p[8], 8)
+ self.assertEqual(p[-2], 18)
+ self.assertEqual(p[-9], 11)
+ self.assertRaises(IndexError, lambda: p[-21])
+
+ def test_prepend_iterable(self):
+ """Tests prepending from an iterable"""
+ it = mi.peekable(range(5))
+ # Don't directly use the range() object to avoid any range-specific
+ # optimizations
+ it.prepend(*(x for x in range(5)))
+ actual = list(it)
+ expected = list(chain(range(5), range(5)))
+ self.assertEqual(actual, expected)
+
+ def test_prepend_many(self):
+ """Tests that prepending a huge number of elements works"""
+ it = mi.peekable(range(5))
+ # Don't directly use the range() object to avoid any range-specific
+ # optimizations
+ it.prepend(*(x for x in range(20000)))
+ actual = list(it)
+ expected = list(chain(range(20000), range(5)))
+ self.assertEqual(actual, expected)
+
+ def test_prepend_reversed(self):
+ """Tests prepending from a reversed iterable"""
+ it = mi.peekable(range(3))
+ it.prepend(*reversed((10, 11, 12)))
+ actual = list(it)
+ expected = [12, 11, 10, 0, 1, 2]
+ self.assertEqual(actual, expected)
+
+
+class ConsumerTests(TestCase):
+ """Tests for ``consumer()``"""
+
+ def test_consumer(self):
+ @mi.consumer
+ def eater():
+ while True:
+ x = yield # noqa
+
+ e = eater()
+ e.send('hi') # without @consumer, would raise TypeError
+
+
+class DistinctPermutationsTests(TestCase):
+ def test_distinct_permutations(self):
+ """Make sure the output for ``distinct_permutations()`` is the same as
+ set(permutations(it)).
+
+ """
+ iterable = ['z', 'a', 'a', 'q', 'q', 'q', 'y']
+ test_output = sorted(mi.distinct_permutations(iterable))
+ ref_output = sorted(set(permutations(iterable)))
+ self.assertEqual(test_output, ref_output)
+
+ def test_other_iterables(self):
+        """Make sure ``distinct_permutations()`` accepts different types of
+ iterables.
+
+ """
+ # a generator
+ iterable = (c for c in ['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ test_output = sorted(mi.distinct_permutations(iterable))
+ # "reload" it
+ iterable = (c for c in ['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ ref_output = sorted(set(permutations(iterable)))
+ self.assertEqual(test_output, ref_output)
+
+ # an iterator
+ iterable = iter(['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ test_output = sorted(mi.distinct_permutations(iterable))
+ # "reload" it
+ iterable = iter(['z', 'a', 'a', 'q', 'q', 'q', 'y'])
+ ref_output = sorted(set(permutations(iterable)))
+ self.assertEqual(test_output, ref_output)
+
+
+class IlenTests(TestCase):
+ def test_ilen(self):
+ """Sanity-checks for ``ilen()``."""
+ # Non-empty
+ self.assertEqual(
+ mi.ilen(filter(lambda x: x % 10 == 0, range(101))), 11
+ )
+
+ # Empty
+ self.assertEqual(mi.ilen((x for x in range(0))), 0)
+
+ # Iterable with __len__
+ self.assertEqual(mi.ilen(list(range(6))), 6)
+
+
+class WithIterTests(TestCase):
+ def test_with_iter(self):
+ s = StringIO('One fish\nTwo fish')
+ initial_words = [line.split()[0] for line in mi.with_iter(s)]
+
+ # Iterable's items should be faithfully represented
+ self.assertEqual(initial_words, ['One', 'Two'])
+ # The file object should be closed
+ self.assertEqual(s.closed, True)
+
+
+class OneTests(TestCase):
+ def test_basic(self):
+ it = iter(['item'])
+ self.assertEqual(mi.one(it), 'item')
+
+ def test_too_short(self):
+ it = iter([])
+ self.assertRaises(ValueError, lambda: mi.one(it))
+ self.assertRaises(IndexError, lambda: mi.one(it, too_short=IndexError))
+
+ def test_too_long(self):
+ it = count()
+ self.assertRaises(ValueError, lambda: mi.one(it)) # burn 0 and 1
+ self.assertEqual(next(it), 2)
+ self.assertRaises(
+ OverflowError, lambda: mi.one(it, too_long=OverflowError)
+ )
+
+
+class IntersperseTest(TestCase):
+ """ Tests for intersperse() """
+
+ def test_even(self):
+ iterable = (x for x in '01')
+ self.assertEqual(
+ list(mi.intersperse(None, iterable)), ['0', None, '1']
+ )
+
+ def test_odd(self):
+ iterable = (x for x in '012')
+ self.assertEqual(
+ list(mi.intersperse(None, iterable)), ['0', None, '1', None, '2']
+ )
+
+ def test_nested(self):
+ element = ('a', 'b')
+ iterable = (x for x in '012')
+ actual = list(mi.intersperse(element, iterable))
+ expected = ['0', ('a', 'b'), '1', ('a', 'b'), '2']
+ self.assertEqual(actual, expected)
+
+ def test_not_iterable(self):
+ self.assertRaises(TypeError, lambda: mi.intersperse('x', 1))
+
+ def test_n(self):
+ for n, element, expected in [
+ (1, '_', ['0', '_', '1', '_', '2', '_', '3', '_', '4', '_', '5']),
+ (2, '_', ['0', '1', '_', '2', '3', '_', '4', '5']),
+ (3, '_', ['0', '1', '2', '_', '3', '4', '5']),
+ (4, '_', ['0', '1', '2', '3', '_', '4', '5']),
+ (5, '_', ['0', '1', '2', '3', '4', '_', '5']),
+ (6, '_', ['0', '1', '2', '3', '4', '5']),
+ (7, '_', ['0', '1', '2', '3', '4', '5']),
+ (3, ['a', 'b'], ['0', '1', '2', ['a', 'b'], '3', '4', '5']),
+ ]:
+ iterable = (x for x in '012345')
+ actual = list(mi.intersperse(element, iterable, n=n))
+ self.assertEqual(actual, expected)
+
+ def test_n_zero(self):
+ self.assertRaises(
+ ValueError, lambda: list(mi.intersperse('x', '012', n=0))
+ )
+
+
+class UniqueToEachTests(TestCase):
+ """Tests for ``unique_to_each()``"""
+
+ def test_all_unique(self):
+ """When all the input iterables are unique the output should match
+ the input."""
+ iterables = [[1, 2], [3, 4, 5], [6, 7, 8]]
+ self.assertEqual(mi.unique_to_each(*iterables), iterables)
+
+ def test_duplicates(self):
+ """When there are duplicates in any of the input iterables that aren't
+ in the rest, those duplicates should be emitted."""
+ iterables = ["mississippi", "missouri"]
+ self.assertEqual(
+ mi.unique_to_each(*iterables), [['p', 'p'], ['o', 'u', 'r']]
+ )
+
+ def test_mixed(self):
+ """When the input iterables contain different types the function should
+ still behave properly"""
+ iterables = ['x', (i for i in range(3)), [1, 2, 3], tuple()]
+ self.assertEqual(mi.unique_to_each(*iterables), [['x'], [0], [3], []])
+
+
+class WindowedTests(TestCase):
+ """Tests for ``windowed()``"""
+
+ def test_basic(self):
+ actual = list(mi.windowed([1, 2, 3, 4, 5], 3))
+ expected = [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
+ self.assertEqual(actual, expected)
+
+ def test_large_size(self):
+ """
+ When the window size is larger than the iterable, and no fill value is
+ given, ``None`` should be filled in.
+ """
+ actual = list(mi.windowed([1, 2, 3, 4, 5], 6))
+ expected = [(1, 2, 3, 4, 5, None)]
+ self.assertEqual(actual, expected)
+
+ def test_fillvalue(self):
+ """
+ When sizes don't match evenly, the given fill value should be used.
+ """
+ iterable = [1, 2, 3, 4, 5]
+
+ for n, kwargs, expected in [
+ (6, {}, [(1, 2, 3, 4, 5, '!')]), # n > len(iterable)
+ (3, {'step': 3}, [(1, 2, 3), (4, 5, '!')]), # using ``step``
+ ]:
+ actual = list(mi.windowed(iterable, n, fillvalue='!', **kwargs))
+ self.assertEqual(actual, expected)
+
+ def test_zero(self):
+ """When the window size is zero, an empty tuple should be emitted."""
+ actual = list(mi.windowed([1, 2, 3, 4, 5], 0))
+ expected = [tuple()]
+ self.assertEqual(actual, expected)
+
+ def test_negative(self):
+ """When the window size is negative, ValueError should be raised."""
+ with self.assertRaises(ValueError):
+ list(mi.windowed([1, 2, 3, 4, 5], -1))
+
+ def test_step(self):
+ """The window should advance by the number of steps provided"""
+ iterable = [1, 2, 3, 4, 5, 6, 7]
+ for n, step, expected in [
+ (3, 2, [(1, 2, 3), (3, 4, 5), (5, 6, 7)]), # n > step
+ (3, 3, [(1, 2, 3), (4, 5, 6), (7, None, None)]), # n == step
+ (3, 4, [(1, 2, 3), (5, 6, 7)]), # line up nicely
+ (3, 5, [(1, 2, 3), (6, 7, None)]), # off by one
+ (3, 6, [(1, 2, 3), (7, None, None)]), # off by two
+ (3, 7, [(1, 2, 3)]), # step past the end
+ (7, 8, [(1, 2, 3, 4, 5, 6, 7)]), # step > len(iterable)
+ ]:
+ actual = list(mi.windowed(iterable, n, step=step))
+ self.assertEqual(actual, expected)
+
+ # Step must be greater than or equal to 1
+ with self.assertRaises(ValueError):
+ list(mi.windowed(iterable, 3, step=0))
+
+
+class BucketTests(TestCase):
+ """Tests for ``bucket()``"""
+
+ def test_basic(self):
+ iterable = [10, 20, 30, 11, 21, 31, 12, 22, 23, 33]
+ D = mi.bucket(iterable, key=lambda x: 10 * (x // 10))
+
+ # In-order access
+ self.assertEqual(list(D[10]), [10, 11, 12])
+
+ # Out of order access
+ self.assertEqual(list(D[30]), [30, 31, 33])
+ self.assertEqual(list(D[20]), [20, 21, 22, 23])
+
+ self.assertEqual(list(D[40]), []) # Nothing in here!
+
+ def test_in(self):
+ iterable = [10, 20, 30, 11, 21, 31, 12, 22, 23, 33]
+ D = mi.bucket(iterable, key=lambda x: 10 * (x // 10))
+
+ self.assertTrue(10 in D)
+ self.assertFalse(40 in D)
+ self.assertTrue(20 in D)
+ self.assertFalse(21 in D)
+
+ # Checking in-ness shouldn't advance the iterator
+ self.assertEqual(next(D[10]), 10)
+
+ def test_validator(self):
+ iterable = count(0)
+ key = lambda x: int(str(x)[0]) # First digit of each number
+ validator = lambda x: 0 < x < 10 # No leading zeros
+ D = mi.bucket(iterable, key, validator=validator)
+ self.assertEqual(mi.take(3, D[1]), [1, 10, 11])
+ self.assertNotIn(0, D) # Non-valid entries don't return True
+ self.assertNotIn(0, D._cache) # Don't store non-valid entries
+ self.assertEqual(list(D[0]), [])
+
+
+class SpyTests(TestCase):
+ """Tests for ``spy()``"""
+
+ def test_basic(self):
+ original_iterable = iter('abcdefg')
+ head, new_iterable = mi.spy(original_iterable)
+ self.assertEqual(head, ['a'])
+ self.assertEqual(
+ list(new_iterable), ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+ )
+
+ def test_unpacking(self):
+ original_iterable = iter('abcdefg')
+ (first, second, third), new_iterable = mi.spy(original_iterable, 3)
+ self.assertEqual(first, 'a')
+ self.assertEqual(second, 'b')
+ self.assertEqual(third, 'c')
+ self.assertEqual(
+ list(new_iterable), ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+ )
+
+ def test_too_many(self):
+ original_iterable = iter('abc')
+ head, new_iterable = mi.spy(original_iterable, 4)
+ self.assertEqual(head, ['a', 'b', 'c'])
+ self.assertEqual(list(new_iterable), ['a', 'b', 'c'])
+
+ def test_zero(self):
+ original_iterable = iter('abc')
+ head, new_iterable = mi.spy(original_iterable, 0)
+ self.assertEqual(head, [])
+ self.assertEqual(list(new_iterable), ['a', 'b', 'c'])
+
+
+class InterleaveTests(TestCase):
+ def test_even(self):
+ actual = list(mi.interleave([1, 4, 7], [2, 5, 8], [3, 6, 9]))
+ expected = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_short(self):
+ actual = list(mi.interleave([1, 4], [2, 5, 7], [3, 6, 8]))
+ expected = [1, 2, 3, 4, 5, 6]
+ self.assertEqual(actual, expected)
+
+ def test_mixed_types(self):
+ it_list = ['a', 'b', 'c', 'd']
+ it_str = '12345'
+ it_inf = count()
+ actual = list(mi.interleave(it_list, it_str, it_inf))
+ expected = ['a', '1', 0, 'b', '2', 1, 'c', '3', 2, 'd', '4', 3]
+ self.assertEqual(actual, expected)
+
+
+class InterleaveLongestTests(TestCase):
+ def test_even(self):
+ actual = list(mi.interleave_longest([1, 4, 7], [2, 5, 8], [3, 6, 9]))
+ expected = [1, 2, 3, 4, 5, 6, 7, 8, 9]
+ self.assertEqual(actual, expected)
+
+ def test_short(self):
+ actual = list(mi.interleave_longest([1, 4], [2, 5, 7], [3, 6, 8]))
+ expected = [1, 2, 3, 4, 5, 6, 7, 8]
+ self.assertEqual(actual, expected)
+
+ def test_mixed_types(self):
+ it_list = ['a', 'b', 'c', 'd']
+ it_str = '12345'
+ it_gen = (x for x in range(3))
+ actual = list(mi.interleave_longest(it_list, it_str, it_gen))
+ expected = ['a', '1', 0, 'b', '2', 1, 'c', '3', 2, 'd', '4', '5']
+ self.assertEqual(actual, expected)
+
+
+class TestCollapse(TestCase):
+ """Tests for ``collapse()``"""
+
+ def test_collapse(self):
+ l = [[1], 2, [[3], 4], [[[5]]]]
+ self.assertEqual(list(mi.collapse(l)), [1, 2, 3, 4, 5])
+
+ def test_collapse_to_string(self):
+ l = [["s1"], "s2", [["s3"], "s4"], [[["s5"]]]]
+ self.assertEqual(list(mi.collapse(l)), ["s1", "s2", "s3", "s4", "s5"])
+
+ def test_collapse_flatten(self):
+ l = [[1], [2], [[3], 4], [[[5]]]]
+ self.assertEqual(list(mi.collapse(l, levels=1)), list(mi.flatten(l)))
+
+ def test_collapse_to_level(self):
+ l = [[1], 2, [[3], 4], [[[5]]]]
+ self.assertEqual(list(mi.collapse(l, levels=2)), [1, 2, 3, 4, [5]])
+ self.assertEqual(
+ list(mi.collapse(mi.collapse(l, levels=1), levels=1)),
+ list(mi.collapse(l, levels=2))
+ )
+
+ def test_collapse_to_list(self):
+ l = (1, [2], (3, [4, (5,)], 'ab'))
+ actual = list(mi.collapse(l, base_type=list))
+ expected = [1, [2], 3, [4, (5,)], 'ab']
+ self.assertEqual(actual, expected)
+
+
+class SideEffectTests(TestCase):
+ """Tests for ``side_effect()``"""
+
+ def test_individual(self):
+ # The function increments the counter for each call
+ counter = [0]
+
+ def func(arg):
+ counter[0] += 1
+
+ result = list(mi.side_effect(func, range(10)))
+ self.assertEqual(result, list(range(10)))
+ self.assertEqual(counter[0], 10)
+
+ def test_chunked(self):
+ # The function increments the counter for each call
+ counter = [0]
+
+ def func(arg):
+ counter[0] += 1
+
+ result = list(mi.side_effect(func, range(10), 2))
+ self.assertEqual(result, list(range(10)))
+ self.assertEqual(counter[0], 5)
+
+ def test_before_after(self):
+ f = StringIO()
+ collector = []
+
+ def func(item):
+ print(item, file=f)
+ collector.append(f.getvalue())
+
+ def it():
+ yield u'a'
+ yield u'b'
+ raise RuntimeError('kaboom')
+
+ before = lambda: print('HEADER', file=f)
+ after = f.close
+
+ try:
+ mi.consume(mi.side_effect(func, it(), before=before, after=after))
+ except RuntimeError:
+ pass
+
+ # The iterable should have been written to the file
+ self.assertEqual(collector, [u'HEADER\na\n', u'HEADER\na\nb\n'])
+
+ # The file should be closed even though something bad happened
+ self.assertTrue(f.closed)
+
+ def test_before_fails(self):
+ f = StringIO()
+ func = lambda x: print(x, file=f)
+
+ def before():
+ raise RuntimeError('ouch')
+
+ try:
+ mi.consume(
+ mi.side_effect(func, u'abc', before=before, after=f.close)
+ )
+ except RuntimeError:
+ pass
+
+ # The file should be closed even though something bad happened in the
+ # before function
+ self.assertTrue(f.closed)
+
+
+class SlicedTests(TestCase):
+ """Tests for ``sliced()``"""
+
+ def test_even(self):
+ """Test when the length of the sequence is divisible by *n*"""
+ seq = 'ABCDEFGHI'
+ self.assertEqual(list(mi.sliced(seq, 3)), ['ABC', 'DEF', 'GHI'])
+
+ def test_odd(self):
+ """Test when the length of the sequence is not divisible by *n*"""
+ seq = 'ABCDEFGHI'
+ self.assertEqual(list(mi.sliced(seq, 4)), ['ABCD', 'EFGH', 'I'])
+
+ def test_not_sliceable(self):
+ seq = (x for x in 'ABCDEFGHI')
+
+ with self.assertRaises(TypeError):
+ list(mi.sliced(seq, 3))
+
+
+class SplitAtTests(TestCase):
+ """Tests for ``split()``"""
+
+ def comp_with_str_split(self, str_to_split, delim):
+ pred = lambda c: c == delim
+ actual = list(map(''.join, mi.split_at(str_to_split, pred)))
+ expected = str_to_split.split(delim)
+ self.assertEqual(actual, expected)
+
+ def test_separators(self):
+ test_strs = ['', 'abcba', 'aaabbbcccddd', 'e']
+ for s, delim in product(test_strs, 'abcd'):
+ self.comp_with_str_split(s, delim)
+
+
+class SplitBeforeTest(TestCase):
+ """Tests for ``split_before()``"""
+
+ def test_starts_with_sep(self):
+ actual = list(mi.split_before('xooxoo', lambda c: c == 'x'))
+ expected = [['x', 'o', 'o'], ['x', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+ def test_ends_with_sep(self):
+ actual = list(mi.split_before('ooxoox', lambda c: c == 'x'))
+ expected = [['o', 'o'], ['x', 'o', 'o'], ['x']]
+ self.assertEqual(actual, expected)
+
+ def test_no_sep(self):
+ actual = list(mi.split_before('ooo', lambda c: c == 'x'))
+ expected = [['o', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+
+class SplitAfterTest(TestCase):
+ """Tests for ``split_after()``"""
+
+ def test_starts_with_sep(self):
+ actual = list(mi.split_after('xooxoo', lambda c: c == 'x'))
+ expected = [['x'], ['o', 'o', 'x'], ['o', 'o']]
+ self.assertEqual(actual, expected)
+
+ def test_ends_with_sep(self):
+ actual = list(mi.split_after('ooxoox', lambda c: c == 'x'))
+ expected = [['o', 'o', 'x'], ['o', 'o', 'x']]
+ self.assertEqual(actual, expected)
+
+ def test_no_sep(self):
+ actual = list(mi.split_after('ooo', lambda c: c == 'x'))
+ expected = [['o', 'o', 'o']]
+ self.assertEqual(actual, expected)
+
+
+class PaddedTest(TestCase):
+ """Tests for ``padded()``"""
+
+ def test_no_n(self):
+ seq = [1, 2, 3]
+
+ # No fillvalue
+ self.assertEqual(mi.take(5, mi.padded(seq)), [1, 2, 3, None, None])
+
+ # With fillvalue
+ self.assertEqual(
+ mi.take(5, mi.padded(seq, fillvalue='')), [1, 2, 3, '', '']
+ )
+
+ def test_invalid_n(self):
+ self.assertRaises(ValueError, lambda: list(mi.padded([1, 2, 3], n=-1)))
+ self.assertRaises(ValueError, lambda: list(mi.padded([1, 2, 3], n=0)))
+
+ def test_valid_n(self):
+ seq = [1, 2, 3, 4, 5]
+
+ # No need for padding: n <= len(seq)
+ self.assertEqual(list(mi.padded(seq, n=4)), [1, 2, 3, 4, 5])
+ self.assertEqual(list(mi.padded(seq, n=5)), [1, 2, 3, 4, 5])
+
+ # No fillvalue
+ self.assertEqual(
+ list(mi.padded(seq, n=7)), [1, 2, 3, 4, 5, None, None]
+ )
+
+ # With fillvalue
+ self.assertEqual(
+ list(mi.padded(seq, fillvalue='', n=7)), [1, 2, 3, 4, 5, '', '']
+ )
+
+ def test_next_multiple(self):
+ seq = [1, 2, 3, 4, 5, 6]
+
+ # No need for padding: len(seq) % n == 0
+ self.assertEqual(
+ list(mi.padded(seq, n=3, next_multiple=True)), [1, 2, 3, 4, 5, 6]
+ )
+
+ # Padding needed: len(seq) < n
+ self.assertEqual(
+ list(mi.padded(seq, n=8, next_multiple=True)),
+ [1, 2, 3, 4, 5, 6, None, None]
+ )
+
+ # No padding needed: len(seq) == n
+ self.assertEqual(
+ list(mi.padded(seq, n=6, next_multiple=True)), [1, 2, 3, 4, 5, 6]
+ )
+
+ # Padding needed: len(seq) > n
+ self.assertEqual(
+ list(mi.padded(seq, n=4, next_multiple=True)),
+ [1, 2, 3, 4, 5, 6, None, None]
+ )
+
+ # With fillvalue
+ self.assertEqual(
+ list(mi.padded(seq, fillvalue='', n=4, next_multiple=True)),
+ [1, 2, 3, 4, 5, 6, '', '']
+ )
+
+
+class DistributeTest(TestCase):
+ """Tests for distribute()"""
+
+ def test_invalid_n(self):
+ self.assertRaises(ValueError, lambda: mi.distribute(-1, [1, 2, 3]))
+ self.assertRaises(ValueError, lambda: mi.distribute(0, [1, 2, 3]))
+
+ def test_basic(self):
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+
+ for n, expected in [
+ (1, [iterable]),
+ (2, [[1, 3, 5, 7, 9], [2, 4, 6, 8, 10]]),
+ (3, [[1, 4, 7, 10], [2, 5, 8], [3, 6, 9]]),
+ (10, [[n] for n in range(1, 10 + 1)]),
+ ]:
+ self.assertEqual(
+ [list(x) for x in mi.distribute(n, iterable)], expected
+ )
+
+ def test_large_n(self):
+ iterable = [1, 2, 3, 4]
+ self.assertEqual(
+ [list(x) for x in mi.distribute(6, iterable)],
+ [[1], [2], [3], [4], [], []]
+ )
+
+
+class StaggerTest(TestCase):
+ """Tests for ``stagger()``"""
+
+ def test_default(self):
+ iterable = [0, 1, 2, 3]
+ actual = list(mi.stagger(iterable))
+ expected = [(None, 0, 1), (0, 1, 2), (1, 2, 3)]
+ self.assertEqual(actual, expected)
+
+ def test_offsets(self):
+ iterable = [0, 1, 2, 3]
+ for offsets, expected in [
+ ((-2, 0, 2), [('', 0, 2), ('', 1, 3)]),
+ ((-2, -1), [('', ''), ('', 0), (0, 1), (1, 2), (2, 3)]),
+ ((1, 2), [(1, 2), (2, 3)]),
+ ]:
+ all_groups = mi.stagger(iterable, offsets=offsets, fillvalue='')
+ self.assertEqual(list(all_groups), expected)
+
+ def test_longest(self):
+ iterable = [0, 1, 2, 3]
+ for offsets, expected in [
+ (
+ (-1, 0, 1),
+ [('', 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, ''), (3, '', '')]
+ ),
+ ((-2, -1), [('', ''), ('', 0), (0, 1), (1, 2), (2, 3), (3, '')]),
+ ((1, 2), [(1, 2), (2, 3), (3, '')]),
+ ]:
+ all_groups = mi.stagger(
+ iterable, offsets=offsets, fillvalue='', longest=True
+ )
+ self.assertEqual(list(all_groups), expected)
+
+
+class ZipOffsetTest(TestCase):
+ """Tests for ``zip_offset()``"""
+
+ def test_shortest(self):
+ a_1 = [0, 1, 2, 3]
+ a_2 = [0, 1, 2, 3, 4, 5]
+ a_3 = [0, 1, 2, 3, 4, 5, 6, 7]
+ actual = list(
+ mi.zip_offset(a_1, a_2, a_3, offsets=(-1, 0, 1), fillvalue='')
+ )
+ expected = [('', 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5)]
+ self.assertEqual(actual, expected)
+
+ def test_longest(self):
+ a_1 = [0, 1, 2, 3]
+ a_2 = [0, 1, 2, 3, 4, 5]
+ a_3 = [0, 1, 2, 3, 4, 5, 6, 7]
+ actual = list(
+ mi.zip_offset(a_1, a_2, a_3, offsets=(-1, 0, 1), longest=True)
+ )
+ expected = [
+ (None, 0, 1),
+ (0, 1, 2),
+ (1, 2, 3),
+ (2, 3, 4),
+ (3, 4, 5),
+ (None, 5, 6),
+ (None, None, 7),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_mismatch(self):
+ iterables = [0, 1, 2], [2, 3, 4]
+ offsets = (-1, 0, 1)
+ self.assertRaises(
+ ValueError,
+ lambda: list(mi.zip_offset(*iterables, offsets=offsets))
+ )
+
+
+class SortTogetherTest(TestCase):
+ """Tests for sort_together()"""
+
+ def test_key_list(self):
+ """tests `key_list` including default, iterables include duplicates"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20]
+ ]
+
+ self.assertEqual(
+ mi.sort_together(iterables),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('June', 'July', 'July', 'May', 'Aug.', 'May'),
+ (70, 100, 20, 97, 20, 100)
+ ]
+ )
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(0, 1)),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('July', 'July', 'June', 'Aug.', 'May', 'May'),
+ (100, 20, 70, 20, 97, 100)
+ ]
+ )
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(0, 1, 2)),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('July', 'July', 'June', 'Aug.', 'May', 'May'),
+ (20, 100, 70, 20, 97, 100)
+ ]
+ )
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(2,)),
+ [
+ ('GA', 'CT', 'CT', 'GA', 'GA', 'CT'),
+ ('Aug.', 'July', 'June', 'May', 'May', 'July'),
+ (20, 20, 70, 97, 100, 100)
+ ]
+ )
+
+ def test_invalid_key_list(self):
+ """tests `key_list` for indexes not available in `iterables`"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20]
+ ]
+
+ self.assertRaises(
+ IndexError, lambda: mi.sort_together(iterables, key_list=(5,))
+ )
+
+ def test_reverse(self):
+ """tests `reverse` to ensure a reverse sort for `key_list` iterables"""
+ iterables = [
+ ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20]
+ ]
+
+ self.assertEqual(
+ mi.sort_together(iterables, key_list=(0, 1, 2), reverse=True),
+ [('GA', 'GA', 'GA', 'CT', 'CT', 'CT'),
+ ('May', 'May', 'Aug.', 'June', 'July', 'July'),
+ (100, 97, 20, 70, 100, 20)]
+ )
+
+ def test_uneven_iterables(self):
+ """tests trimming of iterables to the shortest length before sorting"""
+ iterables = [['GA', 'GA', 'GA', 'CT', 'CT', 'CT', 'MA'],
+ ['May', 'Aug.', 'May', 'June', 'July', 'July'],
+ [97, 20, 100, 70, 100, 20, 0]]
+
+ self.assertEqual(
+ mi.sort_together(iterables),
+ [
+ ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'),
+ ('June', 'July', 'July', 'May', 'Aug.', 'May'),
+ (70, 100, 20, 97, 20, 100)
+ ]
+ )
+
+
+class DivideTest(TestCase):
+ """Tests for divide()"""
+
+ def test_invalid_n(self):
+ self.assertRaises(ValueError, lambda: mi.divide(-1, [1, 2, 3]))
+ self.assertRaises(ValueError, lambda: mi.divide(0, [1, 2, 3]))
+
+ def test_basic(self):
+ iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+
+ for n, expected in [
+ (1, [iterable]),
+ (2, [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]),
+ (3, [[1, 2, 3, 4], [5, 6, 7], [8, 9, 10]]),
+ (10, [[n] for n in range(1, 10 + 1)]),
+ ]:
+ self.assertEqual(
+ [list(x) for x in mi.divide(n, iterable)], expected
+ )
+
+ def test_large_n(self):
+ iterable = [1, 2, 3, 4]
+ self.assertEqual(
+ [list(x) for x in mi.divide(6, iterable)],
+ [[1], [2], [3], [4], [], []]
+ )
+
+
+class TestAlwaysIterable(TestCase):
+ """Tests for always_iterable()"""
+ def test_single(self):
+ self.assertEqual(list(mi.always_iterable(1)), [1])
+
+ def test_strings(self):
+ for obj in ['foo', b'bar', u'baz']:
+ actual = list(mi.always_iterable(obj))
+ expected = [obj]
+ self.assertEqual(actual, expected)
+
+ def test_base_type(self):
+ dict_obj = {'a': 1, 'b': 2}
+ str_obj = '123'
+
+ # Default: dicts are iterable like they normally are
+ default_actual = list(mi.always_iterable(dict_obj))
+ default_expected = list(dict_obj)
+ self.assertEqual(default_actual, default_expected)
+
+ # Unitary types set: dicts are not iterable
+ custom_actual = list(mi.always_iterable(dict_obj, base_type=dict))
+ custom_expected = [dict_obj]
+ self.assertEqual(custom_actual, custom_expected)
+
+ # With no unitary type set, strings are iterable
+ str_actual = list(mi.always_iterable(str_obj, base_type=None))
+ str_expected = list(str_obj)
+ self.assertEqual(str_actual, str_expected)
+
+ def test_iterables(self):
+ self.assertEqual(list(mi.always_iterable([0, 1])), [0, 1])
+ self.assertEqual(
+ list(mi.always_iterable([0, 1], base_type=list)), [[0, 1]]
+ )
+ self.assertEqual(
+ list(mi.always_iterable(iter('foo'))), ['f', 'o', 'o']
+ )
+ self.assertEqual(list(mi.always_iterable([])), [])
+
+ def test_none(self):
+ self.assertEqual(list(mi.always_iterable(None)), [])
+
+ def test_generator(self):
+ def _gen():
+ yield 0
+ yield 1
+
+ self.assertEqual(list(mi.always_iterable(_gen())), [0, 1])
+
+
+class AdjacentTests(TestCase):
+ def test_typical(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10)))
+ expected = [(True, 0), (True, 1), (False, 2), (False, 3), (True, 4),
+ (True, 5), (True, 6), (False, 7), (False, 8), (False, 9)]
+ self.assertEqual(actual, expected)
+
+ def test_empty_iterable(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, []))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_length_one(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, [0]))
+ expected = [(True, 0)]
+ self.assertEqual(actual, expected)
+
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, [1]))
+ expected = [(False, 1)]
+ self.assertEqual(actual, expected)
+
+ def test_consecutive_true(self):
+ """Test that when the predicate matches multiple consecutive elements
+ it doesn't repeat elements in the output"""
+ actual = list(mi.adjacent(lambda x: x % 5 < 2, range(10)))
+ expected = [(True, 0), (True, 1), (True, 2), (False, 3), (True, 4),
+ (True, 5), (True, 6), (True, 7), (False, 8), (False, 9)]
+ self.assertEqual(actual, expected)
+
+ def test_distance(self):
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10), distance=2))
+ expected = [(True, 0), (True, 1), (True, 2), (True, 3), (True, 4),
+ (True, 5), (True, 6), (True, 7), (False, 8), (False, 9)]
+ self.assertEqual(actual, expected)
+
+ actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10), distance=3))
+ expected = [(True, 0), (True, 1), (True, 2), (True, 3), (True, 4),
+ (True, 5), (True, 6), (True, 7), (True, 8), (False, 9)]
+ self.assertEqual(actual, expected)
+
+ def test_large_distance(self):
+ """Test distance larger than the length of the iterable"""
+ iterable = range(10)
+ actual = list(mi.adjacent(lambda x: x % 5 == 4, iterable, distance=20))
+ expected = list(zip(repeat(True), iterable))
+ self.assertEqual(actual, expected)
+
+ actual = list(mi.adjacent(lambda x: False, iterable, distance=20))
+ expected = list(zip(repeat(False), iterable))
+ self.assertEqual(actual, expected)
+
+ def test_zero_distance(self):
+ """Test that adjacent() reduces to zip+map when distance is 0"""
+ iterable = range(1000)
+ predicate = lambda x: x % 4 == 2
+ actual = mi.adjacent(predicate, iterable, 0)
+ expected = zip(map(predicate, iterable), iterable)
+ self.assertTrue(all(a == e for a, e in zip(actual, expected)))
+
+ def test_negative_distance(self):
+ """Test that adjacent() raises an error with negative distance"""
+ pred = lambda x: x
+ self.assertRaises(
+ ValueError, lambda: mi.adjacent(pred, range(1000), -1)
+ )
+ self.assertRaises(
+ ValueError, lambda: mi.adjacent(pred, range(10), -10)
+ )
+
+ def test_grouping(self):
+ """Test interaction of adjacent() with groupby_transform()"""
+ iterable = mi.adjacent(lambda x: x % 5 == 0, range(10))
+ grouper = mi.groupby_transform(iterable, itemgetter(0), itemgetter(1))
+ actual = [(k, list(g)) for k, g in grouper]
+ expected = [
+ (True, [0, 1]),
+ (False, [2, 3]),
+ (True, [4, 5, 6]),
+ (False, [7, 8, 9]),
+ ]
+ self.assertEqual(actual, expected)
+
+ def test_call_once(self):
+ """Test that the predicate is only called once per item."""
+ already_seen = set()
+ iterable = range(10)
+
+ def predicate(item):
+ self.assertNotIn(item, already_seen)
+ already_seen.add(item)
+ return True
+
+ actual = list(mi.adjacent(predicate, iterable))
+ expected = [(True, x) for x in iterable]
+ self.assertEqual(actual, expected)
+
+
+class GroupByTransformTests(TestCase):
+ def assertAllGroupsEqual(self, groupby1, groupby2):
+ """Compare two groupby objects for equality, both keys and groups."""
+ for a, b in zip(groupby1, groupby2):
+ key1, group1 = a
+ key2, group2 = b
+ self.assertEqual(key1, key2)
+ self.assertListEqual(list(group1), list(group2))
+ self.assertRaises(StopIteration, lambda: next(groupby1))
+ self.assertRaises(StopIteration, lambda: next(groupby2))
+
+ def test_default_funcs(self):
+ """Test that groupby_transform() with default args mimics groupby()"""
+ iterable = [(x // 5, x) for x in range(1000)]
+ actual = mi.groupby_transform(iterable)
+ expected = groupby(iterable)
+ self.assertAllGroupsEqual(actual, expected)
+
+ def test_valuefunc(self):
+ iterable = [(int(x / 5), int(x / 3), x) for x in range(10)]
+
+ # Test the standard usage of grouping one iterable using another's keys
+ grouper = mi.groupby_transform(
+ iterable, keyfunc=itemgetter(0), valuefunc=itemgetter(-1)
+ )
+ actual = [(k, list(g)) for k, g in grouper]
+ expected = [(0, [0, 1, 2, 3, 4]), (1, [5, 6, 7, 8, 9])]
+ self.assertEqual(actual, expected)
+
+ grouper = mi.groupby_transform(
+ iterable, keyfunc=itemgetter(1), valuefunc=itemgetter(-1)
+ )
+ actual = [(k, list(g)) for k, g in grouper]
+ expected = [(0, [0, 1, 2]), (1, [3, 4, 5]), (2, [6, 7, 8]), (3, [9])]
+ self.assertEqual(actual, expected)
+
+ # and now for something a little different
+ d = dict(zip(range(10), 'abcdefghij'))
+ grouper = mi.groupby_transform(
+ range(10), keyfunc=lambda x: x // 5, valuefunc=d.get
+ )
+ actual = [(k, ''.join(g)) for k, g in grouper]
+ expected = [(0, 'abcde'), (1, 'fghij')]
+ self.assertEqual(actual, expected)
+
+ def test_no_valuefunc(self):
+ iterable = range(1000)
+
+ def key(x):
+ return x // 5
+
+ actual = mi.groupby_transform(iterable, key, valuefunc=None)
+ expected = groupby(iterable, key)
+ self.assertAllGroupsEqual(actual, expected)
+
+ actual = mi.groupby_transform(iterable, key) # default valuefunc
+ expected = groupby(iterable, key)
+ self.assertAllGroupsEqual(actual, expected)
+
+
+class NumericRangeTests(TestCase):
+ def test_basic(self):
+ for args, expected in [
+ ((4,), [0, 1, 2, 3]),
+ ((4.0,), [0.0, 1.0, 2.0, 3.0]),
+ ((1.0, 4), [1.0, 2.0, 3.0]),
+ ((1, 4.0), [1, 2, 3]),
+ ((1.0, 5), [1.0, 2.0, 3.0, 4.0]),
+ ((0, 20, 5), [0, 5, 10, 15]),
+ ((0, 20, 5.0), [0.0, 5.0, 10.0, 15.0]),
+ ((0, 10, 3), [0, 3, 6, 9]),
+ ((0, 10, 3.0), [0.0, 3.0, 6.0, 9.0]),
+ ((0, -5, -1), [0, -1, -2, -3, -4]),
+ ((0.0, -5, -1), [0.0, -1.0, -2.0, -3.0, -4.0]),
+ ((1, 2, Fraction(1, 2)), [Fraction(1, 1), Fraction(3, 2)]),
+ ((0,), []),
+ ((0.0,), []),
+ ((1, 0), []),
+ ((1.0, 0.0), []),
+ ((Fraction(2, 1),), [Fraction(0, 1), Fraction(1, 1)]),
+ ((Decimal('2.0'),), [Decimal('0.0'), Decimal('1.0')]),
+ ]:
+ actual = list(mi.numeric_range(*args))
+ self.assertEqual(actual, expected)
+ self.assertTrue(
+ all(type(a) == type(e) for a, e in zip(actual, expected))
+ )
+
+ def test_arg_count(self):
+ self.assertRaises(TypeError, lambda: list(mi.numeric_range()))
+ self.assertRaises(
+ TypeError, lambda: list(mi.numeric_range(0, 1, 2, 3))
+ )
+
+ def test_zero_step(self):
+ self.assertRaises(
+ ValueError, lambda: list(mi.numeric_range(1, 2, 0))
+ )
+
+
+class CountCycleTests(TestCase):
+ def test_basic(self):
+ expected = [
+ (0, 'a'), (0, 'b'), (0, 'c'),
+ (1, 'a'), (1, 'b'), (1, 'c'),
+ (2, 'a'), (2, 'b'), (2, 'c'),
+ ]
+ for actual in [
+ mi.take(9, mi.count_cycle('abc')), # n=None
+ list(mi.count_cycle('abc', 3)), # n=3
+ ]:
+ self.assertEqual(actual, expected)
+
+ def test_empty(self):
+ self.assertEqual(list(mi.count_cycle('')), [])
+ self.assertEqual(list(mi.count_cycle('', 2)), [])
+
+ def test_negative(self):
+ self.assertEqual(list(mi.count_cycle('abc', -3)), [])
+
+
+class LocateTests(TestCase):
+ def test_default_pred(self):
+ iterable = [0, 1, 1, 0, 1, 0, 0]
+ actual = list(mi.locate(iterable))
+ expected = [1, 2, 4]
+ self.assertEqual(actual, expected)
+
+ def test_no_matches(self):
+ iterable = [0, 0, 0]
+ actual = list(mi.locate(iterable))
+ expected = []
+ self.assertEqual(actual, expected)
+
+ def test_custom_pred(self):
+ iterable = ['0', 1, 1, '0', 1, '0', '0']
+ pred = lambda x: x == '0'
+ actual = list(mi.locate(iterable, pred))
+ expected = [0, 3, 5, 6]
+ self.assertEqual(actual, expected)
+
+
+class StripFunctionTests(TestCase):
+ def test_hashable(self):
+ iterable = list('www.example.com')
+ pred = lambda x: x in set('cmowz.')
+
+ self.assertEqual(list(mi.lstrip(iterable, pred)), list('example.com'))
+ self.assertEqual(list(mi.rstrip(iterable, pred)), list('www.example'))
+ self.assertEqual(list(mi.strip(iterable, pred)), list('example'))
+
+ def test_not_hashable(self):
+ iterable = [
+ list('http://'), list('www'), list('.example'), list('.com')
+ ]
+ pred = lambda x: x in [list('http://'), list('www'), list('.com')]
+
+ self.assertEqual(list(mi.lstrip(iterable, pred)), iterable[2:])
+ self.assertEqual(list(mi.rstrip(iterable, pred)), iterable[:3])
+ self.assertEqual(list(mi.strip(iterable, pred)), iterable[2: 3])
+
+ def test_math(self):
+ iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2]
+ pred = lambda x: x <= 2
+
+ self.assertEqual(list(mi.lstrip(iterable, pred)), iterable[3:])
+ self.assertEqual(list(mi.rstrip(iterable, pred)), iterable[:-3])
+ self.assertEqual(list(mi.strip(iterable, pred)), iterable[3:-3])
+
+
+class IsliceExtendedTests(TestCase):
+ def test_all(self):
+ iterable = ['0', '1', '2', '3', '4', '5']
+ indexes = list(range(-4, len(iterable) + 4)) + [None]
+ steps = [1, 2, 3, 4, -1, -2, -3, -4]
+ for slice_args in product(indexes, indexes, steps):
+ try:
+ actual = list(mi.islice_extended(iterable, *slice_args))
+ except Exception as e:
+ self.fail((slice_args, e))
+
+ expected = iterable[slice(*slice_args)]
+ self.assertEqual(actual, expected, slice_args)
+
+ def test_zero_step(self):
+ with self.assertRaises(ValueError):
+ list(mi.islice_extended([1, 2, 3], 0, 1, 0))
+
+
+class ConsecutiveGroupsTest(TestCase):
+ def test_numbers(self):
+ iterable = [-10, -8, -7, -6, 1, 2, 4, 5, -1, 7]
+ actual = [list(g) for g in mi.consecutive_groups(iterable)]
+ expected = [[-10], [-8, -7, -6], [1, 2], [4, 5], [-1], [7]]
+ self.assertEqual(actual, expected)
+
+ def test_custom_ordering(self):
+ iterable = ['1', '10', '11', '20', '21', '22', '30', '31']
+ ordering = lambda x: int(x)
+ actual = [list(g) for g in mi.consecutive_groups(iterable, ordering)]
+ expected = [['1'], ['10', '11'], ['20', '21', '22'], ['30', '31']]
+ self.assertEqual(actual, expected)
+
+ def test_exotic_ordering(self):
+ iterable = [
+ ('a', 'b', 'c', 'd'),
+ ('a', 'c', 'b', 'd'),
+ ('a', 'c', 'd', 'b'),
+ ('a', 'd', 'b', 'c'),
+ ('d', 'b', 'c', 'a'),
+ ('d', 'c', 'a', 'b'),
+ ]
+ ordering = list(permutations('abcd')).index
+ actual = [list(g) for g in mi.consecutive_groups(iterable, ordering)]
+ expected = [
+ [('a', 'b', 'c', 'd')],
+ [('a', 'c', 'b', 'd'), ('a', 'c', 'd', 'b'), ('a', 'd', 'b', 'c')],
+ [('d', 'b', 'c', 'a'), ('d', 'c', 'a', 'b')],
+ ]
+ self.assertEqual(actual, expected)
+
+
+class DifferenceTest(TestCase):
+ def test_normal(self):
+ iterable = [10, 20, 30, 40, 50]
+ actual = list(mi.difference(iterable))
+ expected = [10, 10, 10, 10, 10]
+ self.assertEqual(actual, expected)
+
+ def test_custom(self):
+ iterable = [10, 20, 30, 40, 50]
+ actual = list(mi.difference(iterable, add))
+ expected = [10, 30, 50, 70, 90]
+ self.assertEqual(actual, expected)
+
+ def test_roundtrip(self):
+ original = list(range(100))
+ accumulated = mi.accumulate(original)
+ actual = list(mi.difference(accumulated))
+ self.assertEqual(actual, original)
+
+ def test_one(self):
+ self.assertEqual(list(mi.difference([0])), [0])
+
+ def test_empty(self):
+ self.assertEqual(list(mi.difference([])), [])
+
+
+class SeekableTest(TestCase):
+ def test_exhaustion_reset(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(list(s), iterable) # Normal iteration
+ self.assertEqual(list(s), []) # Iterable is exhausted
+
+ s.seek(0)
+ self.assertEqual(list(s), iterable) # Back in action
+
+ def test_partial_reset(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(mi.take(5, s), iterable[:5]) # Normal iteration
+
+ s.seek(1)
+ self.assertEqual(list(s), iterable[1:]) # Get the rest of the iterable
+
+ def test_forward(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(mi.take(1, s), iterable[:1]) # Normal iteration
+
+ s.seek(3) # Skip over index 2
+ self.assertEqual(list(s), iterable[3:]) # Result is similar to slicing
+
+ s.seek(0) # Back to 0
+ self.assertEqual(list(s), iterable) # No difference in result
+
+ def test_past_end(self):
+ iterable = [str(n) for n in range(10)]
+
+ s = mi.seekable(iterable)
+ self.assertEqual(mi.take(1, s), iterable[:1]) # Normal iteration
+
+ s.seek(20)
+ self.assertEqual(list(s), []) # Iterable is exhausted
+
+ s.seek(0) # Back to 0
+ self.assertEqual(list(s), iterable) # No difference in result
+
+ def test_elements(self):
+ iterable = map(str, count())
+
+ s = mi.seekable(iterable)
+ mi.take(10, s)
+
+ elements = s.elements()
+ self.assertEqual(
+ [elements[i] for i in range(10)], [str(n) for n in range(10)]
+ )
+ self.assertEqual(len(elements), 10)
+
+ mi.take(10, s)
+ self.assertEqual(list(elements), [str(n) for n in range(20)])
+
+
+class SequenceViewTests(TestCase):
+ def test_init(self):
+ view = mi.SequenceView((1, 2, 3))
+ self.assertEqual(repr(view), "SequenceView((1, 2, 3))")
+ self.assertRaises(TypeError, lambda: mi.SequenceView({}))
+
+ def test_update(self):
+ seq = [1, 2, 3]
+ view = mi.SequenceView(seq)
+ self.assertEqual(len(view), 3)
+ self.assertEqual(repr(view), "SequenceView([1, 2, 3])")
+
+ seq.pop()
+ self.assertEqual(len(view), 2)
+ self.assertEqual(repr(view), "SequenceView([1, 2])")
+
+ def test_indexing(self):
+ seq = ('a', 'b', 'c', 'd', 'e', 'f')
+ view = mi.SequenceView(seq)
+ for i in range(-len(seq), len(seq)):
+ self.assertEqual(view[i], seq[i])
+
+ def test_slicing(self):
+ seq = ('a', 'b', 'c', 'd', 'e', 'f')
+ view = mi.SequenceView(seq)
+ n = len(seq)
+ indexes = list(range(-n - 1, n + 1)) + [None]
+ steps = list(range(-n, n + 1))
+ steps.remove(0)
+ for slice_args in product(indexes, indexes, steps):
+ i = slice(*slice_args)
+ self.assertEqual(view[i], seq[i])
+
+ def test_abc_methods(self):
+ # collections.Sequence should provide all of this functionality
+ seq = ('a', 'b', 'c', 'd', 'e', 'f', 'f')
+ view = mi.SequenceView(seq)
+
+ # __contains__
+ self.assertIn('b', view)
+ self.assertNotIn('g', view)
+
+ # __iter__
+ self.assertEqual(list(iter(view)), list(seq))
+
+ # __reversed__
+ self.assertEqual(list(reversed(view)), list(reversed(seq)))
+
+ # index
+ self.assertEqual(view.index('b'), 1)
+
+ # count
+ self.assertEqual(view.count('f'), 2)
+
+
+class RunLengthTest(TestCase):
+ def test_encode(self):
+ iterable = (int(str(n)[0]) for n in count(800))
+ actual = mi.take(4, mi.run_length.encode(iterable))
+ expected = [(8, 100), (9, 100), (1, 1000), (2, 1000)]
+ self.assertEqual(actual, expected)
+
+ def test_decode(self):
+ iterable = [('d', 4), ('c', 3), ('b', 2), ('a', 1)]
+ actual = ''.join(mi.run_length.decode(iterable))
+ expected = 'ddddcccbba'
+ self.assertEqual(actual, expected)
+
+
+class ExactlyNTests(TestCase):
+ """Tests for ``exactly_n()``"""
+
+ def test_true(self):
+ """Iterable has ``n`` ``True`` elements"""
+ self.assertTrue(mi.exactly_n([True, False, True], 2))
+ self.assertTrue(mi.exactly_n([1, 1, 1, 0], 3))
+ self.assertTrue(mi.exactly_n([False, False], 0))
+ self.assertTrue(mi.exactly_n(range(100), 10, lambda x: x < 10))
+
+ def test_false(self):
+ """Iterable does not have ``n`` ``True`` elements"""
+ self.assertFalse(mi.exactly_n([True, False, False], 2))
+ self.assertFalse(mi.exactly_n([True, True, False], 1))
+ self.assertFalse(mi.exactly_n([False], 1))
+ self.assertFalse(mi.exactly_n([True], -1))
+ self.assertFalse(mi.exactly_n(repeat(True), 100))
+
+ def test_empty(self):
+ """Return ``True`` if the iterable is empty and ``n`` is 0"""
+ self.assertTrue(mi.exactly_n([], 0))
+ self.assertFalse(mi.exactly_n([], 1))
+
+
+class AlwaysReversibleTests(TestCase):
+ """Tests for ``always_reversible()``"""
+
+ def test_regular_reversed(self):
+ self.assertEqual(list(reversed(range(10))),
+ list(mi.always_reversible(range(10))))
+ self.assertEqual(list(reversed([1, 2, 3])),
+ list(mi.always_reversible([1, 2, 3])))
+ self.assertEqual(reversed([1, 2, 3]).__class__,
+ mi.always_reversible([1, 2, 3]).__class__)
+
+ def test_nonseq_reversed(self):
+ # Create a non-reversible generator from a sequence
+ with self.assertRaises(TypeError):
+ reversed(x for x in range(10))
+
+ self.assertEqual(list(reversed(range(10))),
+ list(mi.always_reversible(x for x in range(10))))
+ self.assertEqual(list(reversed([1, 2, 3])),
+ list(mi.always_reversible(x for x in [1, 2, 3])))
+ self.assertNotEqual(reversed((1, 2)).__class__,
+ mi.always_reversible(x for x in (1, 2)).__class__)
+
+
+class CircularShiftsTests(TestCase):
+ def test_empty(self):
+ # empty iterable -> empty list
+ self.assertEqual(list(mi.circular_shifts([])), [])
+
+ def test_simple_circular_shifts(self):
+ # test a simple iterator case
+ self.assertEqual(
+ mi.circular_shifts(range(4)),
+ [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]
+ )
+
+ def test_duplicates(self):
+ # test non-distinct entries
+ self.assertEqual(
+ mi.circular_shifts([0, 1, 0, 1]),
+ [(0, 1, 0, 1), (1, 0, 1, 0), (0, 1, 0, 1), (1, 0, 1, 0)]
+ )
+
+
+class MakeDecoratorTests(TestCase):
+ def test_basic(self):
+ slicer = mi.make_decorator(islice)
+
+ @slicer(1, 10, 2)
+ def user_function(arg_1, arg_2, kwarg_1=None):
+ self.assertEqual(arg_1, 'arg_1')
+ self.assertEqual(arg_2, 'arg_2')
+ self.assertEqual(kwarg_1, 'kwarg_1')
+ return map(str, count())
+
+ it = user_function('arg_1', 'arg_2', kwarg_1='kwarg_1')
+ actual = list(it)
+ expected = ['1', '3', '5', '7', '9']
+ self.assertEqual(actual, expected)
+
+ def test_result_index(self):
+ def stringify(*args, **kwargs):
+ self.assertEqual(args[0], 'arg_0')
+ iterable = args[1]
+ self.assertEqual(args[2], 'arg_2')
+ self.assertEqual(kwargs['kwarg_1'], 'kwarg_1')
+ return map(str, iterable)
+
+ stringifier = mi.make_decorator(stringify, result_index=1)
+
+ @stringifier('arg_0', 'arg_2', kwarg_1='kwarg_1')
+ def user_function(n):
+ return count(n)
+
+ it = user_function(1)
+ actual = mi.take(5, it)
+ expected = ['1', '2', '3', '4', '5']
+ self.assertEqual(actual, expected)
+
+ def test_wrap_class(self):
+ seeker = mi.make_decorator(mi.seekable)
+
+ @seeker()
+ def user_function(n):
+ return map(str, range(n))
+
+ it = user_function(5)
+ self.assertEqual(list(it), ['0', '1', '2', '3', '4'])
+
+ it.seek(0)
+ self.assertEqual(list(it), ['0', '1', '2', '3', '4'])
+
+
+class MapReduceTests(TestCase):
+ def test_default(self):
+ iterable = (str(x) for x in range(5))
+ keyfunc = lambda x: int(x) // 2
+ actual = sorted(mi.map_reduce(iterable, keyfunc).items())
+ expected = [(0, ['0', '1']), (1, ['2', '3']), (2, ['4'])]
+ self.assertEqual(actual, expected)
+
+ def test_valuefunc(self):
+ iterable = (str(x) for x in range(5))
+ keyfunc = lambda x: int(x) // 2
+ valuefunc = int
+ actual = sorted(mi.map_reduce(iterable, keyfunc, valuefunc).items())
+ expected = [(0, [0, 1]), (1, [2, 3]), (2, [4])]
+ self.assertEqual(actual, expected)
+
+ def test_reducefunc(self):
+ iterable = (str(x) for x in range(5))
+ keyfunc = lambda x: int(x) // 2
+ valuefunc = int
+ reducefunc = lambda value_list: reduce(mul, value_list, 1)
+ actual = sorted(
+ mi.map_reduce(iterable, keyfunc, valuefunc, reducefunc).items()
+ )
+ expected = [(0, 0), (1, 6), (2, 4)]
+ self.assertEqual(actual, expected)
+
+ def test_ret(self):
+ d = mi.map_reduce([1, 0, 2, 0, 1, 0], bool)
+ self.assertEqual(d, {False: [0, 0, 0], True: [1, 2, 1]})
+ self.assertRaises(KeyError, lambda: d[None].append(1))
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/tests/test_recipes.py b/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/tests/test_recipes.py
new file mode 100644
index 0000000000..81721fdf9f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/more_itertools/tests/test_recipes.py
@@ -0,0 +1,607 @@
+from doctest import DocTestSuite
+from unittest import TestCase
+
+from itertools import combinations
+from six.moves import range
+
+import more_itertools as mi
+
+
+def load_tests(loader, tests, ignore):
+ # Add the doctests
+ tests.addTests(DocTestSuite('more_itertools.recipes'))
+ return tests
+
+
+class AccumulateTests(TestCase):
+ """Tests for ``accumulate()``"""
+
+ def test_empty(self):
+ """Test that an empty input returns an empty output"""
+ self.assertEqual(list(mi.accumulate([])), [])
+
+ def test_default(self):
+ """Test accumulate with the default function (addition)"""
+ self.assertEqual(list(mi.accumulate([1, 2, 3])), [1, 3, 6])
+
+ def test_bogus_function(self):
+ """Test accumulate with an invalid function"""
+ with self.assertRaises(TypeError):
+ list(mi.accumulate([1, 2, 3], func=lambda x: x))
+
+ def test_custom_function(self):
+ """Test accumulate with a custom function"""
+ self.assertEqual(
+ list(mi.accumulate((1, 2, 3, 2, 1), func=max)), [1, 2, 3, 3, 3]
+ )
+
+
+class TakeTests(TestCase):
+ """Tests for ``take()``"""
+
+ def test_simple_take(self):
+ """Test basic usage"""
+ t = mi.take(5, range(10))
+ self.assertEqual(t, [0, 1, 2, 3, 4])
+
+ def test_null_take(self):
+ """Check the null case"""
+ t = mi.take(0, range(10))
+ self.assertEqual(t, [])
+
+ def test_negative_take(self):
+ """Make sure taking negative items results in a ValueError"""
+ self.assertRaises(ValueError, lambda: mi.take(-3, range(10)))
+
+ def test_take_too_much(self):
+ """Taking more than an iterator has remaining should return what the
+ iterator has remaining.
+
+ """
+ t = mi.take(10, range(5))
+ self.assertEqual(t, [0, 1, 2, 3, 4])
+
+
+class TabulateTests(TestCase):
+ """Tests for ``tabulate()``"""
+
+ def test_simple_tabulate(self):
+ """Test the happy path"""
+ t = mi.tabulate(lambda x: x)
+ f = tuple([next(t) for _ in range(3)])
+ self.assertEqual(f, (0, 1, 2))
+
+ def test_count(self):
+ """Ensure tabulate accepts specific count"""
+ t = mi.tabulate(lambda x: 2 * x, -1)
+ f = (next(t), next(t), next(t))
+ self.assertEqual(f, (-2, 0, 2))
+
+
+class TailTests(TestCase):
+ """Tests for ``tail()``"""
+
+ def test_greater(self):
+ """Length of iterable is greather than requested tail"""
+ self.assertEqual(list(mi.tail(3, 'ABCDEFG')), ['E', 'F', 'G'])
+
+ def test_equal(self):
+ """Length of iterable is equal to the requested tail"""
+ self.assertEqual(
+ list(mi.tail(7, 'ABCDEFG')), ['A', 'B', 'C', 'D', 'E', 'F', 'G']
+ )
+
+ def test_less(self):
+ """Length of iterable is less than requested tail"""
+ self.assertEqual(
+ list(mi.tail(8, 'ABCDEFG')), ['A', 'B', 'C', 'D', 'E', 'F', 'G']
+ )
+
+
+class ConsumeTests(TestCase):
+ """Tests for ``consume()``"""
+
+ def test_sanity(self):
+ """Test basic functionality"""
+ r = (x for x in range(10))
+ mi.consume(r, 3)
+ self.assertEqual(3, next(r))
+
+ def test_null_consume(self):
+ """Check the null case"""
+ r = (x for x in range(10))
+ mi.consume(r, 0)
+ self.assertEqual(0, next(r))
+
+ def test_negative_consume(self):
+ """Check that negative consumsion throws an error"""
+ r = (x for x in range(10))
+ self.assertRaises(ValueError, lambda: mi.consume(r, -1))
+
+ def test_total_consume(self):
+ """Check that iterator is totally consumed by default"""
+ r = (x for x in range(10))
+ mi.consume(r)
+ self.assertRaises(StopIteration, lambda: next(r))
+
+
+class NthTests(TestCase):
+ """Tests for ``nth()``"""
+
+ def test_basic(self):
+ """Make sure the nth item is returned"""
+ l = range(10)
+ for i, v in enumerate(l):
+ self.assertEqual(mi.nth(l, i), v)
+
+ def test_default(self):
+ """Ensure a default value is returned when nth item not found"""
+ l = range(3)
+ self.assertEqual(mi.nth(l, 100, "zebra"), "zebra")
+
+ def test_negative_item_raises(self):
+ """Ensure asking for a negative item raises an exception"""
+ self.assertRaises(ValueError, lambda: mi.nth(range(10), -3))
+
+
+class AllEqualTests(TestCase):
+ """Tests for ``all_equal()``"""
+
+ def test_true(self):
+ """Everything is equal"""
+ self.assertTrue(mi.all_equal('aaaaaa'))
+ self.assertTrue(mi.all_equal([0, 0, 0, 0]))
+
+ def test_false(self):
+ """Not everything is equal"""
+ self.assertFalse(mi.all_equal('aaaaab'))
+ self.assertFalse(mi.all_equal([0, 0, 0, 1]))
+
+ def test_tricky(self):
+ """Not everything is identical, but everything is equal"""
+ items = [1, complex(1, 0), 1.0]
+ self.assertTrue(mi.all_equal(items))
+
+ def test_empty(self):
+ """Return True if the iterable is empty"""
+ self.assertTrue(mi.all_equal(''))
+ self.assertTrue(mi.all_equal([]))
+
+ def test_one(self):
+ """Return True if the iterable is singular"""
+ self.assertTrue(mi.all_equal('0'))
+ self.assertTrue(mi.all_equal([0]))
+
+
+class QuantifyTests(TestCase):
+ """Tests for ``quantify()``"""
+
+ def test_happy_path(self):
+ """Make sure True count is returned"""
+ q = [True, False, True]
+ self.assertEqual(mi.quantify(q), 2)
+
+ def test_custom_predicate(self):
+ """Ensure non-default predicates return as expected"""
+ q = range(10)
+ self.assertEqual(mi.quantify(q, lambda x: x % 2 == 0), 5)
+
+
+class PadnoneTests(TestCase):
+ """Tests for ``padnone()``"""
+
+ def test_happy_path(self):
+ """wrapper iterator should return None indefinitely"""
+ r = range(2)
+ p = mi.padnone(r)
+ self.assertEqual([0, 1, None, None], [next(p) for _ in range(4)])
+
+
+class NcyclesTests(TestCase):
+ """Tests for ``nyclces()``"""
+
+ def test_happy_path(self):
+ """cycle a sequence three times"""
+ r = ["a", "b", "c"]
+ n = mi.ncycles(r, 3)
+ self.assertEqual(
+ ["a", "b", "c", "a", "b", "c", "a", "b", "c"],
+ list(n)
+ )
+
+ def test_null_case(self):
+ """asking for 0 cycles should return an empty iterator"""
+ n = mi.ncycles(range(100), 0)
+ self.assertRaises(StopIteration, lambda: next(n))
+
+ def test_pathological_case(self):
+ """asking for negative cycles should return an empty iterator"""
+ n = mi.ncycles(range(100), -10)
+ self.assertRaises(StopIteration, lambda: next(n))
+
+
+class DotproductTests(TestCase):
+ """Tests for ``dotproduct()``'"""
+
+ def test_happy_path(self):
+ """simple dotproduct example"""
+ self.assertEqual(400, mi.dotproduct([10, 10], [20, 20]))
+
+
+class FlattenTests(TestCase):
+ """Tests for ``flatten()``"""
+
+ def test_basic_usage(self):
+ """ensure list of lists is flattened one level"""
+ f = [[0, 1, 2], [3, 4, 5]]
+ self.assertEqual(list(range(6)), list(mi.flatten(f)))
+
+ def test_single_level(self):
+ """ensure list of lists is flattened only one level"""
+ f = [[0, [1, 2]], [[3, 4], 5]]
+ self.assertEqual([0, [1, 2], [3, 4], 5], list(mi.flatten(f)))
+
+
+class RepeatfuncTests(TestCase):
+ """Tests for ``repeatfunc()``"""
+
+ def test_simple_repeat(self):
+ """test simple repeated functions"""
+ r = mi.repeatfunc(lambda: 5)
+ self.assertEqual([5, 5, 5, 5, 5], [next(r) for _ in range(5)])
+
+ def test_finite_repeat(self):
+ """ensure limited repeat when times is provided"""
+ r = mi.repeatfunc(lambda: 5, times=5)
+ self.assertEqual([5, 5, 5, 5, 5], list(r))
+
+ def test_added_arguments(self):
+ """ensure arguments are applied to the function"""
+ r = mi.repeatfunc(lambda x: x, 2, 3)
+ self.assertEqual([3, 3], list(r))
+
+ def test_null_times(self):
+ """repeat 0 should return an empty iterator"""
+ r = mi.repeatfunc(range, 0, 3)
+ self.assertRaises(StopIteration, lambda: next(r))
+
+
+class PairwiseTests(TestCase):
+ """Tests for ``pairwise()``"""
+
+ def test_base_case(self):
+ """ensure an iterable will return pairwise"""
+ p = mi.pairwise([1, 2, 3])
+ self.assertEqual([(1, 2), (2, 3)], list(p))
+
+ def test_short_case(self):
+ """ensure an empty iterator if there's not enough values to pair"""
+ p = mi.pairwise("a")
+ self.assertRaises(StopIteration, lambda: next(p))
+
+
+class GrouperTests(TestCase):
+ """Tests for ``grouper()``"""
+
+ def test_even(self):
+ """Test when group size divides evenly into the length of
+ the iterable.
+
+ """
+ self.assertEqual(
+ list(mi.grouper(3, 'ABCDEF')), [('A', 'B', 'C'), ('D', 'E', 'F')]
+ )
+
+ def test_odd(self):
+ """Test when group size does not divide evenly into the length of the
+ iterable.
+
+ """
+ self.assertEqual(
+ list(mi.grouper(3, 'ABCDE')), [('A', 'B', 'C'), ('D', 'E', None)]
+ )
+
+ def test_fill_value(self):
+ """Test that the fill value is used to pad the final group"""
+ self.assertEqual(
+ list(mi.grouper(3, 'ABCDE', 'x')),
+ [('A', 'B', 'C'), ('D', 'E', 'x')]
+ )
+
+
+class RoundrobinTests(TestCase):
+ """Tests for ``roundrobin()``"""
+
+ def test_even_groups(self):
+ """Ensure ordered output from evenly populated iterables"""
+ self.assertEqual(
+ list(mi.roundrobin('ABC', [1, 2, 3], range(3))),
+ ['A', 1, 0, 'B', 2, 1, 'C', 3, 2]
+ )
+
+ def test_uneven_groups(self):
+ """Ensure ordered output from unevenly populated iterables"""
+ self.assertEqual(
+ list(mi.roundrobin('ABCD', [1, 2], range(0))),
+ ['A', 1, 'B', 2, 'C', 'D']
+ )
+
+
+class PartitionTests(TestCase):
+ """Tests for ``partition()``"""
+
+ def test_bool(self):
+ """Test when pred() returns a boolean"""
+ lesser, greater = mi.partition(lambda x: x > 5, range(10))
+ self.assertEqual(list(lesser), [0, 1, 2, 3, 4, 5])
+ self.assertEqual(list(greater), [6, 7, 8, 9])
+
+ def test_arbitrary(self):
+ """Test when pred() returns an integer"""
+ divisibles, remainders = mi.partition(lambda x: x % 3, range(10))
+ self.assertEqual(list(divisibles), [0, 3, 6, 9])
+ self.assertEqual(list(remainders), [1, 2, 4, 5, 7, 8])
+
+
+class PowersetTests(TestCase):
+ """Tests for ``powerset()``"""
+
+ def test_combinatorics(self):
+ """Ensure a proper enumeration"""
+ p = mi.powerset([1, 2, 3])
+ self.assertEqual(
+ list(p),
+ [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
+ )
+
+
+class UniqueEverseenTests(TestCase):
+ """Tests for ``unique_everseen()``"""
+
+ def test_everseen(self):
+ """ensure duplicate elements are ignored"""
+ u = mi.unique_everseen('AAAABBBBCCDAABBB')
+ self.assertEqual(
+ ['A', 'B', 'C', 'D'],
+ list(u)
+ )
+
+ def test_custom_key(self):
+ """ensure the custom key comparison works"""
+ u = mi.unique_everseen('aAbACCc', key=str.lower)
+ self.assertEqual(list('abC'), list(u))
+
+ def test_unhashable(self):
+ """ensure things work for unhashable items"""
+ iterable = ['a', [1, 2, 3], [1, 2, 3], 'a']
+ u = mi.unique_everseen(iterable)
+ self.assertEqual(list(u), ['a', [1, 2, 3]])
+
+ def test_unhashable_key(self):
+ """ensure things work for unhashable items with a custom key"""
+ iterable = ['a', [1, 2, 3], [1, 2, 3], 'a']
+ u = mi.unique_everseen(iterable, key=lambda x: x)
+ self.assertEqual(list(u), ['a', [1, 2, 3]])
+
+
+class UniqueJustseenTests(TestCase):
+ """Tests for ``unique_justseen()``"""
+
+ def test_justseen(self):
+ """ensure only last item is remembered"""
+ u = mi.unique_justseen('AAAABBBCCDABB')
+ self.assertEqual(list('ABCDAB'), list(u))
+
+ def test_custom_key(self):
+ """ensure the custom key comparison works"""
+ u = mi.unique_justseen('AABCcAD', str.lower)
+ self.assertEqual(list('ABCAD'), list(u))
+
+
+class IterExceptTests(TestCase):
+ """Tests for ``iter_except()``"""
+
+ def test_exact_exception(self):
+ """ensure the exact specified exception is caught"""
+ l = [1, 2, 3]
+ i = mi.iter_except(l.pop, IndexError)
+ self.assertEqual(list(i), [3, 2, 1])
+
+ def test_generic_exception(self):
+ """ensure the generic exception can be caught"""
+ l = [1, 2]
+ i = mi.iter_except(l.pop, Exception)
+ self.assertEqual(list(i), [2, 1])
+
+ def test_uncaught_exception_is_raised(self):
+ """ensure a non-specified exception is raised"""
+ l = [1, 2, 3]
+ i = mi.iter_except(l.pop, KeyError)
+ self.assertRaises(IndexError, lambda: list(i))
+
+ def test_first(self):
+ """ensure first is run before the function"""
+ l = [1, 2, 3]
+ f = lambda: 25
+ i = mi.iter_except(l.pop, IndexError, f)
+ self.assertEqual(list(i), [25, 3, 2, 1])
+
+
+class FirstTrueTests(TestCase):
+ """Tests for ``first_true()``"""
+
+ def test_something_true(self):
+ """Test with no keywords"""
+ self.assertEqual(mi.first_true(range(10)), 1)
+
+ def test_nothing_true(self):
+ """Test default return value."""
+ self.assertEqual(mi.first_true([0, 0, 0]), False)
+
+ def test_default(self):
+ """Test with a default keyword"""
+ self.assertEqual(mi.first_true([0, 0, 0], default='!'), '!')
+
+ def test_pred(self):
+ """Test with a custom predicate"""
+ self.assertEqual(
+ mi.first_true([2, 4, 6], pred=lambda x: x % 3 == 0), 6
+ )
+
+
+class RandomProductTests(TestCase):
+ """Tests for ``random_product()``
+
+ Since random.choice() has different results with the same seed across
+ python versions 2.x and 3.x, these tests use highly probable events to
+ create predictable outcomes across platforms.
+ """
+
+ def test_simple_lists(self):
+ """Ensure that one item is chosen from each list in each pair.
+ Also ensure that each item from each list eventually appears in
+ the chosen combinations.
+
+ Odds are roughly 1 in 7.1 * 10e16 that one item from either list will
+ not be chosen after 100 samplings of one item from each list. Just to
+ be safe, it is better to use a known random seed, too.
+
+ """
+ nums = [1, 2, 3]
+ lets = ['a', 'b', 'c']
+ n, m = zip(*[mi.random_product(nums, lets) for _ in range(100)])
+ n, m = set(n), set(m)
+ self.assertEqual(n, set(nums))
+ self.assertEqual(m, set(lets))
+ self.assertEqual(len(n), len(nums))
+ self.assertEqual(len(m), len(lets))
+
+ def test_list_with_repeat(self):
+ """ensure multiple items are chosen, and that they appear to be chosen
+ from one list then the next, in proper order.
+
+ """
+ nums = [1, 2, 3]
+ lets = ['a', 'b', 'c']
+ r = list(mi.random_product(nums, lets, repeat=100))
+ self.assertEqual(2 * 100, len(r))
+ n, m = set(r[::2]), set(r[1::2])
+ self.assertEqual(n, set(nums))
+ self.assertEqual(m, set(lets))
+ self.assertEqual(len(n), len(nums))
+ self.assertEqual(len(m), len(lets))
+
+
+class RandomPermutationTests(TestCase):
+ """Tests for ``random_permutation()``"""
+
+ def test_full_permutation(self):
+ """ensure every item from the iterable is returned in a new ordering
+
+ 15 elements have a 1 in 1.3 * 10^12 chance of appearing in sorted
+ order, so we fix a seed value just to be sure.
+
+ """
+ i = range(15)
+ r = mi.random_permutation(i)
+ self.assertEqual(set(i), set(r))
+ if list(i) == list(r):
+ raise AssertionError("Values were not permuted")
+
+ def test_partial_permutation(self):
+ """ensure all returned items are from the iterable, that the returned
+ permutation is of the desired length, and that all items eventually
+ get returned.
+
+ Sampling 100 permutations of length 5 from a set of 15 leaves a
+ (2/3)^100 chance that an item will not be chosen. Multiplied by 15
+ items, there is a 1 in 2.6e16 chance that at least 1 item will not
+ show up in the resulting output. Using a random seed will fix that.
+
+ """
+ items = range(15)
+ item_set = set(items)
+ all_items = set()
+ for _ in range(100):
+ permutation = mi.random_permutation(items, 5)
+ self.assertEqual(len(permutation), 5)
+ permutation_set = set(permutation)
+ self.assertLessEqual(permutation_set, item_set)
+ all_items |= permutation_set
+ self.assertEqual(all_items, item_set)
+
+
+class RandomCombinationTests(TestCase):
+ """Tests for ``random_combination()``"""
+
+ def test_pseudorandomness(self):
+ """ensure different subsets of the iterable get returned over many
+ samplings of random combinations"""
+ items = range(15)
+ all_items = set()
+ for _ in range(50):
+ combination = mi.random_combination(items, 5)
+ all_items |= set(combination)
+ self.assertEqual(all_items, set(items))
+
+ def test_no_replacement(self):
+ """ensure that elements are sampled without replacement"""
+ items = range(15)
+ for _ in range(50):
+ combination = mi.random_combination(items, len(items))
+ self.assertEqual(len(combination), len(set(combination)))
+ self.assertRaises(
+ ValueError, lambda: mi.random_combination(items, len(items) + 1)
+ )
+
+
+class RandomCombinationWithReplacementTests(TestCase):
+ """Tests for ``random_combination_with_replacement()``"""
+
+ def test_replacement(self):
+ """ensure that elements are sampled with replacement"""
+ items = range(5)
+ combo = mi.random_combination_with_replacement(items, len(items) * 2)
+ self.assertEqual(2 * len(items), len(combo))
+ if len(set(combo)) == len(combo):
+ raise AssertionError("Combination contained no duplicates")
+
+ def test_pseudorandomness(self):
+ """ensure different subsets of the iterable get returned over many
+ samplings of random combinations"""
+ items = range(15)
+ all_items = set()
+ for _ in range(50):
+ combination = mi.random_combination_with_replacement(items, 5)
+ all_items |= set(combination)
+ self.assertEqual(all_items, set(items))
+
+
+class NthCombinationTests(TestCase):
+ def test_basic(self):
+ iterable = 'abcdefg'
+ r = 4
+ for index, expected in enumerate(combinations(iterable, r)):
+ actual = mi.nth_combination(iterable, r, index)
+ self.assertEqual(actual, expected)
+
+ def test_long(self):
+ actual = mi.nth_combination(range(180), 4, 2000000)
+ expected = (2, 12, 35, 126)
+ self.assertEqual(actual, expected)
+
+
+class PrependTests(TestCase):
+ def test_basic(self):
+ value = 'a'
+ iterator = iter('bcdefg')
+ actual = list(mi.prepend(value, iterator))
+ expected = list('abcdefg')
+ self.assertEqual(actual, expected)
+
+ def test_multiple(self):
+ value = 'ab'
+ iterator = iter('cdefg')
+ actual = tuple(mi.prepend(value, iterator))
+ expected = ('ab',) + tuple('cdefg')
+ self.assertEqual(actual, expected)
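+
+
+# A minimal doctest-style sketch of the two recipes above, assuming ``mi`` and
+# ``combinations`` are imported at the top of this file as in the tests:
+#
+# >>> list(mi.prepend('a', iter('bcdefg')))
+# ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+# >>> mi.nth_combination('abcdefg', 4, 0) == next(iter(combinations('abcdefg', 4)))
+# True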
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/setup.cfg b/testing/web-platform/tests/tools/third_party/more-itertools/setup.cfg
new file mode 100644
index 0000000000..7c0e37ba6c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/setup.cfg
@@ -0,0 +1,3 @@
+[flake8]
+exclude = ./docs/conf.py, .eggs/
+ignore = E731, E741, F999
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/setup.py b/testing/web-platform/tests/tools/third_party/more-itertools/setup.py
new file mode 100644
index 0000000000..484e4d06f7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/setup.py
@@ -0,0 +1,59 @@
+# Hack to prevent stupid error on exit of `python setup.py test`. (See
+# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html.)
+try:
+ import multiprocessing # noqa
+except ImportError:
+ pass
+from re import sub
+
+from setuptools import setup, find_packages
+
+
+def get_long_description():
+ # Fix display issues on PyPI caused by RST markup
+ readme = open('README.rst').read()
+
+ version_lines = []
+ with open('docs/versions.rst') as infile:
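+ # Skip the first line of the file before collecting the version notes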
+ next(infile)
+ for line in infile:
+ line = line.rstrip().replace('.. automodule:: more_itertools', '')
+ version_lines.append(line)
+ version_history = '\n'.join(version_lines)
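+ # Strip Sphinx ":func:" roles, leaving just the referenced names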
+ version_history = sub(r':func:`([a-zA-Z0-9._]+)`', r'\1', version_history)
+
+ ret = readme + '\n\n' + version_history
+ return ret
+
+
+setup(
+ name='more-itertools',
+ version='4.2.0',
+ description='More routines for operating on iterables, beyond itertools',
+ long_description=get_long_description(),
+ author='Erik Rose',
+ author_email='erikrose@grinchcentral.com',
+ license='MIT',
+ packages=find_packages(exclude=['ez_setup']),
+ install_requires=['six>=1.0.0,<2.0.0'],
+ test_suite='more_itertools.tests',
+ url='https://github.com/erikrose/more-itertools',
+ include_package_data=True,
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'Natural Language :: English',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Topic :: Software Development :: Libraries'],
+ keywords=['itertools', 'iterator', 'iteration', 'filter', 'peek',
+ 'peekable', 'collate', 'chunk', 'chunked'],
+)
diff --git a/testing/web-platform/tests/tools/third_party/more-itertools/tox.ini b/testing/web-platform/tests/tools/third_party/more-itertools/tox.ini
new file mode 100644
index 0000000000..70c68c058d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/more-itertools/tox.ini
@@ -0,0 +1,5 @@
+[tox]
+envlist = py27, py34, py35, py36, py37
+
+[testenv]
+commands = {envbindir}/python -m unittest discover -v
diff --git a/testing/web-platform/tests/tools/third_party/packaging/.coveragerc b/testing/web-platform/tests/tools/third_party/packaging/.coveragerc
new file mode 100644
index 0000000000..da205e5a14
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/.coveragerc
@@ -0,0 +1,9 @@
+[run]
+branch = True
+omit = packaging/_compat.py
+
+[report]
+exclude_lines =
+ pragma: no cover
+ @abc.abstractmethod
+ @abc.abstractproperty
diff --git a/testing/web-platform/tests/tools/third_party/packaging/.flake8 b/testing/web-platform/tests/tools/third_party/packaging/.flake8
new file mode 100644
index 0000000000..b5a35be92a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/.flake8
@@ -0,0 +1,3 @@
+[flake8]
+max-line-length = 88
+ignore = E203,W503,W504
diff --git a/testing/web-platform/tests/tools/third_party/packaging/.github/workflows/docs.yml b/testing/web-platform/tests/tools/third_party/packaging/.github/workflows/docs.yml
new file mode 100644
index 0000000000..2c15738779
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/.github/workflows/docs.yml
@@ -0,0 +1,30 @@
+name: Documentation
+
+on:
+ pull_request:
+ paths:
+ - 'docs/**'
+ push:
+ paths:
+ - 'docs/**'
+
+jobs:
+ docs:
+ name: nox -s docs
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v1
+
+ - uses: actions/setup-python@v2
+ name: Install Python
+ with:
+ python-version: '3.9'
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ python -m pip install --upgrade nox
+
+ - name: Build documentation
+ run: python -m nox -s docs
diff --git a/testing/web-platform/tests/tools/third_party/packaging/.github/workflows/lint.yml b/testing/web-platform/tests/tools/third_party/packaging/.github/workflows/lint.yml
new file mode 100644
index 0000000000..f37e63463c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/.github/workflows/lint.yml
@@ -0,0 +1,59 @@
+name: Linting
+
+on:
+ pull_request:
+ paths:
+ - "**.py"
+ push:
+ paths:
+ - "**.py"
+
+jobs:
+ lint:
+ name: nox -s lint
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v1
+
+ - uses: actions/setup-python@v2
+ name: Install Python
+ with:
+ python-version: "3.9"
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ python -m pip install --upgrade nox
+
+ - name: Run `nox -s lint`
+ run: python -m nox -s lint
+
+ build:
+ name: Build sdist and wheel
+ runs-on: ubuntu-latest
+ # Linting verifies that the project is in an acceptable state to create
+ # release files. This job should also run whenever a release is ready to go
+ # public, as the version number will have been changed by editing __about__.py.
+ needs: lint
+
+ steps:
+ - uses: actions/checkout@v1
+
+ - uses: actions/setup-python@v2
+ name: Install Python
+ with:
+ python-version: "3.9"
+
+ - name: Install dependencies
+ run: python -m pip install --upgrade build
+
+ - name: Build
+ run: pyproject-build
+
+ - name: Archive files
+ uses: actions/upload-artifact@v1
+ with:
+ name: dist
+ path: dist
diff --git a/testing/web-platform/tests/tools/third_party/packaging/.github/workflows/test.yml b/testing/web-platform/tests/tools/third_party/packaging/.github/workflows/test.yml
new file mode 100644
index 0000000000..97c0c25d3c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/.github/workflows/test.yml
@@ -0,0 +1,56 @@
+name: Test
+
+on:
+ pull_request:
+ paths:
+ - ".github/workflows/test.yml"
+ - "**.py"
+ push:
+ paths:
+ - ".github/workflows/test.yml"
+ - "**.py"
+
+jobs:
+ test:
+ name: ${{ matrix.os }} / ${{ matrix.python_version }}
+ runs-on: ${{ matrix.os }}-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [Ubuntu, Windows, macOS]
+ python_version:
+ ["3.6", "3.7", "3.8", "3.9", "3.10", "pypy-3.7"]
+
+ steps:
+ - uses: actions/checkout@v1
+
+ - uses: actions/setup-python@v2
+ name: Install Python ${{ matrix.python_version }}
+ with:
+ python-version: ${{ matrix.python_version }}
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ python -m pip install nox
+ shell: bash
+
+ - name: Run nox
+ run: |
+ python -m nox --error-on-missing-interpreters -s tests-${{ matrix.python_version }}
+ shell: bash
+ if: ${{ ! startsWith( matrix.python_version, 'pypy' ) }}
+
+ # Binary is named 'pypy', but setup-python specifies it as 'pypy2'.
+ - name: Run nox for pypy2
+ run: |
+ python -m nox --error-on-missing-interpreters -s tests-pypy
+ shell: bash
+ if: matrix.python_version == 'pypy2'
+
+ # Binary is named 'pypy3', but setup-python specifies it as 'pypy-3.7'.
+ - name: Run nox for pypy3
+ run: |
+ python -m nox --error-on-missing-interpreters -s tests-pypy3
+ shell: bash
+ if: matrix.python_version == 'pypy-3.7'
diff --git a/testing/web-platform/tests/tools/third_party/packaging/.gitignore b/testing/web-platform/tests/tools/third_party/packaging/.gitignore
new file mode 100644
index 0000000000..05e554a64c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/.gitignore
@@ -0,0 +1,18 @@
+*.egg-info/
+*.egg
+*.py[co]
+
+.[nt]ox/
+.cache/
+.coverage
+.idea
+.venv*
+.vscode/
+
+.mypy_cache/
+.pytest_cache/
+__pycache__/
+_build/
+build/
+dist/
+htmlcov/
diff --git a/testing/web-platform/tests/tools/third_party/packaging/.pre-commit-config.yaml b/testing/web-platform/tests/tools/third_party/packaging/.pre-commit-config.yaml
new file mode 100644
index 0000000000..49ae0d4e78
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/.pre-commit-config.yaml
@@ -0,0 +1,39 @@
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v3.4.0
+ hooks:
+ - id: check-toml
+ - id: check-yaml
+ - id: end-of-file-fixer
+ - id: trailing-whitespace
+
+ - repo: https://github.com/pre-commit/mirrors-mypy
+ rev: v0.812
+ hooks:
+ - id: mypy
+ exclude: '^(docs|tasks|tests)|setup\.py'
+ args: []
+
+ - repo: https://github.com/asottile/pyupgrade
+ rev: v2.29.0
+ hooks:
+ - id: pyupgrade
+ args: [--py36-plus]
+
+ - repo: https://github.com/psf/black
+ rev: 20.8b1
+ hooks:
+ - id: black
+
+ - repo: https://github.com/PyCQA/isort
+ rev: 5.8.0
+ hooks:
+ - id: isort
+
+ - repo: https://gitlab.com/PyCQA/flake8
+ rev: "3.9.0"
+ hooks:
+ - id: flake8
+ additional_dependencies: ["pep8-naming"]
+ # Ignore all format-related checks as Black takes care of those.
+ args: ["--ignore", "E2,W5", "--select", "E,W,F,N"]
diff --git a/testing/web-platform/tests/tools/third_party/packaging/.readthedocs.yml b/testing/web-platform/tests/tools/third_party/packaging/.readthedocs.yml
new file mode 100644
index 0000000000..d8ac216687
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/.readthedocs.yml
@@ -0,0 +1,15 @@
+version: 2
+
+build:
+ image: latest
+
+formats: [pdf]
+sphinx:
+ configuration: docs/conf.py
+
+python:
+ version: 3.8
+ install:
+ - requirements: docs/requirements.txt
+ - method: pip
+ path: .
diff --git a/testing/web-platform/tests/tools/third_party/packaging/CHANGELOG.rst b/testing/web-platform/tests/tools/third_party/packaging/CHANGELOG.rst
new file mode 100644
index 0000000000..f23c30314a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/CHANGELOG.rst
@@ -0,0 +1,347 @@
+Changelog
+---------
+
+21.3 - 2021-11-17
+~~~~~~~~~~~~~~~~~
+
+* Add a ``pp3-none-any`` tag (:issue:`311`)
+* Replace the blank pyparsing 3 exclusion with a 3.0.5 exclusion (:issue:`481`, :issue:`486`)
+* Fix a spelling mistake (:issue:`479`)
+
+21.2 - 2021-10-29
+~~~~~~~~~~~~~~~~~
+
+* Update documentation entry for 21.1.
+
+21.1 - 2021-10-29
+~~~~~~~~~~~~~~~~~
+
+* Update pin to pyparsing to exclude 3.0.0.
+
+21.0 - 2021-07-03
+~~~~~~~~~~~~~~~~~
+
+* PEP 656: musllinux support (:issue:`411`)
+* Drop support for Python 2.7, Python 3.4 and Python 3.5.
+* Replace distutils usage with sysconfig (:issue:`396`)
+* Add support for zip files in ``parse_sdist_filename`` (:issue:`429`)
+* Use cached ``_hash`` attribute to short-circuit tag equality comparisons (:issue:`417`)
+* Specify the default value for the ``specifier`` argument to ``SpecifierSet`` (:issue:`437`)
+* Proper keyword-only "warn" argument in packaging.tags (:issue:`403`)
+* Correctly remove prerelease suffixes from ~= check (:issue:`366`)
+* Fix type hints for ``Version.post`` and ``Version.dev`` (:issue:`393`)
+* Use typing alias ``UnparsedVersion`` (:issue:`398`)
+* Improve type inference for ``packaging.specifiers.filter()`` (:issue:`430`)
+* Tighten the return type of ``canonicalize_version()`` (:issue:`402`)
+
+20.9 - 2021-01-29
+~~~~~~~~~~~~~~~~~
+
+* Run `isort <https://pypi.org/project/isort/>`_ over the code base (:issue:`377`)
+* Add support for the ``macosx_10_*_universal2`` platform tags (:issue:`379`)
+* Introduce ``packaging.utils.parse_wheel_filename()`` and ``parse_sdist_filename()``
+ (:issue:`387` and :issue:`389`)
+
+20.8 - 2020-12-11
+~~~~~~~~~~~~~~~~~
+
+* Revert to setuptools for compatibility purposes for some Linux distros (:issue:`363`)
+* Do not insert an underscore in wheel tags when the interpreter version number
+ is more than 2 digits (:issue:`372`)
+
+20.7 - 2020-11-28
+~~~~~~~~~~~~~~~~~
+
+No unreleased changes.
+
+20.6 - 2020-11-28
+~~~~~~~~~~~~~~~~~
+
+.. note:: This release was subsequently yanked, and these changes were included in 20.7.
+
+* Fix flit configuration, to include LICENSE files (:issue:`357`)
+* Make `intel` a recognized CPU architecture for the `universal` macOS platform tag (:issue:`361`)
+* Add some missing type hints to ``packaging.requirements`` (:issue:`350`)
+
+20.5 - 2020-11-27
+~~~~~~~~~~~~~~~~~
+
+* Officially support Python 3.9 (:issue:`343`)
+* Deprecate the ``LegacyVersion`` and ``LegacySpecifier`` classes (:issue:`321`)
+* Handle ``OSError`` on non-dynamic executables when attempting to resolve
+ the glibc version string.
+
+20.4 - 2020-05-19
+~~~~~~~~~~~~~~~~~
+
+* Canonicalize version before comparing specifiers. (:issue:`282`)
+* Change type hint for ``canonicalize_name`` to return
+ ``packaging.utils.NormalizedName``.
+ This enables the use of static typing tools (like mypy) to detect mixing of
+ normalized and un-normalized names.
+
+20.3 - 2020-03-05
+~~~~~~~~~~~~~~~~~
+
+* Fix changelog for 20.2.
+
+20.2 - 2020-03-05
+~~~~~~~~~~~~~~~~~
+
+* Fix a bug that caused a 32-bit OS that runs on a 64-bit ARM CPU (e.g. ARM-v8,
+ aarch64) to report the wrong bitness.
+
+20.1 - 2020-01-24
+~~~~~~~~~~~~~~~~~~~
+
+* Fix a bug caused by reuse of an exhausted iterator. (:issue:`257`)
+
+20.0 - 2020-01-06
+~~~~~~~~~~~~~~~~~
+
+* Add type hints (:issue:`191`)
+
+* Add proper trove classifiers for PyPy support (:issue:`198`)
+
+* Scale back depending on ``ctypes`` for manylinux support detection (:issue:`171`)
+
+* Use ``sys.implementation.name`` where appropriate for ``packaging.tags`` (:issue:`193`)
+
+* Expand upon the API provided by ``packaging.tags``: ``interpreter_name()``, ``mac_platforms()``, ``compatible_tags()``, ``cpython_tags()``, ``generic_tags()`` (:issue:`187`)
+
+* Officially support Python 3.8 (:issue:`232`)
+
+* Add ``major``, ``minor``, and ``micro`` aliases to ``packaging.version.Version`` (:issue:`226`)
+
+* Properly mark ``packaging`` as being fully typed by adding a ``py.typed`` file (:issue:`226`)
+
+19.2 - 2019-09-18
+~~~~~~~~~~~~~~~~~
+
+* Remove dependency on ``attrs`` (:issue:`178`, :issue:`179`)
+
+* Use appropriate fallbacks for CPython ABI tag (:issue:`181`, :issue:`185`)
+
+* Add manylinux2014 support (:issue:`186`)
+
+* Improve ABI detection (:issue:`181`)
+
+* Properly handle debug wheels for Python 3.8 (:issue:`172`)
+
+* Improve detection of debug builds on Windows (:issue:`194`)
+
+19.1 - 2019-07-30
+~~~~~~~~~~~~~~~~~
+
+* Add the ``packaging.tags`` module. (:issue:`156`)
+
+* Correctly handle two-digit versions in ``python_version`` (:issue:`119`)
+
+
+19.0 - 2019-01-20
+~~~~~~~~~~~~~~~~~
+
+* Fix string representation of PEP 508 direct URL requirements with markers.
+
+* Better handling of file URLs
+
+ This allows for using ``file:///absolute/path``, which was previously
+ prevented due to the missing ``netloc``.
+
+ This allows for all file URLs that ``urlunparse`` turns back into the
+ original URL to be valid.
+
+
+18.0 - 2018-09-26
+~~~~~~~~~~~~~~~~~
+
+* Improve error messages when invalid requirements are given. (:issue:`129`)
+
+
+17.1 - 2017-02-28
+~~~~~~~~~~~~~~~~~
+
+* Fix ``utils.canonicalize_version`` when supplying non-PEP 440 versions.
+
+
+17.0 - 2017-02-28
+~~~~~~~~~~~~~~~~~
+
+* Drop support for python 2.6, 3.2, and 3.3.
+
+* Define minimal pyparsing version to 2.0.2 (:issue:`91`).
+
+* Add ``epoch``, ``release``, ``pre``, ``dev``, and ``post`` attributes to
+ ``Version`` and ``LegacyVersion`` (:issue:`34`).
+
+* Add ``Version().is_devrelease`` and ``LegacyVersion().is_devrelease`` to
+ make it easy to determine if a release is a development release.
+
+* Add ``utils.canonicalize_version`` to canonicalize version strings or
+ ``Version`` instances (:issue:`121`).
+
+
+16.8 - 2016-10-29
+~~~~~~~~~~~~~~~~~
+
+* Fix markers that utilize ``in`` so that they render correctly.
+
+* Fix an erroneous test on Python RC releases.
+
+
+16.7 - 2016-04-23
+~~~~~~~~~~~~~~~~~
+
+* Add support for the deprecated ``python_implementation`` marker which was
+ an undocumented setuptools marker in addition to the newer markers.
+
+
+16.6 - 2016-03-29
+~~~~~~~~~~~~~~~~~
+
+* Add support for the deprecated, PEP 345 environment markers in addition to
+ the newer markers.
+
+
+16.5 - 2016-02-26
+~~~~~~~~~~~~~~~~~
+
+* Fix a regression in parsing requirements with whitespaces between the comma
+ separators.
+
+
+16.4 - 2016-02-22
+~~~~~~~~~~~~~~~~~
+
+* Fix a regression in parsing requirements like ``foo (==4)``.
+
+
+16.3 - 2016-02-21
+~~~~~~~~~~~~~~~~~
+
+* Fix a bug where ``packaging.requirements:Requirement`` was overly strict when
+ matching legacy requirements.
+
+
+16.2 - 2016-02-09
+~~~~~~~~~~~~~~~~~
+
+* Add a function that implements the name canonicalization from PEP 503.
+
+
+16.1 - 2016-02-07
+~~~~~~~~~~~~~~~~~
+
+* Implement requirement specifiers from PEP 508.
+
+
+16.0 - 2016-01-19
+~~~~~~~~~~~~~~~~~
+
+* Relicense so that packaging is available under *either* the Apache License,
+ Version 2.0 or a 2 Clause BSD license.
+
+* Support installation of packaging when only distutils is available.
+
+* Fix ``==`` comparison when there is a prefix and a local version in play.
+ (:issue:`41`).
+
+* Implement environment markers from PEP 508.
+
+
+15.3 - 2015-08-01
+~~~~~~~~~~~~~~~~~
+
+* Normalize post-release spellings for rev/r prefixes. :issue:`35`
+
+
+15.2 - 2015-05-13
+~~~~~~~~~~~~~~~~~
+
+* Fix an error where the arbitrary specifier (``===``) was not correctly
+ allowing pre-releases when it was being used.
+
+* Expose the specifier and version parts through properties on the
+ ``Specifier`` classes.
+
+* Allow iterating over the ``SpecifierSet`` to get access to all of the
+ ``Specifier`` instances.
+
+* Allow testing if a version is contained within a specifier via the ``in``
+ operator.
+
+
+15.1 - 2015-04-13
+~~~~~~~~~~~~~~~~~
+
+* Fix a logic error that was causing inconsistent answers about whether or not
+ a pre-release was contained within a ``SpecifierSet`` or not.
+
+
+15.0 - 2015-01-02
+~~~~~~~~~~~~~~~~~
+
+* Add ``Version().is_postrelease`` and ``LegacyVersion().is_postrelease`` to
+ make it easy to determine if a release is a post release.
+
+* Add ``Version().base_version`` and ``LegacyVersion().base_version`` to make
+ it easy to get the public version without any pre or post release markers.
+
+* Support the update to PEP 440 which removed the implied ``!=V.*`` when using
+ either ``>V`` or ``<V`` and which instead special cased the handling of
+ pre-releases, post-releases, and local versions when using ``>V`` or ``<V``.
+
+
+14.5 - 2014-12-17
+~~~~~~~~~~~~~~~~~
+
+* Normalize release candidates as ``rc`` instead of ``c``.
+
+* Expose the ``VERSION_PATTERN`` constant, a regular expression matching
+ a valid version.
+
+
+14.4 - 2014-12-15
+~~~~~~~~~~~~~~~~~
+
+* Ensure that versions are normalized before comparison when used in a
+ specifier with a less than (``<``) or greater than (``>``) operator.
+
+
+14.3 - 2014-11-19
+~~~~~~~~~~~~~~~~~
+
+* **BACKWARDS INCOMPATIBLE** Refactor specifier support so that it can sanely
+ handle legacy specifiers as well as PEP 440 specifiers.
+
+* **BACKWARDS INCOMPATIBLE** Move the specifier support out of
+ ``packaging.version`` into ``packaging.specifiers``.
+
+
+14.2 - 2014-09-10
+~~~~~~~~~~~~~~~~~
+
+* Add prerelease support to ``Specifier``.
+* Remove the ability to do ``item in Specifier()`` and replace it with
+ ``Specifier().contains(item)`` in order to allow flags that signal if a
+ prerelease should be accepted or not.
+* Add a method ``Specifier().filter()`` which takes an iterable and returns
+ an iterable with items that do not match the specifier filtered out.
+
+
+14.1 - 2014-09-08
+~~~~~~~~~~~~~~~~~
+
+* Allow ``LegacyVersion`` and ``Version`` to be sorted together.
+* Add ``packaging.version.parse()`` to enable easily parsing a version string
+ as either a ``Version`` or a ``LegacyVersion`` depending on its PEP 440
+ validity.
+
+
+14.0 - 2014-09-05
+~~~~~~~~~~~~~~~~~
+
+* Initial release.
+
+
+.. _`master`: https://github.com/pypa/packaging/
diff --git a/testing/web-platform/tests/tools/third_party/packaging/CONTRIBUTING.rst b/testing/web-platform/tests/tools/third_party/packaging/CONTRIBUTING.rst
new file mode 100644
index 0000000000..d9d70ec047
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/CONTRIBUTING.rst
@@ -0,0 +1,23 @@
+Contributing to packaging
+=========================
+
+As an open source project, packaging welcomes contributions of many forms.
+
+Examples of contributions include:
+
+* Code patches
+* Documentation improvements
+* Bug reports and patch reviews
+
+Extensive contribution guidelines are available in the repository at
+``docs/development/index.rst``, or online at:
+
+https://packaging.pypa.io/en/latest/development/
+
+Security issues
+---------------
+
+To report a security issue, please follow the special `security reporting
+guidelines`_; do not report it in the public issue tracker.
+
+.. _`security reporting guidelines`: https://packaging.pypa.io/en/latest/security/
diff --git a/testing/web-platform/tests/tools/third_party/packaging/LICENSE b/testing/web-platform/tests/tools/third_party/packaging/LICENSE
new file mode 100644
index 0000000000..6f62d44e4e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/LICENSE
@@ -0,0 +1,3 @@
+This software is made available under the terms of *either* of the licenses
+found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software are made
+under the terms of *both* these licenses.
diff --git a/testing/web-platform/tests/tools/third_party/packaging/LICENSE.APACHE b/testing/web-platform/tests/tools/third_party/packaging/LICENSE.APACHE
new file mode 100644
index 0000000000..f433b1a53f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/LICENSE.APACHE
@@ -0,0 +1,177 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/testing/web-platform/tests/tools/third_party/packaging/LICENSE.BSD b/testing/web-platform/tests/tools/third_party/packaging/LICENSE.BSD
new file mode 100644
index 0000000000..42ce7b75c9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/LICENSE.BSD
@@ -0,0 +1,23 @@
+Copyright (c) Donald Stufft and individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/testing/web-platform/tests/tools/third_party/packaging/MANIFEST.in b/testing/web-platform/tests/tools/third_party/packaging/MANIFEST.in
new file mode 100644
index 0000000000..a078133d35
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/MANIFEST.in
@@ -0,0 +1,24 @@
+include CHANGELOG.rst CONTRIBUTING.rst README.rst
+include LICENSE LICENSE.APACHE LICENSE.BSD
+
+include .coveragerc
+include .flake8
+include .pre-commit-config.yaml
+include mypy.ini
+
+recursive-include docs *
+recursive-include tests *.py
+recursive-include tests/manylinux hello-world-*
+recursive-include tests/musllinux glibc-*
+recursive-include tests/musllinux musl-*
+
+exclude noxfile.py
+exclude .readthedocs.yml
+exclude .travis.yml
+exclude dev-requirements.txt
+exclude tests/manylinux/build-hello-world.sh
+exclude tests/musllinux/build.sh
+exclude tests/hello-world.c
+
+prune docs/_build
+prune tasks
diff --git a/testing/web-platform/tests/tools/third_party/packaging/README.rst b/testing/web-platform/tests/tools/third_party/packaging/README.rst
new file mode 100644
index 0000000000..e8bebe74dc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/README.rst
@@ -0,0 +1,73 @@
+packaging
+=========
+
+.. start-intro
+
+Reusable core utilities for various Python Packaging
+`interoperability specifications <https://packaging.python.org/specifications/>`_.
+
+This library provides utilities that implement the interoperability
+specifications which clearly have one correct behaviour (e.g. :pep:`440`)
+or benefit greatly from having a single shared implementation (e.g. :pep:`425`).
+
+.. end-intro
+
+The ``packaging`` project includes the following: version handling, specifiers,
+markers, requirements, tags, utilities.
+
+Documentation
+-------------
+
+The `documentation`_ provides information and the API for the following:
+
+- Version Handling
+- Specifiers
+- Markers
+- Requirements
+- Tags
+- Utilities
+
+Installation
+------------
+
+Use ``pip`` to install these utilities::
+
+ pip install packaging
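+
+A minimal sketch of the version and specifier utilities (see the
+`documentation`_ for the full API; the values below are illustrative)::
+
+ >>> from packaging.version import Version
+ >>> from packaging.specifiers import SpecifierSet
+ >>> Version("1.0a5") < Version("1.0")
+ True
+ >>> Version("21.3") in SpecifierSet(">=20.9,<22")
+ True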
+
+Discussion
+----------
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+You can also join ``#pypa`` on Freenode to ask questions or get involved.
+
+
+.. _`documentation`: https://packaging.pypa.io/
+.. _`issue tracker`: https://github.com/pypa/packaging/issues
+
+
+Code of Conduct
+---------------
+
+Everyone interacting in the packaging project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
+
+.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
+
+Contributing
+------------
+
+The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as
+well as how to report a potential security issue. The documentation for this
+project also covers information about `project development`_ and `security`_.
+
+.. _`project development`: https://packaging.pypa.io/en/latest/development/
+.. _`security`: https://packaging.pypa.io/en/latest/security/
+
+Project History
+---------------
+
+Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for
+recent changes and project history.
+
+.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/Makefile b/testing/web-platform/tests/tools/third_party/packaging/docs/Makefile
new file mode 100644
index 0000000000..9d683b4024
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/Makefile
@@ -0,0 +1,153 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/packaging.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/packaging.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/packaging"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/packaging"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/_static/.empty b/testing/web-platform/tests/tools/third_party/packaging/docs/_static/.empty
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/_static/.empty
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/changelog.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/changelog.rst
new file mode 100644
index 0000000000..565b0521d0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/changelog.rst
@@ -0,0 +1 @@
+.. include:: ../CHANGELOG.rst
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/conf.py b/testing/web-platform/tests/tools/third_party/packaging/docs/conf.py
new file mode 100644
index 0000000000..edd8dd5cc7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/conf.py
@@ -0,0 +1,111 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import os
+import sys
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath("."))
+
+# -- General configuration ----------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = [
+ "sphinx.ext.autodoc",
+ "sphinx.ext.doctest",
+ "sphinx.ext.extlinks",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.viewcode",
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# The suffix of source filenames.
+source_suffix = ".rst"
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+project = "Packaging"
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+
+base_dir = os.path.join(os.path.dirname(__file__), os.pardir)
+about = {}
+with open(os.path.join(base_dir, "packaging", "__about__.py")) as f:
+ exec(f.read(), about)
+
+version = release = about["__version__"]
+copyright = about["__copyright__"]
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ["_build"]
+
+extlinks = {
+ "issue": ("https://github.com/pypa/packaging/issues/%s", "#"),
+ "pull": ("https://github.com/pypa/packaging/pull/%s", "PR #"),
+}
+# -- Options for HTML output --------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+
+html_theme = "furo"
+html_title = "packaging"
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "packagingdoc"
+
+
+# -- Options for LaTeX output -------------------------------------------------
+
+latex_elements = {}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual])
+latex_documents = [
+ ("index", "packaging.tex", "Packaging Documentation", "Donald Stufft", "manual")
+]
+
+# -- Options for manual page output -------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [("index", "packaging", "Packaging Documentation", ["Donald Stufft"], 1)]
+
+# -- Options for Texinfo output -----------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (
+ "index",
+ "packaging",
+ "Packaging Documentation",
+ "Donald Stufft",
+ "packaging",
+ "Core utilities for Python packages",
+ "Miscellaneous",
+ )
+]
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {"https://docs.python.org/": None}
+
+epub_theme = "epub"
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/development/getting-started.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/development/getting-started.rst
new file mode 100644
index 0000000000..8bd42ac089
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/development/getting-started.rst
@@ -0,0 +1,77 @@
+Getting started
+===============
+
+Working on packaging requires the installation of a small number of
+development dependencies. To see what dependencies are required to
+run the tests manually, please look at the ``noxfile.py`` file.
+
+Running tests
+~~~~~~~~~~~~~
+
+The packaging unit tests are found in the ``tests/`` directory and are
+designed to be run using `pytest`_. `pytest`_ will discover the tests
+automatically, so all you have to do is:
+
+.. code-block:: console
+
+ $ python -m pytest
+ ...
+ 29204 passed, 4 skipped, 1 xfailed in 83.98 seconds
+
+This runs the tests with the default Python interpreter. This also allows
+you to run select tests instead of the entire test suite.
+
+You can also verify that the tests pass on other supported Python interpreters.
+For this we use `nox`_, which will automatically create a `virtualenv`_ for
+each supported Python version and run the tests. For example:
+
+.. code-block:: console
+
+ $ nox -s tests
+ ...
+ nox > Ran multiple sessions:
+ nox > * tests-3.6: success
+ nox > * tests-3.7: success
+ nox > * tests-3.8: success
+ nox > * tests-3.9: success
+ nox > * tests-pypy3: skipped
+
+You may not have all the required Python versions installed, in which case you
+will see one or more ``InterpreterNotFound`` errors.
+
+Running linters
+~~~~~~~~~~~~~~~
+
+If you wish to run the linting rules, you may use `pre-commit`_ or run
+``nox -s lint``.
+
+.. code-block:: console
+
+ $ nox -s lint
+ ...
+ nox > Session lint was successful.
+
+Building documentation
+~~~~~~~~~~~~~~~~~~~~~~
+
+packaging documentation is stored in the ``docs/`` directory. It is
+written in `reStructuredText`_ and rendered using `Sphinx`_.
+
+Use `nox`_ to build the documentation. For example:
+
+.. code-block:: console
+
+ $ nox -s docs
+ ...
+ nox > Session docs was successful.
+
+The HTML documentation index can now be found at
+``docs/_build/html/index.html``.
+
+.. _`pytest`: https://pypi.org/project/pytest/
+.. _`nox`: https://pypi.org/project/nox/
+.. _`virtualenv`: https://pypi.org/project/virtualenv/
+.. _`pip`: https://pypi.org/project/pip/
+.. _`sphinx`: https://pypi.org/project/Sphinx/
+.. _`reStructuredText`: http://sphinx-doc.org/rest.html
+.. _`pre-commit`: https://pre-commit.com
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/development/index.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/development/index.rst
new file mode 100644
index 0000000000..c0aea8acb3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/development/index.rst
@@ -0,0 +1,19 @@
+Development
+===========
+
+As an open source project, packaging welcomes contributions of all
+forms. The sections below will help you get started.
+
+File bugs and feature requests on our issue tracker on `GitHub`_. If it is a
+bug, check out `what to put in your bug report`_.
+
+.. toctree::
+ :maxdepth: 2
+
+ getting-started
+ submitting-patches
+ reviewing-patches
+ release-process
+
+.. _`GitHub`: https://github.com/pypa/packaging
+.. _`what to put in your bug report`: http://www.contribution-guide.org/#what-to-put-in-your-bug-report
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/development/release-process.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/development/release-process.rst
new file mode 100644
index 0000000000..84e5bec868
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/development/release-process.rst
@@ -0,0 +1,25 @@
+Release Process
+===============
+
+#. Check out the current ``main`` branch.
+#. Install the latest ``nox``::
+
+ $ pip install nox
+
+#. Run the release automation with the required version number (YY.N)::
+
+ $ nox -s release -- YY.N
+
+ You will need the password for your GPG key as well as an API token for PyPI.
+
+#. Add a `release on GitHub <https://github.com/pypa/packaging/releases>`__.
+
+#. Notify the other project owners of the release.
+
+.. note::
+
+ The access needed for making a release is:
+
+ - PyPI maintainer (or owner) access to ``packaging``
+ - push directly to the ``main`` branch on the source repository
+ - push tags directly to the source repository
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/development/reviewing-patches.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/development/reviewing-patches.rst
new file mode 100644
index 0000000000..c476c7512d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/development/reviewing-patches.rst
@@ -0,0 +1,37 @@
+Reviewing and merging patches
+=============================
+
+Everyone is encouraged to review open pull requests. We only ask that you
+think carefully, ask questions, and be `excellent to one another`_. Code
+review is our opportunity to share knowledge, design ideas and make friends.
+
+When reviewing a patch try to keep each of these concepts in mind:
+
+Architecture
+------------
+
+* Is the proposed change being made in the correct place?
+
+Intent
+------
+
+* What is the change being proposed?
+* Do we want this feature or is the bug they're fixing really a bug?
+
+Implementation
+--------------
+
+* Does the change do what the author claims?
+* Are there sufficient tests?
+* Has it been documented?
+* Will this change introduce new bugs?
+
+Grammar and style
+-----------------
+
+These are small things that are not caught by the automated style checkers.
+
+* Does a variable need a better name?
+* Should this be a keyword argument?
+
+.. _`excellent to one another`: https://speakerdeck.com/ohrite/better-code-review
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/development/submitting-patches.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/development/submitting-patches.rst
new file mode 100644
index 0000000000..fbdb5a4deb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/development/submitting-patches.rst
@@ -0,0 +1,74 @@
+Submitting patches
+==================
+
+* Always make a new branch for your work.
+* Patches should be small to facilitate easier review. `Studies have shown`_
+ that review quality falls off as patch size grows. Sometimes this will result
+ in many small PRs to land a single large feature.
+* Larger changes should be discussed in a ticket before submission.
+* New features and significant bug fixes should be documented in the
+ :doc:`/changelog`.
+* You must have legal permission to distribute any code you contribute and it
+ must be available under both the BSD and Apache Software License Version 2.0
+ licenses.
+
+If you believe you've identified a security issue in packaging, please
+follow the directions on the :doc:`security page </security>`.
+
+Code
+----
+
+This project's source is auto-formatted with |black|. You can check if your
+code meets our requirements by running our linters against it with ``nox -s
+lint`` or ``pre-commit run --all-files``.
+
+`Write comments as complete sentences.`_
+
+Every code file must start with the boilerplate licensing notice:
+
+.. code-block:: python
+
+ # This file is dual licensed under the terms of the Apache License, Version
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
+ # for complete details.
+
+Tests
+-----
+
+All code changes must be accompanied by unit tests with 100% code coverage (as
+measured by the combined metrics across our build matrix).
+
+
+Documentation
+-------------
+
+All features should be documented with prose in the ``docs`` section.
+
+When referring to a hypothetical individual (such as "a person receiving an
+encrypted message"), use gender-neutral pronouns (they/them/their).
+
+Docstrings are typically only used when writing abstract classes, but should
+be written like this if required:
+
+.. code-block:: python
+
+ def some_function(some_arg):
+ """
+ Does some things.
+
+ :param some_arg: Some argument.
+ """
+
+So, specifically:
+
+* Always use three double quotes.
+* Put the three double quotes on their own line.
+* No blank line at the end.
+* Use Sphinx parameter/attribute documentation `syntax`_.
+
+
+.. |black| replace:: ``black``
+.. _black: https://pypi.org/project/black/
+.. _`Write comments as complete sentences.`: https://nedbatchelder.com/blog/201401/comments_should_be_sentences.html
+.. _`syntax`: http://sphinx-doc.org/domains.html#info-field-lists
+.. _`Studies have shown`: http://www.ibm.com/developerworks/rational/library/11-proven-practices-for-peer-review/
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/index.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/index.rst
new file mode 100644
index 0000000000..aafdae83c4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/index.rst
@@ -0,0 +1,38 @@
+Welcome to packaging
+====================
+
+.. include:: ../README.rst
+ :start-after: start-intro
+ :end-before: end-intro
+
+
+Installation
+------------
+
+You can install packaging with ``pip``:
+
+.. code-block:: console
+
+ $ pip install packaging
+
+
+.. toctree::
+ :maxdepth: 1
+ :caption: API Documentation
+ :hidden:
+
+ version
+ specifiers
+ markers
+ requirements
+ tags
+ utils
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Project
+ :hidden:
+
+ development/index
+ security
+ changelog
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/markers.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/markers.rst
new file mode 100644
index 0000000000..ad25361647
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/markers.rst
@@ -0,0 +1,93 @@
+Markers
+=======
+
+.. currentmodule:: packaging.markers
+
+One extra requirement of dealing with dependencies is the ability to specify
+whether a dependency is required for the operating system or Python version in use.
+`PEP 508`_ defines the scheme which has been implemented by this module.
+
+Usage
+-----
+
+.. doctest::
+
+ >>> from packaging.markers import Marker, UndefinedEnvironmentName
+ >>> marker = Marker("python_version>'2'")
+ >>> marker
+ <Marker('python_version > "2"')>
+ >>> # We can evaluate the marker to see if it is satisfied
+ >>> marker.evaluate()
+ True
+ >>> # We can also override the environment
+ >>> env = {'python_version': '1.5.4'}
+ >>> marker.evaluate(environment=env)
+ False
+ >>> # Multiple markers can be ANDed
+ >>> and_marker = Marker("os_name=='a' and os_name=='b'")
+ >>> and_marker
+ <Marker('os_name == "a" and os_name == "b"')>
+ >>> # Multiple markers can be ORed
+ >>> or_marker = Marker("os_name=='a' or os_name=='b'")
+ >>> or_marker
+ <Marker('os_name == "a" or os_name == "b"')>
+ >>> # Markers can also be used with extras, to pull in dependencies if
+ >>> # a certain extra is being installed
+ >>> extra = Marker('extra == "bar"')
+ >>> # Evaluating an extra marker with no environment is an error
+ >>> try:
+ ... extra.evaluate()
+ ... except UndefinedEnvironmentName:
+ ... pass
+ >>> extra_environment = {'extra': ''}
+ >>> extra.evaluate(environment=extra_environment)
+ False
+ >>> extra_environment['extra'] = 'bar'
+ >>> extra.evaluate(environment=extra_environment)
+ True
+
+
+Reference
+---------
+
+.. class:: Marker(markers)
+
+ This class abstracts handling markers for dependencies of a project. It can
+ be passed a single marker or multiple markers that are ANDed or ORed
+ together. Each marker will be parsed according to PEP 508.
+
+ :param str markers: The string representation of a marker or markers.
+ :raises InvalidMarker: If the given ``markers`` are not parseable, then
+ this exception will be raised.
+
+ .. method:: evaluate(environment=None)
+
+ Evaluate the marker given the context of the current Python process.
+
+ :param dict environment: A dictionary containing keys and values to
+ override the detected environment.
+ :raises UndefinedComparison: If the marker uses a PEP 440 comparison on
+ strings which are not valid PEP 440 versions.
+ :raises UndefinedEnvironmentName: If the marker accesses a value that
+ isn't present inside of the environment dictionary.
+
+.. exception:: InvalidMarker
+
+ Raised when attempting to create a :class:`Marker` with a string that
+ does not conform to PEP 508.
+
+
+.. exception:: UndefinedComparison
+
+ Raised when attempting to evaluate a :class:`Marker` with a PEP 440
+ comparison operator against values that are not valid PEP 440 versions.
+
+
+.. exception:: UndefinedEnvironmentName
+
+ Raised when attempting to evaluate a :class:`Marker` with a value that is
+ missing from the evaluation environment.
+
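+For example, a string that cannot be parsed raises ``InvalidMarker`` (the
+marker text below is an arbitrary, intentionally invalid example):
+
+.. doctest::
+
+ >>> from packaging.markers import Marker, InvalidMarker
+ >>> try:
+ ... Marker("this is not a marker")
+ ... except InvalidMarker:
+ ... print("invalid")
+ invalid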
+
+.. _`PEP 508`: https://www.python.org/dev/peps/pep-0508/
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/requirements.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/requirements.rst
new file mode 100644
index 0000000000..e7c5a85a5e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/requirements.rst
@@ -0,0 +1,89 @@
+Requirements
+============
+
+.. currentmodule:: packaging.requirements
+
+Parse a given requirement line for specifying the dependencies of a Python
+project, using `PEP 508`_, which defines the scheme that has been implemented
+by this module.
+
+Usage
+-----
+
+.. doctest::
+
+ >>> from packaging.requirements import Requirement
+ >>> simple_req = Requirement("name")
+ >>> simple_req
+ <Requirement('name')>
+ >>> simple_req.name
+ 'name'
+ >>> simple_req.url is None
+ True
+ >>> simple_req.extras
+ set()
+ >>> simple_req.specifier
+ <SpecifierSet('')>
+ >>> simple_req.marker is None
+ True
+ >>> # Requirements can be specified with extras, specifiers and markers
+ >>> req = Requirement('name[foo]>=2,<3; python_version>"2.0"')
+ >>> req.name
+ 'name'
+ >>> req.extras
+ {'foo'}
+ >>> req.specifier
+ <SpecifierSet('<3,>=2')>
+ >>> req.marker
+ <Marker('python_version > "2.0"')>
+ >>> # Requirements can also be specified with a URL, but may not specify
+ >>> # a version.
+ >>> url_req = Requirement('name @ https://github.com/pypa ;os_name=="a"')
+ >>> url_req.name
+ 'name'
+ >>> url_req.url
+ 'https://github.com/pypa'
+ >>> url_req.extras
+ set()
+ >>> url_req.marker
+ <Marker('os_name == "a"')>
+
+
+Reference
+---------
+
+.. class:: Requirement(requirement)
+
+ This class abstracts handling the details of a requirement for a project.
+ Each requirement will be parsed according to PEP 508.
+
+ :param str requirement: The string representation of a requirement.
+ :raises InvalidRequirement: If the given ``requirement`` is not parseable,
+ then this exception will be raised.
+
+ .. attribute:: name
+
+ The name of the requirement.
+
+ .. attribute:: url
+
+ The URL, if any, from which to download the requirement. Can be None.
+
+ .. attribute:: extras
+
+ A set of extras that the requirement specifies.
+
+ .. attribute:: specifier
+
+ A :class:`~.SpecifierSet` of the version specified by the requirement.
+
+ .. attribute:: marker
+
+ A :class:`~.Marker` of the marker for the requirement. Can be None.
+
+.. exception:: InvalidRequirement
+
+ Raised when attempting to create a :class:`Requirement` with a string that
+ does not conform to PEP 508.
+
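+For example, a string that cannot be parsed raises ``InvalidRequirement`` (the
+requirement text below is an arbitrary, intentionally invalid example):
+
+.. doctest::
+
+ >>> from packaging.requirements import Requirement, InvalidRequirement
+ >>> try:
+ ... Requirement("name with spaces")
+ ... except InvalidRequirement:
+ ... print("invalid")
+ invalid
+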
+.. _`PEP 508`: https://www.python.org/dev/peps/pep-0508/
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/requirements.txt b/testing/web-platform/tests/tools/third_party/packaging/docs/requirements.txt
new file mode 100644
index 0000000000..a95ae18b4f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/requirements.txt
@@ -0,0 +1 @@
+furo
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/security.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/security.rst
new file mode 100644
index 0000000000..f7fdb00029
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/security.rst
@@ -0,0 +1,18 @@
+Security
+========
+
+We take the security of packaging seriously. If you believe you've identified a
+security issue in it, DO NOT report the issue in any public forum, including
+(but not limited to):
+
+- GitHub issue tracker
+- Official or unofficial chat channels
+- Official or unofficial mailing lists
+
+Please report your issue to ``security@python.org``. Messages may be optionally
+encrypted with GPG using key fingerprints available at the `Python Security
+page <https://www.python.org/news/security/>`_.
+
+Once you've submitted an issue via email, you should receive an acknowledgment
+within 48 hours, and depending on the action to be taken, you may receive
+further follow-up emails.
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/specifiers.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/specifiers.rst
new file mode 100644
index 0000000000..83299a8a70
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/specifiers.rst
@@ -0,0 +1,222 @@
+Specifiers
+==========
+
+.. currentmodule:: packaging.specifiers
+
+A core requirement of dealing with dependencies is the ability to specify what
+versions of a dependency are acceptable for you. `PEP 440`_ defines the
+standard specifier scheme which has been implemented by this module.
+
+Usage
+-----
+
+.. doctest::
+
+ >>> from packaging.specifiers import SpecifierSet
+ >>> from packaging.version import Version
+ >>> spec1 = SpecifierSet("~=1.0")
+ >>> spec1
+ <SpecifierSet('~=1.0')>
+ >>> spec2 = SpecifierSet(">=1.0")
+ >>> spec2
+ <SpecifierSet('>=1.0')>
+ >>> # We can combine specifiers
+ >>> combined_spec = spec1 & spec2
+ >>> combined_spec
+ <SpecifierSet('>=1.0,~=1.0')>
+ >>> # We can also implicitly combine a string specifier
+ >>> combined_spec &= "!=1.1"
+ >>> combined_spec
+ <SpecifierSet('!=1.1,>=1.0,~=1.0')>
+ >>> # Create a few versions to check for contains.
+ >>> v1 = Version("1.0a5")
+ >>> v2 = Version("1.0")
+ >>> # We can check a version object to see if it falls within a specifier
+ >>> v1 in combined_spec
+ False
+ >>> v2 in combined_spec
+ True
+ >>> # We can even do the same with a string based version
+ >>> "1.4" in combined_spec
+ True
+ >>> # Finally we can filter a list of versions to get only those which are
+ >>> # contained within our specifier.
+ >>> list(combined_spec.filter([v1, v2, "1.4"]))
+ [<Version('1.0')>, '1.4']
+
+
+Reference
+---------
+
+.. class:: SpecifierSet(specifiers="", prereleases=None)
+
+ This class abstracts the handling of a project's dependency specifiers. It
+ can be passed a single specifier (``>=3.0``), a comma-separated list of
+ specifiers (``>=3.0,!=3.1``), or no specifier at all. Each individual
+ specifier is parsed as a PEP 440 specifier (:class:`Specifier`) or,
+ failing that, as a legacy, setuptools style specifier
+ (deprecated :class:`LegacySpecifier`). You may combine
+ :class:`SpecifierSet` instances using the ``&`` operator
+ (``SpecifierSet(">2") & SpecifierSet("<4")``).
+
+ Both the membership tests and the combination support using raw strings
+ in place of already instantiated objects.
+
+ :param str specifiers: The string representation of a specifier or a
+ comma-separated list of specifiers which will
+ be parsed and normalized before use.
+ :param bool prereleases: This tells the SpecifierSet if it should accept
+ prerelease versions if applicable or not. The
+ default of ``None`` will autodetect it from the
+ given specifiers.
+ :raises InvalidSpecifier: If the given ``specifiers`` are not parseable
+ then this exception will be raised.
+
+ .. attribute:: prereleases
+
+ A boolean value indicating whether this :class:`SpecifierSet`
+ represents a specifier that includes pre-release versions. This can be
+ set to either ``True`` or ``False`` to explicitly enable or disable
+ prereleases or it can be set to ``None`` (the default) to enable
+ autodetection.
+
+ .. method:: __contains__(version)
+
+ This is the more Pythonic version of :meth:`contains()`, but does
+ not allow you to override the ``prereleases`` argument. If you
+ need that, use :meth:`contains()`.
+
+ See :meth:`contains()`.
+
+ .. method:: contains(version, prereleases=None)
+
+ Determines if ``version``, which can be either a version string, a
+ :class:`Version`, or a deprecated :class:`LegacyVersion` object, is
+ contained within this set of specifiers.
+
+ This will either match or not match prereleases based on the
+ ``prereleases`` parameter. When ``prereleases`` is set to ``None``
+ (the default) it will use the ``Specifier().prereleases`` attribute to
+ determine whether to allow them. Otherwise the boolean value of the
+ passed-in value determines whether to allow them.
+
+ .. method:: __len__()
+
+ Returns the number of specifiers in this specifier set.
+
+ .. method:: __iter__()
+
+ Returns an iterator over all the underlying :class:`Specifier` (or
+ deprecated :class:`LegacySpecifier`) instances in this specifier set.
+
+ .. method:: filter(iterable, prereleases=None)
+
+ Takes an iterable that can contain version strings, :class:`~.Version`,
+ and deprecated :class:`~.LegacyVersion` instances and will then filter
+ it, returning an iterable that contains only items which match the
+ rules of this specifier object.
+
+ This method is smarter than just
+ ``filter(Specifier().contains, [...])`` because it implements the rule
+ from PEP 440 where a prerelease item SHOULD be accepted if no other
+ versions match the given specifier.
+
+ The ``prereleases`` parameter functions similarly to the parameter of
+ the same name in ``contains``. If the value is ``None`` (the default)
+ then it will intelligently decide whether to allow prereleases based on
+ the specifier, the ``Specifier().prereleases`` value, and the PEP 440
+ rules. Otherwise it acts as a boolean which enables or disables the
+ inclusion of all prerelease versions.
+
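+ As a brief illustration of the ``prereleases`` handling described above (the
+ specifier and version below are arbitrary examples):
+
+ .. doctest::
+
+ >>> from packaging.specifiers import SpecifierSet
+ >>> spec = SpecifierSet(">=1.0")
+ >>> spec.contains("1.1a1")
+ False
+ >>> spec.contains("1.1a1", prereleases=True)
+ True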
+
+.. class:: Specifier(specifier, prereleases=None)
+
+ This class abstracts the handling of a single `PEP 440`_ compatible
+ specifier. You generally do not need to instantiate this class manually;
+ prefer working with :class:`SpecifierSet` instead.
+
+ :param str specifier: The string representation of a specifier which will
+ be parsed and normalized before use.
+ :param bool prereleases: This tells the specifier if it should accept
+ prerelease versions if applicable or not. The
+ default of ``None`` will autodetect it from the
+ given specifiers.
+ :raises InvalidSpecifier: If the ``specifier`` does not conform to PEP 440
+ in any way then this exception will be raised.
+
+ .. attribute:: operator
+
+ The string value of the operator part of this specifier.
+
+ .. attribute:: version
+
+ The string version of the version part of this specifier.
+
+ .. attribute:: prereleases
+
+ See :attr:`SpecifierSet.prereleases`.
+
+ .. method:: __contains__(version)
+
+ See :meth:`SpecifierSet.__contains__()`.
+
+ .. method:: contains(version, prereleases=None)
+
+ See :meth:`SpecifierSet.contains()`.
+
+ .. method:: filter(iterable, prereleases=None)
+
+ See :meth:`SpecifierSet.filter()`.
+
+
+.. class:: LegacySpecifier(specifier, prereleases=None)
+
+ .. deprecated:: 20.5
+
+ Use :class:`Specifier` instead.
+
+ This class abstracts the handling of a single legacy, setuptools style
+ specifier. You generally do not need to instantiate this class manually;
+ prefer working with :class:`SpecifierSet` instead.
+
+ :param str specifier: The string representation of a specifier which will
+ be parsed and normalized before use.
+ :param bool prereleases: This tells the specifier if it should accept
+ prerelease versions if applicable or not. The
+ default of ``None`` will autodetect it from the
+ given specifiers.
+ :raises InvalidSpecifier: If the ``specifier`` is not parseable then this
+ will be raised.
+
+ .. attribute:: operator
+
+ The string value of the operator part of this specifier.
+
+ .. attribute:: version
+
+ The string version of the version part of this specifier.
+
+ .. attribute:: prereleases
+
+ See :attr:`SpecifierSet.prereleases`.
+
+ .. method:: __contains__(version)
+
+ See :meth:`SpecifierSet.__contains__()`.
+
+ .. method:: contains(version, prereleases=None)
+
+ See :meth:`SpecifierSet.contains()`.
+
+ .. method:: filter(iterable, prereleases=None)
+
+ See :meth:`SpecifierSet.filter()`.
+
+
+.. exception:: InvalidSpecifier
+
+ Raised when attempting to create a :class:`Specifier` with a specifier
+ string that does not conform to `PEP 440`_.
+
+
+.. _`PEP 440`: https://www.python.org/dev/peps/pep-0440/
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/tags.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/tags.rst
new file mode 100644
index 0000000000..ecd613b5aa
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/tags.rst
@@ -0,0 +1,225 @@
+Tags
+====
+
+.. currentmodule:: packaging.tags
+
+Wheels encode the Python interpreter, ABI, and platform that they support in
+their filenames using `platform compatibility tags`_. This module provides
+support for both parsing these tags as well as discovering what tags the
+running Python interpreter supports.
+
+Usage
+-----
+
+.. doctest::
+
+ >>> from packaging.tags import Tag, sys_tags
+ >>> import sys
+ >>> looking_for = Tag("py{major}".format(major=sys.version_info.major), "none", "any")
+ >>> supported_tags = list(sys_tags())
+ >>> looking_for in supported_tags
+ True
+ >>> really_old = Tag("py1", "none", "any")
+ >>> wheels = {really_old, looking_for}
+ >>> best_wheel = None
+ >>> for supported_tag in supported_tags:
+ ... for wheel_tag in wheels:
+ ... if supported_tag == wheel_tag:
+ ... best_wheel = wheel_tag
+ ... break
+ >>> best_wheel == looking_for
+ True
+
+Reference
+---------
+
+High Level Interface
+''''''''''''''''''''
+
+The following functions are the main interface to the library and are typically the only
+items that applications should need to reference in order to parse and check tags.
+
+.. class:: Tag(interpreter, abi, platform)
+
+ A representation of the tag triple for a wheel. Instances are considered
+ immutable and thus are hashable. Equality checking is also supported.
+
+ :param str interpreter: The interpreter name, e.g. ``"py"``
+ (see :attr:`INTERPRETER_SHORT_NAMES` for mapping
+ well-known interpreter names to their short names).
+ :param str abi: The ABI that a wheel supports, e.g. ``"cp37m"``.
+ :param str platform: The OS/platform the wheel supports,
+ e.g. ``"win_amd64"``.
+
+ .. attribute:: interpreter
+
+ The interpreter name.
+
+ .. attribute:: abi
+
+ The supported ABI.
+
+ .. attribute:: platform
+
+ The OS/platform.
+
+
+.. function:: parse_tag(tag)
+
+ Parses the provided ``tag`` into a set of :class:`Tag` instances.
+
+ Returning a set is required due to the possibility that the tag is a
+ `compressed tag set`_, e.g. ``"py2.py3-none-any"`` which supports both
+ Python 2 and Python 3.
+
+ :param str tag: The tag to parse, e.g. ``"py3-none-any"``.
+
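+ For example, a compressed tag set (an arbitrary example) expands into
+ multiple tags:
+
+ .. doctest::
+
+ >>> from packaging.tags import parse_tag
+ >>> sorted(str(tag) for tag in parse_tag("py2.py3-none-any"))
+ ['py2-none-any', 'py3-none-any']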
+
+.. function:: sys_tags(*, warn=False)
+
+ Yields the tags that the running interpreter supports.
+
+ The iterable is ordered so that the best-matching tag is first in the
+ sequence. The exact preferential order of tags is interpreter-specific, but
+ in general tag importance is ranked in the following order:
+
+ 1. Interpreter
+ 2. Platform
+ 3. ABI
+
+ This order is due to the fact that an ABI is inherently tied to the
+ platform, but platform-specific code is not necessarily tied to the ABI. The
+ interpreter is the most important tag as it dictates basic support for any
+ wheel.
+
+ The function returns an iterable in order to allow for the possible
+ short-circuiting of tag generation if the entire sequence is not necessary
+ and tag calculation happens to be expensive.
+
+ :param bool warn: Whether warnings should be logged. Defaults to ``False``.
+
+
+Low Level Interface
+'''''''''''''''''''
+
+The following functions are low-level implementation details. They should typically not
+be needed in application code, unless the application has specialised requirements (for
+example, constructing sets of supported tags for environments other than the running
+interpreter).
+
+These functions capture the precise details of which environments support which tags. That
+information is not defined in the compatibility tag standards; it is left up to the
+implementation to provide.
+
+
+.. attribute:: INTERPRETER_SHORT_NAMES
+
+ A dictionary mapping interpreter names to their `abbreviation codes`_
+ (e.g. ``"cpython"`` is ``"cp"``). All interpreter names are lower-case.
+
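+ For example, the short name for CPython can be looked up directly:
+
+ .. doctest::
+
+ >>> from packaging.tags import INTERPRETER_SHORT_NAMES
+ >>> INTERPRETER_SHORT_NAMES["cpython"]
+ 'cp'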
+
+.. function:: interpreter_name()
+
+ Returns the running interpreter's name.
+
+ This typically acts as the prefix to the :attr:`~Tag.interpreter` tag.
+
+
+.. function:: interpreter_version(*, warn=False)
+
+ Returns the running interpreter's version.
+
+ This typically acts as the suffix to the :attr:`~Tag.interpreter` tag.
+
+
+.. function:: mac_platforms(version=None, arch=None)
+
+ Yields the :attr:`~Tag.platform` tags for macOS.
+
+ :param tuple version: A two-item tuple representing the version of macOS.
+ Defaults to the current system's version.
+ :param str arch: The CPU architecture. Defaults to the architecture of the
+ current system, e.g. ``"x86_64"``.
+
+ .. note::
+ Equivalent support for the other major platforms is purposefully not
+ provided:
+
+ - On Windows, platform compatibility is statically specified
+ - On Linux, code must be run on the system itself to determine
+ compatibility
+
+
+.. function:: platform_tags(version=None, arch=None)
+
+ Yields the :attr:`~Tag.platform` tags for the running interpreter.
+
+
+.. function:: compatible_tags(python_version=None, interpreter=None, platforms=None)
+
+ Yields the tags for an interpreter compatible with the Python version
+ specified by ``python_version``.
+
+ The specific tags generated are:
+
+ - ``py*-none-<platform>``
+ - ``<interpreter>-none-any`` if ``interpreter`` is provided
+ - ``py*-none-any``
+
+ :param Sequence python_version: A one- or two-item sequence representing the
+ compatible version of Python. Defaults to
+ ``sys.version_info[:2]``.
+ :param str interpreter: The name of the interpreter (if known), e.g.
+ ``"cp38"``. Defaults to the current interpreter.
+ :param Iterable platforms: Iterable of compatible platforms. Defaults to the
+ platforms compatible with the current system.
+
+.. function:: cpython_tags(python_version=None, abis=None, platforms=None, *, warn=False)
+
+ Yields the tags for the CPython interpreter.
+
+ The specific tags generated are:
+
+ - ``cp<python_version>-<abi>-<platform>``
+ - ``cp<python_version>-abi3-<platform>``
+ - ``cp<python_version>-none-<platform>``
+ - ``cp<older version>-abi3-<platform>`` where "older version" is all older
+ minor versions down to Python 3.2 (when ``abi3`` was introduced)
+
+ If ``python_version`` provides only a major version then only
+ user-provided ABIs (via ``abis``) and the ``none`` ABI will be used.
+
+ :param Sequence python_version: A one- or two-item sequence representing the
+ targeted Python version. Defaults to
+ ``sys.version_info[:2]``.
+ :param Iterable abis: Iterable of compatible ABIs. Defaults to the ABIs
+ compatible with the current system.
+ :param Iterable platforms: Iterable of compatible platforms. Defaults to the
+ platforms compatible with the current system.
+ :param bool warn: Whether warnings should be logged. Defaults to ``False``.
+
+.. function:: generic_tags(interpreter=None, abis=None, platforms=None, *, warn=False)
+
+ Yields the tags for an interpreter which requires no specialization.
+
+ This function should be used if one of the other interpreter-specific
+ functions provided by this module is not appropriate (i.e. not calculating
+ tags for a CPython interpreter).
+
+ The specific tags generated are:
+
+ - ``<interpreter>-<abi>-<platform>``
+
+ The ``"none"`` ABI will be added if it was not explicitly provided.
+
+ :param str interpreter: The name of the interpreter. Defaults to being
+ calculated.
+ :param Iterable abis: Iterable of compatible ABIs. Defaults to the ABIs
+ compatible with the current system.
+ :param Iterable platforms: Iterable of compatible platforms. Defaults to the
+ platforms compatible with the current system.
+ :param bool warn: Whether warnings should be logged. Defaults to ``False``.
+
+.. _`abbreviation codes`: https://www.python.org/dev/peps/pep-0425/#python-tag
+.. _`compressed tag set`: https://www.python.org/dev/peps/pep-0425/#compressed-tag-sets
+.. _`platform compatibility tags`: https://packaging.python.org/specifications/platform-compatibility-tags/
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/utils.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/utils.rst
new file mode 100644
index 0000000000..8fbb0250b7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/utils.rst
@@ -0,0 +1,92 @@
+Utilities
+=========
+
+.. currentmodule:: packaging.utils
+
+
+A set of small, helper utilities for dealing with Python packages.
+
+
+Reference
+---------
+
+.. function:: canonicalize_name(name)
+
+ This function takes a valid Python package name, and returns the normalized
+ form of it.
+
+ :param str name: The name to normalize.
+
+ .. doctest::
+
+ >>> from packaging.utils import canonicalize_name
+ >>> canonicalize_name("Django")
+ 'django'
+ >>> canonicalize_name("oslo.concurrency")
+ 'oslo-concurrency'
+ >>> canonicalize_name("requests")
+ 'requests'
+
+.. function:: canonicalize_version(version)
+
+ This function takes a string representing a package version (or a
+ :class:`~packaging.version.Version` instance), and returns the
+ normalized form of it.
+
+ :param str version: The version to normalize.
+
+ .. doctest::
+
+ >>> from packaging.utils import canonicalize_version
+ >>> canonicalize_version('1.4.0.0.0')
+ '1.4'
+
+.. function:: parse_wheel_filename(filename)
+
+ This function takes the filename of a wheel file, and parses it,
+ returning a tuple of name, version, build number, and tags.
+
+ The name part of the tuple is normalized. The version portion is an
+ instance of :class:`~packaging.version.Version`. The build number
+ is ``()`` if there is no build number in the wheel filename,
+ otherwise a two-item tuple of an integer for the leading digits and
+ a string for the rest of the build number. The tags portion is a
+ frozenset of :class:`~packaging.tags.Tag` instances.
+
+ :param str filename: The name of the wheel file.
+
+ .. doctest::
+
+ >>> from packaging.utils import parse_wheel_filename
+ >>> from packaging.tags import Tag
+ >>> from packaging.version import Version
+ >>> name, ver, build, tags = parse_wheel_filename("foo-1.0-py3-none-any.whl")
+ >>> name
+ 'foo'
+ >>> ver == Version('1.0')
+ True
+ >>> tags == {Tag("py3", "none", "any")}
+ True
+ >>> not build
+ True
+
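+ A filename that carries a build number (a hypothetical example) yields the
+ two-item build tuple described above:
+
+ .. doctest::
+
+ >>> from packaging.utils import parse_wheel_filename
+ >>> parse_wheel_filename("foo-1.0-1abc-py3-none-any.whl")[2]
+ (1, 'abc')
+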
+.. function:: parse_sdist_filename(filename)
+
+ This function takes the filename of an sdist file (as specified
+ in the `Source distribution format`_ documentation), and parses
+ it, returning a tuple of the normalized name and version as
+ represented by an instance of :class:`~packaging.version.Version`.
+
+ :param str filename: The name of the sdist file.
+
+ .. doctest::
+
+ >>> from packaging.utils import parse_sdist_filename
+ >>> from packaging.version import Version
+ >>> name, ver = parse_sdist_filename("foo-1.0.tar.gz")
+ >>> name
+ 'foo'
+ >>> ver == Version('1.0')
+ True
+
+.. _Source distribution format: https://packaging.python.org/specifications/source-distribution-format/#source-distribution-file-name
diff --git a/testing/web-platform/tests/tools/third_party/packaging/docs/version.rst b/testing/web-platform/tests/tools/third_party/packaging/docs/version.rst
new file mode 100644
index 0000000000..a43cf7868e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/docs/version.rst
@@ -0,0 +1,292 @@
+Version Handling
+================
+
+.. currentmodule:: packaging.version
+
+A core requirement of dealing with packages is the ability to work with
+versions. `PEP 440`_ defines the standard version scheme for Python packages
+which has been implemented by this module.
+
+Usage
+-----
+
+.. doctest::
+
+ >>> from packaging.version import Version, parse
+ >>> v1 = parse("1.0a5")
+ >>> v2 = Version("1.0")
+ >>> v1
+ <Version('1.0a5')>
+ >>> v2
+ <Version('1.0')>
+ >>> v1 < v2
+ True
+ >>> v1.epoch
+ 0
+ >>> v1.release
+ (1, 0)
+ >>> v1.pre
+ ('a', 5)
+ >>> v1.is_prerelease
+ True
+ >>> v2.is_prerelease
+ False
+ >>> Version("french toast")
+ Traceback (most recent call last):
+ ...
+ InvalidVersion: Invalid version: 'french toast'
+ >>> Version("1.0").post
+ >>> Version("1.0").is_postrelease
+ False
+ >>> Version("1.0.post0").post
+ 0
+ >>> Version("1.0.post0").is_postrelease
+ True
+
+
+Reference
+---------
+
+.. function:: parse(version)
+
+ This function takes a version string and will parse it as a
+ :class:`Version` if the version is a valid PEP 440 version; otherwise, it
+ will parse it as a deprecated :class:`LegacyVersion`.
+
+
+.. class:: Version(version)
+
+ This class abstracts handling of a project's versions. It implements the
+ scheme defined in `PEP 440`_. A :class:`Version` instance is comparison
+ aware and can be compared and sorted using the standard Python interfaces.
+
+ :param str version: The string representation of a version which will be
+ parsed and normalized before use.
+ :raises InvalidVersion: If the ``version`` does not conform to PEP 440 in
+ any way then this exception will be raised.
+
+ .. attribute:: public
+
+ A string representing the public version portion of this ``Version()``.
+
+ .. attribute:: base_version
+
+ A string representing the base version of this :class:`Version`
+ instance. The base version is the public version of the project without
+ any pre or post release markers.
+
+ .. attribute:: epoch
+
+ An integer giving the version epoch of this :class:`Version` instance.
+
+ .. attribute:: release
+
+ A tuple of integers giving the components of the release segment of
+ this :class:`Version` instance; that is, the ``1.2.3`` part of the
+ version number, including trailing zeroes but not including the epoch
+ or any prerelease/development/postrelease suffixes.
+
+ .. attribute:: major
+
+ An integer representing the first item of :attr:`release` or ``0`` if unavailable.
+
+ .. attribute:: minor
+
+ An integer representing the second item of :attr:`release` or ``0`` if unavailable.
+
+ .. attribute:: micro
+
+ An integer representing the third item of :attr:`release` or ``0`` if unavailable.
+
+ .. attribute:: local
+
+ A string representing the local version portion of this ``Version()``
+ if it has one, or ``None`` otherwise.
+
+ .. attribute:: pre
+
+ If this :class:`Version` instance represents a prerelease, this
+ attribute will be a pair of the prerelease phase (the string ``"a"``,
+ ``"b"``, or ``"rc"``) and the prerelease number (an integer). If this
+ instance is not a prerelease, the attribute will be ``None``.
+
+ .. attribute:: is_prerelease
+
+ A boolean value indicating whether this :class:`Version` instance
+ represents a prerelease and/or development release.
+
+ .. attribute:: dev
+
+ If this :class:`Version` instance represents a development release,
+ this attribute will be the development release number (an integer);
+ otherwise, it will be ``None``.
+
+ .. attribute:: is_devrelease
+
+ A boolean value indicating whether this :class:`Version` instance
+ represents a development release.
+
+ .. attribute:: post
+
+ If this :class:`Version` instance represents a postrelease, this
+ attribute will be the postrelease number (an integer); otherwise, it
+ will be ``None``.
+
+ .. attribute:: is_postrelease
+
+ A boolean value indicating whether this :class:`Version` instance
+ represents a post-release.
+
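+ A brief illustration of several of the attributes above, using an arbitrary
+ example version:
+
+ .. doctest::
+
+ >>> from packaging.version import Version
+ >>> v = Version("2.3.1+ubuntu.1")
+ >>> (v.major, v.minor, v.micro)
+ (2, 3, 1)
+ >>> v.local
+ 'ubuntu.1'
+ >>> v.public
+ '2.3.1'
+ >>> v.base_version
+ '2.3.1'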
+
+.. class:: LegacyVersion(version)
+
+ .. deprecated:: 20.5
+
+ Use :class:`Version` instead.
+
+ This class abstracts handling of a project's versions if they are not
+ compatible with the scheme defined in `PEP 440`_. It implements a similar
+ interface to that of :class:`Version`.
+
+ This class implements the previous de facto sorting algorithm used by
+ setuptools; however, it will always sort as less than a :class:`Version`
+ instance.
+
+ :param str version: The string representation of a version which will be
+ used as is.
+
+ .. note::
+
+ :class:`LegacyVersion` instances are always ordered lower than :class:`Version` instances.
+
+ >>> from packaging.version import Version, LegacyVersion
+ >>> v1 = Version("1.0")
+ >>> v2 = LegacyVersion("1.0")
+ >>> v1 > v2
+ True
+ >>> v3 = LegacyVersion("1.3")
+ >>> v1 > v3
+ True
+
+ Also note that some strings are still valid PEP 440 strings (:class:`Version`), even if they look very similar to
+ other versions that are not (:class:`LegacyVersion`). Examples include versions with `Pre-release spelling`_ and
+ `Post-release spelling`_.
+
+ >>> from packaging.version import parse
+ >>> v1 = parse('0.9.8a')
+ >>> v2 = parse('0.9.8beta')
+ >>> v3 = parse('0.9.8r')
+ >>> v4 = parse('0.9.8rev')
+ >>> v5 = parse('0.9.8t')
+ >>> v1
+ <Version('0.9.8a0')>
+ >>> v1.is_prerelease
+ True
+ >>> v2
+ <Version('0.9.8b0')>
+ >>> v2.is_prerelease
+ True
+ >>> v3
+ <Version('0.9.8.post0')>
+ >>> v3.is_postrelease
+ True
+ >>> v4
+ <Version('0.9.8.post0')>
+ >>> v4.is_postrelease
+ True
+ >>> v5
+ <LegacyVersion('0.9.8t')>
+ >>> v5.is_prerelease
+ False
+ >>> v5.is_postrelease
+ False
+
+ .. attribute:: public
+
+ A string representing the public version portion of this
+ :class:`LegacyVersion`. This will always be the entire version string.
+
+ .. attribute:: base_version
+
+ A string representing the base version portion of this
+ :class:`LegacyVersion` instance. This will always be the entire version
+ string.
+
+ .. attribute:: epoch
+
+ This will always be ``-1`` since without `PEP 440`_ we do not have the
+ concept of version epochs. The value reflects the fact that
+ :class:`LegacyVersion` instances always compare less than
+ :class:`Version` instances.
+
+ .. attribute:: release
+
+ This will always be ``None`` since without `PEP 440`_ we do not have
+ the concept of a release segment or its components. It exists
+ primarily to allow a :class:`LegacyVersion` to be used as a stand in
+ for a :class:`Version`.
+
+ .. attribute:: local
+
+ This will always be ``None`` since without `PEP 440`_ we do not have
+ the concept of a local version. It exists primarily to allow a
+ :class:`LegacyVersion` to be used as a stand in for a :class:`Version`.
+
+ .. attribute:: pre
+
+ This will always be ``None`` since without `PEP 440`_ we do not have
+ the concept of a prerelease. It exists primarily to allow a
+ :class:`LegacyVersion` to be used as a stand in for a :class:`Version`.
+
+ .. attribute:: is_prerelease
+
+ A boolean value indicating whether this :class:`LegacyVersion`
+ represents a prerelease and/or development release. Since without
+ `PEP 440`_ there is no concept of pre or dev releases, this will
+ always be ``False`` and exists for compatibility with :class:`Version`.
+
+ .. attribute:: dev
+
+ This will always be ``None`` since without `PEP 440`_ we do not have
+ the concept of a development release. It exists primarily to allow a
+ :class:`LegacyVersion` to be used as a stand in for a :class:`Version`.
+
+ .. attribute:: is_devrelease
+
+ A boolean value indicating whether this :class:`LegacyVersion`
+ represents a development release. Since without `PEP 440`_ there is
+ no concept of dev releases, this will always be ``False`` and exists for
+ compatibility with :class:`Version`.
+
+ .. attribute:: post
+
+ This will always be ``None`` since without `PEP 440`_ we do not have
+ the concept of a postrelease. It exists primarily to allow a
+ :class:`LegacyVersion` to be used as a stand in for a :class:`Version`.
+
+ .. attribute:: is_postrelease
+
+ A boolean value indicating whether this :class:`LegacyVersion`
+ represents a post-release. Since without `PEP 440`_ there is no concept
+ of post-releases, this will always be ``False`` and exists for
+ compatibility with :class:`Version`.
+
+
+.. exception:: InvalidVersion
+
+ Raised when attempting to create a :class:`Version` with a version string
+ that does not conform to `PEP 440`_.
+
+
+.. data:: VERSION_PATTERN
+
+ A string containing the regular expression used to match a valid version.
+ The pattern is not anchored at either end, and is intended for embedding
+ in larger expressions (for example, matching a version number as part of
+ a file name). The regular expression should be compiled with the
+ ``re.VERBOSE`` and ``re.IGNORECASE`` flags set.
+
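+ For example, the pattern can be embedded in an anchored expression (a
+ minimal sketch):
+
+ .. doctest::
+
+ >>> import re
+ >>> from packaging.version import VERSION_PATTERN
+ >>> pattern = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+ >>> bool(pattern.match("1.0.dev1"))
+ True
+ >>> bool(pattern.match("not a version"))
+ False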
+
+.. _PEP 440: https://www.python.org/dev/peps/pep-0440/
+.. _Pre-release spelling : https://www.python.org/dev/peps/pep-0440/#pre-release-spelling
+.. _Post-release spelling : https://www.python.org/dev/peps/pep-0440/#post-release-spelling
diff --git a/testing/web-platform/tests/tools/third_party/packaging/mypy.ini b/testing/web-platform/tests/tools/third_party/packaging/mypy.ini
new file mode 100644
index 0000000000..d88ab8f164
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/mypy.ini
@@ -0,0 +1,17 @@
+[mypy]
+ignore_missing_imports = True
+
+# The following are the flags enabled by --strict
+warn_unused_configs = True
+disallow_subclassing_any = True
+disallow_any_generics = True
+disallow_untyped_calls = True
+disallow_untyped_defs = True
+disallow_incomplete_defs = True
+check_untyped_defs = True
+disallow_untyped_decorators = True
+no_implicit_optional = True
+warn_redundant_casts = True
+warn_unused_ignores = True
+warn_return_any = True
+no_implicit_reexport = True
diff --git a/testing/web-platform/tests/tools/third_party/packaging/noxfile.py b/testing/web-platform/tests/tools/third_party/packaging/noxfile.py
new file mode 100644
index 0000000000..10564101ef
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/noxfile.py
@@ -0,0 +1,321 @@
+# mypy: disallow-untyped-defs=False, disallow-untyped-calls=False
+
+import contextlib
+import datetime
+import difflib
+import glob
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import textwrap
+import time
+import webbrowser
+from pathlib import Path
+
+import nox
+
+nox.options.sessions = ["lint"]
+nox.options.reuse_existing_virtualenvs = True
+
+
+@nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "pypy3"])
+def tests(session):
+ def coverage(*args):
+ session.run("python", "-m", "coverage", *args)
+
+ # Once coverage 5 is used then `.coverage` can move into `pyproject.toml`.
+ session.install("coverage<5.0.0", "pretend", "pytest>=6.2.0", "pip>=9.0.2")
+ session.install(".")
+
+ if "pypy" not in session.python:
+ coverage(
+ "run",
+ "--source",
+ "packaging/",
+ "-m",
+ "pytest",
+ "--strict-markers",
+ *session.posargs,
+ )
+ coverage("report", "-m", "--fail-under", "100")
+ else:
+ # Don't do coverage tracking for PyPy, since it's SLOW.
+ session.run(
+ "python",
+ "-m",
+ "pytest",
+ "--capture=no",
+ "--strict-markers",
+ *session.posargs,
+ )
+
+
+@nox.session(python="3.9")
+def lint(session):
+ # Run the linters (via pre-commit)
+ session.install("pre-commit")
+ session.run("pre-commit", "run", "--all-files")
+
+ # Check the distribution
+ session.install("build", "twine")
+ session.run("pyproject-build")
+ session.run("twine", "check", *glob.glob("dist/*"))
+
+
+@nox.session(python="3.9")
+def docs(session):
+ shutil.rmtree("docs/_build", ignore_errors=True)
+ session.install("furo")
+ session.install("-e", ".")
+
+ variants = [
+ # (builder, dest)
+ ("html", "html"),
+ ("latex", "latex"),
+ ("doctest", "html"),
+ ]
+
+ for builder, dest in variants:
+ session.run(
+ "sphinx-build",
+ "-W",
+ "-b",
+ builder,
+ "-d",
+ "docs/_build/doctrees/" + dest,
+ "docs", # source directory
+ "docs/_build/" + dest, # output directory
+ )
+
+
+@nox.session
+def release(session):
+ package_name = "packaging"
+ version_file = Path(f"{package_name}/__about__.py")
+ changelog_file = Path("CHANGELOG.rst")
+
+ try:
+ release_version = _get_version_from_arguments(session.posargs)
+ except ValueError as e:
+ session.error(f"Invalid arguments: {e}")
+ return
+
+ # Check state of working directory and git.
+ _check_working_directory_state(session)
+ _check_git_state(session, release_version)
+
+ # Prepare for release.
+ _changelog_update_unreleased_title(release_version, file=changelog_file)
+ session.run("git", "add", str(changelog_file), external=True)
+ _bump(session, version=release_version, file=version_file, kind="release")
+
+ # Tag the release commit.
+ # fmt: off
+ session.run(
+ "git", "tag",
+ "-s", release_version,
+ "-m", f"Release {release_version}",
+ external=True,
+ )
+ # fmt: on
+
+ # Prepare for development.
+ _changelog_add_unreleased_title(file=changelog_file)
+ session.run("git", "add", str(changelog_file), external=True)
+
+ major, minor = map(int, release_version.split("."))
+ next_version = f"{major}.{minor + 1}.dev0"
+ _bump(session, version=next_version, file=version_file, kind="development")
+
+ # Checkout the git tag.
+ session.run("git", "checkout", "-q", release_version, external=True)
+
+ session.install("build", "twine")
+
+ # Build the distribution.
+ session.run("python", "-m", "build")
+
+ # Check what files are in dist/ for upload.
+ files = sorted(glob.glob("dist/*"))
+ expected = [
+ f"dist/{package_name}-{release_version}-py3-none-any.whl",
+ f"dist/{package_name}-{release_version}.tar.gz",
+ ]
+ if files != expected:
+ diff_generator = difflib.context_diff(
+ expected, files, fromfile="expected", tofile="got", lineterm=""
+ )
+ diff = "\n".join(diff_generator)
+ session.error(f"Got the wrong files:\n{diff}")
+
+ # Get back out into main.
+ session.run("git", "checkout", "-q", "main", external=True)
+
+ # Check and upload distribution files.
+ session.run("twine", "check", *files)
+
+ # Push the commits and tag.
+ # NOTE: The following fails if pushing to the branch is not allowed. This can
+ # happen on GitHub if the main branch is protected, there are required
+ # CI checks, and "Include administrators" is enabled on the protection.
+ session.run("git", "push", "upstream", "main", release_version, external=True)
+
+ # Upload the distribution.
+ session.run("twine", "upload", *files)
+
+ # Open up the GitHub release page.
+ webbrowser.open("https://github.com/pypa/packaging/releases")
+
+
+# -----------------------------------------------------------------------------
+# Helpers
+# -----------------------------------------------------------------------------
+def _get_version_from_arguments(arguments):
+ """Checks the arguments passed to `nox -s release`.
+
+ Only 1 argument that looks like a version? Return the argument.
+ Otherwise, raise a ValueError describing what's wrong.
+ """
+ if len(arguments) != 1:
+ raise ValueError("Expected exactly 1 argument")
+
+ version = arguments[0]
+ parts = version.split(".")
+
+ if len(parts) != 2:
+ # Not of the form: YY.N
+ raise ValueError("not of the form: YY.N")
+
+ if not all(part.isdigit() for part in parts):
+ # Not all segments are integers.
+ raise ValueError("non-integer segments")
+
+ # All is good.
+ return version
+
+
+def _check_working_directory_state(session):
+ """Check state of the working directory, prior to making the release."""
+ should_not_exist = ["build/", "dist/"]
+
+ bad_existing_paths = list(filter(os.path.exists, should_not_exist))
+ if bad_existing_paths:
+ session.error(f"Remove {', '.join(bad_existing_paths)} and try again")
+
+
+def _check_git_state(session, version_tag):
+ """Check state of the git repository, prior to making the release."""
+ # Ensure the upstream remote pushes to the correct URL.
+ allowed_upstreams = [
+ "git@github.com:pypa/packaging.git",
+ "https://github.com/pypa/packaging.git",
+ ]
+ result = subprocess.run(
+ ["git", "remote", "get-url", "--push", "upstream"],
+ capture_output=True,
+ encoding="utf-8",
+ )
+ if result.stdout.rstrip() not in allowed_upstreams:
+ session.error(f"git remote `upstream` is not one of {allowed_upstreams}")
+ # Ensure we're on main branch for cutting a release.
+ result = subprocess.run(
+ ["git", "rev-parse", "--abbrev-ref", "HEAD"],
+ capture_output=True,
+ encoding="utf-8",
+ )
+ if result.stdout != "main\n":
+ session.error(f"Not on main branch: {result.stdout!r}")
+
+ # Ensure there are no uncommitted changes.
+ result = subprocess.run(
+ ["git", "status", "--porcelain"], capture_output=True, encoding="utf-8"
+ )
+ if result.stdout:
+ print(result.stdout, end="", file=sys.stderr)
+ session.error("The working tree has uncommitted changes")
+
+ # Ensure this tag doesn't exist already.
+ result = subprocess.run(
+ ["git", "rev-parse", version_tag], capture_output=True, encoding="utf-8"
+ )
+ if not result.returncode:
+ session.error(f"Tag already exists! {version_tag} -- {result.stdout!r}")
+
+ # Back up the current git reference, in a tag that's easy to clean up.
+ _release_backup_tag = "auto/release-start-" + str(int(time.time()))
+ session.run("git", "tag", _release_backup_tag, external=True)
+
+
+def _bump(session, *, version, file, kind):
+ session.log(f"Bump version to {version!r}")
+ contents = file.read_text()
+ new_contents = re.sub(
+ '__version__ = "(.+)"', f'__version__ = "{version}"', contents
+ )
+ file.write_text(new_contents)
+
+ session.log("git commit")
+ subprocess.run(["git", "add", str(file)])
+ subprocess.run(["git", "commit", "-m", f"Bump for {kind}"])
+
+
+@contextlib.contextmanager
+def _replace_file(original_path):
+ # Create a temporary file.
+ fh, replacement_path = tempfile.mkstemp()
+
+ try:
+ with os.fdopen(fh, "w") as replacement:
+ with open(original_path) as original:
+ yield original, replacement
+ except Exception:
+ raise
+ else:
+ shutil.copymode(original_path, replacement_path)
+ os.remove(original_path)
+ shutil.move(replacement_path, original_path)
+
+
+def _changelog_update_unreleased_title(version, *, file):
+ """Update an "*unreleased*" heading to "{version} - {date}" """
+ yyyy_mm_dd = datetime.datetime.today().strftime("%Y-%m-%d")
+ title = f"{version} - {yyyy_mm_dd}"
+
+ with _replace_file(file) as (original, replacement):
+ for line in original:
+ if line == "*unreleased*\n":
+ replacement.write(f"{title}\n")
+ replacement.write(len(title) * "~" + "\n")
+ # Skip processing the next line (the heading underline for *unreleased*)
+ # since we already wrote the heading underline.
+ next(original)
+ else:
+ replacement.write(line)
+
+
+def _changelog_add_unreleased_title(*, file):
+ with _replace_file(file) as (original, replacement):
+ # Duplicate first 3 lines from the original file.
+ for _ in range(3):
+ line = next(original)
+ replacement.write(line)
+
+ # Write the heading.
+ replacement.write(
+ textwrap.dedent(
+ """\
+ *unreleased*
+ ~~~~~~~~~~~~
+
+ No unreleased changes.
+
+ """
+ )
+ )
+
+ # Duplicate all the remaining lines.
+ for line in original:
+ replacement.write(line)
diff --git a/testing/web-platform/tests/tools/third_party/packaging/packaging/__about__.py b/testing/web-platform/tests/tools/third_party/packaging/packaging/__about__.py
new file mode 100644
index 0000000000..3551bc2d29
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/packaging/__about__.py
@@ -0,0 +1,26 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+__all__ = [
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
+]
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "21.3"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+
+__license__ = "BSD-2-Clause or Apache-2.0"
+__copyright__ = "2014-2019 %s" % __author__
diff --git a/testing/web-platform/tests/tools/third_party/packaging/packaging/__init__.py b/testing/web-platform/tests/tools/third_party/packaging/packaging/__init__.py
new file mode 100644
index 0000000000..3c50c5dcfe
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/packaging/__init__.py
@@ -0,0 +1,25 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from .__about__ import (
+ __author__,
+ __copyright__,
+ __email__,
+ __license__,
+ __summary__,
+ __title__,
+ __uri__,
+ __version__,
+)
+
+__all__ = [
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
+]
diff --git a/testing/web-platform/tests/tools/third_party/packaging/packaging/_manylinux.py b/testing/web-platform/tests/tools/third_party/packaging/packaging/_manylinux.py
new file mode 100644
index 0000000000..4c379aa6f6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/packaging/_manylinux.py
@@ -0,0 +1,301 @@
+import collections
+import functools
+import os
+import re
+import struct
+import sys
+import warnings
+from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple
+
+
+# Python does not provide platform information at sufficient granularity to
+# identify the architecture of the running executable in some cases, so we
+# determine it dynamically by reading the information from the running
+# process. This only applies on Linux, which uses the ELF format.
+class _ELFFileHeader:
+ # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
+ class _InvalidELFFileHeader(ValueError):
+ """
+ An invalid ELF file header was found.
+ """
+
+ ELF_MAGIC_NUMBER = 0x7F454C46
+ ELFCLASS32 = 1
+ ELFCLASS64 = 2
+ ELFDATA2LSB = 1
+ ELFDATA2MSB = 2
+ EM_386 = 3
+ EM_S390 = 22
+ EM_ARM = 40
+ EM_X86_64 = 62
+ EF_ARM_ABIMASK = 0xFF000000
+ EF_ARM_ABI_VER5 = 0x05000000
+ EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+ def __init__(self, file: IO[bytes]) -> None:
+ def unpack(fmt: str) -> int:
+ try:
+ data = file.read(struct.calcsize(fmt))
+ result: Tuple[int, ...] = struct.unpack(fmt, data)
+ except struct.error:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ return result[0]
+
+ self.e_ident_magic = unpack(">I")
+ if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ self.e_ident_class = unpack("B")
+ if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ self.e_ident_data = unpack("B")
+ if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ self.e_ident_version = unpack("B")
+ self.e_ident_osabi = unpack("B")
+ self.e_ident_abiversion = unpack("B")
+ self.e_ident_pad = file.read(7)
+ format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
+ format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
+ format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
+ format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
+ self.e_type = unpack(format_h)
+ self.e_machine = unpack(format_h)
+ self.e_version = unpack(format_i)
+ self.e_entry = unpack(format_p)
+ self.e_phoff = unpack(format_p)
+ self.e_shoff = unpack(format_p)
+ self.e_flags = unpack(format_i)
+ self.e_ehsize = unpack(format_h)
+ self.e_phentsize = unpack(format_h)
+ self.e_phnum = unpack(format_h)
+ self.e_shentsize = unpack(format_h)
+ self.e_shnum = unpack(format_h)
+ self.e_shstrndx = unpack(format_h)
+
+
+def _get_elf_header() -> Optional[_ELFFileHeader]:
+ try:
+ with open(sys.executable, "rb") as f:
+ elf_header = _ELFFileHeader(f)
+ except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
+ return None
+ return elf_header
+
+
+def _is_linux_armhf() -> bool:
+ # hard-float ABI can be detected from the ELF header of the running
+ # process
+ # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
+ elf_header = _get_elf_header()
+ if elf_header is None:
+ return False
+ result = elf_header.e_ident_class == elf_header.ELFCLASS32
+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+ result &= elf_header.e_machine == elf_header.EM_ARM
+ result &= (
+ elf_header.e_flags & elf_header.EF_ARM_ABIMASK
+ ) == elf_header.EF_ARM_ABI_VER5
+ result &= (
+ elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
+ ) == elf_header.EF_ARM_ABI_FLOAT_HARD
+ return result
+
+
+def _is_linux_i686() -> bool:
+ elf_header = _get_elf_header()
+ if elf_header is None:
+ return False
+ result = elf_header.e_ident_class == elf_header.ELFCLASS32
+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+ result &= elf_header.e_machine == elf_header.EM_386
+ return result
+
+
+def _have_compatible_abi(arch: str) -> bool:
+ if arch == "armv7l":
+ return _is_linux_armhf()
+ if arch == "i686":
+ return _is_linux_i686()
+ return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
+
+
+# If glibc ever changes its major version, we need to know what the last
+# minor version was, so we can build the complete list of all versions.
+# For now, guess what the highest minor version might be, assume it will
+# be 50 for testing. Once this actually happens, update the dictionary
+# with the actual value.
+_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
+
+
+class _GLibCVersion(NamedTuple):
+ major: int
+ minor: int
+
+
+def _glibc_version_string_confstr() -> Optional[str]:
+ """
+ Primary implementation of glibc_version_string using os.confstr.
+ """
+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+ # to be broken or missing. This strategy is used in the standard library
+ # platform module.
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
+ try:
+ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
+ version_string = os.confstr("CS_GNU_LIBC_VERSION")
+ assert version_string is not None
+ _, version = version_string.split()
+ except (AssertionError, AttributeError, OSError, ValueError):
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+ return None
+ return version
+
+
+def _glibc_version_string_ctypes() -> Optional[str]:
+ """
+ Fallback implementation of glibc_version_string using ctypes.
+ """
+ try:
+ import ctypes
+ except ImportError:
+ return None
+
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+ # manpage says, "If filename is NULL, then the returned handle is for the
+ # main program". This way we can let the linker do the work to figure out
+ # which libc our process is actually using.
+ #
+ # We must also handle the special case where the executable is not a
+ # dynamically linked executable. This can occur when using musl libc,
+ # for example. In this situation, dlopen() will error, leading to an
+ # OSError. Interestingly, at least in the case of musl, there is no
+ # errno set on the OSError. The single string argument used to construct
+ # OSError comes from libc itself and is therefore not portable to
+ # hard code here. In any case, failure to call dlopen() means we
+ # can't proceed, so we bail on our attempt.
+ try:
+ process_namespace = ctypes.CDLL(None)
+ except OSError:
+ return None
+
+ try:
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
+ except AttributeError:
+ # Symbol doesn't exist -> therefore, we are not linked to
+ # glibc.
+ return None
+
+ # Call gnu_get_libc_version, which returns a string like "2.5"
+ gnu_get_libc_version.restype = ctypes.c_char_p
+ version_str: str = gnu_get_libc_version()
+ # py2 / py3 compatibility:
+ if not isinstance(version_str, str):
+ version_str = version_str.decode("ascii")
+
+ return version_str
+
+
+def _glibc_version_string() -> Optional[str]:
+ """Returns glibc version string, or None if not using glibc."""
+ return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
+ """Parse glibc version.
+
+ We use a regexp instead of str.split because we want to discard any
+ random junk that might come after the minor version -- this might happen
+ in patched/forked versions of glibc (e.g. Linaro's version of glibc
+ uses version strings like "2.20-2014.11"). See gh-3588.
+ """
+ m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+ if not m:
+ warnings.warn(
+ "Expected glibc version with 2 components major.minor,"
+ " got: %s" % version_str,
+ RuntimeWarning,
+ )
+ return -1, -1
+ return int(m.group("major")), int(m.group("minor"))
+
+
+@functools.lru_cache()
+def _get_glibc_version() -> Tuple[int, int]:
+ version_str = _glibc_version_string()
+ if version_str is None:
+ return (-1, -1)
+ return _parse_glibc_version(version_str)
+
+
+# From PEP 513, PEP 600
+def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
+ sys_glibc = _get_glibc_version()
+ if sys_glibc < version:
+ return False
+ # Check for presence of _manylinux module.
+ try:
+ import _manylinux # noqa
+ except ImportError:
+ return True
+ if hasattr(_manylinux, "manylinux_compatible"):
+ result = _manylinux.manylinux_compatible(version[0], version[1], arch)
+ if result is not None:
+ return bool(result)
+ return True
+ if version == _GLibCVersion(2, 5):
+ if hasattr(_manylinux, "manylinux1_compatible"):
+ return bool(_manylinux.manylinux1_compatible)
+ if version == _GLibCVersion(2, 12):
+ if hasattr(_manylinux, "manylinux2010_compatible"):
+ return bool(_manylinux.manylinux2010_compatible)
+ if version == _GLibCVersion(2, 17):
+ if hasattr(_manylinux, "manylinux2014_compatible"):
+ return bool(_manylinux.manylinux2014_compatible)
+ return True
+
+
+_LEGACY_MANYLINUX_MAP = {
+ # CentOS 7 w/ glibc 2.17 (PEP 599)
+ (2, 17): "manylinux2014",
+ # CentOS 6 w/ glibc 2.12 (PEP 571)
+ (2, 12): "manylinux2010",
+ # CentOS 5 w/ glibc 2.5 (PEP 513)
+ (2, 5): "manylinux1",
+}
+
+
+def platform_tags(linux: str, arch: str) -> Iterator[str]:
+ if not _have_compatible_abi(arch):
+ return
+ # The oldest glibc supported regardless of architecture is (2, 17);
+ # too_old_glibc2 is one minor below that because it is used as an
+ # exclusive lower bound in the loop below.
+ too_old_glibc2 = _GLibCVersion(2, 16)
+ if arch in {"x86_64", "i686"}:
+ # On x86_64/i686 the oldest supported glibc is (2, 5).
+ too_old_glibc2 = _GLibCVersion(2, 4)
+ current_glibc = _GLibCVersion(*_get_glibc_version())
+ glibc_max_list = [current_glibc]
+ # We can assume compatibility across glibc major versions.
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
+ #
+ # Build a list of maximum glibc versions so that we can
+ # output the canonical list of all glibc from current_glibc
+ # down to too_old_glibc2, including all intermediary versions.
+ for glibc_major in range(current_glibc.major - 1, 1, -1):
+ glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
+ glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
+ for glibc_max in glibc_max_list:
+ if glibc_max.major == too_old_glibc2.major:
+ min_minor = too_old_glibc2.minor
+ else:
+ # For other glibc major versions oldest supported is (x, 0).
+ min_minor = -1
+ for glibc_minor in range(glibc_max.minor, min_minor, -1):
+ glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+ tag = "manylinux_{}_{}".format(*glibc_version)
+ if _is_compatible(tag, arch, glibc_version):
+ yield linux.replace("linux", tag)
+ # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+ if glibc_version in _LEGACY_MANYLINUX_MAP:
+ legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+ if _is_compatible(legacy_tag, arch, glibc_version):
+ yield linux.replace("linux", legacy_tag)
diff --git a/testing/web-platform/tests/tools/third_party/packaging/packaging/_musllinux.py b/testing/web-platform/tests/tools/third_party/packaging/packaging/_musllinux.py
new file mode 100644
index 0000000000..8ac3059ba3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/packaging/_musllinux.py
@@ -0,0 +1,136 @@
+"""PEP 656 support.
+
+This module implements logic to detect if the currently running Python is
+linked against musl, and what musl version is used.
+"""
+
+import contextlib
+import functools
+import operator
+import os
+import re
+import struct
+import subprocess
+import sys
+from typing import IO, Iterator, NamedTuple, Optional, Tuple
+
+
+def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]:
+ return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
+
+
+def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]:
+ """Detect musl libc location by parsing the Python executable.
+
+ Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+ ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+ """
+ f.seek(0)
+ try:
+ ident = _read_unpacked(f, "16B")
+ except struct.error:
+ return None
+ if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF.
+ return None
+ f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version.
+
+ try:
+ # e_fmt: Format for program header.
+ # p_fmt: Format for section header.
+ # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+ e_fmt, p_fmt, p_idx = {
+ 1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit.
+ 2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit.
+ }[ident[4]]
+ except KeyError:
+ return None
+ else:
+ p_get = operator.itemgetter(*p_idx)
+
+ # Find the interpreter section and return its content.
+ try:
+ _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt)
+ except struct.error:
+ return None
+ for i in range(e_phnum + 1):
+ f.seek(e_phoff + e_phentsize * i)
+ try:
+ p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt))
+ except struct.error:
+ return None
+ if p_type != 3: # Not PT_INTERP.
+ continue
+ f.seek(p_offset)
+ interpreter = os.fsdecode(f.read(p_filesz)).strip("\0")
+ if "musl" not in interpreter:
+ return None
+ return interpreter
+ return None
+
+
+class _MuslVersion(NamedTuple):
+ major: int
+ minor: int
+
+
+def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
+ lines = [n for n in (n.strip() for n in output.splitlines()) if n]
+ if len(lines) < 2 or lines[0][:4] != "musl":
+ return None
+ m = re.match(r"Version (\d+)\.(\d+)", lines[1])
+ if not m:
+ return None
+ return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
+
+
+@functools.lru_cache()
+def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
+ """Detect currently-running musl runtime version.
+
+ This is done by checking the specified executable's dynamic linking
+ information, and invoking the loader to parse its output for a version
+ string. If the loader is musl, the output would be something like::
+
+ musl libc (x86_64)
+ Version 1.2.2
+ Dynamic Program Loader
+ """
+ with contextlib.ExitStack() as stack:
+ try:
+ f = stack.enter_context(open(executable, "rb"))
+ except OSError:
+ return None
+ ld = _parse_ld_musl_from_elf(f)
+ if not ld:
+ return None
+ proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
+ return _parse_musl_version(proc.stderr)
+
+
+def platform_tags(arch: str) -> Iterator[str]:
+ """Generate musllinux tags compatible to the current platform.
+
+ :param arch: Should be the part of the platform tag after the ``linux_``
+ prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
+ prerequisite for the current platform to be musllinux-compatible.
+
+ :returns: An iterator of compatible musllinux tags.
+ """
+ sys_musl = _get_musl_version(sys.executable)
+ if sys_musl is None: # Python not dynamically linked against musl.
+ return
+ for minor in range(sys_musl.minor, -1, -1):
+ yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+
+
+if __name__ == "__main__": # pragma: no cover
+ import sysconfig
+
+ plat = sysconfig.get_platform()
+ assert plat.startswith("linux-"), "not linux"
+
+ print("plat:", plat)
+ print("musl:", _get_musl_version(sys.executable))
+ print("tags:", end=" ")
+ for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
+ print(t, end="\n ")
diff --git a/testing/web-platform/tests/tools/third_party/packaging/packaging/_structures.py b/testing/web-platform/tests/tools/third_party/packaging/packaging/_structures.py
new file mode 100644
index 0000000000..90a6465f96
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/packaging/_structures.py
@@ -0,0 +1,61 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+class InfinityType:
+ def __repr__(self) -> str:
+ return "Infinity"
+
+ def __hash__(self) -> int:
+ return hash(repr(self))
+
+ def __lt__(self, other: object) -> bool:
+ return False
+
+ def __le__(self, other: object) -> bool:
+ return False
+
+ def __eq__(self, other: object) -> bool:
+ return isinstance(other, self.__class__)
+
+ def __gt__(self, other: object) -> bool:
+ return True
+
+ def __ge__(self, other: object) -> bool:
+ return True
+
+ def __neg__(self: object) -> "NegativeInfinityType":
+ return NegativeInfinity
+
+
+Infinity = InfinityType()
+
+
+class NegativeInfinityType:
+ def __repr__(self) -> str:
+ return "-Infinity"
+
+ def __hash__(self) -> int:
+ return hash(repr(self))
+
+ def __lt__(self, other: object) -> bool:
+ return True
+
+ def __le__(self, other: object) -> bool:
+ return True
+
+ def __eq__(self, other: object) -> bool:
+ return isinstance(other, self.__class__)
+
+ def __gt__(self, other: object) -> bool:
+ return False
+
+ def __ge__(self, other: object) -> bool:
+ return False
+
+ def __neg__(self: object) -> InfinityType:
+ return Infinity
+
+
+NegativeInfinity = NegativeInfinityType()
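
These two singletons are used elsewhere in the package as sentinel padding values
when building version sort keys; a short sketch of their ordering behaviour:

    from packaging._structures import Infinity, NegativeInfinity

    assert Infinity > (99, 99)            # compares greater than any other value
    assert NegativeInfinity < (0,)        # compares less than any other value
    assert -Infinity is NegativeInfinity  # negation swaps the two singletons
    assert Infinity == Infinity and Infinity != NegativeInfinity
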
diff --git a/testing/web-platform/tests/tools/third_party/packaging/packaging/markers.py b/testing/web-platform/tests/tools/third_party/packaging/packaging/markers.py
new file mode 100644
index 0000000000..cb640e8f9b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/packaging/markers.py
@@ -0,0 +1,304 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import operator
+import os
+import platform
+import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
+from pyparsing import ( # noqa: N817
+ Forward,
+ Group,
+ Literal as L,
+ ParseException,
+ ParseResults,
+ QuotedString,
+ ZeroOrMore,
+ stringEnd,
+ stringStart,
+)
+
+from .specifiers import InvalidSpecifier, Specifier
+
+__all__ = [
+ "InvalidMarker",
+ "UndefinedComparison",
+ "UndefinedEnvironmentName",
+ "Marker",
+ "default_environment",
+]
+
+Operator = Callable[[str, str], bool]
+
+
+class InvalidMarker(ValueError):
+ """
+ An invalid marker was found; users should refer to PEP 508.
+ """
+
+
+class UndefinedComparison(ValueError):
+ """
+ An invalid operation was attempted on a value that doesn't support it.
+ """
+
+
+class UndefinedEnvironmentName(ValueError):
+ """
+ A name was used that does not exist inside of the environment.
+ """
+
+
+class Node:
+ def __init__(self, value: Any) -> None:
+ self.value = value
+
+ def __str__(self) -> str:
+ return str(self.value)
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__}('{self}')>"
+
+ def serialize(self) -> str:
+ raise NotImplementedError
+
+
+class Variable(Node):
+ def serialize(self) -> str:
+ return str(self)
+
+
+class Value(Node):
+ def serialize(self) -> str:
+ return f'"{self}"'
+
+
+class Op(Node):
+ def serialize(self) -> str:
+ return str(self)
+
+
+VARIABLE = (
+ L("implementation_version")
+ | L("platform_python_implementation")
+ | L("implementation_name")
+ | L("python_full_version")
+ | L("platform_release")
+ | L("platform_version")
+ | L("platform_machine")
+ | L("platform_system")
+ | L("python_version")
+ | L("sys_platform")
+ | L("os_name")
+ | L("os.name") # PEP-345
+ | L("sys.platform") # PEP-345
+ | L("platform.version") # PEP-345
+ | L("platform.machine") # PEP-345
+ | L("platform.python_implementation") # PEP-345
+ | L("python_implementation") # undocumented setuptools legacy
+ | L("extra") # PEP-508
+)
+ALIASES = {
+ "os.name": "os_name",
+ "sys.platform": "sys_platform",
+ "platform.version": "platform_version",
+ "platform.machine": "platform_machine",
+ "platform.python_implementation": "platform_python_implementation",
+ "python_implementation": "platform_python_implementation",
+}
+VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
+
+VERSION_CMP = (
+ L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
+)
+
+MARKER_OP = VERSION_CMP | L("not in") | L("in")
+MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
+
+MARKER_VALUE = QuotedString("'") | QuotedString('"')
+MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
+
+BOOLOP = L("and") | L("or")
+
+MARKER_VAR = VARIABLE | MARKER_VALUE
+
+MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
+MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
+
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+
+MARKER_EXPR = Forward()
+MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
+MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
+
+MARKER = stringStart + MARKER_EXPR + stringEnd
+
+
+def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]:
+ if isinstance(results, ParseResults):
+ return [_coerce_parse_result(i) for i in results]
+ else:
+ return results
+
+
+def _format_marker(
+ marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True
+) -> str:
+
+ assert isinstance(marker, (list, tuple, str))
+
+ # Sometimes we have a structure like [[...]] which is a single item list
+ # where the single item is itself its own list. In that case we want to skip
+ # the rest of this function so that we don't get extraneous () on the
+ # outside.
+ if (
+ isinstance(marker, list)
+ and len(marker) == 1
+ and isinstance(marker[0], (list, tuple))
+ ):
+ return _format_marker(marker[0])
+
+ if isinstance(marker, list):
+ inner = (_format_marker(m, first=False) for m in marker)
+ if first:
+ return " ".join(inner)
+ else:
+ return "(" + " ".join(inner) + ")"
+ elif isinstance(marker, tuple):
+ return " ".join([m.serialize() for m in marker])
+ else:
+ return marker
+
+
+_operators: Dict[str, Operator] = {
+ "in": lambda lhs, rhs: lhs in rhs,
+ "not in": lambda lhs, rhs: lhs not in rhs,
+ "<": operator.lt,
+ "<=": operator.le,
+ "==": operator.eq,
+ "!=": operator.ne,
+ ">=": operator.ge,
+ ">": operator.gt,
+}
+
+
+def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
+ try:
+ spec = Specifier("".join([op.serialize(), rhs]))
+ except InvalidSpecifier:
+ pass
+ else:
+ return spec.contains(lhs)
+
+ oper: Optional[Operator] = _operators.get(op.serialize())
+ if oper is None:
+ raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
+
+ return oper(lhs, rhs)
+
+
+class Undefined:
+ pass
+
+
+_undefined = Undefined()
+
+
+def _get_env(environment: Dict[str, str], name: str) -> str:
+ value: Union[str, Undefined] = environment.get(name, _undefined)
+
+ if isinstance(value, Undefined):
+ raise UndefinedEnvironmentName(
+ f"{name!r} does not exist in evaluation environment."
+ )
+
+ return value
+
+
+def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool:
+ groups: List[List[bool]] = [[]]
+
+ for marker in markers:
+ assert isinstance(marker, (list, tuple, str))
+
+ if isinstance(marker, list):
+ groups[-1].append(_evaluate_markers(marker, environment))
+ elif isinstance(marker, tuple):
+ lhs, op, rhs = marker
+
+ if isinstance(lhs, Variable):
+ lhs_value = _get_env(environment, lhs.value)
+ rhs_value = rhs.value
+ else:
+ lhs_value = lhs.value
+ rhs_value = _get_env(environment, rhs.value)
+
+ groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+ else:
+ assert marker in ["and", "or"]
+ if marker == "or":
+ groups.append([])
+
+ return any(all(item) for item in groups)
+
+
+def format_full_version(info: "sys._version_info") -> str:
+ version = "{0.major}.{0.minor}.{0.micro}".format(info)
+ kind = info.releaselevel
+ if kind != "final":
+ version += kind[0] + str(info.serial)
+ return version
+
+
+def default_environment() -> Dict[str, str]:
+ iver = format_full_version(sys.implementation.version)
+ implementation_name = sys.implementation.name
+ return {
+ "implementation_name": implementation_name,
+ "implementation_version": iver,
+ "os_name": os.name,
+ "platform_machine": platform.machine(),
+ "platform_release": platform.release(),
+ "platform_system": platform.system(),
+ "platform_version": platform.version(),
+ "python_full_version": platform.python_version(),
+ "platform_python_implementation": platform.python_implementation(),
+ "python_version": ".".join(platform.python_version_tuple()[:2]),
+ "sys_platform": sys.platform,
+ }
+
+
+class Marker:
+ def __init__(self, marker: str) -> None:
+ try:
+ self._markers = _coerce_parse_result(MARKER.parseString(marker))
+ except ParseException as e:
+ raise InvalidMarker(
+ f"Invalid marker: {marker!r}, parse error at "
+ f"{marker[e.loc : e.loc + 8]!r}"
+ )
+
+ def __str__(self) -> str:
+ return _format_marker(self._markers)
+
+ def __repr__(self) -> str:
+ return f"<Marker('{self}')>"
+
+ def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
+ """Evaluate a marker.
+
+ Return the boolean from evaluating the given marker against the
+ environment. environment is an optional argument to override all or
+ part of the determined environment.
+
+ The environment is determined from the current Python process.
+ """
+ current_environment = default_environment()
+ if environment is not None:
+ current_environment.update(environment)
+
+ return _evaluate_markers(self._markers, current_environment)
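
A brief sketch of the public ``Marker`` API defined above (the import path assumes
the vendored ``packaging`` package from this diff):

    from packaging.markers import Marker

    m = Marker('python_version >= "3.6" and sys_platform != "win32"')
    m.evaluate()                            # evaluated against the running interpreter
    m.evaluate({"sys_platform": "win32"})   # override part of the environment -> False
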
diff --git a/testing/web-platform/tests/tools/third_party/packaging/packaging/py.typed b/testing/web-platform/tests/tools/third_party/packaging/packaging/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/packaging/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/packaging/packaging/requirements.py b/testing/web-platform/tests/tools/third_party/packaging/packaging/requirements.py
new file mode 100644
index 0000000000..53f9a3aa42
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/packaging/requirements.py
@@ -0,0 +1,146 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import re
+import string
+import urllib.parse
+from typing import List, Optional as TOptional, Set
+
+from pyparsing import ( # noqa
+ Combine,
+ Literal as L,
+ Optional,
+ ParseException,
+ Regex,
+ Word,
+ ZeroOrMore,
+ originalTextFor,
+ stringEnd,
+ stringStart,
+)
+
+from .markers import MARKER_EXPR, Marker
+from .specifiers import LegacySpecifier, Specifier, SpecifierSet
+
+
+class InvalidRequirement(ValueError):
+ """
+ An invalid requirement was found; users should refer to PEP 508.
+ """
+
+
+ALPHANUM = Word(string.ascii_letters + string.digits)
+
+LBRACKET = L("[").suppress()
+RBRACKET = L("]").suppress()
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+COMMA = L(",").suppress()
+SEMICOLON = L(";").suppress()
+AT = L("@").suppress()
+
+PUNCTUATION = Word("-_.")
+IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
+IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
+
+NAME = IDENTIFIER("name")
+EXTRA = IDENTIFIER
+
+URI = Regex(r"[^ ]+")("url")
+URL = AT + URI
+
+EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
+EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
+
+VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
+VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
+
+VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
+VERSION_MANY = Combine(
+ VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
+)("_raw_spec")
+_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)
+_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
+
+VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
+VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
+
+MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
+MARKER_EXPR.setParseAction(
+ lambda s, l, t: Marker(s[t._original_start : t._original_end])
+)
+MARKER_SEPARATOR = SEMICOLON
+MARKER = MARKER_SEPARATOR + MARKER_EXPR
+
+VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
+URL_AND_MARKER = URL + Optional(MARKER)
+
+NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
+
+REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
+# pyparsing isn't thread safe during initialization, so we do it eagerly; see
+# issue #104
+REQUIREMENT.parseString("x[]")
+
+
+class Requirement:
+ """Parse a requirement.
+
+ Parse a given requirement string into its parts, such as name, specifier,
+ URL, and extras. Raises InvalidRequirement on a badly-formed requirement
+ string.
+ """
+
+ # TODO: Can we test whether something is contained within a requirement?
+ # If so how do we do that? Do we need to test against the _name_ of
+ # the thing as well as the version? What about the markers?
+ # TODO: Can we normalize the name and extra name?
+
+ def __init__(self, requirement_string: str) -> None:
+ try:
+ req = REQUIREMENT.parseString(requirement_string)
+ except ParseException as e:
+ raise InvalidRequirement(
+ f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}'
+ )
+
+ self.name: str = req.name
+ if req.url:
+ parsed_url = urllib.parse.urlparse(req.url)
+ if parsed_url.scheme == "file":
+ if urllib.parse.urlunparse(parsed_url) != req.url:
+ raise InvalidRequirement("Invalid URL given")
+ elif not (parsed_url.scheme and parsed_url.netloc) or (
+ not parsed_url.scheme and not parsed_url.netloc
+ ):
+ raise InvalidRequirement(f"Invalid URL: {req.url}")
+ self.url: TOptional[str] = req.url
+ else:
+ self.url = None
+ self.extras: Set[str] = set(req.extras.asList() if req.extras else [])
+ self.specifier: SpecifierSet = SpecifierSet(req.specifier)
+ self.marker: TOptional[Marker] = req.marker if req.marker else None
+
+ def __str__(self) -> str:
+ parts: List[str] = [self.name]
+
+ if self.extras:
+ formatted_extras = ",".join(sorted(self.extras))
+ parts.append(f"[{formatted_extras}]")
+
+ if self.specifier:
+ parts.append(str(self.specifier))
+
+ if self.url:
+ parts.append(f"@ {self.url}")
+ if self.marker:
+ parts.append(" ")
+
+ if self.marker:
+ parts.append(f"; {self.marker}")
+
+ return "".join(parts)
+
+ def __repr__(self) -> str:
+ return f"<Requirement('{self}')>"
diff --git a/testing/web-platform/tests/tools/third_party/packaging/packaging/specifiers.py b/testing/web-platform/tests/tools/third_party/packaging/packaging/specifiers.py
new file mode 100644
index 0000000000..0e218a6f9f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/packaging/specifiers.py
@@ -0,0 +1,802 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import abc
+import functools
+import itertools
+import re
+import warnings
+from typing import (
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Pattern,
+ Set,
+ Tuple,
+ TypeVar,
+ Union,
+)
+
+from .utils import canonicalize_version
+from .version import LegacyVersion, Version, parse
+
+ParsedVersion = Union[Version, LegacyVersion]
+UnparsedVersion = Union[Version, LegacyVersion, str]
+VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion)
+CallableOperator = Callable[[ParsedVersion, str], bool]
+
+
+class InvalidSpecifier(ValueError):
+ """
+ An invalid specifier was found; users should refer to PEP 440.
+ """
+
+
+class BaseSpecifier(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def __str__(self) -> str:
+ """
+ Returns the str representation of this Specifier like object. This
+ should be representative of the Specifier itself.
+ """
+
+ @abc.abstractmethod
+ def __hash__(self) -> int:
+ """
+ Returns a hash value for this Specifier like object.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Returns a boolean representing whether or not the two Specifier like
+ objects are equal.
+ """
+
+ @abc.abstractproperty
+ def prereleases(self) -> Optional[bool]:
+ """
+ Returns whether or not pre-releases as a whole are allowed by this
+ specifier.
+ """
+
+ @prereleases.setter
+ def prereleases(self, value: bool) -> None:
+ """
+ Sets whether or not pre-releases as a whole are allowed by this
+ specifier.
+ """
+
+ @abc.abstractmethod
+ def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
+ """
+ Determines if the given item is contained within this specifier.
+ """
+
+ @abc.abstractmethod
+ def filter(
+ self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+ ) -> Iterable[VersionTypeVar]:
+ """
+ Takes an iterable of items and filters them so that only items which
+ are contained within this specifier are allowed in it.
+ """
+
+
+class _IndividualSpecifier(BaseSpecifier):
+
+ _operators: Dict[str, str] = {}
+ _regex: Pattern[str]
+
+ def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+ match = self._regex.search(spec)
+ if not match:
+ raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
+
+ self._spec: Tuple[str, str] = (
+ match.group("operator").strip(),
+ match.group("version").strip(),
+ )
+
+ # Store whether or not this Specifier should accept prereleases
+ self._prereleases = prereleases
+
+ def __repr__(self) -> str:
+ pre = (
+ f", prereleases={self.prereleases!r}"
+ if self._prereleases is not None
+ else ""
+ )
+
+ return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
+
+ def __str__(self) -> str:
+ return "{}{}".format(*self._spec)
+
+ @property
+ def _canonical_spec(self) -> Tuple[str, str]:
+ return self._spec[0], canonicalize_version(self._spec[1])
+
+ def __hash__(self) -> int:
+ return hash(self._canonical_spec)
+
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, str):
+ try:
+ other = self.__class__(str(other))
+ except InvalidSpecifier:
+ return NotImplemented
+ elif not isinstance(other, self.__class__):
+ return NotImplemented
+
+ return self._canonical_spec == other._canonical_spec
+
+ def _get_operator(self, op: str) -> CallableOperator:
+ operator_callable: CallableOperator = getattr(
+ self, f"_compare_{self._operators[op]}"
+ )
+ return operator_callable
+
+ def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion:
+ if not isinstance(version, (LegacyVersion, Version)):
+ version = parse(version)
+ return version
+
+ @property
+ def operator(self) -> str:
+ return self._spec[0]
+
+ @property
+ def version(self) -> str:
+ return self._spec[1]
+
+ @property
+ def prereleases(self) -> Optional[bool]:
+ return self._prereleases
+
+ @prereleases.setter
+ def prereleases(self, value: bool) -> None:
+ self._prereleases = value
+
+ def __contains__(self, item: str) -> bool:
+ return self.contains(item)
+
+ def contains(
+ self, item: UnparsedVersion, prereleases: Optional[bool] = None
+ ) -> bool:
+
+ # Determine if prereleases are to be allowed or not.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # Normalize item to a Version or LegacyVersion; this allows us to have
+ # a shortcut for ``"2.0" in Specifier(">=2")``.
+ normalized_item = self._coerce_version(item)
+
+ # Determine if we should be supporting prereleases in this specifier
+ # or not; if we do not support prereleases, then we can short-circuit
+ # the logic if this version is a prerelease.
+ if normalized_item.is_prerelease and not prereleases:
+ return False
+
+ # Actually do the comparison to determine if this item is contained
+ # within this Specifier or not.
+ operator_callable: CallableOperator = self._get_operator(self.operator)
+ return operator_callable(normalized_item, self.version)
+
+ def filter(
+ self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+ ) -> Iterable[VersionTypeVar]:
+
+ yielded = False
+ found_prereleases = []
+
+ kw = {"prereleases": prereleases if prereleases is not None else True}
+
+ # Attempt to iterate over all the values in the iterable and if any of
+ # them match, yield them.
+ for version in iterable:
+ parsed_version = self._coerce_version(version)
+
+ if self.contains(parsed_version, **kw):
+ # If our version is a prerelease, and we were not set to allow
+ # prereleases, then we'll store it for later in case nothing
+ # else matches this specifier.
+ if parsed_version.is_prerelease and not (
+ prereleases or self.prereleases
+ ):
+ found_prereleases.append(version)
+ # Either this is not a prerelease, or we should have been
+ # accepting prereleases from the beginning.
+ else:
+ yielded = True
+ yield version
+
+ # Now that we've iterated over everything, determine if we've yielded
+ # any values, and if we have not and we have any prereleases stored up
+ # then we will go ahead and yield the prereleases.
+ if not yielded and found_prereleases:
+ for version in found_prereleases:
+ yield version
+
+
+class LegacySpecifier(_IndividualSpecifier):
+
+ _regex_str = r"""
+ (?P<operator>(==|!=|<=|>=|<|>))
+ \s*
+ (?P<version>
+ [^,;\s)]* # Since this is a "legacy" specifier, and the version
+ # string can be just about anything, we match everything
+ # except for whitespace, a semi-colon for marker support,
+ # a closing paren since versions can be enclosed in
+ # them, and a comma since it's a version separator.
+ )
+ """
+
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ _operators = {
+ "==": "equal",
+ "!=": "not_equal",
+ "<=": "less_than_equal",
+ ">=": "greater_than_equal",
+ "<": "less_than",
+ ">": "greater_than",
+ }
+
+ def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+ super().__init__(spec, prereleases)
+
+ warnings.warn(
+ "Creating a LegacyVersion has been deprecated and will be "
+ "removed in the next major release",
+ DeprecationWarning,
+ )
+
+ def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion:
+ if not isinstance(version, LegacyVersion):
+ version = LegacyVersion(str(version))
+ return version
+
+ def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool:
+ return prospective == self._coerce_version(spec)
+
+ def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool:
+ return prospective != self._coerce_version(spec)
+
+ def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool:
+ return prospective <= self._coerce_version(spec)
+
+ def _compare_greater_than_equal(
+ self, prospective: LegacyVersion, spec: str
+ ) -> bool:
+ return prospective >= self._coerce_version(spec)
+
+ def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool:
+ return prospective < self._coerce_version(spec)
+
+ def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool:
+ return prospective > self._coerce_version(spec)
+
+
+def _require_version_compare(
+ fn: Callable[["Specifier", ParsedVersion, str], bool]
+) -> Callable[["Specifier", ParsedVersion, str], bool]:
+ @functools.wraps(fn)
+ def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool:
+ if not isinstance(prospective, Version):
+ return False
+ return fn(self, prospective, spec)
+
+ return wrapped
+
+
+class Specifier(_IndividualSpecifier):
+
+ _regex_str = r"""
+ (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+ (?P<version>
+ (?:
+ # The identity operators allow for an escape hatch that will
+ # do an exact string match of the version you wish to install.
+ # This will not be parsed by PEP 440 and we cannot determine
+ # any semantic meaning from it. This operator is discouraged
+ # but included entirely as an escape hatch.
+ (?<====) # Only match for the identity operator
+ \s*
+ [^\s]* # We just match everything, except for whitespace
+ # since we are only testing for strict identity.
+ )
+ |
+ (?:
+ # The (non)equality operators allow for wild card and local
+ # versions to be specified so we have to define these two
+ # operators separately to enable that.
+ (?<===|!=) # Only match for equals and not equals
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)* # release
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+
+ # You cannot use a wild card and a dev or local version
+ # together so group them with a | and make them optional.
+ (?:
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+ |
+ \.\* # Wild card syntax of .*
+ )?
+ )
+ |
+ (?:
+ # The compatible operator requires at least two digits in the
+ # release segment.
+ (?<=~=) # Only match for the compatible operator
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ )
+ |
+ (?:
+ # All other operators only allow a subset of what the
+ # (non)equality operators do. Specifically they do not allow
+ # local versions to be specified nor do they allow the prefix
+ # matching wild cards.
+ (?<!==|!=|~=) # We have special cases for these
+ # operators so we want to make sure they
+ # don't match here.
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)* # release
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ )
+ )
+ """
+
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ _operators = {
+ "~=": "compatible",
+ "==": "equal",
+ "!=": "not_equal",
+ "<=": "less_than_equal",
+ ">=": "greater_than_equal",
+ "<": "less_than",
+ ">": "greater_than",
+ "===": "arbitrary",
+ }
+
+ @_require_version_compare
+ def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool:
+
+ # Compatible releases have an equivalent combination of >= and ==. That
+ # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+ # implement this in terms of the other specifiers instead of
+ # implementing it ourselves. The only thing we need to do is construct
+ # the other specifiers.
+
+ # We want everything but the last item in the version, but we want to
+ # ignore suffix segments.
+ prefix = ".".join(
+ list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
+ )
+
+ # Add the prefix notation to the end of our string
+ prefix += ".*"
+
+ return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+ prospective, prefix
+ )
+
+ @_require_version_compare
+ def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+
+ # We need special logic to handle prefix matching
+ if spec.endswith(".*"):
+ # In the case of prefix matching we want to ignore local segment.
+ prospective = Version(prospective.public)
+ # Split the spec out by dots, and pretend that there is an implicit
+ # dot in between a release segment and a pre-release segment.
+ split_spec = _version_split(spec[:-2]) # Remove the trailing .*
+
+ # Split the prospective version out by dots, and pretend that there
+ # is an implicit dot in between a release segment and a pre-release
+ # segment.
+ split_prospective = _version_split(str(prospective))
+
+ # Shorten the prospective version to be the same length as the spec
+ # so that we can determine if the specifier is a prefix of the
+ # prospective version or not.
+ shortened_prospective = split_prospective[: len(split_spec)]
+
+ # Pad out our two sides with zeros so that they both equal the same
+ # length.
+ padded_spec, padded_prospective = _pad_version(
+ split_spec, shortened_prospective
+ )
+
+ return padded_prospective == padded_spec
+ else:
+ # Convert our spec string into a Version
+ spec_version = Version(spec)
+
+ # If the specifier does not have a local segment, then we want to
+ # act as if the prospective version also does not have a local
+ # segment.
+ if not spec_version.local:
+ prospective = Version(prospective.public)
+
+ return prospective == spec_version
+
+ @_require_version_compare
+ def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+ return not self._compare_equal(prospective, spec)
+
+ @_require_version_compare
+ def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+
+ # NB: Local version identifiers are NOT permitted in the version
+ # specifier, so local version labels can be universally removed from
+ # the prospective version.
+ return Version(prospective.public) <= Version(spec)
+
+ @_require_version_compare
+ def _compare_greater_than_equal(
+ self, prospective: ParsedVersion, spec: str
+ ) -> bool:
+
+ # NB: Local version identifiers are NOT permitted in the version
+ # specifier, so local version labels can be universally removed from
+ # the prospective version.
+ return Version(prospective.public) >= Version(spec)
+
+ @_require_version_compare
+ def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+
+ # Convert our spec to a Version instance, since we'll want to work with
+ # it as a version.
+ spec = Version(spec_str)
+
+ # Check to see if the prospective version is less than the spec
+ # version. If it's not we can short circuit and just return False now
+ # instead of doing extra unneeded work.
+ if not prospective < spec:
+ return False
+
+ # This special case is here so that, unless the specifier itself
+ # includes a pre-release version, we do not accept pre-release
+ # versions for the version mentioned in the specifier (e.g. <3.1 should
+ # not match 3.1.dev0, but should match 3.0.dev0).
+ if not spec.is_prerelease and prospective.is_prerelease:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # If we've gotten to here, it means that the prospective version is both
+ # less than the spec version *and* it's not a pre-release of the same
+ # version in the spec.
+ return True
+
+ @_require_version_compare
+ def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+
+ # Convert our spec to a Version instance, since we'll want to work with
+ # it as a version.
+ spec = Version(spec_str)
+
+ # Check to see if the prospective version is greater than the spec
+ # version. If it's not we can short circuit and just return False now
+ # instead of doing extra unneeded work.
+ if not prospective > spec:
+ return False
+
+ # This special case is here so that, unless the specifier itself
+ # includes a post-release version, we do not accept
+ # post-release versions for the version mentioned in the specifier
+ # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+ if not spec.is_postrelease and prospective.is_postrelease:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # Ensure that we do not allow a local version of the version mentioned
+ # in the specifier, which is technically greater than, to match.
+ if prospective.local is not None:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # If we've gotten to here, it means that the prospective version is
+ # greater than the spec version *and* it's not a post-release or local
+ # version of the same version in the spec.
+ return True
+
+ def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
+ return str(prospective).lower() == str(spec).lower()
+
+ @property
+ def prereleases(self) -> bool:
+
+ # If there is an explicit prereleases set for this, then we'll just
+ # blindly use that.
+ if self._prereleases is not None:
+ return self._prereleases
+
+ # Look at all of our specifiers and determine if they are inclusive
+ # operators, and if so, whether they include an explicit
+ # prerelease.
+ operator, version = self._spec
+ if operator in ["==", ">=", "<=", "~=", "==="]:
+ # The == specifier can include a trailing .*, if it does we
+ # want to remove before parsing.
+ if operator == "==" and version.endswith(".*"):
+ version = version[:-2]
+
+ # Parse the version, and if it is a pre-release, then this
+ # specifier allows pre-releases.
+ if parse(version).is_prerelease:
+ return True
+
+ return False
+
+ @prereleases.setter
+ def prereleases(self, value: bool) -> None:
+ self._prereleases = value
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version: str) -> List[str]:
+ result: List[str] = []
+ for item in version.split("."):
+ match = _prefix_regex.search(item)
+ if match:
+ result.extend(match.groups())
+ else:
+ result.append(item)
+ return result
+
+
+def _is_not_suffix(segment: str) -> bool:
+ return not any(
+ segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+ )
+
+
+def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
+ left_split, right_split = [], []
+
+ # Get the release segment of our versions
+ left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+ right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+ # Get the rest of our versions
+ left_split.append(left[len(left_split[0]) :])
+ right_split.append(right[len(right_split[0]) :])
+
+ # Insert our padding
+ left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+ right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+
+ return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
+
+
+class SpecifierSet(BaseSpecifier):
+ def __init__(
+ self, specifiers: str = "", prereleases: Optional[bool] = None
+ ) -> None:
+
+ # Split on , to break each individual specifier into its own item, and
+ # strip each item to remove leading/trailing whitespace.
+ split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+ # Parse each individual specifier, attempting first to make it a
+ # Specifier and falling back to a LegacySpecifier.
+ parsed: Set[_IndividualSpecifier] = set()
+ for specifier in split_specifiers:
+ try:
+ parsed.add(Specifier(specifier))
+ except InvalidSpecifier:
+ parsed.add(LegacySpecifier(specifier))
+
+ # Turn our parsed specifiers into a frozen set and save them for later.
+ self._specs = frozenset(parsed)
+
+ # Store our prereleases value so we can use it later to determine if
+ # we accept prereleases or not.
+ self._prereleases = prereleases
+
+ def __repr__(self) -> str:
+ pre = (
+ f", prereleases={self.prereleases!r}"
+ if self._prereleases is not None
+ else ""
+ )
+
+ return f"<SpecifierSet({str(self)!r}{pre})>"
+
+ def __str__(self) -> str:
+ return ",".join(sorted(str(s) for s in self._specs))
+
+ def __hash__(self) -> int:
+ return hash(self._specs)
+
+ def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+ if isinstance(other, str):
+ other = SpecifierSet(other)
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ specifier = SpecifierSet()
+ specifier._specs = frozenset(self._specs | other._specs)
+
+ if self._prereleases is None and other._prereleases is not None:
+ specifier._prereleases = other._prereleases
+ elif self._prereleases is not None and other._prereleases is None:
+ specifier._prereleases = self._prereleases
+ elif self._prereleases == other._prereleases:
+ specifier._prereleases = self._prereleases
+ else:
+ raise ValueError(
+ "Cannot combine SpecifierSets with True and False prerelease "
+ "overrides."
+ )
+
+ return specifier
+
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, (str, _IndividualSpecifier)):
+ other = SpecifierSet(str(other))
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ return self._specs == other._specs
+
+ def __len__(self) -> int:
+ return len(self._specs)
+
+ def __iter__(self) -> Iterator[_IndividualSpecifier]:
+ return iter(self._specs)
+
+ @property
+ def prereleases(self) -> Optional[bool]:
+
+ # If we have been given an explicit prerelease modifier, then we'll
+ # pass that through here.
+ if self._prereleases is not None:
+ return self._prereleases
+
+ # If we don't have any specifiers, and we don't have a forced value,
+ # then we'll just return None since we don't know if this should have
+ # pre-releases or not.
+ if not self._specs:
+ return None
+
+ # Otherwise we'll see if any of the given specifiers accept
+ # prereleases, if any of them do we'll return True, otherwise False.
+ return any(s.prereleases for s in self._specs)
+
+ @prereleases.setter
+ def prereleases(self, value: bool) -> None:
+ self._prereleases = value
+
+ def __contains__(self, item: UnparsedVersion) -> bool:
+ return self.contains(item)
+
+ def contains(
+ self, item: UnparsedVersion, prereleases: Optional[bool] = None
+ ) -> bool:
+
+ # Ensure that our item is a Version or LegacyVersion instance.
+ if not isinstance(item, (LegacyVersion, Version)):
+ item = parse(item)
+
+ # Determine if we're forcing a prerelease or not, if we're not forcing
+ # one for this particular filter call, then we'll use whatever the
+ # SpecifierSet thinks for whether or not we should support prereleases.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # We can determine if we're going to allow pre-releases by looking to
+ # see if any of the underlying items supports them. If none of them do
+ # and this item is a pre-release then we do not allow it and we can
+ # short circuit that here.
+ # Note: This means that 1.0.dev1 would not be contained in something
+ # like >=1.0.devabc; however, it would be in >=1.0.devabc,>0.0.dev0
+ if not prereleases and item.is_prerelease:
+ return False
+
+ # We simply dispatch to the underlying specs here to make sure that the
+ # given version is contained within all of them.
+ # Note: This use of all() here means that an empty set of specifiers
+ # will always return True, this is an explicit design decision.
+ return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+ def filter(
+ self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+ ) -> Iterable[VersionTypeVar]:
+
+ # Determine if we're forcing a prerelease or not, if we're not forcing
+ # one for this particular filter call, then we'll use whatever the
+ # SpecifierSet thinks for whether or not we should support prereleases.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # If we have any specifiers, then we want to wrap our iterable in the
+ # filter method for each one, this will act as a logical AND amongst
+ # each specifier.
+ if self._specs:
+ for spec in self._specs:
+ iterable = spec.filter(iterable, prereleases=bool(prereleases))
+ return iterable
+ # If we do not have any specifiers, then we need to have a rough filter
+ # which will filter out any pre-releases, unless there are no final
+ # releases, and which will filter out LegacyVersion in general.
+ else:
+ filtered: List[VersionTypeVar] = []
+ found_prereleases: List[VersionTypeVar] = []
+
+ item: UnparsedVersion
+ parsed_version: Union[Version, LegacyVersion]
+
+ for item in iterable:
+ # Ensure that we have some kind of Version class for this item.
+ if not isinstance(item, (LegacyVersion, Version)):
+ parsed_version = parse(item)
+ else:
+ parsed_version = item
+
+ # Filter out any item which is parsed as a LegacyVersion
+ if isinstance(parsed_version, LegacyVersion):
+ continue
+
+ # Store any item which is a pre-release for later unless we've
+ # already found a final version or we are accepting prereleases
+ if parsed_version.is_prerelease and not prereleases:
+ if not filtered:
+ found_prereleases.append(item)
+ else:
+ filtered.append(item)
+
+ # If we've found no items except for pre-releases, then we'll go
+ # ahead and use the pre-releases
+ if not filtered and found_prereleases and prereleases is None:
+ return found_prereleases
+
+ return filtered
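
A compact sketch of ``Specifier``/``SpecifierSet`` usage (import paths assume the
vendored ``packaging`` package from this diff):

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    spec = SpecifierSet(">=1.0,<2.0,!=1.3")
    Version("1.4") in spec                             # True
    spec.contains("1.3")                               # False
    list(spec.filter(["1.1", "1.3", "2.0", "1.5a1"]))  # ['1.1']; the prerelease is filtered out
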
diff --git a/testing/web-platform/tests/tools/third_party/packaging/packaging/tags.py b/testing/web-platform/tests/tools/third_party/packaging/packaging/tags.py
new file mode 100644
index 0000000000..9a3d25a71c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/packaging/tags.py
@@ -0,0 +1,487 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import logging
+import platform
+import sys
+import sysconfig
+from importlib.machinery import EXTENSION_SUFFIXES
+from typing import (
+ Dict,
+ FrozenSet,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Sequence,
+ Tuple,
+ Union,
+ cast,
+)
+
+from . import _manylinux, _musllinux
+
+logger = logging.getLogger(__name__)
+
+PythonVersion = Sequence[int]
+MacVersion = Tuple[int, int]
+
+INTERPRETER_SHORT_NAMES: Dict[str, str] = {
+ "python": "py", # Generic.
+ "cpython": "cp",
+ "pypy": "pp",
+ "ironpython": "ip",
+ "jython": "jy",
+}
+
+
+_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
+
+
+class Tag:
+ """
+ A representation of the tag triple for a wheel.
+
+ Instances are considered immutable and thus are hashable. Equality checking
+ is also supported.
+ """
+
+ __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
+
+ def __init__(self, interpreter: str, abi: str, platform: str) -> None:
+ self._interpreter = interpreter.lower()
+ self._abi = abi.lower()
+ self._platform = platform.lower()
+ # The __hash__ of every single element in a Set[Tag] will be evaluated each time
+ # that a set calls its `.isdisjoint()` method, which may be called hundreds of
+ # times when scanning a page of links for packages with tags matching that
+ # Set[Tag]. Pre-computing the value here produces significant speedups for
+ # downstream consumers.
+ self._hash = hash((self._interpreter, self._abi, self._platform))
+
+ @property
+ def interpreter(self) -> str:
+ return self._interpreter
+
+ @property
+ def abi(self) -> str:
+ return self._abi
+
+ @property
+ def platform(self) -> str:
+ return self._platform
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Tag):
+ return NotImplemented
+
+ return (
+ (self._hash == other._hash) # Short-circuit ASAP for perf reasons.
+ and (self._platform == other._platform)
+ and (self._abi == other._abi)
+ and (self._interpreter == other._interpreter)
+ )
+
+ def __hash__(self) -> int:
+ return self._hash
+
+ def __str__(self) -> str:
+ return f"{self._interpreter}-{self._abi}-{self._platform}"
+
+ def __repr__(self) -> str:
+ return f"<{self} @ {id(self)}>"
+
+
+def parse_tag(tag: str) -> FrozenSet[Tag]:
+ """
+ Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
+
+ Returning a set is required due to the possibility that the tag is a
+ compressed tag set.
+ """
+ tags = set()
+ interpreters, abis, platforms = tag.split("-")
+ for interpreter in interpreters.split("."):
+ for abi in abis.split("."):
+ for platform_ in platforms.split("."):
+ tags.add(Tag(interpreter, abi, platform_))
+ return frozenset(tags)
+
+
+def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
+ value = sysconfig.get_config_var(name)
+ if value is None and warn:
+ logger.debug(
+ "Config variable '%s' is unset, Python ABI tag may be incorrect", name
+ )
+ return value
+
+
+def _normalize_string(string: str) -> str:
+ return string.replace(".", "_").replace("-", "_")
+
+
+def _abi3_applies(python_version: PythonVersion) -> bool:
+ """
+ Determine if the Python version supports abi3.
+
+ PEP 384 was first implemented in Python 3.2.
+ """
+ return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+
+
+def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
+ py_version = tuple(py_version) # To allow for version comparison.
+ abis = []
+ version = _version_nodot(py_version[:2])
+ debug = pymalloc = ucs4 = ""
+ with_debug = _get_config_var("Py_DEBUG", warn)
+ has_refcount = hasattr(sys, "gettotalrefcount")
+ # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
+ # extension modules is the best option.
+ # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+ has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+ if with_debug or (with_debug is None and (has_refcount or has_ext)):
+ debug = "d"
+ if py_version < (3, 8):
+ with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
+ if with_pymalloc or with_pymalloc is None:
+ pymalloc = "m"
+ if py_version < (3, 3):
+ unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
+ if unicode_size == 4 or (
+ unicode_size is None and sys.maxunicode == 0x10FFFF
+ ):
+ ucs4 = "u"
+ elif debug:
+ # Debug builds can also load "normal" extension modules.
+ # We can also assume no UCS-4 or pymalloc requirement.
+ abis.append(f"cp{version}")
+ abis.insert(
+ 0,
+ "cp{version}{debug}{pymalloc}{ucs4}".format(
+ version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
+ ),
+ )
+ return abis
+
+
+def cpython_tags(
+ python_version: Optional[PythonVersion] = None,
+ abis: Optional[Iterable[str]] = None,
+ platforms: Optional[Iterable[str]] = None,
+ *,
+ warn: bool = False,
+) -> Iterator[Tag]:
+ """
+ Yields the tags for a CPython interpreter.
+
+ The tags consist of:
+ - cp<python_version>-<abi>-<platform>
+ - cp<python_version>-abi3-<platform>
+ - cp<python_version>-none-<platform>
+ - cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2.
+
+ If python_version only specifies a major version, then user-provided ABIs and
+ the 'none' ABI tag will be used.
+
+ If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
+ their normal position and not at the beginning.
+ """
+ if not python_version:
+ python_version = sys.version_info[:2]
+
+ interpreter = f"cp{_version_nodot(python_version[:2])}"
+
+ if abis is None:
+ if len(python_version) > 1:
+ abis = _cpython_abis(python_version, warn)
+ else:
+ abis = []
+ abis = list(abis)
+ # 'abi3' and 'none' are explicitly handled later.
+ for explicit_abi in ("abi3", "none"):
+ try:
+ abis.remove(explicit_abi)
+ except ValueError:
+ pass
+
+ platforms = list(platforms or platform_tags())
+ for abi in abis:
+ for platform_ in platforms:
+ yield Tag(interpreter, abi, platform_)
+ if _abi3_applies(python_version):
+ yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
+ yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
+
+ if _abi3_applies(python_version):
+ for minor_version in range(python_version[1] - 1, 1, -1):
+ for platform_ in platforms:
+ interpreter = "cp{version}".format(
+ version=_version_nodot((python_version[0], minor_version))
+ )
+ yield Tag(interpreter, "abi3", platform_)
+
+
+def _generic_abi() -> Iterator[str]:
+ abi = sysconfig.get_config_var("SOABI")
+ if abi:
+ yield _normalize_string(abi)
+
+
+def generic_tags(
+ interpreter: Optional[str] = None,
+ abis: Optional[Iterable[str]] = None,
+ platforms: Optional[Iterable[str]] = None,
+ *,
+ warn: bool = False,
+) -> Iterator[Tag]:
+ """
+ Yields the tags for a generic interpreter.
+
+ The tags consist of:
+ - <interpreter>-<abi>-<platform>
+
+ The "none" ABI will be added if it was not explicitly provided.
+ """
+ if not interpreter:
+ interp_name = interpreter_name()
+ interp_version = interpreter_version(warn=warn)
+ interpreter = "".join([interp_name, interp_version])
+ if abis is None:
+ abis = _generic_abi()
+ platforms = list(platforms or platform_tags())
+ abis = list(abis)
+ if "none" not in abis:
+ abis.append("none")
+ for abi in abis:
+ for platform_ in platforms:
+ yield Tag(interpreter, abi, platform_)
+
+
+def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
+ """
+ Yields Python versions in descending order.
+
+ After the latest version, the major-only version will be yielded, and then
+ all previous versions of that major version.
+ """
+ if len(py_version) > 1:
+ yield f"py{_version_nodot(py_version[:2])}"
+ yield f"py{py_version[0]}"
+ if len(py_version) > 1:
+ for minor in range(py_version[1] - 1, -1, -1):
+ yield f"py{_version_nodot((py_version[0], minor))}"
+
+
+def compatible_tags(
+ python_version: Optional[PythonVersion] = None,
+ interpreter: Optional[str] = None,
+ platforms: Optional[Iterable[str]] = None,
+) -> Iterator[Tag]:
+ """
+ Yields the sequence of tags that are compatible with a specific version of Python.
+
+ The tags consist of:
+ - py*-none-<platform>
+ - <interpreter>-none-any # ... if `interpreter` is provided.
+ - py*-none-any
+ """
+ if not python_version:
+ python_version = sys.version_info[:2]
+ platforms = list(platforms or platform_tags())
+ for version in _py_interpreter_range(python_version):
+ for platform_ in platforms:
+ yield Tag(version, "none", platform_)
+ if interpreter:
+ yield Tag(interpreter, "none", "any")
+ for version in _py_interpreter_range(python_version):
+ yield Tag(version, "none", "any")
+
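Continuing the sketch above with the same assumed inputs, compatible_tags() yields the interpreter-agnostic fallbacks; the interpreter and platform strings are again made up for the example:

    from packaging.tags import compatible_tags

    tags = list(compatible_tags((3, 9), interpreter="cp39", platforms=["linux_x86_64"]))
    print(tags[0])    # py39-none-linux_x86_64
    print(tags[-1])   # py30-none-any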
+
+def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+ if not is_32bit:
+ return arch
+
+ if arch.startswith("ppc"):
+ return "ppc"
+
+ return "i386"
+
+
+def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+ formats = [cpu_arch]
+ if cpu_arch == "x86_64":
+ if version < (10, 4):
+ return []
+ formats.extend(["intel", "fat64", "fat32"])
+
+ elif cpu_arch == "i386":
+ if version < (10, 4):
+ return []
+ formats.extend(["intel", "fat32", "fat"])
+
+ elif cpu_arch == "ppc64":
+ # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+ if version > (10, 5) or version < (10, 4):
+ return []
+ formats.append("fat64")
+
+ elif cpu_arch == "ppc":
+ if version > (10, 6):
+ return []
+ formats.extend(["fat32", "fat"])
+
+ if cpu_arch in {"arm64", "x86_64"}:
+ formats.append("universal2")
+
+ if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+ formats.append("universal")
+
+ return formats
+
+
+def mac_platforms(
+ version: Optional[MacVersion] = None, arch: Optional[str] = None
+) -> Iterator[str]:
+ """
+ Yields the platform tags for a macOS system.
+
+ The `version` parameter is a two-item tuple specifying the macOS version to
+ generate platform tags for. The `arch` parameter is the CPU architecture to
+ generate platform tags for. Both parameters default to the appropriate value
+ for the current system.
+ """
+ version_str, _, cpu_arch = platform.mac_ver()
+    if version is None:
+        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+    if arch is None:
+        arch = _mac_arch(cpu_arch)
+
+ if (10, 0) <= version and version < (11, 0):
+ # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+ # "minor" version number. The major version was always 10.
+ for minor_version in range(version[1], -1, -1):
+ compat_version = 10, minor_version
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=10, minor=minor_version, binary_format=binary_format
+ )
+
+ if version >= (11, 0):
+ # Starting with Mac OS 11, each yearly release bumps the major version
+ # number. The minor versions are now the midyear updates.
+ for major_version in range(version[0], 10, -1):
+ compat_version = major_version, 0
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=major_version, minor=0, binary_format=binary_format
+ )
+
+ if version >= (11, 0):
+ # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
+ # Arm64 support was introduced in 11.0, so no Arm binaries from previous
+ # releases exist.
+ #
+ # However, the "universal2" binary format can have a
+ # macOS version earlier than 11.0 when the x86_64 part of the binary supports
+ # that version of macOS.
+ if arch == "x86_64":
+ for minor_version in range(16, 3, -1):
+ compat_version = 10, minor_version
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=compat_version[0],
+ minor=compat_version[1],
+ binary_format=binary_format,
+ )
+ else:
+ for minor_version in range(16, 3, -1):
+ compat_version = 10, minor_version
+ binary_format = "universal2"
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=compat_version[0],
+ minor=compat_version[1],
+ binary_format=binary_format,
+ )
+
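A hedged sketch of mac_platforms() with explicit arguments so it does not depend on the machine running it; the macOS version and architecture below are illustrative assumptions:

    from packaging.tags import mac_platforms

    tags = list(mac_platforms(version=(10, 15), arch="x86_64"))
    print(tags[0])    # macosx_10_15_x86_64
    print(tags[1])    # macosx_10_15_intel
    print(tags[-1])   # macosx_10_4_universal (the walk back stops at 10.4)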
+
+def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
+ linux = _normalize_string(sysconfig.get_platform())
+ if is_32bit:
+ if linux == "linux_x86_64":
+ linux = "linux_i686"
+ elif linux == "linux_aarch64":
+ linux = "linux_armv7l"
+ _, arch = linux.split("_", 1)
+ yield from _manylinux.platform_tags(linux, arch)
+ yield from _musllinux.platform_tags(arch)
+ yield linux
+
+
+def _generic_platforms() -> Iterator[str]:
+ yield _normalize_string(sysconfig.get_platform())
+
+
+def platform_tags() -> Iterator[str]:
+ """
+ Provides the platform tags for this installation.
+ """
+ if platform.system() == "Darwin":
+ return mac_platforms()
+ elif platform.system() == "Linux":
+ return _linux_platforms()
+ else:
+ return _generic_platforms()
+
+
+def interpreter_name() -> str:
+ """
+ Returns the name of the running interpreter.
+ """
+ name = sys.implementation.name
+ return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def interpreter_version(*, warn: bool = False) -> str:
+ """
+ Returns the version of the running interpreter.
+ """
+ version = _get_config_var("py_version_nodot", warn=warn)
+ if version:
+ version = str(version)
+ else:
+ version = _version_nodot(sys.version_info[:2])
+ return version
+
+
+def _version_nodot(version: PythonVersion) -> str:
+ return "".join(map(str, version))
+
+
+def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
+ """
+ Returns the sequence of tag triples for the running interpreter.
+
+ The order of the sequence corresponds to priority order for the
+ interpreter, from most to least important.
+ """
+
+ interp_name = interpreter_name()
+ if interp_name == "cp":
+ yield from cpython_tags(warn=warn)
+ else:
+ yield from generic_tags()
+
+ if interp_name == "pp":
+ yield from compatible_tags(interpreter="pp3")
+ else:
+ yield from compatible_tags()
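To tie the module together, a small usage sketch; the printed tag is only an example of what one particular CPython/Linux combination might produce:

    from packaging.tags import sys_tags

    # sys_tags() yields from most to least specific, so the first tag that
    # matches a candidate wheel identifies the best wheel for this interpreter.
    best = next(iter(sys_tags()))
    print(best)   # e.g. cp39-cp39-manylinux_2_31_x86_64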
diff --git a/testing/web-platform/tests/tools/third_party/packaging/packaging/utils.py b/testing/web-platform/tests/tools/third_party/packaging/packaging/utils.py
new file mode 100644
index 0000000000..bab11b80c6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/packaging/utils.py
@@ -0,0 +1,136 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import re
+from typing import FrozenSet, NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+NormalizedName = NewType("NormalizedName", str)
+
+
+class InvalidWheelFilename(ValueError):
+ """
+    An invalid wheel filename was found; users should refer to PEP 427.
+ """
+
+
+class InvalidSdistFilename(ValueError):
+ """
+    An invalid sdist filename was found; users should refer to the packaging user guide.
+ """
+
+
+_canonicalize_regex = re.compile(r"[-_.]+")
+# PEP 427: The build number must start with a digit.
+_build_tag_regex = re.compile(r"(\d+)(.*)")
+
+
+def canonicalize_name(name: str) -> NormalizedName:
+ # This is taken from PEP 503.
+ value = _canonicalize_regex.sub("-", name).lower()
+ return cast(NormalizedName, value)
+
+
+def canonicalize_version(version: Union[Version, str]) -> str:
+ """
+ This is very similar to Version.__str__, but has one subtle difference
+    in the way it handles the release segment.
+ """
+ if isinstance(version, str):
+ try:
+ parsed = Version(version)
+ except InvalidVersion:
+ # Legacy versions cannot be normalized
+ return version
+ else:
+ parsed = version
+
+ parts = []
+
+ # Epoch
+ if parsed.epoch != 0:
+ parts.append(f"{parsed.epoch}!")
+
+ # Release segment
+ # NB: This strips trailing '.0's to normalize
+ parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release)))
+
+ # Pre-release
+ if parsed.pre is not None:
+ parts.append("".join(str(x) for x in parsed.pre))
+
+ # Post-release
+ if parsed.post is not None:
+ parts.append(f".post{parsed.post}")
+
+ # Development release
+ if parsed.dev is not None:
+ parts.append(f".dev{parsed.dev}")
+
+ # Local version segment
+ if parsed.local is not None:
+ parts.append(f"+{parsed.local}")
+
+ return "".join(parts)
+
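For example, canonicalize_version() strips trailing ".0" release components while otherwise rendering like Version.__str__; the inputs below are illustrative:

    from packaging.utils import canonicalize_version

    print(canonicalize_version("1.4.0"))          # 1.4
    print(canonicalize_version("1.4.0.dev0"))     # 1.4.dev0
    print(canonicalize_version("not a version"))  # returned unchanged (legacy)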
+
+def parse_wheel_filename(
+ filename: str,
+) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
+ if not filename.endswith(".whl"):
+ raise InvalidWheelFilename(
+ f"Invalid wheel filename (extension must be '.whl'): {filename}"
+ )
+
+ filename = filename[:-4]
+ dashes = filename.count("-")
+ if dashes not in (4, 5):
+ raise InvalidWheelFilename(
+ f"Invalid wheel filename (wrong number of parts): {filename}"
+ )
+
+ parts = filename.split("-", dashes - 2)
+ name_part = parts[0]
+ # See PEP 427 for the rules on escaping the project name
+ if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
+ raise InvalidWheelFilename(f"Invalid project name: {filename}")
+ name = canonicalize_name(name_part)
+ version = Version(parts[1])
+ if dashes == 5:
+ build_part = parts[2]
+ build_match = _build_tag_regex.match(build_part)
+ if build_match is None:
+ raise InvalidWheelFilename(
+ f"Invalid build number: {build_part} in '{filename}'"
+ )
+ build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
+ else:
+ build = ()
+ tags = parse_tag(parts[-1])
+ return (name, version, build, tags)
+
+
+def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
+ if filename.endswith(".tar.gz"):
+ file_stem = filename[: -len(".tar.gz")]
+ elif filename.endswith(".zip"):
+ file_stem = filename[: -len(".zip")]
+ else:
+ raise InvalidSdistFilename(
+ f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
+ f" {filename}"
+ )
+
+ # We are requiring a PEP 440 version, which cannot contain dashes,
+ # so we split on the last dash.
+ name_part, sep, version_part = file_stem.rpartition("-")
+ if not sep:
+ raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
+
+ name = canonicalize_name(name_part)
+ version = Version(version_part)
+ return (name, version)
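A short sketch of the two filename parsers above, using made-up but well-formed filenames:

    from packaging.utils import (
        canonicalize_name,
        parse_sdist_filename,
        parse_wheel_filename,
    )

    name, version, build, tags = parse_wheel_filename("pip-21.3.1-py3-none-any.whl")
    # name == "pip", version == Version("21.3.1"), build == (),
    # tags == frozenset({Tag("py3", "none", "any")})

    print(parse_sdist_filename("Sample.Project-1.0.tar.gz"))
    # ('sample-project', <Version('1.0')>)
    print(canonicalize_name("Sample.Project"))    # sample-project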
diff --git a/testing/web-platform/tests/tools/third_party/packaging/packaging/version.py b/testing/web-platform/tests/tools/third_party/packaging/packaging/version.py
new file mode 100644
index 0000000000..de9a09a4ed
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/packaging/version.py
@@ -0,0 +1,504 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import collections
+import itertools
+import re
+import warnings
+from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
+
+InfiniteTypes = Union[InfinityType, NegativeInfinityType]
+PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
+SubLocalType = Union[InfiniteTypes, int, str]
+LocalType = Union[
+ NegativeInfinityType,
+ Tuple[
+ Union[
+ SubLocalType,
+ Tuple[SubLocalType, str],
+ Tuple[NegativeInfinityType, SubLocalType],
+ ],
+ ...,
+ ],
+]
+CmpKey = Tuple[
+ int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
+]
+LegacyCmpKey = Tuple[int, Tuple[str, ...]]
+VersionComparisonMethod = Callable[
+ [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
+]
+
+_Version = collections.namedtuple(
+ "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
+)
+
+
+def parse(version: str) -> Union["LegacyVersion", "Version"]:
+ """
+ Parse the given version string and return either a :class:`Version` object
+ or a :class:`LegacyVersion` object depending on if the given version is
+ a valid PEP 440 version or a legacy version.
+ """
+ try:
+ return Version(version)
+ except InvalidVersion:
+ return LegacyVersion(version)
+
+
+class InvalidVersion(ValueError):
+ """
+    An invalid version was found; users should refer to PEP 440.
+ """
+
+
+class _BaseVersion:
+ _key: Union[CmpKey, LegacyCmpKey]
+
+ def __hash__(self) -> int:
+ return hash(self._key)
+
+ # Please keep the duplicated `isinstance` check
+ # in the six comparisons hereunder
+ # unless you find a way to avoid adding overhead function calls.
+ def __lt__(self, other: "_BaseVersion") -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key < other._key
+
+ def __le__(self, other: "_BaseVersion") -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key <= other._key
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key == other._key
+
+ def __ge__(self, other: "_BaseVersion") -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key >= other._key
+
+ def __gt__(self, other: "_BaseVersion") -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key > other._key
+
+ def __ne__(self, other: object) -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key != other._key
+
+
+class LegacyVersion(_BaseVersion):
+ def __init__(self, version: str) -> None:
+ self._version = str(version)
+ self._key = _legacy_cmpkey(self._version)
+
+ warnings.warn(
+ "Creating a LegacyVersion has been deprecated and will be "
+ "removed in the next major release",
+ DeprecationWarning,
+ )
+
+ def __str__(self) -> str:
+ return self._version
+
+ def __repr__(self) -> str:
+ return f"<LegacyVersion('{self}')>"
+
+ @property
+ def public(self) -> str:
+ return self._version
+
+ @property
+ def base_version(self) -> str:
+ return self._version
+
+ @property
+ def epoch(self) -> int:
+ return -1
+
+ @property
+ def release(self) -> None:
+ return None
+
+ @property
+ def pre(self) -> None:
+ return None
+
+ @property
+ def post(self) -> None:
+ return None
+
+ @property
+ def dev(self) -> None:
+ return None
+
+ @property
+ def local(self) -> None:
+ return None
+
+ @property
+ def is_prerelease(self) -> bool:
+ return False
+
+ @property
+ def is_postrelease(self) -> bool:
+ return False
+
+ @property
+ def is_devrelease(self) -> bool:
+ return False
+
+
+_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
+
+_legacy_version_replacement_map = {
+ "pre": "c",
+ "preview": "c",
+ "-": "final-",
+ "rc": "c",
+ "dev": "@",
+}
+
+
+def _parse_version_parts(s: str) -> Iterator[str]:
+ for part in _legacy_version_component_re.split(s):
+ part = _legacy_version_replacement_map.get(part, part)
+
+ if not part or part == ".":
+ continue
+
+ if part[:1] in "0123456789":
+ # pad for numeric comparison
+ yield part.zfill(8)
+ else:
+ yield "*" + part
+
+ # ensure that alpha/beta/candidate are before final
+ yield "*final"
+
+
+def _legacy_cmpkey(version: str) -> LegacyCmpKey:
+
+    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
+    # greater than or equal to 0. This effectively sorts every LegacyVersion,
+    # which uses the de facto standard originally implemented by setuptools,
+    # before all PEP 440 versions.
+ epoch = -1
+
+    # This scheme is taken from pkg_resources.parse_version of setuptools,
+    # prior to its adoption of the packaging library.
+ parts: List[str] = []
+ for part in _parse_version_parts(version.lower()):
+ if part.startswith("*"):
+ # remove "-" before a prerelease tag
+ if part < "*final":
+ while parts and parts[-1] == "*final-":
+ parts.pop()
+
+ # remove trailing zeros from each series of numeric parts
+ while parts and parts[-1] == "00000000":
+ parts.pop()
+
+ parts.append(part)
+
+ return epoch, tuple(parts)
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+VERSION_PATTERN = r"""
+ v?
+ (?:
+ (?:(?P<epoch>[0-9]+)!)? # epoch
+ (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
+ (?P<pre> # pre-release
+ [-_\.]?
+ (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+ [-_\.]?
+ (?P<pre_n>[0-9]+)?
+ )?
+ (?P<post> # post release
+ (?:-(?P<post_n1>[0-9]+))
+ |
+ (?:
+ [-_\.]?
+ (?P<post_l>post|rev|r)
+ [-_\.]?
+ (?P<post_n2>[0-9]+)?
+ )
+ )?
+ (?P<dev> # dev release
+ [-_\.]?
+ (?P<dev_l>dev)
+ [-_\.]?
+ (?P<dev_n>[0-9]+)?
+ )?
+ )
+ (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
+"""
+
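Because VERSION_PATTERN is deliberately left unanchored, third-party code is expected to anchor it itself; this sketch mirrors how the Version class below compiles it, with made-up test strings:

    import re
    from packaging.version import VERSION_PATTERN

    candidate = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
    print(bool(candidate.match("1!2.0.post1+local.7")))  # True
    print(bool(candidate.match("version one")))          # False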
+
+class Version(_BaseVersion):
+
+ _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ def __init__(self, version: str) -> None:
+
+ # Validate the version and parse it into pieces
+ match = self._regex.search(version)
+ if not match:
+ raise InvalidVersion(f"Invalid version: '{version}'")
+
+ # Store the parsed out pieces of the version
+ self._version = _Version(
+ epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+ release=tuple(int(i) for i in match.group("release").split(".")),
+ pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+ post=_parse_letter_version(
+ match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+ ),
+ dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+ local=_parse_local_version(match.group("local")),
+ )
+
+ # Generate a key which will be used for sorting
+ self._key = _cmpkey(
+ self._version.epoch,
+ self._version.release,
+ self._version.pre,
+ self._version.post,
+ self._version.dev,
+ self._version.local,
+ )
+
+ def __repr__(self) -> str:
+ return f"<Version('{self}')>"
+
+ def __str__(self) -> str:
+ parts = []
+
+ # Epoch
+ if self.epoch != 0:
+ parts.append(f"{self.epoch}!")
+
+ # Release segment
+ parts.append(".".join(str(x) for x in self.release))
+
+ # Pre-release
+ if self.pre is not None:
+ parts.append("".join(str(x) for x in self.pre))
+
+ # Post-release
+ if self.post is not None:
+ parts.append(f".post{self.post}")
+
+ # Development release
+ if self.dev is not None:
+ parts.append(f".dev{self.dev}")
+
+ # Local version segment
+ if self.local is not None:
+ parts.append(f"+{self.local}")
+
+ return "".join(parts)
+
+ @property
+ def epoch(self) -> int:
+ _epoch: int = self._version.epoch
+ return _epoch
+
+ @property
+ def release(self) -> Tuple[int, ...]:
+ _release: Tuple[int, ...] = self._version.release
+ return _release
+
+ @property
+ def pre(self) -> Optional[Tuple[str, int]]:
+ _pre: Optional[Tuple[str, int]] = self._version.pre
+ return _pre
+
+ @property
+ def post(self) -> Optional[int]:
+ return self._version.post[1] if self._version.post else None
+
+ @property
+ def dev(self) -> Optional[int]:
+ return self._version.dev[1] if self._version.dev else None
+
+ @property
+ def local(self) -> Optional[str]:
+ if self._version.local:
+ return ".".join(str(x) for x in self._version.local)
+ else:
+ return None
+
+ @property
+ def public(self) -> str:
+ return str(self).split("+", 1)[0]
+
+ @property
+ def base_version(self) -> str:
+ parts = []
+
+ # Epoch
+ if self.epoch != 0:
+ parts.append(f"{self.epoch}!")
+
+ # Release segment
+ parts.append(".".join(str(x) for x in self.release))
+
+ return "".join(parts)
+
+ @property
+ def is_prerelease(self) -> bool:
+ return self.dev is not None or self.pre is not None
+
+ @property
+ def is_postrelease(self) -> bool:
+ return self.post is not None
+
+ @property
+ def is_devrelease(self) -> bool:
+ return self.dev is not None
+
+ @property
+ def major(self) -> int:
+ return self.release[0] if len(self.release) >= 1 else 0
+
+ @property
+ def minor(self) -> int:
+ return self.release[1] if len(self.release) >= 2 else 0
+
+ @property
+ def micro(self) -> int:
+ return self.release[2] if len(self.release) >= 3 else 0
+
+
+def _parse_letter_version(
+ letter: str, number: Union[str, bytes, SupportsInt]
+) -> Optional[Tuple[str, int]]:
+
+ if letter:
+ # We consider there to be an implicit 0 in a pre-release if there is
+ # not a numeral associated with it.
+ if number is None:
+ number = 0
+
+ # We normalize any letters to their lower case form
+ letter = letter.lower()
+
+ # We consider some words to be alternate spellings of other words and
+ # in those cases we want to normalize the spellings to our preferred
+ # spelling.
+ if letter == "alpha":
+ letter = "a"
+ elif letter == "beta":
+ letter = "b"
+ elif letter in ["c", "pre", "preview"]:
+ letter = "rc"
+ elif letter in ["rev", "r"]:
+ letter = "post"
+
+ return letter, int(number)
+ if not letter and number:
+ # We assume if we are given a number, but we are not given a letter
+ # then this is using the implicit post release syntax (e.g. 1.0-1)
+ letter = "post"
+
+ return letter, int(number)
+
+ return None
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: str) -> Optional[LocalType]:
+ """
+ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+ """
+ if local is not None:
+ return tuple(
+ part.lower() if not part.isdigit() else int(part)
+ for part in _local_version_separators.split(local)
+ )
+ return None
+
+
+def _cmpkey(
+ epoch: int,
+ release: Tuple[int, ...],
+ pre: Optional[Tuple[str, int]],
+ post: Optional[Tuple[str, int]],
+ dev: Optional[Tuple[str, int]],
+ local: Optional[Tuple[SubLocalType]],
+) -> CmpKey:
+
+ # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we reverse the list, drop the now-leading
+    # zeros until we reach something non-zero, re-reverse the remainder back
+    # into the correct order, and use the resulting tuple as our sorting key.
+ _release = tuple(
+ reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+ )
+
+ # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+ # We'll do this by abusing the pre segment, but we _only_ want to do this
+ # if there is not a pre or a post segment. If we have one of those then
+ # the normal sorting rules will handle this case correctly.
+ if pre is None and post is None and dev is not None:
+ _pre: PrePostDevType = NegativeInfinity
+ # Versions without a pre-release (except as noted above) should sort after
+ # those with one.
+ elif pre is None:
+ _pre = Infinity
+ else:
+ _pre = pre
+
+ # Versions without a post segment should sort before those with one.
+ if post is None:
+ _post: PrePostDevType = NegativeInfinity
+
+ else:
+ _post = post
+
+ # Versions without a development segment should sort after those with one.
+ if dev is None:
+ _dev: PrePostDevType = Infinity
+
+ else:
+ _dev = dev
+
+ if local is None:
+ # Versions without a local segment should sort before those with one.
+ _local: LocalType = NegativeInfinity
+ else:
+ # Versions with a local segment need that segment parsed to implement
+ # the sorting rules in PEP440.
+ # - Alpha numeric segments sort before numeric segments
+ # - Alpha numeric segments sort lexicographically
+ # - Numeric segments sort numerically
+ # - Shorter versions sort before longer versions when the prefixes
+ # match exactly
+ _local = tuple(
+ (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+ )
+
+ return epoch, _release, _pre, _post, _dev, _local
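Finally, a hedged usage sketch of the public API defined above; the example strings are arbitrary, and parse() also emits a DeprecationWarning when it falls back to LegacyVersion:

    from packaging.version import Version, parse

    v = Version("1.0rc1")
    print(v.is_prerelease, v.base_version)   # True 1.0

    # PEP 440 ordering: dev releases < pre-releases < final < post releases.
    print(Version("1.0.dev0") < Version("1.0a1") < Version("1.0") < Version("1.0.post1"))  # True

    # Strings that PEP 440 cannot describe fall back to LegacyVersion.
    print(type(parse("french toast")).__name__)   # LegacyVersion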
diff --git a/testing/web-platform/tests/tools/third_party/packaging/pyproject.toml b/testing/web-platform/tests/tools/third_party/packaging/pyproject.toml
new file mode 100644
index 0000000000..cb37b725dc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = ['setuptools >= 40.8.0', 'wheel']
+build-backend = 'setuptools.build_meta'
diff --git a/testing/web-platform/tests/tools/third_party/packaging/setup.cfg b/testing/web-platform/tests/tools/third_party/packaging/setup.cfg
new file mode 100644
index 0000000000..c97a4e4409
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/setup.cfg
@@ -0,0 +1,3 @@
+[isort]
+profile = black
+combine_as_imports = true
diff --git a/testing/web-platform/tests/tools/third_party/packaging/setup.py b/testing/web-platform/tests/tools/third_party/packaging/setup.py
new file mode 100644
index 0000000000..ba1023f58a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/setup.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import os
+import re
+
+# While I generally consider it an antipattern to try and support both
+# setuptools and distutils with a single setup.py, in this specific instance
+# where packaging is a dependency of setuptools, it can create a circular
+# dependency when projects attempt to unbundle stuff from setuptools and pip.
+# Though we don't really support that, it makes things easier if we do this and
+# should hopefully cause fewer issues for end users.
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+
+base_dir = os.path.dirname(__file__)
+
+about = {}
+with open(os.path.join(base_dir, "packaging", "__about__.py")) as f:
+ exec(f.read(), about)
+
+with open(os.path.join(base_dir, "README.rst")) as f:
+ long_description = f.read()
+
+with open(os.path.join(base_dir, "CHANGELOG.rst")) as f:
+    # Remove :issue:`ddd` tags that break the description rendering
+ changelog = re.sub(
+ r":issue:`(\d+)`",
+ r"`#\1 <https://github.com/pypa/packaging/issues/\1>`__",
+ f.read(),
+ )
+ long_description = "\n".join([long_description, changelog])
+
+
+setup(
+ name=about["__title__"],
+ version=about["__version__"],
+ description=about["__summary__"],
+ long_description=long_description,
+ long_description_content_type="text/x-rst",
+ license=about["__license__"],
+ url=about["__uri__"],
+ author=about["__author__"],
+ author_email=about["__email__"],
+ python_requires=">=3.6",
+    install_requires=["pyparsing>=2.0.2,!=3.0.5"],  # 2.0.2+ needed to avoid issue #91
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "License :: OSI Approved :: BSD License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3 :: Only",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ ],
+ packages=["packaging"],
+ package_data={"packaging": ["py.typed"]},
+)
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tasks/__init__.py b/testing/web-platform/tests/tools/third_party/packaging/tasks/__init__.py
new file mode 100644
index 0000000000..883da23c96
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tasks/__init__.py
@@ -0,0 +1,9 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import invoke
+
+from . import check
+
+ns = invoke.Collection(check)
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tasks/check.py b/testing/web-platform/tests/tools/third_party/packaging/tasks/check.py
new file mode 100644
index 0000000000..b0896e1f76
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tasks/check.py
@@ -0,0 +1,141 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import itertools
+import json
+import os.path
+import xmlrpc.client
+
+import invoke
+import pkg_resources
+import progress.bar
+
+from packaging.version import Version
+
+from .paths import CACHE
+
+
+def _parse_version(value):
+ try:
+ return Version(value)
+ except ValueError:
+ return None
+
+
+@invoke.task
+def pep440(cached=False):
+ cache_path = os.path.join(CACHE, "pep440.json")
+
+ # If we were given --cached, then we want to attempt to use cached data if
+ # possible
+ if cached:
+ try:
+ with open(cache_path) as fp:
+ data = json.load(fp)
+ except Exception:
+ data = None
+ else:
+ data = None
+
+ # If we don't have data, then let's go fetch it from PyPI
+ if data is None:
+ bar = progress.bar.ShadyBar("Fetching Versions")
+ client = xmlrpc.client.Server("https://pypi.python.org/pypi")
+
+ data = {
+ project: client.package_releases(project, True)
+ for project in bar.iter(client.list_packages())
+ }
+
+ os.makedirs(os.path.dirname(cache_path), exist_ok=True)
+ with open(cache_path, "w") as fp:
+ json.dump(data, fp)
+
+ # Get a list of all of the version numbers on PyPI
+ all_versions = list(itertools.chain.from_iterable(data.values()))
+
+    # Determine how many of the version numbers are valid PEP 440 versions
+ parsed_versions = [
+ _parse_version(v) for v in all_versions if _parse_version(v) is not None
+ ]
+
+ # Determine a list of projects that sort exactly the same between
+ # pkg_resources and PEP 440
+ compatible_sorting = [
+ project
+ for project, versions in data.items()
+ if (
+ sorted(versions, key=pkg_resources.parse_version)
+ == sorted((x for x in versions if _parse_version(x)), key=Version)
+ )
+ ]
+
+ # Determine a list of projects that sort exactly the same between
+ # pkg_resources and PEP 440 when invalid versions are filtered out
+ filtered_compatible_sorting = [
+ project
+ for project, versions in (
+ (p, [v for v in vs if _parse_version(v) is not None])
+ for p, vs in data.items()
+ )
+ if (
+ sorted(versions, key=pkg_resources.parse_version)
+ == sorted(versions, key=Version)
+ )
+ ]
+
+ # Determine a list of projects which do not have any versions that are
+ # valid with PEP 440 and which have any versions registered
+ only_invalid_versions = [
+ project
+ for project, versions in data.items()
+ if (versions and not [v for v in versions if _parse_version(v) is not None])
+ ]
+
+    # Determine a list of projects whose latest version differs between
+    # pkg_resources and PEP 440
+ differing_latest_versions = [
+ project
+ for project, versions in data.items()
+ if (
+ sorted(versions, key=pkg_resources.parse_version)[-1:]
+ != sorted((x for x in versions if _parse_version(x)), key=Version)[-1:]
+ )
+ ]
+
+ # Print out our findings
+ print(
+ "Total Version Compatibility: {}/{} ({:.2%})".format(
+ len(parsed_versions),
+ len(all_versions),
+ len(parsed_versions) / len(all_versions),
+ )
+ )
+ print(
+ "Total Sorting Compatibility (Unfiltered): {}/{} ({:.2%})".format(
+ len(compatible_sorting), len(data), len(compatible_sorting) / len(data)
+ )
+ )
+ print(
+ "Total Sorting Compatibility (Filtered): {}/{} ({:.2%})".format(
+ len(filtered_compatible_sorting),
+ len(data),
+ len(filtered_compatible_sorting) / len(data),
+ )
+ )
+ print(
+ "Projects with No Compatible Versions: {}/{} ({:.2%})".format(
+ len(only_invalid_versions),
+ len(data),
+ len(only_invalid_versions) / len(data),
+ )
+ )
+ print(
+ "Projects with Differing Latest Version: {}/{} ({:.2%})".format(
+ len(differing_latest_versions),
+ len(data),
+ len(differing_latest_versions) / len(data),
+ )
+ )
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tasks/paths.py b/testing/web-platform/tests/tools/third_party/packaging/tasks/paths.py
new file mode 100644
index 0000000000..0888fb6967
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tasks/paths.py
@@ -0,0 +1,9 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import os.path
+
+PROJECT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+
+CACHE = os.path.join(PROJECT, ".cache")
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tasks/requirements.txt b/testing/web-platform/tests/tools/third_party/packaging/tasks/requirements.txt
new file mode 100644
index 0000000000..5677c0e89b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tasks/requirements.txt
@@ -0,0 +1,3 @@
+# The requirements needed to invoke the tasks
+invoke
+progress
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/__init__.py b/testing/web-platform/tests/tools/third_party/packaging/tests/__init__.py
new file mode 100644
index 0000000000..b509336233
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/hello-world.c b/testing/web-platform/tests/tools/third_party/packaging/tests/hello-world.c
new file mode 100644
index 0000000000..5e591c3ec5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/hello-world.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+int main(int argc, char* argv[])
+{
+ printf("Hello world");
+ return 0;
+}
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/build.sh b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/build.sh
new file mode 100755
index 0000000000..5071561560
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/build.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+
+set -x
+set -e
+
+if [ $# -eq 0 ]; then
+ docker run --rm -v $(pwd):/home/hello-world arm32v5/debian /home/hello-world/manylinux/build.sh incontainer 52
+ docker run --rm -v $(pwd):/home/hello-world arm32v7/debian /home/hello-world/manylinux/build.sh incontainer 52
+ docker run --rm -v $(pwd):/home/hello-world i386/debian /home/hello-world/manylinux/build.sh incontainer 52
+ docker run --rm -v $(pwd):/home/hello-world s390x/debian /home/hello-world/manylinux/build.sh incontainer 64
+ docker run --rm -v $(pwd):/home/hello-world debian /home/hello-world/manylinux/build.sh incontainer 64
+ docker run --rm -v $(pwd):/home/hello-world debian /home/hello-world/manylinux/build.sh x32 52
+ cp -f manylinux/hello-world-x86_64-i386 manylinux/hello-world-invalid-magic
+ printf "\x00" | dd of=manylinux/hello-world-invalid-magic bs=1 seek=0x00 count=1 conv=notrunc
+ cp -f manylinux/hello-world-x86_64-i386 manylinux/hello-world-invalid-class
+ printf "\x00" | dd of=manylinux/hello-world-invalid-class bs=1 seek=0x04 count=1 conv=notrunc
+ cp -f manylinux/hello-world-x86_64-i386 manylinux/hello-world-invalid-data
+ printf "\x00" | dd of=manylinux/hello-world-invalid-data bs=1 seek=0x05 count=1 conv=notrunc
+ head -c 40 manylinux/hello-world-x86_64-i386 > manylinux/hello-world-too-short
+ exit 0
+fi
+
+export DEBIAN_FRONTEND=noninteractive
+cd /home/hello-world/
+apt-get update
+apt-get install -y --no-install-recommends gcc libc6-dev
+if [ "$1" == "incontainer" ]; then
+ ARCH=$(dpkg --print-architecture)
+ CFLAGS=""
+else
+ ARCH=$1
+ dpkg --add-architecture ${ARCH}
+ apt-get install -y --no-install-recommends gcc-multilib libc6-dev-${ARCH}
+ CFLAGS="-mx32"
+fi
+NAME=hello-world-$(uname -m)-${ARCH}
+gcc -Os -s ${CFLAGS} -o ${NAME}-full hello-world.c
+head -c $2 ${NAME}-full > ${NAME}
+rm -f ${NAME}-full
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-armv7l-armel b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-armv7l-armel
new file mode 100755
index 0000000000..1dfd23fa3c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-armv7l-armel
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-armv7l-armhf b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-armv7l-armhf
new file mode 100755
index 0000000000..965ab3003a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-armv7l-armhf
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-invalid-class b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-invalid-class
new file mode 100755
index 0000000000..5e9899fc07
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-invalid-class
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-invalid-data b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-invalid-data
new file mode 100755
index 0000000000..2659b8ee25
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-invalid-data
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-invalid-magic b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-invalid-magic
new file mode 100755
index 0000000000..46066ad2de
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-invalid-magic
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-s390x-s390x b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-s390x-s390x
new file mode 100644
index 0000000000..c4e9578889
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-s390x-s390x
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-too-short b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-too-short
new file mode 100644
index 0000000000..4e5c0396b9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-too-short
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-x86_64-amd64 b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-x86_64-amd64
new file mode 100644
index 0000000000..c7f5b0b5e5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-x86_64-amd64
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-x86_64-i386 b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-x86_64-i386
new file mode 100755
index 0000000000..ff1d540a30
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-x86_64-i386
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-x86_64-x32 b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-x86_64-x32
new file mode 100755
index 0000000000..daf85d3473
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/manylinux/hello-world-x86_64-x32
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/musllinux/build.sh b/testing/web-platform/tests/tools/third_party/packaging/tests/musllinux/build.sh
new file mode 100644
index 0000000000..acd2b94c7f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/musllinux/build.sh
@@ -0,0 +1,61 @@
+# Build helper binaries for musllinux tests.
+# Usage:
+# build.sh # Build everything.
+# build.sh $DISTRO $ARCH # Build one executable in $ARCH using $DISTRO.
+#
+# Either invocation ultimately runs this script in a Docker container with
+# `build.sh glibc|musl $ARCH` to actually build the executable.
+
+set -euo pipefail
+set -x
+
+UBUNTU_VERSION='focal'
+ALPINE_VERSION='v3.13'
+
+build_one_in_ubuntu () {
+ $1 "multiarch/ubuntu-core:${2}-${UBUNTU_VERSION}" \
+ bash "/home/hello-world/musllinux/build.sh" glibc "glibc-${2}"
+}
+
+build_one_in_alpine () {
+ $1 "multiarch/alpine:${2}-${ALPINE_VERSION}" \
+ sh "/home/hello-world/musllinux/build.sh" musl "musl-${2}"
+}
+
+build_in_container () {
+ local SOURCE="$(dirname $(dirname $(realpath ${BASH_SOURCE[0]})))"
+ DOCKER="docker run --rm -v ${SOURCE}:/home/hello-world"
+
+ if [[ $# -ne 0 ]]; then
+ "build_one_in_${1}" "$DOCKER" "$2"
+ return
+ fi
+
+ build_one_in_alpine "$DOCKER" x86_64
+ build_one_in_alpine "$DOCKER" i386
+ build_one_in_alpine "$DOCKER" aarch64
+ build_one_in_ubuntu "$DOCKER" x86_64
+}
+
+if [[ $# -eq 0 ]]; then
+ build_in_container
+ exit 0
+elif [[ "$1" == "glibc" ]]; then
+ DEBIAN_FRONTEND=noninteractive apt-get update -qq \
+ && apt-get install -qqy --no-install-recommends gcc libc6-dev
+elif [[ "$1" == "musl" ]]; then
+ apk add -q build-base
+else
+ build_in_container "$@"
+ exit 0
+fi
+
+build () {
+ local CFLAGS=""
+ local OUT="/home/hello-world/musllinux/${2}"
+ gcc -Os ${CFLAGS} -o "${OUT}-full" "/home/hello-world/hello-world.c"
+ head -c1024 "${OUT}-full" > "$OUT"
+ rm -f "${OUT}-full"
+}
+
+build "$@"
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/test_manylinux.py b/testing/web-platform/tests/tools/third_party/packaging/tests/test_manylinux.py
new file mode 100644
index 0000000000..a04db15960
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/test_manylinux.py
@@ -0,0 +1,253 @@
+try:
+ import ctypes
+except ImportError:
+ ctypes = None
+import os
+import platform
+import sys
+import types
+import warnings
+
+import pretend
+import pytest
+
+from packaging import _manylinux
+from packaging._manylinux import (
+ _ELFFileHeader,
+ _get_elf_header,
+ _get_glibc_version,
+ _glibc_version_string,
+ _glibc_version_string_confstr,
+ _glibc_version_string_ctypes,
+ _is_compatible,
+ _is_linux_armhf,
+ _is_linux_i686,
+ _parse_glibc_version,
+)
+
+
+@pytest.fixture(autouse=True)
+def clear_lru_cache():
+ yield
+ _get_glibc_version.cache_clear()
+
+
+@pytest.fixture
+def manylinux_module(monkeypatch):
+ monkeypatch.setattr(_manylinux, "_get_glibc_version", lambda *args: (2, 20))
+ module_name = "_manylinux"
+ module = types.ModuleType(module_name)
+ monkeypatch.setitem(sys.modules, module_name, module)
+ return module
+
+
+@pytest.mark.parametrize("tf", (True, False))
+@pytest.mark.parametrize(
+ "attribute,glibc", (("1", (2, 5)), ("2010", (2, 12)), ("2014", (2, 17)))
+)
+def test_module_declaration(monkeypatch, manylinux_module, attribute, glibc, tf):
+ manylinux = f"manylinux{attribute}_compatible"
+ monkeypatch.setattr(manylinux_module, manylinux, tf, raising=False)
+ res = _is_compatible(manylinux, "x86_64", glibc)
+ assert tf is res
+
+
+@pytest.mark.parametrize(
+ "attribute,glibc", (("1", (2, 5)), ("2010", (2, 12)), ("2014", (2, 17)))
+)
+def test_module_declaration_missing_attribute(
+ monkeypatch, manylinux_module, attribute, glibc
+):
+ manylinux = f"manylinux{attribute}_compatible"
+ monkeypatch.delattr(manylinux_module, manylinux, raising=False)
+ assert _is_compatible(manylinux, "x86_64", glibc)
+
+
+@pytest.mark.parametrize(
+ "version,compatible", (((2, 0), True), ((2, 5), True), ((2, 10), False))
+)
+def test_is_manylinux_compatible_glibc_support(version, compatible, monkeypatch):
+ monkeypatch.setitem(sys.modules, "_manylinux", None)
+ monkeypatch.setattr(_manylinux, "_get_glibc_version", lambda: (2, 5))
+ assert bool(_is_compatible("manylinux1", "any", version)) == compatible
+
+
+@pytest.mark.parametrize("version_str", ["glibc-2.4.5", "2"])
+def test_check_glibc_version_warning(version_str):
+ with warnings.catch_warnings(record=True) as w:
+ _parse_glibc_version(version_str)
+ assert len(w) == 1
+ assert issubclass(w[0].category, RuntimeWarning)
+
+
+@pytest.mark.skipif(not ctypes, reason="requires ctypes")
+@pytest.mark.parametrize(
+ "version_str,expected",
+ [
+ # Be very explicit about bytes and Unicode for Python 2 testing.
+ (b"2.4", "2.4"),
+ ("2.4", "2.4"),
+ ],
+)
+def test_glibc_version_string(version_str, expected, monkeypatch):
+ class LibcVersion:
+ def __init__(self, version_str):
+ self.version_str = version_str
+
+ def __call__(self):
+ return version_str
+
+ class ProcessNamespace:
+ def __init__(self, libc_version):
+ self.gnu_get_libc_version = libc_version
+
+ process_namespace = ProcessNamespace(LibcVersion(version_str))
+ monkeypatch.setattr(ctypes, "CDLL", lambda _: process_namespace)
+ monkeypatch.setattr(_manylinux, "_glibc_version_string_confstr", lambda: False)
+
+ assert _glibc_version_string() == expected
+
+ del process_namespace.gnu_get_libc_version
+ assert _glibc_version_string() is None
+
+
+def test_glibc_version_string_confstr(monkeypatch):
+ monkeypatch.setattr(os, "confstr", lambda x: "glibc 2.20", raising=False)
+ assert _glibc_version_string_confstr() == "2.20"
+
+
+def test_glibc_version_string_fail(monkeypatch):
+ monkeypatch.setattr(os, "confstr", lambda x: None, raising=False)
+ monkeypatch.setitem(sys.modules, "ctypes", None)
+ assert _glibc_version_string() is None
+ assert _get_glibc_version() == (-1, -1)
+
+
+@pytest.mark.parametrize(
+ "failure",
+ [pretend.raiser(ValueError), pretend.raiser(OSError), lambda x: "XXX"],
+)
+def test_glibc_version_string_confstr_fail(monkeypatch, failure):
+ monkeypatch.setattr(os, "confstr", failure, raising=False)
+ assert _glibc_version_string_confstr() is None
+
+
+def test_glibc_version_string_confstr_missing(monkeypatch):
+ monkeypatch.delattr(os, "confstr", raising=False)
+ assert _glibc_version_string_confstr() is None
+
+
+def test_glibc_version_string_ctypes_missing(monkeypatch):
+ monkeypatch.setitem(sys.modules, "ctypes", None)
+ assert _glibc_version_string_ctypes() is None
+
+
+def test_glibc_version_string_ctypes_raise_oserror(monkeypatch):
+ def patched_cdll(name):
+ raise OSError("Dynamic loading not supported")
+
+ monkeypatch.setattr(ctypes, "CDLL", patched_cdll)
+ assert _glibc_version_string_ctypes() is None
+
+
+@pytest.mark.skipif(platform.system() != "Linux", reason="requires Linux")
+def test_is_manylinux_compatible_old():
+ # Assuming no one is running this test with a version of glibc released in
+ # 1997.
+ assert _is_compatible("any", "any", (2, 0))
+
+
+def test_is_manylinux_compatible(monkeypatch):
+ monkeypatch.setattr(_manylinux, "_glibc_version_string", lambda: "2.4")
+ assert _is_compatible("", "any", (2, 4))
+
+
+def test_glibc_version_string_none(monkeypatch):
+ monkeypatch.setattr(_manylinux, "_glibc_version_string", lambda: None)
+ assert not _is_compatible("any", "any", (2, 4))
+
+
+def test_is_linux_armhf_not_elf(monkeypatch):
+ monkeypatch.setattr(_manylinux, "_get_elf_header", lambda: None)
+ assert not _is_linux_armhf()
+
+
+def test_is_linux_i686_not_elf(monkeypatch):
+ monkeypatch.setattr(_manylinux, "_get_elf_header", lambda: None)
+ assert not _is_linux_i686()
+
+
+@pytest.mark.parametrize(
+ "machine, abi, elf_class, elf_data, elf_machine",
+ [
+ (
+ "x86_64",
+ "x32",
+ _ELFFileHeader.ELFCLASS32,
+ _ELFFileHeader.ELFDATA2LSB,
+ _ELFFileHeader.EM_X86_64,
+ ),
+ (
+ "x86_64",
+ "i386",
+ _ELFFileHeader.ELFCLASS32,
+ _ELFFileHeader.ELFDATA2LSB,
+ _ELFFileHeader.EM_386,
+ ),
+ (
+ "x86_64",
+ "amd64",
+ _ELFFileHeader.ELFCLASS64,
+ _ELFFileHeader.ELFDATA2LSB,
+ _ELFFileHeader.EM_X86_64,
+ ),
+ (
+ "armv7l",
+ "armel",
+ _ELFFileHeader.ELFCLASS32,
+ _ELFFileHeader.ELFDATA2LSB,
+ _ELFFileHeader.EM_ARM,
+ ),
+ (
+ "armv7l",
+ "armhf",
+ _ELFFileHeader.ELFCLASS32,
+ _ELFFileHeader.ELFDATA2LSB,
+ _ELFFileHeader.EM_ARM,
+ ),
+ (
+ "s390x",
+ "s390x",
+ _ELFFileHeader.ELFCLASS64,
+ _ELFFileHeader.ELFDATA2MSB,
+ _ELFFileHeader.EM_S390,
+ ),
+ ],
+)
+def test_get_elf_header(monkeypatch, machine, abi, elf_class, elf_data, elf_machine):
+ path = os.path.join(
+ os.path.dirname(__file__),
+ "manylinux",
+ f"hello-world-{machine}-{abi}",
+ )
+ monkeypatch.setattr(sys, "executable", path)
+ elf_header = _get_elf_header()
+ assert elf_header.e_ident_class == elf_class
+ assert elf_header.e_ident_data == elf_data
+ assert elf_header.e_machine == elf_machine
+
+
+@pytest.mark.parametrize(
+ "content", [None, "invalid-magic", "invalid-class", "invalid-data", "too-short"]
+)
+def test_get_elf_header_bad_executable(monkeypatch, content):
+ if content:
+ path = os.path.join(
+ os.path.dirname(__file__),
+ "manylinux",
+ f"hello-world-{content}",
+ )
+ else:
+ path = None
+ monkeypatch.setattr(sys, "executable", path)
+ assert _get_elf_header() is None
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/test_markers.py b/testing/web-platform/tests/tools/third_party/packaging/tests/test_markers.py
new file mode 100644
index 0000000000..c2640afeb6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/test_markers.py
@@ -0,0 +1,310 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import collections
+import itertools
+import os
+import platform
+import sys
+
+import pytest
+
+from packaging.markers import (
+ InvalidMarker,
+ Marker,
+ Node,
+ UndefinedComparison,
+ UndefinedEnvironmentName,
+ default_environment,
+ format_full_version,
+)
+
+VARIABLES = [
+ "extra",
+ "implementation_name",
+ "implementation_version",
+ "os_name",
+ "platform_machine",
+ "platform_release",
+ "platform_system",
+ "platform_version",
+ "python_full_version",
+ "python_version",
+ "platform_python_implementation",
+ "sys_platform",
+]
+
+PEP_345_VARIABLES = [
+ "os.name",
+ "sys.platform",
+ "platform.version",
+ "platform.machine",
+ "platform.python_implementation",
+]
+
+SETUPTOOLS_VARIABLES = ["python_implementation"]
+
+OPERATORS = ["===", "==", ">=", "<=", "!=", "~=", ">", "<", "in", "not in"]
+
+VALUES = [
+ "1.0",
+ "5.6a0",
+ "dog",
+ "freebsd",
+ "literally any string can go here",
+ "things @#4 dsfd (((",
+]
+
+
+class TestNode:
+ @pytest.mark.parametrize("value", ["one", "two", None, 3, 5, []])
+ def test_accepts_value(self, value):
+ assert Node(value).value == value
+
+ @pytest.mark.parametrize("value", ["one", "two", None, 3, 5, []])
+ def test_str(self, value):
+ assert str(Node(value)) == str(value)
+
+ @pytest.mark.parametrize("value", ["one", "two", None, 3, 5, []])
+ def test_repr(self, value):
+ assert repr(Node(value)) == f"<Node({str(value)!r})>"
+
+ def test_base_class(self):
+ with pytest.raises(NotImplementedError):
+ Node("cover all the code").serialize()
+
+
+class TestOperatorEvaluation:
+ def test_prefers_pep440(self):
+ assert Marker('"2.7.9" < "foo"').evaluate(dict(foo="2.7.10"))
+
+ def test_falls_back_to_python(self):
+ assert Marker('"b" > "a"').evaluate(dict(a="a"))
+
+ def test_fails_when_undefined(self):
+ with pytest.raises(UndefinedComparison):
+ Marker("'2.7.0' ~= os_name").evaluate()
+
+
+FakeVersionInfo = collections.namedtuple(
+ "FakeVersionInfo", ["major", "minor", "micro", "releaselevel", "serial"]
+)
+
+
+class TestDefaultEnvironment:
+ def test_matches_expected(self):
+ environment = default_environment()
+
+ iver = "{0.major}.{0.minor}.{0.micro}".format(sys.implementation.version)
+ if sys.implementation.version.releaselevel != "final":
+ iver = "{0}{1[0]}{2}".format(
+ iver,
+ sys.implementation.version.releaselevel,
+ sys.implementation.version.serial,
+ )
+
+ assert environment == {
+ "implementation_name": sys.implementation.name,
+ "implementation_version": iver,
+ "os_name": os.name,
+ "platform_machine": platform.machine(),
+ "platform_release": platform.release(),
+ "platform_system": platform.system(),
+ "platform_version": platform.version(),
+ "python_full_version": platform.python_version(),
+ "platform_python_implementation": platform.python_implementation(),
+ "python_version": ".".join(platform.python_version_tuple()[:2]),
+ "sys_platform": sys.platform,
+ }
+
+ def test_multidigit_minor_version(self, monkeypatch):
+ version_info = (3, 10, 0, "final", 0)
+ monkeypatch.setattr(sys, "version_info", version_info, raising=False)
+
+ monkeypatch.setattr(platform, "python_version", lambda: "3.10.0", raising=False)
+ monkeypatch.setattr(
+ platform, "python_version_tuple", lambda: ("3", "10", "0"), raising=False
+ )
+
+ environment = default_environment()
+ assert environment["python_version"] == "3.10"
+
+ def tests_when_releaselevel_final(self):
+ v = FakeVersionInfo(3, 4, 2, "final", 0)
+ assert format_full_version(v) == "3.4.2"
+
+ def tests_when_releaselevel_not_final(self):
+ v = FakeVersionInfo(3, 4, 2, "beta", 4)
+ assert format_full_version(v) == "3.4.2b4"
+
+
+class TestMarker:
+ @pytest.mark.parametrize(
+ "marker_string",
+ [
+ "{} {} {!r}".format(*i)
+ for i in itertools.product(VARIABLES, OPERATORS, VALUES)
+ ]
+ + [
+ "{2!r} {1} {0}".format(*i)
+ for i in itertools.product(VARIABLES, OPERATORS, VALUES)
+ ],
+ )
+ def test_parses_valid(self, marker_string):
+ Marker(marker_string)
+
+ @pytest.mark.parametrize(
+ "marker_string",
+ [
+ "this_isnt_a_real_variable >= '1.0'",
+ "python_version",
+ "(python_version)",
+ "python_version >= 1.0 and (python_version)",
+ ],
+ )
+ def test_parses_invalid(self, marker_string):
+ with pytest.raises(InvalidMarker):
+ Marker(marker_string)
+
+ @pytest.mark.parametrize(
+ ("marker_string", "expected"),
+ [
+ # Test the different quoting rules
+ ("python_version == '2.7'", 'python_version == "2.7"'),
+ ('python_version == "2.7"', 'python_version == "2.7"'),
+ # Test and/or expressions
+ (
+ 'python_version == "2.7" and os_name == "linux"',
+ 'python_version == "2.7" and os_name == "linux"',
+ ),
+ (
+ 'python_version == "2.7" or os_name == "linux"',
+ 'python_version == "2.7" or os_name == "linux"',
+ ),
+ (
+ 'python_version == "2.7" and os_name == "linux" or '
+ 'sys_platform == "win32"',
+ 'python_version == "2.7" and os_name == "linux" or '
+ 'sys_platform == "win32"',
+ ),
+ # Test nested expressions and grouping with ()
+ ('(python_version == "2.7")', 'python_version == "2.7"'),
+ (
+ '(python_version == "2.7" and sys_platform == "win32")',
+ 'python_version == "2.7" and sys_platform == "win32"',
+ ),
+ (
+ 'python_version == "2.7" and (sys_platform == "win32" or '
+ 'sys_platform == "linux")',
+ 'python_version == "2.7" and (sys_platform == "win32" or '
+ 'sys_platform == "linux")',
+ ),
+ ],
+ )
+ def test_str_and_repr(self, marker_string, expected):
+ m = Marker(marker_string)
+ assert str(m) == expected
+ assert repr(m) == f"<Marker({str(m)!r})>"
+
+ def test_extra_with_no_extra_in_environment(self):
+ # We can't evaluate an extra if no extra is passed into the environment
+ m = Marker("extra == 'security'")
+ with pytest.raises(UndefinedEnvironmentName):
+ m.evaluate()
+
+ @pytest.mark.parametrize(
+ ("marker_string", "environment", "expected"),
+ [
+ (f"os_name == '{os.name}'", None, True),
+ ("os_name == 'foo'", {"os_name": "foo"}, True),
+ ("os_name == 'foo'", {"os_name": "bar"}, False),
+ ("'2.7' in python_version", {"python_version": "2.7.5"}, True),
+ ("'2.7' not in python_version", {"python_version": "2.7"}, False),
+ (
+ "os_name == 'foo' and python_version ~= '2.7.0'",
+ {"os_name": "foo", "python_version": "2.7.6"},
+ True,
+ ),
+ (
+ "python_version ~= '2.7.0' and (os_name == 'foo' or "
+ "os_name == 'bar')",
+ {"os_name": "foo", "python_version": "2.7.4"},
+ True,
+ ),
+ (
+ "python_version ~= '2.7.0' and (os_name == 'foo' or "
+ "os_name == 'bar')",
+ {"os_name": "bar", "python_version": "2.7.4"},
+ True,
+ ),
+ (
+ "python_version ~= '2.7.0' and (os_name == 'foo' or "
+ "os_name == 'bar')",
+ {"os_name": "other", "python_version": "2.7.4"},
+ False,
+ ),
+ ("extra == 'security'", {"extra": "quux"}, False),
+ ("extra == 'security'", {"extra": "security"}, True),
+ ],
+ )
+ def test_evaluates(self, marker_string, environment, expected):
+ args = [] if environment is None else [environment]
+ assert Marker(marker_string).evaluate(*args) == expected
+
+ @pytest.mark.parametrize(
+ "marker_string",
+ [
+ "{} {} {!r}".format(*i)
+ for i in itertools.product(PEP_345_VARIABLES, OPERATORS, VALUES)
+ ]
+ + [
+ "{2!r} {1} {0}".format(*i)
+ for i in itertools.product(PEP_345_VARIABLES, OPERATORS, VALUES)
+ ],
+ )
+ def test_parses_pep345_valid(self, marker_string):
+ Marker(marker_string)
+
+ @pytest.mark.parametrize(
+ ("marker_string", "environment", "expected"),
+ [
+ (f"os.name == '{os.name}'", None, True),
+ ("sys.platform == 'win32'", {"sys_platform": "linux2"}, False),
+ ("platform.version in 'Ubuntu'", {"platform_version": "#39"}, False),
+ ("platform.machine=='x86_64'", {"platform_machine": "x86_64"}, True),
+ (
+ "platform.python_implementation=='Jython'",
+ {"platform_python_implementation": "CPython"},
+ False,
+ ),
+ (
+ "python_version == '2.5' and platform.python_implementation"
+ "!= 'Jython'",
+ {"python_version": "2.7"},
+ False,
+ ),
+ ],
+ )
+ def test_evaluate_pep345_markers(self, marker_string, environment, expected):
+ args = [] if environment is None else [environment]
+ assert Marker(marker_string).evaluate(*args) == expected
+
+ @pytest.mark.parametrize(
+ "marker_string",
+ [
+ "{} {} {!r}".format(*i)
+ for i in itertools.product(SETUPTOOLS_VARIABLES, OPERATORS, VALUES)
+ ]
+ + [
+ "{2!r} {1} {0}".format(*i)
+ for i in itertools.product(SETUPTOOLS_VARIABLES, OPERATORS, VALUES)
+ ],
+ )
+ def test_parses_setuptools_legacy_valid(self, marker_string):
+ Marker(marker_string)
+
+ def test_evaluate_setuptools_legacy_markers(self):
+ marker_string = "python_implementation=='Jython'"
+ args = [{"platform_python_implementation": "CPython"}]
+ assert Marker(marker_string).evaluate(*args) is False
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/test_musllinux.py b/testing/web-platform/tests/tools/third_party/packaging/tests/test_musllinux.py
new file mode 100644
index 0000000000..d2c87ca159
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/test_musllinux.py
@@ -0,0 +1,146 @@
+import collections
+import io
+import pathlib
+import struct
+import subprocess
+
+import pretend
+import pytest
+
+from packaging import _musllinux
+from packaging._musllinux import (
+ _get_musl_version,
+ _MuslVersion,
+ _parse_ld_musl_from_elf,
+ _parse_musl_version,
+)
+
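+# Sample banners in the format musl's dynamic loader prints; _get_musl_version
+# captures the real banner from the loader's stderr further down in this file.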
+MUSL_AMD64 = "musl libc (x86_64)\nVersion 1.2.2\n"
+MUSL_I386 = "musl libc (i386)\nVersion 1.2.1\n"
+MUSL_AARCH64 = "musl libc (aarch64)\nVersion 1.1.24\n"
+MUSL_INVALID = "musl libc (invalid)\n"
+MUSL_UNKNOWN = "musl libc (unknown)\nVersion unknown\n"
+
+MUSL_DIR = pathlib.Path(__file__).with_name("musllinux").resolve()
+
+BIN_GLIBC_X86_64 = MUSL_DIR.joinpath("glibc-x86_64")
+BIN_MUSL_X86_64 = MUSL_DIR.joinpath("musl-x86_64")
+BIN_MUSL_I386 = MUSL_DIR.joinpath("musl-i386")
+BIN_MUSL_AARCH64 = MUSL_DIR.joinpath("musl-aarch64")
+
+LD_MUSL_X86_64 = "/lib/ld-musl-x86_64.so.1"
+LD_MUSL_I386 = "/lib/ld-musl-i386.so.1"
+LD_MUSL_AARCH64 = "/lib/ld-musl-aarch64.so.1"
+
+
+@pytest.fixture(autouse=True)
+def clear_lru_cache():
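+    # _get_musl_version is memoized (hence the cache_clear call); reset it
+    # after every test so the parametrized cases stay independent.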
+ yield
+ _get_musl_version.cache_clear()
+
+
+@pytest.mark.parametrize(
+ "output, version",
+ [
+ (MUSL_AMD64, _MuslVersion(1, 2)),
+ (MUSL_I386, _MuslVersion(1, 2)),
+ (MUSL_AARCH64, _MuslVersion(1, 1)),
+ (MUSL_INVALID, None),
+ (MUSL_UNKNOWN, None),
+ ],
+ ids=["amd64-1.2.2", "i386-1.2.1", "aarch64-1.1.24", "invalid", "unknown"],
+)
+def test_parse_musl_version(output, version):
+ assert _parse_musl_version(output) == version
+
+
+@pytest.mark.parametrize(
+ "executable, location",
+ [
+ (BIN_GLIBC_X86_64, None),
+ (BIN_MUSL_X86_64, LD_MUSL_X86_64),
+ (BIN_MUSL_I386, LD_MUSL_I386),
+ (BIN_MUSL_AARCH64, LD_MUSL_AARCH64),
+ ],
+ ids=["glibc", "x86_64", "i386", "aarch64"],
+)
+def test_parse_ld_musl_from_elf(executable, location):
+ with executable.open("rb") as f:
+ assert _parse_ld_musl_from_elf(f) == location
+
+
+@pytest.mark.parametrize(
+ "data",
+ [
+ # Too short for magic.
+ b"\0",
+ # Enough for magic, but not ELF.
+ b"#!/bin/bash" + b"\0" * 16,
+ # ELF, but unknown byte declaration.
+ b"\x7fELF\3" + b"\0" * 16,
+ ],
+ ids=["no-magic", "wrong-magic", "unknown-format"],
+)
+def test_parse_ld_musl_from_elf_invalid(data):
+ assert _parse_ld_musl_from_elf(io.BytesIO(data)) is None
+
+
+@pytest.mark.parametrize(
+ "head",
+ [
+ 25, # Enough for magic, but not the section definitions.
+ 58, # Enough for section definitions, but not the actual sections.
+ ],
+)
+def test_parse_ld_musl_from_elf_invalid_section(head):
+ data = BIN_MUSL_X86_64.read_bytes()[:head]
+ assert _parse_ld_musl_from_elf(io.BytesIO(data)) is None
+
+
+def test_parse_ld_musl_from_elf_no_interpreter_section():
+ with BIN_MUSL_X86_64.open("rb") as f:
+ data = f.read()
+
+ # Change all sections to *not* PT_INTERP.
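+    # The first 58 bytes cover the ELF64 header up to e_phnum ("16BHHIQQQIHHH"
+    # = e_ident, e_type, e_machine, e_version, e_entry, e_phoff, e_shoff,
+    # e_flags, e_ehsize, e_phentsize, e_phnum). e_phoff/e_phentsize/e_phnum
+    # locate the program header table; zeroing the first field (p_type) of each
+    # 56-byte Elf64_Phdr below turns it into PT_NULL, so no PT_INTERP survives.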
+ unpacked = struct.unpack("16BHHIQQQIHHH", data[:58])
+ *_, e_phoff, _, _, _, e_phentsize, e_phnum = unpacked
+ for i in range(e_phnum + 1):
+ sb = e_phoff + e_phentsize * i
+ se = sb + 56
+ section = struct.unpack("IIQQQQQQ", data[sb:se])
+ data = data[:sb] + struct.pack("IIQQQQQQ", 0, *section[1:]) + data[se:]
+
+ assert _parse_ld_musl_from_elf(io.BytesIO(data)) is None
+
+
+@pytest.mark.parametrize(
+ "executable, output, version, ld_musl",
+ [
+ (MUSL_DIR.joinpath("does-not-exist"), "error", None, None),
+ (BIN_GLIBC_X86_64, "error", None, None),
+ (BIN_MUSL_X86_64, MUSL_AMD64, _MuslVersion(1, 2), LD_MUSL_X86_64),
+ (BIN_MUSL_I386, MUSL_I386, _MuslVersion(1, 2), LD_MUSL_I386),
+ (BIN_MUSL_AARCH64, MUSL_AARCH64, _MuslVersion(1, 1), LD_MUSL_AARCH64),
+ ],
+ ids=["does-not-exist", "glibc", "x86_64", "i386", "aarch64"],
+)
+def test_get_musl_version(monkeypatch, executable, output, version, ld_musl):
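+    # Stub subprocess.run with a call recorder whose result exposes only
+    # .stderr; that is the stream the version banner is parsed from.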
+ def mock_run(*args, **kwargs):
+ return collections.namedtuple("Proc", "stderr")(output)
+
+ run_recorder = pretend.call_recorder(mock_run)
+ monkeypatch.setattr(_musllinux.subprocess, "run", run_recorder)
+
+ assert _get_musl_version(str(executable)) == version
+
+ if ld_musl is not None:
+ expected_calls = [
+ pretend.call(
+ [ld_musl],
+ stderr=subprocess.PIPE,
+ universal_newlines=True,
+ )
+ ]
+ else:
+ expected_calls = []
+ assert run_recorder.calls == expected_calls
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/test_requirements.py b/testing/web-platform/tests/tools/third_party/packaging/tests/test_requirements.py
new file mode 100644
index 0000000000..f2c209c45c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/test_requirements.py
@@ -0,0 +1,197 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import pytest
+
+from packaging.markers import Marker
+from packaging.requirements import URL, URL_AND_MARKER, InvalidRequirement, Requirement
+from packaging.specifiers import SpecifierSet
+
+
+class TestRequirements:
+ def test_string_specifier_marker(self):
+ requirement = 'name[bar]>=3; python_version == "2.7"'
+ req = Requirement(requirement)
+ assert str(req) == requirement
+
+ def test_string_url(self):
+ requirement = "name@ http://foo.com"
+ req = Requirement(requirement)
+ assert str(req) == requirement
+
+ def test_string_url_with_marker(self):
+ requirement = 'name@ http://foo.com ; extra == "feature"'
+ req = Requirement(requirement)
+ assert str(req) == requirement
+
+ def test_repr(self):
+ req = Requirement("name")
+ assert repr(req) == "<Requirement('name')>"
+
+ def _assert_requirement(
+        self, req, name, url=None, extras=(), specifier="", marker=None
+ ):
+ assert req.name == name
+ assert req.url == url
+ assert sorted(req.extras) == sorted(extras)
+ assert str(req.specifier) == specifier
+ if marker:
+ assert str(req.marker) == marker
+ else:
+ assert req.marker is None
+
+ def test_simple_names(self):
+ for name in ("A", "aa", "name"):
+ req = Requirement(name)
+ self._assert_requirement(req, name)
+
+ def test_name_with_other_characters(self):
+ name = "foo-bar.quux_baz"
+ req = Requirement(name)
+ self._assert_requirement(req, name)
+
+ def test_invalid_name(self):
+ with pytest.raises(InvalidRequirement):
+ Requirement("foo!")
+
+ def test_name_with_version(self):
+ req = Requirement("name>=3")
+ self._assert_requirement(req, "name", specifier=">=3")
+
+ def test_with_legacy_version(self):
+ req = Requirement("name==1.0.org1")
+ self._assert_requirement(req, "name", specifier="==1.0.org1")
+
+ def test_with_legacy_version_and_marker(self):
+ req = Requirement("name>=1.x.y;python_version=='2.6'")
+ self._assert_requirement(
+ req, "name", specifier=">=1.x.y", marker='python_version == "2.6"'
+ )
+
+ def test_version_with_parens_and_whitespace(self):
+ req = Requirement("name (==4)")
+ self._assert_requirement(req, "name", specifier="==4")
+
+ def test_name_with_multiple_versions(self):
+ req = Requirement("name>=3,<2")
+ self._assert_requirement(req, "name", specifier="<2,>=3")
+
+ def test_name_with_multiple_versions_and_whitespace(self):
+ req = Requirement("name >=2, <3")
+ self._assert_requirement(req, "name", specifier="<3,>=2")
+
+ def test_extras(self):
+ req = Requirement("foobar [quux,bar]")
+ self._assert_requirement(req, "foobar", extras=["bar", "quux"])
+
+ def test_empty_extras(self):
+ req = Requirement("foo[]")
+ self._assert_requirement(req, "foo")
+
+ def test_url(self):
+ url_section = "@ http://example.com"
+ parsed = URL.parseString(url_section)
+ assert parsed.url == "http://example.com"
+
+ def test_url_and_marker(self):
+ instring = "@ http://example.com ; os_name=='a'"
+ parsed = URL_AND_MARKER.parseString(instring)
+ assert parsed.url == "http://example.com"
+ assert str(parsed.marker) == 'os_name == "a"'
+
+ def test_invalid_url(self):
+ with pytest.raises(InvalidRequirement) as e:
+ Requirement("name @ gopher:/foo/com")
+ assert "Invalid URL: " in str(e.value)
+ assert "gopher:/foo/com" in str(e.value)
+
+ def test_file_url(self):
+ req = Requirement("name @ file:///absolute/path")
+ self._assert_requirement(req, "name", "file:///absolute/path")
+ req = Requirement("name @ file://.")
+ self._assert_requirement(req, "name", "file://.")
+
+ def test_invalid_file_urls(self):
+ with pytest.raises(InvalidRequirement):
+ Requirement("name @ file:.")
+ with pytest.raises(InvalidRequirement):
+ Requirement("name @ file:/.")
+
+ def test_extras_and_url_and_marker(self):
+ req = Requirement("name [fred,bar] @ http://foo.com ; python_version=='2.7'")
+ self._assert_requirement(
+ req,
+ "name",
+ extras=["bar", "fred"],
+ url="http://foo.com",
+ marker='python_version == "2.7"',
+ )
+
+ def test_complex_url_and_marker(self):
+ url = "https://example.com/name;v=1.1/?query=foo&bar=baz#blah"
+ req = Requirement("foo @ %s ; python_version=='3.4'" % url)
+ self._assert_requirement(req, "foo", url=url, marker='python_version == "3.4"')
+
+ def test_multiple_markers(self):
+ req = Requirement(
+ "name[quux, strange];python_version<'2.7' and " "platform_version=='2'"
+ )
+ marker = 'python_version < "2.7" and platform_version == "2"'
+ self._assert_requirement(req, "name", extras=["strange", "quux"], marker=marker)
+
+ def test_multiple_comparison_markers(self):
+ req = Requirement("name; os_name=='a' and os_name=='b' or os_name=='c'")
+ marker = 'os_name == "a" and os_name == "b" or os_name == "c"'
+ self._assert_requirement(req, "name", marker=marker)
+
+ def test_invalid_marker(self):
+ with pytest.raises(InvalidRequirement):
+ Requirement("name; foobar=='x'")
+
+ def test_types(self):
+ req = Requirement("foobar[quux]<2,>=3; os_name=='a'")
+ assert isinstance(req.name, str)
+ assert isinstance(req.extras, set)
+ assert req.url is None
+ assert isinstance(req.specifier, SpecifierSet)
+ assert isinstance(req.marker, Marker)
+
+ def test_types_with_nothing(self):
+ req = Requirement("foobar")
+ assert isinstance(req.name, str)
+ assert isinstance(req.extras, set)
+ assert req.url is None
+ assert isinstance(req.specifier, SpecifierSet)
+ assert req.marker is None
+
+ def test_types_with_url(self):
+ req = Requirement("foobar @ http://foo.com")
+ assert isinstance(req.name, str)
+ assert isinstance(req.extras, set)
+ assert isinstance(req.url, str)
+ assert isinstance(req.specifier, SpecifierSet)
+ assert req.marker is None
+
+ def test_sys_platform_linux_equal(self):
+ req = Requirement('something>=1.2.3; sys_platform == "foo"')
+
+ assert req.name == "something"
+ assert req.marker is not None
+ assert req.marker.evaluate(dict(sys_platform="foo")) is True
+ assert req.marker.evaluate(dict(sys_platform="bar")) is False
+
+ def test_sys_platform_linux_in(self):
+ req = Requirement("aviato>=1.2.3; 'f' in sys_platform")
+
+ assert req.name == "aviato"
+ assert req.marker is not None
+ assert req.marker.evaluate(dict(sys_platform="foo")) is True
+ assert req.marker.evaluate(dict(sys_platform="bar")) is False
+
+ def test_parseexception_error_msg(self):
+ with pytest.raises(InvalidRequirement) as e:
+ Requirement("toto 42")
+ assert "Expected stringEnd" in str(e.value) or (
+ "Expected string_end" in str(e.value) # pyparsing>=3.0.0
+ )
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/test_specifiers.py b/testing/web-platform/tests/tools/third_party/packaging/tests/test_specifiers.py
new file mode 100644
index 0000000000..ca21fa1de0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/test_specifiers.py
@@ -0,0 +1,998 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import itertools
+import operator
+import warnings
+
+import pytest
+
+from packaging.specifiers import (
+ InvalidSpecifier,
+ LegacySpecifier,
+ Specifier,
+ SpecifierSet,
+)
+from packaging.version import LegacyVersion, Version, parse
+
+from .test_version import LEGACY_VERSIONS, VERSIONS
+
+LEGACY_SPECIFIERS = [
+ "==2.1.0.3",
+ "!=2.2.0.5",
+ "<=5",
+ ">=7.9a1",
+ "<1.0.dev1",
+ ">2.0.post1",
+]
+
+SPECIFIERS = [
+ "~=2.0",
+ "==2.1.*",
+ "==2.1.0.3",
+ "!=2.2.*",
+ "!=2.2.0.5",
+ "<=5",
+ ">=7.9a1",
+ "<1.0.dev1",
+ ">2.0.post1",
+ "===lolwat",
+]
+
+
+class TestSpecifier:
+ @pytest.mark.parametrize("specifier", SPECIFIERS)
+ def test_specifiers_valid(self, specifier):
+ Specifier(specifier)
+
+ @pytest.mark.parametrize(
+ "specifier",
+ [
+ # Operator-less specifier
+ "2.0",
+ # Invalid operator
+ "=>2.0",
+ # Version-less specifier
+ "==",
+ # Local segment on operators which don't support them
+ "~=1.0+5",
+ ">=1.0+deadbeef",
+ "<=1.0+abc123",
+ ">1.0+watwat",
+ "<1.0+1.0",
+ # Prefix matching on operators which don't support them
+ "~=1.0.*",
+ ">=1.0.*",
+ "<=1.0.*",
+ ">1.0.*",
+ "<1.0.*",
+ # Combination of local and prefix matching on operators which do
+ # support one or the other
+ "==1.0.*+5",
+ "!=1.0.*+deadbeef",
+ # Prefix matching cannot be used inside of a local version
+ "==1.0+5.*",
+ "!=1.0+deadbeef.*",
+ # Prefix matching must appear at the end
+ "==1.0.*.5",
+            # The compatible operator (~=) requires at least two release segments
+            "~=1",
+            # Prefix matching cannot be used after a .devN version
+ "==1.0.dev1.*",
+ "!=1.0.dev1.*",
+ ],
+ )
+ def test_specifiers_invalid(self, specifier):
+ with pytest.raises(InvalidSpecifier):
+ Specifier(specifier)
+
+ @pytest.mark.parametrize(
+ "version",
+ [
+ # Various development release incarnations
+ "1.0dev",
+ "1.0.dev",
+ "1.0dev1",
+ "1.0dev",
+ "1.0-dev",
+ "1.0-dev1",
+ "1.0DEV",
+ "1.0.DEV",
+ "1.0DEV1",
+ "1.0DEV",
+ "1.0.DEV1",
+ "1.0-DEV",
+ "1.0-DEV1",
+ # Various alpha incarnations
+ "1.0a",
+ "1.0.a",
+ "1.0.a1",
+ "1.0-a",
+ "1.0-a1",
+ "1.0alpha",
+ "1.0.alpha",
+ "1.0.alpha1",
+ "1.0-alpha",
+ "1.0-alpha1",
+ "1.0A",
+ "1.0.A",
+ "1.0.A1",
+ "1.0-A",
+ "1.0-A1",
+ "1.0ALPHA",
+ "1.0.ALPHA",
+ "1.0.ALPHA1",
+ "1.0-ALPHA",
+ "1.0-ALPHA1",
+ # Various beta incarnations
+ "1.0b",
+ "1.0.b",
+ "1.0.b1",
+ "1.0-b",
+ "1.0-b1",
+ "1.0beta",
+ "1.0.beta",
+ "1.0.beta1",
+ "1.0-beta",
+ "1.0-beta1",
+ "1.0B",
+ "1.0.B",
+ "1.0.B1",
+ "1.0-B",
+ "1.0-B1",
+ "1.0BETA",
+ "1.0.BETA",
+ "1.0.BETA1",
+ "1.0-BETA",
+ "1.0-BETA1",
+ # Various release candidate incarnations
+ "1.0c",
+ "1.0.c",
+ "1.0.c1",
+ "1.0-c",
+ "1.0-c1",
+ "1.0rc",
+ "1.0.rc",
+ "1.0.rc1",
+ "1.0-rc",
+ "1.0-rc1",
+ "1.0C",
+ "1.0.C",
+ "1.0.C1",
+ "1.0-C",
+ "1.0-C1",
+ "1.0RC",
+ "1.0.RC",
+ "1.0.RC1",
+ "1.0-RC",
+ "1.0-RC1",
+ # Various post release incarnations
+ "1.0post",
+ "1.0.post",
+ "1.0post1",
+ "1.0post",
+ "1.0-post",
+ "1.0-post1",
+ "1.0POST",
+ "1.0.POST",
+ "1.0POST1",
+ "1.0POST",
+ "1.0.POST1",
+ "1.0-POST",
+ "1.0-POST1",
+ "1.0-5",
+            # Local version case insensitivity
+            "1.0+AbC",
+ # Integer Normalization
+ "1.01",
+ "1.0a05",
+ "1.0b07",
+ "1.0c056",
+ "1.0rc09",
+ "1.0.post000",
+ "1.1.dev09000",
+ "00!1.2",
+ "0100!0.0",
+ # Various other normalizations
+ "v1.0",
+ " \r \f \v v1.0\t\n",
+ ],
+ )
+ def test_specifiers_normalized(self, version):
+ if "+" not in version:
+ ops = ["~=", "==", "!=", "<=", ">=", "<", ">"]
+ else:
+ ops = ["==", "!="]
+
+ for op in ops:
+ Specifier(op + version)
+
+ @pytest.mark.parametrize(
+ ("specifier", "expected"),
+ [
+ # Single item specifiers should just be reflexive
+ ("!=2.0", "!=2.0"),
+ ("<2.0", "<2.0"),
+ ("<=2.0", "<=2.0"),
+ ("==2.0", "==2.0"),
+ (">2.0", ">2.0"),
+ (">=2.0", ">=2.0"),
+ ("~=2.0", "~=2.0"),
+ # Spaces should be removed
+ ("< 2", "<2"),
+ ],
+ )
+ def test_specifiers_str_and_repr(self, specifier, expected):
+ spec = Specifier(specifier)
+
+ assert str(spec) == expected
+ assert repr(spec) == f"<Specifier({expected!r})>"
+
+ @pytest.mark.parametrize("specifier", SPECIFIERS)
+ def test_specifiers_hash(self, specifier):
+ assert hash(Specifier(specifier)) == hash(Specifier(specifier))
+
+ @pytest.mark.parametrize(
+ ("left", "right", "op"),
+ itertools.chain(
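+            # The leading * unpacks the concatenated list-of-lists so chain()
+            # flattens it into a single sequence of (left, right, op) cases.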
+ *
+ # Verify that the equal (==) operator works correctly
+ [[(x, x, operator.eq) for x in SPECIFIERS]]
+ +
+ # Verify that the not equal (!=) operator works correctly
+ [
+ [(x, y, operator.ne) for j, y in enumerate(SPECIFIERS) if i != j]
+ for i, x in enumerate(SPECIFIERS)
+ ]
+ ),
+ )
+ def test_comparison_true(self, left, right, op):
+ assert op(Specifier(left), Specifier(right))
+ assert op(left, Specifier(right))
+ assert op(Specifier(left), right)
+
+ @pytest.mark.parametrize(("left", "right"), [("==2.8.0", "==2.8")])
+ def test_comparison_canonicalizes(self, left, right):
+ assert Specifier(left) == Specifier(right)
+ assert left == Specifier(right)
+ assert Specifier(left) == right
+
+ @pytest.mark.parametrize(
+ ("left", "right", "op"),
+ itertools.chain(
+ *
+ # Verify that the equal (==) operator works correctly
+ [[(x, x, operator.ne) for x in SPECIFIERS]]
+ +
+ # Verify that the not equal (!=) operator works correctly
+ [
+ [(x, y, operator.eq) for j, y in enumerate(SPECIFIERS) if i != j]
+ for i, x in enumerate(SPECIFIERS)
+ ]
+ ),
+ )
+ def test_comparison_false(self, left, right, op):
+ assert not op(Specifier(left), Specifier(right))
+ assert not op(left, Specifier(right))
+ assert not op(Specifier(left), right)
+
+ def test_comparison_non_specifier(self):
+ assert Specifier("==1.0") != 12
+ assert not Specifier("==1.0") == 12
+ assert Specifier("==1.0") != "12"
+ assert not Specifier("==1.0") == "12"
+
+ @pytest.mark.parametrize(
+ ("version", "spec", "expected"),
+ [
+ (v, s, True)
+ for v, s in [
+ # Test the equality operation
+ ("2.0", "==2"),
+ ("2.0", "==2.0"),
+ ("2.0", "==2.0.0"),
+ ("2.0+deadbeef", "==2"),
+ ("2.0+deadbeef", "==2.0"),
+ ("2.0+deadbeef", "==2.0.0"),
+ ("2.0+deadbeef", "==2+deadbeef"),
+ ("2.0+deadbeef", "==2.0+deadbeef"),
+ ("2.0+deadbeef", "==2.0.0+deadbeef"),
+ ("2.0+deadbeef.0", "==2.0.0+deadbeef.00"),
+ # Test the equality operation with a prefix
+ ("2.dev1", "==2.*"),
+ ("2a1", "==2.*"),
+ ("2a1.post1", "==2.*"),
+ ("2b1", "==2.*"),
+ ("2b1.dev1", "==2.*"),
+ ("2c1", "==2.*"),
+ ("2c1.post1.dev1", "==2.*"),
+ ("2rc1", "==2.*"),
+ ("2", "==2.*"),
+ ("2.0", "==2.*"),
+ ("2.0.0", "==2.*"),
+ ("2.0.post1", "==2.0.post1.*"),
+ ("2.0.post1.dev1", "==2.0.post1.*"),
+ ("2.1+local.version", "==2.1.*"),
+                # Test the inequality operation
+ ("2.1", "!=2"),
+ ("2.1", "!=2.0"),
+ ("2.0.1", "!=2"),
+ ("2.0.1", "!=2.0"),
+ ("2.0.1", "!=2.0.0"),
+ ("2.0", "!=2.0+deadbeef"),
+                # Test the inequality operation with a prefix
+ ("2.0", "!=3.*"),
+ ("2.1", "!=2.0.*"),
+ # Test the greater than equal operation
+ ("2.0", ">=2"),
+ ("2.0", ">=2.0"),
+ ("2.0", ">=2.0.0"),
+ ("2.0.post1", ">=2"),
+ ("2.0.post1.dev1", ">=2"),
+ ("3", ">=2"),
+ # Test the less than equal operation
+ ("2.0", "<=2"),
+ ("2.0", "<=2.0"),
+ ("2.0", "<=2.0.0"),
+ ("2.0.dev1", "<=2"),
+ ("2.0a1", "<=2"),
+ ("2.0a1.dev1", "<=2"),
+ ("2.0b1", "<=2"),
+ ("2.0b1.post1", "<=2"),
+ ("2.0c1", "<=2"),
+ ("2.0c1.post1.dev1", "<=2"),
+ ("2.0rc1", "<=2"),
+ ("1", "<=2"),
+ # Test the greater than operation
+ ("3", ">2"),
+ ("2.1", ">2.0"),
+ ("2.0.1", ">2"),
+ ("2.1.post1", ">2"),
+ ("2.1+local.version", ">2"),
+ # Test the less than operation
+ ("1", "<2"),
+ ("2.0", "<2.1"),
+ ("2.0.dev0", "<2.1"),
+ # Test the compatibility operation
+ ("1", "~=1.0"),
+ ("1.0.1", "~=1.0"),
+ ("1.1", "~=1.0"),
+ ("1.9999999", "~=1.0"),
+ ("1.1", "~=1.0a1"),
+ # Test that epochs are handled sanely
+ ("2!1.0", "~=2!1.0"),
+ ("2!1.0", "==2!1.*"),
+ ("2!1.0", "==2!1.0"),
+ ("2!1.0", "!=1.0"),
+ ("1.0", "!=2!1.0"),
+ ("1.0", "<=2!0.1"),
+ ("2!1.0", ">=2.0"),
+ ("1.0", "<2!0.1"),
+ ("2!1.0", ">2.0"),
+ # Test some normalization rules
+ ("2.0.5", ">2.0dev"),
+ ]
+ ]
+ + [
+ (v, s, False)
+ for v, s in [
+ # Test the equality operation
+ ("2.1", "==2"),
+ ("2.1", "==2.0"),
+ ("2.1", "==2.0.0"),
+ ("2.0", "==2.0+deadbeef"),
+ # Test the equality operation with a prefix
+ ("2.0", "==3.*"),
+ ("2.1", "==2.0.*"),
+                # Test the inequality operation
+ ("2.0", "!=2"),
+ ("2.0", "!=2.0"),
+ ("2.0", "!=2.0.0"),
+ ("2.0+deadbeef", "!=2"),
+ ("2.0+deadbeef", "!=2.0"),
+ ("2.0+deadbeef", "!=2.0.0"),
+ ("2.0+deadbeef", "!=2+deadbeef"),
+ ("2.0+deadbeef", "!=2.0+deadbeef"),
+ ("2.0+deadbeef", "!=2.0.0+deadbeef"),
+ ("2.0+deadbeef.0", "!=2.0.0+deadbeef.00"),
+                # Test the inequality operation with a prefix
+ ("2.dev1", "!=2.*"),
+ ("2a1", "!=2.*"),
+ ("2a1.post1", "!=2.*"),
+ ("2b1", "!=2.*"),
+ ("2b1.dev1", "!=2.*"),
+ ("2c1", "!=2.*"),
+ ("2c1.post1.dev1", "!=2.*"),
+ ("2rc1", "!=2.*"),
+ ("2", "!=2.*"),
+ ("2.0", "!=2.*"),
+ ("2.0.0", "!=2.*"),
+ ("2.0.post1", "!=2.0.post1.*"),
+ ("2.0.post1.dev1", "!=2.0.post1.*"),
+ # Test the greater than equal operation
+ ("2.0.dev1", ">=2"),
+ ("2.0a1", ">=2"),
+ ("2.0a1.dev1", ">=2"),
+ ("2.0b1", ">=2"),
+ ("2.0b1.post1", ">=2"),
+ ("2.0c1", ">=2"),
+ ("2.0c1.post1.dev1", ">=2"),
+ ("2.0rc1", ">=2"),
+ ("1", ">=2"),
+ # Test the less than equal operation
+ ("2.0.post1", "<=2"),
+ ("2.0.post1.dev1", "<=2"),
+ ("3", "<=2"),
+ # Test the greater than operation
+ ("1", ">2"),
+ ("2.0.dev1", ">2"),
+ ("2.0a1", ">2"),
+ ("2.0a1.post1", ">2"),
+ ("2.0b1", ">2"),
+ ("2.0b1.dev1", ">2"),
+ ("2.0c1", ">2"),
+ ("2.0c1.post1.dev1", ">2"),
+ ("2.0rc1", ">2"),
+ ("2.0", ">2"),
+ ("2.0.post1", ">2"),
+ ("2.0.post1.dev1", ">2"),
+ ("2.0+local.version", ">2"),
+ # Test the less than operation
+ ("2.0.dev1", "<2"),
+ ("2.0a1", "<2"),
+ ("2.0a1.post1", "<2"),
+ ("2.0b1", "<2"),
+ ("2.0b2.dev1", "<2"),
+ ("2.0c1", "<2"),
+ ("2.0c1.post1.dev1", "<2"),
+ ("2.0rc1", "<2"),
+ ("2.0", "<2"),
+ ("2.post1", "<2"),
+ ("2.post1.dev1", "<2"),
+ ("3", "<2"),
+ # Test the compatibility operation
+ ("2.0", "~=1.0"),
+ ("1.1.0", "~=1.0.0"),
+ ("1.1.post1", "~=1.0.0"),
+ # Test that epochs are handled sanely
+ ("1.0", "~=2!1.0"),
+ ("2!1.0", "~=1.0"),
+ ("2!1.0", "==1.0"),
+ ("1.0", "==2!1.0"),
+ ("2!1.0", "==1.*"),
+ ("1.0", "==2!1.*"),
+ ("2!1.0", "!=2!1.0"),
+ ]
+ ],
+ )
+ def test_specifiers(self, version, spec, expected):
+ spec = Specifier(spec, prereleases=True)
+
+ if expected:
+ # Test that the plain string form works
+ assert version in spec
+ assert spec.contains(version)
+
+ # Test that the version instance form works
+ assert Version(version) in spec
+ assert spec.contains(Version(version))
+ else:
+ # Test that the plain string form works
+ assert version not in spec
+ assert not spec.contains(version)
+
+ # Test that the version instance form works
+ assert Version(version) not in spec
+ assert not spec.contains(Version(version))
+
+ @pytest.mark.parametrize(
+ ("version", "spec", "expected"),
+ [
+ # Test identity comparison by itself
+ ("lolwat", "===lolwat", True),
+ ("Lolwat", "===lolwat", True),
+ ("1.0", "===1.0", True),
+ ("nope", "===lolwat", False),
+ ("1.0.0", "===1.0", False),
+ ("1.0.dev0", "===1.0.dev0", True),
+ ],
+ )
+ def test_specifiers_identity(self, version, spec, expected):
+ spec = Specifier(spec)
+
+ if expected:
+ # Identity comparisons only support the plain string form
+ assert version in spec
+ else:
+ # Identity comparisons only support the plain string form
+ assert version not in spec
+
+ @pytest.mark.parametrize(
+ ("specifier", "expected"),
+ [
+ ("==1.0", False),
+ (">=1.0", False),
+ ("<=1.0", False),
+ ("~=1.0", False),
+ ("<1.0", False),
+ (">1.0", False),
+ ("<1.0.dev1", False),
+ (">1.0.dev1", False),
+ ("==1.0.*", False),
+ ("==1.0.dev1", True),
+ (">=1.0.dev1", True),
+ ("<=1.0.dev1", True),
+ ("~=1.0.dev1", True),
+ ],
+ )
+ def test_specifier_prereleases_detection(self, specifier, expected):
+ assert Specifier(specifier).prereleases == expected
+
+ @pytest.mark.parametrize(
+ ("specifier", "version", "expected"),
+ [
+ (">=1.0", "2.0.dev1", False),
+ (">=2.0.dev1", "2.0a1", True),
+ ("==2.0.*", "2.0a1.dev1", False),
+ ("==2.0a1.*", "2.0a1.dev1", True),
+ ("<=2.0", "1.0.dev1", False),
+ ("<=2.0.dev1", "1.0a1", True),
+ ],
+ )
+ def test_specifiers_prereleases(self, specifier, version, expected):
+ spec = Specifier(specifier)
+
+ if expected:
+ assert version in spec
+ spec.prereleases = False
+ assert version not in spec
+ else:
+ assert version not in spec
+ spec.prereleases = True
+ assert version in spec
+
+ @pytest.mark.parametrize(
+ ("specifier", "prereleases", "input", "expected"),
+ [
+ (">=1.0", None, ["2.0a1"], ["2.0a1"]),
+ (">=1.0.dev1", None, ["1.0", "2.0a1"], ["1.0", "2.0a1"]),
+ (">=1.0.dev1", False, ["1.0", "2.0a1"], ["1.0"]),
+ ],
+ )
+ def test_specifier_filter(self, specifier, prereleases, input, expected):
+ spec = Specifier(specifier)
+
+ kwargs = {"prereleases": prereleases} if prereleases is not None else {}
+
+ assert list(spec.filter(input, **kwargs)) == expected
+
+ @pytest.mark.xfail
+ def test_specifier_explicit_legacy(self):
+ assert Specifier("==1.0").contains(LegacyVersion("1.0"))
+
+ @pytest.mark.parametrize(
+ ("spec", "op"),
+ [
+ ("~=2.0", "~="),
+ ("==2.1.*", "=="),
+ ("==2.1.0.3", "=="),
+ ("!=2.2.*", "!="),
+ ("!=2.2.0.5", "!="),
+ ("<=5", "<="),
+ (">=7.9a1", ">="),
+ ("<1.0.dev1", "<"),
+ (">2.0.post1", ">"),
+ ("===lolwat", "==="),
+ ],
+ )
+ def test_specifier_operator_property(self, spec, op):
+ assert Specifier(spec).operator == op
+
+ @pytest.mark.parametrize(
+ ("spec", "version"),
+ [
+ ("~=2.0", "2.0"),
+ ("==2.1.*", "2.1.*"),
+ ("==2.1.0.3", "2.1.0.3"),
+ ("!=2.2.*", "2.2.*"),
+ ("!=2.2.0.5", "2.2.0.5"),
+ ("<=5", "5"),
+ (">=7.9a1", "7.9a1"),
+ ("<1.0.dev1", "1.0.dev1"),
+ (">2.0.post1", "2.0.post1"),
+ ("===lolwat", "lolwat"),
+ ],
+ )
+ def test_specifier_version_property(self, spec, version):
+ assert Specifier(spec).version == version
+
+ @pytest.mark.parametrize(
+ ("spec", "expected_length"),
+ [("", 0), ("==2.0", 1), (">=2.0", 1), (">=2.0,<3", 2), (">=2.0,<3,==2.4", 3)],
+ )
+ def test_length(self, spec, expected_length):
+ spec = SpecifierSet(spec)
+ assert len(spec) == expected_length
+
+ @pytest.mark.parametrize(
+ ("spec", "expected_items"),
+ [
+ ("", []),
+ ("==2.0", ["==2.0"]),
+ (">=2.0", [">=2.0"]),
+ (">=2.0,<3", [">=2.0", "<3"]),
+ (">=2.0,<3,==2.4", [">=2.0", "<3", "==2.4"]),
+ ],
+ )
+ def test_iteration(self, spec, expected_items):
+ spec = SpecifierSet(spec)
+ items = {str(item) for item in spec}
+ assert items == set(expected_items)
+
+
+class TestLegacySpecifier:
+ def test_legacy_specifier_is_deprecated(self):
+ with warnings.catch_warnings(record=True) as w:
+ LegacySpecifier(">=some-legacy-version")
+ assert len(w) == 1
+ assert issubclass(w[0].category, DeprecationWarning)
+
+ @pytest.mark.parametrize(
+ ("version", "spec", "expected"),
+ [
+ (v, s, True)
+ for v, s in [
+ # Test the equality operation
+ ("2.0", "==2"),
+ ("2.0", "==2.0"),
+ ("2.0", "==2.0.0"),
+                # Test the inequality operation
+ ("2.1", "!=2"),
+ ("2.1", "!=2.0"),
+ ("2.0.1", "!=2"),
+ ("2.0.1", "!=2.0"),
+ ("2.0.1", "!=2.0.0"),
+ # Test the greater than equal operation
+ ("2.0", ">=2"),
+ ("2.0", ">=2.0"),
+ ("2.0", ">=2.0.0"),
+ ("2.0.post1", ">=2"),
+ ("2.0.post1.dev1", ">=2"),
+ ("3", ">=2"),
+ # Test the less than equal operation
+ ("2.0", "<=2"),
+ ("2.0", "<=2.0"),
+ ("2.0", "<=2.0.0"),
+ ("2.0.dev1", "<=2"),
+ ("2.0a1", "<=2"),
+ ("2.0a1.dev1", "<=2"),
+ ("2.0b1", "<=2"),
+ ("2.0b1.post1", "<=2"),
+ ("2.0c1", "<=2"),
+ ("2.0c1.post1.dev1", "<=2"),
+ ("2.0rc1", "<=2"),
+ ("1", "<=2"),
+ # Test the greater than operation
+ ("3", ">2"),
+ ("2.1", ">2.0"),
+ # Test the less than operation
+ ("1", "<2"),
+ ("2.0", "<2.1"),
+ ]
+ ]
+ + [
+ (v, s, False)
+ for v, s in [
+ # Test the equality operation
+ ("2.1", "==2"),
+ ("2.1", "==2.0"),
+ ("2.1", "==2.0.0"),
+                # Test the inequality operation
+ ("2.0", "!=2"),
+ ("2.0", "!=2.0"),
+ ("2.0", "!=2.0.0"),
+ # Test the greater than equal operation
+ ("2.0.dev1", ">=2"),
+ ("2.0a1", ">=2"),
+ ("2.0a1.dev1", ">=2"),
+ ("2.0b1", ">=2"),
+ ("2.0b1.post1", ">=2"),
+ ("2.0c1", ">=2"),
+ ("2.0c1.post1.dev1", ">=2"),
+ ("2.0rc1", ">=2"),
+ ("1", ">=2"),
+ # Test the less than equal operation
+ ("2.0.post1", "<=2"),
+ ("2.0.post1.dev1", "<=2"),
+ ("3", "<=2"),
+ # Test the greater than operation
+ ("1", ">2"),
+ ("2.0.dev1", ">2"),
+ ("2.0a1", ">2"),
+ ("2.0a1.post1", ">2"),
+ ("2.0b1", ">2"),
+ ("2.0b1.dev1", ">2"),
+ ("2.0c1", ">2"),
+ ("2.0c1.post1.dev1", ">2"),
+ ("2.0rc1", ">2"),
+ ("2.0", ">2"),
+ # Test the less than operation
+ ("3", "<2"),
+ ]
+ ],
+ )
+ def test_specifiers(self, version, spec, expected):
+ spec = LegacySpecifier(spec, prereleases=True)
+
+ if expected:
+ # Test that the plain string form works
+ assert version in spec
+ assert spec.contains(version)
+
+ # Test that the version instance form works
+ assert LegacyVersion(version) in spec
+ assert spec.contains(LegacyVersion(version))
+ else:
+ # Test that the plain string form works
+ assert version not in spec
+ assert not spec.contains(version)
+
+ # Test that the version instance form works
+ assert LegacyVersion(version) not in spec
+ assert not spec.contains(LegacyVersion(version))
+
+ def test_specifier_explicit_prereleases(self):
+ spec = LegacySpecifier(">=1.0")
+ assert not spec.prereleases
+ spec.prereleases = True
+ assert spec.prereleases
+
+ spec = LegacySpecifier(">=1.0", prereleases=False)
+ assert not spec.prereleases
+ spec.prereleases = True
+ assert spec.prereleases
+
+ spec = LegacySpecifier(">=1.0", prereleases=True)
+ assert spec.prereleases
+ spec.prereleases = False
+ assert not spec.prereleases
+
+ spec = LegacySpecifier(">=1.0", prereleases=True)
+ assert spec.prereleases
+ spec.prereleases = None
+ assert not spec.prereleases
+
+
+class TestSpecifierSet:
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_empty_specifier(self, version):
+ spec = SpecifierSet(prereleases=True)
+
+ assert version in spec
+ assert spec.contains(version)
+ assert parse(version) in spec
+ assert spec.contains(parse(version))
+
+ def test_specifier_prereleases_explicit(self):
+ spec = SpecifierSet()
+ assert not spec.prereleases
+ assert "1.0.dev1" not in spec
+ assert not spec.contains("1.0.dev1")
+ spec.prereleases = True
+ assert spec.prereleases
+ assert "1.0.dev1" in spec
+ assert spec.contains("1.0.dev1")
+
+ spec = SpecifierSet(prereleases=True)
+ assert spec.prereleases
+ assert "1.0.dev1" in spec
+ assert spec.contains("1.0.dev1")
+ spec.prereleases = False
+ assert not spec.prereleases
+ assert "1.0.dev1" not in spec
+ assert not spec.contains("1.0.dev1")
+
+ spec = SpecifierSet(prereleases=True)
+ assert spec.prereleases
+ assert "1.0.dev1" in spec
+ assert spec.contains("1.0.dev1")
+ spec.prereleases = None
+ assert not spec.prereleases
+ assert "1.0.dev1" not in spec
+ assert not spec.contains("1.0.dev1")
+
+ def test_specifier_contains_prereleases(self):
+ spec = SpecifierSet()
+ assert spec.prereleases is None
+ assert not spec.contains("1.0.dev1")
+ assert spec.contains("1.0.dev1", prereleases=True)
+
+ spec = SpecifierSet(prereleases=True)
+ assert spec.prereleases
+ assert spec.contains("1.0.dev1")
+ assert not spec.contains("1.0.dev1", prereleases=False)
+
+ @pytest.mark.parametrize(
+ ("specifier", "specifier_prereleases", "prereleases", "input", "expected"),
+ [
+ # General test of the filter method
+ ("", None, None, ["1.0", "2.0a1"], ["1.0"]),
+ (">=1.0.dev1", None, None, ["1.0", "2.0a1"], ["1.0", "2.0a1"]),
+ ("", None, None, ["1.0a1"], ["1.0a1"]),
+ ("", None, None, ["1.0", Version("2.0")], ["1.0", Version("2.0")]),
+ ("", None, None, ["2.0dog", "1.0"], ["1.0"]),
+ # Test overriding with the prereleases parameter on filter
+ ("", None, False, ["1.0a1"], []),
+ (">=1.0.dev1", None, False, ["1.0", "2.0a1"], ["1.0"]),
+ ("", None, True, ["1.0", "2.0a1"], ["1.0", "2.0a1"]),
+ # Test overriding with the overall specifier
+ ("", True, None, ["1.0", "2.0a1"], ["1.0", "2.0a1"]),
+ ("", False, None, ["1.0", "2.0a1"], ["1.0"]),
+ (">=1.0.dev1", True, None, ["1.0", "2.0a1"], ["1.0", "2.0a1"]),
+ (">=1.0.dev1", False, None, ["1.0", "2.0a1"], ["1.0"]),
+ ("", True, None, ["1.0a1"], ["1.0a1"]),
+ ("", False, None, ["1.0a1"], []),
+ ],
+ )
+ def test_specifier_filter(
+ self, specifier_prereleases, specifier, prereleases, input, expected
+ ):
+ if specifier_prereleases is None:
+ spec = SpecifierSet(specifier)
+ else:
+ spec = SpecifierSet(specifier, prereleases=specifier_prereleases)
+
+ kwargs = {"prereleases": prereleases} if prereleases is not None else {}
+
+ assert list(spec.filter(input, **kwargs)) == expected
+
+ def test_legacy_specifiers_combined(self):
+ spec = SpecifierSet("<3,>1-1-1")
+ assert "2.0" in spec
+
+ @pytest.mark.parametrize(
+ ("specifier", "expected"),
+ [
+ # Single item specifiers should just be reflexive
+ ("!=2.0", "!=2.0"),
+ ("<2.0", "<2.0"),
+ ("<=2.0", "<=2.0"),
+ ("==2.0", "==2.0"),
+ (">2.0", ">2.0"),
+ (">=2.0", ">=2.0"),
+ ("~=2.0", "~=2.0"),
+ # Spaces should be removed
+ ("< 2", "<2"),
+ # Multiple item specifiers should work
+ ("!=2.0,>1.0", "!=2.0,>1.0"),
+ ("!=2.0 ,>1.0", "!=2.0,>1.0"),
+ ],
+ )
+ def test_specifiers_str_and_repr(self, specifier, expected):
+ spec = SpecifierSet(specifier)
+
+ assert str(spec) == expected
+ assert repr(spec) == f"<SpecifierSet({expected!r})>"
+
+ @pytest.mark.parametrize("specifier", SPECIFIERS + LEGACY_SPECIFIERS)
+ def test_specifiers_hash(self, specifier):
+ assert hash(SpecifierSet(specifier)) == hash(SpecifierSet(specifier))
+
+ @pytest.mark.parametrize(
+ ("left", "right", "expected"), [(">2.0", "<5.0", ">2.0,<5.0")]
+ )
+ def test_specifiers_combine(self, left, right, expected):
+ result = SpecifierSet(left) & SpecifierSet(right)
+ assert result == SpecifierSet(expected)
+
+ result = SpecifierSet(left) & right
+ assert result == SpecifierSet(expected)
+
+ result = SpecifierSet(left, prereleases=True) & SpecifierSet(right)
+ assert result == SpecifierSet(expected)
+ assert result.prereleases
+
+ result = SpecifierSet(left, prereleases=False) & SpecifierSet(right)
+ assert result == SpecifierSet(expected)
+ assert not result.prereleases
+
+ result = SpecifierSet(left) & SpecifierSet(right, prereleases=True)
+ assert result == SpecifierSet(expected)
+ assert result.prereleases
+
+ result = SpecifierSet(left) & SpecifierSet(right, prereleases=False)
+ assert result == SpecifierSet(expected)
+ assert not result.prereleases
+
+ result = SpecifierSet(left, prereleases=True) & SpecifierSet(
+ right, prereleases=True
+ )
+ assert result == SpecifierSet(expected)
+ assert result.prereleases
+
+ result = SpecifierSet(left, prereleases=False) & SpecifierSet(
+ right, prereleases=False
+ )
+ assert result == SpecifierSet(expected)
+ assert not result.prereleases
+
+ with pytest.raises(ValueError):
+ result = SpecifierSet(left, prereleases=True) & SpecifierSet(
+ right, prereleases=False
+ )
+
+ with pytest.raises(ValueError):
+ result = SpecifierSet(left, prereleases=False) & SpecifierSet(
+ right, prereleases=True
+ )
+
+ def test_specifiers_combine_not_implemented(self):
+ with pytest.raises(TypeError):
+ SpecifierSet() & 12
+
+ @pytest.mark.parametrize(
+ ("left", "right", "op"),
+ itertools.chain(
+ *
+ # Verify that the equal (==) operator works correctly
+ [[(x, x, operator.eq) for x in SPECIFIERS]]
+ +
+ # Verify that the not equal (!=) operator works correctly
+ [
+ [(x, y, operator.ne) for j, y in enumerate(SPECIFIERS) if i != j]
+ for i, x in enumerate(SPECIFIERS)
+ ]
+ ),
+ )
+ def test_comparison_true(self, left, right, op):
+ assert op(SpecifierSet(left), SpecifierSet(right))
+ assert op(SpecifierSet(left), Specifier(right))
+ assert op(Specifier(left), SpecifierSet(right))
+ assert op(left, SpecifierSet(right))
+ assert op(SpecifierSet(left), right)
+
+ @pytest.mark.parametrize(
+ ("left", "right", "op"),
+ itertools.chain(
+ *
+ # Verify that the equal (==) operator works correctly
+ [[(x, x, operator.ne) for x in SPECIFIERS]]
+ +
+ # Verify that the not equal (!=) operator works correctly
+ [
+ [(x, y, operator.eq) for j, y in enumerate(SPECIFIERS) if i != j]
+ for i, x in enumerate(SPECIFIERS)
+ ]
+ ),
+ )
+ def test_comparison_false(self, left, right, op):
+ assert not op(SpecifierSet(left), SpecifierSet(right))
+ assert not op(SpecifierSet(left), Specifier(right))
+ assert not op(Specifier(left), SpecifierSet(right))
+ assert not op(left, SpecifierSet(right))
+ assert not op(SpecifierSet(left), right)
+
+ @pytest.mark.parametrize(("left", "right"), [("==2.8.0", "==2.8")])
+ def test_comparison_canonicalizes(self, left, right):
+ assert SpecifierSet(left) == SpecifierSet(right)
+ assert left == SpecifierSet(right)
+ assert SpecifierSet(left) == right
+
+ def test_comparison_non_specifier(self):
+ assert SpecifierSet("==1.0") != 12
+ assert not SpecifierSet("==1.0") == 12
+
+ @pytest.mark.parametrize(
+ ("version", "specifier", "expected"),
+ [
+ ("1.0.0+local", "==1.0.0", True),
+ ("1.0.0+local", "!=1.0.0", False),
+ ("1.0.0+local", "<=1.0.0", True),
+ ("1.0.0+local", ">=1.0.0", True),
+ ("1.0.0+local", "<1.0.0", False),
+ ("1.0.0+local", ">1.0.0", False),
+ ],
+ )
+ def test_comparison_ignores_local(self, version, specifier, expected):
+ assert (Version(version) in SpecifierSet(specifier)) == expected
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/test_structures.py b/testing/web-platform/tests/tools/third_party/packaging/tests/test_structures.py
new file mode 100644
index 0000000000..f8115e5742
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/test_structures.py
@@ -0,0 +1,59 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import pytest
+
+from packaging._structures import Infinity, NegativeInfinity
+
+
+def test_infinity_repr():
+    assert repr(Infinity) == "Infinity"
+
+
+def test_negative_infinity_repr():
+    assert repr(NegativeInfinity) == "-Infinity"
+
+
+def test_infinity_hash():
+ assert hash(Infinity) == hash(Infinity)
+
+
+def test_negative_infinity_hash():
+ assert hash(NegativeInfinity) == hash(NegativeInfinity)
+
+
+@pytest.mark.parametrize("left", [1, "a", ("b", 4)])
+def test_infinity_comparison(left):
+ assert left < Infinity
+ assert left <= Infinity
+ assert not left == Infinity
+ assert left != Infinity
+ assert not left > Infinity
+ assert not left >= Infinity
+
+
+@pytest.mark.parametrize("left", [1, "a", ("b", 4)])
+def test_negative_infinity_lesser(left):
+ assert not left < NegativeInfinity
+ assert not left <= NegativeInfinity
+ assert not left == NegativeInfinity
+ assert left != NegativeInfinity
+ assert left > NegativeInfinity
+ assert left >= NegativeInfinity
+
+
+def test_infinity_equal():
+ assert Infinity == Infinity
+
+
+def test_negative_infinity_equal():
+ assert NegativeInfinity == NegativeInfinity
+
+
+def test_negate_infinity():
+ assert isinstance(-Infinity, NegativeInfinity.__class__)
+
+
+def test_negate_negative_infinity():
+ assert isinstance(-NegativeInfinity, Infinity.__class__)
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/test_tags.py b/testing/web-platform/tests/tools/third_party/packaging/tests/test_tags.py
new file mode 100644
index 0000000000..446dee4ef7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/test_tags.py
@@ -0,0 +1,1191 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import collections.abc
+
+try:
+ import ctypes
+except ImportError:
+ ctypes = None
+import os
+import pathlib
+import platform
+import sys
+import sysconfig
+import types
+
+import pretend
+import pytest
+
+from packaging import tags
+from packaging._musllinux import _MuslVersion
+
+
+@pytest.fixture
+def example_tag():
+ return tags.Tag("py3", "none", "any")
+
+
+@pytest.fixture
+def manylinux_module(monkeypatch):
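+    # Pretend glibc 2.20 and register an empty fake "_manylinux" module in
+    # sys.modules; tests can then set the manylinux*_compatible opt-out
+    # attributes that the detection code looks for on it.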
+ monkeypatch.setattr(tags._manylinux, "_get_glibc_version", lambda *args: (2, 20))
+ module_name = "_manylinux"
+ module = types.ModuleType(module_name)
+ monkeypatch.setitem(sys.modules, module_name, module)
+ return module
+
+
+@pytest.fixture
+def mock_interpreter_name(monkeypatch):
+ def mock(name):
+ name = name.lower()
+ if sys.implementation.name != name:
+ monkeypatch.setattr(sys.implementation, "name", name)
+ return True
+ return False
+
+ return mock
+
+
+class TestTag:
+ def test_lowercasing(self):
+ tag = tags.Tag("PY3", "None", "ANY")
+ assert tag.interpreter == "py3"
+ assert tag.abi == "none"
+ assert tag.platform == "any"
+
+ def test_equality(self):
+ args = "py3", "none", "any"
+ assert tags.Tag(*args) == tags.Tag(*args)
+
+ def test_equality_fails_with_non_tag(self):
+ assert not tags.Tag("py3", "none", "any") == "non-tag"
+
+ def test_hashing(self, example_tag):
+        tag_set = {example_tag}  # Should not raise TypeError.
+        assert example_tag in tag_set
+
+ def test_hash_equality(self, example_tag):
+ equal_tag = tags.Tag("py3", "none", "any")
+ assert example_tag == equal_tag # Sanity check.
+ assert example_tag.__hash__() == equal_tag.__hash__()
+
+ def test_str(self, example_tag):
+ assert str(example_tag) == "py3-none-any"
+
+ def test_repr(self, example_tag):
+ assert repr(example_tag) == "<py3-none-any @ {tag_id}>".format(
+ tag_id=id(example_tag)
+ )
+
+ def test_attribute_access(self, example_tag):
+ assert example_tag.interpreter == "py3"
+ assert example_tag.abi == "none"
+ assert example_tag.platform == "any"
+
+
+class TestParseTag:
+ def test_simple(self, example_tag):
+ parsed_tags = tags.parse_tag(str(example_tag))
+ assert parsed_tags == {example_tag}
+
+ def test_multi_interpreter(self, example_tag):
+ expected = {example_tag, tags.Tag("py2", "none", "any")}
+ given = tags.parse_tag("py2.py3-none-any")
+ assert given == expected
+
+ def test_multi_platform(self):
+ expected = {
+ tags.Tag("cp37", "cp37m", platform)
+ for platform in (
+ "macosx_10_6_intel",
+ "macosx_10_9_intel",
+ "macosx_10_9_x86_64",
+ "macosx_10_10_intel",
+ "macosx_10_10_x86_64",
+ )
+ }
+ given = tags.parse_tag(
+ "cp37-cp37m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64."
+ "macosx_10_10_intel.macosx_10_10_x86_64"
+ )
+ assert given == expected
+
+
+class TestInterpreterName:
+ def test_sys_implementation_name(self, monkeypatch):
+ class MockImplementation:
+ pass
+
+ mock_implementation = MockImplementation()
+ mock_implementation.name = "sillywalk"
+ monkeypatch.setattr(sys, "implementation", mock_implementation, raising=False)
+ assert tags.interpreter_name() == "sillywalk"
+
+ def test_interpreter_short_names(self, mock_interpreter_name, monkeypatch):
+ mock_interpreter_name("cpython")
+ assert tags.interpreter_name() == "cp"
+
+
+class TestInterpreterVersion:
+ def test_warn(self, monkeypatch):
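+        # MockConfigVar records the warn flag it receives so the test can
+        # verify interpreter_version(warn=True) forwards it to _get_config_var.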
+ class MockConfigVar:
+ def __init__(self, return_):
+ self.warn = None
+ self._return = return_
+
+ def __call__(self, name, warn):
+ self.warn = warn
+ return self._return
+
+ mock_config_var = MockConfigVar("38")
+ monkeypatch.setattr(tags, "_get_config_var", mock_config_var)
+ tags.interpreter_version(warn=True)
+ assert mock_config_var.warn
+
+ def test_python_version_nodot(self, monkeypatch):
+ monkeypatch.setattr(tags, "_get_config_var", lambda var, warn: "NN")
+ assert tags.interpreter_version() == "NN"
+
+ @pytest.mark.parametrize(
+ "version_info,version_str",
+ [
+ ((1, 2, 3), "12"),
+ ((1, 12, 3), "112"),
+ ((11, 2, 3), "112"),
+ ((11, 12, 3), "1112"),
+ ((1, 2, 13), "12"),
+ ],
+ )
+ def test_sys_version_info(self, version_info, version_str, monkeypatch):
+ monkeypatch.setattr(tags, "_get_config_var", lambda *args, **kwargs: None)
+ monkeypatch.setattr(sys, "version_info", version_info)
+ assert tags.interpreter_version() == version_str
+
+
+class TestMacOSPlatforms:
+ @pytest.mark.parametrize(
+ "arch, is_32bit, expected",
+ [
+ ("i386", True, "i386"),
+ ("ppc", True, "ppc"),
+ ("x86_64", False, "x86_64"),
+ ("x86_64", True, "i386"),
+ ("ppc64", False, "ppc64"),
+ ("ppc64", True, "ppc"),
+ ],
+ )
+ def test_architectures(self, arch, is_32bit, expected):
+ assert tags._mac_arch(arch, is_32bit=is_32bit) == expected
+
+ @pytest.mark.parametrize(
+ "version,arch,expected",
+ [
+ (
+ (10, 15),
+ "x86_64",
+ ["x86_64", "intel", "fat64", "fat32", "universal2", "universal"],
+ ),
+ (
+ (10, 4),
+ "x86_64",
+ ["x86_64", "intel", "fat64", "fat32", "universal2", "universal"],
+ ),
+ ((10, 3), "x86_64", []),
+ ((10, 15), "i386", ["i386", "intel", "fat32", "fat", "universal"]),
+ ((10, 4), "i386", ["i386", "intel", "fat32", "fat", "universal"]),
+ ((10, 3), "intel", ["intel", "universal"]),
+ ((10, 5), "intel", ["intel", "universal"]),
+ ((10, 15), "intel", ["intel", "universal"]),
+ ((10, 3), "i386", []),
+ ((10, 15), "ppc64", []),
+ ((10, 6), "ppc64", []),
+ ((10, 5), "ppc64", ["ppc64", "fat64", "universal"]),
+ ((10, 3), "ppc64", []),
+ ((10, 15), "ppc", []),
+ ((10, 7), "ppc", []),
+ ((10, 6), "ppc", ["ppc", "fat32", "fat", "universal"]),
+ ((10, 0), "ppc", ["ppc", "fat32", "fat", "universal"]),
+ ((11, 0), "riscv", ["riscv"]),
+ (
+ (11, 0),
+ "x86_64",
+ ["x86_64", "intel", "fat64", "fat32", "universal2", "universal"],
+ ),
+ ((11, 0), "arm64", ["arm64", "universal2"]),
+ ((11, 1), "arm64", ["arm64", "universal2"]),
+ ((12, 0), "arm64", ["arm64", "universal2"]),
+ ],
+ )
+ def test_binary_formats(self, version, arch, expected):
+ assert tags._mac_binary_formats(version, arch) == expected
+
+ def test_version_detection(self, monkeypatch):
+ if platform.system() != "Darwin":
+ monkeypatch.setattr(
+ platform, "mac_ver", lambda: ("10.14", ("", "", ""), "x86_64")
+ )
+ version = platform.mac_ver()[0].split(".")
+ major = version[0]
+ minor = version[1] if major == "10" else "0"
+ expected = f"macosx_{major}_{minor}"
+
+ platforms = list(tags.mac_platforms(arch="x86_64"))
+ print(platforms, expected)
+ assert platforms[0].startswith(expected)
+
+ @pytest.mark.parametrize("arch", ["x86_64", "i386"])
+ def test_arch_detection(self, arch, monkeypatch):
+ if platform.system() != "Darwin" or platform.mac_ver()[2] != arch:
+ monkeypatch.setattr(
+ platform, "mac_ver", lambda: ("10.14", ("", "", ""), arch)
+ )
+ monkeypatch.setattr(tags, "_mac_arch", lambda *args: arch)
+ assert next(tags.mac_platforms((10, 14))).endswith(arch)
+
+ def test_mac_platforms(self):
+ platforms = list(tags.mac_platforms((10, 5), "x86_64"))
+ assert platforms == [
+ "macosx_10_5_x86_64",
+ "macosx_10_5_intel",
+ "macosx_10_5_fat64",
+ "macosx_10_5_fat32",
+ "macosx_10_5_universal2",
+ "macosx_10_5_universal",
+ "macosx_10_4_x86_64",
+ "macosx_10_4_intel",
+ "macosx_10_4_fat64",
+ "macosx_10_4_fat32",
+ "macosx_10_4_universal2",
+ "macosx_10_4_universal",
+ ]
+
+ assert len(list(tags.mac_platforms((10, 17), "x86_64"))) == 14 * 6
+
+ assert not list(tags.mac_platforms((10, 0), "x86_64"))
+
+ @pytest.mark.parametrize("major,minor", [(11, 0), (11, 3), (12, 0), (12, 3)])
+ def test_macos_11(self, major, minor):
+ platforms = list(tags.mac_platforms((major, minor), "x86_64"))
+ assert "macosx_11_0_arm64" not in platforms
+ assert "macosx_11_0_x86_64" in platforms
+ assert "macosx_11_3_x86_64" not in platforms
+ assert "macosx_11_0_universal" in platforms
+ assert "macosx_11_0_universal2" in platforms
+ # Mac OS "10.16" is the version number that binaries compiled against an old
+ # (pre 11.0) SDK will see. It can also be enabled explicitly for a process
+ # with the environment variable SYSTEM_VERSION_COMPAT=1.
+ assert "macosx_10_16_x86_64" in platforms
+ assert "macosx_10_15_x86_64" in platforms
+ assert "macosx_10_15_universal2" in platforms
+ assert "macosx_10_4_x86_64" in platforms
+ assert "macosx_10_3_x86_64" not in platforms
+ if major >= 12:
+ assert "macosx_12_0_x86_64" in platforms
+ assert "macosx_12_0_universal" in platforms
+ assert "macosx_12_0_universal2" in platforms
+
+ platforms = list(tags.mac_platforms((major, minor), "arm64"))
+ assert "macosx_11_0_arm64" in platforms
+ assert "macosx_11_3_arm64" not in platforms
+ assert "macosx_11_0_universal" not in platforms
+ assert "macosx_11_0_universal2" in platforms
+ assert "macosx_10_15_universal2" in platforms
+ assert "macosx_10_15_x86_64" not in platforms
+ assert "macosx_10_4_x86_64" not in platforms
+ assert "macosx_10_3_x86_64" not in platforms
+ if major >= 12:
+ assert "macosx_12_0_arm64" in platforms
+ assert "macosx_12_0_universal2" in platforms
+
+
+class TestManylinuxPlatform:
+ def teardown_method(self):
+ # Clear the version cache
+ tags._manylinux._get_glibc_version.cache_clear()
+
+ def test_get_config_var_does_not_log(self, monkeypatch):
+ debug = pretend.call_recorder(lambda *a: None)
+ monkeypatch.setattr(tags.logger, "debug", debug)
+ tags._get_config_var("missing")
+ assert debug.calls == []
+
+ def test_get_config_var_does_log(self, monkeypatch):
+ debug = pretend.call_recorder(lambda *a: None)
+ monkeypatch.setattr(tags.logger, "debug", debug)
+ tags._get_config_var("missing", warn=True)
+ assert debug.calls == [
+ pretend.call(
+ "Config variable '%s' is unset, Python ABI tag may be incorrect",
+ "missing",
+ )
+ ]
+
+ @pytest.mark.parametrize(
+ "arch,is_32bit,expected",
+ [
+ ("linux-x86_64", False, "linux_x86_64"),
+ ("linux-x86_64", True, "linux_i686"),
+ ("linux-aarch64", False, "linux_aarch64"),
+ ("linux-aarch64", True, "linux_armv7l"),
+ ],
+ )
+ def test_linux_platforms_32_64bit_on_64bit_os(
+ self, arch, is_32bit, expected, monkeypatch
+ ):
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: arch)
+ monkeypatch.setattr(os, "confstr", lambda x: "glibc 2.20", raising=False)
+ monkeypatch.setattr(tags._manylinux, "_is_compatible", lambda *args: False)
+ linux_platform = list(tags._linux_platforms(is_32bit=is_32bit))[-1]
+ assert linux_platform == expected
+
+ def test_linux_platforms_manylinux_unsupported(self, monkeypatch):
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: "linux_x86_64")
+ monkeypatch.setattr(os, "confstr", lambda x: "glibc 2.20", raising=False)
+ monkeypatch.setattr(tags._manylinux, "_is_compatible", lambda *args: False)
+ linux_platform = list(tags._linux_platforms(is_32bit=False))
+ assert linux_platform == ["linux_x86_64"]
+
+ def test_linux_platforms_manylinux1(self, monkeypatch):
+ monkeypatch.setattr(
+ tags._manylinux, "_is_compatible", lambda name, *args: name == "manylinux1"
+ )
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: "linux_x86_64")
+ monkeypatch.setattr(platform, "machine", lambda: "x86_64")
+ monkeypatch.setattr(os, "confstr", lambda x: "glibc 2.20", raising=False)
+ platforms = list(tags._linux_platforms(is_32bit=False))
+ arch = platform.machine()
+ assert platforms == ["manylinux1_" + arch, "linux_" + arch]
+
+ def test_linux_platforms_manylinux2010(self, monkeypatch):
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: "linux_x86_64")
+ monkeypatch.setattr(platform, "machine", lambda: "x86_64")
+ monkeypatch.setattr(os, "confstr", lambda x: "glibc 2.12", raising=False)
+ platforms = list(tags._linux_platforms(is_32bit=False))
+ arch = platform.machine()
+ expected = [
+ "manylinux_2_12_" + arch,
+ "manylinux2010_" + arch,
+ "manylinux_2_11_" + arch,
+ "manylinux_2_10_" + arch,
+ "manylinux_2_9_" + arch,
+ "manylinux_2_8_" + arch,
+ "manylinux_2_7_" + arch,
+ "manylinux_2_6_" + arch,
+ "manylinux_2_5_" + arch,
+ "manylinux1_" + arch,
+ "linux_" + arch,
+ ]
+ assert platforms == expected
+
+ def test_linux_platforms_manylinux2014(self, monkeypatch):
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: "linux_x86_64")
+ monkeypatch.setattr(platform, "machine", lambda: "x86_64")
+ monkeypatch.setattr(os, "confstr", lambda x: "glibc 2.17", raising=False)
+ platforms = list(tags._linux_platforms(is_32bit=False))
+ arch = platform.machine()
+ expected = [
+ "manylinux_2_17_" + arch,
+ "manylinux2014_" + arch,
+ "manylinux_2_16_" + arch,
+ "manylinux_2_15_" + arch,
+ "manylinux_2_14_" + arch,
+ "manylinux_2_13_" + arch,
+ "manylinux_2_12_" + arch,
+ "manylinux2010_" + arch,
+ "manylinux_2_11_" + arch,
+ "manylinux_2_10_" + arch,
+ "manylinux_2_9_" + arch,
+ "manylinux_2_8_" + arch,
+ "manylinux_2_7_" + arch,
+ "manylinux_2_6_" + arch,
+ "manylinux_2_5_" + arch,
+ "manylinux1_" + arch,
+ "linux_" + arch,
+ ]
+ assert platforms == expected
+
+ def test_linux_platforms_manylinux2014_armhf_abi(self, monkeypatch):
+ monkeypatch.setattr(tags._manylinux, "_glibc_version_string", lambda: "2.30")
+ monkeypatch.setattr(
+ tags._manylinux,
+ "_is_compatible",
+ lambda name, *args: name == "manylinux2014",
+ )
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: "linux_armv7l")
+ monkeypatch.setattr(
+ sys,
+ "executable",
+ os.path.join(
+ os.path.dirname(__file__),
+ "manylinux",
+ "hello-world-armv7l-armhf",
+ ),
+ )
+ platforms = list(tags._linux_platforms(is_32bit=True))
+ expected = ["manylinux2014_armv7l", "linux_armv7l"]
+ assert platforms == expected
+
+ def test_linux_platforms_manylinux2014_i386_abi(self, monkeypatch):
+ monkeypatch.setattr(tags._manylinux, "_glibc_version_string", lambda: "2.17")
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: "linux_x86_64")
+ monkeypatch.setattr(
+ sys,
+ "executable",
+ os.path.join(
+ os.path.dirname(__file__),
+ "manylinux",
+ "hello-world-x86_64-i386",
+ ),
+ )
+ platforms = list(tags._linux_platforms(is_32bit=True))
+ expected = [
+ "manylinux_2_17_i686",
+ "manylinux2014_i686",
+ "manylinux_2_16_i686",
+ "manylinux_2_15_i686",
+ "manylinux_2_14_i686",
+ "manylinux_2_13_i686",
+ "manylinux_2_12_i686",
+ "manylinux2010_i686",
+ "manylinux_2_11_i686",
+ "manylinux_2_10_i686",
+ "manylinux_2_9_i686",
+ "manylinux_2_8_i686",
+ "manylinux_2_7_i686",
+ "manylinux_2_6_i686",
+ "manylinux_2_5_i686",
+ "manylinux1_i686",
+ "linux_i686",
+ ]
+ assert platforms == expected
+
+ def test_linux_platforms_manylinux_glibc3(self, monkeypatch):
+        # Test for a future glibc 3.x version
+ monkeypatch.setattr(tags._manylinux, "_glibc_version_string", lambda: "3.2")
+ monkeypatch.setattr(tags._manylinux, "_is_compatible", lambda name, *args: True)
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: "linux_aarch64")
+ monkeypatch.setattr(
+ sys,
+ "executable",
+ os.path.join(
+ os.path.dirname(__file__),
+ "manylinux",
+ "hello-world-aarch64",
+ ),
+ )
+ platforms = list(tags._linux_platforms(is_32bit=False))
+ expected = (
+ ["manylinux_3_2_aarch64", "manylinux_3_1_aarch64", "manylinux_3_0_aarch64"]
+ + [f"manylinux_2_{i}_aarch64" for i in range(50, 16, -1)]
+ + ["manylinux2014_aarch64", "linux_aarch64"]
+ )
+ assert platforms == expected
+
+ @pytest.mark.parametrize(
+ "native_arch, cross32_arch, musl_version",
+ [
+ ("aarch64", "armv7l", _MuslVersion(1, 1)),
+ ("i386", "i386", _MuslVersion(1, 2)),
+ ("x86_64", "i686", _MuslVersion(1, 2)),
+ ],
+ )
+ @pytest.mark.parametrize("cross32", [True, False], ids=["cross", "native"])
+ def test_linux_platforms_musllinux(
+ self, monkeypatch, native_arch, cross32_arch, musl_version, cross32
+ ):
+ fake_executable = str(
+ pathlib.Path(__file__)
+ .parent.joinpath("musllinux", f"musl-{native_arch}")
+ .resolve()
+ )
+ monkeypatch.setattr(tags._musllinux.sys, "executable", fake_executable)
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: f"linux_{native_arch}")
+ monkeypatch.setattr(tags._manylinux, "platform_tags", lambda *_: ())
+
+ recorder = pretend.call_recorder(lambda _: musl_version)
+ monkeypatch.setattr(tags._musllinux, "_get_musl_version", recorder)
+
+ platforms = list(tags._linux_platforms(is_32bit=cross32))
+ target_arch = cross32_arch if cross32 else native_arch
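+        # Expect one musllinux_<major>_<minor> tag per minor from the detected
+        # musl version down to <major>.0, followed by the plain linux_<arch> tag.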
+ expected = [
+ f"musllinux_{musl_version[0]}_{minor}_{target_arch}"
+ for minor in range(musl_version[1], -1, -1)
+ ] + [f"linux_{target_arch}"]
+ assert platforms == expected
+
+ assert recorder.calls == [pretend.call(fake_executable)]
+
+ def test_linux_platforms_manylinux2014_armv6l(self, monkeypatch):
+ monkeypatch.setattr(
+ tags._manylinux, "_is_compatible", lambda name, _: name == "manylinux2014"
+ )
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: "linux_armv6l")
+ monkeypatch.setattr(os, "confstr", lambda x: "glibc 2.20", raising=False)
+ platforms = list(tags._linux_platforms(is_32bit=True))
+ expected = ["linux_armv6l"]
+ assert platforms == expected
+
+ @pytest.mark.parametrize(
+ "machine, abi, alt_machine",
+ [("x86_64", "x32", "i686"), ("armv7l", "armel", "armv7l")],
+ )
+ def test_linux_platforms_not_manylinux_abi(
+ self, monkeypatch, machine, abi, alt_machine
+ ):
+ monkeypatch.setattr(tags._manylinux, "_is_compatible", lambda name, _: False)
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: f"linux_{machine}")
+ monkeypatch.setattr(
+ sys,
+ "executable",
+ os.path.join(
+ os.path.dirname(__file__),
+ "manylinux",
+ f"hello-world-{machine}-{abi}",
+ ),
+ )
+ platforms = list(tags._linux_platforms(is_32bit=True))
+ expected = [f"linux_{alt_machine}"]
+ assert platforms == expected
+
+
+@pytest.mark.parametrize(
+ "platform_name,dispatch_func",
+ [
+ ("Darwin", "mac_platforms"),
+ ("Linux", "_linux_platforms"),
+ ("Generic", "_generic_platforms"),
+ ],
+)
+def test_platform_tags(platform_name, dispatch_func, monkeypatch):
+ expected = ["sillywalk"]
+ monkeypatch.setattr(platform, "system", lambda: platform_name)
+ monkeypatch.setattr(tags, dispatch_func, lambda: expected)
+ assert tags.platform_tags() == expected
+
+
+class TestCPythonABI:
+ @pytest.mark.parametrize(
+ "py_debug,gettotalrefcount,result",
+ [(1, False, True), (0, False, False), (None, True, True)],
+ )
+ def test_debug(self, py_debug, gettotalrefcount, result, monkeypatch):
+ config = {"Py_DEBUG": py_debug, "WITH_PYMALLOC": 0, "Py_UNICODE_SIZE": 2}
+ monkeypatch.setattr(sysconfig, "get_config_var", config.__getitem__)
+ if gettotalrefcount:
+ monkeypatch.setattr(sys, "gettotalrefcount", 1, raising=False)
+ expected = ["cp37d" if result else "cp37"]
+ assert tags._cpython_abis((3, 7)) == expected
+
+ def test_debug_file_extension(self, monkeypatch):
+ config = {"Py_DEBUG": None}
+ monkeypatch.setattr(sysconfig, "get_config_var", config.__getitem__)
+ monkeypatch.delattr(sys, "gettotalrefcount", raising=False)
+ monkeypatch.setattr(tags, "EXTENSION_SUFFIXES", {"_d.pyd"})
+ assert tags._cpython_abis((3, 8)) == ["cp38d", "cp38"]
+
+ @pytest.mark.parametrize(
+ "debug,expected", [(True, ["cp38d", "cp38"]), (False, ["cp38"])]
+ )
+ def test__debug_cp38(self, debug, expected, monkeypatch):
+ config = {"Py_DEBUG": debug}
+ monkeypatch.setattr(sysconfig, "get_config_var", config.__getitem__)
+ assert tags._cpython_abis((3, 8)) == expected
+
+ @pytest.mark.parametrize(
+ "pymalloc,version,result",
+ [
+ (1, (3, 7), True),
+ (0, (3, 7), False),
+ (None, (3, 7), True),
+ (1, (3, 8), False),
+ ],
+ )
+ def test_pymalloc(self, pymalloc, version, result, monkeypatch):
+ config = {"Py_DEBUG": 0, "WITH_PYMALLOC": pymalloc, "Py_UNICODE_SIZE": 2}
+ monkeypatch.setattr(sysconfig, "get_config_var", config.__getitem__)
+ base_abi = f"cp{version[0]}{version[1]}"
+ expected = [base_abi + "m" if result else base_abi]
+ assert tags._cpython_abis(version) == expected
+
+ @pytest.mark.parametrize(
+ "unicode_size,maxunicode,version,result",
+ [
+ (4, 0x10FFFF, (3, 2), True),
+ (2, 0xFFFF, (3, 2), False),
+ (None, 0x10FFFF, (3, 2), True),
+ (None, 0xFFFF, (3, 2), False),
+ (4, 0x10FFFF, (3, 3), False),
+ ],
+ )
+ def test_wide_unicode(self, unicode_size, maxunicode, version, result, monkeypatch):
+ config = {"Py_DEBUG": 0, "WITH_PYMALLOC": 0, "Py_UNICODE_SIZE": unicode_size}
+ monkeypatch.setattr(sysconfig, "get_config_var", config.__getitem__)
+ monkeypatch.setattr(sys, "maxunicode", maxunicode)
+ base_abi = "cp" + tags._version_nodot(version)
+ expected = [base_abi + "u" if result else base_abi]
+ assert tags._cpython_abis(version) == expected
+
+
+class TestCPythonTags:
+ def test_iterator_returned(self):
+ result_iterator = tags.cpython_tags(
+ (3, 8), ["cp38d", "cp38"], ["plat1", "plat2"]
+ )
+ assert isinstance(result_iterator, collections.abc.Iterator)
+
+ def test_all_args(self):
+ result_iterator = tags.cpython_tags(
+ (3, 11), ["cp311d", "cp311"], ["plat1", "plat2"]
+ )
+ result = list(result_iterator)
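+        # The explicit ABIs come first, then abi3 and none for the requested
+        # interpreter, then abi3 tags back-filled for every older CPython
+        # release down to 3.2, where the stable ABI was introduced.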
+ assert result == [
+ tags.Tag("cp311", "cp311d", "plat1"),
+ tags.Tag("cp311", "cp311d", "plat2"),
+ tags.Tag("cp311", "cp311", "plat1"),
+ tags.Tag("cp311", "cp311", "plat2"),
+ tags.Tag("cp311", "abi3", "plat1"),
+ tags.Tag("cp311", "abi3", "plat2"),
+ tags.Tag("cp311", "none", "plat1"),
+ tags.Tag("cp311", "none", "plat2"),
+ tags.Tag("cp310", "abi3", "plat1"),
+ tags.Tag("cp310", "abi3", "plat2"),
+ tags.Tag("cp39", "abi3", "plat1"),
+ tags.Tag("cp39", "abi3", "plat2"),
+ tags.Tag("cp38", "abi3", "plat1"),
+ tags.Tag("cp38", "abi3", "plat2"),
+ tags.Tag("cp37", "abi3", "plat1"),
+ tags.Tag("cp37", "abi3", "plat2"),
+ tags.Tag("cp36", "abi3", "plat1"),
+ tags.Tag("cp36", "abi3", "plat2"),
+ tags.Tag("cp35", "abi3", "plat1"),
+ tags.Tag("cp35", "abi3", "plat2"),
+ tags.Tag("cp34", "abi3", "plat1"),
+ tags.Tag("cp34", "abi3", "plat2"),
+ tags.Tag("cp33", "abi3", "plat1"),
+ tags.Tag("cp33", "abi3", "plat2"),
+ tags.Tag("cp32", "abi3", "plat1"),
+ tags.Tag("cp32", "abi3", "plat2"),
+ ]
+ result_iterator = tags.cpython_tags(
+ (3, 8), ["cp38d", "cp38"], ["plat1", "plat2"]
+ )
+ result = list(result_iterator)
+ assert result == [
+ tags.Tag("cp38", "cp38d", "plat1"),
+ tags.Tag("cp38", "cp38d", "plat2"),
+ tags.Tag("cp38", "cp38", "plat1"),
+ tags.Tag("cp38", "cp38", "plat2"),
+ tags.Tag("cp38", "abi3", "plat1"),
+ tags.Tag("cp38", "abi3", "plat2"),
+ tags.Tag("cp38", "none", "plat1"),
+ tags.Tag("cp38", "none", "plat2"),
+ tags.Tag("cp37", "abi3", "plat1"),
+ tags.Tag("cp37", "abi3", "plat2"),
+ tags.Tag("cp36", "abi3", "plat1"),
+ tags.Tag("cp36", "abi3", "plat2"),
+ tags.Tag("cp35", "abi3", "plat1"),
+ tags.Tag("cp35", "abi3", "plat2"),
+ tags.Tag("cp34", "abi3", "plat1"),
+ tags.Tag("cp34", "abi3", "plat2"),
+ tags.Tag("cp33", "abi3", "plat1"),
+ tags.Tag("cp33", "abi3", "plat2"),
+ tags.Tag("cp32", "abi3", "plat1"),
+ tags.Tag("cp32", "abi3", "plat2"),
+ ]
+
+ result = list(tags.cpython_tags((3, 3), ["cp33m"], ["plat1", "plat2"]))
+ assert result == [
+ tags.Tag("cp33", "cp33m", "plat1"),
+ tags.Tag("cp33", "cp33m", "plat2"),
+ tags.Tag("cp33", "abi3", "plat1"),
+ tags.Tag("cp33", "abi3", "plat2"),
+ tags.Tag("cp33", "none", "plat1"),
+ tags.Tag("cp33", "none", "plat2"),
+ tags.Tag("cp32", "abi3", "plat1"),
+ tags.Tag("cp32", "abi3", "plat2"),
+ ]
+
+ def test_python_version_defaults(self):
+ tag = next(tags.cpython_tags(abis=["abi3"], platforms=["any"]))
+ interpreter = "cp" + tags._version_nodot(sys.version_info[:2])
+ assert interpreter == tag.interpreter
+
+ def test_abi_defaults(self, monkeypatch):
+ monkeypatch.setattr(tags, "_cpython_abis", lambda _1, _2: ["cp38"])
+ result = list(tags.cpython_tags((3, 8), platforms=["any"]))
+ assert tags.Tag("cp38", "cp38", "any") in result
+ assert tags.Tag("cp38", "abi3", "any") in result
+ assert tags.Tag("cp38", "none", "any") in result
+
+ def test_abi_defaults_needs_underscore(self, monkeypatch):
+ monkeypatch.setattr(tags, "_cpython_abis", lambda _1, _2: ["cp311"])
+ result = list(tags.cpython_tags((3, 11), platforms=["any"]))
+ assert tags.Tag("cp311", "cp311", "any") in result
+ assert tags.Tag("cp311", "abi3", "any") in result
+ assert tags.Tag("cp311", "none", "any") in result
+
+ def test_platforms_defaults(self, monkeypatch):
+ monkeypatch.setattr(tags, "platform_tags", lambda: ["plat1"])
+ result = list(tags.cpython_tags((3, 8), abis=["whatever"]))
+ assert tags.Tag("cp38", "whatever", "plat1") in result
+
+ def test_platforms_defaults_needs_underscore(self, monkeypatch):
+ monkeypatch.setattr(tags, "platform_tags", lambda: ["plat1"])
+ result = list(tags.cpython_tags((3, 11), abis=["whatever"]))
+ assert tags.Tag("cp311", "whatever", "plat1") in result
+
+ def test_major_only_python_version(self):
+ result = list(tags.cpython_tags((3,), ["abi"], ["plat"]))
+ assert result == [
+ tags.Tag("cp3", "abi", "plat"),
+ tags.Tag("cp3", "none", "plat"),
+ ]
+
+ def test_major_only_python_version_with_default_abis(self):
+ result = list(tags.cpython_tags((3,), platforms=["plat"]))
+ assert result == [tags.Tag("cp3", "none", "plat")]
+
+ @pytest.mark.parametrize("abis", [[], ["abi3"], ["none"]])
+ def test_skip_redundant_abis(self, abis):
+ results = list(tags.cpython_tags((3, 0), abis=abis, platforms=["any"]))
+ assert results == [tags.Tag("cp30", "none", "any")]
+
+ def test_abi3_python33(self):
+ results = list(tags.cpython_tags((3, 3), abis=["cp33"], platforms=["plat"]))
+ assert results == [
+ tags.Tag("cp33", "cp33", "plat"),
+ tags.Tag("cp33", "abi3", "plat"),
+ tags.Tag("cp33", "none", "plat"),
+ tags.Tag("cp32", "abi3", "plat"),
+ ]
+
+ def test_no_excess_abi3_python32(self):
+ results = list(tags.cpython_tags((3, 2), abis=["cp32"], platforms=["plat"]))
+ assert results == [
+ tags.Tag("cp32", "cp32", "plat"),
+ tags.Tag("cp32", "abi3", "plat"),
+ tags.Tag("cp32", "none", "plat"),
+ ]
+
+ def test_no_abi3_python31(self):
+ results = list(tags.cpython_tags((3, 1), abis=["cp31"], platforms=["plat"]))
+ assert results == [
+ tags.Tag("cp31", "cp31", "plat"),
+ tags.Tag("cp31", "none", "plat"),
+ ]
+
+ def test_no_abi3_python27(self):
+ results = list(tags.cpython_tags((2, 7), abis=["cp27"], platforms=["plat"]))
+ assert results == [
+ tags.Tag("cp27", "cp27", "plat"),
+ tags.Tag("cp27", "none", "plat"),
+ ]
+
+
+class TestGenericTags:
+ @pytest.mark.skipif(
+ not sysconfig.get_config_var("SOABI"), reason="SOABI not defined"
+ )
+ def test__generic_abi_soabi_provided(self):
+ abi = sysconfig.get_config_var("SOABI").replace(".", "_").replace("-", "_")
+ assert [abi] == list(tags._generic_abi())
+
+ def test__generic_abi(self, monkeypatch):
+ monkeypatch.setattr(
+ sysconfig, "get_config_var", lambda key: "cpython-37m-darwin"
+ )
+ assert list(tags._generic_abi()) == ["cpython_37m_darwin"]
+
+ def test__generic_abi_no_soabi(self, monkeypatch):
+ monkeypatch.setattr(sysconfig, "get_config_var", lambda key: None)
+ assert not list(tags._generic_abi())
+
+ def test_generic_platforms(self):
+ platform = sysconfig.get_platform().replace("-", "_")
+ platform = platform.replace(".", "_")
+ assert list(tags._generic_platforms()) == [platform]
+
+ def test_iterator_returned(self):
+ result_iterator = tags.generic_tags("sillywalk33", ["abi"], ["plat1", "plat2"])
+ assert isinstance(result_iterator, collections.abc.Iterator)
+
+ def test_all_args(self):
+ result_iterator = tags.generic_tags("sillywalk33", ["abi"], ["plat1", "plat2"])
+ result = list(result_iterator)
+ assert result == [
+ tags.Tag("sillywalk33", "abi", "plat1"),
+ tags.Tag("sillywalk33", "abi", "plat2"),
+ tags.Tag("sillywalk33", "none", "plat1"),
+ tags.Tag("sillywalk33", "none", "plat2"),
+ ]
+
+ @pytest.mark.parametrize("abi", [[], ["none"]])
+ def test_abi_unspecified(self, abi):
+ no_abi = list(tags.generic_tags("sillywalk34", abi, ["plat1", "plat2"]))
+ assert no_abi == [
+ tags.Tag("sillywalk34", "none", "plat1"),
+ tags.Tag("sillywalk34", "none", "plat2"),
+ ]
+
+ def test_interpreter_default(self, monkeypatch):
+ monkeypatch.setattr(tags, "interpreter_name", lambda: "sillywalk")
+ monkeypatch.setattr(tags, "interpreter_version", lambda warn: "NN")
+ result = list(tags.generic_tags(abis=["none"], platforms=["any"]))
+ assert result == [tags.Tag("sillywalkNN", "none", "any")]
+
+ def test_abis_default(self, monkeypatch):
+ monkeypatch.setattr(tags, "_generic_abi", lambda: iter(["abi"]))
+ result = list(tags.generic_tags(interpreter="sillywalk", platforms=["any"]))
+ assert result == [
+ tags.Tag("sillywalk", "abi", "any"),
+ tags.Tag("sillywalk", "none", "any"),
+ ]
+
+ def test_platforms_default(self, monkeypatch):
+ monkeypatch.setattr(tags, "platform_tags", lambda: ["plat"])
+ result = list(tags.generic_tags(interpreter="sillywalk", abis=["none"]))
+ assert result == [tags.Tag("sillywalk", "none", "plat")]
+
+
+class TestCompatibleTags:
+ def test_all_args(self):
+ result = list(tags.compatible_tags((3, 3), "cp33", ["plat1", "plat2"]))
+ assert result == [
+ tags.Tag("py33", "none", "plat1"),
+ tags.Tag("py33", "none", "plat2"),
+ tags.Tag("py3", "none", "plat1"),
+ tags.Tag("py3", "none", "plat2"),
+ tags.Tag("py32", "none", "plat1"),
+ tags.Tag("py32", "none", "plat2"),
+ tags.Tag("py31", "none", "plat1"),
+ tags.Tag("py31", "none", "plat2"),
+ tags.Tag("py30", "none", "plat1"),
+ tags.Tag("py30", "none", "plat2"),
+ tags.Tag("cp33", "none", "any"),
+ tags.Tag("py33", "none", "any"),
+ tags.Tag("py3", "none", "any"),
+ tags.Tag("py32", "none", "any"),
+ tags.Tag("py31", "none", "any"),
+ tags.Tag("py30", "none", "any"),
+ ]
+
+ def test_all_args_needs_underscore(self):
+ result = list(tags.compatible_tags((3, 11), "cp311", ["plat1", "plat2"]))
+ assert result == [
+ tags.Tag("py311", "none", "plat1"),
+ tags.Tag("py311", "none", "plat2"),
+ tags.Tag("py3", "none", "plat1"),
+ tags.Tag("py3", "none", "plat2"),
+ tags.Tag("py310", "none", "plat1"),
+ tags.Tag("py310", "none", "plat2"),
+ tags.Tag("py39", "none", "plat1"),
+ tags.Tag("py39", "none", "plat2"),
+ tags.Tag("py38", "none", "plat1"),
+ tags.Tag("py38", "none", "plat2"),
+ tags.Tag("py37", "none", "plat1"),
+ tags.Tag("py37", "none", "plat2"),
+ tags.Tag("py36", "none", "plat1"),
+ tags.Tag("py36", "none", "plat2"),
+ tags.Tag("py35", "none", "plat1"),
+ tags.Tag("py35", "none", "plat2"),
+ tags.Tag("py34", "none", "plat1"),
+ tags.Tag("py34", "none", "plat2"),
+ tags.Tag("py33", "none", "plat1"),
+ tags.Tag("py33", "none", "plat2"),
+ tags.Tag("py32", "none", "plat1"),
+ tags.Tag("py32", "none", "plat2"),
+ tags.Tag("py31", "none", "plat1"),
+ tags.Tag("py31", "none", "plat2"),
+ tags.Tag("py30", "none", "plat1"),
+ tags.Tag("py30", "none", "plat2"),
+ tags.Tag("cp311", "none", "any"),
+ tags.Tag("py311", "none", "any"),
+ tags.Tag("py3", "none", "any"),
+ tags.Tag("py310", "none", "any"),
+ tags.Tag("py39", "none", "any"),
+ tags.Tag("py38", "none", "any"),
+ tags.Tag("py37", "none", "any"),
+ tags.Tag("py36", "none", "any"),
+ tags.Tag("py35", "none", "any"),
+ tags.Tag("py34", "none", "any"),
+ tags.Tag("py33", "none", "any"),
+ tags.Tag("py32", "none", "any"),
+ tags.Tag("py31", "none", "any"),
+ tags.Tag("py30", "none", "any"),
+ ]
+
+ def test_major_only_python_version(self):
+ result = list(tags.compatible_tags((3,), "cp33", ["plat"]))
+ assert result == [
+ tags.Tag("py3", "none", "plat"),
+ tags.Tag("cp33", "none", "any"),
+ tags.Tag("py3", "none", "any"),
+ ]
+
+ def test_default_python_version(self, monkeypatch):
+ monkeypatch.setattr(sys, "version_info", (3, 1))
+ result = list(tags.compatible_tags(interpreter="cp31", platforms=["plat"]))
+ assert result == [
+ tags.Tag("py31", "none", "plat"),
+ tags.Tag("py3", "none", "plat"),
+ tags.Tag("py30", "none", "plat"),
+ tags.Tag("cp31", "none", "any"),
+ tags.Tag("py31", "none", "any"),
+ tags.Tag("py3", "none", "any"),
+ tags.Tag("py30", "none", "any"),
+ ]
+
+ def test_default_python_version_needs_underscore(self, monkeypatch):
+ monkeypatch.setattr(sys, "version_info", (3, 11))
+ result = list(tags.compatible_tags(interpreter="cp311", platforms=["plat"]))
+ assert result == [
+ tags.Tag("py311", "none", "plat"),
+ tags.Tag("py3", "none", "plat"),
+ tags.Tag("py310", "none", "plat"),
+ tags.Tag("py39", "none", "plat"),
+ tags.Tag("py38", "none", "plat"),
+ tags.Tag("py37", "none", "plat"),
+ tags.Tag("py36", "none", "plat"),
+ tags.Tag("py35", "none", "plat"),
+ tags.Tag("py34", "none", "plat"),
+ tags.Tag("py33", "none", "plat"),
+ tags.Tag("py32", "none", "plat"),
+ tags.Tag("py31", "none", "plat"),
+ tags.Tag("py30", "none", "plat"),
+ tags.Tag("cp311", "none", "any"),
+ tags.Tag("py311", "none", "any"),
+ tags.Tag("py3", "none", "any"),
+ tags.Tag("py310", "none", "any"),
+ tags.Tag("py39", "none", "any"),
+ tags.Tag("py38", "none", "any"),
+ tags.Tag("py37", "none", "any"),
+ tags.Tag("py36", "none", "any"),
+ tags.Tag("py35", "none", "any"),
+ tags.Tag("py34", "none", "any"),
+ tags.Tag("py33", "none", "any"),
+ tags.Tag("py32", "none", "any"),
+ tags.Tag("py31", "none", "any"),
+ tags.Tag("py30", "none", "any"),
+ ]
+
+ def test_default_interpreter(self):
+ result = list(tags.compatible_tags((3, 1), platforms=["plat"]))
+ assert result == [
+ tags.Tag("py31", "none", "plat"),
+ tags.Tag("py3", "none", "plat"),
+ tags.Tag("py30", "none", "plat"),
+ tags.Tag("py31", "none", "any"),
+ tags.Tag("py3", "none", "any"),
+ tags.Tag("py30", "none", "any"),
+ ]
+
+ def test_default_platforms(self, monkeypatch):
+ monkeypatch.setattr(tags, "platform_tags", lambda: iter(["plat", "plat2"]))
+ result = list(tags.compatible_tags((3, 1), "cp31"))
+ assert result == [
+ tags.Tag("py31", "none", "plat"),
+ tags.Tag("py31", "none", "plat2"),
+ tags.Tag("py3", "none", "plat"),
+ tags.Tag("py3", "none", "plat2"),
+ tags.Tag("py30", "none", "plat"),
+ tags.Tag("py30", "none", "plat2"),
+ tags.Tag("cp31", "none", "any"),
+ tags.Tag("py31", "none", "any"),
+ tags.Tag("py3", "none", "any"),
+ tags.Tag("py30", "none", "any"),
+ ]
+
+
+class TestSysTags:
+ def teardown_method(self):
+ # Clear the version cache
+ tags._glibc_version = []
+
+ @pytest.mark.parametrize(
+ "name,expected",
+ [("CPython", "cp"), ("PyPy", "pp"), ("Jython", "jy"), ("IronPython", "ip")],
+ )
+ def test_interpreter_name(self, name, expected, mock_interpreter_name):
+ mock_interpreter_name(name)
+ assert tags.interpreter_name() == expected
+
+ def test_iterator(self):
+ assert isinstance(tags.sys_tags(), collections.abc.Iterator)
+
+ def test_mac_cpython(self, mock_interpreter_name, monkeypatch):
+ if mock_interpreter_name("CPython"):
+ monkeypatch.setattr(tags, "_cpython_abis", lambda *a: ["cp33m"])
+ if platform.system() != "Darwin":
+ monkeypatch.setattr(platform, "system", lambda: "Darwin")
+ monkeypatch.setattr(tags, "mac_platforms", lambda: ["macosx_10_5_x86_64"])
+ abis = tags._cpython_abis(sys.version_info[:2])
+ platforms = list(tags.mac_platforms())
+ result = list(tags.sys_tags())
+ assert len(abis) == 1
+ assert result[0] == tags.Tag(
+ "cp" + tags._version_nodot(sys.version_info[:2]), abis[0], platforms[0]
+ )
+ assert result[-1] == tags.Tag(
+ "py" + tags._version_nodot((sys.version_info[0], 0)), "none", "any"
+ )
+
+ def test_windows_cpython(self, mock_interpreter_name, monkeypatch):
+ if mock_interpreter_name("CPython"):
+ monkeypatch.setattr(tags, "_cpython_abis", lambda *a: ["cp33m"])
+ if platform.system() != "Windows":
+ monkeypatch.setattr(platform, "system", lambda: "Windows")
+ monkeypatch.setattr(tags, "_generic_platforms", lambda: ["win_amd64"])
+ abis = list(tags._cpython_abis(sys.version_info[:2]))
+ platforms = list(tags._generic_platforms())
+ result = list(tags.sys_tags())
+ interpreter = "cp" + tags._version_nodot(sys.version_info[:2])
+ assert len(abis) == 1
+ expected = tags.Tag(interpreter, abis[0], platforms[0])
+ assert result[0] == expected
+ expected = tags.Tag(
+ "py" + tags._version_nodot((sys.version_info[0], 0)), "none", "any"
+ )
+ assert result[-1] == expected
+
+ def test_linux_cpython(self, mock_interpreter_name, monkeypatch):
+ if mock_interpreter_name("CPython"):
+ monkeypatch.setattr(tags, "_cpython_abis", lambda *a: ["cp33m"])
+ if platform.system() != "Linux":
+ monkeypatch.setattr(platform, "system", lambda: "Linux")
+ monkeypatch.setattr(tags, "_linux_platforms", lambda: ["linux_x86_64"])
+ abis = list(tags._cpython_abis(sys.version_info[:2]))
+ platforms = list(tags._linux_platforms())
+ result = list(tags.sys_tags())
+ expected_interpreter = "cp" + tags._version_nodot(sys.version_info[:2])
+ assert len(abis) == 1
+ assert result[0] == tags.Tag(expected_interpreter, abis[0], platforms[0])
+ expected = tags.Tag(
+ "py" + tags._version_nodot((sys.version_info[0], 0)), "none", "any"
+ )
+ assert result[-1] == expected
+
+ def test_generic(self, monkeypatch):
+ monkeypatch.setattr(platform, "system", lambda: "Generic")
+ monkeypatch.setattr(tags, "interpreter_name", lambda: "generic")
+
+ result = list(tags.sys_tags())
+ expected = tags.Tag(
+ "py" + tags._version_nodot((sys.version_info[0], 0)), "none", "any"
+ )
+ assert result[-1] == expected
+
+ def test_linux_platforms_manylinux2014_armv6l(self, monkeypatch, manylinux_module):
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: "linux_armv6l")
+ monkeypatch.setattr(os, "confstr", lambda x: "glibc 2.20", raising=False)
+ platforms = list(tags._linux_platforms(is_32bit=True))
+ expected = ["linux_armv6l"]
+ assert platforms == expected
+
+ def test_skip_manylinux_2014(self, monkeypatch, manylinux_module):
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: "linux_ppc64")
+ monkeypatch.setattr(tags._manylinux, "_get_glibc_version", lambda: (2, 20))
+ monkeypatch.setattr(
+ manylinux_module, "manylinux2014_compatible", False, raising=False
+ )
+ expected = [
+ "manylinux_2_20_ppc64",
+ "manylinux_2_19_ppc64",
+ "manylinux_2_18_ppc64",
+ # "manylinux2014_ppc64", # this one is skipped
+ # "manylinux_2_17_ppc64", # this one is also skipped
+ "linux_ppc64",
+ ]
+ platforms = list(tags._linux_platforms())
+ assert platforms == expected
+
+ @pytest.mark.parametrize(
+ "machine, abi, alt_machine",
+ [("x86_64", "x32", "i686"), ("armv7l", "armel", "armv7l")],
+ )
+ def test_linux_platforms_not_manylinux_abi(
+ self, monkeypatch, manylinux_module, machine, abi, alt_machine
+ ):
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: f"linux_{machine}")
+ monkeypatch.setattr(
+ sys,
+ "executable",
+ os.path.join(
+ os.path.dirname(__file__),
+ "manylinux",
+ f"hello-world-{machine}-{abi}",
+ ),
+ )
+ platforms = list(tags._linux_platforms(is_32bit=True))
+ expected = [f"linux_{alt_machine}"]
+ assert platforms == expected
+
+ @pytest.mark.parametrize(
+ "machine, major, minor, tf", [("x86_64", 2, 20, False), ("s390x", 2, 22, True)]
+ )
+ def test_linux_use_manylinux_compatible(
+ self, monkeypatch, manylinux_module, machine, major, minor, tf
+ ):
+ def manylinux_compatible(tag_major, tag_minor, tag_arch):
+ if tag_major == 2 and tag_minor == 22:
+ return tag_arch == "s390x"
+ return False
+
+ monkeypatch.setattr(
+ tags._manylinux,
+ "_get_glibc_version",
+ lambda: (major, minor),
+ )
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: f"linux_{machine}")
+ monkeypatch.setattr(
+ manylinux_module,
+ "manylinux_compatible",
+ manylinux_compatible,
+ raising=False,
+ )
+ platforms = list(tags._linux_platforms(is_32bit=False))
+ if tf:
+ expected = [f"manylinux_2_22_{machine}"]
+ else:
+ expected = []
+ expected.append(f"linux_{machine}")
+ assert platforms == expected
+
+ def test_linux_use_manylinux_compatible_none(self, monkeypatch, manylinux_module):
+ def manylinux_compatible(tag_major, tag_minor, tag_arch):
+ if tag_major == 2 and tag_minor < 25:
+ return False
+ return None
+
+ monkeypatch.setattr(tags._manylinux, "_get_glibc_version", lambda: (2, 30))
+ monkeypatch.setattr(sysconfig, "get_platform", lambda: "linux_x86_64")
+ monkeypatch.setattr(
+ manylinux_module,
+ "manylinux_compatible",
+ manylinux_compatible,
+ raising=False,
+ )
+ platforms = list(tags._linux_platforms(is_32bit=False))
+ expected = [
+ "manylinux_2_30_x86_64",
+ "manylinux_2_29_x86_64",
+ "manylinux_2_28_x86_64",
+ "manylinux_2_27_x86_64",
+ "manylinux_2_26_x86_64",
+ "manylinux_2_25_x86_64",
+ "linux_x86_64",
+ ]
+ assert platforms == expected
+
+ def test_pypy_first_none_any_tag(self, monkeypatch):
+ # When building the complete list of pypy tags, make sure the first
+ # <interpreter>-none-any one is pp3-none-any
+ monkeypatch.setattr(tags, "interpreter_name", lambda: "pp")
+
+ for tag in tags.sys_tags():
+ if tag.abi == "none" and tag.platform == "any":
+ break
+
+ assert tag == tags.Tag("pp3", "none", "any")
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/test_utils.py b/testing/web-platform/tests/tools/third_party/packaging/tests/test_utils.py
new file mode 100644
index 0000000000..be52d67048
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/test_utils.py
@@ -0,0 +1,124 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import pytest
+
+from packaging.tags import Tag
+from packaging.utils import (
+ InvalidSdistFilename,
+ InvalidWheelFilename,
+ canonicalize_name,
+ canonicalize_version,
+ parse_sdist_filename,
+ parse_wheel_filename,
+)
+from packaging.version import Version
+
+
+@pytest.mark.parametrize(
+ ("name", "expected"),
+ [
+ ("foo", "foo"),
+ ("Foo", "foo"),
+ ("fOo", "foo"),
+ ("foo.bar", "foo-bar"),
+ ("Foo.Bar", "foo-bar"),
+ ("Foo.....Bar", "foo-bar"),
+ ("foo_bar", "foo-bar"),
+ ("foo___bar", "foo-bar"),
+ ("foo-bar", "foo-bar"),
+ ("foo----bar", "foo-bar"),
+ ],
+)
+def test_canonicalize_name(name, expected):
+ assert canonicalize_name(name) == expected
+
+
+@pytest.mark.parametrize(
+ ("version", "expected"),
+ [
+ (Version("1.4.0"), "1.4"),
+ ("1.4.0", "1.4"),
+ ("1.40.0", "1.40"),
+ ("1.4.0.0.00.000.0000", "1.4"),
+ ("1.0", "1"),
+ ("1.0+abc", "1+abc"),
+ ("1.0.dev0", "1.dev0"),
+ ("1.0.post0", "1.post0"),
+ ("1.0a0", "1a0"),
+ ("1.0rc0", "1rc0"),
+ ("100!0.0", "100!0"),
+ ("1.0.1-test7", "1.0.1-test7"), # LegacyVersion is unchanged
+ ],
+)
+def test_canonicalize_version(version, expected):
+ assert canonicalize_version(version) == expected
+
+
+@pytest.mark.parametrize(
+ ("filename", "name", "version", "build", "tags"),
+ [
+ (
+ "foo-1.0-py3-none-any.whl",
+ "foo",
+ Version("1.0"),
+ (),
+ {Tag("py3", "none", "any")},
+ ),
+ (
+ "some_PACKAGE-1.0-py3-none-any.whl",
+ "some-package",
+ Version("1.0"),
+ (),
+ {Tag("py3", "none", "any")},
+ ),
+ (
+ "foo-1.0-1000-py3-none-any.whl",
+ "foo",
+ Version("1.0"),
+ (1000, ""),
+ {Tag("py3", "none", "any")},
+ ),
+ (
+ "foo-1.0-1000abc-py3-none-any.whl",
+ "foo",
+ Version("1.0"),
+ (1000, "abc"),
+ {Tag("py3", "none", "any")},
+ ),
+ ],
+)
+def test_parse_wheel_filename(filename, name, version, build, tags):
+ assert parse_wheel_filename(filename) == (name, version, build, tags)
+
+
+@pytest.mark.parametrize(
+ ("filename"),
+ [
+ ("foo-1.0.whl"), # Missing tags
+ ("foo-1.0-py3-none-any.wheel"), # Incorrect file extension (`.wheel`)
+ ("foo__bar-1.0-py3-none-any.whl"), # Invalid name (`__`)
+ ("foo#bar-1.0-py3-none-any.whl"), # Invalid name (`#`)
+ # Build number doesn't start with a digit (`abc`)
+ ("foo-1.0-abc-py3-none-any.whl"),
+ ("foo-1.0-200-py3-none-any-junk.whl"), # Too many dashes (`-junk`)
+ ],
+)
+def test_parse_wheel_invalid_filename(filename):
+ with pytest.raises(InvalidWheelFilename):
+ parse_wheel_filename(filename)
+
+
+@pytest.mark.parametrize(
+ ("filename", "name", "version"),
+ [("foo-1.0.tar.gz", "foo", Version("1.0")), ("foo-1.0.zip", "foo", Version("1.0"))],
+)
+def test_parse_sdist_filename(filename, name, version):
+ assert parse_sdist_filename(filename) == (name, version)
+
+
+@pytest.mark.parametrize(("filename"), [("foo-1.0.xz"), ("foo1.0.tar.gz")])
+def test_parse_sdist_invalid_filename(filename):
+ with pytest.raises(InvalidSdistFilename):
+ parse_sdist_filename(filename)
diff --git a/testing/web-platform/tests/tools/third_party/packaging/tests/test_version.py b/testing/web-platform/tests/tools/third_party/packaging/tests/test_version.py
new file mode 100644
index 0000000000..5f2251e11e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/packaging/tests/test_version.py
@@ -0,0 +1,904 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import itertools
+import operator
+import warnings
+
+import pretend
+import pytest
+
+from packaging.version import InvalidVersion, LegacyVersion, Version, parse
+
+
+@pytest.mark.parametrize(
+ ("version", "klass"), [("1.0", Version), ("1-1-1", LegacyVersion)]
+)
+def test_parse(version, klass):
+ assert isinstance(parse(version), klass)
+
+
+# This list must be in the correct sorting order
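+# (the comparison tests below rely on list position: earlier entries must
+# compare strictly less than later ones)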
+VERSIONS = [
+ # Implicit epoch of 0
+ "1.0.dev456",
+ "1.0a1",
+ "1.0a2.dev456",
+ "1.0a12.dev456",
+ "1.0a12",
+ "1.0b1.dev456",
+ "1.0b2",
+ "1.0b2.post345.dev456",
+ "1.0b2.post345",
+ "1.0b2-346",
+ "1.0c1.dev456",
+ "1.0c1",
+ "1.0rc2",
+ "1.0c3",
+ "1.0",
+ "1.0.post456.dev34",
+ "1.0.post456",
+ "1.1.dev1",
+ "1.2+123abc",
+ "1.2+123abc456",
+ "1.2+abc",
+ "1.2+abc123",
+ "1.2+abc123def",
+ "1.2+1234.abc",
+ "1.2+123456",
+ "1.2.r32+123456",
+ "1.2.rev33+123456",
+ # Explicit epoch of 1
+ "1!1.0.dev456",
+ "1!1.0a1",
+ "1!1.0a2.dev456",
+ "1!1.0a12.dev456",
+ "1!1.0a12",
+ "1!1.0b1.dev456",
+ "1!1.0b2",
+ "1!1.0b2.post345.dev456",
+ "1!1.0b2.post345",
+ "1!1.0b2-346",
+ "1!1.0c1.dev456",
+ "1!1.0c1",
+ "1!1.0rc2",
+ "1!1.0c3",
+ "1!1.0",
+ "1!1.0.post456.dev34",
+ "1!1.0.post456",
+ "1!1.1.dev1",
+ "1!1.2+123abc",
+ "1!1.2+123abc456",
+ "1!1.2+abc",
+ "1!1.2+abc123",
+ "1!1.2+abc123def",
+ "1!1.2+1234.abc",
+ "1!1.2+123456",
+ "1!1.2.r32+123456",
+ "1!1.2.rev33+123456",
+]
+
+
+class TestVersion:
+ @pytest.mark.parametrize("version", VERSIONS)
+ def test_valid_versions(self, version):
+ Version(version)
+
+ @pytest.mark.parametrize(
+ "version",
+ [
+            # Nonsensical versions should be invalid
+ "french toast",
+ # Versions with invalid local versions
+ "1.0+a+",
+ "1.0++",
+ "1.0+_foobar",
+ "1.0+foo&asd",
+ "1.0+1+1",
+ ],
+ )
+ def test_invalid_versions(self, version):
+ with pytest.raises(InvalidVersion):
+ Version(version)
+
+ @pytest.mark.parametrize(
+ ("version", "normalized"),
+ [
+ # Various development release incarnations
+ ("1.0dev", "1.0.dev0"),
+ ("1.0.dev", "1.0.dev0"),
+ ("1.0dev1", "1.0.dev1"),
+ ("1.0dev", "1.0.dev0"),
+ ("1.0-dev", "1.0.dev0"),
+ ("1.0-dev1", "1.0.dev1"),
+ ("1.0DEV", "1.0.dev0"),
+ ("1.0.DEV", "1.0.dev0"),
+ ("1.0DEV1", "1.0.dev1"),
+ ("1.0DEV", "1.0.dev0"),
+ ("1.0.DEV1", "1.0.dev1"),
+ ("1.0-DEV", "1.0.dev0"),
+ ("1.0-DEV1", "1.0.dev1"),
+ # Various alpha incarnations
+ ("1.0a", "1.0a0"),
+ ("1.0.a", "1.0a0"),
+ ("1.0.a1", "1.0a1"),
+ ("1.0-a", "1.0a0"),
+ ("1.0-a1", "1.0a1"),
+ ("1.0alpha", "1.0a0"),
+ ("1.0.alpha", "1.0a0"),
+ ("1.0.alpha1", "1.0a1"),
+ ("1.0-alpha", "1.0a0"),
+ ("1.0-alpha1", "1.0a1"),
+ ("1.0A", "1.0a0"),
+ ("1.0.A", "1.0a0"),
+ ("1.0.A1", "1.0a1"),
+ ("1.0-A", "1.0a0"),
+ ("1.0-A1", "1.0a1"),
+ ("1.0ALPHA", "1.0a0"),
+ ("1.0.ALPHA", "1.0a0"),
+ ("1.0.ALPHA1", "1.0a1"),
+ ("1.0-ALPHA", "1.0a0"),
+ ("1.0-ALPHA1", "1.0a1"),
+ # Various beta incarnations
+ ("1.0b", "1.0b0"),
+ ("1.0.b", "1.0b0"),
+ ("1.0.b1", "1.0b1"),
+ ("1.0-b", "1.0b0"),
+ ("1.0-b1", "1.0b1"),
+ ("1.0beta", "1.0b0"),
+ ("1.0.beta", "1.0b0"),
+ ("1.0.beta1", "1.0b1"),
+ ("1.0-beta", "1.0b0"),
+ ("1.0-beta1", "1.0b1"),
+ ("1.0B", "1.0b0"),
+ ("1.0.B", "1.0b0"),
+ ("1.0.B1", "1.0b1"),
+ ("1.0-B", "1.0b0"),
+ ("1.0-B1", "1.0b1"),
+ ("1.0BETA", "1.0b0"),
+ ("1.0.BETA", "1.0b0"),
+ ("1.0.BETA1", "1.0b1"),
+ ("1.0-BETA", "1.0b0"),
+ ("1.0-BETA1", "1.0b1"),
+ # Various release candidate incarnations
+ ("1.0c", "1.0rc0"),
+ ("1.0.c", "1.0rc0"),
+ ("1.0.c1", "1.0rc1"),
+ ("1.0-c", "1.0rc0"),
+ ("1.0-c1", "1.0rc1"),
+ ("1.0rc", "1.0rc0"),
+ ("1.0.rc", "1.0rc0"),
+ ("1.0.rc1", "1.0rc1"),
+ ("1.0-rc", "1.0rc0"),
+ ("1.0-rc1", "1.0rc1"),
+ ("1.0C", "1.0rc0"),
+ ("1.0.C", "1.0rc0"),
+ ("1.0.C1", "1.0rc1"),
+ ("1.0-C", "1.0rc0"),
+ ("1.0-C1", "1.0rc1"),
+ ("1.0RC", "1.0rc0"),
+ ("1.0.RC", "1.0rc0"),
+ ("1.0.RC1", "1.0rc1"),
+ ("1.0-RC", "1.0rc0"),
+ ("1.0-RC1", "1.0rc1"),
+ # Various post release incarnations
+ ("1.0post", "1.0.post0"),
+ ("1.0.post", "1.0.post0"),
+ ("1.0post1", "1.0.post1"),
+ ("1.0post", "1.0.post0"),
+ ("1.0-post", "1.0.post0"),
+ ("1.0-post1", "1.0.post1"),
+ ("1.0POST", "1.0.post0"),
+ ("1.0.POST", "1.0.post0"),
+ ("1.0POST1", "1.0.post1"),
+ ("1.0POST", "1.0.post0"),
+ ("1.0r", "1.0.post0"),
+ ("1.0rev", "1.0.post0"),
+ ("1.0.POST1", "1.0.post1"),
+ ("1.0.r1", "1.0.post1"),
+ ("1.0.rev1", "1.0.post1"),
+ ("1.0-POST", "1.0.post0"),
+ ("1.0-POST1", "1.0.post1"),
+ ("1.0-5", "1.0.post5"),
+ ("1.0-r5", "1.0.post5"),
+ ("1.0-rev5", "1.0.post5"),
+ # Local version case insensitivity
+ ("1.0+AbC", "1.0+abc"),
+ # Integer Normalization
+ ("1.01", "1.1"),
+ ("1.0a05", "1.0a5"),
+ ("1.0b07", "1.0b7"),
+ ("1.0c056", "1.0rc56"),
+ ("1.0rc09", "1.0rc9"),
+ ("1.0.post000", "1.0.post0"),
+ ("1.1.dev09000", "1.1.dev9000"),
+ ("00!1.2", "1.2"),
+ ("0100!0.0", "100!0.0"),
+ # Various other normalizations
+ ("v1.0", "1.0"),
+ (" v1.0\t\n", "1.0"),
+ ],
+ )
+ def test_normalized_versions(self, version, normalized):
+ assert str(Version(version)) == normalized
+
+ @pytest.mark.parametrize(
+ ("version", "expected"),
+ [
+ ("1.0.dev456", "1.0.dev456"),
+ ("1.0a1", "1.0a1"),
+ ("1.0a2.dev456", "1.0a2.dev456"),
+ ("1.0a12.dev456", "1.0a12.dev456"),
+ ("1.0a12", "1.0a12"),
+ ("1.0b1.dev456", "1.0b1.dev456"),
+ ("1.0b2", "1.0b2"),
+ ("1.0b2.post345.dev456", "1.0b2.post345.dev456"),
+ ("1.0b2.post345", "1.0b2.post345"),
+ ("1.0rc1.dev456", "1.0rc1.dev456"),
+ ("1.0rc1", "1.0rc1"),
+ ("1.0", "1.0"),
+ ("1.0.post456.dev34", "1.0.post456.dev34"),
+ ("1.0.post456", "1.0.post456"),
+ ("1.0.1", "1.0.1"),
+ ("0!1.0.2", "1.0.2"),
+ ("1.0.3+7", "1.0.3+7"),
+ ("0!1.0.4+8.0", "1.0.4+8.0"),
+ ("1.0.5+9.5", "1.0.5+9.5"),
+ ("1.2+1234.abc", "1.2+1234.abc"),
+ ("1.2+123456", "1.2+123456"),
+ ("1.2+123abc", "1.2+123abc"),
+ ("1.2+123abc456", "1.2+123abc456"),
+ ("1.2+abc", "1.2+abc"),
+ ("1.2+abc123", "1.2+abc123"),
+ ("1.2+abc123def", "1.2+abc123def"),
+ ("1.1.dev1", "1.1.dev1"),
+ ("7!1.0.dev456", "7!1.0.dev456"),
+ ("7!1.0a1", "7!1.0a1"),
+ ("7!1.0a2.dev456", "7!1.0a2.dev456"),
+ ("7!1.0a12.dev456", "7!1.0a12.dev456"),
+ ("7!1.0a12", "7!1.0a12"),
+ ("7!1.0b1.dev456", "7!1.0b1.dev456"),
+ ("7!1.0b2", "7!1.0b2"),
+ ("7!1.0b2.post345.dev456", "7!1.0b2.post345.dev456"),
+ ("7!1.0b2.post345", "7!1.0b2.post345"),
+ ("7!1.0rc1.dev456", "7!1.0rc1.dev456"),
+ ("7!1.0rc1", "7!1.0rc1"),
+ ("7!1.0", "7!1.0"),
+ ("7!1.0.post456.dev34", "7!1.0.post456.dev34"),
+ ("7!1.0.post456", "7!1.0.post456"),
+ ("7!1.0.1", "7!1.0.1"),
+ ("7!1.0.2", "7!1.0.2"),
+ ("7!1.0.3+7", "7!1.0.3+7"),
+ ("7!1.0.4+8.0", "7!1.0.4+8.0"),
+ ("7!1.0.5+9.5", "7!1.0.5+9.5"),
+ ("7!1.1.dev1", "7!1.1.dev1"),
+ ],
+ )
+ def test_version_str_repr(self, version, expected):
+ assert str(Version(version)) == expected
+ assert repr(Version(version)) == f"<Version({expected!r})>"
+
+ def test_version_rc_and_c_equals(self):
+ assert Version("1.0rc1") == Version("1.0c1")
+
+ @pytest.mark.parametrize("version", VERSIONS)
+ def test_version_hash(self, version):
+ assert hash(Version(version)) == hash(Version(version))
+
+ @pytest.mark.parametrize(
+ ("version", "public"),
+ [
+ ("1.0", "1.0"),
+ ("1.0.dev0", "1.0.dev0"),
+ ("1.0.dev6", "1.0.dev6"),
+ ("1.0a1", "1.0a1"),
+ ("1.0a1.post5", "1.0a1.post5"),
+ ("1.0a1.post5.dev6", "1.0a1.post5.dev6"),
+ ("1.0rc4", "1.0rc4"),
+ ("1.0.post5", "1.0.post5"),
+ ("1!1.0", "1!1.0"),
+ ("1!1.0.dev6", "1!1.0.dev6"),
+ ("1!1.0a1", "1!1.0a1"),
+ ("1!1.0a1.post5", "1!1.0a1.post5"),
+ ("1!1.0a1.post5.dev6", "1!1.0a1.post5.dev6"),
+ ("1!1.0rc4", "1!1.0rc4"),
+ ("1!1.0.post5", "1!1.0.post5"),
+ ("1.0+deadbeef", "1.0"),
+ ("1.0.dev6+deadbeef", "1.0.dev6"),
+ ("1.0a1+deadbeef", "1.0a1"),
+ ("1.0a1.post5+deadbeef", "1.0a1.post5"),
+ ("1.0a1.post5.dev6+deadbeef", "1.0a1.post5.dev6"),
+ ("1.0rc4+deadbeef", "1.0rc4"),
+ ("1.0.post5+deadbeef", "1.0.post5"),
+ ("1!1.0+deadbeef", "1!1.0"),
+ ("1!1.0.dev6+deadbeef", "1!1.0.dev6"),
+ ("1!1.0a1+deadbeef", "1!1.0a1"),
+ ("1!1.0a1.post5+deadbeef", "1!1.0a1.post5"),
+ ("1!1.0a1.post5.dev6+deadbeef", "1!1.0a1.post5.dev6"),
+ ("1!1.0rc4+deadbeef", "1!1.0rc4"),
+ ("1!1.0.post5+deadbeef", "1!1.0.post5"),
+ ],
+ )
+ def test_version_public(self, version, public):
+ assert Version(version).public == public
+
+ @pytest.mark.parametrize(
+ ("version", "base_version"),
+ [
+ ("1.0", "1.0"),
+ ("1.0.dev0", "1.0"),
+ ("1.0.dev6", "1.0"),
+ ("1.0a1", "1.0"),
+ ("1.0a1.post5", "1.0"),
+ ("1.0a1.post5.dev6", "1.0"),
+ ("1.0rc4", "1.0"),
+ ("1.0.post5", "1.0"),
+ ("1!1.0", "1!1.0"),
+ ("1!1.0.dev6", "1!1.0"),
+ ("1!1.0a1", "1!1.0"),
+ ("1!1.0a1.post5", "1!1.0"),
+ ("1!1.0a1.post5.dev6", "1!1.0"),
+ ("1!1.0rc4", "1!1.0"),
+ ("1!1.0.post5", "1!1.0"),
+ ("1.0+deadbeef", "1.0"),
+ ("1.0.dev6+deadbeef", "1.0"),
+ ("1.0a1+deadbeef", "1.0"),
+ ("1.0a1.post5+deadbeef", "1.0"),
+ ("1.0a1.post5.dev6+deadbeef", "1.0"),
+ ("1.0rc4+deadbeef", "1.0"),
+ ("1.0.post5+deadbeef", "1.0"),
+ ("1!1.0+deadbeef", "1!1.0"),
+ ("1!1.0.dev6+deadbeef", "1!1.0"),
+ ("1!1.0a1+deadbeef", "1!1.0"),
+ ("1!1.0a1.post5+deadbeef", "1!1.0"),
+ ("1!1.0a1.post5.dev6+deadbeef", "1!1.0"),
+ ("1!1.0rc4+deadbeef", "1!1.0"),
+ ("1!1.0.post5+deadbeef", "1!1.0"),
+ ],
+ )
+ def test_version_base_version(self, version, base_version):
+ assert Version(version).base_version == base_version
+
+ @pytest.mark.parametrize(
+ ("version", "epoch"),
+ [
+ ("1.0", 0),
+ ("1.0.dev0", 0),
+ ("1.0.dev6", 0),
+ ("1.0a1", 0),
+ ("1.0a1.post5", 0),
+ ("1.0a1.post5.dev6", 0),
+ ("1.0rc4", 0),
+ ("1.0.post5", 0),
+ ("1!1.0", 1),
+ ("1!1.0.dev6", 1),
+ ("1!1.0a1", 1),
+ ("1!1.0a1.post5", 1),
+ ("1!1.0a1.post5.dev6", 1),
+ ("1!1.0rc4", 1),
+ ("1!1.0.post5", 1),
+ ("1.0+deadbeef", 0),
+ ("1.0.dev6+deadbeef", 0),
+ ("1.0a1+deadbeef", 0),
+ ("1.0a1.post5+deadbeef", 0),
+ ("1.0a1.post5.dev6+deadbeef", 0),
+ ("1.0rc4+deadbeef", 0),
+ ("1.0.post5+deadbeef", 0),
+ ("1!1.0+deadbeef", 1),
+ ("1!1.0.dev6+deadbeef", 1),
+ ("1!1.0a1+deadbeef", 1),
+ ("1!1.0a1.post5+deadbeef", 1),
+ ("1!1.0a1.post5.dev6+deadbeef", 1),
+ ("1!1.0rc4+deadbeef", 1),
+ ("1!1.0.post5+deadbeef", 1),
+ ],
+ )
+ def test_version_epoch(self, version, epoch):
+ assert Version(version).epoch == epoch
+
+ @pytest.mark.parametrize(
+ ("version", "release"),
+ [
+ ("1.0", (1, 0)),
+ ("1.0.dev0", (1, 0)),
+ ("1.0.dev6", (1, 0)),
+ ("1.0a1", (1, 0)),
+ ("1.0a1.post5", (1, 0)),
+ ("1.0a1.post5.dev6", (1, 0)),
+ ("1.0rc4", (1, 0)),
+ ("1.0.post5", (1, 0)),
+ ("1!1.0", (1, 0)),
+ ("1!1.0.dev6", (1, 0)),
+ ("1!1.0a1", (1, 0)),
+ ("1!1.0a1.post5", (1, 0)),
+ ("1!1.0a1.post5.dev6", (1, 0)),
+ ("1!1.0rc4", (1, 0)),
+ ("1!1.0.post5", (1, 0)),
+ ("1.0+deadbeef", (1, 0)),
+ ("1.0.dev6+deadbeef", (1, 0)),
+ ("1.0a1+deadbeef", (1, 0)),
+ ("1.0a1.post5+deadbeef", (1, 0)),
+ ("1.0a1.post5.dev6+deadbeef", (1, 0)),
+ ("1.0rc4+deadbeef", (1, 0)),
+ ("1.0.post5+deadbeef", (1, 0)),
+ ("1!1.0+deadbeef", (1, 0)),
+ ("1!1.0.dev6+deadbeef", (1, 0)),
+ ("1!1.0a1+deadbeef", (1, 0)),
+ ("1!1.0a1.post5+deadbeef", (1, 0)),
+ ("1!1.0a1.post5.dev6+deadbeef", (1, 0)),
+ ("1!1.0rc4+deadbeef", (1, 0)),
+ ("1!1.0.post5+deadbeef", (1, 0)),
+ ],
+ )
+ def test_version_release(self, version, release):
+ assert Version(version).release == release
+
+ @pytest.mark.parametrize(
+ ("version", "local"),
+ [
+ ("1.0", None),
+ ("1.0.dev0", None),
+ ("1.0.dev6", None),
+ ("1.0a1", None),
+ ("1.0a1.post5", None),
+ ("1.0a1.post5.dev6", None),
+ ("1.0rc4", None),
+ ("1.0.post5", None),
+ ("1!1.0", None),
+ ("1!1.0.dev6", None),
+ ("1!1.0a1", None),
+ ("1!1.0a1.post5", None),
+ ("1!1.0a1.post5.dev6", None),
+ ("1!1.0rc4", None),
+ ("1!1.0.post5", None),
+ ("1.0+deadbeef", "deadbeef"),
+ ("1.0.dev6+deadbeef", "deadbeef"),
+ ("1.0a1+deadbeef", "deadbeef"),
+ ("1.0a1.post5+deadbeef", "deadbeef"),
+ ("1.0a1.post5.dev6+deadbeef", "deadbeef"),
+ ("1.0rc4+deadbeef", "deadbeef"),
+ ("1.0.post5+deadbeef", "deadbeef"),
+ ("1!1.0+deadbeef", "deadbeef"),
+ ("1!1.0.dev6+deadbeef", "deadbeef"),
+ ("1!1.0a1+deadbeef", "deadbeef"),
+ ("1!1.0a1.post5+deadbeef", "deadbeef"),
+ ("1!1.0a1.post5.dev6+deadbeef", "deadbeef"),
+ ("1!1.0rc4+deadbeef", "deadbeef"),
+ ("1!1.0.post5+deadbeef", "deadbeef"),
+ ],
+ )
+ def test_version_local(self, version, local):
+ assert Version(version).local == local
+
+ @pytest.mark.parametrize(
+ ("version", "pre"),
+ [
+ ("1.0", None),
+ ("1.0.dev0", None),
+ ("1.0.dev6", None),
+ ("1.0a1", ("a", 1)),
+ ("1.0a1.post5", ("a", 1)),
+ ("1.0a1.post5.dev6", ("a", 1)),
+ ("1.0rc4", ("rc", 4)),
+ ("1.0.post5", None),
+ ("1!1.0", None),
+ ("1!1.0.dev6", None),
+ ("1!1.0a1", ("a", 1)),
+ ("1!1.0a1.post5", ("a", 1)),
+ ("1!1.0a1.post5.dev6", ("a", 1)),
+ ("1!1.0rc4", ("rc", 4)),
+ ("1!1.0.post5", None),
+ ("1.0+deadbeef", None),
+ ("1.0.dev6+deadbeef", None),
+ ("1.0a1+deadbeef", ("a", 1)),
+ ("1.0a1.post5+deadbeef", ("a", 1)),
+ ("1.0a1.post5.dev6+deadbeef", ("a", 1)),
+ ("1.0rc4+deadbeef", ("rc", 4)),
+ ("1.0.post5+deadbeef", None),
+ ("1!1.0+deadbeef", None),
+ ("1!1.0.dev6+deadbeef", None),
+ ("1!1.0a1+deadbeef", ("a", 1)),
+ ("1!1.0a1.post5+deadbeef", ("a", 1)),
+ ("1!1.0a1.post5.dev6+deadbeef", ("a", 1)),
+ ("1!1.0rc4+deadbeef", ("rc", 4)),
+ ("1!1.0.post5+deadbeef", None),
+ ],
+ )
+ def test_version_pre(self, version, pre):
+ assert Version(version).pre == pre
+
+ @pytest.mark.parametrize(
+ ("version", "expected"),
+ [
+ ("1.0.dev0", True),
+ ("1.0.dev1", True),
+ ("1.0a1.dev1", True),
+ ("1.0b1.dev1", True),
+ ("1.0c1.dev1", True),
+ ("1.0rc1.dev1", True),
+ ("1.0a1", True),
+ ("1.0b1", True),
+ ("1.0c1", True),
+ ("1.0rc1", True),
+ ("1.0a1.post1.dev1", True),
+ ("1.0b1.post1.dev1", True),
+ ("1.0c1.post1.dev1", True),
+ ("1.0rc1.post1.dev1", True),
+ ("1.0a1.post1", True),
+ ("1.0b1.post1", True),
+ ("1.0c1.post1", True),
+ ("1.0rc1.post1", True),
+ ("1.0", False),
+ ("1.0+dev", False),
+ ("1.0.post1", False),
+ ("1.0.post1+dev", False),
+ ],
+ )
+ def test_version_is_prerelease(self, version, expected):
+ assert Version(version).is_prerelease is expected
+
+ @pytest.mark.parametrize(
+ ("version", "dev"),
+ [
+ ("1.0", None),
+ ("1.0.dev0", 0),
+ ("1.0.dev6", 6),
+ ("1.0a1", None),
+ ("1.0a1.post5", None),
+ ("1.0a1.post5.dev6", 6),
+ ("1.0rc4", None),
+ ("1.0.post5", None),
+ ("1!1.0", None),
+ ("1!1.0.dev6", 6),
+ ("1!1.0a1", None),
+ ("1!1.0a1.post5", None),
+ ("1!1.0a1.post5.dev6", 6),
+ ("1!1.0rc4", None),
+ ("1!1.0.post5", None),
+ ("1.0+deadbeef", None),
+ ("1.0.dev6+deadbeef", 6),
+ ("1.0a1+deadbeef", None),
+ ("1.0a1.post5+deadbeef", None),
+ ("1.0a1.post5.dev6+deadbeef", 6),
+ ("1.0rc4+deadbeef", None),
+ ("1.0.post5+deadbeef", None),
+ ("1!1.0+deadbeef", None),
+ ("1!1.0.dev6+deadbeef", 6),
+ ("1!1.0a1+deadbeef", None),
+ ("1!1.0a1.post5+deadbeef", None),
+ ("1!1.0a1.post5.dev6+deadbeef", 6),
+ ("1!1.0rc4+deadbeef", None),
+ ("1!1.0.post5+deadbeef", None),
+ ],
+ )
+ def test_version_dev(self, version, dev):
+ assert Version(version).dev == dev
+
+ @pytest.mark.parametrize(
+ ("version", "expected"),
+ [
+ ("1.0", False),
+ ("1.0.dev0", True),
+ ("1.0.dev6", True),
+ ("1.0a1", False),
+ ("1.0a1.post5", False),
+ ("1.0a1.post5.dev6", True),
+ ("1.0rc4", False),
+ ("1.0.post5", False),
+ ("1!1.0", False),
+ ("1!1.0.dev6", True),
+ ("1!1.0a1", False),
+ ("1!1.0a1.post5", False),
+ ("1!1.0a1.post5.dev6", True),
+ ("1!1.0rc4", False),
+ ("1!1.0.post5", False),
+ ("1.0+deadbeef", False),
+ ("1.0.dev6+deadbeef", True),
+ ("1.0a1+deadbeef", False),
+ ("1.0a1.post5+deadbeef", False),
+ ("1.0a1.post5.dev6+deadbeef", True),
+ ("1.0rc4+deadbeef", False),
+ ("1.0.post5+deadbeef", False),
+ ("1!1.0+deadbeef", False),
+ ("1!1.0.dev6+deadbeef", True),
+ ("1!1.0a1+deadbeef", False),
+ ("1!1.0a1.post5+deadbeef", False),
+ ("1!1.0a1.post5.dev6+deadbeef", True),
+ ("1!1.0rc4+deadbeef", False),
+ ("1!1.0.post5+deadbeef", False),
+ ],
+ )
+ def test_version_is_devrelease(self, version, expected):
+ assert Version(version).is_devrelease is expected
+
+ @pytest.mark.parametrize(
+ ("version", "post"),
+ [
+ ("1.0", None),
+ ("1.0.dev0", None),
+ ("1.0.dev6", None),
+ ("1.0a1", None),
+ ("1.0a1.post5", 5),
+ ("1.0a1.post5.dev6", 5),
+ ("1.0rc4", None),
+ ("1.0.post5", 5),
+ ("1!1.0", None),
+ ("1!1.0.dev6", None),
+ ("1!1.0a1", None),
+ ("1!1.0a1.post5", 5),
+ ("1!1.0a1.post5.dev6", 5),
+ ("1!1.0rc4", None),
+ ("1!1.0.post5", 5),
+ ("1.0+deadbeef", None),
+ ("1.0.dev6+deadbeef", None),
+ ("1.0a1+deadbeef", None),
+ ("1.0a1.post5+deadbeef", 5),
+ ("1.0a1.post5.dev6+deadbeef", 5),
+ ("1.0rc4+deadbeef", None),
+ ("1.0.post5+deadbeef", 5),
+ ("1!1.0+deadbeef", None),
+ ("1!1.0.dev6+deadbeef", None),
+ ("1!1.0a1+deadbeef", None),
+ ("1!1.0a1.post5+deadbeef", 5),
+ ("1!1.0a1.post5.dev6+deadbeef", 5),
+ ("1!1.0rc4+deadbeef", None),
+ ("1!1.0.post5+deadbeef", 5),
+ ],
+ )
+ def test_version_post(self, version, post):
+ assert Version(version).post == post
+
+ @pytest.mark.parametrize(
+ ("version", "expected"),
+ [
+ ("1.0.dev1", False),
+ ("1.0", False),
+ ("1.0+foo", False),
+ ("1.0.post1.dev1", True),
+ ("1.0.post1", True),
+ ],
+ )
+ def test_version_is_postrelease(self, version, expected):
+ assert Version(version).is_postrelease is expected
+
+ @pytest.mark.parametrize(
+ ("left", "right", "op"),
+ # Below we'll generate every possible combination of VERSIONS that
+ # should be True for the given operator
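+        # Each inner list pairs a version with every version that should
+        # satisfy the given operator; itertools.chain(*...) flattens them into
+        # the (left, right, op) triples consumed by parametrize.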
+ itertools.chain(
+ *
+ # Verify that the less than (<) operator works correctly
+ [
+ [(x, y, operator.lt) for y in VERSIONS[i + 1 :]]
+ for i, x in enumerate(VERSIONS)
+ ]
+ +
+ # Verify that the less than equal (<=) operator works correctly
+ [
+ [(x, y, operator.le) for y in VERSIONS[i:]]
+ for i, x in enumerate(VERSIONS)
+ ]
+ +
+ # Verify that the equal (==) operator works correctly
+ [[(x, x, operator.eq) for x in VERSIONS]]
+ +
+ # Verify that the not equal (!=) operator works correctly
+ [
+ [(x, y, operator.ne) for j, y in enumerate(VERSIONS) if i != j]
+ for i, x in enumerate(VERSIONS)
+ ]
+ +
+ # Verify that the greater than equal (>=) operator works correctly
+ [
+ [(x, y, operator.ge) for y in VERSIONS[: i + 1]]
+ for i, x in enumerate(VERSIONS)
+ ]
+ +
+ # Verify that the greater than (>) operator works correctly
+ [
+ [(x, y, operator.gt) for y in VERSIONS[:i]]
+ for i, x in enumerate(VERSIONS)
+ ]
+ ),
+ )
+ def test_comparison_true(self, left, right, op):
+ assert op(Version(left), Version(right))
+
+ @pytest.mark.parametrize(
+ ("left", "right", "op"),
+ # Below we'll generate every possible combination of VERSIONS that
+ # should be False for the given operator
+ itertools.chain(
+ *
+ # Verify that the less than (<) operator works correctly
+ [
+ [(x, y, operator.lt) for y in VERSIONS[: i + 1]]
+ for i, x in enumerate(VERSIONS)
+ ]
+ +
+ # Verify that the less than equal (<=) operator works correctly
+ [
+ [(x, y, operator.le) for y in VERSIONS[:i]]
+ for i, x in enumerate(VERSIONS)
+ ]
+ +
+ # Verify that the equal (==) operator works correctly
+ [
+ [(x, y, operator.eq) for j, y in enumerate(VERSIONS) if i != j]
+ for i, x in enumerate(VERSIONS)
+ ]
+ +
+ # Verify that the not equal (!=) operator works correctly
+ [[(x, x, operator.ne) for x in VERSIONS]]
+ +
+ # Verify that the greater than equal (>=) operator works correctly
+ [
+ [(x, y, operator.ge) for y in VERSIONS[i + 1 :]]
+ for i, x in enumerate(VERSIONS)
+ ]
+ +
+ # Verify that the greater than (>) operator works correctly
+ [
+ [(x, y, operator.gt) for y in VERSIONS[i:]]
+ for i, x in enumerate(VERSIONS)
+ ]
+ ),
+ )
+ def test_comparison_false(self, left, right, op):
+ assert not op(Version(left), Version(right))
+
+ @pytest.mark.parametrize("op", ["lt", "le", "eq", "ge", "gt", "ne"])
+ def test_dunder_op_returns_notimplemented(self, op):
+ method = getattr(Version, f"__{op}__")
+ assert method(Version("1"), 1) is NotImplemented
+
+ @pytest.mark.parametrize(("op", "expected"), [("eq", False), ("ne", True)])
+ def test_compare_other(self, op, expected):
+ other = pretend.stub(**{f"__{op}__": lambda other: NotImplemented})
+
+ assert getattr(operator, op)(Version("1"), other) is expected
+
+ def test_compare_legacyversion_version(self):
+ result = sorted([Version("0"), LegacyVersion("1")])
+ assert result == [LegacyVersion("1"), Version("0")]
+
+ def test_major_version(self):
+ assert Version("2.1.0").major == 2
+
+ def test_minor_version(self):
+ assert Version("2.1.0").minor == 1
+ assert Version("2").minor == 0
+
+ def test_micro_version(self):
+ assert Version("2.1.3").micro == 3
+ assert Version("2.1").micro == 0
+ assert Version("2").micro == 0
+
+
+LEGACY_VERSIONS = ["foobar", "a cat is fine too", "lolwut", "1-0", "2.0-a1"]
+
+
+class TestLegacyVersion:
+ def test_legacy_version_is_deprecated(self):
+ with warnings.catch_warnings(record=True) as w:
+ LegacyVersion("some-legacy-version")
+ assert len(w) == 1
+ assert issubclass(w[0].category, DeprecationWarning)
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_valid_legacy_versions(self, version):
+ LegacyVersion(version)
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_str_repr(self, version):
+ assert str(LegacyVersion(version)) == version
+ assert repr(LegacyVersion(version)) == "<LegacyVersion({})>".format(
+ repr(version)
+ )
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_hash(self, version):
+ assert hash(LegacyVersion(version)) == hash(LegacyVersion(version))
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_public(self, version):
+ assert LegacyVersion(version).public == version
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_base_version(self, version):
+ assert LegacyVersion(version).base_version == version
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_epoch(self, version):
+ assert LegacyVersion(version).epoch == -1
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_release(self, version):
+ assert LegacyVersion(version).release is None
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_local(self, version):
+ assert LegacyVersion(version).local is None
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_pre(self, version):
+ assert LegacyVersion(version).pre is None
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_is_prerelease(self, version):
+ assert not LegacyVersion(version).is_prerelease
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_dev(self, version):
+ assert LegacyVersion(version).dev is None
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_is_devrelease(self, version):
+ assert not LegacyVersion(version).is_devrelease
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_post(self, version):
+ assert LegacyVersion(version).post is None
+
+ @pytest.mark.parametrize("version", VERSIONS + LEGACY_VERSIONS)
+ def test_legacy_version_is_postrelease(self, version):
+ assert not LegacyVersion(version).is_postrelease
+
+ @pytest.mark.parametrize(
+ ("left", "right", "op"),
+ # Below we'll generate every possible combination of
+ # VERSIONS + LEGACY_VERSIONS that should be True for the given operator
+ itertools.chain(
+ *
+ # Verify that the equal (==) operator works correctly
+ [[(x, x, operator.eq) for x in VERSIONS + LEGACY_VERSIONS]]
+ +
+ # Verify that the not equal (!=) operator works correctly
+ [
+ [
+ (x, y, operator.ne)
+ for j, y in enumerate(VERSIONS + LEGACY_VERSIONS)
+ if i != j
+ ]
+ for i, x in enumerate(VERSIONS + LEGACY_VERSIONS)
+ ]
+ ),
+ )
+ def test_comparison_true(self, left, right, op):
+ assert op(LegacyVersion(left), LegacyVersion(right))
+
+ @pytest.mark.parametrize(
+ ("left", "right", "op"),
+ # Below we'll generate every possible combination of
+ # VERSIONS + LEGACY_VERSIONS that should be False for the given
+ # operator
+ itertools.chain(
+ *
+ # Verify that the equal (==) operator works correctly
+ [
+ [
+ (x, y, operator.eq)
+ for j, y in enumerate(VERSIONS + LEGACY_VERSIONS)
+ if i != j
+ ]
+ for i, x in enumerate(VERSIONS + LEGACY_VERSIONS)
+ ]
+ +
+ # Verify that the not equal (!=) operator works correctly
+ [[(x, x, operator.ne) for x in VERSIONS + LEGACY_VERSIONS]]
+ ),
+ )
+ def test_comparison_false(self, left, right, op):
+ assert not op(LegacyVersion(left), LegacyVersion(right))
+
+ @pytest.mark.parametrize("op", ["lt", "le", "eq", "ge", "gt", "ne"])
+ def test_dunder_op_returns_notimplemented(self, op):
+ method = getattr(LegacyVersion, f"__{op}__")
+ assert method(LegacyVersion("1"), 1) is NotImplemented
+
+ @pytest.mark.parametrize(("op", "expected"), [("eq", False), ("ne", True)])
+ def test_compare_other(self, op, expected):
+ other = pretend.stub(**{f"__{op}__": lambda other: NotImplemented})
+
+ assert getattr(operator, op)(LegacyVersion("1"), other) is expected
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/.gitignore b/testing/web-platform/tests/tools/third_party/pathlib2/.gitignore
new file mode 100644
index 0000000000..db4561eaa1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/.gitignore
@@ -0,0 +1,54 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.cache
+nosetests.xml
+coverage.xml
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/.travis.yml b/testing/web-platform/tests/tools/third_party/pathlib2/.travis.yml
new file mode 100644
index 0000000000..3166741220
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/.travis.yml
@@ -0,0 +1,47 @@
+language: python
+sudo: false
+matrix:
+ include:
+ - python: "3.7"
+ dist: xenial
+ env: LC_ALL="en_US.utf-8"
+ - python: "3.7"
+ dist: xenial
+ env: LC_ALL="en_US.ascii"
+ - python: "3.6"
+ dist: xenial
+ - python: "3.5"
+ dist: xenial
+ - python: "3.4"
+ dist: xenial
+ - python: "2.7"
+ dist: xenial
+ - python: "2.6"
+ dist: trusty
+ - python: "pypy"
+ dist: xenial
+ - python: "pypy3"
+ dist: xenial
+ allow_failures:
+ # pypy occasionally has some bugs
+ - python: "pypy"
+ - python: "pypy3"
+branches:
+ only:
+ - develop
+install:
+ - "python -c \"import sys; print(sys.getfilesystemencoding())\""
+ - "pip install --upgrade setuptools"
+ - "pip install --upgrade pytest"
+ - "pip install pytest-cov"
+ - "pip install -r requirements.txt"
+ - "if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then pip install unittest2; fi"
+ - "if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then pip install check-manifest flake8; fi"
+ - "pip install ."
+script:
+ - "if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then check-manifest; fi"
+ - "if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then flake8; fi"
+ - cd tests
+ - "pytest --cov-report=xml --cov=pathlib2 ."
+after_success:
+ - bash <(curl -s https://codecov.io/bash)
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/CHANGELOG.rst b/testing/web-platform/tests/tools/third_party/pathlib2/CHANGELOG.rst
new file mode 100644
index 0000000000..867c1b38e9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/CHANGELOG.rst
@@ -0,0 +1,163 @@
+History
+-------
+
+Version 2.3.5
+^^^^^^^^^^^^^
+
+- Fall back to ascii when getfilesystemencoding returns None (see
+ issue #59).
+
+Version 2.3.4
+^^^^^^^^^^^^^
+
+- Do not raise a Windows error when calling resolve() on a non-existing
+  path in Python 2.7, to match the behaviour on Python 3.x (see issue #54).
+
+- Use the new collections.abc when possible (see issue #53).
+
+- Sync with upstream pathlib (see issues #47 and #51).
+
+Version 2.3.3
+^^^^^^^^^^^^^
+
+- Bring back old deprecated dependency syntax to ensure compatibility
+ with older systems (see issue #46).
+
+- Drop Python 3.3 support, as scandir no longer supports it.
+
+- Add Python 3.7 support.
+
+Version 2.3.2
+^^^^^^^^^^^^^
+
+- Hotfix for broken setup.py.
+
+Version 2.3.1
+^^^^^^^^^^^^^
+
+- Fix tests for systems where filesystem encoding only supports ascii
+ (reported by yurivict, fixed with help of honnibal, see issue #30).
+
+- Use modern setuptools syntax for specifying conditional scandir
+ dependency (see issue #31).
+
+- Remove legacy use of support module from old pathlib module (see
+ issue #39). This fixes the tests for Python 3.6.
+
+- Drop the "from __future__ import unicode_literals" and -Qnew tests
+ as it introduced subtle bugs in the tests, and maintaining separate
+ test modules for these legacy features seems not worth the effort.
+
+- Drop Python 3.2 support, as scandir no longer supports it.
+
+Version 2.3.0
+^^^^^^^^^^^^^
+
+- Sync with upstream pathlib from CPython 3.6.1 (7d1017d).
+
+Version 2.2.1
+^^^^^^^^^^^^^
+
+- Fix conditional scandir dependency in wheel (reported by AvdN, see
+ issue #20 and pull request #21).
+
+Version 2.2.0
+^^^^^^^^^^^^^
+
+- Sync with upstream pathlib from CPython 3.5.2 and 3.6.0: fix various
+ exceptions, empty glob pattern, scandir, __fspath__.
+
+- Support unicode strings to be used to construct paths in Python 2
+ (reported by native-api, see issue #13 and pull request #15).
+
+Version 2.1.0
+^^^^^^^^^^^^^
+
+- Sync with upstream pathlib from CPython 3.5.0: gethomedir, home,
+ expanduser.
+
+Version 2.0.1
+^^^^^^^^^^^^^
+
+- Fix TypeError exceptions in write_bytes and write_text (contributed
+ by Emanuele Gaifas, see pull request #2).
+
+Version 2.0
+^^^^^^^^^^^
+
+- Sync with upstream pathlib from CPython: read_text, write_text,
+ read_bytes, write_bytes, __enter__, __exit__, samefile.
+- Use travis and appveyor for continuous integration.
+- Fixed some bugs in test code.
+
+Version 1.0.1
+^^^^^^^^^^^^^
+
+- Pull request #4: Python 2.6 compatibility by eevee.
+
+Version 1.0
+^^^^^^^^^^^
+
+This version brings ``pathlib`` up to date with the official Python 3.4
+release, and also fixes a couple of 2.7-specific issues.
+
+- Python issue #20765: Add missing documentation for PurePath.with_name()
+ and PurePath.with_suffix().
+- Fix test_mkdir_parents when the working directory has additional bits
+ set (such as the setgid or sticky bits).
+- Python issue #20111: pathlib.Path.with_suffix() now sanity checks the
+ given suffix.
+- Python issue #19918: Fix PurePath.relative_to() under Windows.
+- Python issue #19921: When Path.mkdir() is called with parents=True, any
+ missing parent is created with the default permissions, ignoring the mode
+ argument (mimicking the POSIX "mkdir -p" command).
+- Python issue #19887: Improve the Path.resolve() algorithm to support
+ certain symlink chains.
+- Make pathlib usable under Python 2.7 with unicode pathnames (only pure
+ ASCII, though).
+- Issue #21: fix TypeError under Python 2.7 when using new division.
+- Add tox support for easier testing.
+
+Version 0.97
+^^^^^^^^^^^^
+
+This version brings ``pathlib`` up to date with the final API specified
+in :pep:`428`. The changes are too long to list here; it is recommended
+to read the `documentation <https://pathlib.readthedocs.org/>`_.
+
+.. warning::
+ The API in this version is partially incompatible with pathlib 0.8 and
+ earlier. Be sure to check your code for possible breakage!
+
+Version 0.8
+^^^^^^^^^^^
+
+- Add PurePath.name and PurePath.anchor.
+- Add Path.owner and Path.group.
+- Add Path.replace().
+- Add Path.as_uri().
+- Issue #10: when creating a file with Path.open(), don't set the executable
+ bit.
+- Issue #11: fix comparisons with non-Path objects.
+
+Version 0.7
+^^^^^^^^^^^
+
+- Add '**' (recursive) patterns to Path.glob().
+- Fix openat() support after the API refactoring in Python 3.3 beta1.
+- Add a *target_is_directory* argument to Path.symlink_to()
+
+Version 0.6
+^^^^^^^^^^^
+
+- Add Path.is_file() and Path.is_symlink()
+- Add Path.glob() and Path.rglob()
+- Add PurePath.match()
+
+Version 0.5
+^^^^^^^^^^^
+
+- Add Path.mkdir().
+- Add Python 2.7 compatibility by Michele Lacchia.
+- Make parent() raise ValueError when the level is greater than the path
+ length.
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/LICENSE.rst b/testing/web-platform/tests/tools/third_party/pathlib2/LICENSE.rst
new file mode 100644
index 0000000000..1715d3d7a2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/LICENSE.rst
@@ -0,0 +1,23 @@
+The MIT License (MIT)
+
+Copyright (c) 2014-2017 Matthias C. M. Troffaes
+Copyright (c) 2012-2014 Antoine Pitrou and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/MANIFEST.in b/testing/web-platform/tests/tools/third_party/pathlib2/MANIFEST.in
new file mode 100644
index 0000000000..2f03369dc6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/MANIFEST.in
@@ -0,0 +1,10 @@
+include *.py
+recursive-include pathlib2 *.py
+recursive-include tests *.py
+include *.rst
+include VERSION
+include requirements.txt
+exclude .travis.yml
+exclude appveyor.yml
+exclude codecov.yml
+prune appveyor
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/README.rst b/testing/web-platform/tests/tools/third_party/pathlib2/README.rst
new file mode 100644
index 0000000000..6378f284dd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/README.rst
@@ -0,0 +1,66 @@
+pathlib2
+========
+
+|appveyor| |travis| |codecov|
+
+Fork of pathlib aiming to support the full stdlib Python API.
+
+The `old pathlib <https://bitbucket.org/pitrou/pathlib>`_
+module on bitbucket is in bugfix-only mode.
+The goal of pathlib2 is to provide a backport of the
+`standard pathlib <http://docs.python.org/dev/library/pathlib.html>`_
+module which tracks the standard library module,
+so that all the newest features of the standard pathlib can also be
+used on older Python versions.
+
+Download
+--------
+
+Standalone releases are available on PyPI:
+http://pypi.python.org/pypi/pathlib2/
+
+Development
+-----------
+
+The main development takes place in the Python standard library: see
+the `Python developer's guide <http://docs.python.org/devguide/>`_.
+In particular, new features should be submitted to the
+`Python bug tracker <http://bugs.python.org/>`_.
+
+Issues that occur in this backport, but that do not occur in the
+standard Python pathlib module can be submitted on
+the `pathlib2 bug tracker <https://github.com/mcmtroffaes/pathlib2/issues>`_.
+
+Documentation
+-------------
+
+Refer to the
+`standard pathlib <http://docs.python.org/dev/library/pathlib.html>`_
+documentation.
+
+Known Issues
+------------
+
+For historical reasons, pathlib2 still uses bytes to represent file paths internally.
+Unfortunately, on Windows with Python 2.7, the file system encoding (``mbcs``)
+has poor support for non-ASCII characters
+and can silently replace them without warning.
+For example, ``u'тест'.encode(sys.getfilesystemencoding())`` results in ``????``,
+which is obviously completely useless.
+
+Therefore, on Windows with Python 2.7, until this problem is fixed upstream,
+unfortunately you cannot rely on pathlib2 to support the full unicode range for filenames.
+See `issue #56 <https://github.com/mcmtroffaes/pathlib2/issues/56>`_ for more details.
+
+.. |travis| image:: https://travis-ci.org/mcmtroffaes/pathlib2.png?branch=develop
+ :target: https://travis-ci.org/mcmtroffaes/pathlib2
+ :alt: travis-ci
+
+.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/baddx3rpet2wyi2c?svg=true
+ :target: https://ci.appveyor.com/project/mcmtroffaes/pathlib2
+ :alt: appveyor
+
+.. |codecov| image:: https://codecov.io/gh/mcmtroffaes/pathlib2/branch/develop/graph/badge.svg
+ :target: https://codecov.io/gh/mcmtroffaes/pathlib2
+ :alt: codecov
+
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/VERSION b/testing/web-platform/tests/tools/third_party/pathlib2/VERSION
new file mode 100644
index 0000000000..cc6c9a491e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/VERSION
@@ -0,0 +1 @@
+2.3.5
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/appveyor.yml b/testing/web-platform/tests/tools/third_party/pathlib2/appveyor.yml
new file mode 100644
index 0000000000..aae7f25f34
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/appveyor.yml
@@ -0,0 +1,30 @@
+environment:
+
+ matrix:
+ - PYTHON: "C:\\Python27"
+
+ - PYTHON: "C:\\Python34"
+
+ - PYTHON: "C:\\Python35"
+
+ - PYTHON: "C:\\Python36"
+
+ - PYTHON: "C:\\Python37"
+
+init:
+ - "%PYTHON%/python --version"
+
+install:
+ - "powershell appveyor\\install.ps1"
+
+build: off
+
+test_script:
+ - cd tests
+ - "%PYTHON%/Scripts/py.test --cov-report=xml --cov=pathlib2 ."
+
+after_test:
+ - ps: |
+ $env:PATH = 'C:\msys64\usr\bin;' + $env:PATH
+ Invoke-WebRequest -Uri 'https://codecov.io/bash' -OutFile codecov.sh
+ bash codecov.sh -f "coverage.xml"
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/appveyor/install.ps1 b/testing/web-platform/tests/tools/third_party/pathlib2/appveyor/install.ps1
new file mode 100644
index 0000000000..ebfbb8db96
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/appveyor/install.ps1
@@ -0,0 +1,44 @@
+# Sample script to install Python and pip under Windows
+# Authors: Olivier Grisel and Kyle Kastner
+# License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/
+
+$GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py"
+$GET_PIP_PATH = "C:\get-pip.py"
+
+
+function InstallPip ($python_home) {
+ $pip_path = $python_home + "/Scripts/pip.exe"
+ $python_path = $python_home + "/python.exe"
+ if (-not(Test-Path $pip_path)) {
+ Write-Host "Installing pip..."
+ $webclient = New-Object System.Net.WebClient
+ $webclient.DownloadFile($GET_PIP_URL, $GET_PIP_PATH)
+ Write-Host "Executing:" $python_path $GET_PIP_PATH
+ Start-Process -FilePath "$python_path" -ArgumentList "$GET_PIP_PATH" -Wait -Passthru
+ } else {
+ Write-Host "Upgrading pip..."
+ & $python_path -m pip install --upgrade pip
+ }
+ Write-Host "Upgrading setuptools..."
+ & $python_path -m pip install --upgrade setuptools
+}
+
+function InstallPackage ($python_home, $pkg) {
+ $pip_path = $python_home + "/Scripts/pip.exe"
+ & $pip_path install $pkg
+}
+
+function InstallRequirements ($python_home, $reqs) {
+ $pip_path = $python_home + "/Scripts/pip.exe"
+ & $pip_path install -r $reqs
+}
+
+function main () {
+ InstallPip $env:PYTHON
+ InstallRequirements $env:PYTHON -r requirements.txt
+ InstallPackage $env:PYTHON pytest-cov
+ InstallPackage $env:PYTHON unittest2
+ InstallPackage $env:PYTHON .
+}
+
+main
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/codecov.yml b/testing/web-platform/tests/tools/third_party/pathlib2/codecov.yml
new file mode 100644
index 0000000000..db2472009c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/codecov.yml
@@ -0,0 +1 @@
+comment: off
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/pathlib2/__init__.py b/testing/web-platform/tests/tools/third_party/pathlib2/pathlib2/__init__.py
new file mode 100644
index 0000000000..d5a47a66c6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/pathlib2/__init__.py
@@ -0,0 +1,1809 @@
+# Copyright (c) 2014-2017 Matthias C. M. Troffaes
+# Copyright (c) 2012-2014 Antoine Pitrou and contributors
+# Distributed under the terms of the MIT License.
+
+import ctypes
+import fnmatch
+import functools
+import io
+import ntpath
+import os
+import posixpath
+import re
+import six
+import sys
+
+from errno import EINVAL, ENOENT, ENOTDIR, EBADF
+from errno import EEXIST, EPERM, EACCES
+from operator import attrgetter
+from stat import (
+ S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO)
+
+try:
+ from collections.abc import Sequence
+except ImportError:
+ from collections import Sequence
+
+try:
+ from urllib import quote as urlquote_from_bytes
+except ImportError:
+ from urllib.parse import quote_from_bytes as urlquote_from_bytes
+
+
+try:
+ intern = intern
+except NameError:
+ intern = sys.intern
+
+supports_symlinks = True
+if os.name == 'nt':
+ import nt
+ if sys.getwindowsversion()[:2] >= (6, 0) and sys.version_info >= (3, 2):
+ from nt import _getfinalpathname
+ else:
+ supports_symlinks = False
+ _getfinalpathname = None
+else:
+ nt = None
+
+try:
+ from os import scandir as os_scandir
+except ImportError:
+ from scandir import scandir as os_scandir
+
+__all__ = [
+ "PurePath", "PurePosixPath", "PureWindowsPath",
+ "Path", "PosixPath", "WindowsPath",
+ ]
+
+#
+# Internals
+#
+
+# EBADF - guard against macOS `stat` throwing EBADF
+_IGNORED_ERROS = (ENOENT, ENOTDIR, EBADF)
+
+_IGNORED_WINERRORS = (
+ 21, # ERROR_NOT_READY - drive exists but is not accessible
+)
+
+
+def _ignore_error(exception):
+ return (getattr(exception, 'errno', None) in _IGNORED_ERROS or
+ getattr(exception, 'winerror', None) in _IGNORED_WINERRORS)
+
+
+def _py2_fsencode(parts):
+ # py2 => minimal unicode support
+ assert six.PY2
+ return [part.encode('ascii') if isinstance(part, six.text_type)
+ else part for part in parts]
+
+
+def _try_except_fileexistserror(try_func, except_func, else_func=None):
+ if sys.version_info >= (3, 3):
+ try:
+ try_func()
+ except FileExistsError as exc:
+ except_func(exc)
+ else:
+ if else_func is not None:
+ else_func()
+ else:
+ try:
+ try_func()
+ except EnvironmentError as exc:
+ if exc.errno != EEXIST:
+ raise
+ else:
+ except_func(exc)
+ else:
+ if else_func is not None:
+ else_func()
+
+
+def _try_except_filenotfounderror(try_func, except_func):
+ if sys.version_info >= (3, 3):
+ try:
+ try_func()
+ except FileNotFoundError as exc:
+ except_func(exc)
+ elif os.name != 'nt':
+ try:
+ try_func()
+ except EnvironmentError as exc:
+ if exc.errno != ENOENT:
+ raise
+ else:
+ except_func(exc)
+ else:
+ try:
+ try_func()
+ except WindowsError as exc:
+ # errno contains winerror
+ # 2 = file not found
+ # 3 = path not found
+ if exc.errno not in (2, 3):
+ raise
+ else:
+ except_func(exc)
+ except EnvironmentError as exc:
+ if exc.errno != ENOENT:
+ raise
+ else:
+ except_func(exc)
+
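+# Illustrative usage (a sketch based on Path.mkdir() further below, not
+# executed here): the call that can fail is wrapped in a nullary function and
+# the error case is handled in a callback, so the same code path works on
+# Python 2 (EnvironmentError plus errno checks) and Python 3
+# (FileNotFoundError):
+#
+#     def _try_func():
+#         self._accessor.mkdir(self, mode)
+#
+#     def _exc_func(exc):
+#         ...  # create the missing parent, then retry
+#
+#     _try_except_filenotfounderror(_try_func, _exc_func)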
+
+def _try_except_permissionerror_iter(try_iter, except_iter):
+ if sys.version_info >= (3, 3):
+ try:
+ for x in try_iter():
+ yield x
+ except PermissionError as exc:
+ for x in except_iter(exc):
+ yield x
+ else:
+ try:
+ for x in try_iter():
+ yield x
+ except EnvironmentError as exc:
+ if exc.errno not in (EPERM, EACCES):
+ raise
+ else:
+ for x in except_iter(exc):
+ yield x
+
+
+def _win32_get_unique_path_id(path):
+ # get file information, needed for samefile on older Python versions
+ # see http://timgolden.me.uk/python/win32_how_do_i/
+ # see_if_two_files_are_the_same_file.html
+ from ctypes import POINTER, Structure, WinError
+ from ctypes.wintypes import DWORD, HANDLE, BOOL
+
+ class FILETIME(Structure):
+ _fields_ = [("datetime_lo", DWORD),
+ ("datetime_hi", DWORD),
+ ]
+
+ class BY_HANDLE_FILE_INFORMATION(Structure):
+ _fields_ = [("attributes", DWORD),
+ ("created_at", FILETIME),
+ ("accessed_at", FILETIME),
+ ("written_at", FILETIME),
+ ("volume", DWORD),
+ ("file_hi", DWORD),
+ ("file_lo", DWORD),
+ ("n_links", DWORD),
+ ("index_hi", DWORD),
+ ("index_lo", DWORD),
+ ]
+
+ CreateFile = ctypes.windll.kernel32.CreateFileW
+ CreateFile.argtypes = [ctypes.c_wchar_p, DWORD, DWORD, ctypes.c_void_p,
+ DWORD, DWORD, HANDLE]
+ CreateFile.restype = HANDLE
+ GetFileInformationByHandle = (
+ ctypes.windll.kernel32.GetFileInformationByHandle)
+ GetFileInformationByHandle.argtypes = [
+ HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)]
+ GetFileInformationByHandle.restype = BOOL
+ CloseHandle = ctypes.windll.kernel32.CloseHandle
+ CloseHandle.argtypes = [HANDLE]
+ CloseHandle.restype = BOOL
+ GENERIC_READ = 0x80000000
+ FILE_SHARE_READ = 0x00000001
+ FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
+ OPEN_EXISTING = 3
+ if os.path.isdir(path):
+ flags = FILE_FLAG_BACKUP_SEMANTICS
+ else:
+ flags = 0
+ hfile = CreateFile(path, GENERIC_READ, FILE_SHARE_READ,
+ None, OPEN_EXISTING, flags, None)
+ if hfile == 0xffffffff:
+ if sys.version_info >= (3, 3):
+ raise FileNotFoundError(path)
+ else:
+            exc = OSError("file not found: %r" % path)
+ exc.errno = ENOENT
+ raise exc
+ info = BY_HANDLE_FILE_INFORMATION()
+ success = GetFileInformationByHandle(hfile, info)
+ CloseHandle(hfile)
+ if success == 0:
+ raise WinError()
+ return info.volume, info.index_hi, info.index_lo
+
+
+def _is_wildcard_pattern(pat):
+ # Whether this pattern needs actual matching using fnmatch, or can
+ # be looked up directly as a file.
+ return "*" in pat or "?" in pat or "[" in pat
+
+
+class _Flavour(object):
+
+ """A flavour implements a particular (platform-specific) set of path
+ semantics."""
+
+ def __init__(self):
+ self.join = self.sep.join
+
+ def parse_parts(self, parts):
+ if six.PY2:
+ parts = _py2_fsencode(parts)
+ parsed = []
+ sep = self.sep
+ altsep = self.altsep
+ drv = root = ''
+ it = reversed(parts)
+ for part in it:
+ if not part:
+ continue
+ if altsep:
+ part = part.replace(altsep, sep)
+ drv, root, rel = self.splitroot(part)
+ if sep in rel:
+ for x in reversed(rel.split(sep)):
+ if x and x != '.':
+ parsed.append(intern(x))
+ else:
+ if rel and rel != '.':
+ parsed.append(intern(rel))
+ if drv or root:
+ if not drv:
+ # If no drive is present, try to find one in the previous
+ # parts. This makes the result of parsing e.g.
+ # ("C:", "/", "a") reasonably intuitive.
+ for part in it:
+ if not part:
+ continue
+ if altsep:
+ part = part.replace(altsep, sep)
+ drv = self.splitroot(part)[0]
+ if drv:
+ break
+ break
+ if drv or root:
+ parsed.append(drv + root)
+ parsed.reverse()
+ return drv, root, parsed
+
+ def join_parsed_parts(self, drv, root, parts, drv2, root2, parts2):
+ """
+ Join the two paths represented by the respective
+ (drive, root, parts) tuples. Return a new (drive, root, parts) tuple.
+ """
+ if root2:
+ if not drv2 and drv:
+ return drv, root2, [drv + root2] + parts2[1:]
+ elif drv2:
+ if drv2 == drv or self.casefold(drv2) == self.casefold(drv):
+ # Same drive => second path is relative to the first
+ return drv, root, parts + parts2[1:]
+ else:
+ # Second path is non-anchored (common case)
+ return drv, root, parts + parts2
+ return drv2, root2, parts2
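+
+    # For illustration (Windows flavour): joining "C:/a" with "/b" keeps the
+    # drive but takes the new root, giving "C:/b"; joining with "D:b" switches
+    # to the other drive, giving "D:b"; joining with a plain relative path
+    # simply appends its parts.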
+
+
+class _WindowsFlavour(_Flavour):
+ # Reference for Windows paths can be found at
+ # http://msdn.microsoft.com/en-us/library/aa365247%28v=vs.85%29.aspx
+
+ sep = '\\'
+ altsep = '/'
+ has_drv = True
+ pathmod = ntpath
+
+ is_supported = (os.name == 'nt')
+
+ drive_letters = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ')
+ ext_namespace_prefix = '\\\\?\\'
+
+ reserved_names = (
+ set(['CON', 'PRN', 'AUX', 'NUL']) |
+ set(['COM%d' % i for i in range(1, 10)]) |
+ set(['LPT%d' % i for i in range(1, 10)])
+ )
+
+ # Interesting findings about extended paths:
+ # - '\\?\c:\a', '//?/c:\a' and '//?/c:/a' are all supported
+ # but '\\?\c:/a' is not
+ # - extended paths are always absolute; "relative" extended paths will
+ # fail.
+
+ def splitroot(self, part, sep=sep):
+ first = part[0:1]
+ second = part[1:2]
+ if (second == sep and first == sep):
+ # XXX extended paths should also disable the collapsing of "."
+ # components (according to MSDN docs).
+ prefix, part = self._split_extended_path(part)
+ first = part[0:1]
+ second = part[1:2]
+ else:
+ prefix = ''
+ third = part[2:3]
+ if (second == sep and first == sep and third != sep):
+ # is a UNC path:
+ # vvvvvvvvvvvvvvvvvvvvv root
+ # \\machine\mountpoint\directory\etc\...
+ # directory ^^^^^^^^^^^^^^
+ index = part.find(sep, 2)
+ if index != -1:
+ index2 = part.find(sep, index + 1)
+ # a UNC path can't have two slashes in a row
+ # (after the initial two)
+ if index2 != index + 1:
+ if index2 == -1:
+ index2 = len(part)
+ if prefix:
+ return prefix + part[1:index2], sep, part[index2 + 1:]
+ else:
+ return part[:index2], sep, part[index2 + 1:]
+ drv = root = ''
+ if second == ':' and first in self.drive_letters:
+ drv = part[:2]
+ part = part[2:]
+ first = third
+ if first == sep:
+ root = first
+ part = part.lstrip(sep)
+ return prefix + drv, root, part
+
+ def casefold(self, s):
+ return s.lower()
+
+ def casefold_parts(self, parts):
+ return [p.lower() for p in parts]
+
+ def resolve(self, path, strict=False):
+ s = str(path)
+ if not s:
+ return os.getcwd()
+ previous_s = None
+ if _getfinalpathname is not None:
+ if strict:
+ return self._ext_to_normal(_getfinalpathname(s))
+ else:
+ # End of the path after the first one not found
+ tail_parts = []
+
+ def _try_func():
+ result[0] = self._ext_to_normal(_getfinalpathname(s))
+ # if there was no exception, set flag to 0
+ result[1] = 0
+
+ def _exc_func(exc):
+ pass
+
+ while True:
+ result = [None, 1]
+ _try_except_filenotfounderror(_try_func, _exc_func)
+ if result[1] == 1: # file not found exception raised
+ previous_s = s
+ s, tail = os.path.split(s)
+ tail_parts.append(tail)
+ if previous_s == s:
+ return path
+ else:
+ s = result[0]
+ return os.path.join(s, *reversed(tail_parts))
+ # Means fallback on absolute
+ return None
+
+ def _split_extended_path(self, s, ext_prefix=ext_namespace_prefix):
+ prefix = ''
+ if s.startswith(ext_prefix):
+ prefix = s[:4]
+ s = s[4:]
+ if s.startswith('UNC\\'):
+ prefix += s[:3]
+ s = '\\' + s[3:]
+ return prefix, s
+
+ def _ext_to_normal(self, s):
+ # Turn back an extended path into a normal DOS-like path
+ return self._split_extended_path(s)[1]
+
+ def is_reserved(self, parts):
+ # NOTE: the rules for reserved names seem somewhat complicated
+ # (e.g. r"..\NUL" is reserved but not r"foo\NUL").
+ # We err on the side of caution and return True for paths which are
+ # not considered reserved by Windows.
+ if not parts:
+ return False
+ if parts[0].startswith('\\\\'):
+ # UNC paths are never reserved
+ return False
+ return parts[-1].partition('.')[0].upper() in self.reserved_names
+
+ def make_uri(self, path):
+ # Under Windows, file URIs use the UTF-8 encoding.
+ drive = path.drive
+ if len(drive) == 2 and drive[1] == ':':
+ # It's a path on a local drive => 'file:///c:/a/b'
+ rest = path.as_posix()[2:].lstrip('/')
+ return 'file:///%s/%s' % (
+ drive, urlquote_from_bytes(rest.encode('utf-8')))
+ else:
+ # It's a path on a network drive => 'file://host/share/a/b'
+ return 'file:' + urlquote_from_bytes(
+ path.as_posix().encode('utf-8'))
+
+ def gethomedir(self, username):
+ if 'HOME' in os.environ:
+ userhome = os.environ['HOME']
+ elif 'USERPROFILE' in os.environ:
+ userhome = os.environ['USERPROFILE']
+ elif 'HOMEPATH' in os.environ:
+ try:
+ drv = os.environ['HOMEDRIVE']
+ except KeyError:
+ drv = ''
+ userhome = drv + os.environ['HOMEPATH']
+ else:
+ raise RuntimeError("Can't determine home directory")
+
+ if username:
+            # Try to guess the user's home directory. By default all user
+            # directories are located in the same place and are named after
+            # the corresponding usernames. If the current user's home
+            # directory points to a nonstandard place, this guess is likely
+            # wrong.
+ if os.environ['USERNAME'] != username:
+ drv, root, parts = self.parse_parts((userhome,))
+ if parts[-1] != os.environ['USERNAME']:
+ raise RuntimeError("Can't determine home directory "
+ "for %r" % username)
+ parts[-1] = username
+ if drv or root:
+ userhome = drv + root + self.join(parts[1:])
+ else:
+ userhome = self.join(parts)
+ return userhome
+
+
+class _PosixFlavour(_Flavour):
+ sep = '/'
+ altsep = ''
+ has_drv = False
+ pathmod = posixpath
+
+ is_supported = (os.name != 'nt')
+
+ def splitroot(self, part, sep=sep):
+ if part and part[0] == sep:
+ stripped_part = part.lstrip(sep)
+ # According to POSIX path resolution:
+ # http://pubs.opengroup.org/onlinepubs/009695399/basedefs/
+ # xbd_chap04.html#tag_04_11
+ # "A pathname that begins with two successive slashes may be
+ # interpreted in an implementation-defined manner, although more
+ # than two leading slashes shall be treated as a single slash".
+ if len(part) - len(stripped_part) == 2:
+ return '', sep * 2, stripped_part
+ else:
+ return '', sep, stripped_part
+ else:
+ return '', '', part
+
+ def casefold(self, s):
+ return s
+
+ def casefold_parts(self, parts):
+ return parts
+
+ def resolve(self, path, strict=False):
+ sep = self.sep
+ accessor = path._accessor
+ seen = {}
+
+ def _resolve(path, rest):
+ if rest.startswith(sep):
+ path = ''
+
+ for name in rest.split(sep):
+ if not name or name == '.':
+ # current dir
+ continue
+ if name == '..':
+ # parent dir
+ path, _, _ = path.rpartition(sep)
+ continue
+ newpath = path + sep + name
+ if newpath in seen:
+ # Already seen this path
+ path = seen[newpath]
+ if path is not None:
+ # use cached value
+ continue
+ # The symlink is not resolved, so we must have a symlink
+ # loop.
+ raise RuntimeError("Symlink loop from %r" % newpath)
+ # Resolve the symbolic link
+ try:
+ target = accessor.readlink(newpath)
+ except OSError as e:
+ if e.errno != EINVAL and strict:
+ raise
+ # Not a symlink, or non-strict mode. We just leave the path
+ # untouched.
+ path = newpath
+ else:
+ seen[newpath] = None # not resolved symlink
+ path = _resolve(path, target)
+ seen[newpath] = path # resolved symlink
+
+ return path
+ # NOTE: according to POSIX, getcwd() cannot contain path components
+ # which are symlinks.
+ base = '' if path.is_absolute() else os.getcwd()
+ return _resolve(base, str(path)) or sep
+
+ def is_reserved(self, parts):
+ return False
+
+ def make_uri(self, path):
+ # We represent the path using the local filesystem encoding,
+ # for portability to other applications.
+ bpath = bytes(path)
+ return 'file://' + urlquote_from_bytes(bpath)
+
+ def gethomedir(self, username):
+ if not username:
+ try:
+ return os.environ['HOME']
+ except KeyError:
+ import pwd
+ return pwd.getpwuid(os.getuid()).pw_dir
+ else:
+ import pwd
+ try:
+ return pwd.getpwnam(username).pw_dir
+ except KeyError:
+ raise RuntimeError("Can't determine home directory "
+ "for %r" % username)
+
+
+_windows_flavour = _WindowsFlavour()
+_posix_flavour = _PosixFlavour()
+
+
+class _Accessor:
+
+ """An accessor implements a particular (system-specific or not) way of
+ accessing paths on the filesystem."""
+
+
+class _NormalAccessor(_Accessor):
+
+ def _wrap_strfunc(strfunc):
+ @functools.wraps(strfunc)
+ def wrapped(pathobj, *args):
+ return strfunc(str(pathobj), *args)
+ return staticmethod(wrapped)
+
+ def _wrap_binary_strfunc(strfunc):
+ @functools.wraps(strfunc)
+ def wrapped(pathobjA, pathobjB, *args):
+ return strfunc(str(pathobjA), str(pathobjB), *args)
+ return staticmethod(wrapped)
+
+ stat = _wrap_strfunc(os.stat)
+
+ lstat = _wrap_strfunc(os.lstat)
+
+ open = _wrap_strfunc(os.open)
+
+ listdir = _wrap_strfunc(os.listdir)
+
+ scandir = _wrap_strfunc(os_scandir)
+
+ chmod = _wrap_strfunc(os.chmod)
+
+ if hasattr(os, "lchmod"):
+ lchmod = _wrap_strfunc(os.lchmod)
+ else:
+ def lchmod(self, pathobj, mode):
+ raise NotImplementedError("lchmod() not available on this system")
+
+ mkdir = _wrap_strfunc(os.mkdir)
+
+ unlink = _wrap_strfunc(os.unlink)
+
+ rmdir = _wrap_strfunc(os.rmdir)
+
+ rename = _wrap_binary_strfunc(os.rename)
+
+ if sys.version_info >= (3, 3):
+ replace = _wrap_binary_strfunc(os.replace)
+
+ if nt:
+ if supports_symlinks:
+ symlink = _wrap_binary_strfunc(os.symlink)
+ else:
+ def symlink(a, b, target_is_directory):
+ raise NotImplementedError(
+ "symlink() not available on this system")
+ else:
+ # Under POSIX, os.symlink() takes two args
+ @staticmethod
+ def symlink(a, b, target_is_directory):
+ return os.symlink(str(a), str(b))
+
+ utime = _wrap_strfunc(os.utime)
+
+ # Helper for resolve()
+ def readlink(self, path):
+ return os.readlink(path)
+
+
+_normal_accessor = _NormalAccessor()
+
+
+#
+# Globbing helpers
+#
+
+def _make_selector(pattern_parts):
+ pat = pattern_parts[0]
+ child_parts = pattern_parts[1:]
+ if pat == '**':
+ cls = _RecursiveWildcardSelector
+ elif '**' in pat:
+ raise ValueError(
+ "Invalid pattern: '**' can only be an entire path component")
+ elif _is_wildcard_pattern(pat):
+ cls = _WildcardSelector
+ else:
+ cls = _PreciseSelector
+ return cls(pat, child_parts)
+
+
+if hasattr(functools, "lru_cache"):
+ _make_selector = functools.lru_cache()(_make_selector)
+
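+# For illustration: a relative pattern such as "src/**/*.py" is split into the
+# parts ("src", "**", "*.py") by glob()/rglob(), and _make_selector() chains a
+# _PreciseSelector for "src", a _RecursiveWildcardSelector for "**" (which
+# walks the whole subtree) and a _WildcardSelector for "*.py", terminated by a
+# _TerminatingSelector that yields the matching paths.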
+
+class _Selector:
+
+ """A selector matches a specific glob pattern part against the children
+ of a given path."""
+
+ def __init__(self, child_parts):
+ self.child_parts = child_parts
+ if child_parts:
+ self.successor = _make_selector(child_parts)
+ self.dironly = True
+ else:
+ self.successor = _TerminatingSelector()
+ self.dironly = False
+
+ def select_from(self, parent_path):
+ """Iterate over all child paths of `parent_path` matched by this
+ selector. This can contain parent_path itself."""
+ path_cls = type(parent_path)
+ is_dir = path_cls.is_dir
+ exists = path_cls.exists
+ scandir = parent_path._accessor.scandir
+ if not is_dir(parent_path):
+ return iter([])
+ return self._select_from(parent_path, is_dir, exists, scandir)
+
+
+class _TerminatingSelector:
+
+ def _select_from(self, parent_path, is_dir, exists, scandir):
+ yield parent_path
+
+
+class _PreciseSelector(_Selector):
+
+ def __init__(self, name, child_parts):
+ self.name = name
+ _Selector.__init__(self, child_parts)
+
+ def _select_from(self, parent_path, is_dir, exists, scandir):
+ def try_iter():
+ path = parent_path._make_child_relpath(self.name)
+ if (is_dir if self.dironly else exists)(path):
+ for p in self.successor._select_from(
+ path, is_dir, exists, scandir):
+ yield p
+
+ def except_iter(exc):
+ return
+ yield
+
+ for x in _try_except_permissionerror_iter(try_iter, except_iter):
+ yield x
+
+
+class _WildcardSelector(_Selector):
+
+ def __init__(self, pat, child_parts):
+ self.pat = re.compile(fnmatch.translate(pat))
+ _Selector.__init__(self, child_parts)
+
+ def _select_from(self, parent_path, is_dir, exists, scandir):
+ def try_iter():
+ cf = parent_path._flavour.casefold
+ entries = list(scandir(parent_path))
+ for entry in entries:
+ if not self.dironly or entry.is_dir():
+ name = entry.name
+ casefolded = cf(name)
+ if self.pat.match(casefolded):
+ path = parent_path._make_child_relpath(name)
+ for p in self.successor._select_from(
+ path, is_dir, exists, scandir):
+ yield p
+
+ def except_iter(exc):
+ return
+ yield
+
+ for x in _try_except_permissionerror_iter(try_iter, except_iter):
+ yield x
+
+
+class _RecursiveWildcardSelector(_Selector):
+
+ def __init__(self, pat, child_parts):
+ _Selector.__init__(self, child_parts)
+
+ def _iterate_directories(self, parent_path, is_dir, scandir):
+ yield parent_path
+
+ def try_iter():
+ entries = list(scandir(parent_path))
+ for entry in entries:
+ entry_is_dir = False
+ try:
+ entry_is_dir = entry.is_dir()
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ if entry_is_dir and not entry.is_symlink():
+ path = parent_path._make_child_relpath(entry.name)
+ for p in self._iterate_directories(path, is_dir, scandir):
+ yield p
+
+ def except_iter(exc):
+ return
+ yield
+
+ for x in _try_except_permissionerror_iter(try_iter, except_iter):
+ yield x
+
+ def _select_from(self, parent_path, is_dir, exists, scandir):
+ def try_iter():
+ yielded = set()
+ try:
+ successor_select = self.successor._select_from
+ for starting_point in self._iterate_directories(
+ parent_path, is_dir, scandir):
+ for p in successor_select(
+ starting_point, is_dir, exists, scandir):
+ if p not in yielded:
+ yield p
+ yielded.add(p)
+ finally:
+ yielded.clear()
+
+ def except_iter(exc):
+ return
+ yield
+
+ for x in _try_except_permissionerror_iter(try_iter, except_iter):
+ yield x
+
+
+#
+# Public API
+#
+
+class _PathParents(Sequence):
+
+ """This object provides sequence-like access to the logical ancestors
+ of a path. Don't try to construct it yourself."""
+ __slots__ = ('_pathcls', '_drv', '_root', '_parts')
+
+ def __init__(self, path):
+ # We don't store the instance to avoid reference cycles
+ self._pathcls = type(path)
+ self._drv = path._drv
+ self._root = path._root
+ self._parts = path._parts
+
+ def __len__(self):
+ if self._drv or self._root:
+ return len(self._parts) - 1
+ else:
+ return len(self._parts)
+
+ def __getitem__(self, idx):
+ if idx < 0 or idx >= len(self):
+ raise IndexError(idx)
+ return self._pathcls._from_parsed_parts(self._drv, self._root,
+ self._parts[:-idx - 1])
+
+ def __repr__(self):
+ return "<{0}.parents>".format(self._pathcls.__name__)
+
+
+class PurePath(object):
+
+ """PurePath represents a filesystem path and offers operations which
+ don't imply any actual filesystem I/O. Depending on your system,
+ instantiating a PurePath will return either a PurePosixPath or a
+ PureWindowsPath object. You can also instantiate either of these classes
+ directly, regardless of your system.
+ """
+ __slots__ = (
+ '_drv', '_root', '_parts',
+ '_str', '_hash', '_pparts', '_cached_cparts',
+ )
+
+ def __new__(cls, *args):
+        """Construct a PurePath from one or several strings and/or existing
+ PurePath objects. The strings and path objects are combined so as
+ to yield a canonicalized path, which is incorporated into the
+ new PurePath object.
+ """
+ if cls is PurePath:
+ cls = PureWindowsPath if os.name == 'nt' else PurePosixPath
+ return cls._from_parts(args)
+
+ def __reduce__(self):
+ # Using the parts tuple helps share interned path parts
+ # when pickling related paths.
+ return (self.__class__, tuple(self._parts))
+
+ @classmethod
+ def _parse_args(cls, args):
+ # This is useful when you don't want to create an instance, just
+ # canonicalize some constructor arguments.
+ parts = []
+ for a in args:
+ if isinstance(a, PurePath):
+ parts += a._parts
+ else:
+ if sys.version_info >= (3, 6):
+ a = os.fspath(a)
+ else:
+ # duck typing for older Python versions
+ if hasattr(a, "__fspath__"):
+ a = a.__fspath__()
+ if isinstance(a, str):
+ # Force-cast str subclasses to str (issue #21127)
+ parts.append(str(a))
+ # also handle unicode for PY2 (six.text_type = unicode)
+ elif six.PY2 and isinstance(a, six.text_type):
+ # cast to str using filesystem encoding
+ # note: in rare circumstances, on Python < 3.2,
+ # getfilesystemencoding can return None, in that
+ # case fall back to ascii
+ parts.append(a.encode(
+ sys.getfilesystemencoding() or "ascii"))
+ else:
+ raise TypeError(
+ "argument should be a str object or an os.PathLike "
+ "object returning str, not %r"
+ % type(a))
+ return cls._flavour.parse_parts(parts)
+
+ @classmethod
+ def _from_parts(cls, args, init=True):
+ # We need to call _parse_args on the instance, so as to get the
+ # right flavour.
+ self = object.__new__(cls)
+ drv, root, parts = self._parse_args(args)
+ self._drv = drv
+ self._root = root
+ self._parts = parts
+ if init:
+ self._init()
+ return self
+
+ @classmethod
+ def _from_parsed_parts(cls, drv, root, parts, init=True):
+ self = object.__new__(cls)
+ self._drv = drv
+ self._root = root
+ self._parts = parts
+ if init:
+ self._init()
+ return self
+
+ @classmethod
+ def _format_parsed_parts(cls, drv, root, parts):
+ if drv or root:
+ return drv + root + cls._flavour.join(parts[1:])
+ else:
+ return cls._flavour.join(parts)
+
+ def _init(self):
+ # Overridden in concrete Path
+ pass
+
+ def _make_child(self, args):
+ drv, root, parts = self._parse_args(args)
+ drv, root, parts = self._flavour.join_parsed_parts(
+ self._drv, self._root, self._parts, drv, root, parts)
+ return self._from_parsed_parts(drv, root, parts)
+
+ def __str__(self):
+ """Return the string representation of the path, suitable for
+ passing to system calls."""
+ try:
+ return self._str
+ except AttributeError:
+ self._str = self._format_parsed_parts(self._drv, self._root,
+ self._parts) or '.'
+ return self._str
+
+ def __fspath__(self):
+ return str(self)
+
+ def as_posix(self):
+ """Return the string representation of the path with forward (/)
+ slashes."""
+ f = self._flavour
+ return str(self).replace(f.sep, '/')
+
+ def __bytes__(self):
+ """Return the bytes representation of the path. This is only
+ recommended to use under Unix."""
+ if sys.version_info < (3, 2):
+ raise NotImplementedError("needs Python 3.2 or later")
+ return os.fsencode(str(self))
+
+ def __repr__(self):
+ return "{0}({1!r})".format(self.__class__.__name__, self.as_posix())
+
+ def as_uri(self):
+ """Return the path as a 'file' URI."""
+ if not self.is_absolute():
+ raise ValueError("relative path can't be expressed as a file URI")
+ return self._flavour.make_uri(self)
+
+ @property
+ def _cparts(self):
+ # Cached casefolded parts, for hashing and comparison
+ try:
+ return self._cached_cparts
+ except AttributeError:
+ self._cached_cparts = self._flavour.casefold_parts(self._parts)
+ return self._cached_cparts
+
+ def __eq__(self, other):
+ if not isinstance(other, PurePath):
+ return NotImplemented
+ return (
+ self._cparts == other._cparts
+ and self._flavour is other._flavour)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ try:
+ return self._hash
+ except AttributeError:
+ self._hash = hash(tuple(self._cparts))
+ return self._hash
+
+ def __lt__(self, other):
+ if (not isinstance(other, PurePath)
+ or self._flavour is not other._flavour):
+ return NotImplemented
+ return self._cparts < other._cparts
+
+ def __le__(self, other):
+ if (not isinstance(other, PurePath)
+ or self._flavour is not other._flavour):
+ return NotImplemented
+ return self._cparts <= other._cparts
+
+ def __gt__(self, other):
+ if (not isinstance(other, PurePath)
+ or self._flavour is not other._flavour):
+ return NotImplemented
+ return self._cparts > other._cparts
+
+ def __ge__(self, other):
+ if (not isinstance(other, PurePath)
+ or self._flavour is not other._flavour):
+ return NotImplemented
+ return self._cparts >= other._cparts
+
+ drive = property(attrgetter('_drv'),
+ doc="""The drive prefix (letter or UNC path), if any.""")
+
+ root = property(attrgetter('_root'),
+ doc="""The root of the path, if any.""")
+
+ @property
+ def anchor(self):
+ """The concatenation of the drive and root, or ''."""
+ anchor = self._drv + self._root
+ return anchor
+
+ @property
+ def name(self):
+ """The final path component, if any."""
+ parts = self._parts
+ if len(parts) == (1 if (self._drv or self._root) else 0):
+ return ''
+ return parts[-1]
+
+ @property
+ def suffix(self):
+ """The final component's last suffix, if any."""
+ name = self.name
+ i = name.rfind('.')
+ if 0 < i < len(name) - 1:
+ return name[i:]
+ else:
+ return ''
+
+ @property
+ def suffixes(self):
+ """A list of the final component's suffixes, if any."""
+ name = self.name
+ if name.endswith('.'):
+ return []
+ name = name.lstrip('.')
+ return ['.' + suffix for suffix in name.split('.')[1:]]
+
+ @property
+ def stem(self):
+ """The final path component, minus its last suffix."""
+ name = self.name
+ i = name.rfind('.')
+ if 0 < i < len(name) - 1:
+ return name[:i]
+ else:
+ return name
+
+ def with_name(self, name):
+ """Return a new path with the file name changed."""
+ if not self.name:
+ raise ValueError("%r has an empty name" % (self,))
+ drv, root, parts = self._flavour.parse_parts((name,))
+ if (not name or name[-1] in [self._flavour.sep, self._flavour.altsep]
+ or drv or root or len(parts) != 1):
+ raise ValueError("Invalid name %r" % (name))
+ return self._from_parsed_parts(self._drv, self._root,
+ self._parts[:-1] + [name])
+
+ def with_suffix(self, suffix):
+ """Return a new path with the file suffix changed. If the path
+ has no suffix, add given suffix. If the given suffix is an empty
+ string, remove the suffix from the path.
+ """
+ # XXX if suffix is None, should the current suffix be removed?
+ f = self._flavour
+ if f.sep in suffix or f.altsep and f.altsep in suffix:
+ raise ValueError("Invalid suffix %r" % (suffix))
+ if suffix and not suffix.startswith('.') or suffix == '.':
+ raise ValueError("Invalid suffix %r" % (suffix))
+ name = self.name
+ if not name:
+ raise ValueError("%r has an empty name" % (self,))
+ old_suffix = self.suffix
+ if not old_suffix:
+ name = name + suffix
+ else:
+ name = name[:-len(old_suffix)] + suffix
+ return self._from_parsed_parts(self._drv, self._root,
+ self._parts[:-1] + [name])
+
+ def relative_to(self, *other):
+ """Return the relative path to another path identified by the passed
+ arguments. If the operation is not possible (because this is not
+ a subpath of the other path), raise ValueError.
+ """
+ # For the purpose of this method, drive and root are considered
+ # separate parts, i.e.:
+ # Path('c:/').relative_to('c:') gives Path('/')
+ # Path('c:/').relative_to('/') raise ValueError
+ if not other:
+ raise TypeError("need at least one argument")
+ parts = self._parts
+ drv = self._drv
+ root = self._root
+ if root:
+ abs_parts = [drv, root] + parts[1:]
+ else:
+ abs_parts = parts
+ to_drv, to_root, to_parts = self._parse_args(other)
+ if to_root:
+ to_abs_parts = [to_drv, to_root] + to_parts[1:]
+ else:
+ to_abs_parts = to_parts
+ n = len(to_abs_parts)
+ cf = self._flavour.casefold_parts
+ if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts):
+ formatted = self._format_parsed_parts(to_drv, to_root, to_parts)
+ raise ValueError("{0!r} does not start with {1!r}"
+ .format(str(self), str(formatted)))
+ return self._from_parsed_parts('', root if n == 1 else '',
+ abs_parts[n:])
+
+ @property
+ def parts(self):
+ """An object providing sequence-like access to the
+ components in the filesystem path."""
+ # We cache the tuple to avoid building a new one each time .parts
+ # is accessed. XXX is this necessary?
+ try:
+ return self._pparts
+ except AttributeError:
+ self._pparts = tuple(self._parts)
+ return self._pparts
+
+ def joinpath(self, *args):
+ """Combine this path with one or several arguments, and return a
+ new path representing either a subpath (if all arguments are relative
+ paths) or a totally different path (if one of the arguments is
+ anchored).
+ """
+ return self._make_child(args)
+
+ def __truediv__(self, key):
+ return self._make_child((key,))
+
+ def __rtruediv__(self, key):
+ return self._from_parts([key] + self._parts)
+
+ if six.PY2:
+ __div__ = __truediv__
+ __rdiv__ = __rtruediv__
+
+ @property
+ def parent(self):
+ """The logical parent of the path."""
+ drv = self._drv
+ root = self._root
+ parts = self._parts
+ if len(parts) == 1 and (drv or root):
+ return self
+ return self._from_parsed_parts(drv, root, parts[:-1])
+
+ @property
+ def parents(self):
+ """A sequence of this path's logical parents."""
+ return _PathParents(self)
+
+ def is_absolute(self):
+ """True if the path is absolute (has both a root and, if applicable,
+ a drive)."""
+ if not self._root:
+ return False
+ return not self._flavour.has_drv or bool(self._drv)
+
+ def is_reserved(self):
+ """Return True if the path contains one of the special names reserved
+ by the system, if any."""
+ return self._flavour.is_reserved(self._parts)
+
+ def match(self, path_pattern):
+ """
+ Return True if this path matches the given pattern.
+ """
+ cf = self._flavour.casefold
+ path_pattern = cf(path_pattern)
+ drv, root, pat_parts = self._flavour.parse_parts((path_pattern,))
+ if not pat_parts:
+ raise ValueError("empty pattern")
+ if drv and drv != cf(self._drv):
+ return False
+ if root and root != cf(self._root):
+ return False
+ parts = self._cparts
+ if drv or root:
+ if len(pat_parts) != len(parts):
+ return False
+ pat_parts = pat_parts[1:]
+ elif len(pat_parts) > len(parts):
+ return False
+ for part, pat in zip(reversed(parts), reversed(pat_parts)):
+ if not fnmatch.fnmatchcase(part, pat):
+ return False
+ return True
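+
+        # Doctest-style illustration (matching is anchored at the right for
+        # relative patterns):
+        #   PurePath("a/b.py").match("*.py")        -> True
+        #   PurePath("/a/b/c.py").match("b/*.py")   -> True
+        #   PurePath("/a/b/c.py").match("a/*.py")   -> False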
+
+
+# Can't subclass os.PathLike from PurePath and keep the constructor
+# optimizations in PurePath._parse_args().
+if sys.version_info >= (3, 6):
+ os.PathLike.register(PurePath)
+
+
+class PurePosixPath(PurePath):
+ _flavour = _posix_flavour
+ __slots__ = ()
+
+
+class PureWindowsPath(PurePath):
+ """PurePath subclass for Windows systems.
+
+ On a Windows system, instantiating a PurePath should return this object.
+ However, you can also instantiate it directly on any system.
+ """
+ _flavour = _windows_flavour
+ __slots__ = ()
+
+
+# Filesystem-accessing classes
+
+
+class Path(PurePath):
+ """PurePath subclass that can make system calls.
+
+ Path represents a filesystem path but unlike PurePath, also offers
+ methods to do system calls on path objects. Depending on your system,
+ instantiating a Path will return either a PosixPath or a WindowsPath
+ object. You can also instantiate a PosixPath or WindowsPath directly,
+ but cannot instantiate a WindowsPath on a POSIX system or vice versa.
+ """
+ __slots__ = (
+ '_accessor',
+ '_closed',
+ )
+
+ def __new__(cls, *args, **kwargs):
+ if cls is Path:
+ cls = WindowsPath if os.name == 'nt' else PosixPath
+ self = cls._from_parts(args, init=False)
+ if not self._flavour.is_supported:
+ raise NotImplementedError("cannot instantiate %r on your system"
+ % (cls.__name__,))
+ self._init()
+ return self
+
+ def _init(self,
+ # Private non-constructor arguments
+ template=None,
+ ):
+ self._closed = False
+ if template is not None:
+ self._accessor = template._accessor
+ else:
+ self._accessor = _normal_accessor
+
+ def _make_child_relpath(self, part):
+ # This is an optimization used for dir walking. `part` must be
+ # a single part relative to this path.
+ parts = self._parts + [part]
+ return self._from_parsed_parts(self._drv, self._root, parts)
+
+ def __enter__(self):
+ if self._closed:
+ self._raise_closed()
+ return self
+
+ def __exit__(self, t, v, tb):
+ self._closed = True
+
+ def _raise_closed(self):
+ raise ValueError("I/O operation on closed path")
+
+ def _opener(self, name, flags, mode=0o666):
+ # A stub for the opener argument to built-in open()
+ return self._accessor.open(self, flags, mode)
+
+ def _raw_open(self, flags, mode=0o777):
+ """
+ Open the file pointed by this path and return a file descriptor,
+ as os.open() does.
+ """
+ if self._closed:
+ self._raise_closed()
+ return self._accessor.open(self, flags, mode)
+
+ # Public API
+
+ @classmethod
+ def cwd(cls):
+ """Return a new path pointing to the current working directory
+ (as returned by os.getcwd()).
+ """
+ return cls(os.getcwd())
+
+ @classmethod
+ def home(cls):
+ """Return a new path pointing to the user's home directory (as
+ returned by os.path.expanduser('~')).
+ """
+ return cls(cls()._flavour.gethomedir(None))
+
+ def samefile(self, other_path):
+        """Return whether other_path is the same file as this one
+ (as returned by os.path.samefile()).
+ """
+ if hasattr(os.path, "samestat"):
+ st = self.stat()
+ try:
+ other_st = other_path.stat()
+ except AttributeError:
+ other_st = os.stat(other_path)
+ return os.path.samestat(st, other_st)
+ else:
+ filename1 = six.text_type(self)
+ filename2 = six.text_type(other_path)
+ st1 = _win32_get_unique_path_id(filename1)
+ st2 = _win32_get_unique_path_id(filename2)
+ return st1 == st2
+
+ def iterdir(self):
+ """Iterate over the files in this directory. Does not yield any
+ result for the special paths '.' and '..'.
+ """
+ if self._closed:
+ self._raise_closed()
+ for name in self._accessor.listdir(self):
+ if name in ('.', '..'):
+ # Yielding a path object for these makes little sense
+ continue
+ yield self._make_child_relpath(name)
+ if self._closed:
+ self._raise_closed()
+
+ def glob(self, pattern):
+ """Iterate over this subtree and yield all existing files (of any
+ kind, including directories) matching the given relative pattern.
+ """
+ if not pattern:
+ raise ValueError("Unacceptable pattern: {0!r}".format(pattern))
+ pattern = self._flavour.casefold(pattern)
+ drv, root, pattern_parts = self._flavour.parse_parts((pattern,))
+ if drv or root:
+ raise NotImplementedError("Non-relative patterns are unsupported")
+ selector = _make_selector(tuple(pattern_parts))
+ for p in selector.select_from(self):
+ yield p
+
+ def rglob(self, pattern):
+ """Recursively yield all existing files (of any kind, including
+ directories) matching the given relative pattern, anywhere in
+ this subtree.
+ """
+ pattern = self._flavour.casefold(pattern)
+ drv, root, pattern_parts = self._flavour.parse_parts((pattern,))
+ if drv or root:
+ raise NotImplementedError("Non-relative patterns are unsupported")
+ selector = _make_selector(("**",) + tuple(pattern_parts))
+ for p in selector.select_from(self):
+ yield p
+
+ def absolute(self):
+ """Return an absolute version of this path. This function works
+ even if the path doesn't point to anything.
+
+ No normalization is done, i.e. all '.' and '..' will be kept along.
+ Use resolve() to get the canonical path to a file.
+ """
+ # XXX untested yet!
+ if self._closed:
+ self._raise_closed()
+ if self.is_absolute():
+ return self
+ # FIXME this must defer to the specific flavour (and, under Windows,
+ # use nt._getfullpathname())
+ obj = self._from_parts([os.getcwd()] + self._parts, init=False)
+ obj._init(template=self)
+ return obj
+
+ def resolve(self, strict=False):
+ """
+ Make the path absolute, resolving all symlinks on the way and also
+ normalizing it (for example turning slashes into backslashes under
+ Windows).
+ """
+ if self._closed:
+ self._raise_closed()
+ s = self._flavour.resolve(self, strict=strict)
+ if s is None:
+ # No symlink resolution => for consistency, raise an error if
+ # the path is forbidden
+ # but not raise error if file does not exist (see issue #54).
+
+ def _try_func():
+ self.stat()
+
+ def _exc_func(exc):
+ pass
+
+ _try_except_filenotfounderror(_try_func, _exc_func)
+ s = str(self.absolute())
+ else:
+ # ensure s is a string (normpath requires this on older python)
+ s = str(s)
+ # Now we have no symlinks in the path, it's safe to normalize it.
+ normed = self._flavour.pathmod.normpath(s)
+ obj = self._from_parts((normed,), init=False)
+ obj._init(template=self)
+ return obj
+
+ def stat(self):
+ """
+ Return the result of the stat() system call on this path, like
+ os.stat() does.
+ """
+ return self._accessor.stat(self)
+
+ def owner(self):
+ """
+ Return the login name of the file owner.
+ """
+ import pwd
+ return pwd.getpwuid(self.stat().st_uid).pw_name
+
+ def group(self):
+ """
+ Return the group name of the file gid.
+ """
+ import grp
+ return grp.getgrgid(self.stat().st_gid).gr_name
+
+ def open(self, mode='r', buffering=-1, encoding=None,
+ errors=None, newline=None):
+ """
+ Open the file pointed by this path and return a file object, as
+ the built-in open() function does.
+ """
+ if self._closed:
+ self._raise_closed()
+ if sys.version_info >= (3, 3):
+ return io.open(
+ str(self), mode, buffering, encoding, errors, newline,
+ opener=self._opener)
+ else:
+ return io.open(str(self), mode, buffering,
+ encoding, errors, newline)
+
+ def read_bytes(self):
+ """
+ Open the file in bytes mode, read it, and close the file.
+ """
+ with self.open(mode='rb') as f:
+ return f.read()
+
+ def read_text(self, encoding=None, errors=None):
+ """
+ Open the file in text mode, read it, and close the file.
+ """
+ with self.open(mode='r', encoding=encoding, errors=errors) as f:
+ return f.read()
+
+ def write_bytes(self, data):
+ """
+ Open the file in bytes mode, write to it, and close the file.
+ """
+ if not isinstance(data, six.binary_type):
+ raise TypeError(
+ 'data must be %s, not %s' %
+ (six.binary_type.__name__, data.__class__.__name__))
+ with self.open(mode='wb') as f:
+ return f.write(data)
+
+ def write_text(self, data, encoding=None, errors=None):
+ """
+ Open the file in text mode, write to it, and close the file.
+ """
+ if not isinstance(data, six.text_type):
+ raise TypeError(
+ 'data must be %s, not %s' %
+ (six.text_type.__name__, data.__class__.__name__))
+ with self.open(mode='w', encoding=encoding, errors=errors) as f:
+ return f.write(data)
+
+ def touch(self, mode=0o666, exist_ok=True):
+ """
+ Create this file with the given access mode, if it doesn't exist.
+ """
+ if self._closed:
+ self._raise_closed()
+ if exist_ok:
+ # First try to bump modification time
+ # Implementation note: GNU touch uses the UTIME_NOW option of
+ # the utimensat() / futimens() functions.
+ try:
+ self._accessor.utime(self, None)
+ except OSError:
+ # Avoid exception chaining
+ pass
+ else:
+ return
+ flags = os.O_CREAT | os.O_WRONLY
+ if not exist_ok:
+ flags |= os.O_EXCL
+ fd = self._raw_open(flags, mode)
+ os.close(fd)
+
+ def mkdir(self, mode=0o777, parents=False, exist_ok=False):
+ """
+ Create a new directory at this given path.
+ """
+ if self._closed:
+ self._raise_closed()
+
+ def _try_func():
+ self._accessor.mkdir(self, mode)
+
+ def _exc_func(exc):
+ if not parents or self.parent == self:
+ raise exc
+ self.parent.mkdir(parents=True, exist_ok=True)
+ self.mkdir(mode, parents=False, exist_ok=exist_ok)
+
+ try:
+ _try_except_filenotfounderror(_try_func, _exc_func)
+ except OSError:
+ # Cannot rely on checking for EEXIST, since the operating system
+ # could give priority to other errors like EACCES or EROFS
+ if not exist_ok or not self.is_dir():
+ raise
+
+ def chmod(self, mode):
+ """
+ Change the permissions of the path, like os.chmod().
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.chmod(self, mode)
+
+ def lchmod(self, mode):
+ """
+ Like chmod(), except if the path points to a symlink, the symlink's
+ permissions are changed, rather than its target's.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.lchmod(self, mode)
+
+ def unlink(self):
+ """
+ Remove this file or link.
+ If the path is a directory, use rmdir() instead.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.unlink(self)
+
+ def rmdir(self):
+ """
+ Remove this directory. The directory must be empty.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.rmdir(self)
+
+ def lstat(self):
+ """
+ Like stat(), except if the path points to a symlink, the symlink's
+ status information is returned, rather than its target's.
+ """
+ if self._closed:
+ self._raise_closed()
+ return self._accessor.lstat(self)
+
+ def rename(self, target):
+ """
+ Rename this path to the given path.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.rename(self, target)
+
+ def replace(self, target):
+ """
+ Rename this path to the given path, clobbering the existing
+ destination if it exists.
+ """
+ if sys.version_info < (3, 3):
+ raise NotImplementedError("replace() is only available "
+ "with Python 3.3 and later")
+ if self._closed:
+ self._raise_closed()
+ self._accessor.replace(self, target)
+
+ def symlink_to(self, target, target_is_directory=False):
+ """
+ Make this path a symlink pointing to the given path.
+ Note the order of arguments (self, target) is the reverse of
+ os.symlink's.
+ """
+ if self._closed:
+ self._raise_closed()
+ self._accessor.symlink(target, self, target_is_directory)
+
+ # Convenience functions for querying the stat results
+
+ def exists(self):
+ """
+ Whether this path exists.
+ """
+ try:
+ self.stat()
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+ return True
+
+ def is_dir(self):
+ """
+ Whether this path is a directory.
+ """
+ try:
+ return S_ISDIR(self.stat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def is_file(self):
+ """
+ Whether this path is a regular file (also True for symlinks pointing
+ to regular files).
+ """
+ try:
+ return S_ISREG(self.stat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
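+        # Behavioural note shared by exists(), is_dir() and is_file()
+        # (comments only; the name is hypothetical): a symlink whose target
+        # is missing makes stat() fail with ENOENT, which _ignore_error()
+        # swallows, so
+        #     >>> Path('brokenLink').is_file()
+        #     False
+        # rather than an exception.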
+
+ def is_mount(self):
+ """
+ Check if this path is a POSIX mount point
+ """
+ # Need to exist and be a dir
+ if not self.exists() or not self.is_dir():
+ return False
+
+ parent = Path(self.parent)
+ try:
+ parent_dev = parent.stat().st_dev
+ except OSError:
+ return False
+
+ dev = self.stat().st_dev
+ if dev != parent_dev:
+ return True
+ ino = self.stat().st_ino
+ parent_ino = parent.stat().st_ino
+ return ino == parent_ino
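+        # Sketch of the heuristic above (comments only, not executed):
+        #     >>> PosixPath('/').is_mount()
+        #     True    # same device as its parent, but '/' is its own parent
+        #     >>> PosixPath('/proc').is_mount()
+        #     True    # typically mounted on a different st_dev than '/'
+        # An ordinary subdirectory shares st_dev with its parent and has a
+        # different st_ino, so it falls through to False.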
+
+ def is_symlink(self):
+ """
+ Whether this path is a symbolic link.
+ """
+ try:
+ return S_ISLNK(self.lstat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def is_block_device(self):
+ """
+ Whether this path is a block device.
+ """
+ try:
+ return S_ISBLK(self.stat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def is_char_device(self):
+ """
+ Whether this path is a character device.
+ """
+ try:
+ return S_ISCHR(self.stat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def is_fifo(self):
+ """
+ Whether this path is a FIFO.
+ """
+ try:
+ return S_ISFIFO(self.stat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def is_socket(self):
+ """
+ Whether this path is a socket.
+ """
+ try:
+ return S_ISSOCK(self.stat().st_mode)
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ # Path doesn't exist or is a broken symlink
+ # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
+ return False
+ except ValueError:
+ # Non-encodable path
+ return False
+
+ def expanduser(self):
+ """ Return a new path with expanded ~ and ~user constructs
+ (as returned by os.path.expanduser)
+ """
+ if (not (self._drv or self._root)
+ and self._parts and self._parts[0][:1] == '~'):
+ homedir = self._flavour.gethomedir(self._parts[0][1:])
+ return self._from_parts([homedir] + self._parts[1:])
+
+ return self
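+        # Illustrative behaviour (comments only; the home directory shown is
+        # hypothetical):
+        #     >>> PosixPath('~/docs').expanduser()
+        #     PosixPath('/home/user/docs')
+        # Anchored paths, and paths whose first component does not start
+        # with '~', are returned unchanged.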
+
+
+class PosixPath(Path, PurePosixPath):
+ """Path subclass for non-Windows systems.
+
+ On a POSIX system, instantiating a Path should return this object.
+ """
+ __slots__ = ()
+
+
+class WindowsPath(Path, PureWindowsPath):
+ """Path subclass for Windows systems.
+
+ On a Windows system, instantiating a Path should return this object.
+ """
+ __slots__ = ()
+
+ def owner(self):
+ raise NotImplementedError("Path.owner() is unsupported on this system")
+
+ def group(self):
+ raise NotImplementedError("Path.group() is unsupported on this system")
+
+ def is_mount(self):
+ raise NotImplementedError(
+ "Path.is_mount() is unsupported on this system")
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/requirements.txt b/testing/web-platform/tests/tools/third_party/pathlib2/requirements.txt
new file mode 100644
index 0000000000..9d43212790
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/requirements.txt
@@ -0,0 +1,3 @@
+six
+scandir; python_version < '3.5'
+mock; python_version < '3.3'
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/setup.cfg b/testing/web-platform/tests/tools/third_party/pathlib2/setup.cfg
new file mode 100644
index 0000000000..32afc861d3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/setup.cfg
@@ -0,0 +1,8 @@
+[nosetests]
+with-coverage=1
+cover-package=pathlib2
+cover-branches=1
+cover-html=1
+
+[wheel]
+universal = 1
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/setup.py b/testing/web-platform/tests/tools/third_party/pathlib2/setup.py
new file mode 100644
index 0000000000..cbb6aaa0df
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/setup.py
@@ -0,0 +1,48 @@
+# Copyright (c) 2014-2017 Matthias C. M. Troffaes
+# Copyright (c) 2012-2014 Antoine Pitrou and contributors
+# Distributed under the terms of the MIT License.
+
+import io
+from setuptools import setup, find_packages
+
+
+def readfile(filename):
+ with io.open(filename, encoding="utf-8") as stream:
+ return stream.read().split("\n")
+
+
+readme = readfile("README.rst")[5:] # skip title and badges
+version = readfile("VERSION")[0].strip()
+
+setup(
+ name='pathlib2',
+ version=version,
+ packages=find_packages(),
+ license='MIT',
+ description='Object-oriented filesystem paths',
+ long_description="\n".join(readme[2:]),
+ author='Matthias C. M. Troffaes',
+ author_email='matthias.troffaes@gmail.com',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: System :: Filesystems',
+ ],
+ url='https://github.com/mcmtroffaes/pathlib2',
+ install_requires=['six'],
+ extras_require={
+ ':python_version<"3.5"': ['scandir'],
+ },
+)
diff --git a/testing/web-platform/tests/tools/third_party/pathlib2/tests/test_pathlib2.py b/testing/web-platform/tests/tools/third_party/pathlib2/tests/test_pathlib2.py
new file mode 100644
index 0000000000..65a5281a9d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pathlib2/tests/test_pathlib2.py
@@ -0,0 +1,2406 @@
+# Copyright (c) 2014-2017 Matthias C. M. Troffaes
+# Copyright (c) 2012-2014 Antoine Pitrou and contributors
+# Distributed under the terms of the MIT License.
+
+
+import io
+import os
+import errno
+import pathlib2 as pathlib
+import pickle
+import six
+import socket
+import stat
+import sys
+import tempfile
+
+if sys.version_info >= (3, 3):
+ import collections.abc as collections_abc
+else:
+ import collections as collections_abc
+
+if sys.version_info < (2, 7):
+ try:
+ import unittest2 as unittest
+ except ImportError:
+ raise ImportError("unittest2 is required for tests on pre-2.7")
+else:
+ import unittest
+
+if sys.version_info < (3, 3):
+ try:
+ import mock
+ except ImportError:
+ raise ImportError("mock is required for tests on pre-3.3")
+else:
+ from unittest import mock
+
+# assertRaisesRegex is missing prior to Python 3.2
+if sys.version_info < (3, 2):
+ unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
+
+try:
+ from test import support
+except ImportError:
+ from test import test_support as support
+
+android_not_root = getattr(support, "android_not_root", False)
+
+TESTFN = support.TESTFN
+
+# work around broken support.rmtree on Python 3.3 on Windows
+if (os.name == 'nt'
+ and sys.version_info >= (3, 0) and sys.version_info < (3, 4)):
+ import shutil
+ support.rmtree = shutil.rmtree
+
+try:
+ import grp
+ import pwd
+except ImportError:
+ grp = pwd = None
+
+# support.can_symlink is missing prior to Python 3
+if six.PY2:
+
+ def support_can_symlink():
+ return pathlib.supports_symlinks
+
+ support_skip_unless_symlink = unittest.skipIf(
+ not pathlib.supports_symlinks,
+ "symlinks not supported on this platform")
+else:
+ support_can_symlink = support.can_symlink
+ support_skip_unless_symlink = support.skip_unless_symlink
+
+
+# Backported from 3.4
+def fs_is_case_insensitive(directory):
+ """Detects if the file system for the specified directory is
+ case-insensitive.
+ """
+ base_fp, base_path = tempfile.mkstemp(dir=directory)
+ case_path = base_path.upper()
+ if case_path == base_path:
+ case_path = base_path.lower()
+ try:
+ return os.path.samefile(base_path, case_path)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ return False
+ finally:
+ os.unlink(base_path)
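+# Usage sketch for the helper above (comments only, not executed):
+#     >>> fs_is_case_insensitive(tempfile.gettempdir())
+#     True    # e.g. on a default macOS or Windows volume
+# The probe creates a temporary file and asks os.path.samefile() whether the
+# case-swapped spelling of its name resolves to the same file.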
+
+
+support.fs_is_case_insensitive = fs_is_case_insensitive
+
+
+class _BaseFlavourTest(object):
+
+ def _check_parse_parts(self, arg, expected):
+ f = self.flavour.parse_parts
+ sep = self.flavour.sep
+ altsep = self.flavour.altsep
+ actual = f([x.replace('/', sep) for x in arg])
+ self.assertEqual(actual, expected)
+ if altsep:
+ actual = f([x.replace('/', altsep) for x in arg])
+ self.assertEqual(actual, expected)
+ drv, root, parts = actual
+ # neither bytes (py3) nor unicode (py2)
+ self.assertIsInstance(drv, str)
+ self.assertIsInstance(root, str)
+ for p in parts:
+ self.assertIsInstance(p, str)
+
+ def test_parse_parts_common(self):
+ check = self._check_parse_parts
+ sep = self.flavour.sep
+ # Unanchored parts
+ check([], ('', '', []))
+ check(['a'], ('', '', ['a']))
+ check(['a/'], ('', '', ['a']))
+ check(['a', 'b'], ('', '', ['a', 'b']))
+ # Expansion
+ check(['a/b'], ('', '', ['a', 'b']))
+ check(['a/b/'], ('', '', ['a', 'b']))
+ check(['a', 'b/c', 'd'], ('', '', ['a', 'b', 'c', 'd']))
+ # Collapsing and stripping excess slashes
+ check(['a', 'b//c', 'd'], ('', '', ['a', 'b', 'c', 'd']))
+ check(['a', 'b/c/', 'd'], ('', '', ['a', 'b', 'c', 'd']))
+ # Eliminating standalone dots
+ check(['.'], ('', '', []))
+ check(['.', '.', 'b'], ('', '', ['b']))
+ check(['a', '.', 'b'], ('', '', ['a', 'b']))
+ check(['a', '.', '.'], ('', '', ['a']))
+ # The first part is anchored
+ check(['/a/b'], ('', sep, [sep, 'a', 'b']))
+ check(['/a', 'b'], ('', sep, [sep, 'a', 'b']))
+ check(['/a/', 'b'], ('', sep, [sep, 'a', 'b']))
+ # Ignoring parts before an anchored part
+ check(['a', '/b', 'c'], ('', sep, [sep, 'b', 'c']))
+ check(['a', '/b', '/c'], ('', sep, [sep, 'c']))
+
+
+class PosixFlavourTest(_BaseFlavourTest, unittest.TestCase):
+ flavour = pathlib._posix_flavour
+
+ def test_parse_parts(self):
+ check = self._check_parse_parts
+ # Collapsing of excess leading slashes, except for the double-slash
+ # special case.
+ check(['//a', 'b'], ('', '//', ['//', 'a', 'b']))
+ check(['///a', 'b'], ('', '/', ['/', 'a', 'b']))
+ check(['////a', 'b'], ('', '/', ['/', 'a', 'b']))
+ # Paths which look like NT paths aren't treated specially
+ check(['c:a'], ('', '', ['c:a']))
+ check(['c:\\a'], ('', '', ['c:\\a']))
+ check(['\\a'], ('', '', ['\\a']))
+
+ def test_splitroot(self):
+ f = self.flavour.splitroot
+ self.assertEqual(f(''), ('', '', ''))
+ self.assertEqual(f('a'), ('', '', 'a'))
+ self.assertEqual(f('a/b'), ('', '', 'a/b'))
+ self.assertEqual(f('a/b/'), ('', '', 'a/b/'))
+ self.assertEqual(f('/a'), ('', '/', 'a'))
+ self.assertEqual(f('/a/b'), ('', '/', 'a/b'))
+ self.assertEqual(f('/a/b/'), ('', '/', 'a/b/'))
+ # The root is collapsed when there are redundant slashes
+ # except when there are exactly two leading slashes, which
+ # is a special case in POSIX.
+ self.assertEqual(f('//a'), ('', '//', 'a'))
+ self.assertEqual(f('///a'), ('', '/', 'a'))
+ self.assertEqual(f('///a/b'), ('', '/', 'a/b'))
+ # Paths which look like NT paths aren't treated specially
+ self.assertEqual(f('c:/a/b'), ('', '', 'c:/a/b'))
+ self.assertEqual(f('\\/a/b'), ('', '', '\\/a/b'))
+ self.assertEqual(f('\\a\\b'), ('', '', '\\a\\b'))
+
+
+class NTFlavourTest(_BaseFlavourTest, unittest.TestCase):
+ flavour = pathlib._windows_flavour
+
+ def test_parse_parts(self):
+ check = self._check_parse_parts
+ # First part is anchored
+ check(['c:'], ('c:', '', ['c:']))
+ check(['c:/'], ('c:', '\\', ['c:\\']))
+ check(['/'], ('', '\\', ['\\']))
+ check(['c:a'], ('c:', '', ['c:', 'a']))
+ check(['c:/a'], ('c:', '\\', ['c:\\', 'a']))
+ check(['/a'], ('', '\\', ['\\', 'a']))
+ # UNC paths
+ check(['//a/b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
+ check(['//a/b/'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
+ check(['//a/b/c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c']))
+ # Second part is anchored, so that the first part is ignored
+ check(['a', 'Z:b', 'c'], ('Z:', '', ['Z:', 'b', 'c']))
+ check(['a', 'Z:/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
+ # UNC paths
+ check(['a', '//b/c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
+ # Collapsing and stripping excess slashes
+ check(['a', 'Z://b//c/', 'd/'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd']))
+ # UNC paths
+ check(['a', '//b/c//', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
+ # Extended paths
+ check(['//?/c:/'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\']))
+ check(['//?/c:/a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a']))
+ check(['//?/c:/a', '/b'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'b']))
+ # Extended UNC paths (format is "\\?\UNC\server\share")
+ check(['//?/UNC/b/c'],
+ ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\']))
+ check(['//?/UNC/b/c/d'],
+ ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd']))
+ # Second part has a root but not drive
+ check(['a', '/b', 'c'], ('', '\\', ['\\', 'b', 'c']))
+ check(['Z:/a', '/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
+ check(['//?/Z:/a', '/b', 'c'],
+ ('\\\\?\\Z:', '\\', ['\\\\?\\Z:\\', 'b', 'c']))
+
+ def test_splitroot(self):
+ f = self.flavour.splitroot
+ self.assertEqual(f(''), ('', '', ''))
+ self.assertEqual(f('a'), ('', '', 'a'))
+ self.assertEqual(f('a\\b'), ('', '', 'a\\b'))
+ self.assertEqual(f('\\a'), ('', '\\', 'a'))
+ self.assertEqual(f('\\a\\b'), ('', '\\', 'a\\b'))
+ self.assertEqual(f('c:a\\b'), ('c:', '', 'a\\b'))
+ self.assertEqual(f('c:\\a\\b'), ('c:', '\\', 'a\\b'))
+ # Redundant slashes in the root are collapsed
+ self.assertEqual(f('\\\\a'), ('', '\\', 'a'))
+ self.assertEqual(f('\\\\\\a/b'), ('', '\\', 'a/b'))
+ self.assertEqual(f('c:\\\\a'), ('c:', '\\', 'a'))
+ self.assertEqual(f('c:\\\\\\a/b'), ('c:', '\\', 'a/b'))
+ # Valid UNC paths
+ self.assertEqual(f('\\\\a\\b'), ('\\\\a\\b', '\\', ''))
+ self.assertEqual(f('\\\\a\\b\\'), ('\\\\a\\b', '\\', ''))
+ self.assertEqual(f('\\\\a\\b\\c\\d'), ('\\\\a\\b', '\\', 'c\\d'))
+ # These are non-UNC paths (according to ntpath.py and test_ntpath)
+ # However, command.com says such paths are invalid, so it's
+ # difficult to know what the right semantics are
+ self.assertEqual(f('\\\\\\a\\b'), ('', '\\', 'a\\b'))
+ self.assertEqual(f('\\\\a'), ('', '\\', 'a'))
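+        # Reading the assertions above: splitroot() yields a
+        # (drive, root, rest) triple; with hypothetical inputs,
+        #     'c:\a\b'        -> ('c:', '\', 'a\b')
+        #     '\\srv\share\x' -> ('\\srv\share', '\', 'x')
+        # so a UNC "drive" carries both the server and the share name.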
+
+
+#
+# Tests for the pure classes
+#
+
+with_fsencode = unittest.skipIf(
+ sys.version_info < (3, 2),
+    'os.fsencode was introduced in Python 3.2')
+
+
+class _BasePurePathTest(object):
+
+    # keys are canonical paths, values are lists of tuples of arguments
+    # that are supposed to produce equal paths
+ equivalences = {
+ 'a/b': [
+ ('a', 'b'), ('a/', 'b'), ('a', 'b/'), ('a/', 'b/'),
+ ('a/b/',), ('a//b',), ('a//b//',),
+ # empty components get removed
+ ('', 'a', 'b'), ('a', '', 'b'), ('a', 'b', ''),
+ ],
+ '/b/c/d': [
+ ('a', '/b/c', 'd'), ('a', '///b//c', 'd/'),
+ ('/a', '/b/c', 'd'),
+ # empty components get removed
+ ('/', 'b', '', 'c/d'), ('/', '', 'b/c/d'), ('', '/b/c/d'),
+ ],
+ }
+
+ def setUp(self):
+ p = self.cls('a')
+ self.flavour = p._flavour
+ self.sep = self.flavour.sep
+ self.altsep = self.flavour.altsep
+
+ def test_constructor_common(self):
+ P = self.cls
+ p = P('a')
+ self.assertIsInstance(p, P)
+
+ class PathLike:
+ def __fspath__(self):
+ return "a/b/c"
+
+ P('a', 'b', 'c')
+ P('/a', 'b', 'c')
+ P('a/b/c')
+ P('/a/b/c')
+ P(PathLike())
+ self.assertEqual(P(P('a')), P('a'))
+ self.assertEqual(P(P('a'), 'b'), P('a/b'))
+ self.assertEqual(P(P('a'), P('b')), P('a/b'))
+ self.assertEqual(P(P('a'), P('b'), P('c')), P(PathLike()))
+
+ def _check_str_subclass(self, *args):
+ # Issue #21127: it should be possible to construct a PurePath object
+ # from a str subclass instance, and it then gets converted to
+ # a pure str object.
+ class StrSubclass(str):
+ pass
+ P = self.cls
+ p = P(*(StrSubclass(x) for x in args))
+ self.assertEqual(p, P(*args))
+ for part in p.parts:
+ self.assertIs(type(part), str)
+
+ def test_str_subclass_common(self):
+ self._check_str_subclass('')
+ self._check_str_subclass('.')
+ self._check_str_subclass('a')
+ self._check_str_subclass('a/b.txt')
+ self._check_str_subclass('/a/b.txt')
+
+ def test_join_common(self):
+ P = self.cls
+ p = P('a/b')
+ pp = p.joinpath('c')
+ self.assertEqual(pp, P('a/b/c'))
+ self.assertIs(type(pp), type(p))
+ pp = p.joinpath('c', 'd')
+ self.assertEqual(pp, P('a/b/c/d'))
+ pp = p.joinpath(P('c'))
+ self.assertEqual(pp, P('a/b/c'))
+ pp = p.joinpath('/c')
+ self.assertEqual(pp, P('/c'))
+
+ def test_div_common(self):
+ # Basically the same as joinpath()
+ P = self.cls
+ p = P('a/b')
+ pp = p / 'c'
+ self.assertEqual(pp, P('a/b/c'))
+ self.assertIs(type(pp), type(p))
+ pp = p / 'c/d'
+ self.assertEqual(pp, P('a/b/c/d'))
+ pp = p / 'c' / 'd'
+ self.assertEqual(pp, P('a/b/c/d'))
+ pp = 'c' / p / 'd'
+ self.assertEqual(pp, P('c/a/b/d'))
+ pp = p / P('c')
+ self.assertEqual(pp, P('a/b/c'))
+ pp = p / '/c'
+ self.assertEqual(pp, P('/c'))
+
+ def _check_str(self, expected, args):
+ p = self.cls(*args)
+ self.assertEqual(str(p), expected.replace('/', self.sep))
+
+ def test_str_common(self):
+ # Canonicalized paths roundtrip
+ for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
+ self._check_str(pathstr, (pathstr,))
+ # Special case for the empty path
+ self._check_str('.', ('',))
+ # Other tests for str() are in test_equivalences()
+
+ def test_as_posix_common(self):
+ P = self.cls
+ for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
+ self.assertEqual(P(pathstr).as_posix(), pathstr)
+ # Other tests for as_posix() are in test_equivalences()
+
+ @with_fsencode
+ def test_as_bytes_common(self):
+ sep = os.fsencode(self.sep)
+ P = self.cls
+ self.assertEqual(bytes(P('a/b')), b'a' + sep + b'b')
+
+ def test_as_uri_common(self):
+ P = self.cls
+ with self.assertRaises(ValueError):
+ P('a').as_uri()
+ with self.assertRaises(ValueError):
+ P().as_uri()
+
+ def test_repr_common(self):
+ for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
+ p = self.cls(pathstr)
+ clsname = p.__class__.__name__
+ r = repr(p)
+ # The repr() is in the form ClassName("forward-slashes path")
+ self.assertTrue(r.startswith(clsname + '('), r)
+ self.assertTrue(r.endswith(')'), r)
+ inner = r[len(clsname) + 1: -1]
+ self.assertEqual(eval(inner), p.as_posix())
+ # The repr() roundtrips
+ q = eval(r, pathlib.__dict__)
+ self.assertIs(q.__class__, p.__class__)
+ self.assertEqual(q, p)
+ self.assertEqual(repr(q), r)
+
+ def test_eq_common(self):
+ P = self.cls
+ self.assertEqual(P('a/b'), P('a/b'))
+ self.assertEqual(P('a/b'), P('a', 'b'))
+ self.assertNotEqual(P('a/b'), P('a'))
+ self.assertNotEqual(P('a/b'), P('/a/b'))
+ self.assertNotEqual(P('a/b'), P())
+ self.assertNotEqual(P('/a/b'), P('/'))
+ self.assertNotEqual(P(), P('/'))
+ self.assertNotEqual(P(), "")
+ self.assertNotEqual(P(), {})
+ self.assertNotEqual(P(), int)
+
+ def test_match_common(self):
+ P = self.cls
+ self.assertRaises(ValueError, P('a').match, '')
+ self.assertRaises(ValueError, P('a').match, '.')
+ # Simple relative pattern
+ self.assertTrue(P('b.py').match('b.py'))
+ self.assertTrue(P('a/b.py').match('b.py'))
+ self.assertTrue(P('/a/b.py').match('b.py'))
+ self.assertFalse(P('a.py').match('b.py'))
+ self.assertFalse(P('b/py').match('b.py'))
+ self.assertFalse(P('/a.py').match('b.py'))
+ self.assertFalse(P('b.py/c').match('b.py'))
+        # Wildcard relative pattern
+ self.assertTrue(P('b.py').match('*.py'))
+ self.assertTrue(P('a/b.py').match('*.py'))
+ self.assertTrue(P('/a/b.py').match('*.py'))
+ self.assertFalse(P('b.pyc').match('*.py'))
+ self.assertFalse(P('b./py').match('*.py'))
+ self.assertFalse(P('b.py/c').match('*.py'))
+ # Multi-part relative pattern
+ self.assertTrue(P('ab/c.py').match('a*/*.py'))
+ self.assertTrue(P('/d/ab/c.py').match('a*/*.py'))
+ self.assertFalse(P('a.py').match('a*/*.py'))
+ self.assertFalse(P('/dab/c.py').match('a*/*.py'))
+ self.assertFalse(P('ab/c.py/d').match('a*/*.py'))
+ # Absolute pattern
+ self.assertTrue(P('/b.py').match('/*.py'))
+ self.assertFalse(P('b.py').match('/*.py'))
+ self.assertFalse(P('a/b.py').match('/*.py'))
+ self.assertFalse(P('/a/b.py').match('/*.py'))
+ # Multi-part absolute pattern
+ self.assertTrue(P('/a/b.py').match('/a/*.py'))
+ self.assertFalse(P('/ab.py').match('/a/*.py'))
+ self.assertFalse(P('/a/b/c.py').match('/a/*.py'))
+
+ def test_ordering_common(self):
+        # Ordering is tuple-like
+ def assertLess(a, b):
+ self.assertLess(a, b)
+ self.assertGreater(b, a)
+ P = self.cls
+ a = P('a')
+ b = P('a/b')
+ c = P('abc')
+ d = P('b')
+ assertLess(a, b)
+ assertLess(a, c)
+ assertLess(a, d)
+ assertLess(b, c)
+ assertLess(c, d)
+ P = self.cls
+ a = P('/a')
+ b = P('/a/b')
+ c = P('/abc')
+ d = P('/b')
+ assertLess(a, b)
+ assertLess(a, c)
+ assertLess(a, d)
+ assertLess(b, c)
+ assertLess(c, d)
+ if sys.version_info > (3,):
+ with self.assertRaises(TypeError):
+ P() < {}
+ else:
+ P() < {}
+
+ def test_parts_common(self):
+ # `parts` returns a tuple
+ sep = self.sep
+ P = self.cls
+ p = P('a/b')
+ parts = p.parts
+ self.assertEqual(parts, ('a', 'b'))
+ # The object gets reused
+ self.assertIs(parts, p.parts)
+ # When the path is absolute, the anchor is a separate part
+ p = P('/a/b')
+ parts = p.parts
+ self.assertEqual(parts, (sep, 'a', 'b'))
+
+ def test_fspath_common(self):
+ P = self.cls
+ p = P('a/b')
+ self._check_str(p.__fspath__(), ('a/b',))
+ if sys.version_info >= (3, 6):
+ self._check_str(os.fspath(p), ('a/b',))
+
+ def test_equivalences(self):
+ for k, tuples in self.equivalences.items():
+ canon = k.replace('/', self.sep)
+ posix = k.replace(self.sep, '/')
+ if canon != posix:
+ tuples = tuples + [
+ tuple(part.replace('/', self.sep) for part in t)
+ for t in tuples
+ ]
+ tuples.append((posix, ))
+ pcanon = self.cls(canon)
+ for t in tuples:
+ p = self.cls(*t)
+ self.assertEqual(p, pcanon, "failed with args {0}".format(t))
+ self.assertEqual(hash(p), hash(pcanon))
+ self.assertEqual(str(p), canon)
+ self.assertEqual(p.as_posix(), posix)
+
+ def test_parent_common(self):
+ # Relative
+ P = self.cls
+ p = P('a/b/c')
+ self.assertEqual(p.parent, P('a/b'))
+ self.assertEqual(p.parent.parent, P('a'))
+ self.assertEqual(p.parent.parent.parent, P())
+ self.assertEqual(p.parent.parent.parent.parent, P())
+ # Anchored
+ p = P('/a/b/c')
+ self.assertEqual(p.parent, P('/a/b'))
+ self.assertEqual(p.parent.parent, P('/a'))
+ self.assertEqual(p.parent.parent.parent, P('/'))
+ self.assertEqual(p.parent.parent.parent.parent, P('/'))
+
+ def test_parents_common(self):
+ # Relative
+ P = self.cls
+ p = P('a/b/c')
+ par = p.parents
+ self.assertEqual(len(par), 3)
+ self.assertEqual(par[0], P('a/b'))
+ self.assertEqual(par[1], P('a'))
+ self.assertEqual(par[2], P('.'))
+ self.assertEqual(list(par), [P('a/b'), P('a'), P('.')])
+ with self.assertRaises(IndexError):
+ par[-1]
+ with self.assertRaises(IndexError):
+ par[3]
+ with self.assertRaises(TypeError):
+ par[0] = p
+ # Anchored
+ p = P('/a/b/c')
+ par = p.parents
+ self.assertEqual(len(par), 3)
+ self.assertEqual(par[0], P('/a/b'))
+ self.assertEqual(par[1], P('/a'))
+ self.assertEqual(par[2], P('/'))
+ self.assertEqual(list(par), [P('/a/b'), P('/a'), P('/')])
+ with self.assertRaises(IndexError):
+ par[3]
+
+ def test_drive_common(self):
+ P = self.cls
+ self.assertEqual(P('a/b').drive, '')
+ self.assertEqual(P('/a/b').drive, '')
+ self.assertEqual(P('').drive, '')
+
+ def test_root_common(self):
+ P = self.cls
+ sep = self.sep
+ self.assertEqual(P('').root, '')
+ self.assertEqual(P('a/b').root, '')
+ self.assertEqual(P('/').root, sep)
+ self.assertEqual(P('/a/b').root, sep)
+
+ def test_anchor_common(self):
+ P = self.cls
+ sep = self.sep
+ self.assertEqual(P('').anchor, '')
+ self.assertEqual(P('a/b').anchor, '')
+ self.assertEqual(P('/').anchor, sep)
+ self.assertEqual(P('/a/b').anchor, sep)
+
+ def test_name_common(self):
+ P = self.cls
+ self.assertEqual(P('').name, '')
+ self.assertEqual(P('.').name, '')
+ self.assertEqual(P('/').name, '')
+ self.assertEqual(P('a/b').name, 'b')
+ self.assertEqual(P('/a/b').name, 'b')
+ self.assertEqual(P('/a/b/.').name, 'b')
+ self.assertEqual(P('a/b.py').name, 'b.py')
+ self.assertEqual(P('/a/b.py').name, 'b.py')
+
+ def test_suffix_common(self):
+ P = self.cls
+ self.assertEqual(P('').suffix, '')
+ self.assertEqual(P('.').suffix, '')
+ self.assertEqual(P('..').suffix, '')
+ self.assertEqual(P('/').suffix, '')
+ self.assertEqual(P('a/b').suffix, '')
+ self.assertEqual(P('/a/b').suffix, '')
+ self.assertEqual(P('/a/b/.').suffix, '')
+ self.assertEqual(P('a/b.py').suffix, '.py')
+ self.assertEqual(P('/a/b.py').suffix, '.py')
+ self.assertEqual(P('a/.hgrc').suffix, '')
+ self.assertEqual(P('/a/.hgrc').suffix, '')
+ self.assertEqual(P('a/.hg.rc').suffix, '.rc')
+ self.assertEqual(P('/a/.hg.rc').suffix, '.rc')
+ self.assertEqual(P('a/b.tar.gz').suffix, '.gz')
+ self.assertEqual(P('/a/b.tar.gz').suffix, '.gz')
+ self.assertEqual(P('a/Some name. Ending with a dot.').suffix, '')
+ self.assertEqual(P('/a/Some name. Ending with a dot.').suffix, '')
+
+ def test_suffixes_common(self):
+ P = self.cls
+ self.assertEqual(P('').suffixes, [])
+ self.assertEqual(P('.').suffixes, [])
+ self.assertEqual(P('/').suffixes, [])
+ self.assertEqual(P('a/b').suffixes, [])
+ self.assertEqual(P('/a/b').suffixes, [])
+ self.assertEqual(P('/a/b/.').suffixes, [])
+ self.assertEqual(P('a/b.py').suffixes, ['.py'])
+ self.assertEqual(P('/a/b.py').suffixes, ['.py'])
+ self.assertEqual(P('a/.hgrc').suffixes, [])
+ self.assertEqual(P('/a/.hgrc').suffixes, [])
+ self.assertEqual(P('a/.hg.rc').suffixes, ['.rc'])
+ self.assertEqual(P('/a/.hg.rc').suffixes, ['.rc'])
+ self.assertEqual(P('a/b.tar.gz').suffixes, ['.tar', '.gz'])
+ self.assertEqual(P('/a/b.tar.gz').suffixes, ['.tar', '.gz'])
+ self.assertEqual(P('a/Some name. Ending with a dot.').suffixes, [])
+ self.assertEqual(P('/a/Some name. Ending with a dot.').suffixes, [])
+
+ def test_stem_common(self):
+ P = self.cls
+ self.assertEqual(P('').stem, '')
+ self.assertEqual(P('.').stem, '')
+ self.assertEqual(P('..').stem, '..')
+ self.assertEqual(P('/').stem, '')
+ self.assertEqual(P('a/b').stem, 'b')
+ self.assertEqual(P('a/b.py').stem, 'b')
+ self.assertEqual(P('a/.hgrc').stem, '.hgrc')
+ self.assertEqual(P('a/.hg.rc').stem, '.hg')
+ self.assertEqual(P('a/b.tar.gz').stem, 'b.tar')
+ self.assertEqual(P('a/Some name. Ending with a dot.').stem,
+ 'Some name. Ending with a dot.')
+
+ def test_with_name_common(self):
+ P = self.cls
+ self.assertEqual(P('a/b').with_name('d.xml'), P('a/d.xml'))
+ self.assertEqual(P('/a/b').with_name('d.xml'), P('/a/d.xml'))
+ self.assertEqual(P('a/b.py').with_name('d.xml'), P('a/d.xml'))
+ self.assertEqual(P('/a/b.py').with_name('d.xml'), P('/a/d.xml'))
+ self.assertEqual(P('a/Dot ending.').with_name('d.xml'), P('a/d.xml'))
+ self.assertEqual(P('/a/Dot ending.').with_name('d.xml'), P('/a/d.xml'))
+ self.assertRaises(ValueError, P('').with_name, 'd.xml')
+ self.assertRaises(ValueError, P('.').with_name, 'd.xml')
+ self.assertRaises(ValueError, P('/').with_name, 'd.xml')
+ self.assertRaises(ValueError, P('a/b').with_name, '')
+ self.assertRaises(ValueError, P('a/b').with_name, '/c')
+ self.assertRaises(ValueError, P('a/b').with_name, 'c/')
+ self.assertRaises(ValueError, P('a/b').with_name, 'c/d')
+
+ def test_with_suffix_common(self):
+ P = self.cls
+ self.assertEqual(P('a/b').with_suffix('.gz'), P('a/b.gz'))
+ self.assertEqual(P('/a/b').with_suffix('.gz'), P('/a/b.gz'))
+ self.assertEqual(P('a/b.py').with_suffix('.gz'), P('a/b.gz'))
+ self.assertEqual(P('/a/b.py').with_suffix('.gz'), P('/a/b.gz'))
+ # Stripping suffix
+ self.assertEqual(P('a/b.py').with_suffix(''), P('a/b'))
+ self.assertEqual(P('/a/b').with_suffix(''), P('/a/b'))
+ # Path doesn't have a "filename" component
+ self.assertRaises(ValueError, P('').with_suffix, '.gz')
+ self.assertRaises(ValueError, P('.').with_suffix, '.gz')
+ self.assertRaises(ValueError, P('/').with_suffix, '.gz')
+ # Invalid suffix
+ self.assertRaises(ValueError, P('a/b').with_suffix, 'gz')
+ self.assertRaises(ValueError, P('a/b').with_suffix, '/')
+ self.assertRaises(ValueError, P('a/b').with_suffix, '.')
+ self.assertRaises(ValueError, P('a/b').with_suffix, '/.gz')
+ self.assertRaises(ValueError, P('a/b').with_suffix, 'c/d')
+ self.assertRaises(ValueError, P('a/b').with_suffix, '.c/.d')
+ self.assertRaises(ValueError, P('a/b').with_suffix, './.d')
+ self.assertRaises(ValueError, P('a/b').with_suffix, '.d/.')
+
+ def test_relative_to_common(self):
+ P = self.cls
+ p = P('a/b')
+ self.assertRaises(TypeError, p.relative_to)
+ if six.PY3:
+ self.assertRaises(TypeError, p.relative_to, b'a')
+ self.assertEqual(p.relative_to(P()), P('a/b'))
+ self.assertEqual(p.relative_to(''), P('a/b'))
+ self.assertEqual(p.relative_to(P('a')), P('b'))
+ self.assertEqual(p.relative_to('a'), P('b'))
+ self.assertEqual(p.relative_to('a/'), P('b'))
+ self.assertEqual(p.relative_to(P('a/b')), P())
+ self.assertEqual(p.relative_to('a/b'), P())
+ # With several args
+ self.assertEqual(p.relative_to('a', 'b'), P())
+ # Unrelated paths
+ self.assertRaises(ValueError, p.relative_to, P('c'))
+ self.assertRaises(ValueError, p.relative_to, P('a/b/c'))
+ self.assertRaises(ValueError, p.relative_to, P('a/c'))
+ self.assertRaises(ValueError, p.relative_to, P('/a'))
+ p = P('/a/b')
+ self.assertEqual(p.relative_to(P('/')), P('a/b'))
+ self.assertEqual(p.relative_to('/'), P('a/b'))
+ self.assertEqual(p.relative_to(P('/a')), P('b'))
+ self.assertEqual(p.relative_to('/a'), P('b'))
+ self.assertEqual(p.relative_to('/a/'), P('b'))
+ self.assertEqual(p.relative_to(P('/a/b')), P())
+ self.assertEqual(p.relative_to('/a/b'), P())
+ # Unrelated paths
+ self.assertRaises(ValueError, p.relative_to, P('/c'))
+ self.assertRaises(ValueError, p.relative_to, P('/a/b/c'))
+ self.assertRaises(ValueError, p.relative_to, P('/a/c'))
+ self.assertRaises(ValueError, p.relative_to, P())
+ self.assertRaises(ValueError, p.relative_to, '')
+ self.assertRaises(ValueError, p.relative_to, P('a'))
+
+ def test_pickling_common(self):
+ P = self.cls
+ p = P('/a/b')
+ for proto in range(0, pickle.HIGHEST_PROTOCOL + 1):
+ dumped = pickle.dumps(p, proto)
+ pp = pickle.loads(dumped)
+ self.assertIs(pp.__class__, p.__class__)
+ self.assertEqual(pp, p)
+ self.assertEqual(hash(pp), hash(p))
+ self.assertEqual(str(pp), str(p))
+
+    # note: this is a new test, not part of upstream; it checks that
+    # unicode path names work on Python 2
+ @unittest.skipIf(
+ six.unichr(0x0100).encode(
+ sys.getfilesystemencoding(), "replace") == b"?",
+ "file system encoding only supports ascii")
+ def test_unicode(self):
+ self.cls(six.unichr(0x0100))
+
+
+class PurePosixPathTest(_BasePurePathTest, unittest.TestCase):
+ cls = pathlib.PurePosixPath
+
+ def test_root(self):
+ P = self.cls
+ self.assertEqual(P('/a/b').root, '/')
+ self.assertEqual(P('///a/b').root, '/')
+ # POSIX special case for two leading slashes
+ self.assertEqual(P('//a/b').root, '//')
+
+ def test_eq(self):
+ P = self.cls
+ self.assertNotEqual(P('a/b'), P('A/b'))
+ self.assertEqual(P('/a'), P('///a'))
+ self.assertNotEqual(P('/a'), P('//a'))
+
+ def test_as_uri(self):
+ P = self.cls
+ self.assertEqual(P('/').as_uri(), 'file:///')
+ self.assertEqual(P('/a/b.c').as_uri(), 'file:///a/b.c')
+ self.assertEqual(P('/a/b%#c').as_uri(), 'file:///a/b%25%23c')
+
+ @with_fsencode
+ def test_as_uri_non_ascii(self):
+ from urllib.parse import quote_from_bytes
+ P = self.cls
+ try:
+ os.fsencode('\xe9')
+ except UnicodeEncodeError:
+ self.skipTest("\\xe9 cannot be encoded to the filesystem encoding")
+ self.assertEqual(P('/a/b\xe9').as_uri(),
+ 'file:///a/b' + quote_from_bytes(os.fsencode('\xe9')))
+
+ def test_match(self):
+ P = self.cls
+ self.assertFalse(P('A.py').match('a.PY'))
+
+ def test_is_absolute(self):
+ P = self.cls
+ self.assertFalse(P().is_absolute())
+ self.assertFalse(P('a').is_absolute())
+ self.assertFalse(P('a/b/').is_absolute())
+ self.assertTrue(P('/').is_absolute())
+ self.assertTrue(P('/a').is_absolute())
+ self.assertTrue(P('/a/b/').is_absolute())
+ self.assertTrue(P('//a').is_absolute())
+ self.assertTrue(P('//a/b').is_absolute())
+
+ def test_is_reserved(self):
+ P = self.cls
+ self.assertIs(False, P('').is_reserved())
+ self.assertIs(False, P('/').is_reserved())
+ self.assertIs(False, P('/foo/bar').is_reserved())
+ self.assertIs(False, P('/dev/con/PRN/NUL').is_reserved())
+
+ def test_join(self):
+ P = self.cls
+ p = P('//a')
+ pp = p.joinpath('b')
+ self.assertEqual(pp, P('//a/b'))
+ pp = P('/a').joinpath('//c')
+ self.assertEqual(pp, P('//c'))
+ pp = P('//a').joinpath('/c')
+ self.assertEqual(pp, P('/c'))
+
+ def test_div(self):
+ # Basically the same as joinpath()
+ P = self.cls
+ p = P('//a')
+ pp = p / 'b'
+ self.assertEqual(pp, P('//a/b'))
+ pp = P('/a') / '//c'
+ self.assertEqual(pp, P('//c'))
+ pp = P('//a') / '/c'
+ self.assertEqual(pp, P('/c'))
+
+
+class PureWindowsPathTest(_BasePurePathTest, unittest.TestCase):
+ cls = pathlib.PureWindowsPath
+
+ equivalences = _BasePurePathTest.equivalences.copy()
+ equivalences.update({
+ 'c:a': [('c:', 'a'), ('c:', 'a/'), ('/', 'c:', 'a')],
+ 'c:/a': [
+ ('c:/', 'a'), ('c:', '/', 'a'), ('c:', '/a'),
+ ('/z', 'c:/', 'a'), ('//x/y', 'c:/', 'a'),
+ ],
+ '//a/b/': [('//a/b',)],
+ '//a/b/c': [
+ ('//a/b', 'c'), ('//a/b/', 'c'),
+ ],
+ })
+
+ def test_str(self):
+ p = self.cls('a/b/c')
+ self.assertEqual(str(p), 'a\\b\\c')
+ p = self.cls('c:/a/b/c')
+ self.assertEqual(str(p), 'c:\\a\\b\\c')
+ p = self.cls('//a/b')
+ self.assertEqual(str(p), '\\\\a\\b\\')
+ p = self.cls('//a/b/c')
+ self.assertEqual(str(p), '\\\\a\\b\\c')
+ p = self.cls('//a/b/c/d')
+ self.assertEqual(str(p), '\\\\a\\b\\c\\d')
+
+ def test_str_subclass(self):
+ self._check_str_subclass('c:')
+ self._check_str_subclass('c:a')
+ self._check_str_subclass('c:a\\b.txt')
+ self._check_str_subclass('c:\\')
+ self._check_str_subclass('c:\\a')
+ self._check_str_subclass('c:\\a\\b.txt')
+ self._check_str_subclass('\\\\some\\share')
+ self._check_str_subclass('\\\\some\\share\\a')
+ self._check_str_subclass('\\\\some\\share\\a\\b.txt')
+
+ def test_eq(self):
+ P = self.cls
+ self.assertEqual(P('c:a/b'), P('c:a/b'))
+ self.assertEqual(P('c:a/b'), P('c:', 'a', 'b'))
+ self.assertNotEqual(P('c:a/b'), P('d:a/b'))
+ self.assertNotEqual(P('c:a/b'), P('c:/a/b'))
+ self.assertNotEqual(P('/a/b'), P('c:/a/b'))
+ # Case-insensitivity
+ self.assertEqual(P('a/B'), P('A/b'))
+ self.assertEqual(P('C:a/B'), P('c:A/b'))
+ self.assertEqual(P('//Some/SHARE/a/B'), P('//somE/share/A/b'))
+
+ @with_fsencode
+ def test_as_uri(self):
+ P = self.cls
+ with self.assertRaises(ValueError):
+ P('/a/b').as_uri()
+ with self.assertRaises(ValueError):
+ P('c:a/b').as_uri()
+ self.assertEqual(P('c:/').as_uri(), 'file:///c:/')
+ self.assertEqual(P('c:/a/b.c').as_uri(), 'file:///c:/a/b.c')
+ self.assertEqual(P('c:/a/b%#c').as_uri(), 'file:///c:/a/b%25%23c')
+ self.assertEqual(P('c:/a/b\xe9').as_uri(), 'file:///c:/a/b%C3%A9')
+ self.assertEqual(P('//some/share/').as_uri(), 'file://some/share/')
+ self.assertEqual(P('//some/share/a/b.c').as_uri(),
+ 'file://some/share/a/b.c')
+ self.assertEqual(P('//some/share/a/b%#c\xe9').as_uri(),
+ 'file://some/share/a/b%25%23c%C3%A9')
+
+ def test_match_common(self):
+ P = self.cls
+ # Absolute patterns
+ self.assertTrue(P('c:/b.py').match('/*.py'))
+ self.assertTrue(P('c:/b.py').match('c:*.py'))
+ self.assertTrue(P('c:/b.py').match('c:/*.py'))
+ self.assertFalse(P('d:/b.py').match('c:/*.py')) # wrong drive
+ self.assertFalse(P('b.py').match('/*.py'))
+ self.assertFalse(P('b.py').match('c:*.py'))
+ self.assertFalse(P('b.py').match('c:/*.py'))
+ self.assertFalse(P('c:b.py').match('/*.py'))
+ self.assertFalse(P('c:b.py').match('c:/*.py'))
+ self.assertFalse(P('/b.py').match('c:*.py'))
+ self.assertFalse(P('/b.py').match('c:/*.py'))
+ # UNC patterns
+ self.assertTrue(P('//some/share/a.py').match('/*.py'))
+ self.assertTrue(P('//some/share/a.py').match('//some/share/*.py'))
+ self.assertFalse(P('//other/share/a.py').match('//some/share/*.py'))
+ self.assertFalse(P('//some/share/a/b.py').match('//some/share/*.py'))
+ # Case-insensitivity
+ self.assertTrue(P('B.py').match('b.PY'))
+ self.assertTrue(P('c:/a/B.Py').match('C:/A/*.pY'))
+ self.assertTrue(P('//Some/Share/B.Py').match('//somE/sharE/*.pY'))
+
+ def test_ordering_common(self):
+ # Case-insensitivity
+ def assertOrderedEqual(a, b):
+ self.assertLessEqual(a, b)
+ self.assertGreaterEqual(b, a)
+ P = self.cls
+ p = P('c:A/b')
+ q = P('C:a/B')
+ assertOrderedEqual(p, q)
+ self.assertFalse(p < q)
+ self.assertFalse(p > q)
+ p = P('//some/Share/A/b')
+ q = P('//Some/SHARE/a/B')
+ assertOrderedEqual(p, q)
+ self.assertFalse(p < q)
+ self.assertFalse(p > q)
+
+ def test_parts(self):
+ P = self.cls
+ p = P('c:a/b')
+ parts = p.parts
+ self.assertEqual(parts, ('c:', 'a', 'b'))
+ p = P('c:/a/b')
+ parts = p.parts
+ self.assertEqual(parts, ('c:\\', 'a', 'b'))
+ p = P('//a/b/c/d')
+ parts = p.parts
+ self.assertEqual(parts, ('\\\\a\\b\\', 'c', 'd'))
+
+ def test_parent(self):
+ # Anchored
+ P = self.cls
+ p = P('z:a/b/c')
+ self.assertEqual(p.parent, P('z:a/b'))
+ self.assertEqual(p.parent.parent, P('z:a'))
+ self.assertEqual(p.parent.parent.parent, P('z:'))
+ self.assertEqual(p.parent.parent.parent.parent, P('z:'))
+ p = P('z:/a/b/c')
+ self.assertEqual(p.parent, P('z:/a/b'))
+ self.assertEqual(p.parent.parent, P('z:/a'))
+ self.assertEqual(p.parent.parent.parent, P('z:/'))
+ self.assertEqual(p.parent.parent.parent.parent, P('z:/'))
+ p = P('//a/b/c/d')
+ self.assertEqual(p.parent, P('//a/b/c'))
+ self.assertEqual(p.parent.parent, P('//a/b'))
+ self.assertEqual(p.parent.parent.parent, P('//a/b'))
+
+ def test_parents(self):
+ # Anchored
+ P = self.cls
+ p = P('z:a/b/')
+ par = p.parents
+ self.assertEqual(len(par), 2)
+ self.assertEqual(par[0], P('z:a'))
+ self.assertEqual(par[1], P('z:'))
+ self.assertEqual(list(par), [P('z:a'), P('z:')])
+ with self.assertRaises(IndexError):
+ par[2]
+ p = P('z:/a/b/')
+ par = p.parents
+ self.assertEqual(len(par), 2)
+ self.assertEqual(par[0], P('z:/a'))
+ self.assertEqual(par[1], P('z:/'))
+ self.assertEqual(list(par), [P('z:/a'), P('z:/')])
+ with self.assertRaises(IndexError):
+ par[2]
+ p = P('//a/b/c/d')
+ par = p.parents
+ self.assertEqual(len(par), 2)
+ self.assertEqual(par[0], P('//a/b/c'))
+ self.assertEqual(par[1], P('//a/b'))
+ self.assertEqual(list(par), [P('//a/b/c'), P('//a/b')])
+ with self.assertRaises(IndexError):
+ par[2]
+
+ def test_drive(self):
+ P = self.cls
+ self.assertEqual(P('c:').drive, 'c:')
+ self.assertEqual(P('c:a/b').drive, 'c:')
+ self.assertEqual(P('c:/').drive, 'c:')
+ self.assertEqual(P('c:/a/b/').drive, 'c:')
+ self.assertEqual(P('//a/b').drive, '\\\\a\\b')
+ self.assertEqual(P('//a/b/').drive, '\\\\a\\b')
+ self.assertEqual(P('//a/b/c/d').drive, '\\\\a\\b')
+
+ def test_root(self):
+ P = self.cls
+ self.assertEqual(P('c:').root, '')
+ self.assertEqual(P('c:a/b').root, '')
+ self.assertEqual(P('c:/').root, '\\')
+ self.assertEqual(P('c:/a/b/').root, '\\')
+ self.assertEqual(P('//a/b').root, '\\')
+ self.assertEqual(P('//a/b/').root, '\\')
+ self.assertEqual(P('//a/b/c/d').root, '\\')
+
+ def test_anchor(self):
+ P = self.cls
+ self.assertEqual(P('c:').anchor, 'c:')
+ self.assertEqual(P('c:a/b').anchor, 'c:')
+ self.assertEqual(P('c:/').anchor, 'c:\\')
+ self.assertEqual(P('c:/a/b/').anchor, 'c:\\')
+ self.assertEqual(P('//a/b').anchor, '\\\\a\\b\\')
+ self.assertEqual(P('//a/b/').anchor, '\\\\a\\b\\')
+ self.assertEqual(P('//a/b/c/d').anchor, '\\\\a\\b\\')
+
+ def test_name(self):
+ P = self.cls
+ self.assertEqual(P('c:').name, '')
+ self.assertEqual(P('c:/').name, '')
+ self.assertEqual(P('c:a/b').name, 'b')
+ self.assertEqual(P('c:/a/b').name, 'b')
+ self.assertEqual(P('c:a/b.py').name, 'b.py')
+ self.assertEqual(P('c:/a/b.py').name, 'b.py')
+ self.assertEqual(P('//My.py/Share.php').name, '')
+ self.assertEqual(P('//My.py/Share.php/a/b').name, 'b')
+
+ def test_suffix(self):
+ P = self.cls
+ self.assertEqual(P('c:').suffix, '')
+ self.assertEqual(P('c:/').suffix, '')
+ self.assertEqual(P('c:a/b').suffix, '')
+ self.assertEqual(P('c:/a/b').suffix, '')
+ self.assertEqual(P('c:a/b.py').suffix, '.py')
+ self.assertEqual(P('c:/a/b.py').suffix, '.py')
+ self.assertEqual(P('c:a/.hgrc').suffix, '')
+ self.assertEqual(P('c:/a/.hgrc').suffix, '')
+ self.assertEqual(P('c:a/.hg.rc').suffix, '.rc')
+ self.assertEqual(P('c:/a/.hg.rc').suffix, '.rc')
+ self.assertEqual(P('c:a/b.tar.gz').suffix, '.gz')
+ self.assertEqual(P('c:/a/b.tar.gz').suffix, '.gz')
+ self.assertEqual(P('c:a/Some name. Ending with a dot.').suffix, '')
+ self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffix, '')
+ self.assertEqual(P('//My.py/Share.php').suffix, '')
+ self.assertEqual(P('//My.py/Share.php/a/b').suffix, '')
+
+ def test_suffixes(self):
+ P = self.cls
+ self.assertEqual(P('c:').suffixes, [])
+ self.assertEqual(P('c:/').suffixes, [])
+ self.assertEqual(P('c:a/b').suffixes, [])
+ self.assertEqual(P('c:/a/b').suffixes, [])
+ self.assertEqual(P('c:a/b.py').suffixes, ['.py'])
+ self.assertEqual(P('c:/a/b.py').suffixes, ['.py'])
+ self.assertEqual(P('c:a/.hgrc').suffixes, [])
+ self.assertEqual(P('c:/a/.hgrc').suffixes, [])
+ self.assertEqual(P('c:a/.hg.rc').suffixes, ['.rc'])
+ self.assertEqual(P('c:/a/.hg.rc').suffixes, ['.rc'])
+ self.assertEqual(P('c:a/b.tar.gz').suffixes, ['.tar', '.gz'])
+ self.assertEqual(P('c:/a/b.tar.gz').suffixes, ['.tar', '.gz'])
+ self.assertEqual(P('//My.py/Share.php').suffixes, [])
+ self.assertEqual(P('//My.py/Share.php/a/b').suffixes, [])
+ self.assertEqual(P('c:a/Some name. Ending with a dot.').suffixes, [])
+ self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffixes, [])
+
+ def test_stem(self):
+ P = self.cls
+ self.assertEqual(P('c:').stem, '')
+ self.assertEqual(P('c:.').stem, '')
+ self.assertEqual(P('c:..').stem, '..')
+ self.assertEqual(P('c:/').stem, '')
+ self.assertEqual(P('c:a/b').stem, 'b')
+ self.assertEqual(P('c:a/b.py').stem, 'b')
+ self.assertEqual(P('c:a/.hgrc').stem, '.hgrc')
+ self.assertEqual(P('c:a/.hg.rc').stem, '.hg')
+ self.assertEqual(P('c:a/b.tar.gz').stem, 'b.tar')
+ self.assertEqual(P('c:a/Some name. Ending with a dot.').stem,
+ 'Some name. Ending with a dot.')
+
+ def test_with_name(self):
+ P = self.cls
+ self.assertEqual(P('c:a/b').with_name('d.xml'), P('c:a/d.xml'))
+ self.assertEqual(P('c:/a/b').with_name('d.xml'), P('c:/a/d.xml'))
+ self.assertEqual(
+ P('c:a/Dot ending.').with_name('d.xml'), P('c:a/d.xml'))
+ self.assertEqual(
+ P('c:/a/Dot ending.').with_name('d.xml'), P('c:/a/d.xml'))
+ self.assertRaises(ValueError, P('c:').with_name, 'd.xml')
+ self.assertRaises(ValueError, P('c:/').with_name, 'd.xml')
+ self.assertRaises(ValueError, P('//My/Share').with_name, 'd.xml')
+ self.assertRaises(ValueError, P('c:a/b').with_name, 'd:')
+ self.assertRaises(ValueError, P('c:a/b').with_name, 'd:e')
+ self.assertRaises(ValueError, P('c:a/b').with_name, 'd:/e')
+ self.assertRaises(ValueError, P('c:a/b').with_name, '//My/Share')
+
+ def test_with_suffix(self):
+ P = self.cls
+ self.assertEqual(P('c:a/b').with_suffix('.gz'), P('c:a/b.gz'))
+ self.assertEqual(P('c:/a/b').with_suffix('.gz'), P('c:/a/b.gz'))
+ self.assertEqual(P('c:a/b.py').with_suffix('.gz'), P('c:a/b.gz'))
+ self.assertEqual(P('c:/a/b.py').with_suffix('.gz'), P('c:/a/b.gz'))
+ # Path doesn't have a "filename" component
+ self.assertRaises(ValueError, P('').with_suffix, '.gz')
+ self.assertRaises(ValueError, P('.').with_suffix, '.gz')
+ self.assertRaises(ValueError, P('/').with_suffix, '.gz')
+ self.assertRaises(ValueError, P('//My/Share').with_suffix, '.gz')
+ # Invalid suffix
+ self.assertRaises(ValueError, P('c:a/b').with_suffix, 'gz')
+ self.assertRaises(ValueError, P('c:a/b').with_suffix, '/')
+ self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\')
+ self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:')
+ self.assertRaises(ValueError, P('c:a/b').with_suffix, '/.gz')
+ self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\.gz')
+ self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:.gz')
+ self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c/d')
+ self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c\\d')
+ self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c/d')
+ self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c\\d')
+
+ def test_relative_to(self):
+ P = self.cls
+ p = P('C:Foo/Bar')
+ self.assertEqual(p.relative_to(P('c:')), P('Foo/Bar'))
+ self.assertEqual(p.relative_to('c:'), P('Foo/Bar'))
+ self.assertEqual(p.relative_to(P('c:foO')), P('Bar'))
+ self.assertEqual(p.relative_to('c:foO'), P('Bar'))
+ self.assertEqual(p.relative_to('c:foO/'), P('Bar'))
+ self.assertEqual(p.relative_to(P('c:foO/baR')), P())
+ self.assertEqual(p.relative_to('c:foO/baR'), P())
+ # Unrelated paths
+ self.assertRaises(ValueError, p.relative_to, P())
+ self.assertRaises(ValueError, p.relative_to, '')
+ self.assertRaises(ValueError, p.relative_to, P('d:'))
+ self.assertRaises(ValueError, p.relative_to, P('/'))
+ self.assertRaises(ValueError, p.relative_to, P('Foo'))
+ self.assertRaises(ValueError, p.relative_to, P('/Foo'))
+ self.assertRaises(ValueError, p.relative_to, P('C:/Foo'))
+ self.assertRaises(ValueError, p.relative_to, P('C:Foo/Bar/Baz'))
+ self.assertRaises(ValueError, p.relative_to, P('C:Foo/Baz'))
+ p = P('C:/Foo/Bar')
+ self.assertEqual(p.relative_to(P('c:')), P('/Foo/Bar'))
+ self.assertEqual(p.relative_to('c:'), P('/Foo/Bar'))
+ self.assertEqual(str(p.relative_to(P('c:'))), '\\Foo\\Bar')
+ self.assertEqual(str(p.relative_to('c:')), '\\Foo\\Bar')
+ self.assertEqual(p.relative_to(P('c:/')), P('Foo/Bar'))
+ self.assertEqual(p.relative_to('c:/'), P('Foo/Bar'))
+ self.assertEqual(p.relative_to(P('c:/foO')), P('Bar'))
+ self.assertEqual(p.relative_to('c:/foO'), P('Bar'))
+ self.assertEqual(p.relative_to('c:/foO/'), P('Bar'))
+ self.assertEqual(p.relative_to(P('c:/foO/baR')), P())
+ self.assertEqual(p.relative_to('c:/foO/baR'), P())
+ # Unrelated paths
+ self.assertRaises(ValueError, p.relative_to, P('C:/Baz'))
+ self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Bar/Baz'))
+ self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Baz'))
+ self.assertRaises(ValueError, p.relative_to, P('C:Foo'))
+ self.assertRaises(ValueError, p.relative_to, P('d:'))
+ self.assertRaises(ValueError, p.relative_to, P('d:/'))
+ self.assertRaises(ValueError, p.relative_to, P('/'))
+ self.assertRaises(ValueError, p.relative_to, P('/Foo'))
+ self.assertRaises(ValueError, p.relative_to, P('//C/Foo'))
+ # UNC paths
+ p = P('//Server/Share/Foo/Bar')
+ self.assertEqual(p.relative_to(P('//sErver/sHare')), P('Foo/Bar'))
+ self.assertEqual(p.relative_to('//sErver/sHare'), P('Foo/Bar'))
+ self.assertEqual(p.relative_to('//sErver/sHare/'), P('Foo/Bar'))
+ self.assertEqual(p.relative_to(P('//sErver/sHare/Foo')), P('Bar'))
+ self.assertEqual(p.relative_to('//sErver/sHare/Foo'), P('Bar'))
+ self.assertEqual(p.relative_to('//sErver/sHare/Foo/'), P('Bar'))
+ self.assertEqual(p.relative_to(P('//sErver/sHare/Foo/Bar')), P())
+ self.assertEqual(p.relative_to('//sErver/sHare/Foo/Bar'), P())
+ # Unrelated paths
+ self.assertRaises(ValueError, p.relative_to, P('/Server/Share/Foo'))
+ self.assertRaises(ValueError, p.relative_to, P('c:/Server/Share/Foo'))
+ self.assertRaises(ValueError, p.relative_to, P('//z/Share/Foo'))
+ self.assertRaises(ValueError, p.relative_to, P('//Server/z/Foo'))
+
+ def test_is_absolute(self):
+ P = self.cls
+ # Under NT, only paths with both a drive and a root are absolute
+ self.assertFalse(P().is_absolute())
+ self.assertFalse(P('a').is_absolute())
+ self.assertFalse(P('a/b/').is_absolute())
+ self.assertFalse(P('/').is_absolute())
+ self.assertFalse(P('/a').is_absolute())
+ self.assertFalse(P('/a/b/').is_absolute())
+ self.assertFalse(P('c:').is_absolute())
+ self.assertFalse(P('c:a').is_absolute())
+ self.assertFalse(P('c:a/b/').is_absolute())
+ self.assertTrue(P('c:/').is_absolute())
+ self.assertTrue(P('c:/a').is_absolute())
+ self.assertTrue(P('c:/a/b/').is_absolute())
+ # UNC paths are absolute by definition
+ self.assertTrue(P('//a/b').is_absolute())
+ self.assertTrue(P('//a/b/').is_absolute())
+ self.assertTrue(P('//a/b/c').is_absolute())
+ self.assertTrue(P('//a/b/c/d').is_absolute())
+
+ def test_join(self):
+ P = self.cls
+ p = P('C:/a/b')
+ pp = p.joinpath('x/y')
+ self.assertEqual(pp, P('C:/a/b/x/y'))
+ pp = p.joinpath('/x/y')
+ self.assertEqual(pp, P('C:/x/y'))
+ # Joining with a different drive => the first path is ignored, even
+ # if the second path is relative.
+ pp = p.joinpath('D:x/y')
+ self.assertEqual(pp, P('D:x/y'))
+ pp = p.joinpath('D:/x/y')
+ self.assertEqual(pp, P('D:/x/y'))
+ pp = p.joinpath('//host/share/x/y')
+ self.assertEqual(pp, P('//host/share/x/y'))
+ # Joining with the same drive => the first path is appended to if
+ # the second path is relative.
+ pp = p.joinpath('c:x/y')
+ self.assertEqual(pp, P('C:/a/b/x/y'))
+ pp = p.joinpath('c:/x/y')
+ self.assertEqual(pp, P('C:/x/y'))
+
+ def test_div(self):
+ # Basically the same as joinpath()
+ P = self.cls
+ p = P('C:/a/b')
+ self.assertEqual(p / 'x/y', P('C:/a/b/x/y'))
+ self.assertEqual(p / 'x' / 'y', P('C:/a/b/x/y'))
+ self.assertEqual(p / '/x/y', P('C:/x/y'))
+ self.assertEqual(p / '/x' / 'y', P('C:/x/y'))
+ # Joining with a different drive => the first path is ignored, even
+ # if the second path is relative.
+ self.assertEqual(p / 'D:x/y', P('D:x/y'))
+ self.assertEqual(p / 'D:' / 'x/y', P('D:x/y'))
+ self.assertEqual(p / 'D:/x/y', P('D:/x/y'))
+ self.assertEqual(p / 'D:' / '/x/y', P('D:/x/y'))
+ self.assertEqual(p / '//host/share/x/y', P('//host/share/x/y'))
+ # Joining with the same drive => the first path is appended to if
+ # the second path is relative.
+ self.assertEqual(p / 'c:x/y', P('C:/a/b/x/y'))
+ self.assertEqual(p / 'c:/x/y', P('C:/x/y'))
+
+ def test_is_reserved(self):
+ P = self.cls
+ self.assertIs(False, P('').is_reserved())
+ self.assertIs(False, P('/').is_reserved())
+ self.assertIs(False, P('/foo/bar').is_reserved())
+ self.assertIs(True, P('con').is_reserved())
+ self.assertIs(True, P('NUL').is_reserved())
+ self.assertIs(True, P('NUL.txt').is_reserved())
+ self.assertIs(True, P('com1').is_reserved())
+ self.assertIs(True, P('com9.bar').is_reserved())
+ self.assertIs(False, P('bar.com9').is_reserved())
+ self.assertIs(True, P('lpt1').is_reserved())
+ self.assertIs(True, P('lpt9.bar').is_reserved())
+ self.assertIs(False, P('bar.lpt9').is_reserved())
+ # Only the last component matters
+ self.assertIs(False, P('c:/NUL/con/baz').is_reserved())
+ # UNC paths are never reserved
+ self.assertIs(False, P('//my/share/nul/con/aux').is_reserved())
+
+
+class PurePathTest(_BasePurePathTest, unittest.TestCase):
+ cls = pathlib.PurePath
+
+ def test_concrete_class(self):
+ p = self.cls('a')
+ self.assertIs(
+ type(p),
+ pathlib.PureWindowsPath
+ if os.name == 'nt' else pathlib.PurePosixPath)
+
+ def test_different_flavours_unequal(self):
+ p = pathlib.PurePosixPath('a')
+ q = pathlib.PureWindowsPath('a')
+ self.assertNotEqual(p, q)
+
+ @unittest.skipIf(sys.version_info < (3, 0),
+ 'Most types are orderable in Python 2')
+ def test_different_flavours_unordered(self):
+ p = pathlib.PurePosixPath('a')
+ q = pathlib.PureWindowsPath('a')
+ with self.assertRaises(TypeError):
+ p < q
+ with self.assertRaises(TypeError):
+ p <= q
+ with self.assertRaises(TypeError):
+ p > q
+ with self.assertRaises(TypeError):
+ p >= q
+
+
+#
+# Tests for the concrete classes
+#
+
+# Make sure any symbolic links in the base test path are resolved
+BASE = os.path.realpath(TESTFN)
+
+
+def join(*x):
+ return os.path.join(BASE, *x)
+
+
+def rel_join(*x):
+ return os.path.join(TESTFN, *x)
+
+
+only_nt = unittest.skipIf(os.name != 'nt',
+ 'test requires a Windows-compatible system')
+only_posix = unittest.skipIf(os.name == 'nt',
+ 'test requires a POSIX-compatible system')
+
+
+@only_posix
+class PosixPathAsPureTest(PurePosixPathTest):
+ cls = pathlib.PosixPath
+
+
+@only_nt
+class WindowsPathAsPureTest(PureWindowsPathTest):
+ cls = pathlib.WindowsPath
+
+ def test_owner(self):
+ P = self.cls
+ with self.assertRaises(NotImplementedError):
+ P('c:/').owner()
+
+ def test_group(self):
+ P = self.cls
+ with self.assertRaises(NotImplementedError):
+ P('c:/').group()
+
+
+class _BasePathTest(object):
+ """Tests for the FS-accessing functionalities of the Path classes."""
+
+ # (BASE)
+ # |
+ # |-- brokenLink -> non-existing
+ # |-- dirA
+ # | `-- linkC -> ../dirB
+ # |-- dirB
+ # | |-- fileB
+ # | `-- linkD -> ../dirB
+ # |-- dirC
+ # | |-- dirD
+ # | | `-- fileD
+ # | `-- fileC
+ # |-- dirE # No permissions
+ # |-- fileA
+ # |-- linkA -> fileA
+ # `-- linkB -> dirB
+ #
+
+ def setUp(self):
+ def cleanup():
+ os.chmod(join('dirE'), 0o777)
+ support.rmtree(BASE)
+ self.addCleanup(cleanup)
+ os.mkdir(BASE)
+ os.mkdir(join('dirA'))
+ os.mkdir(join('dirB'))
+ os.mkdir(join('dirC'))
+ os.mkdir(join('dirC', 'dirD'))
+ os.mkdir(join('dirE'))
+ with open(join('fileA'), 'wb') as f:
+ f.write(b"this is file A\n")
+ with open(join('dirB', 'fileB'), 'wb') as f:
+ f.write(b"this is file B\n")
+ with open(join('dirC', 'fileC'), 'wb') as f:
+ f.write(b"this is file C\n")
+ with open(join('dirC', 'dirD', 'fileD'), 'wb') as f:
+ f.write(b"this is file D\n")
+ os.chmod(join('dirE'), 0)
+ if support_can_symlink():
+ # Relative symlinks
+ os.symlink('fileA', join('linkA'))
+ os.symlink('non-existing', join('brokenLink'))
+ self.dirlink('dirB', join('linkB'))
+ self.dirlink(os.path.join('..', 'dirB'), join('dirA', 'linkC'))
+ # This one goes upwards, creating a loop
+ self.dirlink(os.path.join('..', 'dirB'), join('dirB', 'linkD'))
+
+ if os.name == 'nt':
+ # Workaround for http://bugs.python.org/issue13772
+ def dirlink(self, src, dest):
+ os.symlink(src, dest, target_is_directory=True)
+ else:
+ def dirlink(self, src, dest):
+ os.symlink(src, dest)
+
+ def assertSame(self, path_a, path_b):
+ self.assertTrue(os.path.samefile(str(path_a), str(path_b)),
+ "%r and %r don't point to the same file" %
+ (path_a, path_b))
+
+ def assertFileNotFound(self, func, *args, **kwargs):
+ if sys.version_info >= (3, 3):
+ with self.assertRaises(FileNotFoundError) as cm:
+ func(*args, **kwargs)
+ else:
+ with self.assertRaises(OSError) as cm:
+ # Python 2.6 kludge for http://bugs.python.org/issue7853
+ try:
+ func(*args, **kwargs)
+ except: # noqa: E722
+ raise
+ self.assertEqual(cm.exception.errno, errno.ENOENT)
+
+ def assertFileExists(self, func, *args, **kwargs):
+ if sys.version_info >= (3, 3):
+ with self.assertRaises(FileExistsError) as cm:
+ func(*args, **kwargs)
+ else:
+ with self.assertRaises(OSError) as cm:
+ # Python 2.6 kludge for http://bugs.python.org/issue7853
+ try:
+ func(*args, **kwargs)
+ except: # noqa: E722
+ raise
+ self.assertEqual(cm.exception.errno, errno.EEXIST)
+
+ def _test_cwd(self, p):
+ q = self.cls(os.getcwd())
+ self.assertEqual(p, q)
+ self.assertEqual(str(p), str(q))
+ self.assertIs(type(p), type(q))
+ self.assertTrue(p.is_absolute())
+
+ def test_cwd(self):
+ p = self.cls.cwd()
+ self._test_cwd(p)
+
+ def _test_home(self, p):
+ q = self.cls(os.path.expanduser('~'))
+ self.assertEqual(p, q)
+ self.assertEqual(str(p), str(q))
+ self.assertIs(type(p), type(q))
+ self.assertTrue(p.is_absolute())
+
+ def test_home(self):
+ p = self.cls.home()
+ self._test_home(p)
+
+ def test_samefile(self):
+ fileA_path = os.path.join(BASE, 'fileA')
+ fileB_path = os.path.join(BASE, 'dirB', 'fileB')
+ p = self.cls(fileA_path)
+ pp = self.cls(fileA_path)
+ q = self.cls(fileB_path)
+ self.assertTrue(p.samefile(fileA_path))
+ self.assertTrue(p.samefile(pp))
+ self.assertFalse(p.samefile(fileB_path))
+ self.assertFalse(p.samefile(q))
+ # Test the non-existent file case
+ non_existent = os.path.join(BASE, 'foo')
+ r = self.cls(non_existent)
+ self.assertFileNotFound(p.samefile, r)
+ self.assertFileNotFound(p.samefile, non_existent)
+ self.assertFileNotFound(r.samefile, p)
+ self.assertFileNotFound(r.samefile, non_existent)
+ self.assertFileNotFound(r.samefile, r)
+ self.assertFileNotFound(r.samefile, non_existent)
+
+ def test_empty_path(self):
+ # The empty path points to '.'
+ p = self.cls('')
+ self.assertEqual(p.stat(), os.stat('.'))
+
+ def test_expanduser_common(self):
+ P = self.cls
+ p = P('~')
+ self.assertEqual(p.expanduser(), P(os.path.expanduser('~')))
+ p = P('foo')
+ self.assertEqual(p.expanduser(), p)
+ p = P('/~')
+ self.assertEqual(p.expanduser(), p)
+ p = P('../~')
+ self.assertEqual(p.expanduser(), p)
+ p = P(P('').absolute().anchor) / '~'
+ self.assertEqual(p.expanduser(), p)
+
+ def test_exists(self):
+ P = self.cls
+ p = P(BASE)
+ self.assertIs(True, p.exists())
+ self.assertIs(True, (p / 'dirA').exists())
+ self.assertIs(True, (p / 'fileA').exists())
+ self.assertIs(False, (p / 'fileA' / 'bah').exists())
+ if support_can_symlink():
+ self.assertIs(True, (p / 'linkA').exists())
+ self.assertIs(True, (p / 'linkB').exists())
+ self.assertIs(True, (p / 'linkB' / 'fileB').exists())
+ self.assertIs(False, (p / 'linkA' / 'bah').exists())
+ self.assertIs(False, (p / 'foo').exists())
+ self.assertIs(False, P('/xyzzy').exists())
+
+ def test_open_common(self):
+ p = self.cls(BASE)
+ with (p / 'fileA').open('r') as f:
+ self.assertIsInstance(f, io.TextIOBase)
+ self.assertEqual(f.read(), "this is file A\n")
+ with (p / 'fileA').open('rb') as f:
+ self.assertIsInstance(f, io.BufferedIOBase)
+ self.assertEqual(f.read().strip(), b"this is file A")
+ with (p / 'fileA').open('rb', buffering=0) as f:
+ self.assertIsInstance(f, io.RawIOBase)
+ self.assertEqual(f.read().strip(), b"this is file A")
+
+ def test_read_write_bytes(self):
+ p = self.cls(BASE)
+ (p / 'fileA').write_bytes(b'abcdefg')
+ self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg')
+ # check that trying to write str does not truncate the file
+ with self.assertRaises(TypeError) as cm:
+ (p / 'fileA').write_bytes(six.u('somestr'))
+ self.assertTrue(str(cm.exception).startswith('data must be'))
+ self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg')
+
+ def test_read_write_text(self):
+ p = self.cls(BASE)
+ (p / 'fileA').write_text(six.u('\u00e4bcdefg'), encoding='latin-1')
+ self.assertEqual((p / 'fileA').read_text(
+ encoding='utf-8', errors='ignore'), six.u('bcdefg'))
+ # check that trying to write bytes does not truncate the file
+ with self.assertRaises(TypeError) as cm:
+ (p / 'fileA').write_text(b'somebytes')
+ self.assertTrue(str(cm.exception).startswith('data must be'))
+ self.assertEqual((p / 'fileA').read_text(encoding='latin-1'),
+ six.u('\u00e4bcdefg'))
+
+ def test_iterdir(self):
+ P = self.cls
+ p = P(BASE)
+ it = p.iterdir()
+ paths = set(it)
+ expected = ['dirA', 'dirB', 'dirC', 'dirE', 'fileA']
+ if support_can_symlink():
+ expected += ['linkA', 'linkB', 'brokenLink']
+ self.assertEqual(paths, set(P(BASE, q) for q in expected))
+
+ @support_skip_unless_symlink
+ def test_iterdir_symlink(self):
+ # __iter__ on a symlink to a directory
+ P = self.cls
+ p = P(BASE, 'linkB')
+ paths = set(p.iterdir())
+ expected = set(P(BASE, 'linkB', q) for q in ['fileB', 'linkD'])
+ self.assertEqual(paths, expected)
+
+ def test_iterdir_nodir(self):
+ # __iter__ on something that is not a directory
+ p = self.cls(BASE, 'fileA')
+ with self.assertRaises(OSError) as cm:
+ # Python 2.6 kludge for http://bugs.python.org/issue7853
+ try:
+ next(p.iterdir())
+ except: # noqa: E722
+ raise
+ # ENOENT or EINVAL under Windows, ENOTDIR otherwise
+ # (see issue #12802)
+ self.assertIn(cm.exception.errno, (errno.ENOTDIR,
+ errno.ENOENT, errno.EINVAL))
+
+ def test_glob_common(self):
+ def _check(glob, expected):
+ self.assertEqual(set(glob), set(P(BASE, q) for q in expected))
+ P = self.cls
+ p = P(BASE)
+ it = p.glob("fileA")
+ self.assertIsInstance(it, collections_abc.Iterator)
+ _check(it, ["fileA"])
+ _check(p.glob("fileB"), [])
+ _check(p.glob("dir*/file*"), ["dirB/fileB", "dirC/fileC"])
+ if not support_can_symlink():
+ _check(p.glob("*A"), ['dirA', 'fileA'])
+ else:
+ _check(p.glob("*A"), ['dirA', 'fileA', 'linkA'])
+ if not support_can_symlink():
+ _check(p.glob("*B/*"), ['dirB/fileB'])
+ else:
+ _check(p.glob("*B/*"), ['dirB/fileB', 'dirB/linkD',
+ 'linkB/fileB', 'linkB/linkD'])
+ if not support_can_symlink():
+ _check(p.glob("*/fileB"), ['dirB/fileB'])
+ else:
+ _check(p.glob("*/fileB"), ['dirB/fileB', 'linkB/fileB'])
+
+ def test_rglob_common(self):
+ def _check(glob, expected):
+ self.assertEqual(set(glob), set(P(BASE, q) for q in expected))
+ P = self.cls
+ p = P(BASE)
+ it = p.rglob("fileA")
+ self.assertIsInstance(it, collections_abc.Iterator)
+ _check(it, ["fileA"])
+ _check(p.rglob("fileB"), ["dirB/fileB"])
+ _check(p.rglob("*/fileA"), [])
+ if not support_can_symlink():
+ _check(p.rglob("*/fileB"), ["dirB/fileB"])
+ else:
+ _check(p.rglob("*/fileB"), ["dirB/fileB", "dirB/linkD/fileB",
+ "linkB/fileB", "dirA/linkC/fileB"])
+ _check(p.rglob("file*"), ["fileA", "dirB/fileB",
+ "dirC/fileC", "dirC/dirD/fileD"])
+ p = P(BASE, "dirC")
+ _check(p.rglob("file*"), ["dirC/fileC", "dirC/dirD/fileD"])
+ _check(p.rglob("*/*"), ["dirC/dirD/fileD"])
+
+ @support_skip_unless_symlink
+ def test_rglob_symlink_loop(self):
+ # Don't get fooled by symlink loops (Issue #26012)
+ P = self.cls
+ p = P(BASE)
+ given = set(p.rglob('*'))
+ expect = set([
+ 'brokenLink',
+ 'dirA', 'dirA/linkC',
+ 'dirB', 'dirB/fileB', 'dirB/linkD',
+ 'dirC', 'dirC/dirD', 'dirC/dirD/fileD', 'dirC/fileC',
+ 'dirE',
+ 'fileA',
+ 'linkA',
+ 'linkB',
+ ])
+ self.assertEqual(given, set([p / x for x in expect]))
+
+ def test_glob_dotdot(self):
+ # ".." is not special in globs
+ P = self.cls
+ p = P(BASE)
+ self.assertEqual(set(p.glob("..")), set([P(BASE, "..")]))
+ self.assertEqual(set(p.glob("dirA/../file*")),
+ set([P(BASE, "dirA/../fileA")]))
+ self.assertEqual(set(p.glob("../xyzzy")), set())
+
+ def _check_resolve(self, p, expected, strict=True):
+ q = p.resolve(strict)
+ self.assertEqual(q, expected)
+
+ # this can be used to check both relative and absolute resolutions
+ _check_resolve_relative = _check_resolve_absolute = _check_resolve
+
+ @support_skip_unless_symlink
+ def test_resolve_common(self):
+ P = self.cls
+ p = P(BASE, 'foo')
+ with self.assertRaises(OSError) as cm:
+ p.resolve(strict=True)
+ self.assertEqual(cm.exception.errno, errno.ENOENT)
+ # Non-strict
+ self.assertEqual(str(p.resolve(strict=False)),
+ os.path.join(BASE, 'foo'))
+ p = P(BASE, 'foo', 'in', 'spam')
+ self.assertEqual(str(p.resolve(strict=False)),
+ os.path.join(BASE, 'foo', 'in', 'spam'))
+ p = P(BASE, '..', 'foo', 'in', 'spam')
+ self.assertEqual(str(p.resolve(strict=False)),
+ os.path.abspath(os.path.join('foo', 'in', 'spam')))
+ # These are all relative symlinks
+ p = P(BASE, 'dirB', 'fileB')
+ self._check_resolve_relative(p, p)
+ p = P(BASE, 'linkA')
+ self._check_resolve_relative(p, P(BASE, 'fileA'))
+ p = P(BASE, 'dirA', 'linkC', 'fileB')
+ self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB'))
+ p = P(BASE, 'dirB', 'linkD', 'fileB')
+ self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB'))
+ # Non-strict
+ p = P(BASE, 'dirA', 'linkC', 'fileB', 'foo', 'in', 'spam')
+ self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB', 'foo', 'in',
+ 'spam'), False)
+ p = P(BASE, 'dirA', 'linkC', '..', 'foo', 'in', 'spam')
+ if os.name == 'nt':
+ # In Windows, if linkY points to dirB, 'dirA\linkY\..'
+ # resolves to 'dirA' without resolving linkY first.
+ self._check_resolve_relative(p, P(BASE, 'dirA', 'foo', 'in',
+ 'spam'), False)
+ else:
+ # In Posix, if linkY points to dirB, 'dirA/linkY/..'
+ # resolves to 'dirB/..' first before resolving to parent of dirB.
+ self._check_resolve_relative(
+ p, P(BASE, 'foo', 'in', 'spam'), False)
+ # Now create absolute symlinks
+ d = tempfile.mkdtemp(suffix='-dirD')
+ self.addCleanup(support.rmtree, d)
+ os.symlink(os.path.join(d), join('dirA', 'linkX'))
+ os.symlink(join('dirB'), os.path.join(d, 'linkY'))
+ p = P(BASE, 'dirA', 'linkX', 'linkY', 'fileB')
+ self._check_resolve_absolute(p, P(BASE, 'dirB', 'fileB'))
+ # Non-strict
+ p = P(BASE, 'dirA', 'linkX', 'linkY', 'foo', 'in', 'spam')
+ self._check_resolve_relative(p, P(BASE, 'dirB', 'foo', 'in', 'spam'),
+ False)
+ p = P(BASE, 'dirA', 'linkX', 'linkY', '..', 'foo', 'in', 'spam')
+ if os.name == 'nt':
+ # In Windows, if linkY points to dirB, 'dirA\linkY\..'
+ # resolves to 'dirA' without resolving linkY first.
+ self._check_resolve_relative(p, P(d, 'foo', 'in', 'spam'), False)
+ else:
+ # In Posix, if linkY points to dirB, 'dirA/linkY/..'
+ # resolves to 'dirB/..' first before resolving to parent of dirB.
+ self._check_resolve_relative(
+ p, P(BASE, 'foo', 'in', 'spam'), False)
+
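+ # Editorial sketch, not part of the upstream pathlib2 tests: the platform
+ # split asserted above comes down to whether '..' is applied before or after
+ # the symlink is followed. With dirA/linkC -> ../dirB:
+ #
+ #   POSIX:    resolve('dirA/linkC/..') follows linkC first, i.e.
+ #             resolve('dirB/..') == BASE
+ #   Windows:  'dirA\linkC\..' collapses lexically to 'dirA' before
+ #             linkC is ever resolved
+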
+ @support_skip_unless_symlink
+ def test_resolve_dot(self):
+ # See https://bitbucket.org/pitrou/pathlib/issue/9/
+ # pathresolve-fails-on-complex-symlinks
+ p = self.cls(BASE)
+ self.dirlink('.', join('0'))
+ self.dirlink(os.path.join('0', '0'), join('1'))
+ self.dirlink(os.path.join('1', '1'), join('2'))
+ q = p / '2'
+ self.assertEqual(q.resolve(strict=True), p)
+ r = q / '3' / '4'
+ self.assertFileNotFound(r.resolve, strict=True)
+ # Non-strict
+ self.assertEqual(r.resolve(strict=False), p / '3' / '4')
+
+ def test_with(self):
+ p = self.cls(BASE)
+ it = p.iterdir()
+ it2 = p.iterdir()
+ next(it2)
+ with p:
+ pass
+ # I/O operation on closed path
+ self.assertRaises(ValueError, next, it)
+ self.assertRaises(ValueError, next, it2)
+ self.assertRaises(ValueError, p.open)
+ self.assertRaises(ValueError, p.resolve)
+ self.assertRaises(ValueError, p.absolute)
+ self.assertRaises(ValueError, p.__enter__)
+
+ def test_chmod(self):
+ p = self.cls(BASE) / 'fileA'
+ mode = p.stat().st_mode
+ # Clear writable bit
+ new_mode = mode & ~0o222
+ p.chmod(new_mode)
+ self.assertEqual(p.stat().st_mode, new_mode)
+ # Set writable bit
+ new_mode = mode | 0o222
+ p.chmod(new_mode)
+ self.assertEqual(p.stat().st_mode, new_mode)
+
+ # XXX also need a test for lchmod
+
+ def test_stat(self):
+ p = self.cls(BASE) / 'fileA'
+ st = p.stat()
+ self.assertEqual(p.stat(), st)
+ # Change file mode by flipping write bit
+ p.chmod(st.st_mode ^ 0o222)
+ self.addCleanup(p.chmod, st.st_mode)
+ self.assertNotEqual(p.stat(), st)
+
+ @support_skip_unless_symlink
+ def test_lstat(self):
+ p = self.cls(BASE) / 'linkA'
+ st = p.stat()
+ self.assertNotEqual(st, p.lstat())
+
+ def test_lstat_nosymlink(self):
+ p = self.cls(BASE) / 'fileA'
+ st = p.stat()
+ self.assertEqual(st, p.lstat())
+
+ @unittest.skipUnless(pwd, "the pwd module is needed for this test")
+ def test_owner(self):
+ p = self.cls(BASE) / 'fileA'
+ uid = p.stat().st_uid
+ try:
+ name = pwd.getpwuid(uid).pw_name
+ except KeyError:
+ self.skipTest(
+ "user %d doesn't have an entry in the system database" % uid)
+ self.assertEqual(name, p.owner())
+
+ @unittest.skipUnless(grp, "the grp module is needed for this test")
+ def test_group(self):
+ p = self.cls(BASE) / 'fileA'
+ gid = p.stat().st_gid
+ try:
+ name = grp.getgrgid(gid).gr_name
+ except KeyError:
+ self.skipTest(
+ "group %d doesn't have an entry in the system database" % gid)
+ self.assertEqual(name, p.group())
+
+ def test_unlink(self):
+ p = self.cls(BASE) / 'fileA'
+ p.unlink()
+ self.assertFileNotFound(p.stat)
+ self.assertFileNotFound(p.unlink)
+
+ def test_rmdir(self):
+ p = self.cls(BASE) / 'dirA'
+ for q in p.iterdir():
+ q.unlink()
+ p.rmdir()
+ self.assertFileNotFound(p.stat)
+ self.assertFileNotFound(p.unlink)
+
+ def test_rename(self):
+ P = self.cls(BASE)
+ p = P / 'fileA'
+ size = p.stat().st_size
+ # Renaming to another path
+ q = P / 'dirA' / 'fileAA'
+ p.rename(q)
+ self.assertEqual(q.stat().st_size, size)
+ self.assertFileNotFound(p.stat)
+ # Renaming to a str of a relative path
+ r = rel_join('fileAAA')
+ q.rename(r)
+ self.assertEqual(os.stat(r).st_size, size)
+ self.assertFileNotFound(q.stat)
+
+ def test_replace(self):
+ P = self.cls(BASE)
+ p = P / 'fileA'
+ if sys.version_info < (3, 3):
+ self.assertRaises(NotImplementedError, p.replace, p)
+ return
+ size = p.stat().st_size
+ # Replacing a non-existing path
+ q = P / 'dirA' / 'fileAA'
+ p.replace(q)
+ self.assertEqual(q.stat().st_size, size)
+ self.assertFileNotFound(p.stat)
+ # Replacing another (existing) path
+ r = rel_join('dirB', 'fileB')
+ q.replace(r)
+ self.assertEqual(os.stat(r).st_size, size)
+ self.assertFileNotFound(q.stat)
+
+ def test_touch_common(self):
+ P = self.cls(BASE)
+ p = P / 'newfileA'
+ self.assertFalse(p.exists())
+ p.touch()
+ self.assertTrue(p.exists())
+ # Rewind the mtime sufficiently far in the past to work around
+ # filesystem-specific timestamp granularity.
+ old_mtime = p.stat().st_mtime - 10
+ os.utime(str(p), (old_mtime, old_mtime))
+ # The file mtime should be refreshed by calling touch() again
+ p.touch()
+ self.assertGreaterEqual(p.stat().st_mtime, old_mtime)
+ # Now with exist_ok=False
+ p = P / 'newfileB'
+ self.assertFalse(p.exists())
+ p.touch(mode=0o700, exist_ok=False)
+ self.assertTrue(p.exists())
+ self.assertRaises(OSError, p.touch, exist_ok=False)
+
+ def test_touch_nochange(self):
+ P = self.cls(BASE)
+ p = P / 'fileA'
+ p.touch()
+ with p.open('rb') as f:
+ self.assertEqual(f.read().strip(), b"this is file A")
+
+ def test_mkdir(self):
+ P = self.cls(BASE)
+ p = P / 'newdirA'
+ self.assertFalse(p.exists())
+ p.mkdir()
+ self.assertTrue(p.exists())
+ self.assertTrue(p.is_dir())
+ with self.assertRaises(OSError) as cm:
+ # Python 2.6 kludge for http://bugs.python.org/issue7853
+ try:
+ p.mkdir()
+ except: # noqa: E722
+ raise
+ self.assertEqual(cm.exception.errno, errno.EEXIST)
+
+ def test_mkdir_parents(self):
+ # Creating a chain of directories
+ p = self.cls(BASE, 'newdirB', 'newdirC')
+ self.assertFalse(p.exists())
+ with self.assertRaises(OSError) as cm:
+ p.mkdir()
+ self.assertEqual(cm.exception.errno, errno.ENOENT)
+ p.mkdir(parents=True)
+ self.assertTrue(p.exists())
+ self.assertTrue(p.is_dir())
+ with self.assertRaises(OSError) as cm:
+ p.mkdir(parents=True)
+ self.assertEqual(cm.exception.errno, errno.EEXIST)
+ # test `mode` arg
+ mode = stat.S_IMODE(p.stat().st_mode) # default mode
+ p = self.cls(BASE, 'newdirD', 'newdirE')
+ p.mkdir(0o555, parents=True)
+ self.assertTrue(p.exists())
+ self.assertTrue(p.is_dir())
+ if os.name != 'nt':
+ # the directory's permissions follow the mode argument
+ self.assertEqual(stat.S_IMODE(p.stat().st_mode), 0o7555 & mode)
+ # the parent's permissions follow the default process settings
+ self.assertEqual(stat.S_IMODE(p.parent.stat().st_mode), mode)
+
+ def test_mkdir_exist_ok(self):
+ p = self.cls(BASE, 'dirB')
+ st_ctime_first = p.stat().st_ctime
+ self.assertTrue(p.exists())
+ self.assertTrue(p.is_dir())
+ self.assertFileExists(p.mkdir)
+ p.mkdir(exist_ok=True)
+ self.assertTrue(p.exists())
+ self.assertEqual(p.stat().st_ctime, st_ctime_first)
+
+ def test_mkdir_exist_ok_with_parent(self):
+ p = self.cls(BASE, 'dirC')
+ self.assertTrue(p.exists())
+ self.assertFileExists(p.mkdir)
+ p = p / 'newdirC'
+ p.mkdir(parents=True)
+ st_ctime_first = p.stat().st_ctime
+ self.assertTrue(p.exists())
+ self.assertFileExists(p.mkdir, parents=True)
+ p.mkdir(parents=True, exist_ok=True)
+ self.assertTrue(p.exists())
+ self.assertEqual(p.stat().st_ctime, st_ctime_first)
+
+ def test_mkdir_exist_ok_root(self):
+ # Issue #25803: A drive root could raise PermissionError on Windows
+ self.cls('/').resolve().mkdir(exist_ok=True)
+ self.cls('/').resolve().mkdir(parents=True, exist_ok=True)
+
+ @only_nt # XXX: not sure how to test this on POSIX
+ def test_mkdir_with_unknown_drive(self):
+ for d in 'ZYXWVUTSRQPONMLKJIHGFEDCBA':
+ p = self.cls(d + ':\\')
+ if not p.is_dir():
+ break
+ else:
+ self.skipTest("cannot find a drive that doesn't exist")
+ with self.assertRaises(OSError):
+ (p / 'child' / 'path').mkdir(parents=True)
+
+ def test_mkdir_with_child_file(self):
+ p = self.cls(BASE, 'dirB', 'fileB')
+ self.assertTrue(p.exists())
+ # An exception is raised when the last path component is an existing
+ # regular file, regardless of whether exist_ok is true or not.
+ self.assertFileExists(p.mkdir, parents=True)
+ self.assertFileExists(p.mkdir, parents=True, exist_ok=True)
+
+ def test_mkdir_no_parents_file(self):
+ p = self.cls(BASE, 'fileA')
+ self.assertTrue(p.exists())
+ # An exception is raised when the last path component is an existing
+ # regular file, regardless of whether exist_ok is true or not.
+ self.assertFileExists(p.mkdir)
+ self.assertFileExists(p.mkdir, exist_ok=True)
+
+ def test_mkdir_concurrent_parent_creation(self):
+ for pattern_num in range(32):
+ p = self.cls(BASE, 'dirCPC%d' % pattern_num)
+ self.assertFalse(p.exists())
+
+ def my_mkdir(path, mode=0o777):
+ path = str(path)
+ # Emulate another process that would create the directory
+ # just before we try to create it ourselves. We do it
+ # in all possible pattern combinations, assuming that this
+ # function is called at most 5 times (dirCPC/dir1/dir2,
+ # dirCPC/dir1, dirCPC, dirCPC/dir1, dirCPC/dir1/dir2).
+ if pattern.pop():
+ os.mkdir(path, mode) # from another process
+ concurrently_created.add(path)
+ os.mkdir(path, mode) # our real call
+
+ pattern = [bool(pattern_num & (1 << n)) for n in range(5)]
+ concurrently_created = set()
+ p12 = p / 'dir1' / 'dir2'
+
+ def _try_func():
+ with mock.patch("pathlib2._normal_accessor.mkdir", my_mkdir):
+ p12.mkdir(parents=True, exist_ok=False)
+
+ def _exc_func(exc):
+ self.assertIn(str(p12), concurrently_created)
+
+ def _else_func():
+ self.assertNotIn(str(p12), concurrently_created)
+
+ pathlib._try_except_fileexistserror(
+ _try_func, _exc_func, _else_func)
+ self.assertTrue(p.exists())
+
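+ # Editorial note, not part of the upstream pathlib2 tests: each bit of
+ # pattern_num decides whether one of the (at most five) accessor-level mkdir
+ # calls above races with a "concurrent" creator that makes the directory
+ # first. mkdir(parents=True, exist_ok=False) must tolerate parents appearing
+ # concurrently and raise FileExistsError only when the final component
+ # itself was the one created by the other "process".
+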
+ @support_skip_unless_symlink
+ def test_symlink_to(self):
+ P = self.cls(BASE)
+ target = P / 'fileA'
+ # Symlinking a path target
+ link = P / 'dirA' / 'linkAA'
+ link.symlink_to(target)
+ self.assertEqual(link.stat(), target.stat())
+ self.assertNotEqual(link.lstat(), target.stat())
+ # Symlinking a str target
+ link = P / 'dirA' / 'linkAAA'
+ link.symlink_to(str(target))
+ self.assertEqual(link.stat(), target.stat())
+ self.assertNotEqual(link.lstat(), target.stat())
+ self.assertFalse(link.is_dir())
+ # Symlinking to a directory
+ target = P / 'dirB'
+ link = P / 'dirA' / 'linkAAAA'
+ link.symlink_to(target, target_is_directory=True)
+ self.assertEqual(link.stat(), target.stat())
+ self.assertNotEqual(link.lstat(), target.stat())
+ self.assertTrue(link.is_dir())
+ self.assertTrue(list(link.iterdir()))
+
+ def test_is_dir(self):
+ P = self.cls(BASE)
+ self.assertTrue((P / 'dirA').is_dir())
+ self.assertFalse((P / 'fileA').is_dir())
+ self.assertFalse((P / 'non-existing').is_dir())
+ self.assertFalse((P / 'fileA' / 'bah').is_dir())
+ if support_can_symlink():
+ self.assertFalse((P / 'linkA').is_dir())
+ self.assertTrue((P / 'linkB').is_dir())
+ self.assertFalse((P / 'brokenLink').is_dir())
+
+ def test_is_file(self):
+ P = self.cls(BASE)
+ self.assertTrue((P / 'fileA').is_file())
+ self.assertFalse((P / 'dirA').is_file())
+ self.assertFalse((P / 'non-existing').is_file())
+ self.assertFalse((P / 'fileA' / 'bah').is_file())
+ if support_can_symlink():
+ self.assertTrue((P / 'linkA').is_file())
+ self.assertFalse((P / 'linkB').is_file())
+ self.assertFalse((P / 'brokenLink').is_file())
+
+ def test_is_symlink(self):
+ P = self.cls(BASE)
+ self.assertFalse((P / 'fileA').is_symlink())
+ self.assertFalse((P / 'dirA').is_symlink())
+ self.assertFalse((P / 'non-existing').is_symlink())
+ self.assertFalse((P / 'fileA' / 'bah').is_symlink())
+ if support_can_symlink():
+ self.assertTrue((P / 'linkA').is_symlink())
+ self.assertTrue((P / 'linkB').is_symlink())
+ self.assertTrue((P / 'brokenLink').is_symlink())
+
+ def test_is_fifo_false(self):
+ P = self.cls(BASE)
+ self.assertFalse((P / 'fileA').is_fifo())
+ self.assertFalse((P / 'dirA').is_fifo())
+ self.assertFalse((P / 'non-existing').is_fifo())
+ self.assertFalse((P / 'fileA' / 'bah').is_fifo())
+
+ @unittest.skipUnless(hasattr(os, "mkfifo"), "os.mkfifo() required")
+ @unittest.skipIf(android_not_root, "mkfifo not allowed, non root user")
+ def test_is_fifo_true(self):
+ P = self.cls(BASE, 'myfifo')
+ os.mkfifo(str(P))
+ self.assertTrue(P.is_fifo())
+ self.assertFalse(P.is_socket())
+ self.assertFalse(P.is_file())
+
+ def test_is_socket_false(self):
+ P = self.cls(BASE)
+ self.assertFalse((P / 'fileA').is_socket())
+ self.assertFalse((P / 'dirA').is_socket())
+ self.assertFalse((P / 'non-existing').is_socket())
+ self.assertFalse((P / 'fileA' / 'bah').is_socket())
+
+ @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required")
+ def test_is_socket_true(self):
+ P = self.cls(BASE, 'mysock')
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ self.addCleanup(sock.close)
+ try:
+ sock.bind(str(P))
+ except OSError as e:
+ if "AF_UNIX path too long" in str(e):
+ self.skipTest("cannot bind Unix socket: " + str(e))
+ self.assertTrue(P.is_socket())
+ self.assertFalse(P.is_fifo())
+ self.assertFalse(P.is_file())
+
+ def test_is_block_device_false(self):
+ P = self.cls(BASE)
+ self.assertFalse((P / 'fileA').is_block_device())
+ self.assertFalse((P / 'dirA').is_block_device())
+ self.assertFalse((P / 'non-existing').is_block_device())
+ self.assertFalse((P / 'fileA' / 'bah').is_block_device())
+
+ def test_is_char_device_false(self):
+ P = self.cls(BASE)
+ self.assertFalse((P / 'fileA').is_char_device())
+ self.assertFalse((P / 'dirA').is_char_device())
+ self.assertFalse((P / 'non-existing').is_char_device())
+ self.assertFalse((P / 'fileA' / 'bah').is_char_device())
+
+ @only_posix
+ def test_is_char_device_true(self):
+ # Under Unix, /dev/null should generally be a char device
+ P = self.cls('/dev/null')
+ if not P.exists():
+ self.skipTest("/dev/null required")
+ self.assertTrue(P.is_char_device())
+ self.assertFalse(P.is_block_device())
+ self.assertFalse(P.is_file())
+
+ def test_pickling_common(self):
+ p = self.cls(BASE, 'fileA')
+ for proto in range(0, pickle.HIGHEST_PROTOCOL + 1):
+ dumped = pickle.dumps(p, proto)
+ pp = pickle.loads(dumped)
+ self.assertEqual(pp.stat(), p.stat())
+
+ def test_parts_interning(self):
+ P = self.cls
+ p = P('/usr/bin/foo')
+ q = P('/usr/local/bin')
+ # 'usr'
+ self.assertIs(p.parts[1], q.parts[1])
+ # 'bin'
+ self.assertIs(p.parts[2], q.parts[3])
+
+ def _check_complex_symlinks(self, link0_target):
+ # Test solving a non-looping chain of symlinks (issue #19887)
+ P = self.cls(BASE)
+ self.dirlink(os.path.join('link0', 'link0'), join('link1'))
+ self.dirlink(os.path.join('link1', 'link1'), join('link2'))
+ self.dirlink(os.path.join('link2', 'link2'), join('link3'))
+ self.dirlink(link0_target, join('link0'))
+
+ # Resolve absolute paths
+ p = (P / 'link0').resolve()
+ self.assertEqual(p, P)
+ self.assertEqual(str(p), BASE)
+ p = (P / 'link1').resolve()
+ self.assertEqual(p, P)
+ self.assertEqual(str(p), BASE)
+ p = (P / 'link2').resolve()
+ self.assertEqual(p, P)
+ self.assertEqual(str(p), BASE)
+ p = (P / 'link3').resolve()
+ self.assertEqual(p, P)
+ self.assertEqual(str(p), BASE)
+
+ # Resolve relative paths
+ old_path = os.getcwd()
+ os.chdir(BASE)
+ try:
+ p = self.cls('link0').resolve()
+ self.assertEqual(p, P)
+ self.assertEqual(str(p), BASE)
+ p = self.cls('link1').resolve()
+ self.assertEqual(p, P)
+ self.assertEqual(str(p), BASE)
+ p = self.cls('link2').resolve()
+ self.assertEqual(p, P)
+ self.assertEqual(str(p), BASE)
+ p = self.cls('link3').resolve()
+ self.assertEqual(p, P)
+ self.assertEqual(str(p), BASE)
+ finally:
+ os.chdir(old_path)
+
+ @support_skip_unless_symlink
+ def test_complex_symlinks_absolute(self):
+ self._check_complex_symlinks(BASE)
+
+ @support_skip_unless_symlink
+ def test_complex_symlinks_relative(self):
+ self._check_complex_symlinks('.')
+
+ @support_skip_unless_symlink
+ def test_complex_symlinks_relative_dot_dot(self):
+ self._check_complex_symlinks(os.path.join('dirA', '..'))
+
+
+class PathTest(_BasePathTest, unittest.TestCase):
+ cls = pathlib.Path
+
+ def test_concrete_class(self):
+ p = self.cls('a')
+ self.assertIs(
+ type(p),
+ pathlib.WindowsPath if os.name == 'nt' else pathlib.PosixPath)
+
+ def test_unsupported_flavour(self):
+ if os.name == 'nt':
+ self.assertRaises(NotImplementedError, pathlib.PosixPath)
+ else:
+ self.assertRaises(NotImplementedError, pathlib.WindowsPath)
+
+ def test_glob_empty_pattern(self):
+ p = self.cls()
+ with self.assertRaisesRegex(ValueError, 'Unacceptable pattern'):
+ list(p.glob(''))
+
+
+@only_posix
+class PosixPathTest(_BasePathTest, unittest.TestCase):
+ cls = pathlib.PosixPath
+
+ def _check_symlink_loop(self, *args):
+ path = self.cls(*args)
+ with self.assertRaises(RuntimeError):
+ print(path.resolve(strict=True))
+
+ def _check_symlink_loop_nonstrict(self, *args):
+ path = self.cls(*args)
+ with self.assertRaises(RuntimeError):
+ print(path.resolve(strict=False))
+
+ def test_open_mode(self):
+ old_mask = os.umask(0)
+ self.addCleanup(os.umask, old_mask)
+ p = self.cls(BASE)
+ with (p / 'new_file').open('wb'):
+ pass
+ st = os.stat(join('new_file'))
+ self.assertEqual(stat.S_IMODE(st.st_mode), 0o666)
+ os.umask(0o022)
+ with (p / 'other_new_file').open('wb'):
+ pass
+ st = os.stat(join('other_new_file'))
+ self.assertEqual(stat.S_IMODE(st.st_mode), 0o644)
+
+ def test_touch_mode(self):
+ old_mask = os.umask(0)
+ self.addCleanup(os.umask, old_mask)
+ p = self.cls(BASE)
+ (p / 'new_file').touch()
+ st = os.stat(join('new_file'))
+ self.assertEqual(stat.S_IMODE(st.st_mode), 0o666)
+ os.umask(0o022)
+ (p / 'other_new_file').touch()
+ st = os.stat(join('other_new_file'))
+ self.assertEqual(stat.S_IMODE(st.st_mode), 0o644)
+ (p / 'masked_new_file').touch(mode=0o750)
+ st = os.stat(join('masked_new_file'))
+ self.assertEqual(stat.S_IMODE(st.st_mode), 0o750)
+
+ @support_skip_unless_symlink
+ def test_resolve_loop(self):
+ # Loops with relative symlinks
+ os.symlink('linkX/inside', join('linkX'))
+ self._check_symlink_loop(BASE, 'linkX')
+ os.symlink('linkY', join('linkY'))
+ self._check_symlink_loop(BASE, 'linkY')
+ os.symlink('linkZ/../linkZ', join('linkZ'))
+ self._check_symlink_loop(BASE, 'linkZ')
+ # Non-strict
+ self._check_symlink_loop_nonstrict(BASE, 'linkZ', 'foo')
+ # Loops with absolute symlinks
+ os.symlink(join('linkU/inside'), join('linkU'))
+ self._check_symlink_loop(BASE, 'linkU')
+ os.symlink(join('linkV'), join('linkV'))
+ self._check_symlink_loop(BASE, 'linkV')
+ os.symlink(join('linkW/../linkW'), join('linkW'))
+ self._check_symlink_loop(BASE, 'linkW')
+ # Non-strict
+ self._check_symlink_loop_nonstrict(BASE, 'linkW', 'foo')
+
+ def test_glob(self):
+ P = self.cls
+ p = P(BASE)
+ given = set(p.glob("FILEa"))
+ expect = set() if not support.fs_is_case_insensitive(BASE) else given
+ self.assertEqual(given, expect)
+ self.assertEqual(set(p.glob("FILEa*")), set())
+
+ def test_rglob(self):
+ P = self.cls
+ p = P(BASE, "dirC")
+ given = set(p.rglob("FILEd"))
+ expect = set() if not support.fs_is_case_insensitive(BASE) else given
+ self.assertEqual(given, expect)
+ self.assertEqual(set(p.rglob("FILEd*")), set())
+
+ @unittest.skipUnless(hasattr(pwd, 'getpwall'),
+ 'pwd module does not expose getpwall()')
+ def test_expanduser(self):
+ P = self.cls
+ support.import_module('pwd')
+ import pwd
+ pwdent = pwd.getpwuid(os.getuid())
+ username = pwdent.pw_name
+ userhome = pwdent.pw_dir.rstrip('/') or '/'
+ # find arbitrary different user (if exists)
+ for pwdent in pwd.getpwall():
+ othername = pwdent.pw_name
+ otherhome = pwdent.pw_dir.rstrip('/')
+ if othername != username and otherhome:
+ break
+
+ p1 = P('~/Documents')
+ p2 = P('~' + username + '/Documents')
+ p3 = P('~' + othername + '/Documents')
+ p4 = P('../~' + username + '/Documents')
+ p5 = P('/~' + username + '/Documents')
+ p6 = P('')
+ p7 = P('~fakeuser/Documents')
+
+ with support.EnvironmentVarGuard() as env:
+ env.unset('HOME')
+
+ self.assertEqual(p1.expanduser(), P(userhome) / 'Documents')
+ self.assertEqual(p2.expanduser(), P(userhome) / 'Documents')
+ self.assertEqual(p3.expanduser(), P(otherhome) / 'Documents')
+ self.assertEqual(p4.expanduser(), p4)
+ self.assertEqual(p5.expanduser(), p5)
+ self.assertEqual(p6.expanduser(), p6)
+ self.assertRaises(RuntimeError, p7.expanduser)
+
+ env.set('HOME', '/tmp')
+ self.assertEqual(p1.expanduser(), P('/tmp/Documents'))
+ self.assertEqual(p2.expanduser(), P(userhome) / 'Documents')
+ self.assertEqual(p3.expanduser(), P(otherhome) / 'Documents')
+ self.assertEqual(p4.expanduser(), p4)
+ self.assertEqual(p5.expanduser(), p5)
+ self.assertEqual(p6.expanduser(), p6)
+ self.assertRaises(RuntimeError, p7.expanduser)
+
+
+@only_nt
+class WindowsPathTest(_BasePathTest, unittest.TestCase):
+ cls = pathlib.WindowsPath
+
+ def test_glob(self):
+ P = self.cls
+ p = P(BASE)
+ self.assertEqual(set(p.glob("FILEa")), set([P(BASE, "fileA")]))
+
+ def test_rglob(self):
+ P = self.cls
+ p = P(BASE, "dirC")
+ self.assertEqual(set(p.rglob("FILEd")),
+ set([P(BASE, "dirC/dirD/fileD")]))
+
+ def test_expanduser(self):
+ P = self.cls
+ with support.EnvironmentVarGuard() as env:
+ env.unset('HOME')
+ env.unset('USERPROFILE')
+ env.unset('HOMEPATH')
+ env.unset('HOMEDRIVE')
+ env.set('USERNAME', 'alice')
+
+ # test that the path returns unchanged
+ p1 = P('~/My Documents')
+ p2 = P('~alice/My Documents')
+ p3 = P('~bob/My Documents')
+ p4 = P('/~/My Documents')
+ p5 = P('d:~/My Documents')
+ p6 = P('')
+ self.assertRaises(RuntimeError, p1.expanduser)
+ self.assertRaises(RuntimeError, p2.expanduser)
+ self.assertRaises(RuntimeError, p3.expanduser)
+ self.assertEqual(p4.expanduser(), p4)
+ self.assertEqual(p5.expanduser(), p5)
+ self.assertEqual(p6.expanduser(), p6)
+
+ def check():
+ env.unset('USERNAME')
+ self.assertEqual(p1.expanduser(),
+ P('C:/Users/alice/My Documents'))
+ self.assertRaises(KeyError, p2.expanduser)
+ env.set('USERNAME', 'alice')
+ self.assertEqual(p2.expanduser(),
+ P('C:/Users/alice/My Documents'))
+ self.assertEqual(p3.expanduser(),
+ P('C:/Users/bob/My Documents'))
+ self.assertEqual(p4.expanduser(), p4)
+ self.assertEqual(p5.expanduser(), p5)
+ self.assertEqual(p6.expanduser(), p6)
+
+ # test the first lookup key in the env vars
+ env.set('HOME', 'C:\\Users\\alice')
+ check()
+
+ # test that HOMEPATH is available instead
+ env.unset('HOME')
+ env.set('HOMEPATH', 'C:\\Users\\alice')
+ check()
+
+ env.set('HOMEDRIVE', 'C:\\')
+ env.set('HOMEPATH', 'Users\\alice')
+ check()
+
+ env.unset('HOMEDRIVE')
+ env.unset('HOMEPATH')
+ env.set('USERPROFILE', 'C:\\Users\\alice')
+ check()
+
+
+# extra test to ensure coverage of issue #54
+def test_resolve_extra():
+ pathlib.Path("~/does_not_exist").resolve()
+
+
+def main():
+ unittest.main(__name__)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/testing/web-platform/tests/tools/third_party/pdf_js/LICENSE b/testing/web-platform/tests/tools/third_party/pdf_js/LICENSE
new file mode 100644
index 0000000000..f433b1a53f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pdf_js/LICENSE
@@ -0,0 +1,177 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/testing/web-platform/tests/tools/third_party/pdf_js/pdf.js b/testing/web-platform/tests/tools/third_party/pdf_js/pdf.js
new file mode 100644
index 0000000000..dcefe070cc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pdf_js/pdf.js
@@ -0,0 +1,24624 @@
+/**
+ * @licstart The following is the entire license notice for the
+ * Javascript code in this page
+ *
+ * Copyright 2019 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * @licend The above is the entire license notice for the
+ * Javascript code in this page
+ */
+
+(function webpackUniversalModuleDefinition(root, factory) {
+ if(typeof exports === 'object' && typeof module === 'object')
+ module.exports = factory();
+ else if(typeof define === 'function' && define.amd)
+ define("pdfjs-dist/build/pdf", [], factory);
+ else if(typeof exports === 'object')
+ exports["pdfjs-dist/build/pdf"] = factory();
+ else
+ root["pdfjs-dist/build/pdf"] = root.pdfjsLib = factory();
+})(this, function() {
+return /******/ (function(modules) { // webpackBootstrap
+/******/ // The module cache
+/******/ var installedModules = {};
+/******/
+/******/ // The require function
+/******/ function __w_pdfjs_require__(moduleId) {
+/******/
+/******/ // Check if module is in cache
+/******/ if(installedModules[moduleId]) {
+/******/ return installedModules[moduleId].exports;
+/******/ }
+/******/ // Create a new module (and put it into the cache)
+/******/ var module = installedModules[moduleId] = {
+/******/ i: moduleId,
+/******/ l: false,
+/******/ exports: {}
+/******/ };
+/******/
+/******/ // Execute the module function
+/******/ modules[moduleId].call(module.exports, module, module.exports, __w_pdfjs_require__);
+/******/
+/******/ // Flag the module as loaded
+/******/ module.l = true;
+/******/
+/******/ // Return the exports of the module
+/******/ return module.exports;
+/******/ }
+/******/
+/******/
+/******/ // expose the modules object (__webpack_modules__)
+/******/ __w_pdfjs_require__.m = modules;
+/******/
+/******/ // expose the module cache
+/******/ __w_pdfjs_require__.c = installedModules;
+/******/
+/******/ // define getter function for harmony exports
+/******/ __w_pdfjs_require__.d = function(exports, name, getter) {
+/******/ if(!__w_pdfjs_require__.o(exports, name)) {
+/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
+/******/ }
+/******/ };
+/******/
+/******/ // define __esModule on exports
+/******/ __w_pdfjs_require__.r = function(exports) {
+/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
+/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
+/******/ }
+/******/ Object.defineProperty(exports, '__esModule', { value: true });
+/******/ };
+/******/
+/******/ // create a fake namespace object
+/******/ // mode & 1: value is a module id, require it
+/******/ // mode & 2: merge all properties of value into the ns
+/******/ // mode & 4: return value when already ns object
+/******/ // mode & 8|1: behave like require
+/******/ __w_pdfjs_require__.t = function(value, mode) {
+/******/ if(mode & 1) value = __w_pdfjs_require__(value);
+/******/ if(mode & 8) return value;
+/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
+/******/ var ns = Object.create(null);
+/******/ __w_pdfjs_require__.r(ns);
+/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
+/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __w_pdfjs_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
+/******/ return ns;
+/******/ };
+/******/
+/******/ // getDefaultExport function for compatibility with non-harmony modules
+/******/ __w_pdfjs_require__.n = function(module) {
+/******/ var getter = module && module.__esModule ?
+/******/ function getDefault() { return module['default']; } :
+/******/ function getModuleExports() { return module; };
+/******/ __w_pdfjs_require__.d(getter, 'a', getter);
+/******/ return getter;
+/******/ };
+/******/
+/******/ // Object.prototype.hasOwnProperty.call
+/******/ __w_pdfjs_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
+/******/
+/******/ // __webpack_public_path__
+/******/ __w_pdfjs_require__.p = "";
+/******/
+/******/
+/******/ // Load entry module and return exports
+/******/ return __w_pdfjs_require__(__w_pdfjs_require__.s = 0);
+/******/ })
+/************************************************************************/
+/******/ ([
+/* 0 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var pdfjsVersion = '2.2.228';
+var pdfjsBuild = 'd7afb74a';
+
+var pdfjsSharedUtil = __w_pdfjs_require__(1);
+
+var pdfjsDisplayAPI = __w_pdfjs_require__(147);
+
+var pdfjsDisplayTextLayer = __w_pdfjs_require__(162);
+
+var pdfjsDisplayAnnotationLayer = __w_pdfjs_require__(163);
+
+var pdfjsDisplayDisplayUtils = __w_pdfjs_require__(151);
+
+var pdfjsDisplaySVG = __w_pdfjs_require__(164);
+
+var pdfjsDisplayWorkerOptions = __w_pdfjs_require__(156);
+
+var pdfjsDisplayAPICompatibility = __w_pdfjs_require__(153);
+
+{
+ var isNodeJS = __w_pdfjs_require__(4);
+
+ if (isNodeJS()) {
+ var PDFNodeStream = __w_pdfjs_require__(165).PDFNodeStream;
+
+ pdfjsDisplayAPI.setPDFNetworkStreamFactory(function (params) {
+ return new PDFNodeStream(params);
+ });
+ } else {
+ var PDFNetworkStream = __w_pdfjs_require__(168).PDFNetworkStream;
+
+ var PDFFetchStream;
+
+ if (pdfjsDisplayDisplayUtils.isFetchSupported()) {
+ PDFFetchStream = __w_pdfjs_require__(169).PDFFetchStream;
+ }
+
+ pdfjsDisplayAPI.setPDFNetworkStreamFactory(function (params) {
+ if (PDFFetchStream && pdfjsDisplayDisplayUtils.isValidFetchUrl(params.url)) {
+ return new PDFFetchStream(params);
+ }
+
+ return new PDFNetworkStream(params);
+ });
+ }
+}
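+// Editorial sketch, not part of the pdf.js build: with the stream factory
+// selected above, callers go through the public API and the matching
+// transport (Node streams, fetch(), or XHR) is picked internally, e.g.
+// (filename is illustrative):
+//
+//   pdfjsLib.getDocument({ url: 'example.pdf' }).promise
+//     .then(function (doc) { console.log(doc.numPages); });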
+exports.build = pdfjsDisplayAPI.build;
+exports.version = pdfjsDisplayAPI.version;
+exports.getDocument = pdfjsDisplayAPI.getDocument;
+exports.LoopbackPort = pdfjsDisplayAPI.LoopbackPort;
+exports.PDFDataRangeTransport = pdfjsDisplayAPI.PDFDataRangeTransport;
+exports.PDFWorker = pdfjsDisplayAPI.PDFWorker;
+exports.renderTextLayer = pdfjsDisplayTextLayer.renderTextLayer;
+exports.AnnotationLayer = pdfjsDisplayAnnotationLayer.AnnotationLayer;
+exports.createPromiseCapability = pdfjsSharedUtil.createPromiseCapability;
+exports.PasswordResponses = pdfjsSharedUtil.PasswordResponses;
+exports.InvalidPDFException = pdfjsSharedUtil.InvalidPDFException;
+exports.MissingPDFException = pdfjsSharedUtil.MissingPDFException;
+exports.SVGGraphics = pdfjsDisplaySVG.SVGGraphics;
+exports.NativeImageDecoding = pdfjsSharedUtil.NativeImageDecoding;
+exports.CMapCompressionType = pdfjsSharedUtil.CMapCompressionType;
+exports.PermissionFlag = pdfjsSharedUtil.PermissionFlag;
+exports.UnexpectedResponseException = pdfjsSharedUtil.UnexpectedResponseException;
+exports.OPS = pdfjsSharedUtil.OPS;
+exports.VerbosityLevel = pdfjsSharedUtil.VerbosityLevel;
+exports.UNSUPPORTED_FEATURES = pdfjsSharedUtil.UNSUPPORTED_FEATURES;
+exports.createValidAbsoluteUrl = pdfjsSharedUtil.createValidAbsoluteUrl;
+exports.createObjectURL = pdfjsSharedUtil.createObjectURL;
+exports.removeNullCharacters = pdfjsSharedUtil.removeNullCharacters;
+exports.shadow = pdfjsSharedUtil.shadow;
+exports.Util = pdfjsSharedUtil.Util;
+exports.ReadableStream = pdfjsSharedUtil.ReadableStream;
+exports.URL = pdfjsSharedUtil.URL;
+exports.RenderingCancelledException = pdfjsDisplayDisplayUtils.RenderingCancelledException;
+exports.getFilenameFromUrl = pdfjsDisplayDisplayUtils.getFilenameFromUrl;
+exports.LinkTarget = pdfjsDisplayDisplayUtils.LinkTarget;
+exports.addLinkAttributes = pdfjsDisplayDisplayUtils.addLinkAttributes;
+exports.loadScript = pdfjsDisplayDisplayUtils.loadScript;
+exports.PDFDateString = pdfjsDisplayDisplayUtils.PDFDateString;
+exports.GlobalWorkerOptions = pdfjsDisplayWorkerOptions.GlobalWorkerOptions;
+exports.apiCompatibilityParams = pdfjsDisplayAPICompatibility.apiCompatibilityParams;
+
+/***/ }),
+/* 1 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.arrayByteLength = arrayByteLength;
+exports.arraysToBytes = arraysToBytes;
+exports.assert = assert;
+exports.bytesToString = bytesToString;
+exports.createPromiseCapability = createPromiseCapability;
+exports.getVerbosityLevel = getVerbosityLevel;
+exports.info = info;
+exports.isArrayBuffer = isArrayBuffer;
+exports.isArrayEqual = isArrayEqual;
+exports.isBool = isBool;
+exports.isEmptyObj = isEmptyObj;
+exports.isNum = isNum;
+exports.isString = isString;
+exports.isSpace = isSpace;
+exports.isSameOrigin = isSameOrigin;
+exports.createValidAbsoluteUrl = createValidAbsoluteUrl;
+exports.isLittleEndian = isLittleEndian;
+exports.isEvalSupported = isEvalSupported;
+exports.log2 = log2;
+exports.readInt8 = readInt8;
+exports.readUint16 = readUint16;
+exports.readUint32 = readUint32;
+exports.removeNullCharacters = removeNullCharacters;
+exports.setVerbosityLevel = setVerbosityLevel;
+exports.shadow = shadow;
+exports.string32 = string32;
+exports.stringToBytes = stringToBytes;
+exports.stringToPDFString = stringToPDFString;
+exports.stringToUTF8String = stringToUTF8String;
+exports.utf8StringToString = utf8StringToString;
+exports.warn = warn;
+exports.unreachable = unreachable;
+Object.defineProperty(exports, "ReadableStream", {
+ enumerable: true,
+ get: function get() {
+ return _streams_polyfill.ReadableStream;
+ }
+});
+Object.defineProperty(exports, "URL", {
+ enumerable: true,
+ get: function get() {
+ return _url_polyfill.URL;
+ }
+});
+exports.createObjectURL = exports.FormatError = exports.Util = exports.UnknownErrorException = exports.UnexpectedResponseException = exports.TextRenderingMode = exports.StreamType = exports.PermissionFlag = exports.PasswordResponses = exports.PasswordException = exports.NativeImageDecoding = exports.MissingPDFException = exports.InvalidPDFException = exports.AbortException = exports.CMapCompressionType = exports.ImageKind = exports.FontType = exports.AnnotationType = exports.AnnotationFlag = exports.AnnotationFieldFlag = exports.AnnotationBorderStyleType = exports.UNSUPPORTED_FEATURES = exports.VerbosityLevel = exports.OPS = exports.IDENTITY_MATRIX = exports.FONT_IDENTITY_MATRIX = void 0;
+
+__w_pdfjs_require__(2);
+
+var _streams_polyfill = __w_pdfjs_require__(143);
+
+var _url_polyfill = __w_pdfjs_require__(145);
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var IDENTITY_MATRIX = [1, 0, 0, 1, 0, 0];
+exports.IDENTITY_MATRIX = IDENTITY_MATRIX;
+var FONT_IDENTITY_MATRIX = [0.001, 0, 0, 0.001, 0, 0];
+exports.FONT_IDENTITY_MATRIX = FONT_IDENTITY_MATRIX;
+var NativeImageDecoding = {
+ NONE: 'none',
+ DECODE: 'decode',
+ DISPLAY: 'display'
+};
+exports.NativeImageDecoding = NativeImageDecoding;
+var PermissionFlag = {
+ PRINT: 0x04,
+ MODIFY_CONTENTS: 0x08,
+ COPY: 0x10,
+ MODIFY_ANNOTATIONS: 0x20,
+ FILL_INTERACTIVE_FORMS: 0x100,
+ COPY_FOR_ACCESSIBILITY: 0x200,
+ ASSEMBLE: 0x400,
+ PRINT_HIGH_QUALITY: 0x800
+};
+exports.PermissionFlag = PermissionFlag;
+var TextRenderingMode = {
+ FILL: 0,
+ STROKE: 1,
+ FILL_STROKE: 2,
+ INVISIBLE: 3,
+ FILL_ADD_TO_PATH: 4,
+ STROKE_ADD_TO_PATH: 5,
+ FILL_STROKE_ADD_TO_PATH: 6,
+ ADD_TO_PATH: 7,
+ FILL_STROKE_MASK: 3,
+ ADD_TO_PATH_FLAG: 4
+};
+exports.TextRenderingMode = TextRenderingMode;
+var ImageKind = {
+ GRAYSCALE_1BPP: 1,
+ RGB_24BPP: 2,
+ RGBA_32BPP: 3
+};
+exports.ImageKind = ImageKind;
+var AnnotationType = {
+ TEXT: 1,
+ LINK: 2,
+ FREETEXT: 3,
+ LINE: 4,
+ SQUARE: 5,
+ CIRCLE: 6,
+ POLYGON: 7,
+ POLYLINE: 8,
+ HIGHLIGHT: 9,
+ UNDERLINE: 10,
+ SQUIGGLY: 11,
+ STRIKEOUT: 12,
+ STAMP: 13,
+ CARET: 14,
+ INK: 15,
+ POPUP: 16,
+ FILEATTACHMENT: 17,
+ SOUND: 18,
+ MOVIE: 19,
+ WIDGET: 20,
+ SCREEN: 21,
+ PRINTERMARK: 22,
+ TRAPNET: 23,
+ WATERMARK: 24,
+ THREED: 25,
+ REDACT: 26
+};
+exports.AnnotationType = AnnotationType;
+var AnnotationFlag = {
+ INVISIBLE: 0x01,
+ HIDDEN: 0x02,
+ PRINT: 0x04,
+ NOZOOM: 0x08,
+ NOROTATE: 0x10,
+ NOVIEW: 0x20,
+ READONLY: 0x40,
+ LOCKED: 0x80,
+ TOGGLENOVIEW: 0x100,
+ LOCKEDCONTENTS: 0x200
+};
+exports.AnnotationFlag = AnnotationFlag;
+var AnnotationFieldFlag = {
+ READONLY: 0x0000001,
+ REQUIRED: 0x0000002,
+ NOEXPORT: 0x0000004,
+ MULTILINE: 0x0001000,
+ PASSWORD: 0x0002000,
+ NOTOGGLETOOFF: 0x0004000,
+ RADIO: 0x0008000,
+ PUSHBUTTON: 0x0010000,
+ COMBO: 0x0020000,
+ EDIT: 0x0040000,
+ SORT: 0x0080000,
+ FILESELECT: 0x0100000,
+ MULTISELECT: 0x0200000,
+ DONOTSPELLCHECK: 0x0400000,
+ DONOTSCROLL: 0x0800000,
+ COMB: 0x1000000,
+ RICHTEXT: 0x2000000,
+ RADIOSINUNISON: 0x2000000,
+ COMMITONSELCHANGE: 0x4000000
+};
+exports.AnnotationFieldFlag = AnnotationFieldFlag;
+var AnnotationBorderStyleType = {
+ SOLID: 1,
+ DASHED: 2,
+ BEVELED: 3,
+ INSET: 4,
+ UNDERLINE: 5
+};
+exports.AnnotationBorderStyleType = AnnotationBorderStyleType;
+var StreamType = {
+ UNKNOWN: 0,
+ FLATE: 1,
+ LZW: 2,
+ DCT: 3,
+ JPX: 4,
+ JBIG: 5,
+ A85: 6,
+ AHX: 7,
+ CCF: 8,
+ RL: 9
+};
+exports.StreamType = StreamType;
+var FontType = {
+ UNKNOWN: 0,
+ TYPE1: 1,
+ TYPE1C: 2,
+ CIDFONTTYPE0: 3,
+ CIDFONTTYPE0C: 4,
+ TRUETYPE: 5,
+ CIDFONTTYPE2: 6,
+ TYPE3: 7,
+ OPENTYPE: 8,
+ TYPE0: 9,
+ MMTYPE1: 10
+};
+exports.FontType = FontType;
+var VerbosityLevel = {
+ ERRORS: 0,
+ WARNINGS: 1,
+ INFOS: 5
+};
+exports.VerbosityLevel = VerbosityLevel;
+var CMapCompressionType = {
+ NONE: 0,
+ BINARY: 1,
+ STREAM: 2
+};
+exports.CMapCompressionType = CMapCompressionType;
+var OPS = {
+ dependency: 1,
+ setLineWidth: 2,
+ setLineCap: 3,
+ setLineJoin: 4,
+ setMiterLimit: 5,
+ setDash: 6,
+ setRenderingIntent: 7,
+ setFlatness: 8,
+ setGState: 9,
+ save: 10,
+ restore: 11,
+ transform: 12,
+ moveTo: 13,
+ lineTo: 14,
+ curveTo: 15,
+ curveTo2: 16,
+ curveTo3: 17,
+ closePath: 18,
+ rectangle: 19,
+ stroke: 20,
+ closeStroke: 21,
+ fill: 22,
+ eoFill: 23,
+ fillStroke: 24,
+ eoFillStroke: 25,
+ closeFillStroke: 26,
+ closeEOFillStroke: 27,
+ endPath: 28,
+ clip: 29,
+ eoClip: 30,
+ beginText: 31,
+ endText: 32,
+ setCharSpacing: 33,
+ setWordSpacing: 34,
+ setHScale: 35,
+ setLeading: 36,
+ setFont: 37,
+ setTextRenderingMode: 38,
+ setTextRise: 39,
+ moveText: 40,
+ setLeadingMoveText: 41,
+ setTextMatrix: 42,
+ nextLine: 43,
+ showText: 44,
+ showSpacedText: 45,
+ nextLineShowText: 46,
+ nextLineSetSpacingShowText: 47,
+ setCharWidth: 48,
+ setCharWidthAndBounds: 49,
+ setStrokeColorSpace: 50,
+ setFillColorSpace: 51,
+ setStrokeColor: 52,
+ setStrokeColorN: 53,
+ setFillColor: 54,
+ setFillColorN: 55,
+ setStrokeGray: 56,
+ setFillGray: 57,
+ setStrokeRGBColor: 58,
+ setFillRGBColor: 59,
+ setStrokeCMYKColor: 60,
+ setFillCMYKColor: 61,
+ shadingFill: 62,
+ beginInlineImage: 63,
+ beginImageData: 64,
+ endInlineImage: 65,
+ paintXObject: 66,
+ markPoint: 67,
+ markPointProps: 68,
+ beginMarkedContent: 69,
+ beginMarkedContentProps: 70,
+ endMarkedContent: 71,
+ beginCompat: 72,
+ endCompat: 73,
+ paintFormXObjectBegin: 74,
+ paintFormXObjectEnd: 75,
+ beginGroup: 76,
+ endGroup: 77,
+ beginAnnotations: 78,
+ endAnnotations: 79,
+ beginAnnotation: 80,
+ endAnnotation: 81,
+ paintJpegXObject: 82,
+ paintImageMaskXObject: 83,
+ paintImageMaskXObjectGroup: 84,
+ paintImageXObject: 85,
+ paintInlineImageXObject: 86,
+ paintInlineImageXObjectGroup: 87,
+ paintImageXObjectRepeat: 88,
+ paintImageMaskXObjectRepeat: 89,
+ paintSolidColorImageMask: 90,
+ constructPath: 91
+};
+exports.OPS = OPS;
+var UNSUPPORTED_FEATURES = {
+ unknown: 'unknown',
+ forms: 'forms',
+ javaScript: 'javaScript',
+ smask: 'smask',
+ shadingPattern: 'shadingPattern',
+ font: 'font'
+};
+exports.UNSUPPORTED_FEATURES = UNSUPPORTED_FEATURES;
+var PasswordResponses = {
+ NEED_PASSWORD: 1,
+ INCORRECT_PASSWORD: 2
+};
+exports.PasswordResponses = PasswordResponses;
+var verbosity = VerbosityLevel.WARNINGS;
+
+function setVerbosityLevel(level) {
+ if (Number.isInteger(level)) {
+ verbosity = level;
+ }
+}
+
+function getVerbosityLevel() {
+ return verbosity;
+}
+
+function info(msg) {
+ if (verbosity >= VerbosityLevel.INFOS) {
+ console.log('Info: ' + msg);
+ }
+}
+
+function warn(msg) {
+ if (verbosity >= VerbosityLevel.WARNINGS) {
+ console.log('Warning: ' + msg);
+ }
+}
+
+function unreachable(msg) {
+ throw new Error(msg);
+}
+
+function assert(cond, msg) {
+ if (!cond) {
+ unreachable(msg);
+ }
+}
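+
+// Illustrative usage (editor's note, not part of the upstream bundle): the
+// helpers above gate console output on the module-level `verbosity` value.
+// setVerbosityLevel(VerbosityLevel.INFOS);
+// info('resolving object streams');  // now printed: verbosity >= INFOS
+// warn('unexpected trailer entry');  // printed at WARNINGS and above
+// assert(getVerbosityLevel() === VerbosityLevel.INFOS, 'verbosity not applied');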
+
+function isSameOrigin(baseUrl, otherUrl) {
+ try {
+ var base = new _url_polyfill.URL(baseUrl);
+
+ if (!base.origin || base.origin === 'null') {
+ return false;
+ }
+ } catch (e) {
+ return false;
+ }
+
+ var other = new _url_polyfill.URL(otherUrl, base);
+ return base.origin === other.origin;
+}
+
+function _isValidProtocol(url) {
+ if (!url) {
+ return false;
+ }
+
+ switch (url.protocol) {
+ case 'http:':
+ case 'https:':
+ case 'ftp:':
+ case 'mailto:':
+ case 'tel:':
+ return true;
+
+ default:
+ return false;
+ }
+}
+
+function createValidAbsoluteUrl(url, baseUrl) {
+ if (!url) {
+ return null;
+ }
+
+ try {
+ var absoluteUrl = baseUrl ? new _url_polyfill.URL(url, baseUrl) : new _url_polyfill.URL(url);
+
+ if (_isValidProtocol(absoluteUrl)) {
+ return absoluteUrl;
+ }
+ } catch (ex) {}
+
+ return null;
+}
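+
+// Illustrative sketch (editor's note): only the protocols whitelisted in
+// _isValidProtocol() survive; everything else yields null.
+// createValidAbsoluteUrl('mailto:user@example.com');              // URL instance
+// createValidAbsoluteUrl('javascript:alert(1)');                  // null
+// createValidAbsoluteUrl('../page2', 'https://example.com/doc/'); // resolved URL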
+
+function shadow(obj, prop, value) {
+ Object.defineProperty(obj, prop, {
+ value: value,
+ enumerable: true,
+ configurable: true,
+ writable: false
+ });
+ return value;
+}
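+
+// Illustrative sketch (editor's note): shadow() backs the lazy-getter caching
+// idiom -- compute a value once, then overwrite the accessor with a read-only
+// data property. `Parser` and `computeFingerprint` below are hypothetical names.
+// Object.defineProperty(Parser.prototype, 'fingerprint', {
+//   get: function () {
+//     return shadow(this, 'fingerprint', computeFingerprint(this));
+//   },
+//   configurable: true
+// });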
+
+var PasswordException = function PasswordExceptionClosure() {
+ function PasswordException(msg, code) {
+ this.name = 'PasswordException';
+ this.message = msg;
+ this.code = code;
+ }
+
+ PasswordException.prototype = new Error();
+ PasswordException.constructor = PasswordException;
+ return PasswordException;
+}();
+
+exports.PasswordException = PasswordException;
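+
+// Editor's note (illustrative): the `code` argument is expected to be one of
+// the PasswordResponses values defined earlier, e.g.
+// throw new PasswordException('No password given', PasswordResponses.NEED_PASSWORD);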
+
+var UnknownErrorException = function UnknownErrorExceptionClosure() {
+ function UnknownErrorException(msg, details) {
+ this.name = 'UnknownErrorException';
+ this.message = msg;
+ this.details = details;
+ }
+
+ UnknownErrorException.prototype = new Error();
+ UnknownErrorException.constructor = UnknownErrorException;
+ return UnknownErrorException;
+}();
+
+exports.UnknownErrorException = UnknownErrorException;
+
+var InvalidPDFException = function InvalidPDFExceptionClosure() {
+ function InvalidPDFException(msg) {
+ this.name = 'InvalidPDFException';
+ this.message = msg;
+ }
+
+ InvalidPDFException.prototype = new Error();
+ InvalidPDFException.constructor = InvalidPDFException;
+ return InvalidPDFException;
+}();
+
+exports.InvalidPDFException = InvalidPDFException;
+
+var MissingPDFException = function MissingPDFExceptionClosure() {
+ function MissingPDFException(msg) {
+ this.name = 'MissingPDFException';
+ this.message = msg;
+ }
+
+ MissingPDFException.prototype = new Error();
+ MissingPDFException.constructor = MissingPDFException;
+ return MissingPDFException;
+}();
+
+exports.MissingPDFException = MissingPDFException;
+
+var UnexpectedResponseException = function UnexpectedResponseExceptionClosure() {
+ function UnexpectedResponseException(msg, status) {
+ this.name = 'UnexpectedResponseException';
+ this.message = msg;
+ this.status = status;
+ }
+
+ UnexpectedResponseException.prototype = new Error();
+ UnexpectedResponseException.constructor = UnexpectedResponseException;
+ return UnexpectedResponseException;
+}();
+
+exports.UnexpectedResponseException = UnexpectedResponseException;
+
+var FormatError = function FormatErrorClosure() {
+ function FormatError(msg) {
+ this.message = msg;
+ }
+
+ FormatError.prototype = new Error();
+ FormatError.prototype.name = 'FormatError';
+ FormatError.constructor = FormatError;
+ return FormatError;
+}();
+
+exports.FormatError = FormatError;
+
+var AbortException = function AbortExceptionClosure() {
+ function AbortException(msg) {
+ this.name = 'AbortException';
+ this.message = msg;
+ }
+
+ AbortException.prototype = new Error();
+ AbortException.constructor = AbortException;
+ return AbortException;
+}();
+
+exports.AbortException = AbortException;
+var NullCharactersRegExp = /\x00/g;
+
+function removeNullCharacters(str) {
+ if (typeof str !== 'string') {
+ warn('The argument for removeNullCharacters must be a string.');
+ return str;
+ }
+
+ return str.replace(NullCharactersRegExp, '');
+}
+
+function bytesToString(bytes) {
+ assert(bytes !== null && _typeof(bytes) === 'object' && bytes.length !== undefined, 'Invalid argument for bytesToString');
+ var length = bytes.length;
+ var MAX_ARGUMENT_COUNT = 8192;
+
+ if (length < MAX_ARGUMENT_COUNT) {
+ return String.fromCharCode.apply(null, bytes);
+ }
+
+ var strBuf = [];
+
+ for (var i = 0; i < length; i += MAX_ARGUMENT_COUNT) {
+ var chunkEnd = Math.min(i + MAX_ARGUMENT_COUNT, length);
+ var chunk = bytes.subarray(i, chunkEnd);
+ strBuf.push(String.fromCharCode.apply(null, chunk));
+ }
+
+ return strBuf.join('');
+}
+
+function stringToBytes(str) {
+ assert(typeof str === 'string', 'Invalid argument for stringToBytes');
+ var length = str.length;
+ var bytes = new Uint8Array(length);
+
+ for (var i = 0; i < length; ++i) {
+ bytes[i] = str.charCodeAt(i) & 0xFF;
+ }
+
+ return bytes;
+}
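+
+// Illustrative round trip (editor's note): both helpers treat strings as
+// Latin-1 byte sequences, so conversion is lossless only for code points <= 0xFF.
+// var bytes = stringToBytes('%PDF-1.7'); // Uint8Array of the ASCII codes
+// bytesToString(bytes) === '%PDF-1.7';   // true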
+
+function arrayByteLength(arr) {
+ if (arr.length !== undefined) {
+ return arr.length;
+ }
+
+ assert(arr.byteLength !== undefined);
+ return arr.byteLength;
+}
+
+function arraysToBytes(arr) {
+ if (arr.length === 1 && arr[0] instanceof Uint8Array) {
+ return arr[0];
+ }
+
+ var resultLength = 0;
+ var i,
+ ii = arr.length;
+ var item, itemLength;
+
+ for (i = 0; i < ii; i++) {
+ item = arr[i];
+ itemLength = arrayByteLength(item);
+ resultLength += itemLength;
+ }
+
+ var pos = 0;
+ var data = new Uint8Array(resultLength);
+
+ for (i = 0; i < ii; i++) {
+ item = arr[i];
+
+ if (!(item instanceof Uint8Array)) {
+ if (typeof item === 'string') {
+ item = stringToBytes(item);
+ } else {
+ item = new Uint8Array(item);
+ }
+ }
+
+ itemLength = item.byteLength;
+ data.set(item, pos);
+ pos += itemLength;
+ }
+
+ return data;
+}
+
+function string32(value) {
+ return String.fromCharCode(value >> 24 & 0xff, value >> 16 & 0xff, value >> 8 & 0xff, value & 0xff);
+}
+
+function log2(x) {
+ if (x <= 0) {
+ return 0;
+ }
+
+ return Math.ceil(Math.log2(x));
+}
+
+function readInt8(data, start) {
+ return data[start] << 24 >> 24;
+}
+
+function readUint16(data, offset) {
+ return data[offset] << 8 | data[offset + 1];
+}
+
+function readUint32(data, offset) {
+ return (data[offset] << 24 | data[offset + 1] << 16 | data[offset + 2] << 8 | data[offset + 3]) >>> 0;
+}
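+
+// Illustrative sketch (editor's note): the readers above decode big-endian
+// integers from a byte array.
+// var data = new Uint8Array([0x00, 0x01, 0x00, 0x00]);
+// readUint16(data, 0); // 1
+// readUint32(data, 0); // 65536
+// readInt8(new Uint8Array([0xFF]), 0); // -1 (sign-extended)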
+
+function isLittleEndian() {
+ var buffer8 = new Uint8Array(4);
+ buffer8[0] = 1;
+ var view32 = new Uint32Array(buffer8.buffer, 0, 1);
+ return view32[0] === 1;
+}
+
+function isEvalSupported() {
+ try {
+ new Function('');
+ return true;
+ } catch (e) {
+ return false;
+ }
+}
+
+var Util = function UtilClosure() {
+ function Util() {}
+
+ var rgbBuf = ['rgb(', 0, ',', 0, ',', 0, ')'];
+
+ Util.makeCssRgb = function Util_makeCssRgb(r, g, b) {
+ rgbBuf[1] = r;
+ rgbBuf[3] = g;
+ rgbBuf[5] = b;
+ return rgbBuf.join('');
+ };
+
+ Util.transform = function Util_transform(m1, m2) {
+ return [m1[0] * m2[0] + m1[2] * m2[1], m1[1] * m2[0] + m1[3] * m2[1], m1[0] * m2[2] + m1[2] * m2[3], m1[1] * m2[2] + m1[3] * m2[3], m1[0] * m2[4] + m1[2] * m2[5] + m1[4], m1[1] * m2[4] + m1[3] * m2[5] + m1[5]];
+ };
+
+ Util.applyTransform = function Util_applyTransform(p, m) {
+ var xt = p[0] * m[0] + p[1] * m[2] + m[4];
+ var yt = p[0] * m[1] + p[1] * m[3] + m[5];
+ return [xt, yt];
+ };
+
+ Util.applyInverseTransform = function Util_applyInverseTransform(p, m) {
+ var d = m[0] * m[3] - m[1] * m[2];
+ var xt = (p[0] * m[3] - p[1] * m[2] + m[2] * m[5] - m[4] * m[3]) / d;
+ var yt = (-p[0] * m[1] + p[1] * m[0] + m[4] * m[1] - m[5] * m[0]) / d;
+ return [xt, yt];
+ };
+
+ Util.getAxialAlignedBoundingBox = function Util_getAxialAlignedBoundingBox(r, m) {
+ var p1 = Util.applyTransform(r, m);
+ var p2 = Util.applyTransform(r.slice(2, 4), m);
+ var p3 = Util.applyTransform([r[0], r[3]], m);
+ var p4 = Util.applyTransform([r[2], r[1]], m);
+ return [Math.min(p1[0], p2[0], p3[0], p4[0]), Math.min(p1[1], p2[1], p3[1], p4[1]), Math.max(p1[0], p2[0], p3[0], p4[0]), Math.max(p1[1], p2[1], p3[1], p4[1])];
+ };
+
+ Util.inverseTransform = function Util_inverseTransform(m) {
+ var d = m[0] * m[3] - m[1] * m[2];
+ return [m[3] / d, -m[1] / d, -m[2] / d, m[0] / d, (m[2] * m[5] - m[4] * m[3]) / d, (m[4] * m[1] - m[5] * m[0]) / d];
+ };
+
+ Util.apply3dTransform = function Util_apply3dTransform(m, v) {
+ return [m[0] * v[0] + m[1] * v[1] + m[2] * v[2], m[3] * v[0] + m[4] * v[1] + m[5] * v[2], m[6] * v[0] + m[7] * v[1] + m[8] * v[2]];
+ };
+
+ Util.singularValueDecompose2dScale = function Util_singularValueDecompose2dScale(m) {
+ var transpose = [m[0], m[2], m[1], m[3]];
+ var a = m[0] * transpose[0] + m[1] * transpose[2];
+ var b = m[0] * transpose[1] + m[1] * transpose[3];
+ var c = m[2] * transpose[0] + m[3] * transpose[2];
+ var d = m[2] * transpose[1] + m[3] * transpose[3];
+ var first = (a + d) / 2;
+ var second = Math.sqrt((a + d) * (a + d) - 4 * (a * d - c * b)) / 2;
+ var sx = first + second || 1;
+ var sy = first - second || 1;
+ return [Math.sqrt(sx), Math.sqrt(sy)];
+ };
+
+ Util.normalizeRect = function Util_normalizeRect(rect) {
+ var r = rect.slice(0);
+
+ if (rect[0] > rect[2]) {
+ r[0] = rect[2];
+ r[2] = rect[0];
+ }
+
+ if (rect[1] > rect[3]) {
+ r[1] = rect[3];
+ r[3] = rect[1];
+ }
+
+ return r;
+ };
+
+ Util.intersect = function Util_intersect(rect1, rect2) {
+ function compare(a, b) {
+ return a - b;
+ }
+
+ var orderedX = [rect1[0], rect1[2], rect2[0], rect2[2]].sort(compare),
+ orderedY = [rect1[1], rect1[3], rect2[1], rect2[3]].sort(compare),
+ result = [];
+ rect1 = Util.normalizeRect(rect1);
+ rect2 = Util.normalizeRect(rect2);
+
+ if (orderedX[0] === rect1[0] && orderedX[1] === rect2[0] || orderedX[0] === rect2[0] && orderedX[1] === rect1[0]) {
+ result[0] = orderedX[1];
+ result[2] = orderedX[2];
+ } else {
+ return false;
+ }
+
+ if (orderedY[0] === rect1[1] && orderedY[1] === rect2[1] || orderedY[0] === rect2[1] && orderedY[1] === rect1[1]) {
+ result[1] = orderedY[1];
+ result[3] = orderedY[2];
+ } else {
+ return false;
+ }
+
+ return result;
+ };
+
+ return Util;
+}();
+
+exports.Util = Util;
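+// Illustrative sketch (editor's note): matrices use the 6-element PDF form
+// [a, b, c, d, e, f], i.e. x' = a*x + c*y + e and y' = b*x + d*y + f.
+// Util.transform([1, 0, 0, 1, 10, 20], [2, 0, 0, 2, 0, 0]); // [2, 0, 0, 2, 10, 20]
+// Util.applyTransform([1, 1], [2, 0, 0, 2, 10, 20]);        // [12, 22]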
+var PDFStringTranslateTable = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x2D8, 0x2C7, 0x2C6, 0x2D9, 0x2DD, 0x2DB, 0x2DA, 0x2DC, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x2022, 0x2020, 0x2021, 0x2026, 0x2014, 0x2013, 0x192, 0x2044, 0x2039, 0x203A, 0x2212, 0x2030, 0x201E, 0x201C, 0x201D, 0x2018, 0x2019, 0x201A, 0x2122, 0xFB01, 0xFB02, 0x141, 0x152, 0x160, 0x178, 0x17D, 0x131, 0x142, 0x153, 0x161, 0x17E, 0, 0x20AC];
+
+function stringToPDFString(str) {
+ var i,
+ n = str.length,
+ strBuf = [];
+
+ if (str[0] === '\xFE' && str[1] === '\xFF') {
+ for (i = 2; i < n; i += 2) {
+ strBuf.push(String.fromCharCode(str.charCodeAt(i) << 8 | str.charCodeAt(i + 1)));
+ }
+ } else {
+ for (i = 0; i < n; ++i) {
+ var code = PDFStringTranslateTable[str.charCodeAt(i)];
+ strBuf.push(code ? String.fromCharCode(code) : str.charAt(i));
+ }
+ }
+
+ return strBuf.join('');
+}
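+
+// Illustrative sketch (editor's note): PDF text strings are either UTF-16BE
+// (signalled by a 0xFE 0xFF byte-order mark) or PDFDocEncoding remapped via
+// PDFStringTranslateTable above.
+// stringToPDFString('\xFE\xFF\x00H\x00i'); // 'Hi'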
+
+function stringToUTF8String(str) {
+ return decodeURIComponent(escape(str));
+}
+
+function utf8StringToString(str) {
+ return unescape(encodeURIComponent(str));
+}
+
+function isEmptyObj(obj) {
+ for (var key in obj) {
+ return false;
+ }
+
+ return true;
+}
+
+function isBool(v) {
+ return typeof v === 'boolean';
+}
+
+function isNum(v) {
+ return typeof v === 'number';
+}
+
+function isString(v) {
+ return typeof v === 'string';
+}
+
+function isArrayBuffer(v) {
+ return _typeof(v) === 'object' && v !== null && v.byteLength !== undefined;
+}
+
+function isArrayEqual(arr1, arr2) {
+ if (arr1.length !== arr2.length) {
+ return false;
+ }
+
+ return arr1.every(function (element, index) {
+ return element === arr2[index];
+ });
+}
+
+function isSpace(ch) {
+ return ch === 0x20 || ch === 0x09 || ch === 0x0D || ch === 0x0A;
+}
+
+function createPromiseCapability() {
+ var capability = Object.create(null);
+ var isSettled = false;
+ Object.defineProperty(capability, 'settled', {
+ get: function get() {
+ return isSettled;
+ }
+ });
+ capability.promise = new Promise(function (resolve, reject) {
+ capability.resolve = function (data) {
+ isSettled = true;
+ resolve(data);
+ };
+
+ capability.reject = function (reason) {
+ isSettled = true;
+ reject(reason);
+ };
+ });
+ return capability;
+}
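+
+// Illustrative sketch (editor's note): a capability bundles a promise with its
+// resolve/reject functions and a `settled` flag.
+// var cap = createPromiseCapability();
+// cap.promise.then(function (value) { /* value === 42 here */ });
+// cap.settled;    // false
+// cap.resolve(42);
+// cap.settled;    // true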
+
+var createObjectURL = function createObjectURLClosure() {
+ var digits = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
+ return function createObjectURL(data, contentType) {
+ var forceDataSchema = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
+
+ if (!forceDataSchema && _url_polyfill.URL.createObjectURL) {
+ var blob = new Blob([data], {
+ type: contentType
+ });
+ return _url_polyfill.URL.createObjectURL(blob);
+ }
+
+ var buffer = 'data:' + contentType + ';base64,';
+
+ for (var i = 0, ii = data.length; i < ii; i += 3) {
+ var b1 = data[i] & 0xFF;
+ var b2 = data[i + 1] & 0xFF;
+ var b3 = data[i + 2] & 0xFF;
+ var d1 = b1 >> 2,
+ d2 = (b1 & 3) << 4 | b2 >> 4;
+ var d3 = i + 1 < ii ? (b2 & 0xF) << 2 | b3 >> 6 : 64;
+ var d4 = i + 2 < ii ? b3 & 0x3F : 64;
+ buffer += digits[d1] + digits[d2] + digits[d3] + digits[d4];
+ }
+
+ return buffer;
+ };
+}();
+
+exports.createObjectURL = createObjectURL;
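+
+// Illustrative sketch (editor's note): with forceDataSchema set to true the
+// helper skips URL.createObjectURL and base64-encodes the bytes into a data: URI.
+// createObjectURL(new Uint8Array([104, 105]), 'text/plain', true);
+// // -> 'data:text/plain;base64,aGk='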
+
+/***/ }),
+/* 2 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var globalScope = __w_pdfjs_require__(3);
+
+if (!globalScope._pdfjsCompatibilityChecked) {
+ globalScope._pdfjsCompatibilityChecked = true;
+
+ var isNodeJS = __w_pdfjs_require__(4);
+
+ var hasDOM = (typeof window === "undefined" ? "undefined" : _typeof(window)) === 'object' && (typeof document === "undefined" ? "undefined" : _typeof(document)) === 'object';
+
+ (function checkNodeBtoa() {
+ if (globalScope.btoa || !isNodeJS()) {
+ return;
+ }
+
+ globalScope.btoa = function (chars) {
+ return Buffer.from(chars, 'binary').toString('base64');
+ };
+ })();
+
+ (function checkNodeAtob() {
+ if (globalScope.atob || !isNodeJS()) {
+ return;
+ }
+
+ globalScope.atob = function (input) {
+ return Buffer.from(input, 'base64').toString('binary');
+ };
+ })();
+
+ (function checkChildNodeRemove() {
+ if (!hasDOM) {
+ return;
+ }
+
+ if (typeof Element.prototype.remove !== 'undefined') {
+ return;
+ }
+
+ Element.prototype.remove = function () {
+ if (this.parentNode) {
+ this.parentNode.removeChild(this);
+ }
+ };
+ })();
+
+ (function checkDOMTokenListAddRemove() {
+ if (!hasDOM || isNodeJS()) {
+ return;
+ }
+
+ var div = document.createElement('div');
+ div.classList.add('testOne', 'testTwo');
+
+ if (div.classList.contains('testOne') === true && div.classList.contains('testTwo') === true) {
+ return;
+ }
+
+ var OriginalDOMTokenListAdd = DOMTokenList.prototype.add;
+ var OriginalDOMTokenListRemove = DOMTokenList.prototype.remove;
+
+ DOMTokenList.prototype.add = function () {
+ for (var _len = arguments.length, tokens = new Array(_len), _key = 0; _key < _len; _key++) {
+ tokens[_key] = arguments[_key];
+ }
+
+ for (var _i = 0, _tokens = tokens; _i < _tokens.length; _i++) {
+ var token = _tokens[_i];
+ OriginalDOMTokenListAdd.call(this, token);
+ }
+ };
+
+ DOMTokenList.prototype.remove = function () {
+ for (var _len2 = arguments.length, tokens = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
+ tokens[_key2] = arguments[_key2];
+ }
+
+ for (var _i2 = 0, _tokens2 = tokens; _i2 < _tokens2.length; _i2++) {
+ var token = _tokens2[_i2];
+ OriginalDOMTokenListRemove.call(this, token);
+ }
+ };
+ })();
+
+ (function checkDOMTokenListToggle() {
+ if (!hasDOM || isNodeJS()) {
+ return;
+ }
+
+ var div = document.createElement('div');
+
+ if (div.classList.toggle('test', 0) === false) {
+ return;
+ }
+
+ DOMTokenList.prototype.toggle = function (token) {
+ var force = arguments.length > 1 ? !!arguments[1] : !this.contains(token);
+ return this[force ? 'add' : 'remove'](token), force;
+ };
+ })();
+
+ (function checkStringStartsWith() {
+ if (String.prototype.startsWith) {
+ return;
+ }
+
+ __w_pdfjs_require__(5);
+ })();
+
+ (function checkStringEndsWith() {
+ if (String.prototype.endsWith) {
+ return;
+ }
+
+ __w_pdfjs_require__(36);
+ })();
+
+ (function checkStringIncludes() {
+ if (String.prototype.includes) {
+ return;
+ }
+
+ __w_pdfjs_require__(38);
+ })();
+
+ (function checkArrayIncludes() {
+ if (Array.prototype.includes) {
+ return;
+ }
+
+ __w_pdfjs_require__(40);
+ })();
+
+ (function checkArrayFrom() {
+ if (Array.from) {
+ return;
+ }
+
+ __w_pdfjs_require__(47);
+ })();
+
+ (function checkObjectAssign() {
+ if (Object.assign) {
+ return;
+ }
+
+ __w_pdfjs_require__(70);
+ })();
+
+ (function checkMathLog2() {
+ if (Math.log2) {
+ return;
+ }
+
+ Math.log2 = __w_pdfjs_require__(75);
+ })();
+
+ (function checkNumberIsNaN() {
+ if (Number.isNaN) {
+ return;
+ }
+
+ Number.isNaN = __w_pdfjs_require__(77);
+ })();
+
+ (function checkNumberIsInteger() {
+ if (Number.isInteger) {
+ return;
+ }
+
+ Number.isInteger = __w_pdfjs_require__(79);
+ })();
+
+ (function checkPromise() {
+ if (globalScope.Promise && globalScope.Promise.prototype && globalScope.Promise.prototype["finally"]) {
+ return;
+ }
+
+ globalScope.Promise = __w_pdfjs_require__(82);
+ })();
+
+ (function checkWeakMap() {
+ if (globalScope.WeakMap) {
+ return;
+ }
+
+ globalScope.WeakMap = __w_pdfjs_require__(102);
+ })();
+
+ (function checkWeakSet() {
+ if (globalScope.WeakSet) {
+ return;
+ }
+
+ globalScope.WeakSet = __w_pdfjs_require__(119);
+ })();
+
+ (function checkStringCodePointAt() {
+ if (String.codePointAt) {
+ return;
+ }
+
+ String.codePointAt = __w_pdfjs_require__(123);
+ })();
+
+ (function checkStringFromCodePoint() {
+ if (String.fromCodePoint) {
+ return;
+ }
+
+ String.fromCodePoint = __w_pdfjs_require__(125);
+ })();
+
+ (function checkSymbol() {
+ if (globalScope.Symbol) {
+ return;
+ }
+
+ __w_pdfjs_require__(127);
+ })();
+
+ (function checkStringPadStart() {
+ if (String.prototype.padStart) {
+ return;
+ }
+
+ __w_pdfjs_require__(134);
+ })();
+
+ (function checkStringPadEnd() {
+ if (String.prototype.padEnd) {
+ return;
+ }
+
+ __w_pdfjs_require__(138);
+ })();
+
+ (function checkObjectValues() {
+ if (Object.values) {
+ return;
+ }
+
+ Object.values = __w_pdfjs_require__(140);
+ })();
+}
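+
+// Editor's note: the block above runs once per global scope (guarded by
+// _pdfjsCompatibilityChecked) and installs Node shims (btoa/atob), DOM fixes
+// (Element#remove, DOMTokenList add/remove/toggle) and core-js polyfills for
+// String/Array/Object/Number/Promise/WeakMap/WeakSet/Symbol features that may
+// be missing from older engines.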
+
+/***/ }),
+/* 3 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = typeof window !== 'undefined' && window.Math === Math ? window : typeof global !== 'undefined' && global.Math === Math ? global : typeof self !== 'undefined' && self.Math === Math ? self : {};
+
+/***/ }),
+/* 4 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+module.exports = function isNodeJS() {
+ return (typeof process === "undefined" ? "undefined" : _typeof(process)) === 'object' && process + '' === '[object process]' && !process.versions['nw'] && !process.versions['electron'];
+};
+
+/***/ }),
+/* 5 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(6);
+
+module.exports = __w_pdfjs_require__(9).String.startsWith;
+
+/***/ }),
+/* 6 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+var toLength = __w_pdfjs_require__(28);
+
+var context = __w_pdfjs_require__(30);
+
+var STARTS_WITH = 'startsWith';
+var $startsWith = ''[STARTS_WITH];
+$export($export.P + $export.F * __w_pdfjs_require__(35)(STARTS_WITH), 'String', {
+ startsWith: function startsWith(searchString) {
+ var that = context(this, searchString, STARTS_WITH);
+ var index = toLength(Math.min(arguments.length > 1 ? arguments[1] : undefined, that.length));
+ var search = String(searchString);
+ return $startsWith ? $startsWith.call(that, search, index) : that.slice(index, index + search.length) === search;
+ }
+});
+
+/***/ }),
+/* 7 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(8);
+
+var core = __w_pdfjs_require__(9);
+
+var hide = __w_pdfjs_require__(10);
+
+var redefine = __w_pdfjs_require__(20);
+
+var ctx = __w_pdfjs_require__(26);
+
+var PROTOTYPE = 'prototype';
+
+var $export = function $export(type, name, source) {
+ var IS_FORCED = type & $export.F;
+ var IS_GLOBAL = type & $export.G;
+ var IS_STATIC = type & $export.S;
+ var IS_PROTO = type & $export.P;
+ var IS_BIND = type & $export.B;
+ var target = IS_GLOBAL ? global : IS_STATIC ? global[name] || (global[name] = {}) : (global[name] || {})[PROTOTYPE];
+ var exports = IS_GLOBAL ? core : core[name] || (core[name] = {});
+ var expProto = exports[PROTOTYPE] || (exports[PROTOTYPE] = {});
+ var key, own, out, exp;
+ if (IS_GLOBAL) source = name;
+
+ for (key in source) {
+ own = !IS_FORCED && target && target[key] !== undefined;
+ out = (own ? target : source)[key];
+ exp = IS_BIND && own ? ctx(out, global) : IS_PROTO && typeof out == 'function' ? ctx(Function.call, out) : out;
+ if (target) redefine(target, key, out, type & $export.U);
+ if (exports[key] != out) hide(exports, key, exp);
+ if (IS_PROTO && expProto[key] != out) expProto[key] = out;
+ }
+};
+
+global.core = core;
+$export.F = 1;
+$export.G = 2;
+$export.S = 4;
+$export.P = 8;
+$export.B = 16;
+$export.W = 32;
+$export.U = 64;
+$export.R = 128;
+module.exports = $export;
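+
+// Editor's note (descriptive, based on core-js 2.x conventions): the bitmap
+// flags roughly mean F = forced, G = global, S = static method, P = prototype
+// method, B = bind, W = wrap constructor, U = safe redefinition, R = real
+// proto method for the library build.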
+
+/***/ }),
+/* 8 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = module.exports = typeof window != 'undefined' && window.Math == Math ? window : typeof self != 'undefined' && self.Math == Math ? self : Function('return this')();
+if (typeof __g == 'number') __g = global;
+
+/***/ }),
+/* 9 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var core = module.exports = {
+ version: '2.6.9'
+};
+if (typeof __e == 'number') __e = core;
+
+/***/ }),
+/* 10 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var dP = __w_pdfjs_require__(11);
+
+var createDesc = __w_pdfjs_require__(19);
+
+module.exports = __w_pdfjs_require__(15) ? function (object, key, value) {
+ return dP.f(object, key, createDesc(1, value));
+} : function (object, key, value) {
+ object[key] = value;
+ return object;
+};
+
+/***/ }),
+/* 11 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var anObject = __w_pdfjs_require__(12);
+
+var IE8_DOM_DEFINE = __w_pdfjs_require__(14);
+
+var toPrimitive = __w_pdfjs_require__(18);
+
+var dP = Object.defineProperty;
+exports.f = __w_pdfjs_require__(15) ? Object.defineProperty : function defineProperty(O, P, Attributes) {
+ anObject(O);
+ P = toPrimitive(P, true);
+ anObject(Attributes);
+ if (IE8_DOM_DEFINE) try {
+ return dP(O, P, Attributes);
+ } catch (e) {}
+ if ('get' in Attributes || 'set' in Attributes) throw TypeError('Accessors not supported!');
+ if ('value' in Attributes) O[P] = Attributes.value;
+ return O;
+};
+
+/***/ }),
+/* 12 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(13);
+
+module.exports = function (it) {
+ if (!isObject(it)) throw TypeError(it + ' is not an object!');
+ return it;
+};
+
+/***/ }),
+/* 13 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+module.exports = function (it) {
+ return _typeof(it) === 'object' ? it !== null : typeof it === 'function';
+};
+
+/***/ }),
+/* 14 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = !__w_pdfjs_require__(15) && !__w_pdfjs_require__(16)(function () {
+ return Object.defineProperty(__w_pdfjs_require__(17)('div'), 'a', {
+ get: function get() {
+ return 7;
+ }
+ }).a != 7;
+});
+
+/***/ }),
+/* 15 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = !__w_pdfjs_require__(16)(function () {
+ return Object.defineProperty({}, 'a', {
+ get: function get() {
+ return 7;
+ }
+ }).a != 7;
+});
+
+/***/ }),
+/* 16 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (exec) {
+ try {
+ return !!exec();
+ } catch (e) {
+ return true;
+ }
+};
+
+/***/ }),
+/* 17 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(13);
+
+var document = __w_pdfjs_require__(8).document;
+
+var is = isObject(document) && isObject(document.createElement);
+
+module.exports = function (it) {
+ return is ? document.createElement(it) : {};
+};
+
+/***/ }),
+/* 18 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(13);
+
+module.exports = function (it, S) {
+ if (!isObject(it)) return it;
+ var fn, val;
+ if (S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it))) return val;
+ if (typeof (fn = it.valueOf) == 'function' && !isObject(val = fn.call(it))) return val;
+ if (!S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it))) return val;
+ throw TypeError("Can't convert object to primitive value");
+};
+
+/***/ }),
+/* 19 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (bitmap, value) {
+ return {
+ enumerable: !(bitmap & 1),
+ configurable: !(bitmap & 2),
+ writable: !(bitmap & 4),
+ value: value
+ };
+};
+
+/***/ }),
+/* 20 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(8);
+
+var hide = __w_pdfjs_require__(10);
+
+var has = __w_pdfjs_require__(21);
+
+var SRC = __w_pdfjs_require__(22)('src');
+
+var $toString = __w_pdfjs_require__(23);
+
+var TO_STRING = 'toString';
+var TPL = ('' + $toString).split(TO_STRING);
+
+__w_pdfjs_require__(9).inspectSource = function (it) {
+ return $toString.call(it);
+};
+
+(module.exports = function (O, key, val, safe) {
+ var isFunction = typeof val == 'function';
+ if (isFunction) has(val, 'name') || hide(val, 'name', key);
+ if (O[key] === val) return;
+ if (isFunction) has(val, SRC) || hide(val, SRC, O[key] ? '' + O[key] : TPL.join(String(key)));
+
+ if (O === global) {
+ O[key] = val;
+ } else if (!safe) {
+ delete O[key];
+ hide(O, key, val);
+ } else if (O[key]) {
+ O[key] = val;
+ } else {
+ hide(O, key, val);
+ }
+})(Function.prototype, TO_STRING, function toString() {
+ return typeof this == 'function' && this[SRC] || $toString.call(this);
+});
+
+/***/ }),
+/* 21 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var hasOwnProperty = {}.hasOwnProperty;
+
+module.exports = function (it, key) {
+ return hasOwnProperty.call(it, key);
+};
+
+/***/ }),
+/* 22 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var id = 0;
+var px = Math.random();
+
+module.exports = function (key) {
+ return 'Symbol('.concat(key === undefined ? '' : key, ')_', (++id + px).toString(36));
+};
+
+/***/ }),
+/* 23 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = __w_pdfjs_require__(24)('native-function-to-string', Function.toString);
+
+/***/ }),
+/* 24 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var core = __w_pdfjs_require__(9);
+
+var global = __w_pdfjs_require__(8);
+
+var SHARED = '__core-js_shared__';
+var store = global[SHARED] || (global[SHARED] = {});
+(module.exports = function (key, value) {
+ return store[key] || (store[key] = value !== undefined ? value : {});
+})('versions', []).push({
+ version: core.version,
+ mode: __w_pdfjs_require__(25) ? 'pure' : 'global',
+ copyright: '© 2019 Denis Pushkarev (zloirock.ru)'
+});
+
+/***/ }),
+/* 25 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = false;
+
+/***/ }),
+/* 26 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var aFunction = __w_pdfjs_require__(27);
+
+module.exports = function (fn, that, length) {
+ aFunction(fn);
+ if (that === undefined) return fn;
+
+ switch (length) {
+ case 1:
+ return function (a) {
+ return fn.call(that, a);
+ };
+
+ case 2:
+ return function (a, b) {
+ return fn.call(that, a, b);
+ };
+
+ case 3:
+ return function (a, b, c) {
+ return fn.call(that, a, b, c);
+ };
+ }
+
+ return function () {
+ return fn.apply(that, arguments);
+ };
+};
+
+/***/ }),
+/* 27 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (it) {
+ if (typeof it != 'function') throw TypeError(it + ' is not a function!');
+ return it;
+};
+
+/***/ }),
+/* 28 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toInteger = __w_pdfjs_require__(29);
+
+var min = Math.min;
+
+module.exports = function (it) {
+ return it > 0 ? min(toInteger(it), 0x1fffffffffffff) : 0;
+};
+
+/***/ }),
+/* 29 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var ceil = Math.ceil;
+var floor = Math.floor;
+
+module.exports = function (it) {
+ return isNaN(it = +it) ? 0 : (it > 0 ? floor : ceil)(it);
+};
+
+/***/ }),
+/* 30 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isRegExp = __w_pdfjs_require__(31);
+
+var defined = __w_pdfjs_require__(34);
+
+module.exports = function (that, searchString, NAME) {
+ if (isRegExp(searchString)) throw TypeError('String#' + NAME + " doesn't accept regex!");
+ return String(defined(that));
+};
+
+/***/ }),
+/* 31 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(13);
+
+var cof = __w_pdfjs_require__(32);
+
+var MATCH = __w_pdfjs_require__(33)('match');
+
+module.exports = function (it) {
+ var isRegExp;
+ return isObject(it) && ((isRegExp = it[MATCH]) !== undefined ? !!isRegExp : cof(it) == 'RegExp');
+};
+
+/***/ }),
+/* 32 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toString = {}.toString;
+
+module.exports = function (it) {
+ return toString.call(it).slice(8, -1);
+};
+
+/***/ }),
+/* 33 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var store = __w_pdfjs_require__(24)('wks');
+
+var uid = __w_pdfjs_require__(22);
+
+var _Symbol = __w_pdfjs_require__(8).Symbol;
+
+var USE_SYMBOL = typeof _Symbol == 'function';
+
+var $exports = module.exports = function (name) {
+ return store[name] || (store[name] = USE_SYMBOL && _Symbol[name] || (USE_SYMBOL ? _Symbol : uid)('Symbol.' + name));
+};
+
+$exports.store = store;
+
+/***/ }),
+/* 34 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (it) {
+ if (it == undefined) throw TypeError("Can't call method on " + it);
+ return it;
+};
+
+/***/ }),
+/* 35 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var MATCH = __w_pdfjs_require__(33)('match');
+
+module.exports = function (KEY) {
+ var re = /./;
+
+ try {
+ '/./'[KEY](re);
+ } catch (e) {
+ try {
+ re[MATCH] = false;
+ return !'/./'[KEY](re);
+ } catch (f) {}
+ }
+
+ return true;
+};
+
+/***/ }),
+/* 36 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(37);
+
+module.exports = __w_pdfjs_require__(9).String.endsWith;
+
+/***/ }),
+/* 37 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+var toLength = __w_pdfjs_require__(28);
+
+var context = __w_pdfjs_require__(30);
+
+var ENDS_WITH = 'endsWith';
+var $endsWith = ''[ENDS_WITH];
+$export($export.P + $export.F * __w_pdfjs_require__(35)(ENDS_WITH), 'String', {
+ endsWith: function endsWith(searchString) {
+ var that = context(this, searchString, ENDS_WITH);
+ var endPosition = arguments.length > 1 ? arguments[1] : undefined;
+ var len = toLength(that.length);
+ var end = endPosition === undefined ? len : Math.min(toLength(endPosition), len);
+ var search = String(searchString);
+ return $endsWith ? $endsWith.call(that, search, end) : that.slice(end - search.length, end) === search;
+ }
+});
+
+/***/ }),
+/* 38 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(39);
+
+module.exports = __w_pdfjs_require__(9).String.includes;
+
+/***/ }),
+/* 39 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+var context = __w_pdfjs_require__(30);
+
+var INCLUDES = 'includes';
+$export($export.P + $export.F * __w_pdfjs_require__(35)(INCLUDES), 'String', {
+ includes: function includes(searchString) {
+ return !!~context(this, searchString, INCLUDES).indexOf(searchString, arguments.length > 1 ? arguments[1] : undefined);
+ }
+});
+
+/***/ }),
+/* 40 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(41);
+
+module.exports = __w_pdfjs_require__(9).Array.includes;
+
+/***/ }),
+/* 41 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+var $includes = __w_pdfjs_require__(42)(true);
+
+$export($export.P, 'Array', {
+ includes: function includes(el) {
+ return $includes(this, el, arguments.length > 1 ? arguments[1] : undefined);
+ }
+});
+
+__w_pdfjs_require__(46)('includes');
+
+/***/ }),
+/* 42 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toIObject = __w_pdfjs_require__(43);
+
+var toLength = __w_pdfjs_require__(28);
+
+var toAbsoluteIndex = __w_pdfjs_require__(45);
+
+module.exports = function (IS_INCLUDES) {
+ return function ($this, el, fromIndex) {
+ var O = toIObject($this);
+ var length = toLength(O.length);
+ var index = toAbsoluteIndex(fromIndex, length);
+ var value;
+ if (IS_INCLUDES && el != el) while (length > index) {
+ value = O[index++];
+ if (value != value) return true;
+ } else for (; length > index; index++) {
+ if (IS_INCLUDES || index in O) {
+ if (O[index] === el) return IS_INCLUDES || index || 0;
+ }
+ }
+ return !IS_INCLUDES && -1;
+ };
+};
+
+/***/ }),
+/* 43 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var IObject = __w_pdfjs_require__(44);
+
+var defined = __w_pdfjs_require__(34);
+
+module.exports = function (it) {
+ return IObject(defined(it));
+};
+
+/***/ }),
+/* 44 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var cof = __w_pdfjs_require__(32);
+
+module.exports = Object('z').propertyIsEnumerable(0) ? Object : function (it) {
+ return cof(it) == 'String' ? it.split('') : Object(it);
+};
+
+/***/ }),
+/* 45 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toInteger = __w_pdfjs_require__(29);
+
+var max = Math.max;
+var min = Math.min;
+
+module.exports = function (index, length) {
+ index = toInteger(index);
+ return index < 0 ? max(index + length, 0) : min(index, length);
+};
+
+/***/ }),
+/* 46 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var UNSCOPABLES = __w_pdfjs_require__(33)('unscopables');
+
+var ArrayProto = Array.prototype;
+if (ArrayProto[UNSCOPABLES] == undefined) __w_pdfjs_require__(10)(ArrayProto, UNSCOPABLES, {});
+
+module.exports = function (key) {
+ ArrayProto[UNSCOPABLES][key] = true;
+};
+
+/***/ }),
+/* 47 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(48);
+
+__w_pdfjs_require__(63);
+
+module.exports = __w_pdfjs_require__(9).Array.from;
+
+/***/ }),
+/* 48 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $at = __w_pdfjs_require__(49)(true);
+
+__w_pdfjs_require__(50)(String, 'String', function (iterated) {
+ this._t = String(iterated);
+ this._i = 0;
+}, function () {
+ var O = this._t;
+ var index = this._i;
+ var point;
+ if (index >= O.length) return {
+ value: undefined,
+ done: true
+ };
+ point = $at(O, index);
+ this._i += point.length;
+ return {
+ value: point,
+ done: false
+ };
+});
+
+/***/ }),
+/* 49 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toInteger = __w_pdfjs_require__(29);
+
+var defined = __w_pdfjs_require__(34);
+
+module.exports = function (TO_STRING) {
+ return function (that, pos) {
+ var s = String(defined(that));
+ var i = toInteger(pos);
+ var l = s.length;
+ var a, b;
+ if (i < 0 || i >= l) return TO_STRING ? '' : undefined;
+ a = s.charCodeAt(i);
+ return a < 0xd800 || a > 0xdbff || i + 1 === l || (b = s.charCodeAt(i + 1)) < 0xdc00 || b > 0xdfff ? TO_STRING ? s.charAt(i) : a : TO_STRING ? s.slice(i, i + 2) : (a - 0xd800 << 10) + (b - 0xdc00) + 0x10000;
+ };
+};
+
+/***/ }),
+/* 50 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var LIBRARY = __w_pdfjs_require__(25);
+
+var $export = __w_pdfjs_require__(7);
+
+var redefine = __w_pdfjs_require__(20);
+
+var hide = __w_pdfjs_require__(10);
+
+var Iterators = __w_pdfjs_require__(51);
+
+var $iterCreate = __w_pdfjs_require__(52);
+
+var setToStringTag = __w_pdfjs_require__(60);
+
+var getPrototypeOf = __w_pdfjs_require__(61);
+
+var ITERATOR = __w_pdfjs_require__(33)('iterator');
+
+var BUGGY = !([].keys && 'next' in [].keys());
+var FF_ITERATOR = '@@iterator';
+var KEYS = 'keys';
+var VALUES = 'values';
+
+var returnThis = function returnThis() {
+ return this;
+};
+
+module.exports = function (Base, NAME, Constructor, next, DEFAULT, IS_SET, FORCED) {
+ $iterCreate(Constructor, NAME, next);
+
+ var getMethod = function getMethod(kind) {
+ if (!BUGGY && kind in proto) return proto[kind];
+
+ switch (kind) {
+ case KEYS:
+ return function keys() {
+ return new Constructor(this, kind);
+ };
+
+ case VALUES:
+ return function values() {
+ return new Constructor(this, kind);
+ };
+ }
+
+ return function entries() {
+ return new Constructor(this, kind);
+ };
+ };
+
+ var TAG = NAME + ' Iterator';
+ var DEF_VALUES = DEFAULT == VALUES;
+ var VALUES_BUG = false;
+ var proto = Base.prototype;
+ var $native = proto[ITERATOR] || proto[FF_ITERATOR] || DEFAULT && proto[DEFAULT];
+ var $default = $native || getMethod(DEFAULT);
+ var $entries = DEFAULT ? !DEF_VALUES ? $default : getMethod('entries') : undefined;
+ var $anyNative = NAME == 'Array' ? proto.entries || $native : $native;
+ var methods, key, IteratorPrototype;
+
+ if ($anyNative) {
+ IteratorPrototype = getPrototypeOf($anyNative.call(new Base()));
+
+ if (IteratorPrototype !== Object.prototype && IteratorPrototype.next) {
+ setToStringTag(IteratorPrototype, TAG, true);
+ if (!LIBRARY && typeof IteratorPrototype[ITERATOR] != 'function') hide(IteratorPrototype, ITERATOR, returnThis);
+ }
+ }
+
+ if (DEF_VALUES && $native && $native.name !== VALUES) {
+ VALUES_BUG = true;
+
+ $default = function values() {
+ return $native.call(this);
+ };
+ }
+
+ if ((!LIBRARY || FORCED) && (BUGGY || VALUES_BUG || !proto[ITERATOR])) {
+ hide(proto, ITERATOR, $default);
+ }
+
+ Iterators[NAME] = $default;
+ Iterators[TAG] = returnThis;
+
+ if (DEFAULT) {
+ methods = {
+ values: DEF_VALUES ? $default : getMethod(VALUES),
+ keys: IS_SET ? $default : getMethod(KEYS),
+ entries: $entries
+ };
+ if (FORCED) for (key in methods) {
+ if (!(key in proto)) redefine(proto, key, methods[key]);
+ } else $export($export.P + $export.F * (BUGGY || VALUES_BUG), NAME, methods);
+ }
+
+ return methods;
+};
+
+/***/ }),
+/* 51 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = {};
+
+/***/ }),
+/* 52 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var create = __w_pdfjs_require__(53);
+
+var descriptor = __w_pdfjs_require__(19);
+
+var setToStringTag = __w_pdfjs_require__(60);
+
+var IteratorPrototype = {};
+
+__w_pdfjs_require__(10)(IteratorPrototype, __w_pdfjs_require__(33)('iterator'), function () {
+ return this;
+});
+
+module.exports = function (Constructor, NAME, next) {
+ Constructor.prototype = create(IteratorPrototype, {
+ next: descriptor(1, next)
+ });
+ setToStringTag(Constructor, NAME + ' Iterator');
+};
+
+/***/ }),
+/* 53 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var anObject = __w_pdfjs_require__(12);
+
+var dPs = __w_pdfjs_require__(54);
+
+var enumBugKeys = __w_pdfjs_require__(58);
+
+var IE_PROTO = __w_pdfjs_require__(57)('IE_PROTO');
+
+var Empty = function Empty() {};
+
+var PROTOTYPE = 'prototype';
+
+var _createDict = function createDict() {
+ var iframe = __w_pdfjs_require__(17)('iframe');
+
+ var i = enumBugKeys.length;
+ var lt = '<';
+ var gt = '>';
+ var iframeDocument;
+ iframe.style.display = 'none';
+
+ __w_pdfjs_require__(59).appendChild(iframe);
+
+ iframe.src = 'javascript:';
+ iframeDocument = iframe.contentWindow.document;
+ iframeDocument.open();
+ iframeDocument.write(lt + 'script' + gt + 'document.F=Object' + lt + '/script' + gt);
+ iframeDocument.close();
+ _createDict = iframeDocument.F;
+
+ while (i--) {
+ delete _createDict[PROTOTYPE][enumBugKeys[i]];
+ }
+
+ return _createDict();
+};
+
+module.exports = Object.create || function create(O, Properties) {
+ var result;
+
+ if (O !== null) {
+ Empty[PROTOTYPE] = anObject(O);
+ result = new Empty();
+ Empty[PROTOTYPE] = null;
+ result[IE_PROTO] = O;
+ } else result = _createDict();
+
+ return Properties === undefined ? result : dPs(result, Properties);
+};
+
+/***/ }),
+/* 54 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var dP = __w_pdfjs_require__(11);
+
+var anObject = __w_pdfjs_require__(12);
+
+var getKeys = __w_pdfjs_require__(55);
+
+module.exports = __w_pdfjs_require__(15) ? Object.defineProperties : function defineProperties(O, Properties) {
+ anObject(O);
+ var keys = getKeys(Properties);
+ var length = keys.length;
+ var i = 0;
+ var P;
+
+ while (length > i) {
+ dP.f(O, P = keys[i++], Properties[P]);
+ }
+
+ return O;
+};
+
+/***/ }),
+/* 55 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $keys = __w_pdfjs_require__(56);
+
+var enumBugKeys = __w_pdfjs_require__(58);
+
+module.exports = Object.keys || function keys(O) {
+ return $keys(O, enumBugKeys);
+};
+
+/***/ }),
+/* 56 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var has = __w_pdfjs_require__(21);
+
+var toIObject = __w_pdfjs_require__(43);
+
+var arrayIndexOf = __w_pdfjs_require__(42)(false);
+
+var IE_PROTO = __w_pdfjs_require__(57)('IE_PROTO');
+
+module.exports = function (object, names) {
+ var O = toIObject(object);
+ var i = 0;
+ var result = [];
+ var key;
+
+ for (key in O) {
+ if (key != IE_PROTO) has(O, key) && result.push(key);
+ }
+
+ while (names.length > i) {
+ if (has(O, key = names[i++])) {
+ ~arrayIndexOf(result, key) || result.push(key);
+ }
+ }
+
+ return result;
+};
+
+/***/ }),
+/* 57 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var shared = __w_pdfjs_require__(24)('keys');
+
+var uid = __w_pdfjs_require__(22);
+
+module.exports = function (key) {
+ return shared[key] || (shared[key] = uid(key));
+};
+
+/***/ }),
+/* 58 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = 'constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf'.split(',');
+
+/***/ }),
+/* 59 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var document = __w_pdfjs_require__(8).document;
+
+module.exports = document && document.documentElement;
+
+/***/ }),
+/* 60 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var def = __w_pdfjs_require__(11).f;
+
+var has = __w_pdfjs_require__(21);
+
+var TAG = __w_pdfjs_require__(33)('toStringTag');
+
+module.exports = function (it, tag, stat) {
+ if (it && !has(it = stat ? it : it.prototype, TAG)) def(it, TAG, {
+ configurable: true,
+ value: tag
+ });
+};
+
+/***/ }),
+/* 61 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var has = __w_pdfjs_require__(21);
+
+var toObject = __w_pdfjs_require__(62);
+
+var IE_PROTO = __w_pdfjs_require__(57)('IE_PROTO');
+
+var ObjectProto = Object.prototype;
+
+module.exports = Object.getPrototypeOf || function (O) {
+ O = toObject(O);
+ if (has(O, IE_PROTO)) return O[IE_PROTO];
+
+ if (typeof O.constructor == 'function' && O instanceof O.constructor) {
+ return O.constructor.prototype;
+ }
+
+ return O instanceof Object ? ObjectProto : null;
+};
+
+/***/ }),
+/* 62 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var defined = __w_pdfjs_require__(34);
+
+module.exports = function (it) {
+ return Object(defined(it));
+};
+
+/***/ }),
+/* 63 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var ctx = __w_pdfjs_require__(26);
+
+var $export = __w_pdfjs_require__(7);
+
+var toObject = __w_pdfjs_require__(62);
+
+var call = __w_pdfjs_require__(64);
+
+var isArrayIter = __w_pdfjs_require__(65);
+
+var toLength = __w_pdfjs_require__(28);
+
+var createProperty = __w_pdfjs_require__(66);
+
+var getIterFn = __w_pdfjs_require__(67);
+
+$export($export.S + $export.F * !__w_pdfjs_require__(69)(function (iter) {
+ Array.from(iter);
+}), 'Array', {
+ from: function from(arrayLike) {
+ var O = toObject(arrayLike);
+ var C = typeof this == 'function' ? this : Array;
+ var aLen = arguments.length;
+ var mapfn = aLen > 1 ? arguments[1] : undefined;
+ var mapping = mapfn !== undefined;
+ var index = 0;
+ var iterFn = getIterFn(O);
+ var length, result, step, iterator;
+ if (mapping) mapfn = ctx(mapfn, aLen > 2 ? arguments[2] : undefined, 2);
+
+ if (iterFn != undefined && !(C == Array && isArrayIter(iterFn))) {
+ for (iterator = iterFn.call(O), result = new C(); !(step = iterator.next()).done; index++) {
+ createProperty(result, index, mapping ? call(iterator, mapfn, [step.value, index], true) : step.value);
+ }
+ } else {
+ length = toLength(O.length);
+
+ for (result = new C(length); length > index; index++) {
+ createProperty(result, index, mapping ? mapfn(O[index], index) : O[index]);
+ }
+ }
+
+ result.length = index;
+ return result;
+ }
+});
+
+/***/ }),
+/* 64 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var anObject = __w_pdfjs_require__(12);
+
+module.exports = function (iterator, fn, value, entries) {
+ try {
+ return entries ? fn(anObject(value)[0], value[1]) : fn(value);
+ } catch (e) {
+ var ret = iterator['return'];
+ if (ret !== undefined) anObject(ret.call(iterator));
+ throw e;
+ }
+};
+
+/***/ }),
+/* 65 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var Iterators = __w_pdfjs_require__(51);
+
+var ITERATOR = __w_pdfjs_require__(33)('iterator');
+
+var ArrayProto = Array.prototype;
+
+module.exports = function (it) {
+ return it !== undefined && (Iterators.Array === it || ArrayProto[ITERATOR] === it);
+};
+
+/***/ }),
+/* 66 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $defineProperty = __w_pdfjs_require__(11);
+
+var createDesc = __w_pdfjs_require__(19);
+
+module.exports = function (object, index, value) {
+ if (index in object) $defineProperty.f(object, index, createDesc(0, value));else object[index] = value;
+};
+
+/***/ }),
+/* 67 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var classof = __w_pdfjs_require__(68);
+
+var ITERATOR = __w_pdfjs_require__(33)('iterator');
+
+var Iterators = __w_pdfjs_require__(51);
+
+module.exports = __w_pdfjs_require__(9).getIteratorMethod = function (it) {
+ if (it != undefined) return it[ITERATOR] || it['@@iterator'] || Iterators[classof(it)];
+};
+
+/***/ }),
+/* 68 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var cof = __w_pdfjs_require__(32);
+
+var TAG = __w_pdfjs_require__(33)('toStringTag');
+
+var ARG = cof(function () {
+ return arguments;
+}()) == 'Arguments';
+
+var tryGet = function tryGet(it, key) {
+ try {
+ return it[key];
+ } catch (e) {}
+};
+
+module.exports = function (it) {
+ var O, T, B;
+ return it === undefined ? 'Undefined' : it === null ? 'Null' : typeof (T = tryGet(O = Object(it), TAG)) == 'string' ? T : ARG ? cof(O) : (B = cof(O)) == 'Object' && typeof O.callee == 'function' ? 'Arguments' : B;
+};
+
+/***/ }),
+/* 69 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var ITERATOR = __w_pdfjs_require__(33)('iterator');
+
+var SAFE_CLOSING = false;
+
+try {
+ var riter = [7][ITERATOR]();
+
+ riter['return'] = function () {
+ SAFE_CLOSING = true;
+ };
+
+ Array.from(riter, function () {
+ throw 2;
+ });
+} catch (e) {}
+
+module.exports = function (exec, skipClosing) {
+ if (!skipClosing && !SAFE_CLOSING) return false;
+ var safe = false;
+
+ try {
+ var arr = [7];
+ var iter = arr[ITERATOR]();
+
+ iter.next = function () {
+ return {
+ done: safe = true
+ };
+ };
+
+ arr[ITERATOR] = function () {
+ return iter;
+ };
+
+ exec(arr);
+ } catch (e) {}
+
+ return safe;
+};
+
+/***/ }),
+/* 70 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(71);
+
+module.exports = __w_pdfjs_require__(9).Object.assign;
+
+/***/ }),
+/* 71 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+$export($export.S + $export.F, 'Object', {
+ assign: __w_pdfjs_require__(72)
+});
+
+/***/ }),
+/* 72 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var DESCRIPTORS = __w_pdfjs_require__(15);
+
+var getKeys = __w_pdfjs_require__(55);
+
+var gOPS = __w_pdfjs_require__(73);
+
+var pIE = __w_pdfjs_require__(74);
+
+var toObject = __w_pdfjs_require__(62);
+
+var IObject = __w_pdfjs_require__(44);
+
+var $assign = Object.assign;
+module.exports = !$assign || __w_pdfjs_require__(16)(function () {
+ var A = {};
+ var B = {};
+ var S = Symbol();
+ var K = 'abcdefghijklmnopqrst';
+ A[S] = 7;
+ K.split('').forEach(function (k) {
+ B[k] = k;
+ });
+ return $assign({}, A)[S] != 7 || Object.keys($assign({}, B)).join('') != K;
+}) ? function assign(target, source) {
+ var T = toObject(target);
+ var aLen = arguments.length;
+ var index = 1;
+ var getSymbols = gOPS.f;
+ var isEnum = pIE.f;
+
+ while (aLen > index) {
+ var S = IObject(arguments[index++]);
+ var keys = getSymbols ? getKeys(S).concat(getSymbols(S)) : getKeys(S);
+ var length = keys.length;
+ var j = 0;
+ var key;
+
+ while (length > j) {
+ key = keys[j++];
+ if (!DESCRIPTORS || isEnum.call(S, key)) T[key] = S[key];
+ }
+ }
+
+ return T;
+} : $assign;
+
+/***/ }),
+/* 73 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+exports.f = Object.getOwnPropertySymbols;
+
+/***/ }),
+/* 74 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+exports.f = {}.propertyIsEnumerable;
+
+/***/ }),
+/* 75 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(76);
+
+module.exports = __w_pdfjs_require__(9).Math.log2;
+
+/***/ }),
+/* 76 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+$export($export.S, 'Math', {
+ log2: function log2(x) {
+ return Math.log(x) / Math.LN2;
+ }
+});
+
+/***/ }),
+/* 77 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(78);
+
+module.exports = __w_pdfjs_require__(9).Number.isNaN;
+
+/***/ }),
+/* 78 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+$export($export.S, 'Number', {
+ isNaN: function isNaN(number) {
+ return number != number;
+ }
+});
+
+/***/ }),
+/* 79 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(80);
+
+module.exports = __w_pdfjs_require__(9).Number.isInteger;
+
+/***/ }),
+/* 80 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+$export($export.S, 'Number', {
+ isInteger: __w_pdfjs_require__(81)
+});
+
+/***/ }),
+/* 81 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(13);
+
+var floor = Math.floor;
+
+module.exports = function isInteger(it) {
+ return !isObject(it) && isFinite(it) && floor(it) === it;
+};
+
+/***/ }),
+/* 82 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(83);
+
+__w_pdfjs_require__(48);
+
+__w_pdfjs_require__(84);
+
+__w_pdfjs_require__(87);
+
+__w_pdfjs_require__(100);
+
+__w_pdfjs_require__(101);
+
+module.exports = __w_pdfjs_require__(9).Promise;
+
+/***/ }),
+/* 83 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var classof = __w_pdfjs_require__(68);
+
+var test = {};
+test[__w_pdfjs_require__(33)('toStringTag')] = 'z';
+
+if (test + '' != '[object z]') {
+ __w_pdfjs_require__(20)(Object.prototype, 'toString', function toString() {
+ return '[object ' + classof(this) + ']';
+ }, true);
+}
+
+/***/ }),
+/* 84 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $iterators = __w_pdfjs_require__(85);
+
+var getKeys = __w_pdfjs_require__(55);
+
+var redefine = __w_pdfjs_require__(20);
+
+var global = __w_pdfjs_require__(8);
+
+var hide = __w_pdfjs_require__(10);
+
+var Iterators = __w_pdfjs_require__(51);
+
+var wks = __w_pdfjs_require__(33);
+
+var ITERATOR = wks('iterator');
+var TO_STRING_TAG = wks('toStringTag');
+var ArrayValues = Iterators.Array;
+var DOMIterables = {
+ CSSRuleList: true,
+ CSSStyleDeclaration: false,
+ CSSValueList: false,
+ ClientRectList: false,
+ DOMRectList: false,
+ DOMStringList: false,
+ DOMTokenList: true,
+ DataTransferItemList: false,
+ FileList: false,
+ HTMLAllCollection: false,
+ HTMLCollection: false,
+ HTMLFormElement: false,
+ HTMLSelectElement: false,
+ MediaList: true,
+ MimeTypeArray: false,
+ NamedNodeMap: false,
+ NodeList: true,
+ PaintRequestList: false,
+ Plugin: false,
+ PluginArray: false,
+ SVGLengthList: false,
+ SVGNumberList: false,
+ SVGPathSegList: false,
+ SVGPointList: false,
+ SVGStringList: false,
+ SVGTransformList: false,
+ SourceBufferList: false,
+ StyleSheetList: true,
+ TextTrackCueList: false,
+ TextTrackList: false,
+ TouchList: false
+};
+
+for (var collections = getKeys(DOMIterables), i = 0; i < collections.length; i++) {
+ var NAME = collections[i];
+ var explicit = DOMIterables[NAME];
+ var Collection = global[NAME];
+ var proto = Collection && Collection.prototype;
+ var key;
+
+ if (proto) {
+ if (!proto[ITERATOR]) hide(proto, ITERATOR, ArrayValues);
+ if (!proto[TO_STRING_TAG]) hide(proto, TO_STRING_TAG, NAME);
+ Iterators[NAME] = ArrayValues;
+ if (explicit) for (key in $iterators) {
+ if (!proto[key]) redefine(proto, key, $iterators[key], true);
+ }
+ }
+}
+
+/***/ }),
+/* 85 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var addToUnscopables = __w_pdfjs_require__(46);
+
+var step = __w_pdfjs_require__(86);
+
+var Iterators = __w_pdfjs_require__(51);
+
+var toIObject = __w_pdfjs_require__(43);
+
+module.exports = __w_pdfjs_require__(50)(Array, 'Array', function (iterated, kind) {
+ this._t = toIObject(iterated);
+ this._i = 0;
+ this._k = kind;
+}, function () {
+ var O = this._t;
+ var kind = this._k;
+ var index = this._i++;
+
+ if (!O || index >= O.length) {
+ this._t = undefined;
+ return step(1);
+ }
+
+ if (kind == 'keys') return step(0, index);
+ if (kind == 'values') return step(0, O[index]);
+ return step(0, [index, O[index]]);
+}, 'values');
+Iterators.Arguments = Iterators.Array;
+addToUnscopables('keys');
+addToUnscopables('values');
+addToUnscopables('entries');
+
+/***/ }),
+/* 86 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (done, value) {
+ return {
+ value: value,
+ done: !!done
+ };
+};
+
+/***/ }),
+/* 87 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
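+// ES2015 Promise polyfill: the USE_NATIVE check below rejects native
+// implementations with broken species/subclass support (including the V8 6.6 /
+// Chrome 66 bug) and otherwise installs a spec-style replacement with
+// microtask-based notification and unhandled-rejection reporting.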
+var LIBRARY = __w_pdfjs_require__(25);
+
+var global = __w_pdfjs_require__(8);
+
+var ctx = __w_pdfjs_require__(26);
+
+var classof = __w_pdfjs_require__(68);
+
+var $export = __w_pdfjs_require__(7);
+
+var isObject = __w_pdfjs_require__(13);
+
+var aFunction = __w_pdfjs_require__(27);
+
+var anInstance = __w_pdfjs_require__(88);
+
+var forOf = __w_pdfjs_require__(89);
+
+var speciesConstructor = __w_pdfjs_require__(90);
+
+var task = __w_pdfjs_require__(91).set;
+
+var microtask = __w_pdfjs_require__(93)();
+
+var newPromiseCapabilityModule = __w_pdfjs_require__(94);
+
+var perform = __w_pdfjs_require__(95);
+
+var userAgent = __w_pdfjs_require__(96);
+
+var promiseResolve = __w_pdfjs_require__(97);
+
+var PROMISE = 'Promise';
+var TypeError = global.TypeError;
+var process = global.process;
+var versions = process && process.versions;
+var v8 = versions && versions.v8 || '';
+var $Promise = global[PROMISE];
+var isNode = classof(process) == 'process';
+
+var empty = function empty() {};
+
+var Internal, newGenericPromiseCapability, OwnPromiseCapability, Wrapper;
+var newPromiseCapability = newGenericPromiseCapability = newPromiseCapabilityModule.f;
+var USE_NATIVE = !!function () {
+ try {
+ var promise = $Promise.resolve(1);
+
+ var FakePromise = (promise.constructor = {})[__w_pdfjs_require__(33)('species')] = function (exec) {
+ exec(empty, empty);
+ };
+
+ return (isNode || typeof PromiseRejectionEvent == 'function') && promise.then(empty) instanceof FakePromise && v8.indexOf('6.6') !== 0 && userAgent.indexOf('Chrome/66') === -1;
+ } catch (e) {}
+}();
+
+var isThenable = function isThenable(it) {
+ var then;
+ return isObject(it) && typeof (then = it.then) == 'function' ? then : false;
+};
+
+var notify = function notify(promise, isReject) {
+ if (promise._n) return;
+ promise._n = true;
+ var chain = promise._c;
+ microtask(function () {
+ var value = promise._v;
+ var ok = promise._s == 1;
+ var i = 0;
+
+ var run = function run(reaction) {
+ var handler = ok ? reaction.ok : reaction.fail;
+ var resolve = reaction.resolve;
+ var reject = reaction.reject;
+ var domain = reaction.domain;
+ var result, then, exited;
+
+ try {
+ if (handler) {
+ if (!ok) {
+ if (promise._h == 2) onHandleUnhandled(promise);
+ promise._h = 1;
+ }
+
+ if (handler === true) result = value;else {
+ if (domain) domain.enter();
+ result = handler(value);
+
+ if (domain) {
+ domain.exit();
+ exited = true;
+ }
+ }
+
+ if (result === reaction.promise) {
+ reject(TypeError('Promise-chain cycle'));
+ } else if (then = isThenable(result)) {
+ then.call(result, resolve, reject);
+ } else resolve(result);
+ } else reject(value);
+ } catch (e) {
+ if (domain && !exited) domain.exit();
+ reject(e);
+ }
+ };
+
+ while (chain.length > i) {
+ run(chain[i++]);
+ }
+
+ promise._c = [];
+ promise._n = false;
+ if (isReject && !promise._h) onUnhandled(promise);
+ });
+};
+
+var onUnhandled = function onUnhandled(promise) {
+ task.call(global, function () {
+ var value = promise._v;
+ var unhandled = isUnhandled(promise);
+ var result, handler, console;
+
+ if (unhandled) {
+ result = perform(function () {
+ if (isNode) {
+ process.emit('unhandledRejection', value, promise);
+ } else if (handler = global.onunhandledrejection) {
+ handler({
+ promise: promise,
+ reason: value
+ });
+ } else if ((console = global.console) && console.error) {
+ console.error('Unhandled promise rejection', value);
+ }
+ });
+ promise._h = isNode || isUnhandled(promise) ? 2 : 1;
+ }
+
+ promise._a = undefined;
+ if (unhandled && result.e) throw result.v;
+ });
+};
+
+var isUnhandled = function isUnhandled(promise) {
+ return promise._h !== 1 && (promise._a || promise._c).length === 0;
+};
+
+var onHandleUnhandled = function onHandleUnhandled(promise) {
+ task.call(global, function () {
+ var handler;
+
+ if (isNode) {
+ process.emit('rejectionHandled', promise);
+ } else if (handler = global.onrejectionhandled) {
+ handler({
+ promise: promise,
+ reason: promise._v
+ });
+ }
+ });
+};
+
+var $reject = function $reject(value) {
+ var promise = this;
+ if (promise._d) return;
+ promise._d = true;
+ promise = promise._w || promise;
+ promise._v = value;
+ promise._s = 2;
+ if (!promise._a) promise._a = promise._c.slice();
+ notify(promise, true);
+};
+
+var $resolve = function $resolve(value) {
+ var promise = this;
+ var then;
+ if (promise._d) return;
+ promise._d = true;
+ promise = promise._w || promise;
+
+ try {
+ if (promise === value) throw TypeError("Promise can't be resolved itself");
+
+ if (then = isThenable(value)) {
+ microtask(function () {
+ var wrapper = {
+ _w: promise,
+ _d: false
+ };
+
+ try {
+ then.call(value, ctx($resolve, wrapper, 1), ctx($reject, wrapper, 1));
+ } catch (e) {
+ $reject.call(wrapper, e);
+ }
+ });
+ } else {
+ promise._v = value;
+ promise._s = 1;
+ notify(promise, false);
+ }
+ } catch (e) {
+ $reject.call({
+ _w: promise,
+ _d: false
+ }, e);
+ }
+};
+
+if (!USE_NATIVE) {
+ $Promise = function Promise(executor) {
+ anInstance(this, $Promise, PROMISE, '_h');
+ aFunction(executor);
+ Internal.call(this);
+
+ try {
+ executor(ctx($resolve, this, 1), ctx($reject, this, 1));
+ } catch (err) {
+ $reject.call(this, err);
+ }
+ };
+
+ Internal = function Promise(executor) {
+ this._c = [];
+ this._a = undefined;
+ this._s = 0;
+ this._d = false;
+ this._v = undefined;
+ this._h = 0;
+ this._n = false;
+ };
+
+ Internal.prototype = __w_pdfjs_require__(98)($Promise.prototype, {
+ then: function then(onFulfilled, onRejected) {
+ var reaction = newPromiseCapability(speciesConstructor(this, $Promise));
+ reaction.ok = typeof onFulfilled == 'function' ? onFulfilled : true;
+ reaction.fail = typeof onRejected == 'function' && onRejected;
+ reaction.domain = isNode ? process.domain : undefined;
+
+ this._c.push(reaction);
+
+ if (this._a) this._a.push(reaction);
+ if (this._s) notify(this, false);
+ return reaction.promise;
+ },
+ 'catch': function _catch(onRejected) {
+ return this.then(undefined, onRejected);
+ }
+ });
+
+ OwnPromiseCapability = function OwnPromiseCapability() {
+ var promise = new Internal();
+ this.promise = promise;
+ this.resolve = ctx($resolve, promise, 1);
+ this.reject = ctx($reject, promise, 1);
+ };
+
+ newPromiseCapabilityModule.f = newPromiseCapability = function newPromiseCapability(C) {
+ return C === $Promise || C === Wrapper ? new OwnPromiseCapability(C) : newGenericPromiseCapability(C);
+ };
+}
+
+$export($export.G + $export.W + $export.F * !USE_NATIVE, {
+ Promise: $Promise
+});
+
+__w_pdfjs_require__(60)($Promise, PROMISE);
+
+__w_pdfjs_require__(99)(PROMISE);
+
+Wrapper = __w_pdfjs_require__(9)[PROMISE];
+$export($export.S + $export.F * !USE_NATIVE, PROMISE, {
+ reject: function reject(r) {
+ var capability = newPromiseCapability(this);
+ var $$reject = capability.reject;
+ $$reject(r);
+ return capability.promise;
+ }
+});
+$export($export.S + $export.F * (LIBRARY || !USE_NATIVE), PROMISE, {
+ resolve: function resolve(x) {
+ return promiseResolve(LIBRARY && this === Wrapper ? $Promise : this, x);
+ }
+});
+$export($export.S + $export.F * !(USE_NATIVE && __w_pdfjs_require__(69)(function (iter) {
+ $Promise.all(iter)['catch'](empty);
+})), PROMISE, {
+ all: function all(iterable) {
+ var C = this;
+ var capability = newPromiseCapability(C);
+ var resolve = capability.resolve;
+ var reject = capability.reject;
+ var result = perform(function () {
+ var values = [];
+ var index = 0;
+ var remaining = 1;
+ forOf(iterable, false, function (promise) {
+ var $index = index++;
+ var alreadyCalled = false;
+ values.push(undefined);
+ remaining++;
+ C.resolve(promise).then(function (value) {
+ if (alreadyCalled) return;
+ alreadyCalled = true;
+ values[$index] = value;
+ --remaining || resolve(values);
+ }, reject);
+ });
+ --remaining || resolve(values);
+ });
+ if (result.e) reject(result.v);
+ return capability.promise;
+ },
+ race: function race(iterable) {
+ var C = this;
+ var capability = newPromiseCapability(C);
+ var reject = capability.reject;
+ var result = perform(function () {
+ forOf(iterable, false, function (promise) {
+ C.resolve(promise).then(capability.resolve, reject);
+ });
+ });
+ if (result.e) reject(result.v);
+ return capability.promise;
+ }
+});
+
+/***/ }),
+/* 88 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (it, Constructor, name, forbiddenField) {
+ if (!(it instanceof Constructor) || forbiddenField !== undefined && forbiddenField in it) {
+ throw TypeError(name + ': incorrect invocation!');
+ }
+
+ return it;
+};
+
+/***/ }),
+/* 89 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var ctx = __w_pdfjs_require__(26);
+
+var call = __w_pdfjs_require__(64);
+
+var isArrayIter = __w_pdfjs_require__(65);
+
+var anObject = __w_pdfjs_require__(12);
+
+var toLength = __w_pdfjs_require__(28);
+
+var getIterFn = __w_pdfjs_require__(67);
+
+var BREAK = {};
+var RETURN = {};
+
+var _exports = module.exports = function (iterable, entries, fn, that, ITERATOR) {
+ var iterFn = ITERATOR ? function () {
+ return iterable;
+ } : getIterFn(iterable);
+ var f = ctx(fn, that, entries ? 2 : 1);
+ var index = 0;
+ var length, step, iterator, result;
+ if (typeof iterFn != 'function') throw TypeError(iterable + ' is not iterable!');
+ if (isArrayIter(iterFn)) for (length = toLength(iterable.length); length > index; index++) {
+ result = entries ? f(anObject(step = iterable[index])[0], step[1]) : f(iterable[index]);
+ if (result === BREAK || result === RETURN) return result;
+ } else for (iterator = iterFn.call(iterable); !(step = iterator.next()).done;) {
+ result = call(iterator, f, step.value, entries);
+ if (result === BREAK || result === RETURN) return result;
+ }
+};
+
+_exports.BREAK = BREAK;
+_exports.RETURN = RETURN;
+
+/***/ }),
+/* 90 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var anObject = __w_pdfjs_require__(12);
+
+var aFunction = __w_pdfjs_require__(27);
+
+var SPECIES = __w_pdfjs_require__(33)('species');
+
+module.exports = function (O, D) {
+ var C = anObject(O).constructor;
+ var S;
+ return C === undefined || (S = anObject(C)[SPECIES]) == undefined ? D : aFunction(S);
+};
+
+/***/ }),
+/* 91 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
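+// setImmediate/clearImmediate fallback used for macrotask scheduling; defers
+// via process.nextTick, Dispatch.now, a MessageChannel, postMessage, script
+// onreadystatechange, or setTimeout, whichever is available.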
+var ctx = __w_pdfjs_require__(26);
+
+var invoke = __w_pdfjs_require__(92);
+
+var html = __w_pdfjs_require__(59);
+
+var cel = __w_pdfjs_require__(17);
+
+var global = __w_pdfjs_require__(8);
+
+var process = global.process;
+var setTask = global.setImmediate;
+var clearTask = global.clearImmediate;
+var MessageChannel = global.MessageChannel;
+var Dispatch = global.Dispatch;
+var counter = 0;
+var queue = {};
+var ONREADYSTATECHANGE = 'onreadystatechange';
+var defer, channel, port;
+
+var run = function run() {
+ var id = +this;
+
+ if (queue.hasOwnProperty(id)) {
+ var fn = queue[id];
+ delete queue[id];
+ fn();
+ }
+};
+
+var listener = function listener(event) {
+ run.call(event.data);
+};
+
+if (!setTask || !clearTask) {
+ setTask = function setImmediate(fn) {
+ var args = [];
+ var i = 1;
+
+ while (arguments.length > i) {
+ args.push(arguments[i++]);
+ }
+
+ queue[++counter] = function () {
+ invoke(typeof fn == 'function' ? fn : Function(fn), args);
+ };
+
+ defer(counter);
+ return counter;
+ };
+
+ clearTask = function clearImmediate(id) {
+ delete queue[id];
+ };
+
+ if (__w_pdfjs_require__(32)(process) == 'process') {
+ defer = function defer(id) {
+ process.nextTick(ctx(run, id, 1));
+ };
+ } else if (Dispatch && Dispatch.now) {
+ defer = function defer(id) {
+ Dispatch.now(ctx(run, id, 1));
+ };
+ } else if (MessageChannel) {
+ channel = new MessageChannel();
+ port = channel.port2;
+ channel.port1.onmessage = listener;
+ defer = ctx(port.postMessage, port, 1);
+ } else if (global.addEventListener && typeof postMessage == 'function' && !global.importScripts) {
+ defer = function defer(id) {
+ global.postMessage(id + '', '*');
+ };
+
+ global.addEventListener('message', listener, false);
+ } else if (ONREADYSTATECHANGE in cel('script')) {
+ defer = function defer(id) {
+ html.appendChild(cel('script'))[ONREADYSTATECHANGE] = function () {
+ html.removeChild(this);
+ run.call(id);
+ };
+ };
+ } else {
+ defer = function defer(id) {
+ setTimeout(ctx(run, id, 1), 0);
+ };
+ }
+}
+
+module.exports = {
+ set: setTask,
+ clear: clearTask
+};
+
+/***/ }),
+/* 92 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (fn, args, that) {
+ var un = that === undefined;
+
+ switch (args.length) {
+ case 0:
+ return un ? fn() : fn.call(that);
+
+ case 1:
+ return un ? fn(args[0]) : fn.call(that, args[0]);
+
+ case 2:
+ return un ? fn(args[0], args[1]) : fn.call(that, args[0], args[1]);
+
+ case 3:
+ return un ? fn(args[0], args[1], args[2]) : fn.call(that, args[0], args[1], args[2]);
+
+ case 4:
+ return un ? fn(args[0], args[1], args[2], args[3]) : fn.call(that, args[0], args[1], args[2], args[3]);
+ }
+
+ return fn.apply(that, args);
+};
+
+/***/ }),
+/* 93 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
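+// Microtask scheduler: flushes queued callbacks via process.nextTick, a
+// MutationObserver, a resolved native Promise, or the macrotask fallback above.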
+var global = __w_pdfjs_require__(8);
+
+var macrotask = __w_pdfjs_require__(91).set;
+
+var Observer = global.MutationObserver || global.WebKitMutationObserver;
+var process = global.process;
+var Promise = global.Promise;
+var isNode = __w_pdfjs_require__(32)(process) == 'process';
+
+module.exports = function () {
+ var head, last, notify;
+
+ var flush = function flush() {
+ var parent, fn;
+ if (isNode && (parent = process.domain)) parent.exit();
+
+ while (head) {
+ fn = head.fn;
+ head = head.next;
+
+ try {
+ fn();
+ } catch (e) {
+ if (head) notify();else last = undefined;
+ throw e;
+ }
+ }
+
+ last = undefined;
+ if (parent) parent.enter();
+ };
+
+ if (isNode) {
+ notify = function notify() {
+ process.nextTick(flush);
+ };
+ } else if (Observer && !(global.navigator && global.navigator.standalone)) {
+ var toggle = true;
+ var node = document.createTextNode('');
+ new Observer(flush).observe(node, {
+ characterData: true
+ });
+
+ notify = function notify() {
+ node.data = toggle = !toggle;
+ };
+ } else if (Promise && Promise.resolve) {
+ var promise = Promise.resolve(undefined);
+
+ notify = function notify() {
+ promise.then(flush);
+ };
+ } else {
+ notify = function notify() {
+ macrotask.call(global, flush);
+ };
+ }
+
+ return function (fn) {
+ var task = {
+ fn: fn,
+ next: undefined
+ };
+ if (last) last.next = task;
+
+ if (!head) {
+ head = task;
+ notify();
+ }
+
+ last = task;
+ };
+};
+
+/***/ }),
+/* 94 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var aFunction = __w_pdfjs_require__(27);
+
+function PromiseCapability(C) {
+ var resolve, reject;
+ this.promise = new C(function ($$resolve, $$reject) {
+ if (resolve !== undefined || reject !== undefined) throw TypeError('Bad Promise constructor');
+ resolve = $$resolve;
+ reject = $$reject;
+ });
+ this.resolve = aFunction(resolve);
+ this.reject = aFunction(reject);
+}
+
+module.exports.f = function (C) {
+ return new PromiseCapability(C);
+};
+
+/***/ }),
+/* 95 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (exec) {
+ try {
+ return {
+ e: false,
+ v: exec()
+ };
+ } catch (e) {
+ return {
+ e: true,
+ v: e
+ };
+ }
+};
+
+/***/ }),
+/* 96 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(8);
+
+var navigator = global.navigator;
+module.exports = navigator && navigator.userAgent || '';
+
+/***/ }),
+/* 97 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var anObject = __w_pdfjs_require__(12);
+
+var isObject = __w_pdfjs_require__(13);
+
+var newPromiseCapability = __w_pdfjs_require__(94);
+
+module.exports = function (C, x) {
+ anObject(C);
+ if (isObject(x) && x.constructor === C) return x;
+ var promiseCapability = newPromiseCapability.f(C);
+ var resolve = promiseCapability.resolve;
+ resolve(x);
+ return promiseCapability.promise;
+};
+
+/***/ }),
+/* 98 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var redefine = __w_pdfjs_require__(20);
+
+module.exports = function (target, src, safe) {
+ for (var key in src) {
+ redefine(target, key, src[key], safe);
+ }
+
+ return target;
+};
+
+/***/ }),
+/* 99 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(8);
+
+var dP = __w_pdfjs_require__(11);
+
+var DESCRIPTORS = __w_pdfjs_require__(15);
+
+var SPECIES = __w_pdfjs_require__(33)('species');
+
+module.exports = function (KEY) {
+ var C = global[KEY];
+ if (DESCRIPTORS && C && !C[SPECIES]) dP.f(C, SPECIES, {
+ configurable: true,
+ get: function get() {
+ return this;
+ }
+ });
+};
+
+/***/ }),
+/* 100 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+var core = __w_pdfjs_require__(9);
+
+var global = __w_pdfjs_require__(8);
+
+var speciesConstructor = __w_pdfjs_require__(90);
+
+var promiseResolve = __w_pdfjs_require__(97);
+
+$export($export.P + $export.R, 'Promise', {
+ 'finally': function _finally(onFinally) {
+ var C = speciesConstructor(this, core.Promise || global.Promise);
+ var isFunction = typeof onFinally == 'function';
+ return this.then(isFunction ? function (x) {
+ return promiseResolve(C, onFinally()).then(function () {
+ return x;
+ });
+ } : onFinally, isFunction ? function (e) {
+ return promiseResolve(C, onFinally()).then(function () {
+ throw e;
+ });
+ } : onFinally);
+ }
+});
+
+/***/ }),
+/* 101 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+var newPromiseCapability = __w_pdfjs_require__(94);
+
+var perform = __w_pdfjs_require__(95);
+
+$export($export.S, 'Promise', {
+ 'try': function _try(callbackfn) {
+ var promiseCapability = newPromiseCapability.f(this);
+ var result = perform(callbackfn);
+ (result.e ? promiseCapability.reject : promiseCapability.resolve)(result.v);
+ return promiseCapability.promise;
+ }
+});
+
+/***/ }),
+/* 102 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(83);
+
+__w_pdfjs_require__(84);
+
+__w_pdfjs_require__(103);
+
+__w_pdfjs_require__(115);
+
+__w_pdfjs_require__(117);
+
+module.exports = __w_pdfjs_require__(9).WeakMap;
+
+/***/ }),
+/* 103 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
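+// ES2015 WeakMap polyfill; the block at the bottom works around IE11's native
+// WeakMap by routing frozen (non-extensible) keys through a fallback map.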
+var global = __w_pdfjs_require__(8);
+
+var each = __w_pdfjs_require__(104)(0);
+
+var redefine = __w_pdfjs_require__(20);
+
+var meta = __w_pdfjs_require__(108);
+
+var assign = __w_pdfjs_require__(72);
+
+var weak = __w_pdfjs_require__(109);
+
+var isObject = __w_pdfjs_require__(13);
+
+var validate = __w_pdfjs_require__(110);
+
+var NATIVE_WEAK_MAP = __w_pdfjs_require__(110);
+
+var IS_IE11 = !global.ActiveXObject && 'ActiveXObject' in global;
+var WEAK_MAP = 'WeakMap';
+var getWeak = meta.getWeak;
+var isExtensible = Object.isExtensible;
+var uncaughtFrozenStore = weak.ufstore;
+var InternalMap;
+
+var wrapper = function wrapper(get) {
+ return function WeakMap() {
+ return get(this, arguments.length > 0 ? arguments[0] : undefined);
+ };
+};
+
+var methods = {
+ get: function get(key) {
+ if (isObject(key)) {
+ var data = getWeak(key);
+ if (data === true) return uncaughtFrozenStore(validate(this, WEAK_MAP)).get(key);
+ return data ? data[this._i] : undefined;
+ }
+ },
+ set: function set(key, value) {
+ return weak.def(validate(this, WEAK_MAP), key, value);
+ }
+};
+
+var $WeakMap = module.exports = __w_pdfjs_require__(111)(WEAK_MAP, wrapper, methods, weak, true, true);
+
+if (NATIVE_WEAK_MAP && IS_IE11) {
+ InternalMap = weak.getConstructor(wrapper, WEAK_MAP);
+ assign(InternalMap.prototype, methods);
+ meta.NEED = true;
+ each(['delete', 'has', 'get', 'set'], function (key) {
+ var proto = $WeakMap.prototype;
+ var method = proto[key];
+ redefine(proto, key, function (a, b) {
+ if (isObject(a) && !isExtensible(a)) {
+ if (!this._f) this._f = new InternalMap();
+
+ var result = this._f[key](a, b);
+
+ return key == 'set' ? this : result;
+ }
+
+ return method.call(this, a, b);
+ });
+ });
+}
+
+/***/ }),
+/* 104 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
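+// Generic array-method factory: TYPE selects forEach (0), map (1), filter (2),
+// some (3), every (4), find (5) or findIndex (6) semantics over array-likes.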
+var ctx = __w_pdfjs_require__(26);
+
+var IObject = __w_pdfjs_require__(44);
+
+var toObject = __w_pdfjs_require__(62);
+
+var toLength = __w_pdfjs_require__(28);
+
+var asc = __w_pdfjs_require__(105);
+
+module.exports = function (TYPE, $create) {
+ var IS_MAP = TYPE == 1;
+ var IS_FILTER = TYPE == 2;
+ var IS_SOME = TYPE == 3;
+ var IS_EVERY = TYPE == 4;
+ var IS_FIND_INDEX = TYPE == 6;
+ var NO_HOLES = TYPE == 5 || IS_FIND_INDEX;
+ var create = $create || asc;
+ return function ($this, callbackfn, that) {
+ var O = toObject($this);
+ var self = IObject(O);
+ var f = ctx(callbackfn, that, 3);
+ var length = toLength(self.length);
+ var index = 0;
+ var result = IS_MAP ? create($this, length) : IS_FILTER ? create($this, 0) : undefined;
+ var val, res;
+
+ for (; length > index; index++) {
+ if (NO_HOLES || index in self) {
+ val = self[index];
+ res = f(val, index, O);
+
+ if (TYPE) {
+ if (IS_MAP) result[index] = res;else if (res) switch (TYPE) {
+ case 3:
+ return true;
+
+ case 5:
+ return val;
+
+ case 6:
+ return index;
+
+ case 2:
+ result.push(val);
+ } else if (IS_EVERY) return false;
+ }
+ }
+ }
+
+ return IS_FIND_INDEX ? -1 : IS_SOME || IS_EVERY ? IS_EVERY : result;
+ };
+};
+
+/***/ }),
+/* 105 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var speciesConstructor = __w_pdfjs_require__(106);
+
+module.exports = function (original, length) {
+ return new (speciesConstructor(original))(length);
+};
+
+/***/ }),
+/* 106 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(13);
+
+var isArray = __w_pdfjs_require__(107);
+
+var SPECIES = __w_pdfjs_require__(33)('species');
+
+module.exports = function (original) {
+ var C;
+
+ if (isArray(original)) {
+ C = original.constructor;
+ if (typeof C == 'function' && (C === Array || isArray(C.prototype))) C = undefined;
+
+ if (isObject(C)) {
+ C = C[SPECIES];
+ if (C === null) C = undefined;
+ }
+ }
+
+ return C === undefined ? Array : C;
+};
+
+/***/ }),
+/* 107 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var cof = __w_pdfjs_require__(32);
+
+module.exports = Array.isArray || function isArray(arg) {
+ return cof(arg) == 'Array';
+};
+
+/***/ }),
+/* 108 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
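+// Object metadata store for the collection polyfills: tags objects with a
+// hidden key (fastKey) and keeps per-object weak-collection data (getWeak).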
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var META = __w_pdfjs_require__(22)('meta');
+
+var isObject = __w_pdfjs_require__(13);
+
+var has = __w_pdfjs_require__(21);
+
+var setDesc = __w_pdfjs_require__(11).f;
+
+var id = 0;
+
+var isExtensible = Object.isExtensible || function () {
+ return true;
+};
+
+var FREEZE = !__w_pdfjs_require__(16)(function () {
+ return isExtensible(Object.preventExtensions({}));
+});
+
+var setMeta = function setMeta(it) {
+ setDesc(it, META, {
+ value: {
+ i: 'O' + ++id,
+ w: {}
+ }
+ });
+};
+
+var fastKey = function fastKey(it, create) {
+ if (!isObject(it)) return _typeof(it) == 'symbol' ? it : (typeof it == 'string' ? 'S' : 'P') + it;
+
+ if (!has(it, META)) {
+ if (!isExtensible(it)) return 'F';
+ if (!create) return 'E';
+ setMeta(it);
+ }
+
+ return it[META].i;
+};
+
+var getWeak = function getWeak(it, create) {
+ if (!has(it, META)) {
+ if (!isExtensible(it)) return true;
+ if (!create) return false;
+ setMeta(it);
+ }
+
+ return it[META].w;
+};
+
+var onFreeze = function onFreeze(it) {
+ if (FREEZE && meta.NEED && isExtensible(it) && !has(it, META)) setMeta(it);
+ return it;
+};
+
+var meta = module.exports = {
+ KEY: META,
+ NEED: false,
+ fastKey: fastKey,
+ getWeak: getWeak,
+ onFreeze: onFreeze
+};
+
+/***/ }),
+/* 109 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
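+// Shared implementation for weak collections: entries are stored on the key's
+// metadata, with an "uncaught frozen" side store for keys that cannot be tagged.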
+var redefineAll = __w_pdfjs_require__(98);
+
+var getWeak = __w_pdfjs_require__(108).getWeak;
+
+var anObject = __w_pdfjs_require__(12);
+
+var isObject = __w_pdfjs_require__(13);
+
+var anInstance = __w_pdfjs_require__(88);
+
+var forOf = __w_pdfjs_require__(89);
+
+var createArrayMethod = __w_pdfjs_require__(104);
+
+var $has = __w_pdfjs_require__(21);
+
+var validate = __w_pdfjs_require__(110);
+
+var arrayFind = createArrayMethod(5);
+var arrayFindIndex = createArrayMethod(6);
+var id = 0;
+
+var uncaughtFrozenStore = function uncaughtFrozenStore(that) {
+ return that._l || (that._l = new UncaughtFrozenStore());
+};
+
+var UncaughtFrozenStore = function UncaughtFrozenStore() {
+ this.a = [];
+};
+
+var findUncaughtFrozen = function findUncaughtFrozen(store, key) {
+ return arrayFind(store.a, function (it) {
+ return it[0] === key;
+ });
+};
+
+UncaughtFrozenStore.prototype = {
+ get: function get(key) {
+ var entry = findUncaughtFrozen(this, key);
+ if (entry) return entry[1];
+ },
+ has: function has(key) {
+ return !!findUncaughtFrozen(this, key);
+ },
+ set: function set(key, value) {
+ var entry = findUncaughtFrozen(this, key);
+ if (entry) entry[1] = value;else this.a.push([key, value]);
+ },
+ 'delete': function _delete(key) {
+ var index = arrayFindIndex(this.a, function (it) {
+ return it[0] === key;
+ });
+ if (~index) this.a.splice(index, 1);
+ return !!~index;
+ }
+};
+module.exports = {
+ getConstructor: function getConstructor(wrapper, NAME, IS_MAP, ADDER) {
+ var C = wrapper(function (that, iterable) {
+ anInstance(that, C, NAME, '_i');
+ that._t = NAME;
+ that._i = id++;
+ that._l = undefined;
+ if (iterable != undefined) forOf(iterable, IS_MAP, that[ADDER], that);
+ });
+ redefineAll(C.prototype, {
+ 'delete': function _delete(key) {
+ if (!isObject(key)) return false;
+ var data = getWeak(key);
+ if (data === true) return uncaughtFrozenStore(validate(this, NAME))['delete'](key);
+ return data && $has(data, this._i) && delete data[this._i];
+ },
+ has: function has(key) {
+ if (!isObject(key)) return false;
+ var data = getWeak(key);
+ if (data === true) return uncaughtFrozenStore(validate(this, NAME)).has(key);
+ return data && $has(data, this._i);
+ }
+ });
+ return C;
+ },
+ def: function def(that, key, value) {
+ var data = getWeak(anObject(key), true);
+ if (data === true) uncaughtFrozenStore(that).set(key, value);else data[that._i] = value;
+ return that;
+ },
+ ufstore: uncaughtFrozenStore
+};
+
+/***/ }),
+/* 110 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(13);
+
+module.exports = function (it, TYPE) {
+ if (!isObject(it) || it._t !== TYPE) throw TypeError('Incompatible receiver, ' + TYPE + ' required!');
+ return it;
+};
+
+/***/ }),
+/* 111 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
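+// Collection constructor factory: keeps the native constructor when it passes
+// the conformance checks below (iterable argument, chaining, -0 and primitive
+// key handling) and otherwise substitutes or patches it.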
+var global = __w_pdfjs_require__(8);
+
+var $export = __w_pdfjs_require__(7);
+
+var redefine = __w_pdfjs_require__(20);
+
+var redefineAll = __w_pdfjs_require__(98);
+
+var meta = __w_pdfjs_require__(108);
+
+var forOf = __w_pdfjs_require__(89);
+
+var anInstance = __w_pdfjs_require__(88);
+
+var isObject = __w_pdfjs_require__(13);
+
+var fails = __w_pdfjs_require__(16);
+
+var $iterDetect = __w_pdfjs_require__(69);
+
+var setToStringTag = __w_pdfjs_require__(60);
+
+var inheritIfRequired = __w_pdfjs_require__(112);
+
+module.exports = function (NAME, wrapper, methods, common, IS_MAP, IS_WEAK) {
+ var Base = global[NAME];
+ var C = Base;
+ var ADDER = IS_MAP ? 'set' : 'add';
+ var proto = C && C.prototype;
+ var O = {};
+
+ var fixMethod = function fixMethod(KEY) {
+ var fn = proto[KEY];
+ redefine(proto, KEY, KEY == 'delete' ? function (a) {
+ return IS_WEAK && !isObject(a) ? false : fn.call(this, a === 0 ? 0 : a);
+ } : KEY == 'has' ? function has(a) {
+ return IS_WEAK && !isObject(a) ? false : fn.call(this, a === 0 ? 0 : a);
+ } : KEY == 'get' ? function get(a) {
+ return IS_WEAK && !isObject(a) ? undefined : fn.call(this, a === 0 ? 0 : a);
+ } : KEY == 'add' ? function add(a) {
+ fn.call(this, a === 0 ? 0 : a);
+ return this;
+ } : function set(a, b) {
+ fn.call(this, a === 0 ? 0 : a, b);
+ return this;
+ });
+ };
+
+ if (typeof C != 'function' || !(IS_WEAK || proto.forEach && !fails(function () {
+ new C().entries().next();
+ }))) {
+ C = common.getConstructor(wrapper, NAME, IS_MAP, ADDER);
+ redefineAll(C.prototype, methods);
+ meta.NEED = true;
+ } else {
+ var instance = new C();
+ var HASNT_CHAINING = instance[ADDER](IS_WEAK ? {} : -0, 1) != instance;
+ var THROWS_ON_PRIMITIVES = fails(function () {
+ instance.has(1);
+ });
+ var ACCEPT_ITERABLES = $iterDetect(function (iter) {
+ new C(iter);
+ });
+ var BUGGY_ZERO = !IS_WEAK && fails(function () {
+ var $instance = new C();
+ var index = 5;
+
+ while (index--) {
+ $instance[ADDER](index, index);
+ }
+
+ return !$instance.has(-0);
+ });
+
+ if (!ACCEPT_ITERABLES) {
+ C = wrapper(function (target, iterable) {
+ anInstance(target, C, NAME);
+ var that = inheritIfRequired(new Base(), target, C);
+ if (iterable != undefined) forOf(iterable, IS_MAP, that[ADDER], that);
+ return that;
+ });
+ C.prototype = proto;
+ proto.constructor = C;
+ }
+
+ if (THROWS_ON_PRIMITIVES || BUGGY_ZERO) {
+ fixMethod('delete');
+ fixMethod('has');
+ IS_MAP && fixMethod('get');
+ }
+
+ if (BUGGY_ZERO || HASNT_CHAINING) fixMethod(ADDER);
+ if (IS_WEAK && proto.clear) delete proto.clear;
+ }
+
+ setToStringTag(C, NAME);
+ O[NAME] = C;
+ $export($export.G + $export.W + $export.F * (C != Base), O);
+ if (!IS_WEAK) common.setStrong(C, NAME, IS_MAP);
+ return C;
+};
+
+/***/ }),
+/* 112 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(13);
+
+var setPrototypeOf = __w_pdfjs_require__(113).set;
+
+module.exports = function (that, target, C) {
+ var S = target.constructor;
+ var P;
+
+ if (S !== C && typeof S == 'function' && (P = S.prototype) !== C.prototype && isObject(P) && setPrototypeOf) {
+ setPrototypeOf(that, P);
+ }
+
+ return that;
+};
+
+/***/ }),
+/* 113 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(13);
+
+var anObject = __w_pdfjs_require__(12);
+
+var check = function check(O, proto) {
+ anObject(O);
+ if (!isObject(proto) && proto !== null) throw TypeError(proto + ": can't set as prototype!");
+};
+
+module.exports = {
+ set: Object.setPrototypeOf || ('__proto__' in {} ? function (test, buggy, set) {
+ try {
+ set = __w_pdfjs_require__(26)(Function.call, __w_pdfjs_require__(114).f(Object.prototype, '__proto__').set, 2);
+ set(test, []);
+ buggy = !(test instanceof Array);
+ } catch (e) {
+ buggy = true;
+ }
+
+ return function setPrototypeOf(O, proto) {
+ check(O, proto);
+ if (buggy) O.__proto__ = proto;else set(O, proto);
+ return O;
+ };
+ }({}, false) : undefined),
+ check: check
+};
+
+/***/ }),
+/* 114 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var pIE = __w_pdfjs_require__(74);
+
+var createDesc = __w_pdfjs_require__(19);
+
+var toIObject = __w_pdfjs_require__(43);
+
+var toPrimitive = __w_pdfjs_require__(18);
+
+var has = __w_pdfjs_require__(21);
+
+var IE8_DOM_DEFINE = __w_pdfjs_require__(14);
+
+var gOPD = Object.getOwnPropertyDescriptor;
+exports.f = __w_pdfjs_require__(15) ? gOPD : function getOwnPropertyDescriptor(O, P) {
+ O = toIObject(O);
+ P = toPrimitive(P, true);
+ if (IE8_DOM_DEFINE) try {
+ return gOPD(O, P);
+ } catch (e) {}
+ if (has(O, P)) return createDesc(!pIE.f.call(O, P), O[P]);
+};
+
+/***/ }),
+/* 115 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(116)('WeakMap');
+
+/***/ }),
+/* 116 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+module.exports = function (COLLECTION) {
+ $export($export.S, COLLECTION, {
+ of: function of() {
+ var length = arguments.length;
+ var A = new Array(length);
+
+ while (length--) {
+ A[length] = arguments[length];
+ }
+
+ return new this(A);
+ }
+ });
+};
+
+/***/ }),
+/* 117 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(118)('WeakMap');
+
+/***/ }),
+/* 118 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+var aFunction = __w_pdfjs_require__(27);
+
+var ctx = __w_pdfjs_require__(26);
+
+var forOf = __w_pdfjs_require__(89);
+
+module.exports = function (COLLECTION) {
+ $export($export.S, COLLECTION, {
+ from: function from(source) {
+ var mapFn = arguments[1];
+ var mapping, A, n, cb;
+ aFunction(this);
+ mapping = mapFn !== undefined;
+ if (mapping) aFunction(mapFn);
+ if (source == undefined) return new this();
+ A = [];
+
+ if (mapping) {
+ n = 0;
+ cb = ctx(mapFn, arguments[2], 2);
+ forOf(source, false, function (nextItem) {
+ A.push(cb(nextItem, n++));
+ });
+ } else {
+ forOf(source, false, A.push, A);
+ }
+
+ return new this(A);
+ }
+ });
+};
+
+/***/ }),
+/* 119 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(83);
+
+__w_pdfjs_require__(84);
+
+__w_pdfjs_require__(120);
+
+__w_pdfjs_require__(121);
+
+__w_pdfjs_require__(122);
+
+module.exports = __w_pdfjs_require__(9).WeakSet;
+
+/***/ }),
+/* 120 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var weak = __w_pdfjs_require__(109);
+
+var validate = __w_pdfjs_require__(110);
+
+var WEAK_SET = 'WeakSet';
+
+__w_pdfjs_require__(111)(WEAK_SET, function (get) {
+ return function WeakSet() {
+ return get(this, arguments.length > 0 ? arguments[0] : undefined);
+ };
+}, {
+ add: function add(value) {
+ return weak.def(validate(this, WEAK_SET), value, true);
+ }
+}, weak, false, true);
+
+/***/ }),
+/* 121 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(116)('WeakSet');
+
+/***/ }),
+/* 122 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(118)('WeakSet');
+
+/***/ }),
+/* 123 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(124);
+
+module.exports = __w_pdfjs_require__(9).String.codePointAt;
+
+/***/ }),
+/* 124 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+var $at = __w_pdfjs_require__(49)(false);
+
+$export($export.P, 'String', {
+ codePointAt: function codePointAt(pos) {
+ return $at(this, pos);
+ }
+});
+
+/***/ }),
+/* 125 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(126);
+
+module.exports = __w_pdfjs_require__(9).String.fromCodePoint;
+
+/***/ }),
+/* 126 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+var toAbsoluteIndex = __w_pdfjs_require__(45);
+
+var fromCharCode = String.fromCharCode;
+var $fromCodePoint = String.fromCodePoint;
+$export($export.S + $export.F * (!!$fromCodePoint && $fromCodePoint.length != 1), 'String', {
+ fromCodePoint: function fromCodePoint(x) {
+ var res = [];
+ var aLen = arguments.length;
+ var i = 0;
+ var code;
+
+ while (aLen > i) {
+ code = +arguments[i++];
+ if (toAbsoluteIndex(code, 0x10ffff) !== code) throw RangeError(code + ' is not a valid code point');
+ res.push(code < 0x10000 ? fromCharCode(code) : fromCharCode(((code -= 0x10000) >> 10) + 0xd800, code % 0x400 + 0xdc00));
+ }
+
+ return res.join('');
+ }
+});
+
+/***/ }),
+/* 127 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(128);
+
+__w_pdfjs_require__(83);
+
+module.exports = __w_pdfjs_require__(9).Symbol;
+
+/***/ }),
+/* 128 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
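+// Symbol polyfill: fakes symbols as hidden uid-named properties, patches the
+// Object reflection methods and JSON.stringify so the fakes stay non-enumerable
+// and invisible, and registers the well-known symbols.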
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var global = __w_pdfjs_require__(8);
+
+var has = __w_pdfjs_require__(21);
+
+var DESCRIPTORS = __w_pdfjs_require__(15);
+
+var $export = __w_pdfjs_require__(7);
+
+var redefine = __w_pdfjs_require__(20);
+
+var META = __w_pdfjs_require__(108).KEY;
+
+var $fails = __w_pdfjs_require__(16);
+
+var shared = __w_pdfjs_require__(24);
+
+var setToStringTag = __w_pdfjs_require__(60);
+
+var uid = __w_pdfjs_require__(22);
+
+var wks = __w_pdfjs_require__(33);
+
+var wksExt = __w_pdfjs_require__(129);
+
+var wksDefine = __w_pdfjs_require__(130);
+
+var enumKeys = __w_pdfjs_require__(131);
+
+var isArray = __w_pdfjs_require__(107);
+
+var anObject = __w_pdfjs_require__(12);
+
+var isObject = __w_pdfjs_require__(13);
+
+var toObject = __w_pdfjs_require__(62);
+
+var toIObject = __w_pdfjs_require__(43);
+
+var toPrimitive = __w_pdfjs_require__(18);
+
+var createDesc = __w_pdfjs_require__(19);
+
+var _create = __w_pdfjs_require__(53);
+
+var gOPNExt = __w_pdfjs_require__(132);
+
+var $GOPD = __w_pdfjs_require__(114);
+
+var $GOPS = __w_pdfjs_require__(73);
+
+var $DP = __w_pdfjs_require__(11);
+
+var $keys = __w_pdfjs_require__(55);
+
+var gOPD = $GOPD.f;
+var dP = $DP.f;
+var gOPN = gOPNExt.f;
+var $Symbol = global.Symbol;
+var $JSON = global.JSON;
+
+var _stringify = $JSON && $JSON.stringify;
+
+var PROTOTYPE = 'prototype';
+var HIDDEN = wks('_hidden');
+var TO_PRIMITIVE = wks('toPrimitive');
+var isEnum = {}.propertyIsEnumerable;
+var SymbolRegistry = shared('symbol-registry');
+var AllSymbols = shared('symbols');
+var OPSymbols = shared('op-symbols');
+var ObjectProto = Object[PROTOTYPE];
+var USE_NATIVE = typeof $Symbol == 'function' && !!$GOPS.f;
+var QObject = global.QObject;
+var setter = !QObject || !QObject[PROTOTYPE] || !QObject[PROTOTYPE].findChild;
+var setSymbolDesc = DESCRIPTORS && $fails(function () {
+ return _create(dP({}, 'a', {
+ get: function get() {
+ return dP(this, 'a', {
+ value: 7
+ }).a;
+ }
+ })).a != 7;
+}) ? function (it, key, D) {
+ var protoDesc = gOPD(ObjectProto, key);
+ if (protoDesc) delete ObjectProto[key];
+ dP(it, key, D);
+ if (protoDesc && it !== ObjectProto) dP(ObjectProto, key, protoDesc);
+} : dP;
+
+var wrap = function wrap(tag) {
+ var sym = AllSymbols[tag] = _create($Symbol[PROTOTYPE]);
+
+ sym._k = tag;
+ return sym;
+};
+
+var isSymbol = USE_NATIVE && _typeof($Symbol.iterator) == 'symbol' ? function (it) {
+ return _typeof(it) == 'symbol';
+} : function (it) {
+ return it instanceof $Symbol;
+};
+
+var $defineProperty = function defineProperty(it, key, D) {
+ if (it === ObjectProto) $defineProperty(OPSymbols, key, D);
+ anObject(it);
+ key = toPrimitive(key, true);
+ anObject(D);
+
+ if (has(AllSymbols, key)) {
+ if (!D.enumerable) {
+ if (!has(it, HIDDEN)) dP(it, HIDDEN, createDesc(1, {}));
+ it[HIDDEN][key] = true;
+ } else {
+ if (has(it, HIDDEN) && it[HIDDEN][key]) it[HIDDEN][key] = false;
+ D = _create(D, {
+ enumerable: createDesc(0, false)
+ });
+ }
+
+ return setSymbolDesc(it, key, D);
+ }
+
+ return dP(it, key, D);
+};
+
+var $defineProperties = function defineProperties(it, P) {
+ anObject(it);
+ var keys = enumKeys(P = toIObject(P));
+ var i = 0;
+ var l = keys.length;
+ var key;
+
+ while (l > i) {
+ $defineProperty(it, key = keys[i++], P[key]);
+ }
+
+ return it;
+};
+
+var $create = function create(it, P) {
+ return P === undefined ? _create(it) : $defineProperties(_create(it), P);
+};
+
+var $propertyIsEnumerable = function propertyIsEnumerable(key) {
+ var E = isEnum.call(this, key = toPrimitive(key, true));
+ if (this === ObjectProto && has(AllSymbols, key) && !has(OPSymbols, key)) return false;
+ return E || !has(this, key) || !has(AllSymbols, key) || has(this, HIDDEN) && this[HIDDEN][key] ? E : true;
+};
+
+var $getOwnPropertyDescriptor = function getOwnPropertyDescriptor(it, key) {
+ it = toIObject(it);
+ key = toPrimitive(key, true);
+ if (it === ObjectProto && has(AllSymbols, key) && !has(OPSymbols, key)) return;
+ var D = gOPD(it, key);
+ if (D && has(AllSymbols, key) && !(has(it, HIDDEN) && it[HIDDEN][key])) D.enumerable = true;
+ return D;
+};
+
+var $getOwnPropertyNames = function getOwnPropertyNames(it) {
+ var names = gOPN(toIObject(it));
+ var result = [];
+ var i = 0;
+ var key;
+
+ while (names.length > i) {
+ if (!has(AllSymbols, key = names[i++]) && key != HIDDEN && key != META) result.push(key);
+ }
+
+ return result;
+};
+
+var $getOwnPropertySymbols = function getOwnPropertySymbols(it) {
+ var IS_OP = it === ObjectProto;
+ var names = gOPN(IS_OP ? OPSymbols : toIObject(it));
+ var result = [];
+ var i = 0;
+ var key;
+
+ while (names.length > i) {
+ if (has(AllSymbols, key = names[i++]) && (IS_OP ? has(ObjectProto, key) : true)) result.push(AllSymbols[key]);
+ }
+
+ return result;
+};
+
+if (!USE_NATIVE) {
+ $Symbol = function _Symbol() {
+ if (this instanceof $Symbol) throw TypeError('Symbol is not a constructor!');
+ var tag = uid(arguments.length > 0 ? arguments[0] : undefined);
+
+ var $set = function $set(value) {
+ if (this === ObjectProto) $set.call(OPSymbols, value);
+ if (has(this, HIDDEN) && has(this[HIDDEN], tag)) this[HIDDEN][tag] = false;
+ setSymbolDesc(this, tag, createDesc(1, value));
+ };
+
+ if (DESCRIPTORS && setter) setSymbolDesc(ObjectProto, tag, {
+ configurable: true,
+ set: $set
+ });
+ return wrap(tag);
+ };
+
+ redefine($Symbol[PROTOTYPE], 'toString', function toString() {
+ return this._k;
+ });
+ $GOPD.f = $getOwnPropertyDescriptor;
+ $DP.f = $defineProperty;
+ __w_pdfjs_require__(133).f = gOPNExt.f = $getOwnPropertyNames;
+ __w_pdfjs_require__(74).f = $propertyIsEnumerable;
+ $GOPS.f = $getOwnPropertySymbols;
+
+ if (DESCRIPTORS && !__w_pdfjs_require__(25)) {
+ redefine(ObjectProto, 'propertyIsEnumerable', $propertyIsEnumerable, true);
+ }
+
+ wksExt.f = function (name) {
+ return wrap(wks(name));
+ };
+}
+
+$export($export.G + $export.W + $export.F * !USE_NATIVE, {
+ Symbol: $Symbol
+});
+
+for (var es6Symbols = 'hasInstance,isConcatSpreadable,iterator,match,replace,search,species,split,toPrimitive,toStringTag,unscopables'.split(','), j = 0; es6Symbols.length > j;) {
+ wks(es6Symbols[j++]);
+}
+
+for (var wellKnownSymbols = $keys(wks.store), k = 0; wellKnownSymbols.length > k;) {
+ wksDefine(wellKnownSymbols[k++]);
+}
+
+$export($export.S + $export.F * !USE_NATIVE, 'Symbol', {
+ 'for': function _for(key) {
+ return has(SymbolRegistry, key += '') ? SymbolRegistry[key] : SymbolRegistry[key] = $Symbol(key);
+ },
+ keyFor: function keyFor(sym) {
+ if (!isSymbol(sym)) throw TypeError(sym + ' is not a symbol!');
+
+ for (var key in SymbolRegistry) {
+ if (SymbolRegistry[key] === sym) return key;
+ }
+ },
+ useSetter: function useSetter() {
+ setter = true;
+ },
+ useSimple: function useSimple() {
+ setter = false;
+ }
+});
+$export($export.S + $export.F * !USE_NATIVE, 'Object', {
+ create: $create,
+ defineProperty: $defineProperty,
+ defineProperties: $defineProperties,
+ getOwnPropertyDescriptor: $getOwnPropertyDescriptor,
+ getOwnPropertyNames: $getOwnPropertyNames,
+ getOwnPropertySymbols: $getOwnPropertySymbols
+});
+var FAILS_ON_PRIMITIVES = $fails(function () {
+ $GOPS.f(1);
+});
+$export($export.S + $export.F * FAILS_ON_PRIMITIVES, 'Object', {
+ getOwnPropertySymbols: function getOwnPropertySymbols(it) {
+ return $GOPS.f(toObject(it));
+ }
+});
+$JSON && $export($export.S + $export.F * (!USE_NATIVE || $fails(function () {
+ var S = $Symbol();
+ return _stringify([S]) != '[null]' || _stringify({
+ a: S
+ }) != '{}' || _stringify(Object(S)) != '{}';
+})), 'JSON', {
+ stringify: function stringify(it) {
+ var args = [it];
+ var i = 1;
+ var replacer, $replacer;
+
+ while (arguments.length > i) {
+ args.push(arguments[i++]);
+ }
+
+ $replacer = replacer = args[1];
+ if (!isObject(replacer) && it === undefined || isSymbol(it)) return;
+ if (!isArray(replacer)) replacer = function replacer(key, value) {
+ if (typeof $replacer == 'function') value = $replacer.call(this, key, value);
+ if (!isSymbol(value)) return value;
+ };
+ args[1] = replacer;
+ return _stringify.apply($JSON, args);
+ }
+});
+$Symbol[PROTOTYPE][TO_PRIMITIVE] || __w_pdfjs_require__(10)($Symbol[PROTOTYPE], TO_PRIMITIVE, $Symbol[PROTOTYPE].valueOf);
+setToStringTag($Symbol, 'Symbol');
+setToStringTag(Math, 'Math', true);
+setToStringTag(global.JSON, 'JSON', true);
+
+/***/ }),
+/* 129 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+exports.f = __w_pdfjs_require__(33);
+
+/***/ }),
+/* 130 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(8);
+
+var core = __w_pdfjs_require__(9);
+
+var LIBRARY = __w_pdfjs_require__(25);
+
+var wksExt = __w_pdfjs_require__(129);
+
+var defineProperty = __w_pdfjs_require__(11).f;
+
+module.exports = function (name) {
+ var $Symbol = core.Symbol || (core.Symbol = LIBRARY ? {} : global.Symbol || {});
+ if (name.charAt(0) != '_' && !(name in $Symbol)) defineProperty($Symbol, name, {
+ value: wksExt.f(name)
+ });
+};
+
+/***/ }),
+/* 131 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var getKeys = __w_pdfjs_require__(55);
+
+var gOPS = __w_pdfjs_require__(73);
+
+var pIE = __w_pdfjs_require__(74);
+
+module.exports = function (it) {
+ var result = getKeys(it);
+ var getSymbols = gOPS.f;
+
+ if (getSymbols) {
+ var symbols = getSymbols(it);
+ var isEnum = pIE.f;
+ var i = 0;
+ var key;
+
+ while (symbols.length > i) {
+ if (isEnum.call(it, key = symbols[i++])) result.push(key);
+ }
+ }
+
+ return result;
+};
+
+/***/ }),
+/* 132 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var toIObject = __w_pdfjs_require__(43);
+
+var gOPN = __w_pdfjs_require__(133).f;
+
+var toString = {}.toString;
+var windowNames = (typeof window === "undefined" ? "undefined" : _typeof(window)) == 'object' && window && Object.getOwnPropertyNames ? Object.getOwnPropertyNames(window) : [];
+
+var getWindowNames = function getWindowNames(it) {
+ try {
+ return gOPN(it);
+ } catch (e) {
+ return windowNames.slice();
+ }
+};
+
+module.exports.f = function getOwnPropertyNames(it) {
+ return windowNames && toString.call(it) == '[object Window]' ? getWindowNames(it) : gOPN(toIObject(it));
+};
+
+/***/ }),
+/* 133 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $keys = __w_pdfjs_require__(56);
+
+var hiddenKeys = __w_pdfjs_require__(58).concat('length', 'prototype');
+
+exports.f = Object.getOwnPropertyNames || function getOwnPropertyNames(O) {
+ return $keys(O, hiddenKeys);
+};
+
+/***/ }),
+/* 134 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(135);
+
+module.exports = __w_pdfjs_require__(9).String.padStart;
+
+/***/ }),
+/* 135 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+var $pad = __w_pdfjs_require__(136);
+
+var userAgent = __w_pdfjs_require__(96);
+
+var WEBKIT_BUG = /Version\/10\.\d+(\.\d+)?( Mobile\/\w+)? Safari\//.test(userAgent);
+$export($export.P + $export.F * WEBKIT_BUG, 'String', {
+ padStart: function padStart(maxLength) {
+ return $pad(this, maxLength, arguments.length > 1 ? arguments[1] : undefined, true);
+ }
+});
+
+/***/ }),
+/* 136 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
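+// Shared padStart/padEnd helper: builds the fill string by repetition, trims it
+// to the missing length, and prepends or appends it to the receiver.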
+var toLength = __w_pdfjs_require__(28);
+
+var repeat = __w_pdfjs_require__(137);
+
+var defined = __w_pdfjs_require__(34);
+
+module.exports = function (that, maxLength, fillString, left) {
+ var S = String(defined(that));
+ var stringLength = S.length;
+ var fillStr = fillString === undefined ? ' ' : String(fillString);
+ var intMaxLength = toLength(maxLength);
+ if (intMaxLength <= stringLength || fillStr == '') return S;
+ var fillLen = intMaxLength - stringLength;
+ var stringFiller = repeat.call(fillStr, Math.ceil(fillLen / fillStr.length));
+ if (stringFiller.length > fillLen) stringFiller = stringFiller.slice(0, fillLen);
+ return left ? stringFiller + S : S + stringFiller;
+};
+
+/***/ }),
+/* 137 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toInteger = __w_pdfjs_require__(29);
+
+var defined = __w_pdfjs_require__(34);
+
+module.exports = function repeat(count) {
+ var str = String(defined(this));
+ var res = '';
+ var n = toInteger(count);
+ if (n < 0 || n == Infinity) throw RangeError("Count can't be negative");
+
+ for (; n > 0; (n >>>= 1) && (str += str)) {
+ if (n & 1) res += str;
+ }
+
+ return res;
+};
+
+/***/ }),
+/* 138 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(139);
+
+module.exports = __w_pdfjs_require__(9).String.padEnd;
+
+/***/ }),
+/* 139 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+var $pad = __w_pdfjs_require__(136);
+
+var userAgent = __w_pdfjs_require__(96);
+
+var WEBKIT_BUG = /Version\/10\.\d+(\.\d+)?( Mobile\/\w+)? Safari\//.test(userAgent);
+$export($export.P + $export.F * WEBKIT_BUG, 'String', {
+ padEnd: function padEnd(maxLength) {
+ return $pad(this, maxLength, arguments.length > 1 ? arguments[1] : undefined, false);
+ }
+});
+
+/***/ }),
+/* 140 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(141);
+
+module.exports = __w_pdfjs_require__(9).Object.values;
+
+/***/ }),
+/* 141 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(7);
+
+var $values = __w_pdfjs_require__(142)(false);
+
+$export($export.S, 'Object', {
+ values: function values(it) {
+ return $values(it);
+ }
+});
+
+/***/ }),
+/* 142 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var DESCRIPTORS = __w_pdfjs_require__(15);
+
+var getKeys = __w_pdfjs_require__(55);
+
+var toIObject = __w_pdfjs_require__(43);
+
+var isEnum = __w_pdfjs_require__(74).f;
+
+module.exports = function (isEntries) {
+ return function (it) {
+ var O = toIObject(it);
+ var keys = getKeys(O);
+ var length = keys.length;
+ var i = 0;
+ var result = [];
+ var key;
+
+ while (length > i) {
+ key = keys[i++];
+
+ if (!DESCRIPTORS || isEnum.call(O, key)) {
+ result.push(isEntries ? [key, O[key]] : O[key]);
+ }
+ }
+
+ return result;
+ };
+};
+
+/***/ }),
+/* 143 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
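+// Feature-detects a usable native ReadableStream; otherwise exports the
+// ReadableStream from the bundled streams polyfill (module 144).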
+{
+ var isReadableStreamSupported = false;
+
+ if (typeof ReadableStream !== 'undefined') {
+ try {
+ new ReadableStream({
+ start: function start(controller) {
+ controller.close();
+ }
+ });
+ isReadableStreamSupported = true;
+ } catch (e) {}
+ }
+
+ if (isReadableStreamSupported) {
+ exports.ReadableStream = ReadableStream;
+ } else {
+ exports.ReadableStream = __w_pdfjs_require__(144).ReadableStream;
+ }
+}
+
+/***/ }),
+/* 144 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
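+// Self-contained streams polyfill carrying its own module loader; module 143
+// pulls ReadableStream from it when no native implementation is available.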
+function _typeof2(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof2 = function _typeof2(obj) { return typeof obj; }; } else { _typeof2 = function _typeof2(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof2(obj); }
+
+(function (e, a) {
+ for (var i in a) {
+ e[i] = a[i];
+ }
+})(exports, function (modules) {
+ var installedModules = {};
+
+ function __w_pdfjs_require__(moduleId) {
+ if (installedModules[moduleId]) return installedModules[moduleId].exports;
+ var module = installedModules[moduleId] = {
+ i: moduleId,
+ l: false,
+ exports: {}
+ };
+ modules[moduleId].call(module.exports, module, module.exports, __w_pdfjs_require__);
+ module.l = true;
+ return module.exports;
+ }
+
+ __w_pdfjs_require__.m = modules;
+ __w_pdfjs_require__.c = installedModules;
+
+ __w_pdfjs_require__.i = function (value) {
+ return value;
+ };
+
+ __w_pdfjs_require__.d = function (exports, name, getter) {
+ if (!__w_pdfjs_require__.o(exports, name)) {
+ Object.defineProperty(exports, name, {
+ configurable: false,
+ enumerable: true,
+ get: getter
+ });
+ }
+ };
+
+ __w_pdfjs_require__.n = function (module) {
+ var getter = module && module.__esModule ? function getDefault() {
+ return module['default'];
+ } : function getModuleExports() {
+ return module;
+ };
+
+ __w_pdfjs_require__.d(getter, 'a', getter);
+
+ return getter;
+ };
+
+ __w_pdfjs_require__.o = function (object, property) {
+ return Object.prototype.hasOwnProperty.call(object, property);
+ };
+
+ __w_pdfjs_require__.p = "";
+ return __w_pdfjs_require__(__w_pdfjs_require__.s = 7);
+}([function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
+ var _typeof = typeof Symbol === "function" && _typeof2(Symbol.iterator) === "symbol" ? function (obj) {
+ return _typeof2(obj);
+ } : function (obj) {
+ return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : _typeof2(obj);
+ };
+
+ var _require = __w_pdfjs_require__(1),
+ assert = _require.assert;
+
+ function IsPropertyKey(argument) {
+ return typeof argument === 'string' || (typeof argument === 'undefined' ? 'undefined' : _typeof(argument)) === 'symbol';
+ }
+
+ exports.typeIsObject = function (x) {
+ return (typeof x === 'undefined' ? 'undefined' : _typeof(x)) === 'object' && x !== null || typeof x === 'function';
+ };
+
+ exports.createDataProperty = function (o, p, v) {
+ assert(exports.typeIsObject(o));
+ Object.defineProperty(o, p, {
+ value: v,
+ writable: true,
+ enumerable: true,
+ configurable: true
+ });
+ };
+
+ exports.createArrayFromList = function (elements) {
+ return elements.slice();
+ };
+
+ exports.ArrayBufferCopy = function (dest, destOffset, src, srcOffset, n) {
+ new Uint8Array(dest).set(new Uint8Array(src, srcOffset, n), destOffset);
+ };
+
+ exports.CreateIterResultObject = function (value, done) {
+ assert(typeof done === 'boolean');
+ var obj = {};
+ Object.defineProperty(obj, 'value', {
+ value: value,
+ enumerable: true,
+ writable: true,
+ configurable: true
+ });
+ Object.defineProperty(obj, 'done', {
+ value: done,
+ enumerable: true,
+ writable: true,
+ configurable: true
+ });
+ return obj;
+ };
+
+ exports.IsFiniteNonNegativeNumber = function (v) {
+ if (Number.isNaN(v)) {
+ return false;
+ }
+
+ if (v === Infinity) {
+ return false;
+ }
+
+ if (v < 0) {
+ return false;
+ }
+
+ return true;
+ };
+
+ function Call(F, V, args) {
+ if (typeof F !== 'function') {
+ throw new TypeError('Argument is not a function');
+ }
+
+ return Function.prototype.apply.call(F, V, args);
+ }
+
+ exports.InvokeOrNoop = function (O, P, args) {
+ assert(O !== undefined);
+ assert(IsPropertyKey(P));
+ assert(Array.isArray(args));
+ var method = O[P];
+
+ if (method === undefined) {
+ return undefined;
+ }
+
+ return Call(method, O, args);
+ };
+
+ exports.PromiseInvokeOrNoop = function (O, P, args) {
+ assert(O !== undefined);
+ assert(IsPropertyKey(P));
+ assert(Array.isArray(args));
+
+ try {
+ return Promise.resolve(exports.InvokeOrNoop(O, P, args));
+ } catch (returnValueE) {
+ return Promise.reject(returnValueE);
+ }
+ };
+
+ exports.PromiseInvokeOrPerformFallback = function (O, P, args, F, argsF) {
+ assert(O !== undefined);
+ assert(IsPropertyKey(P));
+ assert(Array.isArray(args));
+ assert(Array.isArray(argsF));
+ var method = void 0;
+
+ try {
+ method = O[P];
+ } catch (methodE) {
+ return Promise.reject(methodE);
+ }
+
+ if (method === undefined) {
+ return F.apply(null, argsF);
+ }
+
+ try {
+ return Promise.resolve(Call(method, O, args));
+ } catch (e) {
+ return Promise.reject(e);
+ }
+ };
+
+ exports.TransferArrayBuffer = function (O) {
+ return O.slice();
+ };
+
+ exports.ValidateAndNormalizeHighWaterMark = function (highWaterMark) {
+ highWaterMark = Number(highWaterMark);
+
+ if (Number.isNaN(highWaterMark) || highWaterMark < 0) {
+ throw new RangeError('highWaterMark property of a queuing strategy must be non-negative and non-NaN');
+ }
+
+ return highWaterMark;
+ };
+
+ exports.ValidateAndNormalizeQueuingStrategy = function (size, highWaterMark) {
+ if (size !== undefined && typeof size !== 'function') {
+ throw new TypeError('size property of a queuing strategy must be a function');
+ }
+
+ highWaterMark = exports.ValidateAndNormalizeHighWaterMark(highWaterMark);
+ return {
+ size: size,
+ highWaterMark: highWaterMark
+ };
+ };
+}, function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
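+ // Module 1: minimal assert()/AssertionError helpers; rethrowAssertionErrorRejection
+ // re-throws assertion failures asynchronously so promise chains cannot swallow them.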
+ function rethrowAssertionErrorRejection(e) {
+ if (e && e.constructor === AssertionError) {
+ setTimeout(function () {
+ throw e;
+ }, 0);
+ }
+ }
+
+ function AssertionError(message) {
+ this.name = 'AssertionError';
+ this.message = message || '';
+ this.stack = new Error().stack;
+ }
+
+ AssertionError.prototype = Object.create(Error.prototype);
+ AssertionError.prototype.constructor = AssertionError;
+
+ function assert(value, message) {
+ if (!value) {
+ throw new AssertionError(message);
+ }
+ }
+
+ module.exports = {
+ rethrowAssertionErrorRejection: rethrowAssertionErrorRejection,
+ AssertionError: AssertionError,
+ assert: assert
+ };
+}, function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
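+ // Module 2: WritableStream, WritableStreamDefaultWriter and WritableStreamDefaultController.
+ // The _createClass/_classCallCheck helpers below are Babel class-transpilation output.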
+ var _createClass = function () {
+ function defineProperties(target, props) {
+ for (var i = 0; i < props.length; i++) {
+ var descriptor = props[i];
+ descriptor.enumerable = descriptor.enumerable || false;
+ descriptor.configurable = true;
+ if ("value" in descriptor) descriptor.writable = true;
+ Object.defineProperty(target, descriptor.key, descriptor);
+ }
+ }
+
+ return function (Constructor, protoProps, staticProps) {
+ if (protoProps) defineProperties(Constructor.prototype, protoProps);
+ if (staticProps) defineProperties(Constructor, staticProps);
+ return Constructor;
+ };
+ }();
+
+ function _classCallCheck(instance, Constructor) {
+ if (!(instance instanceof Constructor)) {
+ throw new TypeError("Cannot call a class as a function");
+ }
+ }
+
+ var _require = __w_pdfjs_require__(0),
+ InvokeOrNoop = _require.InvokeOrNoop,
+ PromiseInvokeOrNoop = _require.PromiseInvokeOrNoop,
+ ValidateAndNormalizeQueuingStrategy = _require.ValidateAndNormalizeQueuingStrategy,
+ typeIsObject = _require.typeIsObject;
+
+ var _require2 = __w_pdfjs_require__(1),
+ assert = _require2.assert,
+ rethrowAssertionErrorRejection = _require2.rethrowAssertionErrorRejection;
+
+ var _require3 = __w_pdfjs_require__(3),
+ DequeueValue = _require3.DequeueValue,
+ EnqueueValueWithSize = _require3.EnqueueValueWithSize,
+ PeekQueueValue = _require3.PeekQueueValue,
+ ResetQueue = _require3.ResetQueue;
+
+ var WritableStream = function () {
+ function WritableStream() {
+ var underlyingSink = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
+
+ var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ size = _ref.size,
+ _ref$highWaterMark = _ref.highWaterMark,
+ highWaterMark = _ref$highWaterMark === undefined ? 1 : _ref$highWaterMark;
+
+ _classCallCheck(this, WritableStream);
+
+ this._state = 'writable';
+ this._storedError = undefined;
+ this._writer = undefined;
+ this._writableStreamController = undefined;
+ this._writeRequests = [];
+ this._inFlightWriteRequest = undefined;
+ this._closeRequest = undefined;
+ this._inFlightCloseRequest = undefined;
+ this._pendingAbortRequest = undefined;
+ this._backpressure = false;
+ var type = underlyingSink.type;
+
+ if (type !== undefined) {
+ throw new RangeError('Invalid type is specified');
+ }
+
+ this._writableStreamController = new WritableStreamDefaultController(this, underlyingSink, size, highWaterMark);
+
+ this._writableStreamController.__startSteps();
+ }
+
+ _createClass(WritableStream, [{
+ key: 'abort',
+ value: function abort(reason) {
+ if (IsWritableStream(this) === false) {
+ return Promise.reject(streamBrandCheckException('abort'));
+ }
+
+ if (IsWritableStreamLocked(this) === true) {
+ return Promise.reject(new TypeError('Cannot abort a stream that already has a writer'));
+ }
+
+ return WritableStreamAbort(this, reason);
+ }
+ }, {
+ key: 'getWriter',
+ value: function getWriter() {
+ if (IsWritableStream(this) === false) {
+ throw streamBrandCheckException('getWriter');
+ }
+
+ return AcquireWritableStreamDefaultWriter(this);
+ }
+ }, {
+ key: 'locked',
+ get: function get() {
+ if (IsWritableStream(this) === false) {
+ throw streamBrandCheckException('locked');
+ }
+
+ return IsWritableStreamLocked(this);
+ }
+ }]);
+
+ return WritableStream;
+ }();
+
+ module.exports = {
+ AcquireWritableStreamDefaultWriter: AcquireWritableStreamDefaultWriter,
+ IsWritableStream: IsWritableStream,
+ IsWritableStreamLocked: IsWritableStreamLocked,
+ WritableStream: WritableStream,
+ WritableStreamAbort: WritableStreamAbort,
+ WritableStreamDefaultControllerError: WritableStreamDefaultControllerError,
+ WritableStreamDefaultWriterCloseWithErrorPropagation: WritableStreamDefaultWriterCloseWithErrorPropagation,
+ WritableStreamDefaultWriterRelease: WritableStreamDefaultWriterRelease,
+ WritableStreamDefaultWriterWrite: WritableStreamDefaultWriterWrite,
+ WritableStreamCloseQueuedOrInFlight: WritableStreamCloseQueuedOrInFlight
+ };
+
+ function AcquireWritableStreamDefaultWriter(stream) {
+ return new WritableStreamDefaultWriter(stream);
+ }
+
+ function IsWritableStream(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_writableStreamController')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function IsWritableStreamLocked(stream) {
+ assert(IsWritableStream(stream) === true, 'IsWritableStreamLocked should only be used on known writable streams');
+
+ if (stream._writer === undefined) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function WritableStreamAbort(stream, reason) {
+ var state = stream._state;
+
+ if (state === 'closed') {
+ return Promise.resolve(undefined);
+ }
+
+ if (state === 'errored') {
+ return Promise.reject(stream._storedError);
+ }
+
+ var error = new TypeError('Requested to abort');
+
+ if (stream._pendingAbortRequest !== undefined) {
+ return Promise.reject(error);
+ }
+
+ assert(state === 'writable' || state === 'erroring', 'state must be writable or erroring');
+ var wasAlreadyErroring = false;
+
+ if (state === 'erroring') {
+ wasAlreadyErroring = true;
+ reason = undefined;
+ }
+
+ var promise = new Promise(function (resolve, reject) {
+ stream._pendingAbortRequest = {
+ _resolve: resolve,
+ _reject: reject,
+ _reason: reason,
+ _wasAlreadyErroring: wasAlreadyErroring
+ };
+ });
+
+ if (wasAlreadyErroring === false) {
+ WritableStreamStartErroring(stream, error);
+ }
+
+ return promise;
+ }
+
+ function WritableStreamAddWriteRequest(stream) {
+ assert(IsWritableStreamLocked(stream) === true);
+ assert(stream._state === 'writable');
+ var promise = new Promise(function (resolve, reject) {
+ var writeRequest = {
+ _resolve: resolve,
+ _reject: reject
+ };
+
+ stream._writeRequests.push(writeRequest);
+ });
+ return promise;
+ }
+
+ function WritableStreamDealWithRejection(stream, error) {
+ var state = stream._state;
+
+ if (state === 'writable') {
+ WritableStreamStartErroring(stream, error);
+ return;
+ }
+
+ assert(state === 'erroring');
+ WritableStreamFinishErroring(stream);
+ }
+
+ function WritableStreamStartErroring(stream, reason) {
+ assert(stream._storedError === undefined, 'stream._storedError === undefined');
+ assert(stream._state === 'writable', 'state must be writable');
+ var controller = stream._writableStreamController;
+ assert(controller !== undefined, 'controller must not be undefined');
+ stream._state = 'erroring';
+ stream._storedError = reason;
+ var writer = stream._writer;
+
+ if (writer !== undefined) {
+ WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason);
+ }
+
+ if (WritableStreamHasOperationMarkedInFlight(stream) === false && controller._started === true) {
+ WritableStreamFinishErroring(stream);
+ }
+ }
+
+ function WritableStreamFinishErroring(stream) {
+ assert(stream._state === 'erroring', 'stream._state === erroring');
+ assert(WritableStreamHasOperationMarkedInFlight(stream) === false, 'WritableStreamHasOperationMarkedInFlight(stream) === false');
+ stream._state = 'errored';
+
+ stream._writableStreamController.__errorSteps();
+
+ var storedError = stream._storedError;
+
+ for (var i = 0; i < stream._writeRequests.length; i++) {
+ var writeRequest = stream._writeRequests[i];
+
+ writeRequest._reject(storedError);
+ }
+
+ stream._writeRequests = [];
+
+ if (stream._pendingAbortRequest === undefined) {
+ WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+ return;
+ }
+
+ var abortRequest = stream._pendingAbortRequest;
+ stream._pendingAbortRequest = undefined;
+
+ if (abortRequest._wasAlreadyErroring === true) {
+ abortRequest._reject(storedError);
+
+ WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+ return;
+ }
+
+ var promise = stream._writableStreamController.__abortSteps(abortRequest._reason);
+
+ promise.then(function () {
+ abortRequest._resolve();
+
+ WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+ }, function (reason) {
+ abortRequest._reject(reason);
+
+ WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+ });
+ }
+
+ function WritableStreamFinishInFlightWrite(stream) {
+ assert(stream._inFlightWriteRequest !== undefined);
+
+ stream._inFlightWriteRequest._resolve(undefined);
+
+ stream._inFlightWriteRequest = undefined;
+ }
+
+ function WritableStreamFinishInFlightWriteWithError(stream, error) {
+ assert(stream._inFlightWriteRequest !== undefined);
+
+ stream._inFlightWriteRequest._reject(error);
+
+ stream._inFlightWriteRequest = undefined;
+ assert(stream._state === 'writable' || stream._state === 'erroring');
+ WritableStreamDealWithRejection(stream, error);
+ }
+
+ function WritableStreamFinishInFlightClose(stream) {
+ assert(stream._inFlightCloseRequest !== undefined);
+
+ stream._inFlightCloseRequest._resolve(undefined);
+
+ stream._inFlightCloseRequest = undefined;
+ var state = stream._state;
+ assert(state === 'writable' || state === 'erroring');
+
+ if (state === 'erroring') {
+ stream._storedError = undefined;
+
+ if (stream._pendingAbortRequest !== undefined) {
+ stream._pendingAbortRequest._resolve();
+
+ stream._pendingAbortRequest = undefined;
+ }
+ }
+
+ stream._state = 'closed';
+ var writer = stream._writer;
+
+ if (writer !== undefined) {
+ defaultWriterClosedPromiseResolve(writer);
+ }
+
+ assert(stream._pendingAbortRequest === undefined, 'stream._pendingAbortRequest === undefined');
+ assert(stream._storedError === undefined, 'stream._storedError === undefined');
+ }
+
+ function WritableStreamFinishInFlightCloseWithError(stream, error) {
+ assert(stream._inFlightCloseRequest !== undefined);
+
+ stream._inFlightCloseRequest._reject(error);
+
+ stream._inFlightCloseRequest = undefined;
+ assert(stream._state === 'writable' || stream._state === 'erroring');
+
+ if (stream._pendingAbortRequest !== undefined) {
+ stream._pendingAbortRequest._reject(error);
+
+ stream._pendingAbortRequest = undefined;
+ }
+
+ WritableStreamDealWithRejection(stream, error);
+ }
+
+ function WritableStreamCloseQueuedOrInFlight(stream) {
+ if (stream._closeRequest === undefined && stream._inFlightCloseRequest === undefined) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function WritableStreamHasOperationMarkedInFlight(stream) {
+ if (stream._inFlightWriteRequest === undefined && stream._inFlightCloseRequest === undefined) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function WritableStreamMarkCloseRequestInFlight(stream) {
+ assert(stream._inFlightCloseRequest === undefined);
+ assert(stream._closeRequest !== undefined);
+ stream._inFlightCloseRequest = stream._closeRequest;
+ stream._closeRequest = undefined;
+ }
+
+ function WritableStreamMarkFirstWriteRequestInFlight(stream) {
+ assert(stream._inFlightWriteRequest === undefined, 'there must be no pending write request');
+ assert(stream._writeRequests.length !== 0, 'writeRequests must not be empty');
+ stream._inFlightWriteRequest = stream._writeRequests.shift();
+ }
+
+ function WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream) {
+ assert(stream._state === 'errored', '_stream_.[[state]] is `"errored"`');
+
+ if (stream._closeRequest !== undefined) {
+ assert(stream._inFlightCloseRequest === undefined);
+
+ stream._closeRequest._reject(stream._storedError);
+
+ stream._closeRequest = undefined;
+ }
+
+ var writer = stream._writer;
+
+ if (writer !== undefined) {
+ defaultWriterClosedPromiseReject(writer, stream._storedError);
+
+ writer._closedPromise["catch"](function () {});
+ }
+ }
+
+ function WritableStreamUpdateBackpressure(stream, backpressure) {
+ assert(stream._state === 'writable');
+ assert(WritableStreamCloseQueuedOrInFlight(stream) === false);
+ var writer = stream._writer;
+
+ if (writer !== undefined && backpressure !== stream._backpressure) {
+ if (backpressure === true) {
+ defaultWriterReadyPromiseReset(writer);
+ } else {
+ assert(backpressure === false);
+ defaultWriterReadyPromiseResolve(writer);
+ }
+ }
+
+ stream._backpressure = backpressure;
+ }
+
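+ // Writer returned by WritableStream.prototype.getWriter(): tracks the per-writer
+ // closed/ready promises and forwards abort/close/write to the owning stream.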
+ var WritableStreamDefaultWriter = function () {
+ function WritableStreamDefaultWriter(stream) {
+ _classCallCheck(this, WritableStreamDefaultWriter);
+
+ if (IsWritableStream(stream) === false) {
+ throw new TypeError('WritableStreamDefaultWriter can only be constructed with a WritableStream instance');
+ }
+
+ if (IsWritableStreamLocked(stream) === true) {
+ throw new TypeError('This stream has already been locked for exclusive writing by another writer');
+ }
+
+ this._ownerWritableStream = stream;
+ stream._writer = this;
+ var state = stream._state;
+
+ if (state === 'writable') {
+ if (WritableStreamCloseQueuedOrInFlight(stream) === false && stream._backpressure === true) {
+ defaultWriterReadyPromiseInitialize(this);
+ } else {
+ defaultWriterReadyPromiseInitializeAsResolved(this);
+ }
+
+ defaultWriterClosedPromiseInitialize(this);
+ } else if (state === 'erroring') {
+ defaultWriterReadyPromiseInitializeAsRejected(this, stream._storedError);
+
+ this._readyPromise["catch"](function () {});
+
+ defaultWriterClosedPromiseInitialize(this);
+ } else if (state === 'closed') {
+ defaultWriterReadyPromiseInitializeAsResolved(this);
+ defaultWriterClosedPromiseInitializeAsResolved(this);
+ } else {
+ assert(state === 'errored', 'state must be errored');
+ var storedError = stream._storedError;
+ defaultWriterReadyPromiseInitializeAsRejected(this, storedError);
+
+ this._readyPromise["catch"](function () {});
+
+ defaultWriterClosedPromiseInitializeAsRejected(this, storedError);
+
+ this._closedPromise["catch"](function () {});
+ }
+ }
+
+ _createClass(WritableStreamDefaultWriter, [{
+ key: 'abort',
+ value: function abort(reason) {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ return Promise.reject(defaultWriterBrandCheckException('abort'));
+ }
+
+ if (this._ownerWritableStream === undefined) {
+ return Promise.reject(defaultWriterLockException('abort'));
+ }
+
+ return WritableStreamDefaultWriterAbort(this, reason);
+ }
+ }, {
+ key: 'close',
+ value: function close() {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ return Promise.reject(defaultWriterBrandCheckException('close'));
+ }
+
+ var stream = this._ownerWritableStream;
+
+ if (stream === undefined) {
+ return Promise.reject(defaultWriterLockException('close'));
+ }
+
+ if (WritableStreamCloseQueuedOrInFlight(stream) === true) {
+ return Promise.reject(new TypeError('cannot close an already-closing stream'));
+ }
+
+ return WritableStreamDefaultWriterClose(this);
+ }
+ }, {
+ key: 'releaseLock',
+ value: function releaseLock() {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ throw defaultWriterBrandCheckException('releaseLock');
+ }
+
+ var stream = this._ownerWritableStream;
+
+ if (stream === undefined) {
+ return;
+ }
+
+ assert(stream._writer !== undefined);
+ WritableStreamDefaultWriterRelease(this);
+ }
+ }, {
+ key: 'write',
+ value: function write(chunk) {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ return Promise.reject(defaultWriterBrandCheckException('write'));
+ }
+
+ if (this._ownerWritableStream === undefined) {
+ return Promise.reject(defaultWriterLockException('write to'));
+ }
+
+ return WritableStreamDefaultWriterWrite(this, chunk);
+ }
+ }, {
+ key: 'closed',
+ get: function get() {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ return Promise.reject(defaultWriterBrandCheckException('closed'));
+ }
+
+ return this._closedPromise;
+ }
+ }, {
+ key: 'desiredSize',
+ get: function get() {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ throw defaultWriterBrandCheckException('desiredSize');
+ }
+
+ if (this._ownerWritableStream === undefined) {
+ throw defaultWriterLockException('desiredSize');
+ }
+
+ return WritableStreamDefaultWriterGetDesiredSize(this);
+ }
+ }, {
+ key: 'ready',
+ get: function get() {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ return Promise.reject(defaultWriterBrandCheckException('ready'));
+ }
+
+ return this._readyPromise;
+ }
+ }]);
+
+ return WritableStreamDefaultWriter;
+ }();
+
+ function IsWritableStreamDefaultWriter(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_ownerWritableStream')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function WritableStreamDefaultWriterAbort(writer, reason) {
+ var stream = writer._ownerWritableStream;
+ assert(stream !== undefined);
+ return WritableStreamAbort(stream, reason);
+ }
+
+ function WritableStreamDefaultWriterClose(writer) {
+ var stream = writer._ownerWritableStream;
+ assert(stream !== undefined);
+ var state = stream._state;
+
+ if (state === 'closed' || state === 'errored') {
+ return Promise.reject(new TypeError('The stream (in ' + state + ' state) is not in the writable state and cannot be closed'));
+ }
+
+ assert(state === 'writable' || state === 'erroring');
+ assert(WritableStreamCloseQueuedOrInFlight(stream) === false);
+ var promise = new Promise(function (resolve, reject) {
+ var closeRequest = {
+ _resolve: resolve,
+ _reject: reject
+ };
+ stream._closeRequest = closeRequest;
+ });
+
+ if (stream._backpressure === true && state === 'writable') {
+ defaultWriterReadyPromiseResolve(writer);
+ }
+
+ WritableStreamDefaultControllerClose(stream._writableStreamController);
+ return promise;
+ }
+
+ function WritableStreamDefaultWriterCloseWithErrorPropagation(writer) {
+ var stream = writer._ownerWritableStream;
+ assert(stream !== undefined);
+ var state = stream._state;
+
+ if (WritableStreamCloseQueuedOrInFlight(stream) === true || state === 'closed') {
+ return Promise.resolve();
+ }
+
+ if (state === 'errored') {
+ return Promise.reject(stream._storedError);
+ }
+
+ assert(state === 'writable' || state === 'erroring');
+ return WritableStreamDefaultWriterClose(writer);
+ }
+
+ function WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) {
+ if (writer._closedPromiseState === 'pending') {
+ defaultWriterClosedPromiseReject(writer, error);
+ } else {
+ defaultWriterClosedPromiseResetToRejected(writer, error);
+ }
+
+ writer._closedPromise["catch"](function () {});
+ }
+
+ function WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) {
+ if (writer._readyPromiseState === 'pending') {
+ defaultWriterReadyPromiseReject(writer, error);
+ } else {
+ defaultWriterReadyPromiseResetToRejected(writer, error);
+ }
+
+ writer._readyPromise["catch"](function () {});
+ }
+
+ function WritableStreamDefaultWriterGetDesiredSize(writer) {
+ var stream = writer._ownerWritableStream;
+ var state = stream._state;
+
+ if (state === 'errored' || state === 'erroring') {
+ return null;
+ }
+
+ if (state === 'closed') {
+ return 0;
+ }
+
+ return WritableStreamDefaultControllerGetDesiredSize(stream._writableStreamController);
+ }
+
+ function WritableStreamDefaultWriterRelease(writer) {
+ var stream = writer._ownerWritableStream;
+ assert(stream !== undefined);
+ assert(stream._writer === writer);
+ var releasedError = new TypeError('Writer was released and can no longer be used to monitor the stream\'s closedness');
+ WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError);
+ WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError);
+ stream._writer = undefined;
+ writer._ownerWritableStream = undefined;
+ }
+
+ function WritableStreamDefaultWriterWrite(writer, chunk) {
+ var stream = writer._ownerWritableStream;
+ assert(stream !== undefined);
+ var controller = stream._writableStreamController;
+ var chunkSize = WritableStreamDefaultControllerGetChunkSize(controller, chunk);
+
+ if (stream !== writer._ownerWritableStream) {
+ return Promise.reject(defaultWriterLockException('write to'));
+ }
+
+ var state = stream._state;
+
+ if (state === 'errored') {
+ return Promise.reject(stream._storedError);
+ }
+
+ if (WritableStreamCloseQueuedOrInFlight(stream) === true || state === 'closed') {
+ return Promise.reject(new TypeError('The stream is closing or closed and cannot be written to'));
+ }
+
+ if (state === 'erroring') {
+ return Promise.reject(stream._storedError);
+ }
+
+ assert(state === 'writable');
+ var promise = WritableStreamAddWriteRequest(stream);
+ WritableStreamDefaultControllerWrite(controller, chunk, chunkSize);
+ return promise;
+ }
+
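+ // Controller created by the WritableStream constructor: wraps the underlying sink,
+ // owns the write queue, and drives backpressure plus the start/write/close/abort steps.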
+ var WritableStreamDefaultController = function () {
+ function WritableStreamDefaultController(stream, underlyingSink, size, highWaterMark) {
+ _classCallCheck(this, WritableStreamDefaultController);
+
+ if (IsWritableStream(stream) === false) {
+ throw new TypeError('WritableStreamDefaultController can only be constructed with a WritableStream instance');
+ }
+
+ if (stream._writableStreamController !== undefined) {
+ throw new TypeError('WritableStreamDefaultController instances can only be created by the WritableStream constructor');
+ }
+
+ this._controlledWritableStream = stream;
+ this._underlyingSink = underlyingSink;
+ this._queue = undefined;
+ this._queueTotalSize = undefined;
+ ResetQueue(this);
+ this._started = false;
+ var normalizedStrategy = ValidateAndNormalizeQueuingStrategy(size, highWaterMark);
+ this._strategySize = normalizedStrategy.size;
+ this._strategyHWM = normalizedStrategy.highWaterMark;
+ var backpressure = WritableStreamDefaultControllerGetBackpressure(this);
+ WritableStreamUpdateBackpressure(stream, backpressure);
+ }
+
+ _createClass(WritableStreamDefaultController, [{
+ key: 'error',
+ value: function error(e) {
+ if (IsWritableStreamDefaultController(this) === false) {
+ throw new TypeError('WritableStreamDefaultController.prototype.error can only be used on a WritableStreamDefaultController');
+ }
+
+ var state = this._controlledWritableStream._state;
+
+ if (state !== 'writable') {
+ return;
+ }
+
+ WritableStreamDefaultControllerError(this, e);
+ }
+ }, {
+ key: '__abortSteps',
+ value: function __abortSteps(reason) {
+ return PromiseInvokeOrNoop(this._underlyingSink, 'abort', [reason]);
+ }
+ }, {
+ key: '__errorSteps',
+ value: function __errorSteps() {
+ ResetQueue(this);
+ }
+ }, {
+ key: '__startSteps',
+ value: function __startSteps() {
+ var _this = this;
+
+ var startResult = InvokeOrNoop(this._underlyingSink, 'start', [this]);
+ var stream = this._controlledWritableStream;
+ Promise.resolve(startResult).then(function () {
+ assert(stream._state === 'writable' || stream._state === 'erroring');
+ _this._started = true;
+ WritableStreamDefaultControllerAdvanceQueueIfNeeded(_this);
+ }, function (r) {
+ assert(stream._state === 'writable' || stream._state === 'erroring');
+ _this._started = true;
+ WritableStreamDealWithRejection(stream, r);
+ })["catch"](rethrowAssertionErrorRejection);
+ }
+ }]);
+
+ return WritableStreamDefaultController;
+ }();
+
+ function WritableStreamDefaultControllerClose(controller) {
+ EnqueueValueWithSize(controller, 'close', 0);
+ WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+ }
+
+ function WritableStreamDefaultControllerGetChunkSize(controller, chunk) {
+ var strategySize = controller._strategySize;
+
+ if (strategySize === undefined) {
+ return 1;
+ }
+
+ try {
+ return strategySize(chunk);
+ } catch (chunkSizeE) {
+ WritableStreamDefaultControllerErrorIfNeeded(controller, chunkSizeE);
+ return 1;
+ }
+ }
+
+ function WritableStreamDefaultControllerGetDesiredSize(controller) {
+ return controller._strategyHWM - controller._queueTotalSize;
+ }
+
+ function WritableStreamDefaultControllerWrite(controller, chunk, chunkSize) {
+ var writeRecord = {
+ chunk: chunk
+ };
+
+ try {
+ EnqueueValueWithSize(controller, writeRecord, chunkSize);
+ } catch (enqueueE) {
+ WritableStreamDefaultControllerErrorIfNeeded(controller, enqueueE);
+ return;
+ }
+
+ var stream = controller._controlledWritableStream;
+
+ if (WritableStreamCloseQueuedOrInFlight(stream) === false && stream._state === 'writable') {
+ var backpressure = WritableStreamDefaultControllerGetBackpressure(controller);
+ WritableStreamUpdateBackpressure(stream, backpressure);
+ }
+
+ WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+ }
+
+ function IsWritableStreamDefaultController(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_underlyingSink')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller) {
+ var stream = controller._controlledWritableStream;
+
+ if (controller._started === false) {
+ return;
+ }
+
+ if (stream._inFlightWriteRequest !== undefined) {
+ return;
+ }
+
+ var state = stream._state;
+
+ if (state === 'closed' || state === 'errored') {
+ return;
+ }
+
+ if (state === 'erroring') {
+ WritableStreamFinishErroring(stream);
+ return;
+ }
+
+ if (controller._queue.length === 0) {
+ return;
+ }
+
+ var writeRecord = PeekQueueValue(controller);
+
+ if (writeRecord === 'close') {
+ WritableStreamDefaultControllerProcessClose(controller);
+ } else {
+ WritableStreamDefaultControllerProcessWrite(controller, writeRecord.chunk);
+ }
+ }
+
+ function WritableStreamDefaultControllerErrorIfNeeded(controller, error) {
+ if (controller._controlledWritableStream._state === 'writable') {
+ WritableStreamDefaultControllerError(controller, error);
+ }
+ }
+
+ function WritableStreamDefaultControllerProcessClose(controller) {
+ var stream = controller._controlledWritableStream;
+ WritableStreamMarkCloseRequestInFlight(stream);
+ DequeueValue(controller);
+ assert(controller._queue.length === 0, 'queue must be empty once the final write record is dequeued');
+ var sinkClosePromise = PromiseInvokeOrNoop(controller._underlyingSink, 'close', []);
+ sinkClosePromise.then(function () {
+ WritableStreamFinishInFlightClose(stream);
+ }, function (reason) {
+ WritableStreamFinishInFlightCloseWithError(stream, reason);
+ })["catch"](rethrowAssertionErrorRejection);
+ }
+
+ function WritableStreamDefaultControllerProcessWrite(controller, chunk) {
+ var stream = controller._controlledWritableStream;
+ WritableStreamMarkFirstWriteRequestInFlight(stream);
+ var sinkWritePromise = PromiseInvokeOrNoop(controller._underlyingSink, 'write', [chunk, controller]);
+ sinkWritePromise.then(function () {
+ WritableStreamFinishInFlightWrite(stream);
+ var state = stream._state;
+ assert(state === 'writable' || state === 'erroring');
+ DequeueValue(controller);
+
+ if (WritableStreamCloseQueuedOrInFlight(stream) === false && state === 'writable') {
+ var backpressure = WritableStreamDefaultControllerGetBackpressure(controller);
+ WritableStreamUpdateBackpressure(stream, backpressure);
+ }
+
+ WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+ }, function (reason) {
+ WritableStreamFinishInFlightWriteWithError(stream, reason);
+ })["catch"](rethrowAssertionErrorRejection);
+ }
+
+ function WritableStreamDefaultControllerGetBackpressure(controller) {
+ var desiredSize = WritableStreamDefaultControllerGetDesiredSize(controller);
+ return desiredSize <= 0;
+ }
+
+ function WritableStreamDefaultControllerError(controller, error) {
+ var stream = controller._controlledWritableStream;
+ assert(stream._state === 'writable');
+ WritableStreamStartErroring(stream, error);
+ }
+
+ function streamBrandCheckException(name) {
+ return new TypeError('WritableStream.prototype.' + name + ' can only be used on a WritableStream');
+ }
+
+ function defaultWriterBrandCheckException(name) {
+ return new TypeError('WritableStreamDefaultWriter.prototype.' + name + ' can only be used on a WritableStreamDefaultWriter');
+ }
+
+ function defaultWriterLockException(name) {
+ return new TypeError('Cannot ' + name + ' a stream using a released writer');
+ }
+
+ function defaultWriterClosedPromiseInitialize(writer) {
+ writer._closedPromise = new Promise(function (resolve, reject) {
+ writer._closedPromise_resolve = resolve;
+ writer._closedPromise_reject = reject;
+ writer._closedPromiseState = 'pending';
+ });
+ }
+
+ function defaultWriterClosedPromiseInitializeAsRejected(writer, reason) {
+ writer._closedPromise = Promise.reject(reason);
+ writer._closedPromise_resolve = undefined;
+ writer._closedPromise_reject = undefined;
+ writer._closedPromiseState = 'rejected';
+ }
+
+ function defaultWriterClosedPromiseInitializeAsResolved(writer) {
+ writer._closedPromise = Promise.resolve(undefined);
+ writer._closedPromise_resolve = undefined;
+ writer._closedPromise_reject = undefined;
+ writer._closedPromiseState = 'resolved';
+ }
+
+ function defaultWriterClosedPromiseReject(writer, reason) {
+ assert(writer._closedPromise_resolve !== undefined, 'writer._closedPromise_resolve !== undefined');
+ assert(writer._closedPromise_reject !== undefined, 'writer._closedPromise_reject !== undefined');
+ assert(writer._closedPromiseState === 'pending', 'writer._closedPromiseState is pending');
+
+ writer._closedPromise_reject(reason);
+
+ writer._closedPromise_resolve = undefined;
+ writer._closedPromise_reject = undefined;
+ writer._closedPromiseState = 'rejected';
+ }
+
+ function defaultWriterClosedPromiseResetToRejected(writer, reason) {
+ assert(writer._closedPromise_resolve === undefined, 'writer._closedPromise_resolve === undefined');
+ assert(writer._closedPromise_reject === undefined, 'writer._closedPromise_reject === undefined');
+ assert(writer._closedPromiseState !== 'pending', 'writer._closedPromiseState is not pending');
+ writer._closedPromise = Promise.reject(reason);
+ writer._closedPromiseState = 'rejected';
+ }
+
+ function defaultWriterClosedPromiseResolve(writer) {
+ assert(writer._closedPromise_resolve !== undefined, 'writer._closedPromise_resolve !== undefined');
+ assert(writer._closedPromise_reject !== undefined, 'writer._closedPromise_reject !== undefined');
+ assert(writer._closedPromiseState === 'pending', 'writer._closedPromiseState is pending');
+
+ writer._closedPromise_resolve(undefined);
+
+ writer._closedPromise_resolve = undefined;
+ writer._closedPromise_reject = undefined;
+ writer._closedPromiseState = 'resolved';
+ }
+
+ function defaultWriterReadyPromiseInitialize(writer) {
+ writer._readyPromise = new Promise(function (resolve, reject) {
+ writer._readyPromise_resolve = resolve;
+ writer._readyPromise_reject = reject;
+ });
+ writer._readyPromiseState = 'pending';
+ }
+
+ function defaultWriterReadyPromiseInitializeAsRejected(writer, reason) {
+ writer._readyPromise = Promise.reject(reason);
+ writer._readyPromise_resolve = undefined;
+ writer._readyPromise_reject = undefined;
+ writer._readyPromiseState = 'rejected';
+ }
+
+ function defaultWriterReadyPromiseInitializeAsResolved(writer) {
+ writer._readyPromise = Promise.resolve(undefined);
+ writer._readyPromise_resolve = undefined;
+ writer._readyPromise_reject = undefined;
+ writer._readyPromiseState = 'fulfilled';
+ }
+
+ function defaultWriterReadyPromiseReject(writer, reason) {
+ assert(writer._readyPromise_resolve !== undefined, 'writer._readyPromise_resolve !== undefined');
+ assert(writer._readyPromise_reject !== undefined, 'writer._readyPromise_reject !== undefined');
+
+ writer._readyPromise_reject(reason);
+
+ writer._readyPromise_resolve = undefined;
+ writer._readyPromise_reject = undefined;
+ writer._readyPromiseState = 'rejected';
+ }
+
+ function defaultWriterReadyPromiseReset(writer) {
+ assert(writer._readyPromise_resolve === undefined, 'writer._readyPromise_resolve === undefined');
+ assert(writer._readyPromise_reject === undefined, 'writer._readyPromise_reject === undefined');
+ writer._readyPromise = new Promise(function (resolve, reject) {
+ writer._readyPromise_resolve = resolve;
+ writer._readyPromise_reject = reject;
+ });
+ writer._readyPromiseState = 'pending';
+ }
+
+ function defaultWriterReadyPromiseResetToRejected(writer, reason) {
+ assert(writer._readyPromise_resolve === undefined, 'writer._readyPromise_resolve === undefined');
+ assert(writer._readyPromise_reject === undefined, 'writer._readyPromise_reject === undefined');
+ writer._readyPromise = Promise.reject(reason);
+ writer._readyPromiseState = 'rejected';
+ }
+
+ function defaultWriterReadyPromiseResolve(writer) {
+ assert(writer._readyPromise_resolve !== undefined, 'writer._readyPromise_resolve !== undefined');
+ assert(writer._readyPromise_reject !== undefined, 'writer._readyPromise_reject !== undefined');
+
+ writer._readyPromise_resolve(undefined);
+
+ writer._readyPromise_resolve = undefined;
+ writer._readyPromise_reject = undefined;
+ writer._readyPromiseState = 'fulfilled';
+ }
+}, function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
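+ // Module 3: queue-with-sizes operations (DequeueValue, EnqueueValueWithSize,
+ // PeekQueueValue, ResetQueue) shared by the stream controllers.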
+ var _require = __w_pdfjs_require__(0),
+ IsFiniteNonNegativeNumber = _require.IsFiniteNonNegativeNumber;
+
+ var _require2 = __w_pdfjs_require__(1),
+ assert = _require2.assert;
+
+ exports.DequeueValue = function (container) {
+ assert('_queue' in container && '_queueTotalSize' in container, 'Spec-level failure: DequeueValue should only be used on containers with [[queue]] and [[queueTotalSize]].');
+ assert(container._queue.length > 0, 'Spec-level failure: should never dequeue from an empty queue.');
+
+ var pair = container._queue.shift();
+
+ container._queueTotalSize -= pair.size;
+
+ if (container._queueTotalSize < 0) {
+ container._queueTotalSize = 0;
+ }
+
+ return pair.value;
+ };
+
+ exports.EnqueueValueWithSize = function (container, value, size) {
+ assert('_queue' in container && '_queueTotalSize' in container, 'Spec-level failure: EnqueueValueWithSize should only be used on containers with [[queue]] and ' + '[[queueTotalSize]].');
+ size = Number(size);
+
+ if (!IsFiniteNonNegativeNumber(size)) {
+ throw new RangeError('Size must be a finite, non-NaN, non-negative number.');
+ }
+
+ container._queue.push({
+ value: value,
+ size: size
+ });
+
+ container._queueTotalSize += size;
+ };
+
+ exports.PeekQueueValue = function (container) {
+ assert('_queue' in container && '_queueTotalSize' in container, 'Spec-level failure: PeekQueueValue should only be used on containers with [[queue]] and [[queueTotalSize]].');
+ assert(container._queue.length > 0, 'Spec-level failure: should never peek at an empty queue.');
+ var pair = container._queue[0];
+ return pair.value;
+ };
+
+ exports.ResetQueue = function (container) {
+ assert('_queue' in container && '_queueTotalSize' in container, 'Spec-level failure: ResetQueue should only be used on containers with [[queue]] and [[queueTotalSize]].');
+ container._queue = [];
+ container._queueTotalSize = 0;
+ };
+}, function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
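+ // Module 4: ReadableStream with its default and BYOB readers, tee(), and
+ // pipeTo()/pipeThrough(), built on the writable-stream primitives from module 2.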
+ var _createClass = function () {
+ function defineProperties(target, props) {
+ for (var i = 0; i < props.length; i++) {
+ var descriptor = props[i];
+ descriptor.enumerable = descriptor.enumerable || false;
+ descriptor.configurable = true;
+ if ("value" in descriptor) descriptor.writable = true;
+ Object.defineProperty(target, descriptor.key, descriptor);
+ }
+ }
+
+ return function (Constructor, protoProps, staticProps) {
+ if (protoProps) defineProperties(Constructor.prototype, protoProps);
+ if (staticProps) defineProperties(Constructor, staticProps);
+ return Constructor;
+ };
+ }();
+
+ function _classCallCheck(instance, Constructor) {
+ if (!(instance instanceof Constructor)) {
+ throw new TypeError("Cannot call a class as a function");
+ }
+ }
+
+ var _require = __w_pdfjs_require__(0),
+ ArrayBufferCopy = _require.ArrayBufferCopy,
+ CreateIterResultObject = _require.CreateIterResultObject,
+ IsFiniteNonNegativeNumber = _require.IsFiniteNonNegativeNumber,
+ InvokeOrNoop = _require.InvokeOrNoop,
+ PromiseInvokeOrNoop = _require.PromiseInvokeOrNoop,
+ TransferArrayBuffer = _require.TransferArrayBuffer,
+ ValidateAndNormalizeQueuingStrategy = _require.ValidateAndNormalizeQueuingStrategy,
+ ValidateAndNormalizeHighWaterMark = _require.ValidateAndNormalizeHighWaterMark;
+
+ var _require2 = __w_pdfjs_require__(0),
+ createArrayFromList = _require2.createArrayFromList,
+ createDataProperty = _require2.createDataProperty,
+ typeIsObject = _require2.typeIsObject;
+
+ var _require3 = __w_pdfjs_require__(1),
+ assert = _require3.assert,
+ rethrowAssertionErrorRejection = _require3.rethrowAssertionErrorRejection;
+
+ var _require4 = __w_pdfjs_require__(3),
+ DequeueValue = _require4.DequeueValue,
+ EnqueueValueWithSize = _require4.EnqueueValueWithSize,
+ ResetQueue = _require4.ResetQueue;
+
+ var _require5 = __w_pdfjs_require__(2),
+ AcquireWritableStreamDefaultWriter = _require5.AcquireWritableStreamDefaultWriter,
+ IsWritableStream = _require5.IsWritableStream,
+ IsWritableStreamLocked = _require5.IsWritableStreamLocked,
+ WritableStreamAbort = _require5.WritableStreamAbort,
+ WritableStreamDefaultWriterCloseWithErrorPropagation = _require5.WritableStreamDefaultWriterCloseWithErrorPropagation,
+ WritableStreamDefaultWriterRelease = _require5.WritableStreamDefaultWriterRelease,
+ WritableStreamDefaultWriterWrite = _require5.WritableStreamDefaultWriterWrite,
+ WritableStreamCloseQueuedOrInFlight = _require5.WritableStreamCloseQueuedOrInFlight;
+
+ var ReadableStream = function () {
+ function ReadableStream() {
+ var underlyingSource = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
+
+ var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ size = _ref.size,
+ highWaterMark = _ref.highWaterMark;
+
+ _classCallCheck(this, ReadableStream);
+
+ this._state = 'readable';
+ this._reader = undefined;
+ this._storedError = undefined;
+ this._disturbed = false;
+ this._readableStreamController = undefined;
+ var type = underlyingSource.type;
+ var typeString = String(type);
+
+ if (typeString === 'bytes') {
+ if (highWaterMark === undefined) {
+ highWaterMark = 0;
+ }
+
+ this._readableStreamController = new ReadableByteStreamController(this, underlyingSource, highWaterMark);
+ } else if (type === undefined) {
+ if (highWaterMark === undefined) {
+ highWaterMark = 1;
+ }
+
+ this._readableStreamController = new ReadableStreamDefaultController(this, underlyingSource, size, highWaterMark);
+ } else {
+ throw new RangeError('Invalid type is specified');
+ }
+ }
+
+ _createClass(ReadableStream, [{
+ key: 'cancel',
+ value: function cancel(reason) {
+ if (IsReadableStream(this) === false) {
+ return Promise.reject(streamBrandCheckException('cancel'));
+ }
+
+ if (IsReadableStreamLocked(this) === true) {
+ return Promise.reject(new TypeError('Cannot cancel a stream that already has a reader'));
+ }
+
+ return ReadableStreamCancel(this, reason);
+ }
+ }, {
+ key: 'getReader',
+ value: function getReader() {
+ var _ref2 = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
+ mode = _ref2.mode;
+
+ if (IsReadableStream(this) === false) {
+ throw streamBrandCheckException('getReader');
+ }
+
+ if (mode === undefined) {
+ return AcquireReadableStreamDefaultReader(this);
+ }
+
+ mode = String(mode);
+
+ if (mode === 'byob') {
+ return AcquireReadableStreamBYOBReader(this);
+ }
+
+ throw new RangeError('Invalid mode is specified');
+ }
+ }, {
+ key: 'pipeThrough',
+ value: function pipeThrough(_ref3, options) {
+ var writable = _ref3.writable,
+ readable = _ref3.readable;
+ var promise = this.pipeTo(writable, options);
+ ifIsObjectAndHasAPromiseIsHandledInternalSlotSetPromiseIsHandledToTrue(promise);
+ return readable;
+ }
+ }, {
+ key: 'pipeTo',
+ value: function pipeTo(dest) {
+ var _this = this;
+
+ var _ref4 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ preventClose = _ref4.preventClose,
+ preventAbort = _ref4.preventAbort,
+ preventCancel = _ref4.preventCancel;
+
+ if (IsReadableStream(this) === false) {
+ return Promise.reject(streamBrandCheckException('pipeTo'));
+ }
+
+ if (IsWritableStream(dest) === false) {
+ return Promise.reject(new TypeError('ReadableStream.prototype.pipeTo\'s first argument must be a WritableStream'));
+ }
+
+ preventClose = Boolean(preventClose);
+ preventAbort = Boolean(preventAbort);
+ preventCancel = Boolean(preventCancel);
+
+ if (IsReadableStreamLocked(this) === true) {
+ return Promise.reject(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream'));
+ }
+
+ if (IsWritableStreamLocked(dest) === true) {
+ return Promise.reject(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream'));
+ }
+
+ var reader = AcquireReadableStreamDefaultReader(this);
+ var writer = AcquireWritableStreamDefaultWriter(dest);
+ var shuttingDown = false;
+ var currentWrite = Promise.resolve();
+ return new Promise(function (resolve, reject) {
+ function pipeLoop() {
+ currentWrite = Promise.resolve();
+
+ if (shuttingDown === true) {
+ return Promise.resolve();
+ }
+
+ return writer._readyPromise.then(function () {
+ return ReadableStreamDefaultReaderRead(reader).then(function (_ref5) {
+ var value = _ref5.value,
+ done = _ref5.done;
+
+ if (done === true) {
+ return;
+ }
+
+ currentWrite = WritableStreamDefaultWriterWrite(writer, value)["catch"](function () {});
+ });
+ }).then(pipeLoop);
+ }
+
+ isOrBecomesErrored(_this, reader._closedPromise, function (storedError) {
+ if (preventAbort === false) {
+ shutdownWithAction(function () {
+ return WritableStreamAbort(dest, storedError);
+ }, true, storedError);
+ } else {
+ shutdown(true, storedError);
+ }
+ });
+ isOrBecomesErrored(dest, writer._closedPromise, function (storedError) {
+ if (preventCancel === false) {
+ shutdownWithAction(function () {
+ return ReadableStreamCancel(_this, storedError);
+ }, true, storedError);
+ } else {
+ shutdown(true, storedError);
+ }
+ });
+ isOrBecomesClosed(_this, reader._closedPromise, function () {
+ if (preventClose === false) {
+ shutdownWithAction(function () {
+ return WritableStreamDefaultWriterCloseWithErrorPropagation(writer);
+ });
+ } else {
+ shutdown();
+ }
+ });
+
+ if (WritableStreamCloseQueuedOrInFlight(dest) === true || dest._state === 'closed') {
+ var destClosed = new TypeError('the destination writable stream closed before all data could be piped to it');
+
+ if (preventCancel === false) {
+ shutdownWithAction(function () {
+ return ReadableStreamCancel(_this, destClosed);
+ }, true, destClosed);
+ } else {
+ shutdown(true, destClosed);
+ }
+ }
+
+ pipeLoop()["catch"](function (err) {
+ currentWrite = Promise.resolve();
+ rethrowAssertionErrorRejection(err);
+ });
+
+ function waitForWritesToFinish() {
+ var oldCurrentWrite = currentWrite;
+ return currentWrite.then(function () {
+ return oldCurrentWrite !== currentWrite ? waitForWritesToFinish() : undefined;
+ });
+ }
+
+ function isOrBecomesErrored(stream, promise, action) {
+ if (stream._state === 'errored') {
+ action(stream._storedError);
+ } else {
+ promise["catch"](action)["catch"](rethrowAssertionErrorRejection);
+ }
+ }
+
+ function isOrBecomesClosed(stream, promise, action) {
+ if (stream._state === 'closed') {
+ action();
+ } else {
+ promise.then(action)["catch"](rethrowAssertionErrorRejection);
+ }
+ }
+
+ function shutdownWithAction(action, originalIsError, originalError) {
+ if (shuttingDown === true) {
+ return;
+ }
+
+ shuttingDown = true;
+
+ if (dest._state === 'writable' && WritableStreamCloseQueuedOrInFlight(dest) === false) {
+ waitForWritesToFinish().then(doTheRest);
+ } else {
+ doTheRest();
+ }
+
+ function doTheRest() {
+ action().then(function () {
+ return finalize(originalIsError, originalError);
+ }, function (newError) {
+ return finalize(true, newError);
+ })["catch"](rethrowAssertionErrorRejection);
+ }
+ }
+
+ function shutdown(isError, error) {
+ if (shuttingDown === true) {
+ return;
+ }
+
+ shuttingDown = true;
+
+ if (dest._state === 'writable' && WritableStreamCloseQueuedOrInFlight(dest) === false) {
+ waitForWritesToFinish().then(function () {
+ return finalize(isError, error);
+ })["catch"](rethrowAssertionErrorRejection);
+ } else {
+ finalize(isError, error);
+ }
+ }
+
+ function finalize(isError, error) {
+ WritableStreamDefaultWriterRelease(writer);
+ ReadableStreamReaderGenericRelease(reader);
+
+ if (isError) {
+ reject(error);
+ } else {
+ resolve(undefined);
+ }
+ }
+ });
+ }
+ }, {
+ key: 'tee',
+ value: function tee() {
+ if (IsReadableStream(this) === false) {
+ throw streamBrandCheckException('tee');
+ }
+
+ var branches = ReadableStreamTee(this, false);
+ return createArrayFromList(branches);
+ }
+ }, {
+ key: 'locked',
+ get: function get() {
+ if (IsReadableStream(this) === false) {
+ throw streamBrandCheckException('locked');
+ }
+
+ return IsReadableStreamLocked(this);
+ }
+ }]);
+
+ return ReadableStream;
+ }();
+
+ module.exports = {
+ ReadableStream: ReadableStream,
+ IsReadableStreamDisturbed: IsReadableStreamDisturbed,
+ ReadableStreamDefaultControllerClose: ReadableStreamDefaultControllerClose,
+ ReadableStreamDefaultControllerEnqueue: ReadableStreamDefaultControllerEnqueue,
+ ReadableStreamDefaultControllerError: ReadableStreamDefaultControllerError,
+ ReadableStreamDefaultControllerGetDesiredSize: ReadableStreamDefaultControllerGetDesiredSize
+ };
+
+ function AcquireReadableStreamBYOBReader(stream) {
+ return new ReadableStreamBYOBReader(stream);
+ }
+
+ function AcquireReadableStreamDefaultReader(stream) {
+ return new ReadableStreamDefaultReader(stream);
+ }
+
+ function IsReadableStream(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_readableStreamController')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function IsReadableStreamDisturbed(stream) {
+ assert(IsReadableStream(stream) === true, 'IsReadableStreamDisturbed should only be used on known readable streams');
+ return stream._disturbed;
+ }
+
+ function IsReadableStreamLocked(stream) {
+ assert(IsReadableStream(stream) === true, 'IsReadableStreamLocked should only be used on known readable streams');
+
+ if (stream._reader === undefined) {
+ return false;
+ }
+
+ return true;
+ }
+
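+ // ReadableStreamTee: splits a stream into two branches that share one underlying reader;
+ // chunks are enqueued into both branch controllers, and the source is only cancelled
+ // once both branches have been cancelled.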
+ function ReadableStreamTee(stream, cloneForBranch2) {
+ assert(IsReadableStream(stream) === true);
+ assert(typeof cloneForBranch2 === 'boolean');
+ var reader = AcquireReadableStreamDefaultReader(stream);
+ var teeState = {
+ closedOrErrored: false,
+ canceled1: false,
+ canceled2: false,
+ reason1: undefined,
+ reason2: undefined
+ };
+ teeState.promise = new Promise(function (resolve) {
+ teeState._resolve = resolve;
+ });
+ var pull = create_ReadableStreamTeePullFunction();
+ pull._reader = reader;
+ pull._teeState = teeState;
+ pull._cloneForBranch2 = cloneForBranch2;
+ var cancel1 = create_ReadableStreamTeeBranch1CancelFunction();
+ cancel1._stream = stream;
+ cancel1._teeState = teeState;
+ var cancel2 = create_ReadableStreamTeeBranch2CancelFunction();
+ cancel2._stream = stream;
+ cancel2._teeState = teeState;
+ var underlyingSource1 = Object.create(Object.prototype);
+ createDataProperty(underlyingSource1, 'pull', pull);
+ createDataProperty(underlyingSource1, 'cancel', cancel1);
+ var branch1Stream = new ReadableStream(underlyingSource1);
+ var underlyingSource2 = Object.create(Object.prototype);
+ createDataProperty(underlyingSource2, 'pull', pull);
+ createDataProperty(underlyingSource2, 'cancel', cancel2);
+ var branch2Stream = new ReadableStream(underlyingSource2);
+ pull._branch1 = branch1Stream._readableStreamController;
+ pull._branch2 = branch2Stream._readableStreamController;
+
+ reader._closedPromise["catch"](function (r) {
+ if (teeState.closedOrErrored === true) {
+ return;
+ }
+
+ ReadableStreamDefaultControllerError(pull._branch1, r);
+ ReadableStreamDefaultControllerError(pull._branch2, r);
+ teeState.closedOrErrored = true;
+ });
+
+ return [branch1Stream, branch2Stream];
+ }
+
+ function create_ReadableStreamTeePullFunction() {
+ function f() {
+ var reader = f._reader,
+ branch1 = f._branch1,
+ branch2 = f._branch2,
+ teeState = f._teeState;
+ return ReadableStreamDefaultReaderRead(reader).then(function (result) {
+ assert(typeIsObject(result));
+ var value = result.value;
+ var done = result.done;
+ assert(typeof done === 'boolean');
+
+ if (done === true && teeState.closedOrErrored === false) {
+ if (teeState.canceled1 === false) {
+ ReadableStreamDefaultControllerClose(branch1);
+ }
+
+ if (teeState.canceled2 === false) {
+ ReadableStreamDefaultControllerClose(branch2);
+ }
+
+ teeState.closedOrErrored = true;
+ }
+
+ if (teeState.closedOrErrored === true) {
+ return;
+ }
+
+ var value1 = value;
+ var value2 = value;
+
+ if (teeState.canceled1 === false) {
+ ReadableStreamDefaultControllerEnqueue(branch1, value1);
+ }
+
+ if (teeState.canceled2 === false) {
+ ReadableStreamDefaultControllerEnqueue(branch2, value2);
+ }
+ });
+ }
+
+ return f;
+ }
+
+ function create_ReadableStreamTeeBranch1CancelFunction() {
+ function f(reason) {
+ var stream = f._stream,
+ teeState = f._teeState;
+ teeState.canceled1 = true;
+ teeState.reason1 = reason;
+
+ if (teeState.canceled2 === true) {
+ var compositeReason = createArrayFromList([teeState.reason1, teeState.reason2]);
+ var cancelResult = ReadableStreamCancel(stream, compositeReason);
+
+ teeState._resolve(cancelResult);
+ }
+
+ return teeState.promise;
+ }
+
+ return f;
+ }
+
+ function create_ReadableStreamTeeBranch2CancelFunction() {
+ function f(reason) {
+ var stream = f._stream,
+ teeState = f._teeState;
+ teeState.canceled2 = true;
+ teeState.reason2 = reason;
+
+ if (teeState.canceled1 === true) {
+ var compositeReason = createArrayFromList([teeState.reason1, teeState.reason2]);
+ var cancelResult = ReadableStreamCancel(stream, compositeReason);
+
+ teeState._resolve(cancelResult);
+ }
+
+ return teeState.promise;
+ }
+
+ return f;
+ }
+
+ function ReadableStreamAddReadIntoRequest(stream) {
+ assert(IsReadableStreamBYOBReader(stream._reader) === true);
+ assert(stream._state === 'readable' || stream._state === 'closed');
+ var promise = new Promise(function (resolve, reject) {
+ var readIntoRequest = {
+ _resolve: resolve,
+ _reject: reject
+ };
+
+ stream._reader._readIntoRequests.push(readIntoRequest);
+ });
+ return promise;
+ }
+
+ function ReadableStreamAddReadRequest(stream) {
+ assert(IsReadableStreamDefaultReader(stream._reader) === true);
+ assert(stream._state === 'readable');
+ var promise = new Promise(function (resolve, reject) {
+ var readRequest = {
+ _resolve: resolve,
+ _reject: reject
+ };
+
+ stream._reader._readRequests.push(readRequest);
+ });
+ return promise;
+ }
+
+ function ReadableStreamCancel(stream, reason) {
+ stream._disturbed = true;
+
+ if (stream._state === 'closed') {
+ return Promise.resolve(undefined);
+ }
+
+ if (stream._state === 'errored') {
+ return Promise.reject(stream._storedError);
+ }
+
+ ReadableStreamClose(stream);
+
+ var sourceCancelPromise = stream._readableStreamController.__cancelSteps(reason);
+
+ return sourceCancelPromise.then(function () {
+ return undefined;
+ });
+ }
+
+ function ReadableStreamClose(stream) {
+ assert(stream._state === 'readable');
+ stream._state = 'closed';
+ var reader = stream._reader;
+
+ if (reader === undefined) {
+ return undefined;
+ }
+
+ if (IsReadableStreamDefaultReader(reader) === true) {
+ for (var i = 0; i < reader._readRequests.length; i++) {
+ var _resolve = reader._readRequests[i]._resolve;
+
+ _resolve(CreateIterResultObject(undefined, true));
+ }
+
+ reader._readRequests = [];
+ }
+
+ defaultReaderClosedPromiseResolve(reader);
+ return undefined;
+ }
+
+ function ReadableStreamError(stream, e) {
+ assert(IsReadableStream(stream) === true, 'stream must be ReadableStream');
+ assert(stream._state === 'readable', 'state must be readable');
+ stream._state = 'errored';
+ stream._storedError = e;
+ var reader = stream._reader;
+
+ if (reader === undefined) {
+ return undefined;
+ }
+
+ if (IsReadableStreamDefaultReader(reader) === true) {
+ for (var i = 0; i < reader._readRequests.length; i++) {
+ var readRequest = reader._readRequests[i];
+
+ readRequest._reject(e);
+ }
+
+ reader._readRequests = [];
+ } else {
+ assert(IsReadableStreamBYOBReader(reader), 'reader must be ReadableStreamBYOBReader');
+
+ for (var _i = 0; _i < reader._readIntoRequests.length; _i++) {
+ var readIntoRequest = reader._readIntoRequests[_i];
+
+ readIntoRequest._reject(e);
+ }
+
+ reader._readIntoRequests = [];
+ }
+
+ defaultReaderClosedPromiseReject(reader, e);
+
+ reader._closedPromise["catch"](function () {});
+ }
+
+ function ReadableStreamFulfillReadIntoRequest(stream, chunk, done) {
+ var reader = stream._reader;
+ assert(reader._readIntoRequests.length > 0);
+
+ var readIntoRequest = reader._readIntoRequests.shift();
+
+ readIntoRequest._resolve(CreateIterResultObject(chunk, done));
+ }
+
+ function ReadableStreamFulfillReadRequest(stream, chunk, done) {
+ var reader = stream._reader;
+ assert(reader._readRequests.length > 0);
+
+ var readRequest = reader._readRequests.shift();
+
+ readRequest._resolve(CreateIterResultObject(chunk, done));
+ }
+
+ function ReadableStreamGetNumReadIntoRequests(stream) {
+ return stream._reader._readIntoRequests.length;
+ }
+
+ function ReadableStreamGetNumReadRequests(stream) {
+ return stream._reader._readRequests.length;
+ }
+
+ function ReadableStreamHasBYOBReader(stream) {
+ var reader = stream._reader;
+
+ if (reader === undefined) {
+ return false;
+ }
+
+ if (IsReadableStreamBYOBReader(reader) === false) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function ReadableStreamHasDefaultReader(stream) {
+ var reader = stream._reader;
+
+ if (reader === undefined) {
+ return false;
+ }
+
+ if (IsReadableStreamDefaultReader(reader) === false) {
+ return false;
+ }
+
+ return true;
+ }
+
+ var ReadableStreamDefaultReader = function () {
+ function ReadableStreamDefaultReader(stream) {
+ _classCallCheck(this, ReadableStreamDefaultReader);
+
+ if (IsReadableStream(stream) === false) {
+ throw new TypeError('ReadableStreamDefaultReader can only be constructed with a ReadableStream instance');
+ }
+
+ if (IsReadableStreamLocked(stream) === true) {
+ throw new TypeError('This stream has already been locked for exclusive reading by another reader');
+ }
+
+ ReadableStreamReaderGenericInitialize(this, stream);
+ this._readRequests = [];
+ }
+
+ _createClass(ReadableStreamDefaultReader, [{
+ key: 'cancel',
+ value: function cancel(reason) {
+ if (IsReadableStreamDefaultReader(this) === false) {
+ return Promise.reject(defaultReaderBrandCheckException('cancel'));
+ }
+
+ if (this._ownerReadableStream === undefined) {
+ return Promise.reject(readerLockException('cancel'));
+ }
+
+ return ReadableStreamReaderGenericCancel(this, reason);
+ }
+ }, {
+ key: 'read',
+ value: function read() {
+ if (IsReadableStreamDefaultReader(this) === false) {
+ return Promise.reject(defaultReaderBrandCheckException('read'));
+ }
+
+ if (this._ownerReadableStream === undefined) {
+ return Promise.reject(readerLockException('read from'));
+ }
+
+ return ReadableStreamDefaultReaderRead(this);
+ }
+ }, {
+ key: 'releaseLock',
+ value: function releaseLock() {
+ if (IsReadableStreamDefaultReader(this) === false) {
+ throw defaultReaderBrandCheckException('releaseLock');
+ }
+
+ if (this._ownerReadableStream === undefined) {
+ return;
+ }
+
+ if (this._readRequests.length > 0) {
+ throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');
+ }
+
+ ReadableStreamReaderGenericRelease(this);
+ }
+ }, {
+ key: 'closed',
+ get: function get() {
+ if (IsReadableStreamDefaultReader(this) === false) {
+ return Promise.reject(defaultReaderBrandCheckException('closed'));
+ }
+
+ return this._closedPromise;
+ }
+ }]);
+
+ return ReadableStreamDefaultReader;
+ }();
+
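+ // ReadableStreamBYOBReader: the "bring your own buffer" reader for byte streams. read(view) fills a
+ // caller-supplied ArrayBuffer view; pending reads are tracked in _readIntoRequests.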
+ var ReadableStreamBYOBReader = function () {
+ function ReadableStreamBYOBReader(stream) {
+ _classCallCheck(this, ReadableStreamBYOBReader);
+
+ if (!IsReadableStream(stream)) {
+ throw new TypeError('ReadableStreamBYOBReader can only be constructed with a ReadableStream instance given a ' + 'byte source');
+ }
+
+ if (IsReadableByteStreamController(stream._readableStreamController) === false) {
+ throw new TypeError('Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte ' + 'source');
+ }
+
+ if (IsReadableStreamLocked(stream)) {
+ throw new TypeError('This stream has already been locked for exclusive reading by another reader');
+ }
+
+ ReadableStreamReaderGenericInitialize(this, stream);
+ this._readIntoRequests = [];
+ }
+
+ _createClass(ReadableStreamBYOBReader, [{
+ key: 'cancel',
+ value: function cancel(reason) {
+ if (!IsReadableStreamBYOBReader(this)) {
+ return Promise.reject(byobReaderBrandCheckException('cancel'));
+ }
+
+ if (this._ownerReadableStream === undefined) {
+ return Promise.reject(readerLockException('cancel'));
+ }
+
+ return ReadableStreamReaderGenericCancel(this, reason);
+ }
+ }, {
+ key: 'read',
+ value: function read(view) {
+ if (!IsReadableStreamBYOBReader(this)) {
+ return Promise.reject(byobReaderBrandCheckException('read'));
+ }
+
+ if (this._ownerReadableStream === undefined) {
+ return Promise.reject(readerLockException('read from'));
+ }
+
+ if (!ArrayBuffer.isView(view)) {
+ return Promise.reject(new TypeError('view must be an array buffer view'));
+ }
+
+ if (view.byteLength === 0) {
+ return Promise.reject(new TypeError('view must have non-zero byteLength'));
+ }
+
+ return ReadableStreamBYOBReaderRead(this, view);
+ }
+ }, {
+ key: 'releaseLock',
+ value: function releaseLock() {
+ if (!IsReadableStreamBYOBReader(this)) {
+ throw byobReaderBrandCheckException('releaseLock');
+ }
+
+ if (this._ownerReadableStream === undefined) {
+ return;
+ }
+
+ if (this._readIntoRequests.length > 0) {
+ throw new TypeError('Tried to release a reader lock when that reader has pending read() calls that are not yet settled');
+ }
+
+ ReadableStreamReaderGenericRelease(this);
+ }
+ }, {
+ key: 'closed',
+ get: function get() {
+ if (!IsReadableStreamBYOBReader(this)) {
+ return Promise.reject(byobReaderBrandCheckException('closed'));
+ }
+
+ return this._closedPromise;
+ }
+ }]);
+
+ return ReadableStreamBYOBReader;
+ }();
+
+ function IsReadableStreamBYOBReader(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_readIntoRequests')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function IsReadableStreamDefaultReader(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_readRequests')) {
+ return false;
+ }
+
+ return true;
+ }
+
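+ // Generic reader helpers shared by both reader types: lock initialization, cancellation and release.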
+ function ReadableStreamReaderGenericInitialize(reader, stream) {
+ reader._ownerReadableStream = stream;
+ stream._reader = reader;
+
+ if (stream._state === 'readable') {
+ defaultReaderClosedPromiseInitialize(reader);
+ } else if (stream._state === 'closed') {
+ defaultReaderClosedPromiseInitializeAsResolved(reader);
+ } else {
+ assert(stream._state === 'errored', 'state must be errored');
+ defaultReaderClosedPromiseInitializeAsRejected(reader, stream._storedError);
+
+ reader._closedPromise["catch"](function () {});
+ }
+ }
+
+ function ReadableStreamReaderGenericCancel(reader, reason) {
+ var stream = reader._ownerReadableStream;
+ assert(stream !== undefined);
+ return ReadableStreamCancel(stream, reason);
+ }
+
+ function ReadableStreamReaderGenericRelease(reader) {
+ assert(reader._ownerReadableStream !== undefined);
+ assert(reader._ownerReadableStream._reader === reader);
+
+ if (reader._ownerReadableStream._state === 'readable') {
+ defaultReaderClosedPromiseReject(reader, new TypeError('Reader was released and can no longer be used to monitor the stream\'s closedness'));
+ } else {
+ defaultReaderClosedPromiseResetToRejected(reader, new TypeError('Reader was released and can no longer be used to monitor the stream\'s closedness'));
+ }
+
+ reader._closedPromise["catch"](function () {});
+
+ reader._ownerReadableStream._reader = undefined;
+ reader._ownerReadableStream = undefined;
+ }
+
+ function ReadableStreamBYOBReaderRead(reader, view) {
+ var stream = reader._ownerReadableStream;
+ assert(stream !== undefined);
+ stream._disturbed = true;
+
+ if (stream._state === 'errored') {
+ return Promise.reject(stream._storedError);
+ }
+
+ return ReadableByteStreamControllerPullInto(stream._readableStreamController, view);
+ }
+
+ function ReadableStreamDefaultReaderRead(reader) {
+ var stream = reader._ownerReadableStream;
+ assert(stream !== undefined);
+ stream._disturbed = true;
+
+ if (stream._state === 'closed') {
+ return Promise.resolve(CreateIterResultObject(undefined, true));
+ }
+
+ if (stream._state === 'errored') {
+ return Promise.reject(stream._storedError);
+ }
+
+ assert(stream._state === 'readable');
+ return stream._readableStreamController.__pullSteps();
+ }
+
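+ // ReadableStreamDefaultController: drives a non-byte underlying source. It owns the chunk queue,
+ // applies the queuing strategy, and calls pull() whenever desiredSize is positive or a read is pending.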
+ var ReadableStreamDefaultController = function () {
+ function ReadableStreamDefaultController(stream, underlyingSource, size, highWaterMark) {
+ _classCallCheck(this, ReadableStreamDefaultController);
+
+ if (IsReadableStream(stream) === false) {
+ throw new TypeError('ReadableStreamDefaultController can only be constructed with a ReadableStream instance');
+ }
+
+ if (stream._readableStreamController !== undefined) {
+ throw new TypeError('ReadableStreamDefaultController instances can only be created by the ReadableStream constructor');
+ }
+
+ this._controlledReadableStream = stream;
+ this._underlyingSource = underlyingSource;
+ this._queue = undefined;
+ this._queueTotalSize = undefined;
+ ResetQueue(this);
+ this._started = false;
+ this._closeRequested = false;
+ this._pullAgain = false;
+ this._pulling = false;
+ var normalizedStrategy = ValidateAndNormalizeQueuingStrategy(size, highWaterMark);
+ this._strategySize = normalizedStrategy.size;
+ this._strategyHWM = normalizedStrategy.highWaterMark;
+ var controller = this;
+ var startResult = InvokeOrNoop(underlyingSource, 'start', [this]);
+ Promise.resolve(startResult).then(function () {
+ controller._started = true;
+ assert(controller._pulling === false);
+ assert(controller._pullAgain === false);
+ ReadableStreamDefaultControllerCallPullIfNeeded(controller);
+ }, function (r) {
+ ReadableStreamDefaultControllerErrorIfNeeded(controller, r);
+ })["catch"](rethrowAssertionErrorRejection);
+ }
+
+ _createClass(ReadableStreamDefaultController, [{
+ key: 'close',
+ value: function close() {
+ if (IsReadableStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('close');
+ }
+
+ if (this._closeRequested === true) {
+ throw new TypeError('The stream has already been closed; do not close it again!');
+ }
+
+ var state = this._controlledReadableStream._state;
+
+ if (state !== 'readable') {
+ throw new TypeError('The stream (in ' + state + ' state) is not in the readable state and cannot be closed');
+ }
+
+ ReadableStreamDefaultControllerClose(this);
+ }
+ }, {
+ key: 'enqueue',
+ value: function enqueue(chunk) {
+ if (IsReadableStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('enqueue');
+ }
+
+ if (this._closeRequested === true) {
+ throw new TypeError('stream is closed or draining');
+ }
+
+ var state = this._controlledReadableStream._state;
+
+ if (state !== 'readable') {
+ throw new TypeError('The stream (in ' + state + ' state) is not in the readable state and cannot be enqueued to');
+ }
+
+ return ReadableStreamDefaultControllerEnqueue(this, chunk);
+ }
+ }, {
+ key: 'error',
+ value: function error(e) {
+ if (IsReadableStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('error');
+ }
+
+ var stream = this._controlledReadableStream;
+
+ if (stream._state !== 'readable') {
+ throw new TypeError('The stream is ' + stream._state + ' and so cannot be errored');
+ }
+
+ ReadableStreamDefaultControllerError(this, e);
+ }
+ }, {
+ key: '__cancelSteps',
+ value: function __cancelSteps(reason) {
+ ResetQueue(this);
+ return PromiseInvokeOrNoop(this._underlyingSource, 'cancel', [reason]);
+ }
+ }, {
+ key: '__pullSteps',
+ value: function __pullSteps() {
+ var stream = this._controlledReadableStream;
+
+ if (this._queue.length > 0) {
+ var chunk = DequeueValue(this);
+
+ if (this._closeRequested === true && this._queue.length === 0) {
+ ReadableStreamClose(stream);
+ } else {
+ ReadableStreamDefaultControllerCallPullIfNeeded(this);
+ }
+
+ return Promise.resolve(CreateIterResultObject(chunk, false));
+ }
+
+ var pendingPromise = ReadableStreamAddReadRequest(stream);
+ ReadableStreamDefaultControllerCallPullIfNeeded(this);
+ return pendingPromise;
+ }
+ }, {
+ key: 'desiredSize',
+ get: function get() {
+ if (IsReadableStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('desiredSize');
+ }
+
+ return ReadableStreamDefaultControllerGetDesiredSize(this);
+ }
+ }]);
+
+ return ReadableStreamDefaultController;
+ }();
+
+ function IsReadableStreamDefaultController(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_underlyingSource')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function ReadableStreamDefaultControllerCallPullIfNeeded(controller) {
+ var shouldPull = ReadableStreamDefaultControllerShouldCallPull(controller);
+
+ if (shouldPull === false) {
+ return undefined;
+ }
+
+ if (controller._pulling === true) {
+ controller._pullAgain = true;
+ return undefined;
+ }
+
+ assert(controller._pullAgain === false);
+ controller._pulling = true;
+ var pullPromise = PromiseInvokeOrNoop(controller._underlyingSource, 'pull', [controller]);
+ pullPromise.then(function () {
+ controller._pulling = false;
+
+ if (controller._pullAgain === true) {
+ controller._pullAgain = false;
+ return ReadableStreamDefaultControllerCallPullIfNeeded(controller);
+ }
+
+ return undefined;
+ }, function (e) {
+ ReadableStreamDefaultControllerErrorIfNeeded(controller, e);
+ })["catch"](rethrowAssertionErrorRejection);
+ return undefined;
+ }
+
+ function ReadableStreamDefaultControllerShouldCallPull(controller) {
+ var stream = controller._controlledReadableStream;
+
+ if (stream._state === 'closed' || stream._state === 'errored') {
+ return false;
+ }
+
+ if (controller._closeRequested === true) {
+ return false;
+ }
+
+ if (controller._started === false) {
+ return false;
+ }
+
+ if (IsReadableStreamLocked(stream) === true && ReadableStreamGetNumReadRequests(stream) > 0) {
+ return true;
+ }
+
+ var desiredSize = ReadableStreamDefaultControllerGetDesiredSize(controller);
+
+ if (desiredSize > 0) {
+ return true;
+ }
+
+ return false;
+ }
+
+ function ReadableStreamDefaultControllerClose(controller) {
+ var stream = controller._controlledReadableStream;
+ assert(controller._closeRequested === false);
+ assert(stream._state === 'readable');
+ controller._closeRequested = true;
+
+ if (controller._queue.length === 0) {
+ ReadableStreamClose(stream);
+ }
+ }
+
+ function ReadableStreamDefaultControllerEnqueue(controller, chunk) {
+ var stream = controller._controlledReadableStream;
+ assert(controller._closeRequested === false);
+ assert(stream._state === 'readable');
+
+ if (IsReadableStreamLocked(stream) === true && ReadableStreamGetNumReadRequests(stream) > 0) {
+ ReadableStreamFulfillReadRequest(stream, chunk, false);
+ } else {
+ var chunkSize = 1;
+
+ if (controller._strategySize !== undefined) {
+ var strategySize = controller._strategySize;
+
+ try {
+ chunkSize = strategySize(chunk);
+ } catch (chunkSizeE) {
+ ReadableStreamDefaultControllerErrorIfNeeded(controller, chunkSizeE);
+ throw chunkSizeE;
+ }
+ }
+
+ try {
+ EnqueueValueWithSize(controller, chunk, chunkSize);
+ } catch (enqueueE) {
+ ReadableStreamDefaultControllerErrorIfNeeded(controller, enqueueE);
+ throw enqueueE;
+ }
+ }
+
+ ReadableStreamDefaultControllerCallPullIfNeeded(controller);
+ return undefined;
+ }
+
+ function ReadableStreamDefaultControllerError(controller, e) {
+ var stream = controller._controlledReadableStream;
+ assert(stream._state === 'readable');
+ ResetQueue(controller);
+ ReadableStreamError(stream, e);
+ }
+
+ function ReadableStreamDefaultControllerErrorIfNeeded(controller, e) {
+ if (controller._controlledReadableStream._state === 'readable') {
+ ReadableStreamDefaultControllerError(controller, e);
+ }
+ }
+
+ function ReadableStreamDefaultControllerGetDesiredSize(controller) {
+ var stream = controller._controlledReadableStream;
+ var state = stream._state;
+
+ if (state === 'errored') {
+ return null;
+ }
+
+ if (state === 'closed') {
+ return 0;
+ }
+
+ return controller._strategyHWM - controller._queueTotalSize;
+ }
+
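+ // ReadableStreamBYOBRequest: wraps the head pull-into descriptor so a byte source can write directly
+ // into the reader's buffer and then call respond()/respondWithNewView().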
+ var ReadableStreamBYOBRequest = function () {
+ function ReadableStreamBYOBRequest(controller, view) {
+ _classCallCheck(this, ReadableStreamBYOBRequest);
+
+ this._associatedReadableByteStreamController = controller;
+ this._view = view;
+ }
+
+ _createClass(ReadableStreamBYOBRequest, [{
+ key: 'respond',
+ value: function respond(bytesWritten) {
+ if (IsReadableStreamBYOBRequest(this) === false) {
+ throw byobRequestBrandCheckException('respond');
+ }
+
+ if (this._associatedReadableByteStreamController === undefined) {
+ throw new TypeError('This BYOB request has been invalidated');
+ }
+
+ ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten);
+ }
+ }, {
+ key: 'respondWithNewView',
+ value: function respondWithNewView(view) {
+ if (IsReadableStreamBYOBRequest(this) === false) {
+ throw byobRequestBrandCheckException('respondWithNewView');
+ }
+
+ if (this._associatedReadableByteStreamController === undefined) {
+ throw new TypeError('This BYOB request has been invalidated');
+ }
+
+ if (!ArrayBuffer.isView(view)) {
+ throw new TypeError('You can only respond with array buffer views');
+ }
+
+ ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view);
+ }
+ }, {
+ key: 'view',
+ get: function get() {
+ return this._view;
+ }
+ }]);
+
+ return ReadableStreamBYOBRequest;
+ }();
+
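+ // ReadableByteStreamController: the byte-stream counterpart of the default controller. Its queue holds
+ // {buffer, byteOffset, byteLength} entries, and _pendingPullIntos tracks outstanding BYOB descriptors.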
+ var ReadableByteStreamController = function () {
+ function ReadableByteStreamController(stream, underlyingByteSource, highWaterMark) {
+ _classCallCheck(this, ReadableByteStreamController);
+
+ if (IsReadableStream(stream) === false) {
+ throw new TypeError('ReadableByteStreamController can only be constructed with a ReadableStream instance given ' + 'a byte source');
+ }
+
+ if (stream._readableStreamController !== undefined) {
+ throw new TypeError('ReadableByteStreamController instances can only be created by the ReadableStream constructor given a byte ' + 'source');
+ }
+
+ this._controlledReadableStream = stream;
+ this._underlyingByteSource = underlyingByteSource;
+ this._pullAgain = false;
+ this._pulling = false;
+ ReadableByteStreamControllerClearPendingPullIntos(this);
+ this._queue = this._queueTotalSize = undefined;
+ ResetQueue(this);
+ this._closeRequested = false;
+ this._started = false;
+ this._strategyHWM = ValidateAndNormalizeHighWaterMark(highWaterMark);
+ var autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize;
+
+ if (autoAllocateChunkSize !== undefined) {
+ if (Number.isInteger(autoAllocateChunkSize) === false || autoAllocateChunkSize <= 0) {
+ throw new RangeError('autoAllocateChunkSize must be a positive integer');
+ }
+ }
+
+ this._autoAllocateChunkSize = autoAllocateChunkSize;
+ this._pendingPullIntos = [];
+ var controller = this;
+ var startResult = InvokeOrNoop(underlyingByteSource, 'start', [this]);
+ Promise.resolve(startResult).then(function () {
+ controller._started = true;
+ assert(controller._pulling === false);
+ assert(controller._pullAgain === false);
+ ReadableByteStreamControllerCallPullIfNeeded(controller);
+ }, function (r) {
+ if (stream._state === 'readable') {
+ ReadableByteStreamControllerError(controller, r);
+ }
+ })["catch"](rethrowAssertionErrorRejection);
+ }
+
+ _createClass(ReadableByteStreamController, [{
+ key: 'close',
+ value: function close() {
+ if (IsReadableByteStreamController(this) === false) {
+ throw byteStreamControllerBrandCheckException('close');
+ }
+
+ if (this._closeRequested === true) {
+ throw new TypeError('The stream has already been closed; do not close it again!');
+ }
+
+ var state = this._controlledReadableStream._state;
+
+ if (state !== 'readable') {
+ throw new TypeError('The stream (in ' + state + ' state) is not in the readable state and cannot be closed');
+ }
+
+ ReadableByteStreamControllerClose(this);
+ }
+ }, {
+ key: 'enqueue',
+ value: function enqueue(chunk) {
+ if (IsReadableByteStreamController(this) === false) {
+ throw byteStreamControllerBrandCheckException('enqueue');
+ }
+
+ if (this._closeRequested === true) {
+ throw new TypeError('stream is closed or draining');
+ }
+
+ var state = this._controlledReadableStream._state;
+
+ if (state !== 'readable') {
+ throw new TypeError('The stream (in ' + state + ' state) is not in the readable state and cannot be enqueued to');
+ }
+
+ if (!ArrayBuffer.isView(chunk)) {
+ throw new TypeError('You can only enqueue array buffer views when using a ReadableByteStreamController');
+ }
+
+ ReadableByteStreamControllerEnqueue(this, chunk);
+ }
+ }, {
+ key: 'error',
+ value: function error(e) {
+ if (IsReadableByteStreamController(this) === false) {
+ throw byteStreamControllerBrandCheckException('error');
+ }
+
+ var stream = this._controlledReadableStream;
+
+ if (stream._state !== 'readable') {
+ throw new TypeError('The stream is ' + stream._state + ' and so cannot be errored');
+ }
+
+ ReadableByteStreamControllerError(this, e);
+ }
+ }, {
+ key: '__cancelSteps',
+ value: function __cancelSteps(reason) {
+ if (this._pendingPullIntos.length > 0) {
+ var firstDescriptor = this._pendingPullIntos[0];
+ firstDescriptor.bytesFilled = 0;
+ }
+
+ ResetQueue(this);
+ return PromiseInvokeOrNoop(this._underlyingByteSource, 'cancel', [reason]);
+ }
+ }, {
+ key: '__pullSteps',
+ value: function __pullSteps() {
+ var stream = this._controlledReadableStream;
+ assert(ReadableStreamHasDefaultReader(stream) === true);
+
+ if (this._queueTotalSize > 0) {
+ assert(ReadableStreamGetNumReadRequests(stream) === 0);
+
+ var entry = this._queue.shift();
+
+ this._queueTotalSize -= entry.byteLength;
+ ReadableByteStreamControllerHandleQueueDrain(this);
+ var view = void 0;
+
+ try {
+ view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength);
+ } catch (viewE) {
+ return Promise.reject(viewE);
+ }
+
+ return Promise.resolve(CreateIterResultObject(view, false));
+ }
+
+ var autoAllocateChunkSize = this._autoAllocateChunkSize;
+
+ if (autoAllocateChunkSize !== undefined) {
+ var buffer = void 0;
+
+ try {
+ buffer = new ArrayBuffer(autoAllocateChunkSize);
+ } catch (bufferE) {
+ return Promise.reject(bufferE);
+ }
+
+ var pullIntoDescriptor = {
+ buffer: buffer,
+ byteOffset: 0,
+ byteLength: autoAllocateChunkSize,
+ bytesFilled: 0,
+ elementSize: 1,
+ ctor: Uint8Array,
+ readerType: 'default'
+ };
+
+ this._pendingPullIntos.push(pullIntoDescriptor);
+ }
+
+ var promise = ReadableStreamAddReadRequest(stream);
+ ReadableByteStreamControllerCallPullIfNeeded(this);
+ return promise;
+ }
+ }, {
+ key: 'byobRequest',
+ get: function get() {
+ if (IsReadableByteStreamController(this) === false) {
+ throw byteStreamControllerBrandCheckException('byobRequest');
+ }
+
+ if (this._byobRequest === undefined && this._pendingPullIntos.length > 0) {
+ var firstDescriptor = this._pendingPullIntos[0];
+ var view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled);
+ this._byobRequest = new ReadableStreamBYOBRequest(this, view);
+ }
+
+ return this._byobRequest;
+ }
+ }, {
+ key: 'desiredSize',
+ get: function get() {
+ if (IsReadableByteStreamController(this) === false) {
+ throw byteStreamControllerBrandCheckException('desiredSize');
+ }
+
+ return ReadableByteStreamControllerGetDesiredSize(this);
+ }
+ }]);
+
+ return ReadableByteStreamController;
+ }();
+
+ function IsReadableByteStreamController(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_underlyingByteSource')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function IsReadableStreamBYOBRequest(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_associatedReadableByteStreamController')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function ReadableByteStreamControllerCallPullIfNeeded(controller) {
+ var shouldPull = ReadableByteStreamControllerShouldCallPull(controller);
+
+ if (shouldPull === false) {
+ return undefined;
+ }
+
+ if (controller._pulling === true) {
+ controller._pullAgain = true;
+ return undefined;
+ }
+
+ assert(controller._pullAgain === false);
+ controller._pulling = true;
+ var pullPromise = PromiseInvokeOrNoop(controller._underlyingByteSource, 'pull', [controller]);
+ pullPromise.then(function () {
+ controller._pulling = false;
+
+ if (controller._pullAgain === true) {
+ controller._pullAgain = false;
+ ReadableByteStreamControllerCallPullIfNeeded(controller);
+ }
+ }, function (e) {
+ if (controller._controlledReadableStream._state === 'readable') {
+ ReadableByteStreamControllerError(controller, e);
+ }
+ })["catch"](rethrowAssertionErrorRejection);
+ return undefined;
+ }
+
+ function ReadableByteStreamControllerClearPendingPullIntos(controller) {
+ ReadableByteStreamControllerInvalidateBYOBRequest(controller);
+ controller._pendingPullIntos = [];
+ }
+
+ function ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor) {
+ assert(stream._state !== 'errored', 'state must not be errored');
+ var done = false;
+
+ if (stream._state === 'closed') {
+ assert(pullIntoDescriptor.bytesFilled === 0);
+ done = true;
+ }
+
+ var filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);
+
+ if (pullIntoDescriptor.readerType === 'default') {
+ ReadableStreamFulfillReadRequest(stream, filledView, done);
+ } else {
+ assert(pullIntoDescriptor.readerType === 'byob');
+ ReadableStreamFulfillReadIntoRequest(stream, filledView, done);
+ }
+ }
+
+ function ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor) {
+ var bytesFilled = pullIntoDescriptor.bytesFilled;
+ var elementSize = pullIntoDescriptor.elementSize;
+ assert(bytesFilled <= pullIntoDescriptor.byteLength);
+ assert(bytesFilled % elementSize === 0);
+ return new pullIntoDescriptor.ctor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, bytesFilled / elementSize);
+ }
+
+ function ReadableByteStreamControllerEnqueueChunkToQueue(controller, buffer, byteOffset, byteLength) {
+ controller._queue.push({
+ buffer: buffer,
+ byteOffset: byteOffset,
+ byteLength: byteLength
+ });
+
+ controller._queueTotalSize += byteLength;
+ }
+
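+ // Copies queued bytes into the head pull-into descriptor, respecting element alignment; returns true
+ // once the descriptor holds at least one newly completed element.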
+ function ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) {
+ var elementSize = pullIntoDescriptor.elementSize;
+ var currentAlignedBytes = pullIntoDescriptor.bytesFilled - pullIntoDescriptor.bytesFilled % elementSize;
+ var maxBytesToCopy = Math.min(controller._queueTotalSize, pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled);
+ var maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy;
+ var maxAlignedBytes = maxBytesFilled - maxBytesFilled % elementSize;
+ var totalBytesToCopyRemaining = maxBytesToCopy;
+ var ready = false;
+
+ if (maxAlignedBytes > currentAlignedBytes) {
+ totalBytesToCopyRemaining = maxAlignedBytes - pullIntoDescriptor.bytesFilled;
+ ready = true;
+ }
+
+ var queue = controller._queue;
+
+ while (totalBytesToCopyRemaining > 0) {
+ var headOfQueue = queue[0];
+ var bytesToCopy = Math.min(totalBytesToCopyRemaining, headOfQueue.byteLength);
+ var destStart = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;
+ ArrayBufferCopy(pullIntoDescriptor.buffer, destStart, headOfQueue.buffer, headOfQueue.byteOffset, bytesToCopy);
+
+ if (headOfQueue.byteLength === bytesToCopy) {
+ queue.shift();
+ } else {
+ headOfQueue.byteOffset += bytesToCopy;
+ headOfQueue.byteLength -= bytesToCopy;
+ }
+
+ controller._queueTotalSize -= bytesToCopy;
+ ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesToCopy, pullIntoDescriptor);
+ totalBytesToCopyRemaining -= bytesToCopy;
+ }
+
+ if (ready === false) {
+ assert(controller._queueTotalSize === 0, 'queue must be empty');
+ assert(pullIntoDescriptor.bytesFilled > 0);
+ assert(pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize);
+ }
+
+ return ready;
+ }
+
+ function ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, size, pullIntoDescriptor) {
+ assert(controller._pendingPullIntos.length === 0 || controller._pendingPullIntos[0] === pullIntoDescriptor);
+ ReadableByteStreamControllerInvalidateBYOBRequest(controller);
+ pullIntoDescriptor.bytesFilled += size;
+ }
+
+ function ReadableByteStreamControllerHandleQueueDrain(controller) {
+ assert(controller._controlledReadableStream._state === 'readable');
+
+ if (controller._queueTotalSize === 0 && controller._closeRequested === true) {
+ ReadableStreamClose(controller._controlledReadableStream);
+ } else {
+ ReadableByteStreamControllerCallPullIfNeeded(controller);
+ }
+ }
+
+ function ReadableByteStreamControllerInvalidateBYOBRequest(controller) {
+ if (controller._byobRequest === undefined) {
+ return;
+ }
+
+ controller._byobRequest._associatedReadableByteStreamController = undefined;
+ controller._byobRequest._view = undefined;
+ controller._byobRequest = undefined;
+ }
+
+ function ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller) {
+ assert(controller._closeRequested === false);
+
+ while (controller._pendingPullIntos.length > 0) {
+ if (controller._queueTotalSize === 0) {
+ return;
+ }
+
+ var pullIntoDescriptor = controller._pendingPullIntos[0];
+
+ if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) === true) {
+ ReadableByteStreamControllerShiftPendingPullInto(controller);
+ ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableStream, pullIntoDescriptor);
+ }
+ }
+ }
+
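+ // Handles a BYOB read: builds a pull-into descriptor for the caller's view, serves it immediately from
+ // the queue when enough bytes are buffered, and otherwise transfers the buffer, queues the descriptor
+ // and waits for the underlying byte source to respond.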
+ function ReadableByteStreamControllerPullInto(controller, view) {
+ var stream = controller._controlledReadableStream;
+ var elementSize = 1;
+
+ if (view.constructor !== DataView) {
+ elementSize = view.constructor.BYTES_PER_ELEMENT;
+ }
+
+ var ctor = view.constructor;
+ var pullIntoDescriptor = {
+ buffer: view.buffer,
+ byteOffset: view.byteOffset,
+ byteLength: view.byteLength,
+ bytesFilled: 0,
+ elementSize: elementSize,
+ ctor: ctor,
+ readerType: 'byob'
+ };
+
+ if (controller._pendingPullIntos.length > 0) {
+ pullIntoDescriptor.buffer = TransferArrayBuffer(pullIntoDescriptor.buffer);
+
+ controller._pendingPullIntos.push(pullIntoDescriptor);
+
+ return ReadableStreamAddReadIntoRequest(stream);
+ }
+
+ if (stream._state === 'closed') {
+ var emptyView = new view.constructor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, 0);
+ return Promise.resolve(CreateIterResultObject(emptyView, true));
+ }
+
+ if (controller._queueTotalSize > 0) {
+ if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) === true) {
+ var filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);
+ ReadableByteStreamControllerHandleQueueDrain(controller);
+ return Promise.resolve(CreateIterResultObject(filledView, false));
+ }
+
+ if (controller._closeRequested === true) {
+ var e = new TypeError('Insufficient bytes to fill elements in the given buffer');
+ ReadableByteStreamControllerError(controller, e);
+ return Promise.reject(e);
+ }
+ }
+
+ pullIntoDescriptor.buffer = TransferArrayBuffer(pullIntoDescriptor.buffer);
+
+ controller._pendingPullIntos.push(pullIntoDescriptor);
+
+ var promise = ReadableStreamAddReadIntoRequest(stream);
+ ReadableByteStreamControllerCallPullIfNeeded(controller);
+ return promise;
+ }
+
+ function ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor) {
+ firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer);
+ assert(firstDescriptor.bytesFilled === 0, 'bytesFilled must be 0');
+ var stream = controller._controlledReadableStream;
+
+ if (ReadableStreamHasBYOBReader(stream) === true) {
+ while (ReadableStreamGetNumReadIntoRequests(stream) > 0) {
+ var pullIntoDescriptor = ReadableByteStreamControllerShiftPendingPullInto(controller);
+ ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor);
+ }
+ }
+ }
+
+ function ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor) {
+ if (pullIntoDescriptor.bytesFilled + bytesWritten > pullIntoDescriptor.byteLength) {
+ throw new RangeError('bytesWritten out of range');
+ }
+
+ ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesWritten, pullIntoDescriptor);
+
+ if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize) {
+ return;
+ }
+
+ ReadableByteStreamControllerShiftPendingPullInto(controller);
+ var remainderSize = pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize;
+
+ if (remainderSize > 0) {
+ var end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;
+ var remainder = pullIntoDescriptor.buffer.slice(end - remainderSize, end);
+ ReadableByteStreamControllerEnqueueChunkToQueue(controller, remainder, 0, remainder.byteLength);
+ }
+
+ pullIntoDescriptor.buffer = TransferArrayBuffer(pullIntoDescriptor.buffer);
+ pullIntoDescriptor.bytesFilled -= remainderSize;
+ ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableStream, pullIntoDescriptor);
+ ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);
+ }
+
+ function ReadableByteStreamControllerRespondInternal(controller, bytesWritten) {
+ var firstDescriptor = controller._pendingPullIntos[0];
+ var stream = controller._controlledReadableStream;
+
+ if (stream._state === 'closed') {
+ if (bytesWritten !== 0) {
+ throw new TypeError('bytesWritten must be 0 when calling respond() on a closed stream');
+ }
+
+ ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor);
+ } else {
+ assert(stream._state === 'readable');
+ ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, firstDescriptor);
+ }
+ }
+
+ function ReadableByteStreamControllerShiftPendingPullInto(controller) {
+ var descriptor = controller._pendingPullIntos.shift();
+
+ ReadableByteStreamControllerInvalidateBYOBRequest(controller);
+ return descriptor;
+ }
+
+ function ReadableByteStreamControllerShouldCallPull(controller) {
+ var stream = controller._controlledReadableStream;
+
+ if (stream._state !== 'readable') {
+ return false;
+ }
+
+ if (controller._closeRequested === true) {
+ return false;
+ }
+
+ if (controller._started === false) {
+ return false;
+ }
+
+ if (ReadableStreamHasDefaultReader(stream) === true && ReadableStreamGetNumReadRequests(stream) > 0) {
+ return true;
+ }
+
+ if (ReadableStreamHasBYOBReader(stream) === true && ReadableStreamGetNumReadIntoRequests(stream) > 0) {
+ return true;
+ }
+
+ if (ReadableByteStreamControllerGetDesiredSize(controller) > 0) {
+ return true;
+ }
+
+ return false;
+ }
+
+ function ReadableByteStreamControllerClose(controller) {
+ var stream = controller._controlledReadableStream;
+ assert(controller._closeRequested === false);
+ assert(stream._state === 'readable');
+
+ if (controller._queueTotalSize > 0) {
+ controller._closeRequested = true;
+ return;
+ }
+
+ if (controller._pendingPullIntos.length > 0) {
+ var firstPendingPullInto = controller._pendingPullIntos[0];
+
+ if (firstPendingPullInto.bytesFilled > 0) {
+ var e = new TypeError('Insufficient bytes to fill elements in the given buffer');
+ ReadableByteStreamControllerError(controller, e);
+ throw e;
+ }
+ }
+
+ ReadableStreamClose(stream);
+ }
+
+ function ReadableByteStreamControllerEnqueue(controller, chunk) {
+ var stream = controller._controlledReadableStream;
+ assert(controller._closeRequested === false);
+ assert(stream._state === 'readable');
+ var buffer = chunk.buffer;
+ var byteOffset = chunk.byteOffset;
+ var byteLength = chunk.byteLength;
+ var transferredBuffer = TransferArrayBuffer(buffer);
+
+ if (ReadableStreamHasDefaultReader(stream) === true) {
+ if (ReadableStreamGetNumReadRequests(stream) === 0) {
+ ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);
+ } else {
+ assert(controller._queue.length === 0);
+ var transferredView = new Uint8Array(transferredBuffer, byteOffset, byteLength);
+ ReadableStreamFulfillReadRequest(stream, transferredView, false);
+ }
+ } else if (ReadableStreamHasBYOBReader(stream) === true) {
+ ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);
+ ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);
+ } else {
+ assert(IsReadableStreamLocked(stream) === false, 'stream must not be locked');
+ ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);
+ }
+ }
+
+ function ReadableByteStreamControllerError(controller, e) {
+ var stream = controller._controlledReadableStream;
+ assert(stream._state === 'readable');
+ ReadableByteStreamControllerClearPendingPullIntos(controller);
+ ResetQueue(controller);
+ ReadableStreamError(stream, e);
+ }
+
+ function ReadableByteStreamControllerGetDesiredSize(controller) {
+ var stream = controller._controlledReadableStream;
+ var state = stream._state;
+
+ if (state === 'errored') {
+ return null;
+ }
+
+ if (state === 'closed') {
+ return 0;
+ }
+
+ return controller._strategyHWM - controller._queueTotalSize;
+ }
+
+ function ReadableByteStreamControllerRespond(controller, bytesWritten) {
+ bytesWritten = Number(bytesWritten);
+
+ if (IsFiniteNonNegativeNumber(bytesWritten) === false) {
+ throw new RangeError('bytesWritten must be a finite, non-negative number');
+ }
+
+ assert(controller._pendingPullIntos.length > 0);
+ ReadableByteStreamControllerRespondInternal(controller, bytesWritten);
+ }
+
+ function ReadableByteStreamControllerRespondWithNewView(controller, view) {
+ assert(controller._pendingPullIntos.length > 0);
+ var firstDescriptor = controller._pendingPullIntos[0];
+
+ if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) {
+ throw new RangeError('The region specified by view does not match byobRequest');
+ }
+
+ if (firstDescriptor.byteLength !== view.byteLength) {
+ throw new RangeError('The buffer of view has a different capacity than byobRequest');
+ }
+
+ firstDescriptor.buffer = view.buffer;
+ ReadableByteStreamControllerRespondInternal(controller, view.byteLength);
+ }
+
+ function streamBrandCheckException(name) {
+ return new TypeError('ReadableStream.prototype.' + name + ' can only be used on a ReadableStream');
+ }
+
+ function readerLockException(name) {
+ return new TypeError('Cannot ' + name + ' a stream using a released reader');
+ }
+
+ function defaultReaderBrandCheckException(name) {
+ return new TypeError('ReadableStreamDefaultReader.prototype.' + name + ' can only be used on a ReadableStreamDefaultReader');
+ }
+
+ function defaultReaderClosedPromiseInitialize(reader) {
+ reader._closedPromise = new Promise(function (resolve, reject) {
+ reader._closedPromise_resolve = resolve;
+ reader._closedPromise_reject = reject;
+ });
+ }
+
+ function defaultReaderClosedPromiseInitializeAsRejected(reader, reason) {
+ reader._closedPromise = Promise.reject(reason);
+ reader._closedPromise_resolve = undefined;
+ reader._closedPromise_reject = undefined;
+ }
+
+ function defaultReaderClosedPromiseInitializeAsResolved(reader) {
+ reader._closedPromise = Promise.resolve(undefined);
+ reader._closedPromise_resolve = undefined;
+ reader._closedPromise_reject = undefined;
+ }
+
+ function defaultReaderClosedPromiseReject(reader, reason) {
+ assert(reader._closedPromise_resolve !== undefined);
+ assert(reader._closedPromise_reject !== undefined);
+
+ reader._closedPromise_reject(reason);
+
+ reader._closedPromise_resolve = undefined;
+ reader._closedPromise_reject = undefined;
+ }
+
+ function defaultReaderClosedPromiseResetToRejected(reader, reason) {
+ assert(reader._closedPromise_resolve === undefined);
+ assert(reader._closedPromise_reject === undefined);
+ reader._closedPromise = Promise.reject(reason);
+ }
+
+ function defaultReaderClosedPromiseResolve(reader) {
+ assert(reader._closedPromise_resolve !== undefined);
+ assert(reader._closedPromise_reject !== undefined);
+
+ reader._closedPromise_resolve(undefined);
+
+ reader._closedPromise_resolve = undefined;
+ reader._closedPromise_reject = undefined;
+ }
+
+ function byobReaderBrandCheckException(name) {
+ return new TypeError('ReadableStreamBYOBReader.prototype.' + name + ' can only be used on a ReadableStreamBYOBReader');
+ }
+
+ function defaultControllerBrandCheckException(name) {
+ return new TypeError('ReadableStreamDefaultController.prototype.' + name + ' can only be used on a ReadableStreamDefaultController');
+ }
+
+ function byobRequestBrandCheckException(name) {
+ return new TypeError('ReadableStreamBYOBRequest.prototype.' + name + ' can only be used on a ReadableStreamBYOBRequest');
+ }
+
+ function byteStreamControllerBrandCheckException(name) {
+ return new TypeError('ReadableByteStreamController.prototype.' + name + ' can only be used on a ReadableByteStreamController');
+ }
+
+ function ifIsObjectAndHasAPromiseIsHandledInternalSlotSetPromiseIsHandledToTrue(promise) {
+ try {
+ Promise.prototype.then.call(promise, undefined, function () {});
+ } catch (e) {}
+ }
+}, function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
+ var transformStream = __w_pdfjs_require__(6);
+
+ var readableStream = __w_pdfjs_require__(4);
+
+ var writableStream = __w_pdfjs_require__(2);
+
+ exports.TransformStream = transformStream.TransformStream;
+ exports.ReadableStream = readableStream.ReadableStream;
+ exports.IsReadableStreamDisturbed = readableStream.IsReadableStreamDisturbed;
+ exports.ReadableStreamDefaultControllerClose = readableStream.ReadableStreamDefaultControllerClose;
+ exports.ReadableStreamDefaultControllerEnqueue = readableStream.ReadableStreamDefaultControllerEnqueue;
+ exports.ReadableStreamDefaultControllerError = readableStream.ReadableStreamDefaultControllerError;
+ exports.ReadableStreamDefaultControllerGetDesiredSize = readableStream.ReadableStreamDefaultControllerGetDesiredSize;
+ exports.AcquireWritableStreamDefaultWriter = writableStream.AcquireWritableStreamDefaultWriter;
+ exports.IsWritableStream = writableStream.IsWritableStream;
+ exports.IsWritableStreamLocked = writableStream.IsWritableStreamLocked;
+ exports.WritableStream = writableStream.WritableStream;
+ exports.WritableStreamAbort = writableStream.WritableStreamAbort;
+ exports.WritableStreamDefaultControllerError = writableStream.WritableStreamDefaultControllerError;
+ exports.WritableStreamDefaultWriterCloseWithErrorPropagation = writableStream.WritableStreamDefaultWriterCloseWithErrorPropagation;
+ exports.WritableStreamDefaultWriterRelease = writableStream.WritableStreamDefaultWriterRelease;
+ exports.WritableStreamDefaultWriterWrite = writableStream.WritableStreamDefaultWriterWrite;
+}, function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
+ var _createClass = function () {
+ function defineProperties(target, props) {
+ for (var i = 0; i < props.length; i++) {
+ var descriptor = props[i];
+ descriptor.enumerable = descriptor.enumerable || false;
+ descriptor.configurable = true;
+ if ("value" in descriptor) descriptor.writable = true;
+ Object.defineProperty(target, descriptor.key, descriptor);
+ }
+ }
+
+ return function (Constructor, protoProps, staticProps) {
+ if (protoProps) defineProperties(Constructor.prototype, protoProps);
+ if (staticProps) defineProperties(Constructor, staticProps);
+ return Constructor;
+ };
+ }();
+
+ function _classCallCheck(instance, Constructor) {
+ if (!(instance instanceof Constructor)) {
+ throw new TypeError("Cannot call a class as a function");
+ }
+ }
+
+ var _require = __w_pdfjs_require__(1),
+ assert = _require.assert;
+
+ var _require2 = __w_pdfjs_require__(0),
+ InvokeOrNoop = _require2.InvokeOrNoop,
+ PromiseInvokeOrPerformFallback = _require2.PromiseInvokeOrPerformFallback,
+ PromiseInvokeOrNoop = _require2.PromiseInvokeOrNoop,
+ typeIsObject = _require2.typeIsObject;
+
+ var _require3 = __w_pdfjs_require__(4),
+ ReadableStream = _require3.ReadableStream,
+ ReadableStreamDefaultControllerClose = _require3.ReadableStreamDefaultControllerClose,
+ ReadableStreamDefaultControllerEnqueue = _require3.ReadableStreamDefaultControllerEnqueue,
+ ReadableStreamDefaultControllerError = _require3.ReadableStreamDefaultControllerError,
+ ReadableStreamDefaultControllerGetDesiredSize = _require3.ReadableStreamDefaultControllerGetDesiredSize;
+
+ var _require4 = __w_pdfjs_require__(2),
+ WritableStream = _require4.WritableStream,
+ WritableStreamDefaultControllerError = _require4.WritableStreamDefaultControllerError;
+
+ function TransformStreamCloseReadable(transformStream) {
+ if (transformStream._errored === true) {
+ throw new TypeError('TransformStream is already errored');
+ }
+
+ if (transformStream._readableClosed === true) {
+ throw new TypeError('Readable side is already closed');
+ }
+
+ TransformStreamCloseReadableInternal(transformStream);
+ }
+
+ function TransformStreamEnqueueToReadable(transformStream, chunk) {
+ if (transformStream._errored === true) {
+ throw new TypeError('TransformStream is already errored');
+ }
+
+ if (transformStream._readableClosed === true) {
+ throw new TypeError('Readable side is already closed');
+ }
+
+ var controller = transformStream._readableController;
+
+ try {
+ ReadableStreamDefaultControllerEnqueue(controller, chunk);
+ } catch (e) {
+ transformStream._readableClosed = true;
+ TransformStreamErrorIfNeeded(transformStream, e);
+ throw transformStream._storedError;
+ }
+
+ var desiredSize = ReadableStreamDefaultControllerGetDesiredSize(controller);
+ var maybeBackpressure = desiredSize <= 0;
+
+ if (maybeBackpressure === true && transformStream._backpressure === false) {
+ TransformStreamSetBackpressure(transformStream, true);
+ }
+ }
+
+ function TransformStreamError(transformStream, e) {
+ if (transformStream._errored === true) {
+ throw new TypeError('TransformStream is already errored');
+ }
+
+ TransformStreamErrorInternal(transformStream, e);
+ }
+
+ function TransformStreamCloseReadableInternal(transformStream) {
+ assert(transformStream._errored === false);
+ assert(transformStream._readableClosed === false);
+
+ try {
+ ReadableStreamDefaultControllerClose(transformStream._readableController);
+ } catch (e) {
+ assert(false);
+ }
+
+ transformStream._readableClosed = true;
+ }
+
+ function TransformStreamErrorIfNeeded(transformStream, e) {
+ if (transformStream._errored === false) {
+ TransformStreamErrorInternal(transformStream, e);
+ }
+ }
+
+ function TransformStreamErrorInternal(transformStream, e) {
+ assert(transformStream._errored === false);
+ transformStream._errored = true;
+ transformStream._storedError = e;
+
+ if (transformStream._writableDone === false) {
+ WritableStreamDefaultControllerError(transformStream._writableController, e);
+ }
+
+ if (transformStream._readableClosed === false) {
+ ReadableStreamDefaultControllerError(transformStream._readableController, e);
+ }
+ }
+
+ function TransformStreamReadableReadyPromise(transformStream) {
+ assert(transformStream._backpressureChangePromise !== undefined, '_backpressureChangePromise should have been initialized');
+
+ if (transformStream._backpressure === false) {
+ return Promise.resolve();
+ }
+
+ assert(transformStream._backpressure === true, '_backpressure should have been initialized');
+ return transformStream._backpressureChangePromise;
+ }
+
+ function TransformStreamSetBackpressure(transformStream, backpressure) {
+ assert(transformStream._backpressure !== backpressure, 'TransformStreamSetBackpressure() should be called only when backpressure is changed');
+
+ if (transformStream._backpressureChangePromise !== undefined) {
+ transformStream._backpressureChangePromise_resolve(backpressure);
+ }
+
+ transformStream._backpressureChangePromise = new Promise(function (resolve) {
+ transformStream._backpressureChangePromise_resolve = resolve;
+ });
+
+ transformStream._backpressureChangePromise.then(function (resolution) {
+ assert(resolution !== backpressure, '_backpressureChangePromise should be fulfilled only when backpressure is changed');
+ });
+
+ transformStream._backpressure = backpressure;
+ }
+
+ function TransformStreamDefaultTransform(chunk, transformStreamController) {
+ var transformStream = transformStreamController._controlledTransformStream;
+ TransformStreamEnqueueToReadable(transformStream, chunk);
+ return Promise.resolve();
+ }
+
+ function TransformStreamTransform(transformStream, chunk) {
+ assert(transformStream._errored === false);
+ assert(transformStream._transforming === false);
+ assert(transformStream._backpressure === false);
+ transformStream._transforming = true;
+ var transformer = transformStream._transformer;
+ var controller = transformStream._transformStreamController;
+ var transformPromise = PromiseInvokeOrPerformFallback(transformer, 'transform', [chunk, controller], TransformStreamDefaultTransform, [chunk, controller]);
+ return transformPromise.then(function () {
+ transformStream._transforming = false;
+ return TransformStreamReadableReadyPromise(transformStream);
+ }, function (e) {
+ TransformStreamErrorIfNeeded(transformStream, e);
+ return Promise.reject(e);
+ });
+ }
+
+ function IsTransformStreamDefaultController(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_controlledTransformStream')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function IsTransformStream(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_transformStreamController')) {
+ return false;
+ }
+
+ return true;
+ }
+
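+ // TransformStreamSink / TransformStreamSource adapt the transform stream onto its writable and readable
+ // sides: writes run the transformer, and pulls release backpressure via _backpressureChangePromise.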
+ var TransformStreamSink = function () {
+ function TransformStreamSink(transformStream, startPromise) {
+ _classCallCheck(this, TransformStreamSink);
+
+ this._transformStream = transformStream;
+ this._startPromise = startPromise;
+ }
+
+ _createClass(TransformStreamSink, [{
+ key: 'start',
+ value: function start(c) {
+ var transformStream = this._transformStream;
+ transformStream._writableController = c;
+ return this._startPromise.then(function () {
+ return TransformStreamReadableReadyPromise(transformStream);
+ });
+ }
+ }, {
+ key: 'write',
+ value: function write(chunk) {
+ var transformStream = this._transformStream;
+ return TransformStreamTransform(transformStream, chunk);
+ }
+ }, {
+ key: 'abort',
+ value: function abort() {
+ var transformStream = this._transformStream;
+ transformStream._writableDone = true;
+ TransformStreamErrorInternal(transformStream, new TypeError('Writable side aborted'));
+ }
+ }, {
+ key: 'close',
+ value: function close() {
+ var transformStream = this._transformStream;
+ assert(transformStream._transforming === false);
+ transformStream._writableDone = true;
+ var flushPromise = PromiseInvokeOrNoop(transformStream._transformer, 'flush', [transformStream._transformStreamController]);
+ return flushPromise.then(function () {
+ if (transformStream._errored === true) {
+ return Promise.reject(transformStream._storedError);
+ }
+
+ if (transformStream._readableClosed === false) {
+ TransformStreamCloseReadableInternal(transformStream);
+ }
+
+ return Promise.resolve();
+ })["catch"](function (r) {
+ TransformStreamErrorIfNeeded(transformStream, r);
+ return Promise.reject(transformStream._storedError);
+ });
+ }
+ }]);
+
+ return TransformStreamSink;
+ }();
+
+ var TransformStreamSource = function () {
+ function TransformStreamSource(transformStream, startPromise) {
+ _classCallCheck(this, TransformStreamSource);
+
+ this._transformStream = transformStream;
+ this._startPromise = startPromise;
+ }
+
+ _createClass(TransformStreamSource, [{
+ key: 'start',
+ value: function start(c) {
+ var transformStream = this._transformStream;
+ transformStream._readableController = c;
+ return this._startPromise.then(function () {
+ assert(transformStream._backpressureChangePromise !== undefined, '_backpressureChangePromise should have been initialized');
+
+ if (transformStream._backpressure === true) {
+ return Promise.resolve();
+ }
+
+ assert(transformStream._backpressure === false, '_backpressure should have been initialized');
+ return transformStream._backpressureChangePromise;
+ });
+ }
+ }, {
+ key: 'pull',
+ value: function pull() {
+ var transformStream = this._transformStream;
+ assert(transformStream._backpressure === true, 'pull() should never be called while _backpressure is false');
+ assert(transformStream._backpressureChangePromise !== undefined, '_backpressureChangePromise should have been initialized');
+ TransformStreamSetBackpressure(transformStream, false);
+ return transformStream._backpressureChangePromise;
+ }
+ }, {
+ key: 'cancel',
+ value: function cancel() {
+ var transformStream = this._transformStream;
+ transformStream._readableClosed = true;
+ TransformStreamErrorInternal(transformStream, new TypeError('Readable side canceled'));
+ }
+ }]);
+
+ return TransformStreamSource;
+ }();
+
+ var TransformStreamDefaultController = function () {
+ function TransformStreamDefaultController(transformStream) {
+ _classCallCheck(this, TransformStreamDefaultController);
+
+ if (IsTransformStream(transformStream) === false) {
+ throw new TypeError('TransformStreamDefaultController can only be ' + 'constructed with a TransformStream instance');
+ }
+
+ if (transformStream._transformStreamController !== undefined) {
+ throw new TypeError('TransformStreamDefaultController instances can ' + 'only be created by the TransformStream constructor');
+ }
+
+ this._controlledTransformStream = transformStream;
+ }
+
+ _createClass(TransformStreamDefaultController, [{
+ key: 'enqueue',
+ value: function enqueue(chunk) {
+ if (IsTransformStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('enqueue');
+ }
+
+ TransformStreamEnqueueToReadable(this._controlledTransformStream, chunk);
+ }
+ }, {
+ key: 'close',
+ value: function close() {
+ if (IsTransformStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('close');
+ }
+
+ TransformStreamCloseReadable(this._controlledTransformStream);
+ }
+ }, {
+ key: 'error',
+ value: function error(reason) {
+ if (IsTransformStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('error');
+ }
+
+ TransformStreamError(this._controlledTransformStream, reason);
+ }
+ }, {
+ key: 'desiredSize',
+ get: function get() {
+ if (IsTransformStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('desiredSize');
+ }
+
+ var transformStream = this._controlledTransformStream;
+ var readableController = transformStream._readableController;
+ return ReadableStreamDefaultControllerGetDesiredSize(readableController);
+ }
+ }]);
+
+ return TransformStreamDefaultController;
+ }();
+
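+ // TransformStream wires a TransformStreamSource-backed ReadableStream to a TransformStreamSink-backed
+ // WritableStream and relays backpressure between them through the readable controller's desiredSize.
+ //
+ // Rough usage sketch (assuming the TransformStream export from this bundle is in scope):
+ //   var upperCaser = new TransformStream({
+ //     transform: function (chunk, controller) { controller.enqueue(String(chunk).toUpperCase()); }
+ //   });
+ //   // Write input through upperCaser.writable's writer and read results from upperCaser.readable's reader.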
+ var TransformStream = function () {
+ function TransformStream() {
+ var transformer = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
+
+ _classCallCheck(this, TransformStream);
+
+ this._transformer = transformer;
+ var readableStrategy = transformer.readableStrategy,
+ writableStrategy = transformer.writableStrategy;
+ this._transforming = false;
+ this._errored = false;
+ this._storedError = undefined;
+ this._writableController = undefined;
+ this._readableController = undefined;
+ this._transformStreamController = undefined;
+ this._writableDone = false;
+ this._readableClosed = false;
+ this._backpressure = undefined;
+ this._backpressureChangePromise = undefined;
+ this._backpressureChangePromise_resolve = undefined;
+ this._transformStreamController = new TransformStreamDefaultController(this);
+ var startPromise_resolve = void 0;
+ var startPromise = new Promise(function (resolve) {
+ startPromise_resolve = resolve;
+ });
+ var source = new TransformStreamSource(this, startPromise);
+ this._readable = new ReadableStream(source, readableStrategy);
+ var sink = new TransformStreamSink(this, startPromise);
+ this._writable = new WritableStream(sink, writableStrategy);
+ assert(this._writableController !== undefined);
+ assert(this._readableController !== undefined);
+ var desiredSize = ReadableStreamDefaultControllerGetDesiredSize(this._readableController);
+ TransformStreamSetBackpressure(this, desiredSize <= 0);
+ var transformStream = this;
+ var startResult = InvokeOrNoop(transformer, 'start', [transformStream._transformStreamController]);
+ startPromise_resolve(startResult);
+ startPromise["catch"](function (e) {
+ if (transformStream._errored === false) {
+ transformStream._errored = true;
+ transformStream._storedError = e;
+ }
+ });
+ }
+
+ _createClass(TransformStream, [{
+ key: 'readable',
+ get: function get() {
+ if (IsTransformStream(this) === false) {
+ throw streamBrandCheckException('readable');
+ }
+
+ return this._readable;
+ }
+ }, {
+ key: 'writable',
+ get: function get() {
+ if (IsTransformStream(this) === false) {
+ throw streamBrandCheckException('writable');
+ }
+
+ return this._writable;
+ }
+ }]);
+
+ return TransformStream;
+ }();
+
+ module.exports = {
+ TransformStream: TransformStream
+ };
+
+ function defaultControllerBrandCheckException(name) {
+ return new TypeError('TransformStreamDefaultController.prototype.' + name + ' can only be used on a TransformStreamDefaultController');
+ }
+
+ function streamBrandCheckException(name) {
+ return new TypeError('TransformStream.prototype.' + name + ' can only be used on a TransformStream');
+ }
+}, function (module, exports, __w_pdfjs_require__) {
+ module.exports = __w_pdfjs_require__(5);
+}]));
+
+/***/ }),
+/* 145 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
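+// Feature-detect a usable native URL implementation (URL.prototype.origin plus correct percent-encoding
+// of pathnames); otherwise export the bundled polyfill, delegating createObjectURL/revokeObjectURL to the
+// native implementation when one exists.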
+{
+ var isURLSupported = false;
+
+ try {
+ if (typeof URL === 'function' && _typeof(URL.prototype) === 'object' && 'origin' in URL.prototype) {
+ var u = new URL('b', 'http://a');
+ u.pathname = 'c%20d';
+ isURLSupported = u.href === 'http://a/c%20d';
+ }
+ } catch (ex) {}
+
+ if (isURLSupported) {
+ exports.URL = URL;
+ } else {
+ var PolyfillURL = __w_pdfjs_require__(146).URL;
+
+ var OriginalURL = __w_pdfjs_require__(3).URL;
+
+ if (OriginalURL) {
+ PolyfillURL.createObjectURL = function (blob) {
+ return OriginalURL.createObjectURL.apply(OriginalURL, arguments);
+ };
+
+ PolyfillURL.revokeObjectURL = function (url) {
+ OriginalURL.revokeObjectURL(url);
+ };
+ }
+
+ exports.URL = PolyfillURL;
+ }
+}
+
+/***/ }),
+/* 146 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+(function URLConstructorClosure() {
+ 'use strict';
+
+ var relative = Object.create(null);
+ relative['ftp'] = 21;
+ relative['file'] = 0;
+ relative['gopher'] = 70;
+ relative['http'] = 80;
+ relative['https'] = 443;
+ relative['ws'] = 80;
+ relative['wss'] = 443;
+ var relativePathDotMapping = Object.create(null);
+ relativePathDotMapping['%2e'] = '.';
+ relativePathDotMapping['.%2e'] = '..';
+ relativePathDotMapping['%2e.'] = '..';
+ relativePathDotMapping['%2e%2e'] = '..';
+
+ function isRelativeScheme(scheme) {
+ return relative[scheme] !== undefined;
+ }
+
+ function invalid() {
+ clear.call(this);
+ this._isInvalid = true;
+ }
+
+ function IDNAToASCII(h) {
+ if (h === '') {
+ invalid.call(this);
+ }
+
+ return h.toLowerCase();
+ }
+
+ function percentEscape(c) {
+ var unicode = c.charCodeAt(0);
+
+ if (unicode > 0x20 && unicode < 0x7F && [0x22, 0x23, 0x3C, 0x3E, 0x3F, 0x60].indexOf(unicode) === -1) {
+ return c;
+ }
+
+ return encodeURIComponent(c);
+ }
+
+ function percentEscapeQuery(c) {
+ var unicode = c.charCodeAt(0);
+
+ if (unicode > 0x20 && unicode < 0x7F && [0x22, 0x23, 0x3C, 0x3E, 0x60].indexOf(unicode) === -1) {
+ return c;
+ }
+
+ return encodeURIComponent(c);
+ }
+
+ var EOF,
+ ALPHA = /[a-zA-Z]/,
+ ALPHANUMERIC = /[a-zA-Z0-9\+\-\.]/;
+
+ function parse(input, stateOverride, base) {
+ function err(message) {
+ errors.push(message);
+ }
+
+ var state = stateOverride || 'scheme start',
+ cursor = 0,
+ buffer = '',
+ seenAt = false,
+ seenBracket = false,
+ errors = [];
+
+ loop: while ((input[cursor - 1] !== EOF || cursor === 0) && !this._isInvalid) {
+ var c = input[cursor];
+
+ switch (state) {
+ case 'scheme start':
+ if (c && ALPHA.test(c)) {
+ buffer += c.toLowerCase();
+ state = 'scheme';
+ } else if (!stateOverride) {
+ buffer = '';
+ state = 'no scheme';
+ continue;
+ } else {
+ err('Invalid scheme.');
+ break loop;
+ }
+
+ break;
+
+ case 'scheme':
+ if (c && ALPHANUMERIC.test(c)) {
+ buffer += c.toLowerCase();
+ } else if (c === ':') {
+ this._scheme = buffer;
+ buffer = '';
+
+ if (stateOverride) {
+ break loop;
+ }
+
+ if (isRelativeScheme(this._scheme)) {
+ this._isRelative = true;
+ }
+
+ if (this._scheme === 'file') {
+ state = 'relative';
+ } else if (this._isRelative && base && base._scheme === this._scheme) {
+ state = 'relative or authority';
+ } else if (this._isRelative) {
+ state = 'authority first slash';
+ } else {
+ state = 'scheme data';
+ }
+ } else if (!stateOverride) {
+ buffer = '';
+ cursor = 0;
+ state = 'no scheme';
+ continue;
+ } else if (c === EOF) {
+ break loop;
+ } else {
+ err('Code point not allowed in scheme: ' + c);
+ break loop;
+ }
+
+ break;
+
+ case 'scheme data':
+ if (c === '?') {
+ this._query = '?';
+ state = 'query';
+ } else if (c === '#') {
+ this._fragment = '#';
+ state = 'fragment';
+ } else {
+ if (c !== EOF && c !== '\t' && c !== '\n' && c !== '\r') {
+ this._schemeData += percentEscape(c);
+ }
+ }
+
+ break;
+
+ case 'no scheme':
+ if (!base || !isRelativeScheme(base._scheme)) {
+ err('Missing scheme.');
+ invalid.call(this);
+ } else {
+ state = 'relative';
+ continue;
+ }
+
+ break;
+
+ case 'relative or authority':
+ if (c === '/' && input[cursor + 1] === '/') {
+ state = 'authority ignore slashes';
+ } else {
+ err('Expected /, got: ' + c);
+ state = 'relative';
+ continue;
+ }
+
+ break;
+
+ case 'relative':
+ this._isRelative = true;
+
+ if (this._scheme !== 'file') {
+ this._scheme = base._scheme;
+ }
+
+ if (c === EOF) {
+ this._host = base._host;
+ this._port = base._port;
+ this._path = base._path.slice();
+ this._query = base._query;
+ this._username = base._username;
+ this._password = base._password;
+ break loop;
+ } else if (c === '/' || c === '\\') {
+ if (c === '\\') {
+ err('\\ is an invalid code point.');
+ }
+
+ state = 'relative slash';
+ } else if (c === '?') {
+ this._host = base._host;
+ this._port = base._port;
+ this._path = base._path.slice();
+ this._query = '?';
+ this._username = base._username;
+ this._password = base._password;
+ state = 'query';
+ } else if (c === '#') {
+ this._host = base._host;
+ this._port = base._port;
+ this._path = base._path.slice();
+ this._query = base._query;
+ this._fragment = '#';
+ this._username = base._username;
+ this._password = base._password;
+ state = 'fragment';
+ } else {
+ var nextC = input[cursor + 1];
+ var nextNextC = input[cursor + 2];
+
+ if (this._scheme !== 'file' || !ALPHA.test(c) || nextC !== ':' && nextC !== '|' || nextNextC !== EOF && nextNextC !== '/' && nextNextC !== '\\' && nextNextC !== '?' && nextNextC !== '#') {
+ this._host = base._host;
+ this._port = base._port;
+ this._username = base._username;
+ this._password = base._password;
+ this._path = base._path.slice();
+
+ this._path.pop();
+ }
+
+ state = 'relative path';
+ continue;
+ }
+
+ break;
+
+ case 'relative slash':
+ if (c === '/' || c === '\\') {
+ if (c === '\\') {
+ err('\\ is an invalid code point.');
+ }
+
+ if (this._scheme === 'file') {
+ state = 'file host';
+ } else {
+ state = 'authority ignore slashes';
+ }
+ } else {
+ if (this._scheme !== 'file') {
+ this._host = base._host;
+ this._port = base._port;
+ this._username = base._username;
+ this._password = base._password;
+ }
+
+ state = 'relative path';
+ continue;
+ }
+
+ break;
+
+ case 'authority first slash':
+ if (c === '/') {
+ state = 'authority second slash';
+ } else {
+ err('Expected \'/\', got: ' + c);
+ state = 'authority ignore slashes';
+ continue;
+ }
+
+ break;
+
+ case 'authority second slash':
+ state = 'authority ignore slashes';
+
+ if (c !== '/') {
+ err('Expected \'/\', got: ' + c);
+ continue;
+ }
+
+ break;
+
+ case 'authority ignore slashes':
+ if (c !== '/' && c !== '\\') {
+ state = 'authority';
+ continue;
+ } else {
+ err('Expected authority, got: ' + c);
+ }
+
+ break;
+
+ case 'authority':
+ if (c === '@') {
+ if (seenAt) {
+ err('@ already seen.');
+ buffer += '%40';
+ }
+
+ seenAt = true;
+
+ for (var i = 0; i < buffer.length; i++) {
+ var cp = buffer[i];
+
+ if (cp === '\t' || cp === '\n' || cp === '\r') {
+ err('Invalid whitespace in authority.');
+ continue;
+ }
+
+ if (cp === ':' && this._password === null) {
+ this._password = '';
+ continue;
+ }
+
+ var tempC = percentEscape(cp);
+
+ if (this._password !== null) {
+ this._password += tempC;
+ } else {
+ this._username += tempC;
+ }
+ }
+
+ buffer = '';
+ } else if (c === EOF || c === '/' || c === '\\' || c === '?' || c === '#') {
+ cursor -= buffer.length;
+ buffer = '';
+ state = 'host';
+ continue;
+ } else {
+ buffer += c;
+ }
+
+ break;
+
+ case 'file host':
+ if (c === EOF || c === '/' || c === '\\' || c === '?' || c === '#') {
+ if (buffer.length === 2 && ALPHA.test(buffer[0]) && (buffer[1] === ':' || buffer[1] === '|')) {
+ state = 'relative path';
+ } else if (buffer.length === 0) {
+ state = 'relative path start';
+ } else {
+ this._host = IDNAToASCII.call(this, buffer);
+ buffer = '';
+ state = 'relative path start';
+ }
+
+ continue;
+ } else if (c === '\t' || c === '\n' || c === '\r') {
+ err('Invalid whitespace in file host.');
+ } else {
+ buffer += c;
+ }
+
+ break;
+
+ case 'host':
+ case 'hostname':
+ if (c === ':' && !seenBracket) {
+ this._host = IDNAToASCII.call(this, buffer);
+ buffer = '';
+ state = 'port';
+
+ if (stateOverride === 'hostname') {
+ break loop;
+ }
+ } else if (c === EOF || c === '/' || c === '\\' || c === '?' || c === '#') {
+ this._host = IDNAToASCII.call(this, buffer);
+ buffer = '';
+ state = 'relative path start';
+
+ if (stateOverride) {
+ break loop;
+ }
+
+ continue;
+ } else if (c !== '\t' && c !== '\n' && c !== '\r') {
+ if (c === '[') {
+ seenBracket = true;
+ } else if (c === ']') {
+ seenBracket = false;
+ }
+
+ buffer += c;
+ } else {
+ err('Invalid code point in host/hostname: ' + c);
+ }
+
+ break;
+
+ case 'port':
+ if (/[0-9]/.test(c)) {
+ buffer += c;
+ } else if (c === EOF || c === '/' || c === '\\' || c === '?' || c === '#' || stateOverride) {
+ if (buffer !== '') {
+ var temp = parseInt(buffer, 10);
+
+ if (temp !== relative[this._scheme]) {
+ this._port = temp + '';
+ }
+
+ buffer = '';
+ }
+
+ if (stateOverride) {
+ break loop;
+ }
+
+ state = 'relative path start';
+ continue;
+ } else if (c === '\t' || c === '\n' || c === '\r') {
+ err('Invalid code point in port: ' + c);
+ } else {
+ invalid.call(this);
+ }
+
+ break;
+
+ case 'relative path start':
+ if (c === '\\') {
+ err('\'\\\' not allowed in path.');
+ }
+
+ state = 'relative path';
+
+ if (c !== '/' && c !== '\\') {
+ continue;
+ }
+
+ break;
+
+ case 'relative path':
+ if (c === EOF || c === '/' || c === '\\' || !stateOverride && (c === '?' || c === '#')) {
+ if (c === '\\') {
+ err('\\ not allowed in relative path.');
+ }
+
+ var tmp;
+
+ if (tmp = relativePathDotMapping[buffer.toLowerCase()]) {
+ buffer = tmp;
+ }
+
+ if (buffer === '..') {
+ this._path.pop();
+
+ if (c !== '/' && c !== '\\') {
+ this._path.push('');
+ }
+ } else if (buffer === '.' && c !== '/' && c !== '\\') {
+ this._path.push('');
+ } else if (buffer !== '.') {
+ if (this._scheme === 'file' && this._path.length === 0 && buffer.length === 2 && ALPHA.test(buffer[0]) && buffer[1] === '|') {
+ buffer = buffer[0] + ':';
+ }
+
+ this._path.push(buffer);
+ }
+
+ buffer = '';
+
+ if (c === '?') {
+ this._query = '?';
+ state = 'query';
+ } else if (c === '#') {
+ this._fragment = '#';
+ state = 'fragment';
+ }
+ } else if (c !== '\t' && c !== '\n' && c !== '\r') {
+ buffer += percentEscape(c);
+ }
+
+ break;
+
+ case 'query':
+ if (!stateOverride && c === '#') {
+ this._fragment = '#';
+ state = 'fragment';
+ } else if (c !== EOF && c !== '\t' && c !== '\n' && c !== '\r') {
+ this._query += percentEscapeQuery(c);
+ }
+
+ break;
+
+ case 'fragment':
+ if (c !== EOF && c !== '\t' && c !== '\n' && c !== '\r') {
+ this._fragment += c;
+ }
+
+ break;
+ }
+
+ cursor++;
+ }
+ }
+
+ function clear() {
+ this._scheme = '';
+ this._schemeData = '';
+ this._username = '';
+ this._password = null;
+ this._host = '';
+ this._port = '';
+ this._path = [];
+ this._query = '';
+ this._fragment = '';
+ this._isInvalid = false;
+ this._isRelative = false;
+ }
+
+ function JURL(url, base) {
+ if (base !== undefined && !(base instanceof JURL)) {
+ base = new JURL(String(base));
+ }
+
+ this._url = url;
+ clear.call(this);
+ var input = url.replace(/^[ \t\r\n\f]+|[ \t\r\n\f]+$/g, '');
+ parse.call(this, input, null, base);
+ }
+
+ JURL.prototype = {
+ toString: function toString() {
+ return this.href;
+ },
+
+ get href() {
+ if (this._isInvalid) {
+ return this._url;
+ }
+
+ var authority = '';
+
+ if (this._username !== '' || this._password !== null) {
+ authority = this._username + (this._password !== null ? ':' + this._password : '') + '@';
+ }
+
+ return this.protocol + (this._isRelative ? '//' + authority + this.host : '') + this.pathname + this._query + this._fragment;
+ },
+
+ set href(value) {
+ clear.call(this);
+ parse.call(this, value);
+ },
+
+ get protocol() {
+ return this._scheme + ':';
+ },
+
+ set protocol(value) {
+ if (this._isInvalid) {
+ return;
+ }
+
+ parse.call(this, value + ':', 'scheme start');
+ },
+
+ get host() {
+ return this._isInvalid ? '' : this._port ? this._host + ':' + this._port : this._host;
+ },
+
+ set host(value) {
+ if (this._isInvalid || !this._isRelative) {
+ return;
+ }
+
+ parse.call(this, value, 'host');
+ },
+
+ get hostname() {
+ return this._host;
+ },
+
+ set hostname(value) {
+ if (this._isInvalid || !this._isRelative) {
+ return;
+ }
+
+ parse.call(this, value, 'hostname');
+ },
+
+ get port() {
+ return this._port;
+ },
+
+ set port(value) {
+ if (this._isInvalid || !this._isRelative) {
+ return;
+ }
+
+ parse.call(this, value, 'port');
+ },
+
+ get pathname() {
+ return this._isInvalid ? '' : this._isRelative ? '/' + this._path.join('/') : this._schemeData;
+ },
+
+ set pathname(value) {
+ if (this._isInvalid || !this._isRelative) {
+ return;
+ }
+
+ this._path = [];
+ parse.call(this, value, 'relative path start');
+ },
+
+ get search() {
+ return this._isInvalid || !this._query || this._query === '?' ? '' : this._query;
+ },
+
+ set search(value) {
+ if (this._isInvalid || !this._isRelative) {
+ return;
+ }
+
+ this._query = '?';
+
+ if (value[0] === '?') {
+ value = value.slice(1);
+ }
+
+ parse.call(this, value, 'query');
+ },
+
+ get hash() {
+ return this._isInvalid || !this._fragment || this._fragment === '#' ? '' : this._fragment;
+ },
+
+ set hash(value) {
+ if (this._isInvalid) {
+ return;
+ }
+
+ this._fragment = '#';
+
+ if (value[0] === '#') {
+ value = value.slice(1);
+ }
+
+ parse.call(this, value, 'fragment');
+ },
+
+ get origin() {
+ var host;
+
+ if (this._isInvalid || !this._scheme) {
+ return '';
+ }
+
+ switch (this._scheme) {
+ case 'data':
+ case 'file':
+ case 'javascript':
+ case 'mailto':
+ return 'null';
+
+ case 'blob':
+ try {
+ return new JURL(this._schemeData).origin || 'null';
+ } catch (_) {}
+
+ return 'null';
+ }
+
+ host = this.host;
+
+ if (!host) {
+ return '';
+ }
+
+ return this._scheme + '://' + host;
+ }
+
+ };
+ exports.URL = JURL;
+})();
+
+/***/ }),
+/* 147 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.getDocument = getDocument;
+exports.setPDFNetworkStreamFactory = setPDFNetworkStreamFactory;
+exports.build = exports.version = exports.PDFPageProxy = exports.PDFDocumentProxy = exports.PDFWorker = exports.PDFDataRangeTransport = exports.LoopbackPort = void 0;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(148));
+
+var _util = __w_pdfjs_require__(1);
+
+var _display_utils = __w_pdfjs_require__(151);
+
+var _font_loader = __w_pdfjs_require__(152);
+
+var _api_compatibility = __w_pdfjs_require__(153);
+
+var _canvas = __w_pdfjs_require__(154);
+
+var _global_scope = _interopRequireDefault(__w_pdfjs_require__(3));
+
+var _worker_options = __w_pdfjs_require__(156);
+
+var _message_handler = __w_pdfjs_require__(157);
+
+var _metadata = __w_pdfjs_require__(158);
+
+var _transport_stream = __w_pdfjs_require__(160);
+
+var _webgl = __w_pdfjs_require__(161);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
+
+function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
+
+function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
+
+function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
+
+function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread(); }
+
+function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance"); }
+
+function _iterableToArray(iter) { if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter); }
+
+function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var DEFAULT_RANGE_CHUNK_SIZE = 65536;
+var isWorkerDisabled = false;
+var fallbackWorkerSrc;
+var fakeWorkerFilesLoader = null;
+{
+ var useRequireEnsure = false;
+
+ if (typeof window === 'undefined') {
+ isWorkerDisabled = true;
+
+ if (typeof require.ensure === 'undefined') {
+ require.ensure = require('node-ensure');
+ }
+
+ useRequireEnsure = true;
+ } else if (typeof require !== 'undefined' && typeof require.ensure === 'function') {
+ useRequireEnsure = true;
+ }
+
+ if (typeof requirejs !== 'undefined' && requirejs.toUrl) {
+ fallbackWorkerSrc = requirejs.toUrl('pdfjs-dist/build/pdf.worker.js');
+ }
+
+ var dynamicLoaderSupported = typeof requirejs !== 'undefined' && requirejs.load;
+ fakeWorkerFilesLoader = useRequireEnsure ? function () {
+ return new Promise(function (resolve, reject) {
+ require.ensure([], function () {
+ try {
+ var worker;
+ worker = require('./pdf.worker.js');
+ resolve(worker.WorkerMessageHandler);
+ } catch (ex) {
+ reject(ex);
+ }
+ }, reject, 'pdfjsWorker');
+ });
+ } : dynamicLoaderSupported ? function () {
+ return new Promise(function (resolve, reject) {
+ requirejs(['pdfjs-dist/build/pdf.worker'], function (worker) {
+ try {
+ resolve(worker.WorkerMessageHandler);
+ } catch (ex) {
+ reject(ex);
+ }
+ }, reject);
+ });
+ } : null;
+
+ if (!fallbackWorkerSrc && (typeof document === "undefined" ? "undefined" : _typeof(document)) === 'object' && 'currentScript' in document) {
+ var pdfjsFilePath = document.currentScript && document.currentScript.src;
+
+ if (pdfjsFilePath) {
+ fallbackWorkerSrc = pdfjsFilePath.replace(/(\.(?:min\.)?js)(\?.*)?$/i, '.worker$1$2');
+ }
+ }
+}
+var createPDFNetworkStream;
+
+function setPDFNetworkStreamFactory(pdfNetworkStreamFactory) {
+ createPDFNetworkStream = pdfNetworkStreamFactory;
+}
+
+function getDocument(src) {
+ var task = new PDFDocumentLoadingTask();
+ var source;
+
+ if (typeof src === 'string') {
+ source = {
+ url: src
+ };
+ } else if ((0, _util.isArrayBuffer)(src)) {
+ source = {
+ data: src
+ };
+ } else if (src instanceof PDFDataRangeTransport) {
+ source = {
+ range: src
+ };
+ } else {
+ if (_typeof(src) !== 'object') {
+ throw new Error('Invalid parameter in getDocument, ' + 'need either Uint8Array, string or a parameter object');
+ }
+
+ if (!src.url && !src.data && !src.range) {
+ throw new Error('Invalid parameter object: need either .data, .range or .url');
+ }
+
+ source = src;
+ }
+
+ var params = Object.create(null);
+ var rangeTransport = null,
+ worker = null;
+
+ for (var key in source) {
+ if (key === 'url' && typeof window !== 'undefined') {
+ params[key] = new _util.URL(source[key], window.location).href;
+ continue;
+ } else if (key === 'range') {
+ rangeTransport = source[key];
+ continue;
+ } else if (key === 'worker') {
+ worker = source[key];
+ continue;
+ } else if (key === 'data' && !(source[key] instanceof Uint8Array)) {
+ var pdfBytes = source[key];
+
+ if (typeof pdfBytes === 'string') {
+ params[key] = (0, _util.stringToBytes)(pdfBytes);
+ } else if (_typeof(pdfBytes) === 'object' && pdfBytes !== null && !isNaN(pdfBytes.length)) {
+ params[key] = new Uint8Array(pdfBytes);
+ } else if ((0, _util.isArrayBuffer)(pdfBytes)) {
+ params[key] = new Uint8Array(pdfBytes);
+ } else {
+ throw new Error('Invalid PDF binary data: either typed array, ' + 'string or array-like object is expected in the ' + 'data property.');
+ }
+
+ continue;
+ }
+
+ params[key] = source[key];
+ }
+
+ params.rangeChunkSize = params.rangeChunkSize || DEFAULT_RANGE_CHUNK_SIZE;
+ params.CMapReaderFactory = params.CMapReaderFactory || _display_utils.DOMCMapReaderFactory;
+ params.ignoreErrors = params.stopAtErrors !== true;
+ params.pdfBug = params.pdfBug === true;
+ var NativeImageDecoderValues = Object.values(_util.NativeImageDecoding);
+
+ if (params.nativeImageDecoderSupport === undefined || !NativeImageDecoderValues.includes(params.nativeImageDecoderSupport)) {
+ params.nativeImageDecoderSupport = _api_compatibility.apiCompatibilityParams.nativeImageDecoderSupport || _util.NativeImageDecoding.DECODE;
+ }
+
+ if (!Number.isInteger(params.maxImageSize)) {
+ params.maxImageSize = -1;
+ }
+
+ if (typeof params.isEvalSupported !== 'boolean') {
+ params.isEvalSupported = true;
+ }
+
+ if (typeof params.disableFontFace !== 'boolean') {
+ params.disableFontFace = _api_compatibility.apiCompatibilityParams.disableFontFace || false;
+ }
+
+ if (typeof params.disableRange !== 'boolean') {
+ params.disableRange = false;
+ }
+
+ if (typeof params.disableStream !== 'boolean') {
+ params.disableStream = false;
+ }
+
+ if (typeof params.disableAutoFetch !== 'boolean') {
+ params.disableAutoFetch = false;
+ }
+
+ if (typeof params.disableCreateObjectURL !== 'boolean') {
+ params.disableCreateObjectURL = _api_compatibility.apiCompatibilityParams.disableCreateObjectURL || false;
+ }
+
+ (0, _util.setVerbosityLevel)(params.verbosity);
+
+ if (!worker) {
+ var workerParams = {
+ postMessageTransfers: params.postMessageTransfers,
+ verbosity: params.verbosity,
+ port: _worker_options.GlobalWorkerOptions.workerPort
+ };
+ worker = workerParams.port ? PDFWorker.fromPort(workerParams) : new PDFWorker(workerParams);
+ task._worker = worker;
+ }
+
+ var docId = task.docId;
+ worker.promise.then(function () {
+ if (task.destroyed) {
+ throw new Error('Loading aborted');
+ }
+
+ return _fetchDocument(worker, params, rangeTransport, docId).then(function (workerId) {
+ if (task.destroyed) {
+ throw new Error('Loading aborted');
+ }
+
+ var networkStream;
+
+ if (rangeTransport) {
+ networkStream = new _transport_stream.PDFDataTransportStream({
+ length: params.length,
+ initialData: params.initialData,
+ progressiveDone: params.progressiveDone,
+ disableRange: params.disableRange,
+ disableStream: params.disableStream
+ }, rangeTransport);
+ } else if (!params.data) {
+ networkStream = createPDFNetworkStream({
+ url: params.url,
+ length: params.length,
+ httpHeaders: params.httpHeaders,
+ withCredentials: params.withCredentials,
+ rangeChunkSize: params.rangeChunkSize,
+ disableRange: params.disableRange,
+ disableStream: params.disableStream
+ });
+ }
+
+ var messageHandler = new _message_handler.MessageHandler(docId, workerId, worker.port);
+ messageHandler.postMessageTransfers = worker.postMessageTransfers;
+ var transport = new WorkerTransport(messageHandler, task, networkStream, params);
+ task._transport = transport;
+ messageHandler.send('Ready', null);
+ });
+ })["catch"](task._capability.reject);
+ return task;
+}
+
+function _fetchDocument(worker, source, pdfDataRangeTransport, docId) {
+ if (worker.destroyed) {
+ return Promise.reject(new Error('Worker was destroyed'));
+ }
+
+ if (pdfDataRangeTransport) {
+ source.length = pdfDataRangeTransport.length;
+ source.initialData = pdfDataRangeTransport.initialData;
+ source.progressiveDone = pdfDataRangeTransport.progressiveDone;
+ }
+
+ return worker.messageHandler.sendWithPromise('GetDocRequest', {
+ docId: docId,
+ apiVersion: '2.2.228',
+ source: {
+ data: source.data,
+ url: source.url,
+ password: source.password,
+ disableAutoFetch: source.disableAutoFetch,
+ rangeChunkSize: source.rangeChunkSize,
+ length: source.length
+ },
+ maxImageSize: source.maxImageSize,
+ disableFontFace: source.disableFontFace,
+ disableCreateObjectURL: source.disableCreateObjectURL,
+ postMessageTransfers: worker.postMessageTransfers,
+ docBaseUrl: source.docBaseUrl,
+ nativeImageDecoderSupport: source.nativeImageDecoderSupport,
+ ignoreErrors: source.ignoreErrors,
+ isEvalSupported: source.isEvalSupported
+ }).then(function (workerId) {
+ if (worker.destroyed) {
+ throw new Error('Worker was destroyed');
+ }
+
+ return workerId;
+ });
+}
+
+var PDFDocumentLoadingTask = function PDFDocumentLoadingTaskClosure() {
+ var nextDocumentId = 0;
+
+ var PDFDocumentLoadingTask =
+ /*#__PURE__*/
+ function () {
+ function PDFDocumentLoadingTask() {
+ _classCallCheck(this, PDFDocumentLoadingTask);
+
+ this._capability = (0, _util.createPromiseCapability)();
+ this._transport = null;
+ this._worker = null;
+ this.docId = 'd' + nextDocumentId++;
+ this.destroyed = false;
+ this.onPassword = null;
+ this.onProgress = null;
+ this.onUnsupportedFeature = null;
+ }
+
+ _createClass(PDFDocumentLoadingTask, [{
+ key: "destroy",
+ value: function destroy() {
+ var _this = this;
+
+ this.destroyed = true;
+ var transportDestroyed = !this._transport ? Promise.resolve() : this._transport.destroy();
+ return transportDestroyed.then(function () {
+ _this._transport = null;
+
+ if (_this._worker) {
+ _this._worker.destroy();
+
+ _this._worker = null;
+ }
+ });
+ }
+ }, {
+ key: "then",
+ value: function then(onFulfilled, onRejected) {
+ (0, _display_utils.deprecated)('PDFDocumentLoadingTask.then method, ' + 'use the `promise` getter instead.');
+ return this.promise.then.apply(this.promise, arguments);
+ }
+ }, {
+ key: "promise",
+ get: function get() {
+ return this._capability.promise;
+ }
+ }]);
+
+ return PDFDocumentLoadingTask;
+ }();
+
+ return PDFDocumentLoadingTask;
+}();
+
+var PDFDataRangeTransport =
+/*#__PURE__*/
+function () {
+ function PDFDataRangeTransport(length, initialData) {
+ var progressiveDone = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
+
+ _classCallCheck(this, PDFDataRangeTransport);
+
+ this.length = length;
+ this.initialData = initialData;
+ this.progressiveDone = progressiveDone;
+ this._rangeListeners = [];
+ this._progressListeners = [];
+ this._progressiveReadListeners = [];
+ this._progressiveDoneListeners = [];
+ this._readyCapability = (0, _util.createPromiseCapability)();
+ }
+
+ _createClass(PDFDataRangeTransport, [{
+ key: "addRangeListener",
+ value: function addRangeListener(listener) {
+ this._rangeListeners.push(listener);
+ }
+ }, {
+ key: "addProgressListener",
+ value: function addProgressListener(listener) {
+ this._progressListeners.push(listener);
+ }
+ }, {
+ key: "addProgressiveReadListener",
+ value: function addProgressiveReadListener(listener) {
+ this._progressiveReadListeners.push(listener);
+ }
+ }, {
+ key: "addProgressiveDoneListener",
+ value: function addProgressiveDoneListener(listener) {
+ this._progressiveDoneListeners.push(listener);
+ }
+ }, {
+ key: "onDataRange",
+ value: function onDataRange(begin, chunk) {
+ var _iteratorNormalCompletion = true;
+ var _didIteratorError = false;
+ var _iteratorError = undefined;
+
+ try {
+ for (var _iterator = this._rangeListeners[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
+ var listener = _step.value;
+ listener(begin, chunk);
+ }
+ } catch (err) {
+ _didIteratorError = true;
+ _iteratorError = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion && _iterator["return"] != null) {
+ _iterator["return"]();
+ }
+ } finally {
+ if (_didIteratorError) {
+ throw _iteratorError;
+ }
+ }
+ }
+ }
+ }, {
+ key: "onDataProgress",
+ value: function onDataProgress(loaded, total) {
+ var _this2 = this;
+
+ this._readyCapability.promise.then(function () {
+ var _iteratorNormalCompletion2 = true;
+ var _didIteratorError2 = false;
+ var _iteratorError2 = undefined;
+
+ try {
+ for (var _iterator2 = _this2._progressListeners[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
+ var listener = _step2.value;
+ listener(loaded, total);
+ }
+ } catch (err) {
+ _didIteratorError2 = true;
+ _iteratorError2 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) {
+ _iterator2["return"]();
+ }
+ } finally {
+ if (_didIteratorError2) {
+ throw _iteratorError2;
+ }
+ }
+ }
+ });
+ }
+ }, {
+ key: "onDataProgressiveRead",
+ value: function onDataProgressiveRead(chunk) {
+ var _this3 = this;
+
+ this._readyCapability.promise.then(function () {
+ var _iteratorNormalCompletion3 = true;
+ var _didIteratorError3 = false;
+ var _iteratorError3 = undefined;
+
+ try {
+ for (var _iterator3 = _this3._progressiveReadListeners[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
+ var listener = _step3.value;
+ listener(chunk);
+ }
+ } catch (err) {
+ _didIteratorError3 = true;
+ _iteratorError3 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion3 && _iterator3["return"] != null) {
+ _iterator3["return"]();
+ }
+ } finally {
+ if (_didIteratorError3) {
+ throw _iteratorError3;
+ }
+ }
+ }
+ });
+ }
+ }, {
+ key: "onDataProgressiveDone",
+ value: function onDataProgressiveDone() {
+ var _this4 = this;
+
+ this._readyCapability.promise.then(function () {
+ var _iteratorNormalCompletion4 = true;
+ var _didIteratorError4 = false;
+ var _iteratorError4 = undefined;
+
+ try {
+ for (var _iterator4 = _this4._progressiveDoneListeners[Symbol.iterator](), _step4; !(_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done); _iteratorNormalCompletion4 = true) {
+ var listener = _step4.value;
+ listener();
+ }
+ } catch (err) {
+ _didIteratorError4 = true;
+ _iteratorError4 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion4 && _iterator4["return"] != null) {
+ _iterator4["return"]();
+ }
+ } finally {
+ if (_didIteratorError4) {
+ throw _iteratorError4;
+ }
+ }
+ }
+ });
+ }
+ }, {
+ key: "transportReady",
+ value: function transportReady() {
+ this._readyCapability.resolve();
+ }
+ }, {
+ key: "requestDataRange",
+ value: function requestDataRange(begin, end) {
+ (0, _util.unreachable)('Abstract method PDFDataRangeTransport.requestDataRange');
+ }
+ }, {
+ key: "abort",
+ value: function abort() {}
+ }]);
+
+ return PDFDataRangeTransport;
+}();
+
+exports.PDFDataRangeTransport = PDFDataRangeTransport;
+
+var PDFDocumentProxy =
+/*#__PURE__*/
+function () {
+ function PDFDocumentProxy(pdfInfo, transport) {
+ _classCallCheck(this, PDFDocumentProxy);
+
+ this._pdfInfo = pdfInfo;
+ this._transport = transport;
+ }
+
+ _createClass(PDFDocumentProxy, [{
+ key: "getPage",
+ value: function getPage(pageNumber) {
+ return this._transport.getPage(pageNumber);
+ }
+ }, {
+ key: "getPageIndex",
+ value: function getPageIndex(ref) {
+ return this._transport.getPageIndex(ref);
+ }
+ }, {
+ key: "getDestinations",
+ value: function getDestinations() {
+ return this._transport.getDestinations();
+ }
+ }, {
+ key: "getDestination",
+ value: function getDestination(id) {
+ return this._transport.getDestination(id);
+ }
+ }, {
+ key: "getPageLabels",
+ value: function getPageLabels() {
+ return this._transport.getPageLabels();
+ }
+ }, {
+ key: "getPageLayout",
+ value: function getPageLayout() {
+ return this._transport.getPageLayout();
+ }
+ }, {
+ key: "getPageMode",
+ value: function getPageMode() {
+ return this._transport.getPageMode();
+ }
+ }, {
+ key: "getViewerPreferences",
+ value: function getViewerPreferences() {
+ return this._transport.getViewerPreferences();
+ }
+ }, {
+ key: "getOpenActionDestination",
+ value: function getOpenActionDestination() {
+ return this._transport.getOpenActionDestination();
+ }
+ }, {
+ key: "getAttachments",
+ value: function getAttachments() {
+ return this._transport.getAttachments();
+ }
+ }, {
+ key: "getJavaScript",
+ value: function getJavaScript() {
+ return this._transport.getJavaScript();
+ }
+ }, {
+ key: "getOutline",
+ value: function getOutline() {
+ return this._transport.getOutline();
+ }
+ }, {
+ key: "getPermissions",
+ value: function getPermissions() {
+ return this._transport.getPermissions();
+ }
+ }, {
+ key: "getMetadata",
+ value: function getMetadata() {
+ return this._transport.getMetadata();
+ }
+ }, {
+ key: "getData",
+ value: function getData() {
+ return this._transport.getData();
+ }
+ }, {
+ key: "getDownloadInfo",
+ value: function getDownloadInfo() {
+ return this._transport.downloadInfoCapability.promise;
+ }
+ }, {
+ key: "getStats",
+ value: function getStats() {
+ return this._transport.getStats();
+ }
+ }, {
+ key: "cleanup",
+ value: function cleanup() {
+ this._transport.startCleanup();
+ }
+ }, {
+ key: "destroy",
+ value: function destroy() {
+ return this.loadingTask.destroy();
+ }
+ }, {
+ key: "numPages",
+ get: function get() {
+ return this._pdfInfo.numPages;
+ }
+ }, {
+ key: "fingerprint",
+ get: function get() {
+ return this._pdfInfo.fingerprint;
+ }
+ }, {
+ key: "loadingParams",
+ get: function get() {
+ return this._transport.loadingParams;
+ }
+ }, {
+ key: "loadingTask",
+ get: function get() {
+ return this._transport.loadingTask;
+ }
+ }]);
+
+ return PDFDocumentProxy;
+}();
+
+exports.PDFDocumentProxy = PDFDocumentProxy;
+
+var PDFPageProxy =
+/*#__PURE__*/
+function () {
+ function PDFPageProxy(pageIndex, pageInfo, transport) {
+ var pdfBug = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false;
+
+ _classCallCheck(this, PDFPageProxy);
+
+ this.pageIndex = pageIndex;
+ this._pageInfo = pageInfo;
+ this._transport = transport;
+ this._stats = pdfBug ? new _display_utils.StatTimer() : _display_utils.DummyStatTimer;
+ this._pdfBug = pdfBug;
+ this.commonObjs = transport.commonObjs;
+ this.objs = new PDFObjects();
+ this.cleanupAfterRender = false;
+ this.pendingCleanup = false;
+ this.intentStates = Object.create(null);
+ this.destroyed = false;
+ }
+
+ _createClass(PDFPageProxy, [{
+ key: "getViewport",
+ value: function getViewport() {
+ var _ref = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
+ scale = _ref.scale,
+ _ref$rotation = _ref.rotation,
+ rotation = _ref$rotation === void 0 ? this.rotate : _ref$rotation,
+ _ref$dontFlip = _ref.dontFlip,
+ dontFlip = _ref$dontFlip === void 0 ? false : _ref$dontFlip;
+
+ if (arguments.length > 1 || typeof arguments[0] === 'number') {
+ (0, _display_utils.deprecated)('getViewport is called with obsolete arguments.');
+ scale = arguments[0];
+ rotation = typeof arguments[1] === 'number' ? arguments[1] : this.rotate;
+ dontFlip = typeof arguments[2] === 'boolean' ? arguments[2] : false;
+ }
+
+ return new _display_utils.PageViewport({
+ viewBox: this.view,
+ scale: scale,
+ rotation: rotation,
+ dontFlip: dontFlip
+ });
+ }
+ }, {
+ key: "getAnnotations",
+ value: function getAnnotations() {
+ var _ref2 = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
+ _ref2$intent = _ref2.intent,
+ intent = _ref2$intent === void 0 ? null : _ref2$intent;
+
+ if (!this.annotationsPromise || this.annotationsIntent !== intent) {
+ this.annotationsPromise = this._transport.getAnnotations(this.pageIndex, intent);
+ this.annotationsIntent = intent;
+ }
+
+ return this.annotationsPromise;
+ }
+ }, {
+ key: "render",
+ value: function render(_ref3) {
+ var _this5 = this;
+
+ var canvasContext = _ref3.canvasContext,
+ viewport = _ref3.viewport,
+ _ref3$intent = _ref3.intent,
+ intent = _ref3$intent === void 0 ? 'display' : _ref3$intent,
+ _ref3$enableWebGL = _ref3.enableWebGL,
+ enableWebGL = _ref3$enableWebGL === void 0 ? false : _ref3$enableWebGL,
+ _ref3$renderInteracti = _ref3.renderInteractiveForms,
+ renderInteractiveForms = _ref3$renderInteracti === void 0 ? false : _ref3$renderInteracti,
+ _ref3$transform = _ref3.transform,
+ transform = _ref3$transform === void 0 ? null : _ref3$transform,
+ _ref3$imageLayer = _ref3.imageLayer,
+ imageLayer = _ref3$imageLayer === void 0 ? null : _ref3$imageLayer,
+ _ref3$canvasFactory = _ref3.canvasFactory,
+ canvasFactory = _ref3$canvasFactory === void 0 ? null : _ref3$canvasFactory,
+ _ref3$background = _ref3.background,
+ background = _ref3$background === void 0 ? null : _ref3$background;
+ var stats = this._stats;
+ stats.time('Overall');
+ this.pendingCleanup = false;
+ var renderingIntent = intent === 'print' ? 'print' : 'display';
+ var canvasFactoryInstance = canvasFactory || new _display_utils.DOMCanvasFactory();
+ var webGLContext = new _webgl.WebGLContext({
+ enable: enableWebGL
+ });
+
+ if (!this.intentStates[renderingIntent]) {
+ this.intentStates[renderingIntent] = Object.create(null);
+ }
+
+ var intentState = this.intentStates[renderingIntent];
+
+ if (!intentState.displayReadyCapability) {
+ intentState.receivingOperatorList = true;
+ intentState.displayReadyCapability = (0, _util.createPromiseCapability)();
+ intentState.operatorList = {
+ fnArray: [],
+ argsArray: [],
+ lastChunk: false
+ };
+ stats.time('Page Request');
+
+ this._transport.messageHandler.send('RenderPageRequest', {
+ pageIndex: this.pageNumber - 1,
+ intent: renderingIntent,
+ renderInteractiveForms: renderInteractiveForms === true
+ });
+ }
+
+ var complete = function complete(error) {
+ var i = intentState.renderTasks.indexOf(internalRenderTask);
+
+ if (i >= 0) {
+ intentState.renderTasks.splice(i, 1);
+ }
+
+ if (_this5.cleanupAfterRender || renderingIntent === 'print') {
+ _this5.pendingCleanup = true;
+ }
+
+ _this5._tryCleanup();
+
+ if (error) {
+ internalRenderTask.capability.reject(error);
+ } else {
+ internalRenderTask.capability.resolve();
+ }
+
+ stats.timeEnd('Rendering');
+ stats.timeEnd('Overall');
+ };
+
+ var internalRenderTask = new InternalRenderTask({
+ callback: complete,
+ params: {
+ canvasContext: canvasContext,
+ viewport: viewport,
+ transform: transform,
+ imageLayer: imageLayer,
+ background: background
+ },
+ objs: this.objs,
+ commonObjs: this.commonObjs,
+ operatorList: intentState.operatorList,
+ pageNumber: this.pageNumber,
+ canvasFactory: canvasFactoryInstance,
+ webGLContext: webGLContext,
+ useRequestAnimationFrame: renderingIntent !== 'print',
+ pdfBug: this._pdfBug
+ });
+
+ if (!intentState.renderTasks) {
+ intentState.renderTasks = [];
+ }
+
+ intentState.renderTasks.push(internalRenderTask);
+ var renderTask = internalRenderTask.task;
+ intentState.displayReadyCapability.promise.then(function (transparency) {
+ if (_this5.pendingCleanup) {
+ complete();
+ return;
+ }
+
+ stats.time('Rendering');
+ internalRenderTask.initializeGraphics(transparency);
+ internalRenderTask.operatorListChanged();
+ })["catch"](complete);
+ return renderTask;
+ }
+ }, {
+ key: "getOperatorList",
+ value: function getOperatorList() {
+ function operatorListChanged() {
+ if (intentState.operatorList.lastChunk) {
+ intentState.opListReadCapability.resolve(intentState.operatorList);
+ var i = intentState.renderTasks.indexOf(opListTask);
+
+ if (i >= 0) {
+ intentState.renderTasks.splice(i, 1);
+ }
+ }
+ }
+
+ var renderingIntent = 'oplist';
+
+ if (!this.intentStates[renderingIntent]) {
+ this.intentStates[renderingIntent] = Object.create(null);
+ }
+
+ var intentState = this.intentStates[renderingIntent];
+ var opListTask;
+
+ if (!intentState.opListReadCapability) {
+ opListTask = {};
+ opListTask.operatorListChanged = operatorListChanged;
+ intentState.receivingOperatorList = true;
+ intentState.opListReadCapability = (0, _util.createPromiseCapability)();
+ intentState.renderTasks = [];
+ intentState.renderTasks.push(opListTask);
+ intentState.operatorList = {
+ fnArray: [],
+ argsArray: [],
+ lastChunk: false
+ };
+
+ this._stats.time('Page Request');
+
+ this._transport.messageHandler.send('RenderPageRequest', {
+ pageIndex: this.pageIndex,
+ intent: renderingIntent
+ });
+ }
+
+ return intentState.opListReadCapability.promise;
+ }
+ }, {
+ key: "streamTextContent",
+ value: function streamTextContent() {
+ var _ref4 = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
+ _ref4$normalizeWhites = _ref4.normalizeWhitespace,
+ normalizeWhitespace = _ref4$normalizeWhites === void 0 ? false : _ref4$normalizeWhites,
+ _ref4$disableCombineT = _ref4.disableCombineTextItems,
+ disableCombineTextItems = _ref4$disableCombineT === void 0 ? false : _ref4$disableCombineT;
+
+ var TEXT_CONTENT_CHUNK_SIZE = 100;
+ return this._transport.messageHandler.sendWithStream('GetTextContent', {
+ pageIndex: this.pageNumber - 1,
+ normalizeWhitespace: normalizeWhitespace === true,
+ combineTextItems: disableCombineTextItems !== true
+ }, {
+ highWaterMark: TEXT_CONTENT_CHUNK_SIZE,
+ size: function size(textContent) {
+ return textContent.items.length;
+ }
+ });
+ }
+ }, {
+ key: "getTextContent",
+ value: function getTextContent() {
+ var params = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
+ var readableStream = this.streamTextContent(params);
+ return new Promise(function (resolve, reject) {
+ function pump() {
+ reader.read().then(function (_ref5) {
+ var _textContent$items;
+
+ var value = _ref5.value,
+ done = _ref5.done;
+
+ if (done) {
+ resolve(textContent);
+ return;
+ }
+
+ Object.assign(textContent.styles, value.styles);
+
+ (_textContent$items = textContent.items).push.apply(_textContent$items, _toConsumableArray(value.items));
+
+ pump();
+ }, reject);
+ }
+
+ var reader = readableStream.getReader();
+ var textContent = {
+ items: [],
+ styles: Object.create(null)
+ };
+ pump();
+ });
+ }
+ }, {
+ key: "_destroy",
+ value: function _destroy() {
+ this.destroyed = true;
+ this._transport.pageCache[this.pageIndex] = null;
+ var waitOn = [];
+ Object.keys(this.intentStates).forEach(function (intent) {
+ if (intent === 'oplist') {
+ return;
+ }
+
+ var intentState = this.intentStates[intent];
+ intentState.renderTasks.forEach(function (renderTask) {
+ var renderCompleted = renderTask.capability.promise["catch"](function () {});
+ waitOn.push(renderCompleted);
+ renderTask.cancel();
+ });
+ }, this);
+ this.objs.clear();
+ this.annotationsPromise = null;
+ this.pendingCleanup = false;
+ return Promise.all(waitOn);
+ }
+ }, {
+ key: "cleanup",
+ value: function cleanup() {
+ var resetStats = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
+ this.pendingCleanup = true;
+
+ this._tryCleanup(resetStats);
+ }
+ }, {
+ key: "_tryCleanup",
+ value: function _tryCleanup() {
+ var resetStats = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
+
+ if (!this.pendingCleanup || Object.keys(this.intentStates).some(function (intent) {
+ var intentState = this.intentStates[intent];
+ return intentState.renderTasks.length !== 0 || intentState.receivingOperatorList;
+ }, this)) {
+ return;
+ }
+
+ Object.keys(this.intentStates).forEach(function (intent) {
+ delete this.intentStates[intent];
+ }, this);
+ this.objs.clear();
+ this.annotationsPromise = null;
+
+ if (resetStats && this._stats instanceof _display_utils.StatTimer) {
+ this._stats = new _display_utils.StatTimer();
+ }
+
+ this.pendingCleanup = false;
+ }
+ }, {
+ key: "_startRenderPage",
+ value: function _startRenderPage(transparency, intent) {
+ var intentState = this.intentStates[intent];
+
+ if (intentState.displayReadyCapability) {
+ intentState.displayReadyCapability.resolve(transparency);
+ }
+ }
+ }, {
+ key: "_renderPageChunk",
+ value: function _renderPageChunk(operatorListChunk, intent) {
+ var intentState = this.intentStates[intent];
+
+ for (var i = 0, ii = operatorListChunk.length; i < ii; i++) {
+ intentState.operatorList.fnArray.push(operatorListChunk.fnArray[i]);
+ intentState.operatorList.argsArray.push(operatorListChunk.argsArray[i]);
+ }
+
+ intentState.operatorList.lastChunk = operatorListChunk.lastChunk;
+
+ for (var _i = 0; _i < intentState.renderTasks.length; _i++) {
+ intentState.renderTasks[_i].operatorListChanged();
+ }
+
+ if (operatorListChunk.lastChunk) {
+ intentState.receivingOperatorList = false;
+
+ this._tryCleanup();
+ }
+ }
+ }, {
+ key: "pageNumber",
+ get: function get() {
+ return this.pageIndex + 1;
+ }
+ }, {
+ key: "rotate",
+ get: function get() {
+ return this._pageInfo.rotate;
+ }
+ }, {
+ key: "ref",
+ get: function get() {
+ return this._pageInfo.ref;
+ }
+ }, {
+ key: "userUnit",
+ get: function get() {
+ return this._pageInfo.userUnit;
+ }
+ }, {
+ key: "view",
+ get: function get() {
+ return this._pageInfo.view;
+ }
+ }, {
+ key: "stats",
+ get: function get() {
+ return this._stats instanceof _display_utils.StatTimer ? this._stats : null;
+ }
+ }]);
+
+ return PDFPageProxy;
+}();
+
+exports.PDFPageProxy = PDFPageProxy;
+
+var LoopbackPort =
+/*#__PURE__*/
+function () {
+ function LoopbackPort() {
+ var defer = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
+
+ _classCallCheck(this, LoopbackPort);
+
+ this._listeners = [];
+ this._defer = defer;
+ this._deferred = Promise.resolve(undefined);
+ }
+
+ _createClass(LoopbackPort, [{
+ key: "postMessage",
+ value: function postMessage(obj, transfers) {
+ var _this6 = this;
+
+ function cloneValue(value) {
+ if (_typeof(value) !== 'object' || value === null) {
+ return value;
+ }
+
+ if (cloned.has(value)) {
+ return cloned.get(value);
+ }
+
+ var buffer, result;
+
+ if ((buffer = value.buffer) && (0, _util.isArrayBuffer)(buffer)) {
+ var transferable = transfers && transfers.includes(buffer);
+
+ if (value === buffer) {
+ result = value;
+ } else if (transferable) {
+ result = new value.constructor(buffer, value.byteOffset, value.byteLength);
+ } else {
+ result = new value.constructor(value);
+ }
+
+ cloned.set(value, result);
+ return result;
+ }
+
+ result = Array.isArray(value) ? [] : {};
+ cloned.set(value, result);
+
+ for (var i in value) {
+ var desc = void 0,
+ p = value;
+
+ while (!(desc = Object.getOwnPropertyDescriptor(p, i))) {
+ p = Object.getPrototypeOf(p);
+ }
+
+ if (typeof desc.value === 'undefined' || typeof desc.value === 'function') {
+ continue;
+ }
+
+ result[i] = cloneValue(desc.value);
+ }
+
+ return result;
+ }
+
+ if (!this._defer) {
+ this._listeners.forEach(function (listener) {
+ listener.call(this, {
+ data: obj
+ });
+ }, this);
+
+ return;
+ }
+
+ var cloned = new WeakMap();
+ var e = {
+ data: cloneValue(obj)
+ };
+
+ this._deferred.then(function () {
+ _this6._listeners.forEach(function (listener) {
+ listener.call(this, e);
+ }, _this6);
+ });
+ }
+ }, {
+ key: "addEventListener",
+ value: function addEventListener(name, listener) {
+ this._listeners.push(listener);
+ }
+ }, {
+ key: "removeEventListener",
+ value: function removeEventListener(name, listener) {
+ var i = this._listeners.indexOf(listener);
+
+ this._listeners.splice(i, 1);
+ }
+ }, {
+ key: "terminate",
+ value: function terminate() {
+ this._listeners.length = 0;
+ }
+ }]);
+
+ return LoopbackPort;
+}();
+
+exports.LoopbackPort = LoopbackPort;
+
+var PDFWorker = function PDFWorkerClosure() {
+ var pdfWorkerPorts = new WeakMap();
+ var nextFakeWorkerId = 0;
+ var fakeWorkerFilesLoadedCapability;
+
+ function _getWorkerSrc() {
+ if (_worker_options.GlobalWorkerOptions.workerSrc) {
+ return _worker_options.GlobalWorkerOptions.workerSrc;
+ }
+
+ if (typeof fallbackWorkerSrc !== 'undefined') {
+ return fallbackWorkerSrc;
+ }
+
+ throw new Error('No "GlobalWorkerOptions.workerSrc" specified.');
+ }
+
+ function getMainThreadWorkerMessageHandler() {
+ try {
+ if (typeof window !== 'undefined') {
+ return window.pdfjsWorker && window.pdfjsWorker.WorkerMessageHandler;
+ }
+ } catch (ex) {}
+
+ return null;
+ }
+
+ function setupFakeWorkerGlobal() {
+ if (fakeWorkerFilesLoadedCapability) {
+ return fakeWorkerFilesLoadedCapability.promise;
+ }
+
+ fakeWorkerFilesLoadedCapability = (0, _util.createPromiseCapability)();
+ var mainWorkerMessageHandler = getMainThreadWorkerMessageHandler();
+
+ if (mainWorkerMessageHandler) {
+ fakeWorkerFilesLoadedCapability.resolve(mainWorkerMessageHandler);
+ return fakeWorkerFilesLoadedCapability.promise;
+ }
+
+ var loader = fakeWorkerFilesLoader || function () {
+ return (0, _display_utils.loadScript)(_getWorkerSrc()).then(function () {
+ return window.pdfjsWorker.WorkerMessageHandler;
+ });
+ };
+
+ loader().then(fakeWorkerFilesLoadedCapability.resolve, fakeWorkerFilesLoadedCapability.reject);
+ return fakeWorkerFilesLoadedCapability.promise;
+ }
+
+ function createCDNWrapper(url) {
+ var wrapper = 'importScripts(\'' + url + '\');';
+ return _util.URL.createObjectURL(new Blob([wrapper]));
+ }
+
+ var PDFWorker =
+ /*#__PURE__*/
+ function () {
+ function PDFWorker() {
+ var _ref6 = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
+ _ref6$name = _ref6.name,
+ name = _ref6$name === void 0 ? null : _ref6$name,
+ _ref6$port = _ref6.port,
+ port = _ref6$port === void 0 ? null : _ref6$port,
+ _ref6$postMessageTran = _ref6.postMessageTransfers,
+ postMessageTransfers = _ref6$postMessageTran === void 0 ? true : _ref6$postMessageTran,
+ _ref6$verbosity = _ref6.verbosity,
+ verbosity = _ref6$verbosity === void 0 ? (0, _util.getVerbosityLevel)() : _ref6$verbosity;
+
+ _classCallCheck(this, PDFWorker);
+
+ if (port && pdfWorkerPorts.has(port)) {
+ throw new Error('Cannot use more than one PDFWorker per port');
+ }
+
+ this.name = name;
+ this.destroyed = false;
+ this.postMessageTransfers = postMessageTransfers !== false;
+ this.verbosity = verbosity;
+ this._readyCapability = (0, _util.createPromiseCapability)();
+ this._port = null;
+ this._webWorker = null;
+ this._messageHandler = null;
+
+ if (port) {
+ pdfWorkerPorts.set(port, this);
+
+ this._initializeFromPort(port);
+
+ return;
+ }
+
+ this._initialize();
+ }
+
+ _createClass(PDFWorker, [{
+ key: "_initializeFromPort",
+ value: function _initializeFromPort(port) {
+ this._port = port;
+ this._messageHandler = new _message_handler.MessageHandler('main', 'worker', port);
+
+ this._messageHandler.on('ready', function () {});
+
+ this._readyCapability.resolve();
+ }
+ }, {
+ key: "_initialize",
+ value: function _initialize() {
+ var _this7 = this;
+
+ if (typeof Worker !== 'undefined' && !isWorkerDisabled && !getMainThreadWorkerMessageHandler()) {
+ var workerSrc = _getWorkerSrc();
+
+ try {
+ if (!(0, _util.isSameOrigin)(window.location.href, workerSrc)) {
+ workerSrc = createCDNWrapper(new _util.URL(workerSrc, window.location).href);
+ }
+
+ var worker = new Worker(workerSrc);
+ var messageHandler = new _message_handler.MessageHandler('main', 'worker', worker);
+
+ var terminateEarly = function terminateEarly() {
+ worker.removeEventListener('error', onWorkerError);
+ messageHandler.destroy();
+ worker.terminate();
+
+ if (_this7.destroyed) {
+ _this7._readyCapability.reject(new Error('Worker was destroyed'));
+ } else {
+ _this7._setupFakeWorker();
+ }
+ };
+
+ var onWorkerError = function onWorkerError() {
+ if (!_this7._webWorker) {
+ terminateEarly();
+ }
+ };
+
+ worker.addEventListener('error', onWorkerError);
+ messageHandler.on('test', function (data) {
+ worker.removeEventListener('error', onWorkerError);
+
+ if (_this7.destroyed) {
+ terminateEarly();
+ return;
+ }
+
+ if (data && data.supportTypedArray) {
+ _this7._messageHandler = messageHandler;
+ _this7._port = worker;
+ _this7._webWorker = worker;
+
+ if (!data.supportTransfers) {
+ _this7.postMessageTransfers = false;
+ }
+
+ _this7._readyCapability.resolve();
+
+ messageHandler.send('configure', {
+ verbosity: _this7.verbosity
+ });
+ } else {
+ _this7._setupFakeWorker();
+
+ messageHandler.destroy();
+ worker.terminate();
+ }
+ });
+ messageHandler.on('ready', function (data) {
+ worker.removeEventListener('error', onWorkerError);
+
+ if (_this7.destroyed) {
+ terminateEarly();
+ return;
+ }
+
+ try {
+ sendTest();
+ } catch (e) {
+ _this7._setupFakeWorker();
+ }
+ });
+
+ var sendTest = function sendTest() {
+ var testObj = new Uint8Array([_this7.postMessageTransfers ? 255 : 0]);
+
+ try {
+ messageHandler.send('test', testObj, [testObj.buffer]);
+ } catch (ex) {
+ (0, _util.info)('Cannot use postMessage transfers');
+ testObj[0] = 0;
+ messageHandler.send('test', testObj);
+ }
+ };
+
+ sendTest();
+ return;
+ } catch (e) {
+ (0, _util.info)('The worker has been disabled.');
+ }
+ }
+
+ this._setupFakeWorker();
+ }
+ }, {
+ key: "_setupFakeWorker",
+ value: function _setupFakeWorker() {
+ var _this8 = this;
+
+ if (!isWorkerDisabled) {
+ (0, _util.warn)('Setting up fake worker.');
+ isWorkerDisabled = true;
+ }
+
+ setupFakeWorkerGlobal().then(function (WorkerMessageHandler) {
+ if (_this8.destroyed) {
+ _this8._readyCapability.reject(new Error('Worker was destroyed'));
+
+ return;
+ }
+
+ var port = new LoopbackPort();
+ _this8._port = port;
+ var id = 'fake' + nextFakeWorkerId++;
+ var workerHandler = new _message_handler.MessageHandler(id + '_worker', id, port);
+ WorkerMessageHandler.setup(workerHandler, port);
+ var messageHandler = new _message_handler.MessageHandler(id, id + '_worker', port);
+ _this8._messageHandler = messageHandler;
+
+ _this8._readyCapability.resolve();
+ })["catch"](function (reason) {
+ _this8._readyCapability.reject(new Error("Setting up fake worker failed: \"".concat(reason.message, "\".")));
+ });
+ }
+ }, {
+ key: "destroy",
+ value: function destroy() {
+ this.destroyed = true;
+
+ if (this._webWorker) {
+ this._webWorker.terminate();
+
+ this._webWorker = null;
+ }
+
+ pdfWorkerPorts["delete"](this._port);
+ this._port = null;
+
+ if (this._messageHandler) {
+ this._messageHandler.destroy();
+
+ this._messageHandler = null;
+ }
+ }
+ }, {
+ key: "promise",
+ get: function get() {
+ return this._readyCapability.promise;
+ }
+ }, {
+ key: "port",
+ get: function get() {
+ return this._port;
+ }
+ }, {
+ key: "messageHandler",
+ get: function get() {
+ return this._messageHandler;
+ }
+ }], [{
+ key: "fromPort",
+ value: function fromPort(params) {
+ if (!params || !params.port) {
+ throw new Error('PDFWorker.fromPort - invalid method signature.');
+ }
+
+ if (pdfWorkerPorts.has(params.port)) {
+ return pdfWorkerPorts.get(params.port);
+ }
+
+ return new PDFWorker(params);
+ }
+ }, {
+ key: "getWorkerSrc",
+ value: function getWorkerSrc() {
+ return _getWorkerSrc();
+ }
+ }]);
+
+ return PDFWorker;
+ }();
+
+ return PDFWorker;
+}();
+
+exports.PDFWorker = PDFWorker;
+
+var WorkerTransport =
+/*#__PURE__*/
+function () {
+ function WorkerTransport(messageHandler, loadingTask, networkStream, params) {
+ _classCallCheck(this, WorkerTransport);
+
+ this.messageHandler = messageHandler;
+ this.loadingTask = loadingTask;
+ this.commonObjs = new PDFObjects();
+ this.fontLoader = new _font_loader.FontLoader({
+ docId: loadingTask.docId,
+ onUnsupportedFeature: this._onUnsupportedFeature.bind(this)
+ });
+ this._params = params;
+ this.CMapReaderFactory = new params.CMapReaderFactory({
+ baseUrl: params.cMapUrl,
+ isCompressed: params.cMapPacked
+ });
+ this.destroyed = false;
+ this.destroyCapability = null;
+ this._passwordCapability = null;
+ this._networkStream = networkStream;
+ this._fullReader = null;
+ this._lastProgress = null;
+ this.pageCache = [];
+ this.pagePromises = [];
+ this.downloadInfoCapability = (0, _util.createPromiseCapability)();
+ this.setupMessageHandler();
+ }
+
+ _createClass(WorkerTransport, [{
+ key: "destroy",
+ value: function destroy() {
+ var _this9 = this;
+
+ if (this.destroyCapability) {
+ return this.destroyCapability.promise;
+ }
+
+ this.destroyed = true;
+ this.destroyCapability = (0, _util.createPromiseCapability)();
+
+ if (this._passwordCapability) {
+ this._passwordCapability.reject(new Error('Worker was destroyed during onPassword callback'));
+ }
+
+ var waitOn = [];
+ this.pageCache.forEach(function (page) {
+ if (page) {
+ waitOn.push(page._destroy());
+ }
+ });
+ this.pageCache.length = 0;
+ this.pagePromises.length = 0;
+ var terminated = this.messageHandler.sendWithPromise('Terminate', null);
+ waitOn.push(terminated);
+ Promise.all(waitOn).then(function () {
+ _this9.fontLoader.clear();
+
+ if (_this9._networkStream) {
+ _this9._networkStream.cancelAllRequests();
+ }
+
+ if (_this9.messageHandler) {
+ _this9.messageHandler.destroy();
+
+ _this9.messageHandler = null;
+ }
+
+ _this9.destroyCapability.resolve();
+ }, this.destroyCapability.reject);
+ return this.destroyCapability.promise;
+ }
+ }, {
+ key: "setupMessageHandler",
+ value: function setupMessageHandler() {
+ var messageHandler = this.messageHandler,
+ loadingTask = this.loadingTask;
+ messageHandler.on('GetReader', function (data, sink) {
+ var _this10 = this;
+
+ (0, _util.assert)(this._networkStream);
+ this._fullReader = this._networkStream.getFullReader();
+
+ this._fullReader.onProgress = function (evt) {
+ _this10._lastProgress = {
+ loaded: evt.loaded,
+ total: evt.total
+ };
+ };
+
+ sink.onPull = function () {
+ _this10._fullReader.read().then(function (_ref7) {
+ var value = _ref7.value,
+ done = _ref7.done;
+
+ if (done) {
+ sink.close();
+ return;
+ }
+
+ (0, _util.assert)((0, _util.isArrayBuffer)(value));
+ sink.enqueue(new Uint8Array(value), 1, [value]);
+ })["catch"](function (reason) {
+ sink.error(reason);
+ });
+ };
+
+ sink.onCancel = function (reason) {
+ _this10._fullReader.cancel(reason);
+ };
+ }, this);
+ messageHandler.on('ReaderHeadersReady', function (data) {
+ var _this11 = this;
+
+ var headersCapability = (0, _util.createPromiseCapability)();
+ var fullReader = this._fullReader;
+ fullReader.headersReady.then(function () {
+ if (!fullReader.isStreamingSupported || !fullReader.isRangeSupported) {
+ if (_this11._lastProgress && loadingTask.onProgress) {
+ loadingTask.onProgress(_this11._lastProgress);
+ }
+
+ fullReader.onProgress = function (evt) {
+ if (loadingTask.onProgress) {
+ loadingTask.onProgress({
+ loaded: evt.loaded,
+ total: evt.total
+ });
+ }
+ };
+ }
+
+ headersCapability.resolve({
+ isStreamingSupported: fullReader.isStreamingSupported,
+ isRangeSupported: fullReader.isRangeSupported,
+ contentLength: fullReader.contentLength
+ });
+ }, headersCapability.reject);
+ return headersCapability.promise;
+ }, this);
+ messageHandler.on('GetRangeReader', function (data, sink) {
+ (0, _util.assert)(this._networkStream);
+
+ var rangeReader = this._networkStream.getRangeReader(data.begin, data.end);
+
+ if (!rangeReader) {
+ sink.close();
+ return;
+ }
+
+ sink.onPull = function () {
+ rangeReader.read().then(function (_ref8) {
+ var value = _ref8.value,
+ done = _ref8.done;
+
+ if (done) {
+ sink.close();
+ return;
+ }
+
+ (0, _util.assert)((0, _util.isArrayBuffer)(value));
+ sink.enqueue(new Uint8Array(value), 1, [value]);
+ })["catch"](function (reason) {
+ sink.error(reason);
+ });
+ };
+
+ sink.onCancel = function (reason) {
+ rangeReader.cancel(reason);
+ };
+ }, this);
+ messageHandler.on('GetDoc', function (_ref9) {
+ var pdfInfo = _ref9.pdfInfo;
+ this._numPages = pdfInfo.numPages;
+
+ loadingTask._capability.resolve(new PDFDocumentProxy(pdfInfo, this));
+ }, this);
+ messageHandler.on('PasswordRequest', function (exception) {
+ var _this12 = this;
+
+ this._passwordCapability = (0, _util.createPromiseCapability)();
+
+ if (loadingTask.onPassword) {
+ var updatePassword = function updatePassword(password) {
+ _this12._passwordCapability.resolve({
+ password: password
+ });
+ };
+
+ try {
+ loadingTask.onPassword(updatePassword, exception.code);
+ } catch (ex) {
+ this._passwordCapability.reject(ex);
+ }
+ } else {
+ this._passwordCapability.reject(new _util.PasswordException(exception.message, exception.code));
+ }
+
+ return this._passwordCapability.promise;
+ }, this);
+ messageHandler.on('PasswordException', function (exception) {
+ loadingTask._capability.reject(new _util.PasswordException(exception.message, exception.code));
+ }, this);
+ messageHandler.on('InvalidPDF', function (exception) {
+ loadingTask._capability.reject(new _util.InvalidPDFException(exception.message));
+ }, this);
+ messageHandler.on('MissingPDF', function (exception) {
+ loadingTask._capability.reject(new _util.MissingPDFException(exception.message));
+ }, this);
+ messageHandler.on('UnexpectedResponse', function (exception) {
+ loadingTask._capability.reject(new _util.UnexpectedResponseException(exception.message, exception.status));
+ }, this);
+ messageHandler.on('UnknownError', function (exception) {
+ loadingTask._capability.reject(new _util.UnknownErrorException(exception.message, exception.details));
+ }, this);
+ messageHandler.on('DataLoaded', function (data) {
+ if (loadingTask.onProgress) {
+ loadingTask.onProgress({
+ loaded: data.length,
+ total: data.length
+ });
+ }
+
+ this.downloadInfoCapability.resolve(data);
+ }, this);
+ messageHandler.on('StartRenderPage', function (data) {
+ if (this.destroyed) {
+ return;
+ }
+
+ var page = this.pageCache[data.pageIndex];
+
+ page._stats.timeEnd('Page Request');
+
+ page._startRenderPage(data.transparency, data.intent);
+ }, this);
+ messageHandler.on('RenderPageChunk', function (data) {
+ if (this.destroyed) {
+ return;
+ }
+
+ var page = this.pageCache[data.pageIndex];
+
+ page._renderPageChunk(data.operatorList, data.intent);
+ }, this);
+ messageHandler.on('commonobj', function (data) {
+ var _this13 = this;
+
+ if (this.destroyed) {
+ return;
+ }
+
+ var _data = _slicedToArray(data, 3),
+ id = _data[0],
+ type = _data[1],
+ exportedData = _data[2];
+
+ if (this.commonObjs.has(id)) {
+ return;
+ }
+
+ switch (type) {
+ case 'Font':
+ var params = this._params;
+
+ if ('error' in exportedData) {
+ var exportedError = exportedData.error;
+ (0, _util.warn)("Error during font loading: ".concat(exportedError));
+ this.commonObjs.resolve(id, exportedError);
+ break;
+ }
+
+ var fontRegistry = null;
+
+ if (params.pdfBug && _global_scope["default"].FontInspector && _global_scope["default"].FontInspector.enabled) {
+ fontRegistry = {
+ registerFont: function registerFont(font, url) {
+ _global_scope["default"]['FontInspector'].fontAdded(font, url);
+ }
+ };
+ }
+
+ var font = new _font_loader.FontFaceObject(exportedData, {
+ isEvalSupported: params.isEvalSupported,
+ disableFontFace: params.disableFontFace,
+ ignoreErrors: params.ignoreErrors,
+ onUnsupportedFeature: this._onUnsupportedFeature.bind(this),
+ fontRegistry: fontRegistry
+ });
+ this.fontLoader.bind(font).then(function () {
+ _this13.commonObjs.resolve(id, font);
+ }, function (reason) {
+ messageHandler.sendWithPromise('FontFallback', {
+ id: id
+ })["finally"](function () {
+ _this13.commonObjs.resolve(id, font);
+ });
+ });
+ break;
+
+ case 'FontPath':
+ case 'FontType3Res':
+ this.commonObjs.resolve(id, exportedData);
+ break;
+
+ default:
+ throw new Error("Got unknown common object type ".concat(type));
+ }
+ }, this);
+ messageHandler.on('obj', function (data) {
+ if (this.destroyed) {
+ return undefined;
+ }
+
+ var _data2 = _slicedToArray(data, 4),
+ id = _data2[0],
+ pageIndex = _data2[1],
+ type = _data2[2],
+ imageData = _data2[3];
+
+ var pageProxy = this.pageCache[pageIndex];
+
+ if (pageProxy.objs.has(id)) {
+ return undefined;
+ }
+
+ switch (type) {
+ case 'JpegStream':
+ return new Promise(function (resolve, reject) {
+ var img = new Image();
+
+ img.onload = function () {
+ resolve(img);
+ };
+
+ img.onerror = function () {
+ reject(new Error('Error during JPEG image loading'));
+ (0, _display_utils.releaseImageResources)(img);
+ };
+
+ img.src = imageData;
+ }).then(function (img) {
+ pageProxy.objs.resolve(id, img);
+ });
+
+ case 'Image':
+ pageProxy.objs.resolve(id, imageData);
+ var MAX_IMAGE_SIZE_TO_STORE = 8000000;
+
+ if (imageData && 'data' in imageData && imageData.data.length > MAX_IMAGE_SIZE_TO_STORE) {
+ pageProxy.cleanupAfterRender = true;
+ }
+
+ break;
+
+ default:
+ throw new Error("Got unknown object type ".concat(type));
+ }
+
+ return undefined;
+ }, this);
+ messageHandler.on('DocProgress', function (data) {
+ if (this.destroyed) {
+ return;
+ }
+
+ if (loadingTask.onProgress) {
+ loadingTask.onProgress({
+ loaded: data.loaded,
+ total: data.total
+ });
+ }
+ }, this);
+ messageHandler.on('PageError', function (data) {
+ if (this.destroyed) {
+ return;
+ }
+
+ var page = this.pageCache[data.pageIndex];
+ var intentState = page.intentStates[data.intent];
+
+ if (intentState.displayReadyCapability) {
+ intentState.displayReadyCapability.reject(new Error(data.error));
+ } else {
+ throw new Error(data.error);
+ }
+
+ if (intentState.operatorList) {
+ intentState.operatorList.lastChunk = true;
+
+ for (var i = 0; i < intentState.renderTasks.length; i++) {
+ intentState.renderTasks[i].operatorListChanged();
+ }
+ }
+ }, this);
+ messageHandler.on('UnsupportedFeature', this._onUnsupportedFeature, this);
+ messageHandler.on('JpegDecode', function (data) {
+ if (this.destroyed) {
+ return Promise.reject(new Error('Worker was destroyed'));
+ }
+
+ if (typeof document === 'undefined') {
+ return Promise.reject(new Error('"document" is not defined.'));
+ }
+
+ var _data3 = _slicedToArray(data, 2),
+ imageUrl = _data3[0],
+ components = _data3[1];
+
+ if (components !== 3 && components !== 1) {
+ return Promise.reject(new Error('Only 3 components or 1 component can be returned'));
+ }
+
+ return new Promise(function (resolve, reject) {
+ var img = new Image();
+
+ img.onload = function () {
+ var width = img.width,
+ height = img.height;
+ var size = width * height;
+ var rgbaLength = size * 4;
+ var buf = new Uint8ClampedArray(size * components);
+ var tmpCanvas = document.createElement('canvas');
+ tmpCanvas.width = width;
+ tmpCanvas.height = height;
+ var tmpCtx = tmpCanvas.getContext('2d');
+ tmpCtx.drawImage(img, 0, 0);
+ var data = tmpCtx.getImageData(0, 0, width, height).data;
+
+ if (components === 3) {
+ for (var i = 0, j = 0; i < rgbaLength; i += 4, j += 3) {
+ buf[j] = data[i];
+ buf[j + 1] = data[i + 1];
+ buf[j + 2] = data[i + 2];
+ }
+ } else if (components === 1) {
+ for (var _i2 = 0, _j = 0; _i2 < rgbaLength; _i2 += 4, _j++) {
+ buf[_j] = data[_i2];
+ }
+ }
+
+ resolve({
+ data: buf,
+ width: width,
+ height: height
+ });
+ (0, _display_utils.releaseImageResources)(img);
+ tmpCanvas.width = 0;
+ tmpCanvas.height = 0;
+ tmpCanvas = null;
+ tmpCtx = null;
+ };
+
+ img.onerror = function () {
+ reject(new Error('JpegDecode failed to load image'));
+ (0, _display_utils.releaseImageResources)(img);
+ };
+
+ img.src = imageUrl;
+ });
+ }, this);
+ messageHandler.on('FetchBuiltInCMap', function (data) {
+ if (this.destroyed) {
+ return Promise.reject(new Error('Worker was destroyed'));
+ }
+
+ return this.CMapReaderFactory.fetch({
+ name: data.name
+ });
+ }, this);
+ }
+ }, {
+ key: "_onUnsupportedFeature",
+ value: function _onUnsupportedFeature(_ref10) {
+ var featureId = _ref10.featureId;
+
+ if (this.destroyed) {
+ return;
+ }
+
+ if (this.loadingTask.onUnsupportedFeature) {
+ this.loadingTask.onUnsupportedFeature(featureId);
+ }
+ }
+ }, {
+ key: "getData",
+ value: function getData() {
+ return this.messageHandler.sendWithPromise('GetData', null);
+ }
+ }, {
+ key: "getPage",
+ value: function getPage(pageNumber) {
+ var _this14 = this;
+
+ if (!Number.isInteger(pageNumber) || pageNumber <= 0 || pageNumber > this._numPages) {
+ return Promise.reject(new Error('Invalid page request'));
+ }
+
+ var pageIndex = pageNumber - 1;
+
+ if (pageIndex in this.pagePromises) {
+ return this.pagePromises[pageIndex];
+ }
+
+ var promise = this.messageHandler.sendWithPromise('GetPage', {
+ pageIndex: pageIndex
+ }).then(function (pageInfo) {
+ if (_this14.destroyed) {
+ throw new Error('Transport destroyed');
+ }
+
+ var page = new PDFPageProxy(pageIndex, pageInfo, _this14, _this14._params.pdfBug);
+ _this14.pageCache[pageIndex] = page;
+ return page;
+ });
+ this.pagePromises[pageIndex] = promise;
+ return promise;
+ }
+ }, {
+ key: "getPageIndex",
+ value: function getPageIndex(ref) {
+ return this.messageHandler.sendWithPromise('GetPageIndex', {
+ ref: ref
+ })["catch"](function (reason) {
+ return Promise.reject(new Error(reason));
+ });
+ }
+ }, {
+ key: "getAnnotations",
+ value: function getAnnotations(pageIndex, intent) {
+ return this.messageHandler.sendWithPromise('GetAnnotations', {
+ pageIndex: pageIndex,
+ intent: intent
+ });
+ }
+ }, {
+ key: "getDestinations",
+ value: function getDestinations() {
+ return this.messageHandler.sendWithPromise('GetDestinations', null);
+ }
+ }, {
+ key: "getDestination",
+ value: function getDestination(id) {
+ if (typeof id !== 'string') {
+ return Promise.reject(new Error('Invalid destination request.'));
+ }
+
+ return this.messageHandler.sendWithPromise('GetDestination', {
+ id: id
+ });
+ }
+ }, {
+ key: "getPageLabels",
+ value: function getPageLabels() {
+ return this.messageHandler.sendWithPromise('GetPageLabels', null);
+ }
+ }, {
+ key: "getPageLayout",
+ value: function getPageLayout() {
+ return this.messageHandler.sendWithPromise('GetPageLayout', null);
+ }
+ }, {
+ key: "getPageMode",
+ value: function getPageMode() {
+ return this.messageHandler.sendWithPromise('GetPageMode', null);
+ }
+ }, {
+ key: "getViewerPreferences",
+ value: function getViewerPreferences() {
+ return this.messageHandler.sendWithPromise('GetViewerPreferences', null);
+ }
+ }, {
+ key: "getOpenActionDestination",
+ value: function getOpenActionDestination() {
+ return this.messageHandler.sendWithPromise('GetOpenActionDestination', null);
+ }
+ }, {
+ key: "getAttachments",
+ value: function getAttachments() {
+ return this.messageHandler.sendWithPromise('GetAttachments', null);
+ }
+ }, {
+ key: "getJavaScript",
+ value: function getJavaScript() {
+ return this.messageHandler.sendWithPromise('GetJavaScript', null);
+ }
+ }, {
+ key: "getOutline",
+ value: function getOutline() {
+ return this.messageHandler.sendWithPromise('GetOutline', null);
+ }
+ }, {
+ key: "getPermissions",
+ value: function getPermissions() {
+ return this.messageHandler.sendWithPromise('GetPermissions', null);
+ }
+ }, {
+ key: "getMetadata",
+ value: function getMetadata() {
+ var _this15 = this;
+
+ return this.messageHandler.sendWithPromise('GetMetadata', null).then(function (results) {
+ return {
+ info: results[0],
+ metadata: results[1] ? new _metadata.Metadata(results[1]) : null,
+ contentDispositionFilename: _this15._fullReader ? _this15._fullReader.filename : null
+ };
+ });
+ }
+ }, {
+ key: "getStats",
+ value: function getStats() {
+ return this.messageHandler.sendWithPromise('GetStats', null);
+ }
+ }, {
+ key: "startCleanup",
+ value: function startCleanup() {
+ var _this16 = this;
+
+ this.messageHandler.sendWithPromise('Cleanup', null).then(function () {
+ for (var i = 0, ii = _this16.pageCache.length; i < ii; i++) {
+ var page = _this16.pageCache[i];
+
+ if (page) {
+ page.cleanup();
+ }
+ }
+
+ _this16.commonObjs.clear();
+
+ _this16.fontLoader.clear();
+ });
+ }
+ }, {
+ key: "loadingParams",
+ get: function get() {
+ var params = this._params;
+ return (0, _util.shadow)(this, 'loadingParams', {
+ disableAutoFetch: params.disableAutoFetch,
+ disableCreateObjectURL: params.disableCreateObjectURL,
+ disableFontFace: params.disableFontFace,
+ nativeImageDecoderSupport: params.nativeImageDecoderSupport
+ });
+ }
+ }]);
+
+ return WorkerTransport;
+}();
+
+var PDFObjects =
+/*#__PURE__*/
+function () {
+ function PDFObjects() {
+ _classCallCheck(this, PDFObjects);
+
+ this._objs = Object.create(null);
+ }
+
+ _createClass(PDFObjects, [{
+ key: "_ensureObj",
+ value: function _ensureObj(objId) {
+ if (this._objs[objId]) {
+ return this._objs[objId];
+ }
+
+ return this._objs[objId] = {
+ capability: (0, _util.createPromiseCapability)(),
+ data: null,
+ resolved: false
+ };
+ }
+ }, {
+ key: "get",
+ value: function get(objId) {
+ var callback = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null;
+
+ if (callback) {
+ this._ensureObj(objId).capability.promise.then(callback);
+
+ return null;
+ }
+
+ var obj = this._objs[objId];
+
+ if (!obj || !obj.resolved) {
+ throw new Error("Requesting object that isn't resolved yet ".concat(objId, "."));
+ }
+
+ return obj.data;
+ }
+ }, {
+ key: "has",
+ value: function has(objId) {
+ var obj = this._objs[objId];
+ return obj ? obj.resolved : false;
+ }
+ }, {
+ key: "resolve",
+ value: function resolve(objId, data) {
+ var obj = this._ensureObj(objId);
+
+ obj.resolved = true;
+ obj.data = data;
+ obj.capability.resolve(data);
+ }
+ }, {
+ key: "clear",
+ value: function clear() {
+ for (var objId in this._objs) {
+ var data = this._objs[objId].data;
+
+ if (typeof Image !== 'undefined' && data instanceof Image) {
+ (0, _display_utils.releaseImageResources)(data);
+ }
+ }
+
+ this._objs = Object.create(null);
+ }
+ }]);
+
+ return PDFObjects;
+}();
+
+var RenderTask =
+/*#__PURE__*/
+function () {
+ function RenderTask(internalRenderTask) {
+ _classCallCheck(this, RenderTask);
+
+ this._internalRenderTask = internalRenderTask;
+ this.onContinue = null;
+ }
+
+ _createClass(RenderTask, [{
+ key: "cancel",
+ value: function cancel() {
+ this._internalRenderTask.cancel();
+ }
+ }, {
+ key: "then",
+ value: function then(onFulfilled, onRejected) {
+ (0, _display_utils.deprecated)('RenderTask.then method, use the `promise` getter instead.');
+ return this.promise.then.apply(this.promise, arguments);
+ }
+ }, {
+ key: "promise",
+ get: function get() {
+ return this._internalRenderTask.capability.promise;
+ }
+ }]);
+
+ return RenderTask;
+}();
+
+var InternalRenderTask = function InternalRenderTaskClosure() {
+ var canvasInRendering = new WeakSet();
+
+ var InternalRenderTask =
+ /*#__PURE__*/
+ function () {
+ function InternalRenderTask(_ref11) {
+ var callback = _ref11.callback,
+ params = _ref11.params,
+ objs = _ref11.objs,
+ commonObjs = _ref11.commonObjs,
+ operatorList = _ref11.operatorList,
+ pageNumber = _ref11.pageNumber,
+ canvasFactory = _ref11.canvasFactory,
+ webGLContext = _ref11.webGLContext,
+ _ref11$useRequestAnim = _ref11.useRequestAnimationFrame,
+ useRequestAnimationFrame = _ref11$useRequestAnim === void 0 ? false : _ref11$useRequestAnim,
+ _ref11$pdfBug = _ref11.pdfBug,
+ pdfBug = _ref11$pdfBug === void 0 ? false : _ref11$pdfBug;
+
+ _classCallCheck(this, InternalRenderTask);
+
+ this.callback = callback;
+ this.params = params;
+ this.objs = objs;
+ this.commonObjs = commonObjs;
+ this.operatorListIdx = null;
+ this.operatorList = operatorList;
+ this.pageNumber = pageNumber;
+ this.canvasFactory = canvasFactory;
+ this.webGLContext = webGLContext;
+ this._pdfBug = pdfBug;
+ this.running = false;
+ this.graphicsReadyCallback = null;
+ this.graphicsReady = false;
+ this._useRequestAnimationFrame = useRequestAnimationFrame === true && typeof window !== 'undefined';
+ this.cancelled = false;
+ this.capability = (0, _util.createPromiseCapability)();
+ this.task = new RenderTask(this);
+ this._continueBound = this._continue.bind(this);
+ this._scheduleNextBound = this._scheduleNext.bind(this);
+ this._nextBound = this._next.bind(this);
+ this._canvas = params.canvasContext.canvas;
+ }
+
+ _createClass(InternalRenderTask, [{
+ key: "initializeGraphics",
+ value: function initializeGraphics() {
+ var transparency = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
+
+ if (this.cancelled) {
+ return;
+ }
+
+ if (this._canvas) {
+ if (canvasInRendering.has(this._canvas)) {
+ throw new Error('Cannot use the same canvas during multiple render() operations. ' + 'Use different canvas or ensure previous operations were ' + 'cancelled or completed.');
+ }
+
+ canvasInRendering.add(this._canvas);
+ }
+
+ if (this._pdfBug && _global_scope["default"].StepperManager && _global_scope["default"].StepperManager.enabled) {
+ this.stepper = _global_scope["default"].StepperManager.create(this.pageNumber - 1);
+ this.stepper.init(this.operatorList);
+ this.stepper.nextBreakPoint = this.stepper.getNextBreakPoint();
+ }
+
+ var _this$params = this.params,
+ canvasContext = _this$params.canvasContext,
+ viewport = _this$params.viewport,
+ transform = _this$params.transform,
+ imageLayer = _this$params.imageLayer,
+ background = _this$params.background;
+ this.gfx = new _canvas.CanvasGraphics(canvasContext, this.commonObjs, this.objs, this.canvasFactory, this.webGLContext, imageLayer);
+ this.gfx.beginDrawing({
+ transform: transform,
+ viewport: viewport,
+ transparency: transparency,
+ background: background
+ });
+ this.operatorListIdx = 0;
+ this.graphicsReady = true;
+
+ if (this.graphicsReadyCallback) {
+ this.graphicsReadyCallback();
+ }
+ }
+ }, {
+ key: "cancel",
+ value: function cancel() {
+ var error = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
+ this.running = false;
+ this.cancelled = true;
+
+ if (this.gfx) {
+ this.gfx.endDrawing();
+ }
+
+ if (this._canvas) {
+ canvasInRendering["delete"](this._canvas);
+ }
+
+ this.callback(error || new _display_utils.RenderingCancelledException("Rendering cancelled, page ".concat(this.pageNumber), 'canvas'));
+ }
+ }, {
+ key: "operatorListChanged",
+ value: function operatorListChanged() {
+ if (!this.graphicsReady) {
+ if (!this.graphicsReadyCallback) {
+ this.graphicsReadyCallback = this._continueBound;
+ }
+
+ return;
+ }
+
+ if (this.stepper) {
+ this.stepper.updateOperatorList(this.operatorList);
+ }
+
+ if (this.running) {
+ return;
+ }
+
+ this._continue();
+ }
+ }, {
+ key: "_continue",
+ value: function _continue() {
+ this.running = true;
+
+ if (this.cancelled) {
+ return;
+ }
+
+ if (this.task.onContinue) {
+ this.task.onContinue(this._scheduleNextBound);
+ } else {
+ this._scheduleNext();
+ }
+ }
+ }, {
+ key: "_scheduleNext",
+ value: function _scheduleNext() {
+ var _this17 = this;
+
+ if (this._useRequestAnimationFrame) {
+ window.requestAnimationFrame(function () {
+ _this17._nextBound()["catch"](_this17.cancel.bind(_this17));
+ });
+ } else {
+ Promise.resolve().then(this._nextBound)["catch"](this.cancel.bind(this));
+ }
+ }
+ }, {
+ key: "_next",
+ value: function () {
+ var _next2 = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee() {
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ if (!this.cancelled) {
+ _context.next = 2;
+ break;
+ }
+
+ return _context.abrupt("return");
+
+ case 2:
+ this.operatorListIdx = this.gfx.executeOperatorList(this.operatorList, this.operatorListIdx, this._continueBound, this.stepper);
+
+ if (this.operatorListIdx === this.operatorList.argsArray.length) {
+ this.running = false;
+
+ if (this.operatorList.lastChunk) {
+ this.gfx.endDrawing();
+
+ if (this._canvas) {
+ canvasInRendering["delete"](this._canvas);
+ }
+
+ this.callback();
+ }
+ }
+
+ case 4:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, this);
+ }));
+
+ function _next() {
+ return _next2.apply(this, arguments);
+ }
+
+ return _next;
+ }()
+ }]);
+
+ return InternalRenderTask;
+ }();
+
+ return InternalRenderTask;
+}();
+
+var version = '2.2.228';
+exports.version = version;
+var build = 'd7afb74a';
+exports.build = build;
+
+/***/ }),
+/* 148 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = __w_pdfjs_require__(149);
+
+/***/ }),
+/* 149 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+/* WEBPACK VAR INJECTION */(function(module) {
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var runtime = function (exports) {
+ "use strict";
+
+ var Op = Object.prototype;
+ var hasOwn = Op.hasOwnProperty;
+ var undefined;
+ var $Symbol = typeof Symbol === "function" ? Symbol : {};
+ var iteratorSymbol = $Symbol.iterator || "@@iterator";
+ var asyncIteratorSymbol = $Symbol.asyncIterator || "@@asyncIterator";
+ var toStringTagSymbol = $Symbol.toStringTag || "@@toStringTag";
+
+ function wrap(innerFn, outerFn, self, tryLocsList) {
+ var protoGenerator = outerFn && outerFn.prototype instanceof Generator ? outerFn : Generator;
+ var generator = Object.create(protoGenerator.prototype);
+ var context = new Context(tryLocsList || []);
+ generator._invoke = makeInvokeMethod(innerFn, self, context);
+ return generator;
+ }
+
+ exports.wrap = wrap;
+
+ function tryCatch(fn, obj, arg) {
+ try {
+ return {
+ type: "normal",
+ arg: fn.call(obj, arg)
+ };
+ } catch (err) {
+ return {
+ type: "throw",
+ arg: err
+ };
+ }
+ }
+
+ var GenStateSuspendedStart = "suspendedStart";
+ var GenStateSuspendedYield = "suspendedYield";
+ var GenStateExecuting = "executing";
+ var GenStateCompleted = "completed";
+ var ContinueSentinel = {};
+
+ function Generator() {}
+
+ function GeneratorFunction() {}
+
+ function GeneratorFunctionPrototype() {}
+
+ var IteratorPrototype = {};
+
+ IteratorPrototype[iteratorSymbol] = function () {
+ return this;
+ };
+
+ var getProto = Object.getPrototypeOf;
+ var NativeIteratorPrototype = getProto && getProto(getProto(values([])));
+
+ if (NativeIteratorPrototype && NativeIteratorPrototype !== Op && hasOwn.call(NativeIteratorPrototype, iteratorSymbol)) {
+ IteratorPrototype = NativeIteratorPrototype;
+ }
+
+ var Gp = GeneratorFunctionPrototype.prototype = Generator.prototype = Object.create(IteratorPrototype);
+ GeneratorFunction.prototype = Gp.constructor = GeneratorFunctionPrototype;
+ GeneratorFunctionPrototype.constructor = GeneratorFunction;
+ GeneratorFunctionPrototype[toStringTagSymbol] = GeneratorFunction.displayName = "GeneratorFunction";
+
+ function defineIteratorMethods(prototype) {
+ ["next", "throw", "return"].forEach(function (method) {
+ prototype[method] = function (arg) {
+ return this._invoke(method, arg);
+ };
+ });
+ }
+
+ exports.isGeneratorFunction = function (genFun) {
+ var ctor = typeof genFun === "function" && genFun.constructor;
+ return ctor ? ctor === GeneratorFunction || (ctor.displayName || ctor.name) === "GeneratorFunction" : false;
+ };
+
+ exports.mark = function (genFun) {
+ if (Object.setPrototypeOf) {
+ Object.setPrototypeOf(genFun, GeneratorFunctionPrototype);
+ } else {
+ genFun.__proto__ = GeneratorFunctionPrototype;
+
+ if (!(toStringTagSymbol in genFun)) {
+ genFun[toStringTagSymbol] = "GeneratorFunction";
+ }
+ }
+
+ genFun.prototype = Object.create(Gp);
+ return genFun;
+ };
+
+ exports.awrap = function (arg) {
+ return {
+ __await: arg
+ };
+ };
+
+ function AsyncIterator(generator) {
+ function invoke(method, arg, resolve, reject) {
+ var record = tryCatch(generator[method], generator, arg);
+
+ if (record.type === "throw") {
+ reject(record.arg);
+ } else {
+ var result = record.arg;
+ var value = result.value;
+
+ if (value && _typeof(value) === "object" && hasOwn.call(value, "__await")) {
+ return Promise.resolve(value.__await).then(function (value) {
+ invoke("next", value, resolve, reject);
+ }, function (err) {
+ invoke("throw", err, resolve, reject);
+ });
+ }
+
+ return Promise.resolve(value).then(function (unwrapped) {
+ result.value = unwrapped;
+ resolve(result);
+ }, function (error) {
+ return invoke("throw", error, resolve, reject);
+ });
+ }
+ }
+
+ var previousPromise;
+
+ function enqueue(method, arg) {
+ function callInvokeWithMethodAndArg() {
+ return new Promise(function (resolve, reject) {
+ invoke(method, arg, resolve, reject);
+ });
+ }
+
+ return previousPromise = previousPromise ? previousPromise.then(callInvokeWithMethodAndArg, callInvokeWithMethodAndArg) : callInvokeWithMethodAndArg();
+ }
+
+ this._invoke = enqueue;
+ }
+
+ defineIteratorMethods(AsyncIterator.prototype);
+
+ AsyncIterator.prototype[asyncIteratorSymbol] = function () {
+ return this;
+ };
+
+ exports.AsyncIterator = AsyncIterator;
+
+ exports.async = function (innerFn, outerFn, self, tryLocsList) {
+ var iter = new AsyncIterator(wrap(innerFn, outerFn, self, tryLocsList));
+ return exports.isGeneratorFunction(outerFn) ? iter : iter.next().then(function (result) {
+ return result.done ? result.value : iter.next();
+ });
+ };
+
+ function makeInvokeMethod(innerFn, self, context) {
+ var state = GenStateSuspendedStart;
+ return function invoke(method, arg) {
+ if (state === GenStateExecuting) {
+ throw new Error("Generator is already running");
+ }
+
+ if (state === GenStateCompleted) {
+ if (method === "throw") {
+ throw arg;
+ }
+
+ return doneResult();
+ }
+
+ context.method = method;
+ context.arg = arg;
+
+ while (true) {
+ var delegate = context.delegate;
+
+ if (delegate) {
+ var delegateResult = maybeInvokeDelegate(delegate, context);
+
+ if (delegateResult) {
+ if (delegateResult === ContinueSentinel) continue;
+ return delegateResult;
+ }
+ }
+
+ if (context.method === "next") {
+ context.sent = context._sent = context.arg;
+ } else if (context.method === "throw") {
+ if (state === GenStateSuspendedStart) {
+ state = GenStateCompleted;
+ throw context.arg;
+ }
+
+ context.dispatchException(context.arg);
+ } else if (context.method === "return") {
+ context.abrupt("return", context.arg);
+ }
+
+ state = GenStateExecuting;
+ var record = tryCatch(innerFn, self, context);
+
+ if (record.type === "normal") {
+ state = context.done ? GenStateCompleted : GenStateSuspendedYield;
+
+ if (record.arg === ContinueSentinel) {
+ continue;
+ }
+
+ return {
+ value: record.arg,
+ done: context.done
+ };
+ } else if (record.type === "throw") {
+ state = GenStateCompleted;
+ context.method = "throw";
+ context.arg = record.arg;
+ }
+ }
+ };
+ }
+
+ function maybeInvokeDelegate(delegate, context) {
+ var method = delegate.iterator[context.method];
+
+ if (method === undefined) {
+ context.delegate = null;
+
+ if (context.method === "throw") {
+ if (delegate.iterator["return"]) {
+ context.method = "return";
+ context.arg = undefined;
+ maybeInvokeDelegate(delegate, context);
+
+ if (context.method === "throw") {
+ return ContinueSentinel;
+ }
+ }
+
+ context.method = "throw";
+ context.arg = new TypeError("The iterator does not provide a 'throw' method");
+ }
+
+ return ContinueSentinel;
+ }
+
+ var record = tryCatch(method, delegate.iterator, context.arg);
+
+ if (record.type === "throw") {
+ context.method = "throw";
+ context.arg = record.arg;
+ context.delegate = null;
+ return ContinueSentinel;
+ }
+
+ var info = record.arg;
+
+ if (!info) {
+ context.method = "throw";
+ context.arg = new TypeError("iterator result is not an object");
+ context.delegate = null;
+ return ContinueSentinel;
+ }
+
+ if (info.done) {
+ context[delegate.resultName] = info.value;
+ context.next = delegate.nextLoc;
+
+ if (context.method !== "return") {
+ context.method = "next";
+ context.arg = undefined;
+ }
+ } else {
+ return info;
+ }
+
+ context.delegate = null;
+ return ContinueSentinel;
+ }
+
+ defineIteratorMethods(Gp);
+ Gp[toStringTagSymbol] = "Generator";
+
+ Gp[iteratorSymbol] = function () {
+ return this;
+ };
+
+ Gp.toString = function () {
+ return "[object Generator]";
+ };
+
+ function pushTryEntry(locs) {
+ var entry = {
+ tryLoc: locs[0]
+ };
+
+ if (1 in locs) {
+ entry.catchLoc = locs[1];
+ }
+
+ if (2 in locs) {
+ entry.finallyLoc = locs[2];
+ entry.afterLoc = locs[3];
+ }
+
+ this.tryEntries.push(entry);
+ }
+
+ function resetTryEntry(entry) {
+ var record = entry.completion || {};
+ record.type = "normal";
+ delete record.arg;
+ entry.completion = record;
+ }
+
+ function Context(tryLocsList) {
+ this.tryEntries = [{
+ tryLoc: "root"
+ }];
+ tryLocsList.forEach(pushTryEntry, this);
+ this.reset(true);
+ }
+
+ exports.keys = function (object) {
+ var keys = [];
+
+ for (var key in object) {
+ keys.push(key);
+ }
+
+ keys.reverse();
+ return function next() {
+ while (keys.length) {
+ var key = keys.pop();
+
+ if (key in object) {
+ next.value = key;
+ next.done = false;
+ return next;
+ }
+ }
+
+ next.done = true;
+ return next;
+ };
+ };
+
+ function values(iterable) {
+ if (iterable) {
+ var iteratorMethod = iterable[iteratorSymbol];
+
+ if (iteratorMethod) {
+ return iteratorMethod.call(iterable);
+ }
+
+ if (typeof iterable.next === "function") {
+ return iterable;
+ }
+
+ if (!isNaN(iterable.length)) {
+ var i = -1,
+ next = function next() {
+ while (++i < iterable.length) {
+ if (hasOwn.call(iterable, i)) {
+ next.value = iterable[i];
+ next.done = false;
+ return next;
+ }
+ }
+
+ next.value = undefined;
+ next.done = true;
+ return next;
+ };
+
+ return next.next = next;
+ }
+ }
+
+ return {
+ next: doneResult
+ };
+ }
+
+ exports.values = values;
+
+ function doneResult() {
+ return {
+ value: undefined,
+ done: true
+ };
+ }
+
+ Context.prototype = {
+ constructor: Context,
+ reset: function reset(skipTempReset) {
+ this.prev = 0;
+ this.next = 0;
+ this.sent = this._sent = undefined;
+ this.done = false;
+ this.delegate = null;
+ this.method = "next";
+ this.arg = undefined;
+ this.tryEntries.forEach(resetTryEntry);
+
+ if (!skipTempReset) {
+ for (var name in this) {
+ if (name.charAt(0) === "t" && hasOwn.call(this, name) && !isNaN(+name.slice(1))) {
+ this[name] = undefined;
+ }
+ }
+ }
+ },
+ stop: function stop() {
+ this.done = true;
+ var rootEntry = this.tryEntries[0];
+ var rootRecord = rootEntry.completion;
+
+ if (rootRecord.type === "throw") {
+ throw rootRecord.arg;
+ }
+
+ return this.rval;
+ },
+ dispatchException: function dispatchException(exception) {
+ if (this.done) {
+ throw exception;
+ }
+
+ var context = this;
+
+ function handle(loc, caught) {
+ record.type = "throw";
+ record.arg = exception;
+ context.next = loc;
+
+ if (caught) {
+ context.method = "next";
+ context.arg = undefined;
+ }
+
+ return !!caught;
+ }
+
+ for (var i = this.tryEntries.length - 1; i >= 0; --i) {
+ var entry = this.tryEntries[i];
+ var record = entry.completion;
+
+ if (entry.tryLoc === "root") {
+ return handle("end");
+ }
+
+ if (entry.tryLoc <= this.prev) {
+ var hasCatch = hasOwn.call(entry, "catchLoc");
+ var hasFinally = hasOwn.call(entry, "finallyLoc");
+
+ if (hasCatch && hasFinally) {
+ if (this.prev < entry.catchLoc) {
+ return handle(entry.catchLoc, true);
+ } else if (this.prev < entry.finallyLoc) {
+ return handle(entry.finallyLoc);
+ }
+ } else if (hasCatch) {
+ if (this.prev < entry.catchLoc) {
+ return handle(entry.catchLoc, true);
+ }
+ } else if (hasFinally) {
+ if (this.prev < entry.finallyLoc) {
+ return handle(entry.finallyLoc);
+ }
+ } else {
+ throw new Error("try statement without catch or finally");
+ }
+ }
+ }
+ },
+ abrupt: function abrupt(type, arg) {
+ for (var i = this.tryEntries.length - 1; i >= 0; --i) {
+ var entry = this.tryEntries[i];
+
+ if (entry.tryLoc <= this.prev && hasOwn.call(entry, "finallyLoc") && this.prev < entry.finallyLoc) {
+ var finallyEntry = entry;
+ break;
+ }
+ }
+
+ if (finallyEntry && (type === "break" || type === "continue") && finallyEntry.tryLoc <= arg && arg <= finallyEntry.finallyLoc) {
+ finallyEntry = null;
+ }
+
+ var record = finallyEntry ? finallyEntry.completion : {};
+ record.type = type;
+ record.arg = arg;
+
+ if (finallyEntry) {
+ this.method = "next";
+ this.next = finallyEntry.finallyLoc;
+ return ContinueSentinel;
+ }
+
+ return this.complete(record);
+ },
+ complete: function complete(record, afterLoc) {
+ if (record.type === "throw") {
+ throw record.arg;
+ }
+
+ if (record.type === "break" || record.type === "continue") {
+ this.next = record.arg;
+ } else if (record.type === "return") {
+ this.rval = this.arg = record.arg;
+ this.method = "return";
+ this.next = "end";
+ } else if (record.type === "normal" && afterLoc) {
+ this.next = afterLoc;
+ }
+
+ return ContinueSentinel;
+ },
+ finish: function finish(finallyLoc) {
+ for (var i = this.tryEntries.length - 1; i >= 0; --i) {
+ var entry = this.tryEntries[i];
+
+ if (entry.finallyLoc === finallyLoc) {
+ this.complete(entry.completion, entry.afterLoc);
+ resetTryEntry(entry);
+ return ContinueSentinel;
+ }
+ }
+ },
+ "catch": function _catch(tryLoc) {
+ for (var i = this.tryEntries.length - 1; i >= 0; --i) {
+ var entry = this.tryEntries[i];
+
+ if (entry.tryLoc === tryLoc) {
+ var record = entry.completion;
+
+ if (record.type === "throw") {
+ var thrown = record.arg;
+ resetTryEntry(entry);
+ }
+
+ return thrown;
+ }
+ }
+
+ throw new Error("illegal catch attempt");
+ },
+ delegateYield: function delegateYield(iterable, resultName, nextLoc) {
+ this.delegate = {
+ iterator: values(iterable),
+ resultName: resultName,
+ nextLoc: nextLoc
+ };
+
+ if (this.method === "next") {
+ this.arg = undefined;
+ }
+
+ return ContinueSentinel;
+ }
+ };
+ return exports;
+}(( false ? undefined : _typeof(module)) === "object" ? module.exports : {});
+
+try {
+ regeneratorRuntime = runtime;
+} catch (accidentalStrictMode) {
+ Function("r", "regeneratorRuntime = r")(runtime);
+}
+/* WEBPACK VAR INJECTION */}.call(this, __w_pdfjs_require__(150)(module)))
+
+/***/ }),
+/* 150 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (module) {
+ if (!module.webpackPolyfill) {
+ module.deprecate = function () {};
+
+ module.paths = [];
+ if (!module.children) module.children = [];
+ Object.defineProperty(module, "loaded", {
+ enumerable: true,
+ get: function get() {
+ return module.l;
+ }
+ });
+ Object.defineProperty(module, "id", {
+ enumerable: true,
+ get: function get() {
+ return module.i;
+ }
+ });
+ module.webpackPolyfill = 1;
+ }
+
+ return module;
+};
+
+/***/ }),
+/* 151 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.addLinkAttributes = addLinkAttributes;
+exports.getFilenameFromUrl = getFilenameFromUrl;
+exports.isFetchSupported = isFetchSupported;
+exports.isValidFetchUrl = isValidFetchUrl;
+exports.loadScript = loadScript;
+exports.deprecated = deprecated;
+exports.releaseImageResources = releaseImageResources;
+exports.PDFDateString = exports.DummyStatTimer = exports.StatTimer = exports.DOMSVGFactory = exports.DOMCMapReaderFactory = exports.DOMCanvasFactory = exports.DEFAULT_LINK_REL = exports.LinkTarget = exports.RenderingCancelledException = exports.PageViewport = void 0;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(148));
+
+var _util = __w_pdfjs_require__(1);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var DEFAULT_LINK_REL = 'noopener noreferrer nofollow';
+exports.DEFAULT_LINK_REL = DEFAULT_LINK_REL;
+var SVG_NS = 'http://www.w3.org/2000/svg';
+
+var DOMCanvasFactory =
+/*#__PURE__*/
+function () {
+ function DOMCanvasFactory() {
+ _classCallCheck(this, DOMCanvasFactory);
+ }
+
+ _createClass(DOMCanvasFactory, [{
+ key: "create",
+ value: function create(width, height) {
+ if (width <= 0 || height <= 0) {
+ throw new Error('Invalid canvas size');
+ }
+
+ var canvas = document.createElement('canvas');
+ var context = canvas.getContext('2d');
+ canvas.width = width;
+ canvas.height = height;
+ return {
+ canvas: canvas,
+ context: context
+ };
+ }
+ }, {
+ key: "reset",
+ value: function reset(canvasAndContext, width, height) {
+ if (!canvasAndContext.canvas) {
+ throw new Error('Canvas is not specified');
+ }
+
+ if (width <= 0 || height <= 0) {
+ throw new Error('Invalid canvas size');
+ }
+
+ canvasAndContext.canvas.width = width;
+ canvasAndContext.canvas.height = height;
+ }
+ }, {
+ key: "destroy",
+ value: function destroy(canvasAndContext) {
+ if (!canvasAndContext.canvas) {
+ throw new Error('Canvas is not specified');
+ }
+
+ canvasAndContext.canvas.width = 0;
+ canvasAndContext.canvas.height = 0;
+ canvasAndContext.canvas = null;
+ canvasAndContext.context = null;
+ }
+ }]);
+
+ return DOMCanvasFactory;
+}();
+
+exports.DOMCanvasFactory = DOMCanvasFactory;
+
+var DOMCMapReaderFactory =
+/*#__PURE__*/
+function () {
+ function DOMCMapReaderFactory(_ref) {
+ var _ref$baseUrl = _ref.baseUrl,
+ baseUrl = _ref$baseUrl === void 0 ? null : _ref$baseUrl,
+ _ref$isCompressed = _ref.isCompressed,
+ isCompressed = _ref$isCompressed === void 0 ? false : _ref$isCompressed;
+
+ _classCallCheck(this, DOMCMapReaderFactory);
+
+ this.baseUrl = baseUrl;
+ this.isCompressed = isCompressed;
+ }
+
+ _createClass(DOMCMapReaderFactory, [{
+ key: "fetch",
+ value: function (_fetch) {
+ function fetch(_x) {
+ return _fetch.apply(this, arguments);
+ }
+
+ fetch.toString = function () {
+ return _fetch.toString();
+ };
+
+ return fetch;
+ }(
+ /*#__PURE__*/
+ function () {
+ var _ref3 = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee2(_ref2) {
+ var _this = this;
+
+ var name, url, compressionType;
+ return _regenerator["default"].wrap(function _callee2$(_context2) {
+ while (1) {
+ switch (_context2.prev = _context2.next) {
+ case 0:
+ name = _ref2.name;
+
+ if (this.baseUrl) {
+ _context2.next = 3;
+ break;
+ }
+
+ throw new Error('The CMap "baseUrl" parameter must be specified, ensure that ' + 'the "cMapUrl" and "cMapPacked" API parameters are provided.');
+
+ case 3:
+ if (name) {
+ _context2.next = 5;
+ break;
+ }
+
+ throw new Error('CMap name must be specified.');
+
+ case 5:
+ url = this.baseUrl + name + (this.isCompressed ? '.bcmap' : '');
+ compressionType = this.isCompressed ? _util.CMapCompressionType.BINARY : _util.CMapCompressionType.NONE;
+
+ if (!(isFetchSupported() && isValidFetchUrl(url, document.baseURI))) {
+ _context2.next = 9;
+ break;
+ }
+
+ return _context2.abrupt("return", fetch(url).then(
+ /*#__PURE__*/
+ function () {
+ var _ref4 = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee(response) {
+ var cMapData;
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ if (response.ok) {
+ _context.next = 2;
+ break;
+ }
+
+ throw new Error(response.statusText);
+
+ case 2:
+ if (!_this.isCompressed) {
+ _context.next = 10;
+ break;
+ }
+
+ _context.t0 = Uint8Array;
+ _context.next = 6;
+ return response.arrayBuffer();
+
+ case 6:
+ _context.t1 = _context.sent;
+ cMapData = new _context.t0(_context.t1);
+ _context.next = 15;
+ break;
+
+ case 10:
+ _context.t2 = _util.stringToBytes;
+ _context.next = 13;
+ return response.text();
+
+ case 13:
+ _context.t3 = _context.sent;
+ cMapData = (0, _context.t2)(_context.t3);
+
+ case 15:
+ return _context.abrupt("return", {
+ cMapData: cMapData,
+ compressionType: compressionType
+ });
+
+ case 16:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee);
+ }));
+
+ return function (_x3) {
+ return _ref4.apply(this, arguments);
+ };
+ }())["catch"](function (reason) {
+ throw new Error("Unable to load ".concat(_this.isCompressed ? 'binary ' : '') + "CMap at: ".concat(url));
+ }));
+
+ case 9:
+ return _context2.abrupt("return", new Promise(function (resolve, reject) {
+ var request = new XMLHttpRequest();
+ request.open('GET', url, true);
+
+ if (_this.isCompressed) {
+ request.responseType = 'arraybuffer';
+ }
+
+ request.onreadystatechange = function () {
+ if (request.readyState !== XMLHttpRequest.DONE) {
+ return;
+ }
+
+ if (request.status === 200 || request.status === 0) {
+ var cMapData;
+
+ if (_this.isCompressed && request.response) {
+ cMapData = new Uint8Array(request.response);
+ } else if (!_this.isCompressed && request.responseText) {
+ cMapData = (0, _util.stringToBytes)(request.responseText);
+ }
+
+ if (cMapData) {
+ resolve({
+ cMapData: cMapData,
+ compressionType: compressionType
+ });
+ return;
+ }
+ }
+
+ reject(new Error(request.statusText));
+ };
+
+ request.send(null);
+ })["catch"](function (reason) {
+ throw new Error("Unable to load ".concat(_this.isCompressed ? 'binary ' : '') + "CMap at: ".concat(url));
+ }));
+
+ case 10:
+ case "end":
+ return _context2.stop();
+ }
+ }
+ }, _callee2, this);
+ }));
+
+ return function (_x2) {
+ return _ref3.apply(this, arguments);
+ };
+ }())
+ }]);
+
+ return DOMCMapReaderFactory;
+}();
+
+exports.DOMCMapReaderFactory = DOMCMapReaderFactory;
+
+var DOMSVGFactory =
+/*#__PURE__*/
+function () {
+ function DOMSVGFactory() {
+ _classCallCheck(this, DOMSVGFactory);
+ }
+
+ _createClass(DOMSVGFactory, [{
+ key: "create",
+ value: function create(width, height) {
+ (0, _util.assert)(width > 0 && height > 0, 'Invalid SVG dimensions');
+ var svg = document.createElementNS(SVG_NS, 'svg:svg');
+ svg.setAttribute('version', '1.1');
+ svg.setAttribute('width', width + 'px');
+ svg.setAttribute('height', height + 'px');
+ svg.setAttribute('preserveAspectRatio', 'none');
+ svg.setAttribute('viewBox', '0 0 ' + width + ' ' + height);
+ return svg;
+ }
+ }, {
+ key: "createElement",
+ value: function createElement(type) {
+ (0, _util.assert)(typeof type === 'string', 'Invalid SVG element type');
+ return document.createElementNS(SVG_NS, type);
+ }
+ }]);
+
+ return DOMSVGFactory;
+}();
+
+exports.DOMSVGFactory = DOMSVGFactory;
+
+var PageViewport =
+/*#__PURE__*/
+function () {
+ function PageViewport(_ref5) {
+ var viewBox = _ref5.viewBox,
+ scale = _ref5.scale,
+ rotation = _ref5.rotation,
+ _ref5$offsetX = _ref5.offsetX,
+ offsetX = _ref5$offsetX === void 0 ? 0 : _ref5$offsetX,
+ _ref5$offsetY = _ref5.offsetY,
+ offsetY = _ref5$offsetY === void 0 ? 0 : _ref5$offsetY,
+ _ref5$dontFlip = _ref5.dontFlip,
+ dontFlip = _ref5$dontFlip === void 0 ? false : _ref5$dontFlip;
+
+ _classCallCheck(this, PageViewport);
+
+ this.viewBox = viewBox;
+ this.scale = scale;
+ this.rotation = rotation;
+ this.offsetX = offsetX;
+ this.offsetY = offsetY;
+ var centerX = (viewBox[2] + viewBox[0]) / 2;
+ var centerY = (viewBox[3] + viewBox[1]) / 2;
+ var rotateA, rotateB, rotateC, rotateD;
+ rotation = rotation % 360;
+ rotation = rotation < 0 ? rotation + 360 : rotation;
+
+ switch (rotation) {
+ case 180:
+ rotateA = -1;
+ rotateB = 0;
+ rotateC = 0;
+ rotateD = 1;
+ break;
+
+ case 90:
+ rotateA = 0;
+ rotateB = 1;
+ rotateC = 1;
+ rotateD = 0;
+ break;
+
+ case 270:
+ rotateA = 0;
+ rotateB = -1;
+ rotateC = -1;
+ rotateD = 0;
+ break;
+
+ default:
+ rotateA = 1;
+ rotateB = 0;
+ rotateC = 0;
+ rotateD = -1;
+ break;
+ }
+
+ if (dontFlip) {
+ rotateC = -rotateC;
+ rotateD = -rotateD;
+ }
+
+ var offsetCanvasX, offsetCanvasY;
+ var width, height;
+
+ if (rotateA === 0) {
+ offsetCanvasX = Math.abs(centerY - viewBox[1]) * scale + offsetX;
+ offsetCanvasY = Math.abs(centerX - viewBox[0]) * scale + offsetY;
+ width = Math.abs(viewBox[3] - viewBox[1]) * scale;
+ height = Math.abs(viewBox[2] - viewBox[0]) * scale;
+ } else {
+ offsetCanvasX = Math.abs(centerX - viewBox[0]) * scale + offsetX;
+ offsetCanvasY = Math.abs(centerY - viewBox[1]) * scale + offsetY;
+ width = Math.abs(viewBox[2] - viewBox[0]) * scale;
+ height = Math.abs(viewBox[3] - viewBox[1]) * scale;
+ }
+
+ this.transform = [rotateA * scale, rotateB * scale, rotateC * scale, rotateD * scale, offsetCanvasX - rotateA * scale * centerX - rotateC * scale * centerY, offsetCanvasY - rotateB * scale * centerX - rotateD * scale * centerY];
+ this.width = width;
+ this.height = height;
+ }
+
+ _createClass(PageViewport, [{
+ key: "clone",
+ value: function clone() {
+ var _ref6 = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
+ _ref6$scale = _ref6.scale,
+ scale = _ref6$scale === void 0 ? this.scale : _ref6$scale,
+ _ref6$rotation = _ref6.rotation,
+ rotation = _ref6$rotation === void 0 ? this.rotation : _ref6$rotation,
+ _ref6$dontFlip = _ref6.dontFlip,
+ dontFlip = _ref6$dontFlip === void 0 ? false : _ref6$dontFlip;
+
+ return new PageViewport({
+ viewBox: this.viewBox.slice(),
+ scale: scale,
+ rotation: rotation,
+ offsetX: this.offsetX,
+ offsetY: this.offsetY,
+ dontFlip: dontFlip
+ });
+ }
+ }, {
+ key: "convertToViewportPoint",
+ value: function convertToViewportPoint(x, y) {
+ return _util.Util.applyTransform([x, y], this.transform);
+ }
+ }, {
+ key: "convertToViewportRectangle",
+ value: function convertToViewportRectangle(rect) {
+ var topLeft = _util.Util.applyTransform([rect[0], rect[1]], this.transform);
+
+ var bottomRight = _util.Util.applyTransform([rect[2], rect[3]], this.transform);
+
+ return [topLeft[0], topLeft[1], bottomRight[0], bottomRight[1]];
+ }
+ }, {
+ key: "convertToPdfPoint",
+ value: function convertToPdfPoint(x, y) {
+ return _util.Util.applyInverseTransform([x, y], this.transform);
+ }
+ }]);
+
+ return PageViewport;
+}();
+
+exports.PageViewport = PageViewport;
+
+var RenderingCancelledException = function RenderingCancelledException() {
+ function RenderingCancelledException(msg, type) {
+ this.message = msg;
+ this.type = type;
+ }
+
+ RenderingCancelledException.prototype = new Error();
+ RenderingCancelledException.prototype.name = 'RenderingCancelledException';
+ RenderingCancelledException.constructor = RenderingCancelledException;
+ return RenderingCancelledException;
+}();
+
+exports.RenderingCancelledException = RenderingCancelledException;
+var LinkTarget = {
+ NONE: 0,
+ SELF: 1,
+ BLANK: 2,
+ PARENT: 3,
+ TOP: 4
+};
+exports.LinkTarget = LinkTarget;
+var LinkTargetStringMap = ['', '_self', '_blank', '_parent', '_top'];
+
+function addLinkAttributes(link) {
+ var _ref7 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ url = _ref7.url,
+ target = _ref7.target,
+ rel = _ref7.rel;
+
+ link.href = link.title = url ? (0, _util.removeNullCharacters)(url) : '';
+
+ if (url) {
+ var LinkTargetValues = Object.values(LinkTarget);
+ var targetIndex = LinkTargetValues.includes(target) ? target : LinkTarget.NONE;
+ link.target = LinkTargetStringMap[targetIndex];
+ link.rel = typeof rel === 'string' ? rel : DEFAULT_LINK_REL;
+ }
+}
+
+function getFilenameFromUrl(url) {
+ var anchor = url.indexOf('#');
+ var query = url.indexOf('?');
+ var end = Math.min(anchor > 0 ? anchor : url.length, query > 0 ? query : url.length);
+ return url.substring(url.lastIndexOf('/', end) + 1, end);
+}
+
+var StatTimer =
+/*#__PURE__*/
+function () {
+ function StatTimer() {
+ var enable = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
+
+ _classCallCheck(this, StatTimer);
+
+ this.enabled = !!enable;
+ this.started = Object.create(null);
+ this.times = [];
+ }
+
+ _createClass(StatTimer, [{
+ key: "time",
+ value: function time(name) {
+ if (!this.enabled) {
+ return;
+ }
+
+ if (name in this.started) {
+ (0, _util.warn)('Timer is already running for ' + name);
+ }
+
+ this.started[name] = Date.now();
+ }
+ }, {
+ key: "timeEnd",
+ value: function timeEnd(name) {
+ if (!this.enabled) {
+ return;
+ }
+
+ if (!(name in this.started)) {
+ (0, _util.warn)('Timer has not been started for ' + name);
+ }
+
+ this.times.push({
+ 'name': name,
+ 'start': this.started[name],
+ 'end': Date.now()
+ });
+ delete this.started[name];
+ }
+ }, {
+ key: "toString",
+ value: function toString() {
+ var out = '',
+ longest = 0;
+ var _iteratorNormalCompletion = true;
+ var _didIteratorError = false;
+ var _iteratorError = undefined;
+
+ try {
+ for (var _iterator = this.times[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
+ var time = _step.value;
+ var name = time.name;
+
+ if (name.length > longest) {
+ longest = name.length;
+ }
+ }
+ } catch (err) {
+ _didIteratorError = true;
+ _iteratorError = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion && _iterator["return"] != null) {
+ _iterator["return"]();
+ }
+ } finally {
+ if (_didIteratorError) {
+ throw _iteratorError;
+ }
+ }
+ }
+
+ var _iteratorNormalCompletion2 = true;
+ var _didIteratorError2 = false;
+ var _iteratorError2 = undefined;
+
+ try {
+ for (var _iterator2 = this.times[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
+ var _time = _step2.value;
+ var duration = _time.end - _time.start;
+ out += "".concat(_time.name.padEnd(longest), " ").concat(duration, "ms\n");
+ }
+ } catch (err) {
+ _didIteratorError2 = true;
+ _iteratorError2 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) {
+ _iterator2["return"]();
+ }
+ } finally {
+ if (_didIteratorError2) {
+ throw _iteratorError2;
+ }
+ }
+ }
+
+ return out;
+ }
+ }]);
+
+ return StatTimer;
+}();
+
+exports.StatTimer = StatTimer;
+
+var DummyStatTimer =
+/*#__PURE__*/
+function () {
+ function DummyStatTimer() {
+ _classCallCheck(this, DummyStatTimer);
+
+ (0, _util.unreachable)('Cannot initialize DummyStatTimer.');
+ }
+
+ _createClass(DummyStatTimer, null, [{
+ key: "time",
+ value: function time(name) {}
+ }, {
+ key: "timeEnd",
+ value: function timeEnd(name) {}
+ }, {
+ key: "toString",
+ value: function toString() {
+ return '';
+ }
+ }]);
+
+ return DummyStatTimer;
+}();
+
+exports.DummyStatTimer = DummyStatTimer;
+
+function isFetchSupported() {
+ return typeof fetch !== 'undefined' && typeof Response !== 'undefined' && 'body' in Response.prototype && typeof ReadableStream !== 'undefined';
+}
+
+function isValidFetchUrl(url, baseUrl) {
+ try {
+ var _ref8 = baseUrl ? new _util.URL(url, baseUrl) : new _util.URL(url),
+ protocol = _ref8.protocol;
+
+ return protocol === 'http:' || protocol === 'https:';
+ } catch (ex) {
+ return false;
+ }
+}
+
+function loadScript(src) {
+ return new Promise(function (resolve, reject) {
+ var script = document.createElement('script');
+ script.src = src;
+ script.onload = resolve;
+
+ script.onerror = function () {
+ reject(new Error("Cannot load script at: ".concat(script.src)));
+ };
+
+ (document.head || document.documentElement).appendChild(script);
+ });
+}
+
+function deprecated(details) {
+ console.log('Deprecated API usage: ' + details);
+}
+
+function releaseImageResources(img) {
+ (0, _util.assert)(img instanceof Image, 'Invalid `img` parameter.');
+ var url = img.src;
+
+ if (typeof url === 'string' && url.startsWith('blob:') && _util.URL.revokeObjectURL) {
+ _util.URL.revokeObjectURL(url);
+ }
+
+ img.removeAttribute('src');
+}
+
+var pdfDateStringRegex;
+
+var PDFDateString =
+/*#__PURE__*/
+function () {
+ function PDFDateString() {
+ _classCallCheck(this, PDFDateString);
+ }
+
+ _createClass(PDFDateString, null, [{
+ key: "toDateObject",
+ value: function toDateObject(input) {
+ if (!input || !(0, _util.isString)(input)) {
+ return null;
+ }
+
+ if (!pdfDateStringRegex) {
+ pdfDateStringRegex = new RegExp('^D:' + '(\\d{4})' + '(\\d{2})?' + '(\\d{2})?' + '(\\d{2})?' + '(\\d{2})?' + '(\\d{2})?' + '([Z|+|-])?' + '(\\d{2})?' + '\'?' + '(\\d{2})?' + '\'?');
+ }
+
+ var matches = pdfDateStringRegex.exec(input);
+
+ if (!matches) {
+ return null;
+ }
+
+ var year = parseInt(matches[1], 10);
+ var month = parseInt(matches[2], 10);
+ month = month >= 1 && month <= 12 ? month - 1 : 0;
+ var day = parseInt(matches[3], 10);
+ day = day >= 1 && day <= 31 ? day : 1;
+ var hour = parseInt(matches[4], 10);
+ hour = hour >= 0 && hour <= 23 ? hour : 0;
+ var minute = parseInt(matches[5], 10);
+ minute = minute >= 0 && minute <= 59 ? minute : 0;
+ var second = parseInt(matches[6], 10);
+ second = second >= 0 && second <= 59 ? second : 0;
+ var universalTimeRelation = matches[7] || 'Z';
+ var offsetHour = parseInt(matches[8], 10);
+ offsetHour = offsetHour >= 0 && offsetHour <= 23 ? offsetHour : 0;
+ var offsetMinute = parseInt(matches[9], 10) || 0;
+ offsetMinute = offsetMinute >= 0 && offsetMinute <= 59 ? offsetMinute : 0;
+
+ if (universalTimeRelation === '-') {
+ hour += offsetHour;
+ minute += offsetMinute;
+ } else if (universalTimeRelation === '+') {
+ hour -= offsetHour;
+ minute -= offsetMinute;
+ }
+
+ return new Date(Date.UTC(year, month, day, hour, minute, second));
+ }
+ }]);
+
+ return PDFDateString;
+}();
+
+exports.PDFDateString = PDFDateString;
+
+/***/ }),
+/* 152 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.FontLoader = exports.FontFaceObject = void 0;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(148));
+
+var _util = __w_pdfjs_require__(1);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
+
+function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
+
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var BaseFontLoader =
+/*#__PURE__*/
+function () {
+ function BaseFontLoader(_ref) {
+ var docId = _ref.docId,
+ onUnsupportedFeature = _ref.onUnsupportedFeature;
+
+ _classCallCheck(this, BaseFontLoader);
+
+ if (this.constructor === BaseFontLoader) {
+ (0, _util.unreachable)('Cannot initialize BaseFontLoader.');
+ }
+
+ this.docId = docId;
+ this._onUnsupportedFeature = onUnsupportedFeature;
+ this.nativeFontFaces = [];
+ this.styleElement = null;
+ }
+
+ _createClass(BaseFontLoader, [{
+ key: "addNativeFontFace",
+ value: function addNativeFontFace(nativeFontFace) {
+ this.nativeFontFaces.push(nativeFontFace);
+ document.fonts.add(nativeFontFace);
+ }
+ }, {
+ key: "insertRule",
+ value: function insertRule(rule) {
+ var styleElement = this.styleElement;
+
+ if (!styleElement) {
+ styleElement = this.styleElement = document.createElement('style');
+ styleElement.id = "PDFJS_FONT_STYLE_TAG_".concat(this.docId);
+ document.documentElement.getElementsByTagName('head')[0].appendChild(styleElement);
+ }
+
+ var styleSheet = styleElement.sheet;
+ styleSheet.insertRule(rule, styleSheet.cssRules.length);
+ }
+ }, {
+ key: "clear",
+ value: function clear() {
+ this.nativeFontFaces.forEach(function (nativeFontFace) {
+ document.fonts["delete"](nativeFontFace);
+ });
+ this.nativeFontFaces.length = 0;
+
+ if (this.styleElement) {
+ this.styleElement.remove();
+ this.styleElement = null;
+ }
+ }
+ }, {
+ key: "bind",
+ value: function () {
+ var _bind = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee(font) {
+ var _this = this;
+
+ var nativeFontFace, rule;
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ if (!(font.attached || font.missingFile)) {
+ _context.next = 2;
+ break;
+ }
+
+ return _context.abrupt("return", undefined);
+
+ case 2:
+ font.attached = true;
+
+ if (!this.isFontLoadingAPISupported) {
+ _context.next = 19;
+ break;
+ }
+
+ nativeFontFace = font.createNativeFontFace();
+
+ if (!nativeFontFace) {
+ _context.next = 18;
+ break;
+ }
+
+ this.addNativeFontFace(nativeFontFace);
+ _context.prev = 7;
+ _context.next = 10;
+ return nativeFontFace.loaded;
+
+ case 10:
+ _context.next = 18;
+ break;
+
+ case 12:
+ _context.prev = 12;
+ _context.t0 = _context["catch"](7);
+
+ this._onUnsupportedFeature({
+ featureId: _util.UNSUPPORTED_FEATURES.font
+ });
+
+ (0, _util.warn)("Failed to load font '".concat(nativeFontFace.family, "': '").concat(_context.t0, "'."));
+ font.disableFontFace = true;
+ throw _context.t0;
+
+ case 18:
+ return _context.abrupt("return", undefined);
+
+ case 19:
+ rule = font.createFontFaceRule();
+
+ if (!rule) {
+ _context.next = 25;
+ break;
+ }
+
+ this.insertRule(rule);
+
+ if (!this.isSyncFontLoadingSupported) {
+ _context.next = 24;
+ break;
+ }
+
+ return _context.abrupt("return", undefined);
+
+ case 24:
+ return _context.abrupt("return", new Promise(function (resolve) {
+ var request = _this._queueLoadingCallback(resolve);
+
+ _this._prepareFontLoadEvent([rule], [font], request);
+ }));
+
+ case 25:
+ return _context.abrupt("return", undefined);
+
+ case 26:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, this, [[7, 12]]);
+ }));
+
+ function bind(_x) {
+ return _bind.apply(this, arguments);
+ }
+
+ return bind;
+ }()
+ }, {
+ key: "_queueLoadingCallback",
+ value: function _queueLoadingCallback(callback) {
+ (0, _util.unreachable)('Abstract method `_queueLoadingCallback`.');
+ }
+ }, {
+ key: "_prepareFontLoadEvent",
+ value: function _prepareFontLoadEvent(rules, fontsToLoad, request) {
+ (0, _util.unreachable)('Abstract method `_prepareFontLoadEvent`.');
+ }
+ }, {
+ key: "isFontLoadingAPISupported",
+ get: function get() {
+ (0, _util.unreachable)('Abstract method `isFontLoadingAPISupported`.');
+ }
+ }, {
+ key: "isSyncFontLoadingSupported",
+ get: function get() {
+ (0, _util.unreachable)('Abstract method `isSyncFontLoadingSupported`.');
+ }
+ }, {
+ key: "_loadTestFont",
+ get: function get() {
+ (0, _util.unreachable)('Abstract method `_loadTestFont`.');
+ }
+ }]);
+
+ return BaseFontLoader;
+}();
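+
+// BaseFontLoader only implements the shared bookkeeping (addNativeFontFace, insertRule, clear and
+// the async bind() flow); its getters and the _queueLoadingCallback/_prepareFontLoadEvent hooks
+// all throw via unreachable(), so a concrete subclass such as the GenericFontLoader defined below
+// has to supply the feature detection and the fallback loading path.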
+
+var FontLoader;
+exports.FontLoader = FontLoader;
+{
+ exports.FontLoader = FontLoader =
+ /*#__PURE__*/
+ function (_BaseFontLoader) {
+ _inherits(GenericFontLoader, _BaseFontLoader);
+
+ function GenericFontLoader(docId) {
+ var _this2;
+
+ _classCallCheck(this, GenericFontLoader);
+
+ _this2 = _possibleConstructorReturn(this, _getPrototypeOf(GenericFontLoader).call(this, docId));
+ _this2.loadingContext = {
+ requests: [],
+ nextRequestId: 0
+ };
+ _this2.loadTestFontId = 0;
+ return _this2;
+ }
+
+ _createClass(GenericFontLoader, [{
+ key: "_queueLoadingCallback",
+ value: function _queueLoadingCallback(callback) {
+ function completeRequest() {
+ (0, _util.assert)(!request.done, 'completeRequest() cannot be called twice.');
+ request.done = true;
+
+ while (context.requests.length > 0 && context.requests[0].done) {
+ var otherRequest = context.requests.shift();
+ setTimeout(otherRequest.callback, 0);
+ }
+ }
+
+ var context = this.loadingContext;
+ var request = {
+ id: "pdfjs-font-loading-".concat(context.nextRequestId++),
+ done: false,
+ complete: completeRequest,
+ callback: callback
+ };
+ context.requests.push(request);
+ return request;
+ }
+ }, {
+ key: "_prepareFontLoadEvent",
+ value: function _prepareFontLoadEvent(rules, fonts, request) {
+ function int32(data, offset) {
+ return data.charCodeAt(offset) << 24 | data.charCodeAt(offset + 1) << 16 | data.charCodeAt(offset + 2) << 8 | data.charCodeAt(offset + 3) & 0xff;
+ }
+
+ function spliceString(s, offset, remove, insert) {
+ var chunk1 = s.substring(0, offset);
+ var chunk2 = s.substring(offset + remove);
+ return chunk1 + insert + chunk2;
+ }
+
+ var i, ii;
+ var canvas = document.createElement('canvas');
+ canvas.width = 1;
+ canvas.height = 1;
+ var ctx = canvas.getContext('2d');
+ var called = 0;
+
+ function isFontReady(name, callback) {
+ called++;
+
+ if (called > 30) {
+ (0, _util.warn)('Load test font never loaded.');
+ callback();
+ return;
+ }
+
+ ctx.font = '30px ' + name;
+ ctx.fillText('.', 0, 20);
+ var imageData = ctx.getImageData(0, 0, 1, 1);
+
+ if (imageData.data[3] > 0) {
+ callback();
+ return;
+ }
+
+ setTimeout(isFontReady.bind(null, name, callback));
+ }
+
+ var loadTestFontId = "lt".concat(Date.now()).concat(this.loadTestFontId++);
+ var data = this._loadTestFont;
+ var COMMENT_OFFSET = 976;
+ data = spliceString(data, COMMENT_OFFSET, loadTestFontId.length, loadTestFontId);
+ var CFF_CHECKSUM_OFFSET = 16;
+ var XXXX_VALUE = 0x58585858;
+ var checksum = int32(data, CFF_CHECKSUM_OFFSET);
+
+ for (i = 0, ii = loadTestFontId.length - 3; i < ii; i += 4) {
+ checksum = checksum - XXXX_VALUE + int32(loadTestFontId, i) | 0;
+ }
+
+ if (i < loadTestFontId.length) {
+ checksum = checksum - XXXX_VALUE + int32(loadTestFontId + 'XXX', i) | 0;
+ }
+
+ data = spliceString(data, CFF_CHECKSUM_OFFSET, 4, (0, _util.string32)(checksum));
+ var url = "url(data:font/opentype;base64,".concat(btoa(data), ");");
+ var rule = "@font-face {font-family:\"".concat(loadTestFontId, "\";src:").concat(url, "}");
+ this.insertRule(rule);
+ var names = [];
+
+ for (i = 0, ii = fonts.length; i < ii; i++) {
+ names.push(fonts[i].loadedName);
+ }
+
+ names.push(loadTestFontId);
+ var div = document.createElement('div');
+ div.setAttribute('style', 'visibility: hidden;' + 'width: 10px; height: 10px;' + 'position: absolute; top: 0px; left: 0px;');
+
+ for (i = 0, ii = names.length; i < ii; ++i) {
+ var span = document.createElement('span');
+ span.textContent = 'Hi';
+ span.style.fontFamily = names[i];
+ div.appendChild(span);
+ }
+
+ document.body.appendChild(div);
+ isFontReady(loadTestFontId, function () {
+ document.body.removeChild(div);
+ request.complete();
+ });
+ }
+ }, {
+ key: "isFontLoadingAPISupported",
+ get: function get() {
+ var supported = typeof document !== 'undefined' && !!document.fonts;
+
+ if (supported && typeof navigator !== 'undefined') {
+ var m = /Mozilla\/5.0.*?rv:(\d+).*? Gecko/.exec(navigator.userAgent);
+
+ if (m && m[1] < 63) {
+ supported = false;
+ }
+ }
+
+ return (0, _util.shadow)(this, 'isFontLoadingAPISupported', supported);
+ }
+ }, {
+ key: "isSyncFontLoadingSupported",
+ get: function get() {
+ var supported = false;
+
+ if (typeof navigator === 'undefined') {
+ supported = true;
+ } else {
+ var m = /Mozilla\/5.0.*?rv:(\d+).*? Gecko/.exec(navigator.userAgent);
+
+ if (m && m[1] >= 14) {
+ supported = true;
+ }
+ }
+
+ return (0, _util.shadow)(this, 'isSyncFontLoadingSupported', supported);
+ }
+ }, {
+ key: "_loadTestFont",
+ get: function get() {
+ var getLoadTestFont = function getLoadTestFont() {
+ return atob('T1RUTwALAIAAAwAwQ0ZGIDHtZg4AAAOYAAAAgUZGVE1lkzZwAAAEHAAAABxHREVGABQA' + 'FQAABDgAAAAeT1MvMlYNYwkAAAEgAAAAYGNtYXABDQLUAAACNAAAAUJoZWFk/xVFDQAA' + 'ALwAAAA2aGhlYQdkA+oAAAD0AAAAJGhtdHgD6AAAAAAEWAAAAAZtYXhwAAJQAAAAARgA' + 'AAAGbmFtZVjmdH4AAAGAAAAAsXBvc3T/hgAzAAADeAAAACAAAQAAAAEAALZRFsRfDzz1' + 'AAsD6AAAAADOBOTLAAAAAM4KHDwAAAAAA+gDIQAAAAgAAgAAAAAAAAABAAADIQAAAFoD' + '6AAAAAAD6AABAAAAAAAAAAAAAAAAAAAAAQAAUAAAAgAAAAQD6AH0AAUAAAKKArwAAACM' + 'AooCvAAAAeAAMQECAAACAAYJAAAAAAAAAAAAAQAAAAAAAAAAAAAAAFBmRWQAwAAuAC4D' + 'IP84AFoDIQAAAAAAAQAAAAAAAAAAACAAIAABAAAADgCuAAEAAAAAAAAAAQAAAAEAAAAA' + 'AAEAAQAAAAEAAAAAAAIAAQAAAAEAAAAAAAMAAQAAAAEAAAAAAAQAAQAAAAEAAAAAAAUA' + 'AQAAAAEAAAAAAAYAAQAAAAMAAQQJAAAAAgABAAMAAQQJAAEAAgABAAMAAQQJAAIAAgAB' + 'AAMAAQQJAAMAAgABAAMAAQQJAAQAAgABAAMAAQQJAAUAAgABAAMAAQQJAAYAAgABWABY' + 'AAAAAAAAAwAAAAMAAAAcAAEAAAAAADwAAwABAAAAHAAEACAAAAAEAAQAAQAAAC7//wAA' + 'AC7////TAAEAAAAAAAABBgAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + 'AAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMAAAAAAAD/gwAyAAAAAQAAAAAAAAAAAAAAAAAA' + 'AAABAAQEAAEBAQJYAAEBASH4DwD4GwHEAvgcA/gXBIwMAYuL+nz5tQXkD5j3CBLnEQAC' + 'AQEBIVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYAAABAQAADwACAQEEE/t3' + 'Dov6fAH6fAT+fPp8+nwHDosMCvm1Cvm1DAz6fBQAAAAAAAABAAAAAMmJbzEAAAAAzgTj' + 'FQAAAADOBOQpAAEAAAAAAAAADAAUAAQAAAABAAAAAgABAAAAAAAAAAAD6AAAAAAAAA==');
+ };
+
+ return (0, _util.shadow)(this, '_loadTestFont', getLoadTestFont());
+ }
+ }]);
+
+ return GenericFontLoader;
+ }(BaseFontLoader);
+}
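+
+// GenericFontLoader._prepareFontLoadEvent implements the fallback path: it splices a unique id
+// and a recomputed checksum into the tiny base64 "load test" OpenType font, registers it through
+// an extra @font-face rule, draws a '.' at that font into a 1x1 canvas and polls via setTimeout
+// until the pixel's alpha becomes non-zero (after roughly 30 attempts it warns and gives up).
+// Either way the queued request is then completed, releasing the waiting callbacks in FIFO order.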
+var IsEvalSupportedCached = {
+ get value() {
+ return (0, _util.shadow)(this, 'value', (0, _util.isEvalSupported)());
+  }
+};
+
+var FontFaceObject =
+/*#__PURE__*/
+function () {
+ function FontFaceObject(translatedData, _ref2) {
+ var _ref2$isEvalSupported = _ref2.isEvalSupported,
+ isEvalSupported = _ref2$isEvalSupported === void 0 ? true : _ref2$isEvalSupported,
+ _ref2$disableFontFace = _ref2.disableFontFace,
+ disableFontFace = _ref2$disableFontFace === void 0 ? false : _ref2$disableFontFace,
+ _ref2$ignoreErrors = _ref2.ignoreErrors,
+ ignoreErrors = _ref2$ignoreErrors === void 0 ? false : _ref2$ignoreErrors,
+ _ref2$onUnsupportedFe = _ref2.onUnsupportedFeature,
+ onUnsupportedFeature = _ref2$onUnsupportedFe === void 0 ? null : _ref2$onUnsupportedFe,
+ _ref2$fontRegistry = _ref2.fontRegistry,
+ fontRegistry = _ref2$fontRegistry === void 0 ? null : _ref2$fontRegistry;
+
+ _classCallCheck(this, FontFaceObject);
+
+ this.compiledGlyphs = Object.create(null);
+
+ for (var i in translatedData) {
+ this[i] = translatedData[i];
+ }
+
+ this.isEvalSupported = isEvalSupported !== false;
+ this.disableFontFace = disableFontFace === true;
+ this.ignoreErrors = ignoreErrors === true;
+ this._onUnsupportedFeature = onUnsupportedFeature;
+ this.fontRegistry = fontRegistry;
+ }
+
+ _createClass(FontFaceObject, [{
+ key: "createNativeFontFace",
+ value: function createNativeFontFace() {
+ if (!this.data || this.disableFontFace) {
+ return null;
+ }
+
+ var nativeFontFace = new FontFace(this.loadedName, this.data, {});
+
+ if (this.fontRegistry) {
+ this.fontRegistry.registerFont(this);
+ }
+
+ return nativeFontFace;
+ }
+ }, {
+ key: "createFontFaceRule",
+ value: function createFontFaceRule() {
+ if (!this.data || this.disableFontFace) {
+ return null;
+ }
+
+ var data = (0, _util.bytesToString)(new Uint8Array(this.data));
+ var url = "url(data:".concat(this.mimetype, ";base64,").concat(btoa(data), ");");
+ var rule = "@font-face {font-family:\"".concat(this.loadedName, "\";src:").concat(url, "}");
+
+ if (this.fontRegistry) {
+ this.fontRegistry.registerFont(this, url);
+ }
+
+ return rule;
+ }
+ }, {
+ key: "getPathGenerator",
+ value: function getPathGenerator(objs, character) {
+ if (this.compiledGlyphs[character] !== undefined) {
+ return this.compiledGlyphs[character];
+ }
+
+ var cmds, current;
+
+ try {
+ cmds = objs.get(this.loadedName + '_path_' + character);
+ } catch (ex) {
+ if (!this.ignoreErrors) {
+ throw ex;
+ }
+
+ if (this._onUnsupportedFeature) {
+ this._onUnsupportedFeature({
+ featureId: _util.UNSUPPORTED_FEATURES.font
+ });
+ }
+
+ (0, _util.warn)("getPathGenerator - ignoring character: \"".concat(ex, "\"."));
+ return this.compiledGlyphs[character] = function (c, size) {};
+ }
+
+ if (this.isEvalSupported && IsEvalSupportedCached.value) {
+ var args,
+ js = '';
+
+ for (var i = 0, ii = cmds.length; i < ii; i++) {
+ current = cmds[i];
+
+ if (current.args !== undefined) {
+ args = current.args.join(',');
+ } else {
+ args = '';
+ }
+
+ js += 'c.' + current.cmd + '(' + args + ');\n';
+ }
+
+ return this.compiledGlyphs[character] = new Function('c', 'size', js);
+ }
+
+ return this.compiledGlyphs[character] = function (c, size) {
+ for (var _i = 0, _ii = cmds.length; _i < _ii; _i++) {
+ current = cmds[_i];
+
+ if (current.cmd === 'scale') {
+ current.args = [size, -size];
+ }
+
+ c[current.cmd].apply(c, current.args);
+ }
+ };
+ }
+ }]);
+
+ return FontFaceObject;
+}();
+
+exports.FontFaceObject = FontFaceObject;
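+
+// FontFaceObject.getPathGenerator caches one drawing routine per glyph: the command list fetched
+// from objs under "<loadedName>_path_<character>" is compiled into new Function('c', 'size', js)
+// when eval is usable (isEvalSupported plus IsEvalSupportedCached), and replayed by a small
+// interpreting closure otherwise; with ignoreErrors set, a failed lookup degrades to a no-op
+// glyph after reporting the unsupported-font feature.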
+
+/***/ }),
+/* 153 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var compatibilityParams = Object.create(null);
+{
+ var isNodeJS = __w_pdfjs_require__(4);
+
+ var userAgent = typeof navigator !== 'undefined' && navigator.userAgent || '';
+ var isIE = /Trident/.test(userAgent);
+ var isIOSChrome = /CriOS/.test(userAgent);
+
+ (function checkOnBlobSupport() {
+ if (isIE || isIOSChrome) {
+ compatibilityParams.disableCreateObjectURL = true;
+ }
+ })();
+
+ (function checkFontFaceAndImage() {
+ if (isNodeJS()) {
+ compatibilityParams.disableFontFace = true;
+ compatibilityParams.nativeImageDecoderSupport = 'none';
+ }
+ })();
+}
+exports.apiCompatibilityParams = Object.freeze(compatibilityParams);
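+
+// Module 153 centralizes the user-agent driven fallbacks: Trident (IE) and CriOS (Chrome on iOS)
+// get disableCreateObjectURL, Node.js builds get disableFontFace plus
+// nativeImageDecoderSupport = 'none', and every other environment sees an empty, frozen object.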
+
+/***/ }),
+/* 154 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.CanvasGraphics = void 0;
+
+var _util = __w_pdfjs_require__(1);
+
+var _pattern_helper = __w_pdfjs_require__(155);
+
+var MIN_FONT_SIZE = 16;
+var MAX_FONT_SIZE = 100;
+var MAX_GROUP_SIZE = 4096;
+var MIN_WIDTH_FACTOR = 0.65;
+var COMPILE_TYPE3_GLYPHS = true;
+var MAX_SIZE_TO_COMPILE = 1000;
+var FULL_CHUNK_HEIGHT = 16;
+var IsLittleEndianCached = {
+ get value() {
+ return (0, _util.shadow)(IsLittleEndianCached, 'value', (0, _util.isLittleEndian)());
+  }
+};
+
+function addContextCurrentTransform(ctx) {
+ if (!ctx.mozCurrentTransform) {
+ ctx._originalSave = ctx.save;
+ ctx._originalRestore = ctx.restore;
+ ctx._originalRotate = ctx.rotate;
+ ctx._originalScale = ctx.scale;
+ ctx._originalTranslate = ctx.translate;
+ ctx._originalTransform = ctx.transform;
+ ctx._originalSetTransform = ctx.setTransform;
+ ctx._transformMatrix = ctx._transformMatrix || [1, 0, 0, 1, 0, 0];
+ ctx._transformStack = [];
+ Object.defineProperty(ctx, 'mozCurrentTransform', {
+ get: function getCurrentTransform() {
+ return this._transformMatrix;
+ }
+ });
+ Object.defineProperty(ctx, 'mozCurrentTransformInverse', {
+ get: function getCurrentTransformInverse() {
+ var m = this._transformMatrix;
+ var a = m[0],
+ b = m[1],
+ c = m[2],
+ d = m[3],
+ e = m[4],
+ f = m[5];
+ var ad_bc = a * d - b * c;
+ var bc_ad = b * c - a * d;
+ return [d / ad_bc, b / bc_ad, c / bc_ad, a / ad_bc, (d * e - c * f) / bc_ad, (b * e - a * f) / ad_bc];
+ }
+ });
+
+ ctx.save = function ctxSave() {
+ var old = this._transformMatrix;
+
+ this._transformStack.push(old);
+
+ this._transformMatrix = old.slice(0, 6);
+
+ this._originalSave();
+ };
+
+ ctx.restore = function ctxRestore() {
+ var prev = this._transformStack.pop();
+
+ if (prev) {
+ this._transformMatrix = prev;
+
+ this._originalRestore();
+ }
+ };
+
+ ctx.translate = function ctxTranslate(x, y) {
+ var m = this._transformMatrix;
+ m[4] = m[0] * x + m[2] * y + m[4];
+ m[5] = m[1] * x + m[3] * y + m[5];
+
+ this._originalTranslate(x, y);
+ };
+
+ ctx.scale = function ctxScale(x, y) {
+ var m = this._transformMatrix;
+ m[0] = m[0] * x;
+ m[1] = m[1] * x;
+ m[2] = m[2] * y;
+ m[3] = m[3] * y;
+
+ this._originalScale(x, y);
+ };
+
+ ctx.transform = function ctxTransform(a, b, c, d, e, f) {
+ var m = this._transformMatrix;
+ this._transformMatrix = [m[0] * a + m[2] * b, m[1] * a + m[3] * b, m[0] * c + m[2] * d, m[1] * c + m[3] * d, m[0] * e + m[2] * f + m[4], m[1] * e + m[3] * f + m[5]];
+
+ ctx._originalTransform(a, b, c, d, e, f);
+ };
+
+ ctx.setTransform = function ctxSetTransform(a, b, c, d, e, f) {
+ this._transformMatrix = [a, b, c, d, e, f];
+
+ ctx._originalSetTransform(a, b, c, d, e, f);
+ };
+
+ ctx.rotate = function ctxRotate(angle) {
+ var cosValue = Math.cos(angle);
+ var sinValue = Math.sin(angle);
+ var m = this._transformMatrix;
+ this._transformMatrix = [m[0] * cosValue + m[2] * sinValue, m[1] * cosValue + m[3] * sinValue, m[0] * -sinValue + m[2] * cosValue, m[1] * -sinValue + m[3] * cosValue, m[4], m[5]];
+
+ this._originalRotate(angle);
+ };
+ }
+}
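+
+// mozCurrentTransform mirrors the canvas matrix [a, b, c, d, e, f] (x' = a*x + c*y + e,
+// y' = b*x + d*y + f) and mozCurrentTransformInverse is the usual affine inverse
+// [d, -b, -c, a, c*f - d*e, b*e - a*f] divided by the determinant a*d - b*c. The wrapped
+// save/restore/translate/scale/rotate/transform/setTransform calls above keep that shadow matrix
+// in sync on contexts that do not expose the current transform natively.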
+
+var CachedCanvases = function CachedCanvasesClosure() {
+ function CachedCanvases(canvasFactory) {
+ this.canvasFactory = canvasFactory;
+ this.cache = Object.create(null);
+ }
+
+ CachedCanvases.prototype = {
+ getCanvas: function CachedCanvases_getCanvas(id, width, height, trackTransform) {
+ var canvasEntry;
+
+ if (this.cache[id] !== undefined) {
+ canvasEntry = this.cache[id];
+ this.canvasFactory.reset(canvasEntry, width, height);
+ canvasEntry.context.setTransform(1, 0, 0, 1, 0, 0);
+ } else {
+ canvasEntry = this.canvasFactory.create(width, height);
+ this.cache[id] = canvasEntry;
+ }
+
+ if (trackTransform) {
+ addContextCurrentTransform(canvasEntry.context);
+ }
+
+ return canvasEntry;
+ },
+ clear: function clear() {
+ for (var id in this.cache) {
+ var canvasEntry = this.cache[id];
+ this.canvasFactory.destroy(canvasEntry);
+ delete this.cache[id];
+ }
+ }
+ };
+ return CachedCanvases;
+}();
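+
+// CachedCanvases keys scratch canvases by string id, so repeated getCanvas('smaskGroupAt0', ...)
+// style lookups reset and reuse the same backing canvas instead of allocating a fresh one per
+// operation; clear() hands every cached entry back to the canvasFactory once drawing finishes.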
+
+function compileType3Glyph(imgData) {
+ var POINT_TO_PROCESS_LIMIT = 1000;
+ var width = imgData.width,
+ height = imgData.height;
+ var i,
+ j,
+ j0,
+ width1 = width + 1;
+ var points = new Uint8Array(width1 * (height + 1));
+ var POINT_TYPES = new Uint8Array([0, 2, 4, 0, 1, 0, 5, 4, 8, 10, 0, 8, 0, 2, 1, 0]);
+ var lineSize = width + 7 & ~7,
+ data0 = imgData.data;
+ var data = new Uint8Array(lineSize * height),
+ pos = 0,
+ ii;
+
+ for (i = 0, ii = data0.length; i < ii; i++) {
+ var mask = 128,
+ elem = data0[i];
+
+ while (mask > 0) {
+ data[pos++] = elem & mask ? 0 : 255;
+ mask >>= 1;
+ }
+ }
+
+ var count = 0;
+ pos = 0;
+
+ if (data[pos] !== 0) {
+ points[0] = 1;
+ ++count;
+ }
+
+ for (j = 1; j < width; j++) {
+ if (data[pos] !== data[pos + 1]) {
+ points[j] = data[pos] ? 2 : 1;
+ ++count;
+ }
+
+ pos++;
+ }
+
+ if (data[pos] !== 0) {
+ points[j] = 2;
+ ++count;
+ }
+
+ for (i = 1; i < height; i++) {
+ pos = i * lineSize;
+ j0 = i * width1;
+
+ if (data[pos - lineSize] !== data[pos]) {
+ points[j0] = data[pos] ? 1 : 8;
+ ++count;
+ }
+
+ var sum = (data[pos] ? 4 : 0) + (data[pos - lineSize] ? 8 : 0);
+
+ for (j = 1; j < width; j++) {
+ sum = (sum >> 2) + (data[pos + 1] ? 4 : 0) + (data[pos - lineSize + 1] ? 8 : 0);
+
+ if (POINT_TYPES[sum]) {
+ points[j0 + j] = POINT_TYPES[sum];
+ ++count;
+ }
+
+ pos++;
+ }
+
+ if (data[pos - lineSize] !== data[pos]) {
+ points[j0 + j] = data[pos] ? 2 : 4;
+ ++count;
+ }
+
+ if (count > POINT_TO_PROCESS_LIMIT) {
+ return null;
+ }
+ }
+
+ pos = lineSize * (height - 1);
+ j0 = i * width1;
+
+ if (data[pos] !== 0) {
+ points[j0] = 8;
+ ++count;
+ }
+
+ for (j = 1; j < width; j++) {
+ if (data[pos] !== data[pos + 1]) {
+ points[j0 + j] = data[pos] ? 4 : 8;
+ ++count;
+ }
+
+ pos++;
+ }
+
+ if (data[pos] !== 0) {
+ points[j0 + j] = 4;
+ ++count;
+ }
+
+ if (count > POINT_TO_PROCESS_LIMIT) {
+ return null;
+ }
+
+ var steps = new Int32Array([0, width1, -1, 0, -width1, 0, 0, 0, 1]);
+ var outlines = [];
+
+ for (i = 0; count && i <= height; i++) {
+ var p = i * width1;
+ var end = p + width;
+
+ while (p < end && !points[p]) {
+ p++;
+ }
+
+ if (p === end) {
+ continue;
+ }
+
+ var coords = [p % width1, i];
+ var type = points[p],
+ p0 = p,
+ pp;
+
+ do {
+ var step = steps[type];
+
+ do {
+ p += step;
+ } while (!points[p]);
+
+ pp = points[p];
+
+ if (pp !== 5 && pp !== 10) {
+ type = pp;
+ points[p] = 0;
+ } else {
+ type = pp & 0x33 * type >> 4;
+ points[p] &= type >> 2 | type << 2;
+ }
+
+ coords.push(p % width1);
+ coords.push(p / width1 | 0);
+
+ if (!points[p]) {
+ --count;
+ }
+ } while (p0 !== p);
+
+ outlines.push(coords);
+ --i;
+ }
+
+ var drawOutline = function drawOutline(c) {
+ c.save();
+ c.scale(1 / width, -1 / height);
+ c.translate(0, -height);
+ c.beginPath();
+
+ for (var i = 0, ii = outlines.length; i < ii; i++) {
+ var o = outlines[i];
+ c.moveTo(o[0], o[1]);
+
+ for (var j = 2, jj = o.length; j < jj; j += 2) {
+ c.lineTo(o[j], o[j + 1]);
+ }
+ }
+
+ c.fill();
+ c.beginPath();
+ c.restore();
+ };
+
+ return drawOutline;
+}
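+
+// compileType3Glyph turns a 1bpp Type3 glyph bitmap into a reusable path-drawing closure: it
+// unpacks the rows, marks the corners where coverage changes (POINT_TYPES is a lookup table over
+// the four-pixel neighbourhood), then walks those markers using the `steps` offsets to stitch
+// closed outlines, and the returned drawOutline replays them scaled into a unit box. Glyphs that
+// produce more than POINT_TO_PROCESS_LIMIT (1000) marker points make it bail out with null.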
+
+var CanvasExtraState = function CanvasExtraStateClosure() {
+ function CanvasExtraState() {
+ this.alphaIsShape = false;
+ this.fontSize = 0;
+ this.fontSizeScale = 1;
+ this.textMatrix = _util.IDENTITY_MATRIX;
+ this.textMatrixScale = 1;
+ this.fontMatrix = _util.FONT_IDENTITY_MATRIX;
+ this.leading = 0;
+ this.x = 0;
+ this.y = 0;
+ this.lineX = 0;
+ this.lineY = 0;
+ this.charSpacing = 0;
+ this.wordSpacing = 0;
+ this.textHScale = 1;
+ this.textRenderingMode = _util.TextRenderingMode.FILL;
+ this.textRise = 0;
+ this.fillColor = '#000000';
+ this.strokeColor = '#000000';
+ this.patternFill = false;
+ this.fillAlpha = 1;
+ this.strokeAlpha = 1;
+ this.lineWidth = 1;
+ this.activeSMask = null;
+ this.resumeSMaskCtx = null;
+ }
+
+ CanvasExtraState.prototype = {
+ clone: function CanvasExtraState_clone() {
+ return Object.create(this);
+ },
+ setCurrentPoint: function CanvasExtraState_setCurrentPoint(x, y) {
+ this.x = x;
+ this.y = y;
+ }
+ };
+ return CanvasExtraState;
+}();
+
+var CanvasGraphics = function CanvasGraphicsClosure() {
+ var EXECUTION_TIME = 15;
+ var EXECUTION_STEPS = 10;
+
+ function CanvasGraphics(canvasCtx, commonObjs, objs, canvasFactory, webGLContext, imageLayer) {
+ this.ctx = canvasCtx;
+ this.current = new CanvasExtraState();
+ this.stateStack = [];
+ this.pendingClip = null;
+ this.pendingEOFill = false;
+ this.res = null;
+ this.xobjs = null;
+ this.commonObjs = commonObjs;
+ this.objs = objs;
+ this.canvasFactory = canvasFactory;
+ this.webGLContext = webGLContext;
+ this.imageLayer = imageLayer;
+ this.groupStack = [];
+ this.processingType3 = null;
+ this.baseTransform = null;
+ this.baseTransformStack = [];
+ this.groupLevel = 0;
+ this.smaskStack = [];
+ this.smaskCounter = 0;
+ this.tempSMask = null;
+ this.cachedCanvases = new CachedCanvases(this.canvasFactory);
+
+ if (canvasCtx) {
+ addContextCurrentTransform(canvasCtx);
+ }
+
+ this._cachedGetSinglePixelWidth = null;
+ }
+
+ function putBinaryImageData(ctx, imgData) {
+ if (typeof ImageData !== 'undefined' && imgData instanceof ImageData) {
+ ctx.putImageData(imgData, 0, 0);
+ return;
+ }
+
+ var height = imgData.height,
+ width = imgData.width;
+ var partialChunkHeight = height % FULL_CHUNK_HEIGHT;
+ var fullChunks = (height - partialChunkHeight) / FULL_CHUNK_HEIGHT;
+ var totalChunks = partialChunkHeight === 0 ? fullChunks : fullChunks + 1;
+ var chunkImgData = ctx.createImageData(width, FULL_CHUNK_HEIGHT);
+ var srcPos = 0,
+ destPos;
+ var src = imgData.data;
+ var dest = chunkImgData.data;
+ var i, j, thisChunkHeight, elemsInThisChunk;
+
+ if (imgData.kind === _util.ImageKind.GRAYSCALE_1BPP) {
+ var srcLength = src.byteLength;
+ var dest32 = new Uint32Array(dest.buffer, 0, dest.byteLength >> 2);
+ var dest32DataLength = dest32.length;
+ var fullSrcDiff = width + 7 >> 3;
+ var white = 0xFFFFFFFF;
+ var black = IsLittleEndianCached.value ? 0xFF000000 : 0x000000FF;
+
+ for (i = 0; i < totalChunks; i++) {
+ thisChunkHeight = i < fullChunks ? FULL_CHUNK_HEIGHT : partialChunkHeight;
+ destPos = 0;
+
+ for (j = 0; j < thisChunkHeight; j++) {
+ var srcDiff = srcLength - srcPos;
+ var k = 0;
+ var kEnd = srcDiff > fullSrcDiff ? width : srcDiff * 8 - 7;
+ var kEndUnrolled = kEnd & ~7;
+ var mask = 0;
+ var srcByte = 0;
+
+ for (; k < kEndUnrolled; k += 8) {
+ srcByte = src[srcPos++];
+ dest32[destPos++] = srcByte & 128 ? white : black;
+ dest32[destPos++] = srcByte & 64 ? white : black;
+ dest32[destPos++] = srcByte & 32 ? white : black;
+ dest32[destPos++] = srcByte & 16 ? white : black;
+ dest32[destPos++] = srcByte & 8 ? white : black;
+ dest32[destPos++] = srcByte & 4 ? white : black;
+ dest32[destPos++] = srcByte & 2 ? white : black;
+ dest32[destPos++] = srcByte & 1 ? white : black;
+ }
+
+ for (; k < kEnd; k++) {
+ if (mask === 0) {
+ srcByte = src[srcPos++];
+ mask = 128;
+ }
+
+ dest32[destPos++] = srcByte & mask ? white : black;
+ mask >>= 1;
+ }
+ }
+
+ while (destPos < dest32DataLength) {
+ dest32[destPos++] = 0;
+ }
+
+ ctx.putImageData(chunkImgData, 0, i * FULL_CHUNK_HEIGHT);
+ }
+ } else if (imgData.kind === _util.ImageKind.RGBA_32BPP) {
+ j = 0;
+ elemsInThisChunk = width * FULL_CHUNK_HEIGHT * 4;
+
+ for (i = 0; i < fullChunks; i++) {
+ dest.set(src.subarray(srcPos, srcPos + elemsInThisChunk));
+ srcPos += elemsInThisChunk;
+ ctx.putImageData(chunkImgData, 0, j);
+ j += FULL_CHUNK_HEIGHT;
+ }
+
+ if (i < totalChunks) {
+ elemsInThisChunk = width * partialChunkHeight * 4;
+ dest.set(src.subarray(srcPos, srcPos + elemsInThisChunk));
+ ctx.putImageData(chunkImgData, 0, j);
+ }
+ } else if (imgData.kind === _util.ImageKind.RGB_24BPP) {
+ thisChunkHeight = FULL_CHUNK_HEIGHT;
+ elemsInThisChunk = width * thisChunkHeight;
+
+ for (i = 0; i < totalChunks; i++) {
+ if (i >= fullChunks) {
+ thisChunkHeight = partialChunkHeight;
+ elemsInThisChunk = width * thisChunkHeight;
+ }
+
+ destPos = 0;
+
+ for (j = elemsInThisChunk; j--;) {
+ dest[destPos++] = src[srcPos++];
+ dest[destPos++] = src[srcPos++];
+ dest[destPos++] = src[srcPos++];
+ dest[destPos++] = 255;
+ }
+
+ ctx.putImageData(chunkImgData, 0, i * FULL_CHUNK_HEIGHT);
+ }
+ } else {
+ throw new Error("bad image kind: ".concat(imgData.kind));
+ }
+ }
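+
+  // putBinaryImageData writes the decoded image in FULL_CHUNK_HEIGHT (16) row slices to bound the
+  // size of each putImageData call: GRAYSCALE_1BPP rows are expanded bit by bit into 32-bit
+  // pixels (the `black` constant is byte-order aware via IsLittleEndianCached), RGBA_32BPP chunks
+  // are copied straight through, and RGB_24BPP pixels get an opaque alpha byte appended on the fly.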
+
+ function putBinaryImageMask(ctx, imgData) {
+ var height = imgData.height,
+ width = imgData.width;
+ var partialChunkHeight = height % FULL_CHUNK_HEIGHT;
+ var fullChunks = (height - partialChunkHeight) / FULL_CHUNK_HEIGHT;
+ var totalChunks = partialChunkHeight === 0 ? fullChunks : fullChunks + 1;
+ var chunkImgData = ctx.createImageData(width, FULL_CHUNK_HEIGHT);
+ var srcPos = 0;
+ var src = imgData.data;
+ var dest = chunkImgData.data;
+
+ for (var i = 0; i < totalChunks; i++) {
+ var thisChunkHeight = i < fullChunks ? FULL_CHUNK_HEIGHT : partialChunkHeight;
+ var destPos = 3;
+
+ for (var j = 0; j < thisChunkHeight; j++) {
+ var mask = 0;
+
+ for (var k = 0; k < width; k++) {
+ if (!mask) {
+ var elem = src[srcPos++];
+ mask = 128;
+ }
+
+ dest[destPos] = elem & mask ? 0 : 255;
+ destPos += 4;
+ mask >>= 1;
+ }
+ }
+
+ ctx.putImageData(chunkImgData, 0, i * FULL_CHUNK_HEIGHT);
+ }
+ }
+
+ function copyCtxState(sourceCtx, destCtx) {
+ var properties = ['strokeStyle', 'fillStyle', 'fillRule', 'globalAlpha', 'lineWidth', 'lineCap', 'lineJoin', 'miterLimit', 'globalCompositeOperation', 'font'];
+
+ for (var i = 0, ii = properties.length; i < ii; i++) {
+ var property = properties[i];
+
+ if (sourceCtx[property] !== undefined) {
+ destCtx[property] = sourceCtx[property];
+ }
+ }
+
+ if (sourceCtx.setLineDash !== undefined) {
+ destCtx.setLineDash(sourceCtx.getLineDash());
+ destCtx.lineDashOffset = sourceCtx.lineDashOffset;
+ }
+ }
+
+ function resetCtxToDefault(ctx) {
+ ctx.strokeStyle = '#000000';
+ ctx.fillStyle = '#000000';
+ ctx.fillRule = 'nonzero';
+ ctx.globalAlpha = 1;
+ ctx.lineWidth = 1;
+ ctx.lineCap = 'butt';
+ ctx.lineJoin = 'miter';
+ ctx.miterLimit = 10;
+ ctx.globalCompositeOperation = 'source-over';
+ ctx.font = '10px sans-serif';
+
+ if (ctx.setLineDash !== undefined) {
+ ctx.setLineDash([]);
+ ctx.lineDashOffset = 0;
+ }
+ }
+
+ function composeSMaskBackdrop(bytes, r0, g0, b0) {
+ var length = bytes.length;
+
+ for (var i = 3; i < length; i += 4) {
+ var alpha = bytes[i];
+
+ if (alpha === 0) {
+ bytes[i - 3] = r0;
+ bytes[i - 2] = g0;
+ bytes[i - 1] = b0;
+ } else if (alpha < 255) {
+ var alpha_ = 255 - alpha;
+ bytes[i - 3] = bytes[i - 3] * alpha + r0 * alpha_ >> 8;
+ bytes[i - 2] = bytes[i - 2] * alpha + g0 * alpha_ >> 8;
+ bytes[i - 1] = bytes[i - 1] * alpha + b0 * alpha_ >> 8;
+ }
+ }
+ }
+
+ function composeSMaskAlpha(maskData, layerData, transferMap) {
+ var length = maskData.length;
+ var scale = 1 / 255;
+
+ for (var i = 3; i < length; i += 4) {
+ var alpha = transferMap ? transferMap[maskData[i]] : maskData[i];
+ layerData[i] = layerData[i] * alpha * scale | 0;
+ }
+ }
+
+ function composeSMaskLuminosity(maskData, layerData, transferMap) {
+ var length = maskData.length;
+
+ for (var i = 3; i < length; i += 4) {
+ var y = maskData[i - 3] * 77 + maskData[i - 2] * 152 + maskData[i - 1] * 28;
+ layerData[i] = transferMap ? layerData[i] * transferMap[y >> 8] >> 8 : layerData[i] * y >> 16;
+ }
+ }
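+
+  // The 77/152/28 weights above are roughly the Rec. 601 luma coefficients (0.299, 0.587, 0.114)
+  // in 8-bit fixed point, so composeSMaskLuminosity scales each layer pixel's alpha by the mask
+  // pixel's brightness, while composeSMaskAlpha scales it by the mask's own alpha channel,
+  // optionally routed through the soft mask's transfer function via transferMap.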
+
+ function genericComposeSMask(maskCtx, layerCtx, width, height, subtype, backdrop, transferMap) {
+ var hasBackdrop = !!backdrop;
+ var r0 = hasBackdrop ? backdrop[0] : 0;
+ var g0 = hasBackdrop ? backdrop[1] : 0;
+ var b0 = hasBackdrop ? backdrop[2] : 0;
+ var composeFn;
+
+ if (subtype === 'Luminosity') {
+ composeFn = composeSMaskLuminosity;
+ } else {
+ composeFn = composeSMaskAlpha;
+ }
+
+ var PIXELS_TO_PROCESS = 1048576;
+ var chunkSize = Math.min(height, Math.ceil(PIXELS_TO_PROCESS / width));
+
+ for (var row = 0; row < height; row += chunkSize) {
+ var chunkHeight = Math.min(chunkSize, height - row);
+ var maskData = maskCtx.getImageData(0, row, width, chunkHeight);
+ var layerData = layerCtx.getImageData(0, row, width, chunkHeight);
+
+ if (hasBackdrop) {
+ composeSMaskBackdrop(maskData.data, r0, g0, b0);
+ }
+
+ composeFn(maskData.data, layerData.data, transferMap);
+ maskCtx.putImageData(layerData, 0, row);
+ }
+ }
+
+ function composeSMask(ctx, smask, layerCtx, webGLContext) {
+ var mask = smask.canvas;
+ var maskCtx = smask.context;
+ ctx.setTransform(smask.scaleX, 0, 0, smask.scaleY, smask.offsetX, smask.offsetY);
+ var backdrop = smask.backdrop || null;
+
+ if (!smask.transferMap && webGLContext.isEnabled) {
+ var composed = webGLContext.composeSMask({
+ layer: layerCtx.canvas,
+ mask: mask,
+ properties: {
+ subtype: smask.subtype,
+ backdrop: backdrop
+ }
+ });
+ ctx.setTransform(1, 0, 0, 1, 0, 0);
+ ctx.drawImage(composed, smask.offsetX, smask.offsetY);
+ return;
+ }
+
+ genericComposeSMask(maskCtx, layerCtx, mask.width, mask.height, smask.subtype, backdrop, smask.transferMap);
+ ctx.drawImage(mask, 0, 0);
+ }
+
+ var LINE_CAP_STYLES = ['butt', 'round', 'square'];
+ var LINE_JOIN_STYLES = ['miter', 'round', 'bevel'];
+ var NORMAL_CLIP = {};
+ var EO_CLIP = {};
+ CanvasGraphics.prototype = {
+ beginDrawing: function beginDrawing(_ref) {
+ var transform = _ref.transform,
+ viewport = _ref.viewport,
+ _ref$transparency = _ref.transparency,
+ transparency = _ref$transparency === void 0 ? false : _ref$transparency,
+ _ref$background = _ref.background,
+ background = _ref$background === void 0 ? null : _ref$background;
+ var width = this.ctx.canvas.width;
+ var height = this.ctx.canvas.height;
+ this.ctx.save();
+ this.ctx.fillStyle = background || 'rgb(255, 255, 255)';
+ this.ctx.fillRect(0, 0, width, height);
+ this.ctx.restore();
+
+ if (transparency) {
+ var transparentCanvas = this.cachedCanvases.getCanvas('transparent', width, height, true);
+ this.compositeCtx = this.ctx;
+ this.transparentCanvas = transparentCanvas.canvas;
+ this.ctx = transparentCanvas.context;
+ this.ctx.save();
+ this.ctx.transform.apply(this.ctx, this.compositeCtx.mozCurrentTransform);
+ }
+
+ this.ctx.save();
+ resetCtxToDefault(this.ctx);
+
+ if (transform) {
+ this.ctx.transform.apply(this.ctx, transform);
+ }
+
+ this.ctx.transform.apply(this.ctx, viewport.transform);
+ this.baseTransform = this.ctx.mozCurrentTransform.slice();
+
+ if (this.imageLayer) {
+ this.imageLayer.beginLayout();
+ }
+ },
+ executeOperatorList: function CanvasGraphics_executeOperatorList(operatorList, executionStartIdx, continueCallback, stepper) {
+ var argsArray = operatorList.argsArray;
+ var fnArray = operatorList.fnArray;
+ var i = executionStartIdx || 0;
+ var argsArrayLen = argsArray.length;
+
+ if (argsArrayLen === i) {
+ return i;
+ }
+
+ var chunkOperations = argsArrayLen - i > EXECUTION_STEPS && typeof continueCallback === 'function';
+ var endTime = chunkOperations ? Date.now() + EXECUTION_TIME : 0;
+ var steps = 0;
+ var commonObjs = this.commonObjs;
+ var objs = this.objs;
+ var fnId;
+
+ while (true) {
+ if (stepper !== undefined && i === stepper.nextBreakPoint) {
+ stepper.breakIt(i, continueCallback);
+ return i;
+ }
+
+ fnId = fnArray[i];
+
+ if (fnId !== _util.OPS.dependency) {
+ this[fnId].apply(this, argsArray[i]);
+ } else {
+ var _iteratorNormalCompletion = true;
+ var _didIteratorError = false;
+ var _iteratorError = undefined;
+
+ try {
+ for (var _iterator = argsArray[i][Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
+ var depObjId = _step.value;
+ var objsPool = depObjId.startsWith('g_') ? commonObjs : objs;
+
+ if (!objsPool.has(depObjId)) {
+ objsPool.get(depObjId, continueCallback);
+ return i;
+ }
+ }
+ } catch (err) {
+ _didIteratorError = true;
+ _iteratorError = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion && _iterator["return"] != null) {
+ _iterator["return"]();
+ }
+ } finally {
+ if (_didIteratorError) {
+ throw _iteratorError;
+ }
+ }
+ }
+ }
+
+ i++;
+
+ if (i === argsArrayLen) {
+ return i;
+ }
+
+ if (chunkOperations && ++steps > EXECUTION_STEPS) {
+ if (Date.now() > endTime) {
+ continueCallback();
+ return i;
+ }
+
+ steps = 0;
+ }
+ }
+ },
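+
+    // executeOperatorList is deliberately resumable: after every EXECUTION_STEPS (10) operators
+    // it checks the EXECUTION_TIME (15 ms) budget and yields to continueCallback with the current
+    // index, and an OPS.dependency entry suspends execution until the missing object shows up in
+    // objs/commonObjs; callers can continue by passing the returned index back as
+    // executionStartIdx.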
+ endDrawing: function CanvasGraphics_endDrawing() {
+ if (this.current.activeSMask !== null) {
+ this.endSMaskGroup();
+ }
+
+ this.ctx.restore();
+
+ if (this.transparentCanvas) {
+ this.ctx = this.compositeCtx;
+ this.ctx.save();
+ this.ctx.setTransform(1, 0, 0, 1, 0, 0);
+ this.ctx.drawImage(this.transparentCanvas, 0, 0);
+ this.ctx.restore();
+ this.transparentCanvas = null;
+ }
+
+ this.cachedCanvases.clear();
+ this.webGLContext.clear();
+
+ if (this.imageLayer) {
+ this.imageLayer.endLayout();
+ }
+ },
+ setLineWidth: function CanvasGraphics_setLineWidth(width) {
+ this.current.lineWidth = width;
+ this.ctx.lineWidth = width;
+ },
+ setLineCap: function CanvasGraphics_setLineCap(style) {
+ this.ctx.lineCap = LINE_CAP_STYLES[style];
+ },
+ setLineJoin: function CanvasGraphics_setLineJoin(style) {
+ this.ctx.lineJoin = LINE_JOIN_STYLES[style];
+ },
+ setMiterLimit: function CanvasGraphics_setMiterLimit(limit) {
+ this.ctx.miterLimit = limit;
+ },
+ setDash: function CanvasGraphics_setDash(dashArray, dashPhase) {
+ var ctx = this.ctx;
+
+ if (ctx.setLineDash !== undefined) {
+ ctx.setLineDash(dashArray);
+ ctx.lineDashOffset = dashPhase;
+ }
+ },
+ setRenderingIntent: function setRenderingIntent(intent) {},
+ setFlatness: function setFlatness(flatness) {},
+ setGState: function CanvasGraphics_setGState(states) {
+ for (var i = 0, ii = states.length; i < ii; i++) {
+ var state = states[i];
+ var key = state[0];
+ var value = state[1];
+
+ switch (key) {
+ case 'LW':
+ this.setLineWidth(value);
+ break;
+
+ case 'LC':
+ this.setLineCap(value);
+ break;
+
+ case 'LJ':
+ this.setLineJoin(value);
+ break;
+
+ case 'ML':
+ this.setMiterLimit(value);
+ break;
+
+ case 'D':
+ this.setDash(value[0], value[1]);
+ break;
+
+ case 'RI':
+ this.setRenderingIntent(value);
+ break;
+
+ case 'FL':
+ this.setFlatness(value);
+ break;
+
+ case 'Font':
+ this.setFont(value[0], value[1]);
+ break;
+
+ case 'CA':
+ this.current.strokeAlpha = state[1];
+ break;
+
+ case 'ca':
+ this.current.fillAlpha = state[1];
+ this.ctx.globalAlpha = state[1];
+ break;
+
+ case 'BM':
+ this.ctx.globalCompositeOperation = value;
+ break;
+
+ case 'SMask':
+ if (this.current.activeSMask) {
+ if (this.stateStack.length > 0 && this.stateStack[this.stateStack.length - 1].activeSMask === this.current.activeSMask) {
+ this.suspendSMaskGroup();
+ } else {
+ this.endSMaskGroup();
+ }
+ }
+
+ this.current.activeSMask = value ? this.tempSMask : null;
+
+ if (this.current.activeSMask) {
+ this.beginSMaskGroup();
+ }
+
+ this.tempSMask = null;
+ break;
+ }
+ }
+ },
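+
+    // The keys handled by setGState above are the PDF ExtGState entries: LW/LC/LJ/ML/D map to
+    // line width, cap, join, miter limit and dash, RI and FL are accepted but ignored, Font
+    // re-runs setFont, CA/ca set the stroke/fill alpha, BM becomes globalCompositeOperation, and
+    // SMask tears down or (re)builds the soft-mask group around the current drawing state.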
+ beginSMaskGroup: function CanvasGraphics_beginSMaskGroup() {
+ var activeSMask = this.current.activeSMask;
+ var drawnWidth = activeSMask.canvas.width;
+ var drawnHeight = activeSMask.canvas.height;
+ var cacheId = 'smaskGroupAt' + this.groupLevel;
+ var scratchCanvas = this.cachedCanvases.getCanvas(cacheId, drawnWidth, drawnHeight, true);
+ var currentCtx = this.ctx;
+ var currentTransform = currentCtx.mozCurrentTransform;
+ this.ctx.save();
+ var groupCtx = scratchCanvas.context;
+ groupCtx.scale(1 / activeSMask.scaleX, 1 / activeSMask.scaleY);
+ groupCtx.translate(-activeSMask.offsetX, -activeSMask.offsetY);
+ groupCtx.transform.apply(groupCtx, currentTransform);
+ activeSMask.startTransformInverse = groupCtx.mozCurrentTransformInverse;
+ copyCtxState(currentCtx, groupCtx);
+ this.ctx = groupCtx;
+ this.setGState([['BM', 'source-over'], ['ca', 1], ['CA', 1]]);
+ this.groupStack.push(currentCtx);
+ this.groupLevel++;
+ },
+    suspendSMaskGroup: function CanvasGraphics_suspendSMaskGroup() {
+ var groupCtx = this.ctx;
+ this.groupLevel--;
+ this.ctx = this.groupStack.pop();
+ composeSMask(this.ctx, this.current.activeSMask, groupCtx, this.webGLContext);
+ this.ctx.restore();
+ this.ctx.save();
+ copyCtxState(groupCtx, this.ctx);
+ this.current.resumeSMaskCtx = groupCtx;
+
+ var deltaTransform = _util.Util.transform(this.current.activeSMask.startTransformInverse, groupCtx.mozCurrentTransform);
+
+ this.ctx.transform.apply(this.ctx, deltaTransform);
+ groupCtx.save();
+ groupCtx.setTransform(1, 0, 0, 1, 0, 0);
+ groupCtx.clearRect(0, 0, groupCtx.canvas.width, groupCtx.canvas.height);
+ groupCtx.restore();
+ },
+    resumeSMaskGroup: function CanvasGraphics_resumeSMaskGroup() {
+ var groupCtx = this.current.resumeSMaskCtx;
+ var currentCtx = this.ctx;
+ this.ctx = groupCtx;
+ this.groupStack.push(currentCtx);
+ this.groupLevel++;
+ },
+ endSMaskGroup: function CanvasGraphics_endSMaskGroup() {
+ var groupCtx = this.ctx;
+ this.groupLevel--;
+ this.ctx = this.groupStack.pop();
+ composeSMask(this.ctx, this.current.activeSMask, groupCtx, this.webGLContext);
+ this.ctx.restore();
+ copyCtxState(groupCtx, this.ctx);
+
+ var deltaTransform = _util.Util.transform(this.current.activeSMask.startTransformInverse, groupCtx.mozCurrentTransform);
+
+ this.ctx.transform.apply(this.ctx, deltaTransform);
+ },
+ save: function CanvasGraphics_save() {
+ this.ctx.save();
+ var old = this.current;
+ this.stateStack.push(old);
+ this.current = old.clone();
+ this.current.resumeSMaskCtx = null;
+ },
+ restore: function CanvasGraphics_restore() {
+ if (this.current.resumeSMaskCtx) {
+ this.resumeSMaskGroup();
+ }
+
+ if (this.current.activeSMask !== null && (this.stateStack.length === 0 || this.stateStack[this.stateStack.length - 1].activeSMask !== this.current.activeSMask)) {
+ this.endSMaskGroup();
+ }
+
+ if (this.stateStack.length !== 0) {
+ this.current = this.stateStack.pop();
+ this.ctx.restore();
+ this.pendingClip = null;
+ this._cachedGetSinglePixelWidth = null;
+ }
+ },
+ transform: function CanvasGraphics_transform(a, b, c, d, e, f) {
+ this.ctx.transform(a, b, c, d, e, f);
+ this._cachedGetSinglePixelWidth = null;
+ },
+ constructPath: function CanvasGraphics_constructPath(ops, args) {
+ var ctx = this.ctx;
+ var current = this.current;
+ var x = current.x,
+ y = current.y;
+
+ for (var i = 0, j = 0, ii = ops.length; i < ii; i++) {
+ switch (ops[i] | 0) {
+ case _util.OPS.rectangle:
+ x = args[j++];
+ y = args[j++];
+ var width = args[j++];
+ var height = args[j++];
+
+ if (width === 0) {
+ width = this.getSinglePixelWidth();
+ }
+
+ if (height === 0) {
+ height = this.getSinglePixelWidth();
+ }
+
+ var xw = x + width;
+ var yh = y + height;
+ this.ctx.moveTo(x, y);
+ this.ctx.lineTo(xw, y);
+ this.ctx.lineTo(xw, yh);
+ this.ctx.lineTo(x, yh);
+ this.ctx.lineTo(x, y);
+ this.ctx.closePath();
+ break;
+
+ case _util.OPS.moveTo:
+ x = args[j++];
+ y = args[j++];
+ ctx.moveTo(x, y);
+ break;
+
+ case _util.OPS.lineTo:
+ x = args[j++];
+ y = args[j++];
+ ctx.lineTo(x, y);
+ break;
+
+ case _util.OPS.curveTo:
+ x = args[j + 4];
+ y = args[j + 5];
+ ctx.bezierCurveTo(args[j], args[j + 1], args[j + 2], args[j + 3], x, y);
+ j += 6;
+ break;
+
+ case _util.OPS.curveTo2:
+ ctx.bezierCurveTo(x, y, args[j], args[j + 1], args[j + 2], args[j + 3]);
+ x = args[j + 2];
+ y = args[j + 3];
+ j += 4;
+ break;
+
+ case _util.OPS.curveTo3:
+ x = args[j + 2];
+ y = args[j + 3];
+ ctx.bezierCurveTo(args[j], args[j + 1], x, y, x, y);
+ j += 4;
+ break;
+
+ case _util.OPS.closePath:
+ ctx.closePath();
+ break;
+ }
+ }
+
+ current.setCurrentPoint(x, y);
+ },
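+
+    // constructPath replays a packed operator stream: `ops` holds OPS.* codes and `args` the flat
+    // coordinate list, zero-width or zero-height rectangles are widened to getSinglePixelWidth()
+    // so hairline rectangles still rasterize, and the final point is stored back through
+    // setCurrentPoint for the curveTo2 variant, which reuses the current point as its first
+    // control point.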
+ closePath: function CanvasGraphics_closePath() {
+ this.ctx.closePath();
+ },
+ stroke: function CanvasGraphics_stroke(consumePath) {
+ consumePath = typeof consumePath !== 'undefined' ? consumePath : true;
+ var ctx = this.ctx;
+ var strokeColor = this.current.strokeColor;
+ ctx.lineWidth = Math.max(this.getSinglePixelWidth() * MIN_WIDTH_FACTOR, this.current.lineWidth);
+ ctx.globalAlpha = this.current.strokeAlpha;
+
+ if (strokeColor && strokeColor.hasOwnProperty('type') && strokeColor.type === 'Pattern') {
+ ctx.save();
+ ctx.strokeStyle = strokeColor.getPattern(ctx, this);
+ ctx.stroke();
+ ctx.restore();
+ } else {
+ ctx.stroke();
+ }
+
+ if (consumePath) {
+ this.consumePath();
+ }
+
+ ctx.globalAlpha = this.current.fillAlpha;
+ },
+ closeStroke: function CanvasGraphics_closeStroke() {
+ this.closePath();
+ this.stroke();
+ },
+ fill: function CanvasGraphics_fill(consumePath) {
+ consumePath = typeof consumePath !== 'undefined' ? consumePath : true;
+ var ctx = this.ctx;
+ var fillColor = this.current.fillColor;
+ var isPatternFill = this.current.patternFill;
+ var needRestore = false;
+
+ if (isPatternFill) {
+ ctx.save();
+
+ if (this.baseTransform) {
+ ctx.setTransform.apply(ctx, this.baseTransform);
+ }
+
+ ctx.fillStyle = fillColor.getPattern(ctx, this);
+ needRestore = true;
+ }
+
+ if (this.pendingEOFill) {
+ ctx.fill('evenodd');
+ this.pendingEOFill = false;
+ } else {
+ ctx.fill();
+ }
+
+ if (needRestore) {
+ ctx.restore();
+ }
+
+ if (consumePath) {
+ this.consumePath();
+ }
+ },
+ eoFill: function CanvasGraphics_eoFill() {
+ this.pendingEOFill = true;
+ this.fill();
+ },
+ fillStroke: function CanvasGraphics_fillStroke() {
+ this.fill(false);
+ this.stroke(false);
+ this.consumePath();
+ },
+ eoFillStroke: function CanvasGraphics_eoFillStroke() {
+ this.pendingEOFill = true;
+ this.fillStroke();
+ },
+ closeFillStroke: function CanvasGraphics_closeFillStroke() {
+ this.closePath();
+ this.fillStroke();
+ },
+ closeEOFillStroke: function CanvasGraphics_closeEOFillStroke() {
+ this.pendingEOFill = true;
+ this.closePath();
+ this.fillStroke();
+ },
+ endPath: function CanvasGraphics_endPath() {
+ this.consumePath();
+ },
+ clip: function CanvasGraphics_clip() {
+ this.pendingClip = NORMAL_CLIP;
+ },
+ eoClip: function CanvasGraphics_eoClip() {
+ this.pendingClip = EO_CLIP;
+ },
+ beginText: function CanvasGraphics_beginText() {
+ this.current.textMatrix = _util.IDENTITY_MATRIX;
+ this.current.textMatrixScale = 1;
+ this.current.x = this.current.lineX = 0;
+ this.current.y = this.current.lineY = 0;
+ },
+ endText: function CanvasGraphics_endText() {
+ var paths = this.pendingTextPaths;
+ var ctx = this.ctx;
+
+ if (paths === undefined) {
+ ctx.beginPath();
+ return;
+ }
+
+ ctx.save();
+ ctx.beginPath();
+
+ for (var i = 0; i < paths.length; i++) {
+ var path = paths[i];
+ ctx.setTransform.apply(ctx, path.transform);
+ ctx.translate(path.x, path.y);
+ path.addToPath(ctx, path.fontSize);
+ }
+
+ ctx.restore();
+ ctx.clip();
+ ctx.beginPath();
+ delete this.pendingTextPaths;
+ },
+ setCharSpacing: function CanvasGraphics_setCharSpacing(spacing) {
+ this.current.charSpacing = spacing;
+ },
+ setWordSpacing: function CanvasGraphics_setWordSpacing(spacing) {
+ this.current.wordSpacing = spacing;
+ },
+ setHScale: function CanvasGraphics_setHScale(scale) {
+ this.current.textHScale = scale / 100;
+ },
+ setLeading: function CanvasGraphics_setLeading(leading) {
+ this.current.leading = -leading;
+ },
+ setFont: function CanvasGraphics_setFont(fontRefName, size) {
+ var fontObj = this.commonObjs.get(fontRefName);
+ var current = this.current;
+
+ if (!fontObj) {
+ throw new Error("Can't find font for ".concat(fontRefName));
+ }
+
+ current.fontMatrix = fontObj.fontMatrix ? fontObj.fontMatrix : _util.FONT_IDENTITY_MATRIX;
+
+ if (current.fontMatrix[0] === 0 || current.fontMatrix[3] === 0) {
+ (0, _util.warn)('Invalid font matrix for font ' + fontRefName);
+ }
+
+ if (size < 0) {
+ size = -size;
+ current.fontDirection = -1;
+ } else {
+ current.fontDirection = 1;
+ }
+
+ this.current.font = fontObj;
+ this.current.fontSize = size;
+
+ if (fontObj.isType3Font) {
+ return;
+ }
+
+ var name = fontObj.loadedName || 'sans-serif';
+ var bold = fontObj.black ? '900' : fontObj.bold ? 'bold' : 'normal';
+ var italic = fontObj.italic ? 'italic' : 'normal';
+ var typeface = "\"".concat(name, "\", ").concat(fontObj.fallbackName);
+ var browserFontSize = size < MIN_FONT_SIZE ? MIN_FONT_SIZE : size > MAX_FONT_SIZE ? MAX_FONT_SIZE : size;
+ this.current.fontSizeScale = size / browserFontSize;
+ this.ctx.font = "".concat(italic, " ").concat(bold, " ").concat(browserFontSize, "px ").concat(typeface);
+ },
+ setTextRenderingMode: function CanvasGraphics_setTextRenderingMode(mode) {
+ this.current.textRenderingMode = mode;
+ },
+ setTextRise: function CanvasGraphics_setTextRise(rise) {
+ this.current.textRise = rise;
+ },
+ moveText: function CanvasGraphics_moveText(x, y) {
+ this.current.x = this.current.lineX += x;
+ this.current.y = this.current.lineY += y;
+ },
+ setLeadingMoveText: function CanvasGraphics_setLeadingMoveText(x, y) {
+ this.setLeading(-y);
+ this.moveText(x, y);
+ },
+ setTextMatrix: function CanvasGraphics_setTextMatrix(a, b, c, d, e, f) {
+ this.current.textMatrix = [a, b, c, d, e, f];
+ this.current.textMatrixScale = Math.sqrt(a * a + b * b);
+ this.current.x = this.current.lineX = 0;
+ this.current.y = this.current.lineY = 0;
+ },
+ nextLine: function CanvasGraphics_nextLine() {
+ this.moveText(0, this.current.leading);
+ },
+ paintChar: function paintChar(character, x, y, patternTransform) {
+ var ctx = this.ctx;
+ var current = this.current;
+ var font = current.font;
+ var textRenderingMode = current.textRenderingMode;
+ var fontSize = current.fontSize / current.fontSizeScale;
+ var fillStrokeMode = textRenderingMode & _util.TextRenderingMode.FILL_STROKE_MASK;
+ var isAddToPathSet = !!(textRenderingMode & _util.TextRenderingMode.ADD_TO_PATH_FLAG);
+ var patternFill = current.patternFill && font.data;
+ var addToPath;
+
+ if (font.disableFontFace || isAddToPathSet || patternFill) {
+ addToPath = font.getPathGenerator(this.commonObjs, character);
+ }
+
+ if (font.disableFontFace || patternFill) {
+ ctx.save();
+ ctx.translate(x, y);
+ ctx.beginPath();
+ addToPath(ctx, fontSize);
+
+ if (patternTransform) {
+ ctx.setTransform.apply(ctx, patternTransform);
+ }
+
+ if (fillStrokeMode === _util.TextRenderingMode.FILL || fillStrokeMode === _util.TextRenderingMode.FILL_STROKE) {
+ ctx.fill();
+ }
+
+ if (fillStrokeMode === _util.TextRenderingMode.STROKE || fillStrokeMode === _util.TextRenderingMode.FILL_STROKE) {
+ ctx.stroke();
+ }
+
+ ctx.restore();
+ } else {
+ if (fillStrokeMode === _util.TextRenderingMode.FILL || fillStrokeMode === _util.TextRenderingMode.FILL_STROKE) {
+ ctx.fillText(character, x, y);
+ }
+
+ if (fillStrokeMode === _util.TextRenderingMode.STROKE || fillStrokeMode === _util.TextRenderingMode.FILL_STROKE) {
+ ctx.strokeText(character, x, y);
+ }
+ }
+
+ if (isAddToPathSet) {
+ var paths = this.pendingTextPaths || (this.pendingTextPaths = []);
+ paths.push({
+ transform: ctx.mozCurrentTransform,
+ x: x,
+ y: y,
+ fontSize: fontSize,
+ addToPath: addToPath
+ });
+ }
+ },
+
+ get isFontSubpixelAAEnabled() {
+ var _this$cachedCanvases$ = this.cachedCanvases.getCanvas('isFontSubpixelAAEnabled', 10, 10),
+ ctx = _this$cachedCanvases$.context;
+
+ ctx.scale(1.5, 1);
+ ctx.fillText('I', 0, 10);
+ var data = ctx.getImageData(0, 0, 10, 10).data;
+ var enabled = false;
+
+ for (var i = 3; i < data.length; i += 4) {
+ if (data[i] > 0 && data[i] < 255) {
+ enabled = true;
+ break;
+ }
+ }
+
+ return (0, _util.shadow)(this, 'isFontSubpixelAAEnabled', enabled);
+ },
+
+ showText: function CanvasGraphics_showText(glyphs) {
+ var current = this.current;
+ var font = current.font;
+
+ if (font.isType3Font) {
+ return this.showType3Text(glyphs);
+ }
+
+ var fontSize = current.fontSize;
+
+ if (fontSize === 0) {
+ return undefined;
+ }
+
+ var ctx = this.ctx;
+ var fontSizeScale = current.fontSizeScale;
+ var charSpacing = current.charSpacing;
+ var wordSpacing = current.wordSpacing;
+ var fontDirection = current.fontDirection;
+ var textHScale = current.textHScale * fontDirection;
+ var glyphsLength = glyphs.length;
+ var vertical = font.vertical;
+ var spacingDir = vertical ? 1 : -1;
+ var defaultVMetrics = font.defaultVMetrics;
+ var widthAdvanceScale = fontSize * current.fontMatrix[0];
+ var simpleFillText = current.textRenderingMode === _util.TextRenderingMode.FILL && !font.disableFontFace && !current.patternFill;
+ ctx.save();
+ var patternTransform;
+
+ if (current.patternFill) {
+ ctx.save();
+ var pattern = current.fillColor.getPattern(ctx, this);
+ patternTransform = ctx.mozCurrentTransform;
+ ctx.restore();
+ ctx.fillStyle = pattern;
+ }
+
+ ctx.transform.apply(ctx, current.textMatrix);
+ ctx.translate(current.x, current.y + current.textRise);
+
+ if (fontDirection > 0) {
+ ctx.scale(textHScale, -1);
+ } else {
+ ctx.scale(textHScale, 1);
+ }
+
+ var lineWidth = current.lineWidth;
+ var scale = current.textMatrixScale;
+
+ if (scale === 0 || lineWidth === 0) {
+ var fillStrokeMode = current.textRenderingMode & _util.TextRenderingMode.FILL_STROKE_MASK;
+
+ if (fillStrokeMode === _util.TextRenderingMode.STROKE || fillStrokeMode === _util.TextRenderingMode.FILL_STROKE) {
+ this._cachedGetSinglePixelWidth = null;
+ lineWidth = this.getSinglePixelWidth() * MIN_WIDTH_FACTOR;
+ }
+ } else {
+ lineWidth /= scale;
+ }
+
+ if (fontSizeScale !== 1.0) {
+ ctx.scale(fontSizeScale, fontSizeScale);
+ lineWidth /= fontSizeScale;
+ }
+
+ ctx.lineWidth = lineWidth;
+ var x = 0,
+ i;
+
+ for (i = 0; i < glyphsLength; ++i) {
+ var glyph = glyphs[i];
+
+ if ((0, _util.isNum)(glyph)) {
+ x += spacingDir * glyph * fontSize / 1000;
+ continue;
+ }
+
+ var restoreNeeded = false;
+ var spacing = (glyph.isSpace ? wordSpacing : 0) + charSpacing;
+ var character = glyph.fontChar;
+ var accent = glyph.accent;
+ var scaledX, scaledY, scaledAccentX, scaledAccentY;
+ var width = glyph.width;
+
+ if (vertical) {
+ var vmetric, vx, vy;
+ vmetric = glyph.vmetric || defaultVMetrics;
+ vx = glyph.vmetric ? vmetric[1] : width * 0.5;
+ vx = -vx * widthAdvanceScale;
+ vy = vmetric[2] * widthAdvanceScale;
+ width = vmetric ? -vmetric[0] : width;
+ scaledX = vx / fontSizeScale;
+ scaledY = (x + vy) / fontSizeScale;
+ } else {
+ scaledX = x / fontSizeScale;
+ scaledY = 0;
+ }
+
+ if (font.remeasure && width > 0) {
+ var measuredWidth = ctx.measureText(character).width * 1000 / fontSize * fontSizeScale;
+
+ if (width < measuredWidth && this.isFontSubpixelAAEnabled) {
+ var characterScaleX = width / measuredWidth;
+ restoreNeeded = true;
+ ctx.save();
+ ctx.scale(characterScaleX, 1);
+ scaledX /= characterScaleX;
+ } else if (width !== measuredWidth) {
+ scaledX += (width - measuredWidth) / 2000 * fontSize / fontSizeScale;
+ }
+ }
+
+ if (glyph.isInFont || font.missingFile) {
+ if (simpleFillText && !accent) {
+ ctx.fillText(character, scaledX, scaledY);
+ } else {
+ this.paintChar(character, scaledX, scaledY, patternTransform);
+
+ if (accent) {
+ scaledAccentX = scaledX + accent.offset.x / fontSizeScale;
+ scaledAccentY = scaledY - accent.offset.y / fontSizeScale;
+ this.paintChar(accent.fontChar, scaledAccentX, scaledAccentY, patternTransform);
+ }
+ }
+ }
+
+ var charWidth = width * widthAdvanceScale + spacing * fontDirection;
+ x += charWidth;
+
+ if (restoreNeeded) {
+ ctx.restore();
+ }
+ }
+
+ if (vertical) {
+ current.y -= x * textHScale;
+ } else {
+ current.x += x * textHScale;
+ }
+
+ ctx.restore();
+ },
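+
+    // In the glyph loop above, plain numbers inside the glyphs array are TJ-style spacing
+    // adjustments (scaled by fontSize / 1000 and the writing direction), while glyph objects are
+    // either drawn directly with fillText on the fast path (simpleFillText) or routed through
+    // paintChar, which also covers accents, pattern fills and the add-to-path rendering modes.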
+ showType3Text: function CanvasGraphics_showType3Text(glyphs) {
+ var ctx = this.ctx;
+ var current = this.current;
+ var font = current.font;
+ var fontSize = current.fontSize;
+ var fontDirection = current.fontDirection;
+ var spacingDir = font.vertical ? 1 : -1;
+ var charSpacing = current.charSpacing;
+ var wordSpacing = current.wordSpacing;
+ var textHScale = current.textHScale * fontDirection;
+ var fontMatrix = current.fontMatrix || _util.FONT_IDENTITY_MATRIX;
+ var glyphsLength = glyphs.length;
+ var isTextInvisible = current.textRenderingMode === _util.TextRenderingMode.INVISIBLE;
+ var i, glyph, width, spacingLength;
+
+ if (isTextInvisible || fontSize === 0) {
+ return;
+ }
+
+ this._cachedGetSinglePixelWidth = null;
+ ctx.save();
+ ctx.transform.apply(ctx, current.textMatrix);
+ ctx.translate(current.x, current.y);
+ ctx.scale(textHScale, fontDirection);
+
+ for (i = 0; i < glyphsLength; ++i) {
+ glyph = glyphs[i];
+
+ if ((0, _util.isNum)(glyph)) {
+ spacingLength = spacingDir * glyph * fontSize / 1000;
+ this.ctx.translate(spacingLength, 0);
+ current.x += spacingLength * textHScale;
+ continue;
+ }
+
+ var spacing = (glyph.isSpace ? wordSpacing : 0) + charSpacing;
+ var operatorList = font.charProcOperatorList[glyph.operatorListId];
+
+ if (!operatorList) {
+ (0, _util.warn)("Type3 character \"".concat(glyph.operatorListId, "\" is not available."));
+ continue;
+ }
+
+ this.processingType3 = glyph;
+ this.save();
+ ctx.scale(fontSize, fontSize);
+ ctx.transform.apply(ctx, fontMatrix);
+ this.executeOperatorList(operatorList);
+ this.restore();
+
+ var transformed = _util.Util.applyTransform([glyph.width, 0], fontMatrix);
+
+ width = transformed[0] * fontSize + spacing;
+ ctx.translate(width, 0);
+ current.x += width * textHScale;
+ }
+
+ ctx.restore();
+ this.processingType3 = null;
+ },
+ setCharWidth: function CanvasGraphics_setCharWidth(xWidth, yWidth) {},
+ setCharWidthAndBounds: function CanvasGraphics_setCharWidthAndBounds(xWidth, yWidth, llx, lly, urx, ury) {
+ this.ctx.rect(llx, lly, urx - llx, ury - lly);
+ this.clip();
+ this.endPath();
+ },
+ getColorN_Pattern: function CanvasGraphics_getColorN_Pattern(IR) {
+ var _this = this;
+
+ var pattern;
+
+ if (IR[0] === 'TilingPattern') {
+ var color = IR[1];
+ var baseTransform = this.baseTransform || this.ctx.mozCurrentTransform.slice();
+ var canvasGraphicsFactory = {
+ createCanvasGraphics: function createCanvasGraphics(ctx) {
+ return new CanvasGraphics(ctx, _this.commonObjs, _this.objs, _this.canvasFactory, _this.webGLContext);
+ }
+ };
+ pattern = new _pattern_helper.TilingPattern(IR, color, this.ctx, canvasGraphicsFactory, baseTransform);
+ } else {
+ pattern = (0, _pattern_helper.getShadingPatternFromIR)(IR);
+ }
+
+ return pattern;
+ },
+ setStrokeColorN: function CanvasGraphics_setStrokeColorN() {
+ this.current.strokeColor = this.getColorN_Pattern(arguments);
+ },
+ setFillColorN: function CanvasGraphics_setFillColorN() {
+ this.current.fillColor = this.getColorN_Pattern(arguments);
+ this.current.patternFill = true;
+ },
+ setStrokeRGBColor: function CanvasGraphics_setStrokeRGBColor(r, g, b) {
+ var color = _util.Util.makeCssRgb(r, g, b);
+
+ this.ctx.strokeStyle = color;
+ this.current.strokeColor = color;
+ },
+ setFillRGBColor: function CanvasGraphics_setFillRGBColor(r, g, b) {
+ var color = _util.Util.makeCssRgb(r, g, b);
+
+ this.ctx.fillStyle = color;
+ this.current.fillColor = color;
+ this.current.patternFill = false;
+ },
+ shadingFill: function CanvasGraphics_shadingFill(patternIR) {
+ var ctx = this.ctx;
+ this.save();
+ var pattern = (0, _pattern_helper.getShadingPatternFromIR)(patternIR);
+ ctx.fillStyle = pattern.getPattern(ctx, this, true);
+ var inv = ctx.mozCurrentTransformInverse;
+
+ if (inv) {
+ var canvas = ctx.canvas;
+ var width = canvas.width;
+ var height = canvas.height;
+
+ var bl = _util.Util.applyTransform([0, 0], inv);
+
+ var br = _util.Util.applyTransform([0, height], inv);
+
+ var ul = _util.Util.applyTransform([width, 0], inv);
+
+ var ur = _util.Util.applyTransform([width, height], inv);
+
+ var x0 = Math.min(bl[0], br[0], ul[0], ur[0]);
+ var y0 = Math.min(bl[1], br[1], ul[1], ur[1]);
+ var x1 = Math.max(bl[0], br[0], ul[0], ur[0]);
+ var y1 = Math.max(bl[1], br[1], ul[1], ur[1]);
+ this.ctx.fillRect(x0, y0, x1 - x0, y1 - y0);
+ } else {
+ this.ctx.fillRect(-1e10, -1e10, 2e10, 2e10);
+ }
+
+ this.restore();
+ },
+ beginInlineImage: function CanvasGraphics_beginInlineImage() {
+ (0, _util.unreachable)('Should not call beginInlineImage');
+ },
+ beginImageData: function CanvasGraphics_beginImageData() {
+ (0, _util.unreachable)('Should not call beginImageData');
+ },
+ paintFormXObjectBegin: function CanvasGraphics_paintFormXObjectBegin(matrix, bbox) {
+ this.save();
+ this.baseTransformStack.push(this.baseTransform);
+
+ if (Array.isArray(matrix) && matrix.length === 6) {
+ this.transform.apply(this, matrix);
+ }
+
+ this.baseTransform = this.ctx.mozCurrentTransform;
+
+ if (bbox) {
+ var width = bbox[2] - bbox[0];
+ var height = bbox[3] - bbox[1];
+ this.ctx.rect(bbox[0], bbox[1], width, height);
+ this.clip();
+ this.endPath();
+ }
+ },
+ paintFormXObjectEnd: function CanvasGraphics_paintFormXObjectEnd() {
+ this.restore();
+ this.baseTransform = this.baseTransformStack.pop();
+ },
+ beginGroup: function CanvasGraphics_beginGroup(group) {
+ this.save();
+ var currentCtx = this.ctx;
+
+ if (!group.isolated) {
+ (0, _util.info)('TODO: Support non-isolated groups.');
+ }
+
+ if (group.knockout) {
+ (0, _util.warn)('Knockout groups not supported.');
+ }
+
+ var currentTransform = currentCtx.mozCurrentTransform;
+
+ if (group.matrix) {
+ currentCtx.transform.apply(currentCtx, group.matrix);
+ }
+
+ if (!group.bbox) {
+ throw new Error('Bounding box is required.');
+ }
+
+ var bounds = _util.Util.getAxialAlignedBoundingBox(group.bbox, currentCtx.mozCurrentTransform);
+
+ var canvasBounds = [0, 0, currentCtx.canvas.width, currentCtx.canvas.height];
+ bounds = _util.Util.intersect(bounds, canvasBounds) || [0, 0, 0, 0];
+ var offsetX = Math.floor(bounds[0]);
+ var offsetY = Math.floor(bounds[1]);
+ var drawnWidth = Math.max(Math.ceil(bounds[2]) - offsetX, 1);
+ var drawnHeight = Math.max(Math.ceil(bounds[3]) - offsetY, 1);
+ var scaleX = 1,
+ scaleY = 1;
+
+ if (drawnWidth > MAX_GROUP_SIZE) {
+ scaleX = drawnWidth / MAX_GROUP_SIZE;
+ drawnWidth = MAX_GROUP_SIZE;
+ }
+
+ if (drawnHeight > MAX_GROUP_SIZE) {
+ scaleY = drawnHeight / MAX_GROUP_SIZE;
+ drawnHeight = MAX_GROUP_SIZE;
+ }
+
+ var cacheId = 'groupAt' + this.groupLevel;
+
+ if (group.smask) {
+ cacheId += '_smask_' + this.smaskCounter++ % 2;
+ }
+
+ var scratchCanvas = this.cachedCanvases.getCanvas(cacheId, drawnWidth, drawnHeight, true);
+ var groupCtx = scratchCanvas.context;
+ groupCtx.scale(1 / scaleX, 1 / scaleY);
+ groupCtx.translate(-offsetX, -offsetY);
+ groupCtx.transform.apply(groupCtx, currentTransform);
+
+ if (group.smask) {
+ this.smaskStack.push({
+ canvas: scratchCanvas.canvas,
+ context: groupCtx,
+ offsetX: offsetX,
+ offsetY: offsetY,
+ scaleX: scaleX,
+ scaleY: scaleY,
+ subtype: group.smask.subtype,
+ backdrop: group.smask.backdrop,
+ transferMap: group.smask.transferMap || null,
+ startTransformInverse: null
+ });
+ } else {
+ currentCtx.setTransform(1, 0, 0, 1, 0, 0);
+ currentCtx.translate(offsetX, offsetY);
+ currentCtx.scale(scaleX, scaleY);
+ }
+
+ copyCtxState(currentCtx, groupCtx);
+ this.ctx = groupCtx;
+ this.setGState([['BM', 'source-over'], ['ca', 1], ['CA', 1]]);
+ this.groupStack.push(currentCtx);
+ this.groupLevel++;
+ this.current.activeSMask = null;
+ },
+ endGroup: function CanvasGraphics_endGroup(group) {
+ this.groupLevel--;
+ var groupCtx = this.ctx;
+ this.ctx = this.groupStack.pop();
+
+ if (this.ctx.imageSmoothingEnabled !== undefined) {
+ this.ctx.imageSmoothingEnabled = false;
+ } else {
+ this.ctx.mozImageSmoothingEnabled = false;
+ }
+
+ if (group.smask) {
+ this.tempSMask = this.smaskStack.pop();
+ } else {
+ this.ctx.drawImage(groupCtx.canvas, 0, 0);
+ }
+
+ this.restore();
+ },
+ beginAnnotations: function CanvasGraphics_beginAnnotations() {
+ this.save();
+
+ if (this.baseTransform) {
+ this.ctx.setTransform.apply(this.ctx, this.baseTransform);
+ }
+ },
+ endAnnotations: function CanvasGraphics_endAnnotations() {
+ this.restore();
+ },
+ beginAnnotation: function CanvasGraphics_beginAnnotation(rect, transform, matrix) {
+ this.save();
+ resetCtxToDefault(this.ctx);
+ this.current = new CanvasExtraState();
+
+ if (Array.isArray(rect) && rect.length === 4) {
+ var width = rect[2] - rect[0];
+ var height = rect[3] - rect[1];
+ this.ctx.rect(rect[0], rect[1], width, height);
+ this.clip();
+ this.endPath();
+ }
+
+ this.transform.apply(this, transform);
+ this.transform.apply(this, matrix);
+ },
+ endAnnotation: function CanvasGraphics_endAnnotation() {
+ this.restore();
+ },
+ paintJpegXObject: function CanvasGraphics_paintJpegXObject(objId, w, h) {
+ var domImage = this.processingType3 ? this.commonObjs.get(objId) : this.objs.get(objId);
+
+ if (!domImage) {
+ (0, _util.warn)('Dependent image isn\'t ready yet');
+ return;
+ }
+
+ this.save();
+ var ctx = this.ctx;
+ ctx.scale(1 / w, -1 / h);
+ ctx.drawImage(domImage, 0, 0, domImage.width, domImage.height, 0, -h, w, h);
+
+ if (this.imageLayer) {
+ var currentTransform = ctx.mozCurrentTransformInverse;
+ var position = this.getCanvasPosition(0, 0);
+ this.imageLayer.appendImage({
+ objId: objId,
+ left: position[0],
+ top: position[1],
+ width: w / currentTransform[0],
+ height: h / currentTransform[3]
+ });
+ }
+
+ this.restore();
+ },
+ paintImageMaskXObject: function CanvasGraphics_paintImageMaskXObject(img) {
+ var ctx = this.ctx;
+ var width = img.width,
+ height = img.height;
+ var fillColor = this.current.fillColor;
+ var isPatternFill = this.current.patternFill;
+ var glyph = this.processingType3;
+
+ if (COMPILE_TYPE3_GLYPHS && glyph && glyph.compiled === undefined) {
+ if (width <= MAX_SIZE_TO_COMPILE && height <= MAX_SIZE_TO_COMPILE) {
+ glyph.compiled = compileType3Glyph({
+ data: img.data,
+ width: width,
+ height: height
+ });
+ } else {
+ glyph.compiled = null;
+ }
+ }
+
+ if (glyph && glyph.compiled) {
+ glyph.compiled(ctx);
+ return;
+ }
+
+ var maskCanvas = this.cachedCanvases.getCanvas('maskCanvas', width, height);
+ var maskCtx = maskCanvas.context;
+ maskCtx.save();
+ putBinaryImageMask(maskCtx, img);
+ maskCtx.globalCompositeOperation = 'source-in';
+ maskCtx.fillStyle = isPatternFill ? fillColor.getPattern(maskCtx, this) : fillColor;
+ maskCtx.fillRect(0, 0, width, height);
+ maskCtx.restore();
+ this.paintInlineImageXObject(maskCanvas.canvas);
+ },
+ paintImageMaskXObjectRepeat: function CanvasGraphics_paintImageMaskXObjectRepeat(imgData, scaleX, scaleY, positions) {
+ var width = imgData.width;
+ var height = imgData.height;
+ var fillColor = this.current.fillColor;
+ var isPatternFill = this.current.patternFill;
+ var maskCanvas = this.cachedCanvases.getCanvas('maskCanvas', width, height);
+ var maskCtx = maskCanvas.context;
+ maskCtx.save();
+ putBinaryImageMask(maskCtx, imgData);
+ maskCtx.globalCompositeOperation = 'source-in';
+ maskCtx.fillStyle = isPatternFill ? fillColor.getPattern(maskCtx, this) : fillColor;
+ maskCtx.fillRect(0, 0, width, height);
+ maskCtx.restore();
+ var ctx = this.ctx;
+
+ for (var i = 0, ii = positions.length; i < ii; i += 2) {
+ ctx.save();
+ ctx.transform(scaleX, 0, 0, scaleY, positions[i], positions[i + 1]);
+ ctx.scale(1, -1);
+ ctx.drawImage(maskCanvas.canvas, 0, 0, width, height, 0, -1, 1, 1);
+ ctx.restore();
+ }
+ },
+ paintImageMaskXObjectGroup: function CanvasGraphics_paintImageMaskXObjectGroup(images) {
+ var ctx = this.ctx;
+ var fillColor = this.current.fillColor;
+ var isPatternFill = this.current.patternFill;
+
+ for (var i = 0, ii = images.length; i < ii; i++) {
+ var image = images[i];
+ var width = image.width,
+ height = image.height;
+ var maskCanvas = this.cachedCanvases.getCanvas('maskCanvas', width, height);
+ var maskCtx = maskCanvas.context;
+ maskCtx.save();
+ putBinaryImageMask(maskCtx, image);
+ maskCtx.globalCompositeOperation = 'source-in';
+ maskCtx.fillStyle = isPatternFill ? fillColor.getPattern(maskCtx, this) : fillColor;
+ maskCtx.fillRect(0, 0, width, height);
+ maskCtx.restore();
+ ctx.save();
+ ctx.transform.apply(ctx, image.transform);
+ ctx.scale(1, -1);
+ ctx.drawImage(maskCanvas.canvas, 0, 0, width, height, 0, -1, 1, 1);
+ ctx.restore();
+ }
+ },
+ paintImageXObject: function CanvasGraphics_paintImageXObject(objId) {
+ var imgData = this.processingType3 ? this.commonObjs.get(objId) : this.objs.get(objId);
+
+ if (!imgData) {
+ (0, _util.warn)('Dependent image isn\'t ready yet');
+ return;
+ }
+
+ this.paintInlineImageXObject(imgData);
+ },
+ paintImageXObjectRepeat: function CanvasGraphics_paintImageXObjectRepeat(objId, scaleX, scaleY, positions) {
+ var imgData = this.processingType3 ? this.commonObjs.get(objId) : this.objs.get(objId);
+
+ if (!imgData) {
+ (0, _util.warn)('Dependent image isn\'t ready yet');
+ return;
+ }
+
+ var width = imgData.width;
+ var height = imgData.height;
+ var map = [];
+
+ for (var i = 0, ii = positions.length; i < ii; i += 2) {
+ map.push({
+ transform: [scaleX, 0, 0, scaleY, positions[i], positions[i + 1]],
+ x: 0,
+ y: 0,
+ w: width,
+ h: height
+ });
+ }
+
+ this.paintInlineImageXObjectGroup(imgData, map);
+ },
+ paintInlineImageXObject: function CanvasGraphics_paintInlineImageXObject(imgData) {
+ var width = imgData.width;
+ var height = imgData.height;
+ var ctx = this.ctx;
+ this.save();
+ ctx.scale(1 / width, -1 / height);
+ var currentTransform = ctx.mozCurrentTransformInverse;
+ var a = currentTransform[0],
+ b = currentTransform[1];
+ var widthScale = Math.max(Math.sqrt(a * a + b * b), 1);
+ var c = currentTransform[2],
+ d = currentTransform[3];
+ var heightScale = Math.max(Math.sqrt(c * c + d * d), 1);
+ var imgToPaint, tmpCanvas;
+
+ if (typeof HTMLElement === 'function' && imgData instanceof HTMLElement || !imgData.data) {
+ imgToPaint = imgData;
+ } else {
+ tmpCanvas = this.cachedCanvases.getCanvas('inlineImage', width, height);
+ var tmpCtx = tmpCanvas.context;
+ putBinaryImageData(tmpCtx, imgData);
+ imgToPaint = tmpCanvas.canvas;
+ }
+
+ var paintWidth = width,
+ paintHeight = height;
+ var tmpCanvasId = 'prescale1';
+
+ while (widthScale > 2 && paintWidth > 1 || heightScale > 2 && paintHeight > 1) {
+ var newWidth = paintWidth,
+ newHeight = paintHeight;
+
+ if (widthScale > 2 && paintWidth > 1) {
+ newWidth = Math.ceil(paintWidth / 2);
+ widthScale /= paintWidth / newWidth;
+ }
+
+ if (heightScale > 2 && paintHeight > 1) {
+ newHeight = Math.ceil(paintHeight / 2);
+ heightScale /= paintHeight / newHeight;
+ }
+
+ tmpCanvas = this.cachedCanvases.getCanvas(tmpCanvasId, newWidth, newHeight);
+ tmpCtx = tmpCanvas.context;
+ tmpCtx.clearRect(0, 0, newWidth, newHeight);
+ tmpCtx.drawImage(imgToPaint, 0, 0, paintWidth, paintHeight, 0, 0, newWidth, newHeight);
+ imgToPaint = tmpCanvas.canvas;
+ paintWidth = newWidth;
+ paintHeight = newHeight;
+ tmpCanvasId = tmpCanvasId === 'prescale1' ? 'prescale2' : 'prescale1';
+ }
+
+ ctx.drawImage(imgToPaint, 0, 0, paintWidth, paintHeight, 0, -height, width, height);
+
+ if (this.imageLayer) {
+ var position = this.getCanvasPosition(0, -height);
+ this.imageLayer.appendImage({
+ imgData: imgData,
+ left: position[0],
+ top: position[1],
+ width: width / currentTransform[0],
+ height: height / currentTransform[3]
+ });
+ }
+
+ this.restore();
+ },
+ paintInlineImageXObjectGroup: function CanvasGraphics_paintInlineImageXObjectGroup(imgData, map) {
+ var ctx = this.ctx;
+ var w = imgData.width;
+ var h = imgData.height;
+ var tmpCanvas = this.cachedCanvases.getCanvas('inlineImage', w, h);
+ var tmpCtx = tmpCanvas.context;
+ putBinaryImageData(tmpCtx, imgData);
+
+ for (var i = 0, ii = map.length; i < ii; i++) {
+ var entry = map[i];
+ ctx.save();
+ ctx.transform.apply(ctx, entry.transform);
+ ctx.scale(1, -1);
+ ctx.drawImage(tmpCanvas.canvas, entry.x, entry.y, entry.w, entry.h, 0, -1, 1, 1);
+
+ if (this.imageLayer) {
+ var position = this.getCanvasPosition(entry.x, entry.y);
+ this.imageLayer.appendImage({
+ imgData: imgData,
+ left: position[0],
+ top: position[1],
+ width: w,
+ height: h
+ });
+ }
+
+ ctx.restore();
+ }
+ },
+ paintSolidColorImageMask: function CanvasGraphics_paintSolidColorImageMask() {
+ this.ctx.fillRect(0, 0, 1, 1);
+ },
+ paintXObject: function CanvasGraphics_paintXObject() {
+ (0, _util.warn)('Unsupported \'paintXObject\' command.');
+ },
+ markPoint: function CanvasGraphics_markPoint(tag) {},
+ markPointProps: function CanvasGraphics_markPointProps(tag, properties) {},
+ beginMarkedContent: function CanvasGraphics_beginMarkedContent(tag) {},
+ beginMarkedContentProps: function CanvasGraphics_beginMarkedContentProps(tag, properties) {},
+ endMarkedContent: function CanvasGraphics_endMarkedContent() {},
+ beginCompat: function CanvasGraphics_beginCompat() {},
+ endCompat: function CanvasGraphics_endCompat() {},
+ consumePath: function CanvasGraphics_consumePath() {
+ var ctx = this.ctx;
+
+ if (this.pendingClip) {
+ if (this.pendingClip === EO_CLIP) {
+ ctx.clip('evenodd');
+ } else {
+ ctx.clip();
+ }
+
+ this.pendingClip = null;
+ }
+
+ ctx.beginPath();
+ },
+ getSinglePixelWidth: function getSinglePixelWidth(scale) {
+ if (this._cachedGetSinglePixelWidth === null) {
+ var inverse = this.ctx.mozCurrentTransformInverse;
+ this._cachedGetSinglePixelWidth = Math.sqrt(Math.max(inverse[0] * inverse[0] + inverse[1] * inverse[1], inverse[2] * inverse[2] + inverse[3] * inverse[3]));
+ }
+
+ return this._cachedGetSinglePixelWidth;
+ },
+ getCanvasPosition: function CanvasGraphics_getCanvasPosition(x, y) {
+ var transform = this.ctx.mozCurrentTransform;
+ return [transform[0] * x + transform[2] * y + transform[4], transform[1] * x + transform[3] * y + transform[5]];
+ }
+ };
+
+ for (var op in _util.OPS) {
+ CanvasGraphics.prototype[_util.OPS[op]] = CanvasGraphics.prototype[op];
+ }
+
+ return CanvasGraphics;
+}();
+
+exports.CanvasGraphics = CanvasGraphics;
+
+/***/ }),
+/* 155 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.getShadingPatternFromIR = getShadingPatternFromIR;
+exports.TilingPattern = void 0;
+
+var _util = __w_pdfjs_require__(1);
+
+var ShadingIRs = {};
+ShadingIRs.RadialAxial = {
+ fromIR: function RadialAxial_fromIR(raw) {
+ var type = raw[1];
+ var colorStops = raw[2];
+ var p0 = raw[3];
+ var p1 = raw[4];
+ var r0 = raw[5];
+ var r1 = raw[6];
+ return {
+ type: 'Pattern',
+ getPattern: function RadialAxial_getPattern(ctx) {
+ var grad;
+
+ if (type === 'axial') {
+ grad = ctx.createLinearGradient(p0[0], p0[1], p1[0], p1[1]);
+ } else if (type === 'radial') {
+ grad = ctx.createRadialGradient(p0[0], p0[1], r0, p1[0], p1[1], r1);
+ }
+
+ for (var i = 0, ii = colorStops.length; i < ii; ++i) {
+ var c = colorStops[i];
+ grad.addColorStop(c[0], c[1]);
+ }
+
+ return grad;
+ }
+ };
+ }
+};
+
+var createMeshCanvas = function createMeshCanvasClosure() {
+ function drawTriangle(data, context, p1, p2, p3, c1, c2, c3) {
+ var coords = context.coords,
+ colors = context.colors;
+ var bytes = data.data,
+ rowSize = data.width * 4;
+ var tmp;
+
+ if (coords[p1 + 1] > coords[p2 + 1]) {
+ tmp = p1;
+ p1 = p2;
+ p2 = tmp;
+ tmp = c1;
+ c1 = c2;
+ c2 = tmp;
+ }
+
+ if (coords[p2 + 1] > coords[p3 + 1]) {
+ tmp = p2;
+ p2 = p3;
+ p3 = tmp;
+ tmp = c2;
+ c2 = c3;
+ c3 = tmp;
+ }
+
+ if (coords[p1 + 1] > coords[p2 + 1]) {
+ tmp = p1;
+ p1 = p2;
+ p2 = tmp;
+ tmp = c1;
+ c1 = c2;
+ c2 = tmp;
+ }
+
+ var x1 = (coords[p1] + context.offsetX) * context.scaleX;
+ var y1 = (coords[p1 + 1] + context.offsetY) * context.scaleY;
+ var x2 = (coords[p2] + context.offsetX) * context.scaleX;
+ var y2 = (coords[p2 + 1] + context.offsetY) * context.scaleY;
+ var x3 = (coords[p3] + context.offsetX) * context.scaleX;
+ var y3 = (coords[p3 + 1] + context.offsetY) * context.scaleY;
+
+ if (y1 >= y3) {
+ return;
+ }
+
+ var c1r = colors[c1],
+ c1g = colors[c1 + 1],
+ c1b = colors[c1 + 2];
+ var c2r = colors[c2],
+ c2g = colors[c2 + 1],
+ c2b = colors[c2 + 2];
+ var c3r = colors[c3],
+ c3g = colors[c3 + 1],
+ c3b = colors[c3 + 2];
+ var minY = Math.round(y1),
+ maxY = Math.round(y3);
+ var xa, car, cag, cab;
+ var xb, cbr, cbg, cbb;
+ var k;
+
+ for (var y = minY; y <= maxY; y++) {
+ if (y < y2) {
+ k = y < y1 ? 0 : y1 === y2 ? 1 : (y1 - y) / (y1 - y2);
+ xa = x1 - (x1 - x2) * k;
+ car = c1r - (c1r - c2r) * k;
+ cag = c1g - (c1g - c2g) * k;
+ cab = c1b - (c1b - c2b) * k;
+ } else {
+ k = y > y3 ? 1 : y2 === y3 ? 0 : (y2 - y) / (y2 - y3);
+ xa = x2 - (x2 - x3) * k;
+ car = c2r - (c2r - c3r) * k;
+ cag = c2g - (c2g - c3g) * k;
+ cab = c2b - (c2b - c3b) * k;
+ }
+
+ k = y < y1 ? 0 : y > y3 ? 1 : (y1 - y) / (y1 - y3);
+ xb = x1 - (x1 - x3) * k;
+ cbr = c1r - (c1r - c3r) * k;
+ cbg = c1g - (c1g - c3g) * k;
+ cbb = c1b - (c1b - c3b) * k;
+ var x1_ = Math.round(Math.min(xa, xb));
+ var x2_ = Math.round(Math.max(xa, xb));
+ var j = rowSize * y + x1_ * 4;
+
+ for (var x = x1_; x <= x2_; x++) {
+ k = (xa - x) / (xa - xb);
+ k = k < 0 ? 0 : k > 1 ? 1 : k;
+ bytes[j++] = car - (car - cbr) * k | 0;
+ bytes[j++] = cag - (cag - cbg) * k | 0;
+ bytes[j++] = cab - (cab - cbb) * k | 0;
+ bytes[j++] = 255;
+ }
+ }
+ }
+
+ function drawFigure(data, figure, context) {
+ var ps = figure.coords;
+ var cs = figure.colors;
+ var i, ii;
+
+ switch (figure.type) {
+ case 'lattice':
+ var verticesPerRow = figure.verticesPerRow;
+ var rows = Math.floor(ps.length / verticesPerRow) - 1;
+ var cols = verticesPerRow - 1;
+
+ for (i = 0; i < rows; i++) {
+ var q = i * verticesPerRow;
+
+ for (var j = 0; j < cols; j++, q++) {
+ drawTriangle(data, context, ps[q], ps[q + 1], ps[q + verticesPerRow], cs[q], cs[q + 1], cs[q + verticesPerRow]);
+ drawTriangle(data, context, ps[q + verticesPerRow + 1], ps[q + 1], ps[q + verticesPerRow], cs[q + verticesPerRow + 1], cs[q + 1], cs[q + verticesPerRow]);
+ }
+ }
+
+ break;
+
+ case 'triangles':
+ for (i = 0, ii = ps.length; i < ii; i += 3) {
+ drawTriangle(data, context, ps[i], ps[i + 1], ps[i + 2], cs[i], cs[i + 1], cs[i + 2]);
+ }
+
+ break;
+
+ default:
+ throw new Error('illegal figure');
+ }
+ }
+
+ function createMeshCanvas(bounds, combinesScale, coords, colors, figures, backgroundColor, cachedCanvases, webGLContext) {
+ var EXPECTED_SCALE = 1.1;
+ var MAX_PATTERN_SIZE = 3000;
+ var BORDER_SIZE = 2;
+ var offsetX = Math.floor(bounds[0]);
+ var offsetY = Math.floor(bounds[1]);
+ var boundsWidth = Math.ceil(bounds[2]) - offsetX;
+ var boundsHeight = Math.ceil(bounds[3]) - offsetY;
+ var width = Math.min(Math.ceil(Math.abs(boundsWidth * combinesScale[0] * EXPECTED_SCALE)), MAX_PATTERN_SIZE);
+ var height = Math.min(Math.ceil(Math.abs(boundsHeight * combinesScale[1] * EXPECTED_SCALE)), MAX_PATTERN_SIZE);
+ var scaleX = boundsWidth / width;
+ var scaleY = boundsHeight / height;
+ var context = {
+ coords: coords,
+ colors: colors,
+ offsetX: -offsetX,
+ offsetY: -offsetY,
+ scaleX: 1 / scaleX,
+ scaleY: 1 / scaleY
+ };
+ var paddedWidth = width + BORDER_SIZE * 2;
+ var paddedHeight = height + BORDER_SIZE * 2;
+ var canvas, tmpCanvas, i, ii;
+
+ if (webGLContext.isEnabled) {
+ canvas = webGLContext.drawFigures({
+ width: width,
+ height: height,
+ backgroundColor: backgroundColor,
+ figures: figures,
+ context: context
+ });
+ tmpCanvas = cachedCanvases.getCanvas('mesh', paddedWidth, paddedHeight, false);
+ tmpCanvas.context.drawImage(canvas, BORDER_SIZE, BORDER_SIZE);
+ canvas = tmpCanvas.canvas;
+ } else {
+ tmpCanvas = cachedCanvases.getCanvas('mesh', paddedWidth, paddedHeight, false);
+ var tmpCtx = tmpCanvas.context;
+ var data = tmpCtx.createImageData(width, height);
+
+ if (backgroundColor) {
+ var bytes = data.data;
+
+ for (i = 0, ii = bytes.length; i < ii; i += 4) {
+ bytes[i] = backgroundColor[0];
+ bytes[i + 1] = backgroundColor[1];
+ bytes[i + 2] = backgroundColor[2];
+ bytes[i + 3] = 255;
+ }
+ }
+
+ for (i = 0; i < figures.length; i++) {
+ drawFigure(data, figures[i], context);
+ }
+
+ tmpCtx.putImageData(data, BORDER_SIZE, BORDER_SIZE);
+ canvas = tmpCanvas.canvas;
+ }
+
+ return {
+ canvas: canvas,
+ offsetX: offsetX - BORDER_SIZE * scaleX,
+ offsetY: offsetY - BORDER_SIZE * scaleY,
+ scaleX: scaleX,
+ scaleY: scaleY
+ };
+ }
+
+ return createMeshCanvas;
+}();
+
+ShadingIRs.Mesh = {
+ fromIR: function Mesh_fromIR(raw) {
+ var coords = raw[2];
+ var colors = raw[3];
+ var figures = raw[4];
+ var bounds = raw[5];
+ var matrix = raw[6];
+ var background = raw[8];
+ return {
+ type: 'Pattern',
+ getPattern: function Mesh_getPattern(ctx, owner, shadingFill) {
+ var scale;
+
+ if (shadingFill) {
+ scale = _util.Util.singularValueDecompose2dScale(ctx.mozCurrentTransform);
+ } else {
+ scale = _util.Util.singularValueDecompose2dScale(owner.baseTransform);
+
+ if (matrix) {
+ var matrixScale = _util.Util.singularValueDecompose2dScale(matrix);
+
+ scale = [scale[0] * matrixScale[0], scale[1] * matrixScale[1]];
+ }
+ }
+
+ var temporaryPatternCanvas = createMeshCanvas(bounds, scale, coords, colors, figures, shadingFill ? null : background, owner.cachedCanvases, owner.webGLContext);
+
+ if (!shadingFill) {
+ ctx.setTransform.apply(ctx, owner.baseTransform);
+
+ if (matrix) {
+ ctx.transform.apply(ctx, matrix);
+ }
+ }
+
+ ctx.translate(temporaryPatternCanvas.offsetX, temporaryPatternCanvas.offsetY);
+ ctx.scale(temporaryPatternCanvas.scaleX, temporaryPatternCanvas.scaleY);
+ return ctx.createPattern(temporaryPatternCanvas.canvas, 'no-repeat');
+ }
+ };
+ }
+};
+ShadingIRs.Dummy = {
+ fromIR: function Dummy_fromIR() {
+ return {
+ type: 'Pattern',
+ getPattern: function Dummy_fromIR_getPattern() {
+ return 'hotpink';
+ }
+ };
+ }
+};
+
+function getShadingPatternFromIR(raw) {
+ var shadingIR = ShadingIRs[raw[0]];
+
+ if (!shadingIR) {
+ throw new Error("Unknown IR type: ".concat(raw[0]));
+ }
+
+ return shadingIR.fromIR(raw);
+}
+
+var TilingPattern = function TilingPatternClosure() {
+ var PaintType = {
+ COLORED: 1,
+ UNCOLORED: 2
+ };
+ var MAX_PATTERN_SIZE = 3000;
+
+ function TilingPattern(IR, color, ctx, canvasGraphicsFactory, baseTransform) {
+ this.operatorList = IR[2];
+ this.matrix = IR[3] || [1, 0, 0, 1, 0, 0];
+ this.bbox = IR[4];
+ this.xstep = IR[5];
+ this.ystep = IR[6];
+ this.paintType = IR[7];
+ this.tilingType = IR[8];
+ this.color = color;
+ this.canvasGraphicsFactory = canvasGraphicsFactory;
+ this.baseTransform = baseTransform;
+ this.type = 'Pattern';
+ this.ctx = ctx;
+ }
+
+ TilingPattern.prototype = {
+    createPatternCanvas: function TilingPattern_createPatternCanvas(owner) {
+ var operatorList = this.operatorList;
+ var bbox = this.bbox;
+ var xstep = this.xstep;
+ var ystep = this.ystep;
+ var paintType = this.paintType;
+ var tilingType = this.tilingType;
+ var color = this.color;
+ var canvasGraphicsFactory = this.canvasGraphicsFactory;
+ (0, _util.info)('TilingType: ' + tilingType);
+ var x0 = bbox[0],
+ y0 = bbox[1],
+ x1 = bbox[2],
+ y1 = bbox[3];
+
+ var matrixScale = _util.Util.singularValueDecompose2dScale(this.matrix);
+
+ var curMatrixScale = _util.Util.singularValueDecompose2dScale(this.baseTransform);
+
+ var combinedScale = [matrixScale[0] * curMatrixScale[0], matrixScale[1] * curMatrixScale[1]];
+ var dimx = this.getSizeAndScale(xstep, this.ctx.canvas.width, combinedScale[0]);
+ var dimy = this.getSizeAndScale(ystep, this.ctx.canvas.height, combinedScale[1]);
+ var tmpCanvas = owner.cachedCanvases.getCanvas('pattern', dimx.size, dimy.size, true);
+ var tmpCtx = tmpCanvas.context;
+ var graphics = canvasGraphicsFactory.createCanvasGraphics(tmpCtx);
+ graphics.groupLevel = owner.groupLevel;
+ this.setFillAndStrokeStyleToContext(graphics, paintType, color);
+ graphics.transform(dimx.scale, 0, 0, dimy.scale, 0, 0);
+ graphics.transform(1, 0, 0, 1, -x0, -y0);
+ this.clipBbox(graphics, bbox, x0, y0, x1, y1);
+ graphics.executeOperatorList(operatorList);
+ this.ctx.transform(1, 0, 0, 1, x0, y0);
+ this.ctx.scale(1 / dimx.scale, 1 / dimy.scale);
+ return tmpCanvas.canvas;
+ },
+ getSizeAndScale: function TilingPattern_getSizeAndScale(step, realOutputSize, scale) {
+ step = Math.abs(step);
+ var maxSize = Math.max(MAX_PATTERN_SIZE, realOutputSize);
+ var size = Math.ceil(step * scale);
+
+ if (size >= maxSize) {
+ size = maxSize;
+ } else {
+ scale = size / step;
+ }
+
+ return {
+ scale: scale,
+ size: size
+ };
+ },
+ clipBbox: function clipBbox(graphics, bbox, x0, y0, x1, y1) {
+ if (Array.isArray(bbox) && bbox.length === 4) {
+ var bboxWidth = x1 - x0;
+ var bboxHeight = y1 - y0;
+ graphics.ctx.rect(x0, y0, bboxWidth, bboxHeight);
+ graphics.clip();
+ graphics.endPath();
+ }
+ },
+ setFillAndStrokeStyleToContext: function setFillAndStrokeStyleToContext(graphics, paintType, color) {
+ var context = graphics.ctx,
+ current = graphics.current;
+
+ switch (paintType) {
+ case PaintType.COLORED:
+ var ctx = this.ctx;
+ context.fillStyle = ctx.fillStyle;
+ context.strokeStyle = ctx.strokeStyle;
+ current.fillColor = ctx.fillStyle;
+ current.strokeColor = ctx.strokeStyle;
+ break;
+
+ case PaintType.UNCOLORED:
+ var cssColor = _util.Util.makeCssRgb(color[0], color[1], color[2]);
+
+ context.fillStyle = cssColor;
+ context.strokeStyle = cssColor;
+ current.fillColor = cssColor;
+ current.strokeColor = cssColor;
+ break;
+
+ default:
+ throw new _util.FormatError("Unsupported paint type: ".concat(paintType));
+ }
+ },
+ getPattern: function TilingPattern_getPattern(ctx, owner) {
+ ctx = this.ctx;
+ ctx.setTransform.apply(ctx, this.baseTransform);
+ ctx.transform.apply(ctx, this.matrix);
+ var temporaryPatternCanvas = this.createPatternCanvas(owner);
+ return ctx.createPattern(temporaryPatternCanvas, 'repeat');
+ }
+ };
+ return TilingPattern;
+}();
+
+exports.TilingPattern = TilingPattern;
+
+/***/ }),
+/* 156 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.GlobalWorkerOptions = void 0;
+var GlobalWorkerOptions = Object.create(null);
+exports.GlobalWorkerOptions = GlobalWorkerOptions;
+GlobalWorkerOptions.workerPort = GlobalWorkerOptions.workerPort === undefined ? null : GlobalWorkerOptions.workerPort;
+GlobalWorkerOptions.workerSrc = GlobalWorkerOptions.workerSrc === undefined ? '' : GlobalWorkerOptions.workerSrc;
+
+/***/ }),
+/* 157 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.MessageHandler = MessageHandler;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(148));
+
+var _util = __w_pdfjs_require__(1);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+function resolveCall(_x, _x2) {
+ return _resolveCall.apply(this, arguments);
+}
+
+function _resolveCall() {
+ _resolveCall = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee(fn, args) {
+ var thisArg,
+ _args = arguments;
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ thisArg = _args.length > 2 && _args[2] !== undefined ? _args[2] : null;
+
+ if (fn) {
+ _context.next = 3;
+ break;
+ }
+
+ return _context.abrupt("return", undefined);
+
+ case 3:
+ return _context.abrupt("return", fn.apply(thisArg, args));
+
+ case 4:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee);
+ }));
+ return _resolveCall.apply(this, arguments);
+}
+
+function wrapReason(reason) {
+ if (_typeof(reason) !== 'object') {
+ return reason;
+ }
+
+ switch (reason.name) {
+ case 'AbortException':
+ return new _util.AbortException(reason.message);
+
+ case 'MissingPDFException':
+ return new _util.MissingPDFException(reason.message);
+
+ case 'UnexpectedResponseException':
+ return new _util.UnexpectedResponseException(reason.message, reason.status);
+
+ default:
+ return new _util.UnknownErrorException(reason.message, reason.details);
+ }
+}
+
+function makeReasonSerializable(reason) {
+ if (!(reason instanceof Error) || reason instanceof _util.AbortException || reason instanceof _util.MissingPDFException || reason instanceof _util.UnexpectedResponseException || reason instanceof _util.UnknownErrorException) {
+ return reason;
+ }
+
+ return new _util.UnknownErrorException(reason.message, reason.toString());
+}
+
+function resolveOrReject(capability, success, reason) {
+ if (success) {
+ capability.resolve();
+ } else {
+ capability.reject(reason);
+ }
+}
+
+function finalize(promise) {
+ return Promise.resolve(promise)["catch"](function () {});
+}
+
+function MessageHandler(sourceName, targetName, comObj) {
+ var _this = this;
+
+ this.sourceName = sourceName;
+ this.targetName = targetName;
+ this.comObj = comObj;
+ this.callbackId = 1;
+ this.streamId = 1;
+ this.postMessageTransfers = true;
+ this.streamSinks = Object.create(null);
+ this.streamControllers = Object.create(null);
+ var callbacksCapabilities = this.callbacksCapabilities = Object.create(null);
+ var ah = this.actionHandler = Object.create(null);
+
+ this._onComObjOnMessage = function (event) {
+ var data = event.data;
+
+ if (data.targetName !== _this.sourceName) {
+ return;
+ }
+
+ if (data.stream) {
+ _this._processStreamMessage(data);
+ } else if (data.isReply) {
+ var callbackId = data.callbackId;
+
+ if (data.callbackId in callbacksCapabilities) {
+ var callback = callbacksCapabilities[callbackId];
+ delete callbacksCapabilities[callbackId];
+
+ if ('error' in data) {
+ callback.reject(wrapReason(data.error));
+ } else {
+ callback.resolve(data.data);
+ }
+ } else {
+ throw new Error("Cannot resolve callback ".concat(callbackId));
+ }
+ } else if (data.action in ah) {
+ var action = ah[data.action];
+
+ if (data.callbackId) {
+ var _sourceName = _this.sourceName;
+ var _targetName = data.sourceName;
+ Promise.resolve().then(function () {
+ return action[0].call(action[1], data.data);
+ }).then(function (result) {
+ comObj.postMessage({
+ sourceName: _sourceName,
+ targetName: _targetName,
+ isReply: true,
+ callbackId: data.callbackId,
+ data: result
+ });
+ }, function (reason) {
+ comObj.postMessage({
+ sourceName: _sourceName,
+ targetName: _targetName,
+ isReply: true,
+ callbackId: data.callbackId,
+ error: makeReasonSerializable(reason)
+ });
+ });
+ } else if (data.streamId) {
+ _this._createStreamSink(data);
+ } else {
+ action[0].call(action[1], data.data);
+ }
+ } else {
+ throw new Error("Unknown action from worker: ".concat(data.action));
+ }
+ };
+
+ comObj.addEventListener('message', this._onComObjOnMessage);
+}
+
+MessageHandler.prototype = {
+ on: function on(actionName, handler, scope) {
+ var ah = this.actionHandler;
+
+ if (ah[actionName]) {
+ throw new Error("There is already an actionName called \"".concat(actionName, "\""));
+ }
+
+ ah[actionName] = [handler, scope];
+ },
+ send: function send(actionName, data, transfers) {
+ var message = {
+ sourceName: this.sourceName,
+ targetName: this.targetName,
+ action: actionName,
+ data: data
+ };
+ this.postMessage(message, transfers);
+ },
+ sendWithPromise: function sendWithPromise(actionName, data, transfers) {
+ var callbackId = this.callbackId++;
+ var message = {
+ sourceName: this.sourceName,
+ targetName: this.targetName,
+ action: actionName,
+ data: data,
+ callbackId: callbackId
+ };
+ var capability = (0, _util.createPromiseCapability)();
+ this.callbacksCapabilities[callbackId] = capability;
+
+ try {
+ this.postMessage(message, transfers);
+ } catch (e) {
+ capability.reject(e);
+ }
+
+ return capability.promise;
+ },
+ sendWithStream: function sendWithStream(actionName, data, queueingStrategy, transfers) {
+ var _this2 = this;
+
+ var streamId = this.streamId++;
+ var sourceName = this.sourceName;
+ var targetName = this.targetName;
+ return new _util.ReadableStream({
+ start: function start(controller) {
+ var startCapability = (0, _util.createPromiseCapability)();
+ _this2.streamControllers[streamId] = {
+ controller: controller,
+ startCall: startCapability,
+ isClosed: false
+ };
+
+ _this2.postMessage({
+ sourceName: sourceName,
+ targetName: targetName,
+ action: actionName,
+ streamId: streamId,
+ data: data,
+ desiredSize: controller.desiredSize
+ });
+
+ return startCapability.promise;
+ },
+ pull: function pull(controller) {
+ var pullCapability = (0, _util.createPromiseCapability)();
+ _this2.streamControllers[streamId].pullCall = pullCapability;
+
+ _this2.postMessage({
+ sourceName: sourceName,
+ targetName: targetName,
+ stream: 'pull',
+ streamId: streamId,
+ desiredSize: controller.desiredSize
+ });
+
+ return pullCapability.promise;
+ },
+ cancel: function cancel(reason) {
+ var cancelCapability = (0, _util.createPromiseCapability)();
+ _this2.streamControllers[streamId].cancelCall = cancelCapability;
+ _this2.streamControllers[streamId].isClosed = true;
+
+ _this2.postMessage({
+ sourceName: sourceName,
+ targetName: targetName,
+ stream: 'cancel',
+ reason: reason,
+ streamId: streamId
+ });
+
+ return cancelCapability.promise;
+ }
+ }, queueingStrategy);
+ },
+ _createStreamSink: function _createStreamSink(data) {
+ var _this3 = this;
+
+ var self = this;
+ var action = this.actionHandler[data.action];
+ var streamId = data.streamId;
+ var desiredSize = data.desiredSize;
+ var sourceName = this.sourceName;
+ var targetName = data.sourceName;
+ var capability = (0, _util.createPromiseCapability)();
+
+ var sendStreamRequest = function sendStreamRequest(_ref) {
+ var stream = _ref.stream,
+ chunk = _ref.chunk,
+ transfers = _ref.transfers,
+ success = _ref.success,
+ reason = _ref.reason;
+
+ _this3.postMessage({
+ sourceName: sourceName,
+ targetName: targetName,
+ stream: stream,
+ streamId: streamId,
+ chunk: chunk,
+ success: success,
+ reason: reason
+ }, transfers);
+ };
+
+ var streamSink = {
+ enqueue: function enqueue(chunk) {
+ var size = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 1;
+ var transfers = arguments.length > 2 ? arguments[2] : undefined;
+
+ if (this.isCancelled) {
+ return;
+ }
+
+ var lastDesiredSize = this.desiredSize;
+ this.desiredSize -= size;
+
+ if (lastDesiredSize > 0 && this.desiredSize <= 0) {
+ this.sinkCapability = (0, _util.createPromiseCapability)();
+ this.ready = this.sinkCapability.promise;
+ }
+
+ sendStreamRequest({
+ stream: 'enqueue',
+ chunk: chunk,
+ transfers: transfers
+ });
+ },
+ close: function close() {
+ if (this.isCancelled) {
+ return;
+ }
+
+ this.isCancelled = true;
+ sendStreamRequest({
+ stream: 'close'
+ });
+ delete self.streamSinks[streamId];
+ },
+ error: function error(reason) {
+ if (this.isCancelled) {
+ return;
+ }
+
+ this.isCancelled = true;
+ sendStreamRequest({
+ stream: 'error',
+ reason: reason
+ });
+ },
+ sinkCapability: capability,
+ onPull: null,
+ onCancel: null,
+ isCancelled: false,
+ desiredSize: desiredSize,
+ ready: null
+ };
+ streamSink.sinkCapability.resolve();
+ streamSink.ready = streamSink.sinkCapability.promise;
+ this.streamSinks[streamId] = streamSink;
+ resolveCall(action[0], [data.data, streamSink], action[1]).then(function () {
+ sendStreamRequest({
+ stream: 'start_complete',
+ success: true
+ });
+ }, function (reason) {
+ sendStreamRequest({
+ stream: 'start_complete',
+ success: false,
+ reason: reason
+ });
+ });
+ },
+ _processStreamMessage: function _processStreamMessage(data) {
+ var _this4 = this;
+
+ var sourceName = this.sourceName;
+ var targetName = data.sourceName;
+ var streamId = data.streamId;
+
+ var sendStreamResponse = function sendStreamResponse(_ref2) {
+ var stream = _ref2.stream,
+ success = _ref2.success,
+ reason = _ref2.reason;
+
+ _this4.comObj.postMessage({
+ sourceName: sourceName,
+ targetName: targetName,
+ stream: stream,
+ success: success,
+ streamId: streamId,
+ reason: reason
+ });
+ };
+
+ var deleteStreamController = function deleteStreamController() {
+ Promise.all([_this4.streamControllers[data.streamId].startCall, _this4.streamControllers[data.streamId].pullCall, _this4.streamControllers[data.streamId].cancelCall].map(function (capability) {
+ return capability && finalize(capability.promise);
+ })).then(function () {
+ delete _this4.streamControllers[data.streamId];
+ });
+ };
+
+ switch (data.stream) {
+ case 'start_complete':
+ resolveOrReject(this.streamControllers[data.streamId].startCall, data.success, wrapReason(data.reason));
+ break;
+
+ case 'pull_complete':
+ resolveOrReject(this.streamControllers[data.streamId].pullCall, data.success, wrapReason(data.reason));
+ break;
+
+ case 'pull':
+ if (!this.streamSinks[data.streamId]) {
+ sendStreamResponse({
+ stream: 'pull_complete',
+ success: true
+ });
+ break;
+ }
+
+ if (this.streamSinks[data.streamId].desiredSize <= 0 && data.desiredSize > 0) {
+ this.streamSinks[data.streamId].sinkCapability.resolve();
+ }
+
+ this.streamSinks[data.streamId].desiredSize = data.desiredSize;
+ resolveCall(this.streamSinks[data.streamId].onPull).then(function () {
+ sendStreamResponse({
+ stream: 'pull_complete',
+ success: true
+ });
+ }, function (reason) {
+ sendStreamResponse({
+ stream: 'pull_complete',
+ success: false,
+ reason: reason
+ });
+ });
+ break;
+
+ case 'enqueue':
+ (0, _util.assert)(this.streamControllers[data.streamId], 'enqueue should have stream controller');
+
+ if (!this.streamControllers[data.streamId].isClosed) {
+ this.streamControllers[data.streamId].controller.enqueue(data.chunk);
+ }
+
+ break;
+
+ case 'close':
+ (0, _util.assert)(this.streamControllers[data.streamId], 'close should have stream controller');
+
+ if (this.streamControllers[data.streamId].isClosed) {
+ break;
+ }
+
+ this.streamControllers[data.streamId].isClosed = true;
+ this.streamControllers[data.streamId].controller.close();
+ deleteStreamController();
+ break;
+
+ case 'error':
+ (0, _util.assert)(this.streamControllers[data.streamId], 'error should have stream controller');
+ this.streamControllers[data.streamId].controller.error(wrapReason(data.reason));
+ deleteStreamController();
+ break;
+
+ case 'cancel_complete':
+ resolveOrReject(this.streamControllers[data.streamId].cancelCall, data.success, wrapReason(data.reason));
+ deleteStreamController();
+ break;
+
+ case 'cancel':
+ if (!this.streamSinks[data.streamId]) {
+ break;
+ }
+
+ resolveCall(this.streamSinks[data.streamId].onCancel, [wrapReason(data.reason)]).then(function () {
+ sendStreamResponse({
+ stream: 'cancel_complete',
+ success: true
+ });
+ }, function (reason) {
+ sendStreamResponse({
+ stream: 'cancel_complete',
+ success: false,
+ reason: reason
+ });
+ });
+ this.streamSinks[data.streamId].sinkCapability.reject(wrapReason(data.reason));
+ this.streamSinks[data.streamId].isCancelled = true;
+ delete this.streamSinks[data.streamId];
+ break;
+
+ default:
+ throw new Error('Unexpected stream case');
+ }
+ },
+ postMessage: function postMessage(message, transfers) {
+ if (transfers && this.postMessageTransfers) {
+ this.comObj.postMessage(message, transfers);
+ } else {
+ this.comObj.postMessage(message);
+ }
+ },
+ destroy: function destroy() {
+ this.comObj.removeEventListener('message', this._onComObjOnMessage);
+ }
+};
+
+/***/ }),
+/* 158 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.Metadata = void 0;
+
+var _util = __w_pdfjs_require__(1);
+
+var _xml_parser = __w_pdfjs_require__(159);
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var Metadata =
+/*#__PURE__*/
+function () {
+ function Metadata(data) {
+ _classCallCheck(this, Metadata);
+
+ (0, _util.assert)(typeof data === 'string', 'Metadata: input is not a string');
+ data = this._repair(data);
+ var parser = new _xml_parser.SimpleXMLParser();
+ var xmlDocument = parser.parseFromString(data);
+ this._metadata = Object.create(null);
+
+ if (xmlDocument) {
+ this._parse(xmlDocument);
+ }
+ }
+
+ _createClass(Metadata, [{
+ key: "_repair",
+ value: function _repair(data) {
+ return data.replace(/^([^<]+)/, '').replace(/>\\376\\377([^<]+)/g, function (all, codes) {
+ var bytes = codes.replace(/\\([0-3])([0-7])([0-7])/g, function (code, d1, d2, d3) {
+ return String.fromCharCode(d1 * 64 + d2 * 8 + d3 * 1);
+ }).replace(/&(amp|apos|gt|lt|quot);/g, function (str, name) {
+ switch (name) {
+ case 'amp':
+ return '&';
+
+ case 'apos':
+ return '\'';
+
+ case 'gt':
+ return '>';
+
+ case 'lt':
+ return '<';
+
+ case 'quot':
+ return '\"';
+ }
+
+ throw new Error("_repair: ".concat(name, " isn't defined."));
+ });
+ var chars = '';
+
+ for (var i = 0, ii = bytes.length; i < ii; i += 2) {
+ var code = bytes.charCodeAt(i) * 256 + bytes.charCodeAt(i + 1);
+
+ if (code >= 32 && code < 127 && code !== 60 && code !== 62 && code !== 38) {
+ chars += String.fromCharCode(code);
+ } else {
+ chars += '&#x' + (0x10000 + code).toString(16).substring(1) + ';';
+ }
+ }
+
+ return '>' + chars;
+ });
+ }
+ }, {
+ key: "_parse",
+ value: function _parse(xmlDocument) {
+ var rdf = xmlDocument.documentElement;
+
+ if (rdf.nodeName.toLowerCase() !== 'rdf:rdf') {
+ rdf = rdf.firstChild;
+
+ while (rdf && rdf.nodeName.toLowerCase() !== 'rdf:rdf') {
+ rdf = rdf.nextSibling;
+ }
+ }
+
+ var nodeName = rdf ? rdf.nodeName.toLowerCase() : null;
+
+ if (!rdf || nodeName !== 'rdf:rdf' || !rdf.hasChildNodes()) {
+ return;
+ }
+
+ var children = rdf.childNodes;
+
+ for (var i = 0, ii = children.length; i < ii; i++) {
+ var desc = children[i];
+
+ if (desc.nodeName.toLowerCase() !== 'rdf:description') {
+ continue;
+ }
+
+ for (var j = 0, jj = desc.childNodes.length; j < jj; j++) {
+ if (desc.childNodes[j].nodeName.toLowerCase() !== '#text') {
+ var entry = desc.childNodes[j];
+ var name = entry.nodeName.toLowerCase();
+ this._metadata[name] = entry.textContent.trim();
+ }
+ }
+ }
+ }
+ }, {
+ key: "get",
+ value: function get(name) {
+ var data = this._metadata[name];
+ return typeof data !== 'undefined' ? data : null;
+ }
+ }, {
+ key: "getAll",
+ value: function getAll() {
+ return this._metadata;
+ }
+ }, {
+ key: "has",
+ value: function has(name) {
+ return typeof this._metadata[name] !== 'undefined';
+ }
+ }]);
+
+ return Metadata;
+}();
+
+exports.Metadata = Metadata;
+
+/***/ }),
+/* 159 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.SimpleXMLParser = void 0;
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
+
+function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
+
+function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
+
+function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
+
+function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _get(target, property, receiver) { if (typeof Reflect !== "undefined" && Reflect.get) { _get = Reflect.get; } else { _get = function _get(target, property, receiver) { var base = _superPropBase(target, property); if (!base) return; var desc = Object.getOwnPropertyDescriptor(base, property); if (desc.get) { return desc.get.call(receiver); } return desc.value; }; } return _get(target, property, receiver || target); }
+
+function _superPropBase(object, property) { while (!Object.prototype.hasOwnProperty.call(object, property)) { object = _getPrototypeOf(object); if (object === null) break; } return object; }
+
+function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
+
+function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
+
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var XMLParserErrorCode = {
+ NoError: 0,
+ EndOfDocument: -1,
+ UnterminatedCdat: -2,
+ UnterminatedXmlDeclaration: -3,
+ UnterminatedDoctypeDeclaration: -4,
+ UnterminatedComment: -5,
+ MalformedElement: -6,
+ OutOfMemory: -7,
+ UnterminatedAttributeValue: -8,
+ UnterminatedElement: -9,
+ ElementNeverBegun: -10
+};
+
+function isWhitespace(s, index) {
+ var ch = s[index];
+ return ch === ' ' || ch === '\n' || ch === '\r' || ch === '\t';
+}
+
+function isWhitespaceString(s) {
+ for (var i = 0, ii = s.length; i < ii; i++) {
+ if (!isWhitespace(s, i)) {
+ return false;
+ }
+ }
+
+ return true;
+}
+
+var XMLParserBase =
+/*#__PURE__*/
+function () {
+ function XMLParserBase() {
+ _classCallCheck(this, XMLParserBase);
+ }
+
+ _createClass(XMLParserBase, [{
+ key: "_resolveEntities",
+ value: function _resolveEntities(s) {
+ var _this = this;
+
+ return s.replace(/&([^;]+);/g, function (all, entity) {
+ if (entity.substring(0, 2) === '#x') {
+ return String.fromCharCode(parseInt(entity.substring(2), 16));
+ } else if (entity.substring(0, 1) === '#') {
+ return String.fromCharCode(parseInt(entity.substring(1), 10));
+ }
+
+ switch (entity) {
+ case 'lt':
+ return '<';
+
+ case 'gt':
+ return '>';
+
+ case 'amp':
+ return '&';
+
+ case 'quot':
+ return '\"';
+ }
+
+ return _this.onResolveEntity(entity);
+ });
+ }
+ }, {
+ key: "_parseContent",
+ value: function _parseContent(s, start) {
+ var pos = start,
+ name,
+ attributes = [];
+
+ function skipWs() {
+ while (pos < s.length && isWhitespace(s, pos)) {
+ ++pos;
+ }
+ }
+
+ while (pos < s.length && !isWhitespace(s, pos) && s[pos] !== '>' && s[pos] !== '/') {
+ ++pos;
+ }
+
+ name = s.substring(start, pos);
+ skipWs();
+
+ while (pos < s.length && s[pos] !== '>' && s[pos] !== '/' && s[pos] !== '?') {
+ skipWs();
+ var attrName = '',
+ attrValue = '';
+
+ while (pos < s.length && !isWhitespace(s, pos) && s[pos] !== '=') {
+ attrName += s[pos];
+ ++pos;
+ }
+
+ skipWs();
+
+ if (s[pos] !== '=') {
+ return null;
+ }
+
+ ++pos;
+ skipWs();
+ var attrEndChar = s[pos];
+
+ if (attrEndChar !== '\"' && attrEndChar !== '\'') {
+ return null;
+ }
+
+ var attrEndIndex = s.indexOf(attrEndChar, ++pos);
+
+ if (attrEndIndex < 0) {
+ return null;
+ }
+
+ attrValue = s.substring(pos, attrEndIndex);
+ attributes.push({
+ name: attrName,
+ value: this._resolveEntities(attrValue)
+ });
+ pos = attrEndIndex + 1;
+ skipWs();
+ }
+
+ return {
+ name: name,
+ attributes: attributes,
+ parsed: pos - start
+ };
+ }
+ }, {
+ key: "_parseProcessingInstruction",
+ value: function _parseProcessingInstruction(s, start) {
+ var pos = start,
+ name,
+ value;
+
+ function skipWs() {
+ while (pos < s.length && isWhitespace(s, pos)) {
+ ++pos;
+ }
+ }
+
+ while (pos < s.length && !isWhitespace(s, pos) && s[pos] !== '>' && s[pos] !== '/') {
+ ++pos;
+ }
+
+ name = s.substring(start, pos);
+ skipWs();
+ var attrStart = pos;
+
+ while (pos < s.length && (s[pos] !== '?' || s[pos + 1] !== '>')) {
+ ++pos;
+ }
+
+ value = s.substring(attrStart, pos);
+ return {
+ name: name,
+ value: value,
+ parsed: pos - start
+ };
+ }
+ }, {
+ key: "parseXml",
+ value: function parseXml(s) {
+ var i = 0;
+
+ while (i < s.length) {
+ var ch = s[i];
+ var j = i;
+
+ if (ch === '<') {
+ ++j;
+ var ch2 = s[j];
+ var q = void 0;
+
+ switch (ch2) {
+ case '/':
+ ++j;
+ q = s.indexOf('>', j);
+
+ if (q < 0) {
+ this.onError(XMLParserErrorCode.UnterminatedElement);
+ return;
+ }
+
+ this.onEndElement(s.substring(j, q));
+ j = q + 1;
+ break;
+
+ case '?':
+ ++j;
+
+ var pi = this._parseProcessingInstruction(s, j);
+
+ if (s.substring(j + pi.parsed, j + pi.parsed + 2) !== '?>') {
+ this.onError(XMLParserErrorCode.UnterminatedXmlDeclaration);
+ return;
+ }
+
+ this.onPi(pi.name, pi.value);
+ j += pi.parsed + 2;
+ break;
+
+ case '!':
+ if (s.substring(j + 1, j + 3) === '--') {
+ q = s.indexOf('-->', j + 3);
+
+ if (q < 0) {
+ this.onError(XMLParserErrorCode.UnterminatedComment);
+ return;
+ }
+
+ this.onComment(s.substring(j + 3, q));
+ j = q + 3;
+ } else if (s.substring(j + 1, j + 8) === '[CDATA[') {
+ q = s.indexOf(']]>', j + 8);
+
+ if (q < 0) {
+ this.onError(XMLParserErrorCode.UnterminatedCdat);
+ return;
+ }
+
+ this.onCdata(s.substring(j + 8, q));
+ j = q + 3;
+ } else if (s.substring(j + 1, j + 8) === 'DOCTYPE') {
+ var q2 = s.indexOf('[', j + 8);
+ var complexDoctype = false;
+ q = s.indexOf('>', j + 8);
+
+ if (q < 0) {
+ this.onError(XMLParserErrorCode.UnterminatedDoctypeDeclaration);
+ return;
+ }
+
+ if (q2 > 0 && q > q2) {
+ q = s.indexOf(']>', j + 8);
+
+ if (q < 0) {
+ this.onError(XMLParserErrorCode.UnterminatedDoctypeDeclaration);
+ return;
+ }
+
+ complexDoctype = true;
+ }
+
+ var doctypeContent = s.substring(j + 8, q + (complexDoctype ? 1 : 0));
+ this.onDoctype(doctypeContent);
+ j = q + (complexDoctype ? 2 : 1);
+ } else {
+ this.onError(XMLParserErrorCode.MalformedElement);
+ return;
+ }
+
+ break;
+
+ default:
+ var content = this._parseContent(s, j);
+
+ if (content === null) {
+ this.onError(XMLParserErrorCode.MalformedElement);
+ return;
+ }
+
+ var isClosed = false;
+
+ if (s.substring(j + content.parsed, j + content.parsed + 2) === '/>') {
+ isClosed = true;
+ } else if (s.substring(j + content.parsed, j + content.parsed + 1) !== '>') {
+ this.onError(XMLParserErrorCode.UnterminatedElement);
+ return;
+ }
+
+ this.onBeginElement(content.name, content.attributes, isClosed);
+ j += content.parsed + (isClosed ? 2 : 1);
+ break;
+ }
+ } else {
+ while (j < s.length && s[j] !== '<') {
+ j++;
+ }
+
+ var text = s.substring(i, j);
+ this.onText(this._resolveEntities(text));
+ }
+
+ i = j;
+ }
+ }
+ }, {
+ key: "onResolveEntity",
+ value: function onResolveEntity(name) {
+ return "&".concat(name, ";");
+ }
+ }, {
+ key: "onPi",
+ value: function onPi(name, value) {}
+ }, {
+ key: "onComment",
+ value: function onComment(text) {}
+ }, {
+ key: "onCdata",
+ value: function onCdata(text) {}
+ }, {
+ key: "onDoctype",
+ value: function onDoctype(doctypeContent) {}
+ }, {
+ key: "onText",
+ value: function onText(text) {}
+ }, {
+ key: "onBeginElement",
+ value: function onBeginElement(name, attributes, isEmpty) {}
+ }, {
+ key: "onEndElement",
+ value: function onEndElement(name) {}
+ }, {
+ key: "onError",
+ value: function onError(code) {}
+ }]);
+
+ return XMLParserBase;
+}();
+
+var SimpleDOMNode =
+/*#__PURE__*/
+function () {
+ function SimpleDOMNode(nodeName, nodeValue) {
+ _classCallCheck(this, SimpleDOMNode);
+
+ this.nodeName = nodeName;
+ this.nodeValue = nodeValue;
+ Object.defineProperty(this, 'parentNode', {
+ value: null,
+ writable: true
+ });
+ }
+
+ _createClass(SimpleDOMNode, [{
+ key: "hasChildNodes",
+ value: function hasChildNodes() {
+ return this.childNodes && this.childNodes.length > 0;
+ }
+ }, {
+ key: "firstChild",
+ get: function get() {
+ return this.childNodes && this.childNodes[0];
+ }
+ }, {
+ key: "nextSibling",
+ get: function get() {
+ var childNodes = this.parentNode.childNodes;
+
+ if (!childNodes) {
+ return undefined;
+ }
+
+ var index = childNodes.indexOf(this);
+
+ if (index === -1) {
+ return undefined;
+ }
+
+ return childNodes[index + 1];
+ }
+ }, {
+ key: "textContent",
+ get: function get() {
+ if (!this.childNodes) {
+ return this.nodeValue || '';
+ }
+
+ return this.childNodes.map(function (child) {
+ return child.textContent;
+ }).join('');
+ }
+ }]);
+
+ return SimpleDOMNode;
+}();
+
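+// SimpleXMLParser extends XMLParserBase and assembles the parser callbacks
+// into a tree of SimpleDOMNodes; parseFromString() returns { documentElement },
+// or undefined when a parse error occurred or no root element was produced.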
+var SimpleXMLParser =
+/*#__PURE__*/
+function (_XMLParserBase) {
+ _inherits(SimpleXMLParser, _XMLParserBase);
+
+ function SimpleXMLParser() {
+ var _this2;
+
+ _classCallCheck(this, SimpleXMLParser);
+
+ _this2 = _possibleConstructorReturn(this, _getPrototypeOf(SimpleXMLParser).call(this));
+ _this2._currentFragment = null;
+ _this2._stack = null;
+ _this2._errorCode = XMLParserErrorCode.NoError;
+ return _this2;
+ }
+
+ _createClass(SimpleXMLParser, [{
+ key: "parseFromString",
+ value: function parseFromString(data) {
+ this._currentFragment = [];
+ this._stack = [];
+ this._errorCode = XMLParserErrorCode.NoError;
+ this.parseXml(data);
+
+ if (this._errorCode !== XMLParserErrorCode.NoError) {
+ return undefined;
+ }
+
+ var _this$_currentFragmen = _slicedToArray(this._currentFragment, 1),
+ documentElement = _this$_currentFragmen[0];
+
+ if (!documentElement) {
+ return undefined;
+ }
+
+ return {
+ documentElement: documentElement
+ };
+ }
+ }, {
+ key: "onResolveEntity",
+ value: function onResolveEntity(name) {
+ switch (name) {
+ case 'apos':
+ return '\'';
+ }
+
+ return _get(_getPrototypeOf(SimpleXMLParser.prototype), "onResolveEntity", this).call(this, name);
+ }
+ }, {
+ key: "onText",
+ value: function onText(text) {
+ if (isWhitespaceString(text)) {
+ return;
+ }
+
+ var node = new SimpleDOMNode('#text', text);
+
+ this._currentFragment.push(node);
+ }
+ }, {
+ key: "onCdata",
+ value: function onCdata(text) {
+ var node = new SimpleDOMNode('#text', text);
+
+ this._currentFragment.push(node);
+ }
+ }, {
+ key: "onBeginElement",
+ value: function onBeginElement(name, attributes, isEmpty) {
+ var node = new SimpleDOMNode(name);
+ node.childNodes = [];
+
+ this._currentFragment.push(node);
+
+ if (isEmpty) {
+ return;
+ }
+
+ this._stack.push(this._currentFragment);
+
+ this._currentFragment = node.childNodes;
+ }
+ }, {
+ key: "onEndElement",
+ value: function onEndElement(name) {
+ this._currentFragment = this._stack.pop() || [];
+ var lastElement = this._currentFragment[this._currentFragment.length - 1];
+
+ if (!lastElement) {
+ return;
+ }
+
+ for (var i = 0, ii = lastElement.childNodes.length; i < ii; i++) {
+ lastElement.childNodes[i].parentNode = lastElement;
+ }
+ }
+ }, {
+ key: "onError",
+ value: function onError(code) {
+ this._errorCode = code;
+ }
+ }]);
+
+ return SimpleXMLParser;
+}(XMLParserBase);
+
+exports.SimpleXMLParser = SimpleXMLParser;
+
+/***/ }),
+/* 160 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.PDFDataTransportStream = void 0;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(148));
+
+var _util = __w_pdfjs_require__(1);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
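+// PDFDataTransportStream adapts a PDFDataRangeTransport (its range, progress
+// and progressive-read listeners) to the reader-based stream interface:
+// getFullReader() serves the whole document, getRangeReader() serves byte
+// ranges that progressive loading has not already covered.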
+var PDFDataTransportStream =
+/*#__PURE__*/
+function () {
+ function PDFDataTransportStream(params, pdfDataRangeTransport) {
+ var _this = this;
+
+ _classCallCheck(this, PDFDataTransportStream);
+
+ (0, _util.assert)(pdfDataRangeTransport);
+ this._queuedChunks = [];
+ this._progressiveDone = params.progressiveDone || false;
+ var initialData = params.initialData;
+
+ if (initialData && initialData.length > 0) {
+ var buffer = new Uint8Array(initialData).buffer;
+
+ this._queuedChunks.push(buffer);
+ }
+
+ this._pdfDataRangeTransport = pdfDataRangeTransport;
+ this._isStreamingSupported = !params.disableStream;
+ this._isRangeSupported = !params.disableRange;
+ this._contentLength = params.length;
+ this._fullRequestReader = null;
+ this._rangeReaders = [];
+
+ this._pdfDataRangeTransport.addRangeListener(function (begin, chunk) {
+ _this._onReceiveData({
+ begin: begin,
+ chunk: chunk
+ });
+ });
+
+ this._pdfDataRangeTransport.addProgressListener(function (loaded, total) {
+ _this._onProgress({
+ loaded: loaded,
+ total: total
+ });
+ });
+
+ this._pdfDataRangeTransport.addProgressiveReadListener(function (chunk) {
+ _this._onReceiveData({
+ chunk: chunk
+ });
+ });
+
+ this._pdfDataRangeTransport.addProgressiveDoneListener(function () {
+ _this._onProgressiveDone();
+ });
+
+ this._pdfDataRangeTransport.transportReady();
+ }
+
+ _createClass(PDFDataTransportStream, [{
+ key: "_onReceiveData",
+ value: function _onReceiveData(args) {
+ var buffer = new Uint8Array(args.chunk).buffer;
+
+ if (args.begin === undefined) {
+ if (this._fullRequestReader) {
+ this._fullRequestReader._enqueue(buffer);
+ } else {
+ this._queuedChunks.push(buffer);
+ }
+ } else {
+ var found = this._rangeReaders.some(function (rangeReader) {
+ if (rangeReader._begin !== args.begin) {
+ return false;
+ }
+
+ rangeReader._enqueue(buffer);
+
+ return true;
+ });
+
+ (0, _util.assert)(found);
+ }
+ }
+ }, {
+ key: "_onProgress",
+ value: function _onProgress(evt) {
+ if (evt.total === undefined) {
+ var firstReader = this._rangeReaders[0];
+
+ if (firstReader && firstReader.onProgress) {
+ firstReader.onProgress({
+ loaded: evt.loaded
+ });
+ }
+ } else {
+ var fullReader = this._fullRequestReader;
+
+ if (fullReader && fullReader.onProgress) {
+ fullReader.onProgress({
+ loaded: evt.loaded,
+ total: evt.total
+ });
+ }
+ }
+ }
+ }, {
+ key: "_onProgressiveDone",
+ value: function _onProgressiveDone() {
+ if (this._fullRequestReader) {
+ this._fullRequestReader.progressiveDone();
+ }
+
+ this._progressiveDone = true;
+ }
+ }, {
+ key: "_removeRangeReader",
+ value: function _removeRangeReader(reader) {
+ var i = this._rangeReaders.indexOf(reader);
+
+ if (i >= 0) {
+ this._rangeReaders.splice(i, 1);
+ }
+ }
+ }, {
+ key: "getFullReader",
+ value: function getFullReader() {
+ (0, _util.assert)(!this._fullRequestReader);
+ var queuedChunks = this._queuedChunks;
+ this._queuedChunks = null;
+ return new PDFDataTransportStreamReader(this, queuedChunks, this._progressiveDone);
+ }
+ }, {
+ key: "getRangeReader",
+ value: function getRangeReader(begin, end) {
+ if (end <= this._progressiveDataLength) {
+ return null;
+ }
+
+ var reader = new PDFDataTransportStreamRangeReader(this, begin, end);
+
+ this._pdfDataRangeTransport.requestDataRange(begin, end);
+
+ this._rangeReaders.push(reader);
+
+ return reader;
+ }
+ }, {
+ key: "cancelAllRequests",
+ value: function cancelAllRequests(reason) {
+ if (this._fullRequestReader) {
+ this._fullRequestReader.cancel(reason);
+ }
+
+ var readers = this._rangeReaders.slice(0);
+
+ readers.forEach(function (rangeReader) {
+ rangeReader.cancel(reason);
+ });
+
+ this._pdfDataRangeTransport.abort();
+ }
+ }, {
+ key: "_progressiveDataLength",
+ get: function get() {
+ return this._fullRequestReader ? this._fullRequestReader._loaded : 0;
+ }
+ }]);
+
+ return PDFDataTransportStream;
+}();
+
+exports.PDFDataTransportStream = PDFDataTransportStream;
+
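+// Full-request reader: _enqueue() either resolves a pending read() or queues
+// the chunk; read() drains queued chunks first and otherwise hands back a
+// promise that settles when the next chunk arrives or the stream is done.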
+var PDFDataTransportStreamReader =
+/*#__PURE__*/
+function () {
+ function PDFDataTransportStreamReader(stream, queuedChunks) {
+ var progressiveDone = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
+
+ _classCallCheck(this, PDFDataTransportStreamReader);
+
+ this._stream = stream;
+ this._done = progressiveDone || false;
+ this._filename = null;
+ this._queuedChunks = queuedChunks || [];
+ this._loaded = 0;
+ var _iteratorNormalCompletion = true;
+ var _didIteratorError = false;
+ var _iteratorError = undefined;
+
+ try {
+ for (var _iterator = this._queuedChunks[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
+ var chunk = _step.value;
+ this._loaded += chunk.byteLength;
+ }
+ } catch (err) {
+ _didIteratorError = true;
+ _iteratorError = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion && _iterator["return"] != null) {
+ _iterator["return"]();
+ }
+ } finally {
+ if (_didIteratorError) {
+ throw _iteratorError;
+ }
+ }
+ }
+
+ this._requests = [];
+ this._headersReady = Promise.resolve();
+ stream._fullRequestReader = this;
+ this.onProgress = null;
+ }
+
+ _createClass(PDFDataTransportStreamReader, [{
+ key: "_enqueue",
+ value: function _enqueue(chunk) {
+ if (this._done) {
+ return;
+ }
+
+ if (this._requests.length > 0) {
+ var requestCapability = this._requests.shift();
+
+ requestCapability.resolve({
+ value: chunk,
+ done: false
+ });
+ } else {
+ this._queuedChunks.push(chunk);
+ }
+
+ this._loaded += chunk.byteLength;
+ }
+ }, {
+ key: "read",
+ value: function () {
+ var _read = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee() {
+ var chunk, requestCapability;
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ if (!(this._queuedChunks.length > 0)) {
+ _context.next = 3;
+ break;
+ }
+
+ chunk = this._queuedChunks.shift();
+ return _context.abrupt("return", {
+ value: chunk,
+ done: false
+ });
+
+ case 3:
+ if (!this._done) {
+ _context.next = 5;
+ break;
+ }
+
+ return _context.abrupt("return", {
+ value: undefined,
+ done: true
+ });
+
+ case 5:
+ requestCapability = (0, _util.createPromiseCapability)();
+
+ this._requests.push(requestCapability);
+
+ return _context.abrupt("return", requestCapability.promise);
+
+ case 8:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, this);
+ }));
+
+ function read() {
+ return _read.apply(this, arguments);
+ }
+
+ return read;
+ }()
+ }, {
+ key: "cancel",
+ value: function cancel(reason) {
+ this._done = true;
+
+ this._requests.forEach(function (requestCapability) {
+ requestCapability.resolve({
+ value: undefined,
+ done: true
+ });
+ });
+
+ this._requests = [];
+ }
+ }, {
+ key: "progressiveDone",
+ value: function progressiveDone() {
+ if (this._done) {
+ return;
+ }
+
+ this._done = true;
+ }
+ }, {
+ key: "headersReady",
+ get: function get() {
+ return this._headersReady;
+ }
+ }, {
+ key: "filename",
+ get: function get() {
+ return this._filename;
+ }
+ }, {
+ key: "isRangeSupported",
+ get: function get() {
+ return this._stream._isRangeSupported;
+ }
+ }, {
+ key: "isStreamingSupported",
+ get: function get() {
+ return this._stream._isStreamingSupported;
+ }
+ }, {
+ key: "contentLength",
+ get: function get() {
+ return this._stream._contentLength;
+ }
+ }]);
+
+ return PDFDataTransportStreamReader;
+}();
+
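+// Range reader: it expects a single chunk for its [begin, end) range; once
+// that chunk is delivered (or the reader is cancelled) it marks itself done
+// and unregisters from the parent stream.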
+var PDFDataTransportStreamRangeReader =
+/*#__PURE__*/
+function () {
+ function PDFDataTransportStreamRangeReader(stream, begin, end) {
+ _classCallCheck(this, PDFDataTransportStreamRangeReader);
+
+ this._stream = stream;
+ this._begin = begin;
+ this._end = end;
+ this._queuedChunk = null;
+ this._requests = [];
+ this._done = false;
+ this.onProgress = null;
+ }
+
+ _createClass(PDFDataTransportStreamRangeReader, [{
+ key: "_enqueue",
+ value: function _enqueue(chunk) {
+ if (this._done) {
+ return;
+ }
+
+ if (this._requests.length === 0) {
+ this._queuedChunk = chunk;
+ } else {
+ var requestsCapability = this._requests.shift();
+
+ requestsCapability.resolve({
+ value: chunk,
+ done: false
+ });
+
+ this._requests.forEach(function (requestCapability) {
+ requestCapability.resolve({
+ value: undefined,
+ done: true
+ });
+ });
+
+ this._requests = [];
+ }
+
+ this._done = true;
+
+ this._stream._removeRangeReader(this);
+ }
+ }, {
+ key: "read",
+ value: function () {
+ var _read2 = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee2() {
+ var chunk, requestCapability;
+ return _regenerator["default"].wrap(function _callee2$(_context2) {
+ while (1) {
+ switch (_context2.prev = _context2.next) {
+ case 0:
+ if (!this._queuedChunk) {
+ _context2.next = 4;
+ break;
+ }
+
+ chunk = this._queuedChunk;
+ this._queuedChunk = null;
+ return _context2.abrupt("return", {
+ value: chunk,
+ done: false
+ });
+
+ case 4:
+ if (!this._done) {
+ _context2.next = 6;
+ break;
+ }
+
+ return _context2.abrupt("return", {
+ value: undefined,
+ done: true
+ });
+
+ case 6:
+ requestCapability = (0, _util.createPromiseCapability)();
+
+ this._requests.push(requestCapability);
+
+ return _context2.abrupt("return", requestCapability.promise);
+
+ case 9:
+ case "end":
+ return _context2.stop();
+ }
+ }
+ }, _callee2, this);
+ }));
+
+ function read() {
+ return _read2.apply(this, arguments);
+ }
+
+ return read;
+ }()
+ }, {
+ key: "cancel",
+ value: function cancel(reason) {
+ this._done = true;
+
+ this._requests.forEach(function (requestCapability) {
+ requestCapability.resolve({
+ value: undefined,
+ done: true
+ });
+ });
+
+ this._requests = [];
+
+ this._stream._removeRangeReader(this);
+ }
+ }, {
+ key: "isStreamingSupported",
+ get: function get() {
+ return false;
+ }
+ }]);
+
+ return PDFDataTransportStreamRangeReader;
+}();
+
+/***/ }),
+/* 161 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.WebGLContext = void 0;
+
+var _util = __w_pdfjs_require__(1);
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
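+// WebGLContext is a thin wrapper around WebGLUtils below; isEnabled is true
+// only when the `enable` option was set and a WebGL context can actually be
+// created, and the result is cached with shadow().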
+var WebGLContext =
+/*#__PURE__*/
+function () {
+ function WebGLContext(_ref) {
+ var _ref$enable = _ref.enable,
+ enable = _ref$enable === void 0 ? false : _ref$enable;
+
+ _classCallCheck(this, WebGLContext);
+
+ this._enabled = enable === true;
+ }
+
+ _createClass(WebGLContext, [{
+ key: "composeSMask",
+ value: function composeSMask(_ref2) {
+ var layer = _ref2.layer,
+ mask = _ref2.mask,
+ properties = _ref2.properties;
+ return WebGLUtils.composeSMask(layer, mask, properties);
+ }
+ }, {
+ key: "drawFigures",
+ value: function drawFigures(_ref3) {
+ var width = _ref3.width,
+ height = _ref3.height,
+ backgroundColor = _ref3.backgroundColor,
+ figures = _ref3.figures,
+ context = _ref3.context;
+ return WebGLUtils.drawFigures(width, height, backgroundColor, figures, context);
+ }
+ }, {
+ key: "clear",
+ value: function clear() {
+ WebGLUtils.cleanup();
+ }
+ }, {
+ key: "isEnabled",
+ get: function get() {
+ var enabled = this._enabled;
+
+ if (enabled) {
+ enabled = WebGLUtils.tryInitGL();
+ }
+
+ return (0, _util.shadow)(this, 'isEnabled', enabled);
+ }
+ }]);
+
+ return WebGLContext;
+}();
+
+exports.WebGLContext = WebGLContext;
+
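+// WebGLUtils lazily creates a hidden canvas with a 'webgl' context, compiles
+// two shader programs (soft-mask composition and shaded-mesh figure drawing)
+// on first use, and returns that canvas from composeSMask()/drawFigures().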
+var WebGLUtils = function WebGLUtilsClosure() {
+ function loadShader(gl, code, shaderType) {
+ var shader = gl.createShader(shaderType);
+ gl.shaderSource(shader, code);
+ gl.compileShader(shader);
+ var compiled = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
+
+ if (!compiled) {
+ var errorMsg = gl.getShaderInfoLog(shader);
+ throw new Error('Error during shader compilation: ' + errorMsg);
+ }
+
+ return shader;
+ }
+
+ function createVertexShader(gl, code) {
+ return loadShader(gl, code, gl.VERTEX_SHADER);
+ }
+
+ function createFragmentShader(gl, code) {
+ return loadShader(gl, code, gl.FRAGMENT_SHADER);
+ }
+
+ function createProgram(gl, shaders) {
+ var program = gl.createProgram();
+
+ for (var i = 0, ii = shaders.length; i < ii; ++i) {
+ gl.attachShader(program, shaders[i]);
+ }
+
+ gl.linkProgram(program);
+ var linked = gl.getProgramParameter(program, gl.LINK_STATUS);
+
+ if (!linked) {
+ var errorMsg = gl.getProgramInfoLog(program);
+ throw new Error('Error during program linking: ' + errorMsg);
+ }
+
+ return program;
+ }
+
+ function createTexture(gl, image, textureId) {
+ gl.activeTexture(textureId);
+ var texture = gl.createTexture();
+ gl.bindTexture(gl.TEXTURE_2D, texture);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
+ return texture;
+ }
+
+ var currentGL, currentCanvas;
+
+ function generateGL() {
+ if (currentGL) {
+ return;
+ }
+
+ currentCanvas = document.createElement('canvas');
+ currentGL = currentCanvas.getContext('webgl', {
+      premultipliedAlpha: false
+ });
+ }
+
+ var smaskVertexShaderCode = '\
+ attribute vec2 a_position; \
+ attribute vec2 a_texCoord; \
+ \
+ uniform vec2 u_resolution; \
+ \
+ varying vec2 v_texCoord; \
+ \
+ void main() { \
+ vec2 clipSpace = (a_position / u_resolution) * 2.0 - 1.0; \
+ gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1); \
+ \
+ v_texCoord = a_texCoord; \
+ } ';
+ var smaskFragmentShaderCode = '\
+ precision mediump float; \
+ \
+ uniform vec4 u_backdrop; \
+ uniform int u_subtype; \
+ uniform sampler2D u_image; \
+ uniform sampler2D u_mask; \
+ \
+ varying vec2 v_texCoord; \
+ \
+ void main() { \
+ vec4 imageColor = texture2D(u_image, v_texCoord); \
+ vec4 maskColor = texture2D(u_mask, v_texCoord); \
+ if (u_backdrop.a > 0.0) { \
+ maskColor.rgb = maskColor.rgb * maskColor.a + \
+ u_backdrop.rgb * (1.0 - maskColor.a); \
+ } \
+ float lum; \
+ if (u_subtype == 0) { \
+ lum = maskColor.a; \
+ } else { \
+ lum = maskColor.r * 0.3 + maskColor.g * 0.59 + \
+ maskColor.b * 0.11; \
+ } \
+ imageColor.a *= lum; \
+ imageColor.rgb *= imageColor.a; \
+ gl_FragColor = imageColor; \
+ } ';
+ var smaskCache = null;
+
+ function initSmaskGL() {
+ var canvas, gl;
+ generateGL();
+ canvas = currentCanvas;
+ currentCanvas = null;
+ gl = currentGL;
+ currentGL = null;
+ var vertexShader = createVertexShader(gl, smaskVertexShaderCode);
+ var fragmentShader = createFragmentShader(gl, smaskFragmentShaderCode);
+ var program = createProgram(gl, [vertexShader, fragmentShader]);
+ gl.useProgram(program);
+ var cache = {};
+ cache.gl = gl;
+ cache.canvas = canvas;
+ cache.resolutionLocation = gl.getUniformLocation(program, 'u_resolution');
+ cache.positionLocation = gl.getAttribLocation(program, 'a_position');
+ cache.backdropLocation = gl.getUniformLocation(program, 'u_backdrop');
+ cache.subtypeLocation = gl.getUniformLocation(program, 'u_subtype');
+ var texCoordLocation = gl.getAttribLocation(program, 'a_texCoord');
+ var texLayerLocation = gl.getUniformLocation(program, 'u_image');
+ var texMaskLocation = gl.getUniformLocation(program, 'u_mask');
+ var texCoordBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0]), gl.STATIC_DRAW);
+ gl.enableVertexAttribArray(texCoordLocation);
+ gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);
+ gl.uniform1i(texLayerLocation, 0);
+ gl.uniform1i(texMaskLocation, 1);
+ smaskCache = cache;
+ }
+
+ function composeSMask(layer, mask, properties) {
+ var width = layer.width,
+ height = layer.height;
+
+ if (!smaskCache) {
+ initSmaskGL();
+ }
+
+ var cache = smaskCache,
+ canvas = cache.canvas,
+ gl = cache.gl;
+ canvas.width = width;
+ canvas.height = height;
+ gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
+ gl.uniform2f(cache.resolutionLocation, width, height);
+
+    if (properties.backdrop) {
+      gl.uniform4f(cache.backdropLocation, properties.backdrop[0], properties.backdrop[1], properties.backdrop[2], 1);
+    } else {
+      gl.uniform4f(cache.backdropLocation, 0, 0, 0, 0);
+    }
+
+ gl.uniform1i(cache.subtypeLocation, properties.subtype === 'Luminosity' ? 1 : 0);
+ var texture = createTexture(gl, layer, gl.TEXTURE0);
+ var maskTexture = createTexture(gl, mask, gl.TEXTURE1);
+ var buffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0, 0, width, 0, 0, height, 0, height, width, 0, width, height]), gl.STATIC_DRAW);
+ gl.enableVertexAttribArray(cache.positionLocation);
+ gl.vertexAttribPointer(cache.positionLocation, 2, gl.FLOAT, false, 0, 0);
+ gl.clearColor(0, 0, 0, 0);
+ gl.enable(gl.BLEND);
+ gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
+ gl.clear(gl.COLOR_BUFFER_BIT);
+ gl.drawArrays(gl.TRIANGLES, 0, 6);
+ gl.flush();
+ gl.deleteTexture(texture);
+ gl.deleteTexture(maskTexture);
+ gl.deleteBuffer(buffer);
+ return canvas;
+ }
+
+ var figuresVertexShaderCode = '\
+ attribute vec2 a_position; \
+ attribute vec3 a_color; \
+ \
+ uniform vec2 u_resolution; \
+ uniform vec2 u_scale; \
+ uniform vec2 u_offset; \
+ \
+ varying vec4 v_color; \
+ \
+ void main() { \
+ vec2 position = (a_position + u_offset) * u_scale; \
+ vec2 clipSpace = (position / u_resolution) * 2.0 - 1.0; \
+ gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1); \
+ \
+ v_color = vec4(a_color / 255.0, 1.0); \
+ } ';
+ var figuresFragmentShaderCode = '\
+ precision mediump float; \
+ \
+ varying vec4 v_color; \
+ \
+ void main() { \
+ gl_FragColor = v_color; \
+ } ';
+ var figuresCache = null;
+
+ function initFiguresGL() {
+ var canvas, gl;
+ generateGL();
+ canvas = currentCanvas;
+ currentCanvas = null;
+ gl = currentGL;
+ currentGL = null;
+ var vertexShader = createVertexShader(gl, figuresVertexShaderCode);
+ var fragmentShader = createFragmentShader(gl, figuresFragmentShaderCode);
+ var program = createProgram(gl, [vertexShader, fragmentShader]);
+ gl.useProgram(program);
+ var cache = {};
+ cache.gl = gl;
+ cache.canvas = canvas;
+ cache.resolutionLocation = gl.getUniformLocation(program, 'u_resolution');
+ cache.scaleLocation = gl.getUniformLocation(program, 'u_scale');
+ cache.offsetLocation = gl.getUniformLocation(program, 'u_offset');
+ cache.positionLocation = gl.getAttribLocation(program, 'a_position');
+ cache.colorLocation = gl.getAttribLocation(program, 'a_color');
+ figuresCache = cache;
+ }
+
+ function drawFigures(width, height, backgroundColor, figures, context) {
+ if (!figuresCache) {
+ initFiguresGL();
+ }
+
+ var cache = figuresCache,
+ canvas = cache.canvas,
+ gl = cache.gl;
+ canvas.width = width;
+ canvas.height = height;
+ gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
+ gl.uniform2f(cache.resolutionLocation, width, height);
+ var count = 0;
+ var i, ii, rows;
+
+ for (i = 0, ii = figures.length; i < ii; i++) {
+ switch (figures[i].type) {
+ case 'lattice':
+ rows = figures[i].coords.length / figures[i].verticesPerRow | 0;
+ count += (rows - 1) * (figures[i].verticesPerRow - 1) * 6;
+ break;
+
+ case 'triangles':
+ count += figures[i].coords.length;
+ break;
+ }
+ }
+
+ var coords = new Float32Array(count * 2);
+ var colors = new Uint8Array(count * 3);
+ var coordsMap = context.coords,
+ colorsMap = context.colors;
+ var pIndex = 0,
+ cIndex = 0;
+
+ for (i = 0, ii = figures.length; i < ii; i++) {
+ var figure = figures[i],
+ ps = figure.coords,
+ cs = figure.colors;
+
+ switch (figure.type) {
+ case 'lattice':
+ var cols = figure.verticesPerRow;
+ rows = ps.length / cols | 0;
+
+ for (var row = 1; row < rows; row++) {
+ var offset = row * cols + 1;
+
+ for (var col = 1; col < cols; col++, offset++) {
+ coords[pIndex] = coordsMap[ps[offset - cols - 1]];
+ coords[pIndex + 1] = coordsMap[ps[offset - cols - 1] + 1];
+ coords[pIndex + 2] = coordsMap[ps[offset - cols]];
+ coords[pIndex + 3] = coordsMap[ps[offset - cols] + 1];
+ coords[pIndex + 4] = coordsMap[ps[offset - 1]];
+ coords[pIndex + 5] = coordsMap[ps[offset - 1] + 1];
+ colors[cIndex] = colorsMap[cs[offset - cols - 1]];
+ colors[cIndex + 1] = colorsMap[cs[offset - cols - 1] + 1];
+ colors[cIndex + 2] = colorsMap[cs[offset - cols - 1] + 2];
+ colors[cIndex + 3] = colorsMap[cs[offset - cols]];
+ colors[cIndex + 4] = colorsMap[cs[offset - cols] + 1];
+ colors[cIndex + 5] = colorsMap[cs[offset - cols] + 2];
+ colors[cIndex + 6] = colorsMap[cs[offset - 1]];
+ colors[cIndex + 7] = colorsMap[cs[offset - 1] + 1];
+ colors[cIndex + 8] = colorsMap[cs[offset - 1] + 2];
+ coords[pIndex + 6] = coords[pIndex + 2];
+ coords[pIndex + 7] = coords[pIndex + 3];
+ coords[pIndex + 8] = coords[pIndex + 4];
+ coords[pIndex + 9] = coords[pIndex + 5];
+ coords[pIndex + 10] = coordsMap[ps[offset]];
+ coords[pIndex + 11] = coordsMap[ps[offset] + 1];
+ colors[cIndex + 9] = colors[cIndex + 3];
+ colors[cIndex + 10] = colors[cIndex + 4];
+ colors[cIndex + 11] = colors[cIndex + 5];
+ colors[cIndex + 12] = colors[cIndex + 6];
+ colors[cIndex + 13] = colors[cIndex + 7];
+ colors[cIndex + 14] = colors[cIndex + 8];
+ colors[cIndex + 15] = colorsMap[cs[offset]];
+ colors[cIndex + 16] = colorsMap[cs[offset] + 1];
+ colors[cIndex + 17] = colorsMap[cs[offset] + 2];
+ pIndex += 12;
+ cIndex += 18;
+ }
+ }
+
+ break;
+
+ case 'triangles':
+ for (var j = 0, jj = ps.length; j < jj; j++) {
+ coords[pIndex] = coordsMap[ps[j]];
+ coords[pIndex + 1] = coordsMap[ps[j] + 1];
+ colors[cIndex] = colorsMap[cs[j]];
+ colors[cIndex + 1] = colorsMap[cs[j] + 1];
+ colors[cIndex + 2] = colorsMap[cs[j] + 2];
+ pIndex += 2;
+ cIndex += 3;
+ }
+
+ break;
+ }
+ }
+
+ if (backgroundColor) {
+ gl.clearColor(backgroundColor[0] / 255, backgroundColor[1] / 255, backgroundColor[2] / 255, 1.0);
+ } else {
+ gl.clearColor(0, 0, 0, 0);
+ }
+
+ gl.clear(gl.COLOR_BUFFER_BIT);
+ var coordsBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, coordsBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, coords, gl.STATIC_DRAW);
+ gl.enableVertexAttribArray(cache.positionLocation);
+ gl.vertexAttribPointer(cache.positionLocation, 2, gl.FLOAT, false, 0, 0);
+ var colorsBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, colorsBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, colors, gl.STATIC_DRAW);
+ gl.enableVertexAttribArray(cache.colorLocation);
+ gl.vertexAttribPointer(cache.colorLocation, 3, gl.UNSIGNED_BYTE, false, 0, 0);
+ gl.uniform2f(cache.scaleLocation, context.scaleX, context.scaleY);
+ gl.uniform2f(cache.offsetLocation, context.offsetX, context.offsetY);
+ gl.drawArrays(gl.TRIANGLES, 0, count);
+ gl.flush();
+ gl.deleteBuffer(coordsBuffer);
+ gl.deleteBuffer(colorsBuffer);
+ return canvas;
+ }
+
+ return {
+ tryInitGL: function tryInitGL() {
+ try {
+ generateGL();
+ return !!currentGL;
+ } catch (ex) {}
+
+ return false;
+ },
+ composeSMask: composeSMask,
+ drawFigures: drawFigures,
+ cleanup: function cleanup() {
+ if (smaskCache && smaskCache.canvas) {
+ smaskCache.canvas.width = 0;
+ smaskCache.canvas.height = 0;
+ }
+
+ if (figuresCache && figuresCache.canvas) {
+ figuresCache.canvas.width = 0;
+ figuresCache.canvas.height = 0;
+ }
+
+ smaskCache = null;
+ figuresCache = null;
+ }
+ };
+}();
+
+/***/ }),
+/* 162 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.renderTextLayer = void 0;
+
+var _util = __w_pdfjs_require__(1);
+
+var _global_scope = _interopRequireDefault(__w_pdfjs_require__(3));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
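+// renderTextLayer builds absolutely positioned <span> elements for the text
+// items of a page (from textContent or a textContentStream), measures them on
+// a hidden 2D canvas to derive scaleX/rotate transforms, and can expand the
+// divs' bounds afterwards to improve text selection (enhanceTextSelection).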
+var renderTextLayer = function renderTextLayerClosure() {
+ var MAX_TEXT_DIVS_TO_RENDER = 100000;
+ var NonWhitespaceRegexp = /\S/;
+
+ function isAllWhitespace(str) {
+ return !NonWhitespaceRegexp.test(str);
+ }
+
+ var styleBuf = ['left: ', 0, 'px; top: ', 0, 'px; font-size: ', 0, 'px; font-family: ', '', ';'];
+
+ function appendText(task, geom, styles) {
+ var textDiv = document.createElement('span');
+ var textDivProperties = {
+ style: null,
+ angle: 0,
+ canvasWidth: 0,
+ isWhitespace: false,
+ originalTransform: null,
+ paddingBottom: 0,
+ paddingLeft: 0,
+ paddingRight: 0,
+ paddingTop: 0,
+ scale: 1
+ };
+
+ task._textDivs.push(textDiv);
+
+ if (isAllWhitespace(geom.str)) {
+ textDivProperties.isWhitespace = true;
+
+ task._textDivProperties.set(textDiv, textDivProperties);
+
+ return;
+ }
+
+ var tx = _util.Util.transform(task._viewport.transform, geom.transform);
+
+ var angle = Math.atan2(tx[1], tx[0]);
+ var style = styles[geom.fontName];
+
+ if (style.vertical) {
+ angle += Math.PI / 2;
+ }
+
+ var fontHeight = Math.sqrt(tx[2] * tx[2] + tx[3] * tx[3]);
+ var fontAscent = fontHeight;
+
+ if (style.ascent) {
+ fontAscent = style.ascent * fontAscent;
+ } else if (style.descent) {
+ fontAscent = (1 + style.descent) * fontAscent;
+ }
+
+ var left;
+ var top;
+
+ if (angle === 0) {
+ left = tx[4];
+ top = tx[5] - fontAscent;
+ } else {
+ left = tx[4] + fontAscent * Math.sin(angle);
+ top = tx[5] - fontAscent * Math.cos(angle);
+ }
+
+ styleBuf[1] = left;
+ styleBuf[3] = top;
+ styleBuf[5] = fontHeight;
+ styleBuf[7] = style.fontFamily;
+ textDivProperties.style = styleBuf.join('');
+ textDiv.setAttribute('style', textDivProperties.style);
+ textDiv.textContent = geom.str;
+
+ if (task._fontInspectorEnabled) {
+ textDiv.dataset.fontName = geom.fontName;
+ }
+
+ if (angle !== 0) {
+ textDivProperties.angle = angle * (180 / Math.PI);
+ }
+
+ if (geom.str.length > 1) {
+ if (style.vertical) {
+ textDivProperties.canvasWidth = geom.height * task._viewport.scale;
+ } else {
+ textDivProperties.canvasWidth = geom.width * task._viewport.scale;
+ }
+ }
+
+ task._textDivProperties.set(textDiv, textDivProperties);
+
+ if (task._textContentStream) {
+ task._layoutText(textDiv);
+ }
+
+ if (task._enhanceTextSelection) {
+ var angleCos = 1,
+ angleSin = 0;
+
+ if (angle !== 0) {
+ angleCos = Math.cos(angle);
+ angleSin = Math.sin(angle);
+ }
+
+ var divWidth = (style.vertical ? geom.height : geom.width) * task._viewport.scale;
+ var divHeight = fontHeight;
+ var m, b;
+
+ if (angle !== 0) {
+ m = [angleCos, angleSin, -angleSin, angleCos, left, top];
+ b = _util.Util.getAxialAlignedBoundingBox([0, 0, divWidth, divHeight], m);
+ } else {
+ b = [left, top, left + divWidth, top + divHeight];
+ }
+
+ task._bounds.push({
+ left: b[0],
+ top: b[1],
+ right: b[2],
+ bottom: b[3],
+ div: textDiv,
+ size: [divWidth, divHeight],
+ m: m
+ });
+ }
+ }
+
+ function render(task) {
+ if (task._canceled) {
+ return;
+ }
+
+ var textDivs = task._textDivs;
+ var capability = task._capability;
+ var textDivsLength = textDivs.length;
+
+ if (textDivsLength > MAX_TEXT_DIVS_TO_RENDER) {
+ task._renderingDone = true;
+ capability.resolve();
+ return;
+ }
+
+ if (!task._textContentStream) {
+ for (var i = 0; i < textDivsLength; i++) {
+ task._layoutText(textDivs[i]);
+ }
+ }
+
+ task._renderingDone = true;
+ capability.resolve();
+ }
+
+ function expand(task) {
+ var bounds = task._bounds;
+ var viewport = task._viewport;
+ var expanded = expandBounds(viewport.width, viewport.height, bounds);
+
+ for (var i = 0; i < expanded.length; i++) {
+ var div = bounds[i].div;
+
+ var divProperties = task._textDivProperties.get(div);
+
+ if (divProperties.angle === 0) {
+ divProperties.paddingLeft = bounds[i].left - expanded[i].left;
+ divProperties.paddingTop = bounds[i].top - expanded[i].top;
+ divProperties.paddingRight = expanded[i].right - bounds[i].right;
+ divProperties.paddingBottom = expanded[i].bottom - bounds[i].bottom;
+
+ task._textDivProperties.set(div, divProperties);
+
+ continue;
+ }
+
+ var e = expanded[i],
+ b = bounds[i];
+ var m = b.m,
+ c = m[0],
+ s = m[1];
+ var points = [[0, 0], [0, b.size[1]], [b.size[0], 0], b.size];
+ var ts = new Float64Array(64);
+ points.forEach(function (p, i) {
+ var t = _util.Util.applyTransform(p, m);
+
+ ts[i + 0] = c && (e.left - t[0]) / c;
+ ts[i + 4] = s && (e.top - t[1]) / s;
+ ts[i + 8] = c && (e.right - t[0]) / c;
+ ts[i + 12] = s && (e.bottom - t[1]) / s;
+ ts[i + 16] = s && (e.left - t[0]) / -s;
+ ts[i + 20] = c && (e.top - t[1]) / c;
+ ts[i + 24] = s && (e.right - t[0]) / -s;
+ ts[i + 28] = c && (e.bottom - t[1]) / c;
+ ts[i + 32] = c && (e.left - t[0]) / -c;
+ ts[i + 36] = s && (e.top - t[1]) / -s;
+ ts[i + 40] = c && (e.right - t[0]) / -c;
+ ts[i + 44] = s && (e.bottom - t[1]) / -s;
+ ts[i + 48] = s && (e.left - t[0]) / s;
+ ts[i + 52] = c && (e.top - t[1]) / -c;
+ ts[i + 56] = s && (e.right - t[0]) / s;
+ ts[i + 60] = c && (e.bottom - t[1]) / -c;
+ });
+
+ var findPositiveMin = function findPositiveMin(ts, offset, count) {
+ var result = 0;
+
+ for (var i = 0; i < count; i++) {
+ var t = ts[offset++];
+
+ if (t > 0) {
+ result = result ? Math.min(t, result) : t;
+ }
+ }
+
+ return result;
+ };
+
+ var boxScale = 1 + Math.min(Math.abs(c), Math.abs(s));
+ divProperties.paddingLeft = findPositiveMin(ts, 32, 16) / boxScale;
+ divProperties.paddingTop = findPositiveMin(ts, 48, 16) / boxScale;
+ divProperties.paddingRight = findPositiveMin(ts, 0, 16) / boxScale;
+ divProperties.paddingBottom = findPositiveMin(ts, 16, 16) / boxScale;
+
+ task._textDivProperties.set(div, divProperties);
+ }
+ }
+
+ function expandBounds(width, height, boxes) {
+ var bounds = boxes.map(function (box, i) {
+ return {
+ x1: box.left,
+ y1: box.top,
+ x2: box.right,
+ y2: box.bottom,
+ index: i,
+ x1New: undefined,
+ x2New: undefined
+ };
+ });
+ expandBoundsLTR(width, bounds);
+ var expanded = new Array(boxes.length);
+ bounds.forEach(function (b) {
+ var i = b.index;
+ expanded[i] = {
+ left: b.x1New,
+ top: 0,
+ right: b.x2New,
+ bottom: 0
+ };
+ });
+ boxes.map(function (box, i) {
+ var e = expanded[i],
+ b = bounds[i];
+ b.x1 = box.top;
+ b.y1 = width - e.right;
+ b.x2 = box.bottom;
+ b.y2 = width - e.left;
+ b.index = i;
+ b.x1New = undefined;
+ b.x2New = undefined;
+ });
+ expandBoundsLTR(height, bounds);
+ bounds.forEach(function (b) {
+ var i = b.index;
+ expanded[i].top = b.x1New;
+ expanded[i].bottom = b.x2New;
+ });
+ return expanded;
+ }
+
+ function expandBoundsLTR(width, bounds) {
+ bounds.sort(function (a, b) {
+ return a.x1 - b.x1 || a.index - b.index;
+ });
+ var fakeBoundary = {
+ x1: -Infinity,
+ y1: -Infinity,
+ x2: 0,
+ y2: Infinity,
+ index: -1,
+ x1New: 0,
+ x2New: 0
+ };
+ var horizon = [{
+ start: -Infinity,
+ end: Infinity,
+ boundary: fakeBoundary
+ }];
+ bounds.forEach(function (boundary) {
+ var i = 0;
+
+ while (i < horizon.length && horizon[i].end <= boundary.y1) {
+ i++;
+ }
+
+ var j = horizon.length - 1;
+
+ while (j >= 0 && horizon[j].start >= boundary.y2) {
+ j--;
+ }
+
+ var horizonPart, affectedBoundary;
+ var q,
+ k,
+ maxXNew = -Infinity;
+
+ for (q = i; q <= j; q++) {
+ horizonPart = horizon[q];
+ affectedBoundary = horizonPart.boundary;
+ var xNew;
+
+ if (affectedBoundary.x2 > boundary.x1) {
+ xNew = affectedBoundary.index > boundary.index ? affectedBoundary.x1New : boundary.x1;
+ } else if (affectedBoundary.x2New === undefined) {
+ xNew = (affectedBoundary.x2 + boundary.x1) / 2;
+ } else {
+ xNew = affectedBoundary.x2New;
+ }
+
+ if (xNew > maxXNew) {
+ maxXNew = xNew;
+ }
+ }
+
+ boundary.x1New = maxXNew;
+
+ for (q = i; q <= j; q++) {
+ horizonPart = horizon[q];
+ affectedBoundary = horizonPart.boundary;
+
+ if (affectedBoundary.x2New === undefined) {
+ if (affectedBoundary.x2 > boundary.x1) {
+ if (affectedBoundary.index > boundary.index) {
+ affectedBoundary.x2New = affectedBoundary.x2;
+ }
+ } else {
+ affectedBoundary.x2New = maxXNew;
+ }
+ } else if (affectedBoundary.x2New > maxXNew) {
+ affectedBoundary.x2New = Math.max(maxXNew, affectedBoundary.x2);
+ }
+ }
+
+ var changedHorizon = [],
+ lastBoundary = null;
+
+ for (q = i; q <= j; q++) {
+ horizonPart = horizon[q];
+ affectedBoundary = horizonPart.boundary;
+ var useBoundary = affectedBoundary.x2 > boundary.x2 ? affectedBoundary : boundary;
+
+ if (lastBoundary === useBoundary) {
+ changedHorizon[changedHorizon.length - 1].end = horizonPart.end;
+ } else {
+ changedHorizon.push({
+ start: horizonPart.start,
+ end: horizonPart.end,
+ boundary: useBoundary
+ });
+ lastBoundary = useBoundary;
+ }
+ }
+
+ if (horizon[i].start < boundary.y1) {
+ changedHorizon[0].start = boundary.y1;
+ changedHorizon.unshift({
+ start: horizon[i].start,
+ end: boundary.y1,
+ boundary: horizon[i].boundary
+ });
+ }
+
+ if (boundary.y2 < horizon[j].end) {
+ changedHorizon[changedHorizon.length - 1].end = boundary.y2;
+ changedHorizon.push({
+ start: boundary.y2,
+ end: horizon[j].end,
+ boundary: horizon[j].boundary
+ });
+ }
+
+ for (q = i; q <= j; q++) {
+ horizonPart = horizon[q];
+ affectedBoundary = horizonPart.boundary;
+
+ if (affectedBoundary.x2New !== undefined) {
+ continue;
+ }
+
+ var used = false;
+
+ for (k = i - 1; !used && k >= 0 && horizon[k].start >= affectedBoundary.y1; k--) {
+ used = horizon[k].boundary === affectedBoundary;
+ }
+
+ for (k = j + 1; !used && k < horizon.length && horizon[k].end <= affectedBoundary.y2; k++) {
+ used = horizon[k].boundary === affectedBoundary;
+ }
+
+ for (k = 0; !used && k < changedHorizon.length; k++) {
+ used = changedHorizon[k].boundary === affectedBoundary;
+ }
+
+ if (!used) {
+ affectedBoundary.x2New = maxXNew;
+ }
+ }
+
+ Array.prototype.splice.apply(horizon, [i, j - i + 1].concat(changedHorizon));
+ });
+ horizon.forEach(function (horizonPart) {
+ var affectedBoundary = horizonPart.boundary;
+
+ if (affectedBoundary.x2New === undefined) {
+ affectedBoundary.x2New = Math.max(width, affectedBoundary.x2);
+ }
+ });
+ }
+
+ function TextLayerRenderTask(_ref) {
+ var _this = this;
+
+ var textContent = _ref.textContent,
+ textContentStream = _ref.textContentStream,
+ container = _ref.container,
+ viewport = _ref.viewport,
+ textDivs = _ref.textDivs,
+ textContentItemsStr = _ref.textContentItemsStr,
+ enhanceTextSelection = _ref.enhanceTextSelection;
+ this._textContent = textContent;
+ this._textContentStream = textContentStream;
+ this._container = container;
+ this._viewport = viewport;
+ this._textDivs = textDivs || [];
+ this._textContentItemsStr = textContentItemsStr || [];
+ this._enhanceTextSelection = !!enhanceTextSelection;
+ this._fontInspectorEnabled = !!(_global_scope["default"].FontInspector && _global_scope["default"].FontInspector.enabled);
+ this._reader = null;
+ this._layoutTextLastFontSize = null;
+ this._layoutTextLastFontFamily = null;
+ this._layoutTextCtx = null;
+ this._textDivProperties = new WeakMap();
+ this._renderingDone = false;
+ this._canceled = false;
+ this._capability = (0, _util.createPromiseCapability)();
+ this._renderTimer = null;
+ this._bounds = [];
+
+ this._capability.promise["finally"](function () {
+ if (_this._layoutTextCtx) {
+ _this._layoutTextCtx.canvas.width = 0;
+ _this._layoutTextCtx.canvas.height = 0;
+ _this._layoutTextCtx = null;
+ }
+ });
+ }
+
+ TextLayerRenderTask.prototype = {
+ get promise() {
+ return this._capability.promise;
+ },
+
+ cancel: function TextLayer_cancel() {
+ this._canceled = true;
+
+ if (this._reader) {
+ this._reader.cancel(new _util.AbortException('TextLayer task cancelled.'));
+
+ this._reader = null;
+ }
+
+ if (this._renderTimer !== null) {
+ clearTimeout(this._renderTimer);
+ this._renderTimer = null;
+ }
+
+ this._capability.reject(new Error('TextLayer task cancelled.'));
+ },
+ _processItems: function _processItems(items, styleCache) {
+ for (var i = 0, len = items.length; i < len; i++) {
+ this._textContentItemsStr.push(items[i].str);
+
+ appendText(this, items[i], styleCache);
+ }
+ },
+ _layoutText: function _layoutText(textDiv) {
+ var textLayerFrag = this._container;
+
+ var textDivProperties = this._textDivProperties.get(textDiv);
+
+ if (textDivProperties.isWhitespace) {
+ return;
+ }
+
+ var fontSize = textDiv.style.fontSize;
+ var fontFamily = textDiv.style.fontFamily;
+
+ if (fontSize !== this._layoutTextLastFontSize || fontFamily !== this._layoutTextLastFontFamily) {
+ this._layoutTextCtx.font = fontSize + ' ' + fontFamily;
+ this._layoutTextLastFontSize = fontSize;
+ this._layoutTextLastFontFamily = fontFamily;
+ }
+
+ var width = this._layoutTextCtx.measureText(textDiv.textContent).width;
+
+ var transform = '';
+
+ if (textDivProperties.canvasWidth !== 0 && width > 0) {
+ textDivProperties.scale = textDivProperties.canvasWidth / width;
+ transform = "scaleX(".concat(textDivProperties.scale, ")");
+ }
+
+ if (textDivProperties.angle !== 0) {
+ transform = "rotate(".concat(textDivProperties.angle, "deg) ").concat(transform);
+ }
+
+ if (transform.length > 0) {
+ textDivProperties.originalTransform = transform;
+ textDiv.style.transform = transform;
+ }
+
+ this._textDivProperties.set(textDiv, textDivProperties);
+
+ textLayerFrag.appendChild(textDiv);
+ },
+ _render: function TextLayer_render(timeout) {
+ var _this2 = this;
+
+ var capability = (0, _util.createPromiseCapability)();
+ var styleCache = Object.create(null);
+ var canvas = document.createElement('canvas');
+ canvas.mozOpaque = true;
+ this._layoutTextCtx = canvas.getContext('2d', {
+ alpha: false
+ });
+
+ if (this._textContent) {
+ var textItems = this._textContent.items;
+ var textStyles = this._textContent.styles;
+
+ this._processItems(textItems, textStyles);
+
+ capability.resolve();
+ } else if (this._textContentStream) {
+ var pump = function pump() {
+ _this2._reader.read().then(function (_ref2) {
+ var value = _ref2.value,
+ done = _ref2.done;
+
+ if (done) {
+ capability.resolve();
+ return;
+ }
+
+ Object.assign(styleCache, value.styles);
+
+ _this2._processItems(value.items, styleCache);
+
+ pump();
+ }, capability.reject);
+ };
+
+ this._reader = this._textContentStream.getReader();
+ pump();
+ } else {
+ throw new Error('Neither "textContent" nor "textContentStream"' + ' parameters specified.');
+ }
+
+ capability.promise.then(function () {
+ styleCache = null;
+
+ if (!timeout) {
+ render(_this2);
+ } else {
+ _this2._renderTimer = setTimeout(function () {
+ render(_this2);
+ _this2._renderTimer = null;
+ }, timeout);
+ }
+ }, this._capability.reject);
+ },
+ expandTextDivs: function TextLayer_expandTextDivs(expandDivs) {
+ if (!this._enhanceTextSelection || !this._renderingDone) {
+ return;
+ }
+
+ if (this._bounds !== null) {
+ expand(this);
+ this._bounds = null;
+ }
+
+ for (var i = 0, ii = this._textDivs.length; i < ii; i++) {
+ var div = this._textDivs[i];
+
+ var divProperties = this._textDivProperties.get(div);
+
+ if (divProperties.isWhitespace) {
+ continue;
+ }
+
+ if (expandDivs) {
+ var transform = '',
+ padding = '';
+
+ if (divProperties.scale !== 1) {
+ transform = 'scaleX(' + divProperties.scale + ')';
+ }
+
+ if (divProperties.angle !== 0) {
+ transform = 'rotate(' + divProperties.angle + 'deg) ' + transform;
+ }
+
+ if (divProperties.paddingLeft !== 0) {
+ padding += ' padding-left: ' + divProperties.paddingLeft / divProperties.scale + 'px;';
+ transform += ' translateX(' + -divProperties.paddingLeft / divProperties.scale + 'px)';
+ }
+
+ if (divProperties.paddingTop !== 0) {
+ padding += ' padding-top: ' + divProperties.paddingTop + 'px;';
+ transform += ' translateY(' + -divProperties.paddingTop + 'px)';
+ }
+
+ if (divProperties.paddingRight !== 0) {
+ padding += ' padding-right: ' + divProperties.paddingRight / divProperties.scale + 'px;';
+ }
+
+ if (divProperties.paddingBottom !== 0) {
+ padding += ' padding-bottom: ' + divProperties.paddingBottom + 'px;';
+ }
+
+ if (padding !== '') {
+ div.setAttribute('style', divProperties.style + padding);
+ }
+
+ if (transform !== '') {
+ div.style.transform = transform;
+ }
+ } else {
+ div.style.padding = 0;
+ div.style.transform = divProperties.originalTransform || '';
+ }
+ }
+ }
+ };
+
+ function renderTextLayer(renderParameters) {
+ var task = new TextLayerRenderTask({
+ textContent: renderParameters.textContent,
+ textContentStream: renderParameters.textContentStream,
+ container: renderParameters.container,
+ viewport: renderParameters.viewport,
+ textDivs: renderParameters.textDivs,
+ textContentItemsStr: renderParameters.textContentItemsStr,
+ enhanceTextSelection: renderParameters.enhanceTextSelection
+ });
+
+ task._render(renderParameters.timeout);
+
+ return task;
+ }
+
+ return renderTextLayer;
+}();
+
+exports.renderTextLayer = renderTextLayer;
+
+/***/ }),
+/* 163 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.AnnotationLayer = void 0;
+
+var _display_utils = __w_pdfjs_require__(151);
+
+var _util = __w_pdfjs_require__(1);
+
+function _get(target, property, receiver) { if (typeof Reflect !== "undefined" && Reflect.get) { _get = Reflect.get; } else { _get = function _get(target, property, receiver) { var base = _superPropBase(target, property); if (!base) return; var desc = Object.getOwnPropertyDescriptor(base, property); if (desc.get) { return desc.get.call(receiver); } return desc.value; }; } return _get(target, property, receiver || target); }
+
+function _superPropBase(object, property) { while (!Object.prototype.hasOwnProperty.call(object, property)) { object = _getPrototypeOf(object); if (object === null) break; } return object; }
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
+
+function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
+
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
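+// AnnotationElementFactory.create() maps an annotation's type (and, for
+// widget annotations, its field type) to the matching element class below.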
+var AnnotationElementFactory =
+/*#__PURE__*/
+function () {
+ function AnnotationElementFactory() {
+ _classCallCheck(this, AnnotationElementFactory);
+ }
+
+ _createClass(AnnotationElementFactory, null, [{
+ key: "create",
+ value: function create(parameters) {
+ var subtype = parameters.data.annotationType;
+
+ switch (subtype) {
+ case _util.AnnotationType.LINK:
+ return new LinkAnnotationElement(parameters);
+
+ case _util.AnnotationType.TEXT:
+ return new TextAnnotationElement(parameters);
+
+ case _util.AnnotationType.WIDGET:
+ var fieldType = parameters.data.fieldType;
+
+ switch (fieldType) {
+ case 'Tx':
+ return new TextWidgetAnnotationElement(parameters);
+
+ case 'Btn':
+ if (parameters.data.radioButton) {
+ return new RadioButtonWidgetAnnotationElement(parameters);
+ } else if (parameters.data.checkBox) {
+ return new CheckboxWidgetAnnotationElement(parameters);
+ }
+
+ return new PushButtonWidgetAnnotationElement(parameters);
+
+ case 'Ch':
+ return new ChoiceWidgetAnnotationElement(parameters);
+ }
+
+ return new WidgetAnnotationElement(parameters);
+
+ case _util.AnnotationType.POPUP:
+ return new PopupAnnotationElement(parameters);
+
+ case _util.AnnotationType.FREETEXT:
+ return new FreeTextAnnotationElement(parameters);
+
+ case _util.AnnotationType.LINE:
+ return new LineAnnotationElement(parameters);
+
+ case _util.AnnotationType.SQUARE:
+ return new SquareAnnotationElement(parameters);
+
+ case _util.AnnotationType.CIRCLE:
+ return new CircleAnnotationElement(parameters);
+
+ case _util.AnnotationType.POLYLINE:
+ return new PolylineAnnotationElement(parameters);
+
+ case _util.AnnotationType.CARET:
+ return new CaretAnnotationElement(parameters);
+
+ case _util.AnnotationType.INK:
+ return new InkAnnotationElement(parameters);
+
+ case _util.AnnotationType.POLYGON:
+ return new PolygonAnnotationElement(parameters);
+
+ case _util.AnnotationType.HIGHLIGHT:
+ return new HighlightAnnotationElement(parameters);
+
+ case _util.AnnotationType.UNDERLINE:
+ return new UnderlineAnnotationElement(parameters);
+
+ case _util.AnnotationType.SQUIGGLY:
+ return new SquigglyAnnotationElement(parameters);
+
+ case _util.AnnotationType.STRIKEOUT:
+ return new StrikeOutAnnotationElement(parameters);
+
+ case _util.AnnotationType.STAMP:
+ return new StampAnnotationElement(parameters);
+
+ case _util.AnnotationType.FILEATTACHMENT:
+ return new FileAttachmentAnnotationElement(parameters);
+
+ default:
+ return new AnnotationElement(parameters);
+ }
+ }
+ }]);
+
+ return AnnotationElementFactory;
+}();
+
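+// AnnotationElement is the common base class: _createContainer() positions a
+// <section> from the normalized annotation rect and the viewport transform
+// and applies the border style; subclasses implement render().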
+var AnnotationElement =
+/*#__PURE__*/
+function () {
+ function AnnotationElement(parameters) {
+ var isRenderable = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
+ var ignoreBorder = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
+
+ _classCallCheck(this, AnnotationElement);
+
+ this.isRenderable = isRenderable;
+ this.data = parameters.data;
+ this.layer = parameters.layer;
+ this.page = parameters.page;
+ this.viewport = parameters.viewport;
+ this.linkService = parameters.linkService;
+ this.downloadManager = parameters.downloadManager;
+ this.imageResourcesPath = parameters.imageResourcesPath;
+ this.renderInteractiveForms = parameters.renderInteractiveForms;
+ this.svgFactory = parameters.svgFactory;
+
+ if (isRenderable) {
+ this.container = this._createContainer(ignoreBorder);
+ }
+ }
+
+ _createClass(AnnotationElement, [{
+ key: "_createContainer",
+ value: function _createContainer() {
+ var ignoreBorder = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
+ var data = this.data,
+ page = this.page,
+ viewport = this.viewport;
+ var container = document.createElement('section');
+ var width = data.rect[2] - data.rect[0];
+ var height = data.rect[3] - data.rect[1];
+ container.setAttribute('data-annotation-id', data.id);
+
+ var rect = _util.Util.normalizeRect([data.rect[0], page.view[3] - data.rect[1] + page.view[1], data.rect[2], page.view[3] - data.rect[3] + page.view[1]]);
+
+ container.style.transform = 'matrix(' + viewport.transform.join(',') + ')';
+ container.style.transformOrigin = -rect[0] + 'px ' + -rect[1] + 'px';
+
+ if (!ignoreBorder && data.borderStyle.width > 0) {
+ container.style.borderWidth = data.borderStyle.width + 'px';
+
+ if (data.borderStyle.style !== _util.AnnotationBorderStyleType.UNDERLINE) {
+ width = width - 2 * data.borderStyle.width;
+ height = height - 2 * data.borderStyle.width;
+ }
+
+ var horizontalRadius = data.borderStyle.horizontalCornerRadius;
+ var verticalRadius = data.borderStyle.verticalCornerRadius;
+
+ if (horizontalRadius > 0 || verticalRadius > 0) {
+ var radius = horizontalRadius + 'px / ' + verticalRadius + 'px';
+ container.style.borderRadius = radius;
+ }
+
+ switch (data.borderStyle.style) {
+ case _util.AnnotationBorderStyleType.SOLID:
+ container.style.borderStyle = 'solid';
+ break;
+
+ case _util.AnnotationBorderStyleType.DASHED:
+ container.style.borderStyle = 'dashed';
+ break;
+
+ case _util.AnnotationBorderStyleType.BEVELED:
+ (0, _util.warn)('Unimplemented border style: beveled');
+ break;
+
+ case _util.AnnotationBorderStyleType.INSET:
+ (0, _util.warn)('Unimplemented border style: inset');
+ break;
+
+ case _util.AnnotationBorderStyleType.UNDERLINE:
+ container.style.borderBottomStyle = 'solid';
+ break;
+
+ default:
+ break;
+ }
+
+ if (data.color) {
+ container.style.borderColor = _util.Util.makeCssRgb(data.color[0] | 0, data.color[1] | 0, data.color[2] | 0);
+ } else {
+ container.style.borderWidth = 0;
+ }
+ }
+
+ container.style.left = rect[0] + 'px';
+ container.style.top = rect[1] + 'px';
+ container.style.width = width + 'px';
+ container.style.height = height + 'px';
+ return container;
+ }
+ }, {
+ key: "_createPopup",
+ value: function _createPopup(container, trigger, data) {
+ if (!trigger) {
+ trigger = document.createElement('div');
+ trigger.style.height = container.style.height;
+ trigger.style.width = container.style.width;
+ container.appendChild(trigger);
+ }
+
+ var popupElement = new PopupElement({
+ container: container,
+ trigger: trigger,
+ color: data.color,
+ title: data.title,
+ modificationDate: data.modificationDate,
+ contents: data.contents,
+ hideWrapper: true
+ });
+ var popup = popupElement.render();
+ popup.style.left = container.style.width;
+ container.appendChild(popup);
+ }
+ }, {
+ key: "render",
+ value: function render() {
+ (0, _util.unreachable)('Abstract method `AnnotationElement.render` called');
+ }
+ }]);
+
+ return AnnotationElement;
+}();
+
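+// LinkAnnotationElement renders an <a> element; URLs get link attributes,
+// while destinations and named actions are routed through the link service.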
+var LinkAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement) {
+ _inherits(LinkAnnotationElement, _AnnotationElement);
+
+ function LinkAnnotationElement(parameters) {
+ _classCallCheck(this, LinkAnnotationElement);
+
+ var isRenderable = !!(parameters.data.url || parameters.data.dest || parameters.data.action);
+ return _possibleConstructorReturn(this, _getPrototypeOf(LinkAnnotationElement).call(this, parameters, isRenderable));
+ }
+
+ _createClass(LinkAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'linkAnnotation';
+ var data = this.data,
+ linkService = this.linkService;
+ var link = document.createElement('a');
+ (0, _display_utils.addLinkAttributes)(link, {
+ url: data.url,
+ target: data.newWindow ? _display_utils.LinkTarget.BLANK : linkService.externalLinkTarget,
+ rel: linkService.externalLinkRel
+ });
+
+ if (!data.url) {
+ if (data.action) {
+ this._bindNamedAction(link, data.action);
+ } else {
+ this._bindLink(link, data.dest);
+ }
+ }
+
+ this.container.appendChild(link);
+ return this.container;
+ }
+ }, {
+ key: "_bindLink",
+ value: function _bindLink(link, destination) {
+ var _this = this;
+
+ link.href = this.linkService.getDestinationHash(destination);
+
+ link.onclick = function () {
+ if (destination) {
+ _this.linkService.navigateTo(destination);
+ }
+
+ return false;
+ };
+
+ if (destination) {
+ link.className = 'internalLink';
+ }
+ }
+ }, {
+ key: "_bindNamedAction",
+ value: function _bindNamedAction(link, action) {
+ var _this2 = this;
+
+ link.href = this.linkService.getAnchorUrl('');
+
+ link.onclick = function () {
+ _this2.linkService.executeNamedAction(action);
+
+ return false;
+ };
+
+ link.className = 'internalLink';
+ }
+ }]);
+
+ return LinkAnnotationElement;
+}(AnnotationElement);
+
+var TextAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement2) {
+ _inherits(TextAnnotationElement, _AnnotationElement2);
+
+ function TextAnnotationElement(parameters) {
+ _classCallCheck(this, TextAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ return _possibleConstructorReturn(this, _getPrototypeOf(TextAnnotationElement).call(this, parameters, isRenderable));
+ }
+
+ _createClass(TextAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'textAnnotation';
+ var image = document.createElement('img');
+ image.style.height = this.container.style.height;
+ image.style.width = this.container.style.width;
+ image.src = this.imageResourcesPath + 'annotation-' + this.data.name.toLowerCase() + '.svg';
+ image.alt = '[{{type}} Annotation]';
+ image.dataset.l10nId = 'text_annotation_type';
+ image.dataset.l10nArgs = JSON.stringify({
+ type: this.data.name
+ });
+
+ if (!this.data.hasPopup) {
+ this._createPopup(this.container, image, this.data);
+ }
+
+ this.container.appendChild(image);
+ return this.container;
+ }
+ }]);
+
+ return TextAnnotationElement;
+}(AnnotationElement);
+
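+// Base class for form field (widget) annotation elements; the default render()
+// simply returns the positioned container.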
+var WidgetAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement3) {
+ _inherits(WidgetAnnotationElement, _AnnotationElement3);
+
+ function WidgetAnnotationElement() {
+ _classCallCheck(this, WidgetAnnotationElement);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(WidgetAnnotationElement).apply(this, arguments));
+ }
+
+ _createClass(WidgetAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ return this.container;
+ }
+ }]);
+
+ return WidgetAnnotationElement;
+}(AnnotationElement);
+
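+// Text field widgets: with renderInteractiveForms an <input> or <textarea> is
+// created (including comb-field letter spacing); otherwise the stored field
+// value is rendered as static text styled after the field's font.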
+var TextWidgetAnnotationElement =
+/*#__PURE__*/
+function (_WidgetAnnotationElem) {
+ _inherits(TextWidgetAnnotationElement, _WidgetAnnotationElem);
+
+ function TextWidgetAnnotationElement(parameters) {
+ _classCallCheck(this, TextWidgetAnnotationElement);
+
+ var isRenderable = parameters.renderInteractiveForms || !parameters.data.hasAppearance && !!parameters.data.fieldValue;
+ return _possibleConstructorReturn(this, _getPrototypeOf(TextWidgetAnnotationElement).call(this, parameters, isRenderable));
+ }
+
+ _createClass(TextWidgetAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ var TEXT_ALIGNMENT = ['left', 'center', 'right'];
+ this.container.className = 'textWidgetAnnotation';
+ var element = null;
+
+ if (this.renderInteractiveForms) {
+ if (this.data.multiLine) {
+ element = document.createElement('textarea');
+ element.textContent = this.data.fieldValue;
+ } else {
+ element = document.createElement('input');
+ element.type = 'text';
+ element.setAttribute('value', this.data.fieldValue);
+ }
+
+ element.disabled = this.data.readOnly;
+
+ if (this.data.maxLen !== null) {
+ element.maxLength = this.data.maxLen;
+ }
+
+ if (this.data.comb) {
+ var fieldWidth = this.data.rect[2] - this.data.rect[0];
+ var combWidth = fieldWidth / this.data.maxLen;
+ element.classList.add('comb');
+ element.style.letterSpacing = 'calc(' + combWidth + 'px - 1ch)';
+ }
+ } else {
+ element = document.createElement('div');
+ element.textContent = this.data.fieldValue;
+ element.style.verticalAlign = 'middle';
+ element.style.display = 'table-cell';
+ var font = null;
+
+ if (this.data.fontRefName && this.page.commonObjs.has(this.data.fontRefName)) {
+ font = this.page.commonObjs.get(this.data.fontRefName);
+ }
+
+ this._setTextStyle(element, font);
+ }
+
+ if (this.data.textAlignment !== null) {
+ element.style.textAlign = TEXT_ALIGNMENT[this.data.textAlignment];
+ }
+
+ this.container.appendChild(element);
+ return this.container;
+ }
+ }, {
+ key: "_setTextStyle",
+ value: function _setTextStyle(element, font) {
+ var style = element.style;
+ style.fontSize = this.data.fontSize + 'px';
+ style.direction = this.data.fontDirection < 0 ? 'rtl' : 'ltr';
+
+ if (!font) {
+ return;
+ }
+
+ style.fontWeight = font.black ? font.bold ? '900' : 'bold' : font.bold ? 'bold' : 'normal';
+ style.fontStyle = font.italic ? 'italic' : 'normal';
+ var fontFamily = font.loadedName ? '"' + font.loadedName + '", ' : '';
+ var fallbackName = font.fallbackName || 'Helvetica, sans-serif';
+ style.fontFamily = fontFamily + fallbackName;
+ }
+ }]);
+
+ return TextWidgetAnnotationElement;
+}(WidgetAnnotationElement);
+
+var CheckboxWidgetAnnotationElement =
+/*#__PURE__*/
+function (_WidgetAnnotationElem2) {
+ _inherits(CheckboxWidgetAnnotationElement, _WidgetAnnotationElem2);
+
+ function CheckboxWidgetAnnotationElement(parameters) {
+ _classCallCheck(this, CheckboxWidgetAnnotationElement);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(CheckboxWidgetAnnotationElement).call(this, parameters, parameters.renderInteractiveForms));
+ }
+
+ _createClass(CheckboxWidgetAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'buttonWidgetAnnotation checkBox';
+ var element = document.createElement('input');
+ element.disabled = this.data.readOnly;
+ element.type = 'checkbox';
+
+ if (this.data.fieldValue && this.data.fieldValue !== 'Off') {
+ element.setAttribute('checked', true);
+ }
+
+ this.container.appendChild(element);
+ return this.container;
+ }
+ }]);
+
+ return CheckboxWidgetAnnotationElement;
+}(WidgetAnnotationElement);
+
+var RadioButtonWidgetAnnotationElement =
+/*#__PURE__*/
+function (_WidgetAnnotationElem3) {
+ _inherits(RadioButtonWidgetAnnotationElement, _WidgetAnnotationElem3);
+
+ function RadioButtonWidgetAnnotationElement(parameters) {
+ _classCallCheck(this, RadioButtonWidgetAnnotationElement);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(RadioButtonWidgetAnnotationElement).call(this, parameters, parameters.renderInteractiveForms));
+ }
+
+ _createClass(RadioButtonWidgetAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'buttonWidgetAnnotation radioButton';
+ var element = document.createElement('input');
+ element.disabled = this.data.readOnly;
+ element.type = 'radio';
+ element.name = this.data.fieldName;
+
+ if (this.data.fieldValue === this.data.buttonValue) {
+ element.setAttribute('checked', true);
+ }
+
+ this.container.appendChild(element);
+ return this.container;
+ }
+ }]);
+
+ return RadioButtonWidgetAnnotationElement;
+}(WidgetAnnotationElement);
+
+var PushButtonWidgetAnnotationElement =
+/*#__PURE__*/
+function (_LinkAnnotationElemen) {
+ _inherits(PushButtonWidgetAnnotationElement, _LinkAnnotationElemen);
+
+ function PushButtonWidgetAnnotationElement() {
+ _classCallCheck(this, PushButtonWidgetAnnotationElement);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(PushButtonWidgetAnnotationElement).apply(this, arguments));
+ }
+
+ _createClass(PushButtonWidgetAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ var container = _get(_getPrototypeOf(PushButtonWidgetAnnotationElement.prototype), "render", this).call(this);
+
+ container.className = 'buttonWidgetAnnotation pushButton';
+ return container;
+ }
+ }]);
+
+ return PushButtonWidgetAnnotationElement;
+}(LinkAnnotationElement);
+
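+// Choice (list box / combo box) widgets render a <select> populated with the
+// field's options; non-combo fields get a visible size and optional multi-select.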
+var ChoiceWidgetAnnotationElement =
+/*#__PURE__*/
+function (_WidgetAnnotationElem4) {
+ _inherits(ChoiceWidgetAnnotationElement, _WidgetAnnotationElem4);
+
+ function ChoiceWidgetAnnotationElement(parameters) {
+ _classCallCheck(this, ChoiceWidgetAnnotationElement);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(ChoiceWidgetAnnotationElement).call(this, parameters, parameters.renderInteractiveForms));
+ }
+
+ _createClass(ChoiceWidgetAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'choiceWidgetAnnotation';
+ var selectElement = document.createElement('select');
+ selectElement.disabled = this.data.readOnly;
+
+ if (!this.data.combo) {
+ selectElement.size = this.data.options.length;
+
+ if (this.data.multiSelect) {
+ selectElement.multiple = true;
+ }
+ }
+
+ for (var i = 0, ii = this.data.options.length; i < ii; i++) {
+ var option = this.data.options[i];
+ var optionElement = document.createElement('option');
+ optionElement.textContent = option.displayValue;
+ optionElement.value = option.exportValue;
+
+ if (this.data.fieldValue.includes(option.displayValue)) {
+ optionElement.setAttribute('selected', true);
+ }
+
+ selectElement.appendChild(optionElement);
+ }
+
+ this.container.appendChild(selectElement);
+ return this.container;
+ }
+ }]);
+
+ return ChoiceWidgetAnnotationElement;
+}(WidgetAnnotationElement);
+
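+// Popup annotations attach a PopupElement next to their parent annotation,
+// except for the markup types in IGNORE_TYPES, whose elements create their own
+// popups.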
+var PopupAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement4) {
+ _inherits(PopupAnnotationElement, _AnnotationElement4);
+
+ function PopupAnnotationElement(parameters) {
+ _classCallCheck(this, PopupAnnotationElement);
+
+ var isRenderable = !!(parameters.data.title || parameters.data.contents);
+ return _possibleConstructorReturn(this, _getPrototypeOf(PopupAnnotationElement).call(this, parameters, isRenderable));
+ }
+
+ _createClass(PopupAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ var IGNORE_TYPES = ['Line', 'Square', 'Circle', 'PolyLine', 'Polygon', 'Ink'];
+ this.container.className = 'popupAnnotation';
+
+ if (IGNORE_TYPES.includes(this.data.parentType)) {
+ return this.container;
+ }
+
+ var selector = '[data-annotation-id="' + this.data.parentId + '"]';
+ var parentElement = this.layer.querySelector(selector);
+
+ if (!parentElement) {
+ return this.container;
+ }
+
+ var popup = new PopupElement({
+ container: this.container,
+ trigger: parentElement,
+ color: this.data.color,
+ title: this.data.title,
+ modificationDate: this.data.modificationDate,
+ contents: this.data.contents
+ });
+ var parentLeft = parseFloat(parentElement.style.left);
+ var parentWidth = parseFloat(parentElement.style.width);
+ this.container.style.transformOrigin = -(parentLeft + parentWidth) + 'px -' + parentElement.style.top;
+ this.container.style.left = parentLeft + parentWidth + 'px';
+ this.container.appendChild(popup.render());
+ return this.container;
+ }
+ }]);
+
+ return PopupAnnotationElement;
+}(AnnotationElement);
+
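+// PopupElement builds the popup window (title, modification date, contents)
+// and wires up the hover/click handlers that show, hide and pin it.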
+var PopupElement =
+/*#__PURE__*/
+function () {
+ function PopupElement(parameters) {
+ _classCallCheck(this, PopupElement);
+
+ this.container = parameters.container;
+ this.trigger = parameters.trigger;
+ this.color = parameters.color;
+ this.title = parameters.title;
+ this.modificationDate = parameters.modificationDate;
+ this.contents = parameters.contents;
+ this.hideWrapper = parameters.hideWrapper || false;
+ this.pinned = false;
+ }
+
+ _createClass(PopupElement, [{
+ key: "render",
+ value: function render() {
+ var BACKGROUND_ENLIGHT = 0.7;
+ var wrapper = document.createElement('div');
+ wrapper.className = 'popupWrapper';
+ this.hideElement = this.hideWrapper ? wrapper : this.container;
+ this.hideElement.setAttribute('hidden', true);
+ var popup = document.createElement('div');
+ popup.className = 'popup';
+ var color = this.color;
+
+ if (color) {
+ var r = BACKGROUND_ENLIGHT * (255 - color[0]) + color[0];
+ var g = BACKGROUND_ENLIGHT * (255 - color[1]) + color[1];
+ var b = BACKGROUND_ENLIGHT * (255 - color[2]) + color[2];
+ popup.style.backgroundColor = _util.Util.makeCssRgb(r | 0, g | 0, b | 0);
+ }
+
+ var title = document.createElement('h1');
+ title.textContent = this.title;
+ popup.appendChild(title);
+
+ var dateObject = _display_utils.PDFDateString.toDateObject(this.modificationDate);
+
+ if (dateObject) {
+ var modificationDate = document.createElement('span');
+ modificationDate.textContent = '{{date}}, {{time}}';
+ modificationDate.dataset.l10nId = 'annotation_date_string';
+ modificationDate.dataset.l10nArgs = JSON.stringify({
+ date: dateObject.toLocaleDateString(),
+ time: dateObject.toLocaleTimeString()
+ });
+ popup.appendChild(modificationDate);
+ }
+
+ var contents = this._formatContents(this.contents);
+
+ popup.appendChild(contents);
+ this.trigger.addEventListener('click', this._toggle.bind(this));
+ this.trigger.addEventListener('mouseover', this._show.bind(this, false));
+ this.trigger.addEventListener('mouseout', this._hide.bind(this, false));
+ popup.addEventListener('click', this._hide.bind(this, true));
+ wrapper.appendChild(popup);
+ return wrapper;
+ }
+ }, {
+ key: "_formatContents",
+ value: function _formatContents(contents) {
+ var p = document.createElement('p');
+ var lines = contents.split(/(?:\r\n?|\n)/);
+
+ for (var i = 0, ii = lines.length; i < ii; ++i) {
+ var line = lines[i];
+ p.appendChild(document.createTextNode(line));
+
+ if (i < ii - 1) {
+ p.appendChild(document.createElement('br'));
+ }
+ }
+
+ return p;
+ }
+ }, {
+ key: "_toggle",
+ value: function _toggle() {
+ if (this.pinned) {
+ this._hide(true);
+ } else {
+ this._show(true);
+ }
+ }
+ }, {
+ key: "_show",
+ value: function _show() {
+ var pin = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
+
+ if (pin) {
+ this.pinned = true;
+ }
+
+ if (this.hideElement.hasAttribute('hidden')) {
+ this.hideElement.removeAttribute('hidden');
+ this.container.style.zIndex += 1;
+ }
+ }
+ }, {
+ key: "_hide",
+ value: function _hide() {
+ var unpin = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
+
+ if (unpin) {
+ this.pinned = false;
+ }
+
+ if (!this.hideElement.hasAttribute('hidden') && !this.pinned) {
+ this.hideElement.setAttribute('hidden', true);
+ this.container.style.zIndex -= 1;
+ }
+ }
+ }]);
+
+ return PopupElement;
+}();
+
+var FreeTextAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement5) {
+ _inherits(FreeTextAnnotationElement, _AnnotationElement5);
+
+ function FreeTextAnnotationElement(parameters) {
+ _classCallCheck(this, FreeTextAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ return _possibleConstructorReturn(this, _getPrototypeOf(FreeTextAnnotationElement).call(this, parameters, isRenderable, true));
+ }
+
+ _createClass(FreeTextAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'freeTextAnnotation';
+
+ if (!this.data.hasPopup) {
+ this._createPopup(this.container, null, this.data);
+ }
+
+ return this.container;
+ }
+ }]);
+
+ return FreeTextAnnotationElement;
+}(AnnotationElement);
+
+var LineAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement6) {
+ _inherits(LineAnnotationElement, _AnnotationElement6);
+
+ function LineAnnotationElement(parameters) {
+ _classCallCheck(this, LineAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ return _possibleConstructorReturn(this, _getPrototypeOf(LineAnnotationElement).call(this, parameters, isRenderable, true));
+ }
+
+ _createClass(LineAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'lineAnnotation';
+ var data = this.data;
+ var width = data.rect[2] - data.rect[0];
+ var height = data.rect[3] - data.rect[1];
+ var svg = this.svgFactory.create(width, height);
+ var line = this.svgFactory.createElement('svg:line');
+ line.setAttribute('x1', data.rect[2] - data.lineCoordinates[0]);
+ line.setAttribute('y1', data.rect[3] - data.lineCoordinates[1]);
+ line.setAttribute('x2', data.rect[2] - data.lineCoordinates[2]);
+ line.setAttribute('y2', data.rect[3] - data.lineCoordinates[3]);
+ line.setAttribute('stroke-width', data.borderStyle.width);
+ line.setAttribute('stroke', 'transparent');
+ svg.appendChild(line);
+ this.container.append(svg);
+
+ this._createPopup(this.container, line, data);
+
+ return this.container;
+ }
+ }]);
+
+ return LineAnnotationElement;
+}(AnnotationElement);
+
+var SquareAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement7) {
+ _inherits(SquareAnnotationElement, _AnnotationElement7);
+
+ function SquareAnnotationElement(parameters) {
+ _classCallCheck(this, SquareAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ return _possibleConstructorReturn(this, _getPrototypeOf(SquareAnnotationElement).call(this, parameters, isRenderable, true));
+ }
+
+ _createClass(SquareAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'squareAnnotation';
+ var data = this.data;
+ var width = data.rect[2] - data.rect[0];
+ var height = data.rect[3] - data.rect[1];
+ var svg = this.svgFactory.create(width, height);
+ var borderWidth = data.borderStyle.width;
+ var square = this.svgFactory.createElement('svg:rect');
+ square.setAttribute('x', borderWidth / 2);
+ square.setAttribute('y', borderWidth / 2);
+ square.setAttribute('width', width - borderWidth);
+ square.setAttribute('height', height - borderWidth);
+ square.setAttribute('stroke-width', borderWidth);
+ square.setAttribute('stroke', 'transparent');
+ square.setAttribute('fill', 'none');
+ svg.appendChild(square);
+ this.container.append(svg);
+
+ this._createPopup(this.container, square, data);
+
+ return this.container;
+ }
+ }]);
+
+ return SquareAnnotationElement;
+}(AnnotationElement);
+
+var CircleAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement8) {
+ _inherits(CircleAnnotationElement, _AnnotationElement8);
+
+ function CircleAnnotationElement(parameters) {
+ _classCallCheck(this, CircleAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ return _possibleConstructorReturn(this, _getPrototypeOf(CircleAnnotationElement).call(this, parameters, isRenderable, true));
+ }
+
+ _createClass(CircleAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'circleAnnotation';
+ var data = this.data;
+ var width = data.rect[2] - data.rect[0];
+ var height = data.rect[3] - data.rect[1];
+ var svg = this.svgFactory.create(width, height);
+ var borderWidth = data.borderStyle.width;
+ var circle = this.svgFactory.createElement('svg:ellipse');
+ circle.setAttribute('cx', width / 2);
+ circle.setAttribute('cy', height / 2);
+ circle.setAttribute('rx', width / 2 - borderWidth / 2);
+ circle.setAttribute('ry', height / 2 - borderWidth / 2);
+ circle.setAttribute('stroke-width', borderWidth);
+ circle.setAttribute('stroke', 'transparent');
+ circle.setAttribute('fill', 'none');
+ svg.appendChild(circle);
+ this.container.append(svg);
+
+ this._createPopup(this.container, circle, data);
+
+ return this.container;
+ }
+ }]);
+
+ return CircleAnnotationElement;
+}(AnnotationElement);
+
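+// Polyline annotations translate the vertex list into an SVG polyline in the
+// annotation's coordinate space; PolygonAnnotationElement below reuses the same
+// logic with 'svg:polygon'.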
+var PolylineAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement9) {
+ _inherits(PolylineAnnotationElement, _AnnotationElement9);
+
+ function PolylineAnnotationElement(parameters) {
+ var _this3;
+
+ _classCallCheck(this, PolylineAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ _this3 = _possibleConstructorReturn(this, _getPrototypeOf(PolylineAnnotationElement).call(this, parameters, isRenderable, true));
+ _this3.containerClassName = 'polylineAnnotation';
+ _this3.svgElementName = 'svg:polyline';
+ return _this3;
+ }
+
+ _createClass(PolylineAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = this.containerClassName;
+ var data = this.data;
+ var width = data.rect[2] - data.rect[0];
+ var height = data.rect[3] - data.rect[1];
+ var svg = this.svgFactory.create(width, height);
+ var vertices = data.vertices;
+ var points = [];
+
+ for (var i = 0, ii = vertices.length; i < ii; i++) {
+ var x = vertices[i].x - data.rect[0];
+ var y = data.rect[3] - vertices[i].y;
+ points.push(x + ',' + y);
+ }
+
+ points = points.join(' ');
+ var borderWidth = data.borderStyle.width;
+ var polyline = this.svgFactory.createElement(this.svgElementName);
+ polyline.setAttribute('points', points);
+ polyline.setAttribute('stroke-width', borderWidth);
+ polyline.setAttribute('stroke', 'transparent');
+ polyline.setAttribute('fill', 'none');
+ svg.appendChild(polyline);
+ this.container.append(svg);
+
+ this._createPopup(this.container, polyline, data);
+
+ return this.container;
+ }
+ }]);
+
+ return PolylineAnnotationElement;
+}(AnnotationElement);
+
+var PolygonAnnotationElement =
+/*#__PURE__*/
+function (_PolylineAnnotationEl) {
+ _inherits(PolygonAnnotationElement, _PolylineAnnotationEl);
+
+ function PolygonAnnotationElement(parameters) {
+ var _this4;
+
+ _classCallCheck(this, PolygonAnnotationElement);
+
+ _this4 = _possibleConstructorReturn(this, _getPrototypeOf(PolygonAnnotationElement).call(this, parameters));
+ _this4.containerClassName = 'polygonAnnotation';
+ _this4.svgElementName = 'svg:polygon';
+ return _this4;
+ }
+
+ return PolygonAnnotationElement;
+}(PolylineAnnotationElement);
+
+var CaretAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement10) {
+ _inherits(CaretAnnotationElement, _AnnotationElement10);
+
+ function CaretAnnotationElement(parameters) {
+ _classCallCheck(this, CaretAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ return _possibleConstructorReturn(this, _getPrototypeOf(CaretAnnotationElement).call(this, parameters, isRenderable, true));
+ }
+
+ _createClass(CaretAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'caretAnnotation';
+
+ if (!this.data.hasPopup) {
+ this._createPopup(this.container, null, this.data);
+ }
+
+ return this.container;
+ }
+ }]);
+
+ return CaretAnnotationElement;
+}(AnnotationElement);
+
+var InkAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement11) {
+ _inherits(InkAnnotationElement, _AnnotationElement11);
+
+ function InkAnnotationElement(parameters) {
+ var _this5;
+
+ _classCallCheck(this, InkAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ _this5 = _possibleConstructorReturn(this, _getPrototypeOf(InkAnnotationElement).call(this, parameters, isRenderable, true));
+ _this5.containerClassName = 'inkAnnotation';
+ _this5.svgElementName = 'svg:polyline';
+ return _this5;
+ }
+
+ _createClass(InkAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = this.containerClassName;
+ var data = this.data;
+ var width = data.rect[2] - data.rect[0];
+ var height = data.rect[3] - data.rect[1];
+ var svg = this.svgFactory.create(width, height);
+ var inkLists = data.inkLists;
+
+ for (var i = 0, ii = inkLists.length; i < ii; i++) {
+ var inkList = inkLists[i];
+ var points = [];
+
+ for (var j = 0, jj = inkList.length; j < jj; j++) {
+ var x = inkList[j].x - data.rect[0];
+ var y = data.rect[3] - inkList[j].y;
+ points.push(x + ',' + y);
+ }
+
+ points = points.join(' ');
+ var borderWidth = data.borderStyle.width;
+ var polyline = this.svgFactory.createElement(this.svgElementName);
+ polyline.setAttribute('points', points);
+ polyline.setAttribute('stroke-width', borderWidth);
+ polyline.setAttribute('stroke', 'transparent');
+ polyline.setAttribute('fill', 'none');
+
+ this._createPopup(this.container, polyline, data);
+
+ svg.appendChild(polyline);
+ }
+
+ this.container.append(svg);
+ return this.container;
+ }
+ }]);
+
+ return InkAnnotationElement;
+}(AnnotationElement);
+
+var HighlightAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement12) {
+ _inherits(HighlightAnnotationElement, _AnnotationElement12);
+
+ function HighlightAnnotationElement(parameters) {
+ _classCallCheck(this, HighlightAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ return _possibleConstructorReturn(this, _getPrototypeOf(HighlightAnnotationElement).call(this, parameters, isRenderable, true));
+ }
+
+ _createClass(HighlightAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'highlightAnnotation';
+
+ if (!this.data.hasPopup) {
+ this._createPopup(this.container, null, this.data);
+ }
+
+ return this.container;
+ }
+ }]);
+
+ return HighlightAnnotationElement;
+}(AnnotationElement);
+
+var UnderlineAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement13) {
+ _inherits(UnderlineAnnotationElement, _AnnotationElement13);
+
+ function UnderlineAnnotationElement(parameters) {
+ _classCallCheck(this, UnderlineAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ return _possibleConstructorReturn(this, _getPrototypeOf(UnderlineAnnotationElement).call(this, parameters, isRenderable, true));
+ }
+
+ _createClass(UnderlineAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'underlineAnnotation';
+
+ if (!this.data.hasPopup) {
+ this._createPopup(this.container, null, this.data);
+ }
+
+ return this.container;
+ }
+ }]);
+
+ return UnderlineAnnotationElement;
+}(AnnotationElement);
+
+var SquigglyAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement14) {
+ _inherits(SquigglyAnnotationElement, _AnnotationElement14);
+
+ function SquigglyAnnotationElement(parameters) {
+ _classCallCheck(this, SquigglyAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ return _possibleConstructorReturn(this, _getPrototypeOf(SquigglyAnnotationElement).call(this, parameters, isRenderable, true));
+ }
+
+ _createClass(SquigglyAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'squigglyAnnotation';
+
+ if (!this.data.hasPopup) {
+ this._createPopup(this.container, null, this.data);
+ }
+
+ return this.container;
+ }
+ }]);
+
+ return SquigglyAnnotationElement;
+}(AnnotationElement);
+
+var StrikeOutAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement15) {
+ _inherits(StrikeOutAnnotationElement, _AnnotationElement15);
+
+ function StrikeOutAnnotationElement(parameters) {
+ _classCallCheck(this, StrikeOutAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ return _possibleConstructorReturn(this, _getPrototypeOf(StrikeOutAnnotationElement).call(this, parameters, isRenderable, true));
+ }
+
+ _createClass(StrikeOutAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'strikeoutAnnotation';
+
+ if (!this.data.hasPopup) {
+ this._createPopup(this.container, null, this.data);
+ }
+
+ return this.container;
+ }
+ }]);
+
+ return StrikeOutAnnotationElement;
+}(AnnotationElement);
+
+var StampAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement16) {
+ _inherits(StampAnnotationElement, _AnnotationElement16);
+
+ function StampAnnotationElement(parameters) {
+ _classCallCheck(this, StampAnnotationElement);
+
+ var isRenderable = !!(parameters.data.hasPopup || parameters.data.title || parameters.data.contents);
+ return _possibleConstructorReturn(this, _getPrototypeOf(StampAnnotationElement).call(this, parameters, isRenderable, true));
+ }
+
+ _createClass(StampAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'stampAnnotation';
+
+ if (!this.data.hasPopup) {
+ this._createPopup(this.container, null, this.data);
+ }
+
+ return this.container;
+ }
+ }]);
+
+ return StampAnnotationElement;
+}(AnnotationElement);
+
+var FileAttachmentAnnotationElement =
+/*#__PURE__*/
+function (_AnnotationElement17) {
+ _inherits(FileAttachmentAnnotationElement, _AnnotationElement17);
+
+ function FileAttachmentAnnotationElement(parameters) {
+ var _this6;
+
+ _classCallCheck(this, FileAttachmentAnnotationElement);
+
+ _this6 = _possibleConstructorReturn(this, _getPrototypeOf(FileAttachmentAnnotationElement).call(this, parameters, true));
+ var _this6$data$file = _this6.data.file,
+ filename = _this6$data$file.filename,
+ content = _this6$data$file.content;
+ _this6.filename = (0, _display_utils.getFilenameFromUrl)(filename);
+ _this6.content = content;
+
+ if (_this6.linkService.eventBus) {
+ _this6.linkService.eventBus.dispatch('fileattachmentannotation', {
+ source: _assertThisInitialized(_this6),
+ id: (0, _util.stringToPDFString)(filename),
+ filename: filename,
+ content: content
+ });
+ }
+
+ return _this6;
+ }
+
+ _createClass(FileAttachmentAnnotationElement, [{
+ key: "render",
+ value: function render() {
+ this.container.className = 'fileAttachmentAnnotation';
+ var trigger = document.createElement('div');
+ trigger.style.height = this.container.style.height;
+ trigger.style.width = this.container.style.width;
+ trigger.addEventListener('dblclick', this._download.bind(this));
+
+ if (!this.data.hasPopup && (this.data.title || this.data.contents)) {
+ this._createPopup(this.container, trigger, this.data);
+ }
+
+ this.container.appendChild(trigger);
+ return this.container;
+ }
+ }, {
+ key: "_download",
+ value: function _download() {
+ if (!this.downloadManager) {
+ (0, _util.warn)('Download cannot be started due to unavailable download manager');
+ return;
+ }
+
+ this.downloadManager.downloadData(this.content, this.filename, '');
+ }
+ }]);
+
+ return FileAttachmentAnnotationElement;
+}(AnnotationElement);
+
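+// AnnotationLayer is the public entry point: render() creates an element for
+// every renderable annotation and appends it to the layer div, while update()
+// only re-applies the viewport transform to the existing elements.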
+var AnnotationLayer =
+/*#__PURE__*/
+function () {
+ function AnnotationLayer() {
+ _classCallCheck(this, AnnotationLayer);
+ }
+
+ _createClass(AnnotationLayer, null, [{
+ key: "render",
+ value: function render(parameters) {
+ for (var i = 0, ii = parameters.annotations.length; i < ii; i++) {
+ var data = parameters.annotations[i];
+
+ if (!data) {
+ continue;
+ }
+
+ var element = AnnotationElementFactory.create({
+ data: data,
+ layer: parameters.div,
+ page: parameters.page,
+ viewport: parameters.viewport,
+ linkService: parameters.linkService,
+ downloadManager: parameters.downloadManager,
+ imageResourcesPath: parameters.imageResourcesPath || '',
+ renderInteractiveForms: parameters.renderInteractiveForms || false,
+ svgFactory: new _display_utils.DOMSVGFactory()
+ });
+
+ if (element.isRenderable) {
+ parameters.div.appendChild(element.render());
+ }
+ }
+ }
+ }, {
+ key: "update",
+ value: function update(parameters) {
+ for (var i = 0, ii = parameters.annotations.length; i < ii; i++) {
+ var data = parameters.annotations[i];
+ var element = parameters.div.querySelector('[data-annotation-id="' + data.id + '"]');
+
+ if (element) {
+ element.style.transform = 'matrix(' + parameters.viewport.transform.join(',') + ')';
+ }
+ }
+
+ parameters.div.removeAttribute('hidden');
+ }
+ }]);
+
+ return AnnotationLayer;
+}();
+
+exports.AnnotationLayer = AnnotationLayer;
+
+/***/ }),
+/* 164 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.SVGGraphics = void 0;
+
+var _util = __w_pdfjs_require__(1);
+
+var _display_utils = __w_pdfjs_require__(151);
+
+var _is_node = _interopRequireDefault(__w_pdfjs_require__(4));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
+function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread(); }
+
+function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance"); }
+
+function _iterableToArray(iter) { if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter); }
+
+function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } }
+
+function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
+
+function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
+
+function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
+
+function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var SVGGraphics = function SVGGraphics() {
+ throw new Error('Not implemented: SVGGraphics');
+};
+
+exports.SVGGraphics = SVGGraphics;
+{
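+  // opListToTree turns the flat operator list into a tree: 'save' opens a
+  // synthetic group node (fnId 92) and 'restore' closes it again.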
+ var opListToTree = function opListToTree(opList) {
+ var opTree = [];
+ var tmp = [];
+ var _iteratorNormalCompletion = true;
+ var _didIteratorError = false;
+ var _iteratorError = undefined;
+
+ try {
+ for (var _iterator = opList[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
+ var opListElement = _step.value;
+
+ if (opListElement.fn === 'save') {
+ opTree.push({
+ 'fnId': 92,
+ 'fn': 'group',
+ 'items': []
+ });
+ tmp.push(opTree);
+ opTree = opTree[opTree.length - 1].items;
+ continue;
+ }
+
+ if (opListElement.fn === 'restore') {
+ opTree = tmp.pop();
+ } else {
+ opTree.push(opListElement);
+ }
+ }
+ } catch (err) {
+ _didIteratorError = true;
+ _iteratorError = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion && _iterator["return"] != null) {
+ _iterator["return"]();
+ }
+ } finally {
+ if (_didIteratorError) {
+ throw _iteratorError;
+ }
+ }
+ }
+
+ return opTree;
+ };
+
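+  // pf formats a number for SVG attributes, trimming trailing zeros from its
+  // fixed-point representation.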
+ var pf = function pf(value) {
+ if (Number.isInteger(value)) {
+ return value.toString();
+ }
+
+ var s = value.toFixed(10);
+ var i = s.length - 1;
+
+ if (s[i] !== '0') {
+ return s;
+ }
+
+ do {
+ i--;
+ } while (s[i] === '0');
+
+ return s.substring(0, s[i] === '.' ? i : i + 1);
+ };
+
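+  // pm serializes a transform matrix as the shortest equivalent SVG transform:
+  // scale(), rotate(), translate() or a full matrix().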
+ var pm = function pm(m) {
+ if (m[4] === 0 && m[5] === 0) {
+ if (m[1] === 0 && m[2] === 0) {
+ if (m[0] === 1 && m[3] === 1) {
+ return '';
+ }
+
+ return "scale(".concat(pf(m[0]), " ").concat(pf(m[3]), ")");
+ }
+
+ if (m[0] === m[3] && m[1] === -m[2]) {
+ var a = Math.acos(m[0]) * 180 / Math.PI;
+ return "rotate(".concat(pf(a), ")");
+ }
+ } else {
+ if (m[0] === 1 && m[1] === 0 && m[2] === 0 && m[3] === 1) {
+ return "translate(".concat(pf(m[4]), " ").concat(pf(m[5]), ")");
+ }
+ }
+
+ return "matrix(".concat(pf(m[0]), " ").concat(pf(m[1]), " ").concat(pf(m[2]), " ").concat(pf(m[3]), " ").concat(pf(m[4]), " ") + "".concat(pf(m[5]), ")");
+ };
+
+ var SVG_DEFAULTS = {
+ fontStyle: 'normal',
+ fontWeight: 'normal',
+ fillColor: '#000000'
+ };
+ var XML_NS = 'http://www.w3.org/XML/1998/namespace';
+ var XLINK_NS = 'http://www.w3.org/1999/xlink';
+ var LINE_CAP_STYLES = ['butt', 'round', 'square'];
+ var LINE_JOIN_STYLES = ['miter', 'round', 'bevel'];
+
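+  // convertImgDataToPng encodes raw image data as a PNG (header plus IHDR,
+  // IDAT and IEND chunks built by hand) and returns it via createObjectURL.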
+ var convertImgDataToPng = function () {
+ var PNG_HEADER = new Uint8Array([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]);
+ var CHUNK_WRAPPER_SIZE = 12;
+ var crcTable = new Int32Array(256);
+
+ for (var i = 0; i < 256; i++) {
+ var c = i;
+
+ for (var h = 0; h < 8; h++) {
+ if (c & 1) {
+ c = 0xedB88320 ^ c >> 1 & 0x7fffffff;
+ } else {
+ c = c >> 1 & 0x7fffffff;
+ }
+ }
+
+ crcTable[i] = c;
+ }
+
+ function crc32(data, start, end) {
+ var crc = -1;
+
+ for (var _i = start; _i < end; _i++) {
+ var a = (crc ^ data[_i]) & 0xff;
+ var b = crcTable[a];
+ crc = crc >>> 8 ^ b;
+ }
+
+ return crc ^ -1;
+ }
+
+ function writePngChunk(type, body, data, offset) {
+ var p = offset;
+ var len = body.length;
+ data[p] = len >> 24 & 0xff;
+ data[p + 1] = len >> 16 & 0xff;
+ data[p + 2] = len >> 8 & 0xff;
+ data[p + 3] = len & 0xff;
+ p += 4;
+ data[p] = type.charCodeAt(0) & 0xff;
+ data[p + 1] = type.charCodeAt(1) & 0xff;
+ data[p + 2] = type.charCodeAt(2) & 0xff;
+ data[p + 3] = type.charCodeAt(3) & 0xff;
+ p += 4;
+ data.set(body, p);
+ p += body.length;
+ var crc = crc32(data, offset + 4, p);
+ data[p] = crc >> 24 & 0xff;
+ data[p + 1] = crc >> 16 & 0xff;
+ data[p + 2] = crc >> 8 & 0xff;
+ data[p + 3] = crc & 0xff;
+ }
+
+ function adler32(data, start, end) {
+ var a = 1;
+ var b = 0;
+
+ for (var _i2 = start; _i2 < end; ++_i2) {
+ a = (a + (data[_i2] & 0xff)) % 65521;
+ b = (b + a) % 65521;
+ }
+
+ return b << 16 | a;
+ }
+
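+    // On Node.js the scanlines are compressed with zlib; in the browser, or if
+    // zlib fails, they are stored uncompressed instead.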
+ function deflateSync(literals) {
+ if (!(0, _is_node["default"])()) {
+ return deflateSyncUncompressed(literals);
+ }
+
+ try {
+ var input;
+
+ if (parseInt(process.versions.node) >= 8) {
+ input = literals;
+ } else {
+ input = new Buffer(literals);
+ }
+
+ var output = require('zlib').deflateSync(input, {
+ level: 9
+ });
+
+ return output instanceof Uint8Array ? output : new Uint8Array(output);
+ } catch (e) {
+ (0, _util.warn)('Not compressing PNG because zlib.deflateSync is unavailable: ' + e);
+ }
+
+ return deflateSyncUncompressed(literals);
+ }
+
+ function deflateSyncUncompressed(literals) {
+ var len = literals.length;
+ var maxBlockLength = 0xFFFF;
+ var deflateBlocks = Math.ceil(len / maxBlockLength);
+ var idat = new Uint8Array(2 + len + deflateBlocks * 5 + 4);
+ var pi = 0;
+ idat[pi++] = 0x78;
+ idat[pi++] = 0x9c;
+ var pos = 0;
+
+ while (len > maxBlockLength) {
+ idat[pi++] = 0x00;
+ idat[pi++] = 0xff;
+ idat[pi++] = 0xff;
+ idat[pi++] = 0x00;
+ idat[pi++] = 0x00;
+ idat.set(literals.subarray(pos, pos + maxBlockLength), pi);
+ pi += maxBlockLength;
+ pos += maxBlockLength;
+ len -= maxBlockLength;
+ }
+
+ idat[pi++] = 0x01;
+ idat[pi++] = len & 0xff;
+ idat[pi++] = len >> 8 & 0xff;
+ idat[pi++] = ~len & 0xffff & 0xff;
+ idat[pi++] = (~len & 0xffff) >> 8 & 0xff;
+ idat.set(literals.subarray(pos), pi);
+ pi += literals.length - pos;
+ var adler = adler32(literals, 0, literals.length);
+ idat[pi++] = adler >> 24 & 0xff;
+ idat[pi++] = adler >> 16 & 0xff;
+ idat[pi++] = adler >> 8 & 0xff;
+ idat[pi++] = adler & 0xff;
+ return idat;
+ }
+
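+    // encode builds the filtered scanlines (one leading filter byte of 0 per
+    // row, inverting 1bpp masks) and assembles the final PNG byte stream.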
+ function encode(imgData, kind, forceDataSchema, isMask) {
+ var width = imgData.width;
+ var height = imgData.height;
+ var bitDepth, colorType, lineSize;
+ var bytes = imgData.data;
+
+ switch (kind) {
+ case _util.ImageKind.GRAYSCALE_1BPP:
+ colorType = 0;
+ bitDepth = 1;
+ lineSize = width + 7 >> 3;
+ break;
+
+ case _util.ImageKind.RGB_24BPP:
+ colorType = 2;
+ bitDepth = 8;
+ lineSize = width * 3;
+ break;
+
+ case _util.ImageKind.RGBA_32BPP:
+ colorType = 6;
+ bitDepth = 8;
+ lineSize = width * 4;
+ break;
+
+ default:
+ throw new Error('invalid format');
+ }
+
+ var literals = new Uint8Array((1 + lineSize) * height);
+ var offsetLiterals = 0,
+ offsetBytes = 0;
+
+ for (var y = 0; y < height; ++y) {
+ literals[offsetLiterals++] = 0;
+ literals.set(bytes.subarray(offsetBytes, offsetBytes + lineSize), offsetLiterals);
+ offsetBytes += lineSize;
+ offsetLiterals += lineSize;
+ }
+
+ if (kind === _util.ImageKind.GRAYSCALE_1BPP && isMask) {
+ offsetLiterals = 0;
+
+ for (var _y = 0; _y < height; _y++) {
+ offsetLiterals++;
+
+ for (var _i3 = 0; _i3 < lineSize; _i3++) {
+ literals[offsetLiterals++] ^= 0xFF;
+ }
+ }
+ }
+
+ var ihdr = new Uint8Array([width >> 24 & 0xff, width >> 16 & 0xff, width >> 8 & 0xff, width & 0xff, height >> 24 & 0xff, height >> 16 & 0xff, height >> 8 & 0xff, height & 0xff, bitDepth, colorType, 0x00, 0x00, 0x00]);
+ var idat = deflateSync(literals);
+ var pngLength = PNG_HEADER.length + CHUNK_WRAPPER_SIZE * 3 + ihdr.length + idat.length;
+ var data = new Uint8Array(pngLength);
+ var offset = 0;
+ data.set(PNG_HEADER, offset);
+ offset += PNG_HEADER.length;
+ writePngChunk('IHDR', ihdr, data, offset);
+ offset += CHUNK_WRAPPER_SIZE + ihdr.length;
+ writePngChunk('IDAT', idat, data, offset);
+ offset += CHUNK_WRAPPER_SIZE + idat.length;
+ writePngChunk('IEND', new Uint8Array(0), data, offset);
+ return (0, _util.createObjectURL)(data, 'image/png', forceDataSchema);
+ }
+
+ return function convertImgDataToPng(imgData, forceDataSchema, isMask) {
+ var kind = imgData.kind === undefined ? _util.ImageKind.GRAYSCALE_1BPP : imgData.kind;
+ return encode(imgData, kind, forceDataSchema, isMask);
+ };
+ }();
+
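+  // SVGExtraState mirrors the graphics state (text matrices, colors, line
+  // styles, clipping info) for the SVG back-end; clone() backs save()/restore().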
+ var SVGExtraState =
+ /*#__PURE__*/
+ function () {
+ function SVGExtraState() {
+ _classCallCheck(this, SVGExtraState);
+
+ this.fontSizeScale = 1;
+ this.fontWeight = SVG_DEFAULTS.fontWeight;
+ this.fontSize = 0;
+ this.textMatrix = _util.IDENTITY_MATRIX;
+ this.fontMatrix = _util.FONT_IDENTITY_MATRIX;
+ this.leading = 0;
+ this.textRenderingMode = _util.TextRenderingMode.FILL;
+ this.textMatrixScale = 1;
+ this.x = 0;
+ this.y = 0;
+ this.lineX = 0;
+ this.lineY = 0;
+ this.charSpacing = 0;
+ this.wordSpacing = 0;
+ this.textHScale = 1;
+ this.textRise = 0;
+ this.fillColor = SVG_DEFAULTS.fillColor;
+ this.strokeColor = '#000000';
+ this.fillAlpha = 1;
+ this.strokeAlpha = 1;
+ this.lineWidth = 1;
+ this.lineJoin = '';
+ this.lineCap = '';
+ this.miterLimit = 0;
+ this.dashArray = [];
+ this.dashPhase = 0;
+ this.dependencies = [];
+ this.activeClipUrl = null;
+ this.clipGroup = null;
+ this.maskId = '';
+ }
+
+ _createClass(SVGExtraState, [{
+ key: "clone",
+ value: function clone() {
+ return Object.create(this);
+ }
+ }, {
+ key: "setCurrentPoint",
+ value: function setCurrentPoint(x, y) {
+ this.x = x;
+ this.y = y;
+ }
+ }]);
+
+ return SVGExtraState;
+ }();
+
+ var clipCount = 0;
+ var maskCount = 0;
+ var shadingCount = 0;
+
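+  // Replace the stub above with the full SVGGraphics implementation, which
+  // replays the operator tree produced by convertOpList() as SVG elements.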
+ exports.SVGGraphics = SVGGraphics =
+ /*#__PURE__*/
+ function () {
+ function SVGGraphics(commonObjs, objs, forceDataSchema) {
+ _classCallCheck(this, SVGGraphics);
+
+ this.svgFactory = new _display_utils.DOMSVGFactory();
+ this.current = new SVGExtraState();
+ this.transformMatrix = _util.IDENTITY_MATRIX;
+ this.transformStack = [];
+ this.extraStack = [];
+ this.commonObjs = commonObjs;
+ this.objs = objs;
+ this.pendingClip = null;
+ this.pendingEOFill = false;
+ this.embedFonts = false;
+ this.embeddedFonts = Object.create(null);
+ this.cssStyle = null;
+ this.forceDataSchema = !!forceDataSchema;
+ this._operatorIdMapping = [];
+
+ for (var op in _util.OPS) {
+ this._operatorIdMapping[_util.OPS[op]] = op;
+ }
+ }
+
+ _createClass(SVGGraphics, [{
+ key: "save",
+ value: function save() {
+ this.transformStack.push(this.transformMatrix);
+ var old = this.current;
+ this.extraStack.push(old);
+ this.current = old.clone();
+ }
+ }, {
+ key: "restore",
+ value: function restore() {
+ this.transformMatrix = this.transformStack.pop();
+ this.current = this.extraStack.pop();
+ this.pendingClip = null;
+ this.tgrp = null;
+ }
+ }, {
+ key: "group",
+ value: function group(items) {
+ this.save();
+ this.executeOpTree(items);
+ this.restore();
+ }
+ }, {
+ key: "loadDependencies",
+ value: function loadDependencies(operatorList) {
+ var _this = this;
+
+ var fnArray = operatorList.fnArray;
+ var argsArray = operatorList.argsArray;
+
+ for (var i = 0, ii = fnArray.length; i < ii; i++) {
+ if (fnArray[i] !== _util.OPS.dependency) {
+ continue;
+ }
+
+ var _iteratorNormalCompletion2 = true;
+ var _didIteratorError2 = false;
+ var _iteratorError2 = undefined;
+
+ try {
+ var _loop = function _loop() {
+ var obj = _step2.value;
+ var objsPool = obj.startsWith('g_') ? _this.commonObjs : _this.objs;
+ var promise = new Promise(function (resolve) {
+ objsPool.get(obj, resolve);
+ });
+
+ _this.current.dependencies.push(promise);
+ };
+
+ for (var _iterator2 = argsArray[i][Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
+ _loop();
+ }
+ } catch (err) {
+ _didIteratorError2 = true;
+ _iteratorError2 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) {
+ _iterator2["return"]();
+ }
+ } finally {
+ if (_didIteratorError2) {
+ throw _iteratorError2;
+ }
+ }
+ }
+ }
+
+ return Promise.all(this.current.dependencies);
+ }
+ }, {
+ key: "transform",
+ value: function transform(a, b, c, d, e, f) {
+ var transformMatrix = [a, b, c, d, e, f];
+ this.transformMatrix = _util.Util.transform(this.transformMatrix, transformMatrix);
+ this.tgrp = null;
+ }
+ }, {
+ key: "getSVG",
+ value: function getSVG(operatorList, viewport) {
+ var _this2 = this;
+
+ this.viewport = viewport;
+
+ var svgElement = this._initialize(viewport);
+
+ return this.loadDependencies(operatorList).then(function () {
+ _this2.transformMatrix = _util.IDENTITY_MATRIX;
+
+ _this2.executeOpTree(_this2.convertOpList(operatorList));
+
+ return svgElement;
+ });
+ }
+ }, {
+ key: "convertOpList",
+ value: function convertOpList(operatorList) {
+ var operatorIdMapping = this._operatorIdMapping;
+ var argsArray = operatorList.argsArray;
+ var fnArray = operatorList.fnArray;
+ var opList = [];
+
+ for (var i = 0, ii = fnArray.length; i < ii; i++) {
+ var fnId = fnArray[i];
+ opList.push({
+ 'fnId': fnId,
+ 'fn': operatorIdMapping[fnId],
+ 'args': argsArray[i]
+ });
+ }
+
+ return opListToTree(opList);
+ }
+ }, {
+ key: "executeOpTree",
+ value: function executeOpTree(opTree) {
+ var _iteratorNormalCompletion3 = true;
+ var _didIteratorError3 = false;
+ var _iteratorError3 = undefined;
+
+ try {
+ for (var _iterator3 = opTree[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
+ var opTreeElement = _step3.value;
+ var fn = opTreeElement.fn;
+ var fnId = opTreeElement.fnId;
+ var args = opTreeElement.args;
+
+ switch (fnId | 0) {
+ case _util.OPS.beginText:
+ this.beginText();
+ break;
+
+ case _util.OPS.dependency:
+ break;
+
+ case _util.OPS.setLeading:
+ this.setLeading(args);
+ break;
+
+ case _util.OPS.setLeadingMoveText:
+ this.setLeadingMoveText(args[0], args[1]);
+ break;
+
+ case _util.OPS.setFont:
+ this.setFont(args);
+ break;
+
+ case _util.OPS.showText:
+ this.showText(args[0]);
+ break;
+
+ case _util.OPS.showSpacedText:
+ this.showText(args[0]);
+ break;
+
+ case _util.OPS.endText:
+ this.endText();
+ break;
+
+ case _util.OPS.moveText:
+ this.moveText(args[0], args[1]);
+ break;
+
+ case _util.OPS.setCharSpacing:
+ this.setCharSpacing(args[0]);
+ break;
+
+ case _util.OPS.setWordSpacing:
+ this.setWordSpacing(args[0]);
+ break;
+
+ case _util.OPS.setHScale:
+ this.setHScale(args[0]);
+ break;
+
+ case _util.OPS.setTextMatrix:
+ this.setTextMatrix(args[0], args[1], args[2], args[3], args[4], args[5]);
+ break;
+
+ case _util.OPS.setTextRise:
+ this.setTextRise(args[0]);
+ break;
+
+ case _util.OPS.setTextRenderingMode:
+ this.setTextRenderingMode(args[0]);
+ break;
+
+ case _util.OPS.setLineWidth:
+ this.setLineWidth(args[0]);
+ break;
+
+ case _util.OPS.setLineJoin:
+ this.setLineJoin(args[0]);
+ break;
+
+ case _util.OPS.setLineCap:
+ this.setLineCap(args[0]);
+ break;
+
+ case _util.OPS.setMiterLimit:
+ this.setMiterLimit(args[0]);
+ break;
+
+ case _util.OPS.setFillRGBColor:
+ this.setFillRGBColor(args[0], args[1], args[2]);
+ break;
+
+ case _util.OPS.setStrokeRGBColor:
+ this.setStrokeRGBColor(args[0], args[1], args[2]);
+ break;
+
+ case _util.OPS.setStrokeColorN:
+ this.setStrokeColorN(args);
+ break;
+
+ case _util.OPS.setFillColorN:
+ this.setFillColorN(args);
+ break;
+
+ case _util.OPS.shadingFill:
+ this.shadingFill(args[0]);
+ break;
+
+ case _util.OPS.setDash:
+ this.setDash(args[0], args[1]);
+ break;
+
+ case _util.OPS.setRenderingIntent:
+ this.setRenderingIntent(args[0]);
+ break;
+
+ case _util.OPS.setFlatness:
+ this.setFlatness(args[0]);
+ break;
+
+ case _util.OPS.setGState:
+ this.setGState(args[0]);
+ break;
+
+ case _util.OPS.fill:
+ this.fill();
+ break;
+
+ case _util.OPS.eoFill:
+ this.eoFill();
+ break;
+
+ case _util.OPS.stroke:
+ this.stroke();
+ break;
+
+ case _util.OPS.fillStroke:
+ this.fillStroke();
+ break;
+
+ case _util.OPS.eoFillStroke:
+ this.eoFillStroke();
+ break;
+
+ case _util.OPS.clip:
+ this.clip('nonzero');
+ break;
+
+ case _util.OPS.eoClip:
+ this.clip('evenodd');
+ break;
+
+ case _util.OPS.paintSolidColorImageMask:
+ this.paintSolidColorImageMask();
+ break;
+
+ case _util.OPS.paintJpegXObject:
+ this.paintJpegXObject(args[0], args[1], args[2]);
+ break;
+
+ case _util.OPS.paintImageXObject:
+ this.paintImageXObject(args[0]);
+ break;
+
+ case _util.OPS.paintInlineImageXObject:
+ this.paintInlineImageXObject(args[0]);
+ break;
+
+ case _util.OPS.paintImageMaskXObject:
+ this.paintImageMaskXObject(args[0]);
+ break;
+
+ case _util.OPS.paintFormXObjectBegin:
+ this.paintFormXObjectBegin(args[0], args[1]);
+ break;
+
+ case _util.OPS.paintFormXObjectEnd:
+ this.paintFormXObjectEnd();
+ break;
+
+ case _util.OPS.closePath:
+ this.closePath();
+ break;
+
+ case _util.OPS.closeStroke:
+ this.closeStroke();
+ break;
+
+ case _util.OPS.closeFillStroke:
+ this.closeFillStroke();
+ break;
+
+ case _util.OPS.closeEOFillStroke:
+ this.closeEOFillStroke();
+ break;
+
+ case _util.OPS.nextLine:
+ this.nextLine();
+ break;
+
+ case _util.OPS.transform:
+ this.transform(args[0], args[1], args[2], args[3], args[4], args[5]);
+ break;
+
+ case _util.OPS.constructPath:
+ this.constructPath(args[0], args[1]);
+ break;
+
+ case _util.OPS.endPath:
+ this.endPath();
+ break;
+
+ case 92:
+ this.group(opTreeElement.items);
+ break;
+
+ default:
+ (0, _util.warn)("Unimplemented operator ".concat(fn));
+ break;
+ }
+ }
+ } catch (err) {
+ _didIteratorError3 = true;
+ _iteratorError3 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion3 && _iterator3["return"] != null) {
+ _iterator3["return"]();
+ }
+ } finally {
+ if (_didIteratorError3) {
+ throw _iteratorError3;
+ }
+ }
+ }
+ }
+ }, {
+ key: "setWordSpacing",
+ value: function setWordSpacing(wordSpacing) {
+ this.current.wordSpacing = wordSpacing;
+ }
+ }, {
+ key: "setCharSpacing",
+ value: function setCharSpacing(charSpacing) {
+ this.current.charSpacing = charSpacing;
+ }
+ }, {
+ key: "nextLine",
+ value: function nextLine() {
+ this.moveText(0, this.current.leading);
+ }
+ }, {
+ key: "setTextMatrix",
+ value: function setTextMatrix(a, b, c, d, e, f) {
+ var current = this.current;
+ current.textMatrix = current.lineMatrix = [a, b, c, d, e, f];
+ current.textMatrixScale = Math.sqrt(a * a + b * b);
+ current.x = current.lineX = 0;
+ current.y = current.lineY = 0;
+ current.xcoords = [];
+ current.tspan = this.svgFactory.createElement('svg:tspan');
+ current.tspan.setAttributeNS(null, 'font-family', current.fontFamily);
+ current.tspan.setAttributeNS(null, 'font-size', "".concat(pf(current.fontSize), "px"));
+ current.tspan.setAttributeNS(null, 'y', pf(-current.y));
+ current.txtElement = this.svgFactory.createElement('svg:text');
+ current.txtElement.appendChild(current.tspan);
+ }
+ }, {
+ key: "beginText",
+ value: function beginText() {
+ var current = this.current;
+ current.x = current.lineX = 0;
+ current.y = current.lineY = 0;
+ current.textMatrix = _util.IDENTITY_MATRIX;
+ current.lineMatrix = _util.IDENTITY_MATRIX;
+ current.textMatrixScale = 1;
+ current.tspan = this.svgFactory.createElement('svg:tspan');
+ current.txtElement = this.svgFactory.createElement('svg:text');
+ current.txtgrp = this.svgFactory.createElement('svg:g');
+ current.xcoords = [];
+ }
+ }, {
+ key: "moveText",
+ value: function moveText(x, y) {
+ var current = this.current;
+ current.x = current.lineX += x;
+ current.y = current.lineY += y;
+ current.xcoords = [];
+ current.tspan = this.svgFactory.createElement('svg:tspan');
+ current.tspan.setAttributeNS(null, 'font-family', current.fontFamily);
+ current.tspan.setAttributeNS(null, 'font-size', "".concat(pf(current.fontSize), "px"));
+ current.tspan.setAttributeNS(null, 'y', pf(-current.y));
+ }
+ }, {
+ key: "showText",
+ value: function showText(glyphs) {
+ var current = this.current;
+ var font = current.font;
+ var fontSize = current.fontSize;
+
+ if (fontSize === 0) {
+ return;
+ }
+
+ var charSpacing = current.charSpacing;
+ var wordSpacing = current.wordSpacing;
+ var fontDirection = current.fontDirection;
+ var textHScale = current.textHScale * fontDirection;
+ var vertical = font.vertical;
+ var widthAdvanceScale = fontSize * current.fontMatrix[0];
+ var x = 0;
+ var _iteratorNormalCompletion4 = true;
+ var _didIteratorError4 = false;
+ var _iteratorError4 = undefined;
+
+ try {
+ for (var _iterator4 = glyphs[Symbol.iterator](), _step4; !(_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done); _iteratorNormalCompletion4 = true) {
+ var glyph = _step4.value;
+
+ if (glyph === null) {
+ x += fontDirection * wordSpacing;
+ continue;
+ } else if ((0, _util.isNum)(glyph)) {
+ x += -glyph * fontSize * 0.001;
+ continue;
+ }
+
+ var width = glyph.width;
+ var character = glyph.fontChar;
+ var spacing = (glyph.isSpace ? wordSpacing : 0) + charSpacing;
+ var charWidth = width * widthAdvanceScale + spacing * fontDirection;
+
+ if (!glyph.isInFont && !font.missingFile) {
+ x += charWidth;
+ continue;
+ }
+
+ current.xcoords.push(current.x + x * textHScale);
+ current.tspan.textContent += character;
+ x += charWidth;
+ }
+ } catch (err) {
+ _didIteratorError4 = true;
+ _iteratorError4 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion4 && _iterator4["return"] != null) {
+ _iterator4["return"]();
+ }
+ } finally {
+ if (_didIteratorError4) {
+ throw _iteratorError4;
+ }
+ }
+ }
+
+ if (vertical) {
+ current.y -= x * textHScale;
+ } else {
+ current.x += x * textHScale;
+ }
+
+ current.tspan.setAttributeNS(null, 'x', current.xcoords.map(pf).join(' '));
+ current.tspan.setAttributeNS(null, 'y', pf(-current.y));
+ current.tspan.setAttributeNS(null, 'font-family', current.fontFamily);
+ current.tspan.setAttributeNS(null, 'font-size', "".concat(pf(current.fontSize), "px"));
+
+ if (current.fontStyle !== SVG_DEFAULTS.fontStyle) {
+ current.tspan.setAttributeNS(null, 'font-style', current.fontStyle);
+ }
+
+ if (current.fontWeight !== SVG_DEFAULTS.fontWeight) {
+ current.tspan.setAttributeNS(null, 'font-weight', current.fontWeight);
+ }
+
+ var fillStrokeMode = current.textRenderingMode & _util.TextRenderingMode.FILL_STROKE_MASK;
+
+ if (fillStrokeMode === _util.TextRenderingMode.FILL || fillStrokeMode === _util.TextRenderingMode.FILL_STROKE) {
+ if (current.fillColor !== SVG_DEFAULTS.fillColor) {
+ current.tspan.setAttributeNS(null, 'fill', current.fillColor);
+ }
+
+ if (current.fillAlpha < 1) {
+ current.tspan.setAttributeNS(null, 'fill-opacity', current.fillAlpha);
+ }
+ } else if (current.textRenderingMode === _util.TextRenderingMode.ADD_TO_PATH) {
+ current.tspan.setAttributeNS(null, 'fill', 'transparent');
+ } else {
+ current.tspan.setAttributeNS(null, 'fill', 'none');
+ }
+
+ if (fillStrokeMode === _util.TextRenderingMode.STROKE || fillStrokeMode === _util.TextRenderingMode.FILL_STROKE) {
+ var lineWidthScale = 1 / (current.textMatrixScale || 1);
+
+ this._setStrokeAttributes(current.tspan, lineWidthScale);
+ }
+
+ var textMatrix = current.textMatrix;
+
+ if (current.textRise !== 0) {
+ textMatrix = textMatrix.slice();
+ textMatrix[5] += current.textRise;
+ }
+
+ current.txtElement.setAttributeNS(null, 'transform', "".concat(pm(textMatrix), " scale(1, -1)"));
+ current.txtElement.setAttributeNS(XML_NS, 'xml:space', 'preserve');
+ current.txtElement.appendChild(current.tspan);
+ current.txtgrp.appendChild(current.txtElement);
+
+ this._ensureTransformGroup().appendChild(current.txtElement);
+ }
+ }, {
+ key: "setLeadingMoveText",
+ value: function setLeadingMoveText(x, y) {
+ this.setLeading(-y);
+ this.moveText(x, y);
+ }
+ }, {
+ key: "addFontStyle",
+ value: function addFontStyle(fontObj) {
+ if (!this.cssStyle) {
+ this.cssStyle = this.svgFactory.createElement('svg:style');
+ this.cssStyle.setAttributeNS(null, 'type', 'text/css');
+ this.defs.appendChild(this.cssStyle);
+ }
+
+ var url = (0, _util.createObjectURL)(fontObj.data, fontObj.mimetype, this.forceDataSchema);
+ this.cssStyle.textContent += "@font-face { font-family: \"".concat(fontObj.loadedName, "\";") + " src: url(".concat(url, "); }\n");
+ }
+ }, {
+ key: "setFont",
+ value: function setFont(details) {
+ var current = this.current;
+ var fontObj = this.commonObjs.get(details[0]);
+ var size = details[1];
+ current.font = fontObj;
+
+ if (this.embedFonts && fontObj.data && !this.embeddedFonts[fontObj.loadedName]) {
+ this.addFontStyle(fontObj);
+ this.embeddedFonts[fontObj.loadedName] = fontObj;
+ }
+
+ current.fontMatrix = fontObj.fontMatrix ? fontObj.fontMatrix : _util.FONT_IDENTITY_MATRIX;
+ var bold = fontObj.black ? fontObj.bold ? 'bolder' : 'bold' : fontObj.bold ? 'bold' : 'normal';
+ var italic = fontObj.italic ? 'italic' : 'normal';
+
+ if (size < 0) {
+ size = -size;
+ current.fontDirection = -1;
+ } else {
+ current.fontDirection = 1;
+ }
+
+ current.fontSize = size;
+ current.fontFamily = fontObj.loadedName;
+ current.fontWeight = bold;
+ current.fontStyle = italic;
+ current.tspan = this.svgFactory.createElement('svg:tspan');
+ current.tspan.setAttributeNS(null, 'y', pf(-current.y));
+ current.xcoords = [];
+ }
+ }, {
+ key: "endText",
+ value: function endText() {
+ var current = this.current;
+
+ if (current.textRenderingMode & _util.TextRenderingMode.ADD_TO_PATH_FLAG && current.txtElement && current.txtElement.hasChildNodes()) {
+ current.element = current.txtElement;
+ this.clip('nonzero');
+ this.endPath();
+ }
+ }
+ }, {
+ key: "setLineWidth",
+ value: function setLineWidth(width) {
+ if (width > 0) {
+ this.current.lineWidth = width;
+ }
+ }
+ }, {
+ key: "setLineCap",
+ value: function setLineCap(style) {
+ this.current.lineCap = LINE_CAP_STYLES[style];
+ }
+ }, {
+ key: "setLineJoin",
+ value: function setLineJoin(style) {
+ this.current.lineJoin = LINE_JOIN_STYLES[style];
+ }
+ }, {
+ key: "setMiterLimit",
+ value: function setMiterLimit(limit) {
+ this.current.miterLimit = limit;
+ }
+ }, {
+ key: "setStrokeAlpha",
+ value: function setStrokeAlpha(strokeAlpha) {
+ this.current.strokeAlpha = strokeAlpha;
+ }
+ }, {
+ key: "setStrokeRGBColor",
+ value: function setStrokeRGBColor(r, g, b) {
+ this.current.strokeColor = _util.Util.makeCssRgb(r, g, b);
+ }
+ }, {
+ key: "setFillAlpha",
+ value: function setFillAlpha(fillAlpha) {
+ this.current.fillAlpha = fillAlpha;
+ }
+ }, {
+ key: "setFillRGBColor",
+ value: function setFillRGBColor(r, g, b) {
+ this.current.fillColor = _util.Util.makeCssRgb(r, g, b);
+ this.current.tspan = this.svgFactory.createElement('svg:tspan');
+ this.current.xcoords = [];
+ }
+ }, {
+ key: "setStrokeColorN",
+ value: function setStrokeColorN(args) {
+ this.current.strokeColor = this._makeColorN_Pattern(args);
+ }
+ }, {
+ key: "setFillColorN",
+ value: function setFillColorN(args) {
+ this.current.fillColor = this._makeColorN_Pattern(args);
+ }
+ }, {
+ key: "shadingFill",
+ value: function shadingFill(args) {
+ var width = this.viewport.width;
+ var height = this.viewport.height;
+
+ var inv = _util.Util.inverseTransform(this.transformMatrix);
+
+ var bl = _util.Util.applyTransform([0, 0], inv);
+
+ var br = _util.Util.applyTransform([0, height], inv);
+
+ var ul = _util.Util.applyTransform([width, 0], inv);
+
+ var ur = _util.Util.applyTransform([width, height], inv);
+
+ var x0 = Math.min(bl[0], br[0], ul[0], ur[0]);
+ var y0 = Math.min(bl[1], br[1], ul[1], ur[1]);
+ var x1 = Math.max(bl[0], br[0], ul[0], ur[0]);
+ var y1 = Math.max(bl[1], br[1], ul[1], ur[1]);
+ var rect = this.svgFactory.createElement('svg:rect');
+ rect.setAttributeNS(null, 'x', x0);
+ rect.setAttributeNS(null, 'y', y0);
+ rect.setAttributeNS(null, 'width', x1 - x0);
+ rect.setAttributeNS(null, 'height', y1 - y0);
+ rect.setAttributeNS(null, 'fill', this._makeShadingPattern(args));
+
+ this._ensureTransformGroup().appendChild(rect);
+ }
+ }, {
+ key: "_makeColorN_Pattern",
+ value: function _makeColorN_Pattern(args) {
+ if (args[0] === 'TilingPattern') {
+ return this._makeTilingPattern(args);
+ }
+
+ return this._makeShadingPattern(args);
+ }
+ }, {
+ key: "_makeTilingPattern",
+ value: function _makeTilingPattern(args) {
+ var color = args[1];
+ var operatorList = args[2];
+ var matrix = args[3] || _util.IDENTITY_MATRIX;
+
+ var _args$ = _slicedToArray(args[4], 4),
+ x0 = _args$[0],
+ y0 = _args$[1],
+ x1 = _args$[2],
+ y1 = _args$[3];
+
+ var xstep = args[5];
+ var ystep = args[6];
+ var paintType = args[7];
+ var tilingId = "shading".concat(shadingCount++);
+
+ var _Util$applyTransform = _util.Util.applyTransform([x0, y0], matrix),
+ _Util$applyTransform2 = _slicedToArray(_Util$applyTransform, 2),
+ tx0 = _Util$applyTransform2[0],
+ ty0 = _Util$applyTransform2[1];
+
+ var _Util$applyTransform3 = _util.Util.applyTransform([x1, y1], matrix),
+ _Util$applyTransform4 = _slicedToArray(_Util$applyTransform3, 2),
+ tx1 = _Util$applyTransform4[0],
+ ty1 = _Util$applyTransform4[1];
+
+ var _Util$singularValueDe = _util.Util.singularValueDecompose2dScale(matrix),
+ _Util$singularValueDe2 = _slicedToArray(_Util$singularValueDe, 2),
+ xscale = _Util$singularValueDe2[0],
+ yscale = _Util$singularValueDe2[1];
+
+ var txstep = xstep * xscale;
+ var tystep = ystep * yscale;
+ var tiling = this.svgFactory.createElement('svg:pattern');
+ tiling.setAttributeNS(null, 'id', tilingId);
+ tiling.setAttributeNS(null, 'patternUnits', 'userSpaceOnUse');
+ tiling.setAttributeNS(null, 'width', txstep);
+ tiling.setAttributeNS(null, 'height', tystep);
+ tiling.setAttributeNS(null, 'x', "".concat(tx0));
+ tiling.setAttributeNS(null, 'y', "".concat(ty0));
+ var svg = this.svg;
+ var transformMatrix = this.transformMatrix;
+ var fillColor = this.current.fillColor;
+ var strokeColor = this.current.strokeColor;
+ var bbox = this.svgFactory.create(tx1 - tx0, ty1 - ty0);
+ this.svg = bbox;
+ this.transformMatrix = matrix;
+
+ if (paintType === 2) {
+ var cssColor = _util.Util.makeCssRgb.apply(_util.Util, _toConsumableArray(color));
+
+ this.current.fillColor = cssColor;
+ this.current.strokeColor = cssColor;
+ }
+
+ this.executeOpTree(this.convertOpList(operatorList));
+ this.svg = svg;
+ this.transformMatrix = transformMatrix;
+ this.current.fillColor = fillColor;
+ this.current.strokeColor = strokeColor;
+ tiling.appendChild(bbox.childNodes[0]);
+ this.defs.appendChild(tiling);
+ return "url(#".concat(tilingId, ")");
+ }
+ }, {
+ key: "_makeShadingPattern",
+ value: function _makeShadingPattern(args) {
+ switch (args[0]) {
+ case 'RadialAxial':
+ var shadingId = "shading".concat(shadingCount++);
+ var colorStops = args[2];
+ var gradient;
+
+ switch (args[1]) {
+ case 'axial':
+ var point0 = args[3];
+ var point1 = args[4];
+ gradient = this.svgFactory.createElement('svg:linearGradient');
+ gradient.setAttributeNS(null, 'id', shadingId);
+ gradient.setAttributeNS(null, 'gradientUnits', 'userSpaceOnUse');
+ gradient.setAttributeNS(null, 'x1', point0[0]);
+ gradient.setAttributeNS(null, 'y1', point0[1]);
+ gradient.setAttributeNS(null, 'x2', point1[0]);
+ gradient.setAttributeNS(null, 'y2', point1[1]);
+ break;
+
+ case 'radial':
+ var focalPoint = args[3];
+ var circlePoint = args[4];
+ var focalRadius = args[5];
+ var circleRadius = args[6];
+ gradient = this.svgFactory.createElement('svg:radialGradient');
+ gradient.setAttributeNS(null, 'id', shadingId);
+ gradient.setAttributeNS(null, 'gradientUnits', 'userSpaceOnUse');
+ gradient.setAttributeNS(null, 'cx', circlePoint[0]);
+ gradient.setAttributeNS(null, 'cy', circlePoint[1]);
+ gradient.setAttributeNS(null, 'r', circleRadius);
+ gradient.setAttributeNS(null, 'fx', focalPoint[0]);
+ gradient.setAttributeNS(null, 'fy', focalPoint[1]);
+ gradient.setAttributeNS(null, 'fr', focalRadius);
+ break;
+
+ default:
+ throw new Error("Unknown RadialAxial type: ".concat(args[1]));
+ }
+
+ var _iteratorNormalCompletion5 = true;
+ var _didIteratorError5 = false;
+ var _iteratorError5 = undefined;
+
+ try {
+ for (var _iterator5 = colorStops[Symbol.iterator](), _step5; !(_iteratorNormalCompletion5 = (_step5 = _iterator5.next()).done); _iteratorNormalCompletion5 = true) {
+ var colorStop = _step5.value;
+ var stop = this.svgFactory.createElement('svg:stop');
+ stop.setAttributeNS(null, 'offset', colorStop[0]);
+ stop.setAttributeNS(null, 'stop-color', colorStop[1]);
+ gradient.appendChild(stop);
+ }
+ } catch (err) {
+ _didIteratorError5 = true;
+ _iteratorError5 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion5 && _iterator5["return"] != null) {
+ _iterator5["return"]();
+ }
+ } finally {
+ if (_didIteratorError5) {
+ throw _iteratorError5;
+ }
+ }
+ }
+
+ this.defs.appendChild(gradient);
+ return "url(#".concat(shadingId, ")");
+
+ case 'Mesh':
+ (0, _util.warn)('Unimplemented pattern Mesh');
+ return null;
+
+ case 'Dummy':
+ return 'hotpink';
+
+ default:
+ throw new Error("Unknown IR type: ".concat(args[0]));
+ }
+ }
+ }, {
+ key: "setDash",
+ value: function setDash(dashArray, dashPhase) {
+ this.current.dashArray = dashArray;
+ this.current.dashPhase = dashPhase;
+ }
+ }, {
+ key: "constructPath",
+ value: function constructPath(ops, args) {
+ var current = this.current;
+ var x = current.x,
+ y = current.y;
+ var d = [];
+ var j = 0;
+ var _iteratorNormalCompletion6 = true;
+ var _didIteratorError6 = false;
+ var _iteratorError6 = undefined;
+
+ try {
+ for (var _iterator6 = ops[Symbol.iterator](), _step6; !(_iteratorNormalCompletion6 = (_step6 = _iterator6.next()).done); _iteratorNormalCompletion6 = true) {
+ var op = _step6.value;
+
+ switch (op | 0) {
+ case _util.OPS.rectangle:
+ x = args[j++];
+ y = args[j++];
+ var width = args[j++];
+ var height = args[j++];
+ var xw = x + width;
+ var yh = y + height;
+ d.push('M', pf(x), pf(y), 'L', pf(xw), pf(y), 'L', pf(xw), pf(yh), 'L', pf(x), pf(yh), 'Z');
+ break;
+
+ case _util.OPS.moveTo:
+ x = args[j++];
+ y = args[j++];
+ d.push('M', pf(x), pf(y));
+ break;
+
+ case _util.OPS.lineTo:
+ x = args[j++];
+ y = args[j++];
+ d.push('L', pf(x), pf(y));
+ break;
+
+ case _util.OPS.curveTo:
+ x = args[j + 4];
+ y = args[j + 5];
+ d.push('C', pf(args[j]), pf(args[j + 1]), pf(args[j + 2]), pf(args[j + 3]), pf(x), pf(y));
+ j += 6;
+ break;
+
+ case _util.OPS.curveTo2:
+ x = args[j + 2];
+ y = args[j + 3];
+ d.push('C', pf(x), pf(y), pf(args[j]), pf(args[j + 1]), pf(args[j + 2]), pf(args[j + 3]));
+ j += 4;
+ break;
+
+ case _util.OPS.curveTo3:
+ x = args[j + 2];
+ y = args[j + 3];
+ d.push('C', pf(args[j]), pf(args[j + 1]), pf(x), pf(y), pf(x), pf(y));
+ j += 4;
+ break;
+
+ case _util.OPS.closePath:
+ d.push('Z');
+ break;
+ }
+ }
+ } catch (err) {
+ _didIteratorError6 = true;
+ _iteratorError6 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion6 && _iterator6["return"] != null) {
+ _iterator6["return"]();
+ }
+ } finally {
+ if (_didIteratorError6) {
+ throw _iteratorError6;
+ }
+ }
+ }
+
+ d = d.join(' ');
+
+ if (current.path && ops.length > 0 && ops[0] !== _util.OPS.rectangle && ops[0] !== _util.OPS.moveTo) {
+ d = current.path.getAttributeNS(null, 'd') + d;
+ } else {
+ current.path = this.svgFactory.createElement('svg:path');
+
+ this._ensureTransformGroup().appendChild(current.path);
+ }
+
+ current.path.setAttributeNS(null, 'd', d);
+ current.path.setAttributeNS(null, 'fill', 'none');
+ current.element = current.path;
+ current.setCurrentPoint(x, y);
+ }
+ }, {
+ key: "endPath",
+ value: function endPath() {
+ var current = this.current;
+ current.path = null;
+
+ if (!this.pendingClip) {
+ return;
+ }
+
+ if (!current.element) {
+ this.pendingClip = null;
+ return;
+ }
+
+ var clipId = "clippath".concat(clipCount++);
+ var clipPath = this.svgFactory.createElement('svg:clipPath');
+ clipPath.setAttributeNS(null, 'id', clipId);
+ clipPath.setAttributeNS(null, 'transform', pm(this.transformMatrix));
+ var clipElement = current.element.cloneNode(true);
+
+ if (this.pendingClip === 'evenodd') {
+ clipElement.setAttributeNS(null, 'clip-rule', 'evenodd');
+ } else {
+ clipElement.setAttributeNS(null, 'clip-rule', 'nonzero');
+ }
+
+ this.pendingClip = null;
+ clipPath.appendChild(clipElement);
+ this.defs.appendChild(clipPath);
+
+ if (current.activeClipUrl) {
+ current.clipGroup = null;
+ this.extraStack.forEach(function (prev) {
+ prev.clipGroup = null;
+ });
+ clipPath.setAttributeNS(null, 'clip-path', current.activeClipUrl);
+ }
+
+ current.activeClipUrl = "url(#".concat(clipId, ")");
+ this.tgrp = null;
+ }
+ }, {
+ key: "clip",
+ value: function clip(type) {
+ this.pendingClip = type;
+ }
+ }, {
+ key: "closePath",
+ value: function closePath() {
+ var current = this.current;
+
+ if (current.path) {
+ var d = "".concat(current.path.getAttributeNS(null, 'd'), "Z");
+ current.path.setAttributeNS(null, 'd', d);
+ }
+ }
+ }, {
+ key: "setLeading",
+ value: function setLeading(leading) {
+ this.current.leading = -leading;
+ }
+ }, {
+ key: "setTextRise",
+ value: function setTextRise(textRise) {
+ this.current.textRise = textRise;
+ }
+ }, {
+ key: "setTextRenderingMode",
+ value: function setTextRenderingMode(textRenderingMode) {
+ this.current.textRenderingMode = textRenderingMode;
+ }
+ }, {
+ key: "setHScale",
+ value: function setHScale(scale) {
+ this.current.textHScale = scale / 100;
+ }
+ }, {
+ key: "setRenderingIntent",
+ value: function setRenderingIntent(intent) {}
+ }, {
+ key: "setFlatness",
+ value: function setFlatness(flatness) {}
+ }, {
+ key: "setGState",
+ value: function setGState(states) {
+ var _iteratorNormalCompletion7 = true;
+ var _didIteratorError7 = false;
+ var _iteratorError7 = undefined;
+
+ try {
+ for (var _iterator7 = states[Symbol.iterator](), _step7; !(_iteratorNormalCompletion7 = (_step7 = _iterator7.next()).done); _iteratorNormalCompletion7 = true) {
+ var _step7$value = _slicedToArray(_step7.value, 2),
+ key = _step7$value[0],
+ value = _step7$value[1];
+
+ switch (key) {
+ case 'LW':
+ this.setLineWidth(value);
+ break;
+
+ case 'LC':
+ this.setLineCap(value);
+ break;
+
+ case 'LJ':
+ this.setLineJoin(value);
+ break;
+
+ case 'ML':
+ this.setMiterLimit(value);
+ break;
+
+ case 'D':
+ this.setDash(value[0], value[1]);
+ break;
+
+ case 'RI':
+ this.setRenderingIntent(value);
+ break;
+
+ case 'FL':
+ this.setFlatness(value);
+ break;
+
+ case 'Font':
+ this.setFont(value);
+ break;
+
+ case 'CA':
+ this.setStrokeAlpha(value);
+ break;
+
+ case 'ca':
+ this.setFillAlpha(value);
+ break;
+
+ default:
+ (0, _util.warn)("Unimplemented graphic state operator ".concat(key));
+ break;
+ }
+ }
+ } catch (err) {
+ _didIteratorError7 = true;
+ _iteratorError7 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion7 && _iterator7["return"] != null) {
+ _iterator7["return"]();
+ }
+ } finally {
+ if (_didIteratorError7) {
+ throw _iteratorError7;
+ }
+ }
+ }
+ }
+ }, {
+ key: "fill",
+ value: function fill() {
+ var current = this.current;
+
+ if (current.element) {
+ current.element.setAttributeNS(null, 'fill', current.fillColor);
+ current.element.setAttributeNS(null, 'fill-opacity', current.fillAlpha);
+ this.endPath();
+ }
+ }
+ }, {
+ key: "stroke",
+ value: function stroke() {
+ var current = this.current;
+
+ if (current.element) {
+ this._setStrokeAttributes(current.element);
+
+ current.element.setAttributeNS(null, 'fill', 'none');
+ this.endPath();
+ }
+ }
+ }, {
+ key: "_setStrokeAttributes",
+ value: function _setStrokeAttributes(element) {
+ var lineWidthScale = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 1;
+ var current = this.current;
+ var dashArray = current.dashArray;
+
+ if (lineWidthScale !== 1 && dashArray.length > 0) {
+ dashArray = dashArray.map(function (value) {
+ return lineWidthScale * value;
+ });
+ }
+
+ element.setAttributeNS(null, 'stroke', current.strokeColor);
+ element.setAttributeNS(null, 'stroke-opacity', current.strokeAlpha);
+ element.setAttributeNS(null, 'stroke-miterlimit', pf(current.miterLimit));
+ element.setAttributeNS(null, 'stroke-linecap', current.lineCap);
+ element.setAttributeNS(null, 'stroke-linejoin', current.lineJoin);
+ element.setAttributeNS(null, 'stroke-width', pf(lineWidthScale * current.lineWidth) + 'px');
+ element.setAttributeNS(null, 'stroke-dasharray', dashArray.map(pf).join(' '));
+ element.setAttributeNS(null, 'stroke-dashoffset', pf(lineWidthScale * current.dashPhase) + 'px');
+ }
+ }, {
+ key: "eoFill",
+ value: function eoFill() {
+ if (this.current.element) {
+ this.current.element.setAttributeNS(null, 'fill-rule', 'evenodd');
+ }
+
+ this.fill();
+ }
+ }, {
+ key: "fillStroke",
+ value: function fillStroke() {
+ this.stroke();
+ this.fill();
+ }
+ }, {
+ key: "eoFillStroke",
+ value: function eoFillStroke() {
+ if (this.current.element) {
+ this.current.element.setAttributeNS(null, 'fill-rule', 'evenodd');
+ }
+
+ this.fillStroke();
+ }
+ }, {
+ key: "closeStroke",
+ value: function closeStroke() {
+ this.closePath();
+ this.stroke();
+ }
+ }, {
+ key: "closeFillStroke",
+ value: function closeFillStroke() {
+ this.closePath();
+ this.fillStroke();
+ }
+ }, {
+ key: "closeEOFillStroke",
+ value: function closeEOFillStroke() {
+ this.closePath();
+ this.eoFillStroke();
+ }
+ }, {
+ key: "paintSolidColorImageMask",
+ value: function paintSolidColorImageMask() {
+ var rect = this.svgFactory.createElement('svg:rect');
+ rect.setAttributeNS(null, 'x', '0');
+ rect.setAttributeNS(null, 'y', '0');
+ rect.setAttributeNS(null, 'width', '1px');
+ rect.setAttributeNS(null, 'height', '1px');
+ rect.setAttributeNS(null, 'fill', this.current.fillColor);
+
+ this._ensureTransformGroup().appendChild(rect);
+ }
+ }, {
+ key: "paintJpegXObject",
+ value: function paintJpegXObject(objId, w, h) {
+ var imgObj = this.objs.get(objId);
+ var imgEl = this.svgFactory.createElement('svg:image');
+ imgEl.setAttributeNS(XLINK_NS, 'xlink:href', imgObj.src);
+ imgEl.setAttributeNS(null, 'width', pf(w));
+ imgEl.setAttributeNS(null, 'height', pf(h));
+ imgEl.setAttributeNS(null, 'x', '0');
+ imgEl.setAttributeNS(null, 'y', pf(-h));
+ imgEl.setAttributeNS(null, 'transform', "scale(".concat(pf(1 / w), " ").concat(pf(-1 / h), ")"));
+
+ this._ensureTransformGroup().appendChild(imgEl);
+ }
+ }, {
+ key: "paintImageXObject",
+ value: function paintImageXObject(objId) {
+ var imgData = this.objs.get(objId);
+
+ if (!imgData) {
+ (0, _util.warn)("Dependent image with object ID ".concat(objId, " is not ready yet"));
+ return;
+ }
+
+ this.paintInlineImageXObject(imgData);
+ }
+ }, {
+ key: "paintInlineImageXObject",
+ value: function paintInlineImageXObject(imgData, mask) {
+ var width = imgData.width;
+ var height = imgData.height;
+ var imgSrc = convertImgDataToPng(imgData, this.forceDataSchema, !!mask);
+ var cliprect = this.svgFactory.createElement('svg:rect');
+ cliprect.setAttributeNS(null, 'x', '0');
+ cliprect.setAttributeNS(null, 'y', '0');
+ cliprect.setAttributeNS(null, 'width', pf(width));
+ cliprect.setAttributeNS(null, 'height', pf(height));
+ this.current.element = cliprect;
+ this.clip('nonzero');
+ var imgEl = this.svgFactory.createElement('svg:image');
+ imgEl.setAttributeNS(XLINK_NS, 'xlink:href', imgSrc);
+ imgEl.setAttributeNS(null, 'x', '0');
+ imgEl.setAttributeNS(null, 'y', pf(-height));
+ imgEl.setAttributeNS(null, 'width', pf(width) + 'px');
+ imgEl.setAttributeNS(null, 'height', pf(height) + 'px');
+ imgEl.setAttributeNS(null, 'transform', "scale(".concat(pf(1 / width), " ").concat(pf(-1 / height), ")"));
+
+ if (mask) {
+ mask.appendChild(imgEl);
+ } else {
+ this._ensureTransformGroup().appendChild(imgEl);
+ }
+ }
+ }, {
+ key: "paintImageMaskXObject",
+ value: function paintImageMaskXObject(imgData) {
+ var current = this.current;
+ var width = imgData.width;
+ var height = imgData.height;
+ var fillColor = current.fillColor;
+ current.maskId = "mask".concat(maskCount++);
+ var mask = this.svgFactory.createElement('svg:mask');
+ mask.setAttributeNS(null, 'id', current.maskId);
+ var rect = this.svgFactory.createElement('svg:rect');
+ rect.setAttributeNS(null, 'x', '0');
+ rect.setAttributeNS(null, 'y', '0');
+ rect.setAttributeNS(null, 'width', pf(width));
+ rect.setAttributeNS(null, 'height', pf(height));
+ rect.setAttributeNS(null, 'fill', fillColor);
+ rect.setAttributeNS(null, 'mask', "url(#".concat(current.maskId, ")"));
+ this.defs.appendChild(mask);
+
+ this._ensureTransformGroup().appendChild(rect);
+
+ this.paintInlineImageXObject(imgData, mask);
+ }
+ }, {
+ key: "paintFormXObjectBegin",
+ value: function paintFormXObjectBegin(matrix, bbox) {
+ if (Array.isArray(matrix) && matrix.length === 6) {
+ this.transform(matrix[0], matrix[1], matrix[2], matrix[3], matrix[4], matrix[5]);
+ }
+
+ if (bbox) {
+ var width = bbox[2] - bbox[0];
+ var height = bbox[3] - bbox[1];
+ var cliprect = this.svgFactory.createElement('svg:rect');
+ cliprect.setAttributeNS(null, 'x', bbox[0]);
+ cliprect.setAttributeNS(null, 'y', bbox[1]);
+ cliprect.setAttributeNS(null, 'width', pf(width));
+ cliprect.setAttributeNS(null, 'height', pf(height));
+ this.current.element = cliprect;
+ this.clip('nonzero');
+ this.endPath();
+ }
+ }
+ }, {
+ key: "paintFormXObjectEnd",
+ value: function paintFormXObjectEnd() {}
+ }, {
+ key: "_initialize",
+ value: function _initialize(viewport) {
+ var svg = this.svgFactory.create(viewport.width, viewport.height);
+ var definitions = this.svgFactory.createElement('svg:defs');
+ svg.appendChild(definitions);
+ this.defs = definitions;
+ var rootGroup = this.svgFactory.createElement('svg:g');
+ rootGroup.setAttributeNS(null, 'transform', pm(viewport.transform));
+ svg.appendChild(rootGroup);
+ this.svg = rootGroup;
+ return svg;
+ }
+ }, {
+ key: "_ensureClipGroup",
+ value: function _ensureClipGroup() {
+ if (!this.current.clipGroup) {
+ var clipGroup = this.svgFactory.createElement('svg:g');
+ clipGroup.setAttributeNS(null, 'clip-path', this.current.activeClipUrl);
+ this.svg.appendChild(clipGroup);
+ this.current.clipGroup = clipGroup;
+ }
+
+ return this.current.clipGroup;
+ }
+ }, {
+ key: "_ensureTransformGroup",
+ value: function _ensureTransformGroup() {
+ if (!this.tgrp) {
+ this.tgrp = this.svgFactory.createElement('svg:g');
+ this.tgrp.setAttributeNS(null, 'transform', pm(this.transformMatrix));
+
+ if (this.current.activeClipUrl) {
+ this._ensureClipGroup().appendChild(this.tgrp);
+ } else {
+ this.svg.appendChild(this.tgrp);
+ }
+ }
+
+ return this.tgrp;
+ }
+ }]);
+
+ return SVGGraphics;
+ }();
+}
+
+/***/ }),
+/* 165 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.PDFNodeStream = void 0;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(148));
+
+var _util = __w_pdfjs_require__(1);
+
+var _network_utils = __w_pdfjs_require__(166);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
+
+function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
+
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var fs = require('fs');
+
+var http = require('http');
+
+var https = require('https');
+
+var url = require('url');
+
+var fileUriRegex = /^file:\/\/\/[a-zA-Z]:\//;
+
+function parseUrl(sourceUrl) {
+ var parsedUrl = url.parse(sourceUrl);
+
+ if (parsedUrl.protocol === 'file:' || parsedUrl.host) {
+ return parsedUrl;
+ }
+
+ if (/^[a-z]:[/\\]/i.test(sourceUrl)) {
+ return url.parse("file:///".concat(sourceUrl));
+ }
+
+ if (!parsedUrl.host) {
+ parsedUrl.protocol = 'file:';
+ }
+
+ return parsedUrl;
+}
+
+var PDFNodeStream =
+/*#__PURE__*/
+function () {
+ function PDFNodeStream(source) {
+ _classCallCheck(this, PDFNodeStream);
+
+ this.source = source;
+ this.url = parseUrl(source.url);
+ this.isHttp = this.url.protocol === 'http:' || this.url.protocol === 'https:';
+ this.isFsUrl = this.url.protocol === 'file:';
+ this.httpHeaders = this.isHttp && source.httpHeaders || {};
+ this._fullRequestReader = null;
+ this._rangeRequestReaders = [];
+ }
+
+ _createClass(PDFNodeStream, [{
+ key: "getFullReader",
+ value: function getFullReader() {
+ (0, _util.assert)(!this._fullRequestReader);
+ this._fullRequestReader = this.isFsUrl ? new PDFNodeStreamFsFullReader(this) : new PDFNodeStreamFullReader(this);
+ return this._fullRequestReader;
+ }
+ }, {
+ key: "getRangeReader",
+ value: function getRangeReader(start, end) {
+ if (end <= this._progressiveDataLength) {
+ return null;
+ }
+
+ var rangeReader = this.isFsUrl ? new PDFNodeStreamFsRangeReader(this, start, end) : new PDFNodeStreamRangeReader(this, start, end);
+
+ this._rangeRequestReaders.push(rangeReader);
+
+ return rangeReader;
+ }
+ }, {
+ key: "cancelAllRequests",
+ value: function cancelAllRequests(reason) {
+ if (this._fullRequestReader) {
+ this._fullRequestReader.cancel(reason);
+ }
+
+ var readers = this._rangeRequestReaders.slice(0);
+
+ readers.forEach(function (reader) {
+ reader.cancel(reason);
+ });
+ }
+ }, {
+ key: "_progressiveDataLength",
+ get: function get() {
+ return this._fullRequestReader ? this._fullRequestReader._loaded : 0;
+ }
+ }]);
+
+ return PDFNodeStream;
+}();
+
+exports.PDFNodeStream = PDFNodeStream;
+
+var BaseFullReader =
+/*#__PURE__*/
+function () {
+ function BaseFullReader(stream) {
+ _classCallCheck(this, BaseFullReader);
+
+ this._url = stream.url;
+ this._done = false;
+ this._storedError = null;
+ this.onProgress = null;
+ var source = stream.source;
+ this._contentLength = source.length;
+ this._loaded = 0;
+ this._filename = null;
+ this._disableRange = source.disableRange || false;
+ this._rangeChunkSize = source.rangeChunkSize;
+
+ if (!this._rangeChunkSize && !this._disableRange) {
+ this._disableRange = true;
+ }
+
+ this._isStreamingSupported = !source.disableStream;
+ this._isRangeSupported = !source.disableRange;
+ this._readableStream = null;
+ this._readCapability = (0, _util.createPromiseCapability)();
+ this._headersCapability = (0, _util.createPromiseCapability)();
+ }
+
+ _createClass(BaseFullReader, [{
+ key: "read",
+ value: function () {
+ var _read = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee() {
+ var chunk, buffer;
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ _context.next = 2;
+ return this._readCapability.promise;
+
+ case 2:
+ if (!this._done) {
+ _context.next = 4;
+ break;
+ }
+
+ return _context.abrupt("return", {
+ value: undefined,
+ done: true
+ });
+
+ case 4:
+ if (!this._storedError) {
+ _context.next = 6;
+ break;
+ }
+
+ throw this._storedError;
+
+ case 6:
+ chunk = this._readableStream.read();
+
+ if (!(chunk === null)) {
+ _context.next = 10;
+ break;
+ }
+
+ this._readCapability = (0, _util.createPromiseCapability)();
+ return _context.abrupt("return", this.read());
+
+ case 10:
+ this._loaded += chunk.length;
+
+ if (this.onProgress) {
+ this.onProgress({
+ loaded: this._loaded,
+ total: this._contentLength
+ });
+ }
+
+ buffer = new Uint8Array(chunk).buffer;
+ return _context.abrupt("return", {
+ value: buffer,
+ done: false
+ });
+
+ case 14:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, this);
+ }));
+
+ function read() {
+ return _read.apply(this, arguments);
+ }
+
+ return read;
+ }()
+ }, {
+ key: "cancel",
+ value: function cancel(reason) {
+ if (!this._readableStream) {
+ this._error(reason);
+
+ return;
+ }
+
+ this._readableStream.destroy(reason);
+ }
+ }, {
+ key: "_error",
+ value: function _error(reason) {
+ this._storedError = reason;
+
+ this._readCapability.resolve();
+ }
+ }, {
+ key: "_setReadableStream",
+ value: function _setReadableStream(readableStream) {
+ var _this = this;
+
+ this._readableStream = readableStream;
+ readableStream.on('readable', function () {
+ _this._readCapability.resolve();
+ });
+ readableStream.on('end', function () {
+ readableStream.destroy();
+ _this._done = true;
+
+ _this._readCapability.resolve();
+ });
+ readableStream.on('error', function (reason) {
+ _this._error(reason);
+ });
+
+ if (!this._isStreamingSupported && this._isRangeSupported) {
+ this._error(new _util.AbortException('streaming is disabled'));
+ }
+
+ if (this._storedError) {
+ this._readableStream.destroy(this._storedError);
+ }
+ }
+ }, {
+ key: "headersReady",
+ get: function get() {
+ return this._headersCapability.promise;
+ }
+ }, {
+ key: "filename",
+ get: function get() {
+ return this._filename;
+ }
+ }, {
+ key: "contentLength",
+ get: function get() {
+ return this._contentLength;
+ }
+ }, {
+ key: "isRangeSupported",
+ get: function get() {
+ return this._isRangeSupported;
+ }
+ }, {
+ key: "isStreamingSupported",
+ get: function get() {
+ return this._isStreamingSupported;
+ }
+ }]);
+
+ return BaseFullReader;
+}();
+
+var BaseRangeReader =
+/*#__PURE__*/
+function () {
+ function BaseRangeReader(stream) {
+ _classCallCheck(this, BaseRangeReader);
+
+ this._url = stream.url;
+ this._done = false;
+ this._storedError = null;
+ this.onProgress = null;
+ this._loaded = 0;
+ this._readableStream = null;
+ this._readCapability = (0, _util.createPromiseCapability)();
+ var source = stream.source;
+ this._isStreamingSupported = !source.disableStream;
+ }
+
+ _createClass(BaseRangeReader, [{
+ key: "read",
+ value: function () {
+ var _read2 = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee2() {
+ var chunk, buffer;
+ return _regenerator["default"].wrap(function _callee2$(_context2) {
+ while (1) {
+ switch (_context2.prev = _context2.next) {
+ case 0:
+ _context2.next = 2;
+ return this._readCapability.promise;
+
+ case 2:
+ if (!this._done) {
+ _context2.next = 4;
+ break;
+ }
+
+ return _context2.abrupt("return", {
+ value: undefined,
+ done: true
+ });
+
+ case 4:
+ if (!this._storedError) {
+ _context2.next = 6;
+ break;
+ }
+
+ throw this._storedError;
+
+ case 6:
+ chunk = this._readableStream.read();
+
+ if (!(chunk === null)) {
+ _context2.next = 10;
+ break;
+ }
+
+ this._readCapability = (0, _util.createPromiseCapability)();
+ return _context2.abrupt("return", this.read());
+
+ case 10:
+ this._loaded += chunk.length;
+
+ if (this.onProgress) {
+ this.onProgress({
+ loaded: this._loaded
+ });
+ }
+
+ buffer = new Uint8Array(chunk).buffer;
+ return _context2.abrupt("return", {
+ value: buffer,
+ done: false
+ });
+
+ case 14:
+ case "end":
+ return _context2.stop();
+ }
+ }
+ }, _callee2, this);
+ }));
+
+ function read() {
+ return _read2.apply(this, arguments);
+ }
+
+ return read;
+ }()
+ }, {
+ key: "cancel",
+ value: function cancel(reason) {
+ if (!this._readableStream) {
+ this._error(reason);
+
+ return;
+ }
+
+ this._readableStream.destroy(reason);
+ }
+ }, {
+ key: "_error",
+ value: function _error(reason) {
+ this._storedError = reason;
+
+ this._readCapability.resolve();
+ }
+ }, {
+ key: "_setReadableStream",
+ value: function _setReadableStream(readableStream) {
+ var _this2 = this;
+
+ this._readableStream = readableStream;
+ readableStream.on('readable', function () {
+ _this2._readCapability.resolve();
+ });
+ readableStream.on('end', function () {
+ readableStream.destroy();
+ _this2._done = true;
+
+ _this2._readCapability.resolve();
+ });
+ readableStream.on('error', function (reason) {
+ _this2._error(reason);
+ });
+
+ if (this._storedError) {
+ this._readableStream.destroy(this._storedError);
+ }
+ }
+ }, {
+ key: "isStreamingSupported",
+ get: function get() {
+ return this._isStreamingSupported;
+ }
+ }]);
+
+ return BaseRangeReader;
+}();
+
+function createRequestOptions(url, headers) {
+ return {
+ protocol: url.protocol,
+ auth: url.auth,
+ host: url.hostname,
+ port: url.port,
+ path: url.path,
+ method: 'GET',
+ headers: headers
+ };
+}
+
+var PDFNodeStreamFullReader =
+/*#__PURE__*/
+function (_BaseFullReader) {
+ _inherits(PDFNodeStreamFullReader, _BaseFullReader);
+
+ function PDFNodeStreamFullReader(stream) {
+ var _this3;
+
+ _classCallCheck(this, PDFNodeStreamFullReader);
+
+ _this3 = _possibleConstructorReturn(this, _getPrototypeOf(PDFNodeStreamFullReader).call(this, stream));
+
+ var handleResponse = function handleResponse(response) {
+ if (response.statusCode === 404) {
+ var error = new _util.MissingPDFException("Missing PDF \"".concat(_this3._url, "\"."));
+ _this3._storedError = error;
+
+ _this3._headersCapability.reject(error);
+
+ return;
+ }
+
+ _this3._headersCapability.resolve();
+
+ _this3._setReadableStream(response);
+
+ var getResponseHeader = function getResponseHeader(name) {
+ return _this3._readableStream.headers[name.toLowerCase()];
+ };
+
+ var _validateRangeRequest = (0, _network_utils.validateRangeRequestCapabilities)({
+ getResponseHeader: getResponseHeader,
+ isHttp: stream.isHttp,
+ rangeChunkSize: _this3._rangeChunkSize,
+ disableRange: _this3._disableRange
+ }),
+ allowRangeRequests = _validateRangeRequest.allowRangeRequests,
+ suggestedLength = _validateRangeRequest.suggestedLength;
+
+ _this3._isRangeSupported = allowRangeRequests;
+ _this3._contentLength = suggestedLength || _this3._contentLength;
+ _this3._filename = (0, _network_utils.extractFilenameFromHeader)(getResponseHeader);
+ };
+
+ _this3._request = null;
+
+ if (_this3._url.protocol === 'http:') {
+ _this3._request = http.request(createRequestOptions(_this3._url, stream.httpHeaders), handleResponse);
+ } else {
+ _this3._request = https.request(createRequestOptions(_this3._url, stream.httpHeaders), handleResponse);
+ }
+
+ _this3._request.on('error', function (reason) {
+ _this3._storedError = reason;
+
+ _this3._headersCapability.reject(reason);
+ });
+
+ _this3._request.end();
+
+ return _this3;
+ }
+
+ return PDFNodeStreamFullReader;
+}(BaseFullReader);
+
+var PDFNodeStreamRangeReader =
+/*#__PURE__*/
+function (_BaseRangeReader) {
+ _inherits(PDFNodeStreamRangeReader, _BaseRangeReader);
+
+ function PDFNodeStreamRangeReader(stream, start, end) {
+ var _this4;
+
+ _classCallCheck(this, PDFNodeStreamRangeReader);
+
+ _this4 = _possibleConstructorReturn(this, _getPrototypeOf(PDFNodeStreamRangeReader).call(this, stream));
+ _this4._httpHeaders = {};
+
+ for (var property in stream.httpHeaders) {
+ var value = stream.httpHeaders[property];
+
+ if (typeof value === 'undefined') {
+ continue;
+ }
+
+ _this4._httpHeaders[property] = value;
+ }
+
+ _this4._httpHeaders['Range'] = "bytes=".concat(start, "-").concat(end - 1);
+
+ var handleResponse = function handleResponse(response) {
+ if (response.statusCode === 404) {
+ var error = new _util.MissingPDFException("Missing PDF \"".concat(_this4._url, "\"."));
+ _this4._storedError = error;
+ return;
+ }
+
+ _this4._setReadableStream(response);
+ };
+
+ _this4._request = null;
+
+ if (_this4._url.protocol === 'http:') {
+ _this4._request = http.request(createRequestOptions(_this4._url, _this4._httpHeaders), handleResponse);
+ } else {
+ _this4._request = https.request(createRequestOptions(_this4._url, _this4._httpHeaders), handleResponse);
+ }
+
+ _this4._request.on('error', function (reason) {
+ _this4._storedError = reason;
+ });
+
+ _this4._request.end();
+
+ return _this4;
+ }
+
+ return PDFNodeStreamRangeReader;
+}(BaseRangeReader);
+
+var PDFNodeStreamFsFullReader =
+/*#__PURE__*/
+function (_BaseFullReader2) {
+ _inherits(PDFNodeStreamFsFullReader, _BaseFullReader2);
+
+ function PDFNodeStreamFsFullReader(stream) {
+ var _this5;
+
+ _classCallCheck(this, PDFNodeStreamFsFullReader);
+
+ _this5 = _possibleConstructorReturn(this, _getPrototypeOf(PDFNodeStreamFsFullReader).call(this, stream));
+ var path = decodeURIComponent(_this5._url.path);
+
+ if (fileUriRegex.test(_this5._url.href)) {
+ path = path.replace(/^\//, '');
+ }
+
+ fs.lstat(path, function (error, stat) {
+ if (error) {
+ if (error.code === 'ENOENT') {
+ error = new _util.MissingPDFException("Missing PDF \"".concat(path, "\"."));
+ }
+
+ _this5._storedError = error;
+
+ _this5._headersCapability.reject(error);
+
+ return;
+ }
+
+ _this5._contentLength = stat.size;
+
+ _this5._setReadableStream(fs.createReadStream(path));
+
+ _this5._headersCapability.resolve();
+ });
+ return _this5;
+ }
+
+ return PDFNodeStreamFsFullReader;
+}(BaseFullReader);
+
+var PDFNodeStreamFsRangeReader =
+/*#__PURE__*/
+function (_BaseRangeReader2) {
+ _inherits(PDFNodeStreamFsRangeReader, _BaseRangeReader2);
+
+ function PDFNodeStreamFsRangeReader(stream, start, end) {
+ var _this6;
+
+ _classCallCheck(this, PDFNodeStreamFsRangeReader);
+
+ _this6 = _possibleConstructorReturn(this, _getPrototypeOf(PDFNodeStreamFsRangeReader).call(this, stream));
+ var path = decodeURIComponent(_this6._url.path);
+
+ if (fileUriRegex.test(_this6._url.href)) {
+ path = path.replace(/^\//, '');
+ }
+
+ _this6._setReadableStream(fs.createReadStream(path, {
+ start: start,
+ end: end - 1
+ }));
+
+ return _this6;
+ }
+
+ return PDFNodeStreamFsRangeReader;
+}(BaseRangeReader);
+
+/***/ }),
+/* 166 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.createResponseStatusError = createResponseStatusError;
+exports.extractFilenameFromHeader = extractFilenameFromHeader;
+exports.validateRangeRequestCapabilities = validateRangeRequestCapabilities;
+exports.validateResponseStatus = validateResponseStatus;
+
+var _util = __w_pdfjs_require__(1);
+
+var _content_disposition = __w_pdfjs_require__(167);
+
+function validateRangeRequestCapabilities(_ref) {
+ var getResponseHeader = _ref.getResponseHeader,
+ isHttp = _ref.isHttp,
+ rangeChunkSize = _ref.rangeChunkSize,
+ disableRange = _ref.disableRange;
+ (0, _util.assert)(rangeChunkSize > 0, 'Range chunk size must be larger than zero');
+ var returnValues = {
+ allowRangeRequests: false,
+ suggestedLength: undefined
+ };
+ var length = parseInt(getResponseHeader('Content-Length'), 10);
+
+ if (!Number.isInteger(length)) {
+ return returnValues;
+ }
+
+ returnValues.suggestedLength = length;
+
+ if (length <= 2 * rangeChunkSize) {
+ return returnValues;
+ }
+
+ if (disableRange || !isHttp) {
+ return returnValues;
+ }
+
+ if (getResponseHeader('Accept-Ranges') !== 'bytes') {
+ return returnValues;
+ }
+
+ var contentEncoding = getResponseHeader('Content-Encoding') || 'identity';
+
+ if (contentEncoding !== 'identity') {
+ return returnValues;
+ }
+
+ returnValues.allowRangeRequests = true;
+ return returnValues;
+}
+
+function extractFilenameFromHeader(getResponseHeader) {
+ var contentDisposition = getResponseHeader('Content-Disposition');
+
+ if (contentDisposition) {
+ var filename = (0, _content_disposition.getFilenameFromContentDispositionHeader)(contentDisposition);
+
+ if (/\.pdf$/i.test(filename)) {
+ return filename;
+ }
+ }
+
+ return null;
+}
+
+function createResponseStatusError(status, url) {
+ if (status === 404 || status === 0 && /^file:/.test(url)) {
+ return new _util.MissingPDFException('Missing PDF "' + url + '".');
+ }
+
+ return new _util.UnexpectedResponseException('Unexpected server response (' + status + ') while retrieving PDF "' + url + '".', status);
+}
+
+function validateResponseStatus(status) {
+ return status === 200 || status === 206;
+}
+
+/***/ }),
+/* 167 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.getFilenameFromContentDispositionHeader = getFilenameFromContentDispositionHeader;
+
+function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
+
+function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
+
+function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
+
+function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
+
+function getFilenameFromContentDispositionHeader(contentDisposition) {
+ var needsEncodingFixup = true;
+ var tmp = toParamRegExp('filename\\*', 'i').exec(contentDisposition);
+
+ if (tmp) {
+ tmp = tmp[1];
+ var filename = rfc2616unquote(tmp);
+ filename = unescape(filename);
+ filename = rfc5987decode(filename);
+ filename = rfc2047decode(filename);
+ return fixupEncoding(filename);
+ }
+
+ tmp = rfc2231getparam(contentDisposition);
+
+ if (tmp) {
+ var _filename = rfc2047decode(tmp);
+
+ return fixupEncoding(_filename);
+ }
+
+ tmp = toParamRegExp('filename', 'i').exec(contentDisposition);
+
+ if (tmp) {
+ tmp = tmp[1];
+
+ var _filename2 = rfc2616unquote(tmp);
+
+ _filename2 = rfc2047decode(_filename2);
+ return fixupEncoding(_filename2);
+ }
+
+ function toParamRegExp(attributePattern, flags) {
+ return new RegExp('(?:^|;)\\s*' + attributePattern + '\\s*=\\s*' + '(' + '[^";\\s][^;\\s]*' + '|' + '"(?:[^"\\\\]|\\\\"?)+"?' + ')', flags);
+ }
+
+ function textdecode(encoding, value) {
+ if (encoding) {
+ if (!/^[\x00-\xFF]+$/.test(value)) {
+ return value;
+ }
+
+ try {
+ var decoder = new TextDecoder(encoding, {
+ fatal: true
+ });
+ var bytes = Array.from(value, function (ch) {
+ return ch.charCodeAt(0) & 0xFF;
+ });
+ value = decoder.decode(new Uint8Array(bytes));
+ needsEncodingFixup = false;
+ } catch (e) {
+ if (/^utf-?8$/i.test(encoding)) {
+ try {
+ value = decodeURIComponent(escape(value));
+ needsEncodingFixup = false;
+ } catch (err) {}
+ }
+ }
+ }
+
+ return value;
+ }
+
+ function fixupEncoding(value) {
+ if (needsEncodingFixup && /[\x80-\xff]/.test(value)) {
+ value = textdecode('utf-8', value);
+
+ if (needsEncodingFixup) {
+ value = textdecode('iso-8859-1', value);
+ }
+ }
+
+ return value;
+ }
+
+ function rfc2231getparam(contentDisposition) {
+ var matches = [],
+ match;
+ var iter = toParamRegExp('filename\\*((?!0\\d)\\d+)(\\*?)', 'ig');
+
+ while ((match = iter.exec(contentDisposition)) !== null) {
+ var _match = match,
+ _match2 = _slicedToArray(_match, 4),
+ n = _match2[1],
+ quot = _match2[2],
+ part = _match2[3];
+
+ n = parseInt(n, 10);
+
+ if (n in matches) {
+ if (n === 0) {
+ break;
+ }
+
+ continue;
+ }
+
+ matches[n] = [quot, part];
+ }
+
+ var parts = [];
+
+ for (var n = 0; n < matches.length; ++n) {
+ if (!(n in matches)) {
+ break;
+ }
+
+ var _matches$n = _slicedToArray(matches[n], 2),
+ quot = _matches$n[0],
+ part = _matches$n[1];
+
+ part = rfc2616unquote(part);
+
+ if (quot) {
+ part = unescape(part);
+
+ if (n === 0) {
+ part = rfc5987decode(part);
+ }
+ }
+
+ parts.push(part);
+ }
+
+ return parts.join('');
+ }
+
+ function rfc2616unquote(value) {
+ if (value.startsWith('"')) {
+ var parts = value.slice(1).split('\\"');
+
+ for (var i = 0; i < parts.length; ++i) {
+ var quotindex = parts[i].indexOf('"');
+
+ if (quotindex !== -1) {
+ parts[i] = parts[i].slice(0, quotindex);
+ parts.length = i + 1;
+ }
+
+ parts[i] = parts[i].replace(/\\(.)/g, '$1');
+ }
+
+ value = parts.join('"');
+ }
+
+ return value;
+ }
+
+ function rfc5987decode(extvalue) {
+ var encodingend = extvalue.indexOf('\'');
+
+ if (encodingend === -1) {
+ return extvalue;
+ }
+
+ var encoding = extvalue.slice(0, encodingend);
+ var langvalue = extvalue.slice(encodingend + 1);
+ var value = langvalue.replace(/^[^']*'/, '');
+ return textdecode(encoding, value);
+ }
+
+ function rfc2047decode(value) {
+ if (!value.startsWith('=?') || /[\x00-\x19\x80-\xff]/.test(value)) {
+ return value;
+ }
+
+ return value.replace(/=\?([\w-]*)\?([QqBb])\?((?:[^?]|\?(?!=))*)\?=/g, function (_, charset, encoding, text) {
+ if (encoding === 'q' || encoding === 'Q') {
+ text = text.replace(/_/g, ' ');
+ text = text.replace(/=([0-9a-fA-F]{2})/g, function (_, hex) {
+ return String.fromCharCode(parseInt(hex, 16));
+ });
+ return textdecode(charset, text);
+ }
+
+ try {
+ text = atob(text);
+ } catch (e) {}
+
+ return textdecode(charset, text);
+ });
+ }
+
+ return '';
+}
+
+/***/ }),
+/* 168 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.PDFNetworkStream = void 0;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(148));
+
+var _util = __w_pdfjs_require__(1);
+
+var _network_utils = __w_pdfjs_require__(166);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+;
+var OK_RESPONSE = 200;
+var PARTIAL_CONTENT_RESPONSE = 206;
+
+function getArrayBuffer(xhr) {
+ var data = xhr.response;
+
+ if (typeof data !== 'string') {
+ return data;
+ }
+
+ var array = (0, _util.stringToBytes)(data);
+ return array.buffer;
+}
+
+var NetworkManager =
+/*#__PURE__*/
+function () {
+ function NetworkManager(url, args) {
+ _classCallCheck(this, NetworkManager);
+
+ this.url = url;
+ args = args || {};
+ this.isHttp = /^https?:/i.test(url);
+ this.httpHeaders = this.isHttp && args.httpHeaders || {};
+ this.withCredentials = args.withCredentials || false;
+
+ this.getXhr = args.getXhr || function NetworkManager_getXhr() {
+ return new XMLHttpRequest();
+ };
+
+ this.currXhrId = 0;
+ this.pendingRequests = Object.create(null);
+ }
+
+ _createClass(NetworkManager, [{
+ key: "requestRange",
+ value: function requestRange(begin, end, listeners) {
+ var args = {
+ begin: begin,
+ end: end
+ };
+
+ for (var prop in listeners) {
+ args[prop] = listeners[prop];
+ }
+
+ return this.request(args);
+ }
+ }, {
+ key: "requestFull",
+ value: function requestFull(listeners) {
+ return this.request(listeners);
+ }
+ }, {
+ key: "request",
+ value: function request(args) {
+ var xhr = this.getXhr();
+ var xhrId = this.currXhrId++;
+ var pendingRequest = this.pendingRequests[xhrId] = {
+ xhr: xhr
+ };
+ xhr.open('GET', this.url);
+ xhr.withCredentials = this.withCredentials;
+
+ for (var property in this.httpHeaders) {
+ var value = this.httpHeaders[property];
+
+ if (typeof value === 'undefined') {
+ continue;
+ }
+
+ xhr.setRequestHeader(property, value);
+ }
+
+ if (this.isHttp && 'begin' in args && 'end' in args) {
+ xhr.setRequestHeader('Range', "bytes=".concat(args.begin, "-").concat(args.end - 1));
+ pendingRequest.expectedStatus = PARTIAL_CONTENT_RESPONSE;
+ } else {
+ pendingRequest.expectedStatus = OK_RESPONSE;
+ }
+
+ xhr.responseType = 'arraybuffer';
+
+ if (args.onError) {
+ xhr.onerror = function (evt) {
+ args.onError(xhr.status);
+ };
+ }
+
+ xhr.onreadystatechange = this.onStateChange.bind(this, xhrId);
+ xhr.onprogress = this.onProgress.bind(this, xhrId);
+ pendingRequest.onHeadersReceived = args.onHeadersReceived;
+ pendingRequest.onDone = args.onDone;
+ pendingRequest.onError = args.onError;
+ pendingRequest.onProgress = args.onProgress;
+ xhr.send(null);
+ return xhrId;
+ }
+ }, {
+ key: "onProgress",
+ value: function onProgress(xhrId, evt) {
+ var pendingRequest = this.pendingRequests[xhrId];
+
+ if (!pendingRequest) {
+ return;
+ }
+
+ if (pendingRequest.onProgress) {
+ pendingRequest.onProgress(evt);
+ }
+ }
+ }, {
+ key: "onStateChange",
+ value: function onStateChange(xhrId, evt) {
+ var pendingRequest = this.pendingRequests[xhrId];
+
+ if (!pendingRequest) {
+ return;
+ }
+
+ var xhr = pendingRequest.xhr;
+
+ if (xhr.readyState >= 2 && pendingRequest.onHeadersReceived) {
+ pendingRequest.onHeadersReceived();
+ delete pendingRequest.onHeadersReceived;
+ }
+
+ if (xhr.readyState !== 4) {
+ return;
+ }
+
+ if (!(xhrId in this.pendingRequests)) {
+ return;
+ }
+
+ delete this.pendingRequests[xhrId];
+
+ if (xhr.status === 0 && this.isHttp) {
+ if (pendingRequest.onError) {
+ pendingRequest.onError(xhr.status);
+ }
+
+ return;
+ }
+
+ var xhrStatus = xhr.status || OK_RESPONSE;
+ var ok_response_on_range_request = xhrStatus === OK_RESPONSE && pendingRequest.expectedStatus === PARTIAL_CONTENT_RESPONSE;
+
+ if (!ok_response_on_range_request && xhrStatus !== pendingRequest.expectedStatus) {
+ if (pendingRequest.onError) {
+ pendingRequest.onError(xhr.status);
+ }
+
+ return;
+ }
+
+ var chunk = getArrayBuffer(xhr);
+
+ if (xhrStatus === PARTIAL_CONTENT_RESPONSE) {
+ var rangeHeader = xhr.getResponseHeader('Content-Range');
+ var matches = /bytes (\d+)-(\d+)\/(\d+)/.exec(rangeHeader);
+ pendingRequest.onDone({
+ begin: parseInt(matches[1], 10),
+ chunk: chunk
+ });
+ } else if (chunk) {
+ pendingRequest.onDone({
+ begin: 0,
+ chunk: chunk
+ });
+ } else if (pendingRequest.onError) {
+ pendingRequest.onError(xhr.status);
+ }
+ }
+ }, {
+ key: "hasPendingRequests",
+ value: function hasPendingRequests() {
+ for (var xhrId in this.pendingRequests) {
+ return true;
+ }
+
+ return false;
+ }
+ }, {
+ key: "getRequestXhr",
+ value: function getRequestXhr(xhrId) {
+ return this.pendingRequests[xhrId].xhr;
+ }
+ }, {
+ key: "isPendingRequest",
+ value: function isPendingRequest(xhrId) {
+ return xhrId in this.pendingRequests;
+ }
+ }, {
+ key: "abortAllRequests",
+ value: function abortAllRequests() {
+ for (var xhrId in this.pendingRequests) {
+ this.abortRequest(xhrId | 0);
+ }
+ }
+ }, {
+ key: "abortRequest",
+ value: function abortRequest(xhrId) {
+ var xhr = this.pendingRequests[xhrId].xhr;
+ delete this.pendingRequests[xhrId];
+ xhr.abort();
+ }
+ }]);
+
+ return NetworkManager;
+}();
+
+var PDFNetworkStream =
+/*#__PURE__*/
+function () {
+ function PDFNetworkStream(source) {
+ _classCallCheck(this, PDFNetworkStream);
+
+ this._source = source;
+ this._manager = new NetworkManager(source.url, {
+ httpHeaders: source.httpHeaders,
+ withCredentials: source.withCredentials
+ });
+ this._rangeChunkSize = source.rangeChunkSize;
+ this._fullRequestReader = null;
+ this._rangeRequestReaders = [];
+ }
+
+ _createClass(PDFNetworkStream, [{
+ key: "_onRangeRequestReaderClosed",
+ value: function _onRangeRequestReaderClosed(reader) {
+ var i = this._rangeRequestReaders.indexOf(reader);
+
+ if (i >= 0) {
+ this._rangeRequestReaders.splice(i, 1);
+ }
+ }
+ }, {
+ key: "getFullReader",
+ value: function getFullReader() {
+ (0, _util.assert)(!this._fullRequestReader);
+ this._fullRequestReader = new PDFNetworkStreamFullRequestReader(this._manager, this._source);
+ return this._fullRequestReader;
+ }
+ }, {
+ key: "getRangeReader",
+ value: function getRangeReader(begin, end) {
+ var reader = new PDFNetworkStreamRangeRequestReader(this._manager, begin, end);
+ reader.onClosed = this._onRangeRequestReaderClosed.bind(this);
+
+ this._rangeRequestReaders.push(reader);
+
+ return reader;
+ }
+ }, {
+ key: "cancelAllRequests",
+ value: function cancelAllRequests(reason) {
+ if (this._fullRequestReader) {
+ this._fullRequestReader.cancel(reason);
+ }
+
+ var readers = this._rangeRequestReaders.slice(0);
+
+ readers.forEach(function (reader) {
+ reader.cancel(reason);
+ });
+ }
+ }]);
+
+ return PDFNetworkStream;
+}();
+
+exports.PDFNetworkStream = PDFNetworkStream;
+
+var PDFNetworkStreamFullRequestReader =
+/*#__PURE__*/
+function () {
+ function PDFNetworkStreamFullRequestReader(manager, source) {
+ _classCallCheck(this, PDFNetworkStreamFullRequestReader);
+
+ this._manager = manager;
+ var args = {
+ onHeadersReceived: this._onHeadersReceived.bind(this),
+ onDone: this._onDone.bind(this),
+ onError: this._onError.bind(this),
+ onProgress: this._onProgress.bind(this)
+ };
+ this._url = source.url;
+ this._fullRequestId = manager.requestFull(args);
+ this._headersReceivedCapability = (0, _util.createPromiseCapability)();
+ this._disableRange = source.disableRange || false;
+ this._contentLength = source.length;
+ this._rangeChunkSize = source.rangeChunkSize;
+
+ if (!this._rangeChunkSize && !this._disableRange) {
+ this._disableRange = true;
+ }
+
+ this._isStreamingSupported = false;
+ this._isRangeSupported = false;
+ this._cachedChunks = [];
+ this._requests = [];
+ this._done = false;
+ this._storedError = undefined;
+ this._filename = null;
+ this.onProgress = null;
+ }
+
+ _createClass(PDFNetworkStreamFullRequestReader, [{
+ key: "_onHeadersReceived",
+ value: function _onHeadersReceived() {
+ var fullRequestXhrId = this._fullRequestId;
+
+ var fullRequestXhr = this._manager.getRequestXhr(fullRequestXhrId);
+
+ var getResponseHeader = function getResponseHeader(name) {
+ return fullRequestXhr.getResponseHeader(name);
+ };
+
+ var _validateRangeRequest = (0, _network_utils.validateRangeRequestCapabilities)({
+ getResponseHeader: getResponseHeader,
+ isHttp: this._manager.isHttp,
+ rangeChunkSize: this._rangeChunkSize,
+ disableRange: this._disableRange
+ }),
+ allowRangeRequests = _validateRangeRequest.allowRangeRequests,
+ suggestedLength = _validateRangeRequest.suggestedLength;
+
+ if (allowRangeRequests) {
+ this._isRangeSupported = true;
+ }
+
+ this._contentLength = suggestedLength || this._contentLength;
+ this._filename = (0, _network_utils.extractFilenameFromHeader)(getResponseHeader);
+
+ if (this._isRangeSupported) {
+ this._manager.abortRequest(fullRequestXhrId);
+ }
+
+ this._headersReceivedCapability.resolve();
+ }
+ }, {
+ key: "_onDone",
+ value: function _onDone(args) {
+ if (args) {
+ if (this._requests.length > 0) {
+ var requestCapability = this._requests.shift();
+
+ requestCapability.resolve({
+ value: args.chunk,
+ done: false
+ });
+ } else {
+ this._cachedChunks.push(args.chunk);
+ }
+ }
+
+ this._done = true;
+
+ if (this._cachedChunks.length > 0) {
+ return;
+ }
+
+ this._requests.forEach(function (requestCapability) {
+ requestCapability.resolve({
+ value: undefined,
+ done: true
+ });
+ });
+
+ this._requests = [];
+ }
+ }, {
+ key: "_onError",
+ value: function _onError(status) {
+ var url = this._url;
+ var exception = (0, _network_utils.createResponseStatusError)(status, url);
+ this._storedError = exception;
+
+ this._headersReceivedCapability.reject(exception);
+
+ this._requests.forEach(function (requestCapability) {
+ requestCapability.reject(exception);
+ });
+
+ this._requests = [];
+ this._cachedChunks = [];
+ }
+ }, {
+ key: "_onProgress",
+ value: function _onProgress(data) {
+ if (this.onProgress) {
+ this.onProgress({
+ loaded: data.loaded,
+ total: data.lengthComputable ? data.total : this._contentLength
+ });
+ }
+ }
+ }, {
+ key: "read",
+ value: function () {
+ var _read = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee() {
+ var chunk, requestCapability;
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ if (!this._storedError) {
+ _context.next = 2;
+ break;
+ }
+
+ throw this._storedError;
+
+ case 2:
+ if (!(this._cachedChunks.length > 0)) {
+ _context.next = 5;
+ break;
+ }
+
+ chunk = this._cachedChunks.shift();
+ return _context.abrupt("return", {
+ value: chunk,
+ done: false
+ });
+
+ case 5:
+ if (!this._done) {
+ _context.next = 7;
+ break;
+ }
+
+ return _context.abrupt("return", {
+ value: undefined,
+ done: true
+ });
+
+ case 7:
+ requestCapability = (0, _util.createPromiseCapability)();
+
+ this._requests.push(requestCapability);
+
+ return _context.abrupt("return", requestCapability.promise);
+
+ case 10:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, this);
+ }));
+
+ function read() {
+ return _read.apply(this, arguments);
+ }
+
+ return read;
+ }()
+ }, {
+ key: "cancel",
+ value: function cancel(reason) {
+ this._done = true;
+
+ this._headersReceivedCapability.reject(reason);
+
+ this._requests.forEach(function (requestCapability) {
+ requestCapability.resolve({
+ value: undefined,
+ done: true
+ });
+ });
+
+ this._requests = [];
+
+ if (this._manager.isPendingRequest(this._fullRequestId)) {
+ this._manager.abortRequest(this._fullRequestId);
+ }
+
+ this._fullRequestReader = null;
+ }
+ }, {
+ key: "filename",
+ get: function get() {
+ return this._filename;
+ }
+ }, {
+ key: "isRangeSupported",
+ get: function get() {
+ return this._isRangeSupported;
+ }
+ }, {
+ key: "isStreamingSupported",
+ get: function get() {
+ return this._isStreamingSupported;
+ }
+ }, {
+ key: "contentLength",
+ get: function get() {
+ return this._contentLength;
+ }
+ }, {
+ key: "headersReady",
+ get: function get() {
+ return this._headersReceivedCapability.promise;
+ }
+ }]);
+
+ return PDFNetworkStreamFullRequestReader;
+}();
+
+var PDFNetworkStreamRangeRequestReader =
+/*#__PURE__*/
+function () {
+ function PDFNetworkStreamRangeRequestReader(manager, begin, end) {
+ _classCallCheck(this, PDFNetworkStreamRangeRequestReader);
+
+ this._manager = manager;
+ var args = {
+ onDone: this._onDone.bind(this),
+ onProgress: this._onProgress.bind(this)
+ };
+ this._requestId = manager.requestRange(begin, end, args);
+ this._requests = [];
+ this._queuedChunk = null;
+ this._done = false;
+ this.onProgress = null;
+ this.onClosed = null;
+ }
+
+ _createClass(PDFNetworkStreamRangeRequestReader, [{
+ key: "_close",
+ value: function _close() {
+ if (this.onClosed) {
+ this.onClosed(this);
+ }
+ }
+ }, {
+ key: "_onDone",
+ value: function _onDone(data) {
+ var chunk = data.chunk;
+
+ if (this._requests.length > 0) {
+ var requestCapability = this._requests.shift();
+
+ requestCapability.resolve({
+ value: chunk,
+ done: false
+ });
+ } else {
+ this._queuedChunk = chunk;
+ }
+
+ this._done = true;
+
+ this._requests.forEach(function (requestCapability) {
+ requestCapability.resolve({
+ value: undefined,
+ done: true
+ });
+ });
+
+ this._requests = [];
+
+ this._close();
+ }
+ }, {
+ key: "_onProgress",
+ value: function _onProgress(evt) {
+ if (!this.isStreamingSupported && this.onProgress) {
+ this.onProgress({
+ loaded: evt.loaded
+ });
+ }
+ }
+ }, {
+ key: "read",
+ value: function () {
+ var _read2 = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee2() {
+ var chunk, requestCapability;
+ return _regenerator["default"].wrap(function _callee2$(_context2) {
+ while (1) {
+ switch (_context2.prev = _context2.next) {
+ case 0:
+ if (!(this._queuedChunk !== null)) {
+ _context2.next = 4;
+ break;
+ }
+
+ chunk = this._queuedChunk;
+ this._queuedChunk = null;
+ return _context2.abrupt("return", {
+ value: chunk,
+ done: false
+ });
+
+ case 4:
+ if (!this._done) {
+ _context2.next = 6;
+ break;
+ }
+
+ return _context2.abrupt("return", {
+ value: undefined,
+ done: true
+ });
+
+ case 6:
+ requestCapability = (0, _util.createPromiseCapability)();
+
+ this._requests.push(requestCapability);
+
+ return _context2.abrupt("return", requestCapability.promise);
+
+ case 9:
+ case "end":
+ return _context2.stop();
+ }
+ }
+ }, _callee2, this);
+ }));
+
+ function read() {
+ return _read2.apply(this, arguments);
+ }
+
+ return read;
+ }()
+ }, {
+ key: "cancel",
+ value: function cancel(reason) {
+ this._done = true;
+
+ this._requests.forEach(function (requestCapability) {
+ requestCapability.resolve({
+ value: undefined,
+ done: true
+ });
+ });
+
+ this._requests = [];
+
+ if (this._manager.isPendingRequest(this._requestId)) {
+ this._manager.abortRequest(this._requestId);
+ }
+
+ this._close();
+ }
+ }, {
+ key: "isStreamingSupported",
+ get: function get() {
+ return false;
+ }
+ }]);
+
+ return PDFNetworkStreamRangeRequestReader;
+}();
+
+/***/ }),
+/* 169 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.PDFFetchStream = void 0;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(148));
+
+var _util = __w_pdfjs_require__(1);
+
+var _network_utils = __w_pdfjs_require__(166);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+function createFetchOptions(headers, withCredentials, abortController) {
+ return {
+ method: 'GET',
+ headers: headers,
+ signal: abortController && abortController.signal,
+ mode: 'cors',
+ credentials: withCredentials ? 'include' : 'same-origin',
+ redirect: 'follow'
+ };
+}
+
+var PDFFetchStream =
+/*#__PURE__*/
+function () {
+ function PDFFetchStream(source) {
+ _classCallCheck(this, PDFFetchStream);
+
+ this.source = source;
+ this.isHttp = /^https?:/i.test(source.url);
+ this.httpHeaders = this.isHttp && source.httpHeaders || {};
+ this._fullRequestReader = null;
+ this._rangeRequestReaders = [];
+ }
+
+ _createClass(PDFFetchStream, [{
+ key: "getFullReader",
+ value: function getFullReader() {
+ (0, _util.assert)(!this._fullRequestReader);
+ this._fullRequestReader = new PDFFetchStreamReader(this);
+ return this._fullRequestReader;
+ }
+ }, {
+ key: "getRangeReader",
+ value: function getRangeReader(begin, end) {
+ if (end <= this._progressiveDataLength) {
+ return null;
+ }
+
+ var reader = new PDFFetchStreamRangeReader(this, begin, end);
+
+ this._rangeRequestReaders.push(reader);
+
+ return reader;
+ }
+ }, {
+ key: "cancelAllRequests",
+ value: function cancelAllRequests(reason) {
+ if (this._fullRequestReader) {
+ this._fullRequestReader.cancel(reason);
+ }
+
+ var readers = this._rangeRequestReaders.slice(0);
+
+ readers.forEach(function (reader) {
+ reader.cancel(reason);
+ });
+ }
+ }, {
+ key: "_progressiveDataLength",
+ get: function get() {
+ return this._fullRequestReader ? this._fullRequestReader._loaded : 0;
+ }
+ }]);
+
+ return PDFFetchStream;
+}();
+
+exports.PDFFetchStream = PDFFetchStream;
+
+var PDFFetchStreamReader =
+/*#__PURE__*/
+function () {
+ function PDFFetchStreamReader(stream) {
+ var _this = this;
+
+ _classCallCheck(this, PDFFetchStreamReader);
+
+ this._stream = stream;
+ this._reader = null;
+ this._loaded = 0;
+ this._filename = null;
+ var source = stream.source;
+ this._withCredentials = source.withCredentials || false;
+ this._contentLength = source.length;
+ this._headersCapability = (0, _util.createPromiseCapability)();
+ this._disableRange = source.disableRange || false;
+ this._rangeChunkSize = source.rangeChunkSize;
+
+ if (!this._rangeChunkSize && !this._disableRange) {
+ this._disableRange = true;
+ }
+
+ if (typeof AbortController !== 'undefined') {
+ this._abortController = new AbortController();
+ }
+
+ this._isStreamingSupported = !source.disableStream;
+ this._isRangeSupported = !source.disableRange;
+ this._headers = new Headers();
+
+ for (var property in this._stream.httpHeaders) {
+ var value = this._stream.httpHeaders[property];
+
+ if (typeof value === 'undefined') {
+ continue;
+ }
+
+ this._headers.append(property, value);
+ }
+
+ var url = source.url;
+ fetch(url, createFetchOptions(this._headers, this._withCredentials, this._abortController)).then(function (response) {
+ if (!(0, _network_utils.validateResponseStatus)(response.status)) {
+ throw (0, _network_utils.createResponseStatusError)(response.status, url);
+ }
+
+ _this._reader = response.body.getReader();
+
+ _this._headersCapability.resolve();
+
+ var getResponseHeader = function getResponseHeader(name) {
+ return response.headers.get(name);
+ };
+
+ var _validateRangeRequest = (0, _network_utils.validateRangeRequestCapabilities)({
+ getResponseHeader: getResponseHeader,
+ isHttp: _this._stream.isHttp,
+ rangeChunkSize: _this._rangeChunkSize,
+ disableRange: _this._disableRange
+ }),
+ allowRangeRequests = _validateRangeRequest.allowRangeRequests,
+ suggestedLength = _validateRangeRequest.suggestedLength;
+
+ _this._isRangeSupported = allowRangeRequests;
+ _this._contentLength = suggestedLength || _this._contentLength;
+ _this._filename = (0, _network_utils.extractFilenameFromHeader)(getResponseHeader);
+
+ if (!_this._isStreamingSupported && _this._isRangeSupported) {
+ _this.cancel(new _util.AbortException('Streaming is disabled.'));
+ }
+ })["catch"](this._headersCapability.reject);
+ this.onProgress = null;
+ }
+
+ _createClass(PDFFetchStreamReader, [{
+ key: "read",
+ value: function () {
+ var _read = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee() {
+ var _ref, value, done, buffer;
+
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ _context.next = 2;
+ return this._headersCapability.promise;
+
+ case 2:
+ _context.next = 4;
+ return this._reader.read();
+
+ case 4:
+ _ref = _context.sent;
+ value = _ref.value;
+ done = _ref.done;
+
+ if (!done) {
+ _context.next = 9;
+ break;
+ }
+
+ return _context.abrupt("return", {
+ value: value,
+ done: done
+ });
+
+ case 9:
+ this._loaded += value.byteLength;
+
+ if (this.onProgress) {
+ this.onProgress({
+ loaded: this._loaded,
+ total: this._contentLength
+ });
+ }
+
+ buffer = new Uint8Array(value).buffer;
+ return _context.abrupt("return", {
+ value: buffer,
+ done: false
+ });
+
+ case 13:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, this);
+ }));
+
+ function read() {
+ return _read.apply(this, arguments);
+ }
+
+ return read;
+ }()
+ }, {
+ key: "cancel",
+ value: function cancel(reason) {
+ if (this._reader) {
+ this._reader.cancel(reason);
+ }
+
+ if (this._abortController) {
+ this._abortController.abort();
+ }
+ }
+ }, {
+ key: "headersReady",
+ get: function get() {
+ return this._headersCapability.promise;
+ }
+ }, {
+ key: "filename",
+ get: function get() {
+ return this._filename;
+ }
+ }, {
+ key: "contentLength",
+ get: function get() {
+ return this._contentLength;
+ }
+ }, {
+ key: "isRangeSupported",
+ get: function get() {
+ return this._isRangeSupported;
+ }
+ }, {
+ key: "isStreamingSupported",
+ get: function get() {
+ return this._isStreamingSupported;
+ }
+ }]);
+
+ return PDFFetchStreamReader;
+}();
+
+var PDFFetchStreamRangeReader =
+/*#__PURE__*/
+function () {
+ function PDFFetchStreamRangeReader(stream, begin, end) {
+ var _this2 = this;
+
+ _classCallCheck(this, PDFFetchStreamRangeReader);
+
+ this._stream = stream;
+ this._reader = null;
+ this._loaded = 0;
+ var source = stream.source;
+ this._withCredentials = source.withCredentials || false;
+ this._readCapability = (0, _util.createPromiseCapability)();
+ this._isStreamingSupported = !source.disableStream;
+
+ if (typeof AbortController !== 'undefined') {
+ this._abortController = new AbortController();
+ }
+
+ this._headers = new Headers();
+
+ for (var property in this._stream.httpHeaders) {
+ var value = this._stream.httpHeaders[property];
+
+ if (typeof value === 'undefined') {
+ continue;
+ }
+
+ this._headers.append(property, value);
+ }
+
+ this._headers.append('Range', "bytes=".concat(begin, "-").concat(end - 1));
+
+ var url = source.url;
+ fetch(url, createFetchOptions(this._headers, this._withCredentials, this._abortController)).then(function (response) {
+ if (!(0, _network_utils.validateResponseStatus)(response.status)) {
+ throw (0, _network_utils.createResponseStatusError)(response.status, url);
+ }
+
+ _this2._readCapability.resolve();
+
+ _this2._reader = response.body.getReader();
+ });
+ this.onProgress = null;
+ }
+
+ _createClass(PDFFetchStreamRangeReader, [{
+ key: "read",
+ value: function () {
+ var _read2 = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee2() {
+ var _ref2, value, done, buffer;
+
+ return _regenerator["default"].wrap(function _callee2$(_context2) {
+ while (1) {
+ switch (_context2.prev = _context2.next) {
+ case 0:
+ _context2.next = 2;
+ return this._readCapability.promise;
+
+ case 2:
+ _context2.next = 4;
+ return this._reader.read();
+
+ case 4:
+ _ref2 = _context2.sent;
+ value = _ref2.value;
+ done = _ref2.done;
+
+ if (!done) {
+ _context2.next = 9;
+ break;
+ }
+
+ return _context2.abrupt("return", {
+ value: value,
+ done: done
+ });
+
+ case 9:
+ this._loaded += value.byteLength;
+
+ if (this.onProgress) {
+ this.onProgress({
+ loaded: this._loaded
+ });
+ }
+
+ buffer = new Uint8Array(value).buffer;
+ return _context2.abrupt("return", {
+ value: buffer,
+ done: false
+ });
+
+ case 13:
+ case "end":
+ return _context2.stop();
+ }
+ }
+ }, _callee2, this);
+ }));
+
+ function read() {
+ return _read2.apply(this, arguments);
+ }
+
+ return read;
+ }()
+ }, {
+ key: "cancel",
+ value: function cancel(reason) {
+ if (this._reader) {
+ this._reader.cancel(reason);
+ }
+
+ if (this._abortController) {
+ this._abortController.abort();
+ }
+ }
+ }, {
+ key: "isStreamingSupported",
+ get: function get() {
+ return this._isStreamingSupported;
+ }
+ }]);
+
+ return PDFFetchStreamRangeReader;
+}();
+
+/***/ })
+/******/ ]);
+});
+//# sourceMappingURL=pdf.js.map
\ No newline at end of file
diff --git a/testing/web-platform/tests/tools/third_party/pdf_js/pdf.worker.js b/testing/web-platform/tests/tools/third_party/pdf_js/pdf.worker.js
new file mode 100644
index 0000000000..211fbbdc4c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pdf_js/pdf.worker.js
@@ -0,0 +1,56199 @@
+/**
+ * @licstart The following is the entire license notice for the
+ * Javascript code in this page
+ *
+ * Copyright 2019 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * @licend The above is the entire license notice for the
+ * Javascript code in this page
+ */
+
+(function webpackUniversalModuleDefinition(root, factory) {
+ if(typeof exports === 'object' && typeof module === 'object')
+ module.exports = factory();
+ else if(typeof define === 'function' && define.amd)
+ define("pdfjs-dist/build/pdf.worker", [], factory);
+ else if(typeof exports === 'object')
+ exports["pdfjs-dist/build/pdf.worker"] = factory();
+ else
+ root["pdfjs-dist/build/pdf.worker"] = root.pdfjsWorker = factory();
+})(this, function() {
+return /******/ (function(modules) { // webpackBootstrap
+/******/ // The module cache
+/******/ var installedModules = {};
+/******/
+/******/ // The require function
+/******/ function __w_pdfjs_require__(moduleId) {
+/******/
+/******/ // Check if module is in cache
+/******/ if(installedModules[moduleId]) {
+/******/ return installedModules[moduleId].exports;
+/******/ }
+/******/ // Create a new module (and put it into the cache)
+/******/ var module = installedModules[moduleId] = {
+/******/ i: moduleId,
+/******/ l: false,
+/******/ exports: {}
+/******/ };
+/******/
+/******/ // Execute the module function
+/******/ modules[moduleId].call(module.exports, module, module.exports, __w_pdfjs_require__);
+/******/
+/******/ // Flag the module as loaded
+/******/ module.l = true;
+/******/
+/******/ // Return the exports of the module
+/******/ return module.exports;
+/******/ }
+/******/
+/******/
+/******/ // expose the modules object (__webpack_modules__)
+/******/ __w_pdfjs_require__.m = modules;
+/******/
+/******/ // expose the module cache
+/******/ __w_pdfjs_require__.c = installedModules;
+/******/
+/******/ // define getter function for harmony exports
+/******/ __w_pdfjs_require__.d = function(exports, name, getter) {
+/******/ if(!__w_pdfjs_require__.o(exports, name)) {
+/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
+/******/ }
+/******/ };
+/******/
+/******/ // define __esModule on exports
+/******/ __w_pdfjs_require__.r = function(exports) {
+/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
+/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
+/******/ }
+/******/ Object.defineProperty(exports, '__esModule', { value: true });
+/******/ };
+/******/
+/******/ // create a fake namespace object
+/******/ // mode & 1: value is a module id, require it
+/******/ // mode & 2: merge all properties of value into the ns
+/******/ // mode & 4: return value when already ns object
+/******/ // mode & 8|1: behave like require
+/******/ __w_pdfjs_require__.t = function(value, mode) {
+/******/ if(mode & 1) value = __w_pdfjs_require__(value);
+/******/ if(mode & 8) return value;
+/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
+/******/ var ns = Object.create(null);
+/******/ __w_pdfjs_require__.r(ns);
+/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
+/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __w_pdfjs_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
+/******/ return ns;
+/******/ };
+/******/
+/******/ // getDefaultExport function for compatibility with non-harmony modules
+/******/ __w_pdfjs_require__.n = function(module) {
+/******/ var getter = module && module.__esModule ?
+/******/ function getDefault() { return module['default']; } :
+/******/ function getModuleExports() { return module; };
+/******/ __w_pdfjs_require__.d(getter, 'a', getter);
+/******/ return getter;
+/******/ };
+/******/
+/******/ // Object.prototype.hasOwnProperty.call
+/******/ __w_pdfjs_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
+/******/
+/******/ // __webpack_public_path__
+/******/ __w_pdfjs_require__.p = "";
+/******/
+/******/
+/******/ // Load entry module and return exports
+/******/ return __w_pdfjs_require__(__w_pdfjs_require__.s = 0);
+/******/ })
+/************************************************************************/
+/******/ ([
+/* 0 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var pdfjsVersion = '2.2.228';
+var pdfjsBuild = 'd7afb74a';
+
+var pdfjsCoreWorker = __w_pdfjs_require__(1);
+
+exports.WorkerMessageHandler = pdfjsCoreWorker.WorkerMessageHandler;
+
+/***/ }),
+/* 1 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.WorkerMessageHandler = exports.WorkerTask = void 0;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(2));
+
+var _util = __w_pdfjs_require__(5);
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _pdf_manager = __w_pdfjs_require__(152);
+
+var _is_node = _interopRequireDefault(__w_pdfjs_require__(8));
+
+var _message_handler = __w_pdfjs_require__(191);
+
+var _worker_stream = __w_pdfjs_require__(192);
+
+var _core_utils = __w_pdfjs_require__(154);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
+
+function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
+
+function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
+
+function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+var WorkerTask = function WorkerTaskClosure() {
+ function WorkerTask(name) {
+ this.name = name;
+ this.terminated = false;
+ this._capability = (0, _util.createPromiseCapability)();
+ }
+
+ WorkerTask.prototype = {
+ get finished() {
+ return this._capability.promise;
+ },
+
+ finish: function finish() {
+ this._capability.resolve();
+ },
+ terminate: function terminate() {
+ this.terminated = true;
+ },
+ ensureNotTerminated: function ensureNotTerminated() {
+ if (this.terminated) {
+ throw new Error('Worker task was terminated');
+ }
+ }
+ };
+ return WorkerTask;
+}();
+
+exports.WorkerTask = WorkerTask;
+var WorkerMessageHandler = {
+ setup: function setup(handler, port) {
+ var testMessageProcessed = false;
+ handler.on('test', function wphSetupTest(data) {
+ if (testMessageProcessed) {
+ return;
+ }
+
+ testMessageProcessed = true;
+
+ if (!(data instanceof Uint8Array)) {
+ handler.send('test', false);
+ return;
+ }
+
+ var supportTransfers = data[0] === 255;
+ handler.postMessageTransfers = supportTransfers;
+ var xhr = new XMLHttpRequest();
+ var responseExists = 'response' in xhr;
+
+ try {
+ xhr.responseType;
+ } catch (e) {
+ responseExists = false;
+ }
+
+ if (!responseExists) {
+ handler.send('test', false);
+ return;
+ }
+
+ handler.send('test', {
+ supportTypedArray: true,
+ supportTransfers: supportTransfers
+ });
+ });
+ handler.on('configure', function wphConfigure(data) {
+ (0, _util.setVerbosityLevel)(data.verbosity);
+ });
+ handler.on('GetDocRequest', function wphSetupDoc(data) {
+ return WorkerMessageHandler.createDocumentHandler(data, port);
+ });
+ },
+ createDocumentHandler: function createDocumentHandler(docParams, port) {
+ var pdfManager;
+ var terminated = false;
+ var cancelXHRs = null;
+ var WorkerTasks = [];
+ var verbosity = (0, _util.getVerbosityLevel)();
+ var apiVersion = docParams.apiVersion;
+ var workerVersion = '2.2.228';
+
+ if (apiVersion !== workerVersion) {
+ throw new Error("The API version \"".concat(apiVersion, "\" does not match ") + "the Worker version \"".concat(workerVersion, "\"."));
+ }
+
+ var docId = docParams.docId;
+ var docBaseUrl = docParams.docBaseUrl;
+ var workerHandlerName = docParams.docId + '_worker';
+ var handler = new _message_handler.MessageHandler(workerHandlerName, docId, port);
+ handler.postMessageTransfers = docParams.postMessageTransfers;
+
+ function ensureNotTerminated() {
+ if (terminated) {
+ throw new Error('Worker was terminated');
+ }
+ }
+
+ function startWorkerTask(task) {
+ WorkerTasks.push(task);
+ }
+
+ function finishWorkerTask(task) {
+ task.finish();
+ var i = WorkerTasks.indexOf(task);
+ WorkerTasks.splice(i, 1);
+ }
+
+ function loadDocument(_x) {
+ return _loadDocument.apply(this, arguments);
+ }
+
+ function _loadDocument() {
+ _loadDocument = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee(recoveryMode) {
+ var _ref4, _ref5, numPages, fingerprint;
+
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ _context.next = 2;
+ return pdfManager.ensureDoc('checkHeader');
+
+ case 2:
+ _context.next = 4;
+ return pdfManager.ensureDoc('parseStartXRef');
+
+ case 4:
+ _context.next = 6;
+ return pdfManager.ensureDoc('parse', [recoveryMode]);
+
+ case 6:
+ if (recoveryMode) {
+ _context.next = 9;
+ break;
+ }
+
+ _context.next = 9;
+ return pdfManager.ensureDoc('checkFirstPage');
+
+ case 9:
+ _context.next = 11;
+ return Promise.all([pdfManager.ensureDoc('numPages'), pdfManager.ensureDoc('fingerprint')]);
+
+ case 11:
+ _ref4 = _context.sent;
+ _ref5 = _slicedToArray(_ref4, 2);
+ numPages = _ref5[0];
+ fingerprint = _ref5[1];
+ return _context.abrupt("return", {
+ numPages: numPages,
+ fingerprint: fingerprint
+ });
+
+ case 16:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee);
+ }));
+ return _loadDocument.apply(this, arguments);
+ }
+
+ function getPdfManager(data, evaluatorOptions) {
+ var pdfManagerCapability = (0, _util.createPromiseCapability)();
+ var pdfManager;
+ var source = data.source;
+
+ if (source.data) {
+ try {
+ pdfManager = new _pdf_manager.LocalPdfManager(docId, source.data, source.password, evaluatorOptions, docBaseUrl);
+ pdfManagerCapability.resolve(pdfManager);
+ } catch (ex) {
+ pdfManagerCapability.reject(ex);
+ }
+
+ return pdfManagerCapability.promise;
+ }
+
+ var pdfStream,
+ cachedChunks = [];
+
+ try {
+ pdfStream = new _worker_stream.PDFWorkerStream(handler);
+ } catch (ex) {
+ pdfManagerCapability.reject(ex);
+ return pdfManagerCapability.promise;
+ }
+
+ var fullRequest = pdfStream.getFullReader();
+ fullRequest.headersReady.then(function () {
+ if (!fullRequest.isRangeSupported) {
+ return;
+ }
+
+ var disableAutoFetch = source.disableAutoFetch || fullRequest.isStreamingSupported;
+ pdfManager = new _pdf_manager.NetworkPdfManager(docId, pdfStream, {
+ msgHandler: handler,
+ password: source.password,
+ length: fullRequest.contentLength,
+ disableAutoFetch: disableAutoFetch,
+ rangeChunkSize: source.rangeChunkSize
+ }, evaluatorOptions, docBaseUrl);
+
+ for (var i = 0; i < cachedChunks.length; i++) {
+ pdfManager.sendProgressiveData(cachedChunks[i]);
+ }
+
+ cachedChunks = [];
+ pdfManagerCapability.resolve(pdfManager);
+ cancelXHRs = null;
+ })["catch"](function (reason) {
+ pdfManagerCapability.reject(reason);
+ cancelXHRs = null;
+ });
+ var loaded = 0;
+
+ var flushChunks = function flushChunks() {
+ var pdfFile = (0, _util.arraysToBytes)(cachedChunks);
+
+ if (source.length && pdfFile.length !== source.length) {
+ (0, _util.warn)('reported HTTP length is different from actual');
+ }
+
+ try {
+ pdfManager = new _pdf_manager.LocalPdfManager(docId, pdfFile, source.password, evaluatorOptions, docBaseUrl);
+ pdfManagerCapability.resolve(pdfManager);
+ } catch (ex) {
+ pdfManagerCapability.reject(ex);
+ }
+
+ cachedChunks = [];
+ };
+
+ var readPromise = new Promise(function (resolve, reject) {
+ var readChunk = function readChunk(chunk) {
+ try {
+ ensureNotTerminated();
+
+ if (chunk.done) {
+ if (!pdfManager) {
+ flushChunks();
+ }
+
+ cancelXHRs = null;
+ return;
+ }
+
+ var data = chunk.value;
+ loaded += (0, _util.arrayByteLength)(data);
+
+ if (!fullRequest.isStreamingSupported) {
+ handler.send('DocProgress', {
+ loaded: loaded,
+ total: Math.max(loaded, fullRequest.contentLength || 0)
+ });
+ }
+
+ if (pdfManager) {
+ pdfManager.sendProgressiveData(data);
+ } else {
+ cachedChunks.push(data);
+ }
+
+ fullRequest.read().then(readChunk, reject);
+ } catch (e) {
+ reject(e);
+ }
+ };
+
+ fullRequest.read().then(readChunk, reject);
+ });
+ readPromise["catch"](function (e) {
+ pdfManagerCapability.reject(e);
+ cancelXHRs = null;
+ });
+
+ cancelXHRs = function cancelXHRs() {
+ pdfStream.cancelAllRequests('abort');
+ };
+
+ return pdfManagerCapability.promise;
+ }
+
+ function setupDoc(data) {
+ function onSuccess(doc) {
+ ensureNotTerminated();
+ handler.send('GetDoc', {
+ pdfInfo: doc
+ });
+ }
+
+ function onFailure(e) {
+ ensureNotTerminated();
+
+ if (e instanceof _util.PasswordException) {
+ var task = new WorkerTask('PasswordException: response ' + e.code);
+ startWorkerTask(task);
+ handler.sendWithPromise('PasswordRequest', e).then(function (data) {
+ finishWorkerTask(task);
+ pdfManager.updatePassword(data.password);
+ pdfManagerReady();
+ })["catch"](function (boundException) {
+ finishWorkerTask(task);
+ handler.send('PasswordException', boundException);
+ }.bind(null, e));
+ } else if (e instanceof _util.InvalidPDFException) {
+ handler.send('InvalidPDF', e);
+ } else if (e instanceof _util.MissingPDFException) {
+ handler.send('MissingPDF', e);
+ } else if (e instanceof _util.UnexpectedResponseException) {
+ handler.send('UnexpectedResponse', e);
+ } else {
+ handler.send('UnknownError', new _util.UnknownErrorException(e.message, e.toString()));
+ }
+ }
+
+ function pdfManagerReady() {
+ ensureNotTerminated();
+ loadDocument(false).then(onSuccess, function loadFailure(ex) {
+ ensureNotTerminated();
+
+ if (!(ex instanceof _core_utils.XRefParseException)) {
+ onFailure(ex);
+ return;
+ }
+
+ pdfManager.requestLoadedStream();
+ pdfManager.onLoadedStream().then(function () {
+ ensureNotTerminated();
+ loadDocument(true).then(onSuccess, onFailure);
+ });
+ }, onFailure);
+ }
+
+ ensureNotTerminated();
+ var evaluatorOptions = {
+ forceDataSchema: data.disableCreateObjectURL,
+ maxImageSize: data.maxImageSize,
+ disableFontFace: data.disableFontFace,
+ nativeImageDecoderSupport: data.nativeImageDecoderSupport,
+ ignoreErrors: data.ignoreErrors,
+ isEvalSupported: data.isEvalSupported
+ };
+ getPdfManager(data, evaluatorOptions).then(function (newPdfManager) {
+ if (terminated) {
+ newPdfManager.terminate();
+ throw new Error('Worker was terminated');
+ }
+
+ pdfManager = newPdfManager;
+ pdfManager.onLoadedStream().then(function (stream) {
+ handler.send('DataLoaded', {
+ length: stream.bytes.byteLength
+ });
+ });
+ }).then(pdfManagerReady, onFailure);
+ }
+
+ handler.on('GetPage', function wphSetupGetPage(data) {
+ return pdfManager.getPage(data.pageIndex).then(function (page) {
+ return Promise.all([pdfManager.ensure(page, 'rotate'), pdfManager.ensure(page, 'ref'), pdfManager.ensure(page, 'userUnit'), pdfManager.ensure(page, 'view')]).then(function (_ref) {
+ var _ref2 = _slicedToArray(_ref, 4),
+ rotate = _ref2[0],
+ ref = _ref2[1],
+ userUnit = _ref2[2],
+ view = _ref2[3];
+
+ return {
+ rotate: rotate,
+ ref: ref,
+ userUnit: userUnit,
+ view: view
+ };
+ });
+ });
+ });
+ handler.on('GetPageIndex', function wphSetupGetPageIndex(data) {
+ var ref = _primitives.Ref.get(data.ref.num, data.ref.gen);
+
+ var catalog = pdfManager.pdfDocument.catalog;
+ return catalog.getPageIndex(ref);
+ });
+ handler.on('GetDestinations', function wphSetupGetDestinations(data) {
+ return pdfManager.ensureCatalog('destinations');
+ });
+ handler.on('GetDestination', function wphSetupGetDestination(data) {
+ return pdfManager.ensureCatalog('getDestination', [data.id]);
+ });
+ handler.on('GetPageLabels', function wphSetupGetPageLabels(data) {
+ return pdfManager.ensureCatalog('pageLabels');
+ });
+ handler.on('GetPageLayout', function wphSetupGetPageLayout(data) {
+ return pdfManager.ensureCatalog('pageLayout');
+ });
+ handler.on('GetPageMode', function wphSetupGetPageMode(data) {
+ return pdfManager.ensureCatalog('pageMode');
+ });
+ handler.on('GetViewerPreferences', function (data) {
+ return pdfManager.ensureCatalog('viewerPreferences');
+ });
+ handler.on('GetOpenActionDestination', function (data) {
+ return pdfManager.ensureCatalog('openActionDestination');
+ });
+ handler.on('GetAttachments', function wphSetupGetAttachments(data) {
+ return pdfManager.ensureCatalog('attachments');
+ });
+ handler.on('GetJavaScript', function wphSetupGetJavaScript(data) {
+ return pdfManager.ensureCatalog('javaScript');
+ });
+ handler.on('GetOutline', function wphSetupGetOutline(data) {
+ return pdfManager.ensureCatalog('documentOutline');
+ });
+ handler.on('GetPermissions', function (data) {
+ return pdfManager.ensureCatalog('permissions');
+ });
+ handler.on('GetMetadata', function wphSetupGetMetadata(data) {
+ return Promise.all([pdfManager.ensureDoc('documentInfo'), pdfManager.ensureCatalog('metadata')]);
+ });
+ handler.on('GetData', function wphSetupGetData(data) {
+ pdfManager.requestLoadedStream();
+ return pdfManager.onLoadedStream().then(function (stream) {
+ return stream.bytes;
+ });
+ });
+ handler.on('GetStats', function wphSetupGetStats(data) {
+ return pdfManager.pdfDocument.xref.stats;
+ });
+ handler.on('GetAnnotations', function (_ref3) {
+ var pageIndex = _ref3.pageIndex,
+ intent = _ref3.intent;
+ return pdfManager.getPage(pageIndex).then(function (page) {
+ return page.getAnnotationsData(intent);
+ });
+ });
+ handler.on('RenderPageRequest', function wphSetupRenderPage(data) {
+ var pageIndex = data.pageIndex;
+ pdfManager.getPage(pageIndex).then(function (page) {
+ var task = new WorkerTask('RenderPageRequest: page ' + pageIndex);
+ startWorkerTask(task);
+ var start = verbosity >= _util.VerbosityLevel.INFOS ? Date.now() : 0;
+ page.getOperatorList({
+ handler: handler,
+ task: task,
+ intent: data.intent,
+ renderInteractiveForms: data.renderInteractiveForms
+ }).then(function (operatorList) {
+ finishWorkerTask(task);
+
+ if (start) {
+ (0, _util.info)("page=".concat(pageIndex + 1, " - getOperatorList: time=") + "".concat(Date.now() - start, "ms, len=").concat(operatorList.totalLength));
+ }
+ }, function (e) {
+ finishWorkerTask(task);
+
+ if (task.terminated) {
+ return;
+ }
+
+ handler.send('UnsupportedFeature', {
+ featureId: _util.UNSUPPORTED_FEATURES.unknown
+ });
+ var minimumStackMessage = 'worker.js: while trying to getPage() and getOperatorList()';
+ var wrappedException;
+
+ if (typeof e === 'string') {
+ wrappedException = {
+ message: e,
+ stack: minimumStackMessage
+ };
+ } else if (_typeof(e) === 'object') {
+ wrappedException = {
+ message: e.message || e.toString(),
+ stack: e.stack || minimumStackMessage
+ };
+ } else {
+ wrappedException = {
+ message: 'Unknown exception type: ' + _typeof(e),
+ stack: minimumStackMessage
+ };
+ }
+
+ handler.send('PageError', {
+ pageIndex: pageIndex,
+ error: wrappedException,
+ intent: data.intent
+ });
+ });
+ });
+ }, this);
+ handler.on('GetTextContent', function wphExtractText(data, sink) {
+ var pageIndex = data.pageIndex;
+
+ sink.onPull = function (desiredSize) {};
+
+ sink.onCancel = function (reason) {};
+
+ pdfManager.getPage(pageIndex).then(function (page) {
+ var task = new WorkerTask('GetTextContent: page ' + pageIndex);
+ startWorkerTask(task);
+ var start = verbosity >= _util.VerbosityLevel.INFOS ? Date.now() : 0;
+ page.extractTextContent({
+ handler: handler,
+ task: task,
+ sink: sink,
+ normalizeWhitespace: data.normalizeWhitespace,
+ combineTextItems: data.combineTextItems
+ }).then(function () {
+ finishWorkerTask(task);
+
+ if (start) {
+ (0, _util.info)("page=".concat(pageIndex + 1, " - getTextContent: time=") + "".concat(Date.now() - start, "ms"));
+ }
+
+ sink.close();
+ }, function (reason) {
+ finishWorkerTask(task);
+
+ if (task.terminated) {
+ return;
+ }
+
+ sink.error(reason);
+ throw reason;
+ });
+ });
+ });
+ handler.on('FontFallback', function (data) {
+ return pdfManager.fontFallback(data.id, handler);
+ });
+ handler.on('Cleanup', function wphCleanup(data) {
+ return pdfManager.cleanup();
+ });
+ handler.on('Terminate', function wphTerminate(data) {
+ terminated = true;
+
+ if (pdfManager) {
+ pdfManager.terminate();
+ pdfManager = null;
+ }
+
+ if (cancelXHRs) {
+ cancelXHRs();
+ }
+
+ (0, _primitives.clearPrimitiveCaches)();
+ var waitOn = [];
+ WorkerTasks.forEach(function (task) {
+ waitOn.push(task.finished);
+ task.terminate();
+ });
+ return Promise.all(waitOn).then(function () {
+ handler.destroy();
+ handler = null;
+ });
+ });
+ handler.on('Ready', function wphReady(data) {
+ setupDoc(docParams);
+ docParams = null;
+ });
+ return workerHandlerName;
+ },
+ initializeFromPort: function initializeFromPort(port) {
+ var handler = new _message_handler.MessageHandler('worker', 'main', port);
+ WorkerMessageHandler.setup(handler, port);
+ handler.send('ready', null);
+ }
+};
+exports.WorkerMessageHandler = WorkerMessageHandler;
+
+function isMessagePort(maybePort) {
+ return typeof maybePort.postMessage === 'function' && 'onmessage' in maybePort;
+}
+
+if (typeof window === 'undefined' && !(0, _is_node["default"])() && typeof self !== 'undefined' && isMessagePort(self)) {
+ WorkerMessageHandler.initializeFromPort(self);
+}
+
+/***/ }),
+/* 2 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = __w_pdfjs_require__(3);
+
+/***/ }),
+/* 3 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+/* WEBPACK VAR INJECTION */(function(module) {
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var runtime = function (exports) {
+ "use strict";
+
+ var Op = Object.prototype;
+ var hasOwn = Op.hasOwnProperty;
+ var undefined;
+ var $Symbol = typeof Symbol === "function" ? Symbol : {};
+ var iteratorSymbol = $Symbol.iterator || "@@iterator";
+ var asyncIteratorSymbol = $Symbol.asyncIterator || "@@asyncIterator";
+ var toStringTagSymbol = $Symbol.toStringTag || "@@toStringTag";
+
+ function wrap(innerFn, outerFn, self, tryLocsList) {
+ var protoGenerator = outerFn && outerFn.prototype instanceof Generator ? outerFn : Generator;
+ var generator = Object.create(protoGenerator.prototype);
+ var context = new Context(tryLocsList || []);
+ generator._invoke = makeInvokeMethod(innerFn, self, context);
+ return generator;
+ }
+
+ exports.wrap = wrap;
+
+ function tryCatch(fn, obj, arg) {
+ try {
+ return {
+ type: "normal",
+ arg: fn.call(obj, arg)
+ };
+ } catch (err) {
+ return {
+ type: "throw",
+ arg: err
+ };
+ }
+ }
+
+ var GenStateSuspendedStart = "suspendedStart";
+ var GenStateSuspendedYield = "suspendedYield";
+ var GenStateExecuting = "executing";
+ var GenStateCompleted = "completed";
+ var ContinueSentinel = {};
+
+ function Generator() {}
+
+ function GeneratorFunction() {}
+
+ function GeneratorFunctionPrototype() {}
+
+ var IteratorPrototype = {};
+
+ IteratorPrototype[iteratorSymbol] = function () {
+ return this;
+ };
+
+ var getProto = Object.getPrototypeOf;
+ var NativeIteratorPrototype = getProto && getProto(getProto(values([])));
+
+ if (NativeIteratorPrototype && NativeIteratorPrototype !== Op && hasOwn.call(NativeIteratorPrototype, iteratorSymbol)) {
+ IteratorPrototype = NativeIteratorPrototype;
+ }
+
+ var Gp = GeneratorFunctionPrototype.prototype = Generator.prototype = Object.create(IteratorPrototype);
+ GeneratorFunction.prototype = Gp.constructor = GeneratorFunctionPrototype;
+ GeneratorFunctionPrototype.constructor = GeneratorFunction;
+ GeneratorFunctionPrototype[toStringTagSymbol] = GeneratorFunction.displayName = "GeneratorFunction";
+
+ function defineIteratorMethods(prototype) {
+ ["next", "throw", "return"].forEach(function (method) {
+ prototype[method] = function (arg) {
+ return this._invoke(method, arg);
+ };
+ });
+ }
+
+ exports.isGeneratorFunction = function (genFun) {
+ var ctor = typeof genFun === "function" && genFun.constructor;
+ return ctor ? ctor === GeneratorFunction || (ctor.displayName || ctor.name) === "GeneratorFunction" : false;
+ };
+
+ exports.mark = function (genFun) {
+ if (Object.setPrototypeOf) {
+ Object.setPrototypeOf(genFun, GeneratorFunctionPrototype);
+ } else {
+ genFun.__proto__ = GeneratorFunctionPrototype;
+
+ if (!(toStringTagSymbol in genFun)) {
+ genFun[toStringTagSymbol] = "GeneratorFunction";
+ }
+ }
+
+ genFun.prototype = Object.create(Gp);
+ return genFun;
+ };
+
+ exports.awrap = function (arg) {
+ return {
+ __await: arg
+ };
+ };
+
+ function AsyncIterator(generator) {
+ function invoke(method, arg, resolve, reject) {
+ var record = tryCatch(generator[method], generator, arg);
+
+ if (record.type === "throw") {
+ reject(record.arg);
+ } else {
+ var result = record.arg;
+ var value = result.value;
+
+ if (value && _typeof(value) === "object" && hasOwn.call(value, "__await")) {
+ return Promise.resolve(value.__await).then(function (value) {
+ invoke("next", value, resolve, reject);
+ }, function (err) {
+ invoke("throw", err, resolve, reject);
+ });
+ }
+
+ return Promise.resolve(value).then(function (unwrapped) {
+ result.value = unwrapped;
+ resolve(result);
+ }, function (error) {
+ return invoke("throw", error, resolve, reject);
+ });
+ }
+ }
+
+ var previousPromise;
+
+ function enqueue(method, arg) {
+ function callInvokeWithMethodAndArg() {
+ return new Promise(function (resolve, reject) {
+ invoke(method, arg, resolve, reject);
+ });
+ }
+
+ return previousPromise = previousPromise ? previousPromise.then(callInvokeWithMethodAndArg, callInvokeWithMethodAndArg) : callInvokeWithMethodAndArg();
+ }
+
+ this._invoke = enqueue;
+ }
+
+ defineIteratorMethods(AsyncIterator.prototype);
+
+ AsyncIterator.prototype[asyncIteratorSymbol] = function () {
+ return this;
+ };
+
+ exports.AsyncIterator = AsyncIterator;
+
+ exports.async = function (innerFn, outerFn, self, tryLocsList) {
+ var iter = new AsyncIterator(wrap(innerFn, outerFn, self, tryLocsList));
+ return exports.isGeneratorFunction(outerFn) ? iter : iter.next().then(function (result) {
+ return result.done ? result.value : iter.next();
+ });
+ };
+
+ function makeInvokeMethod(innerFn, self, context) {
+ var state = GenStateSuspendedStart;
+ return function invoke(method, arg) {
+ if (state === GenStateExecuting) {
+ throw new Error("Generator is already running");
+ }
+
+ if (state === GenStateCompleted) {
+ if (method === "throw") {
+ throw arg;
+ }
+
+ return doneResult();
+ }
+
+ context.method = method;
+ context.arg = arg;
+
+ while (true) {
+ var delegate = context.delegate;
+
+ if (delegate) {
+ var delegateResult = maybeInvokeDelegate(delegate, context);
+
+ if (delegateResult) {
+ if (delegateResult === ContinueSentinel) continue;
+ return delegateResult;
+ }
+ }
+
+ if (context.method === "next") {
+ context.sent = context._sent = context.arg;
+ } else if (context.method === "throw") {
+ if (state === GenStateSuspendedStart) {
+ state = GenStateCompleted;
+ throw context.arg;
+ }
+
+ context.dispatchException(context.arg);
+ } else if (context.method === "return") {
+ context.abrupt("return", context.arg);
+ }
+
+ state = GenStateExecuting;
+ var record = tryCatch(innerFn, self, context);
+
+ if (record.type === "normal") {
+ state = context.done ? GenStateCompleted : GenStateSuspendedYield;
+
+ if (record.arg === ContinueSentinel) {
+ continue;
+ }
+
+ return {
+ value: record.arg,
+ done: context.done
+ };
+ } else if (record.type === "throw") {
+ state = GenStateCompleted;
+ context.method = "throw";
+ context.arg = record.arg;
+ }
+ }
+ };
+ }
+
+ function maybeInvokeDelegate(delegate, context) {
+ var method = delegate.iterator[context.method];
+
+ if (method === undefined) {
+ context.delegate = null;
+
+ if (context.method === "throw") {
+ if (delegate.iterator["return"]) {
+ context.method = "return";
+ context.arg = undefined;
+ maybeInvokeDelegate(delegate, context);
+
+ if (context.method === "throw") {
+ return ContinueSentinel;
+ }
+ }
+
+ context.method = "throw";
+ context.arg = new TypeError("The iterator does not provide a 'throw' method");
+ }
+
+ return ContinueSentinel;
+ }
+
+ var record = tryCatch(method, delegate.iterator, context.arg);
+
+ if (record.type === "throw") {
+ context.method = "throw";
+ context.arg = record.arg;
+ context.delegate = null;
+ return ContinueSentinel;
+ }
+
+ var info = record.arg;
+
+ if (!info) {
+ context.method = "throw";
+ context.arg = new TypeError("iterator result is not an object");
+ context.delegate = null;
+ return ContinueSentinel;
+ }
+
+ if (info.done) {
+ context[delegate.resultName] = info.value;
+ context.next = delegate.nextLoc;
+
+ if (context.method !== "return") {
+ context.method = "next";
+ context.arg = undefined;
+ }
+ } else {
+ return info;
+ }
+
+ context.delegate = null;
+ return ContinueSentinel;
+ }
+
+ defineIteratorMethods(Gp);
+ Gp[toStringTagSymbol] = "Generator";
+
+ Gp[iteratorSymbol] = function () {
+ return this;
+ };
+
+ Gp.toString = function () {
+ return "[object Generator]";
+ };
+
+ function pushTryEntry(locs) {
+ var entry = {
+ tryLoc: locs[0]
+ };
+
+ if (1 in locs) {
+ entry.catchLoc = locs[1];
+ }
+
+ if (2 in locs) {
+ entry.finallyLoc = locs[2];
+ entry.afterLoc = locs[3];
+ }
+
+ this.tryEntries.push(entry);
+ }
+
+ function resetTryEntry(entry) {
+ var record = entry.completion || {};
+ record.type = "normal";
+ delete record.arg;
+ entry.completion = record;
+ }
+
+ function Context(tryLocsList) {
+ this.tryEntries = [{
+ tryLoc: "root"
+ }];
+ tryLocsList.forEach(pushTryEntry, this);
+ this.reset(true);
+ }
+
+ exports.keys = function (object) {
+ var keys = [];
+
+ for (var key in object) {
+ keys.push(key);
+ }
+
+ keys.reverse();
+ return function next() {
+ while (keys.length) {
+ var key = keys.pop();
+
+ if (key in object) {
+ next.value = key;
+ next.done = false;
+ return next;
+ }
+ }
+
+ next.done = true;
+ return next;
+ };
+ };
+
+ function values(iterable) {
+ if (iterable) {
+ var iteratorMethod = iterable[iteratorSymbol];
+
+ if (iteratorMethod) {
+ return iteratorMethod.call(iterable);
+ }
+
+ if (typeof iterable.next === "function") {
+ return iterable;
+ }
+
+ if (!isNaN(iterable.length)) {
+ var i = -1,
+ next = function next() {
+ while (++i < iterable.length) {
+ if (hasOwn.call(iterable, i)) {
+ next.value = iterable[i];
+ next.done = false;
+ return next;
+ }
+ }
+
+ next.value = undefined;
+ next.done = true;
+ return next;
+ };
+
+ return next.next = next;
+ }
+ }
+
+ return {
+ next: doneResult
+ };
+ }
+
+ exports.values = values;
+
+ function doneResult() {
+ return {
+ value: undefined,
+ done: true
+ };
+ }
+
+ Context.prototype = {
+ constructor: Context,
+ reset: function reset(skipTempReset) {
+ this.prev = 0;
+ this.next = 0;
+ this.sent = this._sent = undefined;
+ this.done = false;
+ this.delegate = null;
+ this.method = "next";
+ this.arg = undefined;
+ this.tryEntries.forEach(resetTryEntry);
+
+ if (!skipTempReset) {
+ for (var name in this) {
+ if (name.charAt(0) === "t" && hasOwn.call(this, name) && !isNaN(+name.slice(1))) {
+ this[name] = undefined;
+ }
+ }
+ }
+ },
+ stop: function stop() {
+ this.done = true;
+ var rootEntry = this.tryEntries[0];
+ var rootRecord = rootEntry.completion;
+
+ if (rootRecord.type === "throw") {
+ throw rootRecord.arg;
+ }
+
+ return this.rval;
+ },
+ dispatchException: function dispatchException(exception) {
+ if (this.done) {
+ throw exception;
+ }
+
+ var context = this;
+
+ function handle(loc, caught) {
+ record.type = "throw";
+ record.arg = exception;
+ context.next = loc;
+
+ if (caught) {
+ context.method = "next";
+ context.arg = undefined;
+ }
+
+ return !!caught;
+ }
+
+ for (var i = this.tryEntries.length - 1; i >= 0; --i) {
+ var entry = this.tryEntries[i];
+ var record = entry.completion;
+
+ if (entry.tryLoc === "root") {
+ return handle("end");
+ }
+
+ if (entry.tryLoc <= this.prev) {
+ var hasCatch = hasOwn.call(entry, "catchLoc");
+ var hasFinally = hasOwn.call(entry, "finallyLoc");
+
+ if (hasCatch && hasFinally) {
+ if (this.prev < entry.catchLoc) {
+ return handle(entry.catchLoc, true);
+ } else if (this.prev < entry.finallyLoc) {
+ return handle(entry.finallyLoc);
+ }
+ } else if (hasCatch) {
+ if (this.prev < entry.catchLoc) {
+ return handle(entry.catchLoc, true);
+ }
+ } else if (hasFinally) {
+ if (this.prev < entry.finallyLoc) {
+ return handle(entry.finallyLoc);
+ }
+ } else {
+ throw new Error("try statement without catch or finally");
+ }
+ }
+ }
+ },
+ abrupt: function abrupt(type, arg) {
+ for (var i = this.tryEntries.length - 1; i >= 0; --i) {
+ var entry = this.tryEntries[i];
+
+ if (entry.tryLoc <= this.prev && hasOwn.call(entry, "finallyLoc") && this.prev < entry.finallyLoc) {
+ var finallyEntry = entry;
+ break;
+ }
+ }
+
+ if (finallyEntry && (type === "break" || type === "continue") && finallyEntry.tryLoc <= arg && arg <= finallyEntry.finallyLoc) {
+ finallyEntry = null;
+ }
+
+ var record = finallyEntry ? finallyEntry.completion : {};
+ record.type = type;
+ record.arg = arg;
+
+ if (finallyEntry) {
+ this.method = "next";
+ this.next = finallyEntry.finallyLoc;
+ return ContinueSentinel;
+ }
+
+ return this.complete(record);
+ },
+ complete: function complete(record, afterLoc) {
+ if (record.type === "throw") {
+ throw record.arg;
+ }
+
+ if (record.type === "break" || record.type === "continue") {
+ this.next = record.arg;
+ } else if (record.type === "return") {
+ this.rval = this.arg = record.arg;
+ this.method = "return";
+ this.next = "end";
+ } else if (record.type === "normal" && afterLoc) {
+ this.next = afterLoc;
+ }
+
+ return ContinueSentinel;
+ },
+ finish: function finish(finallyLoc) {
+ for (var i = this.tryEntries.length - 1; i >= 0; --i) {
+ var entry = this.tryEntries[i];
+
+ if (entry.finallyLoc === finallyLoc) {
+ this.complete(entry.completion, entry.afterLoc);
+ resetTryEntry(entry);
+ return ContinueSentinel;
+ }
+ }
+ },
+ "catch": function _catch(tryLoc) {
+ for (var i = this.tryEntries.length - 1; i >= 0; --i) {
+ var entry = this.tryEntries[i];
+
+ if (entry.tryLoc === tryLoc) {
+ var record = entry.completion;
+
+ if (record.type === "throw") {
+ var thrown = record.arg;
+ resetTryEntry(entry);
+ }
+
+ return thrown;
+ }
+ }
+
+ throw new Error("illegal catch attempt");
+ },
+ delegateYield: function delegateYield(iterable, resultName, nextLoc) {
+ this.delegate = {
+ iterator: values(iterable),
+ resultName: resultName,
+ nextLoc: nextLoc
+ };
+
+ if (this.method === "next") {
+ this.arg = undefined;
+ }
+
+ return ContinueSentinel;
+ }
+ };
+ return exports;
+}(( false ? undefined : _typeof(module)) === "object" ? module.exports : {});
+
+try {
+ regeneratorRuntime = runtime;
+} catch (accidentalStrictMode) {
+ Function("r", "regeneratorRuntime = r")(runtime);
+}
+/* WEBPACK VAR INJECTION */}.call(this, __w_pdfjs_require__(4)(module)))
+
+/***/ }),
+/* 4 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (module) {
+ if (!module.webpackPolyfill) {
+ module.deprecate = function () {};
+
+ module.paths = [];
+ if (!module.children) module.children = [];
+ Object.defineProperty(module, "loaded", {
+ enumerable: true,
+ get: function get() {
+ return module.l;
+ }
+ });
+ Object.defineProperty(module, "id", {
+ enumerable: true,
+ get: function get() {
+ return module.i;
+ }
+ });
+ module.webpackPolyfill = 1;
+ }
+
+ return module;
+};
+
+/***/ }),
+/* 5 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.arrayByteLength = arrayByteLength;
+exports.arraysToBytes = arraysToBytes;
+exports.assert = assert;
+exports.bytesToString = bytesToString;
+exports.createPromiseCapability = createPromiseCapability;
+exports.getVerbosityLevel = getVerbosityLevel;
+exports.info = info;
+exports.isArrayBuffer = isArrayBuffer;
+exports.isArrayEqual = isArrayEqual;
+exports.isBool = isBool;
+exports.isEmptyObj = isEmptyObj;
+exports.isNum = isNum;
+exports.isString = isString;
+exports.isSpace = isSpace;
+exports.isSameOrigin = isSameOrigin;
+exports.createValidAbsoluteUrl = createValidAbsoluteUrl;
+exports.isLittleEndian = isLittleEndian;
+exports.isEvalSupported = isEvalSupported;
+exports.log2 = log2;
+exports.readInt8 = readInt8;
+exports.readUint16 = readUint16;
+exports.readUint32 = readUint32;
+exports.removeNullCharacters = removeNullCharacters;
+exports.setVerbosityLevel = setVerbosityLevel;
+exports.shadow = shadow;
+exports.string32 = string32;
+exports.stringToBytes = stringToBytes;
+exports.stringToPDFString = stringToPDFString;
+exports.stringToUTF8String = stringToUTF8String;
+exports.utf8StringToString = utf8StringToString;
+exports.warn = warn;
+exports.unreachable = unreachable;
+Object.defineProperty(exports, "ReadableStream", {
+ enumerable: true,
+ get: function get() {
+ return _streams_polyfill.ReadableStream;
+ }
+});
+Object.defineProperty(exports, "URL", {
+ enumerable: true,
+ get: function get() {
+ return _url_polyfill.URL;
+ }
+});
+exports.createObjectURL = exports.FormatError = exports.Util = exports.UnknownErrorException = exports.UnexpectedResponseException = exports.TextRenderingMode = exports.StreamType = exports.PermissionFlag = exports.PasswordResponses = exports.PasswordException = exports.NativeImageDecoding = exports.MissingPDFException = exports.InvalidPDFException = exports.AbortException = exports.CMapCompressionType = exports.ImageKind = exports.FontType = exports.AnnotationType = exports.AnnotationFlag = exports.AnnotationFieldFlag = exports.AnnotationBorderStyleType = exports.UNSUPPORTED_FEATURES = exports.VerbosityLevel = exports.OPS = exports.IDENTITY_MATRIX = exports.FONT_IDENTITY_MATRIX = void 0;
+
+__w_pdfjs_require__(6);
+
+var _streams_polyfill = __w_pdfjs_require__(147);
+
+var _url_polyfill = __w_pdfjs_require__(149);
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var IDENTITY_MATRIX = [1, 0, 0, 1, 0, 0];
+exports.IDENTITY_MATRIX = IDENTITY_MATRIX;
+var FONT_IDENTITY_MATRIX = [0.001, 0, 0, 0.001, 0, 0];
+exports.FONT_IDENTITY_MATRIX = FONT_IDENTITY_MATRIX;
+var NativeImageDecoding = {
+ NONE: 'none',
+ DECODE: 'decode',
+ DISPLAY: 'display'
+};
+exports.NativeImageDecoding = NativeImageDecoding;
+var PermissionFlag = {
+ PRINT: 0x04,
+ MODIFY_CONTENTS: 0x08,
+ COPY: 0x10,
+ MODIFY_ANNOTATIONS: 0x20,
+ FILL_INTERACTIVE_FORMS: 0x100,
+ COPY_FOR_ACCESSIBILITY: 0x200,
+ ASSEMBLE: 0x400,
+ PRINT_HIGH_QUALITY: 0x800
+};
+exports.PermissionFlag = PermissionFlag;
+var TextRenderingMode = {
+ FILL: 0,
+ STROKE: 1,
+ FILL_STROKE: 2,
+ INVISIBLE: 3,
+ FILL_ADD_TO_PATH: 4,
+ STROKE_ADD_TO_PATH: 5,
+ FILL_STROKE_ADD_TO_PATH: 6,
+ ADD_TO_PATH: 7,
+ FILL_STROKE_MASK: 3,
+ ADD_TO_PATH_FLAG: 4
+};
+exports.TextRenderingMode = TextRenderingMode;
+var ImageKind = {
+ GRAYSCALE_1BPP: 1,
+ RGB_24BPP: 2,
+ RGBA_32BPP: 3
+};
+exports.ImageKind = ImageKind;
+var AnnotationType = {
+ TEXT: 1,
+ LINK: 2,
+ FREETEXT: 3,
+ LINE: 4,
+ SQUARE: 5,
+ CIRCLE: 6,
+ POLYGON: 7,
+ POLYLINE: 8,
+ HIGHLIGHT: 9,
+ UNDERLINE: 10,
+ SQUIGGLY: 11,
+ STRIKEOUT: 12,
+ STAMP: 13,
+ CARET: 14,
+ INK: 15,
+ POPUP: 16,
+ FILEATTACHMENT: 17,
+ SOUND: 18,
+ MOVIE: 19,
+ WIDGET: 20,
+ SCREEN: 21,
+ PRINTERMARK: 22,
+ TRAPNET: 23,
+ WATERMARK: 24,
+ THREED: 25,
+ REDACT: 26
+};
+exports.AnnotationType = AnnotationType;
+var AnnotationFlag = {
+ INVISIBLE: 0x01,
+ HIDDEN: 0x02,
+ PRINT: 0x04,
+ NOZOOM: 0x08,
+ NOROTATE: 0x10,
+ NOVIEW: 0x20,
+ READONLY: 0x40,
+ LOCKED: 0x80,
+ TOGGLENOVIEW: 0x100,
+ LOCKEDCONTENTS: 0x200
+};
+exports.AnnotationFlag = AnnotationFlag;
+var AnnotationFieldFlag = {
+ READONLY: 0x0000001,
+ REQUIRED: 0x0000002,
+ NOEXPORT: 0x0000004,
+ MULTILINE: 0x0001000,
+ PASSWORD: 0x0002000,
+ NOTOGGLETOOFF: 0x0004000,
+ RADIO: 0x0008000,
+ PUSHBUTTON: 0x0010000,
+ COMBO: 0x0020000,
+ EDIT: 0x0040000,
+ SORT: 0x0080000,
+ FILESELECT: 0x0100000,
+ MULTISELECT: 0x0200000,
+ DONOTSPELLCHECK: 0x0400000,
+ DONOTSCROLL: 0x0800000,
+ COMB: 0x1000000,
+ RICHTEXT: 0x2000000,
+ RADIOSINUNISON: 0x2000000,
+ COMMITONSELCHANGE: 0x4000000
+};
+exports.AnnotationFieldFlag = AnnotationFieldFlag;
+var AnnotationBorderStyleType = {
+ SOLID: 1,
+ DASHED: 2,
+ BEVELED: 3,
+ INSET: 4,
+ UNDERLINE: 5
+};
+exports.AnnotationBorderStyleType = AnnotationBorderStyleType;
+var StreamType = {
+ UNKNOWN: 0,
+ FLATE: 1,
+ LZW: 2,
+ DCT: 3,
+ JPX: 4,
+ JBIG: 5,
+ A85: 6,
+ AHX: 7,
+ CCF: 8,
+ RL: 9
+};
+exports.StreamType = StreamType;
+var FontType = {
+ UNKNOWN: 0,
+ TYPE1: 1,
+ TYPE1C: 2,
+ CIDFONTTYPE0: 3,
+ CIDFONTTYPE0C: 4,
+ TRUETYPE: 5,
+ CIDFONTTYPE2: 6,
+ TYPE3: 7,
+ OPENTYPE: 8,
+ TYPE0: 9,
+ MMTYPE1: 10
+};
+exports.FontType = FontType;
+var VerbosityLevel = {
+ ERRORS: 0,
+ WARNINGS: 1,
+ INFOS: 5
+};
+exports.VerbosityLevel = VerbosityLevel;
+var CMapCompressionType = {
+ NONE: 0,
+ BINARY: 1,
+ STREAM: 2
+};
+exports.CMapCompressionType = CMapCompressionType;
+var OPS = {
+ dependency: 1,
+ setLineWidth: 2,
+ setLineCap: 3,
+ setLineJoin: 4,
+ setMiterLimit: 5,
+ setDash: 6,
+ setRenderingIntent: 7,
+ setFlatness: 8,
+ setGState: 9,
+ save: 10,
+ restore: 11,
+ transform: 12,
+ moveTo: 13,
+ lineTo: 14,
+ curveTo: 15,
+ curveTo2: 16,
+ curveTo3: 17,
+ closePath: 18,
+ rectangle: 19,
+ stroke: 20,
+ closeStroke: 21,
+ fill: 22,
+ eoFill: 23,
+ fillStroke: 24,
+ eoFillStroke: 25,
+ closeFillStroke: 26,
+ closeEOFillStroke: 27,
+ endPath: 28,
+ clip: 29,
+ eoClip: 30,
+ beginText: 31,
+ endText: 32,
+ setCharSpacing: 33,
+ setWordSpacing: 34,
+ setHScale: 35,
+ setLeading: 36,
+ setFont: 37,
+ setTextRenderingMode: 38,
+ setTextRise: 39,
+ moveText: 40,
+ setLeadingMoveText: 41,
+ setTextMatrix: 42,
+ nextLine: 43,
+ showText: 44,
+ showSpacedText: 45,
+ nextLineShowText: 46,
+ nextLineSetSpacingShowText: 47,
+ setCharWidth: 48,
+ setCharWidthAndBounds: 49,
+ setStrokeColorSpace: 50,
+ setFillColorSpace: 51,
+ setStrokeColor: 52,
+ setStrokeColorN: 53,
+ setFillColor: 54,
+ setFillColorN: 55,
+ setStrokeGray: 56,
+ setFillGray: 57,
+ setStrokeRGBColor: 58,
+ setFillRGBColor: 59,
+ setStrokeCMYKColor: 60,
+ setFillCMYKColor: 61,
+ shadingFill: 62,
+ beginInlineImage: 63,
+ beginImageData: 64,
+ endInlineImage: 65,
+ paintXObject: 66,
+ markPoint: 67,
+ markPointProps: 68,
+ beginMarkedContent: 69,
+ beginMarkedContentProps: 70,
+ endMarkedContent: 71,
+ beginCompat: 72,
+ endCompat: 73,
+ paintFormXObjectBegin: 74,
+ paintFormXObjectEnd: 75,
+ beginGroup: 76,
+ endGroup: 77,
+ beginAnnotations: 78,
+ endAnnotations: 79,
+ beginAnnotation: 80,
+ endAnnotation: 81,
+ paintJpegXObject: 82,
+ paintImageMaskXObject: 83,
+ paintImageMaskXObjectGroup: 84,
+ paintImageXObject: 85,
+ paintInlineImageXObject: 86,
+ paintInlineImageXObjectGroup: 87,
+ paintImageXObjectRepeat: 88,
+ paintImageMaskXObjectRepeat: 89,
+ paintSolidColorImageMask: 90,
+ constructPath: 91
+};
+exports.OPS = OPS;
+var UNSUPPORTED_FEATURES = {
+ unknown: 'unknown',
+ forms: 'forms',
+ javaScript: 'javaScript',
+ smask: 'smask',
+ shadingPattern: 'shadingPattern',
+ font: 'font'
+};
+exports.UNSUPPORTED_FEATURES = UNSUPPORTED_FEATURES;
+var PasswordResponses = {
+ NEED_PASSWORD: 1,
+ INCORRECT_PASSWORD: 2
+};
+exports.PasswordResponses = PasswordResponses;
+var verbosity = VerbosityLevel.WARNINGS;
+
+function setVerbosityLevel(level) {
+ if (Number.isInteger(level)) {
+ verbosity = level;
+ }
+}
+
+function getVerbosityLevel() {
+ return verbosity;
+}
+
+function info(msg) {
+ if (verbosity >= VerbosityLevel.INFOS) {
+ console.log('Info: ' + msg);
+ }
+}
+
+function warn(msg) {
+ if (verbosity >= VerbosityLevel.WARNINGS) {
+ console.log('Warning: ' + msg);
+ }
+}
+
+function unreachable(msg) {
+ throw new Error(msg);
+}
+
+function assert(cond, msg) {
+ if (!cond) {
+ unreachable(msg);
+ }
+}
+
+function isSameOrigin(baseUrl, otherUrl) {
+ try {
+ var base = new _url_polyfill.URL(baseUrl);
+
+ if (!base.origin || base.origin === 'null') {
+ return false;
+ }
+ } catch (e) {
+ return false;
+ }
+
+ var other = new _url_polyfill.URL(otherUrl, base);
+ return base.origin === other.origin;
+}
+
+function _isValidProtocol(url) {
+ if (!url) {
+ return false;
+ }
+
+ switch (url.protocol) {
+ case 'http:':
+ case 'https:':
+ case 'ftp:':
+ case 'mailto:':
+ case 'tel:':
+ return true;
+
+ default:
+ return false;
+ }
+}
+
+function createValidAbsoluteUrl(url, baseUrl) {
+ if (!url) {
+ return null;
+ }
+
+ try {
+ var absoluteUrl = baseUrl ? new _url_polyfill.URL(url, baseUrl) : new _url_polyfill.URL(url);
+
+ if (_isValidProtocol(absoluteUrl)) {
+ return absoluteUrl;
+ }
+ } catch (ex) {}
+
+ return null;
+}
+
+function shadow(obj, prop, value) {
+ Object.defineProperty(obj, prop, {
+ value: value,
+ enumerable: true,
+ configurable: true,
+ writable: false
+ });
+ return value;
+}
+
+var PasswordException = function PasswordExceptionClosure() {
+ function PasswordException(msg, code) {
+ this.name = 'PasswordException';
+ this.message = msg;
+ this.code = code;
+ }
+
+ PasswordException.prototype = new Error();
+ PasswordException.constructor = PasswordException;
+ return PasswordException;
+}();
+
+exports.PasswordException = PasswordException;
+
+var UnknownErrorException = function UnknownErrorExceptionClosure() {
+ function UnknownErrorException(msg, details) {
+ this.name = 'UnknownErrorException';
+ this.message = msg;
+ this.details = details;
+ }
+
+ UnknownErrorException.prototype = new Error();
+ UnknownErrorException.constructor = UnknownErrorException;
+ return UnknownErrorException;
+}();
+
+exports.UnknownErrorException = UnknownErrorException;
+
+var InvalidPDFException = function InvalidPDFExceptionClosure() {
+ function InvalidPDFException(msg) {
+ this.name = 'InvalidPDFException';
+ this.message = msg;
+ }
+
+ InvalidPDFException.prototype = new Error();
+ InvalidPDFException.constructor = InvalidPDFException;
+ return InvalidPDFException;
+}();
+
+exports.InvalidPDFException = InvalidPDFException;
+
+var MissingPDFException = function MissingPDFExceptionClosure() {
+ function MissingPDFException(msg) {
+ this.name = 'MissingPDFException';
+ this.message = msg;
+ }
+
+ MissingPDFException.prototype = new Error();
+ MissingPDFException.constructor = MissingPDFException;
+ return MissingPDFException;
+}();
+
+exports.MissingPDFException = MissingPDFException;
+
+var UnexpectedResponseException = function UnexpectedResponseExceptionClosure() {
+ function UnexpectedResponseException(msg, status) {
+ this.name = 'UnexpectedResponseException';
+ this.message = msg;
+ this.status = status;
+ }
+
+ UnexpectedResponseException.prototype = new Error();
+ UnexpectedResponseException.constructor = UnexpectedResponseException;
+ return UnexpectedResponseException;
+}();
+
+exports.UnexpectedResponseException = UnexpectedResponseException;
+
+var FormatError = function FormatErrorClosure() {
+ function FormatError(msg) {
+ this.message = msg;
+ }
+
+ FormatError.prototype = new Error();
+ FormatError.prototype.name = 'FormatError';
+ FormatError.constructor = FormatError;
+ return FormatError;
+}();
+
+exports.FormatError = FormatError;
+
+var AbortException = function AbortExceptionClosure() {
+ function AbortException(msg) {
+ this.name = 'AbortException';
+ this.message = msg;
+ }
+
+ AbortException.prototype = new Error();
+ AbortException.constructor = AbortException;
+ return AbortException;
+}();
+
+exports.AbortException = AbortException;
+var NullCharactersRegExp = /\x00/g;
+
+function removeNullCharacters(str) {
+ if (typeof str !== 'string') {
+ warn('The argument for removeNullCharacters must be a string.');
+ return str;
+ }
+
+ return str.replace(NullCharactersRegExp, '');
+}
+
+function bytesToString(bytes) {
+ assert(bytes !== null && _typeof(bytes) === 'object' && bytes.length !== undefined, 'Invalid argument for bytesToString');
+ var length = bytes.length;
+ var MAX_ARGUMENT_COUNT = 8192;
+
+ if (length < MAX_ARGUMENT_COUNT) {
+ return String.fromCharCode.apply(null, bytes);
+ }
+
+ var strBuf = [];
+
+ for (var i = 0; i < length; i += MAX_ARGUMENT_COUNT) {
+ var chunkEnd = Math.min(i + MAX_ARGUMENT_COUNT, length);
+ var chunk = bytes.subarray(i, chunkEnd);
+ strBuf.push(String.fromCharCode.apply(null, chunk));
+ }
+
+ return strBuf.join('');
+}
+
+function stringToBytes(str) {
+ assert(typeof str === 'string', 'Invalid argument for stringToBytes');
+ var length = str.length;
+ var bytes = new Uint8Array(length);
+
+ for (var i = 0; i < length; ++i) {
+ bytes[i] = str.charCodeAt(i) & 0xFF;
+ }
+
+ return bytes;
+}
+
+function arrayByteLength(arr) {
+ if (arr.length !== undefined) {
+ return arr.length;
+ }
+
+ assert(arr.byteLength !== undefined);
+ return arr.byteLength;
+}
+
+function arraysToBytes(arr) {
+ if (arr.length === 1 && arr[0] instanceof Uint8Array) {
+ return arr[0];
+ }
+
+ var resultLength = 0;
+ var i,
+ ii = arr.length;
+ var item, itemLength;
+
+ for (i = 0; i < ii; i++) {
+ item = arr[i];
+ itemLength = arrayByteLength(item);
+ resultLength += itemLength;
+ }
+
+ var pos = 0;
+ var data = new Uint8Array(resultLength);
+
+ for (i = 0; i < ii; i++) {
+ item = arr[i];
+
+ if (!(item instanceof Uint8Array)) {
+ if (typeof item === 'string') {
+ item = stringToBytes(item);
+ } else {
+ item = new Uint8Array(item);
+ }
+ }
+
+ itemLength = item.byteLength;
+ data.set(item, pos);
+ pos += itemLength;
+ }
+
+ return data;
+}
+
+function string32(value) {
+ return String.fromCharCode(value >> 24 & 0xff, value >> 16 & 0xff, value >> 8 & 0xff, value & 0xff);
+}
+
+function log2(x) {
+ if (x <= 0) {
+ return 0;
+ }
+
+ return Math.ceil(Math.log2(x));
+}
+
+function readInt8(data, start) {
+ return data[start] << 24 >> 24;
+}
+
+function readUint16(data, offset) {
+ return data[offset] << 8 | data[offset + 1];
+}
+
+function readUint32(data, offset) {
+ return (data[offset] << 24 | data[offset + 1] << 16 | data[offset + 2] << 8 | data[offset + 3]) >>> 0;
+}
+
+function isLittleEndian() {
+ var buffer8 = new Uint8Array(4);
+ buffer8[0] = 1;
+ var view32 = new Uint32Array(buffer8.buffer, 0, 1);
+ return view32[0] === 1;
+}
+
+function isEvalSupported() {
+ try {
+ new Function('');
+ return true;
+ } catch (e) {
+ return false;
+ }
+}
+
+var Util = function UtilClosure() {
+ function Util() {}
+
+ var rgbBuf = ['rgb(', 0, ',', 0, ',', 0, ')'];
+
+ Util.makeCssRgb = function Util_makeCssRgb(r, g, b) {
+ rgbBuf[1] = r;
+ rgbBuf[3] = g;
+ rgbBuf[5] = b;
+ return rgbBuf.join('');
+ };
+
+ Util.transform = function Util_transform(m1, m2) {
+ return [m1[0] * m2[0] + m1[2] * m2[1], m1[1] * m2[0] + m1[3] * m2[1], m1[0] * m2[2] + m1[2] * m2[3], m1[1] * m2[2] + m1[3] * m2[3], m1[0] * m2[4] + m1[2] * m2[5] + m1[4], m1[1] * m2[4] + m1[3] * m2[5] + m1[5]];
+ };
+
+ Util.applyTransform = function Util_applyTransform(p, m) {
+ var xt = p[0] * m[0] + p[1] * m[2] + m[4];
+ var yt = p[0] * m[1] + p[1] * m[3] + m[5];
+ return [xt, yt];
+ };
+
+ Util.applyInverseTransform = function Util_applyInverseTransform(p, m) {
+ var d = m[0] * m[3] - m[1] * m[2];
+ var xt = (p[0] * m[3] - p[1] * m[2] + m[2] * m[5] - m[4] * m[3]) / d;
+ var yt = (-p[0] * m[1] + p[1] * m[0] + m[4] * m[1] - m[5] * m[0]) / d;
+ return [xt, yt];
+ };
+
+ Util.getAxialAlignedBoundingBox = function Util_getAxialAlignedBoundingBox(r, m) {
+ var p1 = Util.applyTransform(r, m);
+ var p2 = Util.applyTransform(r.slice(2, 4), m);
+ var p3 = Util.applyTransform([r[0], r[3]], m);
+ var p4 = Util.applyTransform([r[2], r[1]], m);
+ return [Math.min(p1[0], p2[0], p3[0], p4[0]), Math.min(p1[1], p2[1], p3[1], p4[1]), Math.max(p1[0], p2[0], p3[0], p4[0]), Math.max(p1[1], p2[1], p3[1], p4[1])];
+ };
+
+ Util.inverseTransform = function Util_inverseTransform(m) {
+ var d = m[0] * m[3] - m[1] * m[2];
+ return [m[3] / d, -m[1] / d, -m[2] / d, m[0] / d, (m[2] * m[5] - m[4] * m[3]) / d, (m[4] * m[1] - m[5] * m[0]) / d];
+ };
+
+ Util.apply3dTransform = function Util_apply3dTransform(m, v) {
+ return [m[0] * v[0] + m[1] * v[1] + m[2] * v[2], m[3] * v[0] + m[4] * v[1] + m[5] * v[2], m[6] * v[0] + m[7] * v[1] + m[8] * v[2]];
+ };
+
+ Util.singularValueDecompose2dScale = function Util_singularValueDecompose2dScale(m) {
+ var transpose = [m[0], m[2], m[1], m[3]];
+ var a = m[0] * transpose[0] + m[1] * transpose[2];
+ var b = m[0] * transpose[1] + m[1] * transpose[3];
+ var c = m[2] * transpose[0] + m[3] * transpose[2];
+ var d = m[2] * transpose[1] + m[3] * transpose[3];
+ var first = (a + d) / 2;
+ var second = Math.sqrt((a + d) * (a + d) - 4 * (a * d - c * b)) / 2;
+ var sx = first + second || 1;
+ var sy = first - second || 1;
+ return [Math.sqrt(sx), Math.sqrt(sy)];
+ };
+
+ Util.normalizeRect = function Util_normalizeRect(rect) {
+ var r = rect.slice(0);
+
+ if (rect[0] > rect[2]) {
+ r[0] = rect[2];
+ r[2] = rect[0];
+ }
+
+ if (rect[1] > rect[3]) {
+ r[1] = rect[3];
+ r[3] = rect[1];
+ }
+
+ return r;
+ };
+
+ Util.intersect = function Util_intersect(rect1, rect2) {
+ function compare(a, b) {
+ return a - b;
+ }
+
+ var orderedX = [rect1[0], rect1[2], rect2[0], rect2[2]].sort(compare),
+ orderedY = [rect1[1], rect1[3], rect2[1], rect2[3]].sort(compare),
+ result = [];
+ rect1 = Util.normalizeRect(rect1);
+ rect2 = Util.normalizeRect(rect2);
+
+ if (orderedX[0] === rect1[0] && orderedX[1] === rect2[0] || orderedX[0] === rect2[0] && orderedX[1] === rect1[0]) {
+ result[0] = orderedX[1];
+ result[2] = orderedX[2];
+ } else {
+ return false;
+ }
+
+ if (orderedY[0] === rect1[1] && orderedY[1] === rect2[1] || orderedY[0] === rect2[1] && orderedY[1] === rect1[1]) {
+ result[1] = orderedY[1];
+ result[3] = orderedY[2];
+ } else {
+ return false;
+ }
+
+ return result;
+ };
+
+ return Util;
+}();
+
+exports.Util = Util;
+var PDFStringTranslateTable = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x2D8, 0x2C7, 0x2C6, 0x2D9, 0x2DD, 0x2DB, 0x2DA, 0x2DC, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x2022, 0x2020, 0x2021, 0x2026, 0x2014, 0x2013, 0x192, 0x2044, 0x2039, 0x203A, 0x2212, 0x2030, 0x201E, 0x201C, 0x201D, 0x2018, 0x2019, 0x201A, 0x2122, 0xFB01, 0xFB02, 0x141, 0x152, 0x160, 0x178, 0x17D, 0x131, 0x142, 0x153, 0x161, 0x17E, 0, 0x20AC];
+
+function stringToPDFString(str) {
+ var i,
+ n = str.length,
+ strBuf = [];
+
+ if (str[0] === '\xFE' && str[1] === '\xFF') {
+ for (i = 2; i < n; i += 2) {
+ strBuf.push(String.fromCharCode(str.charCodeAt(i) << 8 | str.charCodeAt(i + 1)));
+ }
+ } else {
+ for (i = 0; i < n; ++i) {
+ var code = PDFStringTranslateTable[str.charCodeAt(i)];
+ strBuf.push(code ? String.fromCharCode(code) : str.charAt(i));
+ }
+ }
+
+ return strBuf.join('');
+}
+
+function stringToUTF8String(str) {
+ return decodeURIComponent(escape(str));
+}
+
+function utf8StringToString(str) {
+ return unescape(encodeURIComponent(str));
+}
+
+function isEmptyObj(obj) {
+ for (var key in obj) {
+ return false;
+ }
+
+ return true;
+}
+
+function isBool(v) {
+ return typeof v === 'boolean';
+}
+
+function isNum(v) {
+ return typeof v === 'number';
+}
+
+function isString(v) {
+ return typeof v === 'string';
+}
+
+function isArrayBuffer(v) {
+ return _typeof(v) === 'object' && v !== null && v.byteLength !== undefined;
+}
+
+function isArrayEqual(arr1, arr2) {
+ if (arr1.length !== arr2.length) {
+ return false;
+ }
+
+ return arr1.every(function (element, index) {
+ return element === arr2[index];
+ });
+}
+
+function isSpace(ch) {
+ return ch === 0x20 || ch === 0x09 || ch === 0x0D || ch === 0x0A;
+}
+
+function createPromiseCapability() {
+ var capability = Object.create(null);
+ var isSettled = false;
+ Object.defineProperty(capability, 'settled', {
+ get: function get() {
+ return isSettled;
+ }
+ });
+ capability.promise = new Promise(function (resolve, reject) {
+ capability.resolve = function (data) {
+ isSettled = true;
+ resolve(data);
+ };
+
+ capability.reject = function (reason) {
+ isSettled = true;
+ reject(reason);
+ };
+ });
+ return capability;
+}
+
+var createObjectURL = function createObjectURLClosure() {
+ var digits = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
+ return function createObjectURL(data, contentType) {
+ var forceDataSchema = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
+
+ if (!forceDataSchema && _url_polyfill.URL.createObjectURL) {
+ var blob = new Blob([data], {
+ type: contentType
+ });
+ return _url_polyfill.URL.createObjectURL(blob);
+ }
+
+ var buffer = 'data:' + contentType + ';base64,';
+
+ for (var i = 0, ii = data.length; i < ii; i += 3) {
+ var b1 = data[i] & 0xFF;
+ var b2 = data[i + 1] & 0xFF;
+ var b3 = data[i + 2] & 0xFF;
+ var d1 = b1 >> 2,
+ d2 = (b1 & 3) << 4 | b2 >> 4;
+ var d3 = i + 1 < ii ? (b2 & 0xF) << 2 | b3 >> 6 : 64;
+ var d4 = i + 2 < ii ? b3 & 0x3F : 64;
+ buffer += digits[d1] + digits[d2] + digits[d3] + digits[d4];
+ }
+
+ return buffer;
+ };
+}();
+
+exports.createObjectURL = createObjectURL;
+
+/***/ }),
+/* 6 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var globalScope = __w_pdfjs_require__(7);
+
+if (!globalScope._pdfjsCompatibilityChecked) {
+ globalScope._pdfjsCompatibilityChecked = true;
+
+ var isNodeJS = __w_pdfjs_require__(8);
+
+ var hasDOM = (typeof window === "undefined" ? "undefined" : _typeof(window)) === 'object' && (typeof document === "undefined" ? "undefined" : _typeof(document)) === 'object';
+
+ (function checkNodeBtoa() {
+ if (globalScope.btoa || !isNodeJS()) {
+ return;
+ }
+
+ globalScope.btoa = function (chars) {
+ return Buffer.from(chars, 'binary').toString('base64');
+ };
+ })();
+
+ (function checkNodeAtob() {
+ if (globalScope.atob || !isNodeJS()) {
+ return;
+ }
+
+ globalScope.atob = function (input) {
+ return Buffer.from(input, 'base64').toString('binary');
+ };
+ })();
+
+ (function checkChildNodeRemove() {
+ if (!hasDOM) {
+ return;
+ }
+
+ if (typeof Element.prototype.remove !== 'undefined') {
+ return;
+ }
+
+ Element.prototype.remove = function () {
+ if (this.parentNode) {
+ this.parentNode.removeChild(this);
+ }
+ };
+ })();
+
+ (function checkDOMTokenListAddRemove() {
+ if (!hasDOM || isNodeJS()) {
+ return;
+ }
+
+ var div = document.createElement('div');
+ div.classList.add('testOne', 'testTwo');
+
+ if (div.classList.contains('testOne') === true && div.classList.contains('testTwo') === true) {
+ return;
+ }
+
+ var OriginalDOMTokenListAdd = DOMTokenList.prototype.add;
+ var OriginalDOMTokenListRemove = DOMTokenList.prototype.remove;
+
+ DOMTokenList.prototype.add = function () {
+ for (var _len = arguments.length, tokens = new Array(_len), _key = 0; _key < _len; _key++) {
+ tokens[_key] = arguments[_key];
+ }
+
+ for (var _i = 0, _tokens = tokens; _i < _tokens.length; _i++) {
+ var token = _tokens[_i];
+ OriginalDOMTokenListAdd.call(this, token);
+ }
+ };
+
+ DOMTokenList.prototype.remove = function () {
+ for (var _len2 = arguments.length, tokens = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
+ tokens[_key2] = arguments[_key2];
+ }
+
+ for (var _i2 = 0, _tokens2 = tokens; _i2 < _tokens2.length; _i2++) {
+ var token = _tokens2[_i2];
+ OriginalDOMTokenListRemove.call(this, token);
+ }
+ };
+ })();
+
+ (function checkDOMTokenListToggle() {
+ if (!hasDOM || isNodeJS()) {
+ return;
+ }
+
+ var div = document.createElement('div');
+
+ if (div.classList.toggle('test', 0) === false) {
+ return;
+ }
+
+ DOMTokenList.prototype.toggle = function (token) {
+ var force = arguments.length > 1 ? !!arguments[1] : !this.contains(token);
+ return this[force ? 'add' : 'remove'](token), force;
+ };
+ })();
+
+ (function checkStringStartsWith() {
+ if (String.prototype.startsWith) {
+ return;
+ }
+
+ __w_pdfjs_require__(9);
+ })();
+
+ (function checkStringEndsWith() {
+ if (String.prototype.endsWith) {
+ return;
+ }
+
+ __w_pdfjs_require__(40);
+ })();
+
+ (function checkStringIncludes() {
+ if (String.prototype.includes) {
+ return;
+ }
+
+ __w_pdfjs_require__(42);
+ })();
+
+ (function checkArrayIncludes() {
+ if (Array.prototype.includes) {
+ return;
+ }
+
+ __w_pdfjs_require__(44);
+ })();
+
+ (function checkArrayFrom() {
+ if (Array.from) {
+ return;
+ }
+
+ __w_pdfjs_require__(51);
+ })();
+
+ (function checkObjectAssign() {
+ if (Object.assign) {
+ return;
+ }
+
+ __w_pdfjs_require__(74);
+ })();
+
+ (function checkMathLog2() {
+ if (Math.log2) {
+ return;
+ }
+
+ Math.log2 = __w_pdfjs_require__(79);
+ })();
+
+ (function checkNumberIsNaN() {
+ if (Number.isNaN) {
+ return;
+ }
+
+ Number.isNaN = __w_pdfjs_require__(81);
+ })();
+
+ (function checkNumberIsInteger() {
+ if (Number.isInteger) {
+ return;
+ }
+
+ Number.isInteger = __w_pdfjs_require__(83);
+ })();
+
+ (function checkPromise() {
+ if (globalScope.Promise && globalScope.Promise.prototype && globalScope.Promise.prototype["finally"]) {
+ return;
+ }
+
+ globalScope.Promise = __w_pdfjs_require__(86);
+ })();
+
+ (function checkWeakMap() {
+ if (globalScope.WeakMap) {
+ return;
+ }
+
+ globalScope.WeakMap = __w_pdfjs_require__(106);
+ })();
+
+ (function checkWeakSet() {
+ if (globalScope.WeakSet) {
+ return;
+ }
+
+ globalScope.WeakSet = __w_pdfjs_require__(123);
+ })();
+
+ (function checkStringCodePointAt() {
+ if (String.codePointAt) {
+ return;
+ }
+
+ String.codePointAt = __w_pdfjs_require__(127);
+ })();
+
+ (function checkStringFromCodePoint() {
+ if (String.fromCodePoint) {
+ return;
+ }
+
+ String.fromCodePoint = __w_pdfjs_require__(129);
+ })();
+
+ (function checkSymbol() {
+ if (globalScope.Symbol) {
+ return;
+ }
+
+ __w_pdfjs_require__(131);
+ })();
+
+ (function checkStringPadStart() {
+ if (String.prototype.padStart) {
+ return;
+ }
+
+ __w_pdfjs_require__(138);
+ })();
+
+ (function checkStringPadEnd() {
+ if (String.prototype.padEnd) {
+ return;
+ }
+
+ __w_pdfjs_require__(142);
+ })();
+
+ (function checkObjectValues() {
+ if (Object.values) {
+ return;
+ }
+
+ Object.values = __w_pdfjs_require__(144);
+ })();
+}
+
+/***/ }),
+/* 7 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = typeof window !== 'undefined' && window.Math === Math ? window : typeof global !== 'undefined' && global.Math === Math ? global : typeof self !== 'undefined' && self.Math === Math ? self : {};
+
+/***/ }),
+/* 8 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+module.exports = function isNodeJS() {
+ return (typeof process === "undefined" ? "undefined" : _typeof(process)) === 'object' && process + '' === '[object process]' && !process.versions['nw'] && !process.versions['electron'];
+};
+
+/***/ }),
+/* 9 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(10);
+
+module.exports = __w_pdfjs_require__(13).String.startsWith;
+
+/***/ }),
+/* 10 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+var toLength = __w_pdfjs_require__(32);
+
+var context = __w_pdfjs_require__(34);
+
+var STARTS_WITH = 'startsWith';
+var $startsWith = ''[STARTS_WITH];
+$export($export.P + $export.F * __w_pdfjs_require__(39)(STARTS_WITH), 'String', {
+ startsWith: function startsWith(searchString) {
+ var that = context(this, searchString, STARTS_WITH);
+ var index = toLength(Math.min(arguments.length > 1 ? arguments[1] : undefined, that.length));
+ var search = String(searchString);
+ return $startsWith ? $startsWith.call(that, search, index) : that.slice(index, index + search.length) === search;
+ }
+});
+
+/***/ }),
+/* 11 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(12);
+
+var core = __w_pdfjs_require__(13);
+
+var hide = __w_pdfjs_require__(14);
+
+var redefine = __w_pdfjs_require__(24);
+
+var ctx = __w_pdfjs_require__(30);
+
+var PROTOTYPE = 'prototype';
+
+var $export = function $export(type, name, source) {
+ var IS_FORCED = type & $export.F;
+ var IS_GLOBAL = type & $export.G;
+ var IS_STATIC = type & $export.S;
+ var IS_PROTO = type & $export.P;
+ var IS_BIND = type & $export.B;
+ var target = IS_GLOBAL ? global : IS_STATIC ? global[name] || (global[name] = {}) : (global[name] || {})[PROTOTYPE];
+ var exports = IS_GLOBAL ? core : core[name] || (core[name] = {});
+ var expProto = exports[PROTOTYPE] || (exports[PROTOTYPE] = {});
+ var key, own, out, exp;
+ if (IS_GLOBAL) source = name;
+
+ for (key in source) {
+ own = !IS_FORCED && target && target[key] !== undefined;
+ out = (own ? target : source)[key];
+ exp = IS_BIND && own ? ctx(out, global) : IS_PROTO && typeof out == 'function' ? ctx(Function.call, out) : out;
+ if (target) redefine(target, key, out, type & $export.U);
+ if (exports[key] != out) hide(exports, key, exp);
+ if (IS_PROTO && expProto[key] != out) expProto[key] = out;
+ }
+};
+
+global.core = core;
+$export.F = 1;
+$export.G = 2;
+$export.S = 4;
+$export.P = 8;
+$export.B = 16;
+$export.W = 32;
+$export.U = 64;
+$export.R = 128;
+module.exports = $export;
+
+/***/ }),
+/* 12 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = module.exports = typeof window != 'undefined' && window.Math == Math ? window : typeof self != 'undefined' && self.Math == Math ? self : Function('return this')();
+if (typeof __g == 'number') __g = global;
+
+/***/ }),
+/* 13 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var core = module.exports = {
+ version: '2.6.9'
+};
+if (typeof __e == 'number') __e = core;
+
+/***/ }),
+/* 14 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var dP = __w_pdfjs_require__(15);
+
+var createDesc = __w_pdfjs_require__(23);
+
+module.exports = __w_pdfjs_require__(19) ? function (object, key, value) {
+ return dP.f(object, key, createDesc(1, value));
+} : function (object, key, value) {
+ object[key] = value;
+ return object;
+};
+
+/***/ }),
+/* 15 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var anObject = __w_pdfjs_require__(16);
+
+var IE8_DOM_DEFINE = __w_pdfjs_require__(18);
+
+var toPrimitive = __w_pdfjs_require__(22);
+
+var dP = Object.defineProperty;
+exports.f = __w_pdfjs_require__(19) ? Object.defineProperty : function defineProperty(O, P, Attributes) {
+ anObject(O);
+ P = toPrimitive(P, true);
+ anObject(Attributes);
+ if (IE8_DOM_DEFINE) try {
+ return dP(O, P, Attributes);
+ } catch (e) {}
+ if ('get' in Attributes || 'set' in Attributes) throw TypeError('Accessors not supported!');
+ if ('value' in Attributes) O[P] = Attributes.value;
+ return O;
+};
+
+/***/ }),
+/* 16 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(17);
+
+module.exports = function (it) {
+ if (!isObject(it)) throw TypeError(it + ' is not an object!');
+ return it;
+};
+
+/***/ }),
+/* 17 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+module.exports = function (it) {
+ return _typeof(it) === 'object' ? it !== null : typeof it === 'function';
+};
+
+/***/ }),
+/* 18 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = !__w_pdfjs_require__(19) && !__w_pdfjs_require__(20)(function () {
+ return Object.defineProperty(__w_pdfjs_require__(21)('div'), 'a', {
+ get: function get() {
+ return 7;
+ }
+ }).a != 7;
+});
+
+/***/ }),
+/* 19 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = !__w_pdfjs_require__(20)(function () {
+ return Object.defineProperty({}, 'a', {
+ get: function get() {
+ return 7;
+ }
+ }).a != 7;
+});
+
+/***/ }),
+/* 20 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (exec) {
+ try {
+ return !!exec();
+ } catch (e) {
+ return true;
+ }
+};
+
+/***/ }),
+/* 21 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(17);
+
+var document = __w_pdfjs_require__(12).document;
+
+var is = isObject(document) && isObject(document.createElement);
+
+module.exports = function (it) {
+ return is ? document.createElement(it) : {};
+};
+
+/***/ }),
+/* 22 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(17);
+
+module.exports = function (it, S) {
+ if (!isObject(it)) return it;
+ var fn, val;
+ if (S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it))) return val;
+ if (typeof (fn = it.valueOf) == 'function' && !isObject(val = fn.call(it))) return val;
+ if (!S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it))) return val;
+ throw TypeError("Can't convert object to primitive value");
+};
+
+/***/ }),
+/* 23 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (bitmap, value) {
+ return {
+ enumerable: !(bitmap & 1),
+ configurable: !(bitmap & 2),
+ writable: !(bitmap & 4),
+ value: value
+ };
+};
+
+/***/ }),
+/* 24 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(12);
+
+var hide = __w_pdfjs_require__(14);
+
+var has = __w_pdfjs_require__(25);
+
+var SRC = __w_pdfjs_require__(26)('src');
+
+var $toString = __w_pdfjs_require__(27);
+
+var TO_STRING = 'toString';
+var TPL = ('' + $toString).split(TO_STRING);
+
+__w_pdfjs_require__(13).inspectSource = function (it) {
+ return $toString.call(it);
+};
+
+(module.exports = function (O, key, val, safe) {
+ var isFunction = typeof val == 'function';
+ if (isFunction) has(val, 'name') || hide(val, 'name', key);
+ if (O[key] === val) return;
+ if (isFunction) has(val, SRC) || hide(val, SRC, O[key] ? '' + O[key] : TPL.join(String(key)));
+
+ if (O === global) {
+ O[key] = val;
+ } else if (!safe) {
+ delete O[key];
+ hide(O, key, val);
+ } else if (O[key]) {
+ O[key] = val;
+ } else {
+ hide(O, key, val);
+ }
+})(Function.prototype, TO_STRING, function toString() {
+ return typeof this == 'function' && this[SRC] || $toString.call(this);
+});
+
+/***/ }),
+/* 25 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var hasOwnProperty = {}.hasOwnProperty;
+
+module.exports = function (it, key) {
+ return hasOwnProperty.call(it, key);
+};
+
+/***/ }),
+/* 26 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var id = 0;
+var px = Math.random();
+
+module.exports = function (key) {
+ return 'Symbol('.concat(key === undefined ? '' : key, ')_', (++id + px).toString(36));
+};
+
+/***/ }),
+/* 27 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = __w_pdfjs_require__(28)('native-function-to-string', Function.toString);
+
+/***/ }),
+/* 28 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var core = __w_pdfjs_require__(13);
+
+var global = __w_pdfjs_require__(12);
+
+var SHARED = '__core-js_shared__';
+var store = global[SHARED] || (global[SHARED] = {});
+(module.exports = function (key, value) {
+ return store[key] || (store[key] = value !== undefined ? value : {});
+})('versions', []).push({
+ version: core.version,
+ mode: __w_pdfjs_require__(29) ? 'pure' : 'global',
+ copyright: '© 2019 Denis Pushkarev (zloirock.ru)'
+});
+
+/***/ }),
+/* 29 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = false;
+
+/***/ }),
+/* 30 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var aFunction = __w_pdfjs_require__(31);
+
+module.exports = function (fn, that, length) {
+ aFunction(fn);
+ if (that === undefined) return fn;
+
+ switch (length) {
+ case 1:
+ return function (a) {
+ return fn.call(that, a);
+ };
+
+ case 2:
+ return function (a, b) {
+ return fn.call(that, a, b);
+ };
+
+ case 3:
+ return function (a, b, c) {
+ return fn.call(that, a, b, c);
+ };
+ }
+
+ return function () {
+ return fn.apply(that, arguments);
+ };
+};
+
+/***/ }),
+/* 31 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (it) {
+ if (typeof it != 'function') throw TypeError(it + ' is not a function!');
+ return it;
+};
+
+/***/ }),
+/* 32 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toInteger = __w_pdfjs_require__(33);
+
+var min = Math.min;
+
+module.exports = function (it) {
+ return it > 0 ? min(toInteger(it), 0x1fffffffffffff) : 0;
+};
+
+/***/ }),
+/* 33 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var ceil = Math.ceil;
+var floor = Math.floor;
+
+module.exports = function (it) {
+ return isNaN(it = +it) ? 0 : (it > 0 ? floor : ceil)(it);
+};
+
+/***/ }),
+/* 34 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isRegExp = __w_pdfjs_require__(35);
+
+var defined = __w_pdfjs_require__(38);
+
+module.exports = function (that, searchString, NAME) {
+ if (isRegExp(searchString)) throw TypeError('String#' + NAME + " doesn't accept regex!");
+ return String(defined(that));
+};
+
+/***/ }),
+/* 35 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(17);
+
+var cof = __w_pdfjs_require__(36);
+
+var MATCH = __w_pdfjs_require__(37)('match');
+
+module.exports = function (it) {
+ var isRegExp;
+ return isObject(it) && ((isRegExp = it[MATCH]) !== undefined ? !!isRegExp : cof(it) == 'RegExp');
+};
+
+/***/ }),
+/* 36 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toString = {}.toString;
+
+module.exports = function (it) {
+ return toString.call(it).slice(8, -1);
+};
+
+/***/ }),
+/* 37 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var store = __w_pdfjs_require__(28)('wks');
+
+var uid = __w_pdfjs_require__(26);
+
+var _Symbol = __w_pdfjs_require__(12).Symbol;
+
+var USE_SYMBOL = typeof _Symbol == 'function';
+
+var $exports = module.exports = function (name) {
+ return store[name] || (store[name] = USE_SYMBOL && _Symbol[name] || (USE_SYMBOL ? _Symbol : uid)('Symbol.' + name));
+};
+
+$exports.store = store;
+
+/***/ }),
+/* 38 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (it) {
+ if (it == undefined) throw TypeError("Can't call method on " + it);
+ return it;
+};
+
+/***/ }),
+/* 39 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var MATCH = __w_pdfjs_require__(37)('match');
+
+module.exports = function (KEY) {
+ var re = /./;
+
+ try {
+ '/./'[KEY](re);
+ } catch (e) {
+ try {
+ re[MATCH] = false;
+ return !'/./'[KEY](re);
+ } catch (f) {}
+ }
+
+ return true;
+};
+
+/***/ }),
+/* 40 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(41);
+
+module.exports = __w_pdfjs_require__(13).String.endsWith;
+
+/***/ }),
+/* 41 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+var toLength = __w_pdfjs_require__(32);
+
+var context = __w_pdfjs_require__(34);
+
+var ENDS_WITH = 'endsWith';
+var $endsWith = ''[ENDS_WITH];
+$export($export.P + $export.F * __w_pdfjs_require__(39)(ENDS_WITH), 'String', {
+ endsWith: function endsWith(searchString) {
+ var that = context(this, searchString, ENDS_WITH);
+ var endPosition = arguments.length > 1 ? arguments[1] : undefined;
+ var len = toLength(that.length);
+ var end = endPosition === undefined ? len : Math.min(toLength(endPosition), len);
+ var search = String(searchString);
+ return $endsWith ? $endsWith.call(that, search, end) : that.slice(end - search.length, end) === search;
+ }
+});
+
+/***/ }),
+/* 42 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(43);
+
+module.exports = __w_pdfjs_require__(13).String.includes;
+
+/***/ }),
+/* 43 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+var context = __w_pdfjs_require__(34);
+
+var INCLUDES = 'includes';
+$export($export.P + $export.F * __w_pdfjs_require__(39)(INCLUDES), 'String', {
+ includes: function includes(searchString) {
+ return !!~context(this, searchString, INCLUDES).indexOf(searchString, arguments.length > 1 ? arguments[1] : undefined);
+ }
+});
+
+/***/ }),
+/* 44 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(45);
+
+module.exports = __w_pdfjs_require__(13).Array.includes;
+
+/***/ }),
+/* 45 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+var $includes = __w_pdfjs_require__(46)(true);
+
+$export($export.P, 'Array', {
+ includes: function includes(el) {
+ return $includes(this, el, arguments.length > 1 ? arguments[1] : undefined);
+ }
+});
+
+__w_pdfjs_require__(50)('includes');
+
+/***/ }),
+/* 46 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toIObject = __w_pdfjs_require__(47);
+
+var toLength = __w_pdfjs_require__(32);
+
+var toAbsoluteIndex = __w_pdfjs_require__(49);
+
+module.exports = function (IS_INCLUDES) {
+ return function ($this, el, fromIndex) {
+ var O = toIObject($this);
+ var length = toLength(O.length);
+ var index = toAbsoluteIndex(fromIndex, length);
+ var value;
+ if (IS_INCLUDES && el != el) while (length > index) {
+ value = O[index++];
+ if (value != value) return true;
+ } else for (; length > index; index++) {
+ if (IS_INCLUDES || index in O) {
+ if (O[index] === el) return IS_INCLUDES || index || 0;
+ }
+ }
+ return !IS_INCLUDES && -1;
+ };
+};
+
+/***/ }),
+/* 47 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var IObject = __w_pdfjs_require__(48);
+
+var defined = __w_pdfjs_require__(38);
+
+module.exports = function (it) {
+ return IObject(defined(it));
+};
+
+/***/ }),
+/* 48 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var cof = __w_pdfjs_require__(36);
+
+module.exports = Object('z').propertyIsEnumerable(0) ? Object : function (it) {
+ return cof(it) == 'String' ? it.split('') : Object(it);
+};
+
+/***/ }),
+/* 49 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toInteger = __w_pdfjs_require__(33);
+
+var max = Math.max;
+var min = Math.min;
+
+module.exports = function (index, length) {
+ index = toInteger(index);
+ return index < 0 ? max(index + length, 0) : min(index, length);
+};
+
+/***/ }),
+/* 50 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var UNSCOPABLES = __w_pdfjs_require__(37)('unscopables');
+
+var ArrayProto = Array.prototype;
+if (ArrayProto[UNSCOPABLES] == undefined) __w_pdfjs_require__(14)(ArrayProto, UNSCOPABLES, {});
+
+module.exports = function (key) {
+ ArrayProto[UNSCOPABLES][key] = true;
+};
+
+/***/ }),
+/* 51 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(52);
+
+__w_pdfjs_require__(67);
+
+module.exports = __w_pdfjs_require__(13).Array.from;
+
+/***/ }),
+/* 52 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $at = __w_pdfjs_require__(53)(true);
+
+__w_pdfjs_require__(54)(String, 'String', function (iterated) {
+ this._t = String(iterated);
+ this._i = 0;
+}, function () {
+ var O = this._t;
+ var index = this._i;
+ var point;
+ if (index >= O.length) return {
+ value: undefined,
+ done: true
+ };
+ point = $at(O, index);
+ this._i += point.length;
+ return {
+ value: point,
+ done: false
+ };
+});
+
+/***/ }),
+/* 53 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toInteger = __w_pdfjs_require__(33);
+
+var defined = __w_pdfjs_require__(38);
+
+module.exports = function (TO_STRING) {
+ return function (that, pos) {
+ var s = String(defined(that));
+ var i = toInteger(pos);
+ var l = s.length;
+ var a, b;
+ if (i < 0 || i >= l) return TO_STRING ? '' : undefined;
+ a = s.charCodeAt(i);
+ return a < 0xd800 || a > 0xdbff || i + 1 === l || (b = s.charCodeAt(i + 1)) < 0xdc00 || b > 0xdfff ? TO_STRING ? s.charAt(i) : a : TO_STRING ? s.slice(i, i + 2) : (a - 0xd800 << 10) + (b - 0xdc00) + 0x10000;
+ };
+};
+
+/***/ }),
+/* 54 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var LIBRARY = __w_pdfjs_require__(29);
+
+var $export = __w_pdfjs_require__(11);
+
+var redefine = __w_pdfjs_require__(24);
+
+var hide = __w_pdfjs_require__(14);
+
+var Iterators = __w_pdfjs_require__(55);
+
+var $iterCreate = __w_pdfjs_require__(56);
+
+var setToStringTag = __w_pdfjs_require__(64);
+
+var getPrototypeOf = __w_pdfjs_require__(65);
+
+var ITERATOR = __w_pdfjs_require__(37)('iterator');
+
+var BUGGY = !([].keys && 'next' in [].keys());
+var FF_ITERATOR = '@@iterator';
+var KEYS = 'keys';
+var VALUES = 'values';
+
+var returnThis = function returnThis() {
+ return this;
+};
+
+module.exports = function (Base, NAME, Constructor, next, DEFAULT, IS_SET, FORCED) {
+ $iterCreate(Constructor, NAME, next);
+
+ var getMethod = function getMethod(kind) {
+ if (!BUGGY && kind in proto) return proto[kind];
+
+ switch (kind) {
+ case KEYS:
+ return function keys() {
+ return new Constructor(this, kind);
+ };
+
+ case VALUES:
+ return function values() {
+ return new Constructor(this, kind);
+ };
+ }
+
+ return function entries() {
+ return new Constructor(this, kind);
+ };
+ };
+
+ var TAG = NAME + ' Iterator';
+ var DEF_VALUES = DEFAULT == VALUES;
+ var VALUES_BUG = false;
+ var proto = Base.prototype;
+ var $native = proto[ITERATOR] || proto[FF_ITERATOR] || DEFAULT && proto[DEFAULT];
+ var $default = $native || getMethod(DEFAULT);
+ var $entries = DEFAULT ? !DEF_VALUES ? $default : getMethod('entries') : undefined;
+ var $anyNative = NAME == 'Array' ? proto.entries || $native : $native;
+ var methods, key, IteratorPrototype;
+
+ if ($anyNative) {
+ IteratorPrototype = getPrototypeOf($anyNative.call(new Base()));
+
+ if (IteratorPrototype !== Object.prototype && IteratorPrototype.next) {
+ setToStringTag(IteratorPrototype, TAG, true);
+ if (!LIBRARY && typeof IteratorPrototype[ITERATOR] != 'function') hide(IteratorPrototype, ITERATOR, returnThis);
+ }
+ }
+
+ if (DEF_VALUES && $native && $native.name !== VALUES) {
+ VALUES_BUG = true;
+
+ $default = function values() {
+ return $native.call(this);
+ };
+ }
+
+ if ((!LIBRARY || FORCED) && (BUGGY || VALUES_BUG || !proto[ITERATOR])) {
+ hide(proto, ITERATOR, $default);
+ }
+
+ Iterators[NAME] = $default;
+ Iterators[TAG] = returnThis;
+
+ if (DEFAULT) {
+ methods = {
+ values: DEF_VALUES ? $default : getMethod(VALUES),
+ keys: IS_SET ? $default : getMethod(KEYS),
+ entries: $entries
+ };
+ if (FORCED) for (key in methods) {
+ if (!(key in proto)) redefine(proto, key, methods[key]);
+ } else $export($export.P + $export.F * (BUGGY || VALUES_BUG), NAME, methods);
+ }
+
+ return methods;
+};
+
+/***/ }),
+/* 55 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = {};
+
+/***/ }),
+/* 56 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var create = __w_pdfjs_require__(57);
+
+var descriptor = __w_pdfjs_require__(23);
+
+var setToStringTag = __w_pdfjs_require__(64);
+
+var IteratorPrototype = {};
+
+__w_pdfjs_require__(14)(IteratorPrototype, __w_pdfjs_require__(37)('iterator'), function () {
+ return this;
+});
+
+module.exports = function (Constructor, NAME, next) {
+ Constructor.prototype = create(IteratorPrototype, {
+ next: descriptor(1, next)
+ });
+ setToStringTag(Constructor, NAME + ' Iterator');
+};
+
+/***/ }),
+/* 57 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var anObject = __w_pdfjs_require__(16);
+
+var dPs = __w_pdfjs_require__(58);
+
+var enumBugKeys = __w_pdfjs_require__(62);
+
+var IE_PROTO = __w_pdfjs_require__(61)('IE_PROTO');
+
+var Empty = function Empty() {};
+
+var PROTOTYPE = 'prototype';
+
+var _createDict = function createDict() {
+ var iframe = __w_pdfjs_require__(21)('iframe');
+
+ var i = enumBugKeys.length;
+ var lt = '<';
+ var gt = '>';
+ var iframeDocument;
+ iframe.style.display = 'none';
+
+ __w_pdfjs_require__(63).appendChild(iframe);
+
+ iframe.src = 'javascript:';
+ iframeDocument = iframe.contentWindow.document;
+ iframeDocument.open();
+ iframeDocument.write(lt + 'script' + gt + 'document.F=Object' + lt + '/script' + gt);
+ iframeDocument.close();
+ _createDict = iframeDocument.F;
+
+ while (i--) {
+ delete _createDict[PROTOTYPE][enumBugKeys[i]];
+ }
+
+ return _createDict();
+};
+
+module.exports = Object.create || function create(O, Properties) {
+ var result;
+
+ if (O !== null) {
+ Empty[PROTOTYPE] = anObject(O);
+ result = new Empty();
+ Empty[PROTOTYPE] = null;
+ result[IE_PROTO] = O;
+ } else result = _createDict();
+
+ return Properties === undefined ? result : dPs(result, Properties);
+};
+
+/***/ }),
+/* 58 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var dP = __w_pdfjs_require__(15);
+
+var anObject = __w_pdfjs_require__(16);
+
+var getKeys = __w_pdfjs_require__(59);
+
+module.exports = __w_pdfjs_require__(19) ? Object.defineProperties : function defineProperties(O, Properties) {
+ anObject(O);
+ var keys = getKeys(Properties);
+ var length = keys.length;
+ var i = 0;
+ var P;
+
+ while (length > i) {
+ dP.f(O, P = keys[i++], Properties[P]);
+ }
+
+ return O;
+};
+
+/***/ }),
+/* 59 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $keys = __w_pdfjs_require__(60);
+
+var enumBugKeys = __w_pdfjs_require__(62);
+
+module.exports = Object.keys || function keys(O) {
+ return $keys(O, enumBugKeys);
+};
+
+/***/ }),
+/* 60 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var has = __w_pdfjs_require__(25);
+
+var toIObject = __w_pdfjs_require__(47);
+
+var arrayIndexOf = __w_pdfjs_require__(46)(false);
+
+var IE_PROTO = __w_pdfjs_require__(61)('IE_PROTO');
+
+module.exports = function (object, names) {
+ var O = toIObject(object);
+ var i = 0;
+ var result = [];
+ var key;
+
+ for (key in O) {
+ if (key != IE_PROTO) has(O, key) && result.push(key);
+ }
+
+ while (names.length > i) {
+ if (has(O, key = names[i++])) {
+ ~arrayIndexOf(result, key) || result.push(key);
+ }
+ }
+
+ return result;
+};
+
+/***/ }),
+/* 61 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var shared = __w_pdfjs_require__(28)('keys');
+
+var uid = __w_pdfjs_require__(26);
+
+module.exports = function (key) {
+ return shared[key] || (shared[key] = uid(key));
+};
+
+/***/ }),
+/* 62 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = 'constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf'.split(',');
+
+/***/ }),
+/* 63 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var document = __w_pdfjs_require__(12).document;
+
+module.exports = document && document.documentElement;
+
+/***/ }),
+/* 64 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var def = __w_pdfjs_require__(15).f;
+
+var has = __w_pdfjs_require__(25);
+
+var TAG = __w_pdfjs_require__(37)('toStringTag');
+
+module.exports = function (it, tag, stat) {
+ if (it && !has(it = stat ? it : it.prototype, TAG)) def(it, TAG, {
+ configurable: true,
+ value: tag
+ });
+};
+
+/***/ }),
+/* 65 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var has = __w_pdfjs_require__(25);
+
+var toObject = __w_pdfjs_require__(66);
+
+var IE_PROTO = __w_pdfjs_require__(61)('IE_PROTO');
+
+var ObjectProto = Object.prototype;
+
+module.exports = Object.getPrototypeOf || function (O) {
+ O = toObject(O);
+ if (has(O, IE_PROTO)) return O[IE_PROTO];
+
+ if (typeof O.constructor == 'function' && O instanceof O.constructor) {
+ return O.constructor.prototype;
+ }
+
+ return O instanceof Object ? ObjectProto : null;
+};
+
+/***/ }),
+/* 66 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var defined = __w_pdfjs_require__(38);
+
+module.exports = function (it) {
+ return Object(defined(it));
+};
+
+/***/ }),
+/* 67 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var ctx = __w_pdfjs_require__(30);
+
+var $export = __w_pdfjs_require__(11);
+
+var toObject = __w_pdfjs_require__(66);
+
+var call = __w_pdfjs_require__(68);
+
+var isArrayIter = __w_pdfjs_require__(69);
+
+var toLength = __w_pdfjs_require__(32);
+
+var createProperty = __w_pdfjs_require__(70);
+
+var getIterFn = __w_pdfjs_require__(71);
+
+$export($export.S + $export.F * !__w_pdfjs_require__(73)(function (iter) {
+ Array.from(iter);
+}), 'Array', {
+ from: function from(arrayLike) {
+ var O = toObject(arrayLike);
+ var C = typeof this == 'function' ? this : Array;
+ var aLen = arguments.length;
+ var mapfn = aLen > 1 ? arguments[1] : undefined;
+ var mapping = mapfn !== undefined;
+ var index = 0;
+ var iterFn = getIterFn(O);
+ var length, result, step, iterator;
+ if (mapping) mapfn = ctx(mapfn, aLen > 2 ? arguments[2] : undefined, 2);
+
+ if (iterFn != undefined && !(C == Array && isArrayIter(iterFn))) {
+ for (iterator = iterFn.call(O), result = new C(); !(step = iterator.next()).done; index++) {
+ createProperty(result, index, mapping ? call(iterator, mapfn, [step.value, index], true) : step.value);
+ }
+ } else {
+ length = toLength(O.length);
+
+ for (result = new C(length); length > index; index++) {
+ createProperty(result, index, mapping ? mapfn(O[index], index) : O[index]);
+ }
+ }
+
+ result.length = index;
+ return result;
+ }
+});
+
+/***/ }),
+/* 68 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var anObject = __w_pdfjs_require__(16);
+
+module.exports = function (iterator, fn, value, entries) {
+ try {
+ return entries ? fn(anObject(value)[0], value[1]) : fn(value);
+ } catch (e) {
+ var ret = iterator['return'];
+ if (ret !== undefined) anObject(ret.call(iterator));
+ throw e;
+ }
+};
+
+/***/ }),
+/* 69 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var Iterators = __w_pdfjs_require__(55);
+
+var ITERATOR = __w_pdfjs_require__(37)('iterator');
+
+var ArrayProto = Array.prototype;
+
+module.exports = function (it) {
+ return it !== undefined && (Iterators.Array === it || ArrayProto[ITERATOR] === it);
+};
+
+/***/ }),
+/* 70 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $defineProperty = __w_pdfjs_require__(15);
+
+var createDesc = __w_pdfjs_require__(23);
+
+module.exports = function (object, index, value) {
+ if (index in object) $defineProperty.f(object, index, createDesc(0, value));else object[index] = value;
+};
+
+/***/ }),
+/* 71 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var classof = __w_pdfjs_require__(72);
+
+var ITERATOR = __w_pdfjs_require__(37)('iterator');
+
+var Iterators = __w_pdfjs_require__(55);
+
+module.exports = __w_pdfjs_require__(13).getIteratorMethod = function (it) {
+ if (it != undefined) return it[ITERATOR] || it['@@iterator'] || Iterators[classof(it)];
+};
+
+/***/ }),
+/* 72 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var cof = __w_pdfjs_require__(36);
+
+var TAG = __w_pdfjs_require__(37)('toStringTag');
+
+var ARG = cof(function () {
+ return arguments;
+}()) == 'Arguments';
+
+var tryGet = function tryGet(it, key) {
+ try {
+ return it[key];
+ } catch (e) {}
+};
+
+module.exports = function (it) {
+ var O, T, B;
+ return it === undefined ? 'Undefined' : it === null ? 'Null' : typeof (T = tryGet(O = Object(it), TAG)) == 'string' ? T : ARG ? cof(O) : (B = cof(O)) == 'Object' && typeof O.callee == 'function' ? 'Arguments' : B;
+};
+
+/***/ }),
+/* 73 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var ITERATOR = __w_pdfjs_require__(37)('iterator');
+
+var SAFE_CLOSING = false;
+
+try {
+ var riter = [7][ITERATOR]();
+
+ riter['return'] = function () {
+ SAFE_CLOSING = true;
+ };
+
+ Array.from(riter, function () {
+ throw 2;
+ });
+} catch (e) {}
+
+module.exports = function (exec, skipClosing) {
+ if (!skipClosing && !SAFE_CLOSING) return false;
+ var safe = false;
+
+ try {
+ var arr = [7];
+ var iter = arr[ITERATOR]();
+
+ iter.next = function () {
+ return {
+ done: safe = true
+ };
+ };
+
+ arr[ITERATOR] = function () {
+ return iter;
+ };
+
+ exec(arr);
+ } catch (e) {}
+
+ return safe;
+};
+
+/***/ }),
+/* 74 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(75);
+
+module.exports = __w_pdfjs_require__(13).Object.assign;
+
+/***/ }),
+/* 75 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+$export($export.S + $export.F, 'Object', {
+ assign: __w_pdfjs_require__(76)
+});
+
+/***/ }),
+/* 76 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var DESCRIPTORS = __w_pdfjs_require__(19);
+
+var getKeys = __w_pdfjs_require__(59);
+
+var gOPS = __w_pdfjs_require__(77);
+
+var pIE = __w_pdfjs_require__(78);
+
+var toObject = __w_pdfjs_require__(66);
+
+var IObject = __w_pdfjs_require__(48);
+
+var $assign = Object.assign;
+module.exports = !$assign || __w_pdfjs_require__(20)(function () {
+ var A = {};
+ var B = {};
+ var S = Symbol();
+ var K = 'abcdefghijklmnopqrst';
+ A[S] = 7;
+ K.split('').forEach(function (k) {
+ B[k] = k;
+ });
+ return $assign({}, A)[S] != 7 || Object.keys($assign({}, B)).join('') != K;
+}) ? function assign(target, source) {
+ var T = toObject(target);
+ var aLen = arguments.length;
+ var index = 1;
+ var getSymbols = gOPS.f;
+ var isEnum = pIE.f;
+
+ while (aLen > index) {
+ var S = IObject(arguments[index++]);
+ var keys = getSymbols ? getKeys(S).concat(getSymbols(S)) : getKeys(S);
+ var length = keys.length;
+ var j = 0;
+ var key;
+
+ while (length > j) {
+ key = keys[j++];
+ if (!DESCRIPTORS || isEnum.call(S, key)) T[key] = S[key];
+ }
+ }
+
+ return T;
+} : $assign;
+
+/***/ }),
+/* 77 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+exports.f = Object.getOwnPropertySymbols;
+
+/***/ }),
+/* 78 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+exports.f = {}.propertyIsEnumerable;
+
+/***/ }),
+/* 79 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(80);
+
+module.exports = __w_pdfjs_require__(13).Math.log2;
+
+/***/ }),
+/* 80 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+$export($export.S, 'Math', {
+ log2: function log2(x) {
+ return Math.log(x) / Math.LN2;
+ }
+});
+
+/***/ }),
+/* 81 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(82);
+
+module.exports = __w_pdfjs_require__(13).Number.isNaN;
+
+/***/ }),
+/* 82 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+$export($export.S, 'Number', {
+ isNaN: function isNaN(number) {
+ return number != number;
+ }
+});
+
+/***/ }),
+/* 83 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(84);
+
+module.exports = __w_pdfjs_require__(13).Number.isInteger;
+
+/***/ }),
+/* 84 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+$export($export.S, 'Number', {
+ isInteger: __w_pdfjs_require__(85)
+});
+
+/***/ }),
+/* 85 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(17);
+
+var floor = Math.floor;
+
+module.exports = function isInteger(it) {
+ return !isObject(it) && isFinite(it) && floor(it) === it;
+};
+
+/***/ }),
+/* 86 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(87);
+
+__w_pdfjs_require__(52);
+
+__w_pdfjs_require__(88);
+
+__w_pdfjs_require__(91);
+
+__w_pdfjs_require__(104);
+
+__w_pdfjs_require__(105);
+
+module.exports = __w_pdfjs_require__(13).Promise;
+
+/***/ }),
+/* 87 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var classof = __w_pdfjs_require__(72);
+
+var test = {};
+test[__w_pdfjs_require__(37)('toStringTag')] = 'z';
+
+if (test + '' != '[object z]') {
+ __w_pdfjs_require__(24)(Object.prototype, 'toString', function toString() {
+ return '[object ' + classof(this) + ']';
+ }, true);
+}
+
+/***/ }),
+/* 88 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $iterators = __w_pdfjs_require__(89);
+
+var getKeys = __w_pdfjs_require__(59);
+
+var redefine = __w_pdfjs_require__(24);
+
+var global = __w_pdfjs_require__(12);
+
+var hide = __w_pdfjs_require__(14);
+
+var Iterators = __w_pdfjs_require__(55);
+
+var wks = __w_pdfjs_require__(37);
+
+var ITERATOR = wks('iterator');
+var TO_STRING_TAG = wks('toStringTag');
+var ArrayValues = Iterators.Array;
+var DOMIterables = {
+ CSSRuleList: true,
+ CSSStyleDeclaration: false,
+ CSSValueList: false,
+ ClientRectList: false,
+ DOMRectList: false,
+ DOMStringList: false,
+ DOMTokenList: true,
+ DataTransferItemList: false,
+ FileList: false,
+ HTMLAllCollection: false,
+ HTMLCollection: false,
+ HTMLFormElement: false,
+ HTMLSelectElement: false,
+ MediaList: true,
+ MimeTypeArray: false,
+ NamedNodeMap: false,
+ NodeList: true,
+ PaintRequestList: false,
+ Plugin: false,
+ PluginArray: false,
+ SVGLengthList: false,
+ SVGNumberList: false,
+ SVGPathSegList: false,
+ SVGPointList: false,
+ SVGStringList: false,
+ SVGTransformList: false,
+ SourceBufferList: false,
+ StyleSheetList: true,
+ TextTrackCueList: false,
+ TextTrackList: false,
+ TouchList: false
+};
+
+for (var collections = getKeys(DOMIterables), i = 0; i < collections.length; i++) {
+ var NAME = collections[i];
+ var explicit = DOMIterables[NAME];
+ var Collection = global[NAME];
+ var proto = Collection && Collection.prototype;
+ var key;
+
+ if (proto) {
+ if (!proto[ITERATOR]) hide(proto, ITERATOR, ArrayValues);
+ if (!proto[TO_STRING_TAG]) hide(proto, TO_STRING_TAG, NAME);
+ Iterators[NAME] = ArrayValues;
+ if (explicit) for (key in $iterators) {
+ if (!proto[key]) redefine(proto, key, $iterators[key], true);
+ }
+ }
+}
+
+/***/ }),
+/* 89 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var addToUnscopables = __w_pdfjs_require__(50);
+
+var step = __w_pdfjs_require__(90);
+
+var Iterators = __w_pdfjs_require__(55);
+
+var toIObject = __w_pdfjs_require__(47);
+
+module.exports = __w_pdfjs_require__(54)(Array, 'Array', function (iterated, kind) {
+ this._t = toIObject(iterated);
+ this._i = 0;
+ this._k = kind;
+}, function () {
+ var O = this._t;
+ var kind = this._k;
+ var index = this._i++;
+
+ if (!O || index >= O.length) {
+ this._t = undefined;
+ return step(1);
+ }
+
+ if (kind == 'keys') return step(0, index);
+ if (kind == 'values') return step(0, O[index]);
+ return step(0, [index, O[index]]);
+}, 'values');
+Iterators.Arguments = Iterators.Array;
+addToUnscopables('keys');
+addToUnscopables('values');
+addToUnscopables('entries');
+
+/***/ }),
+/* 90 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (done, value) {
+ return {
+ value: value,
+ done: !!done
+ };
+};
+
+/***/ }),
+/* 91 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var LIBRARY = __w_pdfjs_require__(29);
+
+var global = __w_pdfjs_require__(12);
+
+var ctx = __w_pdfjs_require__(30);
+
+var classof = __w_pdfjs_require__(72);
+
+var $export = __w_pdfjs_require__(11);
+
+var isObject = __w_pdfjs_require__(17);
+
+var aFunction = __w_pdfjs_require__(31);
+
+var anInstance = __w_pdfjs_require__(92);
+
+var forOf = __w_pdfjs_require__(93);
+
+var speciesConstructor = __w_pdfjs_require__(94);
+
+var task = __w_pdfjs_require__(95).set;
+
+var microtask = __w_pdfjs_require__(97)();
+
+var newPromiseCapabilityModule = __w_pdfjs_require__(98);
+
+var perform = __w_pdfjs_require__(99);
+
+var userAgent = __w_pdfjs_require__(100);
+
+var promiseResolve = __w_pdfjs_require__(101);
+
+var PROMISE = 'Promise';
+var TypeError = global.TypeError;
+var process = global.process;
+var versions = process && process.versions;
+var v8 = versions && versions.v8 || '';
+var $Promise = global[PROMISE];
+var isNode = classof(process) == 'process';
+
+var empty = function empty() {};
+
+var Internal, newGenericPromiseCapability, OwnPromiseCapability, Wrapper;
+var newPromiseCapability = newGenericPromiseCapability = newPromiseCapabilityModule.f;
+var USE_NATIVE = !!function () {
+ try {
+ var promise = $Promise.resolve(1);
+
+ var FakePromise = (promise.constructor = {})[__w_pdfjs_require__(37)('species')] = function (exec) {
+ exec(empty, empty);
+ };
+
+ return (isNode || typeof PromiseRejectionEvent == 'function') && promise.then(empty) instanceof FakePromise && v8.indexOf('6.6') !== 0 && userAgent.indexOf('Chrome/66') === -1;
+ } catch (e) {}
+}();
+
+var isThenable = function isThenable(it) {
+ var then;
+ return isObject(it) && typeof (then = it.then) == 'function' ? then : false;
+};
+
+var notify = function notify(promise, isReject) {
+ if (promise._n) return;
+ promise._n = true;
+ var chain = promise._c;
+ microtask(function () {
+ var value = promise._v;
+ var ok = promise._s == 1;
+ var i = 0;
+
+ var run = function run(reaction) {
+ var handler = ok ? reaction.ok : reaction.fail;
+ var resolve = reaction.resolve;
+ var reject = reaction.reject;
+ var domain = reaction.domain;
+ var result, then, exited;
+
+ try {
+ if (handler) {
+ if (!ok) {
+ if (promise._h == 2) onHandleUnhandled(promise);
+ promise._h = 1;
+ }
+
+ if (handler === true) result = value;else {
+ if (domain) domain.enter();
+ result = handler(value);
+
+ if (domain) {
+ domain.exit();
+ exited = true;
+ }
+ }
+
+ if (result === reaction.promise) {
+ reject(TypeError('Promise-chain cycle'));
+ } else if (then = isThenable(result)) {
+ then.call(result, resolve, reject);
+ } else resolve(result);
+ } else reject(value);
+ } catch (e) {
+ if (domain && !exited) domain.exit();
+ reject(e);
+ }
+ };
+
+ while (chain.length > i) {
+ run(chain[i++]);
+ }
+
+ promise._c = [];
+ promise._n = false;
+ if (isReject && !promise._h) onUnhandled(promise);
+ });
+};
+
+var onUnhandled = function onUnhandled(promise) {
+ task.call(global, function () {
+ var value = promise._v;
+ var unhandled = isUnhandled(promise);
+ var result, handler, console;
+
+ if (unhandled) {
+ result = perform(function () {
+ if (isNode) {
+ process.emit('unhandledRejection', value, promise);
+ } else if (handler = global.onunhandledrejection) {
+ handler({
+ promise: promise,
+ reason: value
+ });
+ } else if ((console = global.console) && console.error) {
+ console.error('Unhandled promise rejection', value);
+ }
+ });
+ promise._h = isNode || isUnhandled(promise) ? 2 : 1;
+ }
+
+ promise._a = undefined;
+ if (unhandled && result.e) throw result.v;
+ });
+};
+
+var isUnhandled = function isUnhandled(promise) {
+ return promise._h !== 1 && (promise._a || promise._c).length === 0;
+};
+
+var onHandleUnhandled = function onHandleUnhandled(promise) {
+ task.call(global, function () {
+ var handler;
+
+ if (isNode) {
+ process.emit('rejectionHandled', promise);
+ } else if (handler = global.onrejectionhandled) {
+ handler({
+ promise: promise,
+ reason: promise._v
+ });
+ }
+ });
+};
+
+var $reject = function $reject(value) {
+ var promise = this;
+ if (promise._d) return;
+ promise._d = true;
+ promise = promise._w || promise;
+ promise._v = value;
+ promise._s = 2;
+ if (!promise._a) promise._a = promise._c.slice();
+ notify(promise, true);
+};
+
+var $resolve = function $resolve(value) {
+ var promise = this;
+ var then;
+ if (promise._d) return;
+ promise._d = true;
+ promise = promise._w || promise;
+
+ try {
+ if (promise === value) throw TypeError("Promise can't be resolved itself");
+
+ if (then = isThenable(value)) {
+ microtask(function () {
+ var wrapper = {
+ _w: promise,
+ _d: false
+ };
+
+ try {
+ then.call(value, ctx($resolve, wrapper, 1), ctx($reject, wrapper, 1));
+ } catch (e) {
+ $reject.call(wrapper, e);
+ }
+ });
+ } else {
+ promise._v = value;
+ promise._s = 1;
+ notify(promise, false);
+ }
+ } catch (e) {
+ $reject.call({
+ _w: promise,
+ _d: false
+ }, e);
+ }
+};
+
+if (!USE_NATIVE) {
+ $Promise = function Promise(executor) {
+ anInstance(this, $Promise, PROMISE, '_h');
+ aFunction(executor);
+ Internal.call(this);
+
+ try {
+ executor(ctx($resolve, this, 1), ctx($reject, this, 1));
+ } catch (err) {
+ $reject.call(this, err);
+ }
+ };
+
+ Internal = function Promise(executor) {
+ this._c = [];
+ this._a = undefined;
+ this._s = 0;
+ this._d = false;
+ this._v = undefined;
+ this._h = 0;
+ this._n = false;
+ };
+
+ Internal.prototype = __w_pdfjs_require__(102)($Promise.prototype, {
+ then: function then(onFulfilled, onRejected) {
+ var reaction = newPromiseCapability(speciesConstructor(this, $Promise));
+ reaction.ok = typeof onFulfilled == 'function' ? onFulfilled : true;
+ reaction.fail = typeof onRejected == 'function' && onRejected;
+ reaction.domain = isNode ? process.domain : undefined;
+
+ this._c.push(reaction);
+
+ if (this._a) this._a.push(reaction);
+ if (this._s) notify(this, false);
+ return reaction.promise;
+ },
+ 'catch': function _catch(onRejected) {
+ return this.then(undefined, onRejected);
+ }
+ });
+
+ OwnPromiseCapability = function OwnPromiseCapability() {
+ var promise = new Internal();
+ this.promise = promise;
+ this.resolve = ctx($resolve, promise, 1);
+ this.reject = ctx($reject, promise, 1);
+ };
+
+ newPromiseCapabilityModule.f = newPromiseCapability = function newPromiseCapability(C) {
+ return C === $Promise || C === Wrapper ? new OwnPromiseCapability(C) : newGenericPromiseCapability(C);
+ };
+}
+
+$export($export.G + $export.W + $export.F * !USE_NATIVE, {
+ Promise: $Promise
+});
+
+__w_pdfjs_require__(64)($Promise, PROMISE);
+
+__w_pdfjs_require__(103)(PROMISE);
+
+Wrapper = __w_pdfjs_require__(13)[PROMISE];
+$export($export.S + $export.F * !USE_NATIVE, PROMISE, {
+ reject: function reject(r) {
+ var capability = newPromiseCapability(this);
+ var $$reject = capability.reject;
+ $$reject(r);
+ return capability.promise;
+ }
+});
+$export($export.S + $export.F * (LIBRARY || !USE_NATIVE), PROMISE, {
+ resolve: function resolve(x) {
+ return promiseResolve(LIBRARY && this === Wrapper ? $Promise : this, x);
+ }
+});
+$export($export.S + $export.F * !(USE_NATIVE && __w_pdfjs_require__(73)(function (iter) {
+ $Promise.all(iter)['catch'](empty);
+})), PROMISE, {
+ all: function all(iterable) {
+ var C = this;
+ var capability = newPromiseCapability(C);
+ var resolve = capability.resolve;
+ var reject = capability.reject;
+ var result = perform(function () {
+ var values = [];
+ var index = 0;
+ var remaining = 1;
+ forOf(iterable, false, function (promise) {
+ var $index = index++;
+ var alreadyCalled = false;
+ values.push(undefined);
+ remaining++;
+ C.resolve(promise).then(function (value) {
+ if (alreadyCalled) return;
+ alreadyCalled = true;
+ values[$index] = value;
+ --remaining || resolve(values);
+ }, reject);
+ });
+ --remaining || resolve(values);
+ });
+ if (result.e) reject(result.v);
+ return capability.promise;
+ },
+ race: function race(iterable) {
+ var C = this;
+ var capability = newPromiseCapability(C);
+ var reject = capability.reject;
+ var result = perform(function () {
+ forOf(iterable, false, function (promise) {
+ C.resolve(promise).then(capability.resolve, reject);
+ });
+ });
+ if (result.e) reject(result.v);
+ return capability.promise;
+ }
+});
+
+/***/ }),
+/* 92 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (it, Constructor, name, forbiddenField) {
+ if (!(it instanceof Constructor) || forbiddenField !== undefined && forbiddenField in it) {
+ throw TypeError(name + ': incorrect invocation!');
+ }
+
+ return it;
+};
+
+/***/ }),
+/* 93 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var ctx = __w_pdfjs_require__(30);
+
+var call = __w_pdfjs_require__(68);
+
+var isArrayIter = __w_pdfjs_require__(69);
+
+var anObject = __w_pdfjs_require__(16);
+
+var toLength = __w_pdfjs_require__(32);
+
+var getIterFn = __w_pdfjs_require__(71);
+
+var BREAK = {};
+var RETURN = {};
+
+var _exports = module.exports = function (iterable, entries, fn, that, ITERATOR) {
+ var iterFn = ITERATOR ? function () {
+ return iterable;
+ } : getIterFn(iterable);
+ var f = ctx(fn, that, entries ? 2 : 1);
+ var index = 0;
+ var length, step, iterator, result;
+ if (typeof iterFn != 'function') throw TypeError(iterable + ' is not iterable!');
+ if (isArrayIter(iterFn)) for (length = toLength(iterable.length); length > index; index++) {
+ result = entries ? f(anObject(step = iterable[index])[0], step[1]) : f(iterable[index]);
+ if (result === BREAK || result === RETURN) return result;
+ } else for (iterator = iterFn.call(iterable); !(step = iterator.next()).done;) {
+ result = call(iterator, f, step.value, entries);
+ if (result === BREAK || result === RETURN) return result;
+ }
+};
+
+_exports.BREAK = BREAK;
+_exports.RETURN = RETURN;
+
+/***/ }),
+/* 94 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var anObject = __w_pdfjs_require__(16);
+
+var aFunction = __w_pdfjs_require__(31);
+
+var SPECIES = __w_pdfjs_require__(37)('species');
+
+module.exports = function (O, D) {
+ var C = anObject(O).constructor;
+ var S;
+ return C === undefined || (S = anObject(C)[SPECIES]) == undefined ? D : aFunction(S);
+};
+
+/***/ }),
+/* 95 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var ctx = __w_pdfjs_require__(30);
+
+var invoke = __w_pdfjs_require__(96);
+
+var html = __w_pdfjs_require__(63);
+
+var cel = __w_pdfjs_require__(21);
+
+var global = __w_pdfjs_require__(12);
+
+var process = global.process;
+var setTask = global.setImmediate;
+var clearTask = global.clearImmediate;
+var MessageChannel = global.MessageChannel;
+var Dispatch = global.Dispatch;
+var counter = 0;
+var queue = {};
+var ONREADYSTATECHANGE = 'onreadystatechange';
+var defer, channel, port;
+
+var run = function run() {
+ var id = +this;
+
+ if (queue.hasOwnProperty(id)) {
+ var fn = queue[id];
+ delete queue[id];
+ fn();
+ }
+};
+
+var listener = function listener(event) {
+ run.call(event.data);
+};
+
+if (!setTask || !clearTask) {
+ setTask = function setImmediate(fn) {
+ var args = [];
+ var i = 1;
+
+ while (arguments.length > i) {
+ args.push(arguments[i++]);
+ }
+
+ queue[++counter] = function () {
+ invoke(typeof fn == 'function' ? fn : Function(fn), args);
+ };
+
+ defer(counter);
+ return counter;
+ };
+
+ clearTask = function clearImmediate(id) {
+ delete queue[id];
+ };
+
+ if (__w_pdfjs_require__(36)(process) == 'process') {
+ defer = function defer(id) {
+ process.nextTick(ctx(run, id, 1));
+ };
+ } else if (Dispatch && Dispatch.now) {
+ defer = function defer(id) {
+ Dispatch.now(ctx(run, id, 1));
+ };
+ } else if (MessageChannel) {
+ channel = new MessageChannel();
+ port = channel.port2;
+ channel.port1.onmessage = listener;
+ defer = ctx(port.postMessage, port, 1);
+ } else if (global.addEventListener && typeof postMessage == 'function' && !global.importScripts) {
+ defer = function defer(id) {
+ global.postMessage(id + '', '*');
+ };
+
+ global.addEventListener('message', listener, false);
+ } else if (ONREADYSTATECHANGE in cel('script')) {
+ defer = function defer(id) {
+ html.appendChild(cel('script'))[ONREADYSTATECHANGE] = function () {
+ html.removeChild(this);
+ run.call(id);
+ };
+ };
+ } else {
+ defer = function defer(id) {
+ setTimeout(ctx(run, id, 1), 0);
+ };
+ }
+}
+
+module.exports = {
+ set: setTask,
+ clear: clearTask
+};
+
+/***/ }),
+/* 96 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (fn, args, that) {
+ var un = that === undefined;
+
+ switch (args.length) {
+ case 0:
+ return un ? fn() : fn.call(that);
+
+ case 1:
+ return un ? fn(args[0]) : fn.call(that, args[0]);
+
+ case 2:
+ return un ? fn(args[0], args[1]) : fn.call(that, args[0], args[1]);
+
+ case 3:
+ return un ? fn(args[0], args[1], args[2]) : fn.call(that, args[0], args[1], args[2]);
+
+ case 4:
+ return un ? fn(args[0], args[1], args[2], args[3]) : fn.call(that, args[0], args[1], args[2], args[3]);
+ }
+
+ return fn.apply(that, args);
+};
+
+/***/ }),
+/* 97 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(12);
+
+var macrotask = __w_pdfjs_require__(95).set;
+
+var Observer = global.MutationObserver || global.WebKitMutationObserver;
+var process = global.process;
+var Promise = global.Promise;
+var isNode = __w_pdfjs_require__(36)(process) == 'process';
+
+module.exports = function () {
+ var head, last, notify;
+
+ var flush = function flush() {
+ var parent, fn;
+ if (isNode && (parent = process.domain)) parent.exit();
+
+ while (head) {
+ fn = head.fn;
+ head = head.next;
+
+ try {
+ fn();
+ } catch (e) {
+ if (head) notify();else last = undefined;
+ throw e;
+ }
+ }
+
+ last = undefined;
+ if (parent) parent.enter();
+ };
+
+ if (isNode) {
+ notify = function notify() {
+ process.nextTick(flush);
+ };
+ } else if (Observer && !(global.navigator && global.navigator.standalone)) {
+ var toggle = true;
+ var node = document.createTextNode('');
+ new Observer(flush).observe(node, {
+ characterData: true
+ });
+
+ notify = function notify() {
+ node.data = toggle = !toggle;
+ };
+ } else if (Promise && Promise.resolve) {
+ var promise = Promise.resolve(undefined);
+
+ notify = function notify() {
+ promise.then(flush);
+ };
+ } else {
+ notify = function notify() {
+ macrotask.call(global, flush);
+ };
+ }
+
+ return function (fn) {
+ var task = {
+ fn: fn,
+ next: undefined
+ };
+ if (last) last.next = task;
+
+ if (!head) {
+ head = task;
+ notify();
+ }
+
+ last = task;
+ };
+};
+
+/***/ }),
+/* 98 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var aFunction = __w_pdfjs_require__(31);
+
+function PromiseCapability(C) {
+ var resolve, reject;
+ this.promise = new C(function ($$resolve, $$reject) {
+ if (resolve !== undefined || reject !== undefined) throw TypeError('Bad Promise constructor');
+ resolve = $$resolve;
+ reject = $$reject;
+ });
+ this.resolve = aFunction(resolve);
+ this.reject = aFunction(reject);
+}
+
+module.exports.f = function (C) {
+ return new PromiseCapability(C);
+};
+
+/***/ }),
+/* 99 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+module.exports = function (exec) {
+ try {
+ return {
+ e: false,
+ v: exec()
+ };
+ } catch (e) {
+ return {
+ e: true,
+ v: e
+ };
+ }
+};
+
+/***/ }),
+/* 100 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(12);
+
+var navigator = global.navigator;
+module.exports = navigator && navigator.userAgent || '';
+
+/***/ }),
+/* 101 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var anObject = __w_pdfjs_require__(16);
+
+var isObject = __w_pdfjs_require__(17);
+
+var newPromiseCapability = __w_pdfjs_require__(98);
+
+module.exports = function (C, x) {
+ anObject(C);
+ if (isObject(x) && x.constructor === C) return x;
+ var promiseCapability = newPromiseCapability.f(C);
+ var resolve = promiseCapability.resolve;
+ resolve(x);
+ return promiseCapability.promise;
+};
+
+/***/ }),
+/* 102 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var redefine = __w_pdfjs_require__(24);
+
+module.exports = function (target, src, safe) {
+ for (var key in src) {
+ redefine(target, key, src[key], safe);
+ }
+
+ return target;
+};
+
+/***/ }),
+/* 103 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(12);
+
+var dP = __w_pdfjs_require__(15);
+
+var DESCRIPTORS = __w_pdfjs_require__(19);
+
+var SPECIES = __w_pdfjs_require__(37)('species');
+
+module.exports = function (KEY) {
+ var C = global[KEY];
+ if (DESCRIPTORS && C && !C[SPECIES]) dP.f(C, SPECIES, {
+ configurable: true,
+ get: function get() {
+ return this;
+ }
+ });
+};
+
+/***/ }),
+/* 104 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+var core = __w_pdfjs_require__(13);
+
+var global = __w_pdfjs_require__(12);
+
+var speciesConstructor = __w_pdfjs_require__(94);
+
+var promiseResolve = __w_pdfjs_require__(101);
+
+$export($export.P + $export.R, 'Promise', {
+ 'finally': function _finally(onFinally) {
+ var C = speciesConstructor(this, core.Promise || global.Promise);
+ var isFunction = typeof onFinally == 'function';
+ return this.then(isFunction ? function (x) {
+ return promiseResolve(C, onFinally()).then(function () {
+ return x;
+ });
+ } : onFinally, isFunction ? function (e) {
+ return promiseResolve(C, onFinally()).then(function () {
+ throw e;
+ });
+ } : onFinally);
+ }
+});
+
+/***/ }),
+/* 105 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+var newPromiseCapability = __w_pdfjs_require__(98);
+
+var perform = __w_pdfjs_require__(99);
+
+$export($export.S, 'Promise', {
+ 'try': function _try(callbackfn) {
+ var promiseCapability = newPromiseCapability.f(this);
+ var result = perform(callbackfn);
+ (result.e ? promiseCapability.reject : promiseCapability.resolve)(result.v);
+ return promiseCapability.promise;
+ }
+});
+
+/***/ }),
+/* 106 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(87);
+
+__w_pdfjs_require__(88);
+
+__w_pdfjs_require__(107);
+
+__w_pdfjs_require__(119);
+
+__w_pdfjs_require__(121);
+
+module.exports = __w_pdfjs_require__(13).WeakMap;
+
+/***/ }),
+/* 107 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(12);
+
+var each = __w_pdfjs_require__(108)(0);
+
+var redefine = __w_pdfjs_require__(24);
+
+var meta = __w_pdfjs_require__(112);
+
+var assign = __w_pdfjs_require__(76);
+
+var weak = __w_pdfjs_require__(113);
+
+var isObject = __w_pdfjs_require__(17);
+
+var validate = __w_pdfjs_require__(114);
+
+var NATIVE_WEAK_MAP = __w_pdfjs_require__(114);
+
+var IS_IE11 = !global.ActiveXObject && 'ActiveXObject' in global;
+var WEAK_MAP = 'WeakMap';
+var getWeak = meta.getWeak;
+var isExtensible = Object.isExtensible;
+var uncaughtFrozenStore = weak.ufstore;
+var InternalMap;
+
+var wrapper = function wrapper(get) {
+ return function WeakMap() {
+ return get(this, arguments.length > 0 ? arguments[0] : undefined);
+ };
+};
+
+var methods = {
+ get: function get(key) {
+ if (isObject(key)) {
+ var data = getWeak(key);
+ if (data === true) return uncaughtFrozenStore(validate(this, WEAK_MAP)).get(key);
+ return data ? data[this._i] : undefined;
+ }
+ },
+ set: function set(key, value) {
+ return weak.def(validate(this, WEAK_MAP), key, value);
+ }
+};
+
+var $WeakMap = module.exports = __w_pdfjs_require__(115)(WEAK_MAP, wrapper, methods, weak, true, true);
+
+if (NATIVE_WEAK_MAP && IS_IE11) {
+ InternalMap = weak.getConstructor(wrapper, WEAK_MAP);
+ assign(InternalMap.prototype, methods);
+ meta.NEED = true;
+ each(['delete', 'has', 'get', 'set'], function (key) {
+ var proto = $WeakMap.prototype;
+ var method = proto[key];
+ redefine(proto, key, function (a, b) {
+ if (isObject(a) && !isExtensible(a)) {
+ if (!this._f) this._f = new InternalMap();
+
+ var result = this._f[key](a, b);
+
+ return key == 'set' ? this : result;
+ }
+
+ return method.call(this, a, b);
+ });
+ });
+}
+
+/***/ }),
+/* 108 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var ctx = __w_pdfjs_require__(30);
+
+var IObject = __w_pdfjs_require__(48);
+
+var toObject = __w_pdfjs_require__(66);
+
+var toLength = __w_pdfjs_require__(32);
+
+var asc = __w_pdfjs_require__(109);
+
+module.exports = function (TYPE, $create) {
+ var IS_MAP = TYPE == 1;
+ var IS_FILTER = TYPE == 2;
+ var IS_SOME = TYPE == 3;
+ var IS_EVERY = TYPE == 4;
+ var IS_FIND_INDEX = TYPE == 6;
+ var NO_HOLES = TYPE == 5 || IS_FIND_INDEX;
+ var create = $create || asc;
+ return function ($this, callbackfn, that) {
+ var O = toObject($this);
+ var self = IObject(O);
+ var f = ctx(callbackfn, that, 3);
+ var length = toLength(self.length);
+ var index = 0;
+ var result = IS_MAP ? create($this, length) : IS_FILTER ? create($this, 0) : undefined;
+ var val, res;
+
+ for (; length > index; index++) {
+ if (NO_HOLES || index in self) {
+ val = self[index];
+ res = f(val, index, O);
+
+ if (TYPE) {
+ if (IS_MAP) result[index] = res;else if (res) switch (TYPE) {
+ case 3:
+ return true;
+
+ case 5:
+ return val;
+
+ case 6:
+ return index;
+
+ case 2:
+ result.push(val);
+ } else if (IS_EVERY) return false;
+ }
+ }
+ }
+
+ return IS_FIND_INDEX ? -1 : IS_SOME || IS_EVERY ? IS_EVERY : result;
+ };
+};
+
+/***/ }),
+/* 109 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var speciesConstructor = __w_pdfjs_require__(110);
+
+module.exports = function (original, length) {
+ return new (speciesConstructor(original))(length);
+};
+
+/***/ }),
+/* 110 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(17);
+
+var isArray = __w_pdfjs_require__(111);
+
+var SPECIES = __w_pdfjs_require__(37)('species');
+
+module.exports = function (original) {
+ var C;
+
+ if (isArray(original)) {
+ C = original.constructor;
+ if (typeof C == 'function' && (C === Array || isArray(C.prototype))) C = undefined;
+
+ if (isObject(C)) {
+ C = C[SPECIES];
+ if (C === null) C = undefined;
+ }
+ }
+
+ return C === undefined ? Array : C;
+};
+
+/***/ }),
+/* 111 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var cof = __w_pdfjs_require__(36);
+
+module.exports = Array.isArray || function isArray(arg) {
+ return cof(arg) == 'Array';
+};
+
+/***/ }),
+/* 112 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var META = __w_pdfjs_require__(26)('meta');
+
+var isObject = __w_pdfjs_require__(17);
+
+var has = __w_pdfjs_require__(25);
+
+var setDesc = __w_pdfjs_require__(15).f;
+
+var id = 0;
+
+var isExtensible = Object.isExtensible || function () {
+ return true;
+};
+
+var FREEZE = !__w_pdfjs_require__(20)(function () {
+ return isExtensible(Object.preventExtensions({}));
+});
+
+var setMeta = function setMeta(it) {
+ setDesc(it, META, {
+ value: {
+ i: 'O' + ++id,
+ w: {}
+ }
+ });
+};
+
+var fastKey = function fastKey(it, create) {
+ if (!isObject(it)) return _typeof(it) == 'symbol' ? it : (typeof it == 'string' ? 'S' : 'P') + it;
+
+ if (!has(it, META)) {
+ if (!isExtensible(it)) return 'F';
+ if (!create) return 'E';
+ setMeta(it);
+ }
+
+ return it[META].i;
+};
+
+var getWeak = function getWeak(it, create) {
+ if (!has(it, META)) {
+ if (!isExtensible(it)) return true;
+ if (!create) return false;
+ setMeta(it);
+ }
+
+ return it[META].w;
+};
+
+var onFreeze = function onFreeze(it) {
+ if (FREEZE && meta.NEED && isExtensible(it) && !has(it, META)) setMeta(it);
+ return it;
+};
+
+var meta = module.exports = {
+ KEY: META,
+ NEED: false,
+ fastKey: fastKey,
+ getWeak: getWeak,
+ onFreeze: onFreeze
+};
+
+/***/ }),
+/* 113 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var redefineAll = __w_pdfjs_require__(102);
+
+var getWeak = __w_pdfjs_require__(112).getWeak;
+
+var anObject = __w_pdfjs_require__(16);
+
+var isObject = __w_pdfjs_require__(17);
+
+var anInstance = __w_pdfjs_require__(92);
+
+var forOf = __w_pdfjs_require__(93);
+
+var createArrayMethod = __w_pdfjs_require__(108);
+
+var $has = __w_pdfjs_require__(25);
+
+var validate = __w_pdfjs_require__(114);
+
+var arrayFind = createArrayMethod(5);
+var arrayFindIndex = createArrayMethod(6);
+var id = 0;
+
+var uncaughtFrozenStore = function uncaughtFrozenStore(that) {
+ return that._l || (that._l = new UncaughtFrozenStore());
+};
+
+var UncaughtFrozenStore = function UncaughtFrozenStore() {
+ this.a = [];
+};
+
+var findUncaughtFrozen = function findUncaughtFrozen(store, key) {
+ return arrayFind(store.a, function (it) {
+ return it[0] === key;
+ });
+};
+
+UncaughtFrozenStore.prototype = {
+ get: function get(key) {
+ var entry = findUncaughtFrozen(this, key);
+ if (entry) return entry[1];
+ },
+ has: function has(key) {
+ return !!findUncaughtFrozen(this, key);
+ },
+ set: function set(key, value) {
+ var entry = findUncaughtFrozen(this, key);
+ if (entry) entry[1] = value;else this.a.push([key, value]);
+ },
+ 'delete': function _delete(key) {
+ var index = arrayFindIndex(this.a, function (it) {
+ return it[0] === key;
+ });
+ if (~index) this.a.splice(index, 1);
+ return !!~index;
+ }
+};
+module.exports = {
+ getConstructor: function getConstructor(wrapper, NAME, IS_MAP, ADDER) {
+ var C = wrapper(function (that, iterable) {
+ anInstance(that, C, NAME, '_i');
+ that._t = NAME;
+ that._i = id++;
+ that._l = undefined;
+ if (iterable != undefined) forOf(iterable, IS_MAP, that[ADDER], that);
+ });
+ redefineAll(C.prototype, {
+ 'delete': function _delete(key) {
+ if (!isObject(key)) return false;
+ var data = getWeak(key);
+ if (data === true) return uncaughtFrozenStore(validate(this, NAME))['delete'](key);
+ return data && $has(data, this._i) && delete data[this._i];
+ },
+ has: function has(key) {
+ if (!isObject(key)) return false;
+ var data = getWeak(key);
+ if (data === true) return uncaughtFrozenStore(validate(this, NAME)).has(key);
+ return data && $has(data, this._i);
+ }
+ });
+ return C;
+ },
+ def: function def(that, key, value) {
+ var data = getWeak(anObject(key), true);
+ if (data === true) uncaughtFrozenStore(that).set(key, value);else data[that._i] = value;
+ return that;
+ },
+ ufstore: uncaughtFrozenStore
+};
+
+/***/ }),
+/* 114 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(17);
+
+module.exports = function (it, TYPE) {
+ if (!isObject(it) || it._t !== TYPE) throw TypeError('Incompatible receiver, ' + TYPE + ' required!');
+ return it;
+};
+
+/***/ }),
+/* 115 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(12);
+
+var $export = __w_pdfjs_require__(11);
+
+var redefine = __w_pdfjs_require__(24);
+
+var redefineAll = __w_pdfjs_require__(102);
+
+var meta = __w_pdfjs_require__(112);
+
+var forOf = __w_pdfjs_require__(93);
+
+var anInstance = __w_pdfjs_require__(92);
+
+var isObject = __w_pdfjs_require__(17);
+
+var fails = __w_pdfjs_require__(20);
+
+var $iterDetect = __w_pdfjs_require__(73);
+
+var setToStringTag = __w_pdfjs_require__(64);
+
+var inheritIfRequired = __w_pdfjs_require__(116);
+
+module.exports = function (NAME, wrapper, methods, common, IS_MAP, IS_WEAK) {
+ var Base = global[NAME];
+ var C = Base;
+ var ADDER = IS_MAP ? 'set' : 'add';
+ var proto = C && C.prototype;
+ var O = {};
+
+ var fixMethod = function fixMethod(KEY) {
+ var fn = proto[KEY];
+ redefine(proto, KEY, KEY == 'delete' ? function (a) {
+ return IS_WEAK && !isObject(a) ? false : fn.call(this, a === 0 ? 0 : a);
+ } : KEY == 'has' ? function has(a) {
+ return IS_WEAK && !isObject(a) ? false : fn.call(this, a === 0 ? 0 : a);
+ } : KEY == 'get' ? function get(a) {
+ return IS_WEAK && !isObject(a) ? undefined : fn.call(this, a === 0 ? 0 : a);
+ } : KEY == 'add' ? function add(a) {
+ fn.call(this, a === 0 ? 0 : a);
+ return this;
+ } : function set(a, b) {
+ fn.call(this, a === 0 ? 0 : a, b);
+ return this;
+ });
+ };
+
+ if (typeof C != 'function' || !(IS_WEAK || proto.forEach && !fails(function () {
+ new C().entries().next();
+ }))) {
+ C = common.getConstructor(wrapper, NAME, IS_MAP, ADDER);
+ redefineAll(C.prototype, methods);
+ meta.NEED = true;
+ } else {
+ var instance = new C();
+ var HASNT_CHAINING = instance[ADDER](IS_WEAK ? {} : -0, 1) != instance;
+ var THROWS_ON_PRIMITIVES = fails(function () {
+ instance.has(1);
+ });
+ var ACCEPT_ITERABLES = $iterDetect(function (iter) {
+ new C(iter);
+ });
+ var BUGGY_ZERO = !IS_WEAK && fails(function () {
+ var $instance = new C();
+ var index = 5;
+
+ while (index--) {
+ $instance[ADDER](index, index);
+ }
+
+ return !$instance.has(-0);
+ });
+
+ if (!ACCEPT_ITERABLES) {
+ C = wrapper(function (target, iterable) {
+ anInstance(target, C, NAME);
+ var that = inheritIfRequired(new Base(), target, C);
+ if (iterable != undefined) forOf(iterable, IS_MAP, that[ADDER], that);
+ return that;
+ });
+ C.prototype = proto;
+ proto.constructor = C;
+ }
+
+ if (THROWS_ON_PRIMITIVES || BUGGY_ZERO) {
+ fixMethod('delete');
+ fixMethod('has');
+ IS_MAP && fixMethod('get');
+ }
+
+ if (BUGGY_ZERO || HASNT_CHAINING) fixMethod(ADDER);
+ if (IS_WEAK && proto.clear) delete proto.clear;
+ }
+
+ setToStringTag(C, NAME);
+ O[NAME] = C;
+ $export($export.G + $export.W + $export.F * (C != Base), O);
+ if (!IS_WEAK) common.setStrong(C, NAME, IS_MAP);
+ return C;
+};
+
+/***/ }),
+/* 116 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(17);
+
+var setPrototypeOf = __w_pdfjs_require__(117).set;
+
+module.exports = function (that, target, C) {
+ var S = target.constructor;
+ var P;
+
+ if (S !== C && typeof S == 'function' && (P = S.prototype) !== C.prototype && isObject(P) && setPrototypeOf) {
+ setPrototypeOf(that, P);
+ }
+
+ return that;
+};
+
+/***/ }),
+/* 117 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var isObject = __w_pdfjs_require__(17);
+
+var anObject = __w_pdfjs_require__(16);
+
+var check = function check(O, proto) {
+ anObject(O);
+ if (!isObject(proto) && proto !== null) throw TypeError(proto + ": can't set as prototype!");
+};
+
+module.exports = {
+ set: Object.setPrototypeOf || ('__proto__' in {} ? function (test, buggy, set) {
+ try {
+ set = __w_pdfjs_require__(30)(Function.call, __w_pdfjs_require__(118).f(Object.prototype, '__proto__').set, 2);
+ set(test, []);
+ buggy = !(test instanceof Array);
+ } catch (e) {
+ buggy = true;
+ }
+
+ return function setPrototypeOf(O, proto) {
+ check(O, proto);
+ if (buggy) O.__proto__ = proto;else set(O, proto);
+ return O;
+ };
+ }({}, false) : undefined),
+ check: check
+};
+
+/***/ }),
+/* 118 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var pIE = __w_pdfjs_require__(78);
+
+var createDesc = __w_pdfjs_require__(23);
+
+var toIObject = __w_pdfjs_require__(47);
+
+var toPrimitive = __w_pdfjs_require__(22);
+
+var has = __w_pdfjs_require__(25);
+
+var IE8_DOM_DEFINE = __w_pdfjs_require__(18);
+
+var gOPD = Object.getOwnPropertyDescriptor;
+exports.f = __w_pdfjs_require__(19) ? gOPD : function getOwnPropertyDescriptor(O, P) {
+ O = toIObject(O);
+ P = toPrimitive(P, true);
+ if (IE8_DOM_DEFINE) try {
+ return gOPD(O, P);
+ } catch (e) {}
+ if (has(O, P)) return createDesc(!pIE.f.call(O, P), O[P]);
+};
+
+/***/ }),
+/* 119 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(120)('WeakMap');
+
+/***/ }),
+/* 120 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+module.exports = function (COLLECTION) {
+ $export($export.S, COLLECTION, {
+ of: function of() {
+ var length = arguments.length;
+ var A = new Array(length);
+
+ while (length--) {
+ A[length] = arguments[length];
+ }
+
+ return new this(A);
+ }
+ });
+};
+
+/***/ }),
+/* 121 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(122)('WeakMap');
+
+/***/ }),
+/* 122 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+var aFunction = __w_pdfjs_require__(31);
+
+var ctx = __w_pdfjs_require__(30);
+
+var forOf = __w_pdfjs_require__(93);
+
+module.exports = function (COLLECTION) {
+ $export($export.S, COLLECTION, {
+ from: function from(source) {
+ var mapFn = arguments[1];
+ var mapping, A, n, cb;
+ aFunction(this);
+ mapping = mapFn !== undefined;
+ if (mapping) aFunction(mapFn);
+ if (source == undefined) return new this();
+ A = [];
+
+ if (mapping) {
+ n = 0;
+ cb = ctx(mapFn, arguments[2], 2);
+ forOf(source, false, function (nextItem) {
+ A.push(cb(nextItem, n++));
+ });
+ } else {
+ forOf(source, false, A.push, A);
+ }
+
+ return new this(A);
+ }
+ });
+};
+
+/***/ }),
+/* 123 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(87);
+
+__w_pdfjs_require__(88);
+
+__w_pdfjs_require__(124);
+
+__w_pdfjs_require__(125);
+
+__w_pdfjs_require__(126);
+
+module.exports = __w_pdfjs_require__(13).WeakSet;
+
+/***/ }),
+/* 124 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var weak = __w_pdfjs_require__(113);
+
+var validate = __w_pdfjs_require__(114);
+
+var WEAK_SET = 'WeakSet';
+
+__w_pdfjs_require__(115)(WEAK_SET, function (get) {
+ return function WeakSet() {
+ return get(this, arguments.length > 0 ? arguments[0] : undefined);
+ };
+}, {
+ add: function add(value) {
+ return weak.def(validate(this, WEAK_SET), value, true);
+ }
+}, weak, false, true);
+
+/***/ }),
+/* 125 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(120)('WeakSet');
+
+/***/ }),
+/* 126 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(122)('WeakSet');
+
+/***/ }),
+/* 127 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(128);
+
+module.exports = __w_pdfjs_require__(13).String.codePointAt;
+
+/***/ }),
+/* 128 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+var $at = __w_pdfjs_require__(53)(false);
+
+$export($export.P, 'String', {
+ codePointAt: function codePointAt(pos) {
+ return $at(this, pos);
+ }
+});
+
+/***/ }),
+/* 129 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(130);
+
+module.exports = __w_pdfjs_require__(13).String.fromCodePoint;
+
+/***/ }),
+/* 130 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+var toAbsoluteIndex = __w_pdfjs_require__(49);
+
+var fromCharCode = String.fromCharCode;
+var $fromCodePoint = String.fromCodePoint;
+$export($export.S + $export.F * (!!$fromCodePoint && $fromCodePoint.length != 1), 'String', {
+ fromCodePoint: function fromCodePoint(x) {
+ var res = [];
+ var aLen = arguments.length;
+ var i = 0;
+ var code;
+
+ while (aLen > i) {
+ code = +arguments[i++];
+ if (toAbsoluteIndex(code, 0x10ffff) !== code) throw RangeError(code + ' is not a valid code point');
+ res.push(code < 0x10000 ? fromCharCode(code) : fromCharCode(((code -= 0x10000) >> 10) + 0xd800, code % 0x400 + 0xdc00));
+ }
+
+ return res.join('');
+ }
+});
+
+/***/ }),
+/* 131 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(132);
+
+__w_pdfjs_require__(87);
+
+module.exports = __w_pdfjs_require__(13).Symbol;
+
+/***/ }),
+/* 132 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var global = __w_pdfjs_require__(12);
+
+var has = __w_pdfjs_require__(25);
+
+var DESCRIPTORS = __w_pdfjs_require__(19);
+
+var $export = __w_pdfjs_require__(11);
+
+var redefine = __w_pdfjs_require__(24);
+
+var META = __w_pdfjs_require__(112).KEY;
+
+var $fails = __w_pdfjs_require__(20);
+
+var shared = __w_pdfjs_require__(28);
+
+var setToStringTag = __w_pdfjs_require__(64);
+
+var uid = __w_pdfjs_require__(26);
+
+var wks = __w_pdfjs_require__(37);
+
+var wksExt = __w_pdfjs_require__(133);
+
+var wksDefine = __w_pdfjs_require__(134);
+
+var enumKeys = __w_pdfjs_require__(135);
+
+var isArray = __w_pdfjs_require__(111);
+
+var anObject = __w_pdfjs_require__(16);
+
+var isObject = __w_pdfjs_require__(17);
+
+var toObject = __w_pdfjs_require__(66);
+
+var toIObject = __w_pdfjs_require__(47);
+
+var toPrimitive = __w_pdfjs_require__(22);
+
+var createDesc = __w_pdfjs_require__(23);
+
+var _create = __w_pdfjs_require__(57);
+
+var gOPNExt = __w_pdfjs_require__(136);
+
+var $GOPD = __w_pdfjs_require__(118);
+
+var $GOPS = __w_pdfjs_require__(77);
+
+var $DP = __w_pdfjs_require__(15);
+
+var $keys = __w_pdfjs_require__(59);
+
+var gOPD = $GOPD.f;
+var dP = $DP.f;
+var gOPN = gOPNExt.f;
+var $Symbol = global.Symbol;
+var $JSON = global.JSON;
+
+var _stringify = $JSON && $JSON.stringify;
+
+var PROTOTYPE = 'prototype';
+var HIDDEN = wks('_hidden');
+var TO_PRIMITIVE = wks('toPrimitive');
+var isEnum = {}.propertyIsEnumerable;
+var SymbolRegistry = shared('symbol-registry');
+var AllSymbols = shared('symbols');
+var OPSymbols = shared('op-symbols');
+var ObjectProto = Object[PROTOTYPE];
+var USE_NATIVE = typeof $Symbol == 'function' && !!$GOPS.f;
+var QObject = global.QObject;
+var setter = !QObject || !QObject[PROTOTYPE] || !QObject[PROTOTYPE].findChild;
+var setSymbolDesc = DESCRIPTORS && $fails(function () {
+ return _create(dP({}, 'a', {
+ get: function get() {
+ return dP(this, 'a', {
+ value: 7
+ }).a;
+ }
+ })).a != 7;
+}) ? function (it, key, D) {
+ var protoDesc = gOPD(ObjectProto, key);
+ if (protoDesc) delete ObjectProto[key];
+ dP(it, key, D);
+ if (protoDesc && it !== ObjectProto) dP(ObjectProto, key, protoDesc);
+} : dP;
+
+var wrap = function wrap(tag) {
+ var sym = AllSymbols[tag] = _create($Symbol[PROTOTYPE]);
+
+ sym._k = tag;
+ return sym;
+};
+
+var isSymbol = USE_NATIVE && _typeof($Symbol.iterator) == 'symbol' ? function (it) {
+ return _typeof(it) == 'symbol';
+} : function (it) {
+ return it instanceof $Symbol;
+};
+
+var $defineProperty = function defineProperty(it, key, D) {
+ if (it === ObjectProto) $defineProperty(OPSymbols, key, D);
+ anObject(it);
+ key = toPrimitive(key, true);
+ anObject(D);
+
+ if (has(AllSymbols, key)) {
+ if (!D.enumerable) {
+ if (!has(it, HIDDEN)) dP(it, HIDDEN, createDesc(1, {}));
+ it[HIDDEN][key] = true;
+ } else {
+ if (has(it, HIDDEN) && it[HIDDEN][key]) it[HIDDEN][key] = false;
+ D = _create(D, {
+ enumerable: createDesc(0, false)
+ });
+ }
+
+ return setSymbolDesc(it, key, D);
+ }
+
+ return dP(it, key, D);
+};
+
+var $defineProperties = function defineProperties(it, P) {
+ anObject(it);
+ var keys = enumKeys(P = toIObject(P));
+ var i = 0;
+ var l = keys.length;
+ var key;
+
+ while (l > i) {
+ $defineProperty(it, key = keys[i++], P[key]);
+ }
+
+ return it;
+};
+
+var $create = function create(it, P) {
+ return P === undefined ? _create(it) : $defineProperties(_create(it), P);
+};
+
+var $propertyIsEnumerable = function propertyIsEnumerable(key) {
+ var E = isEnum.call(this, key = toPrimitive(key, true));
+ if (this === ObjectProto && has(AllSymbols, key) && !has(OPSymbols, key)) return false;
+ return E || !has(this, key) || !has(AllSymbols, key) || has(this, HIDDEN) && this[HIDDEN][key] ? E : true;
+};
+
+var $getOwnPropertyDescriptor = function getOwnPropertyDescriptor(it, key) {
+ it = toIObject(it);
+ key = toPrimitive(key, true);
+ if (it === ObjectProto && has(AllSymbols, key) && !has(OPSymbols, key)) return;
+ var D = gOPD(it, key);
+ if (D && has(AllSymbols, key) && !(has(it, HIDDEN) && it[HIDDEN][key])) D.enumerable = true;
+ return D;
+};
+
+var $getOwnPropertyNames = function getOwnPropertyNames(it) {
+ var names = gOPN(toIObject(it));
+ var result = [];
+ var i = 0;
+ var key;
+
+ while (names.length > i) {
+ if (!has(AllSymbols, key = names[i++]) && key != HIDDEN && key != META) result.push(key);
+ }
+
+ return result;
+};
+
+var $getOwnPropertySymbols = function getOwnPropertySymbols(it) {
+ var IS_OP = it === ObjectProto;
+ var names = gOPN(IS_OP ? OPSymbols : toIObject(it));
+ var result = [];
+ var i = 0;
+ var key;
+
+ while (names.length > i) {
+ if (has(AllSymbols, key = names[i++]) && (IS_OP ? has(ObjectProto, key) : true)) result.push(AllSymbols[key]);
+ }
+
+ return result;
+};
+
+if (!USE_NATIVE) {
+ $Symbol = function _Symbol() {
+ if (this instanceof $Symbol) throw TypeError('Symbol is not a constructor!');
+ var tag = uid(arguments.length > 0 ? arguments[0] : undefined);
+
+ var $set = function $set(value) {
+ if (this === ObjectProto) $set.call(OPSymbols, value);
+ if (has(this, HIDDEN) && has(this[HIDDEN], tag)) this[HIDDEN][tag] = false;
+ setSymbolDesc(this, tag, createDesc(1, value));
+ };
+
+ if (DESCRIPTORS && setter) setSymbolDesc(ObjectProto, tag, {
+ configurable: true,
+ set: $set
+ });
+ return wrap(tag);
+ };
+
+ redefine($Symbol[PROTOTYPE], 'toString', function toString() {
+ return this._k;
+ });
+ $GOPD.f = $getOwnPropertyDescriptor;
+ $DP.f = $defineProperty;
+ __w_pdfjs_require__(137).f = gOPNExt.f = $getOwnPropertyNames;
+ __w_pdfjs_require__(78).f = $propertyIsEnumerable;
+ $GOPS.f = $getOwnPropertySymbols;
+
+ if (DESCRIPTORS && !__w_pdfjs_require__(29)) {
+ redefine(ObjectProto, 'propertyIsEnumerable', $propertyIsEnumerable, true);
+ }
+
+ wksExt.f = function (name) {
+ return wrap(wks(name));
+ };
+}
+
+$export($export.G + $export.W + $export.F * !USE_NATIVE, {
+ Symbol: $Symbol
+});
+
+for (var es6Symbols = 'hasInstance,isConcatSpreadable,iterator,match,replace,search,species,split,toPrimitive,toStringTag,unscopables'.split(','), j = 0; es6Symbols.length > j;) {
+ wks(es6Symbols[j++]);
+}
+
+for (var wellKnownSymbols = $keys(wks.store), k = 0; wellKnownSymbols.length > k;) {
+ wksDefine(wellKnownSymbols[k++]);
+}
+
+$export($export.S + $export.F * !USE_NATIVE, 'Symbol', {
+ 'for': function _for(key) {
+ return has(SymbolRegistry, key += '') ? SymbolRegistry[key] : SymbolRegistry[key] = $Symbol(key);
+ },
+ keyFor: function keyFor(sym) {
+ if (!isSymbol(sym)) throw TypeError(sym + ' is not a symbol!');
+
+ for (var key in SymbolRegistry) {
+ if (SymbolRegistry[key] === sym) return key;
+ }
+ },
+ useSetter: function useSetter() {
+ setter = true;
+ },
+ useSimple: function useSimple() {
+ setter = false;
+ }
+});
+$export($export.S + $export.F * !USE_NATIVE, 'Object', {
+ create: $create,
+ defineProperty: $defineProperty,
+ defineProperties: $defineProperties,
+ getOwnPropertyDescriptor: $getOwnPropertyDescriptor,
+ getOwnPropertyNames: $getOwnPropertyNames,
+ getOwnPropertySymbols: $getOwnPropertySymbols
+});
+var FAILS_ON_PRIMITIVES = $fails(function () {
+ $GOPS.f(1);
+});
+$export($export.S + $export.F * FAILS_ON_PRIMITIVES, 'Object', {
+ getOwnPropertySymbols: function getOwnPropertySymbols(it) {
+ return $GOPS.f(toObject(it));
+ }
+});
+$JSON && $export($export.S + $export.F * (!USE_NATIVE || $fails(function () {
+ var S = $Symbol();
+ return _stringify([S]) != '[null]' || _stringify({
+ a: S
+ }) != '{}' || _stringify(Object(S)) != '{}';
+})), 'JSON', {
+ stringify: function stringify(it) {
+ var args = [it];
+ var i = 1;
+ var replacer, $replacer;
+
+ while (arguments.length > i) {
+ args.push(arguments[i++]);
+ }
+
+ $replacer = replacer = args[1];
+ if (!isObject(replacer) && it === undefined || isSymbol(it)) return;
+ if (!isArray(replacer)) replacer = function replacer(key, value) {
+ if (typeof $replacer == 'function') value = $replacer.call(this, key, value);
+ if (!isSymbol(value)) return value;
+ };
+ args[1] = replacer;
+ return _stringify.apply($JSON, args);
+ }
+});
+$Symbol[PROTOTYPE][TO_PRIMITIVE] || __w_pdfjs_require__(14)($Symbol[PROTOTYPE], TO_PRIMITIVE, $Symbol[PROTOTYPE].valueOf);
+setToStringTag($Symbol, 'Symbol');
+setToStringTag(Math, 'Math', true);
+setToStringTag(global.JSON, 'JSON', true);
+
+/***/ }),
+/* 133 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+exports.f = __w_pdfjs_require__(37);
+
+/***/ }),
+/* 134 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var global = __w_pdfjs_require__(12);
+
+var core = __w_pdfjs_require__(13);
+
+var LIBRARY = __w_pdfjs_require__(29);
+
+var wksExt = __w_pdfjs_require__(133);
+
+var defineProperty = __w_pdfjs_require__(15).f;
+
+module.exports = function (name) {
+ var $Symbol = core.Symbol || (core.Symbol = LIBRARY ? {} : global.Symbol || {});
+ if (name.charAt(0) != '_' && !(name in $Symbol)) defineProperty($Symbol, name, {
+ value: wksExt.f(name)
+ });
+};
+
+/***/ }),
+/* 135 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var getKeys = __w_pdfjs_require__(59);
+
+var gOPS = __w_pdfjs_require__(77);
+
+var pIE = __w_pdfjs_require__(78);
+
+module.exports = function (it) {
+ var result = getKeys(it);
+ var getSymbols = gOPS.f;
+
+ if (getSymbols) {
+ var symbols = getSymbols(it);
+ var isEnum = pIE.f;
+ var i = 0;
+ var key;
+
+ while (symbols.length > i) {
+ if (isEnum.call(it, key = symbols[i++])) result.push(key);
+ }
+ }
+
+ return result;
+};
+
+/***/ }),
+/* 136 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var toIObject = __w_pdfjs_require__(47);
+
+var gOPN = __w_pdfjs_require__(137).f;
+
+var toString = {}.toString;
+var windowNames = (typeof window === "undefined" ? "undefined" : _typeof(window)) == 'object' && window && Object.getOwnPropertyNames ? Object.getOwnPropertyNames(window) : [];
+
+var getWindowNames = function getWindowNames(it) {
+ try {
+ return gOPN(it);
+ } catch (e) {
+ return windowNames.slice();
+ }
+};
+
+module.exports.f = function getOwnPropertyNames(it) {
+ return windowNames && toString.call(it) == '[object Window]' ? getWindowNames(it) : gOPN(toIObject(it));
+};
+
+/***/ }),
+/* 137 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $keys = __w_pdfjs_require__(60);
+
+var hiddenKeys = __w_pdfjs_require__(62).concat('length', 'prototype');
+
+exports.f = Object.getOwnPropertyNames || function getOwnPropertyNames(O) {
+ return $keys(O, hiddenKeys);
+};
+
+/***/ }),
+/* 138 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(139);
+
+module.exports = __w_pdfjs_require__(13).String.padStart;
+
+/***/ }),
+/* 139 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+var $pad = __w_pdfjs_require__(140);
+
+var userAgent = __w_pdfjs_require__(100);
+
+var WEBKIT_BUG = /Version\/10\.\d+(\.\d+)?( Mobile\/\w+)? Safari\//.test(userAgent);
+$export($export.P + $export.F * WEBKIT_BUG, 'String', {
+ padStart: function padStart(maxLength) {
+ return $pad(this, maxLength, arguments.length > 1 ? arguments[1] : undefined, true);
+ }
+});
+
+/***/ }),
+/* 140 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toLength = __w_pdfjs_require__(32);
+
+var repeat = __w_pdfjs_require__(141);
+
+var defined = __w_pdfjs_require__(38);
+
+module.exports = function (that, maxLength, fillString, left) {
+ var S = String(defined(that));
+ var stringLength = S.length;
+ var fillStr = fillString === undefined ? ' ' : String(fillString);
+ var intMaxLength = toLength(maxLength);
+ if (intMaxLength <= stringLength || fillStr == '') return S;
+ var fillLen = intMaxLength - stringLength;
+ var stringFiller = repeat.call(fillStr, Math.ceil(fillLen / fillStr.length));
+ if (stringFiller.length > fillLen) stringFiller = stringFiller.slice(0, fillLen);
+ return left ? stringFiller + S : S + stringFiller;
+};
+
+/***/ }),
+/* 141 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var toInteger = __w_pdfjs_require__(33);
+
+var defined = __w_pdfjs_require__(38);
+
+module.exports = function repeat(count) {
+ var str = String(defined(this));
+ var res = '';
+ var n = toInteger(count);
+ if (n < 0 || n == Infinity) throw RangeError("Count can't be negative");
+
+ for (; n > 0; (n >>>= 1) && (str += str)) {
+ if (n & 1) res += str;
+ }
+
+ return res;
+};
+
+/***/ }),
+/* 142 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(143);
+
+module.exports = __w_pdfjs_require__(13).String.padEnd;
+
+/***/ }),
+/* 143 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+var $pad = __w_pdfjs_require__(140);
+
+var userAgent = __w_pdfjs_require__(100);
+
+var WEBKIT_BUG = /Version\/10\.\d+(\.\d+)?( Mobile\/\w+)? Safari\//.test(userAgent);
+$export($export.P + $export.F * WEBKIT_BUG, 'String', {
+ padEnd: function padEnd(maxLength) {
+ return $pad(this, maxLength, arguments.length > 1 ? arguments[1] : undefined, false);
+ }
+});
+
+/***/ }),
+/* 144 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+__w_pdfjs_require__(145);
+
+module.exports = __w_pdfjs_require__(13).Object.values;
+
+/***/ }),
+/* 145 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var $export = __w_pdfjs_require__(11);
+
+var $values = __w_pdfjs_require__(146)(false);
+
+$export($export.S, 'Object', {
+ values: function values(it) {
+ return $values(it);
+ }
+});
+
+/***/ }),
+/* 146 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+var DESCRIPTORS = __w_pdfjs_require__(19);
+
+var getKeys = __w_pdfjs_require__(59);
+
+var toIObject = __w_pdfjs_require__(47);
+
+var isEnum = __w_pdfjs_require__(78).f;
+
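+// Factory shared by Object.values/Object.entries: walks the object's own keys and
+// pushes either [key, value] pairs or bare values; with descriptor support it
+// re-checks enumerability via isEnum before including a key.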
+module.exports = function (isEntries) {
+ return function (it) {
+ var O = toIObject(it);
+ var keys = getKeys(O);
+ var length = keys.length;
+ var i = 0;
+ var result = [];
+ var key;
+
+ while (length > i) {
+ key = keys[i++];
+
+ if (!DESCRIPTORS || isEnum.call(O, key)) {
+ result.push(isEntries ? [key, O[key]] : O[key]);
+ }
+ }
+
+ return result;
+ };
+};
+
+/***/ }),
+/* 147 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
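+// Feature-detect a usable native ReadableStream by constructing one; if that throws
+// or the global is missing, fall back to the bundled polyfill in module 148.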
+{
+ var isReadableStreamSupported = false;
+
+ if (typeof ReadableStream !== 'undefined') {
+ try {
+ new ReadableStream({
+ start: function start(controller) {
+ controller.close();
+ }
+ });
+ isReadableStreamSupported = true;
+ } catch (e) {}
+ }
+
+ if (isReadableStreamSupported) {
+ exports.ReadableStream = ReadableStream;
+ } else {
+ exports.ReadableStream = __w_pdfjs_require__(148).ReadableStream;
+ }
+}
+
+/***/ }),
+/* 148 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof2(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof2 = function _typeof2(obj) { return typeof obj; }; } else { _typeof2 = function _typeof2(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof2(obj); }
+
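+// The streams polyfill below is itself a small webpack bundle: the IIFE runs its own
+// module loader and the wrapper copies the resulting exports onto this module's exports.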
+(function (e, a) {
+ for (var i in a) {
+ e[i] = a[i];
+ }
+})(exports, function (modules) {
+ var installedModules = {};
+
+ function __w_pdfjs_require__(moduleId) {
+ if (installedModules[moduleId]) return installedModules[moduleId].exports;
+ var module = installedModules[moduleId] = {
+ i: moduleId,
+ l: false,
+ exports: {}
+ };
+ modules[moduleId].call(module.exports, module, module.exports, __w_pdfjs_require__);
+ module.l = true;
+ return module.exports;
+ }
+
+ __w_pdfjs_require__.m = modules;
+ __w_pdfjs_require__.c = installedModules;
+
+ __w_pdfjs_require__.i = function (value) {
+ return value;
+ };
+
+ __w_pdfjs_require__.d = function (exports, name, getter) {
+ if (!__w_pdfjs_require__.o(exports, name)) {
+ Object.defineProperty(exports, name, {
+ configurable: false,
+ enumerable: true,
+ get: getter
+ });
+ }
+ };
+
+ __w_pdfjs_require__.n = function (module) {
+ var getter = module && module.__esModule ? function getDefault() {
+ return module['default'];
+ } : function getModuleExports() {
+ return module;
+ };
+
+ __w_pdfjs_require__.d(getter, 'a', getter);
+
+ return getter;
+ };
+
+ __w_pdfjs_require__.o = function (object, property) {
+ return Object.prototype.hasOwnProperty.call(object, property);
+ };
+
+ __w_pdfjs_require__.p = "";
+ return __w_pdfjs_require__(__w_pdfjs_require__.s = 7);
+}([function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
+ var _typeof = typeof Symbol === "function" && _typeof2(Symbol.iterator) === "symbol" ? function (obj) {
+ return _typeof2(obj);
+ } : function (obj) {
+ return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : _typeof2(obj);
+ };
+
+ var _require = __w_pdfjs_require__(1),
+ assert = _require.assert;
+
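+ // Helpers mirroring abstract operations from the WHATWG Streams spec: type checks,
+ // IsFiniteNonNegativeNumber, InvokeOrNoop/PromiseInvokeOrNoop, ArrayBuffer copying,
+ // and queuing-strategy validation.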
+ function IsPropertyKey(argument) {
+ return typeof argument === 'string' || (typeof argument === 'undefined' ? 'undefined' : _typeof(argument)) === 'symbol';
+ }
+
+ exports.typeIsObject = function (x) {
+ return (typeof x === 'undefined' ? 'undefined' : _typeof(x)) === 'object' && x !== null || typeof x === 'function';
+ };
+
+ exports.createDataProperty = function (o, p, v) {
+ assert(exports.typeIsObject(o));
+ Object.defineProperty(o, p, {
+ value: v,
+ writable: true,
+ enumerable: true,
+ configurable: true
+ });
+ };
+
+ exports.createArrayFromList = function (elements) {
+ return elements.slice();
+ };
+
+ exports.ArrayBufferCopy = function (dest, destOffset, src, srcOffset, n) {
+ new Uint8Array(dest).set(new Uint8Array(src, srcOffset, n), destOffset);
+ };
+
+ exports.CreateIterResultObject = function (value, done) {
+ assert(typeof done === 'boolean');
+ var obj = {};
+ Object.defineProperty(obj, 'value', {
+ value: value,
+ enumerable: true,
+ writable: true,
+ configurable: true
+ });
+ Object.defineProperty(obj, 'done', {
+ value: done,
+ enumerable: true,
+ writable: true,
+ configurable: true
+ });
+ return obj;
+ };
+
+ exports.IsFiniteNonNegativeNumber = function (v) {
+ if (Number.isNaN(v)) {
+ return false;
+ }
+
+ if (v === Infinity) {
+ return false;
+ }
+
+ if (v < 0) {
+ return false;
+ }
+
+ return true;
+ };
+
+ function Call(F, V, args) {
+ if (typeof F !== 'function') {
+ throw new TypeError('Argument is not a function');
+ }
+
+ return Function.prototype.apply.call(F, V, args);
+ }
+
+ exports.InvokeOrNoop = function (O, P, args) {
+ assert(O !== undefined);
+ assert(IsPropertyKey(P));
+ assert(Array.isArray(args));
+ var method = O[P];
+
+ if (method === undefined) {
+ return undefined;
+ }
+
+ return Call(method, O, args);
+ };
+
+ exports.PromiseInvokeOrNoop = function (O, P, args) {
+ assert(O !== undefined);
+ assert(IsPropertyKey(P));
+ assert(Array.isArray(args));
+
+ try {
+ return Promise.resolve(exports.InvokeOrNoop(O, P, args));
+ } catch (returnValueE) {
+ return Promise.reject(returnValueE);
+ }
+ };
+
+ exports.PromiseInvokeOrPerformFallback = function (O, P, args, F, argsF) {
+ assert(O !== undefined);
+ assert(IsPropertyKey(P));
+ assert(Array.isArray(args));
+ assert(Array.isArray(argsF));
+ var method = void 0;
+
+ try {
+ method = O[P];
+ } catch (methodE) {
+ return Promise.reject(methodE);
+ }
+
+ if (method === undefined) {
+ return F.apply(null, argsF);
+ }
+
+ try {
+ return Promise.resolve(Call(method, O, args));
+ } catch (e) {
+ return Promise.reject(e);
+ }
+ };
+
+ exports.TransferArrayBuffer = function (O) {
+ return O.slice();
+ };
+
+ exports.ValidateAndNormalizeHighWaterMark = function (highWaterMark) {
+ highWaterMark = Number(highWaterMark);
+
+ if (Number.isNaN(highWaterMark) || highWaterMark < 0) {
+ throw new RangeError('highWaterMark property of a queuing strategy must be non-negative and non-NaN');
+ }
+
+ return highWaterMark;
+ };
+
+ exports.ValidateAndNormalizeQueuingStrategy = function (size, highWaterMark) {
+ if (size !== undefined && typeof size !== 'function') {
+ throw new TypeError('size property of a queuing strategy must be a function');
+ }
+
+ highWaterMark = exports.ValidateAndNormalizeHighWaterMark(highWaterMark);
+ return {
+ size: size,
+ highWaterMark: highWaterMark
+ };
+ };
+}, function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
+ function rethrowAssertionErrorRejection(e) {
+ if (e && e.constructor === AssertionError) {
+ setTimeout(function () {
+ throw e;
+ }, 0);
+ }
+ }
+
+ function AssertionError(message) {
+ this.name = 'AssertionError';
+ this.message = message || '';
+ this.stack = new Error().stack;
+ }
+
+ AssertionError.prototype = Object.create(Error.prototype);
+ AssertionError.prototype.constructor = AssertionError;
+
+ function assert(value, message) {
+ if (!value) {
+ throw new AssertionError(message);
+ }
+ }
+
+ module.exports = {
+ rethrowAssertionErrorRejection: rethrowAssertionErrorRejection,
+ AssertionError: AssertionError,
+ assert: assert
+ };
+}, function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
+ var _createClass = function () {
+ function defineProperties(target, props) {
+ for (var i = 0; i < props.length; i++) {
+ var descriptor = props[i];
+ descriptor.enumerable = descriptor.enumerable || false;
+ descriptor.configurable = true;
+ if ("value" in descriptor) descriptor.writable = true;
+ Object.defineProperty(target, descriptor.key, descriptor);
+ }
+ }
+
+ return function (Constructor, protoProps, staticProps) {
+ if (protoProps) defineProperties(Constructor.prototype, protoProps);
+ if (staticProps) defineProperties(Constructor, staticProps);
+ return Constructor;
+ };
+ }();
+
+ function _classCallCheck(instance, Constructor) {
+ if (!(instance instanceof Constructor)) {
+ throw new TypeError("Cannot call a class as a function");
+ }
+ }
+
+ var _require = __w_pdfjs_require__(0),
+ InvokeOrNoop = _require.InvokeOrNoop,
+ PromiseInvokeOrNoop = _require.PromiseInvokeOrNoop,
+ ValidateAndNormalizeQueuingStrategy = _require.ValidateAndNormalizeQueuingStrategy,
+ typeIsObject = _require.typeIsObject;
+
+ var _require2 = __w_pdfjs_require__(1),
+ assert = _require2.assert,
+ rethrowAssertionErrorRejection = _require2.rethrowAssertionErrorRejection;
+
+ var _require3 = __w_pdfjs_require__(3),
+ DequeueValue = _require3.DequeueValue,
+ EnqueueValueWithSize = _require3.EnqueueValueWithSize,
+ PeekQueueValue = _require3.PeekQueueValue,
+ ResetQueue = _require3.ResetQueue;
+
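+ // WritableStream implementation following the spec's state machine: 'writable' ->
+ // 'erroring' -> 'errored', or 'writable' -> 'closed', with pending write/close/abort
+ // requests tracked on the stream object.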
+ var WritableStream = function () {
+ function WritableStream() {
+ var underlyingSink = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
+
+ var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ size = _ref.size,
+ _ref$highWaterMark = _ref.highWaterMark,
+ highWaterMark = _ref$highWaterMark === undefined ? 1 : _ref$highWaterMark;
+
+ _classCallCheck(this, WritableStream);
+
+ this._state = 'writable';
+ this._storedError = undefined;
+ this._writer = undefined;
+ this._writableStreamController = undefined;
+ this._writeRequests = [];
+ this._inFlightWriteRequest = undefined;
+ this._closeRequest = undefined;
+ this._inFlightCloseRequest = undefined;
+ this._pendingAbortRequest = undefined;
+ this._backpressure = false;
+ var type = underlyingSink.type;
+
+ if (type !== undefined) {
+ throw new RangeError('Invalid type is specified');
+ }
+
+ this._writableStreamController = new WritableStreamDefaultController(this, underlyingSink, size, highWaterMark);
+
+ this._writableStreamController.__startSteps();
+ }
+
+ _createClass(WritableStream, [{
+ key: 'abort',
+ value: function abort(reason) {
+ if (IsWritableStream(this) === false) {
+ return Promise.reject(streamBrandCheckException('abort'));
+ }
+
+ if (IsWritableStreamLocked(this) === true) {
+ return Promise.reject(new TypeError('Cannot abort a stream that already has a writer'));
+ }
+
+ return WritableStreamAbort(this, reason);
+ }
+ }, {
+ key: 'getWriter',
+ value: function getWriter() {
+ if (IsWritableStream(this) === false) {
+ throw streamBrandCheckException('getWriter');
+ }
+
+ return AcquireWritableStreamDefaultWriter(this);
+ }
+ }, {
+ key: 'locked',
+ get: function get() {
+ if (IsWritableStream(this) === false) {
+ throw streamBrandCheckException('locked');
+ }
+
+ return IsWritableStreamLocked(this);
+ }
+ }]);
+
+ return WritableStream;
+ }();
+
+ module.exports = {
+ AcquireWritableStreamDefaultWriter: AcquireWritableStreamDefaultWriter,
+ IsWritableStream: IsWritableStream,
+ IsWritableStreamLocked: IsWritableStreamLocked,
+ WritableStream: WritableStream,
+ WritableStreamAbort: WritableStreamAbort,
+ WritableStreamDefaultControllerError: WritableStreamDefaultControllerError,
+ WritableStreamDefaultWriterCloseWithErrorPropagation: WritableStreamDefaultWriterCloseWithErrorPropagation,
+ WritableStreamDefaultWriterRelease: WritableStreamDefaultWriterRelease,
+ WritableStreamDefaultWriterWrite: WritableStreamDefaultWriterWrite,
+ WritableStreamCloseQueuedOrInFlight: WritableStreamCloseQueuedOrInFlight
+ };
+
+ function AcquireWritableStreamDefaultWriter(stream) {
+ return new WritableStreamDefaultWriter(stream);
+ }
+
+ function IsWritableStream(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_writableStreamController')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function IsWritableStreamLocked(stream) {
+ assert(IsWritableStream(stream) === true, 'IsWritableStreamLocked should only be used on known writable streams');
+
+ if (stream._writer === undefined) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function WritableStreamAbort(stream, reason) {
+ var state = stream._state;
+
+ if (state === 'closed') {
+ return Promise.resolve(undefined);
+ }
+
+ if (state === 'errored') {
+ return Promise.reject(stream._storedError);
+ }
+
+ var error = new TypeError('Requested to abort');
+
+ if (stream._pendingAbortRequest !== undefined) {
+ return Promise.reject(error);
+ }
+
+ assert(state === 'writable' || state === 'erroring', 'state must be writable or erroring');
+ var wasAlreadyErroring = false;
+
+ if (state === 'erroring') {
+ wasAlreadyErroring = true;
+ reason = undefined;
+ }
+
+ var promise = new Promise(function (resolve, reject) {
+ stream._pendingAbortRequest = {
+ _resolve: resolve,
+ _reject: reject,
+ _reason: reason,
+ _wasAlreadyErroring: wasAlreadyErroring
+ };
+ });
+
+ if (wasAlreadyErroring === false) {
+ WritableStreamStartErroring(stream, error);
+ }
+
+ return promise;
+ }
+
+ function WritableStreamAddWriteRequest(stream) {
+ assert(IsWritableStreamLocked(stream) === true);
+ assert(stream._state === 'writable');
+ var promise = new Promise(function (resolve, reject) {
+ var writeRequest = {
+ _resolve: resolve,
+ _reject: reject
+ };
+
+ stream._writeRequests.push(writeRequest);
+ });
+ return promise;
+ }
+
+ function WritableStreamDealWithRejection(stream, error) {
+ var state = stream._state;
+
+ if (state === 'writable') {
+ WritableStreamStartErroring(stream, error);
+ return;
+ }
+
+ assert(state === 'erroring');
+ WritableStreamFinishErroring(stream);
+ }
+
+ function WritableStreamStartErroring(stream, reason) {
+ assert(stream._storedError === undefined, 'stream._storedError === undefined');
+ assert(stream._state === 'writable', 'state must be writable');
+ var controller = stream._writableStreamController;
+ assert(controller !== undefined, 'controller must not be undefined');
+ stream._state = 'erroring';
+ stream._storedError = reason;
+ var writer = stream._writer;
+
+ if (writer !== undefined) {
+ WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason);
+ }
+
+ if (WritableStreamHasOperationMarkedInFlight(stream) === false && controller._started === true) {
+ WritableStreamFinishErroring(stream);
+ }
+ }
+
+ function WritableStreamFinishErroring(stream) {
+ assert(stream._state === 'erroring', 'stream._state === erroring');
+ assert(WritableStreamHasOperationMarkedInFlight(stream) === false, 'WritableStreamHasOperationMarkedInFlight(stream) === false');
+ stream._state = 'errored';
+
+ stream._writableStreamController.__errorSteps();
+
+ var storedError = stream._storedError;
+
+ for (var i = 0; i < stream._writeRequests.length; i++) {
+ var writeRequest = stream._writeRequests[i];
+
+ writeRequest._reject(storedError);
+ }
+
+ stream._writeRequests = [];
+
+ if (stream._pendingAbortRequest === undefined) {
+ WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+ return;
+ }
+
+ var abortRequest = stream._pendingAbortRequest;
+ stream._pendingAbortRequest = undefined;
+
+ if (abortRequest._wasAlreadyErroring === true) {
+ abortRequest._reject(storedError);
+
+ WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+ return;
+ }
+
+ var promise = stream._writableStreamController.__abortSteps(abortRequest._reason);
+
+ promise.then(function () {
+ abortRequest._resolve();
+
+ WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+ }, function (reason) {
+ abortRequest._reject(reason);
+
+ WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);
+ });
+ }
+
+ function WritableStreamFinishInFlightWrite(stream) {
+ assert(stream._inFlightWriteRequest !== undefined);
+
+ stream._inFlightWriteRequest._resolve(undefined);
+
+ stream._inFlightWriteRequest = undefined;
+ }
+
+ function WritableStreamFinishInFlightWriteWithError(stream, error) {
+ assert(stream._inFlightWriteRequest !== undefined);
+
+ stream._inFlightWriteRequest._reject(error);
+
+ stream._inFlightWriteRequest = undefined;
+ assert(stream._state === 'writable' || stream._state === 'erroring');
+ WritableStreamDealWithRejection(stream, error);
+ }
+
+ function WritableStreamFinishInFlightClose(stream) {
+ assert(stream._inFlightCloseRequest !== undefined);
+
+ stream._inFlightCloseRequest._resolve(undefined);
+
+ stream._inFlightCloseRequest = undefined;
+ var state = stream._state;
+ assert(state === 'writable' || state === 'erroring');
+
+ if (state === 'erroring') {
+ stream._storedError = undefined;
+
+ if (stream._pendingAbortRequest !== undefined) {
+ stream._pendingAbortRequest._resolve();
+
+ stream._pendingAbortRequest = undefined;
+ }
+ }
+
+ stream._state = 'closed';
+ var writer = stream._writer;
+
+ if (writer !== undefined) {
+ defaultWriterClosedPromiseResolve(writer);
+ }
+
+ assert(stream._pendingAbortRequest === undefined, 'stream._pendingAbortRequest === undefined');
+ assert(stream._storedError === undefined, 'stream._storedError === undefined');
+ }
+
+ function WritableStreamFinishInFlightCloseWithError(stream, error) {
+ assert(stream._inFlightCloseRequest !== undefined);
+
+ stream._inFlightCloseRequest._reject(error);
+
+ stream._inFlightCloseRequest = undefined;
+ assert(stream._state === 'writable' || stream._state === 'erroring');
+
+ if (stream._pendingAbortRequest !== undefined) {
+ stream._pendingAbortRequest._reject(error);
+
+ stream._pendingAbortRequest = undefined;
+ }
+
+ WritableStreamDealWithRejection(stream, error);
+ }
+
+ function WritableStreamCloseQueuedOrInFlight(stream) {
+ if (stream._closeRequest === undefined && stream._inFlightCloseRequest === undefined) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function WritableStreamHasOperationMarkedInFlight(stream) {
+ if (stream._inFlightWriteRequest === undefined && stream._inFlightCloseRequest === undefined) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function WritableStreamMarkCloseRequestInFlight(stream) {
+ assert(stream._inFlightCloseRequest === undefined);
+ assert(stream._closeRequest !== undefined);
+ stream._inFlightCloseRequest = stream._closeRequest;
+ stream._closeRequest = undefined;
+ }
+
+ function WritableStreamMarkFirstWriteRequestInFlight(stream) {
+ assert(stream._inFlightWriteRequest === undefined, 'there must be no pending write request');
+ assert(stream._writeRequests.length !== 0, 'writeRequests must not be empty');
+ stream._inFlightWriteRequest = stream._writeRequests.shift();
+ }
+
+ function WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream) {
+ assert(stream._state === 'errored', '_stream_.[[state]] is `"errored"`');
+
+ if (stream._closeRequest !== undefined) {
+ assert(stream._inFlightCloseRequest === undefined);
+
+ stream._closeRequest._reject(stream._storedError);
+
+ stream._closeRequest = undefined;
+ }
+
+ var writer = stream._writer;
+
+ if (writer !== undefined) {
+ defaultWriterClosedPromiseReject(writer, stream._storedError);
+
+ writer._closedPromise["catch"](function () {});
+ }
+ }
+
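+ // Backpressure toggles the writer's ready promise: pending while the queue is at or
+ // over the high-water mark, resolved once there is room again.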
+ function WritableStreamUpdateBackpressure(stream, backpressure) {
+ assert(stream._state === 'writable');
+ assert(WritableStreamCloseQueuedOrInFlight(stream) === false);
+ var writer = stream._writer;
+
+ if (writer !== undefined && backpressure !== stream._backpressure) {
+ if (backpressure === true) {
+ defaultWriterReadyPromiseReset(writer);
+ } else {
+ assert(backpressure === false);
+ defaultWriterReadyPromiseResolve(writer);
+ }
+ }
+
+ stream._backpressure = backpressure;
+ }
+
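+ // The writer grants exclusive access to a WritableStream; its ready/closed promises
+ // are initialized here according to the stream's current state.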
+ var WritableStreamDefaultWriter = function () {
+ function WritableStreamDefaultWriter(stream) {
+ _classCallCheck(this, WritableStreamDefaultWriter);
+
+ if (IsWritableStream(stream) === false) {
+ throw new TypeError('WritableStreamDefaultWriter can only be constructed with a WritableStream instance');
+ }
+
+ if (IsWritableStreamLocked(stream) === true) {
+ throw new TypeError('This stream has already been locked for exclusive writing by another writer');
+ }
+
+ this._ownerWritableStream = stream;
+ stream._writer = this;
+ var state = stream._state;
+
+ if (state === 'writable') {
+ if (WritableStreamCloseQueuedOrInFlight(stream) === false && stream._backpressure === true) {
+ defaultWriterReadyPromiseInitialize(this);
+ } else {
+ defaultWriterReadyPromiseInitializeAsResolved(this);
+ }
+
+ defaultWriterClosedPromiseInitialize(this);
+ } else if (state === 'erroring') {
+ defaultWriterReadyPromiseInitializeAsRejected(this, stream._storedError);
+
+ this._readyPromise["catch"](function () {});
+
+ defaultWriterClosedPromiseInitialize(this);
+ } else if (state === 'closed') {
+ defaultWriterReadyPromiseInitializeAsResolved(this);
+ defaultWriterClosedPromiseInitializeAsResolved(this);
+ } else {
+ assert(state === 'errored', 'state must be errored');
+ var storedError = stream._storedError;
+ defaultWriterReadyPromiseInitializeAsRejected(this, storedError);
+
+ this._readyPromise["catch"](function () {});
+
+ defaultWriterClosedPromiseInitializeAsRejected(this, storedError);
+
+ this._closedPromise["catch"](function () {});
+ }
+ }
+
+ _createClass(WritableStreamDefaultWriter, [{
+ key: 'abort',
+ value: function abort(reason) {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ return Promise.reject(defaultWriterBrandCheckException('abort'));
+ }
+
+ if (this._ownerWritableStream === undefined) {
+ return Promise.reject(defaultWriterLockException('abort'));
+ }
+
+ return WritableStreamDefaultWriterAbort(this, reason);
+ }
+ }, {
+ key: 'close',
+ value: function close() {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ return Promise.reject(defaultWriterBrandCheckException('close'));
+ }
+
+ var stream = this._ownerWritableStream;
+
+ if (stream === undefined) {
+ return Promise.reject(defaultWriterLockException('close'));
+ }
+
+ if (WritableStreamCloseQueuedOrInFlight(stream) === true) {
+ return Promise.reject(new TypeError('cannot close an already-closing stream'));
+ }
+
+ return WritableStreamDefaultWriterClose(this);
+ }
+ }, {
+ key: 'releaseLock',
+ value: function releaseLock() {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ throw defaultWriterBrandCheckException('releaseLock');
+ }
+
+ var stream = this._ownerWritableStream;
+
+ if (stream === undefined) {
+ return;
+ }
+
+ assert(stream._writer !== undefined);
+ WritableStreamDefaultWriterRelease(this);
+ }
+ }, {
+ key: 'write',
+ value: function write(chunk) {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ return Promise.reject(defaultWriterBrandCheckException('write'));
+ }
+
+ if (this._ownerWritableStream === undefined) {
+ return Promise.reject(defaultWriterLockException('write to'));
+ }
+
+ return WritableStreamDefaultWriterWrite(this, chunk);
+ }
+ }, {
+ key: 'closed',
+ get: function get() {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ return Promise.reject(defaultWriterBrandCheckException('closed'));
+ }
+
+ return this._closedPromise;
+ }
+ }, {
+ key: 'desiredSize',
+ get: function get() {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ throw defaultWriterBrandCheckException('desiredSize');
+ }
+
+ if (this._ownerWritableStream === undefined) {
+ throw defaultWriterLockException('desiredSize');
+ }
+
+ return WritableStreamDefaultWriterGetDesiredSize(this);
+ }
+ }, {
+ key: 'ready',
+ get: function get() {
+ if (IsWritableStreamDefaultWriter(this) === false) {
+ return Promise.reject(defaultWriterBrandCheckException('ready'));
+ }
+
+ return this._readyPromise;
+ }
+ }]);
+
+ return WritableStreamDefaultWriter;
+ }();
+
+ function IsWritableStreamDefaultWriter(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_ownerWritableStream')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function WritableStreamDefaultWriterAbort(writer, reason) {
+ var stream = writer._ownerWritableStream;
+ assert(stream !== undefined);
+ return WritableStreamAbort(stream, reason);
+ }
+
+ function WritableStreamDefaultWriterClose(writer) {
+ var stream = writer._ownerWritableStream;
+ assert(stream !== undefined);
+ var state = stream._state;
+
+ if (state === 'closed' || state === 'errored') {
+ return Promise.reject(new TypeError('The stream (in ' + state + ' state) is not in the writable state and cannot be closed'));
+ }
+
+ assert(state === 'writable' || state === 'erroring');
+ assert(WritableStreamCloseQueuedOrInFlight(stream) === false);
+ var promise = new Promise(function (resolve, reject) {
+ var closeRequest = {
+ _resolve: resolve,
+ _reject: reject
+ };
+ stream._closeRequest = closeRequest;
+ });
+
+ if (stream._backpressure === true && state === 'writable') {
+ defaultWriterReadyPromiseResolve(writer);
+ }
+
+ WritableStreamDefaultControllerClose(stream._writableStreamController);
+ return promise;
+ }
+
+ function WritableStreamDefaultWriterCloseWithErrorPropagation(writer) {
+ var stream = writer._ownerWritableStream;
+ assert(stream !== undefined);
+ var state = stream._state;
+
+ if (WritableStreamCloseQueuedOrInFlight(stream) === true || state === 'closed') {
+ return Promise.resolve();
+ }
+
+ if (state === 'errored') {
+ return Promise.reject(stream._storedError);
+ }
+
+ assert(state === 'writable' || state === 'erroring');
+ return WritableStreamDefaultWriterClose(writer);
+ }
+
+ function WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) {
+ if (writer._closedPromiseState === 'pending') {
+ defaultWriterClosedPromiseReject(writer, error);
+ } else {
+ defaultWriterClosedPromiseResetToRejected(writer, error);
+ }
+
+ writer._closedPromise["catch"](function () {});
+ }
+
+ function WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) {
+ if (writer._readyPromiseState === 'pending') {
+ defaultWriterReadyPromiseReject(writer, error);
+ } else {
+ defaultWriterReadyPromiseResetToRejected(writer, error);
+ }
+
+ writer._readyPromise["catch"](function () {});
+ }
+
+ function WritableStreamDefaultWriterGetDesiredSize(writer) {
+ var stream = writer._ownerWritableStream;
+ var state = stream._state;
+
+ if (state === 'errored' || state === 'erroring') {
+ return null;
+ }
+
+ if (state === 'closed') {
+ return 0;
+ }
+
+ return WritableStreamDefaultControllerGetDesiredSize(stream._writableStreamController);
+ }
+
+ function WritableStreamDefaultWriterRelease(writer) {
+ var stream = writer._ownerWritableStream;
+ assert(stream !== undefined);
+ assert(stream._writer === writer);
+ var releasedError = new TypeError('Writer was released and can no longer be used to monitor the stream\'s closedness');
+ WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError);
+ WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError);
+ stream._writer = undefined;
+ writer._ownerWritableStream = undefined;
+ }
+
+ function WritableStreamDefaultWriterWrite(writer, chunk) {
+ var stream = writer._ownerWritableStream;
+ assert(stream !== undefined);
+ var controller = stream._writableStreamController;
+ var chunkSize = WritableStreamDefaultControllerGetChunkSize(controller, chunk);
+
+ if (stream !== writer._ownerWritableStream) {
+ return Promise.reject(defaultWriterLockException('write to'));
+ }
+
+ var state = stream._state;
+
+ if (state === 'errored') {
+ return Promise.reject(stream._storedError);
+ }
+
+ if (WritableStreamCloseQueuedOrInFlight(stream) === true || state === 'closed') {
+ return Promise.reject(new TypeError('The stream is closing or closed and cannot be written to'));
+ }
+
+ if (state === 'erroring') {
+ return Promise.reject(stream._storedError);
+ }
+
+ assert(state === 'writable');
+ var promise = WritableStreamAddWriteRequest(stream);
+ WritableStreamDefaultControllerWrite(controller, chunk, chunkSize);
+ return promise;
+ }
+
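+ // The controller owns the internal queue, sizes chunks via the queuing strategy,
+ // and invokes the underlying sink's start/write/close/abort hooks.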
+ var WritableStreamDefaultController = function () {
+ function WritableStreamDefaultController(stream, underlyingSink, size, highWaterMark) {
+ _classCallCheck(this, WritableStreamDefaultController);
+
+ if (IsWritableStream(stream) === false) {
+ throw new TypeError('WritableStreamDefaultController can only be constructed with a WritableStream instance');
+ }
+
+ if (stream._writableStreamController !== undefined) {
+ throw new TypeError('WritableStreamDefaultController instances can only be created by the WritableStream constructor');
+ }
+
+ this._controlledWritableStream = stream;
+ this._underlyingSink = underlyingSink;
+ this._queue = undefined;
+ this._queueTotalSize = undefined;
+ ResetQueue(this);
+ this._started = false;
+ var normalizedStrategy = ValidateAndNormalizeQueuingStrategy(size, highWaterMark);
+ this._strategySize = normalizedStrategy.size;
+ this._strategyHWM = normalizedStrategy.highWaterMark;
+ var backpressure = WritableStreamDefaultControllerGetBackpressure(this);
+ WritableStreamUpdateBackpressure(stream, backpressure);
+ }
+
+ _createClass(WritableStreamDefaultController, [{
+ key: 'error',
+ value: function error(e) {
+ if (IsWritableStreamDefaultController(this) === false) {
+ throw new TypeError('WritableStreamDefaultController.prototype.error can only be used on a WritableStreamDefaultController');
+ }
+
+ var state = this._controlledWritableStream._state;
+
+ if (state !== 'writable') {
+ return;
+ }
+
+ WritableStreamDefaultControllerError(this, e);
+ }
+ }, {
+ key: '__abortSteps',
+ value: function __abortSteps(reason) {
+ return PromiseInvokeOrNoop(this._underlyingSink, 'abort', [reason]);
+ }
+ }, {
+ key: '__errorSteps',
+ value: function __errorSteps() {
+ ResetQueue(this);
+ }
+ }, {
+ key: '__startSteps',
+ value: function __startSteps() {
+ var _this = this;
+
+ var startResult = InvokeOrNoop(this._underlyingSink, 'start', [this]);
+ var stream = this._controlledWritableStream;
+ Promise.resolve(startResult).then(function () {
+ assert(stream._state === 'writable' || stream._state === 'erroring');
+ _this._started = true;
+ WritableStreamDefaultControllerAdvanceQueueIfNeeded(_this);
+ }, function (r) {
+ assert(stream._state === 'writable' || stream._state === 'erroring');
+ _this._started = true;
+ WritableStreamDealWithRejection(stream, r);
+ })["catch"](rethrowAssertionErrorRejection);
+ }
+ }]);
+
+ return WritableStreamDefaultController;
+ }();
+
+ function WritableStreamDefaultControllerClose(controller) {
+ EnqueueValueWithSize(controller, 'close', 0);
+ WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+ }
+
+ function WritableStreamDefaultControllerGetChunkSize(controller, chunk) {
+ var strategySize = controller._strategySize;
+
+ if (strategySize === undefined) {
+ return 1;
+ }
+
+ try {
+ return strategySize(chunk);
+ } catch (chunkSizeE) {
+ WritableStreamDefaultControllerErrorIfNeeded(controller, chunkSizeE);
+ return 1;
+ }
+ }
+
+ function WritableStreamDefaultControllerGetDesiredSize(controller) {
+ return controller._strategyHWM - controller._queueTotalSize;
+ }
+
+ function WritableStreamDefaultControllerWrite(controller, chunk, chunkSize) {
+ var writeRecord = {
+ chunk: chunk
+ };
+
+ try {
+ EnqueueValueWithSize(controller, writeRecord, chunkSize);
+ } catch (enqueueE) {
+ WritableStreamDefaultControllerErrorIfNeeded(controller, enqueueE);
+ return;
+ }
+
+ var stream = controller._controlledWritableStream;
+
+ if (WritableStreamCloseQueuedOrInFlight(stream) === false && stream._state === 'writable') {
+ var backpressure = WritableStreamDefaultControllerGetBackpressure(controller);
+ WritableStreamUpdateBackpressure(stream, backpressure);
+ }
+
+ WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+ }
+
+ function IsWritableStreamDefaultController(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_underlyingSink')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller) {
+ var stream = controller._controlledWritableStream;
+
+ if (controller._started === false) {
+ return;
+ }
+
+ if (stream._inFlightWriteRequest !== undefined) {
+ return;
+ }
+
+ var state = stream._state;
+
+ if (state === 'closed' || state === 'errored') {
+ return;
+ }
+
+ if (state === 'erroring') {
+ WritableStreamFinishErroring(stream);
+ return;
+ }
+
+ if (controller._queue.length === 0) {
+ return;
+ }
+
+ var writeRecord = PeekQueueValue(controller);
+
+ if (writeRecord === 'close') {
+ WritableStreamDefaultControllerProcessClose(controller);
+ } else {
+ WritableStreamDefaultControllerProcessWrite(controller, writeRecord.chunk);
+ }
+ }
+
+ function WritableStreamDefaultControllerErrorIfNeeded(controller, error) {
+ if (controller._controlledWritableStream._state === 'writable') {
+ WritableStreamDefaultControllerError(controller, error);
+ }
+ }
+
+ function WritableStreamDefaultControllerProcessClose(controller) {
+ var stream = controller._controlledWritableStream;
+ WritableStreamMarkCloseRequestInFlight(stream);
+ DequeueValue(controller);
+ assert(controller._queue.length === 0, 'queue must be empty once the final write record is dequeued');
+ var sinkClosePromise = PromiseInvokeOrNoop(controller._underlyingSink, 'close', []);
+ sinkClosePromise.then(function () {
+ WritableStreamFinishInFlightClose(stream);
+ }, function (reason) {
+ WritableStreamFinishInFlightCloseWithError(stream, reason);
+ })["catch"](rethrowAssertionErrorRejection);
+ }
+
+ function WritableStreamDefaultControllerProcessWrite(controller, chunk) {
+ var stream = controller._controlledWritableStream;
+ WritableStreamMarkFirstWriteRequestInFlight(stream);
+ var sinkWritePromise = PromiseInvokeOrNoop(controller._underlyingSink, 'write', [chunk, controller]);
+ sinkWritePromise.then(function () {
+ WritableStreamFinishInFlightWrite(stream);
+ var state = stream._state;
+ assert(state === 'writable' || state === 'erroring');
+ DequeueValue(controller);
+
+ if (WritableStreamCloseQueuedOrInFlight(stream) === false && state === 'writable') {
+ var backpressure = WritableStreamDefaultControllerGetBackpressure(controller);
+ WritableStreamUpdateBackpressure(stream, backpressure);
+ }
+
+ WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);
+ }, function (reason) {
+ WritableStreamFinishInFlightWriteWithError(stream, reason);
+ })["catch"](rethrowAssertionErrorRejection);
+ }
+
+ function WritableStreamDefaultControllerGetBackpressure(controller) {
+ var desiredSize = WritableStreamDefaultControllerGetDesiredSize(controller);
+ return desiredSize <= 0;
+ }
+
+ function WritableStreamDefaultControllerError(controller, error) {
+ var stream = controller._controlledWritableStream;
+ assert(stream._state === 'writable');
+ WritableStreamStartErroring(stream, error);
+ }
+
+ function streamBrandCheckException(name) {
+ return new TypeError('WritableStream.prototype.' + name + ' can only be used on a WritableStream');
+ }
+
+ function defaultWriterBrandCheckException(name) {
+ return new TypeError('WritableStreamDefaultWriter.prototype.' + name + ' can only be used on a WritableStreamDefaultWriter');
+ }
+
+ function defaultWriterLockException(name) {
+ return new TypeError('Cannot ' + name + ' a stream using a released writer');
+ }
+
+ function defaultWriterClosedPromiseInitialize(writer) {
+ writer._closedPromise = new Promise(function (resolve, reject) {
+ writer._closedPromise_resolve = resolve;
+ writer._closedPromise_reject = reject;
+ writer._closedPromiseState = 'pending';
+ });
+ }
+
+ function defaultWriterClosedPromiseInitializeAsRejected(writer, reason) {
+ writer._closedPromise = Promise.reject(reason);
+ writer._closedPromise_resolve = undefined;
+ writer._closedPromise_reject = undefined;
+ writer._closedPromiseState = 'rejected';
+ }
+
+ function defaultWriterClosedPromiseInitializeAsResolved(writer) {
+ writer._closedPromise = Promise.resolve(undefined);
+ writer._closedPromise_resolve = undefined;
+ writer._closedPromise_reject = undefined;
+ writer._closedPromiseState = 'resolved';
+ }
+
+ function defaultWriterClosedPromiseReject(writer, reason) {
+ assert(writer._closedPromise_resolve !== undefined, 'writer._closedPromise_resolve !== undefined');
+ assert(writer._closedPromise_reject !== undefined, 'writer._closedPromise_reject !== undefined');
+ assert(writer._closedPromiseState === 'pending', 'writer._closedPromiseState is pending');
+
+ writer._closedPromise_reject(reason);
+
+ writer._closedPromise_resolve = undefined;
+ writer._closedPromise_reject = undefined;
+ writer._closedPromiseState = 'rejected';
+ }
+
+ function defaultWriterClosedPromiseResetToRejected(writer, reason) {
+ assert(writer._closedPromise_resolve === undefined, 'writer._closedPromise_resolve === undefined');
+ assert(writer._closedPromise_reject === undefined, 'writer._closedPromise_reject === undefined');
+ assert(writer._closedPromiseState !== 'pending', 'writer._closedPromiseState is not pending');
+ writer._closedPromise = Promise.reject(reason);
+ writer._closedPromiseState = 'rejected';
+ }
+
+ function defaultWriterClosedPromiseResolve(writer) {
+ assert(writer._closedPromise_resolve !== undefined, 'writer._closedPromise_resolve !== undefined');
+ assert(writer._closedPromise_reject !== undefined, 'writer._closedPromise_reject !== undefined');
+ assert(writer._closedPromiseState === 'pending', 'writer._closedPromiseState is pending');
+
+ writer._closedPromise_resolve(undefined);
+
+ writer._closedPromise_resolve = undefined;
+ writer._closedPromise_reject = undefined;
+ writer._closedPromiseState = 'resolved';
+ }
+
+ function defaultWriterReadyPromiseInitialize(writer) {
+ writer._readyPromise = new Promise(function (resolve, reject) {
+ writer._readyPromise_resolve = resolve;
+ writer._readyPromise_reject = reject;
+ });
+ writer._readyPromiseState = 'pending';
+ }
+
+ function defaultWriterReadyPromiseInitializeAsRejected(writer, reason) {
+ writer._readyPromise = Promise.reject(reason);
+ writer._readyPromise_resolve = undefined;
+ writer._readyPromise_reject = undefined;
+ writer._readyPromiseState = 'rejected';
+ }
+
+ function defaultWriterReadyPromiseInitializeAsResolved(writer) {
+ writer._readyPromise = Promise.resolve(undefined);
+ writer._readyPromise_resolve = undefined;
+ writer._readyPromise_reject = undefined;
+ writer._readyPromiseState = 'fulfilled';
+ }
+
+ function defaultWriterReadyPromiseReject(writer, reason) {
+ assert(writer._readyPromise_resolve !== undefined, 'writer._readyPromise_resolve !== undefined');
+ assert(writer._readyPromise_reject !== undefined, 'writer._readyPromise_reject !== undefined');
+
+ writer._readyPromise_reject(reason);
+
+ writer._readyPromise_resolve = undefined;
+ writer._readyPromise_reject = undefined;
+ writer._readyPromiseState = 'rejected';
+ }
+
+ function defaultWriterReadyPromiseReset(writer) {
+ assert(writer._readyPromise_resolve === undefined, 'writer._readyPromise_resolve === undefined');
+ assert(writer._readyPromise_reject === undefined, 'writer._readyPromise_reject === undefined');
+ writer._readyPromise = new Promise(function (resolve, reject) {
+ writer._readyPromise_resolve = resolve;
+ writer._readyPromise_reject = reject;
+ });
+ writer._readyPromiseState = 'pending';
+ }
+
+ function defaultWriterReadyPromiseResetToRejected(writer, reason) {
+ assert(writer._readyPromise_resolve === undefined, 'writer._readyPromise_resolve === undefined');
+ assert(writer._readyPromise_reject === undefined, 'writer._readyPromise_reject === undefined');
+ writer._readyPromise = Promise.reject(reason);
+ writer._readyPromiseState = 'rejected';
+ }
+
+ function defaultWriterReadyPromiseResolve(writer) {
+ assert(writer._readyPromise_resolve !== undefined, 'writer._readyPromise_resolve !== undefined');
+ assert(writer._readyPromise_reject !== undefined, 'writer._readyPromise_reject !== undefined');
+
+ writer._readyPromise_resolve(undefined);
+
+ writer._readyPromise_resolve = undefined;
+ writer._readyPromise_reject = undefined;
+ writer._readyPromiseState = 'fulfilled';
+ }
+}, function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
+ var _require = __w_pdfjs_require__(0),
+ IsFiniteNonNegativeNumber = _require.IsFiniteNonNegativeNumber;
+
+ var _require2 = __w_pdfjs_require__(1),
+ assert = _require2.assert;
+
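+ // Queue-with-sizes primitives from the spec: entries store { value, size } and
+ // _queueTotalSize tracks the running total used for desired-size calculations.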
+ exports.DequeueValue = function (container) {
+ assert('_queue' in container && '_queueTotalSize' in container, 'Spec-level failure: DequeueValue should only be used on containers with [[queue]] and [[queueTotalSize]].');
+ assert(container._queue.length > 0, 'Spec-level failure: should never dequeue from an empty queue.');
+
+ var pair = container._queue.shift();
+
+ container._queueTotalSize -= pair.size;
+
+ if (container._queueTotalSize < 0) {
+ container._queueTotalSize = 0;
+ }
+
+ return pair.value;
+ };
+
+ exports.EnqueueValueWithSize = function (container, value, size) {
+ assert('_queue' in container && '_queueTotalSize' in container, 'Spec-level failure: EnqueueValueWithSize should only be used on containers with [[queue]] and ' + '[[queueTotalSize]].');
+ size = Number(size);
+
+ if (!IsFiniteNonNegativeNumber(size)) {
+ throw new RangeError('Size must be a finite, non-NaN, non-negative number.');
+ }
+
+ container._queue.push({
+ value: value,
+ size: size
+ });
+
+ container._queueTotalSize += size;
+ };
+
+ exports.PeekQueueValue = function (container) {
+ assert('_queue' in container && '_queueTotalSize' in container, 'Spec-level failure: PeekQueueValue should only be used on containers with [[queue]] and [[queueTotalSize]].');
+ assert(container._queue.length > 0, 'Spec-level failure: should never peek at an empty queue.');
+ var pair = container._queue[0];
+ return pair.value;
+ };
+
+ exports.ResetQueue = function (container) {
+ assert('_queue' in container && '_queueTotalSize' in container, 'Spec-level failure: ResetQueue should only be used on containers with [[queue]] and [[queueTotalSize]].');
+ container._queue = [];
+ container._queueTotalSize = 0;
+ };
+}, function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
+ var _createClass = function () {
+ function defineProperties(target, props) {
+ for (var i = 0; i < props.length; i++) {
+ var descriptor = props[i];
+ descriptor.enumerable = descriptor.enumerable || false;
+ descriptor.configurable = true;
+ if ("value" in descriptor) descriptor.writable = true;
+ Object.defineProperty(target, descriptor.key, descriptor);
+ }
+ }
+
+ return function (Constructor, protoProps, staticProps) {
+ if (protoProps) defineProperties(Constructor.prototype, protoProps);
+ if (staticProps) defineProperties(Constructor, staticProps);
+ return Constructor;
+ };
+ }();
+
+ function _classCallCheck(instance, Constructor) {
+ if (!(instance instanceof Constructor)) {
+ throw new TypeError("Cannot call a class as a function");
+ }
+ }
+
+ var _require = __w_pdfjs_require__(0),
+ ArrayBufferCopy = _require.ArrayBufferCopy,
+ CreateIterResultObject = _require.CreateIterResultObject,
+ IsFiniteNonNegativeNumber = _require.IsFiniteNonNegativeNumber,
+ InvokeOrNoop = _require.InvokeOrNoop,
+ PromiseInvokeOrNoop = _require.PromiseInvokeOrNoop,
+ TransferArrayBuffer = _require.TransferArrayBuffer,
+ ValidateAndNormalizeQueuingStrategy = _require.ValidateAndNormalizeQueuingStrategy,
+ ValidateAndNormalizeHighWaterMark = _require.ValidateAndNormalizeHighWaterMark;
+
+ var _require2 = __w_pdfjs_require__(0),
+ createArrayFromList = _require2.createArrayFromList,
+ createDataProperty = _require2.createDataProperty,
+ typeIsObject = _require2.typeIsObject;
+
+ var _require3 = __w_pdfjs_require__(1),
+ assert = _require3.assert,
+ rethrowAssertionErrorRejection = _require3.rethrowAssertionErrorRejection;
+
+ var _require4 = __w_pdfjs_require__(3),
+ DequeueValue = _require4.DequeueValue,
+ EnqueueValueWithSize = _require4.EnqueueValueWithSize,
+ ResetQueue = _require4.ResetQueue;
+
+ var _require5 = __w_pdfjs_require__(2),
+ AcquireWritableStreamDefaultWriter = _require5.AcquireWritableStreamDefaultWriter,
+ IsWritableStream = _require5.IsWritableStream,
+ IsWritableStreamLocked = _require5.IsWritableStreamLocked,
+ WritableStreamAbort = _require5.WritableStreamAbort,
+ WritableStreamDefaultWriterCloseWithErrorPropagation = _require5.WritableStreamDefaultWriterCloseWithErrorPropagation,
+ WritableStreamDefaultWriterRelease = _require5.WritableStreamDefaultWriterRelease,
+ WritableStreamDefaultWriterWrite = _require5.WritableStreamDefaultWriterWrite,
+ WritableStreamCloseQueuedOrInFlight = _require5.WritableStreamCloseQueuedOrInFlight;
+
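+ // ReadableStream implementation: underlyingSource.type === 'bytes' selects a
+ // ReadableByteStreamController (default highWaterMark 0), otherwise a
+ // ReadableStreamDefaultController (default highWaterMark 1).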
+ var ReadableStream = function () {
+ function ReadableStream() {
+ var underlyingSource = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
+
+ var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ size = _ref.size,
+ highWaterMark = _ref.highWaterMark;
+
+ _classCallCheck(this, ReadableStream);
+
+ this._state = 'readable';
+ this._reader = undefined;
+ this._storedError = undefined;
+ this._disturbed = false;
+ this._readableStreamController = undefined;
+ var type = underlyingSource.type;
+ var typeString = String(type);
+
+ if (typeString === 'bytes') {
+ if (highWaterMark === undefined) {
+ highWaterMark = 0;
+ }
+
+ this._readableStreamController = new ReadableByteStreamController(this, underlyingSource, highWaterMark);
+ } else if (type === undefined) {
+ if (highWaterMark === undefined) {
+ highWaterMark = 1;
+ }
+
+ this._readableStreamController = new ReadableStreamDefaultController(this, underlyingSource, size, highWaterMark);
+ } else {
+ throw new RangeError('Invalid type is specified');
+ }
+ }
+
+ _createClass(ReadableStream, [{
+ key: 'cancel',
+ value: function cancel(reason) {
+ if (IsReadableStream(this) === false) {
+ return Promise.reject(streamBrandCheckException('cancel'));
+ }
+
+ if (IsReadableStreamLocked(this) === true) {
+ return Promise.reject(new TypeError('Cannot cancel a stream that already has a reader'));
+ }
+
+ return ReadableStreamCancel(this, reason);
+ }
+ }, {
+ key: 'getReader',
+ value: function getReader() {
+ var _ref2 = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
+ mode = _ref2.mode;
+
+ if (IsReadableStream(this) === false) {
+ throw streamBrandCheckException('getReader');
+ }
+
+ if (mode === undefined) {
+ return AcquireReadableStreamDefaultReader(this);
+ }
+
+ mode = String(mode);
+
+ if (mode === 'byob') {
+ return AcquireReadableStreamBYOBReader(this);
+ }
+
+ throw new RangeError('Invalid mode is specified');
+ }
+ }, {
+ key: 'pipeThrough',
+ value: function pipeThrough(_ref3, options) {
+ var writable = _ref3.writable,
+ readable = _ref3.readable;
+ var promise = this.pipeTo(writable, options);
+ ifIsObjectAndHasAPromiseIsHandledInternalSlotSetPromiseIsHandledToTrue(promise);
+ return readable;
+ }
+ }, {
+ key: 'pipeTo',
+ value: function pipeTo(dest) {
+ var _this = this;
+
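+ // pipeTo loop: wait for the writer to be ready, read one chunk, write it, repeat;
+ // error/close propagation between the two ends is governed by the preventAbort,
+ // preventCancel and preventClose options handled below.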
+ var _ref4 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ preventClose = _ref4.preventClose,
+ preventAbort = _ref4.preventAbort,
+ preventCancel = _ref4.preventCancel;
+
+ if (IsReadableStream(this) === false) {
+ return Promise.reject(streamBrandCheckException('pipeTo'));
+ }
+
+ if (IsWritableStream(dest) === false) {
+ return Promise.reject(new TypeError('ReadableStream.prototype.pipeTo\'s first argument must be a WritableStream'));
+ }
+
+ preventClose = Boolean(preventClose);
+ preventAbort = Boolean(preventAbort);
+ preventCancel = Boolean(preventCancel);
+
+ if (IsReadableStreamLocked(this) === true) {
+ return Promise.reject(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream'));
+ }
+
+ if (IsWritableStreamLocked(dest) === true) {
+ return Promise.reject(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream'));
+ }
+
+ var reader = AcquireReadableStreamDefaultReader(this);
+ var writer = AcquireWritableStreamDefaultWriter(dest);
+ var shuttingDown = false;
+ var currentWrite = Promise.resolve();
+ return new Promise(function (resolve, reject) {
+ function pipeLoop() {
+ currentWrite = Promise.resolve();
+
+ if (shuttingDown === true) {
+ return Promise.resolve();
+ }
+
+ return writer._readyPromise.then(function () {
+ return ReadableStreamDefaultReaderRead(reader).then(function (_ref5) {
+ var value = _ref5.value,
+ done = _ref5.done;
+
+ if (done === true) {
+ return;
+ }
+
+ currentWrite = WritableStreamDefaultWriterWrite(writer, value)["catch"](function () {});
+ });
+ }).then(pipeLoop);
+ }
+
+ isOrBecomesErrored(_this, reader._closedPromise, function (storedError) {
+ if (preventAbort === false) {
+ shutdownWithAction(function () {
+ return WritableStreamAbort(dest, storedError);
+ }, true, storedError);
+ } else {
+ shutdown(true, storedError);
+ }
+ });
+ isOrBecomesErrored(dest, writer._closedPromise, function (storedError) {
+ if (preventCancel === false) {
+ shutdownWithAction(function () {
+ return ReadableStreamCancel(_this, storedError);
+ }, true, storedError);
+ } else {
+ shutdown(true, storedError);
+ }
+ });
+ isOrBecomesClosed(_this, reader._closedPromise, function () {
+ if (preventClose === false) {
+ shutdownWithAction(function () {
+ return WritableStreamDefaultWriterCloseWithErrorPropagation(writer);
+ });
+ } else {
+ shutdown();
+ }
+ });
+
+ if (WritableStreamCloseQueuedOrInFlight(dest) === true || dest._state === 'closed') {
+ var destClosed = new TypeError('the destination writable stream closed before all data could be piped to it');
+
+ if (preventCancel === false) {
+ shutdownWithAction(function () {
+ return ReadableStreamCancel(_this, destClosed);
+ }, true, destClosed);
+ } else {
+ shutdown(true, destClosed);
+ }
+ }
+
+ pipeLoop()["catch"](function (err) {
+ currentWrite = Promise.resolve();
+ rethrowAssertionErrorRejection(err);
+ });
+
+ function waitForWritesToFinish() {
+ var oldCurrentWrite = currentWrite;
+ return currentWrite.then(function () {
+ return oldCurrentWrite !== currentWrite ? waitForWritesToFinish() : undefined;
+ });
+ }
+
+ function isOrBecomesErrored(stream, promise, action) {
+ if (stream._state === 'errored') {
+ action(stream._storedError);
+ } else {
+ promise["catch"](action)["catch"](rethrowAssertionErrorRejection);
+ }
+ }
+
+ function isOrBecomesClosed(stream, promise, action) {
+ if (stream._state === 'closed') {
+ action();
+ } else {
+ promise.then(action)["catch"](rethrowAssertionErrorRejection);
+ }
+ }
+
+ function shutdownWithAction(action, originalIsError, originalError) {
+ if (shuttingDown === true) {
+ return;
+ }
+
+ shuttingDown = true;
+
+ if (dest._state === 'writable' && WritableStreamCloseQueuedOrInFlight(dest) === false) {
+ waitForWritesToFinish().then(doTheRest);
+ } else {
+ doTheRest();
+ }
+
+ function doTheRest() {
+ action().then(function () {
+ return finalize(originalIsError, originalError);
+ }, function (newError) {
+ return finalize(true, newError);
+ })["catch"](rethrowAssertionErrorRejection);
+ }
+ }
+
+ function shutdown(isError, error) {
+ if (shuttingDown === true) {
+ return;
+ }
+
+ shuttingDown = true;
+
+ if (dest._state === 'writable' && WritableStreamCloseQueuedOrInFlight(dest) === false) {
+ waitForWritesToFinish().then(function () {
+ return finalize(isError, error);
+ })["catch"](rethrowAssertionErrorRejection);
+ } else {
+ finalize(isError, error);
+ }
+ }
+
+ function finalize(isError, error) {
+ WritableStreamDefaultWriterRelease(writer);
+ ReadableStreamReaderGenericRelease(reader);
+
+ if (isError) {
+ reject(error);
+ } else {
+ resolve(undefined);
+ }
+ }
+ });
+ }
+ }, {
+ key: 'tee',
+ value: function tee() {
+ if (IsReadableStream(this) === false) {
+ throw streamBrandCheckException('tee');
+ }
+
+ var branches = ReadableStreamTee(this, false);
+ return createArrayFromList(branches);
+ }
+ }, {
+ key: 'locked',
+ get: function get() {
+ if (IsReadableStream(this) === false) {
+ throw streamBrandCheckException('locked');
+ }
+
+ return IsReadableStreamLocked(this);
+ }
+ }]);
+
+ return ReadableStream;
+ }();
+
+ module.exports = {
+ ReadableStream: ReadableStream,
+ IsReadableStreamDisturbed: IsReadableStreamDisturbed,
+ ReadableStreamDefaultControllerClose: ReadableStreamDefaultControllerClose,
+ ReadableStreamDefaultControllerEnqueue: ReadableStreamDefaultControllerEnqueue,
+ ReadableStreamDefaultControllerError: ReadableStreamDefaultControllerError,
+ ReadableStreamDefaultControllerGetDesiredSize: ReadableStreamDefaultControllerGetDesiredSize
+ };
+
+ function AcquireReadableStreamBYOBReader(stream) {
+ return new ReadableStreamBYOBReader(stream);
+ }
+
+ function AcquireReadableStreamDefaultReader(stream) {
+ return new ReadableStreamDefaultReader(stream);
+ }
+
+ function IsReadableStream(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_readableStreamController')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function IsReadableStreamDisturbed(stream) {
+ assert(IsReadableStream(stream) === true, 'IsReadableStreamDisturbed should only be used on known readable streams');
+ return stream._disturbed;
+ }
+
+ function IsReadableStreamLocked(stream) {
+ assert(IsReadableStream(stream) === true, 'IsReadableStreamLocked should only be used on known readable streams');
+
+ if (stream._reader === undefined) {
+ return false;
+ }
+
+ return true;
+ }
+
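+ // Tee: a single shared pull function reads from the source and enqueues each chunk
+ // into both branch controllers; the source is only canceled once both branches have
+ // been canceled, using a composite reason.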
+ function ReadableStreamTee(stream, cloneForBranch2) {
+ assert(IsReadableStream(stream) === true);
+ assert(typeof cloneForBranch2 === 'boolean');
+ var reader = AcquireReadableStreamDefaultReader(stream);
+ var teeState = {
+ closedOrErrored: false,
+ canceled1: false,
+ canceled2: false,
+ reason1: undefined,
+ reason2: undefined
+ };
+ teeState.promise = new Promise(function (resolve) {
+ teeState._resolve = resolve;
+ });
+ var pull = create_ReadableStreamTeePullFunction();
+ pull._reader = reader;
+ pull._teeState = teeState;
+ pull._cloneForBranch2 = cloneForBranch2;
+ var cancel1 = create_ReadableStreamTeeBranch1CancelFunction();
+ cancel1._stream = stream;
+ cancel1._teeState = teeState;
+ var cancel2 = create_ReadableStreamTeeBranch2CancelFunction();
+ cancel2._stream = stream;
+ cancel2._teeState = teeState;
+ var underlyingSource1 = Object.create(Object.prototype);
+ createDataProperty(underlyingSource1, 'pull', pull);
+ createDataProperty(underlyingSource1, 'cancel', cancel1);
+ var branch1Stream = new ReadableStream(underlyingSource1);
+ var underlyingSource2 = Object.create(Object.prototype);
+ createDataProperty(underlyingSource2, 'pull', pull);
+ createDataProperty(underlyingSource2, 'cancel', cancel2);
+ var branch2Stream = new ReadableStream(underlyingSource2);
+ pull._branch1 = branch1Stream._readableStreamController;
+ pull._branch2 = branch2Stream._readableStreamController;
+
+ reader._closedPromise["catch"](function (r) {
+ if (teeState.closedOrErrored === true) {
+ return;
+ }
+
+ ReadableStreamDefaultControllerError(pull._branch1, r);
+ ReadableStreamDefaultControllerError(pull._branch2, r);
+ teeState.closedOrErrored = true;
+ });
+
+ return [branch1Stream, branch2Stream];
+ }
+
+ function create_ReadableStreamTeePullFunction() {
+ function f() {
+ var reader = f._reader,
+ branch1 = f._branch1,
+ branch2 = f._branch2,
+ teeState = f._teeState;
+ return ReadableStreamDefaultReaderRead(reader).then(function (result) {
+ assert(typeIsObject(result));
+ var value = result.value;
+ var done = result.done;
+ assert(typeof done === 'boolean');
+
+ if (done === true && teeState.closedOrErrored === false) {
+ if (teeState.canceled1 === false) {
+ ReadableStreamDefaultControllerClose(branch1);
+ }
+
+ if (teeState.canceled2 === false) {
+ ReadableStreamDefaultControllerClose(branch2);
+ }
+
+ teeState.closedOrErrored = true;
+ }
+
+ if (teeState.closedOrErrored === true) {
+ return;
+ }
+
+ var value1 = value;
+ var value2 = value;
+
+ if (teeState.canceled1 === false) {
+ ReadableStreamDefaultControllerEnqueue(branch1, value1);
+ }
+
+ if (teeState.canceled2 === false) {
+ ReadableStreamDefaultControllerEnqueue(branch2, value2);
+ }
+ });
+ }
+
+ return f;
+ }
+
+ function create_ReadableStreamTeeBranch1CancelFunction() {
+ function f(reason) {
+ var stream = f._stream,
+ teeState = f._teeState;
+ teeState.canceled1 = true;
+ teeState.reason1 = reason;
+
+ if (teeState.canceled2 === true) {
+ var compositeReason = createArrayFromList([teeState.reason1, teeState.reason2]);
+ var cancelResult = ReadableStreamCancel(stream, compositeReason);
+
+ teeState._resolve(cancelResult);
+ }
+
+ return teeState.promise;
+ }
+
+ return f;
+ }
+
+ function create_ReadableStreamTeeBranch2CancelFunction() {
+ function f(reason) {
+ var stream = f._stream,
+ teeState = f._teeState;
+ teeState.canceled2 = true;
+ teeState.reason2 = reason;
+
+ if (teeState.canceled1 === true) {
+ var compositeReason = createArrayFromList([teeState.reason1, teeState.reason2]);
+ var cancelResult = ReadableStreamCancel(stream, compositeReason);
+
+ teeState._resolve(cancelResult);
+ }
+
+ return teeState.promise;
+ }
+
+ return f;
+ }
+
+ function ReadableStreamAddReadIntoRequest(stream) {
+ assert(IsReadableStreamBYOBReader(stream._reader) === true);
+ assert(stream._state === 'readable' || stream._state === 'closed');
+ var promise = new Promise(function (resolve, reject) {
+ var readIntoRequest = {
+ _resolve: resolve,
+ _reject: reject
+ };
+
+ stream._reader._readIntoRequests.push(readIntoRequest);
+ });
+ return promise;
+ }
+
+ function ReadableStreamAddReadRequest(stream) {
+ assert(IsReadableStreamDefaultReader(stream._reader) === true);
+ assert(stream._state === 'readable');
+ var promise = new Promise(function (resolve, reject) {
+ var readRequest = {
+ _resolve: resolve,
+ _reject: reject
+ };
+
+ stream._reader._readRequests.push(readRequest);
+ });
+ return promise;
+ }
+
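+ // ReadableStreamCancel: marks the stream as disturbed and, if it is still
+ // readable, closes it and forwards the cancel reason to the underlying
+ // source via the controller's cancel steps.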
+ function ReadableStreamCancel(stream, reason) {
+ stream._disturbed = true;
+
+ if (stream._state === 'closed') {
+ return Promise.resolve(undefined);
+ }
+
+ if (stream._state === 'errored') {
+ return Promise.reject(stream._storedError);
+ }
+
+ ReadableStreamClose(stream);
+
+ var sourceCancelPromise = stream._readableStreamController.__cancelSteps(reason);
+
+ return sourceCancelPromise.then(function () {
+ return undefined;
+ });
+ }
+
+ function ReadableStreamClose(stream) {
+ assert(stream._state === 'readable');
+ stream._state = 'closed';
+ var reader = stream._reader;
+
+ if (reader === undefined) {
+ return undefined;
+ }
+
+ if (IsReadableStreamDefaultReader(reader) === true) {
+ for (var i = 0; i < reader._readRequests.length; i++) {
+ var _resolve = reader._readRequests[i]._resolve;
+
+ _resolve(CreateIterResultObject(undefined, true));
+ }
+
+ reader._readRequests = [];
+ }
+
+ defaultReaderClosedPromiseResolve(reader);
+ return undefined;
+ }
+
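+ // ReadableStreamError: moves the stream to the 'errored' state, rejects all
+ // pending read() / read(view) requests, and rejects the reader's closed promise.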
+ function ReadableStreamError(stream, e) {
+ assert(IsReadableStream(stream) === true, 'stream must be ReadableStream');
+ assert(stream._state === 'readable', 'state must be readable');
+ stream._state = 'errored';
+ stream._storedError = e;
+ var reader = stream._reader;
+
+ if (reader === undefined) {
+ return undefined;
+ }
+
+ if (IsReadableStreamDefaultReader(reader) === true) {
+ for (var i = 0; i < reader._readRequests.length; i++) {
+ var readRequest = reader._readRequests[i];
+
+ readRequest._reject(e);
+ }
+
+ reader._readRequests = [];
+ } else {
+ assert(IsReadableStreamBYOBReader(reader), 'reader must be ReadableStreamBYOBReader');
+
+ for (var _i = 0; _i < reader._readIntoRequests.length; _i++) {
+ var readIntoRequest = reader._readIntoRequests[_i];
+
+ readIntoRequest._reject(e);
+ }
+
+ reader._readIntoRequests = [];
+ }
+
+ defaultReaderClosedPromiseReject(reader, e);
+
+ reader._closedPromise["catch"](function () {});
+ }
+
+ function ReadableStreamFulfillReadIntoRequest(stream, chunk, done) {
+ var reader = stream._reader;
+ assert(reader._readIntoRequests.length > 0);
+
+ var readIntoRequest = reader._readIntoRequests.shift();
+
+ readIntoRequest._resolve(CreateIterResultObject(chunk, done));
+ }
+
+ function ReadableStreamFulfillReadRequest(stream, chunk, done) {
+ var reader = stream._reader;
+ assert(reader._readRequests.length > 0);
+
+ var readRequest = reader._readRequests.shift();
+
+ readRequest._resolve(CreateIterResultObject(chunk, done));
+ }
+
+ function ReadableStreamGetNumReadIntoRequests(stream) {
+ return stream._reader._readIntoRequests.length;
+ }
+
+ function ReadableStreamGetNumReadRequests(stream) {
+ return stream._reader._readRequests.length;
+ }
+
+ function ReadableStreamHasBYOBReader(stream) {
+ var reader = stream._reader;
+
+ if (reader === undefined) {
+ return false;
+ }
+
+ if (IsReadableStreamBYOBReader(reader) === false) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function ReadableStreamHasDefaultReader(stream) {
+ var reader = stream._reader;
+
+ if (reader === undefined) {
+ return false;
+ }
+
+ if (IsReadableStreamDefaultReader(reader) === false) {
+ return false;
+ }
+
+ return true;
+ }
+
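+ // Default reader: takes the exclusive lock on a stream and vends chunks via
+ // read(); releaseLock() returns the lock once no read() calls are pending.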
+ var ReadableStreamDefaultReader = function () {
+ function ReadableStreamDefaultReader(stream) {
+ _classCallCheck(this, ReadableStreamDefaultReader);
+
+ if (IsReadableStream(stream) === false) {
+ throw new TypeError('ReadableStreamDefaultReader can only be constructed with a ReadableStream instance');
+ }
+
+ if (IsReadableStreamLocked(stream) === true) {
+ throw new TypeError('This stream has already been locked for exclusive reading by another reader');
+ }
+
+ ReadableStreamReaderGenericInitialize(this, stream);
+ this._readRequests = [];
+ }
+
+ _createClass(ReadableStreamDefaultReader, [{
+ key: 'cancel',
+ value: function cancel(reason) {
+ if (IsReadableStreamDefaultReader(this) === false) {
+ return Promise.reject(defaultReaderBrandCheckException('cancel'));
+ }
+
+ if (this._ownerReadableStream === undefined) {
+ return Promise.reject(readerLockException('cancel'));
+ }
+
+ return ReadableStreamReaderGenericCancel(this, reason);
+ }
+ }, {
+ key: 'read',
+ value: function read() {
+ if (IsReadableStreamDefaultReader(this) === false) {
+ return Promise.reject(defaultReaderBrandCheckException('read'));
+ }
+
+ if (this._ownerReadableStream === undefined) {
+ return Promise.reject(readerLockException('read from'));
+ }
+
+ return ReadableStreamDefaultReaderRead(this);
+ }
+ }, {
+ key: 'releaseLock',
+ value: function releaseLock() {
+ if (IsReadableStreamDefaultReader(this) === false) {
+ throw defaultReaderBrandCheckException('releaseLock');
+ }
+
+ if (this._ownerReadableStream === undefined) {
+ return;
+ }
+
+ if (this._readRequests.length > 0) {
+ throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');
+ }
+
+ ReadableStreamReaderGenericRelease(this);
+ }
+ }, {
+ key: 'closed',
+ get: function get() {
+ if (IsReadableStreamDefaultReader(this) === false) {
+ return Promise.reject(defaultReaderBrandCheckException('closed'));
+ }
+
+ return this._closedPromise;
+ }
+ }]);
+
+ return ReadableStreamDefaultReader;
+ }();
+
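+ // BYOB ("bring your own buffer") reader: only constructible for byte streams;
+ // read(view) fills a caller-supplied ArrayBuffer view instead of allocating one.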
+ var ReadableStreamBYOBReader = function () {
+ function ReadableStreamBYOBReader(stream) {
+ _classCallCheck(this, ReadableStreamBYOBReader);
+
+ if (!IsReadableStream(stream)) {
+ throw new TypeError('ReadableStreamBYOBReader can only be constructed with a ReadableStream instance given a ' + 'byte source');
+ }
+
+ if (IsReadableByteStreamController(stream._readableStreamController) === false) {
+ throw new TypeError('Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte ' + 'source');
+ }
+
+ if (IsReadableStreamLocked(stream)) {
+ throw new TypeError('This stream has already been locked for exclusive reading by another reader');
+ }
+
+ ReadableStreamReaderGenericInitialize(this, stream);
+ this._readIntoRequests = [];
+ }
+
+ _createClass(ReadableStreamBYOBReader, [{
+ key: 'cancel',
+ value: function cancel(reason) {
+ if (!IsReadableStreamBYOBReader(this)) {
+ return Promise.reject(byobReaderBrandCheckException('cancel'));
+ }
+
+ if (this._ownerReadableStream === undefined) {
+ return Promise.reject(readerLockException('cancel'));
+ }
+
+ return ReadableStreamReaderGenericCancel(this, reason);
+ }
+ }, {
+ key: 'read',
+ value: function read(view) {
+ if (!IsReadableStreamBYOBReader(this)) {
+ return Promise.reject(byobReaderBrandCheckException('read'));
+ }
+
+ if (this._ownerReadableStream === undefined) {
+ return Promise.reject(readerLockException('read from'));
+ }
+
+ if (!ArrayBuffer.isView(view)) {
+ return Promise.reject(new TypeError('view must be an array buffer view'));
+ }
+
+ if (view.byteLength === 0) {
+ return Promise.reject(new TypeError('view must have non-zero byteLength'));
+ }
+
+ return ReadableStreamBYOBReaderRead(this, view);
+ }
+ }, {
+ key: 'releaseLock',
+ value: function releaseLock() {
+ if (!IsReadableStreamBYOBReader(this)) {
+ throw byobReaderBrandCheckException('releaseLock');
+ }
+
+ if (this._ownerReadableStream === undefined) {
+ return;
+ }
+
+ if (this._readIntoRequests.length > 0) {
+ throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');
+ }
+
+ ReadableStreamReaderGenericRelease(this);
+ }
+ }, {
+ key: 'closed',
+ get: function get() {
+ if (!IsReadableStreamBYOBReader(this)) {
+ return Promise.reject(byobReaderBrandCheckException('closed'));
+ }
+
+ return this._closedPromise;
+ }
+ }]);
+
+ return ReadableStreamBYOBReader;
+ }();
+
+ function IsReadableStreamBYOBReader(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_readIntoRequests')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function IsReadableStreamDefaultReader(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_readRequests')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function ReadableStreamReaderGenericInitialize(reader, stream) {
+ reader._ownerReadableStream = stream;
+ stream._reader = reader;
+
+ if (stream._state === 'readable') {
+ defaultReaderClosedPromiseInitialize(reader);
+ } else if (stream._state === 'closed') {
+ defaultReaderClosedPromiseInitializeAsResolved(reader);
+ } else {
+ assert(stream._state === 'errored', 'state must be errored');
+ defaultReaderClosedPromiseInitializeAsRejected(reader, stream._storedError);
+
+ reader._closedPromise["catch"](function () {});
+ }
+ }
+
+ function ReadableStreamReaderGenericCancel(reader, reason) {
+ var stream = reader._ownerReadableStream;
+ assert(stream !== undefined);
+ return ReadableStreamCancel(stream, reason);
+ }
+
+ function ReadableStreamReaderGenericRelease(reader) {
+ assert(reader._ownerReadableStream !== undefined);
+ assert(reader._ownerReadableStream._reader === reader);
+
+ if (reader._ownerReadableStream._state === 'readable') {
+ defaultReaderClosedPromiseReject(reader, new TypeError('Reader was released and can no longer be used to monitor the stream\'s closedness'));
+ } else {
+ defaultReaderClosedPromiseResetToRejected(reader, new TypeError('Reader was released and can no longer be used to monitor the stream\'s closedness'));
+ }
+
+ reader._closedPromise["catch"](function () {});
+
+ reader._ownerReadableStream._reader = undefined;
+ reader._ownerReadableStream = undefined;
+ }
+
+ function ReadableStreamBYOBReaderRead(reader, view) {
+ var stream = reader._ownerReadableStream;
+ assert(stream !== undefined);
+ stream._disturbed = true;
+
+ if (stream._state === 'errored') {
+ return Promise.reject(stream._storedError);
+ }
+
+ return ReadableByteStreamControllerPullInto(stream._readableStreamController, view);
+ }
+
+ function ReadableStreamDefaultReaderRead(reader) {
+ var stream = reader._ownerReadableStream;
+ assert(stream !== undefined);
+ stream._disturbed = true;
+
+ if (stream._state === 'closed') {
+ return Promise.resolve(CreateIterResultObject(undefined, true));
+ }
+
+ if (stream._state === 'errored') {
+ return Promise.reject(stream._storedError);
+ }
+
+ assert(stream._state === 'readable');
+ return stream._readableStreamController.__pullSteps();
+ }
+
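+ // Default controller: owns the chunk queue and queuing strategy, and decides
+ // when to call the underlying source's pull() based on the desired size and
+ // any outstanding read requests.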
+ var ReadableStreamDefaultController = function () {
+ function ReadableStreamDefaultController(stream, underlyingSource, size, highWaterMark) {
+ _classCallCheck(this, ReadableStreamDefaultController);
+
+ if (IsReadableStream(stream) === false) {
+ throw new TypeError('ReadableStreamDefaultController can only be constructed with a ReadableStream instance');
+ }
+
+ if (stream._readableStreamController !== undefined) {
+ throw new TypeError('ReadableStreamDefaultController instances can only be created by the ReadableStream constructor');
+ }
+
+ this._controlledReadableStream = stream;
+ this._underlyingSource = underlyingSource;
+ this._queue = undefined;
+ this._queueTotalSize = undefined;
+ ResetQueue(this);
+ this._started = false;
+ this._closeRequested = false;
+ this._pullAgain = false;
+ this._pulling = false;
+ var normalizedStrategy = ValidateAndNormalizeQueuingStrategy(size, highWaterMark);
+ this._strategySize = normalizedStrategy.size;
+ this._strategyHWM = normalizedStrategy.highWaterMark;
+ var controller = this;
+ var startResult = InvokeOrNoop(underlyingSource, 'start', [this]);
+ Promise.resolve(startResult).then(function () {
+ controller._started = true;
+ assert(controller._pulling === false);
+ assert(controller._pullAgain === false);
+ ReadableStreamDefaultControllerCallPullIfNeeded(controller);
+ }, function (r) {
+ ReadableStreamDefaultControllerErrorIfNeeded(controller, r);
+ })["catch"](rethrowAssertionErrorRejection);
+ }
+
+ _createClass(ReadableStreamDefaultController, [{
+ key: 'close',
+ value: function close() {
+ if (IsReadableStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('close');
+ }
+
+ if (this._closeRequested === true) {
+ throw new TypeError('The stream has already been closed; do not close it again!');
+ }
+
+ var state = this._controlledReadableStream._state;
+
+ if (state !== 'readable') {
+ throw new TypeError('The stream (in ' + state + ' state) is not in the readable state and cannot be closed');
+ }
+
+ ReadableStreamDefaultControllerClose(this);
+ }
+ }, {
+ key: 'enqueue',
+ value: function enqueue(chunk) {
+ if (IsReadableStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('enqueue');
+ }
+
+ if (this._closeRequested === true) {
+ throw new TypeError('stream is closed or draining');
+ }
+
+ var state = this._controlledReadableStream._state;
+
+ if (state !== 'readable') {
+ throw new TypeError('The stream (in ' + state + ' state) is not in the readable state and cannot be enqueued to');
+ }
+
+ return ReadableStreamDefaultControllerEnqueue(this, chunk);
+ }
+ }, {
+ key: 'error',
+ value: function error(e) {
+ if (IsReadableStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('error');
+ }
+
+ var stream = this._controlledReadableStream;
+
+ if (stream._state !== 'readable') {
+ throw new TypeError('The stream is ' + stream._state + ' and so cannot be errored');
+ }
+
+ ReadableStreamDefaultControllerError(this, e);
+ }
+ }, {
+ key: '__cancelSteps',
+ value: function __cancelSteps(reason) {
+ ResetQueue(this);
+ return PromiseInvokeOrNoop(this._underlyingSource, 'cancel', [reason]);
+ }
+ }, {
+ key: '__pullSteps',
+ value: function __pullSteps() {
+ var stream = this._controlledReadableStream;
+
+ if (this._queue.length > 0) {
+ var chunk = DequeueValue(this);
+
+ if (this._closeRequested === true && this._queue.length === 0) {
+ ReadableStreamClose(stream);
+ } else {
+ ReadableStreamDefaultControllerCallPullIfNeeded(this);
+ }
+
+ return Promise.resolve(CreateIterResultObject(chunk, false));
+ }
+
+ var pendingPromise = ReadableStreamAddReadRequest(stream);
+ ReadableStreamDefaultControllerCallPullIfNeeded(this);
+ return pendingPromise;
+ }
+ }, {
+ key: 'desiredSize',
+ get: function get() {
+ if (IsReadableStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('desiredSize');
+ }
+
+ return ReadableStreamDefaultControllerGetDesiredSize(this);
+ }
+ }]);
+
+ return ReadableStreamDefaultController;
+ }();
+
+ function IsReadableStreamDefaultController(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_underlyingSource')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function ReadableStreamDefaultControllerCallPullIfNeeded(controller) {
+ var shouldPull = ReadableStreamDefaultControllerShouldCallPull(controller);
+
+ if (shouldPull === false) {
+ return undefined;
+ }
+
+ if (controller._pulling === true) {
+ controller._pullAgain = true;
+ return undefined;
+ }
+
+ assert(controller._pullAgain === false);
+ controller._pulling = true;
+ var pullPromise = PromiseInvokeOrNoop(controller._underlyingSource, 'pull', [controller]);
+ pullPromise.then(function () {
+ controller._pulling = false;
+
+ if (controller._pullAgain === true) {
+ controller._pullAgain = false;
+ return ReadableStreamDefaultControllerCallPullIfNeeded(controller);
+ }
+
+ return undefined;
+ }, function (e) {
+ ReadableStreamDefaultControllerErrorIfNeeded(controller, e);
+ })["catch"](rethrowAssertionErrorRejection);
+ return undefined;
+ }
+
+ function ReadableStreamDefaultControllerShouldCallPull(controller) {
+ var stream = controller._controlledReadableStream;
+
+ if (stream._state === 'closed' || stream._state === 'errored') {
+ return false;
+ }
+
+ if (controller._closeRequested === true) {
+ return false;
+ }
+
+ if (controller._started === false) {
+ return false;
+ }
+
+ if (IsReadableStreamLocked(stream) === true && ReadableStreamGetNumReadRequests(stream) > 0) {
+ return true;
+ }
+
+ var desiredSize = ReadableStreamDefaultControllerGetDesiredSize(controller);
+
+ if (desiredSize > 0) {
+ return true;
+ }
+
+ return false;
+ }
+
+ function ReadableStreamDefaultControllerClose(controller) {
+ var stream = controller._controlledReadableStream;
+ assert(controller._closeRequested === false);
+ assert(stream._state === 'readable');
+ controller._closeRequested = true;
+
+ if (controller._queue.length === 0) {
+ ReadableStreamClose(stream);
+ }
+ }
+
+ function ReadableStreamDefaultControllerEnqueue(controller, chunk) {
+ var stream = controller._controlledReadableStream;
+ assert(controller._closeRequested === false);
+ assert(stream._state === 'readable');
+
+ if (IsReadableStreamLocked(stream) === true && ReadableStreamGetNumReadRequests(stream) > 0) {
+ ReadableStreamFulfillReadRequest(stream, chunk, false);
+ } else {
+ var chunkSize = 1;
+
+ if (controller._strategySize !== undefined) {
+ var strategySize = controller._strategySize;
+
+ try {
+ chunkSize = strategySize(chunk);
+ } catch (chunkSizeE) {
+ ReadableStreamDefaultControllerErrorIfNeeded(controller, chunkSizeE);
+ throw chunkSizeE;
+ }
+ }
+
+ try {
+ EnqueueValueWithSize(controller, chunk, chunkSize);
+ } catch (enqueueE) {
+ ReadableStreamDefaultControllerErrorIfNeeded(controller, enqueueE);
+ throw enqueueE;
+ }
+ }
+
+ ReadableStreamDefaultControllerCallPullIfNeeded(controller);
+ return undefined;
+ }
+
+ function ReadableStreamDefaultControllerError(controller, e) {
+ var stream = controller._controlledReadableStream;
+ assert(stream._state === 'readable');
+ ResetQueue(controller);
+ ReadableStreamError(stream, e);
+ }
+
+ function ReadableStreamDefaultControllerErrorIfNeeded(controller, e) {
+ if (controller._controlledReadableStream._state === 'readable') {
+ ReadableStreamDefaultControllerError(controller, e);
+ }
+ }
+
+ function ReadableStreamDefaultControllerGetDesiredSize(controller) {
+ var stream = controller._controlledReadableStream;
+ var state = stream._state;
+
+ if (state === 'errored') {
+ return null;
+ }
+
+ if (state === 'closed') {
+ return 0;
+ }
+
+ return controller._strategyHWM - controller._queueTotalSize;
+ }
+
+ var ReadableStreamBYOBRequest = function () {
+ function ReadableStreamBYOBRequest(controller, view) {
+ _classCallCheck(this, ReadableStreamBYOBRequest);
+
+ this._associatedReadableByteStreamController = controller;
+ this._view = view;
+ }
+
+ _createClass(ReadableStreamBYOBRequest, [{
+ key: 'respond',
+ value: function respond(bytesWritten) {
+ if (IsReadableStreamBYOBRequest(this) === false) {
+ throw byobRequestBrandCheckException('respond');
+ }
+
+ if (this._associatedReadableByteStreamController === undefined) {
+ throw new TypeError('This BYOB request has been invalidated');
+ }
+
+ ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten);
+ }
+ }, {
+ key: 'respondWithNewView',
+ value: function respondWithNewView(view) {
+ if (IsReadableStreamBYOBRequest(this) === false) {
+ throw byobRequestBrandCheckException('respondWithNewView');
+ }
+
+ if (this._associatedReadableByteStreamController === undefined) {
+ throw new TypeError('This BYOB request has been invalidated');
+ }
+
+ if (!ArrayBuffer.isView(view)) {
+ throw new TypeError('You can only respond with array buffer views');
+ }
+
+ ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view);
+ }
+ }, {
+ key: 'view',
+ get: function get() {
+ return this._view;
+ }
+ }]);
+
+ return ReadableStreamBYOBRequest;
+ }();
+
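+ // Byte stream controller: manages a queue of byte chunks, the pending BYOB
+ // pull-into descriptors, and optional auto-allocation for default read() calls.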
+ var ReadableByteStreamController = function () {
+ function ReadableByteStreamController(stream, underlyingByteSource, highWaterMark) {
+ _classCallCheck(this, ReadableByteStreamController);
+
+ if (IsReadableStream(stream) === false) {
+ throw new TypeError('ReadableByteStreamController can only be constructed with a ReadableStream instance given ' + 'a byte source');
+ }
+
+ if (stream._readableStreamController !== undefined) {
+ throw new TypeError('ReadableByteStreamController instances can only be created by the ReadableStream constructor given a byte ' + 'source');
+ }
+
+ this._controlledReadableStream = stream;
+ this._underlyingByteSource = underlyingByteSource;
+ this._pullAgain = false;
+ this._pulling = false;
+ ReadableByteStreamControllerClearPendingPullIntos(this);
+ this._queue = this._queueTotalSize = undefined;
+ ResetQueue(this);
+ this._closeRequested = false;
+ this._started = false;
+ this._strategyHWM = ValidateAndNormalizeHighWaterMark(highWaterMark);
+ var autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize;
+
+ if (autoAllocateChunkSize !== undefined) {
+ if (Number.isInteger(autoAllocateChunkSize) === false || autoAllocateChunkSize <= 0) {
+ throw new RangeError('autoAllocateChunkSize must be a positive integer');
+ }
+ }
+
+ this._autoAllocateChunkSize = autoAllocateChunkSize;
+ this._pendingPullIntos = [];
+ var controller = this;
+ var startResult = InvokeOrNoop(underlyingByteSource, 'start', [this]);
+ Promise.resolve(startResult).then(function () {
+ controller._started = true;
+ assert(controller._pulling === false);
+ assert(controller._pullAgain === false);
+ ReadableByteStreamControllerCallPullIfNeeded(controller);
+ }, function (r) {
+ if (stream._state === 'readable') {
+ ReadableByteStreamControllerError(controller, r);
+ }
+ })["catch"](rethrowAssertionErrorRejection);
+ }
+
+ _createClass(ReadableByteStreamController, [{
+ key: 'close',
+ value: function close() {
+ if (IsReadableByteStreamController(this) === false) {
+ throw byteStreamControllerBrandCheckException('close');
+ }
+
+ if (this._closeRequested === true) {
+ throw new TypeError('The stream has already been closed; do not close it again!');
+ }
+
+ var state = this._controlledReadableStream._state;
+
+ if (state !== 'readable') {
+ throw new TypeError('The stream (in ' + state + ' state) is not in the readable state and cannot be closed');
+ }
+
+ ReadableByteStreamControllerClose(this);
+ }
+ }, {
+ key: 'enqueue',
+ value: function enqueue(chunk) {
+ if (IsReadableByteStreamController(this) === false) {
+ throw byteStreamControllerBrandCheckException('enqueue');
+ }
+
+ if (this._closeRequested === true) {
+ throw new TypeError('stream is closed or draining');
+ }
+
+ var state = this._controlledReadableStream._state;
+
+ if (state !== 'readable') {
+ throw new TypeError('The stream (in ' + state + ' state) is not in the readable state and cannot be enqueued to');
+ }
+
+ if (!ArrayBuffer.isView(chunk)) {
+ throw new TypeError('You can only enqueue array buffer views when using a ReadableByteStreamController');
+ }
+
+ ReadableByteStreamControllerEnqueue(this, chunk);
+ }
+ }, {
+ key: 'error',
+ value: function error(e) {
+ if (IsReadableByteStreamController(this) === false) {
+ throw byteStreamControllerBrandCheckException('error');
+ }
+
+ var stream = this._controlledReadableStream;
+
+ if (stream._state !== 'readable') {
+ throw new TypeError('The stream is ' + stream._state + ' and so cannot be errored');
+ }
+
+ ReadableByteStreamControllerError(this, e);
+ }
+ }, {
+ key: '__cancelSteps',
+ value: function __cancelSteps(reason) {
+ if (this._pendingPullIntos.length > 0) {
+ var firstDescriptor = this._pendingPullIntos[0];
+ firstDescriptor.bytesFilled = 0;
+ }
+
+ ResetQueue(this);
+ return PromiseInvokeOrNoop(this._underlyingByteSource, 'cancel', [reason]);
+ }
+ }, {
+ key: '__pullSteps',
+ value: function __pullSteps() {
+ var stream = this._controlledReadableStream;
+ assert(ReadableStreamHasDefaultReader(stream) === true);
+
+ if (this._queueTotalSize > 0) {
+ assert(ReadableStreamGetNumReadRequests(stream) === 0);
+
+ var entry = this._queue.shift();
+
+ this._queueTotalSize -= entry.byteLength;
+ ReadableByteStreamControllerHandleQueueDrain(this);
+ var view = void 0;
+
+ try {
+ view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength);
+ } catch (viewE) {
+ return Promise.reject(viewE);
+ }
+
+ return Promise.resolve(CreateIterResultObject(view, false));
+ }
+
+ var autoAllocateChunkSize = this._autoAllocateChunkSize;
+
+ if (autoAllocateChunkSize !== undefined) {
+ var buffer = void 0;
+
+ try {
+ buffer = new ArrayBuffer(autoAllocateChunkSize);
+ } catch (bufferE) {
+ return Promise.reject(bufferE);
+ }
+
+ var pullIntoDescriptor = {
+ buffer: buffer,
+ byteOffset: 0,
+ byteLength: autoAllocateChunkSize,
+ bytesFilled: 0,
+ elementSize: 1,
+ ctor: Uint8Array,
+ readerType: 'default'
+ };
+
+ this._pendingPullIntos.push(pullIntoDescriptor);
+ }
+
+ var promise = ReadableStreamAddReadRequest(stream);
+ ReadableByteStreamControllerCallPullIfNeeded(this);
+ return promise;
+ }
+ }, {
+ key: 'byobRequest',
+ get: function get() {
+ if (IsReadableByteStreamController(this) === false) {
+ throw byteStreamControllerBrandCheckException('byobRequest');
+ }
+
+ if (this._byobRequest === undefined && this._pendingPullIntos.length > 0) {
+ var firstDescriptor = this._pendingPullIntos[0];
+ var view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled);
+ this._byobRequest = new ReadableStreamBYOBRequest(this, view);
+ }
+
+ return this._byobRequest;
+ }
+ }, {
+ key: 'desiredSize',
+ get: function get() {
+ if (IsReadableByteStreamController(this) === false) {
+ throw byteStreamControllerBrandCheckException('desiredSize');
+ }
+
+ return ReadableByteStreamControllerGetDesiredSize(this);
+ }
+ }]);
+
+ return ReadableByteStreamController;
+ }();
+
+ function IsReadableByteStreamController(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_underlyingByteSource')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function IsReadableStreamBYOBRequest(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_associatedReadableByteStreamController')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function ReadableByteStreamControllerCallPullIfNeeded(controller) {
+ var shouldPull = ReadableByteStreamControllerShouldCallPull(controller);
+
+ if (shouldPull === false) {
+ return undefined;
+ }
+
+ if (controller._pulling === true) {
+ controller._pullAgain = true;
+ return undefined;
+ }
+
+ assert(controller._pullAgain === false);
+ controller._pulling = true;
+ var pullPromise = PromiseInvokeOrNoop(controller._underlyingByteSource, 'pull', [controller]);
+ pullPromise.then(function () {
+ controller._pulling = false;
+
+ if (controller._pullAgain === true) {
+ controller._pullAgain = false;
+ ReadableByteStreamControllerCallPullIfNeeded(controller);
+ }
+ }, function (e) {
+ if (controller._controlledReadableStream._state === 'readable') {
+ ReadableByteStreamControllerError(controller, e);
+ }
+ })["catch"](rethrowAssertionErrorRejection);
+ return undefined;
+ }
+
+ function ReadableByteStreamControllerClearPendingPullIntos(controller) {
+ ReadableByteStreamControllerInvalidateBYOBRequest(controller);
+ controller._pendingPullIntos = [];
+ }
+
+ function ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor) {
+ assert(stream._state !== 'errored', 'state must not be errored');
+ var done = false;
+
+ if (stream._state === 'closed') {
+ assert(pullIntoDescriptor.bytesFilled === 0);
+ done = true;
+ }
+
+ var filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);
+
+ if (pullIntoDescriptor.readerType === 'default') {
+ ReadableStreamFulfillReadRequest(stream, filledView, done);
+ } else {
+ assert(pullIntoDescriptor.readerType === 'byob');
+ ReadableStreamFulfillReadIntoRequest(stream, filledView, done);
+ }
+ }
+
+ function ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor) {
+ var bytesFilled = pullIntoDescriptor.bytesFilled;
+ var elementSize = pullIntoDescriptor.elementSize;
+ assert(bytesFilled <= pullIntoDescriptor.byteLength);
+ assert(bytesFilled % elementSize === 0);
+ return new pullIntoDescriptor.ctor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, bytesFilled / elementSize);
+ }
+
+ function ReadableByteStreamControllerEnqueueChunkToQueue(controller, buffer, byteOffset, byteLength) {
+ controller._queue.push({
+ buffer: buffer,
+ byteOffset: byteOffset,
+ byteLength: byteLength
+ });
+
+ controller._queueTotalSize += byteLength;
+ }
+
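+ // Copies queued bytes into the head pull-into descriptor, respecting the
+ // descriptor's element size; returns true once the fill reaches a new
+ // element-aligned boundary.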
+ function ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) {
+ var elementSize = pullIntoDescriptor.elementSize;
+ var currentAlignedBytes = pullIntoDescriptor.bytesFilled - pullIntoDescriptor.bytesFilled % elementSize;
+ var maxBytesToCopy = Math.min(controller._queueTotalSize, pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled);
+ var maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy;
+ var maxAlignedBytes = maxBytesFilled - maxBytesFilled % elementSize;
+ var totalBytesToCopyRemaining = maxBytesToCopy;
+ var ready = false;
+
+ if (maxAlignedBytes > currentAlignedBytes) {
+ totalBytesToCopyRemaining = maxAlignedBytes - pullIntoDescriptor.bytesFilled;
+ ready = true;
+ }
+
+ var queue = controller._queue;
+
+ while (totalBytesToCopyRemaining > 0) {
+ var headOfQueue = queue[0];
+ var bytesToCopy = Math.min(totalBytesToCopyRemaining, headOfQueue.byteLength);
+ var destStart = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;
+ ArrayBufferCopy(pullIntoDescriptor.buffer, destStart, headOfQueue.buffer, headOfQueue.byteOffset, bytesToCopy);
+
+ if (headOfQueue.byteLength === bytesToCopy) {
+ queue.shift();
+ } else {
+ headOfQueue.byteOffset += bytesToCopy;
+ headOfQueue.byteLength -= bytesToCopy;
+ }
+
+ controller._queueTotalSize -= bytesToCopy;
+ ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesToCopy, pullIntoDescriptor);
+ totalBytesToCopyRemaining -= bytesToCopy;
+ }
+
+ if (ready === false) {
+ assert(controller._queueTotalSize === 0, 'queue must be empty');
+ assert(pullIntoDescriptor.bytesFilled > 0);
+ assert(pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize);
+ }
+
+ return ready;
+ }
+
+ function ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, size, pullIntoDescriptor) {
+ assert(controller._pendingPullIntos.length === 0 || controller._pendingPullIntos[0] === pullIntoDescriptor);
+ ReadableByteStreamControllerInvalidateBYOBRequest(controller);
+ pullIntoDescriptor.bytesFilled += size;
+ }
+
+ function ReadableByteStreamControllerHandleQueueDrain(controller) {
+ assert(controller._controlledReadableStream._state === 'readable');
+
+ if (controller._queueTotalSize === 0 && controller._closeRequested === true) {
+ ReadableStreamClose(controller._controlledReadableStream);
+ } else {
+ ReadableByteStreamControllerCallPullIfNeeded(controller);
+ }
+ }
+
+ function ReadableByteStreamControllerInvalidateBYOBRequest(controller) {
+ if (controller._byobRequest === undefined) {
+ return;
+ }
+
+ controller._byobRequest._associatedReadableByteStreamController = undefined;
+ controller._byobRequest._view = undefined;
+ controller._byobRequest = undefined;
+ }
+
+ function ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller) {
+ assert(controller._closeRequested === false);
+
+ while (controller._pendingPullIntos.length > 0) {
+ if (controller._queueTotalSize === 0) {
+ return;
+ }
+
+ var pullIntoDescriptor = controller._pendingPullIntos[0];
+
+ if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) === true) {
+ ReadableByteStreamControllerShiftPendingPullInto(controller);
+ ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableStream, pullIntoDescriptor);
+ }
+ }
+ }
+
+ function ReadableByteStreamControllerPullInto(controller, view) {
+ var stream = controller._controlledReadableStream;
+ var elementSize = 1;
+
+ if (view.constructor !== DataView) {
+ elementSize = view.constructor.BYTES_PER_ELEMENT;
+ }
+
+ var ctor = view.constructor;
+ var pullIntoDescriptor = {
+ buffer: view.buffer,
+ byteOffset: view.byteOffset,
+ byteLength: view.byteLength,
+ bytesFilled: 0,
+ elementSize: elementSize,
+ ctor: ctor,
+ readerType: 'byob'
+ };
+
+ if (controller._pendingPullIntos.length > 0) {
+ pullIntoDescriptor.buffer = TransferArrayBuffer(pullIntoDescriptor.buffer);
+
+ controller._pendingPullIntos.push(pullIntoDescriptor);
+
+ return ReadableStreamAddReadIntoRequest(stream);
+ }
+
+ if (stream._state === 'closed') {
+ var emptyView = new view.constructor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, 0);
+ return Promise.resolve(CreateIterResultObject(emptyView, true));
+ }
+
+ if (controller._queueTotalSize > 0) {
+ if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) === true) {
+ var filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);
+ ReadableByteStreamControllerHandleQueueDrain(controller);
+ return Promise.resolve(CreateIterResultObject(filledView, false));
+ }
+
+ if (controller._closeRequested === true) {
+ var e = new TypeError('Insufficient bytes to fill elements in the given buffer');
+ ReadableByteStreamControllerError(controller, e);
+ return Promise.reject(e);
+ }
+ }
+
+ pullIntoDescriptor.buffer = TransferArrayBuffer(pullIntoDescriptor.buffer);
+
+ controller._pendingPullIntos.push(pullIntoDescriptor);
+
+ var promise = ReadableStreamAddReadIntoRequest(stream);
+ ReadableByteStreamControllerCallPullIfNeeded(controller);
+ return promise;
+ }
+
+ function ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor) {
+ firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer);
+ assert(firstDescriptor.bytesFilled === 0, 'bytesFilled must be 0');
+ var stream = controller._controlledReadableStream;
+
+ if (ReadableStreamHasBYOBReader(stream) === true) {
+ while (ReadableStreamGetNumReadIntoRequests(stream) > 0) {
+ var pullIntoDescriptor = ReadableByteStreamControllerShiftPendingPullInto(controller);
+ ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor);
+ }
+ }
+ }
+
+ function ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor) {
+ if (pullIntoDescriptor.bytesFilled + bytesWritten > pullIntoDescriptor.byteLength) {
+ throw new RangeError('bytesWritten out of range');
+ }
+
+ ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesWritten, pullIntoDescriptor);
+
+ if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize) {
+ return;
+ }
+
+ ReadableByteStreamControllerShiftPendingPullInto(controller);
+ var remainderSize = pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize;
+
+ if (remainderSize > 0) {
+ var end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;
+ var remainder = pullIntoDescriptor.buffer.slice(end - remainderSize, end);
+ ReadableByteStreamControllerEnqueueChunkToQueue(controller, remainder, 0, remainder.byteLength);
+ }
+
+ pullIntoDescriptor.buffer = TransferArrayBuffer(pullIntoDescriptor.buffer);
+ pullIntoDescriptor.bytesFilled -= remainderSize;
+ ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableStream, pullIntoDescriptor);
+ ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);
+ }
+
+ function ReadableByteStreamControllerRespondInternal(controller, bytesWritten) {
+ var firstDescriptor = controller._pendingPullIntos[0];
+ var stream = controller._controlledReadableStream;
+
+ if (stream._state === 'closed') {
+ if (bytesWritten !== 0) {
+ throw new TypeError('bytesWritten must be 0 when calling respond() on a closed stream');
+ }
+
+ ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor);
+ } else {
+ assert(stream._state === 'readable');
+ ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, firstDescriptor);
+ }
+ }
+
+ function ReadableByteStreamControllerShiftPendingPullInto(controller) {
+ var descriptor = controller._pendingPullIntos.shift();
+
+ ReadableByteStreamControllerInvalidateBYOBRequest(controller);
+ return descriptor;
+ }
+
+ function ReadableByteStreamControllerShouldCallPull(controller) {
+ var stream = controller._controlledReadableStream;
+
+ if (stream._state !== 'readable') {
+ return false;
+ }
+
+ if (controller._closeRequested === true) {
+ return false;
+ }
+
+ if (controller._started === false) {
+ return false;
+ }
+
+ if (ReadableStreamHasDefaultReader(stream) === true && ReadableStreamGetNumReadRequests(stream) > 0) {
+ return true;
+ }
+
+ if (ReadableStreamHasBYOBReader(stream) === true && ReadableStreamGetNumReadIntoRequests(stream) > 0) {
+ return true;
+ }
+
+ if (ReadableByteStreamControllerGetDesiredSize(controller) > 0) {
+ return true;
+ }
+
+ return false;
+ }
+
+ function ReadableByteStreamControllerClose(controller) {
+ var stream = controller._controlledReadableStream;
+ assert(controller._closeRequested === false);
+ assert(stream._state === 'readable');
+
+ if (controller._queueTotalSize > 0) {
+ controller._closeRequested = true;
+ return;
+ }
+
+ if (controller._pendingPullIntos.length > 0) {
+ var firstPendingPullInto = controller._pendingPullIntos[0];
+
+ if (firstPendingPullInto.bytesFilled > 0) {
+ var e = new TypeError('Insufficient bytes to fill elements in the given buffer');
+ ReadableByteStreamControllerError(controller, e);
+ throw e;
+ }
+ }
+
+ ReadableStreamClose(stream);
+ }
+
+ function ReadableByteStreamControllerEnqueue(controller, chunk) {
+ var stream = controller._controlledReadableStream;
+ assert(controller._closeRequested === false);
+ assert(stream._state === 'readable');
+ var buffer = chunk.buffer;
+ var byteOffset = chunk.byteOffset;
+ var byteLength = chunk.byteLength;
+ var transferredBuffer = TransferArrayBuffer(buffer);
+
+ if (ReadableStreamHasDefaultReader(stream) === true) {
+ if (ReadableStreamGetNumReadRequests(stream) === 0) {
+ ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);
+ } else {
+ assert(controller._queue.length === 0);
+ var transferredView = new Uint8Array(transferredBuffer, byteOffset, byteLength);
+ ReadableStreamFulfillReadRequest(stream, transferredView, false);
+ }
+ } else if (ReadableStreamHasBYOBReader(stream) === true) {
+ ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);
+ ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);
+ } else {
+ assert(IsReadableStreamLocked(stream) === false, 'stream must not be locked');
+ ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);
+ }
+ }
+
+ function ReadableByteStreamControllerError(controller, e) {
+ var stream = controller._controlledReadableStream;
+ assert(stream._state === 'readable');
+ ReadableByteStreamControllerClearPendingPullIntos(controller);
+ ResetQueue(controller);
+ ReadableStreamError(stream, e);
+ }
+
+ function ReadableByteStreamControllerGetDesiredSize(controller) {
+ var stream = controller._controlledReadableStream;
+ var state = stream._state;
+
+ if (state === 'errored') {
+ return null;
+ }
+
+ if (state === 'closed') {
+ return 0;
+ }
+
+ return controller._strategyHWM - controller._queueTotalSize;
+ }
+
+ function ReadableByteStreamControllerRespond(controller, bytesWritten) {
+ bytesWritten = Number(bytesWritten);
+
+ if (IsFiniteNonNegativeNumber(bytesWritten) === false) {
+ throw new RangeError('bytesWritten must be a finite number');
+ }
+
+ assert(controller._pendingPullIntos.length > 0);
+ ReadableByteStreamControllerRespondInternal(controller, bytesWritten);
+ }
+
+ function ReadableByteStreamControllerRespondWithNewView(controller, view) {
+ assert(controller._pendingPullIntos.length > 0);
+ var firstDescriptor = controller._pendingPullIntos[0];
+
+ if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) {
+ throw new RangeError('The region specified by view does not match byobRequest');
+ }
+
+ if (firstDescriptor.byteLength !== view.byteLength) {
+ throw new RangeError('The buffer of view has different capacity than byobRequest');
+ }
+
+ firstDescriptor.buffer = view.buffer;
+ ReadableByteStreamControllerRespondInternal(controller, view.byteLength);
+ }
+
+ function streamBrandCheckException(name) {
+ return new TypeError('ReadableStream.prototype.' + name + ' can only be used on a ReadableStream');
+ }
+
+ function readerLockException(name) {
+ return new TypeError('Cannot ' + name + ' a stream using a released reader');
+ }
+
+ function defaultReaderBrandCheckException(name) {
+ return new TypeError('ReadableStreamDefaultReader.prototype.' + name + ' can only be used on a ReadableStreamDefaultReader');
+ }
+
+ function defaultReaderClosedPromiseInitialize(reader) {
+ reader._closedPromise = new Promise(function (resolve, reject) {
+ reader._closedPromise_resolve = resolve;
+ reader._closedPromise_reject = reject;
+ });
+ }
+
+ function defaultReaderClosedPromiseInitializeAsRejected(reader, reason) {
+ reader._closedPromise = Promise.reject(reason);
+ reader._closedPromise_resolve = undefined;
+ reader._closedPromise_reject = undefined;
+ }
+
+ function defaultReaderClosedPromiseInitializeAsResolved(reader) {
+ reader._closedPromise = Promise.resolve(undefined);
+ reader._closedPromise_resolve = undefined;
+ reader._closedPromise_reject = undefined;
+ }
+
+ function defaultReaderClosedPromiseReject(reader, reason) {
+ assert(reader._closedPromise_resolve !== undefined);
+ assert(reader._closedPromise_reject !== undefined);
+
+ reader._closedPromise_reject(reason);
+
+ reader._closedPromise_resolve = undefined;
+ reader._closedPromise_reject = undefined;
+ }
+
+ function defaultReaderClosedPromiseResetToRejected(reader, reason) {
+ assert(reader._closedPromise_resolve === undefined);
+ assert(reader._closedPromise_reject === undefined);
+ reader._closedPromise = Promise.reject(reason);
+ }
+
+ function defaultReaderClosedPromiseResolve(reader) {
+ assert(reader._closedPromise_resolve !== undefined);
+ assert(reader._closedPromise_reject !== undefined);
+
+ reader._closedPromise_resolve(undefined);
+
+ reader._closedPromise_resolve = undefined;
+ reader._closedPromise_reject = undefined;
+ }
+
+ function byobReaderBrandCheckException(name) {
+ return new TypeError('ReadableStreamBYOBReader.prototype.' + name + ' can only be used on a ReadableStreamBYOBReader');
+ }
+
+ function defaultControllerBrandCheckException(name) {
+ return new TypeError('ReadableStreamDefaultController.prototype.' + name + ' can only be used on a ReadableStreamDefaultController');
+ }
+
+ function byobRequestBrandCheckException(name) {
+ return new TypeError('ReadableStreamBYOBRequest.prototype.' + name + ' can only be used on a ReadableStreamBYOBRequest');
+ }
+
+ function byteStreamControllerBrandCheckException(name) {
+ return new TypeError('ReadableByteStreamController.prototype.' + name + ' can only be used on a ReadableByteStreamController');
+ }
+
+ function ifIsObjectAndHasAPromiseIsHandledInternalSlotSetPromiseIsHandledToTrue(promise) {
+ try {
+ Promise.prototype.then.call(promise, undefined, function () {});
+ } catch (e) {}
+ }
+}, function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
+ var transformStream = __w_pdfjs_require__(6);
+
+ var readableStream = __w_pdfjs_require__(4);
+
+ var writableStream = __w_pdfjs_require__(2);
+
+ exports.TransformStream = transformStream.TransformStream;
+ exports.ReadableStream = readableStream.ReadableStream;
+ exports.IsReadableStreamDisturbed = readableStream.IsReadableStreamDisturbed;
+ exports.ReadableStreamDefaultControllerClose = readableStream.ReadableStreamDefaultControllerClose;
+ exports.ReadableStreamDefaultControllerEnqueue = readableStream.ReadableStreamDefaultControllerEnqueue;
+ exports.ReadableStreamDefaultControllerError = readableStream.ReadableStreamDefaultControllerError;
+ exports.ReadableStreamDefaultControllerGetDesiredSize = readableStream.ReadableStreamDefaultControllerGetDesiredSize;
+ exports.AcquireWritableStreamDefaultWriter = writableStream.AcquireWritableStreamDefaultWriter;
+ exports.IsWritableStream = writableStream.IsWritableStream;
+ exports.IsWritableStreamLocked = writableStream.IsWritableStreamLocked;
+ exports.WritableStream = writableStream.WritableStream;
+ exports.WritableStreamAbort = writableStream.WritableStreamAbort;
+ exports.WritableStreamDefaultControllerError = writableStream.WritableStreamDefaultControllerError;
+ exports.WritableStreamDefaultWriterCloseWithErrorPropagation = writableStream.WritableStreamDefaultWriterCloseWithErrorPropagation;
+ exports.WritableStreamDefaultWriterRelease = writableStream.WritableStreamDefaultWriterRelease;
+ exports.WritableStreamDefaultWriterWrite = writableStream.WritableStreamDefaultWriterWrite;
+}, function (module, exports, __w_pdfjs_require__) {
+ "use strict";
+
+ var _createClass = function () {
+ function defineProperties(target, props) {
+ for (var i = 0; i < props.length; i++) {
+ var descriptor = props[i];
+ descriptor.enumerable = descriptor.enumerable || false;
+ descriptor.configurable = true;
+ if ("value" in descriptor) descriptor.writable = true;
+ Object.defineProperty(target, descriptor.key, descriptor);
+ }
+ }
+
+ return function (Constructor, protoProps, staticProps) {
+ if (protoProps) defineProperties(Constructor.prototype, protoProps);
+ if (staticProps) defineProperties(Constructor, staticProps);
+ return Constructor;
+ };
+ }();
+
+ function _classCallCheck(instance, Constructor) {
+ if (!(instance instanceof Constructor)) {
+ throw new TypeError("Cannot call a class as a function");
+ }
+ }
+
+ var _require = __w_pdfjs_require__(1),
+ assert = _require.assert;
+
+ var _require2 = __w_pdfjs_require__(0),
+ InvokeOrNoop = _require2.InvokeOrNoop,
+ PromiseInvokeOrPerformFallback = _require2.PromiseInvokeOrPerformFallback,
+ PromiseInvokeOrNoop = _require2.PromiseInvokeOrNoop,
+ typeIsObject = _require2.typeIsObject;
+
+ var _require3 = __w_pdfjs_require__(4),
+ ReadableStream = _require3.ReadableStream,
+ ReadableStreamDefaultControllerClose = _require3.ReadableStreamDefaultControllerClose,
+ ReadableStreamDefaultControllerEnqueue = _require3.ReadableStreamDefaultControllerEnqueue,
+ ReadableStreamDefaultControllerError = _require3.ReadableStreamDefaultControllerError,
+ ReadableStreamDefaultControllerGetDesiredSize = _require3.ReadableStreamDefaultControllerGetDesiredSize;
+
+ var _require4 = __w_pdfjs_require__(2),
+ WritableStream = _require4.WritableStream,
+ WritableStreamDefaultControllerError = _require4.WritableStreamDefaultControllerError;
+
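+ // TransformStream abstract operations: chunks written to the writable side are
+ // transformed and enqueued on the readable side, with backpressure propagated
+ // between the two via _backpressureChangePromise.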
+ function TransformStreamCloseReadable(transformStream) {
+ if (transformStream._errored === true) {
+ throw new TypeError('TransformStream is already errored');
+ }
+
+ if (transformStream._readableClosed === true) {
+ throw new TypeError('Readable side is already closed');
+ }
+
+ TransformStreamCloseReadableInternal(transformStream);
+ }
+
+ function TransformStreamEnqueueToReadable(transformStream, chunk) {
+ if (transformStream._errored === true) {
+ throw new TypeError('TransformStream is already errored');
+ }
+
+ if (transformStream._readableClosed === true) {
+ throw new TypeError('Readable side is already closed');
+ }
+
+ var controller = transformStream._readableController;
+
+ try {
+ ReadableStreamDefaultControllerEnqueue(controller, chunk);
+ } catch (e) {
+ transformStream._readableClosed = true;
+ TransformStreamErrorIfNeeded(transformStream, e);
+ throw transformStream._storedError;
+ }
+
+ var desiredSize = ReadableStreamDefaultControllerGetDesiredSize(controller);
+ var maybeBackpressure = desiredSize <= 0;
+
+ if (maybeBackpressure === true && transformStream._backpressure === false) {
+ TransformStreamSetBackpressure(transformStream, true);
+ }
+ }
+
+ function TransformStreamError(transformStream, e) {
+ if (transformStream._errored === true) {
+ throw new TypeError('TransformStream is already errored');
+ }
+
+ TransformStreamErrorInternal(transformStream, e);
+ }
+
+ function TransformStreamCloseReadableInternal(transformStream) {
+ assert(transformStream._errored === false);
+ assert(transformStream._readableClosed === false);
+
+ try {
+ ReadableStreamDefaultControllerClose(transformStream._readableController);
+ } catch (e) {
+ assert(false);
+ }
+
+ transformStream._readableClosed = true;
+ }
+
+ function TransformStreamErrorIfNeeded(transformStream, e) {
+ if (transformStream._errored === false) {
+ TransformStreamErrorInternal(transformStream, e);
+ }
+ }
+
+ function TransformStreamErrorInternal(transformStream, e) {
+ assert(transformStream._errored === false);
+ transformStream._errored = true;
+ transformStream._storedError = e;
+
+ if (transformStream._writableDone === false) {
+ WritableStreamDefaultControllerError(transformStream._writableController, e);
+ }
+
+ if (transformStream._readableClosed === false) {
+ ReadableStreamDefaultControllerError(transformStream._readableController, e);
+ }
+ }
+
+ function TransformStreamReadableReadyPromise(transformStream) {
+ assert(transformStream._backpressureChangePromise !== undefined, '_backpressureChangePromise should have been initialized');
+
+ if (transformStream._backpressure === false) {
+ return Promise.resolve();
+ }
+
+ assert(transformStream._backpressure === true, '_backpressure should have been initialized');
+ return transformStream._backpressureChangePromise;
+ }
+
+ function TransformStreamSetBackpressure(transformStream, backpressure) {
+ assert(transformStream._backpressure !== backpressure, 'TransformStreamSetBackpressure() should be called only when backpressure is changed');
+
+ if (transformStream._backpressureChangePromise !== undefined) {
+ transformStream._backpressureChangePromise_resolve(backpressure);
+ }
+
+ transformStream._backpressureChangePromise = new Promise(function (resolve) {
+ transformStream._backpressureChangePromise_resolve = resolve;
+ });
+
+ transformStream._backpressureChangePromise.then(function (resolution) {
+ assert(resolution !== backpressure, '_backpressureChangePromise should be fulfilled only when backpressure is changed');
+ });
+
+ transformStream._backpressure = backpressure;
+ }
+
+ function TransformStreamDefaultTransform(chunk, transformStreamController) {
+ var transformStream = transformStreamController._controlledTransformStream;
+ TransformStreamEnqueueToReadable(transformStream, chunk);
+ return Promise.resolve();
+ }
+
+ function TransformStreamTransform(transformStream, chunk) {
+ assert(transformStream._errored === false);
+ assert(transformStream._transforming === false);
+ assert(transformStream._backpressure === false);
+ transformStream._transforming = true;
+ var transformer = transformStream._transformer;
+ var controller = transformStream._transformStreamController;
+ var transformPromise = PromiseInvokeOrPerformFallback(transformer, 'transform', [chunk, controller], TransformStreamDefaultTransform, [chunk, controller]);
+ return transformPromise.then(function () {
+ transformStream._transforming = false;
+ return TransformStreamReadableReadyPromise(transformStream);
+ }, function (e) {
+ TransformStreamErrorIfNeeded(transformStream, e);
+ return Promise.reject(e);
+ });
+ }
+
+ function IsTransformStreamDefaultController(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_controlledTransformStream')) {
+ return false;
+ }
+
+ return true;
+ }
+
+ function IsTransformStream(x) {
+ if (!typeIsObject(x)) {
+ return false;
+ }
+
+ if (!Object.prototype.hasOwnProperty.call(x, '_transformStreamController')) {
+ return false;
+ }
+
+ return true;
+ }
+
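+ // Underlying sink for the writable side: write() runs the transformer on each
+ // chunk, close() runs flush() and then closes the readable side, and abort()
+ // errors the whole TransformStream.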
+ var TransformStreamSink = function () {
+ function TransformStreamSink(transformStream, startPromise) {
+ _classCallCheck(this, TransformStreamSink);
+
+ this._transformStream = transformStream;
+ this._startPromise = startPromise;
+ }
+
+ _createClass(TransformStreamSink, [{
+ key: 'start',
+ value: function start(c) {
+ var transformStream = this._transformStream;
+ transformStream._writableController = c;
+ return this._startPromise.then(function () {
+ return TransformStreamReadableReadyPromise(transformStream);
+ });
+ }
+ }, {
+ key: 'write',
+ value: function write(chunk) {
+ var transformStream = this._transformStream;
+ return TransformStreamTransform(transformStream, chunk);
+ }
+ }, {
+ key: 'abort',
+ value: function abort() {
+ var transformStream = this._transformStream;
+ transformStream._writableDone = true;
+ TransformStreamErrorInternal(transformStream, new TypeError('Writable side aborted'));
+ }
+ }, {
+ key: 'close',
+ value: function close() {
+ var transformStream = this._transformStream;
+ assert(transformStream._transforming === false);
+ transformStream._writableDone = true;
+ var flushPromise = PromiseInvokeOrNoop(transformStream._transformer, 'flush', [transformStream._transformStreamController]);
+ return flushPromise.then(function () {
+ if (transformStream._errored === true) {
+ return Promise.reject(transformStream._storedError);
+ }
+
+ if (transformStream._readableClosed === false) {
+ TransformStreamCloseReadableInternal(transformStream);
+ }
+
+ return Promise.resolve();
+ })["catch"](function (r) {
+ TransformStreamErrorIfNeeded(transformStream, r);
+ return Promise.reject(transformStream._storedError);
+ });
+ }
+ }]);
+
+ return TransformStreamSink;
+ }();
+
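+ // Underlying source for the readable side: pull() releases backpressure so the
+ // writable side can accept another chunk; cancel() errors the TransformStream.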
+ var TransformStreamSource = function () {
+ function TransformStreamSource(transformStream, startPromise) {
+ _classCallCheck(this, TransformStreamSource);
+
+ this._transformStream = transformStream;
+ this._startPromise = startPromise;
+ }
+
+ _createClass(TransformStreamSource, [{
+ key: 'start',
+ value: function start(c) {
+ var transformStream = this._transformStream;
+ transformStream._readableController = c;
+ return this._startPromise.then(function () {
+ assert(transformStream._backpressureChangePromise !== undefined, '_backpressureChangePromise should have been initialized');
+
+ if (transformStream._backpressure === true) {
+ return Promise.resolve();
+ }
+
+ assert(transformStream._backpressure === false, '_backpressure should have been initialized');
+ return transformStream._backpressureChangePromise;
+ });
+ }
+ }, {
+ key: 'pull',
+ value: function pull() {
+ var transformStream = this._transformStream;
+ assert(transformStream._backpressure === true, 'pull() should be never called while _backpressure is false');
+ assert(transformStream._backpressureChangePromise !== undefined, '_backpressureChangePromise should have been initialized');
+ TransformStreamSetBackpressure(transformStream, false);
+ return transformStream._backpressureChangePromise;
+ }
+ }, {
+ key: 'cancel',
+ value: function cancel() {
+ var transformStream = this._transformStream;
+ transformStream._readableClosed = true;
+ TransformStreamErrorInternal(transformStream, new TypeError('Readable side canceled'));
+ }
+ }]);
+
+ return TransformStreamSource;
+ }();
+
+ var TransformStreamDefaultController = function () {
+ function TransformStreamDefaultController(transformStream) {
+ _classCallCheck(this, TransformStreamDefaultController);
+
+ if (IsTransformStream(transformStream) === false) {
+ throw new TypeError('TransformStreamDefaultController can only be ' + 'constructed with a TransformStream instance');
+ }
+
+ if (transformStream._transformStreamController !== undefined) {
+ throw new TypeError('TransformStreamDefaultController instances can ' + 'only be created by the TransformStream constructor');
+ }
+
+ this._controlledTransformStream = transformStream;
+ }
+
+ _createClass(TransformStreamDefaultController, [{
+ key: 'enqueue',
+ value: function enqueue(chunk) {
+ if (IsTransformStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('enqueue');
+ }
+
+ TransformStreamEnqueueToReadable(this._controlledTransformStream, chunk);
+ }
+ }, {
+ key: 'close',
+ value: function close() {
+ if (IsTransformStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('close');
+ }
+
+ TransformStreamCloseReadable(this._controlledTransformStream);
+ }
+ }, {
+ key: 'error',
+ value: function error(reason) {
+ if (IsTransformStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('error');
+ }
+
+ TransformStreamError(this._controlledTransformStream, reason);
+ }
+ }, {
+ key: 'desiredSize',
+ get: function get() {
+ if (IsTransformStreamDefaultController(this) === false) {
+ throw defaultControllerBrandCheckException('desiredSize');
+ }
+
+ var transformStream = this._controlledTransformStream;
+ var readableController = transformStream._readableController;
+ return ReadableStreamDefaultControllerGetDesiredSize(readableController);
+ }
+ }]);
+
+ return TransformStreamDefaultController;
+ }();
+
+ var TransformStream = function () {
+ function TransformStream() {
+ var transformer = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
+
+ _classCallCheck(this, TransformStream);
+
+ this._transformer = transformer;
+ var readableStrategy = transformer.readableStrategy,
+ writableStrategy = transformer.writableStrategy;
+ this._transforming = false;
+ this._errored = false;
+ this._storedError = undefined;
+ this._writableController = undefined;
+ this._readableController = undefined;
+ this._transformStreamController = undefined;
+ this._writableDone = false;
+ this._readableClosed = false;
+ this._backpressure = undefined;
+ this._backpressureChangePromise = undefined;
+ this._backpressureChangePromise_resolve = undefined;
+ this._transformStreamController = new TransformStreamDefaultController(this);
+ var startPromise_resolve = void 0;
+ var startPromise = new Promise(function (resolve) {
+ startPromise_resolve = resolve;
+ });
+ var source = new TransformStreamSource(this, startPromise);
+ this._readable = new ReadableStream(source, readableStrategy);
+ var sink = new TransformStreamSink(this, startPromise);
+ this._writable = new WritableStream(sink, writableStrategy);
+ assert(this._writableController !== undefined);
+ assert(this._readableController !== undefined);
+ var desiredSize = ReadableStreamDefaultControllerGetDesiredSize(this._readableController);
+ TransformStreamSetBackpressure(this, desiredSize <= 0);
+ var transformStream = this;
+ var startResult = InvokeOrNoop(transformer, 'start', [transformStream._transformStreamController]);
+ startPromise_resolve(startResult);
+ startPromise["catch"](function (e) {
+ if (transformStream._errored === false) {
+ transformStream._errored = true;
+ transformStream._storedError = e;
+ }
+ });
+ }
+
+ _createClass(TransformStream, [{
+ key: 'readable',
+ get: function get() {
+ if (IsTransformStream(this) === false) {
+ throw streamBrandCheckException('readable');
+ }
+
+ return this._readable;
+ }
+ }, {
+ key: 'writable',
+ get: function get() {
+ if (IsTransformStream(this) === false) {
+ throw streamBrandCheckException('writable');
+ }
+
+ return this._writable;
+ }
+ }]);
+
+ return TransformStream;
+ }();
+
+ module.exports = {
+ TransformStream: TransformStream
+ };
+
+ function defaultControllerBrandCheckException(name) {
+ return new TypeError('TransformStreamDefaultController.prototype.' + name + ' can only be used on a TransformStreamDefaultController');
+ }
+
+ function streamBrandCheckException(name) {
+ return new TypeError('TransformStream.prototype.' + name + ' can only be used on a TransformStream');
+ }
+}, function (module, exports, __w_pdfjs_require__) {
+ module.exports = __w_pdfjs_require__(5);
+}]));
+
+/***/ }),
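+// Usage sketch for the TransformStream polyfill above (illustrative only; it assumes
+// the companion ReadableStream/WritableStream polyfills bundled alongside, and that
+// the transformer's transform(chunk, controller) hook is invoked as in the reference
+// Streams implementation):
+//
+//   var upperCaser = new TransformStream({
+//     transform: function (chunk, controller) {
+//       controller.enqueue(String(chunk).toUpperCase());
+//     }
+//   });
+//   var writer = upperCaser.writable.getWriter();
+//   var reader = upperCaser.readable.getReader();
+//   writer.write('pdf');
+//   reader.read().then(function (result) { /* result.value === 'PDF' */ });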
+/* 149 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+{
+ var isURLSupported = false;
+
+ try {
+ if (typeof URL === 'function' && _typeof(URL.prototype) === 'object' && 'origin' in URL.prototype) {
+ var u = new URL('b', 'http://a');
+ u.pathname = 'c%20d';
+ isURLSupported = u.href === 'http://a/c%20d';
+ }
+ } catch (ex) {}
+
+ if (isURLSupported) {
+ exports.URL = URL;
+ } else {
+ var PolyfillURL = __w_pdfjs_require__(150).URL;
+
+ var OriginalURL = __w_pdfjs_require__(7).URL;
+
+ if (OriginalURL) {
+ PolyfillURL.createObjectURL = function (blob) {
+ return OriginalURL.createObjectURL.apply(OriginalURL, arguments);
+ };
+
+ PolyfillURL.revokeObjectURL = function (url) {
+ OriginalURL.revokeObjectURL(url);
+ };
+ }
+
+ exports.URL = PolyfillURL;
+ }
+}
+
+/***/ }),
+/* 150 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+(function URLConstructorClosure() {
+ 'use strict';
+
+ var relative = Object.create(null);
+ relative['ftp'] = 21;
+ relative['file'] = 0;
+ relative['gopher'] = 70;
+ relative['http'] = 80;
+ relative['https'] = 443;
+ relative['ws'] = 80;
+ relative['wss'] = 443;
+ var relativePathDotMapping = Object.create(null);
+ relativePathDotMapping['%2e'] = '.';
+ relativePathDotMapping['.%2e'] = '..';
+ relativePathDotMapping['%2e.'] = '..';
+ relativePathDotMapping['%2e%2e'] = '..';
+
+ function isRelativeScheme(scheme) {
+ return relative[scheme] !== undefined;
+ }
+
+ function invalid() {
+ clear.call(this);
+ this._isInvalid = true;
+ }
+
+ function IDNAToASCII(h) {
+ if (h === '') {
+ invalid.call(this);
+ }
+
+ return h.toLowerCase();
+ }
+
+ function percentEscape(c) {
+ var unicode = c.charCodeAt(0);
+
+ if (unicode > 0x20 && unicode < 0x7F && [0x22, 0x23, 0x3C, 0x3E, 0x3F, 0x60].indexOf(unicode) === -1) {
+ return c;
+ }
+
+ return encodeURIComponent(c);
+ }
+
+ function percentEscapeQuery(c) {
+ var unicode = c.charCodeAt(0);
+
+ if (unicode > 0x20 && unicode < 0x7F && [0x22, 0x23, 0x3C, 0x3E, 0x60].indexOf(unicode) === -1) {
+ return c;
+ }
+
+ return encodeURIComponent(c);
+ }
+
+ var EOF,
+ ALPHA = /[a-zA-Z]/,
+ ALPHANUMERIC = /[a-zA-Z0-9\+\-\.]/;
+
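+  // parse() below is a character-by-character state machine in the style of the
+  // WHATWG URL parsing algorithm: it moves between states such as 'scheme start',
+  // 'authority', 'host', 'port', 'relative path', 'query' and 'fragment', and fills
+  // in the fields that clear() initialises.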
+ function parse(input, stateOverride, base) {
+ function err(message) {
+ errors.push(message);
+ }
+
+ var state = stateOverride || 'scheme start',
+ cursor = 0,
+ buffer = '',
+ seenAt = false,
+ seenBracket = false,
+ errors = [];
+
+ loop: while ((input[cursor - 1] !== EOF || cursor === 0) && !this._isInvalid) {
+ var c = input[cursor];
+
+ switch (state) {
+ case 'scheme start':
+ if (c && ALPHA.test(c)) {
+ buffer += c.toLowerCase();
+ state = 'scheme';
+ } else if (!stateOverride) {
+ buffer = '';
+ state = 'no scheme';
+ continue;
+ } else {
+ err('Invalid scheme.');
+ break loop;
+ }
+
+ break;
+
+ case 'scheme':
+ if (c && ALPHANUMERIC.test(c)) {
+ buffer += c.toLowerCase();
+ } else if (c === ':') {
+ this._scheme = buffer;
+ buffer = '';
+
+ if (stateOverride) {
+ break loop;
+ }
+
+ if (isRelativeScheme(this._scheme)) {
+ this._isRelative = true;
+ }
+
+ if (this._scheme === 'file') {
+ state = 'relative';
+ } else if (this._isRelative && base && base._scheme === this._scheme) {
+ state = 'relative or authority';
+ } else if (this._isRelative) {
+ state = 'authority first slash';
+ } else {
+ state = 'scheme data';
+ }
+ } else if (!stateOverride) {
+ buffer = '';
+ cursor = 0;
+ state = 'no scheme';
+ continue;
+ } else if (c === EOF) {
+ break loop;
+ } else {
+ err('Code point not allowed in scheme: ' + c);
+ break loop;
+ }
+
+ break;
+
+ case 'scheme data':
+ if (c === '?') {
+ this._query = '?';
+ state = 'query';
+ } else if (c === '#') {
+ this._fragment = '#';
+ state = 'fragment';
+ } else {
+ if (c !== EOF && c !== '\t' && c !== '\n' && c !== '\r') {
+ this._schemeData += percentEscape(c);
+ }
+ }
+
+ break;
+
+ case 'no scheme':
+ if (!base || !isRelativeScheme(base._scheme)) {
+ err('Missing scheme.');
+ invalid.call(this);
+ } else {
+ state = 'relative';
+ continue;
+ }
+
+ break;
+
+ case 'relative or authority':
+ if (c === '/' && input[cursor + 1] === '/') {
+ state = 'authority ignore slashes';
+ } else {
+ err('Expected /, got: ' + c);
+ state = 'relative';
+ continue;
+ }
+
+ break;
+
+ case 'relative':
+ this._isRelative = true;
+
+ if (this._scheme !== 'file') {
+ this._scheme = base._scheme;
+ }
+
+ if (c === EOF) {
+ this._host = base._host;
+ this._port = base._port;
+ this._path = base._path.slice();
+ this._query = base._query;
+ this._username = base._username;
+ this._password = base._password;
+ break loop;
+ } else if (c === '/' || c === '\\') {
+ if (c === '\\') {
+ err('\\ is an invalid code point.');
+ }
+
+ state = 'relative slash';
+ } else if (c === '?') {
+ this._host = base._host;
+ this._port = base._port;
+ this._path = base._path.slice();
+ this._query = '?';
+ this._username = base._username;
+ this._password = base._password;
+ state = 'query';
+ } else if (c === '#') {
+ this._host = base._host;
+ this._port = base._port;
+ this._path = base._path.slice();
+ this._query = base._query;
+ this._fragment = '#';
+ this._username = base._username;
+ this._password = base._password;
+ state = 'fragment';
+ } else {
+ var nextC = input[cursor + 1];
+ var nextNextC = input[cursor + 2];
+
+ if (this._scheme !== 'file' || !ALPHA.test(c) || nextC !== ':' && nextC !== '|' || nextNextC !== EOF && nextNextC !== '/' && nextNextC !== '\\' && nextNextC !== '?' && nextNextC !== '#') {
+ this._host = base._host;
+ this._port = base._port;
+ this._username = base._username;
+ this._password = base._password;
+ this._path = base._path.slice();
+
+ this._path.pop();
+ }
+
+ state = 'relative path';
+ continue;
+ }
+
+ break;
+
+ case 'relative slash':
+ if (c === '/' || c === '\\') {
+ if (c === '\\') {
+ err('\\ is an invalid code point.');
+ }
+
+ if (this._scheme === 'file') {
+ state = 'file host';
+ } else {
+ state = 'authority ignore slashes';
+ }
+ } else {
+ if (this._scheme !== 'file') {
+ this._host = base._host;
+ this._port = base._port;
+ this._username = base._username;
+ this._password = base._password;
+ }
+
+ state = 'relative path';
+ continue;
+ }
+
+ break;
+
+ case 'authority first slash':
+ if (c === '/') {
+ state = 'authority second slash';
+ } else {
+ err('Expected \'/\', got: ' + c);
+ state = 'authority ignore slashes';
+ continue;
+ }
+
+ break;
+
+ case 'authority second slash':
+ state = 'authority ignore slashes';
+
+ if (c !== '/') {
+ err('Expected \'/\', got: ' + c);
+ continue;
+ }
+
+ break;
+
+ case 'authority ignore slashes':
+ if (c !== '/' && c !== '\\') {
+ state = 'authority';
+ continue;
+ } else {
+ err('Expected authority, got: ' + c);
+ }
+
+ break;
+
+ case 'authority':
+ if (c === '@') {
+ if (seenAt) {
+ err('@ already seen.');
+ buffer += '%40';
+ }
+
+ seenAt = true;
+
+ for (var i = 0; i < buffer.length; i++) {
+ var cp = buffer[i];
+
+ if (cp === '\t' || cp === '\n' || cp === '\r') {
+ err('Invalid whitespace in authority.');
+ continue;
+ }
+
+ if (cp === ':' && this._password === null) {
+ this._password = '';
+ continue;
+ }
+
+ var tempC = percentEscape(cp);
+
+ if (this._password !== null) {
+ this._password += tempC;
+ } else {
+ this._username += tempC;
+ }
+ }
+
+ buffer = '';
+ } else if (c === EOF || c === '/' || c === '\\' || c === '?' || c === '#') {
+ cursor -= buffer.length;
+ buffer = '';
+ state = 'host';
+ continue;
+ } else {
+ buffer += c;
+ }
+
+ break;
+
+ case 'file host':
+ if (c === EOF || c === '/' || c === '\\' || c === '?' || c === '#') {
+ if (buffer.length === 2 && ALPHA.test(buffer[0]) && (buffer[1] === ':' || buffer[1] === '|')) {
+ state = 'relative path';
+ } else if (buffer.length === 0) {
+ state = 'relative path start';
+ } else {
+ this._host = IDNAToASCII.call(this, buffer);
+ buffer = '';
+ state = 'relative path start';
+ }
+
+ continue;
+ } else if (c === '\t' || c === '\n' || c === '\r') {
+ err('Invalid whitespace in file host.');
+ } else {
+ buffer += c;
+ }
+
+ break;
+
+ case 'host':
+ case 'hostname':
+ if (c === ':' && !seenBracket) {
+ this._host = IDNAToASCII.call(this, buffer);
+ buffer = '';
+ state = 'port';
+
+ if (stateOverride === 'hostname') {
+ break loop;
+ }
+ } else if (c === EOF || c === '/' || c === '\\' || c === '?' || c === '#') {
+ this._host = IDNAToASCII.call(this, buffer);
+ buffer = '';
+ state = 'relative path start';
+
+ if (stateOverride) {
+ break loop;
+ }
+
+ continue;
+ } else if (c !== '\t' && c !== '\n' && c !== '\r') {
+ if (c === '[') {
+ seenBracket = true;
+ } else if (c === ']') {
+ seenBracket = false;
+ }
+
+ buffer += c;
+ } else {
+ err('Invalid code point in host/hostname: ' + c);
+ }
+
+ break;
+
+ case 'port':
+ if (/[0-9]/.test(c)) {
+ buffer += c;
+ } else if (c === EOF || c === '/' || c === '\\' || c === '?' || c === '#' || stateOverride) {
+ if (buffer !== '') {
+ var temp = parseInt(buffer, 10);
+
+ if (temp !== relative[this._scheme]) {
+ this._port = temp + '';
+ }
+
+ buffer = '';
+ }
+
+ if (stateOverride) {
+ break loop;
+ }
+
+ state = 'relative path start';
+ continue;
+ } else if (c === '\t' || c === '\n' || c === '\r') {
+ err('Invalid code point in port: ' + c);
+ } else {
+ invalid.call(this);
+ }
+
+ break;
+
+ case 'relative path start':
+ if (c === '\\') {
+ err('\'\\\' not allowed in path.');
+ }
+
+ state = 'relative path';
+
+ if (c !== '/' && c !== '\\') {
+ continue;
+ }
+
+ break;
+
+ case 'relative path':
+ if (c === EOF || c === '/' || c === '\\' || !stateOverride && (c === '?' || c === '#')) {
+ if (c === '\\') {
+ err('\\ not allowed in relative path.');
+ }
+
+ var tmp;
+
+ if (tmp = relativePathDotMapping[buffer.toLowerCase()]) {
+ buffer = tmp;
+ }
+
+ if (buffer === '..') {
+ this._path.pop();
+
+ if (c !== '/' && c !== '\\') {
+ this._path.push('');
+ }
+ } else if (buffer === '.' && c !== '/' && c !== '\\') {
+ this._path.push('');
+ } else if (buffer !== '.') {
+ if (this._scheme === 'file' && this._path.length === 0 && buffer.length === 2 && ALPHA.test(buffer[0]) && buffer[1] === '|') {
+ buffer = buffer[0] + ':';
+ }
+
+ this._path.push(buffer);
+ }
+
+ buffer = '';
+
+ if (c === '?') {
+ this._query = '?';
+ state = 'query';
+ } else if (c === '#') {
+ this._fragment = '#';
+ state = 'fragment';
+ }
+ } else if (c !== '\t' && c !== '\n' && c !== '\r') {
+ buffer += percentEscape(c);
+ }
+
+ break;
+
+ case 'query':
+ if (!stateOverride && c === '#') {
+ this._fragment = '#';
+ state = 'fragment';
+ } else if (c !== EOF && c !== '\t' && c !== '\n' && c !== '\r') {
+ this._query += percentEscapeQuery(c);
+ }
+
+ break;
+
+ case 'fragment':
+ if (c !== EOF && c !== '\t' && c !== '\n' && c !== '\r') {
+ this._fragment += c;
+ }
+
+ break;
+ }
+
+ cursor++;
+ }
+ }
+
+ function clear() {
+ this._scheme = '';
+ this._schemeData = '';
+ this._username = '';
+ this._password = null;
+ this._host = '';
+ this._port = '';
+ this._path = [];
+ this._query = '';
+ this._fragment = '';
+ this._isInvalid = false;
+ this._isRelative = false;
+ }
+
+ function JURL(url, base) {
+ if (base !== undefined && !(base instanceof JURL)) {
+ base = new JURL(String(base));
+ }
+
+ this._url = url;
+ clear.call(this);
+ var input = url.replace(/^[ \t\r\n\f]+|[ \t\r\n\f]+$/g, '');
+ parse.call(this, input, null, base);
+ }
+
+ JURL.prototype = {
+ toString: function toString() {
+ return this.href;
+ },
+
+ get href() {
+ if (this._isInvalid) {
+ return this._url;
+ }
+
+ var authority = '';
+
+ if (this._username !== '' || this._password !== null) {
+ authority = this._username + (this._password !== null ? ':' + this._password : '') + '@';
+ }
+
+ return this.protocol + (this._isRelative ? '//' + authority + this.host : '') + this.pathname + this._query + this._fragment;
+ },
+
+ set href(value) {
+ clear.call(this);
+ parse.call(this, value);
+ },
+
+ get protocol() {
+ return this._scheme + ':';
+ },
+
+ set protocol(value) {
+ if (this._isInvalid) {
+ return;
+ }
+
+ parse.call(this, value + ':', 'scheme start');
+ },
+
+ get host() {
+ return this._isInvalid ? '' : this._port ? this._host + ':' + this._port : this._host;
+ },
+
+ set host(value) {
+ if (this._isInvalid || !this._isRelative) {
+ return;
+ }
+
+ parse.call(this, value, 'host');
+ },
+
+ get hostname() {
+ return this._host;
+ },
+
+ set hostname(value) {
+ if (this._isInvalid || !this._isRelative) {
+ return;
+ }
+
+ parse.call(this, value, 'hostname');
+ },
+
+ get port() {
+ return this._port;
+ },
+
+ set port(value) {
+ if (this._isInvalid || !this._isRelative) {
+ return;
+ }
+
+ parse.call(this, value, 'port');
+ },
+
+ get pathname() {
+ return this._isInvalid ? '' : this._isRelative ? '/' + this._path.join('/') : this._schemeData;
+ },
+
+ set pathname(value) {
+ if (this._isInvalid || !this._isRelative) {
+ return;
+ }
+
+ this._path = [];
+ parse.call(this, value, 'relative path start');
+ },
+
+ get search() {
+ return this._isInvalid || !this._query || this._query === '?' ? '' : this._query;
+ },
+
+ set search(value) {
+ if (this._isInvalid || !this._isRelative) {
+ return;
+ }
+
+ this._query = '?';
+
+ if (value[0] === '?') {
+ value = value.slice(1);
+ }
+
+ parse.call(this, value, 'query');
+ },
+
+ get hash() {
+ return this._isInvalid || !this._fragment || this._fragment === '#' ? '' : this._fragment;
+ },
+
+ set hash(value) {
+ if (this._isInvalid) {
+ return;
+ }
+
+ this._fragment = '#';
+
+ if (value[0] === '#') {
+ value = value.slice(1);
+ }
+
+ parse.call(this, value, 'fragment');
+ },
+
+ get origin() {
+ var host;
+
+ if (this._isInvalid || !this._scheme) {
+ return '';
+ }
+
+ switch (this._scheme) {
+ case 'data':
+ case 'file':
+ case 'javascript':
+ case 'mailto':
+ return 'null';
+
+ case 'blob':
+ try {
+ return new JURL(this._schemeData).origin || 'null';
+ } catch (_) {}
+
+ return 'null';
+ }
+
+ host = this.host;
+
+ if (!host) {
+ return '';
+ }
+
+ return this._scheme + '://' + host;
+ }
+
+ };
+ exports.URL = JURL;
+})();
+
+/***/ }),
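+// Usage sketch for the JURL polyfill above (only used when the feature test in the
+// previous module rejects the native URL implementation):
+//
+//   var PolyfillURL = __w_pdfjs_require__(150).URL;
+//   var url = new PolyfillURL('c%20d', 'http://a/b');
+//   url.href;    // 'http://a/c%20d'
+//   url.origin;  // 'http://a'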
+/* 151 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.clearPrimitiveCaches = clearPrimitiveCaches;
+exports.isEOF = isEOF;
+exports.isCmd = isCmd;
+exports.isDict = isDict;
+exports.isName = isName;
+exports.isRef = isRef;
+exports.isRefsEqual = isRefsEqual;
+exports.isStream = isStream;
+exports.RefSetCache = exports.RefSet = exports.Ref = exports.Name = exports.Dict = exports.Cmd = exports.EOF = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var EOF = {};
+exports.EOF = EOF;
+
+var Name = function NameClosure() {
+ var nameCache = Object.create(null);
+
+ function Name(name) {
+ this.name = name;
+ }
+
+ Name.prototype = {};
+
+ Name.get = function Name_get(name) {
+ var nameValue = nameCache[name];
+ return nameValue ? nameValue : nameCache[name] = new Name(name);
+ };
+
+ Name._clearCache = function () {
+ nameCache = Object.create(null);
+ };
+
+ return Name;
+}();
+
+exports.Name = Name;
+
+var Cmd = function CmdClosure() {
+ var cmdCache = Object.create(null);
+
+ function Cmd(cmd) {
+ this.cmd = cmd;
+ }
+
+ Cmd.prototype = {};
+
+ Cmd.get = function Cmd_get(cmd) {
+ var cmdValue = cmdCache[cmd];
+ return cmdValue ? cmdValue : cmdCache[cmd] = new Cmd(cmd);
+ };
+
+ Cmd._clearCache = function () {
+ cmdCache = Object.create(null);
+ };
+
+ return Cmd;
+}();
+
+exports.Cmd = Cmd;
+
+var Dict = function DictClosure() {
+ var nonSerializable = function nonSerializableClosure() {
+ return nonSerializable;
+ };
+
+ function Dict(xref) {
+ this._map = Object.create(null);
+ this.xref = xref;
+ this.objId = null;
+ this.suppressEncryption = false;
+ this.__nonSerializable__ = nonSerializable;
+ }
+
+ Dict.prototype = {
+ assignXref: function Dict_assignXref(newXref) {
+ this.xref = newXref;
+ },
+ get: function Dict_get(key1, key2, key3) {
+ var value;
+ var xref = this.xref,
+ suppressEncryption = this.suppressEncryption;
+
+ if (typeof (value = this._map[key1]) !== 'undefined' || key1 in this._map || typeof key2 === 'undefined') {
+ return xref ? xref.fetchIfRef(value, suppressEncryption) : value;
+ }
+
+ if (typeof (value = this._map[key2]) !== 'undefined' || key2 in this._map || typeof key3 === 'undefined') {
+ return xref ? xref.fetchIfRef(value, suppressEncryption) : value;
+ }
+
+ value = this._map[key3] || null;
+ return xref ? xref.fetchIfRef(value, suppressEncryption) : value;
+ },
+ getAsync: function Dict_getAsync(key1, key2, key3) {
+ var value;
+ var xref = this.xref,
+ suppressEncryption = this.suppressEncryption;
+
+ if (typeof (value = this._map[key1]) !== 'undefined' || key1 in this._map || typeof key2 === 'undefined') {
+ if (xref) {
+ return xref.fetchIfRefAsync(value, suppressEncryption);
+ }
+
+ return Promise.resolve(value);
+ }
+
+ if (typeof (value = this._map[key2]) !== 'undefined' || key2 in this._map || typeof key3 === 'undefined') {
+ if (xref) {
+ return xref.fetchIfRefAsync(value, suppressEncryption);
+ }
+
+ return Promise.resolve(value);
+ }
+
+ value = this._map[key3] || null;
+
+ if (xref) {
+ return xref.fetchIfRefAsync(value, suppressEncryption);
+ }
+
+ return Promise.resolve(value);
+ },
+ getArray: function Dict_getArray(key1, key2, key3) {
+ var value = this.get(key1, key2, key3);
+ var xref = this.xref,
+ suppressEncryption = this.suppressEncryption;
+
+ if (!Array.isArray(value) || !xref) {
+ return value;
+ }
+
+ value = value.slice();
+
+ for (var i = 0, ii = value.length; i < ii; i++) {
+ if (!isRef(value[i])) {
+ continue;
+ }
+
+ value[i] = xref.fetch(value[i], suppressEncryption);
+ }
+
+ return value;
+ },
+ getRaw: function Dict_getRaw(key) {
+ return this._map[key];
+ },
+ getKeys: function Dict_getKeys() {
+ return Object.keys(this._map);
+ },
+ set: function Dict_set(key, value) {
+ this._map[key] = value;
+ },
+ has: function Dict_has(key) {
+ return key in this._map;
+ },
+ forEach: function Dict_forEach(callback) {
+ for (var key in this._map) {
+ callback(key, this.get(key));
+ }
+ }
+ };
+ Dict.empty = new Dict(null);
+
+ Dict.merge = function (xref, dictArray) {
+ var mergedDict = new Dict(xref);
+
+ for (var i = 0, ii = dictArray.length; i < ii; i++) {
+ var dict = dictArray[i];
+
+ if (!isDict(dict)) {
+ continue;
+ }
+
+ for (var keyName in dict._map) {
+ if (mergedDict._map[keyName] !== undefined) {
+ continue;
+ }
+
+ mergedDict._map[keyName] = dict._map[keyName];
+ }
+ }
+
+ return mergedDict;
+ };
+
+ return Dict;
+}();
+
+exports.Dict = Dict;
+
+var Ref = function RefClosure() {
+ var refCache = Object.create(null);
+
+ function Ref(num, gen) {
+ this.num = num;
+ this.gen = gen;
+ }
+
+ Ref.prototype = {
+ toString: function Ref_toString() {
+ if (this.gen === 0) {
+ return "".concat(this.num, "R");
+ }
+
+ return "".concat(this.num, "R").concat(this.gen);
+ }
+ };
+
+ Ref.get = function (num, gen) {
+ var key = gen === 0 ? "".concat(num, "R") : "".concat(num, "R").concat(gen);
+ var refValue = refCache[key];
+ return refValue ? refValue : refCache[key] = new Ref(num, gen);
+ };
+
+ Ref._clearCache = function () {
+ refCache = Object.create(null);
+ };
+
+ return Ref;
+}();
+
+exports.Ref = Ref;
+
+var RefSet = function RefSetClosure() {
+ function RefSet() {
+ this.dict = Object.create(null);
+ }
+
+ RefSet.prototype = {
+ has: function RefSet_has(ref) {
+ return ref.toString() in this.dict;
+ },
+ put: function RefSet_put(ref) {
+ this.dict[ref.toString()] = true;
+ },
+ remove: function RefSet_remove(ref) {
+ delete this.dict[ref.toString()];
+ }
+ };
+ return RefSet;
+}();
+
+exports.RefSet = RefSet;
+
+var RefSetCache = function RefSetCacheClosure() {
+ function RefSetCache() {
+ this.dict = Object.create(null);
+ }
+
+ RefSetCache.prototype = {
+ get: function RefSetCache_get(ref) {
+ return this.dict[ref.toString()];
+ },
+ has: function RefSetCache_has(ref) {
+ return ref.toString() in this.dict;
+ },
+ put: function RefSetCache_put(ref, obj) {
+ this.dict[ref.toString()] = obj;
+ },
+ putAlias: function RefSetCache_putAlias(ref, aliasRef) {
+ this.dict[ref.toString()] = this.get(aliasRef);
+ },
+ forEach: function RefSetCache_forEach(fn, thisArg) {
+ for (var i in this.dict) {
+ fn.call(thisArg, this.dict[i]);
+ }
+ },
+ clear: function RefSetCache_clear() {
+ this.dict = Object.create(null);
+ }
+ };
+ return RefSetCache;
+}();
+
+exports.RefSetCache = RefSetCache;
+
+function isEOF(v) {
+ return v === EOF;
+}
+
+function isName(v, name) {
+ return v instanceof Name && (name === undefined || v.name === name);
+}
+
+function isCmd(v, cmd) {
+ return v instanceof Cmd && (cmd === undefined || v.cmd === cmd);
+}
+
+function isDict(v, type) {
+ return v instanceof Dict && (type === undefined || isName(v.get('Type'), type));
+}
+
+function isRef(v) {
+ return v instanceof Ref;
+}
+
+function isRefsEqual(v1, v2) {
+ return v1.num === v2.num && v1.gen === v2.gen;
+}
+
+function isStream(v) {
+ return _typeof(v) === 'object' && v !== null && v.getBytes !== undefined;
+}
+
+function clearPrimitiveCaches() {
+ Cmd._clearCache();
+
+ Name._clearCache();
+
+ Ref._clearCache();
+}
+
+/***/ }),
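+// Usage sketch for the primitives above: Name, Cmd and Ref instances are interned
+// through their static get() caches, and Dict.get() accepts up to three candidate
+// keys so callers can probe abbreviated and full PDF key spellings in one call:
+//
+//   var _primitives = __w_pdfjs_require__(151);
+//   var dict = new _primitives.Dict(null);
+//   dict.set('BitsPerComponent', 8);
+//   dict.get('BPC', 'BitsPerComponent');                            // 8
+//   _primitives.Name.get('Font') === _primitives.Name.get('Font');  // true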
+/* 152 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.NetworkPdfManager = exports.LocalPdfManager = void 0;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(2));
+
+var _util = __w_pdfjs_require__(5);
+
+var _chunked_stream = __w_pdfjs_require__(153);
+
+var _core_utils = __w_pdfjs_require__(154);
+
+var _document = __w_pdfjs_require__(155);
+
+var _stream = __w_pdfjs_require__(158);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+
+function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
+
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
+
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var BasePdfManager =
+/*#__PURE__*/
+function () {
+ function BasePdfManager() {
+ _classCallCheck(this, BasePdfManager);
+
+ if (this.constructor === BasePdfManager) {
+ (0, _util.unreachable)('Cannot initialize BasePdfManager.');
+ }
+ }
+
+ _createClass(BasePdfManager, [{
+ key: "onLoadedStream",
+ value: function onLoadedStream() {
+ (0, _util.unreachable)('Abstract method `onLoadedStream` called');
+ }
+ }, {
+ key: "ensureDoc",
+ value: function ensureDoc(prop, args) {
+ return this.ensure(this.pdfDocument, prop, args);
+ }
+ }, {
+ key: "ensureXRef",
+ value: function ensureXRef(prop, args) {
+ return this.ensure(this.pdfDocument.xref, prop, args);
+ }
+ }, {
+ key: "ensureCatalog",
+ value: function ensureCatalog(prop, args) {
+ return this.ensure(this.pdfDocument.catalog, prop, args);
+ }
+ }, {
+ key: "getPage",
+ value: function getPage(pageIndex) {
+ return this.pdfDocument.getPage(pageIndex);
+ }
+ }, {
+ key: "fontFallback",
+ value: function fontFallback(id, handler) {
+ return this.pdfDocument.fontFallback(id, handler);
+ }
+ }, {
+ key: "cleanup",
+ value: function cleanup() {
+ return this.pdfDocument.cleanup();
+ }
+ }, {
+ key: "ensure",
+ value: function () {
+ var _ensure = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee(obj, prop, args) {
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ (0, _util.unreachable)('Abstract method `ensure` called');
+
+ case 1:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee);
+ }));
+
+ function ensure(_x, _x2, _x3) {
+ return _ensure.apply(this, arguments);
+ }
+
+ return ensure;
+ }()
+ }, {
+ key: "requestRange",
+ value: function requestRange(begin, end) {
+ (0, _util.unreachable)('Abstract method `requestRange` called');
+ }
+ }, {
+ key: "requestLoadedStream",
+ value: function requestLoadedStream() {
+ (0, _util.unreachable)('Abstract method `requestLoadedStream` called');
+ }
+ }, {
+ key: "sendProgressiveData",
+ value: function sendProgressiveData(chunk) {
+ (0, _util.unreachable)('Abstract method `sendProgressiveData` called');
+ }
+ }, {
+ key: "updatePassword",
+ value: function updatePassword(password) {
+ this._password = password;
+ }
+ }, {
+ key: "terminate",
+ value: function terminate() {
+ (0, _util.unreachable)('Abstract method `terminate` called');
+ }
+ }, {
+ key: "docId",
+ get: function get() {
+ return this._docId;
+ }
+ }, {
+ key: "password",
+ get: function get() {
+ return this._password;
+ }
+ }, {
+ key: "docBaseUrl",
+ get: function get() {
+ var docBaseUrl = null;
+
+ if (this._docBaseUrl) {
+ var absoluteUrl = (0, _util.createValidAbsoluteUrl)(this._docBaseUrl);
+
+ if (absoluteUrl) {
+ docBaseUrl = absoluteUrl.href;
+ } else {
+ (0, _util.warn)("Invalid absolute docBaseUrl: \"".concat(this._docBaseUrl, "\"."));
+ }
+ }
+
+ return (0, _util.shadow)(this, 'docBaseUrl', docBaseUrl);
+ }
+ }]);
+
+ return BasePdfManager;
+}();
+
+var LocalPdfManager =
+/*#__PURE__*/
+function (_BasePdfManager) {
+ _inherits(LocalPdfManager, _BasePdfManager);
+
+ function LocalPdfManager(docId, data, password, evaluatorOptions, docBaseUrl) {
+ var _this;
+
+ _classCallCheck(this, LocalPdfManager);
+
+ _this = _possibleConstructorReturn(this, _getPrototypeOf(LocalPdfManager).call(this));
+ _this._docId = docId;
+ _this._password = password;
+ _this._docBaseUrl = docBaseUrl;
+ _this.evaluatorOptions = evaluatorOptions;
+ var stream = new _stream.Stream(data);
+ _this.pdfDocument = new _document.PDFDocument(_assertThisInitialized(_this), stream);
+ _this._loadedStreamPromise = Promise.resolve(stream);
+ return _this;
+ }
+
+ _createClass(LocalPdfManager, [{
+ key: "ensure",
+ value: function () {
+ var _ensure2 = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee2(obj, prop, args) {
+ var value;
+ return _regenerator["default"].wrap(function _callee2$(_context2) {
+ while (1) {
+ switch (_context2.prev = _context2.next) {
+ case 0:
+ value = obj[prop];
+
+ if (!(typeof value === 'function')) {
+ _context2.next = 3;
+ break;
+ }
+
+ return _context2.abrupt("return", value.apply(obj, args));
+
+ case 3:
+ return _context2.abrupt("return", value);
+
+ case 4:
+ case "end":
+ return _context2.stop();
+ }
+ }
+ }, _callee2);
+ }));
+
+ function ensure(_x4, _x5, _x6) {
+ return _ensure2.apply(this, arguments);
+ }
+
+ return ensure;
+ }()
+ }, {
+ key: "requestRange",
+ value: function requestRange(begin, end) {
+ return Promise.resolve();
+ }
+ }, {
+ key: "requestLoadedStream",
+ value: function requestLoadedStream() {}
+ }, {
+ key: "onLoadedStream",
+ value: function onLoadedStream() {
+ return this._loadedStreamPromise;
+ }
+ }, {
+ key: "terminate",
+ value: function terminate() {}
+ }]);
+
+ return LocalPdfManager;
+}(BasePdfManager);
+
+exports.LocalPdfManager = LocalPdfManager;
+
+var NetworkPdfManager =
+/*#__PURE__*/
+function (_BasePdfManager2) {
+ _inherits(NetworkPdfManager, _BasePdfManager2);
+
+ function NetworkPdfManager(docId, pdfNetworkStream, args, evaluatorOptions, docBaseUrl) {
+ var _this2;
+
+ _classCallCheck(this, NetworkPdfManager);
+
+ _this2 = _possibleConstructorReturn(this, _getPrototypeOf(NetworkPdfManager).call(this));
+ _this2._docId = docId;
+ _this2._password = args.password;
+ _this2._docBaseUrl = docBaseUrl;
+ _this2.msgHandler = args.msgHandler;
+ _this2.evaluatorOptions = evaluatorOptions;
+ _this2.streamManager = new _chunked_stream.ChunkedStreamManager(pdfNetworkStream, {
+ msgHandler: args.msgHandler,
+ length: args.length,
+ disableAutoFetch: args.disableAutoFetch,
+ rangeChunkSize: args.rangeChunkSize
+ });
+ _this2.pdfDocument = new _document.PDFDocument(_assertThisInitialized(_this2), _this2.streamManager.getStream());
+ return _this2;
+ }
+
+ _createClass(NetworkPdfManager, [{
+ key: "ensure",
+ value: function () {
+ var _ensure3 = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee3(obj, prop, args) {
+ var value;
+ return _regenerator["default"].wrap(function _callee3$(_context3) {
+ while (1) {
+ switch (_context3.prev = _context3.next) {
+ case 0:
+ _context3.prev = 0;
+ value = obj[prop];
+
+ if (!(typeof value === 'function')) {
+ _context3.next = 4;
+ break;
+ }
+
+ return _context3.abrupt("return", value.apply(obj, args));
+
+ case 4:
+ return _context3.abrupt("return", value);
+
+ case 7:
+ _context3.prev = 7;
+ _context3.t0 = _context3["catch"](0);
+
+ if (_context3.t0 instanceof _core_utils.MissingDataException) {
+ _context3.next = 11;
+ break;
+ }
+
+ throw _context3.t0;
+
+ case 11:
+ _context3.next = 13;
+ return this.requestRange(_context3.t0.begin, _context3.t0.end);
+
+ case 13:
+ return _context3.abrupt("return", this.ensure(obj, prop, args));
+
+ case 14:
+ case "end":
+ return _context3.stop();
+ }
+ }
+ }, _callee3, this, [[0, 7]]);
+ }));
+
+ function ensure(_x7, _x8, _x9) {
+ return _ensure3.apply(this, arguments);
+ }
+
+ return ensure;
+ }()
+ }, {
+ key: "requestRange",
+ value: function requestRange(begin, end) {
+ return this.streamManager.requestRange(begin, end);
+ }
+ }, {
+ key: "requestLoadedStream",
+ value: function requestLoadedStream() {
+ this.streamManager.requestAllChunks();
+ }
+ }, {
+ key: "sendProgressiveData",
+ value: function sendProgressiveData(chunk) {
+ this.streamManager.onReceiveData({
+ chunk: chunk
+ });
+ }
+ }, {
+ key: "onLoadedStream",
+ value: function onLoadedStream() {
+ return this.streamManager.onLoadedStream();
+ }
+ }, {
+ key: "terminate",
+ value: function terminate() {
+ this.streamManager.abort();
+ }
+ }]);
+
+ return NetworkPdfManager;
+}(BasePdfManager);
+
+exports.NetworkPdfManager = NetworkPdfManager;
+
+/***/ }),
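+// Note on the managers above: LocalPdfManager wraps a fully in-memory Stream, so its
+// ensure() resolves immediately, while NetworkPdfManager.ensure() catches
+// MissingDataException, asks the ChunkedStreamManager (next module) for the missing
+// byte range, and then retries the same property access.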
+/* 153 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.ChunkedStreamManager = exports.ChunkedStream = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _core_utils = __w_pdfjs_require__(154);
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var ChunkedStream =
+/*#__PURE__*/
+function () {
+ function ChunkedStream(length, chunkSize, manager) {
+ _classCallCheck(this, ChunkedStream);
+
+ this.bytes = new Uint8Array(length);
+ this.start = 0;
+ this.pos = 0;
+ this.end = length;
+ this.chunkSize = chunkSize;
+ this.loadedChunks = [];
+ this.numChunksLoaded = 0;
+ this.numChunks = Math.ceil(length / chunkSize);
+ this.manager = manager;
+ this.progressiveDataLength = 0;
+ this.lastSuccessfulEnsureByteChunk = -1;
+ }
+
+ _createClass(ChunkedStream, [{
+ key: "getMissingChunks",
+ value: function getMissingChunks() {
+ var chunks = [];
+
+ for (var chunk = 0, n = this.numChunks; chunk < n; ++chunk) {
+ if (!this.loadedChunks[chunk]) {
+ chunks.push(chunk);
+ }
+ }
+
+ return chunks;
+ }
+ }, {
+ key: "getBaseStreams",
+ value: function getBaseStreams() {
+ return [this];
+ }
+ }, {
+ key: "allChunksLoaded",
+ value: function allChunksLoaded() {
+ return this.numChunksLoaded === this.numChunks;
+ }
+ }, {
+ key: "onReceiveData",
+ value: function onReceiveData(begin, chunk) {
+ var chunkSize = this.chunkSize;
+
+ if (begin % chunkSize !== 0) {
+ throw new Error("Bad begin offset: ".concat(begin));
+ }
+
+ var end = begin + chunk.byteLength;
+
+ if (end % chunkSize !== 0 && end !== this.bytes.length) {
+ throw new Error("Bad end offset: ".concat(end));
+ }
+
+ this.bytes.set(new Uint8Array(chunk), begin);
+ var beginChunk = Math.floor(begin / chunkSize);
+ var endChunk = Math.floor((end - 1) / chunkSize) + 1;
+
+ for (var curChunk = beginChunk; curChunk < endChunk; ++curChunk) {
+ if (!this.loadedChunks[curChunk]) {
+ this.loadedChunks[curChunk] = true;
+ ++this.numChunksLoaded;
+ }
+ }
+ }
+ }, {
+ key: "onReceiveProgressiveData",
+ value: function onReceiveProgressiveData(data) {
+ var position = this.progressiveDataLength;
+ var beginChunk = Math.floor(position / this.chunkSize);
+ this.bytes.set(new Uint8Array(data), position);
+ position += data.byteLength;
+ this.progressiveDataLength = position;
+ var endChunk = position >= this.end ? this.numChunks : Math.floor(position / this.chunkSize);
+
+ for (var curChunk = beginChunk; curChunk < endChunk; ++curChunk) {
+ if (!this.loadedChunks[curChunk]) {
+ this.loadedChunks[curChunk] = true;
+ ++this.numChunksLoaded;
+ }
+ }
+ }
+ }, {
+ key: "ensureByte",
+ value: function ensureByte(pos) {
+ if (pos < this.progressiveDataLength) {
+ return;
+ }
+
+ var chunk = Math.floor(pos / this.chunkSize);
+
+ if (chunk === this.lastSuccessfulEnsureByteChunk) {
+ return;
+ }
+
+ if (!this.loadedChunks[chunk]) {
+ throw new _core_utils.MissingDataException(pos, pos + 1);
+ }
+
+ this.lastSuccessfulEnsureByteChunk = chunk;
+ }
+ }, {
+ key: "ensureRange",
+ value: function ensureRange(begin, end) {
+ if (begin >= end) {
+ return;
+ }
+
+ if (end <= this.progressiveDataLength) {
+ return;
+ }
+
+ var chunkSize = this.chunkSize;
+ var beginChunk = Math.floor(begin / chunkSize);
+ var endChunk = Math.floor((end - 1) / chunkSize) + 1;
+
+ for (var chunk = beginChunk; chunk < endChunk; ++chunk) {
+ if (!this.loadedChunks[chunk]) {
+ throw new _core_utils.MissingDataException(begin, end);
+ }
+ }
+ }
+ }, {
+ key: "nextEmptyChunk",
+ value: function nextEmptyChunk(beginChunk) {
+ var numChunks = this.numChunks;
+
+ for (var i = 0; i < numChunks; ++i) {
+ var chunk = (beginChunk + i) % numChunks;
+
+ if (!this.loadedChunks[chunk]) {
+ return chunk;
+ }
+ }
+
+ return null;
+ }
+ }, {
+ key: "hasChunk",
+ value: function hasChunk(chunk) {
+ return !!this.loadedChunks[chunk];
+ }
+ }, {
+ key: "getByte",
+ value: function getByte() {
+ var pos = this.pos;
+
+ if (pos >= this.end) {
+ return -1;
+ }
+
+ this.ensureByte(pos);
+ return this.bytes[this.pos++];
+ }
+ }, {
+ key: "getUint16",
+ value: function getUint16() {
+ var b0 = this.getByte();
+ var b1 = this.getByte();
+
+ if (b0 === -1 || b1 === -1) {
+ return -1;
+ }
+
+ return (b0 << 8) + b1;
+ }
+ }, {
+ key: "getInt32",
+ value: function getInt32() {
+ var b0 = this.getByte();
+ var b1 = this.getByte();
+ var b2 = this.getByte();
+ var b3 = this.getByte();
+ return (b0 << 24) + (b1 << 16) + (b2 << 8) + b3;
+ }
+ }, {
+ key: "getBytes",
+ value: function getBytes(length) {
+ var forceClamped = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
+ var bytes = this.bytes;
+ var pos = this.pos;
+ var strEnd = this.end;
+
+ if (!length) {
+ this.ensureRange(pos, strEnd);
+
+ var _subarray = bytes.subarray(pos, strEnd);
+
+ return forceClamped ? new Uint8ClampedArray(_subarray) : _subarray;
+ }
+
+ var end = pos + length;
+
+ if (end > strEnd) {
+ end = strEnd;
+ }
+
+ this.ensureRange(pos, end);
+ this.pos = end;
+ var subarray = bytes.subarray(pos, end);
+ return forceClamped ? new Uint8ClampedArray(subarray) : subarray;
+ }
+ }, {
+ key: "peekByte",
+ value: function peekByte() {
+ var peekedByte = this.getByte();
+ this.pos--;
+ return peekedByte;
+ }
+ }, {
+ key: "peekBytes",
+ value: function peekBytes(length) {
+ var forceClamped = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
+ var bytes = this.getBytes(length, forceClamped);
+ this.pos -= bytes.length;
+ return bytes;
+ }
+ }, {
+ key: "getByteRange",
+ value: function getByteRange(begin, end) {
+ this.ensureRange(begin, end);
+ return this.bytes.subarray(begin, end);
+ }
+ }, {
+ key: "skip",
+ value: function skip(n) {
+ if (!n) {
+ n = 1;
+ }
+
+ this.pos += n;
+ }
+ }, {
+ key: "reset",
+ value: function reset() {
+ this.pos = this.start;
+ }
+ }, {
+ key: "moveStart",
+ value: function moveStart() {
+ this.start = this.pos;
+ }
+ }, {
+ key: "makeSubStream",
+ value: function makeSubStream(start, length, dict) {
+ if (length) {
+ this.ensureRange(start, start + length);
+ } else {
+ this.ensureByte(start);
+ }
+
+ function ChunkedStreamSubstream() {}
+
+ ChunkedStreamSubstream.prototype = Object.create(this);
+
+ ChunkedStreamSubstream.prototype.getMissingChunks = function () {
+ var chunkSize = this.chunkSize;
+ var beginChunk = Math.floor(this.start / chunkSize);
+ var endChunk = Math.floor((this.end - 1) / chunkSize) + 1;
+ var missingChunks = [];
+
+ for (var chunk = beginChunk; chunk < endChunk; ++chunk) {
+ if (!this.loadedChunks[chunk]) {
+ missingChunks.push(chunk);
+ }
+ }
+
+ return missingChunks;
+ };
+
+ var subStream = new ChunkedStreamSubstream();
+ subStream.pos = subStream.start = start;
+ subStream.end = start + length || this.end;
+ subStream.dict = dict;
+ return subStream;
+ }
+ }, {
+ key: "length",
+ get: function get() {
+ return this.end - this.start;
+ }
+ }, {
+ key: "isEmpty",
+ get: function get() {
+ return this.length === 0;
+ }
+ }]);
+
+ return ChunkedStream;
+}();
+
+exports.ChunkedStream = ChunkedStream;
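+
+// ChunkedStream above is a sparse view over the document bytes: chunks that have not
+// arrived yet are still zero-filled in `this.bytes`, and any read touching an
+// unloaded chunk throws MissingDataException so callers (e.g. NetworkPdfManager.ensure
+// in the previous module) can fetch the range and retry.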
+
+var ChunkedStreamManager =
+/*#__PURE__*/
+function () {
+ function ChunkedStreamManager(pdfNetworkStream, args) {
+ _classCallCheck(this, ChunkedStreamManager);
+
+ this.length = args.length;
+ this.chunkSize = args.rangeChunkSize;
+ this.stream = new ChunkedStream(this.length, this.chunkSize, this);
+ this.pdfNetworkStream = pdfNetworkStream;
+ this.disableAutoFetch = args.disableAutoFetch;
+ this.msgHandler = args.msgHandler;
+ this.currRequestId = 0;
+ this.chunksNeededByRequest = Object.create(null);
+ this.requestsByChunk = Object.create(null);
+ this.promisesByRequest = Object.create(null);
+ this.progressiveDataLength = 0;
+ this.aborted = false;
+ this._loadedStreamCapability = (0, _util.createPromiseCapability)();
+ }
+
+ _createClass(ChunkedStreamManager, [{
+ key: "onLoadedStream",
+ value: function onLoadedStream() {
+ return this._loadedStreamCapability.promise;
+ }
+ }, {
+ key: "sendRequest",
+ value: function sendRequest(begin, end) {
+ var _this = this;
+
+ var rangeReader = this.pdfNetworkStream.getRangeReader(begin, end);
+
+ if (!rangeReader.isStreamingSupported) {
+ rangeReader.onProgress = this.onProgress.bind(this);
+ }
+
+ var chunks = [],
+ loaded = 0;
+ var promise = new Promise(function (resolve, reject) {
+ var readChunk = function readChunk(chunk) {
+ try {
+ if (!chunk.done) {
+ var data = chunk.value;
+ chunks.push(data);
+ loaded += (0, _util.arrayByteLength)(data);
+
+ if (rangeReader.isStreamingSupported) {
+ _this.onProgress({
+ loaded: loaded
+ });
+ }
+
+ rangeReader.read().then(readChunk, reject);
+ return;
+ }
+
+ var chunkData = (0, _util.arraysToBytes)(chunks);
+ chunks = null;
+ resolve(chunkData);
+ } catch (e) {
+ reject(e);
+ }
+ };
+
+ rangeReader.read().then(readChunk, reject);
+ });
+ promise.then(function (data) {
+ if (_this.aborted) {
+ return;
+ }
+
+ _this.onReceiveData({
+ chunk: data,
+ begin: begin
+ });
+ });
+ }
+ }, {
+ key: "requestAllChunks",
+ value: function requestAllChunks() {
+ var missingChunks = this.stream.getMissingChunks();
+
+ this._requestChunks(missingChunks);
+
+ return this._loadedStreamCapability.promise;
+ }
+ }, {
+ key: "_requestChunks",
+ value: function _requestChunks(chunks) {
+ var requestId = this.currRequestId++;
+ var chunksNeeded = Object.create(null);
+ this.chunksNeededByRequest[requestId] = chunksNeeded;
+ var _iteratorNormalCompletion = true;
+ var _didIteratorError = false;
+ var _iteratorError = undefined;
+
+ try {
+ for (var _iterator = chunks[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
+ var _chunk = _step.value;
+
+ if (!this.stream.hasChunk(_chunk)) {
+ chunksNeeded[_chunk] = true;
+ }
+ }
+ } catch (err) {
+ _didIteratorError = true;
+ _iteratorError = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion && _iterator["return"] != null) {
+ _iterator["return"]();
+ }
+ } finally {
+ if (_didIteratorError) {
+ throw _iteratorError;
+ }
+ }
+ }
+
+ if ((0, _util.isEmptyObj)(chunksNeeded)) {
+ return Promise.resolve();
+ }
+
+ var capability = (0, _util.createPromiseCapability)();
+ this.promisesByRequest[requestId] = capability;
+ var chunksToRequest = [];
+
+ for (var chunk in chunksNeeded) {
+ chunk = chunk | 0;
+
+ if (!(chunk in this.requestsByChunk)) {
+ this.requestsByChunk[chunk] = [];
+ chunksToRequest.push(chunk);
+ }
+
+ this.requestsByChunk[chunk].push(requestId);
+ }
+
+ if (!chunksToRequest.length) {
+ return capability.promise;
+ }
+
+ var groupedChunksToRequest = this.groupChunks(chunksToRequest);
+ var _iteratorNormalCompletion2 = true;
+ var _didIteratorError2 = false;
+ var _iteratorError2 = undefined;
+
+ try {
+ for (var _iterator2 = groupedChunksToRequest[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
+ var groupedChunk = _step2.value;
+ var begin = groupedChunk.beginChunk * this.chunkSize;
+ var end = Math.min(groupedChunk.endChunk * this.chunkSize, this.length);
+ this.sendRequest(begin, end);
+ }
+ } catch (err) {
+ _didIteratorError2 = true;
+ _iteratorError2 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) {
+ _iterator2["return"]();
+ }
+ } finally {
+ if (_didIteratorError2) {
+ throw _iteratorError2;
+ }
+ }
+ }
+
+ return capability.promise;
+ }
+ }, {
+ key: "getStream",
+ value: function getStream() {
+ return this.stream;
+ }
+ }, {
+ key: "requestRange",
+ value: function requestRange(begin, end) {
+ end = Math.min(end, this.length);
+ var beginChunk = this.getBeginChunk(begin);
+ var endChunk = this.getEndChunk(end);
+ var chunks = [];
+
+ for (var chunk = beginChunk; chunk < endChunk; ++chunk) {
+ chunks.push(chunk);
+ }
+
+ return this._requestChunks(chunks);
+ }
+ }, {
+ key: "requestRanges",
+ value: function requestRanges() {
+ var ranges = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : [];
+ var chunksToRequest = [];
+ var _iteratorNormalCompletion3 = true;
+ var _didIteratorError3 = false;
+ var _iteratorError3 = undefined;
+
+ try {
+ for (var _iterator3 = ranges[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
+ var range = _step3.value;
+ var beginChunk = this.getBeginChunk(range.begin);
+ var endChunk = this.getEndChunk(range.end);
+
+ for (var chunk = beginChunk; chunk < endChunk; ++chunk) {
+ if (!chunksToRequest.includes(chunk)) {
+ chunksToRequest.push(chunk);
+ }
+ }
+ }
+ } catch (err) {
+ _didIteratorError3 = true;
+ _iteratorError3 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion3 && _iterator3["return"] != null) {
+ _iterator3["return"]();
+ }
+ } finally {
+ if (_didIteratorError3) {
+ throw _iteratorError3;
+ }
+ }
+ }
+
+ chunksToRequest.sort(function (a, b) {
+ return a - b;
+ });
+ return this._requestChunks(chunksToRequest);
+ }
+ }, {
+ key: "groupChunks",
+ value: function groupChunks(chunks) {
+ var groupedChunks = [];
+ var beginChunk = -1;
+ var prevChunk = -1;
+
+ for (var i = 0, ii = chunks.length; i < ii; ++i) {
+ var chunk = chunks[i];
+
+ if (beginChunk < 0) {
+ beginChunk = chunk;
+ }
+
+ if (prevChunk >= 0 && prevChunk + 1 !== chunk) {
+ groupedChunks.push({
+ beginChunk: beginChunk,
+ endChunk: prevChunk + 1
+ });
+ beginChunk = chunk;
+ }
+
+ if (i + 1 === chunks.length) {
+ groupedChunks.push({
+ beginChunk: beginChunk,
+ endChunk: chunk + 1
+ });
+ }
+
+ prevChunk = chunk;
+ }
+
+ return groupedChunks;
+ }
+ }, {
+ key: "onProgress",
+ value: function onProgress(args) {
+ this.msgHandler.send('DocProgress', {
+ loaded: this.stream.numChunksLoaded * this.chunkSize + args.loaded,
+ total: this.length
+ });
+ }
+ }, {
+ key: "onReceiveData",
+ value: function onReceiveData(args) {
+ var chunk = args.chunk;
+ var isProgressive = args.begin === undefined;
+ var begin = isProgressive ? this.progressiveDataLength : args.begin;
+ var end = begin + chunk.byteLength;
+ var beginChunk = Math.floor(begin / this.chunkSize);
+ var endChunk = end < this.length ? Math.floor(end / this.chunkSize) : Math.ceil(end / this.chunkSize);
+
+ if (isProgressive) {
+ this.stream.onReceiveProgressiveData(chunk);
+ this.progressiveDataLength = end;
+ } else {
+ this.stream.onReceiveData(begin, chunk);
+ }
+
+ if (this.stream.allChunksLoaded()) {
+ this._loadedStreamCapability.resolve(this.stream);
+ }
+
+ var loadedRequests = [];
+
+ for (var _chunk2 = beginChunk; _chunk2 < endChunk; ++_chunk2) {
+ var requestIds = this.requestsByChunk[_chunk2] || [];
+ delete this.requestsByChunk[_chunk2];
+ var _iteratorNormalCompletion4 = true;
+ var _didIteratorError4 = false;
+ var _iteratorError4 = undefined;
+
+ try {
+ for (var _iterator4 = requestIds[Symbol.iterator](), _step4; !(_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done); _iteratorNormalCompletion4 = true) {
+ var requestId = _step4.value;
+ var chunksNeeded = this.chunksNeededByRequest[requestId];
+
+ if (_chunk2 in chunksNeeded) {
+ delete chunksNeeded[_chunk2];
+ }
+
+ if (!(0, _util.isEmptyObj)(chunksNeeded)) {
+ continue;
+ }
+
+ loadedRequests.push(requestId);
+ }
+ } catch (err) {
+ _didIteratorError4 = true;
+ _iteratorError4 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion4 && _iterator4["return"] != null) {
+ _iterator4["return"]();
+ }
+ } finally {
+ if (_didIteratorError4) {
+ throw _iteratorError4;
+ }
+ }
+ }
+ }
+
+ if (!this.disableAutoFetch && (0, _util.isEmptyObj)(this.requestsByChunk)) {
+ var nextEmptyChunk;
+
+ if (this.stream.numChunksLoaded === 1) {
+ var lastChunk = this.stream.numChunks - 1;
+
+ if (!this.stream.hasChunk(lastChunk)) {
+ nextEmptyChunk = lastChunk;
+ }
+ } else {
+ nextEmptyChunk = this.stream.nextEmptyChunk(endChunk);
+ }
+
+ if (Number.isInteger(nextEmptyChunk)) {
+ this._requestChunks([nextEmptyChunk]);
+ }
+ }
+
+ for (var _i = 0, _loadedRequests = loadedRequests; _i < _loadedRequests.length; _i++) {
+ var _requestId = _loadedRequests[_i];
+ var capability = this.promisesByRequest[_requestId];
+ delete this.promisesByRequest[_requestId];
+ capability.resolve();
+ }
+
+ this.msgHandler.send('DocProgress', {
+ loaded: this.stream.numChunksLoaded * this.chunkSize,
+ total: this.length
+ });
+ }
+ }, {
+ key: "onError",
+ value: function onError(err) {
+ this._loadedStreamCapability.reject(err);
+ }
+ }, {
+ key: "getBeginChunk",
+ value: function getBeginChunk(begin) {
+ return Math.floor(begin / this.chunkSize);
+ }
+ }, {
+ key: "getEndChunk",
+ value: function getEndChunk(end) {
+ return Math.floor((end - 1) / this.chunkSize) + 1;
+ }
+ }, {
+ key: "abort",
+ value: function abort() {
+ this.aborted = true;
+
+ if (this.pdfNetworkStream) {
+ this.pdfNetworkStream.cancelAllRequests('abort');
+ }
+
+ for (var requestId in this.promisesByRequest) {
+ this.promisesByRequest[requestId].reject(new Error('Request was aborted'));
+ }
+ }
+ }]);
+
+ return ChunkedStreamManager;
+}();
+
+exports.ChunkedStreamManager = ChunkedStreamManager;
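+
+// Chunk arithmetic sketch for the manager above (illustrative numbers only): with
+// rangeChunkSize = 65536 and length = 200000 the stream has ceil(200000 / 65536) = 4
+// chunks; requestRange(70000, 140000) yields beginChunk = floor(70000 / 65536) = 1
+// and endChunk = floor((140000 - 1) / 65536) + 1 = 3, so chunks 1 and 2 are needed,
+// and groupChunks([1, 2]) merges them into one { beginChunk: 1, endChunk: 3 } group,
+// i.e. a single range request for bytes [65536, 196608).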
+
+/***/ }),
+/* 154 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.getLookupTableFactory = getLookupTableFactory;
+exports.getInheritableProperty = getInheritableProperty;
+exports.toRomanNumerals = toRomanNumerals;
+exports.XRefParseException = exports.XRefEntryException = exports.MissingDataException = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+function getLookupTableFactory(initializer) {
+ var lookup;
+ return function () {
+ if (initializer) {
+ lookup = Object.create(null);
+ initializer(lookup);
+ initializer = null;
+ }
+
+ return lookup;
+ };
+}
+
+var MissingDataException = function MissingDataExceptionClosure() {
+ function MissingDataException(begin, end) {
+ this.begin = begin;
+ this.end = end;
+ this.message = "Missing data [".concat(begin, ", ").concat(end, ")");
+ }
+
+ MissingDataException.prototype = new Error();
+ MissingDataException.prototype.name = 'MissingDataException';
+ MissingDataException.constructor = MissingDataException;
+ return MissingDataException;
+}();
+
+exports.MissingDataException = MissingDataException;
+
+var XRefEntryException = function XRefEntryExceptionClosure() {
+ function XRefEntryException(msg) {
+ this.message = msg;
+ }
+
+ XRefEntryException.prototype = new Error();
+ XRefEntryException.prototype.name = 'XRefEntryException';
+ XRefEntryException.constructor = XRefEntryException;
+ return XRefEntryException;
+}();
+
+exports.XRefEntryException = XRefEntryException;
+
+var XRefParseException = function XRefParseExceptionClosure() {
+ function XRefParseException(msg) {
+ this.message = msg;
+ }
+
+ XRefParseException.prototype = new Error();
+ XRefParseException.prototype.name = 'XRefParseException';
+ XRefParseException.constructor = XRefParseException;
+ return XRefParseException;
+}();
+
+exports.XRefParseException = XRefParseException;
+
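+// getInheritableProperty below walks the /Parent chain of `dict` (at most LOOP_LIMIT
+// levels) looking for `key`; with stopWhenFound === false it keeps collecting every
+// value found along the chain instead of returning the first match.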
+function getInheritableProperty(_ref) {
+ var dict = _ref.dict,
+ key = _ref.key,
+ _ref$getArray = _ref.getArray,
+ getArray = _ref$getArray === void 0 ? false : _ref$getArray,
+ _ref$stopWhenFound = _ref.stopWhenFound,
+ stopWhenFound = _ref$stopWhenFound === void 0 ? true : _ref$stopWhenFound;
+ var LOOP_LIMIT = 100;
+ var loopCount = 0;
+ var values;
+
+ while (dict) {
+ var value = getArray ? dict.getArray(key) : dict.get(key);
+
+ if (value !== undefined) {
+ if (stopWhenFound) {
+ return value;
+ }
+
+ if (!values) {
+ values = [];
+ }
+
+ values.push(value);
+ }
+
+ if (++loopCount > LOOP_LIMIT) {
+ (0, _util.warn)("getInheritableProperty: maximum loop count exceeded for \"".concat(key, "\""));
+ break;
+ }
+
+ dict = dict.get('Parent');
+ }
+
+ return values;
+}
+
+var ROMAN_NUMBER_MAP = ['', 'C', 'CC', 'CCC', 'CD', 'D', 'DC', 'DCC', 'DCCC', 'CM', '', 'X', 'XX', 'XXX', 'XL', 'L', 'LX', 'LXX', 'LXXX', 'XC', '', 'I', 'II', 'III', 'IV', 'V', 'VI', 'VII', 'VIII', 'IX'];
+
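+// Converts a positive integer to a Roman numeral (optionally lower-cased) using the
+// hundreds/tens/units rows of ROMAN_NUMBER_MAP; used for page labels of style R/r.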
+function toRomanNumerals(number) {
+ var lowerCase = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
+ (0, _util.assert)(Number.isInteger(number) && number > 0, 'The number should be a positive integer.');
+ var pos,
+ romanBuf = [];
+
+ while (number >= 1000) {
+ number -= 1000;
+ romanBuf.push('M');
+ }
+
+ pos = number / 100 | 0;
+ number %= 100;
+ romanBuf.push(ROMAN_NUMBER_MAP[pos]);
+ pos = number / 10 | 0;
+ number %= 10;
+ romanBuf.push(ROMAN_NUMBER_MAP[10 + pos]);
+ romanBuf.push(ROMAN_NUMBER_MAP[20 + number]);
+ var romanStr = romanBuf.join('');
+ return lowerCase ? romanStr.toLowerCase() : romanStr;
+}
+
+/***/ }),
+/* 155 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.PDFDocument = exports.Page = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _obj = __w_pdfjs_require__(156);
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _core_utils = __w_pdfjs_require__(154);
+
+var _stream2 = __w_pdfjs_require__(158);
+
+var _annotation = __w_pdfjs_require__(170);
+
+var _crypto = __w_pdfjs_require__(168);
+
+var _parser = __w_pdfjs_require__(157);
+
+var _operator_list = __w_pdfjs_require__(171);
+
+var _evaluator = __w_pdfjs_require__(172);
+
+var _function = __w_pdfjs_require__(186);
+
+function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
+
+function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
+
+function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
+
+function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var DEFAULT_USER_UNIT = 1.0;
+var LETTER_SIZE_MEDIABOX = [0, 0, 612, 792];
+
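+// An annotation is only rendered when it matches the requested intent:
+// viewable annotations for 'display', printable ones for 'print'.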
+function isAnnotationRenderable(annotation, intent) {
+ return intent === 'display' && annotation.viewable || intent === 'print' && annotation.printable;
+}
+
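+// Page wraps a single /Page dictionary. Geometry and annotation getters (mediaBox,
+// cropBox, userUnit, view, rotate, annotations) are shadow-cached on first access;
+// getOperatorList() and extractTextContent() drive rendering and text extraction
+// through a PartialEvaluator.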
+var Page =
+/*#__PURE__*/
+function () {
+ function Page(_ref) {
+ var pdfManager = _ref.pdfManager,
+ xref = _ref.xref,
+ pageIndex = _ref.pageIndex,
+ pageDict = _ref.pageDict,
+ ref = _ref.ref,
+ fontCache = _ref.fontCache,
+ builtInCMapCache = _ref.builtInCMapCache,
+ pdfFunctionFactory = _ref.pdfFunctionFactory;
+
+ _classCallCheck(this, Page);
+
+ this.pdfManager = pdfManager;
+ this.pageIndex = pageIndex;
+ this.pageDict = pageDict;
+ this.xref = xref;
+ this.ref = ref;
+ this.fontCache = fontCache;
+ this.builtInCMapCache = builtInCMapCache;
+ this.pdfFunctionFactory = pdfFunctionFactory;
+ this.evaluatorOptions = pdfManager.evaluatorOptions;
+ this.resourcesPromise = null;
+ var idCounters = {
+ obj: 0
+ };
+ this.idFactory = {
+ createObjId: function createObjId() {
+ return "p".concat(pageIndex, "_").concat(++idCounters.obj);
+ },
+ getDocId: function getDocId() {
+ return "g_".concat(pdfManager.docId);
+ }
+ };
+ }
+
+ _createClass(Page, [{
+ key: "_getInheritableProperty",
+ value: function _getInheritableProperty(key) {
+ var getArray = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
+ var value = (0, _core_utils.getInheritableProperty)({
+ dict: this.pageDict,
+ key: key,
+ getArray: getArray,
+ stopWhenFound: false
+ });
+
+ if (!Array.isArray(value)) {
+ return value;
+ }
+
+ if (value.length === 1 || !(0, _primitives.isDict)(value[0])) {
+ return value[0];
+ }
+
+ return _primitives.Dict.merge(this.xref, value);
+ }
+ }, {
+ key: "getContentStream",
+ value: function getContentStream() {
+ var content = this.content;
+ var stream;
+
+ if (Array.isArray(content)) {
+ var xref = this.xref;
+ var streams = [];
+ var _iteratorNormalCompletion = true;
+ var _didIteratorError = false;
+ var _iteratorError = undefined;
+
+ try {
+ for (var _iterator = content[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
+ var _stream = _step.value;
+ streams.push(xref.fetchIfRef(_stream));
+ }
+ } catch (err) {
+ _didIteratorError = true;
+ _iteratorError = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion && _iterator["return"] != null) {
+ _iterator["return"]();
+ }
+ } finally {
+ if (_didIteratorError) {
+ throw _iteratorError;
+ }
+ }
+ }
+
+ stream = new _stream2.StreamsSequenceStream(streams);
+ } else if ((0, _primitives.isStream)(content)) {
+ stream = content;
+ } else {
+ stream = new _stream2.NullStream();
+ }
+
+ return stream;
+ }
+ }, {
+ key: "loadResources",
+ value: function loadResources(keys) {
+ var _this = this;
+
+ if (!this.resourcesPromise) {
+ this.resourcesPromise = this.pdfManager.ensure(this, 'resources');
+ }
+
+ return this.resourcesPromise.then(function () {
+ var objectLoader = new _obj.ObjectLoader(_this.resources, keys, _this.xref);
+ return objectLoader.load();
+ });
+ }
+ }, {
+ key: "getOperatorList",
+ value: function getOperatorList(_ref2) {
+ var _this2 = this;
+
+ var handler = _ref2.handler,
+ task = _ref2.task,
+ intent = _ref2.intent,
+ renderInteractiveForms = _ref2.renderInteractiveForms;
+ var contentStreamPromise = this.pdfManager.ensure(this, 'getContentStream');
+ var resourcesPromise = this.loadResources(['ExtGState', 'ColorSpace', 'Pattern', 'Shading', 'XObject', 'Font']);
+ var partialEvaluator = new _evaluator.PartialEvaluator({
+ xref: this.xref,
+ handler: handler,
+ pageIndex: this.pageIndex,
+ idFactory: this.idFactory,
+ fontCache: this.fontCache,
+ builtInCMapCache: this.builtInCMapCache,
+ options: this.evaluatorOptions,
+ pdfFunctionFactory: this.pdfFunctionFactory
+ });
+ var dataPromises = Promise.all([contentStreamPromise, resourcesPromise]);
+ var pageListPromise = dataPromises.then(function (_ref3) {
+ var _ref4 = _slicedToArray(_ref3, 1),
+ contentStream = _ref4[0];
+
+ var opList = new _operator_list.OperatorList(intent, handler, _this2.pageIndex);
+ handler.send('StartRenderPage', {
+ transparency: partialEvaluator.hasBlendModes(_this2.resources),
+ pageIndex: _this2.pageIndex,
+ intent: intent
+ });
+ return partialEvaluator.getOperatorList({
+ stream: contentStream,
+ task: task,
+ resources: _this2.resources,
+ operatorList: opList
+ }).then(function () {
+ return opList;
+ });
+ });
+ return Promise.all([pageListPromise, this._parsedAnnotations]).then(function (_ref5) {
+ var _ref6 = _slicedToArray(_ref5, 2),
+ pageOpList = _ref6[0],
+ annotations = _ref6[1];
+
+ if (annotations.length === 0) {
+ pageOpList.flush(true);
+ return pageOpList;
+ }
+
+ var opListPromises = [];
+ var _iteratorNormalCompletion2 = true;
+ var _didIteratorError2 = false;
+ var _iteratorError2 = undefined;
+
+ try {
+ for (var _iterator2 = annotations[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
+ var annotation = _step2.value;
+
+ if (isAnnotationRenderable(annotation, intent)) {
+ opListPromises.push(annotation.getOperatorList(partialEvaluator, task, renderInteractiveForms));
+ }
+ }
+ } catch (err) {
+ _didIteratorError2 = true;
+ _iteratorError2 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) {
+ _iterator2["return"]();
+ }
+ } finally {
+ if (_didIteratorError2) {
+ throw _iteratorError2;
+ }
+ }
+ }
+
+ return Promise.all(opListPromises).then(function (opLists) {
+ pageOpList.addOp(_util.OPS.beginAnnotations, []);
+ var _iteratorNormalCompletion3 = true;
+ var _didIteratorError3 = false;
+ var _iteratorError3 = undefined;
+
+ try {
+ for (var _iterator3 = opLists[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
+ var opList = _step3.value;
+ pageOpList.addOpList(opList);
+ }
+ } catch (err) {
+ _didIteratorError3 = true;
+ _iteratorError3 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion3 && _iterator3["return"] != null) {
+ _iterator3["return"]();
+ }
+ } finally {
+ if (_didIteratorError3) {
+ throw _iteratorError3;
+ }
+ }
+ }
+
+ pageOpList.addOp(_util.OPS.endAnnotations, []);
+ pageOpList.flush(true);
+ return pageOpList;
+ });
+ });
+ }
+ }, {
+ key: "extractTextContent",
+ value: function extractTextContent(_ref7) {
+ var _this3 = this;
+
+ var handler = _ref7.handler,
+ task = _ref7.task,
+ normalizeWhitespace = _ref7.normalizeWhitespace,
+ sink = _ref7.sink,
+ combineTextItems = _ref7.combineTextItems;
+ var contentStreamPromise = this.pdfManager.ensure(this, 'getContentStream');
+ var resourcesPromise = this.loadResources(['ExtGState', 'XObject', 'Font']);
+ var dataPromises = Promise.all([contentStreamPromise, resourcesPromise]);
+ return dataPromises.then(function (_ref8) {
+ var _ref9 = _slicedToArray(_ref8, 1),
+ contentStream = _ref9[0];
+
+ var partialEvaluator = new _evaluator.PartialEvaluator({
+ xref: _this3.xref,
+ handler: handler,
+ pageIndex: _this3.pageIndex,
+ idFactory: _this3.idFactory,
+ fontCache: _this3.fontCache,
+ builtInCMapCache: _this3.builtInCMapCache,
+ options: _this3.evaluatorOptions,
+ pdfFunctionFactory: _this3.pdfFunctionFactory
+ });
+ return partialEvaluator.getTextContent({
+ stream: contentStream,
+ task: task,
+ resources: _this3.resources,
+ normalizeWhitespace: normalizeWhitespace,
+ combineTextItems: combineTextItems,
+ sink: sink
+ });
+ });
+ }
+ }, {
+ key: "getAnnotationsData",
+ value: function getAnnotationsData(intent) {
+ return this._parsedAnnotations.then(function (annotations) {
+ var annotationsData = [];
+
+ for (var i = 0, ii = annotations.length; i < ii; i++) {
+ if (!intent || isAnnotationRenderable(annotations[i], intent)) {
+ annotationsData.push(annotations[i].data);
+ }
+ }
+
+ return annotationsData;
+ });
+ }
+ }, {
+ key: "content",
+ get: function get() {
+ return this.pageDict.get('Contents');
+ }
+ }, {
+ key: "resources",
+ get: function get() {
+ return (0, _util.shadow)(this, 'resources', this._getInheritableProperty('Resources') || _primitives.Dict.empty);
+ }
+ }, {
+ key: "mediaBox",
+ get: function get() {
+ var mediaBox = this._getInheritableProperty('MediaBox', true);
+
+ if (!Array.isArray(mediaBox) || mediaBox.length !== 4) {
+ return (0, _util.shadow)(this, 'mediaBox', LETTER_SIZE_MEDIABOX);
+ }
+
+ return (0, _util.shadow)(this, 'mediaBox', mediaBox);
+ }
+ }, {
+ key: "cropBox",
+ get: function get() {
+ var cropBox = this._getInheritableProperty('CropBox', true);
+
+ if (!Array.isArray(cropBox) || cropBox.length !== 4) {
+ return (0, _util.shadow)(this, 'cropBox', this.mediaBox);
+ }
+
+ return (0, _util.shadow)(this, 'cropBox', cropBox);
+ }
+ }, {
+ key: "userUnit",
+ get: function get() {
+ var obj = this.pageDict.get('UserUnit');
+
+ if (!(0, _util.isNum)(obj) || obj <= 0) {
+ obj = DEFAULT_USER_UNIT;
+ }
+
+ return (0, _util.shadow)(this, 'userUnit', obj);
+ }
+ }, {
+ key: "view",
+ get: function get() {
+ var mediaBox = this.mediaBox,
+ cropBox = this.cropBox;
+
+ if (mediaBox === cropBox) {
+ return (0, _util.shadow)(this, 'view', mediaBox);
+ }
+
+ var intersection = _util.Util.intersect(cropBox, mediaBox);
+
+ return (0, _util.shadow)(this, 'view', intersection || mediaBox);
+ }
+ }, {
+ key: "rotate",
+ get: function get() {
+ var rotate = this._getInheritableProperty('Rotate') || 0;
+
+ if (rotate % 90 !== 0) {
+ rotate = 0;
+ } else if (rotate >= 360) {
+ rotate = rotate % 360;
+ } else if (rotate < 0) {
+ rotate = (rotate % 360 + 360) % 360;
+ }
+
+ return (0, _util.shadow)(this, 'rotate', rotate);
+ }
+ }, {
+ key: "annotations",
+ get: function get() {
+ return (0, _util.shadow)(this, 'annotations', this._getInheritableProperty('Annots') || []);
+ }
+ }, {
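+ // Lazily resolves /Annots into Annotation objects via AnnotationFactory, dropping
+ // entries that could not be created and falling back to an empty list (with a
+ // warning) if parsing fails.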
+ key: "_parsedAnnotations",
+ get: function get() {
+ var _this4 = this;
+
+ var parsedAnnotations = this.pdfManager.ensure(this, 'annotations').then(function () {
+ var annotationRefs = _this4.annotations;
+ var annotationPromises = [];
+
+ for (var i = 0, ii = annotationRefs.length; i < ii; i++) {
+ annotationPromises.push(_annotation.AnnotationFactory.create(_this4.xref, annotationRefs[i], _this4.pdfManager, _this4.idFactory));
+ }
+
+ return Promise.all(annotationPromises).then(function (annotations) {
+ return annotations.filter(function isDefined(annotation) {
+ return !!annotation;
+ });
+ }, function (reason) {
+ (0, _util.warn)("_parsedAnnotations: \"".concat(reason, "\"."));
+ return [];
+ });
+ });
+ return (0, _util.shadow)(this, '_parsedAnnotations', parsedAnnotations);
+ }
+ }]);
+
+ return Page;
+}();
+
+exports.Page = Page;
+var FINGERPRINT_FIRST_BYTES = 1024;
+var EMPTY_FINGERPRINT = '\x00\x00\x00\x00\x00\x00\x00' + '\x00\x00\x00\x00\x00\x00\x00\x00\x00';
+
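+// Searches up to `limit` bytes of the stream for `needle` (backwards when requested);
+// on a match, stream.pos is advanced to the match position and true is returned.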
+function find(stream, needle, limit) {
+ var backwards = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false;
+ (0, _util.assert)(limit > 0, 'The "limit" must be a positive integer.');
+ var str = (0, _util.bytesToString)(stream.peekBytes(limit));
+ var index = backwards ? str.lastIndexOf(needle) : str.indexOf(needle);
+
+ if (index === -1) {
+ return false;
+ }
+
+ stream.pos += index;
+ return true;
+}
+
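+// PDFDocument owns the raw stream, its XRef and the PDFFunctionFactory. checkHeader()
+// reads the %PDF- version, parse()/setup() build the xref and Catalog (and read the
+// AcroForm/Collection entries), and getPage() resolves and caches Page objects, using
+// the linearization dictionary for the first page when one is present.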
+var PDFDocument =
+/*#__PURE__*/
+function () {
+ function PDFDocument(pdfManager, arg) {
+ _classCallCheck(this, PDFDocument);
+
+ var stream;
+
+ if ((0, _primitives.isStream)(arg)) {
+ stream = arg;
+ } else if ((0, _util.isArrayBuffer)(arg)) {
+ stream = new _stream2.Stream(arg);
+ } else {
+ throw new Error('PDFDocument: Unknown argument type');
+ }
+
+ if (stream.length <= 0) {
+ throw new Error('PDFDocument: Stream must have data');
+ }
+
+ this.pdfManager = pdfManager;
+ this.stream = stream;
+ this.xref = new _obj.XRef(stream, pdfManager);
+ this.pdfFunctionFactory = new _function.PDFFunctionFactory({
+ xref: this.xref,
+ isEvalSupported: pdfManager.evaluatorOptions.isEvalSupported
+ });
+ this._pagePromises = [];
+ }
+
+ _createClass(PDFDocument, [{
+ key: "parse",
+ value: function parse(recoveryMode) {
+ this.setup(recoveryMode);
+ var version = this.catalog.catDict.get('Version');
+
+ if ((0, _primitives.isName)(version)) {
+ this.pdfFormatVersion = version.name;
+ }
+
+ try {
+ this.acroForm = this.catalog.catDict.get('AcroForm');
+
+ if (this.acroForm) {
+ this.xfa = this.acroForm.get('XFA');
+ var fields = this.acroForm.get('Fields');
+
+ if ((!Array.isArray(fields) || fields.length === 0) && !this.xfa) {
+ this.acroForm = null;
+ }
+ }
+ } catch (ex) {
+ if (ex instanceof _core_utils.MissingDataException) {
+ throw ex;
+ }
+
+ (0, _util.info)('Cannot fetch AcroForm entry; assuming no AcroForms are present');
+ this.acroForm = null;
+ }
+
+ try {
+ var collection = this.catalog.catDict.get('Collection');
+
+ if ((0, _primitives.isDict)(collection) && collection.getKeys().length > 0) {
+ this.collection = collection;
+ }
+ } catch (ex) {
+ if (ex instanceof _core_utils.MissingDataException) {
+ throw ex;
+ }
+
+ (0, _util.info)('Cannot fetch Collection dictionary.');
+ }
+ }
+ }, {
+ key: "checkHeader",
+ value: function checkHeader() {
+ var stream = this.stream;
+ stream.reset();
+
+ if (!find(stream, '%PDF-', 1024)) {
+ return;
+ }
+
+ stream.moveStart();
+ var MAX_PDF_VERSION_LENGTH = 12;
+ var version = '',
+ ch;
+
+ while ((ch = stream.getByte()) > 0x20) {
+ if (version.length >= MAX_PDF_VERSION_LENGTH) {
+ break;
+ }
+
+ version += String.fromCharCode(ch);
+ }
+
+ if (!this.pdfFormatVersion) {
+ this.pdfFormatVersion = version.substring(5);
+ }
+ }
+ }, {
+ key: "parseStartXRef",
+ value: function parseStartXRef() {
+ this.xref.setStartXRef(this.startXRef);
+ }
+ }, {
+ key: "setup",
+ value: function setup(recoveryMode) {
+ this.xref.parse(recoveryMode);
+ this.catalog = new _obj.Catalog(this.pdfManager, this.xref);
+ }
+ }, {
+ key: "_getLinearizationPage",
+ value: function _getLinearizationPage(pageIndex) {
+ var catalog = this.catalog,
+ linearization = this.linearization;
+ (0, _util.assert)(linearization && linearization.pageFirst === pageIndex);
+
+ var ref = _primitives.Ref.get(linearization.objectNumberFirst, 0);
+
+ return this.xref.fetchAsync(ref).then(function (obj) {
+ if ((0, _primitives.isDict)(obj, 'Page') || (0, _primitives.isDict)(obj) && !obj.has('Type') && obj.has('Contents')) {
+ if (ref && !catalog.pageKidsCountCache.has(ref)) {
+ catalog.pageKidsCountCache.put(ref, 1);
+ }
+
+ return [obj, ref];
+ }
+
+ throw new _util.FormatError('The Linearization dictionary doesn\'t point ' + 'to a valid Page dictionary.');
+ })["catch"](function (reason) {
+ (0, _util.info)(reason);
+ return catalog.getPageDict(pageIndex);
+ });
+ }
+ }, {
+ key: "getPage",
+ value: function getPage(pageIndex) {
+ var _this5 = this;
+
+ if (this._pagePromises[pageIndex] !== undefined) {
+ return this._pagePromises[pageIndex];
+ }
+
+ var catalog = this.catalog,
+ linearization = this.linearization;
+ var promise = linearization && linearization.pageFirst === pageIndex ? this._getLinearizationPage(pageIndex) : catalog.getPageDict(pageIndex);
+ return this._pagePromises[pageIndex] = promise.then(function (_ref10) {
+ var _ref11 = _slicedToArray(_ref10, 2),
+ pageDict = _ref11[0],
+ ref = _ref11[1];
+
+ return new Page({
+ pdfManager: _this5.pdfManager,
+ xref: _this5.xref,
+ pageIndex: pageIndex,
+ pageDict: pageDict,
+ ref: ref,
+ fontCache: catalog.fontCache,
+ builtInCMapCache: catalog.builtInCMapCache,
+ pdfFunctionFactory: _this5.pdfFunctionFactory
+ });
+ });
+ }
+ }, {
+ key: "checkFirstPage",
+ value: function checkFirstPage() {
+ var _this6 = this;
+
+ return this.getPage(0)["catch"](function (reason) {
+ if (reason instanceof _core_utils.XRefEntryException) {
+ _this6._pagePromises.length = 0;
+
+ _this6.cleanup();
+
+ throw new _core_utils.XRefParseException();
+ }
+ });
+ }
+ }, {
+ key: "fontFallback",
+ value: function fontFallback(id, handler) {
+ return this.catalog.fontFallback(id, handler);
+ }
+ }, {
+ key: "cleanup",
+ value: function cleanup() {
+ return this.catalog.cleanup();
+ }
+ }, {
+ key: "linearization",
+ get: function get() {
+ var linearization = null;
+
+ try {
+ linearization = _parser.Linearization.create(this.stream);
+ } catch (err) {
+ if (err instanceof _core_utils.MissingDataException) {
+ throw err;
+ }
+
+ (0, _util.info)(err);
+ }
+
+ return (0, _util.shadow)(this, 'linearization', linearization);
+ }
+ }, {
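+ // Locates the startxref offset: right after the first 'endobj' for linearized files,
+ // otherwise by scanning backwards in 1024-byte steps for the 'startxref' keyword and
+ // parsing the decimal offset that follows it.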
+ key: "startXRef",
+ get: function get() {
+ var stream = this.stream;
+ var startXRef = 0;
+
+ if (this.linearization) {
+ stream.reset();
+
+ if (find(stream, 'endobj', 1024)) {
+ startXRef = stream.pos + 6;
+ }
+ } else {
+ var step = 1024;
+ var startXRefLength = 'startxref'.length;
+ var found = false,
+ pos = stream.end;
+
+ while (!found && pos > 0) {
+ pos -= step - startXRefLength;
+
+ if (pos < 0) {
+ pos = 0;
+ }
+
+ stream.pos = pos;
+ found = find(stream, 'startxref', step, true);
+ }
+
+ if (found) {
+ stream.skip(9);
+ var ch;
+
+ do {
+ ch = stream.getByte();
+ } while ((0, _util.isSpace)(ch));
+
+ var str = '';
+
+ while (ch >= 0x20 && ch <= 0x39) {
+ str += String.fromCharCode(ch);
+ ch = stream.getByte();
+ }
+
+ startXRef = parseInt(str, 10);
+
+ if (isNaN(startXRef)) {
+ startXRef = 0;
+ }
+ }
+ }
+
+ return (0, _util.shadow)(this, 'startXRef', startXRef);
+ }
+ }, {
+ key: "numPages",
+ get: function get() {
+ var linearization = this.linearization;
+ var num = linearization ? linearization.numPages : this.catalog.numPages;
+ return (0, _util.shadow)(this, 'numPages', num);
+ }
+ }, {
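+ // Builds the documentInfo object from the trailer's /Info dictionary: well-known keys
+ // are type-checked via DocumentInfoValidators, and any other string/name/number/bool
+ // entries are collected under docInfo.Custom.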
+ key: "documentInfo",
+ get: function get() {
+ var DocumentInfoValidators = {
+ Title: _util.isString,
+ Author: _util.isString,
+ Subject: _util.isString,
+ Keywords: _util.isString,
+ Creator: _util.isString,
+ Producer: _util.isString,
+ CreationDate: _util.isString,
+ ModDate: _util.isString,
+ Trapped: _primitives.isName
+ };
+ var docInfo = {
+ PDFFormatVersion: this.pdfFormatVersion,
+ IsLinearized: !!this.linearization,
+ IsAcroFormPresent: !!this.acroForm,
+ IsXFAPresent: !!this.xfa,
+ IsCollectionPresent: !!this.collection
+ };
+ var infoDict;
+
+ try {
+ infoDict = this.xref.trailer.get('Info');
+ } catch (err) {
+ if (err instanceof _core_utils.MissingDataException) {
+ throw err;
+ }
+
+ (0, _util.info)('The document information dictionary is invalid.');
+ }
+
+ if ((0, _primitives.isDict)(infoDict)) {
+ var _iteratorNormalCompletion4 = true;
+ var _didIteratorError4 = false;
+ var _iteratorError4 = undefined;
+
+ try {
+ for (var _iterator4 = infoDict.getKeys()[Symbol.iterator](), _step4; !(_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done); _iteratorNormalCompletion4 = true) {
+ var key = _step4.value;
+ var value = infoDict.get(key);
+
+ if (DocumentInfoValidators[key]) {
+ if (DocumentInfoValidators[key](value)) {
+ docInfo[key] = typeof value !== 'string' ? value : (0, _util.stringToPDFString)(value);
+ } else {
+ (0, _util.info)("Bad value in document info for \"".concat(key, "\"."));
+ }
+ } else if (typeof key === 'string') {
+ var customValue = void 0;
+
+ if ((0, _util.isString)(value)) {
+ customValue = (0, _util.stringToPDFString)(value);
+ } else if ((0, _primitives.isName)(value) || (0, _util.isNum)(value) || (0, _util.isBool)(value)) {
+ customValue = value;
+ } else {
+ (0, _util.info)("Unsupported value in document info for (custom) \"".concat(key, "\"."));
+ continue;
+ }
+
+ if (!docInfo['Custom']) {
+ docInfo['Custom'] = Object.create(null);
+ }
+
+ docInfo['Custom'][key] = customValue;
+ }
+ }
+ } catch (err) {
+ _didIteratorError4 = true;
+ _iteratorError4 = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion4 && _iterator4["return"] != null) {
+ _iterator4["return"]();
+ }
+ } finally {
+ if (_didIteratorError4) {
+ throw _iteratorError4;
+ }
+ }
+ }
+ }
+
+ return (0, _util.shadow)(this, 'documentInfo', docInfo);
+ }
+ }, {
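+ // The fingerprint is the first entry of the trailer /ID array when it is a string
+ // other than the all-zero placeholder, otherwise an MD5 of the first 1024 bytes of
+ // the file; either way it is returned hex-encoded.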
+ key: "fingerprint",
+ get: function get() {
+ var hash;
+ var idArray = this.xref.trailer.get('ID');
+
+ if (Array.isArray(idArray) && idArray[0] && (0, _util.isString)(idArray[0]) && idArray[0] !== EMPTY_FINGERPRINT) {
+ hash = (0, _util.stringToBytes)(idArray[0]);
+ } else {
+ if (this.stream.ensureRange) {
+ this.stream.ensureRange(0, Math.min(FINGERPRINT_FIRST_BYTES, this.stream.end));
+ }
+
+ hash = (0, _crypto.calculateMD5)(this.stream.bytes.subarray(0, FINGERPRINT_FIRST_BYTES), 0, FINGERPRINT_FIRST_BYTES);
+ }
+
+ var fingerprint = '';
+
+ for (var i = 0, ii = hash.length; i < ii; i++) {
+ var hex = hash[i].toString(16);
+ fingerprint += hex.length === 1 ? '0' + hex : hex;
+ }
+
+ return (0, _util.shadow)(this, 'fingerprint', fingerprint);
+ }
+ }]);
+
+ return PDFDocument;
+}();
+
+exports.PDFDocument = PDFDocument;
+
+/***/ }),
+/* 156 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.FileSpec = exports.XRef = exports.ObjectLoader = exports.Catalog = void 0;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(2));
+
+var _util = __w_pdfjs_require__(5);
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _parser = __w_pdfjs_require__(157);
+
+var _core_utils = __w_pdfjs_require__(154);
+
+var _chunked_stream = __w_pdfjs_require__(153);
+
+var _crypto = __w_pdfjs_require__(168);
+
+var _colorspace = __w_pdfjs_require__(169);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
+function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
+
+function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
+
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
+
+function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
+
+function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
+
+function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+function fetchDestination(dest) {
+ return (0, _primitives.isDict)(dest) ? dest.get('D') : dest;
+}
+
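+// Catalog wraps the document's /Root dictionary and caches fonts, built-in CMaps and
+// page-tree kid counts. It exposes the outline, permissions, named destinations, page
+// labels, viewer preferences, attachments and document-level JavaScript, plus page
+// lookup via getPageDict()/getPageIndex().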
+var Catalog =
+/*#__PURE__*/
+function () {
+ function Catalog(pdfManager, xref) {
+ _classCallCheck(this, Catalog);
+
+ this.pdfManager = pdfManager;
+ this.xref = xref;
+ this.catDict = xref.getCatalogObj();
+
+ if (!(0, _primitives.isDict)(this.catDict)) {
+ throw new _util.FormatError('Catalog object is not a dictionary.');
+ }
+
+ this.fontCache = new _primitives.RefSetCache();
+ this.builtInCMapCache = new Map();
+ this.pageKidsCountCache = new _primitives.RefSetCache();
+ }
+
+ _createClass(Catalog, [{
+ key: "_readDocumentOutline",
+ value: function _readDocumentOutline() {
+ var obj = this.catDict.get('Outlines');
+
+ if (!(0, _primitives.isDict)(obj)) {
+ return null;
+ }
+
+ obj = obj.getRaw('First');
+
+ if (!(0, _primitives.isRef)(obj)) {
+ return null;
+ }
+
+ var root = {
+ items: []
+ };
+ var queue = [{
+ obj: obj,
+ parent: root
+ }];
+ var processed = new _primitives.RefSet();
+ processed.put(obj);
+ var xref = this.xref,
+ blackColor = new Uint8ClampedArray(3);
+
+ while (queue.length > 0) {
+ var i = queue.shift();
+ var outlineDict = xref.fetchIfRef(i.obj);
+
+ if (outlineDict === null) {
+ continue;
+ }
+
+ if (!outlineDict.has('Title')) {
+ throw new _util.FormatError('Invalid outline item encountered.');
+ }
+
+ var data = {
+ url: null,
+ dest: null
+ };
+ Catalog.parseDestDictionary({
+ destDict: outlineDict,
+ resultObj: data,
+ docBaseUrl: this.pdfManager.docBaseUrl
+ });
+ var title = outlineDict.get('Title');
+ var flags = outlineDict.get('F') || 0;
+ var color = outlineDict.getArray('C');
+ var count = outlineDict.get('Count');
+ var rgbColor = blackColor;
+
+ if (Array.isArray(color) && color.length === 3 && (color[0] !== 0 || color[1] !== 0 || color[2] !== 0)) {
+ rgbColor = _colorspace.ColorSpace.singletons.rgb.getRgb(color, 0);
+ }
+
+ var outlineItem = {
+ dest: data.dest,
+ url: data.url,
+ unsafeUrl: data.unsafeUrl,
+ newWindow: data.newWindow,
+ title: (0, _util.stringToPDFString)(title),
+ color: rgbColor,
+ count: Number.isInteger(count) ? count : undefined,
+ bold: !!(flags & 2),
+ italic: !!(flags & 1),
+ items: []
+ };
+ i.parent.items.push(outlineItem);
+ obj = outlineDict.getRaw('First');
+
+ if ((0, _primitives.isRef)(obj) && !processed.has(obj)) {
+ queue.push({
+ obj: obj,
+ parent: outlineItem
+ });
+ processed.put(obj);
+ }
+
+ obj = outlineDict.getRaw('Next');
+
+ if ((0, _primitives.isRef)(obj) && !processed.has(obj)) {
+ queue.push({
+ obj: obj,
+ parent: i.parent
+ });
+ processed.put(obj);
+ }
+ }
+
+ return root.items.length > 0 ? root.items : null;
+ }
+ }, {
+ key: "_readPermissions",
+ value: function _readPermissions() {
+ var encrypt = this.xref.trailer.get('Encrypt');
+
+ if (!(0, _primitives.isDict)(encrypt)) {
+ return null;
+ }
+
+ var flags = encrypt.get('P');
+
+ if (!(0, _util.isNum)(flags)) {
+ return null;
+ }
+
+ flags += Math.pow(2, 32);
+ var permissions = [];
+
+ for (var key in _util.PermissionFlag) {
+ var value = _util.PermissionFlag[key];
+
+ if (flags & value) {
+ permissions.push(value);
+ }
+ }
+
+ return permissions;
+ }
+ }, {
+ key: "getDestination",
+ value: function getDestination(destinationId) {
+ var obj = this._readDests();
+
+ if (obj instanceof NameTree || obj instanceof _primitives.Dict) {
+ return fetchDestination(obj.get(destinationId) || null);
+ }
+
+ return null;
+ }
+ }, {
+ key: "_readDests",
+ value: function _readDests() {
+ var obj = this.catDict.get('Names');
+
+ if (obj && obj.has('Dests')) {
+ return new NameTree(obj.getRaw('Dests'), this.xref);
+ } else if (this.catDict.has('Dests')) {
+ return this.catDict.get('Dests');
+ }
+
+ return undefined;
+ }
+ }, {
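+ // Expands the /PageLabels number tree into one label per page, honouring the
+ // numbering style (D, R/r, A/a), the /P prefix and the /St start value of each range.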
+ key: "_readPageLabels",
+ value: function _readPageLabels() {
+ var obj = this.catDict.getRaw('PageLabels');
+
+ if (!obj) {
+ return null;
+ }
+
+ var pageLabels = new Array(this.numPages);
+ var style = null,
+ prefix = '';
+ var numberTree = new NumberTree(obj, this.xref);
+ var nums = numberTree.getAll();
+ var currentLabel = '',
+ currentIndex = 1;
+
+ for (var i = 0, ii = this.numPages; i < ii; i++) {
+ if (i in nums) {
+ var labelDict = nums[i];
+
+ if (!(0, _primitives.isDict)(labelDict)) {
+ throw new _util.FormatError('PageLabel is not a dictionary.');
+ }
+
+ if (labelDict.has('Type') && !(0, _primitives.isName)(labelDict.get('Type'), 'PageLabel')) {
+ throw new _util.FormatError('Invalid type in PageLabel dictionary.');
+ }
+
+ if (labelDict.has('S')) {
+ var s = labelDict.get('S');
+
+ if (!(0, _primitives.isName)(s)) {
+ throw new _util.FormatError('Invalid style in PageLabel dictionary.');
+ }
+
+ style = s.name;
+ } else {
+ style = null;
+ }
+
+ if (labelDict.has('P')) {
+ var p = labelDict.get('P');
+
+ if (!(0, _util.isString)(p)) {
+ throw new _util.FormatError('Invalid prefix in PageLabel dictionary.');
+ }
+
+ prefix = (0, _util.stringToPDFString)(p);
+ } else {
+ prefix = '';
+ }
+
+ if (labelDict.has('St')) {
+ var st = labelDict.get('St');
+
+ if (!(Number.isInteger(st) && st >= 1)) {
+ throw new _util.FormatError('Invalid start in PageLabel dictionary.');
+ }
+
+ currentIndex = st;
+ } else {
+ currentIndex = 1;
+ }
+ }
+
+ switch (style) {
+ case 'D':
+ currentLabel = currentIndex;
+ break;
+
+ case 'R':
+ case 'r':
+ currentLabel = (0, _core_utils.toRomanNumerals)(currentIndex, style === 'r');
+ break;
+
+ case 'A':
+ case 'a':
+ var LIMIT = 26;
+ var A_UPPER_CASE = 0x41,
+ A_LOWER_CASE = 0x61;
+ var baseCharCode = style === 'a' ? A_LOWER_CASE : A_UPPER_CASE;
+ var letterIndex = currentIndex - 1;
+ var character = String.fromCharCode(baseCharCode + letterIndex % LIMIT);
+ var charBuf = [];
+
+ for (var j = 0, jj = letterIndex / LIMIT | 0; j <= jj; j++) {
+ charBuf.push(character);
+ }
+
+ currentLabel = charBuf.join('');
+ break;
+
+ default:
+ if (style) {
+ throw new _util.FormatError("Invalid style \"".concat(style, "\" in PageLabel dictionary."));
+ }
+
+ currentLabel = '';
+ }
+
+ pageLabels[i] = prefix + currentLabel;
+ currentIndex++;
+ }
+
+ return pageLabels;
+ }
+ }, {
+ key: "fontFallback",
+ value: function fontFallback(id, handler) {
+ var promises = [];
+ this.fontCache.forEach(function (promise) {
+ promises.push(promise);
+ });
+ return Promise.all(promises).then(function (translatedFonts) {
+ var _iteratorNormalCompletion = true;
+ var _didIteratorError = false;
+ var _iteratorError = undefined;
+
+ try {
+ for (var _iterator = translatedFonts[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
+ var translatedFont = _step.value;
+
+ if (translatedFont.loadedName === id) {
+ translatedFont.fallback(handler);
+ return;
+ }
+ }
+ } catch (err) {
+ _didIteratorError = true;
+ _iteratorError = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion && _iterator["return"] != null) {
+ _iterator["return"]();
+ }
+ } finally {
+ if (_didIteratorError) {
+ throw _iteratorError;
+ }
+ }
+ }
+ });
+ }
+ }, {
+ key: "cleanup",
+ value: function cleanup() {
+ var _this = this;
+
+ (0, _primitives.clearPrimitiveCaches)();
+ this.pageKidsCountCache.clear();
+ var promises = [];
+ this.fontCache.forEach(function (promise) {
+ promises.push(promise);
+ });
+ return Promise.all(promises).then(function (translatedFonts) {
+ for (var i = 0, ii = translatedFonts.length; i < ii; i++) {
+ var font = translatedFonts[i].dict;
+ delete font.translated;
+ }
+
+ _this.fontCache.clear();
+
+ _this.builtInCMapCache.clear();
+ });
+ }
+ }, {
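+ // Iteratively walks the /Pages tree to locate the dictionary for pageIndex, using
+ // pageKidsCountCache to skip subtrees whose /Count is already known.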
+ key: "getPageDict",
+ value: function getPageDict(pageIndex) {
+ var capability = (0, _util.createPromiseCapability)();
+ var nodesToVisit = [this.catDict.getRaw('Pages')];
+ var xref = this.xref,
+ pageKidsCountCache = this.pageKidsCountCache;
+ var count,
+ currentPageIndex = 0;
+
+ function next() {
+ var _loop = function _loop() {
+ var currentNode = nodesToVisit.pop();
+
+ if ((0, _primitives.isRef)(currentNode)) {
+ count = pageKidsCountCache.get(currentNode);
+
+ if (count > 0 && currentPageIndex + count < pageIndex) {
+ currentPageIndex += count;
+ return "continue";
+ }
+
+ xref.fetchAsync(currentNode).then(function (obj) {
+ if ((0, _primitives.isDict)(obj, 'Page') || (0, _primitives.isDict)(obj) && !obj.has('Kids')) {
+ if (pageIndex === currentPageIndex) {
+ if (currentNode && !pageKidsCountCache.has(currentNode)) {
+ pageKidsCountCache.put(currentNode, 1);
+ }
+
+ capability.resolve([obj, currentNode]);
+ } else {
+ currentPageIndex++;
+ next();
+ }
+
+ return;
+ }
+
+ nodesToVisit.push(obj);
+ next();
+ }, capability.reject);
+ return {
+ v: void 0
+ };
+ }
+
+ if (!(0, _primitives.isDict)(currentNode)) {
+ capability.reject(new _util.FormatError('Page dictionary kid reference points to wrong type of object.'));
+ return {
+ v: void 0
+ };
+ }
+
+ count = currentNode.get('Count');
+
+ if (Number.isInteger(count) && count >= 0) {
+ var objId = currentNode.objId;
+
+ if (objId && !pageKidsCountCache.has(objId)) {
+ pageKidsCountCache.put(objId, count);
+ }
+
+ if (currentPageIndex + count <= pageIndex) {
+ currentPageIndex += count;
+ return "continue";
+ }
+ }
+
+ var kids = currentNode.get('Kids');
+
+ if (!Array.isArray(kids)) {
+ if ((0, _primitives.isName)(currentNode.get('Type'), 'Page') || !currentNode.has('Type') && currentNode.has('Contents')) {
+ if (currentPageIndex === pageIndex) {
+ capability.resolve([currentNode, null]);
+ return {
+ v: void 0
+ };
+ }
+
+ currentPageIndex++;
+ return "continue";
+ }
+
+ capability.reject(new _util.FormatError('Page dictionary kids object is not an array.'));
+ return {
+ v: void 0
+ };
+ }
+
+ for (var last = kids.length - 1; last >= 0; last--) {
+ nodesToVisit.push(kids[last]);
+ }
+ };
+
+ while (nodesToVisit.length) {
+ var _ret = _loop();
+
+ switch (_ret) {
+ case "continue":
+ continue;
+
+ default:
+ if (_typeof(_ret) === "object") return _ret.v;
+ }
+ }
+
+ capability.reject(new Error("Page index ".concat(pageIndex, " not found.")));
+ }
+
+ next();
+ return capability.promise;
+ }
+ }, {
+ key: "getPageIndex",
+ value: function getPageIndex(pageRef) {
+ var xref = this.xref;
+
+ function pagesBeforeRef(kidRef) {
+ var total = 0,
+ parentRef;
+ return xref.fetchAsync(kidRef).then(function (node) {
+ if ((0, _primitives.isRefsEqual)(kidRef, pageRef) && !(0, _primitives.isDict)(node, 'Page') && !((0, _primitives.isDict)(node) && !node.has('Type') && node.has('Contents'))) {
+ throw new _util.FormatError('The reference does not point to a /Page dictionary.');
+ }
+
+ if (!node) {
+ return null;
+ }
+
+ if (!(0, _primitives.isDict)(node)) {
+ throw new _util.FormatError('Node must be a dictionary.');
+ }
+
+ parentRef = node.getRaw('Parent');
+ return node.getAsync('Parent');
+ }).then(function (parent) {
+ if (!parent) {
+ return null;
+ }
+
+ if (!(0, _primitives.isDict)(parent)) {
+ throw new _util.FormatError('Parent must be a dictionary.');
+ }
+
+ return parent.getAsync('Kids');
+ }).then(function (kids) {
+ if (!kids) {
+ return null;
+ }
+
+ var kidPromises = [];
+ var found = false;
+
+ for (var i = 0, ii = kids.length; i < ii; i++) {
+ var kid = kids[i];
+
+ if (!(0, _primitives.isRef)(kid)) {
+ throw new _util.FormatError('Kid must be a reference.');
+ }
+
+ if ((0, _primitives.isRefsEqual)(kid, kidRef)) {
+ found = true;
+ break;
+ }
+
+ kidPromises.push(xref.fetchAsync(kid).then(function (kid) {
+ if (!(0, _primitives.isDict)(kid)) {
+ throw new _util.FormatError('Kid node must be a dictionary.');
+ }
+
+ if (kid.has('Count')) {
+ total += kid.get('Count');
+ } else {
+ total++;
+ }
+ }));
+ }
+
+ if (!found) {
+ throw new _util.FormatError('Kid reference not found in parent\'s kids.');
+ }
+
+ return Promise.all(kidPromises).then(function () {
+ return [total, parentRef];
+ });
+ });
+ }
+
+ var total = 0;
+
+ function next(ref) {
+ return pagesBeforeRef(ref).then(function (args) {
+ if (!args) {
+ return total;
+ }
+
+ var _args = _slicedToArray(args, 2),
+ count = _args[0],
+ parentRef = _args[1];
+
+ total += count;
+ return next(parentRef);
+ });
+ }
+
+ return next(pageRef);
+ }
+ }, {
+ key: "metadata",
+ get: function get() {
+ var streamRef = this.catDict.getRaw('Metadata');
+
+ if (!(0, _primitives.isRef)(streamRef)) {
+ return (0, _util.shadow)(this, 'metadata', null);
+ }
+
+ var suppressEncryption = !(this.xref.encrypt && this.xref.encrypt.encryptMetadata);
+ var stream = this.xref.fetch(streamRef, suppressEncryption);
+ var metadata;
+
+ if (stream && (0, _primitives.isDict)(stream.dict)) {
+ var type = stream.dict.get('Type');
+ var subtype = stream.dict.get('Subtype');
+
+ if ((0, _primitives.isName)(type, 'Metadata') && (0, _primitives.isName)(subtype, 'XML')) {
+ try {
+ metadata = (0, _util.stringToUTF8String)((0, _util.bytesToString)(stream.getBytes()));
+ } catch (e) {
+ if (e instanceof _core_utils.MissingDataException) {
+ throw e;
+ }
+
+ (0, _util.info)('Skipping invalid metadata.');
+ }
+ }
+ }
+
+ return (0, _util.shadow)(this, 'metadata', metadata);
+ }
+ }, {
+ key: "toplevelPagesDict",
+ get: function get() {
+ var pagesObj = this.catDict.get('Pages');
+
+ if (!(0, _primitives.isDict)(pagesObj)) {
+ throw new _util.FormatError('Invalid top-level pages dictionary.');
+ }
+
+ return (0, _util.shadow)(this, 'toplevelPagesDict', pagesObj);
+ }
+ }, {
+ key: "documentOutline",
+ get: function get() {
+ var obj = null;
+
+ try {
+ obj = this._readDocumentOutline();
+ } catch (ex) {
+ if (ex instanceof _core_utils.MissingDataException) {
+ throw ex;
+ }
+
+ (0, _util.warn)('Unable to read document outline.');
+ }
+
+ return (0, _util.shadow)(this, 'documentOutline', obj);
+ }
+ }, {
+ key: "permissions",
+ get: function get() {
+ var permissions = null;
+
+ try {
+ permissions = this._readPermissions();
+ } catch (ex) {
+ if (ex instanceof _core_utils.MissingDataException) {
+ throw ex;
+ }
+
+ (0, _util.warn)('Unable to read permissions.');
+ }
+
+ return (0, _util.shadow)(this, 'permissions', permissions);
+ }
+ }, {
+ key: "numPages",
+ get: function get() {
+ var obj = this.toplevelPagesDict.get('Count');
+
+ if (!Number.isInteger(obj)) {
+ throw new _util.FormatError('Page count in top-level pages dictionary is not an integer.');
+ }
+
+ return (0, _util.shadow)(this, 'numPages', obj);
+ }
+ }, {
+ key: "destinations",
+ get: function get() {
+ var obj = this._readDests(),
+ dests = Object.create(null);
+
+ if (obj instanceof NameTree) {
+ var names = obj.getAll();
+
+ for (var name in names) {
+ dests[name] = fetchDestination(names[name]);
+ }
+ } else if (obj instanceof _primitives.Dict) {
+ obj.forEach(function (key, value) {
+ if (value) {
+ dests[key] = fetchDestination(value);
+ }
+ });
+ }
+
+ return (0, _util.shadow)(this, 'destinations', dests);
+ }
+ }, {
+ key: "pageLabels",
+ get: function get() {
+ var obj = null;
+
+ try {
+ obj = this._readPageLabels();
+ } catch (ex) {
+ if (ex instanceof _core_utils.MissingDataException) {
+ throw ex;
+ }
+
+ (0, _util.warn)('Unable to read page labels.');
+ }
+
+ return (0, _util.shadow)(this, 'pageLabels', obj);
+ }
+ }, {
+ key: "pageLayout",
+ get: function get() {
+ var obj = this.catDict.get('PageLayout');
+ var pageLayout = '';
+
+ if ((0, _primitives.isName)(obj)) {
+ switch (obj.name) {
+ case 'SinglePage':
+ case 'OneColumn':
+ case 'TwoColumnLeft':
+ case 'TwoColumnRight':
+ case 'TwoPageLeft':
+ case 'TwoPageRight':
+ pageLayout = obj.name;
+ }
+ }
+
+ return (0, _util.shadow)(this, 'pageLayout', pageLayout);
+ }
+ }, {
+ key: "pageMode",
+ get: function get() {
+ var obj = this.catDict.get('PageMode');
+ var pageMode = 'UseNone';
+
+ if ((0, _primitives.isName)(obj)) {
+ switch (obj.name) {
+ case 'UseNone':
+ case 'UseOutlines':
+ case 'UseThumbs':
+ case 'FullScreen':
+ case 'UseOC':
+ case 'UseAttachments':
+ pageMode = obj.name;
+ }
+ }
+
+ return (0, _util.shadow)(this, 'pageMode', pageMode);
+ }
+ }, {
+ key: "viewerPreferences",
+ get: function get() {
+ var _this2 = this;
+
+ var ViewerPreferencesValidators = {
+ HideToolbar: _util.isBool,
+ HideMenubar: _util.isBool,
+ HideWindowUI: _util.isBool,
+ FitWindow: _util.isBool,
+ CenterWindow: _util.isBool,
+ DisplayDocTitle: _util.isBool,
+ NonFullScreenPageMode: _primitives.isName,
+ Direction: _primitives.isName,
+ ViewArea: _primitives.isName,
+ ViewClip: _primitives.isName,
+ PrintArea: _primitives.isName,
+ PrintClip: _primitives.isName,
+ PrintScaling: _primitives.isName,
+ Duplex: _primitives.isName,
+ PickTrayByPDFSize: _util.isBool,
+ PrintPageRange: Array.isArray,
+ NumCopies: Number.isInteger
+ };
+ var obj = this.catDict.get('ViewerPreferences');
+ var prefs = Object.create(null);
+
+ if ((0, _primitives.isDict)(obj)) {
+ for (var key in ViewerPreferencesValidators) {
+ if (!obj.has(key)) {
+ continue;
+ }
+
+ var value = obj.get(key);
+
+ if (!ViewerPreferencesValidators[key](value)) {
+ (0, _util.info)("Bad value in ViewerPreferences for \"".concat(key, "\"."));
+ continue;
+ }
+
+ var prefValue = void 0;
+
+ switch (key) {
+ case 'NonFullScreenPageMode':
+ switch (value.name) {
+ case 'UseNone':
+ case 'UseOutlines':
+ case 'UseThumbs':
+ case 'UseOC':
+ prefValue = value.name;
+ break;
+
+ default:
+ prefValue = 'UseNone';
+ }
+
+ break;
+
+ case 'Direction':
+ switch (value.name) {
+ case 'L2R':
+ case 'R2L':
+ prefValue = value.name;
+ break;
+
+ default:
+ prefValue = 'L2R';
+ }
+
+ break;
+
+ case 'ViewArea':
+ case 'ViewClip':
+ case 'PrintArea':
+ case 'PrintClip':
+ switch (value.name) {
+ case 'MediaBox':
+ case 'CropBox':
+ case 'BleedBox':
+ case 'TrimBox':
+ case 'ArtBox':
+ prefValue = value.name;
+ break;
+
+ default:
+ prefValue = 'CropBox';
+ }
+
+ break;
+
+ case 'PrintScaling':
+ switch (value.name) {
+ case 'None':
+ case 'AppDefault':
+ prefValue = value.name;
+ break;
+
+ default:
+ prefValue = 'AppDefault';
+ }
+
+ break;
+
+ case 'Duplex':
+ switch (value.name) {
+ case 'Simplex':
+ case 'DuplexFlipShortEdge':
+ case 'DuplexFlipLongEdge':
+ prefValue = value.name;
+ break;
+
+ default:
+ prefValue = 'None';
+ }
+
+ break;
+
+ case 'PrintPageRange':
+ var length = value.length;
+
+ if (length % 2 !== 0) {
+ break;
+ }
+
+ var isValid = value.every(function (page, i, arr) {
+ return Number.isInteger(page) && page > 0 && (i === 0 || page >= arr[i - 1]) && page <= _this2.numPages;
+ });
+
+ if (isValid) {
+ prefValue = value;
+ }
+
+ break;
+
+ case 'NumCopies':
+ if (value > 0) {
+ prefValue = value;
+ }
+
+ break;
+
+ default:
+ (0, _util.assert)(typeof value === 'boolean');
+ prefValue = value;
+ }
+
+ if (prefValue !== undefined) {
+ prefs[key] = prefValue;
+ } else {
+ (0, _util.info)("Bad value in ViewerPreferences for \"".concat(key, "\"."));
+ }
+ }
+ }
+
+ return (0, _util.shadow)(this, 'viewerPreferences', prefs);
+ }
+ }, {
+ key: "openActionDestination",
+ get: function get() {
+ var obj = this.catDict.get('OpenAction');
+ var openActionDest = null;
+
+ if ((0, _primitives.isDict)(obj)) {
+ var destDict = new _primitives.Dict(this.xref);
+ destDict.set('A', obj);
+ var resultObj = {
+ url: null,
+ dest: null
+ };
+ Catalog.parseDestDictionary({
+ destDict: destDict,
+ resultObj: resultObj
+ });
+
+ if (Array.isArray(resultObj.dest)) {
+ openActionDest = resultObj.dest;
+ }
+ } else if (Array.isArray(obj)) {
+ openActionDest = obj;
+ }
+
+ return (0, _util.shadow)(this, 'openActionDestination', openActionDest);
+ }
+ }, {
+ key: "attachments",
+ get: function get() {
+ var obj = this.catDict.get('Names');
+ var attachments = null;
+
+ if (obj && obj.has('EmbeddedFiles')) {
+ var nameTree = new NameTree(obj.getRaw('EmbeddedFiles'), this.xref);
+ var names = nameTree.getAll();
+
+ for (var name in names) {
+ var fs = new FileSpec(names[name], this.xref);
+
+ if (!attachments) {
+ attachments = Object.create(null);
+ }
+
+ attachments[(0, _util.stringToPDFString)(name)] = fs.serializable;
+ }
+ }
+
+ return (0, _util.shadow)(this, 'attachments', attachments);
+ }
+ }, {
+ key: "javaScript",
+ get: function get() {
+ var obj = this.catDict.get('Names');
+ var javaScript = null;
+
+ function appendIfJavaScriptDict(jsDict) {
+ var type = jsDict.get('S');
+
+ if (!(0, _primitives.isName)(type, 'JavaScript')) {
+ return;
+ }
+
+ var js = jsDict.get('JS');
+
+ if ((0, _primitives.isStream)(js)) {
+ js = (0, _util.bytesToString)(js.getBytes());
+ } else if (!(0, _util.isString)(js)) {
+ return;
+ }
+
+ if (!javaScript) {
+ javaScript = [];
+ }
+
+ javaScript.push((0, _util.stringToPDFString)(js));
+ }
+
+ if (obj && obj.has('JavaScript')) {
+ var nameTree = new NameTree(obj.getRaw('JavaScript'), this.xref);
+ var names = nameTree.getAll();
+
+ for (var name in names) {
+ var jsDict = names[name];
+
+ if ((0, _primitives.isDict)(jsDict)) {
+ appendIfJavaScriptDict(jsDict);
+ }
+ }
+ }
+
+ var openActionDict = this.catDict.get('OpenAction');
+
+ if ((0, _primitives.isDict)(openActionDict, 'Action')) {
+ var actionType = openActionDict.get('S');
+
+ if ((0, _primitives.isName)(actionType, 'Named')) {
+ var action = openActionDict.get('N');
+
+ if ((0, _primitives.isName)(action, 'Print')) {
+ if (!javaScript) {
+ javaScript = [];
+ }
+
+ javaScript.push('print({});');
+ }
+ } else {
+ appendIfJavaScriptDict(openActionDict);
+ }
+ }
+
+ return (0, _util.shadow)(this, 'javaScript', javaScript);
+ }
+ }], [{
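+ // Static helper: extracts url/unsafeUrl, dest, newWindow and action information from
+ // a destination or action dictionary (URI, GoTo, GoToR/Launch, Named and JavaScript
+ // actions are recognised).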
+ key: "parseDestDictionary",
+ value: function parseDestDictionary(params) {
+ function addDefaultProtocolToUrl(url) {
+ return url.startsWith('www.') ? "http://".concat(url) : url;
+ }
+
+ function tryConvertUrlEncoding(url) {
+ try {
+ return (0, _util.stringToUTF8String)(url);
+ } catch (e) {
+ return url;
+ }
+ }
+
+ var destDict = params.destDict;
+
+ if (!(0, _primitives.isDict)(destDict)) {
+ (0, _util.warn)('parseDestDictionary: `destDict` must be a dictionary.');
+ return;
+ }
+
+ var resultObj = params.resultObj;
+
+ if (_typeof(resultObj) !== 'object') {
+ (0, _util.warn)('parseDestDictionary: `resultObj` must be an object.');
+ return;
+ }
+
+ var docBaseUrl = params.docBaseUrl || null;
+ var action = destDict.get('A'),
+ url,
+ dest;
+
+ if (!(0, _primitives.isDict)(action) && destDict.has('Dest')) {
+ action = destDict.get('Dest');
+ }
+
+ if ((0, _primitives.isDict)(action)) {
+ var actionType = action.get('S');
+
+ if (!(0, _primitives.isName)(actionType)) {
+ (0, _util.warn)('parseDestDictionary: Invalid type in Action dictionary.');
+ return;
+ }
+
+ var actionName = actionType.name;
+
+ switch (actionName) {
+ case 'URI':
+ url = action.get('URI');
+
+ if ((0, _primitives.isName)(url)) {
+ url = '/' + url.name;
+ } else if ((0, _util.isString)(url)) {
+ url = addDefaultProtocolToUrl(url);
+ }
+
+ break;
+
+ case 'GoTo':
+ dest = action.get('D');
+ break;
+
+ case 'Launch':
+ case 'GoToR':
+ var urlDict = action.get('F');
+
+ if ((0, _primitives.isDict)(urlDict)) {
+ url = urlDict.get('F') || null;
+ } else if ((0, _util.isString)(urlDict)) {
+ url = urlDict;
+ }
+
+ var remoteDest = action.get('D');
+
+ if (remoteDest) {
+ if ((0, _primitives.isName)(remoteDest)) {
+ remoteDest = remoteDest.name;
+ }
+
+ if ((0, _util.isString)(url)) {
+ var baseUrl = url.split('#')[0];
+
+ if ((0, _util.isString)(remoteDest)) {
+ url = baseUrl + '#' + remoteDest;
+ } else if (Array.isArray(remoteDest)) {
+ url = baseUrl + '#' + JSON.stringify(remoteDest);
+ }
+ }
+ }
+
+ var newWindow = action.get('NewWindow');
+
+ if ((0, _util.isBool)(newWindow)) {
+ resultObj.newWindow = newWindow;
+ }
+
+ break;
+
+ case 'Named':
+ var namedAction = action.get('N');
+
+ if ((0, _primitives.isName)(namedAction)) {
+ resultObj.action = namedAction.name;
+ }
+
+ break;
+
+ case 'JavaScript':
+ var jsAction = action.get('JS');
+ var js;
+
+ if ((0, _primitives.isStream)(jsAction)) {
+ js = (0, _util.bytesToString)(jsAction.getBytes());
+ } else if ((0, _util.isString)(jsAction)) {
+ js = jsAction;
+ }
+
+ if (js) {
+ var URL_OPEN_METHODS = ['app.launchURL', 'window.open'];
+ var regex = new RegExp('^\\s*(' + URL_OPEN_METHODS.join('|').split('.').join('\\.') + ')\\((?:\'|\")([^\'\"]*)(?:\'|\")(?:,\\s*(\\w+)\\)|\\))', 'i');
+ var jsUrl = regex.exec((0, _util.stringToPDFString)(js));
+
+ if (jsUrl && jsUrl[2]) {
+ url = jsUrl[2];
+
+ if (jsUrl[3] === 'true' && jsUrl[1] === 'app.launchURL') {
+ resultObj.newWindow = true;
+ }
+
+ break;
+ }
+ }
+
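+ // No URL could be extracted from the JavaScript action: fall through to the
+ // unsupported-action warning below.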
+ default:
+ (0, _util.warn)("parseDestDictionary: unsupported action type \"".concat(actionName, "\"."));
+ break;
+ }
+ } else if (destDict.has('Dest')) {
+ dest = destDict.get('Dest');
+ }
+
+ if ((0, _util.isString)(url)) {
+ url = tryConvertUrlEncoding(url);
+ var absoluteUrl = (0, _util.createValidAbsoluteUrl)(url, docBaseUrl);
+
+ if (absoluteUrl) {
+ resultObj.url = absoluteUrl.href;
+ }
+
+ resultObj.unsafeUrl = url;
+ }
+
+ if (dest) {
+ if ((0, _primitives.isName)(dest)) {
+ dest = dest.name;
+ }
+
+ if ((0, _util.isString)(dest) || Array.isArray(dest)) {
+ resultObj.dest = dest;
+ }
+ }
+ }
+ }]);
+
+ return Catalog;
+}();
+
+exports.Catalog = Catalog;
+
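+// XRef reads the cross-reference data: setStartXRef()/parse() process classic xref
+// tables (processXRefTable/readXRefTable) and /XRef streams (processXRefStream/
+// readXRefStream), set up decryption from the trailer's /Encrypt entry, and in
+// recovery mode rebuild the table by scanning the file with indexObjects().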
+var XRef = function XRefClosure() {
+ function XRef(stream, pdfManager) {
+ this.stream = stream;
+ this.pdfManager = pdfManager;
+ this.entries = [];
+ this.xrefstms = Object.create(null);
+ this.cache = [];
+ this.stats = {
+ streamTypes: [],
+ fontTypes: []
+ };
+ }
+
+ XRef.prototype = {
+ setStartXRef: function XRef_setStartXRef(startXRef) {
+ this.startXRefQueue = [startXRef];
+ },
+ parse: function XRef_parse(recoveryMode) {
+ var trailerDict;
+
+ if (!recoveryMode) {
+ trailerDict = this.readXRef();
+ } else {
+ (0, _util.warn)('Indexing all PDF objects');
+ trailerDict = this.indexObjects();
+ }
+
+ trailerDict.assignXref(this);
+ this.trailer = trailerDict;
+ var encrypt;
+
+ try {
+ encrypt = trailerDict.get('Encrypt');
+ } catch (ex) {
+ if (ex instanceof _core_utils.MissingDataException) {
+ throw ex;
+ }
+
+ (0, _util.warn)("XRef.parse - Invalid \"Encrypt\" reference: \"".concat(ex, "\"."));
+ }
+
+ if ((0, _primitives.isDict)(encrypt)) {
+ var ids = trailerDict.get('ID');
+ var fileId = ids && ids.length ? ids[0] : '';
+ encrypt.suppressEncryption = true;
+ this.encrypt = new _crypto.CipherTransformFactory(encrypt, fileId, this.pdfManager.password);
+ }
+
+ var root;
+
+ try {
+ root = trailerDict.get('Root');
+ } catch (ex) {
+ if (ex instanceof _core_utils.MissingDataException) {
+ throw ex;
+ }
+
+ (0, _util.warn)("XRef.parse - Invalid \"Root\" reference: \"".concat(ex, "\"."));
+ }
+
+ if ((0, _primitives.isDict)(root) && root.has('Pages')) {
+ this.root = root;
+ } else {
+ if (!recoveryMode) {
+ throw new _core_utils.XRefParseException();
+ }
+
+ throw new _util.FormatError('Invalid root reference');
+ }
+ },
+ processXRefTable: function XRef_processXRefTable(parser) {
+ if (!('tableState' in this)) {
+ this.tableState = {
+ entryNum: 0,
+ streamPos: parser.lexer.stream.pos,
+ parserBuf1: parser.buf1,
+ parserBuf2: parser.buf2
+ };
+ }
+
+ var obj = this.readXRefTable(parser);
+
+ if (!(0, _primitives.isCmd)(obj, 'trailer')) {
+ throw new _util.FormatError('Invalid XRef table: could not find trailer dictionary');
+ }
+
+ var dict = parser.getObj();
+
+ if (!(0, _primitives.isDict)(dict) && dict.dict) {
+ dict = dict.dict;
+ }
+
+ if (!(0, _primitives.isDict)(dict)) {
+ throw new _util.FormatError('Invalid XRef table: could not parse trailer dictionary');
+ }
+
+ delete this.tableState;
+ return dict;
+ },
+ readXRefTable: function XRef_readXRefTable(parser) {
+ var stream = parser.lexer.stream;
+ var tableState = this.tableState;
+ stream.pos = tableState.streamPos;
+ parser.buf1 = tableState.parserBuf1;
+ parser.buf2 = tableState.parserBuf2;
+ var obj;
+
+ while (true) {
+ if (!('firstEntryNum' in tableState) || !('entryCount' in tableState)) {
+ if ((0, _primitives.isCmd)(obj = parser.getObj(), 'trailer')) {
+ break;
+ }
+
+ tableState.firstEntryNum = obj;
+ tableState.entryCount = parser.getObj();
+ }
+
+ var first = tableState.firstEntryNum;
+ var count = tableState.entryCount;
+
+ if (!Number.isInteger(first) || !Number.isInteger(count)) {
+ throw new _util.FormatError('Invalid XRef table: wrong types in subsection header');
+ }
+
+ for (var i = tableState.entryNum; i < count; i++) {
+ tableState.streamPos = stream.pos;
+ tableState.entryNum = i;
+ tableState.parserBuf1 = parser.buf1;
+ tableState.parserBuf2 = parser.buf2;
+ var entry = {};
+ entry.offset = parser.getObj();
+ entry.gen = parser.getObj();
+ var type = parser.getObj();
+
+ if (type instanceof _primitives.Cmd) {
+ switch (type.cmd) {
+ case 'f':
+ entry.free = true;
+ break;
+
+ case 'n':
+ entry.uncompressed = true;
+ break;
+ }
+ }
+
+ if (!Number.isInteger(entry.offset) || !Number.isInteger(entry.gen) || !(entry.free || entry.uncompressed)) {
+ throw new _util.FormatError("Invalid entry in XRef subsection: ".concat(first, ", ").concat(count));
+ }
+
+ if (i === 0 && entry.free && first === 1) {
+ first = 0;
+ }
+
+ if (!this.entries[i + first]) {
+ this.entries[i + first] = entry;
+ }
+ }
+
+ tableState.entryNum = 0;
+ tableState.streamPos = stream.pos;
+ tableState.parserBuf1 = parser.buf1;
+ tableState.parserBuf2 = parser.buf2;
+ delete tableState.firstEntryNum;
+ delete tableState.entryCount;
+ }
+
+ if (this.entries[0] && !this.entries[0].free) {
+ throw new _util.FormatError('Invalid XRef table: unexpected first object');
+ }
+
+ return obj;
+ },
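+    // Reads a cross-reference stream: /W holds the byte widths of the
+    // [type, offset, generation] fields and /Index lists [first, count]
+    // pairs (defaulting to [0, /Size]). Entry types: 0 = free,
+    // 1 = uncompressed, 2 = stored in an object stream.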
+ processXRefStream: function XRef_processXRefStream(stream) {
+ if (!('streamState' in this)) {
+ var streamParameters = stream.dict;
+ var byteWidths = streamParameters.get('W');
+ var range = streamParameters.get('Index');
+
+ if (!range) {
+ range = [0, streamParameters.get('Size')];
+ }
+
+ this.streamState = {
+ entryRanges: range,
+ byteWidths: byteWidths,
+ entryNum: 0,
+ streamPos: stream.pos
+ };
+ }
+
+ this.readXRefStream(stream);
+ delete this.streamState;
+ return stream.dict;
+ },
+ readXRefStream: function XRef_readXRefStream(stream) {
+ var i, j;
+ var streamState = this.streamState;
+ stream.pos = streamState.streamPos;
+ var byteWidths = streamState.byteWidths;
+ var typeFieldWidth = byteWidths[0];
+ var offsetFieldWidth = byteWidths[1];
+ var generationFieldWidth = byteWidths[2];
+ var entryRanges = streamState.entryRanges;
+
+ while (entryRanges.length > 0) {
+ var first = entryRanges[0];
+ var n = entryRanges[1];
+
+ if (!Number.isInteger(first) || !Number.isInteger(n)) {
+ throw new _util.FormatError("Invalid XRef range fields: ".concat(first, ", ").concat(n));
+ }
+
+ if (!Number.isInteger(typeFieldWidth) || !Number.isInteger(offsetFieldWidth) || !Number.isInteger(generationFieldWidth)) {
+ throw new _util.FormatError("Invalid XRef entry fields length: ".concat(first, ", ").concat(n));
+ }
+
+ for (i = streamState.entryNum; i < n; ++i) {
+ streamState.entryNum = i;
+ streamState.streamPos = stream.pos;
+ var type = 0,
+ offset = 0,
+ generation = 0;
+
+ for (j = 0; j < typeFieldWidth; ++j) {
+ type = type << 8 | stream.getByte();
+ }
+
+ if (typeFieldWidth === 0) {
+ type = 1;
+ }
+
+ for (j = 0; j < offsetFieldWidth; ++j) {
+ offset = offset << 8 | stream.getByte();
+ }
+
+ for (j = 0; j < generationFieldWidth; ++j) {
+ generation = generation << 8 | stream.getByte();
+ }
+
+ var entry = {};
+ entry.offset = offset;
+ entry.gen = generation;
+
+ switch (type) {
+ case 0:
+ entry.free = true;
+ break;
+
+ case 1:
+ entry.uncompressed = true;
+ break;
+
+ case 2:
+ break;
+
+ default:
+ throw new _util.FormatError("Invalid XRef entry type: ".concat(type));
+ }
+
+ if (!this.entries[first + i]) {
+ this.entries[first + i] = entry;
+ }
+ }
+
+ streamState.entryNum = 0;
+ streamState.streamPos = stream.pos;
+ entryRanges.splice(0, 2);
+ }
+ },
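+    // Recovery path: scans the raw bytes of the document for "num gen obj",
+    // "trailer" and "xref" tokens in order to rebuild the entry table when
+    // the regular cross-reference data is missing or corrupt.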
+ indexObjects: function XRef_indexObjects() {
+ var TAB = 0x9,
+ LF = 0xA,
+ CR = 0xD,
+ SPACE = 0x20;
+ var PERCENT = 0x25,
+ LT = 0x3C;
+
+ function readToken(data, offset) {
+ var token = '',
+ ch = data[offset];
+
+ while (ch !== LF && ch !== CR && ch !== LT) {
+ if (++offset >= data.length) {
+ break;
+ }
+
+ token += String.fromCharCode(ch);
+ ch = data[offset];
+ }
+
+ return token;
+ }
+
+ function skipUntil(data, offset, what) {
+ var length = what.length,
+ dataLength = data.length;
+ var skipped = 0;
+
+ while (offset < dataLength) {
+ var i = 0;
+
+ while (i < length && data[offset + i] === what[i]) {
+ ++i;
+ }
+
+ if (i >= length) {
+ break;
+ }
+
+ offset++;
+ skipped++;
+ }
+
+ return skipped;
+ }
+
+ var objRegExp = /^(\d+)\s+(\d+)\s+obj\b/;
+ var endobjRegExp = /\bendobj[\b\s]$/;
+ var nestedObjRegExp = /\s+(\d+\s+\d+\s+obj[\b\s<])$/;
+ var CHECK_CONTENT_LENGTH = 25;
+ var trailerBytes = new Uint8Array([116, 114, 97, 105, 108, 101, 114]);
+ var startxrefBytes = new Uint8Array([115, 116, 97, 114, 116, 120, 114, 101, 102]);
+ var objBytes = new Uint8Array([111, 98, 106]);
+ var xrefBytes = new Uint8Array([47, 88, 82, 101, 102]);
+ this.entries.length = 0;
+ var stream = this.stream;
+ stream.pos = 0;
+ var buffer = stream.getBytes();
+ var position = stream.start,
+ length = buffer.length;
+ var trailers = [],
+ xrefStms = [];
+
+ while (position < length) {
+ var ch = buffer[position];
+
+ if (ch === TAB || ch === LF || ch === CR || ch === SPACE) {
+ ++position;
+ continue;
+ }
+
+ if (ch === PERCENT) {
+ do {
+ ++position;
+
+ if (position >= length) {
+ break;
+ }
+
+ ch = buffer[position];
+ } while (ch !== LF && ch !== CR);
+
+ continue;
+ }
+
+ var token = readToken(buffer, position);
+ var m;
+
+ if (token.startsWith('xref') && (token.length === 4 || /\s/.test(token[4]))) {
+ position += skipUntil(buffer, position, trailerBytes);
+ trailers.push(position);
+ position += skipUntil(buffer, position, startxrefBytes);
+ } else if (m = objRegExp.exec(token)) {
+ var num = m[1] | 0,
+ gen = m[2] | 0;
+
+ if (typeof this.entries[num] === 'undefined') {
+ this.entries[num] = {
+ offset: position - stream.start,
+ gen: gen,
+ uncompressed: true
+ };
+ }
+
+ var contentLength = void 0,
+ startPos = position + token.length;
+
+ while (startPos < buffer.length) {
+ var endPos = startPos + skipUntil(buffer, startPos, objBytes) + 4;
+ contentLength = endPos - position;
+ var checkPos = Math.max(endPos - CHECK_CONTENT_LENGTH, startPos);
+ var tokenStr = (0, _util.bytesToString)(buffer.subarray(checkPos, endPos));
+
+ if (endobjRegExp.test(tokenStr)) {
+ break;
+ } else {
+ var objToken = nestedObjRegExp.exec(tokenStr);
+
+ if (objToken && objToken[1]) {
+ (0, _util.warn)('indexObjects: Found new "obj" inside of another "obj", ' + 'caused by missing "endobj" -- trying to recover.');
+ contentLength -= objToken[1].length;
+ break;
+ }
+ }
+
+ startPos = endPos;
+ }
+
+ var content = buffer.subarray(position, position + contentLength);
+ var xrefTagOffset = skipUntil(content, 0, xrefBytes);
+
+ if (xrefTagOffset < contentLength && content[xrefTagOffset + 5] < 64) {
+ xrefStms.push(position - stream.start);
+ this.xrefstms[position - stream.start] = 1;
+ }
+
+ position += contentLength;
+ } else if (token.startsWith('trailer') && (token.length === 7 || /\s/.test(token[7]))) {
+ trailers.push(position);
+ position += skipUntil(buffer, position, startxrefBytes);
+ } else {
+ position += token.length + 1;
+ }
+ }
+
+ var i, ii;
+
+ for (i = 0, ii = xrefStms.length; i < ii; ++i) {
+ this.startXRefQueue.push(xrefStms[i]);
+ this.readXRef(true);
+ }
+
+ var trailerDict;
+
+ for (i = 0, ii = trailers.length; i < ii; ++i) {
+ stream.pos = trailers[i];
+ var parser = new _parser.Parser({
+ lexer: new _parser.Lexer(stream),
+ xref: this,
+ allowStreams: true,
+ recoveryMode: true
+ });
+ var obj = parser.getObj();
+
+ if (!(0, _primitives.isCmd)(obj, 'trailer')) {
+ continue;
+ }
+
+ var dict = parser.getObj();
+
+ if (!(0, _primitives.isDict)(dict)) {
+ continue;
+ }
+
+ var rootDict = void 0;
+
+ try {
+ rootDict = dict.get('Root');
+ } catch (ex) {
+ if (ex instanceof _core_utils.MissingDataException) {
+ throw ex;
+ }
+
+ continue;
+ }
+
+ if (!(0, _primitives.isDict)(rootDict) || !rootDict.has('Pages')) {
+ continue;
+ }
+
+ if (dict.has('ID')) {
+ return dict;
+ }
+
+ trailerDict = dict;
+ }
+
+ if (trailerDict) {
+ return trailerDict;
+ }
+
+ throw new _util.InvalidPDFException('Invalid PDF structure');
+ },
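+    // Drains startXRefQueue, parsing the table or stream at each offset and
+    // following /Prev and /XRefStm links; the first trailer dictionary that
+    // is parsed becomes this.topDict.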
+ readXRef: function XRef_readXRef(recoveryMode) {
+ var stream = this.stream;
+ var startXRefParsedCache = Object.create(null);
+
+ try {
+ while (this.startXRefQueue.length) {
+ var startXRef = this.startXRefQueue[0];
+
+ if (startXRefParsedCache[startXRef]) {
+ (0, _util.warn)('readXRef - skipping XRef table since it was already parsed.');
+ this.startXRefQueue.shift();
+ continue;
+ }
+
+ startXRefParsedCache[startXRef] = true;
+ stream.pos = startXRef + stream.start;
+ var parser = new _parser.Parser({
+ lexer: new _parser.Lexer(stream),
+ xref: this,
+ allowStreams: true
+ });
+ var obj = parser.getObj();
+ var dict;
+
+ if ((0, _primitives.isCmd)(obj, 'xref')) {
+ dict = this.processXRefTable(parser);
+
+ if (!this.topDict) {
+ this.topDict = dict;
+ }
+
+ obj = dict.get('XRefStm');
+
+ if (Number.isInteger(obj)) {
+ var pos = obj;
+
+ if (!(pos in this.xrefstms)) {
+ this.xrefstms[pos] = 1;
+ this.startXRefQueue.push(pos);
+ }
+ }
+ } else if (Number.isInteger(obj)) {
+ if (!Number.isInteger(parser.getObj()) || !(0, _primitives.isCmd)(parser.getObj(), 'obj') || !(0, _primitives.isStream)(obj = parser.getObj())) {
+ throw new _util.FormatError('Invalid XRef stream');
+ }
+
+ dict = this.processXRefStream(obj);
+
+ if (!this.topDict) {
+ this.topDict = dict;
+ }
+
+ if (!dict) {
+ throw new _util.FormatError('Failed to read XRef stream');
+ }
+ } else {
+ throw new _util.FormatError('Invalid XRef stream header');
+ }
+
+ obj = dict.get('Prev');
+
+ if (Number.isInteger(obj)) {
+ this.startXRefQueue.push(obj);
+ } else if ((0, _primitives.isRef)(obj)) {
+ this.startXRefQueue.push(obj.num);
+ }
+
+ this.startXRefQueue.shift();
+ }
+
+ return this.topDict;
+ } catch (e) {
+ if (e instanceof _core_utils.MissingDataException) {
+ throw e;
+ }
+
+ (0, _util.info)('(while reading XRef): ' + e);
+ }
+
+ if (recoveryMode) {
+ return undefined;
+ }
+
+ throw new _core_utils.XRefParseException();
+ },
+ getEntry: function XRef_getEntry(i) {
+ var xrefEntry = this.entries[i];
+
+ if (xrefEntry && !xrefEntry.free && xrefEntry.offset) {
+ return xrefEntry;
+ }
+
+ return null;
+ },
+ fetchIfRef: function XRef_fetchIfRef(obj, suppressEncryption) {
+ if (!(0, _primitives.isRef)(obj)) {
+ return obj;
+ }
+
+ return this.fetch(obj, suppressEncryption);
+ },
+ fetch: function XRef_fetch(ref, suppressEncryption) {
+ if (!(0, _primitives.isRef)(ref)) {
+ throw new Error('ref object is not a reference');
+ }
+
+ var num = ref.num;
+
+ if (num in this.cache) {
+ var cacheEntry = this.cache[num];
+
+ if (cacheEntry instanceof _primitives.Dict && !cacheEntry.objId) {
+ cacheEntry.objId = ref.toString();
+ }
+
+ return cacheEntry;
+ }
+
+ var xrefEntry = this.getEntry(num);
+
+ if (xrefEntry === null) {
+ return this.cache[num] = null;
+ }
+
+ if (xrefEntry.uncompressed) {
+ xrefEntry = this.fetchUncompressed(ref, xrefEntry, suppressEncryption);
+ } else {
+ xrefEntry = this.fetchCompressed(ref, xrefEntry, suppressEncryption);
+ }
+
+ if ((0, _primitives.isDict)(xrefEntry)) {
+ xrefEntry.objId = ref.toString();
+ } else if ((0, _primitives.isStream)(xrefEntry)) {
+ xrefEntry.dict.objId = ref.toString();
+ }
+
+ return xrefEntry;
+ },
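+    // Parses the object at the entry's byte offset and verifies that its
+    // "num gen obj" header matches the requested reference.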
+ fetchUncompressed: function fetchUncompressed(ref, xrefEntry) {
+ var suppressEncryption = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
+ var gen = ref.gen;
+ var num = ref.num;
+
+ if (xrefEntry.gen !== gen) {
+ throw new _core_utils.XRefEntryException("Inconsistent generation in XRef: ".concat(ref));
+ }
+
+ var stream = this.stream.makeSubStream(xrefEntry.offset + this.stream.start);
+ var parser = new _parser.Parser({
+ lexer: new _parser.Lexer(stream),
+ xref: this,
+ allowStreams: true
+ });
+ var obj1 = parser.getObj();
+ var obj2 = parser.getObj();
+ var obj3 = parser.getObj();
+
+ if (!Number.isInteger(obj1)) {
+ obj1 = parseInt(obj1, 10);
+ }
+
+ if (!Number.isInteger(obj2)) {
+ obj2 = parseInt(obj2, 10);
+ }
+
+ if (obj1 !== num || obj2 !== gen || !(obj3 instanceof _primitives.Cmd)) {
+ throw new _core_utils.XRefEntryException("Bad (uncompressed) XRef entry: ".concat(ref));
+ }
+
+ if (obj3.cmd !== 'obj') {
+ if (obj3.cmd.startsWith('obj')) {
+ num = parseInt(obj3.cmd.substring(3), 10);
+
+ if (!Number.isNaN(num)) {
+ return num;
+ }
+ }
+
+ throw new _core_utils.XRefEntryException("Bad (uncompressed) XRef entry: ".concat(ref));
+ }
+
+ if (this.encrypt && !suppressEncryption) {
+ xrefEntry = parser.getObj(this.encrypt.createCipherTransform(num, gen));
+ } else {
+ xrefEntry = parser.getObj();
+ }
+
+ if (!(0, _primitives.isStream)(xrefEntry)) {
+ this.cache[num] = xrefEntry;
+ }
+
+ return xrefEntry;
+ },
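+    // For compressed (type 2) entries the offset field holds the object
+    // number of the containing object stream (ObjStm) and the gen field the
+    // index of the object within that stream.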
+ fetchCompressed: function fetchCompressed(ref, xrefEntry) {
+ var suppressEncryption = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
+ var tableOffset = xrefEntry.offset;
+ var stream = this.fetch(_primitives.Ref.get(tableOffset, 0));
+
+ if (!(0, _primitives.isStream)(stream)) {
+ throw new _util.FormatError('bad ObjStm stream');
+ }
+
+ var first = stream.dict.get('First');
+ var n = stream.dict.get('N');
+
+ if (!Number.isInteger(first) || !Number.isInteger(n)) {
+ throw new _util.FormatError('invalid first and n parameters for ObjStm stream');
+ }
+
+ var parser = new _parser.Parser({
+ lexer: new _parser.Lexer(stream),
+ xref: this,
+ allowStreams: true
+ });
+ var i,
+ entries = [],
+ num,
+ nums = [];
+
+ for (i = 0; i < n; ++i) {
+ num = parser.getObj();
+
+ if (!Number.isInteger(num)) {
+ throw new _util.FormatError("invalid object number in the ObjStm stream: ".concat(num));
+ }
+
+ nums.push(num);
+ var offset = parser.getObj();
+
+ if (!Number.isInteger(offset)) {
+ throw new _util.FormatError("invalid object offset in the ObjStm stream: ".concat(offset));
+ }
+ }
+
+ for (i = 0; i < n; ++i) {
+ entries.push(parser.getObj());
+
+ if ((0, _primitives.isCmd)(parser.buf1, 'endobj')) {
+ parser.shift();
+ }
+
+ num = nums[i];
+ var entry = this.entries[num];
+
+ if (entry && entry.offset === tableOffset && entry.gen === i) {
+ this.cache[num] = entries[i];
+ }
+ }
+
+ xrefEntry = entries[xrefEntry.gen];
+
+ if (xrefEntry === undefined) {
+ throw new _core_utils.XRefEntryException("Bad (compressed) XRef entry: ".concat(ref));
+ }
+
+ return xrefEntry;
+ },
+ fetchIfRefAsync: function () {
+ var _fetchIfRefAsync = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee(obj, suppressEncryption) {
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ if ((0, _primitives.isRef)(obj)) {
+ _context.next = 2;
+ break;
+ }
+
+ return _context.abrupt("return", obj);
+
+ case 2:
+ return _context.abrupt("return", this.fetchAsync(obj, suppressEncryption));
+
+ case 3:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, this);
+ }));
+
+ function fetchIfRefAsync(_x, _x2) {
+ return _fetchIfRefAsync.apply(this, arguments);
+ }
+
+ return fetchIfRefAsync;
+ }(),
+ fetchAsync: function () {
+ var _fetchAsync = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee2(ref, suppressEncryption) {
+ return _regenerator["default"].wrap(function _callee2$(_context2) {
+ while (1) {
+ switch (_context2.prev = _context2.next) {
+ case 0:
+ _context2.prev = 0;
+ return _context2.abrupt("return", this.fetch(ref, suppressEncryption));
+
+ case 4:
+ _context2.prev = 4;
+ _context2.t0 = _context2["catch"](0);
+
+ if (_context2.t0 instanceof _core_utils.MissingDataException) {
+ _context2.next = 8;
+ break;
+ }
+
+ throw _context2.t0;
+
+ case 8:
+ _context2.next = 10;
+ return this.pdfManager.requestRange(_context2.t0.begin, _context2.t0.end);
+
+ case 10:
+ return _context2.abrupt("return", this.fetchAsync(ref, suppressEncryption));
+
+ case 11:
+ case "end":
+ return _context2.stop();
+ }
+ }
+ }, _callee2, this, [[0, 4]]);
+ }));
+
+ function fetchAsync(_x3, _x4) {
+ return _fetchAsync.apply(this, arguments);
+ }
+
+ return fetchAsync;
+ }(),
+ getCatalogObj: function XRef_getCatalogObj() {
+ return this.root;
+ }
+ };
+ return XRef;
+}();
+
+exports.XRef = XRef;
+
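+// Common base class for the document's name and number trees: getAll() walks
+// every leaf node, while get() binary-searches the /Kids nodes via their
+// /Limits arrays and falls back to a linear scan for out-of-order keys.
+// Hypothetical usage sketch (catalogNames stands in for the catalog's
+// /Names dictionary): new NameTree(catalogNames, xref).get('SomeDest').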
+var NameOrNumberTree =
+/*#__PURE__*/
+function () {
+ function NameOrNumberTree(root, xref, type) {
+ _classCallCheck(this, NameOrNumberTree);
+
+ if (this.constructor === NameOrNumberTree) {
+ (0, _util.unreachable)('Cannot initialize NameOrNumberTree.');
+ }
+
+ this.root = root;
+ this.xref = xref;
+ this._type = type;
+ }
+
+ _createClass(NameOrNumberTree, [{
+ key: "getAll",
+ value: function getAll() {
+ var dict = Object.create(null);
+
+ if (!this.root) {
+ return dict;
+ }
+
+ var xref = this.xref;
+ var processed = new _primitives.RefSet();
+ processed.put(this.root);
+ var queue = [this.root];
+
+ while (queue.length > 0) {
+ var obj = xref.fetchIfRef(queue.shift());
+
+ if (!(0, _primitives.isDict)(obj)) {
+ continue;
+ }
+
+ if (obj.has('Kids')) {
+ var kids = obj.get('Kids');
+
+ for (var i = 0, ii = kids.length; i < ii; i++) {
+ var kid = kids[i];
+
+ if (processed.has(kid)) {
+ throw new _util.FormatError("Duplicate entry in \"".concat(this._type, "\" tree."));
+ }
+
+ queue.push(kid);
+ processed.put(kid);
+ }
+
+ continue;
+ }
+
+ var entries = obj.get(this._type);
+
+ if (Array.isArray(entries)) {
+ for (var _i2 = 0, _ii = entries.length; _i2 < _ii; _i2 += 2) {
+ dict[xref.fetchIfRef(entries[_i2])] = xref.fetchIfRef(entries[_i2 + 1]);
+ }
+ }
+ }
+
+ return dict;
+ }
+ }, {
+ key: "get",
+ value: function get(key) {
+ if (!this.root) {
+ return null;
+ }
+
+ var xref = this.xref;
+ var kidsOrEntries = xref.fetchIfRef(this.root);
+ var loopCount = 0;
+ var MAX_LEVELS = 10;
+
+ while (kidsOrEntries.has('Kids')) {
+ if (++loopCount > MAX_LEVELS) {
+ (0, _util.warn)("Search depth limit reached for \"".concat(this._type, "\" tree."));
+ return null;
+ }
+
+ var kids = kidsOrEntries.get('Kids');
+
+ if (!Array.isArray(kids)) {
+ return null;
+ }
+
+ var l = 0,
+ r = kids.length - 1;
+
+ while (l <= r) {
+ var m = l + r >> 1;
+ var kid = xref.fetchIfRef(kids[m]);
+ var limits = kid.get('Limits');
+
+ if (key < xref.fetchIfRef(limits[0])) {
+ r = m - 1;
+ } else if (key > xref.fetchIfRef(limits[1])) {
+ l = m + 1;
+ } else {
+ kidsOrEntries = xref.fetchIfRef(kids[m]);
+ break;
+ }
+ }
+
+ if (l > r) {
+ return null;
+ }
+ }
+
+ var entries = kidsOrEntries.get(this._type);
+
+ if (Array.isArray(entries)) {
+ var _l = 0,
+ _r = entries.length - 2;
+
+ while (_l <= _r) {
+ var tmp = _l + _r >> 1,
+ _m = tmp + (tmp & 1);
+
+ var currentKey = xref.fetchIfRef(entries[_m]);
+
+ if (key < currentKey) {
+ _r = _m - 2;
+ } else if (key > currentKey) {
+ _l = _m + 2;
+ } else {
+ return xref.fetchIfRef(entries[_m + 1]);
+ }
+ }
+
+ (0, _util.info)("Falling back to an exhaustive search, for key \"".concat(key, "\", ") + "in \"".concat(this._type, "\" tree."));
+
+ for (var _m2 = 0, mm = entries.length; _m2 < mm; _m2 += 2) {
+ var _currentKey = xref.fetchIfRef(entries[_m2]);
+
+ if (_currentKey === key) {
+ (0, _util.warn)("The \"".concat(key, "\" key was found at an incorrect, ") + "i.e. out-of-order, position in \"".concat(this._type, "\" tree."));
+ return xref.fetchIfRef(entries[_m2 + 1]);
+ }
+ }
+ }
+
+ return null;
+ }
+ }]);
+
+ return NameOrNumberTree;
+}();
+
+var NameTree =
+/*#__PURE__*/
+function (_NameOrNumberTree) {
+ _inherits(NameTree, _NameOrNumberTree);
+
+ function NameTree(root, xref) {
+ _classCallCheck(this, NameTree);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(NameTree).call(this, root, xref, 'Names'));
+ }
+
+ return NameTree;
+}(NameOrNumberTree);
+
+var NumberTree =
+/*#__PURE__*/
+function (_NameOrNumberTree2) {
+ _inherits(NumberTree, _NameOrNumberTree2);
+
+ function NumberTree(root, xref) {
+ _classCallCheck(this, NumberTree);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(NumberTree).call(this, root, xref, 'Nums'));
+ }
+
+ return NumberTree;
+}(NameOrNumberTree);
+
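+// FileSpec resolves file specification dictionaries: pickPlatformItem()
+// prefers /UF, then /F, /Unix, /Mac and /DOS, and content is only exposed
+// for embedded files (i.e. when an /EF entry is present).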
+var FileSpec = function FileSpecClosure() {
+ function FileSpec(root, xref) {
+ if (!root || !(0, _primitives.isDict)(root)) {
+ return;
+ }
+
+ this.xref = xref;
+ this.root = root;
+
+ if (root.has('FS')) {
+ this.fs = root.get('FS');
+ }
+
+ this.description = root.has('Desc') ? (0, _util.stringToPDFString)(root.get('Desc')) : '';
+
+ if (root.has('RF')) {
+ (0, _util.warn)('Related file specifications are not supported');
+ }
+
+ this.contentAvailable = true;
+
+ if (!root.has('EF')) {
+ this.contentAvailable = false;
+ (0, _util.warn)('Non-embedded file specifications are not supported');
+ }
+ }
+
+ function pickPlatformItem(dict) {
+ if (dict.has('UF')) {
+ return dict.get('UF');
+ } else if (dict.has('F')) {
+ return dict.get('F');
+ } else if (dict.has('Unix')) {
+ return dict.get('Unix');
+ } else if (dict.has('Mac')) {
+ return dict.get('Mac');
+ } else if (dict.has('DOS')) {
+ return dict.get('DOS');
+ }
+
+ return null;
+ }
+
+ FileSpec.prototype = {
+ get filename() {
+ if (!this._filename && this.root) {
+ var filename = pickPlatformItem(this.root) || 'unnamed';
+ this._filename = (0, _util.stringToPDFString)(filename).replace(/\\\\/g, '\\').replace(/\\\//g, '/').replace(/\\/g, '/');
+ }
+
+ return this._filename;
+ },
+
+ get content() {
+ if (!this.contentAvailable) {
+ return null;
+ }
+
+ if (!this.contentRef && this.root) {
+ this.contentRef = pickPlatformItem(this.root.get('EF'));
+ }
+
+ var content = null;
+
+ if (this.contentRef) {
+ var xref = this.xref;
+ var fileObj = xref.fetchIfRef(this.contentRef);
+
+ if (fileObj && (0, _primitives.isStream)(fileObj)) {
+ content = fileObj.getBytes();
+ } else {
+ (0, _util.warn)('Embedded file specification points to non-existing/invalid ' + 'content');
+ }
+ } else {
+ (0, _util.warn)('Embedded file specification does not have a content');
+ }
+
+ return content;
+ },
+
+ get serializable() {
+ return {
+ filename: this.filename,
+ content: this.content
+ };
+ }
+
+ };
+ return FileSpec;
+}();
+
+exports.FileSpec = FileSpec;
+
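+// ObjectLoader walks the values of the given dictionary keys (following
+// references and base streams) and, when the underlying ChunkedStream is
+// missing data, requests the needed ranges before resolving its capability.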
+var ObjectLoader = function () {
+ function mayHaveChildren(value) {
+ return (0, _primitives.isRef)(value) || (0, _primitives.isDict)(value) || Array.isArray(value) || (0, _primitives.isStream)(value);
+ }
+
+ function addChildren(node, nodesToVisit) {
+ if ((0, _primitives.isDict)(node) || (0, _primitives.isStream)(node)) {
+ var dict = (0, _primitives.isDict)(node) ? node : node.dict;
+ var dictKeys = dict.getKeys();
+
+ for (var i = 0, ii = dictKeys.length; i < ii; i++) {
+ var rawValue = dict.getRaw(dictKeys[i]);
+
+ if (mayHaveChildren(rawValue)) {
+ nodesToVisit.push(rawValue);
+ }
+ }
+ } else if (Array.isArray(node)) {
+ for (var _i3 = 0, _ii2 = node.length; _i3 < _ii2; _i3++) {
+ var value = node[_i3];
+
+ if (mayHaveChildren(value)) {
+ nodesToVisit.push(value);
+ }
+ }
+ }
+ }
+
+ function ObjectLoader(dict, keys, xref) {
+ this.dict = dict;
+ this.keys = keys;
+ this.xref = xref;
+ this.refSet = null;
+ this.capability = null;
+ }
+
+ ObjectLoader.prototype = {
+ load: function load() {
+ this.capability = (0, _util.createPromiseCapability)();
+
+ if (!(this.xref.stream instanceof _chunked_stream.ChunkedStream) || this.xref.stream.getMissingChunks().length === 0) {
+ this.capability.resolve();
+ return this.capability.promise;
+ }
+
+ var keys = this.keys,
+ dict = this.dict;
+ this.refSet = new _primitives.RefSet();
+ var nodesToVisit = [];
+
+ for (var i = 0, ii = keys.length; i < ii; i++) {
+ var rawValue = dict.getRaw(keys[i]);
+
+ if (rawValue !== undefined) {
+ nodesToVisit.push(rawValue);
+ }
+ }
+
+ this._walk(nodesToVisit);
+
+ return this.capability.promise;
+ },
+ _walk: function _walk(nodesToVisit) {
+ var _this3 = this;
+
+ var nodesToRevisit = [];
+ var pendingRequests = [];
+
+ while (nodesToVisit.length) {
+ var currentNode = nodesToVisit.pop();
+
+ if ((0, _primitives.isRef)(currentNode)) {
+ if (this.refSet.has(currentNode)) {
+ continue;
+ }
+
+ try {
+ this.refSet.put(currentNode);
+ currentNode = this.xref.fetch(currentNode);
+ } catch (ex) {
+ if (!(ex instanceof _core_utils.MissingDataException)) {
+ throw ex;
+ }
+
+ nodesToRevisit.push(currentNode);
+ pendingRequests.push({
+ begin: ex.begin,
+ end: ex.end
+ });
+ }
+ }
+
+ if (currentNode && currentNode.getBaseStreams) {
+ var baseStreams = currentNode.getBaseStreams();
+ var foundMissingData = false;
+
+ for (var i = 0, ii = baseStreams.length; i < ii; i++) {
+ var stream = baseStreams[i];
+
+ if (stream.getMissingChunks && stream.getMissingChunks().length) {
+ foundMissingData = true;
+ pendingRequests.push({
+ begin: stream.start,
+ end: stream.end
+ });
+ }
+ }
+
+ if (foundMissingData) {
+ nodesToRevisit.push(currentNode);
+ }
+ }
+
+ addChildren(currentNode, nodesToVisit);
+ }
+
+ if (pendingRequests.length) {
+ this.xref.stream.manager.requestRanges(pendingRequests).then(function () {
+ for (var _i4 = 0, _ii3 = nodesToRevisit.length; _i4 < _ii3; _i4++) {
+ var node = nodesToRevisit[_i4];
+
+ if ((0, _primitives.isRef)(node)) {
+ _this3.refSet.remove(node);
+ }
+ }
+
+ _this3._walk(nodesToRevisit);
+ }, this.capability.reject);
+ return;
+ }
+
+ this.refSet = null;
+ this.capability.resolve();
+ }
+ };
+ return ObjectLoader;
+}();
+
+exports.ObjectLoader = ObjectLoader;
+
+/***/ }),
+/* 157 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.Parser = exports.Linearization = exports.Lexer = void 0;
+
+var _stream = __w_pdfjs_require__(158);
+
+var _util = __w_pdfjs_require__(5);
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _ccitt_stream = __w_pdfjs_require__(159);
+
+var _jbig2_stream = __w_pdfjs_require__(161);
+
+var _jpeg_stream = __w_pdfjs_require__(164);
+
+var _jpx_stream = __w_pdfjs_require__(166);
+
+var _core_utils = __w_pdfjs_require__(154);
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var MAX_LENGTH_TO_CACHE = 1000;
+var MAX_ADLER32_LENGTH = 5552;
+
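+// Adler-32 checksum computed without reducing modulo 65521 inside the loop;
+// callers keep inputs below MAX_ADLER32_LENGTH, presumably so the running
+// sums stay within a safe integer range. Used below to build cache keys for
+// small inline images.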
+function computeAdler32(bytes) {
+ var bytesLength = bytes.length;
+ var a = 1,
+ b = 0;
+
+ for (var i = 0; i < bytesLength; ++i) {
+ a += bytes[i] & 0xFF;
+ b += a;
+ }
+
+ return b % 65521 << 16 | a % 65521;
+}
+
+var Parser =
+/*#__PURE__*/
+function () {
+ function Parser(_ref) {
+ var lexer = _ref.lexer,
+ xref = _ref.xref,
+ _ref$allowStreams = _ref.allowStreams,
+ allowStreams = _ref$allowStreams === void 0 ? false : _ref$allowStreams,
+ _ref$recoveryMode = _ref.recoveryMode,
+ recoveryMode = _ref$recoveryMode === void 0 ? false : _ref$recoveryMode;
+
+ _classCallCheck(this, Parser);
+
+ this.lexer = lexer;
+ this.xref = xref;
+ this.allowStreams = allowStreams;
+ this.recoveryMode = recoveryMode;
+ this.imageCache = Object.create(null);
+ this.refill();
+ }
+
+ _createClass(Parser, [{
+ key: "refill",
+ value: function refill() {
+ this.buf1 = this.lexer.getObj();
+ this.buf2 = this.lexer.getObj();
+ }
+ }, {
+ key: "shift",
+ value: function shift() {
+ if ((0, _primitives.isCmd)(this.buf2, 'ID')) {
+ this.buf1 = this.buf2;
+ this.buf2 = null;
+ } else {
+ this.buf1 = this.buf2;
+ this.buf2 = this.lexer.getObj();
+ }
+ }
+ }, {
+ key: "tryShift",
+ value: function tryShift() {
+ try {
+ this.shift();
+ return true;
+ } catch (e) {
+ if (e instanceof _core_utils.MissingDataException) {
+ throw e;
+ }
+
+ return false;
+ }
+ }
+ }, {
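+    // Recursive-descent entry point: builds arrays from '[' ... ']' and
+    // dictionaries from '<<' ... '>>' (optionally followed by a stream),
+    // turns "num gen R" into a Ref, and decrypts strings when a
+    // cipherTransform is supplied.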
+ key: "getObj",
+ value: function getObj(cipherTransform) {
+ var buf1 = this.buf1;
+ this.shift();
+
+ if (buf1 instanceof _primitives.Cmd) {
+ switch (buf1.cmd) {
+ case 'BI':
+ return this.makeInlineImage(cipherTransform);
+
+ case '[':
+ var array = [];
+
+ while (!(0, _primitives.isCmd)(this.buf1, ']') && !(0, _primitives.isEOF)(this.buf1)) {
+ array.push(this.getObj(cipherTransform));
+ }
+
+ if ((0, _primitives.isEOF)(this.buf1)) {
+ if (!this.recoveryMode) {
+ throw new _util.FormatError('End of file inside array');
+ }
+
+ return array;
+ }
+
+ this.shift();
+ return array;
+
+ case '<<':
+ var dict = new _primitives.Dict(this.xref);
+
+ while (!(0, _primitives.isCmd)(this.buf1, '>>') && !(0, _primitives.isEOF)(this.buf1)) {
+ if (!(0, _primitives.isName)(this.buf1)) {
+ (0, _util.info)('Malformed dictionary: key must be a name object');
+ this.shift();
+ continue;
+ }
+
+ var key = this.buf1.name;
+ this.shift();
+
+ if ((0, _primitives.isEOF)(this.buf1)) {
+ break;
+ }
+
+ dict.set(key, this.getObj(cipherTransform));
+ }
+
+ if ((0, _primitives.isEOF)(this.buf1)) {
+ if (!this.recoveryMode) {
+ throw new _util.FormatError('End of file inside dictionary');
+ }
+
+ return dict;
+ }
+
+ if ((0, _primitives.isCmd)(this.buf2, 'stream')) {
+ return this.allowStreams ? this.makeStream(dict, cipherTransform) : dict;
+ }
+
+ this.shift();
+ return dict;
+
+ default:
+ return buf1;
+ }
+ }
+
+ if (Number.isInteger(buf1)) {
+ var num = buf1;
+
+ if (Number.isInteger(this.buf1) && (0, _primitives.isCmd)(this.buf2, 'R')) {
+ var ref = _primitives.Ref.get(num, this.buf1);
+
+ this.shift();
+ this.shift();
+ return ref;
+ }
+
+ return num;
+ }
+
+ if ((0, _util.isString)(buf1)) {
+ var str = buf1;
+
+ if (cipherTransform) {
+ str = cipherTransform.decryptString(str);
+ }
+
+ return str;
+ }
+
+ return buf1;
+ }
+ }, {
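+    // Finds the end of an unfiltered inline image by scanning for an "EI"
+    // marker that is followed by whitespace and a plausible-looking run of
+    // bytes, then excludes the marker (and adjacent whitespace) from the
+    // returned length.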
+ key: "findDefaultInlineStreamEnd",
+ value: function findDefaultInlineStreamEnd(stream) {
+ var E = 0x45,
+ I = 0x49,
+ SPACE = 0x20,
+ LF = 0xA,
+ CR = 0xD;
+ var n = 10,
+ NUL = 0x0;
+ var startPos = stream.pos,
+ state = 0,
+ ch,
+ maybeEIPos;
+
+ while ((ch = stream.getByte()) !== -1) {
+ if (state === 0) {
+ state = ch === E ? 1 : 0;
+ } else if (state === 1) {
+ state = ch === I ? 2 : 0;
+ } else {
+ (0, _util.assert)(state === 2);
+
+ if (ch === SPACE || ch === LF || ch === CR) {
+ maybeEIPos = stream.pos;
+ var followingBytes = stream.peekBytes(n);
+
+ for (var i = 0, ii = followingBytes.length; i < ii; i++) {
+ ch = followingBytes[i];
+
+ if (ch === NUL && followingBytes[i + 1] !== NUL) {
+ continue;
+ }
+
+ if (ch !== LF && ch !== CR && (ch < SPACE || ch > 0x7F)) {
+ state = 0;
+ break;
+ }
+ }
+
+ if (state === 2) {
+ break;
+ }
+ } else {
+ state = 0;
+ }
+ }
+ }
+
+ if (ch === -1) {
+ (0, _util.warn)('findDefaultInlineStreamEnd: ' + 'Reached the end of the stream without finding a valid EI marker');
+
+ if (maybeEIPos) {
+ (0, _util.warn)('... trying to recover by using the last "EI" occurrence.');
+ stream.skip(-(stream.pos - maybeEIPos));
+ }
+ }
+
+ var endOffset = 4;
+ stream.skip(-endOffset);
+ ch = stream.peekByte();
+ stream.skip(endOffset);
+
+ if (!(0, _util.isSpace)(ch)) {
+ endOffset--;
+ }
+
+ return stream.pos - endOffset - startPos;
+ }
+ }, {
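+    // Walks JPEG markers, skipping segment payloads by their length fields,
+    // until the EOI marker (0xFFD9) is reached; falls back to the generic
+    // "EI" scan if no EOI marker is found.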
+ key: "findDCTDecodeInlineStreamEnd",
+ value: function findDCTDecodeInlineStreamEnd(stream) {
+ var startPos = stream.pos,
+ foundEOI = false,
+ b,
+ markerLength,
+ length;
+
+ while ((b = stream.getByte()) !== -1) {
+ if (b !== 0xFF) {
+ continue;
+ }
+
+ switch (stream.getByte()) {
+ case 0x00:
+ break;
+
+ case 0xFF:
+ stream.skip(-1);
+ break;
+
+ case 0xD9:
+ foundEOI = true;
+ break;
+
+ case 0xC0:
+ case 0xC1:
+ case 0xC2:
+ case 0xC3:
+ case 0xC5:
+ case 0xC6:
+ case 0xC7:
+ case 0xC9:
+ case 0xCA:
+ case 0xCB:
+ case 0xCD:
+ case 0xCE:
+ case 0xCF:
+ case 0xC4:
+ case 0xCC:
+ case 0xDA:
+ case 0xDB:
+ case 0xDC:
+ case 0xDD:
+ case 0xDE:
+ case 0xDF:
+ case 0xE0:
+ case 0xE1:
+ case 0xE2:
+ case 0xE3:
+ case 0xE4:
+ case 0xE5:
+ case 0xE6:
+ case 0xE7:
+ case 0xE8:
+ case 0xE9:
+ case 0xEA:
+ case 0xEB:
+ case 0xEC:
+ case 0xED:
+ case 0xEE:
+ case 0xEF:
+ case 0xFE:
+ markerLength = stream.getUint16();
+
+ if (markerLength > 2) {
+ stream.skip(markerLength - 2);
+ } else {
+ stream.skip(-2);
+ }
+
+ break;
+ }
+
+ if (foundEOI) {
+ break;
+ }
+ }
+
+ length = stream.pos - startPos;
+
+ if (b === -1) {
+ (0, _util.warn)('Inline DCTDecode image stream: ' + 'EOI marker not found, searching for /EI/ instead.');
+ stream.skip(-length);
+ return this.findDefaultInlineStreamEnd(stream);
+ }
+
+ this.inlineStreamSkipEI(stream);
+ return length;
+ }
+ }, {
+ key: "findASCII85DecodeInlineStreamEnd",
+ value: function findASCII85DecodeInlineStreamEnd(stream) {
+ var TILDE = 0x7E,
+ GT = 0x3E;
+ var startPos = stream.pos,
+ ch,
+ length;
+
+ while ((ch = stream.getByte()) !== -1) {
+ if (ch === TILDE) {
+ ch = stream.peekByte();
+
+ while ((0, _util.isSpace)(ch)) {
+ stream.skip();
+ ch = stream.peekByte();
+ }
+
+ if (ch === GT) {
+ stream.skip();
+ break;
+ }
+ }
+ }
+
+ length = stream.pos - startPos;
+
+ if (ch === -1) {
+ (0, _util.warn)('Inline ASCII85Decode image stream: ' + 'EOD marker not found, searching for /EI/ instead.');
+ stream.skip(-length);
+ return this.findDefaultInlineStreamEnd(stream);
+ }
+
+ this.inlineStreamSkipEI(stream);
+ return length;
+ }
+ }, {
+ key: "findASCIIHexDecodeInlineStreamEnd",
+ value: function findASCIIHexDecodeInlineStreamEnd(stream) {
+ var GT = 0x3E;
+ var startPos = stream.pos,
+ ch,
+ length;
+
+ while ((ch = stream.getByte()) !== -1) {
+ if (ch === GT) {
+ break;
+ }
+ }
+
+ length = stream.pos - startPos;
+
+ if (ch === -1) {
+ (0, _util.warn)('Inline ASCIIHexDecode image stream: ' + 'EOD marker not found, searching for /EI/ instead.');
+ stream.skip(-length);
+ return this.findDefaultInlineStreamEnd(stream);
+ }
+
+ this.inlineStreamSkipEI(stream);
+ return length;
+ }
+ }, {
+ key: "inlineStreamSkipEI",
+ value: function inlineStreamSkipEI(stream) {
+ var E = 0x45,
+ I = 0x49;
+ var state = 0,
+ ch;
+
+ while ((ch = stream.getByte()) !== -1) {
+ if (state === 0) {
+ state = ch === E ? 1 : 0;
+ } else if (state === 1) {
+ state = ch === I ? 2 : 0;
+ } else if (state === 2) {
+ break;
+ }
+ }
+ }
+ }, {
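+    // Parses a "BI ... ID ... EI" inline image: reads the parameter
+    // dictionary, locates the end of the image data with a filter-specific
+    // scan, and caches small images keyed by the Adler-32 checksums of
+    // their data and dictionary bytes.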
+ key: "makeInlineImage",
+ value: function makeInlineImage(cipherTransform) {
+ var lexer = this.lexer;
+ var stream = lexer.stream;
+ var dict = new _primitives.Dict(this.xref);
+ var dictLength;
+
+ while (!(0, _primitives.isCmd)(this.buf1, 'ID') && !(0, _primitives.isEOF)(this.buf1)) {
+ if (!(0, _primitives.isName)(this.buf1)) {
+ throw new _util.FormatError('Dictionary key must be a name object');
+ }
+
+ var key = this.buf1.name;
+ this.shift();
+
+ if ((0, _primitives.isEOF)(this.buf1)) {
+ break;
+ }
+
+ dict.set(key, this.getObj(cipherTransform));
+ }
+
+ if (lexer.beginInlineImagePos !== -1) {
+ dictLength = stream.pos - lexer.beginInlineImagePos;
+ }
+
+ var filter = dict.get('Filter', 'F');
+ var filterName;
+
+ if ((0, _primitives.isName)(filter)) {
+ filterName = filter.name;
+ } else if (Array.isArray(filter)) {
+ var filterZero = this.xref.fetchIfRef(filter[0]);
+
+ if ((0, _primitives.isName)(filterZero)) {
+ filterName = filterZero.name;
+ }
+ }
+
+ var startPos = stream.pos;
+ var length;
+
+ if (filterName === 'DCTDecode' || filterName === 'DCT') {
+ length = this.findDCTDecodeInlineStreamEnd(stream);
+ } else if (filterName === 'ASCII85Decode' || filterName === 'A85') {
+ length = this.findASCII85DecodeInlineStreamEnd(stream);
+ } else if (filterName === 'ASCIIHexDecode' || filterName === 'AHx') {
+ length = this.findASCIIHexDecodeInlineStreamEnd(stream);
+ } else {
+ length = this.findDefaultInlineStreamEnd(stream);
+ }
+
+ var imageStream = stream.makeSubStream(startPos, length, dict);
+ var cacheKey;
+
+ if (length < MAX_LENGTH_TO_CACHE && dictLength < MAX_ADLER32_LENGTH) {
+ var imageBytes = imageStream.getBytes();
+ imageStream.reset();
+ var initialStreamPos = stream.pos;
+ stream.pos = lexer.beginInlineImagePos;
+ var dictBytes = stream.getBytes(dictLength);
+ stream.pos = initialStreamPos;
+ cacheKey = computeAdler32(imageBytes) + '_' + computeAdler32(dictBytes);
+ var cacheEntry = this.imageCache[cacheKey];
+
+ if (cacheEntry !== undefined) {
+ this.buf2 = _primitives.Cmd.get('EI');
+ this.shift();
+ cacheEntry.reset();
+ return cacheEntry;
+ }
+ }
+
+ if (cipherTransform) {
+ imageStream = cipherTransform.createStream(imageStream, length);
+ }
+
+ imageStream = this.filter(imageStream, dict, length);
+ imageStream.dict = dict;
+
+ if (cacheKey !== undefined) {
+ imageStream.cacheKey = "inline_".concat(length, "_").concat(cacheKey);
+ this.imageCache[cacheKey] = imageStream;
+ }
+
+ this.buf2 = _primitives.Cmd.get('EI');
+ this.shift();
+ return imageStream;
+ }
+ }, {
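+    // Scans forward in 2048-byte blocks for the given byte signature and
+    // returns its distance from startPos, or -1 when it is not found.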
+ key: "_findStreamLength",
+ value: function _findStreamLength(startPos, signature) {
+ var stream = this.lexer.stream;
+ stream.pos = startPos;
+ var SCAN_BLOCK_LENGTH = 2048;
+ var signatureLength = signature.length;
+
+ while (stream.pos < stream.end) {
+ var scanBytes = stream.peekBytes(SCAN_BLOCK_LENGTH);
+ var scanLength = scanBytes.length - signatureLength;
+
+ if (scanLength <= 0) {
+ break;
+ }
+
+ var pos = 0;
+
+ while (pos < scanLength) {
+ var j = 0;
+
+ while (j < signatureLength && scanBytes[pos + j] === signature[j]) {
+ j++;
+ }
+
+ if (j >= signatureLength) {
+ stream.pos += pos;
+ return stream.pos - startPos;
+ }
+
+ pos++;
+ }
+
+ stream.pos += scanLength;
+ }
+
+ return -1;
+ }
+ }, {
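+    // Builds a stream from a dictionary: trusts /Length when the data is
+    // followed by the "endstream" keyword, otherwise searches for the
+    // keyword (tolerating a slightly truncated one) to recover the actual
+    // length.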
+ key: "makeStream",
+ value: function makeStream(dict, cipherTransform) {
+ var lexer = this.lexer;
+ var stream = lexer.stream;
+ lexer.skipToNextLine();
+ var startPos = stream.pos - 1;
+ var length = dict.get('Length');
+
+ if (!Number.isInteger(length)) {
+ (0, _util.info)("Bad length \"".concat(length, "\" in stream"));
+ length = 0;
+ }
+
+ stream.pos = startPos + length;
+ lexer.nextChar();
+
+ if (this.tryShift() && (0, _primitives.isCmd)(this.buf2, 'endstream')) {
+ this.shift();
+ } else {
+ var ENDSTREAM_SIGNATURE = new Uint8Array([0x65, 0x6E, 0x64, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6D]);
+
+ var actualLength = this._findStreamLength(startPos, ENDSTREAM_SIGNATURE);
+
+ if (actualLength < 0) {
+ var MAX_TRUNCATION = 1;
+
+ for (var i = 1; i <= MAX_TRUNCATION; i++) {
+ var end = ENDSTREAM_SIGNATURE.length - i;
+ var TRUNCATED_SIGNATURE = ENDSTREAM_SIGNATURE.slice(0, end);
+
+ var maybeLength = this._findStreamLength(startPos, TRUNCATED_SIGNATURE);
+
+ if (maybeLength >= 0) {
+ var lastByte = stream.peekBytes(end + 1)[end];
+
+ if (!(0, _util.isSpace)(lastByte)) {
+ break;
+ }
+
+ (0, _util.info)("Found \"".concat((0, _util.bytesToString)(TRUNCATED_SIGNATURE), "\" when ") + 'searching for endstream command.');
+ actualLength = maybeLength;
+ break;
+ }
+ }
+
+ if (actualLength < 0) {
+ throw new _util.FormatError('Missing endstream command.');
+ }
+ }
+
+ length = actualLength;
+ lexer.nextChar();
+ this.shift();
+ this.shift();
+ }
+
+ this.shift();
+ stream = stream.makeSubStream(startPos, length, dict);
+
+ if (cipherTransform) {
+ stream = cipherTransform.createStream(stream, length);
+ }
+
+ stream = this.filter(stream, dict, length);
+ stream.dict = dict;
+ return stream;
+ }
+ }, {
+ key: "filter",
+ value: function filter(stream, dict, length) {
+ var filter = dict.get('Filter', 'F');
+ var params = dict.get('DecodeParms', 'DP');
+
+ if ((0, _primitives.isName)(filter)) {
+ if (Array.isArray(params)) {
+ (0, _util.warn)('/DecodeParms should not contain an Array, ' + 'when /Filter contains a Name.');
+ }
+
+ return this.makeFilter(stream, filter.name, length, params);
+ }
+
+ var maybeLength = length;
+
+ if (Array.isArray(filter)) {
+ var filterArray = filter;
+ var paramsArray = params;
+
+ for (var i = 0, ii = filterArray.length; i < ii; ++i) {
+ filter = this.xref.fetchIfRef(filterArray[i]);
+
+ if (!(0, _primitives.isName)(filter)) {
+ throw new _util.FormatError("Bad filter name \"".concat(filter, "\""));
+ }
+
+ params = null;
+
+ if (Array.isArray(paramsArray) && i in paramsArray) {
+ params = this.xref.fetchIfRef(paramsArray[i]);
+ }
+
+ stream = this.makeFilter(stream, filter.name, maybeLength, params);
+ maybeLength = null;
+ }
+ }
+
+ return stream;
+ }
+ }, {
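+    // Maps a filter name (or its abbreviation) to the corresponding decode
+    // stream, recording the stream type in the xref statistics; unsupported
+    // filters pass the data through unchanged.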
+ key: "makeFilter",
+ value: function makeFilter(stream, name, maybeLength, params) {
+ if (maybeLength === 0) {
+ (0, _util.warn)("Empty \"".concat(name, "\" stream."));
+ return new _stream.NullStream();
+ }
+
+ try {
+ var xrefStreamStats = this.xref.stats.streamTypes;
+
+ if (name === 'FlateDecode' || name === 'Fl') {
+ xrefStreamStats[_util.StreamType.FLATE] = true;
+
+ if (params) {
+ return new _stream.PredictorStream(new _stream.FlateStream(stream, maybeLength), maybeLength, params);
+ }
+
+ return new _stream.FlateStream(stream, maybeLength);
+ }
+
+ if (name === 'LZWDecode' || name === 'LZW') {
+ xrefStreamStats[_util.StreamType.LZW] = true;
+ var earlyChange = 1;
+
+ if (params) {
+ if (params.has('EarlyChange')) {
+ earlyChange = params.get('EarlyChange');
+ }
+
+ return new _stream.PredictorStream(new _stream.LZWStream(stream, maybeLength, earlyChange), maybeLength, params);
+ }
+
+ return new _stream.LZWStream(stream, maybeLength, earlyChange);
+ }
+
+ if (name === 'DCTDecode' || name === 'DCT') {
+ xrefStreamStats[_util.StreamType.DCT] = true;
+ return new _jpeg_stream.JpegStream(stream, maybeLength, stream.dict, params);
+ }
+
+ if (name === 'JPXDecode' || name === 'JPX') {
+ xrefStreamStats[_util.StreamType.JPX] = true;
+ return new _jpx_stream.JpxStream(stream, maybeLength, stream.dict, params);
+ }
+
+ if (name === 'ASCII85Decode' || name === 'A85') {
+ xrefStreamStats[_util.StreamType.A85] = true;
+ return new _stream.Ascii85Stream(stream, maybeLength);
+ }
+
+ if (name === 'ASCIIHexDecode' || name === 'AHx') {
+ xrefStreamStats[_util.StreamType.AHX] = true;
+ return new _stream.AsciiHexStream(stream, maybeLength);
+ }
+
+ if (name === 'CCITTFaxDecode' || name === 'CCF') {
+ xrefStreamStats[_util.StreamType.CCF] = true;
+ return new _ccitt_stream.CCITTFaxStream(stream, maybeLength, params);
+ }
+
+ if (name === 'RunLengthDecode' || name === 'RL') {
+ xrefStreamStats[_util.StreamType.RL] = true;
+ return new _stream.RunLengthStream(stream, maybeLength);
+ }
+
+ if (name === 'JBIG2Decode') {
+ xrefStreamStats[_util.StreamType.JBIG] = true;
+ return new _jbig2_stream.Jbig2Stream(stream, maybeLength, stream.dict, params);
+ }
+
+ (0, _util.warn)("Filter \"".concat(name, "\" is not supported."));
+ return stream;
+ } catch (ex) {
+ if (ex instanceof _core_utils.MissingDataException) {
+ throw ex;
+ }
+
+ (0, _util.warn)("Invalid stream: \"".concat(ex, "\""));
+ return new _stream.NullStream();
+ }
+ }
+ }]);
+
+ return Parser;
+}();
+
+exports.Parser = Parser;
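+// Lookup table indexed by character code: 1 marks PDF whitespace, 2 marks
+// delimiter characters, and 0 everything else.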
+var specialChars = [1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 2, 0, 0, 2, 2, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+
+function toHexDigit(ch) {
+ if (ch >= 0x30 && ch <= 0x39) {
+ return ch & 0x0F;
+ }
+
+ if (ch >= 0x41 && ch <= 0x46 || ch >= 0x61 && ch <= 0x66) {
+ return (ch & 0x0F) + 9;
+ }
+
+ return -1;
+}
+
+var Lexer =
+/*#__PURE__*/
+function () {
+ function Lexer(stream) {
+ var knownCommands = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null;
+
+ _classCallCheck(this, Lexer);
+
+ this.stream = stream;
+ this.nextChar();
+ this.strBuf = [];
+ this.knownCommands = knownCommands;
+ this.beginInlineImagePos = -1;
+ }
+
+ _createClass(Lexer, [{
+ key: "nextChar",
+ value: function nextChar() {
+ return this.currentChar = this.stream.getByte();
+ }
+ }, {
+ key: "peekChar",
+ value: function peekChar() {
+ return this.stream.peekByte();
+ }
+ }, {
+ key: "getNumber",
+ value: function getNumber() {
+ var ch = this.currentChar;
+ var eNotation = false;
+ var divideBy = 0;
+ var sign = 0;
+
+ if (ch === 0x2D) {
+ sign = -1;
+ ch = this.nextChar();
+
+ if (ch === 0x2D) {
+ ch = this.nextChar();
+ }
+ } else if (ch === 0x2B) {
+ sign = 1;
+ ch = this.nextChar();
+ }
+
+ if (ch === 0x0A || ch === 0x0D) {
+ do {
+ ch = this.nextChar();
+ } while (ch === 0x0A || ch === 0x0D);
+ }
+
+ if (ch === 0x2E) {
+ divideBy = 10;
+ ch = this.nextChar();
+ }
+
+ if (ch < 0x30 || ch > 0x39) {
+ if (divideBy === 10 && sign === 0 && ((0, _util.isSpace)(ch) || ch === -1)) {
+ (0, _util.warn)('Lexer.getNumber - treating a single decimal point as zero.');
+ return 0;
+ }
+
+ throw new _util.FormatError("Invalid number: ".concat(String.fromCharCode(ch), " (charCode ").concat(ch, ")"));
+ }
+
+ sign = sign || 1;
+ var baseValue = ch - 0x30;
+ var powerValue = 0;
+ var powerValueSign = 1;
+
+ while ((ch = this.nextChar()) >= 0) {
+ if (0x30 <= ch && ch <= 0x39) {
+ var currentDigit = ch - 0x30;
+
+ if (eNotation) {
+ powerValue = powerValue * 10 + currentDigit;
+ } else {
+ if (divideBy !== 0) {
+ divideBy *= 10;
+ }
+
+ baseValue = baseValue * 10 + currentDigit;
+ }
+ } else if (ch === 0x2E) {
+ if (divideBy === 0) {
+ divideBy = 1;
+ } else {
+ break;
+ }
+ } else if (ch === 0x2D) {
+ (0, _util.warn)('Badly formatted number: minus sign in the middle');
+ } else if (ch === 0x45 || ch === 0x65) {
+ ch = this.peekChar();
+
+ if (ch === 0x2B || ch === 0x2D) {
+ powerValueSign = ch === 0x2D ? -1 : 1;
+ this.nextChar();
+ } else if (ch < 0x30 || ch > 0x39) {
+ break;
+ }
+
+ eNotation = true;
+ } else {
+ break;
+ }
+ }
+
+ if (divideBy !== 0) {
+ baseValue /= divideBy;
+ }
+
+ if (eNotation) {
+ baseValue *= Math.pow(10, powerValueSign * powerValue);
+ }
+
+ return sign * baseValue;
+ }
+ }, {
+ key: "getString",
+ value: function getString() {
+ var numParen = 1;
+ var done = false;
+ var strBuf = this.strBuf;
+ strBuf.length = 0;
+ var ch = this.nextChar();
+
+ while (true) {
+ var charBuffered = false;
+
+ switch (ch | 0) {
+ case -1:
+ (0, _util.warn)('Unterminated string');
+ done = true;
+ break;
+
+ case 0x28:
+ ++numParen;
+ strBuf.push('(');
+ break;
+
+ case 0x29:
+ if (--numParen === 0) {
+ this.nextChar();
+ done = true;
+ } else {
+ strBuf.push(')');
+ }
+
+ break;
+
+ case 0x5C:
+ ch = this.nextChar();
+
+ switch (ch) {
+ case -1:
+ (0, _util.warn)('Unterminated string');
+ done = true;
+ break;
+
+ case 0x6E:
+ strBuf.push('\n');
+ break;
+
+ case 0x72:
+ strBuf.push('\r');
+ break;
+
+ case 0x74:
+ strBuf.push('\t');
+ break;
+
+ case 0x62:
+ strBuf.push('\b');
+ break;
+
+ case 0x66:
+ strBuf.push('\f');
+ break;
+
+ case 0x5C:
+ case 0x28:
+ case 0x29:
+ strBuf.push(String.fromCharCode(ch));
+ break;
+
+ case 0x30:
+ case 0x31:
+ case 0x32:
+ case 0x33:
+ case 0x34:
+ case 0x35:
+ case 0x36:
+ case 0x37:
+ var x = ch & 0x0F;
+ ch = this.nextChar();
+ charBuffered = true;
+
+ if (ch >= 0x30 && ch <= 0x37) {
+ x = (x << 3) + (ch & 0x0F);
+ ch = this.nextChar();
+
+ if (ch >= 0x30 && ch <= 0x37) {
+ charBuffered = false;
+ x = (x << 3) + (ch & 0x0F);
+ }
+ }
+
+ strBuf.push(String.fromCharCode(x));
+ break;
+
+ case 0x0D:
+ if (this.peekChar() === 0x0A) {
+ this.nextChar();
+ }
+
+ break;
+
+ case 0x0A:
+ break;
+
+ default:
+ strBuf.push(String.fromCharCode(ch));
+ break;
+ }
+
+ break;
+
+ default:
+ strBuf.push(String.fromCharCode(ch));
+ break;
+ }
+
+ if (done) {
+ break;
+ }
+
+ if (!charBuffered) {
+ ch = this.nextChar();
+ }
+ }
+
+ return strBuf.join('');
+ }
+ }, {
+ key: "getName",
+ value: function getName() {
+ var ch, previousCh;
+ var strBuf = this.strBuf;
+ strBuf.length = 0;
+
+ while ((ch = this.nextChar()) >= 0 && !specialChars[ch]) {
+ if (ch === 0x23) {
+ ch = this.nextChar();
+
+ if (specialChars[ch]) {
+ (0, _util.warn)('Lexer_getName: ' + 'NUMBER SIGN (#) should be followed by a hexadecimal number.');
+ strBuf.push('#');
+ break;
+ }
+
+ var x = toHexDigit(ch);
+
+ if (x !== -1) {
+ previousCh = ch;
+ ch = this.nextChar();
+ var x2 = toHexDigit(ch);
+
+ if (x2 === -1) {
+ (0, _util.warn)("Lexer_getName: Illegal digit (".concat(String.fromCharCode(ch), ") ") + 'in hexadecimal number.');
+ strBuf.push('#', String.fromCharCode(previousCh));
+
+ if (specialChars[ch]) {
+ break;
+ }
+
+ strBuf.push(String.fromCharCode(ch));
+ continue;
+ }
+
+ strBuf.push(String.fromCharCode(x << 4 | x2));
+ } else {
+ strBuf.push('#', String.fromCharCode(ch));
+ }
+ } else {
+ strBuf.push(String.fromCharCode(ch));
+ }
+ }
+
+ if (strBuf.length > 127) {
+ (0, _util.warn)("Name token is longer than allowed by the spec: ".concat(strBuf.length));
+ }
+
+ return _primitives.Name.get(strBuf.join(''));
+ }
+ }, {
+ key: "getHexString",
+ value: function getHexString() {
+ var strBuf = this.strBuf;
+ strBuf.length = 0;
+ var ch = this.currentChar;
+ var isFirstHex = true;
+ var firstDigit, secondDigit;
+
+ while (true) {
+ if (ch < 0) {
+ (0, _util.warn)('Unterminated hex string');
+ break;
+ } else if (ch === 0x3E) {
+ this.nextChar();
+ break;
+ } else if (specialChars[ch] === 1) {
+ ch = this.nextChar();
+ continue;
+ } else {
+ if (isFirstHex) {
+ firstDigit = toHexDigit(ch);
+
+ if (firstDigit === -1) {
+ (0, _util.warn)("Ignoring invalid character \"".concat(ch, "\" in hex string"));
+ ch = this.nextChar();
+ continue;
+ }
+ } else {
+ secondDigit = toHexDigit(ch);
+
+ if (secondDigit === -1) {
+ (0, _util.warn)("Ignoring invalid character \"".concat(ch, "\" in hex string"));
+ ch = this.nextChar();
+ continue;
+ }
+
+ strBuf.push(String.fromCharCode(firstDigit << 4 | secondDigit));
+ }
+
+ isFirstHex = !isFirstHex;
+ ch = this.nextChar();
+ }
+ }
+
+ return strBuf.join('');
+ }
+ }, {
+ key: "getObj",
+ value: function getObj() {
+ var comment = false;
+ var ch = this.currentChar;
+
+ while (true) {
+ if (ch < 0) {
+ return _primitives.EOF;
+ }
+
+ if (comment) {
+ if (ch === 0x0A || ch === 0x0D) {
+ comment = false;
+ }
+ } else if (ch === 0x25) {
+ comment = true;
+ } else if (specialChars[ch] !== 1) {
+ break;
+ }
+
+ ch = this.nextChar();
+ }
+
+ switch (ch | 0) {
+ case 0x30:
+ case 0x31:
+ case 0x32:
+ case 0x33:
+ case 0x34:
+ case 0x35:
+ case 0x36:
+ case 0x37:
+ case 0x38:
+ case 0x39:
+ case 0x2B:
+ case 0x2D:
+ case 0x2E:
+ return this.getNumber();
+
+ case 0x28:
+ return this.getString();
+
+ case 0x2F:
+ return this.getName();
+
+ case 0x5B:
+ this.nextChar();
+ return _primitives.Cmd.get('[');
+
+ case 0x5D:
+ this.nextChar();
+ return _primitives.Cmd.get(']');
+
+ case 0x3C:
+ ch = this.nextChar();
+
+ if (ch === 0x3C) {
+ this.nextChar();
+ return _primitives.Cmd.get('<<');
+ }
+
+ return this.getHexString();
+
+ case 0x3E:
+ ch = this.nextChar();
+
+ if (ch === 0x3E) {
+ this.nextChar();
+ return _primitives.Cmd.get('>>');
+ }
+
+ return _primitives.Cmd.get('>');
+
+ case 0x7B:
+ this.nextChar();
+ return _primitives.Cmd.get('{');
+
+ case 0x7D:
+ this.nextChar();
+ return _primitives.Cmd.get('}');
+
+ case 0x29:
+ this.nextChar();
+ throw new _util.FormatError("Illegal character: ".concat(ch));
+ }
+
+ var str = String.fromCharCode(ch);
+ var knownCommands = this.knownCommands;
+ var knownCommandFound = knownCommands && knownCommands[str] !== undefined;
+
+ while ((ch = this.nextChar()) >= 0 && !specialChars[ch]) {
+ var possibleCommand = str + String.fromCharCode(ch);
+
+ if (knownCommandFound && knownCommands[possibleCommand] === undefined) {
+ break;
+ }
+
+ if (str.length === 128) {
+ throw new _util.FormatError("Command token too long: ".concat(str.length));
+ }
+
+ str = possibleCommand;
+ knownCommandFound = knownCommands && knownCommands[str] !== undefined;
+ }
+
+ if (str === 'true') {
+ return true;
+ }
+
+ if (str === 'false') {
+ return false;
+ }
+
+ if (str === 'null') {
+ return null;
+ }
+
+ if (str === 'BI') {
+ this.beginInlineImagePos = this.stream.pos;
+ }
+
+ return _primitives.Cmd.get(str);
+ }
+ }, {
+ key: "skipToNextLine",
+ value: function skipToNextLine() {
+ var ch = this.currentChar;
+
+ while (ch >= 0) {
+ if (ch === 0x0D) {
+ ch = this.nextChar();
+
+ if (ch === 0x0A) {
+ this.nextChar();
+ }
+
+ break;
+ } else if (ch === 0x0A) {
+ this.nextChar();
+ break;
+ }
+
+ ch = this.nextChar();
+ }
+ }
+ }]);
+
+ return Lexer;
+}();
+
+exports.Lexer = Lexer;
+
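+// Parses and validates the linearization parameter dictionary found at the
+// start of a linearized PDF; create() returns null when the document is not
+// linearized and throws when required entries are missing or inconsistent.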
+var Linearization =
+/*#__PURE__*/
+function () {
+ function Linearization() {
+ _classCallCheck(this, Linearization);
+ }
+
+ _createClass(Linearization, null, [{
+ key: "create",
+ value: function create(stream) {
+ function getInt(linDict, name) {
+ var allowZeroValue = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
+ var obj = linDict.get(name);
+
+ if (Number.isInteger(obj) && (allowZeroValue ? obj >= 0 : obj > 0)) {
+ return obj;
+ }
+
+ throw new Error("The \"".concat(name, "\" parameter in the linearization ") + 'dictionary is invalid.');
+ }
+
+ function getHints(linDict) {
+ var hints = linDict.get('H');
+ var hintsLength;
+
+ if (Array.isArray(hints) && ((hintsLength = hints.length) === 2 || hintsLength === 4)) {
+ for (var index = 0; index < hintsLength; index++) {
+ var hint = hints[index];
+
+ if (!(Number.isInteger(hint) && hint > 0)) {
+ throw new Error("Hint (".concat(index, ") in the linearization dictionary ") + 'is invalid.');
+ }
+ }
+
+ return hints;
+ }
+
+ throw new Error('Hint array in the linearization dictionary is invalid.');
+ }
+
+ var parser = new Parser({
+ lexer: new Lexer(stream),
+ xref: null
+ });
+ var obj1 = parser.getObj();
+ var obj2 = parser.getObj();
+ var obj3 = parser.getObj();
+ var linDict = parser.getObj();
+ var obj, length;
+
+ if (!(Number.isInteger(obj1) && Number.isInteger(obj2) && (0, _primitives.isCmd)(obj3, 'obj') && (0, _primitives.isDict)(linDict) && (0, _util.isNum)(obj = linDict.get('Linearized')) && obj > 0)) {
+ return null;
+ } else if ((length = getInt(linDict, 'L')) !== stream.length) {
+ throw new Error('The "L" parameter in the linearization dictionary ' + 'does not equal the stream length.');
+ }
+
+ return {
+ length: length,
+ hints: getHints(linDict),
+ objectNumberFirst: getInt(linDict, 'O'),
+ endFirst: getInt(linDict, 'E'),
+ numPages: getInt(linDict, 'N'),
+ mainXRefEntriesOffset: getInt(linDict, 'T'),
+ pageFirst: linDict.has('P') ? getInt(linDict, 'P', true) : 0
+ };
+ }
+ }]);
+
+ return Linearization;
+}();
+
+exports.Linearization = Linearization;
+
+/***/ }),
+/* 158 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.LZWStream = exports.StringStream = exports.StreamsSequenceStream = exports.Stream = exports.RunLengthStream = exports.PredictorStream = exports.NullStream = exports.FlateStream = exports.DecodeStream = exports.DecryptStream = exports.AsciiHexStream = exports.Ascii85Stream = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _primitives = __w_pdfjs_require__(151);
+
+function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread(); }
+
+function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance"); }
+
+function _iterableToArray(iter) { if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter); }
+
+function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } }
+
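+// Stream wraps a Uint8Array with a read cursor and byte-level accessors
+// (getByte, getUint16, getInt32, peek/skip); makeSubStream() carves out a
+// window that shares the same underlying buffer.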
+var Stream = function StreamClosure() {
+ function Stream(arrayBuffer, start, length, dict) {
+ this.bytes = arrayBuffer instanceof Uint8Array ? arrayBuffer : new Uint8Array(arrayBuffer);
+ this.start = start || 0;
+ this.pos = this.start;
+ this.end = start + length || this.bytes.length;
+ this.dict = dict;
+ }
+
+ Stream.prototype = {
+ get length() {
+ return this.end - this.start;
+ },
+
+ get isEmpty() {
+ return this.length === 0;
+ },
+
+ getByte: function Stream_getByte() {
+ if (this.pos >= this.end) {
+ return -1;
+ }
+
+ return this.bytes[this.pos++];
+ },
+ getUint16: function Stream_getUint16() {
+ var b0 = this.getByte();
+ var b1 = this.getByte();
+
+ if (b0 === -1 || b1 === -1) {
+ return -1;
+ }
+
+ return (b0 << 8) + b1;
+ },
+ getInt32: function Stream_getInt32() {
+ var b0 = this.getByte();
+ var b1 = this.getByte();
+ var b2 = this.getByte();
+ var b3 = this.getByte();
+ return (b0 << 24) + (b1 << 16) + (b2 << 8) + b3;
+ },
+ getBytes: function getBytes(length) {
+ var forceClamped = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
+ var bytes = this.bytes;
+ var pos = this.pos;
+ var strEnd = this.end;
+
+ if (!length) {
+ var _subarray = bytes.subarray(pos, strEnd);
+
+ return forceClamped ? new Uint8ClampedArray(_subarray) : _subarray;
+ }
+
+ var end = pos + length;
+
+ if (end > strEnd) {
+ end = strEnd;
+ }
+
+ this.pos = end;
+ var subarray = bytes.subarray(pos, end);
+ return forceClamped ? new Uint8ClampedArray(subarray) : subarray;
+ },
+ peekByte: function Stream_peekByte() {
+ var peekedByte = this.getByte();
+ this.pos--;
+ return peekedByte;
+ },
+ peekBytes: function peekBytes(length) {
+ var forceClamped = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
+ var bytes = this.getBytes(length, forceClamped);
+ this.pos -= bytes.length;
+ return bytes;
+ },
+ skip: function Stream_skip(n) {
+ if (!n) {
+ n = 1;
+ }
+
+ this.pos += n;
+ },
+ reset: function Stream_reset() {
+ this.pos = this.start;
+ },
+ moveStart: function Stream_moveStart() {
+ this.start = this.pos;
+ },
+ makeSubStream: function Stream_makeSubStream(start, length, dict) {
+ return new Stream(this.bytes.buffer, start, length, dict);
+ }
+ };
+ return Stream;
+}();
+
+exports.Stream = Stream;
+
+var StringStream = function StringStreamClosure() {
+ function StringStream(str) {
+ var bytes = (0, _util.stringToBytes)(str);
+ Stream.call(this, bytes);
+ }
+
+ StringStream.prototype = Stream.prototype;
+ return StringStream;
+}();
+
+exports.StringStream = StringStream;
+
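+// DecodeStream: shared base for filter streams; subclasses supply readBlock() to fill a lazily grown output buffer on demand.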
+var DecodeStream = function DecodeStreamClosure() {
+ var emptyBuffer = new Uint8Array(0);
+
+ function DecodeStream(maybeMinBufferLength) {
+ this._rawMinBufferLength = maybeMinBufferLength || 0;
+ this.pos = 0;
+ this.bufferLength = 0;
+ this.eof = false;
+ this.buffer = emptyBuffer;
+ this.minBufferLength = 512;
+
+ if (maybeMinBufferLength) {
+ while (this.minBufferLength < maybeMinBufferLength) {
+ this.minBufferLength *= 2;
+ }
+ }
+ }
+
+ DecodeStream.prototype = {
+ get isEmpty() {
+ while (!this.eof && this.bufferLength === 0) {
+ this.readBlock();
+ }
+
+ return this.bufferLength === 0;
+ },
+
+ ensureBuffer: function DecodeStream_ensureBuffer(requested) {
+ var buffer = this.buffer;
+
+ if (requested <= buffer.byteLength) {
+ return buffer;
+ }
+
+ var size = this.minBufferLength;
+
+ while (size < requested) {
+ size *= 2;
+ }
+
+ var buffer2 = new Uint8Array(size);
+ buffer2.set(buffer);
+ return this.buffer = buffer2;
+ },
+ getByte: function DecodeStream_getByte() {
+ var pos = this.pos;
+
+ while (this.bufferLength <= pos) {
+ if (this.eof) {
+ return -1;
+ }
+
+ this.readBlock();
+ }
+
+ return this.buffer[this.pos++];
+ },
+ getUint16: function DecodeStream_getUint16() {
+ var b0 = this.getByte();
+ var b1 = this.getByte();
+
+ if (b0 === -1 || b1 === -1) {
+ return -1;
+ }
+
+ return (b0 << 8) + b1;
+ },
+ getInt32: function DecodeStream_getInt32() {
+ var b0 = this.getByte();
+ var b1 = this.getByte();
+ var b2 = this.getByte();
+ var b3 = this.getByte();
+ return (b0 << 24) + (b1 << 16) + (b2 << 8) + b3;
+ },
+ getBytes: function getBytes(length) {
+ var forceClamped = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
+ var end,
+ pos = this.pos;
+
+ if (length) {
+ this.ensureBuffer(pos + length);
+ end = pos + length;
+
+ while (!this.eof && this.bufferLength < end) {
+ this.readBlock();
+ }
+
+ var bufEnd = this.bufferLength;
+
+ if (end > bufEnd) {
+ end = bufEnd;
+ }
+ } else {
+ while (!this.eof) {
+ this.readBlock();
+ }
+
+ end = this.bufferLength;
+ }
+
+ this.pos = end;
+ var subarray = this.buffer.subarray(pos, end);
+ return forceClamped && !(subarray instanceof Uint8ClampedArray) ? new Uint8ClampedArray(subarray) : subarray;
+ },
+ peekByte: function DecodeStream_peekByte() {
+ var peekedByte = this.getByte();
+ this.pos--;
+ return peekedByte;
+ },
+ peekBytes: function peekBytes(length) {
+ var forceClamped = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
+ var bytes = this.getBytes(length, forceClamped);
+ this.pos -= bytes.length;
+ return bytes;
+ },
+ makeSubStream: function DecodeStream_makeSubStream(start, length, dict) {
+ var end = start + length;
+
+ while (this.bufferLength <= end && !this.eof) {
+ this.readBlock();
+ }
+
+ return new Stream(this.buffer, start, length, dict);
+ },
+ skip: function DecodeStream_skip(n) {
+ if (!n) {
+ n = 1;
+ }
+
+ this.pos += n;
+ },
+ reset: function DecodeStream_reset() {
+ this.pos = 0;
+ },
+ getBaseStreams: function DecodeStream_getBaseStreams() {
+ if (this.str && this.str.getBaseStreams) {
+ return this.str.getBaseStreams();
+ }
+
+ return [];
+ }
+ };
+ return DecodeStream;
+}();
+
+exports.DecodeStream = DecodeStream;
+
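+// StreamsSequenceStream: concatenates several streams, decoding one of them per readBlock() call.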
+var StreamsSequenceStream = function StreamsSequenceStreamClosure() {
+ function StreamsSequenceStream(streams) {
+ this.streams = streams;
+ var maybeLength = 0;
+
+ for (var i = 0, ii = streams.length; i < ii; i++) {
+ var stream = streams[i];
+
+ if (stream instanceof DecodeStream) {
+ maybeLength += stream._rawMinBufferLength;
+ } else {
+ maybeLength += stream.length;
+ }
+ }
+
+ DecodeStream.call(this, maybeLength);
+ }
+
+ StreamsSequenceStream.prototype = Object.create(DecodeStream.prototype);
+
+ StreamsSequenceStream.prototype.readBlock = function streamSequenceStreamReadBlock() {
+ var streams = this.streams;
+
+ if (streams.length === 0) {
+ this.eof = true;
+ return;
+ }
+
+ var stream = streams.shift();
+ var chunk = stream.getBytes();
+ var bufferLength = this.bufferLength;
+ var newLength = bufferLength + chunk.length;
+ var buffer = this.ensureBuffer(newLength);
+ buffer.set(chunk, bufferLength);
+ this.bufferLength = newLength;
+ };
+
+ StreamsSequenceStream.prototype.getBaseStreams = function StreamsSequenceStream_getBaseStreams() {
+ var baseStreams = [];
+
+ for (var i = 0, ii = this.streams.length; i < ii; i++) {
+ var stream = this.streams[i];
+
+ if (stream.getBaseStreams) {
+ baseStreams.push.apply(baseStreams, _toConsumableArray(stream.getBaseStreams()));
+ }
+ }
+
+ return baseStreams;
+ };
+
+ return StreamsSequenceStream;
+}();
+
+exports.StreamsSequenceStream = StreamsSequenceStream;
+
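+// FlateStream: DEFLATE (RFC 1951) decoder for /FlateDecode; handles stored blocks plus fixed and dynamic Huffman blocks.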
+var FlateStream = function FlateStreamClosure() {
+ var codeLenCodeMap = new Int32Array([16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15]);
+ var lengthDecode = new Int32Array([0x00003, 0x00004, 0x00005, 0x00006, 0x00007, 0x00008, 0x00009, 0x0000a, 0x1000b, 0x1000d, 0x1000f, 0x10011, 0x20013, 0x20017, 0x2001b, 0x2001f, 0x30023, 0x3002b, 0x30033, 0x3003b, 0x40043, 0x40053, 0x40063, 0x40073, 0x50083, 0x500a3, 0x500c3, 0x500e3, 0x00102, 0x00102, 0x00102]);
+ var distDecode = new Int32Array([0x00001, 0x00002, 0x00003, 0x00004, 0x10005, 0x10007, 0x20009, 0x2000d, 0x30011, 0x30019, 0x40021, 0x40031, 0x50041, 0x50061, 0x60081, 0x600c1, 0x70101, 0x70181, 0x80201, 0x80301, 0x90401, 0x90601, 0xa0801, 0xa0c01, 0xb1001, 0xb1801, 0xc2001, 0xc3001, 0xd4001, 0xd6001]);
+ var fixedLitCodeTab = [new Int32Array([0x70100, 0x80050, 0x80010, 0x80118, 0x70110, 0x80070, 0x80030, 0x900c0, 0x70108, 0x80060, 0x80020, 0x900a0, 0x80000, 0x80080, 0x80040, 0x900e0, 0x70104, 0x80058, 0x80018, 0x90090, 0x70114, 0x80078, 0x80038, 0x900d0, 0x7010c, 0x80068, 0x80028, 0x900b0, 0x80008, 0x80088, 0x80048, 0x900f0, 0x70102, 0x80054, 0x80014, 0x8011c, 0x70112, 0x80074, 0x80034, 0x900c8, 0x7010a, 0x80064, 0x80024, 0x900a8, 0x80004, 0x80084, 0x80044, 0x900e8, 0x70106, 0x8005c, 0x8001c, 0x90098, 0x70116, 0x8007c, 0x8003c, 0x900d8, 0x7010e, 0x8006c, 0x8002c, 0x900b8, 0x8000c, 0x8008c, 0x8004c, 0x900f8, 0x70101, 0x80052, 0x80012, 0x8011a, 0x70111, 0x80072, 0x80032, 0x900c4, 0x70109, 0x80062, 0x80022, 0x900a4, 0x80002, 0x80082, 0x80042, 0x900e4, 0x70105, 0x8005a, 0x8001a, 0x90094, 0x70115, 0x8007a, 0x8003a, 0x900d4, 0x7010d, 0x8006a, 0x8002a, 0x900b4, 0x8000a, 0x8008a, 0x8004a, 0x900f4, 0x70103, 0x80056, 0x80016, 0x8011e, 0x70113, 0x80076, 0x80036, 0x900cc, 0x7010b, 0x80066, 0x80026, 0x900ac, 0x80006, 0x80086, 0x80046, 0x900ec, 0x70107, 0x8005e, 0x8001e, 0x9009c, 0x70117, 0x8007e, 0x8003e, 0x900dc, 0x7010f, 0x8006e, 0x8002e, 0x900bc, 0x8000e, 0x8008e, 0x8004e, 0x900fc, 0x70100, 0x80051, 0x80011, 0x80119, 0x70110, 0x80071, 0x80031, 0x900c2, 0x70108, 0x80061, 0x80021, 0x900a2, 0x80001, 0x80081, 0x80041, 0x900e2, 0x70104, 0x80059, 0x80019, 0x90092, 0x70114, 0x80079, 0x80039, 0x900d2, 0x7010c, 0x80069, 0x80029, 0x900b2, 0x80009, 0x80089, 0x80049, 0x900f2, 0x70102, 0x80055, 0x80015, 0x8011d, 0x70112, 0x80075, 0x80035, 0x900ca, 0x7010a, 0x80065, 0x80025, 0x900aa, 0x80005, 0x80085, 0x80045, 0x900ea, 0x70106, 0x8005d, 0x8001d, 0x9009a, 0x70116, 0x8007d, 0x8003d, 0x900da, 0x7010e, 0x8006d, 0x8002d, 0x900ba, 0x8000d, 0x8008d, 0x8004d, 0x900fa, 0x70101, 0x80053, 0x80013, 0x8011b, 0x70111, 0x80073, 0x80033, 0x900c6, 0x70109, 0x80063, 0x80023, 0x900a6, 0x80003, 0x80083, 0x80043, 0x900e6, 0x70105, 0x8005b, 0x8001b, 0x90096, 0x70115, 0x8007b, 0x8003b, 0x900d6, 0x7010d, 0x8006b, 0x8002b, 0x900b6, 0x8000b, 0x8008b, 0x8004b, 0x900f6, 0x70103, 0x80057, 0x80017, 0x8011f, 0x70113, 0x80077, 0x80037, 0x900ce, 0x7010b, 0x80067, 0x80027, 0x900ae, 0x80007, 0x80087, 0x80047, 0x900ee, 0x70107, 0x8005f, 0x8001f, 0x9009e, 0x70117, 0x8007f, 0x8003f, 0x900de, 0x7010f, 0x8006f, 0x8002f, 0x900be, 0x8000f, 0x8008f, 0x8004f, 0x900fe, 0x70100, 0x80050, 0x80010, 0x80118, 0x70110, 0x80070, 0x80030, 0x900c1, 0x70108, 0x80060, 0x80020, 0x900a1, 0x80000, 0x80080, 0x80040, 0x900e1, 0x70104, 0x80058, 0x80018, 0x90091, 0x70114, 0x80078, 0x80038, 0x900d1, 0x7010c, 0x80068, 0x80028, 0x900b1, 0x80008, 0x80088, 0x80048, 0x900f1, 0x70102, 0x80054, 0x80014, 0x8011c, 0x70112, 0x80074, 0x80034, 0x900c9, 0x7010a, 0x80064, 0x80024, 0x900a9, 0x80004, 0x80084, 0x80044, 0x900e9, 0x70106, 0x8005c, 0x8001c, 0x90099, 0x70116, 0x8007c, 0x8003c, 0x900d9, 0x7010e, 0x8006c, 0x8002c, 0x900b9, 0x8000c, 0x8008c, 0x8004c, 0x900f9, 0x70101, 0x80052, 0x80012, 0x8011a, 0x70111, 0x80072, 0x80032, 0x900c5, 0x70109, 0x80062, 0x80022, 0x900a5, 0x80002, 0x80082, 0x80042, 0x900e5, 0x70105, 0x8005a, 0x8001a, 0x90095, 0x70115, 0x8007a, 0x8003a, 0x900d5, 0x7010d, 0x8006a, 0x8002a, 0x900b5, 0x8000a, 0x8008a, 0x8004a, 0x900f5, 0x70103, 0x80056, 0x80016, 0x8011e, 0x70113, 0x80076, 0x80036, 0x900cd, 0x7010b, 0x80066, 0x80026, 0x900ad, 0x80006, 0x80086, 0x80046, 0x900ed, 0x70107, 0x8005e, 0x8001e, 0x9009d, 0x70117, 0x8007e, 0x8003e, 0x900dd, 0x7010f, 0x8006e, 0x8002e, 0x900bd, 0x8000e, 0x8008e, 0x8004e, 0x900fd, 0x70100, 0x80051, 0x80011, 0x80119, 0x70110, 0x80071, 
0x80031, 0x900c3, 0x70108, 0x80061, 0x80021, 0x900a3, 0x80001, 0x80081, 0x80041, 0x900e3, 0x70104, 0x80059, 0x80019, 0x90093, 0x70114, 0x80079, 0x80039, 0x900d3, 0x7010c, 0x80069, 0x80029, 0x900b3, 0x80009, 0x80089, 0x80049, 0x900f3, 0x70102, 0x80055, 0x80015, 0x8011d, 0x70112, 0x80075, 0x80035, 0x900cb, 0x7010a, 0x80065, 0x80025, 0x900ab, 0x80005, 0x80085, 0x80045, 0x900eb, 0x70106, 0x8005d, 0x8001d, 0x9009b, 0x70116, 0x8007d, 0x8003d, 0x900db, 0x7010e, 0x8006d, 0x8002d, 0x900bb, 0x8000d, 0x8008d, 0x8004d, 0x900fb, 0x70101, 0x80053, 0x80013, 0x8011b, 0x70111, 0x80073, 0x80033, 0x900c7, 0x70109, 0x80063, 0x80023, 0x900a7, 0x80003, 0x80083, 0x80043, 0x900e7, 0x70105, 0x8005b, 0x8001b, 0x90097, 0x70115, 0x8007b, 0x8003b, 0x900d7, 0x7010d, 0x8006b, 0x8002b, 0x900b7, 0x8000b, 0x8008b, 0x8004b, 0x900f7, 0x70103, 0x80057, 0x80017, 0x8011f, 0x70113, 0x80077, 0x80037, 0x900cf, 0x7010b, 0x80067, 0x80027, 0x900af, 0x80007, 0x80087, 0x80047, 0x900ef, 0x70107, 0x8005f, 0x8001f, 0x9009f, 0x70117, 0x8007f, 0x8003f, 0x900df, 0x7010f, 0x8006f, 0x8002f, 0x900bf, 0x8000f, 0x8008f, 0x8004f, 0x900ff]), 9];
+ var fixedDistCodeTab = [new Int32Array([0x50000, 0x50010, 0x50008, 0x50018, 0x50004, 0x50014, 0x5000c, 0x5001c, 0x50002, 0x50012, 0x5000a, 0x5001a, 0x50006, 0x50016, 0x5000e, 0x00000, 0x50001, 0x50011, 0x50009, 0x50019, 0x50005, 0x50015, 0x5000d, 0x5001d, 0x50003, 0x50013, 0x5000b, 0x5001b, 0x50007, 0x50017, 0x5000f, 0x00000]), 5];
+
+ function FlateStream(str, maybeLength) {
+ this.str = str;
+ this.dict = str.dict;
+ var cmf = str.getByte();
+ var flg = str.getByte();
+
+ if (cmf === -1 || flg === -1) {
+ throw new _util.FormatError("Invalid header in flate stream: ".concat(cmf, ", ").concat(flg));
+ }
+
+ if ((cmf & 0x0f) !== 0x08) {
+ throw new _util.FormatError("Unknown compression method in flate stream: ".concat(cmf, ", ").concat(flg));
+ }
+
+ if (((cmf << 8) + flg) % 31 !== 0) {
+ throw new _util.FormatError("Bad FCHECK in flate stream: ".concat(cmf, ", ").concat(flg));
+ }
+
+ if (flg & 0x20) {
+ throw new _util.FormatError("FDICT bit set in flate stream: ".concat(cmf, ", ").concat(flg));
+ }
+
+ this.codeSize = 0;
+ this.codeBuf = 0;
+ DecodeStream.call(this, maybeLength);
+ }
+
+ FlateStream.prototype = Object.create(DecodeStream.prototype);
+
+ FlateStream.prototype.getBits = function FlateStream_getBits(bits) {
+ var str = this.str;
+ var codeSize = this.codeSize;
+ var codeBuf = this.codeBuf;
+ var b;
+
+ while (codeSize < bits) {
+ if ((b = str.getByte()) === -1) {
+ throw new _util.FormatError('Bad encoding in flate stream');
+ }
+
+ codeBuf |= b << codeSize;
+ codeSize += 8;
+ }
+
+ b = codeBuf & (1 << bits) - 1;
+ this.codeBuf = codeBuf >> bits;
+ this.codeSize = codeSize -= bits;
+ return b;
+ };
+
+ FlateStream.prototype.getCode = function FlateStream_getCode(table) {
+ var str = this.str;
+ var codes = table[0];
+ var maxLen = table[1];
+ var codeSize = this.codeSize;
+ var codeBuf = this.codeBuf;
+ var b;
+
+ while (codeSize < maxLen) {
+ if ((b = str.getByte()) === -1) {
+ break;
+ }
+
+ codeBuf |= b << codeSize;
+ codeSize += 8;
+ }
+
+ var code = codes[codeBuf & (1 << maxLen) - 1];
+ var codeLen = code >> 16;
+ var codeVal = code & 0xffff;
+
+ if (codeLen < 1 || codeSize < codeLen) {
+ throw new _util.FormatError('Bad encoding in flate stream');
+ }
+
+ this.codeBuf = codeBuf >> codeLen;
+ this.codeSize = codeSize - codeLen;
+ return codeVal;
+ };
+
+ FlateStream.prototype.generateHuffmanTable = function flateStreamGenerateHuffmanTable(lengths) {
+ var n = lengths.length;
+ var maxLen = 0;
+ var i;
+
+ for (i = 0; i < n; ++i) {
+ if (lengths[i] > maxLen) {
+ maxLen = lengths[i];
+ }
+ }
+
+ var size = 1 << maxLen;
+ var codes = new Int32Array(size);
+
+ for (var len = 1, code = 0, skip = 2; len <= maxLen; ++len, code <<= 1, skip <<= 1) {
+ for (var val = 0; val < n; ++val) {
+ if (lengths[val] === len) {
+ var code2 = 0;
+ var t = code;
+
+ for (i = 0; i < len; ++i) {
+ code2 = code2 << 1 | t & 1;
+ t >>= 1;
+ }
+
+ for (i = code2; i < size; i += skip) {
+ codes[i] = len << 16 | val;
+ }
+
+ ++code;
+ }
+ }
+ }
+
+ return [codes, maxLen];
+ };
+
+ FlateStream.prototype.readBlock = function FlateStream_readBlock() {
+ var buffer, len;
+ var str = this.str;
+ var hdr = this.getBits(3);
+
+ if (hdr & 1) {
+ this.eof = true;
+ }
+
+ hdr >>= 1;
+
+ if (hdr === 0) {
+ var b;
+
+ if ((b = str.getByte()) === -1) {
+ throw new _util.FormatError('Bad block header in flate stream');
+ }
+
+ var blockLen = b;
+
+ if ((b = str.getByte()) === -1) {
+ throw new _util.FormatError('Bad block header in flate stream');
+ }
+
+ blockLen |= b << 8;
+
+ if ((b = str.getByte()) === -1) {
+ throw new _util.FormatError('Bad block header in flate stream');
+ }
+
+ var check = b;
+
+ if ((b = str.getByte()) === -1) {
+ throw new _util.FormatError('Bad block header in flate stream');
+ }
+
+ check |= b << 8;
+
+ if (check !== (~blockLen & 0xffff) && (blockLen !== 0 || check !== 0)) {
+ throw new _util.FormatError('Bad uncompressed block length in flate stream');
+ }
+
+ this.codeBuf = 0;
+ this.codeSize = 0;
+ var bufferLength = this.bufferLength;
+ buffer = this.ensureBuffer(bufferLength + blockLen);
+ var end = bufferLength + blockLen;
+ this.bufferLength = end;
+
+ if (blockLen === 0) {
+ if (str.peekByte() === -1) {
+ this.eof = true;
+ }
+ } else {
+ for (var n = bufferLength; n < end; ++n) {
+ if ((b = str.getByte()) === -1) {
+ this.eof = true;
+ break;
+ }
+
+ buffer[n] = b;
+ }
+ }
+
+ return;
+ }
+
+ var litCodeTable;
+ var distCodeTable;
+
+ if (hdr === 1) {
+ litCodeTable = fixedLitCodeTab;
+ distCodeTable = fixedDistCodeTab;
+ } else if (hdr === 2) {
+ var numLitCodes = this.getBits(5) + 257;
+ var numDistCodes = this.getBits(5) + 1;
+ var numCodeLenCodes = this.getBits(4) + 4;
+ var codeLenCodeLengths = new Uint8Array(codeLenCodeMap.length);
+ var i;
+
+ for (i = 0; i < numCodeLenCodes; ++i) {
+ codeLenCodeLengths[codeLenCodeMap[i]] = this.getBits(3);
+ }
+
+ var codeLenCodeTab = this.generateHuffmanTable(codeLenCodeLengths);
+ len = 0;
+ i = 0;
+ var codes = numLitCodes + numDistCodes;
+ var codeLengths = new Uint8Array(codes);
+ var bitsLength, bitsOffset, what;
+
+ while (i < codes) {
+ var code = this.getCode(codeLenCodeTab);
+
+ if (code === 16) {
+ bitsLength = 2;
+ bitsOffset = 3;
+ what = len;
+ } else if (code === 17) {
+ bitsLength = 3;
+ bitsOffset = 3;
+ what = len = 0;
+ } else if (code === 18) {
+ bitsLength = 7;
+ bitsOffset = 11;
+ what = len = 0;
+ } else {
+ codeLengths[i++] = len = code;
+ continue;
+ }
+
+ var repeatLength = this.getBits(bitsLength) + bitsOffset;
+
+ while (repeatLength-- > 0) {
+ codeLengths[i++] = what;
+ }
+ }
+
+ litCodeTable = this.generateHuffmanTable(codeLengths.subarray(0, numLitCodes));
+ distCodeTable = this.generateHuffmanTable(codeLengths.subarray(numLitCodes, codes));
+ } else {
+ throw new _util.FormatError('Unknown block type in flate stream');
+ }
+
+ buffer = this.buffer;
+ var limit = buffer ? buffer.length : 0;
+ var pos = this.bufferLength;
+
+ while (true) {
+ var code1 = this.getCode(litCodeTable);
+
+ if (code1 < 256) {
+ if (pos + 1 >= limit) {
+ buffer = this.ensureBuffer(pos + 1);
+ limit = buffer.length;
+ }
+
+ buffer[pos++] = code1;
+ continue;
+ }
+
+ if (code1 === 256) {
+ this.bufferLength = pos;
+ return;
+ }
+
+ code1 -= 257;
+ code1 = lengthDecode[code1];
+ var code2 = code1 >> 16;
+
+ if (code2 > 0) {
+ code2 = this.getBits(code2);
+ }
+
+ len = (code1 & 0xffff) + code2;
+ code1 = this.getCode(distCodeTable);
+ code1 = distDecode[code1];
+ code2 = code1 >> 16;
+
+ if (code2 > 0) {
+ code2 = this.getBits(code2);
+ }
+
+ var dist = (code1 & 0xffff) + code2;
+
+ if (pos + len >= limit) {
+ buffer = this.ensureBuffer(pos + len);
+ limit = buffer.length;
+ }
+
+ for (var k = 0; k < len; ++k, ++pos) {
+ buffer[pos] = buffer[pos - dist];
+ }
+ }
+ };
+
+ return FlateStream;
+}();
+
+exports.FlateStream = FlateStream;
+
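+// PredictorStream: reverses row predictors applied before Flate/LZW compression (Predictor 2 = TIFF horizontal differencing, 10-15 = PNG filters None/Sub/Up/Average/Paeth).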
+var PredictorStream = function PredictorStreamClosure() {
+ function PredictorStream(str, maybeLength, params) {
+ if (!(0, _primitives.isDict)(params)) {
+ return str;
+ }
+
+ var predictor = this.predictor = params.get('Predictor') || 1;
+
+ if (predictor <= 1) {
+ return str;
+ }
+
+ if (predictor !== 2 && (predictor < 10 || predictor > 15)) {
+ throw new _util.FormatError("Unsupported predictor: ".concat(predictor));
+ }
+
+ if (predictor === 2) {
+ this.readBlock = this.readBlockTiff;
+ } else {
+ this.readBlock = this.readBlockPng;
+ }
+
+ this.str = str;
+ this.dict = str.dict;
+ var colors = this.colors = params.get('Colors') || 1;
+ var bits = this.bits = params.get('BitsPerComponent') || 8;
+ var columns = this.columns = params.get('Columns') || 1;
+ this.pixBytes = colors * bits + 7 >> 3;
+ this.rowBytes = columns * colors * bits + 7 >> 3;
+ DecodeStream.call(this, maybeLength);
+ return this;
+ }
+
+ PredictorStream.prototype = Object.create(DecodeStream.prototype);
+
+ PredictorStream.prototype.readBlockTiff = function predictorStreamReadBlockTiff() {
+ var rowBytes = this.rowBytes;
+ var bufferLength = this.bufferLength;
+ var buffer = this.ensureBuffer(bufferLength + rowBytes);
+ var bits = this.bits;
+ var colors = this.colors;
+ var rawBytes = this.str.getBytes(rowBytes);
+ this.eof = !rawBytes.length;
+
+ if (this.eof) {
+ return;
+ }
+
+ var inbuf = 0,
+ outbuf = 0;
+ var inbits = 0,
+ outbits = 0;
+ var pos = bufferLength;
+ var i;
+
+ if (bits === 1 && colors === 1) {
+ for (i = 0; i < rowBytes; ++i) {
+ var c = rawBytes[i] ^ inbuf;
+ c ^= c >> 1;
+ c ^= c >> 2;
+ c ^= c >> 4;
+ inbuf = (c & 1) << 7;
+ buffer[pos++] = c;
+ }
+ } else if (bits === 8) {
+ for (i = 0; i < colors; ++i) {
+ buffer[pos++] = rawBytes[i];
+ }
+
+ for (; i < rowBytes; ++i) {
+ buffer[pos] = buffer[pos - colors] + rawBytes[i];
+ pos++;
+ }
+ } else if (bits === 16) {
+ var bytesPerPixel = colors * 2;
+
+ for (i = 0; i < bytesPerPixel; ++i) {
+ buffer[pos++] = rawBytes[i];
+ }
+
+ for (; i < rowBytes; i += 2) {
+ var sum = ((rawBytes[i] & 0xFF) << 8) + (rawBytes[i + 1] & 0xFF) + ((buffer[pos - bytesPerPixel] & 0xFF) << 8) + (buffer[pos - bytesPerPixel + 1] & 0xFF);
+ buffer[pos++] = sum >> 8 & 0xFF;
+ buffer[pos++] = sum & 0xFF;
+ }
+ } else {
+ var compArray = new Uint8Array(colors + 1);
+ var bitMask = (1 << bits) - 1;
+ var j = 0,
+ k = bufferLength;
+ var columns = this.columns;
+
+ for (i = 0; i < columns; ++i) {
+ for (var kk = 0; kk < colors; ++kk) {
+ if (inbits < bits) {
+ inbuf = inbuf << 8 | rawBytes[j++] & 0xFF;
+ inbits += 8;
+ }
+
+ compArray[kk] = compArray[kk] + (inbuf >> inbits - bits) & bitMask;
+ inbits -= bits;
+ outbuf = outbuf << bits | compArray[kk];
+ outbits += bits;
+
+ if (outbits >= 8) {
+ buffer[k++] = outbuf >> outbits - 8 & 0xFF;
+ outbits -= 8;
+ }
+ }
+ }
+
+ if (outbits > 0) {
+ buffer[k++] = (outbuf << 8 - outbits) + (inbuf & (1 << 8 - outbits) - 1);
+ }
+ }
+
+ this.bufferLength += rowBytes;
+ };
+
+ PredictorStream.prototype.readBlockPng = function predictorStreamReadBlockPng() {
+ var rowBytes = this.rowBytes;
+ var pixBytes = this.pixBytes;
+ var predictor = this.str.getByte();
+ var rawBytes = this.str.getBytes(rowBytes);
+ this.eof = !rawBytes.length;
+
+ if (this.eof) {
+ return;
+ }
+
+ var bufferLength = this.bufferLength;
+ var buffer = this.ensureBuffer(bufferLength + rowBytes);
+ var prevRow = buffer.subarray(bufferLength - rowBytes, bufferLength);
+
+ if (prevRow.length === 0) {
+ prevRow = new Uint8Array(rowBytes);
+ }
+
+ var i,
+ j = bufferLength,
+ up,
+ c;
+
+ switch (predictor) {
+ case 0:
+ for (i = 0; i < rowBytes; ++i) {
+ buffer[j++] = rawBytes[i];
+ }
+
+ break;
+
+ case 1:
+ for (i = 0; i < pixBytes; ++i) {
+ buffer[j++] = rawBytes[i];
+ }
+
+ for (; i < rowBytes; ++i) {
+ buffer[j] = buffer[j - pixBytes] + rawBytes[i] & 0xFF;
+ j++;
+ }
+
+ break;
+
+ case 2:
+ for (i = 0; i < rowBytes; ++i) {
+ buffer[j++] = prevRow[i] + rawBytes[i] & 0xFF;
+ }
+
+ break;
+
+ case 3:
+ for (i = 0; i < pixBytes; ++i) {
+ buffer[j++] = (prevRow[i] >> 1) + rawBytes[i];
+ }
+
+ for (; i < rowBytes; ++i) {
+ buffer[j] = (prevRow[i] + buffer[j - pixBytes] >> 1) + rawBytes[i] & 0xFF;
+ j++;
+ }
+
+ break;
+
+ case 4:
+ for (i = 0; i < pixBytes; ++i) {
+ up = prevRow[i];
+ c = rawBytes[i];
+ buffer[j++] = up + c;
+ }
+
+ for (; i < rowBytes; ++i) {
+ up = prevRow[i];
+ var upLeft = prevRow[i - pixBytes];
+ var left = buffer[j - pixBytes];
+ var p = left + up - upLeft;
+ var pa = p - left;
+
+ if (pa < 0) {
+ pa = -pa;
+ }
+
+ var pb = p - up;
+
+ if (pb < 0) {
+ pb = -pb;
+ }
+
+ var pc = p - upLeft;
+
+ if (pc < 0) {
+ pc = -pc;
+ }
+
+ c = rawBytes[i];
+
+ if (pa <= pb && pa <= pc) {
+ buffer[j++] = left + c;
+ } else if (pb <= pc) {
+ buffer[j++] = up + c;
+ } else {
+ buffer[j++] = upLeft + c;
+ }
+ }
+
+ break;
+
+ default:
+ throw new _util.FormatError("Unsupported predictor: ".concat(predictor));
+ }
+
+ this.bufferLength += rowBytes;
+ };
+
+ return PredictorStream;
+}();
+
+exports.PredictorStream = PredictorStream;
+
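+// DecryptStream: runs the supplied decrypt(chunk, isLastChunk) callback over 512-byte chunks of the underlying stream.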
+var DecryptStream = function DecryptStreamClosure() {
+ function DecryptStream(str, maybeLength, decrypt) {
+ this.str = str;
+ this.dict = str.dict;
+ this.decrypt = decrypt;
+ this.nextChunk = null;
+ this.initialized = false;
+ DecodeStream.call(this, maybeLength);
+ }
+
+ var chunkSize = 512;
+ DecryptStream.prototype = Object.create(DecodeStream.prototype);
+
+ DecryptStream.prototype.readBlock = function DecryptStream_readBlock() {
+ var chunk;
+
+ if (this.initialized) {
+ chunk = this.nextChunk;
+ } else {
+ chunk = this.str.getBytes(chunkSize);
+ this.initialized = true;
+ }
+
+ if (!chunk || chunk.length === 0) {
+ this.eof = true;
+ return;
+ }
+
+ this.nextChunk = this.str.getBytes(chunkSize);
+ var hasMoreData = this.nextChunk && this.nextChunk.length > 0;
+ var decrypt = this.decrypt;
+ chunk = decrypt(chunk, !hasMoreData);
+ var bufferLength = this.bufferLength;
+ var i,
+ n = chunk.length;
+ var buffer = this.ensureBuffer(bufferLength + n);
+
+ for (i = 0; i < n; i++) {
+ buffer[bufferLength++] = chunk[i];
+ }
+
+ this.bufferLength = bufferLength;
+ };
+
+ return DecryptStream;
+}();
+
+exports.DecryptStream = DecryptStream;
+
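+// Ascii85Stream: /ASCII85Decode filter; turns groups of five base-85 digits into four bytes, with 'z' shorthand for four zero bytes and '~' as EOD.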
+var Ascii85Stream = function Ascii85StreamClosure() {
+ function Ascii85Stream(str, maybeLength) {
+ this.str = str;
+ this.dict = str.dict;
+ this.input = new Uint8Array(5);
+
+ if (maybeLength) {
+ maybeLength = 0.8 * maybeLength;
+ }
+
+ DecodeStream.call(this, maybeLength);
+ }
+
+ Ascii85Stream.prototype = Object.create(DecodeStream.prototype);
+
+ Ascii85Stream.prototype.readBlock = function Ascii85Stream_readBlock() {
+ var TILDA_CHAR = 0x7E;
+ var Z_LOWER_CHAR = 0x7A;
+ var EOF = -1;
+ var str = this.str;
+ var c = str.getByte();
+
+ while ((0, _util.isSpace)(c)) {
+ c = str.getByte();
+ }
+
+ if (c === EOF || c === TILDA_CHAR) {
+ this.eof = true;
+ return;
+ }
+
+ var bufferLength = this.bufferLength,
+ buffer;
+ var i;
+
+ if (c === Z_LOWER_CHAR) {
+ buffer = this.ensureBuffer(bufferLength + 4);
+
+ for (i = 0; i < 4; ++i) {
+ buffer[bufferLength + i] = 0;
+ }
+
+ this.bufferLength += 4;
+ } else {
+ var input = this.input;
+ input[0] = c;
+
+ for (i = 1; i < 5; ++i) {
+ c = str.getByte();
+
+ while ((0, _util.isSpace)(c)) {
+ c = str.getByte();
+ }
+
+ input[i] = c;
+
+ if (c === EOF || c === TILDA_CHAR) {
+ break;
+ }
+ }
+
+ buffer = this.ensureBuffer(bufferLength + i - 1);
+ this.bufferLength += i - 1;
+
+ if (i < 5) {
+ for (; i < 5; ++i) {
+ input[i] = 0x21 + 84;
+ }
+
+ this.eof = true;
+ }
+
+ var t = 0;
+
+ for (i = 0; i < 5; ++i) {
+ t = t * 85 + (input[i] - 0x21);
+ }
+
+ for (i = 3; i >= 0; --i) {
+ buffer[bufferLength + i] = t & 0xFF;
+ t >>= 8;
+ }
+ }
+ };
+
+ return Ascii85Stream;
+}();
+
+exports.Ascii85Stream = Ascii85Stream;
+
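+// AsciiHexStream: /ASCIIHexDecode filter; pairs hex digits into bytes, ignoring other characters, until '>' (EOD).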
+var AsciiHexStream = function AsciiHexStreamClosure() {
+ function AsciiHexStream(str, maybeLength) {
+ this.str = str;
+ this.dict = str.dict;
+ this.firstDigit = -1;
+
+ if (maybeLength) {
+ maybeLength = 0.5 * maybeLength;
+ }
+
+ DecodeStream.call(this, maybeLength);
+ }
+
+ AsciiHexStream.prototype = Object.create(DecodeStream.prototype);
+
+ AsciiHexStream.prototype.readBlock = function AsciiHexStream_readBlock() {
+ var UPSTREAM_BLOCK_SIZE = 8000;
+ var bytes = this.str.getBytes(UPSTREAM_BLOCK_SIZE);
+
+ if (!bytes.length) {
+ this.eof = true;
+ return;
+ }
+
+ var maxDecodeLength = bytes.length + 1 >> 1;
+ var buffer = this.ensureBuffer(this.bufferLength + maxDecodeLength);
+ var bufferLength = this.bufferLength;
+ var firstDigit = this.firstDigit;
+
+ for (var i = 0, ii = bytes.length; i < ii; i++) {
+ var ch = bytes[i],
+ digit;
+
+ if (ch >= 0x30 && ch <= 0x39) {
+ digit = ch & 0x0F;
+ } else if (ch >= 0x41 && ch <= 0x46 || ch >= 0x61 && ch <= 0x66) {
+ digit = (ch & 0x0F) + 9;
+ } else if (ch === 0x3E) {
+ this.eof = true;
+ break;
+ } else {
+ continue;
+ }
+
+ if (firstDigit < 0) {
+ firstDigit = digit;
+ } else {
+ buffer[bufferLength++] = firstDigit << 4 | digit;
+ firstDigit = -1;
+ }
+ }
+
+ if (firstDigit >= 0 && this.eof) {
+ buffer[bufferLength++] = firstDigit << 4;
+ firstDigit = -1;
+ }
+
+ this.firstDigit = firstDigit;
+ this.bufferLength = bufferLength;
+ };
+
+ return AsciiHexStream;
+}();
+
+exports.AsciiHexStream = AsciiHexStream;
+
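+// RunLengthStream: /RunLengthDecode filter (PackBits-style RLE); a length byte n < 128 copies the next n + 1 bytes literally, n > 128 repeats the following byte 257 - n times, n = 128 marks EOD.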
+var RunLengthStream = function RunLengthStreamClosure() {
+ function RunLengthStream(str, maybeLength) {
+ this.str = str;
+ this.dict = str.dict;
+ DecodeStream.call(this, maybeLength);
+ }
+
+ RunLengthStream.prototype = Object.create(DecodeStream.prototype);
+
+ RunLengthStream.prototype.readBlock = function RunLengthStream_readBlock() {
+ var repeatHeader = this.str.getBytes(2);
+
+ if (!repeatHeader || repeatHeader.length < 2 || repeatHeader[0] === 128) {
+ this.eof = true;
+ return;
+ }
+
+ var buffer;
+ var bufferLength = this.bufferLength;
+ var n = repeatHeader[0];
+
+ if (n < 128) {
+ buffer = this.ensureBuffer(bufferLength + n + 1);
+ buffer[bufferLength++] = repeatHeader[1];
+
+ if (n > 0) {
+ var source = this.str.getBytes(n);
+ buffer.set(source, bufferLength);
+ bufferLength += n;
+ }
+ } else {
+ n = 257 - n;
+ var b = repeatHeader[1];
+ buffer = this.ensureBuffer(bufferLength + n + 1);
+
+ for (var i = 0; i < n; i++) {
+ buffer[bufferLength++] = b;
+ }
+ }
+
+ this.bufferLength = bufferLength;
+ };
+
+ return RunLengthStream;
+}();
+
+exports.RunLengthStream = RunLengthStream;
+
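+// LZWStream: /LZWDecode filter; variable-width (9-12 bit) LZW with a 4096-entry dictionary and optional early code-length change.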
+var LZWStream = function LZWStreamClosure() {
+ function LZWStream(str, maybeLength, earlyChange) {
+ this.str = str;
+ this.dict = str.dict;
+ this.cachedData = 0;
+ this.bitsCached = 0;
+ var maxLzwDictionarySize = 4096;
+ var lzwState = {
+ earlyChange: earlyChange,
+ codeLength: 9,
+ nextCode: 258,
+ dictionaryValues: new Uint8Array(maxLzwDictionarySize),
+ dictionaryLengths: new Uint16Array(maxLzwDictionarySize),
+ dictionaryPrevCodes: new Uint16Array(maxLzwDictionarySize),
+ currentSequence: new Uint8Array(maxLzwDictionarySize),
+ currentSequenceLength: 0
+ };
+
+ for (var i = 0; i < 256; ++i) {
+ lzwState.dictionaryValues[i] = i;
+ lzwState.dictionaryLengths[i] = 1;
+ }
+
+ this.lzwState = lzwState;
+ DecodeStream.call(this, maybeLength);
+ }
+
+ LZWStream.prototype = Object.create(DecodeStream.prototype);
+
+ LZWStream.prototype.readBits = function LZWStream_readBits(n) {
+ var bitsCached = this.bitsCached;
+ var cachedData = this.cachedData;
+
+ while (bitsCached < n) {
+ var c = this.str.getByte();
+
+ if (c === -1) {
+ this.eof = true;
+ return null;
+ }
+
+ cachedData = cachedData << 8 | c;
+ bitsCached += 8;
+ }
+
+ this.bitsCached = bitsCached -= n;
+ this.cachedData = cachedData;
+ this.lastCode = null;
+ return cachedData >>> bitsCached & (1 << n) - 1;
+ };
+
+ LZWStream.prototype.readBlock = function LZWStream_readBlock() {
+ var blockSize = 512;
+ var estimatedDecodedSize = blockSize * 2,
+ decodedSizeDelta = blockSize;
+ var i, j, q;
+ var lzwState = this.lzwState;
+
+ if (!lzwState) {
+ return;
+ }
+
+ var earlyChange = lzwState.earlyChange;
+ var nextCode = lzwState.nextCode;
+ var dictionaryValues = lzwState.dictionaryValues;
+ var dictionaryLengths = lzwState.dictionaryLengths;
+ var dictionaryPrevCodes = lzwState.dictionaryPrevCodes;
+ var codeLength = lzwState.codeLength;
+ var prevCode = lzwState.prevCode;
+ var currentSequence = lzwState.currentSequence;
+ var currentSequenceLength = lzwState.currentSequenceLength;
+ var decodedLength = 0;
+ var currentBufferLength = this.bufferLength;
+ var buffer = this.ensureBuffer(this.bufferLength + estimatedDecodedSize);
+
+ for (i = 0; i < blockSize; i++) {
+ var code = this.readBits(codeLength);
+ var hasPrev = currentSequenceLength > 0;
+
+ if (code < 256) {
+ currentSequence[0] = code;
+ currentSequenceLength = 1;
+ } else if (code >= 258) {
+ if (code < nextCode) {
+ currentSequenceLength = dictionaryLengths[code];
+
+ for (j = currentSequenceLength - 1, q = code; j >= 0; j--) {
+ currentSequence[j] = dictionaryValues[q];
+ q = dictionaryPrevCodes[q];
+ }
+ } else {
+ currentSequence[currentSequenceLength++] = currentSequence[0];
+ }
+ } else if (code === 256) {
+ codeLength = 9;
+ nextCode = 258;
+ currentSequenceLength = 0;
+ continue;
+ } else {
+ this.eof = true;
+ delete this.lzwState;
+ break;
+ }
+
+ if (hasPrev) {
+ dictionaryPrevCodes[nextCode] = prevCode;
+ dictionaryLengths[nextCode] = dictionaryLengths[prevCode] + 1;
+ dictionaryValues[nextCode] = currentSequence[0];
+ nextCode++;
+ codeLength = nextCode + earlyChange & nextCode + earlyChange - 1 ? codeLength : Math.min(Math.log(nextCode + earlyChange) / 0.6931471805599453 + 1, 12) | 0;
+ }
+
+ prevCode = code;
+ decodedLength += currentSequenceLength;
+
+ if (estimatedDecodedSize < decodedLength) {
+ do {
+ estimatedDecodedSize += decodedSizeDelta;
+ } while (estimatedDecodedSize < decodedLength);
+
+ buffer = this.ensureBuffer(this.bufferLength + estimatedDecodedSize);
+ }
+
+ for (j = 0; j < currentSequenceLength; j++) {
+ buffer[currentBufferLength++] = currentSequence[j];
+ }
+ }
+
+ lzwState.nextCode = nextCode;
+ lzwState.codeLength = codeLength;
+ lzwState.prevCode = prevCode;
+ lzwState.currentSequenceLength = currentSequenceLength;
+ this.bufferLength = currentBufferLength;
+ };
+
+ return LZWStream;
+}();
+
+exports.LZWStream = LZWStream;
+
+var NullStream = function NullStreamClosure() {
+ function NullStream() {
+ Stream.call(this, new Uint8Array(0));
+ }
+
+ NullStream.prototype = Stream.prototype;
+ return NullStream;
+}();
+
+exports.NullStream = NullStream;
+
+/***/ }),
+/* 159 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.CCITTFaxStream = void 0;
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _ccitt = __w_pdfjs_require__(160);
+
+var _stream = __w_pdfjs_require__(158);
+
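+// CCITTFaxStream: /CCITTFaxDecode filter; thin DecodeStream wrapper that pulls one output byte per CCITTFaxDecoder.readNextChar() call.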
+var CCITTFaxStream = function CCITTFaxStreamClosure() {
+ function CCITTFaxStream(str, maybeLength, params) {
+ this.str = str;
+ this.dict = str.dict;
+
+ if (!(0, _primitives.isDict)(params)) {
+ params = _primitives.Dict.empty;
+ }
+
+ var source = {
+ next: function next() {
+ return str.getByte();
+ }
+ };
+ this.ccittFaxDecoder = new _ccitt.CCITTFaxDecoder(source, {
+ K: params.get('K'),
+ EndOfLine: params.get('EndOfLine'),
+ EncodedByteAlign: params.get('EncodedByteAlign'),
+ Columns: params.get('Columns'),
+ Rows: params.get('Rows'),
+ EndOfBlock: params.get('EndOfBlock'),
+ BlackIs1: params.get('BlackIs1')
+ });
+
+ _stream.DecodeStream.call(this, maybeLength);
+ }
+
+ CCITTFaxStream.prototype = Object.create(_stream.DecodeStream.prototype);
+
+ CCITTFaxStream.prototype.readBlock = function () {
+ while (!this.eof) {
+ var c = this.ccittFaxDecoder.readNextChar();
+
+ if (c === -1) {
+ this.eof = true;
+ return;
+ }
+
+ this.ensureBuffer(this.bufferLength + 1);
+ this.buffer[this.bufferLength++] = c;
+ }
+ };
+
+ return CCITTFaxStream;
+}();
+
+exports.CCITTFaxStream = CCITTFaxStream;
+
+/***/ }),
+/* 160 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.CCITTFaxDecoder = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
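+// CCITTFaxDecoder: CCITT Group 3/4 (T.4/T.6) fax decoder fed by a source.next() byte callback; K < 0 selects pure 2-D coding, K > 0 mixed 1-D/2-D, K = 0 pure 1-D.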
+var CCITTFaxDecoder = function CCITTFaxDecoder() {
+ var ccittEOL = -2;
+ var ccittEOF = -1;
+ var twoDimPass = 0;
+ var twoDimHoriz = 1;
+ var twoDimVert0 = 2;
+ var twoDimVertR1 = 3;
+ var twoDimVertL1 = 4;
+ var twoDimVertR2 = 5;
+ var twoDimVertL2 = 6;
+ var twoDimVertR3 = 7;
+ var twoDimVertL3 = 8;
+ var twoDimTable = [[-1, -1], [-1, -1], [7, twoDimVertL3], [7, twoDimVertR3], [6, twoDimVertL2], [6, twoDimVertL2], [6, twoDimVertR2], [6, twoDimVertR2], [4, twoDimPass], [4, twoDimPass], [4, twoDimPass], [4, twoDimPass], [4, twoDimPass], [4, twoDimPass], [4, twoDimPass], [4, twoDimPass], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimHoriz], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertL1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [3, twoDimVertR1], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0], [1, twoDimVert0]];
+ var whiteTable1 = [[-1, -1], [12, ccittEOL], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [11, 1792], [11, 1792], [12, 1984], [12, 2048], [12, 2112], [12, 2176], [12, 2240], [12, 2304], [11, 1856], [11, 1856], [11, 1920], [11, 1920], [12, 2368], [12, 2432], [12, 2496], [12, 2560]];
+ var whiteTable2 = [[-1, -1], [-1, -1], [-1, -1], [-1, -1], [8, 29], [8, 29], [8, 30], [8, 30], [8, 45], [8, 45], [8, 46], [8, 46], [7, 22], [7, 22], [7, 22], [7, 22], [7, 23], [7, 23], [7, 23], [7, 23], [8, 47], [8, 47], [8, 48], [8, 48], [6, 13], [6, 13], [6, 13], [6, 13], [6, 13], [6, 13], [6, 13], [6, 13], [7, 20], [7, 20], [7, 20], [7, 20], [8, 33], [8, 33], [8, 34], [8, 34], [8, 35], [8, 35], [8, 36], [8, 36], [8, 37], [8, 37], [8, 38], [8, 38], [7, 19], [7, 19], [7, 19], [7, 19], [8, 31], [8, 31], [8, 32], [8, 32], [6, 1], [6, 1], [6, 1], [6, 1], [6, 1], [6, 1], [6, 1], [6, 1], [6, 12], [6, 12], [6, 12], [6, 12], [6, 12], [6, 12], [6, 12], [6, 12], [8, 53], [8, 53], [8, 54], [8, 54], [7, 26], [7, 26], [7, 26], [7, 26], [8, 39], [8, 39], [8, 40], [8, 40], [8, 41], [8, 41], [8, 42], [8, 42], [8, 43], [8, 43], [8, 44], [8, 44], [7, 21], [7, 21], [7, 21], [7, 21], [7, 28], [7, 28], [7, 28], [7, 28], [8, 61], [8, 61], [8, 62], [8, 62], [8, 63], [8, 63], [8, 0], [8, 0], [8, 320], [8, 320], [8, 384], [8, 384], [5, 10], [5, 10], [5, 10], [5, 10], [5, 10], [5, 10], [5, 10], [5, 10], [5, 10], [5, 10], [5, 10], [5, 10], [5, 10], [5, 10], [5, 10], [5, 10], [5, 11], [5, 11], [5, 11], [5, 11], [5, 11], [5, 11], [5, 11], [5, 11], [5, 11], [5, 11], [5, 11], [5, 11], [5, 11], [5, 11], [5, 11], [5, 11], [7, 27], [7, 27], [7, 27], [7, 27], [8, 59], [8, 59], [8, 60], [8, 60], [9, 1472], [9, 1536], [9, 1600], [9, 1728], [7, 18], [7, 18], [7, 18], [7, 18], [7, 24], [7, 24], [7, 24], [7, 24], [8, 49], [8, 49], [8, 50], [8, 50], [8, 51], [8, 51], [8, 52], [8, 52], [7, 25], [7, 25], [7, 25], [7, 25], [8, 55], [8, 55], [8, 56], [8, 56], [8, 57], [8, 57], [8, 58], [8, 58], [6, 192], [6, 192], [6, 192], [6, 192], [6, 192], [6, 192], [6, 192], [6, 192], [6, 1664], [6, 1664], [6, 1664], [6, 1664], [6, 1664], [6, 1664], [6, 1664], [6, 1664], [8, 448], [8, 448], [8, 512], [8, 512], [9, 704], [9, 768], [8, 640], [8, 640], [8, 576], [8, 576], [9, 832], [9, 896], [9, 960], [9, 1024], [9, 1088], [9, 1152], [9, 1216], [9, 1280], [9, 1344], [9, 1408], [7, 256], [7, 256], [7, 256], [7, 256], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 2], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [4, 3], [5, 128], [5, 128], [5, 128], [5, 128], [5, 128], [5, 128], [5, 128], [5, 128], [5, 128], [5, 128], [5, 128], [5, 128], [5, 128], [5, 128], [5, 128], [5, 128], [5, 8], [5, 8], [5, 8], [5, 8], [5, 8], [5, 8], [5, 8], [5, 8], [5, 8], [5, 8], [5, 8], [5, 8], [5, 8], [5, 8], [5, 8], [5, 8], [5, 9], [5, 9], [5, 9], [5, 9], [5, 9], [5, 9], [5, 9], [5, 9], [5, 9], [5, 9], [5, 9], [5, 9], [5, 9], [5, 9], [5, 9], [5, 9], [6, 16], [6, 16], [6, 16], [6, 16], [6, 16], [6, 16], [6, 16], [6, 16], [6, 17], [6, 17], [6, 17], [6, 17], [6, 17], [6, 17], [6, 17], [6, 17], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 4], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], 
[4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [4, 5], [6, 14], [6, 14], [6, 14], [6, 14], [6, 14], [6, 14], [6, 14], [6, 14], [6, 15], [6, 15], [6, 15], [6, 15], [6, 15], [6, 15], [6, 15], [6, 15], [5, 64], [5, 64], [5, 64], [5, 64], [5, 64], [5, 64], [5, 64], [5, 64], [5, 64], [5, 64], [5, 64], [5, 64], [5, 64], [5, 64], [5, 64], [5, 64], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 6], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7], [4, 7]];
+ var blackTable1 = [[-1, -1], [-1, -1], [12, ccittEOL], [12, ccittEOL], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [-1, -1], [11, 1792], [11, 1792], [11, 1792], [11, 1792], [12, 1984], [12, 1984], [12, 2048], [12, 2048], [12, 2112], [12, 2112], [12, 2176], [12, 2176], [12, 2240], [12, 2240], [12, 2304], [12, 2304], [11, 1856], [11, 1856], [11, 1856], [11, 1856], [11, 1920], [11, 1920], [11, 1920], [11, 1920], [12, 2368], [12, 2368], [12, 2432], [12, 2432], [12, 2496], [12, 2496], [12, 2560], [12, 2560], [10, 18], [10, 18], [10, 18], [10, 18], [10, 18], [10, 18], [10, 18], [10, 18], [12, 52], [12, 52], [13, 640], [13, 704], [13, 768], [13, 832], [12, 55], [12, 55], [12, 56], [12, 56], [13, 1280], [13, 1344], [13, 1408], [13, 1472], [12, 59], [12, 59], [12, 60], [12, 60], [13, 1536], [13, 1600], [11, 24], [11, 24], [11, 24], [11, 24], [11, 25], [11, 25], [11, 25], [11, 25], [13, 1664], [13, 1728], [12, 320], [12, 320], [12, 384], [12, 384], [12, 448], [12, 448], [13, 512], [13, 576], [12, 53], [12, 53], [12, 54], [12, 54], [13, 896], [13, 960], [13, 1024], [13, 1088], [13, 1152], [13, 1216], [10, 64], [10, 64], [10, 64], [10, 64], [10, 64], [10, 64], [10, 64], [10, 64]];
+ var blackTable2 = [[8, 13], [8, 13], [8, 13], [8, 13], [8, 13], [8, 13], [8, 13], [8, 13], [8, 13], [8, 13], [8, 13], [8, 13], [8, 13], [8, 13], [8, 13], [8, 13], [11, 23], [11, 23], [12, 50], [12, 51], [12, 44], [12, 45], [12, 46], [12, 47], [12, 57], [12, 58], [12, 61], [12, 256], [10, 16], [10, 16], [10, 16], [10, 16], [10, 17], [10, 17], [10, 17], [10, 17], [12, 48], [12, 49], [12, 62], [12, 63], [12, 30], [12, 31], [12, 32], [12, 33], [12, 40], [12, 41], [11, 22], [11, 22], [8, 14], [8, 14], [8, 14], [8, 14], [8, 14], [8, 14], [8, 14], [8, 14], [8, 14], [8, 14], [8, 14], [8, 14], [8, 14], [8, 14], [8, 14], [8, 14], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 10], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [7, 11], [9, 15], [9, 15], [9, 15], [9, 15], [9, 15], [9, 15], [9, 15], [9, 15], [12, 128], [12, 192], [12, 26], [12, 27], [12, 28], [12, 29], [11, 19], [11, 19], [11, 20], [11, 20], [12, 34], [12, 35], [12, 36], [12, 37], [12, 38], [12, 39], [11, 21], [11, 21], [12, 42], [12, 43], [10, 0], [10, 0], [10, 0], [10, 0], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12], [7, 12]];
+ var blackTable3 = [[-1, -1], [-1, -1], [-1, -1], [-1, -1], [6, 9], [6, 8], [5, 7], [5, 7], [4, 6], [4, 6], [4, 6], [4, 6], [4, 5], [4, 5], [4, 5], [4, 5], [3, 1], [3, 1], [3, 1], [3, 1], [3, 1], [3, 1], [3, 1], [3, 1], [3, 4], [3, 4], [3, 4], [3, 4], [3, 4], [3, 4], [3, 4], [3, 4], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2]];
+
+ function CCITTFaxDecoder(source) {
+ var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ if (!source || typeof source.next !== 'function') {
+ throw new Error('CCITTFaxDecoder - invalid "source" parameter.');
+ }
+
+ this.source = source;
+ this.eof = false;
+ this.encoding = options['K'] || 0;
+ this.eoline = options['EndOfLine'] || false;
+ this.byteAlign = options['EncodedByteAlign'] || false;
+ this.columns = options['Columns'] || 1728;
+ this.rows = options['Rows'] || 0;
+ var eoblock = options['EndOfBlock'];
+
+ if (eoblock === null || eoblock === undefined) {
+ eoblock = true;
+ }
+
+ this.eoblock = eoblock;
+ this.black = options['BlackIs1'] || false;
+ this.codingLine = new Uint32Array(this.columns + 1);
+ this.refLine = new Uint32Array(this.columns + 2);
+ this.codingLine[0] = this.columns;
+ this.codingPos = 0;
+ this.row = 0;
+ this.nextLine2D = this.encoding < 0;
+ this.inputBits = 0;
+ this.inputBuf = 0;
+ this.outputBits = 0;
+ this.rowsDone = false;
+ var code1;
+
+ while ((code1 = this._lookBits(12)) === 0) {
+ this._eatBits(1);
+ }
+
+ if (code1 === 1) {
+ this._eatBits(12);
+ }
+
+ if (this.encoding > 0) {
+ this.nextLine2D = !this._lookBits(1);
+
+ this._eatBits(1);
+ }
+ }
+
+ CCITTFaxDecoder.prototype = {
+ readNextChar: function readNextChar() {
+ if (this.eof) {
+ return -1;
+ }
+
+ var refLine = this.refLine;
+ var codingLine = this.codingLine;
+ var columns = this.columns;
+ var refPos, blackPixels, bits, i;
+
+ if (this.outputBits === 0) {
+ if (this.rowsDone) {
+ this.eof = true;
+ }
+
+ if (this.eof) {
+ return -1;
+ }
+
+ this.err = false;
+ var code1, code2, code3;
+
+ if (this.nextLine2D) {
+ for (i = 0; codingLine[i] < columns; ++i) {
+ refLine[i] = codingLine[i];
+ }
+
+ refLine[i++] = columns;
+ refLine[i] = columns;
+ codingLine[0] = 0;
+ this.codingPos = 0;
+ refPos = 0;
+ blackPixels = 0;
+
+ while (codingLine[this.codingPos] < columns) {
+ code1 = this._getTwoDimCode();
+
+ switch (code1) {
+ case twoDimPass:
+ this._addPixels(refLine[refPos + 1], blackPixels);
+
+ if (refLine[refPos + 1] < columns) {
+ refPos += 2;
+ }
+
+ break;
+
+ case twoDimHoriz:
+ code1 = code2 = 0;
+
+ if (blackPixels) {
+ do {
+ code1 += code3 = this._getBlackCode();
+ } while (code3 >= 64);
+
+ do {
+ code2 += code3 = this._getWhiteCode();
+ } while (code3 >= 64);
+ } else {
+ do {
+ code1 += code3 = this._getWhiteCode();
+ } while (code3 >= 64);
+
+ do {
+ code2 += code3 = this._getBlackCode();
+ } while (code3 >= 64);
+ }
+
+ this._addPixels(codingLine[this.codingPos] + code1, blackPixels);
+
+ if (codingLine[this.codingPos] < columns) {
+ this._addPixels(codingLine[this.codingPos] + code2, blackPixels ^ 1);
+ }
+
+ while (refLine[refPos] <= codingLine[this.codingPos] && refLine[refPos] < columns) {
+ refPos += 2;
+ }
+
+ break;
+
+ case twoDimVertR3:
+ this._addPixels(refLine[refPos] + 3, blackPixels);
+
+ blackPixels ^= 1;
+
+ if (codingLine[this.codingPos] < columns) {
+ ++refPos;
+
+ while (refLine[refPos] <= codingLine[this.codingPos] && refLine[refPos] < columns) {
+ refPos += 2;
+ }
+ }
+
+ break;
+
+ case twoDimVertR2:
+ this._addPixels(refLine[refPos] + 2, blackPixels);
+
+ blackPixels ^= 1;
+
+ if (codingLine[this.codingPos] < columns) {
+ ++refPos;
+
+ while (refLine[refPos] <= codingLine[this.codingPos] && refLine[refPos] < columns) {
+ refPos += 2;
+ }
+ }
+
+ break;
+
+ case twoDimVertR1:
+ this._addPixels(refLine[refPos] + 1, blackPixels);
+
+ blackPixels ^= 1;
+
+ if (codingLine[this.codingPos] < columns) {
+ ++refPos;
+
+ while (refLine[refPos] <= codingLine[this.codingPos] && refLine[refPos] < columns) {
+ refPos += 2;
+ }
+ }
+
+ break;
+
+ case twoDimVert0:
+ this._addPixels(refLine[refPos], blackPixels);
+
+ blackPixels ^= 1;
+
+ if (codingLine[this.codingPos] < columns) {
+ ++refPos;
+
+ while (refLine[refPos] <= codingLine[this.codingPos] && refLine[refPos] < columns) {
+ refPos += 2;
+ }
+ }
+
+ break;
+
+ case twoDimVertL3:
+ this._addPixelsNeg(refLine[refPos] - 3, blackPixels);
+
+ blackPixels ^= 1;
+
+ if (codingLine[this.codingPos] < columns) {
+ if (refPos > 0) {
+ --refPos;
+ } else {
+ ++refPos;
+ }
+
+ while (refLine[refPos] <= codingLine[this.codingPos] && refLine[refPos] < columns) {
+ refPos += 2;
+ }
+ }
+
+ break;
+
+ case twoDimVertL2:
+ this._addPixelsNeg(refLine[refPos] - 2, blackPixels);
+
+ blackPixels ^= 1;
+
+ if (codingLine[this.codingPos] < columns) {
+ if (refPos > 0) {
+ --refPos;
+ } else {
+ ++refPos;
+ }
+
+ while (refLine[refPos] <= codingLine[this.codingPos] && refLine[refPos] < columns) {
+ refPos += 2;
+ }
+ }
+
+ break;
+
+ case twoDimVertL1:
+ this._addPixelsNeg(refLine[refPos] - 1, blackPixels);
+
+ blackPixels ^= 1;
+
+ if (codingLine[this.codingPos] < columns) {
+ if (refPos > 0) {
+ --refPos;
+ } else {
+ ++refPos;
+ }
+
+ while (refLine[refPos] <= codingLine[this.codingPos] && refLine[refPos] < columns) {
+ refPos += 2;
+ }
+ }
+
+ break;
+
+ case ccittEOF:
+ this._addPixels(columns, 0);
+
+ this.eof = true;
+ break;
+
+ default:
+ (0, _util.info)('bad 2d code');
+
+ this._addPixels(columns, 0);
+
+ this.err = true;
+ }
+ }
+ } else {
+ codingLine[0] = 0;
+ this.codingPos = 0;
+ blackPixels = 0;
+
+ while (codingLine[this.codingPos] < columns) {
+ code1 = 0;
+
+ if (blackPixels) {
+ do {
+ code1 += code3 = this._getBlackCode();
+ } while (code3 >= 64);
+ } else {
+ do {
+ code1 += code3 = this._getWhiteCode();
+ } while (code3 >= 64);
+ }
+
+ this._addPixels(codingLine[this.codingPos] + code1, blackPixels);
+
+ blackPixels ^= 1;
+ }
+ }
+
+ var gotEOL = false;
+
+ if (this.byteAlign) {
+ this.inputBits &= ~7;
+ }
+
+ if (!this.eoblock && this.row === this.rows - 1) {
+ this.rowsDone = true;
+ } else {
+ code1 = this._lookBits(12);
+
+ if (this.eoline) {
+ while (code1 !== ccittEOF && code1 !== 1) {
+ this._eatBits(1);
+
+ code1 = this._lookBits(12);
+ }
+ } else {
+ while (code1 === 0) {
+ this._eatBits(1);
+
+ code1 = this._lookBits(12);
+ }
+ }
+
+ if (code1 === 1) {
+ this._eatBits(12);
+
+ gotEOL = true;
+ } else if (code1 === ccittEOF) {
+ this.eof = true;
+ }
+ }
+
+ if (!this.eof && this.encoding > 0 && !this.rowsDone) {
+ this.nextLine2D = !this._lookBits(1);
+
+ this._eatBits(1);
+ }
+
+ if (this.eoblock && gotEOL && this.byteAlign) {
+ code1 = this._lookBits(12);
+
+ if (code1 === 1) {
+ this._eatBits(12);
+
+ if (this.encoding > 0) {
+ this._lookBits(1);
+
+ this._eatBits(1);
+ }
+
+ if (this.encoding >= 0) {
+ for (i = 0; i < 4; ++i) {
+ code1 = this._lookBits(12);
+
+ if (code1 !== 1) {
+ (0, _util.info)('bad rtc code: ' + code1);
+ }
+
+ this._eatBits(12);
+
+ if (this.encoding > 0) {
+ this._lookBits(1);
+
+ this._eatBits(1);
+ }
+ }
+ }
+
+ this.eof = true;
+ }
+ } else if (this.err && this.eoline) {
+ while (true) {
+ code1 = this._lookBits(13);
+
+ if (code1 === ccittEOF) {
+ this.eof = true;
+ return -1;
+ }
+
+ if (code1 >> 1 === 1) {
+ break;
+ }
+
+ this._eatBits(1);
+ }
+
+ this._eatBits(12);
+
+ if (this.encoding > 0) {
+ this._eatBits(1);
+
+ this.nextLine2D = !(code1 & 1);
+ }
+ }
+
+ if (codingLine[0] > 0) {
+ this.outputBits = codingLine[this.codingPos = 0];
+ } else {
+ this.outputBits = codingLine[this.codingPos = 1];
+ }
+
+ this.row++;
+ }
+
+ var c;
+
+ if (this.outputBits >= 8) {
+ c = this.codingPos & 1 ? 0 : 0xFF;
+ this.outputBits -= 8;
+
+ if (this.outputBits === 0 && codingLine[this.codingPos] < columns) {
+ this.codingPos++;
+ this.outputBits = codingLine[this.codingPos] - codingLine[this.codingPos - 1];
+ }
+ } else {
+ bits = 8;
+ c = 0;
+
+ do {
+ if (this.outputBits > bits) {
+ c <<= bits;
+
+ if (!(this.codingPos & 1)) {
+ c |= 0xFF >> 8 - bits;
+ }
+
+ this.outputBits -= bits;
+ bits = 0;
+ } else {
+ c <<= this.outputBits;
+
+ if (!(this.codingPos & 1)) {
+ c |= 0xFF >> 8 - this.outputBits;
+ }
+
+ bits -= this.outputBits;
+ this.outputBits = 0;
+
+ if (codingLine[this.codingPos] < columns) {
+ this.codingPos++;
+ this.outputBits = codingLine[this.codingPos] - codingLine[this.codingPos - 1];
+ } else if (bits > 0) {
+ c <<= bits;
+ bits = 0;
+ }
+ }
+ } while (bits);
+ }
+
+ if (this.black) {
+ c ^= 0xFF;
+ }
+
+ return c;
+ },
+ _addPixels: function _addPixels(a1, blackPixels) {
+ var codingLine = this.codingLine;
+ var codingPos = this.codingPos;
+
+ if (a1 > codingLine[codingPos]) {
+ if (a1 > this.columns) {
+ (0, _util.info)('row is wrong length');
+ this.err = true;
+ a1 = this.columns;
+ }
+
+ if (codingPos & 1 ^ blackPixels) {
+ ++codingPos;
+ }
+
+ codingLine[codingPos] = a1;
+ }
+
+ this.codingPos = codingPos;
+ },
+ _addPixelsNeg: function _addPixelsNeg(a1, blackPixels) {
+ var codingLine = this.codingLine;
+ var codingPos = this.codingPos;
+
+ if (a1 > codingLine[codingPos]) {
+ if (a1 > this.columns) {
+ (0, _util.info)('row is wrong length');
+ this.err = true;
+ a1 = this.columns;
+ }
+
+ if (codingPos & 1 ^ blackPixels) {
+ ++codingPos;
+ }
+
+ codingLine[codingPos] = a1;
+ } else if (a1 < codingLine[codingPos]) {
+ if (a1 < 0) {
+ (0, _util.info)('invalid code');
+ this.err = true;
+ a1 = 0;
+ }
+
+ while (codingPos > 0 && a1 < codingLine[codingPos - 1]) {
+ --codingPos;
+ }
+
+ codingLine[codingPos] = a1;
+ }
+
+ this.codingPos = codingPos;
+ },
+ _findTableCode: function _findTableCode(start, end, table, limit) {
+ var limitValue = limit || 0;
+
+ for (var i = start; i <= end; ++i) {
+ var code = this._lookBits(i);
+
+ if (code === ccittEOF) {
+ return [true, 1, false];
+ }
+
+ if (i < end) {
+ code <<= end - i;
+ }
+
+ if (!limitValue || code >= limitValue) {
+ var p = table[code - limitValue];
+
+ if (p[0] === i) {
+ this._eatBits(i);
+
+ return [true, p[1], true];
+ }
+ }
+ }
+
+ return [false, 0, false];
+ },
+ _getTwoDimCode: function _getTwoDimCode() {
+ var code = 0;
+ var p;
+
+ if (this.eoblock) {
+ code = this._lookBits(7);
+ p = twoDimTable[code];
+
+ if (p && p[0] > 0) {
+ this._eatBits(p[0]);
+
+ return p[1];
+ }
+ } else {
+ var result = this._findTableCode(1, 7, twoDimTable);
+
+ if (result[0] && result[2]) {
+ return result[1];
+ }
+ }
+
+ (0, _util.info)('Bad two dim code');
+ return ccittEOF;
+ },
+ _getWhiteCode: function _getWhiteCode() {
+ var code = 0;
+ var p;
+
+ if (this.eoblock) {
+ code = this._lookBits(12);
+
+ if (code === ccittEOF) {
+ return 1;
+ }
+
+ if (code >> 5 === 0) {
+ p = whiteTable1[code];
+ } else {
+ p = whiteTable2[code >> 3];
+ }
+
+ if (p[0] > 0) {
+ this._eatBits(p[0]);
+
+ return p[1];
+ }
+ } else {
+ var result = this._findTableCode(1, 9, whiteTable2);
+
+ if (result[0]) {
+ return result[1];
+ }
+
+ result = this._findTableCode(11, 12, whiteTable1);
+
+ if (result[0]) {
+ return result[1];
+ }
+ }
+
+ (0, _util.info)('bad white code');
+
+ this._eatBits(1);
+
+ return 1;
+ },
+ _getBlackCode: function _getBlackCode() {
+ var code, p;
+
+ if (this.eoblock) {
+ code = this._lookBits(13);
+
+ if (code === ccittEOF) {
+ return 1;
+ }
+
+ if (code >> 7 === 0) {
+ p = blackTable1[code];
+ } else if (code >> 9 === 0 && code >> 7 !== 0) {
+ p = blackTable2[(code >> 1) - 64];
+ } else {
+ p = blackTable3[code >> 7];
+ }
+
+ if (p[0] > 0) {
+ this._eatBits(p[0]);
+
+ return p[1];
+ }
+ } else {
+ var result = this._findTableCode(2, 6, blackTable3);
+
+ if (result[0]) {
+ return result[1];
+ }
+
+ result = this._findTableCode(7, 12, blackTable2, 64);
+
+ if (result[0]) {
+ return result[1];
+ }
+
+ result = this._findTableCode(10, 13, blackTable1);
+
+ if (result[0]) {
+ return result[1];
+ }
+ }
+
+ (0, _util.info)('bad black code');
+
+ this._eatBits(1);
+
+ return 1;
+ },
+ _lookBits: function _lookBits(n) {
+ var c;
+
+ while (this.inputBits < n) {
+ if ((c = this.source.next()) === -1) {
+ if (this.inputBits === 0) {
+ return ccittEOF;
+ }
+
+ return this.inputBuf << n - this.inputBits & 0xFFFF >> 16 - n;
+ }
+
+ this.inputBuf = this.inputBuf << 8 | c;
+ this.inputBits += 8;
+ }
+
+ return this.inputBuf >> this.inputBits - n & 0xFFFF >> 16 - n;
+ },
+ _eatBits: function _eatBits(n) {
+ if ((this.inputBits -= n) < 0) {
+ this.inputBits = 0;
+ }
+ }
+ };
+ return CCITTFaxDecoder;
+}();
+
+exports.CCITTFaxDecoder = CCITTFaxDecoder;
+
+/***/ }),
+/* 161 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.Jbig2Stream = void 0;
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _stream = __w_pdfjs_require__(158);
+
+var _jbig = __w_pdfjs_require__(162);
+
+var _util = __w_pdfjs_require__(5);
+
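+// Jbig2Stream: DecodeStream subclass for JBIG2-encoded data. readBlock() gathers the optional
+// JBIG2Globals stream and the stream bytes into chunks, decodes them with Jbig2Image, and XORs
+// every output byte with 0xFF (JBIG2 encodes black as 1).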
+var Jbig2Stream = function Jbig2StreamClosure() {
+ function Jbig2Stream(stream, maybeLength, dict, params) {
+ this.stream = stream;
+ this.maybeLength = maybeLength;
+ this.dict = dict;
+ this.params = params;
+
+ _stream.DecodeStream.call(this, maybeLength);
+ }
+
+ Jbig2Stream.prototype = Object.create(_stream.DecodeStream.prototype);
+ Object.defineProperty(Jbig2Stream.prototype, 'bytes', {
+ get: function get() {
+ return (0, _util.shadow)(this, 'bytes', this.stream.getBytes(this.maybeLength));
+ },
+ configurable: true
+ });
+
+ Jbig2Stream.prototype.ensureBuffer = function (requested) {};
+
+ Jbig2Stream.prototype.readBlock = function () {
+ if (this.eof) {
+ return;
+ }
+
+ var jbig2Image = new _jbig.Jbig2Image();
+ var chunks = [];
+
+ if ((0, _primitives.isDict)(this.params)) {
+ var globalsStream = this.params.get('JBIG2Globals');
+
+ if ((0, _primitives.isStream)(globalsStream)) {
+ var globals = globalsStream.getBytes();
+ chunks.push({
+ data: globals,
+ start: 0,
+ end: globals.length
+ });
+ }
+ }
+
+ chunks.push({
+ data: this.bytes,
+ start: 0,
+ end: this.bytes.length
+ });
+ var data = jbig2Image.parseChunks(chunks);
+ var dataLength = data.length;
+
+ for (var i = 0; i < dataLength; i++) {
+ data[i] ^= 0xFF;
+ }
+
+ this.buffer = data;
+ this.bufferLength = dataLength;
+ this.eof = true;
+ };
+
+ return Jbig2Stream;
+}();
+
+exports.Jbig2Stream = Jbig2Stream;
+
+/***/ }),
+/* 162 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.Jbig2Image = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _arithmetic_decoder = __w_pdfjs_require__(163);
+
+var _ccitt = __w_pdfjs_require__(160);
+
+var Jbig2Error = function Jbig2ErrorClosure() {
+ function Jbig2Error(msg) {
+ this.message = 'JBIG2 error: ' + msg;
+ }
+
+ Jbig2Error.prototype = new Error();
+ Jbig2Error.prototype.name = 'Jbig2Error';
+ Jbig2Error.constructor = Jbig2Error;
+ return Jbig2Error;
+}();
+
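+// Jbig2Image: the JBIG2 decoder proper (ITU-T T.88). Segment headers are parsed first, then each
+// segment is dispatched to a SimpleSegmentVisitor callback that decodes it onto the page buffer.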
+var Jbig2Image = function Jbig2ImageClosure() {
+ function ContextCache() {}
+
+ ContextCache.prototype = {
+ getContexts: function getContexts(id) {
+ if (id in this) {
+ return this[id];
+ }
+
+ return this[id] = new Int8Array(1 << 16);
+ }
+ };
+
+ function DecodingContext(data, start, end) {
+ this.data = data;
+ this.start = start;
+ this.end = end;
+ }
+
+ DecodingContext.prototype = {
+ get decoder() {
+ var decoder = new _arithmetic_decoder.ArithmeticDecoder(this.data, this.start, this.end);
+ return (0, _util.shadow)(this, 'decoder', decoder);
+ },
+
+ get contextCache() {
+ var cache = new ContextCache();
+ return (0, _util.shadow)(this, 'contextCache', cache);
+ }
+
+ };
+
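+ // Arithmetic integer decoding procedure shared by the IADH/IADW/IAEX/... contexts. Returns the
+ // decoded signed value, or null for the out-of-band (OOB) value.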
+ function decodeInteger(contextCache, procedure, decoder) {
+ var contexts = contextCache.getContexts(procedure);
+ var prev = 1;
+
+ function readBits(length) {
+ var v = 0;
+
+ for (var i = 0; i < length; i++) {
+ var bit = decoder.readBit(contexts, prev);
+ prev = prev < 256 ? prev << 1 | bit : (prev << 1 | bit) & 511 | 256;
+ v = v << 1 | bit;
+ }
+
+ return v >>> 0;
+ }
+
+ var sign = readBits(1);
+ var value = readBits(1) ? readBits(1) ? readBits(1) ? readBits(1) ? readBits(1) ? readBits(32) + 4436 : readBits(12) + 340 : readBits(8) + 84 : readBits(6) + 20 : readBits(4) + 4 : readBits(2);
+ return sign === 0 ? value : value > 0 ? -value : null;
+ }
+
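+ // Decodes a symbol ID of codeLength bits using the IAID contexts.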
+ function decodeIAID(contextCache, decoder, codeLength) {
+ var contexts = contextCache.getContexts('IAID');
+ var prev = 1;
+
+ for (var i = 0; i < codeLength; i++) {
+ var bit = decoder.readBit(contexts, prev);
+ prev = prev << 1 | bit;
+ }
+
+ if (codeLength < 31) {
+ return prev & (1 << codeLength) - 1;
+ }
+
+ return prev & 0x7FFFFFFF;
+ }
+
+ var SegmentTypes = ['SymbolDictionary', null, null, null, 'IntermediateTextRegion', null, 'ImmediateTextRegion', 'ImmediateLosslessTextRegion', null, null, null, null, null, null, null, null, 'PatternDictionary', null, null, null, 'IntermediateHalftoneRegion', null, 'ImmediateHalftoneRegion', 'ImmediateLosslessHalftoneRegion', null, null, null, null, null, null, null, null, null, null, null, null, 'IntermediateGenericRegion', null, 'ImmediateGenericRegion', 'ImmediateLosslessGenericRegion', 'IntermediateGenericRefinementRegion', null, 'ImmediateGenericRefinementRegion', 'ImmediateLosslessGenericRefinementRegion', null, null, null, null, 'PageInformation', 'EndOfPage', 'EndOfStripe', 'EndOfFile', 'Profiles', 'Tables', null, null, null, null, null, null, null, null, 'Extension'];
+ var CodingTemplates = [[{
+ x: -1,
+ y: -2
+ }, {
+ x: 0,
+ y: -2
+ }, {
+ x: 1,
+ y: -2
+ }, {
+ x: -2,
+ y: -1
+ }, {
+ x: -1,
+ y: -1
+ }, {
+ x: 0,
+ y: -1
+ }, {
+ x: 1,
+ y: -1
+ }, {
+ x: 2,
+ y: -1
+ }, {
+ x: -4,
+ y: 0
+ }, {
+ x: -3,
+ y: 0
+ }, {
+ x: -2,
+ y: 0
+ }, {
+ x: -1,
+ y: 0
+ }], [{
+ x: -1,
+ y: -2
+ }, {
+ x: 0,
+ y: -2
+ }, {
+ x: 1,
+ y: -2
+ }, {
+ x: 2,
+ y: -2
+ }, {
+ x: -2,
+ y: -1
+ }, {
+ x: -1,
+ y: -1
+ }, {
+ x: 0,
+ y: -1
+ }, {
+ x: 1,
+ y: -1
+ }, {
+ x: 2,
+ y: -1
+ }, {
+ x: -3,
+ y: 0
+ }, {
+ x: -2,
+ y: 0
+ }, {
+ x: -1,
+ y: 0
+ }], [{
+ x: -1,
+ y: -2
+ }, {
+ x: 0,
+ y: -2
+ }, {
+ x: 1,
+ y: -2
+ }, {
+ x: -2,
+ y: -1
+ }, {
+ x: -1,
+ y: -1
+ }, {
+ x: 0,
+ y: -1
+ }, {
+ x: 1,
+ y: -1
+ }, {
+ x: -2,
+ y: 0
+ }, {
+ x: -1,
+ y: 0
+ }], [{
+ x: -3,
+ y: -1
+ }, {
+ x: -2,
+ y: -1
+ }, {
+ x: -1,
+ y: -1
+ }, {
+ x: 0,
+ y: -1
+ }, {
+ x: 1,
+ y: -1
+ }, {
+ x: -4,
+ y: 0
+ }, {
+ x: -3,
+ y: 0
+ }, {
+ x: -2,
+ y: 0
+ }, {
+ x: -1,
+ y: 0
+ }]];
+ var RefinementTemplates = [{
+ coding: [{
+ x: 0,
+ y: -1
+ }, {
+ x: 1,
+ y: -1
+ }, {
+ x: -1,
+ y: 0
+ }],
+ reference: [{
+ x: 0,
+ y: -1
+ }, {
+ x: 1,
+ y: -1
+ }, {
+ x: -1,
+ y: 0
+ }, {
+ x: 0,
+ y: 0
+ }, {
+ x: 1,
+ y: 0
+ }, {
+ x: -1,
+ y: 1
+ }, {
+ x: 0,
+ y: 1
+ }, {
+ x: 1,
+ y: 1
+ }]
+ }, {
+ coding: [{
+ x: -1,
+ y: -1
+ }, {
+ x: 0,
+ y: -1
+ }, {
+ x: 1,
+ y: -1
+ }, {
+ x: -1,
+ y: 0
+ }],
+ reference: [{
+ x: 0,
+ y: -1
+ }, {
+ x: -1,
+ y: 0
+ }, {
+ x: 0,
+ y: 0
+ }, {
+ x: 1,
+ y: 0
+ }, {
+ x: 0,
+ y: 1
+ }, {
+ x: 1,
+ y: 1
+ }]
+ }];
+ var ReusedContexts = [0x9B25, 0x0795, 0x00E5, 0x0195];
+ var RefinementReusedContexts = [0x0020, 0x0008];
+
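+ // Fast path for generic region decoding with template 0 and the exact AT pixel layout checked
+ // for in decodeBitmap() below; the context label is updated incrementally per pixel.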
+ function decodeBitmapTemplate0(width, height, decodingContext) {
+ var decoder = decodingContext.decoder;
+ var contexts = decodingContext.contextCache.getContexts('GB');
+ var contextLabel,
+ i,
+ j,
+ pixel,
+ row,
+ row1,
+ row2,
+ bitmap = [];
+ var OLD_PIXEL_MASK = 0x7BF7;
+
+ for (i = 0; i < height; i++) {
+ row = bitmap[i] = new Uint8Array(width);
+ row1 = i < 1 ? row : bitmap[i - 1];
+ row2 = i < 2 ? row : bitmap[i - 2];
+ contextLabel = row2[0] << 13 | row2[1] << 12 | row2[2] << 11 | row1[0] << 7 | row1[1] << 6 | row1[2] << 5 | row1[3] << 4;
+
+ for (j = 0; j < width; j++) {
+ row[j] = pixel = decoder.readBit(contexts, contextLabel);
+ contextLabel = (contextLabel & OLD_PIXEL_MASK) << 1 | (j + 3 < width ? row2[j + 3] << 11 : 0) | (j + 4 < width ? row1[j + 4] << 4 : 0) | pixel;
+ }
+ }
+
+ return bitmap;
+ }
+
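+ // Generic region decoding: MMR-coded regions go through decodeMMRBitmap(), everything else is
+ // arithmetically decoded using CodingTemplates[templateIndex] plus the AT pixels.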
+ function decodeBitmap(mmr, width, height, templateIndex, prediction, skip, at, decodingContext) {
+ if (mmr) {
+ var input = new Reader(decodingContext.data, decodingContext.start, decodingContext.end);
+ return decodeMMRBitmap(input, width, height, false);
+ }
+
+ if (templateIndex === 0 && !skip && !prediction && at.length === 4 && at[0].x === 3 && at[0].y === -1 && at[1].x === -3 && at[1].y === -1 && at[2].x === 2 && at[2].y === -2 && at[3].x === -2 && at[3].y === -2) {
+ return decodeBitmapTemplate0(width, height, decodingContext);
+ }
+
+ var useskip = !!skip;
+ var template = CodingTemplates[templateIndex].concat(at);
+ template.sort(function (a, b) {
+ return a.y - b.y || a.x - b.x;
+ });
+ var templateLength = template.length;
+ var templateX = new Int8Array(templateLength);
+ var templateY = new Int8Array(templateLength);
+ var changingTemplateEntries = [];
+ var reuseMask = 0,
+ minX = 0,
+ maxX = 0,
+ minY = 0;
+ var c, k;
+
+ for (k = 0; k < templateLength; k++) {
+ templateX[k] = template[k].x;
+ templateY[k] = template[k].y;
+ minX = Math.min(minX, template[k].x);
+ maxX = Math.max(maxX, template[k].x);
+ minY = Math.min(minY, template[k].y);
+
+ if (k < templateLength - 1 && template[k].y === template[k + 1].y && template[k].x === template[k + 1].x - 1) {
+ reuseMask |= 1 << templateLength - 1 - k;
+ } else {
+ changingTemplateEntries.push(k);
+ }
+ }
+
+ var changingEntriesLength = changingTemplateEntries.length;
+ var changingTemplateX = new Int8Array(changingEntriesLength);
+ var changingTemplateY = new Int8Array(changingEntriesLength);
+ var changingTemplateBit = new Uint16Array(changingEntriesLength);
+
+ for (c = 0; c < changingEntriesLength; c++) {
+ k = changingTemplateEntries[c];
+ changingTemplateX[c] = template[k].x;
+ changingTemplateY[c] = template[k].y;
+ changingTemplateBit[c] = 1 << templateLength - 1 - k;
+ }
+
+ var sbb_left = -minX;
+ var sbb_top = -minY;
+ var sbb_right = width - maxX;
+ var pseudoPixelContext = ReusedContexts[templateIndex];
+ var row = new Uint8Array(width);
+ var bitmap = [];
+ var decoder = decodingContext.decoder;
+ var contexts = decodingContext.contextCache.getContexts('GB');
+ var ltp = 0,
+ j,
+ i0,
+ j0,
+ contextLabel = 0,
+ bit,
+ shift;
+
+ for (var i = 0; i < height; i++) {
+ if (prediction) {
+ var sltp = decoder.readBit(contexts, pseudoPixelContext);
+ ltp ^= sltp;
+
+ if (ltp) {
+ bitmap.push(row);
+ continue;
+ }
+ }
+
+ row = new Uint8Array(row);
+ bitmap.push(row);
+
+ for (j = 0; j < width; j++) {
+ if (useskip && skip[i][j]) {
+ row[j] = 0;
+ continue;
+ }
+
+ if (j >= sbb_left && j < sbb_right && i >= sbb_top) {
+ contextLabel = contextLabel << 1 & reuseMask;
+
+ for (k = 0; k < changingEntriesLength; k++) {
+ i0 = i + changingTemplateY[k];
+ j0 = j + changingTemplateX[k];
+ bit = bitmap[i0][j0];
+
+ if (bit) {
+ bit = changingTemplateBit[k];
+ contextLabel |= bit;
+ }
+ }
+ } else {
+ contextLabel = 0;
+ shift = templateLength - 1;
+
+ for (k = 0; k < templateLength; k++, shift--) {
+ j0 = j + templateX[k];
+
+ if (j0 >= 0 && j0 < width) {
+ i0 = i + templateY[k];
+
+ if (i0 >= 0) {
+ bit = bitmap[i0][j0];
+
+ if (bit) {
+ contextLabel |= bit << shift;
+ }
+ }
+ }
+ }
+ }
+
+ var pixel = decoder.readBit(contexts, contextLabel);
+ row[j] = pixel;
+ }
+ }
+
+ return bitmap;
+ }
+
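+ // Generic refinement region decoding: refines referenceBitmap into a width-by-height bitmap
+ // using the refinement coding/reference templates; prediction is not supported and throws.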
+ function decodeRefinement(width, height, templateIndex, referenceBitmap, offsetX, offsetY, prediction, at, decodingContext) {
+ var codingTemplate = RefinementTemplates[templateIndex].coding;
+
+ if (templateIndex === 0) {
+ codingTemplate = codingTemplate.concat([at[0]]);
+ }
+
+ var codingTemplateLength = codingTemplate.length;
+ var codingTemplateX = new Int32Array(codingTemplateLength);
+ var codingTemplateY = new Int32Array(codingTemplateLength);
+ var k;
+
+ for (k = 0; k < codingTemplateLength; k++) {
+ codingTemplateX[k] = codingTemplate[k].x;
+ codingTemplateY[k] = codingTemplate[k].y;
+ }
+
+ var referenceTemplate = RefinementTemplates[templateIndex].reference;
+
+ if (templateIndex === 0) {
+ referenceTemplate = referenceTemplate.concat([at[1]]);
+ }
+
+ var referenceTemplateLength = referenceTemplate.length;
+ var referenceTemplateX = new Int32Array(referenceTemplateLength);
+ var referenceTemplateY = new Int32Array(referenceTemplateLength);
+
+ for (k = 0; k < referenceTemplateLength; k++) {
+ referenceTemplateX[k] = referenceTemplate[k].x;
+ referenceTemplateY[k] = referenceTemplate[k].y;
+ }
+
+ var referenceWidth = referenceBitmap[0].length;
+ var referenceHeight = referenceBitmap.length;
+ var pseudoPixelContext = RefinementReusedContexts[templateIndex];
+ var bitmap = [];
+ var decoder = decodingContext.decoder;
+ var contexts = decodingContext.contextCache.getContexts('GR');
+ var ltp = 0;
+
+ for (var i = 0; i < height; i++) {
+ if (prediction) {
+ var sltp = decoder.readBit(contexts, pseudoPixelContext);
+ ltp ^= sltp;
+
+ if (ltp) {
+ throw new Jbig2Error('prediction is not supported');
+ }
+ }
+
+ var row = new Uint8Array(width);
+ bitmap.push(row);
+
+ for (var j = 0; j < width; j++) {
+ var i0, j0;
+ var contextLabel = 0;
+
+ for (k = 0; k < codingTemplateLength; k++) {
+ i0 = i + codingTemplateY[k];
+ j0 = j + codingTemplateX[k];
+
+ if (i0 < 0 || j0 < 0 || j0 >= width) {
+ contextLabel <<= 1;
+ } else {
+ contextLabel = contextLabel << 1 | bitmap[i0][j0];
+ }
+ }
+
+ for (k = 0; k < referenceTemplateLength; k++) {
+ i0 = i + referenceTemplateY[k] - offsetY;
+ j0 = j + referenceTemplateX[k] - offsetX;
+
+ if (i0 < 0 || i0 >= referenceHeight || j0 < 0 || j0 >= referenceWidth) {
+ contextLabel <<= 1;
+ } else {
+ contextLabel = contextLabel << 1 | referenceBitmap[i0][j0];
+ }
+ }
+
+ var pixel = decoder.readBit(contexts, contextLabel);
+ row[j] = pixel;
+ }
+ }
+
+ return bitmap;
+ }
+
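+ // Symbol dictionary decoding: builds the new symbol bitmaps height class by height class
+ // (arithmetic or Huffman, optionally refined/aggregated), then returns the subset selected by
+ // the run-length export flags.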
+ function decodeSymbolDictionary(huffman, refinement, symbols, numberOfNewSymbols, numberOfExportedSymbols, huffmanTables, templateIndex, at, refinementTemplateIndex, refinementAt, decodingContext, huffmanInput) {
+ if (huffman && refinement) {
+ throw new Jbig2Error('symbol refinement with Huffman is not supported');
+ }
+
+ var newSymbols = [];
+ var currentHeight = 0;
+ var symbolCodeLength = (0, _util.log2)(symbols.length + numberOfNewSymbols);
+ var decoder = decodingContext.decoder;
+ var contextCache = decodingContext.contextCache;
+ var tableB1, symbolWidths;
+
+ if (huffman) {
+ tableB1 = getStandardTable(1);
+ symbolWidths = [];
+ symbolCodeLength = Math.max(symbolCodeLength, 1);
+ }
+
+ while (newSymbols.length < numberOfNewSymbols) {
+ var deltaHeight = huffman ? huffmanTables.tableDeltaHeight.decode(huffmanInput) : decodeInteger(contextCache, 'IADH', decoder);
+ currentHeight += deltaHeight;
+ var currentWidth = 0,
+ totalWidth = 0;
+ var firstSymbol = huffman ? symbolWidths.length : 0;
+
+ while (true) {
+ var deltaWidth = huffman ? huffmanTables.tableDeltaWidth.decode(huffmanInput) : decodeInteger(contextCache, 'IADW', decoder);
+
+ if (deltaWidth === null) {
+ break;
+ }
+
+ currentWidth += deltaWidth;
+ totalWidth += currentWidth;
+ var bitmap;
+
+ if (refinement) {
+ var numberOfInstances = decodeInteger(contextCache, 'IAAI', decoder);
+
+ if (numberOfInstances > 1) {
+ bitmap = decodeTextRegion(huffman, refinement, currentWidth, currentHeight, 0, numberOfInstances, 1, symbols.concat(newSymbols), symbolCodeLength, 0, 0, 1, 0, huffmanTables, refinementTemplateIndex, refinementAt, decodingContext, 0, huffmanInput);
+ } else {
+ var symbolId = decodeIAID(contextCache, decoder, symbolCodeLength);
+ var rdx = decodeInteger(contextCache, 'IARDX', decoder);
+ var rdy = decodeInteger(contextCache, 'IARDY', decoder);
+ var symbol = symbolId < symbols.length ? symbols[symbolId] : newSymbols[symbolId - symbols.length];
+ bitmap = decodeRefinement(currentWidth, currentHeight, refinementTemplateIndex, symbol, rdx, rdy, false, refinementAt, decodingContext);
+ }
+
+ newSymbols.push(bitmap);
+ } else if (huffman) {
+ symbolWidths.push(currentWidth);
+ } else {
+ bitmap = decodeBitmap(false, currentWidth, currentHeight, templateIndex, false, null, at, decodingContext);
+ newSymbols.push(bitmap);
+ }
+ }
+
+ if (huffman && !refinement) {
+ var bitmapSize = huffmanTables.tableBitmapSize.decode(huffmanInput);
+ huffmanInput.byteAlign();
+ var collectiveBitmap = void 0;
+
+ if (bitmapSize === 0) {
+ collectiveBitmap = readUncompressedBitmap(huffmanInput, totalWidth, currentHeight);
+ } else {
+ var originalEnd = huffmanInput.end;
+ var bitmapEnd = huffmanInput.position + bitmapSize;
+ huffmanInput.end = bitmapEnd;
+ collectiveBitmap = decodeMMRBitmap(huffmanInput, totalWidth, currentHeight, false);
+ huffmanInput.end = originalEnd;
+ huffmanInput.position = bitmapEnd;
+ }
+
+ var numberOfSymbolsDecoded = symbolWidths.length;
+
+ if (firstSymbol === numberOfSymbolsDecoded - 1) {
+ newSymbols.push(collectiveBitmap);
+ } else {
+ var _i = void 0,
+ y = void 0,
+ xMin = 0,
+ xMax = void 0,
+ bitmapWidth = void 0,
+ symbolBitmap = void 0;
+
+ for (_i = firstSymbol; _i < numberOfSymbolsDecoded; _i++) {
+ bitmapWidth = symbolWidths[_i];
+ xMax = xMin + bitmapWidth;
+ symbolBitmap = [];
+
+ for (y = 0; y < currentHeight; y++) {
+ symbolBitmap.push(collectiveBitmap[y].subarray(xMin, xMax));
+ }
+
+ newSymbols.push(symbolBitmap);
+ xMin = xMax;
+ }
+ }
+ }
+ }
+
+ var exportedSymbols = [];
+ var flags = [],
+ currentFlag = false;
+ var totalSymbolsLength = symbols.length + numberOfNewSymbols;
+
+ while (flags.length < totalSymbolsLength) {
+ var runLength = huffman ? tableB1.decode(huffmanInput) : decodeInteger(contextCache, 'IAEX', decoder);
+
+ while (runLength--) {
+ flags.push(currentFlag);
+ }
+
+ currentFlag = !currentFlag;
+ }
+
+ for (var i = 0, ii = symbols.length; i < ii; i++) {
+ if (flags[i]) {
+ exportedSymbols.push(symbols[i]);
+ }
+ }
+
+ for (var j = 0; j < numberOfNewSymbols; i++, j++) {
+ if (flags[i]) {
+ exportedSymbols.push(newSymbols[j]);
+ }
+ }
+
+ return exportedSymbols;
+ }
+
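+ // Text region decoding: places symbol instances onto the region bitmap strip by strip, combining
+ // them with OR (operator 0) or XOR (operator 2); any other combination operator throws.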
+ function decodeTextRegion(huffman, refinement, width, height, defaultPixelValue, numberOfSymbolInstances, stripSize, inputSymbols, symbolCodeLength, transposed, dsOffset, referenceCorner, combinationOperator, huffmanTables, refinementTemplateIndex, refinementAt, decodingContext, logStripSize, huffmanInput) {
+ if (huffman && refinement) {
+ throw new Jbig2Error('refinement with Huffman is not supported');
+ }
+
+ var bitmap = [];
+ var i, row;
+
+ for (i = 0; i < height; i++) {
+ row = new Uint8Array(width);
+
+ if (defaultPixelValue) {
+ for (var j = 0; j < width; j++) {
+ row[j] = defaultPixelValue;
+ }
+ }
+
+ bitmap.push(row);
+ }
+
+ var decoder = decodingContext.decoder;
+ var contextCache = decodingContext.contextCache;
+ var stripT = huffman ? -huffmanTables.tableDeltaT.decode(huffmanInput) : -decodeInteger(contextCache, 'IADT', decoder);
+ var firstS = 0;
+ i = 0;
+
+ while (i < numberOfSymbolInstances) {
+ var deltaT = huffman ? huffmanTables.tableDeltaT.decode(huffmanInput) : decodeInteger(contextCache, 'IADT', decoder);
+ stripT += deltaT;
+ var deltaFirstS = huffman ? huffmanTables.tableFirstS.decode(huffmanInput) : decodeInteger(contextCache, 'IAFS', decoder);
+ firstS += deltaFirstS;
+ var currentS = firstS;
+
+ do {
+ var currentT = 0;
+
+ if (stripSize > 1) {
+ currentT = huffman ? huffmanInput.readBits(logStripSize) : decodeInteger(contextCache, 'IAIT', decoder);
+ }
+
+ var t = stripSize * stripT + currentT;
+ var symbolId = huffman ? huffmanTables.symbolIDTable.decode(huffmanInput) : decodeIAID(contextCache, decoder, symbolCodeLength);
+ var applyRefinement = refinement && (huffman ? huffmanInput.readBit() : decodeInteger(contextCache, 'IARI', decoder));
+ var symbolBitmap = inputSymbols[symbolId];
+ var symbolWidth = symbolBitmap[0].length;
+ var symbolHeight = symbolBitmap.length;
+
+ if (applyRefinement) {
+ var rdw = decodeInteger(contextCache, 'IARDW', decoder);
+ var rdh = decodeInteger(contextCache, 'IARDH', decoder);
+ var rdx = decodeInteger(contextCache, 'IARDX', decoder);
+ var rdy = decodeInteger(contextCache, 'IARDY', decoder);
+ symbolWidth += rdw;
+ symbolHeight += rdh;
+ symbolBitmap = decodeRefinement(symbolWidth, symbolHeight, refinementTemplateIndex, symbolBitmap, (rdw >> 1) + rdx, (rdh >> 1) + rdy, false, refinementAt, decodingContext);
+ }
+
+ var offsetT = t - (referenceCorner & 1 ? 0 : symbolHeight - 1);
+ var offsetS = currentS - (referenceCorner & 2 ? symbolWidth - 1 : 0);
+ var s2, t2, symbolRow;
+
+ if (transposed) {
+ for (s2 = 0; s2 < symbolHeight; s2++) {
+ row = bitmap[offsetS + s2];
+
+ if (!row) {
+ continue;
+ }
+
+ symbolRow = symbolBitmap[s2];
+ var maxWidth = Math.min(width - offsetT, symbolWidth);
+
+ switch (combinationOperator) {
+ case 0:
+ for (t2 = 0; t2 < maxWidth; t2++) {
+ row[offsetT + t2] |= symbolRow[t2];
+ }
+
+ break;
+
+ case 2:
+ for (t2 = 0; t2 < maxWidth; t2++) {
+ row[offsetT + t2] ^= symbolRow[t2];
+ }
+
+ break;
+
+ default:
+ throw new Jbig2Error("operator ".concat(combinationOperator, " is not supported"));
+ }
+ }
+
+ currentS += symbolHeight - 1;
+ } else {
+ for (t2 = 0; t2 < symbolHeight; t2++) {
+ row = bitmap[offsetT + t2];
+
+ if (!row) {
+ continue;
+ }
+
+ symbolRow = symbolBitmap[t2];
+
+ switch (combinationOperator) {
+ case 0:
+ for (s2 = 0; s2 < symbolWidth; s2++) {
+ row[offsetS + s2] |= symbolRow[s2];
+ }
+
+ break;
+
+ case 2:
+ for (s2 = 0; s2 < symbolWidth; s2++) {
+ row[offsetS + s2] ^= symbolRow[s2];
+ }
+
+ break;
+
+ default:
+ throw new Jbig2Error("operator ".concat(combinationOperator, " is not supported"));
+ }
+ }
+
+ currentS += symbolWidth - 1;
+ }
+
+ i++;
+ var deltaS = huffman ? huffmanTables.tableDeltaS.decode(huffmanInput) : decodeInteger(contextCache, 'IADS', decoder);
+
+ if (deltaS === null) {
+ break;
+ }
+
+ currentS += deltaS + dsOffset;
+ } while (true);
+ }
+
+ return bitmap;
+ }
+
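+ // Pattern dictionary decoding: decodes one collective bitmap and slices it into
+ // (maxPatternIndex + 1) patterns of patternWidth x patternHeight each.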
+ function decodePatternDictionary(mmr, patternWidth, patternHeight, maxPatternIndex, template, decodingContext) {
+ var at = [];
+
+ if (!mmr) {
+ at.push({
+ x: -patternWidth,
+ y: 0
+ });
+
+ if (template === 0) {
+ at.push({
+ x: -3,
+ y: -1
+ });
+ at.push({
+ x: 2,
+ y: -2
+ });
+ at.push({
+ x: -2,
+ y: -2
+ });
+ }
+ }
+
+ var collectiveWidth = (maxPatternIndex + 1) * patternWidth;
+ var collectiveBitmap = decodeBitmap(mmr, collectiveWidth, patternHeight, template, false, null, at, decodingContext);
+ var patterns = [],
+ i = 0,
+ patternBitmap,
+ xMin,
+ xMax,
+ y;
+
+ while (i <= maxPatternIndex) {
+ patternBitmap = [];
+ xMin = patternWidth * i;
+ xMax = xMin + patternWidth;
+
+ for (y = 0; y < patternHeight; y++) {
+ patternBitmap.push(collectiveBitmap[y].subarray(xMin, xMax));
+ }
+
+ patterns.push(patternBitmap);
+ i++;
+ }
+
+ return patterns;
+ }
+
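+ // Halftone region decoding: decodes bitsPerValue gray-scale bit planes, derives a pattern index
+ // per grid cell (Gray-code style XOR chain), and ORs the selected pattern into the region bitmap.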
+ function decodeHalftoneRegion(mmr, patterns, template, regionWidth, regionHeight, defaultPixelValue, enableSkip, combinationOperator, gridWidth, gridHeight, gridOffsetX, gridOffsetY, gridVectorX, gridVectorY, decodingContext) {
+ var skip = null;
+
+ if (enableSkip) {
+ throw new Jbig2Error('skip is not supported');
+ }
+
+ if (combinationOperator !== 0) {
+ throw new Jbig2Error('operator ' + combinationOperator + ' is not supported in halftone region');
+ }
+
+ var regionBitmap = [];
+ var i, j, row;
+
+ for (i = 0; i < regionHeight; i++) {
+ row = new Uint8Array(regionWidth);
+
+ if (defaultPixelValue) {
+ for (j = 0; j < regionWidth; j++) {
+ row[j] = defaultPixelValue;
+ }
+ }
+
+ regionBitmap.push(row);
+ }
+
+ var numberOfPatterns = patterns.length;
+ var pattern0 = patterns[0];
+ var patternWidth = pattern0[0].length,
+ patternHeight = pattern0.length;
+ var bitsPerValue = (0, _util.log2)(numberOfPatterns);
+ var at = [];
+
+ if (!mmr) {
+ at.push({
+ x: template <= 1 ? 3 : 2,
+ y: -1
+ });
+
+ if (template === 0) {
+ at.push({
+ x: -3,
+ y: -1
+ });
+ at.push({
+ x: 2,
+ y: -2
+ });
+ at.push({
+ x: -2,
+ y: -2
+ });
+ }
+ }
+
+ var grayScaleBitPlanes = [],
+ mmrInput,
+ bitmap;
+
+ if (mmr) {
+ mmrInput = new Reader(decodingContext.data, decodingContext.start, decodingContext.end);
+ }
+
+ for (i = bitsPerValue - 1; i >= 0; i--) {
+ if (mmr) {
+ bitmap = decodeMMRBitmap(mmrInput, gridWidth, gridHeight, true);
+ } else {
+ bitmap = decodeBitmap(false, gridWidth, gridHeight, template, false, skip, at, decodingContext);
+ }
+
+ grayScaleBitPlanes[i] = bitmap;
+ }
+
+ var mg, ng, bit, patternIndex, patternBitmap, x, y, patternRow, regionRow;
+
+ for (mg = 0; mg < gridHeight; mg++) {
+ for (ng = 0; ng < gridWidth; ng++) {
+ bit = 0;
+ patternIndex = 0;
+
+ for (j = bitsPerValue - 1; j >= 0; j--) {
+ bit = grayScaleBitPlanes[j][mg][ng] ^ bit;
+ patternIndex |= bit << j;
+ }
+
+ patternBitmap = patterns[patternIndex];
+ x = gridOffsetX + mg * gridVectorY + ng * gridVectorX >> 8;
+ y = gridOffsetY + mg * gridVectorX - ng * gridVectorY >> 8;
+
+ if (x >= 0 && x + patternWidth <= regionWidth && y >= 0 && y + patternHeight <= regionHeight) {
+ for (i = 0; i < patternHeight; i++) {
+ regionRow = regionBitmap[y + i];
+ patternRow = patternBitmap[i];
+
+ for (j = 0; j < patternWidth; j++) {
+ regionRow[x + j] |= patternRow[j];
+ }
+ }
+ } else {
+ var regionX = void 0,
+ regionY = void 0;
+
+ for (i = 0; i < patternHeight; i++) {
+ regionY = y + i;
+
+ if (regionY < 0 || regionY >= regionHeight) {
+ continue;
+ }
+
+ regionRow = regionBitmap[regionY];
+ patternRow = patternBitmap[i];
+
+ for (j = 0; j < patternWidth; j++) {
+ regionX = x + j;
+
+ if (regionX >= 0 && regionX < regionWidth) {
+ regionRow[regionX] |= patternRow[j];
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return regionBitmap;
+ }
+
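+ // Parses one segment header: segment number, type, referred-to segments, page association and
+ // data length, including the search for the end of unknown-length immediate generic regions.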
+ function readSegmentHeader(data, start) {
+ var segmentHeader = {};
+ segmentHeader.number = (0, _util.readUint32)(data, start);
+ var flags = data[start + 4];
+ var segmentType = flags & 0x3F;
+
+ if (!SegmentTypes[segmentType]) {
+ throw new Jbig2Error('invalid segment type: ' + segmentType);
+ }
+
+ segmentHeader.type = segmentType;
+ segmentHeader.typeName = SegmentTypes[segmentType];
+ segmentHeader.deferredNonRetain = !!(flags & 0x80);
+ var pageAssociationFieldSize = !!(flags & 0x40);
+ var referredFlags = data[start + 5];
+ var referredToCount = referredFlags >> 5 & 7;
+ var retainBits = [referredFlags & 31];
+ var position = start + 6;
+
+ if (referredFlags === 7) {
+ referredToCount = (0, _util.readUint32)(data, position - 1) & 0x1FFFFFFF;
+ position += 3;
+ var bytes = referredToCount + 7 >> 3;
+ retainBits[0] = data[position++];
+
+ while (--bytes > 0) {
+ retainBits.push(data[position++]);
+ }
+ } else if (referredFlags === 5 || referredFlags === 6) {
+ throw new Jbig2Error('invalid referred-to flags');
+ }
+
+ segmentHeader.retainBits = retainBits;
+ var referredToSegmentNumberSize = segmentHeader.number <= 256 ? 1 : segmentHeader.number <= 65536 ? 2 : 4;
+ var referredTo = [];
+ var i, ii;
+
+ for (i = 0; i < referredToCount; i++) {
+ var number = referredToSegmentNumberSize === 1 ? data[position] : referredToSegmentNumberSize === 2 ? (0, _util.readUint16)(data, position) : (0, _util.readUint32)(data, position);
+ referredTo.push(number);
+ position += referredToSegmentNumberSize;
+ }
+
+ segmentHeader.referredTo = referredTo;
+
+ if (!pageAssociationFieldSize) {
+ segmentHeader.pageAssociation = data[position++];
+ } else {
+ segmentHeader.pageAssociation = (0, _util.readUint32)(data, position);
+ position += 4;
+ }
+
+ segmentHeader.length = (0, _util.readUint32)(data, position);
+ position += 4;
+
+ if (segmentHeader.length === 0xFFFFFFFF) {
+ if (segmentType === 38) {
+ var genericRegionInfo = readRegionSegmentInformation(data, position);
+ var genericRegionSegmentFlags = data[position + RegionSegmentInformationFieldLength];
+ var genericRegionMmr = !!(genericRegionSegmentFlags & 1);
+ var searchPatternLength = 6;
+ var searchPattern = new Uint8Array(searchPatternLength);
+
+ if (!genericRegionMmr) {
+ searchPattern[0] = 0xFF;
+ searchPattern[1] = 0xAC;
+ }
+
+ searchPattern[2] = genericRegionInfo.height >>> 24 & 0xFF;
+ searchPattern[3] = genericRegionInfo.height >> 16 & 0xFF;
+ searchPattern[4] = genericRegionInfo.height >> 8 & 0xFF;
+ searchPattern[5] = genericRegionInfo.height & 0xFF;
+
+ for (i = position, ii = data.length; i < ii; i++) {
+ var j = 0;
+
+ while (j < searchPatternLength && searchPattern[j] === data[i + j]) {
+ j++;
+ }
+
+ if (j === searchPatternLength) {
+ segmentHeader.length = i + searchPatternLength;
+ break;
+ }
+ }
+
+ if (segmentHeader.length === 0xFFFFFFFF) {
+ throw new Jbig2Error('segment end was not found');
+ }
+ } else {
+ throw new Jbig2Error('invalid unknown segment length');
+ }
+ }
+
+ segmentHeader.headerEnd = position;
+ return segmentHeader;
+ }
+
+ function readSegments(header, data, start, end) {
+ var segments = [];
+ var position = start;
+
+ while (position < end) {
+ var segmentHeader = readSegmentHeader(data, position);
+ position = segmentHeader.headerEnd;
+ var segment = {
+ header: segmentHeader,
+ data: data
+ };
+
+ if (!header.randomAccess) {
+ segment.start = position;
+ position += segmentHeader.length;
+ segment.end = position;
+ }
+
+ segments.push(segment);
+
+ if (segmentHeader.type === 51) {
+ break;
+ }
+ }
+
+ if (header.randomAccess) {
+ for (var i = 0, ii = segments.length; i < ii; i++) {
+ segments[i].start = position;
+ position += segments[i].header.length;
+ segments[i].end = position;
+ }
+ }
+
+ return segments;
+ }
+
+ function readRegionSegmentInformation(data, start) {
+ return {
+ width: (0, _util.readUint32)(data, start),
+ height: (0, _util.readUint32)(data, start + 4),
+ x: (0, _util.readUint32)(data, start + 8),
+ y: (0, _util.readUint32)(data, start + 12),
+ combinationOperator: data[start + 16] & 7
+ };
+ }
+
+ var RegionSegmentInformationFieldLength = 17;
+
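+ // Reads the type-specific parameters of a segment and dispatches them to the visitor's
+ // matching 'on' + typeName callback, if the visitor implements it.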
+ function processSegment(segment, visitor) {
+ var header = segment.header;
+ var data = segment.data,
+ position = segment.start,
+ end = segment.end;
+ var args, at, i, atLength;
+
+ switch (header.type) {
+ case 0:
+ var dictionary = {};
+ var dictionaryFlags = (0, _util.readUint16)(data, position);
+ dictionary.huffman = !!(dictionaryFlags & 1);
+ dictionary.refinement = !!(dictionaryFlags & 2);
+ dictionary.huffmanDHSelector = dictionaryFlags >> 2 & 3;
+ dictionary.huffmanDWSelector = dictionaryFlags >> 4 & 3;
+ dictionary.bitmapSizeSelector = dictionaryFlags >> 6 & 1;
+ dictionary.aggregationInstancesSelector = dictionaryFlags >> 7 & 1;
+ dictionary.bitmapCodingContextUsed = !!(dictionaryFlags & 256);
+ dictionary.bitmapCodingContextRetained = !!(dictionaryFlags & 512);
+ dictionary.template = dictionaryFlags >> 10 & 3;
+ dictionary.refinementTemplate = dictionaryFlags >> 12 & 1;
+ position += 2;
+
+ if (!dictionary.huffman) {
+ atLength = dictionary.template === 0 ? 4 : 1;
+ at = [];
+
+ for (i = 0; i < atLength; i++) {
+ at.push({
+ x: (0, _util.readInt8)(data, position),
+ y: (0, _util.readInt8)(data, position + 1)
+ });
+ position += 2;
+ }
+
+ dictionary.at = at;
+ }
+
+ if (dictionary.refinement && !dictionary.refinementTemplate) {
+ at = [];
+
+ for (i = 0; i < 2; i++) {
+ at.push({
+ x: (0, _util.readInt8)(data, position),
+ y: (0, _util.readInt8)(data, position + 1)
+ });
+ position += 2;
+ }
+
+ dictionary.refinementAt = at;
+ }
+
+ dictionary.numberOfExportedSymbols = (0, _util.readUint32)(data, position);
+ position += 4;
+ dictionary.numberOfNewSymbols = (0, _util.readUint32)(data, position);
+ position += 4;
+ args = [dictionary, header.number, header.referredTo, data, position, end];
+ break;
+
+ case 6:
+ case 7:
+ var textRegion = {};
+ textRegion.info = readRegionSegmentInformation(data, position);
+ position += RegionSegmentInformationFieldLength;
+ var textRegionSegmentFlags = (0, _util.readUint16)(data, position);
+ position += 2;
+ textRegion.huffman = !!(textRegionSegmentFlags & 1);
+ textRegion.refinement = !!(textRegionSegmentFlags & 2);
+ textRegion.logStripSize = textRegionSegmentFlags >> 2 & 3;
+ textRegion.stripSize = 1 << textRegion.logStripSize;
+ textRegion.referenceCorner = textRegionSegmentFlags >> 4 & 3;
+ textRegion.transposed = !!(textRegionSegmentFlags & 64);
+ textRegion.combinationOperator = textRegionSegmentFlags >> 7 & 3;
+ textRegion.defaultPixelValue = textRegionSegmentFlags >> 9 & 1;
+ textRegion.dsOffset = textRegionSegmentFlags << 17 >> 27;
+ textRegion.refinementTemplate = textRegionSegmentFlags >> 15 & 1;
+
+ if (textRegion.huffman) {
+ var textRegionHuffmanFlags = (0, _util.readUint16)(data, position);
+ position += 2;
+ textRegion.huffmanFS = textRegionHuffmanFlags & 3;
+ textRegion.huffmanDS = textRegionHuffmanFlags >> 2 & 3;
+ textRegion.huffmanDT = textRegionHuffmanFlags >> 4 & 3;
+ textRegion.huffmanRefinementDW = textRegionHuffmanFlags >> 6 & 3;
+ textRegion.huffmanRefinementDH = textRegionHuffmanFlags >> 8 & 3;
+ textRegion.huffmanRefinementDX = textRegionHuffmanFlags >> 10 & 3;
+ textRegion.huffmanRefinementDY = textRegionHuffmanFlags >> 12 & 3;
+ textRegion.huffmanRefinementSizeSelector = !!(textRegionHuffmanFlags & 0x4000);
+ }
+
+ if (textRegion.refinement && !textRegion.refinementTemplate) {
+ at = [];
+
+ for (i = 0; i < 2; i++) {
+ at.push({
+ x: (0, _util.readInt8)(data, position),
+ y: (0, _util.readInt8)(data, position + 1)
+ });
+ position += 2;
+ }
+
+ textRegion.refinementAt = at;
+ }
+
+ textRegion.numberOfSymbolInstances = (0, _util.readUint32)(data, position);
+ position += 4;
+ args = [textRegion, header.referredTo, data, position, end];
+ break;
+
+ case 16:
+ var patternDictionary = {};
+ var patternDictionaryFlags = data[position++];
+ patternDictionary.mmr = !!(patternDictionaryFlags & 1);
+ patternDictionary.template = patternDictionaryFlags >> 1 & 3;
+ patternDictionary.patternWidth = data[position++];
+ patternDictionary.patternHeight = data[position++];
+ patternDictionary.maxPatternIndex = (0, _util.readUint32)(data, position);
+ position += 4;
+ args = [patternDictionary, header.number, data, position, end];
+ break;
+
+ case 22:
+ case 23:
+ var halftoneRegion = {};
+ halftoneRegion.info = readRegionSegmentInformation(data, position);
+ position += RegionSegmentInformationFieldLength;
+ var halftoneRegionFlags = data[position++];
+ halftoneRegion.mmr = !!(halftoneRegionFlags & 1);
+ halftoneRegion.template = halftoneRegionFlags >> 1 & 3;
+ halftoneRegion.enableSkip = !!(halftoneRegionFlags & 8);
+ halftoneRegion.combinationOperator = halftoneRegionFlags >> 4 & 7;
+ halftoneRegion.defaultPixelValue = halftoneRegionFlags >> 7 & 1;
+ halftoneRegion.gridWidth = (0, _util.readUint32)(data, position);
+ position += 4;
+ halftoneRegion.gridHeight = (0, _util.readUint32)(data, position);
+ position += 4;
+ halftoneRegion.gridOffsetX = (0, _util.readUint32)(data, position) & 0xFFFFFFFF;
+ position += 4;
+ halftoneRegion.gridOffsetY = (0, _util.readUint32)(data, position) & 0xFFFFFFFF;
+ position += 4;
+ halftoneRegion.gridVectorX = (0, _util.readUint16)(data, position);
+ position += 2;
+ halftoneRegion.gridVectorY = (0, _util.readUint16)(data, position);
+ position += 2;
+ args = [halftoneRegion, header.referredTo, data, position, end];
+ break;
+
+ case 38:
+ case 39:
+ var genericRegion = {};
+ genericRegion.info = readRegionSegmentInformation(data, position);
+ position += RegionSegmentInformationFieldLength;
+ var genericRegionSegmentFlags = data[position++];
+ genericRegion.mmr = !!(genericRegionSegmentFlags & 1);
+ genericRegion.template = genericRegionSegmentFlags >> 1 & 3;
+ genericRegion.prediction = !!(genericRegionSegmentFlags & 8);
+
+ if (!genericRegion.mmr) {
+ atLength = genericRegion.template === 0 ? 4 : 1;
+ at = [];
+
+ for (i = 0; i < atLength; i++) {
+ at.push({
+ x: (0, _util.readInt8)(data, position),
+ y: (0, _util.readInt8)(data, position + 1)
+ });
+ position += 2;
+ }
+
+ genericRegion.at = at;
+ }
+
+ args = [genericRegion, data, position, end];
+ break;
+
+ case 48:
+ var pageInfo = {
+ width: (0, _util.readUint32)(data, position),
+ height: (0, _util.readUint32)(data, position + 4),
+ resolutionX: (0, _util.readUint32)(data, position + 8),
+ resolutionY: (0, _util.readUint32)(data, position + 12)
+ };
+
+ if (pageInfo.height === 0xFFFFFFFF) {
+ delete pageInfo.height;
+ }
+
+ var pageSegmentFlags = data[position + 16];
+ (0, _util.readUint16)(data, position + 17);
+ pageInfo.lossless = !!(pageSegmentFlags & 1);
+ pageInfo.refinement = !!(pageSegmentFlags & 2);
+ pageInfo.defaultPixelValue = pageSegmentFlags >> 2 & 1;
+ pageInfo.combinationOperator = pageSegmentFlags >> 3 & 3;
+ pageInfo.requiresBuffer = !!(pageSegmentFlags & 32);
+ pageInfo.combinationOperatorOverride = !!(pageSegmentFlags & 64);
+ args = [pageInfo];
+ break;
+
+ case 49:
+ break;
+
+ case 50:
+ break;
+
+ case 51:
+ break;
+
+ case 53:
+ args = [header.number, data, position, end];
+ break;
+
+ case 62:
+ break;
+
+ default:
+ throw new Jbig2Error("segment type ".concat(header.typeName, "(").concat(header.type, ")") + ' is not implemented');
+ }
+
+ var callbackName = 'on' + header.typeName;
+
+ if (callbackName in visitor) {
+ visitor[callbackName].apply(visitor, args);
+ }
+ }
+
+ function processSegments(segments, visitor) {
+ for (var i = 0, ii = segments.length; i < ii; i++) {
+ processSegment(segments[i], visitor);
+ }
+ }
+
+ function parseJbig2Chunks(chunks) {
+ var visitor = new SimpleSegmentVisitor();
+
+ for (var i = 0, ii = chunks.length; i < ii; i++) {
+ var chunk = chunks[i];
+ var segments = readSegments({}, chunk.data, chunk.start, chunk.end);
+ processSegments(segments, visitor);
+ }
+
+ return visitor.buffer;
+ }
+
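+ // Parses a standalone JBIG2 file (8-byte file header plus segments) and expands the bit-packed
+ // page buffer into one byte per pixel (0 for set/black bits, 255 otherwise).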
+ function parseJbig2(data) {
+ var position = 0,
+ end = data.length;
+
+ if (data[position] !== 0x97 || data[position + 1] !== 0x4A || data[position + 2] !== 0x42 || data[position + 3] !== 0x32 || data[position + 4] !== 0x0D || data[position + 5] !== 0x0A || data[position + 6] !== 0x1A || data[position + 7] !== 0x0A) {
+ throw new Jbig2Error('parseJbig2 - invalid header.');
+ }
+
+ var header = Object.create(null);
+ position += 8;
+ var flags = data[position++];
+ header.randomAccess = !(flags & 1);
+
+ if (!(flags & 2)) {
+ header.numberOfPages = (0, _util.readUint32)(data, position);
+ position += 4;
+ }
+
+ var segments = readSegments(header, data, position, end);
+ var visitor = new SimpleSegmentVisitor();
+ processSegments(segments, visitor);
+ var _visitor$currentPageI = visitor.currentPageInfo,
+ width = _visitor$currentPageI.width,
+ height = _visitor$currentPageI.height;
+ var bitPacked = visitor.buffer;
+ var imgData = new Uint8ClampedArray(width * height);
+ var q = 0,
+ k = 0;
+
+ for (var i = 0; i < height; i++) {
+ var mask = 0,
+ buffer = void 0;
+
+ for (var j = 0; j < width; j++) {
+ if (!mask) {
+ mask = 128;
+ buffer = bitPacked[k++];
+ }
+
+ imgData[q++] = buffer & mask ? 0 : 255;
+ mask >>= 1;
+ }
+ }
+
+ return {
+ imgData: imgData,
+ width: width,
+ height: height
+ };
+ }
+
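+ // SimpleSegmentVisitor: collects decoded symbol/pattern dictionaries and composites region
+ // bitmaps onto a single bit-packed page buffer (this.buffer).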
+ function SimpleSegmentVisitor() {}
+
+ SimpleSegmentVisitor.prototype = {
+ onPageInformation: function SimpleSegmentVisitor_onPageInformation(info) {
+ this.currentPageInfo = info;
+ var rowSize = info.width + 7 >> 3;
+ var buffer = new Uint8ClampedArray(rowSize * info.height);
+
+ if (info.defaultPixelValue) {
+ for (var i = 0, ii = buffer.length; i < ii; i++) {
+ buffer[i] = 0xFF;
+ }
+ }
+
+ this.buffer = buffer;
+ },
+ drawBitmap: function SimpleSegmentVisitor_drawBitmap(regionInfo, bitmap) {
+ var pageInfo = this.currentPageInfo;
+ var width = regionInfo.width,
+ height = regionInfo.height;
+ var rowSize = pageInfo.width + 7 >> 3;
+ var combinationOperator = pageInfo.combinationOperatorOverride ? regionInfo.combinationOperator : pageInfo.combinationOperator;
+ var buffer = this.buffer;
+ var mask0 = 128 >> (regionInfo.x & 7);
+ var offset0 = regionInfo.y * rowSize + (regionInfo.x >> 3);
+ var i, j, mask, offset;
+
+ switch (combinationOperator) {
+ case 0:
+ for (i = 0; i < height; i++) {
+ mask = mask0;
+ offset = offset0;
+
+ for (j = 0; j < width; j++) {
+ if (bitmap[i][j]) {
+ buffer[offset] |= mask;
+ }
+
+ mask >>= 1;
+
+ if (!mask) {
+ mask = 128;
+ offset++;
+ }
+ }
+
+ offset0 += rowSize;
+ }
+
+ break;
+
+ case 2:
+ for (i = 0; i < height; i++) {
+ mask = mask0;
+ offset = offset0;
+
+ for (j = 0; j < width; j++) {
+ if (bitmap[i][j]) {
+ buffer[offset] ^= mask;
+ }
+
+ mask >>= 1;
+
+ if (!mask) {
+ mask = 128;
+ offset++;
+ }
+ }
+
+ offset0 += rowSize;
+ }
+
+ break;
+
+ default:
+ throw new Jbig2Error("operator ".concat(combinationOperator, " is not supported"));
+ }
+ },
+ onImmediateGenericRegion: function SimpleSegmentVisitor_onImmediateGenericRegion(region, data, start, end) {
+ var regionInfo = region.info;
+ var decodingContext = new DecodingContext(data, start, end);
+ var bitmap = decodeBitmap(region.mmr, regionInfo.width, regionInfo.height, region.template, region.prediction, null, region.at, decodingContext);
+ this.drawBitmap(regionInfo, bitmap);
+ },
+ onImmediateLosslessGenericRegion: function SimpleSegmentVisitor_onImmediateLosslessGenericRegion() {
+ this.onImmediateGenericRegion.apply(this, arguments);
+ },
+ onSymbolDictionary: function SimpleSegmentVisitor_onSymbolDictionary(dictionary, currentSegment, referredSegments, data, start, end) {
+ var huffmanTables, huffmanInput;
+
+ if (dictionary.huffman) {
+ huffmanTables = getSymbolDictionaryHuffmanTables(dictionary, referredSegments, this.customTables);
+ huffmanInput = new Reader(data, start, end);
+ }
+
+ var symbols = this.symbols;
+
+ if (!symbols) {
+ this.symbols = symbols = {};
+ }
+
+ var inputSymbols = [];
+
+ for (var i = 0, ii = referredSegments.length; i < ii; i++) {
+ var referredSymbols = symbols[referredSegments[i]];
+
+ if (referredSymbols) {
+ inputSymbols = inputSymbols.concat(referredSymbols);
+ }
+ }
+
+ var decodingContext = new DecodingContext(data, start, end);
+ symbols[currentSegment] = decodeSymbolDictionary(dictionary.huffman, dictionary.refinement, inputSymbols, dictionary.numberOfNewSymbols, dictionary.numberOfExportedSymbols, huffmanTables, dictionary.template, dictionary.at, dictionary.refinementTemplate, dictionary.refinementAt, decodingContext, huffmanInput);
+ },
+ onImmediateTextRegion: function SimpleSegmentVisitor_onImmediateTextRegion(region, referredSegments, data, start, end) {
+ var regionInfo = region.info;
+ var huffmanTables, huffmanInput;
+ var symbols = this.symbols;
+ var inputSymbols = [];
+
+ for (var i = 0, ii = referredSegments.length; i < ii; i++) {
+ var referredSymbols = symbols[referredSegments[i]];
+
+ if (referredSymbols) {
+ inputSymbols = inputSymbols.concat(referredSymbols);
+ }
+ }
+
+ var symbolCodeLength = (0, _util.log2)(inputSymbols.length);
+
+ if (region.huffman) {
+ huffmanInput = new Reader(data, start, end);
+ huffmanTables = getTextRegionHuffmanTables(region, referredSegments, this.customTables, inputSymbols.length, huffmanInput);
+ }
+
+ var decodingContext = new DecodingContext(data, start, end);
+ var bitmap = decodeTextRegion(region.huffman, region.refinement, regionInfo.width, regionInfo.height, region.defaultPixelValue, region.numberOfSymbolInstances, region.stripSize, inputSymbols, symbolCodeLength, region.transposed, region.dsOffset, region.referenceCorner, region.combinationOperator, huffmanTables, region.refinementTemplate, region.refinementAt, decodingContext, region.logStripSize, huffmanInput);
+ this.drawBitmap(regionInfo, bitmap);
+ },
+ onImmediateLosslessTextRegion: function SimpleSegmentVisitor_onImmediateLosslessTextRegion() {
+ this.onImmediateTextRegion.apply(this, arguments);
+ },
+ onPatternDictionary: function onPatternDictionary(dictionary, currentSegment, data, start, end) {
+ var patterns = this.patterns;
+
+ if (!patterns) {
+ this.patterns = patterns = {};
+ }
+
+ var decodingContext = new DecodingContext(data, start, end);
+ patterns[currentSegment] = decodePatternDictionary(dictionary.mmr, dictionary.patternWidth, dictionary.patternHeight, dictionary.maxPatternIndex, dictionary.template, decodingContext);
+ },
+ onImmediateHalftoneRegion: function onImmediateHalftoneRegion(region, referredSegments, data, start, end) {
+ var patterns = this.patterns[referredSegments[0]];
+ var regionInfo = region.info;
+ var decodingContext = new DecodingContext(data, start, end);
+ var bitmap = decodeHalftoneRegion(region.mmr, patterns, region.template, regionInfo.width, regionInfo.height, region.defaultPixelValue, region.enableSkip, region.combinationOperator, region.gridWidth, region.gridHeight, region.gridOffsetX, region.gridOffsetY, region.gridVectorX, region.gridVectorY, decodingContext);
+ this.drawBitmap(regionInfo, bitmap);
+ },
+ onImmediateLosslessHalftoneRegion: function onImmediateLosslessHalftoneRegion() {
+ this.onImmediateHalftoneRegion.apply(this, arguments);
+ },
+ onTables: function onTables(currentSegment, data, start, end) {
+ var customTables = this.customTables;
+
+ if (!customTables) {
+ this.customTables = customTables = {};
+ }
+
+ customTables[currentSegment] = decodeTablesSegment(data, start, end);
+ }
+ };
+
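+ // Huffman support: table lines, a prefix-code tree, custom table segments, and the standard
+ // tables B.1-B.15 used by the Huffman-coded variants above.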
+ function HuffmanLine(lineData) {
+ if (lineData.length === 2) {
+ this.isOOB = true;
+ this.rangeLow = 0;
+ this.prefixLength = lineData[0];
+ this.rangeLength = 0;
+ this.prefixCode = lineData[1];
+ this.isLowerRange = false;
+ } else {
+ this.isOOB = false;
+ this.rangeLow = lineData[0];
+ this.prefixLength = lineData[1];
+ this.rangeLength = lineData[2];
+ this.prefixCode = lineData[3];
+ this.isLowerRange = lineData[4] === 'lower';
+ }
+ }
+
+ function HuffmanTreeNode(line) {
+ this.children = [];
+
+ if (line) {
+ this.isLeaf = true;
+ this.rangeLength = line.rangeLength;
+ this.rangeLow = line.rangeLow;
+ this.isLowerRange = line.isLowerRange;
+ this.isOOB = line.isOOB;
+ } else {
+ this.isLeaf = false;
+ }
+ }
+
+ HuffmanTreeNode.prototype = {
+ buildTree: function buildTree(line, shift) {
+ var bit = line.prefixCode >> shift & 1;
+
+ if (shift <= 0) {
+ this.children[bit] = new HuffmanTreeNode(line);
+ } else {
+ var node = this.children[bit];
+
+ if (!node) {
+ this.children[bit] = node = new HuffmanTreeNode(null);
+ }
+
+ node.buildTree(line, shift - 1);
+ }
+ },
+ decodeNode: function decodeNode(reader) {
+ if (this.isLeaf) {
+ if (this.isOOB) {
+ return null;
+ }
+
+ var htOffset = reader.readBits(this.rangeLength);
+ return this.rangeLow + (this.isLowerRange ? -htOffset : htOffset);
+ }
+
+ var node = this.children[reader.readBit()];
+
+ if (!node) {
+ throw new Jbig2Error('invalid Huffman data');
+ }
+
+ return node.decodeNode(reader);
+ }
+ };
+
+ function HuffmanTable(lines, prefixCodesDone) {
+ if (!prefixCodesDone) {
+ this.assignPrefixCodes(lines);
+ }
+
+ this.rootNode = new HuffmanTreeNode(null);
+ var i,
+ ii = lines.length,
+ line;
+
+ for (i = 0; i < ii; i++) {
+ line = lines[i];
+
+ if (line.prefixLength > 0) {
+ this.rootNode.buildTree(line, line.prefixLength - 1);
+ }
+ }
+ }
+
+ HuffmanTable.prototype = {
+ decode: function decode(reader) {
+ return this.rootNode.decodeNode(reader);
+ },
+ assignPrefixCodes: function assignPrefixCodes(lines) {
+ var linesLength = lines.length,
+ prefixLengthMax = 0,
+ i;
+
+ for (i = 0; i < linesLength; i++) {
+ prefixLengthMax = Math.max(prefixLengthMax, lines[i].prefixLength);
+ }
+
+ var histogram = new Uint32Array(prefixLengthMax + 1);
+
+ for (i = 0; i < linesLength; i++) {
+ histogram[lines[i].prefixLength]++;
+ }
+
+ var currentLength = 1,
+ firstCode = 0,
+ currentCode,
+ currentTemp,
+ line;
+ histogram[0] = 0;
+
+ while (currentLength <= prefixLengthMax) {
+ firstCode = firstCode + histogram[currentLength - 1] << 1;
+ currentCode = firstCode;
+ currentTemp = 0;
+
+ while (currentTemp < linesLength) {
+ line = lines[currentTemp];
+
+ if (line.prefixLength === currentLength) {
+ line.prefixCode = currentCode;
+ currentCode++;
+ }
+
+ currentTemp++;
+ }
+
+ currentLength++;
+ }
+ }
+ };
+
+ function decodeTablesSegment(data, start, end) {
+ var flags = data[start];
+ var lowestValue = (0, _util.readUint32)(data, start + 1) & 0xFFFFFFFF;
+ var highestValue = (0, _util.readUint32)(data, start + 5) & 0xFFFFFFFF;
+ var reader = new Reader(data, start + 9, end);
+ var prefixSizeBits = (flags >> 1 & 7) + 1;
+ var rangeSizeBits = (flags >> 4 & 7) + 1;
+ var lines = [];
+ var prefixLength,
+ rangeLength,
+ currentRangeLow = lowestValue;
+
+ do {
+ prefixLength = reader.readBits(prefixSizeBits);
+ rangeLength = reader.readBits(rangeSizeBits);
+ lines.push(new HuffmanLine([currentRangeLow, prefixLength, rangeLength, 0]));
+ currentRangeLow += 1 << rangeLength;
+ } while (currentRangeLow < highestValue);
+
+ prefixLength = reader.readBits(prefixSizeBits);
+ lines.push(new HuffmanLine([lowestValue - 1, prefixLength, 32, 0, 'lower']));
+ prefixLength = reader.readBits(prefixSizeBits);
+ lines.push(new HuffmanLine([highestValue, prefixLength, 32, 0]));
+
+ if (flags & 1) {
+ prefixLength = reader.readBits(prefixSizeBits);
+ lines.push(new HuffmanLine([prefixLength, 0]));
+ }
+
+ return new HuffmanTable(lines, false);
+ }
+
+ var standardTablesCache = {};
+
+ function getStandardTable(number) {
+ var table = standardTablesCache[number];
+
+ if (table) {
+ return table;
+ }
+
+ var lines;
+
+ switch (number) {
+ case 1:
+ lines = [[0, 1, 4, 0x0], [16, 2, 8, 0x2], [272, 3, 16, 0x6], [65808, 3, 32, 0x7]];
+ break;
+
+ case 2:
+ lines = [[0, 1, 0, 0x0], [1, 2, 0, 0x2], [2, 3, 0, 0x6], [3, 4, 3, 0xE], [11, 5, 6, 0x1E], [75, 6, 32, 0x3E], [6, 0x3F]];
+ break;
+
+ case 3:
+ lines = [[-256, 8, 8, 0xFE], [0, 1, 0, 0x0], [1, 2, 0, 0x2], [2, 3, 0, 0x6], [3, 4, 3, 0xE], [11, 5, 6, 0x1E], [-257, 8, 32, 0xFF, 'lower'], [75, 7, 32, 0x7E], [6, 0x3E]];
+ break;
+
+ case 4:
+ lines = [[1, 1, 0, 0x0], [2, 2, 0, 0x2], [3, 3, 0, 0x6], [4, 4, 3, 0xE], [12, 5, 6, 0x1E], [76, 5, 32, 0x1F]];
+ break;
+
+ case 5:
+ lines = [[-255, 7, 8, 0x7E], [1, 1, 0, 0x0], [2, 2, 0, 0x2], [3, 3, 0, 0x6], [4, 4, 3, 0xE], [12, 5, 6, 0x1E], [-256, 7, 32, 0x7F, 'lower'], [76, 6, 32, 0x3E]];
+ break;
+
+ case 6:
+ lines = [[-2048, 5, 10, 0x1C], [-1024, 4, 9, 0x8], [-512, 4, 8, 0x9], [-256, 4, 7, 0xA], [-128, 5, 6, 0x1D], [-64, 5, 5, 0x1E], [-32, 4, 5, 0xB], [0, 2, 7, 0x0], [128, 3, 7, 0x2], [256, 3, 8, 0x3], [512, 4, 9, 0xC], [1024, 4, 10, 0xD], [-2049, 6, 32, 0x3E, 'lower'], [2048, 6, 32, 0x3F]];
+ break;
+
+ case 7:
+ lines = [[-1024, 4, 9, 0x8], [-512, 3, 8, 0x0], [-256, 4, 7, 0x9], [-128, 5, 6, 0x1A], [-64, 5, 5, 0x1B], [-32, 4, 5, 0xA], [0, 4, 5, 0xB], [32, 5, 5, 0x1C], [64, 5, 6, 0x1D], [128, 4, 7, 0xC], [256, 3, 8, 0x1], [512, 3, 9, 0x2], [1024, 3, 10, 0x3], [-1025, 5, 32, 0x1E, 'lower'], [2048, 5, 32, 0x1F]];
+ break;
+
+ case 8:
+ lines = [[-15, 8, 3, 0xFC], [-7, 9, 1, 0x1FC], [-5, 8, 1, 0xFD], [-3, 9, 0, 0x1FD], [-2, 7, 0, 0x7C], [-1, 4, 0, 0xA], [0, 2, 1, 0x0], [2, 5, 0, 0x1A], [3, 6, 0, 0x3A], [4, 3, 4, 0x4], [20, 6, 1, 0x3B], [22, 4, 4, 0xB], [38, 4, 5, 0xC], [70, 5, 6, 0x1B], [134, 5, 7, 0x1C], [262, 6, 7, 0x3C], [390, 7, 8, 0x7D], [646, 6, 10, 0x3D], [-16, 9, 32, 0x1FE, 'lower'], [1670, 9, 32, 0x1FF], [2, 0x1]];
+ break;
+
+ case 9:
+ lines = [[-31, 8, 4, 0xFC], [-15, 9, 2, 0x1FC], [-11, 8, 2, 0xFD], [-7, 9, 1, 0x1FD], [-5, 7, 1, 0x7C], [-3, 4, 1, 0xA], [-1, 3, 1, 0x2], [1, 3, 1, 0x3], [3, 5, 1, 0x1A], [5, 6, 1, 0x3A], [7, 3, 5, 0x4], [39, 6, 2, 0x3B], [43, 4, 5, 0xB], [75, 4, 6, 0xC], [139, 5, 7, 0x1B], [267, 5, 8, 0x1C], [523, 6, 8, 0x3C], [779, 7, 9, 0x7D], [1291, 6, 11, 0x3D], [-32, 9, 32, 0x1FE, 'lower'], [3339, 9, 32, 0x1FF], [2, 0x0]];
+ break;
+
+ case 10:
+ lines = [[-21, 7, 4, 0x7A], [-5, 8, 0, 0xFC], [-4, 7, 0, 0x7B], [-3, 5, 0, 0x18], [-2, 2, 2, 0x0], [2, 5, 0, 0x19], [3, 6, 0, 0x36], [4, 7, 0, 0x7C], [5, 8, 0, 0xFD], [6, 2, 6, 0x1], [70, 5, 5, 0x1A], [102, 6, 5, 0x37], [134, 6, 6, 0x38], [198, 6, 7, 0x39], [326, 6, 8, 0x3A], [582, 6, 9, 0x3B], [1094, 6, 10, 0x3C], [2118, 7, 11, 0x7D], [-22, 8, 32, 0xFE, 'lower'], [4166, 8, 32, 0xFF], [2, 0x2]];
+ break;
+
+ case 11:
+ lines = [[1, 1, 0, 0x0], [2, 2, 1, 0x2], [4, 4, 0, 0xC], [5, 4, 1, 0xD], [7, 5, 1, 0x1C], [9, 5, 2, 0x1D], [13, 6, 2, 0x3C], [17, 7, 2, 0x7A], [21, 7, 3, 0x7B], [29, 7, 4, 0x7C], [45, 7, 5, 0x7D], [77, 7, 6, 0x7E], [141, 7, 32, 0x7F]];
+ break;
+
+ case 12:
+ lines = [[1, 1, 0, 0x0], [2, 2, 0, 0x2], [3, 3, 1, 0x6], [5, 5, 0, 0x1C], [6, 5, 1, 0x1D], [8, 6, 1, 0x3C], [10, 7, 0, 0x7A], [11, 7, 1, 0x7B], [13, 7, 2, 0x7C], [17, 7, 3, 0x7D], [25, 7, 4, 0x7E], [41, 8, 5, 0xFE], [73, 8, 32, 0xFF]];
+ break;
+
+ case 13:
+ lines = [[1, 1, 0, 0x0], [2, 3, 0, 0x4], [3, 4, 0, 0xC], [4, 5, 0, 0x1C], [5, 4, 1, 0xD], [7, 3, 3, 0x5], [15, 6, 1, 0x3A], [17, 6, 2, 0x3B], [21, 6, 3, 0x3C], [29, 6, 4, 0x3D], [45, 6, 5, 0x3E], [77, 7, 6, 0x7E], [141, 7, 32, 0x7F]];
+ break;
+
+ case 14:
+ lines = [[-2, 3, 0, 0x4], [-1, 3, 0, 0x5], [0, 1, 0, 0x0], [1, 3, 0, 0x6], [2, 3, 0, 0x7]];
+ break;
+
+ case 15:
+ lines = [[-24, 7, 4, 0x7C], [-8, 6, 2, 0x3C], [-4, 5, 1, 0x1C], [-2, 4, 0, 0xC], [-1, 3, 0, 0x4], [0, 1, 0, 0x0], [1, 3, 0, 0x5], [2, 4, 0, 0xD], [3, 5, 1, 0x1D], [5, 6, 2, 0x3D], [9, 7, 4, 0x7D], [-25, 7, 32, 0x7E, 'lower'], [25, 7, 32, 0x7F]];
+ break;
+
+ default:
+ throw new Jbig2Error("standard table B.".concat(number, " does not exist"));
+ }
+
+ var length = lines.length,
+ i;
+
+ for (i = 0; i < length; i++) {
+ lines[i] = new HuffmanLine(lines[i]);
+ }
+
+ table = new HuffmanTable(lines, true);
+ standardTablesCache[number] = table;
+ return table;
+ }
+
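+ // MSB-first bit reader over a slice of data, also usable as a byte source via next().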
+ function Reader(data, start, end) {
+ this.data = data;
+ this.start = start;
+ this.end = end;
+ this.position = start;
+ this.shift = -1;
+ this.currentByte = 0;
+ }
+
+ Reader.prototype = {
+ readBit: function readBit() {
+ if (this.shift < 0) {
+ if (this.position >= this.end) {
+ throw new Jbig2Error('end of data while reading bit');
+ }
+
+ this.currentByte = this.data[this.position++];
+ this.shift = 7;
+ }
+
+ var bit = this.currentByte >> this.shift & 1;
+ this.shift--;
+ return bit;
+ },
+ readBits: function readBits(numBits) {
+ var result = 0,
+ i;
+
+ for (i = numBits - 1; i >= 0; i--) {
+ result |= this.readBit() << i;
+ }
+
+ return result;
+ },
+ byteAlign: function byteAlign() {
+ this.shift = -1;
+ },
+ next: function next() {
+ if (this.position >= this.end) {
+ return -1;
+ }
+
+ return this.data[this.position++];
+ }
+ };
+
+ function getCustomHuffmanTable(index, referredTo, customTables) {
+ var currentIndex = 0,
+ i,
+ ii = referredTo.length,
+ table;
+
+ for (i = 0; i < ii; i++) {
+ table = customTables[referredTo[i]];
+
+ if (table) {
+ if (index === currentIndex) {
+ return table;
+ }
+
+ currentIndex++;
+ }
+ }
+
+ throw new Jbig2Error('can\'t find custom Huffman table');
+ }
+
+ function getTextRegionHuffmanTables(textRegion, referredTo, customTables, numberOfSymbols, reader) {
+ var codes = [],
+ i,
+ codeLength;
+
+ for (i = 0; i <= 34; i++) {
+ codeLength = reader.readBits(4);
+ codes.push(new HuffmanLine([i, codeLength, 0, 0]));
+ }
+
+ var runCodesTable = new HuffmanTable(codes, false);
+ codes.length = 0;
+
+ for (i = 0; i < numberOfSymbols;) {
+ codeLength = runCodesTable.decode(reader);
+
+ if (codeLength >= 32) {
+ var repeatedLength = void 0,
+ numberOfRepeats = void 0,
+ j = void 0;
+
+ switch (codeLength) {
+ case 32:
+ if (i === 0) {
+ throw new Jbig2Error('no previous value in symbol ID table');
+ }
+
+ numberOfRepeats = reader.readBits(2) + 3;
+ repeatedLength = codes[i - 1].prefixLength;
+ break;
+
+ case 33:
+ numberOfRepeats = reader.readBits(3) + 3;
+ repeatedLength = 0;
+ break;
+
+ case 34:
+ numberOfRepeats = reader.readBits(7) + 11;
+ repeatedLength = 0;
+ break;
+
+ default:
+ throw new Jbig2Error('invalid code length in symbol ID table');
+ }
+
+ for (j = 0; j < numberOfRepeats; j++) {
+ codes.push(new HuffmanLine([i, repeatedLength, 0, 0]));
+ i++;
+ }
+ } else {
+ codes.push(new HuffmanLine([i, codeLength, 0, 0]));
+ i++;
+ }
+ }
+
+ reader.byteAlign();
+ var symbolIDTable = new HuffmanTable(codes, false);
+ var customIndex = 0,
+ tableFirstS,
+ tableDeltaS,
+ tableDeltaT;
+
+ switch (textRegion.huffmanFS) {
+ case 0:
+ case 1:
+ tableFirstS = getStandardTable(textRegion.huffmanFS + 6);
+ break;
+
+ case 3:
+ tableFirstS = getCustomHuffmanTable(customIndex, referredTo, customTables);
+ customIndex++;
+ break;
+
+ default:
+ throw new Jbig2Error('invalid Huffman FS selector');
+ }
+
+ switch (textRegion.huffmanDS) {
+ case 0:
+ case 1:
+ case 2:
+ tableDeltaS = getStandardTable(textRegion.huffmanDS + 8);
+ break;
+
+ case 3:
+ tableDeltaS = getCustomHuffmanTable(customIndex, referredTo, customTables);
+ customIndex++;
+ break;
+
+ default:
+ throw new Jbig2Error('invalid Huffman DS selector');
+ }
+
+ switch (textRegion.huffmanDT) {
+ case 0:
+ case 1:
+ case 2:
+ tableDeltaT = getStandardTable(textRegion.huffmanDT + 11);
+ break;
+
+ case 3:
+ tableDeltaT = getCustomHuffmanTable(customIndex, referredTo, customTables);
+ customIndex++;
+ break;
+
+ default:
+ throw new Jbig2Error('invalid Huffman DT selector');
+ }
+
+ if (textRegion.refinement) {
+ throw new Jbig2Error('refinement with Huffman is not supported');
+ }
+
+ return {
+ symbolIDTable: symbolIDTable,
+ tableFirstS: tableFirstS,
+ tableDeltaS: tableDeltaS,
+ tableDeltaT: tableDeltaT
+ };
+ }
+
+ function getSymbolDictionaryHuffmanTables(dictionary, referredTo, customTables) {
+ var customIndex = 0,
+ tableDeltaHeight,
+ tableDeltaWidth;
+
+ switch (dictionary.huffmanDHSelector) {
+ case 0:
+ case 1:
+ tableDeltaHeight = getStandardTable(dictionary.huffmanDHSelector + 4);
+ break;
+
+ case 3:
+ tableDeltaHeight = getCustomHuffmanTable(customIndex, referredTo, customTables);
+ customIndex++;
+ break;
+
+ default:
+ throw new Jbig2Error('invalid Huffman DH selector');
+ }
+
+ switch (dictionary.huffmanDWSelector) {
+ case 0:
+ case 1:
+ tableDeltaWidth = getStandardTable(dictionary.huffmanDWSelector + 2);
+ break;
+
+ case 3:
+ tableDeltaWidth = getCustomHuffmanTable(customIndex, referredTo, customTables);
+ customIndex++;
+ break;
+
+ default:
+ throw new Jbig2Error('invalid Huffman DW selector');
+ }
+
+ var tableBitmapSize, tableAggregateInstances;
+
+ if (dictionary.bitmapSizeSelector) {
+ tableBitmapSize = getCustomHuffmanTable(customIndex, referredTo, customTables);
+ customIndex++;
+ } else {
+ tableBitmapSize = getStandardTable(1);
+ }
+
+ if (dictionary.aggregationInstancesSelector) {
+ tableAggregateInstances = getCustomHuffmanTable(customIndex, referredTo, customTables);
+ } else {
+ tableAggregateInstances = getStandardTable(1);
+ }
+
+ return {
+ tableDeltaHeight: tableDeltaHeight,
+ tableDeltaWidth: tableDeltaWidth,
+ tableBitmapSize: tableBitmapSize,
+ tableAggregateInstances: tableAggregateInstances
+ };
+ }
+
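+ // Reads an uncompressed generic region: one bit per pixel, with each row
+ // padded to a byte boundary.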
+ function readUncompressedBitmap(reader, width, height) {
+ var bitmap = [],
+ x,
+ y,
+ row;
+
+ for (y = 0; y < height; y++) {
+ row = new Uint8Array(width);
+ bitmap.push(row);
+
+ for (x = 0; x < width; x++) {
+ row[x] = reader.readBit();
+ }
+
+ reader.byteAlign();
+ }
+
+ return bitmap;
+ }
+
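+ // Decodes an MMR (CCITT G4) coded bitmap by running the shared CCITT fax
+ // decoder and unpacking its output bytes into one value per pixel. When
+ // endOfBlock is set, a few extra reads drain any trailing end-of-block
+ // codes.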
+ function decodeMMRBitmap(input, width, height, endOfBlock) {
+ var params = {
+ K: -1,
+ Columns: width,
+ Rows: height,
+ BlackIs1: true,
+ EndOfBlock: endOfBlock
+ };
+ var decoder = new _ccitt.CCITTFaxDecoder(input, params);
+ var bitmap = [],
+ x,
+ y,
+ row,
+ currentByte,
+ shift,
+ eof = false;
+
+ for (y = 0; y < height; y++) {
+ row = new Uint8Array(width);
+ bitmap.push(row);
+ shift = -1;
+
+ for (x = 0; x < width; x++) {
+ if (shift < 0) {
+ currentByte = decoder.readNextChar();
+
+ if (currentByte === -1) {
+ currentByte = 0;
+ eof = true;
+ }
+
+ shift = 7;
+ }
+
+ row[x] = currentByte >> shift & 1;
+ shift--;
+ }
+ }
+
+ if (endOfBlock && !eof) {
+ var lookForEOFLimit = 5;
+
+ for (var i = 0; i < lookForEOFLimit; i++) {
+ if (decoder.readNextChar() === -1) {
+ break;
+ }
+ }
+ }
+
+ return bitmap;
+ }
+
+ function Jbig2Image() {}
+
+ Jbig2Image.prototype = {
+ parseChunks: function parseChunks(chunks) {
+ return parseJbig2Chunks(chunks);
+ },
+ parse: function parse(data) {
+ var _parseJbig = parseJbig2(data),
+ imgData = _parseJbig.imgData,
+ width = _parseJbig.width,
+ height = _parseJbig.height;
+
+ this.width = width;
+ this.height = height;
+ return imgData;
+ }
+ };
+ return Jbig2Image;
+}();
+
+exports.Jbig2Image = Jbig2Image;
+
+/***/ }),
+/* 163 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.ArithmeticDecoder = void 0;
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
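+// Probability state table of the MQ arithmetic coder as defined in the JBIG2
+// and JPEG 2000 specifications: for each state, the Qe probability estimate,
+// the next state after an MPS (nmps) or LPS (nlps), and whether the sense of
+// the MPS is switched.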
+var QeTable = [{
+ qe: 0x5601,
+ nmps: 1,
+ nlps: 1,
+ switchFlag: 1
+}, {
+ qe: 0x3401,
+ nmps: 2,
+ nlps: 6,
+ switchFlag: 0
+}, {
+ qe: 0x1801,
+ nmps: 3,
+ nlps: 9,
+ switchFlag: 0
+}, {
+ qe: 0x0AC1,
+ nmps: 4,
+ nlps: 12,
+ switchFlag: 0
+}, {
+ qe: 0x0521,
+ nmps: 5,
+ nlps: 29,
+ switchFlag: 0
+}, {
+ qe: 0x0221,
+ nmps: 38,
+ nlps: 33,
+ switchFlag: 0
+}, {
+ qe: 0x5601,
+ nmps: 7,
+ nlps: 6,
+ switchFlag: 1
+}, {
+ qe: 0x5401,
+ nmps: 8,
+ nlps: 14,
+ switchFlag: 0
+}, {
+ qe: 0x4801,
+ nmps: 9,
+ nlps: 14,
+ switchFlag: 0
+}, {
+ qe: 0x3801,
+ nmps: 10,
+ nlps: 14,
+ switchFlag: 0
+}, {
+ qe: 0x3001,
+ nmps: 11,
+ nlps: 17,
+ switchFlag: 0
+}, {
+ qe: 0x2401,
+ nmps: 12,
+ nlps: 18,
+ switchFlag: 0
+}, {
+ qe: 0x1C01,
+ nmps: 13,
+ nlps: 20,
+ switchFlag: 0
+}, {
+ qe: 0x1601,
+ nmps: 29,
+ nlps: 21,
+ switchFlag: 0
+}, {
+ qe: 0x5601,
+ nmps: 15,
+ nlps: 14,
+ switchFlag: 1
+}, {
+ qe: 0x5401,
+ nmps: 16,
+ nlps: 14,
+ switchFlag: 0
+}, {
+ qe: 0x5101,
+ nmps: 17,
+ nlps: 15,
+ switchFlag: 0
+}, {
+ qe: 0x4801,
+ nmps: 18,
+ nlps: 16,
+ switchFlag: 0
+}, {
+ qe: 0x3801,
+ nmps: 19,
+ nlps: 17,
+ switchFlag: 0
+}, {
+ qe: 0x3401,
+ nmps: 20,
+ nlps: 18,
+ switchFlag: 0
+}, {
+ qe: 0x3001,
+ nmps: 21,
+ nlps: 19,
+ switchFlag: 0
+}, {
+ qe: 0x2801,
+ nmps: 22,
+ nlps: 19,
+ switchFlag: 0
+}, {
+ qe: 0x2401,
+ nmps: 23,
+ nlps: 20,
+ switchFlag: 0
+}, {
+ qe: 0x2201,
+ nmps: 24,
+ nlps: 21,
+ switchFlag: 0
+}, {
+ qe: 0x1C01,
+ nmps: 25,
+ nlps: 22,
+ switchFlag: 0
+}, {
+ qe: 0x1801,
+ nmps: 26,
+ nlps: 23,
+ switchFlag: 0
+}, {
+ qe: 0x1601,
+ nmps: 27,
+ nlps: 24,
+ switchFlag: 0
+}, {
+ qe: 0x1401,
+ nmps: 28,
+ nlps: 25,
+ switchFlag: 0
+}, {
+ qe: 0x1201,
+ nmps: 29,
+ nlps: 26,
+ switchFlag: 0
+}, {
+ qe: 0x1101,
+ nmps: 30,
+ nlps: 27,
+ switchFlag: 0
+}, {
+ qe: 0x0AC1,
+ nmps: 31,
+ nlps: 28,
+ switchFlag: 0
+}, {
+ qe: 0x09C1,
+ nmps: 32,
+ nlps: 29,
+ switchFlag: 0
+}, {
+ qe: 0x08A1,
+ nmps: 33,
+ nlps: 30,
+ switchFlag: 0
+}, {
+ qe: 0x0521,
+ nmps: 34,
+ nlps: 31,
+ switchFlag: 0
+}, {
+ qe: 0x0441,
+ nmps: 35,
+ nlps: 32,
+ switchFlag: 0
+}, {
+ qe: 0x02A1,
+ nmps: 36,
+ nlps: 33,
+ switchFlag: 0
+}, {
+ qe: 0x0221,
+ nmps: 37,
+ nlps: 34,
+ switchFlag: 0
+}, {
+ qe: 0x0141,
+ nmps: 38,
+ nlps: 35,
+ switchFlag: 0
+}, {
+ qe: 0x0111,
+ nmps: 39,
+ nlps: 36,
+ switchFlag: 0
+}, {
+ qe: 0x0085,
+ nmps: 40,
+ nlps: 37,
+ switchFlag: 0
+}, {
+ qe: 0x0049,
+ nmps: 41,
+ nlps: 38,
+ switchFlag: 0
+}, {
+ qe: 0x0025,
+ nmps: 42,
+ nlps: 39,
+ switchFlag: 0
+}, {
+ qe: 0x0015,
+ nmps: 43,
+ nlps: 40,
+ switchFlag: 0
+}, {
+ qe: 0x0009,
+ nmps: 44,
+ nlps: 41,
+ switchFlag: 0
+}, {
+ qe: 0x0005,
+ nmps: 45,
+ nlps: 42,
+ switchFlag: 0
+}, {
+ qe: 0x0001,
+ nmps: 45,
+ nlps: 43,
+ switchFlag: 0
+}, {
+ qe: 0x5601,
+ nmps: 46,
+ nlps: 46,
+ switchFlag: 0
+}];
+
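+// MQ decoder shared by the JBIG2 and JPEG 2000 decoders. Each coding context
+// is one byte in a caller-supplied array, packed as (stateIndex << 1) | mps;
+// readBit() decodes a single decision for that context, renormalizes, and
+// stores the updated state back into the array.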
+var ArithmeticDecoder =
+/*#__PURE__*/
+function () {
+ function ArithmeticDecoder(data, start, end) {
+ _classCallCheck(this, ArithmeticDecoder);
+
+ this.data = data;
+ this.bp = start;
+ this.dataEnd = end;
+ this.chigh = data[start];
+ this.clow = 0;
+ this.byteIn();
+ this.chigh = this.chigh << 7 & 0xFFFF | this.clow >> 9 & 0x7F;
+ this.clow = this.clow << 7 & 0xFFFF;
+ this.ct -= 7;
+ this.a = 0x8000;
+ }
+
+ _createClass(ArithmeticDecoder, [{
+ key: "byteIn",
+ value: function byteIn() {
+ var data = this.data;
+ var bp = this.bp;
+
+ if (data[bp] === 0xFF) {
+ if (data[bp + 1] > 0x8F) {
+ this.clow += 0xFF00;
+ this.ct = 8;
+ } else {
+ bp++;
+ this.clow += data[bp] << 9;
+ this.ct = 7;
+ this.bp = bp;
+ }
+ } else {
+ bp++;
+ this.clow += bp < this.dataEnd ? data[bp] << 8 : 0xFF00;
+ this.ct = 8;
+ this.bp = bp;
+ }
+
+ if (this.clow > 0xFFFF) {
+ this.chigh += this.clow >> 16;
+ this.clow &= 0xFFFF;
+ }
+ }
+ }, {
+ key: "readBit",
+ value: function readBit(contexts, pos) {
+ var cx_index = contexts[pos] >> 1,
+ cx_mps = contexts[pos] & 1;
+ var qeTableIcx = QeTable[cx_index];
+ var qeIcx = qeTableIcx.qe;
+ var d;
+ var a = this.a - qeIcx;
+
+ if (this.chigh < qeIcx) {
+ if (a < qeIcx) {
+ a = qeIcx;
+ d = cx_mps;
+ cx_index = qeTableIcx.nmps;
+ } else {
+ a = qeIcx;
+ d = 1 ^ cx_mps;
+
+ if (qeTableIcx.switchFlag === 1) {
+ cx_mps = d;
+ }
+
+ cx_index = qeTableIcx.nlps;
+ }
+ } else {
+ this.chigh -= qeIcx;
+
+ if ((a & 0x8000) !== 0) {
+ this.a = a;
+ return cx_mps;
+ }
+
+ if (a < qeIcx) {
+ d = 1 ^ cx_mps;
+
+ if (qeTableIcx.switchFlag === 1) {
+ cx_mps = d;
+ }
+
+ cx_index = qeTableIcx.nlps;
+ } else {
+ d = cx_mps;
+ cx_index = qeTableIcx.nmps;
+ }
+ }
+
+ do {
+ if (this.ct === 0) {
+ this.byteIn();
+ }
+
+ a <<= 1;
+ this.chigh = this.chigh << 1 & 0xFFFF | this.clow >> 15 & 1;
+ this.clow = this.clow << 1 & 0xFFFF;
+ this.ct--;
+ } while ((a & 0x8000) === 0);
+
+ this.a = a;
+ contexts[pos] = cx_index << 1 | cx_mps;
+ return d;
+ }
+ }]);
+
+ return ArithmeticDecoder;
+}();
+
+exports.ArithmeticDecoder = ArithmeticDecoder;
+
+/***/ }),
+/* 164 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.JpegStream = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _stream = __w_pdfjs_require__(158);
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _jpg = __w_pdfjs_require__(165);
+
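+// A DecodeStream that skips any bytes preceding the SOI marker and then, on
+// the first readBlock() call, decodes the entire JPEG with JpegImage, taking
+// the image dictionary's Decode array and ColorTransform parameter into
+// account.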
+var JpegStream = function JpegStreamClosure() {
+ function JpegStream(stream, maybeLength, dict, params) {
+ var ch;
+
+ while ((ch = stream.getByte()) !== -1) {
+ if (ch === 0xFF) {
+ stream.skip(-1);
+ break;
+ }
+ }
+
+ this.stream = stream;
+ this.maybeLength = maybeLength;
+ this.dict = dict;
+ this.params = params;
+
+ _stream.DecodeStream.call(this, maybeLength);
+ }
+
+ JpegStream.prototype = Object.create(_stream.DecodeStream.prototype);
+ Object.defineProperty(JpegStream.prototype, 'bytes', {
+ get: function JpegStream_bytes() {
+ return (0, _util.shadow)(this, 'bytes', this.stream.getBytes(this.maybeLength));
+ },
+ configurable: true
+ });
+
+ JpegStream.prototype.ensureBuffer = function (requested) {};
+
+ JpegStream.prototype.readBlock = function () {
+ if (this.eof) {
+ return;
+ }
+
+ var jpegOptions = {
+ decodeTransform: undefined,
+ colorTransform: undefined
+ };
+ var decodeArr = this.dict.getArray('Decode', 'D');
+
+ if (this.forceRGB && Array.isArray(decodeArr)) {
+ var bitsPerComponent = this.dict.get('BitsPerComponent') || 8;
+ var decodeArrLength = decodeArr.length;
+ var transform = new Int32Array(decodeArrLength);
+ var transformNeeded = false;
+ var maxValue = (1 << bitsPerComponent) - 1;
+
+ for (var i = 0; i < decodeArrLength; i += 2) {
+ transform[i] = (decodeArr[i + 1] - decodeArr[i]) * 256 | 0;
+ transform[i + 1] = decodeArr[i] * maxValue | 0;
+
+ if (transform[i] !== 256 || transform[i + 1] !== 0) {
+ transformNeeded = true;
+ }
+ }
+
+ if (transformNeeded) {
+ jpegOptions.decodeTransform = transform;
+ }
+ }
+
+ if ((0, _primitives.isDict)(this.params)) {
+ var colorTransform = this.params.get('ColorTransform');
+
+ if (Number.isInteger(colorTransform)) {
+ jpegOptions.colorTransform = colorTransform;
+ }
+ }
+
+ var jpegImage = new _jpg.JpegImage(jpegOptions);
+ jpegImage.parse(this.bytes);
+ var data = jpegImage.getData({
+ width: this.drawWidth,
+ height: this.drawHeight,
+ forceRGB: this.forceRGB,
+ isSourcePDF: true
+ });
+ this.buffer = data;
+ this.bufferLength = data.length;
+ this.eof = true;
+ };
+
+ JpegStream.prototype.getIR = function () {
+ var forceDataSchema = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
+ return (0, _util.createObjectURL)(this.bytes, 'image/jpeg', forceDataSchema);
+ };
+
+ return JpegStream;
+}();
+
+exports.JpegStream = JpegStream;
+
+/***/ }),
+/* 165 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.JpegImage = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+var JpegError = function JpegErrorClosure() {
+ function JpegError(msg) {
+ this.message = 'JPEG error: ' + msg;
+ }
+
+ JpegError.prototype = new Error();
+ JpegError.prototype.name = 'JpegError';
+ JpegError.constructor = JpegError;
+ return JpegError;
+}();
+
+var DNLMarkerError = function DNLMarkerErrorClosure() {
+ function DNLMarkerError(message, scanLines) {
+ this.message = message;
+ this.scanLines = scanLines;
+ }
+
+ DNLMarkerError.prototype = new Error();
+ DNLMarkerError.prototype.name = 'DNLMarkerError';
+ DNLMarkerError.constructor = DNLMarkerError;
+ return DNLMarkerError;
+}();
+
+var EOIMarkerError = function EOIMarkerErrorClosure() {
+ function EOIMarkerError(message) {
+ this.message = message;
+ }
+
+ EOIMarkerError.prototype = new Error();
+ EOIMarkerError.prototype.name = 'EOIMarkerError';
+ EOIMarkerError.constructor = EOIMarkerError;
+ return EOIMarkerError;
+}();
+
+var JpegImage = function JpegImageClosure() {
+ var dctZigZag = new Uint8Array([0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5, 12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28, 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51, 58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63]);
+ var dctCos1 = 4017;
+ var dctSin1 = 799;
+ var dctCos3 = 3406;
+ var dctSin3 = 2276;
+ var dctCos6 = 1567;
+ var dctSin6 = 3784;
+ var dctSqrt2 = 5793;
+ var dctSqrt1d2 = 2896;
+
+ function JpegImage() {
+ var _ref = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
+ _ref$decodeTransform = _ref.decodeTransform,
+ decodeTransform = _ref$decodeTransform === void 0 ? null : _ref$decodeTransform,
+ _ref$colorTransform = _ref.colorTransform,
+ colorTransform = _ref$colorTransform === void 0 ? -1 : _ref$colorTransform;
+
+ this._decodeTransform = decodeTransform;
+ this._colorTransform = colorTransform;
+ }
+
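+ // Builds a Huffman decoding tree from a DHT segment: codeLengths holds the
+ // number of codes of each length 1..16 and values lists the symbols in code
+ // order. The result is a tree of nested children arrays indexed by
+ // successive bits.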
+ function buildHuffmanTable(codeLengths, values) {
+ var k = 0,
+ code = [],
+ i,
+ j,
+ length = 16;
+
+ while (length > 0 && !codeLengths[length - 1]) {
+ length--;
+ }
+
+ code.push({
+ children: [],
+ index: 0
+ });
+ var p = code[0],
+ q;
+
+ for (i = 0; i < length; i++) {
+ for (j = 0; j < codeLengths[i]; j++) {
+ p = code.pop();
+ p.children[p.index] = values[k];
+
+ while (p.index > 0) {
+ p = code.pop();
+ }
+
+ p.index++;
+ code.push(p);
+
+ while (code.length <= i) {
+ code.push(q = {
+ children: [],
+ index: 0
+ });
+ p.children[p.index] = q.children;
+ p = q;
+ }
+
+ k++;
+ }
+
+ if (i + 1 < length) {
+ code.push(q = {
+ children: [],
+ index: 0
+ });
+ p.children[p.index] = q.children;
+ p = q;
+ }
+ }
+
+ return code[0].children;
+ }
+
+ function getBlockBufferOffset(component, row, col) {
+ return 64 * ((component.blocksPerLine + 1) * row + col);
+ }
+
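+ // Decodes one scan (SOS segment). Baseline scans use decodeBaseline; for
+ // progressive scans the DC/AC first/successive-approximation passes are
+ // selected from spectralStart and successivePrev. The DC predictors and the
+ // EOB run are reset at every restart interval.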
+ function decodeScan(data, offset, frame, components, resetInterval, spectralStart, spectralEnd, successivePrev, successive) {
+ var parseDNLMarker = arguments.length > 9 && arguments[9] !== undefined ? arguments[9] : false;
+ var mcusPerLine = frame.mcusPerLine;
+ var progressive = frame.progressive;
+ var startOffset = offset,
+ bitsData = 0,
+ bitsCount = 0;
+
+ function readBit() {
+ if (bitsCount > 0) {
+ bitsCount--;
+ return bitsData >> bitsCount & 1;
+ }
+
+ bitsData = data[offset++];
+
+ if (bitsData === 0xFF) {
+ var nextByte = data[offset++];
+
+ if (nextByte) {
+ if (nextByte === 0xDC && parseDNLMarker) {
+ offset += 2;
+ var scanLines = data[offset++] << 8 | data[offset++];
+
+ if (scanLines > 0 && scanLines !== frame.scanLines) {
+ throw new DNLMarkerError('Found DNL marker (0xFFDC) while parsing scan data', scanLines);
+ }
+ } else if (nextByte === 0xD9) {
+ throw new EOIMarkerError('Found EOI marker (0xFFD9) while parsing scan data');
+ }
+
+ throw new JpegError("unexpected marker ".concat((bitsData << 8 | nextByte).toString(16)));
+ }
+ }
+
+ bitsCount = 7;
+ return bitsData >>> 7;
+ }
+
+ function decodeHuffman(tree) {
+ var node = tree;
+
+ while (true) {
+ node = node[readBit()];
+
+ if (typeof node === 'number') {
+ return node;
+ }
+
+ if (_typeof(node) !== 'object') {
+ throw new JpegError('invalid huffman sequence');
+ }
+ }
+ }
+
+ function receive(length) {
+ var n = 0;
+
+ while (length > 0) {
+ n = n << 1 | readBit();
+ length--;
+ }
+
+ return n;
+ }
+
+ function receiveAndExtend(length) {
+ if (length === 1) {
+ return readBit() === 1 ? 1 : -1;
+ }
+
+ var n = receive(length);
+
+ if (n >= 1 << length - 1) {
+ return n;
+ }
+
+ return n + (-1 << length) + 1;
+ }
+
+ function decodeBaseline(component, offset) {
+ var t = decodeHuffman(component.huffmanTableDC);
+ var diff = t === 0 ? 0 : receiveAndExtend(t);
+ component.blockData[offset] = component.pred += diff;
+ var k = 1;
+
+ while (k < 64) {
+ var rs = decodeHuffman(component.huffmanTableAC);
+ var s = rs & 15,
+ r = rs >> 4;
+
+ if (s === 0) {
+ if (r < 15) {
+ break;
+ }
+
+ k += 16;
+ continue;
+ }
+
+ k += r;
+ var z = dctZigZag[k];
+ component.blockData[offset + z] = receiveAndExtend(s);
+ k++;
+ }
+ }
+
+ function decodeDCFirst(component, offset) {
+ var t = decodeHuffman(component.huffmanTableDC);
+ var diff = t === 0 ? 0 : receiveAndExtend(t) << successive;
+ component.blockData[offset] = component.pred += diff;
+ }
+
+ function decodeDCSuccessive(component, offset) {
+ component.blockData[offset] |= readBit() << successive;
+ }
+
+ var eobrun = 0;
+
+ function decodeACFirst(component, offset) {
+ if (eobrun > 0) {
+ eobrun--;
+ return;
+ }
+
+ var k = spectralStart,
+ e = spectralEnd;
+
+ while (k <= e) {
+ var rs = decodeHuffman(component.huffmanTableAC);
+ var s = rs & 15,
+ r = rs >> 4;
+
+ if (s === 0) {
+ if (r < 15) {
+ eobrun = receive(r) + (1 << r) - 1;
+ break;
+ }
+
+ k += 16;
+ continue;
+ }
+
+ k += r;
+ var z = dctZigZag[k];
+ component.blockData[offset + z] = receiveAndExtend(s) * (1 << successive);
+ k++;
+ }
+ }
+
+ var successiveACState = 0,
+ successiveACNextValue;
+
+ function decodeACSuccessive(component, offset) {
+ var k = spectralStart;
+ var e = spectralEnd;
+ var r = 0;
+ var s;
+ var rs;
+
+ while (k <= e) {
+ var offsetZ = offset + dctZigZag[k];
+ var sign = component.blockData[offsetZ] < 0 ? -1 : 1;
+
+ switch (successiveACState) {
+ case 0:
+ rs = decodeHuffman(component.huffmanTableAC);
+ s = rs & 15;
+ r = rs >> 4;
+
+ if (s === 0) {
+ if (r < 15) {
+ eobrun = receive(r) + (1 << r);
+ successiveACState = 4;
+ } else {
+ r = 16;
+ successiveACState = 1;
+ }
+ } else {
+ if (s !== 1) {
+ throw new JpegError('invalid ACn encoding');
+ }
+
+ successiveACNextValue = receiveAndExtend(s);
+ successiveACState = r ? 2 : 3;
+ }
+
+ continue;
+
+ case 1:
+ case 2:
+ if (component.blockData[offsetZ]) {
+ component.blockData[offsetZ] += sign * (readBit() << successive);
+ } else {
+ r--;
+
+ if (r === 0) {
+ successiveACState = successiveACState === 2 ? 3 : 0;
+ }
+ }
+
+ break;
+
+ case 3:
+ if (component.blockData[offsetZ]) {
+ component.blockData[offsetZ] += sign * (readBit() << successive);
+ } else {
+ component.blockData[offsetZ] = successiveACNextValue << successive;
+ successiveACState = 0;
+ }
+
+ break;
+
+ case 4:
+ if (component.blockData[offsetZ]) {
+ component.blockData[offsetZ] += sign * (readBit() << successive);
+ }
+
+ break;
+ }
+
+ k++;
+ }
+
+ if (successiveACState === 4) {
+ eobrun--;
+
+ if (eobrun === 0) {
+ successiveACState = 0;
+ }
+ }
+ }
+
+ function decodeMcu(component, decode, mcu, row, col) {
+ var mcuRow = mcu / mcusPerLine | 0;
+ var mcuCol = mcu % mcusPerLine;
+ var blockRow = mcuRow * component.v + row;
+ var blockCol = mcuCol * component.h + col;
+ var offset = getBlockBufferOffset(component, blockRow, blockCol);
+ decode(component, offset);
+ }
+
+ function decodeBlock(component, decode, mcu) {
+ var blockRow = mcu / component.blocksPerLine | 0;
+ var blockCol = mcu % component.blocksPerLine;
+ var offset = getBlockBufferOffset(component, blockRow, blockCol);
+ decode(component, offset);
+ }
+
+ var componentsLength = components.length;
+ var component, i, j, k, n;
+ var decodeFn;
+
+ if (progressive) {
+ if (spectralStart === 0) {
+ decodeFn = successivePrev === 0 ? decodeDCFirst : decodeDCSuccessive;
+ } else {
+ decodeFn = successivePrev === 0 ? decodeACFirst : decodeACSuccessive;
+ }
+ } else {
+ decodeFn = decodeBaseline;
+ }
+
+ var mcu = 0,
+ fileMarker;
+ var mcuExpected;
+
+ if (componentsLength === 1) {
+ mcuExpected = components[0].blocksPerLine * components[0].blocksPerColumn;
+ } else {
+ mcuExpected = mcusPerLine * frame.mcusPerColumn;
+ }
+
+ var h, v;
+
+ while (mcu < mcuExpected) {
+ var mcuToRead = resetInterval ? Math.min(mcuExpected - mcu, resetInterval) : mcuExpected;
+
+ for (i = 0; i < componentsLength; i++) {
+ components[i].pred = 0;
+ }
+
+ eobrun = 0;
+
+ if (componentsLength === 1) {
+ component = components[0];
+
+ for (n = 0; n < mcuToRead; n++) {
+ decodeBlock(component, decodeFn, mcu);
+ mcu++;
+ }
+ } else {
+ for (n = 0; n < mcuToRead; n++) {
+ for (i = 0; i < componentsLength; i++) {
+ component = components[i];
+ h = component.h;
+ v = component.v;
+
+ for (j = 0; j < v; j++) {
+ for (k = 0; k < h; k++) {
+ decodeMcu(component, decodeFn, mcu, j, k);
+ }
+ }
+ }
+
+ mcu++;
+ }
+ }
+
+ bitsCount = 0;
+ fileMarker = findNextFileMarker(data, offset);
+
+ if (fileMarker && fileMarker.invalid) {
+ (0, _util.warn)('decodeScan - unexpected MCU data, current marker is: ' + fileMarker.invalid);
+ offset = fileMarker.offset;
+ }
+
+ var marker = fileMarker && fileMarker.marker;
+
+ if (!marker || marker <= 0xFF00) {
+ throw new JpegError('marker was not found');
+ }
+
+ if (marker >= 0xFFD0 && marker <= 0xFFD7) {
+ offset += 2;
+ } else {
+ break;
+ }
+ }
+
+ fileMarker = findNextFileMarker(data, offset);
+
+ if (fileMarker && fileMarker.invalid) {
+ (0, _util.warn)('decodeScan - unexpected Scan data, current marker is: ' + fileMarker.invalid);
+ offset = fileMarker.offset;
+ }
+
+ return offset - startOffset;
+ }
+
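+ // Dequantizes one 8x8 block and applies a fixed-point inverse DCT, first on
+ // the rows and then on the columns, finally clamping the samples to the
+ // 0..255 range and writing them back into component.blockData.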
+ function quantizeAndInverse(component, blockBufferOffset, p) {
+ var qt = component.quantizationTable,
+ blockData = component.blockData;
+ var v0, v1, v2, v3, v4, v5, v6, v7;
+ var p0, p1, p2, p3, p4, p5, p6, p7;
+ var t;
+
+ if (!qt) {
+ throw new JpegError('missing required Quantization Table.');
+ }
+
+ for (var row = 0; row < 64; row += 8) {
+ p0 = blockData[blockBufferOffset + row];
+ p1 = blockData[blockBufferOffset + row + 1];
+ p2 = blockData[blockBufferOffset + row + 2];
+ p3 = blockData[blockBufferOffset + row + 3];
+ p4 = blockData[blockBufferOffset + row + 4];
+ p5 = blockData[blockBufferOffset + row + 5];
+ p6 = blockData[blockBufferOffset + row + 6];
+ p7 = blockData[blockBufferOffset + row + 7];
+ p0 *= qt[row];
+
+ if ((p1 | p2 | p3 | p4 | p5 | p6 | p7) === 0) {
+ t = dctSqrt2 * p0 + 512 >> 10;
+ p[row] = t;
+ p[row + 1] = t;
+ p[row + 2] = t;
+ p[row + 3] = t;
+ p[row + 4] = t;
+ p[row + 5] = t;
+ p[row + 6] = t;
+ p[row + 7] = t;
+ continue;
+ }
+
+ p1 *= qt[row + 1];
+ p2 *= qt[row + 2];
+ p3 *= qt[row + 3];
+ p4 *= qt[row + 4];
+ p5 *= qt[row + 5];
+ p6 *= qt[row + 6];
+ p7 *= qt[row + 7];
+ v0 = dctSqrt2 * p0 + 128 >> 8;
+ v1 = dctSqrt2 * p4 + 128 >> 8;
+ v2 = p2;
+ v3 = p6;
+ v4 = dctSqrt1d2 * (p1 - p7) + 128 >> 8;
+ v7 = dctSqrt1d2 * (p1 + p7) + 128 >> 8;
+ v5 = p3 << 4;
+ v6 = p5 << 4;
+ v0 = v0 + v1 + 1 >> 1;
+ v1 = v0 - v1;
+ t = v2 * dctSin6 + v3 * dctCos6 + 128 >> 8;
+ v2 = v2 * dctCos6 - v3 * dctSin6 + 128 >> 8;
+ v3 = t;
+ v4 = v4 + v6 + 1 >> 1;
+ v6 = v4 - v6;
+ v7 = v7 + v5 + 1 >> 1;
+ v5 = v7 - v5;
+ v0 = v0 + v3 + 1 >> 1;
+ v3 = v0 - v3;
+ v1 = v1 + v2 + 1 >> 1;
+ v2 = v1 - v2;
+ t = v4 * dctSin3 + v7 * dctCos3 + 2048 >> 12;
+ v4 = v4 * dctCos3 - v7 * dctSin3 + 2048 >> 12;
+ v7 = t;
+ t = v5 * dctSin1 + v6 * dctCos1 + 2048 >> 12;
+ v5 = v5 * dctCos1 - v6 * dctSin1 + 2048 >> 12;
+ v6 = t;
+ p[row] = v0 + v7;
+ p[row + 7] = v0 - v7;
+ p[row + 1] = v1 + v6;
+ p[row + 6] = v1 - v6;
+ p[row + 2] = v2 + v5;
+ p[row + 5] = v2 - v5;
+ p[row + 3] = v3 + v4;
+ p[row + 4] = v3 - v4;
+ }
+
+ for (var col = 0; col < 8; ++col) {
+ p0 = p[col];
+ p1 = p[col + 8];
+ p2 = p[col + 16];
+ p3 = p[col + 24];
+ p4 = p[col + 32];
+ p5 = p[col + 40];
+ p6 = p[col + 48];
+ p7 = p[col + 56];
+
+ if ((p1 | p2 | p3 | p4 | p5 | p6 | p7) === 0) {
+ t = dctSqrt2 * p0 + 8192 >> 14;
+ t = t < -2040 ? 0 : t >= 2024 ? 255 : t + 2056 >> 4;
+ blockData[blockBufferOffset + col] = t;
+ blockData[blockBufferOffset + col + 8] = t;
+ blockData[blockBufferOffset + col + 16] = t;
+ blockData[blockBufferOffset + col + 24] = t;
+ blockData[blockBufferOffset + col + 32] = t;
+ blockData[blockBufferOffset + col + 40] = t;
+ blockData[blockBufferOffset + col + 48] = t;
+ blockData[blockBufferOffset + col + 56] = t;
+ continue;
+ }
+
+ v0 = dctSqrt2 * p0 + 2048 >> 12;
+ v1 = dctSqrt2 * p4 + 2048 >> 12;
+ v2 = p2;
+ v3 = p6;
+ v4 = dctSqrt1d2 * (p1 - p7) + 2048 >> 12;
+ v7 = dctSqrt1d2 * (p1 + p7) + 2048 >> 12;
+ v5 = p3;
+ v6 = p5;
+ v0 = (v0 + v1 + 1 >> 1) + 4112;
+ v1 = v0 - v1;
+ t = v2 * dctSin6 + v3 * dctCos6 + 2048 >> 12;
+ v2 = v2 * dctCos6 - v3 * dctSin6 + 2048 >> 12;
+ v3 = t;
+ v4 = v4 + v6 + 1 >> 1;
+ v6 = v4 - v6;
+ v7 = v7 + v5 + 1 >> 1;
+ v5 = v7 - v5;
+ v0 = v0 + v3 + 1 >> 1;
+ v3 = v0 - v3;
+ v1 = v1 + v2 + 1 >> 1;
+ v2 = v1 - v2;
+ t = v4 * dctSin3 + v7 * dctCos3 + 2048 >> 12;
+ v4 = v4 * dctCos3 - v7 * dctSin3 + 2048 >> 12;
+ v7 = t;
+ t = v5 * dctSin1 + v6 * dctCos1 + 2048 >> 12;
+ v5 = v5 * dctCos1 - v6 * dctSin1 + 2048 >> 12;
+ v6 = t;
+ p0 = v0 + v7;
+ p7 = v0 - v7;
+ p1 = v1 + v6;
+ p6 = v1 - v6;
+ p2 = v2 + v5;
+ p5 = v2 - v5;
+ p3 = v3 + v4;
+ p4 = v3 - v4;
+ p0 = p0 < 16 ? 0 : p0 >= 4080 ? 255 : p0 >> 4;
+ p1 = p1 < 16 ? 0 : p1 >= 4080 ? 255 : p1 >> 4;
+ p2 = p2 < 16 ? 0 : p2 >= 4080 ? 255 : p2 >> 4;
+ p3 = p3 < 16 ? 0 : p3 >= 4080 ? 255 : p3 >> 4;
+ p4 = p4 < 16 ? 0 : p4 >= 4080 ? 255 : p4 >> 4;
+ p5 = p5 < 16 ? 0 : p5 >= 4080 ? 255 : p5 >> 4;
+ p6 = p6 < 16 ? 0 : p6 >= 4080 ? 255 : p6 >> 4;
+ p7 = p7 < 16 ? 0 : p7 >= 4080 ? 255 : p7 >> 4;
+ blockData[blockBufferOffset + col] = p0;
+ blockData[blockBufferOffset + col + 8] = p1;
+ blockData[blockBufferOffset + col + 16] = p2;
+ blockData[blockBufferOffset + col + 24] = p3;
+ blockData[blockBufferOffset + col + 32] = p4;
+ blockData[blockBufferOffset + col + 40] = p5;
+ blockData[blockBufferOffset + col + 48] = p6;
+ blockData[blockBufferOffset + col + 56] = p7;
+ }
+ }
+
+ function buildComponentData(frame, component) {
+ var blocksPerLine = component.blocksPerLine;
+ var blocksPerColumn = component.blocksPerColumn;
+ var computationBuffer = new Int16Array(64);
+
+ for (var blockRow = 0; blockRow < blocksPerColumn; blockRow++) {
+ for (var blockCol = 0; blockCol < blocksPerLine; blockCol++) {
+ var offset = getBlockBufferOffset(component, blockRow, blockCol);
+ quantizeAndInverse(component, offset, computationBuffer);
+ }
+ }
+
+ return component.blockData;
+ }
+
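+ // Scans forward from currentPos (or startPos, if smaller) for the next
+ // marker in the valid 0xFFC0..0xFFFE range, reporting the marker found and
+ // whether the bytes at currentPos themselves formed a valid marker.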
+ function findNextFileMarker(data, currentPos) {
+ var startPos = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : currentPos;
+
+ function peekUint16(pos) {
+ return data[pos] << 8 | data[pos + 1];
+ }
+
+ var maxPos = data.length - 1;
+ var newPos = startPos < currentPos ? startPos : currentPos;
+
+ if (currentPos >= maxPos) {
+ return null;
+ }
+
+ var currentMarker = peekUint16(currentPos);
+
+ if (currentMarker >= 0xFFC0 && currentMarker <= 0xFFFE) {
+ return {
+ invalid: null,
+ marker: currentMarker,
+ offset: currentPos
+ };
+ }
+
+ var newMarker = peekUint16(newPos);
+
+ while (!(newMarker >= 0xFFC0 && newMarker <= 0xFFFE)) {
+ if (++newPos >= maxPos) {
+ return null;
+ }
+
+ newMarker = peekUint16(newPos);
+ }
+
+ return {
+ invalid: currentMarker.toString(16),
+ marker: newMarker,
+ offset: newPos
+ };
+ }
+
+ JpegImage.prototype = {
+ parse: function parse(data) {
+ var _ref2 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ _ref2$dnlScanLines = _ref2.dnlScanLines,
+ dnlScanLines = _ref2$dnlScanLines === void 0 ? null : _ref2$dnlScanLines;
+
+ function readUint16() {
+ var value = data[offset] << 8 | data[offset + 1];
+ offset += 2;
+ return value;
+ }
+
+ function readDataBlock() {
+ var length = readUint16();
+ var endOffset = offset + length - 2;
+ var fileMarker = findNextFileMarker(data, endOffset, offset);
+
+ if (fileMarker && fileMarker.invalid) {
+ (0, _util.warn)('readDataBlock - incorrect length, current marker is: ' + fileMarker.invalid);
+ endOffset = fileMarker.offset;
+ }
+
+ var array = data.subarray(offset, endOffset);
+ offset += array.length;
+ return array;
+ }
+
+ function prepareComponents(frame) {
+ var mcusPerLine = Math.ceil(frame.samplesPerLine / 8 / frame.maxH);
+ var mcusPerColumn = Math.ceil(frame.scanLines / 8 / frame.maxV);
+
+ for (var i = 0; i < frame.components.length; i++) {
+ component = frame.components[i];
+ var blocksPerLine = Math.ceil(Math.ceil(frame.samplesPerLine / 8) * component.h / frame.maxH);
+ var blocksPerColumn = Math.ceil(Math.ceil(frame.scanLines / 8) * component.v / frame.maxV);
+ var blocksPerLineForMcu = mcusPerLine * component.h;
+ var blocksPerColumnForMcu = mcusPerColumn * component.v;
+ var blocksBufferSize = 64 * blocksPerColumnForMcu * (blocksPerLineForMcu + 1);
+ component.blockData = new Int16Array(blocksBufferSize);
+ component.blocksPerLine = blocksPerLine;
+ component.blocksPerColumn = blocksPerColumn;
+ }
+
+ frame.mcusPerLine = mcusPerLine;
+ frame.mcusPerColumn = mcusPerColumn;
+ }
+
+ var offset = 0;
+ var jfif = null;
+ var adobe = null;
+ var frame, resetInterval;
+ var numSOSMarkers = 0;
+ var quantizationTables = [];
+ var huffmanTablesAC = [],
+ huffmanTablesDC = [];
+ var fileMarker = readUint16();
+
+ if (fileMarker !== 0xFFD8) {
+ throw new JpegError('SOI not found');
+ }
+
+ fileMarker = readUint16();
+
+ markerLoop: while (fileMarker !== 0xFFD9) {
+ var i, j, l;
+
+ switch (fileMarker) {
+ case 0xFFE0:
+ case 0xFFE1:
+ case 0xFFE2:
+ case 0xFFE3:
+ case 0xFFE4:
+ case 0xFFE5:
+ case 0xFFE6:
+ case 0xFFE7:
+ case 0xFFE8:
+ case 0xFFE9:
+ case 0xFFEA:
+ case 0xFFEB:
+ case 0xFFEC:
+ case 0xFFED:
+ case 0xFFEE:
+ case 0xFFEF:
+ case 0xFFFE:
+ var appData = readDataBlock();
+
+ if (fileMarker === 0xFFE0) {
+ if (appData[0] === 0x4A && appData[1] === 0x46 && appData[2] === 0x49 && appData[3] === 0x46 && appData[4] === 0) {
+ jfif = {
+ version: {
+ major: appData[5],
+ minor: appData[6]
+ },
+ densityUnits: appData[7],
+ xDensity: appData[8] << 8 | appData[9],
+ yDensity: appData[10] << 8 | appData[11],
+ thumbWidth: appData[12],
+ thumbHeight: appData[13],
+ thumbData: appData.subarray(14, 14 + 3 * appData[12] * appData[13])
+ };
+ }
+ }
+
+ if (fileMarker === 0xFFEE) {
+ if (appData[0] === 0x41 && appData[1] === 0x64 && appData[2] === 0x6F && appData[3] === 0x62 && appData[4] === 0x65) {
+ adobe = {
+ version: appData[5] << 8 | appData[6],
+ flags0: appData[7] << 8 | appData[8],
+ flags1: appData[9] << 8 | appData[10],
+ transformCode: appData[11]
+ };
+ }
+ }
+
+ break;
+
+ case 0xFFDB:
+ var quantizationTablesLength = readUint16();
+ var quantizationTablesEnd = quantizationTablesLength + offset - 2;
+ var z;
+
+ while (offset < quantizationTablesEnd) {
+ var quantizationTableSpec = data[offset++];
+ var tableData = new Uint16Array(64);
+
+ if (quantizationTableSpec >> 4 === 0) {
+ for (j = 0; j < 64; j++) {
+ z = dctZigZag[j];
+ tableData[z] = data[offset++];
+ }
+ } else if (quantizationTableSpec >> 4 === 1) {
+ for (j = 0; j < 64; j++) {
+ z = dctZigZag[j];
+ tableData[z] = readUint16();
+ }
+ } else {
+ throw new JpegError('DQT - invalid table spec');
+ }
+
+ quantizationTables[quantizationTableSpec & 15] = tableData;
+ }
+
+ break;
+
+ case 0xFFC0:
+ case 0xFFC1:
+ case 0xFFC2:
+ if (frame) {
+ throw new JpegError('Only single frame JPEGs supported');
+ }
+
+ readUint16();
+ frame = {};
+ frame.extended = fileMarker === 0xFFC1;
+ frame.progressive = fileMarker === 0xFFC2;
+ frame.precision = data[offset++];
+ var sofScanLines = readUint16();
+ frame.scanLines = dnlScanLines || sofScanLines;
+ frame.samplesPerLine = readUint16();
+ frame.components = [];
+ frame.componentIds = {};
+ var componentsCount = data[offset++],
+ componentId;
+ var maxH = 0,
+ maxV = 0;
+
+ for (i = 0; i < componentsCount; i++) {
+ componentId = data[offset];
+ var h = data[offset + 1] >> 4;
+ var v = data[offset + 1] & 15;
+
+ if (maxH < h) {
+ maxH = h;
+ }
+
+ if (maxV < v) {
+ maxV = v;
+ }
+
+ var qId = data[offset + 2];
+ l = frame.components.push({
+ h: h,
+ v: v,
+ quantizationId: qId,
+ quantizationTable: null
+ });
+ frame.componentIds[componentId] = l - 1;
+ offset += 3;
+ }
+
+ frame.maxH = maxH;
+ frame.maxV = maxV;
+ prepareComponents(frame);
+ break;
+
+ case 0xFFC4:
+ var huffmanLength = readUint16();
+
+ for (i = 2; i < huffmanLength;) {
+ var huffmanTableSpec = data[offset++];
+ var codeLengths = new Uint8Array(16);
+ var codeLengthSum = 0;
+
+ for (j = 0; j < 16; j++, offset++) {
+ codeLengthSum += codeLengths[j] = data[offset];
+ }
+
+ var huffmanValues = new Uint8Array(codeLengthSum);
+
+ for (j = 0; j < codeLengthSum; j++, offset++) {
+ huffmanValues[j] = data[offset];
+ }
+
+ i += 17 + codeLengthSum;
+ (huffmanTableSpec >> 4 === 0 ? huffmanTablesDC : huffmanTablesAC)[huffmanTableSpec & 15] = buildHuffmanTable(codeLengths, huffmanValues);
+ }
+
+ break;
+
+ case 0xFFDD:
+ readUint16();
+ resetInterval = readUint16();
+ break;
+
+ case 0xFFDA:
+ var parseDNLMarker = ++numSOSMarkers === 1 && !dnlScanLines;
+ readUint16();
+ var selectorsCount = data[offset++];
+ var components = [],
+ component;
+
+ for (i = 0; i < selectorsCount; i++) {
+ var componentIndex = frame.componentIds[data[offset++]];
+ component = frame.components[componentIndex];
+ var tableSpec = data[offset++];
+ component.huffmanTableDC = huffmanTablesDC[tableSpec >> 4];
+ component.huffmanTableAC = huffmanTablesAC[tableSpec & 15];
+ components.push(component);
+ }
+
+ var spectralStart = data[offset++];
+ var spectralEnd = data[offset++];
+ var successiveApproximation = data[offset++];
+
+ try {
+ var processed = decodeScan(data, offset, frame, components, resetInterval, spectralStart, spectralEnd, successiveApproximation >> 4, successiveApproximation & 15, parseDNLMarker);
+ offset += processed;
+ } catch (ex) {
+ if (ex instanceof DNLMarkerError) {
+ (0, _util.warn)("".concat(ex.message, " -- attempting to re-parse the JPEG image."));
+ return this.parse(data, {
+ dnlScanLines: ex.scanLines
+ });
+ } else if (ex instanceof EOIMarkerError) {
+ (0, _util.warn)("".concat(ex.message, " -- ignoring the rest of the image data."));
+ break markerLoop;
+ }
+
+ throw ex;
+ }
+
+ break;
+
+ case 0xFFDC:
+ offset += 4;
+ break;
+
+ case 0xFFFF:
+ if (data[offset] !== 0xFF) {
+ offset--;
+ }
+
+ break;
+
+ default:
+ if (data[offset - 3] === 0xFF && data[offset - 2] >= 0xC0 && data[offset - 2] <= 0xFE) {
+ offset -= 3;
+ break;
+ }
+
+ var nextFileMarker = findNextFileMarker(data, offset - 2);
+
+ if (nextFileMarker && nextFileMarker.invalid) {
+ (0, _util.warn)('JpegImage.parse - unexpected data, current marker is: ' + nextFileMarker.invalid);
+ offset = nextFileMarker.offset;
+ break;
+ }
+
+ throw new JpegError('unknown marker ' + fileMarker.toString(16));
+ }
+
+ fileMarker = readUint16();
+ }
+
+ this.width = frame.samplesPerLine;
+ this.height = frame.scanLines;
+ this.jfif = jfif;
+ this.adobe = adobe;
+ this.components = [];
+
+ for (i = 0; i < frame.components.length; i++) {
+ component = frame.components[i];
+ var quantizationTable = quantizationTables[component.quantizationId];
+
+ if (quantizationTable) {
+ component.quantizationTable = quantizationTable;
+ }
+
+ this.components.push({
+ output: buildComponentData(frame, component),
+ scaleX: component.h / frame.maxH,
+ scaleY: component.v / frame.maxV,
+ blocksPerLine: component.blocksPerLine,
+ blocksPerColumn: component.blocksPerColumn
+ });
+ }
+
+ this.numComponents = this.components.length;
+ return undefined;
+ },
+ _getLinearizedBlockData: function _getLinearizedBlockData(width, height) {
+ var isSourcePDF = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
+ var scaleX = this.width / width,
+ scaleY = this.height / height;
+ var component, componentScaleX, componentScaleY, blocksPerScanline;
+ var x, y, i, j, k;
+ var index;
+ var offset = 0;
+ var output;
+ var numComponents = this.components.length;
+ var dataLength = width * height * numComponents;
+ var data = new Uint8ClampedArray(dataLength);
+ var xScaleBlockOffset = new Uint32Array(width);
+ var mask3LSB = 0xfffffff8;
+
+ for (i = 0; i < numComponents; i++) {
+ component = this.components[i];
+ componentScaleX = component.scaleX * scaleX;
+ componentScaleY = component.scaleY * scaleY;
+ offset = i;
+ output = component.output;
+ blocksPerScanline = component.blocksPerLine + 1 << 3;
+
+ for (x = 0; x < width; x++) {
+ j = 0 | x * componentScaleX;
+ xScaleBlockOffset[x] = (j & mask3LSB) << 3 | j & 7;
+ }
+
+ for (y = 0; y < height; y++) {
+ j = 0 | y * componentScaleY;
+ index = blocksPerScanline * (j & mask3LSB) | (j & 7) << 3;
+
+ for (x = 0; x < width; x++) {
+ data[offset] = output[index + xScaleBlockOffset[x]];
+ offset += numComponents;
+ }
+ }
+ }
+
+ var transform = this._decodeTransform;
+
+ if (!isSourcePDF && numComponents === 4 && !transform) {
+ transform = new Int32Array([-256, 255, -256, 255, -256, 255, -256, 255]);
+ }
+
+ if (transform) {
+ for (i = 0; i < dataLength;) {
+ for (j = 0, k = 0; j < numComponents; j++, i++, k += 2) {
+ data[i] = (data[i] * transform[k] >> 8) + transform[k + 1];
+ }
+ }
+ }
+
+ return data;
+ },
+
+ get _isColorConversionNeeded() {
+ if (this.adobe) {
+ return !!this.adobe.transformCode;
+ }
+
+ if (this.numComponents === 3) {
+ if (this._colorTransform === 0) {
+ return false;
+ }
+
+ return true;
+ }
+
+ if (this._colorTransform === 1) {
+ return true;
+ }
+
+ return false;
+ },
+
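+ // Standard JFIF YCbCr -> RGB conversion; the additive constants fold the
+ // -128 offset of the chroma channels into the 1.402/0.344/0.714/1.772
+ // coefficients (e.g. 179.456 = 1.402 * 128).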
+ _convertYccToRgb: function convertYccToRgb(data) {
+ var Y, Cb, Cr;
+
+ for (var i = 0, length = data.length; i < length; i += 3) {
+ Y = data[i];
+ Cb = data[i + 1];
+ Cr = data[i + 2];
+ data[i] = Y - 179.456 + 1.402 * Cr;
+ data[i + 1] = Y + 135.459 - 0.344 * Cb - 0.714 * Cr;
+ data[i + 2] = Y - 226.816 + 1.772 * Cb;
+ }
+
+ return data;
+ },
+ _convertYcckToRgb: function convertYcckToRgb(data) {
+ var Y, Cb, Cr, k;
+ var offset = 0;
+
+ for (var i = 0, length = data.length; i < length; i += 4) {
+ Y = data[i];
+ Cb = data[i + 1];
+ Cr = data[i + 2];
+ k = data[i + 3];
+ data[offset++] = -122.67195406894 + Cb * (-6.60635669420364e-5 * Cb + 0.000437130475926232 * Cr - 5.4080610064599e-5 * Y + 0.00048449797120281 * k - 0.154362151871126) + Cr * (-0.000957964378445773 * Cr + 0.000817076911346625 * Y - 0.00477271405408747 * k + 1.53380253221734) + Y * (0.000961250184130688 * Y - 0.00266257332283933 * k + 0.48357088451265) + k * (-0.000336197177618394 * k + 0.484791561490776);
+ data[offset++] = 107.268039397724 + Cb * (2.19927104525741e-5 * Cb - 0.000640992018297945 * Cr + 0.000659397001245577 * Y + 0.000426105652938837 * k - 0.176491792462875) + Cr * (-0.000778269941513683 * Cr + 0.00130872261408275 * Y + 0.000770482631801132 * k - 0.151051492775562) + Y * (0.00126935368114843 * Y - 0.00265090189010898 * k + 0.25802910206845) + k * (-0.000318913117588328 * k - 0.213742400323665);
+ data[offset++] = -20.810012546947 + Cb * (-0.000570115196973677 * Cb - 2.63409051004589e-5 * Cr + 0.0020741088115012 * Y - 0.00288260236853442 * k + 0.814272968359295) + Cr * (-1.53496057440975e-5 * Cr - 0.000132689043961446 * Y + 0.000560833691242812 * k - 0.195152027534049) + Y * (0.00174418132927582 * Y - 0.00255243321439347 * k + 0.116935020465145) + k * (-0.000343531996510555 * k + 0.24165260232407);
+ }
+
+ return data.subarray(0, offset);
+ },
+ _convertYcckToCmyk: function convertYcckToCmyk(data) {
+ var Y, Cb, Cr;
+
+ for (var i = 0, length = data.length; i < length; i += 4) {
+ Y = data[i];
+ Cb = data[i + 1];
+ Cr = data[i + 2];
+ data[i] = 434.456 - Y - 1.402 * Cr;
+ data[i + 1] = 119.541 - Y + 0.344 * Cb + 0.714 * Cr;
+ data[i + 2] = 481.816 - Y - 1.772 * Cb;
+ }
+
+ return data;
+ },
+ _convertCmykToRgb: function convertCmykToRgb(data) {
+ var c, m, y, k;
+ var offset = 0;
+ var scale = 1 / 255;
+
+ for (var i = 0, length = data.length; i < length; i += 4) {
+ c = data[i] * scale;
+ m = data[i + 1] * scale;
+ y = data[i + 2] * scale;
+ k = data[i + 3] * scale;
+ data[offset++] = 255 + c * (-4.387332384609988 * c + 54.48615194189176 * m + 18.82290502165302 * y + 212.25662451639585 * k - 285.2331026137004) + m * (1.7149763477362134 * m - 5.6096736904047315 * y - 17.873870861415444 * k - 5.497006427196366) + y * (-2.5217340131683033 * y - 21.248923337353073 * k + 17.5119270841813) - k * (21.86122147463605 * k + 189.48180835922747);
+ data[offset++] = 255 + c * (8.841041422036149 * c + 60.118027045597366 * m + 6.871425592049007 * y + 31.159100130055922 * k - 79.2970844816548) + m * (-15.310361306967817 * m + 17.575251261109482 * y + 131.35250912493976 * k - 190.9453302588951) + y * (4.444339102852739 * y + 9.8632861493405 * k - 24.86741582555878) - k * (20.737325471181034 * k + 187.80453709719578);
+ data[offset++] = 255 + c * (0.8842522430003296 * c + 8.078677503112928 * m + 30.89978309703729 * y - 0.23883238689178934 * k - 14.183576799673286) + m * (10.49593273432072 * m + 63.02378494754052 * y + 50.606957656360734 * k - 112.23884253719248) + y * (0.03296041114873217 * y + 115.60384449646641 * k - 193.58209356861505) - k * (22.33816807309886 * k + 180.12613974708367);
+ }
+
+ return data.subarray(0, offset);
+ },
+ getData: function getData(_ref3) {
+ var width = _ref3.width,
+ height = _ref3.height,
+ _ref3$forceRGB = _ref3.forceRGB,
+ forceRGB = _ref3$forceRGB === void 0 ? false : _ref3$forceRGB,
+ _ref3$isSourcePDF = _ref3.isSourcePDF,
+ isSourcePDF = _ref3$isSourcePDF === void 0 ? false : _ref3$isSourcePDF;
+
+ if (this.numComponents > 4) {
+ throw new JpegError('Unsupported color mode');
+ }
+
+ var data = this._getLinearizedBlockData(width, height, isSourcePDF);
+
+ if (this.numComponents === 1 && forceRGB) {
+ var dataLength = data.length;
+ var rgbData = new Uint8ClampedArray(dataLength * 3);
+ var offset = 0;
+
+ for (var i = 0; i < dataLength; i++) {
+ var grayColor = data[i];
+ rgbData[offset++] = grayColor;
+ rgbData[offset++] = grayColor;
+ rgbData[offset++] = grayColor;
+ }
+
+ return rgbData;
+ } else if (this.numComponents === 3 && this._isColorConversionNeeded) {
+ return this._convertYccToRgb(data);
+ } else if (this.numComponents === 4) {
+ if (this._isColorConversionNeeded) {
+ if (forceRGB) {
+ return this._convertYcckToRgb(data);
+ }
+
+ return this._convertYcckToCmyk(data);
+ } else if (forceRGB) {
+ return this._convertCmykToRgb(data);
+ }
+ }
+
+ return data;
+ }
+ };
+ return JpegImage;
+}();
+
+exports.JpegImage = JpegImage;
+
+/***/ }),
+/* 166 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.JpxStream = void 0;
+
+var _stream = __w_pdfjs_require__(158);
+
+var _jpx = __w_pdfjs_require__(167);
+
+var _util = __w_pdfjs_require__(5);
+
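+// A DecodeStream that decodes the whole JPEG 2000 codestream on the first
+// readBlock() call and, for multi-tile images, stitches the decoded tiles
+// into a single interleaved buffer.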
+var JpxStream = function JpxStreamClosure() {
+ function JpxStream(stream, maybeLength, dict, params) {
+ this.stream = stream;
+ this.maybeLength = maybeLength;
+ this.dict = dict;
+ this.params = params;
+
+ _stream.DecodeStream.call(this, maybeLength);
+ }
+
+ JpxStream.prototype = Object.create(_stream.DecodeStream.prototype);
+ Object.defineProperty(JpxStream.prototype, 'bytes', {
+ get: function JpxStream_bytes() {
+ return (0, _util.shadow)(this, 'bytes', this.stream.getBytes(this.maybeLength));
+ },
+ configurable: true
+ });
+
+ JpxStream.prototype.ensureBuffer = function (requested) {};
+
+ JpxStream.prototype.readBlock = function () {
+ if (this.eof) {
+ return;
+ }
+
+ var jpxImage = new _jpx.JpxImage();
+ jpxImage.parse(this.bytes);
+ var width = jpxImage.width;
+ var height = jpxImage.height;
+ var componentsCount = jpxImage.componentsCount;
+ var tileCount = jpxImage.tiles.length;
+
+ if (tileCount === 1) {
+ this.buffer = jpxImage.tiles[0].items;
+ } else {
+ var data = new Uint8ClampedArray(width * height * componentsCount);
+
+ for (var k = 0; k < tileCount; k++) {
+ var tileComponents = jpxImage.tiles[k];
+ var tileWidth = tileComponents.width;
+ var tileHeight = tileComponents.height;
+ var tileLeft = tileComponents.left;
+ var tileTop = tileComponents.top;
+ var src = tileComponents.items;
+ var srcPosition = 0;
+ var dataPosition = (width * tileTop + tileLeft) * componentsCount;
+ var imgRowSize = width * componentsCount;
+ var tileRowSize = tileWidth * componentsCount;
+
+ for (var j = 0; j < tileHeight; j++) {
+ var rowBytes = src.subarray(srcPosition, srcPosition + tileRowSize);
+ data.set(rowBytes, dataPosition);
+ srcPosition += tileRowSize;
+ dataPosition += imgRowSize;
+ }
+ }
+
+ this.buffer = data;
+ }
+
+ this.bufferLength = this.buffer.length;
+ this.eof = true;
+ };
+
+ return JpxStream;
+}();
+
+exports.JpxStream = JpxStream;
+
+/***/ }),
+/* 167 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.JpxImage = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _arithmetic_decoder = __w_pdfjs_require__(163);
+
+var JpxError = function JpxErrorClosure() {
+ function JpxError(msg) {
+ this.message = 'JPX error: ' + msg;
+ }
+
+ JpxError.prototype = new Error();
+ JpxError.prototype.name = 'JpxError';
+ JpxError.constructor = JpxError;
+ return JpxError;
+}();
+
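+// Minimal JPEG 2000 decoder: parse() accepts either a raw codestream
+// (starting with the 0xFF4F SOC marker) or a JP2 container, walking the
+// container boxes to find the contiguous codestream box before handing off
+// to parseCodestream().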
+var JpxImage = function JpxImageClosure() {
+ var SubbandsGainLog2 = {
+ 'LL': 0,
+ 'LH': 1,
+ 'HL': 1,
+ 'HH': 2
+ };
+
+ function JpxImage() {
+ this.failOnCorruptedImage = false;
+ }
+
+ JpxImage.prototype = {
+ parse: function JpxImage_parse(data) {
+ var head = (0, _util.readUint16)(data, 0);
+
+ if (head === 0xFF4F) {
+ this.parseCodestream(data, 0, data.length);
+ return;
+ }
+
+ var position = 0,
+ length = data.length;
+
+ while (position < length) {
+ var headerSize = 8;
+ var lbox = (0, _util.readUint32)(data, position);
+ var tbox = (0, _util.readUint32)(data, position + 4);
+ position += headerSize;
+
+ if (lbox === 1) {
+ lbox = (0, _util.readUint32)(data, position) * 4294967296 + (0, _util.readUint32)(data, position + 4);
+ position += 8;
+ headerSize += 8;
+ }
+
+ if (lbox === 0) {
+ lbox = length - position + headerSize;
+ }
+
+ if (lbox < headerSize) {
+ throw new JpxError('Invalid box field size');
+ }
+
+ var dataLength = lbox - headerSize;
+ var jumpDataLength = true;
+
+ switch (tbox) {
+ case 0x6A703268:
+ jumpDataLength = false;
+ break;
+
+ case 0x636F6C72:
+ var method = data[position];
+
+ if (method === 1) {
+ var colorspace = (0, _util.readUint32)(data, position + 3);
+
+ switch (colorspace) {
+ case 16:
+ case 17:
+ case 18:
+ break;
+
+ default:
+ (0, _util.warn)('Unknown colorspace ' + colorspace);
+ break;
+ }
+ } else if (method === 2) {
+ (0, _util.info)('ICC profile not supported');
+ }
+
+ break;
+
+ case 0x6A703263:
+ this.parseCodestream(data, position, position + dataLength);
+ break;
+
+ case 0x6A502020:
+ if ((0, _util.readUint32)(data, position) !== 0x0d0a870a) {
+ (0, _util.warn)('Invalid JP2 signature');
+ }
+
+ break;
+
+ case 0x6A501A1A:
+ case 0x66747970:
+ case 0x72726571:
+ case 0x72657320:
+ case 0x69686472:
+ break;
+
+ default:
+ var headerType = String.fromCharCode(tbox >> 24 & 0xFF, tbox >> 16 & 0xFF, tbox >> 8 & 0xFF, tbox & 0xFF);
+ (0, _util.warn)('Unsupported header type ' + tbox + ' (' + headerType + ')');
+ break;
+ }
+
+ if (jumpDataLength) {
+ position += dataLength;
+ }
+ }
+ },
+ parseImageProperties: function JpxImage_parseImageProperties(stream) {
+ var newByte = stream.getByte();
+
+ while (newByte >= 0) {
+ var oldByte = newByte;
+ newByte = stream.getByte();
+ var code = oldByte << 8 | newByte;
+
+ if (code === 0xFF51) {
+ stream.skip(4);
+ var Xsiz = stream.getInt32() >>> 0;
+ var Ysiz = stream.getInt32() >>> 0;
+ var XOsiz = stream.getInt32() >>> 0;
+ var YOsiz = stream.getInt32() >>> 0;
+ stream.skip(16);
+ var Csiz = stream.getUint16();
+ this.width = Xsiz - XOsiz;
+ this.height = Ysiz - YOsiz;
+ this.componentsCount = Csiz;
+ this.bitsPerComponent = 8;
+ return;
+ }
+ }
+
+ throw new JpxError('No size marker found in JPX stream');
+ },
+ parseCodestream: function JpxImage_parseCodestream(data, start, end) {
+ var context = {};
+ var doNotRecover = false;
+
+ try {
+ var position = start;
+
+ while (position + 1 < end) {
+ var code = (0, _util.readUint16)(data, position);
+ position += 2;
+ var length = 0,
+ j,
+ sqcd,
+ spqcds,
+ spqcdSize,
+ scalarExpounded,
+ tile;
+
+ switch (code) {
+ case 0xFF4F:
+ context.mainHeader = true;
+ break;
+
+ case 0xFFD9:
+ break;
+
+ case 0xFF51:
+ length = (0, _util.readUint16)(data, position);
+ var siz = {};
+ siz.Xsiz = (0, _util.readUint32)(data, position + 4);
+ siz.Ysiz = (0, _util.readUint32)(data, position + 8);
+ siz.XOsiz = (0, _util.readUint32)(data, position + 12);
+ siz.YOsiz = (0, _util.readUint32)(data, position + 16);
+ siz.XTsiz = (0, _util.readUint32)(data, position + 20);
+ siz.YTsiz = (0, _util.readUint32)(data, position + 24);
+ siz.XTOsiz = (0, _util.readUint32)(data, position + 28);
+ siz.YTOsiz = (0, _util.readUint32)(data, position + 32);
+ var componentsCount = (0, _util.readUint16)(data, position + 36);
+ siz.Csiz = componentsCount;
+ var components = [];
+ j = position + 38;
+
+ for (var i = 0; i < componentsCount; i++) {
+ var component = {
+ precision: (data[j] & 0x7F) + 1,
+ isSigned: !!(data[j] & 0x80),
+ XRsiz: data[j + 1],
+ YRsiz: data[j + 2]
+ };
+ j += 3;
+ calculateComponentDimensions(component, siz);
+ components.push(component);
+ }
+
+ context.SIZ = siz;
+ context.components = components;
+ calculateTileGrids(context, components);
+ context.QCC = [];
+ context.COC = [];
+ break;
+
+ case 0xFF5C:
+ length = (0, _util.readUint16)(data, position);
+ var qcd = {};
+ j = position + 2;
+ sqcd = data[j++];
+
+ switch (sqcd & 0x1F) {
+ case 0:
+ spqcdSize = 8;
+ scalarExpounded = true;
+ break;
+
+ case 1:
+ spqcdSize = 16;
+ scalarExpounded = false;
+ break;
+
+ case 2:
+ spqcdSize = 16;
+ scalarExpounded = true;
+ break;
+
+ default:
+ throw new Error('Invalid SQcd value ' + sqcd);
+ }
+
+ qcd.noQuantization = spqcdSize === 8;
+ qcd.scalarExpounded = scalarExpounded;
+ qcd.guardBits = sqcd >> 5;
+ spqcds = [];
+
+ while (j < length + position) {
+ var spqcd = {};
+
+ if (spqcdSize === 8) {
+ spqcd.epsilon = data[j++] >> 3;
+ spqcd.mu = 0;
+ } else {
+ spqcd.epsilon = data[j] >> 3;
+ spqcd.mu = (data[j] & 0x7) << 8 | data[j + 1];
+ j += 2;
+ }
+
+ spqcds.push(spqcd);
+ }
+
+ qcd.SPqcds = spqcds;
+
+ if (context.mainHeader) {
+ context.QCD = qcd;
+ } else {
+ context.currentTile.QCD = qcd;
+ context.currentTile.QCC = [];
+ }
+
+ break;
+
+ case 0xFF5D:
+ length = (0, _util.readUint16)(data, position);
+ var qcc = {};
+ j = position + 2;
+ var cqcc;
+
+ if (context.SIZ.Csiz < 257) {
+ cqcc = data[j++];
+ } else {
+ cqcc = (0, _util.readUint16)(data, j);
+ j += 2;
+ }
+
+ sqcd = data[j++];
+
+ switch (sqcd & 0x1F) {
+ case 0:
+ spqcdSize = 8;
+ scalarExpounded = true;
+ break;
+
+ case 1:
+ spqcdSize = 16;
+ scalarExpounded = false;
+ break;
+
+ case 2:
+ spqcdSize = 16;
+ scalarExpounded = true;
+ break;
+
+ default:
+ throw new Error('Invalid SQcd value ' + sqcd);
+ }
+
+ qcc.noQuantization = spqcdSize === 8;
+ qcc.scalarExpounded = scalarExpounded;
+ qcc.guardBits = sqcd >> 5;
+ spqcds = [];
+
+ while (j < length + position) {
+ spqcd = {};
+
+ if (spqcdSize === 8) {
+ spqcd.epsilon = data[j++] >> 3;
+ spqcd.mu = 0;
+ } else {
+ spqcd.epsilon = data[j] >> 3;
+ spqcd.mu = (data[j] & 0x7) << 8 | data[j + 1];
+ j += 2;
+ }
+
+ spqcds.push(spqcd);
+ }
+
+ qcc.SPqcds = spqcds;
+
+ if (context.mainHeader) {
+ context.QCC[cqcc] = qcc;
+ } else {
+ context.currentTile.QCC[cqcc] = qcc;
+ }
+
+ break;
+
+ case 0xFF52:
+ length = (0, _util.readUint16)(data, position);
+ var cod = {};
+ j = position + 2;
+ var scod = data[j++];
+ cod.entropyCoderWithCustomPrecincts = !!(scod & 1);
+ cod.sopMarkerUsed = !!(scod & 2);
+ cod.ephMarkerUsed = !!(scod & 4);
+ cod.progressionOrder = data[j++];
+ cod.layersCount = (0, _util.readUint16)(data, j);
+ j += 2;
+ cod.multipleComponentTransform = data[j++];
+ cod.decompositionLevelsCount = data[j++];
+ cod.xcb = (data[j++] & 0xF) + 2;
+ cod.ycb = (data[j++] & 0xF) + 2;
+ var blockStyle = data[j++];
+ cod.selectiveArithmeticCodingBypass = !!(blockStyle & 1);
+ cod.resetContextProbabilities = !!(blockStyle & 2);
+ cod.terminationOnEachCodingPass = !!(blockStyle & 4);
+ cod.verticallyStripe = !!(blockStyle & 8);
+ cod.predictableTermination = !!(blockStyle & 16);
+ cod.segmentationSymbolUsed = !!(blockStyle & 32);
+ cod.reversibleTransformation = data[j++];
+
+ if (cod.entropyCoderWithCustomPrecincts) {
+ var precinctsSizes = [];
+
+ while (j < length + position) {
+ var precinctsSize = data[j++];
+ precinctsSizes.push({
+ PPx: precinctsSize & 0xF,
+ PPy: precinctsSize >> 4
+ });
+ }
+
+ cod.precinctsSizes = precinctsSizes;
+ }
+
+ var unsupported = [];
+
+ if (cod.selectiveArithmeticCodingBypass) {
+ unsupported.push('selectiveArithmeticCodingBypass');
+ }
+
+ if (cod.resetContextProbabilities) {
+ unsupported.push('resetContextProbabilities');
+ }
+
+ if (cod.terminationOnEachCodingPass) {
+ unsupported.push('terminationOnEachCodingPass');
+ }
+
+ if (cod.verticallyStripe) {
+ unsupported.push('verticallyStripe');
+ }
+
+ if (cod.predictableTermination) {
+ unsupported.push('predictableTermination');
+ }
+
+ if (unsupported.length > 0) {
+ doNotRecover = true;
+ throw new Error('Unsupported COD options (' + unsupported.join(', ') + ')');
+ }
+
+ if (context.mainHeader) {
+ context.COD = cod;
+ } else {
+ context.currentTile.COD = cod;
+ context.currentTile.COC = [];
+ }
+
+ break;
+
+ case 0xFF90:
+ length = (0, _util.readUint16)(data, position);
+ tile = {};
+ tile.index = (0, _util.readUint16)(data, position + 2);
+ tile.length = (0, _util.readUint32)(data, position + 4);
+ tile.dataEnd = tile.length + position - 2;
+ tile.partIndex = data[position + 8];
+ tile.partsCount = data[position + 9];
+ context.mainHeader = false;
+
+ if (tile.partIndex === 0) {
+ tile.COD = context.COD;
+ tile.COC = context.COC.slice(0);
+ tile.QCD = context.QCD;
+ tile.QCC = context.QCC.slice(0);
+ }
+
+ context.currentTile = tile;
+ break;
+
+ case 0xFF93:
+ tile = context.currentTile;
+
+ if (tile.partIndex === 0) {
+ initializeTile(context, tile.index);
+ buildPackets(context);
+ }
+
+ length = tile.dataEnd - position;
+ parseTilePackets(context, data, position, length);
+ break;
+
+ case 0xFF55:
+ case 0xFF57:
+ case 0xFF58:
+ case 0xFF64:
+ length = (0, _util.readUint16)(data, position);
+ break;
+
+ case 0xFF53:
+ throw new Error('Codestream code 0xFF53 (COC) is ' + 'not implemented');
+
+ default:
+ throw new Error('Unknown codestream code: ' + code.toString(16));
+ }
+
+ position += length;
+ }
+ } catch (e) {
+ if (doNotRecover || this.failOnCorruptedImage) {
+ throw new JpxError(e.message);
+ } else {
+ (0, _util.warn)('JPX: Trying to recover from: ' + e.message);
+ }
+ }
+
+ this.tiles = transformComponents(context);
+ this.width = context.SIZ.Xsiz - context.SIZ.XOsiz;
+ this.height = context.SIZ.Ysiz - context.SIZ.YOsiz;
+ this.componentsCount = context.SIZ.Csiz;
+ }
+ };
+
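+ // Maps a component onto the JPEG 2000 reference grid: its bounds are the
+ // image offsets/sizes divided by the component subsampling factors (XRsiz,
+ // YRsiz), rounded up.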
+ function calculateComponentDimensions(component, siz) {
+ component.x0 = Math.ceil(siz.XOsiz / component.XRsiz);
+ component.x1 = Math.ceil(siz.Xsiz / component.XRsiz);
+ component.y0 = Math.ceil(siz.YOsiz / component.YRsiz);
+ component.y1 = Math.ceil(siz.Ysiz / component.YRsiz);
+ component.width = component.x1 - component.x0;
+ component.height = component.y1 - component.y0;
+ }
+
+ function calculateTileGrids(context, components) {
+ var siz = context.SIZ;
+ var tile,
+ tiles = [];
+ var numXtiles = Math.ceil((siz.Xsiz - siz.XTOsiz) / siz.XTsiz);
+ var numYtiles = Math.ceil((siz.Ysiz - siz.YTOsiz) / siz.YTsiz);
+
+ for (var q = 0; q < numYtiles; q++) {
+ for (var p = 0; p < numXtiles; p++) {
+ tile = {};
+ tile.tx0 = Math.max(siz.XTOsiz + p * siz.XTsiz, siz.XOsiz);
+ tile.ty0 = Math.max(siz.YTOsiz + q * siz.YTsiz, siz.YOsiz);
+ tile.tx1 = Math.min(siz.XTOsiz + (p + 1) * siz.XTsiz, siz.Xsiz);
+ tile.ty1 = Math.min(siz.YTOsiz + (q + 1) * siz.YTsiz, siz.Ysiz);
+ tile.width = tile.tx1 - tile.tx0;
+ tile.height = tile.ty1 - tile.ty0;
+ tile.components = [];
+ tiles.push(tile);
+ }
+ }
+
+ context.tiles = tiles;
+ var componentsCount = siz.Csiz;
+
+ for (var i = 0, ii = componentsCount; i < ii; i++) {
+ var component = components[i];
+
+ for (var j = 0, jj = tiles.length; j < jj; j++) {
+ var tileComponent = {};
+ tile = tiles[j];
+ tileComponent.tcx0 = Math.ceil(tile.tx0 / component.XRsiz);
+ tileComponent.tcy0 = Math.ceil(tile.ty0 / component.YRsiz);
+ tileComponent.tcx1 = Math.ceil(tile.tx1 / component.XRsiz);
+ tileComponent.tcy1 = Math.ceil(tile.ty1 / component.YRsiz);
+ tileComponent.width = tileComponent.tcx1 - tileComponent.tcx0;
+ tileComponent.height = tileComponent.tcy1 - tileComponent.tcy0;
+ tile.components[i] = tileComponent;
+ }
+ }
+ }
+
+ function getBlocksDimensions(context, component, r) {
+ var codOrCoc = component.codingStyleParameters;
+ var result = {};
+
+ if (!codOrCoc.entropyCoderWithCustomPrecincts) {
+ result.PPx = 15;
+ result.PPy = 15;
+ } else {
+ result.PPx = codOrCoc.precinctsSizes[r].PPx;
+ result.PPy = codOrCoc.precinctsSizes[r].PPy;
+ }
+
+ result.xcb_ = r > 0 ? Math.min(codOrCoc.xcb, result.PPx - 1) : Math.min(codOrCoc.xcb, result.PPx);
+ result.ycb_ = r > 0 ? Math.min(codOrCoc.ycb, result.PPy - 1) : Math.min(codOrCoc.ycb, result.PPy);
+ return result;
+ }
+
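+ // Computes the precinct partition of one resolution level. Without custom
+ // precinct sizes the defaults PPx = PPy = 15 apply; above resolution level 0
+ // the precinct size within a sub-band is half the nominal size.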
+ function buildPrecincts(context, resolution, dimensions) {
+ var precinctWidth = 1 << dimensions.PPx;
+ var precinctHeight = 1 << dimensions.PPy;
+ var isZeroRes = resolution.resLevel === 0;
+ var precinctWidthInSubband = 1 << dimensions.PPx + (isZeroRes ? 0 : -1);
+ var precinctHeightInSubband = 1 << dimensions.PPy + (isZeroRes ? 0 : -1);
+ var numprecinctswide = resolution.trx1 > resolution.trx0 ? Math.ceil(resolution.trx1 / precinctWidth) - Math.floor(resolution.trx0 / precinctWidth) : 0;
+ var numprecinctshigh = resolution.try1 > resolution.try0 ? Math.ceil(resolution.try1 / precinctHeight) - Math.floor(resolution.try0 / precinctHeight) : 0;
+ var numprecincts = numprecinctswide * numprecinctshigh;
+ resolution.precinctParameters = {
+ precinctWidth: precinctWidth,
+ precinctHeight: precinctHeight,
+ numprecinctswide: numprecinctswide,
+ numprecinctshigh: numprecinctshigh,
+ numprecincts: numprecincts,
+ precinctWidthInSubband: precinctWidthInSubband,
+ precinctHeightInSubband: precinctHeightInSubband
+ };
+ }
+
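+ // Splits a sub-band into code-blocks, clamps each block to the sub-band
+ // bounds, assigns it to a precinct and tracks the per-precinct range of
+ // code-block indices (cbxMin/cbxMax and cbyMin/cbyMax).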
+ function buildCodeblocks(context, subband, dimensions) {
+ var xcb_ = dimensions.xcb_;
+ var ycb_ = dimensions.ycb_;
+ var codeblockWidth = 1 << xcb_;
+ var codeblockHeight = 1 << ycb_;
+ var cbx0 = subband.tbx0 >> xcb_;
+ var cby0 = subband.tby0 >> ycb_;
+ var cbx1 = subband.tbx1 + codeblockWidth - 1 >> xcb_;
+ var cby1 = subband.tby1 + codeblockHeight - 1 >> ycb_;
+ var precinctParameters = subband.resolution.precinctParameters;
+ var codeblocks = [];
+ var precincts = [];
+ var i, j, codeblock, precinctNumber;
+
+ for (j = cby0; j < cby1; j++) {
+ for (i = cbx0; i < cbx1; i++) {
+ codeblock = {
+ cbx: i,
+ cby: j,
+ tbx0: codeblockWidth * i,
+ tby0: codeblockHeight * j,
+ tbx1: codeblockWidth * (i + 1),
+ tby1: codeblockHeight * (j + 1)
+ };
+ codeblock.tbx0_ = Math.max(subband.tbx0, codeblock.tbx0);
+ codeblock.tby0_ = Math.max(subband.tby0, codeblock.tby0);
+ codeblock.tbx1_ = Math.min(subband.tbx1, codeblock.tbx1);
+ codeblock.tby1_ = Math.min(subband.tby1, codeblock.tby1);
+ var pi = Math.floor((codeblock.tbx0_ - subband.tbx0) / precinctParameters.precinctWidthInSubband);
+ var pj = Math.floor((codeblock.tby0_ - subband.tby0) / precinctParameters.precinctHeightInSubband);
+ precinctNumber = pi + pj * precinctParameters.numprecinctswide;
+ codeblock.precinctNumber = precinctNumber;
+ codeblock.subbandType = subband.type;
+ codeblock.Lblock = 3;
+
+ if (codeblock.tbx1_ <= codeblock.tbx0_ || codeblock.tby1_ <= codeblock.tby0_) {
+ continue;
+ }
+
+ codeblocks.push(codeblock);
+ var precinct = precincts[precinctNumber];
+
+ if (precinct !== undefined) {
+ if (i < precinct.cbxMin) {
+ precinct.cbxMin = i;
+ } else if (i > precinct.cbxMax) {
+ precinct.cbxMax = i;
+ }
+
+ if (j < precinct.cbyMin) {
+ precinct.cbyMin = j;
+ } else if (j > precinct.cbyMax) {
+ precinct.cbyMax = j;
+ }
+ } else {
+ precincts[precinctNumber] = precinct = {
+ cbxMin: i,
+ cbyMin: j,
+ cbxMax: i,
+ cbyMax: j
+ };
+ }
+
+ codeblock.precinct = precinct;
+ }
+ }
+
+ subband.codeblockParameters = {
+ codeblockWidth: xcb_,
+ codeblockHeight: ycb_,
+ numcodeblockwide: cbx1 - cbx0 + 1,
+ numcodeblockhigh: cby1 - cby0 + 1
+ };
+ subband.codeblocks = codeblocks;
+ subband.precincts = precincts;
+ }
+
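+ // A packet collects, for one quality layer, the code-blocks of a single
+ // precinct across all sub-bands of a resolution level.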
+ function createPacket(resolution, precinctNumber, layerNumber) {
+ var precinctCodeblocks = [];
+ var subbands = resolution.subbands;
+
+ for (var i = 0, ii = subbands.length; i < ii; i++) {
+ var subband = subbands[i];
+ var codeblocks = subband.codeblocks;
+
+ for (var j = 0, jj = codeblocks.length; j < jj; j++) {
+ var codeblock = codeblocks[j];
+
+ if (codeblock.precinctNumber !== precinctNumber) {
+ continue;
+ }
+
+ precinctCodeblocks.push(codeblock);
+ }
+ }
+
+ return {
+ layerNumber: layerNumber,
+ codeblocks: precinctCodeblocks
+ };
+ }
+
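+ // The following iterators implement the five JPEG 2000 progression orders
+ // (LRCP, RLCP, RPCL, PCRL, CPRL). Each nextPacket() call yields the next
+ // packet in that order and throws once all packets have been produced.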
+ function LayerResolutionComponentPositionIterator(context) {
+ var siz = context.SIZ;
+ var tileIndex = context.currentTile.index;
+ var tile = context.tiles[tileIndex];
+ var layersCount = tile.codingStyleDefaultParameters.layersCount;
+ var componentsCount = siz.Csiz;
+ var maxDecompositionLevelsCount = 0;
+
+ for (var q = 0; q < componentsCount; q++) {
+ maxDecompositionLevelsCount = Math.max(maxDecompositionLevelsCount, tile.components[q].codingStyleParameters.decompositionLevelsCount);
+ }
+
+ var l = 0,
+ r = 0,
+ i = 0,
+ k = 0;
+
+ this.nextPacket = function JpxImage_nextPacket() {
+ for (; l < layersCount; l++) {
+ for (; r <= maxDecompositionLevelsCount; r++) {
+ for (; i < componentsCount; i++) {
+ var component = tile.components[i];
+
+ if (r > component.codingStyleParameters.decompositionLevelsCount) {
+ continue;
+ }
+
+ var resolution = component.resolutions[r];
+ var numprecincts = resolution.precinctParameters.numprecincts;
+
+ for (; k < numprecincts;) {
+ var packet = createPacket(resolution, k, l);
+ k++;
+ return packet;
+ }
+
+ k = 0;
+ }
+
+ i = 0;
+ }
+
+ r = 0;
+ }
+
+ throw new JpxError('Out of packets');
+ };
+ }
+
+ function ResolutionLayerComponentPositionIterator(context) {
+ var siz = context.SIZ;
+ var tileIndex = context.currentTile.index;
+ var tile = context.tiles[tileIndex];
+ var layersCount = tile.codingStyleDefaultParameters.layersCount;
+ var componentsCount = siz.Csiz;
+ var maxDecompositionLevelsCount = 0;
+
+ for (var q = 0; q < componentsCount; q++) {
+ maxDecompositionLevelsCount = Math.max(maxDecompositionLevelsCount, tile.components[q].codingStyleParameters.decompositionLevelsCount);
+ }
+
+ var r = 0,
+ l = 0,
+ i = 0,
+ k = 0;
+
+ this.nextPacket = function JpxImage_nextPacket() {
+ for (; r <= maxDecompositionLevelsCount; r++) {
+ for (; l < layersCount; l++) {
+ for (; i < componentsCount; i++) {
+ var component = tile.components[i];
+
+ if (r > component.codingStyleParameters.decompositionLevelsCount) {
+ continue;
+ }
+
+ var resolution = component.resolutions[r];
+ var numprecincts = resolution.precinctParameters.numprecincts;
+
+ for (; k < numprecincts;) {
+ var packet = createPacket(resolution, k, l);
+ k++;
+ return packet;
+ }
+
+ k = 0;
+ }
+
+ i = 0;
+ }
+
+ l = 0;
+ }
+
+ throw new JpxError('Out of packets');
+ };
+ }
+
+ function ResolutionPositionComponentLayerIterator(context) {
+ var siz = context.SIZ;
+ var tileIndex = context.currentTile.index;
+ var tile = context.tiles[tileIndex];
+ var layersCount = tile.codingStyleDefaultParameters.layersCount;
+ var componentsCount = siz.Csiz;
+ var l, r, c, p;
+ var maxDecompositionLevelsCount = 0;
+
+ for (c = 0; c < componentsCount; c++) {
+ var component = tile.components[c];
+ maxDecompositionLevelsCount = Math.max(maxDecompositionLevelsCount, component.codingStyleParameters.decompositionLevelsCount);
+ }
+
+ var maxNumPrecinctsInLevel = new Int32Array(maxDecompositionLevelsCount + 1);
+
+ for (r = 0; r <= maxDecompositionLevelsCount; ++r) {
+ var maxNumPrecincts = 0;
+
+ for (c = 0; c < componentsCount; ++c) {
+ var resolutions = tile.components[c].resolutions;
+
+ if (r < resolutions.length) {
+ maxNumPrecincts = Math.max(maxNumPrecincts, resolutions[r].precinctParameters.numprecincts);
+ }
+ }
+
+ maxNumPrecinctsInLevel[r] = maxNumPrecincts;
+ }
+
+ l = 0;
+ r = 0;
+ c = 0;
+ p = 0;
+
+ this.nextPacket = function JpxImage_nextPacket() {
+ for (; r <= maxDecompositionLevelsCount; r++) {
+ for (; p < maxNumPrecinctsInLevel[r]; p++) {
+ for (; c < componentsCount; c++) {
+ var component = tile.components[c];
+
+ if (r > component.codingStyleParameters.decompositionLevelsCount) {
+ continue;
+ }
+
+ var resolution = component.resolutions[r];
+ var numprecincts = resolution.precinctParameters.numprecincts;
+
+ if (p >= numprecincts) {
+ continue;
+ }
+
+ for (; l < layersCount;) {
+ var packet = createPacket(resolution, p, l);
+ l++;
+ return packet;
+ }
+
+ l = 0;
+ }
+
+ c = 0;
+ }
+
+ p = 0;
+ }
+
+ throw new JpxError('Out of packets');
+ };
+ }
+
+ function PositionComponentResolutionLayerIterator(context) {
+ var siz = context.SIZ;
+ var tileIndex = context.currentTile.index;
+ var tile = context.tiles[tileIndex];
+ var layersCount = tile.codingStyleDefaultParameters.layersCount;
+ var componentsCount = siz.Csiz;
+ var precinctsSizes = getPrecinctSizesInImageScale(tile);
+ var precinctsIterationSizes = precinctsSizes;
+ var l = 0,
+ r = 0,
+ c = 0,
+ px = 0,
+ py = 0;
+
+ this.nextPacket = function JpxImage_nextPacket() {
+ for (; py < precinctsIterationSizes.maxNumHigh; py++) {
+ for (; px < precinctsIterationSizes.maxNumWide; px++) {
+ for (; c < componentsCount; c++) {
+ var component = tile.components[c];
+ var decompositionLevelsCount = component.codingStyleParameters.decompositionLevelsCount;
+
+ for (; r <= decompositionLevelsCount; r++) {
+ var resolution = component.resolutions[r];
+ var sizeInImageScale = precinctsSizes.components[c].resolutions[r];
+ var k = getPrecinctIndexIfExist(px, py, sizeInImageScale, precinctsIterationSizes, resolution);
+
+ if (k === null) {
+ continue;
+ }
+
+ for (; l < layersCount;) {
+ var packet = createPacket(resolution, k, l);
+ l++;
+ return packet;
+ }
+
+ l = 0;
+ }
+
+ r = 0;
+ }
+
+ c = 0;
+ }
+
+ px = 0;
+ }
+
+ throw new JpxError('Out of packets');
+ };
+ }
+
+ function ComponentPositionResolutionLayerIterator(context) {
+ var siz = context.SIZ;
+ var tileIndex = context.currentTile.index;
+ var tile = context.tiles[tileIndex];
+ var layersCount = tile.codingStyleDefaultParameters.layersCount;
+ var componentsCount = siz.Csiz;
+ var precinctsSizes = getPrecinctSizesInImageScale(tile);
+ var l = 0,
+ r = 0,
+ c = 0,
+ px = 0,
+ py = 0;
+
+ this.nextPacket = function JpxImage_nextPacket() {
+ for (; c < componentsCount; ++c) {
+ var component = tile.components[c];
+ var precinctsIterationSizes = precinctsSizes.components[c];
+ var decompositionLevelsCount = component.codingStyleParameters.decompositionLevelsCount;
+
+ for (; py < precinctsIterationSizes.maxNumHigh; py++) {
+ for (; px < precinctsIterationSizes.maxNumWide; px++) {
+ for (; r <= decompositionLevelsCount; r++) {
+ var resolution = component.resolutions[r];
+ var sizeInImageScale = precinctsIterationSizes.resolutions[r];
+ var k = getPrecinctIndexIfExist(px, py, sizeInImageScale, precinctsIterationSizes, resolution);
+
+ if (k === null) {
+ continue;
+ }
+
+ for (; l < layersCount;) {
+ var packet = createPacket(resolution, k, l);
+ l++;
+ return packet;
+ }
+
+ l = 0;
+ }
+
+ r = 0;
+ }
+
+ px = 0;
+ }
+
+ py = 0;
+ }
+
+ throw new JpxError('Out of packets');
+ };
+ }
+
+ function getPrecinctIndexIfExist(pxIndex, pyIndex, sizeInImageScale, precinctIterationSizes, resolution) {
+ var posX = pxIndex * precinctIterationSizes.minWidth;
+ var posY = pyIndex * precinctIterationSizes.minHeight;
+
+ if (posX % sizeInImageScale.width !== 0 || posY % sizeInImageScale.height !== 0) {
+ return null;
+ }
+
+ var startPrecinctRowIndex = posY / sizeInImageScale.width * resolution.precinctParameters.numprecinctswide;
+ return posX / sizeInImageScale.height + startPrecinctRowIndex;
+ }
+
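+ // Scales every resolution level's precinct grid back to the image reference
+ // grid so that the position-driven progression orders can walk precincts of
+ // all components and resolutions on a common coordinate system.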
+ function getPrecinctSizesInImageScale(tile) {
+ var componentsCount = tile.components.length;
+ var minWidth = Number.MAX_VALUE;
+ var minHeight = Number.MAX_VALUE;
+ var maxNumWide = 0;
+ var maxNumHigh = 0;
+ var sizePerComponent = new Array(componentsCount);
+
+ for (var c = 0; c < componentsCount; c++) {
+ var component = tile.components[c];
+ var decompositionLevelsCount = component.codingStyleParameters.decompositionLevelsCount;
+ var sizePerResolution = new Array(decompositionLevelsCount + 1);
+ var minWidthCurrentComponent = Number.MAX_VALUE;
+ var minHeightCurrentComponent = Number.MAX_VALUE;
+ var maxNumWideCurrentComponent = 0;
+ var maxNumHighCurrentComponent = 0;
+ var scale = 1;
+
+ for (var r = decompositionLevelsCount; r >= 0; --r) {
+ var resolution = component.resolutions[r];
+ var widthCurrentResolution = scale * resolution.precinctParameters.precinctWidth;
+ var heightCurrentResolution = scale * resolution.precinctParameters.precinctHeight;
+ minWidthCurrentComponent = Math.min(minWidthCurrentComponent, widthCurrentResolution);
+ minHeightCurrentComponent = Math.min(minHeightCurrentComponent, heightCurrentResolution);
+ maxNumWideCurrentComponent = Math.max(maxNumWideCurrentComponent, resolution.precinctParameters.numprecinctswide);
+ maxNumHighCurrentComponent = Math.max(maxNumHighCurrentComponent, resolution.precinctParameters.numprecinctshigh);
+ sizePerResolution[r] = {
+ width: widthCurrentResolution,
+ height: heightCurrentResolution
+ };
+ scale <<= 1;
+ }
+
+ minWidth = Math.min(minWidth, minWidthCurrentComponent);
+ minHeight = Math.min(minHeight, minHeightCurrentComponent);
+ maxNumWide = Math.max(maxNumWide, maxNumWideCurrentComponent);
+ maxNumHigh = Math.max(maxNumHigh, maxNumHighCurrentComponent);
+ sizePerComponent[c] = {
+ resolutions: sizePerResolution,
+ minWidth: minWidthCurrentComponent,
+ minHeight: minHeightCurrentComponent,
+ maxNumWide: maxNumWideCurrentComponent,
+ maxNumHigh: maxNumHighCurrentComponent
+ };
+ }
+
+ return {
+ components: sizePerComponent,
+ minWidth: minWidth,
+ minHeight: minHeight,
+ maxNumWide: maxNumWide,
+ maxNumHigh: maxNumHigh
+ };
+ }
+
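+ // Builds the resolution, sub-band and code-block structures for the current
+ // tile and picks a packet iterator matching the COD progression order.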
+ function buildPackets(context) {
+ var siz = context.SIZ;
+ var tileIndex = context.currentTile.index;
+ var tile = context.tiles[tileIndex];
+ var componentsCount = siz.Csiz;
+
+ for (var c = 0; c < componentsCount; c++) {
+ var component = tile.components[c];
+ var decompositionLevelsCount = component.codingStyleParameters.decompositionLevelsCount;
+ var resolutions = [];
+ var subbands = [];
+
+ for (var r = 0; r <= decompositionLevelsCount; r++) {
+ var blocksDimensions = getBlocksDimensions(context, component, r);
+ var resolution = {};
+ var scale = 1 << decompositionLevelsCount - r;
+ resolution.trx0 = Math.ceil(component.tcx0 / scale);
+ resolution.try0 = Math.ceil(component.tcy0 / scale);
+ resolution.trx1 = Math.ceil(component.tcx1 / scale);
+ resolution.try1 = Math.ceil(component.tcy1 / scale);
+ resolution.resLevel = r;
+ buildPrecincts(context, resolution, blocksDimensions);
+ resolutions.push(resolution);
+ var subband;
+
+ if (r === 0) {
+ subband = {};
+ subband.type = 'LL';
+ subband.tbx0 = Math.ceil(component.tcx0 / scale);
+ subband.tby0 = Math.ceil(component.tcy0 / scale);
+ subband.tbx1 = Math.ceil(component.tcx1 / scale);
+ subband.tby1 = Math.ceil(component.tcy1 / scale);
+ subband.resolution = resolution;
+ buildCodeblocks(context, subband, blocksDimensions);
+ subbands.push(subband);
+ resolution.subbands = [subband];
+ } else {
+ var bscale = 1 << decompositionLevelsCount - r + 1;
+ var resolutionSubbands = [];
+ subband = {};
+ subband.type = 'HL';
+ subband.tbx0 = Math.ceil(component.tcx0 / bscale - 0.5);
+ subband.tby0 = Math.ceil(component.tcy0 / bscale);
+ subband.tbx1 = Math.ceil(component.tcx1 / bscale - 0.5);
+ subband.tby1 = Math.ceil(component.tcy1 / bscale);
+ subband.resolution = resolution;
+ buildCodeblocks(context, subband, blocksDimensions);
+ subbands.push(subband);
+ resolutionSubbands.push(subband);
+ subband = {};
+ subband.type = 'LH';
+ subband.tbx0 = Math.ceil(component.tcx0 / bscale);
+ subband.tby0 = Math.ceil(component.tcy0 / bscale - 0.5);
+ subband.tbx1 = Math.ceil(component.tcx1 / bscale);
+ subband.tby1 = Math.ceil(component.tcy1 / bscale - 0.5);
+ subband.resolution = resolution;
+ buildCodeblocks(context, subband, blocksDimensions);
+ subbands.push(subband);
+ resolutionSubbands.push(subband);
+ subband = {};
+ subband.type = 'HH';
+ subband.tbx0 = Math.ceil(component.tcx0 / bscale - 0.5);
+ subband.tby0 = Math.ceil(component.tcy0 / bscale - 0.5);
+ subband.tbx1 = Math.ceil(component.tcx1 / bscale - 0.5);
+ subband.tby1 = Math.ceil(component.tcy1 / bscale - 0.5);
+ subband.resolution = resolution;
+ buildCodeblocks(context, subband, blocksDimensions);
+ subbands.push(subband);
+ resolutionSubbands.push(subband);
+ resolution.subbands = resolutionSubbands;
+ }
+ }
+
+ component.resolutions = resolutions;
+ component.subbands = subbands;
+ }
+
+ var progressionOrder = tile.codingStyleDefaultParameters.progressionOrder;
+
+ switch (progressionOrder) {
+ case 0:
+ tile.packetsIterator = new LayerResolutionComponentPositionIterator(context);
+ break;
+
+ case 1:
+ tile.packetsIterator = new ResolutionLayerComponentPositionIterator(context);
+ break;
+
+ case 2:
+ tile.packetsIterator = new ResolutionPositionComponentLayerIterator(context);
+ break;
+
+ case 3:
+ tile.packetsIterator = new PositionComponentResolutionLayerIterator(context);
+ break;
+
+ case 4:
+ tile.packetsIterator = new ComponentPositionResolutionLayerIterator(context);
+ break;
+
+ default:
+ throw new JpxError("Unsupported progression order ".concat(progressionOrder));
+ }
+ }
+
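+ // Decodes the packet headers of a tile-part: code-block inclusion and
+ // zero-bit-plane tag trees, coding-pass counts and coded-segment lengths.
+ // readBits implements the bit-stuffing rule whereby only seven bits are read
+ // from the byte following a 0xFF byte.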
+ function parseTilePackets(context, data, offset, dataLength) {
+ var position = 0;
+ var buffer,
+ bufferSize = 0,
+ skipNextBit = false;
+
+ function readBits(count) {
+ while (bufferSize < count) {
+ var b = data[offset + position];
+ position++;
+
+ if (skipNextBit) {
+ buffer = buffer << 7 | b;
+ bufferSize += 7;
+ skipNextBit = false;
+ } else {
+ buffer = buffer << 8 | b;
+ bufferSize += 8;
+ }
+
+ if (b === 0xFF) {
+ skipNextBit = true;
+ }
+ }
+
+ bufferSize -= count;
+ return buffer >>> bufferSize & (1 << count) - 1;
+ }
+
+ function skipMarkerIfEqual(value) {
+ if (data[offset + position - 1] === 0xFF && data[offset + position] === value) {
+ skipBytes(1);
+ return true;
+ } else if (data[offset + position] === 0xFF && data[offset + position + 1] === value) {
+ skipBytes(2);
+ return true;
+ }
+
+ return false;
+ }
+
+ function skipBytes(count) {
+ position += count;
+ }
+
+ function alignToByte() {
+ bufferSize = 0;
+
+ if (skipNextBit) {
+ position++;
+ skipNextBit = false;
+ }
+ }
+
+ function readCodingpasses() {
+ if (readBits(1) === 0) {
+ return 1;
+ }
+
+ if (readBits(1) === 0) {
+ return 2;
+ }
+
+ var value = readBits(2);
+
+ if (value < 3) {
+ return value + 3;
+ }
+
+ value = readBits(5);
+
+ if (value < 31) {
+ return value + 6;
+ }
+
+ value = readBits(7);
+ return value + 37;
+ }
+
+ var tileIndex = context.currentTile.index;
+ var tile = context.tiles[tileIndex];
+ var sopMarkerUsed = context.COD.sopMarkerUsed;
+ var ephMarkerUsed = context.COD.ephMarkerUsed;
+ var packetsIterator = tile.packetsIterator;
+
+ while (position < dataLength) {
+ alignToByte();
+
+ if (sopMarkerUsed && skipMarkerIfEqual(0x91)) {
+ skipBytes(4);
+ }
+
+ var packet = packetsIterator.nextPacket();
+
+ if (!readBits(1)) {
+ continue;
+ }
+
+ var layerNumber = packet.layerNumber;
+ var queue = [],
+ codeblock;
+
+ for (var i = 0, ii = packet.codeblocks.length; i < ii; i++) {
+ codeblock = packet.codeblocks[i];
+ var precinct = codeblock.precinct;
+ var codeblockColumn = codeblock.cbx - precinct.cbxMin;
+ var codeblockRow = codeblock.cby - precinct.cbyMin;
+ var codeblockIncluded = false;
+ var firstTimeInclusion = false;
+ var valueReady;
+
+ if (codeblock['included'] !== undefined) {
+ codeblockIncluded = !!readBits(1);
+ } else {
+ precinct = codeblock.precinct;
+ var inclusionTree, zeroBitPlanesTree;
+
+ if (precinct['inclusionTree'] !== undefined) {
+ inclusionTree = precinct.inclusionTree;
+ } else {
+ var width = precinct.cbxMax - precinct.cbxMin + 1;
+ var height = precinct.cbyMax - precinct.cbyMin + 1;
+ inclusionTree = new InclusionTree(width, height, layerNumber);
+ zeroBitPlanesTree = new TagTree(width, height);
+ precinct.inclusionTree = inclusionTree;
+ precinct.zeroBitPlanesTree = zeroBitPlanesTree;
+ }
+
+ if (inclusionTree.reset(codeblockColumn, codeblockRow, layerNumber)) {
+ while (true) {
+ if (readBits(1)) {
+ valueReady = !inclusionTree.nextLevel();
+
+ if (valueReady) {
+ codeblock.included = true;
+ codeblockIncluded = firstTimeInclusion = true;
+ break;
+ }
+ } else {
+ inclusionTree.incrementValue(layerNumber);
+ break;
+ }
+ }
+ }
+ }
+
+ if (!codeblockIncluded) {
+ continue;
+ }
+
+ if (firstTimeInclusion) {
+ zeroBitPlanesTree = precinct.zeroBitPlanesTree;
+ zeroBitPlanesTree.reset(codeblockColumn, codeblockRow);
+
+ while (true) {
+ if (readBits(1)) {
+ valueReady = !zeroBitPlanesTree.nextLevel();
+
+ if (valueReady) {
+ break;
+ }
+ } else {
+ zeroBitPlanesTree.incrementValue();
+ }
+ }
+
+ codeblock.zeroBitPlanes = zeroBitPlanesTree.value;
+ }
+
+ var codingpasses = readCodingpasses();
+
+ while (readBits(1)) {
+ codeblock.Lblock++;
+ }
+
+ var codingpassesLog2 = (0, _util.log2)(codingpasses);
+ var bits = (codingpasses < 1 << codingpassesLog2 ? codingpassesLog2 - 1 : codingpassesLog2) + codeblock.Lblock;
+ var codedDataLength = readBits(bits);
+ queue.push({
+ codeblock: codeblock,
+ codingpasses: codingpasses,
+ dataLength: codedDataLength
+ });
+ }
+
+ alignToByte();
+
+ if (ephMarkerUsed) {
+ skipMarkerIfEqual(0x92);
+ }
+
+ while (queue.length > 0) {
+ var packetItem = queue.shift();
+ codeblock = packetItem.codeblock;
+
+ if (codeblock['data'] === undefined) {
+ codeblock.data = [];
+ }
+
+ codeblock.data.push({
+ data: data,
+ start: offset + position,
+ end: offset + position + packetItem.dataLength,
+ codingpasses: packetItem.codingpasses
+ });
+ position += packetItem.dataLength;
+ }
+ }
+
+ return position;
+ }
+
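+ // Entropy-decodes a sub-band's code-blocks with the arithmetic decoder
+ // (BitModel coding passes), dequantizes the coefficients and writes them,
+ // interleaved for non-LL sub-bands, into the resolution-level array.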
+ function copyCoefficients(coefficients, levelWidth, levelHeight, subband, delta, mb, reversible, segmentationSymbolUsed) {
+ var x0 = subband.tbx0;
+ var y0 = subband.tby0;
+ var width = subband.tbx1 - subband.tbx0;
+ var codeblocks = subband.codeblocks;
+ var right = subband.type.charAt(0) === 'H' ? 1 : 0;
+ var bottom = subband.type.charAt(1) === 'H' ? levelWidth : 0;
+
+ for (var i = 0, ii = codeblocks.length; i < ii; ++i) {
+ var codeblock = codeblocks[i];
+ var blockWidth = codeblock.tbx1_ - codeblock.tbx0_;
+ var blockHeight = codeblock.tby1_ - codeblock.tby0_;
+
+ if (blockWidth === 0 || blockHeight === 0) {
+ continue;
+ }
+
+ if (codeblock['data'] === undefined) {
+ continue;
+ }
+
+ var bitModel, currentCodingpassType;
+ bitModel = new BitModel(blockWidth, blockHeight, codeblock.subbandType, codeblock.zeroBitPlanes, mb);
+ currentCodingpassType = 2;
+ var data = codeblock.data,
+ totalLength = 0,
+ codingpasses = 0;
+ var j, jj, dataItem;
+
+ for (j = 0, jj = data.length; j < jj; j++) {
+ dataItem = data[j];
+ totalLength += dataItem.end - dataItem.start;
+ codingpasses += dataItem.codingpasses;
+ }
+
+ var encodedData = new Uint8Array(totalLength);
+ var position = 0;
+
+ for (j = 0, jj = data.length; j < jj; j++) {
+ dataItem = data[j];
+ var chunk = dataItem.data.subarray(dataItem.start, dataItem.end);
+ encodedData.set(chunk, position);
+ position += chunk.length;
+ }
+
+ var decoder = new _arithmetic_decoder.ArithmeticDecoder(encodedData, 0, totalLength);
+ bitModel.setDecoder(decoder);
+
+ for (j = 0; j < codingpasses; j++) {
+ switch (currentCodingpassType) {
+ case 0:
+ bitModel.runSignificancePropagationPass();
+ break;
+
+ case 1:
+ bitModel.runMagnitudeRefinementPass();
+ break;
+
+ case 2:
+ bitModel.runCleanupPass();
+
+ if (segmentationSymbolUsed) {
+ bitModel.checkSegmentationSymbol();
+ }
+
+ break;
+ }
+
+ currentCodingpassType = (currentCodingpassType + 1) % 3;
+ }
+
+ var offset = codeblock.tbx0_ - x0 + (codeblock.tby0_ - y0) * width;
+ var sign = bitModel.coefficentsSign;
+ var magnitude = bitModel.coefficentsMagnitude;
+ var bitsDecoded = bitModel.bitsDecoded;
+ var magnitudeCorrection = reversible ? 0 : 0.5;
+ var k, n, nb;
+ position = 0;
+ var interleave = subband.type !== 'LL';
+
+ for (j = 0; j < blockHeight; j++) {
+ var row = offset / width | 0;
+ var levelOffset = 2 * row * (levelWidth - width) + right + bottom;
+
+ for (k = 0; k < blockWidth; k++) {
+ n = magnitude[position];
+
+ if (n !== 0) {
+ n = (n + magnitudeCorrection) * delta;
+
+ if (sign[position] !== 0) {
+ n = -n;
+ }
+
+ nb = bitsDecoded[position];
+ var pos = interleave ? levelOffset + (offset << 1) : offset;
+
+ if (reversible && nb >= mb) {
+ coefficients[pos] = n;
+ } else {
+ coefficients[pos] = n * (1 << mb - nb);
+ }
+ }
+
+ offset++;
+ position++;
+ }
+
+ offset += width - blockWidth;
+ }
+ }
+ }
+
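+ // Dequantizes and inverse-wavelet-transforms one tile component. The step
+ // size delta is derived from the QCD/QCC exponent and mantissa (epsilon, mu)
+ // and the sub-band gain; reversible coding uses a unit step.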
+ function transformTile(context, tile, c) {
+ var component = tile.components[c];
+ var codingStyleParameters = component.codingStyleParameters;
+ var quantizationParameters = component.quantizationParameters;
+ var decompositionLevelsCount = codingStyleParameters.decompositionLevelsCount;
+ var spqcds = quantizationParameters.SPqcds;
+ var scalarExpounded = quantizationParameters.scalarExpounded;
+ var guardBits = quantizationParameters.guardBits;
+ var segmentationSymbolUsed = codingStyleParameters.segmentationSymbolUsed;
+ var precision = context.components[c].precision;
+ var reversible = codingStyleParameters.reversibleTransformation;
+ var transform = reversible ? new ReversibleTransform() : new IrreversibleTransform();
+ var subbandCoefficients = [];
+ var b = 0;
+
+ for (var i = 0; i <= decompositionLevelsCount; i++) {
+ var resolution = component.resolutions[i];
+ var width = resolution.trx1 - resolution.trx0;
+ var height = resolution.try1 - resolution.try0;
+ var coefficients = new Float32Array(width * height);
+
+ for (var j = 0, jj = resolution.subbands.length; j < jj; j++) {
+ var mu, epsilon;
+
+ if (!scalarExpounded) {
+ mu = spqcds[0].mu;
+ epsilon = spqcds[0].epsilon + (i > 0 ? 1 - i : 0);
+ } else {
+ mu = spqcds[b].mu;
+ epsilon = spqcds[b].epsilon;
+ b++;
+ }
+
+ var subband = resolution.subbands[j];
+ var gainLog2 = SubbandsGainLog2[subband.type];
+ var delta = reversible ? 1 : Math.pow(2, precision + gainLog2 - epsilon) * (1 + mu / 2048);
+ var mb = guardBits + epsilon - 1;
+ copyCoefficients(coefficients, width, height, subband, delta, mb, reversible, segmentationSymbolUsed);
+ }
+
+ subbandCoefficients.push({
+ width: width,
+ height: height,
+ items: coefficients
+ });
+ }
+
+ var result = transform.calculate(subbandCoefficients, component.tcx0, component.tcy0);
+ return {
+ left: component.tcx0,
+ top: component.tcy0,
+ width: result.width,
+ height: result.height,
+ items: result.items
+ };
+ }
+
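+ // Undoes the multiple component transform where signalled (ICT for the
+ // irreversible path, RCT for the reversible path) and packs the tile into an
+ // interleaved Uint8ClampedArray.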
+ function transformComponents(context) {
+ var siz = context.SIZ;
+ var components = context.components;
+ var componentsCount = siz.Csiz;
+ var resultImages = [];
+
+ for (var i = 0, ii = context.tiles.length; i < ii; i++) {
+ var tile = context.tiles[i];
+ var transformedTiles = [];
+ var c;
+
+ for (c = 0; c < componentsCount; c++) {
+ transformedTiles[c] = transformTile(context, tile, c);
+ }
+
+ var tile0 = transformedTiles[0];
+ var out = new Uint8ClampedArray(tile0.items.length * componentsCount);
+ var result = {
+ left: tile0.left,
+ top: tile0.top,
+ width: tile0.width,
+ height: tile0.height,
+ items: out
+ };
+ var shift, offset;
+ var pos = 0,
+ j,
+ jj,
+ y0,
+ y1,
+ y2;
+
+ if (tile.codingStyleDefaultParameters.multipleComponentTransform) {
+ var fourComponents = componentsCount === 4;
+ var y0items = transformedTiles[0].items;
+ var y1items = transformedTiles[1].items;
+ var y2items = transformedTiles[2].items;
+ var y3items = fourComponents ? transformedTiles[3].items : null;
+ shift = components[0].precision - 8;
+ offset = (128 << shift) + 0.5;
+ var component0 = tile.components[0];
+ var alpha01 = componentsCount - 3;
+ jj = y0items.length;
+
+ if (!component0.codingStyleParameters.reversibleTransformation) {
+ for (j = 0; j < jj; j++, pos += alpha01) {
+ y0 = y0items[j] + offset;
+ y1 = y1items[j];
+ y2 = y2items[j];
+ out[pos++] = y0 + 1.402 * y2 >> shift;
+ out[pos++] = y0 - 0.34413 * y1 - 0.71414 * y2 >> shift;
+ out[pos++] = y0 + 1.772 * y1 >> shift;
+ }
+ } else {
+ for (j = 0; j < jj; j++, pos += alpha01) {
+ y0 = y0items[j] + offset;
+ y1 = y1items[j];
+ y2 = y2items[j];
+ var g = y0 - (y2 + y1 >> 2);
+ out[pos++] = g + y2 >> shift;
+ out[pos++] = g >> shift;
+ out[pos++] = g + y1 >> shift;
+ }
+ }
+
+ if (fourComponents) {
+ for (j = 0, pos = 3; j < jj; j++, pos += 4) {
+ out[pos] = y3items[j] + offset >> shift;
+ }
+ }
+ } else {
+ for (c = 0; c < componentsCount; c++) {
+ var items = transformedTiles[c].items;
+ shift = components[c].precision - 8;
+ offset = (128 << shift) + 0.5;
+
+ for (pos = c, j = 0, jj = items.length; j < jj; j++) {
+ out[pos] = items[j] + offset >> shift;
+ pos += componentsCount;
+ }
+ }
+ }
+
+ resultImages.push(result);
+ }
+
+ return resultImages;
+ }
+
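+ // Resolves the coding and quantization parameters for each component of the
+ // tile, preferring per-component COC/QCC overrides over the COD/QCD defaults.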
+ function initializeTile(context, tileIndex) {
+ var siz = context.SIZ;
+ var componentsCount = siz.Csiz;
+ var tile = context.tiles[tileIndex];
+
+ for (var c = 0; c < componentsCount; c++) {
+ var component = tile.components[c];
+ var qcdOrQcc = context.currentTile.QCC[c] !== undefined ? context.currentTile.QCC[c] : context.currentTile.QCD;
+ component.quantizationParameters = qcdOrQcc;
+ var codOrCoc = context.currentTile.COC[c] !== undefined ? context.currentTile.COC[c] : context.currentTile.COD;
+ component.codingStyleParameters = codOrCoc;
+ }
+
+ tile.codingStyleDefaultParameters = context.currentTile.COD;
+ }
+
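+ // Tag tree (JPEG 2000 Annex B): a quad-tree over a precinct's code-blocks
+ // whose node values are decoded incrementally; used here for the number of
+ // missing (zero) bit-planes per code-block.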
+ var TagTree = function TagTreeClosure() {
+ function TagTree(width, height) {
+ var levelsLength = (0, _util.log2)(Math.max(width, height)) + 1;
+ this.levels = [];
+
+ for (var i = 0; i < levelsLength; i++) {
+ var level = {
+ width: width,
+ height: height,
+ items: []
+ };
+ this.levels.push(level);
+ width = Math.ceil(width / 2);
+ height = Math.ceil(height / 2);
+ }
+ }
+
+ TagTree.prototype = {
+ reset: function TagTree_reset(i, j) {
+ var currentLevel = 0,
+ value = 0,
+ level;
+
+ while (currentLevel < this.levels.length) {
+ level = this.levels[currentLevel];
+ var index = i + j * level.width;
+
+ if (level.items[index] !== undefined) {
+ value = level.items[index];
+ break;
+ }
+
+ level.index = index;
+ i >>= 1;
+ j >>= 1;
+ currentLevel++;
+ }
+
+ currentLevel--;
+ level = this.levels[currentLevel];
+ level.items[level.index] = value;
+ this.currentLevel = currentLevel;
+ delete this.value;
+ },
+ incrementValue: function TagTree_incrementValue() {
+ var level = this.levels[this.currentLevel];
+ level.items[level.index]++;
+ },
+ nextLevel: function TagTree_nextLevel() {
+ var currentLevel = this.currentLevel;
+ var level = this.levels[currentLevel];
+ var value = level.items[level.index];
+ currentLevel--;
+
+ if (currentLevel < 0) {
+ this.value = value;
+ return false;
+ }
+
+ this.currentLevel = currentLevel;
+ level = this.levels[currentLevel];
+ level.items[level.index] = value;
+ return true;
+ }
+ };
+ return TagTree;
+ }();
+
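+ // Inclusion tree: a tag-tree variant that decodes the layer in which each
+ // code-block first contributes to a packet.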
+ var InclusionTree = function InclusionTreeClosure() {
+ function InclusionTree(width, height, defaultValue) {
+ var levelsLength = (0, _util.log2)(Math.max(width, height)) + 1;
+ this.levels = [];
+
+ for (var i = 0; i < levelsLength; i++) {
+ var items = new Uint8Array(width * height);
+
+ for (var j = 0, jj = items.length; j < jj; j++) {
+ items[j] = defaultValue;
+ }
+
+ var level = {
+ width: width,
+ height: height,
+ items: items
+ };
+ this.levels.push(level);
+ width = Math.ceil(width / 2);
+ height = Math.ceil(height / 2);
+ }
+ }
+
+ InclusionTree.prototype = {
+ reset: function InclusionTree_reset(i, j, stopValue) {
+ var currentLevel = 0;
+
+ while (currentLevel < this.levels.length) {
+ var level = this.levels[currentLevel];
+ var index = i + j * level.width;
+ level.index = index;
+ var value = level.items[index];
+
+ if (value === 0xFF) {
+ break;
+ }
+
+ if (value > stopValue) {
+ this.currentLevel = currentLevel;
+ this.propagateValues();
+ return false;
+ }
+
+ i >>= 1;
+ j >>= 1;
+ currentLevel++;
+ }
+
+ this.currentLevel = currentLevel - 1;
+ return true;
+ },
+ incrementValue: function InclusionTree_incrementValue(stopValue) {
+ var level = this.levels[this.currentLevel];
+ level.items[level.index] = stopValue + 1;
+ this.propagateValues();
+ },
+ propagateValues: function InclusionTree_propagateValues() {
+ var levelIndex = this.currentLevel;
+ var level = this.levels[levelIndex];
+ var currentValue = level.items[level.index];
+
+ while (--levelIndex >= 0) {
+ level = this.levels[levelIndex];
+ level.items[level.index] = currentValue;
+ }
+ },
+ nextLevel: function InclusionTree_nextLevel() {
+ var currentLevel = this.currentLevel;
+ var level = this.levels[currentLevel];
+ var value = level.items[level.index];
+ level.items[level.index] = 0xFF;
+ currentLevel--;
+
+ if (currentLevel < 0) {
+ return false;
+ }
+
+ this.currentLevel = currentLevel;
+ level = this.levels[currentLevel];
+ level.items[level.index] = value;
+ return true;
+ }
+ };
+ return InclusionTree;
+ }();
+
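+ // BitModel implements the EBCOT Tier-1 context modelling: significance
+ // propagation, magnitude refinement and cleanup passes over each bit-plane,
+ // plus sign decoding and the optional segmentation-symbol check (0xA).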
+ var BitModel = function BitModelClosure() {
+ var UNIFORM_CONTEXT = 17;
+ var RUNLENGTH_CONTEXT = 18;
+ var LLAndLHContextsLabel = new Uint8Array([0, 5, 8, 0, 3, 7, 8, 0, 4, 7, 8, 0, 0, 0, 0, 0, 1, 6, 8, 0, 3, 7, 8, 0, 4, 7, 8, 0, 0, 0, 0, 0, 2, 6, 8, 0, 3, 7, 8, 0, 4, 7, 8, 0, 0, 0, 0, 0, 2, 6, 8, 0, 3, 7, 8, 0, 4, 7, 8, 0, 0, 0, 0, 0, 2, 6, 8, 0, 3, 7, 8, 0, 4, 7, 8]);
+ var HLContextLabel = new Uint8Array([0, 3, 4, 0, 5, 7, 7, 0, 8, 8, 8, 0, 0, 0, 0, 0, 1, 3, 4, 0, 6, 7, 7, 0, 8, 8, 8, 0, 0, 0, 0, 0, 2, 3, 4, 0, 6, 7, 7, 0, 8, 8, 8, 0, 0, 0, 0, 0, 2, 3, 4, 0, 6, 7, 7, 0, 8, 8, 8, 0, 0, 0, 0, 0, 2, 3, 4, 0, 6, 7, 7, 0, 8, 8, 8]);
+ var HHContextLabel = new Uint8Array([0, 1, 2, 0, 1, 2, 2, 0, 2, 2, 2, 0, 0, 0, 0, 0, 3, 4, 5, 0, 4, 5, 5, 0, 5, 5, 5, 0, 0, 0, 0, 0, 6, 7, 7, 0, 7, 7, 7, 0, 7, 7, 7, 0, 0, 0, 0, 0, 8, 8, 8, 0, 8, 8, 8, 0, 8, 8, 8, 0, 0, 0, 0, 0, 8, 8, 8, 0, 8, 8, 8, 0, 8, 8, 8]);
+
+ function BitModel(width, height, subband, zeroBitPlanes, mb) {
+ this.width = width;
+ this.height = height;
+ this.contextLabelTable = subband === 'HH' ? HHContextLabel : subband === 'HL' ? HLContextLabel : LLAndLHContextsLabel;
+ var coefficientCount = width * height;
+ this.neighborsSignificance = new Uint8Array(coefficientCount);
+ this.coefficentsSign = new Uint8Array(coefficientCount);
+ this.coefficentsMagnitude = mb > 14 ? new Uint32Array(coefficientCount) : mb > 6 ? new Uint16Array(coefficientCount) : new Uint8Array(coefficientCount);
+ this.processingFlags = new Uint8Array(coefficientCount);
+ var bitsDecoded = new Uint8Array(coefficientCount);
+
+ if (zeroBitPlanes !== 0) {
+ for (var i = 0; i < coefficientCount; i++) {
+ bitsDecoded[i] = zeroBitPlanes;
+ }
+ }
+
+ this.bitsDecoded = bitsDecoded;
+ this.reset();
+ }
+
+ BitModel.prototype = {
+ setDecoder: function BitModel_setDecoder(decoder) {
+ this.decoder = decoder;
+ },
+ reset: function BitModel_reset() {
+ this.contexts = new Int8Array(19);
+ this.contexts[0] = 4 << 1 | 0;
+ this.contexts[UNIFORM_CONTEXT] = 46 << 1 | 0;
+ this.contexts[RUNLENGTH_CONTEXT] = 3 << 1 | 0;
+ },
+ setNeighborsSignificance: function BitModel_setNeighborsSignificance(row, column, index) {
+ var neighborsSignificance = this.neighborsSignificance;
+ var width = this.width,
+ height = this.height;
+ var left = column > 0;
+ var right = column + 1 < width;
+ var i;
+
+ if (row > 0) {
+ i = index - width;
+
+ if (left) {
+ neighborsSignificance[i - 1] += 0x10;
+ }
+
+ if (right) {
+ neighborsSignificance[i + 1] += 0x10;
+ }
+
+ neighborsSignificance[i] += 0x04;
+ }
+
+ if (row + 1 < height) {
+ i = index + width;
+
+ if (left) {
+ neighborsSignificance[i - 1] += 0x10;
+ }
+
+ if (right) {
+ neighborsSignificance[i + 1] += 0x10;
+ }
+
+ neighborsSignificance[i] += 0x04;
+ }
+
+ if (left) {
+ neighborsSignificance[index - 1] += 0x01;
+ }
+
+ if (right) {
+ neighborsSignificance[index + 1] += 0x01;
+ }
+
+ neighborsSignificance[index] |= 0x80;
+ },
+ runSignificancePropagationPass: function BitModel_runSignificancePropagationPass() {
+ var decoder = this.decoder;
+ var width = this.width,
+ height = this.height;
+ var coefficentsMagnitude = this.coefficentsMagnitude;
+ var coefficentsSign = this.coefficentsSign;
+ var neighborsSignificance = this.neighborsSignificance;
+ var processingFlags = this.processingFlags;
+ var contexts = this.contexts;
+ var labels = this.contextLabelTable;
+ var bitsDecoded = this.bitsDecoded;
+ var processedInverseMask = ~1;
+ var processedMask = 1;
+ var firstMagnitudeBitMask = 2;
+
+ for (var i0 = 0; i0 < height; i0 += 4) {
+ for (var j = 0; j < width; j++) {
+ var index = i0 * width + j;
+
+ for (var i1 = 0; i1 < 4; i1++, index += width) {
+ var i = i0 + i1;
+
+ if (i >= height) {
+ break;
+ }
+
+ processingFlags[index] &= processedInverseMask;
+
+ if (coefficentsMagnitude[index] || !neighborsSignificance[index]) {
+ continue;
+ }
+
+ var contextLabel = labels[neighborsSignificance[index]];
+ var decision = decoder.readBit(contexts, contextLabel);
+
+ if (decision) {
+ var sign = this.decodeSignBit(i, j, index);
+ coefficentsSign[index] = sign;
+ coefficentsMagnitude[index] = 1;
+ this.setNeighborsSignificance(i, j, index);
+ processingFlags[index] |= firstMagnitudeBitMask;
+ }
+
+ bitsDecoded[index]++;
+ processingFlags[index] |= processedMask;
+ }
+ }
+ }
+ },
+ decodeSignBit: function BitModel_decodeSignBit(row, column, index) {
+ var width = this.width,
+ height = this.height;
+ var coefficentsMagnitude = this.coefficentsMagnitude;
+ var coefficentsSign = this.coefficentsSign;
+ var contribution, sign0, sign1, significance1;
+ var contextLabel, decoded;
+ significance1 = column > 0 && coefficentsMagnitude[index - 1] !== 0;
+
+ if (column + 1 < width && coefficentsMagnitude[index + 1] !== 0) {
+ sign1 = coefficentsSign[index + 1];
+
+ if (significance1) {
+ sign0 = coefficentsSign[index - 1];
+ contribution = 1 - sign1 - sign0;
+ } else {
+ contribution = 1 - sign1 - sign1;
+ }
+ } else if (significance1) {
+ sign0 = coefficentsSign[index - 1];
+ contribution = 1 - sign0 - sign0;
+ } else {
+ contribution = 0;
+ }
+
+ var horizontalContribution = 3 * contribution;
+ significance1 = row > 0 && coefficentsMagnitude[index - width] !== 0;
+
+ if (row + 1 < height && coefficentsMagnitude[index + width] !== 0) {
+ sign1 = coefficentsSign[index + width];
+
+ if (significance1) {
+ sign0 = coefficentsSign[index - width];
+ contribution = 1 - sign1 - sign0 + horizontalContribution;
+ } else {
+ contribution = 1 - sign1 - sign1 + horizontalContribution;
+ }
+ } else if (significance1) {
+ sign0 = coefficentsSign[index - width];
+ contribution = 1 - sign0 - sign0 + horizontalContribution;
+ } else {
+ contribution = horizontalContribution;
+ }
+
+ if (contribution >= 0) {
+ contextLabel = 9 + contribution;
+ decoded = this.decoder.readBit(this.contexts, contextLabel);
+ } else {
+ contextLabel = 9 - contribution;
+ decoded = this.decoder.readBit(this.contexts, contextLabel) ^ 1;
+ }
+
+ return decoded;
+ },
+ runMagnitudeRefinementPass: function BitModel_runMagnitudeRefinementPass() {
+ var decoder = this.decoder;
+ var width = this.width,
+ height = this.height;
+ var coefficentsMagnitude = this.coefficentsMagnitude;
+ var neighborsSignificance = this.neighborsSignificance;
+ var contexts = this.contexts;
+ var bitsDecoded = this.bitsDecoded;
+ var processingFlags = this.processingFlags;
+ var processedMask = 1;
+ var firstMagnitudeBitMask = 2;
+ var length = width * height;
+ var width4 = width * 4;
+
+ for (var index0 = 0, indexNext; index0 < length; index0 = indexNext) {
+ indexNext = Math.min(length, index0 + width4);
+
+ for (var j = 0; j < width; j++) {
+ for (var index = index0 + j; index < indexNext; index += width) {
+ if (!coefficentsMagnitude[index] || (processingFlags[index] & processedMask) !== 0) {
+ continue;
+ }
+
+ var contextLabel = 16;
+
+ if ((processingFlags[index] & firstMagnitudeBitMask) !== 0) {
+ processingFlags[index] ^= firstMagnitudeBitMask;
+ var significance = neighborsSignificance[index] & 127;
+ contextLabel = significance === 0 ? 15 : 14;
+ }
+
+ var bit = decoder.readBit(contexts, contextLabel);
+ coefficentsMagnitude[index] = coefficentsMagnitude[index] << 1 | bit;
+ bitsDecoded[index]++;
+ processingFlags[index] |= processedMask;
+ }
+ }
+ }
+ },
+ runCleanupPass: function BitModel_runCleanupPass() {
+ var decoder = this.decoder;
+ var width = this.width,
+ height = this.height;
+ var neighborsSignificance = this.neighborsSignificance;
+ var coefficentsMagnitude = this.coefficentsMagnitude;
+ var coefficentsSign = this.coefficentsSign;
+ var contexts = this.contexts;
+ var labels = this.contextLabelTable;
+ var bitsDecoded = this.bitsDecoded;
+ var processingFlags = this.processingFlags;
+ var processedMask = 1;
+ var firstMagnitudeBitMask = 2;
+ var oneRowDown = width;
+ var twoRowsDown = width * 2;
+ var threeRowsDown = width * 3;
+ var iNext;
+
+ for (var i0 = 0; i0 < height; i0 = iNext) {
+ iNext = Math.min(i0 + 4, height);
+ var indexBase = i0 * width;
+ var checkAllEmpty = i0 + 3 < height;
+
+ for (var j = 0; j < width; j++) {
+ var index0 = indexBase + j;
+ var allEmpty = checkAllEmpty && processingFlags[index0] === 0 && processingFlags[index0 + oneRowDown] === 0 && processingFlags[index0 + twoRowsDown] === 0 && processingFlags[index0 + threeRowsDown] === 0 && neighborsSignificance[index0] === 0 && neighborsSignificance[index0 + oneRowDown] === 0 && neighborsSignificance[index0 + twoRowsDown] === 0 && neighborsSignificance[index0 + threeRowsDown] === 0;
+ var i1 = 0,
+ index = index0;
+ var i = i0,
+ sign;
+
+ if (allEmpty) {
+ var hasSignificantCoefficent = decoder.readBit(contexts, RUNLENGTH_CONTEXT);
+
+ if (!hasSignificantCoefficent) {
+ bitsDecoded[index0]++;
+ bitsDecoded[index0 + oneRowDown]++;
+ bitsDecoded[index0 + twoRowsDown]++;
+ bitsDecoded[index0 + threeRowsDown]++;
+ continue;
+ }
+
+ i1 = decoder.readBit(contexts, UNIFORM_CONTEXT) << 1 | decoder.readBit(contexts, UNIFORM_CONTEXT);
+
+ if (i1 !== 0) {
+ i = i0 + i1;
+ index += i1 * width;
+ }
+
+ sign = this.decodeSignBit(i, j, index);
+ coefficentsSign[index] = sign;
+ coefficentsMagnitude[index] = 1;
+ this.setNeighborsSignificance(i, j, index);
+ processingFlags[index] |= firstMagnitudeBitMask;
+ index = index0;
+
+ for (var i2 = i0; i2 <= i; i2++, index += width) {
+ bitsDecoded[index]++;
+ }
+
+ i1++;
+ }
+
+ for (i = i0 + i1; i < iNext; i++, index += width) {
+ if (coefficentsMagnitude[index] || (processingFlags[index] & processedMask) !== 0) {
+ continue;
+ }
+
+ var contextLabel = labels[neighborsSignificance[index]];
+ var decision = decoder.readBit(contexts, contextLabel);
+
+ if (decision === 1) {
+ sign = this.decodeSignBit(i, j, index);
+ coefficentsSign[index] = sign;
+ coefficentsMagnitude[index] = 1;
+ this.setNeighborsSignificance(i, j, index);
+ processingFlags[index] |= firstMagnitudeBitMask;
+ }
+
+ bitsDecoded[index]++;
+ }
+ }
+ }
+ },
+ checkSegmentationSymbol: function BitModel_checkSegmentationSymbol() {
+ var decoder = this.decoder;
+ var contexts = this.contexts;
+ var symbol = decoder.readBit(contexts, UNIFORM_CONTEXT) << 3 | decoder.readBit(contexts, UNIFORM_CONTEXT) << 2 | decoder.readBit(contexts, UNIFORM_CONTEXT) << 1 | decoder.readBit(contexts, UNIFORM_CONTEXT);
+
+ if (symbol !== 0xA) {
+ throw new JpxError('Invalid segmentation symbol');
+ }
+ }
+ };
+ return BitModel;
+ }();
+
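+ // Base class for the inverse discrete wavelet transform: interleaves the LL
+ // band with the HL/LH/HH bands of a level, then filters rows and columns with
+ // symmetric boundary extension (see extend()).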
+ var Transform = function TransformClosure() {
+ function Transform() {}
+
+ Transform.prototype.calculate = function transformCalculate(subbands, u0, v0) {
+ var ll = subbands[0];
+
+ for (var i = 1, ii = subbands.length; i < ii; i++) {
+ ll = this.iterate(ll, subbands[i], u0, v0);
+ }
+
+ return ll;
+ };
+
+ Transform.prototype.extend = function extend(buffer, offset, size) {
+ var i1 = offset - 1,
+ j1 = offset + 1;
+ var i2 = offset + size - 2,
+ j2 = offset + size;
+ buffer[i1--] = buffer[j1++];
+ buffer[j2++] = buffer[i2--];
+ buffer[i1--] = buffer[j1++];
+ buffer[j2++] = buffer[i2--];
+ buffer[i1--] = buffer[j1++];
+ buffer[j2++] = buffer[i2--];
+ buffer[i1] = buffer[j1];
+ buffer[j2] = buffer[i2];
+ };
+
+ Transform.prototype.iterate = function Transform_iterate(ll, hl_lh_hh, u0, v0) {
+ var llWidth = ll.width,
+ llHeight = ll.height,
+ llItems = ll.items;
+ var width = hl_lh_hh.width;
+ var height = hl_lh_hh.height;
+ var items = hl_lh_hh.items;
+ var i, j, k, l, u, v;
+
+ for (k = 0, i = 0; i < llHeight; i++) {
+ l = i * 2 * width;
+
+ for (j = 0; j < llWidth; j++, k++, l += 2) {
+ items[l] = llItems[k];
+ }
+ }
+
+ llItems = ll.items = null;
+ var bufferPadding = 4;
+ var rowBuffer = new Float32Array(width + 2 * bufferPadding);
+
+ if (width === 1) {
+ if ((u0 & 1) !== 0) {
+ for (v = 0, k = 0; v < height; v++, k += width) {
+ items[k] *= 0.5;
+ }
+ }
+ } else {
+ for (v = 0, k = 0; v < height; v++, k += width) {
+ rowBuffer.set(items.subarray(k, k + width), bufferPadding);
+ this.extend(rowBuffer, bufferPadding, width);
+ this.filter(rowBuffer, bufferPadding, width);
+ items.set(rowBuffer.subarray(bufferPadding, bufferPadding + width), k);
+ }
+ }
+
+ var numBuffers = 16;
+ var colBuffers = [];
+
+ for (i = 0; i < numBuffers; i++) {
+ colBuffers.push(new Float32Array(height + 2 * bufferPadding));
+ }
+
+ var b,
+ currentBuffer = 0;
+ ll = bufferPadding + height;
+
+ if (height === 1) {
+ if ((v0 & 1) !== 0) {
+ for (u = 0; u < width; u++) {
+ items[u] *= 0.5;
+ }
+ }
+ } else {
+ for (u = 0; u < width; u++) {
+ if (currentBuffer === 0) {
+ numBuffers = Math.min(width - u, numBuffers);
+
+ for (k = u, l = bufferPadding; l < ll; k += width, l++) {
+ for (b = 0; b < numBuffers; b++) {
+ colBuffers[b][l] = items[k + b];
+ }
+ }
+
+ currentBuffer = numBuffers;
+ }
+
+ currentBuffer--;
+ var buffer = colBuffers[currentBuffer];
+ this.extend(buffer, bufferPadding, height);
+ this.filter(buffer, bufferPadding, height);
+
+ if (currentBuffer === 0) {
+ k = u - numBuffers + 1;
+
+ for (l = bufferPadding; l < ll; k += width, l++) {
+ for (b = 0; b < numBuffers; b++) {
+ items[k + b] = colBuffers[b][l];
+ }
+ }
+ }
+ }
+ }
+
+ return {
+ width: width,
+ height: height,
+ items: items
+ };
+ };
+
+ return Transform;
+ }();
+
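+ // Irreversible 9/7 inverse lifting filter (lossy path); the constants are the
+ // standard CDF 9/7 lifting coefficients and scaling factor K.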
+ var IrreversibleTransform = function IrreversibleTransformClosure() {
+ function IrreversibleTransform() {
+ Transform.call(this);
+ }
+
+ IrreversibleTransform.prototype = Object.create(Transform.prototype);
+
+ IrreversibleTransform.prototype.filter = function irreversibleTransformFilter(x, offset, length) {
+ var len = length >> 1;
+ offset = offset | 0;
+ var j, n, current, next;
+ var alpha = -1.586134342059924;
+ var beta = -0.052980118572961;
+ var gamma = 0.882911075530934;
+ var delta = 0.443506852043971;
+ var K = 1.230174104914001;
+ var K_ = 1 / K;
+ j = offset - 3;
+
+ for (n = len + 4; n--; j += 2) {
+ x[j] *= K_;
+ }
+
+ j = offset - 2;
+ current = delta * x[j - 1];
+
+ for (n = len + 3; n--; j += 2) {
+ next = delta * x[j + 1];
+ x[j] = K * x[j] - current - next;
+
+ if (n--) {
+ j += 2;
+ current = delta * x[j + 1];
+ x[j] = K * x[j] - current - next;
+ } else {
+ break;
+ }
+ }
+
+ j = offset - 1;
+ current = gamma * x[j - 1];
+
+ for (n = len + 2; n--; j += 2) {
+ next = gamma * x[j + 1];
+ x[j] -= current + next;
+
+ if (n--) {
+ j += 2;
+ current = gamma * x[j + 1];
+ x[j] -= current + next;
+ } else {
+ break;
+ }
+ }
+
+ j = offset;
+ current = beta * x[j - 1];
+
+ for (n = len + 1; n--; j += 2) {
+ next = beta * x[j + 1];
+ x[j] -= current + next;
+
+ if (n--) {
+ j += 2;
+ current = beta * x[j + 1];
+ x[j] -= current + next;
+ } else {
+ break;
+ }
+ }
+
+ if (len !== 0) {
+ j = offset + 1;
+ current = alpha * x[j - 1];
+
+ for (n = len; n--; j += 2) {
+ next = alpha * x[j + 1];
+ x[j] -= current + next;
+
+ if (n--) {
+ j += 2;
+ current = alpha * x[j + 1];
+ x[j] -= current + next;
+ } else {
+ break;
+ }
+ }
+ }
+ };
+
+ return IrreversibleTransform;
+ }();
+
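+ // Reversible 5/3 integer inverse lifting filter (lossless path).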
+ var ReversibleTransform = function ReversibleTransformClosure() {
+ function ReversibleTransform() {
+ Transform.call(this);
+ }
+
+ ReversibleTransform.prototype = Object.create(Transform.prototype);
+
+ ReversibleTransform.prototype.filter = function reversibleTransformFilter(x, offset, length) {
+ var len = length >> 1;
+ offset = offset | 0;
+ var j, n;
+
+ for (j = offset, n = len + 1; n--; j += 2) {
+ x[j] -= x[j - 1] + x[j + 1] + 2 >> 2;
+ }
+
+ for (j = offset + 1, n = len; n--; j += 2) {
+ x[j] += x[j - 1] + x[j + 1] >> 1;
+ }
+ };
+
+ return ReversibleTransform;
+ }();
+
+ return JpxImage;
+}();
+
+exports.JpxImage = JpxImage;
+
+/***/ }),
+/* 168 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.calculateSHA512 = exports.calculateSHA384 = exports.calculateSHA256 = exports.calculateMD5 = exports.PDF20 = exports.PDF17 = exports.CipherTransformFactory = exports.ARCFourCipher = exports.AES256Cipher = exports.AES128Cipher = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _stream = __w_pdfjs_require__(158);
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
+
+function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
+
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
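+// RC4 (ARCFOUR) stream cipher; decryption is identical to encryption, so
+// decryptBlock simply aliases encryptBlock.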
+var ARCFourCipher = function ARCFourCipherClosure() {
+ function ARCFourCipher(key) {
+ this.a = 0;
+ this.b = 0;
+ var s = new Uint8Array(256);
+ var i,
+ j = 0,
+ tmp,
+ keyLength = key.length;
+
+ for (i = 0; i < 256; ++i) {
+ s[i] = i;
+ }
+
+ for (i = 0; i < 256; ++i) {
+ tmp = s[i];
+ j = j + tmp + key[i % keyLength] & 0xFF;
+ s[i] = s[j];
+ s[j] = tmp;
+ }
+
+ this.s = s;
+ }
+
+ ARCFourCipher.prototype = {
+ encryptBlock: function ARCFourCipher_encryptBlock(data) {
+ var i,
+ n = data.length,
+ tmp,
+ tmp2;
+ var a = this.a,
+ b = this.b,
+ s = this.s;
+ var output = new Uint8Array(n);
+
+ for (i = 0; i < n; ++i) {
+ a = a + 1 & 0xFF;
+ tmp = s[a];
+ b = b + tmp & 0xFF;
+ tmp2 = s[b];
+ s[a] = tmp2;
+ s[b] = tmp;
+ output[i] = data[i] ^ s[tmp + tmp2 & 0xFF];
+ }
+
+ this.a = a;
+ this.b = b;
+ return output;
+ }
+ };
+ ARCFourCipher.prototype.decryptBlock = ARCFourCipher.prototype.encryptBlock;
+ return ARCFourCipher;
+}();
+
+exports.ARCFourCipher = ARCFourCipher;
+
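+// MD5 digest (RFC 1321) of data[offset, offset + length).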
+var calculateMD5 = function calculateMD5Closure() {
+ var r = new Uint8Array([7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21]);
+ var k = new Int32Array([-680876936, -389564586, 606105819, -1044525330, -176418897, 1200080426, -1473231341, -45705983, 1770035416, -1958414417, -42063, -1990404162, 1804603682, -40341101, -1502002290, 1236535329, -165796510, -1069501632, 643717713, -373897302, -701558691, 38016083, -660478335, -405537848, 568446438, -1019803690, -187363961, 1163531501, -1444681467, -51403784, 1735328473, -1926607734, -378558, -2022574463, 1839030562, -35309556, -1530992060, 1272893353, -155497632, -1094730640, 681279174, -358537222, -722521979, 76029189, -640364487, -421815835, 530742520, -995338651, -198630844, 1126891415, -1416354905, -57434055, 1700485571, -1894986606, -1051523, -2054922799, 1873313359, -30611744, -1560198380, 1309151649, -145523070, -1120210379, 718787259, -343485551]);
+
+ function hash(data, offset, length) {
+ var h0 = 1732584193,
+ h1 = -271733879,
+ h2 = -1732584194,
+ h3 = 271733878;
+ var paddedLength = length + 72 & ~63;
+ var padded = new Uint8Array(paddedLength);
+ var i, j, n;
+
+ for (i = 0; i < length; ++i) {
+ padded[i] = data[offset++];
+ }
+
+ padded[i++] = 0x80;
+ n = paddedLength - 8;
+
+ while (i < n) {
+ padded[i++] = 0;
+ }
+
+ padded[i++] = length << 3 & 0xFF;
+ padded[i++] = length >> 5 & 0xFF;
+ padded[i++] = length >> 13 & 0xFF;
+ padded[i++] = length >> 21 & 0xFF;
+ padded[i++] = length >>> 29 & 0xFF;
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = 0;
+ var w = new Int32Array(16);
+
+ for (i = 0; i < paddedLength;) {
+ for (j = 0; j < 16; ++j, i += 4) {
+ w[j] = padded[i] | padded[i + 1] << 8 | padded[i + 2] << 16 | padded[i + 3] << 24;
+ }
+
+ var a = h0,
+ b = h1,
+ c = h2,
+ d = h3,
+ f,
+ g;
+
+ for (j = 0; j < 64; ++j) {
+ if (j < 16) {
+ f = b & c | ~b & d;
+ g = j;
+ } else if (j < 32) {
+ f = d & b | ~d & c;
+ g = 5 * j + 1 & 15;
+ } else if (j < 48) {
+ f = b ^ c ^ d;
+ g = 3 * j + 5 & 15;
+ } else {
+ f = c ^ (b | ~d);
+ g = 7 * j & 15;
+ }
+
+ var tmp = d,
+ rotateArg = a + f + k[j] + w[g] | 0,
+ rotate = r[j];
+ d = c;
+ c = b;
+ b = b + (rotateArg << rotate | rotateArg >>> 32 - rotate) | 0;
+ a = tmp;
+ }
+
+ h0 = h0 + a | 0;
+ h1 = h1 + b | 0;
+ h2 = h2 + c | 0;
+ h3 = h3 + d | 0;
+ }
+
+ return new Uint8Array([h0 & 0xFF, h0 >> 8 & 0xFF, h0 >> 16 & 0xFF, h0 >>> 24 & 0xFF, h1 & 0xFF, h1 >> 8 & 0xFF, h1 >> 16 & 0xFF, h1 >>> 24 & 0xFF, h2 & 0xFF, h2 >> 8 & 0xFF, h2 >> 16 & 0xFF, h2 >>> 24 & 0xFF, h3 & 0xFF, h3 >> 8 & 0xFF, h3 >> 16 & 0xFF, h3 >>> 24 & 0xFF]);
+ }
+
+ return hash;
+}();
+
+exports.calculateMD5 = calculateMD5;
+
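+// Minimal 64-bit word arithmetic on two signed 32-bit halves, used by the
+// SHA-512 implementation below.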
+var Word64 = function Word64Closure() {
+ function Word64(highInteger, lowInteger) {
+ this.high = highInteger | 0;
+ this.low = lowInteger | 0;
+ }
+
+ Word64.prototype = {
+ and: function Word64_and(word) {
+ this.high &= word.high;
+ this.low &= word.low;
+ },
+ xor: function Word64_xor(word) {
+ this.high ^= word.high;
+ this.low ^= word.low;
+ },
+ or: function Word64_or(word) {
+ this.high |= word.high;
+ this.low |= word.low;
+ },
+ shiftRight: function Word64_shiftRight(places) {
+ if (places >= 32) {
+ this.low = this.high >>> places - 32 | 0;
+ this.high = 0;
+ } else {
+ this.low = this.low >>> places | this.high << 32 - places;
+ this.high = this.high >>> places | 0;
+ }
+ },
+ shiftLeft: function Word64_shiftLeft(places) {
+ if (places >= 32) {
+ this.high = this.low << places - 32;
+ this.low = 0;
+ } else {
+ this.high = this.high << places | this.low >>> 32 - places;
+ this.low = this.low << places;
+ }
+ },
+ rotateRight: function Word64_rotateRight(places) {
+ var low, high;
+
+ if (places & 32) {
+ high = this.low;
+ low = this.high;
+ } else {
+ low = this.low;
+ high = this.high;
+ }
+
+ places &= 31;
+ this.low = low >>> places | high << 32 - places;
+ this.high = high >>> places | low << 32 - places;
+ },
+ not: function Word64_not() {
+ this.high = ~this.high;
+ this.low = ~this.low;
+ },
+ add: function Word64_add(word) {
+ var lowAdd = (this.low >>> 0) + (word.low >>> 0);
+ var highAdd = (this.high >>> 0) + (word.high >>> 0);
+
+ if (lowAdd > 0xFFFFFFFF) {
+ highAdd += 1;
+ }
+
+ this.low = lowAdd | 0;
+ this.high = highAdd | 0;
+ },
+ copyTo: function Word64_copyTo(bytes, offset) {
+ bytes[offset] = this.high >>> 24 & 0xFF;
+ bytes[offset + 1] = this.high >> 16 & 0xFF;
+ bytes[offset + 2] = this.high >> 8 & 0xFF;
+ bytes[offset + 3] = this.high & 0xFF;
+ bytes[offset + 4] = this.low >>> 24 & 0xFF;
+ bytes[offset + 5] = this.low >> 16 & 0xFF;
+ bytes[offset + 6] = this.low >> 8 & 0xFF;
+ bytes[offset + 7] = this.low & 0xFF;
+ },
+ assign: function Word64_assign(word) {
+ this.high = word.high;
+ this.low = word.low;
+ }
+ };
+ return Word64;
+}();
+
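+// SHA-256 (FIPS 180-4) digest of data[offset, offset + length).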
+var calculateSHA256 = function calculateSHA256Closure() {
+ function rotr(x, n) {
+ return x >>> n | x << 32 - n;
+ }
+
+ function ch(x, y, z) {
+ return x & y ^ ~x & z;
+ }
+
+ function maj(x, y, z) {
+ return x & y ^ x & z ^ y & z;
+ }
+
+ function sigma(x) {
+ return rotr(x, 2) ^ rotr(x, 13) ^ rotr(x, 22);
+ }
+
+ function sigmaPrime(x) {
+ return rotr(x, 6) ^ rotr(x, 11) ^ rotr(x, 25);
+ }
+
+ function littleSigma(x) {
+ return rotr(x, 7) ^ rotr(x, 18) ^ x >>> 3;
+ }
+
+ function littleSigmaPrime(x) {
+ return rotr(x, 17) ^ rotr(x, 19) ^ x >>> 10;
+ }
+
+ var k = [0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2];
+
+ function hash(data, offset, length) {
+ var h0 = 0x6a09e667,
+ h1 = 0xbb67ae85,
+ h2 = 0x3c6ef372,
+ h3 = 0xa54ff53a,
+ h4 = 0x510e527f,
+ h5 = 0x9b05688c,
+ h6 = 0x1f83d9ab,
+ h7 = 0x5be0cd19;
+ var paddedLength = Math.ceil((length + 9) / 64) * 64;
+ var padded = new Uint8Array(paddedLength);
+ var i, j, n;
+
+ for (i = 0; i < length; ++i) {
+ padded[i] = data[offset++];
+ }
+
+ padded[i++] = 0x80;
+ n = paddedLength - 8;
+
+ while (i < n) {
+ padded[i++] = 0;
+ }
+
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = length >>> 29 & 0xFF;
+ padded[i++] = length >> 21 & 0xFF;
+ padded[i++] = length >> 13 & 0xFF;
+ padded[i++] = length >> 5 & 0xFF;
+ padded[i++] = length << 3 & 0xFF;
+ var w = new Uint32Array(64);
+
+ for (i = 0; i < paddedLength;) {
+ for (j = 0; j < 16; ++j) {
+ w[j] = padded[i] << 24 | padded[i + 1] << 16 | padded[i + 2] << 8 | padded[i + 3];
+ i += 4;
+ }
+
+ for (j = 16; j < 64; ++j) {
+ w[j] = littleSigmaPrime(w[j - 2]) + w[j - 7] + littleSigma(w[j - 15]) + w[j - 16] | 0;
+ }
+
+ var a = h0,
+ b = h1,
+ c = h2,
+ d = h3,
+ e = h4,
+ f = h5,
+ g = h6,
+ h = h7,
+ t1,
+ t2;
+
+ for (j = 0; j < 64; ++j) {
+ t1 = h + sigmaPrime(e) + ch(e, f, g) + k[j] + w[j];
+ t2 = sigma(a) + maj(a, b, c);
+ h = g;
+ g = f;
+ f = e;
+ e = d + t1 | 0;
+ d = c;
+ c = b;
+ b = a;
+ a = t1 + t2 | 0;
+ }
+
+ h0 = h0 + a | 0;
+ h1 = h1 + b | 0;
+ h2 = h2 + c | 0;
+ h3 = h3 + d | 0;
+ h4 = h4 + e | 0;
+ h5 = h5 + f | 0;
+ h6 = h6 + g | 0;
+ h7 = h7 + h | 0;
+ }
+
+ return new Uint8Array([h0 >> 24 & 0xFF, h0 >> 16 & 0xFF, h0 >> 8 & 0xFF, h0 & 0xFF, h1 >> 24 & 0xFF, h1 >> 16 & 0xFF, h1 >> 8 & 0xFF, h1 & 0xFF, h2 >> 24 & 0xFF, h2 >> 16 & 0xFF, h2 >> 8 & 0xFF, h2 & 0xFF, h3 >> 24 & 0xFF, h3 >> 16 & 0xFF, h3 >> 8 & 0xFF, h3 & 0xFF, h4 >> 24 & 0xFF, h4 >> 16 & 0xFF, h4 >> 8 & 0xFF, h4 & 0xFF, h5 >> 24 & 0xFF, h5 >> 16 & 0xFF, h5 >> 8 & 0xFF, h5 & 0xFF, h6 >> 24 & 0xFF, h6 >> 16 & 0xFF, h6 >> 8 & 0xFF, h6 & 0xFF, h7 >> 24 & 0xFF, h7 >> 16 & 0xFF, h7 >> 8 & 0xFF, h7 & 0xFF]);
+ }
+
+ return hash;
+}();
+
+exports.calculateSHA256 = calculateSHA256;
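The same calling convention applies to calculateSHA256; the input below is an illustrative assumption:

var msg = new Uint8Array([0x61, 0x62, 0x63]); // "abc"
var digest = calculateSHA256(msg, 0, msg.length); // 32-byte Uint8Array
// For the standard "abc" test vector the digest starts 0xba, 0x78, 0x16, 0xbf, ...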
+
+var calculateSHA512 = function calculateSHA512Closure() {
+ function ch(result, x, y, z, tmp) {
+ result.assign(x);
+ result.and(y);
+ tmp.assign(x);
+ tmp.not();
+ tmp.and(z);
+ result.xor(tmp);
+ }
+
+ function maj(result, x, y, z, tmp) {
+ result.assign(x);
+ result.and(y);
+ tmp.assign(x);
+ tmp.and(z);
+ result.xor(tmp);
+ tmp.assign(y);
+ tmp.and(z);
+ result.xor(tmp);
+ }
+
+ function sigma(result, x, tmp) {
+ result.assign(x);
+ result.rotateRight(28);
+ tmp.assign(x);
+ tmp.rotateRight(34);
+ result.xor(tmp);
+ tmp.assign(x);
+ tmp.rotateRight(39);
+ result.xor(tmp);
+ }
+
+ function sigmaPrime(result, x, tmp) {
+ result.assign(x);
+ result.rotateRight(14);
+ tmp.assign(x);
+ tmp.rotateRight(18);
+ result.xor(tmp);
+ tmp.assign(x);
+ tmp.rotateRight(41);
+ result.xor(tmp);
+ }
+
+ function littleSigma(result, x, tmp) {
+ result.assign(x);
+ result.rotateRight(1);
+ tmp.assign(x);
+ tmp.rotateRight(8);
+ result.xor(tmp);
+ tmp.assign(x);
+ tmp.shiftRight(7);
+ result.xor(tmp);
+ }
+
+ function littleSigmaPrime(result, x, tmp) {
+ result.assign(x);
+ result.rotateRight(19);
+ tmp.assign(x);
+ tmp.rotateRight(61);
+ result.xor(tmp);
+ tmp.assign(x);
+ tmp.shiftRight(6);
+ result.xor(tmp);
+ }
+
+ var k = [new Word64(0x428a2f98, 0xd728ae22), new Word64(0x71374491, 0x23ef65cd), new Word64(0xb5c0fbcf, 0xec4d3b2f), new Word64(0xe9b5dba5, 0x8189dbbc), new Word64(0x3956c25b, 0xf348b538), new Word64(0x59f111f1, 0xb605d019), new Word64(0x923f82a4, 0xaf194f9b), new Word64(0xab1c5ed5, 0xda6d8118), new Word64(0xd807aa98, 0xa3030242), new Word64(0x12835b01, 0x45706fbe), new Word64(0x243185be, 0x4ee4b28c), new Word64(0x550c7dc3, 0xd5ffb4e2), new Word64(0x72be5d74, 0xf27b896f), new Word64(0x80deb1fe, 0x3b1696b1), new Word64(0x9bdc06a7, 0x25c71235), new Word64(0xc19bf174, 0xcf692694), new Word64(0xe49b69c1, 0x9ef14ad2), new Word64(0xefbe4786, 0x384f25e3), new Word64(0x0fc19dc6, 0x8b8cd5b5), new Word64(0x240ca1cc, 0x77ac9c65), new Word64(0x2de92c6f, 0x592b0275), new Word64(0x4a7484aa, 0x6ea6e483), new Word64(0x5cb0a9dc, 0xbd41fbd4), new Word64(0x76f988da, 0x831153b5), new Word64(0x983e5152, 0xee66dfab), new Word64(0xa831c66d, 0x2db43210), new Word64(0xb00327c8, 0x98fb213f), new Word64(0xbf597fc7, 0xbeef0ee4), new Word64(0xc6e00bf3, 0x3da88fc2), new Word64(0xd5a79147, 0x930aa725), new Word64(0x06ca6351, 0xe003826f), new Word64(0x14292967, 0x0a0e6e70), new Word64(0x27b70a85, 0x46d22ffc), new Word64(0x2e1b2138, 0x5c26c926), new Word64(0x4d2c6dfc, 0x5ac42aed), new Word64(0x53380d13, 0x9d95b3df), new Word64(0x650a7354, 0x8baf63de), new Word64(0x766a0abb, 0x3c77b2a8), new Word64(0x81c2c92e, 0x47edaee6), new Word64(0x92722c85, 0x1482353b), new Word64(0xa2bfe8a1, 0x4cf10364), new Word64(0xa81a664b, 0xbc423001), new Word64(0xc24b8b70, 0xd0f89791), new Word64(0xc76c51a3, 0x0654be30), new Word64(0xd192e819, 0xd6ef5218), new Word64(0xd6990624, 0x5565a910), new Word64(0xf40e3585, 0x5771202a), new Word64(0x106aa070, 0x32bbd1b8), new Word64(0x19a4c116, 0xb8d2d0c8), new Word64(0x1e376c08, 0x5141ab53), new Word64(0x2748774c, 0xdf8eeb99), new Word64(0x34b0bcb5, 0xe19b48a8), new Word64(0x391c0cb3, 0xc5c95a63), new Word64(0x4ed8aa4a, 0xe3418acb), new Word64(0x5b9cca4f, 0x7763e373), new Word64(0x682e6ff3, 0xd6b2b8a3), new Word64(0x748f82ee, 0x5defb2fc), new Word64(0x78a5636f, 0x43172f60), new Word64(0x84c87814, 0xa1f0ab72), new Word64(0x8cc70208, 0x1a6439ec), new Word64(0x90befffa, 0x23631e28), new Word64(0xa4506ceb, 0xde82bde9), new Word64(0xbef9a3f7, 0xb2c67915), new Word64(0xc67178f2, 0xe372532b), new Word64(0xca273ece, 0xea26619c), new Word64(0xd186b8c7, 0x21c0c207), new Word64(0xeada7dd6, 0xcde0eb1e), new Word64(0xf57d4f7f, 0xee6ed178), new Word64(0x06f067aa, 0x72176fba), new Word64(0x0a637dc5, 0xa2c898a6), new Word64(0x113f9804, 0xbef90dae), new Word64(0x1b710b35, 0x131c471b), new Word64(0x28db77f5, 0x23047d84), new Word64(0x32caab7b, 0x40c72493), new Word64(0x3c9ebe0a, 0x15c9bebc), new Word64(0x431d67c4, 0x9c100d4c), new Word64(0x4cc5d4be, 0xcb3e42b6), new Word64(0x597f299c, 0xfc657e2a), new Word64(0x5fcb6fab, 0x3ad6faec), new Word64(0x6c44198c, 0x4a475817)];
+
+ function hash(data, offset, length, mode384) {
+ mode384 = !!mode384;
+ var h0, h1, h2, h3, h4, h5, h6, h7;
+
+ if (!mode384) {
+ h0 = new Word64(0x6a09e667, 0xf3bcc908);
+ h1 = new Word64(0xbb67ae85, 0x84caa73b);
+ h2 = new Word64(0x3c6ef372, 0xfe94f82b);
+ h3 = new Word64(0xa54ff53a, 0x5f1d36f1);
+ h4 = new Word64(0x510e527f, 0xade682d1);
+ h5 = new Word64(0x9b05688c, 0x2b3e6c1f);
+ h6 = new Word64(0x1f83d9ab, 0xfb41bd6b);
+ h7 = new Word64(0x5be0cd19, 0x137e2179);
+ } else {
+ h0 = new Word64(0xcbbb9d5d, 0xc1059ed8);
+ h1 = new Word64(0x629a292a, 0x367cd507);
+ h2 = new Word64(0x9159015a, 0x3070dd17);
+ h3 = new Word64(0x152fecd8, 0xf70e5939);
+ h4 = new Word64(0x67332667, 0xffc00b31);
+ h5 = new Word64(0x8eb44a87, 0x68581511);
+ h6 = new Word64(0xdb0c2e0d, 0x64f98fa7);
+ h7 = new Word64(0x47b5481d, 0xbefa4fa4);
+ }
+
+ var paddedLength = Math.ceil((length + 17) / 128) * 128;
+ var padded = new Uint8Array(paddedLength);
+ var i, j, n;
+
+ for (i = 0; i < length; ++i) {
+ padded[i] = data[offset++];
+ }
+
+ padded[i++] = 0x80;
+ n = paddedLength - 16;
+
+ while (i < n) {
+ padded[i++] = 0;
+ }
+
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = 0;
+ padded[i++] = length >>> 29 & 0xFF;
+ padded[i++] = length >> 21 & 0xFF;
+ padded[i++] = length >> 13 & 0xFF;
+ padded[i++] = length >> 5 & 0xFF;
+ padded[i++] = length << 3 & 0xFF;
+ var w = new Array(80);
+
+ for (i = 0; i < 80; i++) {
+ w[i] = new Word64(0, 0);
+ }
+
+ var a = new Word64(0, 0),
+ b = new Word64(0, 0),
+ c = new Word64(0, 0);
+ var d = new Word64(0, 0),
+ e = new Word64(0, 0),
+ f = new Word64(0, 0);
+ var g = new Word64(0, 0),
+ h = new Word64(0, 0);
+ var t1 = new Word64(0, 0),
+ t2 = new Word64(0, 0);
+ var tmp1 = new Word64(0, 0),
+ tmp2 = new Word64(0, 0),
+ tmp3;
+
+ for (i = 0; i < paddedLength;) {
+ for (j = 0; j < 16; ++j) {
+ w[j].high = padded[i] << 24 | padded[i + 1] << 16 | padded[i + 2] << 8 | padded[i + 3];
+ w[j].low = padded[i + 4] << 24 | padded[i + 5] << 16 | padded[i + 6] << 8 | padded[i + 7];
+ i += 8;
+ }
+
+ for (j = 16; j < 80; ++j) {
+ tmp3 = w[j];
+ littleSigmaPrime(tmp3, w[j - 2], tmp2);
+ tmp3.add(w[j - 7]);
+ littleSigma(tmp1, w[j - 15], tmp2);
+ tmp3.add(tmp1);
+ tmp3.add(w[j - 16]);
+ }
+
+ a.assign(h0);
+ b.assign(h1);
+ c.assign(h2);
+ d.assign(h3);
+ e.assign(h4);
+ f.assign(h5);
+ g.assign(h6);
+ h.assign(h7);
+
+ for (j = 0; j < 80; ++j) {
+ t1.assign(h);
+ sigmaPrime(tmp1, e, tmp2);
+ t1.add(tmp1);
+ ch(tmp1, e, f, g, tmp2);
+ t1.add(tmp1);
+ t1.add(k[j]);
+ t1.add(w[j]);
+ sigma(t2, a, tmp2);
+ maj(tmp1, a, b, c, tmp2);
+ t2.add(tmp1);
+ tmp3 = h;
+ h = g;
+ g = f;
+ f = e;
+ d.add(t1);
+ e = d;
+ d = c;
+ c = b;
+ b = a;
+ tmp3.assign(t1);
+ tmp3.add(t2);
+ a = tmp3;
+ }
+
+ h0.add(a);
+ h1.add(b);
+ h2.add(c);
+ h3.add(d);
+ h4.add(e);
+ h5.add(f);
+ h6.add(g);
+ h7.add(h);
+ }
+
+ var result;
+
+ if (!mode384) {
+ result = new Uint8Array(64);
+ h0.copyTo(result, 0);
+ h1.copyTo(result, 8);
+ h2.copyTo(result, 16);
+ h3.copyTo(result, 24);
+ h4.copyTo(result, 32);
+ h5.copyTo(result, 40);
+ h6.copyTo(result, 48);
+ h7.copyTo(result, 56);
+ } else {
+ result = new Uint8Array(48);
+ h0.copyTo(result, 0);
+ h1.copyTo(result, 8);
+ h2.copyTo(result, 16);
+ h3.copyTo(result, 24);
+ h4.copyTo(result, 32);
+ h5.copyTo(result, 40);
+ }
+
+ return result;
+ }
+
+ return hash;
+}();
+
+exports.calculateSHA512 = calculateSHA512;
+
+var calculateSHA384 = function calculateSHA384Closure() {
+ function hash(data, offset, length) {
+ return calculateSHA512(data, offset, length, true);
+ }
+
+ return hash;
+}();
+
+exports.calculateSHA384 = calculateSHA384;
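calculateSHA384 is calculateSHA512 run with the alternate initial hash values and a truncated result; a quick sketch of the output sizes, using an arbitrary illustrative input:

var data = new Uint8Array(100); // arbitrary zero-filled input, for illustration only
calculateSHA512(data, 0, data.length).length; // 64 (h0..h7 copied out)
calculateSHA384(data, 0, data.length).length; // 48 (only h0..h5 copied out)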
+
+var NullCipher = function NullCipherClosure() {
+ function NullCipher() {}
+
+ NullCipher.prototype = {
+ decryptBlock: function NullCipher_decryptBlock(data) {
+ return data;
+ }
+ };
+ return NullCipher;
+}();
+
+var AESBaseCipher =
+/*#__PURE__*/
+function () {
+ function AESBaseCipher() {
+ _classCallCheck(this, AESBaseCipher);
+
+ if (this.constructor === AESBaseCipher) {
+ (0, _util.unreachable)('Cannot initialize AESBaseCipher.');
+ }
+
+ this._s = new Uint8Array([0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76, 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0, 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15, 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75, 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84, 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf, 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8, 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2, 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73, 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb, 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79, 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08, 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a, 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e, 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf, 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16]);
+ this._inv_s = new Uint8Array([0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb, 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb, 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e, 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25, 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92, 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84, 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06, 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b, 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73, 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e, 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b, 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4, 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f, 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef, 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61, 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d]);
+ this._mix = new Uint32Array([0x00000000, 0x0e090d0b, 0x1c121a16, 0x121b171d, 0x3824342c, 0x362d3927, 0x24362e3a, 0x2a3f2331, 0x70486858, 0x7e416553, 0x6c5a724e, 0x62537f45, 0x486c5c74, 0x4665517f, 0x547e4662, 0x5a774b69, 0xe090d0b0, 0xee99ddbb, 0xfc82caa6, 0xf28bc7ad, 0xd8b4e49c, 0xd6bde997, 0xc4a6fe8a, 0xcaaff381, 0x90d8b8e8, 0x9ed1b5e3, 0x8ccaa2fe, 0x82c3aff5, 0xa8fc8cc4, 0xa6f581cf, 0xb4ee96d2, 0xbae79bd9, 0xdb3bbb7b, 0xd532b670, 0xc729a16d, 0xc920ac66, 0xe31f8f57, 0xed16825c, 0xff0d9541, 0xf104984a, 0xab73d323, 0xa57ade28, 0xb761c935, 0xb968c43e, 0x9357e70f, 0x9d5eea04, 0x8f45fd19, 0x814cf012, 0x3bab6bcb, 0x35a266c0, 0x27b971dd, 0x29b07cd6, 0x038f5fe7, 0x0d8652ec, 0x1f9d45f1, 0x119448fa, 0x4be30393, 0x45ea0e98, 0x57f11985, 0x59f8148e, 0x73c737bf, 0x7dce3ab4, 0x6fd52da9, 0x61dc20a2, 0xad766df6, 0xa37f60fd, 0xb16477e0, 0xbf6d7aeb, 0x955259da, 0x9b5b54d1, 0x894043cc, 0x87494ec7, 0xdd3e05ae, 0xd33708a5, 0xc12c1fb8, 0xcf2512b3, 0xe51a3182, 0xeb133c89, 0xf9082b94, 0xf701269f, 0x4de6bd46, 0x43efb04d, 0x51f4a750, 0x5ffdaa5b, 0x75c2896a, 0x7bcb8461, 0x69d0937c, 0x67d99e77, 0x3daed51e, 0x33a7d815, 0x21bccf08, 0x2fb5c203, 0x058ae132, 0x0b83ec39, 0x1998fb24, 0x1791f62f, 0x764dd68d, 0x7844db86, 0x6a5fcc9b, 0x6456c190, 0x4e69e2a1, 0x4060efaa, 0x527bf8b7, 0x5c72f5bc, 0x0605bed5, 0x080cb3de, 0x1a17a4c3, 0x141ea9c8, 0x3e218af9, 0x302887f2, 0x223390ef, 0x2c3a9de4, 0x96dd063d, 0x98d40b36, 0x8acf1c2b, 0x84c61120, 0xaef93211, 0xa0f03f1a, 0xb2eb2807, 0xbce2250c, 0xe6956e65, 0xe89c636e, 0xfa877473, 0xf48e7978, 0xdeb15a49, 0xd0b85742, 0xc2a3405f, 0xccaa4d54, 0x41ecdaf7, 0x4fe5d7fc, 0x5dfec0e1, 0x53f7cdea, 0x79c8eedb, 0x77c1e3d0, 0x65daf4cd, 0x6bd3f9c6, 0x31a4b2af, 0x3fadbfa4, 0x2db6a8b9, 0x23bfa5b2, 0x09808683, 0x07898b88, 0x15929c95, 0x1b9b919e, 0xa17c0a47, 0xaf75074c, 0xbd6e1051, 0xb3671d5a, 0x99583e6b, 0x97513360, 0x854a247d, 0x8b432976, 0xd134621f, 0xdf3d6f14, 0xcd267809, 0xc32f7502, 0xe9105633, 0xe7195b38, 0xf5024c25, 0xfb0b412e, 0x9ad7618c, 0x94de6c87, 0x86c57b9a, 0x88cc7691, 0xa2f355a0, 0xacfa58ab, 0xbee14fb6, 0xb0e842bd, 0xea9f09d4, 0xe49604df, 0xf68d13c2, 0xf8841ec9, 0xd2bb3df8, 0xdcb230f3, 0xcea927ee, 0xc0a02ae5, 0x7a47b13c, 0x744ebc37, 0x6655ab2a, 0x685ca621, 0x42638510, 0x4c6a881b, 0x5e719f06, 0x5078920d, 0x0a0fd964, 0x0406d46f, 0x161dc372, 0x1814ce79, 0x322bed48, 0x3c22e043, 0x2e39f75e, 0x2030fa55, 0xec9ab701, 0xe293ba0a, 0xf088ad17, 0xfe81a01c, 0xd4be832d, 0xdab78e26, 0xc8ac993b, 0xc6a59430, 0x9cd2df59, 0x92dbd252, 0x80c0c54f, 0x8ec9c844, 0xa4f6eb75, 0xaaffe67e, 0xb8e4f163, 0xb6edfc68, 0x0c0a67b1, 0x02036aba, 0x10187da7, 0x1e1170ac, 0x342e539d, 0x3a275e96, 0x283c498b, 0x26354480, 0x7c420fe9, 0x724b02e2, 0x605015ff, 0x6e5918f4, 0x44663bc5, 0x4a6f36ce, 0x587421d3, 0x567d2cd8, 0x37a10c7a, 0x39a80171, 0x2bb3166c, 0x25ba1b67, 0x0f853856, 0x018c355d, 0x13972240, 0x1d9e2f4b, 0x47e96422, 0x49e06929, 0x5bfb7e34, 0x55f2733f, 0x7fcd500e, 0x71c45d05, 0x63df4a18, 0x6dd64713, 0xd731dcca, 0xd938d1c1, 0xcb23c6dc, 0xc52acbd7, 0xef15e8e6, 0xe11ce5ed, 0xf307f2f0, 0xfd0efffb, 0xa779b492, 0xa970b999, 0xbb6bae84, 0xb562a38f, 0x9f5d80be, 0x91548db5, 0x834f9aa8, 0x8d4697a3]);
+ this._mixCol = new Uint8Array(256);
+
+ for (var i = 0; i < 256; i++) {
+ if (i < 128) {
+ this._mixCol[i] = i << 1;
+ } else {
+ this._mixCol[i] = i << 1 ^ 0x1b;
+ }
+ }
+
+ this.buffer = new Uint8Array(16);
+ this.bufferPosition = 0;
+ }
+
+ _createClass(AESBaseCipher, [{
+ key: "_expandKey",
+ value: function _expandKey(cipherKey) {
+ (0, _util.unreachable)('Cannot call `_expandKey` on the base class');
+ }
+ }, {
+ key: "_decrypt",
+ value: function _decrypt(input, key) {
+ var t, u, v;
+ var state = new Uint8Array(16);
+ state.set(input);
+
+ for (var j = 0, k = this._keySize; j < 16; ++j, ++k) {
+ state[j] ^= key[k];
+ }
+
+ for (var i = this._cyclesOfRepetition - 1; i >= 1; --i) {
+ t = state[13];
+ state[13] = state[9];
+ state[9] = state[5];
+ state[5] = state[1];
+ state[1] = t;
+ t = state[14];
+ u = state[10];
+ state[14] = state[6];
+ state[10] = state[2];
+ state[6] = t;
+ state[2] = u;
+ t = state[15];
+ u = state[11];
+ v = state[7];
+ state[15] = state[3];
+ state[11] = t;
+ state[7] = u;
+ state[3] = v;
+
+ for (var _j = 0; _j < 16; ++_j) {
+ state[_j] = this._inv_s[state[_j]];
+ }
+
+ for (var _j2 = 0, _k = i * 16; _j2 < 16; ++_j2, ++_k) {
+ state[_j2] ^= key[_k];
+ }
+
+ for (var _j3 = 0; _j3 < 16; _j3 += 4) {
+ var s0 = this._mix[state[_j3]];
+ var s1 = this._mix[state[_j3 + 1]];
+ var s2 = this._mix[state[_j3 + 2]];
+ var s3 = this._mix[state[_j3 + 3]];
+ t = s0 ^ s1 >>> 8 ^ s1 << 24 ^ s2 >>> 16 ^ s2 << 16 ^ s3 >>> 24 ^ s3 << 8;
+ state[_j3] = t >>> 24 & 0xFF;
+ state[_j3 + 1] = t >> 16 & 0xFF;
+ state[_j3 + 2] = t >> 8 & 0xFF;
+ state[_j3 + 3] = t & 0xFF;
+ }
+ }
+
+ t = state[13];
+ state[13] = state[9];
+ state[9] = state[5];
+ state[5] = state[1];
+ state[1] = t;
+ t = state[14];
+ u = state[10];
+ state[14] = state[6];
+ state[10] = state[2];
+ state[6] = t;
+ state[2] = u;
+ t = state[15];
+ u = state[11];
+ v = state[7];
+ state[15] = state[3];
+ state[11] = t;
+ state[7] = u;
+ state[3] = v;
+
+ for (var _j4 = 0; _j4 < 16; ++_j4) {
+ state[_j4] = this._inv_s[state[_j4]];
+ state[_j4] ^= key[_j4];
+ }
+
+ return state;
+ }
+ }, {
+ key: "_encrypt",
+ value: function _encrypt(input, key) {
+ var s = this._s;
+ var t, u, v;
+ var state = new Uint8Array(16);
+ state.set(input);
+
+ for (var j = 0; j < 16; ++j) {
+ state[j] ^= key[j];
+ }
+
+ for (var i = 1; i < this._cyclesOfRepetition; i++) {
+ for (var _j5 = 0; _j5 < 16; ++_j5) {
+ state[_j5] = s[state[_j5]];
+ }
+
+ v = state[1];
+ state[1] = state[5];
+ state[5] = state[9];
+ state[9] = state[13];
+ state[13] = v;
+ v = state[2];
+ u = state[6];
+ state[2] = state[10];
+ state[6] = state[14];
+ state[10] = v;
+ state[14] = u;
+ v = state[3];
+ u = state[7];
+ t = state[11];
+ state[3] = state[15];
+ state[7] = v;
+ state[11] = u;
+ state[15] = t;
+
+ for (var _j6 = 0; _j6 < 16; _j6 += 4) {
+ var s0 = state[_j6 + 0];
+ var s1 = state[_j6 + 1];
+ var s2 = state[_j6 + 2];
+ var s3 = state[_j6 + 3];
+ t = s0 ^ s1 ^ s2 ^ s3;
+ state[_j6 + 0] ^= t ^ this._mixCol[s0 ^ s1];
+ state[_j6 + 1] ^= t ^ this._mixCol[s1 ^ s2];
+ state[_j6 + 2] ^= t ^ this._mixCol[s2 ^ s3];
+ state[_j6 + 3] ^= t ^ this._mixCol[s3 ^ s0];
+ }
+
+ for (var _j7 = 0, k = i * 16; _j7 < 16; ++_j7, ++k) {
+ state[_j7] ^= key[k];
+ }
+ }
+
+ for (var _j8 = 0; _j8 < 16; ++_j8) {
+ state[_j8] = s[state[_j8]];
+ }
+
+ v = state[1];
+ state[1] = state[5];
+ state[5] = state[9];
+ state[9] = state[13];
+ state[13] = v;
+ v = state[2];
+ u = state[6];
+ state[2] = state[10];
+ state[6] = state[14];
+ state[10] = v;
+ state[14] = u;
+ v = state[3];
+ u = state[7];
+ t = state[11];
+ state[3] = state[15];
+ state[7] = v;
+ state[11] = u;
+ state[15] = t;
+
+ for (var _j9 = 0, _k2 = this._keySize; _j9 < 16; ++_j9, ++_k2) {
+ state[_j9] ^= key[_k2];
+ }
+
+ return state;
+ }
+ }, {
+ key: "_decryptBlock2",
+ value: function _decryptBlock2(data, finalize) {
+ var sourceLength = data.length;
+ var buffer = this.buffer,
+ bufferLength = this.bufferPosition;
+ var result = [],
+ iv = this.iv;
+
+ for (var i = 0; i < sourceLength; ++i) {
+ buffer[bufferLength] = data[i];
+ ++bufferLength;
+
+ if (bufferLength < 16) {
+ continue;
+ }
+
+ var plain = this._decrypt(buffer, this._key);
+
+ for (var j = 0; j < 16; ++j) {
+ plain[j] ^= iv[j];
+ }
+
+ iv = buffer;
+ result.push(plain);
+ buffer = new Uint8Array(16);
+ bufferLength = 0;
+ }
+
+ this.buffer = buffer;
+ this.bufferLength = bufferLength;
+ this.iv = iv;
+
+ if (result.length === 0) {
+ return new Uint8Array(0);
+ }
+
+ var outputLength = 16 * result.length;
+
+ if (finalize) {
+ var lastBlock = result[result.length - 1];
+ var psLen = lastBlock[15];
+
+ if (psLen <= 16) {
+ for (var _i = 15, ii = 16 - psLen; _i >= ii; --_i) {
+ if (lastBlock[_i] !== psLen) {
+ psLen = 0;
+ break;
+ }
+ }
+
+ outputLength -= psLen;
+ result[result.length - 1] = lastBlock.subarray(0, 16 - psLen);
+ }
+ }
+
+ var output = new Uint8Array(outputLength);
+
+ for (var _i2 = 0, _j10 = 0, _ii = result.length; _i2 < _ii; ++_i2, _j10 += 16) {
+ output.set(result[_i2], _j10);
+ }
+
+ return output;
+ }
+ }, {
+ key: "decryptBlock",
+ value: function decryptBlock(data, finalize) {
+ var iv = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : null;
+ var sourceLength = data.length;
+ var buffer = this.buffer,
+ bufferLength = this.bufferPosition;
+
+ if (iv) {
+ this.iv = iv;
+ } else {
+ for (var i = 0; bufferLength < 16 && i < sourceLength; ++i, ++bufferLength) {
+ buffer[bufferLength] = data[i];
+ }
+
+ if (bufferLength < 16) {
+ this.bufferLength = bufferLength;
+ return new Uint8Array(0);
+ }
+
+ this.iv = buffer;
+ data = data.subarray(16);
+ }
+
+ this.buffer = new Uint8Array(16);
+ this.bufferLength = 0;
+ this.decryptBlock = this._decryptBlock2;
+ return this.decryptBlock(data, finalize);
+ }
+ }, {
+ key: "encrypt",
+ value: function encrypt(data, iv) {
+ var sourceLength = data.length;
+ var buffer = this.buffer,
+ bufferLength = this.bufferPosition;
+ var result = [];
+
+ if (!iv) {
+ iv = new Uint8Array(16);
+ }
+
+ for (var i = 0; i < sourceLength; ++i) {
+ buffer[bufferLength] = data[i];
+ ++bufferLength;
+
+ if (bufferLength < 16) {
+ continue;
+ }
+
+ for (var j = 0; j < 16; ++j) {
+ buffer[j] ^= iv[j];
+ }
+
+ var cipher = this._encrypt(buffer, this._key);
+
+ iv = cipher;
+ result.push(cipher);
+ buffer = new Uint8Array(16);
+ bufferLength = 0;
+ }
+
+ this.buffer = buffer;
+ this.bufferLength = bufferLength;
+ this.iv = iv;
+
+ if (result.length === 0) {
+ return new Uint8Array(0);
+ }
+
+ var outputLength = 16 * result.length;
+ var output = new Uint8Array(outputLength);
+
+ for (var _i3 = 0, _j11 = 0, ii = result.length; _i3 < ii; ++_i3, _j11 += 16) {
+ output.set(result[_i3], _j11);
+ }
+
+ return output;
+ }
+ }]);
+
+ return AESBaseCipher;
+}();
+
+var AES128Cipher =
+/*#__PURE__*/
+function (_AESBaseCipher) {
+ _inherits(AES128Cipher, _AESBaseCipher);
+
+ function AES128Cipher(key) {
+ var _this;
+
+ _classCallCheck(this, AES128Cipher);
+
+ _this = _possibleConstructorReturn(this, _getPrototypeOf(AES128Cipher).call(this));
+ _this._cyclesOfRepetition = 10;
+ _this._keySize = 160;
+ _this._rcon = new Uint8Array([0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d]);
+ _this._key = _this._expandKey(key);
+ return _this;
+ }
+
+ _createClass(AES128Cipher, [{
+ key: "_expandKey",
+ value: function _expandKey(cipherKey) {
+ var b = 176;
+ var s = this._s;
+ var rcon = this._rcon;
+ var result = new Uint8Array(b);
+ result.set(cipherKey);
+
+ for (var j = 16, i = 1; j < b; ++i) {
+ var t1 = result[j - 3];
+ var t2 = result[j - 2];
+ var t3 = result[j - 1];
+ var t4 = result[j - 4];
+ t1 = s[t1];
+ t2 = s[t2];
+ t3 = s[t3];
+ t4 = s[t4];
+ t1 = t1 ^ rcon[i];
+
+ for (var n = 0; n < 4; ++n) {
+ result[j] = t1 ^= result[j - 16];
+ j++;
+ result[j] = t2 ^= result[j - 16];
+ j++;
+ result[j] = t3 ^= result[j - 16];
+ j++;
+ result[j] = t4 ^= result[j - 16];
+ j++;
+ }
+ }
+
+ return result;
+ }
+ }]);
+
+ return AES128Cipher;
+}(AESBaseCipher);
+
+exports.AES128Cipher = AES128Cipher;
+
+var AES256Cipher =
+/*#__PURE__*/
+function (_AESBaseCipher2) {
+ _inherits(AES256Cipher, _AESBaseCipher2);
+
+ function AES256Cipher(key) {
+ var _this2;
+
+ _classCallCheck(this, AES256Cipher);
+
+ _this2 = _possibleConstructorReturn(this, _getPrototypeOf(AES256Cipher).call(this));
+ _this2._cyclesOfRepetition = 14;
+ _this2._keySize = 224;
+ _this2._key = _this2._expandKey(key);
+ return _this2;
+ }
+
+ _createClass(AES256Cipher, [{
+ key: "_expandKey",
+ value: function _expandKey(cipherKey) {
+ var b = 240;
+ var s = this._s;
+ var result = new Uint8Array(b);
+ result.set(cipherKey);
+ var r = 1;
+ var t1, t2, t3, t4;
+
+ for (var j = 32, i = 1; j < b; ++i) {
+ if (j % 32 === 16) {
+ t1 = s[t1];
+ t2 = s[t2];
+ t3 = s[t3];
+ t4 = s[t4];
+ } else if (j % 32 === 0) {
+ t1 = result[j - 3];
+ t2 = result[j - 2];
+ t3 = result[j - 1];
+ t4 = result[j - 4];
+ t1 = s[t1];
+ t2 = s[t2];
+ t3 = s[t3];
+ t4 = s[t4];
+ t1 = t1 ^ r;
+
+ if ((r <<= 1) >= 256) {
+ r = (r ^ 0x1b) & 0xFF;
+ }
+ }
+
+ for (var n = 0; n < 4; ++n) {
+ result[j] = t1 ^= result[j - 32];
+ j++;
+ result[j] = t2 ^= result[j - 32];
+ j++;
+ result[j] = t3 ^= result[j - 32];
+ j++;
+ result[j] = t4 ^= result[j - 32];
+ j++;
+ }
+ }
+
+ return result;
+ }
+ }]);
+
+ return AES256Cipher;
+}(AESBaseCipher);
+
+exports.AES256Cipher = AES256Cipher;
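Both AES classes implement CBC on top of the shared block routines in AESBaseCipher. A round-trip sketch with AES256Cipher, where the all-zero 32-byte key, 16-byte IV and single plaintext block are illustrative values; finalize is false because encrypt() does not append padding:

var key = new Uint8Array(32);    // 256-bit key (all zero, illustration only)
var iv = new Uint8Array(16);     // CBC initialization vector
var plain = new Uint8Array(16);  // exactly one block
plain[0] = 0x50;

var encrypted = new AES256Cipher(key).encrypt(plain, iv);                 // 16 bytes
var decrypted = new AES256Cipher(key).decryptBlock(encrypted, false, iv); // equals `plain`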
+
+var PDF17 = function PDF17Closure() {
+ function compareByteArrays(array1, array2) {
+ if (array1.length !== array2.length) {
+ return false;
+ }
+
+ for (var i = 0; i < array1.length; i++) {
+ if (array1[i] !== array2[i]) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ function PDF17() {}
+
+ PDF17.prototype = {
+ checkOwnerPassword: function PDF17_checkOwnerPassword(password, ownerValidationSalt, userBytes, ownerPassword) {
+ var hashData = new Uint8Array(password.length + 56);
+ hashData.set(password, 0);
+ hashData.set(ownerValidationSalt, password.length);
+ hashData.set(userBytes, password.length + ownerValidationSalt.length);
+ var result = calculateSHA256(hashData, 0, hashData.length);
+ return compareByteArrays(result, ownerPassword);
+ },
+ checkUserPassword: function PDF17_checkUserPassword(password, userValidationSalt, userPassword) {
+ var hashData = new Uint8Array(password.length + 8);
+ hashData.set(password, 0);
+ hashData.set(userValidationSalt, password.length);
+ var result = calculateSHA256(hashData, 0, hashData.length);
+ return compareByteArrays(result, userPassword);
+ },
+ getOwnerKey: function PDF17_getOwnerKey(password, ownerKeySalt, userBytes, ownerEncryption) {
+ var hashData = new Uint8Array(password.length + 56);
+ hashData.set(password, 0);
+ hashData.set(ownerKeySalt, password.length);
+ hashData.set(userBytes, password.length + ownerKeySalt.length);
+ var key = calculateSHA256(hashData, 0, hashData.length);
+ var cipher = new AES256Cipher(key);
+ return cipher.decryptBlock(ownerEncryption, false, new Uint8Array(16));
+ },
+ getUserKey: function PDF17_getUserKey(password, userKeySalt, userEncryption) {
+ var hashData = new Uint8Array(password.length + 8);
+ hashData.set(password, 0);
+ hashData.set(userKeySalt, password.length);
+ var key = calculateSHA256(hashData, 0, hashData.length);
+ var cipher = new AES256Cipher(key);
+ return cipher.decryptBlock(userEncryption, false, new Uint8Array(16));
+ }
+ };
+ return PDF17;
+}();
+
+exports.PDF17 = PDF17;
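PDF17 covers the revision 5 (AES-256) password checks, where the stored user hash is SHA-256 over password + validation salt. A self-consistent sketch with made-up password and salt bytes:

var pdf17 = new PDF17();
var password = new Uint8Array([0x73, 0x65, 0x63, 0x72, 0x65, 0x74]); // "secret"
var validationSalt = new Uint8Array(8); // would come from /U in a real file

// Rebuild the stored hash the same way checkUserPassword does internally.
var hashData = new Uint8Array(password.length + 8);
hashData.set(password, 0);
hashData.set(validationSalt, password.length);
var storedUserPassword = calculateSHA256(hashData, 0, hashData.length);

pdf17.checkUserPassword(password, validationSalt, storedUserPassword); // true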
+
+var PDF20 = function PDF20Closure() {
+ function concatArrays(array1, array2) {
+ var t = new Uint8Array(array1.length + array2.length);
+ t.set(array1, 0);
+ t.set(array2, array1.length);
+ return t;
+ }
+
+ function calculatePDF20Hash(password, input, userBytes) {
+ var k = calculateSHA256(input, 0, input.length).subarray(0, 32);
+ var e = [0];
+ var i = 0;
+
+ while (i < 64 || e[e.length - 1] > i - 32) {
+ var arrayLength = password.length + k.length + userBytes.length;
+ var k1 = new Uint8Array(arrayLength * 64);
+ var array = concatArrays(password, k);
+ array = concatArrays(array, userBytes);
+
+ for (var j = 0, pos = 0; j < 64; j++, pos += arrayLength) {
+ k1.set(array, pos);
+ }
+
+ var cipher = new AES128Cipher(k.subarray(0, 16));
+ e = cipher.encrypt(k1, k.subarray(16, 32));
+ var remainder = 0;
+
+ for (var z = 0; z < 16; z++) {
+ remainder *= 256 % 3;
+ remainder %= 3;
+ remainder += (e[z] >>> 0) % 3;
+ remainder %= 3;
+ }
+
+ if (remainder === 0) {
+ k = calculateSHA256(e, 0, e.length);
+ } else if (remainder === 1) {
+ k = calculateSHA384(e, 0, e.length);
+ } else if (remainder === 2) {
+ k = calculateSHA512(e, 0, e.length);
+ }
+
+ i++;
+ }
+
+ return k.subarray(0, 32);
+ }
+
+ function PDF20() {}
+
+ function compareByteArrays(array1, array2) {
+ if (array1.length !== array2.length) {
+ return false;
+ }
+
+ for (var i = 0; i < array1.length; i++) {
+ if (array1[i] !== array2[i]) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ PDF20.prototype = {
+ hash: function PDF20_hash(password, concatBytes, userBytes) {
+ return calculatePDF20Hash(password, concatBytes, userBytes);
+ },
+ checkOwnerPassword: function PDF20_checkOwnerPassword(password, ownerValidationSalt, userBytes, ownerPassword) {
+ var hashData = new Uint8Array(password.length + 56);
+ hashData.set(password, 0);
+ hashData.set(ownerValidationSalt, password.length);
+ hashData.set(userBytes, password.length + ownerValidationSalt.length);
+ var result = calculatePDF20Hash(password, hashData, userBytes);
+ return compareByteArrays(result, ownerPassword);
+ },
+ checkUserPassword: function PDF20_checkUserPassword(password, userValidationSalt, userPassword) {
+ var hashData = new Uint8Array(password.length + 8);
+ hashData.set(password, 0);
+ hashData.set(userValidationSalt, password.length);
+ var result = calculatePDF20Hash(password, hashData, []);
+ return compareByteArrays(result, userPassword);
+ },
+ getOwnerKey: function PDF20_getOwnerKey(password, ownerKeySalt, userBytes, ownerEncryption) {
+ var hashData = new Uint8Array(password.length + 56);
+ hashData.set(password, 0);
+ hashData.set(ownerKeySalt, password.length);
+ hashData.set(userBytes, password.length + ownerKeySalt.length);
+ var key = calculatePDF20Hash(password, hashData, userBytes);
+ var cipher = new AES256Cipher(key);
+ return cipher.decryptBlock(ownerEncryption, false, new Uint8Array(16));
+ },
+ getUserKey: function PDF20_getUserKey(password, userKeySalt, userEncryption) {
+ var hashData = new Uint8Array(password.length + 8);
+ hashData.set(password, 0);
+ hashData.set(userKeySalt, password.length);
+ var key = calculatePDF20Hash(password, hashData, []);
+ var cipher = new AES256Cipher(key);
+ return cipher.decryptBlock(userEncryption, false, new Uint8Array(16));
+ }
+ };
+ return PDF20;
+}();
+
+exports.PDF20 = PDF20;
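PDF20 differs from PDF17 only in the key-derivation hash (the iterated SHA-256/384/512 construction in calculatePDF20Hash). A sketch mirroring checkUserPassword, again with made-up inputs:

var pdf20 = new PDF20();
var password = new Uint8Array([0x68, 0x75, 0x6e, 0x74, 0x65, 0x72]); // "hunter"
var validationSalt = new Uint8Array(8);

var hashData = new Uint8Array(password.length + 8);
hashData.set(password, 0);
hashData.set(validationSalt, password.length);
var storedUserPassword = pdf20.hash(password, hashData, new Uint8Array(0));

pdf20.checkUserPassword(password, validationSalt, storedUserPassword); // true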
+
+var CipherTransform = function CipherTransformClosure() {
+ function CipherTransform(stringCipherConstructor, streamCipherConstructor) {
+ this.StringCipherConstructor = stringCipherConstructor;
+ this.StreamCipherConstructor = streamCipherConstructor;
+ }
+
+ CipherTransform.prototype = {
+ createStream: function CipherTransform_createStream(stream, length) {
+ var cipher = new this.StreamCipherConstructor();
+ return new _stream.DecryptStream(stream, length, function cipherTransformDecryptStream(data, finalize) {
+ return cipher.decryptBlock(data, finalize);
+ });
+ },
+ decryptString: function CipherTransform_decryptString(s) {
+ var cipher = new this.StringCipherConstructor();
+ var data = (0, _util.stringToBytes)(s);
+ data = cipher.decryptBlock(data, true);
+ return (0, _util.bytesToString)(data);
+ }
+ };
+ return CipherTransform;
+}();
+
+var CipherTransformFactory = function CipherTransformFactoryClosure() {
+ var defaultPasswordBytes = new Uint8Array([0x28, 0xBF, 0x4E, 0x5E, 0x4E, 0x75, 0x8A, 0x41, 0x64, 0x00, 0x4E, 0x56, 0xFF, 0xFA, 0x01, 0x08, 0x2E, 0x2E, 0x00, 0xB6, 0xD0, 0x68, 0x3E, 0x80, 0x2F, 0x0C, 0xA9, 0xFE, 0x64, 0x53, 0x69, 0x7A]);
+
+ function createEncryptionKey20(revision, password, ownerPassword, ownerValidationSalt, ownerKeySalt, uBytes, userPassword, userValidationSalt, userKeySalt, ownerEncryption, userEncryption, perms) {
+ if (password) {
+ var passwordLength = Math.min(127, password.length);
+ password = password.subarray(0, passwordLength);
+ } else {
+ password = [];
+ }
+
+ var pdfAlgorithm;
+
+ if (revision === 6) {
+ pdfAlgorithm = new PDF20();
+ } else {
+ pdfAlgorithm = new PDF17();
+ }
+
+ if (pdfAlgorithm.checkUserPassword(password, userValidationSalt, userPassword)) {
+ return pdfAlgorithm.getUserKey(password, userKeySalt, userEncryption);
+ } else if (password.length && pdfAlgorithm.checkOwnerPassword(password, ownerValidationSalt, uBytes, ownerPassword)) {
+ return pdfAlgorithm.getOwnerKey(password, ownerKeySalt, uBytes, ownerEncryption);
+ }
+
+ return null;
+ }
+
+ function prepareKeyData(fileId, password, ownerPassword, userPassword, flags, revision, keyLength, encryptMetadata) {
+ var hashDataSize = 40 + ownerPassword.length + fileId.length;
+ var hashData = new Uint8Array(hashDataSize),
+ i = 0,
+ j,
+ n;
+
+ if (password) {
+ n = Math.min(32, password.length);
+
+ for (; i < n; ++i) {
+ hashData[i] = password[i];
+ }
+ }
+
+ j = 0;
+
+ while (i < 32) {
+ hashData[i++] = defaultPasswordBytes[j++];
+ }
+
+ for (j = 0, n = ownerPassword.length; j < n; ++j) {
+ hashData[i++] = ownerPassword[j];
+ }
+
+ hashData[i++] = flags & 0xFF;
+ hashData[i++] = flags >> 8 & 0xFF;
+ hashData[i++] = flags >> 16 & 0xFF;
+ hashData[i++] = flags >>> 24 & 0xFF;
+
+ for (j = 0, n = fileId.length; j < n; ++j) {
+ hashData[i++] = fileId[j];
+ }
+
+ if (revision >= 4 && !encryptMetadata) {
+ hashData[i++] = 0xFF;
+ hashData[i++] = 0xFF;
+ hashData[i++] = 0xFF;
+ hashData[i++] = 0xFF;
+ }
+
+ var hash = calculateMD5(hashData, 0, i);
+ var keyLengthInBytes = keyLength >> 3;
+
+ if (revision >= 3) {
+ for (j = 0; j < 50; ++j) {
+ hash = calculateMD5(hash, 0, keyLengthInBytes);
+ }
+ }
+
+ var encryptionKey = hash.subarray(0, keyLengthInBytes);
+ var cipher, checkData;
+
+ if (revision >= 3) {
+ for (i = 0; i < 32; ++i) {
+ hashData[i] = defaultPasswordBytes[i];
+ }
+
+ for (j = 0, n = fileId.length; j < n; ++j) {
+ hashData[i++] = fileId[j];
+ }
+
+ cipher = new ARCFourCipher(encryptionKey);
+ checkData = cipher.encryptBlock(calculateMD5(hashData, 0, i));
+ n = encryptionKey.length;
+ var derivedKey = new Uint8Array(n),
+ k;
+
+ for (j = 1; j <= 19; ++j) {
+ for (k = 0; k < n; ++k) {
+ derivedKey[k] = encryptionKey[k] ^ j;
+ }
+
+ cipher = new ARCFourCipher(derivedKey);
+ checkData = cipher.encryptBlock(checkData);
+ }
+
+ for (j = 0, n = checkData.length; j < n; ++j) {
+ if (userPassword[j] !== checkData[j]) {
+ return null;
+ }
+ }
+ } else {
+ cipher = new ARCFourCipher(encryptionKey);
+ checkData = cipher.encryptBlock(defaultPasswordBytes);
+
+ for (j = 0, n = checkData.length; j < n; ++j) {
+ if (userPassword[j] !== checkData[j]) {
+ return null;
+ }
+ }
+ }
+
+ return encryptionKey;
+ }
+
+ function decodeUserPassword(password, ownerPassword, revision, keyLength) {
+ var hashData = new Uint8Array(32),
+ i = 0,
+ j,
+ n;
+ n = Math.min(32, password.length);
+
+ for (; i < n; ++i) {
+ hashData[i] = password[i];
+ }
+
+ j = 0;
+
+ while (i < 32) {
+ hashData[i++] = defaultPasswordBytes[j++];
+ }
+
+ var hash = calculateMD5(hashData, 0, i);
+ var keyLengthInBytes = keyLength >> 3;
+
+ if (revision >= 3) {
+ for (j = 0; j < 50; ++j) {
+ hash = calculateMD5(hash, 0, hash.length);
+ }
+ }
+
+ var cipher, userPassword;
+
+ if (revision >= 3) {
+ userPassword = ownerPassword;
+ var derivedKey = new Uint8Array(keyLengthInBytes),
+ k;
+
+ for (j = 19; j >= 0; j--) {
+ for (k = 0; k < keyLengthInBytes; ++k) {
+ derivedKey[k] = hash[k] ^ j;
+ }
+
+ cipher = new ARCFourCipher(derivedKey);
+ userPassword = cipher.encryptBlock(userPassword);
+ }
+ } else {
+ cipher = new ARCFourCipher(hash.subarray(0, keyLengthInBytes));
+ userPassword = cipher.encryptBlock(ownerPassword);
+ }
+
+ return userPassword;
+ }
+
+ var identityName = _primitives.Name.get('Identity');
+
+ function CipherTransformFactory(dict, fileId, password) {
+ var filter = dict.get('Filter');
+
+ if (!(0, _primitives.isName)(filter, 'Standard')) {
+ throw new _util.FormatError('unknown encryption method');
+ }
+
+ this.dict = dict;
+ var algorithm = dict.get('V');
+
+ if (!Number.isInteger(algorithm) || algorithm !== 1 && algorithm !== 2 && algorithm !== 4 && algorithm !== 5) {
+ throw new _util.FormatError('unsupported encryption algorithm');
+ }
+
+ this.algorithm = algorithm;
+ var keyLength = dict.get('Length');
+
+ if (!keyLength) {
+ if (algorithm <= 3) {
+ keyLength = 40;
+ } else {
+ var cfDict = dict.get('CF');
+ var streamCryptoName = dict.get('StmF');
+
+ if ((0, _primitives.isDict)(cfDict) && (0, _primitives.isName)(streamCryptoName)) {
+ cfDict.suppressEncryption = true;
+ var handlerDict = cfDict.get(streamCryptoName.name);
+ keyLength = handlerDict && handlerDict.get('Length') || 128;
+
+ if (keyLength < 40) {
+ keyLength <<= 3;
+ }
+ }
+ }
+ }
+
+ if (!Number.isInteger(keyLength) || keyLength < 40 || keyLength % 8 !== 0) {
+ throw new _util.FormatError('invalid key length');
+ }
+
+ var ownerPassword = (0, _util.stringToBytes)(dict.get('O')).subarray(0, 32);
+ var userPassword = (0, _util.stringToBytes)(dict.get('U')).subarray(0, 32);
+ var flags = dict.get('P');
+ var revision = dict.get('R');
+ var encryptMetadata = (algorithm === 4 || algorithm === 5) && dict.get('EncryptMetadata') !== false;
+ this.encryptMetadata = encryptMetadata;
+ var fileIdBytes = (0, _util.stringToBytes)(fileId);
+ var passwordBytes;
+
+ if (password) {
+ if (revision === 6) {
+ try {
+ password = (0, _util.utf8StringToString)(password);
+ } catch (ex) {
+ (0, _util.warn)('CipherTransformFactory: ' + 'Unable to convert UTF8 encoded password.');
+ }
+ }
+
+ passwordBytes = (0, _util.stringToBytes)(password);
+ }
+
+ var encryptionKey;
+
+ if (algorithm !== 5) {
+ encryptionKey = prepareKeyData(fileIdBytes, passwordBytes, ownerPassword, userPassword, flags, revision, keyLength, encryptMetadata);
+ } else {
+ var ownerValidationSalt = (0, _util.stringToBytes)(dict.get('O')).subarray(32, 40);
+ var ownerKeySalt = (0, _util.stringToBytes)(dict.get('O')).subarray(40, 48);
+ var uBytes = (0, _util.stringToBytes)(dict.get('U')).subarray(0, 48);
+ var userValidationSalt = (0, _util.stringToBytes)(dict.get('U')).subarray(32, 40);
+ var userKeySalt = (0, _util.stringToBytes)(dict.get('U')).subarray(40, 48);
+ var ownerEncryption = (0, _util.stringToBytes)(dict.get('OE'));
+ var userEncryption = (0, _util.stringToBytes)(dict.get('UE'));
+ var perms = (0, _util.stringToBytes)(dict.get('Perms'));
+ encryptionKey = createEncryptionKey20(revision, passwordBytes, ownerPassword, ownerValidationSalt, ownerKeySalt, uBytes, userPassword, userValidationSalt, userKeySalt, ownerEncryption, userEncryption, perms);
+ }
+
+ if (!encryptionKey && !password) {
+ throw new _util.PasswordException('No password given', _util.PasswordResponses.NEED_PASSWORD);
+ } else if (!encryptionKey && password) {
+ var decodedPassword = decodeUserPassword(passwordBytes, ownerPassword, revision, keyLength);
+ encryptionKey = prepareKeyData(fileIdBytes, decodedPassword, ownerPassword, userPassword, flags, revision, keyLength, encryptMetadata);
+ }
+
+ if (!encryptionKey) {
+ throw new _util.PasswordException('Incorrect Password', _util.PasswordResponses.INCORRECT_PASSWORD);
+ }
+
+ this.encryptionKey = encryptionKey;
+
+ if (algorithm >= 4) {
+ var cf = dict.get('CF');
+
+ if ((0, _primitives.isDict)(cf)) {
+ cf.suppressEncryption = true;
+ }
+
+ this.cf = cf;
+ this.stmf = dict.get('StmF') || identityName;
+ this.strf = dict.get('StrF') || identityName;
+ this.eff = dict.get('EFF') || this.stmf;
+ }
+ }
+
+ function buildObjectKey(num, gen, encryptionKey, isAes) {
+ var key = new Uint8Array(encryptionKey.length + 9),
+ i,
+ n;
+
+ for (i = 0, n = encryptionKey.length; i < n; ++i) {
+ key[i] = encryptionKey[i];
+ }
+
+ key[i++] = num & 0xFF;
+ key[i++] = num >> 8 & 0xFF;
+ key[i++] = num >> 16 & 0xFF;
+ key[i++] = gen & 0xFF;
+ key[i++] = gen >> 8 & 0xFF;
+
+ if (isAes) {
+ key[i++] = 0x73;
+ key[i++] = 0x41;
+ key[i++] = 0x6C;
+ key[i++] = 0x54;
+ }
+
+ var hash = calculateMD5(key, 0, i);
+ return hash.subarray(0, Math.min(encryptionKey.length + 5, 16));
+ }
+
+ function buildCipherConstructor(cf, name, num, gen, key) {
+ if (!(0, _primitives.isName)(name)) {
+ throw new _util.FormatError('Invalid crypt filter name.');
+ }
+
+ var cryptFilter = cf.get(name.name);
+ var cfm;
+
+ if (cryptFilter !== null && cryptFilter !== undefined) {
+ cfm = cryptFilter.get('CFM');
+ }
+
+ if (!cfm || cfm.name === 'None') {
+ return function cipherTransformFactoryBuildCipherConstructorNone() {
+ return new NullCipher();
+ };
+ }
+
+ if (cfm.name === 'V2') {
+ return function cipherTransformFactoryBuildCipherConstructorV2() {
+ return new ARCFourCipher(buildObjectKey(num, gen, key, false));
+ };
+ }
+
+ if (cfm.name === 'AESV2') {
+ return function cipherTransformFactoryBuildCipherConstructorAESV2() {
+ return new AES128Cipher(buildObjectKey(num, gen, key, true));
+ };
+ }
+
+ if (cfm.name === 'AESV3') {
+ return function cipherTransformFactoryBuildCipherConstructorAESV3() {
+ return new AES256Cipher(key);
+ };
+ }
+
+ throw new _util.FormatError('Unknown crypto method');
+ }
+
+ CipherTransformFactory.prototype = {
+ createCipherTransform: function CipherTransformFactory_createCipherTransform(num, gen) {
+ if (this.algorithm === 4 || this.algorithm === 5) {
+ return new CipherTransform(buildCipherConstructor(this.cf, this.stmf, num, gen, this.encryptionKey), buildCipherConstructor(this.cf, this.strf, num, gen, this.encryptionKey));
+ }
+
+ var key = buildObjectKey(num, gen, this.encryptionKey, false);
+
+ var cipherConstructor = function buildCipherCipherConstructor() {
+ return new ARCFourCipher(key);
+ };
+
+ return new CipherTransform(cipherConstructor, cipherConstructor);
+ }
+ };
+ return CipherTransformFactory;
+}();
+
+exports.CipherTransformFactory = CipherTransformFactory;
+
+/***/ }),
+/* 169 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.ColorSpace = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _primitives = __w_pdfjs_require__(151);
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
+
+function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
+
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+function resizeRgbImage(src, dest, w1, h1, w2, h2, alpha01) {
+ var COMPONENTS = 3;
+ alpha01 = alpha01 !== 1 ? 0 : alpha01;
+ var xRatio = w1 / w2;
+ var yRatio = h1 / h2;
+ var newIndex = 0,
+ oldIndex;
+ var xScaled = new Uint16Array(w2);
+ var w1Scanline = w1 * COMPONENTS;
+
+ for (var i = 0; i < w2; i++) {
+ xScaled[i] = Math.floor(i * xRatio) * COMPONENTS;
+ }
+
+ for (var _i = 0; _i < h2; _i++) {
+ var py = Math.floor(_i * yRatio) * w1Scanline;
+
+ for (var j = 0; j < w2; j++) {
+ oldIndex = py + xScaled[j];
+ dest[newIndex++] = src[oldIndex++];
+ dest[newIndex++] = src[oldIndex++];
+ dest[newIndex++] = src[oldIndex++];
+ newIndex += alpha01;
+ }
+ }
+}
+
+var ColorSpace =
+/*#__PURE__*/
+function () {
+ function ColorSpace(name, numComps) {
+ _classCallCheck(this, ColorSpace);
+
+ if (this.constructor === ColorSpace) {
+ (0, _util.unreachable)('Cannot initialize ColorSpace.');
+ }
+
+ this.name = name;
+ this.numComps = numComps;
+ }
+
+ _createClass(ColorSpace, [{
+ key: "getRgb",
+ value: function getRgb(src, srcOffset) {
+ var rgb = new Uint8ClampedArray(3);
+ this.getRgbItem(src, srcOffset, rgb, 0);
+ return rgb;
+ }
+ }, {
+ key: "getRgbItem",
+ value: function getRgbItem(src, srcOffset, dest, destOffset) {
+ (0, _util.unreachable)('Should not call ColorSpace.getRgbItem');
+ }
+ }, {
+ key: "getRgbBuffer",
+ value: function getRgbBuffer(src, srcOffset, count, dest, destOffset, bits, alpha01) {
+ (0, _util.unreachable)('Should not call ColorSpace.getRgbBuffer');
+ }
+ }, {
+ key: "getOutputLength",
+ value: function getOutputLength(inputLength, alpha01) {
+ (0, _util.unreachable)('Should not call ColorSpace.getOutputLength');
+ }
+ }, {
+ key: "isPassthrough",
+ value: function isPassthrough(bits) {
+ return false;
+ }
+ }, {
+ key: "isDefaultDecode",
+ value: function isDefaultDecode(decodeMap, bpc) {
+ return ColorSpace.isDefaultDecode(decodeMap, this.numComps);
+ }
+ }, {
+ key: "fillRgb",
+ value: function fillRgb(dest, originalWidth, originalHeight, width, height, actualHeight, bpc, comps, alpha01) {
+ var count = originalWidth * originalHeight;
+ var rgbBuf = null;
+ var numComponentColors = 1 << bpc;
+ var needsResizing = originalHeight !== height || originalWidth !== width;
+
+ if (this.isPassthrough(bpc)) {
+ rgbBuf = comps;
+ } else if (this.numComps === 1 && count > numComponentColors && this.name !== 'DeviceGray' && this.name !== 'DeviceRGB') {
+ var allColors = bpc <= 8 ? new Uint8Array(numComponentColors) : new Uint16Array(numComponentColors);
+
+ for (var i = 0; i < numComponentColors; i++) {
+ allColors[i] = i;
+ }
+
+ var colorMap = new Uint8ClampedArray(numComponentColors * 3);
+ this.getRgbBuffer(allColors, 0, numComponentColors, colorMap, 0, bpc, 0);
+
+ if (!needsResizing) {
+ var destPos = 0;
+
+ for (var _i2 = 0; _i2 < count; ++_i2) {
+ var key = comps[_i2] * 3;
+ dest[destPos++] = colorMap[key];
+ dest[destPos++] = colorMap[key + 1];
+ dest[destPos++] = colorMap[key + 2];
+ destPos += alpha01;
+ }
+ } else {
+ rgbBuf = new Uint8Array(count * 3);
+ var rgbPos = 0;
+
+ for (var _i3 = 0; _i3 < count; ++_i3) {
+ var _key = comps[_i3] * 3;
+
+ rgbBuf[rgbPos++] = colorMap[_key];
+ rgbBuf[rgbPos++] = colorMap[_key + 1];
+ rgbBuf[rgbPos++] = colorMap[_key + 2];
+ }
+ }
+ } else {
+ if (!needsResizing) {
+ this.getRgbBuffer(comps, 0, width * actualHeight, dest, 0, bpc, alpha01);
+ } else {
+ rgbBuf = new Uint8ClampedArray(count * 3);
+ this.getRgbBuffer(comps, 0, count, rgbBuf, 0, bpc, 0);
+ }
+ }
+
+ if (rgbBuf) {
+ if (needsResizing) {
+ resizeRgbImage(rgbBuf, dest, originalWidth, originalHeight, width, height, alpha01);
+ } else {
+ var _destPos = 0,
+ _rgbPos = 0;
+
+ for (var _i4 = 0, ii = width * actualHeight; _i4 < ii; _i4++) {
+ dest[_destPos++] = rgbBuf[_rgbPos++];
+ dest[_destPos++] = rgbBuf[_rgbPos++];
+ dest[_destPos++] = rgbBuf[_rgbPos++];
+ _destPos += alpha01;
+ }
+ }
+ }
+ }
+ }, {
+ key: "usesZeroToOneRange",
+ get: function get() {
+ return (0, _util.shadow)(this, 'usesZeroToOneRange', true);
+ }
+ }], [{
+ key: "parse",
+ value: function parse(cs, xref, res, pdfFunctionFactory) {
+ var IR = this.parseToIR(cs, xref, res, pdfFunctionFactory);
+ return this.fromIR(IR);
+ }
+ }, {
+ key: "fromIR",
+ value: function fromIR(IR) {
+ var name = Array.isArray(IR) ? IR[0] : IR;
+ var whitePoint, blackPoint, gamma;
+
+ switch (name) {
+ case 'DeviceGrayCS':
+ return this.singletons.gray;
+
+ case 'DeviceRgbCS':
+ return this.singletons.rgb;
+
+ case 'DeviceCmykCS':
+ return this.singletons.cmyk;
+
+ case 'CalGrayCS':
+ whitePoint = IR[1];
+ blackPoint = IR[2];
+ gamma = IR[3];
+ return new CalGrayCS(whitePoint, blackPoint, gamma);
+
+ case 'CalRGBCS':
+ whitePoint = IR[1];
+ blackPoint = IR[2];
+ gamma = IR[3];
+ var matrix = IR[4];
+ return new CalRGBCS(whitePoint, blackPoint, gamma, matrix);
+
+ case 'PatternCS':
+ var basePatternCS = IR[1];
+
+ if (basePatternCS) {
+ basePatternCS = this.fromIR(basePatternCS);
+ }
+
+ return new PatternCS(basePatternCS);
+
+ case 'IndexedCS':
+ var baseIndexedCS = IR[1];
+ var hiVal = IR[2];
+ var lookup = IR[3];
+ return new IndexedCS(this.fromIR(baseIndexedCS), hiVal, lookup);
+
+ case 'AlternateCS':
+ var numComps = IR[1];
+ var alt = IR[2];
+ var tintFn = IR[3];
+ return new AlternateCS(numComps, this.fromIR(alt), tintFn);
+
+ case 'LabCS':
+ whitePoint = IR[1];
+ blackPoint = IR[2];
+ var range = IR[3];
+ return new LabCS(whitePoint, blackPoint, range);
+
+ default:
+ throw new _util.FormatError("Unknown colorspace name: ".concat(name));
+ }
+ }
+ }, {
+ key: "parseToIR",
+ value: function parseToIR(cs, xref) {
+ var res = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : null;
+ var pdfFunctionFactory = arguments.length > 3 ? arguments[3] : undefined;
+ cs = xref.fetchIfRef(cs);
+
+ if ((0, _primitives.isName)(cs)) {
+ switch (cs.name) {
+ case 'DeviceGray':
+ case 'G':
+ return 'DeviceGrayCS';
+
+ case 'DeviceRGB':
+ case 'RGB':
+ return 'DeviceRgbCS';
+
+ case 'DeviceCMYK':
+ case 'CMYK':
+ return 'DeviceCmykCS';
+
+ case 'Pattern':
+ return ['PatternCS', null];
+
+ default:
+ if ((0, _primitives.isDict)(res)) {
+ var colorSpaces = res.get('ColorSpace');
+
+ if ((0, _primitives.isDict)(colorSpaces)) {
+ var resCS = colorSpaces.get(cs.name);
+
+ if (resCS) {
+ if ((0, _primitives.isName)(resCS)) {
+ return this.parseToIR(resCS, xref, res, pdfFunctionFactory);
+ }
+
+ cs = resCS;
+ break;
+ }
+ }
+ }
+
+ throw new _util.FormatError("unrecognized colorspace ".concat(cs.name));
+ }
+ }
+
+ if (Array.isArray(cs)) {
+ var mode = xref.fetchIfRef(cs[0]).name;
+ var numComps, params, alt, whitePoint, blackPoint, gamma;
+
+ switch (mode) {
+ case 'DeviceGray':
+ case 'G':
+ return 'DeviceGrayCS';
+
+ case 'DeviceRGB':
+ case 'RGB':
+ return 'DeviceRgbCS';
+
+ case 'DeviceCMYK':
+ case 'CMYK':
+ return 'DeviceCmykCS';
+
+ case 'CalGray':
+ params = xref.fetchIfRef(cs[1]);
+ whitePoint = params.getArray('WhitePoint');
+ blackPoint = params.getArray('BlackPoint');
+ gamma = params.get('Gamma');
+ return ['CalGrayCS', whitePoint, blackPoint, gamma];
+
+ case 'CalRGB':
+ params = xref.fetchIfRef(cs[1]);
+ whitePoint = params.getArray('WhitePoint');
+ blackPoint = params.getArray('BlackPoint');
+ gamma = params.getArray('Gamma');
+ var matrix = params.getArray('Matrix');
+ return ['CalRGBCS', whitePoint, blackPoint, gamma, matrix];
+
+ case 'ICCBased':
+ var stream = xref.fetchIfRef(cs[1]);
+ var dict = stream.dict;
+ numComps = dict.get('N');
+ alt = dict.get('Alternate');
+
+ if (alt) {
+ var altIR = this.parseToIR(alt, xref, res, pdfFunctionFactory);
+ var altCS = this.fromIR(altIR, pdfFunctionFactory);
+
+ if (altCS.numComps === numComps) {
+ return altIR;
+ }
+
+ (0, _util.warn)('ICCBased color space: Ignoring incorrect /Alternate entry.');
+ }
+
+ if (numComps === 1) {
+ return 'DeviceGrayCS';
+ } else if (numComps === 3) {
+ return 'DeviceRgbCS';
+ } else if (numComps === 4) {
+ return 'DeviceCmykCS';
+ }
+
+ break;
+
+ case 'Pattern':
+ var basePatternCS = cs[1] || null;
+
+ if (basePatternCS) {
+ basePatternCS = this.parseToIR(basePatternCS, xref, res, pdfFunctionFactory);
+ }
+
+ return ['PatternCS', basePatternCS];
+
+ case 'Indexed':
+ case 'I':
+ var baseIndexedCS = this.parseToIR(cs[1], xref, res, pdfFunctionFactory);
+ var hiVal = xref.fetchIfRef(cs[2]) + 1;
+ var lookup = xref.fetchIfRef(cs[3]);
+
+ if ((0, _primitives.isStream)(lookup)) {
+ lookup = lookup.getBytes();
+ }
+
+ return ['IndexedCS', baseIndexedCS, hiVal, lookup];
+
+ case 'Separation':
+ case 'DeviceN':
+ var name = xref.fetchIfRef(cs[1]);
+ numComps = Array.isArray(name) ? name.length : 1;
+ alt = this.parseToIR(cs[2], xref, res, pdfFunctionFactory);
+ var tintFn = pdfFunctionFactory.create(xref.fetchIfRef(cs[3]));
+ return ['AlternateCS', numComps, alt, tintFn];
+
+ case 'Lab':
+ params = xref.fetchIfRef(cs[1]);
+ whitePoint = params.getArray('WhitePoint');
+ blackPoint = params.getArray('BlackPoint');
+ var range = params.getArray('Range');
+ return ['LabCS', whitePoint, blackPoint, range];
+
+ default:
+ throw new _util.FormatError("unimplemented color space object \"".concat(mode, "\""));
+ }
+ }
+
+ throw new _util.FormatError("unrecognized color space object: \"".concat(cs, "\""));
+ }
+ }, {
+ key: "isDefaultDecode",
+ value: function isDefaultDecode(decode, numComps) {
+ if (!Array.isArray(decode)) {
+ return true;
+ }
+
+ if (numComps * 2 !== decode.length) {
+ (0, _util.warn)('The decode map is not the correct length');
+ return true;
+ }
+
+ for (var i = 0, ii = decode.length; i < ii; i += 2) {
+ if (decode[i] !== 0 || decode[i + 1] !== 1) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+ }, {
+ key: "singletons",
+ get: function get() {
+ return (0, _util.shadow)(this, 'singletons', {
+ get gray() {
+ return (0, _util.shadow)(this, 'gray', new DeviceGrayCS());
+ },
+
+ get rgb() {
+ return (0, _util.shadow)(this, 'rgb', new DeviceRgbCS());
+ },
+
+ get cmyk() {
+ return (0, _util.shadow)(this, 'cmyk', new DeviceCmykCS());
+ }
+
+ });
+ }
+ }]);
+
+ return ColorSpace;
+}();
+
+exports.ColorSpace = ColorSpace;
+
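+// AlternateCS backs /Separation and /DeviceN color spaces: each input value is
+// passed through the tint transform function and the result is rendered in the
+// alternate (base) color space.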
+var AlternateCS =
+/*#__PURE__*/
+function (_ColorSpace) {
+ _inherits(AlternateCS, _ColorSpace);
+
+ function AlternateCS(numComps, base, tintFn) {
+ var _this;
+
+ _classCallCheck(this, AlternateCS);
+
+ _this = _possibleConstructorReturn(this, _getPrototypeOf(AlternateCS).call(this, 'Alternate', numComps));
+ _this.base = base;
+ _this.tintFn = tintFn;
+ _this.tmpBuf = new Float32Array(base.numComps);
+ return _this;
+ }
+
+ _createClass(AlternateCS, [{
+ key: "getRgbItem",
+ value: function getRgbItem(src, srcOffset, dest, destOffset) {
+ var tmpBuf = this.tmpBuf;
+ this.tintFn(src, srcOffset, tmpBuf, 0);
+ this.base.getRgbItem(tmpBuf, 0, dest, destOffset);
+ }
+ }, {
+ key: "getRgbBuffer",
+ value: function getRgbBuffer(src, srcOffset, count, dest, destOffset, bits, alpha01) {
+ var tintFn = this.tintFn;
+ var base = this.base;
+ var scale = 1 / ((1 << bits) - 1);
+ var baseNumComps = base.numComps;
+ var usesZeroToOneRange = base.usesZeroToOneRange;
+ var isPassthrough = (base.isPassthrough(8) || !usesZeroToOneRange) && alpha01 === 0;
+ var pos = isPassthrough ? destOffset : 0;
+ var baseBuf = isPassthrough ? dest : new Uint8ClampedArray(baseNumComps * count);
+ var numComps = this.numComps;
+ var scaled = new Float32Array(numComps);
+ var tinted = new Float32Array(baseNumComps);
+ var i, j;
+
+ for (i = 0; i < count; i++) {
+ for (j = 0; j < numComps; j++) {
+ scaled[j] = src[srcOffset++] * scale;
+ }
+
+ tintFn(scaled, 0, tinted, 0);
+
+ if (usesZeroToOneRange) {
+ for (j = 0; j < baseNumComps; j++) {
+ baseBuf[pos++] = tinted[j] * 255;
+ }
+ } else {
+ base.getRgbItem(tinted, 0, baseBuf, pos);
+ pos += baseNumComps;
+ }
+ }
+
+ if (!isPassthrough) {
+ base.getRgbBuffer(baseBuf, 0, count, dest, destOffset, 8, alpha01);
+ }
+ }
+ }, {
+ key: "getOutputLength",
+ value: function getOutputLength(inputLength, alpha01) {
+ return this.base.getOutputLength(inputLength * this.base.numComps / this.numComps, alpha01);
+ }
+ }]);
+
+ return AlternateCS;
+}(ColorSpace);
+
+var PatternCS =
+/*#__PURE__*/
+function (_ColorSpace2) {
+ _inherits(PatternCS, _ColorSpace2);
+
+ function PatternCS(baseCS) {
+ var _this2;
+
+ _classCallCheck(this, PatternCS);
+
+ _this2 = _possibleConstructorReturn(this, _getPrototypeOf(PatternCS).call(this, 'Pattern', null));
+ _this2.base = baseCS;
+ return _this2;
+ }
+
+ _createClass(PatternCS, [{
+ key: "isDefaultDecode",
+ value: function isDefaultDecode(decodeMap, bpc) {
+ (0, _util.unreachable)('Should not call PatternCS.isDefaultDecode');
+ }
+ }]);
+
+ return PatternCS;
+}(ColorSpace);
+
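+// IndexedCS implements /Indexed (palette) color spaces: the single input
+// component selects an entry in the lookup table, which is then interpreted
+// in the base color space.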
+var IndexedCS =
+/*#__PURE__*/
+function (_ColorSpace3) {
+ _inherits(IndexedCS, _ColorSpace3);
+
+ function IndexedCS(base, highVal, lookup) {
+ var _this3;
+
+ _classCallCheck(this, IndexedCS);
+
+ _this3 = _possibleConstructorReturn(this, _getPrototypeOf(IndexedCS).call(this, 'Indexed', 1));
+ _this3.base = base;
+ _this3.highVal = highVal;
+ var baseNumComps = base.numComps;
+ var length = baseNumComps * highVal;
+
+ if ((0, _primitives.isStream)(lookup)) {
+ _this3.lookup = new Uint8Array(length);
+ var bytes = lookup.getBytes(length);
+
+ _this3.lookup.set(bytes);
+ } else if ((0, _util.isString)(lookup)) {
+ _this3.lookup = new Uint8Array(length);
+
+ for (var i = 0; i < length; ++i) {
+ _this3.lookup[i] = lookup.charCodeAt(i);
+ }
+ } else if (lookup instanceof Uint8Array) {
+ _this3.lookup = lookup;
+ } else {
+ throw new _util.FormatError("Unrecognized lookup table: ".concat(lookup));
+ }
+
+ return _this3;
+ }
+
+ _createClass(IndexedCS, [{
+ key: "getRgbItem",
+ value: function getRgbItem(src, srcOffset, dest, destOffset) {
+ var numComps = this.base.numComps;
+ var start = src[srcOffset] * numComps;
+ this.base.getRgbBuffer(this.lookup, start, 1, dest, destOffset, 8, 0);
+ }
+ }, {
+ key: "getRgbBuffer",
+ value: function getRgbBuffer(src, srcOffset, count, dest, destOffset, bits, alpha01) {
+ var base = this.base;
+ var numComps = base.numComps;
+ var outputDelta = base.getOutputLength(numComps, alpha01);
+ var lookup = this.lookup;
+
+ for (var i = 0; i < count; ++i) {
+ var lookupPos = src[srcOffset++] * numComps;
+ base.getRgbBuffer(lookup, lookupPos, 1, dest, destOffset, 8, alpha01);
+ destOffset += outputDelta;
+ }
+ }
+ }, {
+ key: "getOutputLength",
+ value: function getOutputLength(inputLength, alpha01) {
+ return this.base.getOutputLength(inputLength * this.base.numComps, alpha01);
+ }
+ }, {
+ key: "isDefaultDecode",
+ value: function isDefaultDecode(decodeMap, bpc) {
+ if (!Array.isArray(decodeMap)) {
+ return true;
+ }
+
+ if (decodeMap.length !== 2) {
+ (0, _util.warn)('Decode map length is not correct');
+ return true;
+ }
+
+ if (!Number.isInteger(bpc) || bpc < 1) {
+ (0, _util.warn)('Bits per component is not correct');
+ return true;
+ }
+
+ return decodeMap[0] === 0 && decodeMap[1] === (1 << bpc) - 1;
+ }
+ }]);
+
+ return IndexedCS;
+}(ColorSpace);
+
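+// DeviceGrayCS and DeviceRgbCS are the trivial device spaces: gray values are
+// replicated across R, G and B, and RGB values are only rescaled to 0-255,
+// with a fast path when the input is already 8-bit and no alpha is interleaved.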
+var DeviceGrayCS =
+/*#__PURE__*/
+function (_ColorSpace4) {
+ _inherits(DeviceGrayCS, _ColorSpace4);
+
+ function DeviceGrayCS() {
+ _classCallCheck(this, DeviceGrayCS);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(DeviceGrayCS).call(this, 'DeviceGray', 1));
+ }
+
+ _createClass(DeviceGrayCS, [{
+ key: "getRgbItem",
+ value: function getRgbItem(src, srcOffset, dest, destOffset) {
+ var c = src[srcOffset] * 255;
+ dest[destOffset] = dest[destOffset + 1] = dest[destOffset + 2] = c;
+ }
+ }, {
+ key: "getRgbBuffer",
+ value: function getRgbBuffer(src, srcOffset, count, dest, destOffset, bits, alpha01) {
+ var scale = 255 / ((1 << bits) - 1);
+ var j = srcOffset,
+ q = destOffset;
+
+ for (var i = 0; i < count; ++i) {
+ var c = scale * src[j++];
+ dest[q++] = c;
+ dest[q++] = c;
+ dest[q++] = c;
+ q += alpha01;
+ }
+ }
+ }, {
+ key: "getOutputLength",
+ value: function getOutputLength(inputLength, alpha01) {
+ return inputLength * (3 + alpha01);
+ }
+ }]);
+
+ return DeviceGrayCS;
+}(ColorSpace);
+
+var DeviceRgbCS =
+/*#__PURE__*/
+function (_ColorSpace5) {
+ _inherits(DeviceRgbCS, _ColorSpace5);
+
+ function DeviceRgbCS() {
+ _classCallCheck(this, DeviceRgbCS);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(DeviceRgbCS).call(this, 'DeviceRGB', 3));
+ }
+
+ _createClass(DeviceRgbCS, [{
+ key: "getRgbItem",
+ value: function getRgbItem(src, srcOffset, dest, destOffset) {
+ dest[destOffset] = src[srcOffset] * 255;
+ dest[destOffset + 1] = src[srcOffset + 1] * 255;
+ dest[destOffset + 2] = src[srcOffset + 2] * 255;
+ }
+ }, {
+ key: "getRgbBuffer",
+ value: function getRgbBuffer(src, srcOffset, count, dest, destOffset, bits, alpha01) {
+ if (bits === 8 && alpha01 === 0) {
+ dest.set(src.subarray(srcOffset, srcOffset + count * 3), destOffset);
+ return;
+ }
+
+ var scale = 255 / ((1 << bits) - 1);
+ var j = srcOffset,
+ q = destOffset;
+
+ for (var i = 0; i < count; ++i) {
+ dest[q++] = scale * src[j++];
+ dest[q++] = scale * src[j++];
+ dest[q++] = scale * src[j++];
+ q += alpha01;
+ }
+ }
+ }, {
+ key: "getOutputLength",
+ value: function getOutputLength(inputLength, alpha01) {
+ return inputLength * (3 + alpha01) / 3 | 0;
+ }
+ }, {
+ key: "isPassthrough",
+ value: function isPassthrough(bits) {
+ return bits === 8;
+ }
+ }]);
+
+ return DeviceRgbCS;
+}(ColorSpace);
+
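+// DeviceCmykCS converts CMYK to RGB with a polynomial approximation (quadratic
+// in the c, m, y, k components) using hard-coded coefficients, evaluated once
+// per output channel.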
+var DeviceCmykCS = function DeviceCmykCSClosure() {
+ function convertToRgb(src, srcOffset, srcScale, dest, destOffset) {
+ var c = src[srcOffset] * srcScale;
+ var m = src[srcOffset + 1] * srcScale;
+ var y = src[srcOffset + 2] * srcScale;
+ var k = src[srcOffset + 3] * srcScale;
+ dest[destOffset] = 255 + c * (-4.387332384609988 * c + 54.48615194189176 * m + 18.82290502165302 * y + 212.25662451639585 * k + -285.2331026137004) + m * (1.7149763477362134 * m - 5.6096736904047315 * y + -17.873870861415444 * k - 5.497006427196366) + y * (-2.5217340131683033 * y - 21.248923337353073 * k + 17.5119270841813) + k * (-21.86122147463605 * k - 189.48180835922747);
+ dest[destOffset + 1] = 255 + c * (8.841041422036149 * c + 60.118027045597366 * m + 6.871425592049007 * y + 31.159100130055922 * k + -79.2970844816548) + m * (-15.310361306967817 * m + 17.575251261109482 * y + 131.35250912493976 * k - 190.9453302588951) + y * (4.444339102852739 * y + 9.8632861493405 * k - 24.86741582555878) + k * (-20.737325471181034 * k - 187.80453709719578);
+ dest[destOffset + 2] = 255 + c * (0.8842522430003296 * c + 8.078677503112928 * m + 30.89978309703729 * y - 0.23883238689178934 * k + -14.183576799673286) + m * (10.49593273432072 * m + 63.02378494754052 * y + 50.606957656360734 * k - 112.23884253719248) + y * (0.03296041114873217 * y + 115.60384449646641 * k + -193.58209356861505) + k * (-22.33816807309886 * k - 180.12613974708367);
+ }
+
+ var DeviceCmykCS =
+ /*#__PURE__*/
+ function (_ColorSpace6) {
+ _inherits(DeviceCmykCS, _ColorSpace6);
+
+ function DeviceCmykCS() {
+ _classCallCheck(this, DeviceCmykCS);
+
+ return _possibleConstructorReturn(this, _getPrototypeOf(DeviceCmykCS).call(this, 'DeviceCMYK', 4));
+ }
+
+ _createClass(DeviceCmykCS, [{
+ key: "getRgbItem",
+ value: function getRgbItem(src, srcOffset, dest, destOffset) {
+ convertToRgb(src, srcOffset, 1, dest, destOffset);
+ }
+ }, {
+ key: "getRgbBuffer",
+ value: function getRgbBuffer(src, srcOffset, count, dest, destOffset, bits, alpha01) {
+ var scale = 1 / ((1 << bits) - 1);
+
+ for (var i = 0; i < count; i++) {
+ convertToRgb(src, srcOffset, scale, dest, destOffset);
+ srcOffset += 4;
+ destOffset += 3 + alpha01;
+ }
+ }
+ }, {
+ key: "getOutputLength",
+ value: function getOutputLength(inputLength, alpha01) {
+ return inputLength / 4 * (3 + alpha01) | 0;
+ }
+ }]);
+
+ return DeviceCmykCS;
+ }(ColorSpace);
+
+ return DeviceCmykCS;
+}();
+
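+// CalGrayCS handles /CalGray: the single A component is raised to the Gamma
+// exponent, scaled by the whitepoint's Y, and mapped to a 0-255 gray level
+// through an L*-style cube-root curve.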
+var CalGrayCS = function CalGrayCSClosure() {
+ function convertToRgb(cs, src, srcOffset, dest, destOffset, scale) {
+ var A = src[srcOffset] * scale;
+ var AG = Math.pow(A, cs.G);
+ var L = cs.YW * AG;
+ var val = Math.max(295.8 * Math.pow(L, 0.333333333333333333) - 40.8, 0);
+ dest[destOffset] = val;
+ dest[destOffset + 1] = val;
+ dest[destOffset + 2] = val;
+ }
+
+ var CalGrayCS =
+ /*#__PURE__*/
+ function (_ColorSpace7) {
+ _inherits(CalGrayCS, _ColorSpace7);
+
+ function CalGrayCS(whitePoint, blackPoint, gamma) {
+ var _this4;
+
+ _classCallCheck(this, CalGrayCS);
+
+ _this4 = _possibleConstructorReturn(this, _getPrototypeOf(CalGrayCS).call(this, 'CalGray', 1));
+
+ if (!whitePoint) {
+ throw new _util.FormatError('WhitePoint missing - required for color space CalGray');
+ }
+
+ blackPoint = blackPoint || [0, 0, 0];
+ gamma = gamma || 1;
+ _this4.XW = whitePoint[0];
+ _this4.YW = whitePoint[1];
+ _this4.ZW = whitePoint[2];
+ _this4.XB = blackPoint[0];
+ _this4.YB = blackPoint[1];
+ _this4.ZB = blackPoint[2];
+ _this4.G = gamma;
+
+ if (_this4.XW < 0 || _this4.ZW < 0 || _this4.YW !== 1) {
+ throw new _util.FormatError("Invalid WhitePoint components for ".concat(_this4.name) + ', no fallback available');
+ }
+
+ if (_this4.XB < 0 || _this4.YB < 0 || _this4.ZB < 0) {
+ (0, _util.info)("Invalid BlackPoint for ".concat(_this4.name, ", falling back to default."));
+ _this4.XB = _this4.YB = _this4.ZB = 0;
+ }
+
+ if (_this4.XB !== 0 || _this4.YB !== 0 || _this4.ZB !== 0) {
+ (0, _util.warn)("".concat(_this4.name, ", BlackPoint: XB: ").concat(_this4.XB, ", YB: ").concat(_this4.YB, ", ") + "ZB: ".concat(_this4.ZB, ", only default values are supported."));
+ }
+
+ if (_this4.G < 1) {
+ (0, _util.info)("Invalid Gamma: ".concat(_this4.G, " for ").concat(_this4.name, ", ") + 'falling back to default.');
+ _this4.G = 1;
+ }
+
+ return _this4;
+ }
+
+ _createClass(CalGrayCS, [{
+ key: "getRgbItem",
+ value: function getRgbItem(src, srcOffset, dest, destOffset) {
+ convertToRgb(this, src, srcOffset, dest, destOffset, 1);
+ }
+ }, {
+ key: "getRgbBuffer",
+ value: function getRgbBuffer(src, srcOffset, count, dest, destOffset, bits, alpha01) {
+ var scale = 1 / ((1 << bits) - 1);
+
+ for (var i = 0; i < count; ++i) {
+ convertToRgb(this, src, srcOffset, dest, destOffset, scale);
+ srcOffset += 1;
+ destOffset += 3 + alpha01;
+ }
+ }
+ }, {
+ key: "getOutputLength",
+ value: function getOutputLength(inputLength, alpha01) {
+ return inputLength * (3 + alpha01);
+ }
+ }]);
+
+ return CalGrayCS;
+ }(ColorSpace);
+
+ return CalGrayCS;
+}();
+
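+// CalRGBCS handles /CalRGB: gamma-decoded ABC values are converted to XYZ via
+// the Matrix entry, chromatically adapted (Bradford transform) from the given
+// WhitePoint to D65 with black-point compensation, and finally mapped to sRGB.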
+var CalRGBCS = function CalRGBCSClosure() {
+ var BRADFORD_SCALE_MATRIX = new Float32Array([0.8951, 0.2664, -0.1614, -0.7502, 1.7135, 0.0367, 0.0389, -0.0685, 1.0296]);
+ var BRADFORD_SCALE_INVERSE_MATRIX = new Float32Array([0.9869929, -0.1470543, 0.1599627, 0.4323053, 0.5183603, 0.0492912, -0.0085287, 0.0400428, 0.9684867]);
+ var SRGB_D65_XYZ_TO_RGB_MATRIX = new Float32Array([3.2404542, -1.5371385, -0.4985314, -0.9692660, 1.8760108, 0.0415560, 0.0556434, -0.2040259, 1.0572252]);
+ var FLAT_WHITEPOINT_MATRIX = new Float32Array([1, 1, 1]);
+ var tempNormalizeMatrix = new Float32Array(3);
+ var tempConvertMatrix1 = new Float32Array(3);
+ var tempConvertMatrix2 = new Float32Array(3);
+ var DECODE_L_CONSTANT = Math.pow((8 + 16) / 116, 3) / 8.0;
+
+ function matrixProduct(a, b, result) {
+ result[0] = a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
+ result[1] = a[3] * b[0] + a[4] * b[1] + a[5] * b[2];
+ result[2] = a[6] * b[0] + a[7] * b[1] + a[8] * b[2];
+ }
+
+ function convertToFlat(sourceWhitePoint, LMS, result) {
+ result[0] = LMS[0] * 1 / sourceWhitePoint[0];
+ result[1] = LMS[1] * 1 / sourceWhitePoint[1];
+ result[2] = LMS[2] * 1 / sourceWhitePoint[2];
+ }
+
+ function convertToD65(sourceWhitePoint, LMS, result) {
+ var D65X = 0.95047;
+ var D65Y = 1;
+ var D65Z = 1.08883;
+ result[0] = LMS[0] * D65X / sourceWhitePoint[0];
+ result[1] = LMS[1] * D65Y / sourceWhitePoint[1];
+ result[2] = LMS[2] * D65Z / sourceWhitePoint[2];
+ }
+
+ function sRGBTransferFunction(color) {
+ if (color <= 0.0031308) {
+ return adjustToRange(0, 1, 12.92 * color);
+ }
+
+ return adjustToRange(0, 1, (1 + 0.055) * Math.pow(color, 1 / 2.4) - 0.055);
+ }
+
+ function adjustToRange(min, max, value) {
+ return Math.max(min, Math.min(max, value));
+ }
+
+ function decodeL(L) {
+ if (L < 0) {
+ return -decodeL(-L);
+ }
+
+ if (L > 8.0) {
+ return Math.pow((L + 16) / 116, 3);
+ }
+
+ return L * DECODE_L_CONSTANT;
+ }
+
+ function compensateBlackPoint(sourceBlackPoint, XYZ_Flat, result) {
+ if (sourceBlackPoint[0] === 0 && sourceBlackPoint[1] === 0 && sourceBlackPoint[2] === 0) {
+ result[0] = XYZ_Flat[0];
+ result[1] = XYZ_Flat[1];
+ result[2] = XYZ_Flat[2];
+ return;
+ }
+
+ var zeroDecodeL = decodeL(0);
+ var X_DST = zeroDecodeL;
+ var X_SRC = decodeL(sourceBlackPoint[0]);
+ var Y_DST = zeroDecodeL;
+ var Y_SRC = decodeL(sourceBlackPoint[1]);
+ var Z_DST = zeroDecodeL;
+ var Z_SRC = decodeL(sourceBlackPoint[2]);
+ var X_Scale = (1 - X_DST) / (1 - X_SRC);
+ var X_Offset = 1 - X_Scale;
+ var Y_Scale = (1 - Y_DST) / (1 - Y_SRC);
+ var Y_Offset = 1 - Y_Scale;
+ var Z_Scale = (1 - Z_DST) / (1 - Z_SRC);
+ var Z_Offset = 1 - Z_Scale;
+ result[0] = XYZ_Flat[0] * X_Scale + X_Offset;
+ result[1] = XYZ_Flat[1] * Y_Scale + Y_Offset;
+ result[2] = XYZ_Flat[2] * Z_Scale + Z_Offset;
+ }
+
+ function normalizeWhitePointToFlat(sourceWhitePoint, XYZ_In, result) {
+ if (sourceWhitePoint[0] === 1 && sourceWhitePoint[2] === 1) {
+ result[0] = XYZ_In[0];
+ result[1] = XYZ_In[1];
+ result[2] = XYZ_In[2];
+ return;
+ }
+
+ var LMS = result;
+ matrixProduct(BRADFORD_SCALE_MATRIX, XYZ_In, LMS);
+ var LMS_Flat = tempNormalizeMatrix;
+ convertToFlat(sourceWhitePoint, LMS, LMS_Flat);
+ matrixProduct(BRADFORD_SCALE_INVERSE_MATRIX, LMS_Flat, result);
+ }
+
+ function normalizeWhitePointToD65(sourceWhitePoint, XYZ_In, result) {
+ var LMS = result;
+ matrixProduct(BRADFORD_SCALE_MATRIX, XYZ_In, LMS);
+ var LMS_D65 = tempNormalizeMatrix;
+ convertToD65(sourceWhitePoint, LMS, LMS_D65);
+ matrixProduct(BRADFORD_SCALE_INVERSE_MATRIX, LMS_D65, result);
+ }
+
+ function convertToRgb(cs, src, srcOffset, dest, destOffset, scale) {
+ var A = adjustToRange(0, 1, src[srcOffset] * scale);
+ var B = adjustToRange(0, 1, src[srcOffset + 1] * scale);
+ var C = adjustToRange(0, 1, src[srcOffset + 2] * scale);
+ var AGR = Math.pow(A, cs.GR);
+ var BGG = Math.pow(B, cs.GG);
+ var CGB = Math.pow(C, cs.GB);
+ var X = cs.MXA * AGR + cs.MXB * BGG + cs.MXC * CGB;
+ var Y = cs.MYA * AGR + cs.MYB * BGG + cs.MYC * CGB;
+ var Z = cs.MZA * AGR + cs.MZB * BGG + cs.MZC * CGB;
+ var XYZ = tempConvertMatrix1;
+ XYZ[0] = X;
+ XYZ[1] = Y;
+ XYZ[2] = Z;
+ var XYZ_Flat = tempConvertMatrix2;
+ normalizeWhitePointToFlat(cs.whitePoint, XYZ, XYZ_Flat);
+ var XYZ_Black = tempConvertMatrix1;
+ compensateBlackPoint(cs.blackPoint, XYZ_Flat, XYZ_Black);
+ var XYZ_D65 = tempConvertMatrix2;
+ normalizeWhitePointToD65(FLAT_WHITEPOINT_MATRIX, XYZ_Black, XYZ_D65);
+ var SRGB = tempConvertMatrix1;
+ matrixProduct(SRGB_D65_XYZ_TO_RGB_MATRIX, XYZ_D65, SRGB);
+ dest[destOffset] = sRGBTransferFunction(SRGB[0]) * 255;
+ dest[destOffset + 1] = sRGBTransferFunction(SRGB[1]) * 255;
+ dest[destOffset + 2] = sRGBTransferFunction(SRGB[2]) * 255;
+ }
+
+ var CalRGBCS =
+ /*#__PURE__*/
+ function (_ColorSpace8) {
+ _inherits(CalRGBCS, _ColorSpace8);
+
+ function CalRGBCS(whitePoint, blackPoint, gamma, matrix) {
+ var _this5;
+
+ _classCallCheck(this, CalRGBCS);
+
+ _this5 = _possibleConstructorReturn(this, _getPrototypeOf(CalRGBCS).call(this, 'CalRGB', 3));
+
+ if (!whitePoint) {
+ throw new _util.FormatError('WhitePoint missing - required for color space CalRGB');
+ }
+
+ blackPoint = blackPoint || new Float32Array(3);
+ gamma = gamma || new Float32Array([1, 1, 1]);
+ matrix = matrix || new Float32Array([1, 0, 0, 0, 1, 0, 0, 0, 1]);
+ var XW = whitePoint[0];
+ var YW = whitePoint[1];
+ var ZW = whitePoint[2];
+ _this5.whitePoint = whitePoint;
+ var XB = blackPoint[0];
+ var YB = blackPoint[1];
+ var ZB = blackPoint[2];
+ _this5.blackPoint = blackPoint;
+ _this5.GR = gamma[0];
+ _this5.GG = gamma[1];
+ _this5.GB = gamma[2];
+ _this5.MXA = matrix[0];
+ _this5.MYA = matrix[1];
+ _this5.MZA = matrix[2];
+ _this5.MXB = matrix[3];
+ _this5.MYB = matrix[4];
+ _this5.MZB = matrix[5];
+ _this5.MXC = matrix[6];
+ _this5.MYC = matrix[7];
+ _this5.MZC = matrix[8];
+
+ if (XW < 0 || ZW < 0 || YW !== 1) {
+ throw new _util.FormatError("Invalid WhitePoint components for ".concat(_this5.name) + ', no fallback available');
+ }
+
+ if (XB < 0 || YB < 0 || ZB < 0) {
+ (0, _util.info)("Invalid BlackPoint for ".concat(_this5.name, " [").concat(XB, ", ").concat(YB, ", ").concat(ZB, "], ") + 'falling back to default.');
+ _this5.blackPoint = new Float32Array(3);
+ }
+
+ if (_this5.GR < 0 || _this5.GG < 0 || _this5.GB < 0) {
+ (0, _util.info)("Invalid Gamma [".concat(_this5.GR, ", ").concat(_this5.GG, ", ").concat(_this5.GB, "] for ") + "".concat(_this5.name, ", falling back to default."));
+ _this5.GR = _this5.GG = _this5.GB = 1;
+ }
+
+ return _this5;
+ }
+
+ _createClass(CalRGBCS, [{
+ key: "getRgbItem",
+ value: function getRgbItem(src, srcOffset, dest, destOffset) {
+ convertToRgb(this, src, srcOffset, dest, destOffset, 1);
+ }
+ }, {
+ key: "getRgbBuffer",
+ value: function getRgbBuffer(src, srcOffset, count, dest, destOffset, bits, alpha01) {
+ var scale = 1 / ((1 << bits) - 1);
+
+ for (var i = 0; i < count; ++i) {
+ convertToRgb(this, src, srcOffset, dest, destOffset, scale);
+ srcOffset += 3;
+ destOffset += 3 + alpha01;
+ }
+ }
+ }, {
+ key: "getOutputLength",
+ value: function getOutputLength(inputLength, alpha01) {
+ return inputLength * (3 + alpha01) / 3 | 0;
+ }
+ }]);
+
+ return CalRGBCS;
+ }(ColorSpace);
+
+ return CalRGBCS;
+}();
+
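+// LabCS handles /Lab: L*, a*, b* values (decoded from the image's bit range
+// when needed) are clamped to the Range entry, converted to XYZ using the
+// whitepoint, and then approximated as RGB.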
+var LabCS = function LabCSClosure() {
+ function fn_g(x) {
+ var result;
+
+ if (x >= 6 / 29) {
+ result = x * x * x;
+ } else {
+ result = 108 / 841 * (x - 4 / 29);
+ }
+
+ return result;
+ }
+
+ function decode(value, high1, low2, high2) {
+ return low2 + value * (high2 - low2) / high1;
+ }
+
+ function convertToRgb(cs, src, srcOffset, maxVal, dest, destOffset) {
+ var Ls = src[srcOffset];
+ var as = src[srcOffset + 1];
+ var bs = src[srcOffset + 2];
+
+ if (maxVal !== false) {
+ Ls = decode(Ls, maxVal, 0, 100);
+ as = decode(as, maxVal, cs.amin, cs.amax);
+ bs = decode(bs, maxVal, cs.bmin, cs.bmax);
+ }
+
+ as = as > cs.amax ? cs.amax : as < cs.amin ? cs.amin : as;
+ bs = bs > cs.bmax ? cs.bmax : bs < cs.bmin ? cs.bmin : bs;
+ var M = (Ls + 16) / 116;
+ var L = M + as / 500;
+ var N = M - bs / 200;
+ var X = cs.XW * fn_g(L);
+ var Y = cs.YW * fn_g(M);
+ var Z = cs.ZW * fn_g(N);
+ var r, g, b;
+
+ if (cs.ZW < 1) {
+ r = X * 3.1339 + Y * -1.6170 + Z * -0.4906;
+ g = X * -0.9785 + Y * 1.9160 + Z * 0.0333;
+ b = X * 0.0720 + Y * -0.2290 + Z * 1.4057;
+ } else {
+ r = X * 3.2406 + Y * -1.5372 + Z * -0.4986;
+ g = X * -0.9689 + Y * 1.8758 + Z * 0.0415;
+ b = X * 0.0557 + Y * -0.2040 + Z * 1.0570;
+ }
+
+ dest[destOffset] = Math.sqrt(r) * 255;
+ dest[destOffset + 1] = Math.sqrt(g) * 255;
+ dest[destOffset + 2] = Math.sqrt(b) * 255;
+ }
+
+ var LabCS =
+ /*#__PURE__*/
+ function (_ColorSpace9) {
+ _inherits(LabCS, _ColorSpace9);
+
+ function LabCS(whitePoint, blackPoint, range) {
+ var _this6;
+
+ _classCallCheck(this, LabCS);
+
+ _this6 = _possibleConstructorReturn(this, _getPrototypeOf(LabCS).call(this, 'Lab', 3));
+
+ if (!whitePoint) {
+ throw new _util.FormatError('WhitePoint missing - required for color space Lab');
+ }
+
+ blackPoint = blackPoint || [0, 0, 0];
+ range = range || [-100, 100, -100, 100];
+ _this6.XW = whitePoint[0];
+ _this6.YW = whitePoint[1];
+ _this6.ZW = whitePoint[2];
+ _this6.amin = range[0];
+ _this6.amax = range[1];
+ _this6.bmin = range[2];
+ _this6.bmax = range[3];
+ _this6.XB = blackPoint[0];
+ _this6.YB = blackPoint[1];
+ _this6.ZB = blackPoint[2];
+
+ if (_this6.XW < 0 || _this6.ZW < 0 || _this6.YW !== 1) {
+ throw new _util.FormatError('Invalid WhitePoint components, no fallback available');
+ }
+
+ if (_this6.XB < 0 || _this6.YB < 0 || _this6.ZB < 0) {
+ (0, _util.info)('Invalid BlackPoint, falling back to default');
+ _this6.XB = _this6.YB = _this6.ZB = 0;
+ }
+
+ if (_this6.amin > _this6.amax || _this6.bmin > _this6.bmax) {
+ (0, _util.info)('Invalid Range, falling back to defaults');
+ _this6.amin = -100;
+ _this6.amax = 100;
+ _this6.bmin = -100;
+ _this6.bmax = 100;
+ }
+
+ return _this6;
+ }
+
+ _createClass(LabCS, [{
+ key: "getRgbItem",
+ value: function getRgbItem(src, srcOffset, dest, destOffset) {
+ convertToRgb(this, src, srcOffset, false, dest, destOffset);
+ }
+ }, {
+ key: "getRgbBuffer",
+ value: function getRgbBuffer(src, srcOffset, count, dest, destOffset, bits, alpha01) {
+ var maxVal = (1 << bits) - 1;
+
+ for (var i = 0; i < count; i++) {
+ convertToRgb(this, src, srcOffset, maxVal, dest, destOffset);
+ srcOffset += 3;
+ destOffset += 3 + alpha01;
+ }
+ }
+ }, {
+ key: "getOutputLength",
+ value: function getOutputLength(inputLength, alpha01) {
+ return inputLength * (3 + alpha01) / 3 | 0;
+ }
+ }, {
+ key: "isDefaultDecode",
+ value: function isDefaultDecode(decodeMap, bpc) {
+ return true;
+ }
+ }, {
+ key: "usesZeroToOneRange",
+ get: function get() {
+ return (0, _util.shadow)(this, 'usesZeroToOneRange', false);
+ }
+ }]);
+
+ return LabCS;
+ }(ColorSpace);
+
+ return LabCS;
+}();
+
+/***/ }),
+/* 170 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.MarkupAnnotation = exports.AnnotationFactory = exports.AnnotationBorderStyle = exports.Annotation = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _obj = __w_pdfjs_require__(156);
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _colorspace = __w_pdfjs_require__(169);
+
+var _core_utils = __w_pdfjs_require__(154);
+
+var _operator_list = __w_pdfjs_require__(171);
+
+var _stream = __w_pdfjs_require__(158);
+
+function _get(target, property, receiver) { if (typeof Reflect !== "undefined" && Reflect.get) { _get = Reflect.get; } else { _get = function _get(target, property, receiver) { var base = _superPropBase(target, property); if (!base) return; var desc = Object.getOwnPropertyDescriptor(base, property); if (desc.get) { return desc.get.call(receiver); } return desc.value; }; } return _get(target, property, receiver || target); }
+
+function _superPropBase(object, property) { while (!Object.prototype.hasOwnProperty.call(object, property)) { object = _getPrototypeOf(object); if (object === null) break; } return object; }
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
+
+function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
+
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
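+// AnnotationFactory inspects an annotation dictionary's /Subtype (and, for
+// widgets, the inheritable /FT field type) and instantiates the matching
+// class, falling back to the generic Annotation/WidgetAnnotation types.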
+var AnnotationFactory =
+/*#__PURE__*/
+function () {
+ function AnnotationFactory() {
+ _classCallCheck(this, AnnotationFactory);
+ }
+
+ _createClass(AnnotationFactory, null, [{
+ key: "create",
+ value: function create(xref, ref, pdfManager, idFactory) {
+ return pdfManager.ensure(this, '_create', [xref, ref, pdfManager, idFactory]);
+ }
+ }, {
+ key: "_create",
+ value: function _create(xref, ref, pdfManager, idFactory) {
+ var dict = xref.fetchIfRef(ref);
+
+ if (!(0, _primitives.isDict)(dict)) {
+ return undefined;
+ }
+
+ var id = (0, _primitives.isRef)(ref) ? ref.toString() : "annot_".concat(idFactory.createObjId());
+ var subtype = dict.get('Subtype');
+ subtype = (0, _primitives.isName)(subtype) ? subtype.name : null;
+ var parameters = {
+ xref: xref,
+ dict: dict,
+ subtype: subtype,
+ id: id,
+ pdfManager: pdfManager
+ };
+
+ switch (subtype) {
+ case 'Link':
+ return new LinkAnnotation(parameters);
+
+ case 'Text':
+ return new TextAnnotation(parameters);
+
+ case 'Widget':
+ var fieldType = (0, _core_utils.getInheritableProperty)({
+ dict: dict,
+ key: 'FT'
+ });
+ fieldType = (0, _primitives.isName)(fieldType) ? fieldType.name : null;
+
+ switch (fieldType) {
+ case 'Tx':
+ return new TextWidgetAnnotation(parameters);
+
+ case 'Btn':
+ return new ButtonWidgetAnnotation(parameters);
+
+ case 'Ch':
+ return new ChoiceWidgetAnnotation(parameters);
+ }
+
+ (0, _util.warn)('Unimplemented widget field type "' + fieldType + '", ' + 'falling back to base field type.');
+ return new WidgetAnnotation(parameters);
+
+ case 'Popup':
+ return new PopupAnnotation(parameters);
+
+ case 'FreeText':
+ return new FreeTextAnnotation(parameters);
+
+ case 'Line':
+ return new LineAnnotation(parameters);
+
+ case 'Square':
+ return new SquareAnnotation(parameters);
+
+ case 'Circle':
+ return new CircleAnnotation(parameters);
+
+ case 'PolyLine':
+ return new PolylineAnnotation(parameters);
+
+ case 'Polygon':
+ return new PolygonAnnotation(parameters);
+
+ case 'Caret':
+ return new CaretAnnotation(parameters);
+
+ case 'Ink':
+ return new InkAnnotation(parameters);
+
+ case 'Highlight':
+ return new HighlightAnnotation(parameters);
+
+ case 'Underline':
+ return new UnderlineAnnotation(parameters);
+
+ case 'Squiggly':
+ return new SquigglyAnnotation(parameters);
+
+ case 'StrikeOut':
+ return new StrikeOutAnnotation(parameters);
+
+ case 'Stamp':
+ return new StampAnnotation(parameters);
+
+ case 'FileAttachment':
+ return new FileAttachmentAnnotation(parameters);
+
+ default:
+ if (!subtype) {
+ (0, _util.warn)('Annotation is missing the required /Subtype.');
+ } else {
+ (0, _util.warn)('Unimplemented annotation type "' + subtype + '", ' + 'falling back to base annotation.');
+ }
+
+ return new Annotation(parameters);
+ }
+ }
+ }]);
+
+ return AnnotationFactory;
+}();
+
+exports.AnnotationFactory = AnnotationFactory;
+
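+// getTransformMatrix maps the appearance stream's /BBox (after applying its
+// /Matrix) onto the annotation's /Rect, so the appearance is scaled and
+// positioned to fill the rectangle it is drawn in.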
+function getTransformMatrix(rect, bbox, matrix) {
+ var bounds = _util.Util.getAxialAlignedBoundingBox(bbox, matrix);
+
+ var minX = bounds[0];
+ var minY = bounds[1];
+ var maxX = bounds[2];
+ var maxY = bounds[3];
+
+ if (minX === maxX || minY === maxY) {
+ return [1, 0, 0, 1, rect[0], rect[1]];
+ }
+
+ var xRatio = (rect[2] - rect[0]) / (maxX - minX);
+ var yRatio = (rect[3] - rect[1]) / (maxY - minY);
+ return [xRatio, 0, 0, yRatio, rect[0] - minX * xRatio, rect[1] - minY * yRatio];
+}
+
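+// Annotation is the base class: it reads the entries common to all annotations
+// (/Contents, /M, /F, /Rect, /C, border style and appearance stream) and
+// exposes them on this.data, plus getOperatorList() to render the appearance.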
+var Annotation =
+/*#__PURE__*/
+function () {
+ function Annotation(params) {
+ _classCallCheck(this, Annotation);
+
+ var dict = params.dict;
+ this.setContents(dict.get('Contents'));
+ this.setModificationDate(dict.get('M'));
+ this.setFlags(dict.get('F'));
+ this.setRectangle(dict.getArray('Rect'));
+ this.setColor(dict.getArray('C'));
+ this.setBorderStyle(dict);
+ this.setAppearance(dict);
+ this.data = {
+ annotationFlags: this.flags,
+ borderStyle: this.borderStyle,
+ color: this.color,
+ contents: this.contents,
+ hasAppearance: !!this.appearance,
+ id: params.id,
+ modificationDate: this.modificationDate,
+ rect: this.rectangle,
+ subtype: params.subtype
+ };
+ }
+
+ _createClass(Annotation, [{
+ key: "_hasFlag",
+ value: function _hasFlag(flags, flag) {
+ return !!(flags & flag);
+ }
+ }, {
+ key: "_isViewable",
+ value: function _isViewable(flags) {
+ return !this._hasFlag(flags, _util.AnnotationFlag.INVISIBLE) && !this._hasFlag(flags, _util.AnnotationFlag.HIDDEN) && !this._hasFlag(flags, _util.AnnotationFlag.NOVIEW);
+ }
+ }, {
+ key: "_isPrintable",
+ value: function _isPrintable(flags) {
+ return this._hasFlag(flags, _util.AnnotationFlag.PRINT) && !this._hasFlag(flags, _util.AnnotationFlag.INVISIBLE) && !this._hasFlag(flags, _util.AnnotationFlag.HIDDEN);
+ }
+ }, {
+ key: "setContents",
+ value: function setContents(contents) {
+ this.contents = (0, _util.stringToPDFString)(contents || '');
+ }
+ }, {
+ key: "setModificationDate",
+ value: function setModificationDate(modificationDate) {
+ this.modificationDate = (0, _util.isString)(modificationDate) ? modificationDate : null;
+ }
+ }, {
+ key: "setFlags",
+ value: function setFlags(flags) {
+ this.flags = Number.isInteger(flags) && flags > 0 ? flags : 0;
+ }
+ }, {
+ key: "hasFlag",
+ value: function hasFlag(flag) {
+ return this._hasFlag(this.flags, flag);
+ }
+ }, {
+ key: "setRectangle",
+ value: function setRectangle(rectangle) {
+ if (Array.isArray(rectangle) && rectangle.length === 4) {
+ this.rectangle = _util.Util.normalizeRect(rectangle);
+ } else {
+ this.rectangle = [0, 0, 0, 0];
+ }
+ }
+ }, {
+ key: "setColor",
+ value: function setColor(color) {
+ var rgbColor = new Uint8ClampedArray(3);
+
+ if (!Array.isArray(color)) {
+ this.color = rgbColor;
+ return;
+ }
+
+ switch (color.length) {
+ case 0:
+ this.color = null;
+ break;
+
+ case 1:
+ _colorspace.ColorSpace.singletons.gray.getRgbItem(color, 0, rgbColor, 0);
+
+ this.color = rgbColor;
+ break;
+
+ case 3:
+ _colorspace.ColorSpace.singletons.rgb.getRgbItem(color, 0, rgbColor, 0);
+
+ this.color = rgbColor;
+ break;
+
+ case 4:
+ _colorspace.ColorSpace.singletons.cmyk.getRgbItem(color, 0, rgbColor, 0);
+
+ this.color = rgbColor;
+ break;
+
+ default:
+ this.color = rgbColor;
+ break;
+ }
+ }
+ }, {
+ key: "setBorderStyle",
+ value: function setBorderStyle(borderStyle) {
+ this.borderStyle = new AnnotationBorderStyle();
+
+ if (!(0, _primitives.isDict)(borderStyle)) {
+ return;
+ }
+
+ if (borderStyle.has('BS')) {
+ var dict = borderStyle.get('BS');
+ var dictType = dict.get('Type');
+
+ if (!dictType || (0, _primitives.isName)(dictType, 'Border')) {
+ this.borderStyle.setWidth(dict.get('W'), this.rectangle);
+ this.borderStyle.setStyle(dict.get('S'));
+ this.borderStyle.setDashArray(dict.getArray('D'));
+ }
+ } else if (borderStyle.has('Border')) {
+ var array = borderStyle.getArray('Border');
+
+ if (Array.isArray(array) && array.length >= 3) {
+ this.borderStyle.setHorizontalCornerRadius(array[0]);
+ this.borderStyle.setVerticalCornerRadius(array[1]);
+ this.borderStyle.setWidth(array[2], this.rectangle);
+
+ if (array.length === 4) {
+ this.borderStyle.setDashArray(array[3]);
+ }
+ }
+ } else {
+ this.borderStyle.setWidth(0);
+ }
+ }
+ }, {
+ key: "setAppearance",
+ value: function setAppearance(dict) {
+ this.appearance = null;
+ var appearanceStates = dict.get('AP');
+
+ if (!(0, _primitives.isDict)(appearanceStates)) {
+ return;
+ }
+
+ var normalAppearanceState = appearanceStates.get('N');
+
+ if ((0, _primitives.isStream)(normalAppearanceState)) {
+ this.appearance = normalAppearanceState;
+ return;
+ }
+
+ if (!(0, _primitives.isDict)(normalAppearanceState)) {
+ return;
+ }
+
+ var as = dict.get('AS');
+
+ if (!(0, _primitives.isName)(as) || !normalAppearanceState.has(as.name)) {
+ return;
+ }
+
+ this.appearance = normalAppearanceState.get(as.name);
+ }
+ }, {
+ key: "loadResources",
+ value: function loadResources(keys) {
+ return this.appearance.dict.getAsync('Resources').then(function (resources) {
+ if (!resources) {
+ return undefined;
+ }
+
+ var objectLoader = new _obj.ObjectLoader(resources, keys, resources.xref);
+ return objectLoader.load().then(function () {
+ return resources;
+ });
+ });
+ }
+ }, {
+ key: "getOperatorList",
+ value: function getOperatorList(evaluator, task, renderForms) {
+ var _this = this;
+
+ if (!this.appearance) {
+ return Promise.resolve(new _operator_list.OperatorList());
+ }
+
+ var data = this.data;
+ var appearanceDict = this.appearance.dict;
+ var resourcesPromise = this.loadResources(['ExtGState', 'ColorSpace', 'Pattern', 'Shading', 'XObject', 'Font']);
+ var bbox = appearanceDict.getArray('BBox') || [0, 0, 1, 1];
+ var matrix = appearanceDict.getArray('Matrix') || [1, 0, 0, 1, 0, 0];
+ var transform = getTransformMatrix(data.rect, bbox, matrix);
+ return resourcesPromise.then(function (resources) {
+ var opList = new _operator_list.OperatorList();
+ opList.addOp(_util.OPS.beginAnnotation, [data.rect, transform, matrix]);
+ return evaluator.getOperatorList({
+ stream: _this.appearance,
+ task: task,
+ resources: resources,
+ operatorList: opList
+ }).then(function () {
+ opList.addOp(_util.OPS.endAnnotation, []);
+
+ _this.appearance.reset();
+
+ return opList;
+ });
+ });
+ }
+ }, {
+ key: "viewable",
+ get: function get() {
+ if (this.flags === 0) {
+ return true;
+ }
+
+ return this._isViewable(this.flags);
+ }
+ }, {
+ key: "printable",
+ get: function get() {
+ if (this.flags === 0) {
+ return false;
+ }
+
+ return this._isPrintable(this.flags);
+ }
+ }]);
+
+ return Annotation;
+}();
+
+exports.Annotation = Annotation;
+
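+// AnnotationBorderStyle normalizes the /BS and /Border entries (width, style,
+// dash array and corner radii), ignoring values that are clearly invalid.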
+var AnnotationBorderStyle =
+/*#__PURE__*/
+function () {
+ function AnnotationBorderStyle() {
+ _classCallCheck(this, AnnotationBorderStyle);
+
+ this.width = 1;
+ this.style = _util.AnnotationBorderStyleType.SOLID;
+ this.dashArray = [3];
+ this.horizontalCornerRadius = 0;
+ this.verticalCornerRadius = 0;
+ }
+
+ _createClass(AnnotationBorderStyle, [{
+ key: "setWidth",
+ value: function setWidth(width) {
+ var rect = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [0, 0, 0, 0];
+
+ if ((0, _primitives.isName)(width)) {
+ this.width = 0;
+ return;
+ }
+
+ if (Number.isInteger(width)) {
+ if (width > 0) {
+ var maxWidth = (rect[2] - rect[0]) / 2;
+ var maxHeight = (rect[3] - rect[1]) / 2;
+
+ if (maxWidth > 0 && maxHeight > 0 && (width > maxWidth || width > maxHeight)) {
+ (0, _util.warn)("AnnotationBorderStyle.setWidth - ignoring width: ".concat(width));
+ width = 1;
+ }
+ }
+
+ this.width = width;
+ }
+ }
+ }, {
+ key: "setStyle",
+ value: function setStyle(style) {
+ if (!(0, _primitives.isName)(style)) {
+ return;
+ }
+
+ switch (style.name) {
+ case 'S':
+ this.style = _util.AnnotationBorderStyleType.SOLID;
+ break;
+
+ case 'D':
+ this.style = _util.AnnotationBorderStyleType.DASHED;
+ break;
+
+ case 'B':
+ this.style = _util.AnnotationBorderStyleType.BEVELED;
+ break;
+
+ case 'I':
+ this.style = _util.AnnotationBorderStyleType.INSET;
+ break;
+
+ case 'U':
+ this.style = _util.AnnotationBorderStyleType.UNDERLINE;
+ break;
+
+ default:
+ break;
+ }
+ }
+ }, {
+ key: "setDashArray",
+ value: function setDashArray(dashArray) {
+ if (Array.isArray(dashArray) && dashArray.length > 0) {
+ var isValid = true;
+ var allZeros = true;
+
+ for (var i = 0, len = dashArray.length; i < len; i++) {
+ var element = dashArray[i];
+ var validNumber = +element >= 0;
+
+ if (!validNumber) {
+ isValid = false;
+ break;
+ } else if (element > 0) {
+ allZeros = false;
+ }
+ }
+
+ if (isValid && !allZeros) {
+ this.dashArray = dashArray;
+ } else {
+ this.width = 0;
+ }
+ } else if (dashArray) {
+ this.width = 0;
+ }
+ }
+ }, {
+ key: "setHorizontalCornerRadius",
+ value: function setHorizontalCornerRadius(radius) {
+ if (Number.isInteger(radius)) {
+ this.horizontalCornerRadius = radius;
+ }
+ }
+ }, {
+ key: "setVerticalCornerRadius",
+ value: function setVerticalCornerRadius(radius) {
+ if (Number.isInteger(radius)) {
+ this.verticalCornerRadius = radius;
+ }
+ }
+ }]);
+
+ return AnnotationBorderStyle;
+}();
+
+exports.AnnotationBorderStyle = AnnotationBorderStyle;
+
+var MarkupAnnotation =
+/*#__PURE__*/
+function (_Annotation) {
+ _inherits(MarkupAnnotation, _Annotation);
+
+ function MarkupAnnotation(parameters) {
+ var _this2;
+
+ _classCallCheck(this, MarkupAnnotation);
+
+ _this2 = _possibleConstructorReturn(this, _getPrototypeOf(MarkupAnnotation).call(this, parameters));
+ var dict = parameters.dict;
+
+ if (!dict.has('C')) {
+ _this2.data.color = null;
+ }
+
+ _this2.setCreationDate(dict.get('CreationDate'));
+
+ _this2.data.creationDate = _this2.creationDate;
+ _this2.data.hasPopup = dict.has('Popup');
+ _this2.data.title = (0, _util.stringToPDFString)(dict.get('T') || '');
+ return _this2;
+ }
+
+ _createClass(MarkupAnnotation, [{
+ key: "setCreationDate",
+ value: function setCreationDate(creationDate) {
+ this.creationDate = (0, _util.isString)(creationDate) ? creationDate : null;
+ }
+ }]);
+
+ return MarkupAnnotation;
+}(Annotation);
+
+exports.MarkupAnnotation = MarkupAnnotation;
+
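+// WidgetAnnotation and its Text/Button/Choice subclasses back interactive form
+// fields; they resolve inheritable field properties (/FT, /V, /Ff, /DA, /DR)
+// and expose field-specific data such as export values, options and flags.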
+var WidgetAnnotation =
+/*#__PURE__*/
+function (_Annotation2) {
+ _inherits(WidgetAnnotation, _Annotation2);
+
+ function WidgetAnnotation(params) {
+ var _this3;
+
+ _classCallCheck(this, WidgetAnnotation);
+
+ _this3 = _possibleConstructorReturn(this, _getPrototypeOf(WidgetAnnotation).call(this, params));
+ var dict = params.dict;
+ var data = _this3.data;
+ data.annotationType = _util.AnnotationType.WIDGET;
+ data.fieldName = _this3._constructFieldName(dict);
+ data.fieldValue = (0, _core_utils.getInheritableProperty)({
+ dict: dict,
+ key: 'V',
+ getArray: true
+ });
+ data.alternativeText = (0, _util.stringToPDFString)(dict.get('TU') || '');
+ data.defaultAppearance = (0, _core_utils.getInheritableProperty)({
+ dict: dict,
+ key: 'DA'
+ }) || '';
+ var fieldType = (0, _core_utils.getInheritableProperty)({
+ dict: dict,
+ key: 'FT'
+ });
+ data.fieldType = (0, _primitives.isName)(fieldType) ? fieldType.name : null;
+ _this3.fieldResources = (0, _core_utils.getInheritableProperty)({
+ dict: dict,
+ key: 'DR'
+ }) || _primitives.Dict.empty;
+ data.fieldFlags = (0, _core_utils.getInheritableProperty)({
+ dict: dict,
+ key: 'Ff'
+ });
+
+ if (!Number.isInteger(data.fieldFlags) || data.fieldFlags < 0) {
+ data.fieldFlags = 0;
+ }
+
+ data.readOnly = _this3.hasFieldFlag(_util.AnnotationFieldFlag.READONLY);
+
+ if (data.fieldType === 'Sig') {
+ data.fieldValue = null;
+
+ _this3.setFlags(_util.AnnotationFlag.HIDDEN);
+ }
+
+ return _this3;
+ }
+
+ _createClass(WidgetAnnotation, [{
+ key: "_constructFieldName",
+ value: function _constructFieldName(dict) {
+ if (!dict.has('T') && !dict.has('Parent')) {
+ (0, _util.warn)('Unknown field name, falling back to empty field name.');
+ return '';
+ }
+
+ if (!dict.has('Parent')) {
+ return (0, _util.stringToPDFString)(dict.get('T'));
+ }
+
+ var fieldName = [];
+
+ if (dict.has('T')) {
+ fieldName.unshift((0, _util.stringToPDFString)(dict.get('T')));
+ }
+
+ var loopDict = dict;
+
+ while (loopDict.has('Parent')) {
+ loopDict = loopDict.get('Parent');
+
+ if (!(0, _primitives.isDict)(loopDict)) {
+ break;
+ }
+
+ if (loopDict.has('T')) {
+ fieldName.unshift((0, _util.stringToPDFString)(loopDict.get('T')));
+ }
+ }
+
+ return fieldName.join('.');
+ }
+ }, {
+ key: "hasFieldFlag",
+ value: function hasFieldFlag(flag) {
+ return !!(this.data.fieldFlags & flag);
+ }
+ }, {
+ key: "getOperatorList",
+ value: function getOperatorList(evaluator, task, renderForms) {
+ if (renderForms) {
+ return Promise.resolve(new _operator_list.OperatorList());
+ }
+
+ return _get(_getPrototypeOf(WidgetAnnotation.prototype), "getOperatorList", this).call(this, evaluator, task, renderForms);
+ }
+ }]);
+
+ return WidgetAnnotation;
+}(Annotation);
+
+var TextWidgetAnnotation =
+/*#__PURE__*/
+function (_WidgetAnnotation) {
+ _inherits(TextWidgetAnnotation, _WidgetAnnotation);
+
+ function TextWidgetAnnotation(params) {
+ var _this4;
+
+ _classCallCheck(this, TextWidgetAnnotation);
+
+ _this4 = _possibleConstructorReturn(this, _getPrototypeOf(TextWidgetAnnotation).call(this, params));
+ var dict = params.dict;
+ _this4.data.fieldValue = (0, _util.stringToPDFString)(_this4.data.fieldValue || '');
+ var alignment = (0, _core_utils.getInheritableProperty)({
+ dict: dict,
+ key: 'Q'
+ });
+
+ if (!Number.isInteger(alignment) || alignment < 0 || alignment > 2) {
+ alignment = null;
+ }
+
+ _this4.data.textAlignment = alignment;
+ var maximumLength = (0, _core_utils.getInheritableProperty)({
+ dict: dict,
+ key: 'MaxLen'
+ });
+
+ if (!Number.isInteger(maximumLength) || maximumLength < 0) {
+ maximumLength = null;
+ }
+
+ _this4.data.maxLen = maximumLength;
+ _this4.data.multiLine = _this4.hasFieldFlag(_util.AnnotationFieldFlag.MULTILINE);
+ _this4.data.comb = _this4.hasFieldFlag(_util.AnnotationFieldFlag.COMB) && !_this4.hasFieldFlag(_util.AnnotationFieldFlag.MULTILINE) && !_this4.hasFieldFlag(_util.AnnotationFieldFlag.PASSWORD) && !_this4.hasFieldFlag(_util.AnnotationFieldFlag.FILESELECT) && _this4.data.maxLen !== null;
+ return _this4;
+ }
+
+ _createClass(TextWidgetAnnotation, [{
+ key: "getOperatorList",
+ value: function getOperatorList(evaluator, task, renderForms) {
+ if (renderForms || this.appearance) {
+ return _get(_getPrototypeOf(TextWidgetAnnotation.prototype), "getOperatorList", this).call(this, evaluator, task, renderForms);
+ }
+
+ var operatorList = new _operator_list.OperatorList();
+
+ if (!this.data.defaultAppearance) {
+ return Promise.resolve(operatorList);
+ }
+
+ var stream = new _stream.Stream((0, _util.stringToBytes)(this.data.defaultAppearance));
+ return evaluator.getOperatorList({
+ stream: stream,
+ task: task,
+ resources: this.fieldResources,
+ operatorList: operatorList
+ }).then(function () {
+ return operatorList;
+ });
+ }
+ }]);
+
+ return TextWidgetAnnotation;
+}(WidgetAnnotation);
+
+var ButtonWidgetAnnotation =
+/*#__PURE__*/
+function (_WidgetAnnotation2) {
+ _inherits(ButtonWidgetAnnotation, _WidgetAnnotation2);
+
+ function ButtonWidgetAnnotation(params) {
+ var _this5;
+
+ _classCallCheck(this, ButtonWidgetAnnotation);
+
+ _this5 = _possibleConstructorReturn(this, _getPrototypeOf(ButtonWidgetAnnotation).call(this, params));
+ _this5.data.checkBox = !_this5.hasFieldFlag(_util.AnnotationFieldFlag.RADIO) && !_this5.hasFieldFlag(_util.AnnotationFieldFlag.PUSHBUTTON);
+ _this5.data.radioButton = _this5.hasFieldFlag(_util.AnnotationFieldFlag.RADIO) && !_this5.hasFieldFlag(_util.AnnotationFieldFlag.PUSHBUTTON);
+ _this5.data.pushButton = _this5.hasFieldFlag(_util.AnnotationFieldFlag.PUSHBUTTON);
+
+ if (_this5.data.checkBox) {
+ _this5._processCheckBox(params);
+ } else if (_this5.data.radioButton) {
+ _this5._processRadioButton(params);
+ } else if (_this5.data.pushButton) {
+ _this5._processPushButton(params);
+ } else {
+ (0, _util.warn)('Invalid field flags for button widget annotation');
+ }
+
+ return _this5;
+ }
+
+ _createClass(ButtonWidgetAnnotation, [{
+ key: "_processCheckBox",
+ value: function _processCheckBox(params) {
+ if ((0, _primitives.isName)(this.data.fieldValue)) {
+ this.data.fieldValue = this.data.fieldValue.name;
+ }
+
+ var customAppearance = params.dict.get('AP');
+
+ if (!(0, _primitives.isDict)(customAppearance)) {
+ return;
+ }
+
+ var exportValueOptionsDict = customAppearance.get('D');
+
+ if (!(0, _primitives.isDict)(exportValueOptionsDict)) {
+ return;
+ }
+
+ var exportValues = exportValueOptionsDict.getKeys();
+ var hasCorrectOptionCount = exportValues.length === 2;
+
+ if (!hasCorrectOptionCount) {
+ return;
+ }
+
+ this.data.exportValue = exportValues[0] === 'Off' ? exportValues[1] : exportValues[0];
+ }
+ }, {
+ key: "_processRadioButton",
+ value: function _processRadioButton(params) {
+ this.data.fieldValue = this.data.buttonValue = null;
+ var fieldParent = params.dict.get('Parent');
+
+ if ((0, _primitives.isDict)(fieldParent) && fieldParent.has('V')) {
+ var fieldParentValue = fieldParent.get('V');
+
+ if ((0, _primitives.isName)(fieldParentValue)) {
+ this.data.fieldValue = fieldParentValue.name;
+ }
+ }
+
+ var appearanceStates = params.dict.get('AP');
+
+ if (!(0, _primitives.isDict)(appearanceStates)) {
+ return;
+ }
+
+ var normalAppearanceState = appearanceStates.get('N');
+
+ if (!(0, _primitives.isDict)(normalAppearanceState)) {
+ return;
+ }
+
+ var keys = normalAppearanceState.getKeys();
+
+ for (var i = 0, ii = keys.length; i < ii; i++) {
+ if (keys[i] !== 'Off') {
+ this.data.buttonValue = keys[i];
+ break;
+ }
+ }
+ }
+ }, {
+ key: "_processPushButton",
+ value: function _processPushButton(params) {
+ if (!params.dict.has('A')) {
+ (0, _util.warn)('Push buttons without action dictionaries are not supported');
+ return;
+ }
+
+ _obj.Catalog.parseDestDictionary({
+ destDict: params.dict,
+ resultObj: this.data,
+ docBaseUrl: params.pdfManager.docBaseUrl
+ });
+ }
+ }]);
+
+ return ButtonWidgetAnnotation;
+}(WidgetAnnotation);
+
+var ChoiceWidgetAnnotation =
+/*#__PURE__*/
+function (_WidgetAnnotation3) {
+ _inherits(ChoiceWidgetAnnotation, _WidgetAnnotation3);
+
+ function ChoiceWidgetAnnotation(params) {
+ var _this6;
+
+ _classCallCheck(this, ChoiceWidgetAnnotation);
+
+ _this6 = _possibleConstructorReturn(this, _getPrototypeOf(ChoiceWidgetAnnotation).call(this, params));
+ _this6.data.options = [];
+ var options = (0, _core_utils.getInheritableProperty)({
+ dict: params.dict,
+ key: 'Opt'
+ });
+
+ if (Array.isArray(options)) {
+ var xref = params.xref;
+
+ for (var i = 0, ii = options.length; i < ii; i++) {
+ var option = xref.fetchIfRef(options[i]);
+ var isOptionArray = Array.isArray(option);
+ _this6.data.options[i] = {
+ exportValue: isOptionArray ? xref.fetchIfRef(option[0]) : option,
+ displayValue: (0, _util.stringToPDFString)(isOptionArray ? xref.fetchIfRef(option[1]) : option)
+ };
+ }
+ }
+
+ if (!Array.isArray(_this6.data.fieldValue)) {
+ _this6.data.fieldValue = [_this6.data.fieldValue];
+ }
+
+ _this6.data.combo = _this6.hasFieldFlag(_util.AnnotationFieldFlag.COMBO);
+ _this6.data.multiSelect = _this6.hasFieldFlag(_util.AnnotationFieldFlag.MULTISELECT);
+ return _this6;
+ }
+
+ return ChoiceWidgetAnnotation;
+}(WidgetAnnotation);
+
+var TextAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation) {
+ _inherits(TextAnnotation, _MarkupAnnotation);
+
+ function TextAnnotation(parameters) {
+ var _this7;
+
+ _classCallCheck(this, TextAnnotation);
+
+ var DEFAULT_ICON_SIZE = 22;
+ _this7 = _possibleConstructorReturn(this, _getPrototypeOf(TextAnnotation).call(this, parameters));
+ _this7.data.annotationType = _util.AnnotationType.TEXT;
+
+ if (_this7.data.hasAppearance) {
+ _this7.data.name = 'NoIcon';
+ } else {
+ _this7.data.rect[1] = _this7.data.rect[3] - DEFAULT_ICON_SIZE;
+ _this7.data.rect[2] = _this7.data.rect[0] + DEFAULT_ICON_SIZE;
+ _this7.data.name = parameters.dict.has('Name') ? parameters.dict.get('Name').name : 'Note';
+ }
+
+ return _this7;
+ }
+
+ return TextAnnotation;
+}(MarkupAnnotation);
+
+var LinkAnnotation =
+/*#__PURE__*/
+function (_Annotation3) {
+ _inherits(LinkAnnotation, _Annotation3);
+
+ function LinkAnnotation(params) {
+ var _this8;
+
+ _classCallCheck(this, LinkAnnotation);
+
+ _this8 = _possibleConstructorReturn(this, _getPrototypeOf(LinkAnnotation).call(this, params));
+ _this8.data.annotationType = _util.AnnotationType.LINK;
+
+ _obj.Catalog.parseDestDictionary({
+ destDict: params.dict,
+ resultObj: _this8.data,
+ docBaseUrl: params.pdfManager.docBaseUrl
+ });
+
+ return _this8;
+ }
+
+ return LinkAnnotation;
+}(Annotation);
+
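+// PopupAnnotation inherits its title, contents, color, modification date and,
+// when necessary, its flags from the parent annotation it is attached to.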
+var PopupAnnotation =
+/*#__PURE__*/
+function (_Annotation4) {
+ _inherits(PopupAnnotation, _Annotation4);
+
+ function PopupAnnotation(parameters) {
+ var _this9;
+
+ _classCallCheck(this, PopupAnnotation);
+
+ _this9 = _possibleConstructorReturn(this, _getPrototypeOf(PopupAnnotation).call(this, parameters));
+ _this9.data.annotationType = _util.AnnotationType.POPUP;
+ var dict = parameters.dict;
+ var parentItem = dict.get('Parent');
+
+ if (!parentItem) {
+ (0, _util.warn)('Popup annotation has a missing or invalid parent annotation.');
+ return _possibleConstructorReturn(_this9);
+ }
+
+ var parentSubtype = parentItem.get('Subtype');
+ _this9.data.parentType = (0, _primitives.isName)(parentSubtype) ? parentSubtype.name : null;
+ _this9.data.parentId = dict.getRaw('Parent').toString();
+ _this9.data.title = (0, _util.stringToPDFString)(parentItem.get('T') || '');
+ _this9.data.contents = (0, _util.stringToPDFString)(parentItem.get('Contents') || '');
+
+ if (!parentItem.has('M')) {
+ _this9.data.modificationDate = null;
+ } else {
+ _this9.setModificationDate(parentItem.get('M'));
+
+ _this9.data.modificationDate = _this9.modificationDate;
+ }
+
+ if (!parentItem.has('C')) {
+ _this9.data.color = null;
+ } else {
+ _this9.setColor(parentItem.getArray('C'));
+
+ _this9.data.color = _this9.color;
+ }
+
+ if (!_this9.viewable) {
+ var parentFlags = parentItem.get('F');
+
+ if (_this9._isViewable(parentFlags)) {
+ _this9.setFlags(parentFlags);
+ }
+ }
+
+ return _this9;
+ }
+
+ return PopupAnnotation;
+}(Annotation);
+
+var FreeTextAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation2) {
+ _inherits(FreeTextAnnotation, _MarkupAnnotation2);
+
+ function FreeTextAnnotation(parameters) {
+ var _this10;
+
+ _classCallCheck(this, FreeTextAnnotation);
+
+ _this10 = _possibleConstructorReturn(this, _getPrototypeOf(FreeTextAnnotation).call(this, parameters));
+ _this10.data.annotationType = _util.AnnotationType.FREETEXT;
+ return _this10;
+ }
+
+ return FreeTextAnnotation;
+}(MarkupAnnotation);
+
+var LineAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation3) {
+ _inherits(LineAnnotation, _MarkupAnnotation3);
+
+ function LineAnnotation(parameters) {
+ var _this11;
+
+ _classCallCheck(this, LineAnnotation);
+
+ _this11 = _possibleConstructorReturn(this, _getPrototypeOf(LineAnnotation).call(this, parameters));
+ _this11.data.annotationType = _util.AnnotationType.LINE;
+ var dict = parameters.dict;
+ _this11.data.lineCoordinates = _util.Util.normalizeRect(dict.getArray('L'));
+ return _this11;
+ }
+
+ return LineAnnotation;
+}(MarkupAnnotation);
+
+var SquareAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation4) {
+ _inherits(SquareAnnotation, _MarkupAnnotation4);
+
+ function SquareAnnotation(parameters) {
+ var _this12;
+
+ _classCallCheck(this, SquareAnnotation);
+
+ _this12 = _possibleConstructorReturn(this, _getPrototypeOf(SquareAnnotation).call(this, parameters));
+ _this12.data.annotationType = _util.AnnotationType.SQUARE;
+ return _this12;
+ }
+
+ return SquareAnnotation;
+}(MarkupAnnotation);
+
+var CircleAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation5) {
+ _inherits(CircleAnnotation, _MarkupAnnotation5);
+
+ function CircleAnnotation(parameters) {
+ var _this13;
+
+ _classCallCheck(this, CircleAnnotation);
+
+ _this13 = _possibleConstructorReturn(this, _getPrototypeOf(CircleAnnotation).call(this, parameters));
+ _this13.data.annotationType = _util.AnnotationType.CIRCLE;
+ return _this13;
+ }
+
+ return CircleAnnotation;
+}(MarkupAnnotation);
+
+var PolylineAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation6) {
+ _inherits(PolylineAnnotation, _MarkupAnnotation6);
+
+ function PolylineAnnotation(parameters) {
+ var _this14;
+
+ _classCallCheck(this, PolylineAnnotation);
+
+ _this14 = _possibleConstructorReturn(this, _getPrototypeOf(PolylineAnnotation).call(this, parameters));
+ _this14.data.annotationType = _util.AnnotationType.POLYLINE;
+ var dict = parameters.dict;
+ var rawVertices = dict.getArray('Vertices');
+ _this14.data.vertices = [];
+
+ for (var i = 0, ii = rawVertices.length; i < ii; i += 2) {
+ _this14.data.vertices.push({
+ x: rawVertices[i],
+ y: rawVertices[i + 1]
+ });
+ }
+
+ return _this14;
+ }
+
+ return PolylineAnnotation;
+}(MarkupAnnotation);
+
+var PolygonAnnotation =
+/*#__PURE__*/
+function (_PolylineAnnotation) {
+ _inherits(PolygonAnnotation, _PolylineAnnotation);
+
+ function PolygonAnnotation(parameters) {
+ var _this15;
+
+ _classCallCheck(this, PolygonAnnotation);
+
+ _this15 = _possibleConstructorReturn(this, _getPrototypeOf(PolygonAnnotation).call(this, parameters));
+ _this15.data.annotationType = _util.AnnotationType.POLYGON;
+ return _this15;
+ }
+
+ return PolygonAnnotation;
+}(PolylineAnnotation);
+
+var CaretAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation7) {
+ _inherits(CaretAnnotation, _MarkupAnnotation7);
+
+ function CaretAnnotation(parameters) {
+ var _this16;
+
+ _classCallCheck(this, CaretAnnotation);
+
+ _this16 = _possibleConstructorReturn(this, _getPrototypeOf(CaretAnnotation).call(this, parameters));
+ _this16.data.annotationType = _util.AnnotationType.CARET;
+ return _this16;
+ }
+
+ return CaretAnnotation;
+}(MarkupAnnotation);
+
+var InkAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation8) {
+ _inherits(InkAnnotation, _MarkupAnnotation8);
+
+ function InkAnnotation(parameters) {
+ var _this17;
+
+ _classCallCheck(this, InkAnnotation);
+
+ _this17 = _possibleConstructorReturn(this, _getPrototypeOf(InkAnnotation).call(this, parameters));
+ _this17.data.annotationType = _util.AnnotationType.INK;
+ var dict = parameters.dict;
+ var xref = parameters.xref;
+ var originalInkLists = dict.getArray('InkList');
+ _this17.data.inkLists = [];
+
+ for (var i = 0, ii = originalInkLists.length; i < ii; ++i) {
+ _this17.data.inkLists.push([]);
+
+ for (var j = 0, jj = originalInkLists[i].length; j < jj; j += 2) {
+ _this17.data.inkLists[i].push({
+ x: xref.fetchIfRef(originalInkLists[i][j]),
+ y: xref.fetchIfRef(originalInkLists[i][j + 1])
+ });
+ }
+ }
+
+ return _this17;
+ }
+
+ return InkAnnotation;
+}(MarkupAnnotation);
+
+var HighlightAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation9) {
+ _inherits(HighlightAnnotation, _MarkupAnnotation9);
+
+ function HighlightAnnotation(parameters) {
+ var _this18;
+
+ _classCallCheck(this, HighlightAnnotation);
+
+ _this18 = _possibleConstructorReturn(this, _getPrototypeOf(HighlightAnnotation).call(this, parameters));
+ _this18.data.annotationType = _util.AnnotationType.HIGHLIGHT;
+ return _this18;
+ }
+
+ return HighlightAnnotation;
+}(MarkupAnnotation);
+
+var UnderlineAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation10) {
+ _inherits(UnderlineAnnotation, _MarkupAnnotation10);
+
+ function UnderlineAnnotation(parameters) {
+ var _this19;
+
+ _classCallCheck(this, UnderlineAnnotation);
+
+ _this19 = _possibleConstructorReturn(this, _getPrototypeOf(UnderlineAnnotation).call(this, parameters));
+ _this19.data.annotationType = _util.AnnotationType.UNDERLINE;
+ return _this19;
+ }
+
+ return UnderlineAnnotation;
+}(MarkupAnnotation);
+
+var SquigglyAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation11) {
+ _inherits(SquigglyAnnotation, _MarkupAnnotation11);
+
+ function SquigglyAnnotation(parameters) {
+ var _this20;
+
+ _classCallCheck(this, SquigglyAnnotation);
+
+ _this20 = _possibleConstructorReturn(this, _getPrototypeOf(SquigglyAnnotation).call(this, parameters));
+ _this20.data.annotationType = _util.AnnotationType.SQUIGGLY;
+ return _this20;
+ }
+
+ return SquigglyAnnotation;
+}(MarkupAnnotation);
+
+var StrikeOutAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation12) {
+ _inherits(StrikeOutAnnotation, _MarkupAnnotation12);
+
+ function StrikeOutAnnotation(parameters) {
+ var _this21;
+
+ _classCallCheck(this, StrikeOutAnnotation);
+
+ _this21 = _possibleConstructorReturn(this, _getPrototypeOf(StrikeOutAnnotation).call(this, parameters));
+ _this21.data.annotationType = _util.AnnotationType.STRIKEOUT;
+ return _this21;
+ }
+
+ return StrikeOutAnnotation;
+}(MarkupAnnotation);
+
+var StampAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation13) {
+ _inherits(StampAnnotation, _MarkupAnnotation13);
+
+ function StampAnnotation(parameters) {
+ var _this22;
+
+ _classCallCheck(this, StampAnnotation);
+
+ _this22 = _possibleConstructorReturn(this, _getPrototypeOf(StampAnnotation).call(this, parameters));
+ _this22.data.annotationType = _util.AnnotationType.STAMP;
+ return _this22;
+ }
+
+ return StampAnnotation;
+}(MarkupAnnotation);
+
+var FileAttachmentAnnotation =
+/*#__PURE__*/
+function (_MarkupAnnotation14) {
+ _inherits(FileAttachmentAnnotation, _MarkupAnnotation14);
+
+ function FileAttachmentAnnotation(parameters) {
+ var _this23;
+
+ _classCallCheck(this, FileAttachmentAnnotation);
+
+ _this23 = _possibleConstructorReturn(this, _getPrototypeOf(FileAttachmentAnnotation).call(this, parameters));
+ var file = new _obj.FileSpec(parameters.dict.get('FS'), parameters.xref);
+ _this23.data.annotationType = _util.AnnotationType.FILEATTACHMENT;
+ _this23.data.file = file.serializable;
+ return _this23;
+ }
+
+ return FileAttachmentAnnotation;
+}(MarkupAnnotation);
+
+/***/ }),
+/* 171 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.OperatorList = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
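+// QueueOptimizer incrementally rewrites the operator list as operators are pushed, collapsing repeated save/transform/paint/restore (and beginText/.../endText) patterns into single grouped operators so the main thread has fewer commands to replay.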
+var QueueOptimizer = function QueueOptimizerClosure() {
+ function addState(parentState, pattern, checkFn, iterateFn, processFn) {
+ var state = parentState;
+
+ for (var i = 0, ii = pattern.length - 1; i < ii; i++) {
+ var item = pattern[i];
+ state = state[item] || (state[item] = []);
+ }
+
+ state[pattern[pattern.length - 1]] = {
+ checkFn: checkFn,
+ iterateFn: iterateFn,
+ processFn: processFn
+ };
+ }
+
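+ // Rewrites any leading 1x1 solid-black image masks in a candidate group to the cheaper paintSolidColorImageMask op and returns how many entries were not rewritten.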
+ function handlePaintSolidColorImageMask(iFirstSave, count, fnArray, argsArray) {
+ var iFirstPIMXO = iFirstSave + 2;
+
+ for (var i = 0; i < count; i++) {
+ var arg = argsArray[iFirstPIMXO + 4 * i];
+ var imageMask = arg.length === 1 && arg[0];
+
+ if (imageMask && imageMask.width === 1 && imageMask.height === 1 && (!imageMask.data.length || imageMask.data.length === 1 && imageMask.data[0] === 0)) {
+ fnArray[iFirstPIMXO + 4 * i] = _util.OPS.paintSolidColorImageMask;
+ continue;
+ }
+
+ break;
+ }
+
+ return count - i;
+ }
+
+ var InitialState = [];
+ addState(InitialState, [_util.OPS.save, _util.OPS.transform, _util.OPS.paintInlineImageXObject, _util.OPS.restore], null, function iterateInlineImageGroup(context, i) {
+ var fnArray = context.fnArray;
+ var iFirstSave = context.iCurr - 3;
+ var pos = (i - iFirstSave) % 4;
+
+ switch (pos) {
+ case 0:
+ return fnArray[i] === _util.OPS.save;
+
+ case 1:
+ return fnArray[i] === _util.OPS.transform;
+
+ case 2:
+ return fnArray[i] === _util.OPS.paintInlineImageXObject;
+
+ case 3:
+ return fnArray[i] === _util.OPS.restore;
+ }
+
+ throw new Error("iterateInlineImageGroup - invalid pos: ".concat(pos));
+ }, function foundInlineImageGroup(context, i) {
+ var MIN_IMAGES_IN_INLINE_IMAGES_BLOCK = 10;
+ var MAX_IMAGES_IN_INLINE_IMAGES_BLOCK = 200;
+ var MAX_WIDTH = 1000;
+ var IMAGE_PADDING = 1;
+ var fnArray = context.fnArray,
+ argsArray = context.argsArray;
+ var curr = context.iCurr;
+ var iFirstSave = curr - 3;
+ var iFirstTransform = curr - 2;
+ var iFirstPIIXO = curr - 1;
+ var count = Math.min(Math.floor((i - iFirstSave) / 4), MAX_IMAGES_IN_INLINE_IMAGES_BLOCK);
+
+ if (count < MIN_IMAGES_IN_INLINE_IMAGES_BLOCK) {
+ return i - (i - iFirstSave) % 4;
+ }
+
+ var maxX = 0;
+ var map = [],
+ maxLineHeight = 0;
+ var currentX = IMAGE_PADDING,
+ currentY = IMAGE_PADDING;
+ var q;
+
+ for (q = 0; q < count; q++) {
+ var transform = argsArray[iFirstTransform + (q << 2)];
+ var img = argsArray[iFirstPIIXO + (q << 2)][0];
+
+ if (currentX + img.width > MAX_WIDTH) {
+ maxX = Math.max(maxX, currentX);
+ currentY += maxLineHeight + 2 * IMAGE_PADDING;
+ currentX = 0;
+ maxLineHeight = 0;
+ }
+
+ map.push({
+ transform: transform,
+ x: currentX,
+ y: currentY,
+ w: img.width,
+ h: img.height
+ });
+ currentX += img.width + 2 * IMAGE_PADDING;
+ maxLineHeight = Math.max(maxLineHeight, img.height);
+ }
+
+ var imgWidth = Math.max(maxX, currentX) + IMAGE_PADDING;
+ var imgHeight = currentY + maxLineHeight + IMAGE_PADDING;
+ var imgData = new Uint8ClampedArray(imgWidth * imgHeight * 4);
+ var imgRowSize = imgWidth << 2;
+
+ for (q = 0; q < count; q++) {
+ var data = argsArray[iFirstPIIXO + (q << 2)][0].data;
+ var rowSize = map[q].w << 2;
+ var dataOffset = 0;
+ var offset = map[q].x + map[q].y * imgWidth << 2;
+ imgData.set(data.subarray(0, rowSize), offset - imgRowSize);
+
+ for (var k = 0, kk = map[q].h; k < kk; k++) {
+ imgData.set(data.subarray(dataOffset, dataOffset + rowSize), offset);
+ dataOffset += rowSize;
+ offset += imgRowSize;
+ }
+
+ imgData.set(data.subarray(dataOffset - rowSize, dataOffset), offset);
+
+ while (offset >= 0) {
+ data[offset - 4] = data[offset];
+ data[offset - 3] = data[offset + 1];
+ data[offset - 2] = data[offset + 2];
+ data[offset - 1] = data[offset + 3];
+ data[offset + rowSize] = data[offset + rowSize - 4];
+ data[offset + rowSize + 1] = data[offset + rowSize - 3];
+ data[offset + rowSize + 2] = data[offset + rowSize - 2];
+ data[offset + rowSize + 3] = data[offset + rowSize - 1];
+ offset -= imgRowSize;
+ }
+ }
+
+ fnArray.splice(iFirstSave, count * 4, _util.OPS.paintInlineImageXObjectGroup);
+ argsArray.splice(iFirstSave, count * 4, [{
+ width: imgWidth,
+ height: imgHeight,
+ kind: _util.ImageKind.RGBA_32BPP,
+ data: imgData
+ }, map]);
+ return iFirstSave + 1;
+ });
+ addState(InitialState, [_util.OPS.save, _util.OPS.transform, _util.OPS.paintImageMaskXObject, _util.OPS.restore], null, function iterateImageMaskGroup(context, i) {
+ var fnArray = context.fnArray;
+ var iFirstSave = context.iCurr - 3;
+ var pos = (i - iFirstSave) % 4;
+
+ switch (pos) {
+ case 0:
+ return fnArray[i] === _util.OPS.save;
+
+ case 1:
+ return fnArray[i] === _util.OPS.transform;
+
+ case 2:
+ return fnArray[i] === _util.OPS.paintImageMaskXObject;
+
+ case 3:
+ return fnArray[i] === _util.OPS.restore;
+ }
+
+ throw new Error("iterateImageMaskGroup - invalid pos: ".concat(pos));
+ }, function foundImageMaskGroup(context, i) {
+ var MIN_IMAGES_IN_MASKS_BLOCK = 10;
+ var MAX_IMAGES_IN_MASKS_BLOCK = 100;
+ var MAX_SAME_IMAGES_IN_MASKS_BLOCK = 1000;
+ var fnArray = context.fnArray,
+ argsArray = context.argsArray;
+ var curr = context.iCurr;
+ var iFirstSave = curr - 3;
+ var iFirstTransform = curr - 2;
+ var iFirstPIMXO = curr - 1;
+ var count = Math.floor((i - iFirstSave) / 4);
+ count = handlePaintSolidColorImageMask(iFirstSave, count, fnArray, argsArray);
+
+ if (count < MIN_IMAGES_IN_MASKS_BLOCK) {
+ return i - (i - iFirstSave) % 4;
+ }
+
+ var q;
+ var isSameImage = false;
+ var iTransform, transformArgs;
+ var firstPIMXOArg0 = argsArray[iFirstPIMXO][0];
+
+ if (argsArray[iFirstTransform][1] === 0 && argsArray[iFirstTransform][2] === 0) {
+ isSameImage = true;
+ var firstTransformArg0 = argsArray[iFirstTransform][0];
+ var firstTransformArg3 = argsArray[iFirstTransform][3];
+ iTransform = iFirstTransform + 4;
+ var iPIMXO = iFirstPIMXO + 4;
+
+ for (q = 1; q < count; q++, iTransform += 4, iPIMXO += 4) {
+ transformArgs = argsArray[iTransform];
+
+ if (argsArray[iPIMXO][0] !== firstPIMXOArg0 || transformArgs[0] !== firstTransformArg0 || transformArgs[1] !== 0 || transformArgs[2] !== 0 || transformArgs[3] !== firstTransformArg3) {
+ if (q < MIN_IMAGES_IN_MASKS_BLOCK) {
+ isSameImage = false;
+ } else {
+ count = q;
+ }
+
+ break;
+ }
+ }
+ }
+
+ if (isSameImage) {
+ count = Math.min(count, MAX_SAME_IMAGES_IN_MASKS_BLOCK);
+ var positions = new Float32Array(count * 2);
+ iTransform = iFirstTransform;
+
+ for (q = 0; q < count; q++, iTransform += 4) {
+ transformArgs = argsArray[iTransform];
+ positions[q << 1] = transformArgs[4];
+ positions[(q << 1) + 1] = transformArgs[5];
+ }
+
+ fnArray.splice(iFirstSave, count * 4, _util.OPS.paintImageMaskXObjectRepeat);
+ argsArray.splice(iFirstSave, count * 4, [firstPIMXOArg0, firstTransformArg0, firstTransformArg3, positions]);
+ } else {
+ count = Math.min(count, MAX_IMAGES_IN_MASKS_BLOCK);
+ var images = [];
+
+ for (q = 0; q < count; q++) {
+ transformArgs = argsArray[iFirstTransform + (q << 2)];
+ var maskParams = argsArray[iFirstPIMXO + (q << 2)][0];
+ images.push({
+ data: maskParams.data,
+ width: maskParams.width,
+ height: maskParams.height,
+ transform: transformArgs
+ });
+ }
+
+ fnArray.splice(iFirstSave, count * 4, _util.OPS.paintImageMaskXObjectGroup);
+ argsArray.splice(iFirstSave, count * 4, [images]);
+ }
+
+ return iFirstSave + 1;
+ });
+ addState(InitialState, [_util.OPS.save, _util.OPS.transform, _util.OPS.paintImageXObject, _util.OPS.restore], function (context) {
+ var argsArray = context.argsArray;
+ var iFirstTransform = context.iCurr - 2;
+ return argsArray[iFirstTransform][1] === 0 && argsArray[iFirstTransform][2] === 0;
+ }, function iterateImageGroup(context, i) {
+ var fnArray = context.fnArray,
+ argsArray = context.argsArray;
+ var iFirstSave = context.iCurr - 3;
+ var pos = (i - iFirstSave) % 4;
+
+ switch (pos) {
+ case 0:
+ return fnArray[i] === _util.OPS.save;
+
+ case 1:
+ if (fnArray[i] !== _util.OPS.transform) {
+ return false;
+ }
+
+ var iFirstTransform = context.iCurr - 2;
+ var firstTransformArg0 = argsArray[iFirstTransform][0];
+ var firstTransformArg3 = argsArray[iFirstTransform][3];
+
+ if (argsArray[i][0] !== firstTransformArg0 || argsArray[i][1] !== 0 || argsArray[i][2] !== 0 || argsArray[i][3] !== firstTransformArg3) {
+ return false;
+ }
+
+ return true;
+
+ case 2:
+ if (fnArray[i] !== _util.OPS.paintImageXObject) {
+ return false;
+ }
+
+ var iFirstPIXO = context.iCurr - 1;
+ var firstPIXOArg0 = argsArray[iFirstPIXO][0];
+
+ if (argsArray[i][0] !== firstPIXOArg0) {
+ return false;
+ }
+
+ return true;
+
+ case 3:
+ return fnArray[i] === _util.OPS.restore;
+ }
+
+ throw new Error("iterateImageGroup - invalid pos: ".concat(pos));
+ }, function (context, i) {
+ var MIN_IMAGES_IN_BLOCK = 3;
+ var MAX_IMAGES_IN_BLOCK = 1000;
+ var fnArray = context.fnArray,
+ argsArray = context.argsArray;
+ var curr = context.iCurr;
+ var iFirstSave = curr - 3;
+ var iFirstTransform = curr - 2;
+ var iFirstPIXO = curr - 1;
+ var firstPIXOArg0 = argsArray[iFirstPIXO][0];
+ var firstTransformArg0 = argsArray[iFirstTransform][0];
+ var firstTransformArg3 = argsArray[iFirstTransform][3];
+ var count = Math.min(Math.floor((i - iFirstSave) / 4), MAX_IMAGES_IN_BLOCK);
+
+ if (count < MIN_IMAGES_IN_BLOCK) {
+ return i - (i - iFirstSave) % 4;
+ }
+
+ var positions = new Float32Array(count * 2);
+ var iTransform = iFirstTransform;
+
+ for (var q = 0; q < count; q++, iTransform += 4) {
+ var transformArgs = argsArray[iTransform];
+ positions[q << 1] = transformArgs[4];
+ positions[(q << 1) + 1] = transformArgs[5];
+ }
+
+ var args = [firstPIXOArg0, firstTransformArg0, firstTransformArg3, positions];
+ fnArray.splice(iFirstSave, count * 4, _util.OPS.paintImageXObjectRepeat);
+ argsArray.splice(iFirstSave, count * 4, args);
+ return iFirstSave + 1;
+ });
+ addState(InitialState, [_util.OPS.beginText, _util.OPS.setFont, _util.OPS.setTextMatrix, _util.OPS.showText, _util.OPS.endText], null, function iterateShowTextGroup(context, i) {
+ var fnArray = context.fnArray,
+ argsArray = context.argsArray;
+ var iFirstSave = context.iCurr - 4;
+ var pos = (i - iFirstSave) % 5;
+
+ switch (pos) {
+ case 0:
+ return fnArray[i] === _util.OPS.beginText;
+
+ case 1:
+ return fnArray[i] === _util.OPS.setFont;
+
+ case 2:
+ return fnArray[i] === _util.OPS.setTextMatrix;
+
+ case 3:
+ if (fnArray[i] !== _util.OPS.showText) {
+ return false;
+ }
+
+ var iFirstSetFont = context.iCurr - 3;
+ var firstSetFontArg0 = argsArray[iFirstSetFont][0];
+ var firstSetFontArg1 = argsArray[iFirstSetFont][1];
+
+ if (argsArray[i][0] !== firstSetFontArg0 || argsArray[i][1] !== firstSetFontArg1) {
+ return false;
+ }
+
+ return true;
+
+ case 4:
+ return fnArray[i] === _util.OPS.endText;
+ }
+
+ throw new Error("iterateShowTextGroup - invalid pos: ".concat(pos));
+ }, function (context, i) {
+ var MIN_CHARS_IN_BLOCK = 3;
+ var MAX_CHARS_IN_BLOCK = 1000;
+ var fnArray = context.fnArray,
+ argsArray = context.argsArray;
+ var curr = context.iCurr;
+ var iFirstBeginText = curr - 4;
+ var iFirstSetFont = curr - 3;
+ var iFirstSetTextMatrix = curr - 2;
+ var iFirstShowText = curr - 1;
+ var iFirstEndText = curr;
+ var firstSetFontArg0 = argsArray[iFirstSetFont][0];
+ var firstSetFontArg1 = argsArray[iFirstSetFont][1];
+ var count = Math.min(Math.floor((i - iFirstBeginText) / 5), MAX_CHARS_IN_BLOCK);
+
+ if (count < MIN_CHARS_IN_BLOCK) {
+ return i - (i - iFirstBeginText) % 5;
+ }
+
+ var iFirst = iFirstBeginText;
+
+ if (iFirstBeginText >= 4 && fnArray[iFirstBeginText - 4] === fnArray[iFirstSetFont] && fnArray[iFirstBeginText - 3] === fnArray[iFirstSetTextMatrix] && fnArray[iFirstBeginText - 2] === fnArray[iFirstShowText] && fnArray[iFirstBeginText - 1] === fnArray[iFirstEndText] && argsArray[iFirstBeginText - 4][0] === firstSetFontArg0 && argsArray[iFirstBeginText - 4][1] === firstSetFontArg1) {
+ count++;
+ iFirst -= 5;
+ }
+
+ var iEndText = iFirst + 4;
+
+ for (var q = 1; q < count; q++) {
+ fnArray.splice(iEndText, 3);
+ argsArray.splice(iEndText, 3);
+ iEndText += 2;
+ }
+
+ return iEndText + 1;
+ });
+
+ function QueueOptimizer(queue) {
+ this.queue = queue;
+ this.state = null;
+ this.context = {
+ iCurr: 0,
+ fnArray: queue.fnArray,
+ argsArray: queue.argsArray
+ };
+ this.match = null;
+ this.lastProcessed = 0;
+ }
+
+ QueueOptimizer.prototype = {
+ _optimize: function _optimize() {
+ var fnArray = this.queue.fnArray;
+ var i = this.lastProcessed,
+ ii = fnArray.length;
+ var state = this.state;
+ var match = this.match;
+
+ if (!state && !match && i + 1 === ii && !InitialState[fnArray[i]]) {
+ this.lastProcessed = ii;
+ return;
+ }
+
+ var context = this.context;
+
+ while (i < ii) {
+ if (match) {
+ var iterate = (0, match.iterateFn)(context, i);
+
+ if (iterate) {
+ i++;
+ continue;
+ }
+
+ i = (0, match.processFn)(context, i + 1);
+ ii = fnArray.length;
+ match = null;
+ state = null;
+
+ if (i >= ii) {
+ break;
+ }
+ }
+
+ state = (state || InitialState)[fnArray[i]];
+
+ if (!state || Array.isArray(state)) {
+ i++;
+ continue;
+ }
+
+ context.iCurr = i;
+ i++;
+
+ if (state.checkFn && !(0, state.checkFn)(context)) {
+ state = null;
+ continue;
+ }
+
+ match = state;
+ state = null;
+ }
+
+ this.state = state;
+ this.match = match;
+ this.lastProcessed = i;
+ },
+ push: function push(fn, args) {
+ this.queue.fnArray.push(fn);
+ this.queue.argsArray.push(args);
+
+ this._optimize();
+ },
+ flush: function flush() {
+ while (this.match) {
+ var length = this.queue.fnArray.length;
+ this.lastProcessed = (0, this.match.processFn)(this.context, length);
+ this.match = null;
+ this.state = null;
+
+ this._optimize();
+ }
+ },
+ reset: function reset() {
+ this.state = null;
+ this.match = null;
+ this.lastProcessed = 0;
+ }
+ };
+ return QueueOptimizer;
+}();
+
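+// NullOptimizer is the pass-through used when optimization is not wanted (e.g. for the 'oplist' intent); it appends operators verbatim.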
+var NullOptimizer = function NullOptimizerClosure() {
+ function NullOptimizer(queue) {
+ this.queue = queue;
+ }
+
+ NullOptimizer.prototype = {
+ push: function push(fn, args) {
+ this.queue.fnArray.push(fn);
+ this.queue.argsArray.push(args);
+ },
+ flush: function flush() {},
+ reset: function reset() {}
+ };
+ return NullOptimizer;
+}();
+
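+// OperatorList buffers rendering operators and, when a messageHandler is present, flushes them to the main thread in chunks of roughly CHUNK_SIZE operations, preferring to break at restore/endText once CHUNK_SIZE_ABOUT is reached.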
+var OperatorList = function OperatorListClosure() {
+ var CHUNK_SIZE = 1000;
+ var CHUNK_SIZE_ABOUT = CHUNK_SIZE - 5;
+
+ function OperatorList(intent, messageHandler, pageIndex) {
+ this.messageHandler = messageHandler;
+ this.fnArray = [];
+ this.argsArray = [];
+
+ if (messageHandler && intent !== 'oplist') {
+ this.optimizer = new QueueOptimizer(this);
+ } else {
+ this.optimizer = new NullOptimizer(this);
+ }
+
+ this.dependencies = Object.create(null);
+ this._totalLength = 0;
+ this.pageIndex = pageIndex;
+ this.intent = intent;
+ this.weight = 0;
+ }
+
+ OperatorList.prototype = {
+ get length() {
+ return this.argsArray.length;
+ },
+
+ get totalLength() {
+ return this._totalLength + this.length;
+ },
+
+ addOp: function addOp(fn, args) {
+ this.optimizer.push(fn, args);
+ this.weight++;
+
+ if (this.messageHandler) {
+ if (this.weight >= CHUNK_SIZE) {
+ this.flush();
+ } else if (this.weight >= CHUNK_SIZE_ABOUT && (fn === _util.OPS.restore || fn === _util.OPS.endText)) {
+ this.flush();
+ }
+ }
+ },
+ addDependency: function addDependency(dependency) {
+ if (dependency in this.dependencies) {
+ return;
+ }
+
+ this.dependencies[dependency] = true;
+ this.addOp(_util.OPS.dependency, [dependency]);
+ },
+ addDependencies: function addDependencies(dependencies) {
+ for (var key in dependencies) {
+ this.addDependency(key);
+ }
+ },
+ addOpList: function addOpList(opList) {
+ Object.assign(this.dependencies, opList.dependencies);
+
+ for (var i = 0, ii = opList.length; i < ii; i++) {
+ this.addOp(opList.fnArray[i], opList.argsArray[i]);
+ }
+ },
+ getIR: function getIR() {
+ return {
+ fnArray: this.fnArray,
+ argsArray: this.argsArray,
+ length: this.length
+ };
+ },
+
+ get _transfers() {
+ var transfers = [];
+ var fnArray = this.fnArray,
+ argsArray = this.argsArray,
+ length = this.length;
+
+ for (var i = 0; i < length; i++) {
+ switch (fnArray[i]) {
+ case _util.OPS.paintInlineImageXObject:
+ case _util.OPS.paintInlineImageXObjectGroup:
+ case _util.OPS.paintImageMaskXObject:
+ var arg = argsArray[i][0];
+
+ if (!arg.cached) {
+ transfers.push(arg.data.buffer);
+ }
+
+ break;
+ }
+ }
+
+ return transfers;
+ },
+
+ flush: function flush() {
+ var lastChunk = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
+ this.optimizer.flush();
+ var length = this.length;
+ this._totalLength += length;
+ this.messageHandler.send('RenderPageChunk', {
+ operatorList: {
+ fnArray: this.fnArray,
+ argsArray: this.argsArray,
+ lastChunk: lastChunk,
+ length: length
+ },
+ pageIndex: this.pageIndex,
+ intent: this.intent
+ }, this._transfers);
+ this.dependencies = Object.create(null);
+ this.fnArray.length = 0;
+ this.argsArray.length = 0;
+ this.weight = 0;
+ this.optimizer.reset();
+ }
+ };
+ return OperatorList;
+}();
+
+exports.OperatorList = OperatorList;
+
+/***/ }),
+/* 172 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.PartialEvaluator = void 0;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(2));
+
+var _util = __w_pdfjs_require__(5);
+
+var _cmap = __w_pdfjs_require__(173);
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _fonts = __w_pdfjs_require__(174);
+
+var _encodings = __w_pdfjs_require__(177);
+
+var _unicode = __w_pdfjs_require__(180);
+
+var _standard_fonts = __w_pdfjs_require__(179);
+
+var _pattern = __w_pdfjs_require__(183);
+
+var _parser = __w_pdfjs_require__(157);
+
+var _bidi = __w_pdfjs_require__(184);
+
+var _colorspace = __w_pdfjs_require__(169);
+
+var _stream = __w_pdfjs_require__(158);
+
+var _glyphlist = __w_pdfjs_require__(178);
+
+var _core_utils = __w_pdfjs_require__(154);
+
+var _metrics = __w_pdfjs_require__(185);
+
+var _function = __w_pdfjs_require__(186);
+
+var _jpeg_stream = __w_pdfjs_require__(164);
+
+var _murmurhash = __w_pdfjs_require__(188);
+
+var _image_utils = __w_pdfjs_require__(189);
+
+var _operator_list = __w_pdfjs_require__(171);
+
+var _image = __w_pdfjs_require__(190);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
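+// PartialEvaluator walks a page's content stream in the worker, translating PDF operators into an OperatorList and resolving the resources (fonts, images, patterns, graphics states) they reference.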
+var PartialEvaluator = function PartialEvaluatorClosure() {
+ var DefaultPartialEvaluatorOptions = {
+ forceDataSchema: false,
+ maxImageSize: -1,
+ disableFontFace: false,
+ nativeImageDecoderSupport: _util.NativeImageDecoding.DECODE,
+ ignoreErrors: false,
+ isEvalSupported: true
+ };
+
+ function PartialEvaluator(_ref) {
+ var _this = this;
+
+ var xref = _ref.xref,
+ handler = _ref.handler,
+ pageIndex = _ref.pageIndex,
+ idFactory = _ref.idFactory,
+ fontCache = _ref.fontCache,
+ builtInCMapCache = _ref.builtInCMapCache,
+ _ref$options = _ref.options,
+ options = _ref$options === void 0 ? null : _ref$options,
+ pdfFunctionFactory = _ref.pdfFunctionFactory;
+ this.xref = xref;
+ this.handler = handler;
+ this.pageIndex = pageIndex;
+ this.idFactory = idFactory;
+ this.fontCache = fontCache;
+ this.builtInCMapCache = builtInCMapCache;
+ this.options = options || DefaultPartialEvaluatorOptions;
+ this.pdfFunctionFactory = pdfFunctionFactory;
+ this.parsingType3Font = false;
+
+ this.fetchBuiltInCMap =
+ /*#__PURE__*/
+ function () {
+ var _ref2 = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee(name) {
+ var data;
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ if (!_this.builtInCMapCache.has(name)) {
+ _context.next = 2;
+ break;
+ }
+
+ return _context.abrupt("return", _this.builtInCMapCache.get(name));
+
+ case 2:
+ _context.next = 4;
+ return _this.handler.sendWithPromise('FetchBuiltInCMap', {
+ name: name
+ });
+
+ case 4:
+ data = _context.sent;
+
+ if (data.compressionType !== _util.CMapCompressionType.NONE) {
+ _this.builtInCMapCache.set(name, data);
+ }
+
+ return _context.abrupt("return", data);
+
+ case 7:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee);
+ }));
+
+ return function (_x) {
+ return _ref2.apply(this, arguments);
+ };
+ }();
+ }
+
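+ // TimeSlotManager bounds how long a single parsing pass may run (TIME_SLOT_DURATION_MS ms) before yielding to the event loop; Date.now() is only consulted every CHECK_TIME_EVERY calls to keep the check cheap.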
+ var TIME_SLOT_DURATION_MS = 20;
+ var CHECK_TIME_EVERY = 100;
+
+ function TimeSlotManager() {
+ this.reset();
+ }
+
+ TimeSlotManager.prototype = {
+ check: function TimeSlotManager_check() {
+ if (++this.checked < CHECK_TIME_EVERY) {
+ return false;
+ }
+
+ this.checked = 0;
+ return this.endTime <= Date.now();
+ },
+ reset: function TimeSlotManager_reset() {
+ this.endTime = Date.now() + TIME_SLOT_DURATION_MS;
+ this.checked = 0;
+ }
+ };
+
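+ // Maps a PDF blend-mode name to the corresponding canvas compositing keyword, warning and falling back to 'source-over' for anything unrecognized.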
+ function normalizeBlendMode(value) {
+ if (!(0, _primitives.isName)(value)) {
+ return 'source-over';
+ }
+
+ switch (value.name) {
+ case 'Normal':
+ case 'Compatible':
+ return 'source-over';
+
+ case 'Multiply':
+ return 'multiply';
+
+ case 'Screen':
+ return 'screen';
+
+ case 'Overlay':
+ return 'overlay';
+
+ case 'Darken':
+ return 'darken';
+
+ case 'Lighten':
+ return 'lighten';
+
+ case 'ColorDodge':
+ return 'color-dodge';
+
+ case 'ColorBurn':
+ return 'color-burn';
+
+ case 'HardLight':
+ return 'hard-light';
+
+ case 'SoftLight':
+ return 'soft-light';
+
+ case 'Difference':
+ return 'difference';
+
+ case 'Exclusion':
+ return 'exclusion';
+
+ case 'Hue':
+ return 'hue';
+
+ case 'Saturation':
+ return 'saturation';
+
+ case 'Color':
+ return 'color';
+
+ case 'Luminosity':
+ return 'luminosity';
+ }
+
+ (0, _util.warn)('Unsupported blend mode: ' + value.name);
+ return 'source-over';
+ }
+
+ var deferred = Promise.resolve();
+ var TILING_PATTERN = 1,
+ SHADING_PATTERN = 2;
+ PartialEvaluator.prototype = {
+ clone: function clone() {
+ var newOptions = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : DefaultPartialEvaluatorOptions;
+ var newEvaluator = Object.create(this);
+ newEvaluator.options = newOptions;
+ return newEvaluator;
+ },
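+ // Scans the resource dictionary (including nested XObject resources) for any ExtGState with a non-Normal blend mode, tracking visited objects in `processed` to avoid reference cycles.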
+ hasBlendModes: function PartialEvaluator_hasBlendModes(resources) {
+ if (!(0, _primitives.isDict)(resources)) {
+ return false;
+ }
+
+ var processed = Object.create(null);
+
+ if (resources.objId) {
+ processed[resources.objId] = true;
+ }
+
+ var nodes = [resources],
+ xref = this.xref;
+
+ while (nodes.length) {
+ var key, i, ii;
+ var node = nodes.shift();
+ var graphicStates = node.get('ExtGState');
+
+ if ((0, _primitives.isDict)(graphicStates)) {
+ var graphicStatesKeys = graphicStates.getKeys();
+
+ for (i = 0, ii = graphicStatesKeys.length; i < ii; i++) {
+ key = graphicStatesKeys[i];
+ var graphicState = graphicStates.get(key);
+ var bm = graphicState.get('BM');
+
+ if ((0, _primitives.isName)(bm) && bm.name !== 'Normal') {
+ return true;
+ }
+ }
+ }
+
+ var xObjects = node.get('XObject');
+
+ if (!(0, _primitives.isDict)(xObjects)) {
+ continue;
+ }
+
+ var xObjectsKeys = xObjects.getKeys();
+
+ for (i = 0, ii = xObjectsKeys.length; i < ii; i++) {
+ key = xObjectsKeys[i];
+ var xObject = xObjects.getRaw(key);
+
+ if ((0, _primitives.isRef)(xObject)) {
+ if (processed[xObject.toString()]) {
+ continue;
+ }
+
+ xObject = xref.fetch(xObject);
+ }
+
+ if (!(0, _primitives.isStream)(xObject)) {
+ continue;
+ }
+
+ if (xObject.dict.objId) {
+ if (processed[xObject.dict.objId]) {
+ continue;
+ }
+
+ processed[xObject.dict.objId] = true;
+ }
+
+ var xResources = xObject.dict.get('Resources');
+
+ if ((0, _primitives.isDict)(xResources) && (!xResources.objId || !processed[xResources.objId])) {
+ nodes.push(xResources);
+
+ if (xResources.objId) {
+ processed[xResources.objId] = true;
+ }
+ }
+ }
+ }
+
+ return false;
+ },
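+ // Emits the begin/end operators for a Form XObject, including transparency-group bookkeeping (isolated/knockout flags, group color space, soft-mask backdrop), then recurses into its content stream.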
+ buildFormXObject: function PartialEvaluator_buildFormXObject(resources, xobj, smask, operatorList, task, initialState) {
+ var dict = xobj.dict;
+ var matrix = dict.getArray('Matrix');
+ var bbox = dict.getArray('BBox');
+
+ if (Array.isArray(bbox) && bbox.length === 4) {
+ bbox = _util.Util.normalizeRect(bbox);
+ } else {
+ bbox = null;
+ }
+
+ var group = dict.get('Group');
+
+ if (group) {
+ var groupOptions = {
+ matrix: matrix,
+ bbox: bbox,
+ smask: smask,
+ isolated: false,
+ knockout: false
+ };
+ var groupSubtype = group.get('S');
+ var colorSpace = null;
+
+ if ((0, _primitives.isName)(groupSubtype, 'Transparency')) {
+ groupOptions.isolated = group.get('I') || false;
+ groupOptions.knockout = group.get('K') || false;
+
+ if (group.has('CS')) {
+ colorSpace = _colorspace.ColorSpace.parse(group.get('CS'), this.xref, resources, this.pdfFunctionFactory);
+ }
+ }
+
+ if (smask && smask.backdrop) {
+ colorSpace = colorSpace || _colorspace.ColorSpace.singletons.rgb;
+ smask.backdrop = colorSpace.getRgb(smask.backdrop, 0);
+ }
+
+ operatorList.addOp(_util.OPS.beginGroup, [groupOptions]);
+ }
+
+ operatorList.addOp(_util.OPS.paintFormXObjectBegin, [matrix, bbox]);
+ return this.getOperatorList({
+ stream: xobj,
+ task: task,
+ resources: dict.get('Resources') || resources,
+ operatorList: operatorList,
+ initialState: initialState
+ }).then(function () {
+ operatorList.addOp(_util.OPS.paintFormXObjectEnd, []);
+
+ if (group) {
+ operatorList.addOp(_util.OPS.endGroup, [groupOptions]);
+ }
+ });
+ },
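+ // Decodes an image XObject or inline image and adds the matching paint operator: image masks become paintImageMaskXObject, small inline images are embedded directly, JPEGs may be handed to the main thread for native decoding, and everything else is decoded in the worker and transferred via an 'obj' message.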
+ buildPaintImageXObject: function () {
+ var _buildPaintImageXObject = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee2(_ref3) {
+ var _this2 = this;
+
+ var resources, image, _ref3$isInline, isInline, operatorList, cacheKey, imageCache, _ref3$forceDisableNat, forceDisableNativeImageDecoder, dict, w, h, maxImageSize, imageMask, imgData, args, width, height, bitStrideLength, imgArray, decode, softMask, mask, SMALL_IMAGE_DIMENSIONS, imageObj, nativeImageDecoderSupport, objId, nativeImageDecoder, imgPromise;
+
+ return _regenerator["default"].wrap(function _callee2$(_context2) {
+ while (1) {
+ switch (_context2.prev = _context2.next) {
+ case 0:
+ resources = _ref3.resources, image = _ref3.image, _ref3$isInline = _ref3.isInline, isInline = _ref3$isInline === void 0 ? false : _ref3$isInline, operatorList = _ref3.operatorList, cacheKey = _ref3.cacheKey, imageCache = _ref3.imageCache, _ref3$forceDisableNat = _ref3.forceDisableNativeImageDecoder, forceDisableNativeImageDecoder = _ref3$forceDisableNat === void 0 ? false : _ref3$forceDisableNat;
+ dict = image.dict;
+ w = dict.get('Width', 'W');
+ h = dict.get('Height', 'H');
+
+ if (!(!(w && (0, _util.isNum)(w)) || !(h && (0, _util.isNum)(h)))) {
+ _context2.next = 7;
+ break;
+ }
+
+ (0, _util.warn)('Image dimensions are missing, or not numbers.');
+ return _context2.abrupt("return", undefined);
+
+ case 7:
+ maxImageSize = this.options.maxImageSize;
+
+ if (!(maxImageSize !== -1 && w * h > maxImageSize)) {
+ _context2.next = 11;
+ break;
+ }
+
+ (0, _util.warn)('Image exceeded maximum allowed size and was removed.');
+ return _context2.abrupt("return", undefined);
+
+ case 11:
+ imageMask = dict.get('ImageMask', 'IM') || false;
+
+ if (!imageMask) {
+ _context2.next = 24;
+ break;
+ }
+
+ width = dict.get('Width', 'W');
+ height = dict.get('Height', 'H');
+ bitStrideLength = width + 7 >> 3;
+ imgArray = image.getBytes(bitStrideLength * height, true);
+ decode = dict.getArray('Decode', 'D');
+ imgData = _image.PDFImage.createMask({
+ imgArray: imgArray,
+ width: width,
+ height: height,
+ imageIsFromDecodeStream: image instanceof _stream.DecodeStream,
+ inverseDecode: !!decode && decode[0] > 0
+ });
+ imgData.cached = !!cacheKey;
+ args = [imgData];
+ operatorList.addOp(_util.OPS.paintImageMaskXObject, args);
+
+ if (cacheKey) {
+ imageCache[cacheKey] = {
+ fn: _util.OPS.paintImageMaskXObject,
+ args: args
+ };
+ }
+
+ return _context2.abrupt("return", undefined);
+
+ case 24:
+ softMask = dict.get('SMask', 'SM') || false;
+ mask = dict.get('Mask') || false;
+ SMALL_IMAGE_DIMENSIONS = 200;
+
+ if (!(isInline && !softMask && !mask && !(image instanceof _jpeg_stream.JpegStream) && w + h < SMALL_IMAGE_DIMENSIONS)) {
+ _context2.next = 32;
+ break;
+ }
+
+ imageObj = new _image.PDFImage({
+ xref: this.xref,
+ res: resources,
+ image: image,
+ isInline: isInline,
+ pdfFunctionFactory: this.pdfFunctionFactory
+ });
+ imgData = imageObj.createImageData(true);
+ operatorList.addOp(_util.OPS.paintInlineImageXObject, [imgData]);
+ return _context2.abrupt("return", undefined);
+
+ case 32:
+ nativeImageDecoderSupport = forceDisableNativeImageDecoder ? _util.NativeImageDecoding.NONE : this.options.nativeImageDecoderSupport;
+ objId = "img_".concat(this.idFactory.createObjId());
+
+ if (this.parsingType3Font) {
+ (0, _util.assert)(nativeImageDecoderSupport === _util.NativeImageDecoding.NONE, 'Type3 image resources should be completely decoded in the worker.');
+ objId = "".concat(this.idFactory.getDocId(), "_type3res_").concat(objId);
+ }
+
+ if (!(nativeImageDecoderSupport !== _util.NativeImageDecoding.NONE && !softMask && !mask && image instanceof _jpeg_stream.JpegStream && _image_utils.NativeImageDecoder.isSupported(image, this.xref, resources, this.pdfFunctionFactory))) {
+ _context2.next = 37;
+ break;
+ }
+
+ return _context2.abrupt("return", this.handler.sendWithPromise('obj', [objId, this.pageIndex, 'JpegStream', image.getIR(this.options.forceDataSchema)]).then(function () {
+ operatorList.addDependency(objId);
+ args = [objId, w, h];
+ operatorList.addOp(_util.OPS.paintJpegXObject, args);
+
+ if (cacheKey) {
+ imageCache[cacheKey] = {
+ fn: _util.OPS.paintJpegXObject,
+ args: args
+ };
+ }
+ }, function (reason) {
+ (0, _util.warn)('Native JPEG decoding failed -- trying to recover: ' + (reason && reason.message));
+ return _this2.buildPaintImageXObject({
+ resources: resources,
+ image: image,
+ isInline: isInline,
+ operatorList: operatorList,
+ cacheKey: cacheKey,
+ imageCache: imageCache,
+ forceDisableNativeImageDecoder: true
+ });
+ }));
+
+ case 37:
+ nativeImageDecoder = null;
+
+ if (nativeImageDecoderSupport === _util.NativeImageDecoding.DECODE && (image instanceof _jpeg_stream.JpegStream || mask instanceof _jpeg_stream.JpegStream || softMask instanceof _jpeg_stream.JpegStream)) {
+ nativeImageDecoder = new _image_utils.NativeImageDecoder({
+ xref: this.xref,
+ resources: resources,
+ handler: this.handler,
+ forceDataSchema: this.options.forceDataSchema,
+ pdfFunctionFactory: this.pdfFunctionFactory
+ });
+ }
+
+ operatorList.addDependency(objId);
+ args = [objId, w, h];
+ imgPromise = _image.PDFImage.buildImage({
+ handler: this.handler,
+ xref: this.xref,
+ res: resources,
+ image: image,
+ isInline: isInline,
+ nativeDecoder: nativeImageDecoder,
+ pdfFunctionFactory: this.pdfFunctionFactory
+ }).then(function (imageObj) {
+ var imgData = imageObj.createImageData(false);
+
+ if (_this2.parsingType3Font) {
+ return _this2.handler.sendWithPromise('commonobj', [objId, 'FontType3Res', imgData], [imgData.data.buffer]);
+ }
+
+ _this2.handler.send('obj', [objId, _this2.pageIndex, 'Image', imgData], [imgData.data.buffer]);
+
+ return undefined;
+ })["catch"](function (reason) {
+ (0, _util.warn)('Unable to decode image: ' + reason);
+
+ if (_this2.parsingType3Font) {
+ return _this2.handler.sendWithPromise('commonobj', [objId, 'FontType3Res', null]);
+ }
+
+ _this2.handler.send('obj', [objId, _this2.pageIndex, 'Image', null]);
+
+ return undefined;
+ });
+
+ if (!this.parsingType3Font) {
+ _context2.next = 45;
+ break;
+ }
+
+ _context2.next = 45;
+ return imgPromise;
+
+ case 45:
+ operatorList.addOp(_util.OPS.paintImageXObject, args);
+
+ if (cacheKey) {
+ imageCache[cacheKey] = {
+ fn: _util.OPS.paintImageXObject,
+ args: args
+ };
+ }
+
+ return _context2.abrupt("return", undefined);
+
+ case 48:
+ case "end":
+ return _context2.stop();
+ }
+ }
+ }, _callee2, this);
+ }));
+
+ function buildPaintImageXObject(_x2) {
+ return _buildPaintImageXObject.apply(this, arguments);
+ }
+
+ return buildPaintImageXObject;
+ }(),
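+ // Builds the transparency group for an ExtGState soft mask, precomputing a 256-entry transfer map when a TR function is present.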
+ handleSMask: function PartialEvaluator_handleSmask(smask, resources, operatorList, task, stateManager) {
+ var smaskContent = smask.get('G');
+ var smaskOptions = {
+ subtype: smask.get('S').name,
+ backdrop: smask.get('BC')
+ };
+ var transferObj = smask.get('TR');
+
+ if ((0, _function.isPDFFunction)(transferObj)) {
+ var transferFn = this.pdfFunctionFactory.create(transferObj);
+ var transferMap = new Uint8Array(256);
+ var tmp = new Float32Array(1);
+
+ for (var i = 0; i < 256; i++) {
+ tmp[0] = i / 255;
+ transferFn(tmp, 0, tmp, 0);
+ transferMap[i] = tmp[0] * 255 | 0;
+ }
+
+ smaskOptions.transferMap = transferMap;
+ }
+
+ return this.buildFormXObject(resources, smaskContent, smaskOptions, operatorList, task, stateManager.state.clone());
+ },
+ handleTilingType: function handleTilingType(fn, args, resources, pattern, patternDict, operatorList, task) {
+ var _this3 = this;
+
+ var tilingOpList = new _operator_list.OperatorList();
+ var resourcesArray = [patternDict.get('Resources'), resources];
+
+ var patternResources = _primitives.Dict.merge(this.xref, resourcesArray);
+
+ return this.getOperatorList({
+ stream: pattern,
+ task: task,
+ resources: patternResources,
+ operatorList: tilingOpList
+ }).then(function () {
+ return (0, _pattern.getTilingPatternIR)({
+ fnArray: tilingOpList.fnArray,
+ argsArray: tilingOpList.argsArray
+ }, patternDict, args);
+ }).then(function (tilingPatternIR) {
+ operatorList.addDependencies(tilingOpList.dependencies);
+ operatorList.addOp(fn, tilingPatternIR);
+ }, function (reason) {
+ if (_this3.options.ignoreErrors) {
+ _this3.handler.send('UnsupportedFeature', {
+ featureId: _util.UNSUPPORTED_FEATURES.unknown
+ });
+
+ (0, _util.warn)("handleTilingType - ignoring pattern: \"".concat(reason, "\"."));
+ return;
+ }
+
+ throw reason;
+ });
+ },
+ handleSetFont: function PartialEvaluator_handleSetFont(resources, fontArgs, fontRef, operatorList, task, state) {
+ var _this4 = this;
+
+ var fontName;
+
+ if (fontArgs) {
+ fontArgs = fontArgs.slice();
+ fontName = fontArgs[0].name;
+ }
+
+ return this.loadFont(fontName, fontRef, resources).then(function (translated) {
+ if (!translated.font.isType3Font) {
+ return translated;
+ }
+
+ return translated.loadType3Data(_this4, resources, operatorList, task).then(function () {
+ return translated;
+ })["catch"](function (reason) {
+ _this4.handler.send('UnsupportedFeature', {
+ featureId: _util.UNSUPPORTED_FEATURES.font
+ });
+
+ return new TranslatedFont('g_font_error', new _fonts.ErrorFont('Type3 font load error: ' + reason), translated.font);
+ });
+ }).then(function (translated) {
+ state.font = translated.font;
+ translated.send(_this4.handler);
+ return translated.loadedName;
+ });
+ },
+ handleText: function handleText(chars, state) {
+ var font = state.font;
+ var glyphs = font.charsToGlyphs(chars);
+
+ if (font.data) {
+ var isAddToPathSet = !!(state.textRenderingMode & _util.TextRenderingMode.ADD_TO_PATH_FLAG);
+
+ if (isAddToPathSet || state.fillColorSpace.name === 'Pattern' || font.disableFontFace || this.options.disableFontFace) {
+ PartialEvaluator.buildFontPaths(font, glyphs, this.handler);
+ }
+ }
+
+ return glyphs;
+ },
+ setGState: function PartialEvaluator_setGState(resources, gState, operatorList, task, stateManager) {
+ var _this5 = this;
+
+ var gStateObj = [];
+ var gStateKeys = gState.getKeys();
+ var promise = Promise.resolve();
+
+ var _loop = function _loop() {
+ var key = gStateKeys[i];
+ var value = gState.get(key);
+
+ switch (key) {
+ case 'Type':
+ break;
+
+ case 'LW':
+ case 'LC':
+ case 'LJ':
+ case 'ML':
+ case 'D':
+ case 'RI':
+ case 'FL':
+ case 'CA':
+ case 'ca':
+ gStateObj.push([key, value]);
+ break;
+
+ case 'Font':
+ promise = promise.then(function () {
+ return _this5.handleSetFont(resources, null, value[0], operatorList, task, stateManager.state).then(function (loadedName) {
+ operatorList.addDependency(loadedName);
+ gStateObj.push([key, [loadedName, value[1]]]);
+ });
+ });
+ break;
+
+ case 'BM':
+ gStateObj.push([key, normalizeBlendMode(value)]);
+ break;
+
+ case 'SMask':
+ if ((0, _primitives.isName)(value, 'None')) {
+ gStateObj.push([key, false]);
+ break;
+ }
+
+ if ((0, _primitives.isDict)(value)) {
+ promise = promise.then(function () {
+ return _this5.handleSMask(value, resources, operatorList, task, stateManager);
+ });
+ gStateObj.push([key, true]);
+ } else {
+ (0, _util.warn)('Unsupported SMask type');
+ }
+
+ break;
+
+ case 'OP':
+ case 'op':
+ case 'OPM':
+ case 'BG':
+ case 'BG2':
+ case 'UCR':
+ case 'UCR2':
+ case 'TR':
+ case 'TR2':
+ case 'HT':
+ case 'SM':
+ case 'SA':
+ case 'AIS':
+ case 'TK':
+ (0, _util.info)('graphic state operator ' + key);
+ break;
+
+ default:
+ (0, _util.info)('Unknown graphic state operator ' + key);
+ break;
+ }
+ };
+
+ for (var i = 0, ii = gStateKeys.length; i < ii; i++) {
+ _loop();
+ }
+
+ return promise.then(function () {
+ if (gStateObj.length > 0) {
+ operatorList.addOp(_util.OPS.setGState, [gStateObj]);
+ }
+ });
+ },
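+ // Resolves a font reference to a TranslatedFont, caching results by reference (or by a descriptor-hash alias for structurally identical fonts) so each font is only translated once per document.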
+ loadFont: function PartialEvaluator_loadFont(fontName, font, resources) {
+ var _this6 = this;
+
+ function errorFont() {
+ return Promise.resolve(new TranslatedFont('g_font_error', new _fonts.ErrorFont('Font ' + fontName + ' is not available'), font));
+ }
+
+ var fontRef,
+ xref = this.xref;
+
+ if (font) {
+ if (!(0, _primitives.isRef)(font)) {
+ throw new Error('The "font" object should be a reference.');
+ }
+
+ fontRef = font;
+ } else {
+ var fontRes = resources.get('Font');
+
+ if (fontRes) {
+ fontRef = fontRes.getRaw(fontName);
+ } else {
+ (0, _util.warn)('fontRes not available');
+ return errorFont();
+ }
+ }
+
+ if (!fontRef) {
+ (0, _util.warn)('fontRef not available');
+ return errorFont();
+ }
+
+ if (this.fontCache.has(fontRef)) {
+ return this.fontCache.get(fontRef);
+ }
+
+ font = xref.fetchIfRef(fontRef);
+
+ if (!(0, _primitives.isDict)(font)) {
+ return errorFont();
+ }
+
+ if (font.translated) {
+ return font.translated;
+ }
+
+ var fontCapability = (0, _util.createPromiseCapability)();
+ var preEvaluatedFont = this.preEvaluateFont(font);
+ var descriptor = preEvaluatedFont.descriptor,
+ hash = preEvaluatedFont.hash;
+ var fontRefIsRef = (0, _primitives.isRef)(fontRef),
+ fontID;
+
+ if (fontRefIsRef) {
+ fontID = fontRef.toString();
+ }
+
+ if (hash && (0, _primitives.isDict)(descriptor)) {
+ if (!descriptor.fontAliases) {
+ descriptor.fontAliases = Object.create(null);
+ }
+
+ var fontAliases = descriptor.fontAliases;
+
+ if (fontAliases[hash]) {
+ var aliasFontRef = fontAliases[hash].aliasRef;
+
+ if (fontRefIsRef && aliasFontRef && this.fontCache.has(aliasFontRef)) {
+ this.fontCache.putAlias(fontRef, aliasFontRef);
+ return this.fontCache.get(fontRef);
+ }
+ } else {
+ fontAliases[hash] = {
+ fontID: _fonts.Font.getFontID()
+ };
+ }
+
+ if (fontRefIsRef) {
+ fontAliases[hash].aliasRef = fontRef;
+ }
+
+ fontID = fontAliases[hash].fontID;
+ }
+
+ if (fontRefIsRef) {
+ this.fontCache.put(fontRef, fontCapability.promise);
+ } else {
+ if (!fontID) {
+ fontID = this.idFactory.createObjId();
+ }
+
+ this.fontCache.put("id_".concat(fontID), fontCapability.promise);
+ }
+
+ (0, _util.assert)(fontID, 'The "fontID" must be defined.');
+ font.loadedName = "".concat(this.idFactory.getDocId(), "_f").concat(fontID);
+ font.translated = fontCapability.promise;
+ var translatedPromise;
+
+ try {
+ translatedPromise = this.translateFont(preEvaluatedFont);
+ } catch (e) {
+ translatedPromise = Promise.reject(e);
+ }
+
+ translatedPromise.then(function (translatedFont) {
+ if (translatedFont.fontType !== undefined) {
+ var xrefFontStats = xref.stats.fontTypes;
+ xrefFontStats[translatedFont.fontType] = true;
+ }
+
+ fontCapability.resolve(new TranslatedFont(font.loadedName, translatedFont, font));
+ })["catch"](function (reason) {
+ _this6.handler.send('UnsupportedFeature', {
+ featureId: _util.UNSUPPORTED_FEATURES.font
+ });
+
+ try {
+ var fontFile3 = descriptor && descriptor.get('FontFile3');
+ var subtype = fontFile3 && fontFile3.get('Subtype');
+ var fontType = (0, _fonts.getFontType)(preEvaluatedFont.type, subtype && subtype.name);
+ var xrefFontStats = xref.stats.fontTypes;
+ xrefFontStats[fontType] = true;
+ } catch (ex) {}
+
+ fontCapability.resolve(new TranslatedFont(font.loadedName, new _fonts.ErrorFont(reason instanceof Error ? reason.message : reason), font));
+ });
+ return fontCapability.promise;
+ },
+ buildPath: function buildPath(operatorList, fn, args) {
+ var parsingText = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false;
+ var lastIndex = operatorList.length - 1;
+
+ if (!args) {
+ args = [];
+ }
+
+ if (lastIndex < 0 || operatorList.fnArray[lastIndex] !== _util.OPS.constructPath) {
+ if (parsingText) {
+ (0, _util.warn)("Encountered path operator \"".concat(fn, "\" inside of a text object."));
+ operatorList.addOp(_util.OPS.save, null);
+ }
+
+ operatorList.addOp(_util.OPS.constructPath, [[fn], args]);
+
+ if (parsingText) {
+ operatorList.addOp(_util.OPS.restore, null);
+ }
+ } else {
+ var opArgs = operatorList.argsArray[lastIndex];
+ opArgs[0].push(fn);
+ Array.prototype.push.apply(opArgs[1], args);
+ }
+ },
+ handleColorN: function () {
+ var _handleColorN = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee3(operatorList, fn, args, cs, patterns, resources, task) {
+ var patternName, pattern, dict, typeNum, color, shading, matrix;
+ return _regenerator["default"].wrap(function _callee3$(_context3) {
+ while (1) {
+ switch (_context3.prev = _context3.next) {
+ case 0:
+ patternName = args[args.length - 1];
+
+ if (!((0, _primitives.isName)(patternName) && (pattern = patterns.get(patternName.name)))) {
+ _context3.next = 16;
+ break;
+ }
+
+ dict = (0, _primitives.isStream)(pattern) ? pattern.dict : pattern;
+ typeNum = dict.get('PatternType');
+
+ if (!(typeNum === TILING_PATTERN)) {
+ _context3.next = 9;
+ break;
+ }
+
+ color = cs.base ? cs.base.getRgb(args, 0) : null;
+ return _context3.abrupt("return", this.handleTilingType(fn, color, resources, pattern, dict, operatorList, task));
+
+ case 9:
+ if (!(typeNum === SHADING_PATTERN)) {
+ _context3.next = 15;
+ break;
+ }
+
+ shading = dict.get('Shading');
+ matrix = dict.getArray('Matrix');
+ pattern = _pattern.Pattern.parseShading(shading, matrix, this.xref, resources, this.handler, this.pdfFunctionFactory);
+ operatorList.addOp(fn, pattern.getIR());
+ return _context3.abrupt("return", undefined);
+
+ case 15:
+ throw new _util.FormatError("Unknown PatternType: ".concat(typeNum));
+
+ case 16:
+ throw new _util.FormatError("Unknown PatternName: ".concat(patternName));
+
+ case 17:
+ case "end":
+ return _context3.stop();
+ }
+ }
+ }, _callee3, this);
+ }));
+
+ function handleColorN(_x3, _x4, _x5, _x6, _x7, _x8, _x9) {
+ return _handleColorN.apply(this, arguments);
+ }
+
+ return handleColorN;
+ }(),
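+ // Main operator loop: reads operators through EvaluatorPreprocessor, rewrites color/text/image operators as needed, and periodically yields via TimeSlotManager; asynchronous work (XObjects, fonts, patterns, gstates) re-enters the loop through the next() helper.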
+ getOperatorList: function getOperatorList(_ref4) {
+ var _this7 = this;
+
+ var stream = _ref4.stream,
+ task = _ref4.task,
+ resources = _ref4.resources,
+ operatorList = _ref4.operatorList,
+ _ref4$initialState = _ref4.initialState,
+ initialState = _ref4$initialState === void 0 ? null : _ref4$initialState;
+ resources = resources || _primitives.Dict.empty;
+ initialState = initialState || new EvalState();
+
+ if (!operatorList) {
+ throw new Error('getOperatorList: missing "operatorList" parameter');
+ }
+
+ var self = this;
+ var xref = this.xref;
+ var parsingText = false;
+ var imageCache = Object.create(null);
+
+ var xobjs = resources.get('XObject') || _primitives.Dict.empty;
+
+ var patterns = resources.get('Pattern') || _primitives.Dict.empty;
+
+ var stateManager = new StateManager(initialState);
+ var preprocessor = new EvaluatorPreprocessor(stream, xref, stateManager);
+ var timeSlotManager = new TimeSlotManager();
+
+ function closePendingRestoreOPS(argument) {
+ for (var i = 0, ii = preprocessor.savedStatesDepth; i < ii; i++) {
+ operatorList.addOp(_util.OPS.restore, []);
+ }
+ }
+
+ return new Promise(function promiseBody(resolve, reject) {
+ var next = function next(promise) {
+ promise.then(function () {
+ try {
+ promiseBody(resolve, reject);
+ } catch (ex) {
+ reject(ex);
+ }
+ }, reject);
+ };
+
+ task.ensureNotTerminated();
+ timeSlotManager.reset();
+ var stop,
+ operation = {},
+ i,
+ ii,
+ cs;
+
+ while (!(stop = timeSlotManager.check())) {
+ operation.args = null;
+
+ if (!preprocessor.read(operation)) {
+ break;
+ }
+
+ var args = operation.args;
+ var fn = operation.fn;
+
+ switch (fn | 0) {
+ case _util.OPS.paintXObject:
+ var name = args[0].name;
+
+ if (name && imageCache[name] !== undefined) {
+ operatorList.addOp(imageCache[name].fn, imageCache[name].args);
+ args = null;
+ continue;
+ }
+
+ next(new Promise(function (resolveXObject, rejectXObject) {
+ if (!name) {
+ throw new _util.FormatError('XObject must be referred to by name.');
+ }
+
+ var xobj = xobjs.get(name);
+
+ if (!xobj) {
+ operatorList.addOp(fn, args);
+ resolveXObject();
+ return;
+ }
+
+ if (!(0, _primitives.isStream)(xobj)) {
+ throw new _util.FormatError('XObject should be a stream');
+ }
+
+ var type = xobj.dict.get('Subtype');
+
+ if (!(0, _primitives.isName)(type)) {
+ throw new _util.FormatError('XObject should have a Name subtype');
+ }
+
+ if (type.name === 'Form') {
+ stateManager.save();
+ self.buildFormXObject(resources, xobj, null, operatorList, task, stateManager.state.clone()).then(function () {
+ stateManager.restore();
+ resolveXObject();
+ }, rejectXObject);
+ return;
+ } else if (type.name === 'Image') {
+ self.buildPaintImageXObject({
+ resources: resources,
+ image: xobj,
+ operatorList: operatorList,
+ cacheKey: name,
+ imageCache: imageCache
+ }).then(resolveXObject, rejectXObject);
+ return;
+ } else if (type.name === 'PS') {
+ (0, _util.info)('Ignored XObject subtype PS');
+ } else {
+ throw new _util.FormatError("Unhandled XObject subtype ".concat(type.name));
+ }
+
+ resolveXObject();
+ })["catch"](function (reason) {
+ if (self.options.ignoreErrors) {
+ self.handler.send('UnsupportedFeature', {
+ featureId: _util.UNSUPPORTED_FEATURES.unknown
+ });
+ (0, _util.warn)("getOperatorList - ignoring XObject: \"".concat(reason, "\"."));
+ return;
+ }
+
+ throw reason;
+ }));
+ return;
+
+ case _util.OPS.setFont:
+ var fontSize = args[1];
+ next(self.handleSetFont(resources, args, null, operatorList, task, stateManager.state).then(function (loadedName) {
+ operatorList.addDependency(loadedName);
+ operatorList.addOp(_util.OPS.setFont, [loadedName, fontSize]);
+ }));
+ return;
+
+ case _util.OPS.beginText:
+ parsingText = true;
+ break;
+
+ case _util.OPS.endText:
+ parsingText = false;
+ break;
+
+ case _util.OPS.endInlineImage:
+ var cacheKey = args[0].cacheKey;
+
+ if (cacheKey) {
+ var cacheEntry = imageCache[cacheKey];
+
+ if (cacheEntry !== undefined) {
+ operatorList.addOp(cacheEntry.fn, cacheEntry.args);
+ args = null;
+ continue;
+ }
+ }
+
+ next(self.buildPaintImageXObject({
+ resources: resources,
+ image: args[0],
+ isInline: true,
+ operatorList: operatorList,
+ cacheKey: cacheKey,
+ imageCache: imageCache
+ }));
+ return;
+
+ case _util.OPS.showText:
+ args[0] = self.handleText(args[0], stateManager.state);
+ break;
+
+ case _util.OPS.showSpacedText:
+ var arr = args[0];
+ var combinedGlyphs = [];
+ var arrLength = arr.length;
+ var state = stateManager.state;
+
+ for (i = 0; i < arrLength; ++i) {
+ var arrItem = arr[i];
+
+ if ((0, _util.isString)(arrItem)) {
+ Array.prototype.push.apply(combinedGlyphs, self.handleText(arrItem, state));
+ } else if ((0, _util.isNum)(arrItem)) {
+ combinedGlyphs.push(arrItem);
+ }
+ }
+
+ args[0] = combinedGlyphs;
+ fn = _util.OPS.showText;
+ break;
+
+ case _util.OPS.nextLineShowText:
+ operatorList.addOp(_util.OPS.nextLine);
+ args[0] = self.handleText(args[0], stateManager.state);
+ fn = _util.OPS.showText;
+ break;
+
+ case _util.OPS.nextLineSetSpacingShowText:
+ operatorList.addOp(_util.OPS.nextLine);
+ operatorList.addOp(_util.OPS.setWordSpacing, [args.shift()]);
+ operatorList.addOp(_util.OPS.setCharSpacing, [args.shift()]);
+ args[0] = self.handleText(args[0], stateManager.state);
+ fn = _util.OPS.showText;
+ break;
+
+ case _util.OPS.setTextRenderingMode:
+ stateManager.state.textRenderingMode = args[0];
+ break;
+
+ case _util.OPS.setFillColorSpace:
+ stateManager.state.fillColorSpace = _colorspace.ColorSpace.parse(args[0], xref, resources, self.pdfFunctionFactory);
+ continue;
+
+ case _util.OPS.setStrokeColorSpace:
+ stateManager.state.strokeColorSpace = _colorspace.ColorSpace.parse(args[0], xref, resources, self.pdfFunctionFactory);
+ continue;
+
+ case _util.OPS.setFillColor:
+ cs = stateManager.state.fillColorSpace;
+ args = cs.getRgb(args, 0);
+ fn = _util.OPS.setFillRGBColor;
+ break;
+
+ case _util.OPS.setStrokeColor:
+ cs = stateManager.state.strokeColorSpace;
+ args = cs.getRgb(args, 0);
+ fn = _util.OPS.setStrokeRGBColor;
+ break;
+
+ case _util.OPS.setFillGray:
+ stateManager.state.fillColorSpace = _colorspace.ColorSpace.singletons.gray;
+ args = _colorspace.ColorSpace.singletons.gray.getRgb(args, 0);
+ fn = _util.OPS.setFillRGBColor;
+ break;
+
+ case _util.OPS.setStrokeGray:
+ stateManager.state.strokeColorSpace = _colorspace.ColorSpace.singletons.gray;
+ args = _colorspace.ColorSpace.singletons.gray.getRgb(args, 0);
+ fn = _util.OPS.setStrokeRGBColor;
+ break;
+
+ case _util.OPS.setFillCMYKColor:
+ stateManager.state.fillColorSpace = _colorspace.ColorSpace.singletons.cmyk;
+ args = _colorspace.ColorSpace.singletons.cmyk.getRgb(args, 0);
+ fn = _util.OPS.setFillRGBColor;
+ break;
+
+ case _util.OPS.setStrokeCMYKColor:
+ stateManager.state.strokeColorSpace = _colorspace.ColorSpace.singletons.cmyk;
+ args = _colorspace.ColorSpace.singletons.cmyk.getRgb(args, 0);
+ fn = _util.OPS.setStrokeRGBColor;
+ break;
+
+ case _util.OPS.setFillRGBColor:
+ stateManager.state.fillColorSpace = _colorspace.ColorSpace.singletons.rgb;
+ args = _colorspace.ColorSpace.singletons.rgb.getRgb(args, 0);
+ break;
+
+ case _util.OPS.setStrokeRGBColor:
+ stateManager.state.strokeColorSpace = _colorspace.ColorSpace.singletons.rgb;
+ args = _colorspace.ColorSpace.singletons.rgb.getRgb(args, 0);
+ break;
+
+ case _util.OPS.setFillColorN:
+ cs = stateManager.state.fillColorSpace;
+
+ if (cs.name === 'Pattern') {
+ next(self.handleColorN(operatorList, _util.OPS.setFillColorN, args, cs, patterns, resources, task));
+ return;
+ }
+
+ args = cs.getRgb(args, 0);
+ fn = _util.OPS.setFillRGBColor;
+ break;
+
+ case _util.OPS.setStrokeColorN:
+ cs = stateManager.state.strokeColorSpace;
+
+ if (cs.name === 'Pattern') {
+ next(self.handleColorN(operatorList, _util.OPS.setStrokeColorN, args, cs, patterns, resources, task));
+ return;
+ }
+
+ args = cs.getRgb(args, 0);
+ fn = _util.OPS.setStrokeRGBColor;
+ break;
+
+ case _util.OPS.shadingFill:
+ var shadingRes = resources.get('Shading');
+
+ if (!shadingRes) {
+ throw new _util.FormatError('No shading resource found');
+ }
+
+ var shading = shadingRes.get(args[0].name);
+
+ if (!shading) {
+ throw new _util.FormatError('No shading object found');
+ }
+
+ var shadingFill = _pattern.Pattern.parseShading(shading, null, xref, resources, self.handler, self.pdfFunctionFactory);
+
+ var patternIR = shadingFill.getIR();
+ args = [patternIR];
+ fn = _util.OPS.shadingFill;
+ break;
+
+ case _util.OPS.setGState:
+ var dictName = args[0];
+ var extGState = resources.get('ExtGState');
+
+ if (!(0, _primitives.isDict)(extGState) || !extGState.has(dictName.name)) {
+ break;
+ }
+
+ var gState = extGState.get(dictName.name);
+ next(self.setGState(resources, gState, operatorList, task, stateManager));
+ return;
+
+ case _util.OPS.moveTo:
+ case _util.OPS.lineTo:
+ case _util.OPS.curveTo:
+ case _util.OPS.curveTo2:
+ case _util.OPS.curveTo3:
+ case _util.OPS.closePath:
+ case _util.OPS.rectangle:
+ self.buildPath(operatorList, fn, args, parsingText);
+ continue;
+
+ case _util.OPS.markPoint:
+ case _util.OPS.markPointProps:
+ case _util.OPS.beginMarkedContent:
+ case _util.OPS.beginMarkedContentProps:
+ case _util.OPS.endMarkedContent:
+ case _util.OPS.beginCompat:
+ case _util.OPS.endCompat:
+ continue;
+
+ default:
+ if (args !== null) {
+ for (i = 0, ii = args.length; i < ii; i++) {
+ if (args[i] instanceof _primitives.Dict) {
+ break;
+ }
+ }
+
+ if (i < ii) {
+ (0, _util.warn)('getOperatorList - ignoring operator: ' + fn);
+ continue;
+ }
+ }
+
+ }
+
+ operatorList.addOp(fn, args);
+ }
+
+ if (stop) {
+ next(deferred);
+ return;
+ }
+
+ closePendingRestoreOPS();
+ resolve();
+ })["catch"](function (reason) {
+ if (_this7.options.ignoreErrors) {
+ _this7.handler.send('UnsupportedFeature', {
+ featureId: _util.UNSUPPORTED_FEATURES.unknown
+ });
+
+ (0, _util.warn)("getOperatorList - ignoring errors during \"".concat(task.name, "\" ") + "task: \"".concat(reason, "\"."));
+ closePendingRestoreOPS();
+ return;
+ }
+
+ throw reason;
+ });
+ },
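+ // Text-extraction counterpart of getOperatorList: accumulates positioned text items and font styles, using spaceWidth-based heuristics (SPACE_FACTOR, MULTI_SPACE_FACTOR) to decide where spaces and text-run breaks belong.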
+ getTextContent: function getTextContent(_ref5) {
+ var _this8 = this;
+
+ var stream = _ref5.stream,
+ task = _ref5.task,
+ resources = _ref5.resources,
+ _ref5$stateManager = _ref5.stateManager,
+ stateManager = _ref5$stateManager === void 0 ? null : _ref5$stateManager,
+ _ref5$normalizeWhites = _ref5.normalizeWhitespace,
+ normalizeWhitespace = _ref5$normalizeWhites === void 0 ? false : _ref5$normalizeWhites,
+ _ref5$combineTextItem = _ref5.combineTextItems,
+ combineTextItems = _ref5$combineTextItem === void 0 ? false : _ref5$combineTextItem,
+ sink = _ref5.sink,
+ _ref5$seenStyles = _ref5.seenStyles,
+ seenStyles = _ref5$seenStyles === void 0 ? Object.create(null) : _ref5$seenStyles;
+ resources = resources || _primitives.Dict.empty;
+ stateManager = stateManager || new StateManager(new TextState());
+ var WhitespaceRegexp = /\s/g;
+ var textContent = {
+ items: [],
+ styles: Object.create(null)
+ };
+ var textContentItem = {
+ initialized: false,
+ str: [],
+ width: 0,
+ height: 0,
+ vertical: false,
+ lastAdvanceWidth: 0,
+ lastAdvanceHeight: 0,
+ textAdvanceScale: 0,
+ spaceWidth: 0,
+ fakeSpaceMin: Infinity,
+ fakeMultiSpaceMin: Infinity,
+ fakeMultiSpaceMax: -0,
+ textRunBreakAllowed: false,
+ transform: null,
+ fontName: null
+ };
+ var SPACE_FACTOR = 0.3;
+ var MULTI_SPACE_FACTOR = 1.5;
+ var MULTI_SPACE_FACTOR_MAX = 4;
+ var self = this;
+ var xref = this.xref;
+ var xobjs = null;
+ var skipEmptyXObjs = Object.create(null);
+ var preprocessor = new EvaluatorPreprocessor(stream, xref, stateManager);
+ var textState;
+
+ function ensureTextContentItem() {
+ if (textContentItem.initialized) {
+ return textContentItem;
+ }
+
+ var font = textState.font;
+
+ if (!(font.loadedName in seenStyles)) {
+ seenStyles[font.loadedName] = true;
+ textContent.styles[font.loadedName] = {
+ fontFamily: font.fallbackName,
+ ascent: font.ascent,
+ descent: font.descent,
+ vertical: !!font.vertical
+ };
+ }
+
+ textContentItem.fontName = font.loadedName;
+ var tsm = [textState.fontSize * textState.textHScale, 0, 0, textState.fontSize, 0, textState.textRise];
+
+ if (font.isType3Font && textState.fontSize <= 1 && !(0, _util.isArrayEqual)(textState.fontMatrix, _util.FONT_IDENTITY_MATRIX)) {
+ var glyphHeight = font.bbox[3] - font.bbox[1];
+
+ if (glyphHeight > 0) {
+ tsm[3] *= glyphHeight * textState.fontMatrix[3];
+ }
+ }
+
+ var trm = _util.Util.transform(textState.ctm, _util.Util.transform(textState.textMatrix, tsm));
+
+ textContentItem.transform = trm;
+
+ if (!font.vertical) {
+ textContentItem.width = 0;
+ textContentItem.height = Math.sqrt(trm[2] * trm[2] + trm[3] * trm[3]);
+ textContentItem.vertical = false;
+ } else {
+ textContentItem.width = Math.sqrt(trm[0] * trm[0] + trm[1] * trm[1]);
+ textContentItem.height = 0;
+ textContentItem.vertical = true;
+ }
+
+ var a = textState.textLineMatrix[0];
+ var b = textState.textLineMatrix[1];
+ var scaleLineX = Math.sqrt(a * a + b * b);
+ a = textState.ctm[0];
+ b = textState.ctm[1];
+ var scaleCtmX = Math.sqrt(a * a + b * b);
+ textContentItem.textAdvanceScale = scaleCtmX * scaleLineX;
+ textContentItem.lastAdvanceWidth = 0;
+ textContentItem.lastAdvanceHeight = 0;
+ var spaceWidth = font.spaceWidth / 1000 * textState.fontSize;
+
+ if (spaceWidth) {
+ textContentItem.spaceWidth = spaceWidth;
+ textContentItem.fakeSpaceMin = spaceWidth * SPACE_FACTOR;
+ textContentItem.fakeMultiSpaceMin = spaceWidth * MULTI_SPACE_FACTOR;
+ textContentItem.fakeMultiSpaceMax = spaceWidth * MULTI_SPACE_FACTOR_MAX;
+ textContentItem.textRunBreakAllowed = !font.isMonospace;
+ } else {
+ textContentItem.spaceWidth = 0;
+ textContentItem.fakeSpaceMin = Infinity;
+ textContentItem.fakeMultiSpaceMin = Infinity;
+ textContentItem.fakeMultiSpaceMax = 0;
+ textContentItem.textRunBreakAllowed = false;
+ }
+
+ textContentItem.initialized = true;
+ return textContentItem;
+ }
+
+ function replaceWhitespace(str) {
+ var i = 0,
+ ii = str.length,
+ code;
+
+ while (i < ii && (code = str.charCodeAt(i)) >= 0x20 && code <= 0x7F) {
+ i++;
+ }
+
+ return i < ii ? str.replace(WhitespaceRegexp, ' ') : str;
+ }
+
+ function runBidiTransform(textChunk) {
+ var str = textChunk.str.join('');
+ var bidiResult = (0, _bidi.bidi)(str, -1, textChunk.vertical);
+ return {
+ str: normalizeWhitespace ? replaceWhitespace(bidiResult.str) : bidiResult.str,
+ dir: bidiResult.dir,
+ width: textChunk.width,
+ height: textChunk.height,
+ transform: textChunk.transform,
+ fontName: textChunk.fontName
+ };
+ }
+
+ function handleSetFont(fontName, fontRef) {
+ return self.loadFont(fontName, fontRef, resources).then(function (translated) {
+ textState.font = translated.font;
+ textState.fontMatrix = translated.font.fontMatrix || _util.FONT_IDENTITY_MATRIX;
+ });
+ }
+
+ function buildTextContentItem(chars) {
+ var font = textState.font;
+ var textChunk = ensureTextContentItem();
+ var width = 0;
+ var height = 0;
+ var glyphs = font.charsToGlyphs(chars);
+
+ for (var i = 0; i < glyphs.length; i++) {
+ var glyph = glyphs[i];
+ var glyphWidth = null;
+
+ if (font.vertical && glyph.vmetric) {
+ glyphWidth = glyph.vmetric[0];
+ } else {
+ glyphWidth = glyph.width;
+ }
+
+ var glyphUnicode = glyph.unicode;
+ var NormalizedUnicodes = (0, _unicode.getNormalizedUnicodes)();
+
+ if (NormalizedUnicodes[glyphUnicode] !== undefined) {
+ glyphUnicode = NormalizedUnicodes[glyphUnicode];
+ }
+
+ glyphUnicode = (0, _unicode.reverseIfRtl)(glyphUnicode);
+ var charSpacing = textState.charSpacing;
+
+ if (glyph.isSpace) {
+ var wordSpacing = textState.wordSpacing;
+ charSpacing += wordSpacing;
+
+ if (wordSpacing > 0) {
+ addFakeSpaces(wordSpacing, textChunk.str);
+ }
+ }
+
+ var tx = 0;
+ var ty = 0;
+
+ if (!font.vertical) {
+ var w0 = glyphWidth * textState.fontMatrix[0];
+ tx = (w0 * textState.fontSize + charSpacing) * textState.textHScale;
+ width += tx;
+ } else {
+ var w1 = glyphWidth * textState.fontMatrix[0];
+ ty = w1 * textState.fontSize + charSpacing;
+ height += ty;
+ }
+
+ textState.translateTextMatrix(tx, ty);
+ textChunk.str.push(glyphUnicode);
+ }
+
+ if (!font.vertical) {
+ textChunk.lastAdvanceWidth = width;
+ textChunk.width += width;
+ } else {
+ textChunk.lastAdvanceHeight = height;
+ textChunk.height += Math.abs(height);
+ }
+
+ return textChunk;
+ }
+
+ function addFakeSpaces(width, strBuf) {
+ if (width < textContentItem.fakeSpaceMin) {
+ return;
+ }
+
+ if (width < textContentItem.fakeMultiSpaceMin) {
+ strBuf.push(' ');
+ return;
+ }
+
+ var fakeSpaces = Math.round(width / textContentItem.spaceWidth);
+
+ while (fakeSpaces-- > 0) {
+ strBuf.push(' ');
+ }
+ }
+
+ function flushTextContentItem() {
+ if (!textContentItem.initialized) {
+ return;
+ }
+
+ if (!textContentItem.vertical) {
+ textContentItem.width *= textContentItem.textAdvanceScale;
+ } else {
+ textContentItem.height *= textContentItem.textAdvanceScale;
+ }
+
+ textContent.items.push(runBidiTransform(textContentItem));
+ textContentItem.initialized = false;
+ textContentItem.str.length = 0;
+ }
+
+ function enqueueChunk() {
+ var length = textContent.items.length;
+
+ if (length > 0) {
+ sink.enqueue(textContent, length);
+ textContent.items = [];
+ textContent.styles = Object.create(null);
+ }
+ }
+
+ var timeSlotManager = new TimeSlotManager();
+ return new Promise(function promiseBody(resolve, reject) {
+ var next = function next(promise) {
+ enqueueChunk();
+ Promise.all([promise, sink.ready]).then(function () {
+ try {
+ promiseBody(resolve, reject);
+ } catch (ex) {
+ reject(ex);
+ }
+ }, reject);
+ };
+
+ task.ensureNotTerminated();
+ timeSlotManager.reset();
+ var stop,
+ operation = {},
+ args = [];
+
+ while (!(stop = timeSlotManager.check())) {
+ args.length = 0;
+ operation.args = args;
+
+ if (!preprocessor.read(operation)) {
+ break;
+ }
+
+ textState = stateManager.state;
+ var fn = operation.fn;
+ args = operation.args;
+ var advance, diff;
+
+ switch (fn | 0) {
+ case _util.OPS.setFont:
+ var fontNameArg = args[0].name,
+ fontSizeArg = args[1];
+
+ if (textState.font && fontNameArg === textState.fontName && fontSizeArg === textState.fontSize) {
+ break;
+ }
+
+ flushTextContentItem();
+ textState.fontName = fontNameArg;
+ textState.fontSize = fontSizeArg;
+ next(handleSetFont(fontNameArg, null));
+ return;
+
+ case _util.OPS.setTextRise:
+ flushTextContentItem();
+ textState.textRise = args[0];
+ break;
+
+ case _util.OPS.setHScale:
+ flushTextContentItem();
+ textState.textHScale = args[0] / 100;
+ break;
+
+ case _util.OPS.setLeading:
+ flushTextContentItem();
+ textState.leading = args[0];
+ break;
+
+ case _util.OPS.moveText:
+ var isSameTextLine = !textState.font ? false : (textState.font.vertical ? args[0] : args[1]) === 0;
+ advance = args[0] - args[1];
+
+ if (combineTextItems && isSameTextLine && textContentItem.initialized && advance > 0 && advance <= textContentItem.fakeMultiSpaceMax) {
+ textState.translateTextLineMatrix(args[0], args[1]);
+ textContentItem.width += args[0] - textContentItem.lastAdvanceWidth;
+ textContentItem.height += args[1] - textContentItem.lastAdvanceHeight;
+ diff = args[0] - textContentItem.lastAdvanceWidth - (args[1] - textContentItem.lastAdvanceHeight);
+ addFakeSpaces(diff, textContentItem.str);
+ break;
+ }
+
+ flushTextContentItem();
+ textState.translateTextLineMatrix(args[0], args[1]);
+ textState.textMatrix = textState.textLineMatrix.slice();
+ break;
+
+ case _util.OPS.setLeadingMoveText:
+ flushTextContentItem();
+ textState.leading = -args[1];
+ textState.translateTextLineMatrix(args[0], args[1]);
+ textState.textMatrix = textState.textLineMatrix.slice();
+ break;
+
+ case _util.OPS.nextLine:
+ flushTextContentItem();
+ textState.carriageReturn();
+ break;
+
+ case _util.OPS.setTextMatrix:
+ advance = textState.calcTextLineMatrixAdvance(args[0], args[1], args[2], args[3], args[4], args[5]);
+
+ if (combineTextItems && advance !== null && textContentItem.initialized && advance.value > 0 && advance.value <= textContentItem.fakeMultiSpaceMax) {
+ textState.translateTextLineMatrix(advance.width, advance.height);
+ textContentItem.width += advance.width - textContentItem.lastAdvanceWidth;
+ textContentItem.height += advance.height - textContentItem.lastAdvanceHeight;
+ diff = advance.width - textContentItem.lastAdvanceWidth - (advance.height - textContentItem.lastAdvanceHeight);
+ addFakeSpaces(diff, textContentItem.str);
+ break;
+ }
+
+ flushTextContentItem();
+ textState.setTextMatrix(args[0], args[1], args[2], args[3], args[4], args[5]);
+ textState.setTextLineMatrix(args[0], args[1], args[2], args[3], args[4], args[5]);
+ break;
+
+ case _util.OPS.setCharSpacing:
+ textState.charSpacing = args[0];
+ break;
+
+ case _util.OPS.setWordSpacing:
+ textState.wordSpacing = args[0];
+ break;
+
+ case _util.OPS.beginText:
+ flushTextContentItem();
+ textState.textMatrix = _util.IDENTITY_MATRIX.slice();
+ textState.textLineMatrix = _util.IDENTITY_MATRIX.slice();
+ break;
+
+ case _util.OPS.showSpacedText:
+ var items = args[0];
+ var offset;
+
+ for (var j = 0, jj = items.length; j < jj; j++) {
+ if (typeof items[j] === 'string') {
+ buildTextContentItem(items[j]);
+ } else if ((0, _util.isNum)(items[j])) {
+ ensureTextContentItem();
+ advance = items[j] * textState.fontSize / 1000;
+ var breakTextRun = false;
+
+ if (textState.font.vertical) {
+ offset = advance;
+ textState.translateTextMatrix(0, offset);
+ breakTextRun = textContentItem.textRunBreakAllowed && advance > textContentItem.fakeMultiSpaceMax;
+
+ if (!breakTextRun) {
+ textContentItem.height += offset;
+ }
+ } else {
+ advance = -advance;
+ offset = advance * textState.textHScale;
+ textState.translateTextMatrix(offset, 0);
+ breakTextRun = textContentItem.textRunBreakAllowed && advance > textContentItem.fakeMultiSpaceMax;
+
+ if (!breakTextRun) {
+ textContentItem.width += offset;
+ }
+ }
+
+ if (breakTextRun) {
+ flushTextContentItem();
+ } else if (advance > 0) {
+ addFakeSpaces(advance, textContentItem.str);
+ }
+ }
+ }
+
+ break;
+
+ case _util.OPS.showText:
+ buildTextContentItem(args[0]);
+ break;
+
+ case _util.OPS.nextLineShowText:
+ flushTextContentItem();
+ textState.carriageReturn();
+ buildTextContentItem(args[0]);
+ break;
+
+ case _util.OPS.nextLineSetSpacingShowText:
+ flushTextContentItem();
+ textState.wordSpacing = args[0];
+ textState.charSpacing = args[1];
+ textState.carriageReturn();
+ buildTextContentItem(args[2]);
+ break;
+
+ case _util.OPS.paintXObject:
+ flushTextContentItem();
+
+ if (!xobjs) {
+ xobjs = resources.get('XObject') || _primitives.Dict.empty;
+ }
+
+ var name = args[0].name;
+
+ if (name && skipEmptyXObjs[name] !== undefined) {
+ break;
+ }
+
+ next(new Promise(function (resolveXObject, rejectXObject) {
+ if (!name) {
+ throw new _util.FormatError('XObject must be referred to by name.');
+ }
+
+ var xobj = xobjs.get(name);
+
+ if (!xobj) {
+ resolveXObject();
+ return;
+ }
+
+ if (!(0, _primitives.isStream)(xobj)) {
+ throw new _util.FormatError('XObject should be a stream');
+ }
+
+ var type = xobj.dict.get('Subtype');
+
+ if (!(0, _primitives.isName)(type)) {
+ throw new _util.FormatError('XObject should have a Name subtype');
+ }
+
+ if (type.name !== 'Form') {
+ skipEmptyXObjs[name] = true;
+ resolveXObject();
+ return;
+ }
+
+ var currentState = stateManager.state.clone();
+ var xObjStateManager = new StateManager(currentState);
+ var matrix = xobj.dict.getArray('Matrix');
+
+ if (Array.isArray(matrix) && matrix.length === 6) {
+ xObjStateManager.transform(matrix);
+ }
+
+ enqueueChunk();
+ var sinkWrapper = {
+ enqueueInvoked: false,
+ enqueue: function enqueue(chunk, size) {
+ this.enqueueInvoked = true;
+ sink.enqueue(chunk, size);
+ },
+
+ get desiredSize() {
+ return sink.desiredSize;
+ },
+
+ get ready() {
+ return sink.ready;
+ }
+
+ };
+ self.getTextContent({
+ stream: xobj,
+ task: task,
+ resources: xobj.dict.get('Resources') || resources,
+ stateManager: xObjStateManager,
+ normalizeWhitespace: normalizeWhitespace,
+ combineTextItems: combineTextItems,
+ sink: sinkWrapper,
+ seenStyles: seenStyles
+ }).then(function () {
+ if (!sinkWrapper.enqueueInvoked) {
+ skipEmptyXObjs[name] = true;
+ }
+
+ resolveXObject();
+ }, rejectXObject);
+ })["catch"](function (reason) {
+ if (reason instanceof _util.AbortException) {
+ return;
+ }
+
+ if (self.options.ignoreErrors) {
+ (0, _util.warn)("getTextContent - ignoring XObject: \"".concat(reason, "\"."));
+ return;
+ }
+
+ throw reason;
+ }));
+ return;
+
+ case _util.OPS.setGState:
+ flushTextContentItem();
+ var dictName = args[0];
+ var extGState = resources.get('ExtGState');
+
+ if (!(0, _primitives.isDict)(extGState) || !(0, _primitives.isName)(dictName)) {
+ break;
+ }
+
+ var gState = extGState.get(dictName.name);
+
+ if (!(0, _primitives.isDict)(gState)) {
+ break;
+ }
+
+ var gStateFont = gState.get('Font');
+
+ if (gStateFont) {
+ textState.fontName = null;
+ textState.fontSize = gStateFont[1];
+ next(handleSetFont(null, gStateFont[0]));
+ return;
+ }
+
+ break;
+ }
+
+ if (textContent.items.length >= sink.desiredSize) {
+ stop = true;
+ break;
+ }
+ }
+
+ if (stop) {
+ next(deferred);
+ return;
+ }
+
+ flushTextContentItem();
+ enqueueChunk();
+ resolve();
+ })["catch"](function (reason) {
+ if (reason instanceof _util.AbortException) {
+ return;
+ }
+
+ if (_this8.options.ignoreErrors) {
+ (0, _util.warn)("getTextContent - ignoring errors during \"".concat(task.name, "\" ") + "task: \"".concat(reason, "\"."));
+ flushTextContentItem();
+ enqueueChunk();
+ return;
+ }
+
+ throw reason;
+ });
+ },
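+ // Reads the font-dictionary structures shared by all font types:
+ // CIDSystemInfo, CIDToGIDMap, Encoding/Differences and the ToUnicode data.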
+ extractDataStructures: function PartialEvaluator_extractDataStructures(dict, baseDict, properties) {
+ var _this9 = this;
+
+ var xref = this.xref,
+ cidToGidBytes;
+ var toUnicode = dict.get('ToUnicode') || baseDict.get('ToUnicode');
+ var toUnicodePromise = toUnicode ? this.readToUnicode(toUnicode) : Promise.resolve(undefined);
+
+ if (properties.composite) {
+ var cidSystemInfo = dict.get('CIDSystemInfo');
+
+ if ((0, _primitives.isDict)(cidSystemInfo)) {
+ properties.cidSystemInfo = {
+ registry: (0, _util.stringToPDFString)(cidSystemInfo.get('Registry')),
+ ordering: (0, _util.stringToPDFString)(cidSystemInfo.get('Ordering')),
+ supplement: cidSystemInfo.get('Supplement')
+ };
+ }
+
+ var cidToGidMap = dict.get('CIDToGIDMap');
+
+ if ((0, _primitives.isStream)(cidToGidMap)) {
+ cidToGidBytes = cidToGidMap.getBytes();
+ }
+ }
+
+ var differences = [];
+ var baseEncodingName = null;
+ var encoding;
+
+ if (dict.has('Encoding')) {
+ encoding = dict.get('Encoding');
+
+ if ((0, _primitives.isDict)(encoding)) {
+ baseEncodingName = encoding.get('BaseEncoding');
+ baseEncodingName = (0, _primitives.isName)(baseEncodingName) ? baseEncodingName.name : null;
+
+ if (encoding.has('Differences')) {
+ var diffEncoding = encoding.get('Differences');
+ var index = 0;
+
+ for (var j = 0, jj = diffEncoding.length; j < jj; j++) {
+ var data = xref.fetchIfRef(diffEncoding[j]);
+
+ if ((0, _util.isNum)(data)) {
+ index = data;
+ } else if ((0, _primitives.isName)(data)) {
+ differences[index++] = data.name;
+ } else {
+ throw new _util.FormatError("Invalid entry in 'Differences' array: ".concat(data));
+ }
+ }
+ }
+ } else if ((0, _primitives.isName)(encoding)) {
+ baseEncodingName = encoding.name;
+ } else {
+ throw new _util.FormatError('Encoding is not a Name nor a Dict');
+ }
+
+ if (baseEncodingName !== 'MacRomanEncoding' && baseEncodingName !== 'MacExpertEncoding' && baseEncodingName !== 'WinAnsiEncoding') {
+ baseEncodingName = null;
+ }
+ }
+
+ if (baseEncodingName) {
+ properties.defaultEncoding = (0, _encodings.getEncoding)(baseEncodingName).slice();
+ } else {
+ var isSymbolicFont = !!(properties.flags & _fonts.FontFlags.Symbolic);
+ var isNonsymbolicFont = !!(properties.flags & _fonts.FontFlags.Nonsymbolic);
+ encoding = _encodings.StandardEncoding;
+
+ if (properties.type === 'TrueType' && !isNonsymbolicFont) {
+ encoding = _encodings.WinAnsiEncoding;
+ }
+
+ if (isSymbolicFont) {
+ encoding = _encodings.MacRomanEncoding;
+
+ if (!properties.file) {
+ if (/Symbol/i.test(properties.name)) {
+ encoding = _encodings.SymbolSetEncoding;
+ } else if (/Dingbats/i.test(properties.name)) {
+ encoding = _encodings.ZapfDingbatsEncoding;
+ }
+ }
+ }
+
+ properties.defaultEncoding = encoding;
+ }
+
+ properties.differences = differences;
+ properties.baseEncodingName = baseEncodingName;
+ properties.hasEncoding = !!baseEncodingName || differences.length > 0;
+ properties.dict = dict;
+ return toUnicodePromise.then(function (toUnicode) {
+ properties.toUnicode = toUnicode;
+ return _this9.buildToUnicode(properties);
+ }).then(function (toUnicode) {
+ properties.toUnicode = toUnicode;
+
+ if (cidToGidBytes) {
+ properties.cidToGidMap = _this9.readCidToGidMap(cidToGidBytes, toUnicode);
+ }
+
+ return properties;
+ });
+ },
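+ // Builds a fallback ToUnicode map for simple (non-composite) fonts from the
+ // default encoding, the Differences array and standard glyph names.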
+ _buildSimpleFontToUnicode: function _buildSimpleFontToUnicode(properties) {
+ (0, _util.assert)(!properties.composite, 'Must be a simple font.');
+ var toUnicode = [],
+ charcode,
+ glyphName;
+ var encoding = properties.defaultEncoding.slice();
+ var baseEncodingName = properties.baseEncodingName;
+ var differences = properties.differences;
+
+ for (charcode in differences) {
+ glyphName = differences[charcode];
+
+ if (glyphName === '.notdef') {
+ continue;
+ }
+
+ encoding[charcode] = glyphName;
+ }
+
+ var glyphsUnicodeMap = (0, _glyphlist.getGlyphsUnicode)();
+
+ for (charcode in encoding) {
+ glyphName = encoding[charcode];
+
+ if (glyphName === '') {
+ continue;
+ } else if (glyphsUnicodeMap[glyphName] === undefined) {
+ var code = 0;
+
+ switch (glyphName[0]) {
+ case 'G':
+ if (glyphName.length === 3) {
+ code = parseInt(glyphName.substring(1), 16);
+ }
+
+ break;
+
+ case 'g':
+ if (glyphName.length === 5) {
+ code = parseInt(glyphName.substring(1), 16);
+ }
+
+ break;
+
+ case 'C':
+ case 'c':
+ if (glyphName.length >= 3) {
+ code = +glyphName.substring(1);
+ }
+
+ break;
+
+ default:
+ var unicode = (0, _unicode.getUnicodeForGlyph)(glyphName, glyphsUnicodeMap);
+
+ if (unicode !== -1) {
+ code = unicode;
+ }
+
+ }
+
+ if (code) {
+ if (baseEncodingName && code === +charcode) {
+ var baseEncoding = (0, _encodings.getEncoding)(baseEncodingName);
+
+ if (baseEncoding && (glyphName = baseEncoding[charcode])) {
+ toUnicode[charcode] = String.fromCharCode(glyphsUnicodeMap[glyphName]);
+ continue;
+ }
+ }
+
+ toUnicode[charcode] = String.fromCodePoint(code);
+ }
+
+ continue;
+ }
+
+ toUnicode[charcode] = String.fromCharCode(glyphsUnicodeMap[glyphName]);
+ }
+
+ return new _fonts.ToUnicodeMap(toUnicode);
+ },
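+ // Resolves the ToUnicode map for a font: prefers an embedded /ToUnicode CMap,
+ // otherwise derives one from the encoding (simple fonts) or from a
+ // registry-ordering-UCS2 CMap for known composite fonts.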
+ buildToUnicode: function buildToUnicode(properties) {
+ properties.hasIncludedToUnicodeMap = !!properties.toUnicode && properties.toUnicode.length > 0;
+
+ if (properties.hasIncludedToUnicodeMap) {
+ if (!properties.composite && properties.hasEncoding) {
+ properties.fallbackToUnicode = this._buildSimpleFontToUnicode(properties);
+ }
+
+ return Promise.resolve(properties.toUnicode);
+ }
+
+ if (!properties.composite) {
+ return Promise.resolve(this._buildSimpleFontToUnicode(properties));
+ }
+
+ if (properties.composite && (properties.cMap.builtInCMap && !(properties.cMap instanceof _cmap.IdentityCMap) || properties.cidSystemInfo.registry === 'Adobe' && (properties.cidSystemInfo.ordering === 'GB1' || properties.cidSystemInfo.ordering === 'CNS1' || properties.cidSystemInfo.ordering === 'Japan1' || properties.cidSystemInfo.ordering === 'Korea1'))) {
+ var registry = properties.cidSystemInfo.registry;
+ var ordering = properties.cidSystemInfo.ordering;
+
+ var ucs2CMapName = _primitives.Name.get(registry + '-' + ordering + '-UCS2');
+
+ return _cmap.CMapFactory.create({
+ encoding: ucs2CMapName,
+ fetchBuiltInCMap: this.fetchBuiltInCMap,
+ useCMap: null
+ }).then(function (ucs2CMap) {
+ var cMap = properties.cMap;
+ var toUnicode = [];
+ cMap.forEach(function (charcode, cid) {
+ if (cid > 0xffff) {
+ throw new _util.FormatError('Max size of CID is 65,535');
+ }
+
+ var ucs2 = ucs2CMap.lookup(cid);
+
+ if (ucs2) {
+ toUnicode[charcode] = String.fromCharCode((ucs2.charCodeAt(0) << 8) + ucs2.charCodeAt(1));
+ }
+ });
+ return new _fonts.ToUnicodeMap(toUnicode);
+ });
+ }
+
+ return Promise.resolve(new _fonts.IdentityToUnicodeMap(properties.firstChar, properties.lastChar));
+ },
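+ // Parses a /ToUnicode entry (a named or embedded CMap stream) into a
+ // ToUnicodeMap, combining UTF-16 surrogate pairs into code points.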
+ readToUnicode: function PartialEvaluator_readToUnicode(toUnicode) {
+ var cmapObj = toUnicode;
+
+ if ((0, _primitives.isName)(cmapObj)) {
+ return _cmap.CMapFactory.create({
+ encoding: cmapObj,
+ fetchBuiltInCMap: this.fetchBuiltInCMap,
+ useCMap: null
+ }).then(function (cmap) {
+ if (cmap instanceof _cmap.IdentityCMap) {
+ return new _fonts.IdentityToUnicodeMap(0, 0xFFFF);
+ }
+
+ return new _fonts.ToUnicodeMap(cmap.getMap());
+ });
+ } else if ((0, _primitives.isStream)(cmapObj)) {
+ return _cmap.CMapFactory.create({
+ encoding: cmapObj,
+ fetchBuiltInCMap: this.fetchBuiltInCMap,
+ useCMap: null
+ }).then(function (cmap) {
+ if (cmap instanceof _cmap.IdentityCMap) {
+ return new _fonts.IdentityToUnicodeMap(0, 0xFFFF);
+ }
+
+ var map = new Array(cmap.length);
+ cmap.forEach(function (charCode, token) {
+ var str = [];
+
+ for (var k = 0; k < token.length; k += 2) {
+ var w1 = token.charCodeAt(k) << 8 | token.charCodeAt(k + 1);
+
+ if ((w1 & 0xF800) !== 0xD800) {
+ str.push(w1);
+ continue;
+ }
+
+ k += 2;
+ var w2 = token.charCodeAt(k) << 8 | token.charCodeAt(k + 1);
+ str.push(((w1 & 0x3ff) << 10) + (w2 & 0x3ff) + 0x10000);
+ }
+
+ map[charCode] = String.fromCodePoint.apply(String, str);
+ });
+ return new _fonts.ToUnicodeMap(map);
+ });
+ }
+
+ return Promise.resolve(null);
+ },
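+ // Decodes the CIDToGIDMap stream: each CID maps to a big-endian 16-bit glyph id.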
+ readCidToGidMap: function readCidToGidMap(glyphsData, toUnicode) {
+ var result = [];
+
+ for (var j = 0, jj = glyphsData.length; j < jj; j++) {
+ var glyphID = glyphsData[j++] << 8 | glyphsData[j];
+ var code = j >> 1;
+
+ if (glyphID === 0 && !toUnicode.has(code)) {
+ continue;
+ }
+
+ result[code] = glyphID;
+ }
+
+ return result;
+ },
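+ // Collects glyph widths (and vertical metrics for vertical fonts) from the
+ // font dictionary and flags the font as fixed-pitch when all widths match.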
+ extractWidths: function PartialEvaluator_extractWidths(dict, descriptor, properties) {
+ var xref = this.xref;
+ var glyphsWidths = [];
+ var defaultWidth = 0;
+ var glyphsVMetrics = [];
+ var defaultVMetrics;
+ var i, ii, j, jj, start, code, widths;
+
+ if (properties.composite) {
+ defaultWidth = dict.has('DW') ? dict.get('DW') : 1000;
+ widths = dict.get('W');
+
+ if (widths) {
+ for (i = 0, ii = widths.length; i < ii; i++) {
+ start = xref.fetchIfRef(widths[i++]);
+ code = xref.fetchIfRef(widths[i]);
+
+ if (Array.isArray(code)) {
+ for (j = 0, jj = code.length; j < jj; j++) {
+ glyphsWidths[start++] = xref.fetchIfRef(code[j]);
+ }
+ } else {
+ var width = xref.fetchIfRef(widths[++i]);
+
+ for (j = start; j <= code; j++) {
+ glyphsWidths[j] = width;
+ }
+ }
+ }
+ }
+
+ if (properties.vertical) {
+ var vmetrics = dict.getArray('DW2') || [880, -1000];
+ defaultVMetrics = [vmetrics[1], defaultWidth * 0.5, vmetrics[0]];
+ vmetrics = dict.get('W2');
+
+ if (vmetrics) {
+ for (i = 0, ii = vmetrics.length; i < ii; i++) {
+ start = xref.fetchIfRef(vmetrics[i++]);
+ code = xref.fetchIfRef(vmetrics[i]);
+
+ if (Array.isArray(code)) {
+ for (j = 0, jj = code.length; j < jj; j++) {
+ glyphsVMetrics[start++] = [xref.fetchIfRef(code[j++]), xref.fetchIfRef(code[j++]), xref.fetchIfRef(code[j])];
+ }
+ } else {
+ var vmetric = [xref.fetchIfRef(vmetrics[++i]), xref.fetchIfRef(vmetrics[++i]), xref.fetchIfRef(vmetrics[++i])];
+
+ for (j = start; j <= code; j++) {
+ glyphsVMetrics[j] = vmetric;
+ }
+ }
+ }
+ }
+ }
+ } else {
+ var firstChar = properties.firstChar;
+ widths = dict.get('Widths');
+
+ if (widths) {
+ j = firstChar;
+
+ for (i = 0, ii = widths.length; i < ii; i++) {
+ glyphsWidths[j++] = xref.fetchIfRef(widths[i]);
+ }
+
+ defaultWidth = parseFloat(descriptor.get('MissingWidth')) || 0;
+ } else {
+ var baseFontName = dict.get('BaseFont');
+
+ if ((0, _primitives.isName)(baseFontName)) {
+ var metrics = this.getBaseFontMetrics(baseFontName.name);
+ glyphsWidths = this.buildCharCodeToWidth(metrics.widths, properties);
+ defaultWidth = metrics.defaultWidth;
+ }
+ }
+ }
+
+ var isMonospace = true;
+ var firstWidth = defaultWidth;
+
+ for (var glyph in glyphsWidths) {
+ var glyphWidth = glyphsWidths[glyph];
+
+ if (!glyphWidth) {
+ continue;
+ }
+
+ if (!firstWidth) {
+ firstWidth = glyphWidth;
+ continue;
+ }
+
+ if (firstWidth !== glyphWidth) {
+ isMonospace = false;
+ break;
+ }
+ }
+
+ if (isMonospace) {
+ properties.flags |= _fonts.FontFlags.FixedPitch;
+ }
+
+ properties.defaultWidth = defaultWidth;
+ properties.widths = glyphsWidths;
+ properties.defaultVMetrics = defaultVMetrics;
+ properties.vmetrics = glyphsVMetrics;
+ },
+ isSerifFont: function PartialEvaluator_isSerifFont(baseFontName) {
+ var fontNameWoStyle = baseFontName.split('-')[0];
+ return fontNameWoStyle in (0, _standard_fonts.getSerifFonts)() || fontNameWoStyle.search(/serif/gi) !== -1;
+ },
+ getBaseFontMetrics: function PartialEvaluator_getBaseFontMetrics(name) {
+ var defaultWidth = 0;
+ var widths = [];
+ var monospace = false;
+ var stdFontMap = (0, _standard_fonts.getStdFontMap)();
+ var lookupName = stdFontMap[name] || name;
+ var Metrics = (0, _metrics.getMetrics)();
+
+ if (!(lookupName in Metrics)) {
+ if (this.isSerifFont(name)) {
+ lookupName = 'Times-Roman';
+ } else {
+ lookupName = 'Helvetica';
+ }
+ }
+
+ var glyphWidths = Metrics[lookupName];
+
+ if ((0, _util.isNum)(glyphWidths)) {
+ defaultWidth = glyphWidths;
+ monospace = true;
+ } else {
+ widths = glyphWidths();
+ }
+
+ return {
+ defaultWidth: defaultWidth,
+ monospace: monospace,
+ widths: widths
+ };
+ },
+ buildCharCodeToWidth: function PartialEvaluator_buildCharCodeToWidth(widthsByGlyphName, properties) {
+ var widths = Object.create(null);
+ var differences = properties.differences;
+ var encoding = properties.defaultEncoding;
+
+ for (var charCode = 0; charCode < 256; charCode++) {
+ if (charCode in differences && widthsByGlyphName[differences[charCode]]) {
+ widths[charCode] = widthsByGlyphName[differences[charCode]];
+ continue;
+ }
+
+ if (charCode in encoding && widthsByGlyphName[encoding[charCode]]) {
+ widths[charCode] = widthsByGlyphName[encoding[charCode]];
+ continue;
+ }
+ }
+
+ return widths;
+ },
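+ // Light-weight pass over a font dictionary: resolves the descendant font of
+ // Type0 fonts and hashes Encoding/ToUnicode/Widths data so identical fonts
+ // can be detected and reused.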
+ preEvaluateFont: function PartialEvaluator_preEvaluateFont(dict) {
+ var baseDict = dict;
+ var type = dict.get('Subtype');
+
+ if (!(0, _primitives.isName)(type)) {
+ throw new _util.FormatError('invalid font Subtype');
+ }
+
+ var composite = false;
+ var uint8array;
+
+ if (type.name === 'Type0') {
+ var df = dict.get('DescendantFonts');
+
+ if (!df) {
+ throw new _util.FormatError('Descendant fonts are not specified');
+ }
+
+ dict = Array.isArray(df) ? this.xref.fetchIfRef(df[0]) : df;
+ type = dict.get('Subtype');
+
+ if (!(0, _primitives.isName)(type)) {
+ throw new _util.FormatError('invalid font Subtype');
+ }
+
+ composite = true;
+ }
+
+ var descriptor = dict.get('FontDescriptor');
+
+ if (descriptor) {
+ var hash = new _murmurhash.MurmurHash3_64();
+ var encoding = baseDict.getRaw('Encoding');
+
+ if ((0, _primitives.isName)(encoding)) {
+ hash.update(encoding.name);
+ } else if ((0, _primitives.isRef)(encoding)) {
+ hash.update(encoding.toString());
+ } else if ((0, _primitives.isDict)(encoding)) {
+ var keys = encoding.getKeys();
+
+ for (var i = 0, ii = keys.length; i < ii; i++) {
+ var entry = encoding.getRaw(keys[i]);
+
+ if ((0, _primitives.isName)(entry)) {
+ hash.update(entry.name);
+ } else if ((0, _primitives.isRef)(entry)) {
+ hash.update(entry.toString());
+ } else if (Array.isArray(entry)) {
+ var diffLength = entry.length,
+ diffBuf = new Array(diffLength);
+
+ for (var j = 0; j < diffLength; j++) {
+ var diffEntry = entry[j];
+
+ if ((0, _primitives.isName)(diffEntry)) {
+ diffBuf[j] = diffEntry.name;
+ } else if ((0, _util.isNum)(diffEntry) || (0, _primitives.isRef)(diffEntry)) {
+ diffBuf[j] = diffEntry.toString();
+ }
+ }
+
+ hash.update(diffBuf.join());
+ }
+ }
+ }
+
+ var firstChar = dict.get('FirstChar') || 0;
+ var lastChar = dict.get('LastChar') || (composite ? 0xFFFF : 0xFF);
+ hash.update("".concat(firstChar, "-").concat(lastChar));
+ var toUnicode = dict.get('ToUnicode') || baseDict.get('ToUnicode');
+
+ if ((0, _primitives.isStream)(toUnicode)) {
+ var stream = toUnicode.str || toUnicode;
+ uint8array = stream.buffer ? new Uint8Array(stream.buffer.buffer, 0, stream.bufferLength) : new Uint8Array(stream.bytes.buffer, stream.start, stream.end - stream.start);
+ hash.update(uint8array);
+ } else if ((0, _primitives.isName)(toUnicode)) {
+ hash.update(toUnicode.name);
+ }
+
+ var widths = dict.get('Widths') || baseDict.get('Widths');
+
+ if (widths) {
+ uint8array = new Uint8Array(new Uint32Array(widths).buffer);
+ hash.update(uint8array);
+ }
+ }
+
+ return {
+ descriptor: descriptor,
+ dict: dict,
+ baseDict: baseDict,
+ composite: composite,
+ type: type.name,
+ hash: hash ? hash.hexdigest() : ''
+ };
+ },
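+ // Turns a pre-evaluated font dictionary into a Font object: synthesizes a
+ // descriptor for standard and Type3 fonts, loads the CMap for composite
+ // fonts, then extracts encoding, ToUnicode and width data.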
+ translateFont: function PartialEvaluator_translateFont(preEvaluatedFont) {
+ var _this10 = this;
+
+ var baseDict = preEvaluatedFont.baseDict;
+ var dict = preEvaluatedFont.dict;
+ var composite = preEvaluatedFont.composite;
+ var descriptor = preEvaluatedFont.descriptor;
+ var type = preEvaluatedFont.type;
+ var maxCharIndex = composite ? 0xFFFF : 0xFF;
+ var properties;
+
+ if (!descriptor) {
+ if (type === 'Type3') {
+ descriptor = new _primitives.Dict(null);
+ descriptor.set('FontName', _primitives.Name.get(type));
+ descriptor.set('FontBBox', dict.getArray('FontBBox'));
+ } else {
+ var baseFontName = dict.get('BaseFont');
+
+ if (!(0, _primitives.isName)(baseFontName)) {
+ throw new _util.FormatError('Base font is not specified');
+ }
+
+ baseFontName = baseFontName.name.replace(/[,_]/g, '-');
+ var metrics = this.getBaseFontMetrics(baseFontName);
+ var fontNameWoStyle = baseFontName.split('-')[0];
+ var flags = (this.isSerifFont(fontNameWoStyle) ? _fonts.FontFlags.Serif : 0) | (metrics.monospace ? _fonts.FontFlags.FixedPitch : 0) | ((0, _standard_fonts.getSymbolsFonts)()[fontNameWoStyle] ? _fonts.FontFlags.Symbolic : _fonts.FontFlags.Nonsymbolic);
+ properties = {
+ type: type,
+ name: baseFontName,
+ widths: metrics.widths,
+ defaultWidth: metrics.defaultWidth,
+ flags: flags,
+ firstChar: 0,
+ lastChar: maxCharIndex
+ };
+ return this.extractDataStructures(dict, dict, properties).then(function (properties) {
+ properties.widths = _this10.buildCharCodeToWidth(metrics.widths, properties);
+ return new _fonts.Font(baseFontName, null, properties);
+ });
+ }
+ }
+
+ var firstChar = dict.get('FirstChar') || 0;
+ var lastChar = dict.get('LastChar') || maxCharIndex;
+ var fontName = descriptor.get('FontName');
+ var baseFont = dict.get('BaseFont');
+
+ if ((0, _util.isString)(fontName)) {
+ fontName = _primitives.Name.get(fontName);
+ }
+
+ if ((0, _util.isString)(baseFont)) {
+ baseFont = _primitives.Name.get(baseFont);
+ }
+
+ if (type !== 'Type3') {
+ var fontNameStr = fontName && fontName.name;
+ var baseFontStr = baseFont && baseFont.name;
+
+ if (fontNameStr !== baseFontStr) {
+ (0, _util.info)("The FontDescriptor's FontName is \"".concat(fontNameStr, "\" but ") + "should be the same as the Font's BaseFont \"".concat(baseFontStr, "\"."));
+
+ if (fontNameStr && baseFontStr && baseFontStr.startsWith(fontNameStr)) {
+ fontName = baseFont;
+ }
+ }
+ }
+
+ fontName = fontName || baseFont;
+
+ if (!(0, _primitives.isName)(fontName)) {
+ throw new _util.FormatError('invalid font name');
+ }
+
+ var fontFile = descriptor.get('FontFile', 'FontFile2', 'FontFile3');
+
+ if (fontFile) {
+ if (fontFile.dict) {
+ var subtype = fontFile.dict.get('Subtype');
+
+ if (subtype) {
+ subtype = subtype.name;
+ }
+
+ var length1 = fontFile.dict.get('Length1');
+ var length2 = fontFile.dict.get('Length2');
+ var length3 = fontFile.dict.get('Length3');
+ }
+ }
+
+ properties = {
+ type: type,
+ name: fontName.name,
+ subtype: subtype,
+ file: fontFile,
+ length1: length1,
+ length2: length2,
+ length3: length3,
+ loadedName: baseDict.loadedName,
+ composite: composite,
+ wideChars: composite,
+ fixedPitch: false,
+ fontMatrix: dict.getArray('FontMatrix') || _util.FONT_IDENTITY_MATRIX,
+ firstChar: firstChar || 0,
+ lastChar: lastChar || maxCharIndex,
+ bbox: descriptor.getArray('FontBBox'),
+ ascent: descriptor.get('Ascent'),
+ descent: descriptor.get('Descent'),
+ xHeight: descriptor.get('XHeight'),
+ capHeight: descriptor.get('CapHeight'),
+ flags: descriptor.get('Flags'),
+ italicAngle: descriptor.get('ItalicAngle'),
+ isType3Font: false
+ };
+ var cMapPromise;
+
+ if (composite) {
+ var cidEncoding = baseDict.get('Encoding');
+
+ if ((0, _primitives.isName)(cidEncoding)) {
+ properties.cidEncoding = cidEncoding.name;
+ }
+
+ cMapPromise = _cmap.CMapFactory.create({
+ encoding: cidEncoding,
+ fetchBuiltInCMap: this.fetchBuiltInCMap,
+ useCMap: null
+ }).then(function (cMap) {
+ properties.cMap = cMap;
+ properties.vertical = properties.cMap.vertical;
+ });
+ } else {
+ cMapPromise = Promise.resolve(undefined);
+ }
+
+ return cMapPromise.then(function () {
+ return _this10.extractDataStructures(dict, baseDict, properties);
+ }).then(function (properties) {
+ _this10.extractWidths(dict, descriptor, properties);
+
+ if (type === 'Type3') {
+ properties.isType3Font = true;
+ }
+
+ return new _fonts.Font(fontName.name, fontFile, properties);
+ });
+ }
+ };
+
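+ // Sends the vector path of each glyph (and of any accent glyph) to the main
+ // thread as a 'FontPath' common object, skipping paths already built.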
+ PartialEvaluator.buildFontPaths = function (font, glyphs, handler) {
+ function buildPath(fontChar) {
+ if (font.renderer.hasBuiltPath(fontChar)) {
+ return;
+ }
+
+ handler.send('commonobj', ["".concat(font.loadedName, "_path_").concat(fontChar), 'FontPath', font.renderer.getPathJs(fontChar)]);
+ }
+
+ var _iteratorNormalCompletion = true;
+ var _didIteratorError = false;
+ var _iteratorError = undefined;
+
+ try {
+ for (var _iterator = glyphs[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
+ var glyph = _step.value;
+ buildPath(glyph.fontChar);
+ var accent = glyph.accent;
+
+ if (accent && accent.fontChar) {
+ buildPath(accent.fontChar);
+ }
+ }
+ } catch (err) {
+ _didIteratorError = true;
+ _iteratorError = err;
+ } finally {
+ try {
+ if (!_iteratorNormalCompletion && _iterator["return"] != null) {
+ _iterator["return"]();
+ }
+ } finally {
+ if (_didIteratorError) {
+ throw _iteratorError;
+ }
+ }
+ }
+ };
+
+ return PartialEvaluator;
+}();
+
+exports.PartialEvaluator = PartialEvaluator;
+
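+ // Wraps a translated font: send() transfers it over the handler once,
+ // fallback() switches to drawing glyph paths, and loadType3Data() lazily
+ // builds the operator lists of the Type3 CharProcs.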
+var TranslatedFont = function TranslatedFontClosure() {
+ function TranslatedFont(loadedName, font, dict) {
+ this.loadedName = loadedName;
+ this.font = font;
+ this.dict = dict;
+ this.type3Loaded = null;
+ this.sent = false;
+ }
+
+ TranslatedFont.prototype = {
+ send: function send(handler) {
+ if (this.sent) {
+ return;
+ }
+
+ this.sent = true;
+ handler.send('commonobj', [this.loadedName, 'Font', this.font.exportData()]);
+ },
+ fallback: function fallback(handler) {
+ if (!this.font.data) {
+ return;
+ }
+
+ this.font.disableFontFace = true;
+ var glyphs = this.font.glyphCacheValues;
+ PartialEvaluator.buildFontPaths(this.font, glyphs, handler);
+ },
+ loadType3Data: function loadType3Data(evaluator, resources, parentOperatorList, task) {
+ if (!this.font.isType3Font) {
+ throw new Error('Must be a Type3 font.');
+ }
+
+ if (this.type3Loaded) {
+ return this.type3Loaded;
+ }
+
+ var type3Options = Object.create(evaluator.options);
+ type3Options.ignoreErrors = false;
+ type3Options.nativeImageDecoderSupport = _util.NativeImageDecoding.NONE;
+ var type3Evaluator = evaluator.clone(type3Options);
+ type3Evaluator.parsingType3Font = true;
+ var translatedFont = this.font;
+ var loadCharProcsPromise = Promise.resolve();
+ var charProcs = this.dict.get('CharProcs');
+ var fontResources = this.dict.get('Resources') || resources;
+ var charProcKeys = charProcs.getKeys();
+ var charProcOperatorList = Object.create(null);
+
+ var _loop2 = function _loop2() {
+ var key = charProcKeys[i];
+ loadCharProcsPromise = loadCharProcsPromise.then(function () {
+ var glyphStream = charProcs.get(key);
+ var operatorList = new _operator_list.OperatorList();
+ return type3Evaluator.getOperatorList({
+ stream: glyphStream,
+ task: task,
+ resources: fontResources,
+ operatorList: operatorList
+ }).then(function () {
+ charProcOperatorList[key] = operatorList.getIR();
+ parentOperatorList.addDependencies(operatorList.dependencies);
+ })["catch"](function (reason) {
+ (0, _util.warn)("Type3 font resource \"".concat(key, "\" is not available."));
+ var operatorList = new _operator_list.OperatorList();
+ charProcOperatorList[key] = operatorList.getIR();
+ });
+ });
+ };
+
+ for (var i = 0, n = charProcKeys.length; i < n; ++i) {
+ _loop2();
+ }
+
+ this.type3Loaded = loadCharProcsPromise.then(function () {
+ translatedFont.charProcOperatorList = charProcOperatorList;
+ });
+ return this.type3Loaded;
+ }
+ };
+ return TranslatedFont;
+}();
+
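+ // Maintains the graphics-state stack (save/restore/transform) during parsing.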
+var StateManager = function StateManagerClosure() {
+ function StateManager(initialState) {
+ this.state = initialState;
+ this.stateStack = [];
+ }
+
+ StateManager.prototype = {
+ save: function save() {
+ var old = this.state;
+ this.stateStack.push(this.state);
+ this.state = old.clone();
+ },
+ restore: function restore() {
+ var prev = this.stateStack.pop();
+
+ if (prev) {
+ this.state = prev;
+ }
+ },
+ transform: function transform(args) {
+ this.state.ctm = _util.Util.transform(this.state.ctm, args);
+ }
+ };
+ return StateManager;
+}();
+
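+ // Tracks the PDF text state: font, text and text-line matrices, character and
+ // word spacing, horizontal scale, leading and text rise.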
+var TextState = function TextStateClosure() {
+ function TextState() {
+ this.ctm = new Float32Array(_util.IDENTITY_MATRIX);
+ this.fontName = null;
+ this.fontSize = 0;
+ this.font = null;
+ this.fontMatrix = _util.FONT_IDENTITY_MATRIX;
+ this.textMatrix = _util.IDENTITY_MATRIX.slice();
+ this.textLineMatrix = _util.IDENTITY_MATRIX.slice();
+ this.charSpacing = 0;
+ this.wordSpacing = 0;
+ this.leading = 0;
+ this.textHScale = 1;
+ this.textRise = 0;
+ }
+
+ TextState.prototype = {
+ setTextMatrix: function TextState_setTextMatrix(a, b, c, d, e, f) {
+ var m = this.textMatrix;
+ m[0] = a;
+ m[1] = b;
+ m[2] = c;
+ m[3] = d;
+ m[4] = e;
+ m[5] = f;
+ },
+ setTextLineMatrix: function TextState_setTextLineMatrix(a, b, c, d, e, f) {
+ var m = this.textLineMatrix;
+ m[0] = a;
+ m[1] = b;
+ m[2] = c;
+ m[3] = d;
+ m[4] = e;
+ m[5] = f;
+ },
+ translateTextMatrix: function TextState_translateTextMatrix(x, y) {
+ var m = this.textMatrix;
+ m[4] = m[0] * x + m[2] * y + m[4];
+ m[5] = m[1] * x + m[3] * y + m[5];
+ },
+ translateTextLineMatrix: function TextState_translateTextLineMatrix(x, y) {
+ var m = this.textLineMatrix;
+ m[4] = m[0] * x + m[2] * y + m[4];
+ m[5] = m[1] * x + m[3] * y + m[5];
+ },
+ calcTextLineMatrixAdvance: function TextState_calcTextLineMatrixAdvance(a, b, c, d, e, f) {
+ var font = this.font;
+
+ if (!font) {
+ return null;
+ }
+
+ var m = this.textLineMatrix;
+
+ if (!(a === m[0] && b === m[1] && c === m[2] && d === m[3])) {
+ return null;
+ }
+
+ var txDiff = e - m[4],
+ tyDiff = f - m[5];
+
+ if (font.vertical && txDiff !== 0 || !font.vertical && tyDiff !== 0) {
+ return null;
+ }
+
+ var tx,
+ ty,
+ denominator = a * d - b * c;
+
+ if (font.vertical) {
+ tx = -tyDiff * c / denominator;
+ ty = tyDiff * a / denominator;
+ } else {
+ tx = txDiff * d / denominator;
+ ty = -txDiff * b / denominator;
+ }
+
+ return {
+ width: tx,
+ height: ty,
+ value: font.vertical ? ty : tx
+ };
+ },
+ calcRenderMatrix: function TextState_calcRenderMatrix(ctm) {
+ var tsm = [this.fontSize * this.textHScale, 0, 0, this.fontSize, 0, this.textRise];
+ return _util.Util.transform(ctm, _util.Util.transform(this.textMatrix, tsm));
+ },
+ carriageReturn: function TextState_carriageReturn() {
+ this.translateTextLineMatrix(0, -this.leading);
+ this.textMatrix = this.textLineMatrix.slice();
+ },
+ clone: function TextState_clone() {
+ var clone = Object.create(this);
+ clone.textMatrix = this.textMatrix.slice();
+ clone.textLineMatrix = this.textLineMatrix.slice();
+ clone.fontMatrix = this.fontMatrix.slice();
+ return clone;
+ }
+ };
+ return TextState;
+}();
+
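+ // Graphics state used while building operator lists: color spaces, font and
+ // text rendering mode.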
+var EvalState = function EvalStateClosure() {
+ function EvalState() {
+ this.ctm = new Float32Array(_util.IDENTITY_MATRIX);
+ this.font = null;
+ this.textRenderingMode = _util.TextRenderingMode.FILL;
+ this.fillColorSpace = _colorspace.ColorSpace.singletons.gray;
+ this.strokeColorSpace = _colorspace.ColorSpace.singletons.gray;
+ }
+
+ EvalState.prototype = {
+ clone: function EvalState_clone() {
+ return Object.create(this);
+ }
+ };
+ return EvalState;
+}();
+
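+ // Tokenizes the content stream into operations: maps operator keywords (e.g.
+ // 'Tj', 'Do', 'cm') to OPS ids, validates argument counts, and applies
+ // save/restore/transform to the state manager.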
+var EvaluatorPreprocessor = function EvaluatorPreprocessorClosure() {
+ var getOPMap = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['w'] = {
+ id: _util.OPS.setLineWidth,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['J'] = {
+ id: _util.OPS.setLineCap,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['j'] = {
+ id: _util.OPS.setLineJoin,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['M'] = {
+ id: _util.OPS.setMiterLimit,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['d'] = {
+ id: _util.OPS.setDash,
+ numArgs: 2,
+ variableArgs: false
+ };
+ t['ri'] = {
+ id: _util.OPS.setRenderingIntent,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['i'] = {
+ id: _util.OPS.setFlatness,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['gs'] = {
+ id: _util.OPS.setGState,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['q'] = {
+ id: _util.OPS.save,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['Q'] = {
+ id: _util.OPS.restore,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['cm'] = {
+ id: _util.OPS.transform,
+ numArgs: 6,
+ variableArgs: false
+ };
+ t['m'] = {
+ id: _util.OPS.moveTo,
+ numArgs: 2,
+ variableArgs: false
+ };
+ t['l'] = {
+ id: _util.OPS.lineTo,
+ numArgs: 2,
+ variableArgs: false
+ };
+ t['c'] = {
+ id: _util.OPS.curveTo,
+ numArgs: 6,
+ variableArgs: false
+ };
+ t['v'] = {
+ id: _util.OPS.curveTo2,
+ numArgs: 4,
+ variableArgs: false
+ };
+ t['y'] = {
+ id: _util.OPS.curveTo3,
+ numArgs: 4,
+ variableArgs: false
+ };
+ t['h'] = {
+ id: _util.OPS.closePath,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['re'] = {
+ id: _util.OPS.rectangle,
+ numArgs: 4,
+ variableArgs: false
+ };
+ t['S'] = {
+ id: _util.OPS.stroke,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['s'] = {
+ id: _util.OPS.closeStroke,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['f'] = {
+ id: _util.OPS.fill,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['F'] = {
+ id: _util.OPS.fill,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['f*'] = {
+ id: _util.OPS.eoFill,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['B'] = {
+ id: _util.OPS.fillStroke,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['B*'] = {
+ id: _util.OPS.eoFillStroke,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['b'] = {
+ id: _util.OPS.closeFillStroke,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['b*'] = {
+ id: _util.OPS.closeEOFillStroke,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['n'] = {
+ id: _util.OPS.endPath,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['W'] = {
+ id: _util.OPS.clip,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['W*'] = {
+ id: _util.OPS.eoClip,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['BT'] = {
+ id: _util.OPS.beginText,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['ET'] = {
+ id: _util.OPS.endText,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['Tc'] = {
+ id: _util.OPS.setCharSpacing,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['Tw'] = {
+ id: _util.OPS.setWordSpacing,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['Tz'] = {
+ id: _util.OPS.setHScale,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['TL'] = {
+ id: _util.OPS.setLeading,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['Tf'] = {
+ id: _util.OPS.setFont,
+ numArgs: 2,
+ variableArgs: false
+ };
+ t['Tr'] = {
+ id: _util.OPS.setTextRenderingMode,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['Ts'] = {
+ id: _util.OPS.setTextRise,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['Td'] = {
+ id: _util.OPS.moveText,
+ numArgs: 2,
+ variableArgs: false
+ };
+ t['TD'] = {
+ id: _util.OPS.setLeadingMoveText,
+ numArgs: 2,
+ variableArgs: false
+ };
+ t['Tm'] = {
+ id: _util.OPS.setTextMatrix,
+ numArgs: 6,
+ variableArgs: false
+ };
+ t['T*'] = {
+ id: _util.OPS.nextLine,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['Tj'] = {
+ id: _util.OPS.showText,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['TJ'] = {
+ id: _util.OPS.showSpacedText,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['\''] = {
+ id: _util.OPS.nextLineShowText,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['"'] = {
+ id: _util.OPS.nextLineSetSpacingShowText,
+ numArgs: 3,
+ variableArgs: false
+ };
+ t['d0'] = {
+ id: _util.OPS.setCharWidth,
+ numArgs: 2,
+ variableArgs: false
+ };
+ t['d1'] = {
+ id: _util.OPS.setCharWidthAndBounds,
+ numArgs: 6,
+ variableArgs: false
+ };
+ t['CS'] = {
+ id: _util.OPS.setStrokeColorSpace,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['cs'] = {
+ id: _util.OPS.setFillColorSpace,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['SC'] = {
+ id: _util.OPS.setStrokeColor,
+ numArgs: 4,
+ variableArgs: true
+ };
+ t['SCN'] = {
+ id: _util.OPS.setStrokeColorN,
+ numArgs: 33,
+ variableArgs: true
+ };
+ t['sc'] = {
+ id: _util.OPS.setFillColor,
+ numArgs: 4,
+ variableArgs: true
+ };
+ t['scn'] = {
+ id: _util.OPS.setFillColorN,
+ numArgs: 33,
+ variableArgs: true
+ };
+ t['G'] = {
+ id: _util.OPS.setStrokeGray,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['g'] = {
+ id: _util.OPS.setFillGray,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['RG'] = {
+ id: _util.OPS.setStrokeRGBColor,
+ numArgs: 3,
+ variableArgs: false
+ };
+ t['rg'] = {
+ id: _util.OPS.setFillRGBColor,
+ numArgs: 3,
+ variableArgs: false
+ };
+ t['K'] = {
+ id: _util.OPS.setStrokeCMYKColor,
+ numArgs: 4,
+ variableArgs: false
+ };
+ t['k'] = {
+ id: _util.OPS.setFillCMYKColor,
+ numArgs: 4,
+ variableArgs: false
+ };
+ t['sh'] = {
+ id: _util.OPS.shadingFill,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['BI'] = {
+ id: _util.OPS.beginInlineImage,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['ID'] = {
+ id: _util.OPS.beginImageData,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['EI'] = {
+ id: _util.OPS.endInlineImage,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['Do'] = {
+ id: _util.OPS.paintXObject,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['MP'] = {
+ id: _util.OPS.markPoint,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['DP'] = {
+ id: _util.OPS.markPointProps,
+ numArgs: 2,
+ variableArgs: false
+ };
+ t['BMC'] = {
+ id: _util.OPS.beginMarkedContent,
+ numArgs: 1,
+ variableArgs: false
+ };
+ t['BDC'] = {
+ id: _util.OPS.beginMarkedContentProps,
+ numArgs: 2,
+ variableArgs: false
+ };
+ t['EMC'] = {
+ id: _util.OPS.endMarkedContent,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['BX'] = {
+ id: _util.OPS.beginCompat,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['EX'] = {
+ id: _util.OPS.endCompat,
+ numArgs: 0,
+ variableArgs: false
+ };
+ t['BM'] = null;
+ t['BD'] = null;
+ t['true'] = null;
+ t['fa'] = null;
+ t['fal'] = null;
+ t['fals'] = null;
+ t['false'] = null;
+ t['nu'] = null;
+ t['nul'] = null;
+ t['null'] = null;
+ });
+ var MAX_INVALID_PATH_OPS = 20;
+
+ function EvaluatorPreprocessor(stream, xref, stateManager) {
+ this.opMap = getOPMap();
+ this.parser = new _parser.Parser({
+ lexer: new _parser.Lexer(stream, this.opMap),
+ xref: xref
+ });
+ this.stateManager = stateManager;
+ this.nonProcessedArgs = [];
+ this._numInvalidPathOPS = 0;
+ }
+
+ EvaluatorPreprocessor.prototype = {
+ get savedStatesDepth() {
+ return this.stateManager.stateStack.length;
+ },
+
+ read: function EvaluatorPreprocessor_read(operation) {
+ var args = operation.args;
+
+ while (true) {
+ var obj = this.parser.getObj();
+
+ if ((0, _primitives.isCmd)(obj)) {
+ var cmd = obj.cmd;
+ var opSpec = this.opMap[cmd];
+
+ if (!opSpec) {
+ (0, _util.warn)("Unknown command \"".concat(cmd, "\"."));
+ continue;
+ }
+
+ var fn = opSpec.id;
+ var numArgs = opSpec.numArgs;
+ var argsLength = args !== null ? args.length : 0;
+
+ if (!opSpec.variableArgs) {
+ if (argsLength !== numArgs) {
+ var nonProcessedArgs = this.nonProcessedArgs;
+
+ while (argsLength > numArgs) {
+ nonProcessedArgs.push(args.shift());
+ argsLength--;
+ }
+
+ while (argsLength < numArgs && nonProcessedArgs.length !== 0) {
+ if (args === null) {
+ args = [];
+ }
+
+ args.unshift(nonProcessedArgs.pop());
+ argsLength++;
+ }
+ }
+
+ if (argsLength < numArgs) {
+ var partialMsg = "command ".concat(cmd, ": expected ").concat(numArgs, " args, ") + "but received ".concat(argsLength, " args.");
+
+ if (fn >= _util.OPS.moveTo && fn <= _util.OPS.endPath && ++this._numInvalidPathOPS > MAX_INVALID_PATH_OPS) {
+ throw new _util.FormatError("Invalid ".concat(partialMsg));
+ }
+
+ (0, _util.warn)("Skipping ".concat(partialMsg));
+
+ if (args !== null) {
+ args.length = 0;
+ }
+
+ continue;
+ }
+ } else if (argsLength > numArgs) {
+ (0, _util.info)("Command ".concat(cmd, ": expected [0, ").concat(numArgs, "] args, ") + "but received ".concat(argsLength, " args."));
+ }
+
+ this.preprocessCommand(fn, args);
+ operation.fn = fn;
+ operation.args = args;
+ return true;
+ }
+
+ if ((0, _primitives.isEOF)(obj)) {
+ return false;
+ }
+
+ if (obj !== null) {
+ if (args === null) {
+ args = [];
+ }
+
+ args.push(obj);
+
+ if (args.length > 33) {
+ throw new _util.FormatError('Too many arguments');
+ }
+ }
+ }
+ },
+ preprocessCommand: function EvaluatorPreprocessor_preprocessCommand(fn, args) {
+ switch (fn | 0) {
+ case _util.OPS.save:
+ this.stateManager.save();
+ break;
+
+ case _util.OPS.restore:
+ this.stateManager.restore();
+ break;
+
+ case _util.OPS.transform:
+ this.stateManager.transform(args);
+ break;
+ }
+ }
+ };
+ return EvaluatorPreprocessor;
+}();
+
+/***/ }),
+/* 173 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.CMapFactory = exports.IdentityCMap = exports.CMap = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _parser = __w_pdfjs_require__(157);
+
+var _core_utils = __w_pdfjs_require__(154);
+
+var _stream = __w_pdfjs_require__(158);
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
+
+function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
+
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
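+ // Names of the predefined CMaps that can be loaded as built-in resources.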
+var BUILT_IN_CMAPS = ['Adobe-GB1-UCS2', 'Adobe-CNS1-UCS2', 'Adobe-Japan1-UCS2', 'Adobe-Korea1-UCS2', '78-EUC-H', '78-EUC-V', '78-H', '78-RKSJ-H', '78-RKSJ-V', '78-V', '78ms-RKSJ-H', '78ms-RKSJ-V', '83pv-RKSJ-H', '90ms-RKSJ-H', '90ms-RKSJ-V', '90msp-RKSJ-H', '90msp-RKSJ-V', '90pv-RKSJ-H', '90pv-RKSJ-V', 'Add-H', 'Add-RKSJ-H', 'Add-RKSJ-V', 'Add-V', 'Adobe-CNS1-0', 'Adobe-CNS1-1', 'Adobe-CNS1-2', 'Adobe-CNS1-3', 'Adobe-CNS1-4', 'Adobe-CNS1-5', 'Adobe-CNS1-6', 'Adobe-GB1-0', 'Adobe-GB1-1', 'Adobe-GB1-2', 'Adobe-GB1-3', 'Adobe-GB1-4', 'Adobe-GB1-5', 'Adobe-Japan1-0', 'Adobe-Japan1-1', 'Adobe-Japan1-2', 'Adobe-Japan1-3', 'Adobe-Japan1-4', 'Adobe-Japan1-5', 'Adobe-Japan1-6', 'Adobe-Korea1-0', 'Adobe-Korea1-1', 'Adobe-Korea1-2', 'B5-H', 'B5-V', 'B5pc-H', 'B5pc-V', 'CNS-EUC-H', 'CNS-EUC-V', 'CNS1-H', 'CNS1-V', 'CNS2-H', 'CNS2-V', 'ETHK-B5-H', 'ETHK-B5-V', 'ETen-B5-H', 'ETen-B5-V', 'ETenms-B5-H', 'ETenms-B5-V', 'EUC-H', 'EUC-V', 'Ext-H', 'Ext-RKSJ-H', 'Ext-RKSJ-V', 'Ext-V', 'GB-EUC-H', 'GB-EUC-V', 'GB-H', 'GB-V', 'GBK-EUC-H', 'GBK-EUC-V', 'GBK2K-H', 'GBK2K-V', 'GBKp-EUC-H', 'GBKp-EUC-V', 'GBT-EUC-H', 'GBT-EUC-V', 'GBT-H', 'GBT-V', 'GBTpc-EUC-H', 'GBTpc-EUC-V', 'GBpc-EUC-H', 'GBpc-EUC-V', 'H', 'HKdla-B5-H', 'HKdla-B5-V', 'HKdlb-B5-H', 'HKdlb-B5-V', 'HKgccs-B5-H', 'HKgccs-B5-V', 'HKm314-B5-H', 'HKm314-B5-V', 'HKm471-B5-H', 'HKm471-B5-V', 'HKscs-B5-H', 'HKscs-B5-V', 'Hankaku', 'Hiragana', 'KSC-EUC-H', 'KSC-EUC-V', 'KSC-H', 'KSC-Johab-H', 'KSC-Johab-V', 'KSC-V', 'KSCms-UHC-H', 'KSCms-UHC-HW-H', 'KSCms-UHC-HW-V', 'KSCms-UHC-V', 'KSCpc-EUC-H', 'KSCpc-EUC-V', 'Katakana', 'NWP-H', 'NWP-V', 'RKSJ-H', 'RKSJ-V', 'Roman', 'UniCNS-UCS2-H', 'UniCNS-UCS2-V', 'UniCNS-UTF16-H', 'UniCNS-UTF16-V', 'UniCNS-UTF32-H', 'UniCNS-UTF32-V', 'UniCNS-UTF8-H', 'UniCNS-UTF8-V', 'UniGB-UCS2-H', 'UniGB-UCS2-V', 'UniGB-UTF16-H', 'UniGB-UTF16-V', 'UniGB-UTF32-H', 'UniGB-UTF32-V', 'UniGB-UTF8-H', 'UniGB-UTF8-V', 'UniJIS-UCS2-H', 'UniJIS-UCS2-HW-H', 'UniJIS-UCS2-HW-V', 'UniJIS-UCS2-V', 'UniJIS-UTF16-H', 'UniJIS-UTF16-V', 'UniJIS-UTF32-H', 'UniJIS-UTF32-V', 'UniJIS-UTF8-H', 'UniJIS-UTF8-V', 'UniJIS2004-UTF16-H', 'UniJIS2004-UTF16-V', 'UniJIS2004-UTF32-H', 'UniJIS2004-UTF32-V', 'UniJIS2004-UTF8-H', 'UniJIS2004-UTF8-V', 'UniJISPro-UCS2-HW-V', 'UniJISPro-UCS2-V', 'UniJISPro-UTF8-V', 'UniJISX0213-UTF32-H', 'UniJISX0213-UTF32-V', 'UniJISX02132004-UTF32-H', 'UniJISX02132004-UTF32-V', 'UniKS-UCS2-H', 'UniKS-UCS2-V', 'UniKS-UTF16-H', 'UniKS-UTF16-V', 'UniKS-UTF32-H', 'UniKS-UTF32-V', 'UniKS-UTF8-H', 'UniKS-UTF8-V', 'V', 'WP-Symbol'];
+
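+ // A CMap maps character codes to CIDs (or Unicode strings) and records the
+ // codespace ranges declared in the CMap program.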
+var CMap =
+/*#__PURE__*/
+function () {
+ function CMap() {
+ var builtInCMap = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
+
+ _classCallCheck(this, CMap);
+
+ this.codespaceRanges = [[], [], [], []];
+ this.numCodespaceRanges = 0;
+ this._map = [];
+ this.name = '';
+ this.vertical = false;
+ this.useCMap = null;
+ this.builtInCMap = builtInCMap;
+ }
+
+ _createClass(CMap, [{
+ key: "addCodespaceRange",
+ value: function addCodespaceRange(n, low, high) {
+ this.codespaceRanges[n - 1].push(low, high);
+ this.numCodespaceRanges++;
+ }
+ }, {
+ key: "mapCidRange",
+ value: function mapCidRange(low, high, dstLow) {
+ while (low <= high) {
+ this._map[low++] = dstLow++;
+ }
+ }
+ }, {
+ key: "mapBfRange",
+ value: function mapBfRange(low, high, dstLow) {
+ var lastByte = dstLow.length - 1;
+
+ while (low <= high) {
+ this._map[low++] = dstLow;
+ dstLow = dstLow.substring(0, lastByte) + String.fromCharCode(dstLow.charCodeAt(lastByte) + 1);
+ }
+ }
+ }, {
+ key: "mapBfRangeToArray",
+ value: function mapBfRangeToArray(low, high, array) {
+ var i = 0,
+ ii = array.length;
+
+ while (low <= high && i < ii) {
+ this._map[low] = array[i++];
+ ++low;
+ }
+ }
+ }, {
+ key: "mapOne",
+ value: function mapOne(src, dst) {
+ this._map[src] = dst;
+ }
+ }, {
+ key: "lookup",
+ value: function lookup(code) {
+ return this._map[code];
+ }
+ }, {
+ key: "contains",
+ value: function contains(code) {
+ return this._map[code] !== undefined;
+ }
+ }, {
+ key: "forEach",
+ value: function forEach(callback) {
+ var map = this._map;
+ var length = map.length;
+
+ if (length <= 0x10000) {
+ for (var i = 0; i < length; i++) {
+ if (map[i] !== undefined) {
+ callback(i, map[i]);
+ }
+ }
+ } else {
+ for (var _i in map) {
+ callback(_i, map[_i]);
+ }
+ }
+ }
+ }, {
+ key: "charCodeOf",
+ value: function charCodeOf(value) {
+ var map = this._map;
+
+ if (map.length <= 0x10000) {
+ return map.indexOf(value);
+ }
+
+ for (var charCode in map) {
+ if (map[charCode] === value) {
+ return charCode | 0;
+ }
+ }
+
+ return -1;
+ }
+ }, {
+ key: "getMap",
+ value: function getMap() {
+ return this._map;
+ }
+ }, {
+ key: "readCharCode",
+ value: function readCharCode(str, offset, out) {
+ var c = 0;
+ var codespaceRanges = this.codespaceRanges;
+
+ for (var n = 0, nn = codespaceRanges.length; n < nn; n++) {
+ c = (c << 8 | str.charCodeAt(offset + n)) >>> 0;
+ var codespaceRange = codespaceRanges[n];
+
+ for (var k = 0, kk = codespaceRange.length; k < kk;) {
+ var low = codespaceRange[k++];
+ var high = codespaceRange[k++];
+
+ if (c >= low && c <= high) {
+ out.charcode = c;
+ out.length = n + 1;
+ return;
+ }
+ }
+ }
+
+ out.charcode = 0;
+ out.length = 1;
+ }
+ }, {
+ key: "length",
+ get: function get() {
+ return this._map.length;
+ }
+ }, {
+ key: "isIdentityCMap",
+ get: function get() {
+ if (!(this.name === 'Identity-H' || this.name === 'Identity-V')) {
+ return false;
+ }
+
+ if (this._map.length !== 0x10000) {
+ return false;
+ }
+
+ for (var i = 0; i < 0x10000; i++) {
+ if (this._map[i] !== i) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+ }]);
+
+ return CMap;
+}();
+
+exports.CMap = CMap;
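+// CMap stores its data sparsely: `codespaceRanges` holds four flat
+// [low, high, low, high, ...] arrays, one per code byte length (1-4 bytes),
+// and `_map` is a sparse array keyed by character code whose values are CIDs
+// for cid mappings or destination strings for bf mappings. readCharCode()
+// consumes up to four bytes of `str` starting at `offset` and reports the
+// matched code and its byte length through `out`, defaulting to a single
+// zero byte when no codespace range matches.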
+
+var IdentityCMap =
+/*#__PURE__*/
+function (_CMap) {
+ _inherits(IdentityCMap, _CMap);
+
+ function IdentityCMap(vertical, n) {
+ var _this;
+
+ _classCallCheck(this, IdentityCMap);
+
+ _this = _possibleConstructorReturn(this, _getPrototypeOf(IdentityCMap).call(this));
+ _this.vertical = vertical;
+
+ _this.addCodespaceRange(n, 0, 0xffff);
+
+ return _this;
+ }
+
+ _createClass(IdentityCMap, [{
+ key: "mapCidRange",
+ value: function mapCidRange(low, high, dstLow) {
+ (0, _util.unreachable)('should not call mapCidRange');
+ }
+ }, {
+ key: "mapBfRange",
+ value: function mapBfRange(low, high, dstLow) {
+ (0, _util.unreachable)('should not call mapBfRange');
+ }
+ }, {
+ key: "mapBfRangeToArray",
+ value: function mapBfRangeToArray(low, high, array) {
+ (0, _util.unreachable)('should not call mapBfRangeToArray');
+ }
+ }, {
+ key: "mapOne",
+ value: function mapOne(src, dst) {
+ (0, _util.unreachable)('should not call mapOne');
+ }
+ }, {
+ key: "lookup",
+ value: function lookup(code) {
+ return Number.isInteger(code) && code <= 0xffff ? code : undefined;
+ }
+ }, {
+ key: "contains",
+ value: function contains(code) {
+ return Number.isInteger(code) && code <= 0xffff;
+ }
+ }, {
+ key: "forEach",
+ value: function forEach(callback) {
+ for (var i = 0; i <= 0xffff; i++) {
+ callback(i, i);
+ }
+ }
+ }, {
+ key: "charCodeOf",
+ value: function charCodeOf(value) {
+ return Number.isInteger(value) && value <= 0xffff ? value : -1;
+ }
+ }, {
+ key: "getMap",
+ value: function getMap() {
+ var map = new Array(0x10000);
+
+ for (var i = 0; i <= 0xffff; i++) {
+ map[i] = i;
+ }
+
+ return map;
+ }
+ }, {
+ key: "length",
+ get: function get() {
+ return 0x10000;
+ }
+ }, {
+ key: "isIdentityCMap",
+ get: function get() {
+ (0, _util.unreachable)('should not access .isIdentityCMap');
+ }
+ }]);
+
+ return IdentityCMap;
+}(CMap);
+
+exports.IdentityCMap = IdentityCMap;
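+// IdentityCMap backs the predefined Identity-H/Identity-V encodings: every
+// two-byte code in 0x0000-0xFFFF maps to itself, so no table is stored and
+// the mutating methods above are intentionally unreachable.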
+
+var BinaryCMapReader = function BinaryCMapReaderClosure() {
+ function hexToInt(a, size) {
+ var n = 0;
+
+ for (var i = 0; i <= size; i++) {
+ n = n << 8 | a[i];
+ }
+
+ return n >>> 0;
+ }
+
+ function hexToStr(a, size) {
+ if (size === 1) {
+ return String.fromCharCode(a[0], a[1]);
+ }
+
+ if (size === 3) {
+ return String.fromCharCode(a[0], a[1], a[2], a[3]);
+ }
+
+ return String.fromCharCode.apply(null, a.subarray(0, size + 1));
+ }
+
+ function addHex(a, b, size) {
+ var c = 0;
+
+ for (var i = size; i >= 0; i--) {
+ c += a[i] + b[i];
+ a[i] = c & 255;
+ c >>= 8;
+ }
+ }
+
+ function incHex(a, size) {
+ var c = 1;
+
+ for (var i = size; i >= 0 && c > 0; i--) {
+ c += a[i];
+ a[i] = c & 255;
+ c >>= 8;
+ }
+ }
+
+ var MAX_NUM_SIZE = 16;
+ var MAX_ENCODED_NUM_SIZE = 19;
+
+ function BinaryCMapStream(data) {
+ this.buffer = data;
+ this.pos = 0;
+ this.end = data.length;
+ this.tmpBuf = new Uint8Array(MAX_ENCODED_NUM_SIZE);
+ }
+
+ BinaryCMapStream.prototype = {
+ readByte: function readByte() {
+ if (this.pos >= this.end) {
+ return -1;
+ }
+
+ return this.buffer[this.pos++];
+ },
+ readNumber: function readNumber() {
+ var n = 0;
+ var last;
+
+ do {
+ var b = this.readByte();
+
+ if (b < 0) {
+ throw new _util.FormatError('unexpected EOF in bcmap');
+ }
+
+ last = !(b & 0x80);
+ n = n << 7 | b & 0x7F;
+ } while (!last);
+
+ return n;
+ },
+ readSigned: function readSigned() {
+ var n = this.readNumber();
+ return n & 1 ? ~(n >>> 1) : n >>> 1;
+ },
+ readHex: function readHex(num, size) {
+ num.set(this.buffer.subarray(this.pos, this.pos + size + 1));
+ this.pos += size + 1;
+ },
+ readHexNumber: function readHexNumber(num, size) {
+ var last;
+ var stack = this.tmpBuf,
+ sp = 0;
+
+ do {
+ var b = this.readByte();
+
+ if (b < 0) {
+ throw new _util.FormatError('unexpected EOF in bcmap');
+ }
+
+ last = !(b & 0x80);
+ stack[sp++] = b & 0x7F;
+ } while (!last);
+
+ var i = size,
+ buffer = 0,
+ bufferSize = 0;
+
+ while (i >= 0) {
+ while (bufferSize < 8 && stack.length > 0) {
+ buffer = stack[--sp] << bufferSize | buffer;
+ bufferSize += 7;
+ }
+
+ num[i] = buffer & 255;
+ i--;
+ buffer >>= 8;
+ bufferSize -= 8;
+ }
+ },
+ readHexSigned: function readHexSigned(num, size) {
+ this.readHexNumber(num, size);
+ var sign = num[size] & 1 ? 255 : 0;
+ var c = 0;
+
+ for (var i = 0; i <= size; i++) {
+ c = (c & 1) << 8 | num[i];
+ num[i] = c >> 1 ^ sign;
+ }
+ },
+ readString: function readString() {
+ var len = this.readNumber();
+ var s = '';
+
+ for (var i = 0; i < len; i++) {
+ s += String.fromCharCode(this.readNumber());
+ }
+
+ return s;
+ }
+ };
+
+ function processBinaryCMap(data, cMap, extend) {
+ return new Promise(function (resolve, reject) {
+ var stream = new BinaryCMapStream(data);
+ var header = stream.readByte();
+ cMap.vertical = !!(header & 1);
+ var useCMap = null;
+ var start = new Uint8Array(MAX_NUM_SIZE);
+ var end = new Uint8Array(MAX_NUM_SIZE);
+
+ var _char = new Uint8Array(MAX_NUM_SIZE);
+
+ var charCode = new Uint8Array(MAX_NUM_SIZE);
+ var tmp = new Uint8Array(MAX_NUM_SIZE);
+ var code;
+ var b;
+
+ while ((b = stream.readByte()) >= 0) {
+ var type = b >> 5;
+
+ if (type === 7) {
+ switch (b & 0x1F) {
+ case 0:
+ stream.readString();
+ break;
+
+ case 1:
+ useCMap = stream.readString();
+ break;
+ }
+
+ continue;
+ }
+
+ var sequence = !!(b & 0x10);
+ var dataSize = b & 15;
+
+ if (dataSize + 1 > MAX_NUM_SIZE) {
+ throw new Error('processBinaryCMap: Invalid dataSize.');
+ }
+
+ var ucs2DataSize = 1;
+ var subitemsCount = stream.readNumber();
+ var i;
+
+ switch (type) {
+ case 0:
+ stream.readHex(start, dataSize);
+ stream.readHexNumber(end, dataSize);
+ addHex(end, start, dataSize);
+ cMap.addCodespaceRange(dataSize + 1, hexToInt(start, dataSize), hexToInt(end, dataSize));
+
+ for (i = 1; i < subitemsCount; i++) {
+ incHex(end, dataSize);
+ stream.readHexNumber(start, dataSize);
+ addHex(start, end, dataSize);
+ stream.readHexNumber(end, dataSize);
+ addHex(end, start, dataSize);
+ cMap.addCodespaceRange(dataSize + 1, hexToInt(start, dataSize), hexToInt(end, dataSize));
+ }
+
+ break;
+
+ case 1:
+ stream.readHex(start, dataSize);
+ stream.readHexNumber(end, dataSize);
+ addHex(end, start, dataSize);
+ stream.readNumber();
+
+ for (i = 1; i < subitemsCount; i++) {
+ incHex(end, dataSize);
+ stream.readHexNumber(start, dataSize);
+ addHex(start, end, dataSize);
+ stream.readHexNumber(end, dataSize);
+ addHex(end, start, dataSize);
+ stream.readNumber();
+ }
+
+ break;
+
+ case 2:
+ stream.readHex(_char, dataSize);
+ code = stream.readNumber();
+ cMap.mapOne(hexToInt(_char, dataSize), code);
+
+ for (i = 1; i < subitemsCount; i++) {
+ incHex(_char, dataSize);
+
+ if (!sequence) {
+ stream.readHexNumber(tmp, dataSize);
+ addHex(_char, tmp, dataSize);
+ }
+
+ code = stream.readSigned() + (code + 1);
+ cMap.mapOne(hexToInt(_char, dataSize), code);
+ }
+
+ break;
+
+ case 3:
+ stream.readHex(start, dataSize);
+ stream.readHexNumber(end, dataSize);
+ addHex(end, start, dataSize);
+ code = stream.readNumber();
+ cMap.mapCidRange(hexToInt(start, dataSize), hexToInt(end, dataSize), code);
+
+ for (i = 1; i < subitemsCount; i++) {
+ incHex(end, dataSize);
+
+ if (!sequence) {
+ stream.readHexNumber(start, dataSize);
+ addHex(start, end, dataSize);
+ } else {
+ start.set(end);
+ }
+
+ stream.readHexNumber(end, dataSize);
+ addHex(end, start, dataSize);
+ code = stream.readNumber();
+ cMap.mapCidRange(hexToInt(start, dataSize), hexToInt(end, dataSize), code);
+ }
+
+ break;
+
+ case 4:
+ stream.readHex(_char, ucs2DataSize);
+ stream.readHex(charCode, dataSize);
+ cMap.mapOne(hexToInt(_char, ucs2DataSize), hexToStr(charCode, dataSize));
+
+ for (i = 1; i < subitemsCount; i++) {
+ incHex(_char, ucs2DataSize);
+
+ if (!sequence) {
+ stream.readHexNumber(tmp, ucs2DataSize);
+ addHex(_char, tmp, ucs2DataSize);
+ }
+
+ incHex(charCode, dataSize);
+ stream.readHexSigned(tmp, dataSize);
+ addHex(charCode, tmp, dataSize);
+ cMap.mapOne(hexToInt(_char, ucs2DataSize), hexToStr(charCode, dataSize));
+ }
+
+ break;
+
+ case 5:
+ stream.readHex(start, ucs2DataSize);
+ stream.readHexNumber(end, ucs2DataSize);
+ addHex(end, start, ucs2DataSize);
+ stream.readHex(charCode, dataSize);
+ cMap.mapBfRange(hexToInt(start, ucs2DataSize), hexToInt(end, ucs2DataSize), hexToStr(charCode, dataSize));
+
+ for (i = 1; i < subitemsCount; i++) {
+ incHex(end, ucs2DataSize);
+
+ if (!sequence) {
+ stream.readHexNumber(start, ucs2DataSize);
+ addHex(start, end, ucs2DataSize);
+ } else {
+ start.set(end);
+ }
+
+ stream.readHexNumber(end, ucs2DataSize);
+ addHex(end, start, ucs2DataSize);
+ stream.readHex(charCode, dataSize);
+ cMap.mapBfRange(hexToInt(start, ucs2DataSize), hexToInt(end, ucs2DataSize), hexToStr(charCode, dataSize));
+ }
+
+ break;
+
+ default:
+ reject(new Error('processBinaryCMap: Unknown type: ' + type));
+ return;
+ }
+ }
+
+ if (useCMap) {
+ resolve(extend(useCMap));
+ return;
+ }
+
+ resolve(cMap);
+ });
+ }
+
+ function BinaryCMapReader() {}
+
+ BinaryCMapReader.prototype = {
+ process: processBinaryCMap
+ };
+ return BinaryCMapReader;
+}();
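+// BinaryCMapReader.process() decodes the compressed binary ("bcmap") form of
+// the built-in CMaps: integers are 7-bit variable-length quantities (the high
+// bit marks continuation), hex values are delta-coded against the previous
+// record via addHex/incHex, and the top three bits of each record byte select
+// its type - 0 adds codespace ranges, 2/3 add cid char/range mappings,
+// 4/5 add bf char/range mappings, 1 is parsed but discarded, and 7 carries
+// string metadata, including an optional usecmap name that is resolved
+// through the `extend` callback.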
+
+var CMapFactory = function CMapFactoryClosure() {
+ function strToInt(str) {
+ var a = 0;
+
+ for (var i = 0; i < str.length; i++) {
+ a = a << 8 | str.charCodeAt(i);
+ }
+
+ return a >>> 0;
+ }
+
+ function expectString(obj) {
+ if (!(0, _util.isString)(obj)) {
+ throw new _util.FormatError('Malformed CMap: expected string.');
+ }
+ }
+
+ function expectInt(obj) {
+ if (!Number.isInteger(obj)) {
+ throw new _util.FormatError('Malformed CMap: expected int.');
+ }
+ }
+
+ function parseBfChar(cMap, lexer) {
+ while (true) {
+ var obj = lexer.getObj();
+
+ if ((0, _primitives.isEOF)(obj)) {
+ break;
+ }
+
+ if ((0, _primitives.isCmd)(obj, 'endbfchar')) {
+ return;
+ }
+
+ expectString(obj);
+ var src = strToInt(obj);
+ obj = lexer.getObj();
+ expectString(obj);
+ var dst = obj;
+ cMap.mapOne(src, dst);
+ }
+ }
+
+ function parseBfRange(cMap, lexer) {
+ while (true) {
+ var obj = lexer.getObj();
+
+ if ((0, _primitives.isEOF)(obj)) {
+ break;
+ }
+
+ if ((0, _primitives.isCmd)(obj, 'endbfrange')) {
+ return;
+ }
+
+ expectString(obj);
+ var low = strToInt(obj);
+ obj = lexer.getObj();
+ expectString(obj);
+ var high = strToInt(obj);
+ obj = lexer.getObj();
+
+ if (Number.isInteger(obj) || (0, _util.isString)(obj)) {
+ var dstLow = Number.isInteger(obj) ? String.fromCharCode(obj) : obj;
+ cMap.mapBfRange(low, high, dstLow);
+ } else if ((0, _primitives.isCmd)(obj, '[')) {
+ obj = lexer.getObj();
+ var array = [];
+
+ while (!(0, _primitives.isCmd)(obj, ']') && !(0, _primitives.isEOF)(obj)) {
+ array.push(obj);
+ obj = lexer.getObj();
+ }
+
+ cMap.mapBfRangeToArray(low, high, array);
+ } else {
+ break;
+ }
+ }
+
+ throw new _util.FormatError('Invalid bf range.');
+ }
+
+ function parseCidChar(cMap, lexer) {
+ while (true) {
+ var obj = lexer.getObj();
+
+ if ((0, _primitives.isEOF)(obj)) {
+ break;
+ }
+
+ if ((0, _primitives.isCmd)(obj, 'endcidchar')) {
+ return;
+ }
+
+ expectString(obj);
+ var src = strToInt(obj);
+ obj = lexer.getObj();
+ expectInt(obj);
+ var dst = obj;
+ cMap.mapOne(src, dst);
+ }
+ }
+
+ function parseCidRange(cMap, lexer) {
+ while (true) {
+ var obj = lexer.getObj();
+
+ if ((0, _primitives.isEOF)(obj)) {
+ break;
+ }
+
+ if ((0, _primitives.isCmd)(obj, 'endcidrange')) {
+ return;
+ }
+
+ expectString(obj);
+ var low = strToInt(obj);
+ obj = lexer.getObj();
+ expectString(obj);
+ var high = strToInt(obj);
+ obj = lexer.getObj();
+ expectInt(obj);
+ var dstLow = obj;
+ cMap.mapCidRange(low, high, dstLow);
+ }
+ }
+
+ function parseCodespaceRange(cMap, lexer) {
+ while (true) {
+ var obj = lexer.getObj();
+
+ if ((0, _primitives.isEOF)(obj)) {
+ break;
+ }
+
+ if ((0, _primitives.isCmd)(obj, 'endcodespacerange')) {
+ return;
+ }
+
+ if (!(0, _util.isString)(obj)) {
+ break;
+ }
+
+ var low = strToInt(obj);
+ obj = lexer.getObj();
+
+ if (!(0, _util.isString)(obj)) {
+ break;
+ }
+
+ var high = strToInt(obj);
+ cMap.addCodespaceRange(obj.length, low, high);
+ }
+
+ throw new _util.FormatError('Invalid codespace range.');
+ }
+
+ function parseWMode(cMap, lexer) {
+ var obj = lexer.getObj();
+
+ if (Number.isInteger(obj)) {
+ cMap.vertical = !!obj;
+ }
+ }
+
+ function parseCMapName(cMap, lexer) {
+ var obj = lexer.getObj();
+
+ if ((0, _primitives.isName)(obj) && (0, _util.isString)(obj.name)) {
+ cMap.name = obj.name;
+ }
+ }
+
+ function parseCMap(cMap, lexer, fetchBuiltInCMap, useCMap) {
+ var previous;
+ var embeddedUseCMap;
+
+ objLoop: while (true) {
+ try {
+ var obj = lexer.getObj();
+
+ if ((0, _primitives.isEOF)(obj)) {
+ break;
+ } else if ((0, _primitives.isName)(obj)) {
+ if (obj.name === 'WMode') {
+ parseWMode(cMap, lexer);
+ } else if (obj.name === 'CMapName') {
+ parseCMapName(cMap, lexer);
+ }
+
+ previous = obj;
+ } else if ((0, _primitives.isCmd)(obj)) {
+ switch (obj.cmd) {
+ case 'endcmap':
+ break objLoop;
+
+ case 'usecmap':
+ if ((0, _primitives.isName)(previous)) {
+ embeddedUseCMap = previous.name;
+ }
+
+ break;
+
+ case 'begincodespacerange':
+ parseCodespaceRange(cMap, lexer);
+ break;
+
+ case 'beginbfchar':
+ parseBfChar(cMap, lexer);
+ break;
+
+ case 'begincidchar':
+ parseCidChar(cMap, lexer);
+ break;
+
+ case 'beginbfrange':
+ parseBfRange(cMap, lexer);
+ break;
+
+ case 'begincidrange':
+ parseCidRange(cMap, lexer);
+ break;
+ }
+ }
+ } catch (ex) {
+ if (ex instanceof _core_utils.MissingDataException) {
+ throw ex;
+ }
+
+ (0, _util.warn)('Invalid cMap data: ' + ex);
+ continue;
+ }
+ }
+
+ if (!useCMap && embeddedUseCMap) {
+ useCMap = embeddedUseCMap;
+ }
+
+ if (useCMap) {
+ return extendCMap(cMap, fetchBuiltInCMap, useCMap);
+ }
+
+ return Promise.resolve(cMap);
+ }
+
+ function extendCMap(cMap, fetchBuiltInCMap, useCMap) {
+ return createBuiltInCMap(useCMap, fetchBuiltInCMap).then(function (newCMap) {
+ cMap.useCMap = newCMap;
+
+ if (cMap.numCodespaceRanges === 0) {
+ var useCodespaceRanges = cMap.useCMap.codespaceRanges;
+
+ for (var i = 0; i < useCodespaceRanges.length; i++) {
+ cMap.codespaceRanges[i] = useCodespaceRanges[i].slice();
+ }
+
+ cMap.numCodespaceRanges = cMap.useCMap.numCodespaceRanges;
+ }
+
+ cMap.useCMap.forEach(function (key, value) {
+ if (!cMap.contains(key)) {
+ cMap.mapOne(key, cMap.useCMap.lookup(key));
+ }
+ });
+ return cMap;
+ });
+ }
+
+ function createBuiltInCMap(name, fetchBuiltInCMap) {
+ if (name === 'Identity-H') {
+ return Promise.resolve(new IdentityCMap(false, 2));
+ } else if (name === 'Identity-V') {
+ return Promise.resolve(new IdentityCMap(true, 2));
+ }
+
+ if (!BUILT_IN_CMAPS.includes(name)) {
+ return Promise.reject(new Error('Unknown CMap name: ' + name));
+ }
+
+ if (!fetchBuiltInCMap) {
+ return Promise.reject(new Error('Built-in CMap parameters are not provided.'));
+ }
+
+ return fetchBuiltInCMap(name).then(function (data) {
+ var cMapData = data.cMapData,
+ compressionType = data.compressionType;
+ var cMap = new CMap(true);
+
+ if (compressionType === _util.CMapCompressionType.BINARY) {
+ return new BinaryCMapReader().process(cMapData, cMap, function (useCMap) {
+ return extendCMap(cMap, fetchBuiltInCMap, useCMap);
+ });
+ }
+
+ if (compressionType === _util.CMapCompressionType.NONE) {
+ var lexer = new _parser.Lexer(new _stream.Stream(cMapData));
+ return parseCMap(cMap, lexer, fetchBuiltInCMap, null);
+ }
+
+ return Promise.reject(new Error('TODO: Only BINARY/NONE CMap compression is currently supported.'));
+ });
+ }
+
+ return {
+ create: function create(params) {
+ var encoding = params.encoding;
+ var fetchBuiltInCMap = params.fetchBuiltInCMap;
+ var useCMap = params.useCMap;
+
+ if ((0, _primitives.isName)(encoding)) {
+ return createBuiltInCMap(encoding.name, fetchBuiltInCMap);
+ } else if ((0, _primitives.isStream)(encoding)) {
+ var cMap = new CMap();
+ var lexer = new _parser.Lexer(encoding);
+ return parseCMap(cMap, lexer, fetchBuiltInCMap, useCMap).then(function (parsedCMap) {
+ if (parsedCMap.isIdentityCMap) {
+ return createBuiltInCMap(parsedCMap.name, fetchBuiltInCMap);
+ }
+
+ return parsedCMap;
+ });
+ }
+
+ return Promise.reject(new Error('Encoding required.'));
+ }
+ };
+}();
+
+exports.CMapFactory = CMapFactory;
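+// CMapFactory.create() is the entry point used by the font code below: a Name
+// selects a built-in CMap (binary or plain text), while a Stream is parsed as
+// an embedded CMap, with usecmap references chained via extendCMap(). A rough
+// usage sketch (`cmapNameOrStream` and `str`, a byte string of encoded
+// character codes, are assumed here):
+//
+//   CMapFactory.create({
+//     encoding: cmapNameOrStream,
+//     fetchBuiltInCMap: fetchBuiltInCMap,
+//     useCMap: null,
+//   }).then(function (cMap) {
+//     var out = { charcode: 0, length: 0 };
+//     cMap.readCharCode(str, 0, out);
+//   });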
+
+/***/ }),
+/* 174 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.getFontType = getFontType;
+exports.IdentityToUnicodeMap = exports.ToUnicodeMap = exports.FontFlags = exports.Font = exports.ErrorFont = exports.SEAC_ANALYSIS_ENABLED = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _cff_parser = __w_pdfjs_require__(175);
+
+var _glyphlist = __w_pdfjs_require__(178);
+
+var _encodings = __w_pdfjs_require__(177);
+
+var _standard_fonts = __w_pdfjs_require__(179);
+
+var _unicode = __w_pdfjs_require__(180);
+
+var _font_renderer = __w_pdfjs_require__(181);
+
+var _cmap = __w_pdfjs_require__(173);
+
+var _core_utils = __w_pdfjs_require__(154);
+
+var _stream = __w_pdfjs_require__(158);
+
+var _type1_parser = __w_pdfjs_require__(182);
+
+function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
+
+function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
+
+function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
+
+function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
+
+var PRIVATE_USE_AREAS = [[0xE000, 0xF8FF], [0x100000, 0x10FFFD]];
+var PDF_GLYPH_SPACE_UNITS = 1000;
+var SEAC_ANALYSIS_ENABLED = true;
+exports.SEAC_ANALYSIS_ENABLED = SEAC_ANALYSIS_ENABLED;
+var FontFlags = {
+ FixedPitch: 1,
+ Serif: 2,
+ Symbolic: 4,
+ Script: 8,
+ Nonsymbolic: 32,
+ Italic: 64,
+ AllCap: 65536,
+ SmallCap: 131072,
+ ForceBold: 262144
+};
+exports.FontFlags = FontFlags;
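+// FontFlags mirrors the bit positions of the PDF font descriptor /Flags
+// entry; individual flags are tested with a bitwise AND, e.g.
+// `!!(properties.flags & FontFlags.Serif)` as done in the Font constructor
+// below.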
+var MacStandardGlyphOrdering = ['.notdef', '.null', 'nonmarkingreturn', 'space', 'exclam', 'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand', 'quotesingle', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright', 'asciicircum', 'underscore', 'grave', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright', 'asciitilde', 'Adieresis', 'Aring', 'Ccedilla', 'Eacute', 'Ntilde', 'Odieresis', 'Udieresis', 'aacute', 'agrave', 'acircumflex', 'adieresis', 'atilde', 'aring', 'ccedilla', 'eacute', 'egrave', 'ecircumflex', 'edieresis', 'iacute', 'igrave', 'icircumflex', 'idieresis', 'ntilde', 'oacute', 'ograve', 'ocircumflex', 'odieresis', 'otilde', 'uacute', 'ugrave', 'ucircumflex', 'udieresis', 'dagger', 'degree', 'cent', 'sterling', 'section', 'bullet', 'paragraph', 'germandbls', 'registered', 'copyright', 'trademark', 'acute', 'dieresis', 'notequal', 'AE', 'Oslash', 'infinity', 'plusminus', 'lessequal', 'greaterequal', 'yen', 'mu', 'partialdiff', 'summation', 'product', 'pi', 'integral', 'ordfeminine', 'ordmasculine', 'Omega', 'ae', 'oslash', 'questiondown', 'exclamdown', 'logicalnot', 'radical', 'florin', 'approxequal', 'Delta', 'guillemotleft', 'guillemotright', 'ellipsis', 'nonbreakingspace', 'Agrave', 'Atilde', 'Otilde', 'OE', 'oe', 'endash', 'emdash', 'quotedblleft', 'quotedblright', 'quoteleft', 'quoteright', 'divide', 'lozenge', 'ydieresis', 'Ydieresis', 'fraction', 'currency', 'guilsinglleft', 'guilsinglright', 'fi', 'fl', 'daggerdbl', 'periodcentered', 'quotesinglbase', 'quotedblbase', 'perthousand', 'Acircumflex', 'Ecircumflex', 'Aacute', 'Edieresis', 'Egrave', 'Iacute', 'Icircumflex', 'Idieresis', 'Igrave', 'Oacute', 'Ocircumflex', 'apple', 'Ograve', 'Uacute', 'Ucircumflex', 'Ugrave', 'dotlessi', 'circumflex', 'tilde', 'macron', 'breve', 'dotaccent', 'ring', 'cedilla', 'hungarumlaut', 'ogonek', 'caron', 'Lslash', 'lslash', 'Scaron', 'scaron', 'Zcaron', 'zcaron', 'brokenbar', 'Eth', 'eth', 'Yacute', 'yacute', 'Thorn', 'thorn', 'minus', 'multiply', 'onesuperior', 'twosuperior', 'threesuperior', 'onehalf', 'onequarter', 'threequarters', 'franc', 'Gbreve', 'gbreve', 'Idotaccent', 'Scedilla', 'scedilla', 'Cacute', 'cacute', 'Ccaron', 'ccaron', 'dcroat'];
+
+function adjustWidths(properties) {
+ if (!properties.fontMatrix) {
+ return;
+ }
+
+ if (properties.fontMatrix[0] === _util.FONT_IDENTITY_MATRIX[0]) {
+ return;
+ }
+
+ var scale = 0.001 / properties.fontMatrix[0];
+ var glyphsWidths = properties.widths;
+
+ for (var glyph in glyphsWidths) {
+ glyphsWidths[glyph] *= scale;
+ }
+
+ properties.defaultWidth *= scale;
+}
+
+function adjustToUnicode(properties, builtInEncoding) {
+ if (properties.hasIncludedToUnicodeMap) {
+ return;
+ }
+
+ if (properties.hasEncoding) {
+ return;
+ }
+
+ if (builtInEncoding === properties.defaultEncoding) {
+ return;
+ }
+
+ if (properties.toUnicode instanceof IdentityToUnicodeMap) {
+ return;
+ }
+
+ var toUnicode = [],
+ glyphsUnicodeMap = (0, _glyphlist.getGlyphsUnicode)();
+
+ for (var charCode in builtInEncoding) {
+ var glyphName = builtInEncoding[charCode];
+ var unicode = (0, _unicode.getUnicodeForGlyph)(glyphName, glyphsUnicodeMap);
+
+ if (unicode !== -1) {
+ toUnicode[charCode] = String.fromCharCode(unicode);
+ }
+ }
+
+ properties.toUnicode.amend(toUnicode);
+}
+
+function getFontType(type, subtype) {
+ switch (type) {
+ case 'Type1':
+ return subtype === 'Type1C' ? _util.FontType.TYPE1C : _util.FontType.TYPE1;
+
+ case 'CIDFontType0':
+ return subtype === 'CIDFontType0C' ? _util.FontType.CIDFONTTYPE0C : _util.FontType.CIDFONTTYPE0;
+
+ case 'OpenType':
+ return _util.FontType.OPENTYPE;
+
+ case 'TrueType':
+ return _util.FontType.TRUETYPE;
+
+ case 'CIDFontType2':
+ return _util.FontType.CIDFONTTYPE2;
+
+ case 'MMType1':
+ return _util.FontType.MMTYPE1;
+
+ case 'Type0':
+ return _util.FontType.TYPE0;
+
+ default:
+ return _util.FontType.UNKNOWN;
+ }
+}
+
+function recoverGlyphName(name, glyphsUnicodeMap) {
+ if (glyphsUnicodeMap[name] !== undefined) {
+ return name;
+ }
+
+ var unicode = (0, _unicode.getUnicodeForGlyph)(name, glyphsUnicodeMap);
+
+ if (unicode !== -1) {
+ for (var key in glyphsUnicodeMap) {
+ if (glyphsUnicodeMap[key] === unicode) {
+ return key;
+ }
+ }
+ }
+
+ (0, _util.info)('Unable to recover a standard glyph name for: ' + name);
+ return name;
+}
+
+var Glyph = function GlyphClosure() {
+ function Glyph(fontChar, unicode, accent, width, vmetric, operatorListId, isSpace, isInFont) {
+ this.fontChar = fontChar;
+ this.unicode = unicode;
+ this.accent = accent;
+ this.width = width;
+ this.vmetric = vmetric;
+ this.operatorListId = operatorListId;
+ this.isSpace = isSpace;
+ this.isInFont = isInFont;
+ }
+
+ Glyph.prototype.matchesForCache = function (fontChar, unicode, accent, width, vmetric, operatorListId, isSpace, isInFont) {
+ return this.fontChar === fontChar && this.unicode === unicode && this.accent === accent && this.width === width && this.vmetric === vmetric && this.operatorListId === operatorListId && this.isSpace === isSpace && this.isInFont === isInFont;
+ };
+
+ return Glyph;
+}();
+
+var ToUnicodeMap = function ToUnicodeMapClosure() {
+ function ToUnicodeMap() {
+ var cmap = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : [];
+ this._map = cmap;
+ }
+
+ ToUnicodeMap.prototype = {
+ get length() {
+ return this._map.length;
+ },
+
+ forEach: function forEach(callback) {
+ for (var charCode in this._map) {
+ callback(charCode, this._map[charCode].charCodeAt(0));
+ }
+ },
+ has: function has(i) {
+ return this._map[i] !== undefined;
+ },
+ get: function get(i) {
+ return this._map[i];
+ },
+ charCodeOf: function charCodeOf(value) {
+ var map = this._map;
+
+ if (map.length <= 0x10000) {
+ return map.indexOf(value);
+ }
+
+ for (var charCode in map) {
+ if (map[charCode] === value) {
+ return charCode | 0;
+ }
+ }
+
+ return -1;
+ },
+ amend: function amend(map) {
+ for (var charCode in map) {
+ this._map[charCode] = map[charCode];
+ }
+ }
+ };
+ return ToUnicodeMap;
+}();
+
+exports.ToUnicodeMap = ToUnicodeMap;
+
+var IdentityToUnicodeMap = function IdentityToUnicodeMapClosure() {
+ function IdentityToUnicodeMap(firstChar, lastChar) {
+ this.firstChar = firstChar;
+ this.lastChar = lastChar;
+ }
+
+ IdentityToUnicodeMap.prototype = {
+ get length() {
+ return this.lastChar + 1 - this.firstChar;
+ },
+
+ forEach: function forEach(callback) {
+ for (var i = this.firstChar, ii = this.lastChar; i <= ii; i++) {
+ callback(i, i);
+ }
+ },
+ has: function has(i) {
+ return this.firstChar <= i && i <= this.lastChar;
+ },
+ get: function get(i) {
+ if (this.firstChar <= i && i <= this.lastChar) {
+ return String.fromCharCode(i);
+ }
+
+ return undefined;
+ },
+ charCodeOf: function charCodeOf(v) {
+ return Number.isInteger(v) && v >= this.firstChar && v <= this.lastChar ? v : -1;
+ },
+ amend: function amend(map) {
+ (0, _util.unreachable)('Should not call amend()');
+ }
+ };
+ return IdentityToUnicodeMap;
+}();
+
+exports.IdentityToUnicodeMap = IdentityToUnicodeMap;
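+// ToUnicodeMap keeps explicit charcode -> Unicode-string entries in a sparse
+// array, whereas IdentityToUnicodeMap covers fonts whose codes in
+// [firstChar, lastChar] already are Unicode values, which is why its amend()
+// is unreachable.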
+
+var OpenTypeFileBuilder = function OpenTypeFileBuilderClosure() {
+ function writeInt16(dest, offset, num) {
+ dest[offset] = num >> 8 & 0xFF;
+ dest[offset + 1] = num & 0xFF;
+ }
+
+ function writeInt32(dest, offset, num) {
+ dest[offset] = num >> 24 & 0xFF;
+ dest[offset + 1] = num >> 16 & 0xFF;
+ dest[offset + 2] = num >> 8 & 0xFF;
+ dest[offset + 3] = num & 0xFF;
+ }
+
+ function writeData(dest, offset, data) {
+ var i, ii;
+
+ if (data instanceof Uint8Array) {
+ dest.set(data, offset);
+ } else if (typeof data === 'string') {
+ for (i = 0, ii = data.length; i < ii; i++) {
+ dest[offset++] = data.charCodeAt(i) & 0xFF;
+ }
+ } else {
+ for (i = 0, ii = data.length; i < ii; i++) {
+ dest[offset++] = data[i] & 0xFF;
+ }
+ }
+ }
+
+ function OpenTypeFileBuilder(sfnt) {
+ this.sfnt = sfnt;
+ this.tables = Object.create(null);
+ }
+
+ OpenTypeFileBuilder.getSearchParams = function OpenTypeFileBuilder_getSearchParams(entriesCount, entrySize) {
+ var maxPower2 = 1,
+ log2 = 0;
+
+ while ((maxPower2 ^ entriesCount) > maxPower2) {
+ maxPower2 <<= 1;
+ log2++;
+ }
+
+ var searchRange = maxPower2 * entrySize;
+ return {
+ range: searchRange,
+ entry: log2,
+ rangeShift: entrySize * entriesCount - searchRange
+ };
+ };
+
+ var OTF_HEADER_SIZE = 12;
+ var OTF_TABLE_ENTRY_SIZE = 16;
+ OpenTypeFileBuilder.prototype = {
+ toArray: function OpenTypeFileBuilder_toArray() {
+ var sfnt = this.sfnt;
+ var tables = this.tables;
+ var tablesNames = Object.keys(tables);
+ tablesNames.sort();
+ var numTables = tablesNames.length;
+ var i, j, jj, table, tableName;
+ var offset = OTF_HEADER_SIZE + numTables * OTF_TABLE_ENTRY_SIZE;
+ var tableOffsets = [offset];
+
+ for (i = 0; i < numTables; i++) {
+ table = tables[tablesNames[i]];
+ var paddedLength = (table.length + 3 & ~3) >>> 0;
+ offset += paddedLength;
+ tableOffsets.push(offset);
+ }
+
+ var file = new Uint8Array(offset);
+
+ for (i = 0; i < numTables; i++) {
+ table = tables[tablesNames[i]];
+ writeData(file, tableOffsets[i], table);
+ }
+
+ if (sfnt === 'true') {
+ sfnt = (0, _util.string32)(0x00010000);
+ }
+
+ file[0] = sfnt.charCodeAt(0) & 0xFF;
+ file[1] = sfnt.charCodeAt(1) & 0xFF;
+ file[2] = sfnt.charCodeAt(2) & 0xFF;
+ file[3] = sfnt.charCodeAt(3) & 0xFF;
+ writeInt16(file, 4, numTables);
+ var searchParams = OpenTypeFileBuilder.getSearchParams(numTables, 16);
+ writeInt16(file, 6, searchParams.range);
+ writeInt16(file, 8, searchParams.entry);
+ writeInt16(file, 10, searchParams.rangeShift);
+ offset = OTF_HEADER_SIZE;
+
+ for (i = 0; i < numTables; i++) {
+ tableName = tablesNames[i];
+ file[offset] = tableName.charCodeAt(0) & 0xFF;
+ file[offset + 1] = tableName.charCodeAt(1) & 0xFF;
+ file[offset + 2] = tableName.charCodeAt(2) & 0xFF;
+ file[offset + 3] = tableName.charCodeAt(3) & 0xFF;
+ var checksum = 0;
+
+ for (j = tableOffsets[i], jj = tableOffsets[i + 1]; j < jj; j += 4) {
+ var quad = (0, _util.readUint32)(file, j);
+ checksum = checksum + quad >>> 0;
+ }
+
+ writeInt32(file, offset + 4, checksum);
+ writeInt32(file, offset + 8, tableOffsets[i]);
+ writeInt32(file, offset + 12, tables[tableName].length);
+ offset += OTF_TABLE_ENTRY_SIZE;
+ }
+
+ return file;
+ },
+ addTable: function OpenTypeFileBuilder_addTable(tag, data) {
+ if (tag in this.tables) {
+ throw new Error('Table ' + tag + ' already exists');
+ }
+
+ this.tables[tag] = data;
+ }
+ };
+ return OpenTypeFileBuilder;
+}();
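+// OpenTypeFileBuilder assembles a minimal sfnt container: a 12-byte header,
+// one 16-byte directory entry per table (tag, checksum, offset, length) and
+// the 4-byte-aligned table data. getSearchParams() computes
+// searchRange = 2^floor(log2(entriesCount)) * entrySize, entrySelector =
+// floor(log2(entriesCount)) and rangeShift = entriesCount * entrySize -
+// searchRange, as the table directory and cmap format 4 headers require.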
+
+var Font = function FontClosure() {
+ function Font(name, file, properties) {
+ var charCode;
+ this.name = name;
+ this.loadedName = properties.loadedName;
+ this.isType3Font = properties.isType3Font;
+ this.sizes = [];
+ this.missingFile = false;
+ this.glyphCache = Object.create(null);
+ this.isSerifFont = !!(properties.flags & FontFlags.Serif);
+ this.isSymbolicFont = !!(properties.flags & FontFlags.Symbolic);
+ this.isMonospace = !!(properties.flags & FontFlags.FixedPitch);
+ var type = properties.type;
+ var subtype = properties.subtype;
+ this.type = type;
+ this.subtype = subtype;
+ this.fallbackName = this.isMonospace ? 'monospace' : this.isSerifFont ? 'serif' : 'sans-serif';
+ this.differences = properties.differences;
+ this.widths = properties.widths;
+ this.defaultWidth = properties.defaultWidth;
+ this.composite = properties.composite;
+ this.wideChars = properties.wideChars;
+ this.cMap = properties.cMap;
+ this.ascent = properties.ascent / PDF_GLYPH_SPACE_UNITS;
+ this.descent = properties.descent / PDF_GLYPH_SPACE_UNITS;
+ this.fontMatrix = properties.fontMatrix;
+ this.bbox = properties.bbox;
+ this.defaultEncoding = properties.defaultEncoding;
+ this.toUnicode = properties.toUnicode;
+ this.fallbackToUnicode = properties.fallbackToUnicode || new ToUnicodeMap();
+ this.toFontChar = [];
+
+ if (properties.type === 'Type3') {
+ for (charCode = 0; charCode < 256; charCode++) {
+ this.toFontChar[charCode] = this.differences[charCode] || properties.defaultEncoding[charCode];
+ }
+
+ this.fontType = _util.FontType.TYPE3;
+ return;
+ }
+
+ this.cidEncoding = properties.cidEncoding;
+ this.vertical = properties.vertical;
+
+ if (this.vertical) {
+ this.vmetrics = properties.vmetrics;
+ this.defaultVMetrics = properties.defaultVMetrics;
+ }
+
+ if (!file || file.isEmpty) {
+ if (file) {
+ (0, _util.warn)('Font file is empty in "' + name + '" (' + this.loadedName + ')');
+ }
+
+ this.fallbackToSystemFont();
+ return;
+ }
+
+ var _getFontFileType = getFontFileType(file, properties);
+
+ var _getFontFileType2 = _slicedToArray(_getFontFileType, 2);
+
+ type = _getFontFileType2[0];
+ subtype = _getFontFileType2[1];
+
+ if (type !== this.type || subtype !== this.subtype) {
+ (0, _util.info)('Inconsistent font file Type/SubType, expected: ' + "".concat(this.type, "/").concat(this.subtype, " but found: ").concat(type, "/").concat(subtype, "."));
+ }
+
+ try {
+ var data;
+
+ switch (type) {
+ case 'MMType1':
+ (0, _util.info)('MMType1 font (' + name + '), falling back to Type1.');
+
+ case 'Type1':
+ case 'CIDFontType0':
+ this.mimetype = 'font/opentype';
+ var cff = subtype === 'Type1C' || subtype === 'CIDFontType0C' ? new CFFFont(file, properties) : new Type1Font(name, file, properties);
+ adjustWidths(properties);
+ data = this.convert(name, cff, properties);
+ break;
+
+ case 'OpenType':
+ case 'TrueType':
+ case 'CIDFontType2':
+ this.mimetype = 'font/opentype';
+ data = this.checkAndRepair(name, file, properties);
+
+ if (this.isOpenType) {
+ adjustWidths(properties);
+ type = 'OpenType';
+ }
+
+ break;
+
+ default:
+ throw new _util.FormatError("Font ".concat(type, " is not supported"));
+ }
+ } catch (e) {
+ (0, _util.warn)(e);
+ this.fallbackToSystemFont();
+ return;
+ }
+
+ this.data = data;
+ this.fontType = getFontType(type, subtype);
+ this.fontMatrix = properties.fontMatrix;
+ this.widths = properties.widths;
+ this.defaultWidth = properties.defaultWidth;
+ this.toUnicode = properties.toUnicode;
+ this.encoding = properties.baseEncoding;
+ this.seacMap = properties.seacMap;
+ }
+
+ Font.getFontID = function () {
+ var ID = 1;
+ return function Font_getFontID() {
+ return String(ID++);
+ };
+ }();
+
+ function int16(b0, b1) {
+ return (b0 << 8) + b1;
+ }
+
+ function writeSignedInt16(bytes, index, value) {
+ bytes[index + 1] = value;
+ bytes[index] = value >>> 8;
+ }
+
+ function signedInt16(b0, b1) {
+ var value = (b0 << 8) + b1;
+ return value & 1 << 15 ? value - 0x10000 : value;
+ }
+
+ function int32(b0, b1, b2, b3) {
+ return (b0 << 24) + (b1 << 16) + (b2 << 8) + b3;
+ }
+
+ function string16(value) {
+ return String.fromCharCode(value >> 8 & 0xff, value & 0xff);
+ }
+
+ function safeString16(value) {
+ value = value > 0x7FFF ? 0x7FFF : value < -0x8000 ? -0x8000 : value;
+ return String.fromCharCode(value >> 8 & 0xff, value & 0xff);
+ }
+
+ function isTrueTypeFile(file) {
+ var header = file.peekBytes(4);
+ return (0, _util.readUint32)(header, 0) === 0x00010000 || (0, _util.bytesToString)(header) === 'true';
+ }
+
+ function isTrueTypeCollectionFile(file) {
+ var header = file.peekBytes(4);
+ return (0, _util.bytesToString)(header) === 'ttcf';
+ }
+
+ function isOpenTypeFile(file) {
+ var header = file.peekBytes(4);
+ return (0, _util.bytesToString)(header) === 'OTTO';
+ }
+
+ function isType1File(file) {
+ var header = file.peekBytes(2);
+
+ if (header[0] === 0x25 && header[1] === 0x21) {
+ return true;
+ }
+
+ if (header[0] === 0x80 && header[1] === 0x01) {
+ return true;
+ }
+
+ return false;
+ }
+
+ function isCFFFile(file) {
+ var header = file.peekBytes(4);
+
+ if (header[0] >= 1 && header[3] >= 1 && header[3] <= 4) {
+ return true;
+ }
+
+ return false;
+ }
+
+ function getFontFileType(file, _ref) {
+ var type = _ref.type,
+ subtype = _ref.subtype,
+ composite = _ref.composite;
+ var fileType, fileSubtype;
+
+ if (isTrueTypeFile(file) || isTrueTypeCollectionFile(file)) {
+ if (composite) {
+ fileType = 'CIDFontType2';
+ } else {
+ fileType = 'TrueType';
+ }
+ } else if (isOpenTypeFile(file)) {
+ if (composite) {
+ fileType = 'CIDFontType2';
+ } else {
+ fileType = 'OpenType';
+ }
+ } else if (isType1File(file)) {
+ if (composite) {
+ fileType = 'CIDFontType0';
+ } else {
+ fileType = type === 'MMType1' ? 'MMType1' : 'Type1';
+ }
+ } else if (isCFFFile(file)) {
+ if (composite) {
+ fileType = 'CIDFontType0';
+ fileSubtype = 'CIDFontType0C';
+ } else {
+ fileType = type === 'MMType1' ? 'MMType1' : 'Type1';
+ fileSubtype = 'Type1C';
+ }
+ } else {
+ (0, _util.warn)('getFontFileType: Unable to detect correct font file Type/Subtype.');
+ fileType = type;
+ fileSubtype = subtype;
+ }
+
+ return [fileType, fileSubtype];
+ }
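+ // getFontFileType() sniffs the embedded font file instead of trusting the
+ // PDF dictionary: a 0x00010000 or 'true' header means TrueType, 'ttcf' a
+ // TrueType Collection, 'OTTO' a CFF-based OpenType font, '%!' or a
+ // 0x80 0x01 PFB segment marker a Type1 font, and a bare CFF header falls
+ // back to Type1C/CIDFontType0C; the `composite` flag picks between the
+ // simple and CID variants.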
+
+ function buildToFontChar(encoding, glyphsUnicodeMap, differences) {
+ var toFontChar = [],
+ unicode;
+
+ for (var i = 0, ii = encoding.length; i < ii; i++) {
+ unicode = (0, _unicode.getUnicodeForGlyph)(encoding[i], glyphsUnicodeMap);
+
+ if (unicode !== -1) {
+ toFontChar[i] = unicode;
+ }
+ }
+
+ for (var charCode in differences) {
+ unicode = (0, _unicode.getUnicodeForGlyph)(differences[charCode], glyphsUnicodeMap);
+
+ if (unicode !== -1) {
+ toFontChar[+charCode] = unicode;
+ }
+ }
+
+ return toFontChar;
+ }
+
+ function adjustMapping(charCodeToGlyphId, hasGlyph, newGlyphZeroId) {
+ var newMap = Object.create(null);
+ var toFontChar = [];
+ var privateUseAreaIndex = 0;
+ var nextAvailableFontCharCode = PRIVATE_USE_AREAS[privateUseAreaIndex][0];
+ var privateUseOffsetEnd = PRIVATE_USE_AREAS[privateUseAreaIndex][1];
+
+ for (var originalCharCode in charCodeToGlyphId) {
+ originalCharCode |= 0;
+ var glyphId = charCodeToGlyphId[originalCharCode];
+
+ if (!hasGlyph(glyphId)) {
+ continue;
+ }
+
+ if (nextAvailableFontCharCode > privateUseOffsetEnd) {
+ privateUseAreaIndex++;
+
+ if (privateUseAreaIndex >= PRIVATE_USE_AREAS.length) {
+ (0, _util.warn)('Ran out of space in font private use area.');
+ break;
+ }
+
+ nextAvailableFontCharCode = PRIVATE_USE_AREAS[privateUseAreaIndex][0];
+ privateUseOffsetEnd = PRIVATE_USE_AREAS[privateUseAreaIndex][1];
+ }
+
+ var fontCharCode = nextAvailableFontCharCode++;
+
+ if (glyphId === 0) {
+ glyphId = newGlyphZeroId;
+ }
+
+ newMap[fontCharCode] = glyphId;
+ toFontChar[originalCharCode] = fontCharCode;
+ }
+
+ return {
+ toFontChar: toFontChar,
+ charCodeToGlyphId: newMap,
+ nextAvailableFontCharCode: nextAvailableFontCharCode
+ };
+ }
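+ // adjustMapping() re-homes every usable glyph onto consecutive codes in the
+ // Unicode Private Use Areas (U+E000-U+F8FF, then the plane-16 area), so the
+ // rebuilt font never collides with real text codes; the returned toFontChar
+ // array records the original-code -> new-code translation.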
+
+ function getRanges(glyphs, numGlyphs) {
+ var codes = [];
+
+ for (var charCode in glyphs) {
+ if (glyphs[charCode] >= numGlyphs) {
+ continue;
+ }
+
+ codes.push({
+ fontCharCode: charCode | 0,
+ glyphId: glyphs[charCode]
+ });
+ }
+
+ if (codes.length === 0) {
+ codes.push({
+ fontCharCode: 0,
+ glyphId: 0
+ });
+ }
+
+ codes.sort(function fontGetRangesSort(a, b) {
+ return a.fontCharCode - b.fontCharCode;
+ });
+ var ranges = [];
+ var length = codes.length;
+
+ for (var n = 0; n < length;) {
+ var start = codes[n].fontCharCode;
+ var codeIndices = [codes[n].glyphId];
+ ++n;
+ var end = start;
+
+ while (n < length && end + 1 === codes[n].fontCharCode) {
+ codeIndices.push(codes[n].glyphId);
+ ++end;
+ ++n;
+
+ if (end === 0xFFFF) {
+ break;
+ }
+ }
+
+ ranges.push([start, end, codeIndices]);
+ }
+
+ return ranges;
+ }
+
+ function createCmapTable(glyphs, numGlyphs) {
+ var ranges = getRanges(glyphs, numGlyphs);
+ var numTables = ranges[ranges.length - 1][1] > 0xFFFF ? 2 : 1;
+ var cmap = '\x00\x00' + string16(numTables) + '\x00\x03' + '\x00\x01' + (0, _util.string32)(4 + numTables * 8);
+ var i, ii, j, jj;
+
+ for (i = ranges.length - 1; i >= 0; --i) {
+ if (ranges[i][0] <= 0xFFFF) {
+ break;
+ }
+ }
+
+ var bmpLength = i + 1;
+
+ if (ranges[i][0] < 0xFFFF && ranges[i][1] === 0xFFFF) {
+ ranges[i][1] = 0xFFFE;
+ }
+
+ var trailingRangesCount = ranges[i][1] < 0xFFFF ? 1 : 0;
+ var segCount = bmpLength + trailingRangesCount;
+ var searchParams = OpenTypeFileBuilder.getSearchParams(segCount, 2);
+ var startCount = '';
+ var endCount = '';
+ var idDeltas = '';
+ var idRangeOffsets = '';
+ var glyphsIds = '';
+ var bias = 0;
+ var range, start, end, codes;
+
+ for (i = 0, ii = bmpLength; i < ii; i++) {
+ range = ranges[i];
+ start = range[0];
+ end = range[1];
+ startCount += string16(start);
+ endCount += string16(end);
+ codes = range[2];
+ var contiguous = true;
+
+ for (j = 1, jj = codes.length; j < jj; ++j) {
+ if (codes[j] !== codes[j - 1] + 1) {
+ contiguous = false;
+ break;
+ }
+ }
+
+ if (!contiguous) {
+ var offset = (segCount - i) * 2 + bias * 2;
+ bias += end - start + 1;
+ idDeltas += string16(0);
+ idRangeOffsets += string16(offset);
+
+ for (j = 0, jj = codes.length; j < jj; ++j) {
+ glyphsIds += string16(codes[j]);
+ }
+ } else {
+ var startCode = codes[0];
+ idDeltas += string16(startCode - start & 0xFFFF);
+ idRangeOffsets += string16(0);
+ }
+ }
+
+ if (trailingRangesCount > 0) {
+ endCount += '\xFF\xFF';
+ startCount += '\xFF\xFF';
+ idDeltas += '\x00\x01';
+ idRangeOffsets += '\x00\x00';
+ }
+
+ var format314 = '\x00\x00' + string16(2 * segCount) + string16(searchParams.range) + string16(searchParams.entry) + string16(searchParams.rangeShift) + endCount + '\x00\x00' + startCount + idDeltas + idRangeOffsets + glyphsIds;
+ var format31012 = '';
+ var header31012 = '';
+
+ if (numTables > 1) {
+ cmap += '\x00\x03' + '\x00\x0A' + (0, _util.string32)(4 + numTables * 8 + 4 + format314.length);
+ format31012 = '';
+
+ for (i = 0, ii = ranges.length; i < ii; i++) {
+ range = ranges[i];
+ start = range[0];
+ codes = range[2];
+ var code = codes[0];
+
+ for (j = 1, jj = codes.length; j < jj; ++j) {
+ if (codes[j] !== codes[j - 1] + 1) {
+ end = range[0] + j - 1;
+ format31012 += (0, _util.string32)(start) + (0, _util.string32)(end) + (0, _util.string32)(code);
+ start = end + 1;
+ code = codes[j];
+ }
+ }
+
+ format31012 += (0, _util.string32)(start) + (0, _util.string32)(range[1]) + (0, _util.string32)(code);
+ }
+
+ header31012 = '\x00\x0C' + '\x00\x00' + (0, _util.string32)(format31012.length + 16) + '\x00\x00\x00\x00' + (0, _util.string32)(format31012.length / 12);
+ }
+
+ return cmap + '\x00\x04' + string16(format314.length + 4) + format314 + header31012 + format31012;
+ }
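+ // createCmapTable() emits a (3, 1) format 4 subtable covering the BMP and,
+ // when any code exceeds 0xFFFF, an additional (3, 10) format 12 subtable for
+ // the full range; contiguous glyph runs are encoded with an idDelta, while
+ // non-contiguous runs spill their glyph ids into the glyph id array via
+ // idRangeOffset.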
+
+ function validateOS2Table(os2) {
+ var stream = new _stream.Stream(os2.data);
+ var version = stream.getUint16();
+ stream.getBytes(60);
+ var selection = stream.getUint16();
+
+ if (version < 4 && selection & 0x0300) {
+ return false;
+ }
+
+ var firstChar = stream.getUint16();
+ var lastChar = stream.getUint16();
+
+ if (firstChar > lastChar) {
+ return false;
+ }
+
+ stream.getBytes(6);
+ var usWinAscent = stream.getUint16();
+
+ if (usWinAscent === 0) {
+ return false;
+ }
+
+ os2.data[8] = os2.data[9] = 0;
+ return true;
+ }
+
+ function createOS2Table(properties, charstrings, override) {
+ override = override || {
+ unitsPerEm: 0,
+ yMax: 0,
+ yMin: 0,
+ ascent: 0,
+ descent: 0
+ };
+ var ulUnicodeRange1 = 0;
+ var ulUnicodeRange2 = 0;
+ var ulUnicodeRange3 = 0;
+ var ulUnicodeRange4 = 0;
+ var firstCharIndex = null;
+ var lastCharIndex = 0;
+
+ if (charstrings) {
+ for (var code in charstrings) {
+ code |= 0;
+
+ if (firstCharIndex > code || !firstCharIndex) {
+ firstCharIndex = code;
+ }
+
+ if (lastCharIndex < code) {
+ lastCharIndex = code;
+ }
+
+ var position = (0, _unicode.getUnicodeRangeFor)(code);
+
+ if (position < 32) {
+ ulUnicodeRange1 |= 1 << position;
+ } else if (position < 64) {
+ ulUnicodeRange2 |= 1 << position - 32;
+ } else if (position < 96) {
+ ulUnicodeRange3 |= 1 << position - 64;
+ } else if (position < 123) {
+ ulUnicodeRange4 |= 1 << position - 96;
+ } else {
+ throw new _util.FormatError('Unicode ranges Bits > 123 are reserved for internal usage');
+ }
+ }
+
+ if (lastCharIndex > 0xFFFF) {
+ lastCharIndex = 0xFFFF;
+ }
+ } else {
+ firstCharIndex = 0;
+ lastCharIndex = 255;
+ }
+
+ var bbox = properties.bbox || [0, 0, 0, 0];
+ var unitsPerEm = override.unitsPerEm || 1 / (properties.fontMatrix || _util.FONT_IDENTITY_MATRIX)[0];
+ var scale = properties.ascentScaled ? 1.0 : unitsPerEm / PDF_GLYPH_SPACE_UNITS;
+ var typoAscent = override.ascent || Math.round(scale * (properties.ascent || bbox[3]));
+ var typoDescent = override.descent || Math.round(scale * (properties.descent || bbox[1]));
+
+ if (typoDescent > 0 && properties.descent > 0 && bbox[1] < 0) {
+ typoDescent = -typoDescent;
+ }
+
+ var winAscent = override.yMax || typoAscent;
+ var winDescent = -override.yMin || -typoDescent;
+ return '\x00\x03' + '\x02\x24' + '\x01\xF4' + '\x00\x05' + '\x00\x00' + '\x02\x8A' + '\x02\xBB' + '\x00\x00' + '\x00\x8C' + '\x02\x8A' + '\x02\xBB' + '\x00\x00' + '\x01\xDF' + '\x00\x31' + '\x01\x02' + '\x00\x00' + '\x00\x00\x06' + String.fromCharCode(properties.fixedPitch ? 0x09 : 0x00) + '\x00\x00\x00\x00\x00\x00' + (0, _util.string32)(ulUnicodeRange1) + (0, _util.string32)(ulUnicodeRange2) + (0, _util.string32)(ulUnicodeRange3) + (0, _util.string32)(ulUnicodeRange4) + '\x2A\x32\x31\x2A' + string16(properties.italicAngle ? 1 : 0) + string16(firstCharIndex || properties.firstChar) + string16(lastCharIndex || properties.lastChar) + string16(typoAscent) + string16(typoDescent) + '\x00\x64' + string16(winAscent) + string16(winDescent) + '\x00\x00\x00\x00' + '\x00\x00\x00\x00' + string16(properties.xHeight) + string16(properties.capHeight) + string16(0) + string16(firstCharIndex || properties.firstChar) + '\x00\x03';
+ }
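+ // createOS2Table() synthesizes a version 3 OS/2 table: the ulUnicodeRange
+ // bits are derived from the character codes present, ascent/descent are
+ // scaled from the 1000-unit PDF glyph space to the font's unitsPerEm unless
+ // `ascentScaled` is set, and a 0-255 character range is assumed when no
+ // charstrings are supplied.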
+
+ function createPostTable(properties) {
+ var angle = Math.floor(properties.italicAngle * Math.pow(2, 16));
+ return '\x00\x03\x00\x00' + (0, _util.string32)(angle) + '\x00\x00' + '\x00\x00' + (0, _util.string32)(properties.fixedPitch) + '\x00\x00\x00\x00' + '\x00\x00\x00\x00' + '\x00\x00\x00\x00' + '\x00\x00\x00\x00';
+ }
+
+ function createNameTable(name, proto) {
+ if (!proto) {
+ proto = [[], []];
+ }
+
+ var strings = [proto[0][0] || 'Original licence', proto[0][1] || name, proto[0][2] || 'Unknown', proto[0][3] || 'uniqueID', proto[0][4] || name, proto[0][5] || 'Version 0.11', proto[0][6] || '', proto[0][7] || 'Unknown', proto[0][8] || 'Unknown', proto[0][9] || 'Unknown'];
+ var stringsUnicode = [];
+ var i, ii, j, jj, str;
+
+ for (i = 0, ii = strings.length; i < ii; i++) {
+ str = proto[1][i] || strings[i];
+ var strBufUnicode = [];
+
+ for (j = 0, jj = str.length; j < jj; j++) {
+ strBufUnicode.push(string16(str.charCodeAt(j)));
+ }
+
+ stringsUnicode.push(strBufUnicode.join(''));
+ }
+
+ var names = [strings, stringsUnicode];
+ var platforms = ['\x00\x01', '\x00\x03'];
+ var encodings = ['\x00\x00', '\x00\x01'];
+ var languages = ['\x00\x00', '\x04\x09'];
+ var namesRecordCount = strings.length * platforms.length;
+ var nameTable = '\x00\x00' + string16(namesRecordCount) + string16(namesRecordCount * 12 + 6);
+ var strOffset = 0;
+
+ for (i = 0, ii = platforms.length; i < ii; i++) {
+ var strs = names[i];
+
+ for (j = 0, jj = strs.length; j < jj; j++) {
+ str = strs[j];
+ var nameRecord = platforms[i] + encodings[i] + languages[i] + string16(j) + string16(str.length) + string16(strOffset);
+ nameTable += nameRecord;
+ strOffset += str.length;
+ }
+ }
+
+ nameTable += strings.join('') + stringsUnicode.join('');
+ return nameTable;
+ }
+
+ Font.prototype = {
+ name: null,
+ font: null,
+ mimetype: null,
+ encoding: null,
+ disableFontFace: false,
+
+ get renderer() {
+ var renderer = _font_renderer.FontRendererFactory.create(this, SEAC_ANALYSIS_ENABLED);
+
+ return (0, _util.shadow)(this, 'renderer', renderer);
+ },
+
+ exportData: function Font_exportData() {
+ var data = {};
+
+ for (var i in this) {
+ if (this.hasOwnProperty(i)) {
+ data[i] = this[i];
+ }
+ }
+
+ return data;
+ },
+ fallbackToSystemFont: function Font_fallbackToSystemFont() {
+ var _this = this;
+
+ this.missingFile = true;
+ var charCode, unicode;
+ var name = this.name;
+ var type = this.type;
+ var subtype = this.subtype;
+ var fontName = name.replace(/[,_]/g, '-');
+ var stdFontMap = (0, _standard_fonts.getStdFontMap)(),
+ nonStdFontMap = (0, _standard_fonts.getNonStdFontMap)();
+ var isStandardFont = !!stdFontMap[fontName] || !!(nonStdFontMap[fontName] && stdFontMap[nonStdFontMap[fontName]]);
+ fontName = stdFontMap[fontName] || nonStdFontMap[fontName] || fontName;
+ this.bold = fontName.search(/bold/gi) !== -1;
+ this.italic = fontName.search(/oblique/gi) !== -1 || fontName.search(/italic/gi) !== -1;
+ this.black = name.search(/Black/g) !== -1;
+ this.remeasure = Object.keys(this.widths).length > 0;
+
+ if (isStandardFont && type === 'CIDFontType2' && this.cidEncoding.startsWith('Identity-')) {
+ var GlyphMapForStandardFonts = (0, _standard_fonts.getGlyphMapForStandardFonts)();
+ var map = [];
+
+ for (charCode in GlyphMapForStandardFonts) {
+ map[+charCode] = GlyphMapForStandardFonts[charCode];
+ }
+
+ if (/Arial-?Black/i.test(name)) {
+ var SupplementalGlyphMapForArialBlack = (0, _standard_fonts.getSupplementalGlyphMapForArialBlack)();
+
+ for (charCode in SupplementalGlyphMapForArialBlack) {
+ map[+charCode] = SupplementalGlyphMapForArialBlack[charCode];
+ }
+ } else if (/Calibri/i.test(name)) {
+ var SupplementalGlyphMapForCalibri = (0, _standard_fonts.getSupplementalGlyphMapForCalibri)();
+
+ for (charCode in SupplementalGlyphMapForCalibri) {
+ map[+charCode] = SupplementalGlyphMapForCalibri[charCode];
+ }
+ }
+
+ var isIdentityUnicode = this.toUnicode instanceof IdentityToUnicodeMap;
+
+ if (!isIdentityUnicode) {
+ this.toUnicode.forEach(function (charCode, unicodeCharCode) {
+ map[+charCode] = unicodeCharCode;
+ });
+ }
+
+ this.toFontChar = map;
+ this.toUnicode = new ToUnicodeMap(map);
+ } else if (/Symbol/i.test(fontName)) {
+ this.toFontChar = buildToFontChar(_encodings.SymbolSetEncoding, (0, _glyphlist.getGlyphsUnicode)(), this.differences);
+ } else if (/Dingbats/i.test(fontName)) {
+ if (/Wingdings/i.test(name)) {
+ (0, _util.warn)('Non-embedded Wingdings font, falling back to ZapfDingbats.');
+ }
+
+ this.toFontChar = buildToFontChar(_encodings.ZapfDingbatsEncoding, (0, _glyphlist.getDingbatsGlyphsUnicode)(), this.differences);
+ } else if (isStandardFont) {
+ this.toFontChar = buildToFontChar(this.defaultEncoding, (0, _glyphlist.getGlyphsUnicode)(), this.differences);
+ } else {
+ var glyphsUnicodeMap = (0, _glyphlist.getGlyphsUnicode)();
+ this.toUnicode.forEach(function (charCode, unicodeCharCode) {
+ if (!_this.composite) {
+ var glyphName = _this.differences[charCode] || _this.defaultEncoding[charCode];
+ unicode = (0, _unicode.getUnicodeForGlyph)(glyphName, glyphsUnicodeMap);
+
+ if (unicode !== -1) {
+ unicodeCharCode = unicode;
+ }
+ }
+
+ _this.toFontChar[charCode] = unicodeCharCode;
+ });
+ }
+
+ this.loadedName = fontName.split('-')[0];
+ this.fontType = getFontType(type, subtype);
+ },
+ checkAndRepair: function Font_checkAndRepair(name, font, properties) {
+ var VALID_TABLES = ['OS/2', 'cmap', 'head', 'hhea', 'hmtx', 'maxp', 'name', 'post', 'loca', 'glyf', 'fpgm', 'prep', 'cvt ', 'CFF '];
+
+ function readTables(file, numTables) {
+ var tables = Object.create(null);
+ tables['OS/2'] = null;
+ tables['cmap'] = null;
+ tables['head'] = null;
+ tables['hhea'] = null;
+ tables['hmtx'] = null;
+ tables['maxp'] = null;
+ tables['name'] = null;
+ tables['post'] = null;
+
+ for (var i = 0; i < numTables; i++) {
+ var table = readTableEntry(font);
+
+ if (!VALID_TABLES.includes(table.tag)) {
+ continue;
+ }
+
+ if (table.length === 0) {
+ continue;
+ }
+
+ tables[table.tag] = table;
+ }
+
+ return tables;
+ }
+
+ function readTableEntry(file) {
+ var tag = (0, _util.bytesToString)(file.getBytes(4));
+ var checksum = file.getInt32() >>> 0;
+ var offset = file.getInt32() >>> 0;
+ var length = file.getInt32() >>> 0;
+ var previousPosition = file.pos;
+ file.pos = file.start ? file.start : 0;
+ file.skip(offset);
+ var data = file.getBytes(length);
+ file.pos = previousPosition;
+
+ if (tag === 'head') {
+ data[8] = data[9] = data[10] = data[11] = 0;
+ data[17] |= 0x20;
+ }
+
+ return {
+ tag: tag,
+ checksum: checksum,
+ length: length,
+ offset: offset,
+ data: data
+ };
+ }
+
+ function readOpenTypeHeader(ttf) {
+ return {
+ version: (0, _util.bytesToString)(ttf.getBytes(4)),
+ numTables: ttf.getUint16(),
+ searchRange: ttf.getUint16(),
+ entrySelector: ttf.getUint16(),
+ rangeShift: ttf.getUint16()
+ };
+ }
+
+ function readTrueTypeCollectionHeader(ttc) {
+ var ttcTag = (0, _util.bytesToString)(ttc.getBytes(4));
+ (0, _util.assert)(ttcTag === 'ttcf', 'Must be a TrueType Collection font.');
+ var majorVersion = ttc.getUint16();
+ var minorVersion = ttc.getUint16();
+ var numFonts = ttc.getInt32() >>> 0;
+ var offsetTable = [];
+
+ for (var i = 0; i < numFonts; i++) {
+ offsetTable.push(ttc.getInt32() >>> 0);
+ }
+
+ var header = {
+ ttcTag: ttcTag,
+ majorVersion: majorVersion,
+ minorVersion: minorVersion,
+ numFonts: numFonts,
+ offsetTable: offsetTable
+ };
+
+ switch (majorVersion) {
+ case 1:
+ return header;
+
+ case 2:
+ header.dsigTag = ttc.getInt32() >>> 0;
+ header.dsigLength = ttc.getInt32() >>> 0;
+ header.dsigOffset = ttc.getInt32() >>> 0;
+ return header;
+ }
+
+ throw new _util.FormatError("Invalid TrueType Collection majorVersion: ".concat(majorVersion, "."));
+ }
+
+ function readTrueTypeCollectionData(ttc, fontName) {
+ var _readTrueTypeCollecti = readTrueTypeCollectionHeader(ttc),
+ numFonts = _readTrueTypeCollecti.numFonts,
+ offsetTable = _readTrueTypeCollecti.offsetTable;
+
+ for (var i = 0; i < numFonts; i++) {
+ ttc.pos = (ttc.start || 0) + offsetTable[i];
+ var potentialHeader = readOpenTypeHeader(ttc);
+ var potentialTables = readTables(ttc, potentialHeader.numTables);
+
+ if (!potentialTables['name']) {
+ throw new _util.FormatError('TrueType Collection font must contain a "name" table.');
+ }
+
+ var nameTable = readNameTable(potentialTables['name']);
+
+ for (var j = 0, jj = nameTable.length; j < jj; j++) {
+ for (var k = 0, kk = nameTable[j].length; k < kk; k++) {
+ var nameEntry = nameTable[j][k];
+
+ if (nameEntry && nameEntry.replace(/\s/g, '') === fontName) {
+ return {
+ header: potentialHeader,
+ tables: potentialTables
+ };
+ }
+ }
+ }
+ }
+
+ throw new _util.FormatError("TrueType Collection does not contain \"".concat(fontName, "\" font."));
+ }
+
+ function readCmapTable(cmap, font, isSymbolicFont, hasEncoding) {
+ if (!cmap) {
+ (0, _util.warn)('No cmap table available.');
+ return {
+ platformId: -1,
+ encodingId: -1,
+ mappings: [],
+ hasShortCmap: false
+ };
+ }
+
+ var segment;
+ var start = (font.start ? font.start : 0) + cmap.offset;
+ font.pos = start;
+ font.getUint16();
+ var numTables = font.getUint16();
+ var potentialTable;
+ var canBreak = false;
+
+ for (var i = 0; i < numTables; i++) {
+ var platformId = font.getUint16();
+ var encodingId = font.getUint16();
+ var offset = font.getInt32() >>> 0;
+ var useTable = false;
+
+ if (potentialTable && potentialTable.platformId === platformId && potentialTable.encodingId === encodingId) {
+ continue;
+ }
+
+ if (platformId === 0 && encodingId === 0) {
+ useTable = true;
+ } else if (platformId === 1 && encodingId === 0) {
+ useTable = true;
+ } else if (platformId === 3 && encodingId === 1 && (hasEncoding || !potentialTable)) {
+ useTable = true;
+
+ if (!isSymbolicFont) {
+ canBreak = true;
+ }
+ } else if (isSymbolicFont && platformId === 3 && encodingId === 0) {
+ useTable = true;
+ canBreak = true;
+ }
+
+ if (useTable) {
+ potentialTable = {
+ platformId: platformId,
+ encodingId: encodingId,
+ offset: offset
+ };
+ }
+
+ if (canBreak) {
+ break;
+ }
+ }
+
+ if (potentialTable) {
+ font.pos = start + potentialTable.offset;
+ }
+
+ if (!potentialTable || font.peekByte() === -1) {
+ (0, _util.warn)('Could not find a preferred cmap table.');
+ return {
+ platformId: -1,
+ encodingId: -1,
+ mappings: [],
+ hasShortCmap: false
+ };
+ }
+
+ var format = font.getUint16();
+ font.getUint16();
+ font.getUint16();
+ var hasShortCmap = false;
+ var mappings = [];
+ var j, glyphId;
+
+ if (format === 0) {
+ for (j = 0; j < 256; j++) {
+ var index = font.getByte();
+
+ if (!index) {
+ continue;
+ }
+
+ mappings.push({
+ charCode: j,
+ glyphId: index
+ });
+ }
+
+ hasShortCmap = true;
+ } else if (format === 4) {
+ var segCount = font.getUint16() >> 1;
+ font.getBytes(6);
+ var segIndex,
+ segments = [];
+
+ for (segIndex = 0; segIndex < segCount; segIndex++) {
+ segments.push({
+ end: font.getUint16()
+ });
+ }
+
+ font.getUint16();
+
+ for (segIndex = 0; segIndex < segCount; segIndex++) {
+ segments[segIndex].start = font.getUint16();
+ }
+
+ for (segIndex = 0; segIndex < segCount; segIndex++) {
+ segments[segIndex].delta = font.getUint16();
+ }
+
+ var offsetsCount = 0;
+
+ for (segIndex = 0; segIndex < segCount; segIndex++) {
+ segment = segments[segIndex];
+ var rangeOffset = font.getUint16();
+
+ if (!rangeOffset) {
+ segment.offsetIndex = -1;
+ continue;
+ }
+
+ var offsetIndex = (rangeOffset >> 1) - (segCount - segIndex);
+ segment.offsetIndex = offsetIndex;
+ offsetsCount = Math.max(offsetsCount, offsetIndex + segment.end - segment.start + 1);
+ }
+
+ var offsets = [];
+
+ for (j = 0; j < offsetsCount; j++) {
+ offsets.push(font.getUint16());
+ }
+
+ for (segIndex = 0; segIndex < segCount; segIndex++) {
+ segment = segments[segIndex];
+ start = segment.start;
+ var end = segment.end;
+ var delta = segment.delta;
+ offsetIndex = segment.offsetIndex;
+
+ for (j = start; j <= end; j++) {
+ if (j === 0xFFFF) {
+ continue;
+ }
+
+ glyphId = offsetIndex < 0 ? j : offsets[offsetIndex + j - start];
+ glyphId = glyphId + delta & 0xFFFF;
+ mappings.push({
+ charCode: j,
+ glyphId: glyphId
+ });
+ }
+ }
+ } else if (format === 6) {
+ var firstCode = font.getUint16();
+ var entryCount = font.getUint16();
+
+ for (j = 0; j < entryCount; j++) {
+ glyphId = font.getUint16();
+ var charCode = firstCode + j;
+ mappings.push({
+ charCode: charCode,
+ glyphId: glyphId
+ });
+ }
+ } else {
+ (0, _util.warn)('cmap table has unsupported format: ' + format);
+ return {
+ platformId: -1,
+ encodingId: -1,
+ mappings: [],
+ hasShortCmap: false
+ };
+ }
+
+ mappings.sort(function (a, b) {
+ return a.charCode - b.charCode;
+ });
+
+ for (i = 1; i < mappings.length; i++) {
+ if (mappings[i - 1].charCode === mappings[i].charCode) {
+ mappings.splice(i, 1);
+ i--;
+ }
+ }
+
+ return {
+ platformId: potentialTable.platformId,
+ encodingId: potentialTable.encodingId,
+ mappings: mappings,
+ hasShortCmap: hasShortCmap
+ };
+ }
+
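+  // Clamps hhea's numOfMetrics to numGlyphs and grows the hmtx data so that
+  // every glyph (including a duplicated first glyph) has a metrics entry.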
+ function sanitizeMetrics(font, header, metrics, numGlyphs, dupFirstEntry) {
+ if (!header) {
+ if (metrics) {
+ metrics.data = null;
+ }
+
+ return;
+ }
+
+ font.pos = (font.start ? font.start : 0) + header.offset;
+ font.pos += 4;
+ font.pos += 2;
+ font.pos += 2;
+ font.pos += 2;
+ font.pos += 2;
+ font.pos += 2;
+ font.pos += 2;
+ font.pos += 2;
+ font.pos += 2;
+ font.pos += 2;
+ font.pos += 2;
+ font.pos += 8;
+ font.pos += 2;
+ var numOfMetrics = font.getUint16();
+
+ if (numOfMetrics > numGlyphs) {
+ (0, _util.info)('The numOfMetrics (' + numOfMetrics + ') should not be ' + 'greater than the numGlyphs (' + numGlyphs + ')');
+ numOfMetrics = numGlyphs;
+ header.data[34] = (numOfMetrics & 0xff00) >> 8;
+ header.data[35] = numOfMetrics & 0x00ff;
+ }
+
+ var numOfSidebearings = numGlyphs - numOfMetrics;
+ var numMissing = numOfSidebearings - (metrics.length - numOfMetrics * 4 >> 1);
+
+ if (numMissing > 0) {
+ var entries = new Uint8Array(metrics.length + numMissing * 2);
+ entries.set(metrics.data);
+
+ if (dupFirstEntry) {
+ entries[metrics.length] = metrics.data[2];
+ entries[metrics.length + 1] = metrics.data[3];
+ }
+
+ metrics.data = entries;
+ }
+ }
+
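+  // Copies one glyf record into dest: composite glyphs are normalised to a
+  // contour count of -1, reserved flag bits are cleared, truncated records are
+  // dropped, and instructions are stripped when the hint programs are invalid.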
+ function sanitizeGlyph(source, sourceStart, sourceEnd, dest, destStart, hintsValid) {
+ var glyphProfile = {
+ length: 0,
+ sizeOfInstructions: 0
+ };
+
+ if (sourceEnd - sourceStart <= 12) {
+ return glyphProfile;
+ }
+
+ var glyf = source.subarray(sourceStart, sourceEnd);
+ var contoursCount = signedInt16(glyf[0], glyf[1]);
+
+ if (contoursCount < 0) {
+ contoursCount = -1;
+ writeSignedInt16(glyf, 0, contoursCount);
+ dest.set(glyf, destStart);
+ glyphProfile.length = glyf.length;
+ return glyphProfile;
+ }
+
+ var i,
+ j = 10,
+ flagsCount = 0;
+
+ for (i = 0; i < contoursCount; i++) {
+ var endPoint = glyf[j] << 8 | glyf[j + 1];
+ flagsCount = endPoint + 1;
+ j += 2;
+ }
+
+ var instructionsStart = j;
+ var instructionsLength = glyf[j] << 8 | glyf[j + 1];
+ glyphProfile.sizeOfInstructions = instructionsLength;
+ j += 2 + instructionsLength;
+ var instructionsEnd = j;
+ var coordinatesLength = 0;
+
+ for (i = 0; i < flagsCount; i++) {
+ var flag = glyf[j++];
+
+ if (flag & 0xC0) {
+ glyf[j - 1] = flag & 0x3F;
+ }
+
+ var xyLength = (flag & 2 ? 1 : flag & 16 ? 0 : 2) + (flag & 4 ? 1 : flag & 32 ? 0 : 2);
+ coordinatesLength += xyLength;
+
+ if (flag & 8) {
+ var repeat = glyf[j++];
+ i += repeat;
+ coordinatesLength += repeat * xyLength;
+ }
+ }
+
+ if (coordinatesLength === 0) {
+ return glyphProfile;
+ }
+
+ var glyphDataLength = j + coordinatesLength;
+
+ if (glyphDataLength > glyf.length) {
+ return glyphProfile;
+ }
+
+ if (!hintsValid && instructionsLength > 0) {
+ dest.set(glyf.subarray(0, instructionsStart), destStart);
+ dest.set([0, 0], destStart + instructionsStart);
+ dest.set(glyf.subarray(instructionsEnd, glyphDataLength), destStart + instructionsStart + 2);
+ glyphDataLength -= instructionsLength;
+
+ if (glyf.length - glyphDataLength > 3) {
+ glyphDataLength = glyphDataLength + 3 & ~3;
+ }
+
+ glyphProfile.length = glyphDataLength;
+ return glyphProfile;
+ }
+
+ if (glyf.length - glyphDataLength > 3) {
+ glyphDataLength = glyphDataLength + 3 & ~3;
+ dest.set(glyf.subarray(0, glyphDataLength), destStart);
+ glyphProfile.length = glyphDataLength;
+ return glyphProfile;
+ }
+
+ dest.set(glyf, destStart);
+ glyphProfile.length = glyf.length;
+ return glyphProfile;
+ }
+
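+  // Repairs the head table: forces the version to 1.0 and derives a valid
+  // indexToLocFormat from the loca table length when the stored value is bad.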
+ function sanitizeHead(head, numGlyphs, locaLength) {
+ var data = head.data;
+ var version = int32(data[0], data[1], data[2], data[3]);
+
+ if (version >> 16 !== 1) {
+ (0, _util.info)('Attempting to fix invalid version in head table: ' + version);
+ data[0] = 0;
+ data[1] = 1;
+ data[2] = 0;
+ data[3] = 0;
+ }
+
+ var indexToLocFormat = int16(data[50], data[51]);
+
+ if (indexToLocFormat < 0 || indexToLocFormat > 1) {
+ (0, _util.info)('Attempting to fix invalid indexToLocFormat in head table: ' + indexToLocFormat);
+ var numGlyphsPlusOne = numGlyphs + 1;
+
+ if (locaLength === numGlyphsPlusOne << 1) {
+ data[50] = 0;
+ data[51] = 0;
+ } else if (locaLength === numGlyphsPlusOne << 2) {
+ data[50] = 0;
+ data[51] = 1;
+ } else {
+ throw new _util.FormatError('Could not fix indexToLocFormat: ' + indexToLocFormat);
+ }
+ }
+ }
+
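+  // Rebuilds the loca and glyf tables with monotonically increasing offsets,
+  // running every glyph through sanitizeGlyph, recording empty glyphs in
+  // missingGlyphs and optionally appending a duplicate of the first glyph.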
+ function sanitizeGlyphLocations(loca, glyf, numGlyphs, isGlyphLocationsLong, hintsValid, dupFirstEntry, maxSizeOfInstructions) {
+ var itemSize, itemDecode, itemEncode;
+
+ if (isGlyphLocationsLong) {
+ itemSize = 4;
+
+ itemDecode = function fontItemDecodeLong(data, offset) {
+ return data[offset] << 24 | data[offset + 1] << 16 | data[offset + 2] << 8 | data[offset + 3];
+ };
+
+ itemEncode = function fontItemEncodeLong(data, offset, value) {
+ data[offset] = value >>> 24 & 0xFF;
+ data[offset + 1] = value >> 16 & 0xFF;
+ data[offset + 2] = value >> 8 & 0xFF;
+ data[offset + 3] = value & 0xFF;
+ };
+ } else {
+ itemSize = 2;
+
+ itemDecode = function fontItemDecode(data, offset) {
+ return data[offset] << 9 | data[offset + 1] << 1;
+ };
+
+ itemEncode = function fontItemEncode(data, offset, value) {
+ data[offset] = value >> 9 & 0xFF;
+ data[offset + 1] = value >> 1 & 0xFF;
+ };
+ }
+
+ var numGlyphsOut = dupFirstEntry ? numGlyphs + 1 : numGlyphs;
+ var locaData = loca.data;
+ var locaDataSize = itemSize * (1 + numGlyphsOut);
+ locaData = new Uint8Array(locaDataSize);
+ locaData.set(loca.data.subarray(0, locaDataSize));
+ loca.data = locaData;
+ var oldGlyfData = glyf.data;
+ var oldGlyfDataLength = oldGlyfData.length;
+ var newGlyfData = new Uint8Array(oldGlyfDataLength);
+ var startOffset = itemDecode(locaData, 0);
+ var writeOffset = 0;
+ var missingGlyphs = Object.create(null);
+ itemEncode(locaData, 0, writeOffset);
+ var i, j;
+
+ for (i = 0, j = itemSize; i < numGlyphs; i++, j += itemSize) {
+ var endOffset = itemDecode(locaData, j);
+
+ if (endOffset === 0) {
+ endOffset = startOffset;
+ }
+
+ if (endOffset > oldGlyfDataLength && (oldGlyfDataLength + 3 & ~3) === endOffset) {
+ endOffset = oldGlyfDataLength;
+ }
+
+ if (endOffset > oldGlyfDataLength) {
+ startOffset = endOffset;
+ }
+
+ var glyphProfile = sanitizeGlyph(oldGlyfData, startOffset, endOffset, newGlyfData, writeOffset, hintsValid);
+ var newLength = glyphProfile.length;
+
+ if (newLength === 0) {
+ missingGlyphs[i] = true;
+ }
+
+ if (glyphProfile.sizeOfInstructions > maxSizeOfInstructions) {
+ maxSizeOfInstructions = glyphProfile.sizeOfInstructions;
+ }
+
+ writeOffset += newLength;
+ itemEncode(locaData, j, writeOffset);
+ startOffset = endOffset;
+ }
+
+ if (writeOffset === 0) {
+ var simpleGlyph = new Uint8Array([0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 0]);
+
+ for (i = 0, j = itemSize; i < numGlyphsOut; i++, j += itemSize) {
+ itemEncode(locaData, j, simpleGlyph.length);
+ }
+
+ glyf.data = simpleGlyph;
+ } else if (dupFirstEntry) {
+ var firstEntryLength = itemDecode(locaData, itemSize);
+
+ if (newGlyfData.length > firstEntryLength + writeOffset) {
+ glyf.data = newGlyfData.subarray(0, firstEntryLength + writeOffset);
+ } else {
+ glyf.data = new Uint8Array(firstEntryLength + writeOffset);
+ glyf.data.set(newGlyfData.subarray(0, writeOffset));
+ }
+
+ glyf.data.set(newGlyfData.subarray(0, firstEntryLength), writeOffset);
+ itemEncode(loca.data, locaData.length - itemSize, writeOffset + firstEntryLength);
+ } else {
+ glyf.data = newGlyfData.subarray(0, writeOffset);
+ }
+
+ return {
+ missingGlyphs: missingGlyphs,
+ maxSizeOfInstructions: maxSizeOfInstructions
+ };
+ }
+
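+  // Reads glyph names from the post table (versions 1.0, 2.0 and 3.0) into
+  // properties.glyphNames; returns false when the table looks invalid.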
+ function readPostScriptTable(post, properties, maxpNumGlyphs) {
+ var start = (font.start ? font.start : 0) + post.offset;
+ font.pos = start;
+ var length = post.length,
+ end = start + length;
+ var version = font.getInt32();
+ font.getBytes(28);
+ var glyphNames;
+ var valid = true;
+ var i;
+
+ switch (version) {
+ case 0x00010000:
+ glyphNames = MacStandardGlyphOrdering;
+ break;
+
+ case 0x00020000:
+ var numGlyphs = font.getUint16();
+
+ if (numGlyphs !== maxpNumGlyphs) {
+ valid = false;
+ break;
+ }
+
+ var glyphNameIndexes = [];
+
+ for (i = 0; i < numGlyphs; ++i) {
+ var index = font.getUint16();
+
+ if (index >= 32768) {
+ valid = false;
+ break;
+ }
+
+ glyphNameIndexes.push(index);
+ }
+
+ if (!valid) {
+ break;
+ }
+
+ var customNames = [];
+ var strBuf = [];
+
+ while (font.pos < end) {
+ var stringLength = font.getByte();
+ strBuf.length = stringLength;
+
+ for (i = 0; i < stringLength; ++i) {
+ strBuf[i] = String.fromCharCode(font.getByte());
+ }
+
+ customNames.push(strBuf.join(''));
+ }
+
+ glyphNames = [];
+
+ for (i = 0; i < numGlyphs; ++i) {
+ var j = glyphNameIndexes[i];
+
+ if (j < 258) {
+ glyphNames.push(MacStandardGlyphOrdering[j]);
+ continue;
+ }
+
+ glyphNames.push(customNames[j - 258]);
+ }
+
+ break;
+
+ case 0x00030000:
+ break;
+
+ default:
+ (0, _util.warn)('Unknown/unsupported post table version ' + version);
+ valid = false;
+
+ if (properties.defaultEncoding) {
+ glyphNames = properties.defaultEncoding;
+ }
+
+ break;
+ }
+
+ properties.glyphNames = glyphNames;
+ return valid;
+ }
+
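+  // Parses a format 0 name table into two string arrays: names[0] holds the
+  // Macintosh records and names[1] the Windows/Unicode records.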
+ function readNameTable(nameTable) {
+ var start = (font.start ? font.start : 0) + nameTable.offset;
+ font.pos = start;
+ var names = [[], []];
+ var length = nameTable.length,
+ end = start + length;
+ var format = font.getUint16();
+ var FORMAT_0_HEADER_LENGTH = 6;
+
+ if (format !== 0 || length < FORMAT_0_HEADER_LENGTH) {
+ return names;
+ }
+
+ var numRecords = font.getUint16();
+ var stringsStart = font.getUint16();
+ var records = [];
+ var NAME_RECORD_LENGTH = 12;
+ var i, ii;
+
+ for (i = 0; i < numRecords && font.pos + NAME_RECORD_LENGTH <= end; i++) {
+ var r = {
+ platform: font.getUint16(),
+ encoding: font.getUint16(),
+ language: font.getUint16(),
+ name: font.getUint16(),
+ length: font.getUint16(),
+ offset: font.getUint16()
+ };
+
+ if (r.platform === 1 && r.encoding === 0 && r.language === 0 || r.platform === 3 && r.encoding === 1 && r.language === 0x409) {
+ records.push(r);
+ }
+ }
+
+ for (i = 0, ii = records.length; i < ii; i++) {
+ var record = records[i];
+
+ if (record.length <= 0) {
+ continue;
+ }
+
+ var pos = start + stringsStart + record.offset;
+
+ if (pos + record.length > end) {
+ continue;
+ }
+
+ font.pos = pos;
+ var nameIndex = record.name;
+
+ if (record.encoding) {
+ var str = '';
+
+ for (var j = 0, jj = record.length; j < jj; j += 2) {
+ str += String.fromCharCode(font.getUint16());
+ }
+
+ names[1][nameIndex] = str;
+ } else {
+ names[0][nameIndex] = (0, _util.bytesToString)(font.getBytes(record.length));
+ }
+ }
+
+ return names;
+ }
+
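+  // Approximate net stack effect of each TrueType opcode, used below to
+  // emulate the interpreter stack (a -999 entry in effect empties it).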
+ var TTOpsStackDeltas = [0, 0, 0, 0, 0, 0, 0, 0, -2, -2, -2, -2, 0, 0, -2, -5, -1, -1, -1, -1, -1, -1, -1, -1, 0, 0, -1, 0, -1, -1, -1, -1, 1, -1, -999, 0, 1, 0, -1, -2, 0, -1, -2, -1, -1, 0, -1, -1, 0, 0, -999, -999, -1, -1, -1, -1, -2, -999, -2, -2, -999, 0, -2, -2, 0, 0, -2, 0, -2, 0, 0, 0, -2, -1, -1, 1, 1, 0, 0, -1, -1, -1, -1, -1, -1, -1, 0, 0, -1, 0, -1, -1, 0, -999, -1, -1, -1, -1, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -2, -999, -999, -999, -999, -999, -1, -1, -2, -2, 0, 0, 0, 0, -1, -1, -999, -2, -2, 0, 0, -1, -2, -2, 0, 0, 0, -1, -1, -1, -2];
+
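+  // Emulates the TrueType bytecode far enough to follow FDEF/CALL/ENDF,
+  // records which functions are defined and called, flags bad stack usage,
+  // and appends a terminating ENDF when a function definition is left open.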
+ function sanitizeTTProgram(table, ttContext) {
+ var data = table.data;
+ var i = 0,
+ j,
+ n,
+ b,
+ funcId,
+ pc,
+ lastEndf = 0,
+ lastDeff = 0;
+ var stack = [];
+ var callstack = [];
+ var functionsCalled = [];
+ var tooComplexToFollowFunctions = ttContext.tooComplexToFollowFunctions;
+ var inFDEF = false,
+ ifLevel = 0,
+ inELSE = 0;
+
+ for (var ii = data.length; i < ii;) {
+ var op = data[i++];
+
+ if (op === 0x40) {
+ n = data[i++];
+
+ if (inFDEF || inELSE) {
+ i += n;
+ } else {
+ for (j = 0; j < n; j++) {
+ stack.push(data[i++]);
+ }
+ }
+ } else if (op === 0x41) {
+ n = data[i++];
+
+ if (inFDEF || inELSE) {
+ i += n * 2;
+ } else {
+ for (j = 0; j < n; j++) {
+ b = data[i++];
+ stack.push(b << 8 | data[i++]);
+ }
+ }
+ } else if ((op & 0xF8) === 0xB0) {
+ n = op - 0xB0 + 1;
+
+ if (inFDEF || inELSE) {
+ i += n;
+ } else {
+ for (j = 0; j < n; j++) {
+ stack.push(data[i++]);
+ }
+ }
+ } else if ((op & 0xF8) === 0xB8) {
+ n = op - 0xB8 + 1;
+
+ if (inFDEF || inELSE) {
+ i += n * 2;
+ } else {
+ for (j = 0; j < n; j++) {
+ b = data[i++];
+ stack.push(b << 8 | data[i++]);
+ }
+ }
+ } else if (op === 0x2B && !tooComplexToFollowFunctions) {
+ if (!inFDEF && !inELSE) {
+ funcId = stack[stack.length - 1];
+
+ if (isNaN(funcId)) {
+ (0, _util.info)('TT: CALL empty stack (or invalid entry).');
+ } else {
+ ttContext.functionsUsed[funcId] = true;
+
+ if (funcId in ttContext.functionsStackDeltas) {
+ var newStackLength = stack.length + ttContext.functionsStackDeltas[funcId];
+
+ if (newStackLength < 0) {
+ (0, _util.warn)('TT: CALL invalid functions stack delta.');
+ ttContext.hintsValid = false;
+ return;
+ }
+
+ stack.length = newStackLength;
+ } else if (funcId in ttContext.functionsDefined && !functionsCalled.includes(funcId)) {
+ callstack.push({
+ data: data,
+ i: i,
+ stackTop: stack.length - 1
+ });
+ functionsCalled.push(funcId);
+ pc = ttContext.functionsDefined[funcId];
+
+ if (!pc) {
+ (0, _util.warn)('TT: CALL non-existent function');
+ ttContext.hintsValid = false;
+ return;
+ }
+
+ data = pc.data;
+ i = pc.i;
+ }
+ }
+ }
+ } else if (op === 0x2C && !tooComplexToFollowFunctions) {
+ if (inFDEF || inELSE) {
+ (0, _util.warn)('TT: nested FDEFs not allowed');
+ tooComplexToFollowFunctions = true;
+ }
+
+ inFDEF = true;
+ lastDeff = i;
+ funcId = stack.pop();
+ ttContext.functionsDefined[funcId] = {
+ data: data,
+ i: i
+ };
+ } else if (op === 0x2D) {
+ if (inFDEF) {
+ inFDEF = false;
+ lastEndf = i;
+ } else {
+ pc = callstack.pop();
+
+ if (!pc) {
+ (0, _util.warn)('TT: ENDF bad stack');
+ ttContext.hintsValid = false;
+ return;
+ }
+
+ funcId = functionsCalled.pop();
+ data = pc.data;
+ i = pc.i;
+ ttContext.functionsStackDeltas[funcId] = stack.length - pc.stackTop;
+ }
+ } else if (op === 0x89) {
+ if (inFDEF || inELSE) {
+ (0, _util.warn)('TT: nested IDEFs not allowed');
+ tooComplexToFollowFunctions = true;
+ }
+
+ inFDEF = true;
+ lastDeff = i;
+ } else if (op === 0x58) {
+ ++ifLevel;
+ } else if (op === 0x1B) {
+ inELSE = ifLevel;
+ } else if (op === 0x59) {
+ if (inELSE === ifLevel) {
+ inELSE = 0;
+ }
+
+ --ifLevel;
+ } else if (op === 0x1C) {
+ if (!inFDEF && !inELSE) {
+ var offset = stack[stack.length - 1];
+
+ if (offset > 0) {
+ i += offset - 1;
+ }
+ }
+ }
+
+ if (!inFDEF && !inELSE) {
+ var stackDelta = op <= 0x8E ? TTOpsStackDeltas[op] : op >= 0xC0 && op <= 0xDF ? -1 : op >= 0xE0 ? -2 : 0;
+
+ if (op >= 0x71 && op <= 0x75) {
+ n = stack.pop();
+
+ if (!isNaN(n)) {
+ stackDelta = -n * 2;
+ }
+ }
+
+ while (stackDelta < 0 && stack.length > 0) {
+ stack.pop();
+ stackDelta++;
+ }
+
+ while (stackDelta > 0) {
+ stack.push(NaN);
+ stackDelta--;
+ }
+ }
+ }
+
+ ttContext.tooComplexToFollowFunctions = tooComplexToFollowFunctions;
+ var content = [data];
+
+ if (i > data.length) {
+ content.push(new Uint8Array(i - data.length));
+ }
+
+ if (lastDeff > lastEndf) {
+ (0, _util.warn)('TT: complementing a missing function tail');
+ content.push(new Uint8Array([0x22, 0x2D]));
+ }
+
+ foldTTTable(table, content);
+ }
+
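+  // Marks the hints invalid when more functions are defined than maxp allows,
+  // or when a called function id is out of range or never defined.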
+ function checkInvalidFunctions(ttContext, maxFunctionDefs) {
+ if (ttContext.tooComplexToFollowFunctions) {
+ return;
+ }
+
+ if (ttContext.functionsDefined.length > maxFunctionDefs) {
+ (0, _util.warn)('TT: more functions defined than expected');
+ ttContext.hintsValid = false;
+ return;
+ }
+
+ for (var j = 0, jj = ttContext.functionsUsed.length; j < jj; j++) {
+ if (j > maxFunctionDefs) {
+ (0, _util.warn)('TT: invalid function id: ' + j);
+ ttContext.hintsValid = false;
+ return;
+ }
+
+ if (ttContext.functionsUsed[j] && !ttContext.functionsDefined[j]) {
+ (0, _util.warn)('TT: undefined function: ' + j);
+ ttContext.hintsValid = false;
+ return;
+ }
+ }
+ }
+
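+  // Concatenates the patched program fragments back into a single,
+  // 4-byte-aligned table buffer.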
+ function foldTTTable(table, content) {
+ if (content.length > 1) {
+ var newLength = 0;
+ var j, jj;
+
+ for (j = 0, jj = content.length; j < jj; j++) {
+ newLength += content[j].length;
+ }
+
+ newLength = newLength + 3 & ~3;
+ var result = new Uint8Array(newLength);
+ var pos = 0;
+
+ for (j = 0, jj = content.length; j < jj; j++) {
+ result.set(content[j], pos);
+ pos += content[j].length;
+ }
+
+ table.data = result;
+ table.length = newLength;
+ }
+ }
+
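+  // Runs the sanitizer over fpgm and prep, validates the functions they
+  // define, pads an odd-length cvt table and reports whether hinting is safe.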
+ function sanitizeTTPrograms(fpgm, prep, cvt, maxFunctionDefs) {
+ var ttContext = {
+ functionsDefined: [],
+ functionsUsed: [],
+ functionsStackDeltas: [],
+ tooComplexToFollowFunctions: false,
+ hintsValid: true
+ };
+
+ if (fpgm) {
+ sanitizeTTProgram(fpgm, ttContext);
+ }
+
+ if (prep) {
+ sanitizeTTProgram(prep, ttContext);
+ }
+
+ if (fpgm) {
+ checkInvalidFunctions(ttContext, maxFunctionDefs);
+ }
+
+ if (cvt && cvt.length & 1) {
+ var cvtData = new Uint8Array(cvt.length + 1);
+ cvtData.set(cvt.data);
+ cvt.data = cvtData;
+ }
+
+ return ttContext.hintsValid;
+ }
+
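+  // Main repair path: read the sfnt (or TrueType Collection) tables, sanitise
+  // maxp/hmtx/head/loca/glyf/post, rebuild cmap, OS/2 and name, and serialise
+  // the result with OpenTypeFileBuilder.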
+ font = new _stream.Stream(new Uint8Array(font.getBytes()));
+ var header, tables;
+
+ if (isTrueTypeCollectionFile(font)) {
+ var ttcData = readTrueTypeCollectionData(font, this.name);
+ header = ttcData.header;
+ tables = ttcData.tables;
+ } else {
+ header = readOpenTypeHeader(font);
+ tables = readTables(font, header.numTables);
+ }
+
+ var cff, cffFile;
+ var isTrueType = !tables['CFF '];
+
+ if (!isTrueType) {
+ var isComposite = properties.composite && ((properties.cidToGidMap || []).length > 0 || !(properties.cMap instanceof _cmap.IdentityCMap));
+
+ if (header.version === 'OTTO' && !isComposite || !tables['head'] || !tables['hhea'] || !tables['maxp'] || !tables['post']) {
+ cffFile = new _stream.Stream(tables['CFF '].data);
+ cff = new CFFFont(cffFile, properties);
+ adjustWidths(properties);
+ return this.convert(name, cff, properties);
+ }
+
+ delete tables['glyf'];
+ delete tables['loca'];
+ delete tables['fpgm'];
+ delete tables['prep'];
+ delete tables['cvt '];
+ this.isOpenType = true;
+ } else {
+ if (!tables['loca']) {
+ throw new _util.FormatError('Required "loca" table is not found');
+ }
+
+ if (!tables['glyf']) {
+ (0, _util.warn)('Required "glyf" table is not found -- trying to recover.');
+ tables['glyf'] = {
+ tag: 'glyf',
+ data: new Uint8Array(0)
+ };
+ }
+
+ this.isOpenType = false;
+ }
+
+ if (!tables['maxp']) {
+ throw new _util.FormatError('Required "maxp" table is not found');
+ }
+
+ font.pos = (font.start || 0) + tables['maxp'].offset;
+ var version = font.getInt32();
+ var numGlyphs = font.getUint16();
+ var numGlyphsOut = numGlyphs + 1;
+ var dupFirstEntry = true;
+
+ if (numGlyphsOut > 0xFFFF) {
+ dupFirstEntry = false;
+ numGlyphsOut = numGlyphs;
+ (0, _util.warn)('Not enough space in glyfs to duplicate first glyph.');
+ }
+
+ var maxFunctionDefs = 0;
+ var maxSizeOfInstructions = 0;
+
+ if (version >= 0x00010000 && tables['maxp'].length >= 22) {
+ font.pos += 8;
+ var maxZones = font.getUint16();
+
+ if (maxZones > 2) {
+ tables['maxp'].data[14] = 0;
+ tables['maxp'].data[15] = 2;
+ }
+
+ font.pos += 4;
+ maxFunctionDefs = font.getUint16();
+ font.pos += 4;
+ maxSizeOfInstructions = font.getUint16();
+ }
+
+ tables['maxp'].data[4] = numGlyphsOut >> 8;
+ tables['maxp'].data[5] = numGlyphsOut & 255;
+ var hintsValid = sanitizeTTPrograms(tables['fpgm'], tables['prep'], tables['cvt '], maxFunctionDefs);
+
+ if (!hintsValid) {
+ delete tables['fpgm'];
+ delete tables['prep'];
+ delete tables['cvt '];
+ }
+
+ sanitizeMetrics(font, tables['hhea'], tables['hmtx'], numGlyphsOut, dupFirstEntry);
+
+ if (!tables['head']) {
+ throw new _util.FormatError('Required "head" table is not found');
+ }
+
+ sanitizeHead(tables['head'], numGlyphs, isTrueType ? tables['loca'].length : 0);
+ var missingGlyphs = Object.create(null);
+
+ if (isTrueType) {
+ var isGlyphLocationsLong = int16(tables['head'].data[50], tables['head'].data[51]);
+ var glyphsInfo = sanitizeGlyphLocations(tables['loca'], tables['glyf'], numGlyphs, isGlyphLocationsLong, hintsValid, dupFirstEntry, maxSizeOfInstructions);
+ missingGlyphs = glyphsInfo.missingGlyphs;
+
+ if (version >= 0x00010000 && tables['maxp'].length >= 22) {
+ tables['maxp'].data[26] = glyphsInfo.maxSizeOfInstructions >> 8;
+ tables['maxp'].data[27] = glyphsInfo.maxSizeOfInstructions & 255;
+ }
+ }
+
+ if (!tables['hhea']) {
+ throw new _util.FormatError('Required "hhea" table is not found');
+ }
+
+ if (tables['hhea'].data[10] === 0 && tables['hhea'].data[11] === 0) {
+ tables['hhea'].data[10] = 0xFF;
+ tables['hhea'].data[11] = 0xFF;
+ }
+
+ var metricsOverride = {
+ unitsPerEm: int16(tables['head'].data[18], tables['head'].data[19]),
+ yMax: int16(tables['head'].data[42], tables['head'].data[43]),
+ yMin: signedInt16(tables['head'].data[38], tables['head'].data[39]),
+ ascent: int16(tables['hhea'].data[4], tables['hhea'].data[5]),
+ descent: signedInt16(tables['hhea'].data[6], tables['hhea'].data[7])
+ };
+ this.ascent = metricsOverride.ascent / metricsOverride.unitsPerEm;
+ this.descent = metricsOverride.descent / metricsOverride.unitsPerEm;
+
+ if (tables['post']) {
+ readPostScriptTable(tables['post'], properties, numGlyphs);
+ }
+
+ tables['post'] = {
+ tag: 'post',
+ data: createPostTable(properties)
+ };
+ var charCodeToGlyphId = [],
+ charCode;
+
+ function hasGlyph(glyphId) {
+ return !missingGlyphs[glyphId];
+ }
+
+ if (properties.composite) {
+ var cidToGidMap = properties.cidToGidMap || [];
+ var isCidToGidMapEmpty = cidToGidMap.length === 0;
+ properties.cMap.forEach(function (charCode, cid) {
+ if (cid > 0xffff) {
+ throw new _util.FormatError('Max size of CID is 65,535');
+ }
+
+ var glyphId = -1;
+
+ if (isCidToGidMapEmpty) {
+ glyphId = cid;
+ } else if (cidToGidMap[cid] !== undefined) {
+ glyphId = cidToGidMap[cid];
+ }
+
+ if (glyphId >= 0 && glyphId < numGlyphs && hasGlyph(glyphId)) {
+ charCodeToGlyphId[charCode] = glyphId;
+ }
+ });
+ } else {
+ var cmapTable = readCmapTable(tables['cmap'], font, this.isSymbolicFont, properties.hasEncoding);
+ var cmapPlatformId = cmapTable.platformId;
+ var cmapEncodingId = cmapTable.encodingId;
+ var cmapMappings = cmapTable.mappings;
+ var cmapMappingsLength = cmapMappings.length;
+
+ if (properties.hasEncoding && (cmapPlatformId === 3 && cmapEncodingId === 1 || cmapPlatformId === 1 && cmapEncodingId === 0) || cmapPlatformId === -1 && cmapEncodingId === -1 && !!(0, _encodings.getEncoding)(properties.baseEncodingName)) {
+ var baseEncoding = [];
+
+ if (properties.baseEncodingName === 'MacRomanEncoding' || properties.baseEncodingName === 'WinAnsiEncoding') {
+ baseEncoding = (0, _encodings.getEncoding)(properties.baseEncodingName);
+ }
+
+ var glyphsUnicodeMap = (0, _glyphlist.getGlyphsUnicode)();
+
+ for (charCode = 0; charCode < 256; charCode++) {
+ var glyphName, standardGlyphName;
+
+ if (this.differences && charCode in this.differences) {
+ glyphName = this.differences[charCode];
+ } else if (charCode in baseEncoding && baseEncoding[charCode] !== '') {
+ glyphName = baseEncoding[charCode];
+ } else {
+ glyphName = _encodings.StandardEncoding[charCode];
+ }
+
+ if (!glyphName) {
+ continue;
+ }
+
+ standardGlyphName = recoverGlyphName(glyphName, glyphsUnicodeMap);
+ var unicodeOrCharCode;
+
+ if (cmapPlatformId === 3 && cmapEncodingId === 1) {
+ unicodeOrCharCode = glyphsUnicodeMap[standardGlyphName];
+ } else if (cmapPlatformId === 1 && cmapEncodingId === 0) {
+ unicodeOrCharCode = _encodings.MacRomanEncoding.indexOf(standardGlyphName);
+ }
+
+ var found = false;
+
+ for (var i = 0; i < cmapMappingsLength; ++i) {
+ if (cmapMappings[i].charCode !== unicodeOrCharCode) {
+ continue;
+ }
+
+ charCodeToGlyphId[charCode] = cmapMappings[i].glyphId;
+ found = true;
+ break;
+ }
+
+ if (!found && properties.glyphNames) {
+ var glyphId = properties.glyphNames.indexOf(glyphName);
+
+ if (glyphId === -1 && standardGlyphName !== glyphName) {
+ glyphId = properties.glyphNames.indexOf(standardGlyphName);
+ }
+
+ if (glyphId > 0 && hasGlyph(glyphId)) {
+ charCodeToGlyphId[charCode] = glyphId;
+ }
+ }
+ }
+ } else if (cmapPlatformId === 0 && cmapEncodingId === 0) {
+ for (var _i2 = 0; _i2 < cmapMappingsLength; ++_i2) {
+ charCodeToGlyphId[cmapMappings[_i2].charCode] = cmapMappings[_i2].glyphId;
+ }
+ } else {
+ for (var _i3 = 0; _i3 < cmapMappingsLength; ++_i3) {
+ charCode = cmapMappings[_i3].charCode;
+
+ if (cmapPlatformId === 3 && charCode >= 0xF000 && charCode <= 0xF0FF) {
+ charCode &= 0xFF;
+ }
+
+ charCodeToGlyphId[charCode] = cmapMappings[_i3].glyphId;
+ }
+ }
+ }
+
+ if (charCodeToGlyphId.length === 0) {
+ charCodeToGlyphId[0] = 0;
+ }
+
+ var glyphZeroId = numGlyphsOut - 1;
+
+ if (!dupFirstEntry) {
+ glyphZeroId = 0;
+ }
+
+ var newMapping = adjustMapping(charCodeToGlyphId, hasGlyph, glyphZeroId);
+ this.toFontChar = newMapping.toFontChar;
+ tables['cmap'] = {
+ tag: 'cmap',
+ data: createCmapTable(newMapping.charCodeToGlyphId, numGlyphsOut)
+ };
+
+ if (!tables['OS/2'] || !validateOS2Table(tables['OS/2'])) {
+ tables['OS/2'] = {
+ tag: 'OS/2',
+ data: createOS2Table(properties, newMapping.charCodeToGlyphId, metricsOverride)
+ };
+ }
+
+ if (!isTrueType) {
+ try {
+ cffFile = new _stream.Stream(tables['CFF '].data);
+ var parser = new _cff_parser.CFFParser(cffFile, properties, SEAC_ANALYSIS_ENABLED);
+ cff = parser.parse();
+ cff.duplicateFirstGlyph();
+ var compiler = new _cff_parser.CFFCompiler(cff);
+ tables['CFF '].data = compiler.compile();
+ } catch (e) {
+ (0, _util.warn)('Failed to compile font ' + properties.loadedName);
+ }
+ }
+
+ if (!tables['name']) {
+ tables['name'] = {
+ tag: 'name',
+ data: createNameTable(this.name)
+ };
+ } else {
+ var namePrototype = readNameTable(tables['name']);
+ tables['name'].data = createNameTable(name, namePrototype);
+ }
+
+ var builder = new OpenTypeFileBuilder(header.version);
+
+ for (var tableTag in tables) {
+ builder.addTable(tableTag, tables[tableTag].data);
+ }
+
+ return builder.toArray();
+ },
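+  // Wraps a bare CFF or Type1 font program in an OpenType ('OTTO') container
+  // by synthesising the remaining required sfnt tables around it.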
+ convert: function Font_convert(fontName, font, properties) {
+ properties.fixedPitch = false;
+
+ if (properties.builtInEncoding) {
+ adjustToUnicode(properties, properties.builtInEncoding);
+ }
+
+ var glyphZeroId = 1;
+
+ if (font instanceof CFFFont) {
+ glyphZeroId = font.numGlyphs - 1;
+ }
+
+ var mapping = font.getGlyphMapping(properties);
+ var newMapping = adjustMapping(mapping, font.hasGlyphId.bind(font), glyphZeroId);
+ this.toFontChar = newMapping.toFontChar;
+ var numGlyphs = font.numGlyphs;
+
+ function getCharCodes(charCodeToGlyphId, glyphId) {
+ var charCodes = null;
+
+ for (var charCode in charCodeToGlyphId) {
+ if (glyphId === charCodeToGlyphId[charCode]) {
+ if (!charCodes) {
+ charCodes = [];
+ }
+
+ charCodes.push(charCode | 0);
+ }
+ }
+
+ return charCodes;
+ }
+
+ function createCharCode(charCodeToGlyphId, glyphId) {
+ for (var charCode in charCodeToGlyphId) {
+ if (glyphId === charCodeToGlyphId[charCode]) {
+ return charCode | 0;
+ }
+ }
+
+ newMapping.charCodeToGlyphId[newMapping.nextAvailableFontCharCode] = glyphId;
+ return newMapping.nextAvailableFontCharCode++;
+ }
+
+ var seacs = font.seacs;
+
+ if (SEAC_ANALYSIS_ENABLED && seacs && seacs.length) {
+ var matrix = properties.fontMatrix || _util.FONT_IDENTITY_MATRIX;
+ var charset = font.getCharset();
+ var seacMap = Object.create(null);
+
+ for (var glyphId in seacs) {
+ glyphId |= 0;
+ var seac = seacs[glyphId];
+ var baseGlyphName = _encodings.StandardEncoding[seac[2]];
+ var accentGlyphName = _encodings.StandardEncoding[seac[3]];
+ var baseGlyphId = charset.indexOf(baseGlyphName);
+ var accentGlyphId = charset.indexOf(accentGlyphName);
+
+ if (baseGlyphId < 0 || accentGlyphId < 0) {
+ continue;
+ }
+
+ var accentOffset = {
+ x: seac[0] * matrix[0] + seac[1] * matrix[2] + matrix[4],
+ y: seac[0] * matrix[1] + seac[1] * matrix[3] + matrix[5]
+ };
+ var charCodes = getCharCodes(mapping, glyphId);
+
+ if (!charCodes) {
+ continue;
+ }
+
+ for (var i = 0, ii = charCodes.length; i < ii; i++) {
+ var charCode = charCodes[i];
+ var charCodeToGlyphId = newMapping.charCodeToGlyphId;
+ var baseFontCharCode = createCharCode(charCodeToGlyphId, baseGlyphId);
+ var accentFontCharCode = createCharCode(charCodeToGlyphId, accentGlyphId);
+ seacMap[charCode] = {
+ baseFontCharCode: baseFontCharCode,
+ accentFontCharCode: accentFontCharCode,
+ accentOffset: accentOffset
+ };
+ }
+ }
+
+ properties.seacMap = seacMap;
+ }
+
+ var unitsPerEm = 1 / (properties.fontMatrix || _util.FONT_IDENTITY_MATRIX)[0];
+ var builder = new OpenTypeFileBuilder('\x4F\x54\x54\x4F');
+ builder.addTable('CFF ', font.data);
+ builder.addTable('OS/2', createOS2Table(properties, newMapping.charCodeToGlyphId));
+ builder.addTable('cmap', createCmapTable(newMapping.charCodeToGlyphId, numGlyphs));
+ builder.addTable('head', '\x00\x01\x00\x00' + '\x00\x00\x10\x00' + '\x00\x00\x00\x00' + '\x5F\x0F\x3C\xF5' + '\x00\x00' + safeString16(unitsPerEm) + '\x00\x00\x00\x00\x9e\x0b\x7e\x27' + '\x00\x00\x00\x00\x9e\x0b\x7e\x27' + '\x00\x00' + safeString16(properties.descent) + '\x0F\xFF' + safeString16(properties.ascent) + string16(properties.italicAngle ? 2 : 0) + '\x00\x11' + '\x00\x00' + '\x00\x00' + '\x00\x00');
+ builder.addTable('hhea', '\x00\x01\x00\x00' + safeString16(properties.ascent) + safeString16(properties.descent) + '\x00\x00' + '\xFF\xFF' + '\x00\x00' + '\x00\x00' + '\x00\x00' + safeString16(properties.capHeight) + safeString16(Math.tan(properties.italicAngle) * properties.xHeight) + '\x00\x00' + '\x00\x00' + '\x00\x00' + '\x00\x00' + '\x00\x00' + '\x00\x00' + string16(numGlyphs));
+ builder.addTable('hmtx', function fontFieldsHmtx() {
+ var charstrings = font.charstrings;
+ var cffWidths = font.cff ? font.cff.widths : null;
+ var hmtx = '\x00\x00\x00\x00';
+
+ for (var i = 1, ii = numGlyphs; i < ii; i++) {
+ var width = 0;
+
+ if (charstrings) {
+ var charstring = charstrings[i - 1];
+ width = 'width' in charstring ? charstring.width : 0;
+ } else if (cffWidths) {
+ width = Math.ceil(cffWidths[i] || 0);
+ }
+
+ hmtx += string16(width) + string16(0);
+ }
+
+ return hmtx;
+ }());
+ builder.addTable('maxp', '\x00\x00\x50\x00' + string16(numGlyphs));
+ builder.addTable('name', createNameTable(fontName));
+ builder.addTable('post', createPostTable(properties));
+ return builder.toArray();
+ },
+
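+  // Estimates the width used for word spacing by probing glyphs that are
+  // usually present ('space', 'minus', 'one', 'i', 'I'); cached after the
+  // first lookup.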
+ get spaceWidth() {
+ if ('_shadowWidth' in this) {
+ return this._shadowWidth;
+ }
+
+ var possibleSpaceReplacements = ['space', 'minus', 'one', 'i', 'I'];
+ var width;
+
+ for (var i = 0, ii = possibleSpaceReplacements.length; i < ii; i++) {
+ var glyphName = possibleSpaceReplacements[i];
+
+ if (glyphName in this.widths) {
+ width = this.widths[glyphName];
+ break;
+ }
+
+ var glyphsUnicodeMap = (0, _glyphlist.getGlyphsUnicode)();
+ var glyphUnicode = glyphsUnicodeMap[glyphName];
+ var charcode = 0;
+
+ if (this.composite) {
+ if (this.cMap.contains(glyphUnicode)) {
+ charcode = this.cMap.lookup(glyphUnicode);
+ }
+ }
+
+ if (!charcode && this.toUnicode) {
+ charcode = this.toUnicode.charCodeOf(glyphUnicode);
+ }
+
+ if (charcode <= 0) {
+ charcode = glyphUnicode;
+ }
+
+ width = this.widths[charcode];
+
+ if (width) {
+ break;
+ }
+ }
+
+ width = width || this.defaultWidth;
+ this._shadowWidth = width;
+ return width;
+ },
+
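+  // Resolves a single character code to a (cached) Glyph, applying the
+  // toFontChar remapping, widths, vertical metrics and any seac accent data.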
+ charToGlyph: function Font_charToGlyph(charcode, isSpace) {
+ var fontCharCode, width, operatorListId;
+ var widthCode = charcode;
+
+ if (this.cMap && this.cMap.contains(charcode)) {
+ widthCode = this.cMap.lookup(charcode);
+ }
+
+ width = this.widths[widthCode];
+ width = (0, _util.isNum)(width) ? width : this.defaultWidth;
+ var vmetric = this.vmetrics && this.vmetrics[widthCode];
+ var unicode = this.toUnicode.get(charcode) || this.fallbackToUnicode.get(charcode) || charcode;
+
+ if (typeof unicode === 'number') {
+ unicode = String.fromCharCode(unicode);
+ }
+
+ var isInFont = charcode in this.toFontChar;
+ fontCharCode = this.toFontChar[charcode] || charcode;
+
+ if (this.missingFile) {
+ fontCharCode = (0, _unicode.mapSpecialUnicodeValues)(fontCharCode);
+ }
+
+ if (this.isType3Font) {
+ operatorListId = fontCharCode;
+ }
+
+ var accent = null;
+
+ if (this.seacMap && this.seacMap[charcode]) {
+ isInFont = true;
+ var seac = this.seacMap[charcode];
+ fontCharCode = seac.baseFontCharCode;
+ accent = {
+ fontChar: String.fromCodePoint(seac.accentFontCharCode),
+ offset: seac.accentOffset
+ };
+ }
+
+ var fontChar = typeof fontCharCode === 'number' ? String.fromCodePoint(fontCharCode) : '';
+ var glyph = this.glyphCache[charcode];
+
+ if (!glyph || !glyph.matchesForCache(fontChar, unicode, accent, width, vmetric, operatorListId, isSpace, isInFont)) {
+ glyph = new Glyph(fontChar, unicode, accent, width, vmetric, operatorListId, isSpace, isInFont);
+ this.glyphCache[charcode] = glyph;
+ }
+
+ return glyph;
+ },
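+  // Maps a raw string to an array of Glyphs, using the CMap for composite
+  // fonts or per-character codes otherwise, and memoises the result.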
+ charsToGlyphs: function Font_charsToGlyphs(chars) {
+ var charsCache = this.charsCache;
+ var glyphs, glyph, charcode;
+
+ if (charsCache) {
+ glyphs = charsCache[chars];
+
+ if (glyphs) {
+ return glyphs;
+ }
+ }
+
+ if (!charsCache) {
+ charsCache = this.charsCache = Object.create(null);
+ }
+
+ glyphs = [];
+ var charsCacheKey = chars;
+ var i = 0,
+ ii;
+
+ if (this.cMap) {
+ var c = Object.create(null);
+
+ while (i < chars.length) {
+ this.cMap.readCharCode(chars, i, c);
+ charcode = c.charcode;
+ var length = c.length;
+ i += length;
+ var isSpace = length === 1 && chars.charCodeAt(i - 1) === 0x20;
+ glyph = this.charToGlyph(charcode, isSpace);
+ glyphs.push(glyph);
+ }
+ } else {
+ for (i = 0, ii = chars.length; i < ii; ++i) {
+ charcode = chars.charCodeAt(i);
+ glyph = this.charToGlyph(charcode, charcode === 0x20);
+ glyphs.push(glyph);
+ }
+ }
+
+ return charsCache[charsCacheKey] = glyphs;
+ },
+
+ get glyphCacheValues() {
+ return Object.values(this.glyphCache);
+ }
+
+ };
+ return Font;
+}();
+
+exports.Font = Font;
+
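+// Placeholder font used when a font program cannot be loaded at all.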
+var ErrorFont = function ErrorFontClosure() {
+ function ErrorFont(error) {
+ this.error = error;
+ this.loadedName = 'g_font_error';
+ this.missingFile = true;
+ }
+
+ ErrorFont.prototype = {
+ charsToGlyphs: function ErrorFont_charsToGlyphs() {
+ return [];
+ },
+ exportData: function ErrorFont_exportData() {
+ return {
+ error: this.error
+ };
+ }
+ };
+ return ErrorFont;
+}();
+
+exports.ErrorFont = ErrorFont;
+
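+// Builds a charCode -> glyphId map for Type1/CFF fonts from the base encoding
+// (or the built-in encoding for symbolic fonts) and then applies the
+// /Differences overrides from the PDF encoding dictionary.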
+function type1FontGlyphMapping(properties, builtInEncoding, glyphNames) {
+ var charCodeToGlyphId = Object.create(null);
+ var glyphId, charCode, baseEncoding;
+ var isSymbolicFont = !!(properties.flags & FontFlags.Symbolic);
+
+ if (properties.baseEncodingName) {
+ baseEncoding = (0, _encodings.getEncoding)(properties.baseEncodingName);
+
+ for (charCode = 0; charCode < baseEncoding.length; charCode++) {
+ glyphId = glyphNames.indexOf(baseEncoding[charCode]);
+
+ if (glyphId >= 0) {
+ charCodeToGlyphId[charCode] = glyphId;
+ } else {
+ charCodeToGlyphId[charCode] = 0;
+ }
+ }
+ } else if (isSymbolicFont) {
+ for (charCode in builtInEncoding) {
+ charCodeToGlyphId[charCode] = builtInEncoding[charCode];
+ }
+ } else {
+ baseEncoding = _encodings.StandardEncoding;
+
+ for (charCode = 0; charCode < baseEncoding.length; charCode++) {
+ glyphId = glyphNames.indexOf(baseEncoding[charCode]);
+
+ if (glyphId >= 0) {
+ charCodeToGlyphId[charCode] = glyphId;
+ } else {
+ charCodeToGlyphId[charCode] = 0;
+ }
+ }
+ }
+
+ var differences = properties.differences,
+ glyphsUnicodeMap;
+
+ if (differences) {
+ for (charCode in differences) {
+ var glyphName = differences[charCode];
+ glyphId = glyphNames.indexOf(glyphName);
+
+ if (glyphId === -1) {
+ if (!glyphsUnicodeMap) {
+ glyphsUnicodeMap = (0, _glyphlist.getGlyphsUnicode)();
+ }
+
+ var standardGlyphName = recoverGlyphName(glyphName, glyphsUnicodeMap);
+
+ if (standardGlyphName !== glyphName) {
+ glyphId = glyphNames.indexOf(standardGlyphName);
+ }
+ }
+
+ if (glyphId >= 0) {
+ charCodeToGlyphId[charCode] = glyphId;
+ } else {
+ charCodeToGlyphId[charCode] = 0;
+ }
+ }
+ }
+
+ return charCodeToGlyphId;
+}
+
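+// Parses a Type1 (optionally PFB-wrapped) font program: the clear-text header
+// and the eexec-encrypted portion are handled separately, and the charstrings
+// are re-wrapped into a CFF font.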
+var Type1Font = function Type1FontClosure() {
+ function findBlock(streamBytes, signature, startIndex) {
+ var streamBytesLength = streamBytes.length;
+ var signatureLength = signature.length;
+ var scanLength = streamBytesLength - signatureLength;
+ var i = startIndex,
+ j,
+ found = false;
+
+ while (i < scanLength) {
+ j = 0;
+
+ while (j < signatureLength && streamBytes[i + j] === signature[j]) {
+ j++;
+ }
+
+ if (j >= signatureLength) {
+ i += j;
+
+ while (i < streamBytesLength && (0, _util.isSpace)(streamBytes[i])) {
+ i++;
+ }
+
+ found = true;
+ break;
+ }
+
+ i++;
+ }
+
+ return {
+ found: found,
+ length: i
+ };
+ }
+
+ function getHeaderBlock(stream, suggestedLength) {
+ var EEXEC_SIGNATURE = [0x65, 0x65, 0x78, 0x65, 0x63];
+ var streamStartPos = stream.pos;
+ var headerBytes, headerBytesLength, block;
+
+ try {
+ headerBytes = stream.getBytes(suggestedLength);
+ headerBytesLength = headerBytes.length;
+ } catch (ex) {
+ if (ex instanceof _core_utils.MissingDataException) {
+ throw ex;
+ }
+ }
+
+ if (headerBytesLength === suggestedLength) {
+ block = findBlock(headerBytes, EEXEC_SIGNATURE, suggestedLength - 2 * EEXEC_SIGNATURE.length);
+
+ if (block.found && block.length === suggestedLength) {
+ return {
+ stream: new _stream.Stream(headerBytes),
+ length: suggestedLength
+ };
+ }
+ }
+
+ (0, _util.warn)('Invalid "Length1" property in Type1 font -- trying to recover.');
+ stream.pos = streamStartPos;
+ var SCAN_BLOCK_LENGTH = 2048;
+ var actualLength;
+
+ while (true) {
+ var scanBytes = stream.peekBytes(SCAN_BLOCK_LENGTH);
+ block = findBlock(scanBytes, EEXEC_SIGNATURE, 0);
+
+ if (block.length === 0) {
+ break;
+ }
+
+ stream.pos += block.length;
+
+ if (block.found) {
+ actualLength = stream.pos - streamStartPos;
+ break;
+ }
+ }
+
+ stream.pos = streamStartPos;
+
+ if (actualLength) {
+ return {
+ stream: new _stream.Stream(stream.getBytes(actualLength)),
+ length: actualLength
+ };
+ }
+
+ (0, _util.warn)('Unable to recover "Length1" property in Type1 font -- using as is.');
+ return {
+ stream: new _stream.Stream(stream.getBytes(suggestedLength)),
+ length: suggestedLength
+ };
+ }
+
+ function getEexecBlock(stream, suggestedLength) {
+ var eexecBytes = stream.getBytes();
+ return {
+ stream: new _stream.Stream(eexecBytes),
+ length: eexecBytes.length
+ };
+ }
+
+ function Type1Font(name, file, properties) {
+ var PFB_HEADER_SIZE = 6;
+ var headerBlockLength = properties.length1;
+ var eexecBlockLength = properties.length2;
+ var pfbHeader = file.peekBytes(PFB_HEADER_SIZE);
+ var pfbHeaderPresent = pfbHeader[0] === 0x80 && pfbHeader[1] === 0x01;
+
+ if (pfbHeaderPresent) {
+ file.skip(PFB_HEADER_SIZE);
+ headerBlockLength = pfbHeader[5] << 24 | pfbHeader[4] << 16 | pfbHeader[3] << 8 | pfbHeader[2];
+ }
+
+ var headerBlock = getHeaderBlock(file, headerBlockLength);
+ var headerBlockParser = new _type1_parser.Type1Parser(headerBlock.stream, false, SEAC_ANALYSIS_ENABLED);
+ headerBlockParser.extractFontHeader(properties);
+
+ if (pfbHeaderPresent) {
+ pfbHeader = file.getBytes(PFB_HEADER_SIZE);
+ eexecBlockLength = pfbHeader[5] << 24 | pfbHeader[4] << 16 | pfbHeader[3] << 8 | pfbHeader[2];
+ }
+
+ var eexecBlock = getEexecBlock(file, eexecBlockLength);
+ var eexecBlockParser = new _type1_parser.Type1Parser(eexecBlock.stream, true, SEAC_ANALYSIS_ENABLED);
+ var data = eexecBlockParser.extractFontProgram();
+
+ for (var info in data.properties) {
+ properties[info] = data.properties[info];
+ }
+
+ var charstrings = data.charstrings;
+ var type2Charstrings = this.getType2Charstrings(charstrings);
+ var subrs = this.getType2Subrs(data.subrs);
+ this.charstrings = charstrings;
+ this.data = this.wrap(name, type2Charstrings, this.charstrings, subrs, properties);
+ this.seacs = this.getSeacs(data.charstrings);
+ }
+
+ Type1Font.prototype = {
+ get numGlyphs() {
+ return this.charstrings.length + 1;
+ },
+
+ getCharset: function Type1Font_getCharset() {
+ var charset = ['.notdef'];
+ var charstrings = this.charstrings;
+
+ for (var glyphId = 0; glyphId < charstrings.length; glyphId++) {
+ charset.push(charstrings[glyphId].glyphName);
+ }
+
+ return charset;
+ },
+ getGlyphMapping: function Type1Font_getGlyphMapping(properties) {
+ var charstrings = this.charstrings;
+ var glyphNames = ['.notdef'],
+ glyphId;
+
+ for (glyphId = 0; glyphId < charstrings.length; glyphId++) {
+ glyphNames.push(charstrings[glyphId].glyphName);
+ }
+
+ var encoding = properties.builtInEncoding;
+
+ if (encoding) {
+ var builtInEncoding = Object.create(null);
+
+ for (var charCode in encoding) {
+ glyphId = glyphNames.indexOf(encoding[charCode]);
+
+ if (glyphId >= 0) {
+ builtInEncoding[charCode] = glyphId;
+ }
+ }
+ }
+
+ return type1FontGlyphMapping(properties, builtInEncoding, glyphNames);
+ },
+ hasGlyphId: function Type1Font_hasGlyphID(id) {
+ if (id < 0 || id >= this.numGlyphs) {
+ return false;
+ }
+
+ if (id === 0) {
+ return true;
+ }
+
+ var glyph = this.charstrings[id - 1];
+ return glyph.charstring.length > 0;
+ },
+ getSeacs: function Type1Font_getSeacs(charstrings) {
+ var i, ii;
+ var seacMap = [];
+
+ for (i = 0, ii = charstrings.length; i < ii; i++) {
+ var charstring = charstrings[i];
+
+ if (charstring.seac) {
+ seacMap[i + 1] = charstring.seac;
+ }
+ }
+
+ return seacMap;
+ },
+ getType2Charstrings: function Type1Font_getType2Charstrings(type1Charstrings) {
+ var type2Charstrings = [];
+
+ for (var i = 0, ii = type1Charstrings.length; i < ii; i++) {
+ type2Charstrings.push(type1Charstrings[i].charstring);
+ }
+
+ return type2Charstrings;
+ },
+ getType2Subrs: function Type1Font_getType2Subrs(type1Subrs) {
+ var bias = 0;
+ var count = type1Subrs.length;
+
+ if (count < 1133) {
+ bias = 107;
+ } else if (count < 33769) {
+ bias = 1131;
+ } else {
+ bias = 32768;
+ }
+
+ var type2Subrs = [];
+ var i;
+
+ for (i = 0; i < bias; i++) {
+ type2Subrs.push([0x0B]);
+ }
+
+ for (i = 0; i < count; i++) {
+ type2Subrs.push(type1Subrs[i]);
+ }
+
+ return type2Subrs;
+ },
+ wrap: function Type1Font_wrap(name, glyphs, charstrings, subrs, properties) {
+ var cff = new _cff_parser.CFF();
+ cff.header = new _cff_parser.CFFHeader(1, 0, 4, 4);
+ cff.names = [name];
+ var topDict = new _cff_parser.CFFTopDict();
+ topDict.setByName('version', 391);
+ topDict.setByName('Notice', 392);
+ topDict.setByName('FullName', 393);
+ topDict.setByName('FamilyName', 394);
+ topDict.setByName('Weight', 395);
+ topDict.setByName('Encoding', null);
+ topDict.setByName('FontMatrix', properties.fontMatrix);
+ topDict.setByName('FontBBox', properties.bbox);
+ topDict.setByName('charset', null);
+ topDict.setByName('CharStrings', null);
+ topDict.setByName('Private', null);
+ cff.topDict = topDict;
+ var strings = new _cff_parser.CFFStrings();
+ strings.add('Version 0.11');
+ strings.add('See original notice');
+ strings.add(name);
+ strings.add(name);
+ strings.add('Medium');
+ cff.strings = strings;
+ cff.globalSubrIndex = new _cff_parser.CFFIndex();
+ var count = glyphs.length;
+ var charsetArray = ['.notdef'];
+ var i, ii;
+
+ for (i = 0; i < count; i++) {
+ var glyphName = charstrings[i].glyphName;
+
+ var index = _cff_parser.CFFStandardStrings.indexOf(glyphName);
+
+ if (index === -1) {
+ strings.add(glyphName);
+ }
+
+ charsetArray.push(glyphName);
+ }
+
+ cff.charset = new _cff_parser.CFFCharset(false, 0, charsetArray);
+ var charStringsIndex = new _cff_parser.CFFIndex();
+ charStringsIndex.add([0x8B, 0x0E]);
+
+ for (i = 0; i < count; i++) {
+ charStringsIndex.add(glyphs[i]);
+ }
+
+ cff.charStrings = charStringsIndex;
+ var privateDict = new _cff_parser.CFFPrivateDict();
+ privateDict.setByName('Subrs', null);
+ var fields = ['BlueValues', 'OtherBlues', 'FamilyBlues', 'FamilyOtherBlues', 'StemSnapH', 'StemSnapV', 'BlueShift', 'BlueFuzz', 'BlueScale', 'LanguageGroup', 'ExpansionFactor', 'ForceBold', 'StdHW', 'StdVW'];
+
+ for (i = 0, ii = fields.length; i < ii; i++) {
+ var field = fields[i];
+
+ if (!(field in properties.privateData)) {
+ continue;
+ }
+
+ var value = properties.privateData[field];
+
+ if (Array.isArray(value)) {
+ for (var j = value.length - 1; j > 0; j--) {
+ value[j] -= value[j - 1];
+ }
+ }
+
+ privateDict.setByName(field, value);
+ }
+
+ cff.topDict.privateDict = privateDict;
+ var subrIndex = new _cff_parser.CFFIndex();
+
+ for (i = 0, ii = subrs.length; i < ii; i++) {
+ subrIndex.add(subrs[i]);
+ }
+
+ privateDict.subrsIndex = subrIndex;
+ var compiler = new _cff_parser.CFFCompiler(cff);
+ return compiler.compile();
+ }
+ };
+ return Type1Font;
+}();
+
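+// Thin wrapper around an embedded CFF font: parses it, duplicates the first
+// glyph, recompiles it, and exposes the same charset/glyph-mapping helpers as
+// the TrueType and Type1 paths.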
+var CFFFont = function CFFFontClosure() {
+ function CFFFont(file, properties) {
+ this.properties = properties;
+ var parser = new _cff_parser.CFFParser(file, properties, SEAC_ANALYSIS_ENABLED);
+ this.cff = parser.parse();
+ this.cff.duplicateFirstGlyph();
+ var compiler = new _cff_parser.CFFCompiler(this.cff);
+ this.seacs = this.cff.seacs;
+
+ try {
+ this.data = compiler.compile();
+ } catch (e) {
+ (0, _util.warn)('Failed to compile font ' + properties.loadedName);
+ this.data = file;
+ }
+ }
+
+ CFFFont.prototype = {
+ get numGlyphs() {
+ return this.cff.charStrings.count;
+ },
+
+ getCharset: function CFFFont_getCharset() {
+ return this.cff.charset.charset;
+ },
+ getGlyphMapping: function CFFFont_getGlyphMapping() {
+ var cff = this.cff;
+ var properties = this.properties;
+ var charsets = cff.charset.charset;
+ var charCodeToGlyphId;
+ var glyphId;
+
+ if (properties.composite) {
+ charCodeToGlyphId = Object.create(null);
+ var charCode;
+
+ if (cff.isCIDFont) {
+ for (glyphId = 0; glyphId < charsets.length; glyphId++) {
+ var cid = charsets[glyphId];
+ charCode = properties.cMap.charCodeOf(cid);
+ charCodeToGlyphId[charCode] = glyphId;
+ }
+ } else {
+ for (glyphId = 0; glyphId < cff.charStrings.count; glyphId++) {
+ charCode = properties.cMap.charCodeOf(glyphId);
+ charCodeToGlyphId[charCode] = glyphId;
+ }
+ }
+
+ return charCodeToGlyphId;
+ }
+
+ var encoding = cff.encoding ? cff.encoding.encoding : null;
+ charCodeToGlyphId = type1FontGlyphMapping(properties, encoding, charsets);
+ return charCodeToGlyphId;
+ },
+ hasGlyphId: function CFFFont_hasGlyphID(id) {
+ return this.cff.hasGlyphId(id);
+ }
+ };
+ return CFFFont;
+}();
+
+/***/ }),
+/* 175 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.CFFFDSelect = exports.CFFCompiler = exports.CFFPrivateDict = exports.CFFTopDict = exports.CFFCharset = exports.CFFIndex = exports.CFFStrings = exports.CFFHeader = exports.CFF = exports.CFFParser = exports.CFFStandardStrings = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _charsets = __w_pdfjs_require__(176);
+
+var _encodings = __w_pdfjs_require__(177);
+
+var MAX_SUBR_NESTING = 10;
+var CFFStandardStrings = ['.notdef', 'space', 'exclam', 'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand', 'quoteright', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright', 'asciicircum', 'underscore', 'quoteleft', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright', 'asciitilde', 'exclamdown', 'cent', 'sterling', 'fraction', 'yen', 'florin', 'section', 'currency', 'quotesingle', 'quotedblleft', 'guillemotleft', 'guilsinglleft', 'guilsinglright', 'fi', 'fl', 'endash', 'dagger', 'daggerdbl', 'periodcentered', 'paragraph', 'bullet', 'quotesinglbase', 'quotedblbase', 'quotedblright', 'guillemotright', 'ellipsis', 'perthousand', 'questiondown', 'grave', 'acute', 'circumflex', 'tilde', 'macron', 'breve', 'dotaccent', 'dieresis', 'ring', 'cedilla', 'hungarumlaut', 'ogonek', 'caron', 'emdash', 'AE', 'ordfeminine', 'Lslash', 'Oslash', 'OE', 'ordmasculine', 'ae', 'dotlessi', 'lslash', 'oslash', 'oe', 'germandbls', 'onesuperior', 'logicalnot', 'mu', 'trademark', 'Eth', 'onehalf', 'plusminus', 'Thorn', 'onequarter', 'divide', 'brokenbar', 'degree', 'thorn', 'threequarters', 'twosuperior', 'registered', 'minus', 'eth', 'multiply', 'threesuperior', 'copyright', 'Aacute', 'Acircumflex', 'Adieresis', 'Agrave', 'Aring', 'Atilde', 'Ccedilla', 'Eacute', 'Ecircumflex', 'Edieresis', 'Egrave', 'Iacute', 'Icircumflex', 'Idieresis', 'Igrave', 'Ntilde', 'Oacute', 'Ocircumflex', 'Odieresis', 'Ograve', 'Otilde', 'Scaron', 'Uacute', 'Ucircumflex', 'Udieresis', 'Ugrave', 'Yacute', 'Ydieresis', 'Zcaron', 'aacute', 'acircumflex', 'adieresis', 'agrave', 'aring', 'atilde', 'ccedilla', 'eacute', 'ecircumflex', 'edieresis', 'egrave', 'iacute', 'icircumflex', 'idieresis', 'igrave', 'ntilde', 'oacute', 'ocircumflex', 'odieresis', 'ograve', 'otilde', 'scaron', 'uacute', 'ucircumflex', 'udieresis', 'ugrave', 'yacute', 'ydieresis', 'zcaron', 'exclamsmall', 'Hungarumlautsmall', 'dollaroldstyle', 'dollarsuperior', 'ampersandsmall', 'Acutesmall', 'parenleftsuperior', 'parenrightsuperior', 'twodotenleader', 'onedotenleader', 'zerooldstyle', 'oneoldstyle', 'twooldstyle', 'threeoldstyle', 'fouroldstyle', 'fiveoldstyle', 'sixoldstyle', 'sevenoldstyle', 'eightoldstyle', 'nineoldstyle', 'commasuperior', 'threequartersemdash', 'periodsuperior', 'questionsmall', 'asuperior', 'bsuperior', 'centsuperior', 'dsuperior', 'esuperior', 'isuperior', 'lsuperior', 'msuperior', 'nsuperior', 'osuperior', 'rsuperior', 'ssuperior', 'tsuperior', 'ff', 'ffi', 'ffl', 'parenleftinferior', 'parenrightinferior', 'Circumflexsmall', 'hyphensuperior', 'Gravesmall', 'Asmall', 'Bsmall', 'Csmall', 'Dsmall', 'Esmall', 'Fsmall', 'Gsmall', 'Hsmall', 'Ismall', 'Jsmall', 'Ksmall', 'Lsmall', 'Msmall', 'Nsmall', 'Osmall', 'Psmall', 'Qsmall', 'Rsmall', 'Ssmall', 'Tsmall', 'Usmall', 'Vsmall', 'Wsmall', 'Xsmall', 'Ysmall', 'Zsmall', 'colonmonetary', 'onefitted', 'rupiah', 'Tildesmall', 'exclamdownsmall', 'centoldstyle', 'Lslashsmall', 'Scaronsmall', 'Zcaronsmall', 'Dieresissmall', 'Brevesmall', 'Caronsmall', 'Dotaccentsmall', 'Macronsmall', 'figuredash', 'hypheninferior', 'Ogoneksmall', 'Ringsmall', 'Cedillasmall', 'questiondownsmall', 'oneeighth', 'threeeighths', 'fiveeighths', 'seveneighths', 'onethird', 'twothirds', 'zerosuperior', 'foursuperior', 'fivesuperior', 'sixsuperior', 'sevensuperior', 'eightsuperior', 'ninesuperior', 'zeroinferior', 'oneinferior', 'twoinferior', 'threeinferior', 'fourinferior', 'fiveinferior', 'sixinferior', 'seveninferior', 'eightinferior', 'nineinferior', 'centinferior', 'dollarinferior', 'periodinferior', 'commainferior', 'Agravesmall', 'Aacutesmall', 'Acircumflexsmall', 'Atildesmall', 'Adieresissmall', 'Aringsmall', 'AEsmall', 'Ccedillasmall', 'Egravesmall', 'Eacutesmall', 'Ecircumflexsmall', 'Edieresissmall', 'Igravesmall', 'Iacutesmall', 'Icircumflexsmall', 'Idieresissmall', 'Ethsmall', 'Ntildesmall', 'Ogravesmall', 'Oacutesmall', 'Ocircumflexsmall', 'Otildesmall', 'Odieresissmall', 'OEsmall', 'Oslashsmall', 'Ugravesmall', 'Uacutesmall', 'Ucircumflexsmall', 'Udieresissmall', 'Yacutesmall', 'Thornsmall', 'Ydieresissmall', '001.000', '001.001', '001.002', '001.003', 'Black', 'Bold', 'Book', 'Light', 'Medium', 'Regular', 'Roman', 'Semibold'];
+exports.CFFStandardStrings = CFFStandardStrings;
+var NUM_STANDARD_CFF_STRINGS = 391;
+
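+// Parser for the compact CFF font format: reads the header, the INDEX
+// structures, the Top and Private DICTs, charset, encoding and charstrings,
+// validating each charstring program as it goes.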
+var CFFParser = function CFFParserClosure() {
+ var CharstringValidationData = [null, {
+ id: 'hstem',
+ min: 2,
+ stackClearing: true,
+ stem: true
+ }, null, {
+ id: 'vstem',
+ min: 2,
+ stackClearing: true,
+ stem: true
+ }, {
+ id: 'vmoveto',
+ min: 1,
+ stackClearing: true
+ }, {
+ id: 'rlineto',
+ min: 2,
+ resetStack: true
+ }, {
+ id: 'hlineto',
+ min: 1,
+ resetStack: true
+ }, {
+ id: 'vlineto',
+ min: 1,
+ resetStack: true
+ }, {
+ id: 'rrcurveto',
+ min: 6,
+ resetStack: true
+ }, null, {
+ id: 'callsubr',
+ min: 1,
+ undefStack: true
+ }, {
+ id: 'return',
+ min: 0,
+ undefStack: true
+ }, null, null, {
+ id: 'endchar',
+ min: 0,
+ stackClearing: true
+ }, null, null, null, {
+ id: 'hstemhm',
+ min: 2,
+ stackClearing: true,
+ stem: true
+ }, {
+ id: 'hintmask',
+ min: 0,
+ stackClearing: true
+ }, {
+ id: 'cntrmask',
+ min: 0,
+ stackClearing: true
+ }, {
+ id: 'rmoveto',
+ min: 2,
+ stackClearing: true
+ }, {
+ id: 'hmoveto',
+ min: 1,
+ stackClearing: true
+ }, {
+ id: 'vstemhm',
+ min: 2,
+ stackClearing: true,
+ stem: true
+ }, {
+ id: 'rcurveline',
+ min: 8,
+ resetStack: true
+ }, {
+ id: 'rlinecurve',
+ min: 8,
+ resetStack: true
+ }, {
+ id: 'vvcurveto',
+ min: 4,
+ resetStack: true
+ }, {
+ id: 'hhcurveto',
+ min: 4,
+ resetStack: true
+ }, null, {
+ id: 'callgsubr',
+ min: 1,
+ undefStack: true
+ }, {
+ id: 'vhcurveto',
+ min: 4,
+ resetStack: true
+ }, {
+ id: 'hvcurveto',
+ min: 4,
+ resetStack: true
+ }];
+ var CharstringValidationData12 = [null, null, null, {
+ id: 'and',
+ min: 2,
+ stackDelta: -1
+ }, {
+ id: 'or',
+ min: 2,
+ stackDelta: -1
+ }, {
+ id: 'not',
+ min: 1,
+ stackDelta: 0
+ }, null, null, null, {
+ id: 'abs',
+ min: 1,
+ stackDelta: 0
+ }, {
+ id: 'add',
+ min: 2,
+ stackDelta: -1,
+    stackFn: function stack_add(stack, index) {
+ stack[index - 2] = stack[index - 2] + stack[index - 1];
+ }
+ }, {
+ id: 'sub',
+ min: 2,
+ stackDelta: -1,
+    stackFn: function stack_sub(stack, index) {
+ stack[index - 2] = stack[index - 2] - stack[index - 1];
+ }
+ }, {
+ id: 'div',
+ min: 2,
+ stackDelta: -1,
+ stackFn: function stack_div(stack, index) {
+ stack[index - 2] = stack[index - 2] / stack[index - 1];
+ }
+ }, null, {
+ id: 'neg',
+ min: 1,
+ stackDelta: 0,
+    stackFn: function stack_neg(stack, index) {
+ stack[index - 1] = -stack[index - 1];
+ }
+ }, {
+ id: 'eq',
+ min: 2,
+ stackDelta: -1
+ }, null, null, {
+ id: 'drop',
+ min: 1,
+ stackDelta: -1
+ }, null, {
+ id: 'put',
+ min: 2,
+ stackDelta: -2
+ }, {
+ id: 'get',
+ min: 1,
+ stackDelta: 0
+ }, {
+ id: 'ifelse',
+ min: 4,
+ stackDelta: -3
+ }, {
+ id: 'random',
+ min: 0,
+ stackDelta: 1
+ }, {
+ id: 'mul',
+ min: 2,
+ stackDelta: -1,
+    stackFn: function stack_mul(stack, index) {
+ stack[index - 2] = stack[index - 2] * stack[index - 1];
+ }
+ }, null, {
+ id: 'sqrt',
+ min: 1,
+ stackDelta: 0
+ }, {
+ id: 'dup',
+ min: 1,
+ stackDelta: 1
+ }, {
+ id: 'exch',
+ min: 2,
+ stackDelta: 0
+ }, {
+ id: 'index',
+ min: 2,
+ stackDelta: 0
+ }, {
+ id: 'roll',
+ min: 3,
+ stackDelta: -2
+ }, null, null, null, {
+ id: 'hflex',
+ min: 7,
+ resetStack: true
+ }, {
+ id: 'flex',
+ min: 13,
+ resetStack: true
+ }, {
+ id: 'hflex1',
+ min: 9,
+ resetStack: true
+ }, {
+ id: 'flex1',
+ min: 11,
+ resetStack: true
+ }];
+
+ function CFFParser(file, properties, seacAnalysisEnabled) {
+ this.bytes = file.getBytes();
+ this.properties = properties;
+ this.seacAnalysisEnabled = !!seacAnalysisEnabled;
+ }
+
+ CFFParser.prototype = {
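+ // Parses the whole font program: the header, then the Name, Top DICT,
+ // String and Global Subr INDEXes (stored back to back), followed by the
+ // data the Top DICT points at: Private DICT, CharStrings, charset and
+ // encoding, plus FDArray/FDSelect for CID-keyed fonts.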
+ parse: function CFFParser_parse() {
+ var properties = this.properties;
+ var cff = new CFF();
+ this.cff = cff;
+ var header = this.parseHeader();
+ var nameIndex = this.parseIndex(header.endPos);
+ var topDictIndex = this.parseIndex(nameIndex.endPos);
+ var stringIndex = this.parseIndex(topDictIndex.endPos);
+ var globalSubrIndex = this.parseIndex(stringIndex.endPos);
+ var topDictParsed = this.parseDict(topDictIndex.obj.get(0));
+ var topDict = this.createDict(CFFTopDict, topDictParsed, cff.strings);
+ cff.header = header.obj;
+ cff.names = this.parseNameIndex(nameIndex.obj);
+ cff.strings = this.parseStringIndex(stringIndex.obj);
+ cff.topDict = topDict;
+ cff.globalSubrIndex = globalSubrIndex.obj;
+ this.parsePrivateDict(cff.topDict);
+ cff.isCIDFont = topDict.hasName('ROS');
+ var charStringOffset = topDict.getByName('CharStrings');
+ var charStringIndex = this.parseIndex(charStringOffset).obj;
+ var fontMatrix = topDict.getByName('FontMatrix');
+
+ if (fontMatrix) {
+ properties.fontMatrix = fontMatrix;
+ }
+
+ var fontBBox = topDict.getByName('FontBBox');
+
+ if (fontBBox) {
+ properties.ascent = Math.max(fontBBox[3], fontBBox[1]);
+ properties.descent = Math.min(fontBBox[1], fontBBox[3]);
+ properties.ascentScaled = true;
+ }
+
+ var charset, encoding;
+
+ if (cff.isCIDFont) {
+ var fdArrayIndex = this.parseIndex(topDict.getByName('FDArray')).obj;
+
+ for (var i = 0, ii = fdArrayIndex.count; i < ii; ++i) {
+ var dictRaw = fdArrayIndex.get(i);
+ var fontDict = this.createDict(CFFTopDict, this.parseDict(dictRaw), cff.strings);
+ this.parsePrivateDict(fontDict);
+ cff.fdArray.push(fontDict);
+ }
+
+ encoding = null;
+ charset = this.parseCharsets(topDict.getByName('charset'), charStringIndex.count, cff.strings, true);
+ cff.fdSelect = this.parseFDSelect(topDict.getByName('FDSelect'), charStringIndex.count);
+ } else {
+ charset = this.parseCharsets(topDict.getByName('charset'), charStringIndex.count, cff.strings, false);
+ encoding = this.parseEncoding(topDict.getByName('Encoding'), properties, cff.strings, charset.charset);
+ }
+
+ cff.charset = charset;
+ cff.encoding = encoding;
+ var charStringsAndSeacs = this.parseCharStrings({
+ charStrings: charStringIndex,
+ localSubrIndex: topDict.privateDict.subrsIndex,
+ globalSubrIndex: globalSubrIndex.obj,
+ fdSelect: cff.fdSelect,
+ fdArray: cff.fdArray,
+ privateDict: topDict.privateDict
+ });
+ cff.charStrings = charStringsAndSeacs.charStrings;
+ cff.seacs = charStringsAndSeacs.seacs;
+ cff.widths = charStringsAndSeacs.widths;
+ return cff;
+ },
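+ // The header is four bytes: major, minor, hdrSize and offSize. Some fonts
+ // have junk before the actual table, so scan forward to the first 0x01
+ // (major version) byte and treat that as the start of the data.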
+ parseHeader: function CFFParser_parseHeader() {
+ var bytes = this.bytes;
+ var bytesLength = bytes.length;
+ var offset = 0;
+
+ while (offset < bytesLength && bytes[offset] !== 1) {
+ ++offset;
+ }
+
+ if (offset >= bytesLength) {
+ throw new _util.FormatError('Invalid CFF header');
+ }
+
+ if (offset !== 0) {
+ (0, _util.info)('cff data is shifted');
+ bytes = bytes.subarray(offset);
+ this.bytes = bytes;
+ }
+
+ var major = bytes[0];
+ var minor = bytes[1];
+ var hdrSize = bytes[2];
+ var offSize = bytes[3];
+ var header = new CFFHeader(major, minor, hdrSize, offSize);
+ return {
+ obj: header,
+ endPos: hdrSize
+ };
+ },
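+ // Decodes a Top/Private DICT: operands use the CFF number encoding
+ // (32..246 encode value - 139, 247..254 are two-byte values, 28 is a
+ // 16-bit integer, 29 a 32-bit integer, 30 a nibble-packed real), and
+ // operator bytes are 0..21, with 12 escaping to a two-byte operator.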
+ parseDict: function CFFParser_parseDict(dict) {
+ var pos = 0;
+
+ function parseOperand() {
+ var value = dict[pos++];
+
+ if (value === 30) {
+ return parseFloatOperand();
+ } else if (value === 28) {
+ value = dict[pos++];
+ value = (value << 24 | dict[pos++] << 16) >> 16;
+ return value;
+ } else if (value === 29) {
+ value = dict[pos++];
+ value = value << 8 | dict[pos++];
+ value = value << 8 | dict[pos++];
+ value = value << 8 | dict[pos++];
+ return value;
+ } else if (value >= 32 && value <= 246) {
+ return value - 139;
+ } else if (value >= 247 && value <= 250) {
+ return (value - 247) * 256 + dict[pos++] + 108;
+ } else if (value >= 251 && value <= 254) {
+ return -((value - 251) * 256) - dict[pos++] - 108;
+ }
+
+ (0, _util.warn)('CFFParser_parseDict: "' + value + '" is a reserved command.');
+ return NaN;
+ }
+
+ function parseFloatOperand() {
+ var str = '';
+ var eof = 15;
+ var lookup = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '.', 'E', 'E-', null, '-'];
+ var length = dict.length;
+
+ while (pos < length) {
+ var b = dict[pos++];
+ var b1 = b >> 4;
+ var b2 = b & 15;
+
+ if (b1 === eof) {
+ break;
+ }
+
+ str += lookup[b1];
+
+ if (b2 === eof) {
+ break;
+ }
+
+ str += lookup[b2];
+ }
+
+ return parseFloat(str);
+ }
+
+ var operands = [];
+ var entries = [];
+ pos = 0;
+ var end = dict.length;
+
+ while (pos < end) {
+ var b = dict[pos];
+
+ if (b <= 21) {
+ if (b === 12) {
+ b = b << 8 | dict[++pos];
+ }
+
+ entries.push([b, operands]);
+ operands = [];
+ ++pos;
+ } else {
+ operands.push(parseOperand());
+ }
+ }
+
+ return entries;
+ },
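+ // An INDEX is a 2-byte count, a 1-byte offset size, count + 1 offsets
+ // (1-based, relative to the byte preceding the object data) and then the
+ // concatenated object data; endPos is the position just past the INDEX.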
+ parseIndex: function CFFParser_parseIndex(pos) {
+ var cffIndex = new CFFIndex();
+ var bytes = this.bytes;
+ var count = bytes[pos++] << 8 | bytes[pos++];
+ var offsets = [];
+ var end = pos;
+ var i, ii;
+
+ if (count !== 0) {
+ var offsetSize = bytes[pos++];
+ var startPos = pos + (count + 1) * offsetSize - 1;
+
+ for (i = 0, ii = count + 1; i < ii; ++i) {
+ var offset = 0;
+
+ for (var j = 0; j < offsetSize; ++j) {
+ offset <<= 8;
+ offset += bytes[pos++];
+ }
+
+ offsets.push(startPos + offset);
+ }
+
+ end = offsets[count];
+ }
+
+ for (i = 0, ii = offsets.length - 1; i < ii; ++i) {
+ var offsetStart = offsets[i];
+ var offsetEnd = offsets[i + 1];
+ cffIndex.add(bytes.subarray(offsetStart, offsetEnd));
+ }
+
+ return {
+ obj: cffIndex,
+ endPos: end
+ };
+ },
+ parseNameIndex: function CFFParser_parseNameIndex(index) {
+ var names = [];
+
+ for (var i = 0, ii = index.count; i < ii; ++i) {
+ var name = index.get(i);
+ names.push((0, _util.bytesToString)(name));
+ }
+
+ return names;
+ },
+ parseStringIndex: function CFFParser_parseStringIndex(index) {
+ var strings = new CFFStrings();
+
+ for (var i = 0, ii = index.count; i < ii; ++i) {
+ var data = index.get(i);
+ strings.add((0, _util.bytesToString)(data));
+ }
+
+ return strings;
+ },
+ createDict: function CFFParser_createDict(Type, dict, strings) {
+ var cffDict = new Type(strings);
+
+ for (var i = 0, ii = dict.length; i < ii; ++i) {
+ var pair = dict[i];
+ var key = pair[0];
+ var value = pair[1];
+ cffDict.setByKey(key, value);
+ }
+
+ return cffDict;
+ },
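+ // Validates a single Type 2 charstring without rendering it: tracks the
+ // operand stack depth against CharstringValidationData, counts stem hints
+ // (needed to skip hintmask/cntrmask data bytes), records the glyph width
+ // and any seac components, and recurses into local/global subroutines
+ // using the standard bias (107, 1131 or 32768 depending on the subr count).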
+ parseCharString: function CFFParser_parseCharString(state, data, localSubrIndex, globalSubrIndex) {
+ if (!data || state.callDepth > MAX_SUBR_NESTING) {
+ return false;
+ }
+
+ var stackSize = state.stackSize;
+ var stack = state.stack;
+ var length = data.length;
+
+ for (var j = 0; j < length;) {
+ var value = data[j++];
+ var validationCommand = null;
+
+ if (value === 12) {
+ var q = data[j++];
+
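+ // 12 0 is the deprecated dotsection operator; rewrite it in place as
+ // "0 hmoveto" (139 encodes the operand 0, 22 is hmoveto).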
+ if (q === 0) {
+ data[j - 2] = 139;
+ data[j - 1] = 22;
+ stackSize = 0;
+ } else {
+ validationCommand = CharstringValidationData12[q];
+ }
+ } else if (value === 28) {
+ stack[stackSize] = (data[j] << 24 | data[j + 1] << 16) >> 16;
+ j += 2;
+ stackSize++;
+ } else if (value === 14) {
+ if (stackSize >= 4) {
+ stackSize -= 4;
+
+ if (this.seacAnalysisEnabled) {
+ state.seac = stack.slice(stackSize, stackSize + 4);
+ return false;
+ }
+ }
+
+ validationCommand = CharstringValidationData[value];
+ } else if (value >= 32 && value <= 246) {
+ stack[stackSize] = value - 139;
+ stackSize++;
+ } else if (value >= 247 && value <= 254) {
+ stack[stackSize] = value < 251 ? (value - 247 << 8) + data[j] + 108 : -(value - 251 << 8) - data[j] - 108;
+ j++;
+ stackSize++;
+ } else if (value === 255) {
+ stack[stackSize] = (data[j] << 24 | data[j + 1] << 16 | data[j + 2] << 8 | data[j + 3]) / 65536;
+ j += 4;
+ stackSize++;
+ } else if (value === 19 || value === 20) {
+ state.hints += stackSize >> 1;
+ j += state.hints + 7 >> 3;
+ stackSize %= 2;
+ validationCommand = CharstringValidationData[value];
+ } else if (value === 10 || value === 29) {
+ var subrsIndex;
+
+ if (value === 10) {
+ subrsIndex = localSubrIndex;
+ } else {
+ subrsIndex = globalSubrIndex;
+ }
+
+ if (!subrsIndex) {
+ validationCommand = CharstringValidationData[value];
+ (0, _util.warn)('Missing subrsIndex for ' + validationCommand.id);
+ return false;
+ }
+
+ var bias = 32768;
+
+ if (subrsIndex.count < 1240) {
+ bias = 107;
+ } else if (subrsIndex.count < 33900) {
+ bias = 1131;
+ }
+
+ var subrNumber = stack[--stackSize] + bias;
+
+ if (subrNumber < 0 || subrNumber >= subrsIndex.count || isNaN(subrNumber)) {
+ validationCommand = CharstringValidationData[value];
+ (0, _util.warn)('Out of bounds subrIndex for ' + validationCommand.id);
+ return false;
+ }
+
+ state.stackSize = stackSize;
+ state.callDepth++;
+ var valid = this.parseCharString(state, subrsIndex.get(subrNumber), localSubrIndex, globalSubrIndex);
+
+ if (!valid) {
+ return false;
+ }
+
+ state.callDepth--;
+ stackSize = state.stackSize;
+ continue;
+ } else if (value === 11) {
+ state.stackSize = stackSize;
+ return true;
+ } else {
+ validationCommand = CharstringValidationData[value];
+ }
+
+ if (validationCommand) {
+ if (validationCommand.stem) {
+ state.hints += stackSize >> 1;
+
+ if (value === 3 || value === 23) {
+ state.hasVStems = true;
+ } else if (state.hasVStems && (value === 1 || value === 18)) {
+ (0, _util.warn)('CFF stem hints are in wrong order');
+ data[j - 1] = value === 1 ? 3 : 23;
+ }
+ }
+
+ if ('min' in validationCommand) {
+ if (!state.undefStack && stackSize < validationCommand.min) {
+ (0, _util.warn)('Not enough parameters for ' + validationCommand.id + '; actual: ' + stackSize + ', expected: ' + validationCommand.min);
+ return false;
+ }
+ }
+
+ if (state.firstStackClearing && validationCommand.stackClearing) {
+ state.firstStackClearing = false;
+ stackSize -= validationCommand.min;
+
+ if (stackSize >= 2 && validationCommand.stem) {
+ stackSize %= 2;
+ } else if (stackSize > 1) {
+ (0, _util.warn)('Found too many parameters for stack-clearing command');
+ }
+
+ if (stackSize > 0 && stack[stackSize - 1] >= 0) {
+ state.width = stack[stackSize - 1];
+ }
+ }
+
+ if ('stackDelta' in validationCommand) {
+ if ('stackFn' in validationCommand) {
+ validationCommand.stackFn(stack, stackSize);
+ }
+
+ stackSize += validationCommand.stackDelta;
+ } else if (validationCommand.stackClearing) {
+ stackSize = 0;
+ } else if (validationCommand.resetStack) {
+ stackSize = 0;
+ state.undefStack = false;
+ } else if (validationCommand.undefStack) {
+ stackSize = 0;
+ state.undefStack = true;
+ state.firstStackClearing = false;
+ }
+ }
+ }
+
+ state.stackSize = stackSize;
+ return true;
+ },
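+ // Walks every glyph: for CID fonts FDSelect picks the font DICT (and with
+ // it the private dict and local subrs) to validate against; widths and
+ // seac components found during validation are collected, and glyphs that
+ // fail validation are replaced by a bare endchar (0x0e).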
+ parseCharStrings: function parseCharStrings(_ref) {
+ var charStrings = _ref.charStrings,
+ localSubrIndex = _ref.localSubrIndex,
+ globalSubrIndex = _ref.globalSubrIndex,
+ fdSelect = _ref.fdSelect,
+ fdArray = _ref.fdArray,
+ privateDict = _ref.privateDict;
+ var seacs = [];
+ var widths = [];
+ var count = charStrings.count;
+
+ for (var i = 0; i < count; i++) {
+ var charstring = charStrings.get(i);
+ var state = {
+ callDepth: 0,
+ stackSize: 0,
+ stack: [],
+ undefStack: true,
+ hints: 0,
+ firstStackClearing: true,
+ seac: null,
+ width: null,
+ hasVStems: false
+ };
+ var valid = true;
+ var localSubrToUse = null;
+ var privateDictToUse = privateDict;
+
+ if (fdSelect && fdArray.length) {
+ var fdIndex = fdSelect.getFDIndex(i);
+
+ if (fdIndex === -1) {
+ (0, _util.warn)('Glyph index is not in fd select.');
+ valid = false;
+ }
+
+ if (fdIndex >= fdArray.length) {
+ (0, _util.warn)('Invalid fd index for glyph index.');
+ valid = false;
+ }
+
+ if (valid) {
+ privateDictToUse = fdArray[fdIndex].privateDict;
+ localSubrToUse = privateDictToUse.subrsIndex;
+ }
+ } else if (localSubrIndex) {
+ localSubrToUse = localSubrIndex;
+ }
+
+ if (valid) {
+ valid = this.parseCharString(state, charstring, localSubrToUse, globalSubrIndex);
+ }
+
+ if (state.width !== null) {
+ var nominalWidth = privateDictToUse.getByName('nominalWidthX');
+ widths[i] = nominalWidth + state.width;
+ } else {
+ var defaultWidth = privateDictToUse.getByName('defaultWidthX');
+ widths[i] = defaultWidth;
+ }
+
+ if (state.seac !== null) {
+ seacs[i] = state.seac;
+ }
+
+ if (!valid) {
+ charStrings.set(i, new Uint8Array([14]));
+ }
+ }
+
+ return {
+ charStrings: charStrings,
+ seacs: seacs,
+ widths: widths
+ };
+ },
+ emptyPrivateDictionary: function CFFParser_emptyPrivateDictionary(parentDict) {
+ var privateDict = this.createDict(CFFPrivateDict, [], parentDict.strings);
+ parentDict.setByKey(18, [0, 0]);
+ parentDict.privateDict = privateDict;
+ },
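+ // The Private entry of a (font) DICT is a [size, offset] pair pointing
+ // into the font data; a Subrs entry inside the private dict is an offset
+ // relative to the start of the private dict itself.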
+ parsePrivateDict: function CFFParser_parsePrivateDict(parentDict) {
+ if (!parentDict.hasName('Private')) {
+ this.emptyPrivateDictionary(parentDict);
+ return;
+ }
+
+ var privateOffset = parentDict.getByName('Private');
+
+ if (!Array.isArray(privateOffset) || privateOffset.length !== 2) {
+ parentDict.removeByName('Private');
+ return;
+ }
+
+ var size = privateOffset[0];
+ var offset = privateOffset[1];
+
+ if (size === 0 || offset >= this.bytes.length) {
+ this.emptyPrivateDictionary(parentDict);
+ return;
+ }
+
+ var privateDictEnd = offset + size;
+ var dictData = this.bytes.subarray(offset, privateDictEnd);
+ var dict = this.parseDict(dictData);
+ var privateDict = this.createDict(CFFPrivateDict, dict, parentDict.strings);
+ parentDict.privateDict = privateDict;
+
+ if (!privateDict.getByName('Subrs')) {
+ return;
+ }
+
+ var subrsOffset = privateDict.getByName('Subrs');
+ var relativeOffset = offset + subrsOffset;
+
+ if (subrsOffset === 0 || relativeOffset >= this.bytes.length) {
+ this.emptyPrivateDictionary(parentDict);
+ return;
+ }
+
+ var subrsIndex = this.parseIndex(relativeOffset);
+ privateDict.subrsIndex = subrsIndex.obj;
+ },
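+ // Charset offsets 0, 1 and 2 select the predefined ISOAdobe, Expert and
+ // ExpertSubset charsets. Otherwise format 0 stores one SID (or CID) per
+ // glyph, while formats 1 and 2 store ranges of a first SID plus an 8-bit
+ // or 16-bit count of additional glyphs.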
+ parseCharsets: function CFFParser_parseCharsets(pos, length, strings, cid) {
+ if (pos === 0) {
+ return new CFFCharset(true, CFFCharsetPredefinedTypes.ISO_ADOBE, _charsets.ISOAdobeCharset);
+ } else if (pos === 1) {
+ return new CFFCharset(true, CFFCharsetPredefinedTypes.EXPERT, _charsets.ExpertCharset);
+ } else if (pos === 2) {
+ return new CFFCharset(true, CFFCharsetPredefinedTypes.EXPERT_SUBSET, _charsets.ExpertSubsetCharset);
+ }
+
+ var bytes = this.bytes;
+ var start = pos;
+ var format = bytes[pos++];
+ var charset = ['.notdef'];
+ var id, count, i;
+ length -= 1;
+
+ switch (format) {
+ case 0:
+ for (i = 0; i < length; i++) {
+ id = bytes[pos++] << 8 | bytes[pos++];
+ charset.push(cid ? id : strings.get(id));
+ }
+
+ break;
+
+ case 1:
+ while (charset.length <= length) {
+ id = bytes[pos++] << 8 | bytes[pos++];
+ count = bytes[pos++];
+
+ for (i = 0; i <= count; i++) {
+ charset.push(cid ? id++ : strings.get(id++));
+ }
+ }
+
+ break;
+
+ case 2:
+ while (charset.length <= length) {
+ id = bytes[pos++] << 8 | bytes[pos++];
+ count = bytes[pos++] << 8 | bytes[pos++];
+
+ for (i = 0; i <= count; i++) {
+ charset.push(cid ? id++ : strings.get(id++));
+ }
+ }
+
+ break;
+
+ default:
+ throw new _util.FormatError('Unknown charset format');
+ }
+
+ var end = pos;
+ var raw = bytes.subarray(start, end);
+ return new CFFCharset(false, format, charset, raw);
+ },
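+ // Encoding offsets 0 and 1 mean the predefined Standard and Expert
+ // encodings. Otherwise format 0 lists one code per glyph and format 1
+ // lists code ranges; the high bit (0x80) of the format flags a supplement
+ // of (code, SID) pairs appended after the main table.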
+ parseEncoding: function CFFParser_parseEncoding(pos, properties, strings, charset) {
+ var encoding = Object.create(null);
+ var bytes = this.bytes;
+ var predefined = false;
+ var format, i, ii;
+ var raw = null;
+
+ function readSupplement() {
+ var supplementsCount = bytes[pos++];
+
+ for (i = 0; i < supplementsCount; i++) {
+ var code = bytes[pos++];
+ var sid = (bytes[pos++] << 8) + (bytes[pos++] & 0xff);
+ encoding[code] = charset.indexOf(strings.get(sid));
+ }
+ }
+
+ if (pos === 0 || pos === 1) {
+ predefined = true;
+ format = pos;
+ var baseEncoding = pos ? _encodings.ExpertEncoding : _encodings.StandardEncoding;
+
+ for (i = 0, ii = charset.length; i < ii; i++) {
+ var index = baseEncoding.indexOf(charset[i]);
+
+ if (index !== -1) {
+ encoding[index] = i;
+ }
+ }
+ } else {
+ var dataStart = pos;
+ format = bytes[pos++];
+
+ switch (format & 0x7f) {
+ case 0:
+ var glyphsCount = bytes[pos++];
+
+ for (i = 1; i <= glyphsCount; i++) {
+ encoding[bytes[pos++]] = i;
+ }
+
+ break;
+
+ case 1:
+ var rangesCount = bytes[pos++];
+ var gid = 1;
+
+ for (i = 0; i < rangesCount; i++) {
+ var start = bytes[pos++];
+ var left = bytes[pos++];
+
+ for (var j = start; j <= start + left; j++) {
+ encoding[j] = gid++;
+ }
+ }
+
+ break;
+
+ default:
+ throw new _util.FormatError("Unknown encoding format: ".concat(format, " in CFF"));
+ }
+
+ var dataEnd = pos;
+
+ if (format & 0x80) {
+ bytes[dataStart] &= 0x7f;
+ readSupplement();
+ }
+
+ raw = bytes.subarray(dataStart, dataEnd);
+ }
+
+ format = format & 0x7f;
+ return new CFFEncoding(predefined, format, encoding, raw);
+ },
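+ // FDSelect format 0 stores one font-DICT index per glyph; format 3 stores
+ // (first GID, FD index) ranges followed by a sentinel GID equal to the
+ // number of glyphs.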
+ parseFDSelect: function CFFParser_parseFDSelect(pos, length) {
+ var bytes = this.bytes;
+ var format = bytes[pos++];
+ var fdSelect = [];
+ var i;
+
+ switch (format) {
+ case 0:
+ for (i = 0; i < length; ++i) {
+ var id = bytes[pos++];
+ fdSelect.push(id);
+ }
+
+ break;
+
+ case 3:
+ var rangesCount = bytes[pos++] << 8 | bytes[pos++];
+
+ for (i = 0; i < rangesCount; ++i) {
+ var first = bytes[pos++] << 8 | bytes[pos++];
+
+ if (i === 0 && first !== 0) {
+ (0, _util.warn)('parseFDSelect: The first range must have a first GID of 0' + ' -- trying to recover.');
+ first = 0;
+ }
+
+ var fdIndex = bytes[pos++];
+ var next = bytes[pos] << 8 | bytes[pos + 1];
+
+ for (var j = first; j < next; ++j) {
+ fdSelect.push(fdIndex);
+ }
+ }
+
+ pos += 2;
+ break;
+
+ default:
+ throw new _util.FormatError("parseFDSelect: Unknown format \"".concat(format, "\"."));
+ }
+
+ if (fdSelect.length !== length) {
+ throw new _util.FormatError('parseFDSelect: Invalid font data.');
+ }
+
+ return new CFFFDSelect(format, fdSelect);
+ }
+ };
+ return CFFParser;
+}();
+
+exports.CFFParser = CFFParser;
+
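+// In-memory model of a parsed font: CFFParser produces it, callers may
+// adjust it (e.g. via duplicateFirstGlyph) and CFFCompiler serializes it
+// back to bytes.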
+var CFF = function CFFClosure() {
+ function CFF() {
+ this.header = null;
+ this.names = [];
+ this.topDict = null;
+ this.strings = new CFFStrings();
+ this.globalSubrIndex = null;
+ this.encoding = null;
+ this.charset = null;
+ this.charStrings = null;
+ this.fdArray = [];
+ this.fdSelect = null;
+ this.isCIDFont = false;
+ }
+
+ CFF.prototype = {
+ duplicateFirstGlyph: function CFF_duplicateFirstGlyph() {
+ if (this.charStrings.count >= 65535) {
+ (0, _util.warn)('Not enough space in charstrings to duplicate first glyph.');
+ return;
+ }
+
+ var glyphZero = this.charStrings.get(0);
+ this.charStrings.add(glyphZero);
+
+ if (this.isCIDFont) {
+ this.fdSelect.fdSelect.push(this.fdSelect.fdSelect[0]);
+ }
+ },
+ hasGlyphId: function CFF_hasGlyphID(id) {
+ if (id < 0 || id >= this.charStrings.count) {
+ return false;
+ }
+
+ var glyph = this.charStrings.get(id);
+ return glyph.length > 0;
+ }
+ };
+ return CFF;
+}();
+
+exports.CFF = CFF;
+
+var CFFHeader = function CFFHeaderClosure() {
+ function CFFHeader(major, minor, hdrSize, offSize) {
+ this.major = major;
+ this.minor = minor;
+ this.hdrSize = hdrSize;
+ this.offSize = offSize;
+ }
+
+ return CFFHeader;
+}();
+
+exports.CFFHeader = CFFHeader;
+
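+// String IDs (SIDs) 0..390 refer to entries in CFFStandardStrings; custom
+// strings stored in the font's String INDEX start at SID 391.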
+var CFFStrings = function CFFStringsClosure() {
+ function CFFStrings() {
+ this.strings = [];
+ }
+
+ CFFStrings.prototype = {
+ get: function CFFStrings_get(index) {
+ if (index >= 0 && index <= NUM_STANDARD_CFF_STRINGS - 1) {
+ return CFFStandardStrings[index];
+ }
+
+ if (index - NUM_STANDARD_CFF_STRINGS < this.strings.length) {
+ return this.strings[index - NUM_STANDARD_CFF_STRINGS];
+ }
+
+ return CFFStandardStrings[0];
+ },
+ getSID: function CFFStrings_getSID(str) {
+ var index = CFFStandardStrings.indexOf(str);
+
+ if (index !== -1) {
+ return index;
+ }
+
+ index = this.strings.indexOf(str);
+
+ if (index !== -1) {
+ return index + NUM_STANDARD_CFF_STRINGS;
+ }
+
+ return -1;
+ },
+ add: function CFFStrings_add(value) {
+ this.strings.push(value);
+ },
+
+ get count() {
+ return this.strings.length;
+ }
+
+ };
+ return CFFStrings;
+}();
+
+exports.CFFStrings = CFFStrings;
+
+var CFFIndex = function CFFIndexClosure() {
+ function CFFIndex() {
+ this.objects = [];
+ this.length = 0;
+ }
+
+ CFFIndex.prototype = {
+ add: function CFFIndex_add(data) {
+ this.length += data.length;
+ this.objects.push(data);
+ },
+ set: function CFFIndex_set(index, data) {
+ this.length += data.length - this.objects[index].length;
+ this.objects[index] = data;
+ },
+ get: function CFFIndex_get(index) {
+ return this.objects[index];
+ },
+
+ get count() {
+ return this.objects.length;
+ }
+
+ };
+ return CFFIndex;
+}();
+
+exports.CFFIndex = CFFIndex;
+
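+// Generic DICT model shared by the Top and Private DICTs: the layout tables
+// built by createTables map operator keys (two-byte operators are encoded as
+// (12 << 8) | n) to names, value types, defaults and opcodes.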
+var CFFDict = function CFFDictClosure() {
+ function CFFDict(tables, strings) {
+ this.keyToNameMap = tables.keyToNameMap;
+ this.nameToKeyMap = tables.nameToKeyMap;
+ this.defaults = tables.defaults;
+ this.types = tables.types;
+ this.opcodes = tables.opcodes;
+ this.order = tables.order;
+ this.strings = strings;
+ this.values = Object.create(null);
+ }
+
+ CFFDict.prototype = {
+ setByKey: function CFFDict_setByKey(key, value) {
+ if (!(key in this.keyToNameMap)) {
+ return false;
+ }
+
+ var valueLength = value.length;
+
+ if (valueLength === 0) {
+ return true;
+ }
+
+ for (var i = 0; i < valueLength; i++) {
+ if (isNaN(value[i])) {
+ (0, _util.warn)('Invalid CFFDict value: "' + value + '" for key "' + key + '".');
+ return true;
+ }
+ }
+
+ var type = this.types[key];
+
+ if (type === 'num' || type === 'sid' || type === 'offset') {
+ value = value[0];
+ }
+
+ this.values[key] = value;
+ return true;
+ },
+ setByName: function CFFDict_setByName(name, value) {
+ if (!(name in this.nameToKeyMap)) {
+ throw new _util.FormatError("Invalid dictionary name \"".concat(name, "\""));
+ }
+
+ this.values[this.nameToKeyMap[name]] = value;
+ },
+ hasName: function CFFDict_hasName(name) {
+ return this.nameToKeyMap[name] in this.values;
+ },
+ getByName: function CFFDict_getByName(name) {
+ if (!(name in this.nameToKeyMap)) {
+ throw new _util.FormatError("Invalid dictionary name ".concat(name, "\""));
+ }
+
+ var key = this.nameToKeyMap[name];
+
+ if (!(key in this.values)) {
+ return this.defaults[key];
+ }
+
+ return this.values[key];
+ },
+ removeByName: function CFFDict_removeByName(name) {
+ delete this.values[this.nameToKeyMap[name]];
+ }
+ };
+
+ CFFDict.createTables = function CFFDict_createTables(layout) {
+ var tables = {
+ keyToNameMap: {},
+ nameToKeyMap: {},
+ defaults: {},
+ types: {},
+ opcodes: {},
+ order: []
+ };
+
+ for (var i = 0, ii = layout.length; i < ii; ++i) {
+ var entry = layout[i];
+ var key = Array.isArray(entry[0]) ? (entry[0][0] << 8) + entry[0][1] : entry[0];
+ tables.keyToNameMap[key] = entry[1];
+ tables.nameToKeyMap[entry[1]] = key;
+ tables.types[key] = entry[2];
+ tables.defaults[key] = entry[3];
+ tables.opcodes[key] = Array.isArray(entry[0]) ? entry[0] : [entry[0]];
+ tables.order.push(key);
+ }
+
+ return tables;
+ };
+
+ return CFFDict;
+}();
+
+var CFFTopDict = function CFFTopDictClosure() {
+ var layout = [[[12, 30], 'ROS', ['sid', 'sid', 'num'], null], [[12, 20], 'SyntheticBase', 'num', null], [0, 'version', 'sid', null], [1, 'Notice', 'sid', null], [[12, 0], 'Copyright', 'sid', null], [2, 'FullName', 'sid', null], [3, 'FamilyName', 'sid', null], [4, 'Weight', 'sid', null], [[12, 1], 'isFixedPitch', 'num', 0], [[12, 2], 'ItalicAngle', 'num', 0], [[12, 3], 'UnderlinePosition', 'num', -100], [[12, 4], 'UnderlineThickness', 'num', 50], [[12, 5], 'PaintType', 'num', 0], [[12, 6], 'CharstringType', 'num', 2], [[12, 7], 'FontMatrix', ['num', 'num', 'num', 'num', 'num', 'num'], [0.001, 0, 0, 0.001, 0, 0]], [13, 'UniqueID', 'num', null], [5, 'FontBBox', ['num', 'num', 'num', 'num'], [0, 0, 0, 0]], [[12, 8], 'StrokeWidth', 'num', 0], [14, 'XUID', 'array', null], [15, 'charset', 'offset', 0], [16, 'Encoding', 'offset', 0], [17, 'CharStrings', 'offset', 0], [18, 'Private', ['offset', 'offset'], null], [[12, 21], 'PostScript', 'sid', null], [[12, 22], 'BaseFontName', 'sid', null], [[12, 23], 'BaseFontBlend', 'delta', null], [[12, 31], 'CIDFontVersion', 'num', 0], [[12, 32], 'CIDFontRevision', 'num', 0], [[12, 33], 'CIDFontType', 'num', 0], [[12, 34], 'CIDCount', 'num', 8720], [[12, 35], 'UIDBase', 'num', null], [[12, 37], 'FDSelect', 'offset', null], [[12, 36], 'FDArray', 'offset', null], [[12, 38], 'FontName', 'sid', null]];
+ var tables = null;
+
+ function CFFTopDict(strings) {
+ if (tables === null) {
+ tables = CFFDict.createTables(layout);
+ }
+
+ CFFDict.call(this, tables, strings);
+ this.privateDict = null;
+ }
+
+ CFFTopDict.prototype = Object.create(CFFDict.prototype);
+ return CFFTopDict;
+}();
+
+exports.CFFTopDict = CFFTopDict;
+
+var CFFPrivateDict = function CFFPrivateDictClosure() {
+ var layout = [[6, 'BlueValues', 'delta', null], [7, 'OtherBlues', 'delta', null], [8, 'FamilyBlues', 'delta', null], [9, 'FamilyOtherBlues', 'delta', null], [[12, 9], 'BlueScale', 'num', 0.039625], [[12, 10], 'BlueShift', 'num', 7], [[12, 11], 'BlueFuzz', 'num', 1], [10, 'StdHW', 'num', null], [11, 'StdVW', 'num', null], [[12, 12], 'StemSnapH', 'delta', null], [[12, 13], 'StemSnapV', 'delta', null], [[12, 14], 'ForceBold', 'num', 0], [[12, 17], 'LanguageGroup', 'num', 0], [[12, 18], 'ExpansionFactor', 'num', 0.06], [[12, 19], 'initialRandomSeed', 'num', 0], [20, 'defaultWidthX', 'num', 0], [21, 'nominalWidthX', 'num', 0], [19, 'Subrs', 'offset', null]];
+ var tables = null;
+
+ function CFFPrivateDict(strings) {
+ if (tables === null) {
+ tables = CFFDict.createTables(layout);
+ }
+
+ CFFDict.call(this, tables, strings);
+ this.subrsIndex = null;
+ }
+
+ CFFPrivateDict.prototype = Object.create(CFFDict.prototype);
+ return CFFPrivateDict;
+}();
+
+exports.CFFPrivateDict = CFFPrivateDict;
+var CFFCharsetPredefinedTypes = {
+ ISO_ADOBE: 0,
+ EXPERT: 1,
+ EXPERT_SUBSET: 2
+};
+
+var CFFCharset = function CFFCharsetClosure() {
+ function CFFCharset(predefined, format, charset, raw) {
+ this.predefined = predefined;
+ this.format = format;
+ this.charset = charset;
+ this.raw = raw;
+ }
+
+ return CFFCharset;
+}();
+
+exports.CFFCharset = CFFCharset;
+
+var CFFEncoding = function CFFEncodingClosure() {
+ function CFFEncoding(predefined, format, encoding, raw) {
+ this.predefined = predefined;
+ this.format = format;
+ this.encoding = encoding;
+ this.raw = raw;
+ }
+
+ return CFFEncoding;
+}();
+
+var CFFFDSelect = function CFFFDSelectClosure() {
+ function CFFFDSelect(format, fdSelect) {
+ this.format = format;
+ this.fdSelect = fdSelect;
+ }
+
+ CFFFDSelect.prototype = {
+ getFDIndex: function CFFFDSelect_get(glyphIndex) {
+ if (glyphIndex < 0 || glyphIndex >= this.fdSelect.length) {
+ return -1;
+ }
+
+ return this.fdSelect[glyphIndex];
+ }
+ };
+ return CFFFDSelect;
+}();
+
+exports.CFFFDSelect = CFFFDSelect;
+
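+// Records where 5-byte offset placeholders (0x1d 0 0 0 0) were written so
+// that the real offsets can be patched in once the final positions of the
+// referenced structures are known.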
+var CFFOffsetTracker = function CFFOffsetTrackerClosure() {
+ function CFFOffsetTracker() {
+ this.offsets = Object.create(null);
+ }
+
+ CFFOffsetTracker.prototype = {
+ isTracking: function CFFOffsetTracker_isTracking(key) {
+ return key in this.offsets;
+ },
+ track: function CFFOffsetTracker_track(key, location) {
+ if (key in this.offsets) {
+ throw new _util.FormatError("Already tracking location of ".concat(key));
+ }
+
+ this.offsets[key] = location;
+ },
+ offset: function CFFOffsetTracker_offset(value) {
+ for (var key in this.offsets) {
+ this.offsets[key] += value;
+ }
+ },
+ setEntryLocation: function CFFOffsetTracker_setEntryLocation(key, values, output) {
+ if (!(key in this.offsets)) {
+ throw new _util.FormatError("Not tracking location of ".concat(key));
+ }
+
+ var data = output.data;
+ var dataOffset = this.offsets[key];
+ var size = 5;
+
+ for (var i = 0, ii = values.length; i < ii; ++i) {
+ var offset0 = i * size + dataOffset;
+ var offset1 = offset0 + 1;
+ var offset2 = offset0 + 2;
+ var offset3 = offset0 + 3;
+ var offset4 = offset0 + 4;
+
+ if (data[offset0] !== 0x1d || data[offset1] !== 0 || data[offset2] !== 0 || data[offset3] !== 0 || data[offset4] !== 0) {
+ throw new _util.FormatError('writing to an offset that is not empty');
+ }
+
+ var value = values[i];
+ data[offset0] = 0x1d;
+ data[offset1] = value >> 24 & 0xFF;
+ data[offset2] = value >> 16 & 0xFF;
+ data[offset3] = value >> 8 & 0xFF;
+ data[offset4] = value & 0xFF;
+ }
+ }
+ };
+ return CFFOffsetTracker;
+}();
+
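+// Re-serializes a parsed (and possibly repaired) CFF object into a byte
+// array: header, Name INDEX, Top DICT with placeholder offsets, String and
+// Global Subr INDEXes, then encoding, charset, CharStrings, FDSelect/FDArray
+// for CID fonts and the private dicts, patching the placeholders through
+// CFFOffsetTracker as each piece lands.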
+var CFFCompiler = function CFFCompilerClosure() {
+ function CFFCompiler(cff) {
+ this.cff = cff;
+ }
+
+ CFFCompiler.prototype = {
+ compile: function CFFCompiler_compile() {
+ var cff = this.cff;
+ var output = {
+ data: [],
+ length: 0,
+ add: function CFFCompiler_add(data) {
+ this.data = this.data.concat(data);
+ this.length = this.data.length;
+ }
+ };
+ var header = this.compileHeader(cff.header);
+ output.add(header);
+ var nameIndex = this.compileNameIndex(cff.names);
+ output.add(nameIndex);
+
+ if (cff.isCIDFont) {
+ if (cff.topDict.hasName('FontMatrix')) {
+ var base = cff.topDict.getByName('FontMatrix');
+ cff.topDict.removeByName('FontMatrix');
+
+ for (var i = 0, ii = cff.fdArray.length; i < ii; i++) {
+ var subDict = cff.fdArray[i];
+ var matrix = base.slice(0);
+
+ if (subDict.hasName('FontMatrix')) {
+ matrix = _util.Util.transform(matrix, subDict.getByName('FontMatrix'));
+ }
+
+ subDict.setByName('FontMatrix', matrix);
+ }
+ }
+ }
+
+ cff.topDict.setByName('charset', 0);
+ var compiled = this.compileTopDicts([cff.topDict], output.length, cff.isCIDFont);
+ output.add(compiled.output);
+ var topDictTracker = compiled.trackers[0];
+ var stringIndex = this.compileStringIndex(cff.strings.strings);
+ output.add(stringIndex);
+ var globalSubrIndex = this.compileIndex(cff.globalSubrIndex);
+ output.add(globalSubrIndex);
+
+ if (cff.encoding && cff.topDict.hasName('Encoding')) {
+ if (cff.encoding.predefined) {
+ topDictTracker.setEntryLocation('Encoding', [cff.encoding.format], output);
+ } else {
+ var encoding = this.compileEncoding(cff.encoding);
+ topDictTracker.setEntryLocation('Encoding', [output.length], output);
+ output.add(encoding);
+ }
+ }
+
+ var charset = this.compileCharset(cff.charset, cff.charStrings.count, cff.strings, cff.isCIDFont);
+ topDictTracker.setEntryLocation('charset', [output.length], output);
+ output.add(charset);
+ var charStrings = this.compileCharStrings(cff.charStrings);
+ topDictTracker.setEntryLocation('CharStrings', [output.length], output);
+ output.add(charStrings);
+
+ if (cff.isCIDFont) {
+ topDictTracker.setEntryLocation('FDSelect', [output.length], output);
+ var fdSelect = this.compileFDSelect(cff.fdSelect);
+ output.add(fdSelect);
+ compiled = this.compileTopDicts(cff.fdArray, output.length, true);
+ topDictTracker.setEntryLocation('FDArray', [output.length], output);
+ output.add(compiled.output);
+ var fontDictTrackers = compiled.trackers;
+ this.compilePrivateDicts(cff.fdArray, fontDictTrackers, output);
+ }
+
+ this.compilePrivateDicts([cff.topDict], [topDictTracker], output);
+ output.add([0]);
+ return output.data;
+ },
+ encodeNumber: function CFFCompiler_encodeNumber(value) {
+ if (parseFloat(value) === parseInt(value, 10) && !isNaN(value)) {
+ return this.encodeInteger(value);
+ }
+
+ return this.encodeFloat(value);
+ },
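+ // Reals are nibble-encoded: digits map to themselves, 0xa is '.', 0xb is
+ // 'E', 0xc is 'E-', 0xe is '-' and 0xf terminates; the regexp trims float
+ // artifacts like 0.30000000000000004 before encoding.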
+ encodeFloat: function CFFCompiler_encodeFloat(num) {
+ var value = num.toString();
+ var m = /\.(\d*?)(?:9{5,20}|0{5,20})\d{0,2}(?:e(.+)|$)/.exec(value);
+
+ if (m) {
+ var epsilon = parseFloat('1e' + ((m[2] ? +m[2] : 0) + m[1].length));
+ value = (Math.round(num * epsilon) / epsilon).toString();
+ }
+
+ var nibbles = '';
+ var i, ii;
+
+ for (i = 0, ii = value.length; i < ii; ++i) {
+ var a = value[i];
+
+ if (a === 'e') {
+ nibbles += value[++i] === '-' ? 'c' : 'b';
+ } else if (a === '.') {
+ nibbles += 'a';
+ } else if (a === '-') {
+ nibbles += 'e';
+ } else {
+ nibbles += a;
+ }
+ }
+
+ nibbles += nibbles.length & 1 ? 'f' : 'ff';
+ var out = [30];
+
+ for (i = 0, ii = nibbles.length; i < ii; i += 2) {
+ out.push(parseInt(nibbles.substring(i, i + 2), 16));
+ }
+
+ return out;
+ },
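+ // DICT integer encoding: -107..107 fit in one byte (value + 139), 108..1131
+ // and -1131..-108 take two bytes, 16-bit values use the 0x1c prefix and
+ // anything larger the 0x1d prefix. For example, 0 -> [139],
+ // 1000 -> [250, 124] and -20000 -> [0x1c, 0xb1, 0xe0].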
+ encodeInteger: function CFFCompiler_encodeInteger(value) {
+ var code;
+
+ if (value >= -107 && value <= 107) {
+ code = [value + 139];
+ } else if (value >= 108 && value <= 1131) {
+ value = value - 108;
+ code = [(value >> 8) + 247, value & 0xFF];
+ } else if (value >= -1131 && value <= -108) {
+ value = -value - 108;
+ code = [(value >> 8) + 251, value & 0xFF];
+ } else if (value >= -32768 && value <= 32767) {
+ code = [0x1c, value >> 8 & 0xFF, value & 0xFF];
+ } else {
+ code = [0x1d, value >> 24 & 0xFF, value >> 16 & 0xFF, value >> 8 & 0xFF, value & 0xFF];
+ }
+
+ return code;
+ },
+ compileHeader: function CFFCompiler_compileHeader(header) {
+ return [header.major, header.minor, header.hdrSize, header.offSize];
+ },
+ compileNameIndex: function CFFCompiler_compileNameIndex(names) {
+ var nameIndex = new CFFIndex();
+
+ for (var i = 0, ii = names.length; i < ii; ++i) {
+ var name = names[i];
+ var length = Math.min(name.length, 127);
+ var sanitizedName = new Array(length);
+
+ for (var j = 0; j < length; j++) {
+ var _char = name[j];
+
+ if (_char < '!' || _char > '~' || _char === '[' || _char === ']' || _char === '(' || _char === ')' || _char === '{' || _char === '}' || _char === '<' || _char === '>' || _char === '/' || _char === '%') {
+ _char = '_';
+ }
+
+ sanitizedName[j] = _char;
+ }
+
+ sanitizedName = sanitizedName.join('');
+
+ if (sanitizedName === '') {
+ sanitizedName = 'Bad_Font_Name';
+ }
+
+ nameIndex.add((0, _util.stringToBytes)(sanitizedName));
+ }
+
+ return this.compileIndex(nameIndex);
+ },
+ compileTopDicts: function CFFCompiler_compileTopDicts(dicts, length, removeCidKeys) {
+ var fontDictTrackers = [];
+ var fdArrayIndex = new CFFIndex();
+
+ for (var i = 0, ii = dicts.length; i < ii; ++i) {
+ var fontDict = dicts[i];
+
+ if (removeCidKeys) {
+ fontDict.removeByName('CIDFontVersion');
+ fontDict.removeByName('CIDFontRevision');
+ fontDict.removeByName('CIDFontType');
+ fontDict.removeByName('CIDCount');
+ fontDict.removeByName('UIDBase');
+ }
+
+ var fontDictTracker = new CFFOffsetTracker();
+ var fontDictData = this.compileDict(fontDict, fontDictTracker);
+ fontDictTrackers.push(fontDictTracker);
+ fdArrayIndex.add(fontDictData);
+ fontDictTracker.offset(length);
+ }
+
+ fdArrayIndex = this.compileIndex(fdArrayIndex, fontDictTrackers);
+ return {
+ trackers: fontDictTrackers,
+ output: fdArrayIndex
+ };
+ },
+ compilePrivateDicts: function CFFCompiler_compilePrivateDicts(dicts, trackers, output) {
+ for (var i = 0, ii = dicts.length; i < ii; ++i) {
+ var fontDict = dicts[i];
+ var privateDict = fontDict.privateDict;
+
+ if (!privateDict || !fontDict.hasName('Private')) {
+ throw new _util.FormatError('There must be a private dictionary.');
+ }
+
+ var privateDictTracker = new CFFOffsetTracker();
+ var privateDictData = this.compileDict(privateDict, privateDictTracker);
+ var outputLength = output.length;
+ privateDictTracker.offset(outputLength);
+
+ if (!privateDictData.length) {
+ outputLength = 0;
+ }
+
+ trackers[i].setEntryLocation('Private', [privateDictData.length, outputLength], output);
+ output.add(privateDictData);
+
+ if (privateDict.subrsIndex && privateDict.hasName('Subrs')) {
+ var subrs = this.compileIndex(privateDict.subrsIndex);
+ privateDictTracker.setEntryLocation('Subrs', [privateDictData.length], output);
+ output.add(subrs);
+ }
+ }
+ },
+ compileDict: function CFFCompiler_compileDict(dict, offsetTracker) {
+ var out = [];
+ var order = dict.order;
+
+ for (var i = 0; i < order.length; ++i) {
+ var key = order[i];
+
+ if (!(key in dict.values)) {
+ continue;
+ }
+
+ var values = dict.values[key];
+ var types = dict.types[key];
+
+ if (!Array.isArray(types)) {
+ types = [types];
+ }
+
+ if (!Array.isArray(values)) {
+ values = [values];
+ }
+
+ if (values.length === 0) {
+ continue;
+ }
+
+ for (var j = 0, jj = types.length; j < jj; ++j) {
+ var type = types[j];
+ var value = values[j];
+
+ switch (type) {
+ case 'num':
+ case 'sid':
+ out = out.concat(this.encodeNumber(value));
+ break;
+
+ case 'offset':
+ var name = dict.keyToNameMap[key];
+
+ if (!offsetTracker.isTracking(name)) {
+ offsetTracker.track(name, out.length);
+ }
+
+ out = out.concat([0x1d, 0, 0, 0, 0]);
+ break;
+
+ case 'array':
+ case 'delta':
+ out = out.concat(this.encodeNumber(value));
+
+ for (var k = 1, kk = values.length; k < kk; ++k) {
+ out = out.concat(this.encodeNumber(values[k]));
+ }
+
+ break;
+
+ default:
+ throw new _util.FormatError("Unknown data type of ".concat(type));
+ }
+ }
+
+ out = out.concat(dict.opcodes[key]);
+ }
+
+ return out;
+ },
+ compileStringIndex: function CFFCompiler_compileStringIndex(strings) {
+ var stringIndex = new CFFIndex();
+
+ for (var i = 0, ii = strings.length; i < ii; ++i) {
+ stringIndex.add((0, _util.stringToBytes)(strings[i]));
+ }
+
+ return this.compileIndex(stringIndex);
+ },
+ compileGlobalSubrIndex: function CFFCompiler_compileGlobalSubrIndex() {
+ var globalSubrIndex = this.cff.globalSubrIndex;
+ this.out.writeByteArray(this.compileIndex(globalSubrIndex));
+ },
+ compileCharStrings: function CFFCompiler_compileCharStrings(charStrings) {
+ var charStringsIndex = new CFFIndex();
+
+ for (var i = 0; i < charStrings.count; i++) {
+ var glyph = charStrings.get(i);
+
+ if (glyph.length === 0) {
+ charStringsIndex.add(new Uint8Array([0x8B, 0x0E]));
+ continue;
+ }
+
+ charStringsIndex.add(glyph);
+ }
+
+ return this.compileIndex(charStringsIndex);
+ },
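+ // For CID fonts the charset is emitted as a single format 2 range covering
+ // every glyph after .notdef; otherwise format 0 is used with one SID per
+ // glyph, falling back to SID 0 when a name is missing from the strings.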
+ compileCharset: function CFFCompiler_compileCharset(charset, numGlyphs, strings, isCIDFont) {
+ var out;
+ var numGlyphsLessNotDef = numGlyphs - 1;
+
+ if (isCIDFont) {
+ out = new Uint8Array([2, 0, 0, numGlyphsLessNotDef >> 8 & 0xFF, numGlyphsLessNotDef & 0xFF]);
+ } else {
+ var length = 1 + numGlyphsLessNotDef * 2;
+ out = new Uint8Array(length);
+ out[0] = 0;
+ var charsetIndex = 0;
+ var numCharsets = charset.charset.length;
+ var warned = false;
+
+ for (var i = 1; i < out.length; i += 2) {
+ var sid = 0;
+
+ if (charsetIndex < numCharsets) {
+ var name = charset.charset[charsetIndex++];
+ sid = strings.getSID(name);
+
+ if (sid === -1) {
+ sid = 0;
+
+ if (!warned) {
+ warned = true;
+ (0, _util.warn)("Couldn't find ".concat(name, " in CFF strings"));
+ }
+ }
+ }
+
+ out[i] = sid >> 8 & 0xFF;
+ out[i + 1] = sid & 0xFF;
+ }
+ }
+
+ return this.compileTypedArray(out);
+ },
+ compileEncoding: function CFFCompiler_compileEncoding(encoding) {
+ return this.compileTypedArray(encoding.raw);
+ },
+ compileFDSelect: function CFFCompiler_compileFDSelect(fdSelect) {
+ var format = fdSelect.format;
+ var out, i;
+
+ switch (format) {
+ case 0:
+ out = new Uint8Array(1 + fdSelect.fdSelect.length);
+ out[0] = format;
+
+ for (i = 0; i < fdSelect.fdSelect.length; i++) {
+ out[i + 1] = fdSelect.fdSelect[i];
+ }
+
+ break;
+
+ case 3:
+ var start = 0;
+ var lastFD = fdSelect.fdSelect[0];
+ var ranges = [format, 0, 0, start >> 8 & 0xFF, start & 0xFF, lastFD];
+
+ for (i = 1; i < fdSelect.fdSelect.length; i++) {
+ var currentFD = fdSelect.fdSelect[i];
+
+ if (currentFD !== lastFD) {
+ ranges.push(i >> 8 & 0xFF, i & 0xFF, currentFD);
+ lastFD = currentFD;
+ }
+ }
+
+ var numRanges = (ranges.length - 3) / 3;
+ ranges[1] = numRanges >> 8 & 0xFF;
+ ranges[2] = numRanges & 0xFF;
+ ranges.push(i >> 8 & 0xFF, i & 0xFF);
+ out = new Uint8Array(ranges);
+ break;
+ }
+
+ return this.compileTypedArray(out);
+ },
+ compileTypedArray: function CFFCompiler_compileTypedArray(data) {
+ var out = [];
+
+ for (var i = 0, ii = data.length; i < ii; ++i) {
+ out[i] = data[i];
+ }
+
+ return out;
+ },
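+ // Serializes an INDEX: 2-byte count, a 1-byte offset size chosen from the
+ // total data length, count + 1 one-based offsets, then the raw object
+ // bytes; any offset trackers for the contained dicts are shifted by the
+ // object's position inside the serialized INDEX.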
+ compileIndex: function CFFCompiler_compileIndex(index, trackers) {
+ trackers = trackers || [];
+ var objects = index.objects;
+ var count = objects.length;
+
+ if (count === 0) {
+ return [0, 0, 0];
+ }
+
+ var data = [count >> 8 & 0xFF, count & 0xff];
+ var lastOffset = 1,
+ i;
+
+ for (i = 0; i < count; ++i) {
+ lastOffset += objects[i].length;
+ }
+
+ var offsetSize;
+
+ if (lastOffset < 0x100) {
+ offsetSize = 1;
+ } else if (lastOffset < 0x10000) {
+ offsetSize = 2;
+ } else if (lastOffset < 0x1000000) {
+ offsetSize = 3;
+ } else {
+ offsetSize = 4;
+ }
+
+ data.push(offsetSize);
+ var relativeOffset = 1;
+
+ for (i = 0; i < count + 1; i++) {
+ if (offsetSize === 1) {
+ data.push(relativeOffset & 0xFF);
+ } else if (offsetSize === 2) {
+ data.push(relativeOffset >> 8 & 0xFF, relativeOffset & 0xFF);
+ } else if (offsetSize === 3) {
+ data.push(relativeOffset >> 16 & 0xFF, relativeOffset >> 8 & 0xFF, relativeOffset & 0xFF);
+ } else {
+ data.push(relativeOffset >>> 24 & 0xFF, relativeOffset >> 16 & 0xFF, relativeOffset >> 8 & 0xFF, relativeOffset & 0xFF);
+ }
+
+ if (objects[i]) {
+ relativeOffset += objects[i].length;
+ }
+ }
+
+ for (i = 0; i < count; i++) {
+ if (trackers[i]) {
+ trackers[i].offset(data.length);
+ }
+
+ for (var j = 0, jj = objects[i].length; j < jj; j++) {
+ data.push(objects[i][j]);
+ }
+ }
+
+ return data;
+ }
+ };
+ return CFFCompiler;
+}();
+
+exports.CFFCompiler = CFFCompiler;
+
+/***/ }),
+/* 176 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.ExpertSubsetCharset = exports.ExpertCharset = exports.ISOAdobeCharset = void 0;
+var ISOAdobeCharset = ['.notdef', 'space', 'exclam', 'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand', 'quoteright', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright', 'asciicircum', 'underscore', 'quoteleft', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright', 'asciitilde', 'exclamdown', 'cent', 'sterling', 'fraction', 'yen', 'florin', 'section', 'currency', 'quotesingle', 'quotedblleft', 'guillemotleft', 'guilsinglleft', 'guilsinglright', 'fi', 'fl', 'endash', 'dagger', 'daggerdbl', 'periodcentered', 'paragraph', 'bullet', 'quotesinglbase', 'quotedblbase', 'quotedblright', 'guillemotright', 'ellipsis', 'perthousand', 'questiondown', 'grave', 'acute', 'circumflex', 'tilde', 'macron', 'breve', 'dotaccent', 'dieresis', 'ring', 'cedilla', 'hungarumlaut', 'ogonek', 'caron', 'emdash', 'AE', 'ordfeminine', 'Lslash', 'Oslash', 'OE', 'ordmasculine', 'ae', 'dotlessi', 'lslash', 'oslash', 'oe', 'germandbls', 'onesuperior', 'logicalnot', 'mu', 'trademark', 'Eth', 'onehalf', 'plusminus', 'Thorn', 'onequarter', 'divide', 'brokenbar', 'degree', 'thorn', 'threequarters', 'twosuperior', 'registered', 'minus', 'eth', 'multiply', 'threesuperior', 'copyright', 'Aacute', 'Acircumflex', 'Adieresis', 'Agrave', 'Aring', 'Atilde', 'Ccedilla', 'Eacute', 'Ecircumflex', 'Edieresis', 'Egrave', 'Iacute', 'Icircumflex', 'Idieresis', 'Igrave', 'Ntilde', 'Oacute', 'Ocircumflex', 'Odieresis', 'Ograve', 'Otilde', 'Scaron', 'Uacute', 'Ucircumflex', 'Udieresis', 'Ugrave', 'Yacute', 'Ydieresis', 'Zcaron', 'aacute', 'acircumflex', 'adieresis', 'agrave', 'aring', 'atilde', 'ccedilla', 'eacute', 'ecircumflex', 'edieresis', 'egrave', 'iacute', 'icircumflex', 'idieresis', 'igrave', 'ntilde', 'oacute', 'ocircumflex', 'odieresis', 'ograve', 'otilde', 'scaron', 'uacute', 'ucircumflex', 'udieresis', 'ugrave', 'yacute', 'ydieresis', 'zcaron'];
+exports.ISOAdobeCharset = ISOAdobeCharset;
+var ExpertCharset = ['.notdef', 'space', 'exclamsmall', 'Hungarumlautsmall', 'dollaroldstyle', 'dollarsuperior', 'ampersandsmall', 'Acutesmall', 'parenleftsuperior', 'parenrightsuperior', 'twodotenleader', 'onedotenleader', 'comma', 'hyphen', 'period', 'fraction', 'zerooldstyle', 'oneoldstyle', 'twooldstyle', 'threeoldstyle', 'fouroldstyle', 'fiveoldstyle', 'sixoldstyle', 'sevenoldstyle', 'eightoldstyle', 'nineoldstyle', 'colon', 'semicolon', 'commasuperior', 'threequartersemdash', 'periodsuperior', 'questionsmall', 'asuperior', 'bsuperior', 'centsuperior', 'dsuperior', 'esuperior', 'isuperior', 'lsuperior', 'msuperior', 'nsuperior', 'osuperior', 'rsuperior', 'ssuperior', 'tsuperior', 'ff', 'fi', 'fl', 'ffi', 'ffl', 'parenleftinferior', 'parenrightinferior', 'Circumflexsmall', 'hyphensuperior', 'Gravesmall', 'Asmall', 'Bsmall', 'Csmall', 'Dsmall', 'Esmall', 'Fsmall', 'Gsmall', 'Hsmall', 'Ismall', 'Jsmall', 'Ksmall', 'Lsmall', 'Msmall', 'Nsmall', 'Osmall', 'Psmall', 'Qsmall', 'Rsmall', 'Ssmall', 'Tsmall', 'Usmall', 'Vsmall', 'Wsmall', 'Xsmall', 'Ysmall', 'Zsmall', 'colonmonetary', 'onefitted', 'rupiah', 'Tildesmall', 'exclamdownsmall', 'centoldstyle', 'Lslashsmall', 'Scaronsmall', 'Zcaronsmall', 'Dieresissmall', 'Brevesmall', 'Caronsmall', 'Dotaccentsmall', 'Macronsmall', 'figuredash', 'hypheninferior', 'Ogoneksmall', 'Ringsmall', 'Cedillasmall', 'onequarter', 'onehalf', 'threequarters', 'questiondownsmall', 'oneeighth', 'threeeighths', 'fiveeighths', 'seveneighths', 'onethird', 'twothirds', 'zerosuperior', 'onesuperior', 'twosuperior', 'threesuperior', 'foursuperior', 'fivesuperior', 'sixsuperior', 'sevensuperior', 'eightsuperior', 'ninesuperior', 'zeroinferior', 'oneinferior', 'twoinferior', 'threeinferior', 'fourinferior', 'fiveinferior', 'sixinferior', 'seveninferior', 'eightinferior', 'nineinferior', 'centinferior', 'dollarinferior', 'periodinferior', 'commainferior', 'Agravesmall', 'Aacutesmall', 'Acircumflexsmall', 'Atildesmall', 'Adieresissmall', 'Aringsmall', 'AEsmall', 'Ccedillasmall', 'Egravesmall', 'Eacutesmall', 'Ecircumflexsmall', 'Edieresissmall', 'Igravesmall', 'Iacutesmall', 'Icircumflexsmall', 'Idieresissmall', 'Ethsmall', 'Ntildesmall', 'Ogravesmall', 'Oacutesmall', 'Ocircumflexsmall', 'Otildesmall', 'Odieresissmall', 'OEsmall', 'Oslashsmall', 'Ugravesmall', 'Uacutesmall', 'Ucircumflexsmall', 'Udieresissmall', 'Yacutesmall', 'Thornsmall', 'Ydieresissmall'];
+exports.ExpertCharset = ExpertCharset;
+var ExpertSubsetCharset = ['.notdef', 'space', 'dollaroldstyle', 'dollarsuperior', 'parenleftsuperior', 'parenrightsuperior', 'twodotenleader', 'onedotenleader', 'comma', 'hyphen', 'period', 'fraction', 'zerooldstyle', 'oneoldstyle', 'twooldstyle', 'threeoldstyle', 'fouroldstyle', 'fiveoldstyle', 'sixoldstyle', 'sevenoldstyle', 'eightoldstyle', 'nineoldstyle', 'colon', 'semicolon', 'commasuperior', 'threequartersemdash', 'periodsuperior', 'asuperior', 'bsuperior', 'centsuperior', 'dsuperior', 'esuperior', 'isuperior', 'lsuperior', 'msuperior', 'nsuperior', 'osuperior', 'rsuperior', 'ssuperior', 'tsuperior', 'ff', 'fi', 'fl', 'ffi', 'ffl', 'parenleftinferior', 'parenrightinferior', 'hyphensuperior', 'colonmonetary', 'onefitted', 'rupiah', 'centoldstyle', 'figuredash', 'hypheninferior', 'onequarter', 'onehalf', 'threequarters', 'oneeighth', 'threeeighths', 'fiveeighths', 'seveneighths', 'onethird', 'twothirds', 'zerosuperior', 'onesuperior', 'twosuperior', 'threesuperior', 'foursuperior', 'fivesuperior', 'sixsuperior', 'sevensuperior', 'eightsuperior', 'ninesuperior', 'zeroinferior', 'oneinferior', 'twoinferior', 'threeinferior', 'fourinferior', 'fiveinferior', 'sixinferior', 'seveninferior', 'eightinferior', 'nineinferior', 'centinferior', 'dollarinferior', 'periodinferior', 'commainferior'];
+exports.ExpertSubsetCharset = ExpertSubsetCharset;
+
+/***/ }),
+/* 177 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.getEncoding = getEncoding;
+exports.ExpertEncoding = exports.ZapfDingbatsEncoding = exports.SymbolSetEncoding = exports.MacRomanEncoding = exports.StandardEncoding = exports.WinAnsiEncoding = void 0;
+var ExpertEncoding = ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'space', 'exclamsmall', 'Hungarumlautsmall', '', 'dollaroldstyle', 'dollarsuperior', 'ampersandsmall', 'Acutesmall', 'parenleftsuperior', 'parenrightsuperior', 'twodotenleader', 'onedotenleader', 'comma', 'hyphen', 'period', 'fraction', 'zerooldstyle', 'oneoldstyle', 'twooldstyle', 'threeoldstyle', 'fouroldstyle', 'fiveoldstyle', 'sixoldstyle', 'sevenoldstyle', 'eightoldstyle', 'nineoldstyle', 'colon', 'semicolon', 'commasuperior', 'threequartersemdash', 'periodsuperior', 'questionsmall', '', 'asuperior', 'bsuperior', 'centsuperior', 'dsuperior', 'esuperior', '', '', '', 'isuperior', '', '', 'lsuperior', 'msuperior', 'nsuperior', 'osuperior', '', '', 'rsuperior', 'ssuperior', 'tsuperior', '', 'ff', 'fi', 'fl', 'ffi', 'ffl', 'parenleftinferior', '', 'parenrightinferior', 'Circumflexsmall', 'hyphensuperior', 'Gravesmall', 'Asmall', 'Bsmall', 'Csmall', 'Dsmall', 'Esmall', 'Fsmall', 'Gsmall', 'Hsmall', 'Ismall', 'Jsmall', 'Ksmall', 'Lsmall', 'Msmall', 'Nsmall', 'Osmall', 'Psmall', 'Qsmall', 'Rsmall', 'Ssmall', 'Tsmall', 'Usmall', 'Vsmall', 'Wsmall', 'Xsmall', 'Ysmall', 'Zsmall', 'colonmonetary', 'onefitted', 'rupiah', 'Tildesmall', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'exclamdownsmall', 'centoldstyle', 'Lslashsmall', '', '', 'Scaronsmall', 'Zcaronsmall', 'Dieresissmall', 'Brevesmall', 'Caronsmall', '', 'Dotaccentsmall', '', '', 'Macronsmall', '', '', 'figuredash', 'hypheninferior', '', '', 'Ogoneksmall', 'Ringsmall', 'Cedillasmall', '', '', '', 'onequarter', 'onehalf', 'threequarters', 'questiondownsmall', 'oneeighth', 'threeeighths', 'fiveeighths', 'seveneighths', 'onethird', 'twothirds', '', '', 'zerosuperior', 'onesuperior', 'twosuperior', 'threesuperior', 'foursuperior', 'fivesuperior', 'sixsuperior', 'sevensuperior', 'eightsuperior', 'ninesuperior', 'zeroinferior', 'oneinferior', 'twoinferior', 'threeinferior', 'fourinferior', 'fiveinferior', 'sixinferior', 'seveninferior', 'eightinferior', 'nineinferior', 'centinferior', 'dollarinferior', 'periodinferior', 'commainferior', 'Agravesmall', 'Aacutesmall', 'Acircumflexsmall', 'Atildesmall', 'Adieresissmall', 'Aringsmall', 'AEsmall', 'Ccedillasmall', 'Egravesmall', 'Eacutesmall', 'Ecircumflexsmall', 'Edieresissmall', 'Igravesmall', 'Iacutesmall', 'Icircumflexsmall', 'Idieresissmall', 'Ethsmall', 'Ntildesmall', 'Ogravesmall', 'Oacutesmall', 'Ocircumflexsmall', 'Otildesmall', 'Odieresissmall', 'OEsmall', 'Oslashsmall', 'Ugravesmall', 'Uacutesmall', 'Ucircumflexsmall', 'Udieresissmall', 'Yacutesmall', 'Thornsmall', 'Ydieresissmall'];
+exports.ExpertEncoding = ExpertEncoding;
+var MacExpertEncoding = ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'space', 'exclamsmall', 'Hungarumlautsmall', 'centoldstyle', 'dollaroldstyle', 'dollarsuperior', 'ampersandsmall', 'Acutesmall', 'parenleftsuperior', 'parenrightsuperior', 'twodotenleader', 'onedotenleader', 'comma', 'hyphen', 'period', 'fraction', 'zerooldstyle', 'oneoldstyle', 'twooldstyle', 'threeoldstyle', 'fouroldstyle', 'fiveoldstyle', 'sixoldstyle', 'sevenoldstyle', 'eightoldstyle', 'nineoldstyle', 'colon', 'semicolon', '', 'threequartersemdash', '', 'questionsmall', '', '', '', '', 'Ethsmall', '', '', 'onequarter', 'onehalf', 'threequarters', 'oneeighth', 'threeeighths', 'fiveeighths', 'seveneighths', 'onethird', 'twothirds', '', '', '', '', '', '', 'ff', 'fi', 'fl', 'ffi', 'ffl', 'parenleftinferior', '', 'parenrightinferior', 'Circumflexsmall', 'hypheninferior', 'Gravesmall', 'Asmall', 'Bsmall', 'Csmall', 'Dsmall', 'Esmall', 'Fsmall', 'Gsmall', 'Hsmall', 'Ismall', 'Jsmall', 'Ksmall', 'Lsmall', 'Msmall', 'Nsmall', 'Osmall', 'Psmall', 'Qsmall', 'Rsmall', 'Ssmall', 'Tsmall', 'Usmall', 'Vsmall', 'Wsmall', 'Xsmall', 'Ysmall', 'Zsmall', 'colonmonetary', 'onefitted', 'rupiah', 'Tildesmall', '', '', 'asuperior', 'centsuperior', '', '', '', '', 'Aacutesmall', 'Agravesmall', 'Acircumflexsmall', 'Adieresissmall', 'Atildesmall', 'Aringsmall', 'Ccedillasmall', 'Eacutesmall', 'Egravesmall', 'Ecircumflexsmall', 'Edieresissmall', 'Iacutesmall', 'Igravesmall', 'Icircumflexsmall', 'Idieresissmall', 'Ntildesmall', 'Oacutesmall', 'Ogravesmall', 'Ocircumflexsmall', 'Odieresissmall', 'Otildesmall', 'Uacutesmall', 'Ugravesmall', 'Ucircumflexsmall', 'Udieresissmall', '', 'eightsuperior', 'fourinferior', 'threeinferior', 'sixinferior', 'eightinferior', 'seveninferior', 'Scaronsmall', '', 'centinferior', 'twoinferior', '', 'Dieresissmall', '', 'Caronsmall', 'osuperior', 'fiveinferior', '', 'commainferior', 'periodinferior', 'Yacutesmall', '', 'dollarinferior', '', '', 'Thornsmall', '', 'nineinferior', 'zeroinferior', 'Zcaronsmall', 'AEsmall', 'Oslashsmall', 'questiondownsmall', 'oneinferior', 'Lslashsmall', '', '', '', '', '', '', 'Cedillasmall', '', '', '', '', '', 'OEsmall', 'figuredash', 'hyphensuperior', '', '', '', '', 'exclamdownsmall', '', 'Ydieresissmall', '', 'onesuperior', 'twosuperior', 'threesuperior', 'foursuperior', 'fivesuperior', 'sixsuperior', 'sevensuperior', 'ninesuperior', 'zerosuperior', '', 'esuperior', 'rsuperior', 'tsuperior', '', '', 'isuperior', 'ssuperior', 'dsuperior', '', '', '', '', '', 'lsuperior', 'Ogoneksmall', 'Brevesmall', 'Macronsmall', 'bsuperior', 'nsuperior', 'msuperior', 'commasuperior', 'periodsuperior', 'Dotaccentsmall', 'Ringsmall', '', '', '', ''];
+var MacRomanEncoding = ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'space', 'exclam', 'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand', 'quotesingle', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright', 'asciicircum', 'underscore', 'grave', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright', 'asciitilde', '', 'Adieresis', 'Aring', 'Ccedilla', 'Eacute', 'Ntilde', 'Odieresis', 'Udieresis', 'aacute', 'agrave', 'acircumflex', 'adieresis', 'atilde', 'aring', 'ccedilla', 'eacute', 'egrave', 'ecircumflex', 'edieresis', 'iacute', 'igrave', 'icircumflex', 'idieresis', 'ntilde', 'oacute', 'ograve', 'ocircumflex', 'odieresis', 'otilde', 'uacute', 'ugrave', 'ucircumflex', 'udieresis', 'dagger', 'degree', 'cent', 'sterling', 'section', 'bullet', 'paragraph', 'germandbls', 'registered', 'copyright', 'trademark', 'acute', 'dieresis', 'notequal', 'AE', 'Oslash', 'infinity', 'plusminus', 'lessequal', 'greaterequal', 'yen', 'mu', 'partialdiff', 'summation', 'product', 'pi', 'integral', 'ordfeminine', 'ordmasculine', 'Omega', 'ae', 'oslash', 'questiondown', 'exclamdown', 'logicalnot', 'radical', 'florin', 'approxequal', 'Delta', 'guillemotleft', 'guillemotright', 'ellipsis', 'space', 'Agrave', 'Atilde', 'Otilde', 'OE', 'oe', 'endash', 'emdash', 'quotedblleft', 'quotedblright', 'quoteleft', 'quoteright', 'divide', 'lozenge', 'ydieresis', 'Ydieresis', 'fraction', 'currency', 'guilsinglleft', 'guilsinglright', 'fi', 'fl', 'daggerdbl', 'periodcentered', 'quotesinglbase', 'quotedblbase', 'perthousand', 'Acircumflex', 'Ecircumflex', 'Aacute', 'Edieresis', 'Egrave', 'Iacute', 'Icircumflex', 'Idieresis', 'Igrave', 'Oacute', 'Ocircumflex', 'apple', 'Ograve', 'Uacute', 'Ucircumflex', 'Ugrave', 'dotlessi', 'circumflex', 'tilde', 'macron', 'breve', 'dotaccent', 'ring', 'cedilla', 'hungarumlaut', 'ogonek', 'caron'];
+exports.MacRomanEncoding = MacRomanEncoding;
+var StandardEncoding = ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'space', 'exclam', 'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand', 'quoteright', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright', 'asciicircum', 'underscore', 'quoteleft', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright', 'asciitilde', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'exclamdown', 'cent', 'sterling', 'fraction', 'yen', 'florin', 'section', 'currency', 'quotesingle', 'quotedblleft', 'guillemotleft', 'guilsinglleft', 'guilsinglright', 'fi', 'fl', '', 'endash', 'dagger', 'daggerdbl', 'periodcentered', '', 'paragraph', 'bullet', 'quotesinglbase', 'quotedblbase', 'quotedblright', 'guillemotright', 'ellipsis', 'perthousand', '', 'questiondown', '', 'grave', 'acute', 'circumflex', 'tilde', 'macron', 'breve', 'dotaccent', 'dieresis', '', 'ring', 'cedilla', '', 'hungarumlaut', 'ogonek', 'caron', 'emdash', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'AE', '', 'ordfeminine', '', '', '', '', 'Lslash', 'Oslash', 'OE', 'ordmasculine', '', '', '', '', '', 'ae', '', '', '', 'dotlessi', '', '', 'lslash', 'oslash', 'oe', 'germandbls', '', '', '', ''];
+exports.StandardEncoding = StandardEncoding;
+var WinAnsiEncoding = ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'space', 'exclam', 'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand', 'quotesingle', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright', 'asciicircum', 'underscore', 'grave', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright', 'asciitilde', 'bullet', 'Euro', 'bullet', 'quotesinglbase', 'florin', 'quotedblbase', 'ellipsis', 'dagger', 'daggerdbl', 'circumflex', 'perthousand', 'Scaron', 'guilsinglleft', 'OE', 'bullet', 'Zcaron', 'bullet', 'bullet', 'quoteleft', 'quoteright', 'quotedblleft', 'quotedblright', 'bullet', 'endash', 'emdash', 'tilde', 'trademark', 'scaron', 'guilsinglright', 'oe', 'bullet', 'zcaron', 'Ydieresis', 'space', 'exclamdown', 'cent', 'sterling', 'currency', 'yen', 'brokenbar', 'section', 'dieresis', 'copyright', 'ordfeminine', 'guillemotleft', 'logicalnot', 'hyphen', 'registered', 'macron', 'degree', 'plusminus', 'twosuperior', 'threesuperior', 'acute', 'mu', 'paragraph', 'periodcentered', 'cedilla', 'onesuperior', 'ordmasculine', 'guillemotright', 'onequarter', 'onehalf', 'threequarters', 'questiondown', 'Agrave', 'Aacute', 'Acircumflex', 'Atilde', 'Adieresis', 'Aring', 'AE', 'Ccedilla', 'Egrave', 'Eacute', 'Ecircumflex', 'Edieresis', 'Igrave', 'Iacute', 'Icircumflex', 'Idieresis', 'Eth', 'Ntilde', 'Ograve', 'Oacute', 'Ocircumflex', 'Otilde', 'Odieresis', 'multiply', 'Oslash', 'Ugrave', 'Uacute', 'Ucircumflex', 'Udieresis', 'Yacute', 'Thorn', 'germandbls', 'agrave', 'aacute', 'acircumflex', 'atilde', 'adieresis', 'aring', 'ae', 'ccedilla', 'egrave', 'eacute', 'ecircumflex', 'edieresis', 'igrave', 'iacute', 'icircumflex', 'idieresis', 'eth', 'ntilde', 'ograve', 'oacute', 'ocircumflex', 'otilde', 'odieresis', 'divide', 'oslash', 'ugrave', 'uacute', 'ucircumflex', 'udieresis', 'yacute', 'thorn', 'ydieresis'];
+exports.WinAnsiEncoding = WinAnsiEncoding;
+var SymbolSetEncoding = ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'space', 'exclam', 'universal', 'numbersign', 'existential', 'percent', 'ampersand', 'suchthat', 'parenleft', 'parenright', 'asteriskmath', 'plus', 'comma', 'minus', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal', 'greater', 'question', 'congruent', 'Alpha', 'Beta', 'Chi', 'Delta', 'Epsilon', 'Phi', 'Gamma', 'Eta', 'Iota', 'theta1', 'Kappa', 'Lambda', 'Mu', 'Nu', 'Omicron', 'Pi', 'Theta', 'Rho', 'Sigma', 'Tau', 'Upsilon', 'sigma1', 'Omega', 'Xi', 'Psi', 'Zeta', 'bracketleft', 'therefore', 'bracketright', 'perpendicular', 'underscore', 'radicalex', 'alpha', 'beta', 'chi', 'delta', 'epsilon', 'phi', 'gamma', 'eta', 'iota', 'phi1', 'kappa', 'lambda', 'mu', 'nu', 'omicron', 'pi', 'theta', 'rho', 'sigma', 'tau', 'upsilon', 'omega1', 'omega', 'xi', 'psi', 'zeta', 'braceleft', 'bar', 'braceright', 'similar', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'Euro', 'Upsilon1', 'minute', 'lessequal', 'fraction', 'infinity', 'florin', 'club', 'diamond', 'heart', 'spade', 'arrowboth', 'arrowleft', 'arrowup', 'arrowright', 'arrowdown', 'degree', 'plusminus', 'second', 'greaterequal', 'multiply', 'proportional', 'partialdiff', 'bullet', 'divide', 'notequal', 'equivalence', 'approxequal', 'ellipsis', 'arrowvertex', 'arrowhorizex', 'carriagereturn', 'aleph', 'Ifraktur', 'Rfraktur', 'weierstrass', 'circlemultiply', 'circleplus', 'emptyset', 'intersection', 'union', 'propersuperset', 'reflexsuperset', 'notsubset', 'propersubset', 'reflexsubset', 'element', 'notelement', 'angle', 'gradient', 'registerserif', 'copyrightserif', 'trademarkserif', 'product', 'radical', 'dotmath', 'logicalnot', 'logicaland', 'logicalor', 'arrowdblboth', 'arrowdblleft', 'arrowdblup', 'arrowdblright', 'arrowdbldown', 'lozenge', 'angleleft', 'registersans', 'copyrightsans', 'trademarksans', 'summation', 'parenlefttp', 'parenleftex', 'parenleftbt', 'bracketlefttp', 'bracketleftex', 'bracketleftbt', 'bracelefttp', 'braceleftmid', 'braceleftbt', 'braceex', '', 'angleright', 'integral', 'integraltp', 'integralex', 'integralbt', 'parenrighttp', 'parenrightex', 'parenrightbt', 'bracketrighttp', 'bracketrightex', 'bracketrightbt', 'bracerighttp', 'bracerightmid', 'bracerightbt', ''];
+exports.SymbolSetEncoding = SymbolSetEncoding;
+var ZapfDingbatsEncoding = ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'space', 'a1', 'a2', 'a202', 'a3', 'a4', 'a5', 'a119', 'a118', 'a117', 'a11', 'a12', 'a13', 'a14', 'a15', 'a16', 'a105', 'a17', 'a18', 'a19', 'a20', 'a21', 'a22', 'a23', 'a24', 'a25', 'a26', 'a27', 'a28', 'a6', 'a7', 'a8', 'a9', 'a10', 'a29', 'a30', 'a31', 'a32', 'a33', 'a34', 'a35', 'a36', 'a37', 'a38', 'a39', 'a40', 'a41', 'a42', 'a43', 'a44', 'a45', 'a46', 'a47', 'a48', 'a49', 'a50', 'a51', 'a52', 'a53', 'a54', 'a55', 'a56', 'a57', 'a58', 'a59', 'a60', 'a61', 'a62', 'a63', 'a64', 'a65', 'a66', 'a67', 'a68', 'a69', 'a70', 'a71', 'a72', 'a73', 'a74', 'a203', 'a75', 'a204', 'a76', 'a77', 'a78', 'a79', 'a81', 'a82', 'a83', 'a84', 'a97', 'a98', 'a99', 'a100', '', 'a89', 'a90', 'a93', 'a94', 'a91', 'a92', 'a205', 'a85', 'a206', 'a86', 'a87', 'a88', 'a95', 'a96', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'a101', 'a102', 'a103', 'a104', 'a106', 'a107', 'a108', 'a112', 'a111', 'a110', 'a109', 'a120', 'a121', 'a122', 'a123', 'a124', 'a125', 'a126', 'a127', 'a128', 'a129', 'a130', 'a131', 'a132', 'a133', 'a134', 'a135', 'a136', 'a137', 'a138', 'a139', 'a140', 'a141', 'a142', 'a143', 'a144', 'a145', 'a146', 'a147', 'a148', 'a149', 'a150', 'a151', 'a152', 'a153', 'a154', 'a155', 'a156', 'a157', 'a158', 'a159', 'a160', 'a161', 'a163', 'a164', 'a196', 'a165', 'a192', 'a166', 'a167', 'a168', 'a169', 'a170', 'a171', 'a172', 'a173', 'a162', 'a174', 'a175', 'a176', 'a177', 'a178', 'a179', 'a193', 'a180', 'a199', 'a181', 'a200', 'a182', '', 'a201', 'a183', 'a184', 'a197', 'a185', 'a194', 'a198', 'a186', 'a195', 'a187', 'a188', 'a189', 'a190', 'a191', ''];
+exports.ZapfDingbatsEncoding = ZapfDingbatsEncoding;
+
+function getEncoding(encodingName) {
+ switch (encodingName) {
+ case 'WinAnsiEncoding':
+ return WinAnsiEncoding;
+
+ case 'StandardEncoding':
+ return StandardEncoding;
+
+ case 'MacRomanEncoding':
+ return MacRomanEncoding;
+
+ case 'SymbolSetEncoding':
+ return SymbolSetEncoding;
+
+ case 'ZapfDingbatsEncoding':
+ return ZapfDingbatsEncoding;
+
+ case 'ExpertEncoding':
+ return ExpertEncoding;
+
+ case 'MacExpertEncoding':
+ return MacExpertEncoding;
+
+ default:
+ return null;
+ }
+}
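+
+// Usage sketch (illustrative comment only, not part of the upstream pdf.js source):
+// each array above maps a single-byte character code (0-255) to a glyph name, and
+// getEncoding() selects one of these predefined PDF encodings by name.
+//
+//   var enc = getEncoding('WinAnsiEncoding'); // array of 256 glyph names
+//   enc[0x41] === 'A';                        // code 0x41 resolves to the glyph 'A'
+//   getEncoding('NoSuchEncoding') === null;   // unrecognized names return null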
+
+/***/ }),
+/* 178 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+var getLookupTableFactory = __w_pdfjs_require__(154).getLookupTableFactory;
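+
+// The table built below is an Adobe Glyph List-style mapping from glyph names to
+// Unicode code points; getLookupTableFactory defers constructing it until the
+// first call to getGlyphsUnicode() and caches the result for later calls.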
+var getGlyphsUnicode = getLookupTableFactory(function (t) {
+ t['A'] = 0x0041;
+ t['AE'] = 0x00C6;
+ t['AEacute'] = 0x01FC;
+ t['AEmacron'] = 0x01E2;
+ t['AEsmall'] = 0xF7E6;
+ t['Aacute'] = 0x00C1;
+ t['Aacutesmall'] = 0xF7E1;
+ t['Abreve'] = 0x0102;
+ t['Abreveacute'] = 0x1EAE;
+ t['Abrevecyrillic'] = 0x04D0;
+ t['Abrevedotbelow'] = 0x1EB6;
+ t['Abrevegrave'] = 0x1EB0;
+ t['Abrevehookabove'] = 0x1EB2;
+ t['Abrevetilde'] = 0x1EB4;
+ t['Acaron'] = 0x01CD;
+ t['Acircle'] = 0x24B6;
+ t['Acircumflex'] = 0x00C2;
+ t['Acircumflexacute'] = 0x1EA4;
+ t['Acircumflexdotbelow'] = 0x1EAC;
+ t['Acircumflexgrave'] = 0x1EA6;
+ t['Acircumflexhookabove'] = 0x1EA8;
+ t['Acircumflexsmall'] = 0xF7E2;
+ t['Acircumflextilde'] = 0x1EAA;
+ t['Acute'] = 0xF6C9;
+ t['Acutesmall'] = 0xF7B4;
+ t['Acyrillic'] = 0x0410;
+ t['Adblgrave'] = 0x0200;
+ t['Adieresis'] = 0x00C4;
+ t['Adieresiscyrillic'] = 0x04D2;
+ t['Adieresismacron'] = 0x01DE;
+ t['Adieresissmall'] = 0xF7E4;
+ t['Adotbelow'] = 0x1EA0;
+ t['Adotmacron'] = 0x01E0;
+ t['Agrave'] = 0x00C0;
+ t['Agravesmall'] = 0xF7E0;
+ t['Ahookabove'] = 0x1EA2;
+ t['Aiecyrillic'] = 0x04D4;
+ t['Ainvertedbreve'] = 0x0202;
+ t['Alpha'] = 0x0391;
+ t['Alphatonos'] = 0x0386;
+ t['Amacron'] = 0x0100;
+ t['Amonospace'] = 0xFF21;
+ t['Aogonek'] = 0x0104;
+ t['Aring'] = 0x00C5;
+ t['Aringacute'] = 0x01FA;
+ t['Aringbelow'] = 0x1E00;
+ t['Aringsmall'] = 0xF7E5;
+ t['Asmall'] = 0xF761;
+ t['Atilde'] = 0x00C3;
+ t['Atildesmall'] = 0xF7E3;
+ t['Aybarmenian'] = 0x0531;
+ t['B'] = 0x0042;
+ t['Bcircle'] = 0x24B7;
+ t['Bdotaccent'] = 0x1E02;
+ t['Bdotbelow'] = 0x1E04;
+ t['Becyrillic'] = 0x0411;
+ t['Benarmenian'] = 0x0532;
+ t['Beta'] = 0x0392;
+ t['Bhook'] = 0x0181;
+ t['Blinebelow'] = 0x1E06;
+ t['Bmonospace'] = 0xFF22;
+ t['Brevesmall'] = 0xF6F4;
+ t['Bsmall'] = 0xF762;
+ t['Btopbar'] = 0x0182;
+ t['C'] = 0x0043;
+ t['Caarmenian'] = 0x053E;
+ t['Cacute'] = 0x0106;
+ t['Caron'] = 0xF6CA;
+ t['Caronsmall'] = 0xF6F5;
+ t['Ccaron'] = 0x010C;
+ t['Ccedilla'] = 0x00C7;
+ t['Ccedillaacute'] = 0x1E08;
+ t['Ccedillasmall'] = 0xF7E7;
+ t['Ccircle'] = 0x24B8;
+ t['Ccircumflex'] = 0x0108;
+ t['Cdot'] = 0x010A;
+ t['Cdotaccent'] = 0x010A;
+ t['Cedillasmall'] = 0xF7B8;
+ t['Chaarmenian'] = 0x0549;
+ t['Cheabkhasiancyrillic'] = 0x04BC;
+ t['Checyrillic'] = 0x0427;
+ t['Chedescenderabkhasiancyrillic'] = 0x04BE;
+ t['Chedescendercyrillic'] = 0x04B6;
+ t['Chedieresiscyrillic'] = 0x04F4;
+ t['Cheharmenian'] = 0x0543;
+ t['Chekhakassiancyrillic'] = 0x04CB;
+ t['Cheverticalstrokecyrillic'] = 0x04B8;
+ t['Chi'] = 0x03A7;
+ t['Chook'] = 0x0187;
+ t['Circumflexsmall'] = 0xF6F6;
+ t['Cmonospace'] = 0xFF23;
+ t['Coarmenian'] = 0x0551;
+ t['Csmall'] = 0xF763;
+ t['D'] = 0x0044;
+ t['DZ'] = 0x01F1;
+ t['DZcaron'] = 0x01C4;
+ t['Daarmenian'] = 0x0534;
+ t['Dafrican'] = 0x0189;
+ t['Dcaron'] = 0x010E;
+ t['Dcedilla'] = 0x1E10;
+ t['Dcircle'] = 0x24B9;
+ t['Dcircumflexbelow'] = 0x1E12;
+ t['Dcroat'] = 0x0110;
+ t['Ddotaccent'] = 0x1E0A;
+ t['Ddotbelow'] = 0x1E0C;
+ t['Decyrillic'] = 0x0414;
+ t['Deicoptic'] = 0x03EE;
+ t['Delta'] = 0x2206;
+ t['Deltagreek'] = 0x0394;
+ t['Dhook'] = 0x018A;
+ t['Dieresis'] = 0xF6CB;
+ t['DieresisAcute'] = 0xF6CC;
+ t['DieresisGrave'] = 0xF6CD;
+ t['Dieresissmall'] = 0xF7A8;
+ t['Digammagreek'] = 0x03DC;
+ t['Djecyrillic'] = 0x0402;
+ t['Dlinebelow'] = 0x1E0E;
+ t['Dmonospace'] = 0xFF24;
+ t['Dotaccentsmall'] = 0xF6F7;
+ t['Dslash'] = 0x0110;
+ t['Dsmall'] = 0xF764;
+ t['Dtopbar'] = 0x018B;
+ t['Dz'] = 0x01F2;
+ t['Dzcaron'] = 0x01C5;
+ t['Dzeabkhasiancyrillic'] = 0x04E0;
+ t['Dzecyrillic'] = 0x0405;
+ t['Dzhecyrillic'] = 0x040F;
+ t['E'] = 0x0045;
+ t['Eacute'] = 0x00C9;
+ t['Eacutesmall'] = 0xF7E9;
+ t['Ebreve'] = 0x0114;
+ t['Ecaron'] = 0x011A;
+ t['Ecedillabreve'] = 0x1E1C;
+ t['Echarmenian'] = 0x0535;
+ t['Ecircle'] = 0x24BA;
+ t['Ecircumflex'] = 0x00CA;
+ t['Ecircumflexacute'] = 0x1EBE;
+ t['Ecircumflexbelow'] = 0x1E18;
+ t['Ecircumflexdotbelow'] = 0x1EC6;
+ t['Ecircumflexgrave'] = 0x1EC0;
+ t['Ecircumflexhookabove'] = 0x1EC2;
+ t['Ecircumflexsmall'] = 0xF7EA;
+ t['Ecircumflextilde'] = 0x1EC4;
+ t['Ecyrillic'] = 0x0404;
+ t['Edblgrave'] = 0x0204;
+ t['Edieresis'] = 0x00CB;
+ t['Edieresissmall'] = 0xF7EB;
+ t['Edot'] = 0x0116;
+ t['Edotaccent'] = 0x0116;
+ t['Edotbelow'] = 0x1EB8;
+ t['Efcyrillic'] = 0x0424;
+ t['Egrave'] = 0x00C8;
+ t['Egravesmall'] = 0xF7E8;
+ t['Eharmenian'] = 0x0537;
+ t['Ehookabove'] = 0x1EBA;
+ t['Eightroman'] = 0x2167;
+ t['Einvertedbreve'] = 0x0206;
+ t['Eiotifiedcyrillic'] = 0x0464;
+ t['Elcyrillic'] = 0x041B;
+ t['Elevenroman'] = 0x216A;
+ t['Emacron'] = 0x0112;
+ t['Emacronacute'] = 0x1E16;
+ t['Emacrongrave'] = 0x1E14;
+ t['Emcyrillic'] = 0x041C;
+ t['Emonospace'] = 0xFF25;
+ t['Encyrillic'] = 0x041D;
+ t['Endescendercyrillic'] = 0x04A2;
+ t['Eng'] = 0x014A;
+ t['Enghecyrillic'] = 0x04A4;
+ t['Enhookcyrillic'] = 0x04C7;
+ t['Eogonek'] = 0x0118;
+ t['Eopen'] = 0x0190;
+ t['Epsilon'] = 0x0395;
+ t['Epsilontonos'] = 0x0388;
+ t['Ercyrillic'] = 0x0420;
+ t['Ereversed'] = 0x018E;
+ t['Ereversedcyrillic'] = 0x042D;
+ t['Escyrillic'] = 0x0421;
+ t['Esdescendercyrillic'] = 0x04AA;
+ t['Esh'] = 0x01A9;
+ t['Esmall'] = 0xF765;
+ t['Eta'] = 0x0397;
+ t['Etarmenian'] = 0x0538;
+ t['Etatonos'] = 0x0389;
+ t['Eth'] = 0x00D0;
+ t['Ethsmall'] = 0xF7F0;
+ t['Etilde'] = 0x1EBC;
+ t['Etildebelow'] = 0x1E1A;
+ t['Euro'] = 0x20AC;
+ t['Ezh'] = 0x01B7;
+ t['Ezhcaron'] = 0x01EE;
+ t['Ezhreversed'] = 0x01B8;
+ t['F'] = 0x0046;
+ t['Fcircle'] = 0x24BB;
+ t['Fdotaccent'] = 0x1E1E;
+ t['Feharmenian'] = 0x0556;
+ t['Feicoptic'] = 0x03E4;
+ t['Fhook'] = 0x0191;
+ t['Fitacyrillic'] = 0x0472;
+ t['Fiveroman'] = 0x2164;
+ t['Fmonospace'] = 0xFF26;
+ t['Fourroman'] = 0x2163;
+ t['Fsmall'] = 0xF766;
+ t['G'] = 0x0047;
+ t['GBsquare'] = 0x3387;
+ t['Gacute'] = 0x01F4;
+ t['Gamma'] = 0x0393;
+ t['Gammaafrican'] = 0x0194;
+ t['Gangiacoptic'] = 0x03EA;
+ t['Gbreve'] = 0x011E;
+ t['Gcaron'] = 0x01E6;
+ t['Gcedilla'] = 0x0122;
+ t['Gcircle'] = 0x24BC;
+ t['Gcircumflex'] = 0x011C;
+ t['Gcommaaccent'] = 0x0122;
+ t['Gdot'] = 0x0120;
+ t['Gdotaccent'] = 0x0120;
+ t['Gecyrillic'] = 0x0413;
+ t['Ghadarmenian'] = 0x0542;
+ t['Ghemiddlehookcyrillic'] = 0x0494;
+ t['Ghestrokecyrillic'] = 0x0492;
+ t['Gheupturncyrillic'] = 0x0490;
+ t['Ghook'] = 0x0193;
+ t['Gimarmenian'] = 0x0533;
+ t['Gjecyrillic'] = 0x0403;
+ t['Gmacron'] = 0x1E20;
+ t['Gmonospace'] = 0xFF27;
+ t['Grave'] = 0xF6CE;
+ t['Gravesmall'] = 0xF760;
+ t['Gsmall'] = 0xF767;
+ t['Gsmallhook'] = 0x029B;
+ t['Gstroke'] = 0x01E4;
+ t['H'] = 0x0048;
+ t['H18533'] = 0x25CF;
+ t['H18543'] = 0x25AA;
+ t['H18551'] = 0x25AB;
+ t['H22073'] = 0x25A1;
+ t['HPsquare'] = 0x33CB;
+ t['Haabkhasiancyrillic'] = 0x04A8;
+ t['Hadescendercyrillic'] = 0x04B2;
+ t['Hardsigncyrillic'] = 0x042A;
+ t['Hbar'] = 0x0126;
+ t['Hbrevebelow'] = 0x1E2A;
+ t['Hcedilla'] = 0x1E28;
+ t['Hcircle'] = 0x24BD;
+ t['Hcircumflex'] = 0x0124;
+ t['Hdieresis'] = 0x1E26;
+ t['Hdotaccent'] = 0x1E22;
+ t['Hdotbelow'] = 0x1E24;
+ t['Hmonospace'] = 0xFF28;
+ t['Hoarmenian'] = 0x0540;
+ t['Horicoptic'] = 0x03E8;
+ t['Hsmall'] = 0xF768;
+ t['Hungarumlaut'] = 0xF6CF;
+ t['Hungarumlautsmall'] = 0xF6F8;
+ t['Hzsquare'] = 0x3390;
+ t['I'] = 0x0049;
+ t['IAcyrillic'] = 0x042F;
+ t['IJ'] = 0x0132;
+ t['IUcyrillic'] = 0x042E;
+ t['Iacute'] = 0x00CD;
+ t['Iacutesmall'] = 0xF7ED;
+ t['Ibreve'] = 0x012C;
+ t['Icaron'] = 0x01CF;
+ t['Icircle'] = 0x24BE;
+ t['Icircumflex'] = 0x00CE;
+ t['Icircumflexsmall'] = 0xF7EE;
+ t['Icyrillic'] = 0x0406;
+ t['Idblgrave'] = 0x0208;
+ t['Idieresis'] = 0x00CF;
+ t['Idieresisacute'] = 0x1E2E;
+ t['Idieresiscyrillic'] = 0x04E4;
+ t['Idieresissmall'] = 0xF7EF;
+ t['Idot'] = 0x0130;
+ t['Idotaccent'] = 0x0130;
+ t['Idotbelow'] = 0x1ECA;
+ t['Iebrevecyrillic'] = 0x04D6;
+ t['Iecyrillic'] = 0x0415;
+ t['Ifraktur'] = 0x2111;
+ t['Igrave'] = 0x00CC;
+ t['Igravesmall'] = 0xF7EC;
+ t['Ihookabove'] = 0x1EC8;
+ t['Iicyrillic'] = 0x0418;
+ t['Iinvertedbreve'] = 0x020A;
+ t['Iishortcyrillic'] = 0x0419;
+ t['Imacron'] = 0x012A;
+ t['Imacroncyrillic'] = 0x04E2;
+ t['Imonospace'] = 0xFF29;
+ t['Iniarmenian'] = 0x053B;
+ t['Iocyrillic'] = 0x0401;
+ t['Iogonek'] = 0x012E;
+ t['Iota'] = 0x0399;
+ t['Iotaafrican'] = 0x0196;
+ t['Iotadieresis'] = 0x03AA;
+ t['Iotatonos'] = 0x038A;
+ t['Ismall'] = 0xF769;
+ t['Istroke'] = 0x0197;
+ t['Itilde'] = 0x0128;
+ t['Itildebelow'] = 0x1E2C;
+ t['Izhitsacyrillic'] = 0x0474;
+ t['Izhitsadblgravecyrillic'] = 0x0476;
+ t['J'] = 0x004A;
+ t['Jaarmenian'] = 0x0541;
+ t['Jcircle'] = 0x24BF;
+ t['Jcircumflex'] = 0x0134;
+ t['Jecyrillic'] = 0x0408;
+ t['Jheharmenian'] = 0x054B;
+ t['Jmonospace'] = 0xFF2A;
+ t['Jsmall'] = 0xF76A;
+ t['K'] = 0x004B;
+ t['KBsquare'] = 0x3385;
+ t['KKsquare'] = 0x33CD;
+ t['Kabashkircyrillic'] = 0x04A0;
+ t['Kacute'] = 0x1E30;
+ t['Kacyrillic'] = 0x041A;
+ t['Kadescendercyrillic'] = 0x049A;
+ t['Kahookcyrillic'] = 0x04C3;
+ t['Kappa'] = 0x039A;
+ t['Kastrokecyrillic'] = 0x049E;
+ t['Kaverticalstrokecyrillic'] = 0x049C;
+ t['Kcaron'] = 0x01E8;
+ t['Kcedilla'] = 0x0136;
+ t['Kcircle'] = 0x24C0;
+ t['Kcommaaccent'] = 0x0136;
+ t['Kdotbelow'] = 0x1E32;
+ t['Keharmenian'] = 0x0554;
+ t['Kenarmenian'] = 0x053F;
+ t['Khacyrillic'] = 0x0425;
+ t['Kheicoptic'] = 0x03E6;
+ t['Khook'] = 0x0198;
+ t['Kjecyrillic'] = 0x040C;
+ t['Klinebelow'] = 0x1E34;
+ t['Kmonospace'] = 0xFF2B;
+ t['Koppacyrillic'] = 0x0480;
+ t['Koppagreek'] = 0x03DE;
+ t['Ksicyrillic'] = 0x046E;
+ t['Ksmall'] = 0xF76B;
+ t['L'] = 0x004C;
+ t['LJ'] = 0x01C7;
+ t['LL'] = 0xF6BF;
+ t['Lacute'] = 0x0139;
+ t['Lambda'] = 0x039B;
+ t['Lcaron'] = 0x013D;
+ t['Lcedilla'] = 0x013B;
+ t['Lcircle'] = 0x24C1;
+ t['Lcircumflexbelow'] = 0x1E3C;
+ t['Lcommaaccent'] = 0x013B;
+ t['Ldot'] = 0x013F;
+ t['Ldotaccent'] = 0x013F;
+ t['Ldotbelow'] = 0x1E36;
+ t['Ldotbelowmacron'] = 0x1E38;
+ t['Liwnarmenian'] = 0x053C;
+ t['Lj'] = 0x01C8;
+ t['Ljecyrillic'] = 0x0409;
+ t['Llinebelow'] = 0x1E3A;
+ t['Lmonospace'] = 0xFF2C;
+ t['Lslash'] = 0x0141;
+ t['Lslashsmall'] = 0xF6F9;
+ t['Lsmall'] = 0xF76C;
+ t['M'] = 0x004D;
+ t['MBsquare'] = 0x3386;
+ t['Macron'] = 0xF6D0;
+ t['Macronsmall'] = 0xF7AF;
+ t['Macute'] = 0x1E3E;
+ t['Mcircle'] = 0x24C2;
+ t['Mdotaccent'] = 0x1E40;
+ t['Mdotbelow'] = 0x1E42;
+ t['Menarmenian'] = 0x0544;
+ t['Mmonospace'] = 0xFF2D;
+ t['Msmall'] = 0xF76D;
+ t['Mturned'] = 0x019C;
+ t['Mu'] = 0x039C;
+ t['N'] = 0x004E;
+ t['NJ'] = 0x01CA;
+ t['Nacute'] = 0x0143;
+ t['Ncaron'] = 0x0147;
+ t['Ncedilla'] = 0x0145;
+ t['Ncircle'] = 0x24C3;
+ t['Ncircumflexbelow'] = 0x1E4A;
+ t['Ncommaaccent'] = 0x0145;
+ t['Ndotaccent'] = 0x1E44;
+ t['Ndotbelow'] = 0x1E46;
+ t['Nhookleft'] = 0x019D;
+ t['Nineroman'] = 0x2168;
+ t['Nj'] = 0x01CB;
+ t['Njecyrillic'] = 0x040A;
+ t['Nlinebelow'] = 0x1E48;
+ t['Nmonospace'] = 0xFF2E;
+ t['Nowarmenian'] = 0x0546;
+ t['Nsmall'] = 0xF76E;
+ t['Ntilde'] = 0x00D1;
+ t['Ntildesmall'] = 0xF7F1;
+ t['Nu'] = 0x039D;
+ t['O'] = 0x004F;
+ t['OE'] = 0x0152;
+ t['OEsmall'] = 0xF6FA;
+ t['Oacute'] = 0x00D3;
+ t['Oacutesmall'] = 0xF7F3;
+ t['Obarredcyrillic'] = 0x04E8;
+ t['Obarreddieresiscyrillic'] = 0x04EA;
+ t['Obreve'] = 0x014E;
+ t['Ocaron'] = 0x01D1;
+ t['Ocenteredtilde'] = 0x019F;
+ t['Ocircle'] = 0x24C4;
+ t['Ocircumflex'] = 0x00D4;
+ t['Ocircumflexacute'] = 0x1ED0;
+ t['Ocircumflexdotbelow'] = 0x1ED8;
+ t['Ocircumflexgrave'] = 0x1ED2;
+ t['Ocircumflexhookabove'] = 0x1ED4;
+ t['Ocircumflexsmall'] = 0xF7F4;
+ t['Ocircumflextilde'] = 0x1ED6;
+ t['Ocyrillic'] = 0x041E;
+ t['Odblacute'] = 0x0150;
+ t['Odblgrave'] = 0x020C;
+ t['Odieresis'] = 0x00D6;
+ t['Odieresiscyrillic'] = 0x04E6;
+ t['Odieresissmall'] = 0xF7F6;
+ t['Odotbelow'] = 0x1ECC;
+ t['Ogoneksmall'] = 0xF6FB;
+ t['Ograve'] = 0x00D2;
+ t['Ogravesmall'] = 0xF7F2;
+ t['Oharmenian'] = 0x0555;
+ t['Ohm'] = 0x2126;
+ t['Ohookabove'] = 0x1ECE;
+ t['Ohorn'] = 0x01A0;
+ t['Ohornacute'] = 0x1EDA;
+ t['Ohorndotbelow'] = 0x1EE2;
+ t['Ohorngrave'] = 0x1EDC;
+ t['Ohornhookabove'] = 0x1EDE;
+ t['Ohorntilde'] = 0x1EE0;
+ t['Ohungarumlaut'] = 0x0150;
+ t['Oi'] = 0x01A2;
+ t['Oinvertedbreve'] = 0x020E;
+ t['Omacron'] = 0x014C;
+ t['Omacronacute'] = 0x1E52;
+ t['Omacrongrave'] = 0x1E50;
+ t['Omega'] = 0x2126;
+ t['Omegacyrillic'] = 0x0460;
+ t['Omegagreek'] = 0x03A9;
+ t['Omegaroundcyrillic'] = 0x047A;
+ t['Omegatitlocyrillic'] = 0x047C;
+ t['Omegatonos'] = 0x038F;
+ t['Omicron'] = 0x039F;
+ t['Omicrontonos'] = 0x038C;
+ t['Omonospace'] = 0xFF2F;
+ t['Oneroman'] = 0x2160;
+ t['Oogonek'] = 0x01EA;
+ t['Oogonekmacron'] = 0x01EC;
+ t['Oopen'] = 0x0186;
+ t['Oslash'] = 0x00D8;
+ t['Oslashacute'] = 0x01FE;
+ t['Oslashsmall'] = 0xF7F8;
+ t['Osmall'] = 0xF76F;
+ t['Ostrokeacute'] = 0x01FE;
+ t['Otcyrillic'] = 0x047E;
+ t['Otilde'] = 0x00D5;
+ t['Otildeacute'] = 0x1E4C;
+ t['Otildedieresis'] = 0x1E4E;
+ t['Otildesmall'] = 0xF7F5;
+ t['P'] = 0x0050;
+ t['Pacute'] = 0x1E54;
+ t['Pcircle'] = 0x24C5;
+ t['Pdotaccent'] = 0x1E56;
+ t['Pecyrillic'] = 0x041F;
+ t['Peharmenian'] = 0x054A;
+ t['Pemiddlehookcyrillic'] = 0x04A6;
+ t['Phi'] = 0x03A6;
+ t['Phook'] = 0x01A4;
+ t['Pi'] = 0x03A0;
+ t['Piwrarmenian'] = 0x0553;
+ t['Pmonospace'] = 0xFF30;
+ t['Psi'] = 0x03A8;
+ t['Psicyrillic'] = 0x0470;
+ t['Psmall'] = 0xF770;
+ t['Q'] = 0x0051;
+ t['Qcircle'] = 0x24C6;
+ t['Qmonospace'] = 0xFF31;
+ t['Qsmall'] = 0xF771;
+ t['R'] = 0x0052;
+ t['Raarmenian'] = 0x054C;
+ t['Racute'] = 0x0154;
+ t['Rcaron'] = 0x0158;
+ t['Rcedilla'] = 0x0156;
+ t['Rcircle'] = 0x24C7;
+ t['Rcommaaccent'] = 0x0156;
+ t['Rdblgrave'] = 0x0210;
+ t['Rdotaccent'] = 0x1E58;
+ t['Rdotbelow'] = 0x1E5A;
+ t['Rdotbelowmacron'] = 0x1E5C;
+ t['Reharmenian'] = 0x0550;
+ t['Rfraktur'] = 0x211C;
+ t['Rho'] = 0x03A1;
+ t['Ringsmall'] = 0xF6FC;
+ t['Rinvertedbreve'] = 0x0212;
+ t['Rlinebelow'] = 0x1E5E;
+ t['Rmonospace'] = 0xFF32;
+ t['Rsmall'] = 0xF772;
+ t['Rsmallinverted'] = 0x0281;
+ t['Rsmallinvertedsuperior'] = 0x02B6;
+ t['S'] = 0x0053;
+ t['SF010000'] = 0x250C;
+ t['SF020000'] = 0x2514;
+ t['SF030000'] = 0x2510;
+ t['SF040000'] = 0x2518;
+ t['SF050000'] = 0x253C;
+ t['SF060000'] = 0x252C;
+ t['SF070000'] = 0x2534;
+ t['SF080000'] = 0x251C;
+ t['SF090000'] = 0x2524;
+ t['SF100000'] = 0x2500;
+ t['SF110000'] = 0x2502;
+ t['SF190000'] = 0x2561;
+ t['SF200000'] = 0x2562;
+ t['SF210000'] = 0x2556;
+ t['SF220000'] = 0x2555;
+ t['SF230000'] = 0x2563;
+ t['SF240000'] = 0x2551;
+ t['SF250000'] = 0x2557;
+ t['SF260000'] = 0x255D;
+ t['SF270000'] = 0x255C;
+ t['SF280000'] = 0x255B;
+ t['SF360000'] = 0x255E;
+ t['SF370000'] = 0x255F;
+ t['SF380000'] = 0x255A;
+ t['SF390000'] = 0x2554;
+ t['SF400000'] = 0x2569;
+ t['SF410000'] = 0x2566;
+ t['SF420000'] = 0x2560;
+ t['SF430000'] = 0x2550;
+ t['SF440000'] = 0x256C;
+ t['SF450000'] = 0x2567;
+ t['SF460000'] = 0x2568;
+ t['SF470000'] = 0x2564;
+ t['SF480000'] = 0x2565;
+ t['SF490000'] = 0x2559;
+ t['SF500000'] = 0x2558;
+ t['SF510000'] = 0x2552;
+ t['SF520000'] = 0x2553;
+ t['SF530000'] = 0x256B;
+ t['SF540000'] = 0x256A;
+ t['Sacute'] = 0x015A;
+ t['Sacutedotaccent'] = 0x1E64;
+ t['Sampigreek'] = 0x03E0;
+ t['Scaron'] = 0x0160;
+ t['Scarondotaccent'] = 0x1E66;
+ t['Scaronsmall'] = 0xF6FD;
+ t['Scedilla'] = 0x015E;
+ t['Schwa'] = 0x018F;
+ t['Schwacyrillic'] = 0x04D8;
+ t['Schwadieresiscyrillic'] = 0x04DA;
+ t['Scircle'] = 0x24C8;
+ t['Scircumflex'] = 0x015C;
+ t['Scommaaccent'] = 0x0218;
+ t['Sdotaccent'] = 0x1E60;
+ t['Sdotbelow'] = 0x1E62;
+ t['Sdotbelowdotaccent'] = 0x1E68;
+ t['Seharmenian'] = 0x054D;
+ t['Sevenroman'] = 0x2166;
+ t['Shaarmenian'] = 0x0547;
+ t['Shacyrillic'] = 0x0428;
+ t['Shchacyrillic'] = 0x0429;
+ t['Sheicoptic'] = 0x03E2;
+ t['Shhacyrillic'] = 0x04BA;
+ t['Shimacoptic'] = 0x03EC;
+ t['Sigma'] = 0x03A3;
+ t['Sixroman'] = 0x2165;
+ t['Smonospace'] = 0xFF33;
+ t['Softsigncyrillic'] = 0x042C;
+ t['Ssmall'] = 0xF773;
+ t['Stigmagreek'] = 0x03DA;
+ t['T'] = 0x0054;
+ t['Tau'] = 0x03A4;
+ t['Tbar'] = 0x0166;
+ t['Tcaron'] = 0x0164;
+ t['Tcedilla'] = 0x0162;
+ t['Tcircle'] = 0x24C9;
+ t['Tcircumflexbelow'] = 0x1E70;
+ t['Tcommaaccent'] = 0x0162;
+ t['Tdotaccent'] = 0x1E6A;
+ t['Tdotbelow'] = 0x1E6C;
+ t['Tecyrillic'] = 0x0422;
+ t['Tedescendercyrillic'] = 0x04AC;
+ t['Tenroman'] = 0x2169;
+ t['Tetsecyrillic'] = 0x04B4;
+ t['Theta'] = 0x0398;
+ t['Thook'] = 0x01AC;
+ t['Thorn'] = 0x00DE;
+ t['Thornsmall'] = 0xF7FE;
+ t['Threeroman'] = 0x2162;
+ t['Tildesmall'] = 0xF6FE;
+ t['Tiwnarmenian'] = 0x054F;
+ t['Tlinebelow'] = 0x1E6E;
+ t['Tmonospace'] = 0xFF34;
+ t['Toarmenian'] = 0x0539;
+ t['Tonefive'] = 0x01BC;
+ t['Tonesix'] = 0x0184;
+ t['Tonetwo'] = 0x01A7;
+ t['Tretroflexhook'] = 0x01AE;
+ t['Tsecyrillic'] = 0x0426;
+ t['Tshecyrillic'] = 0x040B;
+ t['Tsmall'] = 0xF774;
+ t['Twelveroman'] = 0x216B;
+ t['Tworoman'] = 0x2161;
+ t['U'] = 0x0055;
+ t['Uacute'] = 0x00DA;
+ t['Uacutesmall'] = 0xF7FA;
+ t['Ubreve'] = 0x016C;
+ t['Ucaron'] = 0x01D3;
+ t['Ucircle'] = 0x24CA;
+ t['Ucircumflex'] = 0x00DB;
+ t['Ucircumflexbelow'] = 0x1E76;
+ t['Ucircumflexsmall'] = 0xF7FB;
+ t['Ucyrillic'] = 0x0423;
+ t['Udblacute'] = 0x0170;
+ t['Udblgrave'] = 0x0214;
+ t['Udieresis'] = 0x00DC;
+ t['Udieresisacute'] = 0x01D7;
+ t['Udieresisbelow'] = 0x1E72;
+ t['Udieresiscaron'] = 0x01D9;
+ t['Udieresiscyrillic'] = 0x04F0;
+ t['Udieresisgrave'] = 0x01DB;
+ t['Udieresismacron'] = 0x01D5;
+ t['Udieresissmall'] = 0xF7FC;
+ t['Udotbelow'] = 0x1EE4;
+ t['Ugrave'] = 0x00D9;
+ t['Ugravesmall'] = 0xF7F9;
+ t['Uhookabove'] = 0x1EE6;
+ t['Uhorn'] = 0x01AF;
+ t['Uhornacute'] = 0x1EE8;
+ t['Uhorndotbelow'] = 0x1EF0;
+ t['Uhorngrave'] = 0x1EEA;
+ t['Uhornhookabove'] = 0x1EEC;
+ t['Uhorntilde'] = 0x1EEE;
+ t['Uhungarumlaut'] = 0x0170;
+ t['Uhungarumlautcyrillic'] = 0x04F2;
+ t['Uinvertedbreve'] = 0x0216;
+ t['Ukcyrillic'] = 0x0478;
+ t['Umacron'] = 0x016A;
+ t['Umacroncyrillic'] = 0x04EE;
+ t['Umacrondieresis'] = 0x1E7A;
+ t['Umonospace'] = 0xFF35;
+ t['Uogonek'] = 0x0172;
+ t['Upsilon'] = 0x03A5;
+ t['Upsilon1'] = 0x03D2;
+ t['Upsilonacutehooksymbolgreek'] = 0x03D3;
+ t['Upsilonafrican'] = 0x01B1;
+ t['Upsilondieresis'] = 0x03AB;
+ t['Upsilondieresishooksymbolgreek'] = 0x03D4;
+ t['Upsilonhooksymbol'] = 0x03D2;
+ t['Upsilontonos'] = 0x038E;
+ t['Uring'] = 0x016E;
+ t['Ushortcyrillic'] = 0x040E;
+ t['Usmall'] = 0xF775;
+ t['Ustraightcyrillic'] = 0x04AE;
+ t['Ustraightstrokecyrillic'] = 0x04B0;
+ t['Utilde'] = 0x0168;
+ t['Utildeacute'] = 0x1E78;
+ t['Utildebelow'] = 0x1E74;
+ t['V'] = 0x0056;
+ t['Vcircle'] = 0x24CB;
+ t['Vdotbelow'] = 0x1E7E;
+ t['Vecyrillic'] = 0x0412;
+ t['Vewarmenian'] = 0x054E;
+ t['Vhook'] = 0x01B2;
+ t['Vmonospace'] = 0xFF36;
+ t['Voarmenian'] = 0x0548;
+ t['Vsmall'] = 0xF776;
+ t['Vtilde'] = 0x1E7C;
+ t['W'] = 0x0057;
+ t['Wacute'] = 0x1E82;
+ t['Wcircle'] = 0x24CC;
+ t['Wcircumflex'] = 0x0174;
+ t['Wdieresis'] = 0x1E84;
+ t['Wdotaccent'] = 0x1E86;
+ t['Wdotbelow'] = 0x1E88;
+ t['Wgrave'] = 0x1E80;
+ t['Wmonospace'] = 0xFF37;
+ t['Wsmall'] = 0xF777;
+ t['X'] = 0x0058;
+ t['Xcircle'] = 0x24CD;
+ t['Xdieresis'] = 0x1E8C;
+ t['Xdotaccent'] = 0x1E8A;
+ t['Xeharmenian'] = 0x053D;
+ t['Xi'] = 0x039E;
+ t['Xmonospace'] = 0xFF38;
+ t['Xsmall'] = 0xF778;
+ t['Y'] = 0x0059;
+ t['Yacute'] = 0x00DD;
+ t['Yacutesmall'] = 0xF7FD;
+ t['Yatcyrillic'] = 0x0462;
+ t['Ycircle'] = 0x24CE;
+ t['Ycircumflex'] = 0x0176;
+ t['Ydieresis'] = 0x0178;
+ t['Ydieresissmall'] = 0xF7FF;
+ t['Ydotaccent'] = 0x1E8E;
+ t['Ydotbelow'] = 0x1EF4;
+ t['Yericyrillic'] = 0x042B;
+ t['Yerudieresiscyrillic'] = 0x04F8;
+ t['Ygrave'] = 0x1EF2;
+ t['Yhook'] = 0x01B3;
+ t['Yhookabove'] = 0x1EF6;
+ t['Yiarmenian'] = 0x0545;
+ t['Yicyrillic'] = 0x0407;
+ t['Yiwnarmenian'] = 0x0552;
+ t['Ymonospace'] = 0xFF39;
+ t['Ysmall'] = 0xF779;
+ t['Ytilde'] = 0x1EF8;
+ t['Yusbigcyrillic'] = 0x046A;
+ t['Yusbigiotifiedcyrillic'] = 0x046C;
+ t['Yuslittlecyrillic'] = 0x0466;
+ t['Yuslittleiotifiedcyrillic'] = 0x0468;
+ t['Z'] = 0x005A;
+ t['Zaarmenian'] = 0x0536;
+ t['Zacute'] = 0x0179;
+ t['Zcaron'] = 0x017D;
+ t['Zcaronsmall'] = 0xF6FF;
+ t['Zcircle'] = 0x24CF;
+ t['Zcircumflex'] = 0x1E90;
+ t['Zdot'] = 0x017B;
+ t['Zdotaccent'] = 0x017B;
+ t['Zdotbelow'] = 0x1E92;
+ t['Zecyrillic'] = 0x0417;
+ t['Zedescendercyrillic'] = 0x0498;
+ t['Zedieresiscyrillic'] = 0x04DE;
+ t['Zeta'] = 0x0396;
+ t['Zhearmenian'] = 0x053A;
+ t['Zhebrevecyrillic'] = 0x04C1;
+ t['Zhecyrillic'] = 0x0416;
+ t['Zhedescendercyrillic'] = 0x0496;
+ t['Zhedieresiscyrillic'] = 0x04DC;
+ t['Zlinebelow'] = 0x1E94;
+ t['Zmonospace'] = 0xFF3A;
+ t['Zsmall'] = 0xF77A;
+ t['Zstroke'] = 0x01B5;
+ t['a'] = 0x0061;
+ t['aabengali'] = 0x0986;
+ t['aacute'] = 0x00E1;
+ t['aadeva'] = 0x0906;
+ t['aagujarati'] = 0x0A86;
+ t['aagurmukhi'] = 0x0A06;
+ t['aamatragurmukhi'] = 0x0A3E;
+ t['aarusquare'] = 0x3303;
+ t['aavowelsignbengali'] = 0x09BE;
+ t['aavowelsigndeva'] = 0x093E;
+ t['aavowelsigngujarati'] = 0x0ABE;
+ t['abbreviationmarkarmenian'] = 0x055F;
+ t['abbreviationsigndeva'] = 0x0970;
+ t['abengali'] = 0x0985;
+ t['abopomofo'] = 0x311A;
+ t['abreve'] = 0x0103;
+ t['abreveacute'] = 0x1EAF;
+ t['abrevecyrillic'] = 0x04D1;
+ t['abrevedotbelow'] = 0x1EB7;
+ t['abrevegrave'] = 0x1EB1;
+ t['abrevehookabove'] = 0x1EB3;
+ t['abrevetilde'] = 0x1EB5;
+ t['acaron'] = 0x01CE;
+ t['acircle'] = 0x24D0;
+ t['acircumflex'] = 0x00E2;
+ t['acircumflexacute'] = 0x1EA5;
+ t['acircumflexdotbelow'] = 0x1EAD;
+ t['acircumflexgrave'] = 0x1EA7;
+ t['acircumflexhookabove'] = 0x1EA9;
+ t['acircumflextilde'] = 0x1EAB;
+ t['acute'] = 0x00B4;
+ t['acutebelowcmb'] = 0x0317;
+ t['acutecmb'] = 0x0301;
+ t['acutecomb'] = 0x0301;
+ t['acutedeva'] = 0x0954;
+ t['acutelowmod'] = 0x02CF;
+ t['acutetonecmb'] = 0x0341;
+ t['acyrillic'] = 0x0430;
+ t['adblgrave'] = 0x0201;
+ t['addakgurmukhi'] = 0x0A71;
+ t['adeva'] = 0x0905;
+ t['adieresis'] = 0x00E4;
+ t['adieresiscyrillic'] = 0x04D3;
+ t['adieresismacron'] = 0x01DF;
+ t['adotbelow'] = 0x1EA1;
+ t['adotmacron'] = 0x01E1;
+ t['ae'] = 0x00E6;
+ t['aeacute'] = 0x01FD;
+ t['aekorean'] = 0x3150;
+ t['aemacron'] = 0x01E3;
+ t['afii00208'] = 0x2015;
+ t['afii08941'] = 0x20A4;
+ t['afii10017'] = 0x0410;
+ t['afii10018'] = 0x0411;
+ t['afii10019'] = 0x0412;
+ t['afii10020'] = 0x0413;
+ t['afii10021'] = 0x0414;
+ t['afii10022'] = 0x0415;
+ t['afii10023'] = 0x0401;
+ t['afii10024'] = 0x0416;
+ t['afii10025'] = 0x0417;
+ t['afii10026'] = 0x0418;
+ t['afii10027'] = 0x0419;
+ t['afii10028'] = 0x041A;
+ t['afii10029'] = 0x041B;
+ t['afii10030'] = 0x041C;
+ t['afii10031'] = 0x041D;
+ t['afii10032'] = 0x041E;
+ t['afii10033'] = 0x041F;
+ t['afii10034'] = 0x0420;
+ t['afii10035'] = 0x0421;
+ t['afii10036'] = 0x0422;
+ t['afii10037'] = 0x0423;
+ t['afii10038'] = 0x0424;
+ t['afii10039'] = 0x0425;
+ t['afii10040'] = 0x0426;
+ t['afii10041'] = 0x0427;
+ t['afii10042'] = 0x0428;
+ t['afii10043'] = 0x0429;
+ t['afii10044'] = 0x042A;
+ t['afii10045'] = 0x042B;
+ t['afii10046'] = 0x042C;
+ t['afii10047'] = 0x042D;
+ t['afii10048'] = 0x042E;
+ t['afii10049'] = 0x042F;
+ t['afii10050'] = 0x0490;
+ t['afii10051'] = 0x0402;
+ t['afii10052'] = 0x0403;
+ t['afii10053'] = 0x0404;
+ t['afii10054'] = 0x0405;
+ t['afii10055'] = 0x0406;
+ t['afii10056'] = 0x0407;
+ t['afii10057'] = 0x0408;
+ t['afii10058'] = 0x0409;
+ t['afii10059'] = 0x040A;
+ t['afii10060'] = 0x040B;
+ t['afii10061'] = 0x040C;
+ t['afii10062'] = 0x040E;
+ t['afii10063'] = 0xF6C4;
+ t['afii10064'] = 0xF6C5;
+ t['afii10065'] = 0x0430;
+ t['afii10066'] = 0x0431;
+ t['afii10067'] = 0x0432;
+ t['afii10068'] = 0x0433;
+ t['afii10069'] = 0x0434;
+ t['afii10070'] = 0x0435;
+ t['afii10071'] = 0x0451;
+ t['afii10072'] = 0x0436;
+ t['afii10073'] = 0x0437;
+ t['afii10074'] = 0x0438;
+ t['afii10075'] = 0x0439;
+ t['afii10076'] = 0x043A;
+ t['afii10077'] = 0x043B;
+ t['afii10078'] = 0x043C;
+ t['afii10079'] = 0x043D;
+ t['afii10080'] = 0x043E;
+ t['afii10081'] = 0x043F;
+ t['afii10082'] = 0x0440;
+ t['afii10083'] = 0x0441;
+ t['afii10084'] = 0x0442;
+ t['afii10085'] = 0x0443;
+ t['afii10086'] = 0x0444;
+ t['afii10087'] = 0x0445;
+ t['afii10088'] = 0x0446;
+ t['afii10089'] = 0x0447;
+ t['afii10090'] = 0x0448;
+ t['afii10091'] = 0x0449;
+ t['afii10092'] = 0x044A;
+ t['afii10093'] = 0x044B;
+ t['afii10094'] = 0x044C;
+ t['afii10095'] = 0x044D;
+ t['afii10096'] = 0x044E;
+ t['afii10097'] = 0x044F;
+ t['afii10098'] = 0x0491;
+ t['afii10099'] = 0x0452;
+ t['afii10100'] = 0x0453;
+ t['afii10101'] = 0x0454;
+ t['afii10102'] = 0x0455;
+ t['afii10103'] = 0x0456;
+ t['afii10104'] = 0x0457;
+ t['afii10105'] = 0x0458;
+ t['afii10106'] = 0x0459;
+ t['afii10107'] = 0x045A;
+ t['afii10108'] = 0x045B;
+ t['afii10109'] = 0x045C;
+ t['afii10110'] = 0x045E;
+ t['afii10145'] = 0x040F;
+ t['afii10146'] = 0x0462;
+ t['afii10147'] = 0x0472;
+ t['afii10148'] = 0x0474;
+ t['afii10192'] = 0xF6C6;
+ t['afii10193'] = 0x045F;
+ t['afii10194'] = 0x0463;
+ t['afii10195'] = 0x0473;
+ t['afii10196'] = 0x0475;
+ t['afii10831'] = 0xF6C7;
+ t['afii10832'] = 0xF6C8;
+ t['afii10846'] = 0x04D9;
+ t['afii299'] = 0x200E;
+ t['afii300'] = 0x200F;
+ t['afii301'] = 0x200D;
+ t['afii57381'] = 0x066A;
+ t['afii57388'] = 0x060C;
+ t['afii57392'] = 0x0660;
+ t['afii57393'] = 0x0661;
+ t['afii57394'] = 0x0662;
+ t['afii57395'] = 0x0663;
+ t['afii57396'] = 0x0664;
+ t['afii57397'] = 0x0665;
+ t['afii57398'] = 0x0666;
+ t['afii57399'] = 0x0667;
+ t['afii57400'] = 0x0668;
+ t['afii57401'] = 0x0669;
+ t['afii57403'] = 0x061B;
+ t['afii57407'] = 0x061F;
+ t['afii57409'] = 0x0621;
+ t['afii57410'] = 0x0622;
+ t['afii57411'] = 0x0623;
+ t['afii57412'] = 0x0624;
+ t['afii57413'] = 0x0625;
+ t['afii57414'] = 0x0626;
+ t['afii57415'] = 0x0627;
+ t['afii57416'] = 0x0628;
+ t['afii57417'] = 0x0629;
+ t['afii57418'] = 0x062A;
+ t['afii57419'] = 0x062B;
+ t['afii57420'] = 0x062C;
+ t['afii57421'] = 0x062D;
+ t['afii57422'] = 0x062E;
+ t['afii57423'] = 0x062F;
+ t['afii57424'] = 0x0630;
+ t['afii57425'] = 0x0631;
+ t['afii57426'] = 0x0632;
+ t['afii57427'] = 0x0633;
+ t['afii57428'] = 0x0634;
+ t['afii57429'] = 0x0635;
+ t['afii57430'] = 0x0636;
+ t['afii57431'] = 0x0637;
+ t['afii57432'] = 0x0638;
+ t['afii57433'] = 0x0639;
+ t['afii57434'] = 0x063A;
+ t['afii57440'] = 0x0640;
+ t['afii57441'] = 0x0641;
+ t['afii57442'] = 0x0642;
+ t['afii57443'] = 0x0643;
+ t['afii57444'] = 0x0644;
+ t['afii57445'] = 0x0645;
+ t['afii57446'] = 0x0646;
+ t['afii57448'] = 0x0648;
+ t['afii57449'] = 0x0649;
+ t['afii57450'] = 0x064A;
+ t['afii57451'] = 0x064B;
+ t['afii57452'] = 0x064C;
+ t['afii57453'] = 0x064D;
+ t['afii57454'] = 0x064E;
+ t['afii57455'] = 0x064F;
+ t['afii57456'] = 0x0650;
+ t['afii57457'] = 0x0651;
+ t['afii57458'] = 0x0652;
+ t['afii57470'] = 0x0647;
+ t['afii57505'] = 0x06A4;
+ t['afii57506'] = 0x067E;
+ t['afii57507'] = 0x0686;
+ t['afii57508'] = 0x0698;
+ t['afii57509'] = 0x06AF;
+ t['afii57511'] = 0x0679;
+ t['afii57512'] = 0x0688;
+ t['afii57513'] = 0x0691;
+ t['afii57514'] = 0x06BA;
+ t['afii57519'] = 0x06D2;
+ t['afii57534'] = 0x06D5;
+ t['afii57636'] = 0x20AA;
+ t['afii57645'] = 0x05BE;
+ t['afii57658'] = 0x05C3;
+ t['afii57664'] = 0x05D0;
+ t['afii57665'] = 0x05D1;
+ t['afii57666'] = 0x05D2;
+ t['afii57667'] = 0x05D3;
+ t['afii57668'] = 0x05D4;
+ t['afii57669'] = 0x05D5;
+ t['afii57670'] = 0x05D6;
+ t['afii57671'] = 0x05D7;
+ t['afii57672'] = 0x05D8;
+ t['afii57673'] = 0x05D9;
+ t['afii57674'] = 0x05DA;
+ t['afii57675'] = 0x05DB;
+ t['afii57676'] = 0x05DC;
+ t['afii57677'] = 0x05DD;
+ t['afii57678'] = 0x05DE;
+ t['afii57679'] = 0x05DF;
+ t['afii57680'] = 0x05E0;
+ t['afii57681'] = 0x05E1;
+ t['afii57682'] = 0x05E2;
+ t['afii57683'] = 0x05E3;
+ t['afii57684'] = 0x05E4;
+ t['afii57685'] = 0x05E5;
+ t['afii57686'] = 0x05E6;
+ t['afii57687'] = 0x05E7;
+ t['afii57688'] = 0x05E8;
+ t['afii57689'] = 0x05E9;
+ t['afii57690'] = 0x05EA;
+ t['afii57694'] = 0xFB2A;
+ t['afii57695'] = 0xFB2B;
+ t['afii57700'] = 0xFB4B;
+ t['afii57705'] = 0xFB1F;
+ t['afii57716'] = 0x05F0;
+ t['afii57717'] = 0x05F1;
+ t['afii57718'] = 0x05F2;
+ t['afii57723'] = 0xFB35;
+ t['afii57793'] = 0x05B4;
+ t['afii57794'] = 0x05B5;
+ t['afii57795'] = 0x05B6;
+ t['afii57796'] = 0x05BB;
+ t['afii57797'] = 0x05B8;
+ t['afii57798'] = 0x05B7;
+ t['afii57799'] = 0x05B0;
+ t['afii57800'] = 0x05B2;
+ t['afii57801'] = 0x05B1;
+ t['afii57802'] = 0x05B3;
+ t['afii57803'] = 0x05C2;
+ t['afii57804'] = 0x05C1;
+ t['afii57806'] = 0x05B9;
+ t['afii57807'] = 0x05BC;
+ t['afii57839'] = 0x05BD;
+ t['afii57841'] = 0x05BF;
+ t['afii57842'] = 0x05C0;
+ t['afii57929'] = 0x02BC;
+ t['afii61248'] = 0x2105;
+ t['afii61289'] = 0x2113;
+ t['afii61352'] = 0x2116;
+ t['afii61573'] = 0x202C;
+ t['afii61574'] = 0x202D;
+ t['afii61575'] = 0x202E;
+ t['afii61664'] = 0x200C;
+ t['afii63167'] = 0x066D;
+ t['afii64937'] = 0x02BD;
+ t['agrave'] = 0x00E0;
+ t['agujarati'] = 0x0A85;
+ t['agurmukhi'] = 0x0A05;
+ t['ahiragana'] = 0x3042;
+ t['ahookabove'] = 0x1EA3;
+ t['aibengali'] = 0x0990;
+ t['aibopomofo'] = 0x311E;
+ t['aideva'] = 0x0910;
+ t['aiecyrillic'] = 0x04D5;
+ t['aigujarati'] = 0x0A90;
+ t['aigurmukhi'] = 0x0A10;
+ t['aimatragurmukhi'] = 0x0A48;
+ t['ainarabic'] = 0x0639;
+ t['ainfinalarabic'] = 0xFECA;
+ t['aininitialarabic'] = 0xFECB;
+ t['ainmedialarabic'] = 0xFECC;
+ t['ainvertedbreve'] = 0x0203;
+ t['aivowelsignbengali'] = 0x09C8;
+ t['aivowelsigndeva'] = 0x0948;
+ t['aivowelsigngujarati'] = 0x0AC8;
+ t['akatakana'] = 0x30A2;
+ t['akatakanahalfwidth'] = 0xFF71;
+ t['akorean'] = 0x314F;
+ t['alef'] = 0x05D0;
+ t['alefarabic'] = 0x0627;
+ t['alefdageshhebrew'] = 0xFB30;
+ t['aleffinalarabic'] = 0xFE8E;
+ t['alefhamzaabovearabic'] = 0x0623;
+ t['alefhamzaabovefinalarabic'] = 0xFE84;
+ t['alefhamzabelowarabic'] = 0x0625;
+ t['alefhamzabelowfinalarabic'] = 0xFE88;
+ t['alefhebrew'] = 0x05D0;
+ t['aleflamedhebrew'] = 0xFB4F;
+ t['alefmaddaabovearabic'] = 0x0622;
+ t['alefmaddaabovefinalarabic'] = 0xFE82;
+ t['alefmaksuraarabic'] = 0x0649;
+ t['alefmaksurafinalarabic'] = 0xFEF0;
+ t['alefmaksurainitialarabic'] = 0xFEF3;
+ t['alefmaksuramedialarabic'] = 0xFEF4;
+ t['alefpatahhebrew'] = 0xFB2E;
+ t['alefqamatshebrew'] = 0xFB2F;
+ t['aleph'] = 0x2135;
+ t['allequal'] = 0x224C;
+ t['alpha'] = 0x03B1;
+ t['alphatonos'] = 0x03AC;
+ t['amacron'] = 0x0101;
+ t['amonospace'] = 0xFF41;
+ t['ampersand'] = 0x0026;
+ t['ampersandmonospace'] = 0xFF06;
+ t['ampersandsmall'] = 0xF726;
+ t['amsquare'] = 0x33C2;
+ t['anbopomofo'] = 0x3122;
+ t['angbopomofo'] = 0x3124;
+ t['angbracketleft'] = 0x3008;
+ t['angbracketright'] = 0x3009;
+ t['angkhankhuthai'] = 0x0E5A;
+ t['angle'] = 0x2220;
+ t['anglebracketleft'] = 0x3008;
+ t['anglebracketleftvertical'] = 0xFE3F;
+ t['anglebracketright'] = 0x3009;
+ t['anglebracketrightvertical'] = 0xFE40;
+ t['angleleft'] = 0x2329;
+ t['angleright'] = 0x232A;
+ t['angstrom'] = 0x212B;
+ t['anoteleia'] = 0x0387;
+ t['anudattadeva'] = 0x0952;
+ t['anusvarabengali'] = 0x0982;
+ t['anusvaradeva'] = 0x0902;
+ t['anusvaragujarati'] = 0x0A82;
+ t['aogonek'] = 0x0105;
+ t['apaatosquare'] = 0x3300;
+ t['aparen'] = 0x249C;
+ t['apostrophearmenian'] = 0x055A;
+ t['apostrophemod'] = 0x02BC;
+ t['apple'] = 0xF8FF;
+ t['approaches'] = 0x2250;
+ t['approxequal'] = 0x2248;
+ t['approxequalorimage'] = 0x2252;
+ t['approximatelyequal'] = 0x2245;
+ t['araeaekorean'] = 0x318E;
+ t['araeakorean'] = 0x318D;
+ t['arc'] = 0x2312;
+ t['arighthalfring'] = 0x1E9A;
+ t['aring'] = 0x00E5;
+ t['aringacute'] = 0x01FB;
+ t['aringbelow'] = 0x1E01;
+ t['arrowboth'] = 0x2194;
+ t['arrowdashdown'] = 0x21E3;
+ t['arrowdashleft'] = 0x21E0;
+ t['arrowdashright'] = 0x21E2;
+ t['arrowdashup'] = 0x21E1;
+ t['arrowdblboth'] = 0x21D4;
+ t['arrowdbldown'] = 0x21D3;
+ t['arrowdblleft'] = 0x21D0;
+ t['arrowdblright'] = 0x21D2;
+ t['arrowdblup'] = 0x21D1;
+ t['arrowdown'] = 0x2193;
+ t['arrowdownleft'] = 0x2199;
+ t['arrowdownright'] = 0x2198;
+ t['arrowdownwhite'] = 0x21E9;
+ t['arrowheaddownmod'] = 0x02C5;
+ t['arrowheadleftmod'] = 0x02C2;
+ t['arrowheadrightmod'] = 0x02C3;
+ t['arrowheadupmod'] = 0x02C4;
+ t['arrowhorizex'] = 0xF8E7;
+ t['arrowleft'] = 0x2190;
+ t['arrowleftdbl'] = 0x21D0;
+ t['arrowleftdblstroke'] = 0x21CD;
+ t['arrowleftoverright'] = 0x21C6;
+ t['arrowleftwhite'] = 0x21E6;
+ t['arrowright'] = 0x2192;
+ t['arrowrightdblstroke'] = 0x21CF;
+ t['arrowrightheavy'] = 0x279E;
+ t['arrowrightoverleft'] = 0x21C4;
+ t['arrowrightwhite'] = 0x21E8;
+ t['arrowtableft'] = 0x21E4;
+ t['arrowtabright'] = 0x21E5;
+ t['arrowup'] = 0x2191;
+ t['arrowupdn'] = 0x2195;
+ t['arrowupdnbse'] = 0x21A8;
+ t['arrowupdownbase'] = 0x21A8;
+ t['arrowupleft'] = 0x2196;
+ t['arrowupleftofdown'] = 0x21C5;
+ t['arrowupright'] = 0x2197;
+ t['arrowupwhite'] = 0x21E7;
+ t['arrowvertex'] = 0xF8E6;
+ t['asciicircum'] = 0x005E;
+ t['asciicircummonospace'] = 0xFF3E;
+ t['asciitilde'] = 0x007E;
+ t['asciitildemonospace'] = 0xFF5E;
+ t['ascript'] = 0x0251;
+ t['ascriptturned'] = 0x0252;
+ t['asmallhiragana'] = 0x3041;
+ t['asmallkatakana'] = 0x30A1;
+ t['asmallkatakanahalfwidth'] = 0xFF67;
+ t['asterisk'] = 0x002A;
+ t['asteriskaltonearabic'] = 0x066D;
+ t['asteriskarabic'] = 0x066D;
+ t['asteriskmath'] = 0x2217;
+ t['asteriskmonospace'] = 0xFF0A;
+ t['asterisksmall'] = 0xFE61;
+ t['asterism'] = 0x2042;
+ t['asuperior'] = 0xF6E9;
+ t['asymptoticallyequal'] = 0x2243;
+ t['at'] = 0x0040;
+ t['atilde'] = 0x00E3;
+ t['atmonospace'] = 0xFF20;
+ t['atsmall'] = 0xFE6B;
+ t['aturned'] = 0x0250;
+ t['aubengali'] = 0x0994;
+ t['aubopomofo'] = 0x3120;
+ t['audeva'] = 0x0914;
+ t['augujarati'] = 0x0A94;
+ t['augurmukhi'] = 0x0A14;
+ t['aulengthmarkbengali'] = 0x09D7;
+ t['aumatragurmukhi'] = 0x0A4C;
+ t['auvowelsignbengali'] = 0x09CC;
+ t['auvowelsigndeva'] = 0x094C;
+ t['auvowelsigngujarati'] = 0x0ACC;
+ t['avagrahadeva'] = 0x093D;
+ t['aybarmenian'] = 0x0561;
+ t['ayin'] = 0x05E2;
+ t['ayinaltonehebrew'] = 0xFB20;
+ t['ayinhebrew'] = 0x05E2;
+ t['b'] = 0x0062;
+ t['babengali'] = 0x09AC;
+ t['backslash'] = 0x005C;
+ t['backslashmonospace'] = 0xFF3C;
+ t['badeva'] = 0x092C;
+ t['bagujarati'] = 0x0AAC;
+ t['bagurmukhi'] = 0x0A2C;
+ t['bahiragana'] = 0x3070;
+ t['bahtthai'] = 0x0E3F;
+ t['bakatakana'] = 0x30D0;
+ t['bar'] = 0x007C;
+ t['barmonospace'] = 0xFF5C;
+ t['bbopomofo'] = 0x3105;
+ t['bcircle'] = 0x24D1;
+ t['bdotaccent'] = 0x1E03;
+ t['bdotbelow'] = 0x1E05;
+ t['beamedsixteenthnotes'] = 0x266C;
+ t['because'] = 0x2235;
+ t['becyrillic'] = 0x0431;
+ t['beharabic'] = 0x0628;
+ t['behfinalarabic'] = 0xFE90;
+ t['behinitialarabic'] = 0xFE91;
+ t['behiragana'] = 0x3079;
+ t['behmedialarabic'] = 0xFE92;
+ t['behmeeminitialarabic'] = 0xFC9F;
+ t['behmeemisolatedarabic'] = 0xFC08;
+ t['behnoonfinalarabic'] = 0xFC6D;
+ t['bekatakana'] = 0x30D9;
+ t['benarmenian'] = 0x0562;
+ t['bet'] = 0x05D1;
+ t['beta'] = 0x03B2;
+ t['betasymbolgreek'] = 0x03D0;
+ t['betdagesh'] = 0xFB31;
+ t['betdageshhebrew'] = 0xFB31;
+ t['bethebrew'] = 0x05D1;
+ t['betrafehebrew'] = 0xFB4C;
+ t['bhabengali'] = 0x09AD;
+ t['bhadeva'] = 0x092D;
+ t['bhagujarati'] = 0x0AAD;
+ t['bhagurmukhi'] = 0x0A2D;
+ t['bhook'] = 0x0253;
+ t['bihiragana'] = 0x3073;
+ t['bikatakana'] = 0x30D3;
+ t['bilabialclick'] = 0x0298;
+ t['bindigurmukhi'] = 0x0A02;
+ t['birusquare'] = 0x3331;
+ t['blackcircle'] = 0x25CF;
+ t['blackdiamond'] = 0x25C6;
+ t['blackdownpointingtriangle'] = 0x25BC;
+ t['blackleftpointingpointer'] = 0x25C4;
+ t['blackleftpointingtriangle'] = 0x25C0;
+ t['blacklenticularbracketleft'] = 0x3010;
+ t['blacklenticularbracketleftvertical'] = 0xFE3B;
+ t['blacklenticularbracketright'] = 0x3011;
+ t['blacklenticularbracketrightvertical'] = 0xFE3C;
+ t['blacklowerlefttriangle'] = 0x25E3;
+ t['blacklowerrighttriangle'] = 0x25E2;
+ t['blackrectangle'] = 0x25AC;
+ t['blackrightpointingpointer'] = 0x25BA;
+ t['blackrightpointingtriangle'] = 0x25B6;
+ t['blacksmallsquare'] = 0x25AA;
+ t['blacksmilingface'] = 0x263B;
+ t['blacksquare'] = 0x25A0;
+ t['blackstar'] = 0x2605;
+ t['blackupperlefttriangle'] = 0x25E4;
+ t['blackupperrighttriangle'] = 0x25E5;
+ t['blackuppointingsmalltriangle'] = 0x25B4;
+ t['blackuppointingtriangle'] = 0x25B2;
+ t['blank'] = 0x2423;
+ t['blinebelow'] = 0x1E07;
+ t['block'] = 0x2588;
+ t['bmonospace'] = 0xFF42;
+ t['bobaimaithai'] = 0x0E1A;
+ t['bohiragana'] = 0x307C;
+ t['bokatakana'] = 0x30DC;
+ t['bparen'] = 0x249D;
+ t['bqsquare'] = 0x33C3;
+ t['braceex'] = 0xF8F4;
+ t['braceleft'] = 0x007B;
+ t['braceleftbt'] = 0xF8F3;
+ t['braceleftmid'] = 0xF8F2;
+ t['braceleftmonospace'] = 0xFF5B;
+ t['braceleftsmall'] = 0xFE5B;
+ t['bracelefttp'] = 0xF8F1;
+ t['braceleftvertical'] = 0xFE37;
+ t['braceright'] = 0x007D;
+ t['bracerightbt'] = 0xF8FE;
+ t['bracerightmid'] = 0xF8FD;
+ t['bracerightmonospace'] = 0xFF5D;
+ t['bracerightsmall'] = 0xFE5C;
+ t['bracerighttp'] = 0xF8FC;
+ t['bracerightvertical'] = 0xFE38;
+ t['bracketleft'] = 0x005B;
+ t['bracketleftbt'] = 0xF8F0;
+ t['bracketleftex'] = 0xF8EF;
+ t['bracketleftmonospace'] = 0xFF3B;
+ t['bracketlefttp'] = 0xF8EE;
+ t['bracketright'] = 0x005D;
+ t['bracketrightbt'] = 0xF8FB;
+ t['bracketrightex'] = 0xF8FA;
+ t['bracketrightmonospace'] = 0xFF3D;
+ t['bracketrighttp'] = 0xF8F9;
+ t['breve'] = 0x02D8;
+ t['brevebelowcmb'] = 0x032E;
+ t['brevecmb'] = 0x0306;
+ t['breveinvertedbelowcmb'] = 0x032F;
+ t['breveinvertedcmb'] = 0x0311;
+ t['breveinverteddoublecmb'] = 0x0361;
+ t['bridgebelowcmb'] = 0x032A;
+ t['bridgeinvertedbelowcmb'] = 0x033A;
+ t['brokenbar'] = 0x00A6;
+ t['bstroke'] = 0x0180;
+ t['bsuperior'] = 0xF6EA;
+ t['btopbar'] = 0x0183;
+ t['buhiragana'] = 0x3076;
+ t['bukatakana'] = 0x30D6;
+ t['bullet'] = 0x2022;
+ t['bulletinverse'] = 0x25D8;
+ t['bulletoperator'] = 0x2219;
+ t['bullseye'] = 0x25CE;
+ t['c'] = 0x0063;
+ t['caarmenian'] = 0x056E;
+ t['cabengali'] = 0x099A;
+ t['cacute'] = 0x0107;
+ t['cadeva'] = 0x091A;
+ t['cagujarati'] = 0x0A9A;
+ t['cagurmukhi'] = 0x0A1A;
+ t['calsquare'] = 0x3388;
+ t['candrabindubengali'] = 0x0981;
+ t['candrabinducmb'] = 0x0310;
+ t['candrabindudeva'] = 0x0901;
+ t['candrabindugujarati'] = 0x0A81;
+ t['capslock'] = 0x21EA;
+ t['careof'] = 0x2105;
+ t['caron'] = 0x02C7;
+ t['caronbelowcmb'] = 0x032C;
+ t['caroncmb'] = 0x030C;
+ t['carriagereturn'] = 0x21B5;
+ t['cbopomofo'] = 0x3118;
+ t['ccaron'] = 0x010D;
+ t['ccedilla'] = 0x00E7;
+ t['ccedillaacute'] = 0x1E09;
+ t['ccircle'] = 0x24D2;
+ t['ccircumflex'] = 0x0109;
+ t['ccurl'] = 0x0255;
+ t['cdot'] = 0x010B;
+ t['cdotaccent'] = 0x010B;
+ t['cdsquare'] = 0x33C5;
+ t['cedilla'] = 0x00B8;
+ t['cedillacmb'] = 0x0327;
+ t['cent'] = 0x00A2;
+ t['centigrade'] = 0x2103;
+ t['centinferior'] = 0xF6DF;
+ t['centmonospace'] = 0xFFE0;
+ t['centoldstyle'] = 0xF7A2;
+ t['centsuperior'] = 0xF6E0;
+ t['chaarmenian'] = 0x0579;
+ t['chabengali'] = 0x099B;
+ t['chadeva'] = 0x091B;
+ t['chagujarati'] = 0x0A9B;
+ t['chagurmukhi'] = 0x0A1B;
+ t['chbopomofo'] = 0x3114;
+ t['cheabkhasiancyrillic'] = 0x04BD;
+ t['checkmark'] = 0x2713;
+ t['checyrillic'] = 0x0447;
+ t['chedescenderabkhasiancyrillic'] = 0x04BF;
+ t['chedescendercyrillic'] = 0x04B7;
+ t['chedieresiscyrillic'] = 0x04F5;
+ t['cheharmenian'] = 0x0573;
+ t['chekhakassiancyrillic'] = 0x04CC;
+ t['cheverticalstrokecyrillic'] = 0x04B9;
+ t['chi'] = 0x03C7;
+ t['chieuchacirclekorean'] = 0x3277;
+ t['chieuchaparenkorean'] = 0x3217;
+ t['chieuchcirclekorean'] = 0x3269;
+ t['chieuchkorean'] = 0x314A;
+ t['chieuchparenkorean'] = 0x3209;
+ t['chochangthai'] = 0x0E0A;
+ t['chochanthai'] = 0x0E08;
+ t['chochingthai'] = 0x0E09;
+ t['chochoethai'] = 0x0E0C;
+ t['chook'] = 0x0188;
+ t['cieucacirclekorean'] = 0x3276;
+ t['cieucaparenkorean'] = 0x3216;
+ t['cieuccirclekorean'] = 0x3268;
+ t['cieuckorean'] = 0x3148;
+ t['cieucparenkorean'] = 0x3208;
+ t['cieucuparenkorean'] = 0x321C;
+ t['circle'] = 0x25CB;
+ t['circlecopyrt'] = 0x00A9;
+ t['circlemultiply'] = 0x2297;
+ t['circleot'] = 0x2299;
+ t['circleplus'] = 0x2295;
+ t['circlepostalmark'] = 0x3036;
+ t['circlewithlefthalfblack'] = 0x25D0;
+ t['circlewithrighthalfblack'] = 0x25D1;
+ t['circumflex'] = 0x02C6;
+ t['circumflexbelowcmb'] = 0x032D;
+ t['circumflexcmb'] = 0x0302;
+ t['clear'] = 0x2327;
+ t['clickalveolar'] = 0x01C2;
+ t['clickdental'] = 0x01C0;
+ t['clicklateral'] = 0x01C1;
+ t['clickretroflex'] = 0x01C3;
+ t['club'] = 0x2663;
+ t['clubsuitblack'] = 0x2663;
+ t['clubsuitwhite'] = 0x2667;
+ t['cmcubedsquare'] = 0x33A4;
+ t['cmonospace'] = 0xFF43;
+ t['cmsquaredsquare'] = 0x33A0;
+ t['coarmenian'] = 0x0581;
+ t['colon'] = 0x003A;
+ t['colonmonetary'] = 0x20A1;
+ t['colonmonospace'] = 0xFF1A;
+ t['colonsign'] = 0x20A1;
+ t['colonsmall'] = 0xFE55;
+ t['colontriangularhalfmod'] = 0x02D1;
+ t['colontriangularmod'] = 0x02D0;
+ t['comma'] = 0x002C;
+ t['commaabovecmb'] = 0x0313;
+ t['commaaboverightcmb'] = 0x0315;
+ t['commaaccent'] = 0xF6C3;
+ t['commaarabic'] = 0x060C;
+ t['commaarmenian'] = 0x055D;
+ t['commainferior'] = 0xF6E1;
+ t['commamonospace'] = 0xFF0C;
+ t['commareversedabovecmb'] = 0x0314;
+ t['commareversedmod'] = 0x02BD;
+ t['commasmall'] = 0xFE50;
+ t['commasuperior'] = 0xF6E2;
+ t['commaturnedabovecmb'] = 0x0312;
+ t['commaturnedmod'] = 0x02BB;
+ t['compass'] = 0x263C;
+ t['congruent'] = 0x2245;
+ t['contourintegral'] = 0x222E;
+ t['control'] = 0x2303;
+ t['controlACK'] = 0x0006;
+ t['controlBEL'] = 0x0007;
+ t['controlBS'] = 0x0008;
+ t['controlCAN'] = 0x0018;
+ t['controlCR'] = 0x000D;
+ t['controlDC1'] = 0x0011;
+ t['controlDC2'] = 0x0012;
+ t['controlDC3'] = 0x0013;
+ t['controlDC4'] = 0x0014;
+ t['controlDEL'] = 0x007F;
+ t['controlDLE'] = 0x0010;
+ t['controlEM'] = 0x0019;
+ t['controlENQ'] = 0x0005;
+ t['controlEOT'] = 0x0004;
+ t['controlESC'] = 0x001B;
+ t['controlETB'] = 0x0017;
+ t['controlETX'] = 0x0003;
+ t['controlFF'] = 0x000C;
+ t['controlFS'] = 0x001C;
+ t['controlGS'] = 0x001D;
+ t['controlHT'] = 0x0009;
+ t['controlLF'] = 0x000A;
+ t['controlNAK'] = 0x0015;
+ t['controlNULL'] = 0x0000;
+ t['controlRS'] = 0x001E;
+ t['controlSI'] = 0x000F;
+ t['controlSO'] = 0x000E;
+ t['controlSOT'] = 0x0002;
+ t['controlSTX'] = 0x0001;
+ t['controlSUB'] = 0x001A;
+ t['controlSYN'] = 0x0016;
+ t['controlUS'] = 0x001F;
+ t['controlVT'] = 0x000B;
+ t['copyright'] = 0x00A9;
+ t['copyrightsans'] = 0xF8E9;
+ t['copyrightserif'] = 0xF6D9;
+ t['cornerbracketleft'] = 0x300C;
+ t['cornerbracketlefthalfwidth'] = 0xFF62;
+ t['cornerbracketleftvertical'] = 0xFE41;
+ t['cornerbracketright'] = 0x300D;
+ t['cornerbracketrighthalfwidth'] = 0xFF63;
+ t['cornerbracketrightvertical'] = 0xFE42;
+ t['corporationsquare'] = 0x337F;
+ t['cosquare'] = 0x33C7;
+ t['coverkgsquare'] = 0x33C6;
+ t['cparen'] = 0x249E;
+ t['cruzeiro'] = 0x20A2;
+ t['cstretched'] = 0x0297;
+ t['curlyand'] = 0x22CF;
+ t['curlyor'] = 0x22CE;
+ t['currency'] = 0x00A4;
+ t['cyrBreve'] = 0xF6D1;
+ t['cyrFlex'] = 0xF6D2;
+ t['cyrbreve'] = 0xF6D4;
+ t['cyrflex'] = 0xF6D5;
+ t['d'] = 0x0064;
+ t['daarmenian'] = 0x0564;
+ t['dabengali'] = 0x09A6;
+ t['dadarabic'] = 0x0636;
+ t['dadeva'] = 0x0926;
+ t['dadfinalarabic'] = 0xFEBE;
+ t['dadinitialarabic'] = 0xFEBF;
+ t['dadmedialarabic'] = 0xFEC0;
+ t['dagesh'] = 0x05BC;
+ t['dageshhebrew'] = 0x05BC;
+ t['dagger'] = 0x2020;
+ t['daggerdbl'] = 0x2021;
+ t['dagujarati'] = 0x0AA6;
+ t['dagurmukhi'] = 0x0A26;
+ t['dahiragana'] = 0x3060;
+ t['dakatakana'] = 0x30C0;
+ t['dalarabic'] = 0x062F;
+ t['dalet'] = 0x05D3;
+ t['daletdagesh'] = 0xFB33;
+ t['daletdageshhebrew'] = 0xFB33;
+ t['dalethebrew'] = 0x05D3;
+ t['dalfinalarabic'] = 0xFEAA;
+ t['dammaarabic'] = 0x064F;
+ t['dammalowarabic'] = 0x064F;
+ t['dammatanaltonearabic'] = 0x064C;
+ t['dammatanarabic'] = 0x064C;
+ t['danda'] = 0x0964;
+ t['dargahebrew'] = 0x05A7;
+ t['dargalefthebrew'] = 0x05A7;
+ t['dasiapneumatacyrilliccmb'] = 0x0485;
+ t['dblGrave'] = 0xF6D3;
+ t['dblanglebracketleft'] = 0x300A;
+ t['dblanglebracketleftvertical'] = 0xFE3D;
+ t['dblanglebracketright'] = 0x300B;
+ t['dblanglebracketrightvertical'] = 0xFE3E;
+ t['dblarchinvertedbelowcmb'] = 0x032B;
+ t['dblarrowleft'] = 0x21D4;
+ t['dblarrowright'] = 0x21D2;
+ t['dbldanda'] = 0x0965;
+ t['dblgrave'] = 0xF6D6;
+ t['dblgravecmb'] = 0x030F;
+ t['dblintegral'] = 0x222C;
+ t['dbllowline'] = 0x2017;
+ t['dbllowlinecmb'] = 0x0333;
+ t['dbloverlinecmb'] = 0x033F;
+ t['dblprimemod'] = 0x02BA;
+ t['dblverticalbar'] = 0x2016;
+ t['dblverticallineabovecmb'] = 0x030E;
+ t['dbopomofo'] = 0x3109;
+ t['dbsquare'] = 0x33C8;
+ t['dcaron'] = 0x010F;
+ t['dcedilla'] = 0x1E11;
+ t['dcircle'] = 0x24D3;
+ t['dcircumflexbelow'] = 0x1E13;
+ t['dcroat'] = 0x0111;
+ t['ddabengali'] = 0x09A1;
+ t['ddadeva'] = 0x0921;
+ t['ddagujarati'] = 0x0AA1;
+ t['ddagurmukhi'] = 0x0A21;
+ t['ddalarabic'] = 0x0688;
+ t['ddalfinalarabic'] = 0xFB89;
+ t['dddhadeva'] = 0x095C;
+ t['ddhabengali'] = 0x09A2;
+ t['ddhadeva'] = 0x0922;
+ t['ddhagujarati'] = 0x0AA2;
+ t['ddhagurmukhi'] = 0x0A22;
+ t['ddotaccent'] = 0x1E0B;
+ t['ddotbelow'] = 0x1E0D;
+ t['decimalseparatorarabic'] = 0x066B;
+ t['decimalseparatorpersian'] = 0x066B;
+ t['decyrillic'] = 0x0434;
+ t['degree'] = 0x00B0;
+ t['dehihebrew'] = 0x05AD;
+ t['dehiragana'] = 0x3067;
+ t['deicoptic'] = 0x03EF;
+ t['dekatakana'] = 0x30C7;
+ t['deleteleft'] = 0x232B;
+ t['deleteright'] = 0x2326;
+ t['delta'] = 0x03B4;
+ t['deltaturned'] = 0x018D;
+ t['denominatorminusonenumeratorbengali'] = 0x09F8;
+ t['dezh'] = 0x02A4;
+ t['dhabengali'] = 0x09A7;
+ t['dhadeva'] = 0x0927;
+ t['dhagujarati'] = 0x0AA7;
+ t['dhagurmukhi'] = 0x0A27;
+ t['dhook'] = 0x0257;
+ t['dialytikatonos'] = 0x0385;
+ t['dialytikatonoscmb'] = 0x0344;
+ t['diamond'] = 0x2666;
+ t['diamondsuitwhite'] = 0x2662;
+ t['dieresis'] = 0x00A8;
+ t['dieresisacute'] = 0xF6D7;
+ t['dieresisbelowcmb'] = 0x0324;
+ t['dieresiscmb'] = 0x0308;
+ t['dieresisgrave'] = 0xF6D8;
+ t['dieresistonos'] = 0x0385;
+ t['dihiragana'] = 0x3062;
+ t['dikatakana'] = 0x30C2;
+ t['dittomark'] = 0x3003;
+ t['divide'] = 0x00F7;
+ t['divides'] = 0x2223;
+ t['divisionslash'] = 0x2215;
+ t['djecyrillic'] = 0x0452;
+ t['dkshade'] = 0x2593;
+ t['dlinebelow'] = 0x1E0F;
+ t['dlsquare'] = 0x3397;
+ t['dmacron'] = 0x0111;
+ t['dmonospace'] = 0xFF44;
+ t['dnblock'] = 0x2584;
+ t['dochadathai'] = 0x0E0E;
+ t['dodekthai'] = 0x0E14;
+ t['dohiragana'] = 0x3069;
+ t['dokatakana'] = 0x30C9;
+ t['dollar'] = 0x0024;
+ t['dollarinferior'] = 0xF6E3;
+ t['dollarmonospace'] = 0xFF04;
+ t['dollaroldstyle'] = 0xF724;
+ t['dollarsmall'] = 0xFE69;
+ t['dollarsuperior'] = 0xF6E4;
+ t['dong'] = 0x20AB;
+ t['dorusquare'] = 0x3326;
+ t['dotaccent'] = 0x02D9;
+ t['dotaccentcmb'] = 0x0307;
+ t['dotbelowcmb'] = 0x0323;
+ t['dotbelowcomb'] = 0x0323;
+ t['dotkatakana'] = 0x30FB;
+ t['dotlessi'] = 0x0131;
+ t['dotlessj'] = 0xF6BE;
+ t['dotlessjstrokehook'] = 0x0284;
+ t['dotmath'] = 0x22C5;
+ t['dottedcircle'] = 0x25CC;
+ t['doubleyodpatah'] = 0xFB1F;
+ t['doubleyodpatahhebrew'] = 0xFB1F;
+ t['downtackbelowcmb'] = 0x031E;
+ t['downtackmod'] = 0x02D5;
+ t['dparen'] = 0x249F;
+ t['dsuperior'] = 0xF6EB;
+ t['dtail'] = 0x0256;
+ t['dtopbar'] = 0x018C;
+ t['duhiragana'] = 0x3065;
+ t['dukatakana'] = 0x30C5;
+ t['dz'] = 0x01F3;
+ t['dzaltone'] = 0x02A3;
+ t['dzcaron'] = 0x01C6;
+ t['dzcurl'] = 0x02A5;
+ t['dzeabkhasiancyrillic'] = 0x04E1;
+ t['dzecyrillic'] = 0x0455;
+ t['dzhecyrillic'] = 0x045F;
+ t['e'] = 0x0065;
+ t['eacute'] = 0x00E9;
+ t['earth'] = 0x2641;
+ t['ebengali'] = 0x098F;
+ t['ebopomofo'] = 0x311C;
+ t['ebreve'] = 0x0115;
+ t['ecandradeva'] = 0x090D;
+ t['ecandragujarati'] = 0x0A8D;
+ t['ecandravowelsigndeva'] = 0x0945;
+ t['ecandravowelsigngujarati'] = 0x0AC5;
+ t['ecaron'] = 0x011B;
+ t['ecedillabreve'] = 0x1E1D;
+ t['echarmenian'] = 0x0565;
+ t['echyiwnarmenian'] = 0x0587;
+ t['ecircle'] = 0x24D4;
+ t['ecircumflex'] = 0x00EA;
+ t['ecircumflexacute'] = 0x1EBF;
+ t['ecircumflexbelow'] = 0x1E19;
+ t['ecircumflexdotbelow'] = 0x1EC7;
+ t['ecircumflexgrave'] = 0x1EC1;
+ t['ecircumflexhookabove'] = 0x1EC3;
+ t['ecircumflextilde'] = 0x1EC5;
+ t['ecyrillic'] = 0x0454;
+ t['edblgrave'] = 0x0205;
+ t['edeva'] = 0x090F;
+ t['edieresis'] = 0x00EB;
+ t['edot'] = 0x0117;
+ t['edotaccent'] = 0x0117;
+ t['edotbelow'] = 0x1EB9;
+ t['eegurmukhi'] = 0x0A0F;
+ t['eematragurmukhi'] = 0x0A47;
+ t['efcyrillic'] = 0x0444;
+ t['egrave'] = 0x00E8;
+ t['egujarati'] = 0x0A8F;
+ t['eharmenian'] = 0x0567;
+ t['ehbopomofo'] = 0x311D;
+ t['ehiragana'] = 0x3048;
+ t['ehookabove'] = 0x1EBB;
+ t['eibopomofo'] = 0x311F;
+ t['eight'] = 0x0038;
+ t['eightarabic'] = 0x0668;
+ t['eightbengali'] = 0x09EE;
+ t['eightcircle'] = 0x2467;
+ t['eightcircleinversesansserif'] = 0x2791;
+ t['eightdeva'] = 0x096E;
+ t['eighteencircle'] = 0x2471;
+ t['eighteenparen'] = 0x2485;
+ t['eighteenperiod'] = 0x2499;
+ t['eightgujarati'] = 0x0AEE;
+ t['eightgurmukhi'] = 0x0A6E;
+ t['eighthackarabic'] = 0x0668;
+ t['eighthangzhou'] = 0x3028;
+ t['eighthnotebeamed'] = 0x266B;
+ t['eightideographicparen'] = 0x3227;
+ t['eightinferior'] = 0x2088;
+ t['eightmonospace'] = 0xFF18;
+ t['eightoldstyle'] = 0xF738;
+ t['eightparen'] = 0x247B;
+ t['eightperiod'] = 0x248F;
+ t['eightpersian'] = 0x06F8;
+ t['eightroman'] = 0x2177;
+ t['eightsuperior'] = 0x2078;
+ t['eightthai'] = 0x0E58;
+ t['einvertedbreve'] = 0x0207;
+ t['eiotifiedcyrillic'] = 0x0465;
+ t['ekatakana'] = 0x30A8;
+ t['ekatakanahalfwidth'] = 0xFF74;
+ t['ekonkargurmukhi'] = 0x0A74;
+ t['ekorean'] = 0x3154;
+ t['elcyrillic'] = 0x043B;
+ t['element'] = 0x2208;
+ t['elevencircle'] = 0x246A;
+ t['elevenparen'] = 0x247E;
+ t['elevenperiod'] = 0x2492;
+ t['elevenroman'] = 0x217A;
+ t['ellipsis'] = 0x2026;
+ t['ellipsisvertical'] = 0x22EE;
+ t['emacron'] = 0x0113;
+ t['emacronacute'] = 0x1E17;
+ t['emacrongrave'] = 0x1E15;
+ t['emcyrillic'] = 0x043C;
+ t['emdash'] = 0x2014;
+ t['emdashvertical'] = 0xFE31;
+ t['emonospace'] = 0xFF45;
+ t['emphasismarkarmenian'] = 0x055B;
+ t['emptyset'] = 0x2205;
+ t['enbopomofo'] = 0x3123;
+ t['encyrillic'] = 0x043D;
+ t['endash'] = 0x2013;
+ t['endashvertical'] = 0xFE32;
+ t['endescendercyrillic'] = 0x04A3;
+ t['eng'] = 0x014B;
+ t['engbopomofo'] = 0x3125;
+ t['enghecyrillic'] = 0x04A5;
+ t['enhookcyrillic'] = 0x04C8;
+ t['enspace'] = 0x2002;
+ t['eogonek'] = 0x0119;
+ t['eokorean'] = 0x3153;
+ t['eopen'] = 0x025B;
+ t['eopenclosed'] = 0x029A;
+ t['eopenreversed'] = 0x025C;
+ t['eopenreversedclosed'] = 0x025E;
+ t['eopenreversedhook'] = 0x025D;
+ t['eparen'] = 0x24A0;
+ t['epsilon'] = 0x03B5;
+ t['epsilontonos'] = 0x03AD;
+ t['equal'] = 0x003D;
+ t['equalmonospace'] = 0xFF1D;
+ t['equalsmall'] = 0xFE66;
+ t['equalsuperior'] = 0x207C;
+ t['equivalence'] = 0x2261;
+ t['erbopomofo'] = 0x3126;
+ t['ercyrillic'] = 0x0440;
+ t['ereversed'] = 0x0258;
+ t['ereversedcyrillic'] = 0x044D;
+ t['escyrillic'] = 0x0441;
+ t['esdescendercyrillic'] = 0x04AB;
+ t['esh'] = 0x0283;
+ t['eshcurl'] = 0x0286;
+ t['eshortdeva'] = 0x090E;
+ t['eshortvowelsigndeva'] = 0x0946;
+ t['eshreversedloop'] = 0x01AA;
+ t['eshsquatreversed'] = 0x0285;
+ t['esmallhiragana'] = 0x3047;
+ t['esmallkatakana'] = 0x30A7;
+ t['esmallkatakanahalfwidth'] = 0xFF6A;
+ t['estimated'] = 0x212E;
+ t['esuperior'] = 0xF6EC;
+ t['eta'] = 0x03B7;
+ t['etarmenian'] = 0x0568;
+ t['etatonos'] = 0x03AE;
+ t['eth'] = 0x00F0;
+ t['etilde'] = 0x1EBD;
+ t['etildebelow'] = 0x1E1B;
+ t['etnahtafoukhhebrew'] = 0x0591;
+ t['etnahtafoukhlefthebrew'] = 0x0591;
+ t['etnahtahebrew'] = 0x0591;
+ t['etnahtalefthebrew'] = 0x0591;
+ t['eturned'] = 0x01DD;
+ t['eukorean'] = 0x3161;
+ t['euro'] = 0x20AC;
+ t['evowelsignbengali'] = 0x09C7;
+ t['evowelsigndeva'] = 0x0947;
+ t['evowelsigngujarati'] = 0x0AC7;
+ t['exclam'] = 0x0021;
+ t['exclamarmenian'] = 0x055C;
+ t['exclamdbl'] = 0x203C;
+ t['exclamdown'] = 0x00A1;
+ t['exclamdownsmall'] = 0xF7A1;
+ t['exclammonospace'] = 0xFF01;
+ t['exclamsmall'] = 0xF721;
+ t['existential'] = 0x2203;
+ t['ezh'] = 0x0292;
+ t['ezhcaron'] = 0x01EF;
+ t['ezhcurl'] = 0x0293;
+ t['ezhreversed'] = 0x01B9;
+ t['ezhtail'] = 0x01BA;
+ t['f'] = 0x0066;
+ t['fadeva'] = 0x095E;
+ t['fagurmukhi'] = 0x0A5E;
+ t['fahrenheit'] = 0x2109;
+ t['fathaarabic'] = 0x064E;
+ t['fathalowarabic'] = 0x064E;
+ t['fathatanarabic'] = 0x064B;
+ t['fbopomofo'] = 0x3108;
+ t['fcircle'] = 0x24D5;
+ t['fdotaccent'] = 0x1E1F;
+ t['feharabic'] = 0x0641;
+ t['feharmenian'] = 0x0586;
+ t['fehfinalarabic'] = 0xFED2;
+ t['fehinitialarabic'] = 0xFED3;
+ t['fehmedialarabic'] = 0xFED4;
+ t['feicoptic'] = 0x03E5;
+ t['female'] = 0x2640;
+ t['ff'] = 0xFB00;
+ t['ffi'] = 0xFB03;
+ t['ffl'] = 0xFB04;
+ t['fi'] = 0xFB01;
+ t['fifteencircle'] = 0x246E;
+ t['fifteenparen'] = 0x2482;
+ t['fifteenperiod'] = 0x2496;
+ t['figuredash'] = 0x2012;
+ t['filledbox'] = 0x25A0;
+ t['filledrect'] = 0x25AC;
+ t['finalkaf'] = 0x05DA;
+ t['finalkafdagesh'] = 0xFB3A;
+ t['finalkafdageshhebrew'] = 0xFB3A;
+ t['finalkafhebrew'] = 0x05DA;
+ t['finalmem'] = 0x05DD;
+ t['finalmemhebrew'] = 0x05DD;
+ t['finalnun'] = 0x05DF;
+ t['finalnunhebrew'] = 0x05DF;
+ t['finalpe'] = 0x05E3;
+ t['finalpehebrew'] = 0x05E3;
+ t['finaltsadi'] = 0x05E5;
+ t['finaltsadihebrew'] = 0x05E5;
+ t['firsttonechinese'] = 0x02C9;
+ t['fisheye'] = 0x25C9;
+ t['fitacyrillic'] = 0x0473;
+ t['five'] = 0x0035;
+ t['fivearabic'] = 0x0665;
+ t['fivebengali'] = 0x09EB;
+ t['fivecircle'] = 0x2464;
+ t['fivecircleinversesansserif'] = 0x278E;
+ t['fivedeva'] = 0x096B;
+ t['fiveeighths'] = 0x215D;
+ t['fivegujarati'] = 0x0AEB;
+ t['fivegurmukhi'] = 0x0A6B;
+ t['fivehackarabic'] = 0x0665;
+ t['fivehangzhou'] = 0x3025;
+ t['fiveideographicparen'] = 0x3224;
+ t['fiveinferior'] = 0x2085;
+ t['fivemonospace'] = 0xFF15;
+ t['fiveoldstyle'] = 0xF735;
+ t['fiveparen'] = 0x2478;
+ t['fiveperiod'] = 0x248C;
+ t['fivepersian'] = 0x06F5;
+ t['fiveroman'] = 0x2174;
+ t['fivesuperior'] = 0x2075;
+ t['fivethai'] = 0x0E55;
+ t['fl'] = 0xFB02;
+ t['florin'] = 0x0192;
+ t['fmonospace'] = 0xFF46;
+ t['fmsquare'] = 0x3399;
+ t['fofanthai'] = 0x0E1F;
+ t['fofathai'] = 0x0E1D;
+ t['fongmanthai'] = 0x0E4F;
+ t['forall'] = 0x2200;
+ t['four'] = 0x0034;
+ t['fourarabic'] = 0x0664;
+ t['fourbengali'] = 0x09EA;
+ t['fourcircle'] = 0x2463;
+ t['fourcircleinversesansserif'] = 0x278D;
+ t['fourdeva'] = 0x096A;
+ t['fourgujarati'] = 0x0AEA;
+ t['fourgurmukhi'] = 0x0A6A;
+ t['fourhackarabic'] = 0x0664;
+ t['fourhangzhou'] = 0x3024;
+ t['fourideographicparen'] = 0x3223;
+ t['fourinferior'] = 0x2084;
+ t['fourmonospace'] = 0xFF14;
+ t['fournumeratorbengali'] = 0x09F7;
+ t['fouroldstyle'] = 0xF734;
+ t['fourparen'] = 0x2477;
+ t['fourperiod'] = 0x248B;
+ t['fourpersian'] = 0x06F4;
+ t['fourroman'] = 0x2173;
+ t['foursuperior'] = 0x2074;
+ t['fourteencircle'] = 0x246D;
+ t['fourteenparen'] = 0x2481;
+ t['fourteenperiod'] = 0x2495;
+ t['fourthai'] = 0x0E54;
+ t['fourthtonechinese'] = 0x02CB;
+ t['fparen'] = 0x24A1;
+ t['fraction'] = 0x2044;
+ t['franc'] = 0x20A3;
+ t['g'] = 0x0067;
+ t['gabengali'] = 0x0997;
+ t['gacute'] = 0x01F5;
+ t['gadeva'] = 0x0917;
+ t['gafarabic'] = 0x06AF;
+ t['gaffinalarabic'] = 0xFB93;
+ t['gafinitialarabic'] = 0xFB94;
+ t['gafmedialarabic'] = 0xFB95;
+ t['gagujarati'] = 0x0A97;
+ t['gagurmukhi'] = 0x0A17;
+ t['gahiragana'] = 0x304C;
+ t['gakatakana'] = 0x30AC;
+ t['gamma'] = 0x03B3;
+ t['gammalatinsmall'] = 0x0263;
+ t['gammasuperior'] = 0x02E0;
+ t['gangiacoptic'] = 0x03EB;
+ t['gbopomofo'] = 0x310D;
+ t['gbreve'] = 0x011F;
+ t['gcaron'] = 0x01E7;
+ t['gcedilla'] = 0x0123;
+ t['gcircle'] = 0x24D6;
+ t['gcircumflex'] = 0x011D;
+ t['gcommaaccent'] = 0x0123;
+ t['gdot'] = 0x0121;
+ t['gdotaccent'] = 0x0121;
+ t['gecyrillic'] = 0x0433;
+ t['gehiragana'] = 0x3052;
+ t['gekatakana'] = 0x30B2;
+ t['geometricallyequal'] = 0x2251;
+ t['gereshaccenthebrew'] = 0x059C;
+ t['gereshhebrew'] = 0x05F3;
+ t['gereshmuqdamhebrew'] = 0x059D;
+ t['germandbls'] = 0x00DF;
+ t['gershayimaccenthebrew'] = 0x059E;
+ t['gershayimhebrew'] = 0x05F4;
+ t['getamark'] = 0x3013;
+ t['ghabengali'] = 0x0998;
+ t['ghadarmenian'] = 0x0572;
+ t['ghadeva'] = 0x0918;
+ t['ghagujarati'] = 0x0A98;
+ t['ghagurmukhi'] = 0x0A18;
+ t['ghainarabic'] = 0x063A;
+ t['ghainfinalarabic'] = 0xFECE;
+ t['ghaininitialarabic'] = 0xFECF;
+ t['ghainmedialarabic'] = 0xFED0;
+ t['ghemiddlehookcyrillic'] = 0x0495;
+ t['ghestrokecyrillic'] = 0x0493;
+ t['gheupturncyrillic'] = 0x0491;
+ t['ghhadeva'] = 0x095A;
+ t['ghhagurmukhi'] = 0x0A5A;
+ t['ghook'] = 0x0260;
+ t['ghzsquare'] = 0x3393;
+ t['gihiragana'] = 0x304E;
+ t['gikatakana'] = 0x30AE;
+ t['gimarmenian'] = 0x0563;
+ t['gimel'] = 0x05D2;
+ t['gimeldagesh'] = 0xFB32;
+ t['gimeldageshhebrew'] = 0xFB32;
+ t['gimelhebrew'] = 0x05D2;
+ t['gjecyrillic'] = 0x0453;
+ t['glottalinvertedstroke'] = 0x01BE;
+ t['glottalstop'] = 0x0294;
+ t['glottalstopinverted'] = 0x0296;
+ t['glottalstopmod'] = 0x02C0;
+ t['glottalstopreversed'] = 0x0295;
+ t['glottalstopreversedmod'] = 0x02C1;
+ t['glottalstopreversedsuperior'] = 0x02E4;
+ t['glottalstopstroke'] = 0x02A1;
+ t['glottalstopstrokereversed'] = 0x02A2;
+ t['gmacron'] = 0x1E21;
+ t['gmonospace'] = 0xFF47;
+ t['gohiragana'] = 0x3054;
+ t['gokatakana'] = 0x30B4;
+ t['gparen'] = 0x24A2;
+ t['gpasquare'] = 0x33AC;
+ t['gradient'] = 0x2207;
+ t['grave'] = 0x0060;
+ t['gravebelowcmb'] = 0x0316;
+ t['gravecmb'] = 0x0300;
+ t['gravecomb'] = 0x0300;
+ t['gravedeva'] = 0x0953;
+ t['gravelowmod'] = 0x02CE;
+ t['gravemonospace'] = 0xFF40;
+ t['gravetonecmb'] = 0x0340;
+ t['greater'] = 0x003E;
+ t['greaterequal'] = 0x2265;
+ t['greaterequalorless'] = 0x22DB;
+ t['greatermonospace'] = 0xFF1E;
+ t['greaterorequivalent'] = 0x2273;
+ t['greaterorless'] = 0x2277;
+ t['greateroverequal'] = 0x2267;
+ t['greatersmall'] = 0xFE65;
+ t['gscript'] = 0x0261;
+ t['gstroke'] = 0x01E5;
+ t['guhiragana'] = 0x3050;
+ t['guillemotleft'] = 0x00AB;
+ t['guillemotright'] = 0x00BB;
+ t['guilsinglleft'] = 0x2039;
+ t['guilsinglright'] = 0x203A;
+ t['gukatakana'] = 0x30B0;
+ t['guramusquare'] = 0x3318;
+ t['gysquare'] = 0x33C9;
+ t['h'] = 0x0068;
+ t['haabkhasiancyrillic'] = 0x04A9;
+ t['haaltonearabic'] = 0x06C1;
+ t['habengali'] = 0x09B9;
+ t['hadescendercyrillic'] = 0x04B3;
+ t['hadeva'] = 0x0939;
+ t['hagujarati'] = 0x0AB9;
+ t['hagurmukhi'] = 0x0A39;
+ t['haharabic'] = 0x062D;
+ t['hahfinalarabic'] = 0xFEA2;
+ t['hahinitialarabic'] = 0xFEA3;
+ t['hahiragana'] = 0x306F;
+ t['hahmedialarabic'] = 0xFEA4;
+ t['haitusquare'] = 0x332A;
+ t['hakatakana'] = 0x30CF;
+ t['hakatakanahalfwidth'] = 0xFF8A;
+ t['halantgurmukhi'] = 0x0A4D;
+ t['hamzaarabic'] = 0x0621;
+ t['hamzalowarabic'] = 0x0621;
+ t['hangulfiller'] = 0x3164;
+ t['hardsigncyrillic'] = 0x044A;
+ t['harpoonleftbarbup'] = 0x21BC;
+ t['harpoonrightbarbup'] = 0x21C0;
+ t['hasquare'] = 0x33CA;
+ t['hatafpatah'] = 0x05B2;
+ t['hatafpatah16'] = 0x05B2;
+ t['hatafpatah23'] = 0x05B2;
+ t['hatafpatah2f'] = 0x05B2;
+ t['hatafpatahhebrew'] = 0x05B2;
+ t['hatafpatahnarrowhebrew'] = 0x05B2;
+ t['hatafpatahquarterhebrew'] = 0x05B2;
+ t['hatafpatahwidehebrew'] = 0x05B2;
+ t['hatafqamats'] = 0x05B3;
+ t['hatafqamats1b'] = 0x05B3;
+ t['hatafqamats28'] = 0x05B3;
+ t['hatafqamats34'] = 0x05B3;
+ t['hatafqamatshebrew'] = 0x05B3;
+ t['hatafqamatsnarrowhebrew'] = 0x05B3;
+ t['hatafqamatsquarterhebrew'] = 0x05B3;
+ t['hatafqamatswidehebrew'] = 0x05B3;
+ t['hatafsegol'] = 0x05B1;
+ t['hatafsegol17'] = 0x05B1;
+ t['hatafsegol24'] = 0x05B1;
+ t['hatafsegol30'] = 0x05B1;
+ t['hatafsegolhebrew'] = 0x05B1;
+ t['hatafsegolnarrowhebrew'] = 0x05B1;
+ t['hatafsegolquarterhebrew'] = 0x05B1;
+ t['hatafsegolwidehebrew'] = 0x05B1;
+ t['hbar'] = 0x0127;
+ t['hbopomofo'] = 0x310F;
+ t['hbrevebelow'] = 0x1E2B;
+ t['hcedilla'] = 0x1E29;
+ t['hcircle'] = 0x24D7;
+ t['hcircumflex'] = 0x0125;
+ t['hdieresis'] = 0x1E27;
+ t['hdotaccent'] = 0x1E23;
+ t['hdotbelow'] = 0x1E25;
+ t['he'] = 0x05D4;
+ t['heart'] = 0x2665;
+ t['heartsuitblack'] = 0x2665;
+ t['heartsuitwhite'] = 0x2661;
+ t['hedagesh'] = 0xFB34;
+ t['hedageshhebrew'] = 0xFB34;
+ t['hehaltonearabic'] = 0x06C1;
+ t['heharabic'] = 0x0647;
+ t['hehebrew'] = 0x05D4;
+ t['hehfinalaltonearabic'] = 0xFBA7;
+ t['hehfinalalttwoarabic'] = 0xFEEA;
+ t['hehfinalarabic'] = 0xFEEA;
+ t['hehhamzaabovefinalarabic'] = 0xFBA5;
+ t['hehhamzaaboveisolatedarabic'] = 0xFBA4;
+ t['hehinitialaltonearabic'] = 0xFBA8;
+ t['hehinitialarabic'] = 0xFEEB;
+ t['hehiragana'] = 0x3078;
+ t['hehmedialaltonearabic'] = 0xFBA9;
+ t['hehmedialarabic'] = 0xFEEC;
+ t['heiseierasquare'] = 0x337B;
+ t['hekatakana'] = 0x30D8;
+ t['hekatakanahalfwidth'] = 0xFF8D;
+ t['hekutaarusquare'] = 0x3336;
+ t['henghook'] = 0x0267;
+ t['herutusquare'] = 0x3339;
+ t['het'] = 0x05D7;
+ t['hethebrew'] = 0x05D7;
+ t['hhook'] = 0x0266;
+ t['hhooksuperior'] = 0x02B1;
+ t['hieuhacirclekorean'] = 0x327B;
+ t['hieuhaparenkorean'] = 0x321B;
+ t['hieuhcirclekorean'] = 0x326D;
+ t['hieuhkorean'] = 0x314E;
+ t['hieuhparenkorean'] = 0x320D;
+ t['hihiragana'] = 0x3072;
+ t['hikatakana'] = 0x30D2;
+ t['hikatakanahalfwidth'] = 0xFF8B;
+ t['hiriq'] = 0x05B4;
+ t['hiriq14'] = 0x05B4;
+ t['hiriq21'] = 0x05B4;
+ t['hiriq2d'] = 0x05B4;
+ t['hiriqhebrew'] = 0x05B4;
+ t['hiriqnarrowhebrew'] = 0x05B4;
+ t['hiriqquarterhebrew'] = 0x05B4;
+ t['hiriqwidehebrew'] = 0x05B4;
+ t['hlinebelow'] = 0x1E96;
+ t['hmonospace'] = 0xFF48;
+ t['hoarmenian'] = 0x0570;
+ t['hohipthai'] = 0x0E2B;
+ t['hohiragana'] = 0x307B;
+ t['hokatakana'] = 0x30DB;
+ t['hokatakanahalfwidth'] = 0xFF8E;
+ t['holam'] = 0x05B9;
+ t['holam19'] = 0x05B9;
+ t['holam26'] = 0x05B9;
+ t['holam32'] = 0x05B9;
+ t['holamhebrew'] = 0x05B9;
+ t['holamnarrowhebrew'] = 0x05B9;
+ t['holamquarterhebrew'] = 0x05B9;
+ t['holamwidehebrew'] = 0x05B9;
+ t['honokhukthai'] = 0x0E2E;
+ t['hookabovecomb'] = 0x0309;
+ t['hookcmb'] = 0x0309;
+ t['hookpalatalizedbelowcmb'] = 0x0321;
+ t['hookretroflexbelowcmb'] = 0x0322;
+ t['hoonsquare'] = 0x3342;
+ t['horicoptic'] = 0x03E9;
+ t['horizontalbar'] = 0x2015;
+ t['horncmb'] = 0x031B;
+ t['hotsprings'] = 0x2668;
+ t['house'] = 0x2302;
+ t['hparen'] = 0x24A3;
+ t['hsuperior'] = 0x02B0;
+ t['hturned'] = 0x0265;
+ t['huhiragana'] = 0x3075;
+ t['huiitosquare'] = 0x3333;
+ t['hukatakana'] = 0x30D5;
+ t['hukatakanahalfwidth'] = 0xFF8C;
+ t['hungarumlaut'] = 0x02DD;
+ t['hungarumlautcmb'] = 0x030B;
+ t['hv'] = 0x0195;
+ t['hyphen'] = 0x002D;
+ t['hypheninferior'] = 0xF6E5;
+ t['hyphenmonospace'] = 0xFF0D;
+ t['hyphensmall'] = 0xFE63;
+ t['hyphensuperior'] = 0xF6E6;
+ t['hyphentwo'] = 0x2010;
+ t['i'] = 0x0069;
+ t['iacute'] = 0x00ED;
+ t['iacyrillic'] = 0x044F;
+ t['ibengali'] = 0x0987;
+ t['ibopomofo'] = 0x3127;
+ t['ibreve'] = 0x012D;
+ t['icaron'] = 0x01D0;
+ t['icircle'] = 0x24D8;
+ t['icircumflex'] = 0x00EE;
+ t['icyrillic'] = 0x0456;
+ t['idblgrave'] = 0x0209;
+ t['ideographearthcircle'] = 0x328F;
+ t['ideographfirecircle'] = 0x328B;
+ t['ideographicallianceparen'] = 0x323F;
+ t['ideographiccallparen'] = 0x323A;
+ t['ideographiccentrecircle'] = 0x32A5;
+ t['ideographicclose'] = 0x3006;
+ t['ideographiccomma'] = 0x3001;
+ t['ideographiccommaleft'] = 0xFF64;
+ t['ideographiccongratulationparen'] = 0x3237;
+ t['ideographiccorrectcircle'] = 0x32A3;
+ t['ideographicearthparen'] = 0x322F;
+ t['ideographicenterpriseparen'] = 0x323D;
+ t['ideographicexcellentcircle'] = 0x329D;
+ t['ideographicfestivalparen'] = 0x3240;
+ t['ideographicfinancialcircle'] = 0x3296;
+ t['ideographicfinancialparen'] = 0x3236;
+ t['ideographicfireparen'] = 0x322B;
+ t['ideographichaveparen'] = 0x3232;
+ t['ideographichighcircle'] = 0x32A4;
+ t['ideographiciterationmark'] = 0x3005;
+ t['ideographiclaborcircle'] = 0x3298;
+ t['ideographiclaborparen'] = 0x3238;
+ t['ideographicleftcircle'] = 0x32A7;
+ t['ideographiclowcircle'] = 0x32A6;
+ t['ideographicmedicinecircle'] = 0x32A9;
+ t['ideographicmetalparen'] = 0x322E;
+ t['ideographicmoonparen'] = 0x322A;
+ t['ideographicnameparen'] = 0x3234;
+ t['ideographicperiod'] = 0x3002;
+ t['ideographicprintcircle'] = 0x329E;
+ t['ideographicreachparen'] = 0x3243;
+ t['ideographicrepresentparen'] = 0x3239;
+ t['ideographicresourceparen'] = 0x323E;
+ t['ideographicrightcircle'] = 0x32A8;
+ t['ideographicsecretcircle'] = 0x3299;
+ t['ideographicselfparen'] = 0x3242;
+ t['ideographicsocietyparen'] = 0x3233;
+ t['ideographicspace'] = 0x3000;
+ t['ideographicspecialparen'] = 0x3235;
+ t['ideographicstockparen'] = 0x3231;
+ t['ideographicstudyparen'] = 0x323B;
+ t['ideographicsunparen'] = 0x3230;
+ t['ideographicsuperviseparen'] = 0x323C;
+ t['ideographicwaterparen'] = 0x322C;
+ t['ideographicwoodparen'] = 0x322D;
+ t['ideographiczero'] = 0x3007;
+ t['ideographmetalcircle'] = 0x328E;
+ t['ideographmooncircle'] = 0x328A;
+ t['ideographnamecircle'] = 0x3294;
+ t['ideographsuncircle'] = 0x3290;
+ t['ideographwatercircle'] = 0x328C;
+ t['ideographwoodcircle'] = 0x328D;
+ t['ideva'] = 0x0907;
+ t['idieresis'] = 0x00EF;
+ t['idieresisacute'] = 0x1E2F;
+ t['idieresiscyrillic'] = 0x04E5;
+ t['idotbelow'] = 0x1ECB;
+ t['iebrevecyrillic'] = 0x04D7;
+ t['iecyrillic'] = 0x0435;
+ t['ieungacirclekorean'] = 0x3275;
+ t['ieungaparenkorean'] = 0x3215;
+ t['ieungcirclekorean'] = 0x3267;
+ t['ieungkorean'] = 0x3147;
+ t['ieungparenkorean'] = 0x3207;
+ t['igrave'] = 0x00EC;
+ t['igujarati'] = 0x0A87;
+ t['igurmukhi'] = 0x0A07;
+ t['ihiragana'] = 0x3044;
+ t['ihookabove'] = 0x1EC9;
+ t['iibengali'] = 0x0988;
+ t['iicyrillic'] = 0x0438;
+ t['iideva'] = 0x0908;
+ t['iigujarati'] = 0x0A88;
+ t['iigurmukhi'] = 0x0A08;
+ t['iimatragurmukhi'] = 0x0A40;
+ t['iinvertedbreve'] = 0x020B;
+ t['iishortcyrillic'] = 0x0439;
+ t['iivowelsignbengali'] = 0x09C0;
+ t['iivowelsigndeva'] = 0x0940;
+ t['iivowelsigngujarati'] = 0x0AC0;
+ t['ij'] = 0x0133;
+ t['ikatakana'] = 0x30A4;
+ t['ikatakanahalfwidth'] = 0xFF72;
+ t['ikorean'] = 0x3163;
+ t['ilde'] = 0x02DC;
+ t['iluyhebrew'] = 0x05AC;
+ t['imacron'] = 0x012B;
+ t['imacroncyrillic'] = 0x04E3;
+ t['imageorapproximatelyequal'] = 0x2253;
+ t['imatragurmukhi'] = 0x0A3F;
+ t['imonospace'] = 0xFF49;
+ t['increment'] = 0x2206;
+ t['infinity'] = 0x221E;
+ t['iniarmenian'] = 0x056B;
+ t['integral'] = 0x222B;
+ t['integralbottom'] = 0x2321;
+ t['integralbt'] = 0x2321;
+ t['integralex'] = 0xF8F5;
+ t['integraltop'] = 0x2320;
+ t['integraltp'] = 0x2320;
+ t['intersection'] = 0x2229;
+ t['intisquare'] = 0x3305;
+ t['invbullet'] = 0x25D8;
+ t['invcircle'] = 0x25D9;
+ t['invsmileface'] = 0x263B;
+ t['iocyrillic'] = 0x0451;
+ t['iogonek'] = 0x012F;
+ t['iota'] = 0x03B9;
+ t['iotadieresis'] = 0x03CA;
+ t['iotadieresistonos'] = 0x0390;
+ t['iotalatin'] = 0x0269;
+ t['iotatonos'] = 0x03AF;
+ t['iparen'] = 0x24A4;
+ t['irigurmukhi'] = 0x0A72;
+ t['ismallhiragana'] = 0x3043;
+ t['ismallkatakana'] = 0x30A3;
+ t['ismallkatakanahalfwidth'] = 0xFF68;
+ t['issharbengali'] = 0x09FA;
+ t['istroke'] = 0x0268;
+ t['isuperior'] = 0xF6ED;
+ t['iterationhiragana'] = 0x309D;
+ t['iterationkatakana'] = 0x30FD;
+ t['itilde'] = 0x0129;
+ t['itildebelow'] = 0x1E2D;
+ t['iubopomofo'] = 0x3129;
+ t['iucyrillic'] = 0x044E;
+ t['ivowelsignbengali'] = 0x09BF;
+ t['ivowelsigndeva'] = 0x093F;
+ t['ivowelsigngujarati'] = 0x0ABF;
+ t['izhitsacyrillic'] = 0x0475;
+ t['izhitsadblgravecyrillic'] = 0x0477;
+ t['j'] = 0x006A;
+ t['jaarmenian'] = 0x0571;
+ t['jabengali'] = 0x099C;
+ t['jadeva'] = 0x091C;
+ t['jagujarati'] = 0x0A9C;
+ t['jagurmukhi'] = 0x0A1C;
+ t['jbopomofo'] = 0x3110;
+ t['jcaron'] = 0x01F0;
+ t['jcircle'] = 0x24D9;
+ t['jcircumflex'] = 0x0135;
+ t['jcrossedtail'] = 0x029D;
+ t['jdotlessstroke'] = 0x025F;
+ t['jecyrillic'] = 0x0458;
+ t['jeemarabic'] = 0x062C;
+ t['jeemfinalarabic'] = 0xFE9E;
+ t['jeeminitialarabic'] = 0xFE9F;
+ t['jeemmedialarabic'] = 0xFEA0;
+ t['jeharabic'] = 0x0698;
+ t['jehfinalarabic'] = 0xFB8B;
+ t['jhabengali'] = 0x099D;
+ t['jhadeva'] = 0x091D;
+ t['jhagujarati'] = 0x0A9D;
+ t['jhagurmukhi'] = 0x0A1D;
+ t['jheharmenian'] = 0x057B;
+ t['jis'] = 0x3004;
+ t['jmonospace'] = 0xFF4A;
+ t['jparen'] = 0x24A5;
+ t['jsuperior'] = 0x02B2;
+ t['k'] = 0x006B;
+ t['kabashkircyrillic'] = 0x04A1;
+ t['kabengali'] = 0x0995;
+ t['kacute'] = 0x1E31;
+ t['kacyrillic'] = 0x043A;
+ t['kadescendercyrillic'] = 0x049B;
+ t['kadeva'] = 0x0915;
+ t['kaf'] = 0x05DB;
+ t['kafarabic'] = 0x0643;
+ t['kafdagesh'] = 0xFB3B;
+ t['kafdageshhebrew'] = 0xFB3B;
+ t['kaffinalarabic'] = 0xFEDA;
+ t['kafhebrew'] = 0x05DB;
+ t['kafinitialarabic'] = 0xFEDB;
+ t['kafmedialarabic'] = 0xFEDC;
+ t['kafrafehebrew'] = 0xFB4D;
+ t['kagujarati'] = 0x0A95;
+ t['kagurmukhi'] = 0x0A15;
+ t['kahiragana'] = 0x304B;
+ t['kahookcyrillic'] = 0x04C4;
+ t['kakatakana'] = 0x30AB;
+ t['kakatakanahalfwidth'] = 0xFF76;
+ t['kappa'] = 0x03BA;
+ t['kappasymbolgreek'] = 0x03F0;
+ t['kapyeounmieumkorean'] = 0x3171;
+ t['kapyeounphieuphkorean'] = 0x3184;
+ t['kapyeounpieupkorean'] = 0x3178;
+ t['kapyeounssangpieupkorean'] = 0x3179;
+ t['karoriisquare'] = 0x330D;
+ t['kashidaautoarabic'] = 0x0640;
+ t['kashidaautonosidebearingarabic'] = 0x0640;
+ t['kasmallkatakana'] = 0x30F5;
+ t['kasquare'] = 0x3384;
+ t['kasraarabic'] = 0x0650;
+ t['kasratanarabic'] = 0x064D;
+ t['kastrokecyrillic'] = 0x049F;
+ t['katahiraprolongmarkhalfwidth'] = 0xFF70;
+ t['kaverticalstrokecyrillic'] = 0x049D;
+ t['kbopomofo'] = 0x310E;
+ t['kcalsquare'] = 0x3389;
+ t['kcaron'] = 0x01E9;
+ t['kcedilla'] = 0x0137;
+ t['kcircle'] = 0x24DA;
+ t['kcommaaccent'] = 0x0137;
+ t['kdotbelow'] = 0x1E33;
+ t['keharmenian'] = 0x0584;
+ t['kehiragana'] = 0x3051;
+ t['kekatakana'] = 0x30B1;
+ t['kekatakanahalfwidth'] = 0xFF79;
+ t['kenarmenian'] = 0x056F;
+ t['kesmallkatakana'] = 0x30F6;
+ t['kgreenlandic'] = 0x0138;
+ t['khabengali'] = 0x0996;
+ t['khacyrillic'] = 0x0445;
+ t['khadeva'] = 0x0916;
+ t['khagujarati'] = 0x0A96;
+ t['khagurmukhi'] = 0x0A16;
+ t['khaharabic'] = 0x062E;
+ t['khahfinalarabic'] = 0xFEA6;
+ t['khahinitialarabic'] = 0xFEA7;
+ t['khahmedialarabic'] = 0xFEA8;
+ t['kheicoptic'] = 0x03E7;
+ t['khhadeva'] = 0x0959;
+ t['khhagurmukhi'] = 0x0A59;
+ t['khieukhacirclekorean'] = 0x3278;
+ t['khieukhaparenkorean'] = 0x3218;
+ t['khieukhcirclekorean'] = 0x326A;
+ t['khieukhkorean'] = 0x314B;
+ t['khieukhparenkorean'] = 0x320A;
+ t['khokhaithai'] = 0x0E02;
+ t['khokhonthai'] = 0x0E05;
+ t['khokhuatthai'] = 0x0E03;
+ t['khokhwaithai'] = 0x0E04;
+ t['khomutthai'] = 0x0E5B;
+ t['khook'] = 0x0199;
+ t['khorakhangthai'] = 0x0E06;
+ t['khzsquare'] = 0x3391;
+ t['kihiragana'] = 0x304D;
+ t['kikatakana'] = 0x30AD;
+ t['kikatakanahalfwidth'] = 0xFF77;
+ t['kiroguramusquare'] = 0x3315;
+ t['kiromeetorusquare'] = 0x3316;
+ t['kirosquare'] = 0x3314;
+ t['kiyeokacirclekorean'] = 0x326E;
+ t['kiyeokaparenkorean'] = 0x320E;
+ t['kiyeokcirclekorean'] = 0x3260;
+ t['kiyeokkorean'] = 0x3131;
+ t['kiyeokparenkorean'] = 0x3200;
+ t['kiyeoksioskorean'] = 0x3133;
+ t['kjecyrillic'] = 0x045C;
+ t['klinebelow'] = 0x1E35;
+ t['klsquare'] = 0x3398;
+ t['kmcubedsquare'] = 0x33A6;
+ t['kmonospace'] = 0xFF4B;
+ t['kmsquaredsquare'] = 0x33A2;
+ t['kohiragana'] = 0x3053;
+ t['kohmsquare'] = 0x33C0;
+ t['kokaithai'] = 0x0E01;
+ t['kokatakana'] = 0x30B3;
+ t['kokatakanahalfwidth'] = 0xFF7A;
+ t['kooposquare'] = 0x331E;
+ t['koppacyrillic'] = 0x0481;
+ t['koreanstandardsymbol'] = 0x327F;
+ t['koroniscmb'] = 0x0343;
+ t['kparen'] = 0x24A6;
+ t['kpasquare'] = 0x33AA;
+ t['ksicyrillic'] = 0x046F;
+ t['ktsquare'] = 0x33CF;
+ t['kturned'] = 0x029E;
+ t['kuhiragana'] = 0x304F;
+ t['kukatakana'] = 0x30AF;
+ t['kukatakanahalfwidth'] = 0xFF78;
+ t['kvsquare'] = 0x33B8;
+ t['kwsquare'] = 0x33BE;
+ t['l'] = 0x006C;
+ t['labengali'] = 0x09B2;
+ t['lacute'] = 0x013A;
+ t['ladeva'] = 0x0932;
+ t['lagujarati'] = 0x0AB2;
+ t['lagurmukhi'] = 0x0A32;
+ t['lakkhangyaothai'] = 0x0E45;
+ t['lamaleffinalarabic'] = 0xFEFC;
+ t['lamalefhamzaabovefinalarabic'] = 0xFEF8;
+ t['lamalefhamzaaboveisolatedarabic'] = 0xFEF7;
+ t['lamalefhamzabelowfinalarabic'] = 0xFEFA;
+ t['lamalefhamzabelowisolatedarabic'] = 0xFEF9;
+ t['lamalefisolatedarabic'] = 0xFEFB;
+ t['lamalefmaddaabovefinalarabic'] = 0xFEF6;
+ t['lamalefmaddaaboveisolatedarabic'] = 0xFEF5;
+ t['lamarabic'] = 0x0644;
+ t['lambda'] = 0x03BB;
+ t['lambdastroke'] = 0x019B;
+ t['lamed'] = 0x05DC;
+ t['lameddagesh'] = 0xFB3C;
+ t['lameddageshhebrew'] = 0xFB3C;
+ t['lamedhebrew'] = 0x05DC;
+ t['lamfinalarabic'] = 0xFEDE;
+ t['lamhahinitialarabic'] = 0xFCCA;
+ t['laminitialarabic'] = 0xFEDF;
+ t['lamjeeminitialarabic'] = 0xFCC9;
+ t['lamkhahinitialarabic'] = 0xFCCB;
+ t['lamlamhehisolatedarabic'] = 0xFDF2;
+ t['lammedialarabic'] = 0xFEE0;
+ t['lammeemhahinitialarabic'] = 0xFD88;
+ t['lammeeminitialarabic'] = 0xFCCC;
+ t['largecircle'] = 0x25EF;
+ t['lbar'] = 0x019A;
+ t['lbelt'] = 0x026C;
+ t['lbopomofo'] = 0x310C;
+ t['lcaron'] = 0x013E;
+ t['lcedilla'] = 0x013C;
+ t['lcircle'] = 0x24DB;
+ t['lcircumflexbelow'] = 0x1E3D;
+ t['lcommaaccent'] = 0x013C;
+ t['ldot'] = 0x0140;
+ t['ldotaccent'] = 0x0140;
+ t['ldotbelow'] = 0x1E37;
+ t['ldotbelowmacron'] = 0x1E39;
+ t['leftangleabovecmb'] = 0x031A;
+ t['lefttackbelowcmb'] = 0x0318;
+ t['less'] = 0x003C;
+ t['lessequal'] = 0x2264;
+ t['lessequalorgreater'] = 0x22DA;
+ t['lessmonospace'] = 0xFF1C;
+ t['lessorequivalent'] = 0x2272;
+ t['lessorgreater'] = 0x2276;
+ t['lessoverequal'] = 0x2266;
+ t['lesssmall'] = 0xFE64;
+ t['lezh'] = 0x026E;
+ t['lfblock'] = 0x258C;
+ t['lhookretroflex'] = 0x026D;
+ t['lira'] = 0x20A4;
+ t['liwnarmenian'] = 0x056C;
+ t['lj'] = 0x01C9;
+ t['ljecyrillic'] = 0x0459;
+ t['ll'] = 0xF6C0;
+ t['lladeva'] = 0x0933;
+ t['llagujarati'] = 0x0AB3;
+ t['llinebelow'] = 0x1E3B;
+ t['llladeva'] = 0x0934;
+ t['llvocalicbengali'] = 0x09E1;
+ t['llvocalicdeva'] = 0x0961;
+ t['llvocalicvowelsignbengali'] = 0x09E3;
+ t['llvocalicvowelsigndeva'] = 0x0963;
+ t['lmiddletilde'] = 0x026B;
+ t['lmonospace'] = 0xFF4C;
+ t['lmsquare'] = 0x33D0;
+ t['lochulathai'] = 0x0E2C;
+ t['logicaland'] = 0x2227;
+ t['logicalnot'] = 0x00AC;
+ t['logicalnotreversed'] = 0x2310;
+ t['logicalor'] = 0x2228;
+ t['lolingthai'] = 0x0E25;
+ t['longs'] = 0x017F;
+ t['lowlinecenterline'] = 0xFE4E;
+ t['lowlinecmb'] = 0x0332;
+ t['lowlinedashed'] = 0xFE4D;
+ t['lozenge'] = 0x25CA;
+ t['lparen'] = 0x24A7;
+ t['lslash'] = 0x0142;
+ t['lsquare'] = 0x2113;
+ t['lsuperior'] = 0xF6EE;
+ t['ltshade'] = 0x2591;
+ t['luthai'] = 0x0E26;
+ t['lvocalicbengali'] = 0x098C;
+ t['lvocalicdeva'] = 0x090C;
+ t['lvocalicvowelsignbengali'] = 0x09E2;
+ t['lvocalicvowelsigndeva'] = 0x0962;
+ t['lxsquare'] = 0x33D3;
+ t['m'] = 0x006D;
+ t['mabengali'] = 0x09AE;
+ t['macron'] = 0x00AF;
+ t['macronbelowcmb'] = 0x0331;
+ t['macroncmb'] = 0x0304;
+ t['macronlowmod'] = 0x02CD;
+ t['macronmonospace'] = 0xFFE3;
+ t['macute'] = 0x1E3F;
+ t['madeva'] = 0x092E;
+ t['magujarati'] = 0x0AAE;
+ t['magurmukhi'] = 0x0A2E;
+ t['mahapakhhebrew'] = 0x05A4;
+ t['mahapakhlefthebrew'] = 0x05A4;
+ t['mahiragana'] = 0x307E;
+ t['maichattawalowleftthai'] = 0xF895;
+ t['maichattawalowrightthai'] = 0xF894;
+ t['maichattawathai'] = 0x0E4B;
+ t['maichattawaupperleftthai'] = 0xF893;
+ t['maieklowleftthai'] = 0xF88C;
+ t['maieklowrightthai'] = 0xF88B;
+ t['maiekthai'] = 0x0E48;
+ t['maiekupperleftthai'] = 0xF88A;
+ t['maihanakatleftthai'] = 0xF884;
+ t['maihanakatthai'] = 0x0E31;
+ t['maitaikhuleftthai'] = 0xF889;
+ t['maitaikhuthai'] = 0x0E47;
+ t['maitholowleftthai'] = 0xF88F;
+ t['maitholowrightthai'] = 0xF88E;
+ t['maithothai'] = 0x0E49;
+ t['maithoupperleftthai'] = 0xF88D;
+ t['maitrilowleftthai'] = 0xF892;
+ t['maitrilowrightthai'] = 0xF891;
+ t['maitrithai'] = 0x0E4A;
+ t['maitriupperleftthai'] = 0xF890;
+ t['maiyamokthai'] = 0x0E46;
+ t['makatakana'] = 0x30DE;
+ t['makatakanahalfwidth'] = 0xFF8F;
+ t['male'] = 0x2642;
+ t['mansyonsquare'] = 0x3347;
+ t['maqafhebrew'] = 0x05BE;
+ t['mars'] = 0x2642;
+ t['masoracirclehebrew'] = 0x05AF;
+ t['masquare'] = 0x3383;
+ t['mbopomofo'] = 0x3107;
+ t['mbsquare'] = 0x33D4;
+ t['mcircle'] = 0x24DC;
+ t['mcubedsquare'] = 0x33A5;
+ t['mdotaccent'] = 0x1E41;
+ t['mdotbelow'] = 0x1E43;
+ t['meemarabic'] = 0x0645;
+ t['meemfinalarabic'] = 0xFEE2;
+ t['meeminitialarabic'] = 0xFEE3;
+ t['meemmedialarabic'] = 0xFEE4;
+ t['meemmeeminitialarabic'] = 0xFCD1;
+ t['meemmeemisolatedarabic'] = 0xFC48;
+ t['meetorusquare'] = 0x334D;
+ t['mehiragana'] = 0x3081;
+ t['meizierasquare'] = 0x337E;
+ t['mekatakana'] = 0x30E1;
+ t['mekatakanahalfwidth'] = 0xFF92;
+ t['mem'] = 0x05DE;
+ t['memdagesh'] = 0xFB3E;
+ t['memdageshhebrew'] = 0xFB3E;
+ t['memhebrew'] = 0x05DE;
+ t['menarmenian'] = 0x0574;
+ t['merkhahebrew'] = 0x05A5;
+ t['merkhakefulahebrew'] = 0x05A6;
+ t['merkhakefulalefthebrew'] = 0x05A6;
+ t['merkhalefthebrew'] = 0x05A5;
+ t['mhook'] = 0x0271;
+ t['mhzsquare'] = 0x3392;
+ t['middledotkatakanahalfwidth'] = 0xFF65;
+ t['middot'] = 0x00B7;
+ t['mieumacirclekorean'] = 0x3272;
+ t['mieumaparenkorean'] = 0x3212;
+ t['mieumcirclekorean'] = 0x3264;
+ t['mieumkorean'] = 0x3141;
+ t['mieumpansioskorean'] = 0x3170;
+ t['mieumparenkorean'] = 0x3204;
+ t['mieumpieupkorean'] = 0x316E;
+ t['mieumsioskorean'] = 0x316F;
+ t['mihiragana'] = 0x307F;
+ t['mikatakana'] = 0x30DF;
+ t['mikatakanahalfwidth'] = 0xFF90;
+ t['minus'] = 0x2212;
+ t['minusbelowcmb'] = 0x0320;
+ t['minuscircle'] = 0x2296;
+ t['minusmod'] = 0x02D7;
+ t['minusplus'] = 0x2213;
+ t['minute'] = 0x2032;
+ t['miribaarusquare'] = 0x334A;
+ t['mirisquare'] = 0x3349;
+ t['mlonglegturned'] = 0x0270;
+ t['mlsquare'] = 0x3396;
+ t['mmcubedsquare'] = 0x33A3;
+ t['mmonospace'] = 0xFF4D;
+ t['mmsquaredsquare'] = 0x339F;
+ t['mohiragana'] = 0x3082;
+ t['mohmsquare'] = 0x33C1;
+ t['mokatakana'] = 0x30E2;
+ t['mokatakanahalfwidth'] = 0xFF93;
+ t['molsquare'] = 0x33D6;
+ t['momathai'] = 0x0E21;
+ t['moverssquare'] = 0x33A7;
+ t['moverssquaredsquare'] = 0x33A8;
+ t['mparen'] = 0x24A8;
+ t['mpasquare'] = 0x33AB;
+ t['mssquare'] = 0x33B3;
+ t['msuperior'] = 0xF6EF;
+ t['mturned'] = 0x026F;
+ t['mu'] = 0x00B5;
+ t['mu1'] = 0x00B5;
+ t['muasquare'] = 0x3382;
+ t['muchgreater'] = 0x226B;
+ t['muchless'] = 0x226A;
+ t['mufsquare'] = 0x338C;
+ t['mugreek'] = 0x03BC;
+ t['mugsquare'] = 0x338D;
+ t['muhiragana'] = 0x3080;
+ t['mukatakana'] = 0x30E0;
+ t['mukatakanahalfwidth'] = 0xFF91;
+ t['mulsquare'] = 0x3395;
+ t['multiply'] = 0x00D7;
+ t['mumsquare'] = 0x339B;
+ t['munahhebrew'] = 0x05A3;
+ t['munahlefthebrew'] = 0x05A3;
+ t['musicalnote'] = 0x266A;
+ t['musicalnotedbl'] = 0x266B;
+ t['musicflatsign'] = 0x266D;
+ t['musicsharpsign'] = 0x266F;
+ t['mussquare'] = 0x33B2;
+ t['muvsquare'] = 0x33B6;
+ t['muwsquare'] = 0x33BC;
+ t['mvmegasquare'] = 0x33B9;
+ t['mvsquare'] = 0x33B7;
+ t['mwmegasquare'] = 0x33BF;
+ t['mwsquare'] = 0x33BD;
+ t['n'] = 0x006E;
+ t['nabengali'] = 0x09A8;
+ t['nabla'] = 0x2207;
+ t['nacute'] = 0x0144;
+ t['nadeva'] = 0x0928;
+ t['nagujarati'] = 0x0AA8;
+ t['nagurmukhi'] = 0x0A28;
+ t['nahiragana'] = 0x306A;
+ t['nakatakana'] = 0x30CA;
+ t['nakatakanahalfwidth'] = 0xFF85;
+ t['napostrophe'] = 0x0149;
+ t['nasquare'] = 0x3381;
+ t['nbopomofo'] = 0x310B;
+ t['nbspace'] = 0x00A0;
+ t['ncaron'] = 0x0148;
+ t['ncedilla'] = 0x0146;
+ t['ncircle'] = 0x24DD;
+ t['ncircumflexbelow'] = 0x1E4B;
+ t['ncommaaccent'] = 0x0146;
+ t['ndotaccent'] = 0x1E45;
+ t['ndotbelow'] = 0x1E47;
+ t['nehiragana'] = 0x306D;
+ t['nekatakana'] = 0x30CD;
+ t['nekatakanahalfwidth'] = 0xFF88;
+ t['newsheqelsign'] = 0x20AA;
+ t['nfsquare'] = 0x338B;
+ t['ngabengali'] = 0x0999;
+ t['ngadeva'] = 0x0919;
+ t['ngagujarati'] = 0x0A99;
+ t['ngagurmukhi'] = 0x0A19;
+ t['ngonguthai'] = 0x0E07;
+ t['nhiragana'] = 0x3093;
+ t['nhookleft'] = 0x0272;
+ t['nhookretroflex'] = 0x0273;
+ t['nieunacirclekorean'] = 0x326F;
+ t['nieunaparenkorean'] = 0x320F;
+ t['nieuncieuckorean'] = 0x3135;
+ t['nieuncirclekorean'] = 0x3261;
+ t['nieunhieuhkorean'] = 0x3136;
+ t['nieunkorean'] = 0x3134;
+ t['nieunpansioskorean'] = 0x3168;
+ t['nieunparenkorean'] = 0x3201;
+ t['nieunsioskorean'] = 0x3167;
+ t['nieuntikeutkorean'] = 0x3166;
+ t['nihiragana'] = 0x306B;
+ t['nikatakana'] = 0x30CB;
+ t['nikatakanahalfwidth'] = 0xFF86;
+ t['nikhahitleftthai'] = 0xF899;
+ t['nikhahitthai'] = 0x0E4D;
+ t['nine'] = 0x0039;
+ t['ninearabic'] = 0x0669;
+ t['ninebengali'] = 0x09EF;
+ t['ninecircle'] = 0x2468;
+ t['ninecircleinversesansserif'] = 0x2792;
+ t['ninedeva'] = 0x096F;
+ t['ninegujarati'] = 0x0AEF;
+ t['ninegurmukhi'] = 0x0A6F;
+ t['ninehackarabic'] = 0x0669;
+ t['ninehangzhou'] = 0x3029;
+ t['nineideographicparen'] = 0x3228;
+ t['nineinferior'] = 0x2089;
+ t['ninemonospace'] = 0xFF19;
+ t['nineoldstyle'] = 0xF739;
+ t['nineparen'] = 0x247C;
+ t['nineperiod'] = 0x2490;
+ t['ninepersian'] = 0x06F9;
+ t['nineroman'] = 0x2178;
+ t['ninesuperior'] = 0x2079;
+ t['nineteencircle'] = 0x2472;
+ t['nineteenparen'] = 0x2486;
+ t['nineteenperiod'] = 0x249A;
+ t['ninethai'] = 0x0E59;
+ t['nj'] = 0x01CC;
+ t['njecyrillic'] = 0x045A;
+ t['nkatakana'] = 0x30F3;
+ t['nkatakanahalfwidth'] = 0xFF9D;
+ t['nlegrightlong'] = 0x019E;
+ t['nlinebelow'] = 0x1E49;
+ t['nmonospace'] = 0xFF4E;
+ t['nmsquare'] = 0x339A;
+ t['nnabengali'] = 0x09A3;
+ t['nnadeva'] = 0x0923;
+ t['nnagujarati'] = 0x0AA3;
+ t['nnagurmukhi'] = 0x0A23;
+ t['nnnadeva'] = 0x0929;
+ t['nohiragana'] = 0x306E;
+ t['nokatakana'] = 0x30CE;
+ t['nokatakanahalfwidth'] = 0xFF89;
+ t['nonbreakingspace'] = 0x00A0;
+ t['nonenthai'] = 0x0E13;
+ t['nonuthai'] = 0x0E19;
+ t['noonarabic'] = 0x0646;
+ t['noonfinalarabic'] = 0xFEE6;
+ t['noonghunnaarabic'] = 0x06BA;
+ t['noonghunnafinalarabic'] = 0xFB9F;
+ t['nooninitialarabic'] = 0xFEE7;
+ t['noonjeeminitialarabic'] = 0xFCD2;
+ t['noonjeemisolatedarabic'] = 0xFC4B;
+ t['noonmedialarabic'] = 0xFEE8;
+ t['noonmeeminitialarabic'] = 0xFCD5;
+ t['noonmeemisolatedarabic'] = 0xFC4E;
+ t['noonnoonfinalarabic'] = 0xFC8D;
+ t['notcontains'] = 0x220C;
+ t['notelement'] = 0x2209;
+ t['notelementof'] = 0x2209;
+ t['notequal'] = 0x2260;
+ t['notgreater'] = 0x226F;
+ t['notgreaternorequal'] = 0x2271;
+ t['notgreaternorless'] = 0x2279;
+ t['notidentical'] = 0x2262;
+ t['notless'] = 0x226E;
+ t['notlessnorequal'] = 0x2270;
+ t['notparallel'] = 0x2226;
+ t['notprecedes'] = 0x2280;
+ t['notsubset'] = 0x2284;
+ t['notsucceeds'] = 0x2281;
+ t['notsuperset'] = 0x2285;
+ t['nowarmenian'] = 0x0576;
+ t['nparen'] = 0x24A9;
+ t['nssquare'] = 0x33B1;
+ t['nsuperior'] = 0x207F;
+ t['ntilde'] = 0x00F1;
+ t['nu'] = 0x03BD;
+ t['nuhiragana'] = 0x306C;
+ t['nukatakana'] = 0x30CC;
+ t['nukatakanahalfwidth'] = 0xFF87;
+ t['nuktabengali'] = 0x09BC;
+ t['nuktadeva'] = 0x093C;
+ t['nuktagujarati'] = 0x0ABC;
+ t['nuktagurmukhi'] = 0x0A3C;
+ t['numbersign'] = 0x0023;
+ t['numbersignmonospace'] = 0xFF03;
+ t['numbersignsmall'] = 0xFE5F;
+ t['numeralsigngreek'] = 0x0374;
+ t['numeralsignlowergreek'] = 0x0375;
+ t['numero'] = 0x2116;
+ t['nun'] = 0x05E0;
+ t['nundagesh'] = 0xFB40;
+ t['nundageshhebrew'] = 0xFB40;
+ t['nunhebrew'] = 0x05E0;
+ t['nvsquare'] = 0x33B5;
+ t['nwsquare'] = 0x33BB;
+ t['nyabengali'] = 0x099E;
+ t['nyadeva'] = 0x091E;
+ t['nyagujarati'] = 0x0A9E;
+ t['nyagurmukhi'] = 0x0A1E;
+ t['o'] = 0x006F;
+ t['oacute'] = 0x00F3;
+ t['oangthai'] = 0x0E2D;
+ t['obarred'] = 0x0275;
+ t['obarredcyrillic'] = 0x04E9;
+ t['obarreddieresiscyrillic'] = 0x04EB;
+ t['obengali'] = 0x0993;
+ t['obopomofo'] = 0x311B;
+ t['obreve'] = 0x014F;
+ t['ocandradeva'] = 0x0911;
+ t['ocandragujarati'] = 0x0A91;
+ t['ocandravowelsigndeva'] = 0x0949;
+ t['ocandravowelsigngujarati'] = 0x0AC9;
+ t['ocaron'] = 0x01D2;
+ t['ocircle'] = 0x24DE;
+ t['ocircumflex'] = 0x00F4;
+ t['ocircumflexacute'] = 0x1ED1;
+ t['ocircumflexdotbelow'] = 0x1ED9;
+ t['ocircumflexgrave'] = 0x1ED3;
+ t['ocircumflexhookabove'] = 0x1ED5;
+ t['ocircumflextilde'] = 0x1ED7;
+ t['ocyrillic'] = 0x043E;
+ t['odblacute'] = 0x0151;
+ t['odblgrave'] = 0x020D;
+ t['odeva'] = 0x0913;
+ t['odieresis'] = 0x00F6;
+ t['odieresiscyrillic'] = 0x04E7;
+ t['odotbelow'] = 0x1ECD;
+ t['oe'] = 0x0153;
+ t['oekorean'] = 0x315A;
+ t['ogonek'] = 0x02DB;
+ t['ogonekcmb'] = 0x0328;
+ t['ograve'] = 0x00F2;
+ t['ogujarati'] = 0x0A93;
+ t['oharmenian'] = 0x0585;
+ t['ohiragana'] = 0x304A;
+ t['ohookabove'] = 0x1ECF;
+ t['ohorn'] = 0x01A1;
+ t['ohornacute'] = 0x1EDB;
+ t['ohorndotbelow'] = 0x1EE3;
+ t['ohorngrave'] = 0x1EDD;
+ t['ohornhookabove'] = 0x1EDF;
+ t['ohorntilde'] = 0x1EE1;
+ t['ohungarumlaut'] = 0x0151;
+ t['oi'] = 0x01A3;
+ t['oinvertedbreve'] = 0x020F;
+ t['okatakana'] = 0x30AA;
+ t['okatakanahalfwidth'] = 0xFF75;
+ t['okorean'] = 0x3157;
+ t['olehebrew'] = 0x05AB;
+ t['omacron'] = 0x014D;
+ t['omacronacute'] = 0x1E53;
+ t['omacrongrave'] = 0x1E51;
+ t['omdeva'] = 0x0950;
+ t['omega'] = 0x03C9;
+ t['omega1'] = 0x03D6;
+ t['omegacyrillic'] = 0x0461;
+ t['omegalatinclosed'] = 0x0277;
+ t['omegaroundcyrillic'] = 0x047B;
+ t['omegatitlocyrillic'] = 0x047D;
+ t['omegatonos'] = 0x03CE;
+ t['omgujarati'] = 0x0AD0;
+ t['omicron'] = 0x03BF;
+ t['omicrontonos'] = 0x03CC;
+ t['omonospace'] = 0xFF4F;
+ t['one'] = 0x0031;
+ t['onearabic'] = 0x0661;
+ t['onebengali'] = 0x09E7;
+ t['onecircle'] = 0x2460;
+ t['onecircleinversesansserif'] = 0x278A;
+ t['onedeva'] = 0x0967;
+ t['onedotenleader'] = 0x2024;
+ t['oneeighth'] = 0x215B;
+ t['onefitted'] = 0xF6DC;
+ t['onegujarati'] = 0x0AE7;
+ t['onegurmukhi'] = 0x0A67;
+ t['onehackarabic'] = 0x0661;
+ t['onehalf'] = 0x00BD;
+ t['onehangzhou'] = 0x3021;
+ t['oneideographicparen'] = 0x3220;
+ t['oneinferior'] = 0x2081;
+ t['onemonospace'] = 0xFF11;
+ t['onenumeratorbengali'] = 0x09F4;
+ t['oneoldstyle'] = 0xF731;
+ t['oneparen'] = 0x2474;
+ t['oneperiod'] = 0x2488;
+ t['onepersian'] = 0x06F1;
+ t['onequarter'] = 0x00BC;
+ t['oneroman'] = 0x2170;
+ t['onesuperior'] = 0x00B9;
+ t['onethai'] = 0x0E51;
+ t['onethird'] = 0x2153;
+ t['oogonek'] = 0x01EB;
+ t['oogonekmacron'] = 0x01ED;
+ t['oogurmukhi'] = 0x0A13;
+ t['oomatragurmukhi'] = 0x0A4B;
+ t['oopen'] = 0x0254;
+ t['oparen'] = 0x24AA;
+ t['openbullet'] = 0x25E6;
+ t['option'] = 0x2325;
+ t['ordfeminine'] = 0x00AA;
+ t['ordmasculine'] = 0x00BA;
+ t['orthogonal'] = 0x221F;
+ t['oshortdeva'] = 0x0912;
+ t['oshortvowelsigndeva'] = 0x094A;
+ t['oslash'] = 0x00F8;
+ t['oslashacute'] = 0x01FF;
+ t['osmallhiragana'] = 0x3049;
+ t['osmallkatakana'] = 0x30A9;
+ t['osmallkatakanahalfwidth'] = 0xFF6B;
+ t['ostrokeacute'] = 0x01FF;
+ t['osuperior'] = 0xF6F0;
+ t['otcyrillic'] = 0x047F;
+ t['otilde'] = 0x00F5;
+ t['otildeacute'] = 0x1E4D;
+ t['otildedieresis'] = 0x1E4F;
+ t['oubopomofo'] = 0x3121;
+ t['overline'] = 0x203E;
+ t['overlinecenterline'] = 0xFE4A;
+ t['overlinecmb'] = 0x0305;
+ t['overlinedashed'] = 0xFE49;
+ t['overlinedblwavy'] = 0xFE4C;
+ t['overlinewavy'] = 0xFE4B;
+ t['overscore'] = 0x00AF;
+ t['ovowelsignbengali'] = 0x09CB;
+ t['ovowelsigndeva'] = 0x094B;
+ t['ovowelsigngujarati'] = 0x0ACB;
+ t['p'] = 0x0070;
+ t['paampssquare'] = 0x3380;
+ t['paasentosquare'] = 0x332B;
+ t['pabengali'] = 0x09AA;
+ t['pacute'] = 0x1E55;
+ t['padeva'] = 0x092A;
+ t['pagedown'] = 0x21DF;
+ t['pageup'] = 0x21DE;
+ t['pagujarati'] = 0x0AAA;
+ t['pagurmukhi'] = 0x0A2A;
+ t['pahiragana'] = 0x3071;
+ t['paiyannoithai'] = 0x0E2F;
+ t['pakatakana'] = 0x30D1;
+ t['palatalizationcyrilliccmb'] = 0x0484;
+ t['palochkacyrillic'] = 0x04C0;
+ t['pansioskorean'] = 0x317F;
+ t['paragraph'] = 0x00B6;
+ t['parallel'] = 0x2225;
+ t['parenleft'] = 0x0028;
+ t['parenleftaltonearabic'] = 0xFD3E;
+ t['parenleftbt'] = 0xF8ED;
+ t['parenleftex'] = 0xF8EC;
+ t['parenleftinferior'] = 0x208D;
+ t['parenleftmonospace'] = 0xFF08;
+ t['parenleftsmall'] = 0xFE59;
+ t['parenleftsuperior'] = 0x207D;
+ t['parenlefttp'] = 0xF8EB;
+ t['parenleftvertical'] = 0xFE35;
+ t['parenright'] = 0x0029;
+ t['parenrightaltonearabic'] = 0xFD3F;
+ t['parenrightbt'] = 0xF8F8;
+ t['parenrightex'] = 0xF8F7;
+ t['parenrightinferior'] = 0x208E;
+ t['parenrightmonospace'] = 0xFF09;
+ t['parenrightsmall'] = 0xFE5A;
+ t['parenrightsuperior'] = 0x207E;
+ t['parenrighttp'] = 0xF8F6;
+ t['parenrightvertical'] = 0xFE36;
+ t['partialdiff'] = 0x2202;
+ t['paseqhebrew'] = 0x05C0;
+ t['pashtahebrew'] = 0x0599;
+ t['pasquare'] = 0x33A9;
+ t['patah'] = 0x05B7;
+ t['patah11'] = 0x05B7;
+ t['patah1d'] = 0x05B7;
+ t['patah2a'] = 0x05B7;
+ t['patahhebrew'] = 0x05B7;
+ t['patahnarrowhebrew'] = 0x05B7;
+ t['patahquarterhebrew'] = 0x05B7;
+ t['patahwidehebrew'] = 0x05B7;
+ t['pazerhebrew'] = 0x05A1;
+ t['pbopomofo'] = 0x3106;
+ t['pcircle'] = 0x24DF;
+ t['pdotaccent'] = 0x1E57;
+ t['pe'] = 0x05E4;
+ t['pecyrillic'] = 0x043F;
+ t['pedagesh'] = 0xFB44;
+ t['pedageshhebrew'] = 0xFB44;
+ t['peezisquare'] = 0x333B;
+ t['pefinaldageshhebrew'] = 0xFB43;
+ t['peharabic'] = 0x067E;
+ t['peharmenian'] = 0x057A;
+ t['pehebrew'] = 0x05E4;
+ t['pehfinalarabic'] = 0xFB57;
+ t['pehinitialarabic'] = 0xFB58;
+ t['pehiragana'] = 0x307A;
+ t['pehmedialarabic'] = 0xFB59;
+ t['pekatakana'] = 0x30DA;
+ t['pemiddlehookcyrillic'] = 0x04A7;
+ t['perafehebrew'] = 0xFB4E;
+ t['percent'] = 0x0025;
+ t['percentarabic'] = 0x066A;
+ t['percentmonospace'] = 0xFF05;
+ t['percentsmall'] = 0xFE6A;
+ t['period'] = 0x002E;
+ t['periodarmenian'] = 0x0589;
+ t['periodcentered'] = 0x00B7;
+ t['periodhalfwidth'] = 0xFF61;
+ t['periodinferior'] = 0xF6E7;
+ t['periodmonospace'] = 0xFF0E;
+ t['periodsmall'] = 0xFE52;
+ t['periodsuperior'] = 0xF6E8;
+ t['perispomenigreekcmb'] = 0x0342;
+ t['perpendicular'] = 0x22A5;
+ t['perthousand'] = 0x2030;
+ t['peseta'] = 0x20A7;
+ t['pfsquare'] = 0x338A;
+ t['phabengali'] = 0x09AB;
+ t['phadeva'] = 0x092B;
+ t['phagujarati'] = 0x0AAB;
+ t['phagurmukhi'] = 0x0A2B;
+ t['phi'] = 0x03C6;
+ t['phi1'] = 0x03D5;
+ t['phieuphacirclekorean'] = 0x327A;
+ t['phieuphaparenkorean'] = 0x321A;
+ t['phieuphcirclekorean'] = 0x326C;
+ t['phieuphkorean'] = 0x314D;
+ t['phieuphparenkorean'] = 0x320C;
+ t['philatin'] = 0x0278;
+ t['phinthuthai'] = 0x0E3A;
+ t['phisymbolgreek'] = 0x03D5;
+ t['phook'] = 0x01A5;
+ t['phophanthai'] = 0x0E1E;
+ t['phophungthai'] = 0x0E1C;
+ t['phosamphaothai'] = 0x0E20;
+ t['pi'] = 0x03C0;
+ t['pieupacirclekorean'] = 0x3273;
+ t['pieupaparenkorean'] = 0x3213;
+ t['pieupcieuckorean'] = 0x3176;
+ t['pieupcirclekorean'] = 0x3265;
+ t['pieupkiyeokkorean'] = 0x3172;
+ t['pieupkorean'] = 0x3142;
+ t['pieupparenkorean'] = 0x3205;
+ t['pieupsioskiyeokkorean'] = 0x3174;
+ t['pieupsioskorean'] = 0x3144;
+ t['pieupsiostikeutkorean'] = 0x3175;
+ t['pieupthieuthkorean'] = 0x3177;
+ t['pieuptikeutkorean'] = 0x3173;
+ t['pihiragana'] = 0x3074;
+ t['pikatakana'] = 0x30D4;
+ t['pisymbolgreek'] = 0x03D6;
+ t['piwrarmenian'] = 0x0583;
+ t['plus'] = 0x002B;
+ t['plusbelowcmb'] = 0x031F;
+ t['pluscircle'] = 0x2295;
+ t['plusminus'] = 0x00B1;
+ t['plusmod'] = 0x02D6;
+ t['plusmonospace'] = 0xFF0B;
+ t['plussmall'] = 0xFE62;
+ t['plussuperior'] = 0x207A;
+ t['pmonospace'] = 0xFF50;
+ t['pmsquare'] = 0x33D8;
+ t['pohiragana'] = 0x307D;
+ t['pointingindexdownwhite'] = 0x261F;
+ t['pointingindexleftwhite'] = 0x261C;
+ t['pointingindexrightwhite'] = 0x261E;
+ t['pointingindexupwhite'] = 0x261D;
+ t['pokatakana'] = 0x30DD;
+ t['poplathai'] = 0x0E1B;
+ t['postalmark'] = 0x3012;
+ t['postalmarkface'] = 0x3020;
+ t['pparen'] = 0x24AB;
+ t['precedes'] = 0x227A;
+ t['prescription'] = 0x211E;
+ t['primemod'] = 0x02B9;
+ t['primereversed'] = 0x2035;
+ t['product'] = 0x220F;
+ t['projective'] = 0x2305;
+ t['prolongedkana'] = 0x30FC;
+ t['propellor'] = 0x2318;
+ t['propersubset'] = 0x2282;
+ t['propersuperset'] = 0x2283;
+ t['proportion'] = 0x2237;
+ t['proportional'] = 0x221D;
+ t['psi'] = 0x03C8;
+ t['psicyrillic'] = 0x0471;
+ t['psilipneumatacyrilliccmb'] = 0x0486;
+ t['pssquare'] = 0x33B0;
+ t['puhiragana'] = 0x3077;
+ t['pukatakana'] = 0x30D7;
+ t['pvsquare'] = 0x33B4;
+ t['pwsquare'] = 0x33BA;
+ t['q'] = 0x0071;
+ t['qadeva'] = 0x0958;
+ t['qadmahebrew'] = 0x05A8;
+ t['qafarabic'] = 0x0642;
+ t['qaffinalarabic'] = 0xFED6;
+ t['qafinitialarabic'] = 0xFED7;
+ t['qafmedialarabic'] = 0xFED8;
+ t['qamats'] = 0x05B8;
+ t['qamats10'] = 0x05B8;
+ t['qamats1a'] = 0x05B8;
+ t['qamats1c'] = 0x05B8;
+ t['qamats27'] = 0x05B8;
+ t['qamats29'] = 0x05B8;
+ t['qamats33'] = 0x05B8;
+ t['qamatsde'] = 0x05B8;
+ t['qamatshebrew'] = 0x05B8;
+ t['qamatsnarrowhebrew'] = 0x05B8;
+ t['qamatsqatanhebrew'] = 0x05B8;
+ t['qamatsqatannarrowhebrew'] = 0x05B8;
+ t['qamatsqatanquarterhebrew'] = 0x05B8;
+ t['qamatsqatanwidehebrew'] = 0x05B8;
+ t['qamatsquarterhebrew'] = 0x05B8;
+ t['qamatswidehebrew'] = 0x05B8;
+ t['qarneyparahebrew'] = 0x059F;
+ t['qbopomofo'] = 0x3111;
+ t['qcircle'] = 0x24E0;
+ t['qhook'] = 0x02A0;
+ t['qmonospace'] = 0xFF51;
+ t['qof'] = 0x05E7;
+ t['qofdagesh'] = 0xFB47;
+ t['qofdageshhebrew'] = 0xFB47;
+ t['qofhebrew'] = 0x05E7;
+ t['qparen'] = 0x24AC;
+ t['quarternote'] = 0x2669;
+ t['qubuts'] = 0x05BB;
+ t['qubuts18'] = 0x05BB;
+ t['qubuts25'] = 0x05BB;
+ t['qubuts31'] = 0x05BB;
+ t['qubutshebrew'] = 0x05BB;
+ t['qubutsnarrowhebrew'] = 0x05BB;
+ t['qubutsquarterhebrew'] = 0x05BB;
+ t['qubutswidehebrew'] = 0x05BB;
+ t['question'] = 0x003F;
+ t['questionarabic'] = 0x061F;
+ t['questionarmenian'] = 0x055E;
+ t['questiondown'] = 0x00BF;
+ t['questiondownsmall'] = 0xF7BF;
+ t['questiongreek'] = 0x037E;
+ t['questionmonospace'] = 0xFF1F;
+ t['questionsmall'] = 0xF73F;
+ t['quotedbl'] = 0x0022;
+ t['quotedblbase'] = 0x201E;
+ t['quotedblleft'] = 0x201C;
+ t['quotedblmonospace'] = 0xFF02;
+ t['quotedblprime'] = 0x301E;
+ t['quotedblprimereversed'] = 0x301D;
+ t['quotedblright'] = 0x201D;
+ t['quoteleft'] = 0x2018;
+ t['quoteleftreversed'] = 0x201B;
+ t['quotereversed'] = 0x201B;
+ t['quoteright'] = 0x2019;
+ t['quoterightn'] = 0x0149;
+ t['quotesinglbase'] = 0x201A;
+ t['quotesingle'] = 0x0027;
+ t['quotesinglemonospace'] = 0xFF07;
+ t['r'] = 0x0072;
+ t['raarmenian'] = 0x057C;
+ t['rabengali'] = 0x09B0;
+ t['racute'] = 0x0155;
+ t['radeva'] = 0x0930;
+ t['radical'] = 0x221A;
+ t['radicalex'] = 0xF8E5;
+ t['radoverssquare'] = 0x33AE;
+ t['radoverssquaredsquare'] = 0x33AF;
+ t['radsquare'] = 0x33AD;
+ t['rafe'] = 0x05BF;
+ t['rafehebrew'] = 0x05BF;
+ t['ragujarati'] = 0x0AB0;
+ t['ragurmukhi'] = 0x0A30;
+ t['rahiragana'] = 0x3089;
+ t['rakatakana'] = 0x30E9;
+ t['rakatakanahalfwidth'] = 0xFF97;
+ t['ralowerdiagonalbengali'] = 0x09F1;
+ t['ramiddlediagonalbengali'] = 0x09F0;
+ t['ramshorn'] = 0x0264;
+ t['ratio'] = 0x2236;
+ t['rbopomofo'] = 0x3116;
+ t['rcaron'] = 0x0159;
+ t['rcedilla'] = 0x0157;
+ t['rcircle'] = 0x24E1;
+ t['rcommaaccent'] = 0x0157;
+ t['rdblgrave'] = 0x0211;
+ t['rdotaccent'] = 0x1E59;
+ t['rdotbelow'] = 0x1E5B;
+ t['rdotbelowmacron'] = 0x1E5D;
+ t['referencemark'] = 0x203B;
+ t['reflexsubset'] = 0x2286;
+ t['reflexsuperset'] = 0x2287;
+ t['registered'] = 0x00AE;
+ t['registersans'] = 0xF8E8;
+ t['registerserif'] = 0xF6DA;
+ t['reharabic'] = 0x0631;
+ t['reharmenian'] = 0x0580;
+ t['rehfinalarabic'] = 0xFEAE;
+ t['rehiragana'] = 0x308C;
+ t['rekatakana'] = 0x30EC;
+ t['rekatakanahalfwidth'] = 0xFF9A;
+ t['resh'] = 0x05E8;
+ t['reshdageshhebrew'] = 0xFB48;
+ t['reshhebrew'] = 0x05E8;
+ t['reversedtilde'] = 0x223D;
+ t['reviahebrew'] = 0x0597;
+ t['reviamugrashhebrew'] = 0x0597;
+ t['revlogicalnot'] = 0x2310;
+ t['rfishhook'] = 0x027E;
+ t['rfishhookreversed'] = 0x027F;
+ t['rhabengali'] = 0x09DD;
+ t['rhadeva'] = 0x095D;
+ t['rho'] = 0x03C1;
+ t['rhook'] = 0x027D;
+ t['rhookturned'] = 0x027B;
+ t['rhookturnedsuperior'] = 0x02B5;
+ t['rhosymbolgreek'] = 0x03F1;
+ t['rhotichookmod'] = 0x02DE;
+ t['rieulacirclekorean'] = 0x3271;
+ t['rieulaparenkorean'] = 0x3211;
+ t['rieulcirclekorean'] = 0x3263;
+ t['rieulhieuhkorean'] = 0x3140;
+ t['rieulkiyeokkorean'] = 0x313A;
+ t['rieulkiyeoksioskorean'] = 0x3169;
+ t['rieulkorean'] = 0x3139;
+ t['rieulmieumkorean'] = 0x313B;
+ t['rieulpansioskorean'] = 0x316C;
+ t['rieulparenkorean'] = 0x3203;
+ t['rieulphieuphkorean'] = 0x313F;
+ t['rieulpieupkorean'] = 0x313C;
+ t['rieulpieupsioskorean'] = 0x316B;
+ t['rieulsioskorean'] = 0x313D;
+ t['rieulthieuthkorean'] = 0x313E;
+ t['rieultikeutkorean'] = 0x316A;
+ t['rieulyeorinhieuhkorean'] = 0x316D;
+ t['rightangle'] = 0x221F;
+ t['righttackbelowcmb'] = 0x0319;
+ t['righttriangle'] = 0x22BF;
+ t['rihiragana'] = 0x308A;
+ t['rikatakana'] = 0x30EA;
+ t['rikatakanahalfwidth'] = 0xFF98;
+ t['ring'] = 0x02DA;
+ t['ringbelowcmb'] = 0x0325;
+ t['ringcmb'] = 0x030A;
+ t['ringhalfleft'] = 0x02BF;
+ t['ringhalfleftarmenian'] = 0x0559;
+ t['ringhalfleftbelowcmb'] = 0x031C;
+ t['ringhalfleftcentered'] = 0x02D3;
+ t['ringhalfright'] = 0x02BE;
+ t['ringhalfrightbelowcmb'] = 0x0339;
+ t['ringhalfrightcentered'] = 0x02D2;
+ t['rinvertedbreve'] = 0x0213;
+ t['rittorusquare'] = 0x3351;
+ t['rlinebelow'] = 0x1E5F;
+ t['rlongleg'] = 0x027C;
+ t['rlonglegturned'] = 0x027A;
+ t['rmonospace'] = 0xFF52;
+ t['rohiragana'] = 0x308D;
+ t['rokatakana'] = 0x30ED;
+ t['rokatakanahalfwidth'] = 0xFF9B;
+ t['roruathai'] = 0x0E23;
+ t['rparen'] = 0x24AD;
+ t['rrabengali'] = 0x09DC;
+ t['rradeva'] = 0x0931;
+ t['rragurmukhi'] = 0x0A5C;
+ t['rreharabic'] = 0x0691;
+ t['rrehfinalarabic'] = 0xFB8D;
+ t['rrvocalicbengali'] = 0x09E0;
+ t['rrvocalicdeva'] = 0x0960;
+ t['rrvocalicgujarati'] = 0x0AE0;
+ t['rrvocalicvowelsignbengali'] = 0x09C4;
+ t['rrvocalicvowelsigndeva'] = 0x0944;
+ t['rrvocalicvowelsigngujarati'] = 0x0AC4;
+ t['rsuperior'] = 0xF6F1;
+ t['rtblock'] = 0x2590;
+ t['rturned'] = 0x0279;
+ t['rturnedsuperior'] = 0x02B4;
+ t['ruhiragana'] = 0x308B;
+ t['rukatakana'] = 0x30EB;
+ t['rukatakanahalfwidth'] = 0xFF99;
+ t['rupeemarkbengali'] = 0x09F2;
+ t['rupeesignbengali'] = 0x09F3;
+ t['rupiah'] = 0xF6DD;
+ t['ruthai'] = 0x0E24;
+ t['rvocalicbengali'] = 0x098B;
+ t['rvocalicdeva'] = 0x090B;
+ t['rvocalicgujarati'] = 0x0A8B;
+ t['rvocalicvowelsignbengali'] = 0x09C3;
+ t['rvocalicvowelsigndeva'] = 0x0943;
+ t['rvocalicvowelsigngujarati'] = 0x0AC3;
+ t['s'] = 0x0073;
+ t['sabengali'] = 0x09B8;
+ t['sacute'] = 0x015B;
+ t['sacutedotaccent'] = 0x1E65;
+ t['sadarabic'] = 0x0635;
+ t['sadeva'] = 0x0938;
+ t['sadfinalarabic'] = 0xFEBA;
+ t['sadinitialarabic'] = 0xFEBB;
+ t['sadmedialarabic'] = 0xFEBC;
+ t['sagujarati'] = 0x0AB8;
+ t['sagurmukhi'] = 0x0A38;
+ t['sahiragana'] = 0x3055;
+ t['sakatakana'] = 0x30B5;
+ t['sakatakanahalfwidth'] = 0xFF7B;
+ t['sallallahoualayhewasallamarabic'] = 0xFDFA;
+ t['samekh'] = 0x05E1;
+ t['samekhdagesh'] = 0xFB41;
+ t['samekhdageshhebrew'] = 0xFB41;
+ t['samekhhebrew'] = 0x05E1;
+ t['saraaathai'] = 0x0E32;
+ t['saraaethai'] = 0x0E41;
+ t['saraaimaimalaithai'] = 0x0E44;
+ t['saraaimaimuanthai'] = 0x0E43;
+ t['saraamthai'] = 0x0E33;
+ t['saraathai'] = 0x0E30;
+ t['saraethai'] = 0x0E40;
+ t['saraiileftthai'] = 0xF886;
+ t['saraiithai'] = 0x0E35;
+ t['saraileftthai'] = 0xF885;
+ t['saraithai'] = 0x0E34;
+ t['saraothai'] = 0x0E42;
+ t['saraueeleftthai'] = 0xF888;
+ t['saraueethai'] = 0x0E37;
+ t['saraueleftthai'] = 0xF887;
+ t['sarauethai'] = 0x0E36;
+ t['sarauthai'] = 0x0E38;
+ t['sarauuthai'] = 0x0E39;
+ t['sbopomofo'] = 0x3119;
+ t['scaron'] = 0x0161;
+ t['scarondotaccent'] = 0x1E67;
+ t['scedilla'] = 0x015F;
+ t['schwa'] = 0x0259;
+ t['schwacyrillic'] = 0x04D9;
+ t['schwadieresiscyrillic'] = 0x04DB;
+ t['schwahook'] = 0x025A;
+ t['scircle'] = 0x24E2;
+ t['scircumflex'] = 0x015D;
+ t['scommaaccent'] = 0x0219;
+ t['sdotaccent'] = 0x1E61;
+ t['sdotbelow'] = 0x1E63;
+ t['sdotbelowdotaccent'] = 0x1E69;
+ t['seagullbelowcmb'] = 0x033C;
+ t['second'] = 0x2033;
+ t['secondtonechinese'] = 0x02CA;
+ t['section'] = 0x00A7;
+ t['seenarabic'] = 0x0633;
+ t['seenfinalarabic'] = 0xFEB2;
+ t['seeninitialarabic'] = 0xFEB3;
+ t['seenmedialarabic'] = 0xFEB4;
+ t['segol'] = 0x05B6;
+ t['segol13'] = 0x05B6;
+ t['segol1f'] = 0x05B6;
+ t['segol2c'] = 0x05B6;
+ t['segolhebrew'] = 0x05B6;
+ t['segolnarrowhebrew'] = 0x05B6;
+ t['segolquarterhebrew'] = 0x05B6;
+ t['segoltahebrew'] = 0x0592;
+ t['segolwidehebrew'] = 0x05B6;
+ t['seharmenian'] = 0x057D;
+ t['sehiragana'] = 0x305B;
+ t['sekatakana'] = 0x30BB;
+ t['sekatakanahalfwidth'] = 0xFF7E;
+ t['semicolon'] = 0x003B;
+ t['semicolonarabic'] = 0x061B;
+ t['semicolonmonospace'] = 0xFF1B;
+ t['semicolonsmall'] = 0xFE54;
+ t['semivoicedmarkkana'] = 0x309C;
+ t['semivoicedmarkkanahalfwidth'] = 0xFF9F;
+ t['sentisquare'] = 0x3322;
+ t['sentosquare'] = 0x3323;
+ t['seven'] = 0x0037;
+ t['sevenarabic'] = 0x0667;
+ t['sevenbengali'] = 0x09ED;
+ t['sevencircle'] = 0x2466;
+ t['sevencircleinversesansserif'] = 0x2790;
+ t['sevendeva'] = 0x096D;
+ t['seveneighths'] = 0x215E;
+ t['sevengujarati'] = 0x0AED;
+ t['sevengurmukhi'] = 0x0A6D;
+ t['sevenhackarabic'] = 0x0667;
+ t['sevenhangzhou'] = 0x3027;
+ t['sevenideographicparen'] = 0x3226;
+ t['seveninferior'] = 0x2087;
+ t['sevenmonospace'] = 0xFF17;
+ t['sevenoldstyle'] = 0xF737;
+ t['sevenparen'] = 0x247A;
+ t['sevenperiod'] = 0x248E;
+ t['sevenpersian'] = 0x06F7;
+ t['sevenroman'] = 0x2176;
+ t['sevensuperior'] = 0x2077;
+ t['seventeencircle'] = 0x2470;
+ t['seventeenparen'] = 0x2484;
+ t['seventeenperiod'] = 0x2498;
+ t['seventhai'] = 0x0E57;
+ t['sfthyphen'] = 0x00AD;
+ t['shaarmenian'] = 0x0577;
+ t['shabengali'] = 0x09B6;
+ t['shacyrillic'] = 0x0448;
+ t['shaddaarabic'] = 0x0651;
+ t['shaddadammaarabic'] = 0xFC61;
+ t['shaddadammatanarabic'] = 0xFC5E;
+ t['shaddafathaarabic'] = 0xFC60;
+ t['shaddakasraarabic'] = 0xFC62;
+ t['shaddakasratanarabic'] = 0xFC5F;
+ t['shade'] = 0x2592;
+ t['shadedark'] = 0x2593;
+ t['shadelight'] = 0x2591;
+ t['shademedium'] = 0x2592;
+ t['shadeva'] = 0x0936;
+ t['shagujarati'] = 0x0AB6;
+ t['shagurmukhi'] = 0x0A36;
+ t['shalshelethebrew'] = 0x0593;
+ t['shbopomofo'] = 0x3115;
+ t['shchacyrillic'] = 0x0449;
+ t['sheenarabic'] = 0x0634;
+ t['sheenfinalarabic'] = 0xFEB6;
+ t['sheeninitialarabic'] = 0xFEB7;
+ t['sheenmedialarabic'] = 0xFEB8;
+ t['sheicoptic'] = 0x03E3;
+ t['sheqel'] = 0x20AA;
+ t['sheqelhebrew'] = 0x20AA;
+ t['sheva'] = 0x05B0;
+ t['sheva115'] = 0x05B0;
+ t['sheva15'] = 0x05B0;
+ t['sheva22'] = 0x05B0;
+ t['sheva2e'] = 0x05B0;
+ t['shevahebrew'] = 0x05B0;
+ t['shevanarrowhebrew'] = 0x05B0;
+ t['shevaquarterhebrew'] = 0x05B0;
+ t['shevawidehebrew'] = 0x05B0;
+ t['shhacyrillic'] = 0x04BB;
+ t['shimacoptic'] = 0x03ED;
+ t['shin'] = 0x05E9;
+ t['shindagesh'] = 0xFB49;
+ t['shindageshhebrew'] = 0xFB49;
+ t['shindageshshindot'] = 0xFB2C;
+ t['shindageshshindothebrew'] = 0xFB2C;
+ t['shindageshsindot'] = 0xFB2D;
+ t['shindageshsindothebrew'] = 0xFB2D;
+ t['shindothebrew'] = 0x05C1;
+ t['shinhebrew'] = 0x05E9;
+ t['shinshindot'] = 0xFB2A;
+ t['shinshindothebrew'] = 0xFB2A;
+ t['shinsindot'] = 0xFB2B;
+ t['shinsindothebrew'] = 0xFB2B;
+ t['shook'] = 0x0282;
+ t['sigma'] = 0x03C3;
+ t['sigma1'] = 0x03C2;
+ t['sigmafinal'] = 0x03C2;
+ t['sigmalunatesymbolgreek'] = 0x03F2;
+ t['sihiragana'] = 0x3057;
+ t['sikatakana'] = 0x30B7;
+ t['sikatakanahalfwidth'] = 0xFF7C;
+ t['siluqhebrew'] = 0x05BD;
+ t['siluqlefthebrew'] = 0x05BD;
+ t['similar'] = 0x223C;
+ t['sindothebrew'] = 0x05C2;
+ t['siosacirclekorean'] = 0x3274;
+ t['siosaparenkorean'] = 0x3214;
+ t['sioscieuckorean'] = 0x317E;
+ t['sioscirclekorean'] = 0x3266;
+ t['sioskiyeokkorean'] = 0x317A;
+ t['sioskorean'] = 0x3145;
+ t['siosnieunkorean'] = 0x317B;
+ t['siosparenkorean'] = 0x3206;
+ t['siospieupkorean'] = 0x317D;
+ t['siostikeutkorean'] = 0x317C;
+ t['six'] = 0x0036;
+ t['sixarabic'] = 0x0666;
+ t['sixbengali'] = 0x09EC;
+ t['sixcircle'] = 0x2465;
+ t['sixcircleinversesansserif'] = 0x278F;
+ t['sixdeva'] = 0x096C;
+ t['sixgujarati'] = 0x0AEC;
+ t['sixgurmukhi'] = 0x0A6C;
+ t['sixhackarabic'] = 0x0666;
+ t['sixhangzhou'] = 0x3026;
+ t['sixideographicparen'] = 0x3225;
+ t['sixinferior'] = 0x2086;
+ t['sixmonospace'] = 0xFF16;
+ t['sixoldstyle'] = 0xF736;
+ t['sixparen'] = 0x2479;
+ t['sixperiod'] = 0x248D;
+ t['sixpersian'] = 0x06F6;
+ t['sixroman'] = 0x2175;
+ t['sixsuperior'] = 0x2076;
+ t['sixteencircle'] = 0x246F;
+ t['sixteencurrencydenominatorbengali'] = 0x09F9;
+ t['sixteenparen'] = 0x2483;
+ t['sixteenperiod'] = 0x2497;
+ t['sixthai'] = 0x0E56;
+ t['slash'] = 0x002F;
+ t['slashmonospace'] = 0xFF0F;
+ t['slong'] = 0x017F;
+ t['slongdotaccent'] = 0x1E9B;
+ t['smileface'] = 0x263A;
+ t['smonospace'] = 0xFF53;
+ t['sofpasuqhebrew'] = 0x05C3;
+ t['softhyphen'] = 0x00AD;
+ t['softsigncyrillic'] = 0x044C;
+ t['sohiragana'] = 0x305D;
+ t['sokatakana'] = 0x30BD;
+ t['sokatakanahalfwidth'] = 0xFF7F;
+ t['soliduslongoverlaycmb'] = 0x0338;
+ t['solidusshortoverlaycmb'] = 0x0337;
+ t['sorusithai'] = 0x0E29;
+ t['sosalathai'] = 0x0E28;
+ t['sosothai'] = 0x0E0B;
+ t['sosuathai'] = 0x0E2A;
+ t['space'] = 0x0020;
+ t['spacehackarabic'] = 0x0020;
+ t['spade'] = 0x2660;
+ t['spadesuitblack'] = 0x2660;
+ t['spadesuitwhite'] = 0x2664;
+ t['sparen'] = 0x24AE;
+ t['squarebelowcmb'] = 0x033B;
+ t['squarecc'] = 0x33C4;
+ t['squarecm'] = 0x339D;
+ t['squarediagonalcrosshatchfill'] = 0x25A9;
+ t['squarehorizontalfill'] = 0x25A4;
+ t['squarekg'] = 0x338F;
+ t['squarekm'] = 0x339E;
+ t['squarekmcapital'] = 0x33CE;
+ t['squareln'] = 0x33D1;
+ t['squarelog'] = 0x33D2;
+ t['squaremg'] = 0x338E;
+ t['squaremil'] = 0x33D5;
+ t['squaremm'] = 0x339C;
+ t['squaremsquared'] = 0x33A1;
+ t['squareorthogonalcrosshatchfill'] = 0x25A6;
+ t['squareupperlefttolowerrightfill'] = 0x25A7;
+ t['squareupperrighttolowerleftfill'] = 0x25A8;
+ t['squareverticalfill'] = 0x25A5;
+ t['squarewhitewithsmallblack'] = 0x25A3;
+ t['srsquare'] = 0x33DB;
+ t['ssabengali'] = 0x09B7;
+ t['ssadeva'] = 0x0937;
+ t['ssagujarati'] = 0x0AB7;
+ t['ssangcieuckorean'] = 0x3149;
+ t['ssanghieuhkorean'] = 0x3185;
+ t['ssangieungkorean'] = 0x3180;
+ t['ssangkiyeokkorean'] = 0x3132;
+ t['ssangnieunkorean'] = 0x3165;
+ t['ssangpieupkorean'] = 0x3143;
+ t['ssangsioskorean'] = 0x3146;
+ t['ssangtikeutkorean'] = 0x3138;
+ t['ssuperior'] = 0xF6F2;
+ t['sterling'] = 0x00A3;
+ t['sterlingmonospace'] = 0xFFE1;
+ t['strokelongoverlaycmb'] = 0x0336;
+ t['strokeshortoverlaycmb'] = 0x0335;
+ t['subset'] = 0x2282;
+ t['subsetnotequal'] = 0x228A;
+ t['subsetorequal'] = 0x2286;
+ t['succeeds'] = 0x227B;
+ t['suchthat'] = 0x220B;
+ t['suhiragana'] = 0x3059;
+ t['sukatakana'] = 0x30B9;
+ t['sukatakanahalfwidth'] = 0xFF7D;
+ t['sukunarabic'] = 0x0652;
+ t['summation'] = 0x2211;
+ t['sun'] = 0x263C;
+ t['superset'] = 0x2283;
+ t['supersetnotequal'] = 0x228B;
+ t['supersetorequal'] = 0x2287;
+ t['svsquare'] = 0x33DC;
+ t['syouwaerasquare'] = 0x337C;
+ t['t'] = 0x0074;
+ t['tabengali'] = 0x09A4;
+ t['tackdown'] = 0x22A4;
+ t['tackleft'] = 0x22A3;
+ t['tadeva'] = 0x0924;
+ t['tagujarati'] = 0x0AA4;
+ t['tagurmukhi'] = 0x0A24;
+ t['taharabic'] = 0x0637;
+ t['tahfinalarabic'] = 0xFEC2;
+ t['tahinitialarabic'] = 0xFEC3;
+ t['tahiragana'] = 0x305F;
+ t['tahmedialarabic'] = 0xFEC4;
+ t['taisyouerasquare'] = 0x337D;
+ t['takatakana'] = 0x30BF;
+ t['takatakanahalfwidth'] = 0xFF80;
+ t['tatweelarabic'] = 0x0640;
+ t['tau'] = 0x03C4;
+ t['tav'] = 0x05EA;
+ t['tavdages'] = 0xFB4A;
+ t['tavdagesh'] = 0xFB4A;
+ t['tavdageshhebrew'] = 0xFB4A;
+ t['tavhebrew'] = 0x05EA;
+ t['tbar'] = 0x0167;
+ t['tbopomofo'] = 0x310A;
+ t['tcaron'] = 0x0165;
+ t['tccurl'] = 0x02A8;
+ t['tcedilla'] = 0x0163;
+ t['tcheharabic'] = 0x0686;
+ t['tchehfinalarabic'] = 0xFB7B;
+ t['tchehinitialarabic'] = 0xFB7C;
+ t['tchehmedialarabic'] = 0xFB7D;
+ t['tcircle'] = 0x24E3;
+ t['tcircumflexbelow'] = 0x1E71;
+ t['tcommaaccent'] = 0x0163;
+ t['tdieresis'] = 0x1E97;
+ t['tdotaccent'] = 0x1E6B;
+ t['tdotbelow'] = 0x1E6D;
+ t['tecyrillic'] = 0x0442;
+ t['tedescendercyrillic'] = 0x04AD;
+ t['teharabic'] = 0x062A;
+ t['tehfinalarabic'] = 0xFE96;
+ t['tehhahinitialarabic'] = 0xFCA2;
+ t['tehhahisolatedarabic'] = 0xFC0C;
+ t['tehinitialarabic'] = 0xFE97;
+ t['tehiragana'] = 0x3066;
+ t['tehjeeminitialarabic'] = 0xFCA1;
+ t['tehjeemisolatedarabic'] = 0xFC0B;
+ t['tehmarbutaarabic'] = 0x0629;
+ t['tehmarbutafinalarabic'] = 0xFE94;
+ t['tehmedialarabic'] = 0xFE98;
+ t['tehmeeminitialarabic'] = 0xFCA4;
+ t['tehmeemisolatedarabic'] = 0xFC0E;
+ t['tehnoonfinalarabic'] = 0xFC73;
+ t['tekatakana'] = 0x30C6;
+ t['tekatakanahalfwidth'] = 0xFF83;
+ t['telephone'] = 0x2121;
+ t['telephoneblack'] = 0x260E;
+ t['telishagedolahebrew'] = 0x05A0;
+ t['telishaqetanahebrew'] = 0x05A9;
+ t['tencircle'] = 0x2469;
+ t['tenideographicparen'] = 0x3229;
+ t['tenparen'] = 0x247D;
+ t['tenperiod'] = 0x2491;
+ t['tenroman'] = 0x2179;
+ t['tesh'] = 0x02A7;
+ t['tet'] = 0x05D8;
+ t['tetdagesh'] = 0xFB38;
+ t['tetdageshhebrew'] = 0xFB38;
+ t['tethebrew'] = 0x05D8;
+ t['tetsecyrillic'] = 0x04B5;
+ t['tevirhebrew'] = 0x059B;
+ t['tevirlefthebrew'] = 0x059B;
+ t['thabengali'] = 0x09A5;
+ t['thadeva'] = 0x0925;
+ t['thagujarati'] = 0x0AA5;
+ t['thagurmukhi'] = 0x0A25;
+ t['thalarabic'] = 0x0630;
+ t['thalfinalarabic'] = 0xFEAC;
+ t['thanthakhatlowleftthai'] = 0xF898;
+ t['thanthakhatlowrightthai'] = 0xF897;
+ t['thanthakhatthai'] = 0x0E4C;
+ t['thanthakhatupperleftthai'] = 0xF896;
+ t['theharabic'] = 0x062B;
+ t['thehfinalarabic'] = 0xFE9A;
+ t['thehinitialarabic'] = 0xFE9B;
+ t['thehmedialarabic'] = 0xFE9C;
+ t['thereexists'] = 0x2203;
+ t['therefore'] = 0x2234;
+ t['theta'] = 0x03B8;
+ t['theta1'] = 0x03D1;
+ t['thetasymbolgreek'] = 0x03D1;
+ t['thieuthacirclekorean'] = 0x3279;
+ t['thieuthaparenkorean'] = 0x3219;
+ t['thieuthcirclekorean'] = 0x326B;
+ t['thieuthkorean'] = 0x314C;
+ t['thieuthparenkorean'] = 0x320B;
+ t['thirteencircle'] = 0x246C;
+ t['thirteenparen'] = 0x2480;
+ t['thirteenperiod'] = 0x2494;
+ t['thonangmonthothai'] = 0x0E11;
+ t['thook'] = 0x01AD;
+ t['thophuthaothai'] = 0x0E12;
+ t['thorn'] = 0x00FE;
+ t['thothahanthai'] = 0x0E17;
+ t['thothanthai'] = 0x0E10;
+ t['thothongthai'] = 0x0E18;
+ t['thothungthai'] = 0x0E16;
+ t['thousandcyrillic'] = 0x0482;
+ t['thousandsseparatorarabic'] = 0x066C;
+ t['thousandsseparatorpersian'] = 0x066C;
+ t['three'] = 0x0033;
+ t['threearabic'] = 0x0663;
+ t['threebengali'] = 0x09E9;
+ t['threecircle'] = 0x2462;
+ t['threecircleinversesansserif'] = 0x278C;
+ t['threedeva'] = 0x0969;
+ t['threeeighths'] = 0x215C;
+ t['threegujarati'] = 0x0AE9;
+ t['threegurmukhi'] = 0x0A69;
+ t['threehackarabic'] = 0x0663;
+ t['threehangzhou'] = 0x3023;
+ t['threeideographicparen'] = 0x3222;
+ t['threeinferior'] = 0x2083;
+ t['threemonospace'] = 0xFF13;
+ t['threenumeratorbengali'] = 0x09F6;
+ t['threeoldstyle'] = 0xF733;
+ t['threeparen'] = 0x2476;
+ t['threeperiod'] = 0x248A;
+ t['threepersian'] = 0x06F3;
+ t['threequarters'] = 0x00BE;
+ t['threequartersemdash'] = 0xF6DE;
+ t['threeroman'] = 0x2172;
+ t['threesuperior'] = 0x00B3;
+ t['threethai'] = 0x0E53;
+ t['thzsquare'] = 0x3394;
+ t['tihiragana'] = 0x3061;
+ t['tikatakana'] = 0x30C1;
+ t['tikatakanahalfwidth'] = 0xFF81;
+ t['tikeutacirclekorean'] = 0x3270;
+ t['tikeutaparenkorean'] = 0x3210;
+ t['tikeutcirclekorean'] = 0x3262;
+ t['tikeutkorean'] = 0x3137;
+ t['tikeutparenkorean'] = 0x3202;
+ t['tilde'] = 0x02DC;
+ t['tildebelowcmb'] = 0x0330;
+ t['tildecmb'] = 0x0303;
+ t['tildecomb'] = 0x0303;
+ t['tildedoublecmb'] = 0x0360;
+ t['tildeoperator'] = 0x223C;
+ t['tildeoverlaycmb'] = 0x0334;
+ t['tildeverticalcmb'] = 0x033E;
+ t['timescircle'] = 0x2297;
+ t['tipehahebrew'] = 0x0596;
+ t['tipehalefthebrew'] = 0x0596;
+ t['tippigurmukhi'] = 0x0A70;
+ t['titlocyrilliccmb'] = 0x0483;
+ t['tiwnarmenian'] = 0x057F;
+ t['tlinebelow'] = 0x1E6F;
+ t['tmonospace'] = 0xFF54;
+ t['toarmenian'] = 0x0569;
+ t['tohiragana'] = 0x3068;
+ t['tokatakana'] = 0x30C8;
+ t['tokatakanahalfwidth'] = 0xFF84;
+ t['tonebarextrahighmod'] = 0x02E5;
+ t['tonebarextralowmod'] = 0x02E9;
+ t['tonebarhighmod'] = 0x02E6;
+ t['tonebarlowmod'] = 0x02E8;
+ t['tonebarmidmod'] = 0x02E7;
+ t['tonefive'] = 0x01BD;
+ t['tonesix'] = 0x0185;
+ t['tonetwo'] = 0x01A8;
+ t['tonos'] = 0x0384;
+ t['tonsquare'] = 0x3327;
+ t['topatakthai'] = 0x0E0F;
+ t['tortoiseshellbracketleft'] = 0x3014;
+ t['tortoiseshellbracketleftsmall'] = 0xFE5D;
+ t['tortoiseshellbracketleftvertical'] = 0xFE39;
+ t['tortoiseshellbracketright'] = 0x3015;
+ t['tortoiseshellbracketrightsmall'] = 0xFE5E;
+ t['tortoiseshellbracketrightvertical'] = 0xFE3A;
+ t['totaothai'] = 0x0E15;
+ t['tpalatalhook'] = 0x01AB;
+ t['tparen'] = 0x24AF;
+ t['trademark'] = 0x2122;
+ t['trademarksans'] = 0xF8EA;
+ t['trademarkserif'] = 0xF6DB;
+ t['tretroflexhook'] = 0x0288;
+ t['triagdn'] = 0x25BC;
+ t['triaglf'] = 0x25C4;
+ t['triagrt'] = 0x25BA;
+ t['triagup'] = 0x25B2;
+ t['ts'] = 0x02A6;
+ t['tsadi'] = 0x05E6;
+ t['tsadidagesh'] = 0xFB46;
+ t['tsadidageshhebrew'] = 0xFB46;
+ t['tsadihebrew'] = 0x05E6;
+ t['tsecyrillic'] = 0x0446;
+ t['tsere'] = 0x05B5;
+ t['tsere12'] = 0x05B5;
+ t['tsere1e'] = 0x05B5;
+ t['tsere2b'] = 0x05B5;
+ t['tserehebrew'] = 0x05B5;
+ t['tserenarrowhebrew'] = 0x05B5;
+ t['tserequarterhebrew'] = 0x05B5;
+ t['tserewidehebrew'] = 0x05B5;
+ t['tshecyrillic'] = 0x045B;
+ t['tsuperior'] = 0xF6F3;
+ t['ttabengali'] = 0x099F;
+ t['ttadeva'] = 0x091F;
+ t['ttagujarati'] = 0x0A9F;
+ t['ttagurmukhi'] = 0x0A1F;
+ t['tteharabic'] = 0x0679;
+ t['ttehfinalarabic'] = 0xFB67;
+ t['ttehinitialarabic'] = 0xFB68;
+ t['ttehmedialarabic'] = 0xFB69;
+ t['tthabengali'] = 0x09A0;
+ t['tthadeva'] = 0x0920;
+ t['tthagujarati'] = 0x0AA0;
+ t['tthagurmukhi'] = 0x0A20;
+ t['tturned'] = 0x0287;
+ t['tuhiragana'] = 0x3064;
+ t['tukatakana'] = 0x30C4;
+ t['tukatakanahalfwidth'] = 0xFF82;
+ t['tusmallhiragana'] = 0x3063;
+ t['tusmallkatakana'] = 0x30C3;
+ t['tusmallkatakanahalfwidth'] = 0xFF6F;
+ t['twelvecircle'] = 0x246B;
+ t['twelveparen'] = 0x247F;
+ t['twelveperiod'] = 0x2493;
+ t['twelveroman'] = 0x217B;
+ t['twentycircle'] = 0x2473;
+ t['twentyhangzhou'] = 0x5344;
+ t['twentyparen'] = 0x2487;
+ t['twentyperiod'] = 0x249B;
+ t['two'] = 0x0032;
+ t['twoarabic'] = 0x0662;
+ t['twobengali'] = 0x09E8;
+ t['twocircle'] = 0x2461;
+ t['twocircleinversesansserif'] = 0x278B;
+ t['twodeva'] = 0x0968;
+ t['twodotenleader'] = 0x2025;
+ t['twodotleader'] = 0x2025;
+ t['twodotleadervertical'] = 0xFE30;
+ t['twogujarati'] = 0x0AE8;
+ t['twogurmukhi'] = 0x0A68;
+ t['twohackarabic'] = 0x0662;
+ t['twohangzhou'] = 0x3022;
+ t['twoideographicparen'] = 0x3221;
+ t['twoinferior'] = 0x2082;
+ t['twomonospace'] = 0xFF12;
+ t['twonumeratorbengali'] = 0x09F5;
+ t['twooldstyle'] = 0xF732;
+ t['twoparen'] = 0x2475;
+ t['twoperiod'] = 0x2489;
+ t['twopersian'] = 0x06F2;
+ t['tworoman'] = 0x2171;
+ t['twostroke'] = 0x01BB;
+ t['twosuperior'] = 0x00B2;
+ t['twothai'] = 0x0E52;
+ t['twothirds'] = 0x2154;
+ t['u'] = 0x0075;
+ t['uacute'] = 0x00FA;
+ t['ubar'] = 0x0289;
+ t['ubengali'] = 0x0989;
+ t['ubopomofo'] = 0x3128;
+ t['ubreve'] = 0x016D;
+ t['ucaron'] = 0x01D4;
+ t['ucircle'] = 0x24E4;
+ t['ucircumflex'] = 0x00FB;
+ t['ucircumflexbelow'] = 0x1E77;
+ t['ucyrillic'] = 0x0443;
+ t['udattadeva'] = 0x0951;
+ t['udblacute'] = 0x0171;
+ t['udblgrave'] = 0x0215;
+ t['udeva'] = 0x0909;
+ t['udieresis'] = 0x00FC;
+ t['udieresisacute'] = 0x01D8;
+ t['udieresisbelow'] = 0x1E73;
+ t['udieresiscaron'] = 0x01DA;
+ t['udieresiscyrillic'] = 0x04F1;
+ t['udieresisgrave'] = 0x01DC;
+ t['udieresismacron'] = 0x01D6;
+ t['udotbelow'] = 0x1EE5;
+ t['ugrave'] = 0x00F9;
+ t['ugujarati'] = 0x0A89;
+ t['ugurmukhi'] = 0x0A09;
+ t['uhiragana'] = 0x3046;
+ t['uhookabove'] = 0x1EE7;
+ t['uhorn'] = 0x01B0;
+ t['uhornacute'] = 0x1EE9;
+ t['uhorndotbelow'] = 0x1EF1;
+ t['uhorngrave'] = 0x1EEB;
+ t['uhornhookabove'] = 0x1EED;
+ t['uhorntilde'] = 0x1EEF;
+ t['uhungarumlaut'] = 0x0171;
+ t['uhungarumlautcyrillic'] = 0x04F3;
+ t['uinvertedbreve'] = 0x0217;
+ t['ukatakana'] = 0x30A6;
+ t['ukatakanahalfwidth'] = 0xFF73;
+ t['ukcyrillic'] = 0x0479;
+ t['ukorean'] = 0x315C;
+ t['umacron'] = 0x016B;
+ t['umacroncyrillic'] = 0x04EF;
+ t['umacrondieresis'] = 0x1E7B;
+ t['umatragurmukhi'] = 0x0A41;
+ t['umonospace'] = 0xFF55;
+ t['underscore'] = 0x005F;
+ t['underscoredbl'] = 0x2017;
+ t['underscoremonospace'] = 0xFF3F;
+ t['underscorevertical'] = 0xFE33;
+ t['underscorewavy'] = 0xFE4F;
+ t['union'] = 0x222A;
+ t['universal'] = 0x2200;
+ t['uogonek'] = 0x0173;
+ t['uparen'] = 0x24B0;
+ t['upblock'] = 0x2580;
+ t['upperdothebrew'] = 0x05C4;
+ t['upsilon'] = 0x03C5;
+ t['upsilondieresis'] = 0x03CB;
+ t['upsilondieresistonos'] = 0x03B0;
+ t['upsilonlatin'] = 0x028A;
+ t['upsilontonos'] = 0x03CD;
+ t['uptackbelowcmb'] = 0x031D;
+ t['uptackmod'] = 0x02D4;
+ t['uragurmukhi'] = 0x0A73;
+ t['uring'] = 0x016F;
+ t['ushortcyrillic'] = 0x045E;
+ t['usmallhiragana'] = 0x3045;
+ t['usmallkatakana'] = 0x30A5;
+ t['usmallkatakanahalfwidth'] = 0xFF69;
+ t['ustraightcyrillic'] = 0x04AF;
+ t['ustraightstrokecyrillic'] = 0x04B1;
+ t['utilde'] = 0x0169;
+ t['utildeacute'] = 0x1E79;
+ t['utildebelow'] = 0x1E75;
+ t['uubengali'] = 0x098A;
+ t['uudeva'] = 0x090A;
+ t['uugujarati'] = 0x0A8A;
+ t['uugurmukhi'] = 0x0A0A;
+ t['uumatragurmukhi'] = 0x0A42;
+ t['uuvowelsignbengali'] = 0x09C2;
+ t['uuvowelsigndeva'] = 0x0942;
+ t['uuvowelsigngujarati'] = 0x0AC2;
+ t['uvowelsignbengali'] = 0x09C1;
+ t['uvowelsigndeva'] = 0x0941;
+ t['uvowelsigngujarati'] = 0x0AC1;
+ t['v'] = 0x0076;
+ t['vadeva'] = 0x0935;
+ t['vagujarati'] = 0x0AB5;
+ t['vagurmukhi'] = 0x0A35;
+ t['vakatakana'] = 0x30F7;
+ t['vav'] = 0x05D5;
+ t['vavdagesh'] = 0xFB35;
+ t['vavdagesh65'] = 0xFB35;
+ t['vavdageshhebrew'] = 0xFB35;
+ t['vavhebrew'] = 0x05D5;
+ t['vavholam'] = 0xFB4B;
+ t['vavholamhebrew'] = 0xFB4B;
+ t['vavvavhebrew'] = 0x05F0;
+ t['vavyodhebrew'] = 0x05F1;
+ t['vcircle'] = 0x24E5;
+ t['vdotbelow'] = 0x1E7F;
+ t['vecyrillic'] = 0x0432;
+ t['veharabic'] = 0x06A4;
+ t['vehfinalarabic'] = 0xFB6B;
+ t['vehinitialarabic'] = 0xFB6C;
+ t['vehmedialarabic'] = 0xFB6D;
+ t['vekatakana'] = 0x30F9;
+ t['venus'] = 0x2640;
+ t['verticalbar'] = 0x007C;
+ t['verticallineabovecmb'] = 0x030D;
+ t['verticallinebelowcmb'] = 0x0329;
+ t['verticallinelowmod'] = 0x02CC;
+ t['verticallinemod'] = 0x02C8;
+ t['vewarmenian'] = 0x057E;
+ t['vhook'] = 0x028B;
+ t['vikatakana'] = 0x30F8;
+ t['viramabengali'] = 0x09CD;
+ t['viramadeva'] = 0x094D;
+ t['viramagujarati'] = 0x0ACD;
+ t['visargabengali'] = 0x0983;
+ t['visargadeva'] = 0x0903;
+ t['visargagujarati'] = 0x0A83;
+ t['vmonospace'] = 0xFF56;
+ t['voarmenian'] = 0x0578;
+ t['voicediterationhiragana'] = 0x309E;
+ t['voicediterationkatakana'] = 0x30FE;
+ t['voicedmarkkana'] = 0x309B;
+ t['voicedmarkkanahalfwidth'] = 0xFF9E;
+ t['vokatakana'] = 0x30FA;
+ t['vparen'] = 0x24B1;
+ t['vtilde'] = 0x1E7D;
+ t['vturned'] = 0x028C;
+ t['vuhiragana'] = 0x3094;
+ t['vukatakana'] = 0x30F4;
+ t['w'] = 0x0077;
+ t['wacute'] = 0x1E83;
+ t['waekorean'] = 0x3159;
+ t['wahiragana'] = 0x308F;
+ t['wakatakana'] = 0x30EF;
+ t['wakatakanahalfwidth'] = 0xFF9C;
+ t['wakorean'] = 0x3158;
+ t['wasmallhiragana'] = 0x308E;
+ t['wasmallkatakana'] = 0x30EE;
+ t['wattosquare'] = 0x3357;
+ t['wavedash'] = 0x301C;
+ t['wavyunderscorevertical'] = 0xFE34;
+ t['wawarabic'] = 0x0648;
+ t['wawfinalarabic'] = 0xFEEE;
+ t['wawhamzaabovearabic'] = 0x0624;
+ t['wawhamzaabovefinalarabic'] = 0xFE86;
+ t['wbsquare'] = 0x33DD;
+ t['wcircle'] = 0x24E6;
+ t['wcircumflex'] = 0x0175;
+ t['wdieresis'] = 0x1E85;
+ t['wdotaccent'] = 0x1E87;
+ t['wdotbelow'] = 0x1E89;
+ t['wehiragana'] = 0x3091;
+ t['weierstrass'] = 0x2118;
+ t['wekatakana'] = 0x30F1;
+ t['wekorean'] = 0x315E;
+ t['weokorean'] = 0x315D;
+ t['wgrave'] = 0x1E81;
+ t['whitebullet'] = 0x25E6;
+ t['whitecircle'] = 0x25CB;
+ t['whitecircleinverse'] = 0x25D9;
+ t['whitecornerbracketleft'] = 0x300E;
+ t['whitecornerbracketleftvertical'] = 0xFE43;
+ t['whitecornerbracketright'] = 0x300F;
+ t['whitecornerbracketrightvertical'] = 0xFE44;
+ t['whitediamond'] = 0x25C7;
+ t['whitediamondcontainingblacksmalldiamond'] = 0x25C8;
+ t['whitedownpointingsmalltriangle'] = 0x25BF;
+ t['whitedownpointingtriangle'] = 0x25BD;
+ t['whiteleftpointingsmalltriangle'] = 0x25C3;
+ t['whiteleftpointingtriangle'] = 0x25C1;
+ t['whitelenticularbracketleft'] = 0x3016;
+ t['whitelenticularbracketright'] = 0x3017;
+ t['whiterightpointingsmalltriangle'] = 0x25B9;
+ t['whiterightpointingtriangle'] = 0x25B7;
+ t['whitesmallsquare'] = 0x25AB;
+ t['whitesmilingface'] = 0x263A;
+ t['whitesquare'] = 0x25A1;
+ t['whitestar'] = 0x2606;
+ t['whitetelephone'] = 0x260F;
+ t['whitetortoiseshellbracketleft'] = 0x3018;
+ t['whitetortoiseshellbracketright'] = 0x3019;
+ t['whiteuppointingsmalltriangle'] = 0x25B5;
+ t['whiteuppointingtriangle'] = 0x25B3;
+ t['wihiragana'] = 0x3090;
+ t['wikatakana'] = 0x30F0;
+ t['wikorean'] = 0x315F;
+ t['wmonospace'] = 0xFF57;
+ t['wohiragana'] = 0x3092;
+ t['wokatakana'] = 0x30F2;
+ t['wokatakanahalfwidth'] = 0xFF66;
+ t['won'] = 0x20A9;
+ t['wonmonospace'] = 0xFFE6;
+ t['wowaenthai'] = 0x0E27;
+ t['wparen'] = 0x24B2;
+ t['wring'] = 0x1E98;
+ t['wsuperior'] = 0x02B7;
+ t['wturned'] = 0x028D;
+ t['wynn'] = 0x01BF;
+ t['x'] = 0x0078;
+ t['xabovecmb'] = 0x033D;
+ t['xbopomofo'] = 0x3112;
+ t['xcircle'] = 0x24E7;
+ t['xdieresis'] = 0x1E8D;
+ t['xdotaccent'] = 0x1E8B;
+ t['xeharmenian'] = 0x056D;
+ t['xi'] = 0x03BE;
+ t['xmonospace'] = 0xFF58;
+ t['xparen'] = 0x24B3;
+ t['xsuperior'] = 0x02E3;
+ t['y'] = 0x0079;
+ t['yaadosquare'] = 0x334E;
+ t['yabengali'] = 0x09AF;
+ t['yacute'] = 0x00FD;
+ t['yadeva'] = 0x092F;
+ t['yaekorean'] = 0x3152;
+ t['yagujarati'] = 0x0AAF;
+ t['yagurmukhi'] = 0x0A2F;
+ t['yahiragana'] = 0x3084;
+ t['yakatakana'] = 0x30E4;
+ t['yakatakanahalfwidth'] = 0xFF94;
+ t['yakorean'] = 0x3151;
+ t['yamakkanthai'] = 0x0E4E;
+ t['yasmallhiragana'] = 0x3083;
+ t['yasmallkatakana'] = 0x30E3;
+ t['yasmallkatakanahalfwidth'] = 0xFF6C;
+ t['yatcyrillic'] = 0x0463;
+ t['ycircle'] = 0x24E8;
+ t['ycircumflex'] = 0x0177;
+ t['ydieresis'] = 0x00FF;
+ t['ydotaccent'] = 0x1E8F;
+ t['ydotbelow'] = 0x1EF5;
+ t['yeharabic'] = 0x064A;
+ t['yehbarreearabic'] = 0x06D2;
+ t['yehbarreefinalarabic'] = 0xFBAF;
+ t['yehfinalarabic'] = 0xFEF2;
+ t['yehhamzaabovearabic'] = 0x0626;
+ t['yehhamzaabovefinalarabic'] = 0xFE8A;
+ t['yehhamzaaboveinitialarabic'] = 0xFE8B;
+ t['yehhamzaabovemedialarabic'] = 0xFE8C;
+ t['yehinitialarabic'] = 0xFEF3;
+ t['yehmedialarabic'] = 0xFEF4;
+ t['yehmeeminitialarabic'] = 0xFCDD;
+ t['yehmeemisolatedarabic'] = 0xFC58;
+ t['yehnoonfinalarabic'] = 0xFC94;
+ t['yehthreedotsbelowarabic'] = 0x06D1;
+ t['yekorean'] = 0x3156;
+ t['yen'] = 0x00A5;
+ t['yenmonospace'] = 0xFFE5;
+ t['yeokorean'] = 0x3155;
+ t['yeorinhieuhkorean'] = 0x3186;
+ t['yerahbenyomohebrew'] = 0x05AA;
+ t['yerahbenyomolefthebrew'] = 0x05AA;
+ t['yericyrillic'] = 0x044B;
+ t['yerudieresiscyrillic'] = 0x04F9;
+ t['yesieungkorean'] = 0x3181;
+ t['yesieungpansioskorean'] = 0x3183;
+ t['yesieungsioskorean'] = 0x3182;
+ t['yetivhebrew'] = 0x059A;
+ t['ygrave'] = 0x1EF3;
+ t['yhook'] = 0x01B4;
+ t['yhookabove'] = 0x1EF7;
+ t['yiarmenian'] = 0x0575;
+ t['yicyrillic'] = 0x0457;
+ t['yikorean'] = 0x3162;
+ t['yinyang'] = 0x262F;
+ t['yiwnarmenian'] = 0x0582;
+ t['ymonospace'] = 0xFF59;
+ t['yod'] = 0x05D9;
+ t['yoddagesh'] = 0xFB39;
+ t['yoddageshhebrew'] = 0xFB39;
+ t['yodhebrew'] = 0x05D9;
+ t['yodyodhebrew'] = 0x05F2;
+ t['yodyodpatahhebrew'] = 0xFB1F;
+ t['yohiragana'] = 0x3088;
+ t['yoikorean'] = 0x3189;
+ t['yokatakana'] = 0x30E8;
+ t['yokatakanahalfwidth'] = 0xFF96;
+ t['yokorean'] = 0x315B;
+ t['yosmallhiragana'] = 0x3087;
+ t['yosmallkatakana'] = 0x30E7;
+ t['yosmallkatakanahalfwidth'] = 0xFF6E;
+ t['yotgreek'] = 0x03F3;
+ t['yoyaekorean'] = 0x3188;
+ t['yoyakorean'] = 0x3187;
+ t['yoyakthai'] = 0x0E22;
+ t['yoyingthai'] = 0x0E0D;
+ t['yparen'] = 0x24B4;
+ t['ypogegrammeni'] = 0x037A;
+ t['ypogegrammenigreekcmb'] = 0x0345;
+ t['yr'] = 0x01A6;
+ t['yring'] = 0x1E99;
+ t['ysuperior'] = 0x02B8;
+ t['ytilde'] = 0x1EF9;
+ t['yturned'] = 0x028E;
+ t['yuhiragana'] = 0x3086;
+ t['yuikorean'] = 0x318C;
+ t['yukatakana'] = 0x30E6;
+ t['yukatakanahalfwidth'] = 0xFF95;
+ t['yukorean'] = 0x3160;
+ t['yusbigcyrillic'] = 0x046B;
+ t['yusbigiotifiedcyrillic'] = 0x046D;
+ t['yuslittlecyrillic'] = 0x0467;
+ t['yuslittleiotifiedcyrillic'] = 0x0469;
+ t['yusmallhiragana'] = 0x3085;
+ t['yusmallkatakana'] = 0x30E5;
+ t['yusmallkatakanahalfwidth'] = 0xFF6D;
+ t['yuyekorean'] = 0x318B;
+ t['yuyeokorean'] = 0x318A;
+ t['yyabengali'] = 0x09DF;
+ t['yyadeva'] = 0x095F;
+ t['z'] = 0x007A;
+ t['zaarmenian'] = 0x0566;
+ t['zacute'] = 0x017A;
+ t['zadeva'] = 0x095B;
+ t['zagurmukhi'] = 0x0A5B;
+ t['zaharabic'] = 0x0638;
+ t['zahfinalarabic'] = 0xFEC6;
+ t['zahinitialarabic'] = 0xFEC7;
+ t['zahiragana'] = 0x3056;
+ t['zahmedialarabic'] = 0xFEC8;
+ t['zainarabic'] = 0x0632;
+ t['zainfinalarabic'] = 0xFEB0;
+ t['zakatakana'] = 0x30B6;
+ t['zaqefgadolhebrew'] = 0x0595;
+ t['zaqefqatanhebrew'] = 0x0594;
+ t['zarqahebrew'] = 0x0598;
+ t['zayin'] = 0x05D6;
+ t['zayindagesh'] = 0xFB36;
+ t['zayindageshhebrew'] = 0xFB36;
+ t['zayinhebrew'] = 0x05D6;
+ t['zbopomofo'] = 0x3117;
+ t['zcaron'] = 0x017E;
+ t['zcircle'] = 0x24E9;
+ t['zcircumflex'] = 0x1E91;
+ t['zcurl'] = 0x0291;
+ t['zdot'] = 0x017C;
+ t['zdotaccent'] = 0x017C;
+ t['zdotbelow'] = 0x1E93;
+ t['zecyrillic'] = 0x0437;
+ t['zedescendercyrillic'] = 0x0499;
+ t['zedieresiscyrillic'] = 0x04DF;
+ t['zehiragana'] = 0x305C;
+ t['zekatakana'] = 0x30BC;
+ t['zero'] = 0x0030;
+ t['zeroarabic'] = 0x0660;
+ t['zerobengali'] = 0x09E6;
+ t['zerodeva'] = 0x0966;
+ t['zerogujarati'] = 0x0AE6;
+ t['zerogurmukhi'] = 0x0A66;
+ t['zerohackarabic'] = 0x0660;
+ t['zeroinferior'] = 0x2080;
+ t['zeromonospace'] = 0xFF10;
+ t['zerooldstyle'] = 0xF730;
+ t['zeropersian'] = 0x06F0;
+ t['zerosuperior'] = 0x2070;
+ t['zerothai'] = 0x0E50;
+ t['zerowidthjoiner'] = 0xFEFF;
+ t['zerowidthnonjoiner'] = 0x200C;
+ t['zerowidthspace'] = 0x200B;
+ t['zeta'] = 0x03B6;
+ t['zhbopomofo'] = 0x3113;
+ t['zhearmenian'] = 0x056A;
+ t['zhebrevecyrillic'] = 0x04C2;
+ t['zhecyrillic'] = 0x0436;
+ t['zhedescendercyrillic'] = 0x0497;
+ t['zhedieresiscyrillic'] = 0x04DD;
+ t['zihiragana'] = 0x3058;
+ t['zikatakana'] = 0x30B8;
+ t['zinorhebrew'] = 0x05AE;
+ t['zlinebelow'] = 0x1E95;
+ t['zmonospace'] = 0xFF5A;
+ t['zohiragana'] = 0x305E;
+ t['zokatakana'] = 0x30BE;
+ t['zparen'] = 0x24B5;
+ t['zretroflexhook'] = 0x0290;
+ t['zstroke'] = 0x01B6;
+ t['zuhiragana'] = 0x305A;
+ t['zukatakana'] = 0x30BA;
+ t['.notdef'] = 0x0000;
+ t['angbracketleftbig'] = 0x2329;
+ t['angbracketleftBig'] = 0x2329;
+ t['angbracketleftbigg'] = 0x2329;
+ t['angbracketleftBigg'] = 0x2329;
+ t['angbracketrightBig'] = 0x232A;
+ t['angbracketrightbig'] = 0x232A;
+ t['angbracketrightBigg'] = 0x232A;
+ t['angbracketrightbigg'] = 0x232A;
+ t['arrowhookleft'] = 0x21AA;
+ t['arrowhookright'] = 0x21A9;
+ t['arrowlefttophalf'] = 0x21BC;
+ t['arrowleftbothalf'] = 0x21BD;
+ t['arrownortheast'] = 0x2197;
+ t['arrownorthwest'] = 0x2196;
+ t['arrowrighttophalf'] = 0x21C0;
+ t['arrowrightbothalf'] = 0x21C1;
+ t['arrowsoutheast'] = 0x2198;
+ t['arrowsouthwest'] = 0x2199;
+ t['backslashbig'] = 0x2216;
+ t['backslashBig'] = 0x2216;
+ t['backslashBigg'] = 0x2216;
+ t['backslashbigg'] = 0x2216;
+ t['bardbl'] = 0x2016;
+ t['bracehtipdownleft'] = 0xFE37;
+ t['bracehtipdownright'] = 0xFE37;
+ t['bracehtipupleft'] = 0xFE38;
+ t['bracehtipupright'] = 0xFE38;
+ t['braceleftBig'] = 0x007B;
+ t['braceleftbig'] = 0x007B;
+ t['braceleftbigg'] = 0x007B;
+ t['braceleftBigg'] = 0x007B;
+ t['bracerightBig'] = 0x007D;
+ t['bracerightbig'] = 0x007D;
+ t['bracerightbigg'] = 0x007D;
+ t['bracerightBigg'] = 0x007D;
+ t['bracketleftbig'] = 0x005B;
+ t['bracketleftBig'] = 0x005B;
+ t['bracketleftbigg'] = 0x005B;
+ t['bracketleftBigg'] = 0x005B;
+ t['bracketrightBig'] = 0x005D;
+ t['bracketrightbig'] = 0x005D;
+ t['bracketrightbigg'] = 0x005D;
+ t['bracketrightBigg'] = 0x005D;
+ t['ceilingleftbig'] = 0x2308;
+ t['ceilingleftBig'] = 0x2308;
+ t['ceilingleftBigg'] = 0x2308;
+ t['ceilingleftbigg'] = 0x2308;
+ t['ceilingrightbig'] = 0x2309;
+ t['ceilingrightBig'] = 0x2309;
+ t['ceilingrightbigg'] = 0x2309;
+ t['ceilingrightBigg'] = 0x2309;
+ t['circledotdisplay'] = 0x2299;
+ t['circledottext'] = 0x2299;
+ t['circlemultiplydisplay'] = 0x2297;
+ t['circlemultiplytext'] = 0x2297;
+ t['circleplusdisplay'] = 0x2295;
+ t['circleplustext'] = 0x2295;
+ t['contintegraldisplay'] = 0x222E;
+ t['contintegraltext'] = 0x222E;
+ t['coproductdisplay'] = 0x2210;
+ t['coproducttext'] = 0x2210;
+ t['floorleftBig'] = 0x230A;
+ t['floorleftbig'] = 0x230A;
+ t['floorleftbigg'] = 0x230A;
+ t['floorleftBigg'] = 0x230A;
+ t['floorrightbig'] = 0x230B;
+ t['floorrightBig'] = 0x230B;
+ t['floorrightBigg'] = 0x230B;
+ t['floorrightbigg'] = 0x230B;
+ t['hatwide'] = 0x0302;
+ t['hatwider'] = 0x0302;
+ t['hatwidest'] = 0x0302;
+ t['intercal'] = 0x1D40;
+ t['integraldisplay'] = 0x222B;
+ t['integraltext'] = 0x222B;
+ t['intersectiondisplay'] = 0x22C2;
+ t['intersectiontext'] = 0x22C2;
+ t['logicalanddisplay'] = 0x2227;
+ t['logicalandtext'] = 0x2227;
+ t['logicalordisplay'] = 0x2228;
+ t['logicalortext'] = 0x2228;
+ t['parenleftBig'] = 0x0028;
+ t['parenleftbig'] = 0x0028;
+ t['parenleftBigg'] = 0x0028;
+ t['parenleftbigg'] = 0x0028;
+ t['parenrightBig'] = 0x0029;
+ t['parenrightbig'] = 0x0029;
+ t['parenrightBigg'] = 0x0029;
+ t['parenrightbigg'] = 0x0029;
+ t['prime'] = 0x2032;
+ t['productdisplay'] = 0x220F;
+ t['producttext'] = 0x220F;
+ t['radicalbig'] = 0x221A;
+ t['radicalBig'] = 0x221A;
+ t['radicalBigg'] = 0x221A;
+ t['radicalbigg'] = 0x221A;
+ t['radicalbt'] = 0x221A;
+ t['radicaltp'] = 0x221A;
+ t['radicalvertex'] = 0x221A;
+ t['slashbig'] = 0x002F;
+ t['slashBig'] = 0x002F;
+ t['slashBigg'] = 0x002F;
+ t['slashbigg'] = 0x002F;
+ t['summationdisplay'] = 0x2211;
+ t['summationtext'] = 0x2211;
+ t['tildewide'] = 0x02DC;
+ t['tildewider'] = 0x02DC;
+ t['tildewidest'] = 0x02DC;
+ t['uniondisplay'] = 0x22C3;
+ t['unionmultidisplay'] = 0x228E;
+ t['unionmultitext'] = 0x228E;
+ t['unionsqdisplay'] = 0x2294;
+ t['unionsqtext'] = 0x2294;
+ t['uniontext'] = 0x22C3;
+ t['vextenddouble'] = 0x2225;
+ t['vextendsingle'] = 0x2223;
+});
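+// Maps ZapfDingbats glyph names (e.g. 'a1'..'a206', 'space', '.notdef') to their Unicode code points.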
+var getDingbatsGlyphsUnicode = getLookupTableFactory(function (t) {
+ t['space'] = 0x0020;
+ t['a1'] = 0x2701;
+ t['a2'] = 0x2702;
+ t['a202'] = 0x2703;
+ t['a3'] = 0x2704;
+ t['a4'] = 0x260E;
+ t['a5'] = 0x2706;
+ t['a119'] = 0x2707;
+ t['a118'] = 0x2708;
+ t['a117'] = 0x2709;
+ t['a11'] = 0x261B;
+ t['a12'] = 0x261E;
+ t['a13'] = 0x270C;
+ t['a14'] = 0x270D;
+ t['a15'] = 0x270E;
+ t['a16'] = 0x270F;
+ t['a105'] = 0x2710;
+ t['a17'] = 0x2711;
+ t['a18'] = 0x2712;
+ t['a19'] = 0x2713;
+ t['a20'] = 0x2714;
+ t['a21'] = 0x2715;
+ t['a22'] = 0x2716;
+ t['a23'] = 0x2717;
+ t['a24'] = 0x2718;
+ t['a25'] = 0x2719;
+ t['a26'] = 0x271A;
+ t['a27'] = 0x271B;
+ t['a28'] = 0x271C;
+ t['a6'] = 0x271D;
+ t['a7'] = 0x271E;
+ t['a8'] = 0x271F;
+ t['a9'] = 0x2720;
+ t['a10'] = 0x2721;
+ t['a29'] = 0x2722;
+ t['a30'] = 0x2723;
+ t['a31'] = 0x2724;
+ t['a32'] = 0x2725;
+ t['a33'] = 0x2726;
+ t['a34'] = 0x2727;
+ t['a35'] = 0x2605;
+ t['a36'] = 0x2729;
+ t['a37'] = 0x272A;
+ t['a38'] = 0x272B;
+ t['a39'] = 0x272C;
+ t['a40'] = 0x272D;
+ t['a41'] = 0x272E;
+ t['a42'] = 0x272F;
+ t['a43'] = 0x2730;
+ t['a44'] = 0x2731;
+ t['a45'] = 0x2732;
+ t['a46'] = 0x2733;
+ t['a47'] = 0x2734;
+ t['a48'] = 0x2735;
+ t['a49'] = 0x2736;
+ t['a50'] = 0x2737;
+ t['a51'] = 0x2738;
+ t['a52'] = 0x2739;
+ t['a53'] = 0x273A;
+ t['a54'] = 0x273B;
+ t['a55'] = 0x273C;
+ t['a56'] = 0x273D;
+ t['a57'] = 0x273E;
+ t['a58'] = 0x273F;
+ t['a59'] = 0x2740;
+ t['a60'] = 0x2741;
+ t['a61'] = 0x2742;
+ t['a62'] = 0x2743;
+ t['a63'] = 0x2744;
+ t['a64'] = 0x2745;
+ t['a65'] = 0x2746;
+ t['a66'] = 0x2747;
+ t['a67'] = 0x2748;
+ t['a68'] = 0x2749;
+ t['a69'] = 0x274A;
+ t['a70'] = 0x274B;
+ t['a71'] = 0x25CF;
+ t['a72'] = 0x274D;
+ t['a73'] = 0x25A0;
+ t['a74'] = 0x274F;
+ t['a203'] = 0x2750;
+ t['a75'] = 0x2751;
+ t['a204'] = 0x2752;
+ t['a76'] = 0x25B2;
+ t['a77'] = 0x25BC;
+ t['a78'] = 0x25C6;
+ t['a79'] = 0x2756;
+ t['a81'] = 0x25D7;
+ t['a82'] = 0x2758;
+ t['a83'] = 0x2759;
+ t['a84'] = 0x275A;
+ t['a97'] = 0x275B;
+ t['a98'] = 0x275C;
+ t['a99'] = 0x275D;
+ t['a100'] = 0x275E;
+ t['a101'] = 0x2761;
+ t['a102'] = 0x2762;
+ t['a103'] = 0x2763;
+ t['a104'] = 0x2764;
+ t['a106'] = 0x2765;
+ t['a107'] = 0x2766;
+ t['a108'] = 0x2767;
+ t['a112'] = 0x2663;
+ t['a111'] = 0x2666;
+ t['a110'] = 0x2665;
+ t['a109'] = 0x2660;
+ t['a120'] = 0x2460;
+ t['a121'] = 0x2461;
+ t['a122'] = 0x2462;
+ t['a123'] = 0x2463;
+ t['a124'] = 0x2464;
+ t['a125'] = 0x2465;
+ t['a126'] = 0x2466;
+ t['a127'] = 0x2467;
+ t['a128'] = 0x2468;
+ t['a129'] = 0x2469;
+ t['a130'] = 0x2776;
+ t['a131'] = 0x2777;
+ t['a132'] = 0x2778;
+ t['a133'] = 0x2779;
+ t['a134'] = 0x277A;
+ t['a135'] = 0x277B;
+ t['a136'] = 0x277C;
+ t['a137'] = 0x277D;
+ t['a138'] = 0x277E;
+ t['a139'] = 0x277F;
+ t['a140'] = 0x2780;
+ t['a141'] = 0x2781;
+ t['a142'] = 0x2782;
+ t['a143'] = 0x2783;
+ t['a144'] = 0x2784;
+ t['a145'] = 0x2785;
+ t['a146'] = 0x2786;
+ t['a147'] = 0x2787;
+ t['a148'] = 0x2788;
+ t['a149'] = 0x2789;
+ t['a150'] = 0x278A;
+ t['a151'] = 0x278B;
+ t['a152'] = 0x278C;
+ t['a153'] = 0x278D;
+ t['a154'] = 0x278E;
+ t['a155'] = 0x278F;
+ t['a156'] = 0x2790;
+ t['a157'] = 0x2791;
+ t['a158'] = 0x2792;
+ t['a159'] = 0x2793;
+ t['a160'] = 0x2794;
+ t['a161'] = 0x2192;
+ t['a163'] = 0x2194;
+ t['a164'] = 0x2195;
+ t['a196'] = 0x2798;
+ t['a165'] = 0x2799;
+ t['a192'] = 0x279A;
+ t['a166'] = 0x279B;
+ t['a167'] = 0x279C;
+ t['a168'] = 0x279D;
+ t['a169'] = 0x279E;
+ t['a170'] = 0x279F;
+ t['a171'] = 0x27A0;
+ t['a172'] = 0x27A1;
+ t['a173'] = 0x27A2;
+ t['a162'] = 0x27A3;
+ t['a174'] = 0x27A4;
+ t['a175'] = 0x27A5;
+ t['a176'] = 0x27A6;
+ t['a177'] = 0x27A7;
+ t['a178'] = 0x27A8;
+ t['a179'] = 0x27A9;
+ t['a193'] = 0x27AA;
+ t['a180'] = 0x27AB;
+ t['a199'] = 0x27AC;
+ t['a181'] = 0x27AD;
+ t['a200'] = 0x27AE;
+ t['a182'] = 0x27AF;
+ t['a201'] = 0x27B1;
+ t['a183'] = 0x27B2;
+ t['a184'] = 0x27B3;
+ t['a197'] = 0x27B4;
+ t['a185'] = 0x27B5;
+ t['a194'] = 0x27B6;
+ t['a198'] = 0x27B7;
+ t['a186'] = 0x27B8;
+ t['a195'] = 0x27B9;
+ t['a187'] = 0x27BA;
+ t['a188'] = 0x27BB;
+ t['a189'] = 0x27BC;
+ t['a190'] = 0x27BD;
+ t['a191'] = 0x27BE;
+ t['a89'] = 0x2768;
+ t['a90'] = 0x2769;
+ t['a93'] = 0x276A;
+ t['a94'] = 0x276B;
+ t['a91'] = 0x276C;
+ t['a92'] = 0x276D;
+ t['a205'] = 0x276E;
+ t['a85'] = 0x276F;
+ t['a206'] = 0x2770;
+ t['a86'] = 0x2771;
+ t['a87'] = 0x2772;
+ t['a88'] = 0x2773;
+ t['a95'] = 0x2774;
+ t['a96'] = 0x2775;
+ t['.notdef'] = 0x0000;
+});
+exports.getGlyphsUnicode = getGlyphsUnicode;
+exports.getDingbatsGlyphsUnicode = getDingbatsGlyphsUnicode;
+
+/***/ }),
+/* 179 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.getSupplementalGlyphMapForCalibri = exports.getSupplementalGlyphMapForArialBlack = exports.getGlyphMapForStandardFonts = exports.getSymbolsFonts = exports.getSerifFonts = exports.getNonStdFontMap = exports.getStdFontMap = void 0;
+
+var _core_utils = __w_pdfjs_require__(154);
+
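+// Maps common font names to one of the standard 14 PDF font names.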
+var getStdFontMap = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['ArialNarrow'] = 'Helvetica';
+ t['ArialNarrow-Bold'] = 'Helvetica-Bold';
+ t['ArialNarrow-BoldItalic'] = 'Helvetica-BoldOblique';
+ t['ArialNarrow-Italic'] = 'Helvetica-Oblique';
+ t['ArialBlack'] = 'Helvetica';
+ t['ArialBlack-Bold'] = 'Helvetica-Bold';
+ t['ArialBlack-BoldItalic'] = 'Helvetica-BoldOblique';
+ t['ArialBlack-Italic'] = 'Helvetica-Oblique';
+ t['Arial-Black'] = 'Helvetica';
+ t['Arial-Black-Bold'] = 'Helvetica-Bold';
+ t['Arial-Black-BoldItalic'] = 'Helvetica-BoldOblique';
+ t['Arial-Black-Italic'] = 'Helvetica-Oblique';
+ t['Arial'] = 'Helvetica';
+ t['Arial-Bold'] = 'Helvetica-Bold';
+ t['Arial-BoldItalic'] = 'Helvetica-BoldOblique';
+ t['Arial-Italic'] = 'Helvetica-Oblique';
+ t['Arial-BoldItalicMT'] = 'Helvetica-BoldOblique';
+ t['Arial-BoldMT'] = 'Helvetica-Bold';
+ t['Arial-ItalicMT'] = 'Helvetica-Oblique';
+ t['ArialMT'] = 'Helvetica';
+ t['Courier-Bold'] = 'Courier-Bold';
+ t['Courier-BoldItalic'] = 'Courier-BoldOblique';
+ t['Courier-Italic'] = 'Courier-Oblique';
+ t['CourierNew'] = 'Courier';
+ t['CourierNew-Bold'] = 'Courier-Bold';
+ t['CourierNew-BoldItalic'] = 'Courier-BoldOblique';
+ t['CourierNew-Italic'] = 'Courier-Oblique';
+ t['CourierNewPS-BoldItalicMT'] = 'Courier-BoldOblique';
+ t['CourierNewPS-BoldMT'] = 'Courier-Bold';
+ t['CourierNewPS-ItalicMT'] = 'Courier-Oblique';
+ t['CourierNewPSMT'] = 'Courier';
+ t['Helvetica'] = 'Helvetica';
+ t['Helvetica-Bold'] = 'Helvetica-Bold';
+ t['Helvetica-BoldItalic'] = 'Helvetica-BoldOblique';
+ t['Helvetica-BoldOblique'] = 'Helvetica-BoldOblique';
+ t['Helvetica-Italic'] = 'Helvetica-Oblique';
+ t['Helvetica-Oblique'] = 'Helvetica-Oblique';
+ t['SegoeUISymbol'] = 'Helvetica';
+ t['Symbol-Bold'] = 'Symbol';
+ t['Symbol-BoldItalic'] = 'Symbol';
+ t['Symbol-Italic'] = 'Symbol';
+ t['TimesNewRoman'] = 'Times-Roman';
+ t['TimesNewRoman-Bold'] = 'Times-Bold';
+ t['TimesNewRoman-BoldItalic'] = 'Times-BoldItalic';
+ t['TimesNewRoman-Italic'] = 'Times-Italic';
+ t['TimesNewRomanPS'] = 'Times-Roman';
+ t['TimesNewRomanPS-Bold'] = 'Times-Bold';
+ t['TimesNewRomanPS-BoldItalic'] = 'Times-BoldItalic';
+ t['TimesNewRomanPS-BoldItalicMT'] = 'Times-BoldItalic';
+ t['TimesNewRomanPS-BoldMT'] = 'Times-Bold';
+ t['TimesNewRomanPS-Italic'] = 'Times-Italic';
+ t['TimesNewRomanPS-ItalicMT'] = 'Times-Italic';
+ t['TimesNewRomanPSMT'] = 'Times-Roman';
+ t['TimesNewRomanPSMT-Bold'] = 'Times-Bold';
+ t['TimesNewRomanPSMT-BoldItalic'] = 'Times-BoldItalic';
+ t['TimesNewRomanPSMT-Italic'] = 'Times-Italic';
+});
+exports.getStdFontMap = getStdFontMap;
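+// Maps non-standard font names to a reasonable substitute font.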
+var getNonStdFontMap = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['Calibri'] = 'Helvetica';
+ t['Calibri-Bold'] = 'Helvetica-Bold';
+ t['Calibri-BoldItalic'] = 'Helvetica-BoldOblique';
+ t['Calibri-Italic'] = 'Helvetica-Oblique';
+ t['CenturyGothic'] = 'Helvetica';
+ t['CenturyGothic-Bold'] = 'Helvetica-Bold';
+ t['CenturyGothic-BoldItalic'] = 'Helvetica-BoldOblique';
+ t['CenturyGothic-Italic'] = 'Helvetica-Oblique';
+ t['ComicSansMS'] = 'Comic Sans MS';
+ t['ComicSansMS-Bold'] = 'Comic Sans MS-Bold';
+ t['ComicSansMS-BoldItalic'] = 'Comic Sans MS-BoldItalic';
+ t['ComicSansMS-Italic'] = 'Comic Sans MS-Italic';
+ t['LucidaConsole'] = 'Courier';
+ t['LucidaConsole-Bold'] = 'Courier-Bold';
+ t['LucidaConsole-BoldItalic'] = 'Courier-BoldOblique';
+ t['LucidaConsole-Italic'] = 'Courier-Oblique';
+ t['LucidaSans-Demi'] = 'Helvetica-Bold';
+ t['MS-Gothic'] = 'MS Gothic';
+ t['MS-Gothic-Bold'] = 'MS Gothic-Bold';
+ t['MS-Gothic-BoldItalic'] = 'MS Gothic-BoldItalic';
+ t['MS-Gothic-Italic'] = 'MS Gothic-Italic';
+ t['MS-Mincho'] = 'MS Mincho';
+ t['MS-Mincho-Bold'] = 'MS Mincho-Bold';
+ t['MS-Mincho-BoldItalic'] = 'MS Mincho-BoldItalic';
+ t['MS-Mincho-Italic'] = 'MS Mincho-Italic';
+ t['MS-PGothic'] = 'MS PGothic';
+ t['MS-PGothic-Bold'] = 'MS PGothic-Bold';
+ t['MS-PGothic-BoldItalic'] = 'MS PGothic-BoldItalic';
+ t['MS-PGothic-Italic'] = 'MS PGothic-Italic';
+ t['MS-PMincho'] = 'MS PMincho';
+ t['MS-PMincho-Bold'] = 'MS PMincho-Bold';
+ t['MS-PMincho-BoldItalic'] = 'MS PMincho-BoldItalic';
+ t['MS-PMincho-Italic'] = 'MS PMincho-Italic';
+ t['NuptialScript'] = 'Times-Italic';
+ t['Wingdings'] = 'ZapfDingbats';
+});
+exports.getNonStdFontMap = getNonStdFontMap;
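+// Set of font family names treated as serif fonts.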
+var getSerifFonts = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['Adobe Jenson'] = true;
+ t['Adobe Text'] = true;
+ t['Albertus'] = true;
+ t['Aldus'] = true;
+ t['Alexandria'] = true;
+ t['Algerian'] = true;
+ t['American Typewriter'] = true;
+ t['Antiqua'] = true;
+ t['Apex'] = true;
+ t['Arno'] = true;
+ t['Aster'] = true;
+ t['Aurora'] = true;
+ t['Baskerville'] = true;
+ t['Bell'] = true;
+ t['Bembo'] = true;
+ t['Bembo Schoolbook'] = true;
+ t['Benguiat'] = true;
+ t['Berkeley Old Style'] = true;
+ t['Bernhard Modern'] = true;
+ t['Berthold City'] = true;
+ t['Bodoni'] = true;
+ t['Bauer Bodoni'] = true;
+ t['Book Antiqua'] = true;
+ t['Bookman'] = true;
+ t['Bordeaux Roman'] = true;
+ t['Californian FB'] = true;
+ t['Calisto'] = true;
+ t['Calvert'] = true;
+ t['Capitals'] = true;
+ t['Cambria'] = true;
+ t['Cartier'] = true;
+ t['Caslon'] = true;
+ t['Catull'] = true;
+ t['Centaur'] = true;
+ t['Century Old Style'] = true;
+ t['Century Schoolbook'] = true;
+ t['Chaparral'] = true;
+ t['Charis SIL'] = true;
+ t['Cheltenham'] = true;
+ t['Cholla Slab'] = true;
+ t['Clarendon'] = true;
+ t['Clearface'] = true;
+ t['Cochin'] = true;
+ t['Colonna'] = true;
+ t['Computer Modern'] = true;
+ t['Concrete Roman'] = true;
+ t['Constantia'] = true;
+ t['Cooper Black'] = true;
+ t['Corona'] = true;
+ t['Ecotype'] = true;
+ t['Egyptienne'] = true;
+ t['Elephant'] = true;
+ t['Excelsior'] = true;
+ t['Fairfield'] = true;
+ t['FF Scala'] = true;
+ t['Folkard'] = true;
+ t['Footlight'] = true;
+ t['FreeSerif'] = true;
+ t['Friz Quadrata'] = true;
+ t['Garamond'] = true;
+ t['Gentium'] = true;
+ t['Georgia'] = true;
+ t['Gloucester'] = true;
+ t['Goudy Old Style'] = true;
+ t['Goudy Schoolbook'] = true;
+ t['Goudy Pro Font'] = true;
+ t['Granjon'] = true;
+ t['Guardian Egyptian'] = true;
+ t['Heather'] = true;
+ t['Hercules'] = true;
+ t['High Tower Text'] = true;
+ t['Hiroshige'] = true;
+ t['Hoefler Text'] = true;
+ t['Humana Serif'] = true;
+ t['Imprint'] = true;
+ t['Ionic No. 5'] = true;
+ t['Janson'] = true;
+ t['Joanna'] = true;
+ t['Korinna'] = true;
+ t['Lexicon'] = true;
+ t['Liberation Serif'] = true;
+ t['Linux Libertine'] = true;
+ t['Literaturnaya'] = true;
+ t['Lucida'] = true;
+ t['Lucida Bright'] = true;
+ t['Melior'] = true;
+ t['Memphis'] = true;
+ t['Miller'] = true;
+ t['Minion'] = true;
+ t['Modern'] = true;
+ t['Mona Lisa'] = true;
+ t['Mrs Eaves'] = true;
+ t['MS Serif'] = true;
+ t['Museo Slab'] = true;
+ t['New York'] = true;
+ t['Nimbus Roman'] = true;
+ t['NPS Rawlinson Roadway'] = true;
+ t['NuptialScript'] = true;
+ t['Palatino'] = true;
+ t['Perpetua'] = true;
+ t['Plantin'] = true;
+ t['Plantin Schoolbook'] = true;
+ t['Playbill'] = true;
+ t['Poor Richard'] = true;
+ t['Rawlinson Roadway'] = true;
+ t['Renault'] = true;
+ t['Requiem'] = true;
+ t['Rockwell'] = true;
+ t['Roman'] = true;
+ t['Rotis Serif'] = true;
+ t['Sabon'] = true;
+ t['Scala'] = true;
+ t['Seagull'] = true;
+ t['Sistina'] = true;
+ t['Souvenir'] = true;
+ t['STIX'] = true;
+ t['Stone Informal'] = true;
+ t['Stone Serif'] = true;
+ t['Sylfaen'] = true;
+ t['Times'] = true;
+ t['Trajan'] = true;
+ t['Trinité'] = true;
+ t['Trump Mediaeval'] = true;
+ t['Utopia'] = true;
+ t['Vale Type'] = true;
+ t['Bitstream Vera'] = true;
+ t['Vera Serif'] = true;
+ t['Versailles'] = true;
+ t['Wanted'] = true;
+ t['Weiss'] = true;
+ t['Wide Latin'] = true;
+ t['Windsor'] = true;
+ t['XITS'] = true;
+});
+exports.getSerifFonts = getSerifFonts;
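+// Set of font names treated as symbolic fonts.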
+var getSymbolsFonts = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['Dingbats'] = true;
+ t['Symbol'] = true;
+ t['ZapfDingbats'] = true;
+});
+exports.getSymbolsFonts = getSymbolsFonts;
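+// Maps glyph/character codes to Unicode code points (both in decimal) for the standard fonts.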
+var getGlyphMapForStandardFonts = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t[2] = 10;
+ t[3] = 32;
+ t[4] = 33;
+ t[5] = 34;
+ t[6] = 35;
+ t[7] = 36;
+ t[8] = 37;
+ t[9] = 38;
+ t[10] = 39;
+ t[11] = 40;
+ t[12] = 41;
+ t[13] = 42;
+ t[14] = 43;
+ t[15] = 44;
+ t[16] = 45;
+ t[17] = 46;
+ t[18] = 47;
+ t[19] = 48;
+ t[20] = 49;
+ t[21] = 50;
+ t[22] = 51;
+ t[23] = 52;
+ t[24] = 53;
+ t[25] = 54;
+ t[26] = 55;
+ t[27] = 56;
+ t[28] = 57;
+ t[29] = 58;
+ t[30] = 894;
+ t[31] = 60;
+ t[32] = 61;
+ t[33] = 62;
+ t[34] = 63;
+ t[35] = 64;
+ t[36] = 65;
+ t[37] = 66;
+ t[38] = 67;
+ t[39] = 68;
+ t[40] = 69;
+ t[41] = 70;
+ t[42] = 71;
+ t[43] = 72;
+ t[44] = 73;
+ t[45] = 74;
+ t[46] = 75;
+ t[47] = 76;
+ t[48] = 77;
+ t[49] = 78;
+ t[50] = 79;
+ t[51] = 80;
+ t[52] = 81;
+ t[53] = 82;
+ t[54] = 83;
+ t[55] = 84;
+ t[56] = 85;
+ t[57] = 86;
+ t[58] = 87;
+ t[59] = 88;
+ t[60] = 89;
+ t[61] = 90;
+ t[62] = 91;
+ t[63] = 92;
+ t[64] = 93;
+ t[65] = 94;
+ t[66] = 95;
+ t[67] = 96;
+ t[68] = 97;
+ t[69] = 98;
+ t[70] = 99;
+ t[71] = 100;
+ t[72] = 101;
+ t[73] = 102;
+ t[74] = 103;
+ t[75] = 104;
+ t[76] = 105;
+ t[77] = 106;
+ t[78] = 107;
+ t[79] = 108;
+ t[80] = 109;
+ t[81] = 110;
+ t[82] = 111;
+ t[83] = 112;
+ t[84] = 113;
+ t[85] = 114;
+ t[86] = 115;
+ t[87] = 116;
+ t[88] = 117;
+ t[89] = 118;
+ t[90] = 119;
+ t[91] = 120;
+ t[92] = 121;
+ t[93] = 122;
+ t[94] = 123;
+ t[95] = 124;
+ t[96] = 125;
+ t[97] = 126;
+ t[98] = 196;
+ t[99] = 197;
+ t[100] = 199;
+ t[101] = 201;
+ t[102] = 209;
+ t[103] = 214;
+ t[104] = 220;
+ t[105] = 225;
+ t[106] = 224;
+ t[107] = 226;
+ t[108] = 228;
+ t[109] = 227;
+ t[110] = 229;
+ t[111] = 231;
+ t[112] = 233;
+ t[113] = 232;
+ t[114] = 234;
+ t[115] = 235;
+ t[116] = 237;
+ t[117] = 236;
+ t[118] = 238;
+ t[119] = 239;
+ t[120] = 241;
+ t[121] = 243;
+ t[122] = 242;
+ t[123] = 244;
+ t[124] = 246;
+ t[125] = 245;
+ t[126] = 250;
+ t[127] = 249;
+ t[128] = 251;
+ t[129] = 252;
+ t[130] = 8224;
+ t[131] = 176;
+ t[132] = 162;
+ t[133] = 163;
+ t[134] = 167;
+ t[135] = 8226;
+ t[136] = 182;
+ t[137] = 223;
+ t[138] = 174;
+ t[139] = 169;
+ t[140] = 8482;
+ t[141] = 180;
+ t[142] = 168;
+ t[143] = 8800;
+ t[144] = 198;
+ t[145] = 216;
+ t[146] = 8734;
+ t[147] = 177;
+ t[148] = 8804;
+ t[149] = 8805;
+ t[150] = 165;
+ t[151] = 181;
+ t[152] = 8706;
+ t[153] = 8721;
+ t[154] = 8719;
+ t[156] = 8747;
+ t[157] = 170;
+ t[158] = 186;
+ t[159] = 8486;
+ t[160] = 230;
+ t[161] = 248;
+ t[162] = 191;
+ t[163] = 161;
+ t[164] = 172;
+ t[165] = 8730;
+ t[166] = 402;
+ t[167] = 8776;
+ t[168] = 8710;
+ t[169] = 171;
+ t[170] = 187;
+ t[171] = 8230;
+ t[210] = 218;
+ t[223] = 711;
+ t[224] = 321;
+ t[225] = 322;
+ t[227] = 353;
+ t[229] = 382;
+ t[234] = 253;
+ t[252] = 263;
+ t[253] = 268;
+ t[254] = 269;
+ t[258] = 258;
+ t[260] = 260;
+ t[261] = 261;
+ t[265] = 280;
+ t[266] = 281;
+ t[268] = 283;
+ t[269] = 313;
+ t[275] = 323;
+ t[276] = 324;
+ t[278] = 328;
+ t[284] = 345;
+ t[285] = 346;
+ t[286] = 347;
+ t[292] = 367;
+ t[295] = 377;
+ t[296] = 378;
+ t[298] = 380;
+ t[305] = 963;
+ t[306] = 964;
+ t[307] = 966;
+ t[308] = 8215;
+ t[309] = 8252;
+ t[310] = 8319;
+ t[311] = 8359;
+ t[312] = 8592;
+ t[313] = 8593;
+ t[337] = 9552;
+ t[493] = 1039;
+ t[494] = 1040;
+ t[705] = 1524;
+ t[706] = 8362;
+ t[710] = 64288;
+ t[711] = 64298;
+ t[759] = 1617;
+ t[761] = 1776;
+ t[763] = 1778;
+ t[775] = 1652;
+ t[777] = 1764;
+ t[778] = 1780;
+ t[779] = 1781;
+ t[780] = 1782;
+ t[782] = 771;
+ t[783] = 64726;
+ t[786] = 8363;
+ t[788] = 8532;
+ t[790] = 768;
+ t[791] = 769;
+ t[792] = 768;
+ t[795] = 803;
+ t[797] = 64336;
+ t[798] = 64337;
+ t[799] = 64342;
+ t[800] = 64343;
+ t[801] = 64344;
+ t[802] = 64345;
+ t[803] = 64362;
+ t[804] = 64363;
+ t[805] = 64364;
+ t[2424] = 7821;
+ t[2425] = 7822;
+ t[2426] = 7823;
+ t[2427] = 7824;
+ t[2428] = 7825;
+ t[2429] = 7826;
+ t[2430] = 7827;
+ t[2433] = 7682;
+ t[2678] = 8045;
+ t[2679] = 8046;
+ t[2830] = 1552;
+ t[2838] = 686;
+ t[2840] = 751;
+ t[2842] = 753;
+ t[2843] = 754;
+ t[2844] = 755;
+ t[2846] = 757;
+ t[2856] = 767;
+ t[2857] = 848;
+ t[2858] = 849;
+ t[2862] = 853;
+ t[2863] = 854;
+ t[2864] = 855;
+ t[2865] = 861;
+ t[2866] = 862;
+ t[2906] = 7460;
+ t[2908] = 7462;
+ t[2909] = 7463;
+ t[2910] = 7464;
+ t[2912] = 7466;
+ t[2913] = 7467;
+ t[2914] = 7468;
+ t[2916] = 7470;
+ t[2917] = 7471;
+ t[2918] = 7472;
+ t[2920] = 7474;
+ t[2921] = 7475;
+ t[2922] = 7476;
+ t[2924] = 7478;
+ t[2925] = 7479;
+ t[2926] = 7480;
+ t[2928] = 7482;
+ t[2929] = 7483;
+ t[2930] = 7484;
+ t[2932] = 7486;
+ t[2933] = 7487;
+ t[2934] = 7488;
+ t[2936] = 7490;
+ t[2937] = 7491;
+ t[2938] = 7492;
+ t[2940] = 7494;
+ t[2941] = 7495;
+ t[2942] = 7496;
+ t[2944] = 7498;
+ t[2946] = 7500;
+ t[2948] = 7502;
+ t[2950] = 7504;
+ t[2951] = 7505;
+ t[2952] = 7506;
+ t[2954] = 7508;
+ t[2955] = 7509;
+ t[2956] = 7510;
+ t[2958] = 7512;
+ t[2959] = 7513;
+ t[2960] = 7514;
+ t[2962] = 7516;
+ t[2963] = 7517;
+ t[2964] = 7518;
+ t[2966] = 7520;
+ t[2967] = 7521;
+ t[2968] = 7522;
+ t[2970] = 7524;
+ t[2971] = 7525;
+ t[2972] = 7526;
+ t[2974] = 7528;
+ t[2975] = 7529;
+ t[2976] = 7530;
+ t[2978] = 1537;
+ t[2979] = 1538;
+ t[2980] = 1539;
+ t[2982] = 1549;
+ t[2983] = 1551;
+ t[2984] = 1552;
+ t[2986] = 1554;
+ t[2987] = 1555;
+ t[2988] = 1556;
+ t[2990] = 1623;
+ t[2991] = 1624;
+ t[2995] = 1775;
+ t[2999] = 1791;
+ t[3002] = 64290;
+ t[3003] = 64291;
+ t[3004] = 64292;
+ t[3006] = 64294;
+ t[3007] = 64295;
+ t[3008] = 64296;
+ t[3011] = 1900;
+ t[3014] = 8223;
+ t[3015] = 8244;
+ t[3017] = 7532;
+ t[3018] = 7533;
+ t[3019] = 7534;
+ t[3075] = 7590;
+ t[3076] = 7591;
+ t[3079] = 7594;
+ t[3080] = 7595;
+ t[3083] = 7598;
+ t[3084] = 7599;
+ t[3087] = 7602;
+ t[3088] = 7603;
+ t[3091] = 7606;
+ t[3092] = 7607;
+ t[3095] = 7610;
+ t[3096] = 7611;
+ t[3099] = 7614;
+ t[3100] = 7615;
+ t[3103] = 7618;
+ t[3104] = 7619;
+ t[3107] = 8337;
+ t[3108] = 8338;
+ t[3116] = 1884;
+ t[3119] = 1885;
+ t[3120] = 1885;
+ t[3123] = 1886;
+ t[3124] = 1886;
+ t[3127] = 1887;
+ t[3128] = 1887;
+ t[3131] = 1888;
+ t[3132] = 1888;
+ t[3135] = 1889;
+ t[3136] = 1889;
+ t[3139] = 1890;
+ t[3140] = 1890;
+ t[3143] = 1891;
+ t[3144] = 1891;
+ t[3147] = 1892;
+ t[3148] = 1892;
+ t[3153] = 580;
+ t[3154] = 581;
+ t[3157] = 584;
+ t[3158] = 585;
+ t[3161] = 588;
+ t[3162] = 589;
+ t[3165] = 891;
+ t[3166] = 892;
+ t[3169] = 1274;
+ t[3170] = 1275;
+ t[3173] = 1278;
+ t[3174] = 1279;
+ t[3181] = 7622;
+ t[3182] = 7623;
+ t[3282] = 11799;
+ t[3316] = 578;
+ t[3379] = 42785;
+ t[3393] = 1159;
+ t[3416] = 8377;
+});
+exports.getGlyphMapForStandardFonts = getGlyphMapForStandardFonts;
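+// Additional code-to-Unicode overrides for Arial Black, applied on top of the standard-font map.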
+var getSupplementalGlyphMapForArialBlack = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t[227] = 322;
+ t[264] = 261;
+ t[291] = 346;
+});
+exports.getSupplementalGlyphMapForArialBlack = getSupplementalGlyphMapForArialBlack;
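+// Additional code-to-Unicode overrides for Calibri, applied on top of the standard-font map.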
+var getSupplementalGlyphMapForCalibri = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t[1] = 32;
+ t[4] = 65;
+ t[17] = 66;
+ t[18] = 67;
+ t[24] = 68;
+ t[28] = 69;
+ t[38] = 70;
+ t[39] = 71;
+ t[44] = 72;
+ t[47] = 73;
+ t[58] = 74;
+ t[60] = 75;
+ t[62] = 76;
+ t[68] = 77;
+ t[69] = 78;
+ t[75] = 79;
+ t[87] = 80;
+ t[89] = 81;
+ t[90] = 82;
+ t[94] = 83;
+ t[100] = 84;
+ t[104] = 85;
+ t[115] = 86;
+ t[116] = 87;
+ t[121] = 88;
+ t[122] = 89;
+ t[127] = 90;
+ t[258] = 97;
+ t[268] = 261;
+ t[271] = 98;
+ t[272] = 99;
+ t[273] = 263;
+ t[282] = 100;
+ t[286] = 101;
+ t[295] = 281;
+ t[296] = 102;
+ t[336] = 103;
+ t[346] = 104;
+ t[349] = 105;
+ t[361] = 106;
+ t[364] = 107;
+ t[367] = 108;
+ t[371] = 322;
+ t[373] = 109;
+ t[374] = 110;
+ t[381] = 111;
+ t[383] = 243;
+ t[393] = 112;
+ t[395] = 113;
+ t[396] = 114;
+ t[400] = 115;
+ t[401] = 347;
+ t[410] = 116;
+ t[437] = 117;
+ t[448] = 118;
+ t[449] = 119;
+ t[454] = 120;
+ t[455] = 121;
+ t[460] = 122;
+ t[463] = 380;
+ t[853] = 44;
+ t[855] = 58;
+ t[856] = 46;
+ t[876] = 47;
+ t[878] = 45;
+ t[882] = 45;
+ t[894] = 40;
+ t[895] = 41;
+ t[896] = 91;
+ t[897] = 93;
+ t[923] = 64;
+ t[1004] = 48;
+ t[1005] = 49;
+ t[1006] = 50;
+ t[1007] = 51;
+ t[1008] = 52;
+ t[1009] = 53;
+ t[1010] = 54;
+ t[1011] = 55;
+ t[1012] = 56;
+ t[1013] = 57;
+ t[1081] = 37;
+ t[1085] = 43;
+ t[1086] = 45;
+});
+exports.getSupplementalGlyphMapForCalibri = getSupplementalGlyphMapForCalibri;
+
+/***/ }),
+/* 180 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+var getLookupTableFactory = __w_pdfjs_require__(154).getLookupTableFactory;
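+// Maps Private Use Area code points (decimal keys) to their standard Unicode equivalents.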
+var getSpecialPUASymbols = getLookupTableFactory(function (t) {
+ t[63721] = 0x00A9;
+ t[63193] = 0x00A9;
+ t[63720] = 0x00AE;
+ t[63194] = 0x00AE;
+ t[63722] = 0x2122;
+ t[63195] = 0x2122;
+ t[63729] = 0x23A7;
+ t[63730] = 0x23A8;
+ t[63731] = 0x23A9;
+ t[63740] = 0x23AB;
+ t[63741] = 0x23AC;
+ t[63742] = 0x23AD;
+ t[63726] = 0x23A1;
+ t[63727] = 0x23A2;
+ t[63728] = 0x23A3;
+ t[63737] = 0x23A4;
+ t[63738] = 0x23A5;
+ t[63739] = 0x23A6;
+ t[63723] = 0x239B;
+ t[63724] = 0x239C;
+ t[63725] = 0x239D;
+ t[63734] = 0x239E;
+ t[63735] = 0x239F;
+ t[63736] = 0x23A0;
+});
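+// Remaps special values: codes in 0xFFF0-0xFFFF become 0, PUA codes (0xF600-0xF8FF) are resolved
+// via getSpecialPUASymbols, and the soft hyphen becomes a regular hyphen; other codes pass through.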
+function mapSpecialUnicodeValues(code) {
+ if (code >= 0xFFF0 && code <= 0xFFFF) {
+ return 0;
+ } else if (code >= 0xF600 && code <= 0xF8FF) {
+ return getSpecialPUASymbols()[code] || code;
+ } else if (code === 0x00AD) {
+ return 0x002D;
+ }
+ return code;
+}
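+// Resolves a glyph name to a Unicode code point, falling back to 'uniXXXX' / 'uXXXX[XX]' style
+// names when the name is not in the supplied map; returns -1 if no mapping can be found.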
+function getUnicodeForGlyph(name, glyphsUnicodeMap) {
+ var unicode = glyphsUnicodeMap[name];
+ if (unicode !== undefined) {
+ return unicode;
+ }
+ if (!name) {
+ return -1;
+ }
+ if (name[0] === 'u') {
+ var nameLen = name.length, hexStr;
+ if (nameLen === 7 && name[1] === 'n' && name[2] === 'i') {
+ hexStr = name.substring(3);
+ } else if (nameLen >= 5 && nameLen <= 7) {
+ hexStr = name.substring(1);
+ } else {
+ return -1;
+ }
+ if (hexStr === hexStr.toUpperCase()) {
+ unicode = parseInt(hexStr, 16);
+ if (unicode >= 0) {
+ return unicode;
+ }
+ }
+ }
+ return -1;
+}
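+// Unicode block ranges; the index order matches the OS/2 table's ulUnicodeRange bit assignments.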
+var UnicodeRanges = [
+ {
+ 'begin': 0x0000,
+ 'end': 0x007F
+ },
+ {
+ 'begin': 0x0080,
+ 'end': 0x00FF
+ },
+ {
+ 'begin': 0x0100,
+ 'end': 0x017F
+ },
+ {
+ 'begin': 0x0180,
+ 'end': 0x024F
+ },
+ {
+ 'begin': 0x0250,
+ 'end': 0x02AF
+ },
+ {
+ 'begin': 0x02B0,
+ 'end': 0x02FF
+ },
+ {
+ 'begin': 0x0300,
+ 'end': 0x036F
+ },
+ {
+ 'begin': 0x0370,
+ 'end': 0x03FF
+ },
+ {
+ 'begin': 0x2C80,
+ 'end': 0x2CFF
+ },
+ {
+ 'begin': 0x0400,
+ 'end': 0x04FF
+ },
+ {
+ 'begin': 0x0530,
+ 'end': 0x058F
+ },
+ {
+ 'begin': 0x0590,
+ 'end': 0x05FF
+ },
+ {
+ 'begin': 0xA500,
+ 'end': 0xA63F
+ },
+ {
+ 'begin': 0x0600,
+ 'end': 0x06FF
+ },
+ {
+ 'begin': 0x07C0,
+ 'end': 0x07FF
+ },
+ {
+ 'begin': 0x0900,
+ 'end': 0x097F
+ },
+ {
+ 'begin': 0x0980,
+ 'end': 0x09FF
+ },
+ {
+ 'begin': 0x0A00,
+ 'end': 0x0A7F
+ },
+ {
+ 'begin': 0x0A80,
+ 'end': 0x0AFF
+ },
+ {
+ 'begin': 0x0B00,
+ 'end': 0x0B7F
+ },
+ {
+ 'begin': 0x0B80,
+ 'end': 0x0BFF
+ },
+ {
+ 'begin': 0x0C00,
+ 'end': 0x0C7F
+ },
+ {
+ 'begin': 0x0C80,
+ 'end': 0x0CFF
+ },
+ {
+ 'begin': 0x0D00,
+ 'end': 0x0D7F
+ },
+ {
+ 'begin': 0x0E00,
+ 'end': 0x0E7F
+ },
+ {
+ 'begin': 0x0E80,
+ 'end': 0x0EFF
+ },
+ {
+ 'begin': 0x10A0,
+ 'end': 0x10FF
+ },
+ {
+ 'begin': 0x1B00,
+ 'end': 0x1B7F
+ },
+ {
+ 'begin': 0x1100,
+ 'end': 0x11FF
+ },
+ {
+ 'begin': 0x1E00,
+ 'end': 0x1EFF
+ },
+ {
+ 'begin': 0x1F00,
+ 'end': 0x1FFF
+ },
+ {
+ 'begin': 0x2000,
+ 'end': 0x206F
+ },
+ {
+ 'begin': 0x2070,
+ 'end': 0x209F
+ },
+ {
+ 'begin': 0x20A0,
+ 'end': 0x20CF
+ },
+ {
+ 'begin': 0x20D0,
+ 'end': 0x20FF
+ },
+ {
+ 'begin': 0x2100,
+ 'end': 0x214F
+ },
+ {
+ 'begin': 0x2150,
+ 'end': 0x218F
+ },
+ {
+ 'begin': 0x2190,
+ 'end': 0x21FF
+ },
+ {
+ 'begin': 0x2200,
+ 'end': 0x22FF
+ },
+ {
+ 'begin': 0x2300,
+ 'end': 0x23FF
+ },
+ {
+ 'begin': 0x2400,
+ 'end': 0x243F
+ },
+ {
+ 'begin': 0x2440,
+ 'end': 0x245F
+ },
+ {
+ 'begin': 0x2460,
+ 'end': 0x24FF
+ },
+ {
+ 'begin': 0x2500,
+ 'end': 0x257F
+ },
+ {
+ 'begin': 0x2580,
+ 'end': 0x259F
+ },
+ {
+ 'begin': 0x25A0,
+ 'end': 0x25FF
+ },
+ {
+ 'begin': 0x2600,
+ 'end': 0x26FF
+ },
+ {
+ 'begin': 0x2700,
+ 'end': 0x27BF
+ },
+ {
+ 'begin': 0x3000,
+ 'end': 0x303F
+ },
+ {
+ 'begin': 0x3040,
+ 'end': 0x309F
+ },
+ {
+ 'begin': 0x30A0,
+ 'end': 0x30FF
+ },
+ {
+ 'begin': 0x3100,
+ 'end': 0x312F
+ },
+ {
+ 'begin': 0x3130,
+ 'end': 0x318F
+ },
+ {
+ 'begin': 0xA840,
+ 'end': 0xA87F
+ },
+ {
+ 'begin': 0x3200,
+ 'end': 0x32FF
+ },
+ {
+ 'begin': 0x3300,
+ 'end': 0x33FF
+ },
+ {
+ 'begin': 0xAC00,
+ 'end': 0xD7AF
+ },
+ {
+ 'begin': 0xD800,
+ 'end': 0xDFFF
+ },
+ {
+ 'begin': 0x10900,
+ 'end': 0x1091F
+ },
+ {
+ 'begin': 0x4E00,
+ 'end': 0x9FFF
+ },
+ {
+ 'begin': 0xE000,
+ 'end': 0xF8FF
+ },
+ {
+ 'begin': 0x31C0,
+ 'end': 0x31EF
+ },
+ {
+ 'begin': 0xFB00,
+ 'end': 0xFB4F
+ },
+ {
+ 'begin': 0xFB50,
+ 'end': 0xFDFF
+ },
+ {
+ 'begin': 0xFE20,
+ 'end': 0xFE2F
+ },
+ {
+ 'begin': 0xFE10,
+ 'end': 0xFE1F
+ },
+ {
+ 'begin': 0xFE50,
+ 'end': 0xFE6F
+ },
+ {
+ 'begin': 0xFE70,
+ 'end': 0xFEFF
+ },
+ {
+ 'begin': 0xFF00,
+ 'end': 0xFFEF
+ },
+ {
+ 'begin': 0xFFF0,
+ 'end': 0xFFFF
+ },
+ {
+ 'begin': 0x0F00,
+ 'end': 0x0FFF
+ },
+ {
+ 'begin': 0x0700,
+ 'end': 0x074F
+ },
+ {
+ 'begin': 0x0780,
+ 'end': 0x07BF
+ },
+ {
+ 'begin': 0x0D80,
+ 'end': 0x0DFF
+ },
+ {
+ 'begin': 0x1000,
+ 'end': 0x109F
+ },
+ {
+ 'begin': 0x1200,
+ 'end': 0x137F
+ },
+ {
+ 'begin': 0x13A0,
+ 'end': 0x13FF
+ },
+ {
+ 'begin': 0x1400,
+ 'end': 0x167F
+ },
+ {
+ 'begin': 0x1680,
+ 'end': 0x169F
+ },
+ {
+ 'begin': 0x16A0,
+ 'end': 0x16FF
+ },
+ {
+ 'begin': 0x1780,
+ 'end': 0x17FF
+ },
+ {
+ 'begin': 0x1800,
+ 'end': 0x18AF
+ },
+ {
+ 'begin': 0x2800,
+ 'end': 0x28FF
+ },
+ {
+ 'begin': 0xA000,
+ 'end': 0xA48F
+ },
+ {
+ 'begin': 0x1700,
+ 'end': 0x171F
+ },
+ {
+ 'begin': 0x10300,
+ 'end': 0x1032F
+ },
+ {
+ 'begin': 0x10330,
+ 'end': 0x1034F
+ },
+ {
+ 'begin': 0x10400,
+ 'end': 0x1044F
+ },
+ {
+ 'begin': 0x1D000,
+ 'end': 0x1D0FF
+ },
+ {
+ 'begin': 0x1D400,
+ 'end': 0x1D7FF
+ },
+ {
+ 'begin': 0xFF000,
+ 'end': 0xFFFFD
+ },
+ {
+ 'begin': 0xFE00,
+ 'end': 0xFE0F
+ },
+ {
+ 'begin': 0xE0000,
+ 'end': 0xE007F
+ },
+ {
+ 'begin': 0x1900,
+ 'end': 0x194F
+ },
+ {
+ 'begin': 0x1950,
+ 'end': 0x197F
+ },
+ {
+ 'begin': 0x1980,
+ 'end': 0x19DF
+ },
+ {
+ 'begin': 0x1A00,
+ 'end': 0x1A1F
+ },
+ {
+ 'begin': 0x2C00,
+ 'end': 0x2C5F
+ },
+ {
+ 'begin': 0x2D30,
+ 'end': 0x2D7F
+ },
+ {
+ 'begin': 0x4DC0,
+ 'end': 0x4DFF
+ },
+ {
+ 'begin': 0xA800,
+ 'end': 0xA82F
+ },
+ {
+ 'begin': 0x10000,
+ 'end': 0x1007F
+ },
+ {
+ 'begin': 0x10140,
+ 'end': 0x1018F
+ },
+ {
+ 'begin': 0x10380,
+ 'end': 0x1039F
+ },
+ {
+ 'begin': 0x103A0,
+ 'end': 0x103DF
+ },
+ {
+ 'begin': 0x10450,
+ 'end': 0x1047F
+ },
+ {
+ 'begin': 0x10480,
+ 'end': 0x104AF
+ },
+ {
+ 'begin': 0x10800,
+ 'end': 0x1083F
+ },
+ {
+ 'begin': 0x10A00,
+ 'end': 0x10A5F
+ },
+ {
+ 'begin': 0x1D300,
+ 'end': 0x1D35F
+ },
+ {
+ 'begin': 0x12000,
+ 'end': 0x123FF
+ },
+ {
+ 'begin': 0x1D360,
+ 'end': 0x1D37F
+ },
+ {
+ 'begin': 0x1B80,
+ 'end': 0x1BBF
+ },
+ {
+ 'begin': 0x1C00,
+ 'end': 0x1C4F
+ },
+ {
+ 'begin': 0x1C50,
+ 'end': 0x1C7F
+ },
+ {
+ 'begin': 0xA880,
+ 'end': 0xA8DF
+ },
+ {
+ 'begin': 0xA900,
+ 'end': 0xA92F
+ },
+ {
+ 'begin': 0xA930,
+ 'end': 0xA95F
+ },
+ {
+ 'begin': 0xAA00,
+ 'end': 0xAA5F
+ },
+ {
+ 'begin': 0x10190,
+ 'end': 0x101CF
+ },
+ {
+ 'begin': 0x101D0,
+ 'end': 0x101FF
+ },
+ {
+ 'begin': 0x102A0,
+ 'end': 0x102DF
+ },
+ {
+ 'begin': 0x1F030,
+ 'end': 0x1F09F
+ }
+];
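+// Returns the index of the first range whose [begin, end) interval contains the value, or -1.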
+function getUnicodeRangeFor(value) {
+ for (var i = 0, ii = UnicodeRanges.length; i < ii; i++) {
+ var range = UnicodeRanges[i];
+ if (value >= range.begin && value < range.end) {
+ return i;
+ }
+ }
+ return -1;
+}
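+// True when the code point lies in the Hebrew (index 11) or Arabic (index 13) range above.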
+function isRTLRangeFor(value) {
+ var range = UnicodeRanges[13];
+ if (value >= range.begin && value < range.end) {
+ return true;
+ }
+ range = UnicodeRanges[11];
+ if (value >= range.begin && value < range.end) {
+ return true;
+ }
+ return false;
+}
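+// Maps single characters to their normalized (compatibility-decomposed) replacement strings.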
+var getNormalizedUnicodes = getLookupTableFactory(function (t) {
+ t['\u00A8'] = '\u0020\u0308';
+ t['\u00AF'] = '\u0020\u0304';
+ t['\u00B4'] = '\u0020\u0301';
+ t['\u00B5'] = '\u03BC';
+ t['\u00B8'] = '\u0020\u0327';
+ t['\u0132'] = '\u0049\u004A';
+ t['\u0133'] = '\u0069\u006A';
+ t['\u013F'] = '\u004C\u00B7';
+ t['\u0140'] = '\u006C\u00B7';
+ t['\u0149'] = '\u02BC\u006E';
+ t['\u017F'] = '\u0073';
+ t['\u01C4'] = '\u0044\u017D';
+ t['\u01C5'] = '\u0044\u017E';
+ t['\u01C6'] = '\u0064\u017E';
+ t['\u01C7'] = '\u004C\u004A';
+ t['\u01C8'] = '\u004C\u006A';
+ t['\u01C9'] = '\u006C\u006A';
+ t['\u01CA'] = '\u004E\u004A';
+ t['\u01CB'] = '\u004E\u006A';
+ t['\u01CC'] = '\u006E\u006A';
+ t['\u01F1'] = '\u0044\u005A';
+ t['\u01F2'] = '\u0044\u007A';
+ t['\u01F3'] = '\u0064\u007A';
+ t['\u02D8'] = '\u0020\u0306';
+ t['\u02D9'] = '\u0020\u0307';
+ t['\u02DA'] = '\u0020\u030A';
+ t['\u02DB'] = '\u0020\u0328';
+ t['\u02DC'] = '\u0020\u0303';
+ t['\u02DD'] = '\u0020\u030B';
+ t['\u037A'] = '\u0020\u0345';
+ t['\u0384'] = '\u0020\u0301';
+ t['\u03D0'] = '\u03B2';
+ t['\u03D1'] = '\u03B8';
+ t['\u03D2'] = '\u03A5';
+ t['\u03D5'] = '\u03C6';
+ t['\u03D6'] = '\u03C0';
+ t['\u03F0'] = '\u03BA';
+ t['\u03F1'] = '\u03C1';
+ t['\u03F2'] = '\u03C2';
+ t['\u03F4'] = '\u0398';
+ t['\u03F5'] = '\u03B5';
+ t['\u03F9'] = '\u03A3';
+ t['\u0587'] = '\u0565\u0582';
+ t['\u0675'] = '\u0627\u0674';
+ t['\u0676'] = '\u0648\u0674';
+ t['\u0677'] = '\u06C7\u0674';
+ t['\u0678'] = '\u064A\u0674';
+ t['\u0E33'] = '\u0E4D\u0E32';
+ t['\u0EB3'] = '\u0ECD\u0EB2';
+ t['\u0EDC'] = '\u0EAB\u0E99';
+ t['\u0EDD'] = '\u0EAB\u0EA1';
+ t['\u0F77'] = '\u0FB2\u0F81';
+ t['\u0F79'] = '\u0FB3\u0F81';
+ t['\u1E9A'] = '\u0061\u02BE';
+ t['\u1FBD'] = '\u0020\u0313';
+ t['\u1FBF'] = '\u0020\u0313';
+ t['\u1FC0'] = '\u0020\u0342';
+ t['\u1FFE'] = '\u0020\u0314';
+ t['\u2002'] = '\u0020';
+ t['\u2003'] = '\u0020';
+ t['\u2004'] = '\u0020';
+ t['\u2005'] = '\u0020';
+ t['\u2006'] = '\u0020';
+ t['\u2008'] = '\u0020';
+ t['\u2009'] = '\u0020';
+ t['\u200A'] = '\u0020';
+ t['\u2017'] = '\u0020\u0333';
+ t['\u2024'] = '\u002E';
+ t['\u2025'] = '\u002E\u002E';
+ t['\u2026'] = '\u002E\u002E\u002E';
+ t['\u2033'] = '\u2032\u2032';
+ t['\u2034'] = '\u2032\u2032\u2032';
+ t['\u2036'] = '\u2035\u2035';
+ t['\u2037'] = '\u2035\u2035\u2035';
+ t['\u203C'] = '\u0021\u0021';
+ t['\u203E'] = '\u0020\u0305';
+ t['\u2047'] = '\u003F\u003F';
+ t['\u2048'] = '\u003F\u0021';
+ t['\u2049'] = '\u0021\u003F';
+ t['\u2057'] = '\u2032\u2032\u2032\u2032';
+ t['\u205F'] = '\u0020';
+ t['\u20A8'] = '\u0052\u0073';
+ t['\u2100'] = '\u0061\u002F\u0063';
+ t['\u2101'] = '\u0061\u002F\u0073';
+ t['\u2103'] = '\u00B0\u0043';
+ t['\u2105'] = '\u0063\u002F\u006F';
+ t['\u2106'] = '\u0063\u002F\u0075';
+ t['\u2107'] = '\u0190';
+ t['\u2109'] = '\u00B0\u0046';
+ t['\u2116'] = '\u004E\u006F';
+ t['\u2121'] = '\u0054\u0045\u004C';
+ t['\u2135'] = '\u05D0';
+ t['\u2136'] = '\u05D1';
+ t['\u2137'] = '\u05D2';
+ t['\u2138'] = '\u05D3';
+ t['\u213B'] = '\u0046\u0041\u0058';
+ t['\u2160'] = '\u0049';
+ t['\u2161'] = '\u0049\u0049';
+ t['\u2162'] = '\u0049\u0049\u0049';
+ t['\u2163'] = '\u0049\u0056';
+ t['\u2164'] = '\u0056';
+ t['\u2165'] = '\u0056\u0049';
+ t['\u2166'] = '\u0056\u0049\u0049';
+ t['\u2167'] = '\u0056\u0049\u0049\u0049';
+ t['\u2168'] = '\u0049\u0058';
+ t['\u2169'] = '\u0058';
+ t['\u216A'] = '\u0058\u0049';
+ t['\u216B'] = '\u0058\u0049\u0049';
+ t['\u216C'] = '\u004C';
+ t['\u216D'] = '\u0043';
+ t['\u216E'] = '\u0044';
+ t['\u216F'] = '\u004D';
+ t['\u2170'] = '\u0069';
+ t['\u2171'] = '\u0069\u0069';
+ t['\u2172'] = '\u0069\u0069\u0069';
+ t['\u2173'] = '\u0069\u0076';
+ t['\u2174'] = '\u0076';
+ t['\u2175'] = '\u0076\u0069';
+ t['\u2176'] = '\u0076\u0069\u0069';
+ t['\u2177'] = '\u0076\u0069\u0069\u0069';
+ t['\u2178'] = '\u0069\u0078';
+ t['\u2179'] = '\u0078';
+ t['\u217A'] = '\u0078\u0069';
+ t['\u217B'] = '\u0078\u0069\u0069';
+ t['\u217C'] = '\u006C';
+ t['\u217D'] = '\u0063';
+ t['\u217E'] = '\u0064';
+ t['\u217F'] = '\u006D';
+ t['\u222C'] = '\u222B\u222B';
+ t['\u222D'] = '\u222B\u222B\u222B';
+ t['\u222F'] = '\u222E\u222E';
+ t['\u2230'] = '\u222E\u222E\u222E';
+ t['\u2474'] = '\u0028\u0031\u0029';
+ t['\u2475'] = '\u0028\u0032\u0029';
+ t['\u2476'] = '\u0028\u0033\u0029';
+ t['\u2477'] = '\u0028\u0034\u0029';
+ t['\u2478'] = '\u0028\u0035\u0029';
+ t['\u2479'] = '\u0028\u0036\u0029';
+ t['\u247A'] = '\u0028\u0037\u0029';
+ t['\u247B'] = '\u0028\u0038\u0029';
+ t['\u247C'] = '\u0028\u0039\u0029';
+ t['\u247D'] = '\u0028\u0031\u0030\u0029';
+ t['\u247E'] = '\u0028\u0031\u0031\u0029';
+ t['\u247F'] = '\u0028\u0031\u0032\u0029';
+ t['\u2480'] = '\u0028\u0031\u0033\u0029';
+ t['\u2481'] = '\u0028\u0031\u0034\u0029';
+ t['\u2482'] = '\u0028\u0031\u0035\u0029';
+ t['\u2483'] = '\u0028\u0031\u0036\u0029';
+ t['\u2484'] = '\u0028\u0031\u0037\u0029';
+ t['\u2485'] = '\u0028\u0031\u0038\u0029';
+ t['\u2486'] = '\u0028\u0031\u0039\u0029';
+ t['\u2487'] = '\u0028\u0032\u0030\u0029';
+ t['\u2488'] = '\u0031\u002E';
+ t['\u2489'] = '\u0032\u002E';
+ t['\u248A'] = '\u0033\u002E';
+ t['\u248B'] = '\u0034\u002E';
+ t['\u248C'] = '\u0035\u002E';
+ t['\u248D'] = '\u0036\u002E';
+ t['\u248E'] = '\u0037\u002E';
+ t['\u248F'] = '\u0038\u002E';
+ t['\u2490'] = '\u0039\u002E';
+ t['\u2491'] = '\u0031\u0030\u002E';
+ t['\u2492'] = '\u0031\u0031\u002E';
+ t['\u2493'] = '\u0031\u0032\u002E';
+ t['\u2494'] = '\u0031\u0033\u002E';
+ t['\u2495'] = '\u0031\u0034\u002E';
+ t['\u2496'] = '\u0031\u0035\u002E';
+ t['\u2497'] = '\u0031\u0036\u002E';
+ t['\u2498'] = '\u0031\u0037\u002E';
+ t['\u2499'] = '\u0031\u0038\u002E';
+ t['\u249A'] = '\u0031\u0039\u002E';
+ t['\u249B'] = '\u0032\u0030\u002E';
+ t['\u249C'] = '\u0028\u0061\u0029';
+ t['\u249D'] = '\u0028\u0062\u0029';
+ t['\u249E'] = '\u0028\u0063\u0029';
+ t['\u249F'] = '\u0028\u0064\u0029';
+ t['\u24A0'] = '\u0028\u0065\u0029';
+ t['\u24A1'] = '\u0028\u0066\u0029';
+ t['\u24A2'] = '\u0028\u0067\u0029';
+ t['\u24A3'] = '\u0028\u0068\u0029';
+ t['\u24A4'] = '\u0028\u0069\u0029';
+ t['\u24A5'] = '\u0028\u006A\u0029';
+ t['\u24A6'] = '\u0028\u006B\u0029';
+ t['\u24A7'] = '\u0028\u006C\u0029';
+ t['\u24A8'] = '\u0028\u006D\u0029';
+ t['\u24A9'] = '\u0028\u006E\u0029';
+ t['\u24AA'] = '\u0028\u006F\u0029';
+ t['\u24AB'] = '\u0028\u0070\u0029';
+ t['\u24AC'] = '\u0028\u0071\u0029';
+ t['\u24AD'] = '\u0028\u0072\u0029';
+ t['\u24AE'] = '\u0028\u0073\u0029';
+ t['\u24AF'] = '\u0028\u0074\u0029';
+ t['\u24B0'] = '\u0028\u0075\u0029';
+ t['\u24B1'] = '\u0028\u0076\u0029';
+ t['\u24B2'] = '\u0028\u0077\u0029';
+ t['\u24B3'] = '\u0028\u0078\u0029';
+ t['\u24B4'] = '\u0028\u0079\u0029';
+ t['\u24B5'] = '\u0028\u007A\u0029';
+ t['\u2A0C'] = '\u222B\u222B\u222B\u222B';
+ t['\u2A74'] = '\u003A\u003A\u003D';
+ t['\u2A75'] = '\u003D\u003D';
+ t['\u2A76'] = '\u003D\u003D\u003D';
+ t['\u2E9F'] = '\u6BCD';
+ t['\u2EF3'] = '\u9F9F';
+ t['\u2F00'] = '\u4E00';
+ t['\u2F01'] = '\u4E28';
+ t['\u2F02'] = '\u4E36';
+ t['\u2F03'] = '\u4E3F';
+ t['\u2F04'] = '\u4E59';
+ t['\u2F05'] = '\u4E85';
+ t['\u2F06'] = '\u4E8C';
+ t['\u2F07'] = '\u4EA0';
+ t['\u2F08'] = '\u4EBA';
+ t['\u2F09'] = '\u513F';
+ t['\u2F0A'] = '\u5165';
+ t['\u2F0B'] = '\u516B';
+ t['\u2F0C'] = '\u5182';
+ t['\u2F0D'] = '\u5196';
+ t['\u2F0E'] = '\u51AB';
+ t['\u2F0F'] = '\u51E0';
+ t['\u2F10'] = '\u51F5';
+ t['\u2F11'] = '\u5200';
+ t['\u2F12'] = '\u529B';
+ t['\u2F13'] = '\u52F9';
+ t['\u2F14'] = '\u5315';
+ t['\u2F15'] = '\u531A';
+ t['\u2F16'] = '\u5338';
+ t['\u2F17'] = '\u5341';
+ t['\u2F18'] = '\u535C';
+ t['\u2F19'] = '\u5369';
+ t['\u2F1A'] = '\u5382';
+ t['\u2F1B'] = '\u53B6';
+ t['\u2F1C'] = '\u53C8';
+ t['\u2F1D'] = '\u53E3';
+ t['\u2F1E'] = '\u56D7';
+ t['\u2F1F'] = '\u571F';
+ t['\u2F20'] = '\u58EB';
+ t['\u2F21'] = '\u5902';
+ t['\u2F22'] = '\u590A';
+ t['\u2F23'] = '\u5915';
+ t['\u2F24'] = '\u5927';
+ t['\u2F25'] = '\u5973';
+ t['\u2F26'] = '\u5B50';
+ t['\u2F27'] = '\u5B80';
+ t['\u2F28'] = '\u5BF8';
+ t['\u2F29'] = '\u5C0F';
+ t['\u2F2A'] = '\u5C22';
+ t['\u2F2B'] = '\u5C38';
+ t['\u2F2C'] = '\u5C6E';
+ t['\u2F2D'] = '\u5C71';
+ t['\u2F2E'] = '\u5DDB';
+ t['\u2F2F'] = '\u5DE5';
+ t['\u2F30'] = '\u5DF1';
+ t['\u2F31'] = '\u5DFE';
+ t['\u2F32'] = '\u5E72';
+ t['\u2F33'] = '\u5E7A';
+ t['\u2F34'] = '\u5E7F';
+ t['\u2F35'] = '\u5EF4';
+ t['\u2F36'] = '\u5EFE';
+ t['\u2F37'] = '\u5F0B';
+ t['\u2F38'] = '\u5F13';
+ t['\u2F39'] = '\u5F50';
+ t['\u2F3A'] = '\u5F61';
+ t['\u2F3B'] = '\u5F73';
+ t['\u2F3C'] = '\u5FC3';
+ t['\u2F3D'] = '\u6208';
+ t['\u2F3E'] = '\u6236';
+ t['\u2F3F'] = '\u624B';
+ t['\u2F40'] = '\u652F';
+ t['\u2F41'] = '\u6534';
+ t['\u2F42'] = '\u6587';
+ t['\u2F43'] = '\u6597';
+ t['\u2F44'] = '\u65A4';
+ t['\u2F45'] = '\u65B9';
+ t['\u2F46'] = '\u65E0';
+ t['\u2F47'] = '\u65E5';
+ t['\u2F48'] = '\u66F0';
+ t['\u2F49'] = '\u6708';
+ t['\u2F4A'] = '\u6728';
+ t['\u2F4B'] = '\u6B20';
+ t['\u2F4C'] = '\u6B62';
+ t['\u2F4D'] = '\u6B79';
+ t['\u2F4E'] = '\u6BB3';
+ t['\u2F4F'] = '\u6BCB';
+ t['\u2F50'] = '\u6BD4';
+ t['\u2F51'] = '\u6BDB';
+ t['\u2F52'] = '\u6C0F';
+ t['\u2F53'] = '\u6C14';
+ t['\u2F54'] = '\u6C34';
+ t['\u2F55'] = '\u706B';
+ t['\u2F56'] = '\u722A';
+ t['\u2F57'] = '\u7236';
+ t['\u2F58'] = '\u723B';
+ t['\u2F59'] = '\u723F';
+ t['\u2F5A'] = '\u7247';
+ t['\u2F5B'] = '\u7259';
+ t['\u2F5C'] = '\u725B';
+ t['\u2F5D'] = '\u72AC';
+ t['\u2F5E'] = '\u7384';
+ t['\u2F5F'] = '\u7389';
+ t['\u2F60'] = '\u74DC';
+ t['\u2F61'] = '\u74E6';
+ t['\u2F62'] = '\u7518';
+ t['\u2F63'] = '\u751F';
+ t['\u2F64'] = '\u7528';
+ t['\u2F65'] = '\u7530';
+ t['\u2F66'] = '\u758B';
+ t['\u2F67'] = '\u7592';
+ t['\u2F68'] = '\u7676';
+ t['\u2F69'] = '\u767D';
+ t['\u2F6A'] = '\u76AE';
+ t['\u2F6B'] = '\u76BF';
+ t['\u2F6C'] = '\u76EE';
+ t['\u2F6D'] = '\u77DB';
+ t['\u2F6E'] = '\u77E2';
+ t['\u2F6F'] = '\u77F3';
+ t['\u2F70'] = '\u793A';
+ t['\u2F71'] = '\u79B8';
+ t['\u2F72'] = '\u79BE';
+ t['\u2F73'] = '\u7A74';
+ t['\u2F74'] = '\u7ACB';
+ t['\u2F75'] = '\u7AF9';
+ t['\u2F76'] = '\u7C73';
+ t['\u2F77'] = '\u7CF8';
+ t['\u2F78'] = '\u7F36';
+ t['\u2F79'] = '\u7F51';
+ t['\u2F7A'] = '\u7F8A';
+ t['\u2F7B'] = '\u7FBD';
+ t['\u2F7C'] = '\u8001';
+ t['\u2F7D'] = '\u800C';
+ t['\u2F7E'] = '\u8012';
+ t['\u2F7F'] = '\u8033';
+ t['\u2F80'] = '\u807F';
+ t['\u2F81'] = '\u8089';
+ t['\u2F82'] = '\u81E3';
+ t['\u2F83'] = '\u81EA';
+ t['\u2F84'] = '\u81F3';
+ t['\u2F85'] = '\u81FC';
+ t['\u2F86'] = '\u820C';
+ t['\u2F87'] = '\u821B';
+ t['\u2F88'] = '\u821F';
+ t['\u2F89'] = '\u826E';
+ t['\u2F8A'] = '\u8272';
+ t['\u2F8B'] = '\u8278';
+ t['\u2F8C'] = '\u864D';
+ t['\u2F8D'] = '\u866B';
+ t['\u2F8E'] = '\u8840';
+ t['\u2F8F'] = '\u884C';
+ t['\u2F90'] = '\u8863';
+ t['\u2F91'] = '\u897E';
+ t['\u2F92'] = '\u898B';
+ t['\u2F93'] = '\u89D2';
+ t['\u2F94'] = '\u8A00';
+ t['\u2F95'] = '\u8C37';
+ t['\u2F96'] = '\u8C46';
+ t['\u2F97'] = '\u8C55';
+ t['\u2F98'] = '\u8C78';
+ t['\u2F99'] = '\u8C9D';
+ t['\u2F9A'] = '\u8D64';
+ t['\u2F9B'] = '\u8D70';
+ t['\u2F9C'] = '\u8DB3';
+ t['\u2F9D'] = '\u8EAB';
+ t['\u2F9E'] = '\u8ECA';
+ t['\u2F9F'] = '\u8F9B';
+ t['\u2FA0'] = '\u8FB0';
+ t['\u2FA1'] = '\u8FB5';
+ t['\u2FA2'] = '\u9091';
+ t['\u2FA3'] = '\u9149';
+ t['\u2FA4'] = '\u91C6';
+ t['\u2FA5'] = '\u91CC';
+ t['\u2FA6'] = '\u91D1';
+ t['\u2FA7'] = '\u9577';
+ t['\u2FA8'] = '\u9580';
+ t['\u2FA9'] = '\u961C';
+ t['\u2FAA'] = '\u96B6';
+ t['\u2FAB'] = '\u96B9';
+ t['\u2FAC'] = '\u96E8';
+ t['\u2FAD'] = '\u9751';
+ t['\u2FAE'] = '\u975E';
+ t['\u2FAF'] = '\u9762';
+ t['\u2FB0'] = '\u9769';
+ t['\u2FB1'] = '\u97CB';
+ t['\u2FB2'] = '\u97ED';
+ t['\u2FB3'] = '\u97F3';
+ t['\u2FB4'] = '\u9801';
+ t['\u2FB5'] = '\u98A8';
+ t['\u2FB6'] = '\u98DB';
+ t['\u2FB7'] = '\u98DF';
+ t['\u2FB8'] = '\u9996';
+ t['\u2FB9'] = '\u9999';
+ t['\u2FBA'] = '\u99AC';
+ t['\u2FBB'] = '\u9AA8';
+ t['\u2FBC'] = '\u9AD8';
+ t['\u2FBD'] = '\u9ADF';
+ t['\u2FBE'] = '\u9B25';
+ t['\u2FBF'] = '\u9B2F';
+ t['\u2FC0'] = '\u9B32';
+ t['\u2FC1'] = '\u9B3C';
+ t['\u2FC2'] = '\u9B5A';
+ t['\u2FC3'] = '\u9CE5';
+ t['\u2FC4'] = '\u9E75';
+ t['\u2FC5'] = '\u9E7F';
+ t['\u2FC6'] = '\u9EA5';
+ t['\u2FC7'] = '\u9EBB';
+ t['\u2FC8'] = '\u9EC3';
+ t['\u2FC9'] = '\u9ECD';
+ t['\u2FCA'] = '\u9ED1';
+ t['\u2FCB'] = '\u9EF9';
+ t['\u2FCC'] = '\u9EFD';
+ t['\u2FCD'] = '\u9F0E';
+ t['\u2FCE'] = '\u9F13';
+ t['\u2FCF'] = '\u9F20';
+ t['\u2FD0'] = '\u9F3B';
+ t['\u2FD1'] = '\u9F4A';
+ t['\u2FD2'] = '\u9F52';
+ t['\u2FD3'] = '\u9F8D';
+ t['\u2FD4'] = '\u9F9C';
+ t['\u2FD5'] = '\u9FA0';
+ t['\u3036'] = '\u3012';
+ t['\u3038'] = '\u5341';
+ t['\u3039'] = '\u5344';
+ t['\u303A'] = '\u5345';
+ t['\u309B'] = '\u0020\u3099';
+ t['\u309C'] = '\u0020\u309A';
+ t['\u3131'] = '\u1100';
+ t['\u3132'] = '\u1101';
+ t['\u3133'] = '\u11AA';
+ t['\u3134'] = '\u1102';
+ t['\u3135'] = '\u11AC';
+ t['\u3136'] = '\u11AD';
+ t['\u3137'] = '\u1103';
+ t['\u3138'] = '\u1104';
+ t['\u3139'] = '\u1105';
+ t['\u313A'] = '\u11B0';
+ t['\u313B'] = '\u11B1';
+ t['\u313C'] = '\u11B2';
+ t['\u313D'] = '\u11B3';
+ t['\u313E'] = '\u11B4';
+ t['\u313F'] = '\u11B5';
+ t['\u3140'] = '\u111A';
+ t['\u3141'] = '\u1106';
+ t['\u3142'] = '\u1107';
+ t['\u3143'] = '\u1108';
+ t['\u3144'] = '\u1121';
+ t['\u3145'] = '\u1109';
+ t['\u3146'] = '\u110A';
+ t['\u3147'] = '\u110B';
+ t['\u3148'] = '\u110C';
+ t['\u3149'] = '\u110D';
+ t['\u314A'] = '\u110E';
+ t['\u314B'] = '\u110F';
+ t['\u314C'] = '\u1110';
+ t['\u314D'] = '\u1111';
+ t['\u314E'] = '\u1112';
+ t['\u314F'] = '\u1161';
+ t['\u3150'] = '\u1162';
+ t['\u3151'] = '\u1163';
+ t['\u3152'] = '\u1164';
+ t['\u3153'] = '\u1165';
+ t['\u3154'] = '\u1166';
+ t['\u3155'] = '\u1167';
+ t['\u3156'] = '\u1168';
+ t['\u3157'] = '\u1169';
+ t['\u3158'] = '\u116A';
+ t['\u3159'] = '\u116B';
+ t['\u315A'] = '\u116C';
+ t['\u315B'] = '\u116D';
+ t['\u315C'] = '\u116E';
+ t['\u315D'] = '\u116F';
+ t['\u315E'] = '\u1170';
+ t['\u315F'] = '\u1171';
+ t['\u3160'] = '\u1172';
+ t['\u3161'] = '\u1173';
+ t['\u3162'] = '\u1174';
+ t['\u3163'] = '\u1175';
+ t['\u3164'] = '\u1160';
+ t['\u3165'] = '\u1114';
+ t['\u3166'] = '\u1115';
+ t['\u3167'] = '\u11C7';
+ t['\u3168'] = '\u11C8';
+ t['\u3169'] = '\u11CC';
+ t['\u316A'] = '\u11CE';
+ t['\u316B'] = '\u11D3';
+ t['\u316C'] = '\u11D7';
+ t['\u316D'] = '\u11D9';
+ t['\u316E'] = '\u111C';
+ t['\u316F'] = '\u11DD';
+ t['\u3170'] = '\u11DF';
+ t['\u3171'] = '\u111D';
+ t['\u3172'] = '\u111E';
+ t['\u3173'] = '\u1120';
+ t['\u3174'] = '\u1122';
+ t['\u3175'] = '\u1123';
+ t['\u3176'] = '\u1127';
+ t['\u3177'] = '\u1129';
+ t['\u3178'] = '\u112B';
+ t['\u3179'] = '\u112C';
+ t['\u317A'] = '\u112D';
+ t['\u317B'] = '\u112E';
+ t['\u317C'] = '\u112F';
+ t['\u317D'] = '\u1132';
+ t['\u317E'] = '\u1136';
+ t['\u317F'] = '\u1140';
+ t['\u3180'] = '\u1147';
+ t['\u3181'] = '\u114C';
+ t['\u3182'] = '\u11F1';
+ t['\u3183'] = '\u11F2';
+ t['\u3184'] = '\u1157';
+ t['\u3185'] = '\u1158';
+ t['\u3186'] = '\u1159';
+ t['\u3187'] = '\u1184';
+ t['\u3188'] = '\u1185';
+ t['\u3189'] = '\u1188';
+ t['\u318A'] = '\u1191';
+ t['\u318B'] = '\u1192';
+ t['\u318C'] = '\u1194';
+ t['\u318D'] = '\u119E';
+ t['\u318E'] = '\u11A1';
+ t['\u3200'] = '\u0028\u1100\u0029';
+ t['\u3201'] = '\u0028\u1102\u0029';
+ t['\u3202'] = '\u0028\u1103\u0029';
+ t['\u3203'] = '\u0028\u1105\u0029';
+ t['\u3204'] = '\u0028\u1106\u0029';
+ t['\u3205'] = '\u0028\u1107\u0029';
+ t['\u3206'] = '\u0028\u1109\u0029';
+ t['\u3207'] = '\u0028\u110B\u0029';
+ t['\u3208'] = '\u0028\u110C\u0029';
+ t['\u3209'] = '\u0028\u110E\u0029';
+ t['\u320A'] = '\u0028\u110F\u0029';
+ t['\u320B'] = '\u0028\u1110\u0029';
+ t['\u320C'] = '\u0028\u1111\u0029';
+ t['\u320D'] = '\u0028\u1112\u0029';
+ t['\u320E'] = '\u0028\u1100\u1161\u0029';
+ t['\u320F'] = '\u0028\u1102\u1161\u0029';
+ t['\u3210'] = '\u0028\u1103\u1161\u0029';
+ t['\u3211'] = '\u0028\u1105\u1161\u0029';
+ t['\u3212'] = '\u0028\u1106\u1161\u0029';
+ t['\u3213'] = '\u0028\u1107\u1161\u0029';
+ t['\u3214'] = '\u0028\u1109\u1161\u0029';
+ t['\u3215'] = '\u0028\u110B\u1161\u0029';
+ t['\u3216'] = '\u0028\u110C\u1161\u0029';
+ t['\u3217'] = '\u0028\u110E\u1161\u0029';
+ t['\u3218'] = '\u0028\u110F\u1161\u0029';
+ t['\u3219'] = '\u0028\u1110\u1161\u0029';
+ t['\u321A'] = '\u0028\u1111\u1161\u0029';
+ t['\u321B'] = '\u0028\u1112\u1161\u0029';
+ t['\u321C'] = '\u0028\u110C\u116E\u0029';
+ t['\u321D'] = '\u0028\u110B\u1169\u110C\u1165\u11AB\u0029';
+ t['\u321E'] = '\u0028\u110B\u1169\u1112\u116E\u0029';
+ t['\u3220'] = '\u0028\u4E00\u0029';
+ t['\u3221'] = '\u0028\u4E8C\u0029';
+ t['\u3222'] = '\u0028\u4E09\u0029';
+ t['\u3223'] = '\u0028\u56DB\u0029';
+ t['\u3224'] = '\u0028\u4E94\u0029';
+ t['\u3225'] = '\u0028\u516D\u0029';
+ t['\u3226'] = '\u0028\u4E03\u0029';
+ t['\u3227'] = '\u0028\u516B\u0029';
+ t['\u3228'] = '\u0028\u4E5D\u0029';
+ t['\u3229'] = '\u0028\u5341\u0029';
+ t['\u322A'] = '\u0028\u6708\u0029';
+ t['\u322B'] = '\u0028\u706B\u0029';
+ t['\u322C'] = '\u0028\u6C34\u0029';
+ t['\u322D'] = '\u0028\u6728\u0029';
+ t['\u322E'] = '\u0028\u91D1\u0029';
+ t['\u322F'] = '\u0028\u571F\u0029';
+ t['\u3230'] = '\u0028\u65E5\u0029';
+ t['\u3231'] = '\u0028\u682A\u0029';
+ t['\u3232'] = '\u0028\u6709\u0029';
+ t['\u3233'] = '\u0028\u793E\u0029';
+ t['\u3234'] = '\u0028\u540D\u0029';
+ t['\u3235'] = '\u0028\u7279\u0029';
+ t['\u3236'] = '\u0028\u8CA1\u0029';
+ t['\u3237'] = '\u0028\u795D\u0029';
+ t['\u3238'] = '\u0028\u52B4\u0029';
+ t['\u3239'] = '\u0028\u4EE3\u0029';
+ t['\u323A'] = '\u0028\u547C\u0029';
+ t['\u323B'] = '\u0028\u5B66\u0029';
+ t['\u323C'] = '\u0028\u76E3\u0029';
+ t['\u323D'] = '\u0028\u4F01\u0029';
+ t['\u323E'] = '\u0028\u8CC7\u0029';
+ t['\u323F'] = '\u0028\u5354\u0029';
+ t['\u3240'] = '\u0028\u796D\u0029';
+ t['\u3241'] = '\u0028\u4F11\u0029';
+ t['\u3242'] = '\u0028\u81EA\u0029';
+ t['\u3243'] = '\u0028\u81F3\u0029';
+ t['\u32C0'] = '\u0031\u6708';
+ t['\u32C1'] = '\u0032\u6708';
+ t['\u32C2'] = '\u0033\u6708';
+ t['\u32C3'] = '\u0034\u6708';
+ t['\u32C4'] = '\u0035\u6708';
+ t['\u32C5'] = '\u0036\u6708';
+ t['\u32C6'] = '\u0037\u6708';
+ t['\u32C7'] = '\u0038\u6708';
+ t['\u32C8'] = '\u0039\u6708';
+ t['\u32C9'] = '\u0031\u0030\u6708';
+ t['\u32CA'] = '\u0031\u0031\u6708';
+ t['\u32CB'] = '\u0031\u0032\u6708';
+ t['\u3358'] = '\u0030\u70B9';
+ t['\u3359'] = '\u0031\u70B9';
+ t['\u335A'] = '\u0032\u70B9';
+ t['\u335B'] = '\u0033\u70B9';
+ t['\u335C'] = '\u0034\u70B9';
+ t['\u335D'] = '\u0035\u70B9';
+ t['\u335E'] = '\u0036\u70B9';
+ t['\u335F'] = '\u0037\u70B9';
+ t['\u3360'] = '\u0038\u70B9';
+ t['\u3361'] = '\u0039\u70B9';
+ t['\u3362'] = '\u0031\u0030\u70B9';
+ t['\u3363'] = '\u0031\u0031\u70B9';
+ t['\u3364'] = '\u0031\u0032\u70B9';
+ t['\u3365'] = '\u0031\u0033\u70B9';
+ t['\u3366'] = '\u0031\u0034\u70B9';
+ t['\u3367'] = '\u0031\u0035\u70B9';
+ t['\u3368'] = '\u0031\u0036\u70B9';
+ t['\u3369'] = '\u0031\u0037\u70B9';
+ t['\u336A'] = '\u0031\u0038\u70B9';
+ t['\u336B'] = '\u0031\u0039\u70B9';
+ t['\u336C'] = '\u0032\u0030\u70B9';
+ t['\u336D'] = '\u0032\u0031\u70B9';
+ t['\u336E'] = '\u0032\u0032\u70B9';
+ t['\u336F'] = '\u0032\u0033\u70B9';
+ t['\u3370'] = '\u0032\u0034\u70B9';
+ t['\u33E0'] = '\u0031\u65E5';
+ t['\u33E1'] = '\u0032\u65E5';
+ t['\u33E2'] = '\u0033\u65E5';
+ t['\u33E3'] = '\u0034\u65E5';
+ t['\u33E4'] = '\u0035\u65E5';
+ t['\u33E5'] = '\u0036\u65E5';
+ t['\u33E6'] = '\u0037\u65E5';
+ t['\u33E7'] = '\u0038\u65E5';
+ t['\u33E8'] = '\u0039\u65E5';
+ t['\u33E9'] = '\u0031\u0030\u65E5';
+ t['\u33EA'] = '\u0031\u0031\u65E5';
+ t['\u33EB'] = '\u0031\u0032\u65E5';
+ t['\u33EC'] = '\u0031\u0033\u65E5';
+ t['\u33ED'] = '\u0031\u0034\u65E5';
+ t['\u33EE'] = '\u0031\u0035\u65E5';
+ t['\u33EF'] = '\u0031\u0036\u65E5';
+ t['\u33F0'] = '\u0031\u0037\u65E5';
+ t['\u33F1'] = '\u0031\u0038\u65E5';
+ t['\u33F2'] = '\u0031\u0039\u65E5';
+ t['\u33F3'] = '\u0032\u0030\u65E5';
+ t['\u33F4'] = '\u0032\u0031\u65E5';
+ t['\u33F5'] = '\u0032\u0032\u65E5';
+ t['\u33F6'] = '\u0032\u0033\u65E5';
+ t['\u33F7'] = '\u0032\u0034\u65E5';
+ t['\u33F8'] = '\u0032\u0035\u65E5';
+ t['\u33F9'] = '\u0032\u0036\u65E5';
+ t['\u33FA'] = '\u0032\u0037\u65E5';
+ t['\u33FB'] = '\u0032\u0038\u65E5';
+ t['\u33FC'] = '\u0032\u0039\u65E5';
+ t['\u33FD'] = '\u0033\u0030\u65E5';
+ t['\u33FE'] = '\u0033\u0031\u65E5';
+ t['\uFB00'] = '\u0066\u0066';
+ t['\uFB01'] = '\u0066\u0069';
+ t['\uFB02'] = '\u0066\u006C';
+ t['\uFB03'] = '\u0066\u0066\u0069';
+ t['\uFB04'] = '\u0066\u0066\u006C';
+ t['\uFB05'] = '\u017F\u0074';
+ t['\uFB06'] = '\u0073\u0074';
+ t['\uFB13'] = '\u0574\u0576';
+ t['\uFB14'] = '\u0574\u0565';
+ t['\uFB15'] = '\u0574\u056B';
+ t['\uFB16'] = '\u057E\u0576';
+ t['\uFB17'] = '\u0574\u056D';
+ t['\uFB4F'] = '\u05D0\u05DC';
+ t['\uFB50'] = '\u0671';
+ t['\uFB51'] = '\u0671';
+ t['\uFB52'] = '\u067B';
+ t['\uFB53'] = '\u067B';
+ t['\uFB54'] = '\u067B';
+ t['\uFB55'] = '\u067B';
+ t['\uFB56'] = '\u067E';
+ t['\uFB57'] = '\u067E';
+ t['\uFB58'] = '\u067E';
+ t['\uFB59'] = '\u067E';
+ t['\uFB5A'] = '\u0680';
+ t['\uFB5B'] = '\u0680';
+ t['\uFB5C'] = '\u0680';
+ t['\uFB5D'] = '\u0680';
+ t['\uFB5E'] = '\u067A';
+ t['\uFB5F'] = '\u067A';
+ t['\uFB60'] = '\u067A';
+ t['\uFB61'] = '\u067A';
+ t['\uFB62'] = '\u067F';
+ t['\uFB63'] = '\u067F';
+ t['\uFB64'] = '\u067F';
+ t['\uFB65'] = '\u067F';
+ t['\uFB66'] = '\u0679';
+ t['\uFB67'] = '\u0679';
+ t['\uFB68'] = '\u0679';
+ t['\uFB69'] = '\u0679';
+ t['\uFB6A'] = '\u06A4';
+ t['\uFB6B'] = '\u06A4';
+ t['\uFB6C'] = '\u06A4';
+ t['\uFB6D'] = '\u06A4';
+ t['\uFB6E'] = '\u06A6';
+ t['\uFB6F'] = '\u06A6';
+ t['\uFB70'] = '\u06A6';
+ t['\uFB71'] = '\u06A6';
+ t['\uFB72'] = '\u0684';
+ t['\uFB73'] = '\u0684';
+ t['\uFB74'] = '\u0684';
+ t['\uFB75'] = '\u0684';
+ t['\uFB76'] = '\u0683';
+ t['\uFB77'] = '\u0683';
+ t['\uFB78'] = '\u0683';
+ t['\uFB79'] = '\u0683';
+ t['\uFB7A'] = '\u0686';
+ t['\uFB7B'] = '\u0686';
+ t['\uFB7C'] = '\u0686';
+ t['\uFB7D'] = '\u0686';
+ t['\uFB7E'] = '\u0687';
+ t['\uFB7F'] = '\u0687';
+ t['\uFB80'] = '\u0687';
+ t['\uFB81'] = '\u0687';
+ t['\uFB82'] = '\u068D';
+ t['\uFB83'] = '\u068D';
+ t['\uFB84'] = '\u068C';
+ t['\uFB85'] = '\u068C';
+ t['\uFB86'] = '\u068E';
+ t['\uFB87'] = '\u068E';
+ t['\uFB88'] = '\u0688';
+ t['\uFB89'] = '\u0688';
+ t['\uFB8A'] = '\u0698';
+ t['\uFB8B'] = '\u0698';
+ t['\uFB8C'] = '\u0691';
+ t['\uFB8D'] = '\u0691';
+ t['\uFB8E'] = '\u06A9';
+ t['\uFB8F'] = '\u06A9';
+ t['\uFB90'] = '\u06A9';
+ t['\uFB91'] = '\u06A9';
+ t['\uFB92'] = '\u06AF';
+ t['\uFB93'] = '\u06AF';
+ t['\uFB94'] = '\u06AF';
+ t['\uFB95'] = '\u06AF';
+ t['\uFB96'] = '\u06B3';
+ t['\uFB97'] = '\u06B3';
+ t['\uFB98'] = '\u06B3';
+ t['\uFB99'] = '\u06B3';
+ t['\uFB9A'] = '\u06B1';
+ t['\uFB9B'] = '\u06B1';
+ t['\uFB9C'] = '\u06B1';
+ t['\uFB9D'] = '\u06B1';
+ t['\uFB9E'] = '\u06BA';
+ t['\uFB9F'] = '\u06BA';
+ t['\uFBA0'] = '\u06BB';
+ t['\uFBA1'] = '\u06BB';
+ t['\uFBA2'] = '\u06BB';
+ t['\uFBA3'] = '\u06BB';
+ t['\uFBA4'] = '\u06C0';
+ t['\uFBA5'] = '\u06C0';
+ t['\uFBA6'] = '\u06C1';
+ t['\uFBA7'] = '\u06C1';
+ t['\uFBA8'] = '\u06C1';
+ t['\uFBA9'] = '\u06C1';
+ t['\uFBAA'] = '\u06BE';
+ t['\uFBAB'] = '\u06BE';
+ t['\uFBAC'] = '\u06BE';
+ t['\uFBAD'] = '\u06BE';
+ t['\uFBAE'] = '\u06D2';
+ t['\uFBAF'] = '\u06D2';
+ t['\uFBB0'] = '\u06D3';
+ t['\uFBB1'] = '\u06D3';
+ t['\uFBD3'] = '\u06AD';
+ t['\uFBD4'] = '\u06AD';
+ t['\uFBD5'] = '\u06AD';
+ t['\uFBD6'] = '\u06AD';
+ t['\uFBD7'] = '\u06C7';
+ t['\uFBD8'] = '\u06C7';
+ t['\uFBD9'] = '\u06C6';
+ t['\uFBDA'] = '\u06C6';
+ t['\uFBDB'] = '\u06C8';
+ t['\uFBDC'] = '\u06C8';
+ t['\uFBDD'] = '\u0677';
+ t['\uFBDE'] = '\u06CB';
+ t['\uFBDF'] = '\u06CB';
+ t['\uFBE0'] = '\u06C5';
+ t['\uFBE1'] = '\u06C5';
+ t['\uFBE2'] = '\u06C9';
+ t['\uFBE3'] = '\u06C9';
+ t['\uFBE4'] = '\u06D0';
+ t['\uFBE5'] = '\u06D0';
+ t['\uFBE6'] = '\u06D0';
+ t['\uFBE7'] = '\u06D0';
+ t['\uFBE8'] = '\u0649';
+ t['\uFBE9'] = '\u0649';
+ t['\uFBEA'] = '\u0626\u0627';
+ t['\uFBEB'] = '\u0626\u0627';
+ t['\uFBEC'] = '\u0626\u06D5';
+ t['\uFBED'] = '\u0626\u06D5';
+ t['\uFBEE'] = '\u0626\u0648';
+ t['\uFBEF'] = '\u0626\u0648';
+ t['\uFBF0'] = '\u0626\u06C7';
+ t['\uFBF1'] = '\u0626\u06C7';
+ t['\uFBF2'] = '\u0626\u06C6';
+ t['\uFBF3'] = '\u0626\u06C6';
+ t['\uFBF4'] = '\u0626\u06C8';
+ t['\uFBF5'] = '\u0626\u06C8';
+ t['\uFBF6'] = '\u0626\u06D0';
+ t['\uFBF7'] = '\u0626\u06D0';
+ t['\uFBF8'] = '\u0626\u06D0';
+ t['\uFBF9'] = '\u0626\u0649';
+ t['\uFBFA'] = '\u0626\u0649';
+ t['\uFBFB'] = '\u0626\u0649';
+ t['\uFBFC'] = '\u06CC';
+ t['\uFBFD'] = '\u06CC';
+ t['\uFBFE'] = '\u06CC';
+ t['\uFBFF'] = '\u06CC';
+ t['\uFC00'] = '\u0626\u062C';
+ t['\uFC01'] = '\u0626\u062D';
+ t['\uFC02'] = '\u0626\u0645';
+ t['\uFC03'] = '\u0626\u0649';
+ t['\uFC04'] = '\u0626\u064A';
+ t['\uFC05'] = '\u0628\u062C';
+ t['\uFC06'] = '\u0628\u062D';
+ t['\uFC07'] = '\u0628\u062E';
+ t['\uFC08'] = '\u0628\u0645';
+ t['\uFC09'] = '\u0628\u0649';
+ t['\uFC0A'] = '\u0628\u064A';
+ t['\uFC0B'] = '\u062A\u062C';
+ t['\uFC0C'] = '\u062A\u062D';
+ t['\uFC0D'] = '\u062A\u062E';
+ t['\uFC0E'] = '\u062A\u0645';
+ t['\uFC0F'] = '\u062A\u0649';
+ t['\uFC10'] = '\u062A\u064A';
+ t['\uFC11'] = '\u062B\u062C';
+ t['\uFC12'] = '\u062B\u0645';
+ t['\uFC13'] = '\u062B\u0649';
+ t['\uFC14'] = '\u062B\u064A';
+ t['\uFC15'] = '\u062C\u062D';
+ t['\uFC16'] = '\u062C\u0645';
+ t['\uFC17'] = '\u062D\u062C';
+ t['\uFC18'] = '\u062D\u0645';
+ t['\uFC19'] = '\u062E\u062C';
+ t['\uFC1A'] = '\u062E\u062D';
+ t['\uFC1B'] = '\u062E\u0645';
+ t['\uFC1C'] = '\u0633\u062C';
+ t['\uFC1D'] = '\u0633\u062D';
+ t['\uFC1E'] = '\u0633\u062E';
+ t['\uFC1F'] = '\u0633\u0645';
+ t['\uFC20'] = '\u0635\u062D';
+ t['\uFC21'] = '\u0635\u0645';
+ t['\uFC22'] = '\u0636\u062C';
+ t['\uFC23'] = '\u0636\u062D';
+ t['\uFC24'] = '\u0636\u062E';
+ t['\uFC25'] = '\u0636\u0645';
+ t['\uFC26'] = '\u0637\u062D';
+ t['\uFC27'] = '\u0637\u0645';
+ t['\uFC28'] = '\u0638\u0645';
+ t['\uFC29'] = '\u0639\u062C';
+ t['\uFC2A'] = '\u0639\u0645';
+ t['\uFC2B'] = '\u063A\u062C';
+ t['\uFC2C'] = '\u063A\u0645';
+ t['\uFC2D'] = '\u0641\u062C';
+ t['\uFC2E'] = '\u0641\u062D';
+ t['\uFC2F'] = '\u0641\u062E';
+ t['\uFC30'] = '\u0641\u0645';
+ t['\uFC31'] = '\u0641\u0649';
+ t['\uFC32'] = '\u0641\u064A';
+ t['\uFC33'] = '\u0642\u062D';
+ t['\uFC34'] = '\u0642\u0645';
+ t['\uFC35'] = '\u0642\u0649';
+ t['\uFC36'] = '\u0642\u064A';
+ t['\uFC37'] = '\u0643\u0627';
+ t['\uFC38'] = '\u0643\u062C';
+ t['\uFC39'] = '\u0643\u062D';
+ t['\uFC3A'] = '\u0643\u062E';
+ t['\uFC3B'] = '\u0643\u0644';
+ t['\uFC3C'] = '\u0643\u0645';
+ t['\uFC3D'] = '\u0643\u0649';
+ t['\uFC3E'] = '\u0643\u064A';
+ t['\uFC3F'] = '\u0644\u062C';
+ t['\uFC40'] = '\u0644\u062D';
+ t['\uFC41'] = '\u0644\u062E';
+ t['\uFC42'] = '\u0644\u0645';
+ t['\uFC43'] = '\u0644\u0649';
+ t['\uFC44'] = '\u0644\u064A';
+ t['\uFC45'] = '\u0645\u062C';
+ t['\uFC46'] = '\u0645\u062D';
+ t['\uFC47'] = '\u0645\u062E';
+ t['\uFC48'] = '\u0645\u0645';
+ t['\uFC49'] = '\u0645\u0649';
+ t['\uFC4A'] = '\u0645\u064A';
+ t['\uFC4B'] = '\u0646\u062C';
+ t['\uFC4C'] = '\u0646\u062D';
+ t['\uFC4D'] = '\u0646\u062E';
+ t['\uFC4E'] = '\u0646\u0645';
+ t['\uFC4F'] = '\u0646\u0649';
+ t['\uFC50'] = '\u0646\u064A';
+ t['\uFC51'] = '\u0647\u062C';
+ t['\uFC52'] = '\u0647\u0645';
+ t['\uFC53'] = '\u0647\u0649';
+ t['\uFC54'] = '\u0647\u064A';
+ t['\uFC55'] = '\u064A\u062C';
+ t['\uFC56'] = '\u064A\u062D';
+ t['\uFC57'] = '\u064A\u062E';
+ t['\uFC58'] = '\u064A\u0645';
+ t['\uFC59'] = '\u064A\u0649';
+ t['\uFC5A'] = '\u064A\u064A';
+ t['\uFC5B'] = '\u0630\u0670';
+ t['\uFC5C'] = '\u0631\u0670';
+ t['\uFC5D'] = '\u0649\u0670';
+ t['\uFC5E'] = '\u0020\u064C\u0651';
+ t['\uFC5F'] = '\u0020\u064D\u0651';
+ t['\uFC60'] = '\u0020\u064E\u0651';
+ t['\uFC61'] = '\u0020\u064F\u0651';
+ t['\uFC62'] = '\u0020\u0650\u0651';
+ t['\uFC63'] = '\u0020\u0651\u0670';
+ t['\uFC64'] = '\u0626\u0631';
+ t['\uFC65'] = '\u0626\u0632';
+ t['\uFC66'] = '\u0626\u0645';
+ t['\uFC67'] = '\u0626\u0646';
+ t['\uFC68'] = '\u0626\u0649';
+ t['\uFC69'] = '\u0626\u064A';
+ t['\uFC6A'] = '\u0628\u0631';
+ t['\uFC6B'] = '\u0628\u0632';
+ t['\uFC6C'] = '\u0628\u0645';
+ t['\uFC6D'] = '\u0628\u0646';
+ t['\uFC6E'] = '\u0628\u0649';
+ t['\uFC6F'] = '\u0628\u064A';
+ t['\uFC70'] = '\u062A\u0631';
+ t['\uFC71'] = '\u062A\u0632';
+ t['\uFC72'] = '\u062A\u0645';
+ t['\uFC73'] = '\u062A\u0646';
+ t['\uFC74'] = '\u062A\u0649';
+ t['\uFC75'] = '\u062A\u064A';
+ t['\uFC76'] = '\u062B\u0631';
+ t['\uFC77'] = '\u062B\u0632';
+ t['\uFC78'] = '\u062B\u0645';
+ t['\uFC79'] = '\u062B\u0646';
+ t['\uFC7A'] = '\u062B\u0649';
+ t['\uFC7B'] = '\u062B\u064A';
+ t['\uFC7C'] = '\u0641\u0649';
+ t['\uFC7D'] = '\u0641\u064A';
+ t['\uFC7E'] = '\u0642\u0649';
+ t['\uFC7F'] = '\u0642\u064A';
+ t['\uFC80'] = '\u0643\u0627';
+ t['\uFC81'] = '\u0643\u0644';
+ t['\uFC82'] = '\u0643\u0645';
+ t['\uFC83'] = '\u0643\u0649';
+ t['\uFC84'] = '\u0643\u064A';
+ t['\uFC85'] = '\u0644\u0645';
+ t['\uFC86'] = '\u0644\u0649';
+ t['\uFC87'] = '\u0644\u064A';
+ t['\uFC88'] = '\u0645\u0627';
+ t['\uFC89'] = '\u0645\u0645';
+ t['\uFC8A'] = '\u0646\u0631';
+ t['\uFC8B'] = '\u0646\u0632';
+ t['\uFC8C'] = '\u0646\u0645';
+ t['\uFC8D'] = '\u0646\u0646';
+ t['\uFC8E'] = '\u0646\u0649';
+ t['\uFC8F'] = '\u0646\u064A';
+ t['\uFC90'] = '\u0649\u0670';
+ t['\uFC91'] = '\u064A\u0631';
+ t['\uFC92'] = '\u064A\u0632';
+ t['\uFC93'] = '\u064A\u0645';
+ t['\uFC94'] = '\u064A\u0646';
+ t['\uFC95'] = '\u064A\u0649';
+ t['\uFC96'] = '\u064A\u064A';
+ t['\uFC97'] = '\u0626\u062C';
+ t['\uFC98'] = '\u0626\u062D';
+ t['\uFC99'] = '\u0626\u062E';
+ t['\uFC9A'] = '\u0626\u0645';
+ t['\uFC9B'] = '\u0626\u0647';
+ t['\uFC9C'] = '\u0628\u062C';
+ t['\uFC9D'] = '\u0628\u062D';
+ t['\uFC9E'] = '\u0628\u062E';
+ t['\uFC9F'] = '\u0628\u0645';
+ t['\uFCA0'] = '\u0628\u0647';
+ t['\uFCA1'] = '\u062A\u062C';
+ t['\uFCA2'] = '\u062A\u062D';
+ t['\uFCA3'] = '\u062A\u062E';
+ t['\uFCA4'] = '\u062A\u0645';
+ t['\uFCA5'] = '\u062A\u0647';
+ t['\uFCA6'] = '\u062B\u0645';
+ t['\uFCA7'] = '\u062C\u062D';
+ t['\uFCA8'] = '\u062C\u0645';
+ t['\uFCA9'] = '\u062D\u062C';
+ t['\uFCAA'] = '\u062D\u0645';
+ t['\uFCAB'] = '\u062E\u062C';
+ t['\uFCAC'] = '\u062E\u0645';
+ t['\uFCAD'] = '\u0633\u062C';
+ t['\uFCAE'] = '\u0633\u062D';
+ t['\uFCAF'] = '\u0633\u062E';
+ t['\uFCB0'] = '\u0633\u0645';
+ t['\uFCB1'] = '\u0635\u062D';
+ t['\uFCB2'] = '\u0635\u062E';
+ t['\uFCB3'] = '\u0635\u0645';
+ t['\uFCB4'] = '\u0636\u062C';
+ t['\uFCB5'] = '\u0636\u062D';
+ t['\uFCB6'] = '\u0636\u062E';
+ t['\uFCB7'] = '\u0636\u0645';
+ t['\uFCB8'] = '\u0637\u062D';
+ t['\uFCB9'] = '\u0638\u0645';
+ t['\uFCBA'] = '\u0639\u062C';
+ t['\uFCBB'] = '\u0639\u0645';
+ t['\uFCBC'] = '\u063A\u062C';
+ t['\uFCBD'] = '\u063A\u0645';
+ t['\uFCBE'] = '\u0641\u062C';
+ t['\uFCBF'] = '\u0641\u062D';
+ t['\uFCC0'] = '\u0641\u062E';
+ t['\uFCC1'] = '\u0641\u0645';
+ t['\uFCC2'] = '\u0642\u062D';
+ t['\uFCC3'] = '\u0642\u0645';
+ t['\uFCC4'] = '\u0643\u062C';
+ t['\uFCC5'] = '\u0643\u062D';
+ t['\uFCC6'] = '\u0643\u062E';
+ t['\uFCC7'] = '\u0643\u0644';
+ t['\uFCC8'] = '\u0643\u0645';
+ t['\uFCC9'] = '\u0644\u062C';
+ t['\uFCCA'] = '\u0644\u062D';
+ t['\uFCCB'] = '\u0644\u062E';
+ t['\uFCCC'] = '\u0644\u0645';
+ t['\uFCCD'] = '\u0644\u0647';
+ t['\uFCCE'] = '\u0645\u062C';
+ t['\uFCCF'] = '\u0645\u062D';
+ t['\uFCD0'] = '\u0645\u062E';
+ t['\uFCD1'] = '\u0645\u0645';
+ t['\uFCD2'] = '\u0646\u062C';
+ t['\uFCD3'] = '\u0646\u062D';
+ t['\uFCD4'] = '\u0646\u062E';
+ t['\uFCD5'] = '\u0646\u0645';
+ t['\uFCD6'] = '\u0646\u0647';
+ t['\uFCD7'] = '\u0647\u062C';
+ t['\uFCD8'] = '\u0647\u0645';
+ t['\uFCD9'] = '\u0647\u0670';
+ t['\uFCDA'] = '\u064A\u062C';
+ t['\uFCDB'] = '\u064A\u062D';
+ t['\uFCDC'] = '\u064A\u062E';
+ t['\uFCDD'] = '\u064A\u0645';
+ t['\uFCDE'] = '\u064A\u0647';
+ t['\uFCDF'] = '\u0626\u0645';
+ t['\uFCE0'] = '\u0626\u0647';
+ t['\uFCE1'] = '\u0628\u0645';
+ t['\uFCE2'] = '\u0628\u0647';
+ t['\uFCE3'] = '\u062A\u0645';
+ t['\uFCE4'] = '\u062A\u0647';
+ t['\uFCE5'] = '\u062B\u0645';
+ t['\uFCE6'] = '\u062B\u0647';
+ t['\uFCE7'] = '\u0633\u0645';
+ t['\uFCE8'] = '\u0633\u0647';
+ t['\uFCE9'] = '\u0634\u0645';
+ t['\uFCEA'] = '\u0634\u0647';
+ t['\uFCEB'] = '\u0643\u0644';
+ t['\uFCEC'] = '\u0643\u0645';
+ t['\uFCED'] = '\u0644\u0645';
+ t['\uFCEE'] = '\u0646\u0645';
+ t['\uFCEF'] = '\u0646\u0647';
+ t['\uFCF0'] = '\u064A\u0645';
+ t['\uFCF1'] = '\u064A\u0647';
+ t['\uFCF2'] = '\u0640\u064E\u0651';
+ t['\uFCF3'] = '\u0640\u064F\u0651';
+ t['\uFCF4'] = '\u0640\u0650\u0651';
+ t['\uFCF5'] = '\u0637\u0649';
+ t['\uFCF6'] = '\u0637\u064A';
+ t['\uFCF7'] = '\u0639\u0649';
+ t['\uFCF8'] = '\u0639\u064A';
+ t['\uFCF9'] = '\u063A\u0649';
+ t['\uFCFA'] = '\u063A\u064A';
+ t['\uFCFB'] = '\u0633\u0649';
+ t['\uFCFC'] = '\u0633\u064A';
+ t['\uFCFD'] = '\u0634\u0649';
+ t['\uFCFE'] = '\u0634\u064A';
+ t['\uFCFF'] = '\u062D\u0649';
+ t['\uFD00'] = '\u062D\u064A';
+ t['\uFD01'] = '\u062C\u0649';
+ t['\uFD02'] = '\u062C\u064A';
+ t['\uFD03'] = '\u062E\u0649';
+ t['\uFD04'] = '\u062E\u064A';
+ t['\uFD05'] = '\u0635\u0649';
+ t['\uFD06'] = '\u0635\u064A';
+ t['\uFD07'] = '\u0636\u0649';
+ t['\uFD08'] = '\u0636\u064A';
+ t['\uFD09'] = '\u0634\u062C';
+ t['\uFD0A'] = '\u0634\u062D';
+ t['\uFD0B'] = '\u0634\u062E';
+ t['\uFD0C'] = '\u0634\u0645';
+ t['\uFD0D'] = '\u0634\u0631';
+ t['\uFD0E'] = '\u0633\u0631';
+ t['\uFD0F'] = '\u0635\u0631';
+ t['\uFD10'] = '\u0636\u0631';
+ t['\uFD11'] = '\u0637\u0649';
+ t['\uFD12'] = '\u0637\u064A';
+ t['\uFD13'] = '\u0639\u0649';
+ t['\uFD14'] = '\u0639\u064A';
+ t['\uFD15'] = '\u063A\u0649';
+ t['\uFD16'] = '\u063A\u064A';
+ t['\uFD17'] = '\u0633\u0649';
+ t['\uFD18'] = '\u0633\u064A';
+ t['\uFD19'] = '\u0634\u0649';
+ t['\uFD1A'] = '\u0634\u064A';
+ t['\uFD1B'] = '\u062D\u0649';
+ t['\uFD1C'] = '\u062D\u064A';
+ t['\uFD1D'] = '\u062C\u0649';
+ t['\uFD1E'] = '\u062C\u064A';
+ t['\uFD1F'] = '\u062E\u0649';
+ t['\uFD20'] = '\u062E\u064A';
+ t['\uFD21'] = '\u0635\u0649';
+ t['\uFD22'] = '\u0635\u064A';
+ t['\uFD23'] = '\u0636\u0649';
+ t['\uFD24'] = '\u0636\u064A';
+ t['\uFD25'] = '\u0634\u062C';
+ t['\uFD26'] = '\u0634\u062D';
+ t['\uFD27'] = '\u0634\u062E';
+ t['\uFD28'] = '\u0634\u0645';
+ t['\uFD29'] = '\u0634\u0631';
+ t['\uFD2A'] = '\u0633\u0631';
+ t['\uFD2B'] = '\u0635\u0631';
+ t['\uFD2C'] = '\u0636\u0631';
+ t['\uFD2D'] = '\u0634\u062C';
+ t['\uFD2E'] = '\u0634\u062D';
+ t['\uFD2F'] = '\u0634\u062E';
+ t['\uFD30'] = '\u0634\u0645';
+ t['\uFD31'] = '\u0633\u0647';
+ t['\uFD32'] = '\u0634\u0647';
+ t['\uFD33'] = '\u0637\u0645';
+ t['\uFD34'] = '\u0633\u062C';
+ t['\uFD35'] = '\u0633\u062D';
+ t['\uFD36'] = '\u0633\u062E';
+ t['\uFD37'] = '\u0634\u062C';
+ t['\uFD38'] = '\u0634\u062D';
+ t['\uFD39'] = '\u0634\u062E';
+ t['\uFD3A'] = '\u0637\u0645';
+ t['\uFD3B'] = '\u0638\u0645';
+ t['\uFD3C'] = '\u0627\u064B';
+ t['\uFD3D'] = '\u0627\u064B';
+ t['\uFD50'] = '\u062A\u062C\u0645';
+ t['\uFD51'] = '\u062A\u062D\u062C';
+ t['\uFD52'] = '\u062A\u062D\u062C';
+ t['\uFD53'] = '\u062A\u062D\u0645';
+ t['\uFD54'] = '\u062A\u062E\u0645';
+ t['\uFD55'] = '\u062A\u0645\u062C';
+ t['\uFD56'] = '\u062A\u0645\u062D';
+ t['\uFD57'] = '\u062A\u0645\u062E';
+ t['\uFD58'] = '\u062C\u0645\u062D';
+ t['\uFD59'] = '\u062C\u0645\u062D';
+ t['\uFD5A'] = '\u062D\u0645\u064A';
+ t['\uFD5B'] = '\u062D\u0645\u0649';
+ t['\uFD5C'] = '\u0633\u062D\u062C';
+ t['\uFD5D'] = '\u0633\u062C\u062D';
+ t['\uFD5E'] = '\u0633\u062C\u0649';
+ t['\uFD5F'] = '\u0633\u0645\u062D';
+ t['\uFD60'] = '\u0633\u0645\u062D';
+ t['\uFD61'] = '\u0633\u0645\u062C';
+ t['\uFD62'] = '\u0633\u0645\u0645';
+ t['\uFD63'] = '\u0633\u0645\u0645';
+ t['\uFD64'] = '\u0635\u062D\u062D';
+ t['\uFD65'] = '\u0635\u062D\u062D';
+ t['\uFD66'] = '\u0635\u0645\u0645';
+ t['\uFD67'] = '\u0634\u062D\u0645';
+ t['\uFD68'] = '\u0634\u062D\u0645';
+ t['\uFD69'] = '\u0634\u062C\u064A';
+ t['\uFD6A'] = '\u0634\u0645\u062E';
+ t['\uFD6B'] = '\u0634\u0645\u062E';
+ t['\uFD6C'] = '\u0634\u0645\u0645';
+ t['\uFD6D'] = '\u0634\u0645\u0645';
+ t['\uFD6E'] = '\u0636\u062D\u0649';
+ t['\uFD6F'] = '\u0636\u062E\u0645';
+ t['\uFD70'] = '\u0636\u062E\u0645';
+ t['\uFD71'] = '\u0637\u0645\u062D';
+ t['\uFD72'] = '\u0637\u0645\u062D';
+ t['\uFD73'] = '\u0637\u0645\u0645';
+ t['\uFD74'] = '\u0637\u0645\u064A';
+ t['\uFD75'] = '\u0639\u062C\u0645';
+ t['\uFD76'] = '\u0639\u0645\u0645';
+ t['\uFD77'] = '\u0639\u0645\u0645';
+ t['\uFD78'] = '\u0639\u0645\u0649';
+ t['\uFD79'] = '\u063A\u0645\u0645';
+ t['\uFD7A'] = '\u063A\u0645\u064A';
+ t['\uFD7B'] = '\u063A\u0645\u0649';
+ t['\uFD7C'] = '\u0641\u062E\u0645';
+ t['\uFD7D'] = '\u0641\u062E\u0645';
+ t['\uFD7E'] = '\u0642\u0645\u062D';
+ t['\uFD7F'] = '\u0642\u0645\u0645';
+ t['\uFD80'] = '\u0644\u062D\u0645';
+ t['\uFD81'] = '\u0644\u062D\u064A';
+ t['\uFD82'] = '\u0644\u062D\u0649';
+ t['\uFD83'] = '\u0644\u062C\u062C';
+ t['\uFD84'] = '\u0644\u062C\u062C';
+ t['\uFD85'] = '\u0644\u062E\u0645';
+ t['\uFD86'] = '\u0644\u062E\u0645';
+ t['\uFD87'] = '\u0644\u0645\u062D';
+ t['\uFD88'] = '\u0644\u0645\u062D';
+ t['\uFD89'] = '\u0645\u062D\u062C';
+ t['\uFD8A'] = '\u0645\u062D\u0645';
+ t['\uFD8B'] = '\u0645\u062D\u064A';
+ t['\uFD8C'] = '\u0645\u062C\u062D';
+ t['\uFD8D'] = '\u0645\u062C\u0645';
+ t['\uFD8E'] = '\u0645\u062E\u062C';
+ t['\uFD8F'] = '\u0645\u062E\u0645';
+ t['\uFD92'] = '\u0645\u062C\u062E';
+ t['\uFD93'] = '\u0647\u0645\u062C';
+ t['\uFD94'] = '\u0647\u0645\u0645';
+ t['\uFD95'] = '\u0646\u062D\u0645';
+ t['\uFD96'] = '\u0646\u062D\u0649';
+ t['\uFD97'] = '\u0646\u062C\u0645';
+ t['\uFD98'] = '\u0646\u062C\u0645';
+ t['\uFD99'] = '\u0646\u062C\u0649';
+ t['\uFD9A'] = '\u0646\u0645\u064A';
+ t['\uFD9B'] = '\u0646\u0645\u0649';
+ t['\uFD9C'] = '\u064A\u0645\u0645';
+ t['\uFD9D'] = '\u064A\u0645\u0645';
+ t['\uFD9E'] = '\u0628\u062E\u064A';
+ t['\uFD9F'] = '\u062A\u062C\u064A';
+ t['\uFDA0'] = '\u062A\u062C\u0649';
+ t['\uFDA1'] = '\u062A\u062E\u064A';
+ t['\uFDA2'] = '\u062A\u062E\u0649';
+ t['\uFDA3'] = '\u062A\u0645\u064A';
+ t['\uFDA4'] = '\u062A\u0645\u0649';
+ t['\uFDA5'] = '\u062C\u0645\u064A';
+ t['\uFDA6'] = '\u062C\u062D\u0649';
+ t['\uFDA7'] = '\u062C\u0645\u0649';
+ t['\uFDA8'] = '\u0633\u062E\u0649';
+ t['\uFDA9'] = '\u0635\u062D\u064A';
+ t['\uFDAA'] = '\u0634\u062D\u064A';
+ t['\uFDAB'] = '\u0636\u062D\u064A';
+ t['\uFDAC'] = '\u0644\u062C\u064A';
+ t['\uFDAD'] = '\u0644\u0645\u064A';
+ t['\uFDAE'] = '\u064A\u062D\u064A';
+ t['\uFDAF'] = '\u064A\u062C\u064A';
+ t['\uFDB0'] = '\u064A\u0645\u064A';
+ t['\uFDB1'] = '\u0645\u0645\u064A';
+ t['\uFDB2'] = '\u0642\u0645\u064A';
+ t['\uFDB3'] = '\u0646\u062D\u064A';
+ t['\uFDB4'] = '\u0642\u0645\u062D';
+ t['\uFDB5'] = '\u0644\u062D\u0645';
+ t['\uFDB6'] = '\u0639\u0645\u064A';
+ t['\uFDB7'] = '\u0643\u0645\u064A';
+ t['\uFDB8'] = '\u0646\u062C\u062D';
+ t['\uFDB9'] = '\u0645\u062E\u064A';
+ t['\uFDBA'] = '\u0644\u062C\u0645';
+ t['\uFDBB'] = '\u0643\u0645\u0645';
+ t['\uFDBC'] = '\u0644\u062C\u0645';
+ t['\uFDBD'] = '\u0646\u062C\u062D';
+ t['\uFDBE'] = '\u062C\u062D\u064A';
+ t['\uFDBF'] = '\u062D\u062C\u064A';
+ t['\uFDC0'] = '\u0645\u062C\u064A';
+ t['\uFDC1'] = '\u0641\u0645\u064A';
+ t['\uFDC2'] = '\u0628\u062D\u064A';
+ t['\uFDC3'] = '\u0643\u0645\u0645';
+ t['\uFDC4'] = '\u0639\u062C\u0645';
+ t['\uFDC5'] = '\u0635\u0645\u0645';
+ t['\uFDC6'] = '\u0633\u062E\u064A';
+ t['\uFDC7'] = '\u0646\u062C\u064A';
+ t['\uFE49'] = '\u203E';
+ t['\uFE4A'] = '\u203E';
+ t['\uFE4B'] = '\u203E';
+ t['\uFE4C'] = '\u203E';
+ t['\uFE4D'] = '\u005F';
+ t['\uFE4E'] = '\u005F';
+ t['\uFE4F'] = '\u005F';
+ t['\uFE80'] = '\u0621';
+ t['\uFE81'] = '\u0622';
+ t['\uFE82'] = '\u0622';
+ t['\uFE83'] = '\u0623';
+ t['\uFE84'] = '\u0623';
+ t['\uFE85'] = '\u0624';
+ t['\uFE86'] = '\u0624';
+ t['\uFE87'] = '\u0625';
+ t['\uFE88'] = '\u0625';
+ t['\uFE89'] = '\u0626';
+ t['\uFE8A'] = '\u0626';
+ t['\uFE8B'] = '\u0626';
+ t['\uFE8C'] = '\u0626';
+ t['\uFE8D'] = '\u0627';
+ t['\uFE8E'] = '\u0627';
+ t['\uFE8F'] = '\u0628';
+ t['\uFE90'] = '\u0628';
+ t['\uFE91'] = '\u0628';
+ t['\uFE92'] = '\u0628';
+ t['\uFE93'] = '\u0629';
+ t['\uFE94'] = '\u0629';
+ t['\uFE95'] = '\u062A';
+ t['\uFE96'] = '\u062A';
+ t['\uFE97'] = '\u062A';
+ t['\uFE98'] = '\u062A';
+ t['\uFE99'] = '\u062B';
+ t['\uFE9A'] = '\u062B';
+ t['\uFE9B'] = '\u062B';
+ t['\uFE9C'] = '\u062B';
+ t['\uFE9D'] = '\u062C';
+ t['\uFE9E'] = '\u062C';
+ t['\uFE9F'] = '\u062C';
+ t['\uFEA0'] = '\u062C';
+ t['\uFEA1'] = '\u062D';
+ t['\uFEA2'] = '\u062D';
+ t['\uFEA3'] = '\u062D';
+ t['\uFEA4'] = '\u062D';
+ t['\uFEA5'] = '\u062E';
+ t['\uFEA6'] = '\u062E';
+ t['\uFEA7'] = '\u062E';
+ t['\uFEA8'] = '\u062E';
+ t['\uFEA9'] = '\u062F';
+ t['\uFEAA'] = '\u062F';
+ t['\uFEAB'] = '\u0630';
+ t['\uFEAC'] = '\u0630';
+ t['\uFEAD'] = '\u0631';
+ t['\uFEAE'] = '\u0631';
+ t['\uFEAF'] = '\u0632';
+ t['\uFEB0'] = '\u0632';
+ t['\uFEB1'] = '\u0633';
+ t['\uFEB2'] = '\u0633';
+ t['\uFEB3'] = '\u0633';
+ t['\uFEB4'] = '\u0633';
+ t['\uFEB5'] = '\u0634';
+ t['\uFEB6'] = '\u0634';
+ t['\uFEB7'] = '\u0634';
+ t['\uFEB8'] = '\u0634';
+ t['\uFEB9'] = '\u0635';
+ t['\uFEBA'] = '\u0635';
+ t['\uFEBB'] = '\u0635';
+ t['\uFEBC'] = '\u0635';
+ t['\uFEBD'] = '\u0636';
+ t['\uFEBE'] = '\u0636';
+ t['\uFEBF'] = '\u0636';
+ t['\uFEC0'] = '\u0636';
+ t['\uFEC1'] = '\u0637';
+ t['\uFEC2'] = '\u0637';
+ t['\uFEC3'] = '\u0637';
+ t['\uFEC4'] = '\u0637';
+ t['\uFEC5'] = '\u0638';
+ t['\uFEC6'] = '\u0638';
+ t['\uFEC7'] = '\u0638';
+ t['\uFEC8'] = '\u0638';
+ t['\uFEC9'] = '\u0639';
+ t['\uFECA'] = '\u0639';
+ t['\uFECB'] = '\u0639';
+ t['\uFECC'] = '\u0639';
+ t['\uFECD'] = '\u063A';
+ t['\uFECE'] = '\u063A';
+ t['\uFECF'] = '\u063A';
+ t['\uFED0'] = '\u063A';
+ t['\uFED1'] = '\u0641';
+ t['\uFED2'] = '\u0641';
+ t['\uFED3'] = '\u0641';
+ t['\uFED4'] = '\u0641';
+ t['\uFED5'] = '\u0642';
+ t['\uFED6'] = '\u0642';
+ t['\uFED7'] = '\u0642';
+ t['\uFED8'] = '\u0642';
+ t['\uFED9'] = '\u0643';
+ t['\uFEDA'] = '\u0643';
+ t['\uFEDB'] = '\u0643';
+ t['\uFEDC'] = '\u0643';
+ t['\uFEDD'] = '\u0644';
+ t['\uFEDE'] = '\u0644';
+ t['\uFEDF'] = '\u0644';
+ t['\uFEE0'] = '\u0644';
+ t['\uFEE1'] = '\u0645';
+ t['\uFEE2'] = '\u0645';
+ t['\uFEE3'] = '\u0645';
+ t['\uFEE4'] = '\u0645';
+ t['\uFEE5'] = '\u0646';
+ t['\uFEE6'] = '\u0646';
+ t['\uFEE7'] = '\u0646';
+ t['\uFEE8'] = '\u0646';
+ t['\uFEE9'] = '\u0647';
+ t['\uFEEA'] = '\u0647';
+ t['\uFEEB'] = '\u0647';
+ t['\uFEEC'] = '\u0647';
+ t['\uFEED'] = '\u0648';
+ t['\uFEEE'] = '\u0648';
+ t['\uFEEF'] = '\u0649';
+ t['\uFEF0'] = '\u0649';
+ t['\uFEF1'] = '\u064A';
+ t['\uFEF2'] = '\u064A';
+ t['\uFEF3'] = '\u064A';
+ t['\uFEF4'] = '\u064A';
+ t['\uFEF5'] = '\u0644\u0622';
+ t['\uFEF6'] = '\u0644\u0622';
+ t['\uFEF7'] = '\u0644\u0623';
+ t['\uFEF8'] = '\u0644\u0623';
+ t['\uFEF9'] = '\u0644\u0625';
+ t['\uFEFA'] = '\u0644\u0625';
+ t['\uFEFB'] = '\u0644\u0627';
+ t['\uFEFC'] = '\u0644\u0627';
+});
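+// Reverses a string whose first code point falls in an RTL Unicode range (see isRTLRangeFor); single characters and LTR strings pass through unchanged.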
+function reverseIfRtl(chars) {
+ var charsLength = chars.length;
+ if (charsLength <= 1 || !isRTLRangeFor(chars.charCodeAt(0))) {
+ return chars;
+ }
+ var s = '';
+ for (var ii = charsLength - 1; ii >= 0; ii--) {
+ s += chars[ii];
+ }
+ return s;
+}
+exports.mapSpecialUnicodeValues = mapSpecialUnicodeValues;
+exports.reverseIfRtl = reverseIfRtl;
+exports.getUnicodeRangeFor = getUnicodeRangeFor;
+exports.getNormalizedUnicodes = getNormalizedUnicodes;
+exports.getUnicodeForGlyph = getUnicodeForGlyph;
+
+/***/ }),
+/* 181 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.FontRendererFactory = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _cff_parser = __w_pdfjs_require__(175);
+
+var _glyphlist = __w_pdfjs_require__(178);
+
+var _encodings = __w_pdfjs_require__(177);
+
+var _stream = __w_pdfjs_require__(158);
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
+
+function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
+
+function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
+
+function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
+
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
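+// FontRendererFactory compiles glyph outlines from embedded TrueType ('glyf') and CFF (Type2 charstring) font programs into canvas-style path command lists.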
+var FontRendererFactory = function FontRendererFactoryClosure() {
+ function getLong(data, offset) {
+ return data[offset] << 24 | data[offset + 1] << 16 | data[offset + 2] << 8 | data[offset + 3];
+ }
+
+ function getUshort(data, offset) {
+ return data[offset] << 8 | data[offset + 1];
+ }
+
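+  // Parses the 'cmap' table: picks a subtable offset, then decodes a format 4 or format 12 subtable into { start, end, idDelta, ids } ranges; other formats throw a FormatError.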
+ function parseCmap(data, start, end) {
+ var offset = getUshort(data, start + 2) === 1 ? getLong(data, start + 8) : getLong(data, start + 16);
+ var format = getUshort(data, start + offset);
+ var ranges, p, i;
+
+ if (format === 4) {
+ getUshort(data, start + offset + 2);
+ var segCount = getUshort(data, start + offset + 6) >> 1;
+ p = start + offset + 14;
+ ranges = [];
+
+ for (i = 0; i < segCount; i++, p += 2) {
+ ranges[i] = {
+ end: getUshort(data, p)
+ };
+ }
+
+ p += 2;
+
+ for (i = 0; i < segCount; i++, p += 2) {
+ ranges[i].start = getUshort(data, p);
+ }
+
+ for (i = 0; i < segCount; i++, p += 2) {
+ ranges[i].idDelta = getUshort(data, p);
+ }
+
+ for (i = 0; i < segCount; i++, p += 2) {
+ var idOffset = getUshort(data, p);
+
+ if (idOffset === 0) {
+ continue;
+ }
+
+ ranges[i].ids = [];
+
+ for (var j = 0, jj = ranges[i].end - ranges[i].start + 1; j < jj; j++) {
+ ranges[i].ids[j] = getUshort(data, p + idOffset);
+ idOffset += 2;
+ }
+ }
+
+ return ranges;
+ } else if (format === 12) {
+ getLong(data, start + offset + 4);
+ var groups = getLong(data, start + offset + 12);
+ p = start + offset + 16;
+ ranges = [];
+
+ for (i = 0; i < groups; i++) {
+ ranges.push({
+ start: getLong(data, p),
+ end: getLong(data, p + 4),
+ idDelta: getLong(data, p + 8) - getLong(data, p)
+ });
+ p += 12;
+ }
+
+ return ranges;
+ }
+
+ throw new _util.FormatError("unsupported cmap: ".concat(format));
+ }
+
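+  // Parses the 'CFF ' table with CFFParser and exposes the pieces the compiler needs: charstrings, local/global subrs, and CID-font data (fdSelect/fdArray).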
+ function parseCff(data, start, end, seacAnalysisEnabled) {
+ var properties = {};
+ var parser = new _cff_parser.CFFParser(new _stream.Stream(data, start, end - start), properties, seacAnalysisEnabled);
+ var cff = parser.parse();
+ return {
+ glyphs: cff.charStrings.objects,
+ subrs: cff.topDict.privateDict && cff.topDict.privateDict.subrsIndex && cff.topDict.privateDict.subrsIndex.objects,
+ gsubrs: cff.globalSubrIndex && cff.globalSubrIndex.objects,
+ isCFFCIDFont: cff.isCIDFont,
+ fdSelect: cff.fdSelect,
+ fdArray: cff.fdArray
+ };
+ }
+
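+  // Slices the 'glyf' table into per-glyph byte ranges using the 'loca' offsets; short-format loca entries store offset/2, hence the extra left shift when decoding.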
+ function parseGlyfTable(glyf, loca, isGlyphLocationsLong) {
+ var itemSize, itemDecode;
+
+ if (isGlyphLocationsLong) {
+ itemSize = 4;
+
+ itemDecode = function fontItemDecodeLong(data, offset) {
+ return data[offset] << 24 | data[offset + 1] << 16 | data[offset + 2] << 8 | data[offset + 3];
+ };
+ } else {
+ itemSize = 2;
+
+ itemDecode = function fontItemDecode(data, offset) {
+ return data[offset] << 9 | data[offset + 1] << 1;
+ };
+ }
+
+ var glyphs = [];
+ var startOffset = itemDecode(loca, 0);
+
+ for (var j = itemSize; j < loca.length; j += itemSize) {
+ var endOffset = itemDecode(loca, j);
+ glyphs.push(glyf.subarray(startOffset, endOffset));
+ startOffset = endOffset;
+ }
+
+ return glyphs;
+ }
+
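+  // Binary search over the cmap ranges: maps a Unicode code point to { charCode, glyphId }, returning glyphId 0 when the code point is unmapped.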
+ function lookupCmap(ranges, unicode) {
+ var code = unicode.codePointAt(0),
+ gid = 0;
+ var l = 0,
+ r = ranges.length - 1;
+
+ while (l < r) {
+ var c = l + r + 1 >> 1;
+
+ if (code < ranges[c].start) {
+ r = c - 1;
+ } else {
+ l = c;
+ }
+ }
+
+ if (ranges[l].start <= code && code <= ranges[l].end) {
+ gid = ranges[l].idDelta + (ranges[l].ids ? ranges[l].ids[code - ranges[l].start] : code) & 0xFFFF;
+ }
+
+ return {
+ charCode: code,
+ glyphId: gid
+ };
+ }
+
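+  // Decodes a TrueType glyf outline into moveTo/lineTo/quadraticCurveTo commands; composite glyphs recurse into their components with the component transform applied.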
+ function compileGlyf(code, cmds, font) {
+ function moveTo(x, y) {
+ cmds.push({
+ cmd: 'moveTo',
+ args: [x, y]
+ });
+ }
+
+ function lineTo(x, y) {
+ cmds.push({
+ cmd: 'lineTo',
+ args: [x, y]
+ });
+ }
+
+ function quadraticCurveTo(xa, ya, x, y) {
+ cmds.push({
+ cmd: 'quadraticCurveTo',
+ args: [xa, ya, x, y]
+ });
+ }
+
+ var i = 0;
+ var numberOfContours = (code[i] << 24 | code[i + 1] << 16) >> 16;
+ var flags;
+ var x = 0,
+ y = 0;
+ i += 10;
+
+ if (numberOfContours < 0) {
+ do {
+ flags = code[i] << 8 | code[i + 1];
+ var glyphIndex = code[i + 2] << 8 | code[i + 3];
+ i += 4;
+ var arg1, arg2;
+
+ if (flags & 0x01) {
+ arg1 = (code[i] << 24 | code[i + 1] << 16) >> 16;
+ arg2 = (code[i + 2] << 24 | code[i + 3] << 16) >> 16;
+ i += 4;
+ } else {
+ arg1 = code[i++];
+ arg2 = code[i++];
+ }
+
+ if (flags & 0x02) {
+ x = arg1;
+ y = arg2;
+ } else {
+ x = 0;
+ y = 0;
+ }
+
+ var scaleX = 1,
+ scaleY = 1,
+ scale01 = 0,
+ scale10 = 0;
+
+ if (flags & 0x08) {
+ scaleX = scaleY = (code[i] << 24 | code[i + 1] << 16) / 1073741824;
+ i += 2;
+ } else if (flags & 0x40) {
+ scaleX = (code[i] << 24 | code[i + 1] << 16) / 1073741824;
+ scaleY = (code[i + 2] << 24 | code[i + 3] << 16) / 1073741824;
+ i += 4;
+ } else if (flags & 0x80) {
+ scaleX = (code[i] << 24 | code[i + 1] << 16) / 1073741824;
+ scale01 = (code[i + 2] << 24 | code[i + 3] << 16) / 1073741824;
+ scale10 = (code[i + 4] << 24 | code[i + 5] << 16) / 1073741824;
+ scaleY = (code[i + 6] << 24 | code[i + 7] << 16) / 1073741824;
+ i += 8;
+ }
+
+ var subglyph = font.glyphs[glyphIndex];
+
+ if (subglyph) {
+ cmds.push({
+ cmd: 'save'
+ });
+ cmds.push({
+ cmd: 'transform',
+ args: [scaleX, scale01, scale10, scaleY, x, y]
+ });
+ compileGlyf(subglyph, cmds, font);
+ cmds.push({
+ cmd: 'restore'
+ });
+ }
+ } while (flags & 0x20);
+ } else {
+ var endPtsOfContours = [];
+ var j, jj;
+
+ for (j = 0; j < numberOfContours; j++) {
+ endPtsOfContours.push(code[i] << 8 | code[i + 1]);
+ i += 2;
+ }
+
+ var instructionLength = code[i] << 8 | code[i + 1];
+ i += 2 + instructionLength;
+ var numberOfPoints = endPtsOfContours[endPtsOfContours.length - 1] + 1;
+ var points = [];
+
+ while (points.length < numberOfPoints) {
+ flags = code[i++];
+ var repeat = 1;
+
+ if (flags & 0x08) {
+ repeat += code[i++];
+ }
+
+ while (repeat-- > 0) {
+ points.push({
+ flags: flags
+ });
+ }
+ }
+
+ for (j = 0; j < numberOfPoints; j++) {
+ switch (points[j].flags & 0x12) {
+ case 0x00:
+ x += (code[i] << 24 | code[i + 1] << 16) >> 16;
+ i += 2;
+ break;
+
+ case 0x02:
+ x -= code[i++];
+ break;
+
+ case 0x12:
+ x += code[i++];
+ break;
+ }
+
+ points[j].x = x;
+ }
+
+ for (j = 0; j < numberOfPoints; j++) {
+ switch (points[j].flags & 0x24) {
+ case 0x00:
+ y += (code[i] << 24 | code[i + 1] << 16) >> 16;
+ i += 2;
+ break;
+
+ case 0x04:
+ y -= code[i++];
+ break;
+
+ case 0x24:
+ y += code[i++];
+ break;
+ }
+
+ points[j].y = y;
+ }
+
+ var startPoint = 0;
+
+ for (i = 0; i < numberOfContours; i++) {
+ var endPoint = endPtsOfContours[i];
+ var contour = points.slice(startPoint, endPoint + 1);
+
+ if (contour[0].flags & 1) {
+ contour.push(contour[0]);
+ } else if (contour[contour.length - 1].flags & 1) {
+ contour.unshift(contour[contour.length - 1]);
+ } else {
+ var p = {
+ flags: 1,
+ x: (contour[0].x + contour[contour.length - 1].x) / 2,
+ y: (contour[0].y + contour[contour.length - 1].y) / 2
+ };
+ contour.unshift(p);
+ contour.push(p);
+ }
+
+ moveTo(contour[0].x, contour[0].y);
+
+ for (j = 1, jj = contour.length; j < jj; j++) {
+ if (contour[j].flags & 1) {
+ lineTo(contour[j].x, contour[j].y);
+ } else if (contour[j + 1].flags & 1) {
+ quadraticCurveTo(contour[j].x, contour[j].y, contour[j + 1].x, contour[j + 1].y);
+ j++;
+ } else {
+ quadraticCurveTo(contour[j].x, contour[j].y, (contour[j].x + contour[j + 1].x) / 2, (contour[j].y + contour[j + 1].y) / 2);
+ }
+ }
+
+ startPoint = endPoint + 1;
+ }
+ }
+ }
+
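+  // Interprets a Type2 (CFF) charstring: tracks the current point, resolves callsubr/callgsubr, and emits moveTo/lineTo/bezierCurveTo commands; an endchar with four extra arguments triggers seac-style accent composition.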
+ function compileCharString(code, cmds, font, glyphId) {
+ var stack = [];
+ var x = 0,
+ y = 0;
+ var stems = 0;
+
+ function moveTo(x, y) {
+ cmds.push({
+ cmd: 'moveTo',
+ args: [x, y]
+ });
+ }
+
+ function lineTo(x, y) {
+ cmds.push({
+ cmd: 'lineTo',
+ args: [x, y]
+ });
+ }
+
+ function bezierCurveTo(x1, y1, x2, y2, x, y) {
+ cmds.push({
+ cmd: 'bezierCurveTo',
+ args: [x1, y1, x2, y2, x, y]
+ });
+ }
+
+ function parse(code) {
+ var i = 0;
+
+ while (i < code.length) {
+ var stackClean = false;
+ var v = code[i++];
+ var xa, xb, ya, yb, y1, y2, y3, n, subrCode;
+
+ switch (v) {
+ case 1:
+ stems += stack.length >> 1;
+ stackClean = true;
+ break;
+
+ case 3:
+ stems += stack.length >> 1;
+ stackClean = true;
+ break;
+
+ case 4:
+ y += stack.pop();
+ moveTo(x, y);
+ stackClean = true;
+ break;
+
+ case 5:
+ while (stack.length > 0) {
+ x += stack.shift();
+ y += stack.shift();
+ lineTo(x, y);
+ }
+
+ break;
+
+ case 6:
+ while (stack.length > 0) {
+ x += stack.shift();
+ lineTo(x, y);
+
+ if (stack.length === 0) {
+ break;
+ }
+
+ y += stack.shift();
+ lineTo(x, y);
+ }
+
+ break;
+
+ case 7:
+ while (stack.length > 0) {
+ y += stack.shift();
+ lineTo(x, y);
+
+ if (stack.length === 0) {
+ break;
+ }
+
+ x += stack.shift();
+ lineTo(x, y);
+ }
+
+ break;
+
+ case 8:
+ while (stack.length > 0) {
+ xa = x + stack.shift();
+ ya = y + stack.shift();
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ x = xb + stack.shift();
+ y = yb + stack.shift();
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+ }
+
+ break;
+
+ case 10:
+ n = stack.pop();
+ subrCode = null;
+
+ if (font.isCFFCIDFont) {
+ var fdIndex = font.fdSelect.getFDIndex(glyphId);
+
+ if (fdIndex >= 0 && fdIndex < font.fdArray.length) {
+ var fontDict = font.fdArray[fdIndex],
+ subrs = void 0;
+
+ if (fontDict.privateDict && fontDict.privateDict.subrsIndex) {
+ subrs = fontDict.privateDict.subrsIndex.objects;
+ }
+
+ if (subrs) {
+ var numSubrs = subrs.length;
+ n += numSubrs < 1240 ? 107 : numSubrs < 33900 ? 1131 : 32768;
+ subrCode = subrs[n];
+ }
+ } else {
+ (0, _util.warn)('Invalid fd index for glyph index.');
+ }
+ } else {
+ subrCode = font.subrs[n + font.subrsBias];
+ }
+
+ if (subrCode) {
+ parse(subrCode);
+ }
+
+ break;
+
+ case 11:
+ return;
+
+ case 12:
+ v = code[i++];
+
+ switch (v) {
+ case 34:
+ xa = x + stack.shift();
+ xb = xa + stack.shift();
+ y1 = y + stack.shift();
+ x = xb + stack.shift();
+ bezierCurveTo(xa, y, xb, y1, x, y1);
+ xa = x + stack.shift();
+ xb = xa + stack.shift();
+ x = xb + stack.shift();
+ bezierCurveTo(xa, y1, xb, y, x, y);
+ break;
+
+ case 35:
+ xa = x + stack.shift();
+ ya = y + stack.shift();
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ x = xb + stack.shift();
+ y = yb + stack.shift();
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+ xa = x + stack.shift();
+ ya = y + stack.shift();
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ x = xb + stack.shift();
+ y = yb + stack.shift();
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+ stack.pop();
+ break;
+
+ case 36:
+ xa = x + stack.shift();
+ y1 = y + stack.shift();
+ xb = xa + stack.shift();
+ y2 = y1 + stack.shift();
+ x = xb + stack.shift();
+ bezierCurveTo(xa, y1, xb, y2, x, y2);
+ xa = x + stack.shift();
+ xb = xa + stack.shift();
+ y3 = y2 + stack.shift();
+ x = xb + stack.shift();
+ bezierCurveTo(xa, y2, xb, y3, x, y);
+ break;
+
+ case 37:
+ var x0 = x,
+ y0 = y;
+ xa = x + stack.shift();
+ ya = y + stack.shift();
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ x = xb + stack.shift();
+ y = yb + stack.shift();
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+ xa = x + stack.shift();
+ ya = y + stack.shift();
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ x = xb;
+ y = yb;
+
+ if (Math.abs(x - x0) > Math.abs(y - y0)) {
+ x += stack.shift();
+ } else {
+ y += stack.shift();
+ }
+
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+ break;
+
+ default:
+ throw new _util.FormatError("unknown operator: 12 ".concat(v));
+ }
+
+ break;
+
+ case 14:
+ if (stack.length >= 4) {
+ var achar = stack.pop();
+ var bchar = stack.pop();
+ y = stack.pop();
+ x = stack.pop();
+ cmds.push({
+ cmd: 'save'
+ });
+ cmds.push({
+ cmd: 'translate',
+ args: [x, y]
+ });
+ var cmap = lookupCmap(font.cmap, String.fromCharCode(font.glyphNameMap[_encodings.StandardEncoding[achar]]));
+ compileCharString(font.glyphs[cmap.glyphId], cmds, font, cmap.glyphId);
+ cmds.push({
+ cmd: 'restore'
+ });
+ cmap = lookupCmap(font.cmap, String.fromCharCode(font.glyphNameMap[_encodings.StandardEncoding[bchar]]));
+ compileCharString(font.glyphs[cmap.glyphId], cmds, font, cmap.glyphId);
+ }
+
+ return;
+
+ case 18:
+ stems += stack.length >> 1;
+ stackClean = true;
+ break;
+
+ case 19:
+ stems += stack.length >> 1;
+ i += stems + 7 >> 3;
+ stackClean = true;
+ break;
+
+ case 20:
+ stems += stack.length >> 1;
+ i += stems + 7 >> 3;
+ stackClean = true;
+ break;
+
+ case 21:
+ y += stack.pop();
+ x += stack.pop();
+ moveTo(x, y);
+ stackClean = true;
+ break;
+
+ case 22:
+ x += stack.pop();
+ moveTo(x, y);
+ stackClean = true;
+ break;
+
+ case 23:
+ stems += stack.length >> 1;
+ stackClean = true;
+ break;
+
+ case 24:
+ while (stack.length > 2) {
+ xa = x + stack.shift();
+ ya = y + stack.shift();
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ x = xb + stack.shift();
+ y = yb + stack.shift();
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+ }
+
+ x += stack.shift();
+ y += stack.shift();
+ lineTo(x, y);
+ break;
+
+ case 25:
+ while (stack.length > 6) {
+ x += stack.shift();
+ y += stack.shift();
+ lineTo(x, y);
+ }
+
+ xa = x + stack.shift();
+ ya = y + stack.shift();
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ x = xb + stack.shift();
+ y = yb + stack.shift();
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+ break;
+
+ case 26:
+ if (stack.length % 2) {
+ x += stack.shift();
+ }
+
+ while (stack.length > 0) {
+ xa = x;
+ ya = y + stack.shift();
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ x = xb;
+ y = yb + stack.shift();
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+ }
+
+ break;
+
+ case 27:
+ if (stack.length % 2) {
+ y += stack.shift();
+ }
+
+ while (stack.length > 0) {
+ xa = x + stack.shift();
+ ya = y;
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ x = xb + stack.shift();
+ y = yb;
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+ }
+
+ break;
+
+ case 28:
+ stack.push((code[i] << 24 | code[i + 1] << 16) >> 16);
+ i += 2;
+ break;
+
+ case 29:
+ n = stack.pop() + font.gsubrsBias;
+ subrCode = font.gsubrs[n];
+
+ if (subrCode) {
+ parse(subrCode);
+ }
+
+ break;
+
+ case 30:
+ while (stack.length > 0) {
+ xa = x;
+ ya = y + stack.shift();
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ x = xb + stack.shift();
+ y = yb + (stack.length === 1 ? stack.shift() : 0);
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+
+ if (stack.length === 0) {
+ break;
+ }
+
+ xa = x + stack.shift();
+ ya = y;
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ y = yb + stack.shift();
+ x = xb + (stack.length === 1 ? stack.shift() : 0);
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+ }
+
+ break;
+
+ case 31:
+ while (stack.length > 0) {
+ xa = x + stack.shift();
+ ya = y;
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ y = yb + stack.shift();
+ x = xb + (stack.length === 1 ? stack.shift() : 0);
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+
+ if (stack.length === 0) {
+ break;
+ }
+
+ xa = x;
+ ya = y + stack.shift();
+ xb = xa + stack.shift();
+ yb = ya + stack.shift();
+ x = xb + stack.shift();
+ y = yb + (stack.length === 1 ? stack.shift() : 0);
+ bezierCurveTo(xa, ya, xb, yb, x, y);
+ }
+
+ break;
+
+ default:
+ if (v < 32) {
+ throw new _util.FormatError("unknown operator: ".concat(v));
+ }
+
+ if (v < 247) {
+ stack.push(v - 139);
+ } else if (v < 251) {
+ stack.push((v - 247) * 256 + code[i++] + 108);
+ } else if (v < 255) {
+ stack.push(-(v - 251) * 256 - code[i++] - 108);
+ } else {
+ stack.push((code[i] << 24 | code[i + 1] << 16 | code[i + 2] << 8 | code[i + 3]) / 65536);
+ i += 4;
+ }
+
+ break;
+ }
+
+ if (stackClean) {
+ stack.length = 0;
+ }
+ }
+ }
+
+ parse(code);
+ }
+
+ var NOOP = [];
+
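+  // Abstract base class: caches compiled glyph paths by glyph id and records charCode -> glyphId mappings; subclasses implement compileGlyphImpl.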
+ var CompiledFont =
+ /*#__PURE__*/
+ function () {
+ function CompiledFont(fontMatrix) {
+ _classCallCheck(this, CompiledFont);
+
+ if (this.constructor === CompiledFont) {
+ (0, _util.unreachable)('Cannot initialize CompiledFont.');
+ }
+
+ this.fontMatrix = fontMatrix;
+ this.compiledGlyphs = Object.create(null);
+ this.compiledCharCodeToGlyphId = Object.create(null);
+ }
+
+ _createClass(CompiledFont, [{
+ key: "getPathJs",
+ value: function getPathJs(unicode) {
+ var cmap = lookupCmap(this.cmap, unicode);
+ var fn = this.compiledGlyphs[cmap.glyphId];
+
+ if (!fn) {
+ fn = this.compileGlyph(this.glyphs[cmap.glyphId], cmap.glyphId);
+ this.compiledGlyphs[cmap.glyphId] = fn;
+ }
+
+ if (this.compiledCharCodeToGlyphId[cmap.charCode] === undefined) {
+ this.compiledCharCodeToGlyphId[cmap.charCode] = cmap.glyphId;
+ }
+
+ return fn;
+ }
+ }, {
+ key: "compileGlyph",
+ value: function compileGlyph(code, glyphId) {
+ if (!code || code.length === 0 || code[0] === 14) {
+ return NOOP;
+ }
+
+ var fontMatrix = this.fontMatrix;
+
+ if (this.isCFFCIDFont) {
+ var fdIndex = this.fdSelect.getFDIndex(glyphId);
+
+ if (fdIndex >= 0 && fdIndex < this.fdArray.length) {
+ var fontDict = this.fdArray[fdIndex];
+ fontMatrix = fontDict.getByName('FontMatrix') || _util.FONT_IDENTITY_MATRIX;
+ } else {
+ (0, _util.warn)('Invalid fd index for glyph index.');
+ }
+ }
+
+ var cmds = [];
+ cmds.push({
+ cmd: 'save'
+ });
+ cmds.push({
+ cmd: 'transform',
+ args: fontMatrix.slice()
+ });
+ cmds.push({
+ cmd: 'scale',
+ args: ['size', '-size']
+ });
+ this.compileGlyphImpl(code, cmds, glyphId);
+ cmds.push({
+ cmd: 'restore'
+ });
+ return cmds;
+ }
+ }, {
+ key: "compileGlyphImpl",
+ value: function compileGlyphImpl() {
+        (0, _util.unreachable)('Subclasses should implement this.');
+ }
+ }, {
+ key: "hasBuiltPath",
+ value: function hasBuiltPath(unicode) {
+ var cmap = lookupCmap(this.cmap, unicode);
+ return this.compiledGlyphs[cmap.glyphId] !== undefined && this.compiledCharCodeToGlyphId[cmap.charCode] !== undefined;
+ }
+ }]);
+
+ return CompiledFont;
+ }();
+
+ var TrueTypeCompiled =
+ /*#__PURE__*/
+ function (_CompiledFont) {
+ _inherits(TrueTypeCompiled, _CompiledFont);
+
+ function TrueTypeCompiled(glyphs, cmap, fontMatrix) {
+ var _this;
+
+ _classCallCheck(this, TrueTypeCompiled);
+
+ _this = _possibleConstructorReturn(this, _getPrototypeOf(TrueTypeCompiled).call(this, fontMatrix || [0.000488, 0, 0, 0.000488, 0, 0]));
+ _this.glyphs = glyphs;
+ _this.cmap = cmap;
+ return _this;
+ }
+
+ _createClass(TrueTypeCompiled, [{
+ key: "compileGlyphImpl",
+ value: function compileGlyphImpl(code, cmds) {
+ compileGlyf(code, cmds, this);
+ }
+ }]);
+
+ return TrueTypeCompiled;
+ }(CompiledFont);
+
+ var Type2Compiled =
+ /*#__PURE__*/
+ function (_CompiledFont2) {
+ _inherits(Type2Compiled, _CompiledFont2);
+
+ function Type2Compiled(cffInfo, cmap, fontMatrix, glyphNameMap) {
+ var _this2;
+
+ _classCallCheck(this, Type2Compiled);
+
+ _this2 = _possibleConstructorReturn(this, _getPrototypeOf(Type2Compiled).call(this, fontMatrix || [0.001, 0, 0, 0.001, 0, 0]));
+ _this2.glyphs = cffInfo.glyphs;
+ _this2.gsubrs = cffInfo.gsubrs || [];
+ _this2.subrs = cffInfo.subrs || [];
+ _this2.cmap = cmap;
+ _this2.glyphNameMap = glyphNameMap || (0, _glyphlist.getGlyphsUnicode)();
+ _this2.gsubrsBias = _this2.gsubrs.length < 1240 ? 107 : _this2.gsubrs.length < 33900 ? 1131 : 32768;
+ _this2.subrsBias = _this2.subrs.length < 1240 ? 107 : _this2.subrs.length < 33900 ? 1131 : 32768;
+ _this2.isCFFCIDFont = cffInfo.isCFFCIDFont;
+ _this2.fdSelect = cffInfo.fdSelect;
+ _this2.fdArray = cffInfo.fdArray;
+ return _this2;
+ }
+
+ _createClass(Type2Compiled, [{
+ key: "compileGlyphImpl",
+ value: function compileGlyphImpl(code, cmds, glyphId) {
+ compileCharString(code, cmds, this, glyphId);
+ }
+ }]);
+
+ return Type2Compiled;
+ }(CompiledFont);
+
+ return {
+ create: function FontRendererFactory_create(font, seacAnalysisEnabled) {
+ var data = new Uint8Array(font.data);
+ var cmap, glyf, loca, cff, indexToLocFormat, unitsPerEm;
+ var numTables = getUshort(data, 4);
+
+ for (var i = 0, p = 12; i < numTables; i++, p += 16) {
+ var tag = (0, _util.bytesToString)(data.subarray(p, p + 4));
+ var offset = getLong(data, p + 8);
+ var length = getLong(data, p + 12);
+
+ switch (tag) {
+ case 'cmap':
+ cmap = parseCmap(data, offset, offset + length);
+ break;
+
+ case 'glyf':
+ glyf = data.subarray(offset, offset + length);
+ break;
+
+ case 'loca':
+ loca = data.subarray(offset, offset + length);
+ break;
+
+ case 'head':
+ unitsPerEm = getUshort(data, offset + 18);
+ indexToLocFormat = getUshort(data, offset + 50);
+ break;
+
+ case 'CFF ':
+ cff = parseCff(data, offset, offset + length, seacAnalysisEnabled);
+ break;
+ }
+ }
+
+ if (glyf) {
+ var fontMatrix = !unitsPerEm ? font.fontMatrix : [1 / unitsPerEm, 0, 0, 1 / unitsPerEm, 0, 0];
+ return new TrueTypeCompiled(parseGlyfTable(glyf, loca, indexToLocFormat), cmap, fontMatrix);
+ }
+
+ return new Type2Compiled(cff, cmap, font.fontMatrix, font.glyphNameMap);
+ }
+ };
+}();
+
+exports.FontRendererFactory = FontRendererFactory;
+
+/***/ }),
+/* 182 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.Type1Parser = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _encodings = __w_pdfjs_require__(177);
+
+var _stream = __w_pdfjs_require__(158);
+
+var HINTING_ENABLED = false;
+
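+// Translates Type1 charstring programs into the Type2 (CFF) charstring operators listed in COMMAND_MAP.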
+var Type1CharString = function Type1CharStringClosure() {
+ var COMMAND_MAP = {
+ 'hstem': [1],
+ 'vstem': [3],
+ 'vmoveto': [4],
+ 'rlineto': [5],
+ 'hlineto': [6],
+ 'vlineto': [7],
+ 'rrcurveto': [8],
+ 'callsubr': [10],
+ 'flex': [12, 35],
+ 'drop': [12, 18],
+ 'endchar': [14],
+ 'rmoveto': [21],
+ 'hmoveto': [22],
+ 'vhcurveto': [30],
+ 'hvcurveto': [31]
+ };
+
+ function Type1CharString() {
+ this.width = 0;
+ this.lsb = 0;
+ this.flexing = false;
+ this.output = [];
+ this.stack = [];
+ }
+
+ Type1CharString.prototype = {
+ convert: function Type1CharString_convert(encoded, subrs, seacAnalysisEnabled) {
+ var count = encoded.length;
+ var error = false;
+ var wx, sbx, subrNumber;
+
+ for (var i = 0; i < count; i++) {
+ var value = encoded[i];
+
+ if (value < 32) {
+ if (value === 12) {
+ value = (value << 8) + encoded[++i];
+ }
+
+ switch (value) {
+ case 1:
+ if (!HINTING_ENABLED) {
+ this.stack = [];
+ break;
+ }
+
+ error = this.executeCommand(2, COMMAND_MAP.hstem);
+ break;
+
+ case 3:
+ if (!HINTING_ENABLED) {
+ this.stack = [];
+ break;
+ }
+
+ error = this.executeCommand(2, COMMAND_MAP.vstem);
+ break;
+
+ case 4:
+ if (this.flexing) {
+ if (this.stack.length < 1) {
+ error = true;
+ break;
+ }
+
+ var dy = this.stack.pop();
+ this.stack.push(0, dy);
+ break;
+ }
+
+ error = this.executeCommand(1, COMMAND_MAP.vmoveto);
+ break;
+
+ case 5:
+ error = this.executeCommand(2, COMMAND_MAP.rlineto);
+ break;
+
+ case 6:
+ error = this.executeCommand(1, COMMAND_MAP.hlineto);
+ break;
+
+ case 7:
+ error = this.executeCommand(1, COMMAND_MAP.vlineto);
+ break;
+
+ case 8:
+ error = this.executeCommand(6, COMMAND_MAP.rrcurveto);
+ break;
+
+ case 9:
+ this.stack = [];
+ break;
+
+ case 10:
+ if (this.stack.length < 1) {
+ error = true;
+ break;
+ }
+
+ subrNumber = this.stack.pop();
+
+ if (!subrs[subrNumber]) {
+ error = true;
+ break;
+ }
+
+ error = this.convert(subrs[subrNumber], subrs, seacAnalysisEnabled);
+ break;
+
+ case 11:
+ return error;
+
+ case 13:
+ if (this.stack.length < 2) {
+ error = true;
+ break;
+ }
+
+ wx = this.stack.pop();
+ sbx = this.stack.pop();
+ this.lsb = sbx;
+ this.width = wx;
+ this.stack.push(wx, sbx);
+ error = this.executeCommand(2, COMMAND_MAP.hmoveto);
+ break;
+
+ case 14:
+ this.output.push(COMMAND_MAP.endchar[0]);
+ break;
+
+ case 21:
+ if (this.flexing) {
+ break;
+ }
+
+ error = this.executeCommand(2, COMMAND_MAP.rmoveto);
+ break;
+
+ case 22:
+ if (this.flexing) {
+ this.stack.push(0);
+ break;
+ }
+
+ error = this.executeCommand(1, COMMAND_MAP.hmoveto);
+ break;
+
+ case 30:
+ error = this.executeCommand(4, COMMAND_MAP.vhcurveto);
+ break;
+
+ case 31:
+ error = this.executeCommand(4, COMMAND_MAP.hvcurveto);
+ break;
+
+ case (12 << 8) + 0:
+ this.stack = [];
+ break;
+
+ case (12 << 8) + 1:
+ if (!HINTING_ENABLED) {
+ this.stack = [];
+ break;
+ }
+
+ error = this.executeCommand(2, COMMAND_MAP.vstem);
+ break;
+
+ case (12 << 8) + 2:
+ if (!HINTING_ENABLED) {
+ this.stack = [];
+ break;
+ }
+
+ error = this.executeCommand(2, COMMAND_MAP.hstem);
+ break;
+
+ case (12 << 8) + 6:
+ if (seacAnalysisEnabled) {
+ this.seac = this.stack.splice(-4, 4);
+ error = this.executeCommand(0, COMMAND_MAP.endchar);
+ } else {
+ error = this.executeCommand(4, COMMAND_MAP.endchar);
+ }
+
+ break;
+
+ case (12 << 8) + 7:
+ if (this.stack.length < 4) {
+ error = true;
+ break;
+ }
+
+ this.stack.pop();
+ wx = this.stack.pop();
+ var sby = this.stack.pop();
+ sbx = this.stack.pop();
+ this.lsb = sbx;
+ this.width = wx;
+ this.stack.push(wx, sbx, sby);
+ error = this.executeCommand(3, COMMAND_MAP.rmoveto);
+ break;
+
+ case (12 << 8) + 12:
+ if (this.stack.length < 2) {
+ error = true;
+ break;
+ }
+
+ var num2 = this.stack.pop();
+ var num1 = this.stack.pop();
+ this.stack.push(num1 / num2);
+ break;
+
+ case (12 << 8) + 16:
+ if (this.stack.length < 2) {
+ error = true;
+ break;
+ }
+
+ subrNumber = this.stack.pop();
+ var numArgs = this.stack.pop();
+
+ if (subrNumber === 0 && numArgs === 3) {
+ var flexArgs = this.stack.splice(this.stack.length - 17, 17);
+ this.stack.push(flexArgs[2] + flexArgs[0], flexArgs[3] + flexArgs[1], flexArgs[4], flexArgs[5], flexArgs[6], flexArgs[7], flexArgs[8], flexArgs[9], flexArgs[10], flexArgs[11], flexArgs[12], flexArgs[13], flexArgs[14]);
+ error = this.executeCommand(13, COMMAND_MAP.flex, true);
+ this.flexing = false;
+ this.stack.push(flexArgs[15], flexArgs[16]);
+ } else if (subrNumber === 1 && numArgs === 0) {
+ this.flexing = true;
+ }
+
+ break;
+
+ case (12 << 8) + 17:
+ break;
+
+ case (12 << 8) + 33:
+ this.stack = [];
+ break;
+
+ default:
+ (0, _util.warn)('Unknown type 1 charstring command of "' + value + '"');
+ break;
+ }
+
+ if (error) {
+ break;
+ }
+
+ continue;
+ } else if (value <= 246) {
+ value = value - 139;
+ } else if (value <= 250) {
+ value = (value - 247) * 256 + encoded[++i] + 108;
+ } else if (value <= 254) {
+ value = -((value - 251) * 256) - encoded[++i] - 108;
+ } else {
+ value = (encoded[++i] & 0xff) << 24 | (encoded[++i] & 0xff) << 16 | (encoded[++i] & 0xff) << 8 | (encoded[++i] & 0xff) << 0;
+ }
+
+ this.stack.push(value);
+ }
+
+ return error;
+ },
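+ // Copy howManyArgs operands from the stack into the output using Type2 number encoding (28 = 16-bit int, 255 = 16.16 fixed), then append the command bytes.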
+ executeCommand: function executeCommand(howManyArgs, command, keepStack) {
+ var stackLength = this.stack.length;
+
+ if (howManyArgs > stackLength) {
+ return true;
+ }
+
+ var start = stackLength - howManyArgs;
+
+ for (var i = start; i < stackLength; i++) {
+ var value = this.stack[i];
+
+ if (Number.isInteger(value)) {
+ this.output.push(28, value >> 8 & 0xff, value & 0xff);
+ } else {
+ value = 65536 * value | 0;
+ this.output.push(255, value >> 24 & 0xFF, value >> 16 & 0xFF, value >> 8 & 0xFF, value & 0xFF);
+ }
+ }
+
+ this.output.push.apply(this.output, command);
+
+ if (keepStack) {
+ this.stack.splice(start, howManyArgs);
+ } else {
+ this.stack.length = 0;
+ }
+
+ return false;
+ }
+ };
+ return Type1CharString;
+}();
+
+var Type1Parser = function Type1ParserClosure() {
+ var EEXEC_ENCRYPT_KEY = 55665;
+ var CHAR_STRS_ENCRYPT_KEY = 4330;
+
+ function isHexDigit(code) {
+ return code >= 48 && code <= 57 || code >= 65 && code <= 70 || code >= 97 && code <= 102;
+ }
+
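+ // eexec decryption from the Type1 spec (c1 = 52845, c2 = 22719); the first discardNumber plaintext bytes are thrown away.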
+ function decrypt(data, key, discardNumber) {
+ if (discardNumber >= data.length) {
+ return new Uint8Array(0);
+ }
+
+ var r = key | 0,
+ c1 = 52845,
+ c2 = 22719,
+ i,
+ j;
+
+ for (i = 0; i < discardNumber; i++) {
+ r = (data[i] + r) * c1 + c2 & (1 << 16) - 1;
+ }
+
+ var count = data.length - discardNumber;
+ var decrypted = new Uint8Array(count);
+
+ for (i = discardNumber, j = 0; j < count; i++, j++) {
+ var value = data[i];
+ decrypted[j] = value ^ r >> 8;
+ r = (value + r) * c1 + c2 & (1 << 16) - 1;
+ }
+
+ return decrypted;
+ }
+
+ function decryptAscii(data, key, discardNumber) {
+ var r = key | 0,
+ c1 = 52845,
+ c2 = 22719;
+ var count = data.length,
+ maybeLength = count >>> 1;
+ var decrypted = new Uint8Array(maybeLength);
+ var i, j;
+
+ for (i = 0, j = 0; i < count; i++) {
+ var digit1 = data[i];
+
+ if (!isHexDigit(digit1)) {
+ continue;
+ }
+
+ i++;
+ var digit2;
+
+ while (i < count && !isHexDigit(digit2 = data[i])) {
+ i++;
+ }
+
+ if (i < count) {
+ var value = parseInt(String.fromCharCode(digit1, digit2), 16);
+ decrypted[j++] = value ^ r >> 8;
+ r = (value + r) * c1 + c2 & (1 << 16) - 1;
+ }
+ }
+
+ return Array.prototype.slice.call(decrypted, discardNumber, j);
+ }
+
+ function isSpecial(c) {
+ return c === 0x2F || c === 0x5B || c === 0x5D || c === 0x7B || c === 0x7D || c === 0x28 || c === 0x29;
+ }
+
+ function Type1Parser(stream, encrypted, seacAnalysisEnabled) {
+ if (encrypted) {
+ var data = stream.getBytes();
+ var isBinary = !(isHexDigit(data[0]) && isHexDigit(data[1]) && isHexDigit(data[2]) && isHexDigit(data[3]));
+ stream = new _stream.Stream(isBinary ? decrypt(data, EEXEC_ENCRYPT_KEY, 4) : decryptAscii(data, EEXEC_ENCRYPT_KEY, 4));
+ }
+
+ this.seacAnalysisEnabled = !!seacAnalysisEnabled;
+ this.stream = stream;
+ this.nextChar();
+ }
+
+ Type1Parser.prototype = {
+ readNumberArray: function Type1Parser_readNumberArray() {
+ this.getToken();
+ var array = [];
+
+ while (true) {
+ var token = this.getToken();
+
+ if (token === null || token === ']' || token === '}') {
+ break;
+ }
+
+ array.push(parseFloat(token || 0));
+ }
+
+ return array;
+ },
+ readNumber: function Type1Parser_readNumber() {
+ var token = this.getToken();
+ return parseFloat(token || 0);
+ },
+ readInt: function Type1Parser_readInt() {
+ var token = this.getToken();
+ return parseInt(token || 0, 10) | 0;
+ },
+ readBoolean: function Type1Parser_readBoolean() {
+ var token = this.getToken();
+ return token === 'true' ? 1 : 0;
+ },
+ nextChar: function Type1_nextChar() {
+ return this.currentChar = this.stream.getByte();
+ },
+ getToken: function Type1Parser_getToken() {
+ var comment = false;
+ var ch = this.currentChar;
+
+ while (true) {
+ if (ch === -1) {
+ return null;
+ }
+
+ if (comment) {
+ if (ch === 0x0A || ch === 0x0D) {
+ comment = false;
+ }
+ } else if (ch === 0x25) {
+ comment = true;
+ } else if (!(0, _util.isSpace)(ch)) {
+ break;
+ }
+
+ ch = this.nextChar();
+ }
+
+ if (isSpecial(ch)) {
+ this.nextChar();
+ return String.fromCharCode(ch);
+ }
+
+ var token = '';
+
+ do {
+ token += String.fromCharCode(ch);
+ ch = this.nextChar();
+ } while (ch >= 0 && !(0, _util.isSpace)(ch) && !isSpecial(ch));
+
+ return token;
+ },
+ readCharStrings: function Type1Parser_readCharStrings(bytes, lenIV) {
+ if (lenIV === -1) {
+ return bytes;
+ }
+
+ return decrypt(bytes, CHAR_STRS_ENCRYPT_KEY, lenIV);
+ },
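+ // Scan the decrypted private portion of the font: collect /Subrs and /CharStrings (each decrypted with key 4330 and lenIV discard bytes) along with hinting-related Private dict entries.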
+ extractFontProgram: function Type1Parser_extractFontProgram() {
+ var stream = this.stream;
+ var subrs = [],
+ charstrings = [];
+ var privateData = Object.create(null);
+ privateData['lenIV'] = 4;
+ var program = {
+ subrs: [],
+ charstrings: [],
+ properties: {
+ 'privateData': privateData
+ }
+ };
+ var token, length, data, lenIV, encoded;
+
+ while ((token = this.getToken()) !== null) {
+ if (token !== '/') {
+ continue;
+ }
+
+ token = this.getToken();
+
+ switch (token) {
+ case 'CharStrings':
+ this.getToken();
+ this.getToken();
+ this.getToken();
+ this.getToken();
+
+ while (true) {
+ token = this.getToken();
+
+ if (token === null || token === 'end') {
+ break;
+ }
+
+ if (token !== '/') {
+ continue;
+ }
+
+ var glyph = this.getToken();
+ length = this.readInt();
+ this.getToken();
+ data = length > 0 ? stream.getBytes(length) : new Uint8Array(0);
+ lenIV = program.properties.privateData['lenIV'];
+ encoded = this.readCharStrings(data, lenIV);
+ this.nextChar();
+ token = this.getToken();
+
+ if (token === 'noaccess') {
+ this.getToken();
+ }
+
+ charstrings.push({
+ glyph: glyph,
+ encoded: encoded
+ });
+ }
+
+ break;
+
+ case 'Subrs':
+ this.readInt();
+ this.getToken();
+
+ while (this.getToken() === 'dup') {
+ var index = this.readInt();
+ length = this.readInt();
+ this.getToken();
+ data = length > 0 ? stream.getBytes(length) : new Uint8Array(0);
+ lenIV = program.properties.privateData['lenIV'];
+ encoded = this.readCharStrings(data, lenIV);
+ this.nextChar();
+ token = this.getToken();
+
+ if (token === 'noaccess') {
+ this.getToken();
+ }
+
+ subrs[index] = encoded;
+ }
+
+ break;
+
+ case 'BlueValues':
+ case 'OtherBlues':
+ case 'FamilyBlues':
+ case 'FamilyOtherBlues':
+ var blueArray = this.readNumberArray();
+
+ if (blueArray.length > 0 && blueArray.length % 2 === 0 && HINTING_ENABLED) {
+ program.properties.privateData[token] = blueArray;
+ }
+
+ break;
+
+ case 'StemSnapH':
+ case 'StemSnapV':
+ program.properties.privateData[token] = this.readNumberArray();
+ break;
+
+ case 'StdHW':
+ case 'StdVW':
+ program.properties.privateData[token] = this.readNumberArray()[0];
+ break;
+
+ case 'BlueShift':
+ case 'lenIV':
+ case 'BlueFuzz':
+ case 'BlueScale':
+ case 'LanguageGroup':
+ case 'ExpansionFactor':
+ program.properties.privateData[token] = this.readNumber();
+ break;
+
+ case 'ForceBold':
+ program.properties.privateData[token] = this.readBoolean();
+ break;
+ }
+ }
+
+ for (var i = 0; i < charstrings.length; i++) {
+ glyph = charstrings[i].glyph;
+ encoded = charstrings[i].encoded;
+ var charString = new Type1CharString();
+ var error = charString.convert(encoded, subrs, this.seacAnalysisEnabled);
+ var output = charString.output;
+
+ if (error) {
+ output = [14];
+ }
+
+ program.charstrings.push({
+ glyphName: glyph,
+ charstring: output,
+ width: charString.width,
+ lsb: charString.lsb,
+ seac: charString.seac
+ });
+ }
+
+ return program;
+ },
+ extractFontHeader: function Type1Parser_extractFontHeader(properties) {
+ var token;
+
+ while ((token = this.getToken()) !== null) {
+ if (token !== '/') {
+ continue;
+ }
+
+ token = this.getToken();
+
+ switch (token) {
+ case 'FontMatrix':
+ var matrix = this.readNumberArray();
+ properties.fontMatrix = matrix;
+ break;
+
+ case 'Encoding':
+ var encodingArg = this.getToken();
+ var encoding;
+
+ if (!/^\d+$/.test(encodingArg)) {
+ encoding = (0, _encodings.getEncoding)(encodingArg);
+ } else {
+ encoding = [];
+ var size = parseInt(encodingArg, 10) | 0;
+ this.getToken();
+
+ for (var j = 0; j < size; j++) {
+ token = this.getToken();
+
+ while (token !== 'dup' && token !== 'def') {
+ token = this.getToken();
+
+ if (token === null) {
+ return;
+ }
+ }
+
+ if (token === 'def') {
+ break;
+ }
+
+ var index = this.readInt();
+ this.getToken();
+ var glyph = this.getToken();
+ encoding[index] = glyph;
+ this.getToken();
+ }
+ }
+
+ properties.builtInEncoding = encoding;
+ break;
+
+ case 'FontBBox':
+ var fontBBox = this.readNumberArray();
+ properties.ascent = Math.max(fontBBox[3], fontBBox[1]);
+ properties.descent = Math.min(fontBBox[1], fontBBox[3]);
+ properties.ascentScaled = true;
+ break;
+ }
+ }
+ }
+ };
+ return Type1Parser;
+}();
+
+exports.Type1Parser = Type1Parser;
+
+/***/ }),
+/* 183 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.getTilingPatternIR = getTilingPatternIR;
+exports.Pattern = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _colorspace = __w_pdfjs_require__(169);
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _core_utils = __w_pdfjs_require__(154);
+
+var ShadingType = {
+ FUNCTION_BASED: 1,
+ AXIAL: 2,
+ RADIAL: 3,
+ FREE_FORM_MESH: 4,
+ LATTICE_FORM_MESH: 5,
+ COONS_PATCH_MESH: 6,
+ TENSOR_PATCH_MESH: 7
+};
+
+var Pattern = function PatternClosure() {
+ function Pattern() {
+ (0, _util.unreachable)('should not call Pattern constructor');
+ }
+
+ Pattern.prototype = {
+ getPattern: function Pattern_getPattern(ctx) {
+ (0, _util.unreachable)("Should not call Pattern.getStyle: ".concat(ctx));
+ }
+ };
+
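+ // Dispatch on /ShadingType: 2 (axial) and 3 (radial) become RadialAxial, 4-7 are mesh shadings; unsupported or broken shadings fall back to a Dummy pattern.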
+ Pattern.parseShading = function (shading, matrix, xref, res, handler, pdfFunctionFactory) {
+ var dict = (0, _primitives.isStream)(shading) ? shading.dict : shading;
+ var type = dict.get('ShadingType');
+
+ try {
+ switch (type) {
+ case ShadingType.AXIAL:
+ case ShadingType.RADIAL:
+ return new Shadings.RadialAxial(dict, matrix, xref, res, pdfFunctionFactory);
+
+ case ShadingType.FREE_FORM_MESH:
+ case ShadingType.LATTICE_FORM_MESH:
+ case ShadingType.COONS_PATCH_MESH:
+ case ShadingType.TENSOR_PATCH_MESH:
+ return new Shadings.Mesh(shading, matrix, xref, res, pdfFunctionFactory);
+
+ default:
+ throw new _util.FormatError('Unsupported ShadingType: ' + type);
+ }
+ } catch (ex) {
+ if (ex instanceof _core_utils.MissingDataException) {
+ throw ex;
+ }
+
+ handler.send('UnsupportedFeature', {
+ featureId: _util.UNSUPPORTED_FEATURES.shadingPattern
+ });
+ (0, _util.warn)(ex);
+ return new Shadings.Dummy();
+ }
+ };
+
+ return Pattern;
+}();
+
+exports.Pattern = Pattern;
+var Shadings = {};
+Shadings.SMALL_NUMBER = 1e-6;
+
+Shadings.RadialAxial = function RadialAxialClosure() {
+ function RadialAxial(dict, matrix, xref, res, pdfFunctionFactory) {
+ this.matrix = matrix;
+ this.coordsArr = dict.getArray('Coords');
+ this.shadingType = dict.get('ShadingType');
+ this.type = 'Pattern';
+ var cs = dict.get('ColorSpace', 'CS');
+ cs = _colorspace.ColorSpace.parse(cs, xref, res, pdfFunctionFactory);
+ this.cs = cs;
+ var t0 = 0.0,
+ t1 = 1.0;
+
+ if (dict.has('Domain')) {
+ var domainArr = dict.getArray('Domain');
+ t0 = domainArr[0];
+ t1 = domainArr[1];
+ }
+
+ var extendStart = false,
+ extendEnd = false;
+
+ if (dict.has('Extend')) {
+ var extendArr = dict.getArray('Extend');
+ extendStart = extendArr[0];
+ extendEnd = extendArr[1];
+ }
+
+ if (this.shadingType === ShadingType.RADIAL && (!extendStart || !extendEnd)) {
+ var x1 = this.coordsArr[0];
+ var y1 = this.coordsArr[1];
+ var r1 = this.coordsArr[2];
+ var x2 = this.coordsArr[3];
+ var y2 = this.coordsArr[4];
+ var r2 = this.coordsArr[5];
+ var distance = Math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2));
+
+ if (r1 <= r2 + distance && r2 <= r1 + distance) {
+ (0, _util.warn)('Unsupported radial gradient.');
+ }
+ }
+
+ this.extendStart = extendStart;
+ this.extendEnd = extendEnd;
+ var fnObj = dict.get('Function');
+ var fn = pdfFunctionFactory.createFromArray(fnObj);
+ var diff = t1 - t0;
+ var step = diff / 10;
+ var colorStops = this.colorStops = [];
+
+ if (t0 >= t1 || step <= 0) {
+ (0, _util.info)('Bad shading domain.');
+ return;
+ }
+
+ var color = new Float32Array(cs.numComps),
+ ratio = new Float32Array(1);
+ var rgbColor;
+
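+ // Sample the shading function across [t0, t1] in steps of (t1 - t0) / 10 to build CSS color stops.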
+ for (var i = t0; i <= t1; i += step) {
+ ratio[0] = i;
+ fn(ratio, 0, color, 0);
+ rgbColor = cs.getRgb(color, 0);
+
+ var cssColor = _util.Util.makeCssRgb(rgbColor[0], rgbColor[1], rgbColor[2]);
+
+ colorStops.push([(i - t0) / diff, cssColor]);
+ }
+
+ var background = 'transparent';
+
+ if (dict.has('Background')) {
+ rgbColor = cs.getRgb(dict.get('Background'), 0);
+ background = _util.Util.makeCssRgb(rgbColor[0], rgbColor[1], rgbColor[2]);
+ }
+
+ if (!extendStart) {
+ colorStops.unshift([0, background]);
+ colorStops[1][0] += Shadings.SMALL_NUMBER;
+ }
+
+ if (!extendEnd) {
+ colorStops[colorStops.length - 1][0] -= Shadings.SMALL_NUMBER;
+ colorStops.push([1, background]);
+ }
+
+ this.colorStops = colorStops;
+ }
+
+ RadialAxial.prototype = {
+ getIR: function RadialAxial_getIR() {
+ var coordsArr = this.coordsArr;
+ var shadingType = this.shadingType;
+ var type, p0, p1, r0, r1;
+
+ if (shadingType === ShadingType.AXIAL) {
+ p0 = [coordsArr[0], coordsArr[1]];
+ p1 = [coordsArr[2], coordsArr[3]];
+ r0 = null;
+ r1 = null;
+ type = 'axial';
+ } else if (shadingType === ShadingType.RADIAL) {
+ p0 = [coordsArr[0], coordsArr[1]];
+ p1 = [coordsArr[3], coordsArr[4]];
+ r0 = coordsArr[2];
+ r1 = coordsArr[5];
+ type = 'radial';
+ } else {
+ (0, _util.unreachable)("getPattern type unknown: ".concat(shadingType));
+ }
+
+ var matrix = this.matrix;
+
+ if (matrix) {
+ p0 = _util.Util.applyTransform(p0, matrix);
+ p1 = _util.Util.applyTransform(p1, matrix);
+
+ if (shadingType === ShadingType.RADIAL) {
+ var scale = _util.Util.singularValueDecompose2dScale(matrix);
+
+ r0 *= scale[0];
+ r1 *= scale[1];
+ }
+ }
+
+ return ['RadialAxial', type, this.colorStops, p0, p1, r0, r1];
+ }
+ };
+ return RadialAxial;
+}();
+
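+// Mesh shadings (types 4-7): decode the stream into flat coordinate/color arrays and triangle, lattice or patch figures.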
+Shadings.Mesh = function MeshClosure() {
+ function MeshStreamReader(stream, context) {
+ this.stream = stream;
+ this.context = context;
+ this.buffer = 0;
+ this.bufferLength = 0;
+ var numComps = context.numComps;
+ this.tmpCompsBuf = new Float32Array(numComps);
+ var csNumComps = context.colorSpace.numComps;
+ this.tmpCsCompsBuf = context.colorFn ? new Float32Array(csNumComps) : this.tmpCompsBuf;
+ }
+
+ MeshStreamReader.prototype = {
+ get hasData() {
+ if (this.stream.end) {
+ return this.stream.pos < this.stream.end;
+ }
+
+ if (this.bufferLength > 0) {
+ return true;
+ }
+
+ var nextByte = this.stream.getByte();
+
+ if (nextByte < 0) {
+ return false;
+ }
+
+ this.buffer = nextByte;
+ this.bufferLength = 8;
+ return true;
+ },
+
+ readBits: function MeshStreamReader_readBits(n) {
+ var buffer = this.buffer;
+ var bufferLength = this.bufferLength;
+
+ if (n === 32) {
+ if (bufferLength === 0) {
+ return (this.stream.getByte() << 24 | this.stream.getByte() << 16 | this.stream.getByte() << 8 | this.stream.getByte()) >>> 0;
+ }
+
+ buffer = buffer << 24 | this.stream.getByte() << 16 | this.stream.getByte() << 8 | this.stream.getByte();
+ var nextByte = this.stream.getByte();
+ this.buffer = nextByte & (1 << bufferLength) - 1;
+ return (buffer << 8 - bufferLength | (nextByte & 0xFF) >> bufferLength) >>> 0;
+ }
+
+ if (n === 8 && bufferLength === 0) {
+ return this.stream.getByte();
+ }
+
+ while (bufferLength < n) {
+ buffer = buffer << 8 | this.stream.getByte();
+ bufferLength += 8;
+ }
+
+ bufferLength -= n;
+ this.bufferLength = bufferLength;
+ this.buffer = buffer & (1 << bufferLength) - 1;
+ return buffer >> bufferLength;
+ },
+ align: function MeshStreamReader_align() {
+ this.buffer = 0;
+ this.bufferLength = 0;
+ },
+ readFlag: function MeshStreamReader_readFlag() {
+ return this.readBits(this.context.bitsPerFlag);
+ },
+ readCoordinate: function MeshStreamReader_readCoordinate() {
+ var bitsPerCoordinate = this.context.bitsPerCoordinate;
+ var xi = this.readBits(bitsPerCoordinate);
+ var yi = this.readBits(bitsPerCoordinate);
+ var decode = this.context.decode;
+ var scale = bitsPerCoordinate < 32 ? 1 / ((1 << bitsPerCoordinate) - 1) : 2.3283064365386963e-10;
+ return [xi * scale * (decode[1] - decode[0]) + decode[0], yi * scale * (decode[3] - decode[2]) + decode[2]];
+ },
+ readComponents: function MeshStreamReader_readComponents() {
+ var numComps = this.context.numComps;
+ var bitsPerComponent = this.context.bitsPerComponent;
+ var scale = bitsPerComponent < 32 ? 1 / ((1 << bitsPerComponent) - 1) : 2.3283064365386963e-10;
+ var decode = this.context.decode;
+ var components = this.tmpCompsBuf;
+
+ for (var i = 0, j = 4; i < numComps; i++, j += 2) {
+ var ci = this.readBits(bitsPerComponent);
+ components[i] = ci * scale * (decode[j + 1] - decode[j]) + decode[j];
+ }
+
+ var color = this.tmpCsCompsBuf;
+
+ if (this.context.colorFn) {
+ this.context.colorFn(components, 0, color, 0);
+ }
+
+ return this.context.colorSpace.getRgb(color, 0);
+ }
+ };
+
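+ // Type 4 (free-form triangle) mesh: flag 0 starts a new triangle, flags 1 and 2 reuse two vertices of the previous one.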
+ function decodeType4Shading(mesh, reader) {
+ var coords = mesh.coords;
+ var colors = mesh.colors;
+ var operators = [];
+ var ps = [];
+ var verticesLeft = 0;
+
+ while (reader.hasData) {
+ var f = reader.readFlag();
+ var coord = reader.readCoordinate();
+ var color = reader.readComponents();
+
+ if (verticesLeft === 0) {
+ if (!(0 <= f && f <= 2)) {
+ throw new _util.FormatError('Unknown type4 flag');
+ }
+
+ switch (f) {
+ case 0:
+ verticesLeft = 3;
+ break;
+
+ case 1:
+ ps.push(ps[ps.length - 2], ps[ps.length - 1]);
+ verticesLeft = 1;
+ break;
+
+ case 2:
+ ps.push(ps[ps.length - 3], ps[ps.length - 1]);
+ verticesLeft = 1;
+ break;
+ }
+
+ operators.push(f);
+ }
+
+ ps.push(coords.length);
+ coords.push(coord);
+ colors.push(color);
+ verticesLeft--;
+ reader.align();
+ }
+
+ mesh.figures.push({
+ type: 'triangles',
+ coords: new Int32Array(ps),
+ colors: new Int32Array(ps)
+ });
+ }
+
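+ // Type 5 (lattice-form) mesh: a plain grid of vertices, verticesPerRow per row.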
+ function decodeType5Shading(mesh, reader, verticesPerRow) {
+ var coords = mesh.coords;
+ var colors = mesh.colors;
+ var ps = [];
+
+ while (reader.hasData) {
+ var coord = reader.readCoordinate();
+ var color = reader.readComponents();
+ ps.push(coords.length);
+ coords.push(coord);
+ colors.push(color);
+ }
+
+ mesh.figures.push({
+ type: 'lattice',
+ coords: new Int32Array(ps),
+ colors: new Int32Array(ps),
+ verticesPerRow: verticesPerRow
+ });
+ }
+
+ var MIN_SPLIT_PATCH_CHUNKS_AMOUNT = 3;
+ var MAX_SPLIT_PATCH_CHUNKS_AMOUNT = 20;
+ var TRIANGLE_DENSITY = 20;
+
+ var getB = function getBClosure() {
+ function buildB(count) {
+ var lut = [];
+
+ for (var i = 0; i <= count; i++) {
+ var t = i / count,
+ t_ = 1 - t;
+ lut.push(new Float32Array([t_ * t_ * t_, 3 * t * t_ * t_, 3 * t * t * t_, t * t * t]));
+ }
+
+ return lut;
+ }
+
+ var cache = [];
+ return function getB(count) {
+ if (!cache[count]) {
+ cache[count] = buildB(count);
+ }
+
+ return cache[count];
+ };
+ }();
+
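+ // Tessellate a patch figure into a lattice: evaluate the cubic Bezier surface on a grid (using the Bernstein lookup tables from getB) and interpolate the four corner colors.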
+ function buildFigureFromPatch(mesh, index) {
+ var figure = mesh.figures[index];
+ (0, _util.assert)(figure.type === 'patch', 'Unexpected patch mesh figure');
+ var coords = mesh.coords,
+ colors = mesh.colors;
+ var pi = figure.coords;
+ var ci = figure.colors;
+ var figureMinX = Math.min(coords[pi[0]][0], coords[pi[3]][0], coords[pi[12]][0], coords[pi[15]][0]);
+ var figureMinY = Math.min(coords[pi[0]][1], coords[pi[3]][1], coords[pi[12]][1], coords[pi[15]][1]);
+ var figureMaxX = Math.max(coords[pi[0]][0], coords[pi[3]][0], coords[pi[12]][0], coords[pi[15]][0]);
+ var figureMaxY = Math.max(coords[pi[0]][1], coords[pi[3]][1], coords[pi[12]][1], coords[pi[15]][1]);
+ var splitXBy = Math.ceil((figureMaxX - figureMinX) * TRIANGLE_DENSITY / (mesh.bounds[2] - mesh.bounds[0]));
+ splitXBy = Math.max(MIN_SPLIT_PATCH_CHUNKS_AMOUNT, Math.min(MAX_SPLIT_PATCH_CHUNKS_AMOUNT, splitXBy));
+ var splitYBy = Math.ceil((figureMaxY - figureMinY) * TRIANGLE_DENSITY / (mesh.bounds[3] - mesh.bounds[1]));
+ splitYBy = Math.max(MIN_SPLIT_PATCH_CHUNKS_AMOUNT, Math.min(MAX_SPLIT_PATCH_CHUNKS_AMOUNT, splitYBy));
+ var verticesPerRow = splitXBy + 1;
+ var figureCoords = new Int32Array((splitYBy + 1) * verticesPerRow);
+ var figureColors = new Int32Array((splitYBy + 1) * verticesPerRow);
+ var k = 0;
+ var cl = new Uint8Array(3),
+ cr = new Uint8Array(3);
+ var c0 = colors[ci[0]],
+ c1 = colors[ci[1]],
+ c2 = colors[ci[2]],
+ c3 = colors[ci[3]];
+ var bRow = getB(splitYBy),
+ bCol = getB(splitXBy);
+
+ for (var row = 0; row <= splitYBy; row++) {
+ cl[0] = (c0[0] * (splitYBy - row) + c2[0] * row) / splitYBy | 0;
+ cl[1] = (c0[1] * (splitYBy - row) + c2[1] * row) / splitYBy | 0;
+ cl[2] = (c0[2] * (splitYBy - row) + c2[2] * row) / splitYBy | 0;
+ cr[0] = (c1[0] * (splitYBy - row) + c3[0] * row) / splitYBy | 0;
+ cr[1] = (c1[1] * (splitYBy - row) + c3[1] * row) / splitYBy | 0;
+ cr[2] = (c1[2] * (splitYBy - row) + c3[2] * row) / splitYBy | 0;
+
+ for (var col = 0; col <= splitXBy; col++, k++) {
+ if ((row === 0 || row === splitYBy) && (col === 0 || col === splitXBy)) {
+ continue;
+ }
+
+ var x = 0,
+ y = 0;
+ var q = 0;
+
+ for (var i = 0; i <= 3; i++) {
+ for (var j = 0; j <= 3; j++, q++) {
+ var m = bRow[row][i] * bCol[col][j];
+ x += coords[pi[q]][0] * m;
+ y += coords[pi[q]][1] * m;
+ }
+ }
+
+ figureCoords[k] = coords.length;
+ coords.push([x, y]);
+ figureColors[k] = colors.length;
+ var newColor = new Uint8Array(3);
+ newColor[0] = (cl[0] * (splitXBy - col) + cr[0] * col) / splitXBy | 0;
+ newColor[1] = (cl[1] * (splitXBy - col) + cr[1] * col) / splitXBy | 0;
+ newColor[2] = (cl[2] * (splitXBy - col) + cr[2] * col) / splitXBy | 0;
+ colors.push(newColor);
+ }
+ }
+
+ figureCoords[0] = pi[0];
+ figureColors[0] = ci[0];
+ figureCoords[splitXBy] = pi[3];
+ figureColors[splitXBy] = ci[1];
+ figureCoords[verticesPerRow * splitYBy] = pi[12];
+ figureColors[verticesPerRow * splitYBy] = ci[2];
+ figureCoords[verticesPerRow * splitYBy + splitXBy] = pi[15];
+ figureColors[verticesPerRow * splitYBy + splitXBy] = ci[3];
+ mesh.figures[index] = {
+ type: 'lattice',
+ coords: figureCoords,
+ colors: figureColors,
+ verticesPerRow: verticesPerRow
+ };
+ }
+
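+ // Type 6 (Coons patch) mesh: flag 0 reads 12 boundary control points and 4 corner colors, flags 1-3 reuse an edge of the previous patch; the 4 interior control points are derived afterwards.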
+ function decodeType6Shading(mesh, reader) {
+ var coords = mesh.coords;
+ var colors = mesh.colors;
+ var ps = new Int32Array(16);
+ var cs = new Int32Array(4);
+
+ while (reader.hasData) {
+ var f = reader.readFlag();
+
+ if (!(0 <= f && f <= 3)) {
+ throw new _util.FormatError('Unknown type6 flag');
+ }
+
+ var i, ii;
+ var pi = coords.length;
+
+ for (i = 0, ii = f !== 0 ? 8 : 12; i < ii; i++) {
+ coords.push(reader.readCoordinate());
+ }
+
+ var ci = colors.length;
+
+ for (i = 0, ii = f !== 0 ? 2 : 4; i < ii; i++) {
+ colors.push(reader.readComponents());
+ }
+
+ var tmp1, tmp2, tmp3, tmp4;
+
+ switch (f) {
+ case 0:
+ ps[12] = pi + 3;
+ ps[13] = pi + 4;
+ ps[14] = pi + 5;
+ ps[15] = pi + 6;
+ ps[8] = pi + 2;
+ ps[11] = pi + 7;
+ ps[4] = pi + 1;
+ ps[7] = pi + 8;
+ ps[0] = pi;
+ ps[1] = pi + 11;
+ ps[2] = pi + 10;
+ ps[3] = pi + 9;
+ cs[2] = ci + 1;
+ cs[3] = ci + 2;
+ cs[0] = ci;
+ cs[1] = ci + 3;
+ break;
+
+ case 1:
+ tmp1 = ps[12];
+ tmp2 = ps[13];
+ tmp3 = ps[14];
+ tmp4 = ps[15];
+ ps[12] = tmp4;
+ ps[13] = pi + 0;
+ ps[14] = pi + 1;
+ ps[15] = pi + 2;
+ ps[8] = tmp3;
+ ps[11] = pi + 3;
+ ps[4] = tmp2;
+ ps[7] = pi + 4;
+ ps[0] = tmp1;
+ ps[1] = pi + 7;
+ ps[2] = pi + 6;
+ ps[3] = pi + 5;
+ tmp1 = cs[2];
+ tmp2 = cs[3];
+ cs[2] = tmp2;
+ cs[3] = ci;
+ cs[0] = tmp1;
+ cs[1] = ci + 1;
+ break;
+
+ case 2:
+ tmp1 = ps[15];
+ tmp2 = ps[11];
+ ps[12] = ps[3];
+ ps[13] = pi + 0;
+ ps[14] = pi + 1;
+ ps[15] = pi + 2;
+ ps[8] = ps[7];
+ ps[11] = pi + 3;
+ ps[4] = tmp2;
+ ps[7] = pi + 4;
+ ps[0] = tmp1;
+ ps[1] = pi + 7;
+ ps[2] = pi + 6;
+ ps[3] = pi + 5;
+ tmp1 = cs[3];
+ cs[2] = cs[1];
+ cs[3] = ci;
+ cs[0] = tmp1;
+ cs[1] = ci + 1;
+ break;
+
+ case 3:
+ ps[12] = ps[0];
+ ps[13] = pi + 0;
+ ps[14] = pi + 1;
+ ps[15] = pi + 2;
+ ps[8] = ps[1];
+ ps[11] = pi + 3;
+ ps[4] = ps[2];
+ ps[7] = pi + 4;
+ ps[0] = ps[3];
+ ps[1] = pi + 7;
+ ps[2] = pi + 6;
+ ps[3] = pi + 5;
+ cs[2] = cs[0];
+ cs[3] = ci;
+ cs[0] = cs[1];
+ cs[1] = ci + 1;
+ break;
+ }
+
+ ps[5] = coords.length;
+ coords.push([(-4 * coords[ps[0]][0] - coords[ps[15]][0] + 6 * (coords[ps[4]][0] + coords[ps[1]][0]) - 2 * (coords[ps[12]][0] + coords[ps[3]][0]) + 3 * (coords[ps[13]][0] + coords[ps[7]][0])) / 9, (-4 * coords[ps[0]][1] - coords[ps[15]][1] + 6 * (coords[ps[4]][1] + coords[ps[1]][1]) - 2 * (coords[ps[12]][1] + coords[ps[3]][1]) + 3 * (coords[ps[13]][1] + coords[ps[7]][1])) / 9]);
+ ps[6] = coords.length;
+ coords.push([(-4 * coords[ps[3]][0] - coords[ps[12]][0] + 6 * (coords[ps[2]][0] + coords[ps[7]][0]) - 2 * (coords[ps[0]][0] + coords[ps[15]][0]) + 3 * (coords[ps[4]][0] + coords[ps[14]][0])) / 9, (-4 * coords[ps[3]][1] - coords[ps[12]][1] + 6 * (coords[ps[2]][1] + coords[ps[7]][1]) - 2 * (coords[ps[0]][1] + coords[ps[15]][1]) + 3 * (coords[ps[4]][1] + coords[ps[14]][1])) / 9]);
+ ps[9] = coords.length;
+ coords.push([(-4 * coords[ps[12]][0] - coords[ps[3]][0] + 6 * (coords[ps[8]][0] + coords[ps[13]][0]) - 2 * (coords[ps[0]][0] + coords[ps[15]][0]) + 3 * (coords[ps[11]][0] + coords[ps[1]][0])) / 9, (-4 * coords[ps[12]][1] - coords[ps[3]][1] + 6 * (coords[ps[8]][1] + coords[ps[13]][1]) - 2 * (coords[ps[0]][1] + coords[ps[15]][1]) + 3 * (coords[ps[11]][1] + coords[ps[1]][1])) / 9]);
+ ps[10] = coords.length;
+ coords.push([(-4 * coords[ps[15]][0] - coords[ps[0]][0] + 6 * (coords[ps[11]][0] + coords[ps[14]][0]) - 2 * (coords[ps[12]][0] + coords[ps[3]][0]) + 3 * (coords[ps[2]][0] + coords[ps[8]][0])) / 9, (-4 * coords[ps[15]][1] - coords[ps[0]][1] + 6 * (coords[ps[11]][1] + coords[ps[14]][1]) - 2 * (coords[ps[12]][1] + coords[ps[3]][1]) + 3 * (coords[ps[2]][1] + coords[ps[8]][1])) / 9]);
+ mesh.figures.push({
+ type: 'patch',
+ coords: new Int32Array(ps),
+ colors: new Int32Array(cs)
+ });
+ }
+ }
+
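+ // Type 7 (tensor-product patch) mesh: like type 6 but all 16 control points are stored explicitly.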
+ function decodeType7Shading(mesh, reader) {
+ var coords = mesh.coords;
+ var colors = mesh.colors;
+ var ps = new Int32Array(16);
+ var cs = new Int32Array(4);
+
+ while (reader.hasData) {
+ var f = reader.readFlag();
+
+ if (!(0 <= f && f <= 3)) {
+ throw new _util.FormatError('Unknown type7 flag');
+ }
+
+ var i, ii;
+ var pi = coords.length;
+
+ for (i = 0, ii = f !== 0 ? 12 : 16; i < ii; i++) {
+ coords.push(reader.readCoordinate());
+ }
+
+ var ci = colors.length;
+
+ for (i = 0, ii = f !== 0 ? 2 : 4; i < ii; i++) {
+ colors.push(reader.readComponents());
+ }
+
+ var tmp1, tmp2, tmp3, tmp4;
+
+ switch (f) {
+ case 0:
+ ps[12] = pi + 3;
+ ps[13] = pi + 4;
+ ps[14] = pi + 5;
+ ps[15] = pi + 6;
+ ps[8] = pi + 2;
+ ps[9] = pi + 13;
+ ps[10] = pi + 14;
+ ps[11] = pi + 7;
+ ps[4] = pi + 1;
+ ps[5] = pi + 12;
+ ps[6] = pi + 15;
+ ps[7] = pi + 8;
+ ps[0] = pi;
+ ps[1] = pi + 11;
+ ps[2] = pi + 10;
+ ps[3] = pi + 9;
+ cs[2] = ci + 1;
+ cs[3] = ci + 2;
+ cs[0] = ci;
+ cs[1] = ci + 3;
+ break;
+
+ case 1:
+ tmp1 = ps[12];
+ tmp2 = ps[13];
+ tmp3 = ps[14];
+ tmp4 = ps[15];
+ ps[12] = tmp4;
+ ps[13] = pi + 0;
+ ps[14] = pi + 1;
+ ps[15] = pi + 2;
+ ps[8] = tmp3;
+ ps[9] = pi + 9;
+ ps[10] = pi + 10;
+ ps[11] = pi + 3;
+ ps[4] = tmp2;
+ ps[5] = pi + 8;
+ ps[6] = pi + 11;
+ ps[7] = pi + 4;
+ ps[0] = tmp1;
+ ps[1] = pi + 7;
+ ps[2] = pi + 6;
+ ps[3] = pi + 5;
+ tmp1 = cs[2];
+ tmp2 = cs[3];
+ cs[2] = tmp2;
+ cs[3] = ci;
+ cs[0] = tmp1;
+ cs[1] = ci + 1;
+ break;
+
+ case 2:
+ tmp1 = ps[15];
+ tmp2 = ps[11];
+ ps[12] = ps[3];
+ ps[13] = pi + 0;
+ ps[14] = pi + 1;
+ ps[15] = pi + 2;
+ ps[8] = ps[7];
+ ps[9] = pi + 9;
+ ps[10] = pi + 10;
+ ps[11] = pi + 3;
+ ps[4] = tmp2;
+ ps[5] = pi + 8;
+ ps[6] = pi + 11;
+ ps[7] = pi + 4;
+ ps[0] = tmp1;
+ ps[1] = pi + 7;
+ ps[2] = pi + 6;
+ ps[3] = pi + 5;
+ tmp1 = cs[3];
+ cs[2] = cs[1];
+ cs[3] = ci;
+ cs[0] = tmp1;
+ cs[1] = ci + 1;
+ break;
+
+ case 3:
+ ps[12] = ps[0];
+ ps[13] = pi + 0;
+ ps[14] = pi + 1;
+ ps[15] = pi + 2;
+ ps[8] = ps[1];
+ ps[9] = pi + 9;
+ ps[10] = pi + 10;
+ ps[11] = pi + 3;
+ ps[4] = ps[2];
+ ps[5] = pi + 8;
+ ps[6] = pi + 11;
+ ps[7] = pi + 4;
+ ps[0] = ps[3];
+ ps[1] = pi + 7;
+ ps[2] = pi + 6;
+ ps[3] = pi + 5;
+ cs[2] = cs[0];
+ cs[3] = ci;
+ cs[0] = cs[1];
+ cs[1] = ci + 1;
+ break;
+ }
+
+ mesh.figures.push({
+ type: 'patch',
+ coords: new Int32Array(ps),
+ colors: new Int32Array(cs)
+ });
+ }
+ }
+
+ function updateBounds(mesh) {
+ var minX = mesh.coords[0][0],
+ minY = mesh.coords[0][1],
+ maxX = minX,
+ maxY = minY;
+
+ for (var i = 1, ii = mesh.coords.length; i < ii; i++) {
+ var x = mesh.coords[i][0],
+ y = mesh.coords[i][1];
+ minX = minX > x ? x : minX;
+ minY = minY > y ? y : minY;
+ maxX = maxX < x ? x : maxX;
+ maxY = maxY < y ? y : maxY;
+ }
+
+ mesh.bounds = [minX, minY, maxX, maxY];
+ }
+
+ function packData(mesh) {
+ var i, ii, j, jj;
+ var coords = mesh.coords;
+ var coordsPacked = new Float32Array(coords.length * 2);
+
+ for (i = 0, j = 0, ii = coords.length; i < ii; i++) {
+ var xy = coords[i];
+ coordsPacked[j++] = xy[0];
+ coordsPacked[j++] = xy[1];
+ }
+
+ mesh.coords = coordsPacked;
+ var colors = mesh.colors;
+ var colorsPacked = new Uint8Array(colors.length * 3);
+
+ for (i = 0, j = 0, ii = colors.length; i < ii; i++) {
+ var c = colors[i];
+ colorsPacked[j++] = c[0];
+ colorsPacked[j++] = c[1];
+ colorsPacked[j++] = c[2];
+ }
+
+ mesh.colors = colorsPacked;
+ var figures = mesh.figures;
+
+ for (i = 0, ii = figures.length; i < ii; i++) {
+ var figure = figures[i],
+ ps = figure.coords,
+ cs = figure.colors;
+
+ for (j = 0, jj = ps.length; j < jj; j++) {
+ ps[j] *= 2;
+ cs[j] *= 3;
+ }
+ }
+ }
+
+ function Mesh(stream, matrix, xref, res, pdfFunctionFactory) {
+ if (!(0, _primitives.isStream)(stream)) {
+ throw new _util.FormatError('Mesh data is not a stream');
+ }
+
+ var dict = stream.dict;
+ this.matrix = matrix;
+ this.shadingType = dict.get('ShadingType');
+ this.type = 'Pattern';
+ this.bbox = dict.getArray('BBox');
+ var cs = dict.get('ColorSpace', 'CS');
+ cs = _colorspace.ColorSpace.parse(cs, xref, res, pdfFunctionFactory);
+ this.cs = cs;
+ this.background = dict.has('Background') ? cs.getRgb(dict.get('Background'), 0) : null;
+ var fnObj = dict.get('Function');
+ var fn = fnObj ? pdfFunctionFactory.createFromArray(fnObj) : null;
+ this.coords = [];
+ this.colors = [];
+ this.figures = [];
+ var decodeContext = {
+ bitsPerCoordinate: dict.get('BitsPerCoordinate'),
+ bitsPerComponent: dict.get('BitsPerComponent'),
+ bitsPerFlag: dict.get('BitsPerFlag'),
+ decode: dict.getArray('Decode'),
+ colorFn: fn,
+ colorSpace: cs,
+ numComps: fn ? 1 : cs.numComps
+ };
+ var reader = new MeshStreamReader(stream, decodeContext);
+ var patchMesh = false;
+
+ switch (this.shadingType) {
+ case ShadingType.FREE_FORM_MESH:
+ decodeType4Shading(this, reader);
+ break;
+
+ case ShadingType.LATTICE_FORM_MESH:
+ var verticesPerRow = dict.get('VerticesPerRow') | 0;
+
+ if (verticesPerRow < 2) {
+ throw new _util.FormatError('Invalid VerticesPerRow');
+ }
+
+ decodeType5Shading(this, reader, verticesPerRow);
+ break;
+
+ case ShadingType.COONS_PATCH_MESH:
+ decodeType6Shading(this, reader);
+ patchMesh = true;
+ break;
+
+ case ShadingType.TENSOR_PATCH_MESH:
+ decodeType7Shading(this, reader);
+ patchMesh = true;
+ break;
+
+ default:
+ (0, _util.unreachable)('Unsupported mesh type.');
+ break;
+ }
+
+ if (patchMesh) {
+ updateBounds(this);
+
+ for (var i = 0, ii = this.figures.length; i < ii; i++) {
+ buildFigureFromPatch(this, i);
+ }
+ }
+
+ updateBounds(this);
+ packData(this);
+ }
+
+ Mesh.prototype = {
+ getIR: function Mesh_getIR() {
+ return ['Mesh', this.shadingType, this.coords, this.colors, this.figures, this.bounds, this.matrix, this.bbox, this.background];
+ }
+ };
+ return Mesh;
+}();
+
+Shadings.Dummy = function DummyClosure() {
+ function Dummy() {
+ this.type = 'Pattern';
+ }
+
+ Dummy.prototype = {
+ getIR: function Dummy_getIR() {
+ return ['Dummy'];
+ }
+ };
+ return Dummy;
+}();
+
+function getTilingPatternIR(operatorList, dict, args) {
+ var matrix = dict.getArray('Matrix');
+
+ var bbox = _util.Util.normalizeRect(dict.getArray('BBox'));
+
+ var xstep = dict.get('XStep');
+ var ystep = dict.get('YStep');
+ var paintType = dict.get('PaintType');
+ var tilingType = dict.get('TilingType');
+
+ if (bbox[2] - bbox[0] === 0 || bbox[3] - bbox[1] === 0) {
+ throw new _util.FormatError("Invalid getTilingPatternIR /BBox array: [".concat(bbox, "]."));
+ }
+
+ return ['TilingPattern', args, operatorList, matrix, bbox, xstep, ystep, paintType, tilingType];
+}
+
+/***/ }),
+/* 184 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.bidi = bidi;
+
+var _util = __w_pdfjs_require__(5);
+
+var baseTypes = ['BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'S', 'B', 'S', 'WS', 'B', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'B', 'B', 'B', 'S', 'WS', 'ON', 'ON', 'ET', 'ET', 'ET', 'ON', 'ON', 'ON', 'ON', 'ON', 'ES', 'CS', 'ES', 'CS', 'CS', 'EN', 'EN', 'EN', 'EN', 'EN', 'EN', 'EN', 'EN', 'EN', 'EN', 'CS', 'ON', 'ON', 'ON', 'ON', 'ON', 'ON', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'ON', 'ON', 'ON', 'ON', 'ON', 'ON', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'ON', 'ON', 'ON', 'ON', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'B', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'BN', 'CS', 'ON', 'ET', 'ET', 'ET', 'ET', 'ON', 'ON', 'ON', 'ON', 'L', 'ON', 'ON', 'BN', 'ON', 'ON', 'ET', 'ET', 'EN', 'EN', 'ON', 'L', 'ON', 'ON', 'ON', 'EN', 'L', 'ON', 'ON', 'ON', 'ON', 'ON', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'ON', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'ON', 'L', 'L', 'L', 'L', 'L', 'L', 'L', 'L'];
+var arabicTypes = ['AN', 'AN', 'AN', 'AN', 'AN', 'AN', 'ON', 'ON', 'AL', 'ET', 'ET', 'AL', 'CS', 'AL', 'ON', 'ON', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'AL', 'AL', '', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'AN', 'AN', 'AN', 'AN', 'AN', 'AN', 'AN', 'AN', 'AN', 'AN', 'ET', 'AN', 'AN', 'AL', 'AL', 'AL', 'NSM', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'AN', 'ON', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'NSM', 'AL', 'AL', 'NSM', 'NSM', 'ON', 'NSM', 'NSM', 'NSM', 'NSM', 'AL', 'AL', 'EN', 'EN', 'EN', 'EN', 'EN', 'EN', 'EN', 'EN', 'EN', 'EN', 'AL', 'AL', 'AL', 'AL', 'AL', 'AL'];
+
+function isOdd(i) {
+ return (i & 1) !== 0;
+}
+
+function isEven(i) {
+ return (i & 1) === 0;
+}
+
+function findUnequal(arr, start, value) {
+ for (var j = start, jj = arr.length; j < jj; ++j) {
+ if (arr[j] !== value) {
+ return j;
+ }
+ }
+
+ return j;
+}
+
+function setValues(arr, start, end, value) {
+ for (var j = start; j < end; ++j) {
+ arr[j] = value;
+ }
+}
+
+function reverseValues(arr, start, end) {
+ for (var i = start, j = end - 1; i < j; ++i, --j) {
+ var temp = arr[i];
+ arr[i] = arr[j];
+ arr[j] = temp;
+ }
+}
+
+function createBidiText(str, isLTR, vertical) {
+ return {
+ str: str,
+ dir: vertical ? 'ttb' : isLTR ? 'ltr' : 'rtl'
+ };
+}
+
+var chars = [];
+var types = [];
+
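+// Simplified Unicode bidirectional algorithm: classify characters, resolve weak and neutral types, assign embedding levels, then reverse runs from the highest level down to the lowest odd level.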
+function bidi(str, startLevel, vertical) {
+ var isLTR = true;
+ var strLength = str.length;
+
+ if (strLength === 0 || vertical) {
+ return createBidiText(str, isLTR, vertical);
+ }
+
+ chars.length = strLength;
+ types.length = strLength;
+ var numBidi = 0;
+ var i, ii;
+
+ for (i = 0; i < strLength; ++i) {
+ chars[i] = str.charAt(i);
+ var charCode = str.charCodeAt(i);
+ var charType = 'L';
+
+ if (charCode <= 0x00ff) {
+ charType = baseTypes[charCode];
+ } else if (0x0590 <= charCode && charCode <= 0x05f4) {
+ charType = 'R';
+ } else if (0x0600 <= charCode && charCode <= 0x06ff) {
+ charType = arabicTypes[charCode & 0xff];
+
+ if (!charType) {
+ (0, _util.warn)('Bidi: invalid Unicode character ' + charCode.toString(16));
+ }
+ } else if (0x0700 <= charCode && charCode <= 0x08AC) {
+ charType = 'AL';
+ }
+
+ if (charType === 'R' || charType === 'AL' || charType === 'AN') {
+ numBidi++;
+ }
+
+ types[i] = charType;
+ }
+
+ if (numBidi === 0) {
+ isLTR = true;
+ return createBidiText(str, isLTR);
+ }
+
+ if (startLevel === -1) {
+ if (numBidi / strLength < 0.3) {
+ isLTR = true;
+ startLevel = 0;
+ } else {
+ isLTR = false;
+ startLevel = 1;
+ }
+ }
+
+ var levels = [];
+
+ for (i = 0; i < strLength; ++i) {
+ levels[i] = startLevel;
+ }
+
+ var e = isOdd(startLevel) ? 'R' : 'L';
+ var sor = e;
+ var eor = sor;
+ var lastType = sor;
+
+ for (i = 0; i < strLength; ++i) {
+ if (types[i] === 'NSM') {
+ types[i] = lastType;
+ } else {
+ lastType = types[i];
+ }
+ }
+
+ lastType = sor;
+ var t;
+
+ for (i = 0; i < strLength; ++i) {
+ t = types[i];
+
+ if (t === 'EN') {
+ types[i] = lastType === 'AL' ? 'AN' : 'EN';
+ } else if (t === 'R' || t === 'L' || t === 'AL') {
+ lastType = t;
+ }
+ }
+
+ for (i = 0; i < strLength; ++i) {
+ t = types[i];
+
+ if (t === 'AL') {
+ types[i] = 'R';
+ }
+ }
+
+ for (i = 1; i < strLength - 1; ++i) {
+ if (types[i] === 'ES' && types[i - 1] === 'EN' && types[i + 1] === 'EN') {
+ types[i] = 'EN';
+ }
+
+ if (types[i] === 'CS' && (types[i - 1] === 'EN' || types[i - 1] === 'AN') && types[i + 1] === types[i - 1]) {
+ types[i] = types[i - 1];
+ }
+ }
+
+ for (i = 0; i < strLength; ++i) {
+ if (types[i] === 'EN') {
+ var j;
+
+ for (j = i - 1; j >= 0; --j) {
+ if (types[j] !== 'ET') {
+ break;
+ }
+
+ types[j] = 'EN';
+ }
+
+ for (j = i + 1; j < strLength; ++j) {
+ if (types[j] !== 'ET') {
+ break;
+ }
+
+ types[j] = 'EN';
+ }
+ }
+ }
+
+ for (i = 0; i < strLength; ++i) {
+ t = types[i];
+
+ if (t === 'WS' || t === 'ES' || t === 'ET' || t === 'CS') {
+ types[i] = 'ON';
+ }
+ }
+
+ lastType = sor;
+
+ for (i = 0; i < strLength; ++i) {
+ t = types[i];
+
+ if (t === 'EN') {
+ types[i] = lastType === 'L' ? 'L' : 'EN';
+ } else if (t === 'R' || t === 'L') {
+ lastType = t;
+ }
+ }
+
+ for (i = 0; i < strLength; ++i) {
+ if (types[i] === 'ON') {
+ var end = findUnequal(types, i + 1, 'ON');
+ var before = sor;
+
+ if (i > 0) {
+ before = types[i - 1];
+ }
+
+ var after = eor;
+
+ if (end + 1 < strLength) {
+ after = types[end + 1];
+ }
+
+ if (before !== 'L') {
+ before = 'R';
+ }
+
+ if (after !== 'L') {
+ after = 'R';
+ }
+
+ if (before === after) {
+ setValues(types, i, end, before);
+ }
+
+ i = end - 1;
+ }
+ }
+
+ for (i = 0; i < strLength; ++i) {
+ if (types[i] === 'ON') {
+ types[i] = e;
+ }
+ }
+
+ for (i = 0; i < strLength; ++i) {
+ t = types[i];
+
+ if (isEven(levels[i])) {
+ if (t === 'R') {
+ levels[i] += 1;
+ } else if (t === 'AN' || t === 'EN') {
+ levels[i] += 2;
+ }
+ } else {
+ if (t === 'L' || t === 'AN' || t === 'EN') {
+ levels[i] += 1;
+ }
+ }
+ }
+
+ var highestLevel = -1;
+ var lowestOddLevel = 99;
+ var level;
+
+ for (i = 0, ii = levels.length; i < ii; ++i) {
+ level = levels[i];
+
+ if (highestLevel < level) {
+ highestLevel = level;
+ }
+
+ if (lowestOddLevel > level && isOdd(level)) {
+ lowestOddLevel = level;
+ }
+ }
+
+ for (level = highestLevel; level >= lowestOddLevel; --level) {
+ var start = -1;
+
+ for (i = 0, ii = levels.length; i < ii; ++i) {
+ if (levels[i] < level) {
+ if (start >= 0) {
+ reverseValues(chars, start, i);
+ start = -1;
+ }
+ } else if (start < 0) {
+ start = i;
+ }
+ }
+
+ if (start >= 0) {
+ reverseValues(chars, start, levels.length);
+ }
+ }
+
+ for (i = 0, ii = chars.length; i < ii; ++i) {
+ var ch = chars[i];
+
+ if (ch === '<' || ch === '>') {
+ chars[i] = '';
+ }
+ }
+
+ return createBidiText(chars.join(''), isLTR);
+}
+
+/***/ }),
+/* 185 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.getMetrics = void 0;
+
+var _core_utils = __w_pdfjs_require__(154);
+
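+// Glyph widths (in 1/1000 em units) for the standard non-embedded fonts: the Courier variants use a single fixed width, the proportional faces get per-glyph lookup tables.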
+var getMetrics = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['Courier'] = 600;
+ t['Courier-Bold'] = 600;
+ t['Courier-BoldOblique'] = 600;
+ t['Courier-Oblique'] = 600;
+ t['Helvetica'] = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['space'] = 278;
+ t['exclam'] = 278;
+ t['quotedbl'] = 355;
+ t['numbersign'] = 556;
+ t['dollar'] = 556;
+ t['percent'] = 889;
+ t['ampersand'] = 667;
+ t['quoteright'] = 222;
+ t['parenleft'] = 333;
+ t['parenright'] = 333;
+ t['asterisk'] = 389;
+ t['plus'] = 584;
+ t['comma'] = 278;
+ t['hyphen'] = 333;
+ t['period'] = 278;
+ t['slash'] = 278;
+ t['zero'] = 556;
+ t['one'] = 556;
+ t['two'] = 556;
+ t['three'] = 556;
+ t['four'] = 556;
+ t['five'] = 556;
+ t['six'] = 556;
+ t['seven'] = 556;
+ t['eight'] = 556;
+ t['nine'] = 556;
+ t['colon'] = 278;
+ t['semicolon'] = 278;
+ t['less'] = 584;
+ t['equal'] = 584;
+ t['greater'] = 584;
+ t['question'] = 556;
+ t['at'] = 1015;
+ t['A'] = 667;
+ t['B'] = 667;
+ t['C'] = 722;
+ t['D'] = 722;
+ t['E'] = 667;
+ t['F'] = 611;
+ t['G'] = 778;
+ t['H'] = 722;
+ t['I'] = 278;
+ t['J'] = 500;
+ t['K'] = 667;
+ t['L'] = 556;
+ t['M'] = 833;
+ t['N'] = 722;
+ t['O'] = 778;
+ t['P'] = 667;
+ t['Q'] = 778;
+ t['R'] = 722;
+ t['S'] = 667;
+ t['T'] = 611;
+ t['U'] = 722;
+ t['V'] = 667;
+ t['W'] = 944;
+ t['X'] = 667;
+ t['Y'] = 667;
+ t['Z'] = 611;
+ t['bracketleft'] = 278;
+ t['backslash'] = 278;
+ t['bracketright'] = 278;
+ t['asciicircum'] = 469;
+ t['underscore'] = 556;
+ t['quoteleft'] = 222;
+ t['a'] = 556;
+ t['b'] = 556;
+ t['c'] = 500;
+ t['d'] = 556;
+ t['e'] = 556;
+ t['f'] = 278;
+ t['g'] = 556;
+ t['h'] = 556;
+ t['i'] = 222;
+ t['j'] = 222;
+ t['k'] = 500;
+ t['l'] = 222;
+ t['m'] = 833;
+ t['n'] = 556;
+ t['o'] = 556;
+ t['p'] = 556;
+ t['q'] = 556;
+ t['r'] = 333;
+ t['s'] = 500;
+ t['t'] = 278;
+ t['u'] = 556;
+ t['v'] = 500;
+ t['w'] = 722;
+ t['x'] = 500;
+ t['y'] = 500;
+ t['z'] = 500;
+ t['braceleft'] = 334;
+ t['bar'] = 260;
+ t['braceright'] = 334;
+ t['asciitilde'] = 584;
+ t['exclamdown'] = 333;
+ t['cent'] = 556;
+ t['sterling'] = 556;
+ t['fraction'] = 167;
+ t['yen'] = 556;
+ t['florin'] = 556;
+ t['section'] = 556;
+ t['currency'] = 556;
+ t['quotesingle'] = 191;
+ t['quotedblleft'] = 333;
+ t['guillemotleft'] = 556;
+ t['guilsinglleft'] = 333;
+ t['guilsinglright'] = 333;
+ t['fi'] = 500;
+ t['fl'] = 500;
+ t['endash'] = 556;
+ t['dagger'] = 556;
+ t['daggerdbl'] = 556;
+ t['periodcentered'] = 278;
+ t['paragraph'] = 537;
+ t['bullet'] = 350;
+ t['quotesinglbase'] = 222;
+ t['quotedblbase'] = 333;
+ t['quotedblright'] = 333;
+ t['guillemotright'] = 556;
+ t['ellipsis'] = 1000;
+ t['perthousand'] = 1000;
+ t['questiondown'] = 611;
+ t['grave'] = 333;
+ t['acute'] = 333;
+ t['circumflex'] = 333;
+ t['tilde'] = 333;
+ t['macron'] = 333;
+ t['breve'] = 333;
+ t['dotaccent'] = 333;
+ t['dieresis'] = 333;
+ t['ring'] = 333;
+ t['cedilla'] = 333;
+ t['hungarumlaut'] = 333;
+ t['ogonek'] = 333;
+ t['caron'] = 333;
+ t['emdash'] = 1000;
+ t['AE'] = 1000;
+ t['ordfeminine'] = 370;
+ t['Lslash'] = 556;
+ t['Oslash'] = 778;
+ t['OE'] = 1000;
+ t['ordmasculine'] = 365;
+ t['ae'] = 889;
+ t['dotlessi'] = 278;
+ t['lslash'] = 222;
+ t['oslash'] = 611;
+ t['oe'] = 944;
+ t['germandbls'] = 611;
+ t['Idieresis'] = 278;
+ t['eacute'] = 556;
+ t['abreve'] = 556;
+ t['uhungarumlaut'] = 556;
+ t['ecaron'] = 556;
+ t['Ydieresis'] = 667;
+ t['divide'] = 584;
+ t['Yacute'] = 667;
+ t['Acircumflex'] = 667;
+ t['aacute'] = 556;
+ t['Ucircumflex'] = 722;
+ t['yacute'] = 500;
+ t['scommaaccent'] = 500;
+ t['ecircumflex'] = 556;
+ t['Uring'] = 722;
+ t['Udieresis'] = 722;
+ t['aogonek'] = 556;
+ t['Uacute'] = 722;
+ t['uogonek'] = 556;
+ t['Edieresis'] = 667;
+ t['Dcroat'] = 722;
+ t['commaaccent'] = 250;
+ t['copyright'] = 737;
+ t['Emacron'] = 667;
+ t['ccaron'] = 500;
+ t['aring'] = 556;
+ t['Ncommaaccent'] = 722;
+ t['lacute'] = 222;
+ t['agrave'] = 556;
+ t['Tcommaaccent'] = 611;
+ t['Cacute'] = 722;
+ t['atilde'] = 556;
+ t['Edotaccent'] = 667;
+ t['scaron'] = 500;
+ t['scedilla'] = 500;
+ t['iacute'] = 278;
+ t['lozenge'] = 471;
+ t['Rcaron'] = 722;
+ t['Gcommaaccent'] = 778;
+ t['ucircumflex'] = 556;
+ t['acircumflex'] = 556;
+ t['Amacron'] = 667;
+ t['rcaron'] = 333;
+ t['ccedilla'] = 500;
+ t['Zdotaccent'] = 611;
+ t['Thorn'] = 667;
+ t['Omacron'] = 778;
+ t['Racute'] = 722;
+ t['Sacute'] = 667;
+ t['dcaron'] = 643;
+ t['Umacron'] = 722;
+ t['uring'] = 556;
+ t['threesuperior'] = 333;
+ t['Ograve'] = 778;
+ t['Agrave'] = 667;
+ t['Abreve'] = 667;
+ t['multiply'] = 584;
+ t['uacute'] = 556;
+ t['Tcaron'] = 611;
+ t['partialdiff'] = 476;
+ t['ydieresis'] = 500;
+ t['Nacute'] = 722;
+ t['icircumflex'] = 278;
+ t['Ecircumflex'] = 667;
+ t['adieresis'] = 556;
+ t['edieresis'] = 556;
+ t['cacute'] = 500;
+ t['nacute'] = 556;
+ t['umacron'] = 556;
+ t['Ncaron'] = 722;
+ t['Iacute'] = 278;
+ t['plusminus'] = 584;
+ t['brokenbar'] = 260;
+ t['registered'] = 737;
+ t['Gbreve'] = 778;
+ t['Idotaccent'] = 278;
+ t['summation'] = 600;
+ t['Egrave'] = 667;
+ t['racute'] = 333;
+ t['omacron'] = 556;
+ t['Zacute'] = 611;
+ t['Zcaron'] = 611;
+ t['greaterequal'] = 549;
+ t['Eth'] = 722;
+ t['Ccedilla'] = 722;
+ t['lcommaaccent'] = 222;
+ t['tcaron'] = 317;
+ t['eogonek'] = 556;
+ t['Uogonek'] = 722;
+ t['Aacute'] = 667;
+ t['Adieresis'] = 667;
+ t['egrave'] = 556;
+ t['zacute'] = 500;
+ t['iogonek'] = 222;
+ t['Oacute'] = 778;
+ t['oacute'] = 556;
+ t['amacron'] = 556;
+ t['sacute'] = 500;
+ t['idieresis'] = 278;
+ t['Ocircumflex'] = 778;
+ t['Ugrave'] = 722;
+ t['Delta'] = 612;
+ t['thorn'] = 556;
+ t['twosuperior'] = 333;
+ t['Odieresis'] = 778;
+ t['mu'] = 556;
+ t['igrave'] = 278;
+ t['ohungarumlaut'] = 556;
+ t['Eogonek'] = 667;
+ t['dcroat'] = 556;
+ t['threequarters'] = 834;
+ t['Scedilla'] = 667;
+ t['lcaron'] = 299;
+ t['Kcommaaccent'] = 667;
+ t['Lacute'] = 556;
+ t['trademark'] = 1000;
+ t['edotaccent'] = 556;
+ t['Igrave'] = 278;
+ t['Imacron'] = 278;
+ t['Lcaron'] = 556;
+ t['onehalf'] = 834;
+ t['lessequal'] = 549;
+ t['ocircumflex'] = 556;
+ t['ntilde'] = 556;
+ t['Uhungarumlaut'] = 722;
+ t['Eacute'] = 667;
+ t['emacron'] = 556;
+ t['gbreve'] = 556;
+ t['onequarter'] = 834;
+ t['Scaron'] = 667;
+ t['Scommaaccent'] = 667;
+ t['Ohungarumlaut'] = 778;
+ t['degree'] = 400;
+ t['ograve'] = 556;
+ t['Ccaron'] = 722;
+ t['ugrave'] = 556;
+ t['radical'] = 453;
+ t['Dcaron'] = 722;
+ t['rcommaaccent'] = 333;
+ t['Ntilde'] = 722;
+ t['otilde'] = 556;
+ t['Rcommaaccent'] = 722;
+ t['Lcommaaccent'] = 556;
+ t['Atilde'] = 667;
+ t['Aogonek'] = 667;
+ t['Aring'] = 667;
+ t['Otilde'] = 778;
+ t['zdotaccent'] = 500;
+ t['Ecaron'] = 667;
+ t['Iogonek'] = 278;
+ t['kcommaaccent'] = 500;
+ t['minus'] = 584;
+ t['Icircumflex'] = 278;
+ t['ncaron'] = 556;
+ t['tcommaaccent'] = 278;
+ t['logicalnot'] = 584;
+ t['odieresis'] = 556;
+ t['udieresis'] = 556;
+ t['notequal'] = 549;
+ t['gcommaaccent'] = 556;
+ t['eth'] = 556;
+ t['zcaron'] = 500;
+ t['ncommaaccent'] = 556;
+ t['onesuperior'] = 333;
+ t['imacron'] = 278;
+ t['Euro'] = 556;
+ });
+ t['Helvetica-Bold'] = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['space'] = 278;
+ t['exclam'] = 333;
+ t['quotedbl'] = 474;
+ t['numbersign'] = 556;
+ t['dollar'] = 556;
+ t['percent'] = 889;
+ t['ampersand'] = 722;
+ t['quoteright'] = 278;
+ t['parenleft'] = 333;
+ t['parenright'] = 333;
+ t['asterisk'] = 389;
+ t['plus'] = 584;
+ t['comma'] = 278;
+ t['hyphen'] = 333;
+ t['period'] = 278;
+ t['slash'] = 278;
+ t['zero'] = 556;
+ t['one'] = 556;
+ t['two'] = 556;
+ t['three'] = 556;
+ t['four'] = 556;
+ t['five'] = 556;
+ t['six'] = 556;
+ t['seven'] = 556;
+ t['eight'] = 556;
+ t['nine'] = 556;
+ t['colon'] = 333;
+ t['semicolon'] = 333;
+ t['less'] = 584;
+ t['equal'] = 584;
+ t['greater'] = 584;
+ t['question'] = 611;
+ t['at'] = 975;
+ t['A'] = 722;
+ t['B'] = 722;
+ t['C'] = 722;
+ t['D'] = 722;
+ t['E'] = 667;
+ t['F'] = 611;
+ t['G'] = 778;
+ t['H'] = 722;
+ t['I'] = 278;
+ t['J'] = 556;
+ t['K'] = 722;
+ t['L'] = 611;
+ t['M'] = 833;
+ t['N'] = 722;
+ t['O'] = 778;
+ t['P'] = 667;
+ t['Q'] = 778;
+ t['R'] = 722;
+ t['S'] = 667;
+ t['T'] = 611;
+ t['U'] = 722;
+ t['V'] = 667;
+ t['W'] = 944;
+ t['X'] = 667;
+ t['Y'] = 667;
+ t['Z'] = 611;
+ t['bracketleft'] = 333;
+ t['backslash'] = 278;
+ t['bracketright'] = 333;
+ t['asciicircum'] = 584;
+ t['underscore'] = 556;
+ t['quoteleft'] = 278;
+ t['a'] = 556;
+ t['b'] = 611;
+ t['c'] = 556;
+ t['d'] = 611;
+ t['e'] = 556;
+ t['f'] = 333;
+ t['g'] = 611;
+ t['h'] = 611;
+ t['i'] = 278;
+ t['j'] = 278;
+ t['k'] = 556;
+ t['l'] = 278;
+ t['m'] = 889;
+ t['n'] = 611;
+ t['o'] = 611;
+ t['p'] = 611;
+ t['q'] = 611;
+ t['r'] = 389;
+ t['s'] = 556;
+ t['t'] = 333;
+ t['u'] = 611;
+ t['v'] = 556;
+ t['w'] = 778;
+ t['x'] = 556;
+ t['y'] = 556;
+ t['z'] = 500;
+ t['braceleft'] = 389;
+ t['bar'] = 280;
+ t['braceright'] = 389;
+ t['asciitilde'] = 584;
+ t['exclamdown'] = 333;
+ t['cent'] = 556;
+ t['sterling'] = 556;
+ t['fraction'] = 167;
+ t['yen'] = 556;
+ t['florin'] = 556;
+ t['section'] = 556;
+ t['currency'] = 556;
+ t['quotesingle'] = 238;
+ t['quotedblleft'] = 500;
+ t['guillemotleft'] = 556;
+ t['guilsinglleft'] = 333;
+ t['guilsinglright'] = 333;
+ t['fi'] = 611;
+ t['fl'] = 611;
+ t['endash'] = 556;
+ t['dagger'] = 556;
+ t['daggerdbl'] = 556;
+ t['periodcentered'] = 278;
+ t['paragraph'] = 556;
+ t['bullet'] = 350;
+ t['quotesinglbase'] = 278;
+ t['quotedblbase'] = 500;
+ t['quotedblright'] = 500;
+ t['guillemotright'] = 556;
+ t['ellipsis'] = 1000;
+ t['perthousand'] = 1000;
+ t['questiondown'] = 611;
+ t['grave'] = 333;
+ t['acute'] = 333;
+ t['circumflex'] = 333;
+ t['tilde'] = 333;
+ t['macron'] = 333;
+ t['breve'] = 333;
+ t['dotaccent'] = 333;
+ t['dieresis'] = 333;
+ t['ring'] = 333;
+ t['cedilla'] = 333;
+ t['hungarumlaut'] = 333;
+ t['ogonek'] = 333;
+ t['caron'] = 333;
+ t['emdash'] = 1000;
+ t['AE'] = 1000;
+ t['ordfeminine'] = 370;
+ t['Lslash'] = 611;
+ t['Oslash'] = 778;
+ t['OE'] = 1000;
+ t['ordmasculine'] = 365;
+ t['ae'] = 889;
+ t['dotlessi'] = 278;
+ t['lslash'] = 278;
+ t['oslash'] = 611;
+ t['oe'] = 944;
+ t['germandbls'] = 611;
+ t['Idieresis'] = 278;
+ t['eacute'] = 556;
+ t['abreve'] = 556;
+ t['uhungarumlaut'] = 611;
+ t['ecaron'] = 556;
+ t['Ydieresis'] = 667;
+ t['divide'] = 584;
+ t['Yacute'] = 667;
+ t['Acircumflex'] = 722;
+ t['aacute'] = 556;
+ t['Ucircumflex'] = 722;
+ t['yacute'] = 556;
+ t['scommaaccent'] = 556;
+ t['ecircumflex'] = 556;
+ t['Uring'] = 722;
+ t['Udieresis'] = 722;
+ t['aogonek'] = 556;
+ t['Uacute'] = 722;
+ t['uogonek'] = 611;
+ t['Edieresis'] = 667;
+ t['Dcroat'] = 722;
+ t['commaaccent'] = 250;
+ t['copyright'] = 737;
+ t['Emacron'] = 667;
+ t['ccaron'] = 556;
+ t['aring'] = 556;
+ t['Ncommaaccent'] = 722;
+ t['lacute'] = 278;
+ t['agrave'] = 556;
+ t['Tcommaaccent'] = 611;
+ t['Cacute'] = 722;
+ t['atilde'] = 556;
+ t['Edotaccent'] = 667;
+ t['scaron'] = 556;
+ t['scedilla'] = 556;
+ t['iacute'] = 278;
+ t['lozenge'] = 494;
+ t['Rcaron'] = 722;
+ t['Gcommaaccent'] = 778;
+ t['ucircumflex'] = 611;
+ t['acircumflex'] = 556;
+ t['Amacron'] = 722;
+ t['rcaron'] = 389;
+ t['ccedilla'] = 556;
+ t['Zdotaccent'] = 611;
+ t['Thorn'] = 667;
+ t['Omacron'] = 778;
+ t['Racute'] = 722;
+ t['Sacute'] = 667;
+ t['dcaron'] = 743;
+ t['Umacron'] = 722;
+ t['uring'] = 611;
+ t['threesuperior'] = 333;
+ t['Ograve'] = 778;
+ t['Agrave'] = 722;
+ t['Abreve'] = 722;
+ t['multiply'] = 584;
+ t['uacute'] = 611;
+ t['Tcaron'] = 611;
+ t['partialdiff'] = 494;
+ t['ydieresis'] = 556;
+ t['Nacute'] = 722;
+ t['icircumflex'] = 278;
+ t['Ecircumflex'] = 667;
+ t['adieresis'] = 556;
+ t['edieresis'] = 556;
+ t['cacute'] = 556;
+ t['nacute'] = 611;
+ t['umacron'] = 611;
+ t['Ncaron'] = 722;
+ t['Iacute'] = 278;
+ t['plusminus'] = 584;
+ t['brokenbar'] = 280;
+ t['registered'] = 737;
+ t['Gbreve'] = 778;
+ t['Idotaccent'] = 278;
+ t['summation'] = 600;
+ t['Egrave'] = 667;
+ t['racute'] = 389;
+ t['omacron'] = 611;
+ t['Zacute'] = 611;
+ t['Zcaron'] = 611;
+ t['greaterequal'] = 549;
+ t['Eth'] = 722;
+ t['Ccedilla'] = 722;
+ t['lcommaaccent'] = 278;
+ t['tcaron'] = 389;
+ t['eogonek'] = 556;
+ t['Uogonek'] = 722;
+ t['Aacute'] = 722;
+ t['Adieresis'] = 722;
+ t['egrave'] = 556;
+ t['zacute'] = 500;
+ t['iogonek'] = 278;
+ t['Oacute'] = 778;
+ t['oacute'] = 611;
+ t['amacron'] = 556;
+ t['sacute'] = 556;
+ t['idieresis'] = 278;
+ t['Ocircumflex'] = 778;
+ t['Ugrave'] = 722;
+ t['Delta'] = 612;
+ t['thorn'] = 611;
+ t['twosuperior'] = 333;
+ t['Odieresis'] = 778;
+ t['mu'] = 611;
+ t['igrave'] = 278;
+ t['ohungarumlaut'] = 611;
+ t['Eogonek'] = 667;
+ t['dcroat'] = 611;
+ t['threequarters'] = 834;
+ t['Scedilla'] = 667;
+ t['lcaron'] = 400;
+ t['Kcommaaccent'] = 722;
+ t['Lacute'] = 611;
+ t['trademark'] = 1000;
+ t['edotaccent'] = 556;
+ t['Igrave'] = 278;
+ t['Imacron'] = 278;
+ t['Lcaron'] = 611;
+ t['onehalf'] = 834;
+ t['lessequal'] = 549;
+ t['ocircumflex'] = 611;
+ t['ntilde'] = 611;
+ t['Uhungarumlaut'] = 722;
+ t['Eacute'] = 667;
+ t['emacron'] = 556;
+ t['gbreve'] = 611;
+ t['onequarter'] = 834;
+ t['Scaron'] = 667;
+ t['Scommaaccent'] = 667;
+ t['Ohungarumlaut'] = 778;
+ t['degree'] = 400;
+ t['ograve'] = 611;
+ t['Ccaron'] = 722;
+ t['ugrave'] = 611;
+ t['radical'] = 549;
+ t['Dcaron'] = 722;
+ t['rcommaaccent'] = 389;
+ t['Ntilde'] = 722;
+ t['otilde'] = 611;
+ t['Rcommaaccent'] = 722;
+ t['Lcommaaccent'] = 611;
+ t['Atilde'] = 722;
+ t['Aogonek'] = 722;
+ t['Aring'] = 722;
+ t['Otilde'] = 778;
+ t['zdotaccent'] = 500;
+ t['Ecaron'] = 667;
+ t['Iogonek'] = 278;
+ t['kcommaaccent'] = 556;
+ t['minus'] = 584;
+ t['Icircumflex'] = 278;
+ t['ncaron'] = 611;
+ t['tcommaaccent'] = 333;
+ t['logicalnot'] = 584;
+ t['odieresis'] = 611;
+ t['udieresis'] = 611;
+ t['notequal'] = 549;
+ t['gcommaaccent'] = 611;
+ t['eth'] = 611;
+ t['zcaron'] = 500;
+ t['ncommaaccent'] = 611;
+ t['onesuperior'] = 333;
+ t['imacron'] = 278;
+ t['Euro'] = 556;
+ });
+ t['Helvetica-BoldOblique'] = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['space'] = 278;
+ t['exclam'] = 333;
+ t['quotedbl'] = 474;
+ t['numbersign'] = 556;
+ t['dollar'] = 556;
+ t['percent'] = 889;
+ t['ampersand'] = 722;
+ t['quoteright'] = 278;
+ t['parenleft'] = 333;
+ t['parenright'] = 333;
+ t['asterisk'] = 389;
+ t['plus'] = 584;
+ t['comma'] = 278;
+ t['hyphen'] = 333;
+ t['period'] = 278;
+ t['slash'] = 278;
+ t['zero'] = 556;
+ t['one'] = 556;
+ t['two'] = 556;
+ t['three'] = 556;
+ t['four'] = 556;
+ t['five'] = 556;
+ t['six'] = 556;
+ t['seven'] = 556;
+ t['eight'] = 556;
+ t['nine'] = 556;
+ t['colon'] = 333;
+ t['semicolon'] = 333;
+ t['less'] = 584;
+ t['equal'] = 584;
+ t['greater'] = 584;
+ t['question'] = 611;
+ t['at'] = 975;
+ t['A'] = 722;
+ t['B'] = 722;
+ t['C'] = 722;
+ t['D'] = 722;
+ t['E'] = 667;
+ t['F'] = 611;
+ t['G'] = 778;
+ t['H'] = 722;
+ t['I'] = 278;
+ t['J'] = 556;
+ t['K'] = 722;
+ t['L'] = 611;
+ t['M'] = 833;
+ t['N'] = 722;
+ t['O'] = 778;
+ t['P'] = 667;
+ t['Q'] = 778;
+ t['R'] = 722;
+ t['S'] = 667;
+ t['T'] = 611;
+ t['U'] = 722;
+ t['V'] = 667;
+ t['W'] = 944;
+ t['X'] = 667;
+ t['Y'] = 667;
+ t['Z'] = 611;
+ t['bracketleft'] = 333;
+ t['backslash'] = 278;
+ t['bracketright'] = 333;
+ t['asciicircum'] = 584;
+ t['underscore'] = 556;
+ t['quoteleft'] = 278;
+ t['a'] = 556;
+ t['b'] = 611;
+ t['c'] = 556;
+ t['d'] = 611;
+ t['e'] = 556;
+ t['f'] = 333;
+ t['g'] = 611;
+ t['h'] = 611;
+ t['i'] = 278;
+ t['j'] = 278;
+ t['k'] = 556;
+ t['l'] = 278;
+ t['m'] = 889;
+ t['n'] = 611;
+ t['o'] = 611;
+ t['p'] = 611;
+ t['q'] = 611;
+ t['r'] = 389;
+ t['s'] = 556;
+ t['t'] = 333;
+ t['u'] = 611;
+ t['v'] = 556;
+ t['w'] = 778;
+ t['x'] = 556;
+ t['y'] = 556;
+ t['z'] = 500;
+ t['braceleft'] = 389;
+ t['bar'] = 280;
+ t['braceright'] = 389;
+ t['asciitilde'] = 584;
+ t['exclamdown'] = 333;
+ t['cent'] = 556;
+ t['sterling'] = 556;
+ t['fraction'] = 167;
+ t['yen'] = 556;
+ t['florin'] = 556;
+ t['section'] = 556;
+ t['currency'] = 556;
+ t['quotesingle'] = 238;
+ t['quotedblleft'] = 500;
+ t['guillemotleft'] = 556;
+ t['guilsinglleft'] = 333;
+ t['guilsinglright'] = 333;
+ t['fi'] = 611;
+ t['fl'] = 611;
+ t['endash'] = 556;
+ t['dagger'] = 556;
+ t['daggerdbl'] = 556;
+ t['periodcentered'] = 278;
+ t['paragraph'] = 556;
+ t['bullet'] = 350;
+ t['quotesinglbase'] = 278;
+ t['quotedblbase'] = 500;
+ t['quotedblright'] = 500;
+ t['guillemotright'] = 556;
+ t['ellipsis'] = 1000;
+ t['perthousand'] = 1000;
+ t['questiondown'] = 611;
+ t['grave'] = 333;
+ t['acute'] = 333;
+ t['circumflex'] = 333;
+ t['tilde'] = 333;
+ t['macron'] = 333;
+ t['breve'] = 333;
+ t['dotaccent'] = 333;
+ t['dieresis'] = 333;
+ t['ring'] = 333;
+ t['cedilla'] = 333;
+ t['hungarumlaut'] = 333;
+ t['ogonek'] = 333;
+ t['caron'] = 333;
+ t['emdash'] = 1000;
+ t['AE'] = 1000;
+ t['ordfeminine'] = 370;
+ t['Lslash'] = 611;
+ t['Oslash'] = 778;
+ t['OE'] = 1000;
+ t['ordmasculine'] = 365;
+ t['ae'] = 889;
+ t['dotlessi'] = 278;
+ t['lslash'] = 278;
+ t['oslash'] = 611;
+ t['oe'] = 944;
+ t['germandbls'] = 611;
+ t['Idieresis'] = 278;
+ t['eacute'] = 556;
+ t['abreve'] = 556;
+ t['uhungarumlaut'] = 611;
+ t['ecaron'] = 556;
+ t['Ydieresis'] = 667;
+ t['divide'] = 584;
+ t['Yacute'] = 667;
+ t['Acircumflex'] = 722;
+ t['aacute'] = 556;
+ t['Ucircumflex'] = 722;
+ t['yacute'] = 556;
+ t['scommaaccent'] = 556;
+ t['ecircumflex'] = 556;
+ t['Uring'] = 722;
+ t['Udieresis'] = 722;
+ t['aogonek'] = 556;
+ t['Uacute'] = 722;
+ t['uogonek'] = 611;
+ t['Edieresis'] = 667;
+ t['Dcroat'] = 722;
+ t['commaaccent'] = 250;
+ t['copyright'] = 737;
+ t['Emacron'] = 667;
+ t['ccaron'] = 556;
+ t['aring'] = 556;
+ t['Ncommaaccent'] = 722;
+ t['lacute'] = 278;
+ t['agrave'] = 556;
+ t['Tcommaaccent'] = 611;
+ t['Cacute'] = 722;
+ t['atilde'] = 556;
+ t['Edotaccent'] = 667;
+ t['scaron'] = 556;
+ t['scedilla'] = 556;
+ t['iacute'] = 278;
+ t['lozenge'] = 494;
+ t['Rcaron'] = 722;
+ t['Gcommaaccent'] = 778;
+ t['ucircumflex'] = 611;
+ t['acircumflex'] = 556;
+ t['Amacron'] = 722;
+ t['rcaron'] = 389;
+ t['ccedilla'] = 556;
+ t['Zdotaccent'] = 611;
+ t['Thorn'] = 667;
+ t['Omacron'] = 778;
+ t['Racute'] = 722;
+ t['Sacute'] = 667;
+ t['dcaron'] = 743;
+ t['Umacron'] = 722;
+ t['uring'] = 611;
+ t['threesuperior'] = 333;
+ t['Ograve'] = 778;
+ t['Agrave'] = 722;
+ t['Abreve'] = 722;
+ t['multiply'] = 584;
+ t['uacute'] = 611;
+ t['Tcaron'] = 611;
+ t['partialdiff'] = 494;
+ t['ydieresis'] = 556;
+ t['Nacute'] = 722;
+ t['icircumflex'] = 278;
+ t['Ecircumflex'] = 667;
+ t['adieresis'] = 556;
+ t['edieresis'] = 556;
+ t['cacute'] = 556;
+ t['nacute'] = 611;
+ t['umacron'] = 611;
+ t['Ncaron'] = 722;
+ t['Iacute'] = 278;
+ t['plusminus'] = 584;
+ t['brokenbar'] = 280;
+ t['registered'] = 737;
+ t['Gbreve'] = 778;
+ t['Idotaccent'] = 278;
+ t['summation'] = 600;
+ t['Egrave'] = 667;
+ t['racute'] = 389;
+ t['omacron'] = 611;
+ t['Zacute'] = 611;
+ t['Zcaron'] = 611;
+ t['greaterequal'] = 549;
+ t['Eth'] = 722;
+ t['Ccedilla'] = 722;
+ t['lcommaaccent'] = 278;
+ t['tcaron'] = 389;
+ t['eogonek'] = 556;
+ t['Uogonek'] = 722;
+ t['Aacute'] = 722;
+ t['Adieresis'] = 722;
+ t['egrave'] = 556;
+ t['zacute'] = 500;
+ t['iogonek'] = 278;
+ t['Oacute'] = 778;
+ t['oacute'] = 611;
+ t['amacron'] = 556;
+ t['sacute'] = 556;
+ t['idieresis'] = 278;
+ t['Ocircumflex'] = 778;
+ t['Ugrave'] = 722;
+ t['Delta'] = 612;
+ t['thorn'] = 611;
+ t['twosuperior'] = 333;
+ t['Odieresis'] = 778;
+ t['mu'] = 611;
+ t['igrave'] = 278;
+ t['ohungarumlaut'] = 611;
+ t['Eogonek'] = 667;
+ t['dcroat'] = 611;
+ t['threequarters'] = 834;
+ t['Scedilla'] = 667;
+ t['lcaron'] = 400;
+ t['Kcommaaccent'] = 722;
+ t['Lacute'] = 611;
+ t['trademark'] = 1000;
+ t['edotaccent'] = 556;
+ t['Igrave'] = 278;
+ t['Imacron'] = 278;
+ t['Lcaron'] = 611;
+ t['onehalf'] = 834;
+ t['lessequal'] = 549;
+ t['ocircumflex'] = 611;
+ t['ntilde'] = 611;
+ t['Uhungarumlaut'] = 722;
+ t['Eacute'] = 667;
+ t['emacron'] = 556;
+ t['gbreve'] = 611;
+ t['onequarter'] = 834;
+ t['Scaron'] = 667;
+ t['Scommaaccent'] = 667;
+ t['Ohungarumlaut'] = 778;
+ t['degree'] = 400;
+ t['ograve'] = 611;
+ t['Ccaron'] = 722;
+ t['ugrave'] = 611;
+ t['radical'] = 549;
+ t['Dcaron'] = 722;
+ t['rcommaaccent'] = 389;
+ t['Ntilde'] = 722;
+ t['otilde'] = 611;
+ t['Rcommaaccent'] = 722;
+ t['Lcommaaccent'] = 611;
+ t['Atilde'] = 722;
+ t['Aogonek'] = 722;
+ t['Aring'] = 722;
+ t['Otilde'] = 778;
+ t['zdotaccent'] = 500;
+ t['Ecaron'] = 667;
+ t['Iogonek'] = 278;
+ t['kcommaaccent'] = 556;
+ t['minus'] = 584;
+ t['Icircumflex'] = 278;
+ t['ncaron'] = 611;
+ t['tcommaaccent'] = 333;
+ t['logicalnot'] = 584;
+ t['odieresis'] = 611;
+ t['udieresis'] = 611;
+ t['notequal'] = 549;
+ t['gcommaaccent'] = 611;
+ t['eth'] = 611;
+ t['zcaron'] = 500;
+ t['ncommaaccent'] = 611;
+ t['onesuperior'] = 333;
+ t['imacron'] = 278;
+ t['Euro'] = 556;
+ });
+ t['Helvetica-Oblique'] = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['space'] = 278;
+ t['exclam'] = 278;
+ t['quotedbl'] = 355;
+ t['numbersign'] = 556;
+ t['dollar'] = 556;
+ t['percent'] = 889;
+ t['ampersand'] = 667;
+ t['quoteright'] = 222;
+ t['parenleft'] = 333;
+ t['parenright'] = 333;
+ t['asterisk'] = 389;
+ t['plus'] = 584;
+ t['comma'] = 278;
+ t['hyphen'] = 333;
+ t['period'] = 278;
+ t['slash'] = 278;
+ t['zero'] = 556;
+ t['one'] = 556;
+ t['two'] = 556;
+ t['three'] = 556;
+ t['four'] = 556;
+ t['five'] = 556;
+ t['six'] = 556;
+ t['seven'] = 556;
+ t['eight'] = 556;
+ t['nine'] = 556;
+ t['colon'] = 278;
+ t['semicolon'] = 278;
+ t['less'] = 584;
+ t['equal'] = 584;
+ t['greater'] = 584;
+ t['question'] = 556;
+ t['at'] = 1015;
+ t['A'] = 667;
+ t['B'] = 667;
+ t['C'] = 722;
+ t['D'] = 722;
+ t['E'] = 667;
+ t['F'] = 611;
+ t['G'] = 778;
+ t['H'] = 722;
+ t['I'] = 278;
+ t['J'] = 500;
+ t['K'] = 667;
+ t['L'] = 556;
+ t['M'] = 833;
+ t['N'] = 722;
+ t['O'] = 778;
+ t['P'] = 667;
+ t['Q'] = 778;
+ t['R'] = 722;
+ t['S'] = 667;
+ t['T'] = 611;
+ t['U'] = 722;
+ t['V'] = 667;
+ t['W'] = 944;
+ t['X'] = 667;
+ t['Y'] = 667;
+ t['Z'] = 611;
+ t['bracketleft'] = 278;
+ t['backslash'] = 278;
+ t['bracketright'] = 278;
+ t['asciicircum'] = 469;
+ t['underscore'] = 556;
+ t['quoteleft'] = 222;
+ t['a'] = 556;
+ t['b'] = 556;
+ t['c'] = 500;
+ t['d'] = 556;
+ t['e'] = 556;
+ t['f'] = 278;
+ t['g'] = 556;
+ t['h'] = 556;
+ t['i'] = 222;
+ t['j'] = 222;
+ t['k'] = 500;
+ t['l'] = 222;
+ t['m'] = 833;
+ t['n'] = 556;
+ t['o'] = 556;
+ t['p'] = 556;
+ t['q'] = 556;
+ t['r'] = 333;
+ t['s'] = 500;
+ t['t'] = 278;
+ t['u'] = 556;
+ t['v'] = 500;
+ t['w'] = 722;
+ t['x'] = 500;
+ t['y'] = 500;
+ t['z'] = 500;
+ t['braceleft'] = 334;
+ t['bar'] = 260;
+ t['braceright'] = 334;
+ t['asciitilde'] = 584;
+ t['exclamdown'] = 333;
+ t['cent'] = 556;
+ t['sterling'] = 556;
+ t['fraction'] = 167;
+ t['yen'] = 556;
+ t['florin'] = 556;
+ t['section'] = 556;
+ t['currency'] = 556;
+ t['quotesingle'] = 191;
+ t['quotedblleft'] = 333;
+ t['guillemotleft'] = 556;
+ t['guilsinglleft'] = 333;
+ t['guilsinglright'] = 333;
+ t['fi'] = 500;
+ t['fl'] = 500;
+ t['endash'] = 556;
+ t['dagger'] = 556;
+ t['daggerdbl'] = 556;
+ t['periodcentered'] = 278;
+ t['paragraph'] = 537;
+ t['bullet'] = 350;
+ t['quotesinglbase'] = 222;
+ t['quotedblbase'] = 333;
+ t['quotedblright'] = 333;
+ t['guillemotright'] = 556;
+ t['ellipsis'] = 1000;
+ t['perthousand'] = 1000;
+ t['questiondown'] = 611;
+ t['grave'] = 333;
+ t['acute'] = 333;
+ t['circumflex'] = 333;
+ t['tilde'] = 333;
+ t['macron'] = 333;
+ t['breve'] = 333;
+ t['dotaccent'] = 333;
+ t['dieresis'] = 333;
+ t['ring'] = 333;
+ t['cedilla'] = 333;
+ t['hungarumlaut'] = 333;
+ t['ogonek'] = 333;
+ t['caron'] = 333;
+ t['emdash'] = 1000;
+ t['AE'] = 1000;
+ t['ordfeminine'] = 370;
+ t['Lslash'] = 556;
+ t['Oslash'] = 778;
+ t['OE'] = 1000;
+ t['ordmasculine'] = 365;
+ t['ae'] = 889;
+ t['dotlessi'] = 278;
+ t['lslash'] = 222;
+ t['oslash'] = 611;
+ t['oe'] = 944;
+ t['germandbls'] = 611;
+ t['Idieresis'] = 278;
+ t['eacute'] = 556;
+ t['abreve'] = 556;
+ t['uhungarumlaut'] = 556;
+ t['ecaron'] = 556;
+ t['Ydieresis'] = 667;
+ t['divide'] = 584;
+ t['Yacute'] = 667;
+ t['Acircumflex'] = 667;
+ t['aacute'] = 556;
+ t['Ucircumflex'] = 722;
+ t['yacute'] = 500;
+ t['scommaaccent'] = 500;
+ t['ecircumflex'] = 556;
+ t['Uring'] = 722;
+ t['Udieresis'] = 722;
+ t['aogonek'] = 556;
+ t['Uacute'] = 722;
+ t['uogonek'] = 556;
+ t['Edieresis'] = 667;
+ t['Dcroat'] = 722;
+ t['commaaccent'] = 250;
+ t['copyright'] = 737;
+ t['Emacron'] = 667;
+ t['ccaron'] = 500;
+ t['aring'] = 556;
+ t['Ncommaaccent'] = 722;
+ t['lacute'] = 222;
+ t['agrave'] = 556;
+ t['Tcommaaccent'] = 611;
+ t['Cacute'] = 722;
+ t['atilde'] = 556;
+ t['Edotaccent'] = 667;
+ t['scaron'] = 500;
+ t['scedilla'] = 500;
+ t['iacute'] = 278;
+ t['lozenge'] = 471;
+ t['Rcaron'] = 722;
+ t['Gcommaaccent'] = 778;
+ t['ucircumflex'] = 556;
+ t['acircumflex'] = 556;
+ t['Amacron'] = 667;
+ t['rcaron'] = 333;
+ t['ccedilla'] = 500;
+ t['Zdotaccent'] = 611;
+ t['Thorn'] = 667;
+ t['Omacron'] = 778;
+ t['Racute'] = 722;
+ t['Sacute'] = 667;
+ t['dcaron'] = 643;
+ t['Umacron'] = 722;
+ t['uring'] = 556;
+ t['threesuperior'] = 333;
+ t['Ograve'] = 778;
+ t['Agrave'] = 667;
+ t['Abreve'] = 667;
+ t['multiply'] = 584;
+ t['uacute'] = 556;
+ t['Tcaron'] = 611;
+ t['partialdiff'] = 476;
+ t['ydieresis'] = 500;
+ t['Nacute'] = 722;
+ t['icircumflex'] = 278;
+ t['Ecircumflex'] = 667;
+ t['adieresis'] = 556;
+ t['edieresis'] = 556;
+ t['cacute'] = 500;
+ t['nacute'] = 556;
+ t['umacron'] = 556;
+ t['Ncaron'] = 722;
+ t['Iacute'] = 278;
+ t['plusminus'] = 584;
+ t['brokenbar'] = 260;
+ t['registered'] = 737;
+ t['Gbreve'] = 778;
+ t['Idotaccent'] = 278;
+ t['summation'] = 600;
+ t['Egrave'] = 667;
+ t['racute'] = 333;
+ t['omacron'] = 556;
+ t['Zacute'] = 611;
+ t['Zcaron'] = 611;
+ t['greaterequal'] = 549;
+ t['Eth'] = 722;
+ t['Ccedilla'] = 722;
+ t['lcommaaccent'] = 222;
+ t['tcaron'] = 317;
+ t['eogonek'] = 556;
+ t['Uogonek'] = 722;
+ t['Aacute'] = 667;
+ t['Adieresis'] = 667;
+ t['egrave'] = 556;
+ t['zacute'] = 500;
+ t['iogonek'] = 222;
+ t['Oacute'] = 778;
+ t['oacute'] = 556;
+ t['amacron'] = 556;
+ t['sacute'] = 500;
+ t['idieresis'] = 278;
+ t['Ocircumflex'] = 778;
+ t['Ugrave'] = 722;
+ t['Delta'] = 612;
+ t['thorn'] = 556;
+ t['twosuperior'] = 333;
+ t['Odieresis'] = 778;
+ t['mu'] = 556;
+ t['igrave'] = 278;
+ t['ohungarumlaut'] = 556;
+ t['Eogonek'] = 667;
+ t['dcroat'] = 556;
+ t['threequarters'] = 834;
+ t['Scedilla'] = 667;
+ t['lcaron'] = 299;
+ t['Kcommaaccent'] = 667;
+ t['Lacute'] = 556;
+ t['trademark'] = 1000;
+ t['edotaccent'] = 556;
+ t['Igrave'] = 278;
+ t['Imacron'] = 278;
+ t['Lcaron'] = 556;
+ t['onehalf'] = 834;
+ t['lessequal'] = 549;
+ t['ocircumflex'] = 556;
+ t['ntilde'] = 556;
+ t['Uhungarumlaut'] = 722;
+ t['Eacute'] = 667;
+ t['emacron'] = 556;
+ t['gbreve'] = 556;
+ t['onequarter'] = 834;
+ t['Scaron'] = 667;
+ t['Scommaaccent'] = 667;
+ t['Ohungarumlaut'] = 778;
+ t['degree'] = 400;
+ t['ograve'] = 556;
+ t['Ccaron'] = 722;
+ t['ugrave'] = 556;
+ t['radical'] = 453;
+ t['Dcaron'] = 722;
+ t['rcommaaccent'] = 333;
+ t['Ntilde'] = 722;
+ t['otilde'] = 556;
+ t['Rcommaaccent'] = 722;
+ t['Lcommaaccent'] = 556;
+ t['Atilde'] = 667;
+ t['Aogonek'] = 667;
+ t['Aring'] = 667;
+ t['Otilde'] = 778;
+ t['zdotaccent'] = 500;
+ t['Ecaron'] = 667;
+ t['Iogonek'] = 278;
+ t['kcommaaccent'] = 500;
+ t['minus'] = 584;
+ t['Icircumflex'] = 278;
+ t['ncaron'] = 556;
+ t['tcommaaccent'] = 278;
+ t['logicalnot'] = 584;
+ t['odieresis'] = 556;
+ t['udieresis'] = 556;
+ t['notequal'] = 549;
+ t['gcommaaccent'] = 556;
+ t['eth'] = 556;
+ t['zcaron'] = 500;
+ t['ncommaaccent'] = 556;
+ t['onesuperior'] = 333;
+ t['imacron'] = 278;
+ t['Euro'] = 556;
+ });
+ t['Symbol'] = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['space'] = 250;
+ t['exclam'] = 333;
+ t['universal'] = 713;
+ t['numbersign'] = 500;
+ t['existential'] = 549;
+ t['percent'] = 833;
+ t['ampersand'] = 778;
+ t['suchthat'] = 439;
+ t['parenleft'] = 333;
+ t['parenright'] = 333;
+ t['asteriskmath'] = 500;
+ t['plus'] = 549;
+ t['comma'] = 250;
+ t['minus'] = 549;
+ t['period'] = 250;
+ t['slash'] = 278;
+ t['zero'] = 500;
+ t['one'] = 500;
+ t['two'] = 500;
+ t['three'] = 500;
+ t['four'] = 500;
+ t['five'] = 500;
+ t['six'] = 500;
+ t['seven'] = 500;
+ t['eight'] = 500;
+ t['nine'] = 500;
+ t['colon'] = 278;
+ t['semicolon'] = 278;
+ t['less'] = 549;
+ t['equal'] = 549;
+ t['greater'] = 549;
+ t['question'] = 444;
+ t['congruent'] = 549;
+ t['Alpha'] = 722;
+ t['Beta'] = 667;
+ t['Chi'] = 722;
+ t['Delta'] = 612;
+ t['Epsilon'] = 611;
+ t['Phi'] = 763;
+ t['Gamma'] = 603;
+ t['Eta'] = 722;
+ t['Iota'] = 333;
+ t['theta1'] = 631;
+ t['Kappa'] = 722;
+ t['Lambda'] = 686;
+ t['Mu'] = 889;
+ t['Nu'] = 722;
+ t['Omicron'] = 722;
+ t['Pi'] = 768;
+ t['Theta'] = 741;
+ t['Rho'] = 556;
+ t['Sigma'] = 592;
+ t['Tau'] = 611;
+ t['Upsilon'] = 690;
+ t['sigma1'] = 439;
+ t['Omega'] = 768;
+ t['Xi'] = 645;
+ t['Psi'] = 795;
+ t['Zeta'] = 611;
+ t['bracketleft'] = 333;
+ t['therefore'] = 863;
+ t['bracketright'] = 333;
+ t['perpendicular'] = 658;
+ t['underscore'] = 500;
+ t['radicalex'] = 500;
+ t['alpha'] = 631;
+ t['beta'] = 549;
+ t['chi'] = 549;
+ t['delta'] = 494;
+ t['epsilon'] = 439;
+ t['phi'] = 521;
+ t['gamma'] = 411;
+ t['eta'] = 603;
+ t['iota'] = 329;
+ t['phi1'] = 603;
+ t['kappa'] = 549;
+ t['lambda'] = 549;
+ t['mu'] = 576;
+ t['nu'] = 521;
+ t['omicron'] = 549;
+ t['pi'] = 549;
+ t['theta'] = 521;
+ t['rho'] = 549;
+ t['sigma'] = 603;
+ t['tau'] = 439;
+ t['upsilon'] = 576;
+ t['omega1'] = 713;
+ t['omega'] = 686;
+ t['xi'] = 493;
+ t['psi'] = 686;
+ t['zeta'] = 494;
+ t['braceleft'] = 480;
+ t['bar'] = 200;
+ t['braceright'] = 480;
+ t['similar'] = 549;
+ t['Euro'] = 750;
+ t['Upsilon1'] = 620;
+ t['minute'] = 247;
+ t['lessequal'] = 549;
+ t['fraction'] = 167;
+ t['infinity'] = 713;
+ t['florin'] = 500;
+ t['club'] = 753;
+ t['diamond'] = 753;
+ t['heart'] = 753;
+ t['spade'] = 753;
+ t['arrowboth'] = 1042;
+ t['arrowleft'] = 987;
+ t['arrowup'] = 603;
+ t['arrowright'] = 987;
+ t['arrowdown'] = 603;
+ t['degree'] = 400;
+ t['plusminus'] = 549;
+ t['second'] = 411;
+ t['greaterequal'] = 549;
+ t['multiply'] = 549;
+ t['proportional'] = 713;
+ t['partialdiff'] = 494;
+ t['bullet'] = 460;
+ t['divide'] = 549;
+ t['notequal'] = 549;
+ t['equivalence'] = 549;
+ t['approxequal'] = 549;
+ t['ellipsis'] = 1000;
+ t['arrowvertex'] = 603;
+ t['arrowhorizex'] = 1000;
+ t['carriagereturn'] = 658;
+ t['aleph'] = 823;
+ t['Ifraktur'] = 686;
+ t['Rfraktur'] = 795;
+ t['weierstrass'] = 987;
+ t['circlemultiply'] = 768;
+ t['circleplus'] = 768;
+ t['emptyset'] = 823;
+ t['intersection'] = 768;
+ t['union'] = 768;
+ t['propersuperset'] = 713;
+ t['reflexsuperset'] = 713;
+ t['notsubset'] = 713;
+ t['propersubset'] = 713;
+ t['reflexsubset'] = 713;
+ t['element'] = 713;
+ t['notelement'] = 713;
+ t['angle'] = 768;
+ t['gradient'] = 713;
+ t['registerserif'] = 790;
+ t['copyrightserif'] = 790;
+ t['trademarkserif'] = 890;
+ t['product'] = 823;
+ t['radical'] = 549;
+ t['dotmath'] = 250;
+ t['logicalnot'] = 713;
+ t['logicaland'] = 603;
+ t['logicalor'] = 603;
+ t['arrowdblboth'] = 1042;
+ t['arrowdblleft'] = 987;
+ t['arrowdblup'] = 603;
+ t['arrowdblright'] = 987;
+ t['arrowdbldown'] = 603;
+ t['lozenge'] = 494;
+ t['angleleft'] = 329;
+ t['registersans'] = 790;
+ t['copyrightsans'] = 790;
+ t['trademarksans'] = 786;
+ t['summation'] = 713;
+ t['parenlefttp'] = 384;
+ t['parenleftex'] = 384;
+ t['parenleftbt'] = 384;
+ t['bracketlefttp'] = 384;
+ t['bracketleftex'] = 384;
+ t['bracketleftbt'] = 384;
+ t['bracelefttp'] = 494;
+ t['braceleftmid'] = 494;
+ t['braceleftbt'] = 494;
+ t['braceex'] = 494;
+ t['angleright'] = 329;
+ t['integral'] = 274;
+ t['integraltp'] = 686;
+ t['integralex'] = 686;
+ t['integralbt'] = 686;
+ t['parenrighttp'] = 384;
+ t['parenrightex'] = 384;
+ t['parenrightbt'] = 384;
+ t['bracketrighttp'] = 384;
+ t['bracketrightex'] = 384;
+ t['bracketrightbt'] = 384;
+ t['bracerighttp'] = 494;
+ t['bracerightmid'] = 494;
+ t['bracerightbt'] = 494;
+ t['apple'] = 790;
+ });
+ t['Times-Roman'] = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['space'] = 250;
+ t['exclam'] = 333;
+ t['quotedbl'] = 408;
+ t['numbersign'] = 500;
+ t['dollar'] = 500;
+ t['percent'] = 833;
+ t['ampersand'] = 778;
+ t['quoteright'] = 333;
+ t['parenleft'] = 333;
+ t['parenright'] = 333;
+ t['asterisk'] = 500;
+ t['plus'] = 564;
+ t['comma'] = 250;
+ t['hyphen'] = 333;
+ t['period'] = 250;
+ t['slash'] = 278;
+ t['zero'] = 500;
+ t['one'] = 500;
+ t['two'] = 500;
+ t['three'] = 500;
+ t['four'] = 500;
+ t['five'] = 500;
+ t['six'] = 500;
+ t['seven'] = 500;
+ t['eight'] = 500;
+ t['nine'] = 500;
+ t['colon'] = 278;
+ t['semicolon'] = 278;
+ t['less'] = 564;
+ t['equal'] = 564;
+ t['greater'] = 564;
+ t['question'] = 444;
+ t['at'] = 921;
+ t['A'] = 722;
+ t['B'] = 667;
+ t['C'] = 667;
+ t['D'] = 722;
+ t['E'] = 611;
+ t['F'] = 556;
+ t['G'] = 722;
+ t['H'] = 722;
+ t['I'] = 333;
+ t['J'] = 389;
+ t['K'] = 722;
+ t['L'] = 611;
+ t['M'] = 889;
+ t['N'] = 722;
+ t['O'] = 722;
+ t['P'] = 556;
+ t['Q'] = 722;
+ t['R'] = 667;
+ t['S'] = 556;
+ t['T'] = 611;
+ t['U'] = 722;
+ t['V'] = 722;
+ t['W'] = 944;
+ t['X'] = 722;
+ t['Y'] = 722;
+ t['Z'] = 611;
+ t['bracketleft'] = 333;
+ t['backslash'] = 278;
+ t['bracketright'] = 333;
+ t['asciicircum'] = 469;
+ t['underscore'] = 500;
+ t['quoteleft'] = 333;
+ t['a'] = 444;
+ t['b'] = 500;
+ t['c'] = 444;
+ t['d'] = 500;
+ t['e'] = 444;
+ t['f'] = 333;
+ t['g'] = 500;
+ t['h'] = 500;
+ t['i'] = 278;
+ t['j'] = 278;
+ t['k'] = 500;
+ t['l'] = 278;
+ t['m'] = 778;
+ t['n'] = 500;
+ t['o'] = 500;
+ t['p'] = 500;
+ t['q'] = 500;
+ t['r'] = 333;
+ t['s'] = 389;
+ t['t'] = 278;
+ t['u'] = 500;
+ t['v'] = 500;
+ t['w'] = 722;
+ t['x'] = 500;
+ t['y'] = 500;
+ t['z'] = 444;
+ t['braceleft'] = 480;
+ t['bar'] = 200;
+ t['braceright'] = 480;
+ t['asciitilde'] = 541;
+ t['exclamdown'] = 333;
+ t['cent'] = 500;
+ t['sterling'] = 500;
+ t['fraction'] = 167;
+ t['yen'] = 500;
+ t['florin'] = 500;
+ t['section'] = 500;
+ t['currency'] = 500;
+ t['quotesingle'] = 180;
+ t['quotedblleft'] = 444;
+ t['guillemotleft'] = 500;
+ t['guilsinglleft'] = 333;
+ t['guilsinglright'] = 333;
+ t['fi'] = 556;
+ t['fl'] = 556;
+ t['endash'] = 500;
+ t['dagger'] = 500;
+ t['daggerdbl'] = 500;
+ t['periodcentered'] = 250;
+ t['paragraph'] = 453;
+ t['bullet'] = 350;
+ t['quotesinglbase'] = 333;
+ t['quotedblbase'] = 444;
+ t['quotedblright'] = 444;
+ t['guillemotright'] = 500;
+ t['ellipsis'] = 1000;
+ t['perthousand'] = 1000;
+ t['questiondown'] = 444;
+ t['grave'] = 333;
+ t['acute'] = 333;
+ t['circumflex'] = 333;
+ t['tilde'] = 333;
+ t['macron'] = 333;
+ t['breve'] = 333;
+ t['dotaccent'] = 333;
+ t['dieresis'] = 333;
+ t['ring'] = 333;
+ t['cedilla'] = 333;
+ t['hungarumlaut'] = 333;
+ t['ogonek'] = 333;
+ t['caron'] = 333;
+ t['emdash'] = 1000;
+ t['AE'] = 889;
+ t['ordfeminine'] = 276;
+ t['Lslash'] = 611;
+ t['Oslash'] = 722;
+ t['OE'] = 889;
+ t['ordmasculine'] = 310;
+ t['ae'] = 667;
+ t['dotlessi'] = 278;
+ t['lslash'] = 278;
+ t['oslash'] = 500;
+ t['oe'] = 722;
+ t['germandbls'] = 500;
+ t['Idieresis'] = 333;
+ t['eacute'] = 444;
+ t['abreve'] = 444;
+ t['uhungarumlaut'] = 500;
+ t['ecaron'] = 444;
+ t['Ydieresis'] = 722;
+ t['divide'] = 564;
+ t['Yacute'] = 722;
+ t['Acircumflex'] = 722;
+ t['aacute'] = 444;
+ t['Ucircumflex'] = 722;
+ t['yacute'] = 500;
+ t['scommaaccent'] = 389;
+ t['ecircumflex'] = 444;
+ t['Uring'] = 722;
+ t['Udieresis'] = 722;
+ t['aogonek'] = 444;
+ t['Uacute'] = 722;
+ t['uogonek'] = 500;
+ t['Edieresis'] = 611;
+ t['Dcroat'] = 722;
+ t['commaaccent'] = 250;
+ t['copyright'] = 760;
+ t['Emacron'] = 611;
+ t['ccaron'] = 444;
+ t['aring'] = 444;
+ t['Ncommaaccent'] = 722;
+ t['lacute'] = 278;
+ t['agrave'] = 444;
+ t['Tcommaaccent'] = 611;
+ t['Cacute'] = 667;
+ t['atilde'] = 444;
+ t['Edotaccent'] = 611;
+ t['scaron'] = 389;
+ t['scedilla'] = 389;
+ t['iacute'] = 278;
+ t['lozenge'] = 471;
+ t['Rcaron'] = 667;
+ t['Gcommaaccent'] = 722;
+ t['ucircumflex'] = 500;
+ t['acircumflex'] = 444;
+ t['Amacron'] = 722;
+ t['rcaron'] = 333;
+ t['ccedilla'] = 444;
+ t['Zdotaccent'] = 611;
+ t['Thorn'] = 556;
+ t['Omacron'] = 722;
+ t['Racute'] = 667;
+ t['Sacute'] = 556;
+ t['dcaron'] = 588;
+ t['Umacron'] = 722;
+ t['uring'] = 500;
+ t['threesuperior'] = 300;
+ t['Ograve'] = 722;
+ t['Agrave'] = 722;
+ t['Abreve'] = 722;
+ t['multiply'] = 564;
+ t['uacute'] = 500;
+ t['Tcaron'] = 611;
+ t['partialdiff'] = 476;
+ t['ydieresis'] = 500;
+ t['Nacute'] = 722;
+ t['icircumflex'] = 278;
+ t['Ecircumflex'] = 611;
+ t['adieresis'] = 444;
+ t['edieresis'] = 444;
+ t['cacute'] = 444;
+ t['nacute'] = 500;
+ t['umacron'] = 500;
+ t['Ncaron'] = 722;
+ t['Iacute'] = 333;
+ t['plusminus'] = 564;
+ t['brokenbar'] = 200;
+ t['registered'] = 760;
+ t['Gbreve'] = 722;
+ t['Idotaccent'] = 333;
+ t['summation'] = 600;
+ t['Egrave'] = 611;
+ t['racute'] = 333;
+ t['omacron'] = 500;
+ t['Zacute'] = 611;
+ t['Zcaron'] = 611;
+ t['greaterequal'] = 549;
+ t['Eth'] = 722;
+ t['Ccedilla'] = 667;
+ t['lcommaaccent'] = 278;
+ t['tcaron'] = 326;
+ t['eogonek'] = 444;
+ t['Uogonek'] = 722;
+ t['Aacute'] = 722;
+ t['Adieresis'] = 722;
+ t['egrave'] = 444;
+ t['zacute'] = 444;
+ t['iogonek'] = 278;
+ t['Oacute'] = 722;
+ t['oacute'] = 500;
+ t['amacron'] = 444;
+ t['sacute'] = 389;
+ t['idieresis'] = 278;
+ t['Ocircumflex'] = 722;
+ t['Ugrave'] = 722;
+ t['Delta'] = 612;
+ t['thorn'] = 500;
+ t['twosuperior'] = 300;
+ t['Odieresis'] = 722;
+ t['mu'] = 500;
+ t['igrave'] = 278;
+ t['ohungarumlaut'] = 500;
+ t['Eogonek'] = 611;
+ t['dcroat'] = 500;
+ t['threequarters'] = 750;
+ t['Scedilla'] = 556;
+ t['lcaron'] = 344;
+ t['Kcommaaccent'] = 722;
+ t['Lacute'] = 611;
+ t['trademark'] = 980;
+ t['edotaccent'] = 444;
+ t['Igrave'] = 333;
+ t['Imacron'] = 333;
+ t['Lcaron'] = 611;
+ t['onehalf'] = 750;
+ t['lessequal'] = 549;
+ t['ocircumflex'] = 500;
+ t['ntilde'] = 500;
+ t['Uhungarumlaut'] = 722;
+ t['Eacute'] = 611;
+ t['emacron'] = 444;
+ t['gbreve'] = 500;
+ t['onequarter'] = 750;
+ t['Scaron'] = 556;
+ t['Scommaaccent'] = 556;
+ t['Ohungarumlaut'] = 722;
+ t['degree'] = 400;
+ t['ograve'] = 500;
+ t['Ccaron'] = 667;
+ t['ugrave'] = 500;
+ t['radical'] = 453;
+ t['Dcaron'] = 722;
+ t['rcommaaccent'] = 333;
+ t['Ntilde'] = 722;
+ t['otilde'] = 500;
+ t['Rcommaaccent'] = 667;
+ t['Lcommaaccent'] = 611;
+ t['Atilde'] = 722;
+ t['Aogonek'] = 722;
+ t['Aring'] = 722;
+ t['Otilde'] = 722;
+ t['zdotaccent'] = 444;
+ t['Ecaron'] = 611;
+ t['Iogonek'] = 333;
+ t['kcommaaccent'] = 500;
+ t['minus'] = 564;
+ t['Icircumflex'] = 333;
+ t['ncaron'] = 500;
+ t['tcommaaccent'] = 278;
+ t['logicalnot'] = 564;
+ t['odieresis'] = 500;
+ t['udieresis'] = 500;
+ t['notequal'] = 549;
+ t['gcommaaccent'] = 500;
+ t['eth'] = 500;
+ t['zcaron'] = 444;
+ t['ncommaaccent'] = 500;
+ t['onesuperior'] = 300;
+ t['imacron'] = 278;
+ t['Euro'] = 500;
+ });
+ t['Times-Bold'] = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['space'] = 250;
+ t['exclam'] = 333;
+ t['quotedbl'] = 555;
+ t['numbersign'] = 500;
+ t['dollar'] = 500;
+ t['percent'] = 1000;
+ t['ampersand'] = 833;
+ t['quoteright'] = 333;
+ t['parenleft'] = 333;
+ t['parenright'] = 333;
+ t['asterisk'] = 500;
+ t['plus'] = 570;
+ t['comma'] = 250;
+ t['hyphen'] = 333;
+ t['period'] = 250;
+ t['slash'] = 278;
+ t['zero'] = 500;
+ t['one'] = 500;
+ t['two'] = 500;
+ t['three'] = 500;
+ t['four'] = 500;
+ t['five'] = 500;
+ t['six'] = 500;
+ t['seven'] = 500;
+ t['eight'] = 500;
+ t['nine'] = 500;
+ t['colon'] = 333;
+ t['semicolon'] = 333;
+ t['less'] = 570;
+ t['equal'] = 570;
+ t['greater'] = 570;
+ t['question'] = 500;
+ t['at'] = 930;
+ t['A'] = 722;
+ t['B'] = 667;
+ t['C'] = 722;
+ t['D'] = 722;
+ t['E'] = 667;
+ t['F'] = 611;
+ t['G'] = 778;
+ t['H'] = 778;
+ t['I'] = 389;
+ t['J'] = 500;
+ t['K'] = 778;
+ t['L'] = 667;
+ t['M'] = 944;
+ t['N'] = 722;
+ t['O'] = 778;
+ t['P'] = 611;
+ t['Q'] = 778;
+ t['R'] = 722;
+ t['S'] = 556;
+ t['T'] = 667;
+ t['U'] = 722;
+ t['V'] = 722;
+ t['W'] = 1000;
+ t['X'] = 722;
+ t['Y'] = 722;
+ t['Z'] = 667;
+ t['bracketleft'] = 333;
+ t['backslash'] = 278;
+ t['bracketright'] = 333;
+ t['asciicircum'] = 581;
+ t['underscore'] = 500;
+ t['quoteleft'] = 333;
+ t['a'] = 500;
+ t['b'] = 556;
+ t['c'] = 444;
+ t['d'] = 556;
+ t['e'] = 444;
+ t['f'] = 333;
+ t['g'] = 500;
+ t['h'] = 556;
+ t['i'] = 278;
+ t['j'] = 333;
+ t['k'] = 556;
+ t['l'] = 278;
+ t['m'] = 833;
+ t['n'] = 556;
+ t['o'] = 500;
+ t['p'] = 556;
+ t['q'] = 556;
+ t['r'] = 444;
+ t['s'] = 389;
+ t['t'] = 333;
+ t['u'] = 556;
+ t['v'] = 500;
+ t['w'] = 722;
+ t['x'] = 500;
+ t['y'] = 500;
+ t['z'] = 444;
+ t['braceleft'] = 394;
+ t['bar'] = 220;
+ t['braceright'] = 394;
+ t['asciitilde'] = 520;
+ t['exclamdown'] = 333;
+ t['cent'] = 500;
+ t['sterling'] = 500;
+ t['fraction'] = 167;
+ t['yen'] = 500;
+ t['florin'] = 500;
+ t['section'] = 500;
+ t['currency'] = 500;
+ t['quotesingle'] = 278;
+ t['quotedblleft'] = 500;
+ t['guillemotleft'] = 500;
+ t['guilsinglleft'] = 333;
+ t['guilsinglright'] = 333;
+ t['fi'] = 556;
+ t['fl'] = 556;
+ t['endash'] = 500;
+ t['dagger'] = 500;
+ t['daggerdbl'] = 500;
+ t['periodcentered'] = 250;
+ t['paragraph'] = 540;
+ t['bullet'] = 350;
+ t['quotesinglbase'] = 333;
+ t['quotedblbase'] = 500;
+ t['quotedblright'] = 500;
+ t['guillemotright'] = 500;
+ t['ellipsis'] = 1000;
+ t['perthousand'] = 1000;
+ t['questiondown'] = 500;
+ t['grave'] = 333;
+ t['acute'] = 333;
+ t['circumflex'] = 333;
+ t['tilde'] = 333;
+ t['macron'] = 333;
+ t['breve'] = 333;
+ t['dotaccent'] = 333;
+ t['dieresis'] = 333;
+ t['ring'] = 333;
+ t['cedilla'] = 333;
+ t['hungarumlaut'] = 333;
+ t['ogonek'] = 333;
+ t['caron'] = 333;
+ t['emdash'] = 1000;
+ t['AE'] = 1000;
+ t['ordfeminine'] = 300;
+ t['Lslash'] = 667;
+ t['Oslash'] = 778;
+ t['OE'] = 1000;
+ t['ordmasculine'] = 330;
+ t['ae'] = 722;
+ t['dotlessi'] = 278;
+ t['lslash'] = 278;
+ t['oslash'] = 500;
+ t['oe'] = 722;
+ t['germandbls'] = 556;
+ t['Idieresis'] = 389;
+ t['eacute'] = 444;
+ t['abreve'] = 500;
+ t['uhungarumlaut'] = 556;
+ t['ecaron'] = 444;
+ t['Ydieresis'] = 722;
+ t['divide'] = 570;
+ t['Yacute'] = 722;
+ t['Acircumflex'] = 722;
+ t['aacute'] = 500;
+ t['Ucircumflex'] = 722;
+ t['yacute'] = 500;
+ t['scommaaccent'] = 389;
+ t['ecircumflex'] = 444;
+ t['Uring'] = 722;
+ t['Udieresis'] = 722;
+ t['aogonek'] = 500;
+ t['Uacute'] = 722;
+ t['uogonek'] = 556;
+ t['Edieresis'] = 667;
+ t['Dcroat'] = 722;
+ t['commaaccent'] = 250;
+ t['copyright'] = 747;
+ t['Emacron'] = 667;
+ t['ccaron'] = 444;
+ t['aring'] = 500;
+ t['Ncommaaccent'] = 722;
+ t['lacute'] = 278;
+ t['agrave'] = 500;
+ t['Tcommaaccent'] = 667;
+ t['Cacute'] = 722;
+ t['atilde'] = 500;
+ t['Edotaccent'] = 667;
+ t['scaron'] = 389;
+ t['scedilla'] = 389;
+ t['iacute'] = 278;
+ t['lozenge'] = 494;
+ t['Rcaron'] = 722;
+ t['Gcommaaccent'] = 778;
+ t['ucircumflex'] = 556;
+ t['acircumflex'] = 500;
+ t['Amacron'] = 722;
+ t['rcaron'] = 444;
+ t['ccedilla'] = 444;
+ t['Zdotaccent'] = 667;
+ t['Thorn'] = 611;
+ t['Omacron'] = 778;
+ t['Racute'] = 722;
+ t['Sacute'] = 556;
+ t['dcaron'] = 672;
+ t['Umacron'] = 722;
+ t['uring'] = 556;
+ t['threesuperior'] = 300;
+ t['Ograve'] = 778;
+ t['Agrave'] = 722;
+ t['Abreve'] = 722;
+ t['multiply'] = 570;
+ t['uacute'] = 556;
+ t['Tcaron'] = 667;
+ t['partialdiff'] = 494;
+ t['ydieresis'] = 500;
+ t['Nacute'] = 722;
+ t['icircumflex'] = 278;
+ t['Ecircumflex'] = 667;
+ t['adieresis'] = 500;
+ t['edieresis'] = 444;
+ t['cacute'] = 444;
+ t['nacute'] = 556;
+ t['umacron'] = 556;
+ t['Ncaron'] = 722;
+ t['Iacute'] = 389;
+ t['plusminus'] = 570;
+ t['brokenbar'] = 220;
+ t['registered'] = 747;
+ t['Gbreve'] = 778;
+ t['Idotaccent'] = 389;
+ t['summation'] = 600;
+ t['Egrave'] = 667;
+ t['racute'] = 444;
+ t['omacron'] = 500;
+ t['Zacute'] = 667;
+ t['Zcaron'] = 667;
+ t['greaterequal'] = 549;
+ t['Eth'] = 722;
+ t['Ccedilla'] = 722;
+ t['lcommaaccent'] = 278;
+ t['tcaron'] = 416;
+ t['eogonek'] = 444;
+ t['Uogonek'] = 722;
+ t['Aacute'] = 722;
+ t['Adieresis'] = 722;
+ t['egrave'] = 444;
+ t['zacute'] = 444;
+ t['iogonek'] = 278;
+ t['Oacute'] = 778;
+ t['oacute'] = 500;
+ t['amacron'] = 500;
+ t['sacute'] = 389;
+ t['idieresis'] = 278;
+ t['Ocircumflex'] = 778;
+ t['Ugrave'] = 722;
+ t['Delta'] = 612;
+ t['thorn'] = 556;
+ t['twosuperior'] = 300;
+ t['Odieresis'] = 778;
+ t['mu'] = 556;
+ t['igrave'] = 278;
+ t['ohungarumlaut'] = 500;
+ t['Eogonek'] = 667;
+ t['dcroat'] = 556;
+ t['threequarters'] = 750;
+ t['Scedilla'] = 556;
+ t['lcaron'] = 394;
+ t['Kcommaaccent'] = 778;
+ t['Lacute'] = 667;
+ t['trademark'] = 1000;
+ t['edotaccent'] = 444;
+ t['Igrave'] = 389;
+ t['Imacron'] = 389;
+ t['Lcaron'] = 667;
+ t['onehalf'] = 750;
+ t['lessequal'] = 549;
+ t['ocircumflex'] = 500;
+ t['ntilde'] = 556;
+ t['Uhungarumlaut'] = 722;
+ t['Eacute'] = 667;
+ t['emacron'] = 444;
+ t['gbreve'] = 500;
+ t['onequarter'] = 750;
+ t['Scaron'] = 556;
+ t['Scommaaccent'] = 556;
+ t['Ohungarumlaut'] = 778;
+ t['degree'] = 400;
+ t['ograve'] = 500;
+ t['Ccaron'] = 722;
+ t['ugrave'] = 556;
+ t['radical'] = 549;
+ t['Dcaron'] = 722;
+ t['rcommaaccent'] = 444;
+ t['Ntilde'] = 722;
+ t['otilde'] = 500;
+ t['Rcommaaccent'] = 722;
+ t['Lcommaaccent'] = 667;
+ t['Atilde'] = 722;
+ t['Aogonek'] = 722;
+ t['Aring'] = 722;
+ t['Otilde'] = 778;
+ t['zdotaccent'] = 444;
+ t['Ecaron'] = 667;
+ t['Iogonek'] = 389;
+ t['kcommaaccent'] = 556;
+ t['minus'] = 570;
+ t['Icircumflex'] = 389;
+ t['ncaron'] = 556;
+ t['tcommaaccent'] = 333;
+ t['logicalnot'] = 570;
+ t['odieresis'] = 500;
+ t['udieresis'] = 556;
+ t['notequal'] = 549;
+ t['gcommaaccent'] = 500;
+ t['eth'] = 500;
+ t['zcaron'] = 444;
+ t['ncommaaccent'] = 556;
+ t['onesuperior'] = 300;
+ t['imacron'] = 278;
+ t['Euro'] = 500;
+ });
+ t['Times-BoldItalic'] = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['space'] = 250;
+ t['exclam'] = 389;
+ t['quotedbl'] = 555;
+ t['numbersign'] = 500;
+ t['dollar'] = 500;
+ t['percent'] = 833;
+ t['ampersand'] = 778;
+ t['quoteright'] = 333;
+ t['parenleft'] = 333;
+ t['parenright'] = 333;
+ t['asterisk'] = 500;
+ t['plus'] = 570;
+ t['comma'] = 250;
+ t['hyphen'] = 333;
+ t['period'] = 250;
+ t['slash'] = 278;
+ t['zero'] = 500;
+ t['one'] = 500;
+ t['two'] = 500;
+ t['three'] = 500;
+ t['four'] = 500;
+ t['five'] = 500;
+ t['six'] = 500;
+ t['seven'] = 500;
+ t['eight'] = 500;
+ t['nine'] = 500;
+ t['colon'] = 333;
+ t['semicolon'] = 333;
+ t['less'] = 570;
+ t['equal'] = 570;
+ t['greater'] = 570;
+ t['question'] = 500;
+ t['at'] = 832;
+ t['A'] = 667;
+ t['B'] = 667;
+ t['C'] = 667;
+ t['D'] = 722;
+ t['E'] = 667;
+ t['F'] = 667;
+ t['G'] = 722;
+ t['H'] = 778;
+ t['I'] = 389;
+ t['J'] = 500;
+ t['K'] = 667;
+ t['L'] = 611;
+ t['M'] = 889;
+ t['N'] = 722;
+ t['O'] = 722;
+ t['P'] = 611;
+ t['Q'] = 722;
+ t['R'] = 667;
+ t['S'] = 556;
+ t['T'] = 611;
+ t['U'] = 722;
+ t['V'] = 667;
+ t['W'] = 889;
+ t['X'] = 667;
+ t['Y'] = 611;
+ t['Z'] = 611;
+ t['bracketleft'] = 333;
+ t['backslash'] = 278;
+ t['bracketright'] = 333;
+ t['asciicircum'] = 570;
+ t['underscore'] = 500;
+ t['quoteleft'] = 333;
+ t['a'] = 500;
+ t['b'] = 500;
+ t['c'] = 444;
+ t['d'] = 500;
+ t['e'] = 444;
+ t['f'] = 333;
+ t['g'] = 500;
+ t['h'] = 556;
+ t['i'] = 278;
+ t['j'] = 278;
+ t['k'] = 500;
+ t['l'] = 278;
+ t['m'] = 778;
+ t['n'] = 556;
+ t['o'] = 500;
+ t['p'] = 500;
+ t['q'] = 500;
+ t['r'] = 389;
+ t['s'] = 389;
+ t['t'] = 278;
+ t['u'] = 556;
+ t['v'] = 444;
+ t['w'] = 667;
+ t['x'] = 500;
+ t['y'] = 444;
+ t['z'] = 389;
+ t['braceleft'] = 348;
+ t['bar'] = 220;
+ t['braceright'] = 348;
+ t['asciitilde'] = 570;
+ t['exclamdown'] = 389;
+ t['cent'] = 500;
+ t['sterling'] = 500;
+ t['fraction'] = 167;
+ t['yen'] = 500;
+ t['florin'] = 500;
+ t['section'] = 500;
+ t['currency'] = 500;
+ t['quotesingle'] = 278;
+ t['quotedblleft'] = 500;
+ t['guillemotleft'] = 500;
+ t['guilsinglleft'] = 333;
+ t['guilsinglright'] = 333;
+ t['fi'] = 556;
+ t['fl'] = 556;
+ t['endash'] = 500;
+ t['dagger'] = 500;
+ t['daggerdbl'] = 500;
+ t['periodcentered'] = 250;
+ t['paragraph'] = 500;
+ t['bullet'] = 350;
+ t['quotesinglbase'] = 333;
+ t['quotedblbase'] = 500;
+ t['quotedblright'] = 500;
+ t['guillemotright'] = 500;
+ t['ellipsis'] = 1000;
+ t['perthousand'] = 1000;
+ t['questiondown'] = 500;
+ t['grave'] = 333;
+ t['acute'] = 333;
+ t['circumflex'] = 333;
+ t['tilde'] = 333;
+ t['macron'] = 333;
+ t['breve'] = 333;
+ t['dotaccent'] = 333;
+ t['dieresis'] = 333;
+ t['ring'] = 333;
+ t['cedilla'] = 333;
+ t['hungarumlaut'] = 333;
+ t['ogonek'] = 333;
+ t['caron'] = 333;
+ t['emdash'] = 1000;
+ t['AE'] = 944;
+ t['ordfeminine'] = 266;
+ t['Lslash'] = 611;
+ t['Oslash'] = 722;
+ t['OE'] = 944;
+ t['ordmasculine'] = 300;
+ t['ae'] = 722;
+ t['dotlessi'] = 278;
+ t['lslash'] = 278;
+ t['oslash'] = 500;
+ t['oe'] = 722;
+ t['germandbls'] = 500;
+ t['Idieresis'] = 389;
+ t['eacute'] = 444;
+ t['abreve'] = 500;
+ t['uhungarumlaut'] = 556;
+ t['ecaron'] = 444;
+ t['Ydieresis'] = 611;
+ t['divide'] = 570;
+ t['Yacute'] = 611;
+ t['Acircumflex'] = 667;
+ t['aacute'] = 500;
+ t['Ucircumflex'] = 722;
+ t['yacute'] = 444;
+ t['scommaaccent'] = 389;
+ t['ecircumflex'] = 444;
+ t['Uring'] = 722;
+ t['Udieresis'] = 722;
+ t['aogonek'] = 500;
+ t['Uacute'] = 722;
+ t['uogonek'] = 556;
+ t['Edieresis'] = 667;
+ t['Dcroat'] = 722;
+ t['commaaccent'] = 250;
+ t['copyright'] = 747;
+ t['Emacron'] = 667;
+ t['ccaron'] = 444;
+ t['aring'] = 500;
+ t['Ncommaaccent'] = 722;
+ t['lacute'] = 278;
+ t['agrave'] = 500;
+ t['Tcommaaccent'] = 611;
+ t['Cacute'] = 667;
+ t['atilde'] = 500;
+ t['Edotaccent'] = 667;
+ t['scaron'] = 389;
+ t['scedilla'] = 389;
+ t['iacute'] = 278;
+ t['lozenge'] = 494;
+ t['Rcaron'] = 667;
+ t['Gcommaaccent'] = 722;
+ t['ucircumflex'] = 556;
+ t['acircumflex'] = 500;
+ t['Amacron'] = 667;
+ t['rcaron'] = 389;
+ t['ccedilla'] = 444;
+ t['Zdotaccent'] = 611;
+ t['Thorn'] = 611;
+ t['Omacron'] = 722;
+ t['Racute'] = 667;
+ t['Sacute'] = 556;
+ t['dcaron'] = 608;
+ t['Umacron'] = 722;
+ t['uring'] = 556;
+ t['threesuperior'] = 300;
+ t['Ograve'] = 722;
+ t['Agrave'] = 667;
+ t['Abreve'] = 667;
+ t['multiply'] = 570;
+ t['uacute'] = 556;
+ t['Tcaron'] = 611;
+ t['partialdiff'] = 494;
+ t['ydieresis'] = 444;
+ t['Nacute'] = 722;
+ t['icircumflex'] = 278;
+ t['Ecircumflex'] = 667;
+ t['adieresis'] = 500;
+ t['edieresis'] = 444;
+ t['cacute'] = 444;
+ t['nacute'] = 556;
+ t['umacron'] = 556;
+ t['Ncaron'] = 722;
+ t['Iacute'] = 389;
+ t['plusminus'] = 570;
+ t['brokenbar'] = 220;
+ t['registered'] = 747;
+ t['Gbreve'] = 722;
+ t['Idotaccent'] = 389;
+ t['summation'] = 600;
+ t['Egrave'] = 667;
+ t['racute'] = 389;
+ t['omacron'] = 500;
+ t['Zacute'] = 611;
+ t['Zcaron'] = 611;
+ t['greaterequal'] = 549;
+ t['Eth'] = 722;
+ t['Ccedilla'] = 667;
+ t['lcommaaccent'] = 278;
+ t['tcaron'] = 366;
+ t['eogonek'] = 444;
+ t['Uogonek'] = 722;
+ t['Aacute'] = 667;
+ t['Adieresis'] = 667;
+ t['egrave'] = 444;
+ t['zacute'] = 389;
+ t['iogonek'] = 278;
+ t['Oacute'] = 722;
+ t['oacute'] = 500;
+ t['amacron'] = 500;
+ t['sacute'] = 389;
+ t['idieresis'] = 278;
+ t['Ocircumflex'] = 722;
+ t['Ugrave'] = 722;
+ t['Delta'] = 612;
+ t['thorn'] = 500;
+ t['twosuperior'] = 300;
+ t['Odieresis'] = 722;
+ t['mu'] = 576;
+ t['igrave'] = 278;
+ t['ohungarumlaut'] = 500;
+ t['Eogonek'] = 667;
+ t['dcroat'] = 500;
+ t['threequarters'] = 750;
+ t['Scedilla'] = 556;
+ t['lcaron'] = 382;
+ t['Kcommaaccent'] = 667;
+ t['Lacute'] = 611;
+ t['trademark'] = 1000;
+ t['edotaccent'] = 444;
+ t['Igrave'] = 389;
+ t['Imacron'] = 389;
+ t['Lcaron'] = 611;
+ t['onehalf'] = 750;
+ t['lessequal'] = 549;
+ t['ocircumflex'] = 500;
+ t['ntilde'] = 556;
+ t['Uhungarumlaut'] = 722;
+ t['Eacute'] = 667;
+ t['emacron'] = 444;
+ t['gbreve'] = 500;
+ t['onequarter'] = 750;
+ t['Scaron'] = 556;
+ t['Scommaaccent'] = 556;
+ t['Ohungarumlaut'] = 722;
+ t['degree'] = 400;
+ t['ograve'] = 500;
+ t['Ccaron'] = 667;
+ t['ugrave'] = 556;
+ t['radical'] = 549;
+ t['Dcaron'] = 722;
+ t['rcommaaccent'] = 389;
+ t['Ntilde'] = 722;
+ t['otilde'] = 500;
+ t['Rcommaaccent'] = 667;
+ t['Lcommaaccent'] = 611;
+ t['Atilde'] = 667;
+ t['Aogonek'] = 667;
+ t['Aring'] = 667;
+ t['Otilde'] = 722;
+ t['zdotaccent'] = 389;
+ t['Ecaron'] = 667;
+ t['Iogonek'] = 389;
+ t['kcommaaccent'] = 500;
+ t['minus'] = 606;
+ t['Icircumflex'] = 389;
+ t['ncaron'] = 556;
+ t['tcommaaccent'] = 278;
+ t['logicalnot'] = 606;
+ t['odieresis'] = 500;
+ t['udieresis'] = 556;
+ t['notequal'] = 549;
+ t['gcommaaccent'] = 500;
+ t['eth'] = 500;
+ t['zcaron'] = 389;
+ t['ncommaaccent'] = 556;
+ t['onesuperior'] = 300;
+ t['imacron'] = 278;
+ t['Euro'] = 500;
+ });
+ t['Times-Italic'] = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['space'] = 250;
+ t['exclam'] = 333;
+ t['quotedbl'] = 420;
+ t['numbersign'] = 500;
+ t['dollar'] = 500;
+ t['percent'] = 833;
+ t['ampersand'] = 778;
+ t['quoteright'] = 333;
+ t['parenleft'] = 333;
+ t['parenright'] = 333;
+ t['asterisk'] = 500;
+ t['plus'] = 675;
+ t['comma'] = 250;
+ t['hyphen'] = 333;
+ t['period'] = 250;
+ t['slash'] = 278;
+ t['zero'] = 500;
+ t['one'] = 500;
+ t['two'] = 500;
+ t['three'] = 500;
+ t['four'] = 500;
+ t['five'] = 500;
+ t['six'] = 500;
+ t['seven'] = 500;
+ t['eight'] = 500;
+ t['nine'] = 500;
+ t['colon'] = 333;
+ t['semicolon'] = 333;
+ t['less'] = 675;
+ t['equal'] = 675;
+ t['greater'] = 675;
+ t['question'] = 500;
+ t['at'] = 920;
+ t['A'] = 611;
+ t['B'] = 611;
+ t['C'] = 667;
+ t['D'] = 722;
+ t['E'] = 611;
+ t['F'] = 611;
+ t['G'] = 722;
+ t['H'] = 722;
+ t['I'] = 333;
+ t['J'] = 444;
+ t['K'] = 667;
+ t['L'] = 556;
+ t['M'] = 833;
+ t['N'] = 667;
+ t['O'] = 722;
+ t['P'] = 611;
+ t['Q'] = 722;
+ t['R'] = 611;
+ t['S'] = 500;
+ t['T'] = 556;
+ t['U'] = 722;
+ t['V'] = 611;
+ t['W'] = 833;
+ t['X'] = 611;
+ t['Y'] = 556;
+ t['Z'] = 556;
+ t['bracketleft'] = 389;
+ t['backslash'] = 278;
+ t['bracketright'] = 389;
+ t['asciicircum'] = 422;
+ t['underscore'] = 500;
+ t['quoteleft'] = 333;
+ t['a'] = 500;
+ t['b'] = 500;
+ t['c'] = 444;
+ t['d'] = 500;
+ t['e'] = 444;
+ t['f'] = 278;
+ t['g'] = 500;
+ t['h'] = 500;
+ t['i'] = 278;
+ t['j'] = 278;
+ t['k'] = 444;
+ t['l'] = 278;
+ t['m'] = 722;
+ t['n'] = 500;
+ t['o'] = 500;
+ t['p'] = 500;
+ t['q'] = 500;
+ t['r'] = 389;
+ t['s'] = 389;
+ t['t'] = 278;
+ t['u'] = 500;
+ t['v'] = 444;
+ t['w'] = 667;
+ t['x'] = 444;
+ t['y'] = 444;
+ t['z'] = 389;
+ t['braceleft'] = 400;
+ t['bar'] = 275;
+ t['braceright'] = 400;
+ t['asciitilde'] = 541;
+ t['exclamdown'] = 389;
+ t['cent'] = 500;
+ t['sterling'] = 500;
+ t['fraction'] = 167;
+ t['yen'] = 500;
+ t['florin'] = 500;
+ t['section'] = 500;
+ t['currency'] = 500;
+ t['quotesingle'] = 214;
+ t['quotedblleft'] = 556;
+ t['guillemotleft'] = 500;
+ t['guilsinglleft'] = 333;
+ t['guilsinglright'] = 333;
+ t['fi'] = 500;
+ t['fl'] = 500;
+ t['endash'] = 500;
+ t['dagger'] = 500;
+ t['daggerdbl'] = 500;
+ t['periodcentered'] = 250;
+ t['paragraph'] = 523;
+ t['bullet'] = 350;
+ t['quotesinglbase'] = 333;
+ t['quotedblbase'] = 556;
+ t['quotedblright'] = 556;
+ t['guillemotright'] = 500;
+ t['ellipsis'] = 889;
+ t['perthousand'] = 1000;
+ t['questiondown'] = 500;
+ t['grave'] = 333;
+ t['acute'] = 333;
+ t['circumflex'] = 333;
+ t['tilde'] = 333;
+ t['macron'] = 333;
+ t['breve'] = 333;
+ t['dotaccent'] = 333;
+ t['dieresis'] = 333;
+ t['ring'] = 333;
+ t['cedilla'] = 333;
+ t['hungarumlaut'] = 333;
+ t['ogonek'] = 333;
+ t['caron'] = 333;
+ t['emdash'] = 889;
+ t['AE'] = 889;
+ t['ordfeminine'] = 276;
+ t['Lslash'] = 556;
+ t['Oslash'] = 722;
+ t['OE'] = 944;
+ t['ordmasculine'] = 310;
+ t['ae'] = 667;
+ t['dotlessi'] = 278;
+ t['lslash'] = 278;
+ t['oslash'] = 500;
+ t['oe'] = 667;
+ t['germandbls'] = 500;
+ t['Idieresis'] = 333;
+ t['eacute'] = 444;
+ t['abreve'] = 500;
+ t['uhungarumlaut'] = 500;
+ t['ecaron'] = 444;
+ t['Ydieresis'] = 556;
+ t['divide'] = 675;
+ t['Yacute'] = 556;
+ t['Acircumflex'] = 611;
+ t['aacute'] = 500;
+ t['Ucircumflex'] = 722;
+ t['yacute'] = 444;
+ t['scommaaccent'] = 389;
+ t['ecircumflex'] = 444;
+ t['Uring'] = 722;
+ t['Udieresis'] = 722;
+ t['aogonek'] = 500;
+ t['Uacute'] = 722;
+ t['uogonek'] = 500;
+ t['Edieresis'] = 611;
+ t['Dcroat'] = 722;
+ t['commaaccent'] = 250;
+ t['copyright'] = 760;
+ t['Emacron'] = 611;
+ t['ccaron'] = 444;
+ t['aring'] = 500;
+ t['Ncommaaccent'] = 667;
+ t['lacute'] = 278;
+ t['agrave'] = 500;
+ t['Tcommaaccent'] = 556;
+ t['Cacute'] = 667;
+ t['atilde'] = 500;
+ t['Edotaccent'] = 611;
+ t['scaron'] = 389;
+ t['scedilla'] = 389;
+ t['iacute'] = 278;
+ t['lozenge'] = 471;
+ t['Rcaron'] = 611;
+ t['Gcommaaccent'] = 722;
+ t['ucircumflex'] = 500;
+ t['acircumflex'] = 500;
+ t['Amacron'] = 611;
+ t['rcaron'] = 389;
+ t['ccedilla'] = 444;
+ t['Zdotaccent'] = 556;
+ t['Thorn'] = 611;
+ t['Omacron'] = 722;
+ t['Racute'] = 611;
+ t['Sacute'] = 500;
+ t['dcaron'] = 544;
+ t['Umacron'] = 722;
+ t['uring'] = 500;
+ t['threesuperior'] = 300;
+ t['Ograve'] = 722;
+ t['Agrave'] = 611;
+ t['Abreve'] = 611;
+ t['multiply'] = 675;
+ t['uacute'] = 500;
+ t['Tcaron'] = 556;
+ t['partialdiff'] = 476;
+ t['ydieresis'] = 444;
+ t['Nacute'] = 667;
+ t['icircumflex'] = 278;
+ t['Ecircumflex'] = 611;
+ t['adieresis'] = 500;
+ t['edieresis'] = 444;
+ t['cacute'] = 444;
+ t['nacute'] = 500;
+ t['umacron'] = 500;
+ t['Ncaron'] = 667;
+ t['Iacute'] = 333;
+ t['plusminus'] = 675;
+ t['brokenbar'] = 275;
+ t['registered'] = 760;
+ t['Gbreve'] = 722;
+ t['Idotaccent'] = 333;
+ t['summation'] = 600;
+ t['Egrave'] = 611;
+ t['racute'] = 389;
+ t['omacron'] = 500;
+ t['Zacute'] = 556;
+ t['Zcaron'] = 556;
+ t['greaterequal'] = 549;
+ t['Eth'] = 722;
+ t['Ccedilla'] = 667;
+ t['lcommaaccent'] = 278;
+ t['tcaron'] = 300;
+ t['eogonek'] = 444;
+ t['Uogonek'] = 722;
+ t['Aacute'] = 611;
+ t['Adieresis'] = 611;
+ t['egrave'] = 444;
+ t['zacute'] = 389;
+ t['iogonek'] = 278;
+ t['Oacute'] = 722;
+ t['oacute'] = 500;
+ t['amacron'] = 500;
+ t['sacute'] = 389;
+ t['idieresis'] = 278;
+ t['Ocircumflex'] = 722;
+ t['Ugrave'] = 722;
+ t['Delta'] = 612;
+ t['thorn'] = 500;
+ t['twosuperior'] = 300;
+ t['Odieresis'] = 722;
+ t['mu'] = 500;
+ t['igrave'] = 278;
+ t['ohungarumlaut'] = 500;
+ t['Eogonek'] = 611;
+ t['dcroat'] = 500;
+ t['threequarters'] = 750;
+ t['Scedilla'] = 500;
+ t['lcaron'] = 300;
+ t['Kcommaaccent'] = 667;
+ t['Lacute'] = 556;
+ t['trademark'] = 980;
+ t['edotaccent'] = 444;
+ t['Igrave'] = 333;
+ t['Imacron'] = 333;
+ t['Lcaron'] = 611;
+ t['onehalf'] = 750;
+ t['lessequal'] = 549;
+ t['ocircumflex'] = 500;
+ t['ntilde'] = 500;
+ t['Uhungarumlaut'] = 722;
+ t['Eacute'] = 611;
+ t['emacron'] = 444;
+ t['gbreve'] = 500;
+ t['onequarter'] = 750;
+ t['Scaron'] = 500;
+ t['Scommaaccent'] = 500;
+ t['Ohungarumlaut'] = 722;
+ t['degree'] = 400;
+ t['ograve'] = 500;
+ t['Ccaron'] = 667;
+ t['ugrave'] = 500;
+ t['radical'] = 453;
+ t['Dcaron'] = 722;
+ t['rcommaaccent'] = 389;
+ t['Ntilde'] = 667;
+ t['otilde'] = 500;
+ t['Rcommaaccent'] = 611;
+ t['Lcommaaccent'] = 556;
+ t['Atilde'] = 611;
+ t['Aogonek'] = 611;
+ t['Aring'] = 611;
+ t['Otilde'] = 722;
+ t['zdotaccent'] = 389;
+ t['Ecaron'] = 611;
+ t['Iogonek'] = 333;
+ t['kcommaaccent'] = 444;
+ t['minus'] = 675;
+ t['Icircumflex'] = 333;
+ t['ncaron'] = 500;
+ t['tcommaaccent'] = 278;
+ t['logicalnot'] = 675;
+ t['odieresis'] = 500;
+ t['udieresis'] = 500;
+ t['notequal'] = 549;
+ t['gcommaaccent'] = 500;
+ t['eth'] = 500;
+ t['zcaron'] = 389;
+ t['ncommaaccent'] = 500;
+ t['onesuperior'] = 300;
+ t['imacron'] = 278;
+ t['Euro'] = 500;
+ });
+ t['ZapfDingbats'] = (0, _core_utils.getLookupTableFactory)(function (t) {
+ t['space'] = 278;
+ t['a1'] = 974;
+ t['a2'] = 961;
+ t['a202'] = 974;
+ t['a3'] = 980;
+ t['a4'] = 719;
+ t['a5'] = 789;
+ t['a119'] = 790;
+ t['a118'] = 791;
+ t['a117'] = 690;
+ t['a11'] = 960;
+ t['a12'] = 939;
+ t['a13'] = 549;
+ t['a14'] = 855;
+ t['a15'] = 911;
+ t['a16'] = 933;
+ t['a105'] = 911;
+ t['a17'] = 945;
+ t['a18'] = 974;
+ t['a19'] = 755;
+ t['a20'] = 846;
+ t['a21'] = 762;
+ t['a22'] = 761;
+ t['a23'] = 571;
+ t['a24'] = 677;
+ t['a25'] = 763;
+ t['a26'] = 760;
+ t['a27'] = 759;
+ t['a28'] = 754;
+ t['a6'] = 494;
+ t['a7'] = 552;
+ t['a8'] = 537;
+ t['a9'] = 577;
+ t['a10'] = 692;
+ t['a29'] = 786;
+ t['a30'] = 788;
+ t['a31'] = 788;
+ t['a32'] = 790;
+ t['a33'] = 793;
+ t['a34'] = 794;
+ t['a35'] = 816;
+ t['a36'] = 823;
+ t['a37'] = 789;
+ t['a38'] = 841;
+ t['a39'] = 823;
+ t['a40'] = 833;
+ t['a41'] = 816;
+ t['a42'] = 831;
+ t['a43'] = 923;
+ t['a44'] = 744;
+ t['a45'] = 723;
+ t['a46'] = 749;
+ t['a47'] = 790;
+ t['a48'] = 792;
+ t['a49'] = 695;
+ t['a50'] = 776;
+ t['a51'] = 768;
+ t['a52'] = 792;
+ t['a53'] = 759;
+ t['a54'] = 707;
+ t['a55'] = 708;
+ t['a56'] = 682;
+ t['a57'] = 701;
+ t['a58'] = 826;
+ t['a59'] = 815;
+ t['a60'] = 789;
+ t['a61'] = 789;
+ t['a62'] = 707;
+ t['a63'] = 687;
+ t['a64'] = 696;
+ t['a65'] = 689;
+ t['a66'] = 786;
+ t['a67'] = 787;
+ t['a68'] = 713;
+ t['a69'] = 791;
+ t['a70'] = 785;
+ t['a71'] = 791;
+ t['a72'] = 873;
+ t['a73'] = 761;
+ t['a74'] = 762;
+ t['a203'] = 762;
+ t['a75'] = 759;
+ t['a204'] = 759;
+ t['a76'] = 892;
+ t['a77'] = 892;
+ t['a78'] = 788;
+ t['a79'] = 784;
+ t['a81'] = 438;
+ t['a82'] = 138;
+ t['a83'] = 277;
+ t['a84'] = 415;
+ t['a97'] = 392;
+ t['a98'] = 392;
+ t['a99'] = 668;
+ t['a100'] = 668;
+ t['a89'] = 390;
+ t['a90'] = 390;
+ t['a93'] = 317;
+ t['a94'] = 317;
+ t['a91'] = 276;
+ t['a92'] = 276;
+ t['a205'] = 509;
+ t['a85'] = 509;
+ t['a206'] = 410;
+ t['a86'] = 410;
+ t['a87'] = 234;
+ t['a88'] = 234;
+ t['a95'] = 334;
+ t['a96'] = 334;
+ t['a101'] = 732;
+ t['a102'] = 544;
+ t['a103'] = 544;
+ t['a104'] = 910;
+ t['a106'] = 667;
+ t['a107'] = 760;
+ t['a108'] = 760;
+ t['a112'] = 776;
+ t['a111'] = 595;
+ t['a110'] = 694;
+ t['a109'] = 626;
+ t['a120'] = 788;
+ t['a121'] = 788;
+ t['a122'] = 788;
+ t['a123'] = 788;
+ t['a124'] = 788;
+ t['a125'] = 788;
+ t['a126'] = 788;
+ t['a127'] = 788;
+ t['a128'] = 788;
+ t['a129'] = 788;
+ t['a130'] = 788;
+ t['a131'] = 788;
+ t['a132'] = 788;
+ t['a133'] = 788;
+ t['a134'] = 788;
+ t['a135'] = 788;
+ t['a136'] = 788;
+ t['a137'] = 788;
+ t['a138'] = 788;
+ t['a139'] = 788;
+ t['a140'] = 788;
+ t['a141'] = 788;
+ t['a142'] = 788;
+ t['a143'] = 788;
+ t['a144'] = 788;
+ t['a145'] = 788;
+ t['a146'] = 788;
+ t['a147'] = 788;
+ t['a148'] = 788;
+ t['a149'] = 788;
+ t['a150'] = 788;
+ t['a151'] = 788;
+ t['a152'] = 788;
+ t['a153'] = 788;
+ t['a154'] = 788;
+ t['a155'] = 788;
+ t['a156'] = 788;
+ t['a157'] = 788;
+ t['a158'] = 788;
+ t['a159'] = 788;
+ t['a160'] = 894;
+ t['a161'] = 838;
+ t['a163'] = 1016;
+ t['a164'] = 458;
+ t['a196'] = 748;
+ t['a165'] = 924;
+ t['a192'] = 748;
+ t['a166'] = 918;
+ t['a167'] = 927;
+ t['a168'] = 928;
+ t['a169'] = 928;
+ t['a170'] = 834;
+ t['a171'] = 873;
+ t['a172'] = 828;
+ t['a173'] = 924;
+ t['a162'] = 924;
+ t['a174'] = 917;
+ t['a175'] = 930;
+ t['a176'] = 931;
+ t['a177'] = 463;
+ t['a178'] = 883;
+ t['a179'] = 836;
+ t['a193'] = 836;
+ t['a180'] = 867;
+ t['a199'] = 867;
+ t['a181'] = 696;
+ t['a200'] = 696;
+ t['a182'] = 874;
+ t['a201'] = 874;
+ t['a183'] = 760;
+ t['a184'] = 946;
+ t['a197'] = 771;
+ t['a185'] = 865;
+ t['a194'] = 771;
+ t['a198'] = 888;
+ t['a186'] = 967;
+ t['a195'] = 888;
+ t['a187'] = 831;
+ t['a188'] = 873;
+ t['a189'] = 927;
+ t['a190'] = 970;
+ t['a191'] = 918;
+ });
+});
+exports.getMetrics = getMetrics;
+
+/***/ }),
+/* 186 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.isPDFFunction = isPDFFunction;
+exports.PostScriptCompiler = exports.PostScriptEvaluator = exports.PDFFunctionFactory = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _ps_parser = __w_pdfjs_require__(187);
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
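+ // Checks once whether dynamic code generation (the Function constructor) is usable in this
+ // environment and caches the result on first access via shadow().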
+var IsEvalSupportedCached = {
+ get value() {
+ return (0, _util.shadow)(this, 'value', (0, _util.isEvalSupported)());
+ }
+
+};
+
+var PDFFunctionFactory =
+/*#__PURE__*/
+function () {
+ function PDFFunctionFactory(_ref) {
+ var xref = _ref.xref,
+ _ref$isEvalSupported = _ref.isEvalSupported,
+ isEvalSupported = _ref$isEvalSupported === void 0 ? true : _ref$isEvalSupported;
+
+ _classCallCheck(this, PDFFunctionFactory);
+
+ this.xref = xref;
+ this.isEvalSupported = isEvalSupported !== false;
+ }
+
+ _createClass(PDFFunctionFactory, [{
+ key: "create",
+ value: function create(fn) {
+ return PDFFunction.parse({
+ xref: this.xref,
+ isEvalSupported: this.isEvalSupported,
+ fn: fn
+ });
+ }
+ }, {
+ key: "createFromArray",
+ value: function createFromArray(fnObj) {
+ return PDFFunction.parseArray({
+ xref: this.xref,
+ isEvalSupported: this.isEvalSupported,
+ fnObj: fnObj
+ });
+ }
+ }]);
+
+ return PDFFunctionFactory;
+}();
+
+exports.PDFFunctionFactory = PDFFunctionFactory;
+
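+ // Returns the array unchanged if it already contains only numbers, otherwise a copy with every
+ // entry coerced to a number; returns null for non-array input.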
+function toNumberArray(arr) {
+ if (!Array.isArray(arr)) {
+ return null;
+ }
+
+ var length = arr.length;
+
+ for (var i = 0; i < length; i++) {
+ if (typeof arr[i] !== 'number') {
+ var result = new Array(length);
+
+ for (var _i = 0; _i < length; _i++) {
+ result[_i] = +arr[_i];
+ }
+
+ return result;
+ }
+ }
+
+ return arr;
+}
+
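+ // PDF function types (FunctionType entry): 0 = sampled, 2 = exponential interpolation,
+ // 3 = stitching, 4 = PostScript calculator. There is no type 1.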
+var PDFFunction = function PDFFunctionClosure() {
+ var CONSTRUCT_SAMPLED = 0;
+ var CONSTRUCT_INTERPOLATED = 2;
+ var CONSTRUCT_STICHED = 3;
+ var CONSTRUCT_POSTSCRIPT = 4;
+ return {
+ getSampleArray: function getSampleArray(size, outputSize, bps, stream) {
+ var i, ii;
+ var length = 1;
+
+ for (i = 0, ii = size.length; i < ii; i++) {
+ length *= size[i];
+ }
+
+ length *= outputSize;
+ var array = new Array(length);
+ var codeSize = 0;
+ var codeBuf = 0;
+ var sampleMul = 1.0 / (Math.pow(2.0, bps) - 1);
+ var strBytes = stream.getBytes((length * bps + 7) / 8);
+ var strIdx = 0;
+
+ for (i = 0; i < length; i++) {
+ while (codeSize < bps) {
+ codeBuf <<= 8;
+ codeBuf |= strBytes[strIdx++];
+ codeSize += 8;
+ }
+
+ codeSize -= bps;
+ array[i] = (codeBuf >> codeSize) * sampleMul;
+ codeBuf &= (1 << codeSize) - 1;
+ }
+
+ return array;
+ },
+ getIR: function getIR(_ref2) {
+ var xref = _ref2.xref,
+ isEvalSupported = _ref2.isEvalSupported,
+ fn = _ref2.fn;
+ var dict = fn.dict;
+
+ if (!dict) {
+ dict = fn;
+ }
+
+ var types = [this.constructSampled, null, this.constructInterpolated, this.constructStiched, this.constructPostScript];
+ var typeNum = dict.get('FunctionType');
+ var typeFn = types[typeNum];
+
+ if (!typeFn) {
+ throw new _util.FormatError('Unknown type of function');
+ }
+
+ return typeFn.call(this, {
+ xref: xref,
+ isEvalSupported: isEvalSupported,
+ fn: fn,
+ dict: dict
+ });
+ },
+ fromIR: function fromIR(_ref3) {
+ var xref = _ref3.xref,
+ isEvalSupported = _ref3.isEvalSupported,
+ IR = _ref3.IR;
+ var type = IR[0];
+
+ switch (type) {
+ case CONSTRUCT_SAMPLED:
+ return this.constructSampledFromIR({
+ xref: xref,
+ isEvalSupported: isEvalSupported,
+ IR: IR
+ });
+
+ case CONSTRUCT_INTERPOLATED:
+ return this.constructInterpolatedFromIR({
+ xref: xref,
+ isEvalSupported: isEvalSupported,
+ IR: IR
+ });
+
+ case CONSTRUCT_STICHED:
+ return this.constructStichedFromIR({
+ xref: xref,
+ isEvalSupported: isEvalSupported,
+ IR: IR
+ });
+
+ default:
+ return this.constructPostScriptFromIR({
+ xref: xref,
+ isEvalSupported: isEvalSupported,
+ IR: IR
+ });
+ }
+ },
+ parse: function parse(_ref4) {
+ var xref = _ref4.xref,
+ isEvalSupported = _ref4.isEvalSupported,
+ fn = _ref4.fn;
+ var IR = this.getIR({
+ xref: xref,
+ isEvalSupported: isEvalSupported,
+ fn: fn
+ });
+ return this.fromIR({
+ xref: xref,
+ isEvalSupported: isEvalSupported,
+ IR: IR
+ });
+ },
+ parseArray: function parseArray(_ref5) {
+ var xref = _ref5.xref,
+ isEvalSupported = _ref5.isEvalSupported,
+ fnObj = _ref5.fnObj;
+
+ if (!Array.isArray(fnObj)) {
+ return this.parse({
+ xref: xref,
+ isEvalSupported: isEvalSupported,
+ fn: fnObj
+ });
+ }
+
+ var fnArray = [];
+
+ for (var j = 0, jj = fnObj.length; j < jj; j++) {
+ fnArray.push(this.parse({
+ xref: xref,
+ isEvalSupported: isEvalSupported,
+ fn: xref.fetchIfRef(fnObj[j])
+ }));
+ }
+
+ return function (src, srcOffset, dest, destOffset) {
+ for (var i = 0, ii = fnArray.length; i < ii; i++) {
+ fnArray[i](src, srcOffset, dest, destOffset + i);
+ }
+ };
+ },
+ constructSampled: function constructSampled(_ref6) {
+ var xref = _ref6.xref,
+ isEvalSupported = _ref6.isEvalSupported,
+ fn = _ref6.fn,
+ dict = _ref6.dict;
+
+ function toMultiArray(arr) {
+ var inputLength = arr.length;
+ var out = [];
+ var index = 0;
+
+ for (var i = 0; i < inputLength; i += 2) {
+ out[index] = [arr[i], arr[i + 1]];
+ ++index;
+ }
+
+ return out;
+ }
+
+ var domain = toNumberArray(dict.getArray('Domain'));
+ var range = toNumberArray(dict.getArray('Range'));
+
+ if (!domain || !range) {
+ throw new _util.FormatError('No domain or range');
+ }
+
+ var inputSize = domain.length / 2;
+ var outputSize = range.length / 2;
+ domain = toMultiArray(domain);
+ range = toMultiArray(range);
+ var size = toNumberArray(dict.getArray('Size'));
+ var bps = dict.get('BitsPerSample');
+ var order = dict.get('Order') || 1;
+
+ if (order !== 1) {
+ (0, _util.info)('No support for cubic spline interpolation: ' + order);
+ }
+
+ var encode = toNumberArray(dict.getArray('Encode'));
+
+ if (!encode) {
+ encode = [];
+
+ for (var i = 0; i < inputSize; ++i) {
+ encode.push([0, size[i] - 1]);
+ }
+ } else {
+ encode = toMultiArray(encode);
+ }
+
+ var decode = toNumberArray(dict.getArray('Decode'));
+
+ if (!decode) {
+ decode = range;
+ } else {
+ decode = toMultiArray(decode);
+ }
+
+ var samples = this.getSampleArray(size, outputSize, bps, fn);
+ return [CONSTRUCT_SAMPLED, inputSize, domain, encode, decode, samples, size, outputSize, Math.pow(2, bps) - 1, range];
+ },
+ constructSampledFromIR: function constructSampledFromIR(_ref7) {
+ var xref = _ref7.xref,
+ isEvalSupported = _ref7.isEvalSupported,
+ IR = _ref7.IR;
+
+ function interpolate(x, xmin, xmax, ymin, ymax) {
+ return ymin + (x - xmin) * ((ymax - ymin) / (xmax - xmin));
+ }
+
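+ // Evaluates a sampled (type 0) function: the input is encoded into sample-grid coordinates and
+ // the output is obtained by multilinear interpolation over the 2^m corners of the enclosing
+ // grid cell, then decoded and clamped to the declared range.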
+ return function constructSampledFromIRResult(src, srcOffset, dest, destOffset) {
+ var m = IR[1];
+ var domain = IR[2];
+ var encode = IR[3];
+ var decode = IR[4];
+ var samples = IR[5];
+ var size = IR[6];
+ var n = IR[7];
+ var range = IR[9];
+ var cubeVertices = 1 << m;
+ var cubeN = new Float64Array(cubeVertices);
+ var cubeVertex = new Uint32Array(cubeVertices);
+ var i, j;
+
+ for (j = 0; j < cubeVertices; j++) {
+ cubeN[j] = 1;
+ }
+
+ var k = n,
+ pos = 1;
+
+ for (i = 0; i < m; ++i) {
+ var domain_2i = domain[i][0];
+ var domain_2i_1 = domain[i][1];
+ var xi = Math.min(Math.max(src[srcOffset + i], domain_2i), domain_2i_1);
+ var e = interpolate(xi, domain_2i, domain_2i_1, encode[i][0], encode[i][1]);
+ var size_i = size[i];
+ e = Math.min(Math.max(e, 0), size_i - 1);
+ var e0 = e < size_i - 1 ? Math.floor(e) : e - 1;
+ var n0 = e0 + 1 - e;
+ var n1 = e - e0;
+ var offset0 = e0 * k;
+ var offset1 = offset0 + k;
+
+ for (j = 0; j < cubeVertices; j++) {
+ if (j & pos) {
+ cubeN[j] *= n1;
+ cubeVertex[j] += offset1;
+ } else {
+ cubeN[j] *= n0;
+ cubeVertex[j] += offset0;
+ }
+ }
+
+ k *= size_i;
+ pos <<= 1;
+ }
+
+ for (j = 0; j < n; ++j) {
+ var rj = 0;
+
+ for (i = 0; i < cubeVertices; i++) {
+ rj += samples[cubeVertex[i] + j] * cubeN[i];
+ }
+
+ rj = interpolate(rj, 0, 1, decode[j][0], decode[j][1]);
+ dest[destOffset + j] = Math.min(Math.max(rj, range[j][0]), range[j][1]);
+ }
+ };
+ },
+ constructInterpolated: function constructInterpolated(_ref8) {
+ var xref = _ref8.xref,
+ isEvalSupported = _ref8.isEvalSupported,
+ fn = _ref8.fn,
+ dict = _ref8.dict;
+ var c0 = toNumberArray(dict.getArray('C0')) || [0];
+ var c1 = toNumberArray(dict.getArray('C1')) || [1];
+ var n = dict.get('N');
+ var length = c0.length;
+ var diff = [];
+
+ for (var i = 0; i < length; ++i) {
+ diff.push(c1[i] - c0[i]);
+ }
+
+ return [CONSTRUCT_INTERPOLATED, c0, diff, n];
+ },
+ constructInterpolatedFromIR: function constructInterpolatedFromIR(_ref9) {
+ var xref = _ref9.xref,
+ isEvalSupported = _ref9.isEvalSupported,
+ IR = _ref9.IR;
+ var c0 = IR[1];
+ var diff = IR[2];
+ var n = IR[3];
+ var length = diff.length;
+ return function constructInterpolatedFromIRResult(src, srcOffset, dest, destOffset) {
+ var x = n === 1 ? src[srcOffset] : Math.pow(src[srcOffset], n);
+
+ for (var j = 0; j < length; ++j) {
+ dest[destOffset + j] = c0[j] + x * diff[j];
+ }
+ };
+ },
+ constructStiched: function constructStiched(_ref10) {
+ var xref = _ref10.xref,
+ isEvalSupported = _ref10.isEvalSupported,
+ fn = _ref10.fn,
+ dict = _ref10.dict;
+ var domain = toNumberArray(dict.getArray('Domain'));
+
+ if (!domain) {
+ throw new _util.FormatError('No domain');
+ }
+
+ var inputSize = domain.length / 2;
+
+ if (inputSize !== 1) {
+ throw new _util.FormatError('Bad domain for stitched function');
+ }
+
+ var fnRefs = dict.get('Functions');
+ var fns = [];
+
+ for (var i = 0, ii = fnRefs.length; i < ii; ++i) {
+ fns.push(this.parse({
+ xref: xref,
+ isEvalSupported: isEvalSupported,
+ fn: xref.fetchIfRef(fnRefs[i])
+ }));
+ }
+
+ var bounds = toNumberArray(dict.getArray('Bounds'));
+ var encode = toNumberArray(dict.getArray('Encode'));
+ return [CONSTRUCT_STICHED, domain, bounds, encode, fns];
+ },
+ constructStichedFromIR: function constructStichedFromIR(_ref11) {
+ var xref = _ref11.xref,
+ isEvalSupported = _ref11.isEvalSupported,
+ IR = _ref11.IR;
+ var domain = IR[1];
+ var bounds = IR[2];
+ var encode = IR[3];
+ var fns = IR[4];
+ var tmpBuf = new Float32Array(1);
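+ // Evaluates a stitching (type 3) function: clip the input to the domain, find the sub-domain it
+ // falls into via Bounds, re-encode it with Encode, and delegate to the matching sub-function.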
+ return function constructStichedFromIRResult(src, srcOffset, dest, destOffset) {
+ var clip = function constructStichedFromIRClip(v, min, max) {
+ if (v > max) {
+ v = max;
+ } else if (v < min) {
+ v = min;
+ }
+
+ return v;
+ };
+
+ var v = clip(src[srcOffset], domain[0], domain[1]);
+
+ for (var i = 0, ii = bounds.length; i < ii; ++i) {
+ if (v < bounds[i]) {
+ break;
+ }
+ }
+
+ var dmin = domain[0];
+
+ if (i > 0) {
+ dmin = bounds[i - 1];
+ }
+
+ var dmax = domain[1];
+
+ if (i < bounds.length) {
+ dmax = bounds[i];
+ }
+
+ var rmin = encode[2 * i];
+ var rmax = encode[2 * i + 1];
+ tmpBuf[0] = dmin === dmax ? rmin : rmin + (v - dmin) * (rmax - rmin) / (dmax - dmin);
+ fns[i](tmpBuf, 0, dest, destOffset);
+ };
+ },
+ constructPostScript: function constructPostScript(_ref12) {
+ var xref = _ref12.xref,
+ isEvalSupported = _ref12.isEvalSupported,
+ fn = _ref12.fn,
+ dict = _ref12.dict;
+ var domain = toNumberArray(dict.getArray('Domain'));
+ var range = toNumberArray(dict.getArray('Range'));
+
+ if (!domain) {
+ throw new _util.FormatError('No domain.');
+ }
+
+ if (!range) {
+ throw new _util.FormatError('No range.');
+ }
+
+ var lexer = new _ps_parser.PostScriptLexer(fn);
+ var parser = new _ps_parser.PostScriptParser(lexer);
+ var code = parser.parse();
+ return [CONSTRUCT_POSTSCRIPT, domain, range, code];
+ },
+ constructPostScriptFromIR: function constructPostScriptFromIR(_ref13) {
+ var xref = _ref13.xref,
+ isEvalSupported = _ref13.isEvalSupported,
+ IR = _ref13.IR;
+ var domain = IR[1];
+ var range = IR[2];
+ var code = IR[3];
+
+ if (isEvalSupported && IsEvalSupportedCached.value) {
+ var compiled = new PostScriptCompiler().compile(code, domain, range);
+
+ if (compiled) {
+ return new Function('src', 'srcOffset', 'dest', 'destOffset', compiled);
+ }
+ }
+
+ (0, _util.info)('Unable to compile PS function');
+ var numOutputs = range.length >> 1;
+ var numInputs = domain.length >> 1;
+ var evaluator = new PostScriptEvaluator(code);
+ var cache = Object.create(null);
+ var MAX_CACHE_SIZE = 2048 * 4;
+ var cache_available = MAX_CACHE_SIZE;
+ var tmpBuf = new Float32Array(numInputs);
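+ // Interpreted fallback when the program cannot be compiled: results are memoized per input
+ // tuple (up to MAX_CACHE_SIZE entries) since PostScript evaluation is comparatively slow.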
+ return function constructPostScriptFromIRResult(src, srcOffset, dest, destOffset) {
+ var i, value;
+ var key = '';
+ var input = tmpBuf;
+
+ for (i = 0; i < numInputs; i++) {
+ value = src[srcOffset + i];
+ input[i] = value;
+ key += value + '_';
+ }
+
+ var cachedValue = cache[key];
+
+ if (cachedValue !== undefined) {
+ dest.set(cachedValue, destOffset);
+ return;
+ }
+
+ var output = new Float32Array(numOutputs);
+ var stack = evaluator.execute(input);
+ var stackIndex = stack.length - numOutputs;
+
+ for (i = 0; i < numOutputs; i++) {
+ value = stack[stackIndex + i];
+ var bound = range[i * 2];
+
+ if (value < bound) {
+ value = bound;
+ } else {
+ bound = range[i * 2 + 1];
+
+ if (value > bound) {
+ value = bound;
+ }
+ }
+
+ output[i] = value;
+ }
+
+ if (cache_available > 0) {
+ cache_available--;
+ cache[key] = output;
+ }
+
+ dest.set(output, destOffset);
+ };
+ }
+ };
+}();
+
+function isPDFFunction(v) {
+ var fnDict;
+
+ if (_typeof(v) !== 'object') {
+ return false;
+ } else if ((0, _primitives.isDict)(v)) {
+ fnDict = v;
+ } else if ((0, _primitives.isStream)(v)) {
+ fnDict = v.dict;
+ } else {
+ return false;
+ }
+
+ return fnDict.has('FunctionType');
+}
+
+var PostScriptStack = function PostScriptStackClosure() {
+ var MAX_STACK_SIZE = 100;
+
+ function PostScriptStack(initialStack) {
+ this.stack = !initialStack ? [] : Array.prototype.slice.call(initialStack, 0);
+ }
+
+ PostScriptStack.prototype = {
+ push: function PostScriptStack_push(value) {
+ if (this.stack.length >= MAX_STACK_SIZE) {
+ throw new Error('PostScript function stack overflow.');
+ }
+
+ this.stack.push(value);
+ },
+ pop: function PostScriptStack_pop() {
+ if (this.stack.length <= 0) {
+ throw new Error('PostScript function stack underflow.');
+ }
+
+ return this.stack.pop();
+ },
+ copy: function PostScriptStack_copy(n) {
+ if (this.stack.length + n >= MAX_STACK_SIZE) {
+ throw new Error('PostScript function stack overflow.');
+ }
+
+ var stack = this.stack;
+
+ for (var i = stack.length - n, j = n - 1; j >= 0; j--, i++) {
+ stack.push(stack[i]);
+ }
+ },
+ index: function PostScriptStack_index(n) {
+ this.push(this.stack[this.stack.length - n - 1]);
+ },
+ roll: function PostScriptStack_roll(n, p) {
+ var stack = this.stack;
+ var l = stack.length - n;
+ var r = stack.length - 1,
+ c = l + (p - Math.floor(p / n) * n),
+ i,
+ j,
+ t;
+
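+ // Rotate the top n stack entries by p positions using the classic three-reversal trick.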
+ for (i = l, j = r; i < j; i++, j--) {
+ t = stack[i];
+ stack[i] = stack[j];
+ stack[j] = t;
+ }
+
+ for (i = l, j = c - 1; i < j; i++, j--) {
+ t = stack[i];
+ stack[i] = stack[j];
+ stack[j] = t;
+ }
+
+ for (i = c, j = r; i < j; i++, j--) {
+ t = stack[i];
+ stack[i] = stack[j];
+ stack[j] = t;
+ }
+ }
+ };
+ return PostScriptStack;
+}();
+
+var PostScriptEvaluator = function PostScriptEvaluatorClosure() {
+ function PostScriptEvaluator(operators) {
+ this.operators = operators;
+ }
+
+ PostScriptEvaluator.prototype = {
+ execute: function PostScriptEvaluator_execute(initialStack) {
+ var stack = new PostScriptStack(initialStack);
+ var counter = 0;
+ var operators = this.operators;
+ var length = operators.length;
+ var operator, a, b;
+
+ while (counter < length) {
+ operator = operators[counter++];
+
+ if (typeof operator === 'number') {
+ stack.push(operator);
+ continue;
+ }
+
+ switch (operator) {
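+ // 'jz' (jump if the popped condition is falsy) and 'j' (unconditional jump) are emitted by
+ // PostScriptParser for if/ifelse; their targets are absolute operator indices pushed onto the
+ // stack just before them.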
+ case 'jz':
+ b = stack.pop();
+ a = stack.pop();
+
+ if (!a) {
+ counter = b;
+ }
+
+ break;
+
+ case 'j':
+ a = stack.pop();
+ counter = a;
+ break;
+
+ case 'abs':
+ a = stack.pop();
+ stack.push(Math.abs(a));
+ break;
+
+ case 'add':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(a + b);
+ break;
+
+ case 'and':
+ b = stack.pop();
+ a = stack.pop();
+
+ if ((0, _util.isBool)(a) && (0, _util.isBool)(b)) {
+ stack.push(a && b);
+ } else {
+ stack.push(a & b);
+ }
+
+ break;
+
+ case 'atan':
+ a = stack.pop();
+ stack.push(Math.atan(a));
+ break;
+
+ case 'bitshift':
+ b = stack.pop();
+ a = stack.pop();
+
+ if (a > 0) {
+ stack.push(a << b);
+ } else {
+ stack.push(a >> b);
+ }
+
+ break;
+
+ case 'ceiling':
+ a = stack.pop();
+ stack.push(Math.ceil(a));
+ break;
+
+ case 'copy':
+ a = stack.pop();
+ stack.copy(a);
+ break;
+
+ case 'cos':
+ a = stack.pop();
+ stack.push(Math.cos(a));
+ break;
+
+ case 'cvi':
+ a = stack.pop() | 0;
+ stack.push(a);
+ break;
+
+ case 'cvr':
+ break;
+
+ case 'div':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(a / b);
+ break;
+
+ case 'dup':
+ stack.copy(1);
+ break;
+
+ case 'eq':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(a === b);
+ break;
+
+ case 'exch':
+ stack.roll(2, 1);
+ break;
+
+ case 'exp':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(Math.pow(a, b));
+ break;
+
+ case 'false':
+ stack.push(false);
+ break;
+
+ case 'floor':
+ a = stack.pop();
+ stack.push(Math.floor(a));
+ break;
+
+ case 'ge':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(a >= b);
+ break;
+
+ case 'gt':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(a > b);
+ break;
+
+ case 'idiv':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(a / b | 0);
+ break;
+
+ case 'index':
+ a = stack.pop();
+ stack.index(a);
+ break;
+
+ case 'le':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(a <= b);
+ break;
+
+ case 'ln':
+ a = stack.pop();
+ stack.push(Math.log(a));
+ break;
+
+ case 'log':
+ a = stack.pop();
+ stack.push(Math.log(a) / Math.LN10);
+ break;
+
+ case 'lt':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(a < b);
+ break;
+
+ case 'mod':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(a % b);
+ break;
+
+ case 'mul':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(a * b);
+ break;
+
+ case 'ne':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(a !== b);
+ break;
+
+ case 'neg':
+ a = stack.pop();
+ stack.push(-a);
+ break;
+
+ case 'not':
+ a = stack.pop();
+
+ if ((0, _util.isBool)(a)) {
+ stack.push(!a);
+ } else {
+ stack.push(~a);
+ }
+
+ break;
+
+ case 'or':
+ b = stack.pop();
+ a = stack.pop();
+
+ if ((0, _util.isBool)(a) && (0, _util.isBool)(b)) {
+ stack.push(a || b);
+ } else {
+ stack.push(a | b);
+ }
+
+ break;
+
+ case 'pop':
+ stack.pop();
+ break;
+
+ case 'roll':
+ b = stack.pop();
+ a = stack.pop();
+ stack.roll(a, b);
+ break;
+
+ case 'round':
+ a = stack.pop();
+ stack.push(Math.round(a));
+ break;
+
+ case 'sin':
+ a = stack.pop();
+ stack.push(Math.sin(a));
+ break;
+
+ case 'sqrt':
+ a = stack.pop();
+ stack.push(Math.sqrt(a));
+ break;
+
+ case 'sub':
+ b = stack.pop();
+ a = stack.pop();
+ stack.push(a - b);
+ break;
+
+ case 'true':
+ stack.push(true);
+ break;
+
+ case 'truncate':
+ a = stack.pop();
+ a = a < 0 ? Math.ceil(a) : Math.floor(a);
+ stack.push(a);
+ break;
+
+ case 'xor':
+ b = stack.pop();
+ a = stack.pop();
+
+ if ((0, _util.isBool)(a) && (0, _util.isBool)(b)) {
+ stack.push(a !== b);
+ } else {
+ stack.push(a ^ b);
+ }
+
+ break;
+
+ default:
+ throw new _util.FormatError("Unknown operator ".concat(operator));
+ }
+ }
+
+ return stack.stack;
+ }
+ };
+ return PostScriptEvaluator;
+}();
+
+exports.PostScriptEvaluator = PostScriptEvaluator;
+
+var PostScriptCompiler = function PostScriptCompilerClosure() {
+ function AstNode(type) {
+ this.type = type;
+ }
+
+ AstNode.prototype.visit = function (visitor) {
+ (0, _util.unreachable)('abstract method');
+ };
+
+ function AstArgument(index, min, max) {
+ AstNode.call(this, 'args');
+ this.index = index;
+ this.min = min;
+ this.max = max;
+ }
+
+ AstArgument.prototype = Object.create(AstNode.prototype);
+
+ AstArgument.prototype.visit = function (visitor) {
+ visitor.visitArgument(this);
+ };
+
+ function AstLiteral(number) {
+ AstNode.call(this, 'literal');
+ this.number = number;
+ this.min = number;
+ this.max = number;
+ }
+
+ AstLiteral.prototype = Object.create(AstNode.prototype);
+
+ AstLiteral.prototype.visit = function (visitor) {
+ visitor.visitLiteral(this);
+ };
+
+ function AstBinaryOperation(op, arg1, arg2, min, max) {
+ AstNode.call(this, 'binary');
+ this.op = op;
+ this.arg1 = arg1;
+ this.arg2 = arg2;
+ this.min = min;
+ this.max = max;
+ }
+
+ AstBinaryOperation.prototype = Object.create(AstNode.prototype);
+
+ AstBinaryOperation.prototype.visit = function (visitor) {
+ visitor.visitBinaryOperation(this);
+ };
+
+ function AstMin(arg, max) {
+ AstNode.call(this, 'max');
+ this.arg = arg;
+ this.min = arg.min;
+ this.max = max;
+ }
+
+ AstMin.prototype = Object.create(AstNode.prototype);
+
+ AstMin.prototype.visit = function (visitor) {
+ visitor.visitMin(this);
+ };
+
+ function AstVariable(index, min, max) {
+ AstNode.call(this, 'var');
+ this.index = index;
+ this.min = min;
+ this.max = max;
+ }
+
+ AstVariable.prototype = Object.create(AstNode.prototype);
+
+ AstVariable.prototype.visit = function (visitor) {
+ visitor.visitVariable(this);
+ };
+
+ function AstVariableDefinition(variable, arg) {
+ AstNode.call(this, 'definition');
+ this.variable = variable;
+ this.arg = arg;
+ }
+
+ AstVariableDefinition.prototype = Object.create(AstNode.prototype);
+
+ AstVariableDefinition.prototype.visit = function (visitor) {
+ visitor.visitVariableDefinition(this);
+ };
+
+ function ExpressionBuilderVisitor() {
+ this.parts = [];
+ }
+
+ ExpressionBuilderVisitor.prototype = {
+ visitArgument: function visitArgument(arg) {
+ this.parts.push('Math.max(', arg.min, ', Math.min(', arg.max, ', src[srcOffset + ', arg.index, ']))');
+ },
+ visitVariable: function visitVariable(variable) {
+ this.parts.push('v', variable.index);
+ },
+ visitLiteral: function visitLiteral(literal) {
+ this.parts.push(literal.number);
+ },
+ visitBinaryOperation: function visitBinaryOperation(operation) {
+ this.parts.push('(');
+ operation.arg1.visit(this);
+ this.parts.push(' ', operation.op, ' ');
+ operation.arg2.visit(this);
+ this.parts.push(')');
+ },
+ visitVariableDefinition: function visitVariableDefinition(definition) {
+ this.parts.push('var ');
+ definition.variable.visit(this);
+ this.parts.push(' = ');
+ definition.arg.visit(this);
+ this.parts.push(';');
+ },
+ visitMin: function visitMin(max) {
+ this.parts.push('Math.min(');
+ max.arg.visit(this);
+ this.parts.push(', ', max.max, ')');
+ },
+ toString: function toString() {
+ return this.parts.join('');
+ }
+ };
+
+ function buildAddOperation(num1, num2) {
+ if (num2.type === 'literal' && num2.number === 0) {
+ return num1;
+ }
+
+ if (num1.type === 'literal' && num1.number === 0) {
+ return num2;
+ }
+
+ if (num2.type === 'literal' && num1.type === 'literal') {
+ return new AstLiteral(num1.number + num2.number);
+ }
+
+ return new AstBinaryOperation('+', num1, num2, num1.min + num2.min, num1.max + num2.max);
+ }
+
+ function buildMulOperation(num1, num2) {
+ if (num2.type === 'literal') {
+ if (num2.number === 0) {
+ return new AstLiteral(0);
+ } else if (num2.number === 1) {
+ return num1;
+ } else if (num1.type === 'literal') {
+ return new AstLiteral(num1.number * num2.number);
+ }
+ }
+
+ if (num1.type === 'literal') {
+ if (num1.number === 0) {
+ return new AstLiteral(0);
+ } else if (num1.number === 1) {
+ return num2;
+ }
+ }
+
+ var min = Math.min(num1.min * num2.min, num1.min * num2.max, num1.max * num2.min, num1.max * num2.max);
+ var max = Math.max(num1.min * num2.min, num1.min * num2.max, num1.max * num2.min, num1.max * num2.max);
+ return new AstBinaryOperation('*', num1, num2, min, max);
+ }
+
+ function buildSubOperation(num1, num2) {
+ if (num2.type === 'literal') {
+ if (num2.number === 0) {
+ return num1;
+ } else if (num1.type === 'literal') {
+ return new AstLiteral(num1.number - num2.number);
+ }
+ }
+
+ if (num2.type === 'binary' && num2.op === '-' && num1.type === 'literal' && num1.number === 1 && num2.arg1.type === 'literal' && num2.arg1.number === 1) {
+ return num2.arg2;
+ }
+
+ return new AstBinaryOperation('-', num1, num2, num1.min - num2.max, num1.max - num2.min);
+ }
+
+ function buildMinOperation(num1, max) {
+ if (num1.min >= max) {
+ return new AstLiteral(max);
+ } else if (num1.max <= max) {
+ return num1;
+ }
+
+ return new AstMin(num1, max);
+ }
+
+ function PostScriptCompiler() {}
+
+ PostScriptCompiler.prototype = {
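+ // Attempts to translate a PostScript calculator program into straight-line JavaScript; returns
+ // null whenever an operator or stack shape cannot be compiled, in which case the caller falls
+ // back to the PostScriptEvaluator interpreter.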
+ compile: function PostScriptCompiler_compile(code, domain, range) {
+ var stack = [];
+ var i, ii;
+ var instructions = [];
+ var inputSize = domain.length >> 1,
+ outputSize = range.length >> 1;
+ var lastRegister = 0;
+ var n, j;
+ var num1, num2, ast1, ast2, tmpVar, item;
+
+ for (i = 0; i < inputSize; i++) {
+ stack.push(new AstArgument(i, domain[i * 2], domain[i * 2 + 1]));
+ }
+
+ for (i = 0, ii = code.length; i < ii; i++) {
+ item = code[i];
+
+ if (typeof item === 'number') {
+ stack.push(new AstLiteral(item));
+ continue;
+ }
+
+ switch (item) {
+ case 'add':
+ if (stack.length < 2) {
+ return null;
+ }
+
+ num2 = stack.pop();
+ num1 = stack.pop();
+ stack.push(buildAddOperation(num1, num2));
+ break;
+
+ case 'cvr':
+ if (stack.length < 1) {
+ return null;
+ }
+
+ break;
+
+ case 'mul':
+ if (stack.length < 2) {
+ return null;
+ }
+
+ num2 = stack.pop();
+ num1 = stack.pop();
+ stack.push(buildMulOperation(num1, num2));
+ break;
+
+ case 'sub':
+ if (stack.length < 2) {
+ return null;
+ }
+
+ num2 = stack.pop();
+ num1 = stack.pop();
+ stack.push(buildSubOperation(num1, num2));
+ break;
+
+ case 'exch':
+ if (stack.length < 2) {
+ return null;
+ }
+
+ ast1 = stack.pop();
+ ast2 = stack.pop();
+ stack.push(ast1, ast2);
+ break;
+
+ case 'pop':
+ if (stack.length < 1) {
+ return null;
+ }
+
+ stack.pop();
+ break;
+
+ case 'index':
+ if (stack.length < 1) {
+ return null;
+ }
+
+ num1 = stack.pop();
+
+ if (num1.type !== 'literal') {
+ return null;
+ }
+
+ n = num1.number;
+
+ if (n < 0 || !Number.isInteger(n) || stack.length < n) {
+ return null;
+ }
+
+ ast1 = stack[stack.length - n - 1];
+
+ if (ast1.type === 'literal' || ast1.type === 'var') {
+ stack.push(ast1);
+ break;
+ }
+
+ tmpVar = new AstVariable(lastRegister++, ast1.min, ast1.max);
+ stack[stack.length - n - 1] = tmpVar;
+ stack.push(tmpVar);
+ instructions.push(new AstVariableDefinition(tmpVar, ast1));
+ break;
+
+ case 'dup':
+ if (stack.length < 1) {
+ return null;
+ }
+
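+ // Recognize the clamp idiom "dup n gt { pop n } if" (compiled to: dup, n, gt, <target>, jz,
+ // pop, n) and fold it into a single Math.min node.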
+ if (typeof code[i + 1] === 'number' && code[i + 2] === 'gt' && code[i + 3] === i + 7 && code[i + 4] === 'jz' && code[i + 5] === 'pop' && code[i + 6] === code[i + 1]) {
+ num1 = stack.pop();
+ stack.push(buildMinOperation(num1, code[i + 1]));
+ i += 6;
+ break;
+ }
+
+ ast1 = stack[stack.length - 1];
+
+ if (ast1.type === 'literal' || ast1.type === 'var') {
+ stack.push(ast1);
+ break;
+ }
+
+ tmpVar = new AstVariable(lastRegister++, ast1.min, ast1.max);
+ stack[stack.length - 1] = tmpVar;
+ stack.push(tmpVar);
+ instructions.push(new AstVariableDefinition(tmpVar, ast1));
+ break;
+
+ case 'roll':
+ if (stack.length < 2) {
+ return null;
+ }
+
+ num2 = stack.pop();
+ num1 = stack.pop();
+
+ if (num2.type !== 'literal' || num1.type !== 'literal') {
+ return null;
+ }
+
+ j = num2.number;
+ n = num1.number;
+
+ if (n <= 0 || !Number.isInteger(n) || !Number.isInteger(j) || stack.length < n) {
+ return null;
+ }
+
+ j = (j % n + n) % n;
+
+ if (j === 0) {
+ break;
+ }
+
+ Array.prototype.push.apply(stack, stack.splice(stack.length - n, n - j));
+ break;
+
+ default:
+ return null;
+ }
+ }
+
+ if (stack.length !== outputSize) {
+ return null;
+ }
+
+ var result = [];
+ instructions.forEach(function (instruction) {
+ var statementBuilder = new ExpressionBuilderVisitor();
+ instruction.visit(statementBuilder);
+ result.push(statementBuilder.toString());
+ });
+ stack.forEach(function (expr, i) {
+ var statementBuilder = new ExpressionBuilderVisitor();
+ expr.visit(statementBuilder);
+ var min = range[i * 2],
+ max = range[i * 2 + 1];
+ var out = [statementBuilder.toString()];
+
+ if (min > expr.min) {
+ out.unshift('Math.max(', min, ', ');
+ out.push(')');
+ }
+
+ if (max < expr.max) {
+ out.unshift('Math.min(', max, ', ');
+ out.push(')');
+ }
+
+ out.unshift('dest[destOffset + ', i, '] = ');
+ out.push(';');
+ result.push(out.join(''));
+ });
+ return result.join('\n');
+ }
+ };
+ return PostScriptCompiler;
+}();
+
+exports.PostScriptCompiler = PostScriptCompiler;
+
+/***/ }),
+/* 187 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.PostScriptParser = exports.PostScriptLexer = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _primitives = __w_pdfjs_require__(151);
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var PostScriptParser =
+/*#__PURE__*/
+function () {
+ function PostScriptParser(lexer) {
+ _classCallCheck(this, PostScriptParser);
+
+ this.lexer = lexer;
+ this.operators = [];
+ this.token = null;
+ this.prev = null;
+ }
+
+ _createClass(PostScriptParser, [{
+ key: "nextToken",
+ value: function nextToken() {
+ this.prev = this.token;
+ this.token = this.lexer.getToken();
+ }
+ }, {
+ key: "accept",
+ value: function accept(type) {
+ if (this.token.type === type) {
+ this.nextToken();
+ return true;
+ }
+
+ return false;
+ }
+ }, {
+ key: "expect",
+ value: function expect(type) {
+ if (this.accept(type)) {
+ return true;
+ }
+
+ throw new _util.FormatError("Unexpected symbol: found ".concat(this.token.type, " expected ").concat(type, "."));
+ }
+ }, {
+ key: "parse",
+ value: function parse() {
+ this.nextToken();
+ this.expect(PostScriptTokenTypes.LBRACE);
+ this.parseBlock();
+ this.expect(PostScriptTokenTypes.RBRACE);
+ return this.operators;
+ }
+ }, {
+ key: "parseBlock",
+ value: function parseBlock() {
+ while (true) {
+ if (this.accept(PostScriptTokenTypes.NUMBER)) {
+ this.operators.push(this.prev.value);
+ } else if (this.accept(PostScriptTokenTypes.OPERATOR)) {
+ this.operators.push(this.prev.value);
+ } else if (this.accept(PostScriptTokenTypes.LBRACE)) {
+ this.parseCondition();
+ } else {
+ return;
+ }
+ }
+ }
+ }, {
+ key: "parseCondition",
+ value: function parseCondition() {
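+ // Lowers "{...} if" and "{...} {...} ifelse" into the flat operator stream: the two placeholder
+ // slots pushed here are patched with a jump target and 'jz' (plus a 'j' for ifelse) once the
+ // block lengths are known.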
+ var conditionLocation = this.operators.length;
+ this.operators.push(null, null);
+ this.parseBlock();
+ this.expect(PostScriptTokenTypes.RBRACE);
+
+ if (this.accept(PostScriptTokenTypes.IF)) {
+ this.operators[conditionLocation] = this.operators.length;
+ this.operators[conditionLocation + 1] = 'jz';
+ } else if (this.accept(PostScriptTokenTypes.LBRACE)) {
+ var jumpLocation = this.operators.length;
+ this.operators.push(null, null);
+ var endOfTrue = this.operators.length;
+ this.parseBlock();
+ this.expect(PostScriptTokenTypes.RBRACE);
+ this.expect(PostScriptTokenTypes.IFELSE);
+ this.operators[jumpLocation] = this.operators.length;
+ this.operators[jumpLocation + 1] = 'j';
+ this.operators[conditionLocation] = endOfTrue;
+ this.operators[conditionLocation + 1] = 'jz';
+ } else {
+ throw new _util.FormatError('PS Function: error parsing conditional.');
+ }
+ }
+ }]);
+
+ return PostScriptParser;
+}();
+
+exports.PostScriptParser = PostScriptParser;
+var PostScriptTokenTypes = {
+ LBRACE: 0,
+ RBRACE: 1,
+ NUMBER: 2,
+ OPERATOR: 3,
+ IF: 4,
+ IFELSE: 5
+};
+
+var PostScriptToken = function PostScriptTokenClosure() {
+ var opCache = Object.create(null);
+
+ var PostScriptToken =
+ /*#__PURE__*/
+ function () {
+ function PostScriptToken(type, value) {
+ _classCallCheck(this, PostScriptToken);
+
+ this.type = type;
+ this.value = value;
+ }
+
+ _createClass(PostScriptToken, null, [{
+ key: "getOperator",
+ value: function getOperator(op) {
+ var opValue = opCache[op];
+
+ if (opValue) {
+ return opValue;
+ }
+
+ return opCache[op] = new PostScriptToken(PostScriptTokenTypes.OPERATOR, op);
+ }
+ }, {
+ key: "LBRACE",
+ get: function get() {
+ return (0, _util.shadow)(this, 'LBRACE', new PostScriptToken(PostScriptTokenTypes.LBRACE, '{'));
+ }
+ }, {
+ key: "RBRACE",
+ get: function get() {
+ return (0, _util.shadow)(this, 'RBRACE', new PostScriptToken(PostScriptTokenTypes.RBRACE, '}'));
+ }
+ }, {
+ key: "IF",
+ get: function get() {
+ return (0, _util.shadow)(this, 'IF', new PostScriptToken(PostScriptTokenTypes.IF, 'IF'));
+ }
+ }, {
+ key: "IFELSE",
+ get: function get() {
+ return (0, _util.shadow)(this, 'IFELSE', new PostScriptToken(PostScriptTokenTypes.IFELSE, 'IFELSE'));
+ }
+ }]);
+
+ return PostScriptToken;
+ }();
+
+ return PostScriptToken;
+}();
+
+var PostScriptLexer =
+/*#__PURE__*/
+function () {
+ function PostScriptLexer(stream) {
+ _classCallCheck(this, PostScriptLexer);
+
+ this.stream = stream;
+ this.nextChar();
+ this.strBuf = [];
+ }
+
+ _createClass(PostScriptLexer, [{
+ key: "nextChar",
+ value: function nextChar() {
+ return this.currentChar = this.stream.getByte();
+ }
+ }, {
+ key: "getToken",
+ value: function getToken() {
+ var comment = false;
+ var ch = this.currentChar;
+
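+ // Skip whitespace and '%' comments (which run to the end of the line) until a token character
+ // or EOF is reached.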
+ while (true) {
+ if (ch < 0) {
+ return _primitives.EOF;
+ }
+
+ if (comment) {
+ if (ch === 0x0A || ch === 0x0D) {
+ comment = false;
+ }
+ } else if (ch === 0x25) {
+ comment = true;
+ } else if (!(0, _util.isSpace)(ch)) {
+ break;
+ }
+
+ ch = this.nextChar();
+ }
+
+ switch (ch | 0) {
+ case 0x30:
+ case 0x31:
+ case 0x32:
+ case 0x33:
+ case 0x34:
+ case 0x35:
+ case 0x36:
+ case 0x37:
+ case 0x38:
+ case 0x39:
+ case 0x2B:
+ case 0x2D:
+ case 0x2E:
+ return new PostScriptToken(PostScriptTokenTypes.NUMBER, this.getNumber());
+
+ case 0x7B:
+ this.nextChar();
+ return PostScriptToken.LBRACE;
+
+ case 0x7D:
+ this.nextChar();
+ return PostScriptToken.RBRACE;
+ }
+
+ var strBuf = this.strBuf;
+ strBuf.length = 0;
+ strBuf[0] = String.fromCharCode(ch);
+
+ while ((ch = this.nextChar()) >= 0 && (ch >= 0x41 && ch <= 0x5A || ch >= 0x61 && ch <= 0x7A)) {
+ strBuf.push(String.fromCharCode(ch));
+ }
+
+ var str = strBuf.join('');
+
+ switch (str.toLowerCase()) {
+ case 'if':
+ return PostScriptToken.IF;
+
+ case 'ifelse':
+ return PostScriptToken.IFELSE;
+
+ default:
+ return PostScriptToken.getOperator(str);
+ }
+ }
+ }, {
+ key: "getNumber",
+ value: function getNumber() {
+ var ch = this.currentChar;
+ var strBuf = this.strBuf;
+ strBuf.length = 0;
+ strBuf[0] = String.fromCharCode(ch);
+
+ while ((ch = this.nextChar()) >= 0) {
+ if (ch >= 0x30 && ch <= 0x39 || ch === 0x2D || ch === 0x2E) {
+ strBuf.push(String.fromCharCode(ch));
+ } else {
+ break;
+ }
+ }
+
+ var value = parseFloat(strBuf.join(''));
+
+ if (isNaN(value)) {
+ throw new _util.FormatError("Invalid floating point number: ".concat(value));
+ }
+
+ return value;
+ }
+ }]);
+
+ return PostScriptLexer;
+}();
+
+exports.PostScriptLexer = PostScriptLexer;
+
+/***/ }),
+/* 188 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.MurmurHash3_64 = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var SEED = 0xc3d2e1f0;
+var MASK_HIGH = 0xffff0000;
+var MASK_LOW = 0xffff;
+
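+ // 64-bit MurmurHash3 variant computed as two interleaved 32-bit lanes (h1/h2), with the
+ // multiplications split into high/low 16-bit halves to stay within JavaScript's 32-bit
+ // integer arithmetic.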
+var MurmurHash3_64 =
+/*#__PURE__*/
+function () {
+ function MurmurHash3_64(seed) {
+ _classCallCheck(this, MurmurHash3_64);
+
+ this.h1 = seed ? seed & 0xffffffff : SEED;
+ this.h2 = seed ? seed & 0xffffffff : SEED;
+ }
+
+ _createClass(MurmurHash3_64, [{
+ key: "update",
+ value: function update(input) {
+ var data, length;
+
+ if ((0, _util.isString)(input)) {
+ data = new Uint8Array(input.length * 2);
+ length = 0;
+
+ for (var i = 0, ii = input.length; i < ii; i++) {
+ var code = input.charCodeAt(i);
+
+ if (code <= 0xff) {
+ data[length++] = code;
+ } else {
+ data[length++] = code >>> 8;
+ data[length++] = code & 0xff;
+ }
+ }
+ } else if ((0, _util.isArrayBuffer)(input)) {
+ data = input;
+ length = data.byteLength;
+ } else {
+ throw new Error('Wrong data format in MurmurHash3_64_update. ' + 'Input must be a string or array.');
+ }
+
+ var blockCounts = length >> 2;
+ var tailLength = length - blockCounts * 4;
+ var dataUint32 = new Uint32Array(data.buffer, 0, blockCounts);
+ var k1 = 0,
+ k2 = 0;
+ var h1 = this.h1,
+ h2 = this.h2;
+ var C1 = 0xcc9e2d51,
+ C2 = 0x1b873593;
+ var C1_LOW = C1 & MASK_LOW,
+ C2_LOW = C2 & MASK_LOW;
+
+ for (var _i = 0; _i < blockCounts; _i++) {
+ if (_i & 1) {
+ k1 = dataUint32[_i];
+ k1 = k1 * C1 & MASK_HIGH | k1 * C1_LOW & MASK_LOW;
+ k1 = k1 << 15 | k1 >>> 17;
+ k1 = k1 * C2 & MASK_HIGH | k1 * C2_LOW & MASK_LOW;
+ h1 ^= k1;
+ h1 = h1 << 13 | h1 >>> 19;
+ h1 = h1 * 5 + 0xe6546b64;
+ } else {
+ k2 = dataUint32[_i];
+ k2 = k2 * C1 & MASK_HIGH | k2 * C1_LOW & MASK_LOW;
+ k2 = k2 << 15 | k2 >>> 17;
+ k2 = k2 * C2 & MASK_HIGH | k2 * C2_LOW & MASK_LOW;
+ h2 ^= k2;
+ h2 = h2 << 13 | h2 >>> 19;
+ h2 = h2 * 5 + 0xe6546b64;
+ }
+ }
+
+ k1 = 0;
+
+ switch (tailLength) {
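+ // Intentional fall-through: mix the 1-3 remaining tail bytes into k1.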
+ case 3:
+ k1 ^= data[blockCounts * 4 + 2] << 16;
+
+ case 2:
+ k1 ^= data[blockCounts * 4 + 1] << 8;
+
+ case 1:
+ k1 ^= data[blockCounts * 4];
+ k1 = k1 * C1 & MASK_HIGH | k1 * C1_LOW & MASK_LOW;
+ k1 = k1 << 15 | k1 >>> 17;
+ k1 = k1 * C2 & MASK_HIGH | k1 * C2_LOW & MASK_LOW;
+
+ if (blockCounts & 1) {
+ h1 ^= k1;
+ } else {
+ h2 ^= k1;
+ }
+
+ }
+
+ this.h1 = h1;
+ this.h2 = h2;
+ }
+ }, {
+ key: "hexdigest",
+ value: function hexdigest() {
+ var h1 = this.h1,
+ h2 = this.h2;
+ h1 ^= h2 >>> 1;
+ h1 = h1 * 0xed558ccd & MASK_HIGH | h1 * 0x8ccd & MASK_LOW;
+ h2 = h2 * 0xff51afd7 & MASK_HIGH | ((h2 << 16 | h1 >>> 16) * 0xafd7ed55 & MASK_HIGH) >>> 16;
+ h1 ^= h2 >>> 1;
+ h1 = h1 * 0x1a85ec53 & MASK_HIGH | h1 * 0xec53 & MASK_LOW;
+ h2 = h2 * 0xc4ceb9fe & MASK_HIGH | ((h2 << 16 | h1 >>> 16) * 0xb9fe1a85 & MASK_HIGH) >>> 16;
+ h1 ^= h2 >>> 1;
+ var hex1 = (h1 >>> 0).toString(16),
+ hex2 = (h2 >>> 0).toString(16);
+ return hex1.padStart(8, '0') + hex2.padStart(8, '0');
+ }
+ }]);
+
+ return MurmurHash3_64;
+}();
+
+exports.MurmurHash3_64 = MurmurHash3_64;
+
+/***/ }),
+/* 189 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.NativeImageDecoder = void 0;
+
+var _colorspace = __w_pdfjs_require__(169);
+
+var _jpeg_stream = __w_pdfjs_require__(164);
+
+var _stream = __w_pdfjs_require__(158);
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
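+ // Delegates decoding of sufficiently simple JPEG streams (no DecodeParms, a plain device color
+ // space with a default Decode array) to the handler via the 'JpegDecode' message instead of
+ // decoding them in this module.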
+var NativeImageDecoder =
+/*#__PURE__*/
+function () {
+ function NativeImageDecoder(_ref) {
+ var xref = _ref.xref,
+ resources = _ref.resources,
+ handler = _ref.handler,
+ _ref$forceDataSchema = _ref.forceDataSchema,
+ forceDataSchema = _ref$forceDataSchema === void 0 ? false : _ref$forceDataSchema,
+ pdfFunctionFactory = _ref.pdfFunctionFactory;
+
+ _classCallCheck(this, NativeImageDecoder);
+
+ this.xref = xref;
+ this.resources = resources;
+ this.handler = handler;
+ this.forceDataSchema = forceDataSchema;
+ this.pdfFunctionFactory = pdfFunctionFactory;
+ }
+
+ _createClass(NativeImageDecoder, [{
+ key: "canDecode",
+ value: function canDecode(image) {
+ return image instanceof _jpeg_stream.JpegStream && NativeImageDecoder.isDecodable(image, this.xref, this.resources, this.pdfFunctionFactory);
+ }
+ }, {
+ key: "decode",
+ value: function decode(image) {
+ var dict = image.dict;
+ var colorSpace = dict.get('ColorSpace', 'CS');
+ colorSpace = _colorspace.ColorSpace.parse(colorSpace, this.xref, this.resources, this.pdfFunctionFactory);
+ return this.handler.sendWithPromise('JpegDecode', [image.getIR(this.forceDataSchema), colorSpace.numComps]).then(function (_ref2) {
+ var data = _ref2.data,
+ width = _ref2.width,
+ height = _ref2.height;
+ return new _stream.Stream(data, 0, data.length, dict);
+ });
+ }
+ }], [{
+ key: "isSupported",
+ value: function isSupported(image, xref, res, pdfFunctionFactory) {
+ var dict = image.dict;
+
+ if (dict.has('DecodeParms') || dict.has('DP')) {
+ return false;
+ }
+
+ var cs = _colorspace.ColorSpace.parse(dict.get('ColorSpace', 'CS'), xref, res, pdfFunctionFactory);
+
+ return (cs.name === 'DeviceGray' || cs.name === 'DeviceRGB') && cs.isDefaultDecode(dict.getArray('Decode', 'D'));
+ }
+ }, {
+ key: "isDecodable",
+ value: function isDecodable(image, xref, res, pdfFunctionFactory) {
+ var dict = image.dict;
+
+ if (dict.has('DecodeParms') || dict.has('DP')) {
+ return false;
+ }
+
+ var cs = _colorspace.ColorSpace.parse(dict.get('ColorSpace', 'CS'), xref, res, pdfFunctionFactory);
+
+ var bpc = dict.get('BitsPerComponent', 'BPC') || 1;
+ return (cs.numComps === 1 || cs.numComps === 3) && cs.isDefaultDecode(dict.getArray('Decode', 'D'), bpc);
+ }
+ }]);
+
+ return NativeImageDecoder;
+}();
+
+exports.NativeImageDecoder = NativeImageDecoder;
+
+/***/ }),
+/* 190 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.PDFImage = void 0;
+
+var _util = __w_pdfjs_require__(5);
+
+var _primitives = __w_pdfjs_require__(151);
+
+var _colorspace = __w_pdfjs_require__(169);
+
+var _stream = __w_pdfjs_require__(158);
+
+var _jpeg_stream = __w_pdfjs_require__(164);
+
+var _jpx = __w_pdfjs_require__(167);
+
+function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
+
+function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
+
+function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
+
+function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
+
+var PDFImage = function PDFImageClosure() {
+ function handleImageData(image, nativeDecoder) {
+ if (nativeDecoder && nativeDecoder.canDecode(image)) {
+ return nativeDecoder.decode(image)["catch"](function (reason) {
+ (0, _util.warn)('Native image decoding failed -- trying to recover: ' + (reason && reason.message));
+ return image;
+ });
+ }
+
+ return Promise.resolve(image);
+ }
+
+ function decodeAndClamp(value, addend, coefficient, max) {
+ value = addend + value * coefficient;
+ return value < 0 ? 0 : value > max ? max : value;
+ }
+
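+ // Nearest-neighbour resize of a (soft) mask buffer from w1 x h1 to w2 x h2.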
+ function resizeImageMask(src, bpc, w1, h1, w2, h2) {
+ var length = w2 * h2;
+ var dest = bpc <= 8 ? new Uint8Array(length) : bpc <= 16 ? new Uint16Array(length) : new Uint32Array(length);
+ var xRatio = w1 / w2;
+ var yRatio = h1 / h2;
+ var i,
+ j,
+ py,
+ newIndex = 0,
+ oldIndex;
+ var xScaled = new Uint16Array(w2);
+ var w1Scanline = w1;
+
+ for (i = 0; i < w2; i++) {
+ xScaled[i] = Math.floor(i * xRatio);
+ }
+
+ for (i = 0; i < h2; i++) {
+ py = Math.floor(i * yRatio) * w1Scanline;
+
+ for (j = 0; j < w2; j++) {
+ oldIndex = py + xScaled[j];
+ dest[newIndex++] = src[oldIndex];
+ }
+ }
+
+ return dest;
+ }
+
+ function PDFImage(_ref) {
+ var xref = _ref.xref,
+ res = _ref.res,
+ image = _ref.image,
+ _ref$isInline = _ref.isInline,
+ isInline = _ref$isInline === void 0 ? false : _ref$isInline,
+ _ref$smask = _ref.smask,
+ smask = _ref$smask === void 0 ? null : _ref$smask,
+ _ref$mask = _ref.mask,
+ mask = _ref$mask === void 0 ? null : _ref$mask,
+ _ref$isMask = _ref.isMask,
+ isMask = _ref$isMask === void 0 ? false : _ref$isMask,
+ pdfFunctionFactory = _ref.pdfFunctionFactory;
+ this.image = image;
+ var dict = image.dict;
+ var filter = dict.get('Filter');
+
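+ // For JPX (JPEG 2000) streams the dimensions, bit depth and component count come from the
+ // embedded codestream header; JBIG2 streams are always one component at 1 bit per component.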
+ if ((0, _primitives.isName)(filter)) {
+ switch (filter.name) {
+ case 'JPXDecode':
+ var jpxImage = new _jpx.JpxImage();
+ jpxImage.parseImageProperties(image.stream);
+ image.stream.reset();
+ image.width = jpxImage.width;
+ image.height = jpxImage.height;
+ image.bitsPerComponent = jpxImage.bitsPerComponent;
+ image.numComps = jpxImage.componentsCount;
+ break;
+
+ case 'JBIG2Decode':
+ image.bitsPerComponent = 1;
+ image.numComps = 1;
+ break;
+ }
+ }
+
+ var width = dict.get('Width', 'W');
+ var height = dict.get('Height', 'H');
+
+ if (Number.isInteger(image.width) && image.width > 0 && Number.isInteger(image.height) && image.height > 0 && (image.width !== width || image.height !== height)) {
+ (0, _util.warn)('PDFImage - using the Width/Height of the image data, ' + 'rather than the image dictionary.');
+ width = image.width;
+ height = image.height;
+ }
+
+ if (width < 1 || height < 1) {
+ throw new _util.FormatError("Invalid image width: ".concat(width, " or ") + "height: ".concat(height));
+ }
+
+ this.width = width;
+ this.height = height;
+ this.interpolate = dict.get('Interpolate', 'I') || false;
+ this.imageMask = dict.get('ImageMask', 'IM') || false;
+ this.matte = dict.get('Matte') || false;
+ var bitsPerComponent = image.bitsPerComponent;
+
+ if (!bitsPerComponent) {
+ bitsPerComponent = dict.get('BitsPerComponent', 'BPC');
+
+ if (!bitsPerComponent) {
+ if (this.imageMask) {
+ bitsPerComponent = 1;
+ } else {
+ throw new _util.FormatError("Bits per component missing in image: ".concat(this.imageMask));
+ }
+ }
+ }
+
+ this.bpc = bitsPerComponent;
+
+ if (!this.imageMask) {
+ var colorSpace = dict.get('ColorSpace', 'CS');
+
+ if (!colorSpace) {
+ (0, _util.info)('JPX images (which do not require color spaces)');
+
+ switch (image.numComps) {
+ case 1:
+ colorSpace = _primitives.Name.get('DeviceGray');
+ break;
+
+ case 3:
+ colorSpace = _primitives.Name.get('DeviceRGB');
+ break;
+
+ case 4:
+ colorSpace = _primitives.Name.get('DeviceCMYK');
+ break;
+
+ default:
+ throw new Error("JPX images with ".concat(image.numComps, " ") + 'color components not supported.');
+ }
+ }
+
+ var resources = isInline ? res : null;
+ this.colorSpace = _colorspace.ColorSpace.parse(colorSpace, xref, resources, pdfFunctionFactory);
+ this.numComps = this.colorSpace.numComps;
+ }
+
+ this.decode = dict.getArray('Decode', 'D');
+ this.needsDecode = false;
+
+ if (this.decode && (this.colorSpace && !this.colorSpace.isDefaultDecode(this.decode, bitsPerComponent) || isMask && !_colorspace.ColorSpace.isDefaultDecode(this.decode, 1))) {
+ this.needsDecode = true;
+ var max = (1 << bitsPerComponent) - 1;
+ this.decodeCoefficients = [];
+ this.decodeAddends = [];
+ var isIndexed = this.colorSpace && this.colorSpace.name === 'Indexed';
+
+ for (var i = 0, j = 0; i < this.decode.length; i += 2, ++j) {
+ var dmin = this.decode[i];
+ var dmax = this.decode[i + 1];
+ this.decodeCoefficients[j] = isIndexed ? (dmax - dmin) / max : dmax - dmin;
+ this.decodeAddends[j] = isIndexed ? dmin : max * dmin;
+ }
+ }
+
+ if (smask) {
+ this.smask = new PDFImage({
+ xref: xref,
+ res: res,
+ image: smask,
+ isInline: isInline,
+ pdfFunctionFactory: pdfFunctionFactory
+ });
+ } else if (mask) {
+ if ((0, _primitives.isStream)(mask)) {
+ var maskDict = mask.dict,
+ imageMask = maskDict.get('ImageMask', 'IM');
+
+ if (!imageMask) {
+ (0, _util.warn)('Ignoring /Mask in image without /ImageMask.');
+ } else {
+ this.mask = new PDFImage({
+ xref: xref,
+ res: res,
+ image: mask,
+ isInline: isInline,
+ isMask: true,
+ pdfFunctionFactory: pdfFunctionFactory
+ });
+ }
+ } else {
+ this.mask = mask;
+ }
+ }
+ }
+
+ PDFImage.buildImage = function (_ref2) {
+ var handler = _ref2.handler,
+ xref = _ref2.xref,
+ res = _ref2.res,
+ image = _ref2.image,
+ _ref2$isInline = _ref2.isInline,
+ isInline = _ref2$isInline === void 0 ? false : _ref2$isInline,
+ _ref2$nativeDecoder = _ref2.nativeDecoder,
+ nativeDecoder = _ref2$nativeDecoder === void 0 ? null : _ref2$nativeDecoder,
+ pdfFunctionFactory = _ref2.pdfFunctionFactory;
+ var imagePromise = handleImageData(image, nativeDecoder);
+ var smaskPromise;
+ var maskPromise;
+ var smask = image.dict.get('SMask');
+ var mask = image.dict.get('Mask');
+
+ if (smask) {
+ smaskPromise = handleImageData(smask, nativeDecoder);
+ maskPromise = Promise.resolve(null);
+ } else {
+ smaskPromise = Promise.resolve(null);
+
+ if (mask) {
+ if ((0, _primitives.isStream)(mask)) {
+ maskPromise = handleImageData(mask, nativeDecoder);
+ } else if (Array.isArray(mask)) {
+ maskPromise = Promise.resolve(mask);
+ } else {
+ (0, _util.warn)('Unsupported mask format.');
+ maskPromise = Promise.resolve(null);
+ }
+ } else {
+ maskPromise = Promise.resolve(null);
+ }
+ }
+
+ return Promise.all([imagePromise, smaskPromise, maskPromise]).then(function (_ref3) {
+ var _ref4 = _slicedToArray(_ref3, 3),
+ imageData = _ref4[0],
+ smaskData = _ref4[1],
+ maskData = _ref4[2];
+
+ return new PDFImage({
+ xref: xref,
+ res: res,
+ image: imageData,
+ isInline: isInline,
+ smask: smaskData,
+ mask: maskData,
+ pdfFunctionFactory: pdfFunctionFactory
+ });
+ });
+ };
+
+ PDFImage.createMask = function (_ref5) {
+ var imgArray = _ref5.imgArray,
+ width = _ref5.width,
+ height = _ref5.height,
+ imageIsFromDecodeStream = _ref5.imageIsFromDecodeStream,
+ inverseDecode = _ref5.inverseDecode;
+ var computedLength = (width + 7 >> 3) * height;
+ var actualLength = imgArray.byteLength;
+ var haveFullData = computedLength === actualLength;
+ var data, i;
+
+ if (imageIsFromDecodeStream && (!inverseDecode || haveFullData)) {
+ data = imgArray;
+ } else if (!inverseDecode) {
+ data = new Uint8ClampedArray(actualLength);
+ data.set(imgArray);
+ } else {
+ data = new Uint8ClampedArray(computedLength);
+ data.set(imgArray);
+
+ for (i = actualLength; i < computedLength; i++) {
+ data[i] = 0xff;
+ }
+ }
+
+ if (inverseDecode) {
+ for (i = 0; i < actualLength; i++) {
+ data[i] ^= 0xFF;
+ }
+ }
+
+ return {
+ data: data,
+ width: width,
+ height: height
+ };
+ };
+
+ PDFImage.prototype = {
+ get drawWidth() {
+ return Math.max(this.width, this.smask && this.smask.width || 0, this.mask && this.mask.width || 0);
+ },
+
+ get drawHeight() {
+ return Math.max(this.height, this.smask && this.smask.height || 0, this.mask && this.mask.height || 0);
+ },
+
+ decodeBuffer: function decodeBuffer(buffer) {
+ var bpc = this.bpc;
+ var numComps = this.numComps;
+ var decodeAddends = this.decodeAddends;
+ var decodeCoefficients = this.decodeCoefficients;
+ var max = (1 << bpc) - 1;
+ var i, ii;
+
+ if (bpc === 1) {
+ for (i = 0, ii = buffer.length; i < ii; i++) {
+ buffer[i] = +!buffer[i];
+ }
+
+ return;
+ }
+
+ var index = 0;
+
+ for (i = 0, ii = this.width * this.height; i < ii; i++) {
+ for (var j = 0; j < numComps; j++) {
+ buffer[index] = decodeAndClamp(buffer[index], decodeAddends[j], decodeCoefficients[j], max);
+ index++;
+ }
+ }
+ },
+ getComponents: function getComponents(buffer) {
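+ // Unpacks bpc-bit samples into one array element per component; 8-bit data is returned as-is
+ // and 1-bit rows get a dedicated bit-unpacking fast path.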
+ var bpc = this.bpc;
+
+ if (bpc === 8) {
+ return buffer;
+ }
+
+ var width = this.width;
+ var height = this.height;
+ var numComps = this.numComps;
+ var length = width * height * numComps;
+ var bufferPos = 0;
+ var output = bpc <= 8 ? new Uint8Array(length) : bpc <= 16 ? new Uint16Array(length) : new Uint32Array(length);
+ var rowComps = width * numComps;
+ var max = (1 << bpc) - 1;
+ var i = 0,
+ ii,
+ buf;
+
+ if (bpc === 1) {
+ var mask, loop1End, loop2End;
+
+ for (var j = 0; j < height; j++) {
+ loop1End = i + (rowComps & ~7);
+ loop2End = i + rowComps;
+
+ while (i < loop1End) {
+ buf = buffer[bufferPos++];
+ output[i] = buf >> 7 & 1;
+ output[i + 1] = buf >> 6 & 1;
+ output[i + 2] = buf >> 5 & 1;
+ output[i + 3] = buf >> 4 & 1;
+ output[i + 4] = buf >> 3 & 1;
+ output[i + 5] = buf >> 2 & 1;
+ output[i + 6] = buf >> 1 & 1;
+ output[i + 7] = buf & 1;
+ i += 8;
+ }
+
+ if (i < loop2End) {
+ buf = buffer[bufferPos++];
+ mask = 128;
+
+ while (i < loop2End) {
+ output[i++] = +!!(buf & mask);
+ mask >>= 1;
+ }
+ }
+ }
+ } else {
+ var bits = 0;
+ buf = 0;
+
+ for (i = 0, ii = length; i < ii; ++i) {
+ if (i % rowComps === 0) {
+ buf = 0;
+ bits = 0;
+ }
+
+ while (bits < bpc) {
+ buf = buf << 8 | buffer[bufferPos++];
+ bits += 8;
+ }
+
+ var remainingBits = bits - bpc;
+ var value = buf >> remainingBits;
+ output[i] = value < 0 ? 0 : value > max ? max : value;
+ buf = buf & (1 << remainingBits) - 1;
+ bits = remainingBits;
+ }
+ }
+
+ return output;
+ },
+ fillOpacity: function fillOpacity(rgbaBuf, width, height, actualHeight, image) {
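+ // Fills the alpha channel (every 4th byte) of rgbaBuf from the soft mask, an explicit stencil
+ // mask image (inverted), or a colour-key /Mask array; fully opaque when no mask is present.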
+ var smask = this.smask;
+ var mask = this.mask;
+ var alphaBuf, sw, sh, i, ii, j;
+
+ if (smask) {
+ sw = smask.width;
+ sh = smask.height;
+ alphaBuf = new Uint8ClampedArray(sw * sh);
+ smask.fillGrayBuffer(alphaBuf);
+
+ if (sw !== width || sh !== height) {
+ alphaBuf = resizeImageMask(alphaBuf, smask.bpc, sw, sh, width, height);
+ }
+ } else if (mask) {
+ if (mask instanceof PDFImage) {
+ sw = mask.width;
+ sh = mask.height;
+ alphaBuf = new Uint8ClampedArray(sw * sh);
+ mask.numComps = 1;
+ mask.fillGrayBuffer(alphaBuf);
+
+ for (i = 0, ii = sw * sh; i < ii; ++i) {
+ alphaBuf[i] = 255 - alphaBuf[i];
+ }
+
+ if (sw !== width || sh !== height) {
+ alphaBuf = resizeImageMask(alphaBuf, mask.bpc, sw, sh, width, height);
+ }
+ } else if (Array.isArray(mask)) {
+ alphaBuf = new Uint8ClampedArray(width * height);
+ var numComps = this.numComps;
+
+ for (i = 0, ii = width * height; i < ii; ++i) {
+ var opacity = 0;
+ var imageOffset = i * numComps;
+
+ for (j = 0; j < numComps; ++j) {
+ var color = image[imageOffset + j];
+ var maskOffset = j * 2;
+
+ if (color < mask[maskOffset] || color > mask[maskOffset + 1]) {
+ opacity = 255;
+ break;
+ }
+ }
+
+ alphaBuf[i] = opacity;
+ }
+ } else {
+ throw new _util.FormatError('Unknown mask format.');
+ }
+ }
+
+ if (alphaBuf) {
+ for (i = 0, j = 3, ii = width * actualHeight; i < ii; ++i, j += 4) {
+ rgbaBuf[j] = alphaBuf[i];
+ }
+ } else {
+ for (i = 0, j = 3, ii = width * actualHeight; i < ii; ++i, j += 4) {
+ rgbaBuf[j] = 255;
+ }
+ }
+ },
+ undoPreblend: function undoPreblend(buffer, width, height) {
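+ // Undo premultiplication against the /Matte colour recorded on the soft mask, restoring the
+ // original component values for pixels with non-zero alpha.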
+ var matte = this.smask && this.smask.matte;
+
+ if (!matte) {
+ return;
+ }
+
+ var matteRgb = this.colorSpace.getRgb(matte, 0);
+ var matteR = matteRgb[0];
+ var matteG = matteRgb[1];
+ var matteB = matteRgb[2];
+ var length = width * height * 4;
+
+ for (var i = 0; i < length; i += 4) {
+ var alpha = buffer[i + 3];
+
+ if (alpha === 0) {
+ buffer[i] = 255;
+ buffer[i + 1] = 255;
+ buffer[i + 2] = 255;
+ continue;
+ }
+
+ var k = 255 / alpha;
+ buffer[i] = (buffer[i] - matteR) * k + matteR;
+ buffer[i + 1] = (buffer[i + 1] - matteG) * k + matteG;
+ buffer[i + 2] = (buffer[i + 2] - matteB) * k + matteB;
+ }
+ },
+ createImageData: function createImageData() {
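+ // Builds the imgData object used for rendering, preferring the compact GRAYSCALE_1BPP and
+ // RGB_24BPP forms when no mask, resizing or decode step forces a full RGBA_32BPP buffer.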
+ var forceRGBA = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
+ var drawWidth = this.drawWidth;
+ var drawHeight = this.drawHeight;
+ var imgData = {
+ width: drawWidth,
+ height: drawHeight,
+ kind: 0,
+ data: null
+ };
+ var numComps = this.numComps;
+ var originalWidth = this.width;
+ var originalHeight = this.height;
+ var bpc = this.bpc;
+ var rowBytes = originalWidth * numComps * bpc + 7 >> 3;
+ var imgArray;
+
+ if (!forceRGBA) {
+ var kind;
+
+ if (this.colorSpace.name === 'DeviceGray' && bpc === 1) {
+ kind = _util.ImageKind.GRAYSCALE_1BPP;
+ } else if (this.colorSpace.name === 'DeviceRGB' && bpc === 8 && !this.needsDecode) {
+ kind = _util.ImageKind.RGB_24BPP;
+ }
+
+ if (kind && !this.smask && !this.mask && drawWidth === originalWidth && drawHeight === originalHeight) {
+ imgData.kind = kind;
+ imgArray = this.getImageBytes(originalHeight * rowBytes);
+
+ if (this.image instanceof _stream.DecodeStream) {
+ imgData.data = imgArray;
+ } else {
+ var newArray = new Uint8ClampedArray(imgArray.length);
+ newArray.set(imgArray);
+ imgData.data = newArray;
+ }
+
+ if (this.needsDecode) {
+ (0, _util.assert)(kind === _util.ImageKind.GRAYSCALE_1BPP, 'PDFImage.createImageData: The image must be grayscale.');
+ var buffer = imgData.data;
+
+ for (var i = 0, ii = buffer.length; i < ii; i++) {
+ buffer[i] ^= 0xff;
+ }
+ }
+
+ return imgData;
+ }
+
+ if (this.image instanceof _jpeg_stream.JpegStream && !this.smask && !this.mask) {
+ var imageLength = originalHeight * rowBytes;
+
+ switch (this.colorSpace.name) {
+ case 'DeviceGray':
+ imageLength *= 3;
+
+ case 'DeviceRGB':
+ case 'DeviceCMYK':
+ imgData.kind = _util.ImageKind.RGB_24BPP;
+ imgData.data = this.getImageBytes(imageLength, drawWidth, drawHeight, true);
+ return imgData;
+ }
+ }
+ }
+
+ imgArray = this.getImageBytes(originalHeight * rowBytes);
+ var actualHeight = 0 | imgArray.length / rowBytes * drawHeight / originalHeight;
+ var comps = this.getComponents(imgArray);
+ var alpha01, maybeUndoPreblend;
+
+ if (!forceRGBA && !this.smask && !this.mask) {
+ imgData.kind = _util.ImageKind.RGB_24BPP;
+ imgData.data = new Uint8ClampedArray(drawWidth * drawHeight * 3);
+ alpha01 = 0;
+ maybeUndoPreblend = false;
+ } else {
+ imgData.kind = _util.ImageKind.RGBA_32BPP;
+ imgData.data = new Uint8ClampedArray(drawWidth * drawHeight * 4);
+ alpha01 = 1;
+ maybeUndoPreblend = true;
+ this.fillOpacity(imgData.data, drawWidth, drawHeight, actualHeight, comps);
+ }
+
+ if (this.needsDecode) {
+ this.decodeBuffer(comps);
+ }
+
+ this.colorSpace.fillRgb(imgData.data, originalWidth, originalHeight, drawWidth, drawHeight, actualHeight, bpc, comps, alpha01);
+
+ if (maybeUndoPreblend) {
+ this.undoPreblend(imgData.data, drawWidth, actualHeight);
+ }
+
+ return imgData;
+ },
+ fillGrayBuffer: function fillGrayBuffer(buffer) {
+ var numComps = this.numComps;
+
+ if (numComps !== 1) {
+ throw new _util.FormatError("Reading gray scale from a color image: ".concat(numComps));
+ }
+
+ var width = this.width;
+ var height = this.height;
+ var bpc = this.bpc;
+ var rowBytes = width * numComps * bpc + 7 >> 3;
+ var imgArray = this.getImageBytes(height * rowBytes);
+ var comps = this.getComponents(imgArray);
+ var i, length;
+
+ if (bpc === 1) {
+ length = width * height;
+
+ if (this.needsDecode) {
+ for (i = 0; i < length; ++i) {
+ buffer[i] = comps[i] - 1 & 255;
+ }
+ } else {
+ for (i = 0; i < length; ++i) {
+ buffer[i] = -comps[i] & 255;
+ }
+ }
+
+ return;
+ }
+
+ if (this.needsDecode) {
+ this.decodeBuffer(comps);
+ }
+
+ length = width * height;
+ var scale = 255 / ((1 << bpc) - 1);
+
+ for (i = 0; i < length; ++i) {
+ buffer[i] = scale * comps[i];
+ }
+ },
+ getImageBytes: function getImageBytes(length, drawWidth, drawHeight) {
+ var forceRGB = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false;
+ this.image.reset();
+ this.image.drawWidth = drawWidth || this.width;
+ this.image.drawHeight = drawHeight || this.height;
+ this.image.forceRGB = !!forceRGB;
+ return this.image.getBytes(length, true);
+ }
+ };
+ return PDFImage;
+}();
+
+exports.PDFImage = PDFImage;
+
+/***/ }),
+/* 191 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.MessageHandler = MessageHandler;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(2));
+
+var _util = __w_pdfjs_require__(5);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+
+function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+function resolveCall(_x, _x2) {
+ return _resolveCall.apply(this, arguments);
+}
+
+function _resolveCall() {
+ _resolveCall = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee(fn, args) {
+ var thisArg,
+ _args = arguments;
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ thisArg = _args.length > 2 && _args[2] !== undefined ? _args[2] : null;
+
+ if (fn) {
+ _context.next = 3;
+ break;
+ }
+
+ return _context.abrupt("return", undefined);
+
+ case 3:
+ return _context.abrupt("return", fn.apply(thisArg, args));
+
+ case 4:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee);
+ }));
+ return _resolveCall.apply(this, arguments);
+}
+
+function wrapReason(reason) {
+ if (_typeof(reason) !== 'object') {
+ return reason;
+ }
+
+ switch (reason.name) {
+ case 'AbortException':
+ return new _util.AbortException(reason.message);
+
+ case 'MissingPDFException':
+ return new _util.MissingPDFException(reason.message);
+
+ case 'UnexpectedResponseException':
+ return new _util.UnexpectedResponseException(reason.message, reason.status);
+
+ default:
+ return new _util.UnknownErrorException(reason.message, reason.details);
+ }
+}
+
+function makeReasonSerializable(reason) {
+ if (!(reason instanceof Error) || reason instanceof _util.AbortException || reason instanceof _util.MissingPDFException || reason instanceof _util.UnexpectedResponseException || reason instanceof _util.UnknownErrorException) {
+ return reason;
+ }
+
+ return new _util.UnknownErrorException(reason.message, reason.toString());
+}
+
+function resolveOrReject(capability, success, reason) {
+ if (success) {
+ capability.resolve();
+ } else {
+ capability.reject(reason);
+ }
+}
+
+function finalize(promise) {
+ return Promise.resolve(promise)["catch"](function () {});
+}
+
+function MessageHandler(sourceName, targetName, comObj) {
+ var _this = this;
+
+ this.sourceName = sourceName;
+ this.targetName = targetName;
+ this.comObj = comObj;
+ this.callbackId = 1;
+ this.streamId = 1;
+ this.postMessageTransfers = true;
+ this.streamSinks = Object.create(null);
+ this.streamControllers = Object.create(null);
+ var callbacksCapabilities = this.callbacksCapabilities = Object.create(null);
+ var ah = this.actionHandler = Object.create(null);
+
+ this._onComObjOnMessage = function (event) {
+ var data = event.data;
+
+ if (data.targetName !== _this.sourceName) {
+ return;
+ }
+
+ if (data.stream) {
+ _this._processStreamMessage(data);
+ } else if (data.isReply) {
+ var callbackId = data.callbackId;
+
+ if (data.callbackId in callbacksCapabilities) {
+ var callback = callbacksCapabilities[callbackId];
+ delete callbacksCapabilities[callbackId];
+
+ if ('error' in data) {
+ callback.reject(wrapReason(data.error));
+ } else {
+ callback.resolve(data.data);
+ }
+ } else {
+ throw new Error("Cannot resolve callback ".concat(callbackId));
+ }
+ } else if (data.action in ah) {
+ var action = ah[data.action];
+
+ if (data.callbackId) {
+ var _sourceName = _this.sourceName;
+ var _targetName = data.sourceName;
+ Promise.resolve().then(function () {
+ return action[0].call(action[1], data.data);
+ }).then(function (result) {
+ comObj.postMessage({
+ sourceName: _sourceName,
+ targetName: _targetName,
+ isReply: true,
+ callbackId: data.callbackId,
+ data: result
+ });
+ }, function (reason) {
+ comObj.postMessage({
+ sourceName: _sourceName,
+ targetName: _targetName,
+ isReply: true,
+ callbackId: data.callbackId,
+ error: makeReasonSerializable(reason)
+ });
+ });
+ } else if (data.streamId) {
+ _this._createStreamSink(data);
+ } else {
+ action[0].call(action[1], data.data);
+ }
+ } else {
+ throw new Error("Unknown action from worker: ".concat(data.action));
+ }
+ };
+
+ comObj.addEventListener('message', this._onComObjOnMessage);
+}
+
+MessageHandler.prototype = {
+ on: function on(actionName, handler, scope) {
+ var ah = this.actionHandler;
+
+ if (ah[actionName]) {
+ throw new Error("There is already an actionName called \"".concat(actionName, "\""));
+ }
+
+ ah[actionName] = [handler, scope];
+ },
+ send: function send(actionName, data, transfers) {
+ var message = {
+ sourceName: this.sourceName,
+ targetName: this.targetName,
+ action: actionName,
+ data: data
+ };
+ this.postMessage(message, transfers);
+ },
+ sendWithPromise: function sendWithPromise(actionName, data, transfers) {
+ var callbackId = this.callbackId++;
+ var message = {
+ sourceName: this.sourceName,
+ targetName: this.targetName,
+ action: actionName,
+ data: data,
+ callbackId: callbackId
+ };
+ var capability = (0, _util.createPromiseCapability)();
+ this.callbacksCapabilities[callbackId] = capability;
+
+ try {
+ this.postMessage(message, transfers);
+ } catch (e) {
+ capability.reject(e);
+ }
+
+ return capability.promise;
+ },
+ sendWithStream: function sendWithStream(actionName, data, queueingStrategy, transfers) {
+ var _this2 = this;
+
+ var streamId = this.streamId++;
+ var sourceName = this.sourceName;
+ var targetName = this.targetName;
+ return new _util.ReadableStream({
+ start: function start(controller) {
+ var startCapability = (0, _util.createPromiseCapability)();
+ _this2.streamControllers[streamId] = {
+ controller: controller,
+ startCall: startCapability,
+ isClosed: false
+ };
+
+ _this2.postMessage({
+ sourceName: sourceName,
+ targetName: targetName,
+ action: actionName,
+ streamId: streamId,
+ data: data,
+ desiredSize: controller.desiredSize
+ });
+
+ return startCapability.promise;
+ },
+ pull: function pull(controller) {
+ var pullCapability = (0, _util.createPromiseCapability)();
+ _this2.streamControllers[streamId].pullCall = pullCapability;
+
+ _this2.postMessage({
+ sourceName: sourceName,
+ targetName: targetName,
+ stream: 'pull',
+ streamId: streamId,
+ desiredSize: controller.desiredSize
+ });
+
+ return pullCapability.promise;
+ },
+ cancel: function cancel(reason) {
+ var cancelCapability = (0, _util.createPromiseCapability)();
+ _this2.streamControllers[streamId].cancelCall = cancelCapability;
+ _this2.streamControllers[streamId].isClosed = true;
+
+ _this2.postMessage({
+ sourceName: sourceName,
+ targetName: targetName,
+ stream: 'cancel',
+ reason: reason,
+ streamId: streamId
+ });
+
+ return cancelCapability.promise;
+ }
+ }, queueingStrategy);
+ },
+ _createStreamSink: function _createStreamSink(data) {
+ var _this3 = this;
+
+ var self = this;
+ var action = this.actionHandler[data.action];
+ var streamId = data.streamId;
+ var desiredSize = data.desiredSize;
+ var sourceName = this.sourceName;
+ var targetName = data.sourceName;
+ var capability = (0, _util.createPromiseCapability)();
+
+ var sendStreamRequest = function sendStreamRequest(_ref) {
+ var stream = _ref.stream,
+ chunk = _ref.chunk,
+ transfers = _ref.transfers,
+ success = _ref.success,
+ reason = _ref.reason;
+
+ _this3.postMessage({
+ sourceName: sourceName,
+ targetName: targetName,
+ stream: stream,
+ streamId: streamId,
+ chunk: chunk,
+ success: success,
+ reason: reason
+ }, transfers);
+ };
+
+ var streamSink = {
+ enqueue: function enqueue(chunk) {
+ var size = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 1;
+ var transfers = arguments.length > 2 ? arguments[2] : undefined;
+
+ if (this.isCancelled) {
+ return;
+ }
+
+ var lastDesiredSize = this.desiredSize;
+ this.desiredSize -= size;
+
+ if (lastDesiredSize > 0 && this.desiredSize <= 0) {
+ this.sinkCapability = (0, _util.createPromiseCapability)();
+ this.ready = this.sinkCapability.promise;
+ }
+
+ sendStreamRequest({
+ stream: 'enqueue',
+ chunk: chunk,
+ transfers: transfers
+ });
+ },
+ close: function close() {
+ if (this.isCancelled) {
+ return;
+ }
+
+ this.isCancelled = true;
+ sendStreamRequest({
+ stream: 'close'
+ });
+ delete self.streamSinks[streamId];
+ },
+ error: function error(reason) {
+ if (this.isCancelled) {
+ return;
+ }
+
+ this.isCancelled = true;
+ sendStreamRequest({
+ stream: 'error',
+ reason: reason
+ });
+ },
+ sinkCapability: capability,
+ onPull: null,
+ onCancel: null,
+ isCancelled: false,
+ desiredSize: desiredSize,
+ ready: null
+ };
+ streamSink.sinkCapability.resolve();
+ streamSink.ready = streamSink.sinkCapability.promise;
+ this.streamSinks[streamId] = streamSink;
+ resolveCall(action[0], [data.data, streamSink], action[1]).then(function () {
+ sendStreamRequest({
+ stream: 'start_complete',
+ success: true
+ });
+ }, function (reason) {
+ sendStreamRequest({
+ stream: 'start_complete',
+ success: false,
+ reason: reason
+ });
+ });
+ },
+ _processStreamMessage: function _processStreamMessage(data) {
+ var _this4 = this;
+
+ var sourceName = this.sourceName;
+ var targetName = data.sourceName;
+ var streamId = data.streamId;
+
+ var sendStreamResponse = function sendStreamResponse(_ref2) {
+ var stream = _ref2.stream,
+ success = _ref2.success,
+ reason = _ref2.reason;
+
+ _this4.comObj.postMessage({
+ sourceName: sourceName,
+ targetName: targetName,
+ stream: stream,
+ success: success,
+ streamId: streamId,
+ reason: reason
+ });
+ };
+
+ var deleteStreamController = function deleteStreamController() {
+ Promise.all([_this4.streamControllers[data.streamId].startCall, _this4.streamControllers[data.streamId].pullCall, _this4.streamControllers[data.streamId].cancelCall].map(function (capability) {
+ return capability && finalize(capability.promise);
+ })).then(function () {
+ delete _this4.streamControllers[data.streamId];
+ });
+ };
+
+ switch (data.stream) {
+ case 'start_complete':
+ resolveOrReject(this.streamControllers[data.streamId].startCall, data.success, wrapReason(data.reason));
+ break;
+
+ case 'pull_complete':
+ resolveOrReject(this.streamControllers[data.streamId].pullCall, data.success, wrapReason(data.reason));
+ break;
+
+ case 'pull':
+ if (!this.streamSinks[data.streamId]) {
+ sendStreamResponse({
+ stream: 'pull_complete',
+ success: true
+ });
+ break;
+ }
+
+ if (this.streamSinks[data.streamId].desiredSize <= 0 && data.desiredSize > 0) {
+ this.streamSinks[data.streamId].sinkCapability.resolve();
+ }
+
+ this.streamSinks[data.streamId].desiredSize = data.desiredSize;
+ resolveCall(this.streamSinks[data.streamId].onPull).then(function () {
+ sendStreamResponse({
+ stream: 'pull_complete',
+ success: true
+ });
+ }, function (reason) {
+ sendStreamResponse({
+ stream: 'pull_complete',
+ success: false,
+ reason: reason
+ });
+ });
+ break;
+
+ case 'enqueue':
+ (0, _util.assert)(this.streamControllers[data.streamId], 'enqueue should have stream controller');
+
+ if (!this.streamControllers[data.streamId].isClosed) {
+ this.streamControllers[data.streamId].controller.enqueue(data.chunk);
+ }
+
+ break;
+
+ case 'close':
+ (0, _util.assert)(this.streamControllers[data.streamId], 'close should have stream controller');
+
+ if (this.streamControllers[data.streamId].isClosed) {
+ break;
+ }
+
+ this.streamControllers[data.streamId].isClosed = true;
+ this.streamControllers[data.streamId].controller.close();
+ deleteStreamController();
+ break;
+
+ case 'error':
+ (0, _util.assert)(this.streamControllers[data.streamId], 'error should have stream controller');
+ this.streamControllers[data.streamId].controller.error(wrapReason(data.reason));
+ deleteStreamController();
+ break;
+
+ case 'cancel_complete':
+ resolveOrReject(this.streamControllers[data.streamId].cancelCall, data.success, wrapReason(data.reason));
+ deleteStreamController();
+ break;
+
+ case 'cancel':
+ if (!this.streamSinks[data.streamId]) {
+ break;
+ }
+
+ resolveCall(this.streamSinks[data.streamId].onCancel, [wrapReason(data.reason)]).then(function () {
+ sendStreamResponse({
+ stream: 'cancel_complete',
+ success: true
+ });
+ }, function (reason) {
+ sendStreamResponse({
+ stream: 'cancel_complete',
+ success: false,
+ reason: reason
+ });
+ });
+ this.streamSinks[data.streamId].sinkCapability.reject(wrapReason(data.reason));
+ this.streamSinks[data.streamId].isCancelled = true;
+ delete this.streamSinks[data.streamId];
+ break;
+
+ default:
+ throw new Error('Unexpected stream case');
+ }
+ },
+ postMessage: function postMessage(message, transfers) {
+ if (transfers && this.postMessageTransfers) {
+ this.comObj.postMessage(message, transfers);
+ } else {
+ this.comObj.postMessage(message);
+ }
+ },
+ destroy: function destroy() {
+ this.comObj.removeEventListener('message', this._onComObjOnMessage);
+ }
+};
+
+/***/ }),
+/* 192 */
+/***/ (function(module, exports, __w_pdfjs_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.PDFWorkerStream = void 0;
+
+var _regenerator = _interopRequireDefault(__w_pdfjs_require__(2));
+
+var _util = __w_pdfjs_require__(5);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
+
+var PDFWorkerStream =
+/*#__PURE__*/
+function () {
+ function PDFWorkerStream(msgHandler) {
+ _classCallCheck(this, PDFWorkerStream);
+
+ this._msgHandler = msgHandler;
+ this._contentLength = null;
+ this._fullRequestReader = null;
+ this._rangeRequestReaders = [];
+ }
+
+ _createClass(PDFWorkerStream, [{
+ key: "getFullReader",
+ value: function getFullReader() {
+ (0, _util.assert)(!this._fullRequestReader);
+ this._fullRequestReader = new PDFWorkerStreamReader(this._msgHandler);
+ return this._fullRequestReader;
+ }
+ }, {
+ key: "getRangeReader",
+ value: function getRangeReader(begin, end) {
+ var reader = new PDFWorkerStreamRangeReader(begin, end, this._msgHandler);
+
+ this._rangeRequestReaders.push(reader);
+
+ return reader;
+ }
+ }, {
+ key: "cancelAllRequests",
+ value: function cancelAllRequests(reason) {
+ if (this._fullRequestReader) {
+ this._fullRequestReader.cancel(reason);
+ }
+
+ var readers = this._rangeRequestReaders.slice(0);
+
+ readers.forEach(function (reader) {
+ reader.cancel(reason);
+ });
+ }
+ }]);
+
+ return PDFWorkerStream;
+}();
+
+exports.PDFWorkerStream = PDFWorkerStream;
+
+var PDFWorkerStreamReader =
+/*#__PURE__*/
+function () {
+ function PDFWorkerStreamReader(msgHandler) {
+ var _this = this;
+
+ _classCallCheck(this, PDFWorkerStreamReader);
+
+ this._msgHandler = msgHandler;
+ this.onProgress = null;
+ this._contentLength = null;
+ this._isRangeSupported = false;
+ this._isStreamingSupported = false;
+
+ var readableStream = this._msgHandler.sendWithStream('GetReader');
+
+ this._reader = readableStream.getReader();
+ this._headersReady = this._msgHandler.sendWithPromise('ReaderHeadersReady').then(function (data) {
+ _this._isStreamingSupported = data.isStreamingSupported;
+ _this._isRangeSupported = data.isRangeSupported;
+ _this._contentLength = data.contentLength;
+ });
+ }
+
+ _createClass(PDFWorkerStreamReader, [{
+ key: "read",
+ value: function () {
+ var _read = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee() {
+ var _ref, value, done;
+
+ return _regenerator["default"].wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ _context.next = 2;
+ return this._reader.read();
+
+ case 2:
+ _ref = _context.sent;
+ value = _ref.value;
+ done = _ref.done;
+
+ if (!done) {
+ _context.next = 7;
+ break;
+ }
+
+ return _context.abrupt("return", {
+ value: undefined,
+ done: true
+ });
+
+ case 7:
+ return _context.abrupt("return", {
+ value: value.buffer,
+ done: false
+ });
+
+ case 8:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, this);
+ }));
+
+ function read() {
+ return _read.apply(this, arguments);
+ }
+
+ return read;
+ }()
+ }, {
+ key: "cancel",
+ value: function cancel(reason) {
+ this._reader.cancel(reason);
+ }
+ }, {
+ key: "headersReady",
+ get: function get() {
+ return this._headersReady;
+ }
+ }, {
+ key: "contentLength",
+ get: function get() {
+ return this._contentLength;
+ }
+ }, {
+ key: "isStreamingSupported",
+ get: function get() {
+ return this._isStreamingSupported;
+ }
+ }, {
+ key: "isRangeSupported",
+ get: function get() {
+ return this._isRangeSupported;
+ }
+ }]);
+
+ return PDFWorkerStreamReader;
+}();
+
+var PDFWorkerStreamRangeReader =
+/*#__PURE__*/
+function () {
+ function PDFWorkerStreamRangeReader(begin, end, msgHandler) {
+ _classCallCheck(this, PDFWorkerStreamRangeReader);
+
+ this._msgHandler = msgHandler;
+ this.onProgress = null;
+
+ var readableStream = this._msgHandler.sendWithStream('GetRangeReader', {
+ begin: begin,
+ end: end
+ });
+
+ this._reader = readableStream.getReader();
+ }
+
+ _createClass(PDFWorkerStreamRangeReader, [{
+ key: "read",
+ value: function () {
+ var _read2 = _asyncToGenerator(
+ /*#__PURE__*/
+ _regenerator["default"].mark(function _callee2() {
+ var _ref2, value, done;
+
+ return _regenerator["default"].wrap(function _callee2$(_context2) {
+ while (1) {
+ switch (_context2.prev = _context2.next) {
+ case 0:
+ _context2.next = 2;
+ return this._reader.read();
+
+ case 2:
+ _ref2 = _context2.sent;
+ value = _ref2.value;
+ done = _ref2.done;
+
+ if (!done) {
+ _context2.next = 7;
+ break;
+ }
+
+ return _context2.abrupt("return", {
+ value: undefined,
+ done: true
+ });
+
+ case 7:
+ return _context2.abrupt("return", {
+ value: value.buffer,
+ done: false
+ });
+
+ case 8:
+ case "end":
+ return _context2.stop();
+ }
+ }
+ }, _callee2, this);
+ }));
+
+ function read() {
+ return _read2.apply(this, arguments);
+ }
+
+ return read;
+ }()
+ }, {
+ key: "cancel",
+ value: function cancel(reason) {
+ this._reader.cancel(reason);
+ }
+ }, {
+ key: "isStreamingSupported",
+ get: function get() {
+ return false;
+ }
+ }]);
+
+ return PDFWorkerStreamRangeReader;
+}();
+
+/***/ })
+/******/ ]);
+});
+//# sourceMappingURL=pdf.worker.js.map \ No newline at end of file
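
The MessageHandler module in the bundled pdf.worker.js above implements a small RPC layer over ``postMessage``: ``send`` is fire-and-forget, ``sendWithPromise`` tags each request with an incrementing ``callbackId`` and resolves the matching reply, and ``sendWithStream``/``_createStreamSink`` bridge a ReadableStream across the worker boundary. The following is a minimal, hypothetical Python sketch of just the ``callbackId`` bookkeeping, included only to illustrate the protocol; ``TinyMessageHandler`` and its method names are made up and are not part of pdf.js.

.. code-block:: python

    from concurrent.futures import Future


    class TinyMessageHandler:
        def __init__(self, post_message):
            self._post = post_message   # callable that delivers a dict to the peer
            self._next_id = 1
            self._pending = {}          # callbackId -> Future

        def send_with_promise(self, action, data):
            callback_id = self._next_id
            self._next_id += 1
            future = Future()
            self._pending[callback_id] = future
            self._post({"action": action, "data": data, "callbackId": callback_id})
            return future

        def on_reply(self, message):
            # The peer echoes callbackId; an "error" key rejects, otherwise resolve.
            future = self._pending.pop(message["callbackId"])
            if "error" in message:
                future.set_exception(RuntimeError(message["error"]))
            else:
                future.set_result(message.get("data"))
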
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/.coveragerc b/testing/web-platform/tests/tools/third_party/pluggy/.coveragerc
new file mode 100644
index 0000000000..1b1de1cd24
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/.coveragerc
@@ -0,0 +1,14 @@
+[run]
+include =
+ pluggy/*
+ testing/*
+ */lib/python*/site-packages/pluggy/*
+ */pypy*/site-packages/pluggy/*
+ *\Lib\site-packages\pluggy\*
+branch = 1
+
+[paths]
+source = pluggy/
+ */lib/python*/site-packages/pluggy/
+ */pypy*/site-packages/pluggy/
+ *\Lib\site-packages\pluggy\
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/.github/workflows/main.yml b/testing/web-platform/tests/tools/third_party/pluggy/.github/workflows/main.yml
new file mode 100644
index 0000000000..e1022ca96d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/.github/workflows/main.yml
@@ -0,0 +1,148 @@
+name: main
+
+on:
+ push:
+ branches:
+ - main
+ tags:
+ - "*"
+
+ pull_request:
+ branches:
+ - main
+
+jobs:
+ build:
+ runs-on: ${{ matrix.os }}
+
+ strategy:
+ fail-fast: false
+ matrix:
+ name: [
+ "windows-py36",
+ "windows-py39",
+ "windows-pypy3",
+
+ "ubuntu-py36",
+ "ubuntu-py36-pytestmain",
+ "ubuntu-py37",
+ "ubuntu-py38",
+ "ubuntu-py39",
+ "ubuntu-pypy3",
+ "ubuntu-benchmark",
+
+ "linting",
+ "docs",
+ ]
+
+ include:
+ - name: "windows-py36"
+ python: "3.6"
+ os: windows-latest
+ tox_env: "py36"
+ - name: "windows-py39"
+ python: "3.9"
+ os: windows-latest
+ tox_env: "py39"
+ - name: "windows-pypy3"
+ python: "pypy3"
+ os: windows-latest
+ tox_env: "pypy3"
+ - name: "ubuntu-py36"
+ python: "3.6"
+ os: ubuntu-latest
+ tox_env: "py36"
+ use_coverage: true
+ - name: "ubuntu-py36-pytestmain"
+ python: "3.6"
+ os: ubuntu-latest
+ tox_env: "py36-pytestmain"
+ use_coverage: true
+ - name: "ubuntu-py37"
+ python: "3.7"
+ os: ubuntu-latest
+ tox_env: "py37"
+ use_coverage: true
+ - name: "ubuntu-py38"
+ python: "3.8"
+ os: ubuntu-latest
+ tox_env: "py38"
+ use_coverage: true
+ - name: "ubuntu-py39"
+ python: "3.9"
+ os: ubuntu-latest
+ tox_env: "py39"
+ use_coverage: true
+ - name: "ubuntu-pypy3"
+ python: "pypy3"
+ os: ubuntu-latest
+ tox_env: "pypy3"
+ use_coverage: true
+ - name: "ubuntu-benchmark"
+ python: "3.8"
+ os: ubuntu-latest
+ tox_env: "benchmark"
+ - name: "linting"
+ python: "3.8"
+ os: ubuntu-latest
+ tox_env: "linting"
+ - name: "docs"
+ python: "3.8"
+ os: ubuntu-latest
+ tox_env: "docs"
+
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+
+ - name: Set up Python ${{ matrix.python }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python }}
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip setuptools
+ python -m pip install tox coverage
+
+ - name: Test without coverage
+ if: "! matrix.use_coverage"
+ run: "tox -e ${{ matrix.tox_env }}"
+
+ - name: Test with coverage
+ if: "matrix.use_coverage"
+ run: "tox -e ${{ matrix.tox_env }}-coverage"
+
+ - name: Upload coverage
+ if: matrix.use_coverage && github.repository == 'pytest-dev/pluggy'
+ env:
+ CODECOV_NAME: ${{ matrix.name }}
+ run: bash scripts/upload-coverage.sh -F GHA,${{ runner.os }}
+
+ deploy:
+ if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') && github.repository == 'pytest-dev/pluggy'
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+
+ - uses: actions/setup-python@v2
+ with:
+ python-version: "3.8"
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install --upgrade wheel setuptools setuptools_scm
+
+ - name: Build package
+ run: python setup.py sdist bdist_wheel
+
+ - name: Publish package
+ uses: pypa/gh-action-pypi-publish@v1.4.1
+ with:
+ user: __token__
+ password: ${{ secrets.pypi_token }}
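
Each matrix leg in the workflow above boils down to installing ``tox`` and ``coverage`` and then running a single tox environment, with a ``-coverage`` suffix appended when ``use_coverage`` is set. Below is a rough local equivalent of one leg (``ubuntu-py39``), sketched in Python with ``subprocess``; the variable names are illustrative and not part of the workflow.

.. code-block:: python

    import subprocess

    tox_env = "py39"       # the matrix "tox_env" value for the ubuntu-py39 leg
    use_coverage = True    # the workflow runs "tox -e <env>-coverage" when set

    subprocess.run(["python", "-m", "pip", "install", "--upgrade", "pip", "setuptools"], check=True)
    subprocess.run(["python", "-m", "pip", "install", "tox", "coverage"], check=True)
    subprocess.run(["tox", "-e", tox_env + ("-coverage" if use_coverage else "")], check=True)
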
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/.gitignore b/testing/web-platform/tests/tools/third_party/pluggy/.gitignore
new file mode 100644
index 0000000000..4580536c7a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/.gitignore
@@ -0,0 +1,64 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+*.swp
+
+# generated by setuptools_scm
+src/pluggy/_version.py
+
+# generated by pip
+pip-wheel-metadata/
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/.pre-commit-config.yaml b/testing/web-platform/tests/tools/third_party/pluggy/.pre-commit-config.yaml
new file mode 100644
index 0000000000..d919ffeb2f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/.pre-commit-config.yaml
@@ -0,0 +1,34 @@
+repos:
+- repo: https://github.com/ambv/black
+ rev: 21.7b0
+ hooks:
+ - id: black
+ args: [--safe, --quiet]
+- repo: https://github.com/asottile/blacken-docs
+ rev: v1.10.0
+ hooks:
+ - id: blacken-docs
+ additional_dependencies: [black==21.7b0]
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v2.1.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: flake8
+- repo: local
+ hooks:
+ - id: rst
+ name: rst
+ entry: rst-lint --encoding utf-8
+ files: ^(CHANGELOG.rst|HOWTORELEASE.rst|README.rst|changelog/.*)$
+ language: python
+ additional_dependencies: [pygments, restructuredtext_lint]
+- repo: https://github.com/pre-commit/pygrep-hooks
+ rev: v1.9.0
+ hooks:
+ - id: rst-backticks
+- repo: https://github.com/asottile/pyupgrade
+ rev: v2.23.3
+ hooks:
+ - id: pyupgrade
+ args: [--py36-plus]
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/CHANGELOG.rst b/testing/web-platform/tests/tools/third_party/pluggy/CHANGELOG.rst
new file mode 100644
index 0000000000..13a388c435
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/CHANGELOG.rst
@@ -0,0 +1,409 @@
+=========
+Changelog
+=========
+
+.. towncrier release notes start
+
+pluggy 1.0.0 (2021-08-25)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- `#116 <https://github.com/pytest-dev/pluggy/issues/116>`_: Remove deprecated ``implprefix`` support.
+ Decorate hook implementations using an instance of ``HookimplMarker`` instead.
+ The deprecation was announced in release ``0.7.0``.
+
+
+- `#120 <https://github.com/pytest-dev/pluggy/issues/120>`_: Remove the deprecated ``proc`` argument to ``call_historic``.
+ Use ``result_callback`` instead, which has the same behavior.
+ The deprecation was announced in release ``0.7.0``.
+
+
+- `#265 <https://github.com/pytest-dev/pluggy/issues/265>`_: Remove the ``_Result.result`` property. Use ``_Result.get_result()`` instead.
+ Note that unlike ``result``, ``get_result()`` raises the exception if the hook raised.
+ The deprecation was announced in release ``0.6.0``.
+
+
+- `#267 <https://github.com/pytest-dev/pluggy/issues/267>`_: Remove official support for Python 3.4.
+
+
+- `#272 <https://github.com/pytest-dev/pluggy/issues/272>`_: Dropped support for Python 2.
+ Continue to use pluggy 0.13.x for Python 2 support.
+
+
+- `#308 <https://github.com/pytest-dev/pluggy/issues/308>`_: Remove official support for Python 3.5.
+
+
+- `#313 <https://github.com/pytest-dev/pluggy/issues/313>`_: The internal ``pluggy.callers``, ``pluggy.manager`` and ``pluggy.hooks`` are now explicitly marked private by a ``_`` prefix (e.g. ``pluggy._callers``).
+ Only API exported by the top-level ``pluggy`` module is considered public.
+
+
+- `#59 <https://github.com/pytest-dev/pluggy/issues/59>`_: Remove legacy ``__multicall__`` recursive hook calling system.
+ The deprecation was announced in release ``0.5.0``.
+
+
+
+Features
+--------
+
+- `#282 <https://github.com/pytest-dev/pluggy/issues/282>`_: When registering a hookimpl which is declared as ``hookwrapper=True`` but whose
+ function is not a generator function, a ``PluginValidationError`` exception is
+ now raised.
+
+ Previously this problem would cause an error only later, when calling the hook.
+
+ In the unlikely case that you have a hookwrapper that *returns* a generator
+ instead of yielding directly, for example:
+
+ .. code-block:: python
+
+ def my_hook_real_implementation(arg):
+ print("before")
+ yield
+ print("after")
+
+
+ @hookimpl(hookwrapper=True)
+ def my_hook(arg):
+ return my_hook_real_implementation(arg)
+
+ change it to use ``yield from`` instead:
+
+ .. code-block:: python
+
+ @hookimpl(hookwrapper=True)
+ def my_hook(arg):
+ yield from my_hook_real_implementation(arg)
+
+
+- `#309 <https://github.com/pytest-dev/pluggy/issues/309>`_: Add official support for Python 3.9.
+
+
+pluggy 0.13.1 (2019-11-21)
+==========================
+
+Trivial/Internal Changes
+------------------------
+
+- `#236 <https://github.com/pytest-dev/pluggy/pull/236>`_: Improved documentation, especially with regard to references.
+
+
+pluggy 0.13.0 (2019-09-10)
+==========================
+
+Trivial/Internal Changes
+------------------------
+
+- `#222 <https://github.com/pytest-dev/pluggy/issues/222>`_: Replace ``importlib_metadata`` backport with ``importlib.metadata`` from the
+ standard library on Python 3.8+.
+
+
+pluggy 0.12.0 (2019-05-27)
+==========================
+
+Features
+--------
+
+- `#215 <https://github.com/pytest-dev/pluggy/issues/215>`_: Switch from ``pkg_resources`` to ``importlib-metadata`` for entrypoint detection for improved performance and import time. This time with ``.egg`` support.
+
+
+pluggy 0.11.0 (2019-05-07)
+==========================
+
+Bug Fixes
+---------
+
+- `#205 <https://github.com/pytest-dev/pluggy/issues/205>`_: Revert changes made in 0.10.0 release breaking ``.egg`` installs.
+
+
+pluggy 0.10.0 (2019-05-07)
+==========================
+
+Features
+--------
+
+- `#199 <https://github.com/pytest-dev/pluggy/issues/199>`_: Switch from ``pkg_resources`` to ``importlib-metadata`` for entrypoint detection for improved performance and import time.
+
+
+pluggy 0.9.0 (2019-02-21)
+=========================
+
+Features
+--------
+
+- `#189 <https://github.com/pytest-dev/pluggy/issues/189>`_: ``PluginManager.load_setuptools_entrypoints`` now accepts a ``name`` parameter that when given will
+ load only entry points with that name.
+
+ ``PluginManager.load_setuptools_entrypoints`` also now returns the number of plugins loaded by the
+ call, as opposed to the number of all plugins loaded by all calls to this method.
+
+
+
+Bug Fixes
+---------
+
+- `#187 <https://github.com/pytest-dev/pluggy/issues/187>`_: Fix internal ``varnames`` function for PyPy3.
+
+
+pluggy 0.8.1 (2018-11-09)
+=========================
+
+Trivial/Internal Changes
+------------------------
+
+- `#166 <https://github.com/pytest-dev/pluggy/issues/166>`_: Add ``stacklevel=2`` to implprefix warning so that the reported location of the warning is the caller of PluginManager.
+
+
+pluggy 0.8.0 (2018-10-15)
+=========================
+
+Features
+--------
+
+- `#177 <https://github.com/pytest-dev/pluggy/issues/177>`_: Add ``get_hookimpls()`` method to hook callers.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- `#165 <https://github.com/pytest-dev/pluggy/issues/165>`_: Add changelog in long package description and documentation.
+
+
+- `#172 <https://github.com/pytest-dev/pluggy/issues/172>`_: Add a test exemplifying the opt-in nature of spec defined args.
+
+
+- `#57 <https://github.com/pytest-dev/pluggy/issues/57>`_: Encapsulate hook specifications in a type for easier introspection.
+
+
+pluggy 0.7.1 (2018-07-28)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- `#116 <https://github.com/pytest-dev/pluggy/issues/116>`_: Deprecate the ``implprefix`` kwarg to ``PluginManager`` and instead
+ expect users to start using explicit ``HookimplMarker`` everywhere.
+
+
+
+Features
+--------
+
+- `#122 <https://github.com/pytest-dev/pluggy/issues/122>`_: Add ``.plugin`` member to ``PluginValidationError`` to access failing plugin during post-mortem.
+
+
+- `#138 <https://github.com/pytest-dev/pluggy/issues/138>`_: Add per implementation warnings support for hookspecs allowing for both
+ deprecation and future warnings of legacy and (future) experimental hooks
+ respectively.
+
+
+
+Bug Fixes
+---------
+
+- `#110 <https://github.com/pytest-dev/pluggy/issues/110>`_: Fix a bug where ``_HookCaller.call_historic()`` would call the ``proc``
+ arg even when the default is ``None``, resulting in a ``TypeError``.
+
+- `#160 <https://github.com/pytest-dev/pluggy/issues/160>`_: Fix problem when handling ``VersionConflict`` errors when loading setuptools plugins.
+
+
+
+Improved Documentation
+----------------------
+
+- `#123 <https://github.com/pytest-dev/pluggy/issues/123>`_: Document how exceptions are handled and how the hook call loop
+ terminates immediately on the first error which is then delivered
+ to any surrounding wrappers.
+
+
+- `#136 <https://github.com/pytest-dev/pluggy/issues/136>`_: Docs rework including a much better introduction and comprehensive example
+ set for new users. A big thanks goes out to @obestwalter for the great work!
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- `#117 <https://github.com/pytest-dev/pluggy/issues/117>`_: Break up the main monolithic package modules into separate modules by concern
+
+
+- `#131 <https://github.com/pytest-dev/pluggy/issues/131>`_: Automate ``setuptools`` wheels building and PyPI upload using TravisCI.
+
+
+- `#153 <https://github.com/pytest-dev/pluggy/issues/153>`_: Reorganize tests more appropriately by modules relating to each
+ internal component/feature. This is in an effort to avoid (future)
+ duplication and better separation of concerns in the test set.
+
+
+- `#156 <https://github.com/pytest-dev/pluggy/issues/156>`_: Add ``HookImpl.__repr__()`` for better debugging.
+
+
+- `#66 <https://github.com/pytest-dev/pluggy/issues/66>`_: Start using ``towncrier`` and a custom ``tox`` environment to prepare releases!
+
+
+pluggy 0.7.0 (Unreleased)
+=========================
+
+* `#160 <https://github.com/pytest-dev/pluggy/issues/160>`_: We discovered a deployment issue so this version was never released to PyPI, only the tag exists.
+
+pluggy 0.6.0 (2017-11-24)
+=========================
+
+- Add CI testing for the features, release, and master
+ branches of ``pytest`` (PR `#79`_).
+- Document public API for ``_Result`` objects passed to wrappers
+ (PR `#85`_).
+- Document and test hook LIFO ordering (PR `#85`_).
+- Turn warnings into errors in test suite (PR `#89`_).
+- Deprecate ``_Result.result`` (PR `#88`_).
+- Convert ``_Multicall`` to a simple function distinguishing it from
+ the legacy version (PR `#90`_).
+- Resolve E741 errors (PR `#96`_).
+- Test and bug fix for unmarked hook collection (PRs `#97`_ and
+ `#102`_).
+- Drop support for EOL Python 2.6 and 3.3 (PR `#103`_).
+- Fix ``inspect`` based arg introspection on py3.6 (PR `#94`_).
+
+.. _#79: https://github.com/pytest-dev/pluggy/pull/79
+.. _#85: https://github.com/pytest-dev/pluggy/pull/85
+.. _#88: https://github.com/pytest-dev/pluggy/pull/88
+.. _#89: https://github.com/pytest-dev/pluggy/pull/89
+.. _#90: https://github.com/pytest-dev/pluggy/pull/90
+.. _#94: https://github.com/pytest-dev/pluggy/pull/94
+.. _#96: https://github.com/pytest-dev/pluggy/pull/96
+.. _#97: https://github.com/pytest-dev/pluggy/pull/97
+.. _#102: https://github.com/pytest-dev/pluggy/pull/102
+.. _#103: https://github.com/pytest-dev/pluggy/pull/103
+
+
+pluggy 0.5.2 (2017-09-06)
+=========================
+
+- fix bug where ``firstresult`` wrappers were being sent an incorrectly configured
+ ``_Result`` (a list was set instead of a single value). Add tests to check for
+ this as well as ``_Result.force_result()`` behaviour. Thanks to `@tgoodlet`_
+ for the PR `#72`_.
+
+- fix incorrect ``getattr`` of ``DeprecationWarning`` from the ``warnings``
+ module. Thanks to `@nicoddemus`_ for the PR `#77`_.
+
+- hide ``pytest`` tracebacks in certain core routines. Thanks to
+ `@nicoddemus`_ for the PR `#80`_.
+
+.. _#72: https://github.com/pytest-dev/pluggy/pull/72
+.. _#77: https://github.com/pytest-dev/pluggy/pull/77
+.. _#80: https://github.com/pytest-dev/pluggy/pull/80
+
+
+pluggy 0.5.1 (2017-08-29)
+=========================
+
+- fix a bug and add tests for case where ``firstresult`` hooks return
+ ``None`` results. Thanks to `@RonnyPfannschmidt`_ and `@tgoodlet`_
+ for the issue (`#68`_) and PR (`#69`_) respectively.
+
+.. _#69: https://github.com/pytest-dev/pluggy/pull/69
+.. _#68: https://github.com/pytest-dev/pluggy/issues/68
+
+
+pluggy 0.5.0 (2017-08-28)
+=========================
+
+- fix bug where callbacks for historic hooks would not be called for
+ already registered plugins. Thanks `@vodik`_ for the PR
+ and `@hpk42`_ for further fixes.
+
+- fix `#17`_ by considering only actual functions for hooks.
+ This removes the ability to register arbitrary callable objects,
+ which at first glance is a reasonable simplification.
+ Thanks `@RonnyPfannschmidt`_ for the report and PR.
+
+- fix `#19`_: allow registering hookspecs from instances. The PR from
+ `@tgoodlet`_ also modernized the varnames implementation.
+
+- resolve `#32`_: split up the test set into multiple modules.
+ Thanks to `@RonnyPfannschmidt`_ for the PR and `@tgoodlet`_ for
+ the initial request.
+
+- resolve `#14`_: add full sphinx docs. Thanks to `@tgoodlet`_ for
+ PR `#39`_.
+
+- add hook call mismatch warnings. Thanks to `@tgoodlet`_ for the
+ PR `#42`_.
+
+- resolve `#44`_: move to new-style classes. Thanks to `@MichalTHEDUDE`_
+ for PR `#46`_.
+
+- add baseline benchmarking/speed tests using ``pytest-benchmark``
+ in PR `#54`_. Thanks to `@tgoodlet`_.
+
+- update the README to showcase the API. Thanks to `@tgoodlet`_ for the
+ issue and PR `#55`_.
+
+- deprecate ``__multicall__`` and add a faster call loop implementation.
+ Thanks to `@tgoodlet`_ for PR `#58`_.
+
+- raise a comprehensible error when a ``hookimpl`` is called with positional
+ args. Thanks to `@RonnyPfannschmidt`_ for the issue and `@tgoodlet`_ for
+ PR `#60`_.
+
+- fix the ``firstresult`` test making it more complete
+ and remove a duplicate of that test. Thanks to `@tgoodlet`_
+ for PR `#62`_.
+
+.. _#62: https://github.com/pytest-dev/pluggy/pull/62
+.. _#60: https://github.com/pytest-dev/pluggy/pull/60
+.. _#58: https://github.com/pytest-dev/pluggy/pull/58
+.. _#55: https://github.com/pytest-dev/pluggy/pull/55
+.. _#54: https://github.com/pytest-dev/pluggy/pull/54
+.. _#46: https://github.com/pytest-dev/pluggy/pull/46
+.. _#44: https://github.com/pytest-dev/pluggy/issues/44
+.. _#42: https://github.com/pytest-dev/pluggy/pull/42
+.. _#39: https://github.com/pytest-dev/pluggy/pull/39
+.. _#32: https://github.com/pytest-dev/pluggy/pull/32
+.. _#19: https://github.com/pytest-dev/pluggy/issues/19
+.. _#17: https://github.com/pytest-dev/pluggy/issues/17
+.. _#14: https://github.com/pytest-dev/pluggy/issues/14
+
+
+pluggy 0.4.0 (2016-09-25)
+=========================
+
+- add ``has_plugin(name)`` method to pluginmanager. thanks `@nicoddemus`_.
+
+- fix `#11`_: make plugin parsing more resilient against exceptions
+ from ``__getattr__`` functions. Thanks `@nicoddemus`_.
+
+- fix issue `#4`_: specific ``HookCallError`` exception for when a hook call
+ does not provide enough arguments.
+
+- better error message when loading setuptools entrypoints fails
+ due to a ``VersionConflict``. Thanks `@blueyed`_.
+
+.. _#11: https://github.com/pytest-dev/pluggy/issues/11
+.. _#4: https://github.com/pytest-dev/pluggy/issues/4
+
+
+pluggy 0.3.1 (2015-09-17)
+=========================
+
+- avoid using deprecated-in-python3.5 getargspec method. Thanks
+ `@mdboom`_.
+
+
+pluggy 0.3.0 (2015-05-07)
+=========================
+
+initial release
+
+.. contributors
+.. _@hpk42: https://github.com/hpk42
+.. _@tgoodlet: https://github.com/goodboy
+.. _@MichalTHEDUDE: https://github.com/MichalTHEDUDE
+.. _@vodik: https://github.com/vodik
+.. _@RonnyPfannschmidt: https://github.com/RonnyPfannschmidt
+.. _@blueyed: https://github.com/blueyed
+.. _@nicoddemus: https://github.com/nicoddemus
+.. _@mdboom: https://github.com/mdboom
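
A short sketch tying together two of the API notes in the changelog above: the removal of ``implprefix`` in favour of explicit ``HookimplMarker`` decorators (issue #116), and the ``name`` filter plus per-call return count of ``PluginManager.load_setuptools_entrypoints`` (issue #189). The project name ``"myproject"``, the plugin class, and the entry-point name are made up for illustration.

.. code-block:: python

    import pluggy

    hookimpl = pluggy.HookimplMarker("myproject")  # explicit marker; implprefix was removed


    class MyPlugin:
        @hookimpl
        def myhook(self, arg1, arg2):
            return arg1 + arg2


    pm = pluggy.PluginManager("myproject")
    pm.register(MyPlugin())

    # load_setuptools_entrypoints() accepts an optional name filter and returns
    # the number of plugins loaded by *this* call (0 here if nothing matches).
    count = pm.load_setuptools_entrypoints("myproject", name="some-plugin")
    print(count)
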
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/LICENSE b/testing/web-platform/tests/tools/third_party/pluggy/LICENSE
new file mode 100644
index 0000000000..85f4dd63d2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 holger krekel (rather uses bitbucket/hpk42)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/MANIFEST.in b/testing/web-platform/tests/tools/third_party/pluggy/MANIFEST.in
new file mode 100644
index 0000000000..0cf8f3e088
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/MANIFEST.in
@@ -0,0 +1,7 @@
+include CHANGELOG
+include README.rst
+include setup.py
+include tox.ini
+include LICENSE
+graft testing
+recursive-exclude * *.pyc *.pyo
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/README.rst b/testing/web-platform/tests/tools/third_party/pluggy/README.rst
new file mode 100644
index 0000000000..3496617e1e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/README.rst
@@ -0,0 +1,101 @@
+====================================================
+pluggy - A minimalist production ready plugin system
+====================================================
+
+|pypi| |conda-forge| |versions| |github-actions| |gitter| |black| |codecov|
+
+This is the core framework used by the `pytest`_, `tox`_, and `devpi`_ projects.
+
+Please `read the docs`_ to learn more!
+
+A definitive example
+====================
+.. code-block:: python
+
+ import pluggy
+
+ hookspec = pluggy.HookspecMarker("myproject")
+ hookimpl = pluggy.HookimplMarker("myproject")
+
+
+ class MySpec:
+ """A hook specification namespace."""
+
+ @hookspec
+ def myhook(self, arg1, arg2):
+ """My special little hook that you can customize."""
+
+
+ class Plugin_1:
+ """A hook implementation namespace."""
+
+ @hookimpl
+ def myhook(self, arg1, arg2):
+ print("inside Plugin_1.myhook()")
+ return arg1 + arg2
+
+
+ class Plugin_2:
+ """A 2nd hook implementation namespace."""
+
+ @hookimpl
+ def myhook(self, arg1, arg2):
+ print("inside Plugin_2.myhook()")
+ return arg1 - arg2
+
+
+ # create a manager and add the spec
+ pm = pluggy.PluginManager("myproject")
+ pm.add_hookspecs(MySpec)
+
+ # register plugins
+ pm.register(Plugin_1())
+ pm.register(Plugin_2())
+
+ # call our ``myhook`` hook
+ results = pm.hook.myhook(arg1=1, arg2=2)
+ print(results)
+
+
+Running this directly gets us::
+
+ $ python docs/examples/toy-example.py
+ inside Plugin_2.myhook()
+ inside Plugin_1.myhook()
+ [-1, 3]
+
+
+.. badges
+
+.. |pypi| image:: https://img.shields.io/pypi/v/pluggy.svg
+ :target: https://pypi.org/pypi/pluggy
+
+.. |versions| image:: https://img.shields.io/pypi/pyversions/pluggy.svg
+ :target: https://pypi.org/pypi/pluggy
+
+.. |github-actions| image:: https://github.com/pytest-dev/pluggy/workflows/main/badge.svg
+ :target: https://github.com/pytest-dev/pluggy/actions
+
+.. |conda-forge| image:: https://img.shields.io/conda/vn/conda-forge/pluggy.svg
+ :target: https://anaconda.org/conda-forge/pluggy
+
+.. |gitter| image:: https://badges.gitter.im/pytest-dev/pluggy.svg
+ :alt: Join the chat at https://gitter.im/pytest-dev/pluggy
+ :target: https://gitter.im/pytest-dev/pluggy?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
+
+.. |black| image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+
+.. |codecov| image:: https://codecov.io/gh/pytest-dev/pluggy/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/pytest-dev/pluggy
+ :alt: Code coverage Status
+
+.. links
+.. _pytest:
+ http://pytest.org
+.. _tox:
+ https://tox.readthedocs.org
+.. _devpi:
+ http://doc.devpi.net
+.. _read the docs:
+ https://pluggy.readthedocs.io/en/latest/
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/RELEASING.rst b/testing/web-platform/tests/tools/third_party/pluggy/RELEASING.rst
new file mode 100644
index 0000000000..ee0d1331e0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/RELEASING.rst
@@ -0,0 +1,23 @@
+Release Procedure
+-----------------
+
+#. From a clean work tree, execute::
+
+ tox -e release -- VERSION
+
+ This will create the branch ready to be pushed.
+
+#. Open a PR targeting ``main``.
+
+#. All tests must pass and the PR must be approved by at least one other maintainer.
+
+#. Publish to PyPI by pushing a tag::
+
+ git tag X.Y.Z release-X.Y.Z
+ git push git@github.com:pytest-dev/pluggy.git X.Y.Z
+
+ The tag will trigger a new build, which will deploy to PyPI.
+
+#. Make sure it is `available on PyPI <https://pypi.org/project/pluggy>`_.
+
+#. Merge the PR into ``main``, either manually or using GitHub's web interface.
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/changelog/README.rst b/testing/web-platform/tests/tools/third_party/pluggy/changelog/README.rst
new file mode 100644
index 0000000000..47e21fb33f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/changelog/README.rst
@@ -0,0 +1,32 @@
+This directory contains "newsfragments" which are short files that contain a small **ReST**-formatted
+text that will be added to the next ``CHANGELOG``.
+
+The ``CHANGELOG`` will be read by users, so this description should be aimed at pytest users
+instead of describing internal changes which are only relevant to the developers.
+
+Make sure to use full sentences with correct case and punctuation, for example::
+
+ Fix issue with non-ascii messages from the ``warnings`` module.
+
+Each file should be named like ``<ISSUE>.<TYPE>.rst``, where
+``<ISSUE>`` is an issue number, and ``<TYPE>`` is one of:
+
+* ``feature``: new user facing features, like new command-line options and new behavior.
+* ``bugfix``: fixes a reported bug.
+* ``doc``: documentation improvement, like rewording an entire section or adding missing docs.
+* ``removal``: feature deprecation or removal.
+* ``vendor``: changes in packages vendored in pytest.
+* ``trivial``: fixing a small typo or internal change that might be noteworthy.
+
+So for example: ``123.feature.rst``, ``456.bugfix.rst``.
+
+If your PR fixes an issue, use that number here. If there is no issue,
+then after you submit the PR and get the PR number you can add a
+changelog using that instead.
+
+If you are not sure what issue type to use, don't hesitate to ask in your PR.
+
+``towncrier`` preserves multiple paragraphs and formatting (code blocks, lists, and so on), but for entries
+other than ``features`` it is usually better to stick to a single paragraph to keep it concise. You can install
+``towncrier`` and then run ``towncrier --draft``
+if you want to get a preview of how your change will look in the final release notes.
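
As a concrete illustration of the naming scheme described above, the snippet below writes a newsfragment for a hypothetical issue #123 and previews the rendered changelog; it assumes ``towncrier`` is installed and is not part of the repository.

.. code-block:: python

    import subprocess
    from pathlib import Path

    # <ISSUE>.<TYPE>.rst -- issue number 123 and type "feature" are invented here.
    Path("changelog/123.feature.rst").write_text(
        "Add an example feature that users will care about.\n"
    )
    subprocess.run(["towncrier", "--draft"], check=True)
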
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/changelog/_template.rst b/testing/web-platform/tests/tools/third_party/pluggy/changelog/_template.rst
new file mode 100644
index 0000000000..974e5c1b2d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/changelog/_template.rst
@@ -0,0 +1,40 @@
+{% for section in sections %}
+{% set underline = "-" %}
+{% if section %}
+{{section}}
+{{ underline * section|length }}{% set underline = "~" %}
+
+{% endif %}
+{% if sections[section] %}
+{% for category, val in definitions.items() if category in sections[section] %}
+
+{{ definitions[category]['name'] }}
+{{ underline * definitions[category]['name']|length }}
+
+{% if definitions[category]['showcontent'] %}
+{% for text, values in sections[section][category]|dictsort(by='value') %}
+{% set issue_joiner = joiner(', ') %}
+- {% for value in values|sort %}{{ issue_joiner() }}`{{ value }} <https://github.com/pytest-dev/pluggy/issues/{{ value[1:] }}>`_{% endfor %}: {{ text }}
+
+
+{% endfor %}
+{% else %}
+- {{ sections[section][category]['']|sort|join(', ') }}
+
+
+{% endif %}
+{% if sections[section][category]|length == 0 %}
+
+No significant changes.
+
+
+{% else %}
+{% endif %}
+{% endfor %}
+{% else %}
+
+No significant changes.
+
+
+{% endif %}
+{% endfor %}
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/codecov.yml b/testing/web-platform/tests/tools/third_party/pluggy/codecov.yml
new file mode 100644
index 0000000000..a0a308588e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/codecov.yml
@@ -0,0 +1,7 @@
+coverage:
+ status:
+ project: true
+ patch: true
+ changes: true
+
+comment: off
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/_static/img/plug.png b/testing/web-platform/tests/tools/third_party/pluggy/docs/_static/img/plug.png
new file mode 100644
index 0000000000..3339f8a608
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/_static/img/plug.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/api_reference.rst b/testing/web-platform/tests/tools/third_party/pluggy/docs/api_reference.rst
new file mode 100644
index 0000000000..d9552d4485
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/api_reference.rst
@@ -0,0 +1,19 @@
+:orphan:
+
+API Reference
+=============
+
+.. automodule:: pluggy
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+.. autoclass:: pluggy._callers._Result
+.. automethod:: pluggy._callers._Result.get_result
+.. automethod:: pluggy._callers._Result.force_result
+
+.. autoclass:: pluggy._hooks._HookCaller
+.. automethod:: pluggy._hooks._HookCaller.call_extra
+.. automethod:: pluggy._hooks._HookCaller.call_historic
+
+.. autoclass:: pluggy._hooks._HookRelay
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/changelog.rst b/testing/web-platform/tests/tools/third_party/pluggy/docs/changelog.rst
new file mode 100644
index 0000000000..565b0521d0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/changelog.rst
@@ -0,0 +1 @@
+.. include:: ../CHANGELOG.rst
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/conf.py b/testing/web-platform/tests/tools/third_party/pluggy/docs/conf.py
new file mode 100644
index 0000000000..f8e70c88bf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/conf.py
@@ -0,0 +1,87 @@
+import sys
+
+if sys.version_info >= (3, 8):
+ from importlib import metadata
+else:
+ import importlib_metadata as metadata
+
+
+extensions = [
+ "sphinx.ext.autodoc",
+ "sphinx.ext.doctest",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.coverage",
+ "sphinx.ext.viewcode",
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+source_suffix = ".rst"
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+
+project = "pluggy"
+copyright = "2016, Holger Krekel"
+author = "Holger Krekel"
+
+release = metadata.version(project)
+# The short X.Y version.
+version = ".".join(release.split(".")[:2])
+
+
+language = None
+
+pygments_style = "sphinx"
+# html_logo = "_static/img/plug.png"
+html_theme = "alabaster"
+html_theme_options = {
+ "logo": "img/plug.png",
+ "description": "The pytest plugin system",
+ "github_user": "pytest-dev",
+ "github_repo": "pluggy",
+ "github_button": "true",
+ "github_banner": "true",
+ "github_type": "star",
+ "badge_branch": "master",
+ "page_width": "1080px",
+ "fixed_sidebar": "false",
+}
+html_sidebars = {
+ "**": ["about.html", "localtoc.html", "relations.html", "searchbox.html"]
+}
+html_static_path = ["_static"]
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [(master_doc, "pluggy", "pluggy Documentation", [author], 1)]
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (
+ master_doc,
+ "pluggy",
+ "pluggy Documentation",
+ author,
+ "pluggy",
+ "One line description of project.",
+ "Miscellaneous",
+ )
+]
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+ "python": ("https://docs.python.org/3", None),
+ "pytest": ("https://docs.pytest.org/en/latest", None),
+ "setuptools": ("https://setuptools.readthedocs.io/en/latest", None),
+ "tox": ("https://tox.readthedocs.io/en/latest", None),
+ "devpi": ("https://devpi.net/docs/devpi/devpi/stable/+doc/", None),
+}
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample-spam/eggsample_spam.py b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample-spam/eggsample_spam.py
new file mode 100644
index 0000000000..500d885d55
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample-spam/eggsample_spam.py
@@ -0,0 +1,22 @@
+import eggsample
+
+
+@eggsample.hookimpl
+def eggsample_add_ingredients(ingredients):
+ """Here the caller expects us to return a list."""
+ if "egg" in ingredients:
+ spam = ["lovely spam", "wonderous spam"]
+ else:
+ spam = ["splendiferous spam", "magnificent spam"]
+ return spam
+
+
+@eggsample.hookimpl
+def eggsample_prep_condiments(condiments):
+ """Here the caller passes a mutable object, so we mess with it directly."""
+ try:
+ del condiments["steak sauce"]
+ except KeyError:
+ pass
+ condiments["spam sauce"] = 42
+ return "Now this is what I call a condiments tray!"
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample-spam/setup.py b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample-spam/setup.py
new file mode 100644
index 0000000000..f81a8eb403
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample-spam/setup.py
@@ -0,0 +1,8 @@
+from setuptools import setup
+
+setup(
+ name="eggsample-spam",
+ install_requires="eggsample",
+ entry_points={"eggsample": ["spam = eggsample_spam"]},
+ py_modules=["eggsample_spam"],
+)
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/__init__.py b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/__init__.py
new file mode 100644
index 0000000000..4dc4b36dec
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/__init__.py
@@ -0,0 +1,4 @@
+import pluggy
+
+hookimpl = pluggy.HookimplMarker("eggsample")
+"""Marker to be imported and used in plugins (and for own implementations)"""
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/hookspecs.py b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/hookspecs.py
new file mode 100644
index 0000000000..48866b2491
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/hookspecs.py
@@ -0,0 +1,21 @@
+import pluggy
+
+hookspec = pluggy.HookspecMarker("eggsample")
+
+
+@hookspec
+def eggsample_add_ingredients(ingredients: tuple):
+ """Have a look at the ingredients and offer your own.
+
+ :param ingredients: the ingredients, don't touch them!
+ :return: a list of ingredients
+ """
+
+
+@hookspec
+def eggsample_prep_condiments(condiments: dict):
+ """Reorganize the condiments tray to your heart's content.
+
+ :param condiments: some sauces and stuff
+ :return: a witty comment about your activity
+ """
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/host.py b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/host.py
new file mode 100644
index 0000000000..ac1d33b453
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/host.py
@@ -0,0 +1,57 @@
+import itertools
+import random
+
+import pluggy
+
+from eggsample import hookspecs, lib
+
+condiments_tray = {"pickled walnuts": 13, "steak sauce": 4, "mushy peas": 2}
+
+
+def main():
+ pm = get_plugin_manager()
+ cook = EggsellentCook(pm.hook)
+ cook.add_ingredients()
+ cook.prepare_the_food()
+ cook.serve_the_food()
+
+
+def get_plugin_manager():
+ pm = pluggy.PluginManager("eggsample")
+ pm.add_hookspecs(hookspecs)
+ pm.load_setuptools_entrypoints("eggsample")
+ pm.register(lib)
+ return pm
+
+
+class EggsellentCook:
+ FAVORITE_INGREDIENTS = ("egg", "egg", "egg")
+
+ def __init__(self, hook):
+ self.hook = hook
+ self.ingredients = None
+
+ def add_ingredients(self):
+ results = self.hook.eggsample_add_ingredients(
+ ingredients=self.FAVORITE_INGREDIENTS
+ )
+ my_ingredients = list(self.FAVORITE_INGREDIENTS)
+ # Each hook returns a list - so we chain this list of lists
+ other_ingredients = list(itertools.chain(*results))
+ self.ingredients = my_ingredients + other_ingredients
+
+ def prepare_the_food(self):
+ random.shuffle(self.ingredients)
+
+ def serve_the_food(self):
+ condiment_comments = self.hook.eggsample_prep_condiments(
+ condiments=condiments_tray
+ )
+ print(f"Your food. Enjoy some {', '.join(self.ingredients)}")
+ print(f"Some condiments? We have {', '.join(condiments_tray.keys())}")
+ if any(condiment_comments):
+ print("\n".join(condiment_comments))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/lib.py b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/lib.py
new file mode 100644
index 0000000000..62cea7458e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/eggsample/lib.py
@@ -0,0 +1,14 @@
+import eggsample
+
+
+@eggsample.hookimpl
+def eggsample_add_ingredients():
+ spices = ["salt", "pepper"]
+ you_can_never_have_enough_eggs = ["egg", "egg"]
+ ingredients = spices + you_can_never_have_enough_eggs
+ return ingredients
+
+
+@eggsample.hookimpl
+def eggsample_prep_condiments(condiments):
+ condiments["mint sauce"] = 1
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/setup.py b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/setup.py
new file mode 100644
index 0000000000..8b3facb3b6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/eggsample/setup.py
@@ -0,0 +1,8 @@
+from setuptools import setup, find_packages
+
+setup(
+ name="eggsample",
+ install_requires="pluggy>=0.3,<1.0",
+ entry_points={"console_scripts": ["eggsample=eggsample.host:main"]},
+ packages=find_packages(),
+)
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/toy-example.py b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/toy-example.py
new file mode 100644
index 0000000000..6d2086f9ba
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/examples/toy-example.py
@@ -0,0 +1,41 @@
+import pluggy
+
+hookspec = pluggy.HookspecMarker("myproject")
+hookimpl = pluggy.HookimplMarker("myproject")
+
+
+class MySpec:
+ """A hook specification namespace."""
+
+ @hookspec
+ def myhook(self, arg1, arg2):
+ """My special little hook that you can customize."""
+
+
+class Plugin_1:
+ """A hook implementation namespace."""
+
+ @hookimpl
+ def myhook(self, arg1, arg2):
+ print("inside Plugin_1.myhook()")
+ return arg1 + arg2
+
+
+class Plugin_2:
+ """A 2nd hook implementation namespace."""
+
+ @hookimpl
+ def myhook(self, arg1, arg2):
+ print("inside Plugin_2.myhook()")
+ return arg1 - arg2
+
+
+# create a manager and add the spec
+pm = pluggy.PluginManager("myproject")
+pm.add_hookspecs(MySpec)
+# register plugins
+pm.register(Plugin_1())
+pm.register(Plugin_2())
+# call our `myhook` hook
+results = pm.hook.myhook(arg1=1, arg2=2)
+print(results)
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/docs/index.rst b/testing/web-platform/tests/tools/third_party/pluggy/docs/index.rst
new file mode 100644
index 0000000000..eab08fcbbd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/docs/index.rst
@@ -0,0 +1,957 @@
+``pluggy``
+==========
+**The pytest plugin system**
+
+What is it?
+***********
+``pluggy`` is the crystallized core of :ref:`plugin management and hook
+calling <pytest:writing-plugins>` for :std:doc:`pytest <pytest:index>`.
+It enables `500+ plugins`_ to extend and customize ``pytest``'s default
+behaviour. Even ``pytest`` itself is composed as a set of ``pluggy`` plugins
+which are invoked in sequence according to a well-defined set of protocols.
+
+It gives users the ability to extend or modify the behaviour of a
+``host program`` by installing a ``plugin`` for that program.
+The plugin code will run as part of normal program execution, changing or
+enhancing certain aspects of it.
+
+In essence, ``pluggy`` enables function `hooking`_ so you can build
+"pluggable" systems.
+
+Why is it useful?
+*****************
+There are some established mechanisms for modifying the behavior of other
+programs/libraries in Python like
+`method overriding <https://en.wikipedia.org/wiki/Method_overriding>`_
+(e.g. Jinja2) or
+`monkey patching <https://en.wikipedia.org/wiki/Monkey_patch>`_ (e.g. gevent
+or for :std:doc:`writing tests <pytest:how-to/monkeypatch>`).
+These strategies become problematic, though, when several parties want to
+participate in the modification of the same program. Therefore ``pluggy``
+does not rely on these mechanisms to enable a more structured approach and
+avoid unnecessary exposure of state and behaviour. This leads to a more
+`loosely coupled <https://en.wikipedia.org/wiki/Loose_coupling>`_ relationship
+between ``host`` and ``plugins``.
+
+The ``pluggy`` approach puts the burden on the designer of the
+``host program`` to think carefully about which objects are really
+needed in a hook implementation. This gives ``plugin`` creators a clear
+framework for how to extend the ``host`` via a well-defined set of functions
+and objects to work with.
+
+How does it work?
+*****************
+Let us start with a short overview of what is involved:
+
+* ``host`` or ``host program``: the program offering extensibility
+ by specifying ``hook functions`` and invoking their implementation(s) as
+ part of program execution
+* ``plugin``: the program implementing (a subset of) the specified hooks and
+ participating in program execution when the implementations are invoked
+ by the ``host``
+* ``pluggy``: connects ``host`` and ``plugins`` by using ...
+
+ - the hook :ref:`specifications <specs>` defining call signatures
+ provided by the ``host`` (a.k.a ``hookspecs`` - see :ref:`marking_hooks`)
+ - the hook :ref:`implementations <impls>` provided by registered
+ ``plugins`` (a.k.a ``hookimpl`` - see `callbacks`_)
+ - the hook :ref:`caller <calling>` - a call loop triggered at appropriate
+ program positions in the ``host`` invoking the implementations and
+ collecting the results
+
+ ... where for each registered hook *specification*, a hook *call* will
+ invoke up to ``N`` registered hook *implementations*.
+* ``user``: the person who installed the ``host program`` and wants to
+ extend its functionality with ``plugins``. In the simplest case they install
+ the ``plugin`` in the same environment as the ``host`` and the magic will
+ happen when the ``host program`` is run the next time. Depending on
+ the ``plugin``, there might be other things they need to do. For example,
+ they might have to call the ``host`` with an additional command line
+ parameter that the ``plugin`` added.
+
+A toy example
+-------------
+Let us demonstrate the core functionality in one module and show how you can
+start experimenting with ``pluggy``.
+
+.. literalinclude:: examples/toy-example.py
+
+Running this directly gets us::
+
+ $ python docs/examples/toy-example.py
+
+ inside Plugin_2.myhook()
+ inside Plugin_1.myhook()
+ [-1, 3]
+
+A complete example
+------------------
+Now let us demonstrate how this plays together in a vaguely real world scenario.
+
+Let's assume our ``host program`` is called **eggsample** where some eggs will
+be prepared and served with a tray containing condiments. As everybody knows:
+the more cooks are involved the better the food, so let us make the process
+pluggable and write a plugin that improves the meal with some spam and replaces
+the steak sauce (nobody likes that anyway) with spam sauce (it's a thing - trust me).
+
+.. note::
+
+ **naming markers**: ``HookspecMarker`` and ``HookimplMarker`` must be
+ initialized with the name of the ``host`` project (the ``name``
+ parameter in ``setup()``) - so **eggsample** in our case.
+
+ **naming plugin projects**: they should be named in the form of
+ ``<host>-<plugin>`` (e.g. ``pytest-xdist``), therefore we call our
+ plugin *eggsample-spam*.
+
+The host
+^^^^^^^^
+``eggsample/eggsample/__init__.py``
+
+.. literalinclude:: examples/eggsample/eggsample/__init__.py
+
+``eggsample/eggsample/hookspecs.py``
+
+.. literalinclude:: examples/eggsample/eggsample/hookspecs.py
+
+``eggsample/eggsample/lib.py``
+
+.. literalinclude:: examples/eggsample/eggsample/lib.py
+
+``eggsample/eggsample/host.py``
+
+.. literalinclude:: examples/eggsample/eggsample/host.py
+
+``eggsample/setup.py``
+
+.. literalinclude:: examples/eggsample/setup.py
+
+Let's get cooking - we install the host and see what a program run looks like::
+
+ $ pip install --editable pluggy/docs/examples/eggsample
+ $ eggsample
+
+ Your food. Enjoy some egg, egg, salt, egg, egg, pepper, egg
+ Some condiments? We have pickled walnuts, steak sauce, mushy peas, mint sauce
+
+The plugin
+^^^^^^^^^^
+``eggsample-spam/eggsample_spam.py``
+
+.. literalinclude:: examples/eggsample-spam/eggsample_spam.py
+
+``eggsample-spam/setup.py``
+
+.. literalinclude:: examples/eggsample-spam/setup.py
+
+Let's get cooking with more cooks - we install the plugin and see what
+we get::
+
+ $ pip install --editable pluggy/docs/examples/eggsample-spam
+ $ eggsample
+
+ Your food. Enjoy some egg, lovely spam, salt, egg, egg, egg, wonderous spam, egg, pepper
+ Some condiments? We have pickled walnuts, mushy peas, mint sauce, spam sauce
+ Now this is what I call a condiments tray!
+
+More real world examples
+------------------------
+To see how ``pluggy`` is used in the real world, have a look at these
+projects' documentation and source code:
+
+* :ref:`pytest <pytest:writing-plugins>`
+* :std:doc:`tox <tox:plugins>`
+* :std:doc:`devpi <devpi:devguide/index>`
+
+For more details and advanced usage please read on.
+
+.. _define:
+
+Define and collect hooks
+************************
+A *plugin* is a :ref:`namespace <python:tut-scopes>` type (currently either a
+``class`` or a module) which defines a set of *hook* functions.
+
+As mentioned in :ref:`manage`, all *plugins* which specify *hooks*
+are managed by an instance of a :py:class:`pluggy.PluginManager` which
+defines the primary ``pluggy`` API.
+
+In order for a :py:class:`~pluggy.PluginManager` to detect functions in a namespace
+intended to be *hooks*, they must be decorated using special ``pluggy`` *marks*.
+
+.. _marking_hooks:
+
+Marking hooks
+-------------
+The :py:class:`~pluggy.HookspecMarker` and :py:class:`~pluggy.HookimplMarker`
+decorators are used to *mark* functions for detection by a
+:py:class:`~pluggy.PluginManager`:
+
+.. code-block:: python
+
+ from pluggy import HookspecMarker, HookimplMarker
+
+ hookspec = HookspecMarker("project_name")
+ hookimpl = HookimplMarker("project_name")
+
+
+Each decorator type takes a single ``project_name`` string as its
+lone argument, the value of which is used to mark hooks for detection by
+a similarly configured :py:class:`~pluggy.PluginManager` instance.
+
+That is, a *mark* type called with ``project_name`` returns an object which
+can be used to decorate functions which will then be detected by a
+:py:class:`~pluggy.PluginManager` which was instantiated with the same
+``project_name`` value.
+
+Furthermore, each *hookimpl* or *hookspec* decorator can configure the
+underlying call-time behavior of each *hook* object by providing special
+*options* passed as keyword arguments.
+
+
+.. note::
+ The following sections correspond to similar documentation in
+ ``pytest`` for :ref:`pytest:writinghooks` and can be used as
+ a supplementary resource.
+
+.. _impls:
+
+Implementations
+---------------
+A hook *implementation* (*hookimpl*) is just a (callback) function
+which has been appropriately marked.
+
+*hookimpls* are loaded from a plugin using the
+:py:meth:`~pluggy.PluginManager.register()` method:
+
+.. code-block:: python
+
+ import sys
+ from pluggy import PluginManager, HookimplMarker
+
+ hookimpl = HookimplMarker("myproject")
+
+
+ @hookimpl
+ def setup_project(config, args):
+ """This hook is used to process the initial config
+ and possibly input arguments.
+ """
+ if args:
+ config.process_args(args)
+
+ return config
+
+
+ pm = PluginManager("myproject")
+
+ # load all hookimpls from the local module's namespace
+ plugin_name = pm.register(sys.modules[__name__])
+
+.. _optionalhook:
+
+Optional validation
+^^^^^^^^^^^^^^^^^^^
+Normally each *hookimpl* should be validated against a corresponding
+hook :ref:`specification <specs>`. If you want to make an exception
+then the *hookimpl* should be marked with the ``"optionalhook"`` option:
+
+.. code-block:: python
+
+ @hookimpl(optionalhook=True)
+ def setup_project(config, args):
+ """This hook is used to process the initial config
+ and possibly input arguments.
+ """
+ if args:
+ config.process_args(args)
+
+ return config
+
+.. _specname:
+
+Hookspec name matching
+^^^^^^^^^^^^^^^^^^^^^^
+
+During plugin :ref:`registration <registration>`, pluggy attempts to match each
+hook implementation declared by the *plugin* to a hook
+:ref:`specification <specs>` in the *host* program with the **same name** as
+the function being decorated by ``@hookimpl`` (e.g. ``setup_project`` in the
+example above). Note: there is *no* strict requirement that each *hookimpl*
+has a corresponding *hookspec* (see
+:ref:`enforcing spec validation <enforcing>`).
+
+*new in version 0.13.2:*
+
+To override the default behavior, a *hookimpl* may also be matched to a
+*hookspec* in the *host* program with a non-matching function name by using
+the ``specname`` option. Continuing the example above, the *hookimpl* function
+does not need to be named ``setup_project``, but if the argument
+``specname="setup_project"`` is provided to the ``hookimpl`` decorator, it will
+be matched and checked against the ``setup_project`` hookspec:
+
+.. code-block:: python
+
+ @hookimpl(specname="setup_project")
+ def any_plugin_function(config, args):
+ """This hook is used to process the initial config
+ and possibly input arguments.
+ """
+ if args:
+ config.process_args(args)
+
+ return config
+
+Call time order
+^^^^^^^^^^^^^^^
+By default hooks are :ref:`called <calling>` in LIFO registered order; however,
+a *hookimpl* can influence its call-time invocation position using special
+attributes. If marked with a ``"tryfirst"`` or ``"trylast"`` option it
+will be executed *first* or *last* respectively in the hook call loop:
+
+.. code-block:: python
+
+ import sys
+ from pluggy import PluginManager, HookimplMarker
+
+ hookimpl = HookimplMarker("myproject")
+
+
+ @hookimpl(trylast=True)
+ def setup_project(config, args):
+ """Default implementation."""
+ if args:
+ config.process_args(args)
+
+ return config
+
+
+ class SomeOtherPlugin:
+ """Some other plugin defining the same hook."""
+
+ @hookimpl(tryfirst=True)
+ def setup_project(self, config, args):
+ """Report what args were passed before calling
+ downstream hooks.
+ """
+ if args:
+ print("Got args: {}".format(args))
+
+ return config
+
+
+ pm = PluginManager("myproject")
+
+ # load from the local module's namespace
+ pm.register(sys.modules[__name__])
+ # load a plugin defined on a class
+ pm.register(SomeOtherPlugin())
+
+For another example see the :ref:`pytest:plugin-hookorder` section of the
+``pytest`` docs.
+
+.. note::
+ ``tryfirst`` and ``trylast`` hooks are still invoked in LIFO order within
+ each category.
+
+
+.. _hookwrappers:
+
+Wrappers
+^^^^^^^^
+A *hookimpl* can be marked with a ``"hookwrapper"`` option which indicates that
+the function will be called to *wrap* (or surround) all other normal *hookimpl*
+calls. A *hookwrapper* can thus execute some code ahead and after the execution
+of all corresponding non-wrapper *hookimpls*.
+
+Much in the same way as a :py:func:`@contextlib.contextmanager <python:contextlib.contextmanager>`, *hookwrappers* must
+be implemented as a generator function with a single ``yield`` in its body:
+
+
+.. code-block:: python
+
+ @hookimpl(hookwrapper=True)
+ def setup_project(config, args):
+ """Wrap calls to ``setup_project()`` implementations which
+ should return json encoded config options.
+ """
+ if config.debug:
+ print("Pre-hook config is {}".format(config.tojson()))
+
+ # get initial default config
+ defaults = config.tojson()
+
+ # all corresponding hookimpls are invoked here
+ outcome = yield
+
+ for item in outcome.get_result():
+ print("JSON config override is {}".format(item))
+
+ if config.debug:
+ print("Post-hook config is {}".format(config.tojson()))
+
+ if config.use_defaults:
+ outcome.force_result(defaults)
+
+The generator is :py:meth:`sent <python:generator.send>` a :py:class:`pluggy._callers._Result` object which can
+be assigned in the ``yield`` expression and used to override or inspect
+the final result(s) returned back to the caller using the
+:py:meth:`~pluggy._callers._Result.force_result` or
+:py:meth:`~pluggy._callers._Result.get_result` methods.
+
+.. note::
+ Hook wrappers can **not** return results (as per generator function
+ semantics); they can only modify them using the ``_Result`` API.
+
+Also see the :ref:`pytest:hookwrapper` section in the ``pytest`` docs.
+
+.. _specs:
+
+Specifications
+--------------
+A hook *specification* (*hookspec*) is a definition used to validate each
+*hookimpl*, ensuring that an extension writer has correctly defined their
+callback function *implementation*.
+
+*hookspecs* are defined using similarly marked functions; however, only the
+function *signature* (its name and the names of all its arguments) is analyzed
+and stored. As such, you will often see a *hookspec* defined with only
+a docstring in its body.
+
+*hookspecs* are loaded using the
+:py:meth:`~pluggy.PluginManager.add_hookspecs()` method and normally
+should be added before registering corresponding *hookimpls*:
+
+.. code-block:: python
+
+ import sys
+ from pluggy import PluginManager, HookspecMarker
+
+ hookspec = HookspecMarker("myproject")
+
+
+ @hookspec
+ def setup_project(config, args):
+ """This hook is used to process the initial config and input
+ arguments.
+ """
+
+
+ pm = PluginManager("myproject")
+
+ # load from the local module's namespace
+ pm.add_hookspecs(sys.modules[__name__])
+
+
+Registering a *hookimpl* which does not meet the constraints of its
+corresponding *hookspec* will result in an error.
+
+A *hookspec* can also be added **after** some *hookimpls* have been
+registered; however, this is not normally recommended as it results in
+delayed hook validation.
+
+.. note::
+ The term *hookspec* can sometimes refer to the plugin-namespace
+ which defines ``hookspec`` decorated functions as in the case of
+ ``pytest``'s `hookspec module`_.
+
+.. _enforcing:
+
+Enforcing spec validation
+^^^^^^^^^^^^^^^^^^^^^^^^^
+By default there is no strict requirement that each *hookimpl* has
+a corresponding *hookspec*. However, if you'd like to enforce this
+behavior you can run a check with the
+:py:meth:`~pluggy.PluginManager.check_pending()` method. If you'd like
+to enforce requisite *hookspecs* but with certain exceptions for some hooks
+then make sure to mark those hooks as :ref:`optional <optionalhook>`.
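+
+A minimal sketch of such a check (reusing the ``pm`` instance from the
+examples above):
+
+.. code-block:: python
+
+ # raises PluginValidationError if any registered hookimpl has no
+ # matching hookspec and was not marked with optionalhook=True
+ pm.check_pending()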
+
+Opt-in arguments
+^^^^^^^^^^^^^^^^
+To allow for *hookspecs* to evolve over the lifetime of a project,
+*hookimpls* can accept **fewer** arguments than are defined in the spec.
+This allows for extending hook arguments (and thus semantics) without
+breaking existing *hookimpls*.
+
+In other words, this is OK:
+
+.. code-block:: python
+
+ @hookspec
+ def myhook(config, args):
+ pass
+
+
+ @hookimpl
+ def myhook(args):
+ print(args)
+
+
+whereas this is not:
+
+.. code-block:: python
+
+ @hookspec
+ def myhook(config, args):
+ pass
+
+
+ @hookimpl
+ def myhook(config, args, extra_arg):
+ print(args)
+
+.. note::
+ The one exception to this rule (that a *hookspec* must have at least as
+ many arguments as its *hookimpls*) is the conventional :ref:`self <python:tut-remarks>` arg; this
+ is always ignored when *hookimpls* are defined as :ref:`methods <python:tut-methodobjects>`.
+
+.. _firstresult:
+
+First result only
+^^^^^^^^^^^^^^^^^
+A *hookspec* can be marked such that when the *hook* is called the call loop
+will only invoke up to the first *hookimpl* which returns a result other
+than ``None``.
+
+.. code-block:: python
+
+ @hookspec(firstresult=True)
+ def myhook(config, args):
+ pass
+
+This can be useful for optimizing a call loop for which you are only
+interested in a single core *hookimpl*. An example is the
+:func:`~_pytest.hookspec.pytest_cmdline_main` central routine of ``pytest``.
+Note that all ``hookwrappers`` are still invoked with the first result.
+
+Also see the :ref:`pytest:firstresult` section in the ``pytest`` docs.
+
+.. _historic:
+
+Historic hooks
+^^^^^^^^^^^^^^
+You can mark a *hookspec* as being *historic* meaning that the hook
+can be called with :py:meth:`~pluggy._hooks._HookCaller.call_historic()` **before**
+having been registered:
+
+.. code-block:: python
+
+ @hookspec(historic=True)
+ def myhook(config, args):
+ pass
+
+The implication is that late registered *hookimpls* will be called back
+immediately at register time and **can not** return a result to the caller.
+
+This turns out to be particularly useful when dealing with lazy or
+dynamically loaded plugins.
+
+For more info see :ref:`call_historic`.
+
+
+Warnings on hook implementation
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+As projects evolve, new hooks may be introduced and/or deprecated.
+
+If a hookspec specifies a ``warn_on_impl`` warning, pluggy will emit it for any plugin implementing the hook.
+
+
+.. code-block:: python
+
+ @hookspec(
+ warn_on_impl=DeprecationWarning("oldhook is deprecated and will be removed soon")
+ )
+ def oldhook():
+ pass
+
+.. _manage:
+
+The Plugin registry
+*******************
+``pluggy`` manages plugins using instances of the
+:py:class:`pluggy.PluginManager`.
+
+A :py:class:`~pluggy.PluginManager` is instantiated with a single
+``str`` argument, the ``project_name``:
+
+.. code-block:: python
+
+ import pluggy
+
+ pm = pluggy.PluginManager("my_project_name")
+
+
+The ``project_name`` value is used when a :py:class:`~pluggy.PluginManager`
+scans for *hook* functions :ref:`defined on a plugin <define>`.
+This allows for multiple plugin managers from multiple projects
+to define hooks alongside each other.
+
+.. _registration:
+
+Registration
+------------
+Each :py:class:`~pluggy.PluginManager` maintains a *plugin* registry where each *plugin*
+contains a set of *hookimpl* definitions. Loading *hookimpl* and *hookspec*
+definitions to populate the registry is described in detail in the section on
+:ref:`define`.
+
+In summary, you pass a plugin namespace object to the
+:py:meth:`~pluggy.PluginManager.register()` and
+:py:meth:`~pluggy.PluginManager.add_hookspecs()` methods to collect
+hook *implementations* and *specifications* from *plugin* namespaces respectively.
+
+You can unregister any *plugin*'s hooks using
+:py:meth:`~pluggy.PluginManager.unregister()` and check whether a plugin is
+registered by passing the plugin object to the
+:py:meth:`~pluggy.PluginManager.is_registered()` method.
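+
+A minimal sketch, assuming ``myplugin`` is an illustrative plugin namespace
+(a module or class instance) as described in :ref:`define`:
+
+.. code-block:: python
+
+ plugin_name = pm.register(myplugin)  # returns the canonical name
+ assert pm.is_registered(myplugin)
+ pm.unregister(myplugin)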
+
+Loading ``setuptools`` entry points
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+You can automatically load plugins registered through
+:ref:`setuptools entry points <setuptools:entry_points>`
+with the :py:meth:`~pluggy.PluginManager.load_setuptools_entrypoints()`
+method.
+
+An example use of this is the :ref:`pytest entry point <pytest:pip-installable plugins>`.
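+
+A minimal sketch (``mypluginspec`` is an illustrative hookspec module, and the
+``"myproject"`` entry point group is assumed to be advertised by installed
+distributions):
+
+.. code-block:: python
+
+ pm = pluggy.PluginManager("myproject")
+ pm.add_hookspecs(mypluginspec)
+ # scan installed distributions for "myproject" entry points and
+ # register each advertised namespace as a plugin
+ count = pm.load_setuptools_entrypoints("myproject")
+ print("loaded {} plugin(s) from entry points".format(count))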
+
+
+Blocking
+--------
+You can block any plugin from being registered using
+:py:meth:`~pluggy.PluginManager.set_blocked()` and check if a given
+*plugin* is blocked by name using :py:meth:`~pluggy.PluginManager.is_blocked()`.
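+
+For example (the plugin name ``"spam"`` is illustrative):
+
+.. code-block:: python
+
+ # prevent a plugin named "spam" from being (re-)registered
+ pm.set_blocked("spam")
+ assert pm.is_blocked("spam")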
+
+
+Inspection
+----------
+You can use a variety of methods to inspect both the registry
+and particular plugins in it (a short sketch follows this list):
+
+- :py:meth:`~pluggy.PluginManager.list_name_plugin()` -
+ return a list of name-plugin pairs
+- :py:meth:`~pluggy.PluginManager.get_plugins()` - retrieve all plugins
+- :py:meth:`~pluggy.PluginManager.get_canonical_name()` - get a *plugin*'s
+ canonical name (the name it was registered with)
+- :py:meth:`~pluggy.PluginManager.get_plugin()` - retrieve a plugin by its
+ canonical name
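+
+A short sketch combining these (``myplugin`` is an illustrative, already
+registered plugin object):
+
+.. code-block:: python
+
+ # name/plugin pairs for everything currently registered
+ pairs = pm.list_name_plugin()
+ name = pm.get_canonical_name(myplugin)
+ assert pm.get_plugin(name) is myplugin
+ assert myplugin in pm.get_plugins()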
+
+
+Parsing mark options
+^^^^^^^^^^^^^^^^^^^^
+You can retrieve the *options* applied to a particular
+*hookspec* or *hookimpl* as per :ref:`marking_hooks` using the
+:py:meth:`~pluggy.PluginManager.parse_hookspec_opts()` and
+:py:meth:`~pluggy.PluginManager.parse_hookimpl_opts()` respectively.
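+
+For example (a minimal sketch; ``myplugin``, ``mypluginspec`` and the hook
+name are illustrative):
+
+.. code-block:: python
+
+ # the option dicts recorded by the decorators, or None if not marked
+ impl_opts = pm.parse_hookimpl_opts(myplugin, "setup_project")
+ spec_opts = pm.parse_hookspec_opts(mypluginspec, "setup_project")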
+
+
+.. _calling:
+
+Calling hooks
+*************
+The core functionality of ``pluggy`` enables an extension provider
+to override function calls made at certain points throughout a program.
+
+A particular *hook* is invoked by calling an instance of
+a :py:class:`pluggy._hooks._HookCaller` which in turn *loops* through the
+``1:N`` registered *hookimpls* and calls them in sequence.
+
+Every :py:class:`~pluggy.PluginManager` has a ``hook`` attribute
+which is an instance of :py:class:`pluggy._hooks._HookRelay`.
+The :py:class:`~pluggy._hooks._HookRelay` itself contains references
+(by hook name) to the :py:class:`~pluggy._hooks._HookCaller` instance for each registered *hook*.
+
+More practically you call a *hook* like so:
+
+.. code-block:: python
+
+ import sys
+ import pluggy
+ import mypluginspec
+ import myplugin
+ from configuration import config
+
+ pm = pluggy.PluginManager("myproject")
+ pm.add_hookspecs(mypluginspec)
+ pm.register(myplugin)
+
+ # we invoke the _HookCaller and thus all underlying hookimpls
+ result_list = pm.hook.myhook(config=config, args=sys.argv)
+
+Note that you **must** call hooks using keyword :std:term:`python:argument` syntax!
+
+Hook implementations are called in LIFO registered order: *the last
+registered plugin's hooks are called first*. As an example, the assertion
+below should not error:
+
+.. code-block:: python
+
+ from pluggy import PluginManager, HookimplMarker
+
+ hookimpl = HookimplMarker("myproject")
+
+
+ class Plugin1:
+ @hookimpl
+ def myhook(self, args):
+ """Default implementation."""
+ return 1
+
+
+ class Plugin2:
+ @hookimpl
+ def myhook(self, args):
+ """Default implementation."""
+ return 2
+
+
+ class Plugin3:
+ @hookimpl
+ def myhook(self, args):
+ """Default implementation."""
+ return 3
+
+
+ pm = PluginManager("myproject")
+ pm.register(Plugin1())
+ pm.register(Plugin2())
+ pm.register(Plugin3())
+
+ assert pm.hook.myhook(args=()) == [3, 2, 1]
+
+Collecting results
+------------------
+By default calling a hook results in all underlying :ref:`hookimpls
+<impls>` functions being invoked in sequence via a loop. Any function
+which returns a value other than ``None`` will have that result
+appended to a :py:class:`list` which is returned by the call.
+
+The only exception to this behaviour is if the hook has been marked to return
+its :ref:`first result only <firstresult>`, in which case only the first
+non-``None`` value will be returned.
+
+.. _call_historic:
+
+Exception handling
+------------------
+If any *hookimpl* errors with an exception, no further callbacks
+are invoked and the exception is packaged up and delivered to
+any :ref:`wrappers <hookwrappers>` before being re-raised at the
+hook invocation point:
+
+.. code-block:: python
+
+ import sys
+ from pluggy import PluginManager, HookimplMarker
+
+ hookimpl = HookimplMarker("myproject")
+
+
+ class Plugin1:
+ @hookimpl
+ def myhook(self, args):
+ return 1
+
+
+ class Plugin2:
+ @hookimpl
+ def myhook(self, args):
+ raise RuntimeError
+
+
+ class Plugin3:
+ @hookimpl
+ def myhook(self, args):
+ return 3
+
+
+ @hookimpl(hookwrapper=True)
+ def myhook(args):
+ outcome = yield
+
+ try:
+ outcome.get_result()
+ except RuntimeError:
+ # log the error details
+ print(outcome.excinfo)
+
+
+ pm = PluginManager("myproject")
+
+ # register plugins
+ pm.register(Plugin1())
+ pm.register(Plugin2())
+ pm.register(Plugin3())
+
+ # register wrapper
+ pm.register(sys.modules[__name__])
+
+ # this raises RuntimeError due to Plugin2
+ pm.hook.myhook(args=())
+
+Historic calls
+--------------
+A *historic call* allows for all newly registered functions to receive all hook
+calls that happened before their registration. The implication is that this is
+only useful if you expect that some *hookimpls* may be registered **after** the
+hook is initially invoked.
+
+Historic hooks must be :ref:`specially marked <historic>` and called
+using the :py:meth:`~pluggy._hooks._HookCaller.call_historic()` method:
+
+.. code-block:: python
+
+ def callback(result):
+ print("historic call result is {result}".format(result=result))
+
+
+ # call with history; no results returned
+ pm.hook.myhook.call_historic(
+ kwargs={"config": config, "args": sys.argv}, result_callback=callback
+ )
+
+ # ... more of our program ...
+
+ # late loading of some plugin
+ import mylateplugin
+
+ # historic callback is invoked here
+ pm.register(mylateplugin)
+
+Note that if you use :py:meth:`~pluggy._hooks._HookCaller.call_historic()`,
+the :py:class:`~pluggy._hooks._HookCaller` (and thus your calling code)
+can not receive results back from the underlying *hookimpl* functions.
+Instead you can provide a *callback* for processing results (like the
+``callback`` function above) which will be called as each new plugin
+is registered.
+
+.. note::
+ *historic* calls are incompatible with :ref:`firstresult` marked
+ hooks since only the first registered plugin's hook(s) would
+ ever be called.
+
+Calling with extras
+-------------------
+You can call a hook with temporarily participating *implementation* functions
+(that aren't in the registry) using the
+:py:meth:`pluggy._hooks._HookCaller.call_extra()` method.
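+
+A minimal sketch (``extra_impl`` and the hook arguments are illustrative):
+
+.. code-block:: python
+
+ def extra_impl(config, args):
+ return "temporary result"
+
+ # extra_impl participates in this call only; it is never registered
+ results = pm.hook.myhook.call_extra([extra_impl], {"config": config, "args": sys.argv})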
+
+
+Calling with a subset of registered plugins
+-------------------------------------------
+You can make a call using a subset of plugins by asking the
+:py:class:`~pluggy.PluginManager` first for a
+:py:class:`~pluggy._hooks._HookCaller` with those plugins removed
+using the :py:meth:`pluggy.PluginManager.subset_hook_caller()` method.
+
+You then can use that :py:class:`_HookCaller <pluggy._hooks._HookCaller>`
+to make normal, :py:meth:`~pluggy._hooks._HookCaller.call_historic`, or
+:py:meth:`~pluggy._hooks._HookCaller.call_extra` calls as necessary.
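+
+For example (``noisy_plugin`` is an illustrative, already registered plugin
+object):
+
+.. code-block:: python
+
+ # a _HookCaller that behaves as if noisy_plugin were not registered
+ hc = pm.subset_hook_caller("myhook", remove_plugins=[noisy_plugin])
+ results = hc(config=config, args=sys.argv)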
+
+Built-in tracing
+****************
+``pluggy`` comes with some batteries-included hook tracing for your
+debugging needs.
+
+
+Call tracing
+------------
+To enable tracing, use the
+:py:meth:`pluggy.PluginManager.enable_tracing()` method, which returns an
+undo function to disable the behaviour.
+
+.. code-block:: python
+
+ pm = PluginManager("myproject")
+ # magic line to set a writer function
+ pm.trace.root.setwriter(print)
+ undo = pm.enable_tracing()
+
+
+Call monitoring
+---------------
+Instead of using the built-in tracing mechanism you can also add your
+own ``before`` and ``after`` monitoring functions using
+:py:meth:`pluggy.PluginManager.add_hookcall_monitoring()`.
+
+The expected signatures and default implementations for these functions are:
+
+.. code-block:: python
+
+ def before(hook_name, methods, kwargs):
+ pass
+
+
+ def after(outcome, hook_name, methods, kwargs):
+ pass
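+
+They are attached with :py:meth:`~pluggy.PluginManager.add_hookcall_monitoring`,
+which (like tracing) returns an undo function; a minimal sketch:
+
+.. code-block:: python
+
+ undo = pm.add_hookcall_monitoring(before, after)
+ # ... make some hook calls ...
+ undo()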
+
+Public API
+**********
+Please see the :doc:`api_reference`.
+
+Development
+***********
+Great care must be taken when hacking on ``pluggy`` since multiple mature
+projects rely on it. Our GitHub-integrated CI process runs the full
+`tox test suite`_ on each commit, so be sure your changes can run on
+all required `Python interpreters`_ and ``pytest`` versions.
+
+For development, we suggest creating a virtual environment and installing ``pluggy``
+in editable mode together with the ``dev`` dependencies::
+
+ $ python3 -m venv .env
+ $ source .env/bin/activate
+ $ pip install -e .[dev]
+
+To make sure you follow the code style used in the project, install pre-commit_,
+which will run style checks before each commit::
+
+ $ pre-commit install
+
+
+Release Policy
+**************
+Pluggy uses `Semantic Versioning`_. Breaking changes are only foreseen for
+Major releases (incremented X in "X.Y.Z"). If you want to use ``pluggy``
+in your project you should thus use a dependency restriction like
+``"pluggy>=0.1.0,<1.0"`` to avoid surprises.
+
+
+Table of contents
+*****************
+
+.. toctree::
+ :maxdepth: 2
+
+ api_reference
+ changelog
+
+
+
+.. hyperlinks
+.. _hookspec module:
+ https://docs.pytest.org/en/latest/_modules/_pytest/hookspec.html
+.. _request-response pattern:
+ https://en.wikipedia.org/wiki/Request%E2%80%93response
+.. _publish-subscribe:
+ https://en.wikipedia.org/wiki/Publish%E2%80%93subscribe_pattern
+.. _hooking:
+ https://en.wikipedia.org/wiki/Hooking
+.. _callbacks:
+ https://en.wikipedia.org/wiki/Callback_(computer_programming)
+.. _tox test suite:
+ https://github.com/pytest-dev/pluggy/blob/master/tox.ini
+.. _Semantic Versioning:
+ https://semver.org/
+.. _Python interpreters:
+ https://github.com/pytest-dev/pluggy/blob/master/tox.ini#L2
+.. _500+ plugins:
+ http://plugincompat.herokuapp.com/
+.. _pre-commit:
+ https://pre-commit.com/
+
+
+.. Indices and tables
+.. ==================
+.. * :ref:`genindex`
+.. * :ref:`modindex`
+.. * :ref:`search`
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/pyproject.toml b/testing/web-platform/tests/tools/third_party/pluggy/pyproject.toml
new file mode 100644
index 0000000000..15eba26898
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/pyproject.toml
@@ -0,0 +1,47 @@
+[build-system]
+requires = [
+ "setuptools",
+ "setuptools-scm",
+ "wheel",
+]
+
+[tool.setuptools_scm]
+write_to = "src/pluggy/_version.py"
+
+[tool.towncrier]
+package = "pluggy"
+package_dir = "src/pluggy"
+filename = "CHANGELOG.rst"
+directory = "changelog/"
+title_format = "pluggy {version} ({project_date})"
+template = "changelog/_template.rst"
+
+ [[tool.towncrier.type]]
+ directory = "removal"
+ name = "Deprecations and Removals"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "feature"
+ name = "Features"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "bugfix"
+ name = "Bug Fixes"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "vendor"
+ name = "Vendored Libraries"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "doc"
+ name = "Improved Documentation"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "trivial"
+ name = "Trivial/Internal Changes"
+ showcontent = true
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/scripts/release.py b/testing/web-platform/tests/tools/third_party/pluggy/scripts/release.py
new file mode 100644
index 0000000000..e09b8c77b1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/scripts/release.py
@@ -0,0 +1,69 @@
+"""
+Release script.
+"""
+import argparse
+import sys
+from subprocess import check_call
+
+from colorama import init, Fore
+from git import Repo, Remote
+
+
+def create_branch(version):
+ """Create a fresh branch from upstream/main"""
+ repo = Repo.init(".")
+ if repo.is_dirty(untracked_files=True):
+ raise RuntimeError("Repository is dirty, please commit/stash your changes.")
+
+ branch_name = f"release-{version}"
+ print(f"{Fore.CYAN}Create {branch_name} branch from upstream main")
+ upstream = get_upstream(repo)
+ upstream.fetch()
+ release_branch = repo.create_head(branch_name, upstream.refs.main, force=True)
+ release_branch.checkout()
+ return repo
+
+
+def get_upstream(repo: Repo) -> Remote:
+ """Find upstream repository for pluggy on the remotes"""
+ for remote in repo.remotes:
+ for url in remote.urls:
+ if url.endswith(("pytest-dev/pluggy.git", "pytest-dev/pluggy")):
+ return remote
+ raise RuntimeError("could not find pytest-dev/pluggy remote")
+
+
+def pre_release(version):
+ """Generates new docs, release announcements and creates a local tag."""
+ create_branch(version)
+ changelog(version, write_out=True)
+
+ check_call(["git", "commit", "-a", "-m", f"Preparing release {version}"])
+
+ print()
+ print(f"{Fore.GREEN}Please push your branch to your fork and open a PR.")
+
+
+def changelog(version, write_out=False):
+ if write_out:
+ addopts = []
+ else:
+ addopts = ["--draft"]
+ print(f"{Fore.CYAN}Generating CHANGELOG")
+ check_call(["towncrier", "--yes", "--version", version] + addopts)
+
+
+def main():
+ init(autoreset=True)
+ parser = argparse.ArgumentParser()
+ parser.add_argument("version", help="Release version")
+ options = parser.parse_args()
+ try:
+ pre_release(options.version)
+ except RuntimeError as e:
+ print(f"{Fore.RED}ERROR: {e}")
+ return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/scripts/upload-coverage.sh b/testing/web-platform/tests/tools/third_party/pluggy/scripts/upload-coverage.sh
new file mode 100755
index 0000000000..ad3dd48281
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/scripts/upload-coverage.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+set -e
+set -x
+
+if [ -z "$TOXENV" ]; then
+ python -m pip install coverage
+else
+ # Add last TOXENV to $PATH.
+ PATH="$PWD/.tox/${TOXENV##*,}/bin:$PATH"
+fi
+
+python -m coverage xml
+# Set --connect-timeout to work around https://github.com/curl/curl/issues/4461
+curl -S -L --connect-timeout 5 --retry 6 -s https://codecov.io/bash -o codecov-upload.sh
+bash codecov-upload.sh -Z -X fix -f coverage.xml "$@"
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/setup.cfg b/testing/web-platform/tests/tools/third_party/pluggy/setup.cfg
new file mode 100644
index 0000000000..7040bcb83b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/setup.cfg
@@ -0,0 +1,52 @@
+[bdist_wheel]
+universal=1
+
+[metadata]
+name = pluggy
+description = plugin and hook calling mechanisms for python
+long_description = file: README.rst
+long_description_content_type = text/x-rst
+license = MIT
+platforms = unix, linux, osx, win32
+author = Holger Krekel
+author_email = holger@merlinux.eu
+url = https://github.com/pytest-dev/pluggy
+classifiers =
+ Development Status :: 6 - Mature
+ Intended Audience :: Developers
+ License :: OSI Approved :: MIT License
+ Operating System :: POSIX
+ Operating System :: Microsoft :: Windows
+ Operating System :: MacOS :: MacOS X
+ Topic :: Software Development :: Testing
+ Topic :: Software Development :: Libraries
+ Topic :: Utilities
+ Programming Language :: Python :: Implementation :: CPython
+ Programming Language :: Python :: Implementation :: PyPy
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3 :: Only
+ Programming Language :: Python :: 3.6
+ Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: 3.9
+
+[options]
+packages =
+ pluggy
+install_requires =
+ importlib-metadata>=0.12;python_version<"3.8"
+python_requires = >=3.6
+package_dir =
+ =src
+setup_requires =
+ setuptools-scm
+[options.extras_require]
+dev =
+ pre-commit
+ tox
+testing =
+ pytest
+ pytest-benchmark
+
+[devpi:upload]
+formats=sdist.tgz,bdist_wheel
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/setup.py b/testing/web-platform/tests/tools/third_party/pluggy/setup.py
new file mode 100644
index 0000000000..ed442375f7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/setup.py
@@ -0,0 +1,5 @@
+from setuptools import setup
+
+
+if __name__ == "__main__":
+ setup(use_scm_version={"write_to": "src/pluggy/_version.py"})
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/__init__.py b/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/__init__.py
new file mode 100644
index 0000000000..979028f759
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/__init__.py
@@ -0,0 +1,18 @@
+try:
+ from ._version import version as __version__
+except ImportError:
+ # broken installation, we don't even try
+ # "unknown" only works because we do a poor man's version comparison
+ __version__ = "unknown"
+
+__all__ = [
+ "PluginManager",
+ "PluginValidationError",
+ "HookCallError",
+ "HookspecMarker",
+ "HookimplMarker",
+]
+
+from ._manager import PluginManager, PluginValidationError
+from ._callers import HookCallError
+from ._hooks import HookspecMarker, HookimplMarker
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_callers.py b/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_callers.py
new file mode 100644
index 0000000000..7a16f3bdd4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_callers.py
@@ -0,0 +1,60 @@
+"""
+Call loop machinery
+"""
+import sys
+
+from ._result import HookCallError, _Result, _raise_wrapfail
+
+
+def _multicall(hook_name, hook_impls, caller_kwargs, firstresult):
+ """Execute a call into multiple python functions/methods and return the
+ result(s).
+
+ ``caller_kwargs`` comes from _HookCaller.__call__().
+ """
+ __tracebackhide__ = True
+ results = []
+ excinfo = None
+ try: # run impl and wrapper setup functions in a loop
+ teardowns = []
+ try:
+ for hook_impl in reversed(hook_impls):
+ try:
+ args = [caller_kwargs[argname] for argname in hook_impl.argnames]
+ except KeyError:
+ for argname in hook_impl.argnames:
+ if argname not in caller_kwargs:
+ raise HookCallError(
+ f"hook call must provide argument {argname!r}"
+ )
+
+ if hook_impl.hookwrapper:
+ try:
+ gen = hook_impl.function(*args)
+ next(gen) # first yield
+ teardowns.append(gen)
+ except StopIteration:
+ _raise_wrapfail(gen, "did not yield")
+ else:
+ res = hook_impl.function(*args)
+ if res is not None:
+ results.append(res)
+ if firstresult: # halt further impl calls
+ break
+ except BaseException:
+ excinfo = sys.exc_info()
+ finally:
+ if firstresult: # first result hooks return a single value
+ outcome = _Result(results[0] if results else None, excinfo)
+ else:
+ outcome = _Result(results, excinfo)
+
+ # run all wrapper post-yield blocks
+ for gen in reversed(teardowns):
+ try:
+ gen.send(outcome)
+ _raise_wrapfail(gen, "has second yield")
+ except StopIteration:
+ pass
+
+ return outcome.get_result()
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_hooks.py b/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_hooks.py
new file mode 100644
index 0000000000..1e5fbb7595
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_hooks.py
@@ -0,0 +1,325 @@
+"""
+Internal hook annotation, representation and calling machinery.
+"""
+import inspect
+import sys
+import warnings
+
+
+class HookspecMarker:
+ """Decorator helper class for marking functions as hook specifications.
+
+ You can instantiate it with a project_name to get a decorator.
+ Calling :py:meth:`.PluginManager.add_hookspecs` later will discover all marked functions
+ if the :py:class:`.PluginManager` uses the same project_name.
+ """
+
+ def __init__(self, project_name):
+ self.project_name = project_name
+
+ def __call__(
+ self, function=None, firstresult=False, historic=False, warn_on_impl=None
+ ):
+ """if passed a function, directly sets attributes on the function
+ which will make it discoverable to :py:meth:`.PluginManager.add_hookspecs`.
+ If passed no function, returns a decorator which can be applied to a function
+ later using the attributes supplied.
+
+ If ``firstresult`` is ``True`` the 1:N hook call (N being the number of registered
+ hook implementation functions) will stop at I<=N when the I'th function
+ returns a non-``None`` result.
+
+ If ``historic`` is ``True`` calls to a hook will be memorized and replayed
+ on later registered plugins.
+
+ """
+
+ def setattr_hookspec_opts(func):
+ if historic and firstresult:
+ raise ValueError("cannot have a historic firstresult hook")
+ setattr(
+ func,
+ self.project_name + "_spec",
+ dict(
+ firstresult=firstresult,
+ historic=historic,
+ warn_on_impl=warn_on_impl,
+ ),
+ )
+ return func
+
+ if function is not None:
+ return setattr_hookspec_opts(function)
+ else:
+ return setattr_hookspec_opts
+
+
+class HookimplMarker:
+ """Decorator helper class for marking functions as hook implementations.
+
+ You can instantiate it with a ``project_name`` to get a decorator.
+ Calling :py:meth:`.PluginManager.register` later will discover all marked functions
+ if the :py:class:`.PluginManager` uses the same project_name.
+ """
+
+ def __init__(self, project_name):
+ self.project_name = project_name
+
+ def __call__(
+ self,
+ function=None,
+ hookwrapper=False,
+ optionalhook=False,
+ tryfirst=False,
+ trylast=False,
+ specname=None,
+ ):
+
+ """if passed a function, directly sets attributes on the function
+ which will make it discoverable to :py:meth:`.PluginManager.register`.
+ If passed no function, returns a decorator which can be applied to a
+ function later using the attributes supplied.
+
+ If ``optionalhook`` is ``True`` a missing matching hook specification will not result
+ in an error (by default it is an error if no matching spec is found).
+
+ If ``tryfirst`` is ``True`` this hook implementation will run as early as possible
+ in the chain of N hook implementations for a specification.
+
+ If ``trylast`` is ``True`` this hook implementation will run as late as possible
+ in the chain of N hook implementations.
+
+ If ``hookwrapper`` is ``True`` the hook implementation needs to execute exactly
+ one ``yield``. The code before the ``yield`` is run early, before any non-hookwrapper
+ function is run. The code after the ``yield`` is run after all non-hookwrapper
+ functions have run. The ``yield`` receives a :py:class:`.callers._Result` object
+ representing the exception or result outcome of the inner calls (including other
+ hookwrapper calls).
+
+ If ``specname`` is provided, it will be used instead of the function name when
+ matching this hook implementation to a hook specification during registration.
+
+ """
+
+ def setattr_hookimpl_opts(func):
+ setattr(
+ func,
+ self.project_name + "_impl",
+ dict(
+ hookwrapper=hookwrapper,
+ optionalhook=optionalhook,
+ tryfirst=tryfirst,
+ trylast=trylast,
+ specname=specname,
+ ),
+ )
+ return func
+
+ if function is None:
+ return setattr_hookimpl_opts
+ else:
+ return setattr_hookimpl_opts(function)
+
+
+def normalize_hookimpl_opts(opts):
+ opts.setdefault("tryfirst", False)
+ opts.setdefault("trylast", False)
+ opts.setdefault("hookwrapper", False)
+ opts.setdefault("optionalhook", False)
+ opts.setdefault("specname", None)
+
+
+_PYPY = hasattr(sys, "pypy_version_info")
+
+
+def varnames(func):
+ """Return tuple of positional and keywrord argument names for a function,
+ method, class or callable.
+
+ In case of a class, its ``__init__`` method is considered.
+ For methods the ``self`` parameter is not included.
+ """
+ if inspect.isclass(func):
+ try:
+ func = func.__init__
+ except AttributeError:
+ return (), ()
+ elif not inspect.isroutine(func): # callable object?
+ try:
+ func = getattr(func, "__call__", func)
+ except Exception:
+ return (), ()
+
+ try: # func MUST be a function or method here or we won't parse any args
+ spec = inspect.getfullargspec(func)
+ except TypeError:
+ return (), ()
+
+ args, defaults = tuple(spec.args), spec.defaults
+ if defaults:
+ index = -len(defaults)
+ args, kwargs = args[:index], tuple(args[index:])
+ else:
+ kwargs = ()
+
+ # strip any implicit instance arg
+ # pypy3 uses "obj" instead of "self" for default dunder methods
+ implicit_names = ("self",) if not _PYPY else ("self", "obj")
+ if args:
+ if inspect.ismethod(func) or (
+ "." in getattr(func, "__qualname__", ()) and args[0] in implicit_names
+ ):
+ args = args[1:]
+
+ return args, kwargs
+
+
+class _HookRelay:
+ """hook holder object for performing 1:N hook calls where N is the number
+ of registered plugins.
+
+ """
+
+
+class _HookCaller:
+ def __init__(self, name, hook_execute, specmodule_or_class=None, spec_opts=None):
+ self.name = name
+ self._wrappers = []
+ self._nonwrappers = []
+ self._hookexec = hook_execute
+ self._call_history = None
+ self.spec = None
+ if specmodule_or_class is not None:
+ assert spec_opts is not None
+ self.set_specification(specmodule_or_class, spec_opts)
+
+ def has_spec(self):
+ return self.spec is not None
+
+ def set_specification(self, specmodule_or_class, spec_opts):
+ assert not self.has_spec()
+ self.spec = HookSpec(specmodule_or_class, self.name, spec_opts)
+ if spec_opts.get("historic"):
+ self._call_history = []
+
+ def is_historic(self):
+ return self._call_history is not None
+
+ def _remove_plugin(self, plugin):
+ def remove(wrappers):
+ for i, method in enumerate(wrappers):
+ if method.plugin == plugin:
+ del wrappers[i]
+ return True
+
+ if remove(self._wrappers) is None:
+ if remove(self._nonwrappers) is None:
+ raise ValueError(f"plugin {plugin!r} not found")
+
+ def get_hookimpls(self):
+ # Order is important for _hookexec
+ return self._nonwrappers + self._wrappers
+
+ def _add_hookimpl(self, hookimpl):
+ """Add an implementation to the callback chain."""
+ if hookimpl.hookwrapper:
+ methods = self._wrappers
+ else:
+ methods = self._nonwrappers
+
+ if hookimpl.trylast:
+ methods.insert(0, hookimpl)
+ elif hookimpl.tryfirst:
+ methods.append(hookimpl)
+ else:
+ # find last non-tryfirst method
+ i = len(methods) - 1
+ while i >= 0 and methods[i].tryfirst:
+ i -= 1
+ methods.insert(i + 1, hookimpl)
+
+ def __repr__(self):
+ return f"<_HookCaller {self.name!r}>"
+
+ def __call__(self, *args, **kwargs):
+ if args:
+ raise TypeError("hook calling supports only keyword arguments")
+ assert not self.is_historic()
+
+ # This is written to avoid expensive operations when not needed.
+ if self.spec:
+ for argname in self.spec.argnames:
+ if argname not in kwargs:
+ notincall = tuple(set(self.spec.argnames) - kwargs.keys())
+ warnings.warn(
+ "Argument(s) {} which are declared in the hookspec "
+ "can not be found in this hook call".format(notincall),
+ stacklevel=2,
+ )
+ break
+
+ firstresult = self.spec.opts.get("firstresult")
+ else:
+ firstresult = False
+
+ return self._hookexec(self.name, self.get_hookimpls(), kwargs, firstresult)
+
+ def call_historic(self, result_callback=None, kwargs=None):
+ """Call the hook with given ``kwargs`` for all registered plugins and
+ for all plugins which will be registered afterwards.
+
+ If ``result_callback`` is not ``None`` it will be called for each
+ non-``None`` result obtained from a hook implementation.
+ """
+ self._call_history.append((kwargs or {}, result_callback))
+ # Historizing hooks don't return results.
+ # Remember firstresult isn't compatible with historic.
+ res = self._hookexec(self.name, self.get_hookimpls(), kwargs, False)
+ if result_callback is None:
+ return
+ for x in res or []:
+ result_callback(x)
+
+ def call_extra(self, methods, kwargs):
+ """Call the hook with some additional temporarily participating
+ methods using the specified ``kwargs`` as call parameters."""
+ old = list(self._nonwrappers), list(self._wrappers)
+ for method in methods:
+ opts = dict(hookwrapper=False, trylast=False, tryfirst=False)
+ hookimpl = HookImpl(None, "<temp>", method, opts)
+ self._add_hookimpl(hookimpl)
+ try:
+ return self(**kwargs)
+ finally:
+ self._nonwrappers, self._wrappers = old
+
+ def _maybe_apply_history(self, method):
+ """Apply call history to a new hookimpl if it is marked as historic."""
+ if self.is_historic():
+ for kwargs, result_callback in self._call_history:
+ res = self._hookexec(self.name, [method], kwargs, False)
+ if res and result_callback is not None:
+ result_callback(res[0])
+
+
+class HookImpl:
+ def __init__(self, plugin, plugin_name, function, hook_impl_opts):
+ self.function = function
+ self.argnames, self.kwargnames = varnames(self.function)
+ self.plugin = plugin
+ self.opts = hook_impl_opts
+ self.plugin_name = plugin_name
+ self.__dict__.update(hook_impl_opts)
+
+ def __repr__(self):
+ return f"<HookImpl plugin_name={self.plugin_name!r}, plugin={self.plugin!r}>"
+
+
+class HookSpec:
+ def __init__(self, namespace, name, opts):
+ self.namespace = namespace
+ self.function = function = getattr(namespace, name)
+ self.name = name
+ self.argnames, self.kwargnames = varnames(function)
+ self.opts = opts
+ self.warn_on_impl = opts.get("warn_on_impl")
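
The historic-hook machinery defined above (``call_historic`` together with
``_maybe_apply_history``) is normally driven through pluggy's public markers
rather than these internals. A minimal sketch, not part of the vendored file;
the "myproject" name and the example classes are illustrative:

    from pluggy import HookimplMarker, HookspecMarker, PluginManager

    hookspec = HookspecMarker("myproject")
    hookimpl = HookimplMarker("myproject")

    class Spec:
        @hookspec(historic=True)
        def on_config(self, config):
            """Historic hook: recorded calls are replayed for late plugins."""

    pm = PluginManager("myproject")
    pm.add_hookspecs(Spec)

    seen = []
    # Recorded in _call_history; no implementations are registered yet.
    pm.hook.on_config.call_historic(result_callback=seen.append,
                                    kwargs=dict(config={"x": 1}))

    class ExamplePlugin:
        @hookimpl
        def on_config(self, config):
            return config["x"] * 10

    # register() replays the recorded call via _maybe_apply_history().
    pm.register(ExamplePlugin())
    assert seen == [10]
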
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_manager.py b/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_manager.py
new file mode 100644
index 0000000000..65f4e50842
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_manager.py
@@ -0,0 +1,373 @@
+import inspect
+import sys
+import warnings
+
+from . import _tracing
+from ._callers import _Result, _multicall
+from ._hooks import HookImpl, _HookRelay, _HookCaller, normalize_hookimpl_opts
+
+if sys.version_info >= (3, 8):
+ from importlib import metadata as importlib_metadata
+else:
+ import importlib_metadata
+
+
+def _warn_for_function(warning, function):
+ warnings.warn_explicit(
+ warning,
+ type(warning),
+ lineno=function.__code__.co_firstlineno,
+ filename=function.__code__.co_filename,
+ )
+
+
+class PluginValidationError(Exception):
+ """plugin failed validation.
+
+ :param object plugin: the plugin which failed validation,
+ may be a module or an arbitrary object.
+ """
+
+ def __init__(self, plugin, message):
+ self.plugin = plugin
+ super(Exception, self).__init__(message)
+
+
+class DistFacade:
+ """Emulate a pkg_resources Distribution"""
+
+ def __init__(self, dist):
+ self._dist = dist
+
+ @property
+ def project_name(self):
+ return self.metadata["name"]
+
+ def __getattr__(self, attr, default=None):
+ return getattr(self._dist, attr, default)
+
+ def __dir__(self):
+ return sorted(dir(self._dist) + ["_dist", "project_name"])
+
+
+class PluginManager:
+ """Core :py:class:`.PluginManager` class which manages registration
+ of plugin objects and 1:N hook calling.
+
+ You can register new hooks by calling :py:meth:`add_hookspecs(module_or_class)
+ <.PluginManager.add_hookspecs>`.
+ You can register plugin objects (which contain hooks) by calling
+ :py:meth:`register(plugin) <.PluginManager.register>`. The :py:class:`.PluginManager`
+    is initialized with a ``project_name``, which is used to look up the
+    ``<project_name>_impl`` and ``<project_name>_spec`` attributes that the
+    hookspec/hookimpl markers set on decorated functions.
+
+ For debugging purposes you can call :py:meth:`.PluginManager.enable_tracing`
+ which will subsequently send debug information to the trace helper.
+ """
+
+ def __init__(self, project_name):
+ self.project_name = project_name
+ self._name2plugin = {}
+ self._plugin2hookcallers = {}
+ self._plugin_distinfo = []
+ self.trace = _tracing.TagTracer().get("pluginmanage")
+ self.hook = _HookRelay()
+ self._inner_hookexec = _multicall
+
+ def _hookexec(self, hook_name, methods, kwargs, firstresult):
+ # called from all hookcaller instances.
+ # enable_tracing will set its own wrapping function at self._inner_hookexec
+ return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
+
+ def register(self, plugin, name=None):
+ """Register a plugin and return its canonical name or ``None`` if the name
+ is blocked from registering. Raise a :py:class:`ValueError` if the plugin
+ is already registered."""
+ plugin_name = name or self.get_canonical_name(plugin)
+
+ if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
+ if self._name2plugin.get(plugin_name, -1) is None:
+ return # blocked plugin, return None to indicate no registration
+ raise ValueError(
+ "Plugin already registered: %s=%s\n%s"
+ % (plugin_name, plugin, self._name2plugin)
+ )
+
+ # XXX if an error happens we should make sure no state has been
+ # changed at point of return
+ self._name2plugin[plugin_name] = plugin
+
+ # register matching hook implementations of the plugin
+ self._plugin2hookcallers[plugin] = hookcallers = []
+ for name in dir(plugin):
+ hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
+ if hookimpl_opts is not None:
+ normalize_hookimpl_opts(hookimpl_opts)
+ method = getattr(plugin, name)
+ hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
+ name = hookimpl_opts.get("specname") or name
+ hook = getattr(self.hook, name, None)
+ if hook is None:
+ hook = _HookCaller(name, self._hookexec)
+ setattr(self.hook, name, hook)
+ elif hook.has_spec():
+ self._verify_hook(hook, hookimpl)
+ hook._maybe_apply_history(hookimpl)
+ hook._add_hookimpl(hookimpl)
+ hookcallers.append(hook)
+ return plugin_name
+
+ def parse_hookimpl_opts(self, plugin, name):
+ method = getattr(plugin, name)
+ if not inspect.isroutine(method):
+ return
+ try:
+ res = getattr(method, self.project_name + "_impl", None)
+ except Exception:
+ res = {}
+ if res is not None and not isinstance(res, dict):
+ # false positive
+ res = None
+ return res
+
+ def unregister(self, plugin=None, name=None):
+ """unregister a plugin object and all its contained hook implementations
+ from internal data structures."""
+ if name is None:
+ assert plugin is not None, "one of name or plugin needs to be specified"
+ name = self.get_name(plugin)
+
+ if plugin is None:
+ plugin = self.get_plugin(name)
+
+        # if self._name2plugin[name] is None, registration was blocked: ignore
+ if self._name2plugin.get(name):
+ del self._name2plugin[name]
+
+ for hookcaller in self._plugin2hookcallers.pop(plugin, []):
+ hookcaller._remove_plugin(plugin)
+
+ return plugin
+
+ def set_blocked(self, name):
+ """block registrations of the given name, unregister if already registered."""
+ self.unregister(name=name)
+ self._name2plugin[name] = None
+
+ def is_blocked(self, name):
+ """return ``True`` if the given plugin name is blocked."""
+ return name in self._name2plugin and self._name2plugin[name] is None
+
+ def add_hookspecs(self, module_or_class):
+ """add new hook specifications defined in the given ``module_or_class``.
+ Functions are recognized if they have been decorated accordingly."""
+ names = []
+ for name in dir(module_or_class):
+ spec_opts = self.parse_hookspec_opts(module_or_class, name)
+ if spec_opts is not None:
+ hc = getattr(self.hook, name, None)
+ if hc is None:
+ hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
+ setattr(self.hook, name, hc)
+ else:
+ # plugins registered this hook without knowing the spec
+ hc.set_specification(module_or_class, spec_opts)
+ for hookfunction in hc.get_hookimpls():
+ self._verify_hook(hc, hookfunction)
+ names.append(name)
+
+ if not names:
+ raise ValueError(
+ f"did not find any {self.project_name!r} hooks in {module_or_class!r}"
+ )
+
+ def parse_hookspec_opts(self, module_or_class, name):
+ method = getattr(module_or_class, name)
+ return getattr(method, self.project_name + "_spec", None)
+
+ def get_plugins(self):
+ """return the set of registered plugins."""
+ return set(self._plugin2hookcallers)
+
+ def is_registered(self, plugin):
+ """Return ``True`` if the plugin is already registered."""
+ return plugin in self._plugin2hookcallers
+
+ def get_canonical_name(self, plugin):
+ """Return canonical name for a plugin object. Note that a plugin
+ may be registered under a different name which was specified
+ by the caller of :py:meth:`register(plugin, name) <.PluginManager.register>`.
+        To obtain the name of a registered plugin use :py:meth:`get_name(plugin)
+ <.PluginManager.get_name>` instead."""
+ return getattr(plugin, "__name__", None) or str(id(plugin))
+
+ def get_plugin(self, name):
+ """Return a plugin or ``None`` for the given name."""
+ return self._name2plugin.get(name)
+
+ def has_plugin(self, name):
+ """Return ``True`` if a plugin with the given name is registered."""
+ return self.get_plugin(name) is not None
+
+ def get_name(self, plugin):
+ """Return name for registered plugin or ``None`` if not registered."""
+ for name, val in self._name2plugin.items():
+ if plugin == val:
+ return name
+
+ def _verify_hook(self, hook, hookimpl):
+ if hook.is_historic() and hookimpl.hookwrapper:
+ raise PluginValidationError(
+ hookimpl.plugin,
+ "Plugin %r\nhook %r\nhistoric incompatible to hookwrapper"
+ % (hookimpl.plugin_name, hook.name),
+ )
+
+ if hook.spec.warn_on_impl:
+ _warn_for_function(hook.spec.warn_on_impl, hookimpl.function)
+
+ # positional arg checking
+ notinspec = set(hookimpl.argnames) - set(hook.spec.argnames)
+ if notinspec:
+ raise PluginValidationError(
+ hookimpl.plugin,
+ "Plugin %r for hook %r\nhookimpl definition: %s\n"
+ "Argument(s) %s are declared in the hookimpl but "
+ "can not be found in the hookspec"
+ % (
+ hookimpl.plugin_name,
+ hook.name,
+ _formatdef(hookimpl.function),
+ notinspec,
+ ),
+ )
+
+ if hookimpl.hookwrapper and not inspect.isgeneratorfunction(hookimpl.function):
+ raise PluginValidationError(
+ hookimpl.plugin,
+ "Plugin %r for hook %r\nhookimpl definition: %s\n"
+ "Declared as hookwrapper=True but function is not a generator function"
+ % (hookimpl.plugin_name, hook.name, _formatdef(hookimpl.function)),
+ )
+
+ def check_pending(self):
+ """Verify that all hooks which have not been verified against
+ a hook specification are optional, otherwise raise :py:class:`.PluginValidationError`."""
+ for name in self.hook.__dict__:
+ if name[0] != "_":
+ hook = getattr(self.hook, name)
+ if not hook.has_spec():
+ for hookimpl in hook.get_hookimpls():
+ if not hookimpl.optionalhook:
+ raise PluginValidationError(
+ hookimpl.plugin,
+ "unknown hook %r in plugin %r"
+ % (name, hookimpl.plugin),
+ )
+
+ def load_setuptools_entrypoints(self, group, name=None):
+ """Load modules from querying the specified setuptools ``group``.
+
+ :param str group: entry point group to load plugins
+ :param str name: if given, loads only plugins with the given ``name``.
+ :rtype: int
+        :return: the number of plugins loaded by this call.
+ """
+ count = 0
+ for dist in list(importlib_metadata.distributions()):
+ for ep in dist.entry_points:
+ if (
+ ep.group != group
+ or (name is not None and ep.name != name)
+ # already registered
+ or self.get_plugin(ep.name)
+ or self.is_blocked(ep.name)
+ ):
+ continue
+ plugin = ep.load()
+ self.register(plugin, name=ep.name)
+ self._plugin_distinfo.append((plugin, DistFacade(dist)))
+ count += 1
+ return count
+
+ def list_plugin_distinfo(self):
+ """return list of distinfo/plugin tuples for all setuptools registered
+ plugins."""
+ return list(self._plugin_distinfo)
+
+ def list_name_plugin(self):
+ """return list of name/plugin pairs."""
+ return list(self._name2plugin.items())
+
+ def get_hookcallers(self, plugin):
+ """get all hook callers for the specified plugin."""
+ return self._plugin2hookcallers.get(plugin)
+
+ def add_hookcall_monitoring(self, before, after):
+ """add before/after tracing functions for all hooks
+ and return an undo function which, when called,
+ will remove the added tracers.
+
+ ``before(hook_name, hook_impls, kwargs)`` will be called ahead
+        of all hook calls and receive the hook name, a list
+ of HookImpl instances and the keyword arguments for the hook call.
+
+ ``after(outcome, hook_name, hook_impls, kwargs)`` receives the
+ same arguments as ``before`` but also a :py:class:`pluggy._callers._Result` object
+ which represents the result of the overall hook call.
+ """
+ oldcall = self._inner_hookexec
+
+ def traced_hookexec(hook_name, hook_impls, kwargs, firstresult):
+ before(hook_name, hook_impls, kwargs)
+ outcome = _Result.from_call(
+ lambda: oldcall(hook_name, hook_impls, kwargs, firstresult)
+ )
+ after(outcome, hook_name, hook_impls, kwargs)
+ return outcome.get_result()
+
+ self._inner_hookexec = traced_hookexec
+
+ def undo():
+ self._inner_hookexec = oldcall
+
+ return undo
+
+ def enable_tracing(self):
+ """enable tracing of hook calls and return an undo function."""
+ hooktrace = self.trace.root.get("hook")
+
+ def before(hook_name, methods, kwargs):
+ hooktrace.root.indent += 1
+ hooktrace(hook_name, kwargs)
+
+ def after(outcome, hook_name, methods, kwargs):
+ if outcome.excinfo is None:
+ hooktrace("finish", hook_name, "-->", outcome.get_result())
+ hooktrace.root.indent -= 1
+
+ return self.add_hookcall_monitoring(before, after)
+
+ def subset_hook_caller(self, name, remove_plugins):
+ """Return a new :py:class:`._hooks._HookCaller` instance for the named method
+ which manages calls to all registered plugins except the
+ ones from remove_plugins."""
+ orig = getattr(self.hook, name)
+ plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
+ if plugins_to_remove:
+ hc = _HookCaller(
+ orig.name, orig._hookexec, orig.spec.namespace, orig.spec.opts
+ )
+ for hookimpl in orig.get_hookimpls():
+ plugin = hookimpl.plugin
+ if plugin not in plugins_to_remove:
+ hc._add_hookimpl(hookimpl)
+ # we also keep track of this hook caller so it
+ # gets properly removed on plugin unregistration
+ self._plugin2hookcallers.setdefault(plugin, []).append(hc)
+ return hc
+ return orig
+
+
+def _formatdef(func):
+ return f"{func.__name__}{inspect.signature(func)}"
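
The monitoring and tracing helpers above (``add_hookcall_monitoring``,
``enable_tracing``) wrap ``_inner_hookexec``; a short sketch of how they are
used through the public API (the "example" project name and the Plugin class
are made up for illustration):

    from pluggy import HookimplMarker, HookspecMarker, PluginManager

    hookspec = HookspecMarker("example")
    hookimpl = HookimplMarker("example")

    class Spec:
        @hookspec
        def compute(self, value):
            """Return something derived from value."""

    class Plugin:
        @hookimpl
        def compute(self, value):
            return value + 1

    pm = PluginManager("example")
    pm.add_hookspecs(Spec)
    pm.register(Plugin(), name="plug")

    calls = []
    undo = pm.add_hookcall_monitoring(
        lambda hook_name, impls, kwargs: calls.append(("before", hook_name, kwargs)),
        lambda outcome, hook_name, impls, kwargs: calls.append(("after", outcome.get_result())),
    )
    assert pm.hook.compute(value=1) == [2]
    assert calls == [("before", "compute", {"value": 1}), ("after", [2])]
    undo()  # restores the untraced _inner_hookexec
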
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_result.py b/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_result.py
new file mode 100644
index 0000000000..4c1f7f1f3c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_result.py
@@ -0,0 +1,60 @@
+"""
+Hook wrapper "result" utilities.
+"""
+import sys
+
+
+def _raise_wrapfail(wrap_controller, msg):
+ co = wrap_controller.gi_code
+ raise RuntimeError(
+ "wrap_controller at %r %s:%d %s"
+ % (co.co_name, co.co_filename, co.co_firstlineno, msg)
+ )
+
+
+class HookCallError(Exception):
+ """Hook was called wrongly."""
+
+
+class _Result:
+ def __init__(self, result, excinfo):
+ self._result = result
+ self._excinfo = excinfo
+
+ @property
+ def excinfo(self):
+ return self._excinfo
+
+ @classmethod
+ def from_call(cls, func):
+ __tracebackhide__ = True
+ result = excinfo = None
+ try:
+ result = func()
+ except BaseException:
+ excinfo = sys.exc_info()
+
+ return cls(result, excinfo)
+
+ def force_result(self, result):
+ """Force the result(s) to ``result``.
+
+ If the hook was marked as a ``firstresult`` a single value should
+ be set otherwise set a (modified) list of results. Any exceptions
+ found during invocation will be deleted.
+ """
+ self._result = result
+ self._excinfo = None
+
+ def get_result(self):
+ """Get the result(s) for this hook call.
+
+        If the hook was marked as a ``firstresult``, only a single value
+        will be returned; otherwise a list of results.
+ """
+ __tracebackhide__ = True
+ if self._excinfo is None:
+ return self._result
+ else:
+ ex = self._excinfo
+ raise ex[1].with_traceback(ex[2])
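
A hookwrapper sees the ``_Result`` defined above as the value sent back from
its ``yield``; a small sketch (illustrative names, "example" is an arbitrary
project name):

    from pluggy import HookimplMarker, HookspecMarker, PluginManager

    hookspec = HookspecMarker("example")
    hookimpl = HookimplMarker("example")

    class Spec:
        @hookspec
        def greet(self, name):
            """Return a greeting."""

    class Impl:
        @hookimpl
        def greet(self, name):
            return f"hello {name}"

    class Wrapper:
        @hookimpl(hookwrapper=True)
        def greet(self, name):
            outcome = yield                 # remaining implementations run here
            results = outcome.get_result()  # re-raises if an implementation raised
            outcome.force_result([r.upper() for r in results])

    pm = PluginManager("example")
    pm.add_hookspecs(Spec)
    pm.register(Impl())
    pm.register(Wrapper())
    assert pm.hook.greet(name="wpt") == ["HELLO WPT"]
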
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_tracing.py b/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_tracing.py
new file mode 100644
index 0000000000..82c016271e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/src/pluggy/_tracing.py
@@ -0,0 +1,62 @@
+"""
+Tracing utils
+"""
+
+
+class TagTracer:
+ def __init__(self):
+ self._tags2proc = {}
+ self._writer = None
+ self.indent = 0
+
+ def get(self, name):
+ return TagTracerSub(self, (name,))
+
+ def _format_message(self, tags, args):
+ if isinstance(args[-1], dict):
+ extra = args[-1]
+ args = args[:-1]
+ else:
+ extra = {}
+
+ content = " ".join(map(str, args))
+ indent = " " * self.indent
+
+ lines = ["{}{} [{}]\n".format(indent, content, ":".join(tags))]
+
+ for name, value in extra.items():
+ lines.append(f"{indent} {name}: {value}\n")
+
+ return "".join(lines)
+
+ def _processmessage(self, tags, args):
+ if self._writer is not None and args:
+ self._writer(self._format_message(tags, args))
+ try:
+ processor = self._tags2proc[tags]
+ except KeyError:
+ pass
+ else:
+ processor(tags, args)
+
+ def setwriter(self, writer):
+ self._writer = writer
+
+ def setprocessor(self, tags, processor):
+ if isinstance(tags, str):
+ tags = tuple(tags.split(":"))
+ else:
+ assert isinstance(tags, tuple)
+ self._tags2proc[tags] = processor
+
+
+class TagTracerSub:
+ def __init__(self, root, tags):
+ self.root = root
+ self.tags = tags
+
+ def __call__(self, *args):
+ self.root._processmessage(self.tags, args)
+
+ def get(self, name):
+ return self.__class__(self.root, self.tags + (name,))
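
``PluginManager`` builds its ``self.trace`` helper from this module; a short
sketch of the ``TagTracer`` API on its own (the output strings follow the
``_format_message`` logic above):

    from pluggy._tracing import TagTracer

    root = TagTracer()
    lines = []
    root.setwriter(lines.append)        # writer receives formatted strings

    log = root.get("hook")              # TagTracerSub tagged ("hook",)
    log("calling", {"plugin": "demo"})  # a trailing dict becomes extra lines
    root.indent += 1
    log.get("finish")("done")           # nested tag ("hook", "finish")
    root.indent -= 1

    assert lines[0] == "calling [hook]\n    plugin: demo\n"
    assert lines[1] == "  done [hook:finish]\n"
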
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/testing/benchmark.py b/testing/web-platform/tests/tools/third_party/pluggy/testing/benchmark.py
new file mode 100644
index 0000000000..b0d4b9536a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/testing/benchmark.py
@@ -0,0 +1,102 @@
+"""
+Benchmarking and performance tests.
+"""
+import pytest
+from pluggy import HookspecMarker, HookimplMarker, PluginManager
+from pluggy._hooks import HookImpl
+from pluggy._callers import _multicall
+
+
+hookspec = HookspecMarker("example")
+hookimpl = HookimplMarker("example")
+
+
+@hookimpl
+def hook(arg1, arg2, arg3):
+ return arg1, arg2, arg3
+
+
+@hookimpl(hookwrapper=True)
+def wrapper(arg1, arg2, arg3):
+ yield
+
+
+@pytest.fixture(params=[10, 100], ids="hooks={}".format)
+def hooks(request):
+ return [hook for i in range(request.param)]
+
+
+@pytest.fixture(params=[10, 100], ids="wrappers={}".format)
+def wrappers(request):
+ return [wrapper for i in range(request.param)]
+
+
+def test_hook_and_wrappers_speed(benchmark, hooks, wrappers):
+ def setup():
+ hook_name = "foo"
+ hook_impls = []
+ for method in hooks + wrappers:
+ f = HookImpl(None, "<temp>", method, method.example_impl)
+ hook_impls.append(f)
+ caller_kwargs = {"arg1": 1, "arg2": 2, "arg3": 3}
+ firstresult = False
+ return (hook_name, hook_impls, caller_kwargs, firstresult), {}
+
+ benchmark.pedantic(_multicall, setup=setup)
+
+
+@pytest.mark.parametrize(
+ ("plugins, wrappers, nesting"),
+ [
+ (1, 1, 0),
+ (1, 1, 1),
+ (1, 1, 5),
+ (1, 5, 1),
+ (1, 5, 5),
+ (5, 1, 1),
+ (5, 1, 5),
+ (5, 5, 1),
+ (5, 5, 5),
+ (20, 20, 0),
+ (100, 100, 0),
+ ],
+)
+def test_call_hook(benchmark, plugins, wrappers, nesting):
+ pm = PluginManager("example")
+
+ class HookSpec:
+ @hookspec
+ def fun(self, hooks, nesting: int):
+ yield
+
+ class Plugin:
+ def __init__(self, num):
+ self.num = num
+
+ def __repr__(self):
+ return f"<Plugin {self.num}>"
+
+ @hookimpl
+ def fun(self, hooks, nesting: int):
+ if nesting:
+ hooks.fun(hooks=hooks, nesting=nesting - 1)
+
+ class PluginWrap:
+ def __init__(self, num):
+ self.num = num
+
+ def __repr__(self):
+ return f"<PluginWrap {self.num}>"
+
+ @hookimpl(hookwrapper=True)
+ def fun(self):
+ yield
+
+ pm.add_hookspecs(HookSpec)
+
+ for i in range(plugins):
+ pm.register(Plugin(i), name=f"plug_{i}")
+ for i in range(wrappers):
+ pm.register(PluginWrap(i), name=f"wrap_plug_{i}")
+
+ benchmark(pm.hook.fun, hooks=pm.hook, nesting=nesting)
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/testing/conftest.py b/testing/web-platform/tests/tools/third_party/pluggy/testing/conftest.py
new file mode 100644
index 0000000000..1fd4ecd5bd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/testing/conftest.py
@@ -0,0 +1,26 @@
+import pytest
+
+
+@pytest.fixture(
+ params=[lambda spec: spec, lambda spec: spec()],
+ ids=["spec-is-class", "spec-is-instance"],
+)
+def he_pm(request, pm):
+ from pluggy import HookspecMarker
+
+ hookspec = HookspecMarker("example")
+
+ class Hooks:
+ @hookspec
+ def he_method1(self, arg):
+ return arg + 1
+
+ pm.add_hookspecs(request.param(Hooks))
+ return pm
+
+
+@pytest.fixture
+def pm():
+ from pluggy import PluginManager
+
+ return PluginManager("example")
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/testing/test_details.py b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_details.py
new file mode 100644
index 0000000000..0ceb3b3eb1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_details.py
@@ -0,0 +1,135 @@
+import warnings
+import pytest
+from pluggy import PluginManager, HookimplMarker, HookspecMarker
+
+hookspec = HookspecMarker("example")
+hookimpl = HookimplMarker("example")
+
+
+def test_parse_hookimpl_override():
+ class MyPluginManager(PluginManager):
+ def parse_hookimpl_opts(self, module_or_class, name):
+ opts = PluginManager.parse_hookimpl_opts(self, module_or_class, name)
+ if opts is None:
+ if name.startswith("x1"):
+ opts = {}
+ return opts
+
+ class Plugin:
+ def x1meth(self):
+ pass
+
+ @hookimpl(hookwrapper=True, tryfirst=True)
+ def x1meth2(self):
+ yield # pragma: no cover
+
+ class Spec:
+ @hookspec
+ def x1meth(self):
+ pass
+
+ @hookspec
+ def x1meth2(self):
+ pass
+
+ pm = MyPluginManager(hookspec.project_name)
+ pm.register(Plugin())
+ pm.add_hookspecs(Spec)
+ assert not pm.hook.x1meth._nonwrappers[0].hookwrapper
+ assert not pm.hook.x1meth._nonwrappers[0].tryfirst
+ assert not pm.hook.x1meth._nonwrappers[0].trylast
+ assert not pm.hook.x1meth._nonwrappers[0].optionalhook
+
+ assert pm.hook.x1meth2._wrappers[0].tryfirst
+ assert pm.hook.x1meth2._wrappers[0].hookwrapper
+
+
+def test_warn_when_deprecated_specified(recwarn):
+ warning = DeprecationWarning("foo is deprecated")
+
+ class Spec:
+ @hookspec(warn_on_impl=warning)
+ def foo(self):
+ pass
+
+ class Plugin:
+ @hookimpl
+ def foo(self):
+ pass
+
+ pm = PluginManager(hookspec.project_name)
+ pm.add_hookspecs(Spec)
+
+ with pytest.warns(DeprecationWarning) as records:
+ pm.register(Plugin())
+ (record,) = records
+ assert record.message is warning
+ assert record.filename == Plugin.foo.__code__.co_filename
+ assert record.lineno == Plugin.foo.__code__.co_firstlineno
+
+
+def test_plugin_getattr_raises_errors():
+ """Pluggy must be able to handle plugins which raise weird exceptions
+ when getattr() gets called (#11).
+ """
+
+ class DontTouchMe:
+ def __getattr__(self, x):
+ raise Exception("cant touch me")
+
+ class Module:
+ pass
+
+ module = Module()
+ module.x = DontTouchMe()
+
+ pm = PluginManager(hookspec.project_name)
+ # register() would raise an error
+ pm.register(module, "donttouch")
+ assert pm.get_plugin("donttouch") is module
+
+
+def test_warning_on_call_vs_hookspec_arg_mismatch():
+    """Verify that if a hook is called with fewer arguments than declared in the
+    spec, a warning is emitted.
+ """
+
+ class Spec:
+ @hookspec
+ def myhook(self, arg1, arg2):
+ pass
+
+ class Plugin:
+ @hookimpl
+ def myhook(self, arg1):
+ pass
+
+ pm = PluginManager(hookspec.project_name)
+ pm.register(Plugin())
+ pm.add_hookspecs(Spec())
+
+ with warnings.catch_warnings(record=True) as warns:
+ warnings.simplefilter("always")
+
+ # calling should trigger a warning
+ pm.hook.myhook(arg1=1)
+
+ assert len(warns) == 1
+ warning = warns[-1]
+ assert issubclass(warning.category, Warning)
+ assert "Argument(s) ('arg2',)" in str(warning.message)
+
+
+def test_repr():
+ class Plugin:
+ @hookimpl
+ def myhook(self):
+ raise NotImplementedError()
+
+ pm = PluginManager(hookspec.project_name)
+
+ plugin = Plugin()
+ pname = pm.register(plugin)
+ assert repr(pm.hook.myhook._nonwrappers[0]) == (
+ f"<HookImpl plugin_name={pname!r}, plugin={plugin!r}>"
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/testing/test_helpers.py b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_helpers.py
new file mode 100644
index 0000000000..465858c499
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_helpers.py
@@ -0,0 +1,84 @@
+from pluggy._hooks import varnames
+from pluggy._manager import _formatdef
+
+
+def test_varnames():
+ def f(x):
+ i = 3 # noqa
+
+ class A:
+ def f(self, y):
+ pass
+
+ class B:
+ def __call__(self, z):
+ pass
+
+ assert varnames(f) == (("x",), ())
+ assert varnames(A().f) == (("y",), ())
+ assert varnames(B()) == (("z",), ())
+
+
+def test_varnames_default():
+ def f(x, y=3):
+ pass
+
+ assert varnames(f) == (("x",), ("y",))
+
+
+def test_varnames_class():
+ class C:
+ def __init__(self, x):
+ pass
+
+ class D:
+ pass
+
+ class E:
+ def __init__(self, x):
+ pass
+
+ class F:
+ pass
+
+ assert varnames(C) == (("x",), ())
+ assert varnames(D) == ((), ())
+ assert varnames(E) == (("x",), ())
+ assert varnames(F) == ((), ())
+
+
+def test_varnames_keyword_only():
+ def f1(x, *, y):
+ pass
+
+ def f2(x, *, y=3):
+ pass
+
+ def f3(x=1, *, y=3):
+ pass
+
+ assert varnames(f1) == (("x",), ())
+ assert varnames(f2) == (("x",), ())
+ assert varnames(f3) == ((), ("x",))
+
+
+def test_formatdef():
+ def function1():
+ pass
+
+ assert _formatdef(function1) == "function1()"
+
+ def function2(arg1):
+ pass
+
+ assert _formatdef(function2) == "function2(arg1)"
+
+ def function3(arg1, arg2="qwe"):
+ pass
+
+ assert _formatdef(function3) == "function3(arg1, arg2='qwe')"
+
+ def function4(arg1, *args, **kwargs):
+ pass
+
+ assert _formatdef(function4) == "function4(arg1, *args, **kwargs)"
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/testing/test_hookcaller.py b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_hookcaller.py
new file mode 100644
index 0000000000..9eeaef8666
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_hookcaller.py
@@ -0,0 +1,272 @@
+import pytest
+
+from pluggy import HookimplMarker, HookspecMarker, PluginValidationError
+from pluggy._hooks import HookImpl
+
+hookspec = HookspecMarker("example")
+hookimpl = HookimplMarker("example")
+
+
+@pytest.fixture
+def hc(pm):
+ class Hooks:
+ @hookspec
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+ return pm.hook.he_method1
+
+
+@pytest.fixture
+def addmeth(hc):
+ def addmeth(tryfirst=False, trylast=False, hookwrapper=False):
+ def wrap(func):
+ hookimpl(tryfirst=tryfirst, trylast=trylast, hookwrapper=hookwrapper)(func)
+ hc._add_hookimpl(HookImpl(None, "<temp>", func, func.example_impl))
+ return func
+
+ return wrap
+
+ return addmeth
+
+
+def funcs(hookmethods):
+ return [hookmethod.function for hookmethod in hookmethods]
+
+
+def test_adding_nonwrappers(hc, addmeth):
+ @addmeth()
+ def he_method1():
+ pass
+
+ @addmeth()
+ def he_method2():
+ pass
+
+ @addmeth()
+ def he_method3():
+ pass
+
+ assert funcs(hc._nonwrappers) == [he_method1, he_method2, he_method3]
+
+
+def test_adding_nonwrappers_trylast(hc, addmeth):
+ @addmeth()
+ def he_method1_middle():
+ pass
+
+ @addmeth(trylast=True)
+ def he_method1():
+ pass
+
+ @addmeth()
+ def he_method1_b():
+ pass
+
+ assert funcs(hc._nonwrappers) == [he_method1, he_method1_middle, he_method1_b]
+
+
+def test_adding_nonwrappers_trylast3(hc, addmeth):
+ @addmeth()
+ def he_method1_a():
+ pass
+
+ @addmeth(trylast=True)
+ def he_method1_b():
+ pass
+
+ @addmeth()
+ def he_method1_c():
+ pass
+
+ @addmeth(trylast=True)
+ def he_method1_d():
+ pass
+
+ assert funcs(hc._nonwrappers) == [
+ he_method1_d,
+ he_method1_b,
+ he_method1_a,
+ he_method1_c,
+ ]
+
+
+def test_adding_nonwrappers_trylast2(hc, addmeth):
+ @addmeth()
+ def he_method1_middle():
+ pass
+
+ @addmeth()
+ def he_method1_b():
+ pass
+
+ @addmeth(trylast=True)
+ def he_method1():
+ pass
+
+ assert funcs(hc._nonwrappers) == [he_method1, he_method1_middle, he_method1_b]
+
+
+def test_adding_nonwrappers_tryfirst(hc, addmeth):
+ @addmeth(tryfirst=True)
+ def he_method1():
+ pass
+
+ @addmeth()
+ def he_method1_middle():
+ pass
+
+ @addmeth()
+ def he_method1_b():
+ pass
+
+ assert funcs(hc._nonwrappers) == [he_method1_middle, he_method1_b, he_method1]
+
+
+def test_adding_wrappers_ordering(hc, addmeth):
+ @addmeth(hookwrapper=True)
+ def he_method1():
+ pass
+
+ @addmeth()
+ def he_method1_middle():
+ pass
+
+ @addmeth(hookwrapper=True)
+ def he_method3():
+ pass
+
+ assert funcs(hc._nonwrappers) == [he_method1_middle]
+ assert funcs(hc._wrappers) == [he_method1, he_method3]
+
+
+def test_adding_wrappers_ordering_tryfirst(hc, addmeth):
+ @addmeth(hookwrapper=True, tryfirst=True)
+ def he_method1():
+ pass
+
+ @addmeth(hookwrapper=True)
+ def he_method2():
+ pass
+
+ assert hc._nonwrappers == []
+ assert funcs(hc._wrappers) == [he_method2, he_method1]
+
+
+def test_hookspec(pm):
+ class HookSpec:
+ @hookspec()
+ def he_myhook1(arg1):
+ pass
+
+ @hookspec(firstresult=True)
+ def he_myhook2(arg1):
+ pass
+
+ @hookspec(firstresult=False)
+ def he_myhook3(arg1):
+ pass
+
+ pm.add_hookspecs(HookSpec)
+ assert not pm.hook.he_myhook1.spec.opts["firstresult"]
+ assert pm.hook.he_myhook2.spec.opts["firstresult"]
+ assert not pm.hook.he_myhook3.spec.opts["firstresult"]
+
+
+@pytest.mark.parametrize("name", ["hookwrapper", "optionalhook", "tryfirst", "trylast"])
+@pytest.mark.parametrize("val", [True, False])
+def test_hookimpl(name, val):
+ @hookimpl(**{name: val})
+ def he_myhook1(arg1):
+ pass
+
+ if val:
+ assert he_myhook1.example_impl.get(name)
+ else:
+ assert not hasattr(he_myhook1, name)
+
+
+def test_hookrelay_registry(pm):
+ """Verify hook caller instances are registered by name onto the relay
+ and can be likewise unregistered."""
+
+ class Api:
+ @hookspec
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+ hook = pm.hook
+ assert hasattr(hook, "hello")
+ assert repr(hook.hello).find("hello") != -1
+
+ class Plugin:
+ @hookimpl
+ def hello(self, arg):
+ return arg + 1
+
+ plugin = Plugin()
+ pm.register(plugin)
+ out = hook.hello(arg=3)
+ assert out == [4]
+ assert not hasattr(hook, "world")
+ pm.unregister(plugin)
+ assert hook.hello(arg=3) == []
+
+
+def test_hookrelay_registration_by_specname(pm):
+ """Verify hook caller instances may also be registered by specifying a
+ specname option to the hookimpl"""
+
+ class Api:
+ @hookspec
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+ hook = pm.hook
+ assert hasattr(hook, "hello")
+ assert len(pm.hook.hello.get_hookimpls()) == 0
+
+ class Plugin:
+ @hookimpl(specname="hello")
+ def foo(self, arg):
+ return arg + 1
+
+ plugin = Plugin()
+ pm.register(plugin)
+ out = hook.hello(arg=3)
+ assert out == [4]
+
+
+def test_hookrelay_registration_by_specname_raises(pm):
+    """Verify that using specname still raises the same types of errors during
+    registration as it would have without using specname."""
+
+ class Api:
+ @hookspec
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+
+ # make sure a bad signature still raises an error when using specname
+ class Plugin:
+ @hookimpl(specname="hello")
+ def foo(self, arg, too, many, args):
+ return arg + 1
+
+ with pytest.raises(PluginValidationError):
+ pm.register(Plugin())
+
+ # make sure check_pending still fails if specname doesn't have a
+ # corresponding spec. EVEN if the function name matches one.
+ class Plugin2:
+ @hookimpl(specname="bar")
+ def hello(self, arg):
+ return arg + 1
+
+ pm.register(Plugin2())
+ with pytest.raises(PluginValidationError):
+ pm.check_pending()
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/testing/test_invocations.py b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_invocations.py
new file mode 100644
index 0000000000..323b9b21e8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_invocations.py
@@ -0,0 +1,215 @@
+import pytest
+from pluggy import PluginValidationError, HookimplMarker, HookspecMarker
+
+
+hookspec = HookspecMarker("example")
+hookimpl = HookimplMarker("example")
+
+
+def test_argmismatch(pm):
+ class Api:
+ @hookspec
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+
+ class Plugin:
+ @hookimpl
+ def hello(self, argwrong):
+ pass
+
+ with pytest.raises(PluginValidationError) as exc:
+ pm.register(Plugin())
+
+ assert "argwrong" in str(exc.value)
+
+
+def test_only_kwargs(pm):
+ class Api:
+ @hookspec
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+ with pytest.raises(TypeError) as exc:
+ pm.hook.hello(3)
+
+ comprehensible = "hook calling supports only keyword arguments"
+ assert comprehensible in str(exc.value)
+
+
+def test_opt_in_args(pm):
+    """Verify that two hookimpls with mutually exclusive args can serve
+ under the same spec.
+ """
+
+ class Api:
+ @hookspec
+ def hello(self, arg1, arg2, common_arg):
+ "api hook 1"
+
+ class Plugin1:
+ @hookimpl
+ def hello(self, arg1, common_arg):
+ return arg1 + common_arg
+
+ class Plugin2:
+ @hookimpl
+ def hello(self, arg2, common_arg):
+ return arg2 + common_arg
+
+ pm.add_hookspecs(Api)
+ pm.register(Plugin1())
+ pm.register(Plugin2())
+
+ results = pm.hook.hello(arg1=1, arg2=2, common_arg=0)
+ assert results == [2, 1]
+
+
+def test_call_order(pm):
+ class Api:
+ @hookspec
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+
+ class Plugin1:
+ @hookimpl
+ def hello(self, arg):
+ return 1
+
+ class Plugin2:
+ @hookimpl
+ def hello(self, arg):
+ return 2
+
+ class Plugin3:
+ @hookimpl
+ def hello(self, arg):
+ return 3
+
+ class Plugin4:
+ @hookimpl(hookwrapper=True)
+ def hello(self, arg):
+ assert arg == 0
+ outcome = yield
+ assert outcome.get_result() == [3, 2, 1]
+
+ pm.register(Plugin1())
+ pm.register(Plugin2())
+ pm.register(Plugin3())
+ pm.register(Plugin4()) # hookwrapper should get same list result
+ res = pm.hook.hello(arg=0)
+ assert res == [3, 2, 1]
+
+
+def test_firstresult_definition(pm):
+ class Api:
+ @hookspec(firstresult=True)
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+
+ class Plugin1:
+ @hookimpl
+ def hello(self, arg):
+ return arg + 1
+
+ class Plugin2:
+ @hookimpl
+ def hello(self, arg):
+ return arg - 1
+
+ class Plugin3:
+ @hookimpl
+ def hello(self, arg):
+ return None
+
+ class Plugin4:
+ @hookimpl(hookwrapper=True)
+ def hello(self, arg):
+ assert arg == 3
+ outcome = yield
+ assert outcome.get_result() == 2
+
+ pm.register(Plugin1()) # discarded - not the last registered plugin
+ pm.register(Plugin2()) # used as result
+ pm.register(Plugin3()) # None result is ignored
+ pm.register(Plugin4()) # hookwrapper should get same non-list result
+ res = pm.hook.hello(arg=3)
+ assert res == 2
+
+
+def test_firstresult_force_result(pm):
+ """Verify forcing a result in a wrapper."""
+
+ class Api:
+ @hookspec(firstresult=True)
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+
+ class Plugin1:
+ @hookimpl
+ def hello(self, arg):
+ return arg + 1
+
+ class Plugin2:
+ @hookimpl(hookwrapper=True)
+ def hello(self, arg):
+ assert arg == 3
+ outcome = yield
+ assert outcome.get_result() == 4
+ outcome.force_result(0)
+
+ class Plugin3:
+ @hookimpl
+ def hello(self, arg):
+ return None
+
+ pm.register(Plugin1())
+ pm.register(Plugin2()) # wrapper
+ pm.register(Plugin3()) # ignored since returns None
+ res = pm.hook.hello(arg=3)
+ assert res == 0 # this result is forced and not a list
+
+
+def test_firstresult_returns_none(pm):
+    """If None results are returned by underlying implementations, ensure
+ the multi-call loop returns a None value.
+ """
+
+ class Api:
+ @hookspec(firstresult=True)
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+
+ class Plugin1:
+ @hookimpl
+ def hello(self, arg):
+ return None
+
+ pm.register(Plugin1())
+ res = pm.hook.hello(arg=3)
+ assert res is None
+
+
+def test_firstresult_no_plugin(pm):
+ """If no implementations/plugins have been registered for a firstresult
+    hook, the multi-call loop should return a None value.
+ """
+
+ class Api:
+ @hookspec(firstresult=True)
+ def hello(self, arg):
+ "api hook 1"
+
+ pm.add_hookspecs(Api)
+ res = pm.hook.hello(arg=3)
+ assert res is None
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/testing/test_multicall.py b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_multicall.py
new file mode 100644
index 0000000000..8ffb452f69
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_multicall.py
@@ -0,0 +1,147 @@
+import pytest
+from pluggy import HookCallError, HookspecMarker, HookimplMarker
+from pluggy._hooks import HookImpl
+from pluggy._callers import _multicall
+
+
+hookspec = HookspecMarker("example")
+hookimpl = HookimplMarker("example")
+
+
+def MC(methods, kwargs, firstresult=False):
+ caller = _multicall
+ hookfuncs = []
+ for method in methods:
+ f = HookImpl(None, "<temp>", method, method.example_impl)
+ hookfuncs.append(f)
+ return caller("foo", hookfuncs, kwargs, firstresult)
+
+
+def test_keyword_args():
+ @hookimpl
+ def f(x):
+ return x + 1
+
+ class A:
+ @hookimpl
+ def f(self, x, y):
+ return x + y
+
+ reslist = MC([f, A().f], dict(x=23, y=24))
+ assert reslist == [24 + 23, 24]
+
+
+def test_keyword_args_with_defaultargs():
+ @hookimpl
+ def f(x, z=1):
+ return x + z
+
+ reslist = MC([f], dict(x=23, y=24))
+ assert reslist == [24]
+
+
+def test_tags_call_error():
+ @hookimpl
+ def f(x):
+ return x
+
+ with pytest.raises(HookCallError):
+ MC([f], {})
+
+
+def test_call_none_is_no_result():
+ @hookimpl
+ def m1():
+ return 1
+
+ @hookimpl
+ def m2():
+ return None
+
+ res = MC([m1, m2], {}, firstresult=True)
+ assert res == 1
+ res = MC([m1, m2], {}, {})
+ assert res == [1]
+
+
+def test_hookwrapper():
+ out = []
+
+ @hookimpl(hookwrapper=True)
+ def m1():
+ out.append("m1 init")
+ yield None
+ out.append("m1 finish")
+
+ @hookimpl
+ def m2():
+ out.append("m2")
+ return 2
+
+ res = MC([m2, m1], {})
+ assert res == [2]
+ assert out == ["m1 init", "m2", "m1 finish"]
+ out[:] = []
+ res = MC([m2, m1], {}, firstresult=True)
+ assert res == 2
+ assert out == ["m1 init", "m2", "m1 finish"]
+
+
+def test_hookwrapper_order():
+ out = []
+
+ @hookimpl(hookwrapper=True)
+ def m1():
+ out.append("m1 init")
+ yield 1
+ out.append("m1 finish")
+
+ @hookimpl(hookwrapper=True)
+ def m2():
+ out.append("m2 init")
+ yield 2
+ out.append("m2 finish")
+
+ res = MC([m2, m1], {})
+ assert res == []
+ assert out == ["m1 init", "m2 init", "m2 finish", "m1 finish"]
+
+
+def test_hookwrapper_not_yield():
+ @hookimpl(hookwrapper=True)
+ def m1():
+ pass
+
+ with pytest.raises(TypeError):
+ MC([m1], {})
+
+
+def test_hookwrapper_too_many_yield():
+ @hookimpl(hookwrapper=True)
+ def m1():
+ yield 1
+ yield 2
+
+ with pytest.raises(RuntimeError) as ex:
+ MC([m1], {})
+ assert "m1" in str(ex.value)
+ assert (__file__ + ":") in str(ex.value)
+
+
+@pytest.mark.parametrize("exc", [ValueError, SystemExit])
+def test_hookwrapper_exception(exc):
+ out = []
+
+ @hookimpl(hookwrapper=True)
+ def m1():
+ out.append("m1 init")
+ yield None
+ out.append("m1 finish")
+
+ @hookimpl
+ def m2():
+ raise exc
+
+ with pytest.raises(exc):
+ MC([m2, m1], {})
+ assert out == ["m1 init", "m1 finish"]
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/testing/test_pluginmanager.py b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_pluginmanager.py
new file mode 100644
index 0000000000..304a007a58
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_pluginmanager.py
@@ -0,0 +1,544 @@
+"""
+``PluginManager`` unit and public API testing.
+"""
+import pytest
+
+from pluggy import (
+ PluginValidationError,
+ HookCallError,
+ HookimplMarker,
+ HookspecMarker,
+)
+from pluggy._manager import importlib_metadata
+
+
+hookspec = HookspecMarker("example")
+hookimpl = HookimplMarker("example")
+
+
+def test_plugin_double_register(pm):
+    """Registering the same plugin more than once isn't allowed"""
+ pm.register(42, name="abc")
+ with pytest.raises(ValueError):
+ pm.register(42, name="abc")
+ with pytest.raises(ValueError):
+ pm.register(42, name="def")
+
+
+def test_pm(pm):
+ """Basic registration with objects"""
+
+ class A:
+ pass
+
+ a1, a2 = A(), A()
+ pm.register(a1)
+ assert pm.is_registered(a1)
+ pm.register(a2, "hello")
+ assert pm.is_registered(a2)
+ out = pm.get_plugins()
+ assert a1 in out
+ assert a2 in out
+ assert pm.get_plugin("hello") == a2
+ assert pm.unregister(a1) == a1
+ assert not pm.is_registered(a1)
+
+ out = pm.list_name_plugin()
+ assert len(out) == 1
+ assert out == [("hello", a2)]
+
+
+def test_has_plugin(pm):
+ class A:
+ pass
+
+ a1 = A()
+ pm.register(a1, "hello")
+ assert pm.is_registered(a1)
+ assert pm.has_plugin("hello")
+
+
+def test_register_dynamic_attr(he_pm):
+ class A:
+ def __getattr__(self, name):
+ if name[0] != "_":
+ return 42
+ raise AttributeError()
+
+ a = A()
+ he_pm.register(a)
+ assert not he_pm.get_hookcallers(a)
+
+
+def test_pm_name(pm):
+ class A:
+ pass
+
+ a1 = A()
+ name = pm.register(a1, name="hello")
+ assert name == "hello"
+ pm.unregister(a1)
+ assert pm.get_plugin(a1) is None
+ assert not pm.is_registered(a1)
+ assert not pm.get_plugins()
+ name2 = pm.register(a1, name="hello")
+ assert name2 == name
+ pm.unregister(name="hello")
+ assert pm.get_plugin(a1) is None
+ assert not pm.is_registered(a1)
+ assert not pm.get_plugins()
+
+
+def test_set_blocked(pm):
+ class A:
+ pass
+
+ a1 = A()
+ name = pm.register(a1)
+ assert pm.is_registered(a1)
+ assert not pm.is_blocked(name)
+ pm.set_blocked(name)
+ assert pm.is_blocked(name)
+ assert not pm.is_registered(a1)
+
+ pm.set_blocked("somename")
+ assert pm.is_blocked("somename")
+ assert not pm.register(A(), "somename")
+ pm.unregister(name="somename")
+ assert pm.is_blocked("somename")
+
+
+def test_register_mismatch_method(he_pm):
+ class hello:
+ @hookimpl
+ def he_method_notexists(self):
+ pass
+
+ plugin = hello()
+
+ he_pm.register(plugin)
+ with pytest.raises(PluginValidationError) as excinfo:
+ he_pm.check_pending()
+ assert excinfo.value.plugin is plugin
+
+
+def test_register_mismatch_arg(he_pm):
+ class hello:
+ @hookimpl
+ def he_method1(self, qlwkje):
+ pass
+
+ plugin = hello()
+
+ with pytest.raises(PluginValidationError) as excinfo:
+ he_pm.register(plugin)
+ assert excinfo.value.plugin is plugin
+
+
+def test_register_hookwrapper_not_a_generator_function(he_pm):
+ class hello:
+ @hookimpl(hookwrapper=True)
+ def he_method1(self):
+ pass # pragma: no cover
+
+ plugin = hello()
+
+ with pytest.raises(PluginValidationError, match="generator function") as excinfo:
+ he_pm.register(plugin)
+ assert excinfo.value.plugin is plugin
+
+
+def test_register(pm):
+ class MyPlugin:
+ pass
+
+ my = MyPlugin()
+ pm.register(my)
+ assert my in pm.get_plugins()
+ my2 = MyPlugin()
+ pm.register(my2)
+ assert {my, my2}.issubset(pm.get_plugins())
+
+ assert pm.is_registered(my)
+ assert pm.is_registered(my2)
+ pm.unregister(my)
+ assert not pm.is_registered(my)
+ assert my not in pm.get_plugins()
+
+
+def test_register_unknown_hooks(pm):
+ class Plugin1:
+ @hookimpl
+ def he_method1(self, arg):
+ return arg + 1
+
+ pname = pm.register(Plugin1())
+
+ class Hooks:
+ @hookspec
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+ # assert not pm._unverified_hooks
+ assert pm.hook.he_method1(arg=1) == [2]
+ assert len(pm.get_hookcallers(pm.get_plugin(pname))) == 1
+
+
+def test_register_historic(pm):
+ class Hooks:
+ @hookspec(historic=True)
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+
+ pm.hook.he_method1.call_historic(kwargs=dict(arg=1))
+ out = []
+
+ class Plugin:
+ @hookimpl
+ def he_method1(self, arg):
+ out.append(arg)
+
+ pm.register(Plugin())
+ assert out == [1]
+
+ class Plugin2:
+ @hookimpl
+ def he_method1(self, arg):
+ out.append(arg * 10)
+
+ pm.register(Plugin2())
+ assert out == [1, 10]
+ pm.hook.he_method1.call_historic(kwargs=dict(arg=12))
+ assert out == [1, 10, 120, 12]
+
+
+@pytest.mark.parametrize("result_callback", [True, False])
+def test_with_result_memorized(pm, result_callback):
+    """Verify that ``_HookCaller._maybe_apply_history()``
+ correctly applies the ``result_callback`` function, when provided,
+ to the result from calling each newly registered hook.
+ """
+ out = []
+ if result_callback:
+
+ def callback(res):
+ out.append(res)
+
+ else:
+ callback = None
+
+ class Hooks:
+ @hookspec(historic=True)
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+
+ class Plugin1:
+ @hookimpl
+ def he_method1(self, arg):
+ return arg * 10
+
+ pm.register(Plugin1())
+
+ he_method1 = pm.hook.he_method1
+ he_method1.call_historic(result_callback=callback, kwargs=dict(arg=1))
+
+ class Plugin2:
+ @hookimpl
+ def he_method1(self, arg):
+ return arg * 10
+
+ pm.register(Plugin2())
+ if result_callback:
+ assert out == [10, 10]
+ else:
+ assert out == []
+
+
+def test_with_callbacks_immediately_executed(pm):
+ class Hooks:
+ @hookspec(historic=True)
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+
+ class Plugin1:
+ @hookimpl
+ def he_method1(self, arg):
+ return arg * 10
+
+ class Plugin2:
+ @hookimpl
+ def he_method1(self, arg):
+ return arg * 20
+
+ class Plugin3:
+ @hookimpl
+ def he_method1(self, arg):
+ return arg * 30
+
+ out = []
+ pm.register(Plugin1())
+ pm.register(Plugin2())
+
+ he_method1 = pm.hook.he_method1
+ he_method1.call_historic(lambda res: out.append(res), dict(arg=1))
+ assert out == [20, 10]
+ pm.register(Plugin3())
+ assert out == [20, 10, 30]
+
+
+def test_register_historic_incompat_hookwrapper(pm):
+ class Hooks:
+ @hookspec(historic=True)
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+
+ out = []
+
+ class Plugin:
+ @hookimpl(hookwrapper=True)
+ def he_method1(self, arg):
+ out.append(arg)
+
+ with pytest.raises(PluginValidationError):
+ pm.register(Plugin())
+
+
+def test_call_extra(pm):
+ class Hooks:
+ @hookspec
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+
+ def he_method1(arg):
+ return arg * 10
+
+ out = pm.hook.he_method1.call_extra([he_method1], dict(arg=1))
+ assert out == [10]
+
+
+def test_call_with_too_few_args(pm):
+ class Hooks:
+ @hookspec
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+
+ class Plugin1:
+ @hookimpl
+ def he_method1(self, arg):
+ 0 / 0
+
+ pm.register(Plugin1())
+ with pytest.raises(HookCallError):
+ with pytest.warns(UserWarning):
+ pm.hook.he_method1()
+
+
+def test_subset_hook_caller(pm):
+ class Hooks:
+ @hookspec
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+
+ out = []
+
+ class Plugin1:
+ @hookimpl
+ def he_method1(self, arg):
+ out.append(arg)
+
+ class Plugin2:
+ @hookimpl
+ def he_method1(self, arg):
+ out.append(arg * 10)
+
+ class PluginNo:
+ pass
+
+ plugin1, plugin2, plugin3 = Plugin1(), Plugin2(), PluginNo()
+ pm.register(plugin1)
+ pm.register(plugin2)
+ pm.register(plugin3)
+ pm.hook.he_method1(arg=1)
+ assert out == [10, 1]
+ out[:] = []
+
+ hc = pm.subset_hook_caller("he_method1", [plugin1])
+ hc(arg=2)
+ assert out == [20]
+ out[:] = []
+
+ hc = pm.subset_hook_caller("he_method1", [plugin2])
+ hc(arg=2)
+ assert out == [2]
+ out[:] = []
+
+ pm.unregister(plugin1)
+ hc(arg=2)
+ assert out == []
+ out[:] = []
+
+ pm.hook.he_method1(arg=1)
+ assert out == [10]
+
+
+def test_get_hookimpls(pm):
+ class Hooks:
+ @hookspec
+ def he_method1(self, arg):
+ pass
+
+ pm.add_hookspecs(Hooks)
+ assert pm.hook.he_method1.get_hookimpls() == []
+
+ class Plugin1:
+ @hookimpl
+ def he_method1(self, arg):
+ pass
+
+ class Plugin2:
+ @hookimpl
+ def he_method1(self, arg):
+ pass
+
+ class PluginNo:
+ pass
+
+ plugin1, plugin2, plugin3 = Plugin1(), Plugin2(), PluginNo()
+ pm.register(plugin1)
+ pm.register(plugin2)
+ pm.register(plugin3)
+
+ hookimpls = pm.hook.he_method1.get_hookimpls()
+ hook_plugins = [item.plugin for item in hookimpls]
+ assert hook_plugins == [plugin1, plugin2]
+
+
+def test_add_hookspecs_nohooks(pm):
+ with pytest.raises(ValueError):
+ pm.add_hookspecs(10)
+
+
+def test_load_setuptools_instantiation(monkeypatch, pm):
+ class EntryPoint:
+ name = "myname"
+ group = "hello"
+ value = "myname:foo"
+
+ def load(self):
+ class PseudoPlugin:
+ x = 42
+
+ return PseudoPlugin()
+
+ class Distribution:
+ entry_points = (EntryPoint(),)
+
+ dist = Distribution()
+
+ def my_distributions():
+ return (dist,)
+
+ monkeypatch.setattr(importlib_metadata, "distributions", my_distributions)
+ num = pm.load_setuptools_entrypoints("hello")
+ assert num == 1
+ plugin = pm.get_plugin("myname")
+ assert plugin.x == 42
+ ret = pm.list_plugin_distinfo()
+ # poor man's `assert ret == [(plugin, mock.ANY)]`
+ assert len(ret) == 1
+ assert len(ret[0]) == 2
+ assert ret[0][0] == plugin
+ assert ret[0][1]._dist == dist
+ num = pm.load_setuptools_entrypoints("hello")
+ assert num == 0 # no plugin loaded by this call
+
+
+def test_add_tracefuncs(he_pm):
+ out = []
+
+ class api1:
+ @hookimpl
+ def he_method1(self):
+ out.append("he_method1-api1")
+
+ class api2:
+ @hookimpl
+ def he_method1(self):
+ out.append("he_method1-api2")
+
+ he_pm.register(api1())
+ he_pm.register(api2())
+
+ def before(hook_name, hook_impls, kwargs):
+ out.append((hook_name, list(hook_impls), kwargs))
+
+ def after(outcome, hook_name, hook_impls, kwargs):
+ out.append((outcome, hook_name, list(hook_impls), kwargs))
+
+ undo = he_pm.add_hookcall_monitoring(before, after)
+
+ he_pm.hook.he_method1(arg=1)
+ assert len(out) == 4
+ assert out[0][0] == "he_method1"
+ assert len(out[0][1]) == 2
+ assert isinstance(out[0][2], dict)
+ assert out[1] == "he_method1-api2"
+ assert out[2] == "he_method1-api1"
+ assert len(out[3]) == 4
+ assert out[3][1] == out[0][0]
+
+ undo()
+ he_pm.hook.he_method1(arg=1)
+ assert len(out) == 4 + 2
+
+
+def test_hook_tracing(he_pm):
+ saveindent = []
+
+ class api1:
+ @hookimpl
+ def he_method1(self):
+ saveindent.append(he_pm.trace.root.indent)
+
+ class api2:
+ @hookimpl
+ def he_method1(self):
+ saveindent.append(he_pm.trace.root.indent)
+ raise ValueError()
+
+ he_pm.register(api1())
+ out = []
+ he_pm.trace.root.setwriter(out.append)
+ undo = he_pm.enable_tracing()
+ try:
+ indent = he_pm.trace.root.indent
+ he_pm.hook.he_method1(arg=1)
+ assert indent == he_pm.trace.root.indent
+ assert len(out) == 2
+ assert "he_method1" in out[0]
+ assert "finish" in out[1]
+
+ out[:] = []
+ he_pm.register(api2())
+
+ with pytest.raises(ValueError):
+ he_pm.hook.he_method1(arg=1)
+ assert he_pm.trace.root.indent == indent
+ assert saveindent[0] > indent
+ finally:
+ undo()
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/testing/test_tracer.py b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_tracer.py
new file mode 100644
index 0000000000..992ec67914
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/testing/test_tracer.py
@@ -0,0 +1,78 @@
+from pluggy._tracing import TagTracer
+
+import pytest
+
+
+@pytest.fixture
+def rootlogger():
+ return TagTracer()
+
+
+def test_simple(rootlogger):
+ log = rootlogger.get("pytest")
+ log("hello")
+ out = []
+ rootlogger.setwriter(out.append)
+ log("world")
+ assert len(out) == 1
+ assert out[0] == "world [pytest]\n"
+ sublog = log.get("collection")
+ sublog("hello")
+ assert out[1] == "hello [pytest:collection]\n"
+
+
+def test_indent(rootlogger):
+ log = rootlogger.get("1")
+ out = []
+ log.root.setwriter(lambda arg: out.append(arg))
+ log("hello")
+ log.root.indent += 1
+ log("line1")
+ log("line2")
+ log.root.indent += 1
+ log("line3")
+ log("line4")
+ log.root.indent -= 1
+ log("line5")
+ log.root.indent -= 1
+ log("last")
+ assert len(out) == 7
+ names = [x[: x.rfind(" [")] for x in out]
+ assert names == [
+ "hello",
+ " line1",
+ " line2",
+ " line3",
+ " line4",
+ " line5",
+ "last",
+ ]
+
+
+def test_readable_output_dictargs(rootlogger):
+
+ out = rootlogger._format_message(["test"], [1])
+ assert out == "1 [test]\n"
+
+ out2 = rootlogger._format_message(["test"], ["test", {"a": 1}])
+ assert out2 == "test [test]\n a: 1\n"
+
+
+def test_setprocessor(rootlogger):
+ log = rootlogger.get("1")
+ log2 = log.get("2")
+ assert log2.tags == tuple("12")
+ out = []
+ rootlogger.setprocessor(tuple("12"), lambda *args: out.append(args))
+ log("not seen")
+ log2("seen")
+ assert len(out) == 1
+ tags, args = out[0]
+ assert "1" in tags
+ assert "2" in tags
+ assert args == ("seen",)
+ l2 = []
+ rootlogger.setprocessor("1:2", lambda *args: l2.append(args))
+ log2("seen")
+ tags, args = l2[0]
+ assert args == ("seen",)
diff --git a/testing/web-platform/tests/tools/third_party/pluggy/tox.ini b/testing/web-platform/tests/tools/third_party/pluggy/tox.ini
new file mode 100644
index 0000000000..97b3eb7792
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pluggy/tox.ini
@@ -0,0 +1,57 @@
+[tox]
+envlist=linting,docs,py{36,37,38,39,py3},py{36,37}-pytest{main}
+
+[testenv]
+commands=
+ {env:_PLUGGY_TOX_CMD:pytest} {posargs}
+ coverage: coverage report -m
+ coverage: coverage xml
+setenv=
+ _PYTEST_SETUP_SKIP_PLUGGY_DEP=1
+ coverage: _PLUGGY_TOX_CMD=coverage run -m pytest
+extras=testing
+deps=
+ coverage: coverage
+ pytestmain: git+https://github.com/pytest-dev/pytest.git@main
+
+[testenv:benchmark]
+commands=pytest {posargs:testing/benchmark.py}
+deps=
+ pytest
+ pytest-benchmark
+
+[testenv:linting]
+skip_install = true
+basepython = python3
+deps = pre-commit
+commands = pre-commit run --all-files --show-diff-on-failure
+
+[testenv:docs]
+deps =
+ sphinx
+ pygments
+commands =
+ sphinx-build -W -b html {toxinidir}/docs {toxinidir}/build/html-docs
+
+[pytest]
+minversion=2.0
+testpaths = testing
+#--pyargs --doctest-modules --ignore=.tox
+addopts=-r a
+filterwarnings =
+ error
+
+[flake8]
+max-line-length=99
+
+[testenv:release]
+description = do a release; requires the version number as a positional argument
+basepython = python3
+skipsdist = True
+usedevelop = True
+passenv = *
+deps =
+ colorama
+ gitpython
+ towncrier
+commands = python scripts/release.py {posargs}
diff --git a/testing/web-platform/tests/tools/third_party/py/.flake8 b/testing/web-platform/tests/tools/third_party/py/.flake8
new file mode 100644
index 0000000000..f9c71a7fbc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/.flake8
@@ -0,0 +1,4 @@
+[flake8]
+max-line-length = 120
+per-file-ignores =
+ **/*.pyi:E252,E301,E302,E305,E501,E701,E704,F401,F811,F821
diff --git a/testing/web-platform/tests/tools/third_party/py/.github/workflows/main.yml b/testing/web-platform/tests/tools/third_party/py/.github/workflows/main.yml
new file mode 100644
index 0000000000..564aa0c531
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/.github/workflows/main.yml
@@ -0,0 +1,66 @@
+name: build
+
+on: [push, pull_request]
+
+jobs:
+ build:
+
+ runs-on: ${{ matrix.os }}
+
+ strategy:
+ fail-fast: false
+ matrix:
+ python: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "pypy3"]
+ os: [ubuntu-latest, windows-latest]
+ include:
+ - python: "2.7"
+ tox_env: "py27-pytest30"
+ - python: "3.5"
+ tox_env: "py35-pytest30"
+ - python: "3.6"
+ tox_env: "py36-pytest30"
+ - python: "3.7"
+ tox_env: "py37-pytest30"
+ - python: "3.8"
+ tox_env: "py38-pytest30"
+ - python: "3.9"
+ tox_env: "py39-pytest30"
+ - python: "pypy3"
+ tox_env: "pypy3-pytest30"
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Set up Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python }}
+ - name: Test
+ run: |
+ pipx run tox -e ${{ matrix.tox_env }}
+
+ deploy:
+
+ if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
+
+ runs-on: ubuntu-latest
+
+ needs: build
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Set up Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: "3.7"
+ - name: Install wheel
+ run: |
+ python -m pip install --upgrade pip
+ pip install wheel
+ - name: Build package
+ run: |
+ python setup.py sdist bdist_wheel
+ - name: Publish package to PyPI
+ uses: pypa/gh-action-pypi-publish@master
+ with:
+ user: __token__
+ password: ${{ secrets.pypi_token }}
diff --git a/testing/web-platform/tests/tools/third_party/py/.gitignore b/testing/web-platform/tests/tools/third_party/py/.gitignore
new file mode 100644
index 0000000000..fa936f1596
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/.gitignore
@@ -0,0 +1,15 @@
+
+.cache/
+.tox/
+__pycache__/
+.mypy_cache/
+
+*.pyc
+*.pyo
+
+*.egg-info
+.eggs/
+
+dist/*
+/py/_version.py
+.pytest_cache/
diff --git a/testing/web-platform/tests/tools/third_party/py/AUTHORS b/testing/web-platform/tests/tools/third_party/py/AUTHORS
new file mode 100644
index 0000000000..9c5dda9ceb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/AUTHORS
@@ -0,0 +1,25 @@
+Holger Krekel, holger at merlinux eu
+Benjamin Peterson, benjamin at python org
+Ronny Pfannschmidt, Ronny.Pfannschmidt at gmx de
+Guido Wesdorp, johnny at johnnydebris net
+Samuele Pedroni, pedronis at openend se
+Carl Friedrich Bolz, cfbolz at gmx de
+Armin Rigo, arigo at tunes org
+Maciek Fijalkowski, fijal at genesilico pl
+Brian Dorsey, briandorsey at gmail com
+Floris Bruynooghe, flub at devork be
+merlinux GmbH, Germany, office at merlinux eu
+
+Contributors include::
+
+Ross Lawley
+Ralf Schmitt
+Chris Lamb
+Harald Armin Massa
+Martijn Faassen
+Ian Bicking
+Jan Balster
+Grig Gheorghiu
+Bob Ippolito
+Christian Tismer
+Wim Glenn
diff --git a/testing/web-platform/tests/tools/third_party/py/CHANGELOG.rst b/testing/web-platform/tests/tools/third_party/py/CHANGELOG.rst
new file mode 100644
index 0000000000..47c6fdb7a1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/CHANGELOG.rst
@@ -0,0 +1,1236 @@
+1.11.0 (2021-11-04)
+===================
+
+- Support Python 3.11
+- Support ``NO_COLOR`` environment variable
+- Update vendored apipkg: 1.5 => 2.0
+
+1.10.0 (2020-12-12)
+===================
+
+- Fix a regular expression DoS vulnerability in the py.path.svnwc SVN blame functionality (CVE-2020-29651)
+- Update vendored apipkg: 1.4 => 1.5
+- Update vendored iniconfig: 1.0.0 => 1.1.1
+
+1.9.0 (2020-06-24)
+==================
+
+- Add type annotation stubs for the following modules:
+
+ * ``py.error``
+ * ``py.iniconfig``
+ * ``py.path`` (not including SVN paths)
+ * ``py.io``
+ * ``py.xml``
+
+ There are no plans to type other modules at this time.
+
+ The type annotations are provided in external .pyi files, not inline in the
+ code, and may therefore contain small errors or omissions. If you use ``py``
+ in conjunction with a type checker, and encounter any type errors you believe
+  should be accepted, please report them in an issue.
+
+1.8.2 (2020-06-15)
+==================
+
+- On Windows, ``py.path.local``s which differ only in case now have the same
+ Python hash value. Previously, such paths were considered equal but had
+ different hashes, which is not allowed and breaks the assumptions made by
+ dicts, sets and other users of hashes.
+
+1.8.1 (2019-12-27)
+==================
+
+- Handle ``FileNotFoundError`` when trying to import pathlib in ``path.common``
+ on Python 3.4 (#207).
+
+- ``py.path.local.samefile`` now works correctly in Python 3 on Windows when dealing with symlinks.
+
+1.8.0 (2019-02-21)
+==================
+
+- add ``"importlib"`` pyimport mode for python3.5+, allowing unimportable test suites
+ to contain identically named modules.
+
+- fix ``LocalPath.as_cwd()`` not calling ``os.chdir()`` with ``None``, when
+ being invoked from a non-existing directory.
+
+
+1.7.0 (2018-10-11)
+==================
+
+- fix #174: use ``shutil.get_terminal_size()`` in Python 3.3+ to determine the size of the
+ terminal, which produces more accurate results than the previous method.
+
+- fix pytest-dev/pytest#2042: introduce new ``PY_IGNORE_IMPORTMISMATCH`` environment variable
+ that suppresses ``ImportMismatchError`` exceptions when set to ``1``.
+
+
+1.6.0 (2018-08-27)
+==================
+
+- add ``TerminalWriter.width_of_current_line`` (i18n version of
+ ``TerminalWriter.chars_on_current_line``), a read-only property
+ that tracks how wide the current line is, attempting to take
+ into account international characters in the calculation.
+
+1.5.4 (2018-06-27)
+==================
+
+- fix pytest-dev/pytest#3451: don't make assumptions about fs case sensitivity
+ in ``make_numbered_dir``.
+
+1.5.3
+=====
+
+- fix #179: ensure we can support 'from py.error import ...'
+
+1.5.2
+=====
+
+- fix #169, #170: error importing py.log on Windows: no module named ``syslog``.
+
+1.5.1
+=====
+
+- fix #167 - prevent pip from installing py in unsupported Python versions.
+
+1.5.0
+=====
+
+NOTE: **this release has been removed from PyPI** due to missing package
+metadata which caused a number of problems for py26 and py33 users.
+This issue was fixed in the 1.5.1 release.
+
+- python 2.6 and 3.3 are no longer supported
+- deprecate py.std and remove all internal uses
+- fix #73 turn py.error into an actual module
+- path join to / no longer produces leading double slashes
+- fix #82 - remove unsupportable aliases
+- fix python37 compatibility of path.sysfind on windows by correctly replacing vars
+- turn iniconfig and apipkg into vendored packages and ease de-vendoring for distributions
+- fix #68 remove invalid py.test.ensuretemp references
+- fix #25 - deprecate path.listdir(sort=callable)
+- add ``TerminalWriter.chars_on_current_line`` read-only property that tracks how many characters
+ have been written to the current line.
+
+1.4.34
+====================================================================
+
+- fix issue119 / pytest issue708 where tmpdir may fail to make numbered directories
+ when the filesystem is case-insensitive.
+
+1.4.33
+====================================================================
+
+- avoid imports in calls to py.path.local().fnmatch(). Thanks Andreas Pelme for
+ the PR.
+
+- fix issue106: Naive unicode encoding when calling fspath() in python2. Thanks Tiago Nobrega for the PR.
+
+- fix issue110: unittest.TestCase.assertWarns fails with py imported.
+
+1.4.32
+====================================================================
+
+- fix issue70: added ability to copy all stat info in py.path.local.copy.
+
+- make TerminalWriter.fullwidth a property. This results in the correct
+ value when the terminal gets resized.
+
+- update supported html tags to include recent additions.
+ Thanks Denis Afonso for the PR.
+
+- Remove internal code in ``Source.compile`` meant to support earlier Python 3 versions that produced the side effect
+ of leaving ``None`` in ``sys.modules`` when called (see pytest-dev/pytest#2103).
+ Thanks Bruno Oliveira for the PR.
+
+1.4.31
+==================================================
+
+- fix local().copy(dest, mode=True) to also work
+ with unicode.
+
+- pass better error message with svn EEXIST paths
+
+1.4.30
+==================================================
+
+- fix issue68 an assert with a multiline list comprehension
+ was not reported correctly. Thanks Henrik Heibuerger.
+
+
+1.4.29
+==================================================
+
+- fix issue55: revert a change to the statement finding algorithm
+ which is used by pytest for generating tracebacks.
+ Thanks Daniel Hahler for initial analysis.
+
+- fix pytest issue254 for when traceback rendering can't
+ find valid source code. Thanks Ionel Cristian Maries.
+
+
+1.4.28
+==================================================
+
+- fix issue64 -- dirpath regression when "abs=True" is passed.
+ Thanks Gilles Dartiguelongue.
+
+1.4.27
+==================================================
+
+- fix issue59: point to new repo site
+
+- allow a new ensuresyspath="append" mode for py.path.local.pyimport()
+  so that a necessary import path is appended instead of prepended to
+ sys.path
+
+- strike undocumented, untested argument to py.path.local.pypkgpath
+
+- speed up py.path.local.dirpath by a factor of 10
+
+1.4.26
+==================================================
+
+- avoid calling normpath twice in py.path.local
+
+- py.builtin._reraise properly reraises under Python3 now.
+
+- fix issue53 - remove module index, thanks jenisys.
+
+- allow posix path separators when "fnmatch" is called.
+ Thanks Christian Long for the complete PR.
+
+1.4.25
+==================================================
+
+- fix issue52: vaguely fix py25 compat of py.path.local (it's not
+ officially supported), also fix docs
+
+- fix pytest issue 589: when checking if we have a recursion error
+ check for the specific "maximum recursion depth" text of the exception.
+
+1.4.24
+==================================================
+
+- Fix retrieving source when an else: line has an other statement on
+ the same line.
+
+- add localpath read_text/write_text/read_bytes/write_bytes methods
+ as shortcuts and clearer bytes/text interfaces for read/write.
+ Adapted from a PR from Paul Moore.
+
+
+1.4.23
+==================================================
+
+- use newer apipkg version which makes attribute access on
+ alias modules resolve to None rather than an ImportError.
+ This helps with code that uses inspect.getframeinfo()
+ on py34 which causes a complete walk on sys.modules
+ thus triggering the alias module to resolve and blowing
+ up with ImportError. The negative side is that something
+ like "py.test.X" will now result in None instead of "importerror: pytest"
+ if pytest is not installed. But you shouldn't import "py.test"
+ anyway anymore.
+
+- adapt one svn test to only check for any exception instead
+ of specific ones because different svn versions cause different
+ errors and we don't care.
+
+
+1.4.22
+==================================================
+
+- refactor class-level registry on ForkedFunc child start/finish
+ event to become instance based (i.e. passed into the constructor)
+
+1.4.21
+==================================================
+
+- ForkedFunc now has class-level register_on_start/on_exit()
+ methods to allow adding information in the boxed process.
+ Thanks Marc Schlaich.
+
+- ForkedFunc in the child opens in "auto-flush" mode for
+ stdout/stderr so that when a subprocess dies you can see
+ its output even if it didn't flush itself.
+
+- refactor traceback generation in light of pytest issue 364
+  (shortening tracebacks). You can now set a new traceback style
+  on a per-entry basis such that a caller can force entries to be
+  displayed as short or long entries.
+
+- win32: py.path.local.sysfind(name) will preferably return files with
+ extensions so that if "X" and "X.bat" or "X.exe" is on the PATH,
+ one of the latter two will be returned.
+
+1.4.20
+==================================================
+
+- ignore unicode decode errors in xmlescape. Thanks Anatoly Bubenkoff.
+
+- on python2 modify traceback.format_exception_only to match python3
+ behaviour, namely trying to print unicode for Exception instances
+
+- use a safer way for serializing exception reports (helps to fix
+ pytest issue413)
+
+Changes between 1.4.18 and 1.4.19
+==================================================
+
+- merge in apipkg fixes
+
+- some micro-optimizations in py/_code/code.py for speeding
+ up pytest runs. Thanks Alex Gaynor for initiative.
+
+- check PY_COLORS=1 or PY_COLORS=0 to force coloring/not-coloring
+ for py.io.TerminalWriter() independently from capabilities
+ of the output file. Thanks Marc Abramowitz for the PR.
+
+- some fixes to unicode handling in assertion handling.
+ Thanks for the PR to Floris Bruynooghe. (This helps
+ to fix pytest issue 319).
+
+- depend on setuptools presence, remove distribute_setup
+
+Changes between 1.4.17 and 1.4.18
+==================================================
+
+- introduce path.ensure_dir() as a synonym for ensure(..., dir=1)
+
+- some unicode/python3 related fixes wrt to path manipulations
+ (if you start passing unicode particular in py2 you might
+ still get problems, though)
+
+Changes between 1.4.16 and 1.4.17
+==================================================
+
+- make py.io.TerminalWriter() prefer colorama if it is available
+ and avoid empty lines when separator-lines are printed by
+ being defensive and reducing the working terminalwidth by 1
+
+- introduce optional "expanduser" argument to py.path.local
+  so that local("~", expanduser=True) gives the home
+  directory of the current user.
+
+Changes between 1.4.15 and 1.4.16
+==================================================
+
+- fix issue35 - define __gt__ ordering between a local path
+ and strings
+
+- fix issue36 - make chdir() work even if os.getcwd() fails.
+
+- add path.exists/isdir/isfile/islink shortcuts
+
+- introduce local path.as_cwd() context manager.
+
+- introduce p.write(ensure=1) and p.open(ensure=1)
+  where ensure triggers creation of necessary parent
+ dirs.
+
+
+Changes between 1.4.14 and 1.4.15
+==================================================
+
+- considerably speed up some common calling patterns with the
+  LocalPath.listdir()/join/check/stat functions.
+
+- fix an edge case with fnmatch where a glob style pattern appeared
+ in an absolute path.
+
+Changes between 1.4.13 and 1.4.14
+==================================================
+
+- fix dupfile to work with files that don't
+ carry a mode. Thanks Jason R. Coombs.
+
+Changes between 1.4.12 and 1.4.13
+==================================================
+
+- fix getting statementrange/compiling a file ending
+ in a comment line without newline (on python2.5)
+- for local paths you can pass "mode=True" to a copy()
+ in order to copy permission bits (underlying mechanism
+ is using shutil.copymode)
+- add paths arguments to py.path.local.sysfind to restrict
+  search to the directories in the path.
+- add isdir/isfile/islink to path.stat() objects allowing to perform
+ multiple checks without calling out multiple times
+- drop py.path.local.__new__ in favour of a simpler __init__
+- iniconfig: allow "name:value" settings in config files, no space after
+ "name" required
+- fix issue 27 - NameError in unlikely untested case of saferepr
+
+
+Changes between 1.4.11 and 1.4.12
+==================================================
+
+- fix python2.4 support - for pre-AST interpreters re-introduce
+ old way to find statements in exceptions (closes pytest issue 209)
+- add tox.ini to distribution
+- fix issue23 - print *,** args information in tracebacks,
+ thanks Manuel Jacob
+
+
+Changes between 1.4.10 and 1.4.11
+==================================================
+
+- use _ast to determine statement ranges when printing tracebacks -
+ avoiding multi-second delays on some large test modules
+- fix an internal test to not use class-denoted pytest_funcarg__
+- fix a doc link to bug tracker
+- try to make terminal.write() printing more robust against
+ unicodeencode/decode problems, amend according test
+- introduce py.builtin.text and py.builtin.bytes
+ to point to respective str/unicode (py2) and bytes/str (py3) types
+- fix error handling on win32/py33 for ENODIR
+
+Changes between 1.4.9 and 1.4.10
+==================================================
+
+- terminalwriter: default to encode to UTF8 if no encoding is defined
+ on the output stream
+- issue22: improve heuristic for finding the statementrange in exceptions
+
+Changes between 1.4.8 and 1.4.9
+==================================================
+
+- fix bug of path.visit() which would not recognize glob-style patterns
+ for the "rec" recursion argument
+- changed iniconfig parsing to better conform, now the chars ";"
+ and "#" only mark a comment at the stripped start of a line
+- include recent apipkg-1.2
+- change internal terminalwriter.line/reline logic to more nicely
+ support file spinners
+
+Changes between 1.4.7 and 1.4.8
+==================================================
+
+- fix issue 13 - correct handling of the tag name object in xmlgen
+- fix issue 14 - support raw attribute values in xmlgen
+- fix windows terminalwriter printing/re-line problem
+- update distribute_setup.py to 0.6.27
+
+Changes between 1.4.6 and 1.4.7
+==================================================
+
+- fix issue11 - own test failure with python3.3 / Thanks Benjamin Peterson
+- help fix pytest issue 102
+
+Changes between 1.4.5 and 1.4.6
+==================================================
+
+- help to fix pytest issue99: unify output of
+ ExceptionInfo.getrepr(style="native") with ...(style="long")
+- fix issue7: source.getstatementrange() now raises proper error
+ if no valid statement can be found
+- fix issue8: fix code and tests of svnurl/svnwc to work on subversion 1.7 -
+  note that path.status(updates=1) will not work properly because svn-1.7's
+  status --xml output is broken.
+- make source.getstatementrange() more resilient about non-python code frames
+  (as seen from jinja2)
+- make traceback recursion detection more resilient
+  against the eval magic of a decorator library
+- iniconfig: add support for ; as comment starter
+- properly handle lists in xmlgen on python3
+- normalize py.code.getfslineno(obj) to always return a (string, int) tuple
+ defaulting to ("", -1) respectively if no source code can be found for obj.
+
+Changes between 1.4.4 and 1.4.5
+==================================================
+
+- improve some unicode handling in terminalwriter and capturing
+ (used by pytest)
+
+Changes between 1.4.3 and 1.4.4
+==================================================
+
+- a few fixes and assertion related refinements for pytest-2.1
+- guard py.code.Code and getfslineno against bogus input
+ and make py.code.Code objects for object instance
+ by looking up their __call__ function.
+- make exception presentation robust against invalid current cwd
+
+Changes between 1.4.2 and 1.4.3
+==================================================
+
+- fix terminal coloring issue for skipped tests (thanks Amaury)
+- fix issue4 - large calls to ansi_print (thanks Amaury)
+
+Changes between 1.4.1 and 1.4.2
+==================================================
+
+- fix (pytest) issue23 - tmpdir argument now works on Python3.2 and WindowsXP
+ (which apparently starts to offer os.symlink now)
+
+- better error message for syntax errors from compiled code
+
+- small fix to better deal with (un-)colored terminal output on windows
+
+Changes between 1.4.0 and 1.4.1
+==================================================
+
+- fix issue1 - py.error.* classes to be pickleable
+
+- fix issue2 - on windows32 use PATHEXT as the list of potential
+  extensions to find binaries with py.path.local.sysfind(commandname)
+
+- fix (pytest-) issue10 and refine assertion reinterpretation
+ to avoid breaking if the __nonzero__ of an object fails
+
+- fix (pytest-) issue17 where python3 does not like "import *"
+ leading to misrepresentation of import-errors in test modules
+
+- fix py.error.* attribute pypy access issue
+
+- allow path.samefile(arg) to succeed when arg is a relative filename
+
+- fix (pytest-) issue20 path.samefile(relpath) works as expected now
+
+- fix (pytest-) issue8 len(long_list) now shows the length of the list
+
+Changes between 1.3.4 and 1.4.0
+==================================================
+
+- py.test was moved to a separate "pytest" package. What remains is
+ a stub hook which will proxy ``import py.test`` to ``pytest``.
+- all command line tools ("py.cleanup/lookup/countloc/..." moved
+ to "pycmd" package)
+- removed the old and deprecated "py.magic" namespace
+- use apipkg-1.1 and make py.apipkg.initpkg|ApiModule available
+- add py.iniconfig module for brain-dead easy ini-config file parsing
+- introduce py.builtin.any()
+- path objects have a .dirname attribute now (equivalent to
+ os.path.dirname(path))
+- path.visit() accepts breadthfirst (bf) and sort options
+- remove deprecated py.compat namespace
+
+Changes between 1.3.3 and 1.3.4
+==================================================
+
+- fix issue111: improve install documentation for windows
+- fix issue119: fix custom collectability of __init__.py as a module
+- fix issue116: --doctestmodules work with __init__.py files as well
+- fix issue115: unify internal exception passthrough/catching/GeneratorExit
+- fix issue118: new --tb=native for presenting cpython-standard exceptions
+
+Changes between 1.3.2 and 1.3.3
+==================================================
+
+- fix issue113: assertion representation problem with triple-quoted strings
+ (and possibly other cases)
+- make conftest loading detect that a conftest file with the same
+ content was already loaded, avoids surprises in nested directory structures
+ which can be produced e.g. by Hudson. It probably removes the need to use
+ --confcutdir in most cases.
+- fix terminal coloring for win32
+ (thanks Michael Foord for reporting)
+- fix weirdness: make terminal width detection work on stdout instead of stdin
+ (thanks Armin Ronacher for reporting)
+- remove trailing whitespace in all py/text distribution files
+
+Changes between 1.3.1 and 1.3.2
+==================================================
+
+New features
+++++++++++++++++++
+
+- fix issue103: introduce py.test.raises as context manager, examples::
+
+ with py.test.raises(ZeroDivisionError):
+ x = 0
+ 1 / x
+
+ with py.test.raises(RuntimeError) as excinfo:
+ call_something()
+
+ # you may do extra checks on excinfo.value|type|traceback here
+
+ (thanks Ronny Pfannschmidt)
+
+- Funcarg factories can now dynamically apply a marker to a
+ test invocation. This is for example useful if a factory
+ provides parameters to a test which are expected-to-fail::
+
+ def pytest_funcarg__arg(request):
+ request.applymarker(py.test.mark.xfail(reason="flaky config"))
+ ...
+
+ def test_function(arg):
+ ...
+
+- improved error reporting on collection and import errors. This makes
+ use of a more general mechanism, namely that for custom test item/collect
+ nodes ``node.repr_failure(excinfo)`` is now uniformly called so that you can
+ override it to return a string error representation of your choice
+ which is going to be reported as a (red) string.
+
+- introduce '--junitprefix=STR' option to prepend a prefix
+ to all reports in the junitxml file.
+
+Bug fixes / Maintenance
+++++++++++++++++++++++++++
+
+- make tests and the ``pytest_recwarn`` plugin in particular fully compatible
+  with Python2.7 (if you use the ``recwarn`` funcarg, warnings will be enabled so that
+ you can properly check for their existence in a cross-python manner).
+- refine --pdb: ignore xfailed tests, unify its TB-reporting and
+ don't display failures again at the end.
+- fix assertion interpretation with the ** operator (thanks Benjamin Peterson)
+- fix issue105 assignment on the same line as a failing assertion (thanks Benjamin Peterson)
+- fix issue104 proper escaping for test names in junitxml plugin (thanks anonymous)
+- fix issue57 -f|--looponfail to work with xpassing tests (thanks Ronny)
+- fix issue92 collectonly reporter and --pastebin (thanks Benjamin Peterson)
+- fix py.code.compile(source) to generate unique filenames
+- fix assertion re-interp problems on PyPy, by deferring code
+ compilation to the (overridable) Frame.eval class. (thanks Amaury Forgeot)
+- fix py.path.local.pyimport() to work with directories
+- streamline py.path.local.mkdtemp implementation and usage
+- don't print empty lines when showing junitxml-filename
+- add optional boolean ignore_errors parameter to py.path.local.remove
+- fix terminal writing on win32/python2.4
+- py.process.cmdexec() now tries harder to return properly encoded unicode objects
+ on all python versions
+- install plain py.test/py.which scripts also for Jython, this helps to
+ get canonical script paths in virtualenv situations
+- make path.bestrelpath(path) return ".", note that when calling
+ X.bestrelpath the assumption is that X is a directory.
+- make initial conftest discovery ignore "--" prefixed arguments
+- fix resultlog plugin when used in an multicpu/multihost xdist situation
+ (thanks Jakub Gustak)
+- perform distributed testing related reporting in the xdist-plugin
+ rather than having dist-related code in the generic py.test
+ distribution
+- fix homedir detection on Windows
+- ship distribute_setup.py version 0.6.13
+
+Changes between 1.3.0 and 1.3.1
+==================================================
+
+New features
+++++++++++++++++++
+
+- issue91: introduce new py.test.xfail(reason) helper
+ to imperatively mark a test as expected to fail. Can
+ be used from within setup and test functions. This is
+ useful especially for parametrized tests when certain
+ configurations are expected-to-fail. In this case the
+ declarative approach with the @py.test.mark.xfail cannot
+ be used as it would mark all configurations as xfail.
+
+- issue102: introduce new --maxfail=NUM option to stop
+ test runs after NUM failures. This is a generalization
+ of the '-x' or '--exitfirst' option which is now equivalent
+ to '--maxfail=1'. Both '-x' and '--maxfail' will
+ now also print a line near the end indicating the Interruption.
+
+- issue89: allow py.test.mark decorators to be used on classes
+ (class decorators were introduced with python2.6) and
+ also allow to have multiple markers applied at class/module level
+ by specifying a list.
+
+- improve and refine letter reporting in the progress bar:
+ . pass
+ f failed test
+ s skipped tests (reminder: use for dependency/platform mismatch only)
+ x xfailed test (test that was expected to fail)
+ X xpassed test (test that was expected to fail but passed)
+
+ You can use any combination of 'fsxX' with the '-r' extended
+ reporting option. The xfail/xpass results will show up as
+ skipped tests in the junitxml output - which also fixes
+ issue99.
+
+- make py.test.cmdline.main() return the exitstatus instead of raising
+ SystemExit and also allow it to be called multiple times. This of
+  course requires that your application and tests are properly torn
+ down and don't have global state.
+
+Fixes / Maintenance
+++++++++++++++++++++++
+
+- improved traceback presentation:
+ - improved and unified reporting for "--tb=short" option
+  - Errors during test module imports are much shorter (using --tb=short style)
+ - raises shows shorter more relevant tracebacks
+ - --fulltrace now more systematically makes traces longer / inhibits cutting
+
+- improve support for raises and other dynamically compiled code by
+ manipulating python's linecache.cache instead of the previous
+ rather hacky way of creating custom code objects. This makes
+  it seamlessly work on Jython and PyPy where it previously didn't.
+
+- fix issue96: make capturing more resilient against Control-C
+ interruptions (involved somewhat substantial refactoring
+ to the underlying capturing functionality to avoid race
+ conditions).
+
+- fix chaining of conditional skipif/xfail decorators - so it works now
+ as expected to use multiple @py.test.mark.skipif(condition) decorators,
+  including specific reporting which of the conditions led to skipping.
+
+- fix issue95: late-import zlib so that it's not required
+ for general py.test startup.
+
+- fix issue94: make reporting more robust against bogus source code
+ (and internally be more careful when presenting unexpected byte sequences)
+
+
+Changes between 1.2.1 and 1.3.0
+==================================================
+
+- deprecate --report option in favour of a new shorter and easier to
+ remember -r option: it takes a string argument consisting of any
+ combination of 'xfsX' characters. They relate to the single chars
+ you see during the dotted progress printing and will print an extra line
+ per test at the end of the test run. This extra line indicates the exact
+  position or test ID that you can directly paste to the py.test cmdline in order
+ to re-run a particular test.
+
+- allow external plugins to register new hooks via the new
+ pytest_addhooks(pluginmanager) hook. The new release of
+ the pytest-xdist plugin for distributed and looponfailing
+ testing requires this feature.
+
+- add a new pytest_ignore_collect(path, config) hook to allow projects and
+ plugins to define exclusion behaviour for their directory structure -
+ for example you may define in a conftest.py this method::
+
+ def pytest_ignore_collect(path):
+ return path.check(link=1)
+
+ to prevent even a collection try of any tests in symlinked dirs.
+
+- new pytest_pycollect_makemodule(path, parent) hook for
+ allowing customization of the Module collection object for a
+ matching test module.
+
+- extend and refine xfail mechanism:
+ ``@py.test.mark.xfail(run=False)`` do not run the decorated test
+ ``@py.test.mark.xfail(reason="...")`` prints the reason string in xfail summaries
+  specifying ``--runxfail`` on command line virtually ignores xfail markers
+
+- expose (previously internal) commonly useful methods:
+  py.io.get_terminal_width() -> return terminal width
+ py.io.ansi_print(...) -> print colored/bold text on linux/win32
+ py.io.saferepr(obj) -> return limited representation string
+
+- expose test outcome related exceptions as py.test.skip.Exception,
+ py.test.raises.Exception etc., useful mostly for plugins
+ doing special outcome interpretation/tweaking
+
+- (issue85) fix junitxml plugin to handle tests with non-ascii output
+
+- fix/refine python3 compatibility (thanks Benjamin Peterson)
+
+- fixes for making the jython/win32 combination work, note however:
+ jython2.5.1/win32 does not provide a command line launcher, see
+ http://bugs.jython.org/issue1491 . See pylib install documentation
+ for how to work around.
+
+- fixes for handling of unicode exception values and unprintable objects
+
+- (issue87) fix unboundlocal error in assertionold code
+
+- (issue86) improve documentation for looponfailing
+
+- refine IO capturing: stdin-redirect pseudo-file now has a NOP close() method
+
+- ship distribute_setup.py version 0.6.10
+
+- added links to the new capturelog and coverage plugins
+
+
+Changes between 1.2.1 and 1.2.0
+=====================================
+
+- refined usage and options for "py.cleanup"::
+
+ py.cleanup # remove "*.pyc" and "*$py.class" (jython) files
+ py.cleanup -e .swp -e .cache # also remove files with these extensions
+ py.cleanup -s # remove "build" and "dist" directory next to setup.py files
+ py.cleanup -d # also remove empty directories
+ py.cleanup -a # synonym for "-s -d -e 'pip-log.txt'"
+ py.cleanup -n # dry run, only show what would be removed
+
+- add a new option "py.test --funcargs" which shows available funcargs
+ and their help strings (docstrings on their respective factory function)
+ for a given test path
+
+- display a short and concise traceback if a funcarg lookup fails
+
+- early-load "conftest.py" files in non-dot first-level sub directories.
+ allows to conveniently keep and access test-related options in a ``test``
+ subdir and still add command line options.
+
+- fix issue67: new super-short traceback-printing option: "--tb=line" will print a single line for each failing (python) test indicating its filename, lineno and the failure value
+
+- fix issue78: always call python-level teardown functions even if the
+ according setup failed. This includes refinements for calling setup_module/class functions
+ which will now only be called once instead of the previous behaviour where they'd be called
+ multiple times if they raise an exception (including a Skipped exception). Any exception
+  will be recorded and associated with all tests in the according module/class scope.
+
+- fix issue63: assume <40 columns to be a bogus terminal width, default to 80
+
+- fix pdb debugging to be in the correct frame on raises-related errors
+
+- update apipkg.py to fix an issue where recursive imports might
+ unnecessarily break importing
+
+- fix plugin links
+
+Changes between 1.2 and 1.1.1
+=====================================
+
+- moved dist/looponfailing from py.test core into a new
+ separately released pytest-xdist plugin.
+
+- new junitxml plugin: --junitxml=path will generate a junit style xml file
+ which is processable e.g. by the Hudson CI system.
+
+- new option: --genscript=path will generate a standalone py.test script
+ which will not need any libraries installed. thanks to Ralf Schmitt.
+
+- new option: --ignore will prevent specified path from collection.
+ Can be specified multiple times.
+
+- new option: --confcutdir=dir will make py.test only consider conftest
+ files that are relative to the specified dir.
+
+- new funcarg: "pytestconfig" is the pytest config object for access
+ to command line args and can now be easily used in a test.
+
+- install 'py.test' and `py.which` with a ``-$VERSION`` suffix to
+ disambiguate between Python3, python2.X, Jython and PyPy installed versions.
+
+- new "pytestconfig" funcarg allows access to test config object
+
+- new "pytest_report_header" hook can return additional lines
+ to be displayed at the header of a test run.
+
+- (experimental) allow "py.test path::name1::name2::..." for pointing
+ to a test within a test collection directly. This might eventually
+ evolve as a full substitute to "-k" specifications.
+
+- streamlined plugin loading: order is now as documented in
+ customize.html: setuptools, ENV, commandline, conftest.
+  also setuptools entry point names are turned to canonical names ("pytest_*")
+
+- automatically skip tests that need 'capfd' but have no os.dup
+
+- allow pytest_generate_tests to be defined in classes as well
+
+- deprecate usage of 'disabled' attribute in favour of pytestmark
+- deprecate definition of Directory, Module, Class and Function nodes
+ in conftest.py files. Use pytest collect hooks instead.
+
+- collection/item node specific runtest/collect hooks are only called exactly
+ on matching conftest.py files, i.e. ones which are exactly below
+ the filesystem path of an item
+
+- change: the first pytest_collect_directory hook to return something
+  will now prevent further hooks from being called.
+
+- change: figleaf plugin now requires --figleaf to run. Also
+ change its long command line options to be a bit shorter (see py.test -h).
+
+- change: pytest doctest plugin is now enabled by default and has a
+ new option --doctest-glob to set a pattern for file matches.
+
+- change: remove internal py._* helper vars, only keep py._pydir
+
+- robustify capturing to survive if custom pytest_runtest_setup
+ code failed and prevented the capturing setup code from running.
+
+- make py.test.* helpers provided by default plugins visible early -
+ works transparently both for pydoc and for interactive sessions
+ which will regularly see e.g. py.test.mark and py.test.importorskip.
+
+- simplify internal plugin manager machinery
+- simplify internal collection tree by introducing a RootCollector node
+
+- fix assert reinterpretation that sees a call containing "keyword=..."
+
+- fix issue66: invoke pytest_sessionstart and pytest_sessionfinish
+ hooks on slaves during dist-testing, report module/session teardown
+ hooks correctly.
+
+- fix issue65: properly handle dist-testing if no
+ execnet/py lib installed remotely.
+
+- skip some install-tests if no execnet is available
+
+- fix docs, fix internal bin/ script generation
+
+
+Changes between 1.1.1 and 1.1.0
+=====================================
+
+- introduce automatic plugin registration via 'pytest11'
+ entrypoints via setuptools' pkg_resources.iter_entry_points
+
+- fix py.test dist-testing to work with execnet >= 1.0.0b4
+
+- re-introduce py.test.cmdline.main() for better backward compatibility
+
+- svn paths: fix a bug with path.check(versioned=True) for svn paths,
+ allow '%' in svn paths, make svnwc.update() default to interactive mode
+ like in 1.0.x and add svnwc.update(interactive=False) to inhibit interaction.
+
+- refine distributed tarball to contain test and no pyc files
+
+- try harder to have deprecation warnings for py.compat.* accesses
+ report a correct location
+
+Changes between 1.1.0 and 1.0.2
+=====================================
+
+* adjust and improve docs
+
+* remove py.rest tool and internal namespace - it was
+ never really advertised and can still be used with
+ the old release if needed. If there is interest
+  it could be revived into its own tool, I guess.
+
+* fix issue48 and issue59: raise an Error if the module
+ from an imported test file does not seem to come from
+ the filepath - avoids "same-name" confusion that has
+ been reported repeatedly
+
+* merged Ronny's nose-compatibility hacks: now
+ nose-style setup_module() and setup() functions are
+ supported
+
+* introduce generalized py.test.mark function marking
+
+* reshuffle / refine command line grouping
+
+* deprecate parser.addgroup in favour of getgroup which creates option group
+
+* add --report command line option that allows to control showing of skipped/xfailed sections
+
+* generalized skipping: a new way to mark python functions with skipif or xfail
+ at function, class and modules level based on platform or sys-module attributes.
+
+* extend py.test.mark decorator to allow for positional args
+
+* introduce and test "py.cleanup -d" to remove empty directories
+
+* fix issue #59 - robustify unittest test collection
+
+* make bpython/help interaction work by adding an __all__ attribute
+ to ApiModule, cleanup initpkg
+
+* use MIT license for pylib, add some contributors
+
+* remove py.execnet code and substitute all usages with 'execnet' proper
+
+* fix issue50 - cached_setup now caches more in line with expectations
+ for test functions with multiple arguments.
+
+* merge Jarko's fixes, issue #45 and #46
+
+* add the ability to specify a path for py.lookup to search in
+
+* fix a funcarg cached_setup bug probably only occurring
+ in distributed testing and "module" scope with teardown.
+
+* many fixes and changes for making the code base python3 compatible,
+ many thanks to Benjamin Peterson for helping with this.
+
+* consolidate builtins implementation to be compatible with >=2.3,
+ add helpers to ease keeping 2 and 3k compatible code
+
+* deprecate py.compat.doctest|subprocess|textwrap|optparse
+
+* deprecate py.magic.autopath, remove py/magic directory
+
+* move pytest assertion handling to py/code and a pytest_assertion
+ plugin, add "--no-assert" option, deprecate py.magic namespaces
+ in favour of (less) py.code ones.
+
+* consolidate and cleanup py/code classes and files
+
+* cleanup py/misc, move tests to bin-for-dist
+
+* introduce delattr/delitem/delenv methods to py.test's monkeypatch funcarg
+
+* consolidate py.log implementation, remove old approach.
+
+* introduce py.io.TextIO and py.io.BytesIO for distinguishing between
+ text/unicode and byte-streams (uses underlying standard lib io.*
+ if available)
+
+* make py.unittest_convert helper script available which converts "unittest.py"
+ style files into the simpler assert/direct-test-classes py.test/nosetests
+ style. The script was written by Laura Creighton.
+
+* simplified internal localpath implementation
+
+Changes between 1.0.1 and 1.0.2
+=====================================
+
+* fixing packaging issues, triggered by fedora redhat packaging,
+ also added doc, examples and contrib dirs to the tarball.
+
+* added a documentation link to the new django plugin.
+
+Changes between 1.0.0 and 1.0.1
+=====================================
+
+* added a 'pytest_nose' plugin which handles nose.SkipTest,
+ nose-style function/method/generator setup/teardown and
+ tries to report functions correctly.
+
+* capturing of unicode writes or encoded strings to sys.stdout/err
+ work better, also terminalwriting was adapted and somewhat
+ unified between windows and linux.
+
+* improved documentation layout and content a lot
+
+* added a "--help-config" option to show conftest.py / ENV-var names for
+ all longopt cmdline options, and some special conftest.py variables.
+ renamed 'conf_capture' conftest setting to 'option_capture' accordingly.
+
+* fix issue #27: better reporting on non-collectable items given on commandline
+ (e.g. pyc files)
+
+* fix issue #33: added --version flag (thanks Benjamin Peterson)
+
+* fix issue #32: adding support for "incomplete" paths to wcpath.status()
+
+* "Test" prefixed classes are *not* collected by default anymore if they
+ have an __init__ method
+
+* monkeypatch setenv() now accepts a "prepend" parameter
+
+* improved reporting of collection error tracebacks
+
+* simplified multicall mechanism and plugin architecture,
+ renamed some internal methods and argnames
+
+Changes between 1.0.0b9 and 1.0.0
+=====================================
+
+* more terse reporting: try to show filesystem paths relative to the current dir
+* improve xfail output a bit
+
+Changes between 1.0.0b8 and 1.0.0b9
+=====================================
+
+* cleanly handle and report final teardown of test setup
+
+* fix svn-1.6 compat issue with py.path.svnwc().versioned()
+ (thanks Wouter Vanden Hove)
+
+* setup/teardown or collection problems now show as ERRORs
+ or with big "E"'s in the progress lines. they are reported
+ and counted separately.
+
+* dist-testing: properly handle test items that get locally
+ collected but cannot be collected on the remote side - often
+ due to platform/dependency reasons
+
+* simplified py.test.mark API - see keyword plugin documentation
+
+* integrate better with logging: capturing now by default captures
+ test functions and their immediate setup/teardown in a single stream
+
+* capsys and capfd funcargs now have a readouterr() and a close() method
+ (underlyingly py.io.StdCapture/FD objects are used which grew a
+ readouterr() method as well to return snapshots of captured out/err)
+
+* make assert-reinterpretation work better with comparisons not
+  returning bools (reported with numpy, thanks Maciej Fijalkowski)
+
+* reworked per-test output capturing into the pytest_iocapture.py plugin
+ and thus removed capturing code from config object
+
+* item.repr_failure(excinfo) instead of item.repr_failure(excinfo, outerr)
+
+
+Changes between 1.0.0b7 and 1.0.0b8
+=====================================
+
+* pytest_unittest-plugin is now enabled by default
+
+* introduced pytest_keyboardinterrupt hook and
+  refined the pytest_sessionfinish hook, added tests.
+
+* workaround a buggy logging module interaction ("closing already closed
+ files"). Thanks to Sridhar Ratnakumar for triggering.
+
+* if plugins use "py.test.importorskip" for importing
+ a dependency only a warning will be issued instead
+ of exiting the testing process.
+
+* many improvements to docs:
+  - refined funcargs doc, use the term "factory" instead of "provider"
+ - added a new talk/tutorial doc page
+ - better download page
+ - better plugin docstrings
+ - added new plugins page and automatic doc generation script
+
+* fixed teardown problem related to partially failing funcarg setups
+ (thanks MrTopf for reporting), "pytest_runtest_teardown" is now
+ always invoked even if the "pytest_runtest_setup" failed.
+
+* tweaked doctest output for docstrings in py modules,
+ thanks Radomir.
+
+Changes between 1.0.0b3 and 1.0.0b7
+=============================================
+
+* renamed py.test.xfail back to py.test.mark.xfail to avoid
+ two ways to decorate for xfail
+
+* re-added py.test.mark decorator for setting keywords on functions
+ (it was actually documented so removing it was not nice)
+
+* remove scope-argument from request.addfinalizer() because
+ request.cached_setup has the scope arg. TOOWTDI.
+
+* perform setup finalization before reporting failures
+
+* apply modified patches from Andreas Kloeckner to allow
+ test functions to have no func_code (#22) and to make
+ "-k" and function keywords work (#20)
+
+* apply patch from Daniel Peolzleithner (issue #23)
+
+* resolve issue #18, multiprocessing.Manager() and
+ redirection clash
+
+* make __name__ == "__channelexec__" for remote_exec code
+
+Changes between 1.0.0b1 and 1.0.0b3
+=============================================
+
+* plugin classes are removed: one now defines
+ hooks directly in conftest.py or global pytest_*.py
+ files.
+
+* added new pytest_namespace(config) hook that allows
+ to inject helpers directly to the py.test.* namespace.
+
+* documented and refined many hooks
+
+* added new style of generative tests via
+ pytest_generate_tests hook that integrates
+ well with function arguments.
+
+
+Changes between 0.9.2 and 1.0.0b1
+=============================================
+
+* introduced new "funcarg" setup method,
+ see doc/test/funcarg.txt
+
+* introduced plugin architecture and many
+ new py.test plugins, see
+ doc/test/plugins.txt
+
+* teardown_method is now guaranteed to get
+ called after a test method has run.
+
+* new method: py.test.importorskip(mod,minversion)
+ will either import or call py.test.skip()
+
+* completely revised internal py.test architecture
+
+* new py.process.ForkedFunc object allowing to
+ fork execution of a function to a sub process
+  and get a result back.
+
+XXX lots of things missing here XXX
+
+Changes between 0.9.1 and 0.9.2
+===============================
+
+* refined installation and metadata, created new setup.py,
+ now based on setuptools/ez_setup (thanks to Ralf Schmitt
+ for his support).
+
+* improved the way of making py.* scripts available in
+ windows environments, they are now added to the
+ Scripts directory as ".cmd" files.
+
+* py.path.svnwc.status() now is more complete and
+ uses xml output from the 'svn' command if available
+ (Guido Wesdorp)
+
+* fix for py.path.svn* to work with svn 1.5
+ (Chris Lamb)
+
+* fix path.relto(otherpath) method on windows to
+ use normcase for checking if a path is relative.
+
+* py.test's traceback is better parseable from editors
+ (follows the filenames:LINENO: MSG convention)
+ (thanks to Osmo Salomaa)
+
+* fix to javascript-generation, "py.test --runbrowser"
+ should work more reliably now
+
+* removed previously accidentally added
+ py.test.broken and py.test.notimplemented helpers.
+
+* there now is a py.__version__ attribute
+
+Changes between 0.9.0 and 0.9.1
+===============================
+
+This is a fairly complete list of changes between 0.9 and 0.9.1, which can
+serve as a reference for developers.
+
+* allowing + signs in py.path.svn urls [39106]
+* fixed support for Failed exceptions without excinfo in py.test [39340]
+* added support for killing processes for Windows (as well as platforms that
+ support os.kill) in py.misc.killproc [39655]
+* added setup/teardown for generative tests to py.test [40702]
+* added detection of FAILED TO LOAD MODULE to py.test [40703, 40738, 40739]
+* fixed problem with calling .remove() on wcpaths of non-versioned files in
+ py.path [44248]
+* fixed some import and inheritance issues in py.test [41480, 44648, 44655]
+* fail to run greenlet tests when pypy is available, but without stackless
+ [45294]
+* small fixes in rsession tests [45295]
+* fixed issue with 2.5 type representations in py.test [45483, 45484]
+* made that internal reporting issues displaying is done atomically in py.test
+ [45518]
+* made that non-existing files are ignored by the py.lookup script [45519]
+* improved exception name creation in py.test [45535]
+* made that less threads are used in execnet [merge in 45539]
+* removed lock required for atomical reporting issue displaying in py.test
+ [45545]
+* removed globals from execnet [45541, 45547]
+* refactored cleanup mechanics, made that setDaemon is set to 1 to make atexit
+ get called in 2.5 (py.execnet) [45548]
+* fixed bug in joining threads in py.execnet's servemain [45549]
+* refactored py.test.rsession tests to not rely on exact output format anymore
+ [45646]
+* using repr() on test outcome [45647]
+* added 'Reason' classes for py.test.skip() [45648, 45649]
+* killed some unnecessary sanity check in py.test.collect [45655]
+* avoid using os.tmpfile() in py.io.fdcapture because on Windows it's only
+ usable by Administrators [45901]
+* added support for locking and non-recursive commits to py.path.svnwc [45994]
+* locking files in py.execnet to prevent CPython from segfaulting [46010]
+* added export() method to py.path.svnurl
+* fixed -d -x in py.test [47277]
+* fixed argument concatenation problem in py.path.svnwc [49423]
+* restore py.test behaviour that it exits with code 1 when there are failures
+ [49974]
+* don't fail on html files that don't have an accompanying .txt file [50606]
+* fixed 'utestconvert.py < input' [50645]
+* small fix for code indentation in py.code.source [50755]
+* fix _docgen.py documentation building [51285]
+* improved checks for source representation of code blocks in py.test [51292]
+* added support for passing authentication to py.path.svn* objects [52000,
+ 52001]
+* removed sorted() call for py.apigen tests in favour of [].sort() to support
+ Python 2.3 [52481]
diff --git a/testing/web-platform/tests/tools/third_party/py/LICENSE b/testing/web-platform/tests/tools/third_party/py/LICENSE
new file mode 100644
index 0000000000..31ecdfb1db
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/LICENSE
@@ -0,0 +1,19 @@
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+
diff --git a/testing/web-platform/tests/tools/third_party/py/MANIFEST.in b/testing/web-platform/tests/tools/third_party/py/MANIFEST.in
new file mode 100644
index 0000000000..6d255b1a9e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/MANIFEST.in
@@ -0,0 +1,11 @@
+include CHANGELOG.rst
+include AUTHORS
+include README.rst
+include setup.py
+include LICENSE
+include conftest.py
+include tox.ini
+recursive-include py *.pyi
+graft doc
+graft testing
+global-exclude *.pyc
diff --git a/testing/web-platform/tests/tools/third_party/py/README.rst b/testing/web-platform/tests/tools/third_party/py/README.rst
new file mode 100644
index 0000000000..80800b2b7a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/README.rst
@@ -0,0 +1,31 @@
+.. image:: https://img.shields.io/pypi/v/py.svg
+ :target: https://pypi.org/project/py
+
+.. image:: https://img.shields.io/conda/vn/conda-forge/py.svg
+ :target: https://anaconda.org/conda-forge/py
+
+.. image:: https://img.shields.io/pypi/pyversions/py.svg
+ :target: https://pypi.org/project/py
+
+.. image:: https://github.com/pytest-dev/py/workflows/build/badge.svg
+ :target: https://github.com/pytest-dev/py/actions
+
+
+**NOTE**: this library is in **maintenance mode** and should not be used in new code.
+
+The py lib is a Python development support library featuring
+the following tools and modules:
+
+* ``py.path``: uniform local and svn path objects -> please use pathlib/pathlib2 instead
+* ``py.apipkg``: explicit API control and lazy-importing -> please use the standalone package instead
+* ``py.iniconfig``: easy parsing of .ini files -> please use the standalone package instead
+* ``py.code``: dynamic code generation and introspection (deprecated, moved to ``pytest`` as an implementation detail).
+
+**NOTE**: prior to the 1.4 release this distribution used to
+contain py.test which is now its own package, see https://docs.pytest.org
+
+For questions and more information please visit https://py.readthedocs.io
+
+Bugs and issues: https://github.com/pytest-dev/py
+
+Authors: Holger Krekel and others, 2004-2017
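+
+A minimal usage sketch of the (deprecated) ``py.path.local`` API, added here
+for illustration only and not part of the upstream README; the names used are
+examples, and new code should prefer ``pathlib``::
+
+    import py
+
+    # create a directory (parents included) and a file inside it
+    d = py.path.local("demo-dir").ensure(dir=1)
+    f = d.join("notes.txt")
+    f.write_text(u"hello", encoding="utf-8")   # text I/O takes an explicit encoding
+    assert f.read_text(encoding="utf-8") == u"hello"
+    assert f.check(file=1)                     # check() queries path properties
+
+    # look up an executable on PATH; returns a py.path.local or None
+    python_exe = py.path.local.sysfind("python")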
diff --git a/testing/web-platform/tests/tools/third_party/py/RELEASING.rst b/testing/web-platform/tests/tools/third_party/py/RELEASING.rst
new file mode 100644
index 0000000000..fb588e3ab7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/RELEASING.rst
@@ -0,0 +1,17 @@
+Release Procedure
+-----------------
+
+#. Create a branch ``release-X.Y.Z`` from the latest ``master``.
+
+#. Manually update the ``CHANGELOG.rst`` and commit.
+
+#. Open a PR for this branch targeting ``master``.
+
+#. After all tests pass and the PR has been approved by at least one other maintainer, publish to PyPI by creating and pushing a tag::
+
+ git tag X.Y.Z
+ git push git@github.com:pytest-dev/py X.Y.Z
+
+ Wait for the deploy to complete, then make sure it is `available on PyPI <https://pypi.org/project/py>`_.
+
+#. Merge your PR to ``master``.
diff --git a/testing/web-platform/tests/tools/third_party/py/bench/localpath.py b/testing/web-platform/tests/tools/third_party/py/bench/localpath.py
new file mode 100644
index 0000000000..aad44f2e66
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/bench/localpath.py
@@ -0,0 +1,73 @@
+import py
+
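+# Micro-benchmarks for py.path.local: each class below provides setup()/run(),
+# and the __main__ block at the bottom times cls.numiter calls of run() per
+# class (note that xrange keeps this script Python 2 only).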
+class Listdir:
+ numiter = 100000
+ numentries = 100
+
+ def setup(self):
+ tmpdir = py.path.local.make_numbered_dir(self.__class__.__name__)
+ for i in range(self.numentries):
+ tmpdir.join(str(i))
+ self.tmpdir = tmpdir
+
+ def run(self):
+ return self.tmpdir.listdir()
+
+class Listdir_arg(Listdir):
+ numiter = 100000
+ numentries = 100
+
+ def run(self):
+ return self.tmpdir.listdir("47")
+
+class Join_onearg(Listdir):
+ def run(self):
+ self.tmpdir.join("17")
+ self.tmpdir.join("18")
+ self.tmpdir.join("19")
+
+class Join_multi(Listdir):
+ def run(self):
+ self.tmpdir.join("a", "b")
+ self.tmpdir.join("a", "b", "c")
+ self.tmpdir.join("a", "b", "c", "d")
+
+class Check(Listdir):
+ def run(self):
+ self.tmpdir.check()
+ self.tmpdir.check()
+ self.tmpdir.check()
+
+class CheckDir(Listdir):
+ def run(self):
+ self.tmpdir.check(dir=1)
+ self.tmpdir.check(dir=1)
+ assert not self.tmpdir.check(dir=0)
+
+class CheckDir2(Listdir):
+ def run(self):
+ self.tmpdir.stat().isdir()
+ self.tmpdir.stat().isdir()
+ assert self.tmpdir.stat().isdir()
+
+class CheckFile(Listdir):
+ def run(self):
+ self.tmpdir.check(file=1)
+ assert not self.tmpdir.check(file=1)
+ assert self.tmpdir.check(file=0)
+
+if __name__ == "__main__":
+ import time
+ for cls in [Listdir, Listdir_arg,
+ Join_onearg, Join_multi,
+ Check, CheckDir, CheckDir2, CheckFile,]:
+
+ inst = cls()
+ inst.setup()
+ now = time.time()
+ for i in xrange(cls.numiter):
+ inst.run()
+ elapsed = time.time() - now
+ print("%s: %d loops took %.2f seconds, per call %.6f" %(
+ cls.__name__,
+ cls.numiter, elapsed, elapsed / cls.numiter))
diff --git a/testing/web-platform/tests/tools/third_party/py/codecov.yml b/testing/web-platform/tests/tools/third_party/py/codecov.yml
new file mode 100644
index 0000000000..a0a308588e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/codecov.yml
@@ -0,0 +1,7 @@
+coverage:
+ status:
+ project: true
+ patch: true
+ changes: true
+
+comment: off
diff --git a/testing/web-platform/tests/tools/third_party/py/conftest.py b/testing/web-platform/tests/tools/third_party/py/conftest.py
new file mode 100644
index 0000000000..5bff3fe022
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/conftest.py
@@ -0,0 +1,60 @@
+import py
+import pytest
+import sys
+
+pytest_plugins = 'doctest', 'pytester'
+
+collect_ignore = ['build', 'doc/_build']
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("pylib", "py lib testing options")
+ group.addoption('--runslowtests',
+ action="store_true", dest="runslowtests", default=False,
+ help=("run slow tests"))
+
+@pytest.fixture
+def sshhost(request):
+ val = request.config.getvalue("sshhost")
+ if val:
+ return val
+ py.test.skip("need --sshhost option")
+
+
+# XXX copied from execnet's conftest.py - needs to be merged
+winpymap = {
+ 'python2.7': r'C:\Python27\python.exe',
+}
+
+
+def getexecutable(name, cache={}):
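+    # The mutable default argument is intentional: it serves as a per-process
+    # cache of resolved interpreter paths (misses are cached as None).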
+ try:
+ return cache[name]
+ except KeyError:
+ executable = py.path.local.sysfind(name)
+ if executable:
+ if name == "jython":
+ import subprocess
+ popen = subprocess.Popen(
+ [str(executable), "--version"],
+ universal_newlines=True, stderr=subprocess.PIPE)
+ out, err = popen.communicate()
+ if not err or "2.5" not in err:
+ executable = None
+ cache[name] = executable
+ return executable
+
+
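+# The anypython fixture resolves each parametrized interpreter name to a
+# concrete executable path, skipping the test when it is not installed.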
+@pytest.fixture(params=('python2.7', 'pypy-c', 'jython'))
+def anypython(request):
+ name = request.param
+ executable = getexecutable(name)
+ if executable is None:
+ if sys.platform == "win32":
+ executable = winpymap.get(name, None)
+ if executable:
+ executable = py.path.local(executable)
+ if executable.check():
+ return executable
+ py.test.skip("no %s found" % (name,))
+ return executable
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/Makefile b/testing/web-platform/tests/tools/third_party/py/doc/Makefile
new file mode 100644
index 0000000000..0a0e89e01f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/Makefile
@@ -0,0 +1,133 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+install: clean html
+ rsync -avz _build/html/ code:www-pylib/
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/py.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/py.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/py"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/py"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ make -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/_templates/layout.html b/testing/web-platform/tests/tools/third_party/py/doc/_templates/layout.html
new file mode 100644
index 0000000000..683863aa46
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/_templates/layout.html
@@ -0,0 +1,18 @@
+{% extends "!layout.html" %}
+
+{% block footer %}
+{{ super() }}
+<script type="text/javascript">
+
+ var _gaq = _gaq || [];
+ _gaq.push(['_setAccount', 'UA-7597274-14']);
+ _gaq.push(['_trackPageview']);
+
+ (function() {
+ var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
+ ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
+ var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
+ })();
+
+</script>
+{% endblock %}
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-0.9.0.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-0.9.0.txt
new file mode 100644
index 0000000000..0710931354
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-0.9.0.txt
@@ -0,0 +1,7 @@
+py lib 1.0.0: XXX
+======================================================================
+
+Welcome to the 1.0.0 py lib release - a library aiming to
+support agile and test-driven python development on various levels.
+
+XXX
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-0.9.2.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-0.9.2.txt
new file mode 100644
index 0000000000..8340dc4455
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-0.9.2.txt
@@ -0,0 +1,27 @@
+py lib 0.9.2: bugfix release
+=============================
+
+Welcome to the 0.9.2 py lib and py.test release -
+mainly fixing Windows issues, providing better
+packaging and integration with setuptools.
+
+Here is a quick summary of what the py lib provides:
+
+* py.test: cross-project testing tool with many advanced features
+* py.execnet: ad-hoc code distribution to SSH, Socket and local sub processes
+* py.magic.greenlet: micro-threads on standard CPython ("stackless-light")
+* py.path: path abstractions over local and subversion files
+* rich documentation of py's exported API
+* tested against Linux, Win32, OSX, works on python 2.3-2.6
+
+See here for more information:
+
+Pypi pages: https://pypi.org/project/py/
+
+Download/Install: http://codespeak.net/py/0.9.2/download.html
+
+Documentation/API: http://codespeak.net/py/0.9.2/index.html
+
+best and have fun,
+
+holger krekel
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.0.0.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.0.0.txt
new file mode 100644
index 0000000000..aef25ec239
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.0.0.txt
@@ -0,0 +1,63 @@
+
+pylib 1.0.0 released: testing-with-python innovations continue
+--------------------------------------------------------------------
+
+Took a few betas but finally I uploaded a `1.0.0 py lib release`_,
+featuring the mature and powerful py.test tool and "execnet-style"
+*elastic* distributed programming. With the new release, there are
+many new advanced automated testing features - here is a quick summary:
+
+* funcargs_ - pythonic zero-boilerplate fixtures for Python test functions:
+
+ - totally separates test code, test configuration and test setup
+ - ideal for integration and functional tests
+ - allows for flexible and natural test parametrization schemes
+
+* new `plugin architecture`_, allowing easy-to-write project-specific and cross-project single-file plugins. The most notable new external plugin is `oejskit`_ which naturally enables **running and reporting of javascript-unittests in real-life browsers**.
+
+* many new features done in easy-to-improve `default plugins`_, highlights:
+
+ * xfail: mark tests as "expected to fail" and report separately.
+ * pastebin: automatically send tracebacks to pocoo paste service
+ * capture: flexibly capture stdout/stderr of subprocesses, per-test ...
+ * monkeypatch: safely monkeypatch modules/classes from within tests
+ * unittest: run and integrate traditional unittest.py tests
+ * figleaf: generate html coverage reports with the figleaf module
+ * resultlog: generate buildbot-friendly reporting output
+ * ...
+
+* `distributed testing`_ and `elastic distributed execution`_:
+
+ - new unified "TX" URL scheme for specifying remote processes
+ - new distribution modes "--dist=each" and "--dist=load"
+ - new sync/async ways to handle 1:N communication
+ - improved documentation
+
+The py lib continues to offer most of the functionality used by
+the testing tool in `independent namespaces`_.
+
+Some non-test related code, notably greenlets/co-routines and
+api-generation now live as their own projects which simplifies the
+installation procedure because no C-Extensions are required anymore.
+
+The whole package should work well with Linux, Win32 and OSX, on Python
+2.3, 2.4, 2.5 and 2.6. (Expect Python3 compatibility soon!)
+
+For more info, see the py.test and py lib documentation:
+
+ http://pytest.org
+
+ http://pylib.org
+
+have fun,
+holger
+
+.. _`independent namespaces`: http://pylib.org
+.. _`funcargs`: http://codespeak.net/py/dist/test/funcargs.html
+.. _`plugin architecture`: http://codespeak.net/py/dist/test/extend.html
+.. _`default plugins`: http://codespeak.net/py/dist/test/plugin/index.html
+.. _`distributed testing`: http://codespeak.net/py/dist/test/dist.html
+.. _`elastic distributed execution`: http://codespeak.net/py/dist/execnet.html
+.. _`1.0.0 py lib release`: https://pypi.org/project/py/
+.. _`oejskit`: http://codespeak.net/py/dist/test/plugin/oejskit.html
+
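
To make the funcargs_ item above concrete, here is a minimal sketch in the 1.0-era spelling (``pytest_funcarg__`` factories plus ``request.cached_setup``); the ``db`` name and the dict resource are illustrative assumptions::

    # conftest.py -- a zero-boilerplate funcarg factory (1.0-era API)
    def pytest_funcarg__db(request):
        def setup():
            return {"connected": True}      # stand-in for a real resource
        def teardown(db):
            db["connected"] = False
        # cached_setup keeps test code, configuration and setup separate
        return request.cached_setup(setup=setup, teardown=teardown,
                                    scope="module")

    # test_db.py -- a test receives the funcarg simply by naming it
    def test_connection(db):
        assert db["connected"]
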
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.0.1.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.0.1.txt
new file mode 100644
index 0000000000..0c9f8760bd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.0.1.txt
@@ -0,0 +1,48 @@
+1.0.1: improved reporting, nose/unittest.py support, bug fixes
+-----------------------------------------------------------------------
+
+This is a bugfix release of pylib/py.test also coming with:
+
+* improved documentation, improved navigation
+* test failure reporting improvements
+* support for directly running existing nose/unittest.py style tests
+
+visit here for more info, including quickstart and tutorials:
+
+ http://pytest.org and http://pylib.org
+
+
+Changelog 1.0.0 to 1.0.1
+------------------------
+
+* added a default 'pytest_nose' plugin which handles nose.SkipTest,
+ nose-style function/method/generator setup/teardown and
+ tries to report functions correctly.
+
+* improved documentation, better navigation: see http://pytest.org
+
+* added a "--help-config" option to show conftest.py / ENV-var names for
+ all longopt cmdline options, and some special conftest.py variables.
+ renamed 'conf_capture' conftest setting to 'option_capture' accordingly.
+
+* unicode fixes: capturing and unicode writes to sys.stdout
+  (through e.g. a print statement) now work within tests,
+ they are encoded as "utf8" by default, also terminalwriting
+ was adapted and somewhat unified between windows and linux
+
+* fix issue #27: better reporting on non-collectable items given on commandline
+ (e.g. pyc files)
+
+* fix issue #33: added --version flag (thanks Benjamin Peterson)
+
+* fix issue #32: adding support for "incomplete" paths to wcpath.status()
+
+* "Test" prefixed classes are *not* collected by default anymore if they
+ have an __init__ method
+
+* monkeypatch setenv() now accepts a "prepend" parameter
+
+* improved reporting of collection error tracebacks
+
+* simplified multicall mechanism and plugin architecture,
+ renamed some internal methods and argnames
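
A small sketch of the ``monkeypatch`` change noted in the changelog above; the ``prepend`` argument names the separator used to put the new value in front of any existing one (the PATH example is illustrative)::

    import os

    def test_tool_on_path(monkeypatch):
        # with prepend=os.pathsep the new entry is joined in front of the
        # existing PATH instead of replacing it
        monkeypatch.setenv("PATH", "/opt/mytool/bin", prepend=os.pathsep)
        assert os.environ["PATH"].startswith("/opt/mytool/bin")
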
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.0.2.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.0.2.txt
new file mode 100644
index 0000000000..2354619535
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.0.2.txt
@@ -0,0 +1,5 @@
+1.0.2: packaging fixes
+-----------------------------------------------------------------------
+
+this release is purely a release for fixing packaging issues.
+
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.1.0.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.1.0.txt
new file mode 100644
index 0000000000..0441c3215e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.1.0.txt
@@ -0,0 +1,115 @@
+py.test/pylib 1.1.0: Python3, Jython, advanced skipping, cleanups ...
+--------------------------------------------------------------------------------
+
+Features:
+
+* compatible to Python3 (single py2/py3 source), `easy to install`_
+* conditional skipping_: skip/xfail based on platform/dependencies
+* generalized marking_: mark tests on a whole-class or whole-module basis
+
+Fixes:
+
+* code reduction and "de-magification" (e.g. 23 KLoc -> 11 KLOC)
+* distributed testing requires the now separately released execnet_ package
+* funcarg-setup/caching, "same-name" test modules now cause an explicit error
+* de-cluttered reporting options, --report for skipped/xfail details
+
+Compatibilities
+
+1.1.0 should allow running test code that already worked well with 1.0.2
+plus some more due to improved unittest/nose compatibility.
+
+More information: http://pytest.org
+
+thanks and have fun,
+
+holger (http://twitter.com/hpk42)
+
+.. _execnet: http://codespeak.net/execnet
+.. _`easy to install`: ../install.html
+.. _marking: ../test/plugin/mark.html
+.. _skipping: ../test/plugin/skipping.html
+
+
+Changelog 1.0.2 -> 1.1.0
+-----------------------------------------------------------------------
+
+* remove py.rest tool and internal namespace - it was
+ never really advertised and can still be used with
+ the old release if needed. If there is interest
+  it could be revived into its own tool I guess.
+
+* fix issue48 and issue59: raise an Error if the module
+ from an imported test file does not seem to come from
+ the filepath - avoids "same-name" confusion that has
+ been reported repeatedly
+
+* merged Ronny's nose-compatibility hacks: now
+ nose-style setup_module() and setup() functions are
+ supported
+
+* introduce generalized py.test.mark function marking
+
+* reshuffle / refine command line grouping
+
+* deprecate parser.addgroup in favour of getgroup which creates option group
+
+* add --report command line option that allows to control showing of skipped/xfailed sections
+
+* generalized skipping: a new way to mark python functions with skipif or xfail
+ at function, class and modules level based on platform or sys-module attributes.
+
+* extend py.test.mark decorator to allow for positional args
+
+* introduce and test "py.cleanup -d" to remove empty directories
+
+* fix issue #59 - robustify unittest test collection
+
+* make bpython/help interaction work by adding an __all__ attribute
+ to ApiModule, cleanup initpkg
+
+* use MIT license for pylib, add some contributors
+
+* remove py.execnet code and substitute all usages with 'execnet' proper
+
+* fix issue50 - cached_setup now caches more to expectations
+ for test functions with multiple arguments.
+
+* merge Jarko's fixes, issue #45 and #46
+
+* add the ability to specify a path for py.lookup to search in
+
+* fix a funcarg cached_setup bug probably only occurring
+ in distributed testing and "module" scope with teardown.
+
+* many fixes and changes for making the code base python3 compatible,
+ many thanks to Benjamin Peterson for helping with this.
+
+* consolidate builtins implementation to be compatible with >=2.3,
+ add helpers to ease keeping 2 and 3k compatible code
+
+* deprecate py.compat.doctest|subprocess|textwrap|optparse
+
+* deprecate py.magic.autopath, remove py/magic directory
+
+* move pytest assertion handling to py/code and a pytest_assertion
+ plugin, add "--no-assert" option, deprecate py.magic namespaces
+ in favour of (less) py.code ones.
+
+* consolidate and cleanup py/code classes and files
+
+* cleanup py/misc, move tests to bin-for-dist
+
+* introduce delattr/delitem/delenv methods to py.test's monkeypatch funcarg
+
+* consolidate py.log implementation, remove old approach.
+
+* introduce py.io.TextIO and py.io.BytesIO for distinguishing between
+ text/unicode and byte-streams (uses underlying standard lib io.*
+ if available)
+
+* make py.unittest_convert helper script available which converts "unittest.py"
+ style files into the simpler assert/direct-test-classes py.test/nosetests
+ style. The script was written by Laura Creighton.
+
+* simplified internal localpath implementation
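
A minimal sketch of the generalized marking and skipping described in the announcement above, using the string-condition style of that era (the module-level ``pytestmark`` and the test bodies are illustrative)::

    import sys
    import py

    # whole-module marking: every test in this file carries the skipif mark
    pytestmark = py.test.mark.skipif("sys.platform == 'win32'")

    @py.test.mark.xfail
    def test_known_broken():
        # reported separately as an expected failure, not a plain failure
        assert 0

    def test_not_windows():
        assert not sys.platform.startswith("win")
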
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.1.1.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.1.1.txt
new file mode 100644
index 0000000000..83e6a1fd8d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.1.1.txt
@@ -0,0 +1,48 @@
+py.test/pylib 1.1.1: bugfix release, setuptools plugin registration
+--------------------------------------------------------------------------------
+
+This is a compatibility fixing release of pylib/py.test to work
+better with previous 1.0.x test code bases. It also contains fixes
+and changes to work with `execnet>=1.0.0`_ to provide distributed
+testing and looponfailing testing modes. py-1.1.1 also introduces
+a new mechanism for registering plugins via setuptools.
+
+What is pylib/py.test?
+-----------------------
+
+py.test is an advanced automated testing tool working with
+Python2, Python3 and Jython versions on all major operating
+systems. It has an extensive plugin architecture and can run many
+existing common Python test suites without modification. Moreover,
+it offers some unique features not found in other
+testing tools. See http://pytest.org for more info.
+
+The pylib also contains a localpath and svnpath implementation
+and some developer-oriented command line tools. See
+http://pylib.org for more info.
+
+thanks to all who helped and gave feedback,
+have fun,
+
+holger (http://twitter.com/hpk42)
+
+.. _`execnet>=1.0.0`: http://codespeak.net/execnet
+
+Changes between 1.1.1 and 1.1.0
+=====================================
+
+- introduce automatic plugin registration via 'pytest11'
+ entrypoints via setuptools' pkg_resources.iter_entry_points
+
+- fix py.test dist-testing to work with execnet >= 1.0.0b4
+
+- re-introduce py.test.cmdline.main() for better backward compatibility
+
+- svn paths: fix a bug with path.check(versioned=True) for svn paths,
+ allow '%' in svn paths, make svnwc.update() default to interactive mode
+ like in 1.0.x and add svnwc.update(interactive=False) to inhibit interaction.
+
+- refine distributed tarball to contain test and no pyc files
+
+- try harder to have deprecation warnings for py.compat.* accesses
+ report a correct location
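
The setuptools registration mentioned above amounts to publishing a ``pytest11`` entry point; a minimal sketch of such a plugin's setup.py (project and module names are illustrative)::

    # setup.py for a hypothetical external plugin; py.test 1.1.1+ discovers
    # it at startup via pkg_resources.iter_entry_points('pytest11')
    from setuptools import setup

    setup(
        name="pytest-example",
        py_modules=["pytest_example"],
        entry_points={
            "pytest11": ["example = pytest_example"],
        },
    )
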
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.2.0.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.2.0.txt
new file mode 100644
index 0000000000..4f6a561447
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.2.0.txt
@@ -0,0 +1,116 @@
+py.test/pylib 1.2.0: junitxml, standalone test scripts, pluginization
+--------------------------------------------------------------------------------
+
+py.test is an advanced automated testing tool working with
+Python2, Python3 and Jython versions on all major operating
+systems. It has a simple plugin architecture and can run many
+existing common Python test suites without modification. It offers
+some unique features not found in other testing tools.
+See http://pytest.org for more info.
+
+py.test 1.2.0 brings many bug fixes and interesting new abilities:
+
+* --junitxml=path will create an XML file for use with CI processing
+* --genscript=path creates a standalone py.test-equivalent test-script
+* --ignore=path prevents collection of anything below that path
+* --confcutdir=path only lookup conftest.py test configs below that path
+* a 'pytest_report_header' hook to add info to the terminal report header
+* a 'pytestconfig' function argument gives direct access to option values
+* 'pytest_generate_tests' can now be put into a class as well
+* on CPython py.test additionally installs as "py.test-VERSION", on
+ Jython as py.test-jython and on PyPy as py.test-pypy-XYZ
+
+Apart from many bug fixes 1.2.0 also has better pluginization:
+Distributed testing and looponfailing testing now live in the
+separately installable 'pytest-xdist' plugin. The same is true for
+'pytest-figleaf' for doing coverage reporting. Those two plugins
+can serve well now as blueprints for doing your own.
+
+thanks to all who helped and gave feedback,
+have fun,
+
+holger krekel, January 2010
+
+Changes between 1.2.0 and 1.1.1
+=====================================
+
+- moved dist/looponfailing from py.test core into a new
+ separately released pytest-xdist plugin.
+
+- new junitxml plugin: --junitxml=path will generate a junit style xml file
+ which is processable e.g. by the Hudson CI system.
+
+- new option: --genscript=path will generate a standalone py.test script
+ which will not need any libraries installed. thanks to Ralf Schmitt.
+
+- new option: --ignore will prevent specified path from collection.
+ Can be specified multiple times.
+
+- new option: --confcutdir=dir will make py.test only consider conftest
+ files that are relative to the specified dir.
+
+- new funcarg: "pytestconfig" is the pytest config object for access
+ to command line args and can now be easily used in a test.
+
+- install 'py.test' and `py.which` with a ``-$VERSION`` suffix to
+ disambiguate between Python3, python2.X, Jython and PyPy installed versions.
+
+- new "pytestconfig" funcarg allows access to test config object
+
+- new "pytest_report_header" hook can return additional lines
+ to be displayed at the header of a test run.
+
+- (experimental) allow "py.test path::name1::name2::..." for pointing
+ to a test within a test collection directly. This might eventually
+ evolve as a full substitute to "-k" specifications.
+
+- streamlined plugin loading: order is now as documented in
+ customize.html: setuptools, ENV, commandline, conftest.
+  also setuptools entry point names are turned to canonical names ("pytest_*")
+
+- automatically skip tests that need 'capfd' but have no os.dup
+
+- allow pytest_generate_tests to be defined in classes as well
+
+- deprecate usage of 'disabled' attribute in favour of pytestmark
+- deprecate definition of Directory, Module, Class and Function nodes
+ in conftest.py files. Use pytest collect hooks instead.
+
+- collection/item node specific runtest/collect hooks are only called exactly
+ on matching conftest.py files, i.e. ones which are exactly below
+ the filesystem path of an item
+
+- change: the first pytest_collect_directory hook to return something
+  will now prevent further hooks from being called.
+
+- change: figleaf plugin now requires --figleaf to run. Also
+ change its long command line options to be a bit shorter (see py.test -h).
+
+- change: pytest doctest plugin is now enabled by default and has a
+ new option --doctest-glob to set a pattern for file matches.
+
+- change: remove internal py._* helper vars, only keep py._pydir
+
+- robustify capturing to survive if custom pytest_runtest_setup
+ code failed and prevented the capturing setup code from running.
+
+- make py.test.* helpers provided by default plugins visible early -
+ works transparently both for pydoc and for interactive sessions
+ which will regularly see e.g. py.test.mark and py.test.importorskip.
+
+- simplify internal plugin manager machinery
+- simplify internal collection tree by introducing a RootCollector node
+
+- fix assert reinterpretation that sees a call containing "keyword=..."
+
+- fix issue66: invoke pytest_sessionstart and pytest_sessionfinish
+ hooks on slaves during dist-testing, report module/session teardown
+ hooks correctly.
+
+- fix issue65: properly handle dist-testing if no
+ execnet/py lib installed remotely.
+
+- skip some install-tests if no execnet is available
+
+- fix docs, fix internal bin/ script generation
+
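
Two of the 1.2.0 additions listed above, the ``pytest_report_header`` hook and the ``pytestconfig`` funcarg, can be sketched roughly as follows (the header text and the option checked are illustrative)::

    # conftest.py -- extra line(s) shown at the top of the terminal report
    def pytest_report_header(config):
        return ["project: example-suite"]

    # any test can receive the config object by naming 'pytestconfig'
    def test_can_read_options(pytestconfig):
        # direct access to parsed command line values
        assert isinstance(pytestconfig.option.verbose, int)
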
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.2.1.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.2.1.txt
new file mode 100644
index 0000000000..5bf8ba22dc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.2.1.txt
@@ -0,0 +1,66 @@
+py.test/pylib 1.2.1: little fixes and improvements
+--------------------------------------------------------------------------------
+
+py.test is an advanced automated testing tool working with
+Python2, Python3 and Jython versions on all major operating
+systems. It has a simple plugin architecture and can run many
+existing common Python test suites without modification. It offers
+some unique features not found in other testing tools.
+See http://pytest.org for more info.
+
+py.test 1.2.1 brings bug fixes and some new options and abilities triggered
+by user feedback:
+
+* --funcargs [testpath] will show available builtin- and project funcargs.
+* display a short and concise traceback if funcarg lookup fails.
+* early-load "conftest.py" files in non-dot first-level sub directories.
+* --tb=line will print a single line for each failing test (issue67)
+* py.cleanup has a number of new options, cleans up setup.py-related files
+* fix issue78: always call python-level teardown functions even if the
+ according setup failed.
+
+For more detailed information see the changelog below.
+
+cheers and have fun,
+
+holger
+
+
+Changes between 1.2.1 and 1.2.0
+=====================================
+
+- refined usage and options for "py.cleanup"::
+
+ py.cleanup # remove "*.pyc" and "*$py.class" (jython) files
+ py.cleanup -e .swp -e .cache # also remove files with these extensions
+ py.cleanup -s # remove "build" and "dist" directory next to setup.py files
+ py.cleanup -d # also remove empty directories
+ py.cleanup -a # synonym for "-s -d -e 'pip-log.txt'"
+ py.cleanup -n # dry run, only show what would be removed
+
+- add a new option "py.test --funcargs" which shows available funcargs
+ and their help strings (docstrings on their respective factory function)
+ for a given test path
+
+- display a short and concise traceback if a funcarg lookup fails
+
+- early-load "conftest.py" files in non-dot first-level sub directories.
+ allows to conveniently keep and access test-related options in a ``test``
+ subdir and still add command line options.
+
+- fix issue67: new super-short traceback-printing option: "--tb=line" will print a single line for each failing (python) test indicating its filename, lineno and the failure value
+
+- fix issue78: always call python-level teardown functions even if the
+ according setup failed. This includes refinements for calling setup_module/class functions
+ which will now only be called once instead of the previous behaviour where they'd be called
+ multiple times if they raise an exception (including a Skipped exception). Any exception
+  will be recorded and associated with all tests in the according module/class scope.
+
+- fix issue63: assume <40 columns to be a bogus terminal width, default to 80
+
+- fix pdb debugging to be in the correct frame on raises-related errors
+
+- update apipkg.py to fix an issue where recursive imports might
+ unnecessarily break importing
+
+- fix plugin links
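
The ``--funcargs`` listing described above picks up the docstring of each factory; a minimal sketch of a documented factory (the ``tmp_account`` name and payload are illustrative)::

    # conftest.py -- the docstring below is what "py.test --funcargs" shows
    def pytest_funcarg__tmp_account(request):
        """Return a fresh, throw-away account mapping for a single test."""
        account = {"id": 42, "active": True}
        request.addfinalizer(account.clear)     # torn down after the test
        return account

    def test_account_active(tmp_account):
        assert tmp_account["active"]
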
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.0.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.0.txt
new file mode 100644
index 0000000000..cf97db0367
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.0.txt
@@ -0,0 +1,580 @@
+py.test/pylib 1.3.0: new options, per-plugin hooks, fixes ...
+===========================================================================
+
+The 1.3.0 release introduces new options, bug fixes and improved compatibility
+with Python3 and Jython-2.5.1 on Windows. If you already use py-1.2 chances
+are you can use py-1.3.0. See the below CHANGELOG for more details and
+http://pylib.org/install.html for installation instructions.
+
+py.test is an advanced automated testing tool working with Python2,
+Python3, Jython and PyPy versions on all major operating systems. It
+offers a no-boilerplate testing approach and has inspired other testing
+tools and enhancements in the standard Python library for more than five
+years. It has a simple and extensive plugin architecture, configurable
+reporting and provides unique ways to make it fit to your testing
+process and needs.
+
+See http://pytest.org for more info.
+
+cheers and have fun,
+
+holger krekel
+
+Changes between 1.2.1 and 1.3.0
+==================================================
+
+- deprecate --report option in favour of a new shorter and easier to
+ remember -r option: it takes a string argument consisting of any
+ combination of 'xfsX' characters. They relate to the single chars
+ you see during the dotted progress printing and will print an extra line
+ per test at the end of the test run. This extra line indicates the exact
+ position or test ID that you directly paste to the py.test cmdline in order
+ to re-run a particular test.
+
+- allow external plugins to register new hooks via the new
+ pytest_addhooks(pluginmanager) hook. The new release of
+ the pytest-xdist plugin for distributed and looponfailing
+ testing requires this feature.
+
+- add a new pytest_ignore_collect(path, config) hook to allow projects and
+ plugins to define exclusion behaviour for their directory structure -
+ for example you may define in a conftest.py this method::
+
+ def pytest_ignore_collect(path):
+ return path.check(link=1)
+
+ to prevent even collection of any tests in symlinked dirs.
+
+- new pytest_pycollect_makemodule(path, parent) hook for
+ allowing customization of the Module collection object for a
+ matching test module.
+
+- extend and refine xfail mechanism::
+
+ @py.test.mark.xfail(run=False) do not run the decorated test
+ @py.test.mark.xfail(reason="...") prints the reason string in xfail summaries
+
+  specifying ``--runxfail`` on command line ignores xfail markers to show
+ you the underlying traceback.
+
+- expose (previously internal) commonly useful methods:
+  py.io.get_terminal_width() -> return terminal width
+ py.io.ansi_print(...) -> print colored/bold text on linux/win32
+ py.io.saferepr(obj) -> return limited representation string
+
+- expose test outcome related exceptions as py.test.skip.Exception,
+ py.test.raises.Exception etc., useful mostly for plugins
+ doing special outcome interpretation/tweaking
+
+- (issue85) fix junitxml plugin to handle tests with non-ascii output
+
+- fix/refine python3 compatibility (thanks Benjamin Peterson)
+
+- fixes for making the jython/win32 combination work, note however:
+ jython2.5.1/win32 does not provide a command line launcher, see
+ http://bugs.jython.org/issue1491 . See pylib install documentation
+ for how to work around.
+
+- fixes for handling of unicode exception values and unprintable objects
+
+- (issue87) fix unboundlocal error in assertionold code
+
+- (issue86) improve documentation for looponfailing
+
+- refine IO capturing: stdin-redirect pseudo-file now has a NOP close() method
+
+- ship distribute_setup.py version 0.6.10
+
+- added links to the new capturelog and coverage plugins
+
+
+Changes between 1.2.1 and 1.2.0
+=====================================
+
+- refined usage and options for "py.cleanup"::
+
+ py.cleanup # remove "*.pyc" and "*$py.class" (jython) files
+ py.cleanup -e .swp -e .cache # also remove files with these extensions
+ py.cleanup -s # remove "build" and "dist" directory next to setup.py files
+ py.cleanup -d # also remove empty directories
+ py.cleanup -a # synonym for "-s -d -e 'pip-log.txt'"
+ py.cleanup -n # dry run, only show what would be removed
+
+- add a new option "py.test --funcargs" which shows available funcargs
+ and their help strings (docstrings on their respective factory function)
+ for a given test path
+
+- display a short and concise traceback if a funcarg lookup fails
+
+- early-load "conftest.py" files in non-dot first-level sub directories.
+ allows to conveniently keep and access test-related options in a ``test``
+ subdir and still add command line options.
+
+- fix issue67: new super-short traceback-printing option: "--tb=line" will print a single line for each failing (python) test indicating its filename, lineno and the failure value
+
+- fix issue78: always call python-level teardown functions even if the
+ according setup failed. This includes refinements for calling setup_module/class functions
+ which will now only be called once instead of the previous behaviour where they'd be called
+ multiple times if they raise an exception (including a Skipped exception). Any exception
+  will be recorded and associated with all tests in the according module/class scope.
+
+- fix issue63: assume <40 columns to be a bogus terminal width, default to 80
+
+- fix pdb debugging to be in the correct frame on raises-related errors
+
+- update apipkg.py to fix an issue where recursive imports might
+ unnecessarily break importing
+
+- fix plugin links
+
+Changes between 1.2 and 1.1.1
+=====================================
+
+- moved dist/looponfailing from py.test core into a new
+ separately released pytest-xdist plugin.
+
+- new junitxml plugin: --junitxml=path will generate a junit style xml file
+ which is processable e.g. by the Hudson CI system.
+
+- new option: --genscript=path will generate a standalone py.test script
+ which will not need any libraries installed. thanks to Ralf Schmitt.
+
+- new option: --ignore will prevent specified path from collection.
+ Can be specified multiple times.
+
+- new option: --confcutdir=dir will make py.test only consider conftest
+ files that are relative to the specified dir.
+
+- new funcarg: "pytestconfig" is the pytest config object for access
+ to command line args and can now be easily used in a test.
+
+- install 'py.test' and `py.which` with a ``-$VERSION`` suffix to
+ disambiguate between Python3, python2.X, Jython and PyPy installed versions.
+
+- new "pytestconfig" funcarg allows access to test config object
+
+- new "pytest_report_header" hook can return additional lines
+ to be displayed at the header of a test run.
+
+- (experimental) allow "py.test path::name1::name2::..." for pointing
+ to a test within a test collection directly. This might eventually
+ evolve as a full substitute to "-k" specifications.
+
+- streamlined plugin loading: order is now as documented in
+ customize.html: setuptools, ENV, commandline, conftest.
+  also setuptools entry point names are turned to canonical names ("pytest_*")
+
+- automatically skip tests that need 'capfd' but have no os.dup
+
+- allow pytest_generate_tests to be defined in classes as well
+
+- deprecate usage of 'disabled' attribute in favour of pytestmark
+- deprecate definition of Directory, Module, Class and Function nodes
+ in conftest.py files. Use pytest collect hooks instead.
+
+- collection/item node specific runtest/collect hooks are only called exactly
+ on matching conftest.py files, i.e. ones which are exactly below
+ the filesystem path of an item
+
+- change: the first pytest_collect_directory hook to return something
+  will now prevent further hooks from being called.
+
+- change: figleaf plugin now requires --figleaf to run. Also
+ change its long command line options to be a bit shorter (see py.test -h).
+
+- change: pytest doctest plugin is now enabled by default and has a
+ new option --doctest-glob to set a pattern for file matches.
+
+- change: remove internal py._* helper vars, only keep py._pydir
+
+- robustify capturing to survive if custom pytest_runtest_setup
+ code failed and prevented the capturing setup code from running.
+
+- make py.test.* helpers provided by default plugins visible early -
+ works transparently both for pydoc and for interactive sessions
+ which will regularly see e.g. py.test.mark and py.test.importorskip.
+
+- simplify internal plugin manager machinery
+- simplify internal collection tree by introducing a RootCollector node
+
+- fix assert reinterpretation that sees a call containing "keyword=..."
+
+- fix issue66: invoke pytest_sessionstart and pytest_sessionfinish
+ hooks on slaves during dist-testing, report module/session teardown
+ hooks correctly.
+
+- fix issue65: properly handle dist-testing if no
+ execnet/py lib installed remotely.
+
+- skip some install-tests if no execnet is available
+
+- fix docs, fix internal bin/ script generation
+
+
+Changes between 1.1.1 and 1.1.0
+=====================================
+
+- introduce automatic plugin registration via 'pytest11'
+ entrypoints via setuptools' pkg_resources.iter_entry_points
+
+- fix py.test dist-testing to work with execnet >= 1.0.0b4
+
+- re-introduce py.test.cmdline.main() for better backward compatibility
+
+- svn paths: fix a bug with path.check(versioned=True) for svn paths,
+ allow '%' in svn paths, make svnwc.update() default to interactive mode
+ like in 1.0.x and add svnwc.update(interactive=False) to inhibit interaction.
+
+- refine distributed tarball to contain test and no pyc files
+
+- try harder to have deprecation warnings for py.compat.* accesses
+ report a correct location
+
+Changes between 1.1.0 and 1.0.2
+=====================================
+
+* adjust and improve docs
+
+* remove py.rest tool and internal namespace - it was
+ never really advertised and can still be used with
+ the old release if needed. If there is interest
+  it could be revived into its own tool I guess.
+
+* fix issue48 and issue59: raise an Error if the module
+ from an imported test file does not seem to come from
+ the filepath - avoids "same-name" confusion that has
+ been reported repeatedly
+
+* merged Ronny's nose-compatibility hacks: now
+ nose-style setup_module() and setup() functions are
+ supported
+
+* introduce generalized py.test.mark function marking
+
+* reshuffle / refine command line grouping
+
+* deprecate parser.addgroup in favour of getgroup which creates option group
+
+* add --report command line option that allows to control showing of skipped/xfailed sections
+
+* generalized skipping: a new way to mark python functions with skipif or xfail
+ at function, class and modules level based on platform or sys-module attributes.
+
+* extend py.test.mark decorator to allow for positional args
+
+* introduce and test "py.cleanup -d" to remove empty directories
+
+* fix issue #59 - robustify unittest test collection
+
+* make bpython/help interaction work by adding an __all__ attribute
+ to ApiModule, cleanup initpkg
+
+* use MIT license for pylib, add some contributors
+
+* remove py.execnet code and substitute all usages with 'execnet' proper
+
+* fix issue50 - cached_setup now caches more to expectations
+ for test functions with multiple arguments.
+
+* merge Jarko's fixes, issue #45 and #46
+
+* add the ability to specify a path for py.lookup to search in
+
+* fix a funcarg cached_setup bug probably only occurring
+ in distributed testing and "module" scope with teardown.
+
+* many fixes and changes for making the code base python3 compatible,
+ many thanks to Benjamin Peterson for helping with this.
+
+* consolidate builtins implementation to be compatible with >=2.3,
+ add helpers to ease keeping 2 and 3k compatible code
+
+* deprecate py.compat.doctest|subprocess|textwrap|optparse
+
+* deprecate py.magic.autopath, remove py/magic directory
+
+* move pytest assertion handling to py/code and a pytest_assertion
+ plugin, add "--no-assert" option, deprecate py.magic namespaces
+ in favour of (less) py.code ones.
+
+* consolidate and cleanup py/code classes and files
+
+* cleanup py/misc, move tests to bin-for-dist
+
+* introduce delattr/delitem/delenv methods to py.test's monkeypatch funcarg
+
+* consolidate py.log implementation, remove old approach.
+
+* introduce py.io.TextIO and py.io.BytesIO for distinguishing between
+ text/unicode and byte-streams (uses underlying standard lib io.*
+ if available)
+
+* make py.unittest_convert helper script available which converts "unittest.py"
+ style files into the simpler assert/direct-test-classes py.test/nosetests
+ style. The script was written by Laura Creighton.
+
+* simplified internal localpath implementation
+
+Changes between 1.0.1 and 1.0.2
+=====================================
+
+* fixing packaging issues, triggered by fedora redhat packaging,
+ also added doc, examples and contrib dirs to the tarball.
+
+* added a documentation link to the new django plugin.
+
+Changes between 1.0.0 and 1.0.1
+=====================================
+
+* added a 'pytest_nose' plugin which handles nose.SkipTest,
+ nose-style function/method/generator setup/teardown and
+ tries to report functions correctly.
+
+* capturing of unicode writes or encoded strings to sys.stdout/err
+ work better, also terminalwriting was adapted and somewhat
+ unified between windows and linux.
+
+* improved documentation layout and content a lot
+
+* added a "--help-config" option to show conftest.py / ENV-var names for
+ all longopt cmdline options, and some special conftest.py variables.
+ renamed 'conf_capture' conftest setting to 'option_capture' accordingly.
+
+* fix issue #27: better reporting on non-collectable items given on commandline
+ (e.g. pyc files)
+
+* fix issue #33: added --version flag (thanks Benjamin Peterson)
+
+* fix issue #32: adding support for "incomplete" paths to wcpath.status()
+
+* "Test" prefixed classes are *not* collected by default anymore if they
+ have an __init__ method
+
+* monkeypatch setenv() now accepts a "prepend" parameter
+
+* improved reporting of collection error tracebacks
+
+* simplified multicall mechanism and plugin architecture,
+ renamed some internal methods and argnames
+
+Changes between 1.0.0b9 and 1.0.0
+=====================================
+
+* more terse reporting: try to show filesystem paths relative to the current dir
+* improve xfail output a bit
+
+Changes between 1.0.0b8 and 1.0.0b9
+=====================================
+
+* cleanly handle and report final teardown of test setup
+
+* fix svn-1.6 compat issue with py.path.svnwc().versioned()
+ (thanks Wouter Vanden Hove)
+
+* setup/teardown or collection problems now show as ERRORs
+ or with big "E"'s in the progress lines. they are reported
+ and counted separately.
+
+* dist-testing: properly handle test items that get locally
+ collected but cannot be collected on the remote side - often
+ due to platform/dependency reasons
+
+* simplified py.test.mark API - see keyword plugin documentation
+
+* integrate better with logging: capturing now by default captures
+ test functions and their immediate setup/teardown in a single stream
+
+* capsys and capfd funcargs now have a readouterr() and a close() method
+ (underlyingly py.io.StdCapture/FD objects are used which grew a
+ readouterr() method as well to return snapshots of captured out/err)
+
+* make assert-reinterpretation work better with comparisons not
+  returning bools (reported with numpy, thanks Maciej Fijalkowski)
+
+* reworked per-test output capturing into the pytest_iocapture.py plugin
+ and thus removed capturing code from config object
+
+* item.repr_failure(excinfo) instead of item.repr_failure(excinfo, outerr)
+
+
+Changes between 1.0.0b7 and 1.0.0b8
+=====================================
+
+* pytest_unittest-plugin is now enabled by default
+
+* introduced pytest_keyboardinterrupt hook and
+  refined the pytest_sessionfinish hook, added tests.
+
+* workaround a buggy logging module interaction ("closing already closed
+ files"). Thanks to Sridhar Ratnakumar for triggering.
+
+* if plugins use "py.test.importorskip" for importing
+ a dependency only a warning will be issued instead
+ of exiting the testing process.
+
+* many improvements to docs:
+  - refined funcargs doc, use the term "factory" instead of "provider"
+ - added a new talk/tutorial doc page
+ - better download page
+ - better plugin docstrings
+ - added new plugins page and automatic doc generation script
+
+* fixed teardown problem related to partially failing funcarg setups
+ (thanks MrTopf for reporting), "pytest_runtest_teardown" is now
+ always invoked even if the "pytest_runtest_setup" failed.
+
+* tweaked doctest output for docstrings in py modules,
+ thanks Radomir.
+
+Changes between 1.0.0b3 and 1.0.0b7
+=============================================
+
+* renamed py.test.xfail back to py.test.mark.xfail to avoid
+ two ways to decorate for xfail
+
+* re-added py.test.mark decorator for setting keywords on functions
+ (it was actually documented so removing it was not nice)
+
+* remove scope-argument from request.addfinalizer() because
+ request.cached_setup has the scope arg. TOOWTDI.
+
+* perform setup finalization before reporting failures
+
+* apply modified patches from Andreas Kloeckner to allow
+ test functions to have no func_code (#22) and to make
+ "-k" and function keywords work (#20)
+
+* apply patch from Daniel Peolzleithner (issue #23)
+
+* resolve issue #18, multiprocessing.Manager() and
+ redirection clash
+
+* make __name__ == "__channelexec__" for remote_exec code
+
+Changes between 1.0.0b1 and 1.0.0b3
+=============================================
+
+* plugin classes are removed: one now defines
+ hooks directly in conftest.py or global pytest_*.py
+ files.
+
+* added new pytest_namespace(config) hook that allows
+ to inject helpers directly to the py.test.* namespace.
+
+* documented and refined many hooks
+
+* added new style of generative tests via
+ pytest_generate_tests hook that integrates
+ well with function arguments.
+
+
+Changes between 0.9.2 and 1.0.0b1
+=============================================
+
+* introduced new "funcarg" setup method,
+ see doc/test/funcarg.txt
+
+* introduced plugin architecture and many
+ new py.test plugins, see
+ doc/test/plugins.txt
+
+* teardown_method is now guaranteed to get
+ called after a test method has run.
+
+* new method: py.test.importorskip(mod,minversion)
+ will either import or call py.test.skip()
+
+* completely revised internal py.test architecture
+
+* new py.process.ForkedFunc object allowing to
+ fork execution of a function to a sub process
+ and getting a result back.
+
+XXX lots of things missing here XXX
+
+Changes between 0.9.1 and 0.9.2
+===============================
+
+* refined installation and metadata, created new setup.py,
+ now based on setuptools/ez_setup (thanks to Ralf Schmitt
+ for his support).
+
+* improved the way of making py.* scripts available in
+ windows environments, they are now added to the
+ Scripts directory as ".cmd" files.
+
+* py.path.svnwc.status() now is more complete and
+ uses xml output from the 'svn' command if available
+ (Guido Wesdorp)
+
+* fix for py.path.svn* to work with svn 1.5
+ (Chris Lamb)
+
+* fix path.relto(otherpath) method on windows to
+ use normcase for checking if a path is relative.
+
+* py.test's traceback is better parseable from editors
+ (follows the filenames:LINENO: MSG convention)
+ (thanks to Osmo Salomaa)
+
+* fix to javascript-generation, "py.test --runbrowser"
+ should work more reliably now
+
+* removed previously accidentally added
+ py.test.broken and py.test.notimplemented helpers.
+
+* there now is a py.__version__ attribute
+
+Changes between 0.9.0 and 0.9.1
+===============================
+
+This is a fairly complete list of changes between 0.9 and 0.9.1, which can
+serve as a reference for developers.
+
+* allowing + signs in py.path.svn urls [39106]
+* fixed support for Failed exceptions without excinfo in py.test [39340]
+* added support for killing processes for Windows (as well as platforms that
+ support os.kill) in py.misc.killproc [39655]
+* added setup/teardown for generative tests to py.test [40702]
+* added detection of FAILED TO LOAD MODULE to py.test [40703, 40738, 40739]
+* fixed problem with calling .remove() on wcpaths of non-versioned files in
+ py.path [44248]
+* fixed some import and inheritance issues in py.test [41480, 44648, 44655]
+* fail to run greenlet tests when pypy is available, but without stackless
+ [45294]
+* small fixes in rsession tests [45295]
+* fixed issue with 2.5 type representations in py.test [45483, 45484]
+* made that internal reporting issues displaying is done atomically in py.test
+ [45518]
+* made that non-existing files are ignored by the py.lookup script [45519]
+* improved exception name creation in py.test [45535]
+* made that less threads are used in execnet [merge in 45539]
+* removed lock required for atomical reporting issue displaying in py.test
+ [45545]
+* removed globals from execnet [45541, 45547]
+* refactored cleanup mechanics, made that setDaemon is set to 1 to make atexit
+ get called in 2.5 (py.execnet) [45548]
+* fixed bug in joining threads in py.execnet's servemain [45549]
+* refactored py.test.rsession tests to not rely on exact output format anymore
+ [45646]
+* using repr() on test outcome [45647]
+* added 'Reason' classes for py.test.skip() [45648, 45649]
+* killed some unnecessary sanity check in py.test.collect [45655]
+* avoid using os.tmpfile() in py.io.fdcapture because on Windows it's only
+ usable by Administrators [45901]
+* added support for locking and non-recursive commits to py.path.svnwc [45994]
+* locking files in py.execnet to prevent CPython from segfaulting [46010]
+* added export() method to py.path.svnurl
+* fixed -d -x in py.test [47277]
+* fixed argument concatenation problem in py.path.svnwc [49423]
+* restore py.test behaviour that it exits with code 1 when there are failures
+ [49974]
+* don't fail on html files that don't have an accompanying .txt file [50606]
+* fixed 'utestconvert.py < input' [50645]
+* small fix for code indentation in py.code.source [50755]
+* fix _docgen.py documentation building [51285]
+* improved checks for source representation of code blocks in py.test [51292]
+* added support for passing authentication to py.path.svn* objects [52000,
+ 52001]
+* removed sorted() call for py.apigen tests in favour of [].sort() to support
+ Python 2.3 [52481]
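
To illustrate the refined xfail mechanism described in the 1.3.0 changelog above (the test bodies are illustrative; passing ``--runxfail`` on the command line would ignore both markers)::

    import py

    @py.test.mark.xfail(run=False)
    def test_would_hang():
        # with run=False the body is never executed; the test is still
        # reported in the xfail summary
        while True:
            pass

    @py.test.mark.xfail(reason="known floating point rounding issue")
    def test_rounding():
        # the reason string shows up in the -r x summary line
        assert round(2.675, 2) == 2.68
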
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.1.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.1.txt
new file mode 100644
index 0000000000..471de408a1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.1.txt
@@ -0,0 +1,104 @@
+py.test/pylib 1.3.1: new py.test.xfail, --maxfail, better reporting
+===========================================================================
+
+The pylib/py.test 1.3.1 release brings:
+
+- the new imperative ``py.test.xfail()`` helper in order to have a test or
+ setup function result in an "expected failure"
+- a new option ``--maxfail=NUM`` to stop the test run after some failures
+- markers/decorators are now applicable to test classes (>=Python2.6)
+- improved reporting, shorter tracebacks in several cases
+- some simplified internals, more compatibility with Jython and PyPy
+- bug fixes and various refinements
+
+See the CHANGELOG entry below for more details and
+http://pylib.org/install.html for installation instructions.
+
+If you used older versions of py.test you should be able to upgrade
+to 1.3.1 without changes to your test source code.
+
+py.test is an automated testing tool working with Python2,
+Python3, Jython and PyPy versions on all major operating systems. It
+offers a no-boilerplate testing approach and has inspired other testing
+tools and enhancements in the standard Python library for more than five
+years. It has a simple and extensive plugin architecture, configurable
+reporting and provides unique ways to make it fit to your testing
+process and needs.
+
+See http://pytest.org for more info.
+
+cheers and have fun,
+
+holger krekel
+
+Changes between 1.3.0 and 1.3.1
+==================================================
+
+New features
+++++++++++++++++++
+
+- issue91: introduce new py.test.xfail(reason) helper
+ to imperatively mark a test as expected to fail. Can
+ be used from within setup and test functions. This is
+ useful especially for parametrized tests when certain
+ configurations are expected-to-fail. In this case the
+ declarative approach with the @py.test.mark.xfail cannot
+ be used as it would mark all configurations as xfail.
+
+- issue102: introduce new --maxfail=NUM option to stop
+ test runs after NUM failures. This is a generalization
+ of the '-x' or '--exitfirst' option which is now equivalent
+ to '--maxfail=1'. Both '-x' and '--maxfail' will
+ now also print a line near the end indicating the Interruption.
+
+- issue89: allow py.test.mark decorators to be used on classes
+ (class decorators were introduced with python2.6) and
+ also allow to have multiple markers applied at class/module level
+ by specifying a list.
+
+- improve and refine letter reporting in the progress bar:
+ . pass
+ f failed test
+ s skipped tests (reminder: use for dependency/platform mismatch only)
+ x xfailed test (test that was expected to fail)
+ X xpassed test (test that was expected to fail but passed)
+
+ You can use any combination of 'fsxX' with the '-r' extended
+ reporting option. The xfail/xpass results will show up as
+ skipped tests in the junitxml output - which also fixes
+ issue99.
+
+- make py.test.cmdline.main() return the exitstatus instead of raising
+ SystemExit and also allow it to be called multiple times. This of
+  course requires that your application and tests are properly torn
+ down and don't have global state.
+
+Fixes / Maintenance
+++++++++++++++++++++++
+
+- improved traceback presentation:
+ - improved and unified reporting for "--tb=short" option
+ - Errors during test module imports are much shorter, (using --tb=short style)
+ - raises shows shorter more relevant tracebacks
+ - --fulltrace now more systematically makes traces longer / inhibits cutting
+
+- improve support for raises and other dynamically compiled code by
+ manipulating python's linecache.cache instead of the previous
+ rather hacky way of creating custom code objects. This makes
+  it seamlessly work on Jython and PyPy where it previously didn't.
+
+- fix issue96: make capturing more resilient against Control-C
+ interruptions (involved somewhat substantial refactoring
+ to the underlying capturing functionality to avoid race
+ conditions).
+
+- fix chaining of conditional skipif/xfail decorators - so it works now
+ as expected to use multiple @py.test.mark.skipif(condition) decorators,
+ including specific reporting which of the conditions lead to skipping.
+
+- fix issue95: late-import zlib so that it's not required
+ for general py.test startup.
+
+- fix issue94: make reporting more robust against bogus source code
+ (and internally be more careful when presenting unexpected byte sequences)
+
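
A rough sketch of the 1.3.1 features announced above: the imperative ``py.test.xfail()`` helper and marker decorators on whole test classes (requires Python >= 2.6); the platform condition and class contents are illustrative. Stopping early is purely a command line matter, e.g. ``py.test --maxfail=2``::

    import sys
    import py

    # marker decorators may now be applied to a whole test class
    @py.test.mark.xfail(reason="legacy parser known broken on this branch")
    class TestLegacyParser:
        def test_parse(self):
            assert 0

    def test_configuration_dependent():
        if sys.platform.startswith("java"):
            # imperative form: usable inside setup or test code, where the
            # declarative decorator would mark every configuration as xfail
            py.test.xfail("not supported on Jython")
        assert sys.maxsize > 0
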
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.2.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.2.txt
new file mode 100644
index 0000000000..599dfbed75
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.2.txt
@@ -0,0 +1,720 @@
+py.test/pylib 1.3.2: API and reporting refinements, many fixes
+===========================================================================
+
+The pylib/py.test 1.3.2 release brings many bug fixes and some new
+features. It was refined for and tested against the recently released
+Python2.7 and remains compatible with the usual armada of interpreters
+(Python2.4 through to Python3.1.2, Jython and PyPy). Note that for using
+distributed testing features you'll need to upgrade to the jointly released
+pytest-xdist-1.4 because of some internal refactorings.
+
+See http://pytest.org for general documentation and below for
+a detailed CHANGELOG.
+
+cheers & particular thanks to Benjamin Peterson, Ronny Pfannschmidt
+and all issue and patch contributors,
+
+holger krekel
+
+Changes between 1.3.1 and 1.3.2
+==================================================
+
+New features
+++++++++++++++++++
+
+- fix issue103: introduce py.test.raises as context manager, examples::
+
+ with py.test.raises(ZeroDivisionError):
+ x = 0
+ 1 / x
+
+ with py.test.raises(RuntimeError) as excinfo:
+ call_something()
+
+ # you may do extra checks on excinfo.value|type|traceback here
+
+ (thanks Ronny Pfannschmidt)
+
+- Funcarg factories can now dynamically apply a marker to a
+ test invocation. This is for example useful if a factory
+ provides parameters to a test which are expected-to-fail::
+
+ def pytest_funcarg__arg(request):
+ request.applymarker(py.test.mark.xfail(reason="flaky config"))
+ ...
+
+ def test_function(arg):
+ ...
+
+- improved error reporting on collection and import errors. This makes
+ use of a more general mechanism, namely that for custom test item/collect
+ nodes ``node.repr_failure(excinfo)`` is now uniformly called so that you can
+ override it to return a string error representation of your choice
+ which is going to be reported as a (red) string.
+
+- introduce '--junitprefix=STR' option to prepend a prefix
+ to all reports in the junitxml file.
+
+Bug fixes / Maintenance
+++++++++++++++++++++++++++
+
+- make tests and the ``pytest_recwarn`` plugin in particular fully compatible
+  with Python2.7 (if you use the ``recwarn`` funcarg, warnings will be enabled so that
+ you can properly check for their existence in a cross-python manner).
+- refine --pdb: ignore xfailed tests, unify its TB-reporting and
+ don't display failures again at the end.
+- fix assertion interpretation with the ** operator (thanks Benjamin Peterson)
+- fix issue105 assignment on the same line as a failing assertion (thanks Benjamin Peterson)
+- fix issue104 proper escaping for test names in junitxml plugin (thanks anonymous)
+- fix issue57 -f|--looponfail to work with xpassing tests (thanks Ronny)
+- fix issue92 collectonly reporter and --pastebin (thanks Benjamin Peterson)
+- fix py.code.compile(source) to generate unique filenames
+- fix assertion re-interp problems on PyPy, by deferring code
+ compilation to the (overridable) Frame.eval class. (thanks Amaury Forgeot)
+- fix py.path.local.pyimport() to work with directories
+- streamline py.path.local.mkdtemp implementation and usage
+- don't print empty lines when showing junitxml-filename
+- add optional boolean ignore_errors parameter to py.path.local.remove
+- fix terminal writing on win32/python2.4
+- py.process.cmdexec() now tries harder to return properly encoded unicode objects
+ on all python versions
+- install plain py.test/py.which scripts also for Jython, this helps to
+ get canonical script paths in virtualenv situations
+- make path.bestrelpath(path) return ".", note that when calling
+ X.bestrelpath the assumption is that X is a directory.
+- make initial conftest discovery ignore "--" prefixed arguments
+- fix resultlog plugin when used in an multicpu/multihost xdist situation
+ (thanks Jakub Gustak)
+- perform distributed testing related reporting in the xdist-plugin
+ rather than having dist-related code in the generic py.test
+ distribution
+- fix homedir detection on Windows
+- ship distribute_setup.py version 0.6.13
+
+Changes between 1.3.0 and 1.3.1
+==================================================
+
+New features
+++++++++++++++++++
+
+- issue91: introduce new py.test.xfail(reason) helper
+ to imperatively mark a test as expected to fail. Can
+ be used from within setup and test functions. This is
+ useful especially for parametrized tests when certain
+ configurations are expected-to-fail. In this case the
+ declarative approach with the @py.test.mark.xfail cannot
+ be used as it would mark all configurations as xfail.
+
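+  For illustration, a minimal sketch (``backendname`` and ``run_backend``
+  are made-up names)::
+
+      import py
+
+      def test_backend(backendname):
+          if backendname == "exotic":
+              py.test.xfail("not supported in this configuration")
+          assert run_backend(backendname)
+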
+- issue102: introduce new --maxfail=NUM option to stop
+ test runs after NUM failures. This is a generalization
+ of the '-x' or '--exitfirst' option which is now equivalent
+ to '--maxfail=1'. Both '-x' and '--maxfail' will
+ now also print a line near the end indicating the Interruption.
+
+- issue89: allow py.test.mark decorators to be used on classes
+ (class decorators were introduced with python2.6) and
+ also allow to have multiple markers applied at class/module level
+ by specifying a list.
+
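+  For example (illustrative only)::
+
+      import py
+
+      @py.test.mark.webtest
+      class TestWebApp:
+          def test_index(self):
+              pass
+
+      # several markers at class/module level via a list:
+      pytestmark = [py.test.mark.slow, py.test.mark.webtest]
+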
+- improve and refine letter reporting in the progress bar:
+ . pass
+ f failed test
+ s skipped tests (reminder: use for dependency/platform mismatch only)
+ x xfailed test (test that was expected to fail)
+ X xpassed test (test that was expected to fail but passed)
+
+ You can use any combination of 'fsxX' with the '-r' extended
+ reporting option. The xfail/xpass results will show up as
+ skipped tests in the junitxml output - which also fixes
+ issue99.
+
+- make py.test.cmdline.main() return the exitstatus instead of raising
+ SystemExit and also allow it to be called multiple times. This of
+  course requires that your application and tests are properly torn
+ down and don't have global state.
+
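+  For instance (``mytests/`` is a made-up path)::
+
+      import py
+
+      exitstatus = py.test.cmdline.main(["-x", "mytests/"])
+      if exitstatus != 0:
+          print("test run failed")
+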
+Fixes / Maintenance
+++++++++++++++++++++++
+
+- improved traceback presentation:
+ - improved and unified reporting for "--tb=short" option
+ - Errors during test module imports are much shorter, (using --tb=short style)
+ - raises shows shorter more relevant tracebacks
+ - --fulltrace now more systematically makes traces longer / inhibits cutting
+
+- improve support for raises and other dynamically compiled code by
+ manipulating python's linecache.cache instead of the previous
+ rather hacky way of creating custom code objects. This makes
+  it work seamlessly on Jython and PyPy where it previously didn't.
+
+- fix issue96: make capturing more resilient against Control-C
+ interruptions (involved somewhat substantial refactoring
+ to the underlying capturing functionality to avoid race
+ conditions).
+
+- fix chaining of conditional skipif/xfail decorators - using multiple
+  @py.test.mark.skipif(condition) decorators now works as expected,
+  including specific reporting of which conditions led to skipping.
+
+- fix issue95: late-import zlib so that it's not required
+ for general py.test startup.
+
+- fix issue94: make reporting more robust against bogus source code
+ (and internally be more careful when presenting unexpected byte sequences)
+
+
+Changes between 1.2.1 and 1.3.0
+==================================================
+
+- deprecate --report option in favour of a new shorter and easier to
+ remember -r option: it takes a string argument consisting of any
+ combination of 'xfsX' characters. They relate to the single chars
+ you see during the dotted progress printing and will print an extra line
+ per test at the end of the test run. This extra line indicates the exact
+  position or test ID that you can directly paste to the py.test cmdline in order
+ to re-run a particular test.
+
+- allow external plugins to register new hooks via the new
+ pytest_addhooks(pluginmanager) hook. The new release of
+ the pytest-xdist plugin for distributed and looponfailing
+ testing requires this feature.
+
+- add a new pytest_ignore_collect(path, config) hook to allow projects and
+ plugins to define exclusion behaviour for their directory structure -
+ for example you may define in a conftest.py this method::
+
+ def pytest_ignore_collect(path):
+ return path.check(link=1)
+
+ to prevent even a collection try of any tests in symlinked dirs.
+
+- new pytest_pycollect_makemodule(path, parent) hook for
+ allowing customization of the Module collection object for a
+ matching test module.
+
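+  A conftest.py sketch (``MyModule`` stands in for a hypothetical custom
+  collector subclass)::
+
+      def pytest_pycollect_makemodule(path, parent):
+          if path.basename.startswith("test_legacy"):
+              return MyModule(path, parent)   # custom Module collector
+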
+- extend and refine xfail mechanism:
+  ``@py.test.mark.xfail(run=False)`` does not run the decorated test
+  ``@py.test.mark.xfail(reason="...")`` prints the reason string in xfail summaries
+  specifying ``--runxfail`` on the command line virtually ignores xfail markers
+
+- expose (previously internal) commonly useful methods:
+  py.io.get_terminal_width() -> return terminal width
+ py.io.ansi_print(...) -> print colored/bold text on linux/win32
+ py.io.saferepr(obj) -> return limited representation string
+
+- expose test outcome related exceptions as py.test.skip.Exception,
+ py.test.raises.Exception etc., useful mostly for plugins
+ doing special outcome interpretation/tweaking
+
+- (issue85) fix junitxml plugin to handle tests with non-ascii output
+
+- fix/refine python3 compatibility (thanks Benjamin Peterson)
+
+- fixes for making the jython/win32 combination work, note however:
+ jython2.5.1/win32 does not provide a command line launcher, see
+ http://bugs.jython.org/issue1491 . See pylib install documentation
+ for how to work around.
+
+- fixes for handling of unicode exception values and unprintable objects
+
+- (issue87) fix unboundlocal error in assertionold code
+
+- (issue86) improve documentation for looponfailing
+
+- refine IO capturing: stdin-redirect pseudo-file now has a NOP close() method
+
+- ship distribute_setup.py version 0.6.10
+
+- added links to the new capturelog and coverage plugins
+
+
+Changes between 1.2.1 and 1.2.0
+=====================================
+
+- refined usage and options for "py.cleanup"::
+
+ py.cleanup # remove "*.pyc" and "*$py.class" (jython) files
+ py.cleanup -e .swp -e .cache # also remove files with these extensions
+ py.cleanup -s # remove "build" and "dist" directory next to setup.py files
+ py.cleanup -d # also remove empty directories
+ py.cleanup -a # synonym for "-s -d -e 'pip-log.txt'"
+ py.cleanup -n # dry run, only show what would be removed
+
+- add a new option "py.test --funcargs" which shows available funcargs
+ and their help strings (docstrings on their respective factory function)
+ for a given test path
+
+- display a short and concise traceback if a funcarg lookup fails
+
+- early-load "conftest.py" files in non-dot first-level sub directories.
+  This allows you to conveniently keep and access test-related options in a ``test``
+ subdir and still add command line options.
+
+- fix issue67: new super-short traceback-printing option: "--tb=line" will print a single line for each failing (python) test indicating its filename, lineno and the failure value
+
+- fix issue78: always call python-level teardown functions even if the
+ according setup failed. This includes refinements for calling setup_module/class functions
+ which will now only be called once instead of the previous behaviour where they'd be called
+ multiple times if they raise an exception (including a Skipped exception). Any exception
+  will be recorded and associated with all tests in the corresponding module/class scope.
+
+- fix issue63: assume <40 columns to be a bogus terminal width, default to 80
+
+- fix pdb debugging to be in the correct frame on raises-related errors
+
+- update apipkg.py to fix an issue where recursive imports might
+ unnecessarily break importing
+
+- fix plugin links
+
+Changes between 1.2 and 1.1.1
+=====================================
+
+- moved dist/looponfailing from py.test core into a new
+ separately released pytest-xdist plugin.
+
+- new junitxml plugin: --junitxml=path will generate a junit style xml file
+ which is processable e.g. by the Hudson CI system.
+
+- new option: --genscript=path will generate a standalone py.test script
+ which will not need any libraries installed. thanks to Ralf Schmitt.
+
+- new option: --ignore will prevent specified path from collection.
+ Can be specified multiple times.
+
+- new option: --confcutdir=dir will make py.test only consider conftest
+ files that are relative to the specified dir.
+
+- new funcarg: "pytestconfig" is the pytest config object for access
+ to command line args and can now be easily used in a test.
+
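+  For example (illustrative only)::
+
+      def test_verbose_hint(pytestconfig):
+          if pytestconfig.option.verbose:
+              print("extra diagnostics enabled")
+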
+- install 'py.test' and `py.which` with a ``-$VERSION`` suffix to
+ disambiguate between Python3, python2.X, Jython and PyPy installed versions.
+
+- new "pytestconfig" funcarg allows access to test config object
+
+- new "pytest_report_header" hook can return additional lines
+ to be displayed at the header of a test run.
+
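+  For example, a conftest.py could add a line to the header (the text is
+  made up)::
+
+      def pytest_report_header(config):
+          return "example project: backend=dummy"
+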
+- (experimental) allow "py.test path::name1::name2::..." for pointing
+ to a test within a test collection directly. This might eventually
+ evolve as a full substitute to "-k" specifications.
+
+- streamlined plugin loading: order is now as documented in
+ customize.html: setuptools, ENV, commandline, conftest.
+  Also, setuptools entry point names are turned into canonical names ("pytest_*")
+
+- automatically skip tests that need 'capfd' but have no os.dup
+
+- allow pytest_generate_tests to be defined in classes as well
+
+- deprecate usage of 'disabled' attribute in favour of pytestmark
+- deprecate definition of Directory, Module, Class and Function nodes
+ in conftest.py files. Use pytest collect hooks instead.
+
+- collection/item node specific runtest/collect hooks are only called exactly
+ on matching conftest.py files, i.e. ones which are exactly below
+ the filesystem path of an item
+
+- change: the first pytest_collect_directory hook to return something
+  will now prevent further hooks from being called.
+
+- change: figleaf plugin now requires --figleaf to run. Also
+ change its long command line options to be a bit shorter (see py.test -h).
+
+- change: pytest doctest plugin is now enabled by default and has a
+ new option --doctest-glob to set a pattern for file matches.
+
+- change: remove internal py._* helper vars, only keep py._pydir
+
+- robustify capturing to survive if custom pytest_runtest_setup
+ code failed and prevented the capturing setup code from running.
+
+- make py.test.* helpers provided by default plugins visible early -
+ works transparently both for pydoc and for interactive sessions
+ which will regularly see e.g. py.test.mark and py.test.importorskip.
+
+- simplify internal plugin manager machinery
+- simplify internal collection tree by introducing a RootCollector node
+
+- fix assert reinterpretation that sees a call containing "keyword=..."
+
+- fix issue66: invoke pytest_sessionstart and pytest_sessionfinish
+ hooks on slaves during dist-testing, report module/session teardown
+ hooks correctly.
+
+- fix issue65: properly handle dist-testing if no
+ execnet/py lib installed remotely.
+
+- skip some install-tests if no execnet is available
+
+- fix docs, fix internal bin/ script generation
+
+
+Changes between 1.1.1 and 1.1.0
+=====================================
+
+- introduce automatic plugin registration via 'pytest11'
+  entrypoints using setuptools' pkg_resources.iter_entry_points
+
+- fix py.test dist-testing to work with execnet >= 1.0.0b4
+
+- re-introduce py.test.cmdline.main() for better backward compatibility
+
+- svn paths: fix a bug with path.check(versioned=True) for svn paths,
+ allow '%' in svn paths, make svnwc.update() default to interactive mode
+ like in 1.0.x and add svnwc.update(interactive=False) to inhibit interaction.
+
+- refine distributed tarball to contain tests and no pyc files
+
+- try harder to have deprecation warnings for py.compat.* accesses
+ report a correct location
+
+Changes between 1.1.0 and 1.0.2
+=====================================
+
+* adjust and improve docs
+
+* remove py.rest tool and internal namespace - it was
+ never really advertised and can still be used with
+ the old release if needed. If there is interest
+  it could be revived into its own tool, I guess.
+
+* fix issue48 and issue59: raise an Error if the module
+ from an imported test file does not seem to come from
+ the filepath - avoids "same-name" confusion that has
+ been reported repeatedly
+
+* merged Ronny's nose-compatibility hacks: now
+ nose-style setup_module() and setup() functions are
+ supported
+
+* introduce generalized py.test.mark function marking
+
+* reshuffle / refine command line grouping
+
+* deprecate parser.addgroup in favour of getgroup which creates an option group
+
+* add --report command line option that allows controlling the display of skipped/xfailed sections
+
+* generalized skipping: a new way to mark python functions with skipif or xfail
+  at function, class and module level based on platform or sys-module attributes.
+
+* extend py.test.mark decorator to allow for positional args
+
+* introduce and test "py.cleanup -d" to remove empty directories
+
+* fix issue #59 - robustify unittest test collection
+
+* make bpython/help interaction work by adding an __all__ attribute
+ to ApiModule, cleanup initpkg
+
+* use MIT license for pylib, add some contributors
+
+* remove py.execnet code and substitute all usages with 'execnet' proper
+
+* fix issue50 - cached_setup now caches according to expectations
+ for test functions with multiple arguments.
+
+* merge Jarko's fixes, issue #45 and #46
+
+* add the ability to specify a path for py.lookup to search in
+
+* fix a funcarg cached_setup bug probably only occurring
+ in distributed testing and "module" scope with teardown.
+
+* many fixes and changes for making the code base python3 compatible,
+ many thanks to Benjamin Peterson for helping with this.
+
+* consolidate builtins implementation to be compatible with >=2.3,
+ add helpers to ease keeping 2 and 3k compatible code
+
+* deprecate py.compat.doctest|subprocess|textwrap|optparse
+
+* deprecate py.magic.autopath, remove py/magic directory
+
+* move pytest assertion handling to py/code and a pytest_assertion
+ plugin, add "--no-assert" option, deprecate py.magic namespaces
+ in favour of (less) py.code ones.
+
+* consolidate and cleanup py/code classes and files
+
+* cleanup py/misc, move tests to bin-for-dist
+
+* introduce delattr/delitem/delenv methods to py.test's monkeypatch funcarg
+
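+  A small usage sketch (environment variable names are made up)::
+
+      import os
+
+      def test_patched_env(monkeypatch):
+          monkeypatch.setenv("MY_FLAG", "1")
+          monkeypatch.delenv("MY_FLAG")                  # removed for this test only
+          monkeypatch.setitem(os.environ, "MY_OPT", "x")
+          monkeypatch.delitem(os.environ, "MY_OPT")
+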
+* consolidate py.log implementation, remove old approach.
+
+* introduce py.io.TextIO and py.io.BytesIO for distinguishing between
+ text/unicode and byte-streams (uses underlying standard lib io.*
+ if available)
+
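+  For example::
+
+      import py
+
+      f = py.io.TextIO()
+      f.write(u"hello")
+      assert f.getvalue() == u"hello"
+
+      g = py.io.BytesIO()
+      g.write("raw data".encode("ascii"))
+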
+* make py.unittest_convert helper script available which converts "unittest.py"
+ style files into the simpler assert/direct-test-classes py.test/nosetests
+ style. The script was written by Laura Creighton.
+
+* simplified internal localpath implementation
+
+Changes between 1.0.1 and 1.0.2
+=====================================
+
+* fixing packaging issues, triggered by fedora redhat packaging,
+ also added doc, examples and contrib dirs to the tarball.
+
+* added a documentation link to the new django plugin.
+
+Changes between 1.0.0 and 1.0.1
+=====================================
+
+* added a 'pytest_nose' plugin which handles nose.SkipTest,
+ nose-style function/method/generator setup/teardown and
+ tries to report functions correctly.
+
+* capturing of unicode writes or encoded strings to sys.stdout/err
+  works better; also terminalwriting was adapted and somewhat
+ unified between windows and linux.
+
+* improved documentation layout and content a lot
+
+* added a "--help-config" option to show conftest.py / ENV-var names for
+ all longopt cmdline options, and some special conftest.py variables.
+ renamed 'conf_capture' conftest setting to 'option_capture' accordingly.
+
+* fix issue #27: better reporting on non-collectable items given on commandline
+ (e.g. pyc files)
+
+* fix issue #33: added --version flag (thanks Benjamin Peterson)
+
+* fix issue #32: adding support for "incomplete" paths to wcpath.status()
+
+* "Test" prefixed classes are *not* collected by default anymore if they
+ have an __init__ method
+
+* monkeypatch setenv() now accepts a "prepend" parameter
+
+* improved reporting of collection error tracebacks
+
+* simplified multicall mechanism and plugin architecture,
+ renamed some internal methods and argnames
+
+Changes between 1.0.0b9 and 1.0.0
+=====================================
+
+* more terse reporting: try to show filesystem paths relative to the current dir
+* improve xfail output a bit
+
+Changes between 1.0.0b8 and 1.0.0b9
+=====================================
+
+* cleanly handle and report final teardown of test setup
+
+* fix svn-1.6 compat issue with py.path.svnwc().versioned()
+ (thanks Wouter Vanden Hove)
+
+* setup/teardown or collection problems now show as ERRORs
+  or with big "E"'s in the progress lines. They are reported
+ and counted separately.
+
+* dist-testing: properly handle test items that get locally
+ collected but cannot be collected on the remote side - often
+ due to platform/dependency reasons
+
+* simplified py.test.mark API - see keyword plugin documentation
+
+* integrate better with logging: capturing now by default captures
+ test functions and their immediate setup/teardown in a single stream
+
+* capsys and capfd funcargs now have a readouterr() and a close() method
+  (under the hood, py.io.StdCapture/FD objects are used, which grew a
+ readouterr() method as well to return snapshots of captured out/err)
+
+* make assert-reinterpretation work better with comparisons not
+  returning bools (reported with numpy, thanks Maciej Fijalkowski)
+
+* reworked per-test output capturing into the pytest_iocapture.py plugin
+ and thus removed capturing code from config object
+
+* item.repr_failure(excinfo) instead of item.repr_failure(excinfo, outerr)
+
+
+Changes between 1.0.0b7 and 1.0.0b8
+=====================================
+
+* pytest_unittest-plugin is now enabled by default
+
+* introduced pytest_keyboardinterrupt hook and
+  refined the pytest_sessionfinish hook, added tests.
+
+* workaround a buggy logging module interaction ("closing already closed
+ files"). Thanks to Sridhar Ratnakumar for triggering.
+
+* if plugins use "py.test.importorskip" for importing
+ a dependency only a warning will be issued instead
+ of exiting the testing process.
+
+* many improvements to docs:
+  - refined funcargs doc, use the term "factory" instead of "provider"
+ - added a new talk/tutorial doc page
+ - better download page
+ - better plugin docstrings
+ - added new plugins page and automatic doc generation script
+
+* fixed teardown problem related to partially failing funcarg setups
+ (thanks MrTopf for reporting), "pytest_runtest_teardown" is now
+ always invoked even if the "pytest_runtest_setup" failed.
+
+* tweaked doctest output for docstrings in py modules,
+ thanks Radomir.
+
+Changes between 1.0.0b3 and 1.0.0b7
+=============================================
+
+* renamed py.test.xfail back to py.test.mark.xfail to avoid
+ two ways to decorate for xfail
+
+* re-added py.test.mark decorator for setting keywords on functions
+ (it was actually documented so removing it was not nice)
+
+* remove scope-argument from request.addfinalizer() because
+ request.cached_setup has the scope arg. TOOWTDI.
+
+* perform setup finalization before reporting failures
+
+* apply modified patches from Andreas Kloeckner to allow
+ test functions to have no func_code (#22) and to make
+ "-k" and function keywords work (#20)
+
+* apply patch from Daniel Peolzleithner (issue #23)
+
+* resolve issue #18, multiprocessing.Manager() and
+ redirection clash
+
+* make __name__ == "__channelexec__" for remote_exec code
+
+Changes between 1.0.0b1 and 1.0.0b3
+=============================================
+
+* plugin classes are removed: one now defines
+ hooks directly in conftest.py or global pytest_*.py
+ files.
+
+* added new pytest_namespace(config) hook that allows
+  injecting helpers directly into the py.test.* namespace.
+
+* documented and refined many hooks
+
+* added new style of generative tests via
+ pytest_generate_tests hook that integrates
+ well with function arguments.
+
+
+Changes between 0.9.2 and 1.0.0b1
+=============================================
+
+* introduced new "funcarg" setup method,
+ see doc/test/funcarg.txt
+
+* introduced plugin architecture and many
+ new py.test plugins, see
+ doc/test/plugins.txt
+
+* teardown_method is now guaranteed to get
+ called after a test method has run.
+
+* new method: py.test.importorskip(mod,minversion)
+ will either import or call py.test.skip()
+
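+  For example (``docutils`` and the version are just placeholders)::
+
+      import py
+
+      docutils = py.test.importorskip("docutils", minversion="0.3")
+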
+* completely revised internal py.test architecture
+
+* new py.process.ForkedFunc object allowing to
+ fork execution of a function to a sub process
+ and getting a result back.
+
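+  A rough sketch, assuming the result object returned by waitfinish()
+  exposes the function's return value as ``retval``::
+
+      import py
+
+      def compute():
+          return 42
+
+      result = py.process.ForkedFunc(compute).waitfinish()
+      assert result.retval == 42
+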
+XXX lots of things missing here XXX
+
+Changes between 0.9.1 and 0.9.2
+===============================
+
+* refined installation and metadata, created new setup.py,
+ now based on setuptools/ez_setup (thanks to Ralf Schmitt
+ for his support).
+
+* improved the way of making py.* scripts available in
+ windows environments, they are now added to the
+ Scripts directory as ".cmd" files.
+
+* py.path.svnwc.status() now is more complete and
+ uses xml output from the 'svn' command if available
+ (Guido Wesdorp)
+
+* fix for py.path.svn* to work with svn 1.5
+ (Chris Lamb)
+
+* fix path.relto(otherpath) method on windows to
+ use normcase for checking if a path is relative.
+
+* py.test's traceback is better parseable from editors
+ (follows the filenames:LINENO: MSG convention)
+ (thanks to Osmo Salomaa)
+
+* fix to javascript-generation, "py.test --runbrowser"
+ should work more reliably now
+
+* removed previously accidentally added
+ py.test.broken and py.test.notimplemented helpers.
+
+* there now is a py.__version__ attribute
+
+Changes between 0.9.0 and 0.9.1
+===============================
+
+This is a fairly complete list of changes between 0.9 and 0.9.1, which can
+serve as a reference for developers.
+
+* allowing + signs in py.path.svn urls [39106]
+* fixed support for Failed exceptions without excinfo in py.test [39340]
+* added support for killing processes for Windows (as well as platforms that
+ support os.kill) in py.misc.killproc [39655]
+* added setup/teardown for generative tests to py.test [40702]
+* added detection of FAILED TO LOAD MODULE to py.test [40703, 40738, 40739]
+* fixed problem with calling .remove() on wcpaths of non-versioned files in
+ py.path [44248]
+* fixed some import and inheritance issues in py.test [41480, 44648, 44655]
+* fail to run greenlet tests when pypy is available, but without stackless
+ [45294]
+* small fixes in rsession tests [45295]
+* fixed issue with 2.5 type representations in py.test [45483, 45484]
+* made that internal reporting issues displaying is done atomically in py.test
+ [45518]
+* made that non-existing files are ignored by the py.lookup script [45519]
+* improved exception name creation in py.test [45535]
+* made that less threads are used in execnet [merge in 45539]
+* removed lock required for atomical reporting issue displaying in py.test
+ [45545]
+* removed globals from execnet [45541, 45547]
+* refactored cleanup mechanics, made that setDaemon is set to 1 to make atexit
+ get called in 2.5 (py.execnet) [45548]
+* fixed bug in joining threads in py.execnet's servemain [45549]
+* refactored py.test.rsession tests to not rely on exact output format anymore
+ [45646]
+* using repr() on test outcome [45647]
+* added 'Reason' classes for py.test.skip() [45648, 45649]
+* killed some unnecessary sanity check in py.test.collect [45655]
+* avoid using os.tmpfile() in py.io.fdcapture because on Windows it's only
+ usable by Administrators [45901]
+* added support for locking and non-recursive commits to py.path.svnwc [45994]
+* locking files in py.execnet to prevent CPython from segfaulting [46010]
+* added export() method to py.path.svnurl
+* fixed -d -x in py.test [47277]
+* fixed argument concatenation problem in py.path.svnwc [49423]
+* restore py.test behaviour that it exits with code 1 when there are failures
+ [49974]
+* don't fail on html files that don't have an accompanying .txt file [50606]
+* fixed 'utestconvert.py < input' [50645]
+* small fix for code indentation in py.code.source [50755]
+* fix _docgen.py documentation building [51285]
+* improved checks for source representation of code blocks in py.test [51292]
+* added support for passing authentication to py.path.svn* objects [52000,
+ 52001]
+* removed sorted() call for py.apigen tests in favour of [].sort() to support
+ Python 2.3 [52481]
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.3.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.3.txt
new file mode 100644
index 0000000000..c62cb85905
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.3.txt
@@ -0,0 +1,26 @@
+py.test/pylib 1.3.3: windows and other fixes
+===========================================================================
+
+pylib/py.test 1.3.3 is a minor bugfix release featuring some improvements
+and fixes. See changelog_ for full history.
+
+have fun,
+holger krekel
+
+.. _changelog: ../changelog.html
+
+Changes between 1.3.2 and 1.3.3
+==================================================
+
+- fix issue113: assertion representation problem with triple-quoted strings
+ (and possibly other cases)
+- make conftest loading detect that a conftest file with the same
+ content was already loaded, avoids surprises in nested directory structures
+ which can be produced e.g. by Hudson. It probably removes the need to use
+ --confcutdir in most cases.
+- fix terminal coloring for win32
+ (thanks Michael Foord for reporting)
+- fix weirdness: make terminal width detection work on stdout instead of stdin
+ (thanks Armin Ronacher for reporting)
+- remove trailing whitespace in all py/text distribution files
+
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.4.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.4.txt
new file mode 100644
index 0000000000..c156c8bdb3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.3.4.txt
@@ -0,0 +1,22 @@
+py.test/pylib 1.3.4: fixes and new native traceback option
+===========================================================================
+
+pylib/py.test 1.3.4 is a minor maintenance release mostly containing bug fixes
+and a new "--tb=native" traceback option to show "normal" Python standard
+tracebacks instead of the py.test enhanced tracebacks. See below for more
+change info and http://pytest.org for more general information on features
+and configuration of the testing tool.
+
+Thanks to the issue reporters and generally to Ronny Pfannschmidt for help.
+
+cheers,
+holger krekel
+
+Changes between 1.3.3 and 1.3.4
+==================================================
+
+- fix issue111: improve install documentation for windows
+- fix issue119: fix custom collectability of __init__.py as a module
+- fix issue116: --doctestmodules work with __init__.py files as well
+- fix issue115: unify internal exception passthrough/catching/GeneratorExit
+- fix issue118: new --tb=native for presenting cpython-standard exceptions
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.4.0.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.4.0.txt
new file mode 100644
index 0000000000..1c9fa75604
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.4.0.txt
@@ -0,0 +1,47 @@
+
+.. _`release-1.4.0`:
+
+py-1.4.0: cross-python lib for path, code, io, ... manipulations
+===========================================================================
+
+"py" is a small library comprising APIs for filesystem and svn path
+manipulations, dynamic code construction and introspection, a Py2/Py3
+compatibility namespace ("py.builtin"), IO capturing, terminal colored printing
+(on windows and linux), ini-file parsing and a lazy import mechanism.
+It runs unmodified on all Python interpreters compatible with Python2.4 up
+until Python 3.2. The general goal with "py" is to provide stable APIs
+for some common tasks that are continuously tested against many Python
+interpreters and thus also to help transition. Here are some docs:
+
+ http://pylib.org
+
+NOTE: The prior py-1.3.X versions contained "py.test" which now comes
+as its own separate "pytest" distribution and was just released
+as "pytest-2.0.0", see here for the revamped docs:
+
+ http://pytest.org
+
+And "py.cleanup|py.lookup|py.countloc" etc. helpers are now part of
+the pycmd distribution, see https://pypi.org/project/pycmd/
+
+This makes "py-1.4.0" a simple library which does not install
+any command line utilities anymore.
+
+cheers,
+holger
+
+Changes between 1.3.4 and 1.4.0
+-------------------------------------
+
+- py.test was moved to a separate "pytest" package. What remains is
+ a stub hook which will proxy ``import py.test`` to ``pytest``.
+- all command line tools ("py.cleanup/lookup/countloc/..." moved
+ to "pycmd" package)
+- removed the old and deprecated "py.magic" namespace
+- use apipkg-1.1 and make py.apipkg.initpkg|ApiModule available
+- add py.iniconfig module for brain-dead easy ini-config file parsing
+- introduce py.builtin.any()
+- path objects have a .dirname attribute now (equivalent to
+ os.path.dirname(path))
+- path.visit() accepts breadthfirst (bf) and sort options
+- remove deprecated py.compat namespace
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.4.1.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.4.1.txt
new file mode 100644
index 0000000000..6ed72aa418
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/release-1.4.1.txt
@@ -0,0 +1,47 @@
+
+.. _`release-1.4.1`:
+
+py-1.4.1: cross-python lib for fs path, code, io, ... manipulations
+===========================================================================
+
+This is a bug fix release of the "py" lib, see below for detailed changes.
+The py lib is a small library comprising APIs for filesystem and svn path
+manipulations, dynamic code construction and introspection, a Py2/Py3
+compatibility namespace ("py.builtin"), IO capturing, terminal colored printing
+(on windows and linux), ini-file parsing and a lazy import mechanism.
+It runs unmodified on all Python interpreters compatible with Python2.4 up
+until Python 3.2, PyPy and Jython. The general goal with "py" is to
+provide stable APIs for some common tasks that are continuously tested
+against many Python interpreters and thus also to help transition. Here
+are some docs:
+
+ http://pylib.org
+
+NOTE: The prior py-1.3.X versions contained "py.test" which since py-1.4.0
+comes as its own separate "pytest" distribution, see:
+
+ http://pytest.org
+
+Also, the "py.cleanup|py.lookup|py.countloc" helpers are now part of
+the pycmd distribution, see https://pypi.org/project/pycmd/
+
+
+Changes between 1.4.0 and 1.4.1
+==================================================
+
+- fix issue1 - py.error.* classes to be pickleable
+
+- fix issue2 - on windows32 use PATHEXT as the list of potential
+  extensions to find binaries with py.path.local.sysfind(commandname)
+
+- fix (pytest-) issue10 and refine assertion reinterpretation
+ to avoid breaking if the __nonzero__ of an object fails
+
+- fix (pytest-) issue17 where python3 does not like star-imports,
+ leading to misrepresentation of import-errors in test modules
+
+- fix ``py.error.*`` attribute access on pypy
+
+- allow path.samefile(arg) to succeed when arg is a relative filename
+
+- fix (pytest-) issue20 path.samefile(relpath) works as expected now
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/announce/releases.txt b/testing/web-platform/tests/tools/third_party/py/doc/announce/releases.txt
new file mode 100644
index 0000000000..309c29bac5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/announce/releases.txt
@@ -0,0 +1,16 @@
+=============
+Release notes
+=============
+
+Contents:
+
+.. toctree::
+ :maxdepth: 2
+
+.. include: release-1.1.0
+.. include: release-1.0.2
+
+ release-1.0.1
+ release-1.0.0
+ release-0.9.2
+ release-0.9.0
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/changelog.txt b/testing/web-platform/tests/tools/third_party/py/doc/changelog.txt
new file mode 100644
index 0000000000..0c9d0928e7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/changelog.txt
@@ -0,0 +1,3 @@
+.. _`changelog`:
+
+.. include:: ../CHANGELOG.rst
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/code.txt b/testing/web-platform/tests/tools/third_party/py/doc/code.txt
new file mode 100644
index 0000000000..bdd8691da0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/code.txt
@@ -0,0 +1,150 @@
+================================================================================
+py.code: higher level python code and introspection objects
+================================================================================
+
+``py.code`` provides higher level APIs and objects for Code, Frame, Traceback,
+ExceptionInfo and source code construction. The ``py.code`` library
+tries to simplify accessing the code objects as well as creating them.
+There is a small set of interfaces a user needs to deal with, all nicely
+bundled together, and with a rich set of 'Pythonic' functionality.
+
+Contents of the library
+=======================
+
+Every object in the ``py.code`` library wraps a Python object related
+to code objects, source code, frames and tracebacks: the ``py.code.Code``
+class wraps code objects, ``py.code.Source`` source snippets,
+``py.code.Traceback`` exception tracebacks, ``py.code.Frame`` frame
+objects (as found in e.g. tracebacks) and ``py.code.ExceptionInfo`` the
+tuple provided by sys.exc_info() (containing exception and traceback
+information when an exception occurs). Also in the library is a helper function
+``py.code.compile()`` that provides the same functionality as Python's
+built-in 'compile()' function, but returns a wrapped code object.
+
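+A small usage sketch::
+
+    import py
+
+    co = py.code.compile("x = 21 * 2")
+    namespace = {}
+    exec(co, namespace)          # the returned object behaves like a regular code object
+    assert namespace["x"] == 42
+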
+The wrappers
+============
+
+``py.code.Code``
+-------------------
+
+Code objects are instantiated with a code object or a callable as argument,
+and provide functionality to compare themselves with other Code objects, get to
+the source file or its contents, create new Code objects from scratch, etc.
+
+A quick example::
+
+ >>> import py
+ >>> c = py.code.Code(py.path.local.read)
+ >>> c.path.basename
+ 'common.py'
+ >>> isinstance(c.source(), py.code.Source)
+ True
+ >>> str(c.source()).split('\n')[0]
+ "def read(self, mode='r'):"
+
+.. autoclass:: py.code.Code
+ :members:
+ :inherited-members:
+
+
+``py.code.Source``
+---------------------
+
+Source objects wrap snippets of Python source code, providing a simple yet
+powerful interface to read, deindent, slice, compare, compile and manipulate
+them, things that are not so easy in core Python.
+
+Example::
+
+ >>> s = py.code.Source("""\
+ ... def foo():
+ ... print "foo"
+ ... """)
+ >>> str(s).startswith('def') # automatic de-indentation!
+ True
+ >>> s.isparseable()
+ True
+ >>> sub = s.getstatement(1) # get the statement starting at line 1
+ >>> str(sub).strip() # XXX why is the strip() required?!?
+ 'print "foo"'
+
+.. autoclass:: py.code.Source
+ :members:
+
+
+``py.code.Traceback``
+------------------------
+
+Tracebacks are usually not very easy to examine: you need to access certain
+somewhat hidden attributes of the traceback's items (resulting in expressions
+such as 'fname = tb.tb_next.tb_frame.f_code.co_filename'). The Traceback
+interface (and its TracebackItem children) tries to improve this.
+
+Example::
+
+ >>> import sys
+ >>> try:
+ ... py.path.local(100) # illegal argument
+ ... except:
+ ... exc, e, tb = sys.exc_info()
+ >>> t = py.code.Traceback(tb)
+ >>> first = t[1] # get the second entry (first is in this doc)
+ >>> first.path.basename # second is in py/path/local.py
+ 'local.py'
+ >>> isinstance(first.statement, py.code.Source)
+ True
+ >>> str(first.statement).strip().startswith('raise ValueError')
+ True
+
+.. autoclass:: py.code.Traceback
+ :members:
+
+``py.code.Frame``
+--------------------
+
+Frame wrappers are used in ``py.code.Traceback`` items, and will usually not
+directly be instantiated. They provide some nice methods to evaluate code
+'inside' the frame (using the frame's local variables), get to the underlying
+code (frames have a code attribute that points to a ``py.code.Code`` object)
+and examine the arguments.
+
+Example (using the 'first' TracebackItem instance created above)::
+
+ >>> frame = first.frame
+ >>> isinstance(frame.code, py.code.Code)
+ True
+ >>> isinstance(frame.eval('self'), py.path.local)
+ True
+ >>> [namevalue[0] for namevalue in frame.getargs()]
+ ['cls', 'path']
+
+.. autoclass:: py.code.Frame
+ :members:
+
+``py.code.ExceptionInfo``
+----------------------------
+
+A wrapper around the tuple returned by sys.exc_info() (will call sys.exc_info()
+itself if the tuple is not provided as an argument), provides some handy
+attributes to easily access the traceback and exception string.
+
+Example::
+
+ >>> import sys
+ >>> try:
+ ... foobar()
+ ... except:
+ ... excinfo = py.code.ExceptionInfo()
+ >>> excinfo.typename
+ 'NameError'
+ >>> isinstance(excinfo.traceback, py.code.Traceback)
+ True
+ >>> excinfo.exconly()
+ "NameError: name 'foobar' is not defined"
+
+.. autoclass:: py.code.ExceptionInfo
+ :members:
+
+.. autoclass:: py.code.Traceback
+ :members:
+
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/conf.py b/testing/web-platform/tests/tools/third_party/py/doc/conf.py
new file mode 100644
index 0000000000..de4cbf8a46
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/conf.py
@@ -0,0 +1,263 @@
+# -*- coding: utf-8 -*-
+#
+# py documentation build configuration file, created by
+# sphinx-quickstart on Thu Oct 21 08:30:10 2010.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.txt'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'py'
+copyright = u'2010, holger krekel et. al.'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+# The full version, including alpha/beta/rc tags.
+import py
+release = py.__version__
+version = ".".join(release.split(".")[:2])
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'py'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'py.tex', u'py Documentation',
+ u'holger krekel et. al.', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'py', u'py Documentation',
+ [u'holger krekel et. al.'], 1)
+]
+
+autodoc_member_order = "bysource"
+autodoc_default_flags = "inherited-members"
+
+# -- Options for Epub output ---------------------------------------------------
+
+# Bibliographic Dublin Core info.
+epub_title = u'py'
+epub_author = u'holger krekel et. al.'
+epub_publisher = u'holger krekel et. al.'
+epub_copyright = u'2010, holger krekel et. al.'
+
+# The language of the text. It defaults to the language option
+# or en if the language is not set.
+#epub_language = ''
+
+# The scheme of the identifier. Typical schemes are ISBN or URL.
+#epub_scheme = ''
+
+# The unique identifier of the text. This can be a ISBN number
+# or the project homepage.
+#epub_identifier = ''
+
+# A unique identification for the text.
+#epub_uid = ''
+
+# HTML files that should be inserted before the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_pre_files = []
+
+# HTML files that should be inserted after the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_post_files = []
+
+# A list of files that should not be packed into the epub file.
+#epub_exclude_files = []
+
+# The depth of the table of contents in toc.ncx.
+#epub_tocdepth = 3
+
+# Allow duplicate toc entries.
+#epub_tocdup = True
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'http://docs.python.org/': None}
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/download.html b/testing/web-platform/tests/tools/third_party/py/doc/download.html
new file mode 100644
index 0000000000..5f4c466402
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/download.html
@@ -0,0 +1,18 @@
+<html>
+ <head>
+ <meta http-equiv="refresh" content=" 1 ; URL=install.html" />
+ </head>
+
+ <body>
+<script type="text/javascript">
+var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
+document.write(unescape("%3Cscript src='" + gaJsHost + "google-analytics.com/ga.js' type='text/javascript'%3E%3C/script%3E"));
+</script>
+<script type="text/javascript">
+try {
+var pageTracker = _gat._getTracker("UA-7597274-3");
+pageTracker._trackPageview();
+} catch(err) {}</script>
+</body>
+</html>
+
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/example/genhtml.py b/testing/web-platform/tests/tools/third_party/py/doc/example/genhtml.py
new file mode 100644
index 0000000000..7a6d493497
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/example/genhtml.py
@@ -0,0 +1,13 @@
+from py.xml import html
+
+paras = "First Para", "Second para"
+
+doc = html.html(
+ html.head(
+ html.meta(name="Content-Type", value="text/html; charset=latin1")),
+ html.body(
+ [html.p(p) for p in paras]))
+
+print(unicode(doc).encode('latin1'))
+
+
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/example/genhtmlcss.py b/testing/web-platform/tests/tools/third_party/py/doc/example/genhtmlcss.py
new file mode 100644
index 0000000000..facca77b78
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/example/genhtmlcss.py
@@ -0,0 +1,23 @@
+import py
+html = py.xml.html
+
+class my(html):
+ "a custom style"
+ class body(html.body):
+ style = html.Style(font_size = "120%")
+
+ class h2(html.h2):
+ style = html.Style(background = "grey")
+
+ class p(html.p):
+ style = html.Style(font_weight="bold")
+
+doc = my.html(
+ my.head(),
+ my.body(
+ my.h2("hello world"),
+ my.p("bold as bold can")
+ )
+)
+
+print(doc.unicode(indent=2))
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/example/genxml.py b/testing/web-platform/tests/tools/third_party/py/doc/example/genxml.py
new file mode 100644
index 0000000000..444a4ca52c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/example/genxml.py
@@ -0,0 +1,17 @@
+
+import py
+class ns(py.xml.Namespace):
+ pass
+
+doc = ns.books(
+ ns.book(
+ ns.author("May Day"),
+ ns.title("python for java programmers"),),
+ ns.book(
+ ns.author("why", class_="somecssclass"),
+ ns.title("Java for Python programmers"),),
+ publisher="N.N",
+ )
+print(doc.unicode(indent=2).encode('utf8'))
+
+
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/faq.txt b/testing/web-platform/tests/tools/third_party/py/doc/faq.txt
new file mode 100644
index 0000000000..6d374e1db9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/faq.txt
@@ -0,0 +1,170 @@
+==================================
+Frequently Asked Questions
+==================================
+
+.. contents::
+ :local:
+ :depth: 2
+
+
+On naming, nosetests, licensing and magic
+===========================================
+
+Why the ``py`` naming? Why not ``pytest``?
+----------------------------------------------------
+
+This mostly has historic reasons - the aim is
+to get away from the somewhat questionable 'py' name
+at some point. These days (2010) the 'py' library
+almost completely comprises APIs that are used
+by the ``py.test`` tool. There also are some
+other uses, e.g. of the ``py.path.local()`` and
+other path implementations. So it requires some
+work to factor them out and do the shift.
+
+Why the ``py.test`` naming?
+------------------------------------
+
+because of TAB-completion under Bash/Shells. If you hit
+``py.<TAB>`` you'll get a list of available development
+tools that all share the ``py.`` prefix. Another motivation
+was to unify the package ("py.test") and tool filename.
+
+What's py.test's relation to ``nosetests``?
+---------------------------------------------
+
+py.test and nose_ share basic philosophy when it comes
+to running Python tests. In fact,
+with py.test-1.1.0 it is even easier to run many test suites
+that currently work with ``nosetests``. nose_ was created
+as a clone of ``py.test`` when py.test was in the ``0.8`` release
+cycle so some of the newer features_ introduced with py.test-1.0
+and py.test-1.1 have no counterpart in nose_.
+
+.. _nose: https://nose.readthedocs.io/
+.. _features: test/features.html
+.. _apipkg: https://pypi.org/project/apipkg/
+
+
+What's this "magic" with py.test?
+----------------------------------------
+
+Here are some issues where people have used the term "magic" in the past:
+
+* `py/__init__.py`_ uses the apipkg_ mechanism for lazy-importing
+ and full control on what API you get when importing "import py".
+
+* when an ``assert`` statement fails, py.test re-interprets the expression
+ to show intermediate values if a test fails. If your expression
+ has side effects the intermediate values may not be the same, obfuscating
+ the initial error (this is also explained at the command line if it happens).
+  ``py.test --no-assert`` turns off assert re-interpretation.
+  Sidenote: it is good practice to avoid asserts with side effects (see the
+  small sketch below).
+
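+A small sketch of such an obfuscating side effect::
+
+    items = [1, 2, 3]
+    assert items.pop() == 2   # fails with 3; re-evaluation pops 2 and muddles the report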
+
+.. _`py namespaces`: index.html
+.. _`py/__init__.py`: http://bitbucket.org/hpk42/py-trunk/src/trunk/py/__init__.py
+
+Where does my ``py.test`` come/import from?
+----------------------------------------------
+
+You can issue::
+
+ py.test --version
+
+which tells you both version and import location of the tool.
+
+
+function arguments, parametrized tests and setup
+====================================================
+
+.. _funcargs: test/funcargs.html
+
+Is using funcarg- versus xUnit-based setup a style question?
+---------------------------------------------------------------
+
+It depends. For simple applications or for people experienced
+with nose_ or unittest-style test setup, using `xUnit style setup`_
+makes some sense. For larger test suites, parametrized testing
+or setup of complex test resources using funcargs_ is recommended.
+Moreover, funcargs are ideal for writing advanced test support
+code (like e.g. the monkeypatch_, the tmpdir_ or capture_ funcargs)
+because the support code can register setup/teardown functions
+in a managed class/module/function scope.
+
+.. _monkeypatch: test/plugin/monkeypatch.html
+.. _tmpdir: test/plugin/tmpdir.html
+.. _capture: test/plugin/capture.html
+.. _`xUnit style setup`: test/xunit_setup.html
+.. _`pytest_nose`: test/plugin/nose.html
+
+.. _`why pytest_pyfuncarg__ methods?`:
+
+Why the ``pytest_funcarg__*`` name for funcarg factories?
+---------------------------------------------------------------
+
+When experimenting with funcargs an explicit registration mechanism
+was considered. But lacking a good use case for this indirection and
+flexibility we decided to go for `Convention over Configuration`_ and
+allow directly specifying the factory. Besides removing the need
+for an indirection, it allows one to "grep" for ``pytest_funcarg__MYARG``
+and safely find all factory functions for the ``MYARG`` function
+argument. It helps to bridge the de-coupling of function
+argument usage and creation; a tiny example follows below.
+
+.. _`Convention over Configuration`: https://en.wikipedia.org/wiki/Convention_over_configuration
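+
+For example, a factory and a test using it might look like this
+(``setup_database`` is a made-up helper)::
+
+    def pytest_funcarg__mydb(request):
+        return setup_database()
+
+    def test_query(mydb):
+        assert mydb is not None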
+
+Can I yield multiple values from a factory function?
+-----------------------------------------------------
+
+There are two conceptual reasons why yielding from a factory function
+is not possible:
+
+* Calling factories for obtaining test function arguments
+ is part of setting up and running a test. At that
+ point it is not possible to add new test calls to
+ the test collection anymore.
+
+* If multiple factories yielded values there would
+ be no natural place to determine the combination
+ policy - in real-world examples some combinations
+ often should not run.
+
+Use the `pytest_generate_tests`_ hook to solve both issues
+and implement the `parametrization scheme of your choice`_.
+
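+As a rough sketch (the ``numiter`` argument is made up), such a hook in a
+``conftest.py`` could add several calls per matching test function::
+
+    # conftest.py
+    def pytest_generate_tests(metafunc):
+        if "numiter" in metafunc.funcargnames:
+            for i in (10, 100):
+                metafunc.addcall(funcargs=dict(numiter=i))
+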
+.. _`pytest_generate_tests`: test/funcargs.html#parametrizing-tests
+.. _`parametrization scheme of your choice`: https://holgerkrekel.net/2009/05/13/parametrizing-python-tests-generalized/
+
+
+py.test interaction with other packages
+===============================================
+
+Issues with py.test, multiprocessing and setuptools?
+------------------------------------------------------------
+
+On Windows the multiprocessing package will instantiate sub processes
+by pickling and thus implicitly re-import a lot of local modules.
+Unfortunately, setuptools-0.6.11 does not protect its generated command
+line script with an ``if __name__=='__main__'`` guard. This leads to infinite
+recursion when running a test that instantiates Processes.
+There are these workarounds:
+
+* `install Distribute`_ as a drop-in replacement for setuptools
+ and install py.test
+
+* `directly use a checkout`_ which avoids all setuptools/Distribute
+ installation
+
+If those options are not available to you, you may also manually
+fix the script that setuptools creates by inserting an
+``if __name__ == '__main__'`` guard. Or you can create a "pytest.py"
+script with this content and invoke it with your Python interpreter::
+
+ import py
+ if __name__ == '__main__':
+ py.cmdline.pytest()
+
+.. _`directly use a checkout`: install.html#directly-use-a-checkout
+
+.. _`install distribute`: https://pypi.org/project/distribute/
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/img/pylib.png b/testing/web-platform/tests/tools/third_party/py/doc/img/pylib.png
new file mode 100644
index 0000000000..2e10d43886
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/img/pylib.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/index.txt b/testing/web-platform/tests/tools/third_party/py/doc/index.txt
new file mode 100644
index 0000000000..c700b17e98
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/index.txt
@@ -0,0 +1,39 @@
+.. py documentation master file, created by
+ sphinx-quickstart on Thu Oct 21 08:30:10 2010.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Welcome to py's documentation!
+=================================
+
+See :ref:`CHANGELOG <changelog>` for the latest changes.
+
+.. _`pytest distribution`: http://pytest.org
+
+Contents:
+
+.. toctree::
+   :maxdepth: 2
+
+   install
+   path
+   code
+   io
+   log
+   xml
+   misc
+
+.. toctree::
+   :hidden:
+   :glob:
+
+   announce/release-2.0.0
+   changelog
+   announce/*
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`search`
+
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/install.txt b/testing/web-platform/tests/tools/third_party/py/doc/install.txt
new file mode 100644
index 0000000000..93c79e3b2d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/install.txt
@@ -0,0 +1,91 @@
+
+.. _`py`:
+.. _`index page`: https://pypi.org/project/py/
+
+installation info in a nutshell
+===================================================
+
+**PyPI name**: py_
+
+**Pythons**: CPython 2.7, 3.5, 3.6, 3.7, PyPy-5.4
+
+**Operating systems**: Linux, Windows, OSX, Unix
+
+**Requirements**: setuptools_ or Distribute_
+
+**Installers**: ``easy_install`` and ``pip``
+
+**Code repository**: https://github.com/pytest-dev/py
+
+easy install or pip ``py``
+-----------------------------
+
+Both `Distribute`_ and setuptools_ provide the ``easy_install``
+installation tool with which you can type into a command line window::
+
+ easy_install -U py
+
+to install the latest release of the py lib. The ``-U`` switch
+will trigger an upgrade if you already have an older version installed.
+
+.. note::
+
+ As of version 1.4 py does not contain py.test anymore - you
+ need to install the new `pytest`_ distribution.
+
+.. _pytest: http://pytest.org
+
+Working from version control or a tarball
+-----------------------------------------------
+
+To follow development or start experiments, check out the
+complete code and documentation source::
+
+ git clone https://github.com/pytest-dev/py
+
+Development takes place on the 'master' branch.
+
+You can also go to the Python Package Index and
+download and unpack a TAR file::
+
+ https://pypi.org/project/py/
+
+activating a checkout with setuptools
+--------------------------------------------
+
+With a working `Distribute`_ or setuptools_ installation you can type::
+
+ python setup.py develop
+
+in order to work in-place with the tools and the lib of your checkout.
+
+.. _`no-setuptools`:
+
+.. _`directly use a checkout`:
+
+.. _`setuptools`: https://pypi.org/project/setuptools/
+
+
+Mailing list and issue tracker
+--------------------------------------
+
+- `py-dev developers list`_ and `commit mailing list`_.
+
+- ``#pytest`` `on irc.libera.chat <ircs://irc.libera.chat:6697/#pytest>`_ IRC
+ channel for random questions (using an IRC client, `via webchat
+ <https://web.libera.chat/#pytest>`_, or `via Matrix
+ <https://matrix.to/#/%23pytest:libera.chat>`_).
+
+- `issue tracker`_: use the issue tracker to report
+  bugs or request features.
+
+.. _`issue tracker`: https://github.com/pytest-dev/py/issues
+
+.. _codespeak: http://codespeak.net/
+.. _`py-dev`:
+.. _`development mailing list`:
+.. _`py-dev developers list`: http://codespeak.net/mailman/listinfo/py-dev
+.. _`py-svn`:
+.. _`commit mailing list`: http://codespeak.net/mailman/listinfo/py-svn
+
+.. include:: links.inc
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/io.txt b/testing/web-platform/tests/tools/third_party/py/doc/io.txt
new file mode 100644
index 0000000000..c11308a6d2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/io.txt
@@ -0,0 +1,59 @@
+=======
+py.io
+=======
+
+
+The 'py' lib provides helper classes for capturing IO during
+execution of a program.
+
+IO Capturing examples
+===============================================
+
+``py.io.StdCapture``
+---------------------------
+
+Basic Example::
+
+    >>> import py
+    >>> capture = py.io.StdCapture()
+    >>> print("hello")
+    >>> out,err = capture.reset()
+    >>> out.strip() == "hello"
+    True
+
+For calling functions you may use a shortcut::
+
+    >>> import py
+    >>> def f(): print("hello")
+    >>> res, out, err = py.io.StdCapture.call(f)
+    >>> out.strip() == "hello"
+    True
+
+``py.io.StdCaptureFD``
+---------------------------
+
+If you also want to capture writes to the stdout/stderr
+file descriptors you may invoke::
+
+    >>> import py, sys
+    >>> capture = py.io.StdCaptureFD(out=False, in_=False)
+    >>> _ = sys.stderr.write("world")
+    >>> out,err = capture.reset()
+    >>> err
+    'world'
+
+py.io object reference
+============================
+
+.. autoclass:: py.io.StdCaptureFD
+ :members:
+ :inherited-members:
+
+.. autoclass:: py.io.StdCapture
+ :members:
+ :inherited-members:
+
+.. autoclass:: py.io.TerminalWriter
+ :members:
+ :inherited-members:
+
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/links.inc b/testing/web-platform/tests/tools/third_party/py/doc/links.inc
new file mode 100644
index 0000000000..b61d01c696
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/links.inc
@@ -0,0 +1,15 @@
+
+.. _`skipping plugin`: plugin/skipping.html
+.. _`funcargs mechanism`: funcargs.html
+.. _`doctest.py`: https://docs.python.org/library/doctest.html
+.. _`xUnit style setup`: xunit_setup.html
+.. _`pytest_nose`: plugin/nose.html
+.. _`reStructured Text`: http://docutils.sourceforge.net
+.. _`Python debugger`: http://docs.python.org/lib/module-pdb.html
+.. _nose: https://nose.readthedocs.io/
+.. _pytest: https://pypi.org/project/pytest/
+.. _`setuptools`: https://pypi.org/project/setuptools/
+.. _`distribute`: https://pypi.org/project/distribute/
+.. _`pip`: https://pypi.org/project/pip/
+.. _`virtualenv`: https://pypi.org/project/virtualenv/
+.. _hudson: http://hudson-ci.org/
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/log.txt b/testing/web-platform/tests/tools/third_party/py/doc/log.txt
new file mode 100644
index 0000000000..ca60fcac25
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/log.txt
@@ -0,0 +1,208 @@
+.. role:: code(literal)
+.. role:: file(literal)
+
+.. XXX figure out how the code literals should be dealt with in sphinx. There is probably something builtin.
+
+========================================
+py.log documentation and musings
+========================================
+
+
+Foreword
+========
+
+This document is an attempt to briefly state the actual specification of the
+:code:`py.log` module. It was written by Francois Pinard and also contains
+some ideas for enhancing the py.log facilities.
+
+NOTE that :code:`py.log` is subject to refactorings; it may change with
+the next release.
+
+This document is meant to trigger or facilitate discussions. It shamelessly
+steals from the `Agile Testing`__ comments, and from other sources as well,
+without really trying to sort them out.
+
+__ http://agiletesting.blogspot.com/2005/06/keyword-based-logging-with-py-library.html
+
+
+Logging organisation
+====================
+
+The :code:`py.log` module aims at a niche comparable to that of the
+`logging module`__ found within the standard Python distribution, yet
+with much simpler paradigms for configuration and usage.
+
+__ http://www.python.org/doc/2.4.2/lib/module-logging.html
+
+Holger Krekel, the main :code:`py` library developer, introduced
+the idea of keyword-based logging and the idea of logging *producers* and
+*consumers*. A log producer is an object used by the application code
+to send messages to various log consumers. When you create a log
+producer, you define a set of keywords that are then used to both route
+the logging messages to consumers, and to prefix those messages.
+
+In fact, each log producer has a few keywords associated with it for
+identification purposes. These keywords form a tuple of strings, and
+may be used to later retrieve a particular log producer.
+
+A log producer may (or may not) be associated with a log consumer, meant
+to handle log messages in particular ways. The log consumers can be
+``STDOUT``, ``STDERR``, log files, syslog, the Windows Event Log, user
+defined functions, etc. (Yet, logging to syslog or to the Windows Event
+Log is only planned for now). A log producer never has more than
+one consumer at a given time, but it is possible to dynamically switch
+a producer to use another consumer. On the other hand, a single log
+consumer may be associated with many producers.
+
+Note that creating and associating a producer and a consumer is done
+automatically when not otherwise overridden, so using :code:`py` logging
+is quite comfortable even in the smallest programs. More typically,
+the application programmer will likely design a hierarchy of producers,
+and will select keywords appropriately for marking the hierarchy tree.
+If a node of the hierarchical tree of producers has to be divided into
+sub-trees, all producers in the sub-trees share, as a common prefix, the
+keywords of the node being divided. In other words, we go further down
+in the hierarchy of producers merely by adding keywords.
+
+Using the py.log library
+================================
+
+To use the :code:`py.log` library, the user must import it into a Python
+application, create at least one log producer and one log consumer, have
+producers and consumers associated, and finally call the log producers
+as needed, giving them log messages.
+
+Importing
+---------
+
+Once the :code:`py` library is installed on your system, a mere::
+
+ import py
+
+holds enough magic for lazily importing the various facilities of the
+:code:`py` library when they are first needed. This is really how
+:code:`py.log` is made available to the application. For example, after
+the above ``import py``, one may directly write ``py.log.Producer(...)``
+and everything should work fine; the user does not have to worry about
+specifically importing more modules.
+
+Creating a producer
+-------------------
+
+There are three ways for creating a log producer instance:
+
+ + As soon as ``py.log`` is first evaluated within an application
+ program, a default log producer is created, and made available under
+ the name ``py.log.default``. The keyword ``default`` is associated
+ with that producer.
+
+ + The ``py.log.Producer()`` constructor may be explicitly called
+ for creating a new instance of a log producer. That constructor
+ accepts, as an argument, the keywords that should be associated with
+ that producer. Keywords may be given either as a tuple of keyword
+ strings, or as a single space-separated string of keywords.
+
+ + Whenever an attribute is *taken* out of a log producer instance,
+ for the first time that attribute is taken, a new log producer is
+ created. The keywords associated with that new producer are those
+ of the initial producer instance, to which is appended the name of
+ the attribute being taken.
+
+The last point is especially useful, as it allows using log producers
+without further declarations, merely creating them *on-the-fly*.
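+
+For instance (a small sketch; the keywords are made up)::
+
+    import py
+
+    log = py.log.Producer("myapp")    # keywords: ('myapp',)
+    sublog = log.database             # on-the-fly producer with keywords ('myapp', 'database')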
+
+Creating a consumer
+-------------------
+
+There are many ways for creating or denoting a log consumer:
+
+ + A default consumer exists within the ``py.log`` facilities, which
+ has the effect of writing log messages on the Python standard output
+ stream. That consumer is associated at the very top of the producer
+ hierarchy, and as such, is called whenever no other consumer is
+ found.
+
+ + The notation ``py.log.STDOUT`` accesses a log consumer which writes
+ log messages on the Python standard output stream.
+
+ + The notation ``py.log.STDERR`` accesses a log consumer which writes
+ log messages on the Python standard error stream.
+
+ + The ``py.log.File()`` constructor accepts, as argument, either a file
+ already opened in write mode or any similar file-like object, and
+ creates a log consumer able to write log messages onto that file.
+
+ + The ``py.log.Path()`` constructor accepts a file name for its first
+ argument, and creates a log consumer able to write log messages into
+ that file. The constructor call accepts a few keyword parameters:
+
+ + ``append``, which is ``False`` by default, may be used for
+ opening the file in append mode instead of write mode.
+
+   + ``delayed_create``, which is ``False`` by default, may be used
+     for opening the file at the latest possible time. Consequently,
+     the file will not even be created if no actual log message
+     ever gets written to it.
+
+ + ``buffering``, which is 1 by default, is used when opening the
+ file. Buffering can be turned off by specifying a 0 value. The
+ buffer size may also be selected through this argument.
+
+ + Any user defined function may be used for a log consumer. Such a
+ function should accept a single argument, which is the message to
+ write, and do whatever is deemed appropriate by the programmer.
+ When the need arises, this may be an especially useful and flexible
+ feature.
+
+ + The special value ``None`` means no consumer at all. This acts just
+ like if there was a consumer which would silently discard all log
+ messages sent to it.
+
+Associating producers and consumers
+-----------------------------------
+
+Each log producer may have at most one log consumer associated with
+it. A log producer gets associated with a log consumer through a
+``py.log.setconsumer()`` call. That function accepts two arguments,
+the first identifying a producer (a tuple of keyword strings or a single
+space-separated string of keywords), the second specifying the precise
+consumer to use for that producer. Until this function is called for a
+producer, that producer does not have any explicit consumer associated
+with it.
+
+Now, the hierarchy of log producers establishes which consumer gets used
+whenever a producer has no explicit consumer. When a log producer
+has no consumer explicitly associated with it, it dynamically and
+recursively inherits the consumer of its parent node, that is, the node
+one step closer to the root of the hierarchy. In other words, the
+rightmost keywords of that producer are dropped until another producer
+is found which has an explicit consumer. A nice side-effect is that,
+by explicitly associating a consumer with a producer, all consumer-less
+producers which appear under that producer, in the hierarchy tree,
+automatically *inherit* that consumer.
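+
+A small sketch of wiring producers to consumers (the keywords and file
+name are made up)::
+
+    import py
+
+    py.log.setconsumer("myapp", py.log.Path("myapp.log", append=True))
+    py.log.setconsumer("myapp database", py.log.STDERR)
+    py.log.setconsumer("myapp noisy", None)   # silently discard these messages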
+
+Writing log messages
+--------------------
+
+All log producer instances are also callable, and it is by calling
+them that log messages are generated. Each call to a producer object
+produces the text for one log entry, which, in turn, is sent to the log
+consumer for that producer.
+
+The log entry displays, after a prefix identifying the log producer
+being used, all arguments given in the call, converted to strings and
+space-separated. (This is meant by design to be fairly similar to what
+the ``print`` statement does in Python). The prefix itself is made up
+of a colon-separated list of keywords associated with the producer, the
+whole being set within square brackets.
+
+Note that the consumer is responsible for adding the newline at the end
+of the log entry. That final newline is not part of the text for the
+log entry.
+
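+A short sketch of a call and, roughly, the resulting entry (the exact
+prefix comes from the producer's keywords)::
+
+    log = py.log.Producer("myapp db")
+    log("connected to", "localhost", 5432)
+    # the consumer receives something like: [myapp:db] connected to localhost 5432
+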
+.. Other details
+.. -------------
+.. XXX: fill in details
+.. + Should speak about pickle-ability of :code:`py.log`.
+..
+.. + What is :code:`log.get` (in :file:`logger.py`)?
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/misc.txt b/testing/web-platform/tests/tools/third_party/py/doc/misc.txt
new file mode 100644
index 0000000000..4b45348275
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/misc.txt
@@ -0,0 +1,93 @@
+====================================
+Miscellaneous features of the py lib
+====================================
+
+Mapping the standard python library into py
+===========================================
+
+The ``py.std`` object allows lazy access to
+standard library modules. For example, to get to the print-exception
+functionality of the standard library you can write::
+
+ py.std.traceback.print_exc()
+
+without having to do anything other than the usual ``import py``
+at the beginning. You can access any other top-level standard
+library module this way. This means that you will only trigger
+imports of modules that are actually needed. Note that no attempt
+is made to import submodules.
+
+Support for interaction with system utilities/binaries
+======================================================
+
+Currently, the py lib offers two ways to interact with
+system executables. ``py.process.cmdexec()`` invokes
+the shell in order to execute a string. The other
+one, the ``sysexec()`` method of ``py.path.local`` objects, lets you
+directly execute a binary.
+
+Both approaches will raise an exception if the return
+code is other than 0 and will otherwise return the stdout output
+of the child process.
+
+The shell based approach
+------------------------
+
+You can execute a command via your system shell
+by doing something like::
+
+ out = py.process.cmdexec('ls -v')
+
+However, the ``cmdexec`` approach has a few shortcomings:
+
+- it relies on the underlying system shell
+- it necessitates shell-escaping for expressing arguments
+- it does not easily allow you to "fix" the binary you want to run
+- it only allows executing executables from the local
+  filesystem
+
+.. _sysexec:
+
+local paths have ``sysexec``
+----------------------------
+
+In order to synchronously execute an executable file you
+can use ``sysexec``::
+
+ binsvn.sysexec('ls', 'http://codespeak.net/svn')
+
+where ``binsvn`` is a path that points to the ``svn`` commandline
+binary. Note that this function does not offer any shell-escaping
+so you have to pass in already separated arguments.
+
+finding an executable local path
+--------------------------------
+
+Finding an executable works quite differently across platforms.
+Currently, the ``PATH`` environment variable based search on
+unix platforms is supported::
+
+    py.path.local.sysfind('svn')
+
+which returns the first path whose ``basename`` matches ``svn``.
+In principle, ``sysfind`` deploys platform specific algorithms
+to perform the search. On Windows, for example, it may look
+at the registry (XXX).
+
+To make the story complete, we allow passing in a second ``checker``
+argument that is called for each found executable. For example, if
+you have multiple binaries available you may want to select the
+right version::
+
+    def mysvn(p):
+        """ check that the given svn binary has version 1.1. """
+        line = p.sysexec('--version').splitlines()[0]
+        if 'version 1.1' in line:
+            return p
+    binsvn = py.path.local.sysfind('svn', checker=mysvn)
+
+
+Cross-Python Version compatibility helpers
+=============================================
+
+The ``py.builtin`` namespace provides a number of helpers that help to write python code compatible across Python interpreters, mainly Python2 and Python3. Type ``help(py.builtin)`` on a Python prompt for the selection of builtins.
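+
+For instance (a small sketch)::
+
+    import py
+
+    py.builtin.print_("hello", "world")        # print function on Python 2 and 3
+    assert isinstance(u"hi", py.builtin.text)  # 'unicode' on Python 2, 'str' on Python 3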
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/path.txt b/testing/web-platform/tests/tools/third_party/py/doc/path.txt
new file mode 100644
index 0000000000..8f506d4923
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/path.txt
@@ -0,0 +1,264 @@
+=======
+py.path
+=======
+
+ **Note**: The 'py' library is in "maintenance mode" and so is not
+ recommended for new projects. Please check out
+ `pathlib <https://docs.python.org/3/library/pathlib.html>`_ or
+ `pathlib2 <https://pypi.org/project/pathlib2/>`_ for path
+ operations.
+
+The 'py' lib provides a uniform high-level api to deal with filesystems
+and filesystem-like interfaces: ``py.path``. It aims to offer a central
+object for accessing fs-like object trees (reading from and writing to files, adding
+files/directories, examining the types and structure, etc.), and out-of-the-box
+provides a number of implementations of this API.
+
+py.path.local - local file system path
+===============================================
+
+.. _`local`:
+
+basic interactive example
+-------------------------------------
+
+The first and most obvious of the implementations is a wrapper around a local
+filesystem. It's just a bit nicer in usage than the regular Python APIs, and
+of course all the functionality is bundled together rather than spread over a
+number of modules.
+
+
+.. sourcecode:: pycon
+
+ >>> import py
+ >>> temppath = py.path.local('py.path_documentation')
+ >>> foopath = temppath.join('foo') # get child 'foo' (lazily)
+ >>> foopath.check() # check if child 'foo' exists
+ False
+ >>> foopath.write('bar') # write some data to it
+ >>> foopath.check()
+ True
+ >>> foopath.read()
+ 'bar'
+ >>> foofile = foopath.open() # return a 'real' file object
+ >>> foofile.read(1)
+ 'b'
+
+reference documentation
+---------------------------------
+
+.. autoclass:: py._path.local.LocalPath
+ :members:
+ :inherited-members:
+
+``py.path.svnurl`` and ``py.path.svnwc``
+==================================================
+
+Two other ``py.path`` implementations that the py lib provides wrap the
+popular `Subversion`_ revision control system: the first (called 'svnurl')
+by interfacing with a remote server, the second by wrapping a local checkout.
+Both allow you to access relatively advanced features such as metadata and
+versioning, and both do so in a more user-friendly manner than other
+existing solutions.
+
+Some example usage of ``py.path.svnurl``:
+
+.. sourcecode:: pycon
+
+ .. >>> import py
+ .. >>> if not py.test.config.option.urlcheck: raise ValueError('skipchunk')
+ >>> url = py.path.svnurl('http://codespeak.net/svn/py')
+ >>> info = url.info()
+ >>> info.kind
+ 'dir'
+ >>> firstentry = url.log()[-1]
+ >>> import time
+ >>> time.strftime('%Y-%m-%d', time.gmtime(firstentry.date))
+ '2004-10-02'
+
+Example usage of ``py.path.svnwc``:
+
+.. sourcecode:: pycon
+
+ .. >>> if not py.test.config.option.urlcheck: raise ValueError('skipchunk')
+ >>> temp = py.path.local('py.path_documentation')
+ >>> wc = py.path.svnwc(temp.join('svnwc'))
+ >>> wc.checkout('http://codespeak.net/svn/py/dist/py/path/local')
+ >>> wc.join('local.py').check()
+ True
+
+.. _`Subversion`: http://subversion.tigris.org/
+
+svn path related API reference
+-----------------------------------------
+
+.. autoclass:: py._path.svnwc.SvnWCCommandPath
+ :members:
+ :inherited-members:
+
+.. autoclass:: py._path.svnurl.SvnCommandPath
+ :members:
+ :inherited-members:
+
+.. autoclass:: py._path.svnwc.SvnAuth
+ :members:
+ :inherited-members:
+
+Common vs. specific API, Examples
+========================================
+
+All Path objects support a common set of operations, suitable
+for many use cases and allowing you to transparently switch the
+path object within an application (e.g. from "local" to "svnwc").
+The common set includes functions such as `path.read()` to read all data
+from a file, `path.write()` to write data, `path.listdir()` to get a list
+of directory entries, `path.check()` to check if a node exists
+and is of a particular type, `path.join()` to get
+to a (grand)child, `path.visit()` to recursively walk through a node's
+children, etc. Only things that are not common on 'normal' filesystems (yet),
+such as handling metadata (e.g. the Subversion "properties") require
+using specific APIs.
+
+A quick 'cookbook' of small examples that will be useful 'in real life',
+which also presents parts of the 'common' API, and shows some non-common
+methods:
+
+Searching `.txt` files
+--------------------------------
+
+Search for a particular string inside all files with a .txt extension in a
+specific directory.
+
+.. sourcecode:: pycon
+
+ >>> dirpath = temppath.ensure('testdir', dir=True)
+ >>> dirpath.join('textfile1.txt').write('foo bar baz')
+ >>> dirpath.join('textfile2.txt').write('frob bar spam eggs')
+ >>> subdir = dirpath.ensure('subdir', dir=True)
+ >>> subdir.join('textfile1.txt').write('foo baz')
+ >>> subdir.join('textfile2.txt').write('spam eggs spam foo bar spam')
+ >>> results = []
+ >>> for fpath in dirpath.visit('*.txt'):
+ ... if 'bar' in fpath.read():
+ ... results.append(fpath.basename)
+ >>> results.sort()
+ >>> results
+ ['textfile1.txt', 'textfile2.txt', 'textfile2.txt']
+
+Working with Paths
+----------------------------
+
+This example shows the ``py.path`` features for dealing with
+filesystem paths. Note that the filesystem is never touched;
+all operations are performed on a string level (so the paths
+don't have to exist, either):
+
+.. sourcecode:: pycon
+
+ >>> p1 = py.path.local('/foo/bar')
+ >>> p2 = p1.join('baz/qux')
+ >>> p2 == py.path.local('/foo/bar/baz/qux')
+ True
+ >>> sep = py.path.local.sep
+ >>> p2.relto(p1).replace(sep, '/') # os-specific path sep in the string
+ 'baz/qux'
+ >>> p2.bestrelpath(p1).replace(sep, '/')
+ '../..'
+ >>> p2.join(p2.bestrelpath(p1)) == p1
+ True
+ >>> p3 = p1 / 'baz/qux' # the / operator allows joining, too
+ >>> p2 == p3
+ True
+ >>> p4 = p1 + ".py"
+ >>> p4.basename == "bar.py"
+ True
+ >>> p4.ext == ".py"
+ True
+ >>> p4.purebasename == "bar"
+ True
+
+This should be possible on every implementation of ``py.path``, so
+regardless of whether the implementation wraps a UNIX filesystem, a Windows
+one, or a database or object tree, these functions should be available (each
+with their own notion of path separators and dealing with conversions, etc.).
+
+Checking path types
+-------------------------------
+
+Now we will show a bit about the powerful 'check()' method on paths, which
+allows you to check whether a file exists, what type it is, etc.:
+
+.. sourcecode:: pycon
+
+ >>> file1 = temppath.join('file1')
+ >>> file1.check() # does it exist?
+ False
+ >>> file1 = file1.ensure(file=True) # 'touch' the file
+ >>> file1.check()
+ True
+ >>> file1.check(dir=True) # is it a dir?
+ False
+ >>> file1.check(file=True) # or a file?
+ True
+ >>> file1.check(ext='.txt') # check the extension
+ False
+ >>> textfile = temppath.ensure('text.txt', file=True)
+ >>> textfile.check(ext='.txt')
+ True
+ >>> file1.check(basename='file1') # we can use all the path's properties here
+ True
+
+Setting svn-properties
+--------------------------------
+
+As an example of 'uncommon' methods, we'll show how to read and write
+properties in a ``py.path.svnwc`` instance:
+
+.. sourcecode:: pycon
+
+ .. >>> if not py.test.config.option.urlcheck: raise ValueError('skipchunk')
+ >>> wc.propget('foo')
+ ''
+ >>> wc.propset('foo', 'bar')
+ >>> wc.propget('foo')
+ 'bar'
+ >>> len(wc.status().prop_modified) # our own props
+ 1
+ >>> msg = wc.revert() # roll back our changes
+ >>> len(wc.status().prop_modified)
+ 0
+
+SVN authentication
+----------------------------
+
+Some uncommon functionality can also be provided as extensions, such as SVN
+authentication:
+
+.. sourcecode:: pycon
+
+ .. >>> if not py.test.config.option.urlcheck: raise ValueError('skipchunk')
+    >>> auth = py.path.SvnAuth('anonymous', 'user', cache_auth=False,
+    ...                        interactive=False)
+    >>> wc.auth = auth
+    >>> wc.update() # this should work
+    >>> path = wc.ensure('thisshouldnotexist.txt')
+    >>> try:
+    ...     path.commit('testing')
+    ... except py.process.cmdexec.Error as e:
+    ...     error = e
+    >>> 'authorization failed' in str(error)
+    True
+
+Known problems / limitations
+===================================
+
+* The SVN path objects require the "svn" command line tool;
+  there is currently no support for python bindings.
+  Parsing the svn output can lead to problems, particularly
+  if you have a non-english "locale" setting.
+
+* While the path objects basically work on Windows,
+  no attention has yet been paid to making unicode paths
+  work or to dealing with the famous "8.3" filename issues.
+
+
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/style.css b/testing/web-platform/tests/tools/third_party/py/doc/style.css
new file mode 100644
index 0000000000..95e3ef07b2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/style.css
@@ -0,0 +1,1044 @@
+body,body.editor,body.body {
+ font: 110% "Times New Roman", Arial, Verdana, Helvetica, serif;
+ background: White;
+ color: Black;
+}
+
+a, a.reference {
+ text-decoration: none;
+}
+a[href]:hover { text-decoration: underline; }
+
+img {
+ border: none;
+ vertical-align: middle;
+}
+
+p, div.text {
+ text-align: left;
+ line-height: 1.5em;
+ margin: 0.5em 0em 0em 0em;
+}
+
+
+
+p a:active {
+ color: Red;
+ background-color: transparent;
+}
+
+p img {
+ border: 0;
+ margin: 0;
+}
+
+img.inlinephoto {
+ padding: 0;
+ padding-right: 1em;
+ padding-top: 0.7em;
+ float: left;
+}
+
+hr {
+ clear: both;
+ height: 1px;
+ color: #8CACBB;
+ background-color: transparent;
+}
+
+
+ul {
+ line-height: 1.5em;
+ /*list-style-image: url("bullet.gif"); */
+ margin-left: 1.5em;
+ padding:0;
+}
+
+ol {
+ line-height: 1.5em;
+ margin-left: 1.5em;
+ padding:0;
+}
+
+ul a, ol a {
+ text-decoration: underline;
+}
+
+dl {
+}
+
+dd {
+ line-height: 1.5em;
+ margin-bottom: 1em;
+}
+
+blockquote {
+ font-family: Times, "Times New Roman", serif;
+ font-style: italic;
+ font-size: 120%;
+}
+
+code {
+ color: Black;
+ /*background-color: #dee7ec;*/
+ /*background-color: #cccccc;*/
+}
+
+pre {
+ padding: 1em;
+ border: 1px dotted #8cacbb;
+ color: Black;
+ /*
+ background-color: #dee7ec;
+ background-color: #cccccc;
+ background-color: #dee7ec;
+ */
+ overflow: auto;
+}
+
+
+.netscape4 {
+ display: none;
+}
+
+/* main page styles */
+
+/*a[href]:hover { color: black; text-decoration: underline; }
+a[href]:link { color: black; text-decoration: underline; }
+a[href] { color: black; text-decoration: underline; }
+*/
+
+span.menu_selected {
+ color: black;
+ text-decoration: none;
+ padding-right: 0.3em;
+ background-color: #cccccc;
+}
+
+
+a.menu {
+ /*color: #3ba6ec; */
+ font: 120% Verdana, Helvetica, Arial, sans-serif;
+ text-decoration: none;
+ padding-right: 0.3em;
+}
+
+a.menu[href]:visited, a.menu[href]:link{
+ /*color: #3ba6ec; */
+ text-decoration: none;
+}
+
+a.menu[href]:hover {
+ /*color: black;*/
+}
+
+div#pagetitle{
+ /*border-spacing: 20px;*/
+ font: 160% Verdana, Helvetica, Arial, sans-serif;
+ color: #3ba6ec;
+ vertical-align: middle;
+ left: 80 px;
+ padding-bottom: 0.3em;
+}
+
+a.wikicurrent {
+ font: 100% Verdana, Helvetica, Arial, sans-serif;
+ color: #3ba6ec;
+ vertical-align: middle;
+}
+
+
+table.body {
+ border: 0;
+ /*padding: 0;
+ border-spacing: 0px;
+ border-collapse: separate;
+ */
+}
+
+td.page-header-left {
+ padding: 5px;
+ /*border-bottom: 1px solid #444444;*/
+}
+
+td.page-header-top {
+ padding: 0;
+
+ /*border-bottom: 1px solid #444444;*/
+}
+
+td.sidebar {
+ padding: 1 0 0 1;
+}
+
+td.sidebar p.classblock {
+ padding: 0 5 0 5;
+ margin: 1 1 1 1;
+ border: 1px solid #444444;
+ background-color: #eeeeee;
+}
+
+td.sidebar p.userblock {
+ padding: 0 5 0 5;
+ margin: 1 1 1 1;
+ border: 1px solid #444444;
+ background-color: #eeeeff;
+}
+
+td.content {
+ padding: 1 5 1 5;
+ vertical-align: top;
+ width: 100%;
+}
+
+p.ok-message {
+ background-color: #22bb22;
+ padding: 5 5 5 5;
+ color: white;
+ font-weight: bold;
+}
+p.error-message {
+ background-color: #bb2222;
+ padding: 5 5 5 5;
+ color: white;
+ font-weight: bold;
+}
+
+p:first-child {
+ margin: 0 ;
+ padding: 0;
+}
+
+/* style for forms */
+table.form {
+ padding: 2;
+ border-spacing: 0px;
+ border-collapse: separate;
+}
+
+table.form th {
+ color: #333388;
+ text-align: right;
+ vertical-align: top;
+ font-weight: normal;
+}
+table.form th.header {
+ font-weight: bold;
+ background-color: #eeeeff;
+ text-align: left;
+}
+
+table.form th.required {
+ font-weight: bold;
+}
+
+table.form td {
+ color: #333333;
+ empty-cells: show;
+ vertical-align: top;
+}
+
+table.form td.optional {
+ font-weight: bold;
+ font-style: italic;
+}
+
+table.form td.html {
+ color: #777777;
+}
+
+/* style for lists */
+table.list {
+ border-spacing: 0px;
+ border-collapse: separate;
+ vertical-align: top;
+ padding-top: 0;
+ width: 100%;
+}
+
+table.list th {
+ padding: 0 4 0 4;
+ color: #404070;
+ background-color: #eeeeff;
+ border-right: 1px solid #404070;
+ border-top: 1px solid #404070;
+ border-bottom: 1px solid #404070;
+ vertical-align: top;
+ empty-cells: show;
+}
+table.list th a[href]:hover { color: #404070 }
+table.list th a[href]:link { color: #404070 }
+table.list th a[href] { color: #404070 }
+table.list th.group {
+ background-color: #f4f4ff;
+ text-align: center;
+ font-size: 120%;
+}
+
+table.list td {
+ padding: 0 4 0 4;
+ border: 0 2 0 2;
+ border-right: 1px solid #404070;
+ color: #404070;
+ background-color: white;
+ vertical-align: top;
+ empty-cells: show;
+}
+
+table.list tr.normal td {
+ background-color: white;
+ white-space: nowrap;
+}
+
+table.list tr.alt td {
+ background-color: #efefef;
+ white-space: nowrap;
+}
+
+table.list td:first-child {
+ border-left: 1px solid #404070;
+ border-right: 1px solid #404070;
+}
+
+table.list th:first-child {
+ border-left: 1px solid #404070;
+ border-right: 1px solid #404070;
+}
+
+table.list tr.navigation th {
+ text-align: right;
+}
+table.list tr.navigation th:first-child {
+ border-right: none;
+ text-align: left;
+}
+
+
+/* style for message displays */
+table.messages {
+ border-spacing: 0px;
+ border-collapse: separate;
+ width: 100%;
+}
+
+table.messages th.header{
+ padding-top: 10px;
+ border-bottom: 1px solid gray;
+ font-weight: bold;
+ background-color: white;
+ color: #707040;
+}
+
+table.messages th {
+ font-weight: bold;
+ color: black;
+ text-align: left;
+ border-bottom: 1px solid #afafaf;
+}
+
+table.messages td {
+ font-family: monospace;
+ background-color: #efefef;
+ border-bottom: 1px solid #afafaf;
+ color: black;
+ empty-cells: show;
+ border-right: 1px solid #afafaf;
+ vertical-align: top;
+ padding: 2 5 2 5;
+}
+
+table.messages td:first-child {
+ border-left: 1px solid #afafaf;
+ border-right: 1px solid #afafaf;
+}
+
+/* style for file displays */
+table.files {
+ border-spacing: 0px;
+ border-collapse: separate;
+ width: 100%;
+}
+
+table.files th.header{
+ padding-top: 10px;
+ border-bottom: 1px solid gray;
+ font-weight: bold;
+ background-color: white;
+ color: #707040;
+}
+
+table.files th {
+ border-bottom: 1px solid #afafaf;
+ font-weight: bold;
+ text-align: left;
+}
+
+table.files td {
+ font-family: monospace;
+ empty-cells: show;
+}
+
+/* style for history displays */
+table.history {
+ border-spacing: 0px;
+ border-collapse: separate;
+ width: 100%;
+}
+
+table.history th.header{
+ padding-top: 10px;
+ border-bottom: 1px solid gray;
+ font-weight: bold;
+ background-color: white;
+ color: #707040;
+ font-size: 100%;
+}
+
+table.history th {
+ border-bottom: 1px solid #afafaf;
+ font-weight: bold;
+ text-align: left;
+ font-size: 90%;
+}
+
+table.history td {
+ font-size: 90%;
+ vertical-align: top;
+ empty-cells: show;
+}
+
+
+/* style for class list */
+table.classlist {
+ border-spacing: 0px;
+ border-collapse: separate;
+ width: 100%;
+}
+
+table.classlist th.header{
+ padding-top: 10px;
+ border-bottom: 1px solid gray;
+ font-weight: bold;
+ background-color: white;
+ color: #707040;
+}
+
+table.classlist th {
+ font-weight: bold;
+ text-align: left;
+}
+
+
+/* style for class help display */
+table.classhelp {
+ border-spacing: 0px;
+ border-collapse: separate;
+ width: 100%;
+}
+
+table.classhelp th {
+ font-weight: bold;
+ text-align: left;
+ color: #707040;
+}
+
+table.classhelp td {
+ padding: 2 2 2 2;
+ border: 1px solid black;
+ text-align: left;
+ vertical-align: top;
+ empty-cells: show;
+}
+
+
+/* style for "other" displays */
+table.otherinfo {
+ border-spacing: 0px;
+ border-collapse: separate;
+ width: 100%;
+}
+
+table.otherinfo th.header{
+ padding-top: 10px;
+ border-bottom: 1px solid gray;
+ font-weight: bold;
+ background-color: white;
+ color: #707040;
+}
+
+table.otherinfo th {
+ border-bottom: 1px solid #afafaf;
+ font-weight: bold;
+ text-align: left;
+}
+
+input {
+ border: 1px solid #8cacbb;
+ color: Black;
+ background-color: white;
+ vertical-align: middle;
+ margin-bottom: 1px; /* IE bug fix */
+ padding: 0.1em;
+}
+
+select {
+ border: 1px solid #8cacbb;
+ color: Black;
+ background-color: white;
+ vertical-align: middle;
+ margin-bottom: 1px; /* IE bug fix */
+ padding: 0.1em;
+}
+
+
+a.nonexistent {
+ color: #FF2222;
+}
+a.nonexistent:visited {
+ color: #FF2222;
+}
+a.external {
+ color: #AA6600;
+}
+
+/*
+dl,ul,ol {
+ margin-top: 1pt;
+}
+tt,pre {
+ font-family: Lucida Console,Courier New,Courier,monotype;
+ font-size: 12pt;
+}
+pre.code {
+ margin-top: 8pt;
+ margin-bottom: 8pt;
+ background-color: #FFFFEE;
+ white-space:pre;
+ border-style:solid;
+ border-width:1pt;
+ border-color:#999999;
+ color:#111111;
+ padding:5px;
+ width:100%;
+}
+*/
+div.diffold {
+ background-color: #FFFF80;
+ border-style:none;
+ border-width:thin;
+ width:100%;
+}
+div.diffnew {
+ background-color: #80FF80;
+ border-style:none;
+ border-width:thin;
+ width:100%;
+}
+div.message {
+ margin-top: 6pt;
+ background-color: #E8FFE8;
+ border-style:solid;
+ border-width:1pt;
+ border-color:#999999;
+ color:#440000;
+ padding:5px;
+ width:100%;
+}
+strong.highlight {
+ background-color: #FFBBBB;
+/* as usual, NetScape breaks with innocent CSS
+ border-color: #FFAAAA;
+ border-style: solid;
+ border-width: 1pt;
+*/
+}
+
+table.navibar {
+ background-color: #C8C8C8;
+ border-spacing: 3px;
+}
+td.navibar {
+ background-color: #E8E8E8;
+ vertical-align: top;
+ text-align: right;
+ padding: 0px;
+}
+
+a#versioninfo {
+ color: blue;
+}
+
+div#pagename {
+ font-size: 140%;
+ color: blue;
+ text-align: center;
+ font-weight: bold;
+ background-color: white;
+ padding: 0 ;
+}
+
+a.wikiaction, input.wikiaction {
+ color: black;
+ text-decoration: None;
+ text-align: center;
+ color: black;
+ /*border: 1px solid #3ba6ec; */
+ margin: 4px;
+ padding: 5;
+ padding-bottom: 0;
+ white-space: nowrap;
+}
+
+a.wikiaction[href]:hover {
+ color: black;
+ text-decoration: none;
+ /*background-color: #dddddd; */
+}
+
+
+div.legenditem {
+ padding-top: 0.5em;
+ padding-left: 0.3em;
+}
+
+span.wikitoken {
+ background-color: #eeeeee;
+}
+
+
+div#contentspace h1:first-child, div.heading:first-child {
+ padding-top: 0;
+ margin-top: 0;
+}
+div#contentspace h2:first-child {
+ padding-top: 0;
+ margin-top: 0;
+}
+
+/* heading and paragraph text */
+
+div.heading, h1 {
+ font-family: Verdana, Helvetica, Arial, sans-serif;
+ background-color: #58b3ef;
+ background-color: #FFFFFF;
+ /*color: #4893cf;*/
+ color: black;
+ padding-top: 1.0em;
+ padding-bottom:0.2em;
+ text-align: left;
+ margin-top: 0em;
+ /*margin-bottom:8pt;*/
+ font-weight: bold;
+ font-size: 115%;
+ border-bottom: 1px solid #8CACBB;
+}
+
+h2 {
+ border-bottom: 1px dotted #8CACBB;
+}
+
+
+h1, h2, h3, h4, h5, h6 {
+ color: Black;
+ clear: left;
+ font: 100% Verdana, Helvetica, Arial, sans-serif;
+ margin: 0;
+ padding-left: 0em;
+ padding-top: 1em;
+ padding-bottom: 0.2em;
+ /*border-bottom: 1px solid #8CACBB;*/
+}
+/* h1,h2 { padding-top: 0; }*/
+
+
+h1 { font-size: 145%; }
+h2 { font-size: 115%; }
+h3 { font-size: 105%; }
+h4 { font-size: 100%; }
+h5 { font-size: 100%; }
+
+h1 a { text-decoration: None;}
+
+div.exception {
+ background-color: #bb2222;
+ padding: 5 5 5 5;
+ color: white;
+ font-weight: bold;
+}
+pre.exception {
+ font-size: 110%;
+ padding: 1em;
+ border: 1px solid #8cacbb;
+ color: Black;
+ background-color: #dee7ec;
+ background-color: #cccccc;
+}
+
+/* defines for navigation bar (documentation) */
+
+
+div.direntry {
+ padding-top: 0.3em;
+ padding-bottom: 0.3em;
+ margin-right: 1em;
+ font-weight: bold;
+ background-color: #dee7ec;
+ font-size: 110%;
+}
+
+div.fileentry {
+ font-family: Verdana, Helvetica, Arial, sans-serif;
+ padding-bottom: 0.3em;
+ white-space: nowrap;
+ line-height: 150%;
+}
+
+a.fileentry {
+ white-space: nowrap;
+}
+
+
+span.left {
+ text-align: left;
+}
+span.right {
+ text-align: right;
+}
+
+div.navbar {
+ /*margin: 0;*/
+ font-size: 80% /*smaller*/;
+ font-weight: bold;
+ text-align: left;
+ /* position: fixed; */
+ top: 100pt;
+ left: 0pt; /* auto; */
+ width: 120pt;
+ /* right: auto;
+ right: 0pt; 2em; */
+}
+
+
+div.history a {
+ /* font-size: 70%; */
+}
+
+div.wikiactiontitle {
+ font-weight: bold;
+}
+
+/* REST defines */
+
+div.document {
+ margin: 0;
+}
+
+h1.title {
+ margin: 0;
+ margin-bottom: 0.5em;
+}
+
+td.toplist {
+ vertical-align: top;
+}
+
+img#pyimg {
+ float: left;
+ padding-bottom: 1em;
+}
+
+div#navspace {
+ position: absolute;
+ font-size: 100%;
+ width: 150px;
+ overflow: hidden; /* scroll; */
+}
+
+
+div#errorline {
+ position: relative;
+ top: 5px;
+ float: right;
+}
+
+div#contentspace {
+ position: absolute;
+ /* font: 120% "Times New Roman", serif;*/
+ font: 110% Verdana, Helvetica, Arial, sans-serif;
+ left: 170px;
+ margin-right: 5px;
+}
+
+div#menubar {
+/* width: 400px; */
+ float: left;
+}
+
+/* for the documentation page */
+div#title{
+
+ font-size: 110%;
+ color: black;
+
+
+ /*background-color: #dee7ec;
+ #padding: 5pt;
+ #padding-bottom: 1em;
+ #color: black;
+ border-width: 1pt;
+ border-style: solid;*/
+
+}
+
+div#docnavlist {
+ /*background-color: #dee7ec; */
+ padding: 5pt;
+ padding-bottom: 2em;
+ color: black;
+ border-width: 1pt;
+ /*border-style: solid;*/
+}
+
+
+/* text markup */
+
+div.listtitle {
+ color: Black;
+ clear: left;
+ font: 120% Verdana, Helvetica, Arial, sans-serif;
+ margin: 0;
+ padding-left: 0em;
+ padding-top: 0em;
+ padding-bottom: 0.2em;
+ margin-right: 0.5em;
+ border-bottom: 1px solid #8CACBB;
+}
+
+div.actionbox h3 {
+ padding-top: 0;
+ padding-right: 0.5em;
+ padding-left: 0.5em;
+ background-color: #fabf00;
+ text-align: center;
+ border: 1px solid black; /* 8cacbb; */
+}
+
+div.actionbox a {
+ display: block;
+ padding-bottom: 0.5em;
+ padding-top: 0.5em;
+ margin-left: 0.5em;
+}
+
+div.actionbox a.history {
+ display: block;
+ padding-bottom: 0.5em;
+ padding-top: 0.5em;
+ margin-left: 0.5em;
+ font-size: 90%;
+}
+
+div.actionbox {
+ margin-bottom: 2em;
+ padding-bottom: 1em;
+ overflow: hidden; /* scroll; */
+}
+
+/* taken from docutils (oh dear, a bit senseless) */
+ol.simple, ul.simple {
+ margin-bottom: 1em }
+
+ol.arabic {
+ list-style: decimal }
+
+ol.loweralpha {
+ list-style: lower-alpha }
+
+ol.upperalpha {
+ list-style: upper-alpha }
+
+ol.lowerroman {
+ list-style: lower-roman }
+
+ol.upperroman {
+ list-style: upper-roman }
+
+
+/*
+:Author: David Goodger
+:Contact: goodger@users.sourceforge.net
+:date: $Date: 2003/01/22 22:26:48 $
+:version: $Revision: 1.29 $
+:copyright: This stylesheet has been placed in the public domain.
+
+Default cascading style sheet for the HTML output of Docutils.
+*/
+/*
+.first {
+ margin-top: 0 }
+
+.last {
+ margin-bottom: 0 }
+
+a.toc-backref {
+ text-decoration: none ;
+ color: black }
+
+dd {
+ margin-bottom: 0.5em }
+
+div.abstract {
+ margin: 2em 5em }
+
+div.abstract p.topic-title {
+ font-weight: bold ;
+ text-align: center }
+
+div.attention, div.caution, div.danger, div.error, div.hint,
+div.important, div.note, div.tip, div.warning {
+ margin: 2em ;
+ border: medium outset ;
+ padding: 1em }
+
+div.attention p.admonition-title, div.caution p.admonition-title,
+div.danger p.admonition-title, div.error p.admonition-title,
+div.warning p.admonition-title {
+ color: red ;
+ font-weight: bold ;
+ font-family: sans-serif }
+
+div.hint p.admonition-title, div.important p.admonition-title,
+div.note p.admonition-title, div.tip p.admonition-title {
+ font-weight: bold ;
+ font-family: sans-serif }
+
+div.dedication {
+ margin: 2em 5em ;
+ text-align: center ;
+ font-style: italic }
+
+div.dedication p.topic-title {
+ font-weight: bold ;
+ font-style: normal }
+
+div.figure {
+ margin-left: 2em }
+
+div.footer, div.header {
+ font-size: smaller }
+
+div.system-messages {
+ margin: 5em }
+
+div.system-messages h1 {
+ color: red }
+
+div.system-message {
+ border: medium outset ;
+ padding: 1em }
+
+div.system-message p.system-message-title {
+ color: red ;
+ font-weight: bold }
+
+div.topic {
+ margin: 2em }
+
+h1.title {
+ text-align: center }
+
+h2.subtitle {
+ text-align: center }
+
+hr {
+ width: 75% }
+
+p.caption {
+ font-style: italic }
+
+p.credits {
+ font-style: italic ;
+ font-size: smaller }
+
+p.label {
+ white-space: nowrap }
+
+p.topic-title {
+ font-weight: bold }
+
+pre.address {
+ margin-bottom: 0 ;
+ margin-top: 0 ;
+ font-family: serif ;
+ font-size: 100% }
+
+pre.line-block {
+ font-family: serif ;
+ font-size: 100% }
+
+pre.literal-block, pre.doctest-block {
+ margin-left: 2em ;
+ margin-right: 2em ;
+ background-color: #eeeeee }
+
+span.classifier {
+ font-family: sans-serif ;
+ font-style: oblique }
+
+span.classifier-delimiter {
+ font-family: sans-serif ;
+ font-weight: bold }
+
+span.interpreted {
+ font-family: sans-serif }
+
+span.option {
+ white-space: nowrap }
+
+span.option-argument {
+ font-style: italic }
+
+span.pre {
+ white-space: pre }
+
+span.problematic {
+ color: red }
+
+table {
+ margin-top: 0.5em ;
+ margin-bottom: 0.5em }
+
+table.citation {
+ border-left: solid thin gray ;
+ padding-left: 0.5ex }
+
+table.docinfo {
+ margin: 2em 4em }
+
+table.footnote {
+ border-left: solid thin black ;
+ padding-left: 0.5ex }
+
+td, th {
+ padding-left: 0.5em ;
+ padding-right: 0.5em ;
+ vertical-align: top }
+
+th.docinfo-name, th.field-name {
+ font-weight: bold ;
+ text-align: left ;
+ white-space: nowrap }
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+ font-size: 100% }
+
+tt {
+ background-color: #eeeeee }
+
+ul.auto-toc {
+ list-style-type: none }
+*/
+
+div.section {
+ margin-top: 1.0em ;
+}
diff --git a/testing/web-platform/tests/tools/third_party/py/doc/xml.txt b/testing/web-platform/tests/tools/third_party/py/doc/xml.txt
new file mode 100644
index 0000000000..1022de6e91
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/doc/xml.txt
@@ -0,0 +1,164 @@
+====================================================
+py.xml: simple pythonic xml/html file generation
+====================================================
+
+Motivation
+==========
+
+There are a plethora of frameworks and libraries to generate
+xml and html trees. However, many of them are large, have a
+steep learning curve and are often hard to debug. Not to
+speak of the fact that they are frameworks to begin with.
+
+.. _xist: http://www.livinglogic.de/Python/xist/index.html
+
+a pythonic object model, please
+================================
+
+The py lib offers a pythonic way to generate xml/html, based on
+ideas from xist_ which `uses python class objects`_ to build
+xml trees. However, xist_'s implementation is somewhat heavy
+because it has additional goals like transformations and
+supporting many namespaces. But its basic idea is very easy.
+
+.. _`uses python class objects`: http://www.livinglogic.de/Python/xist/Howto.html
+
+generating arbitrary xml structures
+-----------------------------------
+
+With ``py.xml.Namespace`` you have the basis
+to generate custom xml-fragments on the fly::
+
+ class ns(py.xml.Namespace):
+ "my custom xml namespace"
+ doc = ns.books(
+ ns.book(
+ ns.author("May Day"),
+ ns.title("python for java programmers"),),
+ ns.book(
+ ns.author("why"),
+ ns.title("Java for Python programmers"),),
+ publisher="N.N",
+ )
+ print doc.unicode(indent=2).encode('utf8')
+
+will give you this representation::
+
+ <books publisher="N.N">
+ <book>
+ <author>May Day</author>
+ <title>python for java programmers</title></book>
+ <book>
+ <author>why</author>
+ <title>Java for Python programmers</title></book></books>
+
+In a sentence: positional arguments are child-tags and
+keyword-arguments are attributes.
+
+On a side note, you'll see that the unicode-serializer
+supports a nice indentation style which keeps your generated
+html readable, basically through emulating python's white
+space significance by putting closing-tags rightmost and
+almost invisible at first glance :-)
+
+basic example for generating html
+---------------------------------
+
+Consider this example::
+
+ from py.xml import html # html namespace
+
+ paras = "First Para", "Second para"
+
+ doc = html.html(
+ html.head(
+ html.meta(name="Content-Type", value="text/html; charset=latin1")),
+ html.body(
+ [html.p(p) for p in paras]))
+
+ print unicode(doc).encode('latin1')
+
+Again, tags are objects which contain tags and have attributes.
+More exactly, Tags inherit from the list type and thus can be
+manipulated as list objects. They additionally support a default
+way to represent themselves as a serialized unicode object.
+
+If you happen to look at the py.xml implementation you'll
+note that the tag/namespace implementation consumes some 50 lines
+with another 50 lines for the unicode serialization code.
+
+CSS-styling your html Tags
+--------------------------
+
+One aspect where many of the huge python xml/html generation
+frameworks utterly fail is a clean and convenient integration
+of CSS styling. Often, developers are left alone with keeping
+CSS style definitions in sync with some style files
+represented as strings (often in a separate .css file). Not
+only is this hard to debug but the missing abstractions make
+it hard to modify the styling of your tags or to choose custom
+style representations (inline, html.head or external). Add the
+browsers' usual tolerance of messiness and errors in style
+references and welcome to hell, known as the domain of
+developing web applications :-)
+
+By contrast, consider this CSS styling example::
+
+ class my(html):
+ "my initial custom style"
+ class body(html.body):
+ style = html.Style(font_size = "120%")
+
+ class h2(html.h2):
+ style = html.Style(background = "grey")
+
+ class p(html.p):
+ style = html.Style(font_weight="bold")
+
+ doc = my.html(
+ my.head(),
+ my.body(
+ my.h2("hello world"),
+ my.p("bold as bold can")
+ )
+ )
+
+ print doc.unicode(indent=2)
+
+This will give you a small and mean, self-contained
+representation by default::
+
+ <html>
+ <head/>
+ <body style="font-size: 120%">
+ <h2 style="background: grey">hello world</h2>
+ <p style="font-weight: bold">bold as bold can</p></body></html>
+
+Most importantly, note that the inline-styling is just an
+implementation detail of the unicode serialization code.
+You can easily modify the serialization to put your styling into the
+``html.head`` or in a separate file and autogenerate CSS-class
+names or ids.
+
+Hey, you could even write tests that check you are using correct
+styles suitable for specific browser requirements. Did I mention
+that the ability to easily write tests for your generated
+html and its serialization could help to develop *stable* user
+interfaces?
+
+More to come ...
+----------------
+
+For now, I don't think we should strive to offer much more
+than the above. However, it is probably not hard to offer
+*partial serialization* to allow generating maybe hundreds of
+complex html documents per second. Basically we would allow
+putting callables both as Tag content and as values of
+attributes. A slightly more advanced Serialization would then
+produce a list of unicode objects intermingled with callables.
+At HTTP-Request time the callables would get called to
+complete the probably request-specific serialization of
+your Tags. Hum, it's probably harder to explain this than to
+actually code it :-)
+
+.. _`py.test`: test/index.html
diff --git a/testing/web-platform/tests/tools/third_party/py/py/__init__.py b/testing/web-platform/tests/tools/third_party/py/py/__init__.py
new file mode 100644
index 0000000000..b892ce1a2a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/__init__.py
@@ -0,0 +1,156 @@
+"""
+pylib: rapid testing and development utils
+
+this module uses apipkg.py for lazy-loading sub modules
+and classes. The initpkg-dictionary below specifies
+name->value mappings where value can be another namespace
+dictionary or an import path.
+
+(c) Holger Krekel and others, 2004-2014
+"""
+from py._error import error
+
+try:
+ from py._vendored_packages import apipkg
+ lib_not_mangled_by_packagers = True
+ vendor_prefix = '._vendored_packages.'
+except ImportError:
+ import apipkg
+ lib_not_mangled_by_packagers = False
+ vendor_prefix = ''
+
+try:
+ from ._version import version as __version__
+except ImportError:
+ # broken installation, we don't even try
+ __version__ = "unknown"
+
+
+apipkg.initpkg(__name__, attr={'_apipkg': apipkg, 'error': error}, exportdefs={
+ # access to all standard lib modules
+ 'std': '._std:std',
+
+ '_pydir' : '.__metainfo:pydir',
+ 'version': 'py:__version__', # backward compatibility
+
+ # pytest-2.0 has a flat namespace, we use alias modules
+ # to keep old references compatible
+ 'test' : 'pytest',
+
+ # hook into the top-level standard library
+ 'process' : {
+ '__doc__' : '._process:__doc__',
+ 'cmdexec' : '._process.cmdexec:cmdexec',
+ 'kill' : '._process.killproc:kill',
+ 'ForkedFunc' : '._process.forkedfunc:ForkedFunc',
+ },
+
+ 'apipkg' : {
+ 'initpkg' : vendor_prefix + 'apipkg:initpkg',
+ 'ApiModule' : vendor_prefix + 'apipkg:ApiModule',
+ },
+
+ 'iniconfig' : {
+ 'IniConfig' : vendor_prefix + 'iniconfig:IniConfig',
+ 'ParseError' : vendor_prefix + 'iniconfig:ParseError',
+ },
+
+ 'path' : {
+ '__doc__' : '._path:__doc__',
+ 'svnwc' : '._path.svnwc:SvnWCCommandPath',
+ 'svnurl' : '._path.svnurl:SvnCommandPath',
+ 'local' : '._path.local:LocalPath',
+ 'SvnAuth' : '._path.svnwc:SvnAuth',
+ },
+
+ # python inspection/code-generation API
+ 'code' : {
+ '__doc__' : '._code:__doc__',
+ 'compile' : '._code.source:compile_',
+ 'Source' : '._code.source:Source',
+ 'Code' : '._code.code:Code',
+ 'Frame' : '._code.code:Frame',
+ 'ExceptionInfo' : '._code.code:ExceptionInfo',
+ 'Traceback' : '._code.code:Traceback',
+ 'getfslineno' : '._code.source:getfslineno',
+ 'getrawcode' : '._code.code:getrawcode',
+ 'patch_builtins' : '._code.code:patch_builtins',
+ 'unpatch_builtins' : '._code.code:unpatch_builtins',
+ '_AssertionError' : '._code.assertion:AssertionError',
+ '_reinterpret_old' : '._code.assertion:reinterpret_old',
+ '_reinterpret' : '._code.assertion:reinterpret',
+ '_reprcompare' : '._code.assertion:_reprcompare',
+ '_format_explanation' : '._code.assertion:_format_explanation',
+ },
+
+ # backports and additions of builtins
+ 'builtin' : {
+ '__doc__' : '._builtin:__doc__',
+ 'enumerate' : '._builtin:enumerate',
+ 'reversed' : '._builtin:reversed',
+ 'sorted' : '._builtin:sorted',
+ 'any' : '._builtin:any',
+ 'all' : '._builtin:all',
+ 'set' : '._builtin:set',
+ 'frozenset' : '._builtin:frozenset',
+ 'BaseException' : '._builtin:BaseException',
+ 'GeneratorExit' : '._builtin:GeneratorExit',
+ '_sysex' : '._builtin:_sysex',
+ 'print_' : '._builtin:print_',
+ '_reraise' : '._builtin:_reraise',
+ '_tryimport' : '._builtin:_tryimport',
+ 'exec_' : '._builtin:exec_',
+ '_basestring' : '._builtin:_basestring',
+ '_totext' : '._builtin:_totext',
+ '_isbytes' : '._builtin:_isbytes',
+ '_istext' : '._builtin:_istext',
+ '_getimself' : '._builtin:_getimself',
+ '_getfuncdict' : '._builtin:_getfuncdict',
+ '_getcode' : '._builtin:_getcode',
+ 'builtins' : '._builtin:builtins',
+ 'execfile' : '._builtin:execfile',
+ 'callable' : '._builtin:callable',
+ 'bytes' : '._builtin:bytes',
+ 'text' : '._builtin:text',
+ },
+
+    # input/output helpers
+ 'io' : {
+ '__doc__' : '._io:__doc__',
+ 'dupfile' : '._io.capture:dupfile',
+ 'TextIO' : '._io.capture:TextIO',
+ 'BytesIO' : '._io.capture:BytesIO',
+ 'FDCapture' : '._io.capture:FDCapture',
+ 'StdCapture' : '._io.capture:StdCapture',
+ 'StdCaptureFD' : '._io.capture:StdCaptureFD',
+ 'TerminalWriter' : '._io.terminalwriter:TerminalWriter',
+ 'ansi_print' : '._io.terminalwriter:ansi_print',
+ 'get_terminal_width' : '._io.terminalwriter:get_terminal_width',
+ 'saferepr' : '._io.saferepr:saferepr',
+ },
+
+ # small and mean xml/html generation
+ 'xml' : {
+ '__doc__' : '._xmlgen:__doc__',
+ 'html' : '._xmlgen:html',
+ 'Tag' : '._xmlgen:Tag',
+ 'raw' : '._xmlgen:raw',
+ 'Namespace' : '._xmlgen:Namespace',
+ 'escape' : '._xmlgen:escape',
+ },
+
+ 'log' : {
+ # logging API ('producers' and 'consumers' connected via keywords)
+ '__doc__' : '._log:__doc__',
+ '_apiwarn' : '._log.warning:_apiwarn',
+ 'Producer' : '._log.log:Producer',
+ 'setconsumer' : '._log.log:setconsumer',
+ '_setstate' : '._log.log:setstate',
+ '_getstate' : '._log.log:getstate',
+ 'Path' : '._log.log:Path',
+ 'STDOUT' : '._log.log:STDOUT',
+ 'STDERR' : '._log.log:STDERR',
+ 'Syslog' : '._log.log:Syslog',
+ },
+
+})
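
A hedged usage sketch of the lazy loading configured above: none of the submodules are
imported until the corresponding attribute is first touched, at which point apipkg
resolves the dotted 'module:name' target from exportdefs.

    import py

    p = py.path.local(".")             # first use imports py._path.local lazily
    print(p.join("setup.py").check())  # LocalPath API from '._path.local:LocalPath'
    print(py.io.saferepr("x" * 500))   # truncated one-line repr from '._io.saferepr:saferepr'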
diff --git a/testing/web-platform/tests/tools/third_party/py/py/__init__.pyi b/testing/web-platform/tests/tools/third_party/py/py/__init__.pyi
new file mode 100644
index 0000000000..96859e310f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/__init__.pyi
@@ -0,0 +1,20 @@
+from typing import Any
+
+# py allows using e.g. py.path.local even without importing py.path.
+# So import these submodules implicitly.
+from . import error
+from . import iniconfig
+from . import path
+from . import io
+from . import xml
+
+__version__: str
+
+# Untyped modules below here.
+std: Any
+test: Any
+process: Any
+apipkg: Any
+code: Any
+builtin: Any
+log: Any
diff --git a/testing/web-platform/tests/tools/third_party/py/py/__metainfo.py b/testing/web-platform/tests/tools/third_party/py/py/__metainfo.py
new file mode 100644
index 0000000000..12581eb7af
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/__metainfo.py
@@ -0,0 +1,2 @@
+import py
+pydir = py.path.local(py.__file__).dirpath()
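
A small hedged sketch of what these two lines provide: py._pydir (exported through the
'_pydir' entry in __init__.py) is a py.path.local pointing at the installed package
directory.

    import py

    print(py._pydir)                              # e.g. .../site-packages/py
    print(py._pydir.join("_builtin.py").check())  # True if that file exists there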
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_builtin.py b/testing/web-platform/tests/tools/third_party/py/py/_builtin.py
new file mode 100644
index 0000000000..ddc89fc7be
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_builtin.py
@@ -0,0 +1,149 @@
+import sys
+
+
+# Passthrough for builtins supported with py27.
+BaseException = BaseException
+GeneratorExit = GeneratorExit
+_sysex = (KeyboardInterrupt, SystemExit, MemoryError, GeneratorExit)
+all = all
+any = any
+callable = callable
+enumerate = enumerate
+reversed = reversed
+set, frozenset = set, frozenset
+sorted = sorted
+
+
+if sys.version_info >= (3, 0):
+ exec("print_ = print ; exec_=exec")
+ import builtins
+
+ # some backward compatibility helpers
+ _basestring = str
+ def _totext(obj, encoding=None, errors=None):
+ if isinstance(obj, bytes):
+ if errors is None:
+ obj = obj.decode(encoding)
+ else:
+ obj = obj.decode(encoding, errors)
+ elif not isinstance(obj, str):
+ obj = str(obj)
+ return obj
+
+ def _isbytes(x):
+ return isinstance(x, bytes)
+
+ def _istext(x):
+ return isinstance(x, str)
+
+ text = str
+ bytes = bytes
+
+ def _getimself(function):
+ return getattr(function, '__self__', None)
+
+ def _getfuncdict(function):
+ return getattr(function, "__dict__", None)
+
+ def _getcode(function):
+ return getattr(function, "__code__", None)
+
+ def execfile(fn, globs=None, locs=None):
+ if globs is None:
+ back = sys._getframe(1)
+ globs = back.f_globals
+ locs = back.f_locals
+ del back
+ elif locs is None:
+ locs = globs
+ fp = open(fn, "r")
+ try:
+ source = fp.read()
+ finally:
+ fp.close()
+ co = compile(source, fn, "exec", dont_inherit=True)
+ exec_(co, globs, locs)
+
+else:
+ import __builtin__ as builtins
+ _totext = unicode
+ _basestring = basestring
+ text = unicode
+ bytes = str
+ execfile = execfile
+ callable = callable
+ def _isbytes(x):
+ return isinstance(x, str)
+ def _istext(x):
+ return isinstance(x, unicode)
+
+ def _getimself(function):
+ return getattr(function, 'im_self', None)
+
+ def _getfuncdict(function):
+ return getattr(function, "__dict__", None)
+
+ def _getcode(function):
+ try:
+ return getattr(function, "__code__")
+ except AttributeError:
+ return getattr(function, "func_code", None)
+
+ def print_(*args, **kwargs):
+ """ minimal backport of py3k print statement. """
+ sep = ' '
+ if 'sep' in kwargs:
+ sep = kwargs.pop('sep')
+ end = '\n'
+ if 'end' in kwargs:
+ end = kwargs.pop('end')
+ file = 'file' in kwargs and kwargs.pop('file') or sys.stdout
+ if kwargs:
+ args = ", ".join([str(x) for x in kwargs])
+ raise TypeError("invalid keyword arguments: %s" % args)
+ at_start = True
+ for x in args:
+ if not at_start:
+ file.write(sep)
+ file.write(str(x))
+ at_start = False
+ file.write(end)
+
+ def exec_(obj, globals=None, locals=None):
+ """ minimal backport of py3k exec statement. """
+ __tracebackhide__ = True
+ if globals is None:
+ frame = sys._getframe(1)
+ globals = frame.f_globals
+ if locals is None:
+ locals = frame.f_locals
+ elif locals is None:
+ locals = globals
+ exec2(obj, globals, locals)
+
+if sys.version_info >= (3, 0):
+ def _reraise(cls, val, tb):
+ __tracebackhide__ = True
+ assert hasattr(val, '__traceback__')
+ raise cls.with_traceback(val, tb)
+else:
+ exec ("""
+def _reraise(cls, val, tb):
+ __tracebackhide__ = True
+ raise cls, val, tb
+def exec2(obj, globals, locals):
+ __tracebackhide__ = True
+ exec obj in globals, locals
+""")
+
+def _tryimport(*names):
+ """ return the first successfully imported module. """
+ assert names
+ for name in names:
+ try:
+ __import__(name)
+ except ImportError:
+ excinfo = sys.exc_info()
+ else:
+ return sys.modules[name]
+ _reraise(*excinfo)
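
A hedged sketch of the compatibility helpers above, as exposed through py.builtin:

    import py

    text = py.builtin._totext(b"caf\xc3\xa9", "utf-8")   # bytes -> text on Python 2 and 3
    assert py.builtin._istext(text) and not py.builtin._isbytes(text)

    # _tryimport returns the first importable module; cStringIO only exists on Python 2
    io_mod = py.builtin._tryimport("cStringIO", "io")
    py.builtin.print_("loaded:", io_mod.__name__)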
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_code/__init__.py b/testing/web-platform/tests/tools/third_party/py/py/_code/__init__.py
new file mode 100644
index 0000000000..f15acf8513
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_code/__init__.py
@@ -0,0 +1 @@
+""" python inspection/code generation API """
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_code/_assertionnew.py b/testing/web-platform/tests/tools/third_party/py/py/_code/_assertionnew.py
new file mode 100644
index 0000000000..d03f29d870
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_code/_assertionnew.py
@@ -0,0 +1,322 @@
+"""
+Find intermediate evaluation results in assert statements through builtin AST.
+This should replace _assertionold.py eventually.
+"""
+
+import sys
+import ast
+
+import py
+from py._code.assertion import _format_explanation, BuiltinAssertionError
+
+
+def _is_ast_expr(node):
+ return isinstance(node, ast.expr)
+def _is_ast_stmt(node):
+ return isinstance(node, ast.stmt)
+
+
+class Failure(Exception):
+ """Error found while interpreting AST."""
+
+ def __init__(self, explanation=""):
+ self.cause = sys.exc_info()
+ self.explanation = explanation
+
+
+def interpret(source, frame, should_fail=False):
+ mod = ast.parse(source)
+ visitor = DebugInterpreter(frame)
+ try:
+ visitor.visit(mod)
+ except Failure:
+ failure = sys.exc_info()[1]
+ return getfailure(failure)
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --no-assert)")
+
+def run(offending_line, frame=None):
+ if frame is None:
+ frame = py.code.Frame(sys._getframe(1))
+ return interpret(offending_line, frame)
+
+def getfailure(failure):
+ explanation = _format_explanation(failure.explanation)
+ value = failure.cause[1]
+ if str(value):
+ lines = explanation.splitlines()
+ if not lines:
+ lines.append("")
+ lines[0] += " << %s" % (value,)
+ explanation = "\n".join(lines)
+ text = "%s: %s" % (failure.cause[0].__name__, explanation)
+ if text.startswith("AssertionError: assert "):
+ text = text[16:]
+ return text
+
+
+operator_map = {
+ ast.BitOr : "|",
+ ast.BitXor : "^",
+ ast.BitAnd : "&",
+ ast.LShift : "<<",
+ ast.RShift : ">>",
+ ast.Add : "+",
+ ast.Sub : "-",
+ ast.Mult : "*",
+ ast.Div : "/",
+ ast.FloorDiv : "//",
+ ast.Mod : "%",
+ ast.Eq : "==",
+ ast.NotEq : "!=",
+ ast.Lt : "<",
+ ast.LtE : "<=",
+ ast.Gt : ">",
+ ast.GtE : ">=",
+ ast.Pow : "**",
+ ast.Is : "is",
+ ast.IsNot : "is not",
+ ast.In : "in",
+ ast.NotIn : "not in"
+}
+
+unary_map = {
+ ast.Not : "not %s",
+ ast.Invert : "~%s",
+ ast.USub : "-%s",
+ ast.UAdd : "+%s"
+}
+
+
+class DebugInterpreter(ast.NodeVisitor):
+    """Interpret AST nodes to glean useful debugging information."""
+
+ def __init__(self, frame):
+ self.frame = frame
+
+ def generic_visit(self, node):
+ # Fallback when we don't have a special implementation.
+ if _is_ast_expr(node):
+ mod = ast.Expression(node)
+ co = self._compile(mod)
+ try:
+ result = self.frame.eval(co)
+ except Exception:
+ raise Failure()
+ explanation = self.frame.repr(result)
+ return explanation, result
+ elif _is_ast_stmt(node):
+ mod = ast.Module([node])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co)
+ except Exception:
+ raise Failure()
+ return None, None
+ else:
+ raise AssertionError("can't handle %s" %(node,))
+
+ def _compile(self, source, mode="eval"):
+ return compile(source, "<assertion interpretation>", mode)
+
+ def visit_Expr(self, expr):
+ return self.visit(expr.value)
+
+ def visit_Module(self, mod):
+ for stmt in mod.body:
+ self.visit(stmt)
+
+ def visit_Name(self, name):
+ explanation, result = self.generic_visit(name)
+ # See if the name is local.
+ source = "%r in locals() is not globals()" % (name.id,)
+ co = self._compile(source)
+ try:
+ local = self.frame.eval(co)
+ except Exception:
+ # have to assume it isn't
+ local = False
+ if not local:
+ return name.id, result
+ return explanation, result
+
+ def visit_Compare(self, comp):
+ left = comp.left
+ left_explanation, left_result = self.visit(left)
+ for op, next_op in zip(comp.ops, comp.comparators):
+ next_explanation, next_result = self.visit(next_op)
+ op_symbol = operator_map[op.__class__]
+ explanation = "%s %s %s" % (left_explanation, op_symbol,
+ next_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=next_result)
+ except Exception:
+ raise Failure(explanation)
+ try:
+ if not result:
+ break
+ except KeyboardInterrupt:
+ raise
+ except:
+ break
+ left_explanation, left_result = next_explanation, next_result
+
+ rcomp = py.code._reprcompare
+ if rcomp:
+ res = rcomp(op_symbol, left_result, next_result)
+ if res:
+ explanation = res
+ return explanation, result
+
+ def visit_BoolOp(self, boolop):
+ is_or = isinstance(boolop.op, ast.Or)
+ explanations = []
+ for operand in boolop.values:
+ explanation, result = self.visit(operand)
+ explanations.append(explanation)
+ if result == is_or:
+ break
+ name = is_or and " or " or " and "
+ explanation = "(" + name.join(explanations) + ")"
+ return explanation, result
+
+ def visit_UnaryOp(self, unary):
+ pattern = unary_map[unary.op.__class__]
+ operand_explanation, operand_result = self.visit(unary.operand)
+ explanation = pattern % (operand_explanation,)
+ co = self._compile(pattern % ("__exprinfo_expr",))
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=operand_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_BinOp(self, binop):
+ left_explanation, left_result = self.visit(binop.left)
+ right_explanation, right_result = self.visit(binop.right)
+ symbol = operator_map[binop.op.__class__]
+ explanation = "(%s %s %s)" % (left_explanation, symbol,
+ right_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=right_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_Call(self, call):
+ func_explanation, func = self.visit(call.func)
+ arg_explanations = []
+ ns = {"__exprinfo_func" : func}
+ arguments = []
+ for arg in call.args:
+ arg_explanation, arg_result = self.visit(arg)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ arguments.append(arg_name)
+ arg_explanations.append(arg_explanation)
+ for keyword in call.keywords:
+ arg_explanation, arg_result = self.visit(keyword.value)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ keyword_source = "%s=%%s" % (keyword.arg)
+ arguments.append(keyword_source % (arg_name,))
+ arg_explanations.append(keyword_source % (arg_explanation,))
+ if call.starargs:
+ arg_explanation, arg_result = self.visit(call.starargs)
+ arg_name = "__exprinfo_star"
+ ns[arg_name] = arg_result
+ arguments.append("*%s" % (arg_name,))
+ arg_explanations.append("*%s" % (arg_explanation,))
+ if call.kwargs:
+ arg_explanation, arg_result = self.visit(call.kwargs)
+ arg_name = "__exprinfo_kwds"
+ ns[arg_name] = arg_result
+ arguments.append("**%s" % (arg_name,))
+ arg_explanations.append("**%s" % (arg_explanation,))
+ args_explained = ", ".join(arg_explanations)
+ explanation = "%s(%s)" % (func_explanation, args_explained)
+ args = ", ".join(arguments)
+ source = "__exprinfo_func(%s)" % (args,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, **ns)
+ except Exception:
+ raise Failure(explanation)
+ pattern = "%s\n{%s = %s\n}"
+ rep = self.frame.repr(result)
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def _is_builtin_name(self, name):
+ pattern = "%r not in globals() and %r not in locals()"
+ source = pattern % (name.id, name.id)
+ co = self._compile(source)
+ try:
+ return self.frame.eval(co)
+ except Exception:
+ return False
+
+ def visit_Attribute(self, attr):
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ source_explanation, source_result = self.visit(attr.value)
+ explanation = "%s.%s" % (source_explanation, attr.attr)
+ source = "__exprinfo_expr.%s" % (attr.attr,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ raise Failure(explanation)
+ explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
+ self.frame.repr(result),
+ source_explanation, attr.attr)
+ # Check if the attr is from an instance.
+ source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
+ source = source % (attr.attr,)
+ co = self._compile(source)
+ try:
+ from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ from_instance = True
+ if from_instance:
+ rep = self.frame.repr(result)
+ pattern = "%s\n{%s = %s\n}"
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def visit_Assert(self, assrt):
+ test_explanation, test_result = self.visit(assrt.test)
+ if test_explanation.startswith("False\n{False =") and \
+ test_explanation.endswith("\n"):
+ test_explanation = test_explanation[15:-2]
+ explanation = "assert %s" % (test_explanation,)
+ if not test_result:
+ try:
+ raise BuiltinAssertionError
+ except Exception:
+ raise Failure(explanation)
+ return explanation, test_result
+
+ def visit_Assign(self, assign):
+ value_explanation, value_result = self.visit(assign.value)
+ explanation = "... = %s" % (value_explanation,)
+ name = ast.Name("__exprinfo_expr", ast.Load(),
+ lineno=assign.value.lineno,
+ col_offset=assign.value.col_offset)
+ new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
+ col_offset=assign.col_offset)
+ mod = ast.Module([new_assign])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co, __exprinfo_expr=value_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, value_result
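
A rough, hedged sketch of how this interpreter is reached through the py.code facade
(py.code._reinterpret maps to interpret() above via py/_code/assertion.py); as the
legacy re-interpretation path it may not handle every construct on newer Python
versions. The demo() function and its variables are purely illustrative.

    import sys
    import py

    def demo():
        x, y = 3, 4
        try:
            assert x + 1 == y * 2
        except AssertionError:
            frame = py.code.Frame(sys._getframe())
            # re-run the failing expression to recover intermediate values
            print(py.code._reinterpret("assert x + 1 == y * 2", frame,
                                       should_fail=True))

    demo()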
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_code/_assertionold.py b/testing/web-platform/tests/tools/third_party/py/py/_code/_assertionold.py
new file mode 100644
index 0000000000..1bb70a875d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_code/_assertionold.py
@@ -0,0 +1,556 @@
+import py
+import sys, inspect
+from compiler import parse, ast, pycodegen
+from py._code.assertion import BuiltinAssertionError, _format_explanation
+import types
+
+passthroughex = py.builtin._sysex
+
+class Failure:
+ def __init__(self, node):
+ self.exc, self.value, self.tb = sys.exc_info()
+ self.node = node
+
+class View(object):
+ """View base class.
+
+ If C is a subclass of View, then C(x) creates a proxy object around
+ the object x. The actual class of the proxy is not C in general,
+ but a *subclass* of C determined by the rules below. To avoid confusion
+ we call view class the class of the proxy (a subclass of C, so of View)
+ and object class the class of x.
+
+ Attributes and methods not found in the proxy are automatically read on x.
+ Other operations like setting attributes are performed on the proxy, as
+ determined by its view class. The object x is available from the proxy
+ as its __obj__ attribute.
+
+ The view class selection is determined by the __view__ tuples and the
+ optional __viewkey__ method. By default, the selected view class is the
+ most specific subclass of C whose __view__ mentions the class of x.
+ If no such subclass is found, the search proceeds with the parent
+ object classes. For example, C(True) will first look for a subclass
+ of C with __view__ = (..., bool, ...) and only if it doesn't find any
+ look for one with __view__ = (..., int, ...), and then ..., object,...
+ If everything fails the class C itself is considered to be the default.
+
+ Alternatively, the view class selection can be driven by another aspect
+ of the object x, instead of the class of x, by overriding __viewkey__.
+ See last example at the end of this module.
+ """
+
+ _viewcache = {}
+ __view__ = ()
+
+ def __new__(rootclass, obj, *args, **kwds):
+ self = object.__new__(rootclass)
+ self.__obj__ = obj
+ self.__rootclass__ = rootclass
+ key = self.__viewkey__()
+ try:
+ self.__class__ = self._viewcache[key]
+ except KeyError:
+ self.__class__ = self._selectsubclass(key)
+ return self
+
+ def __getattr__(self, attr):
+ # attributes not found in the normal hierarchy rooted on View
+ # are looked up in the object's real class
+ return getattr(self.__obj__, attr)
+
+ def __viewkey__(self):
+ return self.__obj__.__class__
+
+ def __matchkey__(self, key, subclasses):
+ if inspect.isclass(key):
+ keys = inspect.getmro(key)
+ else:
+ keys = [key]
+ for key in keys:
+ result = [C for C in subclasses if key in C.__view__]
+ if result:
+ return result
+ return []
+
+ def _selectsubclass(self, key):
+ subclasses = list(enumsubclasses(self.__rootclass__))
+ for C in subclasses:
+ if not isinstance(C.__view__, tuple):
+ C.__view__ = (C.__view__,)
+ choices = self.__matchkey__(key, subclasses)
+ if not choices:
+ return self.__rootclass__
+ elif len(choices) == 1:
+ return choices[0]
+ else:
+ # combine the multiple choices
+ return type('?', tuple(choices), {})
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
+
+
+def enumsubclasses(cls):
+ for subcls in cls.__subclasses__():
+ for subsubclass in enumsubclasses(subcls):
+ yield subsubclass
+ yield cls
+
+
+class Interpretable(View):
+ """A parse tree node with a few extra methods."""
+ explanation = None
+
+ def is_builtin(self, frame):
+ return False
+
+ def eval(self, frame):
+ # fall-back for unknown expression nodes
+ try:
+ expr = ast.Expression(self.__obj__)
+ expr.filename = '<eval>'
+ self.__obj__.filename = '<eval>'
+ co = pycodegen.ExpressionCodeGenerator(expr).getCode()
+ result = frame.eval(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.result = result
+ self.explanation = self.explanation or frame.repr(self.result)
+
+ def run(self, frame):
+ # fall-back for unknown statement nodes
+ try:
+ expr = ast.Module(None, ast.Stmt([self.__obj__]))
+ expr.filename = '<run>'
+ co = pycodegen.ModuleCodeGenerator(expr).getCode()
+ frame.exec_(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ def nice_explanation(self):
+ return _format_explanation(self.explanation)
+
+
+class Name(Interpretable):
+ __view__ = ast.Name
+
+ def is_local(self, frame):
+ source = '%r in locals() is not globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_global(self, frame):
+ source = '%r in globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_builtin(self, frame):
+ source = '%r not in locals() and %r not in globals()' % (
+ self.name, self.name)
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ super(Name, self).eval(frame)
+ if not self.is_local(frame):
+ self.explanation = self.name
+
+class Compare(Interpretable):
+ __view__ = ast.Compare
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ for operation, expr2 in self.ops:
+ if hasattr(self, 'result'):
+ # shortcutting in chained expressions
+ if not frame.is_true(self.result):
+ break
+ expr2 = Interpretable(expr2)
+ expr2.eval(frame)
+ self.explanation = "%s %s %s" % (
+ expr.explanation, operation, expr2.explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % operation
+ try:
+ self.result = frame.eval(source,
+ __exprinfo_left=expr.result,
+ __exprinfo_right=expr2.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ expr = expr2
+
+class And(Interpretable):
+ __view__ = ast.And
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if not frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' and '.join(explanations) + ')'
+
+class Or(Interpretable):
+ __view__ = ast.Or
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' or '.join(explanations) + ')'
+
+
+# == Unary operations ==
+keepalive = []
+for astclass, astpattern in {
+ ast.Not : 'not __exprinfo_expr',
+ ast.Invert : '(~__exprinfo_expr)',
+ }.items():
+
+ class UnaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.explanation = astpattern.replace('__exprinfo_expr',
+ expr.explanation)
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(UnaryArith)
+
+# == Binary operations ==
+for astclass, astpattern in {
+ ast.Add : '(__exprinfo_left + __exprinfo_right)',
+ ast.Sub : '(__exprinfo_left - __exprinfo_right)',
+ ast.Mul : '(__exprinfo_left * __exprinfo_right)',
+ ast.Div : '(__exprinfo_left / __exprinfo_right)',
+ ast.Mod : '(__exprinfo_left % __exprinfo_right)',
+ ast.Power : '(__exprinfo_left ** __exprinfo_right)',
+ }.items():
+
+ class BinaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ left = Interpretable(self.left)
+ left.eval(frame)
+ right = Interpretable(self.right)
+ right.eval(frame)
+ self.explanation = (astpattern
+ .replace('__exprinfo_left', left .explanation)
+ .replace('__exprinfo_right', right.explanation))
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_left=left.result,
+ __exprinfo_right=right.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(BinaryArith)
+
+
+class CallFunc(Interpretable):
+ __view__ = ast.CallFunc
+
+ def is_bool(self, frame):
+ source = 'isinstance(__exprinfo_value, bool)'
+ try:
+ return frame.is_true(frame.eval(source,
+ __exprinfo_value=self.result))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ node = Interpretable(self.node)
+ node.eval(frame)
+ explanations = []
+ vars = {'__exprinfo_fn': node.result}
+ source = '__exprinfo_fn('
+ for a in self.args:
+ if isinstance(a, ast.Keyword):
+ keyword = a.name
+ a = a.expr
+ else:
+ keyword = None
+ a = Interpretable(a)
+ a.eval(frame)
+ argname = '__exprinfo_%d' % len(vars)
+ vars[argname] = a.result
+ if keyword is None:
+ source += argname + ','
+ explanations.append(a.explanation)
+ else:
+ source += '%s=%s,' % (keyword, argname)
+ explanations.append('%s=%s' % (keyword, a.explanation))
+ if self.star_args:
+ star_args = Interpretable(self.star_args)
+ star_args.eval(frame)
+ argname = '__exprinfo_star'
+ vars[argname] = star_args.result
+ source += '*' + argname + ','
+ explanations.append('*' + star_args.explanation)
+ if self.dstar_args:
+ dstar_args = Interpretable(self.dstar_args)
+ dstar_args.eval(frame)
+ argname = '__exprinfo_kwds'
+ vars[argname] = dstar_args.result
+ source += '**' + argname + ','
+ explanations.append('**' + dstar_args.explanation)
+ self.explanation = "%s(%s)" % (
+ node.explanation, ', '.join(explanations))
+ if source.endswith(','):
+ source = source[:-1]
+ source += ')'
+ try:
+ self.result = frame.eval(source, **vars)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ if not node.is_builtin(frame) or not self.is_bool(frame):
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+class Getattr(Interpretable):
+ __view__ = ast.Getattr
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ source = '__exprinfo_expr.%s' % self.attrname
+ try:
+ self.result = frame.eval(source, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.explanation = '%s.%s' % (expr.explanation, self.attrname)
+ # if the attribute comes from the instance, its value is interesting
+ source = ('hasattr(__exprinfo_expr, "__dict__") and '
+ '%r in __exprinfo_expr.__dict__' % self.attrname)
+ try:
+ from_instance = frame.is_true(
+ frame.eval(source, __exprinfo_expr=expr.result))
+ except passthroughex:
+ raise
+ except:
+ from_instance = True
+ if from_instance:
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+# == Re-interpretation of full statements ==
+
+class Assert(Interpretable):
+ __view__ = ast.Assert
+
+ def run(self, frame):
+ test = Interpretable(self.test)
+ test.eval(frame)
+ # simplify 'assert False where False = ...'
+ if (test.explanation.startswith('False\n{False = ') and
+ test.explanation.endswith('\n}')):
+ test.explanation = test.explanation[15:-2]
+ # print the result as 'assert <explanation>'
+ self.result = test.result
+ self.explanation = 'assert ' + test.explanation
+ if not frame.is_true(test.result):
+ try:
+ raise BuiltinAssertionError
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Assign(Interpretable):
+ __view__ = ast.Assign
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = '... = ' + expr.explanation
+ # fall-back-run the rest of the assignment
+ ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
+ mod = ast.Module(None, ast.Stmt([ass]))
+ mod.filename = '<run>'
+ co = pycodegen.ModuleCodeGenerator(mod).getCode()
+ try:
+ frame.exec_(co, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Discard(Interpretable):
+ __view__ = ast.Discard
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = expr.explanation
+
+class Stmt(Interpretable):
+ __view__ = ast.Stmt
+
+ def run(self, frame):
+ for stmt in self.nodes:
+ stmt = Interpretable(stmt)
+ stmt.run(frame)
+
+
+def report_failure(e):
+ explanation = e.node.nice_explanation()
+ if explanation:
+ explanation = ", in: " + explanation
+ else:
+ explanation = ""
+ sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
+
+def check(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ expr = parse(s, 'eval')
+ assert isinstance(expr, ast.Expression)
+ node = Interpretable(expr.node)
+ try:
+ node.eval(frame)
+ except passthroughex:
+ raise
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+ else:
+ if not frame.is_true(node.result):
+ sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
+
+
+###########################################################
+# API / Entry points
+# #########################################################
+
+def interpret(source, frame, should_fail=False):
+ module = Interpretable(parse(source, 'exec').node)
+ #print "got module", module
+ if isinstance(frame, types.FrameType):
+ frame = py.code.Frame(frame)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ return getfailure(e)
+ except passthroughex:
+ raise
+ except:
+ import traceback
+ traceback.print_exc()
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --nomagic)")
+ else:
+ return None
+
+def getmsg(excinfo):
+ if isinstance(excinfo, tuple):
+ excinfo = py.code.ExceptionInfo(excinfo)
+ #frame, line = gettbline(tb)
+ #frame = py.code.Frame(frame)
+ #return interpret(line, frame)
+
+ tb = excinfo.traceback[-1]
+ source = str(tb.statement).strip()
+ x = interpret(source, tb.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ return x
+
+def getfailure(e):
+ explanation = e.node.nice_explanation()
+ if str(e.value):
+ lines = explanation.split('\n')
+ lines[0] += " << %s" % (e.value,)
+ explanation = '\n'.join(lines)
+ text = "%s: %s" % (e.exc.__name__, explanation)
+ if text.startswith('AssertionError: assert '):
+ text = text[16:]
+ return text
+
+def run(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ module = Interpretable(parse(s, 'exec').node)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+
+
+if __name__ == '__main__':
+ # example:
+ def f():
+ return 5
+ def g():
+ return 3
+ def h(x):
+ return 'never'
+ check("f() * g() == 5")
+ check("not f()")
+ check("not (f() and g() or 0)")
+ check("f() == g()")
+ i = 4
+ check("i == f()")
+ check("len(f()) == 0")
+ check("isinstance(2+3+4, float)")
+
+ run("x = i")
+ check("x == 5")
+
+ run("assert not f(), 'oops'")
+ run("a, b, c = 1, 2")
+ run("a, b, c = f()")
+
+ check("max([f(),g()]) == 4")
+ check("'hello'[g()] == 'h'")
+ run("'guk%d' % h(f())")
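
A hedged toy of the __view__ dispatch described in the View docstring above (Python 2
only, since this module imports the removed 'compiler' package); Base/IntView/StrView
are illustrative names, not part of the library.

    from py._code._assertionold import View

    class Base(View):
        pass

    class IntView(Base):
        __view__ = int

    class StrView(Base):
        __view__ = str

    print(type(Base(3)).__name__)   # 'IntView' -- selected by matching __view__
    print(Base("abc").upper())      # unknown attributes fall through to the wrapped str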
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_code/_py2traceback.py b/testing/web-platform/tests/tools/third_party/py/py/_code/_py2traceback.py
new file mode 100644
index 0000000000..d65e27cb73
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_code/_py2traceback.py
@@ -0,0 +1,79 @@
+# copied from python-2.7.3's traceback.py
+# CHANGES:
+# - some_str is replaced, trying to create unicode strings
+#
+import types
+
+def format_exception_only(etype, value):
+ """Format the exception part of a traceback.
+
+ The arguments are the exception type and value such as given by
+ sys.last_type and sys.last_value. The return value is a list of
+ strings, each ending in a newline.
+
+ Normally, the list contains a single string; however, for
+ SyntaxError exceptions, it contains several lines that (when
+ printed) display detailed information about where the syntax
+ error occurred.
+
+ The message indicating which exception occurred is always the last
+ string in the list.
+
+ """
+
+ # An instance should not have a meaningful value parameter, but
+ # sometimes does, particularly for string exceptions, such as
+ # >>> raise string1, string2 # deprecated
+ #
+    # Clear these out first because issubclass(string1, SyntaxError)
+ # would throw another exception and mask the original problem.
+ if (isinstance(etype, BaseException) or
+ isinstance(etype, types.InstanceType) or
+ etype is None or type(etype) is str):
+ return [_format_final_exc_line(etype, value)]
+
+ stype = etype.__name__
+
+ if not issubclass(etype, SyntaxError):
+ return [_format_final_exc_line(stype, value)]
+
+ # It was a syntax error; show exactly where the problem was found.
+ lines = []
+ try:
+ msg, (filename, lineno, offset, badline) = value.args
+ except Exception:
+ pass
+ else:
+ filename = filename or "<string>"
+ lines.append(' File "%s", line %d\n' % (filename, lineno))
+ if badline is not None:
+ lines.append(' %s\n' % badline.strip())
+ if offset is not None:
+ caretspace = badline.rstrip('\n')[:offset].lstrip()
+                # non-space whitespace (like tabs) must be kept for alignment
+ caretspace = ((c.isspace() and c or ' ') for c in caretspace)
+ # only three spaces to account for offset1 == pos 0
+ lines.append(' %s^\n' % ''.join(caretspace))
+ value = msg
+
+ lines.append(_format_final_exc_line(stype, value))
+ return lines
+
+def _format_final_exc_line(etype, value):
+    """Return a single formatted line -- normal case for format_exception_only"""
+ valuestr = _some_str(value)
+ if value is None or not valuestr:
+ line = "%s\n" % etype
+ else:
+ line = "%s: %s\n" % (etype, valuestr)
+ return line
+
+def _some_str(value):
+ try:
+ return unicode(value)
+ except Exception:
+ try:
+ return str(value)
+ except Exception:
+ pass
+ return '<unprintable %s object>' % type(value).__name__
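
A hedged Python 2 sketch of the fallback above (code.py selects it when
sys.version_info[0] < 3); it mirrors the stdlib function of the same name:

    import sys
    from py._code._py2traceback import format_exception_only

    try:
        compile("def broken(:\n", "<demo>", "exec")
    except SyntaxError:
        etype, value = sys.exc_info()[:2]
        for line in format_exception_only(etype, value):
            sys.stdout.write(line)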
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_code/assertion.py b/testing/web-platform/tests/tools/third_party/py/py/_code/assertion.py
new file mode 100644
index 0000000000..ff1643799c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_code/assertion.py
@@ -0,0 +1,90 @@
+import sys
+import py
+
+BuiltinAssertionError = py.builtin.builtins.AssertionError
+
+_reprcompare = None # if set, will be called by assert reinterp for comparison ops
+
+def _format_explanation(explanation):
+ """This formats an explanation
+
+    Normally all embedded newlines are escaped; however, there are
+    three exceptions: \n{, \n} and \n~. The first two are intended to
+    cover nested explanations; see the function and attribute explanations
+    for examples (visit_Call(), visit_Attribute()). The last one is
+ for when one explanation needs to span multiple lines, e.g. when
+ displaying diffs.
+ """
+ raw_lines = (explanation or '').split('\n')
+ # escape newlines not followed by {, } and ~
+ lines = [raw_lines[0]]
+ for l in raw_lines[1:]:
+ if l.startswith('{') or l.startswith('}') or l.startswith('~'):
+ lines.append(l)
+ else:
+ lines[-1] += '\\n' + l
+
+ result = lines[:1]
+ stack = [0]
+ stackcnt = [0]
+ for line in lines[1:]:
+ if line.startswith('{'):
+ if stackcnt[-1]:
+ s = 'and '
+ else:
+ s = 'where '
+ stack.append(len(result))
+ stackcnt[-1] += 1
+ stackcnt.append(0)
+ result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
+ elif line.startswith('}'):
+ assert line.startswith('}')
+ stack.pop()
+ stackcnt.pop()
+ result[stack[-1]] += line[1:]
+ else:
+ assert line.startswith('~')
+ result.append(' '*len(stack) + line[1:])
+ assert len(stack) == 1
+ return '\n'.join(result)
+
+
+class AssertionError(BuiltinAssertionError):
+ def __init__(self, *args):
+ BuiltinAssertionError.__init__(self, *args)
+ if args:
+ try:
+ self.msg = str(args[0])
+ except py.builtin._sysex:
+ raise
+ except:
+ self.msg = "<[broken __repr__] %s at %0xd>" %(
+ args[0].__class__, id(args[0]))
+ else:
+ f = py.code.Frame(sys._getframe(1))
+ try:
+ source = f.code.fullsource
+ if source is not None:
+ try:
+ source = source.getstatement(f.lineno, assertion=True)
+ except IndexError:
+ source = None
+ else:
+ source = str(source.deindent()).strip()
+ except py.error.ENOENT:
+ source = None
+ # this can also occur during reinterpretation, when the
+ # co_filename is set to "<run>".
+ if source:
+ self.msg = reinterpret(source, f, should_fail=True)
+ else:
+ self.msg = "<could not determine information>"
+ if not self.args:
+ self.args = (self.msg,)
+
+if sys.version_info > (3, 0):
+ AssertionError.__module__ = "builtins"
+ reinterpret_old = "old reinterpretation not available for py3"
+else:
+ from py._code._assertionold import interpret as reinterpret_old
+from py._code._assertionnew import interpret as reinterpret
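
A hedged sketch of the explanation format described in _format_explanation's docstring,
using the entry exported as py.code._format_explanation:

    import py

    # '\n{' opens a nested explanation and '\n}' closes it again
    explanation = "assert res\n{res = f(3)\n}"
    print(py.code._format_explanation(explanation))
    # prints the assertion on the first line and the nested
    # 'where res = f(3)' part indented beneath it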
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_code/code.py b/testing/web-platform/tests/tools/third_party/py/py/_code/code.py
new file mode 100644
index 0000000000..dad796283f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_code/code.py
@@ -0,0 +1,796 @@
+import py
+import sys
+from inspect import CO_VARARGS, CO_VARKEYWORDS, isclass
+
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+if sys.version_info[0] >= 3:
+ from traceback import format_exception_only
+else:
+ from py._code._py2traceback import format_exception_only
+
+import traceback
+
+
+class Code(object):
+ """ wrapper around Python code objects """
+ def __init__(self, rawcode):
+ if not hasattr(rawcode, "co_filename"):
+ rawcode = py.code.getrawcode(rawcode)
+ try:
+ self.filename = rawcode.co_filename
+ self.firstlineno = rawcode.co_firstlineno - 1
+ self.name = rawcode.co_name
+ except AttributeError:
+ raise TypeError("not a code object: %r" % (rawcode,))
+ self.raw = rawcode
+
+ def __eq__(self, other):
+ return self.raw == other.raw
+
+ def __ne__(self, other):
+ return not self == other
+
+ @property
+ def path(self):
+ """ return a path object pointing to source code (note that it
+ might not point to an actually existing file). """
+ p = py.path.local(self.raw.co_filename)
+ # maybe don't try this checking
+ if not p.check():
+ # XXX maybe try harder like the weird logic
+ # in the standard lib [linecache.updatecache] does?
+ p = self.raw.co_filename
+ return p
+
+ @property
+ def fullsource(self):
+ """ return a py.code.Source object for the full source file of the code
+ """
+ from py._code import source
+ full, _ = source.findsource(self.raw)
+ return full
+
+ def source(self):
+ """ return a py.code.Source object for the code object's source only
+ """
+ # return source only for that part of code
+ return py.code.Source(self.raw)
+
+ def getargs(self, var=False):
+ """ return a tuple with the argument names for the code object
+
+ if 'var' is set True also return the names of the variable and
+ keyword arguments when present
+ """
+        # handy shortcut for getting args
+ raw = self.raw
+ argcount = raw.co_argcount
+ if var:
+ argcount += raw.co_flags & CO_VARARGS
+ argcount += raw.co_flags & CO_VARKEYWORDS
+ return raw.co_varnames[:argcount]
+
+class Frame(object):
+ """Wrapper around a Python frame holding f_locals and f_globals
+ in which expressions can be evaluated."""
+
+ def __init__(self, frame):
+ self.lineno = frame.f_lineno - 1
+ self.f_globals = frame.f_globals
+ self.f_locals = frame.f_locals
+ self.raw = frame
+ self.code = py.code.Code(frame.f_code)
+
+ @property
+ def statement(self):
+ """ statement this frame is at """
+ if self.code.fullsource is None:
+ return py.code.Source("")
+ return self.code.fullsource.getstatement(self.lineno)
+
+ def eval(self, code, **vars):
+ """ evaluate 'code' in the frame
+
+ 'vars' are optional additional local variables
+
+ returns the result of the evaluation
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ return eval(code, self.f_globals, f_locals)
+
+ def exec_(self, code, **vars):
+ """ exec 'code' in the frame
+
+        'vars' are optional additional local variables
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ py.builtin.exec_(code, self.f_globals, f_locals)
+
+ def repr(self, object):
+ """ return a 'safe' (non-recursive, one-line) string repr for 'object'
+ """
+ return py.io.saferepr(object)
+
+ def is_true(self, object):
+ return object
+
+ def getargs(self, var=False):
+ """ return a list of tuples (name, value) for all arguments
+
+ if 'var' is set True also include the variable and keyword
+ arguments when present
+ """
+ retval = []
+ for arg in self.code.getargs(var):
+ try:
+ retval.append((arg, self.f_locals[arg]))
+ except KeyError:
+ pass # this can occur when using Psyco
+ return retval
+
+
+class TracebackEntry(object):
+ """ a single entry in a traceback """
+
+ _repr_style = None
+ exprinfo = None
+
+ def __init__(self, rawentry):
+ self._rawentry = rawentry
+ self.lineno = rawentry.tb_lineno - 1
+
+ def set_repr_style(self, mode):
+ assert mode in ("short", "long")
+ self._repr_style = mode
+
+ @property
+ def frame(self):
+ return py.code.Frame(self._rawentry.tb_frame)
+
+ @property
+ def relline(self):
+ return self.lineno - self.frame.code.firstlineno
+
+ def __repr__(self):
+ return "<TracebackEntry %s:%d>" % (self.frame.code.path, self.lineno+1)
+
+ @property
+ def statement(self):
+ """ py.code.Source object for the current statement """
+ source = self.frame.code.fullsource
+ return source.getstatement(self.lineno)
+
+ @property
+ def path(self):
+ """ path to the source code """
+ return self.frame.code.path
+
+ def getlocals(self):
+ return self.frame.f_locals
+    locals = property(getlocals, None, None, "locals of underlying frame")
+
+ def reinterpret(self):
+        """Reinterpret the failing statement and return detailed information
+        about the operations performed."""
+ if self.exprinfo is None:
+ source = str(self.statement).strip()
+ x = py.code._reinterpret(source, self.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ self.exprinfo = x
+ return self.exprinfo
+
+ def getfirstlinesource(self):
+ # on Jython this firstlineno can be -1 apparently
+ return max(self.frame.code.firstlineno, 0)
+
+ def getsource(self, astcache=None):
+ """ return failing source code. """
+ # we use the passed in astcache to not reparse asttrees
+ # within exception info printing
+ from py._code.source import getstatementrange_ast
+ source = self.frame.code.fullsource
+ if source is None:
+ return None
+ key = astnode = None
+ if astcache is not None:
+ key = self.frame.code.path
+ if key is not None:
+ astnode = astcache.get(key, None)
+ start = self.getfirstlinesource()
+ try:
+ astnode, _, end = getstatementrange_ast(self.lineno, source,
+ astnode=astnode)
+ except SyntaxError:
+ end = self.lineno + 1
+ else:
+ if key is not None:
+ astcache[key] = astnode
+ return source[start:end]
+
+ source = property(getsource)
+
+ def ishidden(self):
+ """ return True if the current frame has a var __tracebackhide__
+ resolving to True
+
+ mostly for internal use
+ """
+ try:
+ return self.frame.f_locals['__tracebackhide__']
+ except KeyError:
+ try:
+ return self.frame.f_globals['__tracebackhide__']
+ except KeyError:
+ return False
+
+ def __str__(self):
+ try:
+ fn = str(self.path)
+ except py.error.Error:
+ fn = '???'
+ name = self.frame.code.name
+ try:
+ line = str(self.statement).lstrip()
+ except KeyboardInterrupt:
+ raise
+ except:
+ line = "???"
+ return " File %r:%d in %s\n %s\n" % (fn, self.lineno+1, name, line)
+
+ def name(self):
+ return self.frame.code.raw.co_name
+    name = property(name, None, None, "co_name of underlying code")
+
+
+class Traceback(list):
+ """ Traceback objects encapsulate and offer higher level
+ access to Traceback entries.
+ """
+ Entry = TracebackEntry
+
+ def __init__(self, tb):
+ """ initialize from given python traceback object. """
+ if hasattr(tb, 'tb_next'):
+ def f(cur):
+ while cur is not None:
+ yield self.Entry(cur)
+ cur = cur.tb_next
+ list.__init__(self, f(tb))
+ else:
+ list.__init__(self, tb)
+
+ def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
+ """ return a Traceback instance wrapping part of this Traceback
+
+        by providing any combination of path, lineno and firstlineno, the
+ first frame to start the to-be-returned traceback is determined
+
+ this allows cutting the first part of a Traceback instance e.g.
+ for formatting reasons (removing some uninteresting bits that deal
+ with handling of the exception/traceback)
+ """
+ for x in self:
+ code = x.frame.code
+ codepath = code.path
+ if ((path is None or codepath == path) and
+ (excludepath is None or not hasattr(codepath, 'relto') or
+ not codepath.relto(excludepath)) and
+ (lineno is None or x.lineno == lineno) and
+ (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
+ return Traceback(x._rawentry)
+ return self
+
+ def __getitem__(self, key):
+ val = super(Traceback, self).__getitem__(key)
+ if isinstance(key, type(slice(0))):
+ val = self.__class__(val)
+ return val
+
+ def filter(self, fn=lambda x: not x.ishidden()):
+ """ return a Traceback instance with certain items removed
+
+ fn is a function that gets a single argument, a TracebackItem
+ instance, and should return True when the item should be added
+ to the Traceback, False when not
+
+ by default this removes all the TracebackItems which are hidden
+ (see ishidden() above)
+ """
+ return Traceback(filter(fn, self))
+
+ def getcrashentry(self):
+        """ return last non-hidden traceback entry that led
+ to the exception of a traceback.
+ """
+ for i in range(-1, -len(self)-1, -1):
+ entry = self[i]
+ if not entry.ishidden():
+ return entry
+ return self[-1]
+
+ def recursionindex(self):
+ """ return the index of the frame/TracebackItem where recursion
+ originates if appropriate, None if no recursion occurred
+ """
+ cache = {}
+ for i, entry in enumerate(self):
+ # id for the code.raw is needed to work around
+ # the strange metaprogramming in the decorator lib from pypi
+ # which generates code objects that have hash/value equality
+ #XXX needs a test
+ key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
+ #print "checking for recursion at", key
+ l = cache.setdefault(key, [])
+ if l:
+ f = entry.frame
+ loc = f.f_locals
+ for otherloc in l:
+ if f.is_true(f.eval(co_equal,
+ __recursioncache_locals_1=loc,
+ __recursioncache_locals_2=otherloc)):
+ return i
+ l.append(entry.frame.f_locals)
+ return None
+
+co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
+ '?', 'eval')
+
+class ExceptionInfo(object):
+ """ wraps sys.exc_info() objects and offers
+ help for navigating the traceback.
+ """
+ _striptext = ''
+ def __init__(self, tup=None, exprinfo=None):
+ if tup is None:
+ tup = sys.exc_info()
+ if exprinfo is None and isinstance(tup[1], AssertionError):
+ exprinfo = getattr(tup[1], 'msg', None)
+ if exprinfo is None:
+ exprinfo = str(tup[1])
+ if exprinfo and exprinfo.startswith('assert '):
+ self._striptext = 'AssertionError: '
+ self._excinfo = tup
+ #: the exception class
+ self.type = tup[0]
+ #: the exception instance
+ self.value = tup[1]
+ #: the exception raw traceback
+ self.tb = tup[2]
+ #: the exception type name
+ self.typename = self.type.__name__
+ #: the exception traceback (py.code.Traceback instance)
+ self.traceback = py.code.Traceback(self.tb)
+
+ def __repr__(self):
+ return "<ExceptionInfo %s tblen=%d>" % (
+ self.typename, len(self.traceback))
+
+ def exconly(self, tryshort=False):
+ """ return the exception as a string
+
+ when 'tryshort' resolves to True, and the exception is a
+ py.code._AssertionError, only the actual exception part of
+ the exception representation is returned (so 'AssertionError: ' is
+ removed from the beginning)
+ """
+ lines = format_exception_only(self.type, self.value)
+ text = ''.join(lines)
+ text = text.rstrip()
+ if tryshort:
+ if text.startswith(self._striptext):
+ text = text[len(self._striptext):]
+ return text
+
+ def errisinstance(self, exc):
+ """ return True if the exception is an instance of exc """
+ return isinstance(self.value, exc)
+
+ def _getreprcrash(self):
+ exconly = self.exconly(tryshort=True)
+ entry = self.traceback.getcrashentry()
+ path, lineno = entry.frame.code.raw.co_filename, entry.lineno
+ return ReprFileLocation(path, lineno+1, exconly)
+
+ def getrepr(self, showlocals=False, style="long",
+ abspath=False, tbfilter=True, funcargs=False):
+ """ return str()able representation of this exception info.
+ showlocals: show locals per traceback entry
+ style: long|short|no|native traceback style
+ tbfilter: hide entries (where __tracebackhide__ is true)
+
+        in case of style==native, tbfilter and showlocals are ignored.
+ """
+ if style == 'native':
+ return ReprExceptionInfo(ReprTracebackNative(
+ traceback.format_exception(
+ self.type,
+ self.value,
+ self.traceback[0]._rawentry,
+ )), self._getreprcrash())
+
+ fmt = FormattedExcinfo(
+ showlocals=showlocals, style=style,
+ abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
+ return fmt.repr_excinfo(self)
+
+ def __str__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return str(loc)
+
+ def __unicode__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return loc.__unicode__()
+
+
+class FormattedExcinfo(object):
+ """ presenting information about failing Functions and Generators. """
+ # for traceback entries
+ flow_marker = ">"
+ fail_marker = "E"
+
+ def __init__(self, showlocals=False, style="long",
+ abspath=True, tbfilter=True, funcargs=False):
+ self.showlocals = showlocals
+ self.style = style
+ self.tbfilter = tbfilter
+ self.funcargs = funcargs
+ self.abspath = abspath
+ self.astcache = {}
+
+ def _getindent(self, source):
+ # figure out indent for given source
+ try:
+ s = str(source.getstatement(len(source)-1))
+ except KeyboardInterrupt:
+ raise
+ except:
+ try:
+ s = str(source[-1])
+ except KeyboardInterrupt:
+ raise
+ except:
+ return 0
+ return 4 + (len(s) - len(s.lstrip()))
+
+ def _getentrysource(self, entry):
+ source = entry.getsource(self.astcache)
+ if source is not None:
+ source = source.deindent()
+ return source
+
+ def _saferepr(self, obj):
+ return py.io.saferepr(obj)
+
+ def repr_args(self, entry):
+ if self.funcargs:
+ args = []
+ for argname, argvalue in entry.frame.getargs(var=True):
+ args.append((argname, self._saferepr(argvalue)))
+ return ReprFuncArgs(args)
+
+ def get_source(self, source, line_index=-1, excinfo=None, short=False):
+ """ return formatted and marked up source lines. """
+ lines = []
+ if source is None or line_index >= len(source.lines):
+ source = py.code.Source("???")
+ line_index = 0
+ if line_index < 0:
+ line_index += len(source)
+ space_prefix = " "
+ if short:
+ lines.append(space_prefix + source.lines[line_index].strip())
+ else:
+ for line in source.lines[:line_index]:
+ lines.append(space_prefix + line)
+ lines.append(self.flow_marker + " " + source.lines[line_index])
+ for line in source.lines[line_index+1:]:
+ lines.append(space_prefix + line)
+ if excinfo is not None:
+ indent = 4 if short else self._getindent(source)
+ lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
+ return lines
+
+ def get_exconly(self, excinfo, indent=4, markall=False):
+ lines = []
+ indent = " " * indent
+ # get the real exception information out
+ exlines = excinfo.exconly(tryshort=True).split('\n')
+ failindent = self.fail_marker + indent[1:]
+ for line in exlines:
+ lines.append(failindent + line)
+ if not markall:
+ failindent = indent
+ return lines
+
+ def repr_locals(self, locals):
+ if self.showlocals:
+ lines = []
+ keys = [loc for loc in locals if loc[0] != "@"]
+ keys.sort()
+ for name in keys:
+ value = locals[name]
+ if name == '__builtins__':
+ lines.append("__builtins__ = <builtins>")
+ else:
+ # This formatting could all be handled by the
+ # _repr() function, which is only reprlib.Repr in
+ # disguise, so is very configurable.
+ str_repr = self._saferepr(value)
+ #if len(str_repr) < 70 or not isinstance(value,
+ # (list, tuple, dict)):
+ lines.append("%-10s = %s" %(name, str_repr))
+ #else:
+ # self._line("%-10s =\\" % (name,))
+ # # XXX
+ # pprint.pprint(value, stream=self.excinfowriter)
+ return ReprLocals(lines)
+
+ def repr_traceback_entry(self, entry, excinfo=None):
+ source = self._getentrysource(entry)
+ if source is None:
+ source = py.code.Source("???")
+ line_index = 0
+ else:
+ # entry.getfirstlinesource() can be -1, should be 0 on jython
+ line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
+
+ lines = []
+ style = entry._repr_style
+ if style is None:
+ style = self.style
+ if style in ("short", "long"):
+ short = style == "short"
+ reprargs = self.repr_args(entry) if not short else None
+ s = self.get_source(source, line_index, excinfo, short=short)
+ lines.extend(s)
+ if short:
+ message = "in %s" %(entry.name)
+ else:
+ message = excinfo and excinfo.typename or ""
+ path = self._makepath(entry.path)
+ filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
+ localsrepr = None
+ if not short:
+ localsrepr = self.repr_locals(entry.locals)
+ return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
+ if excinfo:
+ lines.extend(self.get_exconly(excinfo, indent=4))
+ return ReprEntry(lines, None, None, None, style)
+
+ def _makepath(self, path):
+ if not self.abspath:
+ try:
+ np = py.path.local().bestrelpath(path)
+ except OSError:
+ return path
+ if len(np) < len(str(path)):
+ path = np
+ return path
+
+ def repr_traceback(self, excinfo):
+ traceback = excinfo.traceback
+ if self.tbfilter:
+ traceback = traceback.filter()
+ recursionindex = None
+ if excinfo.errisinstance(RuntimeError):
+ if "maximum recursion depth exceeded" in str(excinfo.value):
+ recursionindex = traceback.recursionindex()
+ last = traceback[-1]
+ entries = []
+ extraline = None
+ for index, entry in enumerate(traceback):
+ einfo = (last == entry) and excinfo or None
+ reprentry = self.repr_traceback_entry(entry, einfo)
+ entries.append(reprentry)
+ if index == recursionindex:
+ extraline = "!!! Recursion detected (same locals & position)"
+ break
+ return ReprTraceback(entries, extraline, style=self.style)
+
+ def repr_excinfo(self, excinfo):
+ reprtraceback = self.repr_traceback(excinfo)
+ reprcrash = excinfo._getreprcrash()
+ return ReprExceptionInfo(reprtraceback, reprcrash)
+
+class TerminalRepr:
+ def __str__(self):
+ s = self.__unicode__()
+ if sys.version_info[0] < 3:
+ s = s.encode('utf-8')
+ return s
+
+ def __unicode__(self):
+ # FYI this is called from pytest-xdist's serialization of exception
+ # information.
+ io = py.io.TextIO()
+ tw = py.io.TerminalWriter(file=io)
+ self.toterminal(tw)
+ return io.getvalue().strip()
+
+ def __repr__(self):
+ return "<%s instance at %0x>" %(self.__class__, id(self))
+
+
+class ReprExceptionInfo(TerminalRepr):
+ def __init__(self, reprtraceback, reprcrash):
+ self.reprtraceback = reprtraceback
+ self.reprcrash = reprcrash
+ self.sections = []
+
+ def addsection(self, name, content, sep="-"):
+ self.sections.append((name, content, sep))
+
+ def toterminal(self, tw):
+ self.reprtraceback.toterminal(tw)
+ for name, content, sep in self.sections:
+ tw.sep(sep, name)
+ tw.line(content)
+
+class ReprTraceback(TerminalRepr):
+ entrysep = "_ "
+
+ def __init__(self, reprentries, extraline, style):
+ self.reprentries = reprentries
+ self.extraline = extraline
+ self.style = style
+
+ def toterminal(self, tw):
+ # the entries might have different styles
+ last_style = None
+ for i, entry in enumerate(self.reprentries):
+ if entry.style == "long":
+ tw.line("")
+ entry.toterminal(tw)
+ if i < len(self.reprentries) - 1:
+ next_entry = self.reprentries[i+1]
+ if entry.style == "long" or \
+ entry.style == "short" and next_entry.style == "long":
+ tw.sep(self.entrysep)
+
+ if self.extraline:
+ tw.line(self.extraline)
+
+class ReprTracebackNative(ReprTraceback):
+ def __init__(self, tblines):
+ self.style = "native"
+ self.reprentries = [ReprEntryNative(tblines)]
+ self.extraline = None
+
+class ReprEntryNative(TerminalRepr):
+ style = "native"
+
+ def __init__(self, tblines):
+ self.lines = tblines
+
+ def toterminal(self, tw):
+ tw.write("".join(self.lines))
+
+class ReprEntry(TerminalRepr):
+ localssep = "_ "
+
+ def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
+ self.lines = lines
+ self.reprfuncargs = reprfuncargs
+ self.reprlocals = reprlocals
+ self.reprfileloc = filelocrepr
+ self.style = style
+
+ def toterminal(self, tw):
+ if self.style == "short":
+ self.reprfileloc.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ #tw.line("")
+ return
+ if self.reprfuncargs:
+ self.reprfuncargs.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ if self.reprlocals:
+ #tw.sep(self.localssep, "Locals")
+ tw.line("")
+ self.reprlocals.toterminal(tw)
+ if self.reprfileloc:
+ if self.lines:
+ tw.line("")
+ self.reprfileloc.toterminal(tw)
+
+ def __str__(self):
+ return "%s\n%s\n%s" % ("\n".join(self.lines),
+ self.reprlocals,
+ self.reprfileloc)
+
+class ReprFileLocation(TerminalRepr):
+ def __init__(self, path, lineno, message):
+ self.path = str(path)
+ self.lineno = lineno
+ self.message = message
+
+ def toterminal(self, tw):
+ # filename and lineno output for each entry,
+        # using an output format that most editors understand
+ msg = self.message
+ i = msg.find("\n")
+ if i != -1:
+ msg = msg[:i]
+ tw.line("%s:%s: %s" %(self.path, self.lineno, msg))
+
+class ReprLocals(TerminalRepr):
+ def __init__(self, lines):
+ self.lines = lines
+
+ def toterminal(self, tw):
+ for line in self.lines:
+ tw.line(line)
+
+class ReprFuncArgs(TerminalRepr):
+ def __init__(self, args):
+ self.args = args
+
+ def toterminal(self, tw):
+ if self.args:
+ linesofar = ""
+ for name, value in self.args:
+ ns = "%s = %s" %(name, value)
+ if len(ns) + len(linesofar) + 2 > tw.fullwidth:
+ if linesofar:
+ tw.line(linesofar)
+ linesofar = ns
+ else:
+ if linesofar:
+ linesofar += ", " + ns
+ else:
+ linesofar = ns
+ if linesofar:
+ tw.line(linesofar)
+ tw.line("")
+
+
+
+oldbuiltins = {}
+
+def patch_builtins(assertion=True, compile=True):
+ """ put compile and AssertionError builtins to Python's builtins. """
+ if assertion:
+ from py._code import assertion
+ l = oldbuiltins.setdefault('AssertionError', [])
+ l.append(py.builtin.builtins.AssertionError)
+ py.builtin.builtins.AssertionError = assertion.AssertionError
+ if compile:
+ l = oldbuiltins.setdefault('compile', [])
+ l.append(py.builtin.builtins.compile)
+ py.builtin.builtins.compile = py.code.compile
+
+def unpatch_builtins(assertion=True, compile=True):
+ """ remove compile and AssertionError builtins from Python builtins. """
+ if assertion:
+ py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
+ if compile:
+ py.builtin.builtins.compile = oldbuiltins['compile'].pop()
+
+def getrawcode(obj, trycall=True):
+ """ return code object for given function. """
+ try:
+ return obj.__code__
+ except AttributeError:
+ obj = getattr(obj, 'im_func', obj)
+ obj = getattr(obj, 'func_code', obj)
+ obj = getattr(obj, 'f_code', obj)
+ obj = getattr(obj, '__code__', obj)
+ if trycall and not hasattr(obj, 'co_firstlineno'):
+ if hasattr(obj, '__call__') and not isclass(obj):
+ x = getrawcode(obj.__call__, trycall=False)
+ if hasattr(x, 'co_firstlineno'):
+ return x
+ return obj
+
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_code/source.py b/testing/web-platform/tests/tools/third_party/py/py/_code/source.py
new file mode 100644
index 0000000000..7fc7b23a96
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_code/source.py
@@ -0,0 +1,410 @@
+from __future__ import generators
+
+from bisect import bisect_right
+import sys
+import inspect, tokenize
+import py
+from types import ModuleType
+cpy_compile = compile
+
+try:
+ import _ast
+ from _ast import PyCF_ONLY_AST as _AST_FLAG
+except ImportError:
+ _AST_FLAG = 0
+ _ast = None
+
+
+class Source(object):
+ """ a immutable object holding a source code fragment,
+ possibly deindenting it.
+ """
+ _compilecounter = 0
+ def __init__(self, *parts, **kwargs):
+ self.lines = lines = []
+ de = kwargs.get('deindent', True)
+ rstrip = kwargs.get('rstrip', True)
+ for part in parts:
+ if not part:
+ partlines = []
+            elif isinstance(part, Source):
+ partlines = part.lines
+ elif isinstance(part, (tuple, list)):
+ partlines = [x.rstrip("\n") for x in part]
+ elif isinstance(part, py.builtin._basestring):
+ partlines = part.split('\n')
+ if rstrip:
+ while partlines:
+ if partlines[-1].strip():
+ break
+ partlines.pop()
+ else:
+ partlines = getsource(part, deindent=de).lines
+ if de:
+ partlines = deindent(partlines)
+ lines.extend(partlines)
+
+ def __eq__(self, other):
+ try:
+ return self.lines == other.lines
+ except AttributeError:
+ if isinstance(other, str):
+ return str(self) == other
+ return False
+
+ def __getitem__(self, key):
+ if isinstance(key, int):
+ return self.lines[key]
+ else:
+ if key.step not in (None, 1):
+ raise IndexError("cannot slice a Source with a step")
+ return self.__getslice__(key.start, key.stop)
+
+ def __len__(self):
+ return len(self.lines)
+
+ def __getslice__(self, start, end):
+ newsource = Source()
+ newsource.lines = self.lines[start:end]
+ return newsource
+
+ def strip(self):
+ """ return new source object with trailing
+ and leading blank lines removed.
+ """
+ start, end = 0, len(self)
+ while start < end and not self.lines[start].strip():
+ start += 1
+ while end > start and not self.lines[end-1].strip():
+ end -= 1
+ source = Source()
+ source.lines[:] = self.lines[start:end]
+ return source
+
+ def putaround(self, before='', after='', indent=' ' * 4):
+ """ return a copy of the source object with
+ 'before' and 'after' wrapped around it.
+ """
+ before = Source(before)
+ after = Source(after)
+ newsource = Source()
+ lines = [ (indent + line) for line in self.lines]
+ newsource.lines = before.lines + lines + after.lines
+ return newsource
+
+ def indent(self, indent=' ' * 4):
+ """ return a copy of the source object with
+ all lines indented by the given indent-string.
+ """
+ newsource = Source()
+ newsource.lines = [(indent+line) for line in self.lines]
+ return newsource
+
+ def getstatement(self, lineno, assertion=False):
+ """ return Source statement which contains the
+ given linenumber (counted from 0).
+ """
+ start, end = self.getstatementrange(lineno, assertion)
+ return self[start:end]
+
+ def getstatementrange(self, lineno, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+            statement region which contains the given lineno.
+ """
+ if not (0 <= lineno < len(self)):
+ raise IndexError("lineno out of range")
+ ast, start, end = getstatementrange_ast(lineno, self)
+ return start, end
+
+ def deindent(self, offset=None):
+ """ return a new source object deindented by offset.
+ If offset is None then guess an indentation offset from
+ the first non-blank line. Subsequent lines which have a
+ lower indentation offset will be copied verbatim as
+ they are assumed to be part of multilines.
+ """
+ # XXX maybe use the tokenizer to properly handle multiline
+ # strings etc.pp?
+ newsource = Source()
+ newsource.lines[:] = deindent(self.lines, offset)
+ return newsource
+
+ def isparseable(self, deindent=True):
+ """ return True if source is parseable, heuristically
+ deindenting it by default.
+ """
+ try:
+ import parser
+ except ImportError:
+ syntax_checker = lambda x: compile(x, 'asd', 'exec')
+ else:
+ syntax_checker = parser.suite
+
+ if deindent:
+ source = str(self.deindent())
+ else:
+ source = str(self)
+ try:
+ #compile(source+'\n', "x", "exec")
+ syntax_checker(source+'\n')
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return False
+ else:
+ return True
+
+ def __str__(self):
+ return "\n".join(self.lines)
+
+ def compile(self, filename=None, mode='exec',
+ flag=generators.compiler_flag,
+ dont_inherit=0, _genframe=None):
+ """ return compiled code object. if filename is None
+ invent an artificial filename which displays
+ the source/line position of the caller frame.
+ """
+ if not filename or py.path.local(filename).check(file=0):
+ if _genframe is None:
+ _genframe = sys._getframe(1) # the caller
+ fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
+ base = "<%d-codegen " % self._compilecounter
+ self.__class__._compilecounter += 1
+ if not filename:
+ filename = base + '%s:%d>' % (fn, lineno)
+ else:
+ filename = base + '%r %s:%d>' % (filename, fn, lineno)
+ source = "\n".join(self.lines) + '\n'
+ try:
+ co = cpy_compile(source, filename, mode, flag)
+ except SyntaxError:
+ ex = sys.exc_info()[1]
+ # re-represent syntax errors from parsing python strings
+ msglines = self.lines[:ex.lineno]
+ if ex.offset:
+ msglines.append(" "*ex.offset + '^')
+ msglines.append("(code was compiled probably from here: %s)" % filename)
+ newex = SyntaxError('\n'.join(msglines))
+ newex.offset = ex.offset
+ newex.lineno = ex.lineno
+ newex.text = ex.text
+ raise newex
+ else:
+ if flag & _AST_FLAG:
+ return co
+ lines = [(x + "\n") for x in self.lines]
+ import linecache
+ linecache.cache[filename] = (1, None, lines, filename)
+ return co
+
+#
+# public API shortcut functions
+#
+
+def compile_(source, filename=None, mode='exec', flags=
+ generators.compiler_flag, dont_inherit=0):
+ """ compile the given source to a raw code object,
+ and maintain an internal cache which allows later
+ retrieval of the source code for the code object
+ and any recursively created code objects.
+ """
+ if _ast is not None and isinstance(source, _ast.AST):
+ # XXX should Source support having AST?
+ return cpy_compile(source, filename, mode, flags, dont_inherit)
+ _genframe = sys._getframe(1) # the caller
+ s = Source(source)
+ co = s.compile(filename, mode, flags, _genframe=_genframe)
+ return co
+
+
+def getfslineno(obj):
+ """ Return source location (path, lineno) for the given object.
+ If the source cannot be determined return ("", -1)
+ """
+ try:
+ code = py.code.Code(obj)
+ except TypeError:
+ try:
+ fn = (inspect.getsourcefile(obj) or
+ inspect.getfile(obj))
+ except TypeError:
+ return "", -1
+
+ fspath = fn and py.path.local(fn) or None
+ lineno = -1
+ if fspath:
+ try:
+ _, lineno = findsource(obj)
+ except IOError:
+ pass
+ else:
+ fspath = code.path
+ lineno = code.firstlineno
+ assert isinstance(lineno, int)
+ return fspath, lineno
+
+#
+# helper functions
+#
+
+def findsource(obj):
+ try:
+ sourcelines, lineno = inspect.findsource(obj)
+ except py.builtin._sysex:
+ raise
+ except:
+ return None, -1
+ source = Source()
+ source.lines = [line.rstrip() for line in sourcelines]
+ return source, lineno
+
+def getsource(obj, **kwargs):
+ obj = py.code.getrawcode(obj)
+ try:
+ strsrc = inspect.getsource(obj)
+ except IndentationError:
+ strsrc = "\"Buggy python version consider upgrading, cannot get source\""
+ assert isinstance(strsrc, str)
+ return Source(strsrc, **kwargs)
+
+def deindent(lines, offset=None):
+ if offset is None:
+ for line in lines:
+ line = line.expandtabs()
+ s = line.lstrip()
+ if s:
+ offset = len(line)-len(s)
+ break
+ else:
+ offset = 0
+ if offset == 0:
+ return list(lines)
+ newlines = []
+ def readline_generator(lines):
+ for line in lines:
+ yield line + '\n'
+ while True:
+ yield ''
+
+ it = readline_generator(lines)
+
+ try:
+ for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
+ if sline > len(lines):
+ break # End of input reached
+ if sline > len(newlines):
+ line = lines[sline - 1].expandtabs()
+ if line.lstrip() and line[:offset].isspace():
+ line = line[offset:] # Deindent
+ newlines.append(line)
+
+ for i in range(sline, eline):
+ # Don't deindent continuing lines of
+ # multiline tokens (i.e. multiline strings)
+ newlines.append(lines[i])
+ except (IndentationError, tokenize.TokenError):
+ pass
+ # Add any lines we didn't see. E.g. if an exception was raised.
+ newlines.extend(lines[len(newlines):])
+ return newlines
+
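
As a rough usage sketch (not from the patch itself), the deindent() helper above is what lets Source strip common leading indentation from embedded code fragments:

    import py

    src = py.code.Source("    if x:\n        y = 1")
    print(src.lines)   # ['if x:', '    y = 1'] -- the shared 4-space indent is gone
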
+
+def get_statement_startend2(lineno, node):
+ import ast
+ # flatten all statements and except handlers into one lineno-list
+ # AST's line numbers start indexing at 1
+ l = []
+ for x in ast.walk(node):
+ if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
+ l.append(x.lineno - 1)
+ for name in "finalbody", "orelse":
+ val = getattr(x, name, None)
+ if val:
+ # treat the finally/orelse part as its own statement
+ l.append(val[0].lineno - 1 - 1)
+ l.sort()
+ insert_index = bisect_right(l, lineno)
+ start = l[insert_index - 1]
+ if insert_index >= len(l):
+ end = None
+ else:
+ end = l[insert_index]
+ return start, end
+
+
+def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
+ if astnode is None:
+ content = str(source)
+ try:
+ astnode = compile(content, "source", "exec", 1024) # 1024 for AST
+ except ValueError:
+ start, end = getstatementrange_old(lineno, source, assertion)
+ return None, start, end
+ start, end = get_statement_startend2(lineno, astnode)
+ # we need to correct the end:
+ # - ast-parsing strips comments
+ # - there might be empty lines
+ # - we might have lesser indented code blocks at the end
+ if end is None:
+ end = len(source.lines)
+
+ if end > start + 1:
+ # make sure we don't span differently indented code blocks
+        # by using the BlockFinder helper which inspect.getsource() itself uses
+ block_finder = inspect.BlockFinder()
+ # if we start with an indented line, put blockfinder to "started" mode
+ block_finder.started = source.lines[start][0].isspace()
+ it = ((x + "\n") for x in source.lines[start:end])
+ try:
+ for tok in tokenize.generate_tokens(lambda: next(it)):
+ block_finder.tokeneater(*tok)
+ except (inspect.EndOfBlock, IndentationError):
+ end = block_finder.last + start
+ except Exception:
+ pass
+
+ # the end might still point to a comment or empty line, correct it
+ while end:
+ line = source.lines[end - 1].lstrip()
+ if line.startswith("#") or not line:
+ end -= 1
+ else:
+ break
+ return astnode, start, end
+
+
+def getstatementrange_old(lineno, source, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+        statement region which contains the given lineno.
+ raise an IndexError if no such statementrange can be found.
+ """
+ # XXX this logic is only used on python2.4 and below
+ # 1. find the start of the statement
+ from codeop import compile_command
+ for start in range(lineno, -1, -1):
+ if assertion:
+ line = source.lines[start]
+ # the following lines are not fully tested, change with care
+ if 'super' in line and 'self' in line and '__init__' in line:
+ raise IndexError("likely a subclass")
+ if "assert" not in line and "raise" not in line:
+ continue
+ trylines = source.lines[start:lineno+1]
+ # quick hack to prepare parsing an indented line with
+ # compile_command() (which errors on "return" outside defs)
+ trylines.insert(0, 'def xxx():')
+ trysource = '\n '.join(trylines)
+ # ^ space here
+ try:
+ compile_command(trysource)
+ except (SyntaxError, OverflowError, ValueError):
+ continue
+
+ # 2. find the end of the statement
+ for end in range(lineno+1, len(source)+1):
+ trysource = source[start:end]
+ if trysource.isparseable():
+ return start, end
+ raise SyntaxError("no valid source range around line %d " % (lineno,))
+
+
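
A minimal sketch of the Source API defined in this file, using only the public py.code.Source entry point; it shows statement lookup and the invented-filename compile() behaviour described above.

    import py

    src = py.code.Source("x = (1 +\n     2)\ny = 3")

    # getstatement() uses the AST-based range helpers above, so line 0
    # (0-based) maps to the whole two-line assignment.
    print(str(src.getstatement(0)))

    # compile() invents a "<N-codegen ...>" filename and registers the text
    # in linecache so tracebacks can show the generated source.
    ns = {}
    exec(src.compile(), ns)
    print(ns["y"])    # 3
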
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_error.py b/testing/web-platform/tests/tools/third_party/py/py/_error.py
new file mode 100644
index 0000000000..a6375de9fa
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_error.py
@@ -0,0 +1,91 @@
+"""
+create errno-specific classes for IO or os calls.
+
+"""
+from types import ModuleType
+import sys, os, errno
+
+class Error(EnvironmentError):
+ def __repr__(self):
+ return "%s.%s %r: %s " %(self.__class__.__module__,
+ self.__class__.__name__,
+ self.__class__.__doc__,
+ " ".join(map(str, self.args)),
+ #repr(self.args)
+ )
+
+ def __str__(self):
+ s = "[%s]: %s" %(self.__class__.__doc__,
+ " ".join(map(str, self.args)),
+ )
+ return s
+
+_winerrnomap = {
+ 2: errno.ENOENT,
+ 3: errno.ENOENT,
+ 17: errno.EEXIST,
+ 18: errno.EXDEV,
+    13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable
+ 22: errno.ENOTDIR,
+ 20: errno.ENOTDIR,
+ 267: errno.ENOTDIR,
+ 5: errno.EACCES, # anything better?
+}
+
+class ErrorMaker(ModuleType):
+ """ lazily provides Exception classes for each possible POSIX errno
+        (as defined by the 'errno' module). All such classes
+        subclass EnvironmentError.
+ """
+ Error = Error
+ _errno2class = {}
+
+ def __getattr__(self, name):
+ if name[0] == "_":
+ raise AttributeError(name)
+ eno = getattr(errno, name)
+ cls = self._geterrnoclass(eno)
+ setattr(self, name, cls)
+ return cls
+
+ def _geterrnoclass(self, eno):
+ try:
+ return self._errno2class[eno]
+ except KeyError:
+ clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
+ errorcls = type(Error)(clsname, (Error,),
+ {'__module__':'py.error',
+ '__doc__': os.strerror(eno)})
+ self._errno2class[eno] = errorcls
+ return errorcls
+
+ def checked_call(self, func, *args, **kwargs):
+ """ call a function and raise an errno-exception if applicable. """
+ __tracebackhide__ = True
+ try:
+ return func(*args, **kwargs)
+ except self.Error:
+ raise
+ except (OSError, EnvironmentError):
+ cls, value, tb = sys.exc_info()
+ if not hasattr(value, 'errno'):
+ raise
+ __tracebackhide__ = False
+ errno = value.errno
+ try:
+ if not isinstance(value, WindowsError):
+ raise NameError
+ except NameError:
+ # we are not on Windows, or we got a proper OSError
+ cls = self._geterrnoclass(errno)
+ else:
+ try:
+ cls = self._geterrnoclass(_winerrnomap[errno])
+ except KeyError:
+ raise value
+ raise cls("%s%r" % (func.__name__, args))
+ __tracebackhide__ = True
+
+
+error = ErrorMaker('py.error')
+sys.modules[error.__name__] = error
\ No newline at end of file
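
A short sketch of how the ErrorMaker above is normally used through the py.error entry point; the file path below is just an example.

    import py

    try:
        py.error.checked_call(open, "/no/such/file")
    except py.error.ENOENT as exc:
        # checked_call() converted the OSError into the errno-specific class
        print("missing file:", exc)
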
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_io/__init__.py b/testing/web-platform/tests/tools/third_party/py/py/_io/__init__.py
new file mode 100644
index 0000000000..835f01f3ab
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_io/__init__.py
@@ -0,0 +1 @@
+""" input/output helping """
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_io/capture.py b/testing/web-platform/tests/tools/third_party/py/py/_io/capture.py
new file mode 100644
index 0000000000..cacf2fa71a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_io/capture.py
@@ -0,0 +1,371 @@
+import os
+import sys
+import py
+import tempfile
+
+try:
+ from io import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+if sys.version_info < (3,0):
+ class TextIO(StringIO):
+ def write(self, data):
+ if not isinstance(data, unicode):
+ data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace')
+ return StringIO.write(self, data)
+else:
+ TextIO = StringIO
+
+try:
+ from io import BytesIO
+except ImportError:
+ class BytesIO(StringIO):
+ def write(self, data):
+ if isinstance(data, unicode):
+ raise TypeError("not a byte value: %r" %(data,))
+ return StringIO.write(self, data)
+
+patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
+
+class FDCapture:
+ """ Capture IO to/from a given os-level filedescriptor. """
+
+ def __init__(self, targetfd, tmpfile=None, now=True, patchsys=False):
+ """ save targetfd descriptor, and open a new
+ temporary file there. If no tmpfile is
+            specified, a tempfile.TemporaryFile() will be opened
+ in text mode.
+ """
+ self.targetfd = targetfd
+ if tmpfile is None and targetfd != 0:
+ f = tempfile.TemporaryFile('wb+')
+ tmpfile = dupfile(f, encoding="UTF-8")
+ f.close()
+ self.tmpfile = tmpfile
+ self._savefd = os.dup(self.targetfd)
+ if patchsys:
+ self._oldsys = getattr(sys, patchsysdict[targetfd])
+ if now:
+ self.start()
+
+ def start(self):
+ try:
+ os.fstat(self._savefd)
+ except OSError:
+ raise ValueError("saved filedescriptor not valid, "
+ "did you call start() twice?")
+ if self.targetfd == 0 and not self.tmpfile:
+ fd = os.open(devnullpath, os.O_RDONLY)
+ os.dup2(fd, 0)
+ os.close(fd)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], DontReadFromInput())
+ else:
+ os.dup2(self.tmpfile.fileno(), self.targetfd)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], self.tmpfile)
+
+ def done(self):
+ """ unpatch and clean up, returns the self.tmpfile (file object)
+ """
+ os.dup2(self._savefd, self.targetfd)
+ os.close(self._savefd)
+ if self.targetfd != 0:
+ self.tmpfile.seek(0)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], self._oldsys)
+ return self.tmpfile
+
+ def writeorg(self, data):
+ """ write a string to the original file descriptor
+ """
+ tempfp = tempfile.TemporaryFile()
+ try:
+ os.dup2(self._savefd, tempfp.fileno())
+ tempfp.write(data)
+ finally:
+ tempfp.close()
+
+
+def dupfile(f, mode=None, buffering=0, raising=False, encoding=None):
+ """ return a new open file object that's a duplicate of f
+
+ mode is duplicated if not given, 'buffering' controls
+ buffer size (defaulting to no buffering) and 'raising'
+ defines whether an exception is raised when an incompatible
+ file object is passed in (if raising is False, the file
+ object itself will be returned)
+ """
+ try:
+ fd = f.fileno()
+ mode = mode or f.mode
+ except AttributeError:
+ if raising:
+ raise
+ return f
+ newfd = os.dup(fd)
+ if sys.version_info >= (3,0):
+ if encoding is not None:
+ mode = mode.replace("b", "")
+ buffering = True
+ return os.fdopen(newfd, mode, buffering, encoding, closefd=True)
+ else:
+ f = os.fdopen(newfd, mode, buffering)
+ if encoding is not None:
+ return EncodedFile(f, encoding)
+ return f
+
+class EncodedFile(object):
+ def __init__(self, _stream, encoding):
+ self._stream = _stream
+ self.encoding = encoding
+
+ def write(self, obj):
+ if isinstance(obj, unicode):
+ obj = obj.encode(self.encoding)
+ elif isinstance(obj, str):
+ pass
+ else:
+ obj = str(obj)
+ self._stream.write(obj)
+
+ def writelines(self, linelist):
+ data = ''.join(linelist)
+ self.write(data)
+
+ def __getattr__(self, name):
+ return getattr(self._stream, name)
+
+class Capture(object):
+ def call(cls, func, *args, **kwargs):
+ """ return a (res, out, err) tuple where
+ out and err represent the output/error output
+ during function execution.
+ call the given function with args/kwargs
+ and capture output/error during its execution.
+ """
+ so = cls()
+ try:
+ res = func(*args, **kwargs)
+ finally:
+ out, err = so.reset()
+ return res, out, err
+ call = classmethod(call)
+
+ def reset(self):
+ """ reset sys.stdout/stderr and return captured output as strings. """
+ if hasattr(self, '_reset'):
+ raise ValueError("was already reset")
+ self._reset = True
+ outfile, errfile = self.done(save=False)
+ out, err = "", ""
+ if outfile and not outfile.closed:
+ out = outfile.read()
+ outfile.close()
+ if errfile and errfile != outfile and not errfile.closed:
+ err = errfile.read()
+ errfile.close()
+ return out, err
+
+ def suspend(self):
+ """ return current snapshot captures, memorize tempfiles. """
+ outerr = self.readouterr()
+ outfile, errfile = self.done()
+ return outerr
+
+
+class StdCaptureFD(Capture):
+ """ This class allows to capture writes to FD1 and FD2
+ and may connect a NULL file to FD0 (and prevent
+ reads from sys.stdin). If any of the 0,1,2 file descriptors
+ is invalid it will not be captured.
+ """
+ def __init__(self, out=True, err=True, mixed=False,
+ in_=True, patchsys=True, now=True):
+ self._options = {
+ "out": out,
+ "err": err,
+ "mixed": mixed,
+ "in_": in_,
+ "patchsys": patchsys,
+ "now": now,
+ }
+ self._save()
+ if now:
+ self.startall()
+
+ def _save(self):
+ in_ = self._options['in_']
+ out = self._options['out']
+ err = self._options['err']
+ mixed = self._options['mixed']
+ patchsys = self._options['patchsys']
+ if in_:
+ try:
+ self.in_ = FDCapture(0, tmpfile=None, now=False,
+ patchsys=patchsys)
+ except OSError:
+ pass
+ if out:
+ tmpfile = None
+ if hasattr(out, 'write'):
+ tmpfile = out
+ try:
+ self.out = FDCapture(1, tmpfile=tmpfile,
+ now=False, patchsys=patchsys)
+ self._options['out'] = self.out.tmpfile
+ except OSError:
+ pass
+ if err:
+ if out and mixed:
+ tmpfile = self.out.tmpfile
+ elif hasattr(err, 'write'):
+ tmpfile = err
+ else:
+ tmpfile = None
+ try:
+ self.err = FDCapture(2, tmpfile=tmpfile,
+ now=False, patchsys=patchsys)
+ self._options['err'] = self.err.tmpfile
+ except OSError:
+ pass
+
+ def startall(self):
+ if hasattr(self, 'in_'):
+ self.in_.start()
+ if hasattr(self, 'out'):
+ self.out.start()
+ if hasattr(self, 'err'):
+ self.err.start()
+
+ def resume(self):
+ """ resume capturing with original temp files. """
+ self.startall()
+
+ def done(self, save=True):
+ """ return (outfile, errfile) and stop capturing. """
+ outfile = errfile = None
+ if hasattr(self, 'out') and not self.out.tmpfile.closed:
+ outfile = self.out.done()
+ if hasattr(self, 'err') and not self.err.tmpfile.closed:
+ errfile = self.err.done()
+ if hasattr(self, 'in_'):
+ tmpfile = self.in_.done()
+ if save:
+ self._save()
+ return outfile, errfile
+
+ def readouterr(self):
+ """ return snapshot value of stdout/stderr capturings. """
+ if hasattr(self, "out"):
+ out = self._readsnapshot(self.out.tmpfile)
+ else:
+ out = ""
+ if hasattr(self, "err"):
+ err = self._readsnapshot(self.err.tmpfile)
+ else:
+ err = ""
+ return out, err
+
+ def _readsnapshot(self, f):
+ f.seek(0)
+ res = f.read()
+ enc = getattr(f, "encoding", None)
+ if enc:
+ res = py.builtin._totext(res, enc, "replace")
+ f.truncate(0)
+ f.seek(0)
+ return res
+
+
+class StdCapture(Capture):
+ """ This class allows to capture writes to sys.stdout|stderr "in-memory"
+ and will raise errors on tries to read from sys.stdin. It only
+ modifies sys.stdout|stderr|stdin attributes and does not
+ touch underlying File Descriptors (use StdCaptureFD for that).
+ """
+ def __init__(self, out=True, err=True, in_=True, mixed=False, now=True):
+ self._oldout = sys.stdout
+ self._olderr = sys.stderr
+ self._oldin = sys.stdin
+ if out and not hasattr(out, 'file'):
+ out = TextIO()
+ self.out = out
+ if err:
+ if mixed:
+ err = out
+ elif not hasattr(err, 'write'):
+ err = TextIO()
+ self.err = err
+ self.in_ = in_
+ if now:
+ self.startall()
+
+ def startall(self):
+ if self.out:
+ sys.stdout = self.out
+ if self.err:
+ sys.stderr = self.err
+ if self.in_:
+ sys.stdin = self.in_ = DontReadFromInput()
+
+ def done(self, save=True):
+ """ return (outfile, errfile) and stop capturing. """
+ outfile = errfile = None
+ if self.out and not self.out.closed:
+ sys.stdout = self._oldout
+ outfile = self.out
+ outfile.seek(0)
+ if self.err and not self.err.closed:
+ sys.stderr = self._olderr
+ errfile = self.err
+ errfile.seek(0)
+ if self.in_:
+ sys.stdin = self._oldin
+ return outfile, errfile
+
+ def resume(self):
+ """ resume capturing with original temp files. """
+ self.startall()
+
+ def readouterr(self):
+ """ return snapshot value of stdout/stderr capturings. """
+ out = err = ""
+ if self.out:
+ out = self.out.getvalue()
+ self.out.truncate(0)
+ self.out.seek(0)
+ if self.err:
+ err = self.err.getvalue()
+ self.err.truncate(0)
+ self.err.seek(0)
+ return out, err
+
+class DontReadFromInput:
+ """Temporary stub class. Ideally when stdin is accessed, the
+ capturing should be turned off, with possibly all data captured
+ so far sent to the screen. This should be configurable, though,
+ because in automated test runs it is better to crash than
+ hang indefinitely.
+ """
+ def read(self, *args):
+ raise IOError("reading from stdin while output is captured")
+ readline = read
+ readlines = read
+ __iter__ = read
+
+ def fileno(self):
+ raise ValueError("redirected Stdin is pseudofile, has no fileno()")
+ def isatty(self):
+ return False
+ def close(self):
+ pass
+
+try:
+ devnullpath = os.devnull
+except AttributeError:
+ if os.name == 'nt':
+ devnullpath = 'NUL'
+ else:
+ devnullpath = '/dev/null'
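
A minimal sketch of in-memory capturing with the StdCapture class above (StdCaptureFD works the same way, but at the file-descriptor level):

    import py

    cap = py.io.StdCapture()      # replaces sys.stdout/stderr/stdin
    print("hello")
    out, err = cap.reset()        # restores the originals and returns the text
    assert out == "hello\n" and err == ""
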
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_io/saferepr.py b/testing/web-platform/tests/tools/third_party/py/py/_io/saferepr.py
new file mode 100644
index 0000000000..8518290efd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_io/saferepr.py
@@ -0,0 +1,71 @@
+import py
+import sys
+
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+class SafeRepr(reprlib.Repr):
+ """ subclass of repr.Repr that limits the resulting size of repr()
+ and includes information on exceptions raised during the call.
+ """
+ def repr(self, x):
+ return self._callhelper(reprlib.Repr.repr, self, x)
+
+ def repr_unicode(self, x, level):
+ # Strictly speaking wrong on narrow builds
+ def repr(u):
+ if "'" not in u:
+ return py.builtin._totext("'%s'") % u
+ elif '"' not in u:
+ return py.builtin._totext('"%s"') % u
+ else:
+ return py.builtin._totext("'%s'") % u.replace("'", r"\'")
+ s = repr(x[:self.maxstring])
+ if len(s) > self.maxstring:
+ i = max(0, (self.maxstring-3)//2)
+ j = max(0, self.maxstring-3-i)
+ s = repr(x[:i] + x[len(x)-j:])
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+ def repr_instance(self, x, level):
+ return self._callhelper(builtin_repr, x)
+
+ def _callhelper(self, call, x, *args):
+ try:
+ # Try the vanilla repr and make sure that the result is a string
+ s = call(x, *args)
+ except py.builtin._sysex:
+ raise
+ except:
+ cls, e, tb = sys.exc_info()
+ exc_name = getattr(cls, '__name__', 'unknown')
+ try:
+ exc_info = str(e)
+ except py.builtin._sysex:
+ raise
+ except:
+ exc_info = 'unknown'
+ return '<[%s("%s") raised in repr()] %s object at 0x%x>' % (
+ exc_name, exc_info, x.__class__.__name__, id(x))
+ else:
+ if len(s) > self.maxsize:
+ i = max(0, (self.maxsize-3)//2)
+ j = max(0, self.maxsize-3-i)
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+def saferepr(obj, maxsize=240):
+ """ return a size-limited safe repr-string for the given object.
+ Failing __repr__ functions of user instances will be represented
+ with a short exception info and 'saferepr' generally takes
+ care to never raise exceptions itself. This function is a wrapper
+ around the Repr/reprlib functionality of the standard 2.6 lib.
+ """
+ # review exception handling
+ srepr = SafeRepr()
+ srepr.maxstring = maxsize
+ srepr.maxsize = maxsize
+ srepr.maxother = 160
+ return srepr.repr(obj)
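
A small sketch of saferepr() in use; Boom is an invented class with a deliberately broken __repr__.

    import py

    class Boom(object):
        def __repr__(self):
            raise RuntimeError("broken repr")

    # Never raises: the exception is folded into the resulting string.
    print(py.io.saferepr(Boom()))
    # Long values are truncated around maxsize characters with '...'.
    print(py.io.saferepr("x" * 1000, maxsize=30))
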
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_io/terminalwriter.py b/testing/web-platform/tests/tools/third_party/py/py/_io/terminalwriter.py
new file mode 100644
index 0000000000..442ca2395e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_io/terminalwriter.py
@@ -0,0 +1,423 @@
+"""
+
+Helper functions for writing to terminals and files.
+
+"""
+
+
+import sys, os, unicodedata
+import py
+py3k = sys.version_info[0] >= 3
+py33 = sys.version_info >= (3, 3)
+from py.builtin import text, bytes
+
+win32_and_ctypes = False
+colorama = None
+if sys.platform == "win32":
+ try:
+ import colorama
+ except ImportError:
+ try:
+ import ctypes
+ win32_and_ctypes = True
+ except ImportError:
+ pass
+
+
+def _getdimensions():
+ if py33:
+ import shutil
+ size = shutil.get_terminal_size()
+ return size.lines, size.columns
+ else:
+ import termios, fcntl, struct
+ call = fcntl.ioctl(1, termios.TIOCGWINSZ, "\000" * 8)
+ height, width = struct.unpack("hhhh", call)[:2]
+ return height, width
+
+
+def get_terminal_width():
+ width = 0
+ try:
+ _, width = _getdimensions()
+ except py.builtin._sysex:
+ raise
+ except:
+ # pass to fallback below
+ pass
+
+ if width == 0:
+ # FALLBACK:
+ # * some exception happened
+ # * or this is emacs terminal which reports (0,0)
+ width = int(os.environ.get('COLUMNS', 80))
+
+ # XXX the windows getdimensions may be bogus, let's sanify a bit
+ if width < 40:
+ width = 80
+ return width
+
+terminal_width = get_terminal_width()
+
+char_width = {
+ 'A': 1, # "Ambiguous"
+ 'F': 2, # Fullwidth
+ 'H': 1, # Halfwidth
+ 'N': 1, # Neutral
+ 'Na': 1, # Narrow
+ 'W': 2, # Wide
+}
+
+
+def get_line_width(text):
+ text = unicodedata.normalize('NFC', text)
+ return sum(char_width.get(unicodedata.east_asian_width(c), 1) for c in text)
+
+
+# XXX unify with _escaped func below
+def ansi_print(text, esc, file=None, newline=True, flush=False):
+ if file is None:
+ file = sys.stderr
+ text = text.rstrip()
+ if esc and not isinstance(esc, tuple):
+ esc = (esc,)
+ if esc and sys.platform != "win32" and file.isatty():
+ text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
+ text +
+ '\x1b[0m') # ANSI color code "reset"
+ if newline:
+ text += '\n'
+
+ if esc and win32_and_ctypes and file.isatty():
+ if 1 in esc:
+ bold = True
+ esc = tuple([x for x in esc if x != 1])
+ else:
+ bold = False
+ esctable = {() : FOREGROUND_WHITE, # normal
+ (31,): FOREGROUND_RED, # red
+ (32,): FOREGROUND_GREEN, # green
+ (33,): FOREGROUND_GREEN|FOREGROUND_RED, # yellow
+ (34,): FOREGROUND_BLUE, # blue
+ (35,): FOREGROUND_BLUE|FOREGROUND_RED, # purple
+ (36,): FOREGROUND_BLUE|FOREGROUND_GREEN, # cyan
+ (37,): FOREGROUND_WHITE, # white
+ (39,): FOREGROUND_WHITE, # reset
+ }
+ attr = esctable.get(esc, FOREGROUND_WHITE)
+ if bold:
+ attr |= FOREGROUND_INTENSITY
+ STD_OUTPUT_HANDLE = -11
+ STD_ERROR_HANDLE = -12
+ if file is sys.stderr:
+ handle = GetStdHandle(STD_ERROR_HANDLE)
+ else:
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ oldcolors = GetConsoleInfo(handle).wAttributes
+ attr |= (oldcolors & 0x0f0)
+ SetConsoleTextAttribute(handle, attr)
+ while len(text) > 32768:
+ file.write(text[:32768])
+ text = text[32768:]
+ if text:
+ file.write(text)
+ SetConsoleTextAttribute(handle, oldcolors)
+ else:
+ file.write(text)
+
+ if flush:
+ file.flush()
+
+def should_do_markup(file):
+ if os.environ.get('PY_COLORS') == '1':
+ return True
+ if os.environ.get('PY_COLORS') == '0':
+ return False
+ if 'NO_COLOR' in os.environ:
+ return False
+ return hasattr(file, 'isatty') and file.isatty() \
+ and os.environ.get('TERM') != 'dumb' \
+ and not (sys.platform.startswith('java') and os._name == 'nt')
+
+class TerminalWriter(object):
+ _esctable = dict(black=30, red=31, green=32, yellow=33,
+ blue=34, purple=35, cyan=36, white=37,
+ Black=40, Red=41, Green=42, Yellow=43,
+ Blue=44, Purple=45, Cyan=46, White=47,
+ bold=1, light=2, blink=5, invert=7)
+
+ # XXX deprecate stringio argument
+ def __init__(self, file=None, stringio=False, encoding=None):
+ if file is None:
+ if stringio:
+ self.stringio = file = py.io.TextIO()
+ else:
+ from sys import stdout as file
+ elif py.builtin.callable(file) and not (
+ hasattr(file, "write") and hasattr(file, "flush")):
+ file = WriteFile(file, encoding=encoding)
+ if hasattr(file, "isatty") and file.isatty() and colorama:
+ file = colorama.AnsiToWin32(file).stream
+ self.encoding = encoding or getattr(file, 'encoding', "utf-8")
+ self._file = file
+ self.hasmarkup = should_do_markup(file)
+ self._lastlen = 0
+ self._chars_on_current_line = 0
+ self._width_of_current_line = 0
+
+ @property
+ def fullwidth(self):
+ if hasattr(self, '_terminal_width'):
+ return self._terminal_width
+ return get_terminal_width()
+
+ @fullwidth.setter
+ def fullwidth(self, value):
+ self._terminal_width = value
+
+ @property
+ def chars_on_current_line(self):
+ """Return the number of characters written so far in the current line.
+
+ Please note that this count does not produce correct results after a reline() call,
+ see #164.
+
+ .. versionadded:: 1.5.0
+
+ :rtype: int
+ """
+ return self._chars_on_current_line
+
+ @property
+ def width_of_current_line(self):
+ """Return an estimate of the width so far in the current line.
+
+ .. versionadded:: 1.6.0
+
+ :rtype: int
+ """
+ return self._width_of_current_line
+
+ def _escaped(self, text, esc):
+ if esc and self.hasmarkup:
+ text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
+ text +'\x1b[0m')
+ return text
+
+ def markup(self, text, **kw):
+ esc = []
+ for name in kw:
+ if name not in self._esctable:
+ raise ValueError("unknown markup: %r" %(name,))
+ if kw[name]:
+ esc.append(self._esctable[name])
+ return self._escaped(text, tuple(esc))
+
+ def sep(self, sepchar, title=None, fullwidth=None, **kw):
+ if fullwidth is None:
+ fullwidth = self.fullwidth
+ # the goal is to have the line be as long as possible
+ # under the condition that len(line) <= fullwidth
+ if sys.platform == "win32":
+ # if we print in the last column on windows we are on a
+ # new line but there is no way to verify/neutralize this
+ # (we may not know the exact line width)
+ # so let's be defensive to avoid empty lines in the output
+ fullwidth -= 1
+ if title is not None:
+ # we want 2 + 2*len(fill) + len(title) <= fullwidth
+ # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth
+ # 2*len(sepchar)*N <= fullwidth - len(title) - 2
+ # N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
+ N = max((fullwidth - len(title) - 2) // (2*len(sepchar)), 1)
+ fill = sepchar * N
+ line = "%s %s %s" % (fill, title, fill)
+ else:
+ # we want len(sepchar)*N <= fullwidth
+ # i.e. N <= fullwidth // len(sepchar)
+ line = sepchar * (fullwidth // len(sepchar))
+ # in some situations there is room for an extra sepchar at the right,
+ # in particular if we consider that with a sepchar like "_ " the
+ # trailing space is not important at the end of the line
+ if len(line) + len(sepchar.rstrip()) <= fullwidth:
+ line += sepchar.rstrip()
+
+ self.line(line, **kw)
+
+ def write(self, msg, **kw):
+ if msg:
+ if not isinstance(msg, (bytes, text)):
+ msg = text(msg)
+
+ self._update_chars_on_current_line(msg)
+
+ if self.hasmarkup and kw:
+ markupmsg = self.markup(msg, **kw)
+ else:
+ markupmsg = msg
+ write_out(self._file, markupmsg)
+
+ def _update_chars_on_current_line(self, text_or_bytes):
+ newline = b'\n' if isinstance(text_or_bytes, bytes) else '\n'
+ current_line = text_or_bytes.rsplit(newline, 1)[-1]
+ if isinstance(current_line, bytes):
+ current_line = current_line.decode('utf-8', errors='replace')
+ if newline in text_or_bytes:
+ self._chars_on_current_line = len(current_line)
+ self._width_of_current_line = get_line_width(current_line)
+ else:
+ self._chars_on_current_line += len(current_line)
+ self._width_of_current_line += get_line_width(current_line)
+
+ def line(self, s='', **kw):
+ self.write(s, **kw)
+ self._checkfill(s)
+ self.write('\n')
+
+ def reline(self, line, **kw):
+ if not self.hasmarkup:
+ raise ValueError("cannot use rewrite-line without terminal")
+ self.write(line, **kw)
+ self._checkfill(line)
+ self.write('\r')
+ self._lastlen = len(line)
+
+ def _checkfill(self, line):
+ diff2last = self._lastlen - len(line)
+ if diff2last > 0:
+ self.write(" " * diff2last)
+
+class Win32ConsoleWriter(TerminalWriter):
+ def write(self, msg, **kw):
+ if msg:
+ if not isinstance(msg, (bytes, text)):
+ msg = text(msg)
+
+ self._update_chars_on_current_line(msg)
+
+ oldcolors = None
+ if self.hasmarkup and kw:
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ oldcolors = GetConsoleInfo(handle).wAttributes
+ default_bg = oldcolors & 0x00F0
+ attr = default_bg
+ if kw.pop('bold', False):
+ attr |= FOREGROUND_INTENSITY
+
+ if kw.pop('red', False):
+ attr |= FOREGROUND_RED
+ elif kw.pop('blue', False):
+ attr |= FOREGROUND_BLUE
+ elif kw.pop('green', False):
+ attr |= FOREGROUND_GREEN
+ elif kw.pop('yellow', False):
+ attr |= FOREGROUND_GREEN|FOREGROUND_RED
+ else:
+ attr |= oldcolors & 0x0007
+
+ SetConsoleTextAttribute(handle, attr)
+ write_out(self._file, msg)
+ if oldcolors:
+ SetConsoleTextAttribute(handle, oldcolors)
+
+class WriteFile(object):
+ def __init__(self, writemethod, encoding=None):
+ self.encoding = encoding
+ self._writemethod = writemethod
+
+ def write(self, data):
+ if self.encoding:
+ data = data.encode(self.encoding, "replace")
+ self._writemethod(data)
+
+ def flush(self):
+ return
+
+
+if win32_and_ctypes:
+ TerminalWriter = Win32ConsoleWriter
+ import ctypes
+ from ctypes import wintypes
+
+ # ctypes access to the Windows console
+ STD_OUTPUT_HANDLE = -11
+ STD_ERROR_HANDLE = -12
+ FOREGROUND_BLACK = 0x0000 # black text
+ FOREGROUND_BLUE = 0x0001 # text color contains blue.
+ FOREGROUND_GREEN = 0x0002 # text color contains green.
+ FOREGROUND_RED = 0x0004 # text color contains red.
+ FOREGROUND_WHITE = 0x0007
+ FOREGROUND_INTENSITY = 0x0008 # text color is intensified.
+ BACKGROUND_BLACK = 0x0000 # background color black
+ BACKGROUND_BLUE = 0x0010 # background color contains blue.
+ BACKGROUND_GREEN = 0x0020 # background color contains green.
+ BACKGROUND_RED = 0x0040 # background color contains red.
+ BACKGROUND_WHITE = 0x0070
+ BACKGROUND_INTENSITY = 0x0080 # background color is intensified.
+
+ SHORT = ctypes.c_short
+ class COORD(ctypes.Structure):
+ _fields_ = [('X', SHORT),
+ ('Y', SHORT)]
+ class SMALL_RECT(ctypes.Structure):
+ _fields_ = [('Left', SHORT),
+ ('Top', SHORT),
+ ('Right', SHORT),
+ ('Bottom', SHORT)]
+ class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
+ _fields_ = [('dwSize', COORD),
+ ('dwCursorPosition', COORD),
+ ('wAttributes', wintypes.WORD),
+ ('srWindow', SMALL_RECT),
+ ('dwMaximumWindowSize', COORD)]
+
+ _GetStdHandle = ctypes.windll.kernel32.GetStdHandle
+ _GetStdHandle.argtypes = [wintypes.DWORD]
+ _GetStdHandle.restype = wintypes.HANDLE
+ def GetStdHandle(kind):
+ return _GetStdHandle(kind)
+
+ SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
+ SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
+ SetConsoleTextAttribute.restype = wintypes.BOOL
+
+ _GetConsoleScreenBufferInfo = \
+ ctypes.windll.kernel32.GetConsoleScreenBufferInfo
+ _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
+ ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
+ _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
+ def GetConsoleInfo(handle):
+ info = CONSOLE_SCREEN_BUFFER_INFO()
+ _GetConsoleScreenBufferInfo(handle, ctypes.byref(info))
+ return info
+
+ def _getdimensions():
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ info = GetConsoleInfo(handle)
+        # Subtract one from the width, otherwise the cursor wraps
+ # and the ending \n causes an empty line to display.
+ return info.dwSize.Y, info.dwSize.X - 1
+
+def write_out(fil, msg):
+ # XXX sometimes "msg" is of type bytes, sometimes text which
+ # complicates the situation. Should we try to enforce unicode?
+ try:
+ # on py27 and above writing out to sys.stdout with an encoding
+ # should usually work for unicode messages (if the encoding is
+ # capable of it)
+ fil.write(msg)
+ except UnicodeEncodeError:
+ # on py26 it might not work because stdout expects bytes
+ if fil.encoding:
+ try:
+ fil.write(msg.encode(fil.encoding))
+ except UnicodeEncodeError:
+ # it might still fail if the encoding is not capable
+ pass
+ else:
+ fil.flush()
+ return
+ # fallback: escape all unicode characters
+ msg = msg.encode("unicode-escape").decode("ascii")
+ fil.write(msg)
+ fil.flush()
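
A brief usage sketch for TerminalWriter; the markup keywords come from the _esctable above and are only applied when should_do_markup() decides the target is a colour-capable tty.

    import py

    tw = py.io.TerminalWriter()           # defaults to sys.stdout
    tw.sep("=", "test session starts")    # full-width separator line
    tw.line("all good", green=True, bold=True)
    tw.write("plain text, no newline")
    tw.line("")
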
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_log/__init__.py b/testing/web-platform/tests/tools/third_party/py/py/_log/__init__.py
new file mode 100644
index 0000000000..fad62e960d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_log/__init__.py
@@ -0,0 +1,2 @@
+""" logging API ('producers' and 'consumers' connected via keywords) """
+
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_log/log.py b/testing/web-platform/tests/tools/third_party/py/py/_log/log.py
new file mode 100644
index 0000000000..56969bcb58
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_log/log.py
@@ -0,0 +1,206 @@
+"""
+basic logging functionality based on a producer/consumer scheme.
+
+XXX implement this API: (maybe put it into slogger.py?)
+
+ log = Logger(
+ info=py.log.STDOUT,
+ debug=py.log.STDOUT,
+ command=None)
+ log.info("hello", "world")
+ log.command("hello", "world")
+
+ log = Logger(info=Logger(something=...),
+ debug=py.log.STDOUT,
+ command=None)
+"""
+import py
+import sys
+
+
+class Message(object):
+ def __init__(self, keywords, args):
+ self.keywords = keywords
+ self.args = args
+
+ def content(self):
+ return " ".join(map(str, self.args))
+
+ def prefix(self):
+ return "[%s] " % (":".join(self.keywords))
+
+ def __str__(self):
+ return self.prefix() + self.content()
+
+
+class Producer(object):
+ """ (deprecated) Log producer API which sends messages to be logged
+ to a 'consumer' object, which then prints them to stdout,
+ stderr, files, etc. Used extensively by PyPy-1.1.
+ """
+
+ Message = Message # to allow later customization
+ keywords2consumer = {}
+
+ def __init__(self, keywords, keywordmapper=None, **kw):
+ if hasattr(keywords, 'split'):
+ keywords = tuple(keywords.split())
+ self._keywords = keywords
+ if keywordmapper is None:
+ keywordmapper = default_keywordmapper
+ self._keywordmapper = keywordmapper
+
+ def __repr__(self):
+ return "<py.log.Producer %s>" % ":".join(self._keywords)
+
+ def __getattr__(self, name):
+ if '_' in name:
+ raise AttributeError(name)
+ producer = self.__class__(self._keywords + (name,))
+ setattr(self, name, producer)
+ return producer
+
+ def __call__(self, *args):
+ """ write a message to the appropriate consumer(s) """
+ func = self._keywordmapper.getconsumer(self._keywords)
+ if func is not None:
+ func(self.Message(self._keywords, args))
+
+class KeywordMapper:
+ def __init__(self):
+ self.keywords2consumer = {}
+
+ def getstate(self):
+ return self.keywords2consumer.copy()
+
+ def setstate(self, state):
+ self.keywords2consumer.clear()
+ self.keywords2consumer.update(state)
+
+ def getconsumer(self, keywords):
+ """ return a consumer matching the given keywords.
+
+        tries to find the most suitable consumer by walking the list of
+        keywords from the back; the first consumer matching a keyword is
+        returned (falling back to the 'default' consumer)
+ """
+ for i in range(len(keywords), 0, -1):
+ try:
+ return self.keywords2consumer[keywords[:i]]
+ except KeyError:
+ continue
+ return self.keywords2consumer.get('default', default_consumer)
+
+ def setconsumer(self, keywords, consumer):
+ """ set a consumer for a set of keywords. """
+ # normalize to tuples
+ if isinstance(keywords, str):
+ keywords = tuple(filter(None, keywords.split()))
+ elif hasattr(keywords, '_keywords'):
+ keywords = keywords._keywords
+ elif not isinstance(keywords, tuple):
+ raise TypeError("key %r is not a string or tuple" % (keywords,))
+ if consumer is not None and not py.builtin.callable(consumer):
+ if not hasattr(consumer, 'write'):
+ raise TypeError(
+ "%r should be None, callable or file-like" % (consumer,))
+ consumer = File(consumer)
+ self.keywords2consumer[keywords] = consumer
+
+
+def default_consumer(msg):
+ """ the default consumer, prints the message to stdout (using 'print') """
+ sys.stderr.write(str(msg)+"\n")
+
+default_keywordmapper = KeywordMapper()
+
+
+def setconsumer(keywords, consumer):
+ default_keywordmapper.setconsumer(keywords, consumer)
+
+
+def setstate(state):
+ default_keywordmapper.setstate(state)
+
+
+def getstate():
+ return default_keywordmapper.getstate()
+
+#
+# Consumers
+#
+
+
+class File(object):
+ """ log consumer wrapping a file(-like) object """
+ def __init__(self, f):
+ assert hasattr(f, 'write')
+ # assert isinstance(f, file) or not hasattr(f, 'open')
+ self._file = f
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ self._file.write(str(msg) + "\n")
+ if hasattr(self._file, 'flush'):
+ self._file.flush()
+
+
+class Path(object):
+ """ log consumer that opens and writes to a Path """
+ def __init__(self, filename, append=False,
+ delayed_create=False, buffering=False):
+ self._append = append
+ self._filename = str(filename)
+ self._buffering = buffering
+ if not delayed_create:
+ self._openfile()
+
+ def _openfile(self):
+ mode = self._append and 'a' or 'w'
+ f = open(self._filename, mode)
+ self._file = f
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ if not hasattr(self, "_file"):
+ self._openfile()
+ self._file.write(str(msg) + "\n")
+ if not self._buffering:
+ self._file.flush()
+
+
+def STDOUT(msg):
+ """ consumer that writes to sys.stdout """
+ sys.stdout.write(str(msg)+"\n")
+
+
+def STDERR(msg):
+ """ consumer that writes to sys.stderr """
+ sys.stderr.write(str(msg)+"\n")
+
+
+class Syslog:
+ """ consumer that writes to the syslog daemon """
+
+ def __init__(self, priority=None):
+ if priority is None:
+ priority = self.LOG_INFO
+ self.priority = priority
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ import syslog
+ syslog.syslog(self.priority, str(msg))
+
+
+try:
+ import syslog
+except ImportError:
+ pass
+else:
+ for _prio in "EMERG ALERT CRIT ERR WARNING NOTICE INFO DEBUG".split():
+ _prio = "LOG_" + _prio
+ try:
+ setattr(Syslog, _prio, getattr(syslog, _prio))
+ except AttributeError:
+ pass
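
A compact sketch of the producer/consumer API as exposed through py.log; the "myapp" keyword is invented for the example.

    import py

    log = py.log.Producer("myapp")
    py.log.setconsumer("myapp", py.log.STDOUT)   # route [myapp] messages to stdout
    py.log.setconsumer("myapp db", None)         # silence the [myapp:db] channel

    log("starting up")    # prints: [myapp] starting up
    log.db("ignored")     # consumer is None, message is dropped
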
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_log/warning.py b/testing/web-platform/tests/tools/third_party/py/py/_log/warning.py
new file mode 100644
index 0000000000..6ef20d98a2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_log/warning.py
@@ -0,0 +1,79 @@
+import py, sys
+
+class DeprecationWarning(DeprecationWarning):
+ def __init__(self, msg, path, lineno):
+ self.msg = msg
+ self.path = path
+ self.lineno = lineno
+ def __repr__(self):
+ return "%s:%d: %s" %(self.path, self.lineno+1, self.msg)
+ def __str__(self):
+ return self.msg
+
+def _apiwarn(startversion, msg, stacklevel=2, function=None):
+ # below is mostly COPIED from python2.4/warnings.py's def warn()
+ # Get context information
+ if isinstance(stacklevel, str):
+ frame = sys._getframe(1)
+ level = 1
+ found = frame.f_code.co_filename.find(stacklevel) != -1
+ while frame:
+ co = frame.f_code
+ if co.co_filename.find(stacklevel) == -1:
+ if found:
+ stacklevel = level
+ break
+ else:
+ found = True
+ level += 1
+ frame = frame.f_back
+ else:
+ stacklevel = 1
+ msg = "%s (since version %s)" %(msg, startversion)
+ warn(msg, stacklevel=stacklevel+1, function=function)
+
+
+def warn(msg, stacklevel=1, function=None):
+ if function is not None:
+ import inspect
+ filename = inspect.getfile(function)
+ lineno = py.code.getrawcode(function).co_firstlineno
+ else:
+ try:
+ caller = sys._getframe(stacklevel)
+ except ValueError:
+ globals = sys.__dict__
+ lineno = 1
+ else:
+ globals = caller.f_globals
+ lineno = caller.f_lineno
+ if '__name__' in globals:
+ module = globals['__name__']
+ else:
+ module = "<string>"
+ filename = globals.get('__file__')
+ if filename:
+ fnl = filename.lower()
+ if fnl.endswith(".pyc") or fnl.endswith(".pyo"):
+ filename = filename[:-1]
+ elif fnl.endswith("$py.class"):
+ filename = filename.replace('$py.class', '.py')
+ else:
+ if module == "__main__":
+ try:
+ filename = sys.argv[0]
+ except AttributeError:
+ # embedded interpreters don't have sys.argv, see bug #839151
+ filename = '__main__'
+ if not filename:
+ filename = module
+ path = py.path.local(filename)
+ warning = DeprecationWarning(msg, path, lineno)
+ import warnings
+ warnings.warn_explicit(warning, category=Warning,
+ filename=str(warning.path),
+ lineno=warning.lineno,
+ registry=warnings.__dict__.setdefault(
+ "__warningsregistry__", {})
+ )
+
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_path/__init__.py b/testing/web-platform/tests/tools/third_party/py/py/_path/__init__.py
new file mode 100644
index 0000000000..51f3246f80
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_path/__init__.py
@@ -0,0 +1 @@
+""" unified file system api """
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_path/cacheutil.py b/testing/web-platform/tests/tools/third_party/py/py/_path/cacheutil.py
new file mode 100644
index 0000000000..9922504750
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_path/cacheutil.py
@@ -0,0 +1,114 @@
+"""
+This module contains multithread-safe cache implementations.
+
+All Caches have
+
+ getorbuild(key, builder)
+ delentry(key)
+
+methods and allow configuration when instantiating the cache class.
+"""
+from time import time as gettime
+
+class BasicCache(object):
+ def __init__(self, maxentries=128):
+ self.maxentries = maxentries
+ self.prunenum = int(maxentries - maxentries/8)
+ self._dict = {}
+
+ def clear(self):
+ self._dict.clear()
+
+ def _getentry(self, key):
+ return self._dict[key]
+
+ def _putentry(self, key, entry):
+ self._prunelowestweight()
+ self._dict[key] = entry
+
+ def delentry(self, key, raising=False):
+ try:
+ del self._dict[key]
+ except KeyError:
+ if raising:
+ raise
+
+ def getorbuild(self, key, builder):
+ try:
+ entry = self._getentry(key)
+ except KeyError:
+ entry = self._build(key, builder)
+ self._putentry(key, entry)
+ return entry.value
+
+ def _prunelowestweight(self):
+ """ prune out entries with lowest weight. """
+ numentries = len(self._dict)
+ if numentries >= self.maxentries:
+ # evict according to entry's weight
+ items = [(entry.weight, key)
+ for key, entry in self._dict.items()]
+ items.sort()
+ index = numentries - self.prunenum
+ if index > 0:
+ for weight, key in items[:index]:
+ # in MT situations the element might be gone
+ self.delentry(key, raising=False)
+
+class BuildcostAccessCache(BasicCache):
+ """ A BuildTime/Access-counting cache implementation.
+ the weight of a value is computed as the product of
+
+ num-accesses-of-a-value * time-to-build-the-value
+
+ The values with the least such weights are evicted
+    if the cache maxentries threshold is exceeded.
+ For implementation flexibility more than one object
+ might be evicted at a time.
+ """
+ # time function to use for measuring build-times
+
+ def _build(self, key, builder):
+ start = gettime()
+ val = builder()
+ end = gettime()
+ return WeightedCountingEntry(val, end-start)
+
+
+class WeightedCountingEntry(object):
+ def __init__(self, value, oneweight):
+ self._value = value
+ self.weight = self._oneweight = oneweight
+
+ def value(self):
+ self.weight += self._oneweight
+ return self._value
+ value = property(value)
+
+class AgingCache(BasicCache):
+ """ This cache prunes out cache entries that are too old.
+ """
+ def __init__(self, maxentries=128, maxseconds=10.0):
+ super(AgingCache, self).__init__(maxentries)
+ self.maxseconds = maxseconds
+
+ def _getentry(self, key):
+ entry = self._dict[key]
+ if entry.isexpired():
+ self.delentry(key)
+ raise KeyError(key)
+ return entry
+
+ def _build(self, key, builder):
+ val = builder()
+ entry = AgingEntry(val, gettime() + self.maxseconds)
+ return entry
+
+class AgingEntry(object):
+ def __init__(self, value, expirationtime):
+ self.value = value
+ self.weight = expirationtime
+
+ def isexpired(self):
+ t = gettime()
+ return t >= self.weight
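
A rough sketch of getorbuild() with the caches above; note the import is from the internal py._path.cacheutil module, which is how the rest of the py package uses it.

    from py._path.cacheutil import BuildcostAccessCache

    cache = BuildcostAccessCache(maxentries=64)

    def build_answer():
        print("building...")
        return 42

    print(cache.getorbuild("answer", build_answer))  # prints "building...", then 42
    print(cache.getorbuild("answer", build_answer))  # cache hit, just 42
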
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_path/common.py b/testing/web-platform/tests/tools/third_party/py/py/_path/common.py
new file mode 100644
index 0000000000..2364e5fef5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_path/common.py
@@ -0,0 +1,459 @@
+"""
+"""
+import warnings
+import os
+import sys
+import posixpath
+import fnmatch
+import py
+
+# Moved from local.py.
+iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt')
+
+try:
+ # FileNotFoundError might happen in py34, and is not available with py27.
+ import_errors = (ImportError, FileNotFoundError)
+except NameError:
+ import_errors = (ImportError,)
+
+try:
+ from os import fspath
+except ImportError:
+ def fspath(path):
+ """
+ Return the string representation of the path.
+ If str or bytes is passed in, it is returned unchanged.
+ This code comes from PEP 519, modified to support earlier versions of
+ python.
+
+ This is required for python < 3.6.
+ """
+ if isinstance(path, (py.builtin.text, py.builtin.bytes)):
+ return path
+
+ # Work from the object's type to match method resolution of other magic
+ # methods.
+ path_type = type(path)
+ try:
+ return path_type.__fspath__(path)
+ except AttributeError:
+ if hasattr(path_type, '__fspath__'):
+ raise
+ try:
+ import pathlib
+ except import_errors:
+ pass
+ else:
+ if isinstance(path, pathlib.PurePath):
+ return py.builtin.text(path)
+
+ raise TypeError("expected str, bytes or os.PathLike object, not "
+ + path_type.__name__)
+
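
A small sketch of what the fspath() fallback above enables: PEP 519 path-like objects can be combined with py.path objects via the / operator defined further down in PathBase (output shown for a POSIX layout).

    import pathlib
    import py

    p = py.path.local("/tmp") / pathlib.PurePosixPath("logs")
    print(p)   # /tmp/logs
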
+class Checkers:
+ _depend_on_existence = 'exists', 'link', 'dir', 'file'
+
+ def __init__(self, path):
+ self.path = path
+
+ def dir(self):
+ raise NotImplementedError
+
+ def file(self):
+ raise NotImplementedError
+
+ def dotfile(self):
+ return self.path.basename.startswith('.')
+
+ def ext(self, arg):
+ if not arg.startswith('.'):
+ arg = '.' + arg
+ return self.path.ext == arg
+
+ def exists(self):
+ raise NotImplementedError
+
+ def basename(self, arg):
+ return self.path.basename == arg
+
+ def basestarts(self, arg):
+ return self.path.basename.startswith(arg)
+
+ def relto(self, arg):
+ return self.path.relto(arg)
+
+ def fnmatch(self, arg):
+ return self.path.fnmatch(arg)
+
+ def endswith(self, arg):
+ return str(self.path).endswith(arg)
+
+ def _evaluate(self, kw):
+ for name, value in kw.items():
+ invert = False
+ meth = None
+ try:
+ meth = getattr(self, name)
+ except AttributeError:
+ if name[:3] == 'not':
+ invert = True
+ try:
+ meth = getattr(self, name[3:])
+ except AttributeError:
+ pass
+ if meth is None:
+ raise TypeError(
+ "no %r checker available for %r" % (name, self.path))
+ try:
+ if py.code.getrawcode(meth).co_argcount > 1:
+ if (not meth(value)) ^ invert:
+ return False
+ else:
+ if bool(value) ^ bool(meth()) ^ invert:
+ return False
+ except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
+                # EBUSY feels not entirely correct,
+                # but it's kind of necessary since ENOMEDIUM
+                # is not accessible in Python
+ for name in self._depend_on_existence:
+ if name in kw:
+ if kw.get(name):
+ return False
+ name = 'not' + name
+ if name in kw:
+ if not kw.get(name):
+ return False
+ return True
+
+class NeverRaised(Exception):
+ pass
+
+class PathBase(object):
+ """ shared implementation for filesystem path objects."""
+ Checkers = Checkers
+
+ def __div__(self, other):
+ return self.join(fspath(other))
+ __truediv__ = __div__ # py3k
+
+ def basename(self):
+ """ basename part of path. """
+ return self._getbyspec('basename')[0]
+ basename = property(basename, None, None, basename.__doc__)
+
+ def dirname(self):
+ """ dirname part of path. """
+ return self._getbyspec('dirname')[0]
+ dirname = property(dirname, None, None, dirname.__doc__)
+
+ def purebasename(self):
+ """ pure base name of the path."""
+ return self._getbyspec('purebasename')[0]
+ purebasename = property(purebasename, None, None, purebasename.__doc__)
+
+ def ext(self):
+ """ extension of the path (including the '.')."""
+ return self._getbyspec('ext')[0]
+ ext = property(ext, None, None, ext.__doc__)
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path joined with any given path arguments. """
+ return self.new(basename='').join(*args, **kwargs)
+
+ def read_binary(self):
+ """ read and return a bytestring from reading the path. """
+ with self.open('rb') as f:
+ return f.read()
+
+ def read_text(self, encoding):
+ """ read and return a Unicode string from reading the path. """
+ with self.open("r", encoding=encoding) as f:
+ return f.read()
+
+
+ def read(self, mode='r'):
+ """ read and return a bytestring from reading the path. """
+ with self.open(mode) as f:
+ return f.read()
+
+ def readlines(self, cr=1):
+ """ read and return a list of lines from the path. if cr is False, the
+        newline will be removed from the end of each line. """
+ if sys.version_info < (3, ):
+ mode = 'rU'
+ else: # python 3 deprecates mode "U" in favor of "newline" option
+ mode = 'r'
+
+ if not cr:
+ content = self.read(mode)
+ return content.split('\n')
+ else:
+ f = self.open(mode)
+ try:
+ return f.readlines()
+ finally:
+ f.close()
+
+ def load(self):
+ """ (deprecated) return object unpickled from self.read() """
+ f = self.open('rb')
+ try:
+ import pickle
+ return py.error.checked_call(pickle.load, f)
+ finally:
+ f.close()
+
+ def move(self, target):
+ """ move this path to target. """
+ if target.relto(self):
+ raise py.error.EINVAL(
+ target,
+ "cannot move path into a subdirectory of itself")
+ try:
+ self.rename(target)
+ except py.error.EXDEV: # invalid cross-device link
+ self.copy(target)
+ self.remove()
+
+ def __repr__(self):
+ """ return a string representation of this path. """
+ return repr(str(self))
+
+ def check(self, **kw):
+ """ check a path for existence and properties.
+
+ Without arguments, return True if the path exists, otherwise False.
+
+ valid checkers::
+
+ file=1 # is a file
+ file=0 # is not a file (may not even exist)
+ dir=1 # is a dir
+ link=1 # is a link
+ exists=1 # exists
+
+ You can specify multiple checker definitions, for example::
+
+ path.check(file=1, link=1) # a link pointing to a file
+ """
+ if not kw:
+ kw = {'exists': 1}
+ return self.Checkers(self)._evaluate(kw)
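+    # Editorial usage note (not part of upstream py; p is any path instance):
+    # checker names may also be prefixed with "not" to invert them, as
+    # implemented in Checkers._evaluate above.
+    #
+    #   p.check(file=1, ext=".py")    # existing file with a .py extension
+    #   p.check(notdir=1)             # anything that is not a directory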
+
+ def fnmatch(self, pattern):
+ """return true if the basename/fullname matches the glob-'pattern'.
+
+ valid pattern characters::
+
+ * matches everything
+ ? matches any single character
+ [seq] matches any character in seq
+ [!seq] matches any char not in seq
+
+ If the pattern contains a path-separator then the full path
+ is used for pattern matching and a '*' is prepended to the
+ pattern.
+
+ if the pattern doesn't contain a path-separator the pattern
+ is only matched against the basename.
+ """
+ return FNMatcher(pattern)(self)
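+    # Editorial usage note (not part of upstream py; p is any path instance):
+    #
+    #   p.fnmatch("*.py")        # no separator: matched against the basename
+    #   p.fnmatch("tests/*.py")  # separator: matched against the full path,
+    #                            # with '*' prepended to the pattern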
+
+ def relto(self, relpath):
+ """ return a string which is the relative part of the path
+ to the given 'relpath'.
+ """
+ if not isinstance(relpath, (str, PathBase)):
+ raise TypeError("%r: not a string or path object" %(relpath,))
+ strrelpath = str(relpath)
+ if strrelpath and strrelpath[-1] != self.sep:
+ strrelpath += self.sep
+ #assert strrelpath[-1] == self.sep
+ #assert strrelpath[-2] != self.sep
+ strself = self.strpath
+ if sys.platform == "win32" or getattr(os, '_name', None) == 'nt':
+ if os.path.normcase(strself).startswith(
+ os.path.normcase(strrelpath)):
+ return strself[len(strrelpath):]
+ elif strself.startswith(strrelpath):
+ return strself[len(strrelpath):]
+ return ""
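+    # Editorial usage note (not part of upstream py), POSIX separators assumed:
+    #
+    #   py.path.local("/a/b/c").relto("/a")    # -> "b/c"
+    #   py.path.local("/a/b/c").relto("/x")    # -> ""  (not relative to /x)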
+
+ def ensure_dir(self, *args):
+ """ ensure the path joined with args is a directory. """
+ return self.ensure(*args, **{"dir": True})
+
+ def bestrelpath(self, dest):
+ """ return a string which is a relative path from self
+ (assumed to be a directory) to dest such that
+ self.join(bestrelpath) == dest and if not such
+ path can be determined return dest.
+ """
+ try:
+ if self == dest:
+ return os.curdir
+ base = self.common(dest)
+ if not base: # can be the case on windows
+ return str(dest)
+ self2base = self.relto(base)
+ reldest = dest.relto(base)
+ if self2base:
+ n = self2base.count(self.sep) + 1
+ else:
+ n = 0
+ l = [os.pardir] * n
+ if reldest:
+ l.append(reldest)
+ target = dest.sep.join(l)
+ return target
+ except AttributeError:
+ return str(dest)
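+    # Editorial usage note (not part of upstream py), POSIX separators assumed:
+    #
+    #   a_b = py.path.local("/a/b")
+    #   a_b.bestrelpath(py.path.local("/a/c/d"))   # -> "../c/d"
+    #   a_b.bestrelpath(a_b)                       # -> "."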
+
+ def exists(self):
+ return self.check()
+
+ def isdir(self):
+ return self.check(dir=1)
+
+ def isfile(self):
+ return self.check(file=1)
+
+ def parts(self, reverse=False):
+ """ return a root-first list of all ancestor directories
+ plus the path itself.
+ """
+ current = self
+ l = [self]
+ while 1:
+ last = current
+ current = current.dirpath()
+ if last == current:
+ break
+ l.append(current)
+ if not reverse:
+ l.reverse()
+ return l
+
+ def common(self, other):
+ """ return the common part shared with the other path
+ or None if there is no common part.
+ """
+ last = None
+ for x, y in zip(self.parts(), other.parts()):
+ if x != y:
+ return last
+ last = x
+ return last
+
+ def __add__(self, other):
+ """ return new path object with 'other' added to the basename"""
+ return self.new(basename=self.basename+str(other))
+
+ def __cmp__(self, other):
+ """ return sort value (-1, 0, +1). """
+ try:
+ return cmp(self.strpath, other.strpath)
+ except AttributeError:
+ return cmp(str(self), str(other)) # self.path, other.path)
+
+ def __lt__(self, other):
+ try:
+ return self.strpath < other.strpath
+ except AttributeError:
+ return str(self) < str(other)
+
+ def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False):
+ """ yields all paths below the current one
+
+ fil is a filter (glob pattern or callable), if not matching the
+ path will not be yielded, defaulting to None (everything is
+ returned)
+
+ rec is a filter (glob pattern or callable) that controls whether
+ a node is descended, defaulting to None
+
+        ignore is an Exception class that is ignored when calling listdir()
+ on any of the paths (by default, all exceptions are reported)
+
+ bf if True will cause a breadthfirst search instead of the
+ default depthfirst. Default: False
+
+ sort if True will sort entries within each directory level.
+ """
+ for x in Visitor(fil, rec, ignore, bf, sort).gen(self):
+ yield x
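+    # Editorial usage sketch (not part of upstream py): fil and rec accept
+    # either a glob string (wrapped in FNMatcher) or a callable taking a path;
+    # somedir stands in for any py.path.local directory.
+    #
+    #   for p in somedir.visit(fil="*.py", rec=lambda d: d.basename != ".git"):
+    #       print(p)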
+
+ def _sortlist(self, res, sort):
+ if sort:
+ if hasattr(sort, '__call__'):
+ warnings.warn(DeprecationWarning(
+ "listdir(sort=callable) is deprecated and breaks on python3"
+ ), stacklevel=3)
+ res.sort(sort)
+ else:
+ res.sort()
+
+ def samefile(self, other):
+ """ return True if other refers to the same stat object as self. """
+ return self.strpath == str(other)
+
+ def __fspath__(self):
+ return self.strpath
+
+class Visitor:
+ def __init__(self, fil, rec, ignore, bf, sort):
+ if isinstance(fil, py.builtin._basestring):
+ fil = FNMatcher(fil)
+ if isinstance(rec, py.builtin._basestring):
+ self.rec = FNMatcher(rec)
+ elif not hasattr(rec, '__call__') and rec:
+ self.rec = lambda path: True
+ else:
+ self.rec = rec
+ self.fil = fil
+ self.ignore = ignore
+ self.breadthfirst = bf
+ self.optsort = sort and sorted or (lambda x: x)
+
+ def gen(self, path):
+ try:
+ entries = path.listdir()
+ except self.ignore:
+ return
+ rec = self.rec
+ dirs = self.optsort([p for p in entries
+ if p.check(dir=1) and (rec is None or rec(p))])
+ if not self.breadthfirst:
+ for subdir in dirs:
+ for p in self.gen(subdir):
+ yield p
+ for p in self.optsort(entries):
+ if self.fil is None or self.fil(p):
+ yield p
+ if self.breadthfirst:
+ for subdir in dirs:
+ for p in self.gen(subdir):
+ yield p
+
+class FNMatcher:
+ def __init__(self, pattern):
+ self.pattern = pattern
+
+ def __call__(self, path):
+ pattern = self.pattern
+
+ if (pattern.find(path.sep) == -1 and
+ iswin32 and
+ pattern.find(posixpath.sep) != -1):
+ # Running on Windows, the pattern has no Windows path separators,
+ # and the pattern has one or more Posix path separators. Replace
+ # the Posix path separators with the Windows path separator.
+ pattern = pattern.replace(posixpath.sep, path.sep)
+
+ if pattern.find(path.sep) == -1:
+ name = path.basename
+ else:
+ name = str(path) # path.strpath # XXX svn?
+ if not os.path.isabs(pattern):
+ pattern = '*' + path.sep + pattern
+ return fnmatch.fnmatch(name, pattern)
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_path/local.py b/testing/web-platform/tests/tools/third_party/py/py/_path/local.py
new file mode 100644
index 0000000000..1385a03987
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_path/local.py
@@ -0,0 +1,1030 @@
+"""
+local path implementation.
+"""
+from __future__ import with_statement
+
+from contextlib import contextmanager
+import sys, os, atexit, io, uuid
+import py
+from py._path import common
+from py._path.common import iswin32, fspath
+from stat import S_ISLNK, S_ISDIR, S_ISREG
+
+from os.path import abspath, normpath, isabs, exists, isdir, isfile, islink, dirname
+
+if sys.version_info > (3,0):
+ def map_as_list(func, iter):
+ return list(map(func, iter))
+else:
+ map_as_list = map
+
+ALLOW_IMPORTLIB_MODE = sys.version_info > (3,5)
+if ALLOW_IMPORTLIB_MODE:
+ import importlib
+
+
+class Stat(object):
+ def __getattr__(self, name):
+ return getattr(self._osstatresult, "st_" + name)
+
+ def __init__(self, path, osstatresult):
+ self.path = path
+ self._osstatresult = osstatresult
+
+ @property
+ def owner(self):
+ if iswin32:
+ raise NotImplementedError("XXX win32")
+ import pwd
+ entry = py.error.checked_call(pwd.getpwuid, self.uid)
+ return entry[0]
+
+ @property
+ def group(self):
+ """ return group name of file. """
+ if iswin32:
+ raise NotImplementedError("XXX win32")
+ import grp
+ entry = py.error.checked_call(grp.getgrgid, self.gid)
+ return entry[0]
+
+ def isdir(self):
+ return S_ISDIR(self._osstatresult.st_mode)
+
+ def isfile(self):
+ return S_ISREG(self._osstatresult.st_mode)
+
+    def islink(self):
+        # check the lstat() result so that symlinks are detected even when
+        # this Stat was created from os.stat() (which follows links)
+        st = self.path.lstat()
+        return S_ISLNK(st.mode)
+
+class PosixPath(common.PathBase):
+ def chown(self, user, group, rec=0):
+ """ change ownership to the given user and group.
+ user and group may be specified by a number or
+ by a name. if rec is True change ownership
+ recursively.
+ """
+ uid = getuserid(user)
+ gid = getgroupid(group)
+ if rec:
+ for x in self.visit(rec=lambda x: x.check(link=0)):
+ if x.check(link=0):
+ py.error.checked_call(os.chown, str(x), uid, gid)
+ py.error.checked_call(os.chown, str(self), uid, gid)
+
+ def readlink(self):
+ """ return value of a symbolic link. """
+ return py.error.checked_call(os.readlink, self.strpath)
+
+ def mklinkto(self, oldname):
+ """ posix style hard link to another name. """
+ py.error.checked_call(os.link, str(oldname), str(self))
+
+ def mksymlinkto(self, value, absolute=1):
+ """ create a symbolic link with the given value (pointing to another name). """
+ if absolute:
+ py.error.checked_call(os.symlink, str(value), self.strpath)
+ else:
+ base = self.common(value)
+ # with posix local paths '/' is always a common base
+ relsource = self.__class__(value).relto(base)
+ reldest = self.relto(base)
+ n = reldest.count(self.sep)
+ target = self.sep.join(('..', )*n + (relsource, ))
+ py.error.checked_call(os.symlink, target, self.strpath)
+
+def getuserid(user):
+ import pwd
+ if not isinstance(user, int):
+ user = pwd.getpwnam(user)[2]
+ return user
+
+def getgroupid(group):
+ import grp
+ if not isinstance(group, int):
+ group = grp.getgrnam(group)[2]
+ return group
+
+FSBase = not iswin32 and PosixPath or common.PathBase
+
+class LocalPath(FSBase):
+ """ object oriented interface to os.path and other local filesystem
+ related information.
+ """
+ class ImportMismatchError(ImportError):
+ """ raised on pyimport() if there is a mismatch of __file__'s"""
+
+ sep = os.sep
+ class Checkers(common.Checkers):
+ def _stat(self):
+ try:
+ return self._statcache
+ except AttributeError:
+ try:
+ self._statcache = self.path.stat()
+ except py.error.ELOOP:
+ self._statcache = self.path.lstat()
+ return self._statcache
+
+ def dir(self):
+ return S_ISDIR(self._stat().mode)
+
+ def file(self):
+ return S_ISREG(self._stat().mode)
+
+ def exists(self):
+ return self._stat()
+
+ def link(self):
+ st = self.path.lstat()
+ return S_ISLNK(st.mode)
+
+ def __init__(self, path=None, expanduser=False):
+ """ Initialize and return a local Path instance.
+
+ Path can be relative to the current directory.
+ If path is None it defaults to the current working directory.
+ If expanduser is True, tilde-expansion is performed.
+ Note that Path instances always carry an absolute path.
+ Note also that passing in a local path object will simply return
+ the exact same path object. Use new() to get a new copy.
+ """
+ if path is None:
+ self.strpath = py.error.checked_call(os.getcwd)
+ else:
+ try:
+ path = fspath(path)
+ except TypeError:
+ raise ValueError("can only pass None, Path instances "
+ "or non-empty strings to LocalPath")
+ if expanduser:
+ path = os.path.expanduser(path)
+ self.strpath = abspath(path)
+
+ def __hash__(self):
+ s = self.strpath
+ if iswin32:
+ s = s.lower()
+ return hash(s)
+
+ def __eq__(self, other):
+ s1 = fspath(self)
+ try:
+ s2 = fspath(other)
+ except TypeError:
+ return False
+ if iswin32:
+ s1 = s1.lower()
+ try:
+ s2 = s2.lower()
+ except AttributeError:
+ return False
+ return s1 == s2
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __lt__(self, other):
+ return fspath(self) < fspath(other)
+
+ def __gt__(self, other):
+ return fspath(self) > fspath(other)
+
+ def samefile(self, other):
+ """ return True if 'other' references the same file as 'self'.
+ """
+ other = fspath(other)
+ if not isabs(other):
+ other = abspath(other)
+ if self == other:
+ return True
+ if not hasattr(os.path, "samefile"):
+ return False
+ return py.error.checked_call(
+ os.path.samefile, self.strpath, other)
+
+ def remove(self, rec=1, ignore_errors=False):
+ """ remove a file or directory (or a directory tree if rec=1).
+ if ignore_errors is True, errors while removing directories will
+ be ignored.
+ """
+ if self.check(dir=1, link=0):
+ if rec:
+ # force remove of readonly files on windows
+ if iswin32:
+ self.chmod(0o700, rec=1)
+ import shutil
+ py.error.checked_call(
+ shutil.rmtree, self.strpath,
+ ignore_errors=ignore_errors)
+ else:
+ py.error.checked_call(os.rmdir, self.strpath)
+ else:
+ if iswin32:
+ self.chmod(0o700)
+ py.error.checked_call(os.remove, self.strpath)
+
+ def computehash(self, hashtype="md5", chunksize=524288):
+ """ return hexdigest of hashvalue for this file. """
+ try:
+ try:
+ import hashlib as mod
+ except ImportError:
+ if hashtype == "sha1":
+ hashtype = "sha"
+ mod = __import__(hashtype)
+ hash = getattr(mod, hashtype)()
+ except (AttributeError, ImportError):
+ raise ValueError("Don't know how to compute %r hash" %(hashtype,))
+ f = self.open('rb')
+ try:
+ while 1:
+ buf = f.read(chunksize)
+ if not buf:
+ return hash.hexdigest()
+ hash.update(buf)
+ finally:
+ f.close()
+
+ def new(self, **kw):
+ """ create a modified version of this path.
+ the following keyword arguments modify various path parts::
+
+ a:/some/path/to/a/file.ext
+ xx drive
+ xxxxxxxxxxxxxxxxx dirname
+ xxxxxxxx basename
+ xxxx purebasename
+ xxx ext
+ """
+ obj = object.__new__(self.__class__)
+ if not kw:
+ obj.strpath = self.strpath
+ return obj
+ drive, dirname, basename, purebasename,ext = self._getbyspec(
+ "drive,dirname,basename,purebasename,ext")
+ if 'basename' in kw:
+ if 'purebasename' in kw or 'ext' in kw:
+ raise ValueError("invalid specification %r" % kw)
+ else:
+ pb = kw.setdefault('purebasename', purebasename)
+ try:
+ ext = kw['ext']
+ except KeyError:
+ pass
+ else:
+ if ext and not ext.startswith('.'):
+ ext = '.' + ext
+ kw['basename'] = pb + ext
+
+ if ('dirname' in kw and not kw['dirname']):
+ kw['dirname'] = drive
+ else:
+ kw.setdefault('dirname', dirname)
+ kw.setdefault('sep', self.sep)
+ obj.strpath = normpath(
+ "%(dirname)s%(sep)s%(basename)s" % kw)
+ return obj
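+    # Editorial usage note (not part of upstream py):
+    #
+    #   p = py.path.local("/tmp/pkg/mod.py")
+    #   p.new(ext=".txt")           # -> local('/tmp/pkg/mod.txt')
+    #   p.new(basename="other.py")  # -> local('/tmp/pkg/other.py')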
+
+ def _getbyspec(self, spec):
+ """ see new for what 'spec' can be. """
+ res = []
+ parts = self.strpath.split(self.sep)
+
+ args = filter(None, spec.split(',') )
+ append = res.append
+ for name in args:
+ if name == 'drive':
+ append(parts[0])
+ elif name == 'dirname':
+ append(self.sep.join(parts[:-1]))
+ else:
+ basename = parts[-1]
+ if name == 'basename':
+ append(basename)
+ else:
+ i = basename.rfind('.')
+ if i == -1:
+ purebasename, ext = basename, ''
+ else:
+ purebasename, ext = basename[:i], basename[i:]
+ if name == 'purebasename':
+ append(purebasename)
+ elif name == 'ext':
+ append(ext)
+ else:
+ raise ValueError("invalid part specification %r" % name)
+ return res
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path joined with any given path arguments. """
+ if not kwargs:
+ path = object.__new__(self.__class__)
+ path.strpath = dirname(self.strpath)
+ if args:
+ path = path.join(*args)
+ return path
+ return super(LocalPath, self).dirpath(*args, **kwargs)
+
+ def join(self, *args, **kwargs):
+ """ return a new path by appending all 'args' as path
+ components. if abs=1 is used restart from root if any
+ of the args is an absolute path.
+ """
+ sep = self.sep
+ strargs = [fspath(arg) for arg in args]
+ strpath = self.strpath
+ if kwargs.get('abs'):
+ newargs = []
+ for arg in reversed(strargs):
+ if isabs(arg):
+ strpath = arg
+ strargs = newargs
+ break
+ newargs.insert(0, arg)
+ # special case for when we have e.g. strpath == "/"
+ actual_sep = "" if strpath.endswith(sep) else sep
+ for arg in strargs:
+ arg = arg.strip(sep)
+ if iswin32:
+ # allow unix style paths even on windows.
+ arg = arg.strip('/')
+ arg = arg.replace('/', sep)
+ strpath = strpath + actual_sep + arg
+ actual_sep = sep
+ obj = object.__new__(self.__class__)
+ obj.strpath = normpath(strpath)
+ return obj
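+    # Editorial usage note (not part of upstream py):
+    #
+    #   py.path.local("/tmp").join("a", "b")       # -> local('/tmp/a/b')
+    #   py.path.local("/tmp").join("/etc", abs=1)  # -> local('/etc')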
+
+ def open(self, mode='r', ensure=False, encoding=None):
+ """ return an opened file with the given mode.
+
+ If ensure is True, create parent directories if needed.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ if encoding:
+ return py.error.checked_call(io.open, self.strpath, mode, encoding=encoding)
+ return py.error.checked_call(open, self.strpath, mode)
+
+ def _fastjoin(self, name):
+ child = object.__new__(self.__class__)
+ child.strpath = self.strpath + self.sep + name
+ return child
+
+ def islink(self):
+ return islink(self.strpath)
+
+ def check(self, **kw):
+ if not kw:
+ return exists(self.strpath)
+ if len(kw) == 1:
+ if "dir" in kw:
+ return not kw["dir"] ^ isdir(self.strpath)
+ if "file" in kw:
+ return not kw["file"] ^ isfile(self.strpath)
+ return super(LocalPath, self).check(**kw)
+
+ _patternchars = set("*?[" + os.path.sep)
+ def listdir(self, fil=None, sort=None):
+ """ list directory contents, possibly filter by the given fil func
+ and possibly sorted.
+ """
+ if fil is None and sort is None:
+ names = py.error.checked_call(os.listdir, self.strpath)
+ return map_as_list(self._fastjoin, names)
+ if isinstance(fil, py.builtin._basestring):
+ if not self._patternchars.intersection(fil):
+ child = self._fastjoin(fil)
+ if exists(child.strpath):
+ return [child]
+ return []
+ fil = common.FNMatcher(fil)
+ names = py.error.checked_call(os.listdir, self.strpath)
+ res = []
+ for name in names:
+ child = self._fastjoin(name)
+ if fil is None or fil(child):
+ res.append(child)
+ self._sortlist(res, sort)
+ return res
+
+ def size(self):
+ """ return size of the underlying file object """
+ return self.stat().size
+
+ def mtime(self):
+ """ return last modification time of the path. """
+ return self.stat().mtime
+
+ def copy(self, target, mode=False, stat=False):
+ """ copy path to target.
+
+        If mode is True, will copy permission from path to target.
+ If stat is True, copy permission, last modification
+ time, last access time, and flags from path to target.
+ """
+ if self.check(file=1):
+ if target.check(dir=1):
+ target = target.join(self.basename)
+ assert self!=target
+ copychunked(self, target)
+ if mode:
+ copymode(self.strpath, target.strpath)
+ if stat:
+ copystat(self, target)
+ else:
+ def rec(p):
+ return p.check(link=0)
+ for x in self.visit(rec=rec):
+ relpath = x.relto(self)
+ newx = target.join(relpath)
+ newx.dirpath().ensure(dir=1)
+ if x.check(link=1):
+ newx.mksymlinkto(x.readlink())
+ continue
+ elif x.check(file=1):
+ copychunked(x, newx)
+ elif x.check(dir=1):
+ newx.ensure(dir=1)
+ if mode:
+ copymode(x.strpath, newx.strpath)
+ if stat:
+ copystat(x, newx)
+
+ def rename(self, target):
+ """ rename this path to target. """
+ target = fspath(target)
+ return py.error.checked_call(os.rename, self.strpath, target)
+
+ def dump(self, obj, bin=1):
+ """ pickle object into path location"""
+ f = self.open('wb')
+ import pickle
+ try:
+ py.error.checked_call(pickle.dump, obj, f, bin)
+ finally:
+ f.close()
+
+ def mkdir(self, *args):
+ """ create & return the directory joined with args. """
+ p = self.join(*args)
+ py.error.checked_call(os.mkdir, fspath(p))
+ return p
+
+ def write_binary(self, data, ensure=False):
+ """ write binary data into path. If ensure is True create
+ missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ with self.open('wb') as f:
+ f.write(data)
+
+ def write_text(self, data, encoding, ensure=False):
+ """ write text data into path using the specified encoding.
+ If ensure is True create missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ with self.open('w', encoding=encoding) as f:
+ f.write(data)
+
+ def write(self, data, mode='w', ensure=False):
+ """ write data into path. If ensure is True create
+ missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ if 'b' in mode:
+ if not py.builtin._isbytes(data):
+ raise ValueError("can only process bytes")
+ else:
+ if not py.builtin._istext(data):
+ if not py.builtin._isbytes(data):
+ data = str(data)
+ else:
+ data = py.builtin._totext(data, sys.getdefaultencoding())
+ f = self.open(mode)
+ try:
+ f.write(data)
+ finally:
+ f.close()
+
+ def _ensuredirs(self):
+ parent = self.dirpath()
+ if parent == self:
+ return self
+ if parent.check(dir=0):
+ parent._ensuredirs()
+ if self.check(dir=0):
+ try:
+ self.mkdir()
+ except py.error.EEXIST:
+ # race condition: file/dir created by another thread/process.
+ # complain if it is not a dir
+ if self.check(dir=0):
+ raise
+ return self
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). if you specify a keyword argument 'dir=True'
+ then the path is forced to be a directory path.
+ """
+ p = self.join(*args)
+ if kwargs.get('dir', 0):
+ return p._ensuredirs()
+ else:
+ p.dirpath()._ensuredirs()
+ if not p.check(file=1):
+ p.open('w').close()
+ return p
+
+ def stat(self, raising=True):
+ """ Return an os.stat() tuple. """
+ if raising == True:
+ return Stat(self, py.error.checked_call(os.stat, self.strpath))
+ try:
+ return Stat(self, os.stat(self.strpath))
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return None
+
+ def lstat(self):
+ """ Return an os.lstat() tuple. """
+ return Stat(self, py.error.checked_call(os.lstat, self.strpath))
+
+ def setmtime(self, mtime=None):
+ """ set modification time for the given path. if 'mtime' is None
+ (the default) then the file's mtime is set to current time.
+
+ Note that the resolution for 'mtime' is platform dependent.
+ """
+ if mtime is None:
+ return py.error.checked_call(os.utime, self.strpath, mtime)
+ try:
+ return py.error.checked_call(os.utime, self.strpath, (-1, mtime))
+ except py.error.EINVAL:
+ return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime))
+
+ def chdir(self):
+ """ change directory to self and return old current directory """
+ try:
+ old = self.__class__()
+ except py.error.ENOENT:
+ old = None
+ py.error.checked_call(os.chdir, self.strpath)
+ return old
+
+
+ @contextmanager
+ def as_cwd(self):
+ """
+ Return a context manager, which changes to the path's dir during the
+ managed "with" context.
+ On __enter__ it returns the old dir, which might be ``None``.
+ """
+ old = self.chdir()
+ try:
+ yield old
+ finally:
+ if old is not None:
+ old.chdir()
+
+ def realpath(self):
+ """ return a new path which contains no symbolic links."""
+ return self.__class__(os.path.realpath(self.strpath))
+
+ def atime(self):
+ """ return last access time of the path. """
+ return self.stat().atime
+
+ def __repr__(self):
+ return 'local(%r)' % self.strpath
+
+ def __str__(self):
+ """ return string representation of the Path. """
+ return self.strpath
+
+ def chmod(self, mode, rec=0):
+ """ change permissions to the given mode. If mode is an
+ integer it directly encodes the os-specific modes.
+ if rec is True perform recursively.
+ """
+ if not isinstance(mode, int):
+ raise TypeError("mode %r must be an integer" % (mode,))
+ if rec:
+ for x in self.visit(rec=rec):
+ py.error.checked_call(os.chmod, str(x), mode)
+ py.error.checked_call(os.chmod, self.strpath, mode)
+
+ def pypkgpath(self):
+ """ return the Python package path by looking for the last
+ directory upwards which still contains an __init__.py.
+ Return None if a pkgpath can not be determined.
+ """
+ pkgpath = None
+ for parent in self.parts(reverse=True):
+ if parent.isdir():
+ if not parent.join('__init__.py').exists():
+ break
+ if not isimportable(parent.basename):
+ break
+ pkgpath = parent
+ return pkgpath
+
+ def _ensuresyspath(self, ensuremode, path):
+ if ensuremode:
+ s = str(path)
+ if ensuremode == "append":
+ if s not in sys.path:
+ sys.path.append(s)
+ else:
+ if s != sys.path[0]:
+ sys.path.insert(0, s)
+
+ def pyimport(self, modname=None, ensuresyspath=True):
+ """ return path as an imported python module.
+
+ If modname is None, look for the containing package
+ and construct an according module name.
+ The module will be put/looked up in sys.modules.
+ if ensuresyspath is True then the root dir for importing
+ the file (taking __init__.py files into account) will
+ be prepended to sys.path if it isn't there already.
+ If ensuresyspath=="append" the root dir will be appended
+ if it isn't already contained in sys.path.
+ if ensuresyspath is False no modification of syspath happens.
+
+        The special value ensuresyspath=="importlib" is intended
+        purely for use in pytest; it is capable only of importing
+        separate .py files outside packages, e.g. for a test suite
+ without any __init__.py file. It effectively allows having
+ same-named test modules in different places and offers
+ mild opt-in via this option. Note that it works only in
+ recent versions of python.
+ """
+ if not self.check():
+ raise py.error.ENOENT(self)
+
+ if ensuresyspath == 'importlib':
+ if modname is None:
+ modname = self.purebasename
+ if not ALLOW_IMPORTLIB_MODE:
+ raise ImportError(
+ "Can't use importlib due to old version of Python")
+ spec = importlib.util.spec_from_file_location(
+ modname, str(self))
+ if spec is None:
+ raise ImportError(
+ "Can't find module %s at location %s" %
+ (modname, str(self))
+ )
+ mod = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(mod)
+ return mod
+
+ pkgpath = None
+ if modname is None:
+ pkgpath = self.pypkgpath()
+ if pkgpath is not None:
+ pkgroot = pkgpath.dirpath()
+ names = self.new(ext="").relto(pkgroot).split(self.sep)
+ if names[-1] == "__init__":
+ names.pop()
+ modname = ".".join(names)
+ else:
+ pkgroot = self.dirpath()
+ modname = self.purebasename
+
+ self._ensuresyspath(ensuresyspath, pkgroot)
+ __import__(modname)
+ mod = sys.modules[modname]
+ if self.basename == "__init__.py":
+ return mod # we don't check anything as we might
+ # be in a namespace package ... too icky to check
+ modfile = mod.__file__
+ if modfile[-4:] in ('.pyc', '.pyo'):
+ modfile = modfile[:-1]
+ elif modfile.endswith('$py.class'):
+ modfile = modfile[:-9] + '.py'
+ if modfile.endswith(os.path.sep + "__init__.py"):
+ if self.basename != "__init__.py":
+ modfile = modfile[:-12]
+ try:
+ issame = self.samefile(modfile)
+ except py.error.ENOENT:
+ issame = False
+ if not issame:
+ ignore = os.getenv('PY_IGNORE_IMPORTMISMATCH')
+ if ignore != '1':
+ raise self.ImportMismatchError(modname, modfile, self)
+ return mod
+ else:
+ try:
+ return sys.modules[modname]
+ except KeyError:
+ # we have a custom modname, do a pseudo-import
+ import types
+ mod = types.ModuleType(modname)
+ mod.__file__ = str(self)
+ sys.modules[modname] = mod
+ try:
+ py.builtin.execfile(str(self), mod.__dict__)
+ except:
+ del sys.modules[modname]
+ raise
+ return mod
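+    # Editorial usage sketch (not part of upstream py), assuming pkg/mod.py
+    # exists under the current directory next to a pkg/__init__.py:
+    #
+    #   mod = py.path.local("pkg/mod.py").pyimport()   # imports "pkg.mod",
+    #   # prepending the package root to sys.path unless ensuresyspath says
+    #   # otherwise (see the docstring above).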
+
+ def sysexec(self, *argv, **popen_opts):
+ """ return stdout text from executing a system child process,
+ where the 'self' path points to executable.
+ The process is directly invoked and not through a system shell.
+ """
+ from subprocess import Popen, PIPE
+ argv = map_as_list(str, argv)
+ popen_opts['stdout'] = popen_opts['stderr'] = PIPE
+ proc = Popen([str(self)] + argv, **popen_opts)
+ stdout, stderr = proc.communicate()
+ ret = proc.wait()
+ if py.builtin._isbytes(stdout):
+ stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+ if ret != 0:
+ if py.builtin._isbytes(stderr):
+ stderr = py.builtin._totext(stderr, sys.getdefaultencoding())
+ raise py.process.cmdexec.Error(ret, ret, str(self),
+ stdout, stderr,)
+ return stdout
+
+ def sysfind(cls, name, checker=None, paths=None):
+ """ return a path object found by looking at the systems
+ underlying PATH specification. If the checker is not None
+ it will be invoked to filter matching paths. If a binary
+        cannot be found, None is returned.
+        Note: This probably does not work on plain win32 systems
+ but may work on cygwin.
+ """
+ if isabs(name):
+ p = py.path.local(name)
+ if p.check(file=1):
+ return p
+ else:
+ if paths is None:
+ if iswin32:
+ paths = os.environ['Path'].split(';')
+ if '' not in paths and '.' not in paths:
+ paths.append('.')
+ try:
+ systemroot = os.environ['SYSTEMROOT']
+ except KeyError:
+ pass
+ else:
+ paths = [path.replace('%SystemRoot%', systemroot)
+ for path in paths]
+ else:
+ paths = os.environ['PATH'].split(':')
+ tryadd = []
+ if iswin32:
+ tryadd += os.environ['PATHEXT'].split(os.pathsep)
+ tryadd.append("")
+
+ for x in paths:
+ for addext in tryadd:
+ p = py.path.local(x).join(name, abs=True) + addext
+ try:
+ if p.check(file=1):
+ if checker:
+ if not checker(p):
+ continue
+ return p
+ except py.error.EACCES:
+ pass
+ return None
+ sysfind = classmethod(sysfind)
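+    # Editorial usage sketch (not part of upstream py):
+    #
+    #   git = py.path.local.sysfind("git")   # a local() path or None
+    #   if git is not None:
+    #       print(git.sysexec("--version"))  # runs the binary via sysexec()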
+
+ def _gethomedir(cls):
+ try:
+ x = os.environ['HOME']
+ except KeyError:
+ try:
+ x = os.environ["HOMEDRIVE"] + os.environ['HOMEPATH']
+ except KeyError:
+ return None
+ return cls(x)
+ _gethomedir = classmethod(_gethomedir)
+
+    # special class constructors for local filesystem paths
+ @classmethod
+ def get_temproot(cls):
+ """ return the system's temporary directory
+ (where tempfiles are usually created in)
+ """
+ import tempfile
+ return py.path.local(tempfile.gettempdir())
+
+ @classmethod
+ def mkdtemp(cls, rootdir=None):
+ """ return a Path object pointing to a fresh new temporary directory
+ (which we created ourself).
+ """
+ import tempfile
+ if rootdir is None:
+ rootdir = cls.get_temproot()
+ return cls(py.error.checked_call(tempfile.mkdtemp, dir=str(rootdir)))
+
+ def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
+ lock_timeout=172800): # two days
+ """ return unique directory with a number greater than the current
+ maximum one. The number is assumed to start directly after prefix.
+ if keep is true directories with a number less than (maxnum-keep)
+ will be removed. If .lock files are used (lock_timeout non-zero),
+ algorithm is multi-process safe.
+ """
+ if rootdir is None:
+ rootdir = cls.get_temproot()
+
+ nprefix = prefix.lower()
+ def parse_num(path):
+ """ parse the number out of a path (if it matches the prefix) """
+ nbasename = path.basename.lower()
+ if nbasename.startswith(nprefix):
+ try:
+ return int(nbasename[len(nprefix):])
+ except ValueError:
+ pass
+
+ def create_lockfile(path):
+ """ exclusively create lockfile. Throws when failed """
+ mypid = os.getpid()
+ lockfile = path.join('.lock')
+ if hasattr(lockfile, 'mksymlinkto'):
+ lockfile.mksymlinkto(str(mypid))
+ else:
+ fd = py.error.checked_call(os.open, str(lockfile), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644)
+ with os.fdopen(fd, 'w') as f:
+ f.write(str(mypid))
+ return lockfile
+
+ def atexit_remove_lockfile(lockfile):
+ """ ensure lockfile is removed at process exit """
+ mypid = os.getpid()
+ def try_remove_lockfile():
+ # in a fork() situation, only the last process should
+ # remove the .lock, otherwise the other processes run the
+ # risk of seeing their temporary dir disappear. For now
+ # we remove the .lock in the parent only (i.e. we assume
+ # that the children finish before the parent).
+ if os.getpid() != mypid:
+ return
+ try:
+ lockfile.remove()
+ except py.error.Error:
+ pass
+ atexit.register(try_remove_lockfile)
+
+ # compute the maximum number currently in use with the prefix
+ lastmax = None
+ while True:
+ maxnum = -1
+ for path in rootdir.listdir():
+ num = parse_num(path)
+ if num is not None:
+ maxnum = max(maxnum, num)
+
+ # make the new directory
+ try:
+ udir = rootdir.mkdir(prefix + str(maxnum+1))
+ if lock_timeout:
+ lockfile = create_lockfile(udir)
+ atexit_remove_lockfile(lockfile)
+ except (py.error.EEXIST, py.error.ENOENT, py.error.EBUSY):
+ # race condition (1): another thread/process created the dir
+ # in the meantime - try again
+ # race condition (2): another thread/process spuriously acquired
+ # lock treating empty directory as candidate
+ # for removal - try again
+ # race condition (3): another thread/process tried to create the lock at
+ # the same time (happened in Python 3.3 on Windows)
+ # https://ci.appveyor.com/project/pytestbot/py/build/1.0.21/job/ffi85j4c0lqwsfwa
+ if lastmax == maxnum:
+ raise
+ lastmax = maxnum
+ continue
+ break
+
+ def get_mtime(path):
+ """ read file modification time """
+ try:
+ return path.lstat().mtime
+ except py.error.Error:
+ pass
+
+ garbage_prefix = prefix + 'garbage-'
+
+ def is_garbage(path):
+ """ check if path denotes directory scheduled for removal """
+ bn = path.basename
+ return bn.startswith(garbage_prefix)
+
+ # prune old directories
+ udir_time = get_mtime(udir)
+ if keep and udir_time:
+ for path in rootdir.listdir():
+ num = parse_num(path)
+ if num is not None and num <= (maxnum - keep):
+ try:
+ # try acquiring lock to remove directory as exclusive user
+ if lock_timeout:
+ create_lockfile(path)
+ except (py.error.EEXIST, py.error.ENOENT, py.error.EBUSY):
+ path_time = get_mtime(path)
+ if not path_time:
+ # assume directory doesn't exist now
+ continue
+ if abs(udir_time - path_time) < lock_timeout:
+ # assume directory with lockfile exists
+ # and lock timeout hasn't expired yet
+ continue
+
+ # path dir locked for exclusive use
+ # and scheduled for removal to avoid another thread/process
+ # treating it as a new directory or removal candidate
+ garbage_path = rootdir.join(garbage_prefix + str(uuid.uuid4()))
+ try:
+ path.rename(garbage_path)
+ garbage_path.remove(rec=1)
+ except KeyboardInterrupt:
+ raise
+ except: # this might be py.error.Error, WindowsError ...
+ pass
+ if is_garbage(path):
+ try:
+ path.remove(rec=1)
+ except KeyboardInterrupt:
+ raise
+ except: # this might be py.error.Error, WindowsError ...
+ pass
+
+ # make link...
+ try:
+ username = os.environ['USER'] #linux, et al
+ except KeyError:
+ try:
+ username = os.environ['USERNAME'] #windows
+ except KeyError:
+ username = 'current'
+
+ src = str(udir)
+ dest = src[:src.rfind('-')] + '-' + username
+ try:
+ os.unlink(dest)
+ except OSError:
+ pass
+ try:
+ os.symlink(src, dest)
+ except (OSError, AttributeError, NotImplementedError):
+ pass
+
+ return udir
+ make_numbered_dir = classmethod(make_numbered_dir)
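+    # Editorial usage sketch (not part of upstream py):
+    #
+    #   d = py.path.local.make_numbered_dir(prefix="session-", keep=3)
+    #   # creates e.g. <tmproot>/session-42, prunes sufficiently old numbered
+    #   # dirs (guarded by .lock files), and points the <tmproot>/session-<user>
+    #   # convenience symlink at the fresh directory.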
+
+
+def copymode(src, dest):
+ """ copy permission from src to dst. """
+ import shutil
+ shutil.copymode(src, dest)
+
+
+def copystat(src, dest):
+ """ copy permission, last modification time,
+ last access time, and flags from src to dst."""
+ import shutil
+ shutil.copystat(str(src), str(dest))
+
+
+def copychunked(src, dest):
+ chunksize = 524288 # half a meg of bytes
+ fsrc = src.open('rb')
+ try:
+ fdest = dest.open('wb')
+ try:
+ while 1:
+ buf = fsrc.read(chunksize)
+ if not buf:
+ break
+ fdest.write(buf)
+ finally:
+ fdest.close()
+ finally:
+ fsrc.close()
+
+
+def isimportable(name):
+ if name and (name[0].isalpha() or name[0] == '_'):
+ name = name.replace("_", '')
+ return not name or name.isalnum()
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_path/svnurl.py b/testing/web-platform/tests/tools/third_party/py/py/_path/svnurl.py
new file mode 100644
index 0000000000..6589a71d09
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_path/svnurl.py
@@ -0,0 +1,380 @@
+"""
+module defining a subversion path object based on the external
+command 'svn'. This modules aims to work with svn 1.3 and higher
+but might also interact well with earlier versions.
+"""
+
+import os, sys, time, re
+import py
+from py import path, process
+from py._path import common
+from py._path import svnwc as svncommon
+from py._path.cacheutil import BuildcostAccessCache, AgingCache
+
+DEBUG=False
+
+class SvnCommandPath(svncommon.SvnPathBase):
+ """ path implementation that offers access to (possibly remote) subversion
+ repositories. """
+
+ _lsrevcache = BuildcostAccessCache(maxentries=128)
+ _lsnorevcache = AgingCache(maxentries=1000, maxseconds=60.0)
+
+ def __new__(cls, path, rev=None, auth=None):
+ self = object.__new__(cls)
+ if isinstance(path, cls):
+ rev = path.rev
+ auth = path.auth
+ path = path.strpath
+ svncommon.checkbadchars(path)
+ path = path.rstrip('/')
+ self.strpath = path
+ self.rev = rev
+ self.auth = auth
+ return self
+
+ def __repr__(self):
+ if self.rev == -1:
+ return 'svnurl(%r)' % self.strpath
+ else:
+ return 'svnurl(%r, %r)' % (self.strpath, self.rev)
+
+ def _svnwithrev(self, cmd, *args):
+ """ execute an svn command, append our own url and revision """
+ if self.rev is None:
+ return self._svnwrite(cmd, *args)
+ else:
+ args = ['-r', self.rev] + list(args)
+ return self._svnwrite(cmd, *args)
+
+ def _svnwrite(self, cmd, *args):
+ """ execute an svn command, append our own url """
+ l = ['svn %s' % cmd]
+ args = ['"%s"' % self._escape(item) for item in args]
+ l.extend(args)
+ l.append('"%s"' % self._encodedurl())
+ # fixing the locale because we can't otherwise parse
+ string = " ".join(l)
+ if DEBUG:
+ print("execing %s" % string)
+ out = self._svncmdexecauth(string)
+ return out
+
+ def _svncmdexecauth(self, cmd):
+ """ execute an svn command 'as is' """
+ cmd = svncommon.fixlocale() + cmd
+ if self.auth is not None:
+ cmd += ' ' + self.auth.makecmdoptions()
+ return self._cmdexec(cmd)
+
+ def _cmdexec(self, cmd):
+ try:
+ out = process.cmdexec(cmd)
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if (e.err.find('File Exists') != -1 or
+ e.err.find('File already exists') != -1):
+ raise py.error.EEXIST(self)
+ raise
+ return out
+
+ def _svnpopenauth(self, cmd):
+ """ execute an svn command, return a pipe for reading stdin """
+ cmd = svncommon.fixlocale() + cmd
+ if self.auth is not None:
+ cmd += ' ' + self.auth.makecmdoptions()
+ return self._popen(cmd)
+
+ def _popen(self, cmd):
+ return os.popen(cmd)
+
+ def _encodedurl(self):
+ return self._escape(self.strpath)
+
+ def _norev_delentry(self, path):
+ auth = self.auth and self.auth.makecmdoptions() or None
+ self._lsnorevcache.delentry((str(path), auth))
+
+ def open(self, mode='r'):
+ """ return an opened file with the given mode. """
+ if mode not in ("r", "rU",):
+ raise ValueError("mode %r not supported" % (mode,))
+ assert self.check(file=1) # svn cat returns an empty file otherwise
+ if self.rev is None:
+ return self._svnpopenauth('svn cat "%s"' % (
+ self._escape(self.strpath), ))
+ else:
+ return self._svnpopenauth('svn cat -r %s "%s"' % (
+ self.rev, self._escape(self.strpath)))
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path of the current path joined
+ with any given path arguments.
+ """
+ l = self.strpath.split(self.sep)
+ if len(l) < 4:
+ raise py.error.EINVAL(self, "base is not valid")
+ elif len(l) == 4:
+ return self.join(*args, **kwargs)
+ else:
+ return self.new(basename='').join(*args, **kwargs)
+
+ # modifying methods (cache must be invalidated)
+ def mkdir(self, *args, **kwargs):
+ """ create & return the directory joined with args.
+ pass a 'msg' keyword argument to set the commit message.
+ """
+ commit_msg = kwargs.get('msg', "mkdir by py lib invocation")
+ createpath = self.join(*args)
+ createpath._svnwrite('mkdir', '-m', commit_msg)
+ self._norev_delentry(createpath.dirpath())
+ return createpath
+
+ def copy(self, target, msg='copied by py lib invocation'):
+ """ copy path to target with checkin message msg."""
+ if getattr(target, 'rev', None) is not None:
+ raise py.error.EINVAL(target, "revisions are immutable")
+ self._svncmdexecauth('svn copy -m "%s" "%s" "%s"' %(msg,
+ self._escape(self), self._escape(target)))
+ self._norev_delentry(target.dirpath())
+
+ def rename(self, target, msg="renamed by py lib invocation"):
+ """ rename this path to target with checkin message msg. """
+ if getattr(self, 'rev', None) is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ self._svncmdexecauth('svn move -m "%s" --force "%s" "%s"' %(
+ msg, self._escape(self), self._escape(target)))
+ self._norev_delentry(self.dirpath())
+ self._norev_delentry(self)
+
+ def remove(self, rec=1, msg='removed by py lib invocation'):
+ """ remove a file or directory (or a directory tree if rec=1) with
+        checkin message msg."""
+ if self.rev is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ self._svncmdexecauth('svn rm -m "%s" "%s"' %(msg, self._escape(self)))
+ self._norev_delentry(self.dirpath())
+
+ def export(self, topath):
+ """ export to a local path
+
+        topath should not exist prior to calling this; returns a
+ py.path.local instance
+ """
+ topath = py.path.local(topath)
+ args = ['"%s"' % (self._escape(self),),
+ '"%s"' % (self._escape(topath),)]
+ if self.rev is not None:
+ args = ['-r', str(self.rev)] + args
+ self._svncmdexecauth('svn export %s' % (' '.join(args),))
+ return topath
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). If you specify a keyword argument 'dir=True'
+ then the path is forced to be a directory path.
+ """
+ if getattr(self, 'rev', None) is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ target = self.join(*args)
+ dir = kwargs.get('dir', 0)
+ for x in target.parts(reverse=True):
+ if x.check():
+ break
+ else:
+            raise py.error.ENOENT(target, "has no valid base!")
+ if x == target:
+ if not x.check(dir=dir):
+ raise dir and py.error.ENOTDIR(x) or py.error.EISDIR(x)
+ return x
+ tocreate = target.relto(x)
+ basename = tocreate.split(self.sep, 1)[0]
+ tempdir = py.path.local.mkdtemp()
+ try:
+ tempdir.ensure(tocreate, dir=dir)
+ cmd = 'svn import -m "%s" "%s" "%s"' % (
+ "ensure %s" % self._escape(tocreate),
+ self._escape(tempdir.join(basename)),
+ x.join(basename)._encodedurl())
+ self._svncmdexecauth(cmd)
+ self._norev_delentry(x)
+ finally:
+ tempdir.remove()
+ return target
+
+ # end of modifying methods
+ def _propget(self, name):
+ res = self._svnwithrev('propget', name)
+ return res[:-1] # strip trailing newline
+
+ def _proplist(self):
+ res = self._svnwithrev('proplist')
+ lines = res.split('\n')
+ lines = [x.strip() for x in lines[1:]]
+ return svncommon.PropListDict(self, lines)
+
+ def info(self):
+ """ return an Info structure with svn-provided information. """
+ parent = self.dirpath()
+ nameinfo_seq = parent._listdir_nameinfo()
+ bn = self.basename
+ for name, info in nameinfo_seq:
+ if name == bn:
+ return info
+ raise py.error.ENOENT(self)
+
+
+ def _listdir_nameinfo(self):
+ """ return sequence of name-info directory entries of self """
+ def builder():
+ try:
+ res = self._svnwithrev('ls', '-v')
+ except process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('non-existent in that revision') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find("E200009:") != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('File not found') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('not part of a repository')!=-1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('Unable to open')!=-1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.lower().find('method not allowed')!=-1:
+ raise py.error.EACCES(self, e.err)
+ raise py.error.Error(e.err)
+ lines = res.split('\n')
+ nameinfo_seq = []
+ for lsline in lines:
+ if lsline:
+ info = InfoSvnCommand(lsline)
+ if info._name != '.': # svn 1.5 produces '.' dirs,
+ nameinfo_seq.append((info._name, info))
+ nameinfo_seq.sort()
+ return nameinfo_seq
+ auth = self.auth and self.auth.makecmdoptions() or None
+ if self.rev is not None:
+ return self._lsrevcache.getorbuild((self.strpath, self.rev, auth),
+ builder)
+ else:
+ return self._lsnorevcache.getorbuild((self.strpath, auth),
+ builder)
+
+ def listdir(self, fil=None, sort=None):
+ """ list directory contents, possibly filter by the given fil func
+ and possibly sorted.
+ """
+ if isinstance(fil, str):
+ fil = common.FNMatcher(fil)
+ nameinfo_seq = self._listdir_nameinfo()
+ if len(nameinfo_seq) == 1:
+ name, info = nameinfo_seq[0]
+ if name == self.basename and info.kind == 'file':
+ #if not self.check(dir=1):
+ raise py.error.ENOTDIR(self)
+ paths = [self.join(name) for (name, info) in nameinfo_seq]
+ if fil:
+ paths = [x for x in paths if fil(x)]
+ self._sortlist(paths, sort)
+ return paths
+
+
+ def log(self, rev_start=None, rev_end=1, verbose=False):
+ """ return a list of LogEntry instances for this path.
+        rev_start is the starting revision (defaulting to HEAD).
+        rev_end is the last revision (defaulting to 1, i.e. the first one).
+        if verbose is True, then the LogEntry instances also know which
+        files changed.
+        """
+ assert self.check() #make it simpler for the pipe
+ rev_start = rev_start is None and "HEAD" or rev_start
+ rev_end = rev_end is None and "HEAD" or rev_end
+
+ if rev_start == "HEAD" and rev_end == 1:
+ rev_opt = ""
+ else:
+ rev_opt = "-r %s:%s" % (rev_start, rev_end)
+ verbose_opt = verbose and "-v" or ""
+ xmlpipe = self._svnpopenauth('svn log --xml %s %s "%s"' %
+ (rev_opt, verbose_opt, self.strpath))
+ from xml.dom import minidom
+ tree = minidom.parse(xmlpipe)
+ result = []
+ for logentry in filter(None, tree.firstChild.childNodes):
+ if logentry.nodeType == logentry.ELEMENT_NODE:
+ result.append(svncommon.LogEntry(logentry))
+ return result
+
+#01234567890123456789012345678901234567890123467
+# 2256 hpk 165 Nov 24 17:55 __init__.py
+# XXX spotted by Guido, SVN 1.3.0 has different aligning, breaks the code!!!
+# 1312 johnny 1627 May 05 14:32 test_decorators.py
+#
+class InfoSvnCommand:
+ # the '0?' part in the middle is an indication of whether the resource is
+ # locked, see 'svn help ls'
+ lspattern = re.compile(
+ r'^ *(?P<rev>\d+) +(?P<author>.+?) +(0? *(?P<size>\d+))? '
+ r'*(?P<date>\w+ +\d{2} +[\d:]+) +(?P<file>.*)$')
+ def __init__(self, line):
+ # this is a typical line from 'svn ls http://...'
+ #_ 1127 jum 0 Jul 13 15:28 branch/
+ match = self.lspattern.match(line)
+ data = match.groupdict()
+ self._name = data['file']
+ if self._name[-1] == '/':
+ self._name = self._name[:-1]
+ self.kind = 'dir'
+ else:
+ self.kind = 'file'
+ #self.has_props = l.pop(0) == 'P'
+ self.created_rev = int(data['rev'])
+ self.last_author = data['author']
+ self.size = data['size'] and int(data['size']) or 0
+ self.mtime = parse_time_with_missing_year(data['date'])
+ self.time = self.mtime * 1000000
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+
+#____________________________________________________
+#
+# helper functions
+#____________________________________________________
+def parse_time_with_missing_year(timestr):
+ """ analyze the time part from a single line of "svn ls -v"
+    the svn output doesn't show the year, which makes the 'timestr'
+    ambiguous.
+ """
+ import calendar
+ t_now = time.gmtime()
+
+ tparts = timestr.split()
+ month = time.strptime(tparts.pop(0), '%b')[1]
+ day = time.strptime(tparts.pop(0), '%d')[2]
+ last = tparts.pop(0) # year or hour:minute
+ try:
+ if ":" in last:
+ raise ValueError()
+ year = time.strptime(last, '%Y')[0]
+ hour = minute = 0
+ except ValueError:
+ hour, minute = time.strptime(last, '%H:%M')[3:5]
+ year = t_now[0]
+
+ t_result = (year, month, day, hour, minute, 0,0,0,0)
+ if t_result > t_now:
+ year -= 1
+ t_result = (year, month, day, hour, minute, 0,0,0,0)
+ return calendar.timegm(t_result)
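+# Editorial usage note (not part of upstream py): the two accepted formats
+# correspond to the way "svn ls -v" prints recent vs. older entries.
+#
+#   parse_time_with_missing_year("Nov 24 17:55")  # time given: current year
+#   parse_time_with_missing_year("May 05 2019")   # year given: time is 00:00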
+
+class PathEntry:
+ def __init__(self, ppart):
+ self.strpath = ppart.firstChild.nodeValue.encode('UTF-8')
+ self.action = ppart.getAttribute('action').encode('UTF-8')
+ if self.action == 'A':
+ self.copyfrom_path = ppart.getAttribute('copyfrom-path').encode('UTF-8')
+ if self.copyfrom_path:
+ self.copyfrom_rev = int(ppart.getAttribute('copyfrom-rev'))
+
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_path/svnwc.py b/testing/web-platform/tests/tools/third_party/py/py/_path/svnwc.py
new file mode 100644
index 0000000000..b5b9d8d544
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_path/svnwc.py
@@ -0,0 +1,1240 @@
+"""
+svn-Command based Implementation of a Subversion WorkingCopy Path.
+
+ SvnWCCommandPath is the main class.
+
+"""
+
+import os, sys, time, re, calendar, string
+import py
+import subprocess
+from py._path import common
+
+#-----------------------------------------------------------
+# Caching latest repository revision and repo-paths
+# (getting them is slow with the current implementations)
+#
+# XXX make mt-safe
+#-----------------------------------------------------------
+
+class cache:
+ proplist = {}
+ info = {}
+ entries = {}
+ prop = {}
+
+class RepoEntry:
+ def __init__(self, url, rev, timestamp):
+ self.url = url
+ self.rev = rev
+ self.timestamp = timestamp
+
+ def __str__(self):
+ return "repo: %s;%s %s" %(self.url, self.rev, self.timestamp)
+
+class RepoCache:
+ """ The Repocache manages discovered repository paths
+ and their revisions. If inside a timeout the cache
+ will even return the revision of the root.
+ """
+ timeout = 20 # seconds after which we forget that we know the last revision
+
+ def __init__(self):
+ self.repos = []
+
+ def clear(self):
+ self.repos = []
+
+ def put(self, url, rev, timestamp=None):
+ if rev is None:
+ return
+ if timestamp is None:
+ timestamp = time.time()
+
+ for entry in self.repos:
+ if url == entry.url:
+ entry.timestamp = timestamp
+ entry.rev = rev
+ #print "set repo", entry
+ break
+ else:
+ entry = RepoEntry(url, rev, timestamp)
+ self.repos.append(entry)
+ #print "appended repo", entry
+
+ def get(self, url):
+ now = time.time()
+ for entry in self.repos:
+ if url.startswith(entry.url):
+ if now < entry.timestamp + self.timeout:
+ #print "returning immediate Etrny", entry
+ return entry.url, entry.rev
+ return entry.url, -1
+ return url, -1
+
+repositories = RepoCache()
+
+
+# svn support code
+
+ALLOWED_CHARS = "_ -/\\=$.~+%" #add characters as necessary when tested
+if sys.platform == "win32":
+ ALLOWED_CHARS += ":"
+ALLOWED_CHARS_HOST = ALLOWED_CHARS + '@:'
+
+def _getsvnversion(ver=[]):
+ try:
+ return ver[0]
+ except IndexError:
+ v = py.process.cmdexec("svn -q --version")
+        v = v.strip()
+ v = '.'.join(v.split('.')[:2])
+ ver.append(v)
+ return v
+
+def _escape_helper(text):
+ text = str(text)
+ if sys.platform != 'win32':
+ text = str(text).replace('$', '\\$')
+ return text
+
+def _check_for_bad_chars(text, allowed_chars=ALLOWED_CHARS):
+ for c in str(text):
+ if c.isalnum():
+ continue
+ if c in allowed_chars:
+ continue
+ return True
+ return False
+
+def checkbadchars(url):
+ # (hpk) not quite sure about the exact purpose, guido w.?
+ proto, uri = url.split("://", 1)
+ if proto != "file":
+ host, uripath = uri.split('/', 1)
+ # only check for bad chars in the non-protocol parts
+ if (_check_for_bad_chars(host, ALLOWED_CHARS_HOST) \
+ or _check_for_bad_chars(uripath, ALLOWED_CHARS)):
+ raise ValueError("bad char in %r" % (url, ))
+
+
+#_______________________________________________________________
+
+class SvnPathBase(common.PathBase):
+ """ Base implementation for SvnPath implementations. """
+ sep = '/'
+
+ def _geturl(self):
+ return self.strpath
+ url = property(_geturl, None, None, "url of this svn-path.")
+
+ def __str__(self):
+ """ return a string representation (including rev-number) """
+ return self.strpath
+
+ def __hash__(self):
+ return hash(self.strpath)
+
+ def new(self, **kw):
+ """ create a modified version of this path. A 'rev' argument
+ indicates a new revision.
+ the following keyword arguments modify various path parts::
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ obj = object.__new__(self.__class__)
+ obj.rev = kw.get('rev', self.rev)
+ obj.auth = kw.get('auth', self.auth)
+ dirname, basename, purebasename, ext = self._getbyspec(
+ "dirname,basename,purebasename,ext")
+ if 'basename' in kw:
+ if 'purebasename' in kw or 'ext' in kw:
+ raise ValueError("invalid specification %r" % kw)
+ else:
+ pb = kw.setdefault('purebasename', purebasename)
+ ext = kw.setdefault('ext', ext)
+ if ext and not ext.startswith('.'):
+ ext = '.' + ext
+ kw['basename'] = pb + ext
+
+ kw.setdefault('dirname', dirname)
+ kw.setdefault('sep', self.sep)
+ if kw['basename']:
+ obj.strpath = "%(dirname)s%(sep)s%(basename)s" % kw
+ else:
+ obj.strpath = "%(dirname)s" % kw
+ return obj
+
+ def _getbyspec(self, spec):
+ """ get specified parts of the path. 'arg' is a string
+ with comma separated path parts. The parts are returned
+ in exactly the order of the specification.
+
+ you may specify the following parts:
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ res = []
+ parts = self.strpath.split(self.sep)
+ for name in spec.split(','):
+ name = name.strip()
+ if name == 'dirname':
+ res.append(self.sep.join(parts[:-1]))
+ elif name == 'basename':
+ res.append(parts[-1])
+ else:
+ basename = parts[-1]
+ i = basename.rfind('.')
+ if i == -1:
+ purebasename, ext = basename, ''
+ else:
+ purebasename, ext = basename[:i], basename[i:]
+ if name == 'purebasename':
+ res.append(purebasename)
+ elif name == 'ext':
+ res.append(ext)
+ else:
+ raise NameError("Don't know part %r" % name)
+ return res
+
+ def __eq__(self, other):
+ """ return true if path and rev attributes each match """
+ return (str(self) == str(other) and
+                self.rev == other.rev)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def join(self, *args):
+ """ return a new Path (with the same revision) which is composed
+ of the self Path followed by 'args' path components.
+ """
+ if not args:
+ return self
+
+ args = tuple([arg.strip(self.sep) for arg in args])
+ parts = (self.strpath, ) + args
+ newpath = self.__class__(self.sep.join(parts), self.rev, self.auth)
+ return newpath
+
+ def propget(self, name):
+ """ return the content of the given property. """
+ value = self._propget(name)
+ return value
+
+ def proplist(self):
+ """ list all property names. """
+ content = self._proplist()
+ return content
+
+ def size(self):
+ """ Return the size of the file content of the Path. """
+ return self.info().size
+
+ def mtime(self):
+ """ Return the last modification time of the file. """
+ return self.info().mtime
+
+ # shared help methods
+
+ def _escape(self, cmd):
+ return _escape_helper(cmd)
+
+
+ #def _childmaxrev(self):
+ # """ return maximum revision number of childs (or self.rev if no childs) """
+ # rev = self.rev
+ # for name, info in self._listdir_nameinfo():
+ # rev = max(rev, info.created_rev)
+ # return rev
+
+ #def _getlatestrevision(self):
+ # """ return latest repo-revision for this path. """
+ # url = self.strpath
+ # path = self.__class__(url, None)
+ #
+ # # we need a long walk to find the root-repo and revision
+ # while 1:
+ # try:
+ # rev = max(rev, path._childmaxrev())
+ # previous = path
+ # path = path.dirpath()
+ # except (IOError, process.cmdexec.Error):
+ # break
+ # if rev is None:
+ # raise IOError, "could not determine newest repo revision for %s" % self
+ # return rev
+
+ class Checkers(common.Checkers):
+ def dir(self):
+ try:
+ return self.path.info().kind == 'dir'
+ except py.error.Error:
+ return self._listdirworks()
+
+ def _listdirworks(self):
+ try:
+ self.path.listdir()
+ except py.error.ENOENT:
+ return False
+ else:
+ return True
+
+ def file(self):
+ try:
+ return self.path.info().kind == 'file'
+ except py.error.ENOENT:
+ return False
+
+ def exists(self):
+ try:
+ return self.path.info()
+ except py.error.ENOENT:
+ return self._listdirworks()
+
+def parse_apr_time(timestr):
+ i = timestr.rfind('.')
+ if i == -1:
+ raise ValueError("could not parse %s" % timestr)
+ timestr = timestr[:i]
+ parsedtime = time.strptime(timestr, "%Y-%m-%dT%H:%M:%S")
+ return time.mktime(parsedtime)
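
For example, the APR-style timestamps found in ``svn log --xml`` output parse as follows (the fractional seconds are discarded and the result comes from ``time.mktime``)::

    parse_apr_time("2003-07-29T20:05:11.598637Z")   # -> seconds since the epoch (float)
    parse_apr_time("not-a-timestamp")               # raises ValueError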
+
+class PropListDict(dict):
+ """ a Dictionary which fetches values (InfoSvnCommand instances) lazily"""
+ def __init__(self, path, keynames):
+ dict.__init__(self, [(x, None) for x in keynames])
+ self.path = path
+
+ def __getitem__(self, key):
+ value = dict.__getitem__(self, key)
+ if value is None:
+ value = self.path.propget(key)
+ dict.__setitem__(self, key, value)
+ return value
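
A small sketch of the lazy behaviour, using a stand-in object instead of a real svn path (the real code passes an SvnWCCommandPath or SvnPathBase instance)::

    class _FakePath:                         # stand-in for an svn path object
        def propget(self, name):
            print("fetching %s" % name)
            return "value-of-" + name

    props = PropListDict(_FakePath(), ["svn:ignore", "svn:eol-style"])
    props["svn:ignore"]    # prints "fetching svn:ignore", returns "value-of-svn:ignore"
    props["svn:ignore"]    # cached now, propget() is not called again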
+
+def fixlocale():
+ if sys.platform != 'win32':
+ return 'LC_ALL=C '
+ return ''
+
+# some nasty chunk of code to solve path and url conversion and quoting issues
+ILLEGAL_CHARS = '* | \\ / : < > ? \t \n \x0b \x0c \r'.split(' ')
+if os.sep in ILLEGAL_CHARS:
+ ILLEGAL_CHARS.remove(os.sep)
+ISWINDOWS = sys.platform == 'win32'
+_reg_allow_disk = re.compile(r'^([a-z]\:\\)?[^:]+$', re.I)
+def _check_path(path):
+ illegal = ILLEGAL_CHARS[:]
+ sp = path.strpath
+ if ISWINDOWS:
+ illegal.remove(':')
+ if not _reg_allow_disk.match(sp):
+ raise ValueError('path may not contain a colon (:)')
+ for char in sp:
+ if char not in string.printable or char in illegal:
+ raise ValueError('illegal character %r in path' % (char,))
+
+def path_to_fspath(path, addat=True):
+ _check_path(path)
+ sp = path.strpath
+ if addat and path.rev != -1:
+ sp = '%s@%s' % (sp, path.rev)
+ elif addat:
+ sp = '%s@HEAD' % (sp,)
+ return sp
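
A sketch of the peg-revision convention applied here, using a minimal stand-in for a working-copy path object (only ``strpath`` and ``rev`` are consulted)::

    class _FakeWCPath:          # stand-in; real callers pass an SvnWCCommandPath
        strpath = "/tmp/wc/trunk"
        rev = 42

    path_to_fspath(_FakeWCPath())          # -> '/tmp/wc/trunk@42'
    path_to_fspath(_FakeWCPath(), False)   # -> '/tmp/wc/trunk' (no peg revision)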
+
+def url_from_path(path):
+ fspath = path_to_fspath(path, False)
+ from urllib import quote
+ if ISWINDOWS:
+ match = _reg_allow_disk.match(fspath)
+ fspath = fspath.replace('\\', '/')
+ if match.group(1):
+ fspath = '/%s%s' % (match.group(1).replace('\\', '/'),
+ quote(fspath[len(match.group(1)):]))
+ else:
+ fspath = quote(fspath)
+ else:
+ fspath = quote(fspath)
+ if path.rev != -1:
+ fspath = '%s@%s' % (fspath, path.rev)
+ else:
+ fspath = '%s@HEAD' % (fspath,)
+ return 'file://%s' % (fspath,)
+
+class SvnAuth(object):
+ """ container for auth information for Subversion """
+ def __init__(self, username, password, cache_auth=True, interactive=True):
+ self.username = username
+ self.password = password
+ self.cache_auth = cache_auth
+ self.interactive = interactive
+
+ def makecmdoptions(self):
+ uname = self.username.replace('"', '\\"')
+ passwd = self.password.replace('"', '\\"')
+ ret = []
+ if uname:
+ ret.append('--username="%s"' % (uname,))
+ if passwd:
+ ret.append('--password="%s"' % (passwd,))
+ if not self.cache_auth:
+ ret.append('--no-auth-cache')
+ if not self.interactive:
+ ret.append('--non-interactive')
+ return ' '.join(ret)
+
+ def __str__(self):
+ return "<SvnAuth username=%s ...>" %(self.username,)
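
For example (credentials made up)::

    auth = SvnAuth("alice", "s3cret", cache_auth=False, interactive=False)
    auth.makecmdoptions()
    # -> '--username="alice" --password="s3cret" --no-auth-cache --non-interactive'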
+
+rex_blame = re.compile(r'\s*(\d+)\s+(\S+) (.*)')
+
+class SvnWCCommandPath(common.PathBase):
+ """ path implementation offering access/modification to svn working copies.
+ It has methods similar to the functions in os.path and similar to the
+ commands of the svn client.
+ """
+ sep = os.sep
+
+ def __new__(cls, wcpath=None, auth=None):
+ self = object.__new__(cls)
+ if isinstance(wcpath, cls):
+ if wcpath.__class__ == cls:
+ return wcpath
+ wcpath = wcpath.localpath
+ if _check_for_bad_chars(str(wcpath),
+ ALLOWED_CHARS):
+ raise ValueError("bad char in wcpath %s" % (wcpath, ))
+ self.localpath = py.path.local(wcpath)
+ self.auth = auth
+ return self
+
+ strpath = property(lambda x: str(x.localpath), None, None, "string path")
+ rev = property(lambda x: x.info(usecache=0).rev, None, None, "revision")
+
+ def __eq__(self, other):
+ return self.localpath == getattr(other, 'localpath', None)
+
+ def _geturl(self):
+ if getattr(self, '_url', None) is None:
+ info = self.info()
+ self._url = info.url #SvnPath(info.url, info.rev)
+ assert isinstance(self._url, py.builtin._basestring)
+ return self._url
+
+ url = property(_geturl, None, None, "url of this WC item")
+
+ def _escape(self, cmd):
+ return _escape_helper(cmd)
+
+ def dump(self, obj):
+ """ pickle object into path location"""
+ return self.localpath.dump(obj)
+
+ def svnurl(self):
+ """ return current SvnPath for this WC-item. """
+ info = self.info()
+ return py.path.svnurl(info.url)
+
+ def __repr__(self):
+ return "svnwc(%r)" % (self.strpath) # , self._url)
+
+ def __str__(self):
+ return str(self.localpath)
+
+ def _makeauthoptions(self):
+ if self.auth is None:
+ return ''
+ return self.auth.makecmdoptions()
+
+ def _authsvn(self, cmd, args=None):
+ args = args and list(args) or []
+ args.append(self._makeauthoptions())
+ return self._svn(cmd, *args)
+
+ def _svn(self, cmd, *args):
+ l = ['svn %s' % cmd]
+ args = [self._escape(item) for item in args]
+ l.extend(args)
+ l.append('"%s"' % self._escape(self.strpath))
+ # try fixing the locale because we can't otherwise parse
+ string = fixlocale() + " ".join(l)
+ try:
+ try:
+ key = 'LC_MESSAGES'
+ hold = os.environ.get(key)
+ os.environ[key] = 'C'
+ out = py.process.cmdexec(string)
+ finally:
+ if hold:
+ os.environ[key] = hold
+ else:
+ del os.environ[key]
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ strerr = e.err.lower()
+ if strerr.find('not found') != -1:
+ raise py.error.ENOENT(self)
+ elif strerr.find("E200009:") != -1:
+ raise py.error.ENOENT(self)
+ if (strerr.find('file exists') != -1 or
+ strerr.find('file already exists') != -1 or
+ strerr.find('w150002:') != -1 or
+ strerr.find("can't create directory") != -1):
+ raise py.error.EEXIST(strerr) #self)
+ raise
+ return out
+
+ def switch(self, url):
+ """ switch to given URL. """
+ self._authsvn('switch', [url])
+
+ def checkout(self, url=None, rev=None):
+ """ checkout from url to local wcpath. """
+ args = []
+ if url is None:
+ url = self.url
+ if rev is None or rev == -1:
+ if (sys.platform != 'win32' and
+ _getsvnversion() == '1.3'):
+ url += "@HEAD"
+ else:
+ if _getsvnversion() == '1.3':
+ url += "@%d" % rev
+ else:
+ args.append('-r' + str(rev))
+ args.append(url)
+ self._authsvn('co', args)
+
+ def update(self, rev='HEAD', interactive=True):
+ """ update working copy item to given revision. (None -> HEAD). """
+ opts = ['-r', rev]
+ if not interactive:
+ opts.append("--non-interactive")
+ self._authsvn('up', opts)
+
+ def write(self, content, mode='w'):
+ """ write content into local filesystem wc. """
+ self.localpath.write(content, mode)
+
+ def dirpath(self, *args):
+ """ return the directory Path of the current Path. """
+ return self.__class__(self.localpath.dirpath(*args), auth=self.auth)
+
+ def _ensuredirs(self):
+ parent = self.dirpath()
+ if parent.check(dir=0):
+ parent._ensuredirs()
+ if self.check(dir=0):
+ self.mkdir()
+ return self
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). if you specify a keyword argument 'directory=True'
+ then the path is forced to be a directory path.
+ """
+ p = self.join(*args)
+ if p.check():
+ if p.check(versioned=False):
+ p.add()
+ return p
+ if kwargs.get('dir', 0):
+ return p._ensuredirs()
+ parent = p.dirpath()
+ parent._ensuredirs()
+ p.write("")
+ p.add()
+ return p
+
+ def mkdir(self, *args):
+ """ create & return the directory joined with args. """
+ if args:
+ return self.join(*args).mkdir()
+ else:
+ self._svn('mkdir')
+ return self
+
+ def add(self):
+ """ add ourself to svn """
+ self._svn('add')
+
+ def remove(self, rec=1, force=1):
+ """ remove a file or a directory tree. 'rec'ursive is
+ ignored and considered always true (because of
+ underlying svn semantics.
+ """
+ assert rec, "svn cannot remove non-recursively"
+ if not self.check(versioned=True):
+ # not added to svn (anymore?), just remove
+ py.path.local(self).remove()
+ return
+ flags = []
+ if force:
+ flags.append('--force')
+ self._svn('remove', *flags)
+
+ def copy(self, target):
+ """ copy path to target."""
+ py.process.cmdexec("svn copy %s %s" %(str(self), str(target)))
+
+ def rename(self, target):
+ """ rename this path to target. """
+ py.process.cmdexec("svn move --force %s %s" %(str(self), str(target)))
+
+ def lock(self):
+ """ set a lock (exclusive) on the resource """
+ out = self._authsvn('lock').strip()
+ if not out:
+ # warning or error, raise exception
+ raise ValueError("unknown error in svn lock command")
+
+ def unlock(self):
+ """ unset a previously set lock """
+ out = self._authsvn('unlock').strip()
+ if out.startswith('svn:'):
+ # warning or error, raise exception
+ raise Exception(out[4:])
+
+ def cleanup(self):
+ """ remove any locks from the resource """
+ # XXX should be fixed properly!!!
+ try:
+ self.unlock()
+ except:
+ pass
+
+ def status(self, updates=0, rec=0, externals=0):
+ """ return (collective) Status object for this file. """
+ # http://svnbook.red-bean.com/book.html#svn-ch-3-sect-4.3.1
+ # 2201 2192 jum test
+ # XXX
+ if externals:
+ raise ValueError("XXX cannot perform status() "
+ "on external items yet")
+ else:
+ #1.2 supports: externals = '--ignore-externals'
+ externals = ''
+ if rec:
+ rec= ''
+ else:
+ rec = '--non-recursive'
+
+ # XXX does not work on all subversion versions
+ #if not externals:
+ # externals = '--ignore-externals'
+
+ if updates:
+ updates = '-u'
+ else:
+ updates = ''
+
+ try:
+ cmd = 'status -v --xml --no-ignore %s %s %s' % (
+ updates, rec, externals)
+ out = self._authsvn(cmd)
+ except py.process.cmdexec.Error:
+ cmd = 'status -v --no-ignore %s %s %s' % (
+ updates, rec, externals)
+ out = self._authsvn(cmd)
+ rootstatus = WCStatus(self).fromstring(out, self)
+ else:
+ rootstatus = XMLWCStatus(self).fromstring(out, self)
+ return rootstatus
+
+ def diff(self, rev=None):
+ """ return a diff of the current path against revision rev (defaulting
+ to the last one).
+ """
+ args = []
+ if rev is not None:
+ args.append("-r %d" % rev)
+ out = self._authsvn('diff', args)
+ return out
+
+ def blame(self):
+ """ return a list of tuples of three elements:
+ (revision, committer, line)
+ """
+ out = self._svn('blame')
+ result = []
+ blamelines = out.splitlines()
+ reallines = py.path.svnurl(self.url).readlines()
+ for i, (blameline, line) in enumerate(
+ zip(blamelines, reallines)):
+ m = rex_blame.match(blameline)
+ if not m:
+ raise ValueError("output line %r of svn blame does not match "
+ "expected format" % (line, ))
+ rev, name, _ = m.groups()
+ result.append((int(rev), name, line))
+ return result
+
+ _rex_commit = re.compile(r'.*Committed revision (\d+)\.$', re.DOTALL)
+ def commit(self, msg='', rec=1):
+ """ commit with support for non-recursive commits """
+ # XXX i guess escaping should be done better here?!?
+ cmd = 'commit -m "%s" --force-log' % (msg.replace('"', '\\"'),)
+ if not rec:
+ cmd += ' -N'
+ out = self._authsvn(cmd)
+ try:
+ del cache.info[self]
+ except KeyError:
+ pass
+ if out:
+ m = self._rex_commit.match(out)
+ return int(m.group(1))
+
+ def propset(self, name, value, *args):
+ """ set property name to value on this path. """
+ d = py.path.local.mkdtemp()
+ try:
+ p = d.join('value')
+ p.write(value)
+ self._svn('propset', name, '--file', str(p), *args)
+ finally:
+ d.remove()
+
+ def propget(self, name):
+ """ get property name on this path. """
+ res = self._svn('propget', name)
+ return res[:-1] # strip trailing newline
+
+ def propdel(self, name):
+ """ delete property name on this path. """
+ res = self._svn('propdel', name)
+ return res[:-1] # strip trailing newline
+
+ def proplist(self, rec=0):
+ """ return a mapping of property names to property values.
+If rec is True, then return a dictionary mapping sub-paths to such mappings.
+"""
+ if rec:
+ res = self._svn('proplist -R')
+ return make_recursive_propdict(self, res)
+ else:
+ res = self._svn('proplist')
+ lines = res.split('\n')
+ lines = [x.strip() for x in lines[1:]]
+ return PropListDict(self, lines)
+
+ def revert(self, rec=0):
+ """ revert the local changes of this path. if rec is True, do so
+recursively. """
+ if rec:
+ result = self._svn('revert -R')
+ else:
+ result = self._svn('revert')
+ return result
+
+ def new(self, **kw):
+ """ create a modified version of this path. A 'rev' argument
+ indicates a new revision.
+ the following keyword arguments modify various path parts:
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ if kw:
+ localpath = self.localpath.new(**kw)
+ else:
+ localpath = self.localpath
+ return self.__class__(localpath, auth=self.auth)
+
+ def join(self, *args, **kwargs):
+ """ return a new Path (with the same revision) which is composed
+ of the self Path followed by 'args' path components.
+ """
+ if not args:
+ return self
+ localpath = self.localpath.join(*args, **kwargs)
+ return self.__class__(localpath, auth=self.auth)
+
+ def info(self, usecache=1):
+ """ return an Info structure with svn-provided information. """
+ info = usecache and cache.info.get(self)
+ if not info:
+ try:
+ output = self._svn('info')
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('Path is not a working copy directory') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find("is not under version control") != -1:
+ raise py.error.ENOENT(self, e.err)
+ raise
+ # XXX SVN 1.3 has output on stderr instead of stdout (while it does
+ # return 0!), which is a bit nasty, but we assume nothing relevant is
+ # written to stderr...
+ if (output.strip() == '' or
+ output.lower().find('not a versioned resource') != -1):
+ raise py.error.ENOENT(self, output)
+ info = InfoSvnWCCommand(output)
+
+ # Can't reliably compare on Windows without access to win32api
+ if sys.platform != 'win32':
+ if info.path != self.localpath:
+ raise py.error.ENOENT(self, "not a versioned resource:" +
+ " %s != %s" % (info.path, self.localpath))
+ cache.info[self] = info
+ return info
+
+ def listdir(self, fil=None, sort=None):
+ """ return a sequence of Paths.
+
+ listdir will return either a tuple or a list of paths
+ depending on implementation choices.
+ """
+ if isinstance(fil, str):
+ fil = common.FNMatcher(fil)
+ # XXX unify argument naming with LocalPath.listdir
+ def notsvn(path):
+ return path.basename != '.svn'
+
+ paths = []
+ for localpath in self.localpath.listdir(notsvn):
+ p = self.__class__(localpath, auth=self.auth)
+ if notsvn(p) and (not fil or fil(p)):
+ paths.append(p)
+ self._sortlist(paths, sort)
+ return paths
+
+ def open(self, mode='r'):
+ """ return an opened file with the given mode. """
+ return open(self.strpath, mode)
+
+ def _getbyspec(self, spec):
+ return self.localpath._getbyspec(spec)
+
+ class Checkers(py.path.local.Checkers):
+ def __init__(self, path):
+ self.svnwcpath = path
+ self.path = path.localpath
+ def versioned(self):
+ try:
+ s = self.svnwcpath.info()
+ except (py.error.ENOENT, py.error.EEXIST):
+ return False
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('is not a working copy')!=-1:
+ return False
+ if e.err.lower().find('not a versioned resource') != -1:
+ return False
+ raise
+ else:
+ return True
+
+ def log(self, rev_start=None, rev_end=1, verbose=False):
+ """ return a list of LogEntry instances for this path.
+rev_start is the starting revision (defaulting to HEAD).
+rev_end is the last revision (defaulting to the first one).
+if verbose is True, then the LogEntry instances also know which files changed.
+"""
+ assert self.check() # make it simpler for the pipe
+ rev_start = rev_start is None and "HEAD" or rev_start
+ rev_end = rev_end is None and "HEAD" or rev_end
+ if rev_start == "HEAD" and rev_end == 1:
+ rev_opt = ""
+ else:
+ rev_opt = "-r %s:%s" % (rev_start, rev_end)
+ verbose_opt = verbose and "-v" or ""
+ locale_env = fixlocale()
+ # some blather on stderr
+ auth_opt = self._makeauthoptions()
+ #stdin, stdout, stderr = os.popen3(locale_env +
+ # 'svn log --xml %s %s %s "%s"' % (
+ # rev_opt, verbose_opt, auth_opt,
+ # self.strpath))
+ cmd = locale_env + 'svn log --xml %s %s %s "%s"' % (
+ rev_opt, verbose_opt, auth_opt, self.strpath)
+
+ popen = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ shell=True,
+ )
+ stdout, stderr = popen.communicate()
+ stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+ minidom,ExpatError = importxml()
+ try:
+ tree = minidom.parseString(stdout)
+ except ExpatError:
+ raise ValueError('no such revision')
+ result = []
+ for logentry in filter(None, tree.firstChild.childNodes):
+ if logentry.nodeType == logentry.ELEMENT_NODE:
+ result.append(LogEntry(logentry))
+ return result
+
+ def size(self):
+ """ Return the size of the file content of the Path. """
+ return self.info().size
+
+ def mtime(self):
+ """ Return the last modification time of the file. """
+ return self.info().mtime
+
+ def __hash__(self):
+ return hash((self.strpath, self.__class__, self.auth))
+
+
+class WCStatus:
+ attrnames = ('modified','added', 'conflict', 'unchanged', 'external',
+ 'deleted', 'prop_modified', 'unknown', 'update_available',
+ 'incomplete', 'kindmismatch', 'ignored', 'locked', 'replaced'
+ )
+
+ def __init__(self, wcpath, rev=None, modrev=None, author=None):
+ self.wcpath = wcpath
+ self.rev = rev
+ self.modrev = modrev
+ self.author = author
+
+ for name in self.attrnames:
+ setattr(self, name, [])
+
+ def allpath(self, sort=True, **kw):
+ d = {}
+ for name in self.attrnames:
+ if name not in kw or kw[name]:
+ for path in getattr(self, name):
+ d[path] = 1
+ l = d.keys()
+ if sort:
+ l.sort()
+ return l
+
+ # XXX a bit scary to assume there's always 2 spaces between username and
+ # path, however with win32 allowing spaces in user names there doesn't
+ # seem to be a more solid approach :(
+ _rex_status = re.compile(r'\s+(\d+|-)\s+(\S+)\s+(.+?)\s{2,}(.*)')
+
+ def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
+ """ return a new WCStatus object parsed from the status output 'data'
+ """
+ rootstatus = WCStatus(rootwcpath, rev, modrev, author)
+ update_rev = None
+ for line in data.split('\n'):
+ if not line.strip():
+ continue
+ #print "processing %r" % line
+ flags, rest = line[:8], line[8:]
+ # first column
+ c0,c1,c2,c3,c4,c5,x6,c7 = flags
+ #if '*' in line:
+ # print "flags", repr(flags), "rest", repr(rest)
+
+ if c0 in '?XI':
+ fn = line.split(None, 1)[1]
+ if c0 == '?':
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.unknown.append(wcpath)
+ elif c0 == 'X':
+ wcpath = rootwcpath.__class__(
+ rootwcpath.localpath.join(fn, abs=1),
+ auth=rootwcpath.auth)
+ rootstatus.external.append(wcpath)
+ elif c0 == 'I':
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.ignored.append(wcpath)
+
+ continue
+
+ #elif c0 in '~!' or c4 == 'S':
+ # raise NotImplementedError("received flag %r" % c0)
+
+ m = WCStatus._rex_status.match(rest)
+ if not m:
+ if c7 == '*':
+ fn = rest.strip()
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.update_available.append(wcpath)
+ continue
+ if line.lower().find('against revision:')!=-1:
+ update_rev = int(rest.split(':')[1].strip())
+ continue
+ if line.lower().find('status on external') > -1:
+ # XXX not sure what to do here... perhaps we want to
+ # store some state instead of just continuing, as right
+ # now it makes the top-level external get added twice
+ # (once as external, once as 'normal' unchanged item)
+ # because of the way SVN presents external items
+ continue
+ # keep trying
+ raise ValueError("could not parse line %r" % line)
+ else:
+ rev, modrev, author, fn = m.groups()
+ wcpath = rootwcpath.join(fn, abs=1)
+ #assert wcpath.check()
+ if c0 == 'M':
+ assert wcpath.check(file=1), "didn't expect a directory with changed content here"
+ rootstatus.modified.append(wcpath)
+ elif c0 == 'A' or c3 == '+' :
+ rootstatus.added.append(wcpath)
+ elif c0 == 'D':
+ rootstatus.deleted.append(wcpath)
+ elif c0 == 'C':
+ rootstatus.conflict.append(wcpath)
+ elif c0 == '~':
+ rootstatus.kindmismatch.append(wcpath)
+ elif c0 == '!':
+ rootstatus.incomplete.append(wcpath)
+ elif c0 == 'R':
+ rootstatus.replaced.append(wcpath)
+ elif not c0.strip():
+ rootstatus.unchanged.append(wcpath)
+ else:
+ raise NotImplementedError("received flag %r" % c0)
+
+ if c1 == 'M':
+ rootstatus.prop_modified.append(wcpath)
+ # XXX do we cover all client versions here?
+ if c2 == 'L' or c5 == 'K':
+ rootstatus.locked.append(wcpath)
+ if c7 == '*':
+ rootstatus.update_available.append(wcpath)
+
+ if wcpath == rootwcpath:
+ rootstatus.rev = rev
+ rootstatus.modrev = modrev
+ rootstatus.author = author
+ if update_rev:
+ rootstatus.update_rev = update_rev
+ continue
+ return rootstatus
+ fromstring = staticmethod(fromstring)
+
+class XMLWCStatus(WCStatus):
+ def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
+ """ parse 'data' (XML string as output by 'svn status --xml') into a status obj
+ """
+ # XXX for externals, the path is shown twice: once
+ # with external information, and once with full info as if
+ # the item was a normal non-external... the current way of
+ # dealing with this issue is by ignoring it - this does make
+ # externals appear as external items as well as 'normal',
+ # unchanged ones in the status object so this is far from ideal
+ rootstatus = WCStatus(rootwcpath, rev, modrev, author)
+ update_rev = None
+ minidom, ExpatError = importxml()
+ try:
+ doc = minidom.parseString(data)
+ except ExpatError:
+ e = sys.exc_info()[1]
+ raise ValueError(str(e))
+ urevels = doc.getElementsByTagName('against')
+ if urevels:
+ rootstatus.update_rev = urevels[-1].getAttribute('revision')
+ for entryel in doc.getElementsByTagName('entry'):
+ path = entryel.getAttribute('path')
+ statusel = entryel.getElementsByTagName('wc-status')[0]
+ itemstatus = statusel.getAttribute('item')
+
+ if itemstatus == 'unversioned':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.unknown.append(wcpath)
+ continue
+ elif itemstatus == 'external':
+ wcpath = rootwcpath.__class__(
+ rootwcpath.localpath.join(path, abs=1),
+ auth=rootwcpath.auth)
+ rootstatus.external.append(wcpath)
+ continue
+ elif itemstatus == 'ignored':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.ignored.append(wcpath)
+ continue
+ elif itemstatus == 'incomplete':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.incomplete.append(wcpath)
+ continue
+
+ rev = statusel.getAttribute('revision')
+ if itemstatus == 'added' or itemstatus == 'none':
+ rev = '0'
+ modrev = '?'
+ author = '?'
+ date = ''
+ elif itemstatus == "replaced":
+ pass
+ else:
+ #print entryel.toxml()
+ commitel = entryel.getElementsByTagName('commit')[0]
+ if commitel:
+ modrev = commitel.getAttribute('revision')
+ author = ''
+ author_els = commitel.getElementsByTagName('author')
+ if author_els:
+ for c in author_els[0].childNodes:
+ author += c.nodeValue
+ date = ''
+ for c in commitel.getElementsByTagName('date')[0]\
+ .childNodes:
+ date += c.nodeValue
+
+ wcpath = rootwcpath.join(path, abs=1)
+
+ assert itemstatus != 'modified' or wcpath.check(file=1), (
+ 'didn\'t expect a directory with changed content here')
+
+ itemattrname = {
+ 'normal': 'unchanged',
+ 'unversioned': 'unknown',
+ 'conflicted': 'conflict',
+ 'none': 'added',
+ }.get(itemstatus, itemstatus)
+
+ attr = getattr(rootstatus, itemattrname)
+ attr.append(wcpath)
+
+ propsstatus = statusel.getAttribute('props')
+ if propsstatus not in ('none', 'normal'):
+ rootstatus.prop_modified.append(wcpath)
+
+ if wcpath == rootwcpath:
+ rootstatus.rev = rev
+ rootstatus.modrev = modrev
+ rootstatus.author = author
+ rootstatus.date = date
+
+ # handle repos-status element (remote info)
+ rstatusels = entryel.getElementsByTagName('repos-status')
+ if rstatusels:
+ rstatusel = rstatusels[0]
+ ritemstatus = rstatusel.getAttribute('item')
+ if ritemstatus in ('added', 'modified'):
+ rootstatus.update_available.append(wcpath)
+
+ lockels = entryel.getElementsByTagName('lock')
+ if len(lockels):
+ rootstatus.locked.append(wcpath)
+
+ return rootstatus
+ fromstring = staticmethod(fromstring)
+
+class InfoSvnWCCommand:
+ def __init__(self, output):
+ # Path: test
+ # URL: http://codespeak.net/svn/std.path/trunk/dist/std.path/test
+ # Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
+ # Revision: 2151
+ # Node Kind: directory
+ # Schedule: normal
+ # Last Changed Author: hpk
+ # Last Changed Rev: 2100
+ # Last Changed Date: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
+ # Properties Last Updated: 2003-11-03 14:47:48 +0100 (Mon, 03 Nov 2003)
+
+ d = {}
+ for line in output.split('\n'):
+ if not line.strip():
+ continue
+ key, value = line.split(':', 1)
+ key = key.lower().replace(' ', '')
+ value = value.strip()
+ d[key] = value
+ try:
+ self.url = d['url']
+ except KeyError:
+ raise ValueError("Not a versioned resource")
+ #raise ValueError, "Not a versioned resource %r" % path
+ self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
+ try:
+ self.rev = int(d['revision'])
+ except KeyError:
+ self.rev = None
+
+ self.path = py.path.local(d['path'])
+ self.size = self.path.size()
+ if 'lastchangedrev' in d:
+ self.created_rev = int(d['lastchangedrev'])
+ if 'lastchangedauthor' in d:
+ self.last_author = d['lastchangedauthor']
+ if 'lastchangeddate' in d:
+ self.mtime = parse_wcinfotime(d['lastchangeddate'])
+ self.time = self.mtime * 1000000
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+def parse_wcinfotime(timestr):
+ """ Returns seconds since epoch, UTC. """
+ # example: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
+ m = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr)
+ if not m:
+ raise ValueError("timestring %r does not match" % timestr)
+ timestr, timezone = m.groups()
+ # do not handle timezone specially, return value should be UTC
+ parsedtime = time.strptime(timestr, "%Y-%m-%d %H:%M:%S")
+ return calendar.timegm(parsedtime)
+
+def make_recursive_propdict(wcroot,
+ output,
+ rex = re.compile("Properties on '(.*)':")):
+ """ Return a dictionary of path->PropListDict mappings. """
+ lines = [x for x in output.split('\n') if x]
+ pdict = {}
+ while lines:
+ line = lines.pop(0)
+ m = rex.match(line)
+ if not m:
+ raise ValueError("could not parse propget-line: %r" % line)
+ path = m.groups()[0]
+ wcpath = wcroot.join(path, abs=1)
+ propnames = []
+ while lines and lines[0].startswith(' '):
+ propname = lines.pop(0).strip()
+ propnames.append(propname)
+ assert propnames, "must have found properties!"
+ pdict[wcpath] = PropListDict(wcpath, propnames)
+ return pdict
+
+
+def importxml(cache=[]):
+ if cache:
+ return cache
+ from xml.dom import minidom
+ from xml.parsers.expat import ExpatError
+ cache.extend([minidom, ExpatError])
+ return cache
+
+class LogEntry:
+ def __init__(self, logentry):
+ self.rev = int(logentry.getAttribute('revision'))
+ for lpart in filter(None, logentry.childNodes):
+ if lpart.nodeType == lpart.ELEMENT_NODE:
+ if lpart.nodeName == 'author':
+ self.author = lpart.firstChild.nodeValue
+ elif lpart.nodeName == 'msg':
+ if lpart.firstChild:
+ self.msg = lpart.firstChild.nodeValue
+ else:
+ self.msg = ''
+ elif lpart.nodeName == 'date':
+ #2003-07-29T20:05:11.598637Z
+ timestr = lpart.firstChild.nodeValue
+ self.date = parse_apr_time(timestr)
+ elif lpart.nodeName == 'paths':
+ self.strpaths = []
+ for ppart in filter(None, lpart.childNodes):
+ if ppart.nodeType == ppart.ELEMENT_NODE:
+ self.strpaths.append(PathEntry(ppart))
+ def __repr__(self):
+ return '<Logentry rev=%d author=%s date=%s>' % (
+ self.rev, self.author, self.date)
+
+
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_process/__init__.py b/testing/web-platform/tests/tools/third_party/py/py/_process/__init__.py
new file mode 100644
index 0000000000..86c714ad1a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_process/__init__.py
@@ -0,0 +1 @@
+""" high-level sub-process handling """
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_process/cmdexec.py b/testing/web-platform/tests/tools/third_party/py/py/_process/cmdexec.py
new file mode 100644
index 0000000000..f83a249402
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_process/cmdexec.py
@@ -0,0 +1,49 @@
+import sys
+import subprocess
+import py
+from subprocess import Popen, PIPE
+
+def cmdexec(cmd):
+ """ return unicode output of executing 'cmd' in a separate process.
+
+ raise a cmdexec.Error exception if the command failed.
+ the exception will provide an 'err' attribute containing
+ the error-output from the command.
+ if the subprocess module does not return proper unicode strings, the output
+ is decoded with sys.getdefaultencoding(); if that is unavailable, 'UTF-8' is used.
+ """
+ process = subprocess.Popen(cmd, shell=True,
+ universal_newlines=True,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = process.communicate()
+ if sys.version_info[0] < 3: # on py3 we get unicode strings, on py2 not
+ try:
+ default_encoding = sys.getdefaultencoding() # jython may not have it
+ except AttributeError:
+ default_encoding = sys.stdout.encoding or 'UTF-8'
+ out = unicode(out, process.stdout.encoding or default_encoding)
+ err = unicode(err, process.stderr.encoding or default_encoding)
+ status = process.poll()
+ if status:
+ raise ExecutionFailed(status, status, cmd, out, err)
+ return out
+
+class ExecutionFailed(py.error.Error):
+ def __init__(self, status, systemstatus, cmd, out, err):
+ Exception.__init__(self)
+ self.status = status
+ self.systemstatus = systemstatus
+ self.cmd = cmd
+ self.err = err
+ self.out = out
+
+ def __str__(self):
+ return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err)
+
+# export the exception under the name 'py.process.cmdexec.Error'
+cmdexec.Error = ExecutionFailed
+try:
+ ExecutionFailed.__module__ = 'py.process.cmdexec'
+ ExecutionFailed.__name__ = 'Error'
+except (AttributeError, TypeError):
+ pass
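
A usage sketch (the failing example assumes a POSIX ``/bin/sh``)::

    out = cmdexec("echo hello")        # -> 'hello\n'
    try:
        cmdexec("exit 7")              # non-zero exit status raises
    except cmdexec.Error as exc:       # cmdexec.Error is ExecutionFailed
        print(exc.status)              # 7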
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_process/forkedfunc.py b/testing/web-platform/tests/tools/third_party/py/py/_process/forkedfunc.py
new file mode 100644
index 0000000000..1c28530688
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_process/forkedfunc.py
@@ -0,0 +1,120 @@
+
+"""
+ ForkedFunc provides a way to run a function in a forked process
+ and get at its return value, stdout and stderr output as well
+ as signals and exit status.
+"""
+
+import py
+import os
+import sys
+import marshal
+
+
+def get_unbuffered_io(fd, filename):
+ f = open(str(filename), "w")
+ if fd != f.fileno():
+ os.dup2(f.fileno(), fd)
+ class AutoFlush:
+ def write(self, data):
+ f.write(data)
+ f.flush()
+ def __getattr__(self, name):
+ return getattr(f, name)
+ return AutoFlush()
+
+
+class ForkedFunc:
+ EXITSTATUS_EXCEPTION = 3
+
+
+ def __init__(self, fun, args=None, kwargs=None, nice_level=0,
+ child_on_start=None, child_on_exit=None):
+ if args is None:
+ args = []
+ if kwargs is None:
+ kwargs = {}
+ self.fun = fun
+ self.args = args
+ self.kwargs = kwargs
+ self.tempdir = tempdir = py.path.local.mkdtemp()
+ self.RETVAL = tempdir.ensure('retval')
+ self.STDOUT = tempdir.ensure('stdout')
+ self.STDERR = tempdir.ensure('stderr')
+
+ pid = os.fork()
+ if pid: # in parent process
+ self.pid = pid
+ else: # in child process
+ self.pid = None
+ self._child(nice_level, child_on_start, child_on_exit)
+
+ def _child(self, nice_level, child_on_start, child_on_exit):
+ # right now we need to call a function, but first we need to
+ # map all IO that might happen
+ sys.stdout = stdout = get_unbuffered_io(1, self.STDOUT)
+ sys.stderr = stderr = get_unbuffered_io(2, self.STDERR)
+ retvalf = self.RETVAL.open("wb")
+ EXITSTATUS = 0
+ try:
+ if nice_level:
+ os.nice(nice_level)
+ try:
+ if child_on_start is not None:
+ child_on_start()
+ retval = self.fun(*self.args, **self.kwargs)
+ retvalf.write(marshal.dumps(retval))
+ if child_on_exit is not None:
+ child_on_exit()
+ except:
+ excinfo = py.code.ExceptionInfo()
+ stderr.write(str(excinfo._getreprcrash()))
+ EXITSTATUS = self.EXITSTATUS_EXCEPTION
+ finally:
+ stdout.close()
+ stderr.close()
+ retvalf.close()
+ os.close(1)
+ os.close(2)
+ os._exit(EXITSTATUS)
+
+ def waitfinish(self, waiter=os.waitpid):
+ pid, systemstatus = waiter(self.pid, 0)
+ if systemstatus:
+ if os.WIFSIGNALED(systemstatus):
+ exitstatus = os.WTERMSIG(systemstatus) + 128
+ else:
+ exitstatus = os.WEXITSTATUS(systemstatus)
+ else:
+ exitstatus = 0
+ signal = systemstatus & 0x7f
+ if not exitstatus and not signal:
+ retval = self.RETVAL.open('rb')
+ try:
+ retval_data = retval.read()
+ finally:
+ retval.close()
+ retval = marshal.loads(retval_data)
+ else:
+ retval = None
+ stdout = self.STDOUT.read()
+ stderr = self.STDERR.read()
+ self._removetemp()
+ return Result(exitstatus, signal, retval, stdout, stderr)
+
+ def _removetemp(self):
+ if self.tempdir.check():
+ self.tempdir.remove()
+
+ def __del__(self):
+ if self.pid is not None: # only clean up in main process
+ self._removetemp()
+
+
+class Result(object):
+ def __init__(self, exitstatus, signal, retval, stdout, stderr):
+ self.exitstatus = exitstatus
+ self.signal = signal
+ self.retval = retval
+ self.out = stdout
+ self.err = stderr
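
A usage sketch (POSIX only, since the class relies on ``os.fork``)::

    def compute():
        print("working")
        return 6 * 7

    ff = ForkedFunc(compute)
    result = ff.waitfinish()
    result.retval        # -> 42 (marshalled back from the child process)
    result.out           # -> 'working\n' (captured child stdout)
    result.exitstatus    # -> 0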
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_process/killproc.py b/testing/web-platform/tests/tools/third_party/py/py/_process/killproc.py
new file mode 100644
index 0000000000..18e8310b5f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_process/killproc.py
@@ -0,0 +1,23 @@
+import py
+import os, sys
+
+if sys.platform == "win32" or getattr(os, '_name', '') == 'nt':
+ try:
+ import ctypes
+ except ImportError:
+ def dokill(pid):
+ py.process.cmdexec("taskkill /F /PID %d" %(pid,))
+ else:
+ def dokill(pid):
+ PROCESS_TERMINATE = 1
+ handle = ctypes.windll.kernel32.OpenProcess(
+ PROCESS_TERMINATE, False, pid)
+ ctypes.windll.kernel32.TerminateProcess(handle, -1)
+ ctypes.windll.kernel32.CloseHandle(handle)
+else:
+ def dokill(pid):
+ os.kill(pid, 15)
+
+def kill(pid):
+ """ kill process by id. """
+ dokill(pid)
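
A usage sketch, assuming this function is exposed as ``py.process.kill`` (it sends SIGTERM on POSIX and calls ``TerminateProcess`` on Windows)::

    import subprocess
    import py

    proc = subprocess.Popen(["sleep", "60"])
    py.process.kill(proc.pid)   # terminate the child forcefully
    proc.wait()                 # reap it; the process is gone now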
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_std.py b/testing/web-platform/tests/tools/third_party/py/py/_std.py
new file mode 100644
index 0000000000..66adb7b023
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_std.py
@@ -0,0 +1,27 @@
+import sys
+import warnings
+
+
+class PyStdIsDeprecatedWarning(DeprecationWarning):
+ pass
+
+
+class Std(object):
+ """ makes top-level python modules available as an attribute,
+ importing them on first access.
+ """
+
+ def __init__(self):
+ self.__dict__ = sys.modules
+
+ def __getattr__(self, name):
+ warnings.warn("py.std is deprecated, please import %s directly" % name,
+ category=PyStdIsDeprecatedWarning,
+ stacklevel=2)
+ try:
+ m = __import__(name)
+ except ImportError:
+ raise AttributeError("py.std: could not import %s" % name)
+ return m
+
+std = Std()
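
A usage sketch; attribute access falls through to ``__getattr__`` (and therefore warns with ``PyStdIsDeprecatedWarning``) only for modules that are not already in ``sys.modules``::

    import py

    mod = py.std.textwrap       # the textwrap module; warns if textwrap was
                                # not imported before this point
    mod.dedent("    hi")        # -> 'hi'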
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/__init__.py b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/INSTALLER b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/LICENSE b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/LICENSE
new file mode 100644
index 0000000000..ff33b8f7ca
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/LICENSE
@@ -0,0 +1,18 @@
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/METADATA b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/METADATA
new file mode 100644
index 0000000000..7eea770a02
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/METADATA
@@ -0,0 +1,125 @@
+Metadata-Version: 2.1
+Name: apipkg
+Version: 2.0.0
+Summary: apipkg: namespace control and lazy-import mechanism
+Home-page: https://github.com/pytest-dev/apipkg
+Author: holger krekel
+Maintainer: Ronny Pfannschmidt
+Maintainer-email: opensource@ronnypfannschmidt.de
+License: MIT
+Platform: unix
+Platform: linux
+Platform: osx
+Platform: cygwin
+Platform: win32
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: POSIX
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+
+Welcome to apipkg !
+-------------------
+
+With apipkg you can control the exported namespace of a Python package and
+greatly reduce the number of imports for your users.
+It is a `small pure Python module`_ that works on CPython 2.7 and 3.4+,
+Jython and PyPy. It cooperates well with Python's ``help()`` system,
+custom importers (PEP302) and common command-line completion tools.
+
+Usage is very simple: you can require 'apipkg' as a dependency or you
+can copy paste the ~200 lines of code into your project.
+
+
+Tutorial example
+-------------------
+
+Here is a simple ``mypkg`` package that specifies one namespace
+and exports two objects imported from different modules::
+
+
+ # mypkg/__init__.py
+ import apipkg
+ apipkg.initpkg(__name__, {
+ 'path': {
+ 'Class1': "_mypkg.somemodule:Class1",
+ 'clsattr': "_mypkg.othermodule:Class2.attr",
+ }
+ })
+
+The package is initialized with a dictionary as namespace.
+
+You need to create a ``_mypkg`` package with a ``somemodule.py``
+and ``othermodule.py`` containing the respective classes.
+The ``_mypkg`` is not special - it's a completely
+regular Python package.
+
+Namespace dictionaries contain ``name: value`` mappings
+where the value may be another namespace dictionary or
+a string specifying an import location. On accessing
+ a namespace attribute an import will be performed::
+
+ >>> import mypkg
+ >>> mypkg.path
+ <ApiModule 'mypkg.path'>
+ >>> mypkg.path.Class1 # '_mypkg.somemodule' gets imported now
+ <class _mypkg.somemodule.Class1 at 0xb7d428fc>
+ >>> mypkg.path.clsattr # '_mypkg.othermodule' gets imported now
+ 4 # the value of _mypkg.othermodule.Class2.attr
+
+The ``mypkg.path`` namespace and its two entries are
+loaded when they are accessed. This means:
+
+* lazy loading - only what is actually needed is ever loaded
+
+* only the root "mypkg" ever needs to be imported to get
+ access to the complete functionality
+
+* the underlying modules are also accessible, for example::
+
+ from mypkg.sub import Class1
+
+
+Including apipkg in your package
+--------------------------------------
+
+If you don't want to add an ``apipkg`` dependency to your package you
+can copy the `apipkg.py`_ file somewhere to your own package,
+for example ``_mypkg/apipkg.py`` in the above example. You
+then import the ``initpkg`` function from that new place and
+are good to go.
+
+.. _`small pure Python module`:
+.. _`apipkg.py`: https://github.com/pytest-dev/apipkg/blob/master/src/apipkg/__init__.py
+
+Feedback?
+-----------------------
+
+If you have questions you are welcome to
+
+* join the **#pytest** channel on irc.libera.chat_
+ (using an IRC client, via webchat_, or via Matrix_).
+* create an issue on the bugtracker_
+
+.. _irc.libera.chat: ircs://irc.libera.chat:6697/#pytest
+.. _webchat: https://web.libera.chat/#pytest
+.. _matrix: https://matrix.to/#/%23pytest:libera.chat
+.. _bugtracker: https://github.com/pytest-dev/apipkg/issues
+
+
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/RECORD b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/RECORD
new file mode 100644
index 0000000000..357b8b9c72
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/RECORD
@@ -0,0 +1,11 @@
+apipkg-2.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+apipkg-2.0.0.dist-info/LICENSE,sha256=6J7tEHTTqUMZi6E5uAhE9bRFuGC7p0qK6twGEFZhZOo,1054
+apipkg-2.0.0.dist-info/METADATA,sha256=GqNwkxraK5UTxObLVXTLc2UqktOPwZnKqdk2ThzHX0A,4292
+apipkg-2.0.0.dist-info/RECORD,,
+apipkg-2.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+apipkg-2.0.0.dist-info/WHEEL,sha256=WzZ8cwjh8l0jtULNjYq1Hpr-WCqCRgPr--TX4P5I1Wo,110
+apipkg-2.0.0.dist-info/top_level.txt,sha256=3TGS6nmN7kjxhUK4LpPCB3QkQI34QYGrT0ZQGWajoZ8,7
+apipkg/__init__.py,sha256=gpbD3O57S9f-LsO2e-XwI6IGISayicfnCq3B5y_8frg,6978
+apipkg/__pycache__/__init__.cpython-39.pyc,,
+apipkg/__pycache__/version.cpython-39.pyc,,
+apipkg/version.py,sha256=bgZFg-f3UKhgE-z2w8RoFrwqRBzJBZkM4_jKFiYB9eU,142
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/REQUESTED b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/REQUESTED
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/WHEEL b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/WHEEL
new file mode 100644
index 0000000000..b733a60d37
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/top_level.txt b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..e2221c8f9e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg-2.0.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+apipkg
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg/__init__.py b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg/__init__.py
new file mode 100644
index 0000000000..350d8c4b07
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg/__init__.py
@@ -0,0 +1,217 @@
+"""
+apipkg: control the exported namespace of a Python package.
+
+see https://pypi.python.org/pypi/apipkg
+
+(c) holger krekel, 2009 - MIT license
+"""
+import os
+import sys
+from types import ModuleType
+
+from .version import version as __version__ # NOQA:F401
+
+
+def _py_abspath(path):
+ """
+ special version of abspath
+ that will leave paths from jython jars alone
+ """
+ if path.startswith("__pyclasspath__"):
+
+ return path
+ else:
+ return os.path.abspath(path)
+
+
+def distribution_version(name):
+ """try to get the version of the named distribution,
+ returns None on failure"""
+ from pkg_resources import get_distribution, DistributionNotFound
+
+ try:
+ dist = get_distribution(name)
+ except DistributionNotFound:
+ pass
+ else:
+ return dist.version
+
+
+def initpkg(pkgname, exportdefs, attr=None, eager=False):
+ """ initialize given package from the export definitions. """
+ attr = attr or {}
+ oldmod = sys.modules.get(pkgname)
+ d = {}
+ f = getattr(oldmod, "__file__", None)
+ if f:
+ f = _py_abspath(f)
+ d["__file__"] = f
+ if hasattr(oldmod, "__version__"):
+ d["__version__"] = oldmod.__version__
+ if hasattr(oldmod, "__loader__"):
+ d["__loader__"] = oldmod.__loader__
+ if hasattr(oldmod, "__path__"):
+ d["__path__"] = [_py_abspath(p) for p in oldmod.__path__]
+ if hasattr(oldmod, "__package__"):
+ d["__package__"] = oldmod.__package__
+ if "__doc__" not in exportdefs and getattr(oldmod, "__doc__", None):
+ d["__doc__"] = oldmod.__doc__
+ d["__spec__"] = getattr(oldmod, "__spec__", None)
+ d.update(attr)
+ if hasattr(oldmod, "__dict__"):
+ oldmod.__dict__.update(d)
+ mod = ApiModule(pkgname, exportdefs, implprefix=pkgname, attr=d)
+ sys.modules[pkgname] = mod
+ # eager-load under bpython to avoid its monkeypatching breaking packages
+ if "bpython" in sys.modules or eager:
+ for module in list(sys.modules.values()):
+ if isinstance(module, ApiModule):
+ module.__dict__
+ return mod
+
+
+def importobj(modpath, attrname):
+ """imports a module, then resolves the attrname on it"""
+ module = __import__(modpath, None, None, ["__doc__"])
+ if not attrname:
+ return module
+
+ retval = module
+ names = attrname.split(".")
+ for x in names:
+ retval = getattr(retval, x)
+ return retval
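
For example::

    importobj("os.path", "join")       # -> the os.path.join function
    importobj("os", "path.join")       # dotted attribute names resolve step by step
    importobj("textwrap", "")          # empty attribute name: returns the module itself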
+
+
+class ApiModule(ModuleType):
+ """the magical lazy-loading module standing"""
+
+ def __docget(self):
+ try:
+ return self.__doc
+ except AttributeError:
+ if "__doc__" in self.__map__:
+ return self.__makeattr("__doc__")
+
+ def __docset(self, value):
+ self.__doc = value
+
+ __doc__ = property(__docget, __docset)
+
+ def __init__(self, name, importspec, implprefix=None, attr=None):
+ self.__name__ = name
+ self.__all__ = [x for x in importspec if x != "__onfirstaccess__"]
+ self.__map__ = {}
+ self.__implprefix__ = implprefix or name
+ if attr:
+ for name, val in attr.items():
+ # print "setting", self.__name__, name, val
+ setattr(self, name, val)
+ for name, importspec in importspec.items():
+ if isinstance(importspec, dict):
+ subname = "{}.{}".format(self.__name__, name)
+ apimod = ApiModule(subname, importspec, implprefix)
+ sys.modules[subname] = apimod
+ setattr(self, name, apimod)
+ else:
+ parts = importspec.split(":")
+ modpath = parts.pop(0)
+ attrname = parts and parts[0] or ""
+ if modpath[0] == ".":
+ modpath = implprefix + modpath
+
+ if not attrname:
+ subname = "{}.{}".format(self.__name__, name)
+ apimod = AliasModule(subname, modpath)
+ sys.modules[subname] = apimod
+ if "." not in name:
+ setattr(self, name, apimod)
+ else:
+ self.__map__[name] = (modpath, attrname)
+
+ def __repr__(self):
+ repr_list = []
+ if hasattr(self, "__version__"):
+ repr_list.append("version=" + repr(self.__version__))
+ if hasattr(self, "__file__"):
+ repr_list.append("from " + repr(self.__file__))
+ if repr_list:
+ return "<ApiModule {!r} {}>".format(self.__name__, " ".join(repr_list))
+ return "<ApiModule {!r}>".format(self.__name__)
+
+ def __makeattr(self, name):
+ """lazily compute value for name or raise AttributeError if unknown."""
+ # print "makeattr", self.__name__, name
+ target = None
+ if "__onfirstaccess__" in self.__map__:
+ target = self.__map__.pop("__onfirstaccess__")
+ importobj(*target)()
+ try:
+ modpath, attrname = self.__map__[name]
+ except KeyError:
+ if target is not None and name != "__onfirstaccess__":
+ # retry, onfirstaccess might have set attrs
+ return getattr(self, name)
+ raise AttributeError(name)
+ else:
+ result = importobj(modpath, attrname)
+ setattr(self, name, result)
+ try:
+ del self.__map__[name]
+ except KeyError:
+ pass # in a recursive-import situation a double-del can happen
+ return result
+
+ __getattr__ = __makeattr
+
+ @property
+ def __dict__(self):
+ # force all the content of the module
+ # to be loaded when __dict__ is read
+ dictdescr = ModuleType.__dict__["__dict__"]
+ dict = dictdescr.__get__(self)
+ if dict is not None:
+ hasattr(self, "some")
+ for name in self.__all__:
+ try:
+ self.__makeattr(name)
+ except AttributeError:
+ pass
+ return dict
+
+
+def AliasModule(modname, modpath, attrname=None):
+ mod = []
+
+ def getmod():
+ if not mod:
+ x = importobj(modpath, None)
+ if attrname is not None:
+ x = getattr(x, attrname)
+ mod.append(x)
+ return mod[0]
+
+ x = modpath + ("." + attrname if attrname else "")
+ repr_result = "<AliasModule {!r} for {!r}>".format(modname, x)
+
+ class AliasModule(ModuleType):
+ def __repr__(self):
+ return repr_result
+
+ def __getattribute__(self, name):
+ try:
+ return getattr(getmod(), name)
+ except ImportError:
+ if modpath == "pytest" and attrname is None:
+ # hack for pylibs py.test
+ return None
+ else:
+ raise
+
+ def __setattr__(self, name, value):
+ setattr(getmod(), name, value)
+
+ def __delattr__(self, name):
+ delattr(getmod(), name)
+
+ return AliasModule(str(modname))
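
A small sketch of the alias behaviour; the target module is imported only when an attribute is first touched::

    tw = AliasModule("mypkg.textwrap", "textwrap")
    repr(tw)            # "<AliasModule 'mypkg.textwrap' for 'textwrap'>"
    tw.dedent("  hi")   # imports textwrap here and delegates -> 'hi'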
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg/version.py b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg/version.py
new file mode 100644
index 0000000000..c5b4e0e79f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/apipkg/version.py
@@ -0,0 +1,5 @@
+# coding: utf-8
+# file generated by setuptools_scm
+# don't change, don't track in version control
+version = '2.0.0'
+version_tuple = (2, 0, 0)
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/INSTALLER b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/LICENSE b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/LICENSE
new file mode 100644
index 0000000000..31ecdfb1db
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/LICENSE
@@ -0,0 +1,19 @@
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/METADATA b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/METADATA
new file mode 100644
index 0000000000..c078a7532f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/METADATA
@@ -0,0 +1,78 @@
+Metadata-Version: 2.1
+Name: iniconfig
+Version: 1.1.1
+Summary: iniconfig: brain-dead simple config-ini parsing
+Home-page: http://github.com/RonnyPfannschmidt/iniconfig
+Author: Ronny Pfannschmidt, Holger Krekel
+Author-email: opensource@ronnypfannschmidt.de, holger.krekel@gmail.com
+License: MIT License
+Platform: unix
+Platform: linux
+Platform: osx
+Platform: cygwin
+Platform: win32
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+
+iniconfig: brain-dead simple parsing of ini files
+=======================================================
+
+iniconfig is a small and simple INI-file parser module
+having a unique set of features:
+
+* tested against Python2.4 across to Python3.2, Jython, PyPy
+* maintains order of sections and entries
+* supports multi-line values with or without line-continuations
+* supports "#" comments everywhere
+* raises errors with proper line-numbers
+* no bells and whistles like automatic substitutions
+* iniconfig raises an Error if two sections have the same name.
+
+If you encounter issues or have feature wishes please report them to:
+
+ http://github.com/RonnyPfannschmidt/iniconfig/issues
+
+Basic Example
+===================================
+
+If you have an ini file like this::
+
+ # content of example.ini
+ [section1] # comment
+ name1=value1 # comment
+ name1b=value1,value2 # comment
+
+ [section2]
+ name2=
+ line1
+ line2
+
+then you can do::
+
+ >>> import iniconfig
+ >>> ini = iniconfig.IniConfig("example.ini")
+ >>> ini['section1']['name1'] # raises KeyError if not exists
+ 'value1'
+ >>> ini.get('section1', 'name1b', [], lambda x: x.split(","))
+ ['value1', 'value2']
+ >>> ini.get('section1', 'notexist', [], lambda x: x.split(","))
+ []
+ >>> [x.name for x in list(ini)]
+ ['section1', 'section2']
+ >>> list(list(ini)[0].items())
+ [('name1', 'value1'), ('name1b', 'value1,value2')]
+ >>> 'section1' in ini
+ True
+    >>> 'nonexistentsection' in ini
+ False
+
+
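The feature list above mentions duplicate-section detection and errors with proper line numbers, which the Basic Example never exercises. A minimal sketch of that failure path, assuming the vendored iniconfig package is importable; the file name duplicated.ini and its contents are hypothetical::

    # duplicated.ini (hypothetical):
    #   [section1]
    #   name = value
    #   [section1]
    import iniconfig
    try:
        iniconfig.IniConfig("duplicated.ini")
    except iniconfig.ParseError as exc:
        # ParseError.__str__ renders "path:lineno: message" with a 1-based line number
        print(exc)   # duplicated.ini:3: duplicate section 'section1'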
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/RECORD b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/RECORD
new file mode 100644
index 0000000000..168233330b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/RECORD
@@ -0,0 +1,11 @@
+iniconfig-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+iniconfig-1.1.1.dist-info/LICENSE,sha256=KvaAw570k_uCgwNW0dPfGstaBgM8ui3sehniHKp3qGY,1061
+iniconfig-1.1.1.dist-info/METADATA,sha256=_4-oFKpRXuZv5rzepScpXRwhq6DzqsgbnA5ZpgMUMcs,2405
+iniconfig-1.1.1.dist-info/RECORD,,
+iniconfig-1.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+iniconfig-1.1.1.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+iniconfig-1.1.1.dist-info/top_level.txt,sha256=7KfM0fugdlToj9UW7enKXk2HYALQD8qHiyKtjhSzgN8,10
+iniconfig/__init__.py,sha256=-pBe5AF_6aAwo1CxJQ8i_zJq6ejc6IxHta7qk2tNJhY,5208
+iniconfig/__init__.pyi,sha256=-4KOctzq28ohRmTZsqlH6aylyFqsNKxYqtk1dteypi4,1205
+iniconfig/__pycache__/__init__.cpython-39.pyc,,
+iniconfig/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/REQUESTED b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/REQUESTED
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/WHEEL b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/WHEEL
new file mode 100644
index 0000000000..6d38aa0601
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.35.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/top_level.txt b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/top_level.txt
new file mode 100644
index 0000000000..9dda53692d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig-1.1.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+iniconfig
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig/__init__.py b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig/__init__.py
new file mode 100644
index 0000000000..6ad9eaf868
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig/__init__.py
@@ -0,0 +1,165 @@
+""" brain-dead simple parser for ini-style files.
+(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed
+"""
+__all__ = ['IniConfig', 'ParseError']
+
+COMMENTCHARS = "#;"
+
+
+class ParseError(Exception):
+ def __init__(self, path, lineno, msg):
+ Exception.__init__(self, path, lineno, msg)
+ self.path = path
+ self.lineno = lineno
+ self.msg = msg
+
+ def __str__(self):
+ return "%s:%s: %s" % (self.path, self.lineno+1, self.msg)
+
+
+class SectionWrapper(object):
+ def __init__(self, config, name):
+ self.config = config
+ self.name = name
+
+ def lineof(self, name):
+ return self.config.lineof(self.name, name)
+
+ def get(self, key, default=None, convert=str):
+ return self.config.get(self.name, key,
+ convert=convert, default=default)
+
+ def __getitem__(self, key):
+ return self.config.sections[self.name][key]
+
+ def __iter__(self):
+ section = self.config.sections.get(self.name, [])
+
+ def lineof(key):
+ return self.config.lineof(self.name, key)
+ for name in sorted(section, key=lineof):
+ yield name
+
+ def items(self):
+ for name in self:
+ yield name, self[name]
+
+
+class IniConfig(object):
+ def __init__(self, path, data=None):
+ self.path = str(path) # convenience
+ if data is None:
+ f = open(self.path)
+ try:
+ tokens = self._parse(iter(f))
+ finally:
+ f.close()
+ else:
+ tokens = self._parse(data.splitlines(True))
+
+ self._sources = {}
+ self.sections = {}
+
+ for lineno, section, name, value in tokens:
+ if section is None:
+ self._raise(lineno, 'no section header defined')
+ self._sources[section, name] = lineno
+ if name is None:
+ if section in self.sections:
+ self._raise(lineno, 'duplicate section %r' % (section, ))
+ self.sections[section] = {}
+ else:
+ if name in self.sections[section]:
+ self._raise(lineno, 'duplicate name %r' % (name, ))
+ self.sections[section][name] = value
+
+ def _raise(self, lineno, msg):
+ raise ParseError(self.path, lineno, msg)
+
+ def _parse(self, line_iter):
+ result = []
+ section = None
+ for lineno, line in enumerate(line_iter):
+ name, data = self._parseline(line, lineno)
+ # new value
+ if name is not None and data is not None:
+ result.append((lineno, section, name, data))
+ # new section
+ elif name is not None and data is None:
+ if not name:
+ self._raise(lineno, 'empty section name')
+ section = name
+ result.append((lineno, section, None, None))
+ # continuation
+ elif name is None and data is not None:
+ if not result:
+ self._raise(lineno, 'unexpected value continuation')
+ last = result.pop()
+ last_name, last_data = last[-2:]
+ if last_name is None:
+ self._raise(lineno, 'unexpected value continuation')
+
+ if last_data:
+ data = '%s\n%s' % (last_data, data)
+ result.append(last[:-1] + (data,))
+ return result
+
+ def _parseline(self, line, lineno):
+ # blank lines
+ if iscommentline(line):
+ line = ""
+ else:
+ line = line.rstrip()
+ if not line:
+ return None, None
+ # section
+ if line[0] == '[':
+ realline = line
+ for c in COMMENTCHARS:
+ line = line.split(c)[0].rstrip()
+ if line[-1] == "]":
+ return line[1:-1], None
+ return None, realline.strip()
+ # value
+ elif not line[0].isspace():
+ try:
+ name, value = line.split('=', 1)
+ if ":" in name:
+ raise ValueError()
+ except ValueError:
+ try:
+ name, value = line.split(":", 1)
+ except ValueError:
+ self._raise(lineno, 'unexpected line: %r' % line)
+ return name.strip(), value.strip()
+ # continuation
+ else:
+ return None, line.strip()
+
+ def lineof(self, section, name=None):
+ lineno = self._sources.get((section, name))
+ if lineno is not None:
+ return lineno + 1
+
+ def get(self, section, name, default=None, convert=str):
+ try:
+ return convert(self.sections[section][name])
+ except KeyError:
+ return default
+
+ def __getitem__(self, name):
+ if name not in self.sections:
+ raise KeyError(name)
+ return SectionWrapper(self, name)
+
+ def __iter__(self):
+ for name in sorted(self.sections, key=self.lineof):
+ yield SectionWrapper(self, name)
+
+ def __contains__(self, arg):
+ return arg in self.sections
+
+
+def iscommentline(line):
+ c = line.lstrip()[:1]
+ return c in COMMENTCHARS
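The _parse/_parseline pair above is the whole algorithm: a non-indented line is a name=value (or name: value) pair, an indented line is a continuation that gets re-joined with a newline, and [name] starts a section while the line number of every token is recorded. A small sketch of that behaviour, using the data= keyword so no file is touched; the section and key names are made up for illustration::

    from iniconfig import IniConfig

    ini = IniConfig("inline.ini", data=(
        "[server]\n"
        "hosts =\n"
        "    alpha\n"
        "    beta\n"
    ))
    # the two indented lines were folded into one value by _parse()
    assert ini["server"]["hosts"] == "alpha\nbeta"
    # lineof() reports the 1-based line recorded in self._sources
    assert ini.lineof("server", "hosts") == 2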
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig/__init__.pyi b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig/__init__.pyi
new file mode 100644
index 0000000000..b6284bec3f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig/__init__.pyi
@@ -0,0 +1,31 @@
+from typing import Callable, Iterator, Mapping, Optional, Tuple, TypeVar, Union
+from typing_extensions import Final
+
+_D = TypeVar('_D')
+_T = TypeVar('_T')
+
+class ParseError(Exception):
+ # Private __init__.
+ path: Final[str]
+ lineno: Final[int]
+ msg: Final[str]
+
+class SectionWrapper:
+ # Private __init__.
+ config: Final[IniConfig]
+ name: Final[str]
+ def __getitem__(self, key: str) -> str: ...
+ def __iter__(self) -> Iterator[str]: ...
+ def get(self, key: str, default: _D = ..., convert: Callable[[str], _T] = ...) -> Union[_T, _D]: ...
+ def items(self) -> Iterator[Tuple[str, str]]: ...
+ def lineof(self, name: str) -> Optional[int]: ...
+
+class IniConfig:
+ path: Final[str]
+ sections: Final[Mapping[str, Mapping[str, str]]]
+ def __init__(self, path: str, data: Optional[str] = None): ...
+ def __contains__(self, arg: str) -> bool: ...
+ def __getitem__(self, name: str) -> SectionWrapper: ...
+ def __iter__(self) -> Iterator[SectionWrapper]: ...
+ def get(self, section: str, name: str, default: _D = ..., convert: Callable[[str], _T] = ...) -> Union[_T, _D]: ...
+ def lineof(self, section: str, name: Optional[str] = ...) -> Optional[int]: ...
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig/py.typed b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_vendored_packages/iniconfig/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/py/py/_xmlgen.py b/testing/web-platform/tests/tools/third_party/py/py/_xmlgen.py
new file mode 100644
index 0000000000..1c83545884
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/_xmlgen.py
@@ -0,0 +1,255 @@
+"""
+module for generating and serializing xml and html structures
+by using simple python objects.
+
+(c) holger krekel, holger at merlinux eu. 2009
+"""
+import sys, re
+
+if sys.version_info >= (3,0):
+ def u(s):
+ return s
+ def unicode(x, errors=None):
+ if hasattr(x, '__unicode__'):
+ return x.__unicode__()
+ return str(x)
+else:
+ def u(s):
+ return unicode(s)
+ unicode = unicode
+
+
+class NamespaceMetaclass(type):
+ def __getattr__(self, name):
+ if name[:1] == '_':
+ raise AttributeError(name)
+ if self == Namespace:
+ raise ValueError("Namespace class is abstract")
+ tagspec = self.__tagspec__
+ if tagspec is not None and name not in tagspec:
+ raise AttributeError(name)
+ classattr = {}
+ if self.__stickyname__:
+ classattr['xmlname'] = name
+ cls = type(name, (self.__tagclass__,), classattr)
+ setattr(self, name, cls)
+ return cls
+
+class Tag(list):
+ class Attr(object):
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+ def __init__(self, *args, **kwargs):
+ super(Tag, self).__init__(args)
+ self.attr = self.Attr(**kwargs)
+
+ def __unicode__(self):
+ return self.unicode(indent=0)
+ __str__ = __unicode__
+
+ def unicode(self, indent=2):
+ l = []
+ SimpleUnicodeVisitor(l.append, indent).visit(self)
+ return u("").join(l)
+
+ def __repr__(self):
+ name = self.__class__.__name__
+ return "<%r tag object %d>" % (name, id(self))
+
+Namespace = NamespaceMetaclass('Namespace', (object, ), {
+ '__tagspec__': None,
+ '__tagclass__': Tag,
+ '__stickyname__': False,
+})
+
+class HtmlTag(Tag):
+ def unicode(self, indent=2):
+ l = []
+ HtmlVisitor(l.append, indent, shortempty=False).visit(self)
+ return u("").join(l)
+
+# exported plain html namespace
+class html(Namespace):
+ __tagclass__ = HtmlTag
+ __stickyname__ = True
+ __tagspec__ = dict([(x,1) for x in (
+ 'a,abbr,acronym,address,applet,area,article,aside,audio,b,'
+ 'base,basefont,bdi,bdo,big,blink,blockquote,body,br,button,'
+ 'canvas,caption,center,cite,code,col,colgroup,command,comment,'
+ 'datalist,dd,del,details,dfn,dir,div,dl,dt,em,embed,'
+ 'fieldset,figcaption,figure,footer,font,form,frame,frameset,h1,'
+ 'h2,h3,h4,h5,h6,head,header,hgroup,hr,html,i,iframe,img,input,'
+ 'ins,isindex,kbd,keygen,label,legend,li,link,listing,map,mark,'
+ 'marquee,menu,meta,meter,multicol,nav,nobr,noembed,noframes,'
+ 'noscript,object,ol,optgroup,option,output,p,param,pre,progress,'
+ 'q,rp,rt,ruby,s,samp,script,section,select,small,source,span,'
+ 'strike,strong,style,sub,summary,sup,table,tbody,td,textarea,'
+ 'tfoot,th,thead,time,title,tr,track,tt,u,ul,xmp,var,video,wbr'
+ ).split(',') if x])
+
+ class Style(object):
+ def __init__(self, **kw):
+ for x, y in kw.items():
+ x = x.replace('_', '-')
+ setattr(self, x, y)
+
+
+class raw(object):
+ """just a box that can contain a unicode string that will be
+ included directly in the output"""
+ def __init__(self, uniobj):
+ self.uniobj = uniobj
+
+class SimpleUnicodeVisitor(object):
+ """ recursive visitor to write unicode. """
+ def __init__(self, write, indent=0, curindent=0, shortempty=True):
+ self.write = write
+ self.cache = {}
+ self.visited = {} # for detection of recursion
+ self.indent = indent
+ self.curindent = curindent
+ self.parents = []
+ self.shortempty = shortempty # short empty tags or not
+
+ def visit(self, node):
+ """ dispatcher on node's class/bases name. """
+ cls = node.__class__
+ try:
+ visitmethod = self.cache[cls]
+ except KeyError:
+ for subclass in cls.__mro__:
+ visitmethod = getattr(self, subclass.__name__, None)
+ if visitmethod is not None:
+ break
+ else:
+ visitmethod = self.__object
+ self.cache[cls] = visitmethod
+ visitmethod(node)
+
+ # the default fallback handler is marked private
+ # to avoid clashes with the tag name object
+ def __object(self, obj):
+ #self.write(obj)
+ self.write(escape(unicode(obj)))
+
+ def raw(self, obj):
+ self.write(obj.uniobj)
+
+ def list(self, obj):
+ assert id(obj) not in self.visited
+ self.visited[id(obj)] = 1
+ for elem in obj:
+ self.visit(elem)
+
+ def Tag(self, tag):
+ assert id(tag) not in self.visited
+ try:
+ tag.parent = self.parents[-1]
+ except IndexError:
+ tag.parent = None
+ self.visited[id(tag)] = 1
+ tagname = getattr(tag, 'xmlname', tag.__class__.__name__)
+ if self.curindent and not self._isinline(tagname):
+ self.write("\n" + u(' ') * self.curindent)
+ if tag:
+ self.curindent += self.indent
+ self.write(u('<%s%s>') % (tagname, self.attributes(tag)))
+ self.parents.append(tag)
+ for x in tag:
+ self.visit(x)
+ self.parents.pop()
+ self.write(u('</%s>') % tagname)
+ self.curindent -= self.indent
+ else:
+ nameattr = tagname+self.attributes(tag)
+ if self._issingleton(tagname):
+ self.write(u('<%s/>') % (nameattr,))
+ else:
+ self.write(u('<%s></%s>') % (nameattr, tagname))
+
+ def attributes(self, tag):
+ # serialize attributes
+ attrlist = dir(tag.attr)
+ attrlist.sort()
+ l = []
+ for name in attrlist:
+ res = self.repr_attribute(tag.attr, name)
+ if res is not None:
+ l.append(res)
+ l.extend(self.getstyle(tag))
+ return u("").join(l)
+
+ def repr_attribute(self, attrs, name):
+ if name[:2] != '__':
+ value = getattr(attrs, name)
+ if name.endswith('_'):
+ name = name[:-1]
+ if isinstance(value, raw):
+ insert = value.uniobj
+ else:
+ insert = escape(unicode(value))
+ return ' %s="%s"' % (name, insert)
+
+ def getstyle(self, tag):
+ """ return attribute list suitable for styling. """
+ try:
+ styledict = tag.style.__dict__
+ except AttributeError:
+ return []
+ else:
+ stylelist = [x+': ' + y for x,y in styledict.items()]
+ return [u(' style="%s"') % u('; ').join(stylelist)]
+
+ def _issingleton(self, tagname):
+ """can (and will) be overridden in subclasses"""
+ return self.shortempty
+
+ def _isinline(self, tagname):
+ """can (and will) be overridden in subclasses"""
+ return False
+
+class HtmlVisitor(SimpleUnicodeVisitor):
+
+ single = dict([(x, 1) for x in
+ ('br,img,area,param,col,hr,meta,link,base,'
+ 'input,frame').split(',')])
+ inline = dict([(x, 1) for x in
+ ('a abbr acronym b basefont bdo big br cite code dfn em font '
+ 'i img input kbd label q s samp select small span strike '
+ 'strong sub sup textarea tt u var'.split(' '))])
+
+ def repr_attribute(self, attrs, name):
+ if name == 'class_':
+ value = getattr(attrs, name)
+ if value is None:
+ return
+ return super(HtmlVisitor, self).repr_attribute(attrs, name)
+
+ def _issingleton(self, tagname):
+ return tagname in self.single
+
+ def _isinline(self, tagname):
+ return tagname in self.inline
+
+
+class _escape:
+ def __init__(self):
+ self.escape = {
+ u('"') : u('&quot;'), u('<') : u('&lt;'), u('>') : u('&gt;'),
+ u('&') : u('&amp;'), u("'") : u('&apos;'),
+ }
+ self.charef_rex = re.compile(u("|").join(self.escape.keys()))
+
+ def _replacer(self, match):
+ return self.escape[match.group(0)]
+
+ def __call__(self, ustring):
+ """ xml-escape the given unicode string. """
+ try:
+ ustring = unicode(ustring)
+ except UnicodeDecodeError:
+ ustring = unicode(ustring, 'utf-8', errors='replace')
+ return self.charef_rex.sub(self._replacer, ustring)
+
+escape = _escape()
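The Namespace metaclass above manufactures a Tag subclass per attribute access, and the visitors serialize nested tags with escaping. A short sketch of the html namespace, importing the module directly for illustration (it is normally reached as py.xml via apipkg)::

    from py._xmlgen import html, raw

    para = html.p(
        "5 > 3 & 2 < 4",           # plain text children are XML-escaped
        html.br(),                 # br is in HtmlVisitor.single, so it renders as <br/>
        raw("<b>kept as-is</b>"),  # raw() bypasses escaping entirely
        class_="note",             # trailing underscore is stripped: class="note"
    )
    print(para.unicode(indent=0))
    # <p class="note">5 &gt; 3 &amp; 2 &lt; 4<br/><b>kept as-is</b></p>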
diff --git a/testing/web-platform/tests/tools/third_party/py/py/error.pyi b/testing/web-platform/tests/tools/third_party/py/py/error.pyi
new file mode 100644
index 0000000000..034eba609f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/error.pyi
@@ -0,0 +1,129 @@
+from typing import Any, Callable, TypeVar
+
+_T = TypeVar('_T')
+
+def checked_call(func: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: ...
+class Error(EnvironmentError): ...
+class EPERM(Error): ...
+class ENOENT(Error): ...
+class ESRCH(Error): ...
+class EINTR(Error): ...
+class EIO(Error): ...
+class ENXIO(Error): ...
+class E2BIG(Error): ...
+class ENOEXEC(Error): ...
+class EBADF(Error): ...
+class ECHILD(Error): ...
+class EAGAIN(Error): ...
+class ENOMEM(Error): ...
+class EACCES(Error): ...
+class EFAULT(Error): ...
+class ENOTBLK(Error): ...
+class EBUSY(Error): ...
+class EEXIST(Error): ...
+class EXDEV(Error): ...
+class ENODEV(Error): ...
+class ENOTDIR(Error): ...
+class EISDIR(Error): ...
+class EINVAL(Error): ...
+class ENFILE(Error): ...
+class EMFILE(Error): ...
+class ENOTTY(Error): ...
+class ETXTBSY(Error): ...
+class EFBIG(Error): ...
+class ENOSPC(Error): ...
+class ESPIPE(Error): ...
+class EROFS(Error): ...
+class EMLINK(Error): ...
+class EPIPE(Error): ...
+class EDOM(Error): ...
+class ERANGE(Error): ...
+class EDEADLCK(Error): ...
+class ENAMETOOLONG(Error): ...
+class ENOLCK(Error): ...
+class ENOSYS(Error): ...
+class ENOTEMPTY(Error): ...
+class ELOOP(Error): ...
+class EWOULDBLOCK(Error): ...
+class ENOMSG(Error): ...
+class EIDRM(Error): ...
+class ECHRNG(Error): ...
+class EL2NSYNC(Error): ...
+class EL3HLT(Error): ...
+class EL3RST(Error): ...
+class ELNRNG(Error): ...
+class EUNATCH(Error): ...
+class ENOCSI(Error): ...
+class EL2HLT(Error): ...
+class EBADE(Error): ...
+class EBADR(Error): ...
+class EXFULL(Error): ...
+class ENOANO(Error): ...
+class EBADRQC(Error): ...
+class EBADSLT(Error): ...
+class EDEADLOCK(Error): ...
+class EBFONT(Error): ...
+class ENOSTR(Error): ...
+class ENODATA(Error): ...
+class ETIME(Error): ...
+class ENOSR(Error): ...
+class ENONET(Error): ...
+class ENOPKG(Error): ...
+class EREMOTE(Error): ...
+class ENOLINK(Error): ...
+class EADV(Error): ...
+class ESRMNT(Error): ...
+class ECOMM(Error): ...
+class EPROTO(Error): ...
+class EMULTIHOP(Error): ...
+class EDOTDOT(Error): ...
+class EBADMSG(Error): ...
+class EOVERFLOW(Error): ...
+class ENOTUNIQ(Error): ...
+class EBADFD(Error): ...
+class EREMCHG(Error): ...
+class ELIBACC(Error): ...
+class ELIBBAD(Error): ...
+class ELIBSCN(Error): ...
+class ELIBMAX(Error): ...
+class ELIBEXEC(Error): ...
+class EILSEQ(Error): ...
+class ERESTART(Error): ...
+class ESTRPIPE(Error): ...
+class EUSERS(Error): ...
+class ENOTSOCK(Error): ...
+class EDESTADDRREQ(Error): ...
+class EMSGSIZE(Error): ...
+class EPROTOTYPE(Error): ...
+class ENOPROTOOPT(Error): ...
+class EPROTONOSUPPORT(Error): ...
+class ESOCKTNOSUPPORT(Error): ...
+class ENOTSUP(Error): ...
+class EOPNOTSUPP(Error): ...
+class EPFNOSUPPORT(Error): ...
+class EAFNOSUPPORT(Error): ...
+class EADDRINUSE(Error): ...
+class EADDRNOTAVAIL(Error): ...
+class ENETDOWN(Error): ...
+class ENETUNREACH(Error): ...
+class ENETRESET(Error): ...
+class ECONNABORTED(Error): ...
+class ECONNRESET(Error): ...
+class ENOBUFS(Error): ...
+class EISCONN(Error): ...
+class ENOTCONN(Error): ...
+class ESHUTDOWN(Error): ...
+class ETOOMANYREFS(Error): ...
+class ETIMEDOUT(Error): ...
+class ECONNREFUSED(Error): ...
+class EHOSTDOWN(Error): ...
+class EHOSTUNREACH(Error): ...
+class EALREADY(Error): ...
+class EINPROGRESS(Error): ...
+class ESTALE(Error): ...
+class EUCLEAN(Error): ...
+class ENOTNAM(Error): ...
+class ENAVAIL(Error): ...
+class EISNAM(Error): ...
+class EREMOTEIO(Error): ...
+class EDQUOT(Error): ...
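These stubs describe py.error, where each errno gets its own exception class derived from Error. A minimal sketch of the runtime behaviour the annotations promise; the runtime module itself is not part of this hunk, so treat it as illustrative::

    import os
    import py

    # checked_call() runs the callable and converts an EnvironmentError
    # into the errno-specific class, so callers can catch by name
    try:
        py.error.checked_call(os.stat, "/no/such/path")
    except py.error.ENOENT:
        print("missing, as expected")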
diff --git a/testing/web-platform/tests/tools/third_party/py/py/iniconfig.pyi b/testing/web-platform/tests/tools/third_party/py/py/iniconfig.pyi
new file mode 100644
index 0000000000..b6284bec3f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/iniconfig.pyi
@@ -0,0 +1,31 @@
+from typing import Callable, Iterator, Mapping, Optional, Tuple, TypeVar, Union
+from typing_extensions import Final
+
+_D = TypeVar('_D')
+_T = TypeVar('_T')
+
+class ParseError(Exception):
+ # Private __init__.
+ path: Final[str]
+ lineno: Final[int]
+ msg: Final[str]
+
+class SectionWrapper:
+ # Private __init__.
+ config: Final[IniConfig]
+ name: Final[str]
+ def __getitem__(self, key: str) -> str: ...
+ def __iter__(self) -> Iterator[str]: ...
+ def get(self, key: str, default: _D = ..., convert: Callable[[str], _T] = ...) -> Union[_T, _D]: ...
+ def items(self) -> Iterator[Tuple[str, str]]: ...
+ def lineof(self, name: str) -> Optional[int]: ...
+
+class IniConfig:
+ path: Final[str]
+ sections: Final[Mapping[str, Mapping[str, str]]]
+ def __init__(self, path: str, data: Optional[str] = None): ...
+ def __contains__(self, arg: str) -> bool: ...
+ def __getitem__(self, name: str) -> SectionWrapper: ...
+ def __iter__(self) -> Iterator[SectionWrapper]: ...
+ def get(self, section: str, name: str, default: _D = ..., convert: Callable[[str], _T] = ...) -> Union[_T, _D]: ...
+ def lineof(self, section: str, name: Optional[str] = ...) -> Optional[int]: ...
diff --git a/testing/web-platform/tests/tools/third_party/py/py/io.pyi b/testing/web-platform/tests/tools/third_party/py/py/io.pyi
new file mode 100644
index 0000000000..d377e2405d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/io.pyi
@@ -0,0 +1,130 @@
+from io import StringIO as TextIO
+from io import BytesIO as BytesIO
+from typing import Any, AnyStr, Callable, Generic, IO, List, Optional, Text, Tuple, TypeVar, Union, overload
+from typing_extensions import Final
+import sys
+
+_T = TypeVar("_T")
+
+class FDCapture(Generic[AnyStr]):
+ def __init__(self, targetfd: int, tmpfile: Optional[IO[AnyStr]] = ..., now: bool = ..., patchsys: bool = ...) -> None: ...
+ def start(self) -> None: ...
+ def done(self) -> IO[AnyStr]: ...
+ def writeorg(self, data: AnyStr) -> None: ...
+
+class StdCaptureFD:
+ def __init__(
+ self,
+ out: Union[bool, IO[str]] = ...,
+ err: Union[bool, IO[str]] = ...,
+ mixed: bool = ...,
+ in_: bool = ...,
+ patchsys: bool = ...,
+ now: bool = ...,
+ ) -> None: ...
+ @classmethod
+ def call(cls, func: Callable[..., _T], *args: Any, **kwargs: Any) -> Tuple[_T, str, str]: ...
+ def reset(self) -> Tuple[str, str]: ...
+ def suspend(self) -> Tuple[str, str]: ...
+ def startall(self) -> None: ...
+ def resume(self) -> None: ...
+ def done(self, save: bool = ...) -> Tuple[IO[str], IO[str]]: ...
+ def readouterr(self) -> Tuple[str, str]: ...
+
+class StdCapture:
+ def __init__(
+ self,
+ out: Union[bool, IO[str]] = ...,
+ err: Union[bool, IO[str]] = ...,
+ in_: bool = ...,
+ mixed: bool = ...,
+ now: bool = ...,
+ ) -> None: ...
+ @classmethod
+ def call(cls, func: Callable[..., _T], *args: Any, **kwargs: Any) -> Tuple[_T, str, str]: ...
+ def reset(self) -> Tuple[str, str]: ...
+ def suspend(self) -> Tuple[str, str]: ...
+ def startall(self) -> None: ...
+ def resume(self) -> None: ...
+ def done(self, save: bool = ...) -> Tuple[IO[str], IO[str]]: ...
+ def readouterr(self) -> Tuple[IO[str], IO[str]]: ...
+
+# XXX: The type here is not exactly right. If f is IO[bytes] and
+# encoding is not None, returns some weird hybrid, not exactly IO[bytes].
+def dupfile(
+ f: IO[AnyStr],
+ mode: Optional[str] = ...,
+ buffering: int = ...,
+ raising: bool = ...,
+ encoding: Optional[str] = ...,
+) -> IO[AnyStr]: ...
+def get_terminal_width() -> int: ...
+def ansi_print(
+ text: Union[str, Text],
+ esc: Union[Union[str, Text], Tuple[Union[str, Text], ...]],
+ file: Optional[IO[Any]] = ...,
+ newline: bool = ...,
+ flush: bool = ...,
+) -> None: ...
+def saferepr(obj, maxsize: int = ...) -> str: ...
+
+class TerminalWriter:
+ stringio: TextIO
+ encoding: Final[str]
+ hasmarkup: bool
+ def __init__(self, file: Optional[IO[str]] = ..., stringio: bool = ..., encoding: Optional[str] = ...) -> None: ...
+ @property
+ def fullwidth(self) -> int: ...
+ @fullwidth.setter
+ def fullwidth(self, value: int) -> None: ...
+ @property
+ def chars_on_current_line(self) -> int: ...
+ @property
+ def width_of_current_line(self) -> int: ...
+ def markup(
+ self,
+ text: str,
+ *,
+ black: int = ..., red: int = ..., green: int = ..., yellow: int = ..., blue: int = ..., purple: int = ...,
+ cyan: int = ..., white: int = ..., Black: int = ..., Red: int = ..., Green: int = ..., Yellow: int = ...,
+ Blue: int = ..., Purple: int = ..., Cyan: int = ..., White: int = ..., bold: int = ..., light: int = ...,
+ blink: int = ..., invert: int = ...,
+ ) -> str: ...
+ def sep(
+ self,
+ sepchar: str,
+ title: Optional[str] = ...,
+ fullwidth: Optional[int] = ...,
+ *,
+ black: int = ..., red: int = ..., green: int = ..., yellow: int = ..., blue: int = ..., purple: int = ...,
+ cyan: int = ..., white: int = ..., Black: int = ..., Red: int = ..., Green: int = ..., Yellow: int = ...,
+ Blue: int = ..., Purple: int = ..., Cyan: int = ..., White: int = ..., bold: int = ..., light: int = ...,
+ blink: int = ..., invert: int = ...,
+ ) -> None: ...
+ def write(
+ self,
+ msg: str,
+ *,
+ black: int = ..., red: int = ..., green: int = ..., yellow: int = ..., blue: int = ..., purple: int = ...,
+ cyan: int = ..., white: int = ..., Black: int = ..., Red: int = ..., Green: int = ..., Yellow: int = ...,
+ Blue: int = ..., Purple: int = ..., Cyan: int = ..., White: int = ..., bold: int = ..., light: int = ...,
+ blink: int = ..., invert: int = ...,
+ ) -> None: ...
+ def line(
+ self,
+ s: str = ...,
+ *,
+ black: int = ..., red: int = ..., green: int = ..., yellow: int = ..., blue: int = ..., purple: int = ...,
+ cyan: int = ..., white: int = ..., Black: int = ..., Red: int = ..., Green: int = ..., Yellow: int = ...,
+ Blue: int = ..., Purple: int = ..., Cyan: int = ..., White: int = ..., bold: int = ..., light: int = ...,
+ blink: int = ..., invert: int = ...,
+ ) -> None: ...
+ def reline(
+ self,
+ line: str,
+ *,
+ black: int = ..., red: int = ..., green: int = ..., yellow: int = ..., blue: int = ..., purple: int = ...,
+ cyan: int = ..., white: int = ..., Black: int = ..., Red: int = ..., Green: int = ..., Yellow: int = ...,
+ Blue: int = ..., Purple: int = ..., Cyan: int = ..., White: int = ..., bold: int = ..., light: int = ...,
+ blink: int = ..., invert: int = ...,
+ ) -> None: ...
diff --git a/testing/web-platform/tests/tools/third_party/py/py/path.pyi b/testing/web-platform/tests/tools/third_party/py/py/path.pyi
new file mode 100644
index 0000000000..1ddab9601e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/path.pyi
@@ -0,0 +1,197 @@
+from typing import Any, AnyStr, Callable, ContextManager, Generic, IO, Iterable, Iterator, List, Optional, Text, Type, Union
+from typing_extensions import Final, Literal
+import os
+import sys
+
+class _FNMatcher(Generic[AnyStr]):
+ pattern: AnyStr = ...
+ def __init__(self, pattern: AnyStr) -> None: ...
+ def __call__(self, path: local) -> bool: ...
+
+class _Stat:
+ path: Final[local] = ...
+ mode: Final[int]
+ ino: Final[int]
+ dev: Final[int]
+ nlink: Final[int]
+ uid: Final[int]
+ gid: Final[int]
+ size: Final[int]
+ atime: Final[float]
+ mtime: Final[float]
+ ctime: Final[float]
+ atime_ns: Final[int]
+ mtime_ns: Final[int]
+ ctime_ns: Final[int]
+ if sys.version_info >= (3, 8) and sys.platform == "win32":
+ reparse_tag: Final[int]
+ blocks: Final[int]
+ blksize: Final[int]
+ rdev: Final[int]
+ flags: Final[int]
+ gen: Final[int]
+ birthtime: Final[int]
+ rsize: Final[int]
+ creator: Final[int]
+ type: Final[int]
+ if sys.platform != 'win32':
+ @property
+ def owner(self) -> str: ...
+ @property
+ def group(self) -> str: ...
+ def isdir(self) -> bool: ...
+ def isfile(self) -> bool: ...
+ def islink(self) -> bool: ...
+
+
+if sys.version_info >= (3, 6):
+ _PathLike = os.PathLike
+else:
+ class _PathLike(Generic[AnyStr]):
+ def __fspath__(self) -> AnyStr: ...
+_PathType = Union[bytes, Text, _PathLike[str], _PathLike[bytes], local]
+
+class local(_PathLike[str]):
+ class ImportMismatchError(ImportError): ...
+
+ sep: Final[str]
+ strpath: Final[str]
+
+ def __init__(self, path: _PathType = ..., expanduser: bool = ...) -> None: ...
+ def __hash__(self) -> int: ...
+ def __eq__(self, other: object) -> bool: ...
+ def __ne__(self, other: object) -> bool: ...
+ def __lt__(self, other: object) -> bool: ...
+ def __gt__(self, other: object) -> bool: ...
+ def __add__(self, other: object) -> local: ...
+ def __cmp__(self, other: object) -> int: ...
+ def __div__(self, other: _PathType) -> local: ...
+ def __truediv__(self, other: _PathType) -> local: ...
+ def __fspath__(self) -> str: ...
+
+ @classmethod
+ def get_temproot(cls) -> local: ...
+ @classmethod
+ def make_numbered_dir(
+ cls,
+ prefix: str = ...,
+ rootdir: Optional[local] = ...,
+ keep: Optional[int] = ...,
+ lock_timeout: int = ...,
+ ) -> local: ...
+ @classmethod
+ def mkdtemp(cls, rootdir: Optional[local] = ...) -> local: ...
+ @classmethod
+ def sysfind(
+ cls,
+ name: _PathType,
+ checker: Optional[Callable[[local], bool]] = ...,
+ paths: Optional[Iterable[_PathType]] = ...,
+ ) -> Optional[local]: ...
+
+ @property
+ def basename(self) -> str: ...
+ @property
+ def dirname(self) -> str: ...
+ @property
+ def purebasename(self) -> str: ...
+ @property
+ def ext(self) -> str: ...
+
+ def as_cwd(self) -> ContextManager[Optional[local]]: ...
+ def atime(self) -> float: ...
+ def bestrelpath(self, dest: local) -> str: ...
+ def chdir(self) -> local: ...
+ def check(
+ self,
+ *,
+ basename: int = ..., notbasename: int = ...,
+ basestarts: int = ..., notbasestarts: int = ...,
+ dir: int = ..., notdir: int = ...,
+ dotfile: int = ..., notdotfile: int = ...,
+ endswith: int = ..., notendswith: int = ...,
+ exists: int = ..., notexists: int = ...,
+ ext: int = ..., notext: int = ...,
+ file: int = ..., notfile: int = ...,
+ fnmatch: int = ..., notfnmatch: int = ...,
+ link: int = ..., notlink: int = ...,
+ relto: int = ..., notrelto: int = ...,
+ ) -> bool: ...
+ def chmod(self, mode: int, rec: Union[int, str, Text, Callable[[local], bool]] = ...) -> None: ...
+ if sys.platform != 'win32':
+ def chown(self, user: Union[int, str], group: Union[int, str], rec: int = ...) -> None: ...
+ def common(self, other: local) -> Optional[local]: ...
+ def computehash(self, hashtype: str = ..., chunksize: int = ...) -> str: ...
+ def copy(self, target: local, mode: bool = ..., stat: bool = ...) -> None: ...
+ def dirpath(self, *args: _PathType, abs: int = ...) -> local: ...
+ def dump(self, obj: Any, bin: Optional[int] = ...) -> None: ...
+ def ensure(self, *args: _PathType, dir: int = ...) -> local: ...
+ def ensure_dir(self, *args: _PathType) -> local: ...
+ def exists(self) -> bool: ...
+    def fnmatch(self, pattern: str) -> _FNMatcher: ...
+ def isdir(self) -> bool: ...
+ def isfile(self) -> bool: ...
+ def islink(self) -> bool: ...
+ def join(self, *args: _PathType, abs: int = ...) -> local: ...
+ def listdir(
+ self,
+ fil: Optional[Union[str, Text, Callable[[local], bool]]] = ...,
+ sort: Optional[bool] = ...,
+ ) -> List[local]: ...
+ def load(self) -> Any: ...
+ def lstat(self) -> _Stat: ...
+ def mkdir(self, *args: _PathType) -> local: ...
+ if sys.platform != 'win32':
+ def mklinkto(self, oldname: Union[str, local]) -> None: ...
+ def mksymlinkto(self, value: local, absolute: int = ...) -> None: ...
+ def move(self, target: local) -> None: ...
+ def mtime(self) -> float: ...
+ def new(
+ self,
+ *,
+ drive: str = ...,
+ dirname: str = ...,
+ basename: str = ...,
+ purebasename: str = ...,
+ ext: str = ...,
+ ) -> local: ...
+ def open(self, mode: str = ..., ensure: bool = ..., encoding: Optional[str] = ...) -> IO[Any]: ...
+ def parts(self, reverse: bool = ...) -> List[local]: ...
+ def pyimport(
+ self,
+ modname: Optional[str] = ...,
+ ensuresyspath: Union[bool, Literal["append", "importlib"]] = ...,
+ ) -> Any: ...
+ def pypkgpath(self) -> Optional[local]: ...
+ def read(self, mode: str = ...) -> Union[Text, bytes]: ...
+ def read_binary(self) -> bytes: ...
+ def read_text(self, encoding: str) -> Text: ...
+ def readlines(self, cr: int = ...) -> List[str]: ...
+ if sys.platform != 'win32':
+ def readlink(self) -> str: ...
+ def realpath(self) -> local: ...
+ def relto(self, relpath: Union[str, local]) -> str: ...
+ def remove(self, rec: int = ..., ignore_errors: bool = ...) -> None: ...
+ def rename(self, target: _PathType) -> None: ...
+ def samefile(self, other: _PathType) -> bool: ...
+ def setmtime(self, mtime: Optional[float] = ...) -> None: ...
+ def size(self) -> int: ...
+ def stat(self, raising: bool = ...) -> _Stat: ...
+ def sysexec(self, *argv: Any, **popen_opts: Any) -> Text: ...
+ def visit(
+ self,
+ fil: Optional[Union[str, Text, Callable[[local], bool]]] = ...,
+ rec: Optional[Union[Literal[1, True], str, Text, Callable[[local], bool]]] = ...,
+ ignore: Type[Exception] = ...,
+ bf: bool = ...,
+ sort: bool = ...,
+ ) -> Iterator[local]: ...
+ def write(self, data: Any, mode: str = ..., ensure: bool = ...) -> None: ...
+ def write_binary(self, data: bytes, ensure: bool = ...) -> None: ...
+ def write_text(self, data: Union[str, Text], encoding: str, ensure: bool = ...) -> None: ...
+
+
+# Untyped types below here.
+svnwc: Any
+svnurl: Any
+SvnAuth: Any
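The local class annotated above is the py.path.local runtime object; a short usage sketch matching these signatures (illustrative only, the implementation is not in this hunk)::

    import py

    tmp = py.path.local.mkdtemp()                # fresh directory under the system temp root
    note = tmp.join("notes", "todo.txt")
    note.write_text(u"remember the tests", encoding="utf-8", ensure=True)
    assert note.check(file=1)                    # keyword-style existence checks
    assert note.read_text("utf-8") == u"remember the tests"
    for p in tmp.visit(fil="*.txt"):             # recursive traversal with an fnmatch filter
        print(p.relto(tmp))
    tmp.remove(rec=1)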
diff --git a/testing/web-platform/tests/tools/third_party/py/py/py.typed b/testing/web-platform/tests/tools/third_party/py/py/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/py/py/test.py b/testing/web-platform/tests/tools/third_party/py/py/test.py
new file mode 100644
index 0000000000..aa5beb1789
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/test.py
@@ -0,0 +1,10 @@
+import sys
+if __name__ == '__main__':
+ import pytest
+ sys.exit(pytest.main())
+else:
+ import sys, pytest
+ sys.modules['py.test'] = pytest
+
+# for more API entry points see the 'tests' definition
+# in __init__.py
diff --git a/testing/web-platform/tests/tools/third_party/py/py/xml.pyi b/testing/web-platform/tests/tools/third_party/py/py/xml.pyi
new file mode 100644
index 0000000000..9c44480a5f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/py/xml.pyi
@@ -0,0 +1,25 @@
+from typing import ClassVar, Generic, Iterable, Text, Type, Union
+from typing_extensions import Final
+
+class raw:
+ uniobj: Final[Text]
+ def __init__(self, uniobj: Text) -> None: ...
+
+class _NamespaceMetaclass(type):
+ def __getattr__(self, name: str) -> Type[Tag]: ...
+
+class Namespace(metaclass=_NamespaceMetaclass): ...
+
+class Tag(list):
+ class Attr:
+ def __getattr__(self, attr: str) -> Text: ...
+ attr: Final[Attr]
+ def __init__(self, *args: Union[Text, raw, Tag, Iterable[Tag]], **kwargs: Union[Text, raw]) -> None: ...
+ def unicode(self, indent: int = ...) -> Text: ...
+
+class html(Namespace):
+ class Style:
+ def __init__(self, **kw: Union[str, Text]) -> None: ...
+ style: ClassVar[Style]
+
+def escape(ustring: Union[str, Text]) -> Text: ...
diff --git a/testing/web-platform/tests/tools/third_party/py/pyproject.toml b/testing/web-platform/tests/tools/third_party/py/pyproject.toml
new file mode 100644
index 0000000000..e386ea0b27
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/pyproject.toml
@@ -0,0 +1,6 @@
+[build-system]
+requires = [
+ "setuptools",
+ "setuptools_scm[toml]",
+]
+build-backend = "setuptools.build_meta"
diff --git a/testing/web-platform/tests/tools/third_party/py/setup.cfg b/testing/web-platform/tests/tools/third_party/py/setup.cfg
new file mode 100644
index 0000000000..5f25c2febf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/setup.cfg
@@ -0,0 +1,8 @@
+[wheel]
+universal = 1
+
+[metadata]
+license_file = LICENSE
+
+[devpi:upload]
+formats=sdist.tgz,bdist_wheel
diff --git a/testing/web-platform/tests/tools/third_party/py/setup.py b/testing/web-platform/tests/tools/third_party/py/setup.py
new file mode 100644
index 0000000000..5948ef0047
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/setup.py
@@ -0,0 +1,48 @@
+from setuptools import setup, find_packages
+
+
+def main():
+ setup(
+ name='py',
+ description='library with cross-python path, ini-parsing, io, code, log facilities',
+ long_description=open('README.rst').read(),
+ use_scm_version={"write_to": "py/_version.py"},
+ setup_requires=["setuptools_scm"],
+ url='https://py.readthedocs.io/',
+ license='MIT license',
+ platforms=['unix', 'linux', 'osx', 'cygwin', 'win32'],
+ python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
+ author='holger krekel, Ronny Pfannschmidt, Benjamin Peterson and others',
+ author_email='pytest-dev@python.org',
+ classifiers=['Development Status :: 6 - Mature',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: POSIX',
+ 'Operating System :: Microsoft :: Windows',
+ 'Operating System :: MacOS :: MacOS X',
+ 'Topic :: Software Development :: Testing',
+ 'Topic :: Software Development :: Libraries',
+ 'Topic :: Utilities',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
+ 'Programming Language :: Python :: 3.10',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ ],
+ packages=find_packages(exclude=['tasks', 'testing']),
+ include_package_data=True,
+ zip_safe=False,
+ package_data={
+ "": ["py.typed"],
+ },
+ )
+
+if __name__ == '__main__':
+ main()
diff --git a/testing/web-platform/tests/tools/third_party/py/tasks/__init__.py b/testing/web-platform/tests/tools/third_party/py/tasks/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/tasks/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/py/tasks/vendoring.py b/testing/web-platform/tests/tools/third_party/py/tasks/vendoring.py
new file mode 100644
index 0000000000..3c7d6015cf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/tasks/vendoring.py
@@ -0,0 +1,41 @@
+from __future__ import absolute_import, print_function
+import os.path
+import shutil
+import subprocess
+import sys
+
+VENDOR_TARGET = "py/_vendored_packages"
+GOOD_FILES = ('README.md', '__init__.py')
+
+
+def remove_libs():
+ print("removing vendored libs")
+ for filename in os.listdir(VENDOR_TARGET):
+ if filename not in GOOD_FILES:
+ path = os.path.join(VENDOR_TARGET, filename)
+ print(" ", path)
+ if os.path.isfile(path):
+ os.remove(path)
+ else:
+ shutil.rmtree(path)
+
+
+def update_libs():
+ print("installing libs")
+ subprocess.check_call((
+ sys.executable, '-m', 'pip', 'install',
+ '--target', VENDOR_TARGET, 'apipkg', 'iniconfig',
+ ))
+ subprocess.check_call(('git', 'add', VENDOR_TARGET))
+ print("Please commit to finish the update after running the tests:")
+ print()
+ print(' git commit -am "Updated vendored libs"')
+
+
+def main():
+ remove_libs()
+ update_libs()
+
+
+if __name__ == '__main__':
+ exit(main())
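The vendoring helper above is a two-step refresh of py/_vendored_packages; it can be run as a plain script or imported, for example (hypothetical invocation from the vendored py project's root)::

    #   python tasks/vendoring.py
    # or, equivalently, step by step:
    from tasks.vendoring import remove_libs, update_libs
    remove_libs()   # delete everything except README.md / __init__.py
    update_libs()   # pip install --target py/_vendored_packages apipkg iniconfig, then git add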
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/code/test_assertion.py b/testing/web-platform/tests/tools/third_party/py/testing/code/test_assertion.py
new file mode 100644
index 0000000000..e2a7f90399
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/code/test_assertion.py
@@ -0,0 +1,305 @@
+import pytest, py
+import re
+
+def exvalue():
+ import sys
+ return sys.exc_info()[1]
+
+def f():
+ return 2
+
+def test_assert():
+ try:
+ assert f() == 3
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith('assert 2 == 3\n')
+
+
+def test_assert_within_finally():
+ excinfo = py.test.raises(ZeroDivisionError, """
+ try:
+ 1/0
+ finally:
+ i = 42
+ """)
+ s = excinfo.exconly()
+ assert re.search("ZeroDivisionError:.*division", s) is not None
+
+
+def test_assert_multiline_1():
+ try:
+ assert (f() ==
+ 3)
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith('assert 2 == 3\n')
+
+def test_assert_multiline_2():
+ try:
+ assert (f() == (4,
+ 3)[-1])
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith('assert 2 ==')
+
+def test_in():
+ try:
+ assert "hi" in [1, 2]
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith("assert 'hi' in")
+
+def test_is():
+ try:
+ assert 1 is 2
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith("assert 1 is 2")
+
+
+def test_attrib():
+ class Foo(object):
+ b = 1
+ i = Foo()
+ try:
+ assert i.b == 2
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith("assert 1 == 2")
+
+def test_attrib_inst():
+ class Foo(object):
+ b = 1
+ try:
+ assert Foo().b == 2
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith("assert 1 == 2")
+
+def test_len():
+ l = list(range(42))
+ try:
+ assert len(l) == 100
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert s.startswith("assert 42 == 100")
+ assert "where 42 = len([" in s
+
+
+def test_assert_keyword_arg():
+ def f(x=3):
+ return False
+ try:
+ assert f(x=5)
+ except AssertionError:
+ e = exvalue()
+ assert "x=5" in str(e)
+
+# These tests should both fail, but should fail nicely...
+class WeirdRepr:
+ def __repr__(self):
+ return '<WeirdRepr\nsecond line>'
+
+def bug_test_assert_repr():
+ v = WeirdRepr()
+ try:
+ assert v == 1
+ except AssertionError:
+ e = exvalue()
+ assert str(e).find('WeirdRepr') != -1
+ assert str(e).find('second line') != -1
+ assert 0
+
+def test_assert_non_string():
+ try:
+ assert 0, ['list']
+ except AssertionError:
+ e = exvalue()
+ assert str(e).find("list") != -1
+
+def test_assert_implicit_multiline():
+ try:
+ x = [1,2,3]
+ assert x != [1,
+ 2, 3]
+ except AssertionError:
+ e = exvalue()
+ assert str(e).find('assert [1, 2, 3] !=') != -1
+
+@py.test.mark.xfail(py.test.__version__[0] != "2",
+ reason="broken on modern pytest",
+ run=False
+)
+def test_assert_with_brokenrepr_arg():
+ class BrokenRepr:
+ def __repr__(self): 0 / 0
+ e = AssertionError(BrokenRepr())
+ if e.msg.find("broken __repr__") == -1:
+ py.test.fail("broken __repr__ not handle correctly")
+
+def test_multiple_statements_per_line():
+ try:
+ a = 1; assert a == 2
+ except AssertionError:
+ e = exvalue()
+ assert "assert 1 == 2" in str(e)
+
+def test_power():
+ try:
+ assert 2**3 == 7
+ except AssertionError:
+ e = exvalue()
+ assert "assert (2 ** 3) == 7" in str(e)
+
+
+class TestView:
+
+ def setup_class(cls):
+ cls.View = py.test.importorskip("py._code._assertionold").View
+
+ def test_class_dispatch(self):
+ ### Use a custom class hierarchy with existing instances
+
+ class Picklable(self.View):
+ pass
+
+ class Simple(Picklable):
+ __view__ = object
+ def pickle(self):
+ return repr(self.__obj__)
+
+ class Seq(Picklable):
+ __view__ = list, tuple, dict
+ def pickle(self):
+ return ';'.join(
+ [Picklable(item).pickle() for item in self.__obj__])
+
+ class Dict(Seq):
+ __view__ = dict
+ def pickle(self):
+ return Seq.pickle(self) + '!' + Seq(self.values()).pickle()
+
+ assert Picklable(123).pickle() == '123'
+ assert Picklable([1,[2,3],4]).pickle() == '1;2;3;4'
+ assert Picklable({1:2}).pickle() == '1!2'
+
+ def test_viewtype_class_hierarchy(self):
+ # Use a custom class hierarchy based on attributes of existing instances
+ class Operation:
+ "Existing class that I don't want to change."
+ def __init__(self, opname, *args):
+ self.opname = opname
+ self.args = args
+
+ existing = [Operation('+', 4, 5),
+ Operation('getitem', '', 'join'),
+ Operation('setattr', 'x', 'y', 3),
+ Operation('-', 12, 1)]
+
+ class PyOp(self.View):
+ def __viewkey__(self):
+ return self.opname
+ def generate(self):
+ return '%s(%s)' % (self.opname, ', '.join(map(repr, self.args)))
+
+ class PyBinaryOp(PyOp):
+ __view__ = ('+', '-', '*', '/')
+ def generate(self):
+ return '%s %s %s' % (self.args[0], self.opname, self.args[1])
+
+ codelines = [PyOp(op).generate() for op in existing]
+ assert codelines == ["4 + 5", "getitem('', 'join')",
+ "setattr('x', 'y', 3)", "12 - 1"]
+
+def test_underscore_api():
+ py.code._AssertionError
+ py.code._reinterpret_old # used by pypy
+ py.code._reinterpret
+
+def test_assert_customizable_reprcompare(monkeypatch):
+ util = pytest.importorskip("_pytest.assertion.util")
+ monkeypatch.setattr(util, '_reprcompare', lambda *args: 'hello')
+ try:
+ assert 3 == 4
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert "hello" in s
+
+def test_assert_long_source_1():
+ try:
+ assert len == [
+ (None, ['somet text', 'more text']),
+ ]
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert 're-run' not in s
+ assert 'somet text' in s
+
+def test_assert_long_source_2():
+ try:
+ assert(len == [
+ (None, ['somet text', 'more text']),
+ ])
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert 're-run' not in s
+ assert 'somet text' in s
+
+def test_assert_raise_alias(testdir):
+ testdir.makepyfile("""
+ import sys
+ EX = AssertionError
+ def test_hello():
+ raise EX("hello"
+ "multi"
+ "line")
+ """)
+ result = testdir.runpytest()
+ result.stdout.fnmatch_lines([
+ "*def test_hello*",
+ "*raise EX*",
+ "*1 failed*",
+ ])
+
+@py.test.mark.xfail(py.test.__version__[0] != "2",
+ reason="broken on modern pytest",
+ run=False)
+def test_assert_raise_subclass():
+ class SomeEx(AssertionError):
+ def __init__(self, *args):
+ super(SomeEx, self).__init__()
+ try:
+ raise SomeEx("hello")
+ except AssertionError as e:
+ s = str(e)
+ assert 're-run' not in s
+ assert 'could not determine' in s
+
+def test_assert_raises_in_nonzero_of_object_pytest_issue10():
+ class A(object):
+ def __nonzero__(self):
+ raise ValueError(42)
+ def __lt__(self, other):
+ return A()
+ def __repr__(self):
+ return "<MY42 object>"
+ def myany(x):
+ return True
+ try:
+ assert not(myany(A() < 0))
+ except AssertionError:
+ e = exvalue()
+ s = str(e)
+ assert "<MY42 object> < 0" in s
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/code/test_code.py b/testing/web-platform/tests/tools/third_party/py/testing/code/test_code.py
new file mode 100644
index 0000000000..28ec628b00
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/code/test_code.py
@@ -0,0 +1,159 @@
+import py
+import sys
+
+def test_ne():
+ code1 = py.code.Code(compile('foo = "bar"', '', 'exec'))
+ assert code1 == code1
+ code2 = py.code.Code(compile('foo = "baz"', '', 'exec'))
+ assert code2 != code1
+
+def test_code_gives_back_name_for_not_existing_file():
+ name = 'abc-123'
+ co_code = compile("pass\n", name, 'exec')
+ assert co_code.co_filename == name
+ code = py.code.Code(co_code)
+ assert str(code.path) == name
+ assert code.fullsource is None
+
+def test_code_with_class():
+ class A:
+ pass
+ py.test.raises(TypeError, "py.code.Code(A)")
+
+if True:
+ def x():
+ pass
+
+def test_code_fullsource():
+ code = py.code.Code(x)
+ full = code.fullsource
+ assert 'test_code_fullsource()' in str(full)
+
+def test_code_source():
+ code = py.code.Code(x)
+ src = code.source()
+ expected = """def x():
+ pass"""
+ assert str(src) == expected
+
+def test_frame_getsourcelineno_myself():
+ def func():
+ return sys._getframe(0)
+ f = func()
+ f = py.code.Frame(f)
+ source, lineno = f.code.fullsource, f.lineno
+ assert source[lineno].startswith(" return sys._getframe(0)")
+
+def test_getstatement_empty_fullsource():
+ def func():
+ return sys._getframe(0)
+ f = func()
+ f = py.code.Frame(f)
+ prop = f.code.__class__.fullsource
+ try:
+ f.code.__class__.fullsource = None
+ assert f.statement == py.code.Source("")
+ finally:
+ f.code.__class__.fullsource = prop
+
+def test_code_from_func():
+ co = py.code.Code(test_frame_getsourcelineno_myself)
+ assert co.firstlineno
+ assert co.path
+
+
+
+def test_builtin_patch_unpatch(monkeypatch):
+ cpy_builtin = py.builtin.builtins
+ comp = cpy_builtin.compile
+ def mycompile(*args, **kwargs):
+ return comp(*args, **kwargs)
+ class Sub(AssertionError):
+ pass
+ monkeypatch.setattr(cpy_builtin, 'AssertionError', Sub)
+ monkeypatch.setattr(cpy_builtin, 'compile', mycompile)
+ py.code.patch_builtins()
+ assert cpy_builtin.AssertionError != Sub
+ assert cpy_builtin.compile != mycompile
+ py.code.unpatch_builtins()
+ assert cpy_builtin.AssertionError is Sub
+ assert cpy_builtin.compile == mycompile
+
+
+def test_unicode_handling():
+ value = py.builtin._totext('\xc4\x85\xc4\x87\n', 'utf-8').encode('utf8')
+ def f():
+ raise Exception(value)
+ excinfo = py.test.raises(Exception, f)
+ s = str(excinfo)
+ if sys.version_info[0] < 3:
+ u = unicode(excinfo)
+
+def test_code_getargs():
+ def f1(x):
+ pass
+ c1 = py.code.Code(f1)
+ assert c1.getargs(var=True) == ('x',)
+
+ def f2(x, *y):
+ pass
+ c2 = py.code.Code(f2)
+ assert c2.getargs(var=True) == ('x', 'y')
+
+ def f3(x, **z):
+ pass
+ c3 = py.code.Code(f3)
+ assert c3.getargs(var=True) == ('x', 'z')
+
+ def f4(x, *y, **z):
+ pass
+ c4 = py.code.Code(f4)
+ assert c4.getargs(var=True) == ('x', 'y', 'z')
+
+
+def test_frame_getargs():
+ def f1(x):
+ return sys._getframe(0)
+ fr1 = py.code.Frame(f1('a'))
+ assert fr1.getargs(var=True) == [('x', 'a')]
+
+ def f2(x, *y):
+ return sys._getframe(0)
+ fr2 = py.code.Frame(f2('a', 'b', 'c'))
+ assert fr2.getargs(var=True) == [('x', 'a'), ('y', ('b', 'c'))]
+
+ def f3(x, **z):
+ return sys._getframe(0)
+ fr3 = py.code.Frame(f3('a', b='c'))
+ assert fr3.getargs(var=True) == [('x', 'a'), ('z', {'b': 'c'})]
+
+ def f4(x, *y, **z):
+ return sys._getframe(0)
+ fr4 = py.code.Frame(f4('a', 'b', c='d'))
+ assert fr4.getargs(var=True) == [('x', 'a'), ('y', ('b',)),
+ ('z', {'c': 'd'})]
+
+
+class TestExceptionInfo:
+
+ def test_bad_getsource(self):
+ try:
+ if False: pass
+ else: assert False
+ except AssertionError:
+ exci = py.code.ExceptionInfo()
+ assert exci.getrepr()
+
+
+class TestTracebackEntry:
+
+ def test_getsource(self):
+ try:
+ if False: pass
+ else: assert False
+ except AssertionError:
+ exci = py.code.ExceptionInfo()
+ entry = exci.traceback[0]
+ source = entry.getsource()
+ assert len(source) == 4
+ assert 'else: assert False' in source[3]
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/code/test_excinfo.py b/testing/web-platform/tests/tools/third_party/py/testing/code/test_excinfo.py
new file mode 100644
index 0000000000..c148ab8cfb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/code/test_excinfo.py
@@ -0,0 +1,956 @@
+# -*- coding: utf-8 -*-
+
+import py
+import pytest
+import sys
+from test_source import astonly
+
+from py._code.code import FormattedExcinfo, ReprExceptionInfo
+queue = py.builtin._tryimport('queue', 'Queue')
+
+failsonjython = py.test.mark.xfail("sys.platform.startswith('java')")
+
+try:
+ import importlib
+except ImportError:
+ invalidate_import_caches = None
+else:
+ invalidate_import_caches = getattr(importlib, "invalidate_caches", None)
+
+
+pytest_version_info = tuple(map(int, pytest.__version__.split(".")[:3]))
+
+broken_on_modern_pytest = pytest.mark.xfail(
+ pytest_version_info[0] != 2,
+ reason="this test hasn't been fixed after moving py.code into pytest",
+ run=False
+ )
+
+
+class TWMock:
+ def __init__(self):
+ self.lines = []
+
+ def sep(self, sep, line=None):
+ self.lines.append((sep, line))
+
+ def line(self, line, **kw):
+ self.lines.append(line)
+
+ def markup(self, text, **kw):
+ return text
+
+ fullwidth = 80
+
+
+def test_excinfo_simple():
+ try:
+ raise ValueError
+ except ValueError:
+ info = py.code.ExceptionInfo()
+ assert info.type == ValueError
+
+
+def test_excinfo_getstatement():
+ def g():
+ raise ValueError
+
+ def f():
+ g()
+ try:
+ f()
+ except ValueError:
+ excinfo = py.code.ExceptionInfo()
+ linenumbers = [
+ py.code.getrawcode(f).co_firstlineno-1+3,
+ py.code.getrawcode(f).co_firstlineno-1+1,
+ py.code.getrawcode(g).co_firstlineno-1+1,
+ ]
+ l = list(excinfo.traceback)
+ foundlinenumbers = [x.lineno for x in l]
+ assert foundlinenumbers == linenumbers
+ #for x in info:
+ # print "%s:%d %s" %(x.path.relto(root), x.lineno, x.statement)
+ #xxx
+
+# testchain for getentries test below
+def f():
+ #
+ raise ValueError
+ #
+def g():
+ #
+ __tracebackhide__ = True
+ f()
+ #
+def h():
+ #
+ g()
+ #
+
+class TestTraceback_f_g_h:
+ def setup_method(self, method):
+ try:
+ h()
+ except ValueError:
+ self.excinfo = py.code.ExceptionInfo()
+
+ def test_traceback_entries(self):
+ tb = self.excinfo.traceback
+ entries = list(tb)
+ assert len(tb) == 4 # maybe fragile test
+ assert len(entries) == 4 # maybe fragile test
+ names = ['f', 'g', 'h']
+ for entry in entries:
+ try:
+ names.remove(entry.frame.code.name)
+ except ValueError:
+ pass
+ assert not names
+
+ def test_traceback_entry_getsource(self):
+ tb = self.excinfo.traceback
+ s = str(tb[-1].getsource())
+ assert s.startswith("def f():")
+ assert s.endswith("raise ValueError")
+
+ @astonly
+ @failsonjython
+ def test_traceback_entry_getsource_in_construct(self):
+ source = py.code.Source("""\
+ def xyz():
+ try:
+ raise ValueError
+ except somenoname:
+ pass
+ xyz()
+ """)
+ try:
+ exec (source.compile())
+ except NameError:
+ tb = py.code.ExceptionInfo().traceback
+ print (tb[-1].getsource())
+ s = str(tb[-1].getsource())
+ assert s.startswith("def xyz():\n try:")
+ assert s.strip().endswith("except somenoname:")
+
+ def test_traceback_cut(self):
+ co = py.code.Code(f)
+ path, firstlineno = co.path, co.firstlineno
+ traceback = self.excinfo.traceback
+ newtraceback = traceback.cut(path=path, firstlineno=firstlineno)
+ assert len(newtraceback) == 1
+ newtraceback = traceback.cut(path=path, lineno=firstlineno+2)
+ assert len(newtraceback) == 1
+
+ def test_traceback_cut_excludepath(self, testdir):
+ p = testdir.makepyfile("def f(): raise ValueError")
+ excinfo = py.test.raises(ValueError, "p.pyimport().f()")
+ basedir = py.path.local(py.test.__file__).dirpath()
+ newtraceback = excinfo.traceback.cut(excludepath=basedir)
+ for x in newtraceback:
+ if hasattr(x, 'path'):
+ assert not py.path.local(x.path).relto(basedir)
+ assert newtraceback[-1].frame.code.path == p
+
+ def test_traceback_filter(self):
+ traceback = self.excinfo.traceback
+ ntraceback = traceback.filter()
+ assert len(ntraceback) == len(traceback) - 1
+
+ def test_traceback_recursion_index(self):
+ def f(n):
+ if n < 10:
+ n += 1
+ f(n)
+ excinfo = py.test.raises(RuntimeError, f, 8)
+ traceback = excinfo.traceback
+ recindex = traceback.recursionindex()
+ assert recindex == 3
+
+ def test_traceback_only_specific_recursion_errors(self, monkeypatch):
+ def f(n):
+ if n == 0:
+ raise RuntimeError("hello")
+ f(n-1)
+
+ excinfo = pytest.raises(RuntimeError, f, 100)
+ monkeypatch.delattr(excinfo.traceback.__class__, "recursionindex")
+ repr = excinfo.getrepr()
+ assert "RuntimeError: hello" in str(repr.reprcrash)
+
+ def test_traceback_no_recursion_index(self):
+ def do_stuff():
+ raise RuntimeError
+
+ def reraise_me():
+ import sys
+ exc, val, tb = sys.exc_info()
+ py.builtin._reraise(exc, val, tb)
+
+ def f(n):
+ try:
+ do_stuff()
+ except:
+ reraise_me()
+ excinfo = py.test.raises(RuntimeError, f, 8)
+ traceback = excinfo.traceback
+ recindex = traceback.recursionindex()
+ assert recindex is None
+
+ def test_traceback_messy_recursion(self):
+ # XXX: simplified locally testable version
+ decorator = py.test.importorskip('decorator').decorator
+
+ def log(f, *k, **kw):
+ print('%s %s' % (k, kw))
+ f(*k, **kw)
+ log = decorator(log)
+
+ def fail():
+ raise ValueError('')
+
+ fail = log(log(fail))
+
+ excinfo = py.test.raises(ValueError, fail)
+ assert excinfo.traceback.recursionindex() is None
+
+ def test_traceback_getcrashentry(self):
+ def i():
+ __tracebackhide__ = True
+ raise ValueError
+
+ def h():
+ i()
+
+ def g():
+ __tracebackhide__ = True
+ h()
+
+ def f():
+ g()
+
+ excinfo = py.test.raises(ValueError, f)
+ tb = excinfo.traceback
+ entry = tb.getcrashentry()
+ co = py.code.Code(h)
+ assert entry.frame.code.path == co.path
+ assert entry.lineno == co.firstlineno + 1
+ assert entry.frame.code.name == 'h'
+
+ def test_traceback_getcrashentry_empty(self):
+ def g():
+ __tracebackhide__ = True
+ raise ValueError
+
+ def f():
+ __tracebackhide__ = True
+ g()
+
+ excinfo = py.test.raises(ValueError, f)
+ tb = excinfo.traceback
+ entry = tb.getcrashentry()
+ co = py.code.Code(g)
+ assert entry.frame.code.path == co.path
+ assert entry.lineno == co.firstlineno + 2
+ assert entry.frame.code.name == 'g'
+
+
+def hello(x):
+ x + 5
+
+
+def test_tbentry_reinterpret():
+ try:
+ hello("hello")
+ except TypeError:
+ excinfo = py.code.ExceptionInfo()
+ tbentry = excinfo.traceback[-1]
+ msg = tbentry.reinterpret()
+ assert msg.startswith("TypeError: ('hello' + 5)")
+
+
+def test_excinfo_exconly():
+ excinfo = py.test.raises(ValueError, h)
+ assert excinfo.exconly().startswith('ValueError')
+ excinfo = py.test.raises(ValueError,
+ "raise ValueError('hello\\nworld')")
+ msg = excinfo.exconly(tryshort=True)
+ assert msg.startswith('ValueError')
+ assert msg.endswith("world")
+
+
+def test_excinfo_repr():
+ excinfo = py.test.raises(ValueError, h)
+ s = repr(excinfo)
+ assert s == "<ExceptionInfo ValueError tblen=4>"
+
+
+def test_excinfo_str():
+ excinfo = py.test.raises(ValueError, h)
+ s = str(excinfo)
+ assert s.startswith(__file__[:-9]) # pyc file and $py.class
+ assert s.endswith("ValueError")
+ assert len(s.split(":")) >= 3 # on windows it's 4
+
+
+def test_excinfo_errisinstance():
+ excinfo = py.test.raises(ValueError, h)
+ assert excinfo.errisinstance(ValueError)
+
+
+def test_excinfo_no_sourcecode():
+ try:
+ exec ("raise ValueError()")
+ except ValueError:
+ excinfo = py.code.ExceptionInfo()
+ s = str(excinfo.traceback[-1])
+    assert s == "  File '<string>':1 in <module>\n  ???\n"
+
+
+def test_excinfo_no_python_sourcecode(tmpdir):
+ #XXX: simplified locally testable version
+ tmpdir.join('test.txt').write("{{ h()}}:")
+
+ jinja2 = py.test.importorskip('jinja2')
+ loader = jinja2.FileSystemLoader(str(tmpdir))
+ env = jinja2.Environment(loader=loader)
+ template = env.get_template('test.txt')
+ excinfo = py.test.raises(ValueError,
+ template.render, h=h)
+ for item in excinfo.traceback:
+ print(item) # XXX: for some reason jinja.Template.render is printed in full
+        item.source # shouldn't fail
+ if item.path.basename == 'test.txt':
+ assert str(item.source) == '{{ h()}}:'
+
+
+def test_entrysource_Queue_example():
+ try:
+ queue.Queue().get(timeout=0.001)
+ except queue.Empty:
+ excinfo = py.code.ExceptionInfo()
+ entry = excinfo.traceback[-1]
+ source = entry.getsource()
+ assert source is not None
+ s = str(source).strip()
+ assert s.startswith("def get")
+
+
+def test_codepath_Queue_example():
+ try:
+ queue.Queue().get(timeout=0.001)
+ except queue.Empty:
+ excinfo = py.code.ExceptionInfo()
+ entry = excinfo.traceback[-1]
+ path = entry.path
+ assert isinstance(path, py.path.local)
+ assert path.basename.lower() == "queue.py"
+ assert path.check()
+
+
+class TestFormattedExcinfo:
+ def pytest_funcarg__importasmod(self, request):
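+        # funcarg: write the given source into tmpdir/mod.py and import it as a module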
+ def importasmod(source):
+ source = py.code.Source(source)
+ tmpdir = request.getfuncargvalue("tmpdir")
+ modpath = tmpdir.join("mod.py")
+ tmpdir.ensure("__init__.py")
+ modpath.write(source)
+ if invalidate_import_caches is not None:
+ invalidate_import_caches()
+ return modpath.pyimport()
+ return importasmod
+
+ def excinfo_from_exec(self, source):
+ source = py.code.Source(source).strip()
+ try:
+ exec (source.compile())
+ except KeyboardInterrupt:
+ raise
+ except:
+ return py.code.ExceptionInfo()
+ assert 0, "did not raise"
+
+ def test_repr_source(self):
+ pr = FormattedExcinfo()
+ source = py.code.Source("""
+ def f(x):
+ pass
+ """).strip()
+ pr.flow_marker = "|"
+ lines = pr.get_source(source, 0)
+ assert len(lines) == 2
+        assert lines[0] == "|   def f(x):"
+        assert lines[1] == "        pass"
+
+ @broken_on_modern_pytest
+ def test_repr_source_excinfo(self):
+ """ check if indentation is right """
+ pr = FormattedExcinfo()
+ excinfo = self.excinfo_from_exec("""
+ def f():
+ assert 0
+ f()
+ """)
+ pr = FormattedExcinfo()
+ source = pr._getentrysource(excinfo.traceback[-1])
+ lines = pr.get_source(source, 1, excinfo)
+ assert lines == [
+            '    def f():',
+            '>       assert 0',
+            'E       assert 0'
+ ]
+
+ def test_repr_source_not_existing(self):
+ pr = FormattedExcinfo()
+ co = compile("raise ValueError()", "", "exec")
+ try:
+ exec (co)
+ except ValueError:
+ excinfo = py.code.ExceptionInfo()
+ repr = pr.repr_excinfo(excinfo)
+        assert repr.reprtraceback.reprentries[1].lines[0] == ">   ???"
+
+ def test_repr_many_line_source_not_existing(self):
+ pr = FormattedExcinfo()
+ co = compile("""
+a = 1
+raise ValueError()
+""", "", "exec")
+ try:
+ exec (co)
+ except ValueError:
+ excinfo = py.code.ExceptionInfo()
+ repr = pr.repr_excinfo(excinfo)
+        assert repr.reprtraceback.reprentries[1].lines[0] == ">   ???"
+
+ def test_repr_source_failing_fullsource(self):
+ pr = FormattedExcinfo()
+
+ class FakeCode(object):
+ class raw:
+ co_filename = '?'
+ path = '?'
+ firstlineno = 5
+
+ def fullsource(self):
+ return None
+ fullsource = property(fullsource)
+
+ class FakeFrame(object):
+ code = FakeCode()
+ f_locals = {}
+ f_globals = {}
+
+ class FakeTracebackEntry(py.code.Traceback.Entry):
+ def __init__(self, tb):
+ self.lineno = 5+3
+
+ @property
+ def frame(self):
+ return FakeFrame()
+
+ class Traceback(py.code.Traceback):
+ Entry = FakeTracebackEntry
+
+ class FakeExcinfo(py.code.ExceptionInfo):
+ typename = "Foo"
+ def __init__(self):
+ pass
+
+ def exconly(self, tryshort):
+ return "EXC"
+ def errisinstance(self, cls):
+ return False
+
+ excinfo = FakeExcinfo()
+ class FakeRawTB(object):
+ tb_next = None
+ tb = FakeRawTB()
+ excinfo.traceback = Traceback(tb)
+
+ fail = IOError()
+ repr = pr.repr_excinfo(excinfo)
+        assert repr.reprtraceback.reprentries[0].lines[0] == ">   ???"
+
+ fail = py.error.ENOENT
+ repr = pr.repr_excinfo(excinfo)
+        assert repr.reprtraceback.reprentries[0].lines[0] == ">   ???"
+
+
+ def test_repr_local(self):
+ p = FormattedExcinfo(showlocals=True)
+ loc = {'y': 5, 'z': 7, 'x': 3, '@x': 2, '__builtins__': {}}
+ reprlocals = p.repr_locals(loc)
+ assert reprlocals.lines
+ assert reprlocals.lines[0] == '__builtins__ = <builtins>'
+        assert reprlocals.lines[1] == 'x          = 3'
+        assert reprlocals.lines[2] == 'y          = 5'
+        assert reprlocals.lines[3] == 'z          = 7'
+
+ def test_repr_tracebackentry_lines(self, importasmod):
+ mod = importasmod("""
+ def func1():
+ raise ValueError("hello\\nworld")
+ """)
+ excinfo = py.test.raises(ValueError, mod.func1)
+ excinfo.traceback = excinfo.traceback.filter()
+ p = FormattedExcinfo()
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-1])
+
+        # test as intermediate entry
+ lines = reprtb.lines
+        assert lines[0] == '    def func1():'
+        assert lines[1] == '>       raise ValueError("hello\\nworld")'
+
+ # test as last entry
+ p = FormattedExcinfo(showlocals=True)
+ repr_entry = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = repr_entry.lines
+        assert lines[0] == '    def func1():'
+        assert lines[1] == '>       raise ValueError("hello\\nworld")'
+        assert lines[2] == 'E       ValueError: hello'
+        assert lines[3] == 'E       world'
+ assert not lines[4:]
+
+ loc = repr_entry.reprlocals is not None
+ loc = repr_entry.reprfileloc
+ assert loc.path == mod.__file__
+ assert loc.lineno == 3
+ #assert loc.message == "ValueError: hello"
+
+    def test_repr_tracebackentry_lines2(self, importasmod):
+ mod = importasmod("""
+ def func1(m, x, y, z):
+ raise ValueError("hello\\nworld")
+ """)
+ excinfo = py.test.raises(ValueError, mod.func1, "m"*90, 5, 13, "z"*120)
+ excinfo.traceback = excinfo.traceback.filter()
+ entry = excinfo.traceback[-1]
+ p = FormattedExcinfo(funcargs=True)
+ reprfuncargs = p.repr_args(entry)
+ assert reprfuncargs.args[0] == ('m', repr("m"*90))
+ assert reprfuncargs.args[1] == ('x', '5')
+ assert reprfuncargs.args[2] == ('y', '13')
+ assert reprfuncargs.args[3] == ('z', repr("z" * 120))
+
+ p = FormattedExcinfo(funcargs=True)
+ repr_entry = p.repr_traceback_entry(entry)
+ assert repr_entry.reprfuncargs.args == reprfuncargs.args
+ tw = TWMock()
+ repr_entry.toterminal(tw)
+ assert tw.lines[0] == "m = " + repr('m' * 90)
+ assert tw.lines[1] == "x = 5, y = 13"
+ assert tw.lines[2] == "z = " + repr('z' * 120)
+
+ def test_repr_tracebackentry_lines_var_kw_args(self, importasmod):
+ mod = importasmod("""
+ def func1(x, *y, **z):
+ raise ValueError("hello\\nworld")
+ """)
+ excinfo = py.test.raises(ValueError, mod.func1, 'a', 'b', c='d')
+ excinfo.traceback = excinfo.traceback.filter()
+ entry = excinfo.traceback[-1]
+ p = FormattedExcinfo(funcargs=True)
+ reprfuncargs = p.repr_args(entry)
+ assert reprfuncargs.args[0] == ('x', repr('a'))
+ assert reprfuncargs.args[1] == ('y', repr(('b',)))
+ assert reprfuncargs.args[2] == ('z', repr({'c': 'd'}))
+
+ p = FormattedExcinfo(funcargs=True)
+ repr_entry = p.repr_traceback_entry(entry)
+ assert repr_entry.reprfuncargs.args == reprfuncargs.args
+ tw = TWMock()
+ repr_entry.toterminal(tw)
+ assert tw.lines[0] == "x = 'a', y = ('b',), z = {'c': 'd'}"
+
+ def test_repr_tracebackentry_short(self, importasmod):
+ mod = importasmod("""
+ def func1():
+ raise ValueError("hello")
+ def entry():
+ func1()
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-2])
+ lines = reprtb.lines
+ basename = py.path.local(mod.__file__).basename
+        assert lines[0] == '    func1()'
+ assert basename in str(reprtb.reprfileloc.path)
+ assert reprtb.reprfileloc.lineno == 5
+
+ # test last entry
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = reprtb.lines
+        assert lines[0] == '    raise ValueError("hello")'
+        assert lines[1] == 'E   ValueError: hello'
+ assert basename in str(reprtb.reprfileloc.path)
+ assert reprtb.reprfileloc.lineno == 3
+
+ def test_repr_tracebackentry_no(self, importasmod):
+ mod = importasmod("""
+ def func1():
+ raise ValueError("hello")
+ def entry():
+ func1()
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+ p = FormattedExcinfo(style="no")
+ p.repr_traceback_entry(excinfo.traceback[-2])
+
+ p = FormattedExcinfo(style="no")
+ reprentry = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = reprentry.lines
+        assert lines[0] == 'E   ValueError: hello'
+ assert not lines[1:]
+
+ def test_repr_traceback_tbfilter(self, importasmod):
+ mod = importasmod("""
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+ p = FormattedExcinfo(tbfilter=True)
+ reprtb = p.repr_traceback(excinfo)
+ assert len(reprtb.reprentries) == 2
+ p = FormattedExcinfo(tbfilter=False)
+ reprtb = p.repr_traceback(excinfo)
+ assert len(reprtb.reprentries) == 3
+
+ def test_traceback_short_no_source(self, importasmod, monkeypatch):
+ mod = importasmod("""
+ def func1():
+ raise ValueError("hello")
+ def entry():
+ func1()
+ """)
+ try:
+ mod.entry()
+ except ValueError:
+ excinfo = py.code.ExceptionInfo()
+ from py._code.code import Code
+ monkeypatch.setattr(Code, 'path', 'bogus')
+ excinfo.traceback[0].frame.code.path = "bogus"
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-2])
+ lines = reprtb.lines
+ last_p = FormattedExcinfo(style="short")
+ last_reprtb = last_p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ last_lines = last_reprtb.lines
+ monkeypatch.undo()
+ basename = py.path.local(mod.__file__).basename
+        assert lines[0] == '    func1()'
+
+        assert last_lines[0] == '    raise ValueError("hello")'
+        assert last_lines[1] == 'E   ValueError: hello'
+
+ def test_repr_traceback_and_excinfo(self, importasmod):
+ mod = importasmod("""
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+
+ for style in ("long", "short"):
+ p = FormattedExcinfo(style=style)
+ reprtb = p.repr_traceback(excinfo)
+ assert len(reprtb.reprentries) == 2
+ assert reprtb.style == style
+ assert not reprtb.extraline
+ repr = p.repr_excinfo(excinfo)
+ assert repr.reprtraceback
+ assert len(repr.reprtraceback.reprentries) == len(reprtb.reprentries)
+ assert repr.reprcrash.path.endswith("mod.py")
+ assert repr.reprcrash.message == "ValueError: 0"
+
+ def test_repr_traceback_with_invalid_cwd(self, importasmod, monkeypatch):
+ mod = importasmod("""
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+
+ p = FormattedExcinfo()
+ def raiseos():
+ raise OSError(2)
+ monkeypatch.setattr('os.getcwd', raiseos)
+ assert p._makepath(__file__) == __file__
+ reprtb = p.repr_traceback(excinfo)
+
+ @broken_on_modern_pytest
+ def test_repr_excinfo_addouterr(self, importasmod):
+ mod = importasmod("""
+ def entry():
+ raise ValueError()
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+ repr = excinfo.getrepr()
+ repr.addsection("title", "content")
+ twmock = TWMock()
+ repr.toterminal(twmock)
+ assert twmock.lines[-1] == "content"
+ assert twmock.lines[-2] == ("-", "title")
+
+ def test_repr_excinfo_reprcrash(self, importasmod):
+ mod = importasmod("""
+ def entry():
+ raise ValueError()
+ """)
+ excinfo = py.test.raises(ValueError, mod.entry)
+ repr = excinfo.getrepr()
+ assert repr.reprcrash.path.endswith("mod.py")
+ assert repr.reprcrash.lineno == 3
+ assert repr.reprcrash.message == "ValueError"
+ assert str(repr.reprcrash).endswith("mod.py:3: ValueError")
+
+ def test_repr_traceback_recursion(self, importasmod):
+ mod = importasmod("""
+ def rec2(x):
+ return rec1(x+1)
+ def rec1(x):
+ return rec2(x-1)
+ def entry():
+ rec1(42)
+ """)
+ excinfo = py.test.raises(RuntimeError, mod.entry)
+
+ for style in ("short", "long", "no"):
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback(excinfo)
+ assert reprtb.extraline == "!!! Recursion detected (same locals & position)"
+ assert str(reprtb)
+
+ @broken_on_modern_pytest
+ def test_tb_entry_AssertionError(self, importasmod):
+ # probably this test is a bit redundant
+ # as py/magic/testing/test_assertion.py
+ # already tests correctness of
+ # assertion-reinterpretation logic
+ mod = importasmod("""
+ def somefunc():
+ x = 1
+ assert x == 2
+ """)
+ excinfo = py.test.raises(AssertionError, mod.somefunc)
+
+ p = FormattedExcinfo()
+ reprentry = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = reprentry.lines
+        assert lines[-1] == "E       assert 1 == 2"
+
+ def test_reprexcinfo_getrepr(self, importasmod):
+ mod = importasmod("""
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """)
+ try:
+ mod.entry()
+ except ValueError:
+ excinfo = py.code.ExceptionInfo()
+
+ for style in ("short", "long", "no"):
+ for showlocals in (True, False):
+ repr = excinfo.getrepr(style=style, showlocals=showlocals)
+ assert isinstance(repr, ReprExceptionInfo)
+ assert repr.reprtraceback.style == style
+
+ def test_reprexcinfo_unicode(self):
+ from py._code.code import TerminalRepr
+ class MyRepr(TerminalRepr):
+ def toterminal(self, tw):
+ tw.line(py.builtin._totext("Ñ", "utf-8"))
+ x = py.builtin._totext(MyRepr())
+ assert x == py.builtin._totext("Ñ", "utf-8")
+
+ @broken_on_modern_pytest
+ def test_toterminal_long(self, importasmod):
+ mod = importasmod("""
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """)
+ excinfo = py.test.raises(ValueError, mod.f)
+ excinfo.traceback = excinfo.traceback.filter()
+ repr = excinfo.getrepr()
+ tw = TWMock()
+ repr.toterminal(tw)
+ assert tw.lines[0] == ""
+ tw.lines.pop(0)
+        assert tw.lines[0] == "    def f():"
+        assert tw.lines[1] == ">       g(3)"
+ assert tw.lines[2] == ""
+ assert tw.lines[3].endswith("mod.py:5: ")
+ assert tw.lines[4] == ("_ ", None)
+ assert tw.lines[5] == ""
+        assert tw.lines[6] == "    def g(x):"
+        assert tw.lines[7] == ">       raise ValueError(x)"
+        assert tw.lines[8] == "E       ValueError: 3"
+ assert tw.lines[9] == ""
+ assert tw.lines[10].endswith("mod.py:3: ValueError")
+
+ @broken_on_modern_pytest
+ def test_toterminal_long_missing_source(self, importasmod, tmpdir):
+ mod = importasmod("""
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """)
+ excinfo = py.test.raises(ValueError, mod.f)
+ tmpdir.join('mod.py').remove()
+ excinfo.traceback = excinfo.traceback.filter()
+ repr = excinfo.getrepr()
+ tw = TWMock()
+ repr.toterminal(tw)
+ assert tw.lines[0] == ""
+ tw.lines.pop(0)
+        assert tw.lines[0] == ">   ???"
+ assert tw.lines[1] == ""
+ assert tw.lines[2].endswith("mod.py:5: ")
+ assert tw.lines[3] == ("_ ", None)
+ assert tw.lines[4] == ""
+        assert tw.lines[5] == ">   ???"
+        assert tw.lines[6] == "E   ValueError: 3"
+ assert tw.lines[7] == ""
+ assert tw.lines[8].endswith("mod.py:3: ValueError")
+
+ @broken_on_modern_pytest
+ def test_toterminal_long_incomplete_source(self, importasmod, tmpdir):
+ mod = importasmod("""
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """)
+ excinfo = py.test.raises(ValueError, mod.f)
+ tmpdir.join('mod.py').write('asdf')
+ excinfo.traceback = excinfo.traceback.filter()
+ repr = excinfo.getrepr()
+ tw = TWMock()
+ repr.toterminal(tw)
+ assert tw.lines[0] == ""
+ tw.lines.pop(0)
+        assert tw.lines[0] == ">   ???"
+ assert tw.lines[1] == ""
+ assert tw.lines[2].endswith("mod.py:5: ")
+ assert tw.lines[3] == ("_ ", None)
+ assert tw.lines[4] == ""
+        assert tw.lines[5] == ">   ???"
+        assert tw.lines[6] == "E   ValueError: 3"
+ assert tw.lines[7] == ""
+ assert tw.lines[8].endswith("mod.py:3: ValueError")
+
+ @broken_on_modern_pytest
+ def test_toterminal_long_filenames(self, importasmod):
+ mod = importasmod("""
+ def f():
+ raise ValueError()
+ """)
+ excinfo = py.test.raises(ValueError, mod.f)
+ tw = TWMock()
+ path = py.path.local(mod.__file__)
+ old = path.dirpath().chdir()
+ try:
+ repr = excinfo.getrepr(abspath=False)
+ repr.toterminal(tw)
+ line = tw.lines[-1]
+ x = py.path.local().bestrelpath(path)
+ if len(x) < len(str(path)):
+ assert line == "mod.py:3: ValueError"
+
+ repr = excinfo.getrepr(abspath=True)
+ repr.toterminal(tw)
+ line = tw.lines[-1]
+ assert line == "%s:3: ValueError" %(path,)
+ finally:
+ old.chdir()
+
+ @pytest.mark.parametrize('style', ("long", "short", "no"))
+ @pytest.mark.parametrize('showlocals', (True, False),
+ ids=['locals', 'nolocals'])
+ @pytest.mark.parametrize('tbfilter', (True, False),
+ ids=['tbfilter', 'nofilter'])
+ @pytest.mark.parametrize('funcargs', (True, False),
+ ids=['funcargs', 'nofuncargs'])
+ def test_format_excinfo(self, importasmod,
+ style, showlocals, tbfilter, funcargs):
+
+ mod = importasmod("""
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """)
+ excinfo = py.test.raises(ValueError, mod.f)
+ tw = py.io.TerminalWriter(stringio=True)
+ repr = excinfo.getrepr(
+ style=style,
+ showlocals=showlocals,
+ funcargs=funcargs,
+ tbfilter=tbfilter
+ )
+ repr.toterminal(tw)
+ assert tw.stringio.getvalue()
+
+ @broken_on_modern_pytest
+ def test_native_style(self):
+ excinfo = self.excinfo_from_exec("""
+ assert 0
+ """)
+ repr = excinfo.getrepr(style='native')
+ assert "assert 0" in str(repr.reprcrash)
+ s = str(repr)
+        assert s.startswith('Traceback (most recent call last):\n  File')
+ assert s.endswith('\nAssertionError: assert 0')
+ assert 'exec (source.compile())' in s
+ assert s.count('assert 0') == 2
+
+ @broken_on_modern_pytest
+ def test_traceback_repr_style(self, importasmod):
+ mod = importasmod("""
+ def f():
+ g()
+ def g():
+ h()
+ def h():
+ i()
+ def i():
+ raise ValueError()
+ """)
+ excinfo = py.test.raises(ValueError, mod.f)
+ excinfo.traceback = excinfo.traceback.filter()
+ excinfo.traceback[1].set_repr_style("short")
+ excinfo.traceback[2].set_repr_style("short")
+ r = excinfo.getrepr(style="long")
+ tw = TWMock()
+ r.toterminal(tw)
+ for line in tw.lines: print (line)
+ assert tw.lines[0] == ""
+        assert tw.lines[1] == "    def f():"
+        assert tw.lines[2] == ">       g()"
+ assert tw.lines[3] == ""
+ assert tw.lines[4].endswith("mod.py:3: ")
+ assert tw.lines[5] == ("_ ", None)
+ assert tw.lines[6].endswith("in g")
+        assert tw.lines[7] == "    h()"
+ assert tw.lines[8].endswith("in h")
+        assert tw.lines[9] == "    i()"
+ assert tw.lines[10] == ("_ ", None)
+ assert tw.lines[11] == ""
+        assert tw.lines[12] == "    def i():"
+        assert tw.lines[13] == ">       raise ValueError()"
+        assert tw.lines[14] == "E       ValueError"
+ assert tw.lines[15] == ""
+ assert tw.lines[16].endswith("mod.py:9: ValueError")
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/code/test_source.py b/testing/web-platform/tests/tools/third_party/py/testing/code/test_source.py
new file mode 100644
index 0000000000..ca9a42275c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/code/test_source.py
@@ -0,0 +1,656 @@
+from py.code import Source
+import py
+import sys
+import inspect
+
+from py._code.source import _ast
+if _ast is not None:
+ astonly = py.test.mark.nothing
+else:
+ astonly = py.test.mark.xfail("True", reason="only works with AST-compile")
+
+failsonjython = py.test.mark.xfail("sys.platform.startswith('java')")
+
+def test_source_str_function():
+ x = Source("3")
+ assert str(x) == "3"
+
+ x = Source(" 3")
+ assert str(x) == "3"
+
+ x = Source("""
+ 3
+ """, rstrip=False)
+    assert str(x) == "\n3\n    "
+
+ x = Source("""
+ 3
+ """, rstrip=True)
+ assert str(x) == "\n3"
+
+def test_unicode():
+ try:
+ unicode
+ except NameError:
+ return
+ x = Source(unicode("4"))
+ assert str(x) == "4"
+ co = py.code.compile(unicode('u"\xc3\xa5"', 'utf8'), mode='eval')
+ val = eval(co)
+ assert isinstance(val, unicode)
+
+def test_source_from_function():
+ source = py.code.Source(test_source_str_function)
+ assert str(source).startswith('def test_source_str_function():')
+
+def test_source_from_method():
+ class TestClass:
+ def test_method(self):
+ pass
+ source = py.code.Source(TestClass().test_method)
+ assert source.lines == ["def test_method(self):",
+    "    pass"]
+
+def test_source_from_lines():
+ lines = ["a \n", "b\n", "c"]
+ source = py.code.Source(lines)
+ assert source.lines == ['a ', 'b', 'c']
+
+def test_source_from_inner_function():
+ def f():
+ pass
+ source = py.code.Source(f, deindent=False)
+    assert str(source).startswith('    def f():')
+ source = py.code.Source(f)
+ assert str(source).startswith('def f():')
+
+def test_source_putaround_simple():
+ source = Source("raise ValueError")
+ source = source.putaround(
+ "try:", """\
+ except ValueError:
+ x = 42
+ else:
+ x = 23""")
+ assert str(source)=="""\
+try:
+ raise ValueError
+except ValueError:
+ x = 42
+else:
+ x = 23"""
+
+def test_source_putaround():
+ source = Source()
+ source = source.putaround("""
+ if 1:
+ x=1
+ """)
+    assert str(source).strip() == "if 1:\n    x=1"
+
+def test_source_strips():
+ source = Source("")
+ assert source == Source()
+ assert str(source) == ''
+ assert source.strip() == source
+
+def test_source_strip_multiline():
+ source = Source()
+ source.lines = ["", " hello", " "]
+ source2 = source.strip()
+ assert source2.lines == [" hello"]
+
+def test_syntaxerror_rerepresentation():
+ ex = py.test.raises(SyntaxError, py.code.compile, 'xyz xyz')
+ assert ex.value.lineno == 1
+ assert ex.value.offset in (5, 7) # pypy/cpython difference
+ assert ex.value.text.strip(), 'x x'
+
+def test_isparseable():
+ assert Source("hello").isparseable()
+ assert Source("if 1:\n pass").isparseable()
+ assert Source(" \nif 1:\n pass").isparseable()
+ assert not Source("if 1:\n").isparseable()
+ assert not Source(" \nif 1:\npass").isparseable()
+ assert not Source(chr(0)).isparseable()
+
+class TestAccesses:
+ source = Source("""\
+ def f(x):
+ pass
+ def g(x):
+ pass
+ """)
+ def test_getrange(self):
+ x = self.source[0:2]
+ assert x.isparseable()
+ assert len(x.lines) == 2
+        assert str(x) == "def f(x):\n    pass"
+
+ def test_getline(self):
+ x = self.source[0]
+ assert x == "def f(x):"
+
+ def test_len(self):
+ assert len(self.source) == 4
+
+ def test_iter(self):
+ l = [x for x in self.source]
+ assert len(l) == 4
+
+class TestSourceParsingAndCompiling:
+ source = Source("""\
+ def f(x):
+ assert (x ==
+ 3 +
+ 4)
+ """).strip()
+
+ def test_compile(self):
+ co = py.code.compile("x=3")
+ d = {}
+ exec (co, d)
+ assert d['x'] == 3
+
+ def test_compile_and_getsource_simple(self):
+ co = py.code.compile("x=3")
+ exec (co)
+ source = py.code.Source(co)
+ assert str(source) == "x=3"
+
+ def test_compile_and_getsource_through_same_function(self):
+ def gensource(source):
+ return py.code.compile(source)
+ co1 = gensource("""
+ def f():
+ raise KeyError()
+ """)
+ co2 = gensource("""
+ def f():
+ raise ValueError()
+ """)
+ source1 = inspect.getsource(co1)
+ assert 'KeyError' in source1
+ source2 = inspect.getsource(co2)
+ assert 'ValueError' in source2
+
+ def test_getstatement(self):
+ #print str(self.source)
+ ass = str(self.source[1:])
+ for i in range(1, 4):
+ #print "trying start in line %r" % self.source[i]
+ s = self.source.getstatement(i)
+ #x = s.deindent()
+ assert str(s) == ass
+
+ def test_getstatementrange_triple_quoted(self):
+ #print str(self.source)
+ source = Source("""hello('''
+ ''')""")
+ s = source.getstatement(0)
+ assert s == str(source)
+ s = source.getstatement(1)
+ assert s == str(source)
+
+ @astonly
+ def test_getstatementrange_within_constructs(self):
+ source = Source("""\
+ try:
+ try:
+ raise ValueError
+ except SomeThing:
+ pass
+ finally:
+ 42
+ """)
+ assert len(source) == 7
+ # check all lineno's that could occur in a traceback
+ #assert source.getstatementrange(0) == (0, 7)
+ #assert source.getstatementrange(1) == (1, 5)
+ assert source.getstatementrange(2) == (2, 3)
+ assert source.getstatementrange(3) == (3, 4)
+ assert source.getstatementrange(4) == (4, 5)
+ #assert source.getstatementrange(5) == (0, 7)
+ assert source.getstatementrange(6) == (6, 7)
+
+ def test_getstatementrange_bug(self):
+ source = Source("""\
+ try:
+ x = (
+ y +
+ z)
+ except:
+ pass
+ """)
+ assert len(source) == 6
+ assert source.getstatementrange(2) == (1, 4)
+
+ def test_getstatementrange_bug2(self):
+ source = Source("""\
+ assert (
+ 33
+ ==
+ [
+ X(3,
+ b=1, c=2
+ ),
+ ]
+ )
+ """)
+ assert len(source) == 9
+ assert source.getstatementrange(5) == (0, 9)
+
+ def test_getstatementrange_ast_issue58(self):
+ source = Source("""\
+
+ def test_some():
+ for a in [a for a in
+ CAUSE_ERROR]: pass
+
+ x = 3
+ """)
+ assert getstatement(2, source).lines == source.lines[2:3]
+ assert getstatement(3, source).lines == source.lines[3:4]
+
+ def test_getstatementrange_out_of_bounds_py3(self):
+ source = Source("if xxx:\n from .collections import something")
+ r = source.getstatementrange(1)
+ assert r == (1,2)
+
+ def test_getstatementrange_with_syntaxerror_issue7(self):
+ source = Source(":")
+ py.test.raises(SyntaxError, lambda: source.getstatementrange(0))
+
+ def test_compile_to_ast(self):
+ import ast
+ source = Source("x = 4")
+ mod = source.compile(flag=ast.PyCF_ONLY_AST)
+ assert isinstance(mod, ast.Module)
+ compile(mod, "<filename>", "exec")
+
+ def test_compile_and_getsource(self):
+ co = self.source.compile()
+ py.builtin.exec_(co, globals())
+ f(7)
+ excinfo = py.test.raises(AssertionError, "f(6)")
+ frame = excinfo.traceback[-1].frame
+ stmt = frame.code.fullsource.getstatement(frame.lineno)
+ #print "block", str(block)
+ assert str(stmt).strip().startswith('assert')
+
+ def test_compilefuncs_and_path_sanity(self):
+ def check(comp, name):
+ co = comp(self.source, name)
+ if not name:
+ expected = "codegen %s:%d>" %(mypath, mylineno+2+1)
+ else:
+ expected = "codegen %r %s:%d>" % (name, mypath, mylineno+2+1)
+ fn = co.co_filename
+ assert fn.endswith(expected)
+
+ mycode = py.code.Code(self.test_compilefuncs_and_path_sanity)
+ mylineno = mycode.firstlineno
+ mypath = mycode.path
+
+ for comp in py.code.compile, py.code.Source.compile:
+ for name in '', None, 'my':
+ yield check, comp, name
+
+ def test_offsetless_synerr(self):
+ py.test.raises(SyntaxError, py.code.compile, "lambda a,a: 0", mode='eval')
+
+def test_getstartingblock_singleline():
+ class A:
+ def __init__(self, *args):
+ frame = sys._getframe(1)
+ self.source = py.code.Frame(frame).statement
+
+ x = A('x', 'y')
+
+ l = [i for i in x.source.lines if i.strip()]
+ assert len(l) == 1
+
+def test_getstartingblock_multiline():
+ class A:
+ def __init__(self, *args):
+ frame = sys._getframe(1)
+ self.source = py.code.Frame(frame).statement
+
+ x = A('x',
+ 'y' \
+ ,
+ 'z')
+
+ l = [i for i in x.source.lines if i.strip()]
+ assert len(l) == 4
+
+def test_getline_finally():
+ def c(): pass
+ excinfo = py.test.raises(TypeError, """
+ teardown = None
+ try:
+ c(1)
+ finally:
+ if teardown:
+ teardown()
+ """)
+ source = excinfo.traceback[-1].statement
+ assert str(source).strip() == 'c(1)'
+
+def test_getfuncsource_dynamic():
+ source = """
+ def f():
+ raise ValueError
+
+ def g(): pass
+ """
+ co = py.code.compile(source)
+ py.builtin.exec_(co, globals())
+    assert str(py.code.Source(f)).strip() == 'def f():\n    raise ValueError'
+ assert str(py.code.Source(g)).strip() == 'def g(): pass'
+
+
+def test_getfuncsource_with_multine_string():
+ def f():
+ c = '''while True:
+ pass
+'''
+    assert str(py.code.Source(f)).strip() == "def f():\n    c = '''while True:\n    pass\n'''"
+
+
+def test_deindent():
+ from py._code.source import deindent as deindent
+ assert deindent(['\tfoo', '\tbar', ]) == ['foo', 'bar']
+
+ def f():
+ c = '''while True:
+ pass
+'''
+ import inspect
+ lines = deindent(inspect.getsource(f).splitlines())
+    assert lines == ["def f():", "    c = '''while True:", "    pass", "'''"]
+
+ source = """
+ def f():
+ def g():
+ pass
+ """
+ lines = deindent(source.splitlines())
+    assert lines == ['', 'def f():', '    def g():', '        pass', '    ']
+
+def test_source_of_class_at_eof_without_newline(tmpdir):
+ # this test fails because the implicit inspect.getsource(A) below
+ # does not return the "x = 1" last line.
+ source = py.code.Source('''
+ class A(object):
+ def method(self):
+ x = 1
+ ''')
+ path = tmpdir.join("a.py")
+ path.write(source)
+ s2 = py.code.Source(tmpdir.join("a.py").pyimport().A)
+ assert str(source).strip() == str(s2).strip()
+
+if True:
+ def x():
+ pass
+
+def test_getsource_fallback():
+ from py._code.source import getsource
+ expected = """def x():
+ pass"""
+ src = getsource(x)
+ assert src == expected
+
+def test_idem_compile_and_getsource():
+ from py._code.source import getsource
+ expected = "def x(): pass"
+ co = py.code.compile(expected)
+ src = getsource(co)
+ assert src == expected
+
+def test_findsource_fallback():
+ from py._code.source import findsource
+ src, lineno = findsource(x)
+ assert 'test_findsource_simple' in str(src)
+    assert src[lineno] == '    def x():'
+
+def test_findsource():
+ from py._code.source import findsource
+ co = py.code.compile("""if 1:
+ def x():
+ pass
+""")
+
+ src, lineno = findsource(co)
+ assert 'if 1:' in str(src)
+
+ d = {}
+ eval(co, d)
+ src, lineno = findsource(d['x'])
+ assert 'if 1:' in str(src)
+    assert src[lineno] == "            def x():"
+
+
+def test_getfslineno():
+ from py.code import getfslineno
+
+ def f(x):
+ pass
+
+ fspath, lineno = getfslineno(f)
+
+ assert fspath.basename == "test_source.py"
+ assert lineno == py.code.getrawcode(f).co_firstlineno-1 # see findsource
+
+ class A(object):
+ pass
+
+ fspath, lineno = getfslineno(A)
+
+ _, A_lineno = inspect.findsource(A)
+ assert fspath.basename == "test_source.py"
+ assert lineno == A_lineno
+
+ assert getfslineno(3) == ("", -1)
+ class B:
+ pass
+ B.__name__ = "B2"
+ # TODO: On CPython 3.9 this actually returns the line,
+ # should it?
+ # assert getfslineno(B)[1] == -1
+
+def test_code_of_object_instance_with_call():
+ class A:
+ pass
+ py.test.raises(TypeError, lambda: py.code.Source(A()))
+ class WithCall:
+ def __call__(self):
+ pass
+
+ code = py.code.Code(WithCall())
+ assert 'pass' in str(code.source())
+
+ class Hello(object):
+ def __call__(self):
+ pass
+ py.test.raises(TypeError, lambda: py.code.Code(Hello))
+
+
+def getstatement(lineno, source):
+ from py._code.source import getstatementrange_ast
+ source = py.code.Source(source, deindent=False)
+ ast, start, end = getstatementrange_ast(lineno, source)
+ return source[start:end]
+
+def test_oneline():
+ source = getstatement(0, "raise ValueError")
+ assert str(source) == "raise ValueError"
+
+def test_comment_and_no_newline_at_end():
+ from py._code.source import getstatementrange_ast
+ source = Source(['def test_basic_complex():',
+ ' assert 1 == 2',
+ '# vim: filetype=pyopencl:fdm=marker'])
+ ast, start, end = getstatementrange_ast(1, source)
+ assert end == 2
+
+def test_oneline_and_comment():
+ source = getstatement(0, "raise ValueError\n#hello")
+ assert str(source) == "raise ValueError"
+
+def test_comments():
+ source = '''def test():
+ "comment 1"
+ x = 1
+ # comment 2
+ # comment 3
+
+ assert False
+
+"""
+comment 4
+"""
+'''
+ for line in range(2, 6):
+        assert str(getstatement(line, source)) == "    x = 1"
+ if sys.version_info >= (3, 8) or hasattr(sys, "pypy_version_info"):
+ tqs_start = 8
+ else:
+ tqs_start = 10
+        assert str(getstatement(10, source)) == '"""'
+ for line in range(6, tqs_start):
+        assert str(getstatement(line, source)) == "    assert False"
+ for line in range(tqs_start, 10):
+ assert str(getstatement(line, source)) == '"""\ncomment 4\n"""'
+
+def test_comment_in_statement():
+ source = '''test(foo=1,
+ # comment 1
+ bar=2)
+'''
+ for line in range(1,3):
+ assert str(getstatement(line, source)) == \
+            'test(foo=1,\n    # comment 1\n    bar=2)'
+
+def test_single_line_else():
+ source = getstatement(1, "if False: 2\nelse: 3")
+ assert str(source) == "else: 3"
+
+def test_single_line_finally():
+ source = getstatement(1, "try: 1\nfinally: 3")
+ assert str(source) == "finally: 3"
+
+def test_issue55():
+ source = ('def round_trip(dinp):\n assert 1 == dinp\n'
+ 'def test_rt():\n round_trip("""\n""")\n')
+ s = getstatement(3, source)
+ assert str(s) == ' round_trip("""\n""")'
+
+
+def XXXtest_multiline():
+ source = getstatement(0, """\
+raise ValueError(
+ 23
+)
+x = 3
+""")
+    assert str(source) == "raise ValueError(\n    23\n)"
+
+class TestTry:
+ pytestmark = astonly
+ source = """\
+try:
+ raise ValueError
+except Something:
+ raise IndexError(1)
+else:
+ raise KeyError()
+"""
+
+ def test_body(self):
+ source = getstatement(1, self.source)
+        assert str(source) == "    raise ValueError"
+
+ def test_except_line(self):
+ source = getstatement(2, self.source)
+ assert str(source) == "except Something:"
+
+ def test_except_body(self):
+ source = getstatement(3, self.source)
+        assert str(source) == "    raise IndexError(1)"
+
+ def test_else(self):
+ source = getstatement(5, self.source)
+        assert str(source) == "    raise KeyError()"
+
+class TestTryFinally:
+ source = """\
+try:
+ raise ValueError
+finally:
+ raise IndexError(1)
+"""
+
+ def test_body(self):
+ source = getstatement(1, self.source)
+        assert str(source) == "    raise ValueError"
+
+ def test_finally(self):
+ source = getstatement(3, self.source)
+        assert str(source) == "    raise IndexError(1)"
+
+
+
+class TestIf:
+ pytestmark = astonly
+ source = """\
+if 1:
+ y = 3
+elif False:
+ y = 5
+else:
+ y = 7
+"""
+
+ def test_body(self):
+ source = getstatement(1, self.source)
+        assert str(source) == "    y = 3"
+
+ def test_elif_clause(self):
+ source = getstatement(2, self.source)
+ assert str(source) == "elif False:"
+
+ def test_elif(self):
+ source = getstatement(3, self.source)
+        assert str(source) == "    y = 5"
+
+ def test_else(self):
+ source = getstatement(5, self.source)
+        assert str(source) == "    y = 7"
+
+def test_semicolon():
+ s = """\
+hello ; pytest.skip()
+"""
+ source = getstatement(0, s)
+ assert str(source) == s.strip()
+
+def test_def_online():
+ s = """\
+def func(): raise ValueError(42)
+
+def something():
+ pass
+"""
+ source = getstatement(0, s)
+ assert str(source) == "def func(): raise ValueError(42)"
+
+def XXX_test_expression_multiline():
+ source = """\
+something
+'''
+'''"""
+ result = getstatement(1, source)
+ assert str(result) == "'''\n'''"
+
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/conftest.py b/testing/web-platform/tests/tools/third_party/py/testing/conftest.py
new file mode 100644
index 0000000000..0f956b3dd2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/conftest.py
@@ -0,0 +1,3 @@
+
+pytest_plugins = "pytester",
+
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/io_/__init__.py b/testing/web-platform/tests/tools/third_party/py/testing/io_/__init__.py
new file mode 100644
index 0000000000..792d600548
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/io_/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/io_/test_capture.py b/testing/web-platform/tests/tools/third_party/py/testing/io_/test_capture.py
new file mode 100644
index 0000000000..b5fedd0abc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/io_/test_capture.py
@@ -0,0 +1,501 @@
+from __future__ import with_statement
+
+import os, sys
+import py
+
+needsdup = py.test.mark.skipif("not hasattr(os, 'dup')")
+
+from py.builtin import print_
+
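+# bytes/text helpers so the same capture tests run under both Python 2 and Python 3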
+if sys.version_info >= (3,0):
+ def tobytes(obj):
+ if isinstance(obj, str):
+ obj = obj.encode('UTF-8')
+ assert isinstance(obj, bytes)
+ return obj
+ def totext(obj):
+ if isinstance(obj, bytes):
+ obj = str(obj, 'UTF-8')
+ assert isinstance(obj, str)
+ return obj
+else:
+ def tobytes(obj):
+ if isinstance(obj, unicode):
+ obj = obj.encode('UTF-8')
+ assert isinstance(obj, str)
+ return obj
+ def totext(obj):
+ if isinstance(obj, str):
+ obj = unicode(obj, 'UTF-8')
+ assert isinstance(obj, unicode)
+ return obj
+
+def oswritebytes(fd, obj):
+ os.write(fd, tobytes(obj))
+
+class TestTextIO:
+ def test_text(self):
+ f = py.io.TextIO()
+ f.write("hello")
+ s = f.getvalue()
+ assert s == "hello"
+ f.close()
+
+ def test_unicode_and_str_mixture(self):
+ f = py.io.TextIO()
+ if sys.version_info >= (3,0):
+ f.write("\u00f6")
+ py.test.raises(TypeError, "f.write(bytes('hello', 'UTF-8'))")
+ else:
+ f.write(unicode("\u00f6", 'UTF-8'))
+ f.write("hello") # bytes
+ s = f.getvalue()
+ f.close()
+ assert isinstance(s, unicode)
+
+def test_bytes_io():
+ f = py.io.BytesIO()
+ f.write(tobytes("hello"))
+ py.test.raises(TypeError, "f.write(totext('hello'))")
+ s = f.getvalue()
+ assert s == tobytes("hello")
+
+def test_dontreadfrominput():
+ from py._io.capture import DontReadFromInput
+ f = DontReadFromInput()
+ assert not f.isatty()
+ py.test.raises(IOError, f.read)
+ py.test.raises(IOError, f.readlines)
+ py.test.raises(IOError, iter, f)
+ py.test.raises(ValueError, f.fileno)
+ f.close() # just for completeness
+
+def pytest_funcarg__tmpfile(request):
+ testdir = request.getfuncargvalue("testdir")
+ f = testdir.makepyfile("").open('wb+')
+ request.addfinalizer(f.close)
+ return f
+
+@needsdup
+def test_dupfile(tmpfile):
+ flist = []
+ for i in range(5):
+ nf = py.io.dupfile(tmpfile, encoding="utf-8")
+ assert nf != tmpfile
+ assert nf.fileno() != tmpfile.fileno()
+ assert nf not in flist
+ print_(i, end="", file=nf)
+ flist.append(nf)
+ for i in range(5):
+ f = flist[i]
+ f.close()
+ tmpfile.seek(0)
+ s = tmpfile.read()
+ assert "01234" in repr(s)
+ tmpfile.close()
+
+def test_dupfile_no_mode():
+ """
+ dupfile should trap an AttributeError and return f if no mode is supplied.
+ """
+ class SomeFileWrapper(object):
+ "An object with a fileno method but no mode attribute"
+ def fileno(self):
+ return 1
+ tmpfile = SomeFileWrapper()
+ assert py.io.dupfile(tmpfile) is tmpfile
+ with py.test.raises(AttributeError):
+ py.io.dupfile(tmpfile, raising=True)
+
+def lsof_check(func):
+ pid = os.getpid()
+ try:
+ out = py.process.cmdexec("lsof -p %d" % pid)
+ except py.process.cmdexec.Error:
+ py.test.skip("could not run 'lsof'")
+ func()
+ out2 = py.process.cmdexec("lsof -p %d" % pid)
+ len1 = len([x for x in out.split("\n") if "REG" in x])
+ len2 = len([x for x in out2.split("\n") if "REG" in x])
+ assert len2 < len1 + 3, out2
+
+class TestFDCapture:
+ pytestmark = needsdup
+
+ def test_not_now(self, tmpfile):
+ fd = tmpfile.fileno()
+ cap = py.io.FDCapture(fd, now=False)
+ data = tobytes("hello")
+ os.write(fd, data)
+ f = cap.done()
+ s = f.read()
+ assert not s
+ cap = py.io.FDCapture(fd, now=False)
+ cap.start()
+ os.write(fd, data)
+ f = cap.done()
+ s = f.read()
+ assert s == "hello"
+
+ def test_simple(self, tmpfile):
+ fd = tmpfile.fileno()
+ cap = py.io.FDCapture(fd)
+ data = tobytes("hello")
+ os.write(fd, data)
+ f = cap.done()
+ s = f.read()
+ assert s == "hello"
+ f.close()
+
+ def test_simple_many(self, tmpfile):
+ for i in range(10):
+ self.test_simple(tmpfile)
+
+ def test_simple_many_check_open_files(self, tmpfile):
+ lsof_check(lambda: self.test_simple_many(tmpfile))
+
+ def test_simple_fail_second_start(self, tmpfile):
+ fd = tmpfile.fileno()
+ cap = py.io.FDCapture(fd)
+ f = cap.done()
+ py.test.raises(ValueError, cap.start)
+ f.close()
+
+ def test_stderr(self):
+ cap = py.io.FDCapture(2, patchsys=True)
+ print_("hello", file=sys.stderr)
+ f = cap.done()
+ s = f.read()
+ assert s == "hello\n"
+
+ def test_stdin(self, tmpfile):
+ tmpfile.write(tobytes("3"))
+ tmpfile.seek(0)
+ cap = py.io.FDCapture(0, tmpfile=tmpfile)
+ # check with os.read() directly instead of raw_input(), because
+ # sys.stdin itself may be redirected (as py.test now does by default)
+ x = os.read(0, 100).strip()
+ f = cap.done()
+ assert x == tobytes("3")
+
+ def test_writeorg(self, tmpfile):
+ data1, data2 = tobytes("foo"), tobytes("bar")
+ try:
+ cap = py.io.FDCapture(tmpfile.fileno())
+ tmpfile.write(data1)
+ cap.writeorg(data2)
+ finally:
+ tmpfile.close()
+ f = cap.done()
+ scap = f.read()
+ assert scap == totext(data1)
+ stmp = open(tmpfile.name, 'rb').read()
+ assert stmp == data2
+
+
+class TestStdCapture:
+ def getcapture(self, **kw):
+ return py.io.StdCapture(**kw)
+
+ def test_capturing_done_simple(self):
+ cap = self.getcapture()
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ outfile, errfile = cap.done()
+ s = outfile.read()
+ assert s == "hello"
+ s = errfile.read()
+ assert s == "world"
+
+ def test_capturing_reset_simple(self):
+ cap = self.getcapture()
+ print("hello world")
+ sys.stderr.write("hello error\n")
+ out, err = cap.reset()
+ assert out == "hello world\n"
+ assert err == "hello error\n"
+
+ def test_capturing_readouterr(self):
+ cap = self.getcapture()
+ try:
+ print ("hello world")
+ sys.stderr.write("hello error\n")
+ out, err = cap.readouterr()
+ assert out == "hello world\n"
+ assert err == "hello error\n"
+ sys.stderr.write("error2")
+ finally:
+ out, err = cap.reset()
+ assert err == "error2"
+
+ def test_capturing_readouterr_unicode(self):
+ cap = self.getcapture()
+ print ("hx\xc4\x85\xc4\x87")
+ out, err = cap.readouterr()
+ assert out == py.builtin._totext("hx\xc4\x85\xc4\x87\n", "utf8")
+
+ @py.test.mark.skipif('sys.version_info >= (3,)',
+ reason='text output different for bytes on python3')
+ def test_capturing_readouterr_decode_error_handling(self):
+ cap = self.getcapture()
+        # triggered an internal error in pytest
+ print('\xa6')
+ out, err = cap.readouterr()
+ assert out == py.builtin._totext('\ufffd\n', 'unicode-escape')
+
+ def test_capturing_mixed(self):
+ cap = self.getcapture(mixed=True)
+ sys.stdout.write("hello ")
+ sys.stderr.write("world")
+ sys.stdout.write(".")
+ out, err = cap.reset()
+ assert out.strip() == "hello world."
+ assert not err
+
+ def test_reset_twice_error(self):
+ cap = self.getcapture()
+ print ("hello")
+ out, err = cap.reset()
+ py.test.raises(ValueError, cap.reset)
+ assert out == "hello\n"
+ assert not err
+
+ def test_capturing_modify_sysouterr_in_between(self):
+ oldout = sys.stdout
+ olderr = sys.stderr
+ cap = self.getcapture()
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ sys.stdout = py.io.TextIO()
+ sys.stderr = py.io.TextIO()
+ print ("not seen")
+ sys.stderr.write("not seen\n")
+ out, err = cap.reset()
+ assert out == "hello"
+ assert err == "world"
+ assert sys.stdout == oldout
+ assert sys.stderr == olderr
+
+ def test_capturing_error_recursive(self):
+ cap1 = self.getcapture()
+ print ("cap1")
+ cap2 = self.getcapture()
+ print ("cap2")
+ out2, err2 = cap2.reset()
+ out1, err1 = cap1.reset()
+ assert out1 == "cap1\n"
+ assert out2 == "cap2\n"
+
+ def test_just_out_capture(self):
+ cap = self.getcapture(out=True, err=False)
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ out, err = cap.reset()
+ assert out == "hello"
+ assert not err
+
+ def test_just_err_capture(self):
+ cap = self.getcapture(out=False, err=True)
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ out, err = cap.reset()
+ assert err == "world"
+ assert not out
+
+ def test_stdin_restored(self):
+ old = sys.stdin
+ cap = self.getcapture(in_=True)
+ newstdin = sys.stdin
+ out, err = cap.reset()
+ assert newstdin != sys.stdin
+ assert sys.stdin is old
+
+ def test_stdin_nulled_by_default(self):
+ print ("XXX this test may well hang instead of crashing")
+ print ("XXX which indicates an error in the underlying capturing")
+ print ("XXX mechanisms")
+ cap = self.getcapture()
+ py.test.raises(IOError, "sys.stdin.read()")
+ out, err = cap.reset()
+
+ def test_suspend_resume(self):
+ cap = self.getcapture(out=True, err=False, in_=False)
+ try:
+ print ("hello")
+ sys.stderr.write("error\n")
+ out, err = cap.suspend()
+ assert out == "hello\n"
+ assert not err
+ print ("in between")
+ sys.stderr.write("in between\n")
+ cap.resume()
+ print ("after")
+ sys.stderr.write("error_after\n")
+ finally:
+ out, err = cap.reset()
+ assert out == "after\n"
+ assert not err
+
+class TestStdCaptureNotNow(TestStdCapture):
+ def getcapture(self, **kw):
+ kw['now'] = False
+ cap = py.io.StdCapture(**kw)
+ cap.startall()
+ return cap
+
+class TestStdCaptureFD(TestStdCapture):
+ pytestmark = needsdup
+
+ def getcapture(self, **kw):
+ return py.io.StdCaptureFD(**kw)
+
+ def test_intermingling(self):
+ cap = self.getcapture()
+ oswritebytes(1, "1")
+ sys.stdout.write(str(2))
+ sys.stdout.flush()
+ oswritebytes(1, "3")
+ oswritebytes(2, "a")
+ sys.stderr.write("b")
+ sys.stderr.flush()
+ oswritebytes(2, "c")
+ out, err = cap.reset()
+ assert out == "123"
+ assert err == "abc"
+
+ def test_callcapture(self):
+ def func(x, y):
+ print (x)
+ sys.stderr.write(str(y))
+ return 42
+
+ res, out, err = py.io.StdCaptureFD.call(func, 3, y=4)
+ assert res == 42
+ assert out.startswith("3")
+ assert err.startswith("4")
+
+ def test_many(self, capfd):
+ def f():
+ for i in range(10):
+ cap = py.io.StdCaptureFD()
+ cap.reset()
+ lsof_check(f)
+
+class TestStdCaptureFDNotNow(TestStdCaptureFD):
+ pytestmark = needsdup
+
+ def getcapture(self, **kw):
+ kw['now'] = False
+ cap = py.io.StdCaptureFD(**kw)
+ cap.startall()
+ return cap
+
+@needsdup
+def test_stdcapture_fd_tmpfile(tmpfile):
+ capfd = py.io.StdCaptureFD(out=tmpfile)
+ os.write(1, "hello".encode("ascii"))
+ os.write(2, "world".encode("ascii"))
+ outf, errf = capfd.done()
+ assert outf == tmpfile
+
+class TestStdCaptureFDinvalidFD:
+ pytestmark = needsdup
+ def test_stdcapture_fd_invalid_fd(self, testdir):
+ testdir.makepyfile("""
+ import py, os
+ def test_stdout():
+ os.close(1)
+ cap = py.io.StdCaptureFD(out=True, err=False, in_=False)
+ cap.done()
+ def test_stderr():
+ os.close(2)
+ cap = py.io.StdCaptureFD(out=False, err=True, in_=False)
+ cap.done()
+ def test_stdin():
+ os.close(0)
+ cap = py.io.StdCaptureFD(out=False, err=False, in_=True)
+ cap.done()
+ """)
+ result = testdir.runpytest("--capture=fd")
+ assert result.ret == 0
+ assert result.parseoutcomes()['passed'] == 3
+
+def test_capture_not_started_but_reset():
+ capsys = py.io.StdCapture(now=False)
+ capsys.done()
+ capsys.done()
+ capsys.reset()
+
+@needsdup
+def test_capture_no_sys():
+ capsys = py.io.StdCapture()
+ try:
+ cap = py.io.StdCaptureFD(patchsys=False)
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ oswritebytes(1, "1")
+ oswritebytes(2, "2")
+ out, err = cap.reset()
+ assert out == "1"
+ assert err == "2"
+ finally:
+ capsys.reset()
+
+@needsdup
+def test_callcapture_nofd():
+ def func(x, y):
+ oswritebytes(1, "hello")
+ oswritebytes(2, "hello")
+ print (x)
+ sys.stderr.write(str(y))
+ return 42
+
+ capfd = py.io.StdCaptureFD(patchsys=False)
+ try:
+ res, out, err = py.io.StdCapture.call(func, 3, y=4)
+ finally:
+ capfd.reset()
+ assert res == 42
+ assert out.startswith("3")
+ assert err.startswith("4")
+
+@needsdup
+@py.test.mark.parametrize('use', [True, False])
+def test_fdcapture_tmpfile_remains_the_same(tmpfile, use):
+ if not use:
+ tmpfile = True
+ cap = py.io.StdCaptureFD(out=False, err=tmpfile, now=False)
+ cap.startall()
+ capfile = cap.err.tmpfile
+ cap.suspend()
+ cap.resume()
+ capfile2 = cap.err.tmpfile
+ assert capfile2 == capfile
+
+@py.test.mark.parametrize('method', ['StdCapture', 'StdCaptureFD'])
+def test_capturing_and_logging_fundamentals(testdir, method):
+ if method == "StdCaptureFD" and not hasattr(os, 'dup'):
+ py.test.skip("need os.dup")
+ # here we check a fundamental feature
+ p = testdir.makepyfile("""
+ import sys, os
+ import py, logging
+ cap = py.io.%s(out=False, in_=False)
+
+ logging.warn("hello1")
+ outerr = cap.suspend()
+ print ("suspend, captured %%s" %%(outerr,))
+ logging.warn("hello2")
+
+ cap.resume()
+ logging.warn("hello3")
+
+ outerr = cap.suspend()
+ print ("suspend2, captured %%s" %% (outerr,))
+ """ % (method,))
+ result = testdir.runpython(p)
+ result.stdout.fnmatch_lines([
+ "suspend, captured*hello1*",
+ "suspend2, captured*hello2*WARNING:root:hello3*",
+ ])
+ assert "atexit" not in result.stderr.str()
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/io_/test_saferepr.py b/testing/web-platform/tests/tools/third_party/py/testing/io_/test_saferepr.py
new file mode 100644
index 0000000000..97be1416fe
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/io_/test_saferepr.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import generators
+import py
+import sys
+
+saferepr = py.io.saferepr
+
+class TestSafeRepr:
+ def test_simple_repr(self):
+ assert saferepr(1) == '1'
+ assert saferepr(None) == 'None'
+
+ def test_maxsize(self):
+ s = saferepr('x'*50, maxsize=25)
+ assert len(s) == 25
+ expected = repr('x'*10 + '...' + 'x'*10)
+ assert s == expected
+
+ def test_maxsize_error_on_instance(self):
+ class A:
+ def __repr__(self):
+ raise ValueError('...')
+
+ s = saferepr(('*'*50, A()), maxsize=25)
+ assert len(s) == 25
+ assert s[0] == '(' and s[-1] == ')'
+
+ def test_exceptions(self):
+ class BrokenRepr:
+ def __init__(self, ex):
+ self.ex = ex
+ foo = 0
+ def __repr__(self):
+ raise self.ex
+ class BrokenReprException(Exception):
+ __str__ = None
+ __repr__ = None
+ assert 'Exception' in saferepr(BrokenRepr(Exception("broken")))
+ s = saferepr(BrokenReprException("really broken"))
+ assert 'TypeError' in s
+ assert 'TypeError' in saferepr(BrokenRepr("string"))
+
+ s2 = saferepr(BrokenRepr(BrokenReprException('omg even worse')))
+ assert 'NameError' not in s2
+ assert 'unknown' in s2
+
+ def test_big_repr(self):
+ from py._io.saferepr import SafeRepr
+ assert len(saferepr(range(1000))) <= \
+ len('[' + SafeRepr().maxlist * "1000" + ']')
+
+ def test_repr_on_newstyle(self):
+ class Function(object):
+ def __repr__(self):
+ return "<%s>" %(self.name)
+ try:
+ s = saferepr(Function())
+ except Exception:
+ py.test.fail("saferepr failed for newstyle class")
+
+ def test_unicode(self):
+ val = py.builtin._totext('£€', 'utf-8')
+ reprval = py.builtin._totext("'£€'", 'utf-8')
+ assert saferepr(val) == reprval
+
+def test_unicode_handling():
+ value = py.builtin._totext('\xc4\x85\xc4\x87\n', 'utf-8').encode('utf8')
+ def f():
+ raise Exception(value)
+ excinfo = py.test.raises(Exception, f)
+ s = str(excinfo)
+ if sys.version_info[0] < 3:
+ u = unicode(excinfo)
+
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/io_/test_terminalwriter.py b/testing/web-platform/tests/tools/third_party/py/testing/io_/test_terminalwriter.py
new file mode 100644
index 0000000000..44b4f1ddee
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/io_/test_terminalwriter.py
@@ -0,0 +1,341 @@
+from collections import namedtuple
+
+import py
+import os, sys
+from py._io import terminalwriter
+import codecs
+import pytest
+
+def test_get_terminal_width():
+ x = py.io.get_terminal_width
+ assert x == terminalwriter.get_terminal_width
+
+def test_getdimensions(monkeypatch):
+ if sys.version_info >= (3, 3):
+ import shutil
+ Size = namedtuple('Size', 'lines columns')
+ monkeypatch.setattr(shutil, 'get_terminal_size', lambda: Size(60, 100))
+ assert terminalwriter._getdimensions() == (60, 100)
+ else:
+ fcntl = py.test.importorskip("fcntl")
+ import struct
+ l = []
+ monkeypatch.setattr(fcntl, 'ioctl', lambda *args: l.append(args))
+ try:
+ terminalwriter._getdimensions()
+ except (TypeError, struct.error):
+ pass
+ assert len(l) == 1
+ assert l[0][0] == 1
+
+def test_terminal_width_COLUMNS(monkeypatch):
+ """ Dummy test for get_terminal_width
+ """
+ fcntl = py.test.importorskip("fcntl")
+ monkeypatch.setattr(fcntl, 'ioctl', lambda *args: int('x'))
+ monkeypatch.setenv('COLUMNS', '42')
+ assert terminalwriter.get_terminal_width() == 42
+ monkeypatch.delenv('COLUMNS', raising=False)
+
+def test_terminalwriter_defaultwidth_80(monkeypatch):
+ monkeypatch.setattr(terminalwriter, '_getdimensions', lambda: 0/0)
+ monkeypatch.delenv('COLUMNS', raising=False)
+ tw = py.io.TerminalWriter()
+ assert tw.fullwidth == 80
+
+def test_terminalwriter_getdimensions_bogus(monkeypatch):
+ monkeypatch.setattr(terminalwriter, '_getdimensions', lambda: (10,10))
+ monkeypatch.delenv('COLUMNS', raising=False)
+ tw = py.io.TerminalWriter()
+ assert tw.fullwidth == 80
+
+def test_terminalwriter_getdimensions_emacs(monkeypatch):
+    # emacs terminal returns (0,0) but sets COLUMNS properly
+ monkeypatch.setattr(terminalwriter, '_getdimensions', lambda: (0,0))
+ monkeypatch.setenv('COLUMNS', '42')
+ tw = py.io.TerminalWriter()
+ assert tw.fullwidth == 42
+
+def test_terminalwriter_computes_width(monkeypatch):
+ monkeypatch.setattr(terminalwriter, 'get_terminal_width', lambda: 42)
+ tw = py.io.TerminalWriter()
+ assert tw.fullwidth == 42
+
+def test_terminalwriter_default_instantiation():
+ tw = py.io.TerminalWriter(stringio=True)
+ assert hasattr(tw, 'stringio')
+
+def test_terminalwriter_dumb_term_no_markup(monkeypatch):
+ monkeypatch.setattr(os, 'environ', {'TERM': 'dumb', 'PATH': ''})
+ class MyFile:
+ closed = False
+ def isatty(self):
+ return True
+ monkeypatch.setattr(sys, 'stdout', MyFile())
+ try:
+ assert sys.stdout.isatty()
+ tw = py.io.TerminalWriter()
+ assert not tw.hasmarkup
+ finally:
+ monkeypatch.undo()
+
+def test_terminalwriter_file_unicode(tmpdir):
+ f = codecs.open(str(tmpdir.join("xyz")), "wb", "utf8")
+ tw = py.io.TerminalWriter(file=f)
+ assert tw.encoding == "utf8"
+
+def test_unicode_encoding():
+ msg = py.builtin._totext('b\u00f6y', 'utf8')
+ for encoding in 'utf8', 'latin1':
+ l = []
+ tw = py.io.TerminalWriter(l.append, encoding=encoding)
+ tw.line(msg)
+ assert l[0].strip() == msg.encode(encoding)
+
+@pytest.mark.parametrize("encoding", ["ascii"])
+def test_unicode_on_file_with_ascii_encoding(tmpdir, monkeypatch, encoding):
+ msg = py.builtin._totext('hell\xf6', "latin1")
+ #pytest.raises(UnicodeEncodeError, lambda: bytes(msg))
+ f = codecs.open(str(tmpdir.join("x")), "w", encoding)
+ tw = py.io.TerminalWriter(f)
+ tw.line(msg)
+ f.close()
+ s = tmpdir.join("x").open("rb").read().strip()
+ assert encoding == "ascii"
+ assert s == msg.encode("unicode-escape")
+
+
+win32 = int(sys.platform == "win32")
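+# expected separator widths below are one char shorter on win32 (see the (60-win32) adjustments)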
+class TestTerminalWriter:
+ def pytest_generate_tests(self, metafunc):
+ if "tw" in metafunc.funcargnames:
+ metafunc.addcall(id="path", param="path")
+ metafunc.addcall(id="stringio", param="stringio")
+ metafunc.addcall(id="callable", param="callable")
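+    # each test runs against three writer targets: a file path, a StringIO, and a plain callable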
+ def pytest_funcarg__tw(self, request):
+ if request.param == "path":
+ tmpdir = request.getfuncargvalue("tmpdir")
+ p = tmpdir.join("tmpfile")
+ f = codecs.open(str(p), 'w+', encoding='utf8')
+ tw = py.io.TerminalWriter(f)
+ def getlines():
+ tw._file.flush()
+ return codecs.open(str(p), 'r',
+ encoding='utf8').readlines()
+ elif request.param == "stringio":
+ tw = py.io.TerminalWriter(stringio=True)
+ def getlines():
+ tw.stringio.seek(0)
+ return tw.stringio.readlines()
+ elif request.param == "callable":
+ writes = []
+ tw = py.io.TerminalWriter(writes.append)
+ def getlines():
+ io = py.io.TextIO()
+ io.write("".join(writes))
+ io.seek(0)
+ return io.readlines()
+ tw.getlines = getlines
+ tw.getvalue = lambda: "".join(getlines())
+ return tw
+
+ def test_line(self, tw):
+ tw.line("hello")
+ l = tw.getlines()
+ assert len(l) == 1
+ assert l[0] == "hello\n"
+
+ def test_line_unicode(self, tw):
+ for encoding in 'utf8', 'latin1':
+ tw._encoding = encoding
+ msg = py.builtin._totext('b\u00f6y', 'utf8')
+ tw.line(msg)
+ l = tw.getlines()
+ assert l[0] == msg + "\n"
+
+ def test_sep_no_title(self, tw):
+ tw.sep("-", fullwidth=60)
+ l = tw.getlines()
+ assert len(l) == 1
+ assert l[0] == "-" * (60-win32) + "\n"
+
+ def test_sep_with_title(self, tw):
+ tw.sep("-", "hello", fullwidth=60)
+ l = tw.getlines()
+ assert len(l) == 1
+ assert l[0] == "-" * 26 + " hello " + "-" * (27-win32) + "\n"
+
+ def test_sep_longer_than_width(self, tw):
+ tw.sep('-', 'a' * 10, fullwidth=5)
+ line, = tw.getlines()
+        # even though the string is wider than the line, we still get a separator
+ assert line == '- aaaaaaaaaa -\n'
+
+ @py.test.mark.skipif("sys.platform == 'win32'")
+ def test__escaped(self, tw):
+ text2 = tw._escaped("hello", (31))
+ assert text2.find("hello") != -1
+
+ @py.test.mark.skipif("sys.platform == 'win32'")
+ def test_markup(self, tw):
+ for bold in (True, False):
+ for color in ("red", "green"):
+ text2 = tw.markup("hello", **{color: True, 'bold': bold})
+ assert text2.find("hello") != -1
+ py.test.raises(ValueError, "tw.markup('x', wronkw=3)")
+ py.test.raises(ValueError, "tw.markup('x', wronkw=0)")
+
+ def test_line_write_markup(self, tw):
+ tw.hasmarkup = True
+ tw.line("x", bold=True)
+ tw.write("x\n", red=True)
+ l = tw.getlines()
+ if sys.platform != "win32":
+ assert len(l[0]) >= 2, l
+ assert len(l[1]) >= 2, l
+
+ def test_attr_fullwidth(self, tw):
+ tw.sep("-", "hello", fullwidth=70)
+ tw.fullwidth = 70
+ tw.sep("-", "hello")
+ l = tw.getlines()
+ assert len(l[0]) == len(l[1])
+
+ def test_reline(self, tw):
+ tw.line("hello")
+ tw.hasmarkup = False
+ pytest.raises(ValueError, lambda: tw.reline("x"))
+ tw.hasmarkup = True
+ tw.reline("0 1 2")
+ tw.getlines()
+ l = tw.getvalue().split("\n")
+ assert len(l) == 2
+ tw.reline("0 1 3")
+ l = tw.getvalue().split("\n")
+ assert len(l) == 2
+ assert l[1].endswith("0 1 3\r")
+ tw.line("so")
+ l = tw.getvalue().split("\n")
+ assert len(l) == 3
+ assert l[-1] == ""
+ assert l[1] == ("0 1 2\r0 1 3\rso ")
+ assert l[0] == "hello"
+
+
+def test_terminal_with_callable_write_and_flush():
+ l = set()
+ class fil:
+ flush = lambda self: l.add("1")
+ write = lambda self, x: l.add("1")
+ __call__ = lambda self, x: l.add("2")
+
+ tw = py.io.TerminalWriter(fil())
+ tw.line("hello")
+ assert l == set(["1"])
+ del fil.flush
+ l.clear()
+ tw = py.io.TerminalWriter(fil())
+ tw.line("hello")
+ assert l == set(["2"])
+
+
+def test_chars_on_current_line():
+ tw = py.io.TerminalWriter(stringio=True)
+
+ written = []
+
+ def write_and_check(s, expected):
+ tw.write(s, bold=True)
+ written.append(s)
+ assert tw.chars_on_current_line == expected
+ assert tw.stringio.getvalue() == ''.join(written)
+
+ write_and_check('foo', 3)
+ write_and_check('bar', 6)
+ write_and_check('\n', 0)
+ write_and_check('\n', 0)
+ write_and_check('\n\n\n', 0)
+ write_and_check('\nfoo', 3)
+ write_and_check('\nfbar\nhello', 5)
+ write_and_check('10', 7)
+
+
+@pytest.mark.skipif(sys.platform == "win32", reason="win32 has no native ansi")
+def test_attr_hasmarkup():
+ tw = py.io.TerminalWriter(stringio=True)
+ assert not tw.hasmarkup
+ tw.hasmarkup = True
+ tw.line("hello", bold=True)
+ s = tw.stringio.getvalue()
+ assert len(s) > len("hello\n")
+ assert '\x1b[1m' in s
+ assert '\x1b[0m' in s
+
+@pytest.mark.skipif(sys.platform == "win32", reason="win32 has no native ansi")
+def test_ansi_print():
+ # we have no easy way to construct a file that
+ # represents a terminal
+ f = py.io.TextIO()
+ f.isatty = lambda: True
+ py.io.ansi_print("hello", 0x32, file=f)
+ text2 = f.getvalue()
+ assert text2.find("hello") != -1
+ assert len(text2) >= len("hello\n")
+ assert '\x1b[50m' in text2
+ assert '\x1b[0m' in text2
+
+def test_should_do_markup_PY_COLORS_eq_1(monkeypatch):
+ monkeypatch.setitem(os.environ, 'PY_COLORS', '1')
+ tw = py.io.TerminalWriter(stringio=True)
+ assert tw.hasmarkup
+ tw.line("hello", bold=True)
+ s = tw.stringio.getvalue()
+ assert len(s) > len("hello\n")
+ assert '\x1b[1m' in s
+ assert '\x1b[0m' in s
+
+def test_should_do_markup_PY_COLORS_eq_0(monkeypatch):
+ monkeypatch.setitem(os.environ, 'PY_COLORS', '0')
+ f = py.io.TextIO()
+ f.isatty = lambda: True
+ tw = py.io.TerminalWriter(file=f)
+ assert not tw.hasmarkup
+ tw.line("hello", bold=True)
+ s = f.getvalue()
+ assert s == "hello\n"
+
+def test_should_do_markup(monkeypatch):
+ monkeypatch.delenv("PY_COLORS", raising=False)
+ monkeypatch.delenv("NO_COLOR", raising=False)
+
+ should_do_markup = terminalwriter.should_do_markup
+
+ f = py.io.TextIO()
+ f.isatty = lambda: True
+
+ assert should_do_markup(f) is True
+
+ # NO_COLOR without PY_COLORS.
+ monkeypatch.setenv("NO_COLOR", "0")
+ assert should_do_markup(f) is False
+ monkeypatch.setenv("NO_COLOR", "1")
+ assert should_do_markup(f) is False
+ monkeypatch.setenv("NO_COLOR", "any")
+ assert should_do_markup(f) is False
+
+ # PY_COLORS overrides NO_COLOR ("0" and "1" only).
+ monkeypatch.setenv("PY_COLORS", "1")
+ assert should_do_markup(f) is True
+ monkeypatch.setenv("PY_COLORS", "0")
+ assert should_do_markup(f) is False
+ # Uses NO_COLOR.
+ monkeypatch.setenv("PY_COLORS", "any")
+ assert should_do_markup(f) is False
+ monkeypatch.delenv("NO_COLOR")
+ assert should_do_markup(f) is True
+
+ # Back to defaults.
+ monkeypatch.delenv("PY_COLORS")
+ assert should_do_markup(f) is True
+ f.isatty = lambda: False
+ assert should_do_markup(f) is False
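
The PY_COLORS / NO_COLOR assertions above pin down a precedence order without stating it in one place. The sketch below reconstructs that order purely from these tests; the vendored terminalwriter.should_do_markup may differ in detail, and should_do_markup_sketch is an invented name.

import os

def should_do_markup_sketch(file):
    # PY_COLORS wins outright, but only for the exact values "1" and "0".
    if os.environ.get('PY_COLORS') == '1':
        return True
    if os.environ.get('PY_COLORS') == '0':
        return False
    # Any other PY_COLORS value falls through; NO_COLOR (any value) disables markup.
    if 'NO_COLOR' in os.environ:
        return False
    # Otherwise markup is used only when writing to a real, non-dumb terminal.
    return file.isatty() and os.environ.get('TERM') != 'dumb'
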
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/io_/test_terminalwriter_linewidth.py b/testing/web-platform/tests/tools/third_party/py/testing/io_/test_terminalwriter_linewidth.py
new file mode 100644
index 0000000000..e6d84fbf7a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/io_/test_terminalwriter_linewidth.py
@@ -0,0 +1,56 @@
+# coding: utf-8
+from __future__ import unicode_literals
+
+from py._io.terminalwriter import TerminalWriter
+
+
+def test_terminal_writer_line_width_init():
+ tw = TerminalWriter()
+ assert tw.chars_on_current_line == 0
+ assert tw.width_of_current_line == 0
+
+
+def test_terminal_writer_line_width_update():
+ tw = TerminalWriter()
+ tw.write('hello world')
+ assert tw.chars_on_current_line == 11
+ assert tw.width_of_current_line == 11
+
+
+def test_terminal_writer_line_width_update_with_newline():
+ tw = TerminalWriter()
+ tw.write('hello\nworld')
+ assert tw.chars_on_current_line == 5
+ assert tw.width_of_current_line == 5
+
+
+def test_terminal_writer_line_width_update_with_wide_text():
+ tw = TerminalWriter()
+    tw.write('乇乂ㄒ尺卂 ㄒ卄丨匚匚')
+ assert tw.chars_on_current_line == 11
+ assert tw.width_of_current_line == 21 # 5*2 + 1 + 5*2
+
+
+def test_terminal_writer_line_width_update_with_wide_bytes():
+ tw = TerminalWriter()
+    tw.write('乇乂ㄒ尺卂 ㄒ卄丨匚匚'.encode('utf-8'))
+ assert tw.chars_on_current_line == 11
+ assert tw.width_of_current_line == 21
+
+
+def test_terminal_writer_line_width_composed():
+ tw = TerminalWriter()
+    text = 'café food'
+ assert len(text) == 9
+ tw.write(text)
+ assert tw.chars_on_current_line == 9
+ assert tw.width_of_current_line == 9
+
+
+def test_terminal_writer_line_width_combining():
+ tw = TerminalWriter()
+    text = 'cafe\u0301 food'  # 'e' followed by U+0301 combining acute accent
+ assert len(text) == 10
+ tw.write(text)
+ assert tw.chars_on_current_line == 10
+ assert tw.width_of_current_line == 9
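
The width assertions above (11 characters rendering as 21 columns, a combining accent adding a character but no width) follow the usual Unicode column rules. A rough stdlib-only sketch of that arithmetic, not the vendored implementation:

import unicodedata

def display_width(text):
    # Combining marks take no column; East Asian wide/fullwidth characters take two.
    width = 0
    for ch in text:
        if unicodedata.combining(ch):
            continue
        width += 2 if unicodedata.east_asian_width(ch) in ('W', 'F') else 1
    return width

assert display_width(u'\u4e47\u4e42\u3112\u5c3a\u5342 \u3112\u5344\u4e28\u531a\u531a') == 21
assert display_width(u'cafe\u0301 food') == 9
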
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/log/__init__.py b/testing/web-platform/tests/tools/third_party/py/testing/log/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/log/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/log/test_log.py b/testing/web-platform/tests/tools/third_party/py/testing/log/test_log.py
new file mode 100644
index 0000000000..5c706d9b6a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/log/test_log.py
@@ -0,0 +1,191 @@
+import py
+
+from py._log.log import default_keywordmapper
+
+callcapture = py.io.StdCapture.call
+
+
+def setup_module(mod):
+ mod._oldstate = default_keywordmapper.getstate()
+
+def teardown_module(mod):
+ default_keywordmapper.setstate(mod._oldstate)
+
+class TestLogProducer:
+ def setup_method(self, meth):
+ from py._log.log import default_keywordmapper
+ default_keywordmapper.setstate(_oldstate)
+
+ def test_getstate_setstate(self):
+ state = py.log._getstate()
+ py.log.setconsumer("hello", [].append)
+ state2 = py.log._getstate()
+ assert state2 != state
+ py.log._setstate(state)
+ state3 = py.log._getstate()
+ assert state3 == state
+
+ def test_producer_repr(self):
+ d = py.log.Producer("default")
+ assert repr(d).find('default') != -1
+
+ def test_produce_one_keyword(self):
+ l = []
+ py.log.setconsumer('s1', l.append)
+ py.log.Producer('s1')("hello world")
+ assert len(l) == 1
+ msg = l[0]
+ assert msg.content().startswith('hello world')
+ assert msg.prefix() == '[s1] '
+ assert str(msg) == "[s1] hello world"
+
+ def test_producer_class(self):
+ p = py.log.Producer('x1')
+ l = []
+ py.log.setconsumer(p._keywords, l.append)
+ p("hello")
+ assert len(l) == 1
+ assert len(l[0].keywords) == 1
+ assert 'x1' == l[0].keywords[0]
+
+ def test_producer_caching(self):
+ p = py.log.Producer('x1')
+ x2 = p.x2
+ assert x2 is p.x2
+
+class TestLogConsumer:
+ def setup_method(self, meth):
+ default_keywordmapper.setstate(_oldstate)
+ def test_log_none(self):
+ log = py.log.Producer("XXX")
+ l = []
+ py.log.setconsumer('XXX', l.append)
+ log("1")
+ assert l
+ l[:] = []
+ py.log.setconsumer('XXX', None)
+ log("2")
+ assert not l
+
+ def test_log_default_stderr(self):
+ res, out, err = callcapture(py.log.Producer("default"), "hello")
+ assert err.strip() == "[default] hello"
+
+ def test_simple_consumer_match(self):
+ l = []
+ py.log.setconsumer("x1", l.append)
+ p = py.log.Producer("x1 x2")
+ p("hello")
+ assert l
+ assert l[0].content() == "hello"
+
+ def test_simple_consumer_match_2(self):
+ l = []
+ p = py.log.Producer("x1 x2")
+ py.log.setconsumer(p._keywords, l.append)
+ p("42")
+ assert l
+ assert l[0].content() == "42"
+
+ def test_no_auto_producer(self):
+ p = py.log.Producer('x')
+ py.test.raises(AttributeError, "p._x")
+ py.test.raises(AttributeError, "p.x_y")
+
+ def test_setconsumer_with_producer(self):
+ l = []
+ p = py.log.Producer("hello")
+ py.log.setconsumer(p, l.append)
+ p("world")
+ assert str(l[0]) == "[hello] world"
+
+ def test_multi_consumer(self):
+ l = []
+ py.log.setconsumer("x1", l.append)
+ py.log.setconsumer("x1 x2", None)
+ p = py.log.Producer("x1 x2")
+ p("hello")
+ assert not l
+ py.log.Producer("x1")("hello")
+ assert l
+ assert l[0].content() == "hello"
+
+ def test_log_stderr(self):
+ py.log.setconsumer("xyz", py.log.STDOUT)
+ res, out, err = callcapture(py.log.Producer("xyz"), "hello")
+ assert not err
+ assert out.strip() == '[xyz] hello'
+
+ def test_log_file(self, tmpdir):
+ customlog = tmpdir.join('log.out')
+ py.log.setconsumer("default", open(str(customlog), 'w', 1))
+ py.log.Producer("default")("hello world #1")
+ assert customlog.readlines() == ['[default] hello world #1\n']
+
+ py.log.setconsumer("default", py.log.Path(customlog, buffering=False))
+ py.log.Producer("default")("hello world #2")
+ res = customlog.readlines()
+ assert res == ['[default] hello world #2\n'] # no append by default!
+
+ def test_log_file_append_mode(self, tmpdir):
+ logfilefn = tmpdir.join('log_append.out')
+
+        # Path does not append by default (see the test above), so request append mode explicitly
+ py.log.setconsumer("default", py.log.Path(logfilefn, append=True,
+ buffering=0))
+ assert logfilefn.check()
+ py.log.Producer("default")("hello world #1")
+ lines = logfilefn.readlines()
+ assert lines == ['[default] hello world #1\n']
+ py.log.setconsumer("default", py.log.Path(logfilefn, append=True,
+ buffering=0))
+ py.log.Producer("default")("hello world #1")
+ lines = logfilefn.readlines()
+ assert lines == ['[default] hello world #1\n',
+ '[default] hello world #1\n']
+
+ def test_log_file_delayed_create(self, tmpdir):
+ logfilefn = tmpdir.join('log_create.out')
+
+ py.log.setconsumer("default", py.log.Path(logfilefn,
+ delayed_create=True, buffering=0))
+ assert not logfilefn.check()
+ py.log.Producer("default")("hello world #1")
+ lines = logfilefn.readlines()
+ assert lines == ['[default] hello world #1\n']
+
+ def test_keyword_based_log_files(self, tmpdir):
+ logfiles = []
+ keywords = 'k1 k2 k3'.split()
+ for key in keywords:
+ path = tmpdir.join(key)
+ py.log.setconsumer(key, py.log.Path(path, buffering=0))
+
+ py.log.Producer('k1')('1')
+ py.log.Producer('k2')('2')
+ py.log.Producer('k3')('3')
+
+ for key in keywords:
+ path = tmpdir.join(key)
+ assert path.read().strip() == '[%s] %s' % (key, key[-1])
+
+ # disabled for now; the syslog log file can usually be read only by root
+ # I manually inspected /var/log/messages and the entries were there
+ def no_test_log_syslog(self):
+ py.log.setconsumer("default", py.log.Syslog())
+ py.log.default("hello world #1")
+
+ # disabled for now until I figure out how to read entries in the
+ # Event Logs on Windows
+ # I manually inspected the Application Log and the entries were there
+ def no_test_log_winevent(self):
+ py.log.setconsumer("default", py.log.WinEvent())
+ py.log.default("hello world #1")
+
+ # disabled for now until I figure out how to properly pass the parameters
+ def no_test_log_email(self):
+ py.log.setconsumer("default", py.log.Email(mailhost="gheorghiu.net",
+ fromaddr="grig",
+ toaddrs="grig",
+ subject = "py.log email"))
+ py.log.default("hello world #1")
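
For orientation, the producer/consumer pattern these tests drive looks roughly like this in calling code; the keyword "s1" matches the single-keyword case asserted above, and records is just an illustrative consumer.

import py

records = []
py.log.setconsumer("s1", records.append)   # any callable can act as a consumer
log = py.log.Producer("s1")
log("hello world")
assert str(records[0]) == "[s1] hello world"
py.log.setconsumer("s1", None)             # silence the keyword again
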
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/log/test_warning.py b/testing/web-platform/tests/tools/third_party/py/testing/log/test_warning.py
new file mode 100644
index 0000000000..36efec913a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/log/test_warning.py
@@ -0,0 +1,85 @@
+import sys
+from distutils.version import LooseVersion
+
+import pytest
+
+import py
+
+mypath = py.path.local(__file__).new(ext=".py")
+
+
+pytestmark = pytest.mark.skipif(LooseVersion(pytest.__version__) >= LooseVersion('3.1'),
+ reason='apiwarn is not compatible with pytest >= 3.1 (#162)')
+
+
+@pytest.mark.xfail
+def test_forwarding_to_warnings_module():
+ pytest.deprecated_call(py.log._apiwarn, "1.3", "..")
+
+def test_apiwarn_functional(recwarn):
+ capture = py.io.StdCapture()
+ py.log._apiwarn("x.y.z", "something", stacklevel=1)
+ out, err = capture.reset()
+ py.builtin.print_("out", out)
+ py.builtin.print_("err", err)
+ assert err.find("x.y.z") != -1
+ lno = py.code.getrawcode(test_apiwarn_functional).co_firstlineno + 2
+ exp = "%s:%s" % (mypath, lno)
+ assert err.find(exp) != -1
+
+def test_stacklevel(recwarn):
+ def f():
+ py.log._apiwarn("x", "some", stacklevel=2)
+ # 3
+ # 4
+ capture = py.io.StdCapture()
+ f()
+ out, err = capture.reset()
+ lno = py.code.getrawcode(test_stacklevel).co_firstlineno + 6
+ warning = str(err)
+ assert warning.find(":%s" % lno) != -1
+
+def test_stacklevel_initpkg_with_resolve(testdir, recwarn):
+ testdir.makepyfile(modabc="""
+ import py
+ def f():
+ py.log._apiwarn("x", "some", stacklevel="apipkg123")
+ """)
+ testdir.makepyfile(apipkg123="""
+ def __getattr__():
+ import modabc
+ modabc.f()
+ """)
+ p = testdir.makepyfile("""
+ import apipkg123
+ apipkg123.__getattr__()
+ """)
+ capture = py.io.StdCapture()
+ p.pyimport()
+ out, err = capture.reset()
+ warning = str(err)
+ loc = 'test_stacklevel_initpkg_with_resolve.py:2'
+ assert warning.find(loc) != -1
+
+def test_stacklevel_initpkg_no_resolve(recwarn):
+ def f():
+ py.log._apiwarn("x", "some", stacklevel="apipkg")
+ capture = py.io.StdCapture()
+ f()
+ out, err = capture.reset()
+ lno = py.code.getrawcode(test_stacklevel_initpkg_no_resolve).co_firstlineno + 2
+ warning = str(err)
+ assert warning.find(":%s" % lno) != -1
+
+
+def test_function(recwarn):
+ capture = py.io.StdCapture()
+ py.log._apiwarn("x.y.z", "something", function=test_function)
+ out, err = capture.reset()
+ py.builtin.print_("out", out)
+ py.builtin.print_("err", err)
+ assert err.find("x.y.z") != -1
+ lno = py.code.getrawcode(test_function).co_firstlineno
+ exp = "%s:%s" % (mypath, lno)
+ assert err.find(exp) != -1
+
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/path/common.py b/testing/web-platform/tests/tools/third_party/py/testing/path/common.py
new file mode 100644
index 0000000000..d69a1c39d0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/path/common.py
@@ -0,0 +1,492 @@
+import py
+import sys
+
+import pytest
+
+class CommonFSTests(object):
+ def test_constructor_equality(self, path1):
+ p = path1.__class__(path1)
+ assert p == path1
+
+ def test_eq_nonstring(self, path1):
+ p1 = path1.join('sampledir')
+ p2 = path1.join('sampledir')
+ assert p1 == p2
+
+ def test_new_identical(self, path1):
+ assert path1 == path1.new()
+
+ def test_join(self, path1):
+ p = path1.join('sampledir')
+ strp = str(p)
+ assert strp.endswith('sampledir')
+ assert strp.startswith(str(path1))
+
+ def test_join_normalized(self, path1):
+ newpath = path1.join(path1.sep+'sampledir')
+ strp = str(newpath)
+ assert strp.endswith('sampledir')
+ assert strp.startswith(str(path1))
+ newpath = path1.join((path1.sep*2) + 'sampledir')
+ strp = str(newpath)
+ assert strp.endswith('sampledir')
+ assert strp.startswith(str(path1))
+
+ def test_join_noargs(self, path1):
+ newpath = path1.join()
+ assert path1 == newpath
+
+ def test_add_something(self, path1):
+ p = path1.join('sample')
+ p = p + 'dir'
+ assert p.check()
+ assert p.exists()
+ assert p.isdir()
+ assert not p.isfile()
+
+ def test_parts(self, path1):
+ newpath = path1.join('sampledir', 'otherfile')
+ par = newpath.parts()[-3:]
+ assert par == [path1, path1.join('sampledir'), newpath]
+
+ revpar = newpath.parts(reverse=True)[:3]
+ assert revpar == [newpath, path1.join('sampledir'), path1]
+
+ def test_common(self, path1):
+ other = path1.join('sampledir')
+ x = other.common(path1)
+ assert x == path1
+
+ #def test_parents_nonexisting_file(self, path1):
+ # newpath = path1 / 'dirnoexist' / 'nonexisting file'
+ # par = list(newpath.parents())
+ # assert par[:2] == [path1 / 'dirnoexist', path1]
+
+ def test_basename_checks(self, path1):
+ newpath = path1.join('sampledir')
+ assert newpath.check(basename='sampledir')
+ assert newpath.check(notbasename='xyz')
+ assert newpath.basename == 'sampledir'
+
+ def test_basename(self, path1):
+ newpath = path1.join('sampledir')
+ assert newpath.check(basename='sampledir')
+ assert newpath.basename, 'sampledir'
+
+ def test_dirname(self, path1):
+ newpath = path1.join('sampledir')
+ assert newpath.dirname == str(path1)
+
+ def test_dirpath(self, path1):
+ newpath = path1.join('sampledir')
+ assert newpath.dirpath() == path1
+
+ def test_dirpath_with_args(self, path1):
+ newpath = path1.join('sampledir')
+ assert newpath.dirpath('x') == path1.join('x')
+
+ def test_newbasename(self, path1):
+ newpath = path1.join('samplefile')
+ newbase = newpath.new(basename="samplefile2")
+ assert newbase.basename == "samplefile2"
+ assert newbase.dirpath() == newpath.dirpath()
+
+ def test_not_exists(self, path1):
+ assert not path1.join('does_not_exist').check()
+ assert path1.join('does_not_exist').check(exists=0)
+
+ def test_exists(self, path1):
+ assert path1.join("samplefile").check()
+ assert path1.join("samplefile").check(exists=1)
+ assert path1.join("samplefile").exists()
+ assert path1.join("samplefile").isfile()
+ assert not path1.join("samplefile").isdir()
+
+ def test_dir(self, path1):
+ #print repr(path1.join("sampledir"))
+ assert path1.join("sampledir").check(dir=1)
+ assert path1.join('samplefile').check(notdir=1)
+ assert not path1.join("samplefile").check(dir=1)
+ assert path1.join("samplefile").exists()
+ assert not path1.join("samplefile").isdir()
+ assert path1.join("samplefile").isfile()
+
+ def test_fnmatch_file(self, path1):
+ assert path1.join("samplefile").check(fnmatch='s*e')
+ assert path1.join("samplefile").fnmatch('s*e')
+ assert not path1.join("samplefile").fnmatch('s*x')
+ assert not path1.join("samplefile").check(fnmatch='s*x')
+
+ #def test_fnmatch_dir(self, path1):
+
+ # pattern = path1.sep.join(['s*file'])
+ # sfile = path1.join("samplefile")
+ # assert sfile.check(fnmatch=pattern)
+
+ def test_relto(self, path1):
+ l=path1.join("sampledir", "otherfile")
+ assert l.relto(path1) == l.sep.join(["sampledir", "otherfile"])
+ assert l.check(relto=path1)
+ assert path1.check(notrelto=l)
+ assert not path1.check(relto=l)
+
+ def test_bestrelpath(self, path1):
+ curdir = path1
+ sep = curdir.sep
+ s = curdir.bestrelpath(curdir)
+ assert s == "."
+ s = curdir.bestrelpath(curdir.join("hello", "world"))
+ assert s == "hello" + sep + "world"
+
+ s = curdir.bestrelpath(curdir.dirpath().join("sister"))
+ assert s == ".." + sep + "sister"
+ assert curdir.bestrelpath(curdir.dirpath()) == ".."
+
+ assert curdir.bestrelpath("hello") == "hello"
+
+ def test_relto_not_relative(self, path1):
+ l1=path1.join("bcde")
+ l2=path1.join("b")
+ assert not l1.relto(l2)
+ assert not l2.relto(l1)
+
+ @py.test.mark.xfail("sys.platform.startswith('java')")
+ def test_listdir(self, path1):
+ l = path1.listdir()
+ assert path1.join('sampledir') in l
+ assert path1.join('samplefile') in l
+ py.test.raises(py.error.ENOTDIR,
+ "path1.join('samplefile').listdir()")
+
+ def test_listdir_fnmatchstring(self, path1):
+ l = path1.listdir('s*dir')
+ assert len(l)
+ assert l[0], path1.join('sampledir')
+
+ def test_listdir_filter(self, path1):
+ l = path1.listdir(lambda x: x.check(dir=1))
+ assert path1.join('sampledir') in l
+ assert not path1.join('samplefile') in l
+
+ def test_listdir_sorted(self, path1):
+ l = path1.listdir(lambda x: x.check(basestarts="sample"), sort=True)
+ assert path1.join('sampledir') == l[0]
+ assert path1.join('samplefile') == l[1]
+ assert path1.join('samplepickle') == l[2]
+
+ def test_visit_nofilter(self, path1):
+ l = []
+ for i in path1.visit():
+ l.append(i.relto(path1))
+ assert "sampledir" in l
+ assert path1.sep.join(["sampledir", "otherfile"]) in l
+
+ def test_visit_norecurse(self, path1):
+ l = []
+ for i in path1.visit(None, lambda x: x.basename != "sampledir"):
+ l.append(i.relto(path1))
+ assert "sampledir" in l
+ assert not path1.sep.join(["sampledir", "otherfile"]) in l
+
+ @pytest.mark.parametrize('fil', ['*dir', u'*dir',
+ pytest.mark.skip("sys.version_info <"
+ " (3,6)")(b'*dir')])
+ def test_visit_filterfunc_is_string(self, path1, fil):
+ l = []
+ for i in path1.visit(fil):
+ l.append(i.relto(path1))
+ assert len(l), 2
+ assert "sampledir" in l
+ assert "otherdir" in l
+
+ @py.test.mark.xfail("sys.platform.startswith('java')")
+ def test_visit_ignore(self, path1):
+ p = path1.join('nonexisting')
+ assert list(p.visit(ignore=py.error.ENOENT)) == []
+
+ def test_visit_endswith(self, path1):
+ l = []
+ for i in path1.visit(lambda x: x.check(endswith="file")):
+ l.append(i.relto(path1))
+ assert path1.sep.join(["sampledir", "otherfile"]) in l
+ assert "samplefile" in l
+
+ def test_endswith(self, path1):
+ assert path1.check(notendswith='.py')
+ x = path1.join('samplefile')
+ assert x.check(endswith='file')
+
+ def test_cmp(self, path1):
+ path1 = path1.join('samplefile')
+ path2 = path1.join('samplefile2')
+ assert (path1 < path2) == ('samplefile' < 'samplefile2')
+ assert not (path1 < path1)
+
+ def test_simple_read(self, path1):
+ x = path1.join('samplefile').read('r')
+ assert x == 'samplefile\n'
+
+ def test_join_div_operator(self, path1):
+ newpath = path1 / '/sampledir' / '/test//'
+ newpath2 = path1.join('sampledir', 'test')
+ assert newpath == newpath2
+
+ def test_ext(self, path1):
+ newpath = path1.join('sampledir.ext')
+ assert newpath.ext == '.ext'
+ newpath = path1.join('sampledir')
+ assert not newpath.ext
+
+ def test_purebasename(self, path1):
+ newpath = path1.join('samplefile.py')
+ assert newpath.purebasename == 'samplefile'
+
+ def test_multiple_parts(self, path1):
+ newpath = path1.join('samplefile.py')
+ dirname, purebasename, basename, ext = newpath._getbyspec(
+ 'dirname,purebasename,basename,ext')
+ assert str(path1).endswith(dirname) # be careful with win32 'drive'
+ assert purebasename == 'samplefile'
+ assert basename == 'samplefile.py'
+ assert ext == '.py'
+
+ def test_dotted_name_ext(self, path1):
+ newpath = path1.join('a.b.c')
+ ext = newpath.ext
+ assert ext == '.c'
+ assert newpath.ext == '.c'
+
+ def test_newext(self, path1):
+ newpath = path1.join('samplefile.py')
+ newext = newpath.new(ext='.txt')
+ assert newext.basename == "samplefile.txt"
+ assert newext.purebasename == "samplefile"
+
+ def test_readlines(self, path1):
+ fn = path1.join('samplefile')
+ contents = fn.readlines()
+ assert contents == ['samplefile\n']
+
+ def test_readlines_nocr(self, path1):
+ fn = path1.join('samplefile')
+ contents = fn.readlines(cr=0)
+ assert contents == ['samplefile', '']
+
+ def test_file(self, path1):
+ assert path1.join('samplefile').check(file=1)
+
+ def test_not_file(self, path1):
+ assert not path1.join("sampledir").check(file=1)
+ assert path1.join("sampledir").check(file=0)
+
+ def test_non_existent(self, path1):
+ assert path1.join("sampledir.nothere").check(dir=0)
+ assert path1.join("sampledir.nothere").check(file=0)
+ assert path1.join("sampledir.nothere").check(notfile=1)
+ assert path1.join("sampledir.nothere").check(notdir=1)
+ assert path1.join("sampledir.nothere").check(notexists=1)
+ assert not path1.join("sampledir.nothere").check(notfile=0)
+
+ # pattern = path1.sep.join(['s*file'])
+ # sfile = path1.join("samplefile")
+ # assert sfile.check(fnmatch=pattern)
+
+ def test_size(self, path1):
+ url = path1.join("samplefile")
+ assert url.size() > len("samplefile")
+
+ def test_mtime(self, path1):
+ url = path1.join("samplefile")
+ assert url.mtime() > 0
+
+ def test_relto_wrong_type(self, path1):
+ py.test.raises(TypeError, "path1.relto(42)")
+
+ def test_load(self, path1):
+ p = path1.join('samplepickle')
+ obj = p.load()
+ assert type(obj) is dict
+ assert obj.get('answer',None) == 42
+
+ def test_visit_filesonly(self, path1):
+ l = []
+ for i in path1.visit(lambda x: x.check(file=1)):
+ l.append(i.relto(path1))
+ assert not "sampledir" in l
+ assert path1.sep.join(["sampledir", "otherfile"]) in l
+
+ def test_visit_nodotfiles(self, path1):
+ l = []
+ for i in path1.visit(lambda x: x.check(dotfile=0)):
+ l.append(i.relto(path1))
+ assert "sampledir" in l
+ assert path1.sep.join(["sampledir", "otherfile"]) in l
+ assert not ".dotfile" in l
+
+ def test_visit_breadthfirst(self, path1):
+ l = []
+ for i in path1.visit(bf=True):
+ l.append(i.relto(path1))
+ for i, p in enumerate(l):
+ if path1.sep in p:
+ for j in range(i, len(l)):
+ assert path1.sep in l[j]
+ break
+ else:
+ py.test.fail("huh")
+
+ def test_visit_sort(self, path1):
+ l = []
+ for i in path1.visit(bf=True, sort=True):
+ l.append(i.relto(path1))
+ for i, p in enumerate(l):
+ if path1.sep in p:
+ break
+ assert l[:i] == sorted(l[:i])
+ assert l[i:] == sorted(l[i:])
+
+ def test_endswith(self, path1):
+ def chk(p):
+ return p.check(endswith="pickle")
+ assert not chk(path1)
+ assert not chk(path1.join('samplefile'))
+ assert chk(path1.join('somepickle'))
+
+ def test_copy_file(self, path1):
+ otherdir = path1.join('otherdir')
+ initpy = otherdir.join('__init__.py')
+ copied = otherdir.join('copied')
+ initpy.copy(copied)
+ try:
+ assert copied.check()
+ s1 = initpy.read()
+ s2 = copied.read()
+ assert s1 == s2
+ finally:
+ if copied.check():
+ copied.remove()
+
+ def test_copy_dir(self, path1):
+ otherdir = path1.join('otherdir')
+ copied = path1.join('newdir')
+ try:
+ otherdir.copy(copied)
+ assert copied.check(dir=1)
+ assert copied.join('__init__.py').check(file=1)
+ s1 = otherdir.join('__init__.py').read()
+ s2 = copied.join('__init__.py').read()
+ assert s1 == s2
+ finally:
+ if copied.check(dir=1):
+ copied.remove(rec=1)
+
+ def test_remove_file(self, path1):
+ d = path1.ensure('todeleted')
+ assert d.check()
+ d.remove()
+ assert not d.check()
+
+ def test_remove_dir_recursive_by_default(self, path1):
+ d = path1.ensure('to', 'be', 'deleted')
+ assert d.check()
+ p = path1.join('to')
+ p.remove()
+ assert not p.check()
+
+ def test_ensure_dir(self, path1):
+ b = path1.ensure_dir("001", "002")
+ assert b.basename == "002"
+ assert b.isdir()
+
+ def test_mkdir_and_remove(self, path1):
+ tmpdir = path1
+ py.test.raises(py.error.EEXIST, tmpdir.mkdir, 'sampledir')
+ new = tmpdir.join('mktest1')
+ new.mkdir()
+ assert new.check(dir=1)
+ new.remove()
+
+ new = tmpdir.mkdir('mktest')
+ assert new.check(dir=1)
+ new.remove()
+ assert tmpdir.join('mktest') == new
+
+ def test_move_file(self, path1):
+ p = path1.join('samplefile')
+ newp = p.dirpath('moved_samplefile')
+ p.move(newp)
+ try:
+ assert newp.check(file=1)
+ assert not p.check()
+ finally:
+ dp = newp.dirpath()
+ if hasattr(dp, 'revert'):
+ dp.revert()
+ else:
+ newp.move(p)
+ assert p.check()
+
+ def test_move_dir(self, path1):
+ source = path1.join('sampledir')
+ dest = path1.join('moveddir')
+ source.move(dest)
+ assert dest.check(dir=1)
+ assert dest.join('otherfile').check(file=1)
+ assert not source.join('sampledir').check()
+
+ def test_fspath_protocol_match_strpath(self, path1):
+ assert path1.__fspath__() == path1.strpath
+
+ def test_fspath_func_match_strpath(self, path1):
+ try:
+ from os import fspath
+ except ImportError:
+ from py._path.common import fspath
+ assert fspath(path1) == path1.strpath
+
+ @py.test.mark.skip("sys.version_info < (3,6)")
+ def test_fspath_open(self, path1):
+ f = path1.join('opentestfile')
+ open(f)
+
+ @py.test.mark.skip("sys.version_info < (3,6)")
+ def test_fspath_fsencode(self, path1):
+ from os import fsencode
+ assert fsencode(path1) == fsencode(path1.strpath)
+
+def setuptestfs(path):
+ if path.join('samplefile').check():
+ return
+ #print "setting up test fs for", repr(path)
+ samplefile = path.ensure('samplefile')
+ samplefile.write('samplefile\n')
+
+ execfile = path.ensure('execfile')
+ execfile.write('x=42')
+
+ execfilepy = path.ensure('execfile.py')
+ execfilepy.write('x=42')
+
+ d = {1:2, 'hello': 'world', 'answer': 42}
+ path.ensure('samplepickle').dump(d)
+
+ sampledir = path.ensure('sampledir', dir=1)
+ sampledir.ensure('otherfile')
+
+ otherdir = path.ensure('otherdir', dir=1)
+ otherdir.ensure('__init__.py')
+
+ module_a = otherdir.ensure('a.py')
+ module_a.write('from .b import stuff as result\n')
+ module_b = otherdir.ensure('b.py')
+ module_b.write('stuff="got it"\n')
+ module_c = otherdir.ensure('c.py')
+ module_c.write('''import py;
+import otherdir.a
+value = otherdir.a.result
+''')
+ module_d = otherdir.ensure('d.py')
+ module_d.write('''import py;
+from otherdir import a
+value2 = a.result
+''')
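
CommonFSTests is shared between the local and svn path backends; for the local backend, the API surface it exercises reads roughly like this. A minimal sketch using only calls that appear in the tests above:

import py

tmp = py.path.local.mkdtemp()              # scratch directory, removed at the end
f = tmp.ensure("sampledir", "otherfile")   # creates the directory and the file
f.write("samplefile\n")
assert f.check(file=1) and f.basename == "otherfile"
assert f.relto(tmp) == f.sep.join(["sampledir", "otherfile"])
assert [p.relto(tmp) for p in tmp.visit(lambda x: x.check(file=1))] == [f.relto(tmp)]
tmp.remove(rec=1)
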
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/path/conftest.py b/testing/web-platform/tests/tools/third_party/py/testing/path/conftest.py
new file mode 100644
index 0000000000..84fb5c8269
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/path/conftest.py
@@ -0,0 +1,80 @@
+import py
+import sys
+from py._path import svnwc as svncommon
+
+svnbin = py.path.local.sysfind('svn')
+repodump = py.path.local(__file__).dirpath('repotest.dump')
+from py.builtin import print_
+
+def pytest_funcarg__repowc1(request):
+ if svnbin is None:
+ py.test.skip("svn binary not found")
+
+ tmpdir = request.getfuncargvalue("tmpdir")
+ repo, repourl, wc = request.cached_setup(
+ setup=lambda: getrepowc(tmpdir, "path1repo", "path1wc"),
+ scope="module",
+ )
+ for x in ('test_remove', 'test_move', 'test_status_deleted'):
+ if request.function.__name__.startswith(x):
+ #print >>sys.stderr, ("saving repo", repo, "for", request.function)
+ _savedrepowc = save_repowc(repo, wc)
+ request.addfinalizer(lambda: restore_repowc(_savedrepowc))
+ return repo, repourl, wc
+
+def pytest_funcarg__repowc2(request):
+ tmpdir = request.getfuncargvalue("tmpdir")
+ name = request.function.__name__
+ repo, url, wc = getrepowc(tmpdir, "%s-repo-2" % name, "%s-wc-2" % name)
+ return repo, url, wc
+
+def getsvnbin():
+ if svnbin is None:
+ py.test.skip("svn binary not found")
+ return svnbin
+
+# make a wc directory out of a given root url
+# cache previously obtained wcs!
+#
+def getrepowc(tmpdir, reponame='basetestrepo', wcname='wc'):
+ repo = tmpdir.mkdir(reponame)
+ wcdir = tmpdir.mkdir(wcname)
+ repo.ensure(dir=1)
+ py.process.cmdexec('svnadmin create "%s"' %
+ svncommon._escape_helper(repo))
+ py.process.cmdexec('svnadmin load -q "%s" <"%s"' %
+ (svncommon._escape_helper(repo), repodump))
+ print_("created svn repository", repo)
+ wcdir.ensure(dir=1)
+ wc = py.path.svnwc(wcdir)
+ if sys.platform == 'win32':
+ repourl = "file://" + '/' + str(repo).replace('\\', '/')
+ else:
+ repourl = "file://%s" % repo
+ wc.checkout(repourl)
+ print_("checked out new repo into", wc)
+ return (repo, repourl, wc)
+
+
+def save_repowc(repo, wc):
+ assert not str(repo).startswith("file://"), repo
+ assert repo.check()
+ savedrepo = repo.dirpath(repo.basename+".1")
+ savedwc = wc.dirpath(wc.basename+".1")
+ repo.copy(savedrepo)
+ wc.localpath.copy(savedwc.localpath)
+ return savedrepo, savedwc
+
+def restore_repowc(obj):
+ savedrepo, savedwc = obj
+ #print >>sys.stderr, ("restoring", savedrepo)
+ repo = savedrepo.new(basename=savedrepo.basename[:-2])
+ assert repo.check()
+ wc = savedwc.new(basename=savedwc.basename[:-2])
+ assert wc.check()
+ wc.localpath.remove()
+ repo.remove()
+ savedrepo.move(repo)
+ savedwc.localpath.move(wc.localpath)
+ py.path.svnurl._lsnorevcache.clear()
+ py.path.svnurl._lsrevcache.clear()
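
These fixtures shell out through py.process.cmdexec, which returns the captured stdout of a successful command and raises py.process.cmdexec.Error otherwise. A tiny usage sketch, with POSIX commands chosen purely for illustration:

import py

out = py.process.cmdexec('echo hello')     # run a shell command, capture stdout
assert out.strip() == 'hello'

try:
    py.process.cmdexec('false')            # non-zero exit status raises
except py.process.cmdexec.Error:
    print("command failed as expected")
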
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/path/repotest.dump b/testing/web-platform/tests/tools/third_party/py/testing/path/repotest.dump
new file mode 100644
index 0000000000..c7819cad7a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/path/repotest.dump
@@ -0,0 +1,228 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 876a30f4-1eed-0310-aeb7-ae314d1e5934
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-01-07T23:55:31.755989Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 118
+Content-length: 118
+
+K 7
+svn:log
+V 20
+testrepo setup rev 1
+K 10
+svn:author
+V 3
+hpk
+K 8
+svn:date
+V 27
+2005-01-07T23:55:37.815386Z
+PROPS-END
+
+Node-path: execfile
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 4
+Text-content-md5: d4b5bc61e16310f08c5d11866eba0a22
+Content-length: 14
+
+PROPS-END
+x=42
+
+Node-path: otherdir
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: otherdir/__init__.py
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: otherdir/a.py
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 247c7daeb2ee5dcab0aba7bd12bad665
+Content-length: 40
+
+PROPS-END
+from b import stuff as result
+
+
+Node-path: otherdir/b.py
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: c1b13503469a7711306d03a4b0721bc6
+Content-length: 25
+
+PROPS-END
+stuff="got it"
+
+
+Node-path: otherdir/c.py
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 75
+Text-content-md5: 250cdb6b5df68536152c681f48297569
+Content-length: 85
+
+PROPS-END
+import py; py.magic.autopath()
+import otherdir.a
+value = otherdir.a.result
+
+
+Node-path: otherdir/d.py
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 72
+Text-content-md5: 940c9c621e7b198e081459642c37f5a7
+Content-length: 82
+
+PROPS-END
+import py; py.magic.autopath()
+from otherdir import a
+value2 = a.result
+
+
+Node-path: sampledir
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: sampledir/otherfile
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: samplefile
+Node-kind: file
+Node-action: add
+Prop-content-length: 40
+Text-content-length: 11
+Text-content-md5: 9225ac28b32156979ab6482b8bb5fb8c
+Content-length: 51
+
+K 13
+svn:eol-style
+V 6
+native
+PROPS-END
+samplefile
+
+
+Node-path: samplepickle
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 56
+Text-content-md5: 719d85c1329a33134bb98f56b756c545
+Content-length: 66
+
+PROPS-END
+(dp1
+S'answer'
+p2
+I42
+sI1
+I2
+sS'hello'
+p3
+S'world'
+p4
+s.
+
+Revision-number: 2
+Prop-content-length: 108
+Content-length: 108
+
+K 7
+svn:log
+V 10
+second rev
+K 10
+svn:author
+V 3
+hpk
+K 8
+svn:date
+V 27
+2005-01-07T23:55:39.223202Z
+PROPS-END
+
+Node-path: anotherfile
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 5
+Text-content-md5: 5d41402abc4b2a76b9719d911017c592
+Content-length: 15
+
+PROPS-END
+hello
+
+Revision-number: 3
+Prop-content-length: 106
+Content-length: 106
+
+K 7
+svn:log
+V 9
+third rev
+K 10
+svn:author
+V 3
+hpk
+K 8
+svn:date
+V 27
+2005-01-07T23:55:41.556642Z
+PROPS-END
+
+Node-path: anotherfile
+Node-kind: file
+Node-action: change
+Text-content-length: 5
+Text-content-md5: 7d793037a0760186574b0282f2f435e7
+Content-length: 5
+
+world
+
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/path/svntestbase.py b/testing/web-platform/tests/tools/third_party/py/testing/path/svntestbase.py
new file mode 100644
index 0000000000..8d94a9ca64
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/path/svntestbase.py
@@ -0,0 +1,31 @@
+import sys
+import py
+from py._path import svnwc as svncommon
+from common import CommonFSTests
+
+class CommonSvnTests(CommonFSTests):
+
+ def test_propget(self, path1):
+ url = path1.join("samplefile")
+ value = url.propget('svn:eol-style')
+ assert value == 'native'
+
+ def test_proplist(self, path1):
+ url = path1.join("samplefile")
+ res = url.proplist()
+ assert res['svn:eol-style'] == 'native'
+
+ def test_info(self, path1):
+ url = path1.join("samplefile")
+ res = url.info()
+ assert res.size > len("samplefile") and res.created_rev >= 0
+
+ def test_log_simple(self, path1):
+ url = path1.join("samplefile")
+ logentries = url.log()
+ for logentry in logentries:
+ assert logentry.rev == 1
+ assert hasattr(logentry, 'author')
+ assert hasattr(logentry, 'date')
+
+#cache.repositories.put(svnrepourl, 1200, 0)
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/path/test_cacheutil.py b/testing/web-platform/tests/tools/third_party/py/testing/path/test_cacheutil.py
new file mode 100644
index 0000000000..c9fc07463a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/path/test_cacheutil.py
@@ -0,0 +1,89 @@
+import pytest
+from py._path import cacheutil
+
+import time
+
+class BasicCacheAPITest:
+ cache = None
+ def test_getorbuild(self):
+ val = self.cache.getorbuild(-42, lambda: 42)
+ assert val == 42
+ val = self.cache.getorbuild(-42, lambda: 23)
+ assert val == 42
+
+ def test_cache_get_key_error(self):
+ pytest.raises(KeyError, "self.cache._getentry(-23)")
+
+ def test_delentry_non_raising(self):
+ self.cache.getorbuild(100, lambda: 100)
+ self.cache.delentry(100)
+ pytest.raises(KeyError, "self.cache._getentry(100)")
+
+ def test_delentry_raising(self):
+ self.cache.getorbuild(100, lambda: 100)
+ self.cache.delentry(100)
+ pytest.raises(KeyError, self.cache.delentry, 100, raising=True)
+
+ def test_clear(self):
+ self.cache.clear()
+
+
+class TestBuildcostAccess(BasicCacheAPITest):
+ cache = cacheutil.BuildcostAccessCache(maxentries=128)
+
+ def test_cache_works_somewhat_simple(self, monkeypatch):
+ cache = cacheutil.BuildcostAccessCache()
+        # With the default gettime, BuildcostAccessCache.build can see
+        # time()-time() == 0, which makes the test below fail randomly.
+        # Use an incrementing counter instead.
+ l = [0]
+
+ def counter():
+ l[0] = l[0] + 1
+ return l[0]
+ monkeypatch.setattr(cacheutil, 'gettime', counter)
+ for x in range(cache.maxentries):
+ y = cache.getorbuild(x, lambda: x)
+ assert x == y
+ for x in range(cache.maxentries):
+ assert cache.getorbuild(x, None) == x
+ halfentries = int(cache.maxentries / 2)
+ for x in range(halfentries):
+ assert cache.getorbuild(x, None) == x
+ assert cache.getorbuild(x, None) == x
+ # evict one entry
+ val = cache.getorbuild(-1, lambda: 42)
+ assert val == 42
+ # check that recently used ones are still there
+ # and are not build again
+ for x in range(halfentries):
+ assert cache.getorbuild(x, None) == x
+ assert cache.getorbuild(-1, None) == 42
+
+
+class TestAging(BasicCacheAPITest):
+ maxsecs = 0.10
+ cache = cacheutil.AgingCache(maxentries=128, maxseconds=maxsecs)
+
+ def test_cache_eviction(self):
+ self.cache.getorbuild(17, lambda: 17)
+ endtime = time.time() + self.maxsecs * 10
+ while time.time() < endtime:
+ try:
+ self.cache._getentry(17)
+ except KeyError:
+ break
+ time.sleep(self.maxsecs*0.3)
+ else:
+ pytest.fail("waiting for cache eviction failed")
+
+
+def test_prune_lowestweight():
+ maxsecs = 0.05
+ cache = cacheutil.AgingCache(maxentries=10, maxseconds=maxsecs)
+ for x in range(cache.maxentries):
+ cache.getorbuild(x, lambda: x)
+ time.sleep(maxsecs*1.1)
+ cache.getorbuild(cache.maxentries+1, lambda: 42)
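
Both cache flavours expose the same getorbuild/delentry interface exercised above. A short usage sketch; the key and the builder are invented for illustration:

from py._path import cacheutil

def expensive():
    return sum(range(1000))                # stand-in for a costly build step

cache = cacheutil.BuildcostAccessCache(maxentries=64)
assert cache.getorbuild("answer", expensive) == 499500    # built and cached
assert cache.getorbuild("answer", lambda: 0) == 499500    # served from the cache

aging = cacheutil.AgingCache(maxentries=64, maxseconds=10.0)
aging.getorbuild("answer", expensive)
aging.delentry("answer")                   # explicit eviction, silent by default
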
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/path/test_local.py b/testing/web-platform/tests/tools/third_party/py/testing/path/test_local.py
new file mode 100644
index 0000000000..1b9a7923f6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/path/test_local.py
@@ -0,0 +1,1078 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+import time
+import py
+import pytest
+import os
+import sys
+import multiprocessing
+from py.path import local
+import common
+
+failsonjython = py.test.mark.xfail("sys.platform.startswith('java')")
+failsonjywin32 = py.test.mark.xfail(
+ "sys.platform.startswith('java') "
+ "and getattr(os, '_name', None) == 'nt'")
+win32only = py.test.mark.skipif(
+ "not (sys.platform == 'win32' or getattr(os, '_name', None) == 'nt')")
+skiponwin32 = py.test.mark.skipif(
+ "sys.platform == 'win32' or getattr(os, '_name', None) == 'nt'")
+
+ATIME_RESOLUTION = 0.01
+
+
+@pytest.yield_fixture(scope="session")
+def path1(tmpdir_factory):
+ path = tmpdir_factory.mktemp('path')
+ common.setuptestfs(path)
+ yield path
+ assert path.join("samplefile").check()
+
+
+@pytest.fixture
+def fake_fspath_obj(request):
+ class FakeFSPathClass(object):
+ def __init__(self, path):
+ self._path = path
+
+ def __fspath__(self):
+ return self._path
+
+ return FakeFSPathClass(os.path.join("this", "is", "a", "fake", "path"))
+
+
+def batch_make_numbered_dirs(rootdir, repeats):
+ try:
+ for i in range(repeats):
+ dir_ = py.path.local.make_numbered_dir(prefix='repro-', rootdir=rootdir)
+ file_ = dir_.join('foo')
+ file_.write('%s' % i)
+ actual = int(file_.read())
+ assert actual == i, 'int(file_.read()) is %s instead of %s' % (actual, i)
+ dir_.join('.lock').remove(ignore_errors=True)
+ return True
+ except KeyboardInterrupt:
+ # makes sure that interrupting test session won't hang it
+        os._exit(2)
+
+
+class TestLocalPath(common.CommonFSTests):
+ def test_join_normpath(self, tmpdir):
+ assert tmpdir.join(".") == tmpdir
+ p = tmpdir.join("../%s" % tmpdir.basename)
+ assert p == tmpdir
+ p = tmpdir.join("..//%s/" % tmpdir.basename)
+ assert p == tmpdir
+
+ @skiponwin32
+ def test_dirpath_abs_no_abs(self, tmpdir):
+ p = tmpdir.join('foo')
+ assert p.dirpath('/bar') == tmpdir.join('bar')
+ assert tmpdir.dirpath('/bar', abs=True) == local('/bar')
+
+ def test_gethash(self, tmpdir):
+ md5 = py.builtin._tryimport('md5', 'hashlib').md5
+ lib = py.builtin._tryimport('sha', 'hashlib')
+ sha = getattr(lib, 'sha1', getattr(lib, 'sha', None))
+ fn = tmpdir.join("testhashfile")
+ data = 'hello'.encode('ascii')
+ fn.write(data, mode="wb")
+ assert fn.computehash("md5") == md5(data).hexdigest()
+ assert fn.computehash("sha1") == sha(data).hexdigest()
+ py.test.raises(ValueError, fn.computehash, "asdasd")
+
+ def test_remove_removes_readonly_file(self, tmpdir):
+ readonly_file = tmpdir.join('readonly').ensure()
+ readonly_file.chmod(0)
+ readonly_file.remove()
+ assert not readonly_file.check(exists=1)
+
+ def test_remove_removes_readonly_dir(self, tmpdir):
+ readonly_dir = tmpdir.join('readonlydir').ensure(dir=1)
+ readonly_dir.chmod(int("500", 8))
+ readonly_dir.remove()
+ assert not readonly_dir.check(exists=1)
+
+ def test_remove_removes_dir_and_readonly_file(self, tmpdir):
+ readonly_dir = tmpdir.join('readonlydir').ensure(dir=1)
+ readonly_file = readonly_dir.join('readonlyfile').ensure()
+ readonly_file.chmod(0)
+ readonly_dir.remove()
+ assert not readonly_dir.check(exists=1)
+
+ def test_remove_routes_ignore_errors(self, tmpdir, monkeypatch):
+ l = []
+ monkeypatch.setattr(
+ 'shutil.rmtree',
+ lambda *args, **kwargs: l.append(kwargs))
+ tmpdir.remove()
+ assert not l[0]['ignore_errors']
+ for val in (True, False):
+ l[:] = []
+ tmpdir.remove(ignore_errors=val)
+ assert l[0]['ignore_errors'] == val
+
+ def test_initialize_curdir(self):
+ assert str(local()) == os.getcwd()
+
+ @skiponwin32
+ def test_chdir_gone(self, path1):
+ p = path1.ensure("dir_to_be_removed", dir=1)
+ p.chdir()
+ p.remove()
+ pytest.raises(py.error.ENOENT, py.path.local)
+ assert path1.chdir() is None
+ assert os.getcwd() == str(path1)
+
+ with pytest.raises(py.error.ENOENT):
+ with p.as_cwd():
+ raise NotImplementedError
+
+ @skiponwin32
+ def test_chdir_gone_in_as_cwd(self, path1):
+ p = path1.ensure("dir_to_be_removed", dir=1)
+ p.chdir()
+ p.remove()
+
+ with path1.as_cwd() as old:
+ assert old is None
+
+ def test_as_cwd(self, path1):
+ dir = path1.ensure("subdir", dir=1)
+ old = py.path.local()
+ with dir.as_cwd() as x:
+ assert x == old
+ assert py.path.local() == dir
+ assert os.getcwd() == str(old)
+
+ def test_as_cwd_exception(self, path1):
+ old = py.path.local()
+ dir = path1.ensure("subdir", dir=1)
+ with pytest.raises(ValueError):
+ with dir.as_cwd():
+ raise ValueError()
+ assert old == py.path.local()
+
+ def test_initialize_reldir(self, path1):
+ with path1.as_cwd():
+ p = local('samplefile')
+ assert p.check()
+
+ def test_tilde_expansion(self, monkeypatch, tmpdir):
+ monkeypatch.setenv("HOME", str(tmpdir))
+ p = py.path.local("~", expanduser=True)
+ assert p == os.path.expanduser("~")
+
+ @pytest.mark.skipif(
+ not sys.platform.startswith("win32"), reason="case insensitive only on windows"
+ )
+ def test_eq_hash_are_case_insensitive_on_windows(self):
+ a = py.path.local("/some/path")
+ b = py.path.local("/some/PATH")
+ assert a == b
+ assert hash(a) == hash(b)
+ assert a in {b}
+ assert a in {b: 'b'}
+
+ def test_eq_with_strings(self, path1):
+ path1 = path1.join('sampledir')
+ path2 = str(path1)
+ assert path1 == path2
+ assert path2 == path1
+ path3 = path1.join('samplefile')
+ assert path3 != path2
+ assert path2 != path3
+
+ def test_eq_with_none(self, path1):
+ assert path1 != None # noqa: E711
+
+ @pytest.mark.skipif(
+ sys.platform.startswith("win32"), reason="cannot remove cwd on Windows"
+ )
+ @pytest.mark.skipif(
+ sys.version_info < (3, 0) or sys.version_info >= (3, 5),
+ reason="only with Python 3 before 3.5"
+ )
+ def test_eq_with_none_and_custom_fspath(self, monkeypatch, path1):
+ import os
+ import shutil
+ import tempfile
+
+ d = tempfile.mkdtemp()
+ monkeypatch.chdir(d)
+ shutil.rmtree(d)
+
+ monkeypatch.delitem(sys.modules, 'pathlib', raising=False)
+ monkeypatch.setattr(sys, 'path', [''] + sys.path)
+
+ with pytest.raises(FileNotFoundError):
+ import pathlib # noqa: F401
+
+ assert path1 != None # noqa: E711
+
+ def test_eq_non_ascii_unicode(self, path1):
+ path2 = path1.join(u'temp')
+ path3 = path1.join(u'ação')
+ path4 = path1.join(u'ディレクトリ')
+
+ assert path2 != path3
+ assert path2 != path4
+ assert path4 != path3
+
+ def test_gt_with_strings(self, path1):
+ path2 = path1.join('sampledir')
+ path3 = str(path1.join("ttt"))
+ assert path3 > path2
+ assert path2 < path3
+ assert path2 < "ttt"
+ assert "ttt" > path2
+ path4 = path1.join("aaa")
+ l = [path2, path4, path3]
+ assert sorted(l) == [path4, path2, path3]
+
+ def test_open_and_ensure(self, path1):
+ p = path1.join("sub1", "sub2", "file")
+ with p.open("w", ensure=1) as f:
+ f.write("hello")
+ assert p.read() == "hello"
+
+ def test_write_and_ensure(self, path1):
+ p = path1.join("sub1", "sub2", "file")
+ p.write("hello", ensure=1)
+ assert p.read() == "hello"
+
+ @py.test.mark.parametrize('bin', (False, True))
+ def test_dump(self, tmpdir, bin):
+ path = tmpdir.join("dumpfile%s" % int(bin))
+ try:
+ d = {'answer': 42}
+ path.dump(d, bin=bin)
+ f = path.open('rb+')
+ import pickle
+ dnew = pickle.load(f)
+ assert d == dnew
+ finally:
+ f.close()
+
+ @failsonjywin32
+ def test_setmtime(self):
+ import tempfile
+ import time
+ try:
+ fd, name = tempfile.mkstemp()
+ os.close(fd)
+ except AttributeError:
+ name = tempfile.mktemp()
+ open(name, 'w').close()
+ try:
+ mtime = int(time.time())-100
+ path = local(name)
+ assert path.mtime() != mtime
+ path.setmtime(mtime)
+ assert path.mtime() == mtime
+ path.setmtime()
+ assert path.mtime() != mtime
+ finally:
+ os.remove(name)
+
+ def test_normpath(self, path1):
+ new1 = path1.join("/otherdir")
+ new2 = path1.join("otherdir")
+ assert str(new1) == str(new2)
+
+ def test_mkdtemp_creation(self):
+ d = local.mkdtemp()
+ try:
+ assert d.check(dir=1)
+ finally:
+ d.remove(rec=1)
+
+ def test_tmproot(self):
+ d = local.mkdtemp()
+ tmproot = local.get_temproot()
+ try:
+ assert d.check(dir=1)
+ assert d.dirpath() == tmproot
+ finally:
+ d.remove(rec=1)
+
+ def test_chdir(self, tmpdir):
+ old = local()
+ try:
+ res = tmpdir.chdir()
+ assert str(res) == str(old)
+ assert os.getcwd() == str(tmpdir)
+ finally:
+ old.chdir()
+
+ def test_ensure_filepath_withdir(self, tmpdir):
+ newfile = tmpdir.join('test1', 'test')
+ newfile.ensure()
+ assert newfile.check(file=1)
+ newfile.write("42")
+ newfile.ensure()
+ s = newfile.read()
+ assert s == "42"
+
+ def test_ensure_filepath_withoutdir(self, tmpdir):
+ newfile = tmpdir.join('test1file')
+ t = newfile.ensure()
+ assert t == newfile
+ assert newfile.check(file=1)
+
+ def test_ensure_dirpath(self, tmpdir):
+ newfile = tmpdir.join('test1', 'testfile')
+ t = newfile.ensure(dir=1)
+ assert t == newfile
+ assert newfile.check(dir=1)
+
+ def test_ensure_non_ascii_unicode(self, tmpdir):
+ newfile = tmpdir.join(u'ação',u'ディレクトリ')
+ t = newfile.ensure(dir=1)
+ assert t == newfile
+ assert newfile.check(dir=1)
+
+ def test_init_from_path(self, tmpdir):
+ l = local()
+ l2 = local(l)
+ assert l2 == l
+
+ wc = py.path.svnwc('.')
+ l3 = local(wc)
+ assert l3 is not wc
+ assert l3.strpath == wc.strpath
+ assert not hasattr(l3, 'commit')
+
+    @py.test.mark.xfail(run=False, reason="unreliable test for long filenames")
+ def test_long_filenames(self, tmpdir):
+ if sys.platform == "win32":
+ py.test.skip("win32: work around needed for path length limit")
+ # see http://codespeak.net/pipermail/py-dev/2008q2/000922.html
+
+ # testing paths > 260 chars (which is Windows' limitation, but
+        # depending on how the paths are used), but < 4096 (which is the
+ # Linux' limitation) - the behaviour of paths with names > 4096 chars
+ # is undetermined
+ newfilename = '/test' * 60
+ l = tmpdir.join(newfilename)
+ l.ensure(file=True)
+ l.write('foo')
+ l2 = tmpdir.join(newfilename)
+ assert l2.read() == 'foo'
+
+ def test_visit_depth_first(self, tmpdir):
+ tmpdir.ensure("a", "1")
+ tmpdir.ensure("b", "2")
+ p3 = tmpdir.ensure("breadth")
+ l = list(tmpdir.visit(lambda x: x.check(file=1)))
+ assert len(l) == 3
+ # check that breadth comes last
+ assert l[2] == p3
+
+ def test_visit_rec_fnmatch(self, tmpdir):
+ p1 = tmpdir.ensure("a", "123")
+ tmpdir.ensure(".b", "345")
+ l = list(tmpdir.visit("???", rec="[!.]*"))
+ assert len(l) == 1
+ # check that breadth comes last
+ assert l[0] == p1
+
+ def test_fnmatch_file_abspath(self, tmpdir):
+ b = tmpdir.join("a", "b")
+ assert b.fnmatch(os.sep.join("ab"))
+ pattern = os.sep.join([str(tmpdir), "*", "b"])
+ assert b.fnmatch(pattern)
+
+ def test_sysfind(self):
+ name = sys.platform == "win32" and "cmd" or "test"
+ x = py.path.local.sysfind(name)
+ assert x.check(file=1)
+ assert py.path.local.sysfind('jaksdkasldqwe') is None
+ assert py.path.local.sysfind(name, paths=[]) is None
+ x2 = py.path.local.sysfind(name, paths=[x.dirpath()])
+ assert x2 == x
+
+ def test_fspath_protocol_other_class(self, fake_fspath_obj):
+ # py.path is always absolute
+ py_path = py.path.local(fake_fspath_obj)
+ str_path = fake_fspath_obj.__fspath__()
+ assert py_path.check(endswith=str_path)
+ assert py_path.join(fake_fspath_obj).strpath == os.path.join(
+ py_path.strpath, str_path)
+
+ def test_make_numbered_dir_multiprocess_safe(self, tmpdir):
+ # https://github.com/pytest-dev/py/issues/30
+ pool = multiprocessing.Pool()
+ results = [pool.apply_async(batch_make_numbered_dirs, [tmpdir, 100]) for _ in range(20)]
+ for r in results:
+ assert r.get()
+
+
+class TestExecutionOnWindows:
+ pytestmark = win32only
+
+ def test_sysfind_bat_exe_before(self, tmpdir, monkeypatch):
+ monkeypatch.setenv("PATH", str(tmpdir), prepend=os.pathsep)
+ tmpdir.ensure("hello")
+ h = tmpdir.ensure("hello.bat")
+ x = py.path.local.sysfind("hello")
+ assert x == h
+
+
+class TestExecution:
+ pytestmark = skiponwin32
+
+ def test_sysfind_no_permisson_ignored(self, monkeypatch, tmpdir):
+ noperm = tmpdir.ensure('noperm', dir=True)
+ monkeypatch.setenv("PATH", noperm, prepend=":")
+ noperm.chmod(0)
+ assert py.path.local.sysfind('jaksdkasldqwe') is None
+
+ def test_sysfind_absolute(self):
+ x = py.path.local.sysfind('test')
+ assert x.check(file=1)
+ y = py.path.local.sysfind(str(x))
+ assert y.check(file=1)
+ assert y == x
+
+ def test_sysfind_multiple(self, tmpdir, monkeypatch):
+ monkeypatch.setenv('PATH', "%s:%s" % (
+ tmpdir.ensure('a'),
+ tmpdir.join('b')),
+ prepend=":")
+ tmpdir.ensure('b', 'a')
+ x = py.path.local.sysfind(
+ 'a', checker=lambda x: x.dirpath().basename == 'b')
+ assert x.basename == 'a'
+ assert x.dirpath().basename == 'b'
+ assert py.path.local.sysfind('a', checker=lambda x: None) is None
+
+ def test_sysexec(self):
+ x = py.path.local.sysfind('ls')
+ out = x.sysexec('-a')
+ for x in py.path.local().listdir():
+ assert out.find(x.basename) != -1
+
+ def test_sysexec_failing(self):
+ x = py.path.local.sysfind('false')
+ with pytest.raises(py.process.cmdexec.Error):
+ x.sysexec('aksjdkasjd')
+
+ def test_make_numbered_dir(self, tmpdir):
+ tmpdir.ensure('base.not_an_int', dir=1)
+ for i in range(10):
+ numdir = local.make_numbered_dir(prefix='base.', rootdir=tmpdir,
+ keep=2, lock_timeout=0)
+ assert numdir.check()
+ assert numdir.basename == 'base.%d' % i
+ if i >= 1:
+ assert numdir.new(ext=str(i-1)).check()
+ if i >= 2:
+ assert numdir.new(ext=str(i-2)).check()
+ if i >= 3:
+ assert not numdir.new(ext=str(i-3)).check()
+
+ def test_make_numbered_dir_case(self, tmpdir):
+ """make_numbered_dir does not make assumptions on the underlying
+ filesystem based on the platform and will assume it _could_ be case
+ insensitive.
+
+ See issues:
+ - https://github.com/pytest-dev/pytest/issues/708
+ - https://github.com/pytest-dev/pytest/issues/3451
+ """
+ d1 = local.make_numbered_dir(
+ prefix='CAse.', rootdir=tmpdir, keep=2, lock_timeout=0,
+ )
+ d2 = local.make_numbered_dir(
+ prefix='caSE.', rootdir=tmpdir, keep=2, lock_timeout=0,
+ )
+ assert str(d1).lower() != str(d2).lower()
+ assert str(d2).endswith('.1')
+
+ def test_make_numbered_dir_NotImplemented_Error(self, tmpdir, monkeypatch):
+ def notimpl(x, y):
+ raise NotImplementedError(42)
+ monkeypatch.setattr(os, 'symlink', notimpl)
+ x = tmpdir.make_numbered_dir(rootdir=tmpdir, lock_timeout=0)
+ assert x.relto(tmpdir)
+ assert x.check()
+
+ def test_locked_make_numbered_dir(self, tmpdir):
+ for i in range(10):
+ numdir = local.make_numbered_dir(prefix='base2.', rootdir=tmpdir,
+ keep=2)
+ assert numdir.check()
+ assert numdir.basename == 'base2.%d' % i
+ for j in range(i):
+ assert numdir.new(ext=str(j)).check()
+
+ def test_error_preservation(self, path1):
+ py.test.raises(EnvironmentError, path1.join('qwoeqiwe').mtime)
+ py.test.raises(EnvironmentError, path1.join('qwoeqiwe').read)
+
+ # def test_parentdirmatch(self):
+ # local.parentdirmatch('std', startmodule=__name__)
+ #
+
+
+class TestImport:
+ def test_pyimport(self, path1):
+ obj = path1.join('execfile.py').pyimport()
+ assert obj.x == 42
+ assert obj.__name__ == 'execfile'
+
+ def test_pyimport_renamed_dir_creates_mismatch(self, tmpdir, monkeypatch):
+ p = tmpdir.ensure("a", "test_x123.py")
+ p.pyimport()
+ tmpdir.join("a").move(tmpdir.join("b"))
+ with pytest.raises(tmpdir.ImportMismatchError):
+ tmpdir.join("b", "test_x123.py").pyimport()
+
+ # Errors can be ignored.
+ monkeypatch.setenv('PY_IGNORE_IMPORTMISMATCH', '1')
+ tmpdir.join("b", "test_x123.py").pyimport()
+
+ # PY_IGNORE_IMPORTMISMATCH=0 does not ignore error.
+ monkeypatch.setenv('PY_IGNORE_IMPORTMISMATCH', '0')
+ with pytest.raises(tmpdir.ImportMismatchError):
+ tmpdir.join("b", "test_x123.py").pyimport()
+
+ def test_pyimport_messy_name(self, tmpdir):
+ # http://bitbucket.org/hpk42/py-trunk/issue/129
+ path = tmpdir.ensure('foo__init__.py')
+ path.pyimport()
+
+ def test_pyimport_dir(self, tmpdir):
+ p = tmpdir.join("hello_123")
+ p_init = p.ensure("__init__.py")
+ m = p.pyimport()
+ assert m.__name__ == "hello_123"
+ m = p_init.pyimport()
+ assert m.__name__ == "hello_123"
+
+ def test_pyimport_execfile_different_name(self, path1):
+ obj = path1.join('execfile.py').pyimport(modname="0x.y.z")
+ assert obj.x == 42
+ assert obj.__name__ == '0x.y.z'
+
+ def test_pyimport_a(self, path1):
+ otherdir = path1.join('otherdir')
+ mod = otherdir.join('a.py').pyimport()
+ assert mod.result == "got it"
+ assert mod.__name__ == 'otherdir.a'
+
+ def test_pyimport_b(self, path1):
+ otherdir = path1.join('otherdir')
+ mod = otherdir.join('b.py').pyimport()
+ assert mod.stuff == "got it"
+ assert mod.__name__ == 'otherdir.b'
+
+ def test_pyimport_c(self, path1):
+ otherdir = path1.join('otherdir')
+ mod = otherdir.join('c.py').pyimport()
+ assert mod.value == "got it"
+
+ def test_pyimport_d(self, path1):
+ otherdir = path1.join('otherdir')
+ mod = otherdir.join('d.py').pyimport()
+ assert mod.value2 == "got it"
+
+ def test_pyimport_and_import(self, tmpdir):
+ tmpdir.ensure('xxxpackage', '__init__.py')
+ mod1path = tmpdir.ensure('xxxpackage', 'module1.py')
+ mod1 = mod1path.pyimport()
+ assert mod1.__name__ == 'xxxpackage.module1'
+ from xxxpackage import module1
+ assert module1 is mod1
+
+ def test_pyimport_check_filepath_consistency(self, monkeypatch, tmpdir):
+ name = 'pointsback123'
+ ModuleType = type(os)
+ p = tmpdir.ensure(name + '.py')
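+        # a module already in sys.modules whose __file__ points at a compiled
+        # counterpart (.pyc, .pyo, $py.class) of p is treated as consistent
+        # and returned as-is by pyimport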
+ for ending in ('.pyc', '$py.class', '.pyo'):
+ mod = ModuleType(name)
+ pseudopath = tmpdir.ensure(name+ending)
+ mod.__file__ = str(pseudopath)
+ monkeypatch.setitem(sys.modules, name, mod)
+ newmod = p.pyimport()
+ assert mod == newmod
+ monkeypatch.undo()
+ mod = ModuleType(name)
+ pseudopath = tmpdir.ensure(name+"123.py")
+ mod.__file__ = str(pseudopath)
+ monkeypatch.setitem(sys.modules, name, mod)
+ excinfo = py.test.raises(pseudopath.ImportMismatchError, p.pyimport)
+ modname, modfile, orig = excinfo.value.args
+ assert modname == name
+ assert modfile == pseudopath
+ assert orig == p
+ assert issubclass(pseudopath.ImportMismatchError, ImportError)
+
+ def test_issue131_pyimport_on__init__(self, tmpdir):
+        # __init__.py files may belong to namespace packages, and thus the
+        # __file__ of an imported module may not point back to this path
+ # see issue
+ p1 = tmpdir.ensure("proja", "__init__.py")
+ p2 = tmpdir.ensure("sub", "proja", "__init__.py")
+ m1 = p1.pyimport()
+ m2 = p2.pyimport()
+ assert m1 == m2
+
+ def test_ensuresyspath_append(self, tmpdir):
+ root1 = tmpdir.mkdir("root1")
+ file1 = root1.ensure("x123.py")
+ assert str(root1) not in sys.path
+ file1.pyimport(ensuresyspath="append")
+ assert str(root1) == sys.path[-1]
+ assert str(root1) not in sys.path[:-1]
+
+
+class TestImportlibImport:
+ pytestmark = py.test.mark.skipif("sys.version_info < (3, 5)")
+
+ OPTS = {'ensuresyspath': 'importlib'}
+
+ def test_pyimport(self, path1):
+ obj = path1.join('execfile.py').pyimport(**self.OPTS)
+ assert obj.x == 42
+ assert obj.__name__ == 'execfile'
+
+ def test_pyimport_dir_fails(self, tmpdir):
+ p = tmpdir.join("hello_123")
+ p.ensure("__init__.py")
+ with pytest.raises(ImportError):
+ p.pyimport(**self.OPTS)
+
+ def test_pyimport_execfile_different_name(self, path1):
+ obj = path1.join('execfile.py').pyimport(modname="0x.y.z", **self.OPTS)
+ assert obj.x == 42
+ assert obj.__name__ == '0x.y.z'
+
+ def test_pyimport_relative_import_fails(self, path1):
+ otherdir = path1.join('otherdir')
+ with pytest.raises(ImportError):
+ otherdir.join('a.py').pyimport(**self.OPTS)
+
+ def test_pyimport_doesnt_use_sys_modules(self, tmpdir):
+ p = tmpdir.ensure('file738jsk.py')
+ mod = p.pyimport(**self.OPTS)
+ assert mod.__name__ == 'file738jsk'
+ assert 'file738jsk' not in sys.modules
+
+
+def test_pypkgdir(tmpdir):
+ pkg = tmpdir.ensure('pkg1', dir=1)
+ pkg.ensure("__init__.py")
+ pkg.ensure("subdir/__init__.py")
+ assert pkg.pypkgpath() == pkg
+ assert pkg.join('subdir', '__init__.py').pypkgpath() == pkg
+
+
+def test_pypkgdir_unimportable(tmpdir):
+ pkg = tmpdir.ensure('pkg1-1', dir=1) # unimportable
+ pkg.ensure("__init__.py")
+ subdir = pkg.ensure("subdir/__init__.py").dirpath()
+ assert subdir.pypkgpath() == subdir
+ assert subdir.ensure("xyz.py").pypkgpath() == subdir
+ assert not pkg.pypkgpath()
+
+
+def test_isimportable():
+ from py._path.local import isimportable
+ assert not isimportable("")
+ assert isimportable("x")
+ assert isimportable("x1")
+ assert isimportable("x_1")
+ assert isimportable("_")
+ assert isimportable("_1")
+ assert not isimportable("x-1")
+ assert not isimportable("x:1")
+
+
+def test_homedir_from_HOME(monkeypatch):
+ path = os.getcwd()
+ monkeypatch.setenv("HOME", path)
+ assert py.path.local._gethomedir() == py.path.local(path)
+
+
+def test_homedir_not_exists(monkeypatch):
+ monkeypatch.delenv("HOME", raising=False)
+ monkeypatch.delenv("HOMEDRIVE", raising=False)
+ homedir = py.path.local._gethomedir()
+ assert homedir is None
+
+
+def test_samefile(tmpdir):
+ assert tmpdir.samefile(tmpdir)
+ p = tmpdir.ensure("hello")
+ assert p.samefile(p)
+ with p.dirpath().as_cwd():
+ assert p.samefile(p.basename)
+ if sys.platform == "win32":
+ p1 = p.__class__(str(p).lower())
+ p2 = p.__class__(str(p).upper())
+ assert p1.samefile(p2)
+
+@pytest.mark.skipif(not hasattr(os, "symlink"), reason="os.symlink not available")
+def test_samefile_symlink(tmpdir):
+ p1 = tmpdir.ensure("foo.txt")
+ p2 = tmpdir.join("linked.txt")
+ try:
+ os.symlink(str(p1), str(p2))
+ except (OSError, NotImplementedError) as e:
+ # on Windows this might fail if the user doesn't have special symlink permissions
+ # pypy3 on Windows doesn't implement os.symlink and raises NotImplementedError
+ pytest.skip(str(e.args[0]))
+
+ assert p1.samefile(p2)
+
+def test_listdir_single_arg(tmpdir):
+ tmpdir.ensure("hello")
+ assert tmpdir.listdir("hello")[0].basename == "hello"
+
+
+def test_mkdtemp_rootdir(tmpdir):
+ dtmp = local.mkdtemp(rootdir=tmpdir)
+ assert tmpdir.listdir() == [dtmp]
+
+
+class TestWINLocalPath:
+ pytestmark = win32only
+
+ def test_owner_group_not_implemented(self, path1):
+ py.test.raises(NotImplementedError, "path1.stat().owner")
+ py.test.raises(NotImplementedError, "path1.stat().group")
+
+ def test_chmod_simple_int(self, path1):
+ py.builtin.print_("path1 is", path1)
+ mode = path1.stat().mode
+ # Ensure that we actually change the mode to something different.
+ path1.chmod(mode == 0 and 1 or 0)
+ try:
+ print(path1.stat().mode)
+ print(mode)
+ assert path1.stat().mode != mode
+ finally:
+ path1.chmod(mode)
+ assert path1.stat().mode == mode
+
+ def test_path_comparison_lowercase_mixed(self, path1):
+ t1 = path1.join("a_path")
+ t2 = path1.join("A_path")
+ assert t1 == t1
+ assert t1 == t2
+
+ def test_relto_with_mixed_case(self, path1):
+ t1 = path1.join("a_path", "fiLe")
+ t2 = path1.join("A_path")
+ assert t1.relto(t2) == "fiLe"
+
+ def test_allow_unix_style_paths(self, path1):
+ t1 = path1.join('a_path')
+ assert t1 == str(path1) + '\\a_path'
+ t1 = path1.join('a_path/')
+ assert t1 == str(path1) + '\\a_path'
+ t1 = path1.join('dir/a_path')
+ assert t1 == str(path1) + '\\dir\\a_path'
+
+ def test_sysfind_in_currentdir(self, path1):
+ cmd = py.path.local.sysfind('cmd')
+ root = cmd.new(dirname='', basename='') # c:\ in most installations
+ with root.as_cwd():
+ x = py.path.local.sysfind(cmd.relto(root))
+ assert x.check(file=1)
+
+ def test_fnmatch_file_abspath_posix_pattern_on_win32(self, tmpdir):
+ # path-matching patterns might contain a posix path separator '/'
+ # Test that we can match that pattern on windows.
+ import posixpath
+ b = tmpdir.join("a", "b")
+ assert b.fnmatch(posixpath.sep.join("ab"))
+ pattern = posixpath.sep.join([str(tmpdir), "*", "b"])
+ assert b.fnmatch(pattern)
+
+
+class TestPOSIXLocalPath:
+ pytestmark = skiponwin32
+
+ def test_hardlink(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ filepath = tmpdir.join('file')
+ filepath.write("Hello")
+ nlink = filepath.stat().nlink
+ linkpath.mklinkto(filepath)
+ assert filepath.stat().nlink == nlink + 1
+
+ def test_symlink_are_identical(self, tmpdir):
+ filepath = tmpdir.join('file')
+ filepath.write("Hello")
+ linkpath = tmpdir.join('test')
+ linkpath.mksymlinkto(filepath)
+ assert linkpath.readlink() == str(filepath)
+
+ def test_symlink_isfile(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ filepath = tmpdir.join('file')
+ filepath.write("")
+ linkpath.mksymlinkto(filepath)
+ assert linkpath.check(file=1)
+ assert not linkpath.check(link=0, file=1)
+ assert linkpath.islink()
+
+ def test_symlink_relative(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ filepath = tmpdir.join('file')
+ filepath.write("Hello")
+ linkpath.mksymlinkto(filepath, absolute=False)
+ assert linkpath.readlink() == "file"
+ assert filepath.read() == linkpath.read()
+
+ def test_symlink_not_existing(self, tmpdir):
+ linkpath = tmpdir.join('testnotexisting')
+ assert not linkpath.check(link=1)
+ assert linkpath.check(link=0)
+
+ def test_relto_with_root(self, path1, tmpdir):
+ y = path1.join('x').relto(py.path.local('/'))
+ assert y[0] == str(path1)[1]
+
+ def test_visit_recursive_symlink(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ linkpath.mksymlinkto(tmpdir)
+ visitor = tmpdir.visit(None, lambda x: x.check(link=0))
+ assert list(visitor) == [linkpath]
+
+ def test_symlink_isdir(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ linkpath.mksymlinkto(tmpdir)
+ assert linkpath.check(dir=1)
+ assert not linkpath.check(link=0, dir=1)
+
+ def test_symlink_remove(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ linkpath.mksymlinkto(linkpath) # point to itself
+ assert linkpath.check(link=1)
+ linkpath.remove()
+ assert not linkpath.check()
+
+ def test_realpath_file(self, tmpdir):
+ linkpath = tmpdir.join('test')
+ filepath = tmpdir.join('file')
+ filepath.write("")
+ linkpath.mksymlinkto(filepath)
+ realpath = linkpath.realpath()
+ assert realpath.basename == 'file'
+
+ def test_owner(self, path1, tmpdir):
+ from pwd import getpwuid
+ from grp import getgrgid
+ stat = path1.stat()
+ assert stat.path == path1
+
+ uid = stat.uid
+ gid = stat.gid
+ owner = getpwuid(uid)[0]
+ group = getgrgid(gid)[0]
+
+ assert uid == stat.uid
+ assert owner == stat.owner
+ assert gid == stat.gid
+ assert group == stat.group
+
+ def test_stat_helpers(self, tmpdir, monkeypatch):
+ path1 = tmpdir.ensure("file")
+ stat1 = path1.stat()
+ stat2 = tmpdir.stat()
+ assert stat1.isfile()
+ assert stat2.isdir()
+ assert not stat1.islink()
+ assert not stat2.islink()
+
+ def test_stat_non_raising(self, tmpdir):
+ path1 = tmpdir.join("file")
+ pytest.raises(py.error.ENOENT, lambda: path1.stat())
+ res = path1.stat(raising=False)
+ assert res is None
+
+ def test_atime(self, tmpdir):
+ import time
+ path = tmpdir.ensure('samplefile')
+ now = time.time()
+ atime1 = path.atime()
+ # we could wait here but timer resolution is very
+ # system dependent
+ path.read()
+ time.sleep(ATIME_RESOLUTION)
+ atime2 = path.atime()
+ time.sleep(ATIME_RESOLUTION)
+ duration = time.time() - now
+ assert (atime2-atime1) <= duration
+
+ def test_commondir(self, path1):
+ # XXX This is here in local until we find a way to implement this
+ # using the subversion command line api.
+ p1 = path1.join('something')
+ p2 = path1.join('otherthing')
+ assert p1.common(p2) == path1
+ assert p2.common(p1) == path1
+
+ def test_commondir_nocommon(self, path1):
+ # XXX This is here in local until we find a way to implement this
+ # using the subversion command line api.
+ p1 = path1.join('something')
+ p2 = py.path.local(path1.sep+'blabla')
+ assert p1.common(p2) == '/'
+
+ def test_join_to_root(self, path1):
+ root = path1.parts()[0]
+ assert len(str(root)) == 1
+ assert str(root.join('a')) == '/a'
+
+ def test_join_root_to_root_with_no_abs(self, path1):
+ nroot = path1.join('/')
+ assert str(path1) == str(nroot)
+ assert path1 == nroot
+
+ def test_chmod_simple_int(self, path1):
+ mode = path1.stat().mode
+ path1.chmod(int(mode/2))
+ try:
+ assert path1.stat().mode != mode
+ finally:
+ path1.chmod(mode)
+ assert path1.stat().mode == mode
+
+ def test_chmod_rec_int(self, path1):
+ # XXX fragile test
+ def recfilter(x): return x.check(dotfile=0, link=0)
+ oldmodes = {}
+ for x in path1.visit(rec=recfilter):
+ oldmodes[x] = x.stat().mode
+ path1.chmod(int("772", 8), rec=recfilter)
+ try:
+ for x in path1.visit(rec=recfilter):
+ assert x.stat().mode & int("777", 8) == int("772", 8)
+ finally:
+ for x, y in oldmodes.items():
+ x.chmod(y)
+
+ def test_copy_archiving(self, tmpdir):
+ unicode_fn = u"something-\342\200\223.txt"
+ f = tmpdir.ensure("a", unicode_fn)
+ a = f.dirpath()
+ oldmode = f.stat().mode
+ newmode = oldmode ^ 1
+ f.chmod(newmode)
+ b = tmpdir.join("b")
+ a.copy(b, mode=True)
+ assert b.join(f.basename).stat().mode == newmode
+
+ def test_copy_stat_file(self, tmpdir):
+ src = tmpdir.ensure('src')
+ dst = tmpdir.join('dst')
+ # a small delay before the copy
+ time.sleep(ATIME_RESOLUTION)
+ src.copy(dst, stat=True)
+ oldstat = src.stat()
+ newstat = dst.stat()
+ assert oldstat.mode == newstat.mode
+ assert (dst.atime() - src.atime()) < ATIME_RESOLUTION
+ assert (dst.mtime() - src.mtime()) < ATIME_RESOLUTION
+
+ def test_copy_stat_dir(self, tmpdir):
+ test_files = ['a', 'b', 'c']
+ src = tmpdir.join('src')
+ for f in test_files:
+ src.join(f).write(f, ensure=True)
+ dst = tmpdir.join('dst')
+ # a small delay before the copy
+ time.sleep(ATIME_RESOLUTION)
+ src.copy(dst, stat=True)
+ for f in test_files:
+ oldstat = src.join(f).stat()
+ newstat = dst.join(f).stat()
+ assert (newstat.atime - oldstat.atime) < ATIME_RESOLUTION
+ assert (newstat.mtime - oldstat.mtime) < ATIME_RESOLUTION
+ assert oldstat.mode == newstat.mode
+
+ @failsonjython
+ def test_chown_identity(self, path1):
+ owner = path1.stat().owner
+ group = path1.stat().group
+ path1.chown(owner, group)
+
+ @failsonjython
+ def test_chown_dangling_link(self, path1):
+ owner = path1.stat().owner
+ group = path1.stat().group
+ x = path1.join('hello')
+ x.mksymlinkto('qlwkejqwlek')
+ try:
+ path1.chown(owner, group, rec=1)
+ finally:
+ x.remove(rec=0)
+
+ @failsonjython
+ def test_chown_identity_rec_mayfail(self, path1):
+ owner = path1.stat().owner
+ group = path1.stat().group
+ path1.chown(owner, group)
+
+
+class TestUnicodePy2Py3:
+ def test_join_ensure(self, tmpdir, monkeypatch):
+ if sys.version_info >= (3, 0) and "LANG" not in os.environ:
+ pytest.skip("cannot run test without locale")
+ x = py.path.local(tmpdir.strpath)
+ part = "hällo"
+ y = x.ensure(part)
+ assert x.join(part) == y
+
+ def test_listdir(self, tmpdir):
+ if sys.version_info >= (3, 0) and "LANG" not in os.environ:
+ pytest.skip("cannot run test without locale")
+ x = py.path.local(tmpdir.strpath)
+ part = "hällo"
+ y = x.ensure(part)
+ assert x.listdir(part)[0] == y
+
+ @pytest.mark.xfail(
+ reason="changing read/write might break existing usages")
+ def test_read_write(self, tmpdir):
+ x = tmpdir.join("hello")
+ part = py.builtin._totext("hällo", "utf8")
+ x.write(part)
+ assert x.read() == part
+ x.write(part.encode(sys.getdefaultencoding()))
+ assert x.read() == part.encode(sys.getdefaultencoding())
+
+
+class TestBinaryAndTextMethods:
+ def test_read_binwrite(self, tmpdir):
+ x = tmpdir.join("hello")
+ part = py.builtin._totext("hällo", "utf8")
+ part_utf8 = part.encode("utf8")
+ x.write_binary(part_utf8)
+ assert x.read_binary() == part_utf8
+ s = x.read_text(encoding="utf8")
+ assert s == part
+ assert py.builtin._istext(s)
+
+ def test_read_textwrite(self, tmpdir):
+ x = tmpdir.join("hello")
+ part = py.builtin._totext("hällo", "utf8")
+ part_utf8 = part.encode("utf8")
+ x.write_text(part, encoding="utf8")
+ assert x.read_binary() == part_utf8
+ assert x.read_text(encoding="utf8") == part
+
+ def test_default_encoding(self, tmpdir):
+ x = tmpdir.join("hello")
+        # Can't use UTF-8 here because the default encoding (ASCII) doesn't support it
+ part = py.builtin._totext("hello", "ascii")
+ x.write_text(part, "ascii")
+ s = x.read_text("ascii")
+ assert s == part
+ assert type(s) == type(part)
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/path/test_svnauth.py b/testing/web-platform/tests/tools/third_party/py/testing/path/test_svnauth.py
new file mode 100644
index 0000000000..654f033224
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/path/test_svnauth.py
@@ -0,0 +1,460 @@
+import py
+from py.path import SvnAuth
+import time
+import sys
+
+svnbin = py.path.local.sysfind('svn')
+
+
+def make_repo_auth(repo, userdata):
+ """ write config to repo
+
+ user information in userdata is used for auth
+ userdata has user names as keys, and a tuple (password, readwrite) as
+ values, where 'readwrite' is either 'r' or 'rw'
+ """
+ confdir = py.path.local(repo).join('conf')
+ confdir.join('svnserve.conf').write('''\
+[general]
+anon-access = none
+password-db = passwd
+authz-db = authz
+realm = TestRepo
+''')
+ authzdata = '[/]\n'
+ passwddata = '[users]\n'
+ for user in userdata:
+ authzdata += '%s = %s\n' % (user, userdata[user][1])
+ passwddata += '%s = %s\n' % (user, userdata[user][0])
+ confdir.join('authz').write(authzdata)
+ confdir.join('passwd').write(passwddata)
+
+def serve_bg(repopath):
+ pidfile = py.path.local(repopath).join('pid')
+ port = 10000
+ e = None
+ while port < 10010:
+ cmd = 'svnserve -d -T --listen-port=%d --pid-file=%s -r %s' % (
+ port, pidfile, repopath)
+ print(cmd)
+ try:
+ py.process.cmdexec(cmd)
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ else:
+ # XXX we assume here that the pid file gets written somewhere, I
+ # guess this should be relatively safe... (I hope, at least?)
+ counter = pid = 0
+ while counter < 10:
+ counter += 1
+ try:
+ pid = pidfile.read()
+ except py.error.ENOENT:
+ pass
+ if pid:
+ break
+ time.sleep(0.2)
+ return port, int(pid)
+ port += 1
+ raise IOError('could not start svnserve: %s' % (e,))
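+
+# serve_bg is used by the Setup fixture further below, e.g.
+#   port, pid = serve_bg(repopath.dirpath())
+# the caller is expected to kill the returned pid when done (see the
+# addfinalizer call in Setup)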
+
+class TestSvnAuth(object):
+ def test_basic(self):
+ auth = SvnAuth('foo', 'bar')
+ assert auth.username == 'foo'
+ assert auth.password == 'bar'
+ assert str(auth)
+
+ def test_makecmdoptions_uname_pw_makestr(self):
+ auth = SvnAuth('foo', 'bar')
+ assert auth.makecmdoptions() == '--username="foo" --password="bar"'
+
+ def test_makecmdoptions_quote_escape(self):
+ auth = SvnAuth('fo"o', '"ba\'r"')
+ assert auth.makecmdoptions() == '--username="fo\\"o" --password="\\"ba\'r\\""'
+
+ def test_makecmdoptions_no_cache_auth(self):
+ auth = SvnAuth('foo', 'bar', cache_auth=False)
+ assert auth.makecmdoptions() == ('--username="foo" --password="bar" '
+ '--no-auth-cache')
+
+ def test_makecmdoptions_no_interactive(self):
+ auth = SvnAuth('foo', 'bar', interactive=False)
+ assert auth.makecmdoptions() == ('--username="foo" --password="bar" '
+ '--non-interactive')
+
+ def test_makecmdoptions_no_interactive_no_cache_auth(self):
+ auth = SvnAuth('foo', 'bar', cache_auth=False,
+ interactive=False)
+ assert auth.makecmdoptions() == ('--username="foo" --password="bar" '
+ '--no-auth-cache --non-interactive')
+
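+# test double: records the svn command arguments it would run instead of
+# invoking the svn binary, so the generated auth options can be inspected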
+class svnwc_no_svn(py.path.svnwc):
+ def __new__(cls, *args, **kwargs):
+ self = super(svnwc_no_svn, cls).__new__(cls, *args, **kwargs)
+ self.commands = []
+ return self
+
+ def _svn(self, *args):
+ self.commands.append(args)
+
+class TestSvnWCAuth(object):
+ def setup_method(self, meth):
+ if not svnbin:
+ py.test.skip("svn binary required")
+ self.auth = SvnAuth('user', 'pass', cache_auth=False)
+
+ def test_checkout(self):
+ wc = svnwc_no_svn('foo', auth=self.auth)
+ wc.checkout('url')
+ assert wc.commands[0][-1] == ('--username="user" --password="pass" '
+ '--no-auth-cache')
+
+ def test_commit(self):
+ wc = svnwc_no_svn('foo', auth=self.auth)
+ wc.commit('msg')
+ assert wc.commands[0][-1] == ('--username="user" --password="pass" '
+ '--no-auth-cache')
+
+ def test_checkout_no_cache_auth(self):
+ wc = svnwc_no_svn('foo', auth=self.auth)
+ wc.checkout('url')
+ assert wc.commands[0][-1] == ('--username="user" --password="pass" '
+ '--no-auth-cache')
+
+ def test_checkout_auth_from_constructor(self):
+ wc = svnwc_no_svn('foo', auth=self.auth)
+ wc.checkout('url')
+ assert wc.commands[0][-1] == ('--username="user" --password="pass" '
+ '--no-auth-cache')
+
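+# test double: records commands and returns canned output instead of
+# shelling out to svn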
+class svnurl_no_svn(py.path.svnurl):
+ cmdexec_output = 'test'
+ popen_output = 'test'
+ def __new__(cls, *args, **kwargs):
+ self = super(svnurl_no_svn, cls).__new__(cls, *args, **kwargs)
+ self.commands = []
+ return self
+
+ def _cmdexec(self, cmd):
+ self.commands.append(cmd)
+ return self.cmdexec_output
+
+ def _popen(self, cmd):
+ self.commands.append(cmd)
+ return self.popen_output
+
+class TestSvnURLAuth(object):
+ def setup_method(self, meth):
+ self.auth = SvnAuth('foo', 'bar')
+
+ def test_init(self):
+ u = svnurl_no_svn('http://foo.bar/svn')
+ assert u.auth is None
+
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ assert u.auth is self.auth
+
+ def test_new(self):
+ u = svnurl_no_svn('http://foo.bar/svn/foo', auth=self.auth)
+ new = u.new(basename='bar')
+ assert new.auth is self.auth
+ assert new.url == 'http://foo.bar/svn/bar'
+
+ def test_join(self):
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ new = u.join('foo')
+ assert new.auth is self.auth
+ assert new.url == 'http://foo.bar/svn/foo'
+
+ def test_listdir(self):
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ u.cmdexec_output = '''\
+ 1717 johnny 1529 Nov 04 14:32 LICENSE.txt
+ 1716 johnny 5352 Nov 04 14:28 README.txt
+'''
+ paths = u.listdir()
+ assert paths[0].auth is self.auth
+ assert paths[1].auth is self.auth
+ assert paths[0].basename == 'LICENSE.txt'
+
+ def test_info(self):
+ u = svnurl_no_svn('http://foo.bar/svn/LICENSE.txt', auth=self.auth)
+ def dirpath(self):
+ return self
+ u.cmdexec_output = '''\
+ 1717 johnny 1529 Nov 04 14:32 LICENSE.txt
+ 1716 johnny 5352 Nov 04 14:28 README.txt
+'''
+ org_dp = u.__class__.dirpath
+ u.__class__.dirpath = dirpath
+ try:
+ info = u.info()
+ finally:
+            u.__class__.dirpath = org_dp
+ assert info.size == 1529
+
+ def test_open(self):
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ foo = u.join('foo')
+ foo.check = lambda *args, **kwargs: True
+ ret = foo.open()
+ assert ret == 'test'
+ assert '--username="foo" --password="bar"' in foo.commands[0]
+
+ def test_dirpath(self):
+ u = svnurl_no_svn('http://foo.bar/svn/foo', auth=self.auth)
+ parent = u.dirpath()
+ assert parent.auth is self.auth
+
+ def test_mkdir(self):
+ u = svnurl_no_svn('http://foo.bar/svn/qweqwe', auth=self.auth)
+ assert not u.commands
+ u.mkdir(msg='created dir foo')
+ assert u.commands
+ assert '--username="foo" --password="bar"' in u.commands[0]
+
+ def test_copy(self):
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ u2 = svnurl_no_svn('http://foo.bar/svn2')
+ u.copy(u2, 'copied dir')
+ assert '--username="foo" --password="bar"' in u.commands[0]
+
+ def test_rename(self):
+ u = svnurl_no_svn('http://foo.bar/svn/foo', auth=self.auth)
+ u.rename('http://foo.bar/svn/bar', 'moved foo to bar')
+ assert '--username="foo" --password="bar"' in u.commands[0]
+
+ def test_remove(self):
+ u = svnurl_no_svn('http://foo.bar/svn/foo', auth=self.auth)
+ u.remove(msg='removing foo')
+ assert '--username="foo" --password="bar"' in u.commands[0]
+
+ def test_export(self):
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ target = py.path.local('/foo')
+ u.export(target)
+ assert '--username="foo" --password="bar"' in u.commands[0]
+
+ def test_log(self):
+ u = svnurl_no_svn('http://foo.bar/svn/foo', auth=self.auth)
+ u.popen_output = py.io.TextIO(py.builtin._totext('''\
+<?xml version="1.0"?>
+<log>
+<logentry revision="51381">
+<author>guido</author>
+<date>2008-02-11T12:12:18.476481Z</date>
+<msg>Creating branch to work on auth support for py.path.svn*.
+</msg>
+</logentry>
+</log>
+''', 'ascii'))
+ u.check = lambda *args, **kwargs: True
+ ret = u.log(10, 20, verbose=True)
+ assert '--username="foo" --password="bar"' in u.commands[0]
+ assert len(ret) == 1
+ assert int(ret[0].rev) == 51381
+ assert ret[0].author == 'guido'
+
+ def test_propget(self):
+ u = svnurl_no_svn('http://foo.bar/svn', auth=self.auth)
+ u.propget('foo')
+ assert '--username="foo" --password="bar"' in u.commands[0]
+
+def pytest_funcarg__setup(request):
+ return Setup(request)
+
+class Setup:
+ def __init__(self, request):
+ if not svnbin:
+ py.test.skip("svn binary required")
+ if not request.config.option.runslowtests:
+ py.test.skip('use --runslowtests to run these tests')
+
+ tmpdir = request.getfuncargvalue("tmpdir")
+ repodir = tmpdir.join("repo")
+ py.process.cmdexec('svnadmin create %s' % repodir)
+ if sys.platform == 'win32':
+ repodir = '/' + str(repodir).replace('\\', '/')
+ self.repo = py.path.svnurl("file://%s" % repodir)
+ if sys.platform == 'win32':
+            # strip the leading slash that was added for the file:// url
+ repodir = repodir[1:]
+ self.repopath = py.path.local(repodir)
+ self.temppath = tmpdir.mkdir("temppath")
+ self.auth = SvnAuth('johnny', 'foo', cache_auth=False,
+ interactive=False)
+ make_repo_auth(self.repopath, {'johnny': ('foo', 'rw')})
+ self.port, self.pid = serve_bg(self.repopath.dirpath())
+ # XXX caching is too global
+ py.path.svnurl._lsnorevcache._dict.clear()
+ request.addfinalizer(lambda: py.process.kill(self.pid))
+
+class TestSvnWCAuthFunctional:
+ def test_checkout_constructor_arg(self, setup):
+ wc = py.path.svnwc(setup.temppath, auth=setup.auth)
+ wc.checkout(
+ 'svn://localhost:%s/%s' % (setup.port, setup.repopath.basename))
+ assert wc.join('.svn').check()
+
+ def test_checkout_function_arg(self, setup):
+ wc = py.path.svnwc(setup.temppath, auth=setup.auth)
+ wc.checkout(
+ 'svn://localhost:%s/%s' % (setup.port, setup.repopath.basename))
+ assert wc.join('.svn').check()
+
+ def test_checkout_failing_non_interactive(self, setup):
+ auth = SvnAuth('johnny', 'bar', cache_auth=False,
+ interactive=False)
+ wc = py.path.svnwc(setup.temppath, auth)
+ py.test.raises(Exception,
+ ("wc.checkout('svn://localhost:%(port)s/%(repopath)s')" %
+ setup.__dict__))
+
+ def test_log(self, setup):
+ wc = py.path.svnwc(setup.temppath, setup.auth)
+ wc.checkout(
+ 'svn://localhost:%s/%s' % (setup.port, setup.repopath.basename))
+ foo = wc.ensure('foo.txt')
+ wc.commit('added foo.txt')
+ log = foo.log()
+ assert len(log) == 1
+ assert log[0].msg == 'added foo.txt'
+
+ def test_switch(self, setup):
+ import pytest
+ try:
+ import xdist
+ pytest.skip('#160: fails under xdist')
+ except ImportError:
+ pass
+ wc = py.path.svnwc(setup.temppath, auth=setup.auth)
+ svnurl = 'svn://localhost:%s/%s' % (setup.port, setup.repopath.basename)
+ wc.checkout(svnurl)
+ wc.ensure('foo', dir=True).ensure('foo.txt').write('foo')
+ wc.commit('added foo dir with foo.txt file')
+ wc.ensure('bar', dir=True)
+ wc.commit('added bar dir')
+ bar = wc.join('bar')
+ bar.switch(svnurl + '/foo')
+ assert bar.join('foo.txt')
+
+ def test_update(self, setup):
+ wc1 = py.path.svnwc(setup.temppath.ensure('wc1', dir=True),
+ auth=setup.auth)
+ wc2 = py.path.svnwc(setup.temppath.ensure('wc2', dir=True),
+ auth=setup.auth)
+ wc1.checkout(
+ 'svn://localhost:%s/%s' % (setup.port, setup.repopath.basename))
+ wc2.checkout(
+ 'svn://localhost:%s/%s' % (setup.port, setup.repopath.basename))
+ wc1.ensure('foo', dir=True)
+ wc1.commit('added foo dir')
+ wc2.update()
+ assert wc2.join('foo').check()
+
+ auth = SvnAuth('unknown', 'unknown', interactive=False)
+ wc2.auth = auth
+ py.test.raises(Exception, 'wc2.update()')
+
+ def test_lock_unlock_status(self, setup):
+ port = setup.port
+ wc = py.path.svnwc(setup.temppath, auth=setup.auth)
+ wc.checkout(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename,))
+ wc.ensure('foo', file=True)
+ wc.commit('added foo file')
+ foo = wc.join('foo')
+ foo.lock()
+ status = foo.status()
+ assert status.locked
+ foo.unlock()
+ status = foo.status()
+ assert not status.locked
+
+ auth = SvnAuth('unknown', 'unknown', interactive=False)
+ foo.auth = auth
+ py.test.raises(Exception, 'foo.lock()')
+ py.test.raises(Exception, 'foo.unlock()')
+
+ def test_diff(self, setup):
+ port = setup.port
+ wc = py.path.svnwc(setup.temppath, auth=setup.auth)
+ wc.checkout(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename,))
+ wc.ensure('foo', file=True)
+ wc.commit('added foo file')
+ wc.update()
+ rev = int(wc.status().rev)
+ foo = wc.join('foo')
+ foo.write('bar')
+ diff = foo.diff()
+ assert '\n+bar\n' in diff
+ foo.commit('added some content')
+ diff = foo.diff()
+ assert not diff
+ diff = foo.diff(rev=rev)
+ assert '\n+bar\n' in diff
+
+ auth = SvnAuth('unknown', 'unknown', interactive=False)
+ foo.auth = auth
+ py.test.raises(Exception, 'foo.diff(rev=rev)')
+
+class TestSvnURLAuthFunctional:
+ def test_listdir(self, setup):
+ port = setup.port
+ u = py.path.svnurl(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename),
+ auth=setup.auth)
+ u.ensure('foo')
+ paths = u.listdir()
+ assert len(paths) == 1
+ assert paths[0].auth is setup.auth
+
+ auth = SvnAuth('foo', 'bar', interactive=False)
+ u = py.path.svnurl(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename),
+ auth=auth)
+ py.test.raises(Exception, 'u.listdir()')
+
+ def test_copy(self, setup):
+ port = setup.port
+ u = py.path.svnurl(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename),
+ auth=setup.auth)
+ foo = u.mkdir('foo')
+ assert foo.check()
+ bar = u.join('bar')
+ foo.copy(bar)
+ assert bar.check()
+ assert bar.auth is setup.auth
+
+ auth = SvnAuth('foo', 'bar', interactive=False)
+ u = py.path.svnurl(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename),
+ auth=auth)
+ foo = u.join('foo')
+ bar = u.join('bar')
+ py.test.raises(Exception, 'foo.copy(bar)')
+
+ def test_write_read(self, setup):
+ port = setup.port
+ u = py.path.svnurl(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename),
+ auth=setup.auth)
+ foo = u.ensure('foo')
+ fp = foo.open()
+ try:
+ data = fp.read()
+ finally:
+ fp.close()
+ assert data == ''
+
+ auth = SvnAuth('foo', 'bar', interactive=False)
+ u = py.path.svnurl(
+ 'svn://localhost:%s/%s' % (port, setup.repopath.basename),
+ auth=auth)
+ foo = u.join('foo')
+ py.test.raises(Exception, 'foo.open()')
+
+ # XXX rinse, repeat... :|
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/path/test_svnurl.py b/testing/web-platform/tests/tools/third_party/py/testing/path/test_svnurl.py
new file mode 100644
index 0000000000..15fbea5047
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/path/test_svnurl.py
@@ -0,0 +1,95 @@
+import py
+from py._path.svnurl import InfoSvnCommand
+import datetime
+import time
+from svntestbase import CommonSvnTests
+
+def pytest_funcarg__path1(request):
+ repo, repourl, wc = request.getfuncargvalue("repowc1")
+ return py.path.svnurl(repourl)
+
+class TestSvnURLCommandPath(CommonSvnTests):
+ @py.test.mark.xfail
+ def test_load(self, path1):
+ super(TestSvnURLCommandPath, self).test_load(path1)
+
+ # the following two work on jython but not in local/svnwc
+ def test_listdir(self, path1):
+ super(TestSvnURLCommandPath, self).test_listdir(path1)
+ def test_visit_ignore(self, path1):
+ super(TestSvnURLCommandPath, self).test_visit_ignore(path1)
+
+ def test_svnurl_needs_arg(self, path1):
+ py.test.raises(TypeError, "py.path.svnurl()")
+
+ def test_svnurl_does_not_accept_None_either(self, path1):
+ py.test.raises(Exception, "py.path.svnurl(None)")
+
+ def test_svnurl_characters_simple(self, path1):
+ py.path.svnurl("svn+ssh://hello/world")
+
+ def test_svnurl_characters_at_user(self, path1):
+ py.path.svnurl("http://user@host.com/some/dir")
+
+ def test_svnurl_characters_at_path(self, path1):
+ py.test.raises(ValueError, 'py.path.svnurl("http://host.com/foo@bar")')
+
+ def test_svnurl_characters_colon_port(self, path1):
+ py.path.svnurl("http://host.com:8080/some/dir")
+
+ def test_svnurl_characters_tilde_end(self, path1):
+ py.path.svnurl("http://host.com/some/file~")
+
+ @py.test.mark.xfail("sys.platform == 'win32'")
+ def test_svnurl_characters_colon_path(self, path1):
+ # colons are allowed on win32, because they're part of the drive
+ # part of an absolute path... however, they shouldn't be allowed in
+ # other parts, I think
+ py.test.raises(ValueError, 'py.path.svnurl("http://host.com/foo:bar")')
+
+ def test_export(self, path1, tmpdir):
+ tmpdir = tmpdir.join("empty")
+ p = path1.export(tmpdir)
+ assert p == tmpdir # XXX should return None
+ n1 = [x.basename for x in tmpdir.listdir()]
+ n2 = [x.basename for x in path1.listdir()]
+ n1.sort()
+ n2.sort()
+ assert n1 == n2
+ assert not p.join('.svn').check()
+ rev = path1.mkdir("newdir")
+ tmpdir.remove()
+ assert not tmpdir.check()
+ path1.new(rev=1).export(tmpdir)
+ for p in tmpdir.listdir():
+ assert p.basename in n2
+
+class TestSvnInfoCommand:
+
+ def test_svn_1_2(self):
+ line = " 2256 hpk 165 Nov 24 17:55 __init__.py"
+ info = InfoSvnCommand(line)
+ now = datetime.datetime.now()
+ assert info.last_author == 'hpk'
+ assert info.created_rev == 2256
+ assert info.kind == 'file'
+        # we don't check for the year (2006), because that depends
+        # on the clock being set up correctly
+ assert time.gmtime(info.mtime)[1:6] == (11, 24, 17, 55, 0)
+ assert info.size == 165
+ assert info.time == info.mtime * 1000000
+
+ def test_svn_1_3(self):
+ line =" 4784 hpk 2 Jun 01 2004 __init__.py"
+ info = InfoSvnCommand(line)
+ assert info.last_author == 'hpk'
+ assert info.kind == 'file'
+
+ def test_svn_1_3_b(self):
+ line =" 74 autoadmi Oct 06 23:59 plonesolutions.com/"
+ info = InfoSvnCommand(line)
+ assert info.last_author == 'autoadmi'
+ assert info.kind == 'dir'
+
+def test_badchars():
+ py.test.raises(ValueError, "py.path.svnurl('http://host/tmp/@@@:')")
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/path/test_svnwc.py b/testing/web-platform/tests/tools/third_party/py/testing/path/test_svnwc.py
new file mode 100644
index 0000000000..c643d9983f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/path/test_svnwc.py
@@ -0,0 +1,557 @@
+import py
+import os, sys
+import pytest
+from py._path.svnwc import InfoSvnWCCommand, XMLWCStatus, parse_wcinfotime
+from py._path import svnwc as svncommon
+from svntestbase import CommonSvnTests
+
+
+pytestmark = pytest.mark.xfail(sys.platform.startswith('win'),
+ reason='#161 all tests in this file are failing on Windows',
+ run=False)
+
+
+def test_make_repo(path1, tmpdir):
+ repo = tmpdir.join("repo")
+ py.process.cmdexec('svnadmin create %s' % repo)
+ if sys.platform == 'win32':
+ repo = '/' + str(repo).replace('\\', '/')
+ repo = py.path.svnurl("file://%s" % repo)
+ wc = py.path.svnwc(tmpdir.join("wc"))
+ wc.checkout(repo)
+ assert wc.rev == 0
+ assert len(wc.listdir()) == 0
+ p = wc.join("a_file")
+ p.write("test file")
+ p.add()
+ rev = wc.commit("some test")
+ assert p.info().rev == 1
+ assert rev == 1
+ rev = wc.commit()
+ assert rev is None
+
+def pytest_funcarg__path1(request):
+ repo, repourl, wc = request.getfuncargvalue("repowc1")
+ return wc
+
+class TestWCSvnCommandPath(CommonSvnTests):
+ def test_status_attributes_simple(self, path1):
+ def assert_nochange(p):
+ s = p.status()
+ assert not s.modified
+ assert not s.prop_modified
+ assert not s.added
+ assert not s.deleted
+ assert not s.replaced
+
+ dpath = path1.join('sampledir')
+        assert_nochange(dpath)
+ assert_nochange(path1.join('samplefile'))
+
+ def test_status_added(self, path1):
+ nf = path1.join('newfile')
+ nf.write('hello')
+ nf.add()
+ try:
+ s = nf.status()
+ assert s.added
+ assert not s.modified
+ assert not s.prop_modified
+ assert not s.replaced
+ finally:
+ nf.revert()
+
+ def test_status_change(self, path1):
+ nf = path1.join('samplefile')
+ try:
+ nf.write(nf.read() + 'change')
+ s = nf.status()
+ assert not s.added
+ assert s.modified
+ assert not s.prop_modified
+ assert not s.replaced
+ finally:
+ nf.revert()
+
+ def test_status_added_ondirectory(self, path1):
+ sampledir = path1.join('sampledir')
+ try:
+ t2 = sampledir.mkdir('t2')
+ t1 = t2.join('t1')
+ t1.write('test')
+ t1.add()
+ s = sampledir.status(rec=1)
+ # Comparing just the file names, because paths are unpredictable
+ # on Windows. (long vs. 8.3 paths)
+ assert t1.basename in [item.basename for item in s.added]
+ assert t2.basename in [item.basename for item in s.added]
+ finally:
+ t2.revert(rec=1)
+ t2.localpath.remove(rec=1)
+
+ def test_status_unknown(self, path1):
+ t1 = path1.join('un1')
+ try:
+ t1.write('test')
+ s = path1.status()
+ # Comparing just the file names, because paths are unpredictable
+ # on Windows. (long vs. 8.3 paths)
+ assert t1.basename in [item.basename for item in s.unknown]
+ finally:
+ t1.localpath.remove()
+
+ def test_status_unchanged(self, path1):
+ r = path1
+ s = path1.status(rec=1)
+ # Comparing just the file names, because paths are unpredictable
+ # on Windows. (long vs. 8.3 paths)
+ assert r.join('samplefile').basename in [item.basename
+ for item in s.unchanged]
+ assert r.join('sampledir').basename in [item.basename
+ for item in s.unchanged]
+ assert r.join('sampledir/otherfile').basename in [item.basename
+ for item in s.unchanged]
+
+ def test_status_update(self, path1):
+ # not a mark because the global "pytestmark" will end up overwriting a mark here
+ pytest.xfail("svn-1.7 has buggy 'status --xml' output")
+ r = path1
+ try:
+ r.update(rev=1)
+ s = r.status(updates=1, rec=1)
+ # Comparing just the file names, because paths are unpredictable
+ # on Windows. (long vs. 8.3 paths)
+ import pprint
+ pprint.pprint(s.allpath())
+ assert r.join('anotherfile').basename in [item.basename for
+ item in s.update_available]
+ #assert len(s.update_available) == 1
+ finally:
+ r.update()
+
+ def test_status_replaced(self, path1):
+ p = path1.join("samplefile")
+ p.remove()
+ p.ensure(dir=0)
+ try:
+ s = path1.status()
+ assert p.basename in [item.basename for item in s.replaced]
+ finally:
+ path1.revert(rec=1)
+
+ def test_status_ignored(self, path1):
+ try:
+ d = path1.join('sampledir')
+ p = py.path.local(d).join('ignoredfile')
+ p.ensure(file=True)
+ s = d.status()
+ assert [x.basename for x in s.unknown] == ['ignoredfile']
+ assert [x.basename for x in s.ignored] == []
+ d.propset('svn:ignore', 'ignoredfile')
+ s = d.status()
+ assert [x.basename for x in s.unknown] == []
+ assert [x.basename for x in s.ignored] == ['ignoredfile']
+ finally:
+ path1.revert(rec=1)
+
+ def test_status_conflict(self, path1, tmpdir):
+ wc = path1
+ wccopy = py.path.svnwc(tmpdir.join("conflict_copy"))
+ wccopy.checkout(wc.url)
+ p = wc.ensure('conflictsamplefile', file=1)
+ p.write('foo')
+ wc.commit('added conflictsamplefile')
+ wccopy.update()
+ assert wccopy.join('conflictsamplefile').check()
+ p.write('bar')
+ wc.commit('wrote some data')
+ wccopy.join('conflictsamplefile').write('baz')
+ wccopy.update(interactive=False)
+ s = wccopy.status()
+ assert [x.basename for x in s.conflict] == ['conflictsamplefile']
+
+ def test_status_external(self, path1, repowc2):
+ otherrepo, otherrepourl, otherwc = repowc2
+ d = path1.ensure('sampledir', dir=1)
+ try:
+ d.update()
+ d.propset('svn:externals', 'otherwc %s' % (otherwc.url,))
+ d.update()
+ s = d.status()
+ assert [x.basename for x in s.external] == ['otherwc']
+ assert 'otherwc' not in [x.basename for x in s.unchanged]
+ s = d.status(rec=1)
+ assert [x.basename for x in s.external] == ['otherwc']
+ assert 'otherwc' in [x.basename for x in s.unchanged]
+ finally:
+ path1.revert(rec=1)
+
+ def test_status_deleted(self, path1):
+ d = path1.ensure('sampledir', dir=1)
+ d.remove()
+ d.ensure(dir=1)
+ path1.commit()
+ d.ensure('deletefile', dir=0)
+ d.commit()
+ s = d.status()
+ assert 'deletefile' in [x.basename for x in s.unchanged]
+ assert not s.deleted
+ p = d.join('deletefile')
+ p.remove()
+ s = d.status()
+ assert 'deletefile' not in s.unchanged
+ assert [x.basename for x in s.deleted] == ['deletefile']
+
+ def test_status_noauthor(self, path1):
+ # testing for XML without author - this used to raise an exception
+ xml = '''\
+ <entry path="/tmp/pytest-23/wc">
+ <wc-status item="normal" props="none" revision="0">
+ <commit revision="0">
+ <date>2008-08-19T16:50:53.400198Z</date>
+ </commit>
+ </wc-status>
+ </entry>
+ '''
+ XMLWCStatus.fromstring(xml, path1)
+
+ def test_status_wrong_xml(self, path1):
+        # testing for incomplete status XML without author - this used to raise an exception
+ xml = '<entry path="/home/jean/zope/venv/projectdb/parts/development-products/DataGridField">\n<wc-status item="incomplete" props="none" revision="784">\n</wc-status>\n</entry>'
+ st = XMLWCStatus.fromstring(xml, path1)
+ assert len(st.incomplete) == 1
+
+ def test_diff(self, path1):
+ p = path1 / 'anotherfile'
+ out = p.diff(rev=2)
+ assert out.find('hello') != -1
+
+ def test_blame(self, path1):
+ p = path1.join('samplepickle')
+ lines = p.blame()
+ assert sum([l[0] for l in lines]) == len(lines)
+ for l1, l2 in zip(p.readlines(), [l[2] for l in lines]):
+ assert l1 == l2
+ assert [l[1] for l in lines] == ['hpk'] * len(lines)
+ p = path1.join('samplefile')
+ lines = p.blame()
+ assert sum([l[0] for l in lines]) == len(lines)
+ for l1, l2 in zip(p.readlines(), [l[2] for l in lines]):
+ assert l1 == l2
+ assert [l[1] for l in lines] == ['hpk'] * len(lines)
+
+ def test_join_abs(self, path1):
+ s = str(path1.localpath)
+ n = path1.join(s, abs=1)
+ assert path1 == n
+
+ def test_join_abs2(self, path1):
+ assert path1.join('samplefile', abs=1) == path1.join('samplefile')
+
+ def test_str_gives_localpath(self, path1):
+ assert str(path1) == str(path1.localpath)
+
+ def test_versioned(self, path1):
+ assert path1.check(versioned=1)
+ # TODO: Why does my copy of svn think .svn is versioned?
+ #assert path1.join('.svn').check(versioned=0)
+ assert path1.join('samplefile').check(versioned=1)
+ assert not path1.join('notexisting').check(versioned=1)
+ notexisting = path1.join('hello').localpath
+ try:
+ notexisting.write("")
+ assert path1.join('hello').check(versioned=0)
+ finally:
+ notexisting.remove()
+
+ def test_listdir_versioned(self, path1):
+ assert path1.check(versioned=1)
+ p = path1.localpath.ensure("not_a_versioned_file")
+ l = [x.localpath
+ for x in path1.listdir(lambda x: x.check(versioned=True))]
+ assert p not in l
+
+ def test_nonversioned_remove(self, path1):
+ assert path1.check(versioned=1)
+ somefile = path1.join('nonversioned/somefile')
+ nonwc = py.path.local(somefile)
+ nonwc.ensure()
+ assert somefile.check()
+ assert not somefile.check(versioned=True)
+ somefile.remove() # this used to fail because it tried to 'svn rm'
+
+ def test_properties(self, path1):
+ try:
+ path1.propset('gaga', 'this')
+ assert path1.propget('gaga') == 'this'
+ # Comparing just the file names, because paths are unpredictable
+ # on Windows. (long vs. 8.3 paths)
+ assert path1.basename in [item.basename for item in
+ path1.status().prop_modified]
+ assert 'gaga' in path1.proplist()
+ assert path1.proplist()['gaga'] == 'this'
+
+ finally:
+ path1.propdel('gaga')
+
+ def test_proplist_recursive(self, path1):
+ s = path1.join('samplefile')
+ s.propset('gugu', 'that')
+ try:
+ p = path1.proplist(rec=1)
+ # Comparing just the file names, because paths are unpredictable
+ # on Windows. (long vs. 8.3 paths)
+ assert (path1 / 'samplefile').basename in [item.basename
+ for item in p]
+ finally:
+ s.propdel('gugu')
+
+ def test_long_properties(self, path1):
+ value = """
+ vadm:posix : root root 0100755
+ Properties on 'chroot/dns/var/bind/db.net.xots':
+ """
+ try:
+ path1.propset('gaga', value)
+ backvalue = path1.propget('gaga')
+ assert backvalue == value
+ #assert len(backvalue.split('\n')) == 1
+ finally:
+ path1.propdel('gaga')
+
+
+ def test_ensure(self, path1):
+ newpath = path1.ensure('a', 'b', 'c')
+ try:
+ assert newpath.check(exists=1, versioned=1)
+ newpath.write("hello")
+ newpath.ensure()
+ assert newpath.read() == "hello"
+ finally:
+ path1.join('a').remove(force=1)
+
+ def test_not_versioned(self, path1):
+ p = path1.localpath.mkdir('whatever')
+ f = path1.localpath.ensure('testcreatedfile')
+ try:
+ assert path1.join('whatever').check(versioned=0)
+ assert path1.join('testcreatedfile').check(versioned=0)
+ assert not path1.join('testcreatedfile').check(versioned=1)
+ finally:
+ p.remove(rec=1)
+ f.remove()
+
+ def test_lock_unlock(self, path1):
+ root = path1
+ somefile = root.join('somefile')
+ somefile.ensure(file=True)
+ # not yet added to repo
+ py.test.raises(Exception, 'somefile.lock()')
+ somefile.write('foo')
+ somefile.commit('test')
+ assert somefile.check(versioned=True)
+ somefile.lock()
+ try:
+ locked = root.status().locked
+ assert len(locked) == 1
+ assert locked[0].basename == somefile.basename
+ assert locked[0].dirpath().basename == somefile.dirpath().basename
+ #assert somefile.locked()
+ py.test.raises(Exception, 'somefile.lock()')
+ finally:
+ somefile.unlock()
+ #assert not somefile.locked()
+ locked = root.status().locked
+ assert locked == []
+            py.test.raises(Exception, 'somefile.unlock()')
+ somefile.remove()
+
+ def test_commit_nonrecursive(self, path1):
+ somedir = path1.join('sampledir')
+ somedir.mkdir("subsubdir")
+ somedir.propset('foo', 'bar')
+ status = somedir.status()
+ assert len(status.prop_modified) == 1
+ assert len(status.added) == 1
+
+ somedir.commit('non-recursive commit', rec=0)
+ status = somedir.status()
+ assert len(status.prop_modified) == 0
+ assert len(status.added) == 1
+
+ somedir.commit('recursive commit')
+ status = somedir.status()
+ assert len(status.prop_modified) == 0
+ assert len(status.added) == 0
+
+ def test_commit_return_value(self, path1):
+ testfile = path1.join('test.txt').ensure(file=True)
+ testfile.write('test')
+ rev = path1.commit('testing')
+ assert type(rev) == int
+
+ anotherfile = path1.join('another.txt').ensure(file=True)
+ anotherfile.write('test')
+ rev2 = path1.commit('testing more')
+ assert type(rev2) == int
+ assert rev2 == rev + 1
+
+ #def test_log(self, path1):
+ # l = path1.log()
+ # assert len(l) == 3 # might need to be upped if more tests are added
+
+class XTestWCSvnCommandPathSpecial:
+
+ rooturl = 'http://codespeak.net/svn/py.path/trunk/dist/py.path/test/data'
+ #def test_update_none_rev(self, path1):
+ # path = tmpdir.join('checkouttest')
+ # wcpath = newpath(xsvnwc=str(path), url=path1url)
+ # try:
+ # wcpath.checkout(rev=2100)
+ # wcpath.update()
+ # assert wcpath.info().rev > 2100
+ # finally:
+ # wcpath.localpath.remove(rec=1)
+
+def test_parse_wcinfotime():
+ assert (parse_wcinfotime('2006-05-30 20:45:26 +0200 (Tue, 30 May 2006)') ==
+ 1149021926)
+ assert (parse_wcinfotime('2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)') ==
+ 1067287394)
+
+class TestInfoSvnWCCommand:
+
+ def test_svn_1_2(self, path1):
+ output = """
+ Path: test_svnwc.py
+ Name: test_svnwc.py
+ URL: http://codespeak.net/svn/py/dist/py/path/svn/wccommand.py
+ Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
+ Revision: 28137
+ Node Kind: file
+ Schedule: normal
+ Last Changed Author: jan
+ Last Changed Rev: 27939
+ Last Changed Date: 2006-05-30 20:45:26 +0200 (Tue, 30 May 2006)
+ Text Last Updated: 2006-06-01 00:42:53 +0200 (Thu, 01 Jun 2006)
+ Properties Last Updated: 2006-05-23 11:54:59 +0200 (Tue, 23 May 2006)
+ Checksum: 357e44880e5d80157cc5fbc3ce9822e3
+ """
+ path = py.path.local(__file__).dirpath().chdir()
+ try:
+ info = InfoSvnWCCommand(output)
+ finally:
+ path.chdir()
+ assert info.last_author == 'jan'
+ assert info.kind == 'file'
+ assert info.mtime == 1149021926.0
+ assert info.url == 'http://codespeak.net/svn/py/dist/py/path/svn/wccommand.py'
+ assert info.time == 1149021926000000.0
+ assert info.rev == 28137
+
+
+ def test_svn_1_3(self, path1):
+ output = """
+ Path: test_svnwc.py
+ Name: test_svnwc.py
+ URL: http://codespeak.net/svn/py/dist/py/path/svn/wccommand.py
+ Repository Root: http://codespeak.net/svn
+ Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
+ Revision: 28124
+ Node Kind: file
+ Schedule: normal
+ Last Changed Author: jan
+ Last Changed Rev: 27939
+ Last Changed Date: 2006-05-30 20:45:26 +0200 (Tue, 30 May 2006)
+ Text Last Updated: 2006-06-02 23:46:11 +0200 (Fri, 02 Jun 2006)
+ Properties Last Updated: 2006-06-02 23:45:28 +0200 (Fri, 02 Jun 2006)
+ Checksum: 357e44880e5d80157cc5fbc3ce9822e3
+ """
+ path = py.path.local(__file__).dirpath().chdir()
+ try:
+ info = InfoSvnWCCommand(output)
+ finally:
+ path.chdir()
+ assert info.last_author == 'jan'
+ assert info.kind == 'file'
+ assert info.mtime == 1149021926.0
+ assert info.url == 'http://codespeak.net/svn/py/dist/py/path/svn/wccommand.py'
+ assert info.rev == 28124
+ assert info.time == 1149021926000000.0
+
+
+def test_characters_at():
+ py.test.raises(ValueError, "py.path.svnwc('/tmp/@@@:')")
+
+def test_characters_tilde():
+ py.path.svnwc('/tmp/test~')
+
+
+class TestRepo:
+ def test_trailing_slash_is_stripped(self, path1):
+ # XXX we need to test more normalizing properties
+ url = path1.join("/")
+ assert path1 == url
+
+ #def test_different_revs_compare_unequal(self, path1):
+ # newpath = path1.new(rev=1199)
+ # assert newpath != path1
+
+ def test_exists_svn_root(self, path1):
+ assert path1.check()
+
+ #def test_not_exists_rev(self, path1):
+ # url = path1.__class__(path1url, rev=500)
+ # assert url.check(exists=0)
+
+ #def test_nonexisting_listdir_rev(self, path1):
+ # url = path1.__class__(path1url, rev=500)
+ # raises(py.error.ENOENT, url.listdir)
+
+ #def test_newrev(self, path1):
+ # url = path1.new(rev=None)
+ # assert url.rev == None
+ # assert url.strpath == path1.strpath
+ # url = path1.new(rev=10)
+ # assert url.rev == 10
+
+ #def test_info_rev(self, path1):
+ # url = path1.__class__(path1url, rev=1155)
+ # url = url.join("samplefile")
+ # res = url.info()
+ # assert res.size > len("samplefile") and res.created_rev == 1155
+
+ # the following tests are easier if we have a path class
+ def test_repocache_simple(self, path1):
+ repocache = svncommon.RepoCache()
+ repocache.put(path1.strpath, 42)
+ url, rev = repocache.get(path1.join('test').strpath)
+ assert rev == 42
+ assert url == path1.strpath
+
+ def test_repocache_notimeout(self, path1):
+ repocache = svncommon.RepoCache()
+ repocache.timeout = 0
+ repocache.put(path1.strpath, path1.rev)
+ url, rev = repocache.get(path1.strpath)
+ assert rev == -1
+ assert url == path1.strpath
+
+ def test_repocache_outdated(self, path1):
+ repocache = svncommon.RepoCache()
+ repocache.put(path1.strpath, 42, timestamp=0)
+ url, rev = repocache.get(path1.join('test').strpath)
+ assert rev == -1
+ assert url == path1.strpath
+
+ def _test_getreporev(self):
+ """ this test runs so slow it's usually disabled """
+ old = svncommon.repositories.repos
+ try:
+ _repocache.clear()
+ root = path1.new(rev=-1)
+ url, rev = cache.repocache.get(root.strpath)
+ assert rev>=0
+ assert url == svnrepourl
+ finally:
+ repositories.repos = old
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/process/__init__.py b/testing/web-platform/tests/tools/third_party/py/testing/process/__init__.py
new file mode 100644
index 0000000000..792d600548
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/process/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/process/test_cmdexec.py b/testing/web-platform/tests/tools/third_party/py/testing/process/test_cmdexec.py
new file mode 100644
index 0000000000..98463d906d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/process/test_cmdexec.py
@@ -0,0 +1,41 @@
+import py
+from py.process import cmdexec
+
+def exvalue():
+ import sys
+ return sys.exc_info()[1]
+
+
+class Test_exec_cmd:
+ def test_simple(self):
+ out = cmdexec('echo hallo')
+ assert out.strip() == 'hallo'
+ assert py.builtin._istext(out)
+
+ def test_simple_newline(self):
+ import sys
+ out = cmdexec(r"""%s -c "print ('hello')" """ % sys.executable)
+ assert out == 'hello\n'
+ assert py.builtin._istext(out)
+
+ def test_simple_error(self):
+ py.test.raises(cmdexec.Error, cmdexec, 'exit 1')
+
+ def test_simple_error_exact_status(self):
+ try:
+ cmdexec('exit 1')
+ except cmdexec.Error:
+ e = exvalue()
+ assert e.status == 1
+ assert py.builtin._istext(e.out)
+ assert py.builtin._istext(e.err)
+
+ def test_err(self):
+ try:
+ cmdexec('echoqweqwe123 hallo')
+ raise AssertionError("command succeeded but shouldn't")
+ except cmdexec.Error:
+ e = exvalue()
+ assert hasattr(e, 'err')
+ assert hasattr(e, 'out')
+ assert e.err or e.out
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/process/test_forkedfunc.py b/testing/web-platform/tests/tools/third_party/py/testing/process/test_forkedfunc.py
new file mode 100644
index 0000000000..ae0d9ab7e6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/process/test_forkedfunc.py
@@ -0,0 +1,173 @@
+import pytest
+import py, sys, os
+
+pytestmark = py.test.mark.skipif("not hasattr(os, 'fork')")
+
+
+def test_waitfinish_removes_tempdir():
+ ff = py.process.ForkedFunc(boxf1)
+ assert ff.tempdir.check()
+ ff.waitfinish()
+ assert not ff.tempdir.check()
+
+def test_tempdir_gets_gc_collected(monkeypatch):
+ monkeypatch.setattr(os, 'fork', lambda: os.getpid())
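+    # fork is faked to return a nonzero pid, so ForkedFunc follows the
+    # parent code path without actually spawning a child process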
+ ff = py.process.ForkedFunc(boxf1)
+ assert ff.tempdir.check()
+ ff.__del__()
+ assert not ff.tempdir.check()
+
+def test_basic_forkedfunc():
+ result = py.process.ForkedFunc(boxf1).waitfinish()
+ assert result.out == "some out\n"
+ assert result.err == "some err\n"
+ assert result.exitstatus == 0
+ assert result.signal == 0
+ assert result.retval == 1
+
+def test_exitstatus():
+ def func():
+ os._exit(4)
+ result = py.process.ForkedFunc(func).waitfinish()
+ assert result.exitstatus == 4
+ assert result.signal == 0
+ assert not result.out
+ assert not result.err
+
+def test_exception_in_func():
+ def fun():
+ raise ValueError(42)
+ ff = py.process.ForkedFunc(fun)
+ result = ff.waitfinish()
+ assert result.exitstatus == ff.EXITSTATUS_EXCEPTION
+ assert result.err.find("ValueError: 42") != -1
+ assert result.signal == 0
+ assert not result.retval
+
+def test_forkedfunc_on_fds():
+ result = py.process.ForkedFunc(boxf2).waitfinish()
+ assert result.out == "someout"
+ assert result.err == "someerr"
+ assert result.exitstatus == 0
+ assert result.signal == 0
+ assert result.retval == 2
+
+def test_forkedfunc_on_fds_output():
+ result = py.process.ForkedFunc(boxf3).waitfinish()
+ assert result.signal == 11
+ assert result.out == "s"
+
+
+def test_forkedfunc_on_stdout():
+ def boxf3():
+ import sys
+ sys.stdout.write("hello\n")
+ os.kill(os.getpid(), 11)
+ result = py.process.ForkedFunc(boxf3).waitfinish()
+ assert result.signal == 11
+ assert result.out == "hello\n"
+
+def test_forkedfunc_signal():
+ result = py.process.ForkedFunc(boxseg).waitfinish()
+ assert result.retval is None
+ assert result.signal == 11
+
+def test_forkedfunc_huge_data():
+ result = py.process.ForkedFunc(boxhuge).waitfinish()
+ assert result.out
+ assert result.exitstatus == 0
+ assert result.signal == 0
+ assert result.retval == 3
+
+def test_box_seq():
+ # we run many boxes with huge data, just one after another
+ for i in range(50):
+ result = py.process.ForkedFunc(boxhuge).waitfinish()
+ assert result.out
+ assert result.exitstatus == 0
+ assert result.signal == 0
+ assert result.retval == 3
+
+def test_box_in_a_box():
+ def boxfun():
+ result = py.process.ForkedFunc(boxf2).waitfinish()
+ print (result.out)
+ sys.stderr.write(result.err + "\n")
+ return result.retval
+
+ result = py.process.ForkedFunc(boxfun).waitfinish()
+ assert result.out == "someout\n"
+ assert result.err == "someerr\n"
+ assert result.exitstatus == 0
+ assert result.signal == 0
+ assert result.retval == 2
+
+def test_kill_func_forked():
+ class A:
+ pass
+ info = A()
+ import time
+
+ def box_fun():
+ time.sleep(10) # we don't want to last forever here
+
+ ff = py.process.ForkedFunc(box_fun)
+ os.kill(ff.pid, 15)
+ result = ff.waitfinish()
+ assert result.signal == 15
+
+
+def test_hooks(monkeypatch):
+ def _boxed():
+ return 1
+
+ def _on_start():
+ sys.stdout.write("some out\n")
+ sys.stdout.flush()
+
+ def _on_exit():
+ sys.stderr.write("some err\n")
+ sys.stderr.flush()
+
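+    # child_on_start and child_on_exit run inside the forked child, so their
+    # output is captured in result.out / result.err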
+ result = py.process.ForkedFunc(_boxed, child_on_start=_on_start,
+ child_on_exit=_on_exit).waitfinish()
+ assert result.out == "some out\n"
+ assert result.err == "some err\n"
+ assert result.exitstatus == 0
+ assert result.signal == 0
+ assert result.retval == 1
+
+
+# ======================================================================
+# examples
+# ======================================================================
+#
+
+def boxf1():
+ sys.stdout.write("some out\n")
+ sys.stderr.write("some err\n")
+ return 1
+
+def boxf2():
+ os.write(1, "someout".encode('ascii'))
+ os.write(2, "someerr".encode('ascii'))
+ return 2
+
+def boxf3():
+ os.write(1, "s".encode('ascii'))
+ os.kill(os.getpid(), 11)
+
+def boxseg():
+ os.kill(os.getpid(), 11)
+
+def boxhuge():
+ s = " ".encode('ascii')
+ os.write(1, s * 10000)
+ os.write(2, s * 10000)
+ os.write(1, s * 10000)
+
+ os.write(1, s * 10000)
+ os.write(2, s * 10000)
+ os.write(2, s * 10000)
+ os.write(1, s * 10000)
+ return 3
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/process/test_killproc.py b/testing/web-platform/tests/tools/third_party/py/testing/process/test_killproc.py
new file mode 100644
index 0000000000..b0d6e2f515
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/process/test_killproc.py
@@ -0,0 +1,18 @@
+import pytest
+import sys
+import py
+
+
+@pytest.mark.skipif("sys.platform.startswith('java')")
+def test_kill(tmpdir):
+ subprocess = pytest.importorskip("subprocess")
+ t = tmpdir.join("t.py")
+ t.write("import time ; time.sleep(100)")
+ proc = subprocess.Popen([sys.executable, str(t)])
+ assert proc.poll() is None # no return value yet
+ py.process.kill(proc.pid)
+ ret = proc.wait()
+ if sys.platform == "win32" and ret == 0:
+ pytest.skip("XXX on win32, subprocess.Popen().wait() on a killed "
+ "process does not yield return value != 0")
+ assert ret != 0
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/root/__init__.py b/testing/web-platform/tests/tools/third_party/py/testing/root/__init__.py
new file mode 100644
index 0000000000..792d600548
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/root/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/root/test_builtin.py b/testing/web-platform/tests/tools/third_party/py/testing/root/test_builtin.py
new file mode 100644
index 0000000000..287c60d552
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/root/test_builtin.py
@@ -0,0 +1,156 @@
+import sys
+import types
+import py
+from py.builtin import set, frozenset
+
+def test_enumerate():
+ l = [0,1,2]
+ for i,x in enumerate(l):
+ assert i == x
+
+def test_any():
+ assert not py.builtin.any([0,False, None])
+ assert py.builtin.any([0,False, None,1])
+
+def test_all():
+ assert not py.builtin.all([True, 1, False])
+ assert py.builtin.all([True, 1, object])
+
+def test_BaseException():
+ assert issubclass(IndexError, py.builtin.BaseException)
+ assert issubclass(Exception, py.builtin.BaseException)
+ assert issubclass(KeyboardInterrupt, py.builtin.BaseException)
+
+ class MyRandomClass(object):
+ pass
+ assert not issubclass(MyRandomClass, py.builtin.BaseException)
+
+ assert py.builtin.BaseException.__module__ in ('exceptions', 'builtins')
+ assert Exception.__name__ == 'Exception'
+
+
+def test_GeneratorExit():
+ assert py.builtin.GeneratorExit.__module__ in ('exceptions', 'builtins')
+ assert issubclass(py.builtin.GeneratorExit, py.builtin.BaseException)
+
+def test_reversed():
+ reversed = py.builtin.reversed
+ r = reversed("hello")
+ assert iter(r) is r
+ s = "".join(list(r))
+ assert s == "olleh"
+ assert list(reversed(list(reversed("hello")))) == ['h','e','l','l','o']
+ py.test.raises(TypeError, reversed, reversed("hello"))
+
+def test_simple():
+ s = set([1, 2, 3, 4])
+ assert s == set([3, 4, 2, 1])
+ s1 = s.union(set([5, 6]))
+ assert 5 in s1
+ assert 1 in s1
+
+def test_frozenset():
+ s = set([frozenset([0, 1]), frozenset([1, 0])])
+ assert len(s) == 1
+
+
+def test_print_simple():
+ from py.builtin import print_
+ py.test.raises(TypeError, "print_(hello=3)")
+ f = py.io.TextIO()
+ print_("hello", "world", file=f)
+ s = f.getvalue()
+ assert s == "hello world\n"
+
+ f = py.io.TextIO()
+ print_("hello", end="", file=f)
+ s = f.getvalue()
+ assert s == "hello"
+
+ f = py.io.TextIO()
+ print_("xyz", "abc", sep="", end="", file=f)
+ s = f.getvalue()
+ assert s == "xyzabc"
+
+ class X:
+ def __repr__(self): return "rep"
+ f = py.io.TextIO()
+ print_(X(), file=f)
+ assert f.getvalue() == "rep\n"
+
+def test_execfile(tmpdir):
+ test_file = tmpdir.join("test.py")
+ test_file.write("x = y\ndef f(): pass")
+ ns = {"y" : 42}
+ py.builtin.execfile(str(test_file), ns)
+ assert ns["x"] == 42
+ assert py.code.getrawcode(ns["f"]).co_filename == str(test_file)
+ class A:
+ y = 3
+ x = 4
+ py.builtin.execfile(str(test_file))
+ assert A.x == 3
+
+def test_getfuncdict():
+ def f():
+ raise NotImplementedError
+ f.x = 4
+ assert py.builtin._getfuncdict(f)["x"] == 4
+ assert py.builtin._getfuncdict(2) is None
+
+def test_callable():
+ class A: pass
+ assert py.builtin.callable(test_callable)
+ assert py.builtin.callable(A)
+ assert py.builtin.callable(list)
+ assert py.builtin.callable(id)
+ assert not py.builtin.callable(4)
+ assert not py.builtin.callable("hi")
+
+def test_totext():
+ py.builtin._totext("hello", "UTF-8")
+
+def test_bytes_text():
+ if sys.version_info[0] < 3:
+ assert py.builtin.text == unicode
+ assert py.builtin.bytes == str
+ else:
+ assert py.builtin.text == str
+ assert py.builtin.bytes == bytes
+
+def test_totext_badutf8():
+ # this was in printouts within the pytest testsuite
+ # totext would fail
+ if sys.version_info >= (3,):
+ errors = 'surrogateescape'
+ else: # old python has crappy error handlers
+ errors = 'replace'
+ py.builtin._totext("\xa6", "UTF-8", errors)
+
+def test_reraise():
+ from py.builtin import _reraise
+ try:
+ raise Exception()
+ except Exception:
+ cls, val, tb = sys.exc_info()
+ excinfo = py.test.raises(Exception, "_reraise(cls, val, tb)")
+
+def test_exec():
+ l = []
+ py.builtin.exec_("l.append(1)")
+ assert l == [1]
+ d = {}
+ py.builtin.exec_("x=4", d)
+ assert d['x'] == 4
+
+def test_tryimport():
+ py.test.raises(ImportError, py.builtin._tryimport, 'xqwe123')
+ x = py.builtin._tryimport('asldkajsdl', 'py')
+ assert x == py
+ x = py.builtin._tryimport('asldkajsdl', 'py.path')
+ assert x == py.path
+
+def test_getcode():
+ code = py.builtin._getcode(test_getcode)
+ assert isinstance(code, types.CodeType)
+ assert py.builtin._getcode(4) is None
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/root/test_error.py b/testing/web-platform/tests/tools/third_party/py/testing/root/test_error.py
new file mode 100644
index 0000000000..7bfbef3bd4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/root/test_error.py
@@ -0,0 +1,76 @@
+
+import py
+
+import errno
+import sys
+import subprocess
+
+
+def test_error_classes():
+ for name in errno.errorcode.values():
+ x = getattr(py.error, name)
+ assert issubclass(x, py.error.Error)
+ assert issubclass(x, EnvironmentError)
+
+
+def test_has_name():
+ assert py.error.__name__ == 'py.error'
+
+
+def test_picklability_issue1():
+ import pickle
+ e1 = py.error.ENOENT()
+ s = pickle.dumps(e1)
+ e2 = pickle.loads(s)
+ assert isinstance(e2, py.error.ENOENT)
+
+
+def test_unknown_error():
+ num = 3999
+ cls = py.error._geterrnoclass(num)
+ assert cls.__name__ == 'UnknownErrno%d' % (num,)
+ assert issubclass(cls, py.error.Error)
+ assert issubclass(cls, EnvironmentError)
+ cls2 = py.error._geterrnoclass(num)
+ assert cls is cls2
+
+
+def test_error_conversion_enotdir(testdir):
+ p = testdir.makepyfile("")
+ excinfo = py.test.raises(py.error.Error, py.error.checked_call, p.listdir)
+ assert isinstance(excinfo.value, EnvironmentError)
+ assert isinstance(excinfo.value, py.error.Error)
+ assert "ENOTDIR" in repr(excinfo.value)
+
+
+def test_checked_call_supports_kwargs(tmpdir):
+ import tempfile
+ py.error.checked_call(tempfile.mkdtemp, dir=str(tmpdir))
+
+
+def test_error_importable():
+ """Regression test for #179"""
+ subprocess.check_call(
+ [sys.executable, '-c', 'from py.error import ENOENT'])
+
+
+try:
+ import unittest
+ unittest.TestCase.assertWarns
+except (ImportError, AttributeError):
+ pass # required interface not available
+else:
+ import sys
+ import warnings
+
+ class Case(unittest.TestCase):
+ def test_assert_warns(self):
+ # Clear everything "py.*" from sys.modules and re-import py
+ # as a fresh start
+ for mod in tuple(sys.modules.keys()):
+ if mod and (mod == 'py' or mod.startswith('py.')):
+ del sys.modules[mod]
+ __import__('py')
+
+ with self.assertWarns(UserWarning):
+ warnings.warn('this should work')
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/root/test_py_imports.py b/testing/web-platform/tests/tools/third_party/py/testing/root/test_py_imports.py
new file mode 100644
index 0000000000..31fe6ead81
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/root/test_py_imports.py
@@ -0,0 +1,71 @@
+import py
+import sys
+
+
+@py.test.mark.parametrize('name', [x for x in dir(py) if x[0] != '_'])
+def test_dir(name):
+ obj = getattr(py, name)
+ if hasattr(obj, '__map__'): # isinstance(obj, Module):
+ keys = dir(obj)
+ assert len(keys) > 0
+ print (obj.__map__)
+ for name in list(obj.__map__):
+ assert hasattr(obj, name), (obj, name)
+
+
+def test_virtual_module_identity():
+ from py import path as path1
+ from py import path as path2
+ assert path1 is path2
+ from py.path import local as local1
+ from py.path import local as local2
+ assert local1 is local2
+
+
+def test_importall():
+ base = py._pydir
+ nodirs = [
+ ]
+ if sys.version_info >= (3, 0):
+ nodirs.append(base.join('_code', '_assertionold.py'))
+ else:
+ nodirs.append(base.join('_code', '_assertionnew.py'))
+
+ def recurse(p):
+ return p.check(dotfile=0) and p.basename != "attic"
+
+ for p in base.visit('*.py', recurse):
+ if p.basename == '__init__.py':
+ continue
+ relpath = p.new(ext='').relto(base)
+ if base.sep in relpath: # not py/*.py itself
+ for x in nodirs:
+ if p == x or p.relto(x):
+ break
+ else:
+ relpath = relpath.replace(base.sep, '.')
+ modpath = 'py.%s' % relpath
+ try:
+ check_import(modpath)
+ except py.test.skip.Exception:
+ pass
+
+
+def check_import(modpath):
+ py.builtin.print_("checking import", modpath)
+ assert __import__(modpath)
+
+
+def test_star_import():
+ exec("from py import *")
+
+
+def test_all_resolves():
+ seen = py.builtin.set([py])
+ lastlength = None
+ while len(seen) != lastlength:
+ lastlength = len(seen)
+ for item in py.builtin.frozenset(seen):
+ for value in item.__dict__.values():
+ if isinstance(value, type(py.test)):
+ seen.add(value)
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/root/test_std.py b/testing/web-platform/tests/tools/third_party/py/testing/root/test_std.py
new file mode 100644
index 0000000000..143556a055
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/root/test_std.py
@@ -0,0 +1,13 @@
+
+import py
+
+def test_os():
+ import os
+ assert py.std.os is os
+
+def test_import_error_converts_to_attributeerror():
+ py.test.raises(AttributeError, "py.std.xyzalskdj")
+
+def test_std_gets_it():
+ for x in py.std.sys.modules:
+ assert x in py.std.__dict__
diff --git a/testing/web-platform/tests/tools/third_party/py/testing/root/test_xmlgen.py b/testing/web-platform/tests/tools/third_party/py/testing/root/test_xmlgen.py
new file mode 100644
index 0000000000..fc0e82665f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/testing/root/test_xmlgen.py
@@ -0,0 +1,146 @@
+
+import py
+from py._xmlgen import unicode, html, raw
+import sys
+
+class ns(py.xml.Namespace):
+ pass
+
+def test_escape():
+ uvalue = py.builtin._totext('\xc4\x85\xc4\x87\n\xe2\x82\xac\n', 'utf-8')
+ class A:
+ def __unicode__(self):
+ return uvalue
+ def __str__(self):
+ x = self.__unicode__()
+ if sys.version_info[0] < 3:
+ return x.encode('utf-8')
+ return x
+ y = py.xml.escape(uvalue)
+ assert y == uvalue
+ x = py.xml.escape(A())
+ assert x == uvalue
+ if sys.version_info[0] < 3:
+ assert isinstance(x, unicode)
+ assert isinstance(y, unicode)
+ y = py.xml.escape(uvalue.encode('utf-8'))
+ assert y == uvalue
+
+
+def test_tag_with_text():
+ x = ns.hello("world")
+ u = unicode(x)
+ assert u == "<hello>world</hello>"
+
+def test_class_identity():
+ assert ns.hello is ns.hello
+
+def test_tag_with_text_and_attributes():
+ x = ns.some(name="hello", value="world")
+ assert x.attr.name == 'hello'
+ assert x.attr.value == 'world'
+ u = unicode(x)
+ assert u == '<some name="hello" value="world"/>'
+
+def test_tag_with_subclassed_attr_simple():
+ class my(ns.hello):
+ class Attr(ns.hello.Attr):
+ hello="world"
+ x = my()
+ assert x.attr.hello == 'world'
+ assert unicode(x) == '<my hello="world"/>'
+
+def test_tag_with_raw_attr():
+ x = html.object(data=raw('&'))
+ assert unicode(x) == '<object data="&"></object>'
+
+def test_tag_nested():
+ x = ns.hello(ns.world())
+ unicode(x) # triggers parentifying
+ assert x[0].parent is x
+ u = unicode(x)
+ assert u == '<hello><world/></hello>'
+
+def test_list_nested():
+ x = ns.hello([ns.world()]) #pass in a list here
+ u = unicode(x)
+ assert u == '<hello><world/></hello>'
+
+def test_tag_xmlname():
+ class my(ns.hello):
+ xmlname = 'world'
+ u = unicode(my())
+ assert u == '<world/>'
+
+def test_tag_with_text_entity():
+ x = ns.hello('world & rest')
+ u = unicode(x)
+ assert u == "<hello>world &amp; rest</hello>"
+
+def test_tag_with_text_and_attributes_entity():
+ x = ns.some(name="hello & world")
+ assert x.attr.name == "hello & world"
+ u = unicode(x)
+ assert u == '<some name="hello &amp; world"/>'
+
+def test_raw():
+ x = ns.some(py.xml.raw("<p>literal</p>"))
+ u = unicode(x)
+ assert u == "<some><p>literal</p></some>"
+
+
+def test_html_name_stickyness():
+ class my(html.p):
+ pass
+ x = my("hello")
+ assert unicode(x) == '<p>hello</p>'
+
+def test_stylenames():
+ class my:
+ class body(html.body):
+ style = html.Style(font_size = "12pt")
+ u = unicode(my.body())
+ assert u == '<body style="font-size: 12pt"></body>'
+
+def test_class_None():
+ t = html.body(class_=None)
+ u = unicode(t)
+ assert u == '<body></body>'
+
+def test_alternating_style():
+ alternating = (
+ html.Style(background="white"),
+ html.Style(background="grey"),
+ )
+ class my(html):
+ class li(html.li):
+ def style(self):
+ i = self.parent.index(self)
+ return alternating[i%2]
+ style = property(style)
+
+ x = my.ul(
+ my.li("hello"),
+ my.li("world"),
+ my.li("42"))
+ u = unicode(x)
+ assert u == ('<ul><li style="background: white">hello</li>'
+ '<li style="background: grey">world</li>'
+ '<li style="background: white">42</li>'
+ '</ul>')
+
+def test_singleton():
+ h = html.head(html.link(href="foo"))
+ assert unicode(h) == '<head><link href="foo"/></head>'
+
+ h = html.head(html.script(src="foo"))
+ assert unicode(h) == '<head><script src="foo"></script></head>'
+
+def test_inline():
+ h = html.div(html.span('foo'), html.span('bar'))
+ assert (h.unicode(indent=2) ==
+ '<div><span>foo</span><span>bar</span></div>')
+
+def test_object_tags():
+ o = html.object(html.object())
+ assert o.unicode(indent=0) == '<object><object></object></object>'
diff --git a/testing/web-platform/tests/tools/third_party/py/tox.ini b/testing/web-platform/tests/tools/third_party/py/tox.ini
new file mode 100644
index 0000000000..f3203507fd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/py/tox.ini
@@ -0,0 +1,44 @@
+[tox]
+# Skip py37-pytest29 as such a combination does not work (#192)
+envlist=py{27,35,36}-pytest{29,30,31},py37-pytest{30,31}
+
+[testenv]
+commands=
+ pip install -U . # handle the install order fallout since pytest depends on pip
+ py.test --confcutdir=. --junitxml={envlogdir}/junit-{envname}.xml []
+deps=
+ attrs
+ pytest29: pytest~=2.9.0
+ pytest30: pytest~=3.0.0
+ pytest31: pytest~=3.1.0
+
+[testenv:py27-xdist]
+basepython=python2.7
+deps=
+ pytest~=2.9.0
+ pytest-xdist<=1.16.0
+commands=
+ pip install -U .. # handle the install order fallout since pytest depends on pip
+ py.test -n3 --confcutdir=.. --runslowtests \
+ --junitxml={envlogdir}/junit-{envname}.xml []
+
+[testenv:jython]
+changedir=testing
+commands=
+ {envpython} -m pip install -U .. # handle the install order fallout since pytest depends on pip
+ {envpython} -m pytest --confcutdir=.. --junitxml={envlogdir}/junit-{envname}0.xml {posargs:io_ code}
+
+[pytest]
+rsyncdirs = conftest.py py doc testing
+addopts = -ra
+testpaths = testing
+
+[coverage:run]
+branch = 1
+source = .
+parallel = 1
+[coverage:report]
+include = py/*,testing/*
+exclude_lines =
+ #\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(cover|COVER)
+ ^\s*raise NotImplementedError\b
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/LICENSE b/testing/web-platform/tests/tools/third_party/pytest-asyncio/LICENSE
new file mode 100644
index 0000000000..e06d208186
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/LICENSE
@@ -0,0 +1,202 @@
+Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/PKG-INFO b/testing/web-platform/tests/tools/third_party/pytest-asyncio/PKG-INFO
new file mode 100644
index 0000000000..bb7611bc1a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/PKG-INFO
@@ -0,0 +1,302 @@
+Metadata-Version: 2.1
+Name: pytest-asyncio
+Version: 0.14.0
+Summary: Pytest support for asyncio.
+Home-page: https://github.com/pytest-dev/pytest-asyncio
+Author: Tin Tvrtković
+Author-email: tinchester@gmail.com
+License: Apache 2.0
+Description: pytest-asyncio: pytest support for asyncio
+ ==========================================
+
+ .. image:: https://img.shields.io/pypi/v/pytest-asyncio.svg
+ :target: https://pypi.python.org/pypi/pytest-asyncio
+ .. image:: https://travis-ci.org/pytest-dev/pytest-asyncio.svg?branch=master
+ :target: https://travis-ci.org/pytest-dev/pytest-asyncio
+ .. image:: https://coveralls.io/repos/pytest-dev/pytest-asyncio/badge.svg
+ :target: https://coveralls.io/r/pytest-dev/pytest-asyncio
+ .. image:: https://img.shields.io/pypi/pyversions/pytest-asyncio.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio
+ :alt: Supported Python versions
+
+ pytest-asyncio is an Apache2 licensed library, written in Python, for testing
+ asyncio code with pytest.
+
+ asyncio code is usually written in the form of coroutines, which makes it
+ slightly more difficult to test using normal testing tools. pytest-asyncio
+ provides useful fixtures and markers to make testing easier.
+
+ .. code-block:: python
+
+ @pytest.mark.asyncio
+ async def test_some_asyncio_code():
+ res = await library.do_something()
+ assert b'expected result' == res
+
+ pytest-asyncio has been strongly influenced by pytest-tornado_.
+
+ .. _pytest-tornado: https://github.com/eugeniy/pytest-tornado
+
+ Features
+ --------
+
+ - fixtures for creating and injecting versions of the asyncio event loop
+ - fixtures for injecting unused tcp ports
+ - pytest markers for treating tests as asyncio coroutines
+ - easy testing with non-default event loops
+ - support for `async def` fixtures and async generator fixtures
+
+ Installation
+ ------------
+
+ To install pytest-asyncio, simply:
+
+ .. code-block:: bash
+
+ $ pip install pytest-asyncio
+
+ This is enough for pytest to pick up pytest-asyncio.
+
+ Fixtures
+ --------
+
+ ``event_loop``
+ ~~~~~~~~~~~~~~
+ Creates and injects a new instance of the default asyncio event loop. By
+ default, the loop will be closed at the end of the test (i.e. the default
+ fixture scope is ``function``).
+
+ Note that just using the ``event_loop`` fixture won't make your test function
+ a coroutine. You'll need to interact with the event loop directly, using methods
+ like ``event_loop.run_until_complete``. See the ``pytest.mark.asyncio`` marker
+ for treating test functions like coroutines.
+
+ Simply using this fixture will not set the generated event loop as the
+ default asyncio event loop, or change the asyncio event loop policy in any way.
+ Use ``pytest.mark.asyncio`` for this purpose.
+
+ .. code-block:: python
+
+ def test_http_client(event_loop):
+ url = 'http://httpbin.org/get'
+ resp = event_loop.run_until_complete(http_client(url))
+ assert b'HTTP/1.1 200 OK' in resp
+
+ This fixture can be easily overridden in any of the standard pytest locations
+ (e.g. directly in the test file, or in ``conftest.py``) to use a non-default
+ event loop. This will take effect even if you're using the
+ ``pytest.mark.asyncio`` marker and not the ``event_loop`` fixture directly.
+
+ .. code-block:: python
+
+ @pytest.fixture
+ def event_loop():
+ loop = MyCustomLoop()
+ yield loop
+ loop.close()
+
+ If the ``pytest.mark.asyncio`` marker is applied, a pytest hook will
+ ensure the produced loop is set as the default global loop.
+ Fixtures depending on the ``event_loop`` fixture can expect the policy to be properly modified when they run.
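+
+ As a minimal sketch of what that guarantee means in practice (``current_loop``
+ is an illustrative fixture name, not something the plugin provides), a fixture
+ that depends on ``event_loop`` can rely on the policy already pointing at it:
+
+ .. code-block:: python
+
+     import asyncio
+     import pytest
+
+     @pytest.fixture
+     def current_loop(event_loop):
+         # for a test marked with pytest.mark.asyncio, the plugin has already
+         # installed the fixture's loop as the policy's current event loop
+         assert asyncio.get_event_loop() is event_loop
+         return event_loop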
+
+ ``unused_tcp_port``
+ ~~~~~~~~~~~~~~~~~~~
+ Finds and yields a single unused TCP port on the localhost interface. Useful for
+ binding temporary test servers.
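+
+ For example, the port can be handed straight to a throwaway server (a sketch;
+ ``asyncio.start_server`` and the no-op handler are only illustrations, not part
+ of the plugin):
+
+ .. code-block:: python
+
+     import asyncio
+     import pytest
+
+     @pytest.mark.asyncio
+     async def test_server_binds(unused_tcp_port):
+         async def handler(reader, writer):
+             writer.close()
+
+         server = await asyncio.start_server(handler, '127.0.0.1', unused_tcp_port)
+         assert server.sockets  # the port was free, so binding succeeded
+         server.close()
+         await server.wait_closed()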
+
+ ``unused_tcp_port_factory``
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ A callable which returns a different unused TCP port each invocation. Useful
+ when several unused TCP ports are required in a test.
+
+ .. code-block:: python
+
+ def a_test(unused_tcp_port_factory):
+ port1, port2 = unused_tcp_port_factory(), unused_tcp_port_factory()
+ ...
+
+ Async fixtures
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ Asynchronous fixtures are defined just like ordinary pytest fixtures, except they should be coroutines or asynchronous generators.
+
+ .. code-block:: python3
+
+ @pytest.fixture
+ async def async_gen_fixture():
+ await asyncio.sleep(0.1)
+ yield 'a value'
+
+ @pytest.fixture(scope='module')
+ async def async_fixture():
+ return await asyncio.sleep(0.1)
+
+ All scopes are supported, but if you use a non-function scope you will need
+ to redefine the ``event_loop`` fixture to have the same or broader scope.
+ Async fixtures need the event loop, and so must have the same or narrower scope
+ than the ``event_loop`` fixture.
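+
+ For instance, a module-scoped async generator fixture might be paired with a
+ module-scoped ``event_loop`` override along these lines (a sketch; the override
+ simply widens the scope of the stock fixture):
+
+ .. code-block:: python
+
+     import asyncio
+     import pytest
+
+     @pytest.fixture(scope='module')
+     def event_loop():
+         # broaden the loop's lifetime so module-scoped async fixtures can use it
+         loop = asyncio.new_event_loop()
+         yield loop
+         loop.close()
+
+     @pytest.fixture(scope='module')
+     async def shared_resource():
+         await asyncio.sleep(0.1)
+         yield 'a value'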
+
+ If you want to do this with Python 3.5, the ``yield`` statement must be replaced with ``await yield_()`` and the coroutine
+ function must be decorated with ``@async_generator``, like so:
+
+ .. code-block:: python3
+
+ from async_generator import yield_, async_generator
+
+ @pytest.fixture
+ @async_generator
+ async def async_gen_fixture():
+ await asyncio.sleep(0.1)
+ await yield_('a value')
+
+
+ Markers
+ -------
+
+ ``pytest.mark.asyncio``
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ Mark your test coroutine with this marker and pytest will execute it as an
+ asyncio task using the event loop provided by the ``event_loop`` fixture. See
+ the introductory section for an example.
+
+ The event loop used can be overridden by overriding the ``event_loop`` fixture
+ (see above).
+
+ In order to make your test code a little more concise, the pytest |pytestmark|_
+ feature can be used to mark entire modules or classes with this marker.
+ Only test coroutines will be affected (by default, coroutines prefixed by
+ ``test_``), so, for example, fixtures are safe to define.
+
+ .. code-block:: python
+
+ import asyncio
+ import pytest
+
+ # All test coroutines will be treated as marked.
+ pytestmark = pytest.mark.asyncio
+
+ async def test_example(event_loop):
+ """No marker!"""
+ await asyncio.sleep(0, loop=event_loop)
+
+ .. |pytestmark| replace:: ``pytestmark``
+ .. _pytestmark: http://doc.pytest.org/en/latest/example/markers.html#marking-whole-classes-or-modules
+
+ Changelog
+ ---------
+ 0.13.0 (2020-06-24)
+ ~~~~~~~~~~~~~~~~~~~
+ - Fix `#162 <https://github.com/pytest-dev/pytest-asyncio/issues/162>`_; the ``event_loop`` fixture behavior is now coherent across all scopes.
+ `#164 <https://github.com/pytest-dev/pytest-asyncio/pull/164>`_
+
+ 0.12.0 (2020-05-04)
+ ~~~~~~~~~~~~~~~~~~~
+ - Run the event loop fixture as soon as possible. This helps with fixtures that have an implicit dependency on the event loop.
+ `#156 <https://github.com/pytest-dev/pytest-asyncio/pull/156>`_
+
+ 0.11.0 (2020-04-20)
+ ~~~~~~~~~~~~~~~~~~~
+ - Test on 3.8, drop 3.3 and 3.4. Stick to 0.10 for these versions.
+ `#152 <https://github.com/pytest-dev/pytest-asyncio/pull/152>`_
+ - Use the new Pytest 5.4.0 Function API. We therefore depend on pytest >= 5.4.0.
+ `#142 <https://github.com/pytest-dev/pytest-asyncio/pull/142>`_
+ - Better ``pytest.skip`` support.
+ `#126 <https://github.com/pytest-dev/pytest-asyncio/pull/126>`_
+
+ 0.10.0 (2019-01-08)
+ ~~~~~~~~~~~~~~~~~~~~
+ - ``pytest-asyncio`` integrates with `Hypothesis <https://hypothesis.readthedocs.io>`_
+ to support ``@given`` on async test functions using ``asyncio``.
+ `#102 <https://github.com/pytest-dev/pytest-asyncio/pull/102>`_
+ - Pytest 4.1 support.
+ `#105 <https://github.com/pytest-dev/pytest-asyncio/pull/105>`_
+
+ 0.9.0 (2018-07-28)
+ ~~~~~~~~~~~~~~~~~~
+ - Python 3.7 support.
+ - Remove ``event_loop_process_pool`` fixture and
+ ``pytest.mark.asyncio_process_pool`` marker (see
+ https://bugs.python.org/issue34075 for deprecation and removal details)
+
+ 0.8.0 (2017-09-23)
+ ~~~~~~~~~~~~~~~~~~
+ - Improve integration with other packages (like aiohttp) with more careful event loop handling.
+ `#64 <https://github.com/pytest-dev/pytest-asyncio/pull/64>`_
+
+ 0.7.0 (2017-09-08)
+ ~~~~~~~~~~~~~~~~~~
+ - Python versions pre-3.6 can use the async_generator library for async fixtures.
+ `#62 <https://github.com/pytest-dev/pytest-asyncio/pull/62>`_
+
+
+ 0.6.0 (2017-05-28)
+ ~~~~~~~~~~~~~~~~~~
+ - Support for Python versions pre-3.5 has been dropped.
+ - ``pytestmark`` now works on both module and class level.
+ - The ``forbid_global_loop`` parameter has been removed.
+ - Support for async and async gen fixtures has been added.
+ `#45 <https://github.com/pytest-dev/pytest-asyncio/pull/45>`_
+ - The deprecation warning regarding ``asyncio.async()`` has been fixed.
+ `#51 <https://github.com/pytest-dev/pytest-asyncio/pull/51>`_
+
+ 0.5.0 (2016-09-07)
+ ~~~~~~~~~~~~~~~~~~
+ - Introduced a changelog.
+ `#31 <https://github.com/pytest-dev/pytest-asyncio/issues/31>`_
+ - The ``event_loop`` fixture is again responsible for closing itself.
+ This makes the fixture slightly harder to correctly override, but enables
+ other fixtures to depend on it correctly.
+ `#30 <https://github.com/pytest-dev/pytest-asyncio/issues/30>`_
+ - Deal with the event loop policy by wrapping a special pytest hook,
+ ``pytest_fixture_setup``. This allows setting the policy before fixtures
+ dependent on the ``event_loop`` fixture run, thus allowing them to take
+ advantage of the ``forbid_global_loop`` parameter. As a consequence of this,
+ we now depend on pytest 3.0.
+ `#29 <https://github.com/pytest-dev/pytest-asyncio/issues/29>`_
+
+
+ 0.4.1 (2016-06-01)
+ ~~~~~~~~~~~~~~~~~~
+ - Fix a bug preventing the propagation of exceptions from the plugin.
+ `#25 <https://github.com/pytest-dev/pytest-asyncio/issues/25>`_
+
+ 0.4.0 (2016-05-30)
+ ~~~~~~~~~~~~~~~~~~
+ - Make ``event_loop`` fixtures simpler to override by closing them in the
+ plugin, instead of directly in the fixture.
+ `#21 <https://github.com/pytest-dev/pytest-asyncio/pull/21>`_
+ - Introduce the ``forbid_global_loop`` parameter.
+ `#21 <https://github.com/pytest-dev/pytest-asyncio/pull/21>`_
+
+ 0.3.0 (2015-12-19)
+ ~~~~~~~~~~~~~~~~~~
+ - Support for Python 3.5 ``async``/``await`` syntax.
+ `#17 <https://github.com/pytest-dev/pytest-asyncio/pull/17>`_
+
+ 0.2.0 (2015-08-01)
+ ~~~~~~~~~~~~~~~~~~
+ - ``unused_tcp_port_factory`` fixture.
+ `#10 <https://github.com/pytest-dev/pytest-asyncio/issues/10>`_
+
+
+ 0.1.1 (2015-04-23)
+ ~~~~~~~~~~~~~~~~~~
+ Initial release.
+
+
+ Contributing
+ ------------
+ Contributions are very welcome. Tests can be run with ``tox``; please ensure
+ that coverage at least stays the same before you submit a pull request.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Topic :: Software Development :: Testing
+Classifier: Framework :: Pytest
+Requires-Python: >= 3.5
+Provides-Extra: testing
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/README.rst b/testing/web-platform/tests/tools/third_party/pytest-asyncio/README.rst
new file mode 100644
index 0000000000..6ea6014cce
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/README.rst
@@ -0,0 +1,281 @@
+pytest-asyncio: pytest support for asyncio
+==========================================
+
+.. image:: https://img.shields.io/pypi/v/pytest-asyncio.svg
+ :target: https://pypi.python.org/pypi/pytest-asyncio
+.. image:: https://travis-ci.org/pytest-dev/pytest-asyncio.svg?branch=master
+ :target: https://travis-ci.org/pytest-dev/pytest-asyncio
+.. image:: https://coveralls.io/repos/pytest-dev/pytest-asyncio/badge.svg
+ :target: https://coveralls.io/r/pytest-dev/pytest-asyncio
+.. image:: https://img.shields.io/pypi/pyversions/pytest-asyncio.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio
+ :alt: Supported Python versions
+
+pytest-asyncio is an Apache2 licensed library, written in Python, for testing
+asyncio code with pytest.
+
+asyncio code is usually written in the form of coroutines, which makes it
+slightly more difficult to test using normal testing tools. pytest-asyncio
+provides useful fixtures and markers to make testing easier.
+
+.. code-block:: python
+
+ @pytest.mark.asyncio
+ async def test_some_asyncio_code():
+ res = await library.do_something()
+ assert b'expected result' == res
+
+pytest-asyncio has been strongly influenced by pytest-tornado_.
+
+.. _pytest-tornado: https://github.com/eugeniy/pytest-tornado
+
+Features
+--------
+
+- fixtures for creating and injecting versions of the asyncio event loop
+- fixtures for injecting unused tcp ports
+- pytest markers for treating tests as asyncio coroutines
+- easy testing with non-default event loops
+- support for `async def` fixtures and async generator fixtures
+
+Installation
+------------
+
+To install pytest-asyncio, simply:
+
+.. code-block:: bash
+
+ $ pip install pytest-asyncio
+
+This is enough for pytest to pick up pytest-asyncio.
+
+Fixtures
+--------
+
+``event_loop``
+~~~~~~~~~~~~~~
+Creates and injects a new instance of the default asyncio event loop. By
+default, the loop will be closed at the end of the test (i.e. the default
+fixture scope is ``function``).
+
+Note that just using the ``event_loop`` fixture won't make your test function
+a coroutine. You'll need to interact with the event loop directly, using methods
+like ``event_loop.run_until_complete``. See the ``pytest.mark.asyncio`` marker
+for treating test functions like coroutines.
+
+Simply using this fixture will not set the generated event loop as the
+default asyncio event loop, or change the asyncio event loop policy in any way.
+Use ``pytest.mark.asyncio`` for this purpose.
+
+.. code-block:: python
+
+ def test_http_client(event_loop):
+ url = 'http://httpbin.org/get'
+ resp = event_loop.run_until_complete(http_client(url))
+ assert b'HTTP/1.1 200 OK' in resp
+
+This fixture can be easily overridden in any of the standard pytest locations
+(e.g. directly in the test file, or in ``conftest.py``) to use a non-default
+event loop. This will take effect even if you're using the
+``pytest.mark.asyncio`` marker and not the ``event_loop`` fixture directly.
+
+.. code-block:: python
+
+ @pytest.fixture
+ def event_loop():
+ loop = MyCustomLoop()
+ yield loop
+ loop.close()
+
+If the ``pytest.mark.asyncio`` marker is applied, a pytest hook will
+ensure the produced loop is set as the default global loop.
+Fixtures depending on the ``event_loop`` fixture can expect the policy to be properly modified when they run.
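+
+As a minimal sketch of what that guarantee means in practice (``current_loop``
+is an illustrative fixture name, not something the plugin provides), a fixture
+that depends on ``event_loop`` can rely on the policy already pointing at it:
+
+.. code-block:: python
+
+    import asyncio
+    import pytest
+
+    @pytest.fixture
+    def current_loop(event_loop):
+        # for a test marked with pytest.mark.asyncio, the plugin has already
+        # installed the fixture's loop as the policy's current event loop
+        assert asyncio.get_event_loop() is event_loop
+        return event_loop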
+
+``unused_tcp_port``
+~~~~~~~~~~~~~~~~~~~
+Finds and yields a single unused TCP port on the localhost interface. Useful for
+binding temporary test servers.
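+
+For example, the port can be handed straight to a throwaway server (a sketch;
+``asyncio.start_server`` and the no-op handler are only illustrations, not part
+of the plugin):
+
+.. code-block:: python
+
+    import asyncio
+    import pytest
+
+    @pytest.mark.asyncio
+    async def test_server_binds(unused_tcp_port):
+        async def handler(reader, writer):
+            writer.close()
+
+        server = await asyncio.start_server(handler, '127.0.0.1', unused_tcp_port)
+        assert server.sockets  # the port was free, so binding succeeded
+        server.close()
+        await server.wait_closed()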
+
+``unused_tcp_port_factory``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+A callable which returns a different unused TCP port each invocation. Useful
+when several unused TCP ports are required in a test.
+
+.. code-block:: python
+
+ def a_test(unused_tcp_port_factory):
+ port1, port2 = unused_tcp_port_factory(), unused_tcp_port_factory()
+ ...
+
+Async fixtures
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Asynchronous fixtures are defined just like ordinary pytest fixtures, except they should be coroutines or asynchronous generators.
+
+.. code-block:: python3
+
+ @pytest.fixture
+ async def async_gen_fixture():
+ await asyncio.sleep(0.1)
+ yield 'a value'
+
+ @pytest.fixture(scope='module')
+ async def async_fixture():
+ return await asyncio.sleep(0.1)
+
+All scopes are supported, but if you use a non-function scope you will need
+to redefine the ``event_loop`` fixture to have the same or broader scope.
+Async fixtures need the event loop, and so must have the same or narrower scope
+than the ``event_loop`` fixture.
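+
+For instance, a module-scoped async generator fixture might be paired with a
+module-scoped ``event_loop`` override along these lines (a sketch; the override
+simply widens the scope of the stock fixture):
+
+.. code-block:: python
+
+    import asyncio
+    import pytest
+
+    @pytest.fixture(scope='module')
+    def event_loop():
+        # broaden the loop's lifetime so module-scoped async fixtures can use it
+        loop = asyncio.new_event_loop()
+        yield loop
+        loop.close()
+
+    @pytest.fixture(scope='module')
+    async def shared_resource():
+        await asyncio.sleep(0.1)
+        yield 'a value'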
+
+If you want to do this with Python 3.5, the ``yield`` statement must be replaced with ``await yield_()`` and the coroutine
+function must be decorated with ``@async_generator``, like so:
+
+.. code-block:: python3
+
+ from async_generator import yield_, async_generator
+
+ @pytest.fixture
+ @async_generator
+ async def async_gen_fixture():
+ await asyncio.sleep(0.1)
+ await yield_('a value')
+
+
+Markers
+-------
+
+``pytest.mark.asyncio``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Mark your test coroutine with this marker and pytest will execute it as an
+asyncio task using the event loop provided by the ``event_loop`` fixture. See
+the introductory section for an example.
+
+The event loop used can be overridden by overriding the ``event_loop`` fixture
+(see above).
+
+In order to make your test code a little more concise, the pytest |pytestmark|_
+feature can be used to mark entire modules or classes with this marker.
+Only test coroutines will be affected (by default, coroutines prefixed by
+``test_``), so, for example, fixtures are safe to define.
+
+.. code-block:: python
+
+ import asyncio
+ import pytest
+
+ # All test coroutines will be treated as marked.
+ pytestmark = pytest.mark.asyncio
+
+ async def test_example(event_loop):
+ """No marker!"""
+ await asyncio.sleep(0, loop=event_loop)
+
+.. |pytestmark| replace:: ``pytestmark``
+.. _pytestmark: http://doc.pytest.org/en/latest/example/markers.html#marking-whole-classes-or-modules
+
+Changelog
+---------
+0.13.0 (2020-06-24)
+~~~~~~~~~~~~~~~~~~~
+- Fix `#162 <https://github.com/pytest-dev/pytest-asyncio/issues/162>`_; the ``event_loop`` fixture behavior is now coherent across all scopes.
+ `#164 <https://github.com/pytest-dev/pytest-asyncio/pull/164>`_
+
+0.12.0 (2020-05-04)
+~~~~~~~~~~~~~~~~~~~
+- Run the event loop fixture as soon as possible. This helps with fixtures that have an implicit dependency on the event loop.
+ `#156 <https://github.com/pytest-dev/pytest-asyncio/pull/156>`_
+
+0.11.0 (2020-04-20)
+~~~~~~~~~~~~~~~~~~~
+- Test on 3.8, drop 3.3 and 3.4. Stick to 0.10 for these versions.
+ `#152 <https://github.com/pytest-dev/pytest-asyncio/pull/152>`_
+- Use the new Pytest 5.4.0 Function API. We therefore depend on pytest >= 5.4.0.
+ `#142 <https://github.com/pytest-dev/pytest-asyncio/pull/142>`_
+- Better ``pytest.skip`` support.
+ `#126 <https://github.com/pytest-dev/pytest-asyncio/pull/126>`_
+
+0.10.0 (2019-01-08)
+~~~~~~~~~~~~~~~~~~~~
+- ``pytest-asyncio`` integrates with `Hypothesis <https://hypothesis.readthedocs.io>`_
+ to support ``@given`` on async test functions using ``asyncio``.
+ `#102 <https://github.com/pytest-dev/pytest-asyncio/pull/102>`_
+- Pytest 4.1 support.
+ `#105 <https://github.com/pytest-dev/pytest-asyncio/pull/105>`_
+
+0.9.0 (2018-07-28)
+~~~~~~~~~~~~~~~~~~
+- Python 3.7 support.
+- Remove ``event_loop_process_pool`` fixture and
+ ``pytest.mark.asyncio_process_pool`` marker (see
+ https://bugs.python.org/issue34075 for deprecation and removal details)
+
+0.8.0 (2017-09-23)
+~~~~~~~~~~~~~~~~~~
+- Improve integration with other packages (like aiohttp) with more careful event loop handling.
+ `#64 <https://github.com/pytest-dev/pytest-asyncio/pull/64>`_
+
+0.7.0 (2017-09-08)
+~~~~~~~~~~~~~~~~~~
+- Python versions pre-3.6 can use the async_generator library for async fixtures.
+ `#62 <https://github.com/pytest-dev/pytest-asyncio/pull/62>`_
+
+
+0.6.0 (2017-05-28)
+~~~~~~~~~~~~~~~~~~
+- Support for Python versions pre-3.5 has been dropped.
+- ``pytestmark`` now works on both module and class level.
+- The ``forbid_global_loop`` parameter has been removed.
+- Support for async and async gen fixtures has been added.
+ `#45 <https://github.com/pytest-dev/pytest-asyncio/pull/45>`_
+- The deprecation warning regarding ``asyncio.async()`` has been fixed.
+ `#51 <https://github.com/pytest-dev/pytest-asyncio/pull/51>`_
+
+0.5.0 (2016-09-07)
+~~~~~~~~~~~~~~~~~~
+- Introduced a changelog.
+ `#31 <https://github.com/pytest-dev/pytest-asyncio/issues/31>`_
+- The ``event_loop`` fixture is again responsible for closing itself.
+ This makes the fixture slightly harder to correctly override, but enables
+ other fixtures to depend on it correctly.
+ `#30 <https://github.com/pytest-dev/pytest-asyncio/issues/30>`_
+- Deal with the event loop policy by wrapping a special pytest hook,
+ ``pytest_fixture_setup``. This allows setting the policy before fixtures
+ dependent on the ``event_loop`` fixture run, thus allowing them to take
+ advantage of the ``forbid_global_loop`` parameter. As a consequence of this,
+ we now depend on pytest 3.0.
+ `#29 <https://github.com/pytest-dev/pytest-asyncio/issues/29>`_
+
+
+0.4.1 (2016-06-01)
+~~~~~~~~~~~~~~~~~~
+- Fix a bug preventing the propagation of exceptions from the plugin.
+ `#25 <https://github.com/pytest-dev/pytest-asyncio/issues/25>`_
+
+0.4.0 (2016-05-30)
+~~~~~~~~~~~~~~~~~~
+- Make ``event_loop`` fixtures simpler to override by closing them in the
+ plugin, instead of directly in the fixture.
+ `#21 <https://github.com/pytest-dev/pytest-asyncio/pull/21>`_
+- Introduce the ``forbid_global_loop`` parameter.
+ `#21 <https://github.com/pytest-dev/pytest-asyncio/pull/21>`_
+
+0.3.0 (2015-12-19)
+~~~~~~~~~~~~~~~~~~
+- Support for Python 3.5 ``async``/``await`` syntax.
+ `#17 <https://github.com/pytest-dev/pytest-asyncio/pull/17>`_
+
+0.2.0 (2015-08-01)
+~~~~~~~~~~~~~~~~~~
+- ``unused_tcp_port_factory`` fixture.
+ `#10 <https://github.com/pytest-dev/pytest-asyncio/issues/10>`_
+
+
+0.1.1 (2015-04-23)
+~~~~~~~~~~~~~~~~~~
+Initial release.
+
+
+Contributing
+------------
+Contributions are very welcome. Tests can be run with ``tox``; please ensure
+that coverage at least stays the same before you submit a pull request.
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/PKG-INFO b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/PKG-INFO
new file mode 100644
index 0000000000..bb7611bc1a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/PKG-INFO
@@ -0,0 +1,302 @@
+Metadata-Version: 2.1
+Name: pytest-asyncio
+Version: 0.14.0
+Summary: Pytest support for asyncio.
+Home-page: https://github.com/pytest-dev/pytest-asyncio
+Author: Tin Tvrtković
+Author-email: tinchester@gmail.com
+License: Apache 2.0
+Description: pytest-asyncio: pytest support for asyncio
+ ==========================================
+
+ .. image:: https://img.shields.io/pypi/v/pytest-asyncio.svg
+ :target: https://pypi.python.org/pypi/pytest-asyncio
+ .. image:: https://travis-ci.org/pytest-dev/pytest-asyncio.svg?branch=master
+ :target: https://travis-ci.org/pytest-dev/pytest-asyncio
+ .. image:: https://coveralls.io/repos/pytest-dev/pytest-asyncio/badge.svg
+ :target: https://coveralls.io/r/pytest-dev/pytest-asyncio
+ .. image:: https://img.shields.io/pypi/pyversions/pytest-asyncio.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio
+ :alt: Supported Python versions
+
+ pytest-asyncio is an Apache2 licensed library, written in Python, for testing
+ asyncio code with pytest.
+
+ asyncio code is usually written in the form of coroutines, which makes it
+ slightly more difficult to test using normal testing tools. pytest-asyncio
+ provides useful fixtures and markers to make testing easier.
+
+ .. code-block:: python
+
+ @pytest.mark.asyncio
+ async def test_some_asyncio_code():
+ res = await library.do_something()
+ assert b'expected result' == res
+
+ pytest-asyncio has been strongly influenced by pytest-tornado_.
+
+ .. _pytest-tornado: https://github.com/eugeniy/pytest-tornado
+
+ Features
+ --------
+
+ - fixtures for creating and injecting versions of the asyncio event loop
+ - fixtures for injecting unused tcp ports
+ - pytest markers for treating tests as asyncio coroutines
+ - easy testing with non-default event loops
+ - support for `async def` fixtures and async generator fixtures
+
+ Installation
+ ------------
+
+ To install pytest-asyncio, simply:
+
+ .. code-block:: bash
+
+ $ pip install pytest-asyncio
+
+ This is enough for pytest to pick up pytest-asyncio.
+
+ Fixtures
+ --------
+
+ ``event_loop``
+ ~~~~~~~~~~~~~~
+ Creates and injects a new instance of the default asyncio event loop. By
+ default, the loop will be closed at the end of the test (i.e. the default
+ fixture scope is ``function``).
+
+ Note that just using the ``event_loop`` fixture won't make your test function
+ a coroutine. You'll need to interact with the event loop directly, using methods
+ like ``event_loop.run_until_complete``. See the ``pytest.mark.asyncio`` marker
+ for treating test functions like coroutines.
+
+ Simply using this fixture will not set the generated event loop as the
+ default asyncio event loop, or change the asyncio event loop policy in any way.
+ Use ``pytest.mark.asyncio`` for this purpose.
+
+ .. code-block:: python
+
+ def test_http_client(event_loop):
+ url = 'http://httpbin.org/get'
+ resp = event_loop.run_until_complete(http_client(url))
+ assert b'HTTP/1.1 200 OK' in resp
+
+ This fixture can be easily overridden in any of the standard pytest locations
+ (e.g. directly in the test file, or in ``conftest.py``) to use a non-default
+ event loop. This will take effect even if you're using the
+ ``pytest.mark.asyncio`` marker and not the ``event_loop`` fixture directly.
+
+ .. code-block:: python
+
+ @pytest.fixture
+ def event_loop():
+ loop = MyCustomLoop()
+ yield loop
+ loop.close()
+
+ If the ``pytest.mark.asyncio`` marker is applied, a pytest hook will
+ ensure the produced loop is set as the default global loop.
+ Fixtures depending on the ``event_loop`` fixture can expect the policy to be properly modified when they run.
+
+ ``unused_tcp_port``
+ ~~~~~~~~~~~~~~~~~~~
+ Finds and yields a single unused TCP port on the localhost interface. Useful for
+ binding temporary test servers.
+
+ ``unused_tcp_port_factory``
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ A callable which returns a different unused TCP port each invocation. Useful
+ when several unused TCP ports are required in a test.
+
+ .. code-block:: python
+
+ def a_test(unused_tcp_port_factory):
+ port1, port2 = unused_tcp_port_factory(), unused_tcp_port_factory()
+ ...
+
+ Async fixtures
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ Asynchronous fixtures are defined just like ordinary pytest fixtures, except they should be coroutines or asynchronous generators.
+
+ .. code-block:: python3
+
+ @pytest.fixture
+ async def async_gen_fixture():
+ await asyncio.sleep(0.1)
+ yield 'a value'
+
+ @pytest.fixture(scope='module')
+ async def async_fixture():
+ return await asyncio.sleep(0.1)
+
+ All scopes are supported, but if you use a non-function scope you will need
+ to redefine the ``event_loop`` fixture to have the same or broader scope.
+ Async fixtures need the event loop, and so must have the same or narrower scope
+ than the ``event_loop`` fixture.
+
+ If you want to do this with Python 3.5, the ``yield`` statement must be replaced with ``await yield_()`` and the coroutine
+ function must be decorated with ``@async_generator``, like so:
+
+ .. code-block:: python3
+
+ from async_generator import yield_, async_generator
+
+ @pytest.fixture
+ @async_generator
+ async def async_gen_fixture():
+ await asyncio.sleep(0.1)
+ await yield_('a value')
+
+
+ Markers
+ -------
+
+ ``pytest.mark.asyncio``
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ Mark your test coroutine with this marker and pytest will execute it as an
+ asyncio task using the event loop provided by the ``event_loop`` fixture. See
+ the introductory section for an example.
+
+ The event loop used can be overridden by overriding the ``event_loop`` fixture
+ (see above).
+
+ In order to make your test code a little more concise, the pytest |pytestmark|_
+ feature can be used to mark entire modules or classes with this marker.
+ Only test coroutines will be affected (by default, coroutines prefixed by
+ ``test_``), so, for example, fixtures are safe to define.
+
+ .. code-block:: python
+
+ import asyncio
+ import pytest
+
+ # All test coroutines will be treated as marked.
+ pytestmark = pytest.mark.asyncio
+
+ async def test_example(event_loop):
+ """No marker!"""
+ await asyncio.sleep(0, loop=event_loop)
+
+ .. |pytestmark| replace:: ``pytestmark``
+ .. _pytestmark: http://doc.pytest.org/en/latest/example/markers.html#marking-whole-classes-or-modules
+
+ Changelog
+ ---------
+ 0.13.0 (2020-06-24)
+ ~~~~~~~~~~~~~~~~~~~
+ - Fix `#162 <https://github.com/pytest-dev/pytest-asyncio/issues/162>`_; the ``event_loop`` fixture behavior is now coherent across all scopes.
+ `#164 <https://github.com/pytest-dev/pytest-asyncio/pull/164>`_
+
+ 0.12.0 (2020-05-04)
+ ~~~~~~~~~~~~~~~~~~~
+ - Run the event loop fixture as soon as possible. This helps with fixtures that have an implicit dependency on the event loop.
+ `#156 <https://github.com/pytest-dev/pytest-asyncio/pull/156>`_
+
+ 0.11.0 (2020-04-20)
+ ~~~~~~~~~~~~~~~~~~~
+ - Test on 3.8, drop 3.3 and 3.4. Stick to 0.10 for these versions.
+ `#152 <https://github.com/pytest-dev/pytest-asyncio/pull/152>`_
+ - Use the new Pytest 5.4.0 Function API. We therefore depend on pytest >= 5.4.0.
+ `#142 <https://github.com/pytest-dev/pytest-asyncio/pull/142>`_
+ - Better ``pytest.skip`` support.
+ `#126 <https://github.com/pytest-dev/pytest-asyncio/pull/126>`_
+
+ 0.10.0 (2019-01-08)
+ ~~~~~~~~~~~~~~~~~~~~
+ - ``pytest-asyncio`` integrates with `Hypothesis <https://hypothesis.readthedocs.io>`_
+ to support ``@given`` on async test functions using ``asyncio``.
+ `#102 <https://github.com/pytest-dev/pytest-asyncio/pull/102>`_
+ - Pytest 4.1 support.
+ `#105 <https://github.com/pytest-dev/pytest-asyncio/pull/105>`_
+
+ 0.9.0 (2018-07-28)
+ ~~~~~~~~~~~~~~~~~~
+ - Python 3.7 support.
+ - Remove ``event_loop_process_pool`` fixture and
+ ``pytest.mark.asyncio_process_pool`` marker (see
+ https://bugs.python.org/issue34075 for deprecation and removal details)
+
+ 0.8.0 (2017-09-23)
+ ~~~~~~~~~~~~~~~~~~
+ - Improve integration with other packages (like aiohttp) with more careful event loop handling.
+ `#64 <https://github.com/pytest-dev/pytest-asyncio/pull/64>`_
+
+ 0.7.0 (2017-09-08)
+ ~~~~~~~~~~~~~~~~~~
+ - Python versions pre-3.6 can use the async_generator library for async fixtures.
+          `#62 <https://github.com/pytest-dev/pytest-asyncio/pull/62>`_
+
+
+ 0.6.0 (2017-05-28)
+ ~~~~~~~~~~~~~~~~~~
+ - Support for Python versions pre-3.5 has been dropped.
+ - ``pytestmark`` now works on both module and class level.
+ - The ``forbid_global_loop`` parameter has been removed.
+ - Support for async and async gen fixtures has been added.
+ `#45 <https://github.com/pytest-dev/pytest-asyncio/pull/45>`_
+ - The deprecation warning regarding ``asyncio.async()`` has been fixed.
+ `#51 <https://github.com/pytest-dev/pytest-asyncio/pull/51>`_
+
+ 0.5.0 (2016-09-07)
+ ~~~~~~~~~~~~~~~~~~
+ - Introduced a changelog.
+ `#31 <https://github.com/pytest-dev/pytest-asyncio/issues/31>`_
+ - The ``event_loop`` fixture is again responsible for closing itself.
+ This makes the fixture slightly harder to correctly override, but enables
+ other fixtures to depend on it correctly.
+ `#30 <https://github.com/pytest-dev/pytest-asyncio/issues/30>`_
+ - Deal with the event loop policy by wrapping a special pytest hook,
+ ``pytest_fixture_setup``. This allows setting the policy before fixtures
+ dependent on the ``event_loop`` fixture run, thus allowing them to take
+ advantage of the ``forbid_global_loop`` parameter. As a consequence of this,
+ we now depend on pytest 3.0.
+ `#29 <https://github.com/pytest-dev/pytest-asyncio/issues/29>`_
+
+
+ 0.4.1 (2016-06-01)
+ ~~~~~~~~~~~~~~~~~~
+ - Fix a bug preventing the propagation of exceptions from the plugin.
+ `#25 <https://github.com/pytest-dev/pytest-asyncio/issues/25>`_
+
+ 0.4.0 (2016-05-30)
+ ~~~~~~~~~~~~~~~~~~
+ - Make ``event_loop`` fixtures simpler to override by closing them in the
+ plugin, instead of directly in the fixture.
+ `#21 <https://github.com/pytest-dev/pytest-asyncio/pull/21>`_
+ - Introduce the ``forbid_global_loop`` parameter.
+ `#21 <https://github.com/pytest-dev/pytest-asyncio/pull/21>`_
+
+ 0.3.0 (2015-12-19)
+ ~~~~~~~~~~~~~~~~~~
+ - Support for Python 3.5 ``async``/``await`` syntax.
+ `#17 <https://github.com/pytest-dev/pytest-asyncio/pull/17>`_
+
+ 0.2.0 (2015-08-01)
+ ~~~~~~~~~~~~~~~~~~
+ - ``unused_tcp_port_factory`` fixture.
+ `#10 <https://github.com/pytest-dev/pytest-asyncio/issues/10>`_
+
+
+ 0.1.1 (2015-04-23)
+ ~~~~~~~~~~~~~~~~~~
+ Initial release.
+
+
+ Contributing
+ ------------
+        Contributions are very welcome. Tests can be run with ``tox``; please make sure
+        that coverage at least stays the same before you submit a pull request.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Topic :: Software Development :: Testing
+Classifier: Framework :: Pytest
+Requires-Python: >= 3.5
+Provides-Extra: testing
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/SOURCES.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..40cd5d7c7f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/SOURCES.txt
@@ -0,0 +1,12 @@
+LICENSE
+README.rst
+setup.cfg
+setup.py
+pytest_asyncio/__init__.py
+pytest_asyncio/plugin.py
+pytest_asyncio.egg-info/PKG-INFO
+pytest_asyncio.egg-info/SOURCES.txt
+pytest_asyncio.egg-info/dependency_links.txt
+pytest_asyncio.egg-info/entry_points.txt
+pytest_asyncio.egg-info/requires.txt
+pytest_asyncio.egg-info/top_level.txt \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/dependency_links.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/entry_points.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/entry_points.txt
new file mode 100644
index 0000000000..69c53be38a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/entry_points.txt
@@ -0,0 +1,3 @@
+[pytest11]
+asyncio = pytest_asyncio.plugin
+
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/requires.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/requires.txt
new file mode 100644
index 0000000000..d546f122ad
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/requires.txt
@@ -0,0 +1,9 @@
+pytest>=5.4.0
+
+[:python_version == "3.5"]
+async_generator>=1.3
+
+[testing]
+async_generator>=1.3
+coverage
+hypothesis>=5.7.1
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/top_level.txt b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/top_level.txt
new file mode 100644
index 0000000000..08d05d1ecf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio.egg-info/top_level.txt
@@ -0,0 +1 @@
+pytest_asyncio
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/__init__.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/__init__.py
new file mode 100644
index 0000000000..61c5f43b95
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/__init__.py
@@ -0,0 +1,2 @@
+"""The main point for importing pytest-asyncio items."""
+__version__ = "0.14.0"
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/plugin.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/plugin.py
new file mode 100644
index 0000000000..2fdc5f4e77
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/pytest_asyncio/plugin.py
@@ -0,0 +1,240 @@
+"""pytest-asyncio implementation."""
+import asyncio
+import contextlib
+import functools
+import inspect
+import socket
+
+import pytest
+try:
+ from _pytest.python import transfer_markers
+except ImportError: # Pytest 4.1.0 removes the transfer_marker api (#104)
+ def transfer_markers(*args, **kwargs): # noqa
+        """No-op on pytest 4.1.0 and newer."""
+ pass
+
+try:
+ from async_generator import isasyncgenfunction
+except ImportError:
+ from inspect import isasyncgenfunction
+
+
+def _is_coroutine(obj):
+ """Check to see if an object is really an asyncio coroutine."""
+ return asyncio.iscoroutinefunction(obj) or inspect.isgeneratorfunction(obj)
+
+
+def pytest_configure(config):
+ """Inject documentation."""
+ config.addinivalue_line("markers",
+ "asyncio: "
+ "mark the test as a coroutine, it will be "
+ "run using an asyncio event loop")
+
+
+@pytest.mark.tryfirst
+def pytest_pycollect_makeitem(collector, name, obj):
+ """A pytest hook to collect asyncio coroutines."""
+ if collector.funcnamefilter(name) and _is_coroutine(obj):
+ item = pytest.Function.from_parent(collector, name=name)
+
+ # Due to how pytest test collection works, module-level pytestmarks
+ # are applied after the collection step. Since this is the collection
+ # step, we look ourselves.
+ transfer_markers(obj, item.cls, item.module)
+ item = pytest.Function.from_parent(collector, name=name) # To reload keywords.
+
+ if 'asyncio' in item.keywords:
+ return list(collector._genfunctions(name, obj))
+
+
+class FixtureStripper:
+    """Include additional fixtures, then strip them."""
+ REQUEST = "request"
+ EVENT_LOOP = "event_loop"
+
+ def __init__(self, fixturedef):
+ self.fixturedef = fixturedef
+ self.to_strip = set()
+
+ def add(self, name):
+        """Add a fixture name to the fixturedef
+        and record it in the to_strip set (if not already present)."""
+ if name in self.fixturedef.argnames:
+ return
+ self.fixturedef.argnames += (name, )
+ self.to_strip.add(name)
+
+ def get_and_strip_from(self, name, data_dict):
+ """Strip name from data, and return value"""
+ result = data_dict[name]
+ if name in self.to_strip:
+ del data_dict[name]
+ return result
+
+@pytest.hookimpl(trylast=True)
+def pytest_fixture_post_finalizer(fixturedef, request):
+ """Called after fixture teardown"""
+ if fixturedef.argname == "event_loop":
+        # Reset the loop policy to the default, so that a subsequent
+        # asyncio.get_event_loop() call creates a fresh loop.
+ asyncio.set_event_loop_policy(None)
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_fixture_setup(fixturedef, request):
+ """Adjust the event loop policy when an event loop is produced."""
+ if fixturedef.argname == "event_loop":
+ outcome = yield
+ loop = outcome.get_result()
+ policy = asyncio.get_event_loop_policy()
+ policy.set_event_loop(loop)
+ return
+
+ if isasyncgenfunction(fixturedef.func):
+ # This is an async generator function. Wrap it accordingly.
+ generator = fixturedef.func
+
+ fixture_stripper = FixtureStripper(fixturedef)
+ fixture_stripper.add(FixtureStripper.EVENT_LOOP)
+ fixture_stripper.add(FixtureStripper.REQUEST)
+
+
+ def wrapper(*args, **kwargs):
+ loop = fixture_stripper.get_and_strip_from(FixtureStripper.EVENT_LOOP, kwargs)
+ request = fixture_stripper.get_and_strip_from(FixtureStripper.REQUEST, kwargs)
+
+ gen_obj = generator(*args, **kwargs)
+
+ async def setup():
+ res = await gen_obj.__anext__()
+ return res
+
+ def finalizer():
+ """Yield again, to finalize."""
+ async def async_finalizer():
+ try:
+ await gen_obj.__anext__()
+ except StopAsyncIteration:
+ pass
+ else:
+                        msg = "Async generator fixture didn't stop."
+                        msg += " Yield only once."
+ raise ValueError(msg)
+ loop.run_until_complete(async_finalizer())
+
+ request.addfinalizer(finalizer)
+ return loop.run_until_complete(setup())
+
+ fixturedef.func = wrapper
+ elif inspect.iscoroutinefunction(fixturedef.func):
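+        # This is a coroutine fixture (async def without yield). Run it to
+        # completion on the event loop and use its return value as the fixture value.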
+ coro = fixturedef.func
+
+ fixture_stripper = FixtureStripper(fixturedef)
+ fixture_stripper.add(FixtureStripper.EVENT_LOOP)
+
+ def wrapper(*args, **kwargs):
+ loop = fixture_stripper.get_and_strip_from(FixtureStripper.EVENT_LOOP, kwargs)
+
+ async def setup():
+ res = await coro(*args, **kwargs)
+ return res
+
+ return loop.run_until_complete(setup())
+
+ fixturedef.func = wrapper
+ yield
+
+
+@pytest.hookimpl(tryfirst=True, hookwrapper=True)
+def pytest_pyfunc_call(pyfuncitem):
+ """
+ Run asyncio marked test functions in an event loop instead of a normal
+ function call.
+ """
+ if 'asyncio' in pyfuncitem.keywords:
+ if getattr(pyfuncitem.obj, 'is_hypothesis_test', False):
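+            # Hypothesis wraps the original test; wrap its inner test instead so
+            # each generated example runs to completion on the provided loop.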
+ pyfuncitem.obj.hypothesis.inner_test = wrap_in_sync(
+ pyfuncitem.obj.hypothesis.inner_test,
+ _loop=pyfuncitem.funcargs['event_loop']
+ )
+ else:
+ pyfuncitem.obj = wrap_in_sync(
+ pyfuncitem.obj,
+ _loop=pyfuncitem.funcargs['event_loop']
+ )
+ yield
+
+
+def wrap_in_sync(func, _loop):
+ """Return a sync wrapper around an async function executing it in the
+ current event loop."""
+
+ @functools.wraps(func)
+ def inner(**kwargs):
+ coro = func(**kwargs)
+ if coro is not None:
+ task = asyncio.ensure_future(coro, loop=_loop)
+ try:
+ _loop.run_until_complete(task)
+ except BaseException:
+ # run_until_complete doesn't get the result from exceptions
+ # that are not subclasses of `Exception`. Consume all
+ # exceptions to prevent asyncio's warning from logging.
+ if task.done() and not task.cancelled():
+ task.exception()
+ raise
+ return inner
+
+
+def pytest_runtest_setup(item):
+ if 'asyncio' in item.keywords:
+ # inject an event loop fixture for all async tests
+ if 'event_loop' in item.fixturenames:
+ item.fixturenames.remove('event_loop')
+ item.fixturenames.insert(0, 'event_loop')
+ if item.get_closest_marker("asyncio") is not None \
+ and not getattr(item.obj, 'hypothesis', False) \
+ and getattr(item.obj, 'is_hypothesis_test', False):
+ pytest.fail(
+ 'test function `%r` is using Hypothesis, but pytest-asyncio '
+ 'only works with Hypothesis 3.64.0 or later.' % item
+ )
+
+
+@pytest.fixture
+def event_loop(request):
+ """Create an instance of the default event loop for each test case."""
+ loop = asyncio.get_event_loop_policy().new_event_loop()
+ yield loop
+ loop.close()
+
+
+def _unused_tcp_port():
+ """Find an unused localhost TCP port from 1024-65535 and return it."""
+ with contextlib.closing(socket.socket()) as sock:
+ sock.bind(('127.0.0.1', 0))
+ return sock.getsockname()[1]
+
+
+@pytest.fixture
+def unused_tcp_port():
+ return _unused_tcp_port()
+
+
+@pytest.fixture
+def unused_tcp_port_factory():
+ """A factory function, producing different unused TCP ports."""
+ produced = set()
+
+ def factory():
+ """Return an unused port."""
+ port = _unused_tcp_port()
+
+ while port in produced:
+ port = _unused_tcp_port()
+
+ produced.add(port)
+
+ return port
+ return factory
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/setup.cfg b/testing/web-platform/tests/tools/third_party/pytest-asyncio/setup.cfg
new file mode 100644
index 0000000000..23a8eba2ce
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/setup.cfg
@@ -0,0 +1,18 @@
+[coverage:run]
+source = pytest_asyncio
+
+[coverage:report]
+show_missing = true
+
+[tool:pytest]
+addopts = -rsx --tb=short
+testpaths = tests
+filterwarnings = error
+
+[metadata]
+license_file = LICENSE
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/testing/web-platform/tests/tools/third_party/pytest-asyncio/setup.py b/testing/web-platform/tests/tools/third_party/pytest-asyncio/setup.py
new file mode 100644
index 0000000000..6175711350
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest-asyncio/setup.py
@@ -0,0 +1,54 @@
+import re
+from pathlib import Path
+
+from setuptools import setup, find_packages
+
+
+def find_version():
+ version_file = (
+ Path(__file__)
+ .parent.joinpath("pytest_asyncio", "__init__.py")
+ .read_text()
+ )
+ version_match = re.search(
+ r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M
+ )
+ if version_match:
+ return version_match.group(1)
+
+ raise RuntimeError("Unable to find version string.")
+
+
+setup(
+ name="pytest-asyncio",
+ version=find_version(),
+ packages=find_packages(),
+ url="https://github.com/pytest-dev/pytest-asyncio",
+ license="Apache 2.0",
+ author="Tin Tvrtković",
+ author_email="tinchester@gmail.com",
+ description="Pytest support for asyncio.",
+ long_description=Path(__file__).parent.joinpath("README.rst").read_text(),
+ classifiers=[
+ "Development Status :: 4 - Beta",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Topic :: Software Development :: Testing",
+ "Framework :: Pytest",
+ ],
+ python_requires=">= 3.5",
+ install_requires=["pytest >= 5.4.0"],
+ extras_require={
+ ':python_version == "3.5"': "async_generator >= 1.3",
+ "testing": [
+ "coverage",
+ "async_generator >= 1.3",
+ "hypothesis >= 5.7.1",
+ ],
+ },
+ entry_points={"pytest11": ["asyncio = pytest_asyncio.plugin"]},
+)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.coveragerc b/testing/web-platform/tests/tools/third_party/pytest/.coveragerc
new file mode 100644
index 0000000000..a335557d4f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.coveragerc
@@ -0,0 +1,31 @@
+[run]
+include =
+ src/*
+ testing/*
+ */lib/python*/site-packages/_pytest/*
+ */lib/python*/site-packages/pytest.py
+ */pypy*/site-packages/_pytest/*
+ */pypy*/site-packages/pytest.py
+ *\Lib\site-packages\_pytest\*
+ *\Lib\site-packages\pytest.py
+parallel = 1
+branch = 1
+
+[paths]
+source = src/
+ */lib/python*/site-packages/
+ */pypy*/site-packages/
+ *\Lib\site-packages\
+
+[report]
+skip_covered = True
+show_missing = True
+exclude_lines =
+ \#\s*pragma: no cover
+ ^\s*raise NotImplementedError\b
+ ^\s*return NotImplemented\b
+ ^\s*assert False(,|$)
+ ^\s*assert_never\(
+
+ ^\s*if TYPE_CHECKING:
+ ^\s*@overload( |$)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.gitblameignore b/testing/web-platform/tests/tools/third_party/pytest/.gitblameignore
new file mode 100644
index 0000000000..0cb298b024
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.gitblameignore
@@ -0,0 +1,28 @@
+# List of revisions that can be ignored with git-blame(1).
+#
+# See `blame.ignoreRevsFile` in git-config(1) to enable it by default, or
+# use it with `--ignore-revs-file` manually with git-blame.
+#
+# To "install" it:
+#
+# git config --local blame.ignoreRevsFile .gitblameignore
+
+# run black
+703e4b11ba76171eccd3f13e723c47b810ded7ef
+# switched to src layout
+eaa882f3d5340956beb176aa1753e07e3f3f2190
+# pre-commit run pyupgrade --all-files
+a91fe1feddbded535a4322ab854429e3a3961fb4
+# move node base classes from main to nodes
+afc607cfd81458d4e4f3b1f3cf8cc931b933907e
+# [?] split most fixture related code into own plugin
+8c49561470708761f7321504f5e8343811be87ac
+# run pyupgrade
+9aacb4635e81edd6ecf281d4f6c0cfc8e94ab301
+# run blacken-docs
+5f95dce95602921a70bfbc7d8de2f7712c5e4505
+# ran pyupgrade-docs again
+75d0b899bbb56d6849e9d69d83a9426ed3f43f8b
+
+# move argument parser to own file
+c9df77cbd6a365dcb73c39618e4842711817e871
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.github/FUNDING.yml b/testing/web-platform/tests/tools/third_party/pytest/.github/FUNDING.yml
new file mode 100644
index 0000000000..5f2d1cf09c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.github/FUNDING.yml
@@ -0,0 +1,5 @@
+# info:
+# * https://help.github.com/en/articles/displaying-a-sponsor-button-in-your-repository
+# * https://tidelift.com/subscription/how-to-connect-tidelift-with-github
+tidelift: pypi/pytest
+open_collective: pytest
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.github/ISSUE_TEMPLATE/1_bug_report.md b/testing/web-platform/tests/tools/third_party/pytest/.github/ISSUE_TEMPLATE/1_bug_report.md
new file mode 100644
index 0000000000..0fc3e06cd2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.github/ISSUE_TEMPLATE/1_bug_report.md
@@ -0,0 +1,16 @@
+---
+name: 🛠 Bug Report
+about: Report errors and problems
+
+---
+
+<!--
+Thanks for submitting an issue!
+
+Quick check-list while reporting bugs:
+-->
+
+- [ ] a detailed description of the bug or problem you are having
+- [ ] output of `pip list` from the virtual environment you are using
+- [ ] pytest and operating system versions
+- [ ] minimal example if possible
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.github/ISSUE_TEMPLATE/2_feature_request.md b/testing/web-platform/tests/tools/third_party/pytest/.github/ISSUE_TEMPLATE/2_feature_request.md
new file mode 100644
index 0000000000..01fe96295e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.github/ISSUE_TEMPLATE/2_feature_request.md
@@ -0,0 +1,25 @@
+---
+name: 🚀 Feature Request
+about: Ideas for new features and improvements
+
+---
+
+<!--
+Thanks for suggesting a feature!
+
+Quick check-list while suggesting features:
+-->
+
+#### What's the problem this feature will solve?
+<!-- What are you trying to do, that you are unable to achieve with pytest as it currently stands? -->
+
+#### Describe the solution you'd like
+<!-- A clear and concise description of what you want to happen. -->
+
+<!-- Provide examples of real-world use cases that this would enable and how it solves the problem described above. -->
+
+#### Alternative Solutions
+<!-- Have you tried to workaround the problem using a pytest plugin or other tools? Or a different approach to solving this issue? Please elaborate here. -->
+
+#### Additional context
+<!-- Add any other context, links, etc. about the feature here. -->
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.github/ISSUE_TEMPLATE/config.yml b/testing/web-platform/tests/tools/third_party/pytest/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 0000000000..742d2e4d66
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,5 @@
+blank_issues_enabled: false
+contact_links:
+  - name: ❓ Support Question
+ url: https://github.com/pytest-dev/pytest/discussions
+ about: Use GitHub's new Discussions feature for questions
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.github/PULL_REQUEST_TEMPLATE.md b/testing/web-platform/tests/tools/third_party/pytest/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..5e7282bfd7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,26 @@
+<!--
+Thanks for submitting a PR, your contribution is really appreciated!
+
+Here is a quick checklist that should be present in PRs.
+
+- [ ] Include documentation when adding new features.
+- [ ] Include new tests or update existing tests when applicable.
+- [X] Allow maintainers to push and squash when merging my commits. Please uncheck this if you prefer to squash the commits yourself.
+
+If this change fixes an issue, please:
+
+- [ ] Add text like ``closes #XYZW`` to the PR description and/or commits (where ``XYZW`` is the issue number). See the [github docs](https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword) for more information.
+
+Unless your change is trivial or a small documentation fix (e.g., a typo or reword of a small section) please:
+
+- [ ] Create a new changelog file in the `changelog` folder, with a name like `<ISSUE NUMBER>.<TYPE>.rst`. See [changelog/README.rst](https://github.com/pytest-dev/pytest/blob/main/changelog/README.rst) for details.
+
+ Write sentences in the **past or present tense**, examples:
+
+ * *Improved verbose diff output with sequences.*
+ * *Terminal summary statistics now use multiple colors.*
+
+ Also make sure to end the sentence with a `.`.
+
+- [ ] Add yourself to `AUTHORS` in alphabetical order.
+-->
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.github/config.yml b/testing/web-platform/tests/tools/third_party/pytest/.github/config.yml
new file mode 100644
index 0000000000..86a8a97e78
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.github/config.yml
@@ -0,0 +1,2 @@
+rtd:
+ project: pytest
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.github/dependabot.yml b/testing/web-platform/tests/tools/third_party/pytest/.github/dependabot.yml
new file mode 100644
index 0000000000..507789bf5a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.github/dependabot.yml
@@ -0,0 +1,11 @@
+version: 2
+updates:
+- package-ecosystem: pip
+ directory: "/testing/plugins_integration"
+ schedule:
+ interval: weekly
+ time: "03:00"
+ open-pull-requests-limit: 10
+ allow:
+ - dependency-type: direct
+ - dependency-type: indirect
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.github/labels.toml b/testing/web-platform/tests/tools/third_party/pytest/.github/labels.toml
new file mode 100644
index 0000000000..aef1e913af
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.github/labels.toml
@@ -0,0 +1,149 @@
+["os: cygwin"]
+color = "006b75"
+description = "cygwin platform-specific problem"
+name = "os: cygwin"
+
+["os: linux"]
+color = "1d76db"
+description = "linux platform-specific problem"
+name = "os: linux"
+
+["os: mac"]
+color = "bfdadc"
+description = "mac platform-specific problem"
+name = "os: mac"
+
+["os: windows"]
+color = "fbca04"
+description = "windows platform-specific problem"
+name = "os: windows"
+
+["plugin: argcomplete"]
+color = "d4c5f9"
+description = "related to the argcomplete builtin plugin"
+name = "plugin: argcomplete"
+
+["plugin: cache"]
+color = "c7def8"
+description = "related to the cache builtin plugin"
+name = "plugin: cache"
+
+["plugin: capture"]
+color = "1d76db"
+description = "related to the capture builtin plugin"
+name = "plugin: capture"
+
+["plugin: debugging"]
+color = "dd52a8"
+description = "related to the debugging builtin plugin"
+name = "plugin: debugging"
+
+["plugin: doctests"]
+color = "fad8c7"
+description = "related to the doctests builtin plugin"
+name = "plugin: doctests"
+
+["plugin: junitxml"]
+color = "c5def5"
+description = "related to the junitxml builtin plugin"
+name = "plugin: junitxml"
+
+["plugin: logging"]
+color = "ff5432"
+description = "related to the logging builtin plugin"
+name = "plugin: logging"
+
+["plugin: monkeypatch"]
+color = "0e8a16"
+description = "related to the monkeypatch builtin plugin"
+name = "plugin: monkeypatch"
+
+["plugin: nose"]
+color = "bfdadc"
+description = "related to the nose integration builtin plugin"
+name = "plugin: nose"
+
+["plugin: pastebin"]
+color = "bfd4f2"
+description = "related to the pastebin builtin plugin"
+name = "plugin: pastebin"
+
+["plugin: pytester"]
+color = "c5def5"
+description = "related to the pytester builtin plugin"
+name = "plugin: pytester"
+
+["plugin: tmpdir"]
+color = "bfd4f2"
+description = "related to the tmpdir builtin plugin"
+name = "plugin: tmpdir"
+
+["plugin: unittest"]
+color = "006b75"
+description = "related to the unittest integration builtin plugin"
+name = "plugin: unittest"
+
+["plugin: warnings"]
+color = "fef2c0"
+description = "related to the warnings builtin plugin"
+name = "plugin: warnings"
+
+["plugin: xdist"]
+color = "5319e7"
+description = "related to the xdist external plugin"
+name = "plugin: xdist"
+
+["status: critical"]
+color = "e11d21"
+description = "grave problem or usability issue that affects lots of users"
+name = "status: critical"
+
+["status: easy"]
+color = "bfe5bf"
+description = "easy issue that is friendly to new contributor"
+name = "status: easy"
+
+["status: help wanted"]
+color = "159818"
+description = "developers would like help from experts on this topic"
+name = "status: help wanted"
+
+["status: needs information"]
+color = "5319e7"
+description = "reporter needs to provide more information; can be closed after 2 or more weeks of inactivity"
+name = "status: needs information"
+
+["topic: collection"]
+color = "006b75"
+description = "related to the collection phase"
+name = "topic: collection"
+
+["topic: config"]
+color = "006b75"
+description = "related to config handling, argument parsing and config file"
+name = "topic: config"
+
+["topic: fixtures"]
+color = "5319e7"
+description = "anything involving fixtures directly or indirectly"
+name = "topic: fixtures"
+
+["topic: marks"]
+color = "b60205"
+description = "related to marks, either the general marks or builtin"
+name = "topic: marks"
+
+["topic: parametrize"]
+color = "fbca04"
+description = "related to @pytest.mark.parametrize"
+name = "topic: parametrize"
+
+["topic: reporting"]
+color = "fef2c0"
+description = "related to terminal output and user-facing messages and errors"
+name = "topic: reporting"
+
+["topic: rewrite"]
+color = "0e8a16"
+description = "related to the assertion rewrite mechanism"
+name = "topic: rewrite"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.github/workflows/main.yml b/testing/web-platform/tests/tools/third_party/pytest/.github/workflows/main.yml
new file mode 100644
index 0000000000..42759ce853
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.github/workflows/main.yml
@@ -0,0 +1,231 @@
+name: main
+
+on:
+ push:
+ branches:
+ - main
+ - "[0-9]+.[0-9]+.x"
+ tags:
+ - "[0-9]+.[0-9]+.[0-9]+"
+ - "[0-9]+.[0-9]+.[0-9]+rc[0-9]+"
+
+ pull_request:
+ branches:
+ - main
+ - "[0-9]+.[0-9]+.x"
+
+env:
+ PYTEST_ADDOPTS: "--color=yes"
+
+# Set permissions at the job level.
+permissions: {}
+
+jobs:
+ build:
+ runs-on: ${{ matrix.os }}
+ timeout-minutes: 45
+ permissions:
+ contents: read
+
+ strategy:
+ fail-fast: false
+ matrix:
+ name: [
+ "windows-py36",
+ "windows-py37",
+ "windows-py37-pluggy",
+ "windows-py38",
+ "windows-py39",
+ "windows-py310",
+ "windows-py311",
+
+ "ubuntu-py36",
+ "ubuntu-py37",
+ "ubuntu-py37-pluggy",
+ "ubuntu-py37-freeze",
+ "ubuntu-py38",
+ "ubuntu-py39",
+ "ubuntu-py310",
+ "ubuntu-py311",
+ "ubuntu-pypy3",
+
+ "macos-py37",
+ "macos-py38",
+
+ "docs",
+ "doctesting",
+ "plugins",
+ ]
+
+ include:
+ - name: "windows-py36"
+ python: "3.6"
+ os: windows-latest
+ tox_env: "py36-xdist"
+ - name: "windows-py37"
+ python: "3.7"
+ os: windows-latest
+ tox_env: "py37-numpy"
+ - name: "windows-py37-pluggy"
+ python: "3.7"
+ os: windows-latest
+ tox_env: "py37-pluggymain-xdist"
+ - name: "windows-py38"
+ python: "3.8"
+ os: windows-latest
+ tox_env: "py38-unittestextras"
+ use_coverage: true
+ - name: "windows-py39"
+ python: "3.9"
+ os: windows-latest
+ tox_env: "py39-xdist"
+ - name: "windows-py310"
+ python: "3.10"
+ os: windows-latest
+ tox_env: "py310-xdist"
+ - name: "windows-py311"
+ python: "3.11-dev"
+ os: windows-latest
+ tox_env: "py311"
+
+ - name: "ubuntu-py36"
+ python: "3.6"
+ os: ubuntu-latest
+ tox_env: "py36-xdist"
+ - name: "ubuntu-py37"
+ python: "3.7"
+ os: ubuntu-latest
+ tox_env: "py37-lsof-numpy-pexpect"
+ use_coverage: true
+ - name: "ubuntu-py37-pluggy"
+ python: "3.7"
+ os: ubuntu-latest
+ tox_env: "py37-pluggymain-xdist"
+ - name: "ubuntu-py37-freeze"
+ python: "3.7"
+ os: ubuntu-latest
+ tox_env: "py37-freeze"
+ - name: "ubuntu-py38"
+ python: "3.8"
+ os: ubuntu-latest
+ tox_env: "py38-xdist"
+ - name: "ubuntu-py39"
+ python: "3.9"
+ os: ubuntu-latest
+ tox_env: "py39-xdist"
+ - name: "ubuntu-py310"
+ python: "3.10"
+ os: ubuntu-latest
+ tox_env: "py310-xdist"
+ - name: "ubuntu-py311"
+ python: "3.11-dev"
+ os: ubuntu-latest
+ tox_env: "py311"
+ - name: "ubuntu-pypy3"
+ python: "pypy-3.7"
+ os: ubuntu-latest
+ tox_env: "pypy3-xdist"
+
+ - name: "macos-py37"
+ python: "3.7"
+ os: macos-latest
+ tox_env: "py37-xdist"
+ - name: "macos-py38"
+ python: "3.8"
+ os: macos-latest
+ tox_env: "py38-xdist"
+ use_coverage: true
+
+ - name: "plugins"
+ python: "3.7"
+ os: ubuntu-latest
+ tox_env: "plugins"
+
+ - name: "docs"
+ python: "3.7"
+ os: ubuntu-latest
+ tox_env: "docs"
+ - name: "doctesting"
+ python: "3.7"
+ os: ubuntu-latest
+ tox_env: "doctesting"
+ use_coverage: true
+
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+ persist-credentials: false
+
+ - name: Set up Python ${{ matrix.python }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python }}
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install tox coverage
+
+ - name: Test without coverage
+ if: "! matrix.use_coverage"
+ run: "tox -e ${{ matrix.tox_env }}"
+
+ - name: Test with coverage
+ if: "matrix.use_coverage"
+ run: "tox -e ${{ matrix.tox_env }}-coverage"
+
+ - name: Generate coverage report
+ if: "matrix.use_coverage"
+ run: python -m coverage xml
+
+ - name: Upload coverage to Codecov
+ if: "matrix.use_coverage"
+ uses: codecov/codecov-action@v2
+ with:
+ fail_ci_if_error: true
+ files: ./coverage.xml
+ verbose: true
+
+ deploy:
+ if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') && github.repository == 'pytest-dev/pytest'
+
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ permissions:
+ contents: write
+
+ needs: [build]
+
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+ persist-credentials: false
+
+ - name: Set up Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: "3.7"
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install --upgrade build tox
+
+ - name: Build package
+ run: |
+ python -m build
+
+ - name: Publish package to PyPI
+ uses: pypa/gh-action-pypi-publish@master
+ with:
+ user: __token__
+ password: ${{ secrets.pypi_token }}
+
+ - name: Publish GitHub release notes
+ env:
+ GH_RELEASE_NOTES_TOKEN: ${{ github.token }}
+ run: |
+ sudo apt-get install pandoc
+ tox -e publish-gh-release-notes
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.github/workflows/prepare-release-pr.yml b/testing/web-platform/tests/tools/third_party/pytest/.github/workflows/prepare-release-pr.yml
new file mode 100644
index 0000000000..429834b3f2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.github/workflows/prepare-release-pr.yml
@@ -0,0 +1,52 @@
+name: prepare release pr
+
+on:
+ workflow_dispatch:
+ inputs:
+ branch:
+ description: 'Branch to base the release from'
+ required: true
+ default: ''
+ major:
+ description: 'Major release? (yes/no)'
+ required: true
+ default: 'no'
+ prerelease:
+ description: 'Prerelease (ex: rc1). Leave empty if not a pre-release.'
+ required: false
+ default: ''
+
+# Set permissions at the job level.
+permissions: {}
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ pull-requests: write
+
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+
+ - name: Set up Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: "3.8"
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install --upgrade setuptools tox
+
+ - name: Prepare release PR (minor/patch release)
+ if: github.event.inputs.major == 'no'
+ run: |
+ tox -e prepare-release-pr -- ${{ github.event.inputs.branch }} ${{ github.token }} --prerelease='${{ github.event.inputs.prerelease }}'
+
+ - name: Prepare release PR (major release)
+ if: github.event.inputs.major == 'yes'
+ run: |
+ tox -e prepare-release-pr -- ${{ github.event.inputs.branch }} ${{ github.token }} --major --prerelease='${{ github.event.inputs.prerelease }}'
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.github/workflows/update-plugin-list.yml b/testing/web-platform/tests/tools/third_party/pytest/.github/workflows/update-plugin-list.yml
new file mode 100644
index 0000000000..193469072f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.github/workflows/update-plugin-list.yml
@@ -0,0 +1,49 @@
+name: Update Plugin List
+
+on:
+ schedule:
+ # At 00:00 on Sunday.
+ # https://crontab.guru
+ - cron: '0 0 * * 0'
+ workflow_dispatch:
+
+# Set permissions at the job level.
+permissions: {}
+
+jobs:
+ createPullRequest:
+ if: github.repository_owner == 'pytest-dev'
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ pull-requests: write
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+
+ - name: Setup Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.8
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install packaging requests tabulate[widechars] tqdm
+
+ - name: Update Plugin List
+ run: python scripts/update-plugin-list.py
+
+ - name: Create Pull Request
+ uses: peter-evans/create-pull-request@2455e1596942c2902952003bbb574afbbe2ab2e6
+ with:
+ commit-message: '[automated] Update plugin list'
+ author: 'pytest bot <pytestbot@users.noreply.github.com>'
+ branch: update-plugin-list/patch
+ delete-branch: true
+ branch-suffix: short-commit-hash
+ title: '[automated] Update plugin list'
+ body: '[automated] Update plugin list'
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.gitignore b/testing/web-platform/tests/tools/third_party/pytest/.gitignore
new file mode 100644
index 0000000000..935da3b9a2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.gitignore
@@ -0,0 +1,58 @@
+# Automatically generated by `hgimportsvn`
+.svn
+.hgsvn
+
+# Ignore local virtualenvs
+lib/
+bin/
+include/
+.Python/
+
+# These lines are suggested according to the svn:ignore property
+# Feel free to enable them by uncommenting them
+*.pyc
+*.pyo
+*.swp
+*.class
+*.orig
+*~
+.hypothesis/
+
+# autogenerated
+src/_pytest/_version.py
+# setuptools
+.eggs/
+
+doc/*/_build
+doc/*/.doctrees
+doc/*/_changelog_towncrier_draft.rst
+build/
+dist/
+*.egg-info
+htmlcov/
+issue/
+env/
+.env/
+.venv/
+/pythonenv*/
+3rdparty/
+.tox
+.cache
+.pytest_cache
+.mypy_cache
+.coverage
+.coverage.*
+coverage.xml
+.ropeproject
+.idea
+.hypothesis
+.pydevproject
+.project
+.settings
+.vscode
+
+# generated by pip
+pip-wheel-metadata/
+
+# pytest debug logs generated via --debug
+pytestdebug.log
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.pre-commit-config.yaml b/testing/web-platform/tests/tools/third_party/pytest/.pre-commit-config.yaml
new file mode 100644
index 0000000000..20cede3b7b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.pre-commit-config.yaml
@@ -0,0 +1,99 @@
+repos:
+- repo: https://github.com/psf/black
+ rev: 21.11b1
+ hooks:
+ - id: black
+ args: [--safe, --quiet]
+- repo: https://github.com/asottile/blacken-docs
+ rev: v1.12.0
+ hooks:
+ - id: blacken-docs
+ additional_dependencies: [black==20.8b1]
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.0.1
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: fix-encoding-pragma
+ args: [--remove]
+ - id: check-yaml
+ - id: debug-statements
+ exclude: _pytest/(debugging|hookspec).py
+ language_version: python3
+- repo: https://github.com/PyCQA/flake8
+ rev: 4.0.1
+ hooks:
+ - id: flake8
+ language_version: python3
+ additional_dependencies:
+ - flake8-typing-imports==1.9.0
+ - flake8-docstrings==1.5.0
+- repo: https://github.com/asottile/reorder_python_imports
+ rev: v2.6.0
+ hooks:
+ - id: reorder-python-imports
+ args: ['--application-directories=.:src', --py36-plus]
+- repo: https://github.com/asottile/pyupgrade
+ rev: v2.29.1
+ hooks:
+ - id: pyupgrade
+ args: [--py36-plus]
+- repo: https://github.com/asottile/setup-cfg-fmt
+ rev: v1.20.0
+ hooks:
+ - id: setup-cfg-fmt
+ args: [--max-py-version=3.10]
+- repo: https://github.com/pre-commit/pygrep-hooks
+ rev: v1.9.0
+ hooks:
+ - id: python-use-type-annotations
+- repo: https://github.com/pre-commit/mirrors-mypy
+ rev: v0.910-1
+ hooks:
+ - id: mypy
+ files: ^(src/|testing/)
+ args: []
+ additional_dependencies:
+ - iniconfig>=1.1.0
+ - py>=1.8.2
+ - attrs>=19.2.0
+ - packaging
+ - tomli
+ - types-atomicwrites
+ - types-pkg_resources
+- repo: local
+ hooks:
+ - id: rst
+ name: rst
+ entry: rst-lint --encoding utf-8
+ files: ^(RELEASING.rst|README.rst|TIDELIFT.rst)$
+ language: python
+ additional_dependencies: [pygments, restructuredtext_lint]
+ - id: changelogs-rst
+ name: changelog filenames
+ language: fail
+ entry: 'changelog files must be named ####.(breaking|bugfix|deprecation|doc|feature|improvement|trivial|vendor).rst'
+ exclude: changelog/(\d+\.(breaking|bugfix|deprecation|doc|feature|improvement|trivial|vendor).rst|README.rst|_template.rst)
+ files: ^changelog/
+ - id: py-deprecated
+ name: py library is deprecated
+ language: pygrep
+ entry: >
+ (?x)\bpy\.(
+ _code\.|
+ builtin\.|
+ code\.|
+ io\.|
+ path\.local\.sysfind|
+ process\.|
+ std\.|
+ error\.|
+ xml\.
+ )
+ types: [python]
+ - id: py-path-deprecated
+ name: py.path usage is deprecated
+ exclude: docs|src/_pytest/deprecated.py|testing/deprecated_test.py
+ language: pygrep
+ entry: \bpy\.path\.local
+ types: [python]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/.readthedocs.yml b/testing/web-platform/tests/tools/third_party/pytest/.readthedocs.yml
new file mode 100644
index 0000000000..bc44d38b4c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/.readthedocs.yml
@@ -0,0 +1,19 @@
+version: 2
+
+python:
+ install:
+ - requirements: doc/en/requirements.txt
+ - method: pip
+ path: .
+
+build:
+ os: ubuntu-20.04
+ tools:
+ python: "3.9"
+ apt_packages:
+ - inkscape
+
+formats:
+ - epub
+ - pdf
+ - htmlzip
diff --git a/testing/web-platform/tests/tools/third_party/pytest/AUTHORS b/testing/web-platform/tests/tools/third_party/pytest/AUTHORS
new file mode 100644
index 0000000000..9413f9c2e7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/AUTHORS
@@ -0,0 +1,356 @@
+Holger Krekel, holger at merlinux eu
+merlinux GmbH, Germany, office at merlinux eu
+
+Contributors include::
+
+Aaron Coleman
+Abdeali JK
+Abdelrahman Elbehery
+Abhijeet Kasurde
+Adam Johnson
+Adam Uhlir
+Ahn Ki-Wook
+Akiomi Kamakura
+Alan Velasco
+Alexander Johnson
+Alexander King
+Alexei Kozlenok
+Allan Feldman
+Aly Sivji
+Amir Elkess
+Anatoly Bubenkoff
+Anders Hovmöller
+Andras Mitzki
+Andras Tim
+Andrea Cimatoribus
+Andreas Motl
+Andreas Zeidler
+Andrew Shapton
+Andrey Paramonov
+Andrzej Klajnert
+Andrzej Ostrowski
+Andy Freeland
+Anthon van der Neut
+Anthony Shaw
+Anthony Sottile
+Anton Grinevich
+Anton Lodder
+Antony Lee
+Arel Cordero
+Arias Emmanuel
+Ariel Pillemer
+Armin Rigo
+Aron Coyle
+Aron Curzon
+Aviral Verma
+Aviv Palivoda
+Barney Gale
+Ben Gartner
+Ben Webb
+Benjamin Peterson
+Bernard Pratz
+Bob Ippolito
+Brian Dorsey
+Brian Maissy
+Brian Okken
+Brianna Laugher
+Bruno Oliveira
+Cal Leeming
+Carl Friedrich Bolz
+Carlos Jenkins
+Ceridwen
+Charles Cloud
+Charles Machalow
+Charnjit SiNGH (CCSJ)
+Chris Lamb
+Chris NeJame
+Chris Rose
+Christian Boelsen
+Christian Fetzer
+Christian Neumüller
+Christian Theunert
+Christian Tismer
+Christine Mecklenborg
+Christoph Buelter
+Christopher Dignam
+Christopher Gilling
+Claire Cecil
+Claudio Madotto
+CrazyMerlyn
+Cristian Vera
+Cyrus Maden
+Damian Skrzypczak
+Daniel Grana
+Daniel Hahler
+Daniel Nuri
+Daniel Wandschneider
+Daniele Procida
+Danielle Jenkins
+Daniil Galiev
+Dave Hunt
+David Díaz-Barquero
+David Mohr
+David Paul Röthlisberger
+David Szotten
+David Vierra
+Daw-Ran Liou
+Debi Mishra
+Denis Kirisov
+Denivy Braiam Rück
+Dhiren Serai
+Diego Russo
+Dmitry Dygalo
+Dmitry Pribysh
+Dominic Mortlock
+Duncan Betts
+Edison Gustavo Muenz
+Edoardo Batini
+Edson Tadeu M. Manoel
+Eduardo Schettino
+Eli Boyarski
+Elizaveta Shashkova
+Éloi Rivard
+Endre Galaczi
+Eric Hunsberger
+Eric Liu
+Eric Siegerman
+Erik Aronesty
+Erik M. Bray
+Evan Kepner
+Fabien Zarifian
+Fabio Zadrozny
+Felix Nieuwenhuizen
+Feng Ma
+Florian Bruhin
+Florian Dahlitz
+Floris Bruynooghe
+Gabriel Reis
+Garvit Shubham
+Gene Wood
+George Kussumoto
+Georgy Dyuldin
+Gergely Kalmár
+Gleb Nikonorov
+Graeme Smecher
+Graham Horler
+Greg Price
+Gregory Lee
+Grig Gheorghiu
+Grigorii Eremeev (budulianin)
+Guido Wesdorp
+Guoqiang Zhang
+Harald Armin Massa
+Harshna
+Henk-Jaap Wagenaar
+Holger Kohr
+Hugo van Kemenade
+Hui Wang (coldnight)
+Ian Bicking
+Ian Lesperance
+Ilya Konstantinov
+Ionuț Turturică
+Iwan Briquemont
+Jaap Broekhuizen
+Jakob van Santen
+Jakub Mitoraj
+James Bourbeau
+Jan Balster
+Janne Vanhala
+Jason R. Coombs
+Javier Domingo Cansino
+Javier Romero
+Jeff Rackauckas
+Jeff Widman
+Jenni Rinker
+John Eddie Ayson
+John Towler
+Jon Sonesen
+Jonas Obrist
+Jordan Guymon
+Jordan Moldow
+Jordan Speicher
+Joseph Hunkeler
+Josh Karpel
+Joshua Bronson
+Jurko Gospodnetić
+Justyna Janczyszyn
+Justice Ndou
+Kale Kundert
+Kamran Ahmad
+Karl O. Pinc
+Karthikeyan Singaravelan
+Katarzyna Jachim
+Katarzyna Król
+Katerina Koukiou
+Keri Volans
+Kevin Cox
+Kevin J. Foley
+Kian-Meng Ang
+Kodi B. Arfer
+Kostis Anagnostopoulos
+Kristoffer Nordström
+Kyle Altendorf
+Lawrence Mitchell
+Lee Kamentsky
+Lev Maximov
+Lewis Cowles
+Llandy Riveron Del Risco
+Loic Esteve
+Lukas Bednar
+Luke Murphy
+Maciek Fijalkowski
+Maho
+Maik Figura
+Mandeep Bhutani
+Manuel Krebber
+Marc Schlaich
+Marcelo Duarte Trevisani
+Marcin Bachry
+Marco Gorelli
+Mark Abramowitz
+Mark Dickinson
+Markus Unterwaditzer
+Martijn Faassen
+Martin Altmayer
+Martin K. Scherer
+Martin Prusse
+Mathieu Clabaut
+Matt Bachmann
+Matt Duck
+Matt Williams
+Matthias Hafner
+Maxim Filipenko
+Maximilian Cosmo Sitter
+mbyt
+Mickey Pashov
+Michael Aquilina
+Michael Birtwell
+Michael Droettboom
+Michael Goerz
+Michael Krebs
+Michael Seifert
+Michal Wajszczuk
+Michał Zięba
+Mihai Capotă
+Mike Hoyle (hoylemd)
+Mike Lundy
+Miro Hrončok
+Nathaniel Compton
+Nathaniel Waisbrot
+Ned Batchelder
+Neven Mundar
+Nicholas Devenish
+Nicholas Murphy
+Niclas Olofsson
+Nicolas Delaby
+Nikolay Kondratyev
+Olga Matoula
+Oleg Pidsadnyi
+Oleg Sushchenko
+Oliver Bestwalter
+Omar Kohl
+Omer Hadari
+Ondřej Súkup
+Oscar Benjamin
+Parth Patel
+Patrick Hayes
+Pauli Virtanen
+Pavel Karateev
+Paweł Adamczak
+Pedro Algarvio
+Petter Strandmark
+Philipp Loose
+Pieter Mulder
+Piotr Banaszkiewicz
+Piotr Helm
+Prakhar Gurunani
+Prashant Anand
+Prashant Sharma
+Pulkit Goyal
+Punyashloka Biswal
+Quentin Pradet
+Ralf Schmitt
+Ram Rachum
+Ralph Giles
+Ran Benita
+Raphael Castaneda
+Raphael Pierzina
+Raquel Alegre
+Ravi Chandra
+Robert Holt
+Roberto Polli
+Roland Puntaier
+Romain Dorgueil
+Roman Bolshakov
+Ronny Pfannschmidt
+Ross Lawley
+Ruaridh Williamson
+Russel Winder
+Ryan Wooden
+Saiprasad Kale
+Samuel Dion-Girardeau
+Samuel Searles-Bryant
+Samuele Pedroni
+Sanket Duthade
+Sankt Petersbug
+Segev Finer
+Serhii Mozghovyi
+Seth Junot
+Shantanu Jain
+Shubham Adep
+Simon Gomizelj
+Simon Kerr
+Skylar Downes
+Srinivas Reddy Thatiparthy
+Stefan Farmbauer
+Stefan Scherfke
+Stefan Zimmermann
+Stefano Taschini
+Steffen Allner
+Stephan Obermann
+Sven-Hendrik Haase
+Sylvain Marié
+Tadek Teleżyński
+Takafumi Arakaki
+Taneli Hukkinen
+Tanvi Mehta
+Tarcisio Fischer
+Tareq Alayan
+Ted Xiao
+Terje Runde
+Thomas Grainger
+Thomas Hisch
+Tim Hoffmann
+Tim Strazny
+Tom Dalton
+Tom Viner
+Tomáš Gavenčiak
+Tomer Keren
+Tor Colvin
+Trevor Bekolay
+Tyler Goodlet
+Tzu-ping Chung
+Vasily Kuznetsov
+Victor Maryama
+Victor Uriarte
+Vidar T. Fauske
+Virgil Dupras
+Vitaly Lashmanov
+Vlad Dragos
+Vlad Radziuk
+Vladyslav Rachek
+Volodymyr Piskun
+Wei Lin
+Wil Cooley
+William Lee
+Wim Glenn
+Wouter van Ackooy
+Xixi Zhao
+Xuan Luong
+Xuecong Liao
+Yoav Caspi
+Yuval Shimon
+Zac Hatfield-Dodds
+Zachary Kneupper
+Zoltán Máté
+Zsolt Cserna
diff --git a/testing/web-platform/tests/tools/third_party/pytest/CHANGELOG.rst b/testing/web-platform/tests/tools/third_party/pytest/CHANGELOG.rst
new file mode 100644
index 0000000000..481f277813
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/CHANGELOG.rst
@@ -0,0 +1,7 @@
+=========
+Changelog
+=========
+
+The pytest CHANGELOG is located `here <https://docs.pytest.org/en/stable/changelog.html>`__.
+
+The source document can be found at: https://github.com/pytest-dev/pytest/blob/main/doc/en/changelog.rst
diff --git a/testing/web-platform/tests/tools/third_party/pytest/CITATION b/testing/web-platform/tests/tools/third_party/pytest/CITATION
new file mode 100644
index 0000000000..d4e9d8ec7a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/CITATION
@@ -0,0 +1,16 @@
+NOTE: Replace "x.y" with the version you use. If you are unsure which version
+you are using, run: `pip show pytest`.
+
+Text:
+
+[pytest] pytest x.y, 2004
+Krekel et al., https://github.com/pytest-dev/pytest
+
+BibTeX:
+
+@misc{pytestx.y,
+ title = {pytest x.y},
+ author = {Krekel, Holger and Oliveira, Bruno and Pfannschmidt, Ronny and Bruynooghe, Floris and Laugher, Brianna and Bruhin, Florian},
+ year = {2004},
+ url = {https://github.com/pytest-dev/pytest},
+}
diff --git a/testing/web-platform/tests/tools/third_party/pytest/CODE_OF_CONDUCT.md b/testing/web-platform/tests/tools/third_party/pytest/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000..f0ca304be4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/CODE_OF_CONDUCT.md
@@ -0,0 +1,83 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, sex characteristics, gender identity and expression,
+level of experience, education, socio-economic status, nationality, personal
+appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+ advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at coc@pytest.org. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+The coc@pytest.org address is routed to the following people who can also be
+contacted individually:
+
+- Brianna Laugher ([@pfctdayelise](https://github.com/pfctdayelise)): brianna@laugher.id.au
+- Bruno Oliveira ([@nicoddemus](https://github.com/nicoddemus)): nicoddemus@gmail.com
+- Florian Bruhin ([@the-compiler](https://github.com/the-compiler)): pytest@the-compiler.org
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see
+https://www.contributor-covenant.org/faq
diff --git a/testing/web-platform/tests/tools/third_party/pytest/CONTRIBUTING.rst b/testing/web-platform/tests/tools/third_party/pytest/CONTRIBUTING.rst
new file mode 100644
index 0000000000..24bca723c8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/CONTRIBUTING.rst
@@ -0,0 +1,481 @@
+============================
+Contribution getting started
+============================
+
+Contributions are highly welcomed and appreciated. Every little bit of help counts,
+so do not hesitate!
+
+.. contents::
+ :depth: 2
+ :backlinks: none
+
+
+.. _submitfeedback:
+
+Feature requests and feedback
+-----------------------------
+
+Do you like pytest? Share some love on Twitter or in your blog posts!
+
+We'd also like to hear about your propositions and suggestions. Feel free to
+`submit them as issues <https://github.com/pytest-dev/pytest/issues>`_ and:
+
+* Explain in detail how they should work.
+* Keep the scope as narrow as possible. This will make it easier to implement.
+
+
+.. _reportbugs:
+
+Report bugs
+-----------
+
+Report bugs for pytest in the `issue tracker <https://github.com/pytest-dev/pytest/issues>`_.
+
+If you are reporting a bug, please include:
+
+* Your operating system name and version.
+* Any details about your local setup that might be helpful in troubleshooting,
+ specifically the Python interpreter version, installed libraries, and pytest
+ version.
+* Detailed steps to reproduce the bug.
+
+If you can write a demonstration test that currently fails but should pass
+(xfail), that is a very useful commit to make as well, even if you cannot
+fix the bug itself.
+
+
+.. _fixbugs:
+
+Fix bugs
+--------
+
+Look through the `GitHub issues for bugs <https://github.com/pytest-dev/pytest/labels/type:%20bug>`_.
+
+:ref:`Talk <contact>` to developers to find out how you can fix specific bugs. To indicate that you are going
+to work on a particular issue, add a comment to that effect on the specific issue.
+
+Don't forget to check the issue trackers of your favourite plugins, too!
+
+.. _writeplugins:
+
+Implement features
+------------------
+
+Look through the `GitHub issues for enhancements <https://github.com/pytest-dev/pytest/labels/type:%20enhancement>`_.
+
+:ref:`Talk <contact>` to developers to find out how you can implement specific
+features.
+
+Write documentation
+-------------------
+
+Pytest could always use more documentation. What exactly is needed?
+
+* More complementary documentation. Have you perhaps found something unclear?
+* Documentation translations. We currently have only English.
+* Docstrings. There can never be too many of them.
+* Blog posts, articles and such -- they're all very appreciated.
+
+You can also edit documentation files directly in the GitHub web interface,
+without using a local copy. This can be convenient for small fixes.
+
+.. note::
+ Build the documentation locally with the following command:
+
+ .. code:: bash
+
+ $ tox -e docs
+
+ The built documentation should be available in ``doc/en/_build/html``,
+ where 'en' refers to the documentation language.
+
+Pytest has an API reference which in large part is
+`generated automatically <https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html>`_
+from the docstrings of the documented items. Pytest uses the
+`Sphinx docstring format <https://sphinx-rtd-tutorial.readthedocs.io/en/latest/docstrings.html>`_.
+For example:
+
+.. code-block:: python
+
+ def my_function(arg: ArgType) -> Foo:
+ """Do important stuff.
+
+ More detailed info here, in separate paragraphs from the subject line.
+ Use proper sentences -- start sentences with capital letters and end
+ with periods.
+
+ Can include annotated documentation:
+
+ :param short_arg: An argument which determines stuff.
+ :param long_arg:
+ A long explanation which spans multiple lines, overflows
+ like this.
+ :returns: The result.
+ :raises ValueError:
+ Detailed information when this can happen.
+
+ .. versionadded:: 6.0
+
+ Including types into the annotations above is not necessary when
+ type-hinting is being used (as in this example).
+ """
+
+
+.. _submitplugin:
+
+Submitting Plugins to pytest-dev
+--------------------------------
+
+Development of the pytest core, some plugins and support code happens
+in repositories living under the ``pytest-dev`` organisation:
+
+- `pytest-dev on GitHub <https://github.com/pytest-dev>`_
+
+All pytest-dev Contributors team members have write access to all contained
+repositories. Pytest core and plugins are generally developed
+using `pull requests`_ to respective repositories.
+
+The objectives of the ``pytest-dev`` organisation are:
+
+* Having a central location for popular pytest plugins
+* Sharing some of the maintenance responsibility (in case a maintainer no
+ longer wishes to maintain a plugin)
+
+You can submit your plugin by subscribing to the `pytest-dev mail list
+<https://mail.python.org/mailman/listinfo/pytest-dev>`_ and writing a
+mail pointing to your existing pytest plugin repository which must have
+the following:
+
+- PyPI presence with packaging metadata that contains a ``pytest-``
+ prefixed name, version number, authors, short and long description
+ (a rough, illustrative packaging sketch is shown after this list).
+
+- a `tox configuration <https://tox.readthedocs.io/en/latest/config.html#configuration-discovery>`_
+ for running tests using `tox <https://tox.readthedocs.io>`_.
+
+- a ``README`` describing how to use the plugin and on which
+ platforms it runs.
+
+- a ``LICENSE`` file containing the licensing information, with
+ matching info in its packaging metadata.
+
+- an issue tracker for bug reports and enhancement requests.
+
+- a `changelog <https://keepachangelog.com/>`_.
+
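+For illustration only, the packaging metadata for a hypothetical plugin named
+``pytest-xyz`` might look roughly like the minimal ``setup.py`` sketch below
+(all names, versions and the module layout are placeholders; adapt them to
+your project and preferred build tooling):
+
+.. code-block:: python
+
+    # Hypothetical packaging sketch for a plugin named "pytest-xyz".
+    from setuptools import setup
+
+    setup(
+        name="pytest-xyz",
+        version="0.1.0",
+        author="Jane Doe",
+        description="A pytest plugin that does xyz.",
+        long_description=open("README.rst").read(),
+        license="MIT",
+        py_modules=["pytest_xyz"],
+        install_requires=["pytest>=6.0"],
+        # The "pytest11" entry point is how pytest discovers installed plugins.
+        entry_points={"pytest11": ["xyz = pytest_xyz"]},
+    )
+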
+If no contributor strongly objects and two agree, the repository can then be
+transferred to the ``pytest-dev`` organisation.
+
+Here's a rundown of how a repository transfer usually proceeds
+(using a repository named ``joedoe/pytest-xyz`` as example):
+
+* ``joedoe`` transfers repository ownership to ``pytest-dev`` administrator ``calvin``.
+* ``calvin`` creates ``pytest-xyz-admin`` and ``pytest-xyz-developers`` teams, inviting ``joedoe`` to both as **maintainer**.
+* ``calvin`` transfers repository to ``pytest-dev`` and configures team access:
+
+ - ``pytest-xyz-admin`` **admin** access;
+ - ``pytest-xyz-developers`` **write** access;
+
+The ``pytest-dev/Contributors`` team has write access to all projects, and
+every project administrator is in it. We recommend that each plugin has at least three
+people who have the right to release to PyPI.
+
+Repository owners can rest assured that no ``pytest-dev`` administrator will ever make
+releases of your repository or take ownership in any way, except in rare cases
+where someone becomes unresponsive after months of contact attempts.
+As stated, the objective is to share maintenance and avoid "plugin-abandon".
+
+
+.. _`pull requests`:
+.. _pull-requests:
+
+Preparing Pull Requests
+-----------------------
+
+Short version
+~~~~~~~~~~~~~
+
+#. Fork the repository.
+#. Enable and install `pre-commit <https://pre-commit.com>`_ to ensure style-guides and code checks are followed.
+#. Follow **PEP-8** for naming and `black <https://github.com/psf/black>`_ for formatting.
+#. Tests are run using ``tox``::
+
+ tox -e linting,py37
+
+ The test environments above are usually enough to cover most cases locally.
+
+#. Write a ``changelog`` entry (e.g. ``changelog/2574.bugfix.rst``): use the issue id number
+ and one of ``feature``, ``improvement``, ``bugfix``, ``doc``, ``deprecation``,
+ ``breaking``, ``vendor`` or ``trivial`` for the issue type.
+
+
+#. Unless your change is trivial or a documentation fix (e.g., a typo or rewording of a small section), please
+ add yourself to the ``AUTHORS`` file, in alphabetical order.
+
+
+Long version
+~~~~~~~~~~~~
+
+What is a "pull request"? It informs the project's core developers about the
+changes you want them to review and merge. Pull requests are stored on
+`GitHub servers <https://github.com/pytest-dev/pytest/pulls>`_.
+Once you send a pull request, we can discuss its potential modifications and
+even add more commits to it later on. There's an excellent tutorial on how Pull
+Requests work in the
+`GitHub Help Center <https://help.github.com/articles/using-pull-requests/>`_.
+
+Here is a simple overview, with pytest-specific bits:
+
+#. Fork the
+ `pytest GitHub repository <https://github.com/pytest-dev/pytest>`__. It's
+ fine to use ``pytest`` as your fork repository name because it will live
+ under your user.
+
+#. Clone your fork locally using `git <https://git-scm.com/>`_ and create a branch::
+
+ $ git clone git@github.com:YOUR_GITHUB_USERNAME/pytest.git
+ $ cd pytest
+ # now, create your own branch off "main":
+
+ $ git checkout -b your-bugfix-branch-name main
+
+ Given we have "major.minor.micro" version numbers, bug fixes will usually
+ be released in micro releases whereas features will be released in
+ minor releases and incompatible changes in major releases.
+
+ If you need some help with Git, follow this quick start
+ guide: https://git.wiki.kernel.org/index.php/QuickStart
+
+#. Install `pre-commit <https://pre-commit.com>`_ and its hook on the pytest repo::
+
+ $ pip install --user pre-commit
+ $ pre-commit install
+
+ Afterwards ``pre-commit`` will run whenever you commit.
+
+ https://pre-commit.com/ is a framework for managing and maintaining multi-language pre-commit hooks
+ to ensure code-style and code formatting is consistent.
+
+#. Install tox
+
+ Tox is used to run all the tests and will automatically set up virtualenvs
+ to run the tests in.
+ (will implicitly use https://virtualenv.pypa.io/en/latest/)::
+
+ $ pip install tox
+
+#. Run all the tests
+
+ You need to have Python 3.7 available in your system. Now
+ running tests is as simple as issuing this command::
+
+ $ tox -e linting,py37
+
+ This command will run tests via the "tox" tool against Python 3.7
+ and also perform "lint" coding-style checks.
+
+#. You can now edit your local working copy and run the tests again as necessary. Please follow PEP-8 for naming.
+
+ You can pass different options to ``tox``. For example, to run tests on Python 3.7 and pass options
+ (e.g. enter pdb on failure) to pytest, you can do::
+
+ $ tox -e py37 -- --pdb
+
+ Or to only run tests in a particular test module on Python 3.7::
+
+ $ tox -e py37 -- testing/test_config.py
+
+
+ When committing, ``pre-commit`` will re-format the files if necessary.
+
+#. If instead of using ``tox`` you prefer to run the tests directly, then we suggest creating a virtual environment and using
+ an editable install with the ``testing`` extra::
+
+ $ python3 -m venv .venv
+ $ source .venv/bin/activate # Linux
+ $ .venv/Scripts/activate.bat # Windows
+ $ pip install -e ".[testing]"
+
+ Afterwards, you can edit the files and run pytest normally::
+
+ $ pytest testing/test_config.py
+
+#. Create a new changelog entry in ``changelog``. The file should be named ``<issueid>.<type>.rst``,
+ where *issueid* is the number of the issue related to the change and *type* is one of
+ ``feature``, ``improvement``, ``bugfix``, ``doc``, ``deprecation``, ``breaking``, ``vendor``
+ or ``trivial``. You may skip creating the changelog entry if the change doesn't affect the
+ documented behaviour of pytest.
+
+#. Add yourself to ``AUTHORS`` file if not there yet, in alphabetical order.
+
+#. Commit and push once your tests pass and you are happy with your change(s)::
+
+ $ git commit -a -m "<commit message>"
+ $ git push -u
+
+#. Finally, submit a pull request through the GitHub website using this data::
+
+ head-fork: YOUR_GITHUB_USERNAME/pytest
+ compare: your-branch-name
+
+ base-fork: pytest-dev/pytest
+ base: main
+
+
+Writing Tests
+~~~~~~~~~~~~~
+
+Writing tests for plugins or for pytest itself is often done using the `pytester fixture <https://docs.pytest.org/en/stable/reference/reference.html#pytester>`_, as a "black-box" test.
+
+For example, to ensure a simple test passes you can write:
+
+.. code-block:: python
+
+ def test_true_assertion(pytester):
+ pytester.makepyfile(
+ """
+ def test_foo():
+ assert True
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(failed=0, passed=1)
+
+
+Alternatively, it is possible to make checks based on the actual terminal output using
+*glob-like* expressions:
+
+.. code-block:: python
+
+ def test_true_assertion(pytester):
+ pytester.makepyfile(
+ """
+ def test_foo():
+ assert False
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*assert False*", "*1 failed*"])
+
+When choosing a file in which to write a new test, take a look at the existing files and see if there's
+one that looks like a good fit. For example, a regression test about a bug in the ``--lf`` option
+should go into ``test_cacheprovider.py``, given that this option is implemented in ``cacheprovider.py``.
+If in doubt, go ahead and open a PR with your best guess and we can discuss this over the code.
+
+Joining the Development Team
+----------------------------
+
+Anyone who has successfully seen through a pull request which did not
+require any extra work from the development team to merge will
+themselves gain commit access if they so wish (if we forget to ask please send a friendly
+reminder). This does not mean there is any change in your contribution workflow:
+everyone goes through the same pull-request-and-review process and
+no-one merges their own pull requests unless already approved. It does however mean you can
+participate in the development process more fully since you can merge
+pull requests from other contributors yourself after having reviewed
+them.
+
+
+Backporting bug fixes for the next patch release
+------------------------------------------------
+
+Pytest makes a feature release every few weeks or months. In between, patch releases
+are made to the previous feature release, containing bug fixes only. The bug fixes
+usually fix regressions, but may be any change that should reach users before the
+next feature release.
+
+Suppose for example that the latest release was 1.2.3, and you want to include
+a bug fix in 1.2.4 (check https://github.com/pytest-dev/pytest/releases for the
+actual latest release). The procedure for this is:
+
+#. First, make sure the bug is fixed in the ``main`` branch, with a regular pull
+ request, as described above. An exception to this is if the bug fix is not
+ applicable to ``main`` anymore.
+
+#. ``git checkout origin/1.2.x -b backport-XXXX`` # use the main PR number here
+
+#. Locate the merge commit on the PR, in the *merged* message, for example:
+
+ nicoddemus merged commit 0f8b462 into pytest-dev:main
+
+#. ``git cherry-pick -x -m1 REVISION`` # use the revision you found above (``0f8b462``).
+
+#. Open a PR targeting ``1.2.x``:
+
+ * Prefix the message with ``[1.2.x]``.
+ * Delete the PR body, it usually contains a duplicate commit message.
+
+
+Who does the backporting
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+As mentioned above, bugs should first be fixed on ``main`` (except in rare occasions
+that a bug only happens in a previous release). So, who should do the backport procedure described
+above?
+
+1. If the bug was fixed by a core developer, it is the main responsibility of that core developer
+ to do the backport.
+2. However, often the merge is done by another maintainer, in which case it is nice of them to
+ do the backport procedure if they have the time.
+3. For bugs submitted by non-maintainers, it is expected that a core developer will do
+ the backport, normally the one that merged the PR on ``main``.
+4. If a non-maintainer notices a bug which is fixed on ``main`` but has not been backported
+ (due to maintainers forgetting to apply the *needs backport* label, or just plain missing it),
+ they are also welcome to open a PR with the backport. The procedure is simple and really
+ helps with the maintenance of the project.
+
+The points above are not rules, but merely some guidelines/suggestions on what we should expect
+regarding backports.
+
+Handling stale issues/PRs
+-------------------------
+
+Stale issues/PRs are those where pytest contributors have asked questions or requested changes
+and the authors haven't gotten around to answering or implementing them after a somewhat long time, or
+the discussion simply died because people seemed to lose interest.
+
+There are many reasons why people don't answer questions or implement requested changes:
+they might get busy, lose interest, or just forget about it,
+but the fact is that this is very common in open source software.
+
+The pytest team really appreciates every issue and pull request, but being a high-volume project
+with many issues and pull requests being submitted daily, we try to reduce the number of stale
+issues and PRs by regularly closing them. When an issue/pull request is closed in this manner,
+it is by no means a dismissal of the topic being tackled by the issue/pull request, but it
+is just a way for us to clear up the queue and make the maintainers' work more manageable. Submitters
+can always reopen the issue/pull request in their own time later if it makes sense.
+
+When to close
+~~~~~~~~~~~~~
+
+Here are a few general rules the maintainers use when deciding to close issues/PRs because
+of inactivity:
+
+* Issues labeled ``question`` or ``needs information``: closed after 14 days of inactivity.
+* Issues labeled ``proposal``: closed after six months of inactivity.
+* Pull requests: after one month, consider pinging the author, updating the linked issue, or closing. For pull requests which are nearly finished, the team should consider finishing them up and merging them.
+
+The above are **not hard rules**, but merely **guidelines**, and can be (and often are!) reviewed on a case-by-case basis.
+
+Closing pull requests
+~~~~~~~~~~~~~~~~~~~~~
+
+When closing a Pull Request, we need to acknowledge the time, effort, and interest demonstrated by the person who submitted it. As mentioned previously, it is not the intent of the team to dismiss a stalled pull request entirely, but merely to clear up our queue, so a message like the one below is warranted when closing a pull request that went stale:
+
+ Hi <contributor>,
+
+ First of all, we would like to thank you for the time and effort you have put into this; the pytest team deeply appreciates it.
+
+ We noticed, however, that it has been a while since you updated this PR. pytest is a high-activity project, with many issues/PRs being opened daily, so it is hard for us maintainers to track which PRs are ready for merging, ready for review, or in need of more attention.
+
+ So for those reasons we think it is best to close the PR for now, with the sole intention of cleaning up our queue; it is by no means a rejection of your changes. We still encourage you to re-open this PR (it is just a click of a button away) when you are ready to get back to it.
+
+ Again, we appreciate the time you spent working on this, and hope you might get back to it at a later time!
+
+ <bye>
+
+Closing Issues
+--------------
+
+When a pull request is submitted to fix an issue, add text like ``closes #XYZW`` to the PR description and/or commits (where ``XYZW`` is the issue number). See the `GitHub docs <https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword>`_ for more information.
+
+When an issue is due to user error (e.g. misunderstanding of a functionality), please politely explain to the user why the issue raised is really a non-issue and ask them to close the issue if they have no further questions. If the original requestor is unresponsive, the issue will be handled as described in the section `Handling stale issues/PRs`_ above.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/LICENSE b/testing/web-platform/tests/tools/third_party/pytest/LICENSE
new file mode 100644
index 0000000000..c3f1657fce
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2004 Holger Krekel and others
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/OPENCOLLECTIVE.rst b/testing/web-platform/tests/tools/third_party/pytest/OPENCOLLECTIVE.rst
new file mode 100644
index 0000000000..8c1c90281e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/OPENCOLLECTIVE.rst
@@ -0,0 +1,44 @@
+==============
+OpenCollective
+==============
+
+pytest has a collective setup at `OpenCollective`_. This document describes how the core team manages
+OpenCollective-related activities.
+
+What is it
+==========
+
+Open Collective is an online funding platform for open and transparent communities.
+It provides tools to raise money and share your finances in full transparency.
+
+It is the platform of choice for individuals and companies that want to make one-time or
+monthly donations directly to the project.
+
+Funds
+=====
+
+The OpenCollective funds donated to pytest will be used to fund overall maintenance,
+local sprints, merchandising (stickers to distribute in conferences for example), and future
+gatherings of pytest developers (sprints).
+
+`Core contributors`_ who are contributing on a continuous basis are free to submit invoices
+to bill maintenance hours using the platform. How much each contributor should request is still an
+open question, but we should use common sense and trust the contributors, most of whom know
+each other in person. A good rule of thumb is to bill the same amount as the monthly payments made to
+contributors who participate in the `Tidelift`_ subscription. If in doubt, just ask.
+
+Admins
+======
+
+A few people have admin access to the OpenCollective dashboard to make changes. Those people
+are part of the `@pytest-dev/opencollective-admins`_ team.
+
+`Core contributors`_ interested in helping out with OpenCollective maintenance are welcome! We don't
+expect much work here other than the occasional approval of expenses from other core contributors.
+Just drop a line to one of the `@pytest-dev/opencollective-admins`_ or use the mailing list.
+
+
+.. _`OpenCollective`: https://opencollective.com/pytest
+.. _`Tidelift`: https://tidelift.com
+.. _`core contributors`: https://github.com/orgs/pytest-dev/teams/core/members
+.. _`@pytest-dev/opencollective-admins`: https://github.com/orgs/pytest-dev/teams/opencollective-admins/members
diff --git a/testing/web-platform/tests/tools/third_party/pytest/README.rst b/testing/web-platform/tests/tools/third_party/pytest/README.rst
new file mode 100644
index 0000000000..1473376517
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/README.rst
@@ -0,0 +1,167 @@
+.. image:: https://github.com/pytest-dev/pytest/raw/main/doc/en/img/pytest_logo_curves.svg
+ :target: https://docs.pytest.org/en/stable/
+ :align: center
+ :height: 200
+ :alt: pytest
+
+
+------
+
+.. image:: https://img.shields.io/pypi/v/pytest.svg
+ :target: https://pypi.org/project/pytest/
+
+.. image:: https://img.shields.io/conda/vn/conda-forge/pytest.svg
+ :target: https://anaconda.org/conda-forge/pytest
+
+.. image:: https://img.shields.io/pypi/pyversions/pytest.svg
+ :target: https://pypi.org/project/pytest/
+
+.. image:: https://codecov.io/gh/pytest-dev/pytest/branch/main/graph/badge.svg
+ :target: https://codecov.io/gh/pytest-dev/pytest
+ :alt: Code coverage Status
+
+.. image:: https://github.com/pytest-dev/pytest/workflows/main/badge.svg
+ :target: https://github.com/pytest-dev/pytest/actions?query=workflow%3Amain
+
+.. image:: https://results.pre-commit.ci/badge/github/pytest-dev/pytest/main.svg
+ :target: https://results.pre-commit.ci/latest/github/pytest-dev/pytest/main
+ :alt: pre-commit.ci status
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+
+.. image:: https://www.codetriage.com/pytest-dev/pytest/badges/users.svg
+ :target: https://www.codetriage.com/pytest-dev/pytest
+
+.. image:: https://readthedocs.org/projects/pytest/badge/?version=latest
+ :target: https://pytest.readthedocs.io/en/latest/?badge=latest
+ :alt: Documentation Status
+
+.. image:: https://img.shields.io/badge/Discord-pytest--dev-blue
+ :target: https://discord.com/invite/pytest-dev
+ :alt: Discord
+
+.. image:: https://img.shields.io/badge/Libera%20chat-%23pytest-orange
+ :target: https://web.libera.chat/#pytest
+ :alt: Libera chat
+
+
+The ``pytest`` framework makes it easy to write small tests, yet
+scales to support complex functional testing for applications and libraries.
+
+An example of a simple test:
+
+.. code-block:: python
+
+ # content of test_sample.py
+ def inc(x):
+ return x + 1
+
+
+ def test_answer():
+ assert inc(3) == 5
+
+
+To execute it::
+
+ $ pytest
+ ============================= test session starts =============================
+ collected 1 items
+
+ test_sample.py F
+
+ ================================== FAILURES ===================================
+ _________________________________ test_answer _________________________________
+
+ def test_answer():
+ > assert inc(3) == 5
+ E assert 4 == 5
+ E + where 4 = inc(3)
+
+ test_sample.py:5: AssertionError
+ ========================== 1 failed in 0.04 seconds ===========================
+
+
+Due to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started <https://docs.pytest.org/en/stable/getting-started.html#our-first-test-run>`_ for more examples.
+
+
+Features
+--------
+
+- Detailed info on failing `assert statements <https://docs.pytest.org/en/stable/how-to/assert.html>`_ (no need to remember ``self.assert*`` names)
+
+- `Auto-discovery
+ <https://docs.pytest.org/en/stable/explanation/goodpractices.html#python-test-discovery>`_
+ of test modules and functions
+
+- `Modular fixtures <https://docs.pytest.org/en/stable/explanation/fixtures.html>`_ for
+ managing small or parametrized long-lived test resources
+
+- Can run `unittest <https://docs.pytest.org/en/stable/how-to/unittest.html>`_ (or trial),
+ `nose <https://docs.pytest.org/en/stable/how-to/nose.html>`_ test suites out of the box
+
+- Python 3.6+ and PyPy3
+
+- Rich plugin architecture, with over 850 `external plugins <https://docs.pytest.org/en/latest/reference/plugin_list.html>`_ and a thriving community
+
+
+Documentation
+-------------
+
+For full documentation, including installation, tutorials and PDF documents, please see https://docs.pytest.org/en/stable/.
+
+
+Bugs/Requests
+-------------
+
+Please use the `GitHub issue tracker <https://github.com/pytest-dev/pytest/issues>`_ to submit bugs or request features.
+
+
+Changelog
+---------
+
+Consult the `Changelog <https://docs.pytest.org/en/stable/changelog.html>`__ page for fixes and enhancements of each version.
+
+
+Support pytest
+--------------
+
+`Open Collective`_ is an online funding platform for open and transparent communities.
+It provides tools to raise money and share your finances in full transparency.
+
+It is the platform of choice for individuals and companies that want to make one-time or
+monthly donations directly to the project.
+
+See more details in the `pytest collective`_.
+
+.. _Open Collective: https://opencollective.com
+.. _pytest collective: https://opencollective.com/pytest
+
+
+pytest for enterprise
+---------------------
+
+Available as part of the Tidelift Subscription.
+
+The maintainers of pytest and thousands of other packages are working with Tidelift to deliver commercial support and
+maintenance for the open source dependencies you use to build your applications.
+Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use.
+
+`Learn more. <https://tidelift.com/subscription/pkg/pypi-pytest?utm_source=pypi-pytest&utm_medium=referral&utm_campaign=enterprise&utm_term=repo>`_
+
+Security
+^^^^^^^^
+
+pytest has never been associated with a security vulnerability, but in any case, to report a
+security vulnerability please use the `Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure.
+
+
+License
+-------
+
+Copyright Holger Krekel and others, 2004.
+
+Distributed under the terms of the `MIT`_ license, pytest is free and open source software.
+
+.. _`MIT`: https://github.com/pytest-dev/pytest/blob/main/LICENSE
diff --git a/testing/web-platform/tests/tools/third_party/pytest/RELEASING.rst b/testing/web-platform/tests/tools/third_party/pytest/RELEASING.rst
new file mode 100644
index 0000000000..25ce90d0f6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/RELEASING.rst
@@ -0,0 +1,173 @@
+Release Procedure
+-----------------
+
+Our current policy for releasing is to aim for a bug-fix release every few weeks and a minor release every 2-3 months. The idea
+is to get fixes and new features out instead of trying to cram a ton of features into a release and, as a consequence,
+taking a lot of time to make a new one.
+
+The git commands assume the following remotes are setup:
+
+* ``origin``: your own fork of the repository.
+* ``upstream``: the ``pytest-dev/pytest`` official repository.
+
+Preparing: Automatic Method
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+We have developed an automated release workflow that uses GitHub workflows and is triggered
+by `manually running <https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow>`__
+the `prepare-release-pr workflow <https://github.com/pytest-dev/pytest/actions/workflows/prepare-release-pr.yml>`__
+on GitHub Actions.
+
+The automation will decide the new version number based on the following criteria (a rough sketch of this logic is shown after the list):
+
+- If the "major release" input is set to "yes", release a new major release
+ (e.g. 7.0.0 -> 8.0.0)
+- If there are any ``.feature.rst`` or ``.breaking.rst`` files in the
+ ``changelog`` directory, release a new minor release (e.g. 7.0.0 -> 7.1.0)
+- Otherwise, release a bugfix release (e.g. 7.0.0 -> 7.0.1)
+- If the "prerelease" input is set, append the string to the version number
+ (e.g. 7.0.0 -> 8.0.0rc1, if "major" is set to "yes" and "prerelease" is set to ``rc1``)
+
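+The following Python snippet is only a rough sketch of the decision rules
+above; it is not the actual workflow code, and the helper name is made up:
+
+.. code-block:: python
+
+    from pathlib import Path
+
+
+    def decide_next_version(current: str, major_input: str, prerelease: str) -> str:
+        """Illustrative restatement of the version-bump rules listed above."""
+        major, minor, patch = (int(part) for part in current.split("."))
+        fragments = [path.name for path in Path("changelog").iterdir()]
+        if major_input == "yes":
+            version = f"{major + 1}.0.0"
+        elif any(name.endswith((".feature.rst", ".breaking.rst")) for name in fragments):
+            version = f"{major}.{minor + 1}.0"
+        else:
+            version = f"{major}.{minor}.{patch + 1}"
+        return version + prerelease  # e.g. "8.0.0" + "rc1" -> "8.0.0rc1"
+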
+Bug-fix and minor releases
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Bug-fix and minor releases are always done from a maintenance branch. First,
+consider double-checking the ``changelog`` directory to see if there are any
+breaking changes or new features.
+
+For a new minor release, first create a new maintenance branch from ``main``::
+
+ git fetch --all
+ git branch 7.1.x upstream/main
+ git push upstream 7.1.x
+
+Then, trigger the workflow with the following inputs:
+
+- branch: **7.1.x**
+- major release: **no**
+- prerelease: empty
+
+Or via the commandline using `GitHub's cli <https://github.com/cli/cli>`__::
+
+ gh workflow run prepare-release-pr.yml -f branch=7.1.x -f major=no -f prerelease=
+
+Where ``7.1.x`` is the maintenance branch for the ``7.1`` series. The automated
+workflow will publish a PR for a branch ``release-7.1.0``.
+
+Similarly, for a bug-fix release, use the existing maintenance branch and
+trigger the workflow with e.g. ``branch: 7.0.x`` to get a new ``release-7.0.1``
+PR.
+
+Major releases
+^^^^^^^^^^^^^^
+
+1. Create a new maintenance branch from ``main``::
+
+ git fetch --all
+ git branch 8.0.x upstream/main
+ git push upstream 8.0.x
+
+2. Trigger the workflow with the following inputs:
+
+ - branch: **8.0.x**
+ - major release: **yes**
+ - prerelease: empty
+
+Or via the commandline::
+
+ gh workflow run prepare-release-pr.yml -f branch=8.0.x -f major=yes -f prerelease=
+
+The automated workflow will publish a PR for a branch ``release-8.0.0``.
+
+From this point on, this follows the same workflow as other maintenance branches: bug-fixes are merged
+into ``main`` and ported back to the maintenance branch, even for release candidates.
+
+Release candidates
+^^^^^^^^^^^^^^^^^^
+
+To release a release candidate, set the "prerelease" input to the version number
+suffix to use. To release a ``8.0.0rc1``, proceed like under "major releases", but set:
+
+- branch: 8.0.x
+- major release: yes
+- prerelease: **rc1**
+
+Or via the commandline::
+
+ gh workflow run prepare-release-pr.yml -f branch=8.0.x -f major=yes -f prerelease=rc1
+
+The automated workflow will publish a PR for a branch ``release-8.0.0rc1``.
+
+**A note about release candidates**
+
+During release candidates we can merge small improvements into
+the maintenance branch before releasing the final major version, however we must take care
+to avoid introducing big changes at this stage.
+
+Preparing: Manual Method
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+**Important**: pytest releases must be prepared on **Linux** because the docs and examples expect
+to be executed on that platform.
+
+To release a version ``MAJOR.MINOR.PATCH``, follow these steps:
+
+#. For major and minor releases, create a new branch ``MAJOR.MINOR.x`` from
+ ``upstream/main`` and push it to ``upstream``.
+
+#. Create a branch ``release-MAJOR.MINOR.PATCH`` from the ``MAJOR.MINOR.x`` branch.
+
+ Ensure your branch is up to date and your working tree is clean.
+
+#. Using ``tox``, generate docs, changelog, announcements::
+
+ $ tox -e release -- MAJOR.MINOR.PATCH
+
+ This will generate a commit with all the changes ready for pushing.
+
+#. Open a PR for the ``release-MAJOR.MINOR.PATCH`` branch targeting ``MAJOR.MINOR.x``.
+
+
+Releasing
+~~~~~~~~~
+
+Both automatic and manual processes described above follow the same steps from this point onward.
+
+#. After all tests pass and the PR has been approved, tag the release commit
+ in the ``release-MAJOR.MINOR.PATCH`` branch and push it. This will publish to PyPI::
+
+ git fetch --all
+ git tag MAJOR.MINOR.PATCH upstream/release-MAJOR.MINOR.PATCH
+ git push git@github.com:pytest-dev/pytest.git MAJOR.MINOR.PATCH
+
+ Wait for the deploy to complete, then make sure it is `available on PyPI <https://pypi.org/project/pytest>`_.
+
+#. Merge the PR.
+
+#. Cherry-pick the CHANGELOG / announce files to the ``main`` branch::
+
+ git fetch --all --prune
+ git checkout upstream/main -b cherry-pick-release
+ git cherry-pick -x -m1 upstream/MAJOR.MINOR.x
+
+#. Open a PR for ``cherry-pick-release`` and merge it once CI passes. No need to wait for approvals if there were no conflicts in the previous step.
+
+#. For major and minor releases, tag the release cherry-pick merge commit in main with
+ a dev tag for the next feature release::
+
+ git checkout main
+ git pull
+ git tag MAJOR.{MINOR+1}.0.dev0
+ git push git@github.com:pytest-dev/pytest.git MAJOR.{MINOR+1}.0.dev0
+
+#. Send an email announcement with the contents from::
+
+ doc/en/announce/release-<VERSION>.rst
+
+ To the following mailing lists:
+
+ * pytest-dev@python.org (all releases)
+ * python-announce-list@python.org (all releases)
+ * testing-in-python@lists.idyll.org (only major/minor releases)
+
+ And announce it on `Twitter <https://twitter.com/>`_ with the ``#pytest`` hashtag.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/TIDELIFT.rst b/testing/web-platform/tests/tools/third_party/pytest/TIDELIFT.rst
new file mode 100644
index 0000000000..2fe25841c3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/TIDELIFT.rst
@@ -0,0 +1,60 @@
+========
+Tidelift
+========
+
+pytest is a member of `Tidelift`_. This document describes how the core team manages
+Tidelift-related activities.
+
+What is it
+==========
+
+Tidelift aims to make Open Source sustainable by offering subscriptions to companies which rely
+on Open Source packages. This subscription allows Tidelift to pay maintainers of those Open Source
+packages, helping sustain their work.
+
+It is the perfect platform for companies that want to support Open Source packages and at the same
+time obtain assurances regarding maintenance, quality and security.
+
+Funds
+=====
+
+It was decided in the `mailing list`_ that the Tidelift contribution will be split evenly between
+members of the `contributors team`_ interested in receiving funding.
+
+The current list of contributors receiving funding are:
+
+* `@asottile`_
+* `@nicoddemus`_
+* `@The-Compiler`_
+
+Contributors interested in receiving a part of the funds just need to submit a PR adding their
+name to the list. Contributors that want to stop receiving the funds should also submit a PR
+in the same way.
+
+The PR should mention `@pytest-dev/tidelift-admins`_ so appropriate changes
+can be made in the Tidelift platform.
+
+After the PR has been accepted and merged, the contributor should register in the `Tidelift`_
+platform and follow the instructions there, including signing an `agreement`_.
+
+Admins
+======
+
+A few people have admin access to the Tidelift dashboard to make changes. Those people
+are part of the `@pytest-dev/tidelift-admins`_ team.
+
+`Core contributors`_ interested in helping out with Tidelift maintenance are welcome! We don't
+expect much work here other than the occasional adding/removal of a contributor from receiving
+funds. Just drop a line to one of the `@pytest-dev/tidelift-admins`_ or use the mailing list.
+
+
+.. _`Tidelift`: https://tidelift.com
+.. _`mailing list`: https://mail.python.org/pipermail/pytest-dev/2019-May/004716.html
+.. _`contributors team`: https://github.com/orgs/pytest-dev/teams/contributors
+.. _`core contributors`: https://github.com/orgs/pytest-dev/teams/core/members
+.. _`@pytest-dev/tidelift-admins`: https://github.com/orgs/pytest-dev/teams/tidelift-admins/members
+.. _`agreement`: https://tidelift.com/docs/lifting/agreement
+
+.. _`@asottile`: https://github.com/asottile
+.. _`@nicoddemus`: https://github.com/nicoddemus
+.. _`@The-Compiler`: https://github.com/The-Compiler
diff --git a/testing/web-platform/tests/tools/third_party/pytest/bench/bench.py b/testing/web-platform/tests/tools/third_party/pytest/bench/bench.py
new file mode 100644
index 0000000000..c40fc8636c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/bench/bench.py
@@ -0,0 +1,13 @@
+import sys
+
+if __name__ == "__main__":
+ import cProfile
+ import pytest # NOQA
+ import pstats
+
+ script = sys.argv[1:] if len(sys.argv) > 1 else ["empty.py"]
+ cProfile.run("pytest.cmdline.main(%r)" % script, "prof")
+ p = pstats.Stats("prof")
+ p.strip_dirs()
+ p.sort_stats("cumulative")
+ print(p.print_stats(500))
diff --git a/testing/web-platform/tests/tools/third_party/pytest/bench/bench_argcomplete.py b/testing/web-platform/tests/tools/third_party/pytest/bench/bench_argcomplete.py
new file mode 100644
index 0000000000..335733df72
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/bench/bench_argcomplete.py
@@ -0,0 +1,19 @@
+# 10000 iterations, just for relative comparison
+# 2.7.5 3.3.2
+# FilesCompleter 75.1109 69.2116
+# FastFilesCompleter 0.7383 1.0760
+import timeit
+
+imports = [
+ "from argcomplete.completers import FilesCompleter as completer",
+ "from _pytest._argcomplete import FastFilesCompleter as completer",
+]
+
+count = 1000 # only a few seconds
+setup = "%s\nfc = completer()"
+run = 'fc("/d")'
+
+
+if __name__ == "__main__":
+ print(timeit.timeit(run, setup=setup % imports[0], number=count))
+ print(timeit.timeit(run, setup=setup % imports[1], number=count))
diff --git a/testing/web-platform/tests/tools/third_party/pytest/bench/empty.py b/testing/web-platform/tests/tools/third_party/pytest/bench/empty.py
new file mode 100644
index 0000000000..4e7371b6f8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/bench/empty.py
@@ -0,0 +1,2 @@
+for i in range(1000):
+ exec("def test_func_%d(): pass" % i)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/bench/manyparam.py b/testing/web-platform/tests/tools/third_party/pytest/bench/manyparam.py
new file mode 100644
index 0000000000..1226c73bd9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/bench/manyparam.py
@@ -0,0 +1,14 @@
+import pytest
+
+
+@pytest.fixture(scope="module", params=range(966))
+def foo(request):
+ return request.param
+
+
+def test_it(foo):
+ pass
+
+
+def test_it2(foo):
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/bench/skip.py b/testing/web-platform/tests/tools/third_party/pytest/bench/skip.py
new file mode 100644
index 0000000000..f0c9d1ddbe
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/bench/skip.py
@@ -0,0 +1,9 @@
+import pytest
+
+SKIP = True
+
+
+@pytest.mark.parametrize("x", range(5000))
+def test_foo(x):
+ if SKIP:
+ pytest.skip("heh")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/bench/unit_test.py b/testing/web-platform/tests/tools/third_party/pytest/bench/unit_test.py
new file mode 100644
index 0000000000..ad52069dbf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/bench/unit_test.py
@@ -0,0 +1,13 @@
+from unittest import TestCase # noqa: F401
+
+for i in range(15000):
+ exec(
+ f"""
+class Test{i}(TestCase):
+ @classmethod
+ def setUpClass(cls): pass
+ def test_1(self): pass
+ def test_2(self): pass
+ def test_3(self): pass
+"""
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest/bench/xunit.py b/testing/web-platform/tests/tools/third_party/pytest/bench/xunit.py
new file mode 100644
index 0000000000..3a77dcdce4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/bench/xunit.py
@@ -0,0 +1,11 @@
+for i in range(5000):
+ exec(
+ f"""
+class Test{i}:
+ @classmethod
+ def setup_class(cls): pass
+ def test_1(self): pass
+ def test_2(self): pass
+ def test_3(self): pass
+"""
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest/changelog/README.rst b/testing/web-platform/tests/tools/third_party/pytest/changelog/README.rst
new file mode 100644
index 0000000000..6d026f57ef
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/changelog/README.rst
@@ -0,0 +1,37 @@
+This directory contains "newsfragments" which are short files that contain a small **ReST**-formatted
+text that will be added to the next ``CHANGELOG``.
+
+The ``CHANGELOG`` will be read by **users**, so this description should be aimed at pytest users
+instead of describing internal changes which are only relevant to the developers.
+
+Make sure to use full sentences in the **past or present tense** and use punctuation, examples::
+
+ Improved verbose diff output with sequences.
+
+ Terminal summary statistics now use multiple colors.
+
+Each file should be named like ``<ISSUE>.<TYPE>.rst``, where
+``<ISSUE>`` is an issue number, and ``<TYPE>`` is one of:
+
+* ``feature``: new user facing features, like new command-line options and new behavior.
+* ``improvement``: improvement of existing functionality, usually without requiring user intervention (for example, new fields being written in ``--junitxml``, improved colors in terminal, etc).
+* ``bugfix``: fixes a bug.
+* ``doc``: documentation improvement, like rewording an entire section or adding missing docs.
+* ``deprecation``: feature deprecation.
+* ``breaking``: a change which may break existing suites, such as feature removal or behavior change.
+* ``vendor``: changes in packages vendored in pytest.
+* ``trivial``: fixing a small typo or internal change that might be noteworthy.
+
+So for example: ``123.feature.rst``, ``456.bugfix.rst``.
+
+If your PR fixes an issue, use that number here. If there is no issue,
+then after you submit the PR and get the PR number you can add a
+changelog entry using that number instead.
+
+If you are not sure what issue type to use, don't hesitate to ask in your PR.
+
+``towncrier`` preserves multiple paragraphs and formatting (code blocks, lists, and so on), but for entries
+other than ``features`` it is usually better to stick to a single paragraph to keep it concise.
+
+You can also run ``tox -e docs`` to build the documentation
+with the draft changelog (``doc/en/_build/html/changelog.html``) if you want to get a preview of how your change will look in the final release notes.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/changelog/_template.rst b/testing/web-platform/tests/tools/third_party/pytest/changelog/_template.rst
new file mode 100644
index 0000000000..5de4ae97ea
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/changelog/_template.rst
@@ -0,0 +1,40 @@
+{% for section in sections %}
+{% set underline = "-" %}
+{% if section %}
+{{section}}
+{{ underline * section|length }}{% set underline = "~" %}
+
+{% endif %}
+{% if sections[section] %}
+{% for category, val in definitions.items() if category in sections[section] %}
+
+{{ definitions[category]['name'] }}
+{{ underline * definitions[category]['name']|length }}
+
+{% if definitions[category]['showcontent'] %}
+{% for text, values in sections[section][category]|dictsort(by='value') %}
+{% set issue_joiner = joiner(', ') %}
+- {% for value in values|sort %}{{ issue_joiner() }}`{{ value }} <https://github.com/pytest-dev/pytest/issues/{{ value[1:] }}>`_{% endfor %}: {{ text }}
+
+
+{% endfor %}
+{% else %}
+- {{ sections[section][category]['']|sort|join(', ') }}
+
+
+{% endif %}
+{% if sections[section][category]|length == 0 %}
+
+No significant changes.
+
+
+{% else %}
+{% endif %}
+{% endfor %}
+{% else %}
+
+No significant changes.
+
+
+{% endif %}
+{% endfor %}
diff --git a/testing/web-platform/tests/tools/third_party/pytest/codecov.yml b/testing/web-platform/tests/tools/third_party/pytest/codecov.yml
new file mode 100644
index 0000000000..f1cc869733
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/codecov.yml
@@ -0,0 +1,6 @@
+# reference: https://docs.codecov.io/docs/codecovyml-reference
+coverage:
+ status:
+ patch: true
+ project: false
+comment: false
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/Makefile b/testing/web-platform/tests/tools/third_party/pytest/doc/en/Makefile
new file mode 100644
index 0000000000..f2db689121
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/Makefile
@@ -0,0 +1,43 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?=
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR = .
+BUILDDIR = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+
+REGENDOC_ARGS := \
+ --normalize "/[ \t]+\n/\n/" \
+ --normalize "~\$$REGENDOC_TMPDIR~/home/sweet/project~" \
+ --normalize "~/path/to/example~/home/sweet/project~" \
+ --normalize "/in \d.\d\ds/in 0.12s/" \
+ --normalize "@/tmp/pytest-of-.*/pytest-\d+@PYTEST_TMPDIR@" \
+ --normalize "@pytest-(\d+)\\.[^ ,]+@pytest-\1.x.y@" \
+ --normalize "@py-(\d+)\\.[^ ,]+@py-\1.x.y@" \
+ --normalize "@pluggy-(\d+)\\.[.\d,]+@pluggy-\1.x.y@" \
+ --normalize "@hypothesis-(\d+)\\.[.\d,]+@hypothesis-\1.x.y@" \
+ --normalize "@Python (\d+)\\.[^ ,]+@Python \1.x.y@"
+
+regen: REGENDOC_FILES:=*.rst */*.rst
+regen:
+# need to reset cachedir to the non-tox default
+ PYTHONDONTWRITEBYTECODE=1 \
+ PYTEST_ADDOPTS="-pno:hypothesis -p no:hypothesispytest -Wignore::pytest.PytestUnknownMarkWarning -o cache_dir=.pytest_cache" \
+ COLUMNS=76 \
+ regendoc --update ${REGENDOC_FILES} ${REGENDOC_ARGS}
+
+.PHONY: regen
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/globaltoc.html b/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/globaltoc.html
new file mode 100644
index 0000000000..7c595e7ebf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/globaltoc.html
@@ -0,0 +1,34 @@
+<h3>Contents</h3>
+
+<ul>
+ <li><a href="{{ pathto('index') }}">Home</a></li>
+
+ <li><a href="{{ pathto('getting-started') }}">Get started</a></li>
+ <li><a href="{{ pathto('how-to/index') }}">How-to guides</a></li>
+ <li><a href="{{ pathto('reference/index') }}">Reference guides</a></li>
+ <li><a href="{{ pathto('explanation/index') }}">Explanation</a></li>
+ <li><a href="{{ pathto('contents') }}">Complete table of contents</a></li>
+ <li><a href="{{ pathto('example/index') }}">Library of examples</a></li>
+</ul>
+
+<h3>About the project</h3>
+
+<ul>
+ <li><a href="{{ pathto('changelog') }}">Changelog</a></li>
+ <li><a href="{{ pathto('contributing') }}">Contributing</a></li>
+ <li><a href="{{ pathto('backwards-compatibility') }}">Backwards Compatibility</a></li>
+ <li><a href="{{ pathto('py27-py34-deprecation') }}">Python 2.7 and 3.4 Support</a></li>
+ <li><a href="{{ pathto('sponsor') }}">Sponsor</a></li>
+ <li><a href="{{ pathto('tidelift') }}">pytest for Enterprise</a></li>
+ <li><a href="{{ pathto('license') }}">License</a></li>
+ <li><a href="{{ pathto('contact') }}">Contact Channels</a></li>
+</ul>
+
+{%- if display_toc %}
+ <hr>
+ {{ toc }}
+{%- endif %}
+
+<hr>
+<a href="{{ pathto('genindex') }}">Index</a>
+<hr>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/layout.html b/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/layout.html
new file mode 100644
index 0000000000..f7096eaaa5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/layout.html
@@ -0,0 +1,52 @@
+{#
+
+ Copied from:
+
+ https://raw.githubusercontent.com/pallets/pallets-sphinx-themes/b0c6c41849b4e15cbf62cc1d95c05ef2b3e155c8/src/pallets_sphinx_themes/themes/pocoo/layout.html
+
+ And removed the warning version (see #7331).
+
+#}
+
+{% extends "basic/layout.html" %}
+
+{% set metatags %}
+ {{- metatags }}
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+{%- endset %}
+
+{% block extrahead %}
+ {%- if page_canonical_url %}
+ <link rel="canonical" href="{{ page_canonical_url }}">
+ {%- endif %}
+ <script>DOCUMENTATION_OPTIONS.URL_ROOT = '{{ url_root }}';</script>
+ {{ super() }}
+{%- endblock %}
+
+{% block sidebarlogo %}
+ {% if pagename != "index" or theme_index_sidebar_logo %}
+ {{ super() }}
+ {% endif %}
+{% endblock %}
+
+{% block relbar2 %}{% endblock %}
+
+{% block sidebar2 %}
+ <span id="sidebar-top"></span>
+ {{- super() }}
+{%- endblock %}
+
+{% block footer %}
+ {{ super() }}
+ {%- if READTHEDOCS and not readthedocs_docsearch %}
+ <script>
+ if (typeof READTHEDOCS_DATA !== 'undefined') {
+ if (!READTHEDOCS_DATA.features) {
+ READTHEDOCS_DATA.features = {};
+ }
+ READTHEDOCS_DATA.features.docsearch_disabled = true;
+ }
+ </script>
+ {%- endif %}
+ {{ js_tag("_static/version_warning_offset.js") }}
+{% endblock %}
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/links.html b/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/links.html
new file mode 100644
index 0000000000..c253ecabfd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/links.html
@@ -0,0 +1,7 @@
+<h3>Useful Links</h3>
+<ul>
+ <li><a href="https://pypi.org/project/pytest/">pytest @ PyPI</a></li>
+ <li><a href="https://github.com/pytest-dev/pytest/">pytest @ GitHub</a></li>
+ <li><a href="https://github.com/pytest-dev/pytest/issues">Issue Tracker</a></li>
+ <li><a href="https://media.readthedocs.org/pdf/pytest/latest/pytest.pdf">PDF Documentation</a>
+</ul>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/relations.html b/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/relations.html
new file mode 100644
index 0000000000..3bbcde85bb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/relations.html
@@ -0,0 +1,19 @@
+<h3>Related Topics</h3>
+<ul>
+ <li><a href="{{ pathto(master_doc) }}">Documentation overview</a><ul>
+ {%- for parent in parents %}
+ <li><a href="{{ parent.link|e }}">{{ parent.title }}</a><ul>
+ {%- endfor %}
+ {%- if prev %}
+ <li>Previous: <a href="{{ prev.link|e }}" title="{{ _('previous chapter')
+ }}">{{ prev.title }}</a></li>
+ {%- endif %}
+ {%- if next %}
+ <li>Next: <a href="{{ next.link|e }}" title="{{ _('next chapter')
+ }}">{{ next.title }}</a></li>
+ {%- endif %}
+ {%- for parent in parents %}
+ </ul></li>
+ {%- endfor %}
+ </ul></li>
+</ul>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/sidebarintro.html b/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/sidebarintro.html
new file mode 100644
index 0000000000..ae860c172f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/sidebarintro.html
@@ -0,0 +1,5 @@
+<h3>About pytest</h3>
+<p>
+ pytest is a mature full-featured Python testing tool that helps
+ you write better programs.
+</p>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/slim_searchbox.html b/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/slim_searchbox.html
new file mode 100644
index 0000000000..e98ad4ed90
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/_templates/slim_searchbox.html
@@ -0,0 +1,15 @@
+{#
+ basic/searchbox.html with heading removed.
+#}
+{%- if pagename != "search" and builder != "singlehtml" %}
+<div id="searchbox" style="display: none" role="search">
+ <div class="searchformwrapper">
+ <form class="search" action="{{ pathto('search') }}" method="get">
+ <input type="text" name="q" aria-labelledby="searchlabel"
+ placeholder="Search"/>
+ <input type="submit" value="{{ _('Go') }}" />
+ </form>
+ </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+{%- endif %}
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/adopt.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/adopt.rst
new file mode 100644
index 0000000000..13d82bf011
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/adopt.rst
@@ -0,0 +1,78 @@
+:orphan:
+
+.. warnings about this file not being included in any toctree will be suppressed by :orphan:
+
+
+April 2015 is "adopt pytest month"
+=============================================
+
+Are you an enthusiastic pytest user, the local testing guru in your workplace? Or are you considering using pytest for your open source project, but not sure how to get started? Then you may be interested in "adopt pytest month"!
+
+We will pair experienced pytest users with open source projects, for a month's effort of getting new development teams started with pytest.
+
+In 2015 we are trying this for the first time. In February and March 2015 we will gather volunteers on both sides, in April we will do the work, and in May we will evaluate how it went. This effort is being coordinated by Brianna Laugher. If you have any questions or comments, you can raise them on the `@pytestdotorg twitter account <https://twitter.com/pytestdotorg>`_\, the :issue:`issue tracker <676>` or the `pytest-dev mailing list`_.
+
+
+.. _`pytest-dev mailing list`: https://mail.python.org/mailman/listinfo/pytest-dev
+
+
+The ideal pytest helper
+-----------------------------------------
+
+ - will be able to commit 2-4 hours a week to working with their particular project (this might involve joining their mailing list, installing the software and exploring any existing tests, offering advice, writing some example tests)
+ - feels confident in using pytest (e.g. has explored command line options, knows how to write parametrized tests, has an idea about conftest contents)
+ - does not need to be an expert in every aspect!
+
+Pytest helpers, sign up here! (preferably in February, hard deadline 22 March)
+
+
+
+The ideal partner project
+-----------------------------------------
+
+ - is open source, and predominantly written in Python
+ - has an automated/documented install process for developers
+ - has more than one core developer
+ - has at least one official release (e.g. is available on pypi)
+ - has the support of the core development team, in trying out pytest adoption
+ - has no tests... or 100% test coverage... or somewhere in between!
+
+Partner projects, sign up here! (by 22 March)
+
+
+
+What does it mean to "adopt pytest"?
+-----------------------------------------
+
+There can be many different definitions of "success". Pytest can run many nose_ and unittest_ tests by default, so using pytest as your testrunner may be possible from day 1. Job done, right?
+
+Progressive success might look like:
+
+ - tests can be run (by pytest) without errors (there may be failures)
+ - tests can be run (by pytest) without failures
+ - test runner is integrated into CI server
+ - existing tests are rewritten to take advantage of pytest features - this can happen in several iterations, for example:
+ - changing to native assert_ statements (pycmd_ has a script to help with that, ``pyconvert_unittest.py``)
+ - changing `setUp/tearDown methods`_ to fixtures_
+ - adding markers_
+ - other changes to reduce boilerplate
+ - assess needs for future tests to be written, e.g. new fixtures, distributed_ testing tweaks
+
+"Success" should also include that the development team feels comfortable with their knowledge of how to use pytest. In fact this is probably more important than anything else. So spending a lot of time on communication, giving examples, etc will probably be important - both in running the tests, and in writing them.
+
+It may be that, after the month is up, the partner project decides that pytest is not right for it. That's okay - hopefully the pytest team will also learn something about its weaknesses or deficiencies.
+
+.. _nose: nose.html
+.. _unittest: unittest.html
+.. _assert: assert.html
+.. _pycmd: https://bitbucket.org/hpk42/pycmd/overview
+.. _`setUp/tearDown methods`: xunit_setup.html
+.. _fixtures: fixture.html
+.. _markers: mark.html
+.. _distributed: xdist.html
+
+
+Other ways to help
+-----------------------------------------
+
+Promote! Do your favourite open source Python projects use pytest? If not, why not tell them about this page?
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/index.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/index.rst
new file mode 100644
index 0000000000..9505b0b9e4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/index.rst
@@ -0,0 +1,154 @@
+
+Release announcements
+===========================================
+
+.. toctree::
+ :maxdepth: 2
+
+
+ release-7.0.1
+ release-7.0.0
+ release-7.0.0rc1
+ release-6.2.5
+ release-6.2.4
+ release-6.2.3
+ release-6.2.2
+ release-6.2.1
+ release-6.2.0
+ release-6.1.2
+ release-6.1.1
+ release-6.1.0
+ release-6.0.2
+ release-6.0.1
+ release-6.0.0
+ release-6.0.0rc1
+ release-5.4.3
+ release-5.4.2
+ release-5.4.1
+ release-5.4.0
+ release-5.3.5
+ release-5.3.4
+ release-5.3.3
+ release-5.3.2
+ release-5.3.1
+ release-5.3.0
+ release-5.2.4
+ release-5.2.3
+ release-5.2.2
+ release-5.2.1
+ release-5.2.0
+ release-5.1.3
+ release-5.1.2
+ release-5.1.1
+ release-5.1.0
+ release-5.0.1
+ release-5.0.0
+ release-4.6.9
+ release-4.6.8
+ release-4.6.7
+ release-4.6.6
+ release-4.6.5
+ release-4.6.4
+ release-4.6.3
+ release-4.6.2
+ release-4.6.1
+ release-4.6.0
+ release-4.5.0
+ release-4.4.2
+ release-4.4.1
+ release-4.4.0
+ release-4.3.1
+ release-4.3.0
+ release-4.2.1
+ release-4.2.0
+ release-4.1.1
+ release-4.1.0
+ release-4.0.2
+ release-4.0.1
+ release-4.0.0
+ release-3.10.1
+ release-3.10.0
+ release-3.9.3
+ release-3.9.2
+ release-3.9.1
+ release-3.9.0
+ release-3.8.2
+ release-3.8.1
+ release-3.8.0
+ release-3.7.4
+ release-3.7.3
+ release-3.7.2
+ release-3.7.1
+ release-3.7.0
+ release-3.6.4
+ release-3.6.3
+ release-3.6.2
+ release-3.6.1
+ release-3.6.0
+ release-3.5.1
+ release-3.5.0
+ release-3.4.2
+ release-3.4.1
+ release-3.4.0
+ release-3.3.2
+ release-3.3.1
+ release-3.3.0
+ release-3.2.5
+ release-3.2.4
+ release-3.2.3
+ release-3.2.2
+ release-3.2.1
+ release-3.2.0
+ release-3.1.3
+ release-3.1.2
+ release-3.1.1
+ release-3.1.0
+ release-3.0.7
+ release-3.0.6
+ release-3.0.5
+ release-3.0.4
+ release-3.0.3
+ release-3.0.2
+ release-3.0.1
+ release-3.0.0
+ sprint2016
+ release-2.9.2
+ release-2.9.1
+ release-2.9.0
+ release-2.8.7
+ release-2.8.6
+ release-2.8.5
+ release-2.8.4
+ release-2.8.3
+ release-2.8.2
+ release-2.7.2
+ release-2.7.1
+ release-2.7.0
+ release-2.6.3
+ release-2.6.2
+ release-2.6.1
+ release-2.6.0
+ release-2.5.2
+ release-2.5.1
+ release-2.5.0
+ release-2.4.2
+ release-2.4.1
+ release-2.4.0
+ release-2.3.5
+ release-2.3.4
+ release-2.3.3
+ release-2.3.2
+ release-2.3.1
+ release-2.3.0
+ release-2.2.4
+ release-2.2.2
+ release-2.2.1
+ release-2.2.0
+ release-2.1.3
+ release-2.1.2
+ release-2.1.1
+ release-2.1.0
+ release-2.0.3
+ release-2.0.2
+ release-2.0.1
+ release-2.0.0
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.0.rst
new file mode 100644
index 0000000000..ecb1a1db98
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.0.rst
@@ -0,0 +1,129 @@
+py.test 2.0.0: asserts++, unittest++, reporting++, config++, docs++
+===========================================================================
+
+Welcome to pytest-2.0.0, a major new release of "py.test", the rapid
+easy Python testing tool. There are many new features and enhancements,
+see below for summary and detailed lists. A lot of long-deprecated code
+has been removed, resulting in a much smaller and cleaner
+implementation. See the new docs with examples here:
+
+ http://pytest.org/en/stable/index.html
+
+A note on packaging: pytest used to be part of the "py" distribution up
+until version py-1.3.4, but this has changed now: pytest-2.0.0 only
+contains py.test related code and is expected to be backward-compatible
+to existing test code. If you want to install pytest, just type one of::
+
+ pip install -U pytest
+ easy_install -U pytest
+
+Many thanks to all issue reporters and people asking questions or
+complaining. Particular thanks to Floris Bruynooghe and Ronny Pfannschmidt
+for their great coding contributions and many others for feedback and help.
+
+best,
+holger krekel
+
+
+New Features
+-----------------------
+
+- new invocations through Python interpreter and from Python::
+
+ python -m pytest # on all pythons >= 2.5
+
+ or from a python program::
+
+ import pytest ; pytest.main(arglist, pluginlist)
+
+ see http://pytest.org/en/stable/how-to/usage.html for details.
+
+- new and better reporting information in assert expressions
+ when comparing lists, sequences or strings (a small example follows this feature list).
+
+ see http://pytest.org/en/stable/how-to/assert.html#newreport
+
+- new configuration through ini-files (setup.cfg or tox.ini recognized),
+ for example::
+
+ [pytest]
+ norecursedirs = .hg data* # don't ever recurse in such dirs
+ addopts = -x --pyargs # add these command line options by default
+
+ see http://pytest.org/en/stable/reference/customize.html
+
+- improved standard unittest support. In general py.test should now
+ be better able to run custom unittest.TestCases like twisted trial
+ or Django-based TestCases. Also, you can now run the tests of an
+ installed 'unittest' package with py.test::
+
+ py.test --pyargs unittest
+
+- new "-q" option which decreases verbosity and prints a more
+ nose/unittest-style "dot" output.
+
+- many more detailed improvements
+
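+For example, a failing comparison like the following (a made-up test, not
+part of the release itself) now produces a detailed report of where the
+two lists differ::
+
+    def test_sequences_match():
+        # on failure, pytest points out the first differing index
+        assert [1, 2, 3] == [1, 2, 4]
+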
+Fixes
+-----------------------
+
+- fix issue126 - introduce py.test.set_trace() to trace execution via
+ PDB during the running of tests even if capturing is ongoing.
+- fix issue124 - make reporting more resilient against tests opening
+ files on filedescriptor 1 (stdout).
+- fix issue109 - sibling conftest.py files will not be loaded.
+ (and Directory collectors cannot be customized anymore from a Directory's
+ conftest.py - this needs to happen at least one level up).
+- fix issue88 (finding custom test nodes from command line arg)
+- fix issue93 stdout/stderr is captured while importing conftest.py
+- fix bug: unittest collected functions now also can have "pytestmark"
+ applied at class/module level
+
+Important Notes
+--------------------
+
+* The usual way in pre-2.0 times to use py.test in python code was
+ to import "py" and then e.g. use "py.test.raises" for the helper.
+ This remains valid and is not planned to be deprecated. However,
+ in most examples and internal code you'll find "import pytest"
+ and "pytest.raises" used as the recommended default way.
+
+* pytest now first performs collection of the complete test suite
+ before running any test. This changes for example the semantics of when
+ pytest_collectstart/pytest_collectreport are called. Some plugins may
+ need upgrading.
+
+* The pytest package consists of a 400 LOC core.py and about 20 builtin plugins,
+ summing up to roughly 5000 LOCs, including docstrings. To be fair, it also
+ uses generic code from the "pylib", and the new "py" package to help
+ with filesystem and introspection/code manipulation.
+
+(Incompatible) Removals
+-----------------------------
+
+- py.test.config is now only available if you are in a test run.
+
+- the following (mostly already deprecated) functionality was removed:
+
+ - removed support for Module/Class/... collection node definitions
+ in conftest.py files. They will cause nothing special.
+ - removed support for calling the pre-1.0 collection API of "run()" and "join"
+ - removed reading option values from conftest.py files or env variables.
+ This can now be done much better and more easily through the ini-file
+ mechanism and the "addopts" entry in particular.
+ - removed the "disabled" attribute in test classes. Use the skipping
+ and pytestmark mechanism to skip or xfail a test class.
+
+- py.test.collect.Directory does not exist anymore and it
+ is not possible to provide your own "Directory" object.
+ If you have used this and don't know what to do, get
+ in contact. We'll figure something out.
+
+ Note that pytest_collect_directory() is still called but
+ any return value will be ignored. This allows keeping
+ old code working that performed for example "py.test.skip()"
+ in collect() to prevent recursion into directory trees
+ if a certain dependency or command line option is missing.
+
+
+see :ref:`changelog` for more detailed changes.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.1.rst
new file mode 100644
index 0000000000..4ff3e9f550
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.1.rst
@@ -0,0 +1,67 @@
+py.test 2.0.1: bug fixes
+===========================================================================
+
+Welcome to pytest-2.0.1, a maintenance and bug fix release of pytest,
+a mature testing tool for Python, supporting CPython 2.4-3.2, Jython
+and latest PyPy interpreters. See extensive docs with tested examples here:
+
+ http://pytest.org/
+
+If you want to install or upgrade pytest, just type one of::
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+Many thanks to all issue reporters and people asking questions or
+complaining. Particular thanks to Floris Bruynooghe and Ronny Pfannschmidt
+for their great coding contributions and many others for feedback and help.
+
+best,
+holger krekel
+
+Changes between 2.0.0 and 2.0.1
+----------------------------------------------
+
+- refine and unify initial capturing so that it works nicely
+ even if the logging module is used on an early-loaded conftest.py
+ file or plugin.
+- fix issue12 - show plugin versions with "--version" and
+ "--traceconfig" and also document how to add extra information
+ to reporting test header
+- fix issue17 (import-* reporting issue on python3) by
+ requiring py>1.4.0 (1.4.1 is going to include it)
+- fix issue10 (numpy arrays truth checking) by refining
+ assertion interpretation in py lib
+- fix issue15: make nose compatibility tests compatible
+ with python3 (now that nose-1.0 supports python3)
+- remove the somewhat surprising "same-conftest" detection because
+ it ignored conftest.py files when they appeared in several subdirs.
+- improve assertions ("not in"), thanks Floris Bruynooghe
+- improve behaviour/warnings when running on top of "python -OO"
+ (assertions and docstrings are turned off, leading to potential
+ false positives)
+- introduce a pytest_cmdline_processargs(args) hook
+ to allow dynamic computation of command line arguments.
+ This fixes a regression because py.test prior to 2.0
+ allowed setting command line options from conftest.py
+ files, which pytest-2.0 so far only allowed from ini-files.
+- fix issue7: assert failures in doctest modules.
+ unexpected failures in doctests will now generally
+ show nicer, i.e. within the doctest failing context.
+- fix issue9: setup/teardown functions for an xfail-marked
+ test will report as xfail if they fail but report as normally
+ passing (not xpassing) if they succeed. This is only true
+ for "direct" setup/teardown invocations because teardown_class/
+ teardown_module cannot closely relate to a single test.
+- fix issue14: no logging errors at process exit
+- refinements to "collecting" output on non-ttys
+- refine internal plugin registration and --traceconfig output
+- introduce a mechanism to prevent/unregister plugins from the
+ command line, see http://pytest.org/en/stable/how-to/plugins.html#cmdunregister
+- activate resultlog plugin by default
+- fix regression wrt yielded tests which due to the
+ collection-before-running semantics were not
+ setup as with pytest 1.3.4. Note, however, that
+ the recommended and much cleaner way to do test
+ parametrization remains the "pytest_generate_tests"
+ mechanism, see the docs.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.2.rst
new file mode 100644
index 0000000000..f1f44f34f4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.2.rst
@@ -0,0 +1,73 @@
+py.test 2.0.2: bug fixes, improved xfail/skip expressions, speed ups
+===========================================================================
+
+Welcome to pytest-2.0.2, a maintenance and bug fix release of pytest,
+a mature testing tool for Python, supporting CPython 2.4-3.2, Jython
+and latest PyPy interpreters. See the extensive docs with tested examples here:
+
+ http://pytest.org/
+
+If you want to install or upgrade pytest, just type one of::
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+Many thanks to all issue reporters and people asking questions
+or complaining, particularly Jurko for his insistence,
+Laura, Victor and Brianna for helping with improvements,
+and Ronny for his general advice.
+
+best,
+holger krekel
+
+Changes between 2.0.1 and 2.0.2
+----------------------------------------------
+
+- tackle issue32 - speed up test runs of very quick test functions
+ by reducing the relative overhead
+
+- fix issue30 - extended xfail/skipif handling and improved reporting.
+ If you have a syntax error in your skip/xfail
+ expressions you now get nice error reports.
+
+ Also you can now access module globals from xfail/skipif
+ expressions so that this for example works now::
+
+ import pytest
+ import mymodule
+ @pytest.mark.skipif("mymodule.__version__[0] == "1")
+ def test_function():
+ pass
+
+ This will not run the test function if the module's version string
+ does not start with a "1". Note that specifying a string instead
+ of a boolean expression allows py.test to report meaningful information
+ when summarizing a test run as to what conditions led to skipping
+ (or xfail-ing) tests.
+
+- fix issue28 - setup_method and pytest_generate_tests work together
+ The setup_method fixture method now gets called also for
+ test function invocations generated from the pytest_generate_tests
+ hook.
+
+- fix issue27 - collectonly and keyword-selection (-k) now work together
+ Also, if you do "py.test --collectonly -q" you now get a flat list
+ of test ids that you can use to paste to the py.test commandline
+ in order to execute a particular test.
+
+- fix issue25 avoid reported problems with --pdb and python3.2/encodings output
+
+- fix issue23 - tmpdir argument now works on Python3.2 and WindowsXP
+ Starting with Python3.2 os.symlink may be supported. By requiring
+ a newer py lib version the py.path.local() implementation acknowledges
+ this.
+
+- fixed typos in the docs (thanks Victor Garcia, Brianna Laugher) and particular
+ thanks to Laura Creighton who also reviewed parts of the documentation.
+
+- fix slightly wrong output of verbose progress reporting for classes
+ (thanks Amaury)
+
+- more precise (avoiding of) deprecation warnings for node.Class|Function accesses
+
+- avoid std unittest assertion helper code in tracebacks (thanks Ronny)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.3.rst
new file mode 100644
index 0000000000..81d01eb99f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.0.3.rst
@@ -0,0 +1,39 @@
+py.test 2.0.3: bug fixes and speed ups
+===========================================================================
+
+Welcome to pytest-2.0.3, a maintenance and bug fix release of pytest,
+a mature testing tool for Python, supporting CPython 2.4-3.2, Jython
+and latest PyPy interpreters. See the extensive docs with tested examples here:
+
+ http://pytest.org/
+
+If you want to install or upgrade pytest, just type one of::
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+There also is a bugfix release 1.6 of pytest-xdist, the plugin
+that enables seamless distributed and "looponfail" testing for Python.
+
+best,
+holger krekel
+
+Changes between 2.0.2 and 2.0.3
+----------------------------------------------
+
+- fix issue38: nicer tracebacks on calls to hooks, particularly early
+ configure/sessionstart ones
+
+- fix missing skip reason/meta information in junitxml files, reported
+ via http://lists.idyll.org/pipermail/testing-in-python/2011-March/003928.html
+
+- fix issue34: avoid collection failure with "test" prefixed classes
+ deriving from object.
+
+- don't require zlib (and other libs) for genscript plugin without
+ --genscript actually being used.
+
+- speed up skips (by not doing a full traceback representation
+ internally)
+
+- fix issue37: avoid invalid characters in junitxml's output
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.0.rst
new file mode 100644
index 0000000000..78247247e2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.0.rst
@@ -0,0 +1,47 @@
+py.test 2.1.0: perfected assertions and bug fixes
+===========================================================================
+
+Welcome to the release of pytest-2.1, a mature testing tool for Python,
+supporting CPython 2.4-3.2, Jython and latest PyPy interpreters. See
+the improved extensive docs (now also as PDF!) with tested examples here:
+
+ http://pytest.org/
+
+The single biggest news about this release are **perfected assertions**
+courtesy of Benjamin Peterson. You can now safely use ``assert``
+statements in test modules without having to worry about side effects
+or python optimization ("-OO") options. This is achieved by rewriting
+assert statements in test modules upon import, using a PEP302 hook.
+See https://docs.pytest.org/en/stable/how-to/assert.html for
+detailed information. The work has been partly sponsored by my company,
+merlinux GmbH.
+
+For further details on bug fixes and smaller enhancements see below.
+
+If you want to install or upgrade pytest, just type one of::
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+best,
+holger krekel / https://merlinux.eu/
+
+Changes between 2.0.3 and 2.1.0
+----------------------------------------------
+
+- fix issue53 call nosestyle setup functions with correct ordering
+- fix issue58 and issue59: new assertion code fixes
+- merge Benjamin's assertionrewrite branch: now assertions
+ for test modules on python 2.6 and above are done by rewriting
+ the AST and saving the pyc file before the test module is imported.
+ see doc/assert.txt for more info.
+- fix issue43: improve doctests with better traceback reporting on
+ unexpected exceptions
+- fix issue47: timing output in junitxml for test cases is now correct
+- fix issue48: typo in MarkInfo repr leading to exception
+- fix issue49: avoid confusing error when initialization partially fails
+- fix issue44: env/username expansion for junitxml file path
+- show releaselevel information in test runs for pypy
+- reworked doc pages for better navigation and PDF generation
+- report KeyboardInterrupt even if interrupted during session startup
+- fix issue 35 - provide PDF doc version and download link from index page
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.1.rst
new file mode 100644
index 0000000000..369428ed2e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.1.rst
@@ -0,0 +1,36 @@
+py.test 2.1.1: assertion fixes and improved junitxml output
+===========================================================================
+
+pytest-2.1.1 is a backward compatible maintenance release of the
+popular py.test testing tool. See extensive docs with examples here:
+
+ http://pytest.org/
+
+Most bug fixes address remaining issues with the perfected assertions
+introduced with 2.1.0 - many thanks to the bug reporters and to Benjamin
+Peterson for helping to fix them. Also, junitxml output now produces
+system-out/err tags which lead to better displays of tracebacks with Jenkins.
+
+Also a quick note to package maintainers and others interested: there now
+is a "pytest" man page which can be generated with "make man" in doc/.
+
+If you want to install or upgrade pytest, just type one of::
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+best,
+holger krekel / https://merlinux.eu/
+
+Changes between 2.1.0 and 2.1.1
+----------------------------------------------
+
+- fix issue64 / pytest.set_trace now works within pytest_generate_tests hooks
+- fix issue60 / fix error conditions involving the creation of __pycache__
+- fix issue63 / assertion rewriting on inserts involving strings containing '%'
+- fix assertion rewriting on calls with a ** arg
+- don't cache rewritten modules if bytecode generation is disabled
+- fix assertion rewriting in read-only directories
+- fix issue59: provide system-out/err tags for junitxml output
+- fix issue61: assertion rewriting on boolean operations with 3 or more operands
+- you can now build a man page with "cd doc ; make man"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.2.rst
new file mode 100644
index 0000000000..a3c0c1a38a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.2.rst
@@ -0,0 +1,32 @@
+py.test 2.1.2: bug fixes and fixes for jython
+===========================================================================
+
+pytest-2.1.2 is a minor backward compatible maintenance release of the
+popular py.test testing tool. pytest is commonly used for unit,
+functional- and integration testing. See extensive docs with examples
+here:
+
+ http://pytest.org/
+
+Most bug fixes address remaining issues with the perfected assertions
+introduced in the 2.1 series - many thanks to the bug reporters and to Benjamin
+Peterson for helping to fix them. pytest should also work better with
+Jython-2.5.1 (and Jython trunk).
+
+If you want to install or upgrade pytest, just type one of::
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+best,
+holger krekel / https://merlinux.eu/
+
+Changes between 2.1.1 and 2.1.2
+----------------------------------------
+
+- fix assertion rewriting on files with windows newlines on some Python versions
+- refine test discovery by package/module name (--pyargs), thanks Florian Mayer
+- fix issue69 / assertion rewriting fixed on some boolean operations
+- fix issue68 / packages now work with assertion rewriting
+- fix issue66: use different assertion rewriting caches when the -O option is passed
+- don't try assertion rewriting on Jython, use reinterp
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.3.rst
new file mode 100644
index 0000000000..a43bc058c1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.1.3.rst
@@ -0,0 +1,32 @@
+py.test 2.1.3: just some more fixes
+===========================================================================
+
+pytest-2.1.3 is a minor backward compatible maintenance release of the
+popular py.test testing tool. It is commonly used for unit, functional-
+and integration testing. See extensive docs with examples here:
+
+ http://pytest.org/
+
+The release contains another fix to the perfected assertions introduced
+with the 2.1 series as well as the new possibility to customize reporting
+for assertion expressions on a per-directory level.
+
+If you want to install or upgrade pytest, just type one of::
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+Thanks to the bug reporters and to Ronny Pfannschmidt, Benjamin Peterson
+and Floris Bruynooghe who implemented the fixes.
+
+best,
+holger krekel
+
+Changes between 2.1.2 and 2.1.3
+----------------------------------------
+
+- fix issue79: assertion rewriting failed on some comparisons in boolops,
+- correctly handle zero length arguments (a la pytest '')
+- fix issue67 / junitxml now contains correct test durations
+- fix issue75 / skipping test failure on jython
+- fix issue77 / Allow assertrepr_compare hook to apply to a subset of tests
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.0.rst
new file mode 100644
index 0000000000..7a32dca173
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.0.rst
@@ -0,0 +1,95 @@
+py.test 2.2.0: test marking++, parametrization++ and duration profiling
+===========================================================================
+
+pytest-2.2.0 is a test-suite compatible release of the popular
+py.test testing tool. Plugins might need upgrades. It comes
+with these improvements:
+
+* easier and more powerful parametrization of tests:
+
+ - new @pytest.mark.parametrize decorator to run tests with different arguments
+ - new metafunc.parametrize() API for parametrizing arguments independently
+ - see examples at http://pytest.org/en/stable/example/how-to/parametrize.html (a short sketch also follows this list)
+ - NOTE that parametrize() related APIs are still a bit experimental
+ and might change in future releases.
+
+* improved handling of test markers and refined marking mechanism:
+
+ - "-m markexpr" option for selecting tests according to their mark
+ - a new "markers" ini-variable for registering test markers for your project
+ - the new "--strict" bails out with an error if using unregistered markers.
+ - see examples at http://pytest.org/en/stable/example/markers.html
+
+* duration profiling: new "--duration=N" option showing the N slowest test
+ execution or setup/teardown calls. This is most useful if you want to
+ find out where your slowest test code is.
+
+* also, 2.2.0 performs more eager calling of teardown/finalizer functions,
+ resulting in better and more accurate reporting when they fail
+
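+As a rough sketch (not taken from the release notes and written against a
+current pytest; the names below are invented), the two parametrization
+styles look like this::
+
+    import pytest
+
+    # decorator style: run the test once per argument tuple
+    @pytest.mark.parametrize("base,exponent,expected", [(2, 3, 8), (10, 2, 100)])
+    def test_power(base, exponent, expected):
+        assert base ** exponent == expected
+
+    # hook style: parametrize an argument from a test module, conftest.py
+    # or plugin
+    def pytest_generate_tests(metafunc):
+        if "db_backend" in metafunc.fixturenames:
+            metafunc.parametrize("db_backend", ["sqlite", "postgres"])
+
+    def test_backend_name(db_backend):
+        assert db_backend in ("sqlite", "postgres")
+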
+Besides there is the usual set of bug fixes along with a cleanup of
+pytest's own test suite allowing it to run on a wider range of environments.
+
+For general information, see extensive docs with examples here:
+
+ http://pytest.org/
+
+If you want to install or upgrade pytest you might just type::
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+Thanks to Ronny Pfannschmidt, David Burns, Jeff Donner, Daniel Nouri, Alfredo Deza and all who gave feedback or sent bug reports.
+
+best,
+holger krekel
+
+
+notes on incompatibility
+------------------------------
+
+While test suites should work unchanged you might need to upgrade plugins:
+
+* You need a new version of the pytest-xdist plugin (1.7) for distributing
+ test runs.
+
+* Other plugins might need an upgrade if they implement
+ the ``pytest_runtest_logreport`` hook which now is called unconditionally
+ for the setup/teardown fixture phases of a test. You may choose to
+ ignore setup/teardown failures by inserting "if rep.when != 'call': return"
+ or something similar. Note that most code probably "just" works because
+ the hook was already called for failing setup/teardown phases of a test
+ so a plugin should have been ready to grok such reports already.
+
+
+Changes between 2.1.3 and 2.2.0
+----------------------------------------
+
+- fix issue90: introduce eager tearing down of test items so that
+ teardown functions are called earlier.
+- add an all-powerful metafunc.parametrize function which allows
+ parametrizing test function arguments in multiple steps and therefore
+ from independent plugins and places.
+- add a @pytest.mark.parametrize helper which makes it easy to
+ call a test function with different argument values.
+- Add examples to the "parametrize" example page, including a quick port
+ of Test scenarios and the new parametrize function and decorator.
+- introduce registration for "pytest.mark.*" helpers via ini-files
+ or through plugin hooks. Also introduce a "--strict" option which
+ will treat unregistered markers as errors
+ allowing to avoid typos and maintain a well described set of markers
+ for your test suite. See examples at http://pytest.org/en/stable/how-to/mark.html
+ and its links.
+- issue50: introduce "-m marker" option to select tests based on markers
+ (this is a stricter and more predictable version of "-k" in that "-m"
+ only matches complete markers and has more obvious rules for and/or
+ semantics).
+- new feature to help optimizing the speed of your tests:
+ --durations=N option for displaying N slowest test calls
+ and setup/teardown methods.
+- fix issue87: --pastebin now works with python3
+- fix issue89: --pdb with unexpected exceptions in doctest work more sensibly
+- fix and cleanup pytest's own test suite to not leak FDs
+- fix issue83: link to generated funcarg list
+- fix issue74: pyarg module names are now checked against imp.find_module false positives
+- fix compatibility with twisted/trial-11.1.0 use cases
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.1.rst
new file mode 100644
index 0000000000..44281597ea
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.1.rst
@@ -0,0 +1,41 @@
+pytest-2.2.1: bug fixes, perfect teardowns
+===========================================================================
+
+
+pytest-2.2.1 is a minor backward-compatible release of the py.test
+testing tool. It contains bug fixes and little improvements, including
+documentation fixes. If you are using the distributed testing
+plugin, make sure to upgrade it to pytest-xdist-1.8.
+
+For general information see here:
+
+ http://pytest.org/
+
+To install or upgrade pytest:
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+Special thanks for helping on this release to Ronny Pfannschmidt, Jurko
+Gospodnetic and Ralf Schmitt.
+
+best,
+holger krekel
+
+
+Changes between 2.2.0 and 2.2.1
+----------------------------------------
+
+- fix issue99 (in pytest and py): internal errors with resultlog now
+ produce better output - fixed by normalizing pytest_internalerror
+ input arguments.
+- fix issue97 / traceback issues (in pytest and py) improve traceback output
+ in conjunction with jinja2 and cython which hack tracebacks
+- fix issue93 (in pytest and pytest-xdist) avoid "delayed teardowns":
+ the final test in a test node will now run its teardown directly
+ instead of waiting for the end of the session. Thanks Dave Hunt for
+ the good reporting and feedback. The pytest_runtest_protocol as well
+ as the pytest_runtest_teardown hooks now have "nextitem" available
+ which will be None indicating the end of the test run.
+- fix collection crash due to unknown-source collected items, thanks
+ to Ralf Schmitt (fixed by depending on a more recent pylib)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.2.rst
new file mode 100644
index 0000000000..22ef0bc7a1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.2.rst
@@ -0,0 +1,43 @@
+pytest-2.2.2: bug fixes
+===========================================================================
+
+pytest-2.2.2 (updated to 2.2.3 to fix packaging issues) is a minor
+backward-compatible release of the versatile py.test testing tool. It
+contains bug fixes and a few refinements particularly to reporting with
+"--collectonly", see below for betails.
+
+For general information see here:
+
+ http://pytest.org/
+
+To install or upgrade pytest:
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+Special thanks for helping on this release to Ronny Pfannschmidt
+and Ralf Schmitt and the contributors of issues.
+
+best,
+holger krekel
+
+
+Changes between 2.2.1 and 2.2.2
+----------------------------------------
+
+- fix issue101: wrong args to unittest.TestCase test function now
+ produce better output
+- fix issue102: report more useful errors and hints for when a
+ test directory was renamed and some pyc/__pycache__ remain
+- fix issue106: allow parametrize to be applied multiple times
+ e.g. from module, class and at function level.
+- fix issue107: actually perform session scope finalization
+- don't check in parametrize if indirect parameters are funcarg names
+- add chdir method to monkeypatch funcarg
+- fix crash resulting from calling monkeypatch undo a second time
+- fix issue115: make --collectonly robust against early failure
+ (missing files/directories)
+- "-qq --collectonly" now shows only files and the number of tests in them
+- "-q --collectonly" now shows test ids
+- allow adding of attributes to test reports such that it also works
+ with distributed testing (no upgrade of pytest-xdist needed)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.4.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.4.rst
new file mode 100644
index 0000000000..a8fb9b93c5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.2.4.rst
@@ -0,0 +1,38 @@
+pytest-2.2.4: bug fixes, better junitxml/unittest/python3 compat
+===========================================================================
+
+pytest-2.2.4 is a minor backward-compatible release of the versatile
+py.test testing tool. It contains bug fixes and a few refinements
+to junitxml reporting, better unittest- and python3 compatibility.
+
+For general information see here:
+
+ http://pytest.org/
+
+To install or upgrade pytest:
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+Special thanks for helping on this release to Ronny Pfannschmidt
+and Benjamin Peterson and the contributors of issues.
+
+best,
+holger krekel
+
+Changes between 2.2.3 and 2.2.4
+-----------------------------------
+
+- fix error message for rewritten assertions involving the % operator
+- fix issue 126: correctly match all invalid xml characters for junitxml
+ binary escape
+- fix issue with unittest: now @unittest.expectedFailure markers should
+ be processed correctly (you can also use @pytest.mark markers)
+- document integration with the extended distribute/setuptools test commands
+- fix issue 140: properly get the real functions
+ of bound classmethods for setup/teardown_class
+- fix issue #141: switch from the deceased paste.pocoo.org to bpaste.net
+- fix issue #143: call unconfigure/sessionfinish always when
+ configure/sessionstart where called
+- fix issue #144: better mangle test ids to junitxml classnames
+- upgrade distribute_setup.py to 0.6.27
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.0.rst
new file mode 100644
index 0000000000..6905b77b92
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.0.rst
@@ -0,0 +1,133 @@
+pytest-2.3: improved fixtures / better unittest integration
+=============================================================================
+
+pytest-2.3 comes with many major improvements for fixture/funcarg management
+and parametrized testing in Python. It is now easier, more efficient and
+more predictable to re-run the same tests with different fixture
+instances. Also, you can directly declare the caching "scope" of
+fixtures so that dependent tests throughout your whole test suite can
+re-use database or other expensive fixture objects with ease. Lastly,
+it's possible for fixture functions (formerly known as funcarg
+factories) to use other fixtures, allowing for a completely modular and
+re-usable fixture design.
+
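+As a rough sketch of what this enables (the fixtures below are invented
+for illustration and not part of the release notes)::
+
+    import pytest
+
+    @pytest.fixture(scope="module")
+    def db():
+        # an "expensive" resource, created only once per test module
+        return {"users": []}
+
+    @pytest.fixture
+    def user(db):
+        # a fixture function may itself use another fixture
+        db["users"].append("alice")
+        return "alice"
+
+    def test_user_registered(user, db):
+        assert user in db["users"]
+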
+For detailed info and tutorial-style examples, see:
+
+ http://pytest.org/en/stable/explanation/fixtures.html
+
+Moreover, there is now support for using pytest fixtures/funcargs with
+unittest-style suites, see here for examples:
+
+ http://pytest.org/en/stable/how-to/unittest.html
+
+Besides, more unittest test suites are now expected to "simply work"
+with pytest.
+
+All changes are backward compatible and you should be able to continue
+to run your test suites and 3rd party plugins that worked with
+pytest-2.2.4.
+
+If you are interested in the precise reasoning (including examples) of the
+pytest-2.3 fixture evolution, please consult
+http://pytest.org/en/stable/funcarg_compare.html
+
+For general info on installation and getting started:
+
+ http://pytest.org/en/stable/getting-started.html
+
+Docs and PDF access as usual at:
+
+ http://pytest.org
+
+and more details for those already in the know about pytest can be found
+in the CHANGELOG below.
+
+Particular thanks for this release go to Floris Bruynooghe, Alex Okrushko,
+Carl Meyer, Ronny Pfannschmidt, Benjamin Peterson and Alex Gaynor for helping
+to get the new features right and well integrated. Ronny and Floris
+also helped to fix a number of bugs and yet more people helped by
+providing bug reports.
+
+have fun,
+holger krekel
+
+
+Changes between 2.2.4 and 2.3.0
+-----------------------------------
+
+- fix issue202 - better automatic names for parametrized test functions
+- fix issue139 - introduce @pytest.fixture which allows direct scoping
+ and parametrization of funcarg factories. Introduce new @pytest.setup
+ marker to allow the writing of setup functions which accept funcargs.
+- fix issue198 - conftest fixtures were not found on windows32 in some
+ circumstances with nested directory structures due to path manipulation issues
+- fix issue193: skip test functions that were parametrized with empty
+ parameter sets
+- fix python3.3 compat, mostly reporting bits that previously depended
+ on dict ordering
+- introduce re-ordering of tests by resource and parametrization setup
+ which takes precedence over the usual file-ordering
+- fix issue185 monkeypatching time.time does not cause pytest to fail
+- fix issue172: duplicate call of pytest.setup-decorated setup_module
+ functions
+- fix junitxml=path construction so that if tests change the
+ current working directory and the path is a relative path
+ it is constructed correctly from the original current working dir.
+- fix "python setup.py test" example to cause a proper "errno" return
+- fix issue165 - fix broken doc links and mention stackoverflow for FAQ
+- catch unicode-issues when writing failure representations
+ to terminal to prevent the whole session from crashing
+- fix xfail/skip confusion: a skip-mark or an imperative pytest.skip
+ will now take precedence over xfail-markers because we
+ can't determine xfail/xpass status in case of a skip. see also:
+ http://stackoverflow.com/questions/11105828/in-py-test-when-i-explicitly-skip-a-test-that-is-marked-as-xfail-how-can-i-get
+
+- always report installed 3rd party plugins in the header of a test run
+
+- fix issue160: a failing setup of an xfail-marked tests should
+ be reported as xfail (not xpass)
+
+- fix issue128: show captured output when capsys/capfd are used
+
+- fix issue179: properly show the dependency chain of factories
+
+- pluginmanager.register(...) now raises ValueError if the
+ plugin has been already registered or the name is taken
+
+- fix issue159: improve https://docs.pytest.org/en/6.0.1/faq.html
+ especially with respect to the "magic" history, also mention
+ pytest-django, trial and unittest integration.
+
+- make request.keywords and node.keywords writable. All descendant
+ collection nodes will see keyword values. Keywords are dictionaries
+ containing markers and other info.
+
+- fix issue 178: xml binary escapes are now wrapped in py.xml.raw
+
+- fix issue 176: correctly catch the builtin AssertionError
+ even when we replaced AssertionError with a subclass on the
+ python level
+
+- factory discovery no longer fails with magic global callables
+ that provide no sane __code__ object (mock.call for example)
+
+- fix issue 182: testdir.inprocess_run now considers passed plugins
+
+- fix issue 188: ensure sys.exc_info is clear on python2
+ before calling into a test
+
+- fix issue 191: add unittest TestCase runTest method support
+- fix issue 156: monkeypatch correctly handles class level descriptors
+
+- reporting refinements:
+
+ - pytest_report_header now receives a "startdir" so that
+ you can use startdir.bestrelpath(yourpath) to show
+ nice relative path
+
+ - allow plugins to implement both pytest_report_header and
+ pytest_sessionstart (sessionstart is invoked first).
+
+ - don't show deselected reason line if there is none
+
+ - py.test -vv will show assert comparisons in full instead of truncating them
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.1.rst
new file mode 100644
index 0000000000..6f8770b345
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.1.rst
@@ -0,0 +1,39 @@
+pytest-2.3.1: fix regression with factory functions
+===========================================================================
+
+pytest-2.3.1 is a quick follow-up release:
+
+- fix issue202 - regression with fixture functions/funcarg factories:
+ using "self" is now safe again and works as in 2.2.4. Thanks
+ to Eduard Schettino for the quick bug report.
+
+- disable pexpect pytest self tests on Freebsd - thanks Koob for the
+ quick reporting
+
+- fix/improve interactive docs with --markers
+
+See
+
+ http://pytest.org/
+
+for general information. To install or upgrade pytest:
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+best,
+holger krekel
+
+
+Changes between 2.3.0 and 2.3.1
+-----------------------------------
+
+- fix issue202 - fix regression: using "self" from fixture functions now
+ works as expected (it's the same "self" instance that a test method
+ which uses the fixture sees)
+
+- skip pexpect using tests (test_pdb.py mostly) on freebsd* systems
+ due to pexpect not supporting it properly (hanging)
+
+- link to web pages from --markers output which provides help for
+ pytest.mark.* usage.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.2.rst
new file mode 100644
index 0000000000..484feaaa5a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.2.rst
@@ -0,0 +1,57 @@
+pytest-2.3.2: some fixes and more traceback-printing speed
+===========================================================================
+
+pytest-2.3.2 is another stabilization release:
+
+- issue 205: fixes a regression with conftest detection
+- issue 208/29: fixes traceback-printing speed in some bad cases
+- fix teardown-ordering for parametrized setups
+- fix unittest and trial compat behaviour with respect to runTest() methods
+- issue 206 and others: some improvements to packaging
+- fix issue127 and others: improve some docs
+
+See
+
+ http://pytest.org/
+
+for general information. To install or upgrade pytest:
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+best,
+holger krekel
+
+
+Changes between 2.3.1 and 2.3.2
+-----------------------------------
+
+- fix issue208 and issue29: use new py version to avoid long pauses
+ when printing tracebacks in long modules
+
+- fix issue205 - conftests in subdirs customizing
+ pytest_pycollect_makemodule and pytest_pycollect_makeitem
+ now work properly
+
+- fix teardown-ordering for parametrized setups
+
+- fix issue127 - better documentation for pytest_addoption
+ and related objects.
+
+- fix unittest behaviour: TestCase.runtest only called if there are
+ test methods defined
+
+- improve trial support: don't collect its empty
+ unittest.TestCase.runTest() method
+
+- "python setup.py test" now works with pytest itself
+
+- fix/improve internal/packaging related bits:
+
+ - exception message check of test_nose.py now passes on python33 as well
+
+ - issue206 - fix test_assertrewrite.py to work when a global
+ PYTHONDONTWRITEBYTECODE=1 is present
+
+ - add tox.ini to pytest distribution so that ignore-dirs and other config
+ bits are properly distributed for maintainers who run pytest-own tests
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.3.rst
new file mode 100644
index 0000000000..0cb598a426
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.3.rst
@@ -0,0 +1,61 @@
+pytest-2.3.3: integration fixes, py24 support, ``*/**`` shown in traceback
+===========================================================================
+
+pytest-2.3.3 is another stabilization release of the py.test tool
+which offers uebersimple assertions, scalable fixture mechanisms
+and deep customization for testing with Python. Particularly,
+this release provides:
+
+- integration fixes and improvements related to flask, numpy, nose,
+ unittest, mock
+
+- makes pytest work on py24 again (yes, people sometimes still need to use it)
+
+- show ``*,**`` args in pytest tracebacks
+
+Thanks to Manuel Jacob, Thomas Waldmann, Ronny Pfannschmidt, Pavel Repin
+and Andreas Taumoefolau for providing patches and all for the issues.
+
+See
+
+ http://pytest.org/
+
+for general information. To install or upgrade pytest:
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+best,
+holger krekel
+
+Changes between 2.3.2 and 2.3.3
+-----------------------------------
+
+- fix issue214 - parse modules that contain special objects like e.g.
+ flask's request object which blows up on getattr access if no request
+ is active. thanks Thomas Waldmann.
+
+- fix issue213 - allow parametrizing with values like numpy arrays that
+ do not support an __eq__ operator
+
+- fix issue215 - split test_python.org into multiple files
+
+- fix issue148 - @unittest.skip on classes is now recognized and avoids
+ calling setUpClass/tearDownClass, thanks Pavel Repin
+
+- fix issue209 - reintroduce python2.4 support by depending on newer
+ pylib which re-introduced statement-finding for pre-AST interpreters
+
+- nose support: only call setup if it's a callable, thanks Andrew
+ Taumoefolau
+
+- fix issue219 - add py2.4-3.3 classifiers to TROVE list
+
+- in tracebacks *,** arg values are now shown next to normal arguments
+ (thanks Manuel Jacob)
+
+- fix issue217 - support mock.patch with pytest's fixtures - note that
+ you need either mock-1.0.1 or the python3.3 builtin unittest.mock.
+
+- fix issue127 - improve documentation for pytest_addoption() and
+ add a ``config.getoption(name)`` helper function for consistency.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.4.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.4.rst
new file mode 100644
index 0000000000..43bf03b02b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.4.rst
@@ -0,0 +1,39 @@
+pytest-2.3.4: stabilization, more flexible selection via "-k expr"
+===========================================================================
+
+pytest-2.3.4 is a small stabilization release of the py.test tool
+which offers uebersimple assertions, scalable fixture mechanisms
+and deep customization for testing with Python. This release
+comes with the following fixes and features:
+
+- make "-k" option accept an expressions the same as with "-m" so that one
+ can write: -k "name1 or name2" etc. This is a slight usage incompatibility
+ if you used special syntax like "TestClass.test_method" which you now
+ need to write as -k "TestClass and test_method" to match a certain
+ method in a certain test class.
+- allow markers to be defined dynamically via assignment to
+ item.keywords[...], integrating with the "-m" option
+- yielded test functions will now have autouse-fixtures active but
+ cannot accept fixtures as funcargs - it's recommended to use the
+ post-2.0 parametrize features instead of yield anyway, see:
+ http://pytest.org/en/stable/example/how-to/parametrize.html
+- fix autouse-issue where autouse-fixtures would not be discovered
+ if defined in an a/conftest.py file and tests in a/tests/test_some.py
+- fix issue226 - LIFO ordering for fixture teardowns
+- fix issue224 - invocations with >256 char arguments now work
+- fix issue91 - add/discuss package/directory level setups in example
+- fixes related to autouse discovery and calling
+
+Thanks in particular to Thomas Waldmann for spotting and reporting issues.
+
+See
+
+ http://pytest.org/
+
+for general information. To install or upgrade pytest:
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+best,
+holger krekel
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.5.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.5.rst
new file mode 100644
index 0000000000..d68780a244
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.3.5.rst
@@ -0,0 +1,96 @@
+pytest-2.3.5: bug fixes and little improvements
+===========================================================================
+
+pytest-2.3.5 is a maintenance release with many bug fixes and little
+improvements. See the changelog below for details. No backward
+compatibility issues are foreseen and all plugins which worked with the
+prior version are expected to work unmodified. Speaking of which, a
+few interesting new plugins saw the light last month:
+
+- pytest-instafail: show failure information while tests are running
+- pytest-qt: testing of GUI applications written with QT/Pyside
+- pytest-xprocess: managing external processes across test runs
+- pytest-random: randomize test ordering
+
+And several others like pytest-django saw maintenance releases.
+For a more complete list, check out
+https://pypi.org/search/?q=pytest
+
+For general information see:
+
+ http://pytest.org/
+
+To install or upgrade pytest:
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+Particular thanks to Floris, Ronny, Benjamin and the many bug reporters
+and fix providers.
+
+may the fixtures be with you,
+holger krekel
+
+
+Changes between 2.3.4 and 2.3.5
+-----------------------------------
+
+- never consider a fixture function for test function collection
+
+- allow re-running of test items / helps to fix pytest-reruntests plugin
+ and also helps to keep fewer fixture/resource references alive
+
+- put captured stdout/stderr into junitxml output even for passing tests
+ (thanks Adam Goucher)
+
+- Issue 265 - integrate nose setup/teardown with setupstate
+ so it doesn't try to teardown if it did not setup
+
+- issue 271 - don't write junitxml on worker nodes
+
+- Issue 274 - don't try to show full doctest example
+ when doctest does not know the example location
+
+- issue 280 - disable assertion rewriting on buggy CPython 2.6.0
+
+- inject "getfixture()" helper to retrieve fixtures from doctests,
+ thanks Andreas Zeidler
+
+- issue 259 - when assertion rewriting, be consistent with the default
+ source encoding of ASCII on Python 2
+
+- issue 251 - report a skip instead of ignoring classes with init
+
+- issue250 unicode/str mixes in parametrization names and values now works
+
+- issue257, assertion-triggered compilation of source ending in a
+ comment line doesn't blow up in python2.5 (fixed through py>=1.4.13.dev6)
+
+- fix --genscript option to generate standalone scripts that also
+ work with python3.3 (importer ordering)
+
+- issue171 - in assertion rewriting, show the repr of some
+ global variables
+
+- fix option help for "-k"
+
+- move long description of distribution into README.rst
+
+- improve docstring for metafunc.parametrize()
+
+- fix bug where using capsys with pytest.set_trace() in a test
+ function would break when looking at capsys.readouterr()
+
+- allow specifying prefixes starting with "_" when
+ customizing python_functions test discovery. (thanks Graham Horler)
+
+- improve PYTEST_DEBUG tracing output by putting
+ extra data on new lines with additional indent
+
+- ensure OutcomeExceptions like skip/fail have initialized exception attributes
+
+- issue 260 - don't use nose special setup on plain unittest cases
+
+- fix issue134 - print the collect errors that prevent running specified test items
+
+- fix issue266 - accept unicode in MarkEvaluator expressions
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.4.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.4.0.rst
new file mode 100644
index 0000000000..138cc89576
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.4.0.rst
@@ -0,0 +1,223 @@
+pytest-2.4.0: new fixture features/hooks and bug fixes
+===========================================================================
+
+The just released pytest-2.4.0 brings many improvements and numerous
+bug fixes while remaining plugin- and test-suite compatible apart
+from a few supposedly very minor incompatibilities. See below for
+a full list of details. A few feature highlights:
+
+- new yield-style fixtures `pytest.yield_fixture
+ <http://pytest.org/en/stable/yieldfixture.html>`_, allowing to use
+ existing with-style context managers in fixture functions.
+
+- improved pdb support: ``import pdb ; pdb.set_trace()`` now works
+ without requiring prior disabling of stdout/stderr capturing.
+ Also the ``--pdb`` option now works on collection and internal errors
+ and we introduced a new experimental hook for IDEs/plugins to
+ intercept debugging: ``pytest_exception_interact(node, call, report)``.
+
+- shorter monkeypatch variant to allow specifying an import path as
+ a target, for example: ``monkeypatch.setattr("requests.get", myfunc)``
+
+- better unittest/nose compatibility: all teardown methods are now only
+ called if the corresponding setup method succeeded.
+
+- integrate tab-completion on command line options if you
+ have :pypi:`argcomplete` configured.
+
+- allow boolean expression directly with skipif/xfail
+ if a "reason" is also specified.
+
+- a new hook ``pytest_load_initial_conftests`` allows plugins like
+ :pypi:`pytest-django` to
+ influence the environment before conftest files import ``django``.
+
+- reporting: color the last line red or green depending on whether
+ failures/errors occurred or everything passed.
+
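+Two of the highlights above, sketched with invented names and written
+against a current pytest (the fixture uses the plain ``pytest.fixture``
+spelling that later superseded ``pytest.yield_fixture``)::
+
+    import sys
+    import pytest
+
+    @pytest.fixture            # spelled @pytest.yield_fixture in the 2.4 era
+    def logfile(tmp_path):
+        # the with-block's resource is handed to the test at the yield point;
+        # everything after the yield runs as teardown
+        with open(tmp_path / "run.log", "w") as f:
+            yield f
+
+    def test_logfile_is_writable(logfile):
+        assert logfile.write("hello") == 5
+
+    @pytest.mark.skipif(sys.platform == "win32", reason="POSIX-only example")
+    def test_not_on_windows():
+        assert sys.platform != "win32"
+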
+The documentation has been updated to accommodate the changes,
+see `http://pytest.org <http://pytest.org>`_
+
+To install or upgrade pytest::
+
+ pip install -U pytest # or
+ easy_install -U pytest
+
+
+**Many thanks to all who helped, including Floris Bruynooghe,
+Brianna Laugher, Andreas Pelme, Anthon van der Neut, Anatoly Bubenkoff,
+Vladimir Keleshev, Mathieu Agopian, Ronny Pfannschmidt, Christian
+Theunert and many others.**
+
+may passing tests be with you,
+
+holger krekel
+
+Changes between 2.3.5 and 2.4
+-----------------------------------
+
+known incompatibilities:
+
+- if calling --genscript from python2.7 or above, you only get a
+ standalone script which works on python2.7 or above. Use Python2.6
+ to also get a python2.5 compatible version.
+
+- all xunit-style teardown methods (nose-style, pytest-style,
+ unittest-style) will not be called if the corresponding setup method failed,
+ see issue322 below.
+
+- the pytest_plugin_unregister hook wasn't ever properly called
+ and there is no known implementation of the hook - so it got removed.
+
+- pytest.fixture-decorated functions cannot be generators (i.e. use
+ yield) anymore. This change might be reversed in 2.4.1 if it causes
+ unforeseen real-life issues. However, you can always write and return
+ an inner function/generator and change the fixture consumer to iterate
+ over the returned generator. This change was done in favour of the new
+ ``pytest.yield_fixture`` decorator, see below.
+
+new features:
+
+- experimentally introduce a new ``pytest.yield_fixture`` decorator
+ which accepts exactly the same parameters as pytest.fixture but
+ mandates a ``yield`` statement instead of a ``return`` statement from
+ fixture functions. This allows direct integration with "with-style"
+ context managers in fixture functions and generally avoids registering
+ finalization callbacks in favour of treating the "after-yield" as
+ teardown code. Thanks Andreas Pelme, Vladimir Keleshev, Floris
+ Bruynooghe, Ronny Pfannschmidt and many others for discussions.
+
+- allow boolean expression directly with skipif/xfail
+ if a "reason" is also specified. Rework skipping documentation
+ to recommend "condition as booleans" because it prevents surprises
+ when importing markers between modules. Specifying conditions
+ as strings will remain fully supported.
+
+- reporting: color the last line red or green depending on whether
+ failures/errors occurred or everything passed. thanks Christian
+ Theunert.
+
+- make "import pdb ; pdb.set_trace()" work natively wrt capturing (no
+ "-s" needed anymore), making ``pytest.set_trace()`` a mere shortcut.
+
+- fix issue181: --pdb now also works on collect errors (and
+ on internal errors). This was implemented by a slight internal
+ refactoring and the introduction of a new
+ ``pytest_exception_interact`` hook (see next item).
+
+- fix issue341: introduce new experimental hook for IDEs/terminals to
+ intercept debugging: ``pytest_exception_interact(node, call, report)``.
+
+- new monkeypatch.setattr() variant to provide a shorter
+ invocation for patching out classes/functions from modules:
+
+ monkeypatch.setattr("requests.get", myfunc)
+
+ will replace the "get" function of the "requests" module with ``myfunc``.
+
+- fix issue322: tearDownClass is not run if setUpClass failed. Thanks
+ Mathieu Agopian for the initial fix. Also make all of pytest/nose
+ finalizer mimic the same generic behaviour: if a setupX exists and
+ fails, don't run teardownX. This internally introduces a new method
+ "node.addfinalizer()" helper which can only be called during the setup
+ phase of a node.
+
+- simplify pytest.mark.parametrize() signature: allow passing a
+ comma-separated string to specify argnames. For example:
+ ``pytest.mark.parametrize("input,expected", [(1,2), (2,3)])``
+ works as well as the previous:
+ ``pytest.mark.parametrize(("input", "expected"), ...)``.
+
+- add support for setUpModule/tearDownModule detection, thanks Brian Okken.
+
+- integrate tab-completion on options through use of "argcomplete".
+ Thanks Anthon van der Neut for the PR.
+
+- change option names to be hyphen-separated long options but keep the
+ old spelling backward compatible. py.test -h will only show the
+ hyphenated version, for example "--collect-only" but "--collectonly"
+ will remain valid as well (for backward-compat reasons). Many thanks to
+ Anthon van der Neut for the implementation and to Hynek Schlawack for
+ pushing us.
+
+- fix issue 308 - allow marking/xfailing/skipping individual parameter sets
+ when parametrizing. Thanks Brianna Laugher.
+
+- call new experimental pytest_load_initial_conftests hook to allow
+ 3rd party plugins to do something before a conftest is loaded.
+
+Bug fixes:
+
+- fix issue358 - capturing options are now parsed more properly
+ by using a new parser.parse_known_args method.
+
+- pytest now uses argparse instead of optparse (thanks Anthon) which
+ means that "argparse" is added as a dependency if installing into python2.6
+ environments or below.
+
+- fix issue333: fix a case of bad unittest/pytest hook interaction.
+
+- PR27: correctly handle nose.SkipTest during collection. Thanks
+ Antonio Cuni, Ronny Pfannschmidt.
+
+- fix issue355: junitxml puts name="pytest" attribute to testsuite tag.
+
+- fix issue336: autouse fixture in plugins should work again.
+
+- fix issue279: improve object comparisons on assertion failure
+ for standard datatypes and recognise collections.abc. Thanks to
+ Brianna Laugher and Mathieu Agopian.
+
+- fix issue317: assertion rewriter support for the is_package method
+
+- fix issue335: document py.code.ExceptionInfo() object returned
+ from pytest.raises(), thanks Mathieu Agopian.
+
+- remove implicit distribute_setup support from setup.py.
+
+- fix issue305: ignore any problems when writing pyc files.
+
+- SO-17664702: call fixture finalizers even if the fixture function
+ partially failed (finalizers would not always be called before)
+
+- fix issue320 - fix class scope for fixtures when mixed with
+ module-level functions. Thanks Anatoly Bubenkoff.
+
+- you can specify "-q" or "-qq" to get different levels of "quieter"
+ reporting (thanks Katarzyna Jachim)
+
+- fix issue300 - Fix order of conftest loading when starting py.test
+ in a subdirectory.
+
+- fix issue323 - sorting of many module-scoped arg parametrizations
+
+- make sessionfinish hooks execute with the same cwd-context as at
+ session start (helps fix behaviour of plugins which write output files
+ with relative paths, such as pytest-cov)
+
+- fix issue316 - properly reference collection hooks in docs
+
+- fix issue 306 - cleanup of -k/-m options to only match markers/test
+ names/keywords respectively. Thanks Wouter van Ackooy.
+
+- improved doctest counting for doctests in python modules --
+ files without any doctest items will not show up anymore
+ and doctest examples are counted as separate test items.
+ thanks Danilo Bellini.
+
+- fix issue245 by depending on the released py-1.4.14
+ which fixes py.io.dupfile to work with files with no
+ mode. Thanks Jason R. Coombs.
+
+- fix junitxml generation when test output contains control characters,
+ addressing issue267, thanks Jaap Broekhuizen
+
+- fix issue338: honor --tb style for setup/teardown errors as well. Thanks Maho.
+
+- fix issue307 - use yaml.safe_load in example, thanks Mark Eichin.
+
+- better parametrize error messages, thanks Brianna Laugher
+
+- pytest_terminal_summary(terminalreporter) hooks can now use
+ ".section(title)" and ".line(msg)" methods to print extra
+ information at the end of a test run.
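+
+For illustration, a minimal sketch of the new ``pytest.yield_fixture``
+decorator mentioned in the "new features" list above (the fixture and test
+names are made up for the example)::
+
+    import pytest
+
+    @pytest.yield_fixture
+    def data_file(tmpdir):
+        # "with-style" context managers integrate directly: the file is
+        # opened for the test and handed to it via the yield
+        with tmpdir.join("data.txt").open("w") as handle:
+            yield handle
+        # when the with-block exits after the yield, the file is closed --
+        # that is the teardown, no addfinalizer() registration needed
+
+    def test_write(data_file):
+        data_file.write("hello")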
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.4.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.4.1.rst
new file mode 100644
index 0000000000..308df6bdc4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.4.1.rst
@@ -0,0 +1,25 @@
+pytest-2.4.1: fixing three regressions compared to 2.3.5
+===========================================================================
+
+pytest-2.4.1 is a quick follow up release to fix three regressions
+compared to 2.3.5 before they hit more people:
+
+- When using parser.addoption(), unicode arguments to the
+ "type" keyword should also be converted to the respective types.
+ thanks Floris Bruynooghe, @dnozay. (fixes issue360 and issue362)
+
+- fix dotted filename completion when using argcomplete
+ thanks Anthon van der Neut. (fixes issue361)
+
+- fix regression when a 1-tuple ("arg",) is used for specifying
+ parametrization (the values of the parametrization were passed
+ nested in a tuple). Thanks Donald Stufft.
+
+- also merge doc typo fixes, thanks Andy Dirnberger
+
+as usual, docs at http://pytest.org and upgrades via::
+
+ pip install -U pytest
+
+have fun,
+holger krekel
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.4.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.4.2.rst
new file mode 100644
index 0000000000..ab08b72aaf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.4.2.rst
@@ -0,0 +1,39 @@
+pytest-2.4.2: colorama on windows, plugin/tmpdir fixes
+===========================================================================
+
+pytest-2.4.2 is another bug-fixing release:
+
+- on Windows require colorama and a newer py lib so that py.io.TerminalWriter()
+ now uses colorama instead of its own ctypes hacks. (fixes issue365)
+ thanks Paul Moore for bringing it up.
+
+- fix "-k" matching of tests where "repr" and "attr" and other names would
+ cause wrong matches because of an internal implementation quirk
+ (don't ask) which is now properly implemented. fixes issue345.
+
+- avoid the tmpdir fixture creating overly long filenames, especially
+ when parametrization is used (issue354)
+
+- fix pytest-pep8 and pytest-flakes / pytest interactions
+ (collection code in the mark plugin assumed an item always
+ has a function, which is not true for those plugins).
+ Thanks Andi Zeidler.
+
+- introduce node.get_marker/node.add_marker API for plugins
+ like pytest-pep8 and pytest-flakes to avoid the messy
+ details of the node.keywords pseudo-dicts. Adapted
+ docs.
+
+- remove attempt to "dup" stdout at startup as it's icky.
+ the normal capturing should catch enough possibilities
+ of tests messing up standard FDs.
+
+- add pluginmanager.do_configure(config) as a link to
+ config.do_configure() for plugin-compatibility
+
+as usual, docs at http://pytest.org and upgrades via::
+
+ pip install -U pytest
+
+have fun,
+holger krekel
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.5.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.5.0.rst
new file mode 100644
index 0000000000..c6cdcdd8a8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.5.0.rst
@@ -0,0 +1,174 @@
+pytest-2.5.0: now down to ZERO reported bugs!
+===========================================================================
+
+pytest-2.5.0 is a big fixing release, the result of two community bug
+fixing days plus numerous additional contributions from many people and
+reporters. The release should be fully compatible to 2.4.2, existing
+plugins and test suites. We aim to maintain this level of ZERO reported
+bugs because it's no fun if your testing tool has bugs, is it? Under one
+condition, though: when submitting a bug report please provide
+clear information about the circumstances and a simple example which
+reproduces the problem.
+
+The issue tracker is of course not empty now. We have many remaining
+"enhancement" issues which we hopefully can tackle in 2014 with your
+help.
+
+For those who use older Python versions, please note that pytest is not
+automatically tested on python2.5 due to virtualenv, setuptools and tox
+not supporting it anymore. Manual verification shows that it mostly
+works fine but it's not going to be part of the automated release
+process and thus likely to break in the future.
+
+As usual, current docs are at
+
+ http://pytest.org
+
+and you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Particular thanks for helping with this release go to Anatoly Bubenkoff,
+Floris Bruynooghe, Marc Abramowitz, Ralph Schmitt, Ronny Pfannschmidt,
+Donald Stufft, James Lan, Rob Dennis, Jason R. Coombs, Mathieu Agopian,
+Virgil Dupras, Bruno Oliveira, Alex Gaynor and others.
+
+have fun,
+holger krekel
+
+
+2.5.0
+-----------------------------------
+
+- dropped python2.5 from automated release testing of pytest itself
+ which means it's probably going to break soon (but still works
+ with this release we believe).
+
+- simplified and fixed implementation for calling finalizers when
+ parametrized fixtures or function arguments are involved. finalization
+ is now performed lazily at setup time instead of in the "teardown phase".
+ While this might sound odd at first, it helps to ensure that we are
+ correctly handling setup/teardown even in complex code. User-level code
+ should not be affected unless it implements the pytest_runtest_teardown
+ hook and expects certain fixture instances to be torn down within it (very
+ unlikely and would have been unreliable anyway).
+
+- PR90: add --color=yes|no|auto option to force terminal coloring
+ mode ("auto" is default). Thanks Marc Abramowitz.
+
+- fix issue319 - correctly show unicode in assertion errors. Many
+ thanks to Floris Bruynooghe for the complete PR. Also means
+ we depend on py>=1.4.19 now.
+
+- fix issue396 - correctly sort and finalize class-scoped parametrized
+ tests independently from number of methods on the class.
+
+- refix issue323 in a better way -- parametrization should now never
+ cause Runtime Recursion errors because the underlying algorithm
+ for re-ordering tests per-scope/per-fixture is not recursive
+ anymore (it was tail-call recursive before, which could lead
+ to problems with more than 966 non-function scoped parameters).
+
+- fix issue290 - there is preliminary support now for parametrizing
+ with repeated same values (sometimes useful to test whether calling
+ a second time works the same as the first time).
+
+- close issue240 - document precisely how pytest module importing
+ works, discuss the two common test directory layouts, and how it
+ interacts with PEP420-namespace packages.
+
+- fix issue246 fix finalizer order to be LIFO on independent fixtures
+ depending on a parametrized higher-than-function scoped fixture.
+ (was quite some effort so please bear with the complexity of this sentence :)
+ Thanks Ralph Schmitt for the precise failure example.
+
+- fix issue244 by implementing a special index for parameters to only use
+ indices for parametrized test ids
+
+- fix issue287 by running all finalizers but saving the exception
+ from the first failing finalizer and re-raising it so teardown will
+ still have failed. We reraise the first failing exception because
+ it might be the cause for other finalizers to fail.
+
+- fix ordering when mock.patch or other standard decorator-wrappings
+ are used with test methods. This fixes issue346 and should
+ help with random "xdist" collection failures. Thanks to
+ Ronny Pfannschmidt and Donald Stufft for helping to isolate it.
+
+- fix issue357 - special case "-k" expressions to allow for
+ filtering with simple strings that are not valid python expressions.
+ Examples: "-k 1.3" matches all tests parametrized with 1.3.
+ "-k None" filters all tests that have "None" in their name
+ and conversely "-k 'not None'".
+ Previously these examples would raise syntax errors.
+
+- fix issue384 by removing the trial support code
+ since the unittest compat enhancements allow
+ trial to handle it on its own
+
+- don't hide an ImportError when importing a plugin produces one.
+ fixes issue375.
+
+- fix issue275 - allow usefixtures and autouse fixtures
+ for running doctest text files.
+
+- fix issue380 by making --resultlog only rely on longrepr instead
+ of the "reprcrash" attribute which only exists sometimes.
+
+- address issue122: allow @pytest.fixture(params=iterator) by exploding
+ into a list early on (a sketch follows at the end of these notes).
+
+- fix pexpect-3.0 compatibility for pytest's own tests.
+ (fixes issue386)
+
+- allow nested parametrize-value markers, thanks James Lan for the PR.
+
+- fix unicode handling with new monkeypatch.setattr(import_path, value)
+ API. Thanks Rob Dennis. Fixes issue371.
+
+- fix unicode handling with junitxml, fixes issue368.
+
+- In assertion rewriting mode on Python 2, fix the detection of coding
+ cookies. See issue #330.
+
+- make "--runxfail" turn imperative pytest.xfail calls into no ops
+ (it already did neutralize pytest.mark.xfail markers)
+
+- refine pytest / pkg_resources interactions: The AssertionRewritingHook
+ PEP302 compliant loader now registers itself with setuptools/pkg_resources
+ properly so that the pkg_resources.resource_stream method works properly.
+ Fixes issue366. Thanks for the investigations and full PR to Jason R. Coombs.
+
+- pytestconfig fixture is now session-scoped as it is the same object during the
+ whole test run. Fixes issue370.
+
+- avoid one surprising case of marker malfunction/confusion::
+
+ @pytest.mark.some(lambda arg: ...)
+ def test_function():
+
+ would not work correctly because pytest assumes @pytest.mark.some
+ gets a function to be decorated already. We now at least detect if this
+ arg is a lambda and thus the example will work. Thanks Alex Gaynor
+ for bringing it up.
+
+- xfail a test on pypy that checks wrong encoding/ascii (pypy does
+ not error out). fixes issue385.
+
+- internally make varnames() deal with a class's __init__,
+ although it's not needed by pytest itself atm. Also
+ fix caching. Fixes issue376.
+
+- fix issue221 - handle importing of namespace-package with no
+ __init__.py properly.
+
+- refactor internal FixtureRequest handling to avoid monkeypatching.
+ One of the positive user-facing effects is that the "request" object
+ can now be used in closures.
+
+- fixed version comparison in pytest.importorskip(modname, minverstring)
+
+- fix issue377 by clarifying in the nose-compat docs that pytest
+ does not duplicate the unittest-API into the "plain" namespace.
+
+- fix verbose reporting for @mock'd test functions
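+
+For illustration, a minimal sketch of the issue122 change above (passing an
+iterator to ``@pytest.fixture(params=...)``; fixture and test names are made
+up for the example)::
+
+    import pytest
+
+    # the generator is exploded into a list when the fixture is defined,
+    # producing three parametrized fixture instances: 0, 1 and 4
+    @pytest.fixture(params=(n * n for n in range(3)))
+    def square(request):
+        return request.param
+
+    def test_square_is_non_negative(square):
+        assert square >= 0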
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.5.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.5.1.rst
new file mode 100644
index 0000000000..ff39db2d52
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.5.1.rst
@@ -0,0 +1,46 @@
+pytest-2.5.1: fixes and new home page styling
+===========================================================================
+
+pytest is a mature Python testing tool with more than 1000 tests
+against itself, passing on many different interpreters and platforms.
+
+The 2.5.1 release maintains the "zero-reported-bugs" promise by fixing
+the three bugs reported since the last release a few days ago. It also
+features a new home page styling implemented by Tobias Bieniek, based on
+the flask theme from Armin Ronacher:
+
+ http://pytest.org
+
+If you have anything more to improve styling and docs,
+we'd be very happy to merge further pull requests.
+
+On the coding side, the release also contains a little enhancement to
+fixture decorators allowing you to directly influence the generation of test
+ids, thanks to Floris Bruynooghe. Other thanks for helping with
+this release go to Anatoly Bubenkoff and Ronny Pfannschmidt.
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+have fun and a nice remaining "bug-free" time of the year :)
+holger krekel
+
+2.5.1
+-----------------------------------
+
+- merge new documentation styling PR from Tobias Bieniek.
+
+- fix issue403: allow parametrize of multiple same-name functions within
+ a collection node. Thanks Andreas Kloeckner and Alex Gaynor for reporting
+ and analysis.
+
+- Allow parametrized fixtures to specify the IDs of their parameters by
+ adding an ids argument to pytest.fixture() and pytest.yield_fixture()
+ (a sketch follows at the end of these notes). Thanks Floris Bruynooghe.
+
+- fix issue404 by always using the binary xml escape in the junitxml
+ plugin. Thanks Ronny Pfannschmidt.
+
+- fix issue407: fix addoption docstring to point to argparse instead of
+ optparse. Thanks Daniel D. Wright.
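+
+For illustration, a minimal sketch of the new ``ids`` argument on
+parametrized fixtures mentioned above (fixture and test names are made up
+for the example)::
+
+    import pytest
+
+    # without ids the generated test ids would be backend0 / backend1;
+    # the ids argument gives them readable names instead
+    @pytest.fixture(params=[{"driver": "sqlite"}, {"driver": "postgres"}],
+                    ids=["sqlite", "postgres"])
+    def backend(request):
+        return request.param
+
+    def test_driver_is_set(backend):
+        assert "driver" in backend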
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.5.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.5.2.rst
new file mode 100644
index 0000000000..edc4da6e19
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.5.2.rst
@@ -0,0 +1,63 @@
+pytest-2.5.2: fixes
+===========================================================================
+
+pytest is a mature Python testing tool with more than 1000 tests
+against itself, passing on many different interpreters and platforms.
+
+The 2.5.2 release fixes a few bugs with two maybe-bugs remaining and
+actively being worked on (and waiting for the bug reporter's input).
+We also have a new contribution guide thanks to Piotr Banaszkiewicz
+and others.
+
+See docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to the following people who contributed to this release:
+
+ Anatoly Bubenkov
+ Ronny Pfannschmidt
+ Floris Bruynooghe
+ Bruno Oliveira
+ Andreas Pelme
+ Jurko Gospodnetić
+ Piotr Banaszkiewicz
+ Simon Liedtke
+ lakka
+ Lukasz Balcerzak
+ Philippe Muller
+ Daniel Hahler
+
+have fun,
+holger krekel
+
+2.5.2
+-----------------------------------
+
+- fix issue409 -- better interoperate with cx_freeze by not
+ trying to import from collections.abc which causes problems
+ for py27/cx_freeze. Thanks Wolfgang L. for reporting and tracking it down.
+
+- fixed docs and code to use "pytest" instead of "py.test" almost everywhere.
+ Thanks Jurko Gospodnetic for the complete PR.
+
+- fix issue425: mention at end of "py.test -h" that --markers
+ and --fixtures work according to specified test path (or current dir)
+
+- fix issue413: exceptions with unicode attributes are now printed
+ correctly also on python2 and with pytest-xdist runs. (the fix
+ requires py-1.4.20)
+
+- copy, cleanup and integrate py.io capture
+ from pylib 1.4.20.dev2 (rev 13d9af95547e)
+
+- address issue416: clarify docs as to conftest.py loading semantics
+
+- fix issue429: comparing byte strings with non-ascii chars in assert
+ expressions now work better. Thanks Floris Bruynooghe.
+
+- make capfd/capsys.capture private, it's unused and shouldn't be exposed
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.0.rst
new file mode 100644
index 0000000000..56fbd6cc1e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.0.rst
@@ -0,0 +1,153 @@
+pytest-2.6.0: shorter tracebacks, new warning system, test runner compat
+===========================================================================
+
+pytest is a mature Python testing tool with more than 1000 tests
+against itself, passing on many different interpreters and platforms.
+
+The 2.6.0 release should be drop-in backward compatible to 2.5.2 and
+fixes a number of bugs and brings some new features, mainly:
+
+- shorter tracebacks by default: only the first (test function) entry
+ and the last (failure location) entry are shown, the ones between
+ only in "short" format. Use ``--tb=long`` to get back the old
+ behaviour of showing "long" entries everywhere.
+
+- a new warning system which reports oddities during collection
+ and execution. For example, ignoring collecting Test* classes with an
+ ``__init__`` now produces a warning.
+
+- various improvements to nose/mock/unittest integration
+
+Note also that 2.6.0 departs from the "zero reported bugs" policy
+because it has been too hard to keep up with it, unfortunately.
+Instead we are, for now, going to focus on "upvoted" issues in
+the https://bitbucket.org/pytest-dev/pytest/issues?status=new&status=open&sort=-votes
+issue tracker.
+
+See docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed, among them:
+
+ Benjamin Peterson
+ Jurko Gospodnetić
+ Floris Bruynooghe
+ Marc Abramowitz
+ Marc Schlaich
+ Trevor Bekolay
+ Bruno Oliveira
+ Alex Groenholm
+
+have fun,
+holger krekel
+
+2.6.0
+-----------------------------------
+
+- fix issue537: Avoid importing old assertion reinterpretation code by default.
+ Thanks Benjamin Peterson.
+
+- fix issue364: shorten and enhance tracebacks representation by default.
+ The new "--tb=auto" option (default) will only display long tracebacks
+ for the first and last entry. You can get the old behaviour of printing
+ all entries as long entries with "--tb=long". Also short entries by
+ default are now printed very similarly to "--tb=native" ones.
+
+- fix issue514: teach assertion reinterpretation about private class attributes
+ Thanks Benjamin Peterson.
+
+- change -v output to include full node IDs of tests. Users can copy
+ a node ID from a test run, including line number, and use it as a
+ positional argument in order to run only a single test.
+
+- fix issue 475: fail early and comprehensibly when calling
+ pytest.raises with a wrong exception type.
+
+- fix issue516: tell in getting-started about current dependencies.
+
+- cleanup setup.py a bit and specify supported versions. Thanks Jurko
+ Gospodnetic for the PR.
+
+- change XPASS colour to yellow rather than red when tests are run
+ with -v.
+
+- fix issue473: work around mock putting an unbound method into a class
+ dict when double-patching.
+
+- fix issue498: if a fixture finalizer fails, make sure that
+ the fixture is still invalidated.
+
+- fix issue453: the result of the pytest_assertrepr_compare hook now gets
+ its newlines escaped so that format_exception does not blow up.
+
+- internal new warning system: pytest will now produce warnings when
+ it detects oddities in your test collection or execution.
+ Warnings are ultimately sent to a new pytest_logwarning hook which is
+ currently only implemented by the terminal plugin which displays
+ warnings in the summary line and shows more details when -rw (report on
+ warnings) is specified.
+
+- change skips into warnings for test classes with an __init__ and
+ callables in test modules which look like a test but are not functions.
+
+- fix issue436: improved finding of initial conftest files from command
+ line arguments by using the result of parse_known_args rather than
+ the previous flaky heuristics. Thanks Marc Abramowitz for tests
+ and initial fixing approaches in this area.
+
+- fix issue #479: properly handle nose/unittest(2) SkipTest exceptions
+ during collection/loading of test modules. Thanks to Marc Schlaich
+ for the complete PR.
+
+- fix issue490: include pytest_load_initial_conftests in documentation
+ and improve docstring.
+
+- fix issue472: clarify that ``pytest.config.getvalue()`` cannot work
+ if it's triggered ahead of command line parsing.
+
+- merge PR123: improved integration with mock.patch decorator on tests.
+
+- fix issue412: messing with stdout/stderr FD-level streams is now
+ captured without crashes.
+
+- fix issue483: trial/py33 works now properly. Thanks Daniel Grana for PR.
+
+- improve example for pytest integration with "python setup.py test"
+ which now has a generic "-a" or "--pytest-args" option where you
+ can pass additional options as a quoted string. Thanks Trevor Bekolay.
+
+- simplified internal capturing mechanism and made it more robust
+ against tests or setups changing FD1/FD2, also better integrated
+ now with pytest.pdb() in single tests.
+
+- improvements to pytest's own test-suite leakage detection, courtesy of PRs
+ from Marc Abramowitz
+
+- fix issue492: avoid leak in test_writeorg. Thanks Marc Abramowitz.
+
+- fix issue493: don't run tests in doc directory with ``python setup.py test``
+ (use tox -e doctesting for that)
+
+- fix issue486: better reporting and handling of early conftest loading failures
+
+- some cleanup and simplification of internal conftest handling.
+
+- work a bit harder to break reference cycles when catching exceptions.
+ Thanks Jurko Gospodnetic.
+
+- fix issue443: fix skip examples to use proper comparison. Thanks Alex
+ Groenholm.
+
+- support nose-style ``__test__`` attribute on modules, classes and
+ functions, including unittest-style classes. If set to False, the
+ test will not be collected (a sketch follows at the end of these notes).
+
+- fix issue512: show "<notset>" for arguments which might not be set
+ in monkeypatch plugin. Improves output in documentation.
+
+- avoid importing "py.test" (an old alias module for "pytest")
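+
+For illustration, a minimal sketch of the nose-style ``__test__`` attribute
+support mentioned above (class and test names are made up for the example)::
+
+    # content of test_example.py
+
+    class TestBase:
+        # setting __test__ = False tells pytest not to collect this
+        # class or anything defined on it
+        __test__ = False
+
+        def test_should_not_run(self):
+            assert 0
+
+    def test_collected():
+        # collected and run as usual
+        assert True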
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.1.rst
new file mode 100644
index 0000000000..7469c488e5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.1.rst
@@ -0,0 +1,58 @@
+pytest-2.6.1: fixes and new xfail feature
+===========================================================================
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+The 2.6.1 release is drop-in compatible to 2.5.2 and actually fixes some
+regressions introduced with 2.6.0. It also brings a little feature
+to the xfail marker which now recognizes expected exceptions,
+see the CHANGELOG below.
+
+See docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed, among them:
+
+ Floris Bruynooghe
+ Bruno Oliveira
+ Nicolas Delaby
+
+have fun,
+holger krekel
+
+Changes 2.6.1
+=================
+
+- No longer show line numbers in the --verbose output; the output is now
+ purely the nodeid. The line number is still shown in failure reports.
+ Thanks Floris Bruynooghe.
+
+- fix issue437 where assertion rewriting could cause pytest-xdist worker nodes
+ to collect different tests. Thanks Bruno Oliveira.
+
+- fix issue555: add "errors" attribute to capture-streams to satisfy
+ some distutils and possibly other code accessing sys.stdout.errors.
+
+- fix issue547: capsys/capfd also work when output capturing ("-s") is disabled.
+
+- address issue170: allow pytest.mark.xfail(...) to specify expected exceptions via
+ an optional "raises=EXC" argument where EXC can be a single exception
+ or a tuple of exception classes (a sketch follows at the end of these
+ notes). Thanks David Mohr for the complete PR.
+
+- fix integration of pytest with unittest.mock.patch decorator when
+ it uses the "new" argument. Thanks Nicolas Delaby for test and PR.
+
+- fix issue with detecting conftest files if the arguments contain
+ "::" node id specifications (copy pasted from "-v" output)
+
+- fix issue544 by only removing "@NUM" at the end of "::" separated parts
+ and if the part has a ".py" extension
+
+- don't use py.std import helper, rather import things directly.
+ Thanks Bruno Oliveira.
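+
+For illustration, a minimal sketch of the issue170 change above (the
+``raises=`` argument to ``pytest.mark.xfail``; the function under test is
+made up for the example)::
+
+    import pytest
+
+    def parse_port(value):
+        # placeholder implementation known to blow up on bad input
+        return int(value)
+
+    # reported as xfailed only if ValueError is actually raised;
+    # any other exception is reported as a regular failure
+    @pytest.mark.xfail(raises=ValueError)
+    def test_parse_port_rejects_garbage():
+        parse_port("not-a-number")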
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.2.rst
new file mode 100644
index 0000000000..9c3b7d96b0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.2.rst
@@ -0,0 +1,51 @@
+pytest-2.6.2: few fixes and cx_freeze support
+===========================================================================
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+This release is drop-in compatible to 2.5.2 and 2.6.X. It also
+brings support for including pytest with cx_freeze or similar
+freezing tools into your single-file app distribution. For details
+see the CHANGELOG below.
+
+See docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed, among them:
+
+ Floris Bruynooghe
+ Benjamin Peterson
+ Bruno Oliveira
+
+have fun,
+holger krekel
+
+2.6.2
+-----------
+
+- Added function pytest.freeze_includes(), which makes it easy to embed
+ pytest into executables using tools like cx_freeze.
+ See docs for examples and rationale, and the sketch at the end of these
+ notes. Thanks Bruno Oliveira.
+
+- Improve assertion rewriting cache invalidation precision.
+
+- fixed issue561: adapt autouse fixture example for python3.
+
+- fixed issue453: assertion rewriting issue with __repr__ containing
+ "\n{", "\n}" and "\n~".
+
+- fix issue560: correctly display code if an "else:" or "finally:" is
+ followed by statements on the same line.
+
+- Fix example in monkeypatch documentation, thanks t-8ch.
+
+- fix issue572: correct tmpdir doc example for python3.
+
+- Do not mark as universal wheel because Python 2.6 is different from
+ other builds due to the extra argparse dependency. Fixes issue566.
+ Thanks sontek.
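+
+As a rough sketch of how ``pytest.freeze_includes()`` (mentioned above) can
+be used together with a freezing tool -- the cx_Freeze project layout and
+keywords shown here are assumptions for the example, not part of pytest::
+
+    # setup.py of a frozen application that should be able to run
+    # its own test suite
+    import pytest
+    from cx_Freeze import setup, Executable
+
+    setup(
+        name="myapp",
+        executables=[Executable("myapp.py")],
+        options={
+            "build_exe": {
+                # the list of pytest/py modules that need to be
+                # bundled into the frozen executable
+                "includes": pytest.freeze_includes(),
+            },
+        },
+    )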
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.3.rst
new file mode 100644
index 0000000000..56973a2b2f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.6.3.rst
@@ -0,0 +1,51 @@
+pytest-2.6.3: fixes and little improvements
+===========================================================================
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+This release is drop-in compatible to 2.5.2 and 2.6.X.
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed, among them:
+
+ Floris Bruynooghe
+ Oleg Sinyavskiy
+ Uwe Schmitt
+ Charles Cloud
+ Wolfgang Schnerring
+
+have fun,
+holger krekel
+
+Changes 2.6.3
+======================
+
+- fix issue575: xunit-xml was reporting collection errors as failures
+ instead of errors, thanks Oleg Sinyavskiy.
+
+- fix issue582: fix setuptools example, thanks Laszlo Papp and Ronny
+ Pfannschmidt.
+
+- Fix infinite recursion bug when pickling capture.EncodedFile, thanks
+ Uwe Schmitt.
+
+- fix issue589: fix bad interaction with numpy and others when showing
+ exceptions. Check for the precise "maximum recursion depth exceeded" exception
+ instead of presuming any RuntimeError is that one (implemented in py
+ dep). Thanks Charles Cloud for analysing the issue.
+
+- fix conftest related fixture visibility issue: when running with a
+ CWD outside of a test package pytest would get fixture discovery wrong.
+ Thanks to Wolfgang Schnerring for figuring out a reproducible example.
+
+- Introduce pytest_enter_pdb hook (needed e.g. by pytest_timeout to cancel the
+ timeout when interactively entering pdb). Thanks Wolfgang Schnerring.
+
+- check xfail/skip also with non-python function test items. Thanks
+ Floris Bruynooghe.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.7.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.7.0.rst
new file mode 100644
index 0000000000..2840178a07
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.7.0.rst
@@ -0,0 +1,100 @@
+pytest-2.7.0: fixes, features, speed improvements
+===========================================================================
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+This release is supposed to be drop-in compatible to 2.6.X.
+
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed, among them:
+
+ Anatoly Bubenkoff
+ Floris Bruynooghe
+ Brianna Laugher
+ Eric Siegerman
+ Daniel Hahler
+ Charles Cloud
+ Tom Viner
+ Holger Peters
+ Ldiary Translations
+ almarklein
+
+have fun,
+holger krekel
+
+2.7.0 (compared to 2.6.4)
+-----------------------------
+
+- fix issue435: make reload() work when assert rewriting is active.
+ Thanks Daniel Hahler.
+
+- fix issue616: conftest.py files and their contained fixtures are now
+ properly considered for visibility, independently from the exact
+ current working directory and test arguments that are used.
+ Many thanks to Eric Siegerman and his PR235 which contains
+ systematic tests for conftest visibility and now passes.
+ This change also introduces the concept of a ``rootdir`` which
+ is printed as a new pytest header and documented in the pytest
+ customize web page.
+
+- change reporting of "diverted" tests, i.e. tests that are collected
+ in one file but actually come from another (e.g. when tests in a test class
+ come from a base class in a different file). We now show the nodeid
+ and indicate via a postfix the other file.
+
+- add ability to set command line options by environment variable PYTEST_ADDOPTS.
+
+- added documentation on the new pytest-dev teams on bitbucket and
+ github. See https://pytest.org/en/stable/contributing.html .
+ Thanks to Anatoly for pushing and initial work on this.
+
+- fix issue650: new option ``--doctest-ignore-import-errors`` which
+ will turn import errors in doctests into skips. Thanks Charles Cloud
+ for the complete PR.
+
+- fix issue655: work around different ways that cause python2/3
+ to leak sys.exc_info into fixtures/tests causing failures in 3rd party code
+
+- fix issue615: assertion rewriting did not correctly escape % signs
+ when formatting boolean operations, which tripped over mixing
+ booleans with modulo operators. Thanks to Tom Viner for the report,
+ triaging and fix.
+
+- implement issue351: add ability to specify parametrize ids as a callable
+ to generate custom test ids (a sketch follows at the end of these notes).
+ Thanks Brianna Laugher for the idea and implementation.
+
+- introduce and document new hookwrapper mechanism useful for plugins
+ which want to wrap the execution of certain hooks for their purposes.
+ This supersedes the undocumented ``__multicall__`` protocol which
+ pytest itself and some external plugins use. Note that pytest-2.8
+ is scheduled to drop supporting the old ``__multicall__``
+ and only support the hookwrapper protocol.
+
+- majorly speed up invocation of plugin hooks
+
+- use hookwrapper mechanism in builtin pytest plugins.
+
+- add a doctest ini option for doctest flags, thanks Holger Peters.
+
+- add note to docs that if you want to mark a parameter and the
+ parameter is a callable, you also need to pass in a reason to disambiguate
+ it from the "decorator" case. Thanks Tom Viner.
+
+- "python_classes" and "python_functions" options now support glob-patterns
+ for test discovery, as discussed in issue600. Thanks Ldiary Translations.
+
+- allow overriding parametrized fixtures with non-parametrized ones and vice versa (bubenkoff).
+
+- fix issue463: raise specific error for 'parameterize' misspelling (pfctdayelise).
+
+- On failure, the ``sys.last_value``, ``sys.last_type`` and
+ ``sys.last_traceback`` are set, so that a user can inspect the error
+ via postmortem debugging (almarklein).
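+
+For illustration, a minimal sketch of the issue351 feature above (specifying
+parametrize ids as a callable; the id function and test are made up for the
+example)::
+
+    import pytest
+
+    def idfn(val):
+        # build a readable id string from each parameter value
+        return "port-%d" % val
+
+    @pytest.mark.parametrize("port", [80, 443, 8080], ids=idfn)
+    def test_port_is_valid(port):
+        assert 0 < port < 65536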
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.7.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.7.1.rst
new file mode 100644
index 0000000000..5110c085e0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.7.1.rst
@@ -0,0 +1,58 @@
+pytest-2.7.1: bug fixes
+=======================
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+This release is supposed to be drop-in compatible to 2.7.0.
+
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+ Bruno Oliveira
+ Holger Krekel
+ Ionel Maries Cristian
+ Floris Bruynooghe
+
+Happy testing,
+The py.test Development Team
+
+
+2.7.1 (compared to 2.7.0)
+-------------------------
+
+- fix issue731: do not get confused by the braces which may be present
+ and unbalanced in an object's repr while collapsing False
+ explanations. Thanks Carl Meyer for the report and test case.
+
+- fix issue553: properly handle inspect.getsourcelines failures in
+ FixtureLookupError, which would lead to an internal error,
+ obfuscating the original problem. Thanks talljosh for initial
+ diagnose/patch and Bruno Oliveira for final patch.
+
+- fix issue660: properly report scope-mismatch-access errors
+ independently from ordering of fixture arguments. Also
+ avoid the pytest internal traceback which does not provide
+ information to the user. Thanks Holger Krekel.
+
+- streamlined and documented release process. Also all versions
+ (in setup.py and documentation generation) are now read
+ from _pytest/__init__.py. Thanks Holger Krekel.
+
+- fixed docs to remove the notion that yield-fixtures are experimental.
+ They are here to stay :) Thanks Bruno Oliveira.
+
+- Support building wheels by using environment markers for the
+ requirements. Thanks Ionel Maries Cristian.
+
+- fixed regression to 2.6.4 which surfaced e.g. in lost stdout capture printing
+ when tests raised SystemExit. Thanks Holger Krekel.
+
+- reintroduced _pytest fixture of the pytester plugin which is used
+ at least by pytest-xdist.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.7.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.7.2.rst
new file mode 100644
index 0000000000..93e5b64eee
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.7.2.rst
@@ -0,0 +1,57 @@
+pytest-2.7.2: bug fixes
+=======================
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+This release is supposed to be drop-in compatible to 2.7.1.
+
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+ Bruno Oliveira
+ Floris Bruynooghe
+ Punyashloka Biswal
+ Aron Curzon
+ Benjamin Peterson
+ Thomas De Schampheleire
+ Edison Gustavo Muenz
+ Holger Krekel
+
+Happy testing,
+The py.test Development Team
+
+
+2.7.2 (compared to 2.7.1)
+-----------------------------
+
+- fix issue767: pytest.raises value attribute does not contain the exception
+ instance on Python 2.6. Thanks Eric Siegerman for providing the test
+ case and Bruno Oliveira for PR.
+
+- Automatically create directory for junitxml and results log.
+ Thanks Aron Curzon.
+
+- fix issue713: JUnit XML reports for doctest failures.
+ Thanks Punyashloka Biswal.
+
+- fix issue735: assertion failures on debug versions of Python 3.4+
+ Thanks Benjamin Peterson.
+
+- fix issue114: skipif marker reports to internal skipping plugin;
+ Thanks Floris Bruynooghe for reporting and Bruno Oliveira for the PR.
+
+- fix issue748: unittest.SkipTest reports to internal pytest unittest plugin.
+ Thanks Thomas De Schampheleire for reporting and Bruno Oliveira for the PR.
+
+- fix issue718: failed to create representation of sets containing unsortable
+ elements in python 2. Thanks Edison Gustavo Muenz
+
+- fix issue756, fix issue752 (and similar issues): depend on py-1.4.29
+ which has a refined algorithm for traceback generation.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.2.rst
new file mode 100644
index 0000000000..e472633885
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.2.rst
@@ -0,0 +1,44 @@
+pytest-2.8.2: bug fixes
+=======================
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+This release is supposed to be drop-in compatible to 2.8.1.
+
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+ Bruno Oliveira
+ Demian Brecht
+ Florian Bruhin
+ Ionel Cristian Mărieș
+ Raphael Pierzina
+ Ronny Pfannschmidt
+ holger krekel
+
+Happy testing,
+The py.test Development Team
+
+
+2.8.2 (compared to 2.7.2)
+-----------------------------
+
+- fix #1085: proper handling of encoding errors when passing encoded byte
+ strings to pytest.parametrize in Python 2.
+ Thanks Themanwithoutaplan for the report and Bruno Oliveira for the PR.
+
+- fix #1087: handling SystemError when passing empty byte strings to
+ pytest.parametrize in Python 3.
+ Thanks Paul Kehrer for the report and Bruno Oliveira for the PR.
+
+- fix #995: fixed internal error when filtering tracebacks where one entry
+ was generated by an exec() statement.
+ Thanks Daniel Hahler, Ashley C Straw, Philippe Gauthier and Pavel Savchenko
+ for contributing and Bruno Oliveira for the PR.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.3.rst
new file mode 100644
index 0000000000..3f357252bb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.3.rst
@@ -0,0 +1,58 @@
+pytest-2.8.3: bug fixes
+=======================
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+This release is supposed to be drop-in compatible to 2.8.2.
+
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+ Bruno Oliveira
+ Florian Bruhin
+ Gabe Hollombe
+ Gabriel Reis
+ Hartmut Goebel
+ John Vandenberg
+ Lee Kamentsky
+ Michael Birtwell
+ Raphael Pierzina
+ Ronny Pfannschmidt
+ William Martin Stewart
+
+Happy testing,
+The py.test Development Team
+
+
+2.8.3 (compared to 2.8.2)
+-----------------------------
+
+- fix #1169: add __name__ attribute to testcases in TestCaseFunction to
+ support the @unittest.skip decorator on functions and methods.
+ Thanks Lee Kamentsky for the PR.
+
+- fix #1035: collecting tests if test module level obj has __getattr__().
+ Thanks Suor for the report and Bruno Oliveira / Tom Viner for the PR.
+
+- fix #331: don't collect tests if their failure cannot be reported correctly
+ e.g. they are a callable instance of a class.
+
+- fix #1133: fixed internal error when filtering tracebacks where one entry
+ belongs to a file which is no longer available.
+ Thanks Bruno Oliveira for the PR.
+
+- enhancement made to highlight in red the name of the failing tests so
+ they stand out in the output.
+ Thanks Gabriel Reis for the PR.
+
+- add more talks to the documentation
+- extend documentation on the --ignore cli option
+- use pytest-runner for setuptools integration
+- minor fixes for interaction with OS X El Capitan system integrity protection (thanks Florian)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.4.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.4.rst
new file mode 100644
index 0000000000..adbdecc87e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.4.rst
@@ -0,0 +1,52 @@
+pytest-2.8.4
+============
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+This release is supposed to be drop-in compatible to 2.8.2.
+
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+ Bruno Oliveira
+ Florian Bruhin
+ Jeff Widman
+ Mehdy Khoshnoody
+ Nicholas Chammas
+ Ronny Pfannschmidt
+ Tim Chan
+
+
+Happy testing,
+The py.test Development Team
+
+
+2.8.4 (compared to 2.8.3)
+-----------------------------
+
+- fix #1190: ``deprecated_call()`` now works when the deprecated
+ function has been already called by another test in the same
+ module. Thanks Mikhail Chernykh for the report and Bruno Oliveira for the
+ PR.
+
+- fix #1198: ``--pastebin`` option now works on Python 3. Thanks
+ Mehdy Khoshnoody for the PR.
+
+- fix #1219: ``--pastebin`` now works correctly when captured output contains
+ non-ascii characters. Thanks Bruno Oliveira for the PR.
+
+- fix #1204: another error when collecting with a nasty __getattr__().
+ Thanks Florian Bruhin for the PR.
+
+- fix the summary printed when no tests did run.
+ Thanks Florian Bruhin for the PR.
+
+- a number of documentation modernizations wrt good practices.
+ Thanks Bruno Oliveira for the PR.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.5.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.5.rst
new file mode 100644
index 0000000000..c5343d1ea7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.5.rst
@@ -0,0 +1,39 @@
+pytest-2.8.5
+============
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+This release is supposed to be drop-in compatible to 2.8.4.
+
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+ Alex Gaynor
+ aselus-hub
+ Bruno Oliveira
+ Ronny Pfannschmidt
+
+
+Happy testing,
+The py.test Development Team
+
+
+2.8.5 (compared to 2.8.4)
+-------------------------
+
+- fix #1243: fixed issue where class attributes injected during collection could break pytest.
+ PR by Alexei Kozlenok, thanks Ronny Pfannschmidt and Bruno Oliveira for the review and help.
+
+- fix #1074: precompute junitxml chunks instead of storing the whole tree in objects
+ Thanks Bruno Oliveira for the report and Ronny Pfannschmidt for the PR
+
+- fix #1238: fix ``pytest.deprecated_call()`` receiving multiple arguments
+ (Regression introduced in 2.8.4). Thanks Alex Gaynor for the report and
+ Bruno Oliveira for the PR.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.6.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.6.rst
new file mode 100644
index 0000000000..5d6565b16a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.6.rst
@@ -0,0 +1,67 @@
+pytest-2.8.6
+============
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+This release is supposed to be drop-in compatible to 2.8.5.
+
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+ AMiT Kumar
+ Bruno Oliveira
+ Erik M. Bray
+ Florian Bruhin
+ Georgy Dyuldin
+ Jeff Widman
+ Kartik Singhal
+ Loïc Estève
+ Manu Phatak
+ Peter Demin
+ Rick van Hattem
+ Ronny Pfannschmidt
+ Ulrich Petri
+ foxx
+
+
+Happy testing,
+The py.test Development Team
+
+
+2.8.6 (compared to 2.8.5)
+-------------------------
+
+- fix #1259: allow for double nodeids in junitxml;
+ this was a regression breaking plugin combinations
+ like pytest-pep8 + pytest-flakes
+
+- Workaround for exception that occurs in pyreadline when using
+ ``--pdb`` with standard I/O capture enabled.
+ Thanks Erik M. Bray for the PR.
+
+- fix #900: Better error message in case the target of a ``monkeypatch`` call
+ raises an ``ImportError``.
+
+- fix #1292: monkeypatch calls (setattr, setenv, etc.) are now O(1).
+ Thanks David R. MacIver for the report and Bruno Oliveira for the PR.
+
+- fix #1223: captured stdout and stderr are now properly displayed before
+ entering pdb when ``--pdb`` is used instead of being thrown away.
+ Thanks Cal Leeming for the PR.
+
+- fix #1305: pytest warnings emitted during ``pytest_terminal_summary`` are now
+ properly displayed.
+ Thanks Ionel Maries Cristian for the report and Bruno Oliveira for the PR.
+
+- fix #628: fixed internal UnicodeDecodeError when doctests contain unicode.
+ Thanks Jason R. Coombs for the report and Bruno Oliveira for the PR.
+
+- fix #1334: Add captured stdout to jUnit XML report on setup error.
+ Thanks Georgy Dyuldin for the PR.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.7.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.7.rst
new file mode 100644
index 0000000000..8236a09666
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.8.7.rst
@@ -0,0 +1,31 @@
+pytest-2.8.7
+============
+
+This is a hotfix release to solve a regression
+in the builtin monkeypatch plugin that got introduced in 2.8.6.
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+This release is supposed to be drop-in compatible to 2.8.5.
+
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+ Ronny Pfannschmidt
+
+
+Happy testing,
+The py.test Development Team
+
+
+2.8.7 (compared to 2.8.6)
+-------------------------
+
+- fix #1338: use predictable object resolution for monkeypatch
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.9.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.9.0.rst
new file mode 100644
index 0000000000..3aea08cb22
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.9.0.rst
@@ -0,0 +1,134 @@
+pytest-2.9.0
+============
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+ Anatoly Bubenkov
+ Bruno Oliveira
+ Buck Golemon
+ David Vierra
+ Florian Bruhin
+ Galaczi Endre
+ Georgy Dyuldin
+ Lukas Bednar
+ Luke Murphy
+ Marcin Biernat
+ Matt Williams
+ Michael Aquilina
+ Raphael Pierzina
+ Ronny Pfannschmidt
+ Ryan Wooden
+ Tiemo Kieft
+ TomV
+ holger krekel
+ jab
+
+
+Happy testing,
+The py.test Development Team
+
+
+2.9.0 (compared to 2.8.7)
+-------------------------
+
+**New Features**
+
+* New ``pytest.mark.skip`` mark, which unconditionally skips marked tests.
+ Thanks :user:`MichaelAquilina` for the complete PR (:pull:`1040`).
+
+* ``--doctest-glob`` may now be passed multiple times in the command-line.
+ Thanks :user:`jab` and :user:`nicoddemus` for the PR.
+
+* New ``-rp`` and ``-rP`` reporting options give the summary and full output
+ of passing tests, respectively. Thanks to :user:`codewarrior0` for the PR.
+
+* ``pytest.mark.xfail`` now has a ``strict`` option which makes ``XPASS``
+ tests fail the test suite, defaulting to ``False``. There's also an
+ ``xfail_strict`` ini option that can be used to configure it project-wide
+ (a sketch follows at the end of this announcement).
+ Thanks :user:`rabbbit` for the request and :user:`nicoddemus` for the PR (:issue:`1355`).
+
+* ``Parser.addini`` now supports options of type ``bool``. Thanks
+ :user:`nicoddemus` for the PR.
+
+* New ``ALLOW_BYTES`` doctest option strips ``b`` prefixes from byte strings
+ in doctest output (similar to ``ALLOW_UNICODE``).
+ Thanks :user:`jaraco` for the request and :user:`nicoddemus` for the PR (:issue:`1287`).
+
+* give a hint on KeyboardInterrupt to use the --fulltrace option to show the errors;
+ this fixes :issue:`1366`.
+ Thanks to :user:`hpk42` for the report and :user:`RonnyPfannschmidt` for the PR.
+
+* catch IndexError exceptions when getting exception source location. This fixes
+ a pytest internal error for dynamically generated code (fixtures and tests)
+ where source lines are intentionally fake
+
+**Changes**
+
+* **Important**: `py.code <https://pylib.readthedocs.io/en/stable/code.html>`_ has been
+ merged into the ``pytest`` repository as ``pytest._code``. This decision
+ was made because ``py.code`` had very few uses outside ``pytest`` and the
+ fact that it was in a different repository made it difficult to fix bugs on
+ its code in a timely manner. The team hopes with this to be able to better
+ refactor out and improve that code.
+ This change shouldn't affect users, but it is useful to make users aware
+ of it in case they encounter any strange behavior.
+
+ Keep in mind that the code for ``pytest._code`` is **private** and
+ **experimental**, so you definitely should not import it explicitly!
+
+ Please note that the original ``py.code`` is still available in
+ `pylib <https://pylib.readthedocs.io/en/stable/>`_.
+
+* ``pytest_enter_pdb`` now optionally receives the pytest config object.
+ Thanks :user:`nicoddemus` for the PR.
+
+* Removed code and documentation for Python 2.5 or lower versions,
+ including removal of the obsolete ``_pytest.assertion.oldinterpret`` module.
+ Thanks :user:`nicoddemus` for the PR (:issue:`1226`).
+
+* Comparisons now always show up in full when ``CI`` or ``BUILD_NUMBER`` is
+ found in the environment, even when -vv isn't used.
+ Thanks :user:`The-Compiler` for the PR.
+
+* ``--lf`` and ``--ff`` now support long names: ``--last-failed`` and
+ ``--failed-first`` respectively.
+ Thanks :user:`MichaelAquilina` for the PR.
+
+* Added expected exceptions to pytest.raises fail message
+
+* Collection only displays progress ("collecting X items") when in a terminal.
+ This avoids cluttering the output when using ``--color=yes`` to obtain
+ colors in CI integration systems (:issue:`1397`).
+
+**Bug Fixes**
+
+* The ``-s`` and ``-c`` options should now work under ``xdist``;
+ ``Config.fromdictargs`` now represents its input much more faithfully.
+ Thanks to :user:`bukzor` for the complete PR (:issue:`680`).
+
+* Fix (:issue:`1290`): support Python 3.5's ``@`` operator in assertion rewriting.
+ Thanks :user:`Shinkenjoe` for report with test case and :user:`tomviner` for the PR.
+
+* Fix formatting utf-8 explanation messages (:issue:`1379`).
+ Thanks :user:`biern` for the PR.
+
+* Fix `traceback style docs`_ to describe all of the available options
+ (auto/long/short/line/native/no), with ``auto`` being the default since v2.6.
+ Thanks :user:`hackebrot` for the PR.
+
+* Fix (:issue:`1422`): junit record_xml_property doesn't allow multiple records
+ with same name.
+
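+For illustration, a minimal sketch of the new ``pytest.mark.skip`` mark and
+the ``strict`` xfail option listed under **New Features** above (test names
+and reasons are made up for the example)::
+
+    import pytest
+
+    @pytest.mark.skip(reason="unconditionally skipped, no condition needed")
+    def test_not_ready():
+        assert 0  # never executed
+
+    # with strict=True an unexpected pass (XPASS) fails the test suite
+    # instead of merely being reported as xpassed
+    @pytest.mark.xfail(strict=True, reason="known bug in the parser")
+    def test_known_bug():
+        assert 0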
+
+.. _`traceback style docs`: https://pytest.org/en/stable/how-to/output.html#modifying-python-traceback-printing
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.9.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.9.1.rst
new file mode 100644
index 0000000000..6a627ad3cd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.9.1.rst
@@ -0,0 +1,57 @@
+pytest-2.9.1
+============
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+ Bruno Oliveira
+ Daniel Hahler
+ Dmitry Malinovsky
+ Florian Bruhin
+ Floris Bruynooghe
+ Matt Bachmann
+ Ronny Pfannschmidt
+ TomV
+ Vladimir Bolshakov
+ Zearin
+ palaviv
+
+
+Happy testing,
+The py.test Development Team
+
+
+2.9.1 (compared to 2.9.0)
+-------------------------
+
+**Bug Fixes**
+
+* Improve error message when a plugin fails to load.
+ Thanks :user:`nicoddemus` for the PR.
+
+* Fix (:issue:`1178`):
+ ``pytest.fail`` with non-ascii characters raises an internal pytest error.
+ Thanks :user:`nicoddemus` for the PR.
+
+* Fix (:issue:`469`): junit parses report.nodeid incorrectly, when params IDs
+ contain ``::``. Thanks :user:`tomviner` for the PR (:pull:`1431`).
+
+* Fix (:issue:`578`): SyntaxErrors
+ containing non-ascii lines at the point of failure generated an internal
+ py.test error.
+ Thanks :user:`asottile` for the report and :user:`nicoddemus` for the PR.
+
+* Fix (:issue:`1437`): when a bytestring regex pattern is passed to parametrize,
+ attempt to decode it as utf-8, ignoring errors (see the example after this list).
+
+* Fix (:issue:`649`): parametrized test nodes cannot be specified to run on the command line.
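+
+The bytestring entry above is about passing ``bytes`` values to parametrize. A
+small, hypothetical illustration (the test, names, and pattern are invented);
+per the entry, pytest decodes the bytes as utf-8, ignoring errors, instead of
+raising::
+
+    import re
+
+    import pytest
+
+    # A bytes parameter; per the entry above, pytest decodes it as utf-8,
+    # ignoring undecodable bytes, rather than erroring out.
+    @pytest.mark.parametrize("pattern", [b"foo.*\xe2\x98\x83"])
+    def test_bytes_pattern(pattern):
+        assert re.match(pattern, b"foobar \xe2\x98\x83") is not None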
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.9.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.9.2.rst
new file mode 100644
index 0000000000..2dc82a1117
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-2.9.2.rst
@@ -0,0 +1,65 @@
+pytest-2.9.2
+============
+
+pytest is a mature Python testing tool with more than 1100 tests
+against itself, passing on many different interpreters and platforms.
+
+See below for the changes and see docs at:
+
+ http://pytest.org
+
+As usual, you can upgrade from pypi via::
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+ Adam Chainz
+ Benjamin Dopplinger
+ Bruno Oliveira
+ Florian Bruhin
+ John Towler
+ Martin Prusse
+ Meng Jue
+ MengJueM
+ Omar Kohl
+ Quentin Pradet
+ Ronny Pfannschmidt
+ Thomas Güttler
+ TomV
+ Tyler Goodlet
+
+
+Happy testing,
+The py.test Development Team
+
+
+2.9.2 (compared to 2.9.1)
+---------------------------
+
+**Bug Fixes**
+
+* Fix :issue:`510`: skip tests where one parametrize dimension was empty.
+ Thanks Alex Stapleton for the report and :user:`RonnyPfannschmidt` for the PR.
+
+* Fix ``xfail`` not working with the ``condition`` keyword argument.
+ Thanks :user:`astraw38` for reporting the issue (:issue:`1496`) and :user:`tomviner`
+ for the PR (:pull:`1524`).
+
+* Fix a win32 path issue when passing a custom config file with an absolute path
+ via ``pytest.main("-c your_absolute_path")`` (see the sketch after this list).
+
+* Fix maximum recursion depth detection when the raised error class is not aware
+ of unicode/encoded bytes.
+ Thanks :user:`prusse-martin` for the PR (:pull:`1506`).
+
+* Fix ``pytest.mark.skip`` mark when used in strict mode.
+ Thanks :user:`pquentin` for the PR and :user:`RonnyPfannschmidt` for
+ showing how to fix the bug.
+
+* Minor improvements and fixes to the documentation.
+ Thanks :user:`omarkohl` for the PR.
+
+* Fix ``--fixtures`` to show all fixture definitions as opposed to just
+ one per fixture name.
+ Thanks to :user:`hackebrot` for the PR.
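+
+The win32 config-file fix above concerns invoking pytest programmatically with
+an absolute path to a configuration file. A minimal sketch, assuming a
+hypothetical Windows path and an existing ``tests`` directory (both invented
+here)::
+
+    import pytest
+
+    # Passing the arguments as a list avoids any shell-style splitting of
+    # the absolute path to the config file.
+    exit_code = pytest.main(["-c", r"C:\projects\myapp\pytest.ini", "tests"])
+    raise SystemExit(exit_code)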
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.0.rst
new file mode 100644
index 0000000000..5de3891148
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.0.rst
@@ -0,0 +1,82 @@
+pytest-3.0.0
+============
+
+The pytest team is proud to announce the 3.0.0 release!
+
+pytest is a mature Python testing tool with more than 1600 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a lot of bug fixes and improvements, and much of
+the work done on it was possible because of the 2016 Sprint [1], which
+was funded by an Indiegogo campaign that raised over US$12,000 with
+nearly 100 backers.
+
+There's a "What's new in pytest 3.0" [2] blog post highlighting the
+major features in this release.
+
+To see the complete changelog and documentation, please visit:
+
+ http://docs.pytest.org
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+ AbdealiJK
+ Ana Ribeiro
+ Antony Lee
+ Brandon W Maister
+ Brianna Laugher
+ Bruno Oliveira
+ Ceridwen
+ Christian Boelsen
+ Daniel Hahler
+ Danielle Jenkins
+ Dave Hunt
+ Diego Russo
+ Dmitry Dygalo
+ Edoardo Batini
+ Eli Boyarski
+ Florian Bruhin
+ Floris Bruynooghe
+ Greg Price
+ Guyzmo
+ HEAD KANGAROO
+ JJ
+ Javi Romero
+ Javier Domingo Cansino
+ Kale Kundert
+ Kalle Bronsen
+ Marius Gedminas
+ Matt Williams
+ Mike Lundy
+ Oliver Bestwalter
+ Omar Kohl
+ Raphael Pierzina
+ RedBeardCode
+ Roberto Polli
+ Romain Dorgueil
+ Roman Bolshakov
+ Ronny Pfannschmidt
+ Stefan Zimmermann
+ Steffen Allner
+ Tareq Alayan
+ Ted Xiao
+ Thomas Grainger
+ Tom Viner
+ TomV
+ Vasily Kuznetsov
+ aostr
+ marscher
+ palaviv
+ satoru
+ taschini
+
+
+Happy testing,
+The Pytest Development Team
+
+[1] http://blog.pytest.org/2016/pytest-development-sprint/
+[2] http://blog.pytest.org/2016/whats-new-in-pytest-30/
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.1.rst
new file mode 100644
index 0000000000..8f5cfe411a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.1.rst
@@ -0,0 +1,26 @@
+pytest-3.0.1
+============
+
+pytest 3.0.1 has just been released to PyPI.
+
+This release fixes some regressions reported in version 3.0.0, being a
+drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+ Adam Chainz
+ Andrew Svetlov
+ Bruno Oliveira
+ Daniel Hahler
+ Dmitry Dygalo
+ Florian Bruhin
+ Marcin Bachry
+ Ronny Pfannschmidt
+ matthiasha
+
+Happy testing,
+The py.test Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.2.rst
new file mode 100644
index 0000000000..86ba82ca6e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.2.rst
@@ -0,0 +1,24 @@
+pytest-3.0.2
+============
+
+pytest 3.0.2 has just been released to PyPI.
+
+This release fixes some regressions and bugs reported in version 3.0.1, being a
+drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Ahn Ki-Wook
+* Bruno Oliveira
+* Florian Bruhin
+* Jordan Guymon
+* Raphael Pierzina
+* Ronny Pfannschmidt
+* mbyt
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.3.rst
new file mode 100644
index 0000000000..89a2e0c744
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.3.rst
@@ -0,0 +1,27 @@
+pytest-3.0.3
+============
+
+pytest 3.0.3 has just been released to PyPI.
+
+This release fixes some regressions and bugs reported in the last version,
+being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+* Florian Bruhin
+* Floris Bruynooghe
+* Huayi Zhang
+* Lev Maximov
+* Raquel Alegre
+* Ronny Pfannschmidt
+* Roy Williams
+* Tyler Goodlet
+* mbyt
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.4.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.4.rst
new file mode 100644
index 0000000000..72c2d29464
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.4.rst
@@ -0,0 +1,29 @@
+pytest-3.0.4
+============
+
+pytest 3.0.4 has just been released to PyPI.
+
+This release fixes some regressions and bugs reported in the last version,
+being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+* Dan Wandschneider
+* Florian Bruhin
+* Georgy Dyuldin
+* Grigorii Eremeev
+* Jason R. Coombs
+* Manuel Jacob
+* Mathieu Clabaut
+* Michael Seifert
+* Nikolaus Rath
+* Ronny Pfannschmidt
+* Tom V
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.5.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.5.rst
new file mode 100644
index 0000000000..97edb7d462
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.5.rst
@@ -0,0 +1,27 @@
+pytest-3.0.5
+============
+
+pytest 3.0.5 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Ana Vojnovic
+* Bruno Oliveira
+* Daniel Hahler
+* Duncan Betts
+* Igor Starikov
+* Ismail
+* Luke Murphy
+* Ned Batchelder
+* Ronny Pfannschmidt
+* Sebastian Ramacher
+* nmundar
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.6.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.6.rst
new file mode 100644
index 0000000000..9c072cedcc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.6.rst
@@ -0,0 +1,33 @@
+pytest-3.0.6
+============
+
+pytest 3.0.6 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+
+Thanks to all who contributed to this release, among them:
+
+* Andreas Pelme
+* Bruno Oliveira
+* Dmitry Malinovsky
+* Eli Boyarski
+* Jakub Wilk
+* Jeff Widman
+* Loïc Estève
+* Luke Murphy
+* Miro Hrončok
+* Oscar Hellström
+* Peter Heatwole
+* Philippe Ombredanne
+* Ronny Pfannschmidt
+* Rutger Prins
+* Stefan Scherfke
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.7.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.7.rst
new file mode 100644
index 0000000000..4b7e075e76
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.0.7.rst
@@ -0,0 +1,33 @@
+pytest-3.0.7
+============
+
+pytest 3.0.7 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Barney Gale
+* Bruno Oliveira
+* Florian Bruhin
+* Floris Bruynooghe
+* Ionel Cristian Mărieș
+* Katerina Koukiou
+* NODA, Kai
+* Omer Hadari
+* Patrick Hayes
+* Ran Benita
+* Ronny Pfannschmidt
+* Victor Uriarte
+* Vidar Tonaas Fauske
+* Ville Skyttä
+* fbjorn
+* mbyt
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.0.rst
new file mode 100644
index 0000000000..5527706794
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.0.rst
@@ -0,0 +1,61 @@
+pytest-3.1.0
+=======================================
+
+The pytest team is proud to announce the 3.1.0 release!
+
+pytest is a mature Python testing tool with more than 1600 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+http://doc.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ http://docs.pytest.org
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Ben Lloyd
+* Bruno Oliveira
+* David Giese
+* David Szotten
+* Dmitri Pribysh
+* Florian Bruhin
+* Florian Schulze
+* Floris Bruynooghe
+* John Towler
+* Jonas Obrist
+* Katerina Koukiou
+* Kodi Arfer
+* Krzysztof Szularz
+* Lev Maximov
+* Loïc Estève
+* Luke Murphy
+* Manuel Krebber
+* Matthew Duck
+* Matthias Bussonnier
+* Michael Howitz
+* Michal Wajszczuk
+* Paweł Adamczak
+* Rafael Bertoldi
+* Ravi Chandra
+* Ronny Pfannschmidt
+* Skylar Downes
+* Thomas Kriechbaumer
+* Vitaly Lashmanov
+* Vlad Dragos
+* Wheerd
+* Xander Johnson
+* mandeep
+* reut
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.1.rst
new file mode 100644
index 0000000000..135b2fe844
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.1.rst
@@ -0,0 +1,23 @@
+pytest-3.1.1
+=======================================
+
+pytest 3.1.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+* Florian Bruhin
+* Floris Bruynooghe
+* Jason R. Coombs
+* Ronny Pfannschmidt
+* wanghui
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.2.rst
new file mode 100644
index 0000000000..a9b85c4715
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.2.rst
@@ -0,0 +1,23 @@
+pytest-3.1.2
+=======================================
+
+pytest 3.1.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Andreas Pelme
+* ApaDoctor
+* Bruno Oliveira
+* Florian Bruhin
+* Ronny Pfannschmidt
+* Segev Finer
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.3.rst
new file mode 100644
index 0000000000..bc2b85fcfd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.1.3.rst
@@ -0,0 +1,23 @@
+pytest-3.1.3
+=======================================
+
+pytest 3.1.3 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Antoine Legrand
+* Bruno Oliveira
+* Max Moroz
+* Raphael Pierzina
+* Ronny Pfannschmidt
+* Ryan Fitzpatrick
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.10.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.10.0.rst
new file mode 100644
index 0000000000..ff3c000b0e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.10.0.rst
@@ -0,0 +1,43 @@
+pytest-3.10.0
+=======================================
+
+The pytest team is proud to announce the 3.10.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Anders Hovmöller
+* Andreu Vallbona Plazas
+* Ankit Goel
+* Anthony Sottile
+* Bernardo Gomes
+* Brianna Laugher
+* Bruno Oliveira
+* Daniel Hahler
+* David Szotten
+* Mick Koch
+* Niclas Olofsson
+* Palash Chatterjee
+* Ronny Pfannschmidt
+* Sven-Hendrik Haase
+* Ville Skyttä
+* William Jamir Silva
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.10.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.10.1.rst
new file mode 100644
index 0000000000..ad365f6347
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.10.1.rst
@@ -0,0 +1,24 @@
+pytest-3.10.1
+=======================================
+
+pytest 3.10.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Boris Feld
+* Bruno Oliveira
+* Daniel Hahler
+* Fabien ZARIFIAN
+* Jon Dufresne
+* Ronny Pfannschmidt
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.0.rst
new file mode 100644
index 0000000000..edc66a28e7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.0.rst
@@ -0,0 +1,48 @@
+pytest-3.2.0
+=======================================
+
+The pytest team is proud to announce the 3.2.0 release!
+
+pytest is a mature Python testing tool with more than 1600 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ http://doc.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ http://docs.pytest.org
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Alex Hartoto
+* Andras Tim
+* Bruno Oliveira
+* Daniel Hahler
+* Florian Bruhin
+* Floris Bruynooghe
+* John Still
+* Jordan Moldow
+* Kale Kundert
+* Lawrence Mitchell
+* Llandy Riveron Del Risco
+* Maik Figura
+* Martin Altmayer
+* Mihai Capotă
+* Nathaniel Waisbrot
+* Nguyễn Hồng Quân
+* Pauli Virtanen
+* Raphael Pierzina
+* Ronny Pfannschmidt
+* Segev Finer
+* V.Kuznetsov
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.1.rst
new file mode 100644
index 0000000000..c40217d311
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.1.rst
@@ -0,0 +1,22 @@
+pytest-3.2.1
+=======================================
+
+pytest 3.2.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Alex Gaynor
+* Bruno Oliveira
+* Florian Bruhin
+* Ronny Pfannschmidt
+* Srinivas Reddy Thatiparthy
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.2.rst
new file mode 100644
index 0000000000..5e6c43ab17
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.2.rst
@@ -0,0 +1,28 @@
+pytest-3.2.2
+=======================================
+
+pytest 3.2.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Andreas Pelme
+* Antonio Hidalgo
+* Bruno Oliveira
+* Felipe Dau
+* Fernando Macedo
+* Jesús Espino
+* Joan Massich
+* Joe Talbott
+* Kirill Pinchuk
+* Ronny Pfannschmidt
+* Xuan Luong
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.3.rst
new file mode 100644
index 0000000000..50dce29c1a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.3.rst
@@ -0,0 +1,23 @@
+pytest-3.2.3
+=======================================
+
+pytest 3.2.3 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+* Evan
+* Joe Hamman
+* Oliver Bestwalter
+* Ronny Pfannschmidt
+* Xuan Luong
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.4.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.4.rst
new file mode 100644
index 0000000000..ff0b35781b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.4.rst
@@ -0,0 +1,36 @@
+pytest-3.2.4
+=======================================
+
+pytest 3.2.4 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+* Christian Boelsen
+* Christoph Buchner
+* Daw-Ran Liou
+* Florian Bruhin
+* Franck Michea
+* Leonard Lausen
+* Matty G
+* Owen Tuz
+* Pavel Karateev
+* Pierre GIRAUD
+* Ronny Pfannschmidt
+* Stephen Finucane
+* Sviatoslav Abakumov
+* Thomas Hisch
+* Tom Dalton
+* Xuan Luong
+* Yorgos Pagles
+* Семён Марьясин
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.5.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.5.rst
new file mode 100644
index 0000000000..68caccbdbc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.2.5.rst
@@ -0,0 +1,18 @@
+pytest-3.2.5
+=======================================
+
+pytest 3.2.5 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.3.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.3.0.rst
new file mode 100644
index 0000000000..1cbf2c448c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.3.0.rst
@@ -0,0 +1,50 @@
+pytest-3.3.0
+=======================================
+
+The pytest team is proud to announce the 3.3.0 release!
+
+pytest is a mature Python testing tool with more than 1600 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ http://doc.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ http://docs.pytest.org
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Ceridwen
+* Daniel Hahler
+* Dirk Thomas
+* Dmitry Malinovsky
+* Florian Bruhin
+* George Y. Kussumoto
+* Hugo
+* Jesús Espino
+* Joan Massich
+* Ofir
+* OfirOshir
+* Ronny Pfannschmidt
+* Samuel Dion-Girardeau
+* Srinivas Reddy Thatiparthy
+* Sviatoslav Abakumov
+* Tarcisio Fischer
+* Thomas Hisch
+* Tyler Goodlet
+* hugovk
+* je
+* prokaktus
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.3.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.3.1.rst
new file mode 100644
index 0000000000..98b6fa6c1b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.3.1.rst
@@ -0,0 +1,25 @@
+pytest-3.3.1
+=======================================
+
+pytest 3.3.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+* Daniel Hahler
+* Eugene Prikazchikov
+* Florian Bruhin
+* Roland Puntaier
+* Ronny Pfannschmidt
+* Sebastian Rahlf
+* Tom Viner
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.3.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.3.2.rst
new file mode 100644
index 0000000000..7a2577d1ff
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.3.2.rst
@@ -0,0 +1,28 @@
+pytest-3.3.2
+=======================================
+
+pytest 3.3.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Antony Lee
+* Austin
+* Bruno Oliveira
+* Florian Bruhin
+* Floris Bruynooghe
+* Henk-Jaap Wagenaar
+* Jurko Gospodnetić
+* Ronny Pfannschmidt
+* Srinivas Reddy Thatiparthy
+* Thomas Hisch
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.4.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.4.0.rst
new file mode 100644
index 0000000000..6ab5b124a2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.4.0.rst
@@ -0,0 +1,52 @@
+pytest-3.4.0
+=======================================
+
+The pytest team is proud to announce the 3.4.0 release!
+
+pytest is a mature Python testing tool with more than 1600 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ http://doc.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ http://docs.pytest.org
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Aaron
+* Alan Velasco
+* Anders Hovmöller
+* Andrew Toolan
+* Anthony Sottile
+* Aron Coyle
+* Brian Maissy
+* Bruno Oliveira
+* Cyrus Maden
+* Florian Bruhin
+* Henk-Jaap Wagenaar
+* Ian Lesperance
+* Jon Dufresne
+* Jurko Gospodnetić
+* Kate
+* Kimberly
+* Per A. Brodtkorb
+* Pierre-Alexandre Fonta
+* Raphael Castaneda
+* Ronny Pfannschmidt
+* ST John
+* Segev Finer
+* Thomas Hisch
+* Tzu-ping Chung
+* feuillemorte
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.4.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.4.1.rst
new file mode 100644
index 0000000000..d83949453a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.4.1.rst
@@ -0,0 +1,27 @@
+pytest-3.4.1
+=======================================
+
+pytest 3.4.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Aaron
+* Alan Velasco
+* Andy Freeland
+* Brian Maissy
+* Bruno Oliveira
+* Florian Bruhin
+* Jason R. Coombs
+* Marcin Bachry
+* Pedro Algarvio
+* Ronny Pfannschmidt
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.4.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.4.2.rst
new file mode 100644
index 0000000000..07cd9d3a8b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.4.2.rst
@@ -0,0 +1,28 @@
+pytest-3.4.2
+=======================================
+
+pytest 3.4.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Allan Feldman
+* Bruno Oliveira
+* Florian Bruhin
+* Jason R. Coombs
+* Kyle Altendorf
+* Maik Figura
+* Ronny Pfannschmidt
+* codetriage-readme-bot
+* feuillemorte
+* joshm91
+* mike
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.5.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.5.0.rst
new file mode 100644
index 0000000000..6bc2f3cd0c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.5.0.rst
@@ -0,0 +1,51 @@
+pytest-3.5.0
+=======================================
+
+The pytest team is proud to announce the 3.5.0 release!
+
+pytest is a mature Python testing tool with more than 1600 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ http://doc.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ http://docs.pytest.org
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Allan Feldman
+* Brian Maissy
+* Bruno Oliveira
+* Carlos Jenkins
+* Daniel Hahler
+* Florian Bruhin
+* Jason R. Coombs
+* Jeffrey Rackauckas
+* Jordan Speicher
+* Julien Palard
+* Kale Kundert
+* Kostis Anagnostopoulos
+* Kyle Altendorf
+* Maik Figura
+* Pedro Algarvio
+* Ronny Pfannschmidt
+* Tadeu Manoel
+* Tareq Alayan
+* Thomas Hisch
+* William Lee
+* codetriage-readme-bot
+* feuillemorte
+* joshm91
+* mike
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.5.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.5.1.rst
new file mode 100644
index 0000000000..802be03684
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.5.1.rst
@@ -0,0 +1,30 @@
+pytest-3.5.1
+=======================================
+
+pytest 3.5.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Brian Maissy
+* Bruno Oliveira
+* Darren Burns
+* David Chudzicki
+* Floris Bruynooghe
+* Holger Kohr
+* Irmen de Jong
+* Jeffrey Rackauckas
+* Rachel Kogan
+* Ronny Pfannschmidt
+* Stefan Scherfke
+* Tim Strazny
+* Семён Марьясин
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.0.rst
new file mode 100644
index 0000000000..44b178c169
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.0.rst
@@ -0,0 +1,41 @@
+pytest-3.6.0
+=======================================
+
+The pytest team is proud to announce the 3.6.0 release!
+
+pytest is a mature Python testing tool with more than 1600 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ http://doc.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ http://docs.pytest.org
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Shaw
+* ApaDoctor
+* Brian Maissy
+* Bruno Oliveira
+* Jon Dufresne
+* Katerina Koukiou
+* Miro Hrončok
+* Rachel Kogan
+* Ronny Pfannschmidt
+* Tim Hughes
+* Tyler Goodlet
+* Ville Skyttä
+* aviral1701
+* feuillemorte
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.1.rst
new file mode 100644
index 0000000000..d971a3d490
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.1.rst
@@ -0,0 +1,24 @@
+pytest-3.6.1
+=======================================
+
+pytest 3.6.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Jeffrey Rackauckas
+* Miro Hrončok
+* Niklas Meinzer
+* Oliver Bestwalter
+* Ronny Pfannschmidt
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.2.rst
new file mode 100644
index 0000000000..9d91995793
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.2.rst
@@ -0,0 +1,29 @@
+pytest-3.6.2
+=======================================
+
+pytest 3.6.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Alan Velasco
+* Alex Barbato
+* Anthony Sottile
+* Bartosz Cierocki
+* Bruno Oliveira
+* Daniel Hahler
+* Guoqiang Zhang
+* Hynek Schlawack
+* John T. Wodder II
+* Michael Käufl
+* Ronny Pfannschmidt
+* Samuel Dion-Girardeau
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.3.rst
new file mode 100644
index 0000000000..4dda2460da
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.3.rst
@@ -0,0 +1,27 @@
+pytest-3.6.3
+=======================================
+
+pytest 3.6.3 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* AdamEr8
+* Anthony Sottile
+* Bruno Oliveira
+* Jean-Paul Calderone
+* Jon Dufresne
+* Marcelo Duarte Trevisani
+* Ondřej Súkup
+* Ronny Pfannschmidt
+* T.E.A de Souza
+* Victor Maryama
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.4.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.4.rst
new file mode 100644
index 0000000000..2c0f9efecc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.6.4.rst
@@ -0,0 +1,24 @@
+pytest-3.6.4
+=======================================
+
+pytest 3.6.4 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bernhard M. Wiedemann
+* Bruno Oliveira
+* Drew
+* E Hershey
+* Hugo Martins
+* Vlad Shcherbina
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.0.rst
new file mode 100644
index 0000000000..89908a9101
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.0.rst
@@ -0,0 +1,41 @@
+pytest-3.7.0
+=======================================
+
+The pytest team is proud to announce the 3.7.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ http://doc.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ http://docs.pytest.org
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Alan
+* Alan Brammer
+* Ammar Najjar
+* Anthony Sottile
+* Bruno Oliveira
+* Jeffrey Rackauckas
+* Kale Kundert
+* Ronny Pfannschmidt
+* Serhii Mozghovyi
+* Tadek Teleżyński
+* Wil Cooley
+* abrammer
+* avirlrma
+* turturica
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.1.rst
new file mode 100644
index 0000000000..7da5a3e1f7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.1.rst
@@ -0,0 +1,21 @@
+pytest-3.7.1
+=======================================
+
+pytest 3.7.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Kale Kundert
+* Ronny Pfannschmidt
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.2.rst
new file mode 100644
index 0000000000..fcc6121752
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.2.rst
@@ -0,0 +1,25 @@
+pytest-3.7.2
+=======================================
+
+pytest 3.7.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Josh Holland
+* Ronny Pfannschmidt
+* Sankt Petersbug
+* Wes Thomas
+* turturica
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.3.rst
new file mode 100644
index 0000000000..ee87da60d2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.3.rst
@@ -0,0 +1,32 @@
+pytest-3.7.3
+=======================================
+
+pytest 3.7.3 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Andrew Champion
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Gandalf Saxe
+* Jennifer Rinker
+* Natan Lao
+* Ondřej Súkup
+* Ronny Pfannschmidt
+* Sankt Petersbug
+* Tyler Richard
+* Victor Maryama
+* Vlad Shcherbina
+* turturica
+* wim glenn
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.4.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.4.rst
new file mode 100644
index 0000000000..45be429388
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.7.4.rst
@@ -0,0 +1,22 @@
+pytest-3.7.4
+=======================================
+
+pytest 3.7.4 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Jiri Kuncar
+* Steve Piercy
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.8.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.8.0.rst
new file mode 100644
index 0000000000..8c35a44f6d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.8.0.rst
@@ -0,0 +1,38 @@
+pytest-3.8.0
+=======================================
+
+The pytest team is proud to announce the 3.8.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* CrazyMerlyn
+* Daniel Hahler
+* Fabio Zadrozny
+* Jeffrey Rackauckas
+* Ronny Pfannschmidt
+* Virgil Dupras
+* dhirensr
+* hoefling
+* wim glenn
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.8.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.8.1.rst
new file mode 100644
index 0000000000..f8f8accc4c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.8.1.rst
@@ -0,0 +1,25 @@
+pytest-3.8.1
+=======================================
+
+pytest 3.8.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Ankit Goel
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Maximilian Albert
+* Ronny Pfannschmidt
+* William Jamir Silva
+* wim glenn
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.8.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.8.2.rst
new file mode 100644
index 0000000000..9ea94c98a2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.8.2.rst
@@ -0,0 +1,28 @@
+pytest-3.8.2
+=======================================
+
+pytest 3.8.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Ankit Goel
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Denis Otkidach
+* Harry Percival
+* Jeffrey Rackauckas
+* Jose Carlos Menezes
+* Ronny Pfannschmidt
+* Zac Hatfield-Dodds
+* iwanb
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.0.rst
new file mode 100644
index 0000000000..0be6cf5be8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.0.rst
@@ -0,0 +1,43 @@
+pytest-3.9.0
+=======================================
+
+The pytest team is proud to announce the 3.9.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Andrea Cimatoribus
+* Ankit Goel
+* Anthony Sottile
+* Ben Eyal
+* Bruno Oliveira
+* Daniel Hahler
+* Jeffrey Rackauckas
+* Jose Carlos Menezes
+* Kyle Altendorf
+* Niklas JQ
+* Palash Chatterjee
+* Ronny Pfannschmidt
+* Thomas Hess
+* Thomas Hisch
+* Tomer Keren
+* Victor Maryama
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.1.rst
new file mode 100644
index 0000000000..e1afb3759d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.1.rst
@@ -0,0 +1,20 @@
+pytest-3.9.1
+=======================================
+
+pytest 3.9.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+* Ronny Pfannschmidt
+* Thomas Hisch
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.2.rst
new file mode 100644
index 0000000000..63e94e5aab
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.2.rst
@@ -0,0 +1,23 @@
+pytest-3.9.2
+=======================================
+
+pytest 3.9.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Ankit Goel
+* Anthony Sottile
+* Bruno Oliveira
+* Ronny Pfannschmidt
+* Vincent Barbaresi
+* ykantor
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.3.rst
new file mode 100644
index 0000000000..661ddb5cb5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-3.9.3.rst
@@ -0,0 +1,24 @@
+pytest-3.9.3
+=======================================
+
+pytest 3.9.3 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Andreas Profous
+* Ankit Goel
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Jon Dufresne
+* Ronny Pfannschmidt
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.0.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.0.0.rst
new file mode 100644
index 0000000000..5eb0107758
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.0.0.rst
@@ -0,0 +1,30 @@
+pytest-4.0.0
+=======================================
+
+The pytest team is proud to announce the 4.0.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+* Daniel Hahler
+* Ronny Pfannschmidt
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.0.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.0.1.rst
new file mode 100644
index 0000000000..2902a6db9f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.0.1.rst
@@ -0,0 +1,23 @@
+pytest-4.0.1
+=======================================
+
+pytest 4.0.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Michael D. Hoyle
+* Ronny Pfannschmidt
+* Slam
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.0.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.0.2.rst
new file mode 100644
index 0000000000..f439b88fe2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.0.2.rst
@@ -0,0 +1,24 @@
+pytest-4.0.2
+=======================================
+
+pytest 4.0.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Pedro Algarvio
+* Ronny Pfannschmidt
+* Tomer Keren
+* Yash Todi
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.1.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.1.0.rst
new file mode 100644
index 0000000000..314564eeb6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.1.0.rst
@@ -0,0 +1,44 @@
+pytest-4.1.0
+=======================================
+
+The pytest team is proud to announce the 4.1.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Adam Johnson
+* Aly Sivji
+* Andrey Paramonov
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* David Vo
+* Hyunchel Kim
+* Jeffrey Rackauckas
+* Kanguros
+* Nicholas Devenish
+* Pedro Algarvio
+* Randy Barlow
+* Ronny Pfannschmidt
+* Tomer Keren
+* feuillemorte
+* wim glenn
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.1.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.1.1.rst
new file mode 100644
index 0000000000..1f45e082f8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.1.1.rst
@@ -0,0 +1,27 @@
+pytest-4.1.1
+=======================================
+
+pytest 4.1.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Anton Lodder
+* Bruno Oliveira
+* Daniel Hahler
+* David Vo
+* Oscar Benjamin
+* Ronny Pfannschmidt
+* Victor Maryama
+* Yoav Caspi
+* dmitry.dygalo
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.2.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.2.0.rst
new file mode 100644
index 0000000000..bcd7f77547
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.2.0.rst
@@ -0,0 +1,37 @@
+pytest-4.2.0
+=======================================
+
+The pytest team is proud to announce the 4.2.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from pypi via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Adam Uhlir
+* Anthony Sottile
+* Bruno Oliveira
+* Christopher Dignam
+* Daniel Hahler
+* Joseph Hunkeler
+* Kristoffer Nordstroem
+* Ronny Pfannschmidt
+* Thomas Hisch
+* wim glenn
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.2.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.2.1.rst
new file mode 100644
index 0000000000..36beafe11d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.2.1.rst
@@ -0,0 +1,30 @@
+pytest-4.2.1
+=======================================
+
+pytest 4.2.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Arel Cordero
+* Bruno Oliveira
+* Daniel Hahler
+* Holger Kohr
+* Kevin J. Foley
+* Nick Murphy
+* Paweł Stradomski
+* Raphael Pierzina
+* Ronny Pfannschmidt
+* Sam Brightman
+* Thomas Hisch
+* Zac Hatfield-Dodds
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.3.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.3.0.rst
new file mode 100644
index 0000000000..3b0b428092
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.3.0.rst
@@ -0,0 +1,36 @@
+pytest-4.3.0
+=======================================
+
+The pytest team is proud to announce the 4.3.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Andras Mitzki
+* Anthony Sottile
+* Bruno Oliveira
+* Christian Fetzer
+* Daniel Hahler
+* Grygorii Iermolenko
+* R. Alex Matevish
+* Ronny Pfannschmidt
+* cclauss
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.3.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.3.1.rst
new file mode 100644
index 0000000000..4251c744e5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.3.1.rst
@@ -0,0 +1,28 @@
+pytest-4.3.1
+=======================================
+
+pytest 4.3.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Andras Mitzki
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Danilo Horta
+* Grygorii Iermolenko
+* Jeff Hale
+* Kyle Altendorf
+* Stephan Hoyer
+* Zac Hatfield-Dodds
+* songbowen
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.4.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.4.0.rst
new file mode 100644
index 0000000000..dc89739d0a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.4.0.rst
@@ -0,0 +1,39 @@
+pytest-4.4.0
+=======================================
+
+The pytest team is proud to announce the 4.4.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* ApaDoctor
+* Bernhard M. Wiedemann
+* Brian Skinn
+* Bruno Oliveira
+* Daniel Hahler
+* Gary Tyler
+* Jeong YunWon
+* Miro Hrončok
+* Takafumi Arakaki
+* henrykironde
+* smheidrich
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.4.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.4.1.rst
new file mode 100644
index 0000000000..1272cd8fde
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.4.1.rst
@@ -0,0 +1,20 @@
+pytest-4.4.1
+=======================================
+
+pytest 4.4.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.4.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.4.2.rst
new file mode 100644
index 0000000000..5876e83b3b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.4.2.rst
@@ -0,0 +1,33 @@
+pytest-4.4.2
+=======================================
+
+pytest 4.4.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Allan Lewis
+* Anthony Sottile
+* Bruno Oliveira
+* DamianSkrzypczak
+* Daniel Hahler
+* Don Kirkby
+* Douglas Thor
+* Hugo
+* Ilya Konstantinov
+* Jon Dufresne
+* Matt Cooper
+* Nikolay Kondratyev
+* Ondřej Súkup
+* Peter Schutt
+* Romain Chossart
+* Sitaktif
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.5.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.5.0.rst
new file mode 100644
index 0000000000..d2a05d4f79
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.5.0.rst
@@ -0,0 +1,34 @@
+pytest-4.5.0
+=======================================
+
+The pytest team is proud to announce the 4.5.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Floris Bruynooghe
+* Pulkit Goyal
+* Samuel Searles-Bryant
+* Zac Hatfield-Dodds
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.0.rst
new file mode 100644
index 0000000000..a82fdd47d6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.0.rst
@@ -0,0 +1,43 @@
+pytest-4.6.0
+=======================================
+
+The pytest team is proud to announce the 4.6.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Akiomi Kamakura
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* David Röthlisberger
+* Evan Kepner
+* Jeffrey Rackauckas
+* MyComputer
+* Nikita Krokosh
+* Raul Tambre
+* Thomas Hisch
+* Tim Hoffmann
+* Tomer Keren
+* Victor Maryama
+* danielx123
+* oleg-yegorov
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.1.rst
new file mode 100644
index 0000000000..c79839b7b5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.1.rst
@@ -0,0 +1,19 @@
+pytest-4.6.1
+=======================================
+
+pytest 4.6.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.2.rst
new file mode 100644
index 0000000000..cfc595293a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.2.rst
@@ -0,0 +1,18 @@
+pytest-4.6.2
+=======================================
+
+pytest 4.6.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.3.rst
new file mode 100644
index 0000000000..f578464a7a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.3.rst
@@ -0,0 +1,21 @@
+pytest-4.6.3
+=======================================
+
+pytest 4.6.3 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Dirk Thomas
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.4.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.4.rst
new file mode 100644
index 0000000000..0eefcbeb1c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.4.rst
@@ -0,0 +1,22 @@
+pytest-4.6.4
+=======================================
+
+pytest 4.6.4 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Thomas Grainger
+* Zac Hatfield-Dodds
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.5.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.5.rst
new file mode 100644
index 0000000000..1ebf361fdf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.5.rst
@@ -0,0 +1,21 @@
+pytest-4.6.5
+=======================================
+
+pytest 4.6.5 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Thomas Grainger
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.6.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.6.rst
new file mode 100644
index 0000000000..b3bf1e431c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.6.rst
@@ -0,0 +1,20 @@
+pytest-4.6.6
+=======================================
+
+pytest 4.6.6 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Michael Goerz
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.7.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.7.rst
new file mode 100644
index 0000000000..f9d01845ec
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.7.rst
@@ -0,0 +1,19 @@
+pytest-4.6.7
+=======================================
+
+pytest 4.6.7 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+* Daniel Hahler
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.8.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.8.rst
new file mode 100644
index 0000000000..5cabe7826e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.8.rst
@@ -0,0 +1,20 @@
+pytest-4.6.8
+=======================================
+
+pytest 4.6.8 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Ryan Mast
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.9.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.9.rst
new file mode 100644
index 0000000000..7f7bb5996e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-4.6.9.rst
@@ -0,0 +1,21 @@
+pytest-4.6.9
+=======================================
+
+pytest 4.6.9 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Felix Yan
+* Hugo
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.0.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.0.0.rst
new file mode 100644
index 0000000000..f5e593e9d8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.0.0.rst
@@ -0,0 +1,46 @@
+pytest-5.0.0
+=======================================
+
+The pytest team is proud to announce the 5.0.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Dirk Thomas
+* Evan Kepner
+* Florian Bruhin
+* Hugo
+* Kevin J. Foley
+* Pulkit Goyal
+* Ralph Giles
+* Ronny Pfannschmidt
+* Thomas Grainger
+* Thomas Hisch
+* Tim Gates
+* Victor Maryama
+* Yuri Apollov
+* Zac Hatfield-Dodds
+* curiousjazz77
+* patriksevallius
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.0.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.0.1.rst
new file mode 100644
index 0000000000..e16a8f716f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.0.1.rst
@@ -0,0 +1,25 @@
+pytest-5.0.1
+=======================================
+
+pytest 5.0.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* AmirElkess
+* Andreu Vallbona Plazas
+* Anthony Sottile
+* Bruno Oliveira
+* Florian Bruhin
+* Michael Moore
+* Niklas Meinzer
+* Thomas Grainger
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.0.rst
new file mode 100644
index 0000000000..9ab54ff973
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.0.rst
@@ -0,0 +1,56 @@
+pytest-5.1.0
+=======================================
+
+The pytest team is proud to announce the 5.1.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Albert Tugushev
+* Alexey Zankevich
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* David Röthlisberger
+* Florian Bruhin
+* Ilya Stepin
+* Jon Dufresne
+* Kaiqi
+* Max R
+* Miro Hrončok
+* Oliver Bestwalter
+* Ran Benita
+* Ronny Pfannschmidt
+* Samuel Searles-Bryant
+* Semen Zhydenko
+* Steffen Schroeder
+* Thomas Grainger
+* Tim Hoffmann
+* William Woodall
+* Wojtek Erbetowski
+* Xixi Zhao
+* Yash Todi
+* boris
+* dmitry.dygalo
+* helloocc
+* martbln
+* mei-li
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.1.rst
new file mode 100644
index 0000000000..bb8de48014
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.1.rst
@@ -0,0 +1,24 @@
+pytest-5.1.1
+=======================================
+
+pytest 5.1.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Florian Bruhin
+* Hugo van Kemenade
+* Ran Benita
+* Ronny Pfannschmidt
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.2.rst
new file mode 100644
index 0000000000..c4cb8e3fb4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.2.rst
@@ -0,0 +1,23 @@
+pytest-5.1.2
+=======================================
+
+pytest 5.1.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Andrzej Klajnert
+* Anthony Sottile
+* Bruno Oliveira
+* Christian Neumüller
+* Robert Holt
+* linchiwei123
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.3.rst
new file mode 100644
index 0000000000..c4e88aed28
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.1.3.rst
@@ -0,0 +1,23 @@
+pytest-5.1.3
+=======================================
+
+pytest 5.1.3 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Christian Neumüller
+* Daniel Hahler
+* Gene Wood
+* Hugo
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.0.rst
new file mode 100644
index 0000000000..f43767b750
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.0.rst
@@ -0,0 +1,35 @@
+pytest-5.2.0
+=======================================
+
+The pytest team is proud to announce the 5.2.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Andrzej Klajnert
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* James Cooke
+* Michael Goerz
+* Ran Benita
+* Tomáš Chvátal
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.1.rst
new file mode 100644
index 0000000000..fe42b9bf15
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.1.rst
@@ -0,0 +1,23 @@
+pytest-5.2.1
+=======================================
+
+pytest 5.2.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Florian Bruhin
+* Hynek Schlawack
+* Kevin J. Foley
+* tadashigaki
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.2.rst
new file mode 100644
index 0000000000..89fd6a534d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.2.rst
@@ -0,0 +1,29 @@
+pytest-5.2.2
+=======================================
+
+pytest 5.2.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Albert Tugushev
+* Andrzej Klajnert
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Florian Bruhin
+* Nattaphoom Chaipreecha
+* Oliver Bestwalter
+* Philipp Loose
+* Ran Benita
+* Victor Maryama
+* Yoav Caspi
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.3.rst
new file mode 100644
index 0000000000..bab174495d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.3.rst
@@ -0,0 +1,28 @@
+pytest-5.2.3
+=======================================
+
+pytest 5.2.3 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Brett Cannon
+* Bruno Oliveira
+* Daniel Hahler
+* Daniil Galiev
+* David Szotten
+* Florian Bruhin
+* Patrick Harmon
+* Ran Benita
+* Zac Hatfield-Dodds
+* Zak Hassan
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.4.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.4.rst
new file mode 100644
index 0000000000..5f51896797
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.2.4.rst
@@ -0,0 +1,22 @@
+pytest-5.2.4
+=======================================
+
+pytest 5.2.4 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Hugo
+* Michael Shields
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.0.rst
new file mode 100644
index 0000000000..e13a71f09a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.0.rst
@@ -0,0 +1,45 @@
+pytest-5.3.0
+=======================================
+
+The pytest team is proud to announce the 5.3.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* AnjoMan
+* Anthony Sottile
+* Anton Lodder
+* Bruno Oliveira
+* Daniel Hahler
+* Gregory Lee
+* Josh Karpel
+* JoshKarpel
+* Joshua Storck
+* Kale Kundert
+* MarcoGorelli
+* Michael Krebs
+* NNRepos
+* Ran Benita
+* TH3CHARLie
+* Tibor Arpas
+* Zac Hatfield-Dodds
+* 林玮
+
+
+Happy testing,
+The Pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.1.rst
new file mode 100644
index 0000000000..d575bb70e3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.1.rst
@@ -0,0 +1,26 @@
+pytest-5.3.1
+=======================================
+
+pytest 5.3.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Felix Yan
+* Florian Bruhin
+* Mark Dickinson
+* Nikolay Kondratyev
+* Steffen Schroeder
+* Zac Hatfield-Dodds
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.2.rst
new file mode 100644
index 0000000000..d562a33fb0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.2.rst
@@ -0,0 +1,26 @@
+pytest-5.3.2
+=======================================
+
+pytest 5.3.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Claudio Madotto
+* Daniel Hahler
+* Jared Vasquez
+* Michael Rose
+* Ran Benita
+* Ronny Pfannschmidt
+* Zac Hatfield-Dodds
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.3.rst
new file mode 100644
index 0000000000..40a6fb5b56
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.3.rst
@@ -0,0 +1,30 @@
+pytest-5.3.3
+=======================================
+
+pytest 5.3.3 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Adam Johnson
+* Alexandre Mulatinho
+* Anthony Sottile
+* Bruno Oliveira
+* Chris NeJame
+* Daniel Hahler
+* Hugo van Kemenade
+* Marcelo Duarte Trevisani
+* PaulC
+* Ran Benita
+* Ryan Barner
+* Seth Junot
+* marc
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.4.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.4.rst
new file mode 100644
index 0000000000..0750a9d404
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.4.rst
@@ -0,0 +1,20 @@
+pytest-5.3.4
+=======================================
+
+pytest 5.3.4 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+* Daniel Hahler
+* Ran Benita
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.5.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.5.rst
new file mode 100644
index 0000000000..e632ce8538
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.3.5.rst
@@ -0,0 +1,19 @@
+pytest-5.3.5
+=======================================
+
+pytest 5.3.5 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Daniel Hahler
+* Ran Benita
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.0.rst
new file mode 100644
index 0000000000..43dffc9290
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.0.rst
@@ -0,0 +1,59 @@
+pytest-5.4.0
+=======================================
+
+The pytest team is proud to announce the 5.4.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Christoph Buelter
+* Christoph Bülter
+* Daniel Arndt
+* Daniel Hahler
+* Holger Kohr
+* Hugo
+* Hugo van Kemenade
+* Jakub Mitoraj
+* Kyle Altendorf
+* Minuddin Ahmed Rana
+* Nathaniel Compton
+* ParetoLife
+* Pauli Virtanen
+* Philipp Loose
+* Ran Benita
+* Ronny Pfannschmidt
+* Stefan Scherfke
+* Stefano Mazzucco
+* TWood67
+* Tobias Schmidt
+* Tomáš Gavenčiak
+* Vinay Calastry
+* Vladyslav Rachek
+* Zac Hatfield-Dodds
+* captainCapitalism
+* cmachalo
+* gftea
+* kpinc
+* rebecca-palmer
+* sdementen
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.1.rst
new file mode 100644
index 0000000000..f6a64efa49
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.1.rst
@@ -0,0 +1,18 @@
+pytest-5.4.1
+=======================================
+
+pytest 5.4.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.2.rst
new file mode 100644
index 0000000000..d742dd4aad
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.2.rst
@@ -0,0 +1,22 @@
+pytest-5.4.2
+=======================================
+
+pytest 5.4.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Daniel Hahler
+* Ran Benita
+* Ronny Pfannschmidt
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.3.rst
new file mode 100644
index 0000000000..6c995c1633
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-5.4.3.rst
@@ -0,0 +1,21 @@
+pytest-5.4.3
+=======================================
+
+pytest 5.4.3 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Ran Benita
+* Tor Colvin
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.0.rst
new file mode 100644
index 0000000000..9706fe59bc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.0.rst
@@ -0,0 +1,40 @@
+pytest-6.0.0
+=======================================
+
+The pytest team is proud to announce the 6.0.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+ https://docs.pytest.org/en/latest/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/latest/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Arvin Firouzi
+* Bruno Oliveira
+* Debi Mishra
+* Garrett Thomas
+* Hugo van Kemenade
+* Kelton Bassingthwaite
+* Kostis Anagnostopoulos
+* Lewis Cowles
+* Miro Hrončok
+* Ran Benita
+* Simon K
+* Zac Hatfield-Dodds
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.0rc1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.0rc1.rst
new file mode 100644
index 0000000000..5690b514ba
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.0rc1.rst
@@ -0,0 +1,67 @@
+pytest-6.0.0rc1
+=======================================
+
+pytest 6.0.0rc1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/latest/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Alfredo Deza
+* Andreas Maier
+* Andrew
+* Anthony Sottile
+* ArtyomKaltovich
+* Bruno Oliveira
+* Claire Cecil
+* Curt J. Sampson
+* Daniel
+* Daniel Hahler
+* Danny Sepler
+* David Diaz Barquero
+* Fabio Zadrozny
+* Felix Nieuwenhuizen
+* Florian Bruhin
+* Florian Dahlitz
+* Gleb Nikonorov
+* Hugo van Kemenade
+* Hunter Richards
+* Katarzyna Król
+* Katrin Leinweber
+* Keri Volans
+* Lewis Belcher
+* Lukas Geiger
+* Martin Michlmayr
+* Mattwmaster58
+* Maximilian Cosmo Sitter
+* Nikolay Kondratyev
+* Pavel Karateev
+* Paweł Wilczyński
+* Prashant Anand
+* Ram Rachum
+* Ran Benita
+* Ronny Pfannschmidt
+* Ruaridh Williamson
+* Simon K
+* Tim Hoffmann
+* Tor Colvin
+* Vlad-Radz
+* Xinbin Huang
+* Zac Hatfield-Dodds
+* earonesty
+* gaurav dhameeja
+* gdhameeja
+* ibriquem
+* mcsitter
+* piotrhm
+* smarie
+* symonk
+* xuiqzy
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.1.rst
new file mode 100644
index 0000000000..33fdbed3f6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.1.rst
@@ -0,0 +1,21 @@
+pytest-6.0.1
+=======================================
+
+pytest 6.0.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/latest/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Bruno Oliveira
+* Mattreex
+* Ran Benita
+* hp310780
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.2.rst
new file mode 100644
index 0000000000..16eabc5863
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.0.2.rst
@@ -0,0 +1,19 @@
+pytest-6.0.2
+=======================================
+
+pytest 6.0.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all of the contributors to this release:
+
+* Bruno Oliveira
+* Ran Benita
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.1.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.1.0.rst
new file mode 100644
index 0000000000..f4b571ae84
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.1.0.rst
@@ -0,0 +1,44 @@
+pytest-6.1.0
+=======================================
+
+The pytest team is proud to announce the 6.1.0 release!
+
+This release contains new features, improvements, bug fixes, and breaking changes, so users
+are encouraged to take a look at the CHANGELOG carefully:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all of the contributors to this release:
+
+* Anthony Sottile
+* Bruno Oliveira
+* C. Titus Brown
+* Drew Devereux
+* Faris A Chugthai
+* Florian Bruhin
+* Hugo van Kemenade
+* Hynek Schlawack
+* Joseph Lucas
+* Kamran Ahmad
+* Mattreex
+* Maximilian Cosmo Sitter
+* Ran Benita
+* Rüdiger Busche
+* Sam Estep
+* Sorin Sbarnea
+* Thomas Grainger
+* Vipul Kumar
+* Yutaro Ikeda
+* hp310780
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.1.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.1.1.rst
new file mode 100644
index 0000000000..e09408fdee
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.1.1.rst
@@ -0,0 +1,18 @@
+pytest-6.1.1
+=======================================
+
+pytest 6.1.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all of the contributors to this release:
+
+* Ran Benita
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.1.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.1.2.rst
new file mode 100644
index 0000000000..aa2c809520
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.1.2.rst
@@ -0,0 +1,22 @@
+pytest-6.1.2
+=======================================
+
+pytest 6.1.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all of the contributors to this release:
+
+* Bruno Oliveira
+* Manuel Mariñez
+* Ran Benita
+* Vasilis Gerakaris
+* William Jamir Silva
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.0.rst
new file mode 100644
index 0000000000..af16b830dd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.0.rst
@@ -0,0 +1,76 @@
+pytest-6.2.0
+=======================================
+
+The pytest team is proud to announce the 6.2.0 release!
+
+This release contains new features, improvements, bug fixes, and breaking changes, so users
+are encouraged to take a look at the CHANGELOG carefully:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all of the contributors to this release:
+
+* Adam Johnson
+* Albert Villanova del Moral
+* Anthony Sottile
+* Anton
+* Ariel Pillemer
+* Bruno Oliveira
+* Charles Aracil
+* Christine M
+* Christine Mecklenborg
+* Cserna Zsolt
+* Dominic Mortlock
+* Emiel van de Laar
+* Florian Bruhin
+* Garvit Shubham
+* Gustavo Camargo
+* Hugo Martins
+* Hugo van Kemenade
+* Jakob van Santen
+* Josias Aurel
+* Jürgen Gmach
+* Karthikeyan Singaravelan
+* Katarzyna
+* Kyle Altendorf
+* Manuel Mariñez
+* Matthew Hughes
+* Matthias Gabriel
+* Max Voitko
+* Maximilian Cosmo Sitter
+* Mikhail Fesenko
+* Nimesh Vashistha
+* Pedro Algarvio
+* Petter Strandmark
+* Prakhar Gurunani
+* Prashant Sharma
+* Ran Benita
+* Ronny Pfannschmidt
+* Sanket Duthade
+* Shubham Adep
+* Simon K
+* Tanvi Mehta
+* Thomas Grainger
+* Tim Hoffmann
+* Vasilis Gerakaris
+* William Jamir Silva
+* Zac Hatfield-Dodds
+* crricks
+* dependabot[bot]
+* duthades
+* frankgerhardt
+* kwgchi
+* mickeypash
+* symonk
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.1.rst
new file mode 100644
index 0000000000..f9e7161835
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.1.rst
@@ -0,0 +1,20 @@
+pytest-6.2.1
+=======================================
+
+pytest 6.2.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all of the contributors to this release:
+
+* Bruno Oliveira
+* Jakob van Santen
+* Ran Benita
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.2.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.2.rst
new file mode 100644
index 0000000000..c3999c5386
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.2.rst
@@ -0,0 +1,21 @@
+pytest-6.2.2
+=======================================
+
+pytest 6.2.2 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all of the contributors to this release:
+
+* Adam Johnson
+* Bruno Oliveira
+* Chris NeJame
+* Ran Benita
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.3.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.3.rst
new file mode 100644
index 0000000000..e45aa6a03e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.3.rst
@@ -0,0 +1,19 @@
+pytest-6.2.3
+=======================================
+
+pytest 6.2.3 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all of the contributors to this release:
+
+* Bruno Oliveira
+* Ran Benita
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.4.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.4.rst
new file mode 100644
index 0000000000..fa2e3e7813
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.4.rst
@@ -0,0 +1,22 @@
+pytest-6.2.4
+=======================================
+
+pytest 6.2.4 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all of the contributors to this release:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Christian Maurer
+* Florian Bruhin
+* Ran Benita
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.5.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.5.rst
new file mode 100644
index 0000000000..bc6b4cf422
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-6.2.5.rst
@@ -0,0 +1,30 @@
+pytest-6.2.5
+=======================================
+
+pytest 6.2.5 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all of the contributors to this release:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Brylie Christopher Oxley
+* Daniel Asztalos
+* Florian Bruhin
+* Jason Haugen
+* MapleCCC
+* Michał Górny
+* Miro Hrončok
+* Ran Benita
+* Ronny Pfannschmidt
+* Sylvain Bellemare
+* Thomas Güttler
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-7.0.0.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-7.0.0.rst
new file mode 100644
index 0000000000..3ce4335564
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-7.0.0.rst
@@ -0,0 +1,74 @@
+pytest-7.0.0
+=======================================
+
+The pytest team is proud to announce the 7.0.0 release!
+
+This release contains new features, improvements, bug fixes, and breaking changes, so users
+are encouraged to take a look at the CHANGELOG carefully:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all of the contributors to this release:
+
+* Adam J. Stewart
+* Alexander King
+* Amin Alaee
+* Andrew Neitsch
+* Anthony Sottile
+* Ben Davies
+* Bernát Gábor
+* Brian Okken
+* Bruno Oliveira
+* Cristian Vera
+* Dan Alvizu
+* David Szotten
+* Eddie
+* Emmanuel Arias
+* Emmanuel Meric de Bellefon
+* Eric Liu
+* Florian Bruhin
+* GergelyKalmar
+* Graeme Smecher
+* Harshna
+* Hugo van Kemenade
+* Jakub Kulík
+* James Myatt
+* Jeff Rasley
+* Kale Kundert
+* Kian Meng, Ang
+* Miro Hrončok
+* Naveen-Pratap
+* Oleg Höfling
+* Olga Matoula
+* Ran Benita
+* Ronny Pfannschmidt
+* Simon K
+* Srip
+* Sören Wegener
+* Taneli Hukkinen
+* Terje Runde
+* Thomas Grainger
+* Thomas Hisch
+* William Jamir Silva
+* Yuval Shimon
+* Zac Hatfield-Dodds
+* andrewdotn
+* denivyruck
+* ericluoliu
+* oleg.hoefling
+* symonk
+* ziebam
+* Éloi Rivard
+* Éric
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-7.0.0rc1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-7.0.0rc1.rst
new file mode 100644
index 0000000000..a5bf0ed3c4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-7.0.0rc1.rst
@@ -0,0 +1,74 @@
+pytest-7.0.0rc1
+=======================================
+
+The pytest team is proud to announce the 7.0.0rc1 prerelease!
+
+This is a prerelease, not intended for production use, but meant to exercise the upcoming features and improvements
+in order to catch any major problems before the final version is released to the general public.
+
+We appreciate your help testing this out before the final release, making sure to report any
+regressions to our issue tracker:
+
+https://github.com/pytest-dev/pytest/issues
+
+When doing so, please include the string ``[prerelease]`` in the title.
+
+You can upgrade from PyPI via:
+
+ pip install pytest==7.0.0rc1
+
+Users are encouraged to take a look at the CHANGELOG carefully:
+
+ https://docs.pytest.org/en/7.0.x/changelog.html
+
+Thanks to all the contributors to this release:
+
+* Adam J. Stewart
+* Alexander King
+* Amin Alaee
+* Andrew Neitsch
+* Anthony Sottile
+* Ben Davies
+* Bernát Gábor
+* Brian Okken
+* Bruno Oliveira
+* Cristian Vera
+* David Szotten
+* Eddie
+* Emmanuel Arias
+* Emmanuel Meric de Bellefon
+* Eric Liu
+* Florian Bruhin
+* GergelyKalmar
+* Graeme Smecher
+* Harshna
+* Hugo van Kemenade
+* Jakub Kulík
+* James Myatt
+* Jeff Rasley
+* Kale Kundert
+* Miro Hrončok
+* Naveen-Pratap
+* Oleg Höfling
+* Ran Benita
+* Ronny Pfannschmidt
+* Simon K
+* Srip
+* Sören Wegener
+* Taneli Hukkinen
+* Terje Runde
+* Thomas Grainger
+* Thomas Hisch
+* William Jamir Silva
+* Zac Hatfield-Dodds
+* andrewdotn
+* denivyruck
+* ericluoliu
+* oleg.hoefling
+* symonk
+* ziebam
+* Éloi Rivard
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-7.0.1.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-7.0.1.rst
new file mode 100644
index 0000000000..5accfbad0d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/release-7.0.1.rst
@@ -0,0 +1,20 @@
+pytest-7.0.1
+=======================================
+
+pytest 7.0.1 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all of the contributors to this release:
+
+* Anthony Sottile
+* Bruno Oliveira
+* Ran Benita
+
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/sprint2016.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/sprint2016.rst
new file mode 100644
index 0000000000..8e70658987
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/announce/sprint2016.rst
@@ -0,0 +1,64 @@
+python testing sprint June 20th-26th 2016
+======================================================
+
+.. image:: ../img/freiburg2.jpg
+ :width: 400
+
+The pytest core group held the biggest sprint
+in its history in June 2016, taking place in the Black Forest town of Freiburg
+in Germany. In February 2016 we started a `funding
+campaign on Indiegogo to cover expenses
+<http://igg.me/at/pytest-sprint/x/4034848>`_. The page also mentions
+some preliminary topics:
+
+- improving pytest-xdist test scheduling to take into account
+ fixture setups and explicit user hints.
+
+- provide info on fixture dependencies during --collect-only
+
+- tying pytest-xdist to tox so that you can do "py.test -e py34"
+ to run tests in a particular tox-managed virtualenv. Also
+ look into making pytest-xdist use tox environments on
+ remote ssh-sides so that remote dependency management becomes
+ easier.
+
+- refactoring the fixture system so more people understand it :)
+
+- integrating PyUnit setup methods as autouse fixtures.
+  Possibly adding ways to influence ordering of same-scoped
+ fixtures (so you can make a choice of which fixtures come
+ before others)
+
+- fixing bugs and issues from the tracker, really an endless source :)
+
+
+Participants
+--------------
+
+Over 20 participants took part from 4 continents, including employees
+from Splunk, Personalkollen, Cobe.io, FanDuel and Dolby. Some newcomers
+mixed with developers who have worked on pytest since its beginning, and
+of course everyone in between.
+
+
+Sprint organisation, schedule
+-------------------------------
+
+People arrived in Freiburg on the 19th, with sprint development taking
+place on 20th, 21st, 22nd, 24th and 25th. On the 23rd we took a break
+day for some hot hiking in the Black Forest.
+
+Sprint activity was organised heavily around pairing, with plenty of group
+discussions to take advantage of the high bandwidth, as well as lightning
+talks.
+
+
+Money / funding
+---------------
+
+The Indiegogo campaign aimed for 11000 USD and in the end raised over
+12000 USD, which reimbursed travel costs and paid for the sprint venue and catering.
+
+Excess money is reserved for further sprint/travel funding for pytest/tox
+contributors.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/backwards-compatibility.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/backwards-compatibility.rst
new file mode 100644
index 0000000000..3a0ff12616
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/backwards-compatibility.rst
@@ -0,0 +1,79 @@
+.. _backwards-compatibility:
+
+Backwards Compatibility Policy
+==============================
+
+.. versionadded:: 6.0
+
+pytest is actively evolving and is a project that has been decades in the making:
+we keep learning about new and better structures to express different details about testing.
+
+While we implement those modifications we try to ensure an easy transition and don't want to impose unnecessary churn on our users and community/plugin authors.
+
+As of now, pytest considers multiple types of backward compatibility transitions:
+
+a) trivial: APIs which trivially translate to the new mechanism,
+ and do not cause problematic changes.
+
+ We try to support those indefinitely while encouraging users to switch to newer/better mechanisms through documentation.
+
+b) transitional: the old and new API don't conflict
+ and we can help users transition by using warnings, while supporting both for a prolonged time.
+
+ We will only start the removal of deprecated functionality in major releases (e.g. if we deprecate something in 3.0 we will start to remove it in 4.0), and keep it around for at least two minor releases (e.g. if we deprecate something in 3.9 and 4.0 is the next release, we start to remove it in 5.0, not in 4.0).
+
+  A deprecated feature scheduled to be removed in major version X will use the warning class `PytestRemovedInXWarning` (a subclass of :class:`~pytest.PytestDeprecationWarning`).
+
+ When the deprecation expires (e.g. 4.0 is released), we won't remove the deprecated functionality immediately, but will use the standard warning filters to turn `PytestRemovedInXWarning` (e.g. `PytestRemovedIn4Warning`) into **errors** by default. This approach makes it explicit that removal is imminent, and still gives you time to turn the deprecated feature into a warning instead of an error so it can be dealt with in your own time. In the next minor release (e.g. 4.1), the feature will be effectively removed.
+
+
+c) true breakage: should only be considered when normal transition is unreasonably unsustainable and would offset important development/features by years.
+ In addition, they should be limited to APIs where the number of actual users is very small (for example only impacting some plugins), and can be coordinated with the community in advance.
+
+ Examples for such upcoming changes:
+
+ * removal of ``pytest_runtest_protocol/nextitem`` - :issue:`895`
+ * rearranging of the node tree to include ``FunctionDefinition``
+ * rearranging of ``SetupState`` :issue:`895`
+
+ True breakages must be announced first in an issue containing:
+
+ * Detailed description of the change
+ * Rationale
+ * Expected impact on users and plugin authors (example in :issue:`895`)
+
+  Once there's no hard *-1* on the issue, it should be followed up by an initial proof-of-concept Pull Request.
+
+  This POC serves both as a coordination point to assess impact and as potential inspiration for a transitional solution.
+
+ After a reasonable amount of time the PR can be merged to base a new major release.
+
+  For the PR to mature from POC to acceptance, it must contain:
+
+  * Setup of deprecation errors/warnings that help users fix and port their code. If it is possible to introduce a deprecation period under the current series, before the true breakage, it should be introduced in a separate PR and be part of the current release stream.
+  * Detailed description of the rationale and examples on how to port code in ``doc/en/deprecations.rst``.
+
+
+History
+=========
+
+
+Focus primarily on smooth transition - stance (pre 6.0)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Keeping backwards compatibility has a very high priority in the pytest project. Although we have deprecated functionality over the years, most of it is still supported. All deprecations in pytest were done because simpler or more efficient ways of accomplishing the same tasks have emerged, making the old way of doing things unnecessary.
+
+With the pytest 3.0 release we introduced a clear communication scheme for when we will actually remove the old busted joint and politely ask you to use the new hotness instead, while giving you enough time to adjust your tests or raise concerns if there are valid reasons to keep deprecated functionality around.
+
+To communicate changes we issue deprecation warnings using a custom warning hierarchy (see :ref:`internal-warnings`). These warnings may be suppressed using the standard means: the ``-W`` command-line flag or the ``filterwarnings`` ini option (see :ref:`warnings`), but we suggest using these sparingly and temporarily, and heeding the warnings when possible.
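+
+For illustration only (this sketch is not part of the policy itself, and the warning class is just an example), a test module could temporarily silence such a warning with the standard ``filterwarnings`` mark:
+
+.. code-block:: python
+
+    import pytest
+
+    # temporarily silence one deprecation warning for this module only;
+    # remove the filter once the affected code has been ported
+    pytestmark = pytest.mark.filterwarnings(
+        "ignore::pytest.PytestDeprecationWarning"
+    )
+
+
+    def test_legacy_behaviour():
+        ...  # exercises an API that still emits the deprecation warning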
+
+We will only start the removal of deprecated functionality in major releases (e.g. if we deprecate something in 3.0 we will start to remove it in 4.0), and keep it around for at least two minor releases (e.g. if we deprecate something in 3.9 and 4.0 is the next release, we start to remove it in 5.0, not in 4.0).
+
+When the deprecation expires (e.g. 4.0 is released), we won't remove the deprecated functionality immediately, but will use the standard warning filters to turn them into **errors** by default. This approach makes it explicit that removal is imminent, and still gives you time to turn the deprecated feature into a warning instead of an error so it can be dealt with in your own time. In the next minor release (e.g. 4.1), the feature will be effectively removed.
+
+
+Deprecation Roadmap
+-------------------
+
+Features that are currently deprecated, as well as features removed in previous releases, can be found in :ref:`deprecations`.
+
+We track future deprecation and removal of features using milestones and the `deprecation <https://github.com/pytest-dev/pytest/issues?q=label%3A%22type%3A+deprecation%22>`_ and `removal <https://github.com/pytest-dev/pytest/labels/type%3A%20removal>`_ labels on GitHub.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/builtin.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/builtin.rst
new file mode 100644
index 0000000000..c7e7863b21
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/builtin.rst
@@ -0,0 +1,197 @@
+:orphan:
+
+.. _`pytest helpers`:
+
+Pytest API and builtin fixtures
+================================================
+
+
+Most of the information on this page has been moved over to :ref:`api-reference`.
+
+For information on plugin hooks and objects, see :ref:`plugins`.
+
+For information on the ``pytest.mark`` mechanism, see :ref:`mark`.
+
+For information about fixtures, see :ref:`fixtures`. To see a complete list of available fixtures (add ``-v`` to also see fixtures with leading ``_``), type:
+
+.. code-block:: pytest
+
+ $ pytest --fixtures -v
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collected 0 items
+ cache -- .../_pytest/cacheprovider.py:510
+ Return a cache object that can persist state between testing sessions.
+
+ cache.get(key, default)
+ cache.set(key, value)
+
+ Keys must be ``/`` separated strings, where the first part is usually the
+ name of your plugin or application to avoid clashes with other cache users.
+
+ Values can be any object handled by the json stdlib module.
+
+ capsys -- .../_pytest/capture.py:878
+ Enable text capturing of writes to ``sys.stdout`` and ``sys.stderr``.
+
+ The captured output is made available via ``capsys.readouterr()`` method
+ calls, which return a ``(out, err)`` namedtuple.
+ ``out`` and ``err`` will be ``text`` objects.
+
+ capsysbinary -- .../_pytest/capture.py:895
+ Enable bytes capturing of writes to ``sys.stdout`` and ``sys.stderr``.
+
+ The captured output is made available via ``capsysbinary.readouterr()``
+ method calls, which return a ``(out, err)`` namedtuple.
+ ``out`` and ``err`` will be ``bytes`` objects.
+
+ capfd -- .../_pytest/capture.py:912
+ Enable text capturing of writes to file descriptors ``1`` and ``2``.
+
+ The captured output is made available via ``capfd.readouterr()`` method
+ calls, which return a ``(out, err)`` namedtuple.
+ ``out`` and ``err`` will be ``text`` objects.
+
+ capfdbinary -- .../_pytest/capture.py:929
+ Enable bytes capturing of writes to file descriptors ``1`` and ``2``.
+
+ The captured output is made available via ``capfdbinary.readouterr()``
+ method calls, which return a ``(out, err)`` namedtuple.
+ ``out`` and ``err`` will be ``bytes`` objects.
+
+ doctest_namespace [session scope] -- .../_pytest/doctest.py:731
+ Fixture that returns a :py:class:`dict` that will be injected into the
+ namespace of doctests.
+
+ pytestconfig [session scope] -- .../_pytest/fixtures.py:1365
+ Session-scoped fixture that returns the session's :class:`pytest.Config`
+ object.
+
+ Example::
+
+ def test_foo(pytestconfig):
+ if pytestconfig.getoption("verbose") > 0:
+ ...
+
+ record_property -- .../_pytest/junitxml.py:282
+ Add extra properties to the calling test.
+
+ User properties become part of the test report and are available to the
+ configured reporters, like JUnit XML.
+
+ The fixture is callable with ``name, value``. The value is automatically
+ XML-encoded.
+
+ Example::
+
+ def test_function(record_property):
+ record_property("example_key", 1)
+
+ record_xml_attribute -- .../_pytest/junitxml.py:305
+ Add extra xml attributes to the tag for the calling test.
+
+ The fixture is callable with ``name, value``. The value is
+ automatically XML-encoded.
+
+ record_testsuite_property [session scope] -- .../_pytest/junitxml.py:343
+ Record a new ``<property>`` tag as child of the root ``<testsuite>``.
+
+ This is suitable to writing global information regarding the entire test
+ suite, and is compatible with ``xunit2`` JUnit family.
+
+ This is a ``session``-scoped fixture which is called with ``(name, value)``. Example:
+
+ .. code-block:: python
+
+ def test_foo(record_testsuite_property):
+ record_testsuite_property("ARCH", "PPC")
+ record_testsuite_property("STORAGE_TYPE", "CEPH")
+
+ ``name`` must be a string, ``value`` will be converted to a string and properly xml-escaped.
+
+ .. warning::
+
+ Currently this fixture **does not work** with the
+ `pytest-xdist <https://github.com/pytest-dev/pytest-xdist>`__ plugin. See
+ :issue:`7767` for details.
+
+ tmpdir_factory [session scope] -- .../_pytest/legacypath.py:295
+ Return a :class:`pytest.TempdirFactory` instance for the test session.
+
+ tmpdir -- .../_pytest/legacypath.py:302
+ Return a temporary directory path object which is unique to each test
+ function invocation, created as a sub directory of the base temporary
+ directory.
+
+ By default, a new base temporary directory is created each test session,
+ and old bases are removed after 3 sessions, to aid in debugging. If
+ ``--basetemp`` is used then it is cleared each session. See :ref:`base
+ temporary directory`.
+
+ The returned object is a `legacy_path`_ object.
+
+ .. _legacy_path: https://py.readthedocs.io/en/latest/path.html
+
+ caplog -- .../_pytest/logging.py:483
+ Access and control log capturing.
+
+ Captured logs are available through the following properties/methods::
+
+ * caplog.messages -> list of format-interpolated log messages
+ * caplog.text -> string containing formatted log output
+ * caplog.records -> list of logging.LogRecord instances
+ * caplog.record_tuples -> list of (logger_name, level, message) tuples
+ * caplog.clear() -> clear captured records and formatted log output string
+
+ monkeypatch -- .../_pytest/monkeypatch.py:29
+ A convenient fixture for monkey-patching.
+
+ The fixture provides these methods to modify objects, dictionaries or
+ os.environ::
+
+ monkeypatch.setattr(obj, name, value, raising=True)
+ monkeypatch.delattr(obj, name, raising=True)
+ monkeypatch.setitem(mapping, name, value)
+ monkeypatch.delitem(obj, name, raising=True)
+ monkeypatch.setenv(name, value, prepend=None)
+ monkeypatch.delenv(name, raising=True)
+ monkeypatch.syspath_prepend(path)
+ monkeypatch.chdir(path)
+
+ All modifications will be undone after the requesting test function or
+ fixture has finished. The ``raising`` parameter determines if a KeyError
+ or AttributeError will be raised if the set/deletion operation has no target.
+
+ recwarn -- .../_pytest/recwarn.py:29
+ Return a :class:`WarningsRecorder` instance that records all warnings emitted by test functions.
+
+ See https://docs.python.org/library/how-to/capture-warnings.html for information
+ on warning categories.
+
+ tmp_path_factory [session scope] -- .../_pytest/tmpdir.py:183
+ Return a :class:`pytest.TempPathFactory` instance for the test session.
+
+ tmp_path -- .../_pytest/tmpdir.py:198
+ Return a temporary directory path object which is unique to each test
+ function invocation, created as a sub directory of the base temporary
+ directory.
+
+ By default, a new base temporary directory is created each test session,
+ and old bases are removed after 3 sessions, to aid in debugging. If
+ ``--basetemp`` is used then it is cleared each session. See :ref:`base
+ temporary directory`.
+
+ The returned object is a :class:`pathlib.Path` object.
+
+
+ ========================== no tests ran in 0.12s ===========================
+
+You can also interactively ask for help, e.g. by typing the following at the Python interactive prompt:
+
+.. code-block:: python
+
+ import pytest
+
+ help(pytest)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/changelog.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/changelog.rst
new file mode 100644
index 0000000000..1acdad366d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/changelog.rst
@@ -0,0 +1,9044 @@
+.. _`changelog`:
+
+=========
+Changelog
+=========
+
+Versions follow `Semantic Versioning <https://semver.org/>`_ (``<major>.<minor>.<patch>``).
+
+Backward incompatible (breaking) changes will only be introduced in major versions
+with advance notice in the **Deprecations** section of releases.
+
+
+..
+ You should *NOT* be adding new change log entries to this file, this
+ file is managed by towncrier. You *may* edit previous change logs to
+ fix problems like typo corrections or such.
+ To add a new change log entry, please see
+ https://pip.pypa.io/en/latest/development/contributing/#news-entries
+ we named the news folder changelog
+
+
+.. only:: changelog_towncrier_draft
+
+ .. The 'changelog_towncrier_draft' tag is included by our 'tox -e docs',
+ but not on readthedocs.
+
+ .. include:: _changelog_towncrier_draft.rst
+
+.. towncrier release notes start
+
+pytest 7.0.1 (2022-02-11)
+=========================
+
+Bug Fixes
+---------
+
+- `#9608 <https://github.com/pytest-dev/pytest/issues/9608>`_: Fix invalid importing of ``importlib.readers`` in Python 3.9.
+
+
+- `#9610 <https://github.com/pytest-dev/pytest/issues/9610>`_: Restore `UnitTestFunction.obj` to return unbound rather than bound method.
+ Fixes a crash during a failed teardown in unittest TestCases with non-default `__init__`.
+ Regressed in pytest 7.0.0.
+
+
+- `#9636 <https://github.com/pytest-dev/pytest/issues/9636>`_: The ``pythonpath`` plugin was renamed to ``python_path``. This avoids a conflict with the ``pytest-pythonpath`` plugin.
+
+
+- `#9642 <https://github.com/pytest-dev/pytest/issues/9642>`_: Fix running tests by id with ``::`` in the parametrize portion.
+
+
+- `#9643 <https://github.com/pytest-dev/pytest/issues/9643>`_: Delay issuing a :class:`~pytest.PytestWarning` about diamond inheritance involving :class:`~pytest.Item` and
+ :class:`~pytest.Collector` so it can be filtered using :ref:`standard warning filters <warnings>`.
+
+
+pytest 7.0.0 (2022-02-03)
+=========================
+
+(**Please see the full set of changes for this release also in the 7.0.0rc1 notes below**)
+
+Deprecations
+------------
+
+- `#9488 <https://github.com/pytest-dev/pytest/issues/9488>`_: If custom subclasses of nodes like :class:`pytest.Item` override the
+ ``__init__`` method, they should take ``**kwargs``. See
+ :ref:`uncooperative-constructors-deprecated` for details.
+
+  Note that a deprecation warning is only emitted when there is a conflict in the
+ arguments pytest expected to pass. This deprecation was already part of pytest
+ 7.0.0rc1 but wasn't documented.
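+
+  A minimal sketch of a cooperative constructor (the ``extra`` keyword is hypothetical, added only to show the pattern):
+
+  .. code-block:: python
+
+      import pytest
+
+
+      class MyItem(pytest.Item):
+          def __init__(self, *, extra, **kwargs):
+              # forward everything pytest passes on to the base class
+              super().__init__(**kwargs)
+              self.extra = extra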
+
+
+
+Bug Fixes
+---------
+
+- `#9355 <https://github.com/pytest-dev/pytest/issues/9355>`_: Fixed an issue where error messages printed function decorators when using assert in Python 3.8 and above.
+
+
+- `#9396 <https://github.com/pytest-dev/pytest/issues/9396>`_: Ensure :attr:`pytest.Config.inifile` is available during the :func:`pytest_cmdline_main <_pytest.hookspec.pytest_cmdline_main>` hook (regression during ``7.0.0rc1``).
+
+
+
+Improved Documentation
+----------------------
+
+- `#9404 <https://github.com/pytest-dev/pytest/issues/9404>`_: Added extra documentation on alternatives to common misuses of `pytest.warns(None)` ahead of its deprecation.
+
+
+- `#9505 <https://github.com/pytest-dev/pytest/issues/9505>`_: Clarify where the configuration files are located. To avoid confusion, the documentation mentions
+  that the configuration file is located in the root of the repository.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- `#9521 <https://github.com/pytest-dev/pytest/issues/9521>`_: Add test coverage to assertion rewrite path.
+
+
+pytest 7.0.0rc1 (2021-12-06)
+============================
+
+Breaking Changes
+----------------
+
+- `#7259 <https://github.com/pytest-dev/pytest/issues/7259>`_: The :ref:`Node.reportinfo() <non-python tests>` function first return value type has been expanded from `py.path.local | str` to `os.PathLike[str] | str`.
+
+ Most plugins which refer to `reportinfo()` only define it as part of a custom :class:`pytest.Item` implementation.
+  Since `py.path.local` is an `os.PathLike[str]`, these plugins are unaffected.
+
+  Plugins and users which call `reportinfo()`, use the first return value, and interact with it as a `py.path.local` would need to adjust by calling `py.path.local(fspath)`.
+  Preferably, though, avoid the legacy `py.path.local` and use `pathlib.Path`, or use `item.location` or `item.path`, instead.
+
+ Note: pytest was not able to provide a deprecation period for this change.
+
+
+- `#8246 <https://github.com/pytest-dev/pytest/issues/8246>`_: ``--version`` now writes version information to ``stdout`` rather than ``stderr``.
+
+
+- `#8733 <https://github.com/pytest-dev/pytest/issues/8733>`_: Drop a workaround for `pyreadline <https://github.com/pyreadline/pyreadline>`__ that made it work with ``--pdb``.
+
+ The workaround was introduced in `#1281 <https://github.com/pytest-dev/pytest/pull/1281>`__ in 2015, however since then
+ `pyreadline seems to have gone unmaintained <https://github.com/pyreadline/pyreadline/issues/58>`__, is `generating
+ warnings <https://github.com/pytest-dev/pytest/issues/8847>`__, and will stop working on Python 3.10.
+
+
+- `#9061 <https://github.com/pytest-dev/pytest/issues/9061>`_: Using :func:`pytest.approx` in a boolean context now raises an error hinting at the proper usage.
+
+ It is apparently common for users to mistakenly use ``pytest.approx`` like this:
+
+ .. code-block:: python
+
+ assert pytest.approx(actual, expected)
+
+ While the correct usage is:
+
+ .. code-block:: python
+
+ assert actual == pytest.approx(expected)
+
+ The new error message helps catch those mistakes.
+
+
+- `#9277 <https://github.com/pytest-dev/pytest/issues/9277>`_: The ``pytest.Instance`` collector type has been removed.
+ Importing ``pytest.Instance`` or ``_pytest.python.Instance`` returns a dummy type and emits a deprecation warning.
+ See :ref:`instance-collector-deprecation` for details.
+
+
+- `#9308 <https://github.com/pytest-dev/pytest/issues/9308>`_: **PytestRemovedIn7Warning deprecation warnings are now errors by default.**
+
+ Following our plan to remove deprecated features with as little disruption as
+ possible, all warnings of type ``PytestRemovedIn7Warning`` now generate errors
+ instead of warning messages by default.
+
+ **The affected features will be effectively removed in pytest 7.1**, so please consult the
+ :ref:`deprecations` section in the docs for directions on how to update existing code.
+
+ In the pytest ``7.0.X`` series, it is possible to change the errors back into warnings as a
+ stopgap measure by adding this to your ``pytest.ini`` file:
+
+ .. code-block:: ini
+
+ [pytest]
+ filterwarnings =
+ ignore::pytest.PytestRemovedIn7Warning
+
+ But this will stop working when pytest ``7.1`` is released.
+
+ **If you have concerns** about the removal of a specific feature, please add a
+ comment to :issue:`9308`.
+
+
+
+Deprecations
+------------
+
+- `#7259 <https://github.com/pytest-dev/pytest/issues/7259>`_: ``py.path.local`` arguments for hooks have been deprecated. See :ref:`the deprecation note <legacy-path-hooks-deprecated>` for full details.
+
+ ``py.path.local`` arguments to Node constructors have been deprecated. See :ref:`the deprecation note <node-ctor-fspath-deprecation>` for full details.
+
+ .. note::
+ The name of the :class:`~_pytest.nodes.Node` arguments and attributes (the
+ new attribute being ``path``) is **the opposite** of the situation for hooks
+ (the old argument being ``path``).
+
+ This is an unfortunate artifact due to historical reasons, which should be
+ resolved in future versions as we slowly get rid of the :pypi:`py`
+ dependency (see :issue:`9283` for a longer discussion).
+
+
+- `#7469 <https://github.com/pytest-dev/pytest/issues/7469>`_: Directly constructing the following classes is now deprecated:
+
+ - ``_pytest.mark.structures.Mark``
+ - ``_pytest.mark.structures.MarkDecorator``
+ - ``_pytest.mark.structures.MarkGenerator``
+ - ``_pytest.python.Metafunc``
+ - ``_pytest.runner.CallInfo``
+ - ``_pytest._code.ExceptionInfo``
+ - ``_pytest.config.argparsing.Parser``
+ - ``_pytest.config.argparsing.OptionGroup``
+ - ``_pytest.pytester.HookRecorder``
+
+ These constructors have always been considered private, but now issue a deprecation warning, which may become a hard error in pytest 8.
+
+
+- `#8242 <https://github.com/pytest-dev/pytest/issues/8242>`_: Raising :class:`unittest.SkipTest` to skip collection of tests during the
+ pytest collection phase is deprecated. Use :func:`pytest.skip` instead.
+
+ Note: This deprecation only relates to using :class:`unittest.SkipTest` during test
+ collection. You are probably not doing that. Ordinary usage of
+ :class:`unittest.SkipTest` / :meth:`unittest.TestCase.skipTest` /
+ :func:`unittest.skip` in unittest test cases is fully supported.
+
+
+- `#8315 <https://github.com/pytest-dev/pytest/issues/8315>`_: Several behaviors of :meth:`Parser.addoption <pytest.Parser.addoption>` are now
+ scheduled for removal in pytest 8 (deprecated since pytest 2.4.0):
+
+ - ``parser.addoption(..., help=".. %default ..")`` - use ``%(default)s`` instead.
+ - ``parser.addoption(..., type="int/string/float/complex")`` - use ``type=int`` etc. instead.
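+
+  A small sketch of the replacement style, using a hypothetical ``--repeat`` option:
+
+  .. code-block:: python
+
+      def pytest_addoption(parser):
+          # pass a real callable for type= and use %(default)s in the help text
+          parser.addoption(
+              "--repeat",
+              type=int,
+              default=1,
+              help="number of repetitions (default: %(default)s)",
+          )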
+
+
+- `#8447 <https://github.com/pytest-dev/pytest/issues/8447>`_: Defining a custom pytest node type which is both an :class:`pytest.Item <Item>` and a :class:`pytest.Collector <Collector>` (e.g. :class:`pytest.File <File>`) now issues a warning.
+  It was never sanely supported and triggers hard-to-debug errors.
+
+ See :ref:`the deprecation note <diamond-inheritance-deprecated>` for full details.
+
+
+- `#8592 <https://github.com/pytest-dev/pytest/issues/8592>`_: :hook:`pytest_cmdline_preparse` has been officially deprecated. It will be removed in a future release. Use :hook:`pytest_load_initial_conftests` instead.
+
+ See :ref:`the deprecation note <cmdline-preparse-deprecated>` for full details.
+
+
+- `#8645 <https://github.com/pytest-dev/pytest/issues/8645>`_: :func:`pytest.warns(None) <pytest.warns>` is now deprecated because many people used
+ it to mean "this code does not emit warnings", but it actually had the effect of
+ checking that the code emits at least one warning of any type - like ``pytest.warns()``
+ or ``pytest.warns(Warning)``.
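+
+  One possible way to express the "no warnings at all" intent is sketched below with the standard library filters (``do_something`` is a hypothetical function under test):
+
+  .. code-block:: python
+
+      import warnings
+
+
+      def do_something():
+          return 42  # hypothetical code under test
+
+
+      def test_emits_no_warnings():
+          with warnings.catch_warnings():
+              warnings.simplefilter("error")  # any warning now raises an exception
+              assert do_something() == 42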
+
+
+- `#8948 <https://github.com/pytest-dev/pytest/issues/8948>`_: :func:`pytest.skip(msg=...) <pytest.skip>`, :func:`pytest.fail(msg=...) <pytest.fail>` and :func:`pytest.exit(msg=...) <pytest.exit>`
+ signatures now accept a ``reason`` argument instead of ``msg``. Using ``msg`` still works, but is deprecated and will be removed in a future release.
+
+ This was changed for consistency with :func:`pytest.mark.skip <pytest.mark.skip>` and :func:`pytest.mark.xfail <pytest.mark.xfail>` which both accept
+ ``reason`` as an argument.
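+
+  A short sketch of the new spelling (the platform check is made up for illustration):
+
+  .. code-block:: python
+
+      import sys
+
+      import pytest
+
+
+      def test_unix_only():
+          if sys.platform == "win32":
+              pytest.skip(reason="requires a Unix-like platform")  # previously msg=...
+          assert True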
+
+- `#8174 <https://github.com/pytest-dev/pytest/issues/8174>`_: The following changes have been made to types reachable through :attr:`pytest.ExceptionInfo.traceback`:
+
+ - The ``path`` property of ``_pytest.code.Code`` returns ``Path`` instead of ``py.path.local``.
+ - The ``path`` property of ``_pytest.code.TracebackEntry`` returns ``Path`` instead of ``py.path.local``.
+
+ There was no deprecation period for this change (sorry!).
+
+
+Features
+--------
+
+- `#5196 <https://github.com/pytest-dev/pytest/issues/5196>`_: Tests are now ordered by definition order in more cases.
+
+ In a class hierarchy, tests from base classes are now consistently ordered before tests defined on their subclasses (reverse MRO order).
+
+
+- `#7132 <https://github.com/pytest-dev/pytest/issues/7132>`_: Added two environment variables :envvar:`PYTEST_THEME` and :envvar:`PYTEST_THEME_MODE` to let the users customize the pygments theme used.
+
+
+- `#7259 <https://github.com/pytest-dev/pytest/issues/7259>`_: Added :meth:`cache.mkdir() <pytest.Cache.mkdir>`, which is similar to the existing :meth:`cache.makedir() <pytest.Cache.makedir>`,
+ but returns a :class:`pathlib.Path` instead of a legacy ``py.path.local``.
+
+ Added a ``paths`` type to :meth:`parser.addini() <pytest.Parser.addini>`,
+ as in ``parser.addini("mypaths", "my paths", type="paths")``,
+ which is similar to the existing ``pathlist``,
+ but returns a list of :class:`pathlib.Path` instead of legacy ``py.path.local``.
+
+
+- `#7469 <https://github.com/pytest-dev/pytest/issues/7469>`_: The types of objects used in pytest's API are now exported so they may be used in type annotations.
+
+ The newly-exported types are:
+
+ - ``pytest.Config`` for :class:`Config <pytest.Config>`.
+ - ``pytest.Mark`` for :class:`marks <pytest.Mark>`.
+ - ``pytest.MarkDecorator`` for :class:`mark decorators <pytest.MarkDecorator>`.
+ - ``pytest.MarkGenerator`` for the :class:`pytest.mark <pytest.MarkGenerator>` singleton.
+  - ``pytest.Metafunc`` for the :class:`metafunc <pytest.Metafunc>` argument to the :hook:`pytest_generate_tests` hook.
+ - ``pytest.CallInfo`` for the :class:`CallInfo <pytest.CallInfo>` type passed to various hooks.
+ - ``pytest.PytestPluginManager`` for :class:`PytestPluginManager <pytest.PytestPluginManager>`.
+ - ``pytest.ExceptionInfo`` for the :class:`ExceptionInfo <pytest.ExceptionInfo>` type returned from :func:`pytest.raises` and passed to various hooks.
+ - ``pytest.Parser`` for the :class:`Parser <pytest.Parser>` type passed to the :hook:`pytest_addoption` hook.
+  - ``pytest.OptionGroup`` for the :class:`OptionGroup <pytest.OptionGroup>` type returned from the :func:`parser.getgroup <pytest.Parser.getgroup>` method.
+ - ``pytest.HookRecorder`` for the :class:`HookRecorder <pytest.HookRecorder>` type returned from :class:`~pytest.Pytester`.
+  - ``pytest.RecordedHookCall`` for the :class:`RecordedHookCall <pytest.RecordedHookCall>` type returned from :class:`~pytest.HookRecorder`.
+  - ``pytest.RunResult`` for the :class:`RunResult <pytest.RunResult>` type returned from :class:`~pytest.Pytester`.
+  - ``pytest.LineMatcher`` for the :class:`LineMatcher <pytest.LineMatcher>` type used in :class:`~pytest.RunResult` and others.
+ - ``pytest.TestReport`` for the :class:`TestReport <pytest.TestReport>` type used in various hooks.
+ - ``pytest.CollectReport`` for the :class:`CollectReport <pytest.CollectReport>` type used in various hooks.
+
+ Constructing most of them directly is not supported; they are only meant for use in type annotations.
+ Doing so will emit a deprecation warning, and may become a hard-error in pytest 8.0.
+
+ Subclassing them is also not supported. This is not currently enforced at runtime, but is detected by type-checkers such as mypy.
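+
+  A brief sketch of using one of the exported names purely as an annotation in a ``conftest.py`` (the marker registered here is only an example):
+
+  .. code-block:: python
+
+      import pytest
+
+
+      def pytest_configure(config: pytest.Config) -> None:
+          # the annotation documents intent; pytest passes the Config instance either way
+          config.addinivalue_line("markers", "slow: marks tests as slow")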
+
+
+- `#7856 <https://github.com/pytest-dev/pytest/issues/7856>`_: :ref:`--import-mode=importlib <import-modes>` now works with features that
+ depend on modules being on :py:data:`sys.modules`, such as :mod:`pickle` and :mod:`dataclasses`.
+
+
+- `#8144 <https://github.com/pytest-dev/pytest/issues/8144>`_: The following hooks now receive an additional ``pathlib.Path`` argument, equivalent to an existing ``py.path.local`` argument:
+
+ - :hook:`pytest_ignore_collect` - The ``collection_path`` parameter (equivalent to existing ``path`` parameter).
+ - :hook:`pytest_collect_file` - The ``file_path`` parameter (equivalent to existing ``path`` parameter).
+ - :hook:`pytest_pycollect_makemodule` - The ``module_path`` parameter (equivalent to existing ``path`` parameter).
+ - :hook:`pytest_report_header` - The ``start_path`` parameter (equivalent to existing ``startdir`` parameter).
+ - :hook:`pytest_report_collectionfinish` - The ``start_path`` parameter (equivalent to existing ``startdir`` parameter).
+
+ .. note::
+ The name of the :class:`~_pytest.nodes.Node` arguments and attributes (the
+ new attribute being ``path``) is **the opposite** of the situation for hooks
+ (the old argument being ``path``).
+
+ This is an unfortunate artifact due to historical reasons, which should be
+ resolved in future versions as we slowly get rid of the :pypi:`py`
+ dependency (see :issue:`9283` for a longer discussion).
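+
+  A small sketch of a hook implementation that opts into one of the new parameters (the ``.custom`` suffix check is only an example):
+
+  .. code-block:: python
+
+      from pathlib import Path
+
+
+      def pytest_ignore_collect(collection_path: Path, config):
+          # returning True skips this path; returning None leaves the decision to other hooks
+          if collection_path.suffix == ".custom":
+              return True
+          return None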
+
+
+- `#8251 <https://github.com/pytest-dev/pytest/issues/8251>`_: Implement ``Node.path`` as a ``pathlib.Path``. Both the old ``fspath`` and this new attribute get set no matter whether ``path`` or ``fspath`` (deprecated) is passed to the constructor. It is a replacement for the ``fspath`` attribute (which represents the same path as ``py.path.local``). While ``fspath`` is not deprecated yet
+ due to the ongoing migration of methods like :meth:`~_pytest.Item.reportinfo`, we expect to deprecate it in a future release.
+
+ .. note::
+ The name of the :class:`~_pytest.nodes.Node` arguments and attributes (the
+ new attribute being ``path``) is **the opposite** of the situation for hooks
+ (the old argument being ``path``).
+
+ This is an unfortunate artifact due to historical reasons, which should be
+ resolved in future versions as we slowly get rid of the :pypi:`py`
+ dependency (see :issue:`9283` for a longer discussion).
+
+
+- `#8421 <https://github.com/pytest-dev/pytest/issues/8421>`_: :func:`pytest.approx` now works on :class:`~decimal.Decimal` within mappings/dicts and sequences/lists.
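+
+  For example (values chosen arbitrarily, well within the default tolerance):
+
+  .. code-block:: python
+
+      from decimal import Decimal
+
+      import pytest
+
+
+      def test_decimal_in_mapping():
+          actual = {"price": Decimal("10.0000001")}
+          assert actual == pytest.approx({"price": Decimal("10")})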
+
+
+- `#8606 <https://github.com/pytest-dev/pytest/issues/8606>`_: pytest invocations with ``--fixtures-per-test`` and ``--fixtures`` have been enriched with:
+
+ - Fixture location path printed with the fixture name.
+ - First section of the fixture's docstring printed under the fixture name.
+ - Whole of fixture's docstring printed under the fixture name using ``--verbose`` option.
+
+
+- `#8761 <https://github.com/pytest-dev/pytest/issues/8761>`_: New :ref:`version-tuple` attribute, which makes it simpler for users to do something depending on the pytest version (such as declaring hooks which are introduced in later versions).
+
+
+- `#8789 <https://github.com/pytest-dev/pytest/issues/8789>`_: Switch TOML parser from ``toml`` to ``tomli`` for TOML v1.0.0 support in ``pyproject.toml``.
+
+
+- `#8920 <https://github.com/pytest-dev/pytest/issues/8920>`_: Added :class:`pytest.Stash`, a facility for plugins to store their data on :class:`~pytest.Config` and :class:`~_pytest.nodes.Node`\s in a type-safe and conflict-free manner.
+ See :ref:`plugin-stash` for details.
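+
+  A compact sketch of how a plugin might use it (the key name and stored value are illustrative):
+
+  .. code-block:: python
+
+      import pytest
+
+      # a plugin defines its key once, at import time
+      report_banner_key = pytest.StashKey[str]()
+
+
+      def pytest_configure(config: pytest.Config) -> None:
+          config.stash[report_banner_key] = "set by my-plugin"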
+
+
+- `#8953 <https://github.com/pytest-dev/pytest/issues/8953>`_: :class:`RunResult <_pytest.pytester.RunResult>` method :meth:`assert_outcomes <_pytest.pytester.RunResult.assert_outcomes>` now accepts a
+ ``warnings`` argument to assert the total number of warnings captured.
+
+
+- `#8954 <https://github.com/pytest-dev/pytest/issues/8954>`_: The ``--debug`` flag now accepts a :class:`str` file to route debug logs into; it still defaults to `pytestdebug.log`.
+
+
+- `#9023 <https://github.com/pytest-dev/pytest/issues/9023>`_: Full diffs are now always shown for equality assertions of iterables when
+ `CI` or ``BUILD_NUMBER`` is found in the environment, even when ``-v`` isn't
+ used.
+
+
+- `#9113 <https://github.com/pytest-dev/pytest/issues/9113>`_: :class:`RunResult <_pytest.pytester.RunResult>` method :meth:`assert_outcomes <_pytest.pytester.RunResult.assert_outcomes>` now accepts a
+ ``deselected`` argument to assert the total number of deselected tests.
+
+
+- `#9114 <https://github.com/pytest-dev/pytest/issues/9114>`_: Added :confval:`pythonpath` setting that adds listed paths to :data:`sys.path` for the duration of the test session. If you currently use the pytest-pythonpath or pytest-srcpaths plugins, you should be able to replace them with built-in `pythonpath` setting.
+
+
+
+Improvements
+------------
+
+- `#7480 <https://github.com/pytest-dev/pytest/issues/7480>`_: A deprecation scheduled to be removed in a major version X (e.g. pytest 7, 8, 9, ...) now uses warning category `PytestRemovedInXWarning`,
+ a subclass of :class:`~pytest.PytestDeprecationWarning`,
+ instead of :class:`PytestDeprecationWarning` directly.
+
+ See :ref:`backwards-compatibility` for more details.
+
+
+- `#7864 <https://github.com/pytest-dev/pytest/issues/7864>`_: Improved error messages when parsing warning filters.
+
+ Previously pytest would show an internal traceback, which besides being ugly sometimes would hide the cause
+ of the problem (for example an ``ImportError`` while importing a specific warning type).
+
+
+- `#8335 <https://github.com/pytest-dev/pytest/issues/8335>`_: Improved :func:`pytest.approx` assertion messages for sequences of numbers.
+
+  The assertion message now dumps a table with the index and the error of each diff.
+ Example::
+
+ > assert [1, 2, 3, 4] == pytest.approx([1, 3, 3, 5])
+ E assert comparison failed for 2 values:
+ E Index | Obtained | Expected
+ E 1 | 2 | 3 +- 3.0e-06
+ E 3 | 4 | 5 +- 5.0e-06
+
+
+- `#8403 <https://github.com/pytest-dev/pytest/issues/8403>`_: pytest truncates long strings in assertion errors so they don't clutter the output too much,
+  currently at ``240`` characters by default.
+
+ However, in some cases the longer output helps, or is even crucial, to diagnose a failure. Using ``-v`` will
+ now increase the truncation threshold to ``2400`` characters, and ``-vv`` or higher will disable truncation entirely.
+
+
+- `#8509 <https://github.com/pytest-dev/pytest/issues/8509>`_: Fixed an issue, present since pytest 6.2.0, where :meth:`unittest.TestCase.setUpClass` is not called when a test has `/` in its name.
+
+ This refers to the path part in pytest node IDs, e.g. ``TestClass::test_it`` in the node ID ``tests/test_file.py::TestClass::test_it``.
+
+  Now, instead of assuming that the test name does not contain ``/``, it is assumed that the test path does not contain ``::``. We hope to make both of these work in the future.
+
+
+- `#8803 <https://github.com/pytest-dev/pytest/issues/8803>`_: It is now possible to add colors to custom log levels on cli log.
+
+ By using :func:`add_color_level <_pytest.logging.add_color_level>` from a ``pytest_configure`` hook, colors can be added::
+
+ logging_plugin = config.pluginmanager.get_plugin('logging-plugin')
+ logging_plugin.log_cli_handler.formatter.add_color_level(logging.INFO, 'cyan')
+ logging_plugin.log_cli_handler.formatter.add_color_level(logging.SPAM, 'blue')
+
+ See :ref:`log_colors` for more information.
+
+
+- `#8822 <https://github.com/pytest-dev/pytest/issues/8822>`_: When showing fixture paths in `--fixtures` or `--fixtures-per-test`, fixtures coming from pytest itself now display an elided path, rather than the full path to the file in the `site-packages` directory.
+
+
+- `#8898 <https://github.com/pytest-dev/pytest/issues/8898>`_: Complex numbers are now treated like floats and integers when generating parameterization IDs.
+
+
+- `#9062 <https://github.com/pytest-dev/pytest/issues/9062>`_: ``--stepwise-skip`` now implicitly enables ``--stepwise`` and can be used on its own.
+
+
+- `#9205 <https://github.com/pytest-dev/pytest/issues/9205>`_: :meth:`pytest.Cache.set` now preserves key order when saving dicts.
+
+
+
+Bug Fixes
+---------
+
+- `#7124 <https://github.com/pytest-dev/pytest/issues/7124>`_: Fixed an issue where ``__main__.py`` would raise an ``ImportError`` when ``--doctest-modules`` was provided.
+
+
+- `#8061 <https://github.com/pytest-dev/pytest/issues/8061>`_: Fixed failing ``staticmethod`` test cases if they are inherited from a parent test class.
+
+
+- `#8192 <https://github.com/pytest-dev/pytest/issues/8192>`_: ``testdir.makefile`` now silently accepts values which don't start with ``.`` to maintain backward compatibility with older pytest versions.
+
+ ``pytester.makefile`` now issues a clearer error if the ``.`` is missing in the ``ext`` argument.
+
+
+- `#8258 <https://github.com/pytest-dev/pytest/issues/8258>`_: Fixed issue where pytest's ``faulthandler`` support would not dump traceback on crashes
+ if the :mod:`faulthandler` module was already enabled during pytest startup (using
+ ``python -X dev -m pytest`` for example).
+
+
+- `#8317 <https://github.com/pytest-dev/pytest/issues/8317>`_: Fixed an issue where illegal directory characters derived from ``getpass.getuser()`` raised an ``OSError``.
+
+
+- `#8367 <https://github.com/pytest-dev/pytest/issues/8367>`_: Fix ``Class.from_parent`` so it forwards extra keyword arguments to the constructor.
+
+
+- `#8377 <https://github.com/pytest-dev/pytest/issues/8377>`_: The test selection options ``pytest -k`` and ``pytest -m`` now support matching
+ names containing forward slash (``/``) characters.
+
+
+- `#8384 <https://github.com/pytest-dev/pytest/issues/8384>`_: The ``@pytest.mark.skip`` decorator now correctly handles its arguments. When the ``reason`` argument is accidentally given both positional and as a keyword (e.g. because it was confused with ``skipif``), a ``TypeError`` now occurs. Before, such tests were silently skipped, and the positional argument ignored. Additionally, ``reason`` is now documented correctly as positional or keyword (rather than keyword-only).
+
+
+- `#8394 <https://github.com/pytest-dev/pytest/issues/8394>`_: Use private names for internal fixtures that handle classic setup/teardown so that they don't show up with the default ``--fixtures`` invocation (but they still show up with ``--fixtures -v``).
+
+
+- `#8456 <https://github.com/pytest-dev/pytest/issues/8456>`_: The :confval:`required_plugins` config option now works correctly when pre-releases of plugins are installed, rather than falsely claiming that those plugins aren't installed at all.
+
+
+- `#8464 <https://github.com/pytest-dev/pytest/issues/8464>`_: ``-c <config file>`` now also properly defines ``rootdir`` as the directory that contains ``<config file>``.
+
+
+- `#8503 <https://github.com/pytest-dev/pytest/issues/8503>`_: :meth:`pytest.MonkeyPatch.syspath_prepend` no longer fails when
+ ``setuptools`` is not installed.
+ It now only calls :func:`pkg_resources.fixup_namespace_packages` if
+ ``pkg_resources`` was previously imported, because it is not needed otherwise.
+
+
+- `#8548 <https://github.com/pytest-dev/pytest/issues/8548>`_: Introduced a fix to handle precision width in ``log-cli-format``, in turn fixing output coloring for certain formats.
+
+
+- `#8796 <https://github.com/pytest-dev/pytest/issues/8796>`_: Fixed internal error when skipping doctests.
+
+
+- `#8983 <https://github.com/pytest-dev/pytest/issues/8983>`_: The test selection options ``pytest -k`` and ``pytest -m`` now support matching names containing backslash (`\\`) characters.
+ Backslashes are treated literally, not as escape characters (the values being matched against are already escaped).
+
+
+- `#8990 <https://github.com/pytest-dev/pytest/issues/8990>`_: Fix `pytest -vv` crashing with an internal exception `AttributeError: 'str' object has no attribute 'relative_to'` in some cases.
+
+
+- `#9077 <https://github.com/pytest-dev/pytest/issues/9077>`_: Fixed confusing error message when ``request.fspath`` / ``request.path`` was accessed from a session-scoped fixture.
+
+
+- `#9131 <https://github.com/pytest-dev/pytest/issues/9131>`_: Fixed the URL used by ``--pastebin`` to use `bpa.st <http://bpa.st>`__.
+
+
+- `#9163 <https://github.com/pytest-dev/pytest/issues/9163>`_: The end line number and end column offset are now properly set for rewritten assert statements.
+
+
+- `#9169 <https://github.com/pytest-dev/pytest/issues/9169>`_: Support for the ``files`` API from ``importlib.resources`` within rewritten files.
+
+
+- `#9272 <https://github.com/pytest-dev/pytest/issues/9272>`_: The nose compatibility module-level fixtures `setup()` and `teardown()` are now only called once per module, instead of for each test function.
+ They are now called even if object-level `setup`/`teardown` is defined.
+
+
+
+Improved Documentation
+----------------------
+
+- `#4320 <https://github.com/pytest-dev/pytest/issues/4320>`_: Improved docs for `pytester.copy_example`.
+
+
+- `#5105 <https://github.com/pytest-dev/pytest/issues/5105>`_: Add automatically generated :ref:`plugin-list`. The list is updated on a periodic schedule.
+
+
+- `#8337 <https://github.com/pytest-dev/pytest/issues/8337>`_: Recommend `numpy.testing <https://numpy.org/doc/stable/reference/routines.testing.html>`__ module on :func:`pytest.approx` documentation.
+
+
+- `#8655 <https://github.com/pytest-dev/pytest/issues/8655>`_: Help text for ``--pdbcls`` more accurately reflects the option's behavior.
+
+
+- `#9210 <https://github.com/pytest-dev/pytest/issues/9210>`_: Remove incorrect docs about ``confcutdir`` being a configuration option: it can only be set through the ``--confcutdir`` command-line option.
+
+
+- `#9242 <https://github.com/pytest-dev/pytest/issues/9242>`_: Upgrade readthedocs configuration to use a `newer Ubuntu version <https://blog.readthedocs.com/new-build-specification/>`__ with better unicode support for PDF docs.
+
+
+- `#9341 <https://github.com/pytest-dev/pytest/issues/9341>`_: Various methods commonly used for :ref:`non-python tests` are now correctly documented in the reference docs. They were undocumented previously.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- `#8133 <https://github.com/pytest-dev/pytest/issues/8133>`_: Migrate to ``setuptools_scm`` 6.x to use ``SETUPTOOLS_SCM_PRETEND_VERSION_FOR_PYTEST`` for more robust release tooling.
+
+
+- `#8174 <https://github.com/pytest-dev/pytest/issues/8174>`_: The following changes have been made to internal pytest types/functions:
+
+ - The ``_pytest.code.getfslineno()`` function returns ``Path`` instead of ``py.path.local``.
+ - The ``_pytest.python.path_matches_patterns()`` function takes ``Path`` instead of ``py.path.local``.
+ - The ``_pytest._code.Traceback.cut()`` function accepts any ``os.PathLike[str]``, not just ``py.path.local``.
+
+
+- `#8248 <https://github.com/pytest-dev/pytest/issues/8248>`_: Internal Restructure: let ``python.PyObjMixin`` inherit from ``nodes.Node`` to carry over typing information.
+
+
+- `#8432 <https://github.com/pytest-dev/pytest/issues/8432>`_: Improve error message when :func:`pytest.skip` is used at module level without passing `allow_module_level=True`.
+
+
+- `#8818 <https://github.com/pytest-dev/pytest/issues/8818>`_: Ensure ``regendoc`` opts out of ``TOX_ENV`` cachedir selection to ensure independent example test runs.
+
+
+- `#8913 <https://github.com/pytest-dev/pytest/issues/8913>`_: The private ``CallSpec2._arg2scopenum`` attribute has been removed after an internal refactoring.
+
+
+- `#8967 <https://github.com/pytest-dev/pytest/issues/8967>`_: :hook:`pytest_assertion_pass` is no longer considered experimental and
+ future changes to it will be considered more carefully.
+
+
+- `#9202 <https://github.com/pytest-dev/pytest/issues/9202>`_: Add github action to upload coverage report to codecov instead of bash uploader.
+
+
+- `#9225 <https://github.com/pytest-dev/pytest/issues/9225>`_: Changed the command used to create sdist and wheel artifacts: using the build package instead of setup.py.
+
+
+- `#9351 <https://github.com/pytest-dev/pytest/issues/9351>`_: Correct minor typos in doc/en/example/special.rst.
+
+
+pytest 6.2.5 (2021-08-29)
+=========================
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`8494`: Python 3.10 is now supported.
+
+
+- :issue:`9040`: Enable compatibility with ``pluggy 1.0`` or later.
+
+
+pytest 6.2.4 (2021-05-04)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`8539`: Fixed assertion rewriting on Python 3.10.
+
+
+pytest 6.2.3 (2021-04-03)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`8414`: pytest used to create directories under ``/tmp`` with world-readable
+ permissions. This means that any user in the system was able to read
+ information written by tests in temporary directories (such as those created by
+ the ``tmp_path``/``tmpdir`` fixture). Now the directories are created with
+ private permissions.
+
+ pytest used to silently use a pre-existing ``/tmp/pytest-of-<username>`` directory,
+ even if owned by another user. This means another user could pre-create such a
+ directory and gain control of another user's temporary directory. Now such a
+ condition results in an error.
+
+
+pytest 6.2.2 (2021-01-25)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`8152`: Fixed "(<Skipped instance>)" being shown as a skip reason in the verbose test summary line when the reason is empty.
+
+
+- :issue:`8249`: Fix the ``faulthandler`` plugin for occasions when running with ``twisted.logger`` and using ``pytest --capture=no``.
+
+
+pytest 6.2.1 (2020-12-15)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`7678`: Fixed bug where ``ImportPathMismatchError`` would be raised for files compiled in
+ the host and loaded later from an UNC mounted path (Windows).
+
+
+- :issue:`8132`: Fixed regression in ``approx``: in 6.2.0 ``approx`` no longer raises
+ ``TypeError`` when dealing with non-numeric types, falling back to normal comparison.
+ Before 6.2.0, array types like tf.DeviceArray fell through to the scalar case,
+ and happened to compare correctly to a scalar if they had only one element.
+ After 6.2.0, these types began failing, because they inherited neither from
+ standard Python number hierarchy nor from ``numpy.ndarray``.
+
+ ``approx`` now converts arguments to ``numpy.ndarray`` if they expose the array
+ protocol and are not scalars. This treats array-like objects like numpy arrays,
+ regardless of size.
+
+
+pytest 6.2.0 (2020-12-12)
+=========================
+
+Breaking Changes
+----------------
+
+- :issue:`7808`: pytest now supports python3.6+ only.
+
+
+
+Deprecations
+------------
+
+- :issue:`7469`: Directly constructing/calling the following classes/functions is now deprecated:
+
+ - ``_pytest.cacheprovider.Cache``
+ - ``_pytest.cacheprovider.Cache.for_config()``
+ - ``_pytest.cacheprovider.Cache.clear_cache()``
+ - ``_pytest.cacheprovider.Cache.cache_dir_from_config()``
+ - ``_pytest.capture.CaptureFixture``
+ - ``_pytest.fixtures.FixtureRequest``
+ - ``_pytest.fixtures.SubRequest``
+ - ``_pytest.logging.LogCaptureFixture``
+ - ``_pytest.pytester.Pytester``
+ - ``_pytest.pytester.Testdir``
+ - ``_pytest.recwarn.WarningsRecorder``
+ - ``_pytest.recwarn.WarningsChecker``
+ - ``_pytest.tmpdir.TempPathFactory``
+ - ``_pytest.tmpdir.TempdirFactory``
+
+ These have always been considered private, but now issue a deprecation warning, which may become a hard error in pytest 8.0.0.
+
+
+- :issue:`7530`: The ``--strict`` command-line option has been deprecated, use ``--strict-markers`` instead.
+
+  We may reintroduce ``--strict`` in the future as an encompassing flag for all strictness
+  related options (``--strict-markers`` and ``--strict-config`` at the moment; more might be introduced later).
+
+
+- :issue:`7988`: The ``@pytest.yield_fixture`` decorator/function is now deprecated. Use :func:`pytest.fixture` instead.
+
+  ``yield_fixture`` has been an alias for ``fixture`` for a very long time, so it can be searched and replaced safely.
+
+
+
+Features
+--------
+
+- :issue:`5299`: pytest now warns about unraisable exceptions and unhandled thread exceptions that occur in tests on Python>=3.8.
+ See :ref:`unraisable` for more information.
+
+
+- :issue:`7425`: New :fixture:`pytester` fixture, which is identical to :fixture:`testdir` but its methods return :class:`pathlib.Path` when appropriate instead of ``py.path.local``.
+
+  This is part of the movement to use :class:`pathlib.Path` objects internally, in order to remove the dependency on ``py`` in the future.
+
+ Internally, the old :class:`Testdir <_pytest.pytester.Testdir>` is now a thin wrapper around :class:`Pytester <_pytest.pytester.Pytester>`, preserving the old interface.
+
+
+- :issue:`7695`: A new hook was added, `pytest_markeval_namespace`, which should return a dictionary.
+  This dictionary will be used to augment the "global" variables available to evaluate skipif/xfail/xpass markers.
+
+ Pseudo example
+
+ ``conftest.py``:
+
+ .. code-block:: python
+
+ def pytest_markeval_namespace():
+ return {"color": "red"}
+
+ ``test_func.py``:
+
+ .. code-block:: python
+
+ @pytest.mark.skipif("color == 'blue'", reason="Color is not red")
+ def test_func():
+ assert False
+
+
+- :issue:`8006`: It is now possible to construct a :class:`~pytest.MonkeyPatch` object directly as ``pytest.MonkeyPatch()``,
+ in cases when the :fixture:`monkeypatch` fixture cannot be used. Previously some users imported it
+ from the private `_pytest.monkeypatch.MonkeyPatch` namespace.
+
+ Additionally, :meth:`MonkeyPatch.context <pytest.MonkeyPatch.context>` is now a classmethod,
+ and can be used as ``with MonkeyPatch.context() as mp: ...``. This is the recommended way to use
+ ``MonkeyPatch`` directly, since unlike the ``monkeypatch`` fixture, an instance created directly
+ is not ``undo()``-ed automatically.
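+
+  A minimal sketch of the recommended pattern (the environment variable and helper are made up):
+
+  .. code-block:: python
+
+      import os
+
+      import pytest
+
+
+      def read_token():
+          return os.environ.get("EXAMPLE_TOKEN", "")  # hypothetical code under test
+
+
+      with pytest.MonkeyPatch.context() as mp:
+          mp.setenv("EXAMPLE_TOKEN", "dummy-value")
+          assert read_token() == "dummy-value"
+      # all modifications are undone when the with-block exits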
+
+
+
+Improvements
+------------
+
+- :issue:`1265`: Added an ``__str__`` implementation to the :class:`~pytest.pytester.LineMatcher` class which is returned from ``pytester.run_pytest().stdout`` and similar. It returns the entire output, like the existing ``str()`` method.
+
+
+- :issue:`2044`: Verbose mode now shows the reason that a test was skipped in the test's terminal line after the "SKIPPED", "XFAIL" or "XPASS".
+
+
+- :issue:`7469`: The types of builtin pytest fixtures are now exported so they may be used in type annotations of test functions.
+ The newly-exported types are:
+
+ - ``pytest.FixtureRequest`` for the :fixture:`request` fixture.
+ - ``pytest.Cache`` for the :fixture:`cache` fixture.
+ - ``pytest.CaptureFixture[str]`` for the :fixture:`capfd` and :fixture:`capsys` fixtures.
+ - ``pytest.CaptureFixture[bytes]`` for the :fixture:`capfdbinary` and :fixture:`capsysbinary` fixtures.
+ - ``pytest.LogCaptureFixture`` for the :fixture:`caplog` fixture.
+ - ``pytest.Pytester`` for the :fixture:`pytester` fixture.
+ - ``pytest.Testdir`` for the :fixture:`testdir` fixture.
+ - ``pytest.TempdirFactory`` for the :fixture:`tmpdir_factory` fixture.
+ - ``pytest.TempPathFactory`` for the :fixture:`tmp_path_factory` fixture.
+ - ``pytest.MonkeyPatch`` for the :fixture:`monkeypatch` fixture.
+ - ``pytest.WarningsRecorder`` for the :fixture:`recwarn` fixture.
+
+ Constructing them is not supported (except for `MonkeyPatch`); they are only meant for use in type annotations.
+ Doing so will emit a deprecation warning, and may become a hard-error in pytest 8.0.
+
+ Subclassing them is also not supported. This is not currently enforced at runtime, but is detected by type-checkers such as mypy.
+
+
+- :issue:`7527`: When a comparison between :func:`namedtuple <collections.namedtuple>` instances of the same type fails, pytest now shows the differing field names (possibly nested) instead of their indexes.
+
+
+- :issue:`7615`: :meth:`Node.warn <_pytest.nodes.Node.warn>` now permits any subclass of :class:`Warning`, not just :class:`PytestWarning <pytest.PytestWarning>`.
+
+
+- :issue:`7701`: Improved reporting when using ``--collect-only``. It will now show the number of collected tests in the summary stats.
+
+
+- :issue:`7710`: Use strict equality comparison for non-numeric types in :func:`pytest.approx` instead of
+ raising :class:`TypeError`.
+
+ This was the undocumented behavior before 3.7, but is now officially a supported feature.
+
+
+- :issue:`7938`: New ``--sw-skip`` argument which is a shorthand for ``--stepwise-skip``.
+
+
+- :issue:`8023`: Added ``'node_modules'`` to default value for :confval:`norecursedirs`.
+
+
+- :issue:`8032`: :meth:`doClassCleanups <unittest.TestCase.doClassCleanups>` (introduced in :mod:`unittest` in Python 3.8) is now called appropriately.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`4824`: Fixed quadratic behavior and improved performance of collection of items using autouse fixtures and xunit fixtures.
+
+
+- :issue:`7758`: Fixed an issue where some files in packages are getting lost from ``--lf`` even though they contain tests that failed. Regressed in pytest 5.4.0.
+
+
+- :issue:`7911`: Directories created by :fixture:`tmp_path` and :fixture:`tmpdir` are now considered stale after 3 days without modification (previous value was 3 hours) to avoid deleting directories still in use in long running test suites.
+
+
+- :issue:`7913`: Fixed a crash or hang in :meth:`pytester.spawn <_pytest.pytester.Pytester.spawn>` when the :mod:`readline` module is involved.
+
+
+- :issue:`7951`: Fixed handling of recursive symlinks when collecting tests.
+
+
+- :issue:`7981`: Fixed symlinked directories not being followed during collection. Regressed in pytest 6.1.0.
+
+
+- :issue:`8016`: Fixed only one doctest being collected when using ``pytest --doctest-modules path/to/an/__init__.py``.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`7429`: Add more information and use cases about skipping doctests.
+
+
+- :issue:`7780`: Classes which should not be inherited from are now marked ``final class`` in the API reference.
+
+
+- :issue:`7872`: ``_pytest.config.argparsing.Parser.addini()`` accepts explicit ``None`` and ``"string"``.
+
+
+- :issue:`7878`: In the pull request section, ask to commit after editing the changelog and authors file.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`7802`: The ``attrs`` dependency requirement is now >=19.2.0 instead of >=17.4.0.
+
+
+- :issue:`8014`: `.pyc` files created by pytest's assertion rewriting now conform to the newer :pep:`552` format on Python>=3.7.
+ (These files are internal and only interpreted by pytest itself.)
+
+
+pytest 6.1.2 (2020-10-28)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`7758`: Fixed an issue where some files in packages are getting lost from ``--lf`` even though they contain tests that failed. Regressed in pytest 5.4.0.
+
+
+- :issue:`7911`: Directories created by `tmpdir` are now considered stale after 3 days without modification (previous value was 3 hours) to avoid deleting directories still in use in long running test suites.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`7815`: Improve deprecation warning message for ``pytest._fillfuncargs()``.
+
+
+pytest 6.1.1 (2020-10-03)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`7807`: Fixed regression in pytest 6.1.0 causing incorrect rootdir to be determined in some non-trivial cases where parent directories have config files as well.
+
+
+- :issue:`7814`: Fixed crash in header reporting when :confval:`testpaths` is used and contains absolute paths (regression in 6.1.0).
+
+
+pytest 6.1.0 (2020-09-26)
+=========================
+
+Breaking Changes
+----------------
+
+- :issue:`5585`: As per our policy, the following features which have been deprecated in the 5.X series are now
+ removed:
+
+ * The ``funcargnames`` read-only property of ``FixtureRequest``, ``Metafunc``, and ``Function`` classes. Use ``fixturenames`` attribute.
+
+  * ``@pytest.fixture`` no longer supports positional arguments; pass all arguments by keyword instead.
+
+  * Direct construction of ``Node`` subclasses now raises an error; use ``from_parent`` instead.
+
+ * The default value for ``junit_family`` has changed to ``xunit2``. If you require the old format, add ``junit_family=xunit1`` to your configuration file.
+
+ * The ``TerminalReporter`` no longer has a ``writer`` attribute. Plugin authors may use the public functions of the ``TerminalReporter`` instead of accessing the ``TerminalWriter`` object directly.
+
+ * The ``--result-log`` option has been removed. Users are recommended to use the `pytest-reportlog <https://github.com/pytest-dev/pytest-reportlog>`__ plugin instead.
+
+
+ For more information consult :std:doc:`deprecations` in the docs.
+
+
+
+Deprecations
+------------
+
+- :issue:`6981`: The ``pytest.collect`` module is deprecated: all its names can be imported from ``pytest`` directly.
+
+
+- :issue:`7097`: The ``pytest._fillfuncargs`` function is deprecated. This function was kept
+ for backward compatibility with an older plugin.
+
+  Its functionality is not meant to be used directly, but if you must replace
+ it, use `function._request._fillfixtures()` instead, though note this is not
+ a public API and may break in the future.
+
+
+- :issue:`7210`: The special ``-k '-expr'`` syntax to ``-k`` is deprecated. Use ``-k 'not expr'``
+ instead.
+
+ The special ``-k 'expr:'`` syntax to ``-k`` is deprecated. Please open an issue
+ if you use this and want a replacement.
+
+
+- :issue:`7255`: The :hook:`pytest_warning_captured` hook is deprecated in favor
+ of :hook:`pytest_warning_recorded`, and will be removed in a future version.
+
+
+- :issue:`7648`: The ``gethookproxy()`` and ``isinitpath()`` methods of ``FSCollector`` and ``Package`` are deprecated;
+ use ``self.session.gethookproxy()`` and ``self.session.isinitpath()`` instead.
+ This should work on all pytest versions.
+
+
+
+Features
+--------
+
+- :issue:`7667`: New ``--durations-min`` command-line flag controls the minimal duration for inclusion in the slowest list of tests shown by ``--durations``. Previously this was hard-coded to ``0.005s``.
+
+
+
+Improvements
+------------
+
+- :issue:`6681`: Internal pytest warnings issued during the early stages of initialization are now properly handled and can be filtered through :confval:`filterwarnings` or ``--pythonwarnings/-W``.
+
+ This also fixes a number of long standing issues: :issue:`2891`, :issue:`7620`, :issue:`7426`.
+
+
+- :issue:`7572`: When a plugin listed in ``required_plugins`` is missing or an unknown config key is used with ``--strict-config``, a simple error message is now shown instead of a stacktrace.
+
+
+- :issue:`7685`: Added two new attributes :attr:`rootpath <_pytest.config.Config.rootpath>` and :attr:`inipath <_pytest.config.Config.inipath>` to :class:`Config <_pytest.config.Config>`.
+ These attributes are :class:`pathlib.Path` versions of the existing :attr:`rootdir <_pytest.config.Config.rootdir>` and :attr:`inifile <_pytest.config.Config.inifile>` attributes,
+ and should be preferred over them when possible.
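+
+  A minimal sketch of a ``conftest.py`` hook reading the new attributes:
+
+  .. code-block:: python
+
+      def pytest_configure(config):
+          # pathlib.Path counterparts of the py.path-based rootdir/inifile
+          print("rootpath:", config.rootpath)
+          print("inipath:", config.inipath)  # may be None if no config file was found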
+
+
+- :issue:`7780`: Public classes which are not designed to be inherited from are now marked :func:`@final <typing.final>`.
+  Code which inherits from these classes will trigger a type-checking (e.g. mypy) error, but will still work at runtime.
+ Currently the ``final`` designation does not appear in the API Reference but hopefully will in the future.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`1953`: Fixed error when overwriting a parametrized fixture, while also reusing the super fixture value.
+
+ .. code-block:: python
+
+ # conftest.py
+ import pytest
+
+
+ @pytest.fixture(params=[1, 2])
+ def foo(request):
+ return request.param
+
+
+ # test_foo.py
+ import pytest
+
+
+ @pytest.fixture
+ def foo(foo):
+ return foo * 2
+
+
+- :issue:`4984`: Fixed an internal error crash with ``IndexError: list index out of range`` when
+ collecting a module which starts with a decorated function, the decorator
+ raises, and assertion rewriting is enabled.
+
+
+- :issue:`7591`: pylint shouldn't complain anymore about unimplemented abstract methods when inheriting from :ref:`File <non-python tests>`.
+
+
+- :issue:`7628`: Fixed test collection when a full path without a drive letter was passed to pytest on Windows (for example ``\projects\tests\test.py`` instead of ``c:\projects\tests\test.py``).
+
+
+- :issue:`7638`: Fix handling of command-line options that appear as paths but trigger an OS-level syntax error on Windows, such as the options used internally by ``pytest-xdist``.
+
+
+- :issue:`7742`: Fixed INTERNALERROR when accessing locals / globals with faulty ``exec``.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`1477`: Removed faq.rst and its reference in contents.rst.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`7536`: The internal ``junitxml`` plugin has been rewritten to use ``xml.etree.ElementTree``.
+ The order of attributes in XML elements might differ. Some unneeded escaping is
+ no longer performed.
+
+
+- :issue:`7587`: The dependency on the ``more-itertools`` package has been removed.
+
+
+- :issue:`7631`: The result type of :meth:`capfd.readouterr() <_pytest.capture.CaptureFixture.readouterr>` (and similar) is no longer a namedtuple,
+ but should behave like one in all respects. This was done for technical reasons.
+
+
+- :issue:`7671`: When collecting tests, pytest finds test classes and functions by examining the
+ attributes of python objects (modules, classes and instances). To speed up this
+ process, pytest now ignores builtin attributes (like ``__class__``,
+ ``__delattr__`` and ``__new__``) without consulting the :confval:`python_classes` and
+ :confval:`python_functions` configuration options and without passing them to plugins
+ using the :hook:`pytest_pycollect_makeitem` hook.
+
+
+pytest 6.0.2 (2020-09-04)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`7148`: Fixed ``--log-cli`` potentially causing unrelated ``print`` output to be swallowed.
+
+
+- :issue:`7672`: Fixed log-capturing level restored incorrectly if ``caplog.set_level`` is called more than once.
+
+
+- :issue:`7686`: Fixed `NotSetType.token` being used as the parameter ID when the parametrization list is empty.
+ Regressed in pytest 6.0.0.
+
+
+- :issue:`7707`: Fix internal error when handling some exceptions that contain multiple lines, or when the traceback style uses multiple lines (``--tb=line`` for example).
+
+
+pytest 6.0.1 (2020-07-30)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`7394`: Passing an empty ``help`` value to ``Parser.add_option`` is now accepted instead of crashing when running ``pytest --help``.
+ Passing ``None`` raises a more informative ``TypeError``.
+
+
+- :issue:`7558`: Fix pylint ``not-callable`` lint on ``pytest.mark.parametrize()`` and the other builtin marks:
+ ``skip``, ``skipif``, ``xfail``, ``usefixtures``, ``filterwarnings``.
+
+
+- :issue:`7559`: Fix regression in plugins using ``TestReport.longreprtext`` (such as ``pytest-html``) when ``TestReport.longrepr`` is not a string.
+
+
+- :issue:`7569`: Fix logging capture handler's level not reset on teardown after a call to ``caplog.set_level()``.
+
+
+pytest 6.0.0 (2020-07-28)
+=========================
+
+(**Please see the full set of changes for this release also in the 6.0.0rc1 notes below**)
+
+Breaking Changes
+----------------
+
+- :issue:`5584`: **PytestDeprecationWarning are now errors by default.**
+
+ Following our plan to remove deprecated features with as little disruption as
+ possible, all warnings of type ``PytestDeprecationWarning`` now generate errors
+ instead of warning messages.
+
+ **The affected features will be effectively removed in pytest 6.1**, so please consult the
+ :std:doc:`deprecations` section in the docs for directions on how to update existing code.
+
+ In the pytest ``6.0.X`` series, it is possible to change the errors back into warnings as a
+ stopgap measure by adding this to your ``pytest.ini`` file:
+
+ .. code-block:: ini
+
+ [pytest]
+ filterwarnings =
+ ignore::pytest.PytestDeprecationWarning
+
+ But this will stop working when pytest ``6.1`` is released.
+
+ **If you have concerns** about the removal of a specific feature, please add a
+ comment to :issue:`5584`.
+
+
+- :issue:`7472`: The ``exec_()`` and ``is_true()`` methods of ``_pytest._code.Frame`` have been removed.
+
+
+
+Features
+--------
+
+- :issue:`7464`: Added support for :envvar:`NO_COLOR` and :envvar:`FORCE_COLOR` environment variables to control colored output.
+
+
+
+Improvements
+------------
+
+- :issue:`7467`: ``--log-file`` CLI option and ``log_file`` ini marker now create subdirectories if needed.
+
+
+- :issue:`7489`: The :func:`pytest.raises` function has a clearer error message when ``match`` equals the obtained string but is not a regex match. In this case it is suggested to escape the regex.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`7392`: Fix the reported location of tests skipped with ``@pytest.mark.skip`` when ``--runxfail`` is used.
+
+
+- :issue:`7491`: :fixture:`tmpdir` and :fixture:`tmp_path` no longer raise an error if the lock to check for
+ stale temporary directories is not accessible.
+
+
+- :issue:`7517`: Preserve line endings when captured via ``capfd``.
+
+
+- :issue:`7534`: Restored the previous formatting of ``TracebackEntry.__str__`` which was changed by accident.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`7422`: Clarified when the ``usefixtures`` mark can apply fixtures to test.
+
+
+- :issue:`7441`: Add a note about ``-q`` option used in getting started guide.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`7389`: Fixture scope ``package`` is no longer considered experimental.
+
+
+pytest 6.0.0rc1 (2020-07-08)
+============================
+
+Breaking Changes
+----------------
+
+- :issue:`1316`: ``TestReport.longrepr`` is now always an instance of ``ReprExceptionInfo``. Previously it was a ``str`` when a test failed with ``pytest.fail(..., pytrace=False)``.
+
+
+- :issue:`5965`: symlinks are no longer resolved during collection and matching `conftest.py` files with test file paths.
+
+ Resolving symlinks for the current directory and during collection was introduced as a bugfix in 3.9.0, but it actually is a new feature which had unfortunate consequences in Windows and surprising results in other platforms.
+
+ The team decided to step back on resolving symlinks at all, planning to review this in the future with a more solid solution (see discussion in
+ :pull:`6523` for details).
+
+ This might break test suites which made use of this feature; the fix is to create a symlink
+  for the entire test tree, and not only for partial files/trees as was possible previously.
+
+
+- :issue:`6505`: ``Testdir.run().parseoutcomes()`` now always returns the parsed nouns in plural form.
+
+  Originally ``parseoutcomes()`` would always return the nouns in plural form, but a change
+  meant to improve the terminal summary by using the singular form for single items (``1 warning`` or ``1 error``)
+ caused an unintended regression by changing the keys returned by ``parseoutcomes()``.
+
+ Now the API guarantees to always return the plural form, so calls like this:
+
+ .. code-block:: python
+
+ result = testdir.runpytest()
+ result.assert_outcomes(error=1)
+
+  need to be changed to:
+
+
+ .. code-block:: python
+
+ result = testdir.runpytest()
+ result.assert_outcomes(errors=1)
+
+
+- :issue:`6903`: The ``os.dup()`` function is now assumed to exist. We are not aware of any
+ supported Python 3 implementations which do not provide it.
+
+
+- :issue:`7040`: ``-k`` no longer matches against the names of the directories outside the test session root.
+
+ Also, ``pytest.Package.name`` is now just the name of the directory containing the package's
+ ``__init__.py`` file, instead of the full path. This is consistent with how the other nodes
+ are named, and also one of the reasons why ``-k`` would match against any directory containing
+ the test suite.
+
+
+- :issue:`7122`: Expressions given to the ``-m`` and ``-k`` options are no longer evaluated using Python's :func:`eval`.
+ The format supports ``or``, ``and``, ``not``, parenthesis and general identifiers to match against.
+ Python constants, keywords or other operators are no longer evaluated differently.
+
+
+- :issue:`7135`: Pytest now uses its own ``TerminalWriter`` class instead of using the one from the ``py`` library.
+ Plugins generally access this class through ``TerminalReporter.writer``, ``TerminalReporter.write()``
+ (and similar methods), or ``_pytest.config.create_terminal_writer()``.
+
+ The following breaking changes were made:
+
+ - Output (``write()`` method and others) no longer flush implicitly; the flushing behavior
+ of the underlying file is respected. To flush explicitly (for example, if you
+ want output to be shown before an end-of-line is printed), use ``write(flush=True)`` or
+ ``terminal_writer.flush()``.
+ - Explicit Windows console support was removed, delegated to the colorama library.
+ - Support for writing ``bytes`` was removed.
+ - The ``reline`` method and ``chars_on_current_line`` property were removed.
+  - The ``stringio`` and ``encoding`` arguments were removed.
+ - Support for passing a callable instead of a file was removed.
+
+
+- :issue:`7224`: The `item.catch_log_handler` and `item.catch_log_handlers` attributes, set by the
+ logging plugin and never meant to be public, are no longer available.
+
+ The deprecated ``--no-print-logs`` option and ``log_print`` ini option are removed. Use ``--show-capture`` instead.
+
+
+- :issue:`7226`: Removed the unused ``args`` parameter from ``pytest.Function.__init__``.
+
+
+- :issue:`7418`: Removed the `pytest_doctest_prepare_content` hook specification. This hook
+ hasn't been triggered by pytest for at least 10 years.
+
+
+- :issue:`7438`: Some changes were made to the internal ``_pytest._code.source``, listed here
+ for the benefit of plugin authors who may be using it:
+
+ - The ``deindent`` argument to ``Source()`` has been removed, now it is always true.
+ - Support for zero or multiple arguments to ``Source()`` has been removed.
+ - Support for comparing ``Source`` with an ``str`` has been removed.
+ - The methods ``Source.isparseable()`` and ``Source.putaround()`` have been removed.
+ - The method ``Source.compile()`` and function ``_pytest._code.compile()`` have
+ been removed; use plain ``compile()`` instead.
+ - The function ``_pytest._code.source.getsource()`` has been removed; use
+ ``Source()`` directly instead.
+
+
+
+Deprecations
+------------
+
+- :issue:`7210`: The special ``-k '-expr'`` syntax to ``-k`` is deprecated. Use ``-k 'not expr'``
+ instead.
+
+ The special ``-k 'expr:'`` syntax to ``-k`` is deprecated. Please open an issue
+ if you use this and want a replacement.
+
+- :issue:`4049`: ``pytest_warning_captured`` is deprecated in favor of the ``pytest_warning_recorded`` hook.
+
+
+Features
+--------
+
+- :issue:`1556`: pytest now supports ``pyproject.toml`` files for configuration.
+
+  The configuration options are similar to the ones available in other formats, but must be defined
+ in a ``[tool.pytest.ini_options]`` table to be picked up by pytest:
+
+ .. code-block:: toml
+
+ # pyproject.toml
+ [tool.pytest.ini_options]
+ minversion = "6.0"
+ addopts = "-ra -q"
+ testpaths = [
+ "tests",
+ "integration",
+ ]
+
+ More information can be found :ref:`in the docs <config file formats>`.
+
+
+- :issue:`3342`: pytest now includes inline type annotations and exposes them to user programs.
+ Most of the user-facing API is covered, as well as internal code.
+
+ If you are running a type checker such as mypy on your tests, you may start
+ noticing type errors indicating incorrect usage. If you run into an error that
+ you believe to be incorrect, please let us know in an issue.
+
+ The types were developed against mypy version 0.780. Versions before 0.750
+ are known not to work. We recommend using the latest version. Other type
+ checkers may work as well, but they are not officially verified to work by
+ pytest yet.
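+
+  A minimal sketch of a test a type checker can now verify end to end (the test itself is hypothetical):
+
+  .. code-block:: python
+
+      from pathlib import Path
+
+
+      def test_writes_settings(tmp_path: Path) -> None:
+          # tmp_path is a pathlib.Path, so mypy can check the calls below
+          target = tmp_path / "settings.ini"
+          target.write_text("[pytest]\n")
+          assert target.read_text().startswith("[pytest]")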
+
+
+- :issue:`4049`: Introduced a new hook named `pytest_warning_recorded` to convey information about warnings captured by the internal `pytest` warnings plugin.
+
+ This hook is meant to replace `pytest_warning_captured`, which is deprecated and will be removed in a future release.
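+
+  A hypothetical ``conftest.py`` implementation of the new hook (parameter names follow the hook's documented signature):
+
+  .. code-block:: python
+
+      def pytest_warning_recorded(warning_message, when, nodeid, location):
+          # warning_message is a warnings.WarningMessage instance
+          print(f"[{when}] {nodeid or '<session>'}: {warning_message.message}")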
+
+
+- :issue:`6471`: New command-line flags:
+
+ * `--no-header`: disables the initial header, including platform, version, and plugins.
+ * `--no-summary`: disables the final test summary, including warnings.
+
+
+- :issue:`6856`: A warning is now shown when an unknown key is read from a config INI file.
+
+ The `--strict-config` flag has been added to treat these warnings as errors.
+
+
+- :issue:`6906`: Added `--code-highlight` command line option to enable/disable code highlighting in terminal output.
+
+
+- :issue:`7245`: New ``--import-mode=importlib`` option that uses :mod:`importlib` to import test modules.
+
+ Traditionally pytest used ``__import__`` while changing ``sys.path`` to import test modules (which
+ also changes ``sys.modules`` as a side-effect), which works but has a number of drawbacks, like requiring test modules
+ that don't live in packages to have unique names (as they need to reside under a unique name in ``sys.modules``).
+
+  ``--import-mode=importlib`` uses more fine-grained import mechanisms from ``importlib`` which don't
+  require pytest to change ``sys.path`` or ``sys.modules`` at all, eliminating many of the drawbacks
+  of the previous mode.
+
+ We intend to make ``--import-mode=importlib`` the default in future versions, so users are encouraged
+ to try the new mode and provide feedback (both positive or negative) in issue :issue:`7245`.
+
+ You can read more about this option in :std:ref:`the documentation <import-modes>`.
+
+
+- :issue:`7305`: New ``required_plugins`` configuration option allows the user to specify a list of plugins, including version information, that are required for pytest to run. An error is raised if any required plugins are not found when running pytest.
+
+
+Improvements
+------------
+
+- :issue:`4375`: The ``pytest`` command now suppresses the ``BrokenPipeError`` error message that
+  is printed to stderr when the output of ``pytest`` is piped and the pipe is
+ closed by the piped-to program (common examples are ``less`` and ``head``).
+
+
+- :issue:`4391`: Improved precision of test durations measurement. ``CallInfo`` items now have a new ``<CallInfo>.duration`` attribute, created using ``time.perf_counter()``. This attribute is used to fill the ``<TestReport>.duration`` attribute, which is more accurate than the previous ``<CallInfo>.stop - <CallInfo>.start`` (as these are based on ``time.time()``).
+
+
+- :issue:`4675`: Rich comparison for dataclasses and `attrs`-classes is now recursive.
+
+
+- :issue:`6285`: Exposed the `pytest.FixtureLookupError` exception which is raised by `request.getfixturevalue()`
+ (where `request` is a `FixtureRequest` fixture) when a fixture with the given name cannot be returned.
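+
+  A minimal sketch (the fixture name is hypothetical):
+
+  .. code-block:: python
+
+      import pytest
+
+
+      def test_missing_fixture(request):
+          with pytest.raises(pytest.FixtureLookupError):
+              request.getfixturevalue("no_such_fixture")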
+
+
+- :issue:`6433`: If an error is encountered while formatting the message in a logging call, for
+ example ``logging.warning("oh no!: %s: %s", "first")`` (a second argument is
+ missing), pytest now propagates the error, likely causing the test to fail.
+
+ Previously, such a mistake would cause an error to be printed to stderr, which
+ is not displayed by default for passing tests. This change makes the mistake
+ visible during testing.
+
+ You may suppress this behavior temporarily or permanently by setting
+ ``logging.raiseExceptions = False``.
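+
+  A hypothetical test that now fails because of the malformed logging call:
+
+  .. code-block:: python
+
+      import logging
+
+
+      def test_logs_progress():
+          # one argument short for the two %s placeholders -> the error now propagates
+          logging.warning("oh no!: %s: %s", "first")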
+
+
+- :issue:`6817`: Explicit new-lines in help texts of command-line options are preserved, allowing plugins better control
+ of the help displayed to users.
+
+
+- :issue:`6940`: When using the ``--durations`` option, the terminal message output is now more precise about the number and duration of hidden items.
+
+
+- :issue:`6991`: Collected files are displayed after any reports from hooks, e.g. the status from ``--lf``.
+
+
+- :issue:`7091`: When ``fd`` capturing is used, through ``--capture=fd`` or the ``capfd`` and
+ ``capfdbinary`` fixtures, and the file descriptor (0, 1, 2) cannot be
+ duplicated, FD capturing is still performed. Previously, direct writes to the
+ file descriptors would fail or be lost in this case.
+
+
+- :issue:`7119`: Exit with an error if the ``--basetemp`` argument is empty, is the current working directory or is one of the parent directories.
+ This is done to protect against accidental data loss, as any directory passed to this argument is cleared.
+
+
+- :issue:`7128`: `pytest --version` now displays just the pytest version, while `pytest --version --version` displays more verbose information including plugins. This is more consistent with how other tools show `--version`.
+
+
+- :issue:`7133`: :meth:`caplog.set_level() <_pytest.logging.LogCaptureFixture.set_level>` will now override any :confval:`log_level` set via the CLI or configuration file.
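+
+  A minimal sketch (the logger name is hypothetical):
+
+  .. code-block:: python
+
+      import logging
+
+
+      def test_debug_is_captured(caplog):
+          caplog.set_level(logging.DEBUG)  # takes precedence over log_level from the CLI or ini file
+          logging.getLogger("app").debug("connecting")
+          assert "connecting" in caplog.text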
+
+
+- :issue:`7159`: :meth:`caplog.set_level() <_pytest.logging.LogCaptureFixture.set_level>` and :meth:`caplog.at_level() <_pytest.logging.LogCaptureFixture.at_level>` no longer affect
+  the level of logs that are shown in the *Captured log report* section.
+
+
+- :issue:`7348`: Improve recursive diff report for comparison asserts on dataclasses / attrs.
+
+
+- :issue:`7385`: ``--junitxml`` now includes the exception cause in the ``message`` XML attribute for failures during setup and teardown.
+
+ Previously:
+
+ .. code-block:: xml
+
+ <error message="test setup failure">
+
+ Now:
+
+ .. code-block:: xml
+
+ <error message="failed on setup with &quot;ValueError: Some error during setup&quot;">
+
+
+
+Bug Fixes
+---------
+
+- :issue:`1120`: Fix issue where directories from :fixture:`tmpdir` are not removed properly when multiple instances of pytest are running in parallel.
+
+
+- :issue:`4583`: Prevent crashing and provide a user-friendly error when a marker expression (`-m`) evaluated with :func:`eval` raises any exception.
+
+
+- :issue:`4677`: The path shown in the summary report for SKIPPED tests is now always relative. Previously it was sometimes absolute.
+
+
+- :issue:`5456`: Fix a possible race condition when trying to remove lock files used to control access to folders
+ created by :fixture:`tmp_path` and :fixture:`tmpdir`.
+
+
+- :issue:`6240`: Fixes an issue where logging during collection step caused duplication of log
+ messages to stderr.
+
+
+- :issue:`6428`: Paths appearing in error messages are now correct in case the current working directory has
+ changed since the start of the session.
+
+
+- :issue:`6755`: Support deleting paths longer than 260 characters created inside :fixture:`tmpdir` on Windows.
+
+
+- :issue:`6871`: Fix crash with captured output when using :fixture:`capsysbinary`.
+
+
+- :issue:`6909`: Revert the change introduced by :pull:`6330`, which required all arguments to ``@pytest.mark.parametrize`` to be explicitly defined in the function signature.
+
+ The intention of the original change was to remove what was expected to be an unintended/surprising behavior, but it turns out many people relied on it, so the restriction has been reverted.
+
+
+- :issue:`6910`: Fix crash when plugins return unknown stats while using the ``--reportlog`` option.
+
+
+- :issue:`6924`: Ensure a ``unittest.IsolatedAsyncioTestCase`` is actually awaited.
+
+
+- :issue:`6925`: Fix `TerminalRepr` instances to be hashable again.
+
+
+- :issue:`6947`: Fix regression where functions registered with :meth:`unittest.TestCase.addCleanup` were not being called on test failures.
+
+
+- :issue:`6951`: Allow users to still set the deprecated ``TerminalReporter.writer`` attribute.
+
+
+- :issue:`6956`: Prevent pytest from printing `ConftestImportFailure` traceback to stdout.
+
+
+- :issue:`6991`: Fix regressions with `--lf` filtering too much since pytest 5.4.
+
+
+- :issue:`6992`: Revert "tmpdir: clean up indirection via config for factories" :issue:`6767` as it breaks pytest-xdist.
+
+
+- :issue:`7061`: When a yielding fixture fails to yield a value, report a test setup error instead of crashing.
+
+
+- :issue:`7076`: The path of a file skipped by ``@pytest.mark.skip`` in the SKIPPED report is now relative to the invocation directory. Previously it was relative to the root directory.
+
+
+- :issue:`7110`: Fixed regression: ``asyncbase.TestCase`` tests are executed correctly again.
+
+
+- :issue:`7126`: ``--setup-show`` now doesn't raise an error when a bytes value is used as a ``parametrize``
+ parameter when Python is called with the ``-bb`` flag.
+
+
+- :issue:`7143`: Fix :meth:`pytest.File.from_parent` so it forwards extra keyword arguments to the constructor.
+
+
+- :issue:`7145`: Classes with broken ``__getattribute__`` methods are displayed correctly during failures.
+
+
+- :issue:`7150`: Prevent hiding the underlying exception when ``ConfTestImportFailure`` is raised.
+
+
+- :issue:`7180`: Fix ``_is_setup_py`` for files encoded differently than locale.
+
+
+- :issue:`7215`: Fix regression where running with ``--pdb`` would call :meth:`unittest.TestCase.tearDown` for skipped tests.
+
+
+- :issue:`7253`: When using ``pytest.fixture`` on a function directly, as in ``pytest.fixture(func)``,
+ if the ``autouse`` or ``params`` arguments are also passed, the function is no longer
+ ignored, but is marked as a fixture.
+
+
+- :issue:`7360`: Fix possibly incorrect evaluation of string expressions passed to ``pytest.mark.skipif`` and ``pytest.mark.xfail``,
+ in rare circumstances where the exact same string is used but refers to different global values.
+
+
+- :issue:`7383`: Fixed exception causes all over the codebase, i.e. use `raise new_exception from old_exception` when wrapping an exception.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`7202`: The development guide now links to the contributing section of the docs and `RELEASING.rst` on GitHub.
+
+
+- :issue:`7233`: Add a note about ``--strict`` and ``--strict-markers`` and the preference for the latter one.
+
+
+- :issue:`7345`: Explain indirect parametrization and markers for fixtures.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`7035`: The ``originalname`` attribute of ``_pytest.python.Function`` now defaults to ``name`` if not
+ provided explicitly, and is always set.
+
+
+- :issue:`7264`: The dependency on the ``wcwidth`` package has been removed.
+
+
+- :issue:`7291`: Replaced ``py.iniconfig`` with :pypi:`iniconfig`.
+
+
+- :issue:`7295`: ``src/_pytest/config/__init__.py`` now uses the ``warnings`` module to report warnings instead of ``sys.stderr.write``.
+
+
+- :issue:`7356`: Remove last internal uses of deprecated *slave* term from old ``pytest-xdist``.
+
+
+- :issue:`7357`: ``py``>=1.8.2 is now required.
+
+
+pytest 5.4.3 (2020-06-02)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`6428`: Paths appearing in error messages are now correct in case the current working directory has
+ changed since the start of the session.
+
+
+- :issue:`6755`: Support deleting paths longer than 260 characters created inside tmpdir on Windows.
+
+
+- :issue:`6956`: Prevent pytest from printing ConftestImportFailure traceback to stdout.
+
+
+- :issue:`7150`: Prevent hiding the underlying exception when ``ConfTestImportFailure`` is raised.
+
+
+- :issue:`7215`: Fix regression where running with ``--pdb`` would call the ``tearDown`` methods of ``unittest.TestCase``
+ subclasses for skipped tests.
+
+
+pytest 5.4.2 (2020-05-08)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`6871`: Fix crash with captured output when using the :fixture:`capsysbinary fixture <capsysbinary>`.
+
+
+- :issue:`6924`: Ensure a ``unittest.IsolatedAsyncioTestCase`` is actually awaited.
+
+
+- :issue:`6925`: Fix TerminalRepr instances to be hashable again.
+
+
+- :issue:`6947`: Fix regression where functions registered with ``TestCase.addCleanup`` were not being called on test failures.
+
+
+- :issue:`6951`: Allow users to still set the deprecated ``TerminalReporter.writer`` attribute.
+
+
+- :issue:`6992`: Revert "tmpdir: clean up indirection via config for factories" #6767 as it breaks pytest-xdist.
+
+
+- :issue:`7110`: Fixed regression: ``asyncbase.TestCase`` tests are executed correctly again.
+
+
+- :issue:`7143`: Fix ``File.from_parent`` so it forwards extra keyword arguments to the constructor.
+
+
+- :issue:`7145`: Classes with broken ``__getattribute__`` methods are displayed correctly during failures.
+
+
+- :issue:`7180`: Fix ``_is_setup_py`` for files encoded differently than locale.
+
+
+pytest 5.4.1 (2020-03-13)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`6909`: Revert the change introduced by :pull:`6330`, which required all arguments to ``@pytest.mark.parametrize`` to be explicitly defined in the function signature.
+
+ The intention of the original change was to remove what was expected to be an unintended/surprising behavior, but it turns out many people relied on it, so the restriction has been reverted.
+
+
+- :issue:`6910`: Fix crash when plugins return unknown stats while using the ``--reportlog`` option.
+
+
+pytest 5.4.0 (2020-03-12)
+=========================
+
+Breaking Changes
+----------------
+
+- :issue:`6316`: Matching of ``-k EXPRESSION`` to test names is now case-insensitive.
+
+
+- :issue:`6443`: Plugins specified with ``-p`` are now loaded after internal plugins, which results in their hooks being called *before* the internal ones.
+
+ This makes the ``-p`` behavior consistent with ``PYTEST_PLUGINS``.
+
+
+- :issue:`6637`: Removed the long-deprecated ``pytest_itemstart`` hook.
+
+ This hook has been marked as deprecated and not been even called by pytest for over 10 years now.
+
+
+- :issue:`6673`: Reversed / fixed the meaning of "+/-" in error diffs: "-" means that something expected is missing in the result and "+" means that there are unexpected extras in the result.
+
+
+- :issue:`6737`: The ``cached_result`` attribute of ``FixtureDef`` is now set to ``None`` when
+ the result is unavailable, instead of being deleted.
+
+ If your plugin performs checks like ``hasattr(fixturedef, 'cached_result')``,
+ for example in a ``pytest_fixture_post_finalizer`` hook implementation, replace
+ it with ``fixturedef.cached_result is not None``. If you ``del`` the attribute,
+ set it to ``None`` instead.
+
+
+
+Deprecations
+------------
+
+- :issue:`3238`: Option ``--no-print-logs`` is deprecated and meant to be removed in a future release. If you use ``--no-print-logs``, please try out ``--show-capture`` and
+ provide feedback.
+
+  The ``--show-capture`` command-line option was added in ``pytest 3.5.0`` and allows specifying how to
+ display captured output when tests fail: ``no``, ``stdout``, ``stderr``, ``log`` or ``all`` (the default).
+
+
+- :issue:`571`: Deprecate the unused/broken `pytest_collect_directory` hook.
+ It was misaligned since the removal of the ``Directory`` collector in 2010
+ and incorrect/unusable as soon as collection was split from test execution.
+
+
+- :issue:`5975`: Deprecate using direct constructors for ``Nodes``.
+
+ Instead they are now constructed via ``Node.from_parent``.
+
+ This transitional mechanism enables us to untangle the very intensely
+ entangled ``Node`` relationships by enforcing more controlled creation/configuration patterns.
+
+  As part of this change, session/config are already disallowed parameters, and as we work on the details we might need to disallow a few more as well.
+
+ Subclasses are expected to use `super().from_parent` if they intend to expand the creation of `Nodes`.
+
+
+- :issue:`6779`: The ``TerminalReporter.writer`` attribute has been deprecated and should no longer be used. This
+ was inadvertently exposed as part of the public API of that plugin and ties it too much
+ with ``py.io.TerminalWriter``.
+
+
+
+Features
+--------
+
+- :issue:`4597`: New :ref:`--capture=tee-sys <capture-method>` option to allow both live printing and capturing of test output.
+
+
+- :issue:`5712`: Now all arguments to ``@pytest.mark.parametrize`` need to be explicitly declared in the function signature or via ``indirect``.
+ Previously it was possible to omit an argument if a fixture with the same name existed, which was just an accident of implementation and was not meant to be a part of the API.
+
+
+- :issue:`6454`: Changed default for `-r` to `fE`, which displays failures and errors in the :ref:`short test summary <pytest.detailed_failed_tests_usage>`. `-rN` can be used to disable it (the old behavior).
+
+
+- :issue:`6469`: New options have been added to the :confval:`junit_logging` option: ``log``, ``out-err``, and ``all``.
+
+
+- :issue:`6834`: Excess warning summaries are now collapsed per file to ensure readable display of warning summaries.
+
+
+
+Improvements
+------------
+
+- :issue:`1857`: ``pytest.mark.parametrize`` accepts integers for ``ids`` again, converting them to strings.
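+
+  A minimal sketch (hypothetical test):
+
+  .. code-block:: python
+
+      import pytest
+
+
+      @pytest.mark.parametrize("value", [10, 20], ids=[1, 2])
+      def test_multiple_of_ten(value):
+          # the integer ids are converted to the strings "1" and "2"
+          assert value % 10 == 0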
+
+
+- :issue:`449`: Use "yellow" main color with any XPASSED tests.
+
+
+- :issue:`4639`: Revert "A warning is now issued when assertions are made for ``None``".
+
+ The warning proved to be less useful than initially expected and had quite a
+ few false positive cases.
+
+
+- :issue:`5686`: ``tmpdir_factory.mktemp`` now fails when given absolute and non-normalized paths.
+
+
+- :issue:`5984`: The ``pytest_warning_captured`` hook now receives a ``location`` parameter with the code location that generated the warning.
+
+
+- :issue:`6213`: pytester: the ``testdir`` fixture respects environment settings from the ``monkeypatch`` fixture for inner runs.
+
+
+- :issue:`6247`: ``--fulltrace`` is honored with collection errors.
+
+
+- :issue:`6384`: Make `--showlocals` work also with `--tb=short`.
+
+
+- :issue:`6653`: Add support for matching lines consecutively with :attr:`LineMatcher <_pytest.pytester.LineMatcher>`'s :func:`~_pytest.pytester.LineMatcher.fnmatch_lines` and :func:`~_pytest.pytester.LineMatcher.re_match_lines`.
+
+
+- :issue:`6658`: Code is now highlighted in tracebacks when ``pygments`` is installed.
+
+ Users are encouraged to install ``pygments`` into their environment and provide feedback, because
+ the plan is to make ``pygments`` a regular dependency in the future.
+
+
+- :issue:`6795`: Improved the usage error message shown for an invalid `-o` option.
+
+
+- :issue:`759`: ``pytest.mark.parametrize`` supports iterators and generators for ``ids``.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`310`: Add support for calling `pytest.xfail()` and `pytest.importorskip()` with doctests.
+
+
+- :issue:`3823`: ``--trace`` now works with unittests.
+
+
+- :issue:`4445`: Fixed some warning reports produced by pytest to point to the correct location of the warning in the user's code.
+
+
+- :issue:`5301`: Fix ``--last-failed`` to collect new tests from files with known failures.
+
+
+- :issue:`5928`: Report ``PytestUnknownMarkWarning`` at the level of the user's code, not ``pytest``'s.
+
+
+- :issue:`5991`: Fix interaction with ``--pdb`` and unittests: do not use unittest's ``TestCase.debug()``.
+
+
+- :issue:`6334`: Fix summary entries appearing twice when ``f/F`` and ``s/S`` report chars were used at the same time in the ``-r`` command-line option (for example ``-rFf``).
+
+ The upper case variants were never documented and the preferred form should be the lower case.
+
+
+- :issue:`6409`: Fallback to green (instead of yellow) for non-last items without previous passes with colored terminal progress indicator.
+
+
+- :issue:`6454`: `--disable-warnings` is honored with `-ra` and `-rA`.
+
+
+- :issue:`6497`: Fix bug in the comparison of request key with cached key in fixture.
+
+ A construct ``if key == cached_key:`` can fail either because ``==`` is explicitly disallowed, or for, e.g., NumPy arrays, where the result of ``a == b`` cannot generally be converted to :class:`bool`.
+ The implemented fix replaces `==` with ``is``.
+
+
+- :issue:`6557`: Make the ``.write()`` method of captured output streams return the same value as the original streams.
+
+
+- :issue:`6566`: Fix ``EncodedFile.writelines`` to call the underlying buffer's ``writelines`` method.
+
+
+- :issue:`6575`: Fix internal crash when ``faulthandler`` starts initialized
+ (for example with ``PYTHONFAULTHANDLER=1`` environment variable set) and ``faulthandler_timeout`` defined
+ in the configuration file.
+
+
+- :issue:`6597`: Fix node ids which contain a parametrized empty-string variable.
+
+
+- :issue:`6646`: Assertion rewriting hooks are (re)stored for the current item, which fixes them being still used after e.g. pytester's :func:`testdir.runpytest <_pytest.pytester.Testdir.runpytest>` etc.
+
+
+- :issue:`6660`: :py:func:`pytest.exit` is handled when emitted from the :hook:`pytest_sessionfinish` hook. This includes quitting from a debugger.
+
+
+- :issue:`6752`: When :py:func:`pytest.raises` is used as a function (as opposed to a context manager),
+ a `match` keyword argument is now passed through to the tested function. Previously
+ it was swallowed and ignored (regression in pytest 5.1.0).
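+
+  A minimal sketch of the restored pass-through behaviour (the callable is hypothetical):
+
+  .. code-block:: python
+
+      import pytest
+
+
+      def explode(match):
+          raise ValueError(f"no match for {match}")
+
+
+      def test_match_is_forwarded():
+          # in the function form, match reaches explode() instead of being dropped
+          pytest.raises(ValueError, explode, match="pattern")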
+
+
+- :issue:`6801`: Do not display empty lines in between traceback for unexpected exceptions with doctests.
+
+
+- :issue:`6802`: The :fixture:`testdir fixture <testdir>` works within doctests now.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`6696`: Add list of fixtures to start of fixture chapter.
+
+
+- :issue:`6742`: Expand first sentence on fixtures into a paragraph.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`6404`: Remove usage of ``parser`` module, deprecated in Python 3.9.
+
+
+pytest 5.3.5 (2020-01-29)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`6517`: Fix regression in pytest 5.3.4 causing an INTERNALERROR due to a wrong assertion.
+
+
+pytest 5.3.4 (2020-01-20)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`6496`: Revert :issue:`6436`: unfortunately this change has caused a number of regressions in many suites,
+ so the team decided to revert this change and make a new release while we continue to look for a solution.
+
+
+pytest 5.3.3 (2020-01-16)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`2780`: Captured output during teardown is shown with ``-rP``.
+
+
+- :issue:`5971`: Fix a ``pytest-xdist`` crash when dealing with exceptions raised in subprocesses created by the
+ ``multiprocessing`` module.
+
+
+- :issue:`6436`: :class:`FixtureDef <_pytest.fixtures.FixtureDef>` objects now properly register their finalizers with autouse and
+ parameterized fixtures that execute before them in the fixture stack so they are torn
+ down at the right times, and in the right order.
+
+
+- :issue:`6532`: Fix parsing of outcomes containing multiple errors with ``testdir`` results (regression in 5.3.0).
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`6350`: Optimized automatic renaming of test parameter IDs.
+
+
+pytest 5.3.2 (2019-12-13)
+=========================
+
+Improvements
+------------
+
+- :issue:`4639`: Revert "A warning is now issued when assertions are made for ``None``".
+
+ The warning proved to be less useful than initially expected and had quite a
+ few false positive cases.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`5430`: junitxml: Logs for failed test are now passed to junit report in case the test fails during call phase.
+
+
+- :issue:`6290`: The supporting files in the ``.pytest_cache`` directory are kept with ``--cache-clear``, which only clears cached values now.
+
+
+- :issue:`6301`: Fix assertion rewriting for egg-based distributions and ``editable`` installs (``pip install --editable``).
+
+
+pytest 5.3.1 (2019-11-25)
+=========================
+
+Improvements
+------------
+
+- :issue:`6231`: Improve check for misspelling of :ref:`pytest.mark.parametrize ref`.
+
+
+- :issue:`6257`: Handle :func:`pytest.exit` being used via :hook:`pytest_internalerror`, e.g. when quitting pdb from post mortem.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`5914`: pytester: fix :py:func:`~_pytest.pytester.LineMatcher.no_fnmatch_line` when used after positive matching.
+
+
+- :issue:`6082`: Fix line detection for doctest samples inside :py:class:`python:property` docstrings, as a workaround to :bpo:`17446`.
+
+
+- :issue:`6254`: Fix compatibility with pytest-parallel (regression in pytest 5.3.0).
+
+
+- :issue:`6255`: Clear the :py:data:`sys.last_traceback`, :py:data:`sys.last_type`
+ and :py:data:`sys.last_value` attributes by deleting them instead
+ of setting them to ``None``. This better matches the behaviour of
+ the Python standard library.
+
+
+pytest 5.3.0 (2019-11-19)
+=========================
+
+Deprecations
+------------
+
+- :issue:`6179`: The default value of :confval:`junit_family` option will change to ``"xunit2"`` in pytest 6.0, given
+ that this is the version supported by default in modern tools that manipulate this type of file.
+
+ In order to smooth the transition, pytest will issue a warning in case the ``--junitxml`` option
+ is given in the command line but :confval:`junit_family` is not explicitly configured in ``pytest.ini``.
+
+ For more information, :ref:`see the docs <junit-family changed default value>`.
+
+
+
+Features
+--------
+
+- :issue:`4488`: The pytest team has created the `pytest-reportlog <https://github.com/pytest-dev/pytest-reportlog>`__
+ plugin, which provides a new ``--report-log=FILE`` option that writes *report logs* into a file as the test session executes.
+
+ Each line of the report log contains a self contained JSON object corresponding to a testing event,
+ such as a collection or a test result report. The file is guaranteed to be flushed after writing
+ each line, so systems can read and process events in real-time.
+
+ The plugin is meant to replace the ``--resultlog`` option, which is deprecated and meant to be removed
+ in a future release. If you use ``--resultlog``, please try out ``pytest-reportlog`` and
+ provide feedback.
+
+
+- :issue:`4730`: When :py:data:`sys.pycache_prefix` (Python 3.8+) is set, it will be used by pytest to cache test files changed by the assertion rewriting mechanism.
+
+  This makes it easier to benefit from cached ``.pyc`` files even on file systems without permissions.
+
+
+- :issue:`5515`: Allow selective auto-indentation of multiline log messages.
+
+ Adds command line option ``--log-auto-indent``, config option
+ :confval:`log_auto_indent` and support for per-entry configuration of
+ indentation behavior on calls to :py:func:`python:logging.log()`.
+
+  Alters the default for auto-indentation from ``"on"`` to ``"off"``. This
+ restores the older behavior that existed prior to v4.6.0. This
+ reversion to earlier behavior was done because it is better to
+ activate new features that may lead to broken tests explicitly
+ rather than implicitly.
+
+
+- :issue:`5914`: :fixture:`testdir` learned two new functions, :py:func:`~_pytest.pytester.LineMatcher.no_fnmatch_line` and
+ :py:func:`~_pytest.pytester.LineMatcher.no_re_match_line`.
+
+ The functions are used to ensure the captured text *does not* match the given
+ pattern.
+
+ The previous idiom was to use :py:func:`python:re.match`:
+
+ .. code-block:: python
+
+ result = testdir.runpytest()
+ assert re.match(pat, result.stdout.str()) is None
+
+ Or the ``in`` operator:
+
+ .. code-block:: python
+
+ result = testdir.runpytest()
+ assert text in result.stdout.str()
+
+  But the new functions produce better output on failure.
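+
+  A minimal sketch of the new idiom (the pattern is hypothetical):
+
+  .. code-block:: python
+
+      result = testdir.runpytest()
+      result.stdout.no_fnmatch_line("*unexpectedly passed*")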
+
+
+- :issue:`6057`: Added tolerances to complex values when printing ``pytest.approx``.
+
+ For example, ``repr(pytest.approx(3+4j))`` returns ``(3+4j) ± 5e-06 ∠ ±180°``. This is polar notation indicating a circle around the expected value, with a radius of 5e-06. For ``approx`` comparisons to return ``True``, the actual value should fall within this circle.
+
+
+- :issue:`6061`: Added the pluginmanager as an argument to ``pytest_addoption``
+ so that hooks can be invoked when setting up command line options. This is
+ useful for having one plugin communicate things to another plugin,
+ such as default values or which set of command line options to add.
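+
+  A hypothetical plugin hook making use of the new argument (the option it adds is made up):
+
+  .. code-block:: python
+
+      def pytest_addoption(parser, pluginmanager):
+          # only register the option when another plugin is installed
+          if pluginmanager.hasplugin("xdist"):
+              parser.addoption("--shard", action="store", default=None, help="select a shard")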
+
+
+
+Improvements
+------------
+
+- :issue:`5061`: Use multiple colors with terminal summary statistics.
+
+
+- :issue:`5630`: Quitting from debuggers is now properly handled in ``doctest`` items.
+
+
+- :issue:`5924`: Improved verbose diff output with sequences.
+
+ Before:
+
+ ::
+
+ E AssertionError: assert ['version', '...version_info'] == ['version', '...version', ...]
+ E Right contains 3 more items, first extra item: ' '
+ E Full diff:
+ E - ['version', 'version_info', 'sys.version', 'sys.version_info']
+ E + ['version',
+ E + 'version_info',
+ E + 'sys.version',
+ E + 'sys.version_info',
+ E + ' ',
+ E + 'sys.version',
+ E + 'sys.version_info']
+
+ After:
+
+ ::
+
+ E AssertionError: assert ['version', '...version_info'] == ['version', '...version', ...]
+ E Right contains 3 more items, first extra item: ' '
+ E Full diff:
+ E [
+ E 'version',
+ E 'version_info',
+ E 'sys.version',
+ E 'sys.version_info',
+ E + ' ',
+ E + 'sys.version',
+ E + 'sys.version_info',
+ E ]
+
+
+- :issue:`5934`: ``repr`` of ``ExceptionInfo`` objects has been improved to honor the ``__repr__`` method of the underlying exception.
+
+- :issue:`5936`: Display untruncated assertion message with ``-vv``.
+
+
+- :issue:`5990`: Fixed plurality mismatch in test summary (e.g. display "1 error" instead of "1 errors").
+
+
+- :issue:`6008`: ``Config.InvocationParams.args`` is now always a ``tuple`` to better convey that it should be
+ immutable and avoid accidental modifications.
+
+
+- :issue:`6023`: ``pytest.main`` now returns a ``pytest.ExitCode`` instance, except when custom exit codes are used (in which case it still returns an ``int``).
+
+
+- :issue:`6026`: Align prefixes in output of pytester's ``LineMatcher``.
+
+
+- :issue:`6059`: Collection errors are reported as errors (and not failures like before) in the terminal's short test summary.
+
+
+- :issue:`6069`: ``pytester.spawn`` no longer unconditionally skips/xfails tests on FreeBSD.
+
+
+- :issue:`6097`: The "[...%]" indicator in the test summary is now colored according to the final (new) multi-colored line's main color.
+
+
+- :issue:`6116`: Added ``--co`` as a synonym to ``--collect-only``.
+
+
+- :issue:`6148`: ``atomicwrites`` is now only used on Windows, fixing a performance regression with assertion rewriting on Unix.
+
+
+- :issue:`6152`: Now parametrization will use the ``__name__`` attribute of any object for the id, if present. Previously it would only use ``__name__`` for functions and classes.
+
+
+- :issue:`6176`: Improved failure reporting with pytester's ``Hookrecorder.assertoutcome``.
+
+
+- :issue:`6181`: The reason for a stopped session, e.g. with ``--maxfail`` / ``-x``, now gets reported in the test summary.
+
+
+- :issue:`6206`: Improved ``cache.set`` robustness and performance.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`2049`: Fixed ``--setup-plan`` showing inaccurate information about fixture lifetimes.
+
+
+- :issue:`2548`: Fixed line offset mismatch of skipped tests in terminal summary.
+
+
+- :issue:`6039`: The ``PytestDoctestRunner`` is now properly invalidated when unconfiguring the doctest plugin.
+
+ This is important when used with ``pytester``'s ``runpytest_inprocess``.
+
+
+- :issue:`6047`: BaseExceptions are now handled in ``saferepr``, which includes ``pytest.fail.Exception`` etc.
+
+
+- :issue:`6074`: pytester: fixed order of arguments in ``rm_rf`` warning when cleaning up temporary directories, and do not emit warnings for errors with ``os.open``.
+
+
+- :issue:`6189`: Fixed result of ``getmodpath`` method.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`4901`: ``RunResult`` from ``pytester`` now displays the mnemonic of the ``ret`` attribute when it is a
+ valid ``pytest.ExitCode`` value.
+
+
+pytest 5.2.4 (2019-11-15)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`6194`: Fix incorrect discovery of non-test ``__init__.py`` files.
+
+
+- :issue:`6197`: Revert "The first test in a package (``__init__.py``) marked with ``@pytest.mark.skip`` is now correctly skipped.".
+
+
+pytest 5.2.3 (2019-11-14)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5830`: The first test in a package (``__init__.py``) marked with ``@pytest.mark.skip`` is now correctly skipped.
+
+
+- :issue:`6099`: Fix ``--trace`` when used with parametrized functions.
+
+
+- :issue:`6183`: Using ``request`` as a parameter name in ``@pytest.mark.parametrize`` now produces a more
+ user-friendly error.
+
+
+pytest 5.2.2 (2019-10-24)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5206`: Fix ``--nf`` to not forget about known nodeids with partial test selection.
+
+
+- :issue:`5906`: Fix crash with ``KeyboardInterrupt`` during ``--setup-show``.
+
+
+- :issue:`5946`: Fixed issue when parametrizing fixtures with numpy arrays (and possibly other sequence-like types).
+
+
+- :issue:`6044`: Properly ignore ``FileNotFoundError`` exceptions when trying to remove old temporary directories,
+ for instance when multiple processes try to remove the same directory (common with ``pytest-xdist``
+ for example).
+
+
+pytest 5.2.1 (2019-10-06)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5902`: Fix warnings about deprecated ``cmp`` attribute in ``attrs>=19.2``.
+
+
+pytest 5.2.0 (2019-09-28)
+=========================
+
+Deprecations
+------------
+
+- :issue:`1682`: Passing arguments to pytest.fixture() as positional arguments is deprecated - pass them
+ as a keyword argument instead.
+
+
+
+Features
+--------
+
+- :issue:`1682`: The ``scope`` parameter of ``@pytest.fixture`` can now be a callable that receives
+ the fixture name and the ``config`` object as keyword-only parameters.
+ See :ref:`the docs <dynamic scope>` for more information.
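+
+  A minimal sketch (the scope policy and fixture are hypothetical):
+
+  .. code-block:: python
+
+      import pytest
+
+
+      def determine_scope(fixture_name, config):
+          # example policy: widen the scope when running distributed
+          if config.pluginmanager.hasplugin("xdist"):
+              return "session"
+          return "function"
+
+
+      @pytest.fixture(scope=determine_scope)
+      def shared_resource():
+          yield {"ready": True}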
+
+
+- :issue:`5764`: New behavior of the ``--pastebin`` option: failures to connect to the pastebin server are reported, without failing the pytest run.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`5806`: Fix the "lexer" used when uploading to bpaste.net from ``--pastebin``; it is now "text".
+
+
+- :issue:`5884`: Fix ``--setup-only`` and ``--setup-show`` for custom pytest items.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`5056`: The HelpFormatter uses ``py.io.get_terminal_width`` for better width detection.
+
+
+pytest 5.1.3 (2019-09-18)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5807`: Fix pypy3.6 (nightly) on windows.
+
+
+- :issue:`5811`: Handle ``--fulltrace`` correctly with ``pytest.raises``.
+
+
+- :issue:`5819`: Windows: Fix regression with conftest whose qualified name contains uppercase
+ characters (introduced by #5792).
+
+
+pytest 5.1.2 (2019-08-30)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`2270`: Fixed ``self`` reference in function-scoped fixtures defined in plugin classes: previously ``self``
+ would be a reference to a *test* class, not the *plugin* class.
+
+
+- :issue:`570`: Fixed long standing issue where fixture scope was not respected when indirect fixtures were used during
+ parametrization.
+
+
+- :issue:`5782`: Fix decoding error when printing an error response from ``--pastebin``.
+
+
+- :issue:`5786`: Chained exceptions in test and collection reports are now correctly serialized, allowing plugins like
+ ``pytest-xdist`` to display them properly.
+
+
+- :issue:`5792`: Windows: Fix error that occurs in certain circumstances when loading
+ ``conftest.py`` from a working directory that has casing other than the one stored
+ in the filesystem (e.g., ``c:\test`` instead of ``C:\test``).
+
+
+pytest 5.1.1 (2019-08-20)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5751`: Fixed ``TypeError`` when importing pytest on Python 3.5.0 and 3.5.1.
+
+
+pytest 5.1.0 (2019-08-15)
+=========================
+
+Removals
+--------
+
+- :issue:`5180`: As per our policy, the following features have been deprecated in the 4.X series and are now
+ removed:
+
+ * ``Request.getfuncargvalue``: use ``Request.getfixturevalue`` instead.
+
+ * ``pytest.raises`` and ``pytest.warns`` no longer support strings as the second argument.
+
+ * ``message`` parameter of ``pytest.raises``.
+
+ * ``pytest.raises``, ``pytest.warns`` and ``ParameterSet.param`` now use native keyword-only
+ syntax. This might change the exception message from previous versions, but they still raise
+ ``TypeError`` on unknown keyword arguments as before.
+
+ * ``pytest.config`` global variable.
+
+ * ``tmpdir_factory.ensuretemp`` method.
+
+ * ``pytest_logwarning`` hook.
+
+ * ``RemovedInPytest4Warning`` warning type.
+
+ * ``request`` is now a reserved name for fixtures.
+
+
+ For more information consult :std:doc:`deprecations` in the docs.
+
+
+- :issue:`5565`: Removed unused support code for :pypi:`unittest2`.
+
+ The ``unittest2`` backport module is no longer
+ necessary since Python 3.3+, and the small amount of code in pytest to support it also doesn't seem
+  to be used: after removing it, all tests still pass unchanged.
+
+ Although our policy is to introduce a deprecation period before removing any features or support
+ for third party libraries, because this code is apparently not used
+ at all (even if ``unittest2`` is used by a test suite executed by pytest), it was decided to
+ remove it in this release.
+
+ If you experience a regression because of this, please :issue:`file an issue <new>`.
+
+
+- :issue:`5615`: ``pytest.fail``, ``pytest.xfail`` and ``pytest.skip`` no longer support bytes for the message argument.
+
+ This was supported for Python 2 where it was tempting to use ``"message"``
+ instead of ``u"message"``.
+
+ Python 3 code is unlikely to pass ``bytes`` to these functions. If you do,
+ please decode it to an ``str`` beforehand.
+
+
+
+Features
+--------
+
+- :issue:`5564`: New ``Config.invocation_args`` attribute containing the unchanged arguments passed to ``pytest.main()``.
+
+
+- :issue:`5576`: New :ref:`NUMBER <using doctest options>`
+ option for doctests to ignore irrelevant differences in floating-point numbers.
+ Inspired by Sébastien Boisgérault's `numtest <https://github.com/boisgera/numtest>`__
+ extension for doctest.
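+
+  A minimal sketch of a doctest using the new flag (assumes ``--doctest-modules`` is enabled):
+
+  .. code-block:: python
+
+      def fraction_example():
+          """
+          >>> 1 / 3  # doctest: +NUMBER
+          0.333
+          """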
+
+
+
+Improvements
+------------
+
+- :issue:`5471`: JUnit XML now includes a timestamp and hostname in the testsuite tag.
+
+
+- :issue:`5707`: Time taken to run the test suite now includes a human-readable representation when it takes over
+ 60 seconds, for example::
+
+ ===== 2 failed in 102.70s (0:01:42) =====
+
+
+
+Bug Fixes
+---------
+
+- :issue:`4344`: Fix RuntimeError/StopIteration when trying to collect a package containing only an ``__init__.py`` file.
+
+
+- :issue:`5115`: Warnings issued during ``pytest_configure`` are explicitly not treated as errors, even if configured as such, because it otherwise completely breaks pytest.
+
+
+- :issue:`5477`: The XML file produced by ``--junitxml`` now correctly contains a ``<testsuites>`` root element.
+
+
+- :issue:`5524`: Fix issue where ``tmp_path`` and ``tmpdir`` would not remove directories containing files marked as read-only,
+ which could lead to pytest crashing when executed a second time with the ``--basetemp`` option.
+
+
+- :issue:`5537`: Replace ``importlib_metadata`` backport with ``importlib.metadata`` from the
+ standard library on Python 3.8+.
+
+
+- :issue:`5578`: Improve type checking for some exception-raising functions (``pytest.xfail``, ``pytest.skip``, etc)
+ so they provide better error messages when users meant to use marks (for example ``@pytest.xfail``
+ instead of ``@pytest.mark.xfail``).
+
+
+- :issue:`5606`: Fixed internal error when test functions were patched with objects that cannot be compared
+ for truth values against others, like ``numpy`` arrays.
+
+
+- :issue:`5634`: ``pytest.exit`` is now correctly handled in ``unittest`` cases.
+ This makes ``unittest`` cases handle ``quit`` from pytest's pdb correctly.
+
+
+- :issue:`5650`: Improved output when parsing an ini configuration file fails.
+
+
+- :issue:`5701`: Fix collection of ``staticmethod`` objects defined with ``functools.partial``.
+
+
+- :issue:`5734`: Skip async generator test functions, and update the warning message to refer to ``async def`` functions.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`5669`: Add docstring for ``Testdir.copy_example``.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`5095`: XML files of the ``xunit2`` family are now validated against the schema by pytest's own test suite
+ to avoid future regressions.
+
+
+- :issue:`5516`: Cache node splitting function which can improve collection performance in very large test suites.
+
+
+- :issue:`5603`: Simplified internal ``SafeRepr`` class and removed some dead code.
+
+
+- :issue:`5664`: When invoking pytest's own testsuite with ``PYTHONDONTWRITEBYTECODE=1``,
+ the ``test_xfail_handling`` test no longer fails.
+
+
+- :issue:`5684`: Replace manual handling of ``OSError.errno`` in the codebase by new ``OSError`` subclasses (``PermissionError``, ``FileNotFoundError``, etc.).
+
+
+pytest 5.0.1 (2019-07-04)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5479`: Improve quoting in ``raises`` match failure message.
+
+
+- :issue:`5523`: Fixed using multiple short options together in the command-line (for example ``-vs``) in Python 3.8+.
+
+
+- :issue:`5547`: ``--stepwise`` now handles ``xfail(strict=True)`` markers properly.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`5517`: Improve "Declaring new hooks" section in chapter "Writing Plugins"
+
+
+pytest 5.0.0 (2019-06-28)
+=========================
+
+Important
+---------
+
+This release is a Python 3.5+ only release.
+
+For more details, see our :std:doc:`Python 2.7 and 3.4 support plan <py27-py34-deprecation>`.
+
+Removals
+--------
+
+- :issue:`1149`: Pytest no longer accepts prefixes of command-line arguments, for example
+ typing ``pytest --doctest-mod`` in place of ``--doctest-modules``.
+ This was previously allowed where the ``ArgumentParser`` thought it was unambiguous,
+ but this could be incorrect due to delayed parsing of options for plugins.
+ See for example issues :issue:`1149`,
+ :issue:`3413`, and
+ :issue:`4009`.
+
+
+- :issue:`5402`: **PytestDeprecationWarnings are now errors by default.**
+
+ Following our plan to remove deprecated features with as little disruption as
+ possible, all warnings of type ``PytestDeprecationWarning`` now generate errors
+ instead of warning messages.
+
+ **The affected features will be effectively removed in pytest 5.1**, so please consult the
+ :std:doc:`deprecations` section in the docs for directions on how to update existing code.
+
+ In the pytest ``5.0.X`` series, it is possible to change the errors back into warnings as a stop
+ gap measure by adding this to your ``pytest.ini`` file:
+
+ .. code-block:: ini
+
+ [pytest]
+ filterwarnings =
+ ignore::pytest.PytestDeprecationWarning
+
+ But this will stop working when pytest ``5.1`` is released.
+
+ **If you have concerns** about the removal of a specific feature, please add a
+ comment to :issue:`5402`.
+
+
+- :issue:`5412`: ``ExceptionInfo`` objects (returned by ``pytest.raises``) now have the same ``str`` representation as ``repr``, which
+ avoids some confusion when users use ``print(e)`` to inspect the object.
+
+ This means code like:
+
+ .. code-block:: python
+
+ with pytest.raises(SomeException) as e:
+ ...
+ assert "some message" in str(e)
+
+
+ Needs to be changed to:
+
+ .. code-block:: python
+
+ with pytest.raises(SomeException) as e:
+ ...
+ assert "some message" in str(e.value)
+
+
+
+
+Deprecations
+------------
+
+- :issue:`4488`: The removal of the ``--result-log`` option and module has been postponed to (tentatively) pytest 6.0 as
+ the team has not yet gotten around to implementing a good alternative for it.
+
+
+- :issue:`466`: The ``funcargnames`` attribute has been an alias for ``fixturenames`` since
+ pytest 2.3, and is now deprecated in code too.
+
+
+
+Features
+--------
+
+- :issue:`3457`: New :hook:`pytest_assertion_pass`
+ hook, called with context information when an assertion *passes*.
+
+ This hook is still **experimental** so use it with caution.
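+
+ A minimal sketch of a ``conftest.py`` implementation (enabling it may also require the
+ ``enable_assertion_pass_hook`` ini setting):
+
+ .. code-block:: python
+
+     def pytest_assertion_pass(item, lineno, orig, expl):
+         # called for each passing assertion; orig is the source text, expl the explanation
+         print("{}:{}: passed: {}".format(item.nodeid, lineno, orig))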
+
+
+- :issue:`5440`: The :mod:`faulthandler` standard library
+ module is now enabled by default to help users diagnose crashes in C modules.
+
+ This functionality was provided by integrating the external
+ `pytest-faulthandler <https://github.com/pytest-dev/pytest-faulthandler>`__ plugin into the core,
+ so users should remove that plugin from their requirements if used.
+
+ For more information see the docs: :ref:`faulthandler`.
+
+
+- :issue:`5452`: When warnings are configured as errors, pytest warnings now appear as originating from ``pytest.`` instead of the internal ``_pytest.warning_types.`` module.
+
+
+- :issue:`5125`: ``Session.exitcode`` values are now coded in ``pytest.ExitCode``, an ``IntEnum``. This makes the exit codes available to consumer code and more explicit than relying on documentation alone. User-defined exit codes are still valid, but should be used with caution.
+
+ The team doesn't expect this change to break test suites or plugins in general, except in esoteric/specific scenarios.
+
+ **pytest-xdist** users should upgrade to ``1.29.0`` or later, as ``pytest-xdist`` required a compatibility fix because of this change.
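+
+ A minimal sketch (hypothetical ``conftest.py``) comparing against the new enum:
+
+ .. code-block:: python
+
+     import pytest
+
+     def pytest_sessionfinish(session, exitstatus):
+         # exitstatus is now a pytest.ExitCode member (an IntEnum), still comparable to plain ints
+         if exitstatus == pytest.ExitCode.TESTS_FAILED:
+             print("at least one test failed")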
+
+
+
+Bug Fixes
+---------
+
+- :issue:`1403`: Switch from ``imp`` to ``importlib``.
+
+
+- :issue:`1671`: The name of the ``.pyc`` files cached by the assertion writer now includes the pytest version
+ to avoid stale caches.
+
+
+- :issue:`2761`: Honor :pep:`235` on case-insensitive file systems.
+
+
+- :issue:`5078`: Test module is no longer double-imported when using ``--pyargs``.
+
+
+- :issue:`5260`: Improved comparison of byte strings.
+
+ When comparing bytes, the assertion message used to show the numeric byte values when showing the differences::
+
+ def test():
+ > assert b'spam' == b'eggs'
+ E AssertionError: assert b'spam' == b'eggs'
+ E At index 0 diff: 115 != 101
+ E Use -v to get the full diff
+
+ It now shows the actual ASCII representation instead, which is often more useful::
+
+ def test():
+ > assert b'spam' == b'eggs'
+ E AssertionError: assert b'spam' == b'eggs'
+ E At index 0 diff: b's' != b'e'
+ E Use -v to get the full diff
+
+
+- :issue:`5335`: Colorize level names when the level in the logging format is formatted using
+ '%(levelname).Xs' (truncated fixed width alignment), where X is an integer.
+
+
+- :issue:`5354`: Fix ``pytest.mark.parametrize`` when the argvalues is an iterator.
+
+
+- :issue:`5370`: Revert unrolling of ``all()`` to fix ``NameError`` on nested comprehensions.
+
+
+- :issue:`5371`: Revert unrolling of ``all()`` to fix incorrect handling of generators with ``if``.
+
+
+- :issue:`5372`: Revert unrolling of ``all()`` to fix incorrect assertion when using ``all()`` in an expression.
+
+
+- :issue:`5383`: ``-q`` again has an impact on the style of the collected items
+ (``--collect-only``) when ``--log-cli-level`` is used.
+
+
+- :issue:`5389`: Fix regressions of :pull:`5063` for ``importlib_metadata.PathDistribution`` objects whose ``files`` attribute is ``None``.
+
+
+- :issue:`5390`: Fix regression where the ``obj`` attribute of ``TestCase`` items was no longer bound to methods.
+
+
+- :issue:`5404`: Emit a warning when attempting to unwrap a broken object raises an exception,
+ for easier debugging (:issue:`5080`).
+
+
+- :issue:`5432`: Prevent "already imported" warnings from assertion rewriter when invoking pytest in-process multiple times.
+
+
+- :issue:`5433`: Fix assertion rewriting in packages (``__init__.py``).
+
+
+- :issue:`5444`: Fix ``--stepwise`` mode when the first file passed on the command-line fails to collect.
+
+
+- :issue:`5482`: Fix bug introduced in 4.6.0 causing collection errors when passing
+ more than 2 positional arguments to ``pytest.mark.parametrize``.
+
+
+- :issue:`5505`: Fix crash when discovery fails while using ``-p no:terminal``.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`5315`: Expand docs on mocking classes and dictionaries with ``monkeypatch``.
+
+
+- :issue:`5416`: Fix PytestUnknownMarkWarning in run/skip example.
+
+
+pytest 4.6.11 (2020-06-04)
+==========================
+
+Bug Fixes
+---------
+
+- :issue:`6334`: Fix summary entries appearing twice when ``f/F`` and ``s/S`` report chars were used at the same time in the ``-r`` command-line option (for example ``-rFf``).
+
+ The upper-case variants were never documented and the lower-case form is preferred.
+
+
+- :issue:`7310`: Fix ``UnboundLocalError: local variable 'letter' referenced before
+ assignment`` in ``_pytest.terminal.pytest_report_teststatus()``
+ when plugins return report objects in an unconventional state.
+
+ This was making ``pytest_report_teststatus()`` skip
+ entering if-block branches that declare the ``letter`` variable.
+
+ The fix was to set an initial value for ``letter`` before
+ the if-block cascade so that it always has a value.
+
+
+pytest 4.6.10 (2020-05-08)
+==========================
+
+Features
+--------
+
+- :issue:`6870`: New ``Config.invocation_args`` attribute containing the unchanged arguments passed to ``pytest.main()``.
+
+ Remark: while this is technically a new feature and according to our :ref:`policy <what goes into 4.6.x releases>` it should not have been backported, we have made an exception in this particular case because it fixes a serious interaction with ``pytest-xdist``, so it can also be considered a bugfix.
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`6404`: Remove usage of ``parser`` module, deprecated in Python 3.9.
+
+
+pytest 4.6.9 (2020-01-04)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`6301`: Fix assertion rewriting for egg-based distributions and ``editable`` installs (``pip install --editable``).
+
+
+pytest 4.6.8 (2019-12-19)
+=========================
+
+Features
+--------
+
+- :issue:`5471`: JUnit XML now includes a timestamp and hostname in the testsuite tag.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`5430`: junitxml: Logs for a failed test are now passed to the JUnit report if the test fails during the call phase.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`6345`: Pin ``colorama`` to ``0.4.1`` only for Python 3.4 so newer Python versions can still receive colorama updates.
+
+
+pytest 4.6.7 (2019-12-05)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5477`: The XML file produced by ``--junitxml`` now correctly contains a ``<testsuites>`` root element.
+
+
+- :issue:`6044`: Properly ignore ``FileNotFoundError`` (``OSError.errno == ENOENT`` in Python 2) exceptions when trying to remove old temporary directories,
+ for instance when multiple processes try to remove the same directory (common with ``pytest-xdist``
+ for example).
+
+
+pytest 4.6.6 (2019-10-11)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5523`: Fixed using multiple short options together in the command-line (for example ``-vs``) in Python 3.8+.
+
+
+- :issue:`5537`: Replace ``importlib_metadata`` backport with ``importlib.metadata`` from the
+ standard library on Python 3.8+.
+
+
+- :issue:`5806`: Fix the ``lexer`` used when uploading to bpaste.net with ``--pastebin``: it is now ``text``.
+
+
+- :issue:`5902`: Fix warnings about deprecated ``cmp`` attribute in ``attrs>=19.2``.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`5801`: Fix Python version checks (detected by ``flake8-2020``) in case Python 4 becomes a thing.
+
+
+pytest 4.6.5 (2019-08-05)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`4344`: Fix RuntimeError/StopIteration when trying to collect package with "__init__.py" only.
+
+
+- :issue:`5478`: Fix encode error when using unicode strings in exceptions with ``pytest.raises``.
+
+
+- :issue:`5524`: Fix issue where ``tmp_path`` and ``tmpdir`` would not remove directories containing files marked as read-only,
+ which could lead to pytest crashing when executed a second time with the ``--basetemp`` option.
+
+
+- :issue:`5547`: ``--stepwise`` now handles ``xfail(strict=True)`` markers properly.
+
+
+- :issue:`5650`: Improved output when parsing an ini configuration file fails.
+
+pytest 4.6.4 (2019-06-28)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5404`: Emit a warning when attempting to unwrap a broken object raises an exception,
+ for easier debugging (:issue:`5080`).
+
+
+- :issue:`5444`: Fix ``--stepwise`` mode when the first file passed on the command-line fails to collect.
+
+
+- :issue:`5482`: Fix bug introduced in 4.6.0 causing collection errors when passing
+ more than 2 positional arguments to ``pytest.mark.parametrize``.
+
+
+- :issue:`5505`: Fix crash when discovery fails while using ``-p no:terminal``.
+
+
+pytest 4.6.3 (2019-06-11)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5383`: ``-q`` again has an impact on the style of the collected items
+ (``--collect-only``) when ``--log-cli-level`` is used.
+
+
+- :issue:`5389`: Fix regressions of :pull:`5063` for ``importlib_metadata.PathDistribution`` objects whose ``files`` attribute is ``None``.
+
+
+- :issue:`5390`: Fix regression where the ``obj`` attribute of ``TestCase`` items was no longer bound to methods.
+
+
+pytest 4.6.2 (2019-06-03)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5370`: Revert unrolling of ``all()`` to fix ``NameError`` on nested comprehensions.
+
+
+- :issue:`5371`: Revert unrolling of ``all()`` to fix incorrect handling of generators with ``if``.
+
+
+- :issue:`5372`: Revert unrolling of ``all()`` to fix incorrect assertion when using ``all()`` in an expression.
+
+
+pytest 4.6.1 (2019-06-02)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5354`: Fix ``pytest.mark.parametrize`` when the argvalues is an iterator.
+
+
+- :issue:`5358`: Fix assertion rewriting of ``all()`` calls to deal with non-generators.
+
+
+pytest 4.6.0 (2019-05-31)
+=========================
+
+Important
+---------
+
+The ``4.6.X`` series will be the last series to support **Python 2 and Python 3.4**.
+
+For more details, see our :std:doc:`Python 2.7 and 3.4 support plan <py27-py34-deprecation>`.
+
+
+Features
+--------
+
+- :issue:`4559`: Added the ``junit_log_passing_tests`` ini value which can be used to enable or disable logging of passing test output in the JUnit XML file.
+
+
+- :issue:`4956`: pytester's ``testdir.spawn`` uses ``tmpdir`` as the ``HOME``/``USERPROFILE`` directory.
+
+
+- :issue:`5062`: Unroll calls to ``all`` into full for-loops with assertion rewriting for better failure messages, especially when using generator expressions.
+
+
+- :issue:`5063`: Switch from ``pkg_resources`` to ``importlib-metadata`` for entrypoint detection for improved performance and import time.
+
+
+- :issue:`5091`: The output for ini options in ``--help`` has been improved.
+
+
+- :issue:`5269`: ``pytest.importorskip`` includes the ``ImportError`` now in the default ``reason``.
+
+
+- :issue:`5311`: Captured logs that are output for each failing test are formatted using the
+ ColoredLevelFormatter.
+
+
+- :issue:`5312`: Improved formatting of multiline log messages in Python 3.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`2064`: The debugging plugin imports the wrapped ``Pdb`` class (``--pdbcls``) on-demand now.
+
+
+- :issue:`4908`: The ``pytest_enter_pdb`` hook gets called with post-mortem (``--pdb``).
+
+
+- :issue:`5036`: Fix issue where fixtures dependent on other parametrized fixtures would be erroneously parametrized.
+
+
+- :issue:`5256`: Handle internal error due to a lone surrogate unicode character not being representable in Jython.
+
+
+- :issue:`5257`: Ensure that ``sys.stdout.mode`` does not include ``'b'`` as it is a text stream.
+
+
+- :issue:`5278`: Pytest's internal python plugin can be disabled using ``-p no:python`` again.
+
+
+- :issue:`5286`: Fix issue with ``disable_test_id_escaping_and_forfeit_all_rights_to_community_support`` option not working when using a list of test IDs in parametrized tests.
+
+
+- :issue:`5330`: Show the test module being collected when emitting ``PytestCollectionWarning`` messages for
+ test classes with ``__init__`` and ``__new__`` methods to make it easier to pin down the problem.
+
+
+- :issue:`5333`: Fix regression in 4.5.0 with ``--lf`` not re-running all tests with known failures from non-selected tests.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`5250`: Expand docs on use of ``setenv`` and ``delenv`` with ``monkeypatch``.
+
+
+pytest 4.5.0 (2019-05-11)
+=========================
+
+Features
+--------
+
+- :issue:`4826`: A warning is now emitted when unknown marks are used as a decorator.
+ This is often due to a typo, which can lead to silently broken tests.
+
+
+- :issue:`4907`: Show XFail reason as part of JUnitXML message field.
+
+
+- :issue:`5013`: Messages from crash reports are displayed within test summaries now, truncated to the terminal width.
+
+
+- :issue:`5023`: New flag ``--strict-markers`` that triggers an error when unknown markers (e.g. those not registered using the :confval:`markers` option in the configuration file) are used in the test suite.
+
+ The existing ``--strict`` option has the same behavior currently, but can be augmented in the future for additional checks.
+
+
+- :issue:`5026`: Assertion failure messages for sequences and dicts contain the number of different items now.
+
+
+- :issue:`5034`: Improve reporting with ``--lf`` and ``--ff`` (run-last-failure).
+
+
+- :issue:`5035`: The ``--cache-show`` option/action accepts an optional glob to show only matching cache entries.
+
+
+- :issue:`5059`: Standard input (stdin) can be given to pytester's ``Testdir.run()`` and ``Testdir.popen()``.
+
+
+- :issue:`5068`: The ``-r`` option learnt about ``A`` to display all reports (including passed ones) in the short test summary.
+
+
+- :issue:`5108`: The short test summary is displayed after passes with output (``-rP``).
+
+
+- :issue:`5172`: The ``--last-failed`` (``--lf``) option got smarter and will now skip entire files if all tests
+ of that test file have passed in previous runs, greatly speeding up collection.
+
+
+- :issue:`5177`: Introduce new specific warning ``PytestWarning`` subclasses to make it easier to filter warnings based on the class, rather than on the message. The new subclasses are:
+
+
+ * ``PytestAssertRewriteWarning``
+
+ * ``PytestCacheWarning``
+
+ * ``PytestCollectionWarning``
+
+ * ``PytestConfigWarning``
+
+ * ``PytestUnhandledCoroutineWarning``
+
+ * ``PytestUnknownMarkWarning``
+
+
+- :issue:`5202`: New ``record_testsuite_property`` session-scoped fixture allows users to log ``<property>`` tags at the ``testsuite``
+ level with the ``junitxml`` plugin.
+
+ The generated XML is compatible with the latest xunit standard, contrary to
+ the properties recorded by ``record_property`` and ``record_xml_attribute``.
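+
+ For example, a test can request the new fixture directly (the property names are arbitrary examples):
+
+ .. code-block:: python
+
+     def test_hello(record_testsuite_property):
+         # recorded values end up as <property> children of <testsuite> in the XML
+         record_testsuite_property("ARCH", "PPC")
+         record_testsuite_property("STORAGE_TYPE", "CEPH")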
+
+
+- :issue:`5214`: The default logging format has been changed to improve readability. Here is an
+ example of a previous logging message::
+
+ test_log_cli_enabled_disabled.py 3 CRITICAL critical message logged by test
+
+ This has now become::
+
+ CRITICAL root:test_log_cli_enabled_disabled.py:3 critical message logged by test
+
+ The formatting can be changed through the :confval:`log_format` configuration option.
+
+
+- :issue:`5220`: ``--fixtures`` now also shows fixture scope for scopes other than ``"function"``.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`5113`: Deselected items from plugins using ``pytest_collect_modifyitems`` as a hookwrapper are correctly reported now.
+
+
+- :issue:`5144`: With usage errors ``exitstatus`` is set to ``EXIT_USAGEERROR`` in the ``pytest_sessionfinish`` hook now as expected.
+
+
+- :issue:`5235`: ``outcome.exit`` is not used with ``EOF`` in the pdb wrapper anymore, but only with ``quit``.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`4935`: Expand docs on registering marks and the effect of ``--strict``.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`4942`: ``logging.raiseExceptions`` is not set to ``False`` anymore.
+
+
+- :issue:`5013`: pytest now depends on :pypi:`wcwidth` to properly track unicode character sizes for more precise terminal output.
+
+
+- :issue:`5059`: pytester's ``Testdir.popen()`` uses ``stdout`` and ``stderr`` via keyword arguments with defaults now (``subprocess.PIPE``).
+
+
+- :issue:`5069`: The code for the short test summary in the terminal was moved to the terminal plugin.
+
+
+- :issue:`5082`: Improved validation of kwargs for various methods in the pytester plugin.
+
+
+- :issue:`5202`: ``record_property`` now emits a ``PytestWarning`` when used with ``junit_family=xunit2``: the fixture generates
+ ``property`` tags as children of ``testcase``, which is not permitted according to the most
+ `recent schema <https://github.com/jenkinsci/xunit-plugin/blob/master/
+ src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd>`__.
+
+
+- :issue:`5239`: Pin ``pluggy`` to ``< 1.0`` so we don't update to ``1.0`` automatically when
+ it gets released: there are planned breaking changes, and we want to ensure
+ pytest properly supports ``pluggy 1.0``.
+
+
+pytest 4.4.2 (2019-05-08)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5089`: Fix crash caused by error in ``__repr__`` function with both ``showlocals`` and verbose output enabled.
+
+
+- :issue:`5139`: Eliminate core dependency on 'terminal' plugin.
+
+
+- :issue:`5229`: Require ``pluggy>=0.11.0`` which reverts a dependency to ``importlib-metadata`` added in ``0.10.0``.
+ The ``importlib-metadata`` package cannot be imported when installed as an egg and causes issues when relying on ``setup.py`` to install test dependencies.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`5171`: Doc: the documented type of the ``path`` parameter of the ``pytest_ignore_collect``, ``pytest_collect_directory``, ``pytest_collect_file`` and ``pytest_pycollect_makemodule`` hooks is now ``py.path.local``.
+
+
+- :issue:`5188`: Improve help for ``--runxfail`` flag.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`5182`: Removed internal and unused ``_pytest.deprecated.MARK_INFO_ATTRIBUTE``.
+
+
+pytest 4.4.1 (2019-04-15)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`5031`: Environment variables are properly restored when using pytester's ``testdir`` fixture.
+
+
+- :issue:`5039`: Fix regression with ``--pdbcls``, which stopped working with local modules in 4.0.0.
+
+
+- :issue:`5092`: Produce a warning when unknown keywords are passed to ``pytest.param(...)``.
+
+
+- :issue:`5098`: Invalidate import caches with ``monkeypatch.syspath_prepend``, which is required with namespace packages being used.
+
+
+pytest 4.4.0 (2019-03-29)
+=========================
+
+Features
+--------
+
+- :issue:`2224`: ``async`` test functions are skipped and a warning is emitted when a suitable
+ async plugin is not installed (such as ``pytest-asyncio`` or ``pytest-trio``).
+
+ Previously ``async`` functions would not execute at all but still be marked as "passed".
+
+
+- :issue:`2482`: Include new ``disable_test_id_escaping_and_forfeit_all_rights_to_community_support`` option to disable ascii-escaping in parametrized values. This may cause a series of problems and as the name makes clear, use at your own risk.
+
+
+- :issue:`4718`: The ``-p`` option can now be used to early-load plugins also by entry-point name, instead of just
+ by module name.
+
+ This makes it possible to early load external plugins like ``pytest-cov`` in the command-line::
+
+ pytest -p pytest_cov
+
+
+- :issue:`4855`: The ``--pdbcls`` option handles classes via module attributes now (e.g.
+ ``pdb:pdb.Pdb`` with :pypi:`pdbpp`), and its validation was improved.
+
+
+- :issue:`4875`: The :confval:`testpaths` configuration option is now displayed next
+ to the ``rootdir`` and ``inifile`` lines in the pytest header if the option is in effect, i.e., directories or file names were
+ not explicitly passed in the command line.
+
+ Also, ``inifile`` is only displayed if there's a configuration file, instead of an empty ``inifile:`` string.
+
+
+- :issue:`4911`: Doctests can now be skipped dynamically using ``pytest.skip()``.
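+
+ A minimal sketch of a doctest that skips itself at runtime (the platform check is an arbitrary example):
+
+ .. code-block:: python
+
+     def parse_config():
+         """
+         >>> import sys, pytest
+         >>> if sys.platform.startswith("win"):
+         ...     pytest.skip("not supported on Windows")
+         >>> parse_config()
+         'parsed'
+         """
+         return "parsed"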
+
+
+- :issue:`4920`: Internal refactorings have been made in order to make the implementation of the
+ `pytest-subtests <https://github.com/pytest-dev/pytest-subtests>`__ plugin
+ possible, which adds unittest sub-test support and a new ``subtests`` fixture as discussed in
+ :issue:`1367`.
+
+ For details on the internal refactorings, please see the details on the related PR.
+
+
+- :issue:`4931`: pytester's ``LineMatcher`` asserts that the passed lines are a sequence.
+
+
+- :issue:`4936`: Handle ``-p plug`` after ``-p no:plug``.
+
+ This can be used to override a blocked plugin (e.g. in "addopts") from the
+ command line etc.
+
+
+- :issue:`4951`: Output capturing is handled correctly when only capturing via fixtures (``capsys``, ``capfd``) with ``pdb.set_trace()``.
+
+
+- :issue:`4956`: ``pytester`` sets ``$HOME`` and ``$USERPROFILE`` to the temporary directory during test runs.
+
+ This ensures configuration files are not loaded from the real user's home directory.
+
+
+- :issue:`4980`: Namespace packages are handled better with ``monkeypatch.syspath_prepend`` and ``testdir.syspathinsert`` (via ``pkg_resources.fixup_namespace_packages``).
+
+
+- :issue:`4993`: The stepwise plugin reports status information now.
+
+
+- :issue:`5008`: If a ``setup.cfg`` file contains ``[tool:pytest]`` and also the no longer supported ``[pytest]`` section, pytest will use ``[tool:pytest]`` ignoring ``[pytest]``. Previously it would unconditionally error out.
+
+ This makes it simpler for plugins to support old pytest versions.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`1895`: Fix bug where fixtures requested dynamically via ``request.getfixturevalue()`` might be torn down
+ before the requesting fixture.
+
+
+- :issue:`4851`: pytester unsets ``PYTEST_ADDOPTS`` now to not use outer options with ``testdir.runpytest()``.
+
+
+- :issue:`4903`: Use the correct modified time for years after 2038 in rewritten ``.pyc`` files.
+
+
+- :issue:`4928`: Fix line offsets with ``ScopeMismatch`` errors.
+
+
+- :issue:`4957`: ``-p no:plugin`` is handled correctly for default (internal) plugins now, e.g. with ``-p no:capture``.
+
+ Previously they were loaded (imported) always, making e.g. the ``capfd`` fixture available.
+
+
+- :issue:`4968`: The pdb ``quit`` command is handled properly when used after the ``debug`` command with :pypi:`pdbpp`.
+
+
+- :issue:`4975`: Fix the interpretation of the ``-qq`` option, which was being treated as ``-v`` instead.
+
+
+- :issue:`4978`: ``outcomes.Exit`` is not swallowed in ``assertrepr_compare`` anymore.
+
+
+- :issue:`4988`: Close logging's file handler explicitly when the session finishes.
+
+
+- :issue:`5003`: Fix line offset with mark collection error (off by one).
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`4974`: Update docs for the ``pytest_cmdline_parse`` hook to note availability limitations.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`4718`: ``pluggy>=0.9`` is now required.
+
+
+- :issue:`4815`: ``funcsigs>=1.0`` is now required for Python 2.7.
+
+
+- :issue:`4829`: Some left-over internal code related to ``yield`` tests has been removed.
+
+
+- :issue:`4890`: Remove internally unused ``anypython`` fixture from the pytester plugin.
+
+
+- :issue:`4912`: Remove deprecated Sphinx directive, ``add_description_unit()``,
+ pin sphinx-removed-in to >= 0.2.0 to support Sphinx 2.0.
+
+
+- :issue:`4913`: Fix pytest tests invocation with custom ``PYTHONPATH``.
+
+
+- :issue:`4965`: New ``pytest_report_to_serializable`` and ``pytest_report_from_serializable`` **experimental** hooks.
+
+ These hooks will be used by ``pytest-xdist``, ``pytest-subtests``, and the replacement for
+ resultlog to serialize and customize reports.
+
+ They are experimental, meaning that their details might change or even be removed
+ completely in future patch releases without warning.
+
+ Feedback is welcome from plugin authors and users alike.
+
+
+- :issue:`4987`: ``Collector.repr_failure`` respects the ``--tb`` option, but only defaults to ``short`` now (with ``auto``).
+
+
+pytest 4.3.1 (2019-03-11)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`4810`: Logging messages inside ``pytest_runtest_logreport()`` are now properly captured and displayed.
+
+
+- :issue:`4861`: Improve validation of contents written to captured output so it behaves the same as when capture is disabled.
+
+
+- :issue:`4898`: Fix ``AttributeError: FixtureRequest has no 'confg' attribute`` bug in ``testdir.copy_example``.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`4768`: Avoid pkg_resources import at the top-level.
+
+
+pytest 4.3.0 (2019-02-16)
+=========================
+
+Deprecations
+------------
+
+- :issue:`4724`: ``pytest.warns()`` now emits a warning when it receives unknown keyword arguments.
+
+ This will be changed into an error in the future.
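+
+ A minimal sketch using the supported ``match`` keyword (unknown keywords now trigger the warning):
+
+ .. code-block:: python
+
+     import warnings
+
+     import pytest
+
+     def test_deprecation_message():
+         with pytest.warns(DeprecationWarning, match="use the new API"):
+             warnings.warn("use the new API instead", DeprecationWarning)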
+
+
+
+Features
+--------
+
+- :issue:`2753`: Usage errors from argparse are mapped to pytest's ``UsageError``.
+
+
+- :issue:`3711`: Add the ``--ignore-glob`` parameter to exclude test-modules with Unix shell-style wildcards.
+ Add the :globalvar:`collect_ignore_glob` for ``conftest.py`` to exclude test-modules with Unix shell-style wildcards.
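+
+ A minimal sketch of a ``conftest.py`` entry (the glob patterns are arbitrary examples):
+
+ .. code-block:: python
+
+     # exclude any module matching these Unix shell-style wildcards from collection
+     collect_ignore_glob = ["*_py2.py", "legacy_*.py"]
+
+ The equivalent on the command line is ``--ignore-glob='*_py2.py'``.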
+
+
+- :issue:`4698`: The warning about Python 2.7 and 3.4 not being supported in pytest 5.0 has been removed.
+
+ In the end it was considered to be more
+ of a nuisance than actually useful, and users of those Python versions shouldn't have problems as ``pip`` will not
+ install pytest 5.0 on those interpreters.
+
+
+- :issue:`4707`: The new ``set_log_path()`` method provides a way to set ``log_file`` paths from hooks.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`4651`: ``--help`` and ``--version`` are handled with ``UsageError``.
+
+
+- :issue:`4782`: Fix ``AssertionError`` with collection of broken symlinks with packages.
+
+
+pytest 4.2.1 (2019-02-12)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`2895`: The ``pytest_report_collectionfinish`` hook now is also called with ``--collect-only``.
+
+
+- :issue:`3899`: Do not raise ``UsageError`` when an imported package has a ``pytest_plugins.py`` child module.
+
+
+- :issue:`4347`: Fix output capturing when using pdb++ with recursive debugging.
+
+
+- :issue:`4592`: Fix handling of ``collect_ignore`` via parent ``conftest.py``.
+
+
+- :issue:`4700`: Fix regression where ``setUpClass`` would always be called in subclasses even if all tests
+ were skipped by a ``unittest.skip()`` decorator applied in the subclass.
+
+
+- :issue:`4739`: Fix ``parametrize(... ids=<function>)`` when the function returns non-strings.
+
+
+- :issue:`4745`: Fix/improve collection of args when passing in ``__init__.py`` and a test file.
+
+
+- :issue:`4770`: ``more_itertools`` is now constrained to <6.0.0 when required for Python 2.7 compatibility.
+
+
+- :issue:`526`: Fix "ValueError: Plugin already registered" exceptions when running in build directories that symlink to actual source.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`3899`: Add note to ``plugins.rst`` that ``pytest_plugins`` should not be used as a name for a user module containing plugins.
+
+
+- :issue:`4324`: Document how to use ``raises`` and ``does_not_raise`` to write parametrized tests with conditional raises.
+
+
+- :issue:`4709`: Document how to customize test failure messages when using
+ ``pytest.warns``.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`4741`: Some verbosity related attributes of the TerminalReporter plugin are now
+ read only properties.
+
+
+pytest 4.2.0 (2019-01-30)
+=========================
+
+Features
+--------
+
+- :issue:`3094`: :doc:`Classic xunit-style <how-to/xunit_setup>` functions and methods
+ now obey the scope of *autouse* fixtures.
+
+ This fixes a number of surprising issues like ``setup_method`` being called before session-scoped
+ autouse fixtures (see :issue:`517` for an example).
+
+
+- :issue:`4627`: Display a message at the end of the test session when running under Python 2.7 and 3.4 that pytest 5.0 will no longer
+ support those Python versions.
+
+
+- :issue:`4660`: The number of *selected* tests is now also displayed when the ``-k`` or ``-m`` flags are used.
+
+
+- :issue:`4688`: ``pytest_report_teststatus`` hook now can also receive a ``config`` parameter.
+
+
+- :issue:`4691`: ``pytest_terminal_summary`` hook now can also receive a ``config`` parameter.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`3547`: ``--junitxml`` can emit XML compatible with Jenkins xUnit.
+ The ``junit_family`` INI option accepts ``legacy|xunit1``, which produces old-style output, and ``xunit2``, which conforms more strictly to https://github.com/jenkinsci/xunit-plugin/blob/xunit-2.3.2/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd
+
+
+- :issue:`4280`: Improve quitting from pdb, especially with ``--trace``.
+
+ Using ``q[quit]`` after ``pdb.set_trace()`` will quit pytest also.
+
+
+- :issue:`4402`: Warning summary now groups warnings by message instead of by test id.
+
+ This makes the output more compact and better conveys the general idea of how much code is
+ actually generating warnings, instead of how many tests call that code.
+
+
+- :issue:`4536`: ``monkeypatch.delattr`` handles class descriptors like ``staticmethod``/``classmethod``.
+
+
+- :issue:`4649`: Restore marks being considered keywords for keyword expressions.
+
+
+- :issue:`4653`: The ``tmp_path`` fixture and other related fixtures now provide resolved paths (a.k.a. real paths).
+
+
+- :issue:`4667`: ``pytest_terminal_summary`` uses result from ``pytest_report_teststatus`` hook, rather than hardcoded strings.
+
+
+- :issue:`4669`: Correctly handle ``unittest.SkipTest`` exception containing non-ascii characters on Python 2.
+
+
+- :issue:`4680`: Ensure the ``tmpdir`` and the ``tmp_path`` fixtures are the same folder.
+
+
+- :issue:`4681`: Ensure ``tmp_path`` is always a real path.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`4643`: Use ``a.item()`` instead of the deprecated ``np.asscalar(a)`` in ``pytest.approx``.
+
+ ``np.asscalar`` has been :doc:`deprecated <numpy:release/1.16.0-notes>` in ``numpy 1.16``.
+
+
+- :issue:`4657`: Copy ``saferepr`` from pylib.
+
+
+- :issue:`4668`: The verbose word for expected failures in the teststatus report changes from ``xfail`` to ``XFAIL`` to be consistent with other test outcomes.
+
+
+pytest 4.1.1 (2019-01-12)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`2256`: Show full repr with ``assert a==b`` and ``-vv``.
+
+
+- :issue:`3456`: Extend Doctest-modules to ignore mock objects.
+
+
+- :issue:`4617`: Fixed ``pytest.warns`` bug when context manager is reused (e.g. multiple parametrization).
+
+
+- :issue:`4631`: Don't rewrite assertions when ``__getattr__`` is broken.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`3375`: Document that using ``setup.cfg`` may crash other tools or cause hard to track down problems because it uses a different parser than ``pytest.ini`` or ``tox.ini`` files.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`4602`: Uninstall ``hypothesis`` in regen tox env.
+
+
+pytest 4.1.0 (2019-01-05)
+=========================
+
+Removals
+--------
+
+- :issue:`2169`: ``pytest.mark.parametrize``: in previous versions, errors raised by id functions were suppressed and changed into warnings. Now the exceptions are propagated, along with a pytest message indicating the node, parameter value and index where the exception occurred.
+
+
+- :issue:`3078`: Remove legacy internal warnings system: ``config.warn``, ``Node.warn``. The ``pytest_logwarning`` hook now issues a warning when implemented.
+
+ See our :ref:`docs <config.warn and node.warn deprecated>` for information on how to update your code.
+
+
+- :issue:`3079`: Removed support for yield tests - they are fundamentally broken because they don't support fixtures properly since collection and test execution were separated.
+
+ See our :ref:`docs <yield tests deprecated>` for information on how to update your code.
+
+
+- :issue:`3082`: Removed support for applying marks directly to values in ``@pytest.mark.parametrize``. Use ``pytest.param`` instead.
+
+ See our :ref:`docs <marks in pytest.parametrize deprecated>` for information on how to update your code.
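+
+ A minimal sketch of the replacement, wrapping the marked value in ``pytest.param``:
+
+ .. code-block:: python
+
+     import pytest
+
+     @pytest.mark.parametrize(
+         "value",
+         [1, 2, pytest.param(-1, marks=pytest.mark.xfail(reason="negative values not supported yet"))],
+     )
+     def test_positive(value):
+         assert value > 0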
+
+
+- :issue:`3083`: Removed ``Metafunc.addcall``. This was the predecessor mechanism to ``@pytest.mark.parametrize``.
+
+ See our :ref:`docs <metafunc.addcall deprecated>` for information on how to update your code.
+
+
+- :issue:`3085`: Removed support for passing strings to ``pytest.main``. Now, always pass a list of strings instead.
+
+ See our :ref:`docs <passing command-line string to pytest.main deprecated>` for information on how to update your code.
+
+
+- :issue:`3086`: ``[pytest]`` section in **setup.cfg** files is no longer supported, use ``[tool:pytest]`` instead. ``setup.cfg`` files
+ are meant for use with ``distutils``, and a section named ``pytest`` has notoriously been a source of conflicts and bugs.
+
+ Note that for **pytest.ini** and **tox.ini** files the section remains ``[pytest]``.
+
+
+- :issue:`3616`: Removed the deprecated compat properties for ``node.Class/Function/Module`` - use ``pytest.Class/Function/Module`` now.
+
+ See our :ref:`docs <internal classes accessed through node deprecated>` for information on how to update your code.
+
+
+- :issue:`4421`: Removed the implementation of the ``pytest_namespace`` hook.
+
+ See our :ref:`docs <pytest.namespace deprecated>` for information on how to update your code.
+
+
+- :issue:`4489`: Removed ``request.cached_setup``. This was the predecessor mechanism to modern fixtures.
+
+ See our :ref:`docs <cached_setup deprecated>` for information on how to update your code.
+
+
+- :issue:`4535`: Removed the deprecated ``PyCollector.makeitem`` method. This method was made public by mistake a long time ago.
+
+
+- :issue:`4543`: Removed support to define fixtures using the ``pytest_funcarg__`` prefix. Use the ``@pytest.fixture`` decorator instead.
+
+ See our :ref:`docs <pytest_funcarg__ prefix deprecated>` for information on how to update your code.
+
+
+- :issue:`4545`: Calling fixtures directly is now always an error instead of a warning.
+
+ See our :ref:`docs <calling fixtures directly deprecated>` for information on how to update your code.
+
+
+- :issue:`4546`: Remove ``Node.get_marker(name)``; the return value was not usable for more than an existence check.
+
+ Use ``Node.get_closest_marker(name)`` as a replacement.
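+
+ A minimal sketch (hypothetical ``conftest.py`` using a hypothetical ``slow`` marker):
+
+ .. code-block:: python
+
+     import pytest
+
+     def pytest_runtest_setup(item):
+         # get_closest_marker returns the marker instance or None
+         if item.get_closest_marker("slow") is not None:
+             pytest.skip("slow tests are skipped in this sketch")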
+
+
+- :issue:`4547`: The deprecated ``record_xml_property`` fixture has been removed, use the more generic ``record_property`` instead.
+
+ See our :ref:`docs <record_xml_property deprecated>` for more information.
+
+
+- :issue:`4548`: An error is now raised if the ``pytest_plugins`` variable is defined in a non-top-level ``conftest.py`` file (i.e., not residing in the ``rootdir``).
+
+ See our :ref:`docs <pytest_plugins in non-top-level conftest files deprecated>` for more information.
+
+
+- :issue:`891`: Remove ``testfunction.markername`` attributes - use ``Node.iter_markers(name=None)`` to iterate them.
+
+
+
+Deprecations
+------------
+
+- :issue:`3050`: Deprecated the ``pytest.config`` global.
+
+ See :ref:`pytest.config global deprecated` for rationale.
+
+
+- :issue:`3974`: Passing the ``message`` parameter of ``pytest.raises`` now issues a ``DeprecationWarning``.
+
+ It is a common mistake to think this parameter will match the exception message, while in fact
+ it only serves to provide a custom message in case the ``pytest.raises`` check fails. To avoid this
+ mistake and because it is believed to be little used, pytest is deprecating it without providing
+ an alternative for the moment.
+
+ If you have concerns about this, please comment on :issue:`3974`.
+
+
+- :issue:`4435`: Deprecated ``raises(..., 'code(as_a_string)')`` and ``warns(..., 'code(as_a_string)')``.
+
+ See :std:ref:`raises-warns-exec` for rationale and examples.
+
+
+
+Features
+--------
+
+- :issue:`3191`: A warning is now issued when assertions are made for ``None``.
+
+ This is a common source of confusion among new users, which write:
+
+ .. code-block:: python
+
+ assert mocked_object.assert_called_with(3, 4, 5, key="value")
+
+ When they should write:
+
+ .. code-block:: python
+
+ mocked_object.assert_called_with(3, 4, 5, key="value")
+
+ Because the ``assert_called_with`` method of mock objects already executes an assertion.
+
+ This warning will not be issued when ``None`` is explicitly checked. An assertion like:
+
+ .. code-block:: python
+
+ assert variable is None
+
+ will not issue the warning.
+
+
+- :issue:`3632`: Richer equality comparison introspection on ``AssertionError`` for objects created using `attrs <https://www.attrs.org/en/stable/>`__ or :mod:`dataclasses` (Python 3.7+, :pypi:`backported to 3.6 <dataclasses>`).
+
+
+- :issue:`4278`: ``CACHEDIR.TAG`` files are now created inside cache directories.
+
+ Those files are part of the `Cache Directory Tagging Standard <https://bford.info/cachedir/spec.html>`__, and can
+ be used by backup or synchronization programs to identify pytest's cache directory as such.
+
+
+- :issue:`4292`: ``pytest.outcomes.Exit`` is derived from ``SystemExit`` instead of ``KeyboardInterrupt``. This allows us to better handle ``pdb`` exiting.
+
+
+- :issue:`4371`: Updated the ``--collect-only`` option to display test descriptions when run using ``--verbose``.
+
+
+- :issue:`4386`: Restructured ``ExceptionInfo`` object construction and ensure incomplete instances have a ``repr``/``str``.
+
+
+- :issue:`4416`: pdb: added support for keyword arguments with ``pdb.set_trace``.
+
+ It handles ``header`` similar to Python 3.7 does it, and forwards any
+ other keyword arguments to the ``Pdb`` constructor.
+
+ This allows for ``__import__("pdb").set_trace(skip=["foo.*"])``.
+
+
+- :issue:`4483`: Added ini parameter ``junit_duration_report`` to optionally report test call durations, excluding setup and teardown times.
+
+ The JUnit XML specification and the default pytest behavior is to include setup and teardown times in the test duration
+ report. You can include just the call durations instead (excluding setup and teardown) by adding this to your ``pytest.ini`` file:
+
+ .. code-block:: ini
+
+ [pytest]
+ junit_duration_report = call
+
+
+- :issue:`4532`: ``-ra`` now will show errors and failures last, instead of as the first items in the summary.
+
+ This makes it easier to obtain a list of errors and failures to run tests selectively.
+
+
+- :issue:`4599`: ``pytest.importorskip`` now supports a ``reason`` parameter, which will be shown when the
+ requested module cannot be imported.
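+
+ A minimal sketch (``docutils`` is just an example dependency):
+
+ .. code-block:: python
+
+     import pytest
+
+     # the test module is skipped with the given reason if the import fails
+     docutils = pytest.importorskip("docutils", reason="docutils is required for these tests")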
+
+
+
+Bug Fixes
+---------
+
+- :issue:`3532`: ``-p`` now accepts its argument without a space between the option and the value, for example ``-pmyplugin``.
+
+
+- :issue:`4327`: ``approx`` again works with more generic containers, more precisely instances of ``Iterable`` and ``Sized`` instead of more restrictive ``Sequence``.
+
+
+- :issue:`4397`: Ensure that node ids are printable.
+
+
+- :issue:`4435`: Fixed ``raises(..., 'code(string)')`` frame filename.
+
+
+- :issue:`4458`: Display actual test ids in ``--collect-only``.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`4557`: Markers example documentation page updated to support latest pytest version.
+
+
+- :issue:`4558`: Update cache documentation example to correctly show cache hit and miss.
+
+
+- :issue:`4580`: Improved detailed summary report documentation.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`4447`: Changed the deprecation type of ``--result-log`` to ``PytestDeprecationWarning``.
+
+ It was decided to remove this feature at the next major revision.
+
+
+pytest 4.0.2 (2018-12-13)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`4265`: Validate arguments from the ``PYTEST_ADDOPTS`` environment variable and the ``addopts`` ini option separately.
+
+
+- :issue:`4435`: Fix ``raises(..., 'code(string)')`` frame filename.
+
+
+- :issue:`4500`: When a fixture yields and a log call is made after the test runs, and the test is interrupted, capture attributes are ``None``.
+
+
+- :issue:`4538`: Raise ``TypeError`` for ``with raises(..., match=<non-None falsey value>)``.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`1495`: Document common doctest fixture directory tree structure pitfalls
+
+
+pytest 4.0.1 (2018-11-23)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`3952`: Display warnings before "short test summary info" again, but still later warnings in the end.
+
+
+- :issue:`4386`: Handle uninitialized exceptioninfo in repr/str.
+
+
+- :issue:`4393`: Do not create ``.gitignore``/``README.md`` files in existing cache directories.
+
+
+- :issue:`4400`: Rearrange warning handling for the yield test errors so the opt-out in 4.0.x correctly works.
+
+
+- :issue:`4405`: Fix collection of testpaths with ``--pyargs``.
+
+
+- :issue:`4412`: Fix assertion rewriting involving ``Starred`` + side-effects.
+
+
+- :issue:`4425`: Ensure we resolve the absolute path when the given ``--basetemp`` is a relative path.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`4315`: Use ``pkg_resources.parse_version`` instead of ``LooseVersion`` in the ``minversion`` check.
+
+
+- :issue:`4440`: Adjust the stack level of some internal pytest warnings.
+
+
+pytest 4.0.0 (2018-11-13)
+=========================
+
+Removals
+--------
+
+- :issue:`3737`: **RemovedInPytest4Warnings are now errors by default.**
+
+ Following our plan to remove deprecated features with as little disruption as
+ possible, all warnings of type ``RemovedInPytest4Warnings`` now generate errors
+ instead of warning messages.
+
+ **The affected features will be effectively removed in pytest 4.1**, so please consult the
+ :std:doc:`deprecations` section in the docs for directions on how to update existing code.
+
+ In the pytest ``4.0.X`` series, it is possible to change the errors back into warnings as a stop
+ gap measure by adding this to your ``pytest.ini`` file:
+
+ .. code-block:: ini
+
+ [pytest]
+ filterwarnings =
+ ignore::pytest.RemovedInPytest4Warning
+
+ But this will stop working when pytest ``4.1`` is released.
+
+ **If you have concerns** about the removal of a specific feature, please add a
+ comment to :issue:`4348`.
+
+
+- :issue:`4358`: Remove the ``::()`` notation to denote a test class instance in node ids.
+
+ Previously, node ids that contain test instances would use ``::()`` to denote the instance like this::
+
+ test_foo.py::Test::()::test_bar
+
+ The extra ``::()`` was puzzling to most users and has been removed, so that the test id becomes now::
+
+ test_foo.py::Test::test_bar
+
+ This change could not accompany a deprecation period as is usual when user-facing functionality changes because
+ it was not really possible to detect when the functionality was being used explicitly.
+
+ The extra ``::()`` might have been removed in some places internally already,
+ which then led to confusion in places where it was expected, e.g. with
+ ``--deselect`` (:issue:`4127`).
+
+ Test class instances are also not listed with ``--collect-only`` anymore.
+
+
+
+Features
+--------
+
+- :issue:`4270`: The ``cache_dir`` option uses ``$TOX_ENV_DIR`` as prefix (if set in the environment).
+
+ This uses a different cache per tox environment by default.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`3554`: Fix ``CallInfo.__repr__`` for when the call is not finished yet.
+
+
+pytest 3.10.1 (2018-11-11)
+==========================
+
+Bug Fixes
+---------
+
+- :issue:`4287`: Fix nested usage of debugging plugin (pdb), e.g. with pytester's ``testdir.runpytest``.
+
+
+- :issue:`4304`: Block the ``stepwise`` plugin if ``cacheprovider`` is also blocked, as one depends on the other.
+
+
+- :issue:`4306`: Parse ``minversion`` as an actual version and not as dot-separated strings.
+
+
+- :issue:`4310`: Fix duplicate collection due to multiple args matching the same packages.
+
+
+- :issue:`4321`: Fix ``item.nodeid`` with resolved symlinks.
+
+
+- :issue:`4325`: Fix collection of direct symlinked files, where the target does not match ``python_files``.
+
+
+- :issue:`4329`: Fix TypeError in report_collect with _collect_report_last_write.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`4305`: Replace byte/unicode helpers in test_capture with python level syntax.
+
+
+pytest 3.10.0 (2018-11-03)
+==========================
+
+Features
+--------
+
+- :issue:`2619`: Resume capturing output after ``continue`` with ``__import__("pdb").set_trace()``.
+
+ This also adds a new ``pytest_leave_pdb`` hook, and passes in ``pdb`` to the
+ existing ``pytest_enter_pdb`` hook.
+
+
+- :issue:`4147`: Add ``--sw``, ``--stepwise`` as an alternative to ``--lf -x`` for stopping at the first failure, but starting the next test invocation from that test. See :ref:`the documentation <cache stepwise>` for more info.
+
+
+- :issue:`4188`: Make ``--color`` emit colorful dots when not running in verbose mode. Earlier, it would only colorize the test-by-test output if ``--verbose`` was also passed.
+
+
+- :issue:`4225`: Improve performance with collection reporting in non-quiet mode with terminals.
+
+ The "collecting …" message is only printed/updated every 0.5s.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`2701`: Fix false ``RemovedInPytest4Warning: usage of Session... is deprecated, please use pytest`` warnings.
+
+
+- :issue:`4046`: Fix problems with running tests in package ``__init__.py`` files.
+
+
+- :issue:`4260`: Swallow warnings during anonymous compilation of source.
+
+
+- :issue:`4262`: Fix access denied error when deleting stale directories created by ``tmpdir`` / ``tmp_path``.
+
+
+- :issue:`611`: Naming a fixture ``request`` will now raise a warning: the ``request`` fixture is internal and
+ should not be overwritten as it will lead to internal errors.
+
+- :issue:`4266`: Handle (ignore) exceptions raised during collection, e.g. with Django's LazySettings proxy class.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`4255`: Added missing documentation about the fact that module names passed to filter warnings are not regex-escaped.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`4272`: Display the cache directory also in non-verbose mode if it is non-default.
+
+
+- :issue:`4277`: pdb: improve message about output capturing with ``set_trace``.
+
+ Do not display "IO-capturing turned off/on" when ``-s`` is used to avoid
+ confusion.
+
+
+- :issue:`4279`: Improve message and stack level of warnings issued by ``monkeypatch.setenv`` when the value of the environment variable is not a ``str``.
+
+
+pytest 3.9.3 (2018-10-27)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`4174`: Fix "ValueError: Plugin already registered" with conftest plugins via symlink.
+
+
+- :issue:`4181`: Handle race condition between creation and deletion of temporary folders.
+
+
+- :issue:`4221`: Fix bug where the warning summary at the end of the test session was not showing the test where the warning originated.
+
+
+- :issue:`4243`: Fix regression when ``stacklevel`` for warnings was passed as positional argument on python2.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`3851`: Add reference to ``empty_parameter_set_mark`` ini option in documentation of ``@pytest.mark.parametrize``
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`4028`: Revert patching of ``sys.breakpointhook`` since it appears to do nothing.
+
+
+- :issue:`4233`: Apply an import sorter (``reorder-python-imports``) to the codebase.
+
+
+- :issue:`4248`: Remove use of the unnecessary compat shim ``six.binary_type``.
+
+
+pytest 3.9.2 (2018-10-22)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`2909`: Improve error message when a recursive dependency between fixtures is detected.
+
+
+- :issue:`3340`: Fix logging messages not shown in hooks ``pytest_sessionstart()`` and ``pytest_sessionfinish()``.
+
+
+- :issue:`3533`: Fix unescaped XML raw objects in JUnit report for skipped tests
+
+
+- :issue:`3691`: Python 2: safely format warning message about passing unicode strings to ``warnings.warn``, which may cause
+ a surprising ``MemoryError`` exception when monkey patching ``warnings.warn`` itself.
+
+
+- :issue:`4026`: Improve error message when it is not possible to determine a function's signature.
+
+
+- :issue:`4177`: Pin ``setuptools>=40.0`` to support ``py_modules`` in ``setup.cfg``
+
+
+- :issue:`4179`: Restore the tmpdir behaviour of symlinking the current test run.
+
+
+- :issue:`4192`: Fix filename reported by ``warnings.warn`` when using ``recwarn`` under python2.
+
+
+pytest 3.9.1 (2018-10-16)
+=========================
+
+Features
+--------
+
+- :issue:`4159`: For test-suites containing test classes, the information about the subclassed
+ module is now output only if a higher verbosity level is specified (at least
+ "-vv").
+
+
+pytest 3.9.0 (2018-10-15 - not published due to a release automation bug)
+=========================================================================
+
+Deprecations
+------------
+
+- :issue:`3616`: The following accesses have been documented as deprecated for years, but are now actually emitting deprecation warnings.
+
+ * Access of ``Module``, ``Function``, ``Class``, ``Instance``, ``File`` and ``Item`` through ``Node`` instances. Now
+ users will see this warning::
+
+ usage of Function.Module is deprecated, please use pytest.Module instead
+
+ Users should just ``import pytest`` and access those objects using the ``pytest`` module.
+
+ * ``request.cached_setup``, this was the precursor of the setup/teardown mechanism available to fixtures. You can
+ consult :std:doc:`funcarg comparison section in the docs <funcarg_compare>`.
+
+ * Using objects named ``"Class"`` as a way to customize the type of nodes that are collected in ``Collector``
+ subclasses has been deprecated. Users instead should use ``pytest_collect_make_item`` to customize node types during
+ collection.
+
+ This issue should affect only advanced plugins that create new collection types, so if you see this warning
+ message please contact the authors so they can change the code.
+
+ * The warning that produces the message below has changed to ``RemovedInPytest4Warning``::
+
+ getfuncargvalue is deprecated, use getfixturevalue
+
+
+- :issue:`3988`: Add a deprecation warning for ``pytest.ensuretemp`` as it has been deprecated for a while.
+
+
+
+Features
+--------
+
+- :issue:`2293`: Improve usage error messages by hiding internal details which can be distracting and noisy.
+
+ This has the side effect that some error conditions that previously raised generic errors (such as
+ ``ValueError`` for unregistered marks) are now raising ``Failed`` exceptions.
+
+
+- :issue:`3332`: Improve the error displayed when a ``conftest.py`` file could not be imported.
+
+ In order to implement this, a new ``chain`` parameter was added to ``ExceptionInfo.getrepr``
+ to show or hide chained tracebacks in Python 3 (defaults to ``True``).
+
+
+- :issue:`3849`: Add ``empty_parameter_set_mark=fail_at_collect`` ini option for raising an exception when parametrize collects an empty set.
+
+
+- :issue:`3964`: Log messages generated in the collection phase are shown when
+ live-logging is enabled and/or when they are logged to a file.
+
+
+- :issue:`3985`: Introduce ``tmp_path`` as a fixture providing a Path object. Also introduce ``tmp_path_factory`` as
+ a session-scoped fixture for creating arbitrary temporary directories from any other fixture or test.
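+
+ A minimal sketch using the new fixture:
+
+ .. code-block:: python
+
+     def test_write_and_read(tmp_path):
+         # tmp_path is a pathlib.Path pointing at a fresh temporary directory
+         target = tmp_path / "hello.txt"
+         target.write_text("hello")
+         assert target.read_text() == "hello"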
+
+
+- :issue:`4013`: Deprecation warnings are now shown even if you customize the warnings filters yourself. In the previous version
+ any customization would override pytest's filters and deprecation warnings would fall back to being hidden by default.
+
+
+- :issue:`4073`: Allow specification of timeout for ``Testdir.runpytest_subprocess()`` and ``Testdir.run()``.
+
+
+- :issue:`4098`: Add a ``returncode`` argument to ``pytest.exit()`` to exit pytest with a specific return code.
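+
+ A minimal sketch (``DATABASE_URL`` is an arbitrary example of a required setting):
+
+ .. code-block:: python
+
+     import os
+
+     import pytest
+
+     def test_require_database():
+         # abort the entire session with a specific exit code if a prerequisite is missing
+         if not os.environ.get("DATABASE_URL"):
+             pytest.exit("DATABASE_URL is not set, aborting", returncode=3)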
+
+
+- :issue:`4102`: Reimplement ``pytest.deprecated_call`` using ``pytest.warns`` so it supports the ``match='...'`` keyword argument.
+
+ This has the side effect that ``pytest.deprecated_call`` now raises ``pytest.fail.Exception`` instead
+ of ``AssertionError``.
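+
+  A minimal sketch of the new keyword argument in use (``legacy_function`` is a made-up example):
+
+  .. code-block:: python
+
+      import warnings
+
+      import pytest
+
+      def legacy_function():
+          warnings.warn("use new_function instead", DeprecationWarning)
+
+      def test_deprecation_message():
+          # match= accepts a regular expression, like pytest.warns and pytest.raises
+          with pytest.deprecated_call(match="use new_function"):
+              legacy_function()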
+
+
+- :issue:`4149`: Require setuptools>=30.3 and move most of the metadata to ``setup.cfg``.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`2535`: Improve error message when test functions of ``unittest.TestCase`` subclasses use a parametrized fixture.
+
+
+- :issue:`3057`: ``request.fixturenames`` now correctly returns the names of fixtures created by ``request.getfixturevalue()``.
+
+
+- :issue:`3946`: Warning filters passed as command line options using ``-W`` now take precedence over filters defined in ``ini``
+ configuration files.
+
+
+- :issue:`4066`: Fix source reindenting by using ``textwrap.dedent`` directly.
+
+
+- :issue:`4102`: ``pytest.warns`` will capture previously-warned warnings in Python 2. Previously they were never raised.
+
+
+- :issue:`4108`: Resolve symbolic links for args.
+
+ This fixes running ``pytest tests/test_foo.py::test_bar``, where ``tests``
+ is a symlink to ``project/app/tests``:
+ previously ``project/app/conftest.py`` would be ignored for fixtures then.
+
+
+- :issue:`4132`: Fix duplicate printing of internal errors when using ``--pdb``.
+
+
+- :issue:`4135`: pathlib based tmpdir cleanup now correctly handles symlinks in the folder.
+
+
+- :issue:`4152`: Display the filename when encountering ``SyntaxWarning``.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`3713`: Update usefixtures documentation to clarify that it can't be used with fixture functions.
+
+
+- :issue:`4058`: Update fixture documentation to specify that a fixture can be invoked twice in the scope it's defined for.
+
+
+- :issue:`4064`: According to unittest.rst, ``setUpModule`` and ``tearDownModule`` were not implemented, but it turns out they are, so the unittest documentation has been updated accordingly.
+
+
+- :issue:`4151`: Add tempdir testing example to the CONTRIBUTING.rst guide.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`2293`: The internal ``MarkerError`` exception has been removed.
+
+
+- :issue:`3988`: Port the implementation of tmpdir to pathlib.
+
+
+- :issue:`4063`: Exclude 0.00 second entries from ``--duration`` output unless ``-vv`` is passed on the command-line.
+
+
+- :issue:`4093`: Fixed formatting of string literals in internal tests.
+
+
+pytest 3.8.2 (2018-10-02)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- :issue:`4036`: The ``item`` parameter of ``pytest_warning_captured`` hook is now documented as deprecated. We realized only after
+ the ``3.8`` release that this parameter is incompatible with ``pytest-xdist``.
+
+ Our policy is to not deprecate features during bug-fix releases, but in this case we believe it makes sense as we are
+ only documenting it as deprecated, without issuing warnings which might potentially break test suites. This will get
+ the word out that hook implementers should not use this parameter at all.
+
+ In a future release ``item`` will always be ``None`` and will emit a proper warning when a hook implementation
+ makes use of it.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`3539`: Fix reload on assertion rewritten modules.
+
+
+- :issue:`4034`: The ``.user_properties`` attribute of ``TestReport`` objects is a list
+ of (name, value) tuples, but could sometimes be instantiated as a tuple
+ of tuples. It is now always a list.
+
+
+- :issue:`4039`: No longer issue warnings about using ``pytest_plugins`` in non-top-level directories when using ``--pyargs``: the
+ current ``--pyargs`` mechanism is not reliable and might give false negatives.
+
+
+- :issue:`4040`: Exclude empty reports for passed tests when ``-rP`` option is used.
+
+
+- :issue:`4051`: Improve error message when an invalid Python expression is passed to the ``-m`` option.
+
+
+- :issue:`4056`: ``MonkeyPatch.setenv`` and ``MonkeyPatch.delenv`` issue a warning if the environment variable name is not ``str`` on Python 2.
+
+  In Python 2, adding ``unicode`` keys to ``os.environ`` causes problems with ``subprocess`` (and possibly other modules),
+  making this a subtle bug especially likely to surface when ``from __future__ import unicode_literals`` is used.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`3928`: Add possible values for fixture scope to docs.
+
+
+pytest 3.8.1 (2018-09-22)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`3286`: ``.pytest_cache`` directory is now automatically ignored by Git. Users who would like to contribute a solution for other SCMs please consult/comment on this issue.
+
+
+- :issue:`3749`: Fix the following error during collection of tests inside packages::
+
+ TypeError: object of type 'Package' has no len()
+
+
+- :issue:`3941`: Fix bug where indirect parametrization would consider the scope of all fixtures used by the test function to determine the parametrization scope, and not only the scope of the fixtures being parametrized.
+
+
+- :issue:`3973`: Fix crash of the assertion rewriter if a test changed the current working directory without restoring it afterwards.
+
+
+- :issue:`3998`: Fix issue that prevented some caplog properties (for example ``record_tuples``) from being available when entering the debugger with ``--pdb``.
+
+
+- :issue:`3999`: Fix ``UnicodeDecodeError`` in python2.x when a class returns a non-ascii binary ``__repr__`` in an assertion which also contains non-ascii text.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`3996`: New :std:doc:`deprecations` page shows all currently
+  deprecated features, the rationale behind each deprecation, and alternatives to update your code. It also lists features removed
+  from pytest in past major releases to help users of ancient pytest versions upgrade.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`3955`: Improve pre-commit detection for changelog filenames
+
+
+- :issue:`3975`: Remove legacy code around im_func as that was python2 only
+
+
+pytest 3.8.0 (2018-09-05)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- :issue:`2452`: ``Config.warn`` and ``Node.warn`` have been
+ deprecated, see :ref:`config.warn and node.warn deprecated` for rationale and
+ examples.
+
+- :issue:`3936`: ``@pytest.mark.filterwarnings`` second parameter is no longer regex-escaped,
+ making it possible to actually use regular expressions to check the warning message.
+
+ **Note**: regex-escaping the match string was an implementation oversight that might break test suites which depend
+ on the old behavior.
+
+
+
+Features
+--------
+
+- :issue:`2452`: Internal pytest warnings are now issued using the standard ``warnings`` module, making it possible to use
+ the standard warnings filters to manage those warnings. This introduces ``PytestWarning``,
+ ``PytestDeprecationWarning`` and ``RemovedInPytest4Warning`` warning types as part of the public API.
+
+ Consult :ref:`the documentation <internal-warnings>` for more info.
+
+
+- :issue:`2908`: ``DeprecationWarning`` and ``PendingDeprecationWarning`` are now shown by default if no other warning filter is
+ configured. This makes pytest more compliant with
+  :pep:`565#recommended-filter-settings-for-test-runners`. See
+ :ref:`the docs <deprecation-warnings>` for
+ more info.
+
+
+- :issue:`3251`: Warnings are now captured and displayed during test collection.
+
+
+- :issue:`3784`: ``PYTEST_DISABLE_PLUGIN_AUTOLOAD`` environment variable disables plugin auto-loading when set.
+
+
+- :issue:`3829`: Added the ``count`` option to ``console_output_style`` to enable displaying the progress as a count instead of a percentage.
+
+
+- :issue:`3837`: Added support for 'xfailed' and 'xpassed' outcomes to the ``pytester.RunResult.assert_outcomes`` signature.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`3911`: Terminal writer now takes into account unicode character width when writing out progress.
+
+
+- :issue:`3913`: Pytest now returns with correct exit code (EXIT_USAGEERROR, 4) when called with unknown arguments.
+
+
+- :issue:`3918`: Improve performance of assertion rewriting.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`3566`: Added a blurb in usage.rst for the usage of the ``-r`` flag, which is used to show extra test summary info.
+
+
+- :issue:`3907`: Corrected type of the exceptions collection passed to ``xfail``: ``raises`` argument accepts a ``tuple`` instead of ``list``.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`3853`: Removed ``"run all (no recorded failures)"`` message printed with ``--failed-first`` and ``--last-failed`` when there are no failed tests.
+
+
+pytest 3.7.4 (2018-08-29)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`3506`: Fix possible infinite recursion when writing ``.pyc`` files.
+
+
+- :issue:`3853`: Cache plugin now obeys the ``-q`` flag when ``--last-failed`` and ``--failed-first`` flags are used.
+
+
+- :issue:`3883`: Fix bad console output when using ``console_output_style=classic``.
+
+
+- :issue:`3888`: Fix macOS specific code using ``capturemanager`` plugin in doctests.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`3902`: Fix pytest.org links
+
+
+pytest 3.7.3 (2018-08-26)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`3033`: Fixtures during teardown can again use ``capsys`` and ``capfd`` to inspect output captured during tests.
+
+
+- :issue:`3773`: Fix collection of tests from ``__init__.py`` files if they match the ``python_files`` configuration option.
+
+
+- :issue:`3796`: Fix issue where teardown of fixtures of consecutive sub-packages were executed once, at the end of the outer
+ package.
+
+
+- :issue:`3816`: Fix bug where ``--show-capture=no`` option would still show logs printed during fixture teardown.
+
+
+- :issue:`3819`: Fix ``stdout/stderr`` not getting captured when real-time cli logging is active.
+
+
+- :issue:`3843`: Fix collection error when specifying test functions directly in the command line using ``test.py::test`` syntax together with ``--doctest-modules``.
+
+
+- :issue:`3848`: Fix bugs where unicode arguments could not be passed to ``testdir.runpytest`` on Python 2.
+
+
+- :issue:`3854`: Fix double collection of tests within packages when the filename starts with a capital letter.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`3824`: Added example for multiple glob pattern matches in ``python_files``.
+
+
+- :issue:`3833`: Added missing docs for ``pytester.Testdir``.
+
+
+- :issue:`3870`: Correct documentation for setuptools integration.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`3826`: Replace broken type annotations with type comments.
+
+
+- :issue:`3845`: Remove a reference to issue :issue:`568` from the documentation, which has since been
+ fixed.
+
+
+pytest 3.7.2 (2018-08-16)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`3671`: Fix ``filterwarnings`` not being registered as a builtin mark.
+
+
+- :issue:`3768`, :issue:`3789`: Fix test collection from packages mixed with normal directories.
+
+
+- :issue:`3771`: Fix infinite recursion during collection if a ``pytest_ignore_collect`` hook returns ``False`` instead of ``None``.
+
+
+- :issue:`3774`: Fix bug where decorated fixtures would lose functionality (for example ``@mock.patch``).
+
+
+- :issue:`3775`: Fix bug where importing modules or other objects with the ``pytest_`` prefix would raise a ``PluginValidationError``.
+
+
+- :issue:`3788`: Fix ``AttributeError`` during teardown of ``TestCase`` subclasses which raise an exception during ``__init__``.
+
+
+- :issue:`3804`: Fix traceback reporting for exceptions with ``__cause__`` cycles.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`3746`: Add documentation for ``metafunc.config`` that had been mistakenly hidden.
+
+
+pytest 3.7.1 (2018-08-02)
+=========================
+
+Bug Fixes
+---------
+
+- :issue:`3473`: Raise immediately if ``approx()`` is given an expected value of a type it doesn't understand (e.g. strings, nested dicts, etc.).
+
+
+- :issue:`3712`: Correctly represent the dimensions of a numpy array when calling ``repr()`` on ``approx()``.
+
+- :issue:`3742`: Fix incompatibility with third party plugins during collection, which produced the error ``object has no attribute '_collectfile'``.
+
+- :issue:`3745`: Display the absolute path if ``cache_dir`` is not relative to the ``rootdir`` instead of failing.
+
+
+- :issue:`3747`: Fix compatibility problem with plugins and the warning code issued by fixture functions when they are called directly.
+
+
+- :issue:`3748`: Fix infinite recursion in ``pytest.approx`` with arrays in ``numpy<1.13``.
+
+
+- :issue:`3757`: Pin pathlib2 to ``>=2.2.0`` as we require ``__fspath__`` support.
+
+
+- :issue:`3763`: Fix ``TypeError`` when the assertion message is ``bytes`` in python 3.
+
+
+pytest 3.7.0 (2018-07-30)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- :issue:`2639`: ``pytest_namespace`` has been :ref:`deprecated <pytest.namespace deprecated>`.
+
+
+- :issue:`3661`: Calling a fixture function directly, as opposed to requesting them in a test function, now issues a ``RemovedInPytest4Warning``. See :ref:`the documentation for rationale and examples <calling fixtures directly deprecated>`.
+
+
+
+Features
+--------
+
+- :issue:`2283`: New ``package`` fixture scope: fixtures are finalized when the last test of a *package* finishes. This feature is considered **experimental**, so use it sparingly.
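+
+  A minimal sketch of declaring a package-scoped fixture (the fixture name and resource are illustrative):
+
+  .. code-block:: python
+
+      import pytest
+
+      @pytest.fixture(scope="package")
+      def shared_resource():
+          resource = object()  # stand-in for something expensive to create
+          yield resource
+          # teardown runs after the last test of the package finishes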
+
+
+- :issue:`3576`: ``Node.add_marker`` now supports an ``append=True/False`` parameter to determine whether the mark comes last (default) or first.
+
+
+- :issue:`3579`: Fixture ``caplog`` now has a ``messages`` property, providing convenient access to the format-interpolated log messages without the extra data provided by the formatter/handler.
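+
+  For example, a small sketch of the new property (assuming the default WARNING capture level):
+
+  .. code-block:: python
+
+      import logging
+
+      def test_messages(caplog):
+          logging.getLogger().warning("disk usage at %d%%", 90)
+          # .messages holds only the interpolated message strings
+          assert caplog.messages == ["disk usage at 90%"]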
+
+
+- :issue:`3610`: New ``--trace`` option to enter the debugger at the start of a test.
+
+
+- :issue:`3623`: Introduce ``pytester.copy_example`` as helper to do acceptance tests against examples from the project.
+
+
+
+Bug Fixes
+---------
+
+- :issue:`2220`: Fix a bug where fixtures overridden by direct parameters (for example parametrization) were being instantiated even if they were not being used by a test.
+
+
+- :issue:`3695`: Fix ``ApproxNumpy`` initialisation argument mix-up: the ``abs`` and ``rel`` tolerances were flipped, causing strange comparison results.
+  Added tests to check the ``abs`` and ``rel`` tolerances for ``np.array`` and a test for expecting ``nan`` with ``np.array()``.
+
+
+- :issue:`980`: Fix truncated locals output in verbose mode.
+
+
+
+Improved Documentation
+----------------------
+
+- :issue:`3295`: Correct the usage documentation of ``--last-failed-no-failures`` by adding the missing ``--last-failed`` argument in the presented examples, because they were misleading and led readers to think that the missing argument was not needed.
+
+
+
+Trivial/Internal Changes
+------------------------
+
+- :issue:`3519`: Now a ``README.md`` file is created in ``.pytest_cache`` to make it clear why the directory exists.
+
+
+pytest 3.6.4 (2018-07-28)
+=========================
+
+Bug Fixes
+---------
+
+- Invoke pytest using ``-mpytest`` so ``sys.path`` does not get polluted by packages installed in ``site-packages``. (:issue:`742`)
+
+
+Improved Documentation
+----------------------
+
+- Use ``smtp_connection`` instead of ``smtp`` in fixtures documentation to avoid possible confusion. (:issue:`3592`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Remove obsolete ``__future__`` imports. (:issue:`2319`)
+
+- Add CITATION to provide information on how to formally cite pytest. (:issue:`3402`)
+
+- Replace broken type annotations with type comments. (:issue:`3635`)
+
+- Pin ``pluggy`` to ``<0.8``. (:issue:`3727`)
+
+
+pytest 3.6.3 (2018-07-04)
+=========================
+
+Bug Fixes
+---------
+
+- Fix ``ImportWarning`` triggered by explicit relative imports in
+ assertion-rewritten package modules. (:issue:`3061`)
+
+- Fix error in ``pytest.approx`` when dealing with 0-dimension numpy
+ arrays. (:issue:`3593`)
+
+- No longer raise ``ValueError`` when using the ``get_marker`` API. (:issue:`3605`)
+
+- Fix problem where log messages with non-ascii characters would not
+ appear in the output log file.
+ (:issue:`3630`)
+
+- No longer raise ``AttributeError`` when legacy marks can't be stored in
+ functions. (:issue:`3631`)
+
+
+Improved Documentation
+----------------------
+
+- The description above the example for ``@pytest.mark.skipif`` now better
+ matches the code. (:issue:`3611`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Internal refactoring: removed unused ``CallSpec2._globalid_args``
+ attribute and ``metafunc`` parameter from ``CallSpec2.copy()``. (:issue:`3598`)
+
+- Silence usage of ``reduce`` warning in Python 2 (:issue:`3609`)
+
+- Fix usage of ``attr.ib`` deprecated ``convert`` parameter. (:issue:`3653`)
+
+
+pytest 3.6.2 (2018-06-20)
+=========================
+
+Bug Fixes
+---------
+
+- Fix regression in ``Node.add_marker`` by extracting the mark object of a
+ ``MarkDecorator``. (:issue:`3555`)
+
+- Warnings without ``location`` were reported as ``None``. This is corrected to
+ now report ``<undetermined location>``. (:issue:`3563`)
+
+- Continue to call finalizers in the stack when a finalizer in a former scope
+ raises an exception. (:issue:`3569`)
+
+- Fix encoding error with ``print`` statements in doctests (:issue:`3583`)
+
+
+Improved Documentation
+----------------------
+
+- Add documentation for the ``--strict`` flag. (:issue:`3549`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Update old quotation style to parens in fixture.rst documentation. (:issue:`3525`)
+
+- Improve display of hint about ``--fulltrace`` with ``KeyboardInterrupt``.
+ (:issue:`3545`)
+
+- pytest's testsuite is no longer runnable through ``python setup.py test`` --
+ instead invoke ``pytest`` or ``tox`` directly. (:issue:`3552`)
+
+- Fix typo in documentation (:issue:`3567`)
+
+
+pytest 3.6.1 (2018-06-05)
+=========================
+
+Bug Fixes
+---------
+
+- Fixed a bug where stdout and stderr were logged twice by junitxml when a test
+ was marked xfail. (:issue:`3491`)
+
+- Fix ``usefixtures`` mark applied to unittest tests by correctly instantiating
+ ``FixtureInfo``. (:issue:`3498`)
+
+- Fix assertion rewriter compatibility with libraries that monkey patch
+ ``file`` objects. (:issue:`3503`)
+
+
+Improved Documentation
+----------------------
+
+- Added a section on how to use fixtures as factories to the fixture
+ documentation. (:issue:`3461`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Enable caching for pip/pre-commit in order to reduce build time on
+ travis/appveyor. (:issue:`3502`)
+
+- Switch pytest to the src/ layout as we already suggested it for good practice
+ - now we implement it as well. (:issue:`3513`)
+
+- Fix an ``if`` in tests to support 3.7.0b5, where a docstring handling change
+  in the AST got reverted. (:issue:`3530`)
+
+- Remove some python2.5 compatibility code. (:issue:`3529`)
+
+
+pytest 3.6.0 (2018-05-23)
+=========================
+
+Features
+--------
+
+- Revamp the internals of the ``pytest.mark`` implementation with correct per
+ node handling which fixes a number of long standing bugs caused by the old
+ design. This introduces new ``Node.iter_markers(name)`` and
+ ``Node.get_closest_marker(name)`` APIs. Users are **strongly encouraged** to
+ read the :ref:`reasons for the revamp in the docs <marker-revamp>`,
+ or jump over to details about :ref:`updating existing code to use the new APIs
+ <update marker code>`.
+ (:issue:`3317`)
+
+- Now when ``@pytest.fixture`` is applied more than once to the same function a
+ ``ValueError`` is raised. This buggy behavior would cause surprising problems
+  and if it was working for a test suite it was mostly by accident. (:issue:`2334`)
+
+- Support for Python 3.7's builtin ``breakpoint()`` method, see
+ :ref:`Using the builtin breakpoint function <breakpoint-builtin>` for
+ details. (:issue:`3180`)
+
+- ``monkeypatch`` now supports a ``context()`` function which acts as a context
+ manager which undoes all patching done within the ``with`` block. (:issue:`3290`)
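+
+  A minimal sketch of the context manager form:
+
+  .. code-block:: python
+
+      import functools
+
+      def test_partial(monkeypatch):
+          with monkeypatch.context() as m:
+              m.setattr(functools, "partial", 3)
+              assert functools.partial == 3
+          # every patch applied inside the block is undone here
+          assert callable(functools.partial)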
+
+- The ``--pdb`` option now causes KeyboardInterrupt to enter the debugger,
+ instead of stopping the test session. On python 2.7, hitting CTRL+C again
+ exits the debugger. On python 3.2 and higher, use CTRL+D. (:issue:`3299`)
+
+- pytest no longer changes the log level of the root logger when the
+ ``log-level`` parameter has greater numeric value than that of the level of
+ the root logger, which makes it play better with custom logging configuration
+ in user code. (:issue:`3307`)
+
+
+Bug Fixes
+---------
+
+- A rare race-condition which might result in corrupted ``.pyc`` files on
+ Windows has been hopefully solved. (:issue:`3008`)
+
+- Also use iter_marker for discovering the marks that apply to marker
+  expressions from the cli, to avoid the bad data from the legacy mark storage.
+ (:issue:`3441`)
+
+- When showing diffs of failed assertions where the contents contain only
+ whitespace, escape them using ``repr()`` first to make it easy to spot the
+ differences. (:issue:`3443`)
+
+
+Improved Documentation
+----------------------
+
+- Change documentation copyright year to a range which auto-updates itself each
+ time it is published. (:issue:`3303`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- ``pytest`` now depends on the `python-atomicwrites
+ <https://github.com/untitaker/python-atomicwrites>`_ library. (:issue:`3008`)
+
+- Update all pypi.python.org URLs to pypi.org. (:issue:`3431`)
+
+- Detect `pytest_` prefixed hooks using the internal plugin manager since
+ ``pluggy`` is deprecating the ``implprefix`` argument to ``PluginManager``.
+ (:issue:`3487`)
+
+- Import ``Mapping`` and ``Sequence`` from ``_pytest.compat`` instead of
+ directly from ``collections`` in ``python_api.py::approx``. Add ``Mapping``
+ to ``_pytest.compat``, import it from ``collections`` on python 2, but from
+ ``collections.abc`` on Python 3 to avoid a ``DeprecationWarning`` on Python
+ 3.7 or newer. (:issue:`3497`)
+
+
+pytest 3.5.1 (2018-04-23)
+=========================
+
+
+Bug Fixes
+---------
+
+- Reset ``sys.last_type``, ``sys.last_value`` and ``sys.last_traceback`` before
+ each test executes. Those attributes are added by pytest during the test run
+ to aid debugging, but were never reset so they would create a leaking
+ reference to the last failing test's frame which in turn could never be
+ reclaimed by the garbage collector. (:issue:`2798`)
+
+- ``pytest.raises`` now raises ``TypeError`` when receiving an unknown keyword
+ argument. (:issue:`3348`)
+
+- ``pytest.raises`` now works with exception classes that look like iterables.
+ (:issue:`3372`)
+
+
+Improved Documentation
+----------------------
+
+- Fix typo in ``caplog`` fixture documentation, which incorrectly identified
+ certain attributes as methods. (:issue:`3406`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Added a more indicative error message when parametrizing a function whose
+ argument takes a default value. (:issue:`3221`)
+
+- Remove internal ``_pytest.terminal.flatten`` function in favor of
+ ``more_itertools.collapse``. (:issue:`3330`)
+
+- Import some modules from ``collections.abc`` instead of ``collections`` as
+ the former modules trigger ``DeprecationWarning`` in Python 3.7. (:issue:`3339`)
+
+- record_property is no longer experimental, removing the warnings was
+ forgotten. (:issue:`3360`)
+
+- Mention in documentation and CLI help that fixtures with leading ``_`` are
+ printed by ``pytest --fixtures`` only if the ``-v`` option is added. (:issue:`3398`)
+
+
+pytest 3.5.0 (2018-03-21)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- ``record_xml_property`` fixture is now deprecated in favor of the more
+ generic ``record_property``. (:issue:`2770`)
+
+- Defining ``pytest_plugins`` is now deprecated in non-top-level conftest.py
+ files, because they "leak" to the entire directory tree.
+ :ref:`See the docs <pytest_plugins in non-top-level conftest files deprecated>`
+ for the rationale behind this decision (:issue:`3084`)
+
+
+Features
+--------
+
+- New ``--show-capture`` command-line option that allows specifying how to
+ display captured output when tests fail: ``no``, ``stdout``, ``stderr``,
+ ``log`` or ``all`` (the default). (:issue:`1478`)
+
+- New ``--rootdir`` command-line option to override the rules for discovering
+ the root directory. See :doc:`customize <reference/customize>` in the documentation for
+ details. (:issue:`1642`)
+
+- Fixtures are now instantiated based on their scopes, with higher-scoped
+  fixtures (such as ``session``) being instantiated before lower-scoped
+  fixtures (such as ``function``). The relative order of fixtures of the same
+  scope is kept unchanged, based on their declaration order and their
+ dependencies. (:issue:`2405`)
+
+- ``record_xml_property`` renamed to ``record_property`` and is now compatible
+ with xdist, markers and any reporter. ``record_xml_property`` name is now
+ deprecated. (:issue:`2770`)
+
+- New ``--nf``, ``--new-first`` options: run new tests first followed by the
+ rest of the tests, in both cases tests are also sorted by the file modified
+ time, with more recent files coming first. (:issue:`3034`)
+
+- New ``--last-failed-no-failures`` command-line option that allows specifying
+  the behavior of the cache plugin's ``--last-failed`` feature when no tests
+ failed in the last run (or no cache was found): ``none`` or ``all`` (the
+ default). (:issue:`3139`)
+
+- New ``--doctest-continue-on-failure`` command-line option to enable doctests
+ to show multiple failures for each snippet, instead of stopping at the first
+ failure. (:issue:`3149`)
+
+- Captured log messages are added to the ``<system-out>`` tag in the generated
+ junit xml file if the ``junit_logging`` ini option is set to ``system-out``.
+ If the value of this ini option is ``system-err``, the logs are written to
+ ``<system-err>``. The default value for ``junit_logging`` is ``no``, meaning
+ captured logs are not written to the output file. (:issue:`3156`)
+
+- Allow the logging plugin to handle ``pytest_runtest_logstart`` and
+ ``pytest_runtest_logfinish`` hooks when live logs are enabled. (:issue:`3189`)
+
+- Passing ``--log-cli-level`` in the command-line now automatically activates
+ live logging. (:issue:`3190`)
+
+- Add command line option ``--deselect`` to allow deselection of individual
+ tests at collection time. (:issue:`3198`)
+
+- Captured logs are printed before entering pdb. (:issue:`3204`)
+
+- Deselected item count is now shown before tests are run, e.g. ``collected X
+ items / Y deselected``. (:issue:`3213`)
+
+- The builtin module ``platform`` is now available for use in expressions in
+ ``pytest.mark``. (:issue:`3236`)
+
+- The *short test summary info* section now is displayed after tracebacks and
+ warnings in the terminal. (:issue:`3255`)
+
+- New ``--verbosity`` flag to set verbosity level explicitly. (:issue:`3296`)
+
+- ``pytest.approx`` now accepts comparing a numpy array with a scalar. (:issue:`3312`)
+
+
+Bug Fixes
+---------
+
+- Suppress ``IOError`` when closing the temporary file used for capturing
+ streams in Python 2.7. (:issue:`2370`)
+
+- Fixed ``clear()`` method on ``caplog`` fixture which cleared ``records``, but
+ not the ``text`` property. (:issue:`3297`)
+
+- During test collection, when stdin is not allowed to be read, the
+  ``DontReadFromStdin`` object still allows itself to be iterable and resolved
+ to an iterator without crashing. (:issue:`3314`)
+
+
+Improved Documentation
+----------------------
+
+- Added a :doc:`reference <reference/reference>` page
+ to the docs. (:issue:`1713`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Change minimum requirement of ``attrs`` to ``17.4.0``. (:issue:`3228`)
+
+- Renamed example directories so all tests pass when run from the base
+ directory. (:issue:`3245`)
+
+- Internal ``mark.py`` module has been turned into a package. (:issue:`3250`)
+
+- ``pytest`` now depends on the `more-itertools
+ <https://github.com/erikrose/more-itertools>`_ package. (:issue:`3265`)
+
+- Added warning when ``[pytest]`` section is used in a ``.cfg`` file passed
+ with ``-c`` (:issue:`3268`)
+
+- ``nodeids`` can now be passed explicitly to ``FSCollector`` and ``Node``
+ constructors. (:issue:`3291`)
+
+- Internal refactoring of ``FormattedExcinfo`` to use ``attrs`` facilities and
+ remove old support code for legacy Python versions. (:issue:`3292`)
+
+- Refactoring to unify how verbosity is handled internally. (:issue:`3296`)
+
+- Internal refactoring to better integrate with argparse. (:issue:`3304`)
+
+- Fix a python example when calling a fixture in doc/en/usage.rst (:issue:`3308`)
+
+
+pytest 3.4.2 (2018-03-04)
+=========================
+
+Bug Fixes
+---------
+
+- Removed progress information when capture option is ``no``. (:issue:`3203`)
+
+- Refactor check of bindir from ``exists`` to ``isdir``. (:issue:`3241`)
+
+- Fix ``TypeError`` issue when using ``approx`` with a ``Decimal`` value.
+ (:issue:`3247`)
+
+- Fix reference cycle generated when using the ``request`` fixture. (:issue:`3249`)
+
+- ``[tool:pytest]`` sections in ``*.cfg`` files passed by the ``-c`` option are
+ now properly recognized. (:issue:`3260`)
+
+
+Improved Documentation
+----------------------
+
+- Add logging plugin to plugins list. (:issue:`3209`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Fix minor typo in fixture.rst (:issue:`3259`)
+
+
+pytest 3.4.1 (2018-02-20)
+=========================
+
+Bug Fixes
+---------
+
+- Move import of ``doctest.UnexpectedException`` to top-level to avoid possible
+ errors when using ``--pdb``. (:issue:`1810`)
+
+- Added printing of captured stdout/stderr before entering pdb, and improved a
+ test which was giving false negatives about output capturing. (:issue:`3052`)
+
+- Fix ordering of tests using parametrized fixtures which can lead to fixtures
+ being created more than necessary. (:issue:`3161`)
+
+- Fix bug where logging happening at hooks outside of "test run" hooks would
+ cause an internal error. (:issue:`3184`)
+
+- Detect arguments injected by ``unittest.mock.patch`` decorator correctly when
+ pypi ``mock.patch`` is installed and imported. (:issue:`3206`)
+
+- Errors shown when a ``pytest.raises()`` with ``match=`` fails are now cleaner
+ on what happened: When no exception was raised, the "matching '...'" part got
+ removed as it falsely implies that an exception was raised but it didn't
+ match. When a wrong exception was raised, it's now thrown (like
+  ``pytest.raises()`` without ``match=`` would) instead of complaining about
+ the unmatched text. (:issue:`3222`)
+
+- Fixed output capture handling in doctests on macOS. (:issue:`985`)
+
+
+Improved Documentation
+----------------------
+
+- Add Sphinx parameter docs for ``match`` and ``message`` args to
+ ``pytest.raises``. (:issue:`3202`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- pytest has changed the publication procedure and is now being published to
+ PyPI directly from Travis. (:issue:`3060`)
+
+- Rename ``ParameterSet._for_parameterize()`` to ``_for_parametrize()`` in
+ order to comply with the naming convention. (:issue:`3166`)
+
+- Skip failing pdb/doctest test on mac. (:issue:`985`)
+
+
+pytest 3.4.0 (2018-01-30)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- All pytest classes now subclass ``object`` for better Python 2/3 compatibility.
+ This should not affect user code except in very rare edge cases. (:issue:`2147`)
+
+
+Features
+--------
+
+- Introduce ``empty_parameter_set_mark`` ini option to select which mark to
+ apply when ``@pytest.mark.parametrize`` is given an empty set of parameters.
+ Valid options are ``skip`` (default) and ``xfail``. Note that it is planned
+ to change the default to ``xfail`` in future releases as this is considered
+ less error prone. (:issue:`2527`)
+
+- **Incompatible change**: after community feedback the :doc:`logging <how-to/logging>` functionality has
+ undergone some changes. Please consult the :ref:`logging documentation <log_changes_3_4>`
+ for details. (:issue:`3013`)
+
+- Console output falls back to "classic" mode when capturing is disabled (``-s``),
+ otherwise the output gets garbled to the point of being useless. (:issue:`3038`)
+
+- New :hook:`pytest_runtest_logfinish`
+ hook which is called when a test item has finished executing, analogous to
+ :hook:`pytest_runtest_logstart`.
+ (:issue:`3101`)
+
+- Improve performance when collecting tests using many fixtures. (:issue:`3107`)
+
+- New ``caplog.get_records(when)`` method which provides access to the captured
+ records for the ``"setup"``, ``"call"`` and ``"teardown"``
+ testing stages. (:issue:`3117`)
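+
+  A sketch of inspecting records per stage (the ``noisy_setup`` fixture is made up for illustration):
+
+  .. code-block:: python
+
+      import logging
+
+      import pytest
+
+      @pytest.fixture
+      def noisy_setup():
+          logging.getLogger().warning("prepared resource")
+          yield
+
+      def test_setup_records(noisy_setup, caplog):
+          # records emitted while fixtures ran are available under the "setup" stage
+          setup_messages = [rec.getMessage() for rec in caplog.get_records("setup")]
+          assert "prepared resource" in setup_messages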
+
+- New fixture ``record_xml_attribute`` that allows modifying and inserting
+ attributes on the ``<testcase>`` xml node in JUnit reports. (:issue:`3130`)
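+
+  For illustration, a minimal sketch (the attribute names are arbitrary examples):
+
+  .. code-block:: python
+
+      def test_function(record_xml_attribute):
+          # adds attributes to this test's <testcase> element in the JUnit XML report
+          record_xml_attribute("assertions", "REQ-1234")
+          record_xml_attribute("classname", "custom_classname")
+          assert True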
+
+- The default cache directory has been renamed from ``.cache`` to
+ ``.pytest_cache`` after community feedback that the name ``.cache`` did not
+ make it clear that it was used by pytest. (:issue:`3138`)
+
+- Colorize the levelname column in the live-log output. (:issue:`3142`)
+
+
+Bug Fixes
+---------
+
+- Fix hanging pexpect test on MacOS by using flush() instead of wait().
+ (:issue:`2022`)
+
+- Fix restoring Python state after in-process pytest runs with the
+ ``pytester`` plugin; this may break tests using multiple inprocess
+ pytest runs if later ones depend on earlier ones leaking global interpreter
+ changes. (:issue:`3016`)
+
+- Fix skipping plugin reporting hook when test aborted before plugin setup
+ hook. (:issue:`3074`)
+
+- Fix progress percentage reported when tests fail during teardown. (:issue:`3088`)
+
+- **Incompatible change**: ``-o/--override`` option no longer eats all the
+ remaining options, which can lead to surprising behavior: for example,
+ ``pytest -o foo=1 /path/to/test.py`` would fail because ``/path/to/test.py``
+ would be considered as part of the ``-o`` command-line argument. One
+ consequence of this is that now multiple configuration overrides need
+ multiple ``-o`` flags: ``pytest -o foo=1 -o bar=2``. (:issue:`3103`)
+
+
+Improved Documentation
+----------------------
+
+- Document hooks (defined with ``historic=True``) which cannot be used with
+ ``hookwrapper=True``. (:issue:`2423`)
+
+- Clarify that warning capturing doesn't change the warning filter by default.
+ (:issue:`2457`)
+
+- Clarify a possible confusion when using pytest_fixture_setup with fixture
+ functions that return None. (:issue:`2698`)
+
+- Fix the wording of a sentence on doctest flags used in pytest. (:issue:`3076`)
+
+- Prefer ``https://*.readthedocs.io`` over ``http://*.rtfd.org`` for links in
+ the documentation. (:issue:`3092`)
+
+- Improve readability (wording, grammar) of Getting Started guide (:issue:`3131`)
+
+- Added note that calling pytest.main multiple times from the same process is
+ not recommended because of import caching. (:issue:`3143`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Show a simple and easy error when keyword expressions trigger a syntax error
+ (for example, ``"-k foo and import"`` will show an error that you can not use
+ the ``import`` keyword in expressions). (:issue:`2953`)
+
+- Change parametrized automatic test id generation to use the ``__name__``
+ attribute of functions instead of the fallback argument name plus counter.
+ (:issue:`2976`)
+
+- Replace py.std with stdlib imports. (:issue:`3067`)
+
+- Corrected 'you' to 'your' in logging docs. (:issue:`3129`)
+
+
+pytest 3.3.2 (2017-12-25)
+=========================
+
+Bug Fixes
+---------
+
+- pytester: ignore files used to obtain current user metadata in the fd leak
+ detector. (:issue:`2784`)
+
+- Fix **memory leak** where objects returned by fixtures were never destructed
+ by the garbage collector. (:issue:`2981`)
+
+- Fix conversion of pyargs to filename to not convert symlinks on Python 2. (:issue:`2985`)
+
+- ``PYTEST_DONT_REWRITE`` is now checked for plugins too rather than only for
+ test modules. (:issue:`2995`)
+
+
+Improved Documentation
+----------------------
+
+- Add clarifying note about behavior of multiple parametrized arguments (:issue:`3001`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Code cleanup. (:issue:`3015`,
+ :issue:`3021`)
+
+- Clean up code by replacing imports and references of ``_ast`` to ``ast``.
+ (:issue:`3018`)
+
+
+pytest 3.3.1 (2017-12-05)
+=========================
+
+Bug Fixes
+---------
+
+- Fix issue about ``-p no:<plugin>`` having no effect. (:issue:`2920`)
+
+- Fix regression with warnings that contained non-strings in their arguments in
+ Python 2. (:issue:`2956`)
+
+- Always escape null bytes when setting ``PYTEST_CURRENT_TEST``. (:issue:`2957`)
+
+- Fix ``ZeroDivisionError`` when using the ``testmon`` plugin when no tests
+ were actually collected. (:issue:`2971`)
+
+- Bring back ``TerminalReporter.writer`` as an alias to
+ ``TerminalReporter._tw``. This alias was removed by accident in the ``3.3.0``
+ release. (:issue:`2984`)
+
+- The ``pytest-capturelog`` plugin is now also blacklisted, avoiding errors when
+ running pytest with it still installed. (:issue:`3004`)
+
+
+Improved Documentation
+----------------------
+
+- Fix broken link to plugin ``pytest-localserver``. (:issue:`2963`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Update github "bugs" link in ``CONTRIBUTING.rst`` (:issue:`2949`)
+
+
+pytest 3.3.0 (2017-11-23)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- pytest no longer supports Python **2.6** and **3.3**. Those Python versions
+ are EOL for some time now and incur maintenance and compatibility costs on
+ the pytest core team, and following up with the rest of the community we
+ decided that they will no longer be supported starting on this version. Users
+ which still require those versions should pin pytest to ``<3.3``. (:issue:`2812`)
+
+- Remove internal ``_preloadplugins()`` function. This removal is part of the
+ ``pytest_namespace()`` hook deprecation. (:issue:`2636`)
+
+- Internally change ``CallSpec2`` to have a list of marks instead of a broken
+ mapping of keywords. This removes the keywords attribute of the internal
+ ``CallSpec2`` class. (:issue:`2672`)
+
+- Remove ``ParameterSet.deprecated_arg_dict`` - it's not a public API and the lack
+ of the underscore was a naming error. (:issue:`2675`)
+
+- Remove the internal multi-typed attribute ``Node._evalskip`` and replace it
+ with the boolean ``Node._skipped_by_mark``. (:issue:`2767`)
+
+- The ``params`` list passed to ``pytest.fixture`` is now for
+ all effects considered immutable and frozen at the moment of the ``pytest.fixture``
+ call. Previously the list could be changed before the first invocation of the fixture
+ allowing for a form of dynamic parametrization (for example, updated from command-line options),
+ but this was an unwanted implementation detail which complicated the internals and prevented
+ some internal cleanup. See issue :issue:`2959`
+ for details and a recommended workaround.
+
+Features
+--------
+
+- ``pytest_fixture_post_finalizer`` hook can now receive a ``request``
+ argument. (:issue:`2124`)
+
+- Replace the old introspection code in compat.py that determines the available
+ arguments of fixtures with inspect.signature on Python 3 and
+ funcsigs.signature on Python 2. This should respect ``__signature__``
+ declarations on functions. (:issue:`2267`)
+
+- Report tests with global ``pytestmark`` variable only once. (:issue:`2549`)
+
+- Now pytest displays the total progress percentage while running tests. The
+ previous output style can be set by configuring the ``console_output_style``
+ setting to ``classic``. (:issue:`2657`)
+
+- Match ``warns`` signature to ``raises`` by adding ``match`` keyword. (:issue:`2708`)
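+
+  A short sketch of the new keyword argument:
+
+  .. code-block:: python
+
+      import warnings
+
+      import pytest
+
+      def test_warning_message():
+          # match= is interpreted as a regular expression
+          with pytest.warns(UserWarning, match=r"must be \d+ seconds"):
+              warnings.warn("value must be 42 seconds", UserWarning)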
+
+- pytest now captures and displays output from the standard ``logging`` module.
+ The user can control the logging level to be captured by specifying options
+ in ``pytest.ini``, the command line and also during individual tests using
+ markers. Also, a ``caplog`` fixture is available that enables users to test
+ the captured log during specific tests (similar to ``capsys`` for example).
+ For more information, please see the :doc:`logging docs <how-to/logging>`. This feature was
+ introduced by merging the popular :pypi:`pytest-catchlog` plugin, thanks to :user:`thisch`.
+ Be advised that during the merging the
+ backward compatibility interface with the defunct ``pytest-capturelog`` has
+ been dropped. (:issue:`2794`)
+
+- Add ``allow_module_level`` kwarg to ``pytest.skip()``, making it possible to skip the
+ whole module. (:issue:`2808`)
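+
+  A minimal sketch of a module-level skip (the platform check is just an example):
+
+  .. code-block:: python
+
+      import sys
+
+      import pytest
+
+      if sys.platform.startswith("win"):
+          # skips collection of the rest of this module instead of erroring out
+          pytest.skip("these tests only run on POSIX", allow_module_level=True)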
+
+- Allow setting ``file_or_dir``, ``-c``, and ``-o`` in PYTEST_ADDOPTS. (:issue:`2824`)
+
+- Return stdout/stderr capture results as a ``namedtuple``, so ``out`` and
+ ``err`` can be accessed by attribute. (:issue:`2879`)
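+
+  For example:
+
+  .. code-block:: python
+
+      def test_output(capsys):
+          print("hello")
+          captured = capsys.readouterr()
+          # attribute access instead of positional unpacking
+          assert captured.out == "hello\n"
+          assert captured.err == ""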
+
+- Add ``capfdbinary``, a version of ``capfd`` which returns bytes from
+ ``readouterr()``. (:issue:`2923`)
+
+- Add ``capsysbinary`` a version of ``capsys`` which returns bytes from
+ ``readouterr()``. (:issue:`2934`)
+
+- Implement feature to skip ``setup.py`` files when run with
+ ``--doctest-modules``. (:issue:`502`)
+
+
+Bug Fixes
+---------
+
+- Resume output capturing after ``capsys/capfd.disabled()`` context manager.
+ (:issue:`1993`)
+
+- ``pytest_fixture_setup`` and ``pytest_fixture_post_finalizer`` hooks are now
+ called for all ``conftest.py`` files. (:issue:`2124`)
+
+- If an exception happens while loading a plugin, pytest no longer hides the
+ original traceback. In Python 2 it will show the original traceback with a new
+  message that explains in which plugin the error occurred. In Python 3 it will show two chained
+  exceptions: the original exception raised while loading the plugin, in addition to the
+  exception that pytest raises about failing to load the plugin. (:issue:`2491`)
+
+- ``capsys`` and ``capfd`` can now be used by other fixtures. (:issue:`2709`)
+
+- Internal ``pytester`` plugin properly encodes ``bytes`` arguments to
+ ``utf-8``. (:issue:`2738`)
+
+- ``testdir`` now uses the same method used by ``tmpdir`` to create its
+ temporary directory. This changes the final structure of the ``testdir``
+ directory slightly, but should not affect usage in normal scenarios and
+ avoids a number of potential problems. (:issue:`2751`)
+
+- pytest no longer complains about warnings with unicode messages being
+ non-ascii compatible even for ascii-compatible messages. As a result of this,
+ warnings with unicode messages are converted first to an ascii representation
+ for safety. (:issue:`2809`)
+
+- Change return value of pytest command when ``--maxfail`` is reached from
+ ``2`` (interrupted) to ``1`` (failed). (:issue:`2845`)
+
+- Fix issue in assertion rewriting which could lead it to rewrite modules which
+ should not be rewritten. (:issue:`2939`)
+
+- Handle marks without description in ``pytest.ini``. (:issue:`2942`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- pytest now depends on :pypi:`attrs` for internal
+ structures to ease code maintainability. (:issue:`2641`)
+
+- Refactored internal Python 2/3 compatibility code to use ``six``. (:issue:`2642`)
+
+- Stop vendoring ``pluggy`` - we're missing out on its latest changes for not
+ much benefit (:issue:`2719`)
+
+- Internal refactor: simplify ascii string escaping by using the
+ backslashreplace error handler in newer Python 3 versions. (:issue:`2734`)
+
+- Remove unnecessary mark evaluator in unittest plugin (:issue:`2767`)
+
+- Calls to ``Metafunc.addcall`` now emit a deprecation warning. This function
+ is scheduled to be removed in ``pytest-4.0``. (:issue:`2876`)
+
+- Internal move of the parameterset extraction to a more maintainable place.
+ (:issue:`2877`)
+
+- Internal refactoring to simplify scope node lookup. (:issue:`2910`)
+
+- Configure ``pytest`` to prevent pip from installing pytest in unsupported
+ Python versions. (:issue:`2922`)
+
+
+pytest 3.2.5 (2017-11-15)
+=========================
+
+Bug Fixes
+---------
+
+- Remove ``py<1.5`` restriction from ``pytest`` as this can cause version
+ conflicts in some installations. (:issue:`2926`)
+
+
+pytest 3.2.4 (2017-11-13)
+=========================
+
+Bug Fixes
+---------
+
+- Fix the bug where running with ``--pyargs`` would result in items with
+ empty ``parent.nodeid`` if run from a different root directory. (:issue:`2775`)
+
+- Fix issue with ``@pytest.mark.parametrize`` if argnames was specified as keyword arguments.
+ (:issue:`2819`)
+
+- Strip whitespace from marker names when reading them from INI config. (:issue:`2856`)
+
+- Show full context of doctest source in the pytest output, if the line number of
+ failed example in the docstring is < 9. (:issue:`2882`)
+
+- Match fixture paths against actual path segments in order to avoid matching folders which share a prefix.
+ (:issue:`2836`)
+
+Improved Documentation
+----------------------
+
+- Introduce a dedicated section about conftest.py. (:issue:`1505`)
+
+- Explicitly mention ``xpass`` in the documentation of ``xfail``. (:issue:`1997`)
+
+- Append example for pytest.param in the example/parametrize document. (:issue:`2658`)
+
+- Clarify language of proposal for fixtures parameters (:issue:`2893`)
+
+- List python 3.6 in the documented supported versions in the getting started
+ document. (:issue:`2903`)
+
+- Clarify the documentation of available fixture scopes. (:issue:`538`)
+
+- Add documentation about the ``python -m pytest`` invocation adding the
+ current directory to sys.path. (:issue:`911`)
+
+
+pytest 3.2.3 (2017-10-03)
+=========================
+
+Bug Fixes
+---------
+
+- Fix crash in tab completion when no prefix is given. (:issue:`2748`)
+
+- The equality checking function (``__eq__``) of ``MarkDecorator`` returns
+ ``False`` if one object is not an instance of ``MarkDecorator``. (:issue:`2758`)
+
+- When running ``pytest --fixtures-per-test``: don't crash if an item has no
+ _fixtureinfo attribute (e.g. doctests) (:issue:`2788`)
+
+
+Improved Documentation
+----------------------
+
+- In help text of ``-k`` option, add example of using ``not`` to not select
+ certain tests whose names match the provided expression. (:issue:`1442`)
+
+- Add note in ``parametrize.rst`` about calling ``metafunc.parametrize``
+ multiple times. (:issue:`1548`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Set ``xfail_strict=True`` in pytest's own test suite to catch expected
+ failures as soon as they start to pass. (:issue:`2722`)
+
+- Fix typo in example of passing a callable to markers (in example/markers.rst)
+ (:issue:`2765`)
+
+
+pytest 3.2.2 (2017-09-06)
+=========================
+
+Bug Fixes
+---------
+
+- Calling the deprecated ``request.getfuncargvalue()`` now shows the source of
+ the call. (:issue:`2681`)
+
+- Allow tests declared as ``@staticmethod`` to use fixtures. (:issue:`2699`)
+
+- Fixed edge-case during collection: attributes which raised ``pytest.fail``
+ when accessed would abort the entire collection. (:issue:`2707`)
+
+- Fix ``ReprFuncArgs`` with mixed unicode and UTF-8 args. (:issue:`2731`)
+
+
+Improved Documentation
+----------------------
+
+- In examples on working with custom markers, add examples demonstrating the
+ usage of ``pytest.mark.MARKER_NAME.with_args`` in comparison with
+ ``pytest.mark.MARKER_NAME.__call__`` (:issue:`2604`)
+
+- In one of the simple examples, use ``pytest_collection_modifyitems()`` to skip
+ tests based on a command-line option, allowing its sharing while preventing a
+ user error when accessing ``pytest.config`` before the argument parsing.
+ (:issue:`2653`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Fixed minor error in 'Good Practices/Manual Integration' code snippet.
+ (:issue:`2691`)
+
+- Fixed typo in goodpractices.rst. (:issue:`2721`)
+
+- Improve user guidance regarding ``--resultlog`` deprecation. (:issue:`2739`)
+
+
+pytest 3.2.1 (2017-08-08)
+=========================
+
+Bug Fixes
+---------
+
+- Fixed small terminal glitch when collecting a single test item. (:issue:`2579`)
+
+- Correctly consider ``/`` as the file separator to automatically mark plugin
+ files for rewrite on Windows. (:issue:`2591`)
+
+- Properly escape test names when setting ``PYTEST_CURRENT_TEST`` environment
+ variable. (:issue:`2644`)
+
+- Fix error on Windows and Python 3.6+ when ``sys.stdout`` has been replaced
+ with a stream-like object which does not implement the full ``io`` module
+ buffer protocol. In particular this affects ``pytest-xdist`` users on the
+ aforementioned platform. (:issue:`2666`)
+
+
+Improved Documentation
+----------------------
+
+- Explicitly document which pytest features work with ``unittest``. (:issue:`2626`)
+
+
+pytest 3.2.0 (2017-07-30)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- ``pytest.approx`` no longer supports ``>``, ``>=``, ``<`` and ``<=``
+ operators to avoid surprising/inconsistent behavior. See the :func:`~pytest.approx` docs for more
+ information. (:issue:`2003`)
+
+- All old-style specific behavior in current classes in pytest's API is
+ considered deprecated at this point and will be removed in a future release.
+ This affects Python 2 users only and in rare situations. (:issue:`2147`)
+
+- A deprecation warning is now raised when using marks for parameters
+ in ``pytest.mark.parametrize``. Use ``pytest.param`` to apply marks to
+ parameters instead. (:issue:`2427`)
+
+
+Features
+--------
+
+- Add support for numpy arrays (and dicts) to approx. (:issue:`1994`)
+
+- Now test function objects have a ``pytestmark`` attribute containing a list
+ of marks applied directly to the test function, as opposed to marks inherited
+ from parent classes or modules. (:issue:`2516`)
+
+- Collection ignores local virtualenvs by default; ``--collect-in-virtualenv``
+ overrides this behavior. (:issue:`2518`)
+
+- Allow class methods decorated as ``@staticmethod`` to be candidates for
+ collection as a test function. (Only for Python 2.7 and above. Python 2.6
+ will still ignore static methods.) (:issue:`2528`)
+
+- Introduce ``mark.with_args`` in order to allow passing functions/classes as
+ sole argument to marks. (:issue:`2540`)
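+
+  A sketch of why this is needed (``fixed_arg`` and the ``foo`` mark are made-up names):
+
+  .. code-block:: python
+
+      import pytest
+
+      def fixed_arg():
+          return "value"
+
+      # @pytest.mark.foo(fixed_arg) would treat fixed_arg as the function being
+      # decorated, so with_args is required to pass a callable as the mark's argument
+      @pytest.mark.foo.with_args(fixed_arg)
+      def test_something():
+          assert True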
+
+- New ``cache_dir`` ini option: sets the directory where the contents of the
+ cache plugin are stored. Directory may be relative or absolute path: if relative path, then
+ directory is created relative to ``rootdir``, otherwise it is used as is.
+ Additionally path may contain environment variables which are expanded during
+ runtime. (:issue:`2543`)
+
+- Introduce the ``PYTEST_CURRENT_TEST`` environment variable that is set with
+ the ``nodeid`` and stage (``setup``, ``call`` and ``teardown``) of the test
+ being currently executed. See the :ref:`documentation <pytest current test env>`
+ for more info. (:issue:`2583`)
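+
+  For example, code under test (or a monitoring thread) can read the variable while a test runs;
+  the exact value shown in the comment is only indicative:
+
+  .. code-block:: python
+
+      import os
+
+      def test_current_test_env():
+          value = os.environ["PYTEST_CURRENT_TEST"]
+          # e.g. "test_module.py::test_current_test_env (call)"
+          assert value.endswith("(call)")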
+
+- Introduced ``@pytest.mark.filterwarnings`` mark which allows overwriting the
+ warnings filter on a per test, class or module level. See the :ref:`docs <filterwarnings>`
+ for more information. (:issue:`2598`)
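+
+  A minimal sketch of the mark in use (the warning text is arbitrary):
+
+  .. code-block:: python
+
+      import warnings
+
+      import pytest
+
+      @pytest.mark.filterwarnings("ignore:api v1")
+      def test_one():
+          # this DeprecationWarning is ignored for this test only
+          warnings.warn("api v1 is deprecated, use v2", DeprecationWarning)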
+
+- ``--last-failed`` now remembers forever when a test has failed and only
+ forgets it if it passes again. This makes it easy to fix a test suite by
+ selectively running files and fixing tests incrementally. (:issue:`2621`)
+
+- New ``pytest_report_collectionfinish`` hook which allows plugins to add
+ messages to the terminal reporting after collection has been finished
+ successfully. (:issue:`2622`)
+
+- Added support for :pep:`415`\'s
+ ``Exception.__suppress_context__``. Now if a ``raise exception from None`` is
+ caught by pytest, pytest will no longer chain the context in the test report.
+ The behavior now matches Python's traceback behavior. (:issue:`2631`)
+
+- Exceptions raised by ``pytest.fail``, ``pytest.skip`` and ``pytest.xfail``
+ now subclass BaseException, making them harder to be caught unintentionally
+ by normal code. (:issue:`580`)
+
+
+Bug Fixes
+---------
+
+- Set ``stdin`` to a closed ``PIPE`` in ``pytester.py.Testdir.popen()`` to
+ avoid unwanted interactive ``pdb`` (:issue:`2023`)
+
+- Add missing ``encoding`` attribute to ``sys.std*`` streams when using
+ ``capsys`` capture mode. (:issue:`2375`)
+
+- Fix terminal color changing to black on Windows if ``colorama`` is imported
+ in a ``conftest.py`` file. (:issue:`2510`)
+
+- Fix line number when reporting summary of skipped tests. (:issue:`2548`)
+
+- capture: ensure that EncodedFile.name is a string. (:issue:`2555`)
+
+- The options ``--fixtures`` and ``--fixtures-per-test`` will now keep
+ indentation within docstrings. (:issue:`2574`)
+
+- doctests line numbers are now reported correctly, fixing `pytest-sugar#122
+ <https://github.com/Frozenball/pytest-sugar/issues/122>`_. (:issue:`2610`)
+
+- Fix non-determinism in order of fixture collection. Adds new dependency
+ (ordereddict) for Python 2.6. (:issue:`920`)
+
+
+Improved Documentation
+----------------------
+
+- Clarify ``pytest_configure`` hook call order. (:issue:`2539`)
+
+- Extend documentation for testing plugin code with the ``pytester`` plugin.
+ (:issue:`971`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Update help message for ``--strict`` to make it clear it only deals with
+ unregistered markers, not warnings. (:issue:`2444`)
+
+- Internal code move: move code for pytest.approx/pytest.raises to own files in
+ order to cut down the size of python.py (:issue:`2489`)
+
+- Renamed the utility function ``_pytest.compat._escape_strings`` to
+ ``_ascii_escaped`` to better communicate the function's purpose. (:issue:`2533`)
+
+- Improve error message for CollectError with skip/skipif. (:issue:`2546`)
+
+- Emit warning about ``yield`` tests being deprecated only once per generator.
+ (:issue:`2562`)
+
+- Ensure final collected line doesn't include artifacts of previous write.
+ (:issue:`2571`)
+
+- Fixed all flake8 errors and warnings. (:issue:`2581`)
+
+- Added ``fix-lint`` tox environment to run automatic pep8 fixes on the code.
+ (:issue:`2582`)
+
+- Turn warnings into errors in pytest's own test suite in order to catch
+ regressions due to deprecations more promptly. (:issue:`2588`)
+
+- Show multiple issue links in CHANGELOG entries. (:issue:`2620`)
+
+
+pytest 3.1.3 (2017-07-03)
+=========================
+
+Bug Fixes
+---------
+
+- Fix decode error in Python 2 for doctests in docstrings. (:issue:`2434`)
+
+- Exceptions raised during teardown by finalizers are now suppressed until all
+ finalizers are called, with the initial exception reraised. (:issue:`2440`)
+
+- Fix incorrect "collected items" report when specifying tests on the
+  command-line. (:issue:`2464`)
+
+- ``deprecated_call`` in context-manager form now captures deprecation warnings
+ even if the same warning has already been raised. Also, ``deprecated_call``
+ will always produce the same error message (previously it would produce
+ different messages in context-manager vs. function-call mode). (:issue:`2469`)
+
+- Fix issue where paths collected by pytest could have triple leading ``/``
+ characters. (:issue:`2475`)
+
+- Fix internal error when trying to detect the start of a recursive traceback.
+ (:issue:`2486`)
+
+
+Improved Documentation
+----------------------
+
+- Explicitly state for which hooks the calls stop after the first non-None
+ result. (:issue:`2493`)
+
+
+Trivial/Internal Changes
+------------------------
+
+- Create invoke tasks for updating the vendored packages. (:issue:`2474`)
+
+- Update copyright dates in LICENSE, README.rst and in the documentation.
+ (:issue:`2499`)
+
+
+pytest 3.1.2 (2017-06-08)
+=========================
+
+Bug Fixes
+---------
+
+- Required options added via ``pytest_addoption`` will no longer prevent using
+ --help without passing them. (#1999)
+
+- Respect ``python_files`` in assertion rewriting. (#2121)
+
+- Fix recursion error detection when frames in the traceback contain objects
+ that can't be compared (like ``numpy`` arrays). (#2459)
+
+- ``UnicodeWarning`` is issued from the internal pytest warnings plugin only
+ when the message contains non-ascii unicode (Python 2 only). (#2463)
+
+- Added a workaround for Python 3.6 ``WindowsConsoleIO`` breaking due to pytest's
+ ``FDCapture``. Other code using console handles might still be affected by the
+ very same issue and might require further workarounds/fixes, i.e. ``colorama``.
+ (#2467)
+
+
+Improved Documentation
+----------------------
+
+- Fix internal API links to ``pluggy`` objects. (#2331)
+
+- Make it clear that ``pytest.xfail`` stops test execution at the calling point
+ and improve overall flow of the ``skipping`` docs. (#810)
+
+
+pytest 3.1.1 (2017-05-30)
+=========================
+
+Bug Fixes
+---------
+
+- pytest warning capture no longer overrides existing warning filters. The
+ previous behaviour would override all filters and caused regressions in test
+ suites which configure warning filters to match their needs. Note that as a
+ side-effect of this, ``DeprecationWarning`` and
+ ``PendingDeprecationWarning`` are no longer shown by default. (#2430)
+
+- Fix issue with non-ascii contents in doctest text files. (#2434)
+
+- Fix encoding errors for unicode warnings in Python 2. (#2436)
+
+- ``pytest.deprecated_call`` now captures ``PendingDeprecationWarning`` in
+ context manager form. (#2441)
+
+
+Improved Documentation
+----------------------
+
+- Addition of towncrier for changelog management. (#2390)
+
+
+3.1.0 (2017-05-22)
+==================
+
+
+New Features
+------------
+
+* The ``pytest-warnings`` plugin has been integrated into the core and now ``pytest`` automatically
+ captures and displays warnings at the end of the test session.
+
+ .. warning::
+
+ This feature may disrupt test suites which apply and treat warnings themselves, and can be
+ disabled in your ``pytest.ini``:
+
+ .. code-block:: ini
+
+ [pytest]
+ addopts = -p no:warnings
+
+ See the :doc:`warnings documentation page <how-to/capture-warnings>` for more
+ information.
+
+ Thanks :user:`nicoddemus` for the PR.
+
+* Added ``junit_suite_name`` ini option to specify root ``<testsuite>`` name for JUnit XML reports (:issue:`533`).
+
+* Added an ini option ``doctest_encoding`` to specify which encoding to use for doctest files.
+ Thanks :user:`wheerd` for the PR (:pull:`2101`).
+
+* ``pytest.warns`` now checks for subclass relationship rather than
+ class equality. Thanks :user:`lesteve` for the PR (:pull:`2166`)
+
+* ``pytest.raises`` now asserts that the error message matches a text or regex
+ with the ``match`` keyword argument. Thanks :user:`Kriechi` for the PR.
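+
+ A minimal sketch of the ``match`` argument (the test name and message below
+ are illustrative, not from this release):
+
+ .. code-block:: python
+
+     import pytest
+
+     def test_error_message_match():
+         with pytest.raises(ValueError, match=r"must be \d+"):
+             raise ValueError("value must be 42")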
+
+* ``pytest.param`` can be used to declare test parameter sets with marks and test ids.
+ Thanks :user:`RonnyPfannschmidt` for the PR.
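+
+ For example (the parameter values and marks below are illustrative):
+
+ .. code-block:: python
+
+     import pytest
+
+     @pytest.mark.parametrize(
+         "value",
+         [
+             pytest.param(1, id="one"),
+             pytest.param(0, marks=pytest.mark.xfail(reason="zero unsupported")),
+         ],
+     )
+     def test_value(value):
+         assert value == 1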
+
+
+Changes
+-------
+
+* Remove all internal uses of the ``pytest_namespace`` hook;
+ this prepares for the removal of preloadconfig in pytest 4.0.
+ Thanks to :user:`RonnyPfannschmidt` for the PR.
+
+* pytest now warns when a callable passed as ``ids`` raises an exception in a parametrized test. Thanks :user:`fogo` for the PR.
+
+* It is now possible to skip test classes from being collected by setting a
+ ``__test__`` attribute to ``False`` in the class body (:issue:`2007`). Thanks
+ to :user:`syre` for the report and :user:`lwm` for the PR.
+
+* Change junitxml.py to produce reports that comply with Junitxml schema.
+ If the same test fails with failure in call and then errors in teardown
+ we split testcase element into two, one containing the error and the other
+ the failure. (:issue:`2228`) Thanks to :user:`kkoukiou` for the PR.
+
+* Testcase reports with a ``url`` attribute will now properly write this to junitxml.
+ Thanks :user:`fushi` for the PR (:pull:`1874`).
+
+* Remove common items from dict comparison output when verbosity=1. Also update
+ the truncation message to make it clearer that pytest truncates all
+ assertion messages if verbosity < 2 (:issue:`1512`).
+ Thanks :user:`mattduck` for the PR.
+
+* ``--pdbcls`` no longer implies ``--pdb``. This makes it possible to use
+ ``addopts=--pdbcls=module.SomeClass`` in ``pytest.ini``. Thanks :user:`davidszotten` for
+ the PR (:pull:`1952`).
+
+* fix :issue:`2013`: turn RecordedWarning into ``namedtuple``,
+ to give it a comprehensible repr while preventing unwarranted modification.
+
+* fix :issue:`2208`: ensure an iteration limit for _pytest.compat.get_real_func.
+ Thanks :user:`RonnyPfannschmidt` for the report and PR.
+
+* Hooks are now verified after collection is complete, rather than right after loading installed plugins. This
+ makes it easy to write hooks for plugins which will be loaded during collection, for example using the
+ ``pytest_plugins`` special variable (:issue:`1821`).
+ Thanks :user:`nicoddemus` for the PR.
+
+* Modify ``pytest_make_parametrize_id()`` hook to accept ``argname`` as an
+ additional parameter.
+ Thanks :user:`unsignedint` for the PR.
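+
+ A ``conftest.py`` sketch using the new ``argname`` parameter (the id format
+ chosen here is just an example):
+
+ .. code-block:: python
+
+     # conftest.py
+     def pytest_make_parametrize_id(config, val, argname):
+         # Return a string to use as the test id, or None for the default.
+         if isinstance(val, complex):
+             return "{}={!r}".format(argname, val)
+         return None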
+
+* Add ``venv`` to the default ``norecursedirs`` setting.
+ Thanks :user:`The-Compiler` for the PR.
+
+* ``PluginManager.import_plugin`` now accepts unicode plugin names in Python 2.
+ Thanks :user:`reutsharabani` for the PR.
+
+* fix :issue:`2308`: When using both ``--lf`` and ``--ff``, only the last failed tests are run.
+ Thanks :user:`ojii` for the PR.
+
+* Replace minor/patch level version numbers in the documentation with placeholders.
+ This significantly reduces change-noise as different contributors regenerate
+ the documentation on different platforms.
+ Thanks :user:`RonnyPfannschmidt` for the PR.
+
+* fix :issue:`2391`: consider pytest_plugins on all plugin modules.
+ Thanks :user:`RonnyPfannschmidt` for the PR.
+
+
+Bug Fixes
+---------
+
+* Fix ``AttributeError`` on ``sys.stdout.buffer`` / ``sys.stderr.buffer``
+ while using ``capsys`` fixture in python 3. (:issue:`1407`).
+ Thanks to :user:`asottile`.
+
+* Change capture.py's ``DontReadFromInput`` class to throw ``io.UnsupportedOperation`` errors rather
+ than ValueErrors in the ``fileno`` method (:issue:`2276`).
+ Thanks :user:`metasyn` and :user:`vlad-dragos` for the PR.
+
+* Fix exception formatting while importing modules when the exception message
+ contains non-ascii characters (:issue:`2336`).
+ Thanks :user:`fabioz` for the report and :user:`nicoddemus` for the PR.
+
+* Added documentation related to issue (:issue:`1937`).
+ Thanks :user:`skylarjhdownes` for the PR.
+
+* Allow collecting files with any file extension as Python modules (:issue:`2369`).
+ Thanks :user:`Kodiologist` for the PR.
+
+* Show the correct error message when collecting a "parametrize" func with wrong args (:issue:`2383`).
+ Thanks :user:`The-Compiler` for the report and :user:`robin0371` for the PR.
+
+
+3.0.7 (2017-03-14)
+==================
+
+
+* Fix issue in assertion rewriting breaking due to modules silently discarding
+ other modules when importing fails.
+ Notably, importing the ``anydbm`` module is fixed. (:issue:`2248`).
+ Thanks :user:`pfhayes` for the PR.
+
+* junitxml: Fix problematic case where system-out tag occurred twice per testcase
+ element in the XML report. Thanks :user:`kkoukiou` for the PR.
+
+* Fix regression, pytest now skips unittest correctly if run with ``--pdb``
+ (:issue:`2137`). Thanks to :user:`gst` for the report and :user:`mbyt` for the PR.
+
+* Ignore exceptions raised from descriptors (e.g. properties) during Python test collection (:issue:`2234`).
+ Thanks to :user:`bluetech`.
+
+* ``--override-ini`` now correctly overrides some fundamental options like ``python_files`` (:issue:`2238`).
+ Thanks :user:`sirex` for the report and :user:`nicoddemus` for the PR.
+
+* Replace ``raise StopIteration`` usages in the code by simple ``returns`` to finish generators, in accordance to :pep:`479` (:issue:`2160`).
+ Thanks to :user:`nicoddemus` for the PR.
+
+* Fix internal errors when an unprintable ``AssertionError`` is raised inside a test.
+ Thanks :user:`omerhadari` for the PR.
+
+* Skipping plugin now also works with test items generated by custom collectors (:issue:`2231`).
+ Thanks to :user:`vidartf`.
+
+* Fix trailing whitespace in console output if no .ini file is present (:issue:`2281`). Thanks :user:`fbjorn` for the PR.
+
+* Conditionless ``xfail`` markers no longer rely on the underlying test item
+ being an instance of ``PyobjMixin``, and can therefore apply to tests not
+ collected by the built-in python test collector. Thanks :user:`barneygale` for the
+ PR.
+
+
+3.0.6 (2017-01-22)
+==================
+
+* pytest no longer generates ``PendingDeprecationWarning`` from its own operations, which was introduced by mistake in version ``3.0.5`` (:issue:`2118`).
+ Thanks to :user:`nicoddemus` for the report and :user:`RonnyPfannschmidt` for the PR.
+
+
+* pytest no longer recognizes coroutine functions as yield tests (:issue:`2129`).
+ Thanks to :user:`malinoff` for the PR.
+
+* Plugins loaded by the ``PYTEST_PLUGINS`` environment variable are now automatically
+ considered for assertion rewriting (:issue:`2185`).
+ Thanks :user:`nicoddemus` for the PR.
+
+* Improve error message when pytest.warns fails (:issue:`2150`). The type(s) of the
+ expected warnings and the list of caught warnings are added to the
+ error message. Thanks :user:`lesteve` for the PR.
+
+* Fix ``pytester`` internal plugin to work correctly with latest versions of
+ ``zope.interface`` (:issue:`1989`). Thanks :user:`nicoddemus` for the PR.
+
+* Assert statements of the ``pytester`` plugin again benefit from assertion rewriting (:issue:`1920`).
+ Thanks :user:`RonnyPfannschmidt` for the report and :user:`nicoddemus` for the PR.
+
+* Specifying tests with colons like ``test_foo.py::test_bar`` for tests in
+ subdirectories with ini configuration files now uses the correct ini file
+ (:issue:`2148`). Thanks :user:`pelme`.
+
+* Fail ``testdir.runpytest().assert_outcomes()`` explicitly if the pytest
+ terminal output it relies on is missing. Thanks to :user:`eli-b` for the PR.
+
+
+3.0.5 (2016-12-05)
+==================
+
+* Add warning when not passing ``option=value`` correctly to ``-o/--override-ini`` (:issue:`2105`).
+ Also improved the help documentation. Thanks to :user:`mbukatov` for the report and
+ :user:`lwm` for the PR.
+
+* Now ``--confcutdir`` and ``--junit-xml`` are properly validated if they are directories
+ and filenames, respectively (:issue:`2089` and :issue:`2078`). Thanks to :user:`lwm` for the PR.
+
+* Add hint to error message hinting possible missing ``__init__.py`` (:issue:`478`). Thanks :user:`DuncanBetts`.
+
+* More accurately describe when fixture finalization occurs in documentation (:issue:`687`). Thanks :user:`DuncanBetts`.
+
+* Provide ``:ref:`` targets for ``recwarn.rst`` so we can use intersphinx referencing.
+ Thanks to :user:`dupuy` for the report and :user:`lwm` for the PR.
+
+* In Python 2, use a simple ``+-`` ASCII string in the string representation of ``pytest.approx`` (for example ``"4 +- 4.0e-06"``)
+ because it is brittle to handle that in different contexts and representations internally in pytest
+ which can result in bugs such as :issue:`2111`. In Python 3, the representation still uses ``±`` (for example ``4 ± 4.0e-06``).
+ Thanks :user:`kerrick-lyft` for the report and :user:`nicoddemus` for the PR.
+
+* Using ``item.Function``, ``item.Module``, etc., now issues deprecation warnings; prefer
+ ``pytest.Function``, ``pytest.Module``, etc., instead (:issue:`2034`).
+ Thanks :user:`nmundar` for the PR.
+
+* Fix error message using ``approx`` with complex numbers (:issue:`2082`).
+ Thanks :user:`adler-j` for the report and :user:`nicoddemus` for the PR.
+
+* Fixed false-positive warnings from assertion rewrite hook for modules imported more than
+ once by the ``pytest_plugins`` mechanism.
+ Thanks :user:`nicoddemus` for the PR.
+
+* Remove an internal cache which could cause hooks from ``conftest.py`` files in
+ sub-directories to be called in other directories incorrectly (:issue:`2016`).
+ Thanks :user:`d-b-w` for the report and :user:`nicoddemus` for the PR.
+
+* Remove internal code meant to support earlier Python 3 versions that produced the side effect
+ of leaving ``None`` in ``sys.modules`` when expressions were evaluated by pytest (for example passing a condition
+ as a string to ``pytest.mark.skipif``) (:issue:`2103`).
+ Thanks :user:`jaraco` for the report and :user:`nicoddemus` for the PR.
+
+* Cope gracefully with a .pyc file with no matching .py file (:issue:`2038`). Thanks
+ :user:`nedbat`.
+
+
+3.0.4 (2016-11-09)
+==================
+
+* Import errors when collecting test modules now display the full traceback (:issue:`1976`).
+ Thanks :user:`cwitty` for the report and :user:`nicoddemus` for the PR.
+
+* Fix confusing command-line help message for custom options with two or more ``metavar`` properties (:issue:`2004`).
+ Thanks :user:`okulynyak` and :user:`davehunt` for the report and :user:`nicoddemus` for the PR.
+
+* When loading plugins, import errors which contain non-ascii messages are now properly handled in Python 2 (:issue:`1998`).
+ Thanks :user:`nicoddemus` for the PR.
+
+* Fixed cyclic reference when ``pytest.raises`` is used in context-manager form (:issue:`1965`). Also as a
+ result of this fix, ``sys.exc_info()`` is left empty in both context-manager and function call usages.
+ Previously, ``sys.exc_info`` would contain the exception caught by the context manager,
+ even when the expected exception occurred.
+ Thanks :user:`MSeifert04` for the report and the PR.
+
+* Fixed false-positive warnings from assertion rewrite hook for modules that were rewritten but
+ were later marked explicitly by ``pytest.register_assert_rewrite``
+ or implicitly as a plugin (:issue:`2005`).
+ Thanks :user:`RonnyPfannschmidt` for the report and :user:`nicoddemus` for the PR.
+
+* Report teardown output on test failure (:issue:`442`).
+ Thanks :user:`matclab` for the PR.
+
+* Fix teardown error message in generated xUnit XML.
+ Thanks :user:`gdyuldin` for the PR.
+
+* Properly handle exceptions in ``multiprocessing`` tasks (:issue:`1984`).
+ Thanks :user:`adborden` for the report and :user:`nicoddemus` for the PR.
+
+* Clean up unittest TestCase objects after tests are complete (:issue:`1649`).
+ Thanks :user:`d_b_w` for the report and PR.
+
+
+3.0.3 (2016-09-28)
+==================
+
+* The ``ids`` argument to ``parametrize`` again accepts ``unicode`` strings
+ in Python 2 (:issue:`1905`).
+ Thanks :user:`philpep` for the report and :user:`nicoddemus` for the PR.
+
+* Assertions are now being rewritten for plugins in development mode
+ (``pip install -e``) (:issue:`1934`).
+ Thanks :user:`nicoddemus` for the PR.
+
+* Fix pkg_resources import error in Jython projects (:issue:`1853`).
+ Thanks :user:`raquel-ucl` for the PR.
+
+* Got rid of ``AttributeError: 'Module' object has no attribute '_obj'`` exception
+ in Python 3 (:issue:`1944`).
+ Thanks :user:`axil` for the PR.
+
+* Explain a bad scope value passed to ``@fixture`` declarations or
+ a ``Metafunc.parametrize()`` call.
+
+* This version includes ``pluggy-0.4.0``, which correctly handles
+ ``VersionConflict`` errors in plugins (:issue:`704`).
+ Thanks :user:`nicoddemus` for the PR.
+
+
+3.0.2 (2016-09-01)
+==================
+
+* Improve error message when passing non-string ids to ``pytest.mark.parametrize`` (:issue:`1857`).
+ Thanks :user:`okken` for the report and :user:`nicoddemus` for the PR.
+
+* Add ``buffer`` attribute to stdin stub class ``pytest.capture.DontReadFromInput``.
+ Thanks :user:`joguSD` for the PR.
+
+* Fix ``UnicodeEncodeError`` when string comparison with unicode has failed. (:issue:`1864`)
+ Thanks :user:`AiOO` for the PR.
+
+* ``pytest_plugins`` is now handled correctly if defined as a string (as opposed as
+ a sequence of strings) when modules are considered for assertion rewriting.
+ Due to this bug, many more modules were being rewritten than necessary
+ if a test suite uses ``pytest_plugins`` to load internal plugins (:issue:`1888`).
+ Thanks :user:`jaraco` for the report and :user:`nicoddemus` for the PR (:pull:`1891`).
+
+* Do not call tearDown and cleanups when running tests from
+ ``unittest.TestCase`` subclasses with ``--pdb``
+ enabled. This allows proper post mortem debugging for all applications
+ which have significant logic in their tearDown machinery (:issue:`1890`). Thanks
+ :user:`mbyt` for the PR.
+
+* Fix use of deprecated ``getfuncargvalue`` method in the internal doctest plugin.
+ Thanks :user:`ViviCoder` for the report (:issue:`1898`).
+
+
+3.0.1 (2016-08-23)
+==================
+
+* Fix regression when ``importorskip`` is used at module level (:issue:`1822`).
+ Thanks :user:`jaraco` and :user:`The-Compiler` for the report and :user:`nicoddemus` for the PR.
+
+* Fix parametrization scope when session fixtures are used in conjunction
+ with normal parameters in the same call (:issue:`1832`).
+ Thanks :user:`The-Compiler` for the report, :user:`Kingdread` and :user:`nicoddemus` for the PR.
+
+* Fix internal error when parametrizing tests or fixtures using an empty ``ids`` argument (:issue:`1849`).
+ Thanks :user:`OPpuolitaival` for the report and :user:`nicoddemus` for the PR.
+
+* Fix loader error when running ``pytest`` embedded in a zipfile.
+ Thanks :user:`mbachry` for the PR.
+
+
+.. _release-3.0.0:
+
+3.0.0 (2016-08-18)
+==================
+
+**Incompatible changes**
+
+
+A number of incompatible changes were made in this release, with the intent of removing features deprecated for a long
+time or changing existing behaviors in order to make them less surprising/more useful.
+
+* Reinterpretation mode has now been removed. Only plain and rewrite
+ modes are available; consequently the ``--assert=reinterp`` option is
+ no longer available. This also means files imported from plugins or
+ ``conftest.py`` will not benefit from improved assertions by
+ default; you should use ``pytest.register_assert_rewrite()`` to
+ explicitly turn on assertion rewriting for those files. Thanks
+ :user:`flub` for the PR.
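+
+ A minimal sketch of opting a helper module back in ("myproject.checks" is a
+ hypothetical module name):
+
+ .. code-block:: python
+
+     # conftest.py
+     import pytest
+
+     # Must run before the helper module is imported anywhere.
+     pytest.register_assert_rewrite("myproject.checks")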
+
+* The following deprecated commandline options were removed:
+
+ * ``--genscript``: no longer supported;
+ * ``--no-assert``: use ``--assert=plain`` instead;
+ * ``--nomagic``: use ``--assert=plain`` instead;
+ * ``--report``: use ``-r`` instead;
+
+ Thanks to :user:`RedBeardCode` for the PR (:pull:`1664`).
+
+* ImportErrors in plugins now are a fatal error instead of issuing a
+ pytest warning (:issue:`1479`). Thanks to :user:`The-Compiler` for the PR.
+
+* Removed support code for Python 3 versions < 3.3 (:pull:`1627`).
+
+* Removed all ``py.test-X*`` entry points. The versioned, suffixed entry points
+ were never documented and a leftover from a pre-virtualenv era. These entry
+ points also created broken entry points in wheels, so removing them also
+ removes a source of confusion for users (:issue:`1632`).
+ Thanks :user:`obestwalter` for the PR.
+
+* ``pytest.skip()`` now raises an error when used to decorate a test function,
+ as opposed to its original intent (to imperatively skip a test inside a test function). Previously
+ this usage would cause the entire module to be skipped (:issue:`607`).
+ Thanks :user:`omarkohl` for the complete PR (:pull:`1519`).
+
+* Exit tests if a collection error occurs. A poll indicated most users will hit CTRL-C
+ anyway as soon as they see collection errors, so pytest might as well make that the default behavior (:issue:`1421`).
+ A ``--continue-on-collection-errors`` option has been added to restore the previous behaviour.
+ Thanks :user:`olegpidsadnyi` and :user:`omarkohl` for the complete PR (:pull:`1628`).
+
+* Renamed the pytest ``pdb`` module (plugin) into ``debugging`` to avoid clashes with the builtin ``pdb`` module.
+
+* Raise a helpful failure message when requesting a parametrized fixture at runtime,
+ e.g. with ``request.getfixturevalue``. Previously these parameters were simply
+ never defined, so a fixture decorated like ``@pytest.fixture(params=[0, 1, 2])``
+ only ran once (:pull:`460`).
+ Thanks to :user:`nikratio` for the bug report, :user:`RedBeardCode` and :user:`tomviner` for the PR.
+
+* ``_pytest.monkeypatch.monkeypatch`` class has been renamed to ``_pytest.monkeypatch.MonkeyPatch``
+ so it doesn't conflict with the ``monkeypatch`` fixture.
+
+* ``--exitfirst / -x`` can now be overridden by a following ``--maxfail=N``
+ and is just a synonym for ``--maxfail=1``.
+
+
+**New Features**
+
+* Support nose-style ``__test__`` attribute on methods of classes,
+ including unittest-style classes. If set to ``False``, the test will not be
+ collected.
+
+* New ``doctest_namespace`` fixture for injecting names into the
+ namespace in which doctests run.
+ Thanks :user:`milliams` for the complete PR (:pull:`1428`).
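+
+ A ``conftest.py`` sketch (the injected name is illustrative):
+
+ .. code-block:: python
+
+     # conftest.py
+     import pytest
+
+     @pytest.fixture(autouse=True)
+     def add_helpers(doctest_namespace):
+         # ``length`` becomes usable inside doctests without an import.
+         doctest_namespace["length"] = len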
+
+* New ``--doctest-report`` option available to change the output format of diffs
+ when running (failing) doctests (implements :issue:`1749`).
+ Thanks :user:`hartym` for the PR.
+
+* New ``name`` argument to ``pytest.fixture`` decorator which allows a custom name
+ for a fixture (to solve the funcarg-shadowing-fixture problem).
+ Thanks :user:`novas0x2a` for the complete PR (:pull:`1444`).
+
+* New ``approx()`` function for easily comparing floating-point numbers in
+ tests.
+ Thanks :user:`kalekundert` for the complete PR (:pull:`1441`).
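+
+ For example:
+
+ .. code-block:: python
+
+     import pytest
+
+     def test_floats():
+         assert 0.1 + 0.2 == pytest.approx(0.3)
+         assert 2.0 == pytest.approx(2.003, rel=1e-2)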
+
+* Ability to add global properties in the final xunit output file by accessing
+ the internal ``junitxml`` plugin (experimental).
+ Thanks :user:`tareqalayan` for the complete PR (:pull:`1454`).
+
+* New ``ExceptionInfo.match()`` method to match a regular expression on the
+ string representation of an exception (:issue:`372`).
+ Thanks :user:`omarkohl` for the complete PR (:pull:`1502`).
+
+* ``__tracebackhide__`` can now also be set to a callable which then can decide
+ whether to filter the traceback based on the ``ExceptionInfo`` object passed
+ to it. Thanks :user:`The-Compiler` for the complete PR (:pull:`1526`).
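+
+ A sketch of the callable form (the helper below is illustrative):
+
+ .. code-block:: python
+
+     def check_positive(value):
+         # Hide this helper frame only when an AssertionError is reported.
+         __tracebackhide__ = lambda excinfo: excinfo.errisinstance(AssertionError)
+         assert value > 0
+
+     def test_positive():
+         check_positive(1)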
+
+* New ``pytest_make_parametrize_id(config, val)`` hook which can be used by plugins to provide
+ friendly strings for custom types.
+ Thanks :user:`palaviv` for the PR.
+
+* ``capsys`` and ``capfd`` now have a ``disabled()`` context-manager method, which
+ can be used to temporarily disable capture within a test.
+ Thanks :user:`nicoddemus` for the PR.
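+
+ For example:
+
+ .. code-block:: python
+
+     def test_disabling_capture(capsys):
+         print("this output is captured")
+         with capsys.disabled():
+             print("this output goes straight to the terminal")
+         print("this output is also captured")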
+
+* New cli flag ``--fixtures-per-test``: shows which fixtures are being used
+ for each selected test item. Shows the doc strings of fixtures by default.
+ Can also show where fixtures are defined if combined with ``-v``.
+ Thanks :user:`hackebrot` for the PR.
+
+* Introduce ``pytest`` command as recommended entry point. Note that ``py.test``
+ still works and is not scheduled for removal. Closes proposal
+ :issue:`1629`. Thanks :user:`obestwalter` and :user:`davehunt` for the complete PR
+ (:pull:`1633`).
+
+* New cli flags:
+
+ + ``--setup-plan``: performs normal collection and reports
+ the potential setup and teardown and does not execute any fixtures and tests;
+ + ``--setup-only``: performs normal collection, executes setup and teardown of
+ fixtures and reports them;
+ + ``--setup-show``: performs normal test execution and additionally shows
+ setup and teardown of fixtures;
+ + ``--keep-duplicates``: py.test now ignores duplicated paths given in the command
+ line. To retain the previous behavior where the same test could be run multiple
+ times by specifying it in the command-line multiple times, pass the ``--keep-duplicates``
+ argument (:issue:`1609`);
+
+ Thanks :user:`d6e`, :user:`kvas-it`, :user:`sallner`, :user:`ioggstream` and :user:`omarkohl` for the PRs.
+
+* New CLI flag ``--override-ini``/``-o``: overrides values from the ini file.
+ For example: ``"-o xfail_strict=True"``.
+ Thanks :user:`blueyed` and :user:`fengxx` for the PR.
+
+* New hooks:
+
+ + ``pytest_fixture_setup(fixturedef, request)``: executes fixture setup;
+ + ``pytest_fixture_post_finalizer(fixturedef)``: called after the fixture's
+ finalizer and has access to the fixture's result cache.
+
+ Thanks :user:`d6e`, :user:`sallner`.
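+
+ A ``conftest.py`` sketch implementing the two hooks above (the print
+ statements are only for illustration):
+
+ .. code-block:: python
+
+     # conftest.py
+     def pytest_fixture_setup(fixturedef, request):
+         print("setting up fixture %r" % fixturedef.argname)
+
+     def pytest_fixture_post_finalizer(fixturedef):
+         # Runs after the fixture's finalizer; the result cache is still set.
+         print("finished fixture %r" % fixturedef.argname)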
+
+* Issue warnings for asserts whose test is a tuple literal. Such asserts will
+ never fail because tuples are always truthy and are usually a mistake
+ (see :issue:`1562`). Thanks :user:`kvas-it` for the PR.
+
+* Allow passing a custom debugger class (e.g. ``--pdbcls=IPython.core.debugger:Pdb``).
+ Thanks to :user:`anntzer` for the PR.
+
+
+**Changes**
+
+* Plugins now benefit from assertion rewriting. Thanks
+ :user:`sober7`, :user:`nicoddemus` and :user:`flub` for the PR.
+
+* Change ``report.outcome`` for ``xpassed`` tests to ``"passed"`` in non-strict
+ mode and ``"failed"`` in strict mode. Thanks to :user:`hackebrot` for the PR
+ (:pull:`1795`) and :user:`gprasad84` for report (:issue:`1546`).
+
+* Tests marked with ``xfail(strict=False)`` (the default) now appear in
+ JUnitXML reports as passing tests instead of skipped.
+ Thanks to :user:`hackebrot` for the PR (:pull:`1795`).
+
+* Highlight path of the file location in the error report to make it easier to copy/paste.
+ Thanks :user:`suzaku` for the PR (:pull:`1778`).
+
+* Fixtures marked with ``@pytest.fixture`` can now use ``yield`` statements exactly like
+ those marked with the ``@pytest.yield_fixture`` decorator. This change renders
+ ``@pytest.yield_fixture`` deprecated and makes ``@pytest.fixture`` with ``yield`` statements
+ the preferred way to write teardown code (:pull:`1461`).
+ Thanks :user:`csaftoiu` for bringing this to attention and :user:`nicoddemus` for the PR.
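+
+ A minimal sketch (the resource below is a stand-in for something real):
+
+ .. code-block:: python
+
+     import pytest
+
+     @pytest.fixture
+     def connection():
+         conn = {"open": True}      # set up the resource
+         yield conn                 # value provided to the test
+         conn["open"] = False       # teardown runs after the test finishes
+
+     def test_uses_connection(connection):
+         assert connection["open"]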
+
+* Explicitly passed parametrize ids do not get escaped to ascii (:issue:`1351`).
+ Thanks :user:`ceridwen` for the PR.
+
+* Fixtures are now sorted in the error message displayed when an unknown
+ fixture is declared in a test function.
+ Thanks :user:`nicoddemus` for the PR.
+
+* ``pytest_terminal_summary`` hook now receives the ``exitstatus``
+ of the test session as argument. Thanks :user:`blueyed` for the PR (:pull:`1809`).
+
+* Parametrize ids can accept ``None`` as specific test id, in which case the
+ automatically generated id for that argument will be used.
+ Thanks :user:`palaviv` for the complete PR (:pull:`1468`).
+
+* The parameter to xunit-style setup/teardown methods (``setup_method``,
+ ``setup_module``, etc.) is now optional and may be omitted.
+ Thanks :user:`okken` for bringing this to attention and :user:`nicoddemus` for the PR.
+
+* Improved automatic id generation selection in case of duplicate ids in
+ parametrize.
+ Thanks :user:`palaviv` for the complete PR (:pull:`1474`).
+
+* The pytest warnings summary is now shown by default. Added a new flag
+ ``--disable-pytest-warnings`` to explicitly disable the warnings summary (:issue:`1668`).
+
+* Make ImportError during collection more explicit by reminding
+ the user to check the name of the test module/package(s) (:issue:`1426`).
+ Thanks :user:`omarkohl` for the complete PR (:pull:`1520`).
+
+* Add ``build/`` and ``dist/`` to the default ``--norecursedirs`` list. Thanks
+ :user:`mikofski` for the report and :user:`tomviner` for the PR (:issue:`1544`).
+
+* ``pytest.raises`` in the context manager form accepts a custom
+ ``message`` to raise when no exception occurred.
+ Thanks :user:`palaviv` for the complete PR (:pull:`1616`).
+
+* ``conftest.py`` files now benefit from assertion rewriting; previously it
+ was only available for test modules. Thanks :user:`flub`, :user:`sober7` and
+ :user:`nicoddemus` for the PR (:issue:`1619`).
+
+* Text documents without any doctests no longer appear as "skipped".
+ Thanks :user:`graingert` for reporting and providing a full PR (:pull:`1580`).
+
+* Ensure that a module within a namespace package can be found when it
+ is specified on the command line together with the ``--pyargs``
+ option. Thanks to :user:`taschini` for the PR (:pull:`1597`).
+
+* Always include full assertion explanation during assertion rewriting. The previous behaviour was hiding
+ sub-expressions that happened to be ``False``, assuming this was redundant information.
+ Thanks :user:`bagerard` for reporting (:issue:`1503`). Thanks to :user:`davehunt` and
+ :user:`tomviner` for the PR.
+
+* ``OptionGroup.addoption()`` now checks if option names were already
+ added before, to make it easier to track down issues like :issue:`1618`.
+ Before, you only got exceptions later from the ``argparse`` library,
+ giving no clue about the actual reason for double-added options.
+
+* ``yield``-based tests are considered deprecated and will be removed in pytest-4.0.
+ Thanks :user:`nicoddemus` for the PR.
+
+* ``[pytest]`` sections in ``setup.cfg`` files should now be named ``[tool:pytest]``
+ to avoid conflicts with other distutils commands (see :pull:`567`). ``[pytest]`` sections in
+ ``pytest.ini`` or ``tox.ini`` files are supported and unchanged.
+ Thanks :user:`nicoddemus` for the PR.
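+
+ For example, in ``setup.cfg`` (the option shown is only illustrative):
+
+ .. code-block:: ini
+
+     [tool:pytest]
+     addopts = -ra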
+
+* Using ``pytest_funcarg__`` prefix to declare fixtures is considered deprecated and will be
+ removed in pytest-4.0 (:pull:`1684`).
+ Thanks :user:`nicoddemus` for the PR.
+
+* Passing a command-line string to ``pytest.main()`` is considered deprecated and scheduled
+ for removal in pytest-4.0. It is recommended to pass a list of arguments instead (:pull:`1723`).
+
+* Rename ``getfuncargvalue`` to ``getfixturevalue``. ``getfuncargvalue`` is
+ still present but is now considered deprecated. Thanks to :user:`RedBeardCode` and :user:`tomviner`
+ for the PR (:pull:`1626`).
+
+* ``optparse`` type usage now triggers DeprecationWarnings (:issue:`1740`).
+
+* ``optparse`` backward compatibility supports float/complex types (:issue:`457`).
+
+* Refined logic for determining the ``rootdir``, considering only valid
+ paths which fixes a number of issues: :issue:`1594`, :issue:`1435` and :issue:`1471`.
+ Updated the documentation according to current behavior. Thanks to
+ :user:`blueyed`, :user:`davehunt` and :user:`matthiasha` for the PR.
+
+* Better message in case of not using parametrized variable (see :issue:`1539`).
+ Thanks to :user:`tramwaj29` for the PR.
+
+* Updated docstrings with a more uniform style.
+
+* Add stderr write for ``pytest.exit(msg)`` during startup. Previously the message was never shown.
+ Thanks :user:`BeyondEvil` for reporting :issue:`1210`. Thanks to :user:`jgsonesen` and
+ :user:`tomviner` for the PR.
+
+* No longer display the incorrect test deselection reason (:issue:`1372`).
+ Thanks :user:`ronnypfannschmidt` for the PR.
+
+* The ``--resultlog`` command line option has been deprecated: it is little used
+ and there are more modern and better alternatives (see :issue:`830`).
+ Thanks :user:`nicoddemus` for the PR.
+
+* Improve error message with fixture lookup errors: add an 'E' to the first
+ line and '>' to the rest. Fixes :issue:`717`. Thanks :user:`blueyed` for reporting and
+ a PR, :user:`eolo999` for the initial PR and :user:`tomviner` for his guidance during
+ EuroPython2016 sprint.
+
+
+**Bug Fixes**
+
+* Parametrize now correctly handles duplicated test ids.
+
+* Fix internal error issue when the ``method`` argument is missing for
+ ``teardown_method()`` (:issue:`1605`).
+
+* Fix exception visualization in case the current working directory (CWD) gets
+ deleted during testing (:issue:`1235`). Thanks :user:`bukzor` for reporting. PR by
+ :user:`marscher`.
+
+* Improve test output for logical expression with brackets (:issue:`925`).
+ Thanks :user:`DRMacIver` for reporting and :user:`RedBeardCode` for the PR.
+
+* Create correct diff for strings ending with newlines (:issue:`1553`).
+ Thanks :user:`Vogtinator` for reporting and :user:`RedBeardCode` and
+ :user:`tomviner` for the PR.
+
+* ``ConftestImportFailure`` now shows the traceback making it easier to
+ identify bugs in ``conftest.py`` files (:pull:`1516`). Thanks :user:`txomon` for
+ the PR.
+
+* Text documents without any doctests no longer appear as "skipped".
+ Thanks :user:`graingert` for reporting and providing a full PR (:pull:`1580`).
+
+* Fixed collection of classes with custom ``__new__`` method.
+ Fixes :issue:`1579`. Thanks to :user:`Stranger6667` for the PR.
+
+* Fixed scope overriding inside metafunc.parametrize (:issue:`634`).
+ Thanks to :user:`Stranger6667` for the PR.
+
+* Fixed the total tests tally in junit xml output (:pull:`1798`).
+ Thanks to :user:`cboelsen` for the PR.
+
+* Fixed off-by-one error with lines from ``request.node.warn``.
+ Thanks to :user:`blueyed` for the PR.
+
+
+2.9.2 (2016-05-31)
+==================
+
+**Bug Fixes**
+
+* fix :issue:`510`: skip tests where one parametrize dimension was empty.
+ Thanks Alex Stapleton for the report and :user:`RonnyPfannschmidt` for the PR.
+
+* Fix ``xfail`` not working with the ``condition`` keyword argument.
+ Thanks :user:`astraw38` for reporting the issue (:issue:`1496`) and :user:`tomviner`
+ for the PR (:pull:`1524`).
+
+* Fix win32 path issue when passing a custom config file with an absolute path
+ in ``pytest.main("-c your_absolute_path")``.
+
+* Fix maximum recursion depth detection when raised error class is not aware
+ of unicode/encoded bytes.
+ Thanks :user:`prusse-martin` for the PR (:pull:`1506`).
+
+* Fix ``pytest.mark.skip`` mark when used in strict mode.
+ Thanks :user:`pquentin` for the PR and :user:`RonnyPfannschmidt` for
+ showing how to fix the bug.
+
+* Minor improvements and fixes to the documentation.
+ Thanks :user:`omarkohl` for the PR.
+
+* Fix ``--fixtures`` to show all fixture definitions as opposed to just
+ one per fixture name.
+ Thanks to :user:`hackebrot` for the PR.
+
+
+2.9.1 (2016-03-17)
+==================
+
+**Bug Fixes**
+
+* Improve error message when a plugin fails to load.
+ Thanks :user:`nicoddemus` for the PR.
+
+* Fix (:issue:`1178`):
+ ``pytest.fail`` with non-ascii characters raises an internal pytest error.
+ Thanks :user:`nicoddemus` for the PR.
+
+* Fix (:issue:`469`): junit parses report.nodeid incorrectly, when params IDs
+ contain ``::``. Thanks :user:`tomviner` for the PR (:pull:`1431`).
+
+* Fix (:issue:`578`): SyntaxErrors
+ containing non-ascii lines at the point of failure generated an internal
+ py.test error.
+ Thanks :user:`asottile` for the report and :user:`nicoddemus` for the PR.
+
+* Fix (:issue:`1437`): When passing in a bytestring regex pattern to parametrize,
+ attempt to decode it as utf-8, ignoring errors.
+
+* Fix (:issue:`649`): parametrized test nodes cannot be specified to run on the command line.
+
+* Fix (:issue:`138`): better reporting for python 3.3+ chained exceptions.
+
+
+2.9.0 (2016-02-29)
+==================
+
+**New Features**
+
+* New ``pytest.mark.skip`` mark, which unconditionally skips marked tests.
+ Thanks :user:`MichaelAquilina` for the complete PR (:pull:`1040`).
+
+* ``--doctest-glob`` may now be passed multiple times in the command-line.
+ Thanks :user:`jab` and :user:`nicoddemus` for the PR.
+
+* New ``-rp`` and ``-rP`` reporting options give the summary and full output
+ of passing tests, respectively. Thanks to :user:`codewarrior0` for the PR.
+
+* ``pytest.mark.xfail`` now has a ``strict`` option, which makes ``XPASS``
+ tests fail the test suite (defaulting to ``False``). There's also an
+ ``xfail_strict`` ini option that can be used to configure it project-wide.
+ Thanks :user:`rabbbit` for the request and :user:`nicoddemus` for the PR (:pull:`1355`).
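+
+ A minimal sketch (the failing assertion is illustrative):
+
+ .. code-block:: python
+
+     import pytest
+
+     @pytest.mark.xfail(strict=True, reason="known bug used as an example")
+     def test_known_bug():
+         # If this unexpectedly passed, strict mode would fail the suite.
+         assert 1 + 1 == 3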
+
+* ``Parser.addini`` now supports options of type ``bool``.
+ Thanks :user:`nicoddemus` for the PR.
+
+* New ``ALLOW_BYTES`` doctest option. This strips ``b`` prefixes from byte strings
+ in doctest output (similar to ``ALLOW_UNICODE``).
+ Thanks :user:`jaraco` for the request and :user:`nicoddemus` for the PR (:pull:`1287`).
+
+* Give a hint on ``KeyboardInterrupt`` to use the ``--fulltrace`` option to show the errors.
+ Fixes :issue:`1366`.
+ Thanks to :user:`hpk42` for the report and :user:`RonnyPfannschmidt` for the PR.
+
+* Catch ``IndexError`` exceptions when getting exception source location.
+ Fixes a pytest internal error for dynamically generated code (fixtures and tests)
+ where source lines are fake by intention.
+
+**Changes**
+
+* **Important**: `py.code <https://pylib.readthedocs.io/en/stable/code.html>`_ has been
+ merged into the ``pytest`` repository as ``pytest._code``. This decision
+ was made because ``py.code`` had very few uses outside ``pytest`` and the
+ fact that it was in a different repository made it difficult to fix bugs on
+ its code in a timely manner. The team hopes with this to be able to better
+ refactor out and improve that code.
+ This change shouldn't affect users, but it is useful to be aware of it
+ in case you encounter any strange behavior.
+
+ Keep in mind that the code for ``pytest._code`` is **private** and
+ **experimental**, so you definitely should not import it explicitly!
+
+ Please note that the original ``py.code`` is still available in
+ `pylib <https://pylib.readthedocs.io>`_.
+
+* ``pytest_enter_pdb`` now optionally receives the pytest config object.
+ Thanks :user:`nicoddemus` for the PR.
+
+* Removed code and documentation for Python 2.5 or lower versions,
+ including removal of the obsolete ``_pytest.assertion.oldinterpret`` module.
+ Thanks :user:`nicoddemus` for the PR (:pull:`1226`).
+
+* Comparisons now always show up in full when ``CI`` or ``BUILD_NUMBER`` is
+ found in the environment, even when ``-vv`` isn't used.
+ Thanks :user:`The-Compiler` for the PR.
+
+* ``--lf`` and ``--ff`` now support long names: ``--last-failed`` and
+ ``--failed-first`` respectively.
+ Thanks :user:`MichaelAquilina` for the PR.
+
+* Added expected exceptions to ``pytest.raises`` fail message.
+
+* Collection only displays progress ("collecting X items") when in a terminal.
+ This avoids cluttering the output when using ``--color=yes`` to obtain
+ colors in CI integration systems (:issue:`1397`).
+
+**Bug Fixes**
+
+* The ``-s`` and ``-c`` options should now work under ``xdist``;
+ ``Config.fromdictargs`` now represents its input much more faithfully.
+ Thanks to :user:`bukzor` for the complete PR (:issue:`680`).
+
+* Fix (:issue:`1290`): support Python 3.5's ``@`` operator in assertion rewriting.
+ Thanks :user:`Shinkenjoe` for report with test case and :user:`tomviner` for the PR.
+
+* Fix formatting utf-8 explanation messages (:issue:`1379`).
+ Thanks :user:`biern` for the PR.
+
+* Fix :ref:`traceback style docs <how-to-modifying-python-tb-printing>` to describe all of the available options
+ (auto/long/short/line/native/no), with ``auto`` being the default since v2.6.
+ Thanks :user:`hackebrot` for the PR.
+
+* Fix (:issue:`1422`): junit record_xml_property doesn't allow multiple records
+ with same name.
+
+
+2.8.7 (2016-01-24)
+==================
+
+- fix #1338: use predictable object resolution for monkeypatch
+
+2.8.6 (2016-01-21)
+==================
+
+- fix #1259: allow for double nodeids in junitxml;
+ this was a regression breaking plugin combinations
+ like pytest-pep8 + pytest-flakes.
+
+- Workaround for exception that occurs in pyreadline when using
+ ``--pdb`` with standard I/O capture enabled.
+ Thanks Erik M. Bray for the PR.
+
+- fix #900: Better error message in case the target of a ``monkeypatch`` call
+ raises an ``ImportError``.
+
+- fix #1292: monkeypatch calls (setattr, setenv, etc.) are now O(1).
+ Thanks David R. MacIver for the report and Bruno Oliveira for the PR.
+
+- fix #1223: captured stdout and stderr are now properly displayed before
+ entering pdb when ``--pdb`` is used instead of being thrown away.
+ Thanks Cal Leeming for the PR.
+
+- fix #1305: pytest warnings emitted during ``pytest_terminal_summary`` are now
+ properly displayed.
+ Thanks Ionel Maries Cristian for the report and Bruno Oliveira for the PR.
+
+- fix #628: fixed internal UnicodeDecodeError when doctests contain unicode.
+ Thanks Jason R. Coombs for the report and Bruno Oliveira for the PR.
+
+- fix #1334: Add captured stdout to jUnit XML report on setup error.
+ Thanks Georgy Dyuldin for the PR.
+
+
+2.8.5 (2015-12-11)
+==================
+
+- fix #1243: fixed issue where class attributes injected during collection could break pytest.
+ PR by Alexei Kozlenok, thanks Ronny Pfannschmidt and Bruno Oliveira for the review and help.
+
+- fix #1074: precompute junitxml chunks instead of storing the whole tree in objects.
+ Thanks Bruno Oliveira for the report and Ronny Pfannschmidt for the PR.
+
+- fix #1238: fix ``pytest.deprecated_call()`` receiving multiple arguments
+ (Regression introduced in 2.8.4). Thanks Alex Gaynor for the report and
+ Bruno Oliveira for the PR.
+
+
+2.8.4 (2015-12-06)
+==================
+
+- fix #1190: ``deprecated_call()`` now works when the deprecated
+ function has been already called by another test in the same
+ module. Thanks Mikhail Chernykh for the report and Bruno Oliveira for the
+ PR.
+
+- fix #1198: ``--pastebin`` option now works on Python 3. Thanks
+ Mehdy Khoshnoody for the PR.
+
+- fix #1219: ``--pastebin`` now works correctly when captured output contains
+ non-ascii characters. Thanks Bruno Oliveira for the PR.
+
+- fix #1204: another error when collecting with a nasty __getattr__().
+ Thanks Florian Bruhin for the PR.
+
+- fix the summary printed when no tests were run.
+ Thanks Florian Bruhin for the PR.
+
+- fix #1185: ensure MANIFEST.in exactly matches what should go into a sdist.
+
+- a number of documentation modernizations wrt good practices.
+ Thanks Bruno Oliveira for the PR.
+
+2.8.3 (2015-11-18)
+==================
+
+- fix #1169: add __name__ attribute to testcases in TestCaseFunction to
+ support the @unittest.skip decorator on functions and methods.
+ Thanks Lee Kamentsky for the PR.
+
+- fix #1035: collecting tests if test module level obj has __getattr__().
+ Thanks Suor for the report and Bruno Oliveira / Tom Viner for the PR.
+
+- fix #331: don't collect tests if their failure cannot be reported correctly,
+ e.g. when they are a callable instance of a class.
+
+- fix #1133: fixed internal error when filtering tracebacks where one entry
+ belongs to a file which is no longer available.
+ Thanks Bruno Oliveira for the PR.
+
+- enhancement made to highlight in red the name of the failing tests so
+ they stand out in the output.
+ Thanks Gabriel Reis for the PR.
+
+- add more talks to the documentation
+- extend documentation on the --ignore cli option
+- use pytest-runner for setuptools integration
+- minor fixes for interaction with OS X El Capitan
+ system integrity protection (thanks Florian)
+
+
+2.8.2 (2015-10-07)
+==================
+
+- fix #1085: proper handling of encoding errors when passing encoded byte
+ strings to pytest.parametrize in Python 2.
+ Thanks Themanwithoutaplan for the report and Bruno Oliveira for the PR.
+
+- fix #1087: handling SystemError when passing empty byte strings to
+ pytest.parametrize in Python 3.
+ Thanks Paul Kehrer for the report and Bruno Oliveira for the PR.
+
+- fix #995: fixed internal error when filtering tracebacks where one entry
+ was generated by an exec() statement.
+ Thanks Daniel Hahler, Ashley C Straw, Philippe Gauthier and Pavel Savchenko
+ for contributing and Bruno Oliveira for the PR.
+
+- fix #1100 and #1057: errors when using autouse fixtures and doctest modules.
+ Thanks Sergey B Kirpichev and Vital Kudzelka for contributing and Bruno
+ Oliveira for the PR.
+
+2.8.1 (2015-09-29)
+==================
+
+- fix #1034: Add missing nodeid on pytest_logwarning call in
+ addhook. Thanks Simon Gomizelj for the PR.
+
+- 'deprecated_call' is now only satisfied with a DeprecationWarning or
+ PendingDeprecationWarning. Before 2.8.0, it accepted any warning, and 2.8.0
+ made it accept only DeprecationWarning (but not PendingDeprecationWarning).
+ Thanks Alex Gaynor for the issue and Eric Hunsberger for the PR.
+
+- fix issue #1073: avoid calling __getattr__ on potential plugin objects.
+ This fixes an incompatibility with pytest-django. Thanks Andreas Pelme,
+ Bruno Oliveira and Ronny Pfannschmidt for contributing and Holger Krekel
+ for the fix.
+
+- Fix issue #704: handle versionconflict during plugin loading more
+ gracefully. Thanks Bruno Oliveira for the PR.
+
+- Fix issue #1064: "--junitxml" regression when used with the
+ "pytest-xdist" plugin, with test reports being assigned to the wrong tests.
+ Thanks Daniel Grunwald for the report and Bruno Oliveira for the PR.
+
+- (experimental) adopt a more SEMVER-style versioning and change the meaning of
+ the master branch in the git repo: "master" branch now keeps the bug fixes, changes
+ aimed for micro releases. "features" branch will only be released
+ with minor or major pytest releases.
+
+- Fix issue #766 by removing documentation references to distutils.
+ Thanks Russel Winder.
+
+- Fix issue #1030: now byte-strings are escaped to produce item node ids
+ to make them always serializable.
+ Thanks Andy Freeland for the report and Bruno Oliveira for the PR.
+
+- Python 2: if unicode parametrized values are convertible to ascii, their
+ ascii representation is used for the node id.
+
+- Fix issue #411: Add __eq__ method to assertion comparison example.
+ Thanks Ben Webb.
+
+- Fix issue #653: deprecated_call can be used as context manager.
+
+- fix issue 877: properly handle assertion explanations with non-ascii repr.
+ Thanks Mathieu Agopian for the report and Ronny Pfannschmidt for the PR.
+
+- fix issue 1029: transform errors when writing cache values into pytest-warnings.
+
+2.8.0 (2015-09-18)
+==================
+
+- new ``--lf`` and ``--ff`` options to run only the last failing tests or
+ "failing tests first" from the last run. This functionality is provided
+ through porting the formerly external pytest-cache plugin into pytest core.
+ BACKWARD INCOMPAT: if you used pytest-cache's functionality to persist
+ data between test runs be aware that we don't serialize sets anymore.
+ Thanks Ronny Pfannschmidt for most of the merging work.
+
+- "-r" option now accepts "a" to include all possible reports, similar
+ to passing "fEsxXw" explicitly (issue960).
+ Thanks Abhijeet Kasurde for the PR.
+
+- avoid python3.5 deprecation warnings by introducing version
+ specific inspection helpers, thanks Michael Droettboom.
+
+- fix issue562: @nose.tools.istest now fully respected.
+
+- fix issue934: when string comparison fails and a diff is too large to display
+ without passing -vv, still show a few lines of the diff.
+ Thanks Florian Bruhin for the report and Bruno Oliveira for the PR.
+
+- fix issue736: Fix a bug where fixture params would be discarded when combined
+ with parametrization markers.
+ Thanks to Markus Unterwaditzer for the PR.
+
+- fix issue710: introduce ALLOW_UNICODE doctest option: when enabled, the
+ ``u`` prefix is stripped from unicode strings in expected doctest output. This
+ allows doctests which use unicode to run in Python 2 and 3 unchanged.
+ Thanks Jason R. Coombs for the report and Bruno Oliveira for the PR.
+
+- parametrize now also generates meaningful test IDs for enum, regex and class
+ objects (as opposed to class instances).
+ Thanks to Florian Bruhin for the PR.
+
+- Add 'warns' to assert that warnings are thrown (like 'raises').
+ Thanks to Eric Hunsberger for the PR.
+
+- Fix issue683: Do not apply an already applied mark. Thanks ojake for the PR.
+
+- Deal with capturing failures better so fewer exceptions get lost to
+ /dev/null. Thanks David Szotten for the PR.
+
+- fix issue730: deprecate and warn about the --genscript option.
+ Thanks Ronny Pfannschmidt for the report and Christian Pommranz for the PR.
+
+- fix issue751: multiple parametrize with ids bug if it parametrizes class with
+ two or more test methods. Thanks Sergey Chipiga for reporting and Jan
+ Bednarik for PR.
+
+- fix issue82: avoid loading conftest files from setup.cfg/pytest.ini/tox.ini
+ files and upwards by default (--confcutdir can still be set to override this).
+ Thanks Bruno Oliveira for the PR.
+
+- fix issue768: docstrings found in python modules were not setting up session
+ fixtures. Thanks Jason R. Coombs for reporting and Bruno Oliveira for the PR.
+
+- added ``tmpdir_factory``, a session-scoped fixture that can be used to create
+ directories under the base temporary directory. Previously this object was
+ installed as a ``_tmpdirhandler`` attribute of the ``config`` object, but now it
+ is part of the official API and using ``config._tmpdirhandler`` is
+ deprecated.
+ Thanks Bruno Oliveira for the PR.
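+
+ A sketch of a session-scoped fixture using it (the names are illustrative):
+
+ .. code-block:: python
+
+     import pytest
+
+     @pytest.fixture(scope="session")
+     def shared_datadir(tmpdir_factory):
+         # Created once per session under the base temporary directory.
+         d = tmpdir_factory.mktemp("shared")
+         d.join("notes.txt").write("hello")
+         return d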
+
+- fix issue808: pytest's internal assertion rewrite hook now implements the
+ optional :pep:`302` get_data API so tests can access data files next to them.
+ Thanks xmo-odoo for request and example and Bruno Oliveira for
+ the PR.
+
+- rootdir and inifile are now displayed during usage errors to help
+ users diagnose problems such as unexpected ini files which add
+ unknown options being picked up by pytest. Thanks to Pavel Savchenko for
+ bringing the problem to attention in #821 and Bruno Oliveira for the PR.
+
+- The summary bar is now colored yellow for warning
+ situations such as: all tests either were skipped or xpass/xfailed,
+ or no tests were run at all (this is a partial fix for issue500).
+ Thanks Eric Siegerman.
+
+- fix issue812: pytest now exits with status code 5 in situations where no
+ tests were run at all, such as when the directory given on the command line
+ does not contain any tests or when a command line option filters out all
+ tests (``-k`` for example).
+ Thanks Eric Siegerman (issue812) and Bruno Oliveira for the PR.
+
+- New ``testpaths`` ini option: list of directories to search for tests
+ when executing pytest from the root directory. This can be used
+ to speed up test collection when a project has well specified directories
+ for tests, being usually more practical than configuring norecursedirs for
+ all directories that do not contain tests.
+ Thanks to Adrian for idea (#694) and Bruno Oliveira for the PR.
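+
+ For example, in ``pytest.ini`` (the directory names are illustrative):
+
+ .. code-block:: ini
+
+     [pytest]
+     testpaths = tests integration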
+
+- fix issue713: JUnit XML reports for doctest failures.
+ Thanks Punyashloka Biswal.
+
+- fix issue970: internal pytest warnings now appear as "pytest-warnings" in
+ the terminal instead of "warnings", so it is clear for users that those
+ warnings are from pytest and not from the builtin "warnings" module.
+ Thanks Bruno Oliveira.
+
+- Include setup and teardown in junitxml test durations.
+ Thanks Janne Vanhala.
+
+- fix issue735: assertion failures on debug versions of Python 3.4+
+
+- new option ``--import-mode`` to allow changing the test module import
+ behaviour to append to sys.path instead of prepending. This makes it easier
+ to run test modules against installed versions of a package even if the
+ package under test has the same import root. In this example::
+
+ testing/__init__.py
+ testing/test_pkg_under_test.py
+ pkg_under_test/
+
+ the tests will run against the installed version
+ of pkg_under_test when ``--import-mode=append`` is used whereas
+ by default they would always pick up the local version. Thanks Holger Krekel.
+
+- pytester: add method ``TmpTestdir.delete_loaded_modules()``, and call it
+ from ``inline_run()`` to allow temporary modules to be reloaded.
+ Thanks Eduardo Schettino.
+
+- internally refactor pluginmanager API and code so that there
+ is a clear distinction between a pytest-agnostic rather simple
+ pluginmanager and the PytestPluginManager which adds a lot of
+ behaviour, including the handling of local conftest files.
+ In terms of documented methods this is a backward compatible
+ change but it might still break 3rd party plugins which relied on
+ internal details such as the pluginmanager.add_shutdown() API.
+ Thanks Holger Krekel.
+
+- pluginmanagement: introduce ``pytest.hookimpl`` and
+ ``pytest.hookspec`` decorators for setting impl/spec
+ specific parameters. This substitutes the previous
+ now deprecated use of ``pytest.mark`` which is meant to
+ contain markers for test functions only.
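+
+ A sketch of marking hook implementations with the new decorator (reversing
+ the collection order is only an example):
+
+ .. code-block:: python
+
+     # conftest.py or a plugin module
+     import pytest
+
+     @pytest.hookimpl(tryfirst=True)
+     def pytest_collection_modifyitems(items):
+         # Runs before other implementations of this hook.
+         items.reverse()
+
+     @pytest.hookimpl(hookwrapper=True)
+     def pytest_runtest_call(item):
+         # Code before the yield runs first; ``outcome`` carries the result
+         # once the other implementations have run.
+         outcome = yield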
+
+- write/refine docs for "writing plugins" which now have their
+ own page and are separate from the "using/installing plugins" page.
+
+- fix issue732: properly unregister plugins from any hook calling
+ sites, allowing temporary plugins during test execution.
+
+- deprecate and warn about ``__multicall__`` argument in hook
+ implementations. Use the ``hookwrapper`` mechanism instead already
+ introduced with pytest-2.7.
+
+- speed up pytest's own test suite considerably by using inprocess
+ tests by default (testrun can be modified with --runpytest=subprocess
+ to create subprocesses in many places instead). The main
+ API to run pytest in a test is "runpytest()"; use "runpytest_subprocess"
+ or "runpytest_inprocess" if you need a particular way of running
+ the test. In all cases you get back a RunResult but the inprocess
+ one will also have a "reprec" attribute with the recorded events/reports.
+
+- fix monkeypatch.setattr("x.y", raising=False) to actually not raise
+ if "y" is not a pre-existing attribute. Thanks Florian Bruhin.
+
+- fix issue741: make running output from testdir.run copy/pasteable.
+ Thanks Bruno Oliveira.
+
+- add a new ``--noconftest`` argument which ignores all ``conftest.py`` files.
+
+- add ``file`` and ``line`` attributes to JUnit-XML output.
+
+- fix issue890: changed extension of all documentation files from ``txt`` to
+ ``rst``. Thanks to Abhijeet for the PR.
+
+- fix issue714: add ability to apply indirect=True parameter on particular argnames.
+ Thanks Elizaveta239.
+
+- fix issue957: "# doctest: SKIP" option will now register doctests as SKIPPED
+ rather than PASSED.
+ Thanks Thomas Grainger for the report and Bruno Oliveira for the PR.
+
+- issue951: add new record_xml_property fixture, that supports logging
+ additional information on xml output. Thanks David Diaz for the PR.
+
+- issue949: paths after normal options (for example ``-s``, ``-v``, etc) are now
+ properly used to discover ``rootdir`` and ``ini`` files.
+ Thanks Peter Lauri for the report and Bruno Oliveira for the PR.
+
+2.7.3 (2015-09-15)
+==================
+
+- Allow 'dev', 'rc', or other non-integer version strings in ``importorskip``.
+ Thanks to Eric Hunsberger for the PR.
+
+- fix issue856: consider --color parameter in all outputs (for example
+ --fixtures). Thanks Barney Gale for the report and Bruno Oliveira for the PR.
+
+- fix issue855: passing str objects as ``plugins`` argument to pytest.main
+ is now interpreted as a module name to be imported and registered as a
+ plugin, instead of silently having no effect.
+ Thanks xmo-odoo for the report and Bruno Oliveira for the PR.
+
+- fix issue744: fix for ast.Call changes in Python 3.5+. Thanks
+ Guido van Rossum, Matthias Bussonnier, Stefan Zimmermann and
+ Thomas Kluyver.
+
+- fix issue842: applying markers in classes no longer propagates these markers
+ to superclasses which also have markers.
+ Thanks xmo-odoo for the report and Bruno Oliveira for the PR.
+
+- preserve warning functions after call to pytest.deprecated_call. Thanks
+ Pieter Mulder for PR.
+
+- fix issue854: autouse yield_fixtures defined as class members of
+ unittest.TestCase subclasses now work as expected.
+ Thanks xmo-odoo for the report and Bruno Oliveira for the PR.
+
+- fix issue833: --fixtures now shows all fixtures of collected test files, instead of just the
+ fixtures declared on the first one.
+ Thanks Florian Bruhin for reporting and Bruno Oliveira for the PR.
+
+- fix issue863: skipped tests now report the correct reason when a skip/xfail
+ condition is met when using multiple markers.
+ Thanks Raphael Pierzina for reporting and Bruno Oliveira for the PR.
+
+- optimized tmpdir fixture initialization, which should make test sessions
+ faster (especially when using pytest-xdist). The only visible effect
+ is that now pytest uses a subdirectory in the $TEMP directory for all
+ directories created by this fixture (defaults to $TEMP/pytest-$USER).
+ Thanks Bruno Oliveira for the PR.
+
+2.7.2 (2015-06-23)
+==================
+
+- fix issue767: pytest.raises value attribute does not contain the exception
+ instance on Python 2.6. Thanks Eric Siegerman for providing the test
+ case and Bruno Oliveira for PR.
+
+- Automatically create directory for junitxml and results log.
+ Thanks Aron Curzon.
+
+- fix issue713: JUnit XML reports for doctest failures.
+ Thanks Punyashloka Biswal.
+
+- fix issue735: assertion failures on debug versions of Python 3.4+
+ Thanks Benjamin Peterson.
+
+- fix issue114: skipif marker reports to internal skipping plugin;
+ Thanks Floris Bruynooghe for reporting and Bruno Oliveira for the PR.
+
+- fix issue748: unittest.SkipTest reports to internal pytest unittest plugin.
+ Thanks Thomas De Schampheleire for reporting and Bruno Oliveira for the PR.
+
+- fix issue718: failed to create representation of sets containing unsortable
+ elements in python 2. Thanks Edison Gustavo Muenz.
+
+- fix issue756, fix issue752 (and similar issues): depend on py-1.4.29
+ which has a refined algorithm for traceback generation.
+
+
+2.7.1 (2015-05-19)
+==================
+
+- fix issue731: do not get confused by the braces which may be present
+ and unbalanced in an object's repr while collapsing False
+ explanations. Thanks Carl Meyer for the report and test case.
+
+- fix issue553: properly handling inspect.getsourcelines failures in
+ FixtureLookupError which would lead to an internal error,
+ obfuscating the original problem. Thanks talljosh for initial
+ diagnose/patch and Bruno Oliveira for final patch.
+
+- fix issue660: properly report scope-mismatch-access errors
+ independently from ordering of fixture arguments. Also
+ avoid the pytest internal traceback which does not provide
+ information to the user. Thanks Holger Krekel.
+
+- streamlined and documented release process. Also all versions
+ (in setup.py and documentation generation) are now read
+ from _pytest/__init__.py. Thanks Holger Krekel.
+
+- fixed docs to remove the notion that yield-fixtures are experimental.
+ They are here to stay :) Thanks Bruno Oliveira.
+
+- Support building wheels by using environment markers for the
+ requirements. Thanks Ionel Maries Cristian.
+
+- fixed regression relative to 2.6.4 which surfaced e.g. as lost stdout capture printing
+ when tests raised SystemExit. Thanks Holger Krekel.
+
+- reintroduced _pytest fixture of the pytester plugin which is used
+ at least by pytest-xdist.
+
+2.7.0 (2015-03-26)
+==================
+
+- fix issue435: make reload() work when assert rewriting is active.
+ Thanks Daniel Hahler.
+
+- fix issue616: conftest.py files and their contained fixtures are now
+ properly considered for visibility, independently from the exact
+ current working directory and test arguments that are used.
+ Many thanks to Eric Siegerman and his PR235 which contains
+ systematic tests for conftest visibility and now passes.
+ This change also introduces the concept of a ``rootdir`` which
+ is printed as a new pytest header and documented in the pytest
+ customize web page.
+
+- change reporting of "diverted" tests, i.e. tests that are collected
+ in one file but actually come from another (e.g. when tests in a test class
+ come from a base class in a different file). We now show the nodeid
+ and indicate via a postfix the other file.
+
+- add ability to set command line options by environment variable PYTEST_ADDOPTS.
+
+- added documentation on the new pytest-dev teams on bitbucket and
+ github. See https://pytest.org/en/stable/contributing.html .
+ Thanks to Anatoly for pushing and initial work on this.
+
+- fix issue650: new option ``--doctest-ignore-import-errors`` which
+ will turn import errors in doctests into skips. Thanks Charles Cloud
+ for the complete PR.
+
+- fix issue655: work around different ways that cause python2/3
+ to leak sys.exc_info into fixtures/tests causing failures in 3rd party code
+
+- fix issue615: assertion rewriting did not correctly escape % signs
+ when formatting boolean operations, which tripped over mixing
+ booleans with modulo operators. Thanks to Tom Viner for the report,
+ triaging and fix.
+
+- implement issue351: add ability to specify parametrize ids as a callable
+ to generate custom test ids. Thanks Brianna Laugher for the idea and
+ implementation.
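+
+  A short sketch (the id function and values are illustrative)::
+
+      import pytest
+
+      def idfn(val):
+          # turn each parameter value into a readable test id
+          return "case-{0}".format(val)
+
+      @pytest.mark.parametrize("value", [1, 2, 3], ids=idfn)
+      def test_positive(value):
+          assert value > 0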
+
+- introduce and document new hookwrapper mechanism useful for plugins
+ which want to wrap the execution of certain hooks for their purposes.
+ This supersedes the undocumented ``__multicall__`` protocol which
+ pytest itself and some external plugins use. Note that pytest-2.8
+ is scheduled to drop supporting the old ``__multicall__``
+ and only support the hookwrapper protocol.
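+
+  A conftest.py sketch of a hook wrapper, written with the later
+  ``pytest.hookimpl`` spelling (the timing logic is illustrative)::
+
+      import time
+
+      import pytest
+
+      @pytest.hookimpl(hookwrapper=True)
+      def pytest_runtest_call(item):
+          start = time.time()   # runs before the other implementations
+          yield                 # the wrapped hook implementations run here
+          print("%s took %.3fs" % (item.nodeid, time.time() - start))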
+
+- majorly speed up invocation of plugin hooks
+
+- use hookwrapper mechanism in builtin pytest plugins.
+
+- add a doctest ini option for doctest flags, thanks Holger Peters.
+
+- add note to docs that if you want to mark a parameter and the
+ parameter is a callable, you also need to pass in a reason to disambiguate
+ it from the "decorator" case. Thanks Tom Viner.
+
+- "python_classes" and "python_functions" options now support glob-patterns
+ for test discovery, as discussed in issue600. Thanks Ldiary Translations.
+
+- allow to override parametrized fixtures with non-parametrized ones and vice versa (bubenkoff).
+
+- fix issue463: raise specific error for 'parameterize' misspelling (pfctdayelise).
+
+- On failure, the ``sys.last_value``, ``sys.last_type`` and
+ ``sys.last_traceback`` are set, so that a user can inspect the error
+ via postmortem debugging (almarklein).
+
+2.6.4 (2014-10-24)
+==================
+
+- Improve assertion failure reporting on iterables, by using ndiff and
+ pprint.
+
+- removed outdated Japanese docs from source tree.
+
+- docs for "pytest_addhooks" hook. Thanks Bruno Oliveira.
+
+- updated plugin index docs. Thanks Bruno Oliveira.
+
+- fix issue557: with "-k" we only allow the old-style "-" for negation
+  at the beginning of strings, and even that is deprecated. Use "not" instead.
+  This makes it possible to select parametrized tests where "-" appears in a parameter.
+
+- fix issue604: Escape % character in the assertion message.
+
+- fix issue620: add explanation in the --genscript target about what
+ the binary blob means. Thanks Dinu Gherman.
+
+- fix issue614: fixed pastebin support.
+
+
+2.6.3 (2014-09-24)
+==================
+
+- fix issue575: xunit-xml was reporting collection errors as failures
+ instead of errors, thanks Oleg Sinyavskiy.
+
+- fix issue582: fix setuptools example, thanks Laszlo Papp and Ronny
+ Pfannschmidt.
+
+- Fix infinite recursion bug when pickling capture.EncodedFile, thanks
+ Uwe Schmitt.
+
+- fix issue589: fix bad interaction with numpy and others when showing
+  exceptions. Check for the precise "maximum recursion depth exceeded" exception
+ instead of presuming any RuntimeError is that one (implemented in py
+ dep). Thanks Charles Cloud for analysing the issue.
+
+- fix conftest related fixture visibility issue: when running with a
+ CWD outside of a test package pytest would get fixture discovery wrong.
+ Thanks to Wolfgang Schnerring for figuring out a reproducible example.
+
+- Introduce pytest_enter_pdb hook (needed e.g. by pytest_timeout to cancel the
+ timeout when interactively entering pdb). Thanks Wolfgang Schnerring.
+
+- check xfail/skip also with non-python function test items. Thanks
+ Floris Bruynooghe.
+
+2.6.2 (2014-09-05)
+==================
+
+- Added function pytest.freeze_includes(), which makes it easy to embed
+ pytest into executables using tools like cx_freeze.
+ See docs for examples and rationale. Thanks Bruno Oliveira.
+
+- Improve assertion rewriting cache invalidation precision.
+
+- fixed issue561: adapt autouse fixture example for python3.
+
+- fixed issue453: assertion rewriting issue with __repr__ containing
+ "\n{", "\n}" and "\n~".
+
+- fix issue560: correctly display code if an "else:" or "finally:" is
+ followed by statements on the same line.
+
+- Fix example in monkeypatch documentation, thanks t-8ch.
+
+- fix issue572: correct tmpdir doc example for python3.
+
+- Do not mark as universal wheel because Python 2.6 is different from
+ other builds due to the extra argparse dependency. Fixes issue566.
+ Thanks sontek.
+
+- Implement issue549: user-provided assertion messages now no longer
+ replace the py.test introspection message but are shown in addition
+  to it.
+
+2.6.1 (2014-08-07)
+==================
+
+- No longer show line numbers in the --verbose output, the output is now
+ purely the nodeid. The line number is still shown in failure reports.
+ Thanks Floris Bruynooghe.
+
+- fix issue437 where assertion rewriting could cause pytest-xdist worker nodes
+ to collect different tests. Thanks Bruno Oliveira.
+
+- fix issue555: add "errors" attribute to capture-streams to satisfy
+ some distutils and possibly other code accessing sys.stdout.errors.
+
+- fix issue547 capsys/capfd also work when output capturing ("-s") is disabled.
+
+- address issue170: allow pytest.mark.xfail(...) to specify expected exceptions via
+ an optional "raises=EXC" argument where EXC can be a single exception
+ or a tuple of exception classes. Thanks David Mohr for the complete
+ PR.
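+
+  For example (the failing code is illustrative)::
+
+      import pytest
+
+      @pytest.mark.xfail(raises=ZeroDivisionError)
+      def test_divide_by_zero():
+          1 / 0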
+
+- fix integration of pytest with unittest.mock.patch decorator when
+ it uses the "new" argument. Thanks Nicolas Delaby for test and PR.
+
+- fix issue with detecting conftest files if the arguments contain
+ "::" node id specifications (copy pasted from "-v" output)
+
+- fix issue544 by only removing "@NUM" at the end of "::" separated parts
+ and if the part has a ".py" extension
+
+- don't use py.std import helper, rather import things directly.
+ Thanks Bruno Oliveira.
+
+2.6
+===
+
+- Cache exceptions from fixtures according to their scope (issue 467).
+
+- fix issue537: Avoid importing old assertion reinterpretation code by default.
+
+- fix issue364: shorten and enhance tracebacks representation by default.
+ The new "--tb=auto" option (default) will only display long tracebacks
+ for the first and last entry. You can get the old behaviour of printing
+ all entries as long entries with "--tb=long". Also short entries by
+ default are now printed very similarly to "--tb=native" ones.
+
+- fix issue514: teach assertion reinterpretation about private class attributes
+
+- change -v output to include full node IDs of tests. Users can copy
+ a node ID from a test run, including line number, and use it as a
+ positional argument in order to run only a single test.
+
+- fix issue 475: fail early and comprehensibly when calling
+  pytest.raises with a wrong exception type.
+
+- fix issue516: tell in getting-started about current dependencies.
+
+- cleanup setup.py a bit and specify supported versions. Thanks Jurko
+ Gospodnetic for the PR.
+
+- change XPASS colour to yellow rather than red when tests are run
+ with -v.
+
+- fix issue473: work around mock putting an unbound method into a class
+ dict when double-patching.
+
+- fix issue498: if a fixture finalizer fails, make sure that
+ the fixture is still invalidated.
+
+- fix issue453: the result of the pytest_assertrepr_compare hook now gets
+  its newlines escaped so that format_exception does not blow up.
+
+- internal new warning system: pytest will now produce warnings when
+ it detects oddities in your test collection or execution.
+ Warnings are ultimately sent to a new pytest_logwarning hook which is
+ currently only implemented by the terminal plugin which displays
+ warnings in the summary line and shows more details when -rw (report on
+ warnings) is specified.
+
+- change skips into warnings for test classes with an __init__ and
+ callables in test modules which look like a test but are not functions.
+
+- fix issue436: improved finding of initial conftest files from command
+ line arguments by using the result of parse_known_args rather than
+ the previous flaky heuristics. Thanks Marc Abramowitz for tests
+ and initial fixing approaches in this area.
+
+- fix issue #479: properly handle nose/unittest(2) SkipTest exceptions
+ during collection/loading of test modules. Thanks to Marc Schlaich
+ for the complete PR.
+
+- fix issue490: include pytest_load_initial_conftests in documentation
+ and improve docstring.
+
+- fix issue472: clarify that ``pytest.config.getvalue()`` cannot work
+ if it's triggered ahead of command line parsing.
+
+- merge PR123: improved integration with mock.patch decorator on tests.
+
+- fix issue412: messing with stdout/stderr FD-level streams is now
+ captured without crashes.
+
+- fix issue483: trial/py33 works now properly. Thanks Daniel Grana for PR.
+
+- improve example for pytest integration with "python setup.py test"
+ which now has a generic "-a" or "--pytest-args" option where you
+ can pass additional options as a quoted string. Thanks Trevor Bekolay.
+
+- simplified internal capturing mechanism and made it more robust
+ against tests or setups changing FD1/FD2, also better integrated
+ now with pytest.pdb() in single tests.
+
+- improvements to pytest's own test-suite leakage detection, courtesy of PRs
+ from Marc Abramowitz
+
+- fix issue492: avoid leak in test_writeorg. Thanks Marc Abramowitz.
+
+- fix issue493: don't run tests in doc directory with ``python setup.py test``
+ (use tox -e doctesting for that)
+
+- fix issue486: better reporting and handling of early conftest loading failures
+
+- some cleanup and simplification of internal conftest handling.
+
+- work a bit harder to break reference cycles when catching exceptions.
+ Thanks Jurko Gospodnetic.
+
+- fix issue443: fix skip examples to use proper comparison. Thanks Alex
+ Groenholm.
+
+- support nose-style ``__test__`` attribute on modules, classes and
+ functions, including unittest-style Classes. If set to False, the
+ test will not be collected.
+
+- fix issue512: show "<notset>" for arguments which might not be set
+ in monkeypatch plugin. Improves output in documentation.
+
+
+2.5.2 (2014-01-29)
+==================
+
+- fix issue409 -- better interoperate with cx_freeze by not
+ trying to import from collections.abc which causes problems
+ for py27/cx_freeze. Thanks Wolfgang L. for reporting and tracking it down.
+
+- fixed docs and code to use "pytest" instead of "py.test" almost everywhere.
+ Thanks Jurko Gospodnetic for the complete PR.
+
+- fix issue425: mention at end of "py.test -h" that --markers
+ and --fixtures work according to specified test path (or current dir)
+
+- fix issue413: exceptions with unicode attributes are now printed
+ correctly also on python2 and with pytest-xdist runs. (the fix
+ requires py-1.4.20)
+
+- copy, cleanup and integrate py.io capture
+ from pylib 1.4.20.dev2 (rev 13d9af95547e)
+
+- address issue416: clarify docs as to conftest.py loading semantics
+
+- fix issue429: comparing byte strings with non-ascii chars in assert
+ expressions now work better. Thanks Floris Bruynooghe.
+
+- make capfd/capsys.capture private, it's unused and shouldn't be exposed
+
+
+2.5.1 (2013-12-17)
+==================
+
+- merge new documentation styling PR from Tobias Bieniek.
+
+- fix issue403: allow parametrize of multiple same-name functions within
+ a collection node. Thanks Andreas Kloeckner and Alex Gaynor for reporting
+ and analysis.
+
+- Allow parameterized fixtures to specify the ID of the parameters by
+ adding an ids argument to pytest.fixture() and pytest.yield_fixture().
+ Thanks Floris Bruynooghe.
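+
+  A small sketch (fixture name, values and ids are illustrative)::
+
+      import pytest
+
+      @pytest.fixture(params=[0, 1], ids=["zero", "one"])
+      def number(request):
+          return request.param
+
+      def test_number(number):
+          # runs twice, shown as test_number[zero] and test_number[one]
+          assert number in (0, 1)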
+
+- fix issue404 by always using the binary xml escape in the junitxml
+ plugin. Thanks Ronny Pfannschmidt.
+
+- fix issue407: fix addoption docstring to point to argparse instead of
+ optparse. Thanks Daniel D. Wright.
+
+
+
+2.5.0 (2013-12-12)
+==================
+
+- dropped python2.5 from automated release testing of pytest itself
+ which means it's probably going to break soon (but still works
+ with this release we believe).
+
+- simplified and fixed implementation for calling finalizers when
+ parametrized fixtures or function arguments are involved. finalization
+ is now performed lazily at setup time instead of in the "teardown phase".
+ While this might sound odd at first, it helps to ensure that we are
+ correctly handling setup/teardown even in complex code. User-level code
+ should not be affected unless it's implementing the pytest_runtest_teardown
+ hook and expecting certain fixture instances are torn down within (very
+ unlikely and would have been unreliable anyway).
+
+- PR90: add --color=yes|no|auto option to force terminal coloring
+ mode ("auto" is default). Thanks Marc Abramowitz.
+
+- fix issue319 - correctly show unicode in assertion errors. Many
+ thanks to Floris Bruynooghe for the complete PR. Also means
+ we depend on py>=1.4.19 now.
+
+- fix issue396 - correctly sort and finalize class-scoped parametrized
+ tests independently from number of methods on the class.
+
+- refix issue323 in a better way -- parametrization should now never
+ cause Runtime Recursion errors because the underlying algorithm
+ for re-ordering tests per-scope/per-fixture is not recursive
+ anymore (it was tail-call recursive before which could lead
+  to problems for more than 966 non-function scoped parameters).
+
+- fix issue290 - there is preliminary support now for parametrizing
+ with repeated same values (sometimes useful to test if calling
+ a second time works as with the first time).
+
+- close issue240 - document precisely how pytest module importing
+ works, discuss the two common test directory layouts, and how it
+ interacts with :pep:`420`\-namespace packages.
+
+- fix issue246 fix finalizer order to be LIFO on independent fixtures
+ depending on a parametrized higher-than-function scoped fixture.
+ (was quite some effort so please bear with the complexity of this sentence :)
+ Thanks Ralph Schmitt for the precise failure example.
+
+- fix issue244 by implementing special index for parameters to only use
+  indices for parametrized test ids
+
+- fix issue287 by running all finalizers but saving the exception
+ from the first failing finalizer and re-raising it so teardown will
+ still have failed. We reraise the first failing exception because
+ it might be the cause for other finalizers to fail.
+
+- fix ordering when mock.patch or other standard decorator-wrappings
+  are used with test methods. This fixes issue346 and should
+ help with random "xdist" collection failures. Thanks to
+ Ronny Pfannschmidt and Donald Stufft for helping to isolate it.
+
+- fix issue357 - special case "-k" expressions to allow for
+ filtering with simple strings that are not valid python expressions.
+ Examples: "-k 1.3" matches all tests parametrized with 1.3.
+ "-k None" filters all tests that have "None" in their name
+ and conversely "-k 'not None'".
+ Previously these examples would raise syntax errors.
+
+- fix issue384 by removing the trial support code
+ since the unittest compat enhancements allow
+ trial to handle it on its own
+
+- don't hide an ImportError when importing a plugin produces one.
+ fixes issue375.
+
+- fix issue275 - allow usefixtures and autouse fixtures
+ for running doctest text files.
+
+- fix issue380 by making --resultlog only rely on longrepr instead
+ of the "reprcrash" attribute which only exists sometimes.
+
+- address issue122: allow @pytest.fixture(params=iterator) by exploding
+ into a list early on.
+
+- fix pexpect-3.0 compatibility for pytest's own tests.
+ (fixes issue386)
+
+- allow nested parametrize-value markers, thanks James Lan for the PR.
+
+- fix unicode handling with new monkeypatch.setattr(import_path, value)
+ API. Thanks Rob Dennis. Fixes issue371.
+
+- fix unicode handling with junitxml, fixes issue368.
+
+- In assertion rewriting mode on Python 2, fix the detection of coding
+ cookies. See issue #330.
+
+- make "--runxfail" turn imperative pytest.xfail calls into no ops
+ (it already did neutralize pytest.mark.xfail markers)
+
+- refine pytest / pkg_resources interactions: The AssertionRewritingHook
+ :pep:`302` compliant loader now registers itself with setuptools/pkg_resources
+ properly so that the pkg_resources.resource_stream method works properly.
+ Fixes issue366. Thanks for the investigations and full PR to Jason R. Coombs.
+
+- pytestconfig fixture is now session-scoped as it is the same object during the
+ whole test run. Fixes issue370.
+
+- avoid one surprising case of marker malfunction/confusion::
+
+ @pytest.mark.some(lambda arg: ...)
+ def test_function():
+
+ would not work correctly because pytest assumes @pytest.mark.some
+ gets a function to be decorated already. We now at least detect if this
+ arg is a lambda and thus the example will work. Thanks Alex Gaynor
+ for bringing it up.
+
+- xfail a test on pypy that checks wrong encoding/ascii (pypy does
+ not error out). fixes issue385.
+
+- internally make varnames() deal with a class's __init__,
+  although it's not needed by pytest itself at the moment. Also
+ fix caching. Fixes issue376.
+
+- fix issue221 - handle importing of namespace-package with no
+ __init__.py properly.
+
+- refactor internal FixtureRequest handling to avoid monkeypatching.
+ One of the positive user-facing effects is that the "request" object
+ can now be used in closures.
+
+- fixed version comparison in pytest.importorskip(modname, minverstring)
+
+- fix issue377 by clarifying in the nose-compat docs that pytest
+ does not duplicate the unittest-API into the "plain" namespace.
+
+- fix verbose reporting for @mock'd test functions
+
+2.4.2 (2013-10-04)
+==================
+
+- on Windows require colorama and a newer py lib so that py.io.TerminalWriter()
+ now uses colorama instead of its own ctypes hacks. (fixes issue365)
+ thanks Paul Moore for bringing it up.
+
+- fix "-k" matching of tests where "repr" and "attr" and other names would
+ cause wrong matches because of an internal implementation quirk
+ (don't ask) which is now properly implemented. fixes issue345.
+
+- avoid tmpdir fixture to create too long filenames especially
+ when parametrization is used (issue354)
+
+- fix pytest-pep8 and pytest-flakes / pytest interactions
+  (collection-name handling in the mark plugin assumed an item always
+  has a function, which is not true for those plugins).
+ Thanks Andi Zeidler.
+
+- introduce node.get_marker/node.add_marker API for plugins
+ like pytest-pep8 and pytest-flakes to avoid the messy
+ details of the node.keywords pseudo-dicts. Adapted
+ docs.
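+
+  A conftest.py sketch using that API (marker names are illustrative;
+  ``get_marker`` was later replaced by ``get_closest_marker``)::
+
+      import pytest
+
+      def pytest_collection_modifyitems(items):
+          for item in items:
+              if item.get_marker("slow") is not None:
+                  item.add_marker(pytest.mark.xfail(reason="slow and flaky"))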
+
+- remove attempt to "dup" stdout at startup as it's icky.
+ the normal capturing should catch enough possibilities
+ of tests messing up standard FDs.
+
+- add pluginmanager.do_configure(config) as a link to
+ config.do_configure() for plugin-compatibility
+
+2.4.1 (2013-10-02)
+==================
+
+- When using parser.addoption(), unicode arguments to the
+  "type" keyword are now converted to the respective types.
+ thanks Floris Bruynooghe, @dnozay. (fixes issue360 and issue362)
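+
+  For reference, a conftest.py sketch of ``parser.addoption()`` usage; the
+  option itself is hypothetical, and the fix concerned passing the "type"
+  keyword as a (unicode) string such as "int"::
+
+      def pytest_addoption(parser):
+          parser.addoption("--repeat", type=int, default=1,
+                           help="number of repetitions")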
+
+- fix dotted filename completion when using argcomplete
+  thanks Anthon van der Neut. (fixes issue361)
+
+- fix regression when a 1-tuple ("arg",) is used for specifying
+ parametrization (the values of the parametrization were passed
+ nested in a tuple). Thanks Donald Stufft.
+
+- merge doc typo fixes, thanks Andy Dirnberger
+
+2.4
+===
+
+known incompatibilities:
+
+- if calling --genscript from python2.7 or above, you only get a
+ standalone script which works on python2.7 or above. Use Python2.6
+ to also get a python2.5 compatible version.
+
+- all xunit-style teardown methods (nose-style, pytest-style,
+ unittest-style) will not be called if the corresponding setup method failed,
+ see issue322 below.
+
+- the pytest_plugin_unregister hook wasn't ever properly called
+ and there is no known implementation of the hook - so it got removed.
+
+- pytest.fixture-decorated functions cannot be generators (i.e. use
+ yield) anymore. This change might be reversed in 2.4.1 if it causes
+ unforeseen real-life issues. However, you can always write and return
+ an inner function/generator and change the fixture consumer to iterate
+  over the returned generator. This change was made in favour of the new
+ ``pytest.yield_fixture`` decorator, see below.
+
+new features:
+
+- experimentally introduce a new ``pytest.yield_fixture`` decorator
+ which accepts exactly the same parameters as pytest.fixture but
+  mandates a ``yield`` statement instead of a ``return`` statement from
+ fixture functions. This allows direct integration with "with-style"
+ context managers in fixture functions and generally avoids registering
+ of finalization callbacks in favour of treating the "after-yield" as
+ teardown code. Thanks Andreas Pelme, Vladimir Keleshev, Floris
+ Bruynooghe, Ronny Pfannschmidt and many others for discussions.
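+
+  A sketch of the new decorator (``pytest.yield_fixture`` was later folded
+  into plain ``pytest.fixture``; the resource handling is illustrative)::
+
+      import pytest
+
+      @pytest.yield_fixture
+      def data_file(tmpdir):
+          handle = tmpdir.join("data.txt").open("w")
+          yield handle      # the test receives the open file
+          handle.close()    # everything after the yield runs as teardown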
+
+- allow boolean expression directly with skipif/xfail
+ if a "reason" is also specified. Rework skipping documentation
+ to recommend "condition as booleans" because it prevents surprises
+ when importing markers between modules. Specifying conditions
+ as strings will remain fully supported.
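+
+  For example, a boolean condition together with the required reason (the
+  condition is illustrative)::
+
+      import sys
+
+      import pytest
+
+      @pytest.mark.skipif(sys.version_info < (3, 3), reason="requires Python 3.3")
+      def test_new_syntax():
+          pass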
+
+- reporting: color the last line red or green depending if
+ failures/errors occurred or everything passed. thanks Christian
+ Theunert.
+
+- make "import pdb ; pdb.set_trace()" work natively wrt capturing (no
+ "-s" needed anymore), making ``pytest.set_trace()`` a mere shortcut.
+
+- fix issue181: --pdb now also works on collect errors (and
+  on internal errors). This was implemented by a slight internal
+  refactoring and the introduction of a new
+  ``pytest_exception_interact`` hook (see next item).
+
+- fix issue341: introduce new experimental hook for IDEs/terminals to
+ intercept debugging: ``pytest_exception_interact(node, call, report)``.
+
+- new monkeypatch.setattr() variant to provide a shorter
+ invocation for patching out classes/functions from modules:
+
+ monkeypatch.setattr("requests.get", myfunc)
+
+ will replace the "get" function of the "requests" module with ``myfunc``.
+
+- fix issue322: tearDownClass is not run if setUpClass failed. Thanks
+ Mathieu Agopian for the initial fix. Also make all of pytest/nose
+ finalizer mimic the same generic behaviour: if a setupX exists and
+ fails, don't run teardownX. This internally introduces a new method
+ "node.addfinalizer()" helper which can only be called during the setup
+ phase of a node.
+
+- simplify pytest.mark.parametrize() signature: allow to pass a
+  comma-separated string to specify argnames. For example:
+ ``pytest.mark.parametrize("input,expected", [(1,2), (2,3)])``
+ works as well as the previous:
+ ``pytest.mark.parametrize(("input", "expected"), ...)``.
+
+- add support for setUpModule/tearDownModule detection, thanks Brian Okken.
+
+- integrate tab-completion on options through use of "argcomplete".
+ Thanks Anthon van der Neut for the PR.
+
+- change option names to be hyphen-separated long options but keep the
+ old spelling backward compatible. py.test -h will only show the
+ hyphenated version, for example "--collect-only" but "--collectonly"
+ will remain valid as well (for backward-compat reasons). Many thanks to
+ Anthon van der Neut for the implementation and to Hynek Schlawack for
+ pushing us.
+
+- fix issue 308 - allow to mark/xfail/skip individual parameter sets
+ when parametrizing. Thanks Brianna Laugher.
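+
+  With the later ``pytest.param`` spelling this looks like (values and reason
+  are illustrative)::
+
+      import pytest
+
+      @pytest.mark.parametrize("n", [
+          1,
+          pytest.param(0, marks=pytest.mark.xfail(reason="zero unsupported")),
+      ])
+      def test_positive(n):
+          assert n > 0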
+
+- call new experimental pytest_load_initial_conftests hook to allow
+ 3rd party plugins to do something before a conftest is loaded.
+
+Bug fixes:
+
+- fix issue358 - capturing options are now parsed more properly
+ by using a new parser.parse_known_args method.
+
+- pytest now uses argparse instead of optparse (thanks Anthon) which
+ means that "argparse" is added as a dependency if installing into python2.6
+ environments or below.
+
+- fix issue333: fix a case of bad unittest/pytest hook interaction.
+
+- PR27: correctly handle nose.SkipTest during collection. Thanks
+ Antonio Cuni, Ronny Pfannschmidt.
+
+- fix issue355: junitxml puts name="pytest" attribute to testsuite tag.
+
+- fix issue336: autouse fixture in plugins should work again.
+
+- fix issue279: improve object comparisons on assertion failure
+ for standard datatypes and recognise collections.abc. Thanks to
+ Brianna Laugher and Mathieu Agopian.
+
+- fix issue317: assertion rewriter support for the is_package method
+
+- fix issue335: document py.code.ExceptionInfo() object returned
+ from pytest.raises(), thanks Mathieu Agopian.
+
+- remove implicit distribute_setup support from setup.py.
+
+- fix issue305: ignore any problems when writing pyc files.
+
+- SO-17664702: call fixture finalizers even if the fixture function
+ partially failed (finalizers would not always be called before)
+
+- fix issue320 - fix class scope for fixtures when mixed with
+  module-level functions. Thanks Anatoly Bubenkoff.
+
+- you can specify "-q" or "-qq" to get different levels of "quieter"
+ reporting (thanks Katarzyna Jachim)
+
+- fix issue300 - Fix order of conftest loading when starting py.test
+ in a subdirectory.
+
+- fix issue323 - sorting of many module-scoped arg parametrizations
+
+- make sessionfinish hooks execute with the same cwd-context as at
+ session start (helps fix plugin behaviour which write output files
+ with relative path such as pytest-cov)
+
+- fix issue316 - properly reference collection hooks in docs
+
+- fix issue 306 - cleanup of -k/-m options to only match markers/test
+ names/keywords respectively. Thanks Wouter van Ackooy.
+
+- improved doctest counting for doctests in python modules --
+ files without any doctest items will not show up anymore
+ and doctest examples are counted as separate test items.
+ thanks Danilo Bellini.
+
+- fix issue245 by depending on the released py-1.4.14
+ which fixes py.io.dupfile to work with files with no
+ mode. Thanks Jason R. Coombs.
+
+- fix junitxml generation when test output contains control characters,
+ addressing issue267, thanks Jaap Broekhuizen
+
+- fix issue338: honor --tb style for setup/teardown errors as well. Thanks Maho.
+
+- fix issue307 - use yaml.safe_load in example, thanks Mark Eichin.
+
+- better parametrize error messages, thanks Brianna Laugher
+
+- pytest_terminal_summary(terminalreporter) hooks can now use
+ ".section(title)" and ".line(msg)" methods to print extra
+ information at the end of a test run.
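+
+  A conftest.py sketch (the printed text is illustrative)::
+
+      def pytest_terminal_summary(terminalreporter):
+          terminalreporter.section("custom summary")
+          terminalreporter.line("extra information shown after the test run")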
+
+2.3.5 (2013-04-30)
+==================
+
+- fix issue169: respect --tb=style with setup/teardown errors as well.
+
+- never consider a fixture function for test function collection
+
+- allow re-running of test items / helps to fix pytest-reruntests plugin
+ and also help to keep less fixture/resource references alive
+
+- put captured stdout/stderr into junitxml output even for passing tests
+ (thanks Adam Goucher)
+
+- Issue 265 - integrate nose setup/teardown with setupstate
+ so it doesn't try to teardown if it did not setup
+
+- issue 271 - don't write junitxml on worker nodes
+
+- Issue 274 - don't try to show full doctest example
+ when doctest does not know the example location
+
+- issue 280 - disable assertion rewriting on buggy CPython 2.6.0
+
+- inject "getfixture()" helper to retrieve fixtures from doctests,
+ thanks Andreas Zeidler
+
+- issue 259 - when assertion rewriting, be consistent with the default
+ source encoding of ASCII on Python 2
+
+- issue 251 - report a skip instead of ignoring classes with init
+
+- issue250 unicode/str mixes in parametrization names and values now works
+
+- issue257, assertion-triggered compilation of source ending in a
+ comment line doesn't blow up in python2.5 (fixed through py>=1.4.13.dev6)
+
+- fix --genscript option to generate standalone scripts that also
+ work with python3.3 (importer ordering)
+
+- issue171 - in assertion rewriting, show the repr of some
+ global variables
+
+- fix option help for "-k"
+
+- move long description of distribution into README.rst
+
+- improve docstring for metafunc.parametrize()
+
+- fix bug where using capsys with pytest.set_trace() in a test
+ function would break when looking at capsys.readouterr()
+
+- allow to specify prefixes starting with "_" when
+ customizing python_functions test discovery. (thanks Graham Horler)
+
+- improve PYTEST_DEBUG tracing output by putting
+ extra data on a new lines with additional indent
+
+- ensure OutcomeExceptions like skip/fail have initialized exception attributes
+
+- issue 260 - don't use nose special setup on plain unittest cases
+
+- fix issue134 - print the collect errors that prevent running specified test items
+
+- fix issue266 - accept unicode in MarkEvaluator expressions
+
+2.3.4 (2012-11-20)
+==================
+
+- yielded test functions will now have autouse-fixtures active but
+ cannot accept fixtures as funcargs - it's anyway recommended to
+ rather use the post-2.0 parametrize features instead of yield, see:
+ http://pytest.org/en/stable/example/how-to/parametrize.html
+- fix autouse-issue where autouse-fixtures would not be discovered
+ if defined in an a/conftest.py file and tests in a/tests/test_some.py
+- fix issue226 - LIFO ordering for fixture teardowns
+- fix issue224 - invocations with >256 char arguments now work
+- fix issue91 - add/discuss package/directory level setups in example
+- allow to dynamically define markers via
+ item.keywords[...]=assignment integrating with "-m" option
+- make "-k" accept an expressions the same as with "-m" so that one
+ can write: -k "name1 or name2" etc. This is a slight incompatibility
+ if you used special syntax like "TestClass.test_method" which you now
+ need to write as -k "TestClass and test_method" to match a certain
+ method in a certain test class.
+
+2.3.3 (2012-11-06)
+==================
+
+- fix issue214 - parse modules that contain special objects, e.g.
+  flask's request object, which blows up on getattr access if no request
+ is active. thanks Thomas Waldmann.
+
+- fix issue213 - allow to parametrize with values like numpy arrays that
+ do not support an __eq__ operator
+
+- fix issue215 - split test_python.org into multiple files
+
+- fix issue148 - @unittest.skip on classes is now recognized and avoids
+ calling setUpClass/tearDownClass, thanks Pavel Repin
+
+- fix issue209 - reintroduce python2.4 support by depending on newer
+ pylib which re-introduced statement-finding for pre-AST interpreters
+
+- nose support: only call setup if it's a callable, thanks Andrew
+ Taumoefolau
+
+- fix issue219 - add py2.4-3.3 classifiers to TROVE list
+
+- in tracebacks *,** arg values are now shown next to normal arguments
+ (thanks Manuel Jacob)
+
+- fix issue217 - support mock.patch with pytest's fixtures - note that
+ you need either mock-1.0.1 or the python3.3 builtin unittest.mock.
+
+- fix issue127 - improve documentation for pytest_addoption() and
+ add a ``config.getoption(name)`` helper function for consistency.
+
+2.3.2 (2012-10-25)
+==================
+
+- fix issue208 and fix issue29: use a new py version to avoid long pauses
+ when printing tracebacks in long modules
+
+- fix issue205 - conftests in subdirs customizing
+ pytest_pycollect_makemodule and pytest_pycollect_makeitem
+ now work properly
+
+- fix teardown-ordering for parametrized setups
+
+- fix issue127 - better documentation for pytest_addoption
+ and related objects.
+
+- fix unittest behaviour: TestCase.runtest only called if there are
+ test methods defined
+
+- improve trial support: don't collect its empty
+ unittest.TestCase.runTest() method
+
+- "python setup.py test" now works with pytest itself
+
+- fix/improve internal/packaging related bits:
+
+ - exception message check of test_nose.py now passes on python33 as well
+
+ - issue206 - fix test_assertrewrite.py to work when a global
+ PYTHONDONTWRITEBYTECODE=1 is present
+
+ - add tox.ini to pytest distribution so that ignore-dirs and others config
+ bits are properly distributed for maintainers who run pytest-own tests
+
+2.3.1 (2012-10-20)
+==================
+
+- fix issue202 - fix regression: using "self" from fixture functions now
+ works as expected (it's the same "self" instance that a test method
+ which uses the fixture sees)
+
+- skip pexpect using tests (test_pdb.py mostly) on freebsd* systems
+ due to pexpect not supporting it properly (hanging)
+
+- link to web pages from --markers output which provides help for
+ pytest.mark.* usage.
+
+2.3.0 (2012-10-19)
+==================
+
+- fix issue202 - better automatic names for parametrized test functions
+- fix issue139 - introduce @pytest.fixture which allows direct scoping
+ and parametrization of funcarg factories.
+- fix issue198 - conftest fixtures were not found on windows32 in some
+ circumstances with nested directory structures due to path manipulation issues
+- fix issue193 skip test functions that were parametrized with empty
+ parameter sets
+- fix python3.3 compat, mostly reporting bits that previously depended
+ on dict ordering
+- introduce re-ordering of tests by resource and parametrization setup
+ which takes precedence to the usual file-ordering
+- fix issue185 monkeypatching time.time does not cause pytest to fail
+- fix issue172 duplicate call of pytest.fixture decorated setup_module
+ functions
+- fix junitxml=path construction so that if tests change the
+ current working directory and the path is a relative path
+ it is constructed correctly from the original current working dir.
+- fix "python setup.py test" example to cause a proper "errno" return
+- fix issue165 - fix broken doc links and mention stackoverflow for FAQ
+- catch unicode-issues when writing failure representations
+ to terminal to prevent the whole session from crashing
+- fix xfail/skip confusion: a skip-mark or an imperative pytest.skip
+  will now take precedence over xfail-markers because we
+ can't determine xfail/xpass status in case of a skip. see also:
+ http://stackoverflow.com/questions/11105828/in-py-test-when-i-explicitly-skip-a-test-that-is-marked-as-xfail-how-can-i-get
+
+- always report installed 3rd party plugins in the header of a test run
+
+- fix issue160: a failing setup of an xfail-marked tests should
+ be reported as xfail (not xpass)
+
+- fix issue128: show captured output when capsys/capfd are used
+
+- fix issue179: properly show the dependency chain of factories
+
+- pluginmanager.register(...) now raises ValueError if the
+ plugin has been already registered or the name is taken
+
+- fix issue159: improve https://docs.pytest.org/en/6.0.1/faq.html
+ especially with respect to the "magic" history, also mention
+ pytest-django, trial and unittest integration.
+
+- make request.keywords and node.keywords writable. All descendant
+ collection nodes will see keyword values. Keywords are dictionaries
+ containing markers and other info.
+
+- fix issue 178: xml binary escapes are now wrapped in py.xml.raw
+
+- fix issue 176: correctly catch the builtin AssertionError
+ even when we replaced AssertionError with a subclass on the
+ python level
+
+- factory discovery no longer fails with magic global callables
+ that provide no sane __code__ object (mock.call for example)
+
+- fix issue 182: testdir.inprocess_run now considers passed plugins
+
+- fix issue 188: ensure sys.exc_info is clear on python2
+ before calling into a test
+
+- fix issue 191: add unittest TestCase runTest method support
+- fix issue 156: monkeypatch correctly handles class level descriptors
+
+- reporting refinements:
+
+ - pytest_report_header now receives a "startdir" so that
+ you can use startdir.bestrelpath(yourpath) to show
+ nice relative path
+
+ - allow plugins to implement both pytest_report_header and
+ pytest_sessionstart (sessionstart is invoked first).
+
+ - don't show deselected reason line if there is none
+
+ - py.test -vv will show all of assert comparisons instead of truncating
+
+2.2.4 (2012-05-22)
+==================
+
+- fix error message for rewritten assertions involving the % operator
+- fix issue 126: correctly match all invalid xml characters for junitxml
+ binary escape
+- fix issue with unittest: now @unittest.expectedFailure markers should
+ be processed correctly (you can also use @pytest.mark markers)
+- document integration with the extended distribute/setuptools test commands
+- fix issue 140: properly get the real functions
+ of bound classmethods for setup/teardown_class
+- fix issue #141: switch from the deceased paste.pocoo.org to bpaste.net
+- fix issue #143: call unconfigure/sessionfinish always when
+ configure/sessionstart where called
+- fix issue #144: better mangle test ids to junitxml classnames
+- upgrade distribute_setup.py to 0.6.27
+
+2.2.3 (2012-02-05)
+==================
+
+- fix uploaded package to only include necessary files
+
+2.2.2 (2012-02-05)
+==================
+
+- fix issue101: wrong args to unittest.TestCase test function now
+ produce better output
+- fix issue102: report more useful errors and hints for when a
+ test directory was renamed and some pyc/__pycache__ remain
+- fix issue106: allow parametrize to be applied multiple times
+ e.g. from module, class and at function level.
+- fix issue107: actually perform session scope finalization
+- don't check in parametrize if indirect parameters are funcarg names
+- add chdir method to monkeypatch funcarg
+- fix crash resulting from calling monkeypatch undo a second time
+- fix issue115: make --collectonly robust against early failure
+ (missing files/directories)
+- "-qq --collectonly" now shows only files and the number of tests in them
+- "-q --collectonly" now shows test ids
+- allow adding of attributes to test reports such that it also works
+ with distributed testing (no upgrade of pytest-xdist needed)
+
+2.2.1 (2011-12-16)
+==================
+
+- fix issue99 (in pytest and py) internal errors with resultlog now
+ produce better output - fixed by normalizing pytest_internalerror
+ input arguments.
+- fix issue97 / traceback issues (in pytest and py) improve traceback output
+ in conjunction with jinja2 and cython which hack tracebacks
+- fix issue93 (in pytest and pytest-xdist) avoid "delayed teardowns":
+ the final test in a test node will now run its teardown directly
+ instead of waiting for the end of the session. Thanks Dave Hunt for
+ the good reporting and feedback. The pytest_runtest_protocol as well
+ as the pytest_runtest_teardown hooks now have "nextitem" available
+ which will be None indicating the end of the test run.
+- fix collection crash due to unknown-source collected items, thanks
+ to Ralf Schmitt (fixed by depending on a more recent pylib)
+
+2.2.0 (2011-11-18)
+==================
+
+- fix issue90: introduce eager tearing down of test items so that
+  teardown functions are called earlier.
+- add an all-powerful metafunc.parametrize function which allows to
+ parametrize test function arguments in multiple steps and therefore
+ from independent plugins and places.
+- add a @pytest.mark.parametrize helper which allows to easily
+ call a test function with different argument values
+- Add examples to the "parametrize" example page, including a quick port
+ of Test scenarios and the new parametrize function and decorator.
+- introduce registration for "pytest.mark.*" helpers via ini-files
+ or through plugin hooks. Also introduce a "--strict" option which
+ will treat unregistered markers as errors
+ allowing to avoid typos and maintain a well described set of markers
+ for your test suite. See examples at http://pytest.org/en/stable/how-to/mark.html
+ and its links.
+- issue50: introduce "-m marker" option to select tests based on markers
+ (this is a stricter and more predictable version of '-k' in that "-m"
+ only matches complete markers and has more obvious rules for and/or
+  semantics).
+- new feature to help optimizing the speed of your tests:
+ --durations=N option for displaying N slowest test calls
+ and setup/teardown methods.
+- fix issue87: --pastebin now works with python3
+- fix issue89: --pdb with unexpected exceptions in doctest work more sensibly
+- fix and cleanup pytest's own test suite to not leak FDs
+- fix issue83: link to generated funcarg list
+- fix issue74: pyarg module names are now checked against imp.find_module false positives
+- fix compatibility with twisted/trial-11.1.0 use cases
+- simplify Node.listchain
+- simplify junitxml output code by relying on py.xml
+- add support for skip properties on unittest classes and functions
+
+2.1.3 (2011-10-18)
+==================
+
+- fix issue79: assertion rewriting failed on some comparisons in boolops
+- correctly handle zero length arguments (a la pytest '')
+- fix issue67 / junitxml now contains correct test durations, thanks ronny
+- fix issue75 / skipping test failure on jython
+- fix issue77 / Allow assertrepr_compare hook to apply to a subset of tests
+
+2.1.2 (2011-09-24)
+==================
+
+- fix assertion rewriting on files with windows newlines on some Python versions
+- refine test discovery by package/module name (--pyargs), thanks Florian Mayer
+- fix issue69 / assertion rewriting fixed on some boolean operations
+- fix issue68 / packages now work with assertion rewriting
+- fix issue66: use different assertion rewriting caches when the -O option is passed
+- don't try assertion rewriting on Jython, use reinterp
+
+2.1.1
+=====
+
+- fix issue64 / pytest.set_trace now works within pytest_generate_tests hooks
+- fix issue60 / fix error conditions involving the creation of __pycache__
+- fix issue63 / assertion rewriting on inserts involving strings containing '%'
+- fix assertion rewriting on calls with a ** arg
+- don't cache rewritten modules if bytecode generation is disabled
+- fix assertion rewriting in read-only directories
+- fix issue59: provide system-out/err tags for junitxml output
+- fix issue61: assertion rewriting on boolean operations with 3 or more operands
+- you can now build a man page with "cd doc ; make man"
+
+2.1.0 (2011-07-09)
+==================
+
+- fix issue53: call nose-style setup functions with correct ordering
+- fix issue58 and issue59: new assertion code fixes
+- merge Benjamin's assertionrewrite branch: now assertions
+ for test modules on python 2.6 and above are done by rewriting
+ the AST and saving the pyc file before the test module is imported.
+ see doc/assert.txt for more info.
+- fix issue43: improve doctests with better traceback reporting on
+ unexpected exceptions
+- fix issue47: timing output in junitxml for test cases is now correct
+- fix issue48: typo in MarkInfo repr leading to exception
+- fix issue49: avoid confusing error when initialization partially fails
+- fix issue44: env/username expansion for junitxml file path
+- show releaselevel information in test runs for pypy
+- reworked doc pages for better navigation and PDF generation
+- report KeyboardInterrupt even if interrupted during session startup
+- fix issue 35 - provide PDF doc version and download link from index page
+
+2.0.3 (2011-05-11)
+==================
+
+- fix issue38: nicer tracebacks on calls to hooks, particularly early
+ configure/sessionstart ones
+
+- fix missing skip reason/meta information in junitxml files, reported
+ via http://lists.idyll.org/pipermail/testing-in-python/2011-March/003928.html
+
+- fix issue34: avoid collection failure with "test" prefixed classes
+ deriving from object.
+
+- don't require zlib (and other libs) for genscript plugin without
+ --genscript actually being used.
+
+- speed up skips (by not doing a full traceback representation
+ internally)
+
+- fix issue37: avoid invalid characters in junitxml's output
+
+2.0.2 (2011-03-09)
+==================
+
+- tackle issue32 - speed up test runs of very quick test functions
+ by reducing the relative overhead
+
+- fix issue30 - extended xfail/skipif handling and improved reporting.
+ If you have a syntax error in your skip/xfail
+ expressions you now get nice error reports.
+
+ Also you can now access module globals from xfail/skipif
+ expressions so that this for example works now::
+
+      import pytest
+      import mymodule
+      @pytest.mark.skipif("mymodule.__version__[0] != '1'")
+      def test_function():
+          pass
+
+ This will not run the test function if the module's version string
+  does not start with a "1". Note that specifying a string instead
+  of a boolean expression allows py.test to report meaningful information
+ when summarizing a test run as to what conditions lead to skipping
+ (or xfail-ing) tests.
+
+- fix issue28 - setup_method and pytest_generate_tests work together
+ The setup_method fixture method now gets called also for
+ test function invocations generated from the pytest_generate_tests
+ hook.
+
+- fix issue27 - collectonly and keyword-selection (-k) now work together
+ Also, if you do "py.test --collectonly -q" you now get a flat list
+ of test ids that you can use to paste to the py.test commandline
+ in order to execute a particular test.
+
+- fix issue25 avoid reported problems with --pdb and python3.2/encodings output
+
+- fix issue23 - tmpdir argument now works on Python3.2 and WindowsXP
+ Starting with Python3.2 os.symlink may be supported. By requiring
+ a newer py lib version the py.path.local() implementation acknowledges
+ this.
+
+- fixed typos in the docs (thanks Victor Garcia, Brianna Laugher) and particular
+ thanks to Laura Creighton who also reviewed parts of the documentation.
+
+- fix slightly wrong output of verbose progress reporting for classes
+ (thanks Amaury)
+
+- more precise (avoiding of) deprecation warnings for node.Class|Function accesses
+
+- avoid std unittest assertion helper code in tracebacks (thanks Ronny)
+
+2.0.1 (2011-02-07)
+==================
+
+- refine and unify initial capturing so that it works nicely
+ even if the logging module is used on an early-loaded conftest.py
+ file or plugin.
+- allow to omit "()" in test ids to allow for uniform test ids
+ as produced by Alfredo's nice pytest.vim plugin.
+- fix issue12 - show plugin versions with "--version" and
+ "--traceconfig" and also document how to add extra information
+ to reporting test header
+- fix issue17 (import-* reporting issue on python3) by
+ requiring py>1.4.0 (1.4.1 is going to include it)
+- fix issue10 (numpy arrays truth checking) by refining
+ assertion interpretation in py lib
+- fix issue15: make nose compatibility tests compatible
+ with python3 (now that nose-1.0 supports python3)
+- remove somewhat surprising "same-conftest" detection because
+ it ignores conftest.py when they appear in several subdirs.
+- improve assertions ("not in"), thanks Floris Bruynooghe
+- improve behaviour/warnings when running on top of "python -OO"
+ (assertions and docstrings are turned off, leading to potential
+ false positives)
+- introduce a pytest_cmdline_processargs(args) hook
+ to allow dynamic computation of command line arguments.
+ This fixes a regression because py.test prior to 2.0
+ allowed to set command line options from conftest.py
+  files, which pytest-2.0 so far only allowed from ini-files.
+- fix issue7: assert failures in doctest modules.
+  unexpected failures in doctests will now generally
+ show nicer, i.e. within the doctest failing context.
+- fix issue9: setup/teardown functions for an xfail-marked
+ test will report as xfail if they fail but report as normally
+ passing (not xpassing) if they succeed. This only is true
+ for "direct" setup/teardown invocations because teardown_class/
+ teardown_module cannot closely relate to a single test.
+- fix issue14: no logging errors at process exit
+- refinements to "collecting" output on non-ttys
+- refine internal plugin registration and --traceconfig output
+- introduce a mechanism to prevent/unregister plugins from the
+ command line, see http://pytest.org/en/stable/how-to/plugins.html#cmdunregister
+- activate resultlog plugin by default
+- fix regression wrt yielded tests which due to the
+ collection-before-running semantics were not
+ setup as with pytest 1.3.4. Note, however, that
+ the recommended and much cleaner way to do test
+  parametrization remains the "pytest_generate_tests"
+ mechanism, see the docs.
+
+2.0.0 (2010-11-25)
+==================
+
+- pytest-2.0 is now its own package and depends on pylib-2.0
+- new ability: python -m pytest / python -m pytest.main ability
+- new python invocation: pytest.main(args, plugins) to load
+ some custom plugins early.
+- try harder to run unittest test suites in a more compatible manner
+ by deferring setup/teardown semantics to the unittest package.
+ also work harder to run twisted/trial and Django tests which
+ should now basically work by default.
+- introduce a new way to set config options via ini-style files,
+ by default setup.cfg and tox.ini files are searched. The old
+  ways (certain environment variables, dynamic conftest.py reading)
+  are removed.
+- add a new "-q" option which decreases verbosity and prints a more
+ nose/unittest-style "dot" output.
+- fix issue135 - marks now work with unittest test cases as well
+- fix issue126 - introduce py.test.set_trace() to trace execution via
+ PDB during the running of tests even if capturing is ongoing.
+- fix issue123 - new "python -m py.test" invocation for py.test
+ (requires Python 2.5 or above)
+- fix issue124 - make reporting more resilient against tests opening
+ files on filedescriptor 1 (stdout).
+- fix issue109 - sibling conftest.py files will not be loaded.
+ (and Directory collectors cannot be customized anymore from a Directory's
+ conftest.py - this needs to happen at least one level up).
+- introduce (customizable) assertion failure representations and enhance
+ output on assertion failures for comparisons and other cases (Floris Bruynooghe)
+- nose-plugin: pass through type-signature failures in setup/teardown
+ functions instead of not calling them (Ed Singleton)
+- remove py.test.collect.Directory (follows from a major refactoring
+ and simplification of the collection process)
+- majorly reduce py.test core code, shift function/python testing to own plugin
+- fix issue88 (finding custom test nodes from command line arg)
+- refine 'tmpdir' creation, will now create basenames better associated
+ with test names (thanks Ronny)
+- "xpass" (unexpected pass) tests don't cause exitcode!=0
+- fix issue131 / issue60 - importing doctests in __init__ files used as namespace packages
+- fix issue93 stdout/stderr is captured while importing conftest.py
+- fix bug: unittest collected functions now also can have "pytestmark"
+ applied at class/module level
+- add ability to use "class" level for cached_setup helper
+- fix strangeness: mark.* objects are now immutable, create new instances
+
+1.3.4 (2010-09-14)
+==================
+
+- fix issue111: improve install documentation for windows
+- fix issue119: fix custom collectability of __init__.py as a module
+- fix issue116: --doctestmodules work with __init__.py files as well
+- fix issue115: unify internal exception passthrough/catching/GeneratorExit
+- fix issue118: new --tb=native for presenting cpython-standard exceptions
+
+1.3.3 (2010-07-30)
+==================
+
+- fix issue113: assertion representation problem with triple-quoted strings
+ (and possibly other cases)
+- make conftest loading detect that a conftest file with the same
+ content was already loaded, avoids surprises in nested directory structures
+ which can be produced e.g. by Hudson. It probably removes the need to use
+ --confcutdir in most cases.
+- fix terminal coloring for win32
+ (thanks Michael Foord for reporting)
+- fix weirdness: make terminal width detection work on stdout instead of stdin
+ (thanks Armin Ronacher for reporting)
+- remove trailing whitespace in all py/text distribution files
+
+1.3.2 (2010-07-08)
+==================
+
+**New features**
+
+- fix issue103: introduce py.test.raises as context manager, examples::
+
+ with py.test.raises(ZeroDivisionError):
+ x = 0
+ 1 / x
+
+ with py.test.raises(RuntimeError) as excinfo:
+ call_something()
+
+ # you may do extra checks on excinfo.value|type|traceback here
+
+ (thanks Ronny Pfannschmidt)
+
+- Funcarg factories can now dynamically apply a marker to a
+ test invocation. This is for example useful if a factory
+ provides parameters to a test which are expected-to-fail::
+
+ def pytest_funcarg__arg(request):
+ request.applymarker(py.test.mark.xfail(reason="flaky config"))
+ ...
+
+ def test_function(arg):
+ ...
+
+- improved error reporting on collection and import errors. This makes
+ use of a more general mechanism, namely that for custom test item/collect
+ nodes ``node.repr_failure(excinfo)`` is now uniformly called so that you can
+ override it to return a string error representation of your choice
+ which is going to be reported as a (red) string.
+
+- introduce '--junitprefix=STR' option to prepend a prefix
+ to all reports in the junitxml file.
+
+**Bug fixes**
+
+- make tests and the ``pytest_recwarn`` plugin in particular fully compatible
+  with Python 2.7 (if you use the ``recwarn`` funcarg, warnings will be enabled so that
+ you can properly check for their existence in a cross-python manner).
+- refine --pdb: ignore xfailed tests, unify its TB-reporting and
+ don't display failures again at the end.
+- fix assertion interpretation with the ** operator (thanks Benjamin Peterson)
+- fix issue105 assignment on the same line as a failing assertion (thanks Benjamin Peterson)
+- fix issue104 proper escaping for test names in junitxml plugin (thanks anonymous)
+- fix issue57 -f|--looponfail to work with xpassing tests (thanks Ronny)
+- fix issue92 collectonly reporter and --pastebin (thanks Benjamin Peterson)
+- fix py.code.compile(source) to generate unique filenames
+- fix assertion re-interp problems on PyPy, by deferring code
+ compilation to the (overridable) Frame.eval class. (thanks Amaury Forgeot)
+- fix py.path.local.pyimport() to work with directories
+- streamline py.path.local.mkdtemp implementation and usage
+- don't print empty lines when showing junitxml-filename
+- add optional boolean ignore_errors parameter to py.path.local.remove
+- fix terminal writing on win32/python2.4
+- py.process.cmdexec() now tries harder to return properly encoded unicode objects
+ on all python versions
+- install plain py.test/py.which scripts also for Jython, this helps to
+ get canonical script paths in virtualenv situations
+- make path.bestrelpath(path) return ".", note that when calling
+ X.bestrelpath the assumption is that X is a directory.
+- make initial conftest discovery ignore "--" prefixed arguments
+- fix resultlog plugin when used in a multicpu/multihost xdist situation
+ (thanks Jakub Gustak)
+- perform distributed testing related reporting in the xdist-plugin
+ rather than having dist-related code in the generic py.test
+ distribution
+- fix homedir detection on Windows
+- ship distribute_setup.py version 0.6.13
+
+1.3.1 (2010-05-25)
+==================
+
+**New features**
+
+- issue91: introduce new py.test.xfail(reason) helper
+ to imperatively mark a test as expected to fail. Can
+ be used from within setup and test functions. This is
+ useful especially for parametrized tests when certain
+ configurations are expected-to-fail. In this case the
+ declarative approach with the @py.test.mark.xfail cannot
+ be used as it would mark all configurations as xfail.
+
+- issue102: introduce new --maxfail=NUM option to stop
+ test runs after NUM failures. This is a generalization
+ of the '-x' or '--exitfirst' option which is now equivalent
+ to '--maxfail=1'. Both '-x' and '--maxfail' will
+ now also print a line near the end indicating the Interruption.
+
+- issue89: allow py.test.mark decorators to be used on classes
+ (class decorators were introduced with python2.6) and
+ also allow to have multiple markers applied at class/module level
+ by specifying a list.
+
+- improve and refine letter reporting in the progress bar:
+ . pass
+ f failed test
+ s skipped tests (reminder: use for dependency/platform mismatch only)
+ x xfailed test (test that was expected to fail)
+ X xpassed test (test that was expected to fail but passed)
+
+ You can use any combination of 'fsxX' with the '-r' extended
+ reporting option. The xfail/xpass results will show up as
+ skipped tests in the junitxml output - which also fixes
+ issue99.
+
+- make py.test.cmdline.main() return the exitstatus instead of raising
+ SystemExit and also allow it to be called multiple times. This of
+  course requires that your application and tests are properly torn
+  down and don't have global state.
+
+**Bug Fixes**
+
+- improved traceback presentation:
+ - improved and unified reporting for "--tb=short" option
+  - Errors during test module imports are much shorter (using --tb=short style)
+ - raises shows shorter more relevant tracebacks
+ - --fulltrace now more systematically makes traces longer / inhibits cutting
+
+- improve support for raises and other dynamically compiled code by
+ manipulating python's linecache.cache instead of the previous
+ rather hacky way of creating custom code objects. This makes
+ it seamlessly work on Jython and PyPy where it previously didn't.
+
+- fix issue96: make capturing more resilient against Control-C
+ interruptions (involved somewhat substantial refactoring
+ to the underlying capturing functionality to avoid race
+ conditions).
+
+- fix chaining of conditional skipif/xfail decorators - so it works now
+ as expected to use multiple @py.test.mark.skipif(condition) decorators,
+  including specific reporting of which of the conditions led to skipping.
+
+- fix issue95: late-import zlib so that it's not required
+ for general py.test startup.
+
+- fix issue94: make reporting more robust against bogus source code
+ (and internally be more careful when presenting unexpected byte sequences)
+
+
+1.3.0 (2010-05-05)
+==================
+
+- deprecate --report option in favour of a new shorter and easier to
+ remember -r option: it takes a string argument consisting of any
+ combination of 'xfsX' characters. They relate to the single chars
+ you see during the dotted progress printing and will print an extra line
+ per test at the end of the test run. This extra line indicates the exact
+  position or test ID that you can paste directly to the py.test cmdline in order
+ to re-run a particular test.
+
+- allow external plugins to register new hooks via the new
+ pytest_addhooks(pluginmanager) hook. The new release of
+ the pytest-xdist plugin for distributed and looponfailing
+ testing requires this feature.
+
+- add a new pytest_ignore_collect(path, config) hook to allow projects and
+ plugins to define exclusion behaviour for their directory structure -
+ for example you may define in a conftest.py this method::
+
+ def pytest_ignore_collect(path):
+ return path.check(link=1)
+
+  to prevent even attempting to collect any tests in symlinked dirs.
+
+- new pytest_pycollect_makemodule(path, parent) hook for
+ allowing customization of the Module collection object for a
+ matching test module.
+
+- extend and refine xfail mechanism:
+ ``@py.test.mark.xfail(run=False)`` do not run the decorated test
+ ``@py.test.mark.xfail(reason="...")`` prints the reason string in xfail summaries
+ specifying ``--runxfail`` on command line virtually ignores xfail markers
+
+- expose (previously internal) commonly useful methods:
+  py.io.get_terminal_width() -> return terminal width
+ py.io.ansi_print(...) -> print colored/bold text on linux/win32
+ py.io.saferepr(obj) -> return limited representation string
+
+- expose test outcome related exceptions as py.test.skip.Exception,
+ py.test.raises.Exception etc., useful mostly for plugins
+ doing special outcome interpretation/tweaking
+
+- (issue85) fix junitxml plugin to handle tests with non-ascii output
+
+- fix/refine python3 compatibility (thanks Benjamin Peterson)
+
+- fixes for making the jython/win32 combination work, note however:
+ jython2.5.1/win32 does not provide a command line launcher, see
+  https://bugs.jython.org/issue1491. See pylib install documentation
+ for how to work around.
+
+- fixes for handling of unicode exception values and unprintable objects
+
+- (issue87) fix unboundlocal error in assertionold code
+
+- (issue86) improve documentation for looponfailing
+
+- refine IO capturing: stdin-redirect pseudo-file now has a NOP close() method
+
+- ship distribute_setup.py version 0.6.10
+
+- added links to the new capturelog and coverage plugins
+
+
+1.2.0 (2010-01-18)
+==================
+
+- refined usage and options for "py.cleanup"::
+
+ py.cleanup # remove "*.pyc" and "*$py.class" (jython) files
+ py.cleanup -e .swp -e .cache # also remove files with these extensions
+ py.cleanup -s # remove "build" and "dist" directory next to setup.py files
+ py.cleanup -d # also remove empty directories
+ py.cleanup -a # synonym for "-s -d -e 'pip-log.txt'"
+ py.cleanup -n # dry run, only show what would be removed
+
+- add a new option "py.test --funcargs" which shows available funcargs
+ and their help strings (docstrings on their respective factory function)
+ for a given test path
+
+- display a short and concise traceback if a funcarg lookup fails
+
+- early-load "conftest.py" files in non-dot first-level sub directories.
+ allows to conveniently keep and access test-related options in a ``test``
+ subdir and still add command line options.
+
+- fix issue67: new super-short traceback-printing option: "--tb=line" will print a single line for each failing (python) test indicating its filename, lineno and the failure value
+
+- fix issue78: always call python-level teardown functions even if the
+  corresponding setup failed. This includes refinements for calling setup_module/class functions
+  which will now only be called once instead of the previous behaviour where they'd be called
+  multiple times if they raise an exception (including a Skipped exception). Any exception
+  will be recorded and associated with all tests in the corresponding module/class scope.
+
+- fix issue63: assume <40 columns to be a bogus terminal width, default to 80
+
+- fix pdb debugging to be in the correct frame on raises-related errors
+
+- update apipkg.py to fix an issue where recursive imports might
+ unnecessarily break importing
+
+- fix plugin links
+
+1.1.1 (2009-11-24)
+==================
+
+- moved dist/looponfailing from py.test core into a new
+ separately released pytest-xdist plugin.
+
+- new junitxml plugin: --junitxml=path will generate a junit style xml file
+ which is processable e.g. by the Hudson CI system.
+
+- new option: --genscript=path will generate a standalone py.test script
+  which will not need any libraries installed. Thanks to Ralf Schmitt.
+
+- new option: --ignore will prevent specified path from collection.
+ Can be specified multiple times.
+
+- new option: --confcutdir=dir will make py.test only consider conftest
+ files that are relative to the specified dir.
+
+- new funcarg: "pytestconfig" is the pytest config object for access
+ to command line args and can now be easily used in a test.
+
+- install ``py.test`` and ``py.which`` with a ``-$VERSION`` suffix to
+ disambiguate between Python3, python2.X, Jython and PyPy installed versions.
+
+- new "pytestconfig" funcarg allows access to test config object
+
+- new "pytest_report_header" hook can return additional lines
+ to be displayed at the header of a test run.
+
+- (experimental) allow "py.test path::name1::name2::..." for pointing
+ to a test within a test collection directly. This might eventually
+ evolve as a full substitute to "-k" specifications.
+
+- streamlined plugin loading: order is now as documented in
+ customize.html: setuptools, ENV, commandline, conftest.
+ also setuptools entry point names are turned to canonical names ("pytest_*")
+
+- automatically skip tests that need 'capfd' but have no os.dup
+
+- allow pytest_generate_tests to be defined in classes as well
+
+- deprecate usage of 'disabled' attribute in favour of pytestmark
+- deprecate definition of Directory, Module, Class and Function nodes
+ in conftest.py files. Use pytest collect hooks instead.
+
+- collection/item node specific runtest/collect hooks are only called exactly
+ on matching conftest.py files, i.e. ones which are exactly below
+ the filesystem path of an item
+
+- change: the first pytest_collect_directory hook to return something
+  will now prevent further hooks from being called.
+
+- change: figleaf plugin now requires --figleaf to run. Also
+ change its long command line options to be a bit shorter (see py.test -h).
+
+- change: pytest doctest plugin is now enabled by default and has a
+ new option --doctest-glob to set a pattern for file matches.
+
+- change: remove internal py._* helper vars, only keep py._pydir
+
+- robustify capturing to survive if custom pytest_runtest_setup
+ code failed and prevented the capturing setup code from running.
+
+- make py.test.* helpers provided by default plugins visible early -
+ works transparently both for pydoc and for interactive sessions
+ which will regularly see e.g. py.test.mark and py.test.importorskip.
+
+- simplify internal plugin manager machinery
+- simplify internal collection tree by introducing a RootCollector node
+
+* fix assert reinterpretation that sees a call containing "keyword=..."
+
+- fix issue66: invoke pytest_sessionstart and pytest_sessionfinish
+ hooks on worker nodes during dist-testing, report module/session teardown
+ hooks correctly.
+
+- fix issue65: properly handle dist-testing if no
+ execnet/py lib installed remotely.
+
+- skip some install-tests if no execnet is available
+
+- fix docs, fix internal bin/ script generation
+
+
+1.1.0 (2009-11-05)
+==================
+
+- introduce automatic plugin registration via 'pytest11'
+ entrypoints via setuptools' pkg_resources.iter_entry_points
+
+- fix py.test dist-testing to work with execnet >= 1.0.0b4
+
+- re-introduce py.test.cmdline.main() for better backward compatibility
+
+- svn paths: fix a bug with path.check(versioned=True) for svn paths,
+ allow '%' in svn paths, make svnwc.update() default to interactive mode
+ like in 1.0.x and add svnwc.update(interactive=False) to inhibit interaction.
+
+- refine distributed tarball to contain test and no pyc files
+
+- try harder to have deprecation warnings for py.compat.* accesses
+ report a correct location
+
+1.0.3
+=====
+
+* adjust and improve docs
+
+* remove py.rest tool and internal namespace - it was
+ never really advertised and can still be used with
+ the old release if needed. If there is interest
+  it could be revived into its own tool, I guess.
+
+* fix issue48 and issue59: raise an Error if the module
+ from an imported test file does not seem to come from
+ the filepath - avoids "same-name" confusion that has
+ been reported repeatedly
+
+* merged Ronny's nose-compatibility hacks: now
+ nose-style setup_module() and setup() functions are
+ supported
+
+* introduce generalized py.test.mark function marking
+
+* reshuffle / refine command line grouping
+
+* deprecate parser.addgroup in favour of getgroup which creates option group
+
+* add --report command line option that allows to control showing of skipped/xfailed sections
+
+* generalized skipping: a new way to mark python functions with skipif or xfail
+ at function, class and modules level based on platform or sys-module attributes.
+
+* extend py.test.mark decorator to allow for positional args
+
+* introduce and test "py.cleanup -d" to remove empty directories
+
+* fix issue #59 - robustify unittest test collection
+
+* make bpython/help interaction work by adding an __all__ attribute
+ to ApiModule, cleanup initpkg
+
+* use MIT license for pylib, add some contributors
+
+* remove py.execnet code and substitute all usages with 'execnet' proper
+
+* fix issue50 - cached_setup now caches more according to expectations
+  for test functions with multiple arguments.
+
+* merge Jarko's fixes, issue #45 and #46
+
+* add the ability to specify a path for py.lookup to search in
+
+* fix a funcarg cached_setup bug probably only occurring
+ in distributed testing and "module" scope with teardown.
+
+* many fixes and changes for making the code base python3 compatible,
+ many thanks to Benjamin Peterson for helping with this.
+
+* consolidate builtins implementation to be compatible with >=2.3,
+ add helpers to ease keeping 2 and 3k compatible code
+
+* deprecate py.compat.doctest|subprocess|textwrap|optparse
+
+* deprecate py.magic.autopath, remove py/magic directory
+
+* move pytest assertion handling to py/code and a pytest_assertion
+ plugin, add "--no-assert" option, deprecate py.magic namespaces
+ in favour of (less) py.code ones.
+
+* consolidate and cleanup py/code classes and files
+
+* cleanup py/misc, move tests to bin-for-dist
+
+* introduce delattr/delitem/delenv methods to py.test's monkeypatch funcarg
+
+* consolidate py.log implementation, remove old approach.
+
+* introduce py.io.TextIO and py.io.BytesIO for distinguishing between
+ text/unicode and byte-streams (uses underlying standard lib io.*
+ if available)
+
+* make py.unittest_convert helper script available which converts "unittest.py"
+ style files into the simpler assert/direct-test-classes py.test/nosetests
+ style. The script was written by Laura Creighton.
+
+* simplified internal localpath implementation
+
+1.0.2 (2009-08-27)
+==================
+
+* fixing packaging issues, triggered by fedora redhat packaging,
+ also added doc, examples and contrib dirs to the tarball.
+
+* added a documentation link to the new django plugin.
+
+1.0.1 (2009-08-19)
+==================
+
+* added a 'pytest_nose' plugin which handles nose.SkipTest,
+ nose-style function/method/generator setup/teardown and
+ tries to report functions correctly.
+
+* capturing of unicode writes or encoded strings to sys.stdout/err
+ work better, also terminalwriting was adapted and somewhat
+ unified between windows and linux.
+
+* improved documentation layout and content a lot
+
+* added a "--help-config" option to show conftest.py / ENV-var names for
+ all longopt cmdline options, and some special conftest.py variables.
+ renamed 'conf_capture' conftest setting to 'option_capture' accordingly.
+
+* fix issue #27: better reporting on non-collectable items given on commandline
+ (e.g. pyc files)
+
+* fix issue #33: added --version flag (thanks Benjamin Peterson)
+
+* fix issue #32: adding support for "incomplete" paths to wcpath.status()
+
+* "Test" prefixed classes are *not* collected by default anymore if they
+ have an __init__ method
+
+* monkeypatch setenv() now accepts a "prepend" parameter
+
+* improved reporting of collection error tracebacks
+
+* simplified multicall mechanism and plugin architecture,
+ renamed some internal methods and argnames
+
+1.0.0 (2009-08-04)
+==================
+
+* more terse reporting: try to show filesystem paths relative to the current dir
+* improve xfail output a bit
+
+1.0.0b9 (2009-07-31)
+====================
+
+* cleanly handle and report final teardown of test setup
+
+* fix svn-1.6 compat issue with py.path.svnwc().versioned()
+ (thanks Wouter Vanden Hove)
+
+* setup/teardown or collection problems now show as ERRORs
+  or with big "E"'s in the progress lines. They are reported
+ and counted separately.
+
+* dist-testing: properly handle test items that get locally
+ collected but cannot be collected on the remote side - often
+ due to platform/dependency reasons
+
+* simplified py.test.mark API - see keyword plugin documentation
+
+* integrate better with logging: capturing now by default captures
+ test functions and their immediate setup/teardown in a single stream
+
+* capsys and capfd funcargs now have a readouterr() and a close() method
+ (underlyingly py.io.StdCapture/FD objects are used which grew a
+ readouterr() method as well to return snapshots of captured out/err)
+
+* make assert-reinterpretation work better with comparisons not
+  returning bools (reported with numpy, thanks Maciej Fijalkowski)
+
+* reworked per-test output capturing into the pytest_iocapture.py plugin
+ and thus removed capturing code from config object
+
+* item.repr_failure(excinfo) instead of item.repr_failure(excinfo, outerr)
+
+
+1.0.0b8 (2009-07-22)
+====================
+
+* pytest_unittest-plugin is now enabled by default
+
+* introduced pytest_keyboardinterrupt hook and
+  refined the pytest_sessionfinish hook, added tests.
+
+* workaround a buggy logging module interaction ("closing already closed
+ files"). Thanks to Sridhar Ratnakumar for triggering.
+
+* if plugins use "py.test.importorskip" for importing
+ a dependency only a warning will be issued instead
+ of exiting the testing process.
+
+* many improvements to docs:
+  - refined funcargs doc, use the term "factory" instead of "provider"
+ - added a new talk/tutorial doc page
+ - better download page
+ - better plugin docstrings
+ - added new plugins page and automatic doc generation script
+
+* fixed teardown problem related to partially failing funcarg setups
+ (thanks MrTopf for reporting), "pytest_runtest_teardown" is now
+ always invoked even if the "pytest_runtest_setup" failed.
+
+* tweaked doctest output for docstrings in py modules,
+ thanks Radomir.
+
+1.0.0b7
+=======
+
+* renamed py.test.xfail back to py.test.mark.xfail to avoid
+ two ways to decorate for xfail
+
+* re-added py.test.mark decorator for setting keywords on functions
+ (it was actually documented so removing it was not nice)
+
+* remove scope-argument from request.addfinalizer() because
+ request.cached_setup has the scope arg. TOOWTDI.
+
+* perform setup finalization before reporting failures
+
+* apply modified patches from Andreas Kloeckner to allow
+ test functions to have no func_code (#22) and to make
+ "-k" and function keywords work (#20)
+
+* apply patch from Daniel Peolzleithner (issue #23)
+
+* resolve issue #18, multiprocessing.Manager() and
+ redirection clash
+
+* make __name__ == "__channelexec__" for remote_exec code
+
+1.0.0b3 (2009-06-19)
+====================
+
+* plugin classes are removed: one now defines
+ hooks directly in conftest.py or global pytest_*.py
+ files.
+
+* added new pytest_namespace(config) hook that allows
+ to inject helpers directly to the py.test.* namespace.
+
+* documented and refined many hooks
+
+* added new style of generative tests via
+ pytest_generate_tests hook that integrates
+ well with function arguments.
+
+
+1.0.0b1
+=======
+
+* introduced new "funcarg" setup method,
+ see doc/test/funcarg.txt
+
+* introduced plugin architecture and many
+ new py.test plugins, see
+ doc/test/plugins.txt
+
+* teardown_method is now guaranteed to get
+ called after a test method has run.
+
+* new method: py.test.importorskip(mod,minversion)
+ will either import or call py.test.skip()
+
+* completely revised internal py.test architecture
+
+* new py.process.ForkedFunc object allowing to
+ fork execution of a function to a sub process
+ and getting a result back.
+
+XXX lots of things missing here XXX
+
+0.9.2
+=====
+
+* refined installation and metadata, created new setup.py,
+ now based on setuptools/ez_setup (thanks to Ralf Schmitt
+ for his support).
+
+* improved the way of making py.* scripts available in
+ windows environments, they are now added to the
+ Scripts directory as ".cmd" files.
+
+* py.path.svnwc.status() now is more complete and
+ uses xml output from the 'svn' command if available
+ (Guido Wesdorp)
+
+* fix for py.path.svn* to work with svn 1.5
+ (Chris Lamb)
+
+* fix path.relto(otherpath) method on windows to
+ use normcase for checking if a path is relative.
+
+* py.test's traceback is better parseable from editors
+ (follows the filenames:LINENO: MSG convention)
+ (thanks to Osmo Salomaa)
+
+* fix to javascript-generation, "py.test --runbrowser"
+ should work more reliably now
+
+* removed previously accidentally added
+ py.test.broken and py.test.notimplemented helpers.
+
+* there now is a py.__version__ attribute
+
+0.9.1
+=====
+
+This is a fairly complete list of changes in v0.9.1, which can
+serve as a reference for developers.
+
+* allowing + signs in py.path.svn urls [39106]
+* fixed support for Failed exceptions without excinfo in py.test [39340]
+* added support for killing processes for Windows (as well as platforms that
+ support os.kill) in py.misc.killproc [39655]
+* added setup/teardown for generative tests to py.test [40702]
+* added detection of FAILED TO LOAD MODULE to py.test [40703, 40738, 40739]
+* fixed problem with calling .remove() on wcpaths of non-versioned files in
+ py.path [44248]
+* fixed some import and inheritance issues in py.test [41480, 44648, 44655]
+* fail to run greenlet tests when pypy is available, but without stackless
+ [45294]
+* small fixes in rsession tests [45295]
+* fixed issue with 2.5 type representations in py.test [45483, 45484]
+* made that internal reporting issues displaying is done atomically in py.test
+ [45518]
+* made that non-existing files are ignored by the py.lookup script [45519]
+* improved exception name creation in py.test [45535]
+* made that less threads are used in execnet [merge in 45539]
+* removed lock required for atomic reporting issue displaying in py.test
+ [45545]
+* removed globals from execnet [45541, 45547]
+* refactored cleanup mechanics, made that setDaemon is set to 1 to make atexit
+ get called in 2.5 (py.execnet) [45548]
+* fixed bug in joining threads in py.execnet's servemain [45549]
+* refactored py.test.rsession tests to not rely on exact output format anymore
+ [45646]
+* using repr() on test outcome [45647]
+* added 'Reason' classes for py.test.skip() [45648, 45649]
+* killed some unnecessary sanity check in py.test.collect [45655]
+* avoid using os.tmpfile() in py.io.fdcapture because on Windows it's only
+ usable by Administrators [45901]
+* added support for locking and non-recursive commits to py.path.svnwc [45994]
+* locking files in py.execnet to prevent CPython from segfaulting [46010]
+* added export() method to py.path.svnurl
+* fixed -d -x in py.test [47277]
+* fixed argument concatenation problem in py.path.svnwc [49423]
+* restore py.test behaviour that it exits with code 1 when there are failures
+ [49974]
+* don't fail on html files that don't have an accompanying .txt file [50606]
+* fixed 'utestconvert.py < input' [50645]
+* small fix for code indentation in py.code.source [50755]
+* fix _docgen.py documentation building [51285]
+* improved checks for source representation of code blocks in py.test [51292]
+* added support for passing authentication to py.path.svn* objects [52000,
+ 52001]
+* removed sorted() call for py.apigen tests in favour of [].sort() to support
+ Python 2.3 [52481]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/conf.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/conf.py
new file mode 100644
index 0000000000..b316163532
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/conf.py
@@ -0,0 +1,478 @@
+#
+# pytest documentation build configuration file, created by
+# sphinx-quickstart on Fri Oct 8 17:54:28 2010.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The full version, including alpha/beta/rc tags.
+# The short X.Y version.
+import ast
+import os
+import shutil
+import sys
+from textwrap import dedent
+from typing import List
+from typing import TYPE_CHECKING
+
+from _pytest import __version__ as version
+
+if TYPE_CHECKING:
+ import sphinx.application
+
+
+release = ".".join(version.split(".")[:2])
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+# sys.path.insert(0, os.path.abspath('.'))
+
+autodoc_member_order = "bysource"
+autodoc_typehints = "description"
+todo_include_todos = 1
+
+latex_engine = "lualatex"
+
+latex_elements = {
+ "preamble": dedent(
+ r"""
+ \directlua{
+ luaotfload.add_fallback("fallbacks", {
+ "Noto Serif CJK SC:style=Regular;",
+ "Symbola:Style=Regular;"
+ })
+ }
+
+ \setmainfont{FreeSerif}[RawFeature={fallback=fallbacks}]
+ """
+ )
+}
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = [
+ "pallets_sphinx_themes",
+ "pygments_pytest",
+ "sphinx.ext.autodoc",
+ "sphinx.ext.autosummary",
+ "sphinx.ext.extlinks",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.todo",
+ "sphinx.ext.viewcode",
+ "sphinx_removed_in",
+ "sphinxcontrib_trio",
+]
+
+# Building PDF docs on readthedocs requires inkscape for svg to pdf
+# conversion. The relevant plugin is not useful for normal HTML builds, but
+# it still raises warnings and fails CI if inkscape is not available. So
+# only use the plugin if inkscape is actually available.
+if shutil.which("inkscape"):
+ extensions.append("sphinxcontrib.inkscapeconverter")
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# The suffix of source filenames.
+source_suffix = ".rst"
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = "contents"
+
+# General information about the project.
+project = "pytest"
+copyright = "2015, holger krekel and pytest-dev team"
+
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+# language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = [
+ "_build",
+ "naming20.rst",
+ "test/*",
+ "old_*",
+ "*attic*",
+ "*/attic*",
+ "funcargs.rst",
+ "setup.rst",
+ "example/remoteinterp.rst",
+]
+
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+default_role = "literal"
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+add_module_names = False
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# A list of regular expressions that match URIs that should not be checked when
+# doing a linkcheck.
+linkcheck_ignore = [
+ "https://blogs.msdn.microsoft.com/bharry/2017/06/28/testing-in-a-cloud-delivery-cadence/",
+ "http://pythontesting.net/framework/pytest-introduction/",
+ r"https://github.com/pytest-dev/pytest/issues/\d+",
+ r"https://github.com/pytest-dev/pytest/pull/\d+",
+]
+
+# The number of worker threads to use when checking links (default=5).
+linkcheck_workers = 5
+
+
+_repo = "https://github.com/pytest-dev/pytest"
+extlinks = {
+ "bpo": ("https://bugs.python.org/issue%s", "bpo-"),
+ "pypi": ("https://pypi.org/project/%s/", ""),
+ "issue": (f"{_repo}/issues/%s", "issue #"),
+ "pull": (f"{_repo}/pull/%s", "pull request #"),
+ "user": ("https://github.com/%s", "@"),
+}
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+sys.path.append(os.path.abspath("_themes"))
+html_theme_path = ["_themes"]
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = "flask"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+# html_theme_options = {"index_logo": None}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+html_title = "pytest documentation"
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+html_short_title = "pytest-%s" % release
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+html_logo = "img/pytest_logo_curves.svg"
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+html_favicon = "img/favicon.png"
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+# html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+# html_sidebars = {'index': 'indexsidebar.html'}
+
+html_sidebars = {
+ "index": [
+ "slim_searchbox.html",
+ "sidebarintro.html",
+ "globaltoc.html",
+ "links.html",
+ "sourcelink.html",
+ ],
+ "**": [
+ "slim_searchbox.html",
+ "globaltoc.html",
+ "relations.html",
+ "links.html",
+ "sourcelink.html",
+ ],
+}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+# html_additional_pages = {'index': 'index.html'}
+
+
+# If false, no module index is generated.
+html_domain_indices = True
+
+# If false, no index is generated.
+html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+html_show_sourcelink = False
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "pytestdoc"
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+# latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+# latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ (
+ "contents",
+ "pytest.tex",
+ "pytest Documentation",
+ "holger krekel, trainer and consultant, https://merlinux.eu/",
+ "manual",
+ )
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+latex_logo = "img/pytest1.png"
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Additional stuff for the LaTeX preamble.
+# latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+latex_domain_indices = False
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [("usage", "pytest", "pytest usage", ["holger krekel at merlinux eu"], 1)]
+
+
+# -- Options for Epub output ---------------------------------------------------
+
+# Bibliographic Dublin Core info.
+epub_title = "pytest"
+epub_author = "holger krekel at merlinux eu"
+epub_publisher = "holger krekel at merlinux eu"
+epub_copyright = "2013, holger krekel et alii"
+
+# The language of the text. It defaults to the language option
+# or en if the language is not set.
+# epub_language = ''
+
+# The scheme of the identifier. Typical schemes are ISBN or URL.
+# epub_scheme = ''
+
+# The unique identifier of the text. This can be a ISBN number
+# or the project homepage.
+# epub_identifier = ''
+
+# A unique identification for the text.
+# epub_uid = ''
+
+# HTML files that should be inserted before the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+# epub_pre_files = []
+
+# HTML files that should be inserted after the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+# epub_post_files = []
+
+# A list of files that should not be packed into the epub file.
+# epub_exclude_files = []
+
+# The depth of the table of contents in toc.ncx.
+# epub_tocdepth = 3
+
+# Allow duplicate toc entries.
+# epub_tocdup = True
+
+
+# -- Options for texinfo output ------------------------------------------------
+
+texinfo_documents = [
+ (
+ master_doc,
+ "pytest",
+ "pytest Documentation",
+ (
+ "Holger Krekel@*Benjamin Peterson@*Ronny Pfannschmidt@*"
+ "Floris Bruynooghe@*others"
+ ),
+ "pytest",
+ "simple powerful testing with Python",
+ "Programming",
+ 1,
+ )
+]
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+ "pluggy": ("https://pluggy.readthedocs.io/en/stable", None),
+ "python": ("https://docs.python.org/3", None),
+ "numpy": ("https://numpy.org/doc/stable", None),
+ "pip": ("https://pip.pypa.io/en/stable", None),
+ "tox": ("https://tox.wiki/en/stable", None),
+ "virtualenv": ("https://virtualenv.pypa.io/en/stable", None),
+ "django": (
+ "http://docs.djangoproject.com/en/stable",
+ "http://docs.djangoproject.com/en/stable/_objects",
+ ),
+ "setuptools": ("https://setuptools.pypa.io/en/stable", None),
+}
+
+
+def configure_logging(app: "sphinx.application.Sphinx") -> None:
+ """Configure Sphinx's WarningHandler to handle (expected) missing include."""
+ import sphinx.util.logging
+ import logging
+
+ class WarnLogFilter(logging.Filter):
+ def filter(self, record: logging.LogRecord) -> bool:
+ """Ignore warnings about missing include with "only" directive.
+
+ Ref: https://github.com/sphinx-doc/sphinx/issues/2150."""
+ if (
+ record.msg.startswith('Problems with "include" directive path:')
+ and "_changelog_towncrier_draft.rst" in record.msg
+ ):
+ return False
+ return True
+
+ logger = logging.getLogger(sphinx.util.logging.NAMESPACE)
+ warn_handler = [x for x in logger.handlers if x.level == logging.WARNING]
+ assert len(warn_handler) == 1, warn_handler
+ warn_handler[0].filters.insert(0, WarnLogFilter())
+
+
+def setup(app: "sphinx.application.Sphinx") -> None:
+ # from sphinx.ext.autodoc import cut_lines
+ # app.connect('autodoc-process-docstring', cut_lines(4, what=['module']))
+ app.add_crossref_type(
+ "fixture",
+ "fixture",
+ objname="built-in fixture",
+ indextemplate="pair: %s; fixture",
+ )
+
+ app.add_object_type(
+ "confval",
+ "confval",
+ objname="configuration value",
+ indextemplate="pair: %s; configuration value",
+ )
+
+ app.add_object_type(
+ "globalvar",
+ "globalvar",
+ objname="global variable interpreted by pytest",
+ indextemplate="pair: %s; global variable interpreted by pytest",
+ )
+
+ app.add_crossref_type(
+ directivename="hook",
+ rolename="hook",
+ objname="pytest hook",
+ indextemplate="pair: %s; hook",
+ )
+
+ configure_logging(app)
+
+ # Make Sphinx mark classes with "final" when decorated with @final.
+ # We need this because we import final from pytest._compat, not from
+ # typing (for Python < 3.8 compat), so Sphinx doesn't detect it.
+ # To keep things simple we accept any `@final` decorator.
+ # Ref: https://github.com/pytest-dev/pytest/pull/7780
+ import sphinx.pycode.ast
+ import sphinx.pycode.parser
+
+ original_is_final = sphinx.pycode.parser.VariableCommentPicker.is_final
+
+ def patched_is_final(self, decorators: List[ast.expr]) -> bool:
+ if original_is_final(self, decorators):
+ return True
+ return any(
+ sphinx.pycode.ast.unparse(decorator) == "final" for decorator in decorators
+ )
+
+ sphinx.pycode.parser.VariableCommentPicker.is_final = patched_is_final
+
+ # legacypath.py monkey-patches pytest.Testdir in. Import the file so
+ # that autodoc can discover references to it.
+ import _pytest.legacypath # noqa: F401
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/conftest.py
new file mode 100644
index 0000000000..1a62e1b5df
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/conftest.py
@@ -0,0 +1 @@
+collect_ignore = ["conf.py"]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/contact.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/contact.rst
new file mode 100644
index 0000000000..beed10d7f2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/contact.rst
@@ -0,0 +1,54 @@
+
+.. _`contact channels`:
+.. _`contact`:
+
+Contact channels
+===================================
+
+- `pytest issue tracker`_ to report bugs or suggest features (for version
+ 2.0 and above).
+- `pytest discussions`_ at github for general questions.
+- `pytest discord server <https://discord.com/invite/pytest-dev>`_
+ for pytest development visibility and general assistance.
+- `pytest on stackoverflow.com <http://stackoverflow.com/search?q=pytest>`_
+  to post precise questions with the tag ``pytest``. New questions will usually
+ be seen by pytest users or developers and answered quickly.
+
+- `Testing In Python`_: a mailing list for Python testing tools and discussion.
+
+- `pytest-dev at python.org (mailing list)`_ pytest specific announcements and discussions.
+
+- :doc:`contribution guide <contributing>` for help on submitting pull
+ requests to GitHub.
+
+- ``#pytest`` `on irc.libera.chat <ircs://irc.libera.chat:6697/#pytest>`_ IRC
+ channel for random questions (using an IRC client, `via webchat
+ <https://web.libera.chat/#pytest>`_, or `via Matrix
+ <https://matrix.to/#/%23pytest:libera.chat>`_).
+
+- private mail to Holger.Krekel at gmail com if you want to communicate sensitive issues
+
+
+- `merlinux.eu`_ offers pytest and tox-related professional teaching and
+ consulting.
+
+.. _`pytest issue tracker`: https://github.com/pytest-dev/pytest/issues
+.. _`old issue tracker`: https://bitbucket.org/hpk42/py-trunk/issues/
+
+.. _`pytest discussions`: https://github.com/pytest-dev/pytest/discussions
+
+.. _`merlinux.eu`: https://merlinux.eu/
+
+.. _`get an account`:
+
+.. _tetamap: https://tetamap.wordpress.com/
+
+.. _`@pylibcommit`: https://twitter.com/pylibcommit
+
+
+.. _`Testing in Python`: http://lists.idyll.org/listinfo/testing-in-python
+.. _FOAF: https://en.wikipedia.org/wiki/FOAF
+.. _`py-dev`:
+.. _`development mailing list`:
+.. _`pytest-dev at python.org (mailing list)`: http://mail.python.org/mailman/listinfo/pytest-dev
+.. _`pytest-commit at python.org (mailing list)`: http://mail.python.org/mailman/listinfo/pytest-commit
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/contents.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/contents.rst
new file mode 100644
index 0000000000..049d44ba9d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/contents.rst
@@ -0,0 +1,116 @@
+.. _toc:
+
+Full pytest documentation
+===========================
+
+`Download latest version as PDF <https://media.readthedocs.org/pdf/pytest/latest/pytest.pdf>`_
+
+.. `Download latest version as EPUB <http://media.readthedocs.org/epub/pytest/latest/pytest.epub>`_
+
+
+Start here
+-----------
+
+.. toctree::
+ :maxdepth: 2
+
+ getting-started
+
+
+How-to guides
+-------------
+
+.. toctree::
+ :maxdepth: 2
+
+ how-to/usage
+ how-to/assert
+ how-to/fixtures
+ how-to/mark
+ how-to/parametrize
+ how-to/tmp_path
+ how-to/monkeypatch
+ how-to/doctest
+ how-to/cache
+
+ how-to/logging
+ how-to/capture-stdout-stderr
+ how-to/capture-warnings
+ how-to/skipping
+
+ how-to/plugins
+ how-to/writing_plugins
+ how-to/writing_hook_functions
+
+ how-to/existingtestsuite
+ how-to/unittest
+ how-to/nose
+ how-to/xunit_setup
+
+ how-to/bash-completion
+
+
+Reference guides
+-----------------
+
+.. toctree::
+ :maxdepth: 2
+
+ reference/fixtures
+ reference/plugin_list
+ reference/customize
+ reference/reference
+
+
+Explanation
+-----------------
+
+.. toctree::
+ :maxdepth: 2
+
+ explanation/anatomy
+ explanation/fixtures
+ explanation/goodpractices
+ explanation/flaky
+ explanation/pythonpath
+
+
+Further topics
+-----------------
+
+.. toctree::
+ :maxdepth: 2
+
+ example/index
+
+ backwards-compatibility
+ deprecations
+ py27-py34-deprecation
+
+ contributing
+ development_guide
+
+ sponsor
+ tidelift
+ license
+ contact
+
+ history
+ historical-notes
+ talks
+
+
+.. only:: html
+
+ .. toctree::
+ :maxdepth: 1
+
+ announce/index
+
+.. only:: html
+
+ .. toctree::
+ :hidden:
+ :maxdepth: 1
+
+ changelog
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/contributing.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/contributing.rst
new file mode 100644
index 0000000000..2b6578f6b9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/contributing.rst
@@ -0,0 +1,3 @@
+.. _contributing:
+
+.. include:: ../../CONTRIBUTING.rst
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/deprecations.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/deprecations.rst
new file mode 100644
index 0000000000..0f19744ade
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/deprecations.rst
@@ -0,0 +1,954 @@
+.. _deprecations:
+
+Deprecations and Removals
+=========================
+
+This page lists all pytest features that are currently deprecated or have been removed in past major releases.
+The objective is to give users a clear rationale why a certain feature has been removed, and what alternatives
+should be used instead.
+
+.. contents::
+ :depth: 3
+ :local:
+
+
+Deprecated Features
+-------------------
+
+Below is a complete list of all pytest features which are considered deprecated. Using those features will issue
+:class:`PytestWarning` or subclasses, which can be filtered using :ref:`standard warning filters <warnings>`.
+
+.. _instance-collector-deprecation:
+
+The ``pytest.Instance`` collector
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 7.0
+
+The ``pytest.Instance`` collector type has been removed.
+
+Previously, Python test methods were collected as :class:`~pytest.Class` -> ``Instance`` -> :class:`~pytest.Function`.
+Now :class:`~pytest.Class` collects the test methods directly.
+
+Most plugins which reference ``Instance`` do so in order to ignore or skip it,
+using a check such as ``if isinstance(node, Instance): return``.
+Such plugins should simply remove consideration of ``Instance`` on pytest>=7.
+However, to keep such uses working, a dummy type has been instantiated in ``pytest.Instance`` and ``_pytest.python.Instance``,
+and importing it emits a deprecation warning. This will be removed in pytest 8.
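+
+A minimal sketch of what that cleanup can look like in a plugin (the
+``pytest_collectstart`` hook implementation below is purely illustrative):
+
+.. code-block:: python
+
+    def pytest_collectstart(collector):
+        # pytest<7 plugins often bailed out here with
+        #   "if isinstance(collector, Instance): return".
+        # On pytest>=7, Class yields Function items directly, so the guard
+        # (and the Instance import) can simply be deleted.
+        ...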
+
+
+.. _node-ctor-fspath-deprecation:
+
+``fspath`` argument for Node constructors replaced with ``pathlib.Path``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 7.0
+
+In order to support the transition from ``py.path.local`` to :mod:`pathlib`,
+the ``fspath`` argument to :class:`~_pytest.nodes.Node` constructors like
+:func:`pytest.Function.from_parent()` and :func:`pytest.Class.from_parent()`
+is now deprecated.
+
+Plugins which construct nodes should pass the ``path`` argument, of type
+:class:`pathlib.Path`, instead of the ``fspath`` argument.
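+
+A rough before/after sketch (``YamlFile`` is a hypothetical custom collector,
+not part of pytest):
+
+.. code-block:: python
+
+    import pathlib
+
+    # deprecated: py.path.local based argument
+    # collector = YamlFile.from_parent(parent, fspath=py.path.local("spec.yaml"))
+
+    # preferred: pathlib.Path based argument
+    collector = YamlFile.from_parent(parent, path=pathlib.Path("spec.yaml"))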
+
+Plugins which implement custom items and collectors are encouraged to replace
+``fspath`` parameters (``py.path.local``) with ``path`` parameters
+(``pathlib.Path``), and drop any other usage of the ``py`` library if possible.
+
+If possible, plugins with custom items should use :ref:`cooperative
+constructors <uncooperative-constructors-deprecated>` to avoid hardcoding
+arguments they only pass on to the superclass.
+
+.. note::
+ The name of the :class:`~_pytest.nodes.Node` arguments and attributes (the
+ new attribute being ``path``) is **the opposite** of the situation for
+ hooks, :ref:`outlined below <legacy-path-hooks-deprecated>` (the old
+ argument being ``path``).
+
+ This is an unfortunate artifact due to historical reasons, which should be
+ resolved in future versions as we slowly get rid of the :pypi:`py`
+ dependency (see :issue:`9283` for a longer discussion).
+
+Due to the ongoing migration of methods like :meth:`~_pytest.Item.reportinfo`
+which is still expected to return a ``py.path.local`` object, nodes still have
+both ``fspath`` (``py.path.local``) and ``path`` (``pathlib.Path``) attributes,
+no matter what argument was used in the constructor. We expect to deprecate the
+``fspath`` attribute in a future release.
+
+.. _legacy-path-hooks-deprecated:
+
+``py.path.local`` arguments for hooks replaced with ``pathlib.Path``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 7.0
+
+In order to support the transition from ``py.path.local`` to :mod:`pathlib`, the following hooks now receive additional arguments:
+
+* :hook:`pytest_ignore_collect(collection_path: pathlib.Path) <pytest_ignore_collect>` as equivalent to ``path``
+* :hook:`pytest_collect_file(file_path: pathlib.Path) <pytest_collect_file>` as equivalent to ``path``
+* :hook:`pytest_pycollect_makemodule(module_path: pathlib.Path) <pytest_pycollect_makemodule>` as equivalent to ``path``
+* :hook:`pytest_report_header(start_path: pathlib.Path) <pytest_report_header>` as equivalent to ``startdir``
+* :hook:`pytest_report_collectionfinish(start_path: pathlib.Path) <pytest_report_collectionfinish>` as equivalent to ``startdir``
+
+The accompanying ``py.path.local`` based paths have been deprecated: plugins which manually invoke those hooks should only pass the new ``pathlib.Path`` arguments, and users should change their hook implementations to use the new ``pathlib.Path`` arguments.
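+
+For instance, a ``conftest.py`` hook implementation might now look roughly like
+this (the ``.yaml`` check and the ``YamlFile`` class are illustrative only):
+
+.. code-block:: python
+
+    from pathlib import Path
+
+
+    def pytest_collect_file(file_path: Path, parent):
+        # use the new pathlib.Path argument instead of the deprecated
+        # py.path.local based ``path`` argument
+        if file_path.suffix == ".yaml":
+            return YamlFile.from_parent(parent, path=file_path)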
+
+.. note::
+ The name of the :class:`~_pytest.nodes.Node` arguments and attributes,
+ :ref:`outlined above <node-ctor-fspath-deprecation>` (the new attribute
+ being ``path``) is **the opposite** of the situation for hooks (the old
+ argument being ``path``).
+
+ This is an unfortunate artifact due to historical reasons, which should be
+ resolved in future versions as we slowly get rid of the :pypi:`py`
+ dependency (see :issue:`9283` for a longer discussion).
+
+Directly constructing internal classes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 7.0
+
+Directly constructing the following classes is now deprecated:
+
+- ``_pytest.mark.structures.Mark``
+- ``_pytest.mark.structures.MarkDecorator``
+- ``_pytest.mark.structures.MarkGenerator``
+- ``_pytest.python.Metafunc``
+- ``_pytest.runner.CallInfo``
+- ``_pytest._code.ExceptionInfo``
+- ``_pytest.config.argparsing.Parser``
+- ``_pytest.config.argparsing.OptionGroup``
+- ``_pytest.pytester.HookRecorder``
+
+These constructors have always been considered private, but now issue a deprecation warning, which may become a hard error in pytest 8.
+
+.. _cmdline-preparse-deprecated:
+
+Passing ``msg=`` to ``pytest.skip``, ``pytest.fail`` or ``pytest.exit``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 7.0
+
+Passing the keyword argument ``msg`` to :func:`pytest.skip`, :func:`pytest.fail` or :func:`pytest.exit`
+is now deprecated and ``reason`` should be used instead. This change is to bring consistency between these
+functions and the ``@pytest.mark.skip`` and ``@pytest.mark.xfail`` markers which already accept a ``reason`` argument.
+
+.. code-block:: python
+
+ def test_fail_example():
+ # old
+ pytest.fail(msg="foo")
+ # new
+ pytest.fail(reason="bar")
+
+
+ def test_skip_example():
+ # old
+ pytest.skip(msg="foo")
+ # new
+ pytest.skip(reason="bar")
+
+
+ def test_exit_example():
+ # old
+ pytest.exit(msg="foo")
+ # new
+ pytest.exit(reason="bar")
+
+
+Implementing the ``pytest_cmdline_preparse`` hook
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 7.0
+
+Implementing the :hook:`pytest_cmdline_preparse` hook has been officially deprecated.
+Implement the :hook:`pytest_load_initial_conftests` hook instead.
+
+.. code-block:: python
+
+ def pytest_cmdline_preparse(config: Config, args: List[str]) -> None:
+ ...
+
+
+ # becomes:
+
+
+ def pytest_load_initial_conftests(
+ early_config: Config, parser: Parser, args: List[str]
+ ) -> None:
+ ...
+
+.. _diamond-inheritance-deprecated:
+
+Diamond inheritance between :class:`pytest.Collector` and :class:`pytest.Item`
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 7.0
+
+Defining a custom pytest node type which is both an :class:`pytest.Item <Item>` and a :class:`pytest.Collector <Collector>` (e.g. :class:`pytest.File <File>`) now issues a warning.
+It was never sanely supported and triggers hard-to-debug errors.
+
+Some plugins providing linting/code analysis have been using this as a hack.
+Instead, a separate collector node should be used, which collects the item. See
+:ref:`non-python tests` for an example, as well as an `example pr fixing inheritance`_.
+
+.. _example pr fixing inheritance: https://github.com/asmeurer/pytest-flakes/pull/40/files
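+
+A rough sketch of the recommended shape, with the collector and the item kept
+as separate classes (all names here are illustrative):
+
+.. code-block:: python
+
+    import pytest
+
+
+    class LintFile(pytest.File):
+        """Collector only: yields one item per checked file."""
+
+        def collect(self):
+            yield LintItem.from_parent(self, name="lint")
+
+
+    class LintItem(pytest.Item):
+        """Item only: performs the actual check."""
+
+        def runtest(self):
+            ...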
+
+
+.. _uncooperative-constructors-deprecated:
+
+Constructors of custom :class:`pytest.Node` subclasses should take ``**kwargs``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 7.0
+
+If custom subclasses of nodes like :class:`pytest.Item` override the
+``__init__`` method, they should take ``**kwargs``. Thus,
+
+.. code-block:: python
+
+ class CustomItem(pytest.Item):
+ def __init__(self, name, parent, additional_arg):
+ super().__init__(name, parent)
+ self.additional_arg = additional_arg
+
+should be turned into:
+
+.. code-block:: python
+
+ class CustomItem(pytest.Item):
+ def __init__(self, *, additional_arg, **kwargs):
+ super().__init__(**kwargs)
+ self.additional_arg = additional_arg
+
+to avoid hard-coding the arguments pytest can pass to the superclass.
+See :ref:`non-python tests` for a full example.
+
+For cases without conflicts, no deprecation warning is emitted. For cases with
+conflicts (such as :class:`pytest.File` now taking ``path`` instead of
+``fspath``, as :ref:`outlined above <node-ctor-fspath-deprecation>`), a
+deprecation warning is now raised.
+
+Backward compatibilities in ``Parser.addoption``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 2.4
+
+Several behaviors of :meth:`Parser.addoption <pytest.Parser.addoption>` are now
+scheduled for removal in pytest 8 (deprecated since pytest 2.4.0):
+
+- ``parser.addoption(..., help=".. %default ..")`` - use ``%(default)s`` instead.
+- ``parser.addoption(..., type="int/string/float/complex")`` - use ``type=int`` etc. instead.
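+
+In a ``conftest.py`` the new spellings look like this (the ``--iterations``
+option is just an example):
+
+.. code-block:: python
+
+    def pytest_addoption(parser):
+        # old: parser.addoption("--iterations", type="int",
+        #                       help="iterations (default: %default)")
+        parser.addoption(
+            "--iterations",
+            type=int,
+            default=10,
+            help="number of iterations (default: %(default)s)",
+        )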
+
+
+Raising ``unittest.SkipTest`` during collection
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 7.0
+
+Raising :class:`unittest.SkipTest` to skip collection of tests during the
+pytest collection phase is deprecated. Use :func:`pytest.skip` instead.
+
+Note: This deprecation only relates to using `unittest.SkipTest` during test
+collection. You are probably not doing that. Ordinary usage of
+:class:`unittest.SkipTest` / :meth:`unittest.TestCase.skipTest` /
+:func:`unittest.skip` in unittest test cases is fully supported.
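+
+For example, to skip an entire module at collection time (a minimal sketch):
+
+.. code-block:: python
+
+    import pytest
+
+    # old, deprecated during collection:
+    #   raise unittest.SkipTest("needs the optional dependency")
+
+    # new:
+    pytest.skip("needs the optional dependency", allow_module_level=True)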
+
+Using ``pytest.warns(None)``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 7.0
+
+:func:`pytest.warns(None) <pytest.warns>` is now deprecated because it was frequently misused.
+Its correct usage was checking that the code emits at least one warning of any type - like ``pytest.warns()``
+or ``pytest.warns(Warning)``.
+
+See :ref:`warns use cases` for examples.
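+
+A minimal example of the replacement usage:
+
+.. code-block:: python
+
+    import warnings
+
+    import pytest
+
+
+    def test_emits_some_warning():
+        # old: with pytest.warns(None) as record:
+        with pytest.warns(Warning):
+            warnings.warn("deprecated call", DeprecationWarning)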
+
+The ``--strict`` command-line option
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 6.2
+
+The ``--strict`` command-line option has been deprecated in favor of ``--strict-markers``, which
+better conveys what the option does.
+
+We may reintroduce ``--strict`` in the future as an encompassing flag for all
+strictness-related options (``--strict-markers`` and ``--strict-config`` at the
+moment; more might be introduced later).
+
+
+The ``yield_fixture`` function/decorator
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 6.2
+
+``pytest.yield_fixture`` is a deprecated alias for :func:`pytest.fixture`.
+
+It has been deprecated for a very long time, so it can be search/replaced safely.
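+
+A minimal before/after sketch:
+
+.. code-block:: python
+
+    import pytest
+
+    # old:
+    # @pytest.yield_fixture
+    # new:
+    @pytest.fixture
+    def resource():
+        yield "resource"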
+
+
+The ``pytest_warning_captured`` hook
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 6.0
+
+This hook has an `item` parameter which cannot be serialized by ``pytest-xdist``.
+
+Use the ``pytest_warning_recorded`` hook instead, which replaces the ``item`` parameter
+with a ``nodeid`` parameter.
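+
+A sketch of an implementation of the replacement hook (parameter names follow
+the current hook specification; adjust to the pytest version in use):
+
+.. code-block:: python
+
+    def pytest_warning_recorded(warning_message, when, nodeid, location):
+        # nodeid is a plain string, so this is safe under pytest-xdist
+        print(f"{nodeid}: {warning_message.message} (during {when})")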
+
+The ``pytest.collect`` module
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 6.0
+
+The ``pytest.collect`` module is no longer part of the public API, all its names
+should now be imported from ``pytest`` directly instead.
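+
+For example, an import like this simply moves to the top-level ``pytest``
+namespace:
+
+.. code-block:: python
+
+    # old: from pytest.collect import File, Item
+    from pytest import File, Item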
+
+
+The ``pytest._fillfuncargs`` function
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 6.0
+
+This function was kept for backward compatibility with an older plugin.
+
+Its functionality is not meant to be used directly, but if you must replace
+it, use `function._request._fillfixtures()` instead, though note this is not
+a public API and may break in the future.
+
+
+Removed Features
+----------------
+
+As stated in our :ref:`backwards-compatibility` policy, deprecated features are removed only in major releases after
+an appropriate period of deprecation has passed.
+
+``--no-print-logs`` command-line option
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 5.4
+.. versionremoved:: 6.0
+
+
+The ``--no-print-logs`` option and ``log_print`` ini setting are removed. If
+you used them, please use ``--show-capture`` instead.
+
+A ``--show-capture`` command-line option was added in pytest ``3.5.0``; it lets you specify how
+captured output should be displayed when tests fail: ``no``, ``stdout``, ``stderr``, ``log`` or ``all`` (the default).
+
+
+.. _resultlog deprecated:
+
+Result log (``--result-log``)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. deprecated:: 4.0
+.. versionremoved:: 6.0
+
+The ``--result-log`` option produces a stream of test reports which can be
+analysed at runtime, but it uses a custom format which requires users to implement their own
+parser.
+
+The `pytest-reportlog <https://github.com/pytest-dev/pytest-reportlog>`__ plugin provides a ``--report-log`` option,
+a more standard and extensible alternative that produces one JSON object per line, and should cover the same use
+cases. Please try it out and provide feedback.
+
+The ``pytest-reportlog`` plugin might even be merged into the core
+at some point, depending on the plans for the plugins and number of users using it.
+
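+Since ``--report-log`` writes one JSON object per line, a minimal consumer can be sketched as
+follows (the ``log.jsonl`` file name is hypothetical):
+
+.. code-block:: python
+
+    import json
+
+    # each line is a self-contained JSON report entry
+    with open("log.jsonl") as f:
+        for line in f:
+            entry = json.loads(line)
+            print(entry.get("$report_type"), entry.get("nodeid"))
+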
+``pytest_collect_directory`` hook
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 6.0
+
+The ``pytest_collect_directory`` hook has not worked properly for years (it was called
+but the results were ignored). Users may consider using :hook:`pytest_collection_modifyitems` instead.
+
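+For example, a hedged sketch of per-directory filtering with :hook:`pytest_collection_modifyitems`
+(the directory name ``legacy`` is hypothetical):
+
+.. code-block:: python
+
+    def pytest_collection_modifyitems(config, items):
+        # deselect tests collected from a particular directory
+        deselected = [item for item in items if "legacy/" in item.nodeid]
+        items[:] = [item for item in items if "legacy/" not in item.nodeid]
+        config.hook.pytest_deselected(items=deselected)
+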
+TerminalReporter.writer
+~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 6.0
+
+The ``TerminalReporter.writer`` attribute has been deprecated and should no longer be used. This
+was inadvertently exposed as part of the public API of that plugin and ties it too closely
+to ``py.io.TerminalWriter``.
+
+Plugins that used ``TerminalReporter.writer`` directly should instead use ``TerminalReporter``
+methods that provide the same functionality.
+
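+A minimal sketch of the migration inside a plugin; the message text is illustrative:
+
+.. code-block:: python
+
+    reporter = config.pluginmanager.get_plugin("terminalreporter")
+
+    # before
+    # reporter.writer.line("some text")
+
+    # after: use the TerminalReporter methods directly
+    reporter.write_line("some text")
+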
+.. _junit-family changed default value:
+
+``junit_family`` default value change to "xunit2"
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionchanged:: 6.0
+
+The default value of the ``junit_family`` option changed to ``xunit2`` in pytest 6.0, which
+is an update of the old ``xunit1`` format and is supported by default in modern tools
+that manipulate this type of file (for example, Jenkins and Azure Pipelines).
+
+Users are recommended to try the new ``xunit2`` format and see if their tooling that consumes the JUnit
+XML file supports it.
+
+To use the new format, update your ``pytest.ini``:
+
+.. code-block:: ini
+
+ [pytest]
+ junit_family=xunit2
+
+If you discover that your tooling does not support the new format, and want to keep using the
+legacy version, set the option to ``legacy`` instead:
+
+.. code-block:: ini
+
+ [pytest]
+ junit_family=legacy
+
+By using ``legacy`` you will keep using the legacy/xunit1 format when upgrading to
+pytest 6.0, where the default format is ``xunit2``.
+
+To let users know about the transition, pytest issues a warning when the ``--junitxml`` option
+is given on the command line but ``junit_family`` is not explicitly configured in ``pytest.ini``.
+
+Services known to support the ``xunit2`` format:
+
+* `Jenkins <https://www.jenkins.io/>`__ with the `JUnit <https://plugins.jenkins.io/junit>`__ plugin.
+* `Azure Pipelines <https://azure.microsoft.com/en-us/services/devops/pipelines>`__.
+
+Node Construction changed to ``Node.from_parent``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionchanged:: 6.0
+
+Construction of nodes should now use the named constructor ``from_parent``.
+This limitation of the API surface is intended to enable better/simpler refactoring of the collection tree.
+
+This means that instead of :code:`MyItem(name="foo", parent=collector, obj=42)`
+one now has to invoke :code:`MyItem.from_parent(collector, name="foo")`.
+
+Plugins that wish to support older versions of pytest and suppress the warning can use
+`hasattr` to check if `from_parent` exists in that version:
+
+.. code-block:: python
+
+ def pytest_pycollect_makeitem(collector, name, obj):
+ if hasattr(MyItem, "from_parent"):
+ item = MyItem.from_parent(collector, name="foo")
+ item.obj = 42
+ return item
+ else:
+ return MyItem(name="foo", parent=collector, obj=42)
+
+Note that ``from_parent`` should only be called with keyword arguments for the parameters.
+
+
+``pytest.fixture`` arguments are keyword only
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 6.0
+
+Passing arguments to pytest.fixture() as positional arguments has been removed - pass them by keyword instead.
+
+``funcargnames`` alias for ``fixturenames``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 6.0
+
+The ``FixtureRequest``, ``Metafunc``, and ``Function`` classes track the names of
+their associated fixtures, with the aptly-named ``fixturenames`` attribute.
+
+Prior to pytest 2.3, this attribute was named ``funcargnames``, and we have kept
+that as an alias since. It is finally due for removal, as it is often confusing
+in places where we or plugin authors must distinguish between fixture names and
+names supplied by non-fixture things such as ``pytest.mark.parametrize``.
+
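+A minimal before/after sketch using the ``request`` fixture:
+
+.. code-block:: python
+
+    def test_something(request):
+        # before
+        # print(request.funcargnames)
+
+        # after
+        print(request.fixturenames)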
+
+.. _pytest.config global deprecated:
+
+``pytest.config`` global
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 5.0
+
+The ``pytest.config`` global object is deprecated. Instead use
+``request.config`` (via the ``request`` fixture) or if you are a plugin author
+use the ``pytest_configure(config)`` hook. Note that many hooks can also access
+the ``config`` object indirectly, through ``session.config`` or ``item.config`` for example.
+
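+A minimal sketch of both replacements; the ``verbose`` option is just an example:
+
+.. code-block:: python
+
+    # in a test or fixture: use the request fixture
+    def test_something(request):
+        verbose = request.config.getoption("verbose")
+
+
+    # in a plugin: receive the config object through a hook
+    def pytest_configure(config):
+        verbose = config.getoption("verbose")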
+
+.. _`raises message deprecated`:
+
+``"message"`` parameter of ``pytest.raises``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 5.0
+
+It is a common mistake to think this parameter will match the exception message, while in fact
+it only serves to provide a custom message in case the ``pytest.raises`` check fails. To prevent
+users from making this mistake, and because it is believed to be little used, pytest is
+deprecating it without providing an alternative for the moment.
+
+If you have a valid use case for this parameter, consider that to obtain the same results
+you can just call ``pytest.fail`` manually at the end of the ``with`` statement.
+
+For example:
+
+.. code-block:: python
+
+ with pytest.raises(TimeoutError, message="Client got unexpected message"):
+ wait_for(websocket.recv(), 0.5)
+
+
+Becomes:
+
+.. code-block:: python
+
+ with pytest.raises(TimeoutError):
+ wait_for(websocket.recv(), 0.5)
+ pytest.fail("Client got unexpected message")
+
+
+If you still have concerns about this deprecation and future removal, please comment on
+:issue:`3974`.
+
+
+.. _raises-warns-exec:
+
+``raises`` / ``warns`` with a string as the second argument
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 5.0
+
+Use the context manager form of these instead. When necessary, invoke ``exec``
+directly.
+
+Example:
+
+.. code-block:: python
+
+ pytest.raises(ZeroDivisionError, "1 / 0")
+ pytest.raises(SyntaxError, "a $ b")
+
+ pytest.warns(DeprecationWarning, "my_function()")
+ pytest.warns(SyntaxWarning, "assert(1, 2)")
+
+Becomes:
+
+.. code-block:: python
+
+ with pytest.raises(ZeroDivisionError):
+ 1 / 0
+ with pytest.raises(SyntaxError):
+ exec("a $ b") # exec is required for invalid syntax
+
+ with pytest.warns(DeprecationWarning):
+ my_function()
+ with pytest.warns(SyntaxWarning):
+ exec("assert(1, 2)") # exec is used to avoid a top-level warning
+
+
+
+
+Using ``Class`` in custom Collectors
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+Using objects named ``"Class"`` as a way to customize the type of nodes that are collected in ``Collector``
+subclasses has been deprecated. Users should instead use ``pytest_pycollect_makeitem`` to customize node types
+during collection.
+
+This issue should affect only advanced plugins that create new collection types, so if you see this warning
+message please contact the plugin authors so they can update the code.
+
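+A hedged sketch of customizing collected node types via the hook; ``MyCustomItem`` and the
+``_custom_case`` attribute are hypothetical:
+
+.. code-block:: python
+
+    def pytest_pycollect_makeitem(collector, name, obj):
+        # collect callables that were tagged by some project-specific decorator
+        if callable(obj) and getattr(obj, "_custom_case", False):
+            return MyCustomItem.from_parent(collector, name=name)
+        # returning None falls back to the default collection behaviour
+        return None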
+
+.. _marks in pytest.parametrize deprecated:
+
+marks in ``pytest.mark.parametrize``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+Applying marks to values of a ``pytest.mark.parametrize`` call is now deprecated. For example:
+
+.. code-block:: python
+
+ @pytest.mark.parametrize(
+ "a, b",
+ [
+ (3, 9),
+ pytest.mark.xfail(reason="flaky")(6, 36),
+ (10, 100),
+ (20, 200),
+ (40, 400),
+ (50, 500),
+ ],
+ )
+ def test_foo(a, b):
+ ...
+
+This code applies the ``pytest.mark.xfail(reason="flaky")`` mark to the ``(6, 36)`` value of the above parametrization
+call.
+
+This was considered hard to read and understand, and its implementation also presented problems
+that prevented further internal improvements in the marks architecture.
+
+To update the code, use ``pytest.param``:
+
+.. code-block:: python
+
+ @pytest.mark.parametrize(
+ "a, b",
+ [
+ (3, 9),
+ pytest.param(6, 36, marks=pytest.mark.xfail(reason="flaky")),
+ (10, 100),
+ (20, 200),
+ (40, 400),
+ (50, 500),
+ ],
+ )
+ def test_foo(a, b):
+ ...
+
+
+.. _pytest_funcarg__ prefix deprecated:
+
+``pytest_funcarg__`` prefix
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+In very early pytest versions fixtures could be defined using the ``pytest_funcarg__`` prefix:
+
+.. code-block:: python
+
+ def pytest_funcarg__data():
+ return SomeData()
+
+Switch over to the ``@pytest.fixture`` decorator:
+
+.. code-block:: python
+
+ @pytest.fixture
+ def data():
+ return SomeData()
+
+
+
+[pytest] section in setup.cfg files
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+``[pytest]`` sections in ``setup.cfg`` files should now be named ``[tool:pytest]``
+to avoid conflicts with other distutils commands.
+
+
+.. _metafunc.addcall deprecated:
+
+Metafunc.addcall
+~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+``Metafunc.addcall`` was a precursor to the current parametrization mechanism. Users should use
+:meth:`pytest.Metafunc.parametrize` instead.
+
+Example:
+
+.. code-block:: python
+
+ def pytest_generate_tests(metafunc):
+ metafunc.addcall({"i": 1}, id="1")
+ metafunc.addcall({"i": 2}, id="2")
+
+Becomes:
+
+.. code-block:: python
+
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize("i", [1, 2], ids=["1", "2"])
+
+
+.. _cached_setup deprecated:
+
+``cached_setup``
+~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+``request.cached_setup`` was the precursor of the setup/teardown mechanism available to fixtures.
+
+Example:
+
+.. code-block:: python
+
+    @pytest.fixture
+    def db_session(request):
+        return request.cached_setup(
+            setup=Session.create, teardown=lambda session: session.close(), scope="module"
+        )
+
+This should be updated to make use of standard fixture mechanisms:
+
+.. code-block:: python
+
+ @pytest.fixture(scope="module")
+ def db_session():
+ session = Session.create()
+ yield session
+ session.close()
+
+
+You can consult the :std:doc:`funcarg comparison section in the docs <funcarg_compare>` for
+more information.
+
+
+.. _pytest_plugins in non-top-level conftest files deprecated:
+
+pytest_plugins in non-top-level conftest files
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+Defining :globalvar:`pytest_plugins` in non-top-level ``conftest.py`` files is now deprecated,
+because it activates the referenced plugins *globally*. This is surprising because, for all other
+pytest features, ``conftest.py`` files are only *active* for tests at or below them.
+
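+A minimal sketch of the migration; ``myplugin`` and the paths are hypothetical:
+
+.. code-block:: python
+
+    # tests/subpackage/conftest.py (deprecated location)
+    # pytest_plugins = ["myplugin"]
+
+    # conftest.py at the rootdir (supported location)
+    pytest_plugins = ["myplugin"]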
+
+.. _config.warn and node.warn deprecated:
+
+``Config.warn`` and ``Node.warn``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+Those methods were part of the internal pytest warnings system. Since version ``3.8``, pytest uses the builtin
+warning system for its own warnings, so those two functions are now deprecated.
+
+``Config.warn`` should be replaced by calls to the standard ``warnings.warn``, for example:
+
+.. code-block:: python
+
+ config.warn("C1", "some warning")
+
+Becomes:
+
+.. code-block:: python
+
+ warnings.warn(pytest.PytestWarning("some warning"))
+
+``Node.warn`` now supports two signatures:
+
+* ``node.warn(PytestWarning("some message"))``: this is now the **recommended** way to call this function.
+  The warning instance must be a ``PytestWarning`` or a subclass of it.
+
+* ``node.warn("CI", "some message")``: this code/message form has been **removed** and should be converted to the warning instance form above.
+
+.. _record_xml_property deprecated:
+
+record_xml_property
+~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+The ``record_xml_property`` fixture is now deprecated in favor of the more generic ``record_property``, which
+can be used by other consumers (for example ``pytest-html``) to obtain custom information about the test run.
+
+This is just a matter of renaming the fixture as the API is the same:
+
+.. code-block:: python
+
+ def test_foo(record_xml_property):
+ ...
+
+Change to:
+
+.. code-block:: python
+
+ def test_foo(record_property):
+ ...
+
+
+.. _passing command-line string to pytest.main deprecated:
+
+Passing command-line string to ``pytest.main()``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+Passing a command-line string to ``pytest.main()`` is deprecated:
+
+.. code-block:: python
+
+ pytest.main("-v -s")
+
+Pass a list instead:
+
+.. code-block:: python
+
+ pytest.main(["-v", "-s"])
+
+
+When passing a string, users expect pytest to interpret that command line using the rules of the shell they
+are working in (for example ``bash`` or ``PowerShell``), but this is very hard, if not impossible, to do in a portable way.
+
+
+.. _calling fixtures directly deprecated:
+
+Calling fixtures directly
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+Calling a fixture function directly, as opposed to requesting it in a test function, is deprecated.
+
+For example:
+
+.. code-block:: python
+
+ @pytest.fixture
+ def cell():
+ return ...
+
+
+ @pytest.fixture
+ def full_cell():
+ cell = cell()
+ cell.make_full()
+ return cell
+
+This is a great source of confusion for new users, who often call fixture functions directly and request them from test functions interchangeably, which breaks the fixture resolution model.
+
+In those cases just request the function directly in the dependent fixture:
+
+.. code-block:: python
+
+ @pytest.fixture
+ def cell():
+ return ...
+
+
+ @pytest.fixture
+ def full_cell(cell):
+ cell.make_full()
+ return cell
+
+Alternatively, if the fixture function is called multiple times inside a test (making it hard to apply the above
+pattern), or if you would like to make minimal changes to the code, you can create a fixture that calls the
+original function and register it under the original name via the ``name`` parameter:
+
+.. code-block:: python
+
+ def cell():
+ return ...
+
+
+ @pytest.fixture(name="cell")
+ def cell_fixture():
+ return cell()
+
+
+.. _yield tests deprecated:
+
+``yield`` tests
+~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+pytest used to support ``yield``-style tests, where a test function yields functions and values
+that are then turned into proper test functions. Example:
+
+.. code-block:: python
+
+    def check(x, y):
+        assert x ** 2 == y
+
+
+    def test_squared():
+        yield check, 2, 4
+        yield check, 3, 9
+
+This would result in two actual test functions being generated.
+
+This form of test function doesn't support fixtures properly, and users should switch to ``pytest.mark.parametrize``:
+
+.. code-block:: python
+
+    @pytest.mark.parametrize("x, y", [(2, 4), (3, 9)])
+    def test_squared(x, y):
+        assert x ** 2 == y
+
+.. _internal classes accessed through node deprecated:
+
+Internal classes accessed through ``Node``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+Accessing ``Module``, ``Function``, ``Class``, ``Instance``, ``File`` and ``Item`` through ``Node`` instances now
+issues this warning:
+
+.. code-block:: text
+
+ usage of Function.Module is deprecated, please use pytest.Module instead
+
+Users should just ``import pytest`` and access those objects using the ``pytest`` module.
+
+This has been documented as deprecated for years, but only now are we actually emitting deprecation warnings.
+
+``Node.get_marker``
+~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+As part of a large :ref:`marker-revamp`, ``_pytest.nodes.Node.get_marker`` is removed. See
+:ref:`the documentation <update marker code>` for tips on how to update your code.
+
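+A minimal before/after sketch inside a hook; the ``slow`` marker name is illustrative:
+
+.. code-block:: python
+
+    # before
+    # marker = item.get_marker("slow")
+
+    # after
+    marker = item.get_closest_marker("slow")
+    if marker is not None:
+        ...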
+
+``somefunction.markname``
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+As part of a large :ref:`marker-revamp` we already deprecated using ``MarkInfo``;
+the only correct way to get the markers of an element is via ``node.iter_markers(name)``.
+
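+A minimal sketch; the ``slow`` marker name is illustrative:
+
+.. code-block:: python
+
+    # before
+    # if hasattr(some_function, "slow"):
+    #     ...
+
+    # after
+    for marker in node.iter_markers(name="slow"):
+        ...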
+
+.. _pytest.namespace deprecated:
+
+``pytest_namespace``
+~~~~~~~~~~~~~~~~~~~~
+
+.. versionremoved:: 4.0
+
+This hook is deprecated because it greatly complicates the pytest internals regarding configuration and initialization, making some
+bug fixes and refactorings impossible.
+
+Example of usage:
+
+.. code-block:: python
+
+ class MySymbol:
+ ...
+
+
+ def pytest_namespace():
+ return {"my_symbol": MySymbol()}
+
+
+Plugin authors relying on this hook should instead require that users now import the plugin modules directly (with an appropriate public API).
+
+As a stopgap measure, plugin authors may still inject their names into pytest's namespace, usually during ``pytest_configure``:
+
+.. code-block:: python
+
+ import pytest
+
+
+ def pytest_configure():
+ pytest.my_symbol = MySymbol()
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/development_guide.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/development_guide.rst
new file mode 100644
index 0000000000..3ee0ebbc23
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/development_guide.rst
@@ -0,0 +1,7 @@
+=================
+Development Guide
+=================
+
+The contributing guidelines can be found :ref:`here <contributing>`.
+The release procedure for pytest is documented on
+`GitHub <https://github.com/pytest-dev/pytest/blob/main/RELEASING.rst>`_.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/failure_demo.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/failure_demo.py
new file mode 100644
index 0000000000..abb9bce509
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/failure_demo.py
@@ -0,0 +1,281 @@
+import pytest
+from pytest import raises
+
+
+def otherfunc(a, b):
+ assert a == b
+
+
+def somefunc(x, y):
+ otherfunc(x, y)
+
+
+def otherfunc_multi(a, b):
+ assert a == b
+
+
+@pytest.mark.parametrize("param1, param2", [(3, 6)])
+def test_generative(param1, param2):
+ assert param1 * 2 < param2
+
+
+class TestFailing:
+ def test_simple(self):
+ def f():
+ return 42
+
+ def g():
+ return 43
+
+ assert f() == g()
+
+ def test_simple_multiline(self):
+ otherfunc_multi(42, 6 * 9)
+
+ def test_not(self):
+ def f():
+ return 42
+
+ assert not f()
+
+
+class TestSpecialisedExplanations:
+ def test_eq_text(self):
+ assert "spam" == "eggs"
+
+ def test_eq_similar_text(self):
+ assert "foo 1 bar" == "foo 2 bar"
+
+ def test_eq_multiline_text(self):
+ assert "foo\nspam\nbar" == "foo\neggs\nbar"
+
+ def test_eq_long_text(self):
+ a = "1" * 100 + "a" + "2" * 100
+ b = "1" * 100 + "b" + "2" * 100
+ assert a == b
+
+ def test_eq_long_text_multiline(self):
+ a = "1\n" * 100 + "a" + "2\n" * 100
+ b = "1\n" * 100 + "b" + "2\n" * 100
+ assert a == b
+
+ def test_eq_list(self):
+ assert [0, 1, 2] == [0, 1, 3]
+
+ def test_eq_list_long(self):
+ a = [0] * 100 + [1] + [3] * 100
+ b = [0] * 100 + [2] + [3] * 100
+ assert a == b
+
+ def test_eq_dict(self):
+ assert {"a": 0, "b": 1, "c": 0} == {"a": 0, "b": 2, "d": 0}
+
+ def test_eq_set(self):
+ assert {0, 10, 11, 12} == {0, 20, 21}
+
+ def test_eq_longer_list(self):
+ assert [1, 2] == [1, 2, 3]
+
+ def test_in_list(self):
+ assert 1 in [0, 2, 3, 4, 5]
+
+ def test_not_in_text_multiline(self):
+ text = "some multiline\ntext\nwhich\nincludes foo\nand a\ntail"
+ assert "foo" not in text
+
+ def test_not_in_text_single(self):
+ text = "single foo line"
+ assert "foo" not in text
+
+ def test_not_in_text_single_long(self):
+ text = "head " * 50 + "foo " + "tail " * 20
+ assert "foo" not in text
+
+ def test_not_in_text_single_long_term(self):
+ text = "head " * 50 + "f" * 70 + "tail " * 20
+ assert "f" * 70 not in text
+
+ def test_eq_dataclass(self):
+ from dataclasses import dataclass
+
+ @dataclass
+ class Foo:
+ a: int
+ b: str
+
+ left = Foo(1, "b")
+ right = Foo(1, "c")
+ assert left == right
+
+ def test_eq_attrs(self):
+ import attr
+
+ @attr.s
+ class Foo:
+ a = attr.ib()
+ b = attr.ib()
+
+ left = Foo(1, "b")
+ right = Foo(1, "c")
+ assert left == right
+
+
+def test_attribute():
+ class Foo:
+ b = 1
+
+ i = Foo()
+ assert i.b == 2
+
+
+def test_attribute_instance():
+ class Foo:
+ b = 1
+
+ assert Foo().b == 2
+
+
+def test_attribute_failure():
+ class Foo:
+ def _get_b(self):
+ raise Exception("Failed to get attrib")
+
+ b = property(_get_b)
+
+ i = Foo()
+ assert i.b == 2
+
+
+def test_attribute_multiple():
+ class Foo:
+ b = 1
+
+ class Bar:
+ b = 2
+
+ assert Foo().b == Bar().b
+
+
+def globf(x):
+ return x + 1
+
+
+class TestRaises:
+ def test_raises(self):
+ s = "qwe"
+ raises(TypeError, int, s)
+
+ def test_raises_doesnt(self):
+ raises(OSError, int, "3")
+
+ def test_raise(self):
+ raise ValueError("demo error")
+
+ def test_tupleerror(self):
+ a, b = [1] # NOQA
+
+ def test_reinterpret_fails_with_print_for_the_fun_of_it(self):
+ items = [1, 2, 3]
+ print(f"items is {items!r}")
+ a, b = items.pop()
+
+ def test_some_error(self):
+ if namenotexi: # NOQA
+ pass
+
+ def func1(self):
+ assert 41 == 42
+
+
+# thanks to Matthew Scott for this test
+def test_dynamic_compile_shows_nicely():
+ import importlib.util
+ import sys
+
+ src = "def foo():\n assert 1 == 0\n"
+ name = "abc-123"
+ spec = importlib.util.spec_from_loader(name, loader=None)
+ module = importlib.util.module_from_spec(spec)
+ code = compile(src, name, "exec")
+ exec(code, module.__dict__)
+ sys.modules[name] = module
+ module.foo()
+
+
+class TestMoreErrors:
+ def test_complex_error(self):
+ def f():
+ return 44
+
+ def g():
+ return 43
+
+ somefunc(f(), g())
+
+ def test_z1_unpack_error(self):
+ items = []
+ a, b = items
+
+ def test_z2_type_error(self):
+ items = 3
+ a, b = items
+
+ def test_startswith(self):
+ s = "123"
+ g = "456"
+ assert s.startswith(g)
+
+ def test_startswith_nested(self):
+ def f():
+ return "123"
+
+ def g():
+ return "456"
+
+ assert f().startswith(g())
+
+ def test_global_func(self):
+ assert isinstance(globf(42), float)
+
+ def test_instance(self):
+ self.x = 6 * 7
+ assert self.x != 42
+
+ def test_compare(self):
+ assert globf(10) < 5
+
+ def test_try_finally(self):
+ x = 1
+ try:
+ assert x == 0
+ finally:
+ x = 0
+
+
+class TestCustomAssertMsg:
+ def test_single_line(self):
+ class A:
+ a = 1
+
+ b = 2
+ assert A.a == b, "A.a appears not to be b"
+
+ def test_multiline(self):
+ class A:
+ a = 1
+
+ b = 2
+ assert (
+ A.a == b
+ ), "A.a appears not to be b\nor does not appear to be b\none of those"
+
+ def test_custom_repr(self):
+ class JSON:
+ a = 1
+
+ def __repr__(self):
+ return "This is JSON\n{\n 'foo': 'bar'\n}"
+
+ a = JSON()
+ b = 2
+ assert a.a == b, a
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/global_testmodule_config/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/global_testmodule_config/conftest.py
new file mode 100644
index 0000000000..7cdf18cdbc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/global_testmodule_config/conftest.py
@@ -0,0 +1,14 @@
+import os.path
+
+import pytest
+
+mydir = os.path.dirname(__file__)
+
+
+def pytest_runtest_setup(item):
+ if isinstance(item, pytest.Function):
+ if not item.fspath.relto(mydir):
+ return
+ mod = item.getparent(pytest.Module).obj
+ if hasattr(mod, "hello"):
+ print(f"mod.hello {mod.hello!r}")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/global_testmodule_config/test_hello_world.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/global_testmodule_config/test_hello_world.py
new file mode 100644
index 0000000000..a31a601a1c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/global_testmodule_config/test_hello_world.py
@@ -0,0 +1,5 @@
+hello = "world"
+
+
+def test_func():
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/test_failures.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/test_failures.py
new file mode 100644
index 0000000000..350518b43c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/test_failures.py
@@ -0,0 +1,13 @@
+import os.path
+import shutil
+
+failure_demo = os.path.join(os.path.dirname(__file__), "failure_demo.py")
+pytest_plugins = ("pytester",)
+
+
+def test_failure_demo_fails_properly(pytester):
+ target = pytester.path.joinpath(os.path.basename(failure_demo))
+ shutil.copy(failure_demo, target)
+ result = pytester.runpytest(target, syspathinsert=True)
+ result.stdout.fnmatch_lines(["*44 failed*"])
+ assert result.ret != 0
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/test_setup_flow_example.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/test_setup_flow_example.py
new file mode 100644
index 0000000000..0e7eded06b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/assertion/test_setup_flow_example.py
@@ -0,0 +1,44 @@
+def setup_module(module):
+ module.TestStateFullThing.classcount = 0
+
+
+class TestStateFullThing:
+ def setup_class(cls):
+ cls.classcount += 1
+
+ def teardown_class(cls):
+ cls.classcount -= 1
+
+ def setup_method(self, method):
+ self.id = eval(method.__name__[5:])
+
+ def test_42(self):
+ assert self.classcount == 1
+ assert self.id == 42
+
+ def test_23(self):
+ assert self.classcount == 1
+ assert self.id == 23
+
+
+def teardown_module(module):
+ assert module.TestStateFullThing.classcount == 0
+
+
+""" For this example the control flow happens as follows::
+ import test_setup_flow_example
+ setup_module(test_setup_flow_example)
+ setup_class(TestStateFullThing)
+ instance = TestStateFullThing()
+ setup_method(instance, instance.test_42)
+ instance.test_42()
+ setup_method(instance, instance.test_23)
+ instance.test_23()
+ teardown_class(TestStateFullThing)
+ teardown_module(test_setup_flow_example)
+
+Note that ``setup_class(TestStateFullThing)`` is called and not
+``TestStateFullThing.setup_class()`` which would require you
+to insert ``setup_class = classmethod(setup_class)`` to make
+your setup function callable.
+"""
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/attic.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/attic.rst
new file mode 100644
index 0000000000..2ea8700620
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/attic.rst
@@ -0,0 +1,83 @@
+
+.. _`accept example`:
+
+example: specifying and selecting acceptance tests
+--------------------------------------------------------------
+
+.. sourcecode:: python
+
+    # ./conftest.py
+    import subprocess
+
+    import pytest
+
+
+    def pytest_option(parser):
+ group = parser.getgroup("myproject")
+ group.addoption(
+ "-A", dest="acceptance", action="store_true", help="run (slow) acceptance tests"
+ )
+
+
+ def pytest_funcarg__accept(request):
+ return AcceptFixture(request)
+
+
+ class AcceptFixture:
+ def __init__(self, request):
+ if not request.config.getoption("acceptance"):
+ pytest.skip("specify -A to run acceptance tests")
+ self.tmpdir = request.config.mktemp(request.function.__name__, numbered=True)
+
+ def run(self, *cmd):
+ """ called by test code to execute an acceptance test. """
+ self.tmpdir.chdir()
+ return subprocess.check_output(cmd).decode()
+
+
+and the actual test function example:
+
+.. sourcecode:: python
+
+ def test_some_acceptance_aspect(accept):
+ accept.tmpdir.mkdir("somesub")
+ result = accept.run("ls", "-la")
+ assert "somesub" in result
+
+If you run this test without specifying a command line option
+the test will be skipped with an appropriate message. Otherwise
+you can start to add convenience and test-support methods
+to your ``AcceptFixture`` and drive the running of tools or
+applications and provide ways to make assertions about
+the output.
+
+.. _`decorate a funcarg`:
+
+example: decorating a funcarg in a test module
+--------------------------------------------------------------
+
+For larger scale setups it's sometimes useful to decorate
+a funcarg just for a particular test module. We can
+extend the `accept example`_ by putting this in our test module:
+
+.. sourcecode:: python
+
+ def pytest_funcarg__accept(request):
+ # call the next factory (living in our conftest.py)
+ arg = request.getfuncargvalue("accept")
+ # create a special layout in our tempdir
+ arg.tmpdir.mkdir("special")
+ return arg
+
+
+ class TestSpecialAcceptance:
+ def test_sometest(self, accept):
+ assert accept.tmpdir.join("special").check()
+
+Our module level factory will be invoked first and it can
+ask its request object to call the next factory and then
+decorate its result. This mechanism allows us to stay
+ignorant of how/where the function argument is provided -
+in our example from a `conftest plugin`_.
+
+Sidenote: the temporary directories used here are instances of
+the `py.path.local`_ class, which provides many of the ``os.path``
+methods in a convenient way.
+
+.. _`py.path.local`: ../path.html#local
+.. _`conftest plugin`: customize.html#conftestplugin
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/conftest.py
new file mode 100644
index 0000000000..f905738c4f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/conftest.py
@@ -0,0 +1 @@
+collect_ignore = ["nonpython"]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/fixture_availability.svg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/fixture_availability.svg
new file mode 100644
index 0000000000..066caac344
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/fixture_availability.svg
@@ -0,0 +1,132 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="572" height="542">
+ <style>
+ text {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ dominant-baseline: middle;
+ text-anchor: middle;
+ fill: #062886;
+ font-size: medium;
+ }
+ ellipse.fixture, rect.test {
+ fill: #eeffcc;
+ stroke: #007020;
+ stroke-width: 2;
+ }
+ text.fixture {
+ color: #06287e;
+ }
+ circle.class, circle.module, circle.package {
+ fill: #c3e0ec;
+ stroke: #0e84b5;
+ stroke-width: 2;
+ }
+ text.class, text.module, text.package {
+ fill: #0e84b5;
+ }
+ line, path {
+ stroke: black;
+ stroke-width: 2;
+ fill: none;
+ }
+ </style>
+
+ <!-- main scope -->
+ <circle class="package" r="270" cx="286" cy="271" />
+ <!-- scope name -->
+ <defs>
+ <path d="M 26,271 A 260 260 0 0 1 546 271" id="testp"/>
+ </defs>
+ <text class="package">
+ <textPath xlink:href="#testp" startOffset="50%">tests</textPath>
+ </text>
+
+ <!-- subpackage -->
+ <circle class="package" r="140" cx="186" cy="271" />
+ <!-- scope name -->
+ <defs>
+ <path d="M 56,271 A 130 130 0 0 1 316 271" id="subpackage"/>
+ </defs>
+ <text class="package">
+ <textPath xlink:href="#subpackage" startOffset="50%">subpackage</textPath>
+ </text>
+
+ <!-- test_subpackage.py -->
+ <circle class="module" r="90" cx="186" cy="311" />
+ <!-- scope name -->
+ <defs>
+ <path d="M 106,311 A 80 80 0 0 1 266 311" id="testSubpackage"/>
+ </defs>
+ <text class="module">
+ <textPath xlink:href="#testSubpackage" startOffset="50%">test_subpackage.py</textPath>
+ </text>
+ <!-- innermost -->
+ <line x1="186" x2="186" y1="271" y2="351"/>
+ <!-- mid -->
+ <path d="M 186 351 L 136 351 L 106 331 L 106 196" />
+ <!-- order -->
+ <path d="M 186 351 L 256 351 L 316 291 L 316 136" />
+ <!-- top -->
+ <path d="M 186 351 L 186 391 L 231 436 L 331 436" />
+ <ellipse class="fixture" rx="50" ry="25" cx="186" cy="271" />
+ <text x="186" y="271">innermost</text>
+ <rect class="test" width="110" height="50" x="131" y="326" />
+ <text x="186" y="351">test_order</text>
+ <ellipse class="fixture" rx="50" ry="25" cx="126" cy="196" />
+ <text x="126" y="196">mid</text>
+ <!-- scope order number -->
+ <mask id="testSubpackageOrderMask">
+ <rect x="0" y="0" width="100%" height="100%" fill="white"/>
+ <circle fill="black" stroke="white" stroke-width="2" r="90" cx="186" cy="311" />
+ </mask>
+ <circle class="module" r="15" cx="96" cy="311" mask="url(#testSubpackageOrderMask)"/>
+ <text class="module" x="96" y="311">1</text>
+ <!-- scope order number -->
+ <mask id="subpackageOrderMask">
+ <rect x="0" y="0" width="100%" height="100%" fill="white"/>
+ <circle fill="black" stroke="white" stroke-width="2" r="140" cx="186" cy="271" />
+ </mask>
+ <circle class="module" r="15" cx="46" cy="271" mask="url(#subpackageOrderMask)"/>
+ <text class="module" x="46" y="271">2</text>
+ <!-- scope order number -->
+ <mask id="testsOrderMask">
+ <rect x="0" y="0" width="100%" height="100%" fill="white"/>
+ <circle fill="black" stroke="white" stroke-width="2" r="270" cx="286" cy="271" />
+ </mask>
+ <circle class="module" r="15" cx="16" cy="271" mask="url(#testsOrderMask)"/>
+ <text class="module" x="16" y="271">3</text>
+
+ <!-- test_top.py -->
+ <circle class="module" r="85" cx="441" cy="271" />
+ <!-- scope name -->
+ <defs>
+ <path d="M 366,271 A 75 75 0 0 1 516 271" id="testTop"/>
+ </defs>
+ <text class="module">
+ <textPath xlink:href="#testTop" startOffset="50%">test_top.py</textPath>
+ </text>
+ <!-- innermost -->
+ <line x1="441" x2="441" y1="306" y2="236"/>
+ <!-- order -->
+ <path d="M 441 306 L 376 306 L 346 276 L 346 136" />
+ <!-- top -->
+ <path d="M 441 306 L 441 411 L 411 436 L 331 436" />
+ <ellipse class="fixture" rx="50" ry="25" cx="441" cy="236" />
+ <text x="441" y="236">innermost</text>
+ <rect class="test" width="110" height="50" x="386" y="281" />
+ <text x="441" y="306">test_order</text>
+ <!-- scope order number -->
+ <mask id="testTopOrderMask">
+ <rect x="0" y="0" width="100%" height="100%" fill="white"/>
+ <circle fill="black" stroke="white" stroke-width="2" r="85" cx="441" cy="271" />
+ </mask>
+ <circle class="module" r="15" cx="526" cy="271" mask="url(#testTopOrderMask)"/>
+ <text class="module" x="526" y="271">1</text>
+ <!-- scope order number -->
+ <circle class="module" r="15" cx="556" cy="271" mask="url(#testsOrderMask)"/>
+ <text class="module" x="556" y="271">2</text>
+
+ <ellipse class="fixture" rx="50" ry="25" cx="331" cy="436" />
+ <text x="331" y="436">top</text>
+ <ellipse class="fixture" rx="50" ry="25" cx="331" cy="136" />
+ <text x="331" y="136">order</text>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/fixture_availability_plugins.svg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/fixture_availability_plugins.svg
new file mode 100644
index 0000000000..36e3005507
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/fixture_availability_plugins.svg
@@ -0,0 +1,142 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="587" height="382">
+ <style>
+ text {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ dominant-baseline: middle;
+ text-anchor: middle;
+ fill: #062886;
+ font-size: medium;
+ alignment-baseline: center;
+ }
+ ellipse.fixture, rect.test {
+ fill: #eeffcc;
+ stroke: #007020;
+ stroke-width: 2;
+ }
+ text.fixture {
+ color: #06287e;
+ }
+ circle.class, circle.module, circle.package, circle.plugin {
+ fill: #c3e0ec;
+ stroke: #0e84b5;
+ stroke-width: 2;
+ }
+ text.class, text.module, text.package, text.plugin {
+ fill: #0e84b5;
+ }
+ line, path {
+ stroke: black;
+ stroke-width: 2;
+ fill: none;
+ }
+ </style>
+
+ <!-- plugin_a.py scope -->
+ <circle class="plugin" r="85" cx="486" cy="86" />
+ <!-- plugin name -->
+ <defs>
+ <path d="M 411,86 A 75 75 0 0 1 561 86" id="pluginA"/>
+ </defs>
+ <text class="plugin">
+ <textPath xlink:href="#pluginA" startOffset="50%">plugin_a</textPath>
+ </text>
+ <!-- scope order number -->
+ <mask id="pluginAOrderMask">
+ <rect x="0" y="0" width="100%" height="100%" fill="white"/>
+ <circle fill="black" stroke="white" stroke-width="2" r="85" cx="486" cy="86" />
+ </mask>
+ <circle class="module" r="15" cx="571" cy="86" mask="url(#pluginAOrderMask)"/>
+ <text class="module" x="571" y="86">4</text>
+
+ <!-- plugin_b.py scope -->
+ <circle class="plugin" r="85" cx="486" cy="296" />
+ <!-- plugin name -->
+ <defs>
+ <path d="M 411,296 A 75 75 0 0 1 561 296" id="pluginB"/>
+ </defs>
+ <text class="plugin">
+ <textPath xlink:href="#pluginB" startOffset="50%">plugin_b</textPath>
+ </text>
+ <!-- scope order number -->
+ <mask id="pluginBOrderMask">
+ <rect x="0" y="0" width="100%" height="100%" fill="white"/>
+ <circle fill="black" stroke="white" stroke-width="2" r="85" cx="486" cy="296" />
+ </mask>
+ <circle class="module" r="15" cx="571" cy="296" mask="url(#pluginBOrderMask)"/>
+ <text class="module" x="571" y="296">4</text>
+
+ <!-- main scope -->
+ <circle class="package" r="190" cx="191" cy="191" />
+ <!-- scope name -->
+ <defs>
+ <path d="M 11,191 A 180 180 0 0 1 371 191" id="testp"/>
+ </defs>
+ <text class="package">
+ <textPath xlink:href="#testp" startOffset="50%">tests</textPath>
+ </text>
+ <!-- scope order number -->
+ <mask id="mainOrderMask">
+ <rect x="0" y="0" width="100%" height="100%" fill="white"/>
+ <circle fill="black" stroke="white" stroke-width="2" r="190" cx="191" cy="191" />
+ </mask>
+ <circle class="module" r="15" cx="381" cy="191" mask="url(#mainOrderMask)"/>
+ <text class="module" x="381" y="191">3</text>
+
+ <!-- subpackage -->
+ <circle class="package" r="140" cx="191" cy="231" />
+ <!-- scope name -->
+ <defs>
+ <path d="M 61,231 A 130 130 0 0 1 321 231" id="subpackage"/>
+ </defs>
+ <text class="package">
+ <textPath xlink:href="#subpackage" startOffset="50%">subpackage</textPath>
+ </text>
+ <!-- scope order number -->
+ <mask id="subpackageOrderMask">
+ <rect x="0" y="0" width="100%" height="100%" fill="white"/>
+ <circle fill="black" stroke="white" stroke-width="2" r="140" cx="191" cy="231" />
+ </mask>
+ <circle class="module" r="15" cx="331" cy="231" mask="url(#subpackageOrderMask)"/>
+ <text class="module" x="331" y="231">2</text>
+
+ <!-- test_subpackage.py -->
+ <circle class="module" r="90" cx="191" cy="271" />
+ <!-- scope name -->
+ <defs>
+ <path d="M 111,271 A 80 80 0 0 1 271 271" id="testSubpackage"/>
+ </defs>
+ <text class="module">
+ <textPath xlink:href="#testSubpackage" startOffset="50%">test_subpackage.py</textPath>
+ </text>
+ <!-- scope order number -->
+ <mask id="testSubpackageOrderMask">
+ <rect x="0" y="0" width="100%" height="100%" fill="white"/>
+ <circle fill="black" stroke="white" stroke-width="2" r="90" cx="191" cy="271" />
+ </mask>
+ <circle class="module" r="15" cx="281" cy="271" mask="url(#testSubpackageOrderMask)"/>
+ <text class="module" x="281" y="271">1</text>
+
+ <!-- innermost -->
+ <line x1="191" x2="191" y1="231" y2="311"/>
+ <!-- mid -->
+ <path d="M 191 306 L 101 306 L 91 296 L 91 156 L 101 146 L 191 146" />
+ <!-- order -->
+ <path d="M 191 316 L 91 316 L 81 306 L 81 61 L 91 51 L 191 51" />
+ <!-- a_fix -->
+ <path d="M 191 306 L 291 306 L 301 296 L 301 96 L 311 86 L 486 86" />
+ <!-- b_fix -->
+ <path d="M 191 316 L 316 316 L 336 296 L 486 296" />
+ <ellipse class="fixture" rx="50" ry="25" cx="191" cy="231" />
+ <text x="191" y="231">inner</text>
+ <rect class="test" width="110" height="50" x="136" y="286" />
+ <text x="191" y="311">test_order</text>
+ <ellipse class="fixture" rx="50" ry="25" cx="191" cy="146" />
+ <text x="191" y="146">mid</text>
+ <ellipse class="fixture" rx="50" ry="25" cx="191" cy="51" />
+ <text x="191" y="51">order</text>
+
+ <ellipse class="fixture" rx="50" ry="25" cx="486" cy="86" />
+ <text x="486" y="86">a_fix</text>
+ <ellipse class="fixture" rx="50" ry="25" cx="486" cy="296" />
+ <text x="486" y="296">b_fix</text>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse.py
new file mode 100644
index 0000000000..ec282ab4b2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse.py
@@ -0,0 +1,45 @@
+import pytest
+
+
+@pytest.fixture
+def order():
+ return []
+
+
+@pytest.fixture
+def a(order):
+ order.append("a")
+
+
+@pytest.fixture
+def b(a, order):
+ order.append("b")
+
+
+@pytest.fixture(autouse=True)
+def c(b, order):
+ order.append("c")
+
+
+@pytest.fixture
+def d(b, order):
+ order.append("d")
+
+
+@pytest.fixture
+def e(d, order):
+ order.append("e")
+
+
+@pytest.fixture
+def f(e, order):
+ order.append("f")
+
+
+@pytest.fixture
+def g(f, c, order):
+ order.append("g")
+
+
+def test_order_and_g(g, order):
+ assert order == ["a", "b", "c", "d", "e", "f", "g"]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse.svg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse.svg
new file mode 100644
index 0000000000..36362e4fb0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse.svg
@@ -0,0 +1,64 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="252" height="682">
+ <style>
+ text {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ dominant-baseline: middle;
+ text-anchor: middle;
+ fill: #062886;
+ font-size: medium;
+ }
+ ellipse.fixture, rect.test {
+ fill: #eeffcc;
+ stroke: #007020;
+ stroke-width: 2;
+ }
+ text.fixture {
+ color: #06287e;
+ }
+ circle.class {
+ fill: #c3e0ec;
+ stroke: #0e84b5;
+ stroke-width: 2;
+ }
+ text.class {
+ fill: #0e84b5;
+ }
+ path, line {
+ stroke: black;
+ stroke-width: 2;
+ fill: none;
+ }
+ rect.autouse {
+ fill: #ca7f3d;
+ }
+ </style>
+ <path d="M126,586 L26,506 L26,236" />
+ <path d="M226,446 L226,236 L126,166" />
+ <line x1="126" x2="126" y1="656" y2="516" />
+ <line x1="126" x2="226" y1="516" y2="446" />
+ <line x1="226" x2="126" y1="446" y2="376" />
+ <line x1="126" x2="126" y1="376" y2="166" />
+ <line x1="26" x2="126" y1="236" y2="166" />
+ <line x1="126" x2="126" y1="166" y2="26" />
+ <line x1="126" x2="126" y1="96" y2="26" />
+ <rect class="autouse" width="251" height="40" x="0" y="286" />
+ <text x="126" y="306">autouse</text>
+ <ellipse class="fixture" rx="50" ry="25" cx="126" cy="26" />
+ <text x="126" y="26">order</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="96" />
+ <text x="126" y="96">a</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="166" />
+ <text x="126" y="166">b</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="26" cy="236" />
+ <text x="26" y="236">c</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="376" />
+ <text x="126" y="376">d</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="226" cy="446" />
+ <text x="226" y="446">e</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="516" />
+ <text x="126" y="516">f</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="586" />
+ <text x="126" y="586">g</text>
+ <rect class="test" width="110" height="50" x="71" y="631" />
+ <text x="126" y="656">test_order</text>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_flat.svg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_flat.svg
new file mode 100644
index 0000000000..03c4598272
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_flat.svg
@@ -0,0 +1,56 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="112" height="682">
+ <style>
+ text {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ dominant-baseline: middle;
+ text-anchor: middle;
+ fill: #062886;
+ font-size: medium;
+ }
+ ellipse.fixture, rect.test {
+ fill: #eeffcc;
+ stroke: #007020;
+ stroke-width: 2;
+ }
+ text.fixture {
+ color: #06287e;
+ }
+ circle.class {
+ fill: #c3e0ec;
+ stroke: #0e84b5;
+ stroke-width: 2;
+ }
+ text.class {
+ fill: #0e84b5;
+ }
+ path, line {
+ stroke: black;
+ stroke-width: 2;
+ fill: none;
+ }
+ rect.autouse {
+ fill: #ca7f3d;
+ }
+ </style>
+ <line x1="56" x2="56" y1="681" y2="26" />
+ <ellipse class="fixture" rx="50" ry="25" cx="56" cy="26" />
+ <text x="56" y="26">order</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="96" />
+ <text x="56" y="96">a</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="166" />
+ <text x="56" y="166">b</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="236" />
+ <text x="56" y="236">c</text>
+ <rect class="autouse" width="112" height="40" x="0" y="286" />
+ <text x="56" y="306">autouse</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="376" />
+ <text x="56" y="376">d</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="446" />
+ <text x="56" y="446">e</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="516" />
+ <text x="56" y="516">f</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="586" />
+ <text x="56" y="586">g</text>
+ <rect class="test" width="110" height="50" x="1" y="631" />
+ <text x="56" y="656">test_order</text>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_multiple_scopes.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_multiple_scopes.py
new file mode 100644
index 0000000000..de0c264279
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_multiple_scopes.py
@@ -0,0 +1,31 @@
+import pytest
+
+
+@pytest.fixture(scope="class")
+def order():
+ return []
+
+
+@pytest.fixture(scope="class", autouse=True)
+def c1(order):
+ order.append("c1")
+
+
+@pytest.fixture(scope="class")
+def c2(order):
+ order.append("c2")
+
+
+@pytest.fixture(scope="class")
+def c3(order, c1):
+ order.append("c3")
+
+
+class TestClassWithC1Request:
+ def test_order(self, order, c1, c3):
+ assert order == ["c1", "c3"]
+
+
+class TestClassWithoutC1Request:
+ def test_order(self, order, c2):
+ assert order == ["c1", "c2"]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_multiple_scopes.svg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_multiple_scopes.svg
new file mode 100644
index 0000000000..fe5772993e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_multiple_scopes.svg
@@ -0,0 +1,76 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="862" height="402">
+ <style>
+ text {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ dominant-baseline: middle;
+ text-anchor: middle;
+ fill: #062886;
+ font-size: medium;
+ }
+ ellipse.fixture, rect.test {
+ fill: #eeffcc;
+ stroke: #007020;
+ stroke-width: 2;
+ }
+ text.fixture {
+ color: #06287e;
+ }
+ circle.class {
+ fill: #c3e0ec;
+ stroke: #0e84b5;
+ stroke-width: 2;
+ }
+ text.class {
+ fill: #0e84b5;
+ }
+ line {
+ stroke: black;
+ stroke-width: 2;
+ }
+ rect.autouse {
+ fill: #ca7f3d;
+ }
+ </style>
+
+ <!-- TestWithC1Request -->
+ <circle class="class" r="200" cx="221" cy="201" />
+ <line x1="221" x2="221" y1="61" y2="316"/>
+ <ellipse class="fixture" rx="50" ry="25" cx="221" cy="61" />
+ <text x="221" y="61">order</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="221" cy="131" />
+ <text x="221" y="131">c1</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="221" cy="271" />
+ <text x="221" y="271">c3</text>
+ <rect class="test" width="110" height="50" x="166" y="316" />
+ <text x="221" y="341">test_order</text>
+ <!-- scope name -->
+ <defs>
+ <path d="M31,201 A 190 190 0 0 1 411 201" id="testClassWith"/>
+ </defs>
+ <text class="class">
+ <textPath xlink:href="#testClassWith" startOffset="50%">TestWithC1Request</textPath>
+ </text>
+
+ <!-- TestWithoutC1Request -->
+ <circle class="class" r="200" cx="641" cy="201" />
+ <line x1="641" x2="641" y1="61" y2="316"/>
+ <ellipse class="fixture" rx="50" ry="25" cx="641" cy="61" />
+ <text x="641" y="61">order</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="641" cy="131" />
+ <text x="641" y="131">c1</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="641" cy="271" />
+ <text x="641" y="271">c2</text>
+ <rect class="test" width="110" height="50" x="586" y="316" />
+ <text x="641" y="341">test_order</text>
+ <!-- scope name -->
+ <defs>
+ <path d="M451,201 A 190 190 0 0 1 831 201" id="testClassWithout"/>
+ </defs>
+ <text class="class">
+ <textPath xlink:href="#testClassWithout" startOffset="50%">TestWithoutC1Request</textPath>
+ </text>
+
+ <rect class="autouse" width="862" height="40" x="1" y="181" />
+ <rect width="10" height="100" class="autouse" x="426" y="151" />
+ <text x="431" y="201">autouse</text>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_temp_effects.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_temp_effects.py
new file mode 100644
index 0000000000..ba01ad32f5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_temp_effects.py
@@ -0,0 +1,36 @@
+import pytest
+
+
+@pytest.fixture
+def order():
+ return []
+
+
+@pytest.fixture
+def c1(order):
+ order.append("c1")
+
+
+@pytest.fixture
+def c2(order):
+ order.append("c2")
+
+
+class TestClassWithAutouse:
+ @pytest.fixture(autouse=True)
+ def c3(self, order, c2):
+ order.append("c3")
+
+ def test_req(self, order, c1):
+ assert order == ["c2", "c3", "c1"]
+
+ def test_no_req(self, order):
+ assert order == ["c2", "c3"]
+
+
+class TestClassWithoutAutouse:
+ def test_req(self, order, c1):
+ assert order == ["c1"]
+
+ def test_no_req(self, order):
+ assert order == []
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_temp_effects.svg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_temp_effects.svg
new file mode 100644
index 0000000000..2a9f51673f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_autouse_temp_effects.svg
@@ -0,0 +1,100 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="862" height="502">
+ <style>
+ text {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ dominant-baseline: middle;
+ text-anchor: middle;
+ fill: #062886;
+ font-size: medium;
+ }
+ ellipse.fixture, rect.test {
+ fill: #eeffcc;
+ stroke: #007020;
+ stroke-width: 2;
+ }
+ text.fixture {
+ color: #06287e;
+ }
+ circle.class {
+ fill: #c3e0ec;
+ stroke: #0e84b5;
+ stroke-width: 2;
+ }
+ text.class {
+ fill: #0e84b5;
+ }
+ line {
+ stroke: black;
+ stroke-width: 2;
+ }
+ rect.autouse {
+ fill: #ca7f3d;
+ }
+ </style>
+
+ <!-- TestWithAutouse -->
+ <circle class="class" r="250" cx="251" cy="251" />
+ <!-- scope name -->
+ <defs>
+ <path d="M11,251 A 240 240 0 0 1 491 251" id="testClassWith"/>
+ </defs>
+ <text class="class">
+ <textPath xlink:href="#testClassWith" startOffset="50%">TestWithAutouse</textPath>
+ </text>
+ <mask id="autouseScope">
+ <circle fill="white" r="249" cx="251" cy="251" />
+ </mask>
+
+ <!-- TestWithAutouse.test_req -->
+ <line x1="176" x2="176" y1="76" y2="426"/>
+ <ellipse class="fixture" rx="50" ry="25" cx="176" cy="76" />
+ <text x="176" y="76">order</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="176" cy="146" />
+ <text x="176" y="146">c2</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="176" cy="216" />
+ <text x="176" y="216">c3</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="176" cy="356" />
+ <text x="176" y="356">c1</text>
+ <rect class="test" width="100" height="50" x="126" y="401" />
+ <text x="176" y="426">test_req</text>
+
+ <!-- TestWithAutouse.test_no_req -->
+ <line x1="326" x2="326" y1="76" y2="346"/>
+ <ellipse class="fixture" rx="50" ry="25" cx="326" cy="76" />
+ <text x="326" y="76">order</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="326" cy="146" />
+ <text x="326" y="146">c2</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="326" cy="216" />
+ <text x="326" y="216">c3</text>
+ <rect class="test" width="120" height="50" x="266" y="331" />
+ <text x="326" y="356">test_no_req</text>
+
+ <rect class="autouse" width="500" height="40" x="1" y="266" mask="url(#autouseScope)"/>
+ <text x="261" y="286">autouse</text>
+
+ <!-- TestWithoutAutouse -->
+ <circle class="class" r="170" cx="691" cy="251" />
+ <!-- scope name -->
+ <defs>
+ <path d="M 531,251 A 160 160 0 0 1 851 251" id="testClassWithout"/>
+ </defs>
+ <text class="class">
+ <textPath xlink:href="#testClassWithout" startOffset="50%">TestWithoutAutouse</textPath>
+ </text>
+
+ <!-- TestWithoutAutouse.test_req -->
+ <line x1="616" x2="616" y1="181" y2="321"/>
+ <ellipse class="fixture" rx="50" ry="25" cx="616" cy="181" />
+ <text x="616" y="181">order</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="616" cy="251" />
+ <text x="616" y="251">c1</text>
+ <rect class="test" width="100" height="50" x="566" y="296" />
+ <text x="616" y="321">test_req</text>
+
+ <!-- TestWithoutAutouse.test_no_req -->
+ <line x1="766" x2="766" y1="181" y2="251"/>
+ <ellipse class="fixture" rx="50" ry="25" cx="766" cy="181" />
+ <text x="766" y="181">order</text>
+ <rect class="test" width="120" height="50" x="706" y="226" />
+ <text x="766" y="251">test_no_req</text>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies.py
new file mode 100644
index 0000000000..b3512c2a64
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies.py
@@ -0,0 +1,45 @@
+import pytest
+
+
+@pytest.fixture
+def order():
+ return []
+
+
+@pytest.fixture
+def a(order):
+ order.append("a")
+
+
+@pytest.fixture
+def b(a, order):
+ order.append("b")
+
+
+@pytest.fixture
+def c(a, b, order):
+ order.append("c")
+
+
+@pytest.fixture
+def d(c, b, order):
+ order.append("d")
+
+
+@pytest.fixture
+def e(d, b, order):
+ order.append("e")
+
+
+@pytest.fixture
+def f(e, order):
+ order.append("f")
+
+
+@pytest.fixture
+def g(f, c, order):
+ order.append("g")
+
+
+def test_order(g, order):
+ assert order == ["a", "b", "c", "d", "e", "f", "g"]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies.svg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies.svg
new file mode 100644
index 0000000000..24418e63c9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies.svg
@@ -0,0 +1,60 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="252" height="612">
+ <style>
+ text {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ dominant-baseline: middle;
+ text-anchor: middle;
+ fill: #062886;
+ font-size: medium;
+ }
+ ellipse.fixture, rect.test {
+ fill: #eeffcc;
+ stroke: #007020;
+ stroke-width: 2;
+ }
+ text.fixture {
+ color: #06287e;
+ }
+ circle.class {
+ fill: #c3e0ec;
+ stroke: #0e84b5;
+ stroke-width: 2;
+ }
+ text.class {
+ fill: #0e84b5;
+ }
+ path, line {
+ stroke: black;
+ stroke-width: 2;
+ fill: none;
+ }
+ </style>
+ <path d="M126,516 L26,436 L26,236" />
+ <path d="M226,376 L226,236 L126,166" />
+ <line x1="126" x2="126" y1="586" y2="446" />
+ <line x1="126" x2="226" y1="446" y2="376" />
+ <line x1="226" x2="126" y1="376" y2="306" />
+ <line x1="126" x2="26" y1="306" y2="236" />
+ <line x1="126" x2="126" y1="306" y2="166" />
+ <line x1="26" x2="126" y1="236" y2="166" />
+ <line x1="126" x2="126" y1="166" y2="26" />
+ <line x1="126" x2="126" y1="96" y2="26" />
+ <ellipse class="fixture" rx="50" ry="25" cx="126" cy="26" />
+ <text x="126" y="26">order</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="96" />
+ <text x="126" y="96">a</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="166" />
+ <text x="126" y="166">b</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="26" cy="236" />
+ <text x="26" y="236">c</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="306" />
+ <text x="126" y="306">d</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="226" cy="376" />
+ <text x="226" y="376">e</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="446" />
+ <text x="126" y="446">f</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="516" />
+ <text x="126" y="516">g</text>
+ <rect class="test" width="110" height="50" x="71" y="561" />
+ <text x="126" y="586">test_order</text>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies_flat.svg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies_flat.svg
new file mode 100644
index 0000000000..bbe7ad2833
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies_flat.svg
@@ -0,0 +1,51 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="112" height="612">
+ <style>
+ text {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ dominant-baseline: middle;
+ text-anchor: middle;
+ fill: #062886;
+ font-size: medium;
+ }
+ ellipse.fixture, rect.test {
+ fill: #eeffcc;
+ stroke: #007020;
+ stroke-width: 2;
+ }
+ text.fixture {
+ color: #06287e;
+ }
+ circle.class {
+ fill: #c3e0ec;
+ stroke: #0e84b5;
+ stroke-width: 2;
+ }
+ text.class {
+ fill: #0e84b5;
+ }
+ path, line {
+ stroke: black;
+ stroke-width: 2;
+ fill: none;
+ }
+ </style>
+ <line x1="56" x2="56" y1="611" y2="26" />
+ <ellipse class="fixture" rx="50" ry="25" cx="56" cy="26" />
+ <text x="56" y="26">order</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="96" />
+ <text x="56" y="96">a</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="166" />
+ <text x="56" y="166">b</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="236" />
+ <text x="56" y="236">c</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="306" />
+ <text x="56" y="306">d</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="376" />
+ <text x="56" y="376">e</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="446" />
+ <text x="56" y="446">f</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="56" cy="516" />
+ <text x="56" y="516">g</text>
+ <rect class="test" width="110" height="50" x="1" y="561" />
+ <text x="56" y="586">test_order</text>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies_unclear.svg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies_unclear.svg
new file mode 100644
index 0000000000..150724f80a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_dependencies_unclear.svg
@@ -0,0 +1,60 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="252" height="542">
+ <style>
+ text {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ dominant-baseline: middle;
+ text-anchor: middle;
+ fill: #062886;
+ font-size: medium;
+ }
+ ellipse.fixture, rect.test {
+ fill: #eeffcc;
+ stroke: #007020;
+ stroke-width: 2;
+ }
+ text.fixture {
+ color: #06287e;
+ }
+ circle.class {
+ fill: #c3e0ec;
+ stroke: #0e84b5;
+ stroke-width: 2;
+ }
+ text.class {
+ fill: #0e84b5;
+ }
+ path, line {
+ stroke: black;
+ stroke-width: 2;
+ fill: none;
+ }
+ </style>
+ <path d="M126,446 L26,376 L26,236" />
+ <path d="M226,306 L126,236 L126,166" />
+ <line x1="126" x2="126" y1="516" y2="446" />
+ <line x1="226" x2="226" y1="376" y2="306" />
+ <line x1="226" x2="226" y1="306" y2="236" />
+ <line x1="226" x2="126" y1="236" y2="166" />
+ <line x1="126" x2="226" y1="446" y2="376" />
+ <line x1="26" x2="126" y1="236" y2="166" />
+ <line x1="126" x2="126" y1="166" y2="96" />
+ <line x1="126" x2="126" y1="96" y2="26" />
+ <ellipse class="fixture" rx="50" ry="25" cx="126" cy="26" />
+ <text x="126" y="26">order</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="96" />
+ <text x="126" y="96">a</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="166" />
+ <text x="126" y="166">b</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="26" cy="236" />
+ <text x="26" y="236">c</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="226" cy="236" />
+ <text x="226" y="236">d</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="226" cy="306" />
+ <text x="226" y="306">e</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="226" cy="376" />
+ <text x="226" y="376">f</text>
+ <ellipse class="fixture" rx="25" ry="25" cx="126" cy="446" />
+ <text x="126" y="446">g</text>
+ <rect class="test" width="110" height="50" x="71" y="491" />
+ <text x="126" y="516">test_order</text>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_scope.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_scope.py
new file mode 100644
index 0000000000..5d9487cab3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_scope.py
@@ -0,0 +1,36 @@
+import pytest
+
+
+@pytest.fixture(scope="session")
+def order():
+ return []
+
+
+@pytest.fixture
+def func(order):
+ order.append("function")
+
+
+@pytest.fixture(scope="class")
+def cls(order):
+ order.append("class")
+
+
+@pytest.fixture(scope="module")
+def mod(order):
+ order.append("module")
+
+
+@pytest.fixture(scope="package")
+def pack(order):
+ order.append("package")
+
+
+@pytest.fixture(scope="session")
+def sess(order):
+ order.append("session")
+
+
+class TestClass:
+ def test_order(self, func, cls, mod, pack, sess, order):
+ assert order == ["session", "package", "module", "class", "function"]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_scope.svg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_scope.svg
new file mode 100644
index 0000000000..f38ee60f1f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_order_scope.svg
@@ -0,0 +1,55 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="262" height="537">
+ <style>
+ text {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ dominant-baseline: middle;
+ text-anchor: middle;
+ fill: #062886;
+ font-size: medium;
+ }
+ ellipse.fixture, rect.test {
+ fill: #eeffcc;
+ stroke: #007020;
+ stroke-width: 2;
+ }
+ text.fixture {
+ color: #06287e;
+ }
+ circle.class {
+ fill: #c3e0ec;
+ stroke: #0e84b5;
+ stroke-width: 2;
+ }
+ text.class {
+ fill: #0e84b5;
+ }
+ line {
+ stroke: black;
+ stroke-width: 2;
+ }
+ </style>
+ <!-- TestClass -->
+ <circle class="class" r="130" cx="131" cy="406" />
+ <line x1="131" x2="131" y1="21" y2="446"/>
+ <ellipse class="fixture" rx="50" ry="25" cx="131" cy="26" />
+ <text x="131" y="26">order</text>
+ <ellipse class="fixture" rx="50" ry="25" cx="131" cy="96" />
+ <text x="131" y="96">sess</text>
+ <ellipse class="fixture" rx="50" ry="25" cx="131" cy="166" />
+ <text x="131" y="166">pack</text>
+ <ellipse class="fixture" rx="50" ry="25" cx="131" cy="236" />
+ <text x="131" y="236">mod</text>
+ <ellipse class="fixture" rx="50" ry="25" cx="131" cy="306" />
+ <text x="131" y="306">cls</text>
+ <ellipse class="fixture" rx="50" ry="25" cx="131" cy="376" />
+ <text x="131" y="376">func</text>
+ <rect class="test" width="110" height="50" x="76" y="421" />
+ <text x="131" y="446">test_order</text>
+ <!-- scope name -->
+ <defs>
+ <path d="M131,526 A 120 120 0 0 1 136 286" id="testClass"/>
+ </defs>
+ <text class="class">
+ <textPath xlink:href="#testClass" startOffset="50%">TestClass</textPath>
+ </text>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_request_different_scope.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_request_different_scope.py
new file mode 100644
index 0000000000..00e2e46d84
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_request_different_scope.py
@@ -0,0 +1,29 @@
+import pytest
+
+
+@pytest.fixture
+def order():
+ return []
+
+
+@pytest.fixture
+def outer(order, inner):
+ order.append("outer")
+
+
+class TestOne:
+ @pytest.fixture
+ def inner(self, order):
+ order.append("one")
+
+ def test_order(self, order, outer):
+ assert order == ["one", "outer"]
+
+
+class TestTwo:
+ @pytest.fixture
+ def inner(self, order):
+ order.append("two")
+
+ def test_order(self, order, outer):
+ assert order == ["two", "outer"]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_request_different_scope.svg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_request_different_scope.svg
new file mode 100644
index 0000000000..0a78a889fd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/fixtures/test_fixtures_request_different_scope.svg
@@ -0,0 +1,115 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="562" height="532">
+ <style>
+ text {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ dominant-baseline: middle;
+ text-anchor: middle;
+ fill: #062886;
+ font-size: medium;
+ }
+ ellipse.fixture, rect.test {
+ fill: #eeffcc;
+ stroke: #007020;
+ stroke-width: 2;
+ }
+ text.fixture {
+ color: #06287e;
+ }
+ circle.class {
+ fill: #c3e0ec;
+ stroke: #0e84b5;
+ stroke-width: 2;
+ }
+ circle.module {
+ fill: #c3e0ec;
+ stroke: #0e84b5;
+ stroke-width: 2;
+ }
+ text.class, text.module {
+ fill: #0e84b5;
+ }
+ line, path {
+ stroke: black;
+ stroke-width: 2;
+ fill: none;
+ }
+ </style>
+ <!-- main scope -->
+ <circle class="module" r="265" cx="281" cy="266" />
+ <!-- scope name -->
+ <defs>
+ <path d="M 26,266 A 255 255 0 0 1 536 266" id="testModule"/>
+ </defs>
+ <text class="module">
+ <textPath xlink:href="#testModule" startOffset="50%">test_fixtures_request_different_scope.py</textPath>
+ </text>
+
+ <!-- TestOne -->
+ <circle class="class" r="100" cx="141" cy="266" />
+ <!-- inner -->
+ <line x1="141" x2="141" y1="231" y2="301"/>
+ <!-- order -->
+ <path d="M 141 296 L 201 296 L 211 286 L 211 146 L 221 136 L 281 136" />
+ <!-- outer -->
+ <path d="M 141 306 L 201 306 L 211 316 L 211 386 L 221 396 L 281 396" />
+ <ellipse class="fixture" rx="50" ry="25" cx="141" cy="231" />
+ <text x="141" y="231">inner</text>
+ <rect class="test" width="110" height="50" x="86" y="276" />
+ <text x="141" y="301">test_order</text>
+ <!-- scope name -->
+ <defs>
+ <path d="M 51,266 A 90 90 0 0 1 231 266" id="testOne"/>
+ </defs>
+ <text class="class">
+ <textPath xlink:href="#testOne" startOffset="50%">TestOne</textPath>
+ </text>
+ <!-- scope order number -->
+ <mask id="testOneOrderMask">
+ <rect x="0" y="0" width="100%" height="100%" fill="white"/>
+ <circle fill="black" stroke="white" stroke-width="2" r="100" cx="141" cy="266" />
+ </mask>
+ <circle class="module" r="15" cx="41" cy="266" mask="url(#testOneOrderMask)"/>
+ <text class="module" x="41" y="266">1</text>
+ <!-- scope order number -->
+ <mask id="testMainOrderMask">
+ <rect x="0" y="0" width="100%" height="100%" fill="white"/>
+ <circle fill="black" stroke="white" stroke-width="2" r="265" cx="281" cy="266" />
+ </mask>
+ <circle class="module" r="15" cx="16" cy="266" mask="url(#testMainOrderMask)"/>
+ <text class="module" x="16" y="266">2</text>
+
+ <!-- TestTwo -->
+ <circle class="class" r="100" cx="421" cy="266" />
+ <!-- inner -->
+ <line x1="421" x2="421" y1="231" y2="301"/>
+ <!-- order -->
+ <path d="M 421 296 L 361 296 L 351 286 L 351 146 L 341 136 L 281 136" />
+ <!-- outer -->
+ <path d="M 421 306 L 361 306 L 351 316 L 351 386 L 341 396 L 281 396" />
+ <ellipse class="fixture" rx="50" ry="25" cx="421" cy="231" />
+ <text x="421" y="231">inner</text>
+ <rect class="test" width="110" height="50" x="366" y="276" />
+ <text x="421" y="301">test_order</text>
+ <!-- scope name -->
+ <defs>
+ <path d="M 331,266 A 90 90 0 0 1 511 266" id="testTwo"/>
+ </defs>
+ <text class="class">
+ <textPath xlink:href="#testTwo" startOffset="50%">TestTwo</textPath>
+ </text>
+ <!-- scope order number -->
+ <mask id="testTwoOrderMask">
+ <rect x="0" y="0" width="100%" height="100%" fill="white"/>
+ <circle fill="black" stroke="white" stroke-width="2" r="100" cx="421" cy="266" />
+ </mask>
+ <circle class="module" r="15" cx="521" cy="266" mask="url(#testTwoOrderMask)"/>
+ <text class="module" x="521" y="266">1</text>
+ <!-- scope order number -->
+ <circle class="module" r="15" cx="546" cy="266" mask="url(#testMainOrderMask)"/>
+ <text class="module" x="546" y="266">2</text>
+
+ <ellipse class="fixture" rx="50" ry="25" cx="281" cy="396" />
+ <text x="281" y="396">outer</text>
+ <ellipse class="fixture" rx="50" ry="25" cx="281" cy="136" />
+ <text x="281" y="136">order</text>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/index.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/index.rst
new file mode 100644
index 0000000000..71e855534f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/index.rst
@@ -0,0 +1,34 @@
+
+.. _examples:
+
+Examples and customization tricks
+=================================
+
+Here is a (growing) list of examples. :ref:`Contact <contact>` us if you
+need more examples or have questions. Also take a look at the
+:ref:`comprehensive documentation <toc>` which contains many example
+snippets as well. Also, `pytest on stackoverflow.com
+<http://stackoverflow.com/search?q=pytest>`_ often comes with example
+answers.
+
+For basic examples, see
+
+- :ref:`get-started` for basic introductory examples
+- :ref:`assert` for basic assertion examples
+- :ref:`Fixtures <fixtures>` for basic fixture/setup examples
+- :ref:`parametrize` for basic test function parametrization
+- :ref:`unittest` for basic unittest integration
+- :ref:`noseintegration` for basic nosetests integration
+
+The following examples aim at various use cases you might encounter.
+
+.. toctree::
+ :maxdepth: 2
+
+ reportingdemo
+ simple
+ parametrize
+ markers
+ special
+ pythoncollection
+ nonpython
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/markers.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/markers.rst
new file mode 100644
index 0000000000..3226c0871e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/markers.rst
@@ -0,0 +1,734 @@
+
+.. _`mark examples`:
+
+Working with custom markers
+=================================================
+
+Here are some examples using the :ref:`mark` mechanism.
+
+.. _`mark run`:
+
+Marking test functions and selecting them for a run
+----------------------------------------------------
+
+You can "mark" a test function with custom metadata like this:
+
+.. code-block:: python
+
+ # content of test_server.py
+
+ import pytest
+
+
+ @pytest.mark.webtest
+ def test_send_http():
+ pass # perform some webtest test for your app
+
+
+ def test_something_quick():
+ pass
+
+
+ def test_another():
+ pass
+
+
+ class TestClass:
+ def test_method(self):
+ pass
+
+
+
+You can then restrict a test run to only run tests marked with ``webtest``:
+
+.. code-block:: pytest
+
+ $ pytest -v -m webtest
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 4 items / 3 deselected / 1 selected
+
+ test_server.py::test_send_http PASSED [100%]
+
+ ===================== 1 passed, 3 deselected in 0.12s ======================
+
+Or the inverse, running all tests except the webtest ones:
+
+.. code-block:: pytest
+
+ $ pytest -v -m "not webtest"
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 4 items / 1 deselected / 3 selected
+
+ test_server.py::test_something_quick PASSED [ 33%]
+ test_server.py::test_another PASSED [ 66%]
+ test_server.py::TestClass::test_method PASSED [100%]
+
+ ===================== 3 passed, 1 deselected in 0.12s ======================
+
+Selecting tests based on their node ID
+--------------------------------------
+
+You can provide one or more :ref:`node IDs <node-id>` as positional
+arguments to select only specified tests. This makes it easy to select
+tests based on their module, class, method, or function name:
+
+.. code-block:: pytest
+
+ $ pytest -v test_server.py::TestClass::test_method
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 1 item
+
+ test_server.py::TestClass::test_method PASSED [100%]
+
+ ============================ 1 passed in 0.12s =============================
+
+You can also select on the class:
+
+.. code-block:: pytest
+
+ $ pytest -v test_server.py::TestClass
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 1 item
+
+ test_server.py::TestClass::test_method PASSED [100%]
+
+ ============================ 1 passed in 0.12s =============================
+
+Or select multiple nodes:
+
+.. code-block:: pytest
+
+ $ pytest -v test_server.py::TestClass test_server.py::test_send_http
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 2 items
+
+ test_server.py::TestClass::test_method PASSED [ 50%]
+ test_server.py::test_send_http PASSED [100%]
+
+ ============================ 2 passed in 0.12s =============================
+
+.. _node-id:
+
+.. note::
+
+ Node IDs are of the form ``module.py::class::method`` or
+ ``module.py::function``. Node IDs control which tests are
+ collected, so ``module.py::class`` will select all test methods
+ on the class. Nodes are also created for each parameter of a
+ parametrized fixture or test, so selecting a parametrized test
+ must include the parameter value, e.g.
+ ``module.py::function[param]``.
+
+ Node IDs for failing tests are displayed in the test summary info
+ when running pytest with the ``-rf`` option. You can also
+ construct Node IDs from the output of ``pytest --collectonly``.
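+
+For instance, a single parameter instance can be selected by quoting its full
+ID on the command line. The module below is hypothetical and only illustrates
+the bracket syntax:
+
+.. code-block:: python
+
+    # content of test_params.py (hypothetical illustration)
+    import pytest
+
+
+    @pytest.mark.parametrize("arg", ["a", "b"])
+    def test_param(arg):
+        pass
+
+Running ``pytest "test_params.py::test_param[a]"`` then collects and runs only
+the ``a`` instance.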
+
+Using ``-k expr`` to select tests based on their name
+-------------------------------------------------------
+
+.. versionadded:: 2.0/2.3.4
+
+You can use the ``-k`` command line option to specify an expression
+which implements a substring match on the test names instead of the
+exact match on markers that ``-m`` provides. This makes it easy to
+select tests based on their names:
+
+.. versionchanged:: 5.4
+
+The expression matching is now case-insensitive.
+
+.. code-block:: pytest
+
+ $ pytest -v -k http # running with the above defined example module
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 4 items / 3 deselected / 1 selected
+
+ test_server.py::test_send_http PASSED [100%]
+
+ ===================== 1 passed, 3 deselected in 0.12s ======================
+
+And you can also run all tests except the ones that match the keyword:
+
+.. code-block:: pytest
+
+ $ pytest -k "not send_http" -v
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 4 items / 1 deselected / 3 selected
+
+ test_server.py::test_something_quick PASSED [ 33%]
+ test_server.py::test_another PASSED [ 66%]
+ test_server.py::TestClass::test_method PASSED [100%]
+
+ ===================== 3 passed, 1 deselected in 0.12s ======================
+
+Or to select "http" and "quick" tests:
+
+.. code-block:: pytest
+
+ $ pytest -k "http or quick" -v
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 4 items / 2 deselected / 2 selected
+
+ test_server.py::test_send_http PASSED [ 50%]
+ test_server.py::test_something_quick PASSED [100%]
+
+ ===================== 2 passed, 2 deselected in 0.12s ======================
+
+You can use ``and``, ``or``, ``not`` and parentheses.
+
+
+In addition to the test's name, ``-k`` also matches the names of the test's parents (usually, the name of the file and class it's in),
+attributes set on the test function, markers applied to it or its parents, and any :attr:`extra keywords <_pytest.nodes.Node.extra_keyword_matches>`
+explicitly added to it or its parents.
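+
+As a sketch of what those keywords can look like (the module
+``test_keywords.py`` below is hypothetical and not part of the
+``test_server.py`` example above):
+
+.. code-block:: python
+
+    # content of test_keywords.py (hypothetical)
+    import pytest
+
+
+    @pytest.mark.slow
+    class TestDatabase:
+        def test_insert(self):
+            pass
+
+        def test_delete(self):
+            pass
+
+
+    def test_config():
+        pass
+
+With this module, ``pytest -k "Database and not delete"`` would select only
+``TestDatabase::test_insert`` (the class name matches, the excluded substring
+does not), and ``pytest -k slow`` would select both class methods because the
+``slow`` marker name is part of their keywords.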
+
+
+Registering markers
+-------------------------------------
+
+
+
+.. ini-syntax for custom markers:
+
+Registering markers for your test suite is simple:
+
+.. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ markers =
+ webtest: mark a test as a webtest.
+ slow: mark test as slow.
+
+Multiple custom markers can be registered by defining each one on its own line, as shown in the example above.
+
+You can ask which markers exist for your test suite - the list includes the just-defined ``webtest`` and ``slow`` markers:
+
+.. code-block:: pytest
+
+ $ pytest --markers
+ @pytest.mark.webtest: mark a test as a webtest.
+
+ @pytest.mark.slow: mark test as slow.
+
+ @pytest.mark.filterwarnings(warning): add a warning filter to the given test. see https://docs.pytest.org/en/stable/how-to/capture-warnings.html#pytest-mark-filterwarnings
+
+ @pytest.mark.skip(reason=None): skip the given test function with an optional reason. Example: skip(reason="no way of currently testing this") skips the test.
+
+ @pytest.mark.skipif(condition, ..., *, reason=...): skip the given test function if any of the conditions evaluate to True. Example: skipif(sys.platform == 'win32') skips the test if we are on the win32 platform. See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-skipif
+
+ @pytest.mark.xfail(condition, ..., *, reason=..., run=True, raises=None, strict=xfail_strict): mark the test function as an expected failure if any of the conditions evaluate to True. Optionally specify a reason for better reporting and run=False if you don't even want to execute the test function. If only specific exception(s) are expected, you can list them in raises, and if the test fails in other ways, it will be reported as a true failure. See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-xfail
+
+ @pytest.mark.parametrize(argnames, argvalues): call a test function multiple times passing in different arguments in turn. argvalues generally needs to be a list of values if argnames specifies only one name or a list of tuples of values if argnames specifies multiple names. Example: @parametrize('arg1', [1,2]) would lead to two calls of the decorated test function, one with arg1=1 and another with arg1=2.see https://docs.pytest.org/en/stable/how-to/parametrize.html for more info and examples.
+
+ @pytest.mark.usefixtures(fixturename1, fixturename2, ...): mark tests as needing all of the specified fixtures. see https://docs.pytest.org/en/stable/explanation/fixtures.html#usefixtures
+
+ @pytest.mark.tryfirst: mark a hook implementation function such that the plugin machinery will try to call it first/as early as possible.
+
+ @pytest.mark.trylast: mark a hook implementation function such that the plugin machinery will try to call it last/as late as possible.
+
+
+For an example on how to add and work with markers from a plugin, see
+:ref:`adding a custom marker from a plugin`.
+
+.. note::
+
+ It is recommended to explicitly register markers so that:
+
+ * There is one place in your test suite defining your markers
+
+ * Asking for existing markers via ``pytest --markers`` gives good output
+
+ * Typos in function markers are treated as an error if you use
+ the ``--strict-markers`` option.
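+
+A minimal sketch of enabling ``--strict-markers`` by default (assuming a
+``pytest.ini`` like the one above; ``addopts`` adds the listed options to
+every invocation):
+
+.. code-block:: ini
+
+    # content of pytest.ini (sketch)
+    [pytest]
+    addopts = --strict-markers
+    markers =
+        webtest: mark a test as a webtest.
+        slow: mark test as slow.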
+
+.. _`scoped-marking`:
+
+Marking whole classes or modules
+----------------------------------------------------
+
+You may use ``pytest.mark`` decorators with classes to apply markers to all of
+their test methods:
+
+.. code-block:: python
+
+ # content of test_mark_classlevel.py
+ import pytest
+
+
+ @pytest.mark.webtest
+ class TestClass:
+ def test_startup(self):
+ pass
+
+ def test_startup_and_more(self):
+ pass
+
+This is equivalent to directly applying the decorator to the
+two test functions.
+
+To apply marks at the module level, use the :globalvar:`pytestmark` global variable::
+
+ import pytest
+ pytestmark = pytest.mark.webtest
+
+or multiple markers::
+
+ pytestmark = [pytest.mark.webtest, pytest.mark.slowtest]
+
+
+For legacy reasons (dating from before class decorators were introduced), it is possible to set the
+:globalvar:`pytestmark` attribute on a test class like this:
+
+.. code-block:: python
+
+ import pytest
+
+
+ class TestClass:
+ pytestmark = pytest.mark.webtest
+
+.. _`marking individual tests when using parametrize`:
+
+Marking individual tests when using parametrize
+-----------------------------------------------
+
+When using parametrize, applying a mark will make it apply
+to each individual test. However, it is also possible to
+apply a marker to an individual test instance:
+
+.. code-block:: python
+
+ import pytest
+
+
+ @pytest.mark.foo
+ @pytest.mark.parametrize(
+ ("n", "expected"), [(1, 2), pytest.param(1, 3, marks=pytest.mark.bar), (2, 3)]
+ )
+ def test_increment(n, expected):
+ assert n + 1 == expected
+
+In this example the mark "foo" will apply to each of the three
+tests, whereas the "bar" mark is only applied to the second test.
+Skip and xfail marks can also be applied in this way, see :ref:`skip/xfail with parametrize`.
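+
+For instance, a minimal sketch (illustrative only, not tied to the example
+above) marking one parameter set as an expected failure:
+
+.. code-block:: python
+
+    import pytest
+
+
+    @pytest.mark.parametrize(
+        ("n", "expected"),
+        [
+            (1, 2),
+            pytest.param(1, 0, marks=pytest.mark.xfail(reason="deliberately wrong")),
+        ],
+    )
+    def test_increment_xfail(n, expected):
+        assert n + 1 == expected
+
+Here the second instance is reported as an expected failure (``xfail``) rather
+than a regular failure.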
+
+.. _`adding a custom marker from a plugin`:
+
+Custom marker and command line option to control test runs
+----------------------------------------------------------
+
+.. regendoc:wipe
+
+Plugins can provide custom markers and implement specific behaviour
+based on them. This is a self-contained example which adds a command
+line option and a parametrized test function marker to run tests
+specified via named environments:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+ import pytest
+
+
+ def pytest_addoption(parser):
+ parser.addoption(
+ "-E",
+ action="store",
+ metavar="NAME",
+ help="only run tests matching the environment NAME.",
+ )
+
+
+ def pytest_configure(config):
+ # register an additional marker
+ config.addinivalue_line(
+ "markers", "env(name): mark test to run only on named environment"
+ )
+
+
+ def pytest_runtest_setup(item):
+ envnames = [mark.args[0] for mark in item.iter_markers(name="env")]
+ if envnames:
+ if item.config.getoption("-E") not in envnames:
+ pytest.skip("test requires env in {!r}".format(envnames))
+
+A test file using this local plugin:
+
+.. code-block:: python
+
+ # content of test_someenv.py
+
+ import pytest
+
+
+ @pytest.mark.env("stage1")
+ def test_basic_db_operation():
+ pass
+
+and an example invocation specifying a different environment than what
+the test needs:
+
+.. code-block:: pytest
+
+ $ pytest -E stage2
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 1 item
+
+ test_someenv.py s [100%]
+
+ ============================ 1 skipped in 0.12s ============================
+
+and here is one that specifies exactly the environment needed:
+
+.. code-block:: pytest
+
+ $ pytest -E stage1
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 1 item
+
+ test_someenv.py . [100%]
+
+ ============================ 1 passed in 0.12s =============================
+
+The ``--markers`` option always gives you a list of available markers:
+
+.. code-block:: pytest
+
+ $ pytest --markers
+ @pytest.mark.env(name): mark test to run only on named environment
+
+ @pytest.mark.filterwarnings(warning): add a warning filter to the given test. see https://docs.pytest.org/en/stable/how-to/capture-warnings.html#pytest-mark-filterwarnings
+
+ @pytest.mark.skip(reason=None): skip the given test function with an optional reason. Example: skip(reason="no way of currently testing this") skips the test.
+
+ @pytest.mark.skipif(condition, ..., *, reason=...): skip the given test function if any of the conditions evaluate to True. Example: skipif(sys.platform == 'win32') skips the test if we are on the win32 platform. See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-skipif
+
+ @pytest.mark.xfail(condition, ..., *, reason=..., run=True, raises=None, strict=xfail_strict): mark the test function as an expected failure if any of the conditions evaluate to True. Optionally specify a reason for better reporting and run=False if you don't even want to execute the test function. If only specific exception(s) are expected, you can list them in raises, and if the test fails in other ways, it will be reported as a true failure. See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-xfail
+
+ @pytest.mark.parametrize(argnames, argvalues): call a test function multiple times passing in different arguments in turn. argvalues generally needs to be a list of values if argnames specifies only one name or a list of tuples of values if argnames specifies multiple names. Example: @parametrize('arg1', [1,2]) would lead to two calls of the decorated test function, one with arg1=1 and another with arg1=2.see https://docs.pytest.org/en/stable/how-to/parametrize.html for more info and examples.
+
+ @pytest.mark.usefixtures(fixturename1, fixturename2, ...): mark tests as needing all of the specified fixtures. see https://docs.pytest.org/en/stable/explanation/fixtures.html#usefixtures
+
+ @pytest.mark.tryfirst: mark a hook implementation function such that the plugin machinery will try to call it first/as early as possible.
+
+ @pytest.mark.trylast: mark a hook implementation function such that the plugin machinery will try to call it last/as late as possible.
+
+
+.. _`passing callables to custom markers`:
+
+Passing a callable to custom markers
+--------------------------------------------
+
+.. regendoc:wipe
+
+Below is the config file that will be used in the next examples:
+
+.. code-block:: python
+
+ # content of conftest.py
+ import sys
+
+
+ def pytest_runtest_setup(item):
+ for marker in item.iter_markers(name="my_marker"):
+ print(marker)
+ sys.stdout.flush()
+
+A custom marker can have its argument set, i.e. ``args`` and ``kwargs`` properties, defined by either invoking it as a callable or using ``pytest.mark.MARKER_NAME.with_args``. These two methods achieve the same effect most of the time.
+
+However, if there is a callable as the single positional argument with no keyword arguments, using ``pytest.mark.MARKER_NAME(c)`` will not pass ``c`` as a positional argument but instead decorate ``c`` with the custom marker (see :ref:`MarkDecorator <mark>`). Fortunately, ``pytest.mark.MARKER_NAME.with_args`` comes to the rescue:
+
+.. code-block:: python
+
+ # content of test_custom_marker.py
+ import pytest
+
+
+ def hello_world(*args, **kwargs):
+ return "Hello World"
+
+
+ @pytest.mark.my_marker.with_args(hello_world)
+ def test_with_args():
+ pass
+
+The output is as follows:
+
+.. code-block:: pytest
+
+ $ pytest -q -s
+ Mark(name='my_marker', args=(<function hello_world at 0xdeadbeef0001>,), kwargs={})
+ .
+ 1 passed in 0.12s
+
+We can see that the custom marker has its argument set extended with the function ``hello_world``. This is the key difference between creating a custom marker as a callable, which invokes ``__call__`` behind the scenes, and using ``with_args``.
+
+
+Reading markers which were set from multiple places
+----------------------------------------------------
+
+.. versionadded: 2.2.2
+
+.. regendoc:wipe
+
+If you are using markers heavily in your test suite, you may encounter the case where a marker is applied several times to a test function. From plugin
+code you can iterate over all such settings. Example:
+
+.. code-block:: python
+
+ # content of test_mark_three_times.py
+ import pytest
+
+ pytestmark = pytest.mark.glob("module", x=1)
+
+
+ @pytest.mark.glob("class", x=2)
+ class TestClass:
+ @pytest.mark.glob("function", x=3)
+ def test_something(self):
+ pass
+
+Here we have the marker "glob" applied three times to the same
+test function. From a conftest file we can read it like this:
+
+.. code-block:: python
+
+ # content of conftest.py
+ import sys
+
+
+ def pytest_runtest_setup(item):
+ for mark in item.iter_markers(name="glob"):
+ print("glob args={} kwargs={}".format(mark.args, mark.kwargs))
+ sys.stdout.flush()
+
+Let's run this without capturing output and see what we get:
+
+.. code-block:: pytest
+
+ $ pytest -q -s
+ glob args=('function',) kwargs={'x': 3}
+ glob args=('class',) kwargs={'x': 2}
+ glob args=('module',) kwargs={'x': 1}
+ .
+ 1 passed in 0.12s
+
+Marking platform specific tests with pytest
+--------------------------------------------------------------
+
+.. regendoc:wipe
+
+Suppose you have a test suite which marks tests for particular platforms,
+namely ``pytest.mark.darwin``, ``pytest.mark.win32``, etc., and you
+also have tests that run on all platforms and have no specific
+marker. If you want a way to run only the tests
+for your particular platform, you could use the following plugin:
+
+.. code-block:: python
+
+ # content of conftest.py
+ #
+ import sys
+ import pytest
+
+ ALL = set("darwin linux win32".split())
+
+
+ def pytest_runtest_setup(item):
+ supported_platforms = ALL.intersection(mark.name for mark in item.iter_markers())
+ plat = sys.platform
+ if supported_platforms and plat not in supported_platforms:
+ pytest.skip("cannot run on platform {}".format(plat))
+
+then tests will be skipped if they were specified for a different platform.
+Let's write a little test file to show what this looks like:
+
+.. code-block:: python
+
+ # content of test_plat.py
+
+ import pytest
+
+
+ @pytest.mark.darwin
+ def test_if_apple_is_evil():
+ pass
+
+
+ @pytest.mark.linux
+ def test_if_linux_works():
+ pass
+
+
+ @pytest.mark.win32
+ def test_if_win32_crashes():
+ pass
+
+
+ def test_runs_everywhere():
+ pass
+
+then you will see two tests skipped and two tests executed, as expected:
+
+.. code-block:: pytest
+
+ $ pytest -rs # this option reports skip reasons
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 4 items
+
+ test_plat.py s.s. [100%]
+
+ ========================= short test summary info ==========================
+ SKIPPED [2] conftest.py:12: cannot run on platform linux
+ ======================= 2 passed, 2 skipped in 0.12s =======================
+
+Note that if you specify a platform via the ``-m`` marker command line option like this:
+
+.. code-block:: pytest
+
+ $ pytest -m linux
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 4 items / 3 deselected / 1 selected
+
+ test_plat.py . [100%]
+
+ ===================== 1 passed, 3 deselected in 0.12s ======================
+
+then the unmarked tests will not be run. It is thus a way to restrict the run to specific tests.
+
+Automatically adding markers based on test names
+--------------------------------------------------------
+
+.. regendoc:wipe
+
+If you have a test suite where test function names indicate a certain
+type of test, you can implement a hook that automatically defines
+markers so that you can use the ``-m`` option with it. Let's look
+at this test module:
+
+.. code-block:: python
+
+ # content of test_module.py
+
+
+ def test_interface_simple():
+ assert 0
+
+
+ def test_interface_complex():
+ assert 0
+
+
+ def test_event_simple():
+ assert 0
+
+
+ def test_something_else():
+ assert 0
+
+We want to dynamically define two markers and can do it in a
+``conftest.py`` plugin:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+ import pytest
+
+
+ def pytest_collection_modifyitems(items):
+ for item in items:
+ if "interface" in item.nodeid:
+ item.add_marker(pytest.mark.interface)
+ elif "event" in item.nodeid:
+ item.add_marker(pytest.mark.event)
+
+We can now use the ``-m`` option to select one set:
+
+.. code-block:: pytest
+
+ $ pytest -m interface --tb=short
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 4 items / 2 deselected / 2 selected
+
+ test_module.py FF [100%]
+
+ ================================= FAILURES =================================
+ __________________________ test_interface_simple ___________________________
+ test_module.py:4: in test_interface_simple
+ assert 0
+ E assert 0
+ __________________________ test_interface_complex __________________________
+ test_module.py:8: in test_interface_complex
+ assert 0
+ E assert 0
+ ========================= short test summary info ==========================
+ FAILED test_module.py::test_interface_simple - assert 0
+ FAILED test_module.py::test_interface_complex - assert 0
+ ===================== 2 failed, 2 deselected in 0.12s ======================
+
+or to select both "event" and "interface" tests:
+
+.. code-block:: pytest
+
+ $ pytest -m "interface or event" --tb=short
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 4 items / 1 deselected / 3 selected
+
+ test_module.py FFF [100%]
+
+ ================================= FAILURES =================================
+ __________________________ test_interface_simple ___________________________
+ test_module.py:4: in test_interface_simple
+ assert 0
+ E assert 0
+ __________________________ test_interface_complex __________________________
+ test_module.py:8: in test_interface_complex
+ assert 0
+ E assert 0
+ ____________________________ test_event_simple _____________________________
+ test_module.py:12: in test_event_simple
+ assert 0
+ E assert 0
+ ========================= short test summary info ==========================
+ FAILED test_module.py::test_interface_simple - assert 0
+ FAILED test_module.py::test_interface_complex - assert 0
+ FAILED test_module.py::test_event_simple - assert 0
+ ===================== 3 failed, 1 deselected in 0.12s ======================
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/multipython.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/multipython.py
new file mode 100644
index 0000000000..9005d31add
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/multipython.py
@@ -0,0 +1,72 @@
+"""
+Module containing parametrized tests that exercise cross-python
+serialization via the pickle module.
+"""
+import shutil
+import subprocess
+import textwrap
+
+import pytest
+
+pythonlist = ["python3.5", "python3.6", "python3.7"]
+
+
+@pytest.fixture(params=pythonlist)
+def python1(request, tmp_path):
+ picklefile = tmp_path / "data.pickle"
+ return Python(request.param, picklefile)
+
+
+@pytest.fixture(params=pythonlist)
+def python2(request, python1):
+ return Python(request.param, python1.picklefile)
+
+
+class Python:
+ def __init__(self, version, picklefile):
+ self.pythonpath = shutil.which(version)
+ if not self.pythonpath:
+ pytest.skip(f"{version!r} not found")
+ self.picklefile = picklefile
+
+ def dumps(self, obj):
+ dumpfile = self.picklefile.with_name("dump.py")
+ dumpfile.write_text(
+ textwrap.dedent(
+ r"""
+ import pickle
+ f = open({!r}, 'wb')
+ s = pickle.dump({!r}, f, protocol=2)
+ f.close()
+ """.format(
+ str(self.picklefile), obj
+ )
+ )
+ )
+ subprocess.check_call((self.pythonpath, str(dumpfile)))
+
+ def load_and_is_true(self, expression):
+ loadfile = self.picklefile.with_name("load.py")
+ loadfile.write_text(
+ textwrap.dedent(
+ r"""
+ import pickle
+ f = open({!r}, 'rb')
+ obj = pickle.load(f)
+ f.close()
+ res = eval({!r})
+ if not res:
+ raise SystemExit(1)
+ """.format(
+ str(self.picklefile), expression
+ )
+ )
+ )
+ print(loadfile)
+ subprocess.check_call((self.pythonpath, str(loadfile)))
+
+
+@pytest.mark.parametrize("obj", [42, {}, {1: 3}])
+def test_basic_objects(python1, python2, obj):
+ python1.dumps(obj)
+ python2.load_and_is_true(f"obj == {obj}")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython.rst
new file mode 100644
index 0000000000..f79f15b4f7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython.rst
@@ -0,0 +1,102 @@
+
+.. _`non-python tests`:
+
+Working with non-python tests
+====================================================
+
+.. _`yaml plugin`:
+
+A basic example for specifying tests in Yaml files
+--------------------------------------------------------------
+
+.. _`pytest-yamlwsgi`: http://bitbucket.org/aafshar/pytest-yamlwsgi/src/tip/pytest_yamlwsgi.py
+
+Here is an example ``conftest.py`` (extracted from Ali Afshar's special purpose `pytest-yamlwsgi`_ plugin). This ``conftest.py`` will collect ``test*.yaml`` files and will execute the yaml-formatted content as custom tests:
+
+.. include:: nonpython/conftest.py
+ :literal:
+
+You can create a simple example file:
+
+.. include:: nonpython/test_simple.yaml
+ :literal:
+
+and if you have installed :pypi:`PyYAML` or a compatible YAML parser, you can
+now execute the test specification:
+
+.. code-block:: pytest
+
+ nonpython $ pytest test_simple.yaml
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project/nonpython
+ collected 2 items
+
+ test_simple.yaml F. [100%]
+
+ ================================= FAILURES =================================
+ ______________________________ usecase: hello ______________________________
+ usecase execution failed
+ spec failed: 'some': 'other'
+ no further details known at this point.
+ ========================= short test summary info ==========================
+ FAILED test_simple.yaml::hello
+ ======================= 1 failed, 1 passed in 0.12s ========================
+
+.. regendoc:wipe
+
+You get one dot for the passing ``sub1: sub1`` check and one failure.
+Obviously, in the above ``conftest.py`` you'll want to implement a more
+interesting interpretation of the YAML values. You can easily write
+your own domain-specific testing language this way.
+
+.. note::
+
+ ``repr_failure(excinfo)`` is called for representing test failures.
+ If you create custom collection nodes you can return an error
+ representation string of your choice. It
+ will be reported as a (red) string.
+
+``reportinfo()`` is used for representing the test location and is also
+consulted when reporting in ``verbose`` mode:
+
+.. code-block:: pytest
+
+ nonpython $ pytest -v
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project/nonpython
+ collecting ... collected 2 items
+
+ test_simple.yaml::hello FAILED [ 50%]
+ test_simple.yaml::ok PASSED [100%]
+
+ ================================= FAILURES =================================
+ ______________________________ usecase: hello ______________________________
+ usecase execution failed
+ spec failed: 'some': 'other'
+ no further details known at this point.
+ ========================= short test summary info ==========================
+ FAILED test_simple.yaml::hello
+ ======================= 1 failed, 1 passed in 0.12s ========================
+
+.. regendoc:wipe
+
+While developing your custom test collection and execution, it's also
+interesting to just look at the collection tree:
+
+.. code-block:: pytest
+
+ nonpython $ pytest --collect-only
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project/nonpython
+ collected 2 items
+
+ <Package nonpython>
+ <YamlFile test_simple.yaml>
+ <YamlItem hello>
+ <YamlItem ok>
+
+ ======================== 2 tests collected in 0.12s ========================
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython/conftest.py
new file mode 100644
index 0000000000..bc39a1f6b2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython/conftest.py
@@ -0,0 +1,47 @@
+# content of conftest.py
+import pytest
+
+
+def pytest_collect_file(parent, file_path):
+ if file_path.suffix == ".yaml" and file_path.name.startswith("test"):
+ return YamlFile.from_parent(parent, path=file_path)
+
+
+class YamlFile(pytest.File):
+ def collect(self):
+ # We need a yaml parser, e.g. PyYAML.
+ import yaml
+
+ raw = yaml.safe_load(self.path.open())
+ for name, spec in sorted(raw.items()):
+ yield YamlItem.from_parent(self, name=name, spec=spec)
+
+
+class YamlItem(pytest.Item):
+ def __init__(self, *, spec, **kwargs):
+ super().__init__(**kwargs)
+ self.spec = spec
+
+ def runtest(self):
+ for name, value in sorted(self.spec.items()):
+ # Some custom test execution (dumb example follows).
+ if name != value:
+ raise YamlException(self, name, value)
+
+ def repr_failure(self, excinfo):
+ """Called when self.runtest() raises an exception."""
+ if isinstance(excinfo.value, YamlException):
+ return "\n".join(
+ [
+ "usecase execution failed",
+ " spec failed: {1!r}: {2!r}".format(*excinfo.value.args),
+ " no further details known at this point.",
+ ]
+ )
+
+ def reportinfo(self):
+ return self.path, 0, f"usecase: {self.name}"
+
+
+class YamlException(Exception):
+ """Custom exception for error reporting."""
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython/test_simple.yaml b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython/test_simple.yaml
new file mode 100644
index 0000000000..8e3e7a4bbc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/nonpython/test_simple.yaml
@@ -0,0 +1,7 @@
+# test_simple.yaml
+ok:
+ sub1: sub1
+
+hello:
+ world: world
+ some: other
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/parametrize.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/parametrize.rst
new file mode 100644
index 0000000000..66d72f3cc0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/parametrize.rst
@@ -0,0 +1,708 @@
+
+.. _paramexamples:
+
+Parametrizing tests
+=================================================
+
+.. currentmodule:: _pytest.python
+
+``pytest`` makes it easy to parametrize test functions.
+For basic docs, see :ref:`parametrize-basics`.
+
+In the following we provide some examples using
+the built-in mechanisms.
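+
+As a quick reminder of the basic form (a minimal sketch; see
+:ref:`parametrize-basics` for the full introduction):
+
+.. code-block:: python
+
+    import pytest
+
+
+    @pytest.mark.parametrize("value", [1, 2, 3])
+    def test_is_positive(value):
+        assert value > 0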
+
+Generating parameter combinations, depending on the command line
+----------------------------------------------------------------------------
+
+.. regendoc:wipe
+
+Let's say we want to execute a test with different computation
+parameters, and the parameter range should be determined by a command
+line argument. Let's first write a simple (do-nothing) computation test:
+
+.. code-block:: python
+
+ # content of test_compute.py
+
+
+ def test_compute(param1):
+ assert param1 < 4
+
+Now we add a test configuration like this:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+
+ def pytest_addoption(parser):
+ parser.addoption("--all", action="store_true", help="run all combinations")
+
+
+ def pytest_generate_tests(metafunc):
+ if "param1" in metafunc.fixturenames:
+ if metafunc.config.getoption("all"):
+ end = 5
+ else:
+ end = 2
+ metafunc.parametrize("param1", range(end))
+
+This means that we only run 2 tests if we do not pass ``--all``:
+
+.. code-block:: pytest
+
+ $ pytest -q test_compute.py
+ .. [100%]
+ 2 passed in 0.12s
+
+We run only two computations, so we see two dots.
+Let's run the full monty:
+
+.. code-block:: pytest
+
+ $ pytest -q --all
+ ....F [100%]
+ ================================= FAILURES =================================
+ _____________________________ test_compute[4] ______________________________
+
+ param1 = 4
+
+ def test_compute(param1):
+ > assert param1 < 4
+ E assert 4 < 4
+
+ test_compute.py:4: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_compute.py::test_compute[4] - assert 4 < 4
+ 1 failed, 4 passed in 0.12s
+
+As expected when running the full range of ``param1`` values
+we'll get an error on the last one.
+
+
+Different options for test IDs
+------------------------------------
+
+pytest will build a string that is the test ID for each set of values in a
+parametrized test. These IDs can be used with ``-k`` to select specific cases
+to run, and they will also identify the specific case when one is failing.
+Running pytest with ``--collect-only`` will show the generated IDs.
+
+Numbers, strings, booleans and None will have their usual string representation
+used in the test ID. For other objects, pytest will make a string based on
+the argument name:
+
+.. code-block:: python
+
+ # content of test_time.py
+
+ from datetime import datetime, timedelta
+
+ import pytest
+
+ testdata = [
+ (datetime(2001, 12, 12), datetime(2001, 12, 11), timedelta(1)),
+ (datetime(2001, 12, 11), datetime(2001, 12, 12), timedelta(-1)),
+ ]
+
+
+ @pytest.mark.parametrize("a,b,expected", testdata)
+ def test_timedistance_v0(a, b, expected):
+ diff = a - b
+ assert diff == expected
+
+
+ @pytest.mark.parametrize("a,b,expected", testdata, ids=["forward", "backward"])
+ def test_timedistance_v1(a, b, expected):
+ diff = a - b
+ assert diff == expected
+
+
+ def idfn(val):
+ if isinstance(val, (datetime,)):
+ # note this wouldn't show any hours/minutes/seconds
+ return val.strftime("%Y%m%d")
+
+
+ @pytest.mark.parametrize("a,b,expected", testdata, ids=idfn)
+ def test_timedistance_v2(a, b, expected):
+ diff = a - b
+ assert diff == expected
+
+
+ @pytest.mark.parametrize(
+ "a,b,expected",
+ [
+ pytest.param(
+ datetime(2001, 12, 12), datetime(2001, 12, 11), timedelta(1), id="forward"
+ ),
+ pytest.param(
+ datetime(2001, 12, 11), datetime(2001, 12, 12), timedelta(-1), id="backward"
+ ),
+ ],
+ )
+ def test_timedistance_v3(a, b, expected):
+ diff = a - b
+ assert diff == expected
+
+In ``test_timedistance_v0``, we let pytest generate the test IDs.
+
+In ``test_timedistance_v1``, we specified ``ids`` as a list of strings which were
+used as the test IDs. These are succinct, but can be a pain to maintain.
+
+In ``test_timedistance_v2``, we specified ``ids`` as a function that can generate a
+string representation to make part of the test ID. So our ``datetime`` values use the
+label generated by ``idfn``, but because we didn't generate a label for ``timedelta``
+objects, they are still using the default pytest representation:
+
+.. code-block:: pytest
+
+ $ pytest test_time.py --collect-only
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 8 items
+
+ <Module test_time.py>
+ <Function test_timedistance_v0[a0-b0-expected0]>
+ <Function test_timedistance_v0[a1-b1-expected1]>
+ <Function test_timedistance_v1[forward]>
+ <Function test_timedistance_v1[backward]>
+ <Function test_timedistance_v2[20011212-20011211-expected0]>
+ <Function test_timedistance_v2[20011211-20011212-expected1]>
+ <Function test_timedistance_v3[forward]>
+ <Function test_timedistance_v3[backward]>
+
+ ======================== 8 tests collected in 0.12s ========================
+
+In ``test_timedistance_v3``, we used ``pytest.param`` to specify the test IDs
+together with the actual data, instead of listing them separately.
+
+A quick port of "testscenarios"
+------------------------------------
+
+Here is a quick port to run tests configured with :pypi:`testscenarios`,
+an add-on from Robert Collins for the standard unittest framework. We
+only have to work a bit to construct the correct arguments for pytest's
+:py:func:`Metafunc.parametrize`:
+
+.. code-block:: python
+
+ # content of test_scenarios.py
+
+
+ def pytest_generate_tests(metafunc):
+ idlist = []
+ argvalues = []
+ for scenario in metafunc.cls.scenarios:
+ idlist.append(scenario[0])
+ items = scenario[1].items()
+ argnames = [x[0] for x in items]
+ argvalues.append([x[1] for x in items])
+ metafunc.parametrize(argnames, argvalues, ids=idlist, scope="class")
+
+
+ scenario1 = ("basic", {"attribute": "value"})
+ scenario2 = ("advanced", {"attribute": "value2"})
+
+
+ class TestSampleWithScenarios:
+ scenarios = [scenario1, scenario2]
+
+ def test_demo1(self, attribute):
+ assert isinstance(attribute, str)
+
+ def test_demo2(self, attribute):
+ assert isinstance(attribute, str)
+
+This is a fully self-contained example which you can run with:
+
+.. code-block:: pytest
+
+ $ pytest test_scenarios.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 4 items
+
+ test_scenarios.py .... [100%]
+
+ ============================ 4 passed in 0.12s =============================
+
+If you just collect tests, you'll also nicely see 'advanced' and 'basic' as variants for the test function:
+
+.. code-block:: pytest
+
+ $ pytest --collect-only test_scenarios.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 4 items
+
+ <Module test_scenarios.py>
+ <Class TestSampleWithScenarios>
+ <Function test_demo1[basic]>
+ <Function test_demo2[basic]>
+ <Function test_demo1[advanced]>
+ <Function test_demo2[advanced]>
+
+ ======================== 4 tests collected in 0.12s ========================
+
+Note that we told ``metafunc.parametrize()`` that our scenario values
+should be considered class-scoped. Since pytest 2.3 this leads to a
+resource-based ordering.
+
+Deferring the setup of parametrized resources
+---------------------------------------------------
+
+.. regendoc:wipe
+
+The parametrization of test functions happens at collection
+time. It is a good idea to set up expensive resources, such as DB
+connections or subprocesses, only when the actual test is run.
+Here is a simple example of how you can achieve that. This test
+requires a ``db`` object fixture:
+
+.. code-block:: python
+
+ # content of test_backends.py
+
+ import pytest
+
+
+ def test_db_initialized(db):
+ # a dummy test
+ if db.__class__.__name__ == "DB2":
+ pytest.fail("deliberately failing for demo purposes")
+
+We can now add a test configuration that generates two invocations of
+the ``test_db_initialized`` function and also implements a factory that
+creates a database object for the actual test invocations:
+
+.. code-block:: python
+
+ # content of conftest.py
+ import pytest
+
+
+ def pytest_generate_tests(metafunc):
+ if "db" in metafunc.fixturenames:
+ metafunc.parametrize("db", ["d1", "d2"], indirect=True)
+
+
+ class DB1:
+ "one database object"
+
+
+ class DB2:
+ "alternative database object"
+
+
+ @pytest.fixture
+ def db(request):
+ if request.param == "d1":
+ return DB1()
+ elif request.param == "d2":
+ return DB2()
+ else:
+ raise ValueError("invalid internal test config")
+
+Let's first see how it looks at collection time:
+
+.. code-block:: pytest
+
+ $ pytest test_backends.py --collect-only
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 2 items
+
+ <Module test_backends.py>
+ <Function test_db_initialized[d1]>
+ <Function test_db_initialized[d2]>
+
+ ======================== 2 tests collected in 0.12s ========================
+
+And then when we run the test:
+
+.. code-block:: pytest
+
+ $ pytest -q test_backends.py
+ .F [100%]
+ ================================= FAILURES =================================
+ _________________________ test_db_initialized[d2] __________________________
+
+ db = <conftest.DB2 object at 0xdeadbeef0001>
+
+ def test_db_initialized(db):
+ # a dummy test
+ if db.__class__.__name__ == "DB2":
+ > pytest.fail("deliberately failing for demo purposes")
+ E Failed: deliberately failing for demo purposes
+
+ test_backends.py:8: Failed
+ ========================= short test summary info ==========================
+ FAILED test_backends.py::test_db_initialized[d2] - Failed: deliberately f...
+ 1 failed, 1 passed in 0.12s
+
+The first invocation, where ``db`` is a ``DB1`` instance, passed, while the second, where ``db`` is a ``DB2`` instance, failed. Our ``db`` fixture function instantiated each of the DB objects during the setup phase, while ``pytest_generate_tests`` generated two corresponding calls to ``test_db_initialized`` during the collection phase.
+
+Indirect parametrization
+---------------------------------------------------
+
+Using the ``indirect=True`` parameter when parametrizing a test allows you to
+parametrize a test with a fixture that receives the values before passing them
+on to the test:
+
+.. code-block:: python
+
+ import pytest
+
+
+ @pytest.fixture
+ def fixt(request):
+ return request.param * 3
+
+
+ @pytest.mark.parametrize("fixt", ["a", "b"], indirect=True)
+ def test_indirect(fixt):
+ assert len(fixt) == 3
+
+This can be used, for example, to do more expensive setup at test run time in
+the fixture, rather than having to run those setup steps at collection time.
+
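+A minimal sketch of that idea, using a hypothetical ``make_connection`` helper
+to stand in for the expensive work:
+
+.. code-block:: python
+
+    import pytest
+
+
+    def make_connection(dsn):
+        # stand-in for something expensive (opening sockets, starting services, ...)
+        return {"dsn": dsn, "connected": True}
+
+
+    @pytest.fixture
+    def conn(request):
+        # the expensive work happens here, at test run time, not at collection time
+        return make_connection(request.param)
+
+
+    @pytest.mark.parametrize("conn", ["db://one", "db://two"], indirect=True)
+    def test_connection(conn):
+        assert conn["connected"]
+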
+.. regendoc:wipe
+
+Apply indirect on particular arguments
+---------------------------------------------------
+
+Very often parametrization uses more than one argument name. You can apply the
+``indirect`` parameter to particular arguments only, by passing a list or tuple
+of argument names to ``indirect``. In the example below there is a function
+``test_indirect`` which uses two fixtures: ``x`` and ``y``. Here we pass
+``indirect`` a list containing the name of the fixture ``x``. The indirect
+parameter will be applied to this argument only, and the value ``a`` will be
+passed to the respective fixture function:
+
+.. code-block:: python
+
+ # content of test_indirect_list.py
+
+ import pytest
+
+
+ @pytest.fixture(scope="function")
+ def x(request):
+ return request.param * 3
+
+
+ @pytest.fixture(scope="function")
+ def y(request):
+ return request.param * 2
+
+
+ @pytest.mark.parametrize("x, y", [("a", "b")], indirect=["x"])
+ def test_indirect(x, y):
+ assert x == "aaa"
+ assert y == "b"
+
+The result of this test will be successful:
+
+.. code-block:: pytest
+
+ $ pytest -v test_indirect_list.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 1 item
+
+ test_indirect_list.py::test_indirect[a-b] PASSED [100%]
+
+ ============================ 1 passed in 0.12s =============================
+
+.. regendoc:wipe
+
+Parametrizing test methods through per-class configuration
+--------------------------------------------------------------
+
+.. _`unittest parametrizer`: https://github.com/testing-cabal/unittest-ext/blob/master/params.py
+
+
+Here is an example ``pytest_generate_tests`` function implementing a
+parametrization scheme similar to Michael Foord's `unittest
+parametrizer`_ but in a lot less code:
+
+.. code-block:: python
+
+ # content of ./test_parametrize.py
+ import pytest
+
+
+ def pytest_generate_tests(metafunc):
+ # called once per each test function
+ funcarglist = metafunc.cls.params[metafunc.function.__name__]
+ argnames = sorted(funcarglist[0])
+ metafunc.parametrize(
+ argnames, [[funcargs[name] for name in argnames] for funcargs in funcarglist]
+ )
+
+
+ class TestClass:
+ # a map specifying multiple argument sets for a test method
+ params = {
+ "test_equals": [dict(a=1, b=2), dict(a=3, b=3)],
+ "test_zerodivision": [dict(a=1, b=0)],
+ }
+
+ def test_equals(self, a, b):
+ assert a == b
+
+ def test_zerodivision(self, a, b):
+ with pytest.raises(ZeroDivisionError):
+ a / b
+
+Our test generator looks up a class-level definition which specifies which
+argument sets to use for each test function. Let's run it:
+
+.. code-block:: pytest
+
+ $ pytest -q
+ F.. [100%]
+ ================================= FAILURES =================================
+ ________________________ TestClass.test_equals[1-2] ________________________
+
+ self = <test_parametrize.TestClass object at 0xdeadbeef0002>, a = 1, b = 2
+
+ def test_equals(self, a, b):
+ > assert a == b
+ E assert 1 == 2
+
+ test_parametrize.py:21: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_parametrize.py::TestClass::test_equals[1-2] - assert 1 == 2
+ 1 failed, 2 passed in 0.12s
+
+Indirect parametrization with multiple fixtures
+--------------------------------------------------------------
+
+Here is a stripped-down real-life example of using parametrized
+testing for testing serialization of objects between different Python
+interpreters. We define a ``test_basic_objects`` function which
+is to be run with different sets of arguments for its three arguments:
+
+* ``python1``: first python interpreter, run to pickle-dump an object to a file
+* ``python2``: second interpreter, run to pickle-load an object from a file
+* ``obj``: object to be dumped/loaded
+
+.. literalinclude:: multipython.py
+
+Running it results in some skips if we don't have all the Python interpreters installed, and otherwise runs all combinations (3 interpreters times 3 interpreters times 3 objects to serialize/deserialize):
+
+.. code-block:: pytest
+
+ . $ pytest -rs -q multipython.py
+ sssssssssssssssssssssssssss [100%]
+ ========================= short test summary info ==========================
+ SKIPPED [9] multipython.py:29: 'python3.5' not found
+ SKIPPED [9] multipython.py:29: 'python3.6' not found
+ SKIPPED [9] multipython.py:29: 'python3.7' not found
+ 27 skipped in 0.12s
+
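+The ``multipython.py`` file included above wires this up with parametrized
+fixtures; the following is only a rough sketch of that pattern (the helper
+class and the exact subprocess invocations are simplified, not the shipped
+code):
+
+.. code-block:: python
+
+    # sketch of the multipython.py pattern -- illustrative only
+    import shutil
+    import subprocess
+    import textwrap
+
+    import pytest
+
+    pythonlist = ["python3.5", "python3.6", "python3.7"]
+
+
+    class Python:
+        def __init__(self, version, picklefile):
+            self.pythonpath = shutil.which(version)
+            if not self.pythonpath:
+                pytest.skip(f"{version!r} not found")
+            self.picklefile = picklefile
+
+        def dumps(self, obj):
+            # first interpreter: pickle-dump the object to a file
+            program = textwrap.dedent(
+                f"""
+                import pickle
+                with open({str(self.picklefile)!r}, "wb") as f:
+                    pickle.dump({obj!r}, f, protocol=2)
+                """
+            )
+            subprocess.run([self.pythonpath, "-c", program], check=True)
+
+        def load_and_is_true(self, expression):
+            # second interpreter: pickle-load the file and evaluate the expression
+            program = textwrap.dedent(
+                f"""
+                import pickle
+                with open({str(self.picklefile)!r}, "rb") as f:
+                    obj = pickle.load(f)
+                assert {expression}
+                """
+            )
+            subprocess.run([self.pythonpath, "-c", program], check=True)
+
+
+    @pytest.fixture(params=pythonlist)
+    def python1(request, tmp_path):
+        return Python(request.param, tmp_path / "data.pickle")
+
+
+    @pytest.fixture(params=pythonlist)
+    def python2(request, python1):
+        return Python(request.param, python1.picklefile)
+
+
+    @pytest.mark.parametrize("obj", [42, {}, {1: 3}])
+    def test_basic_objects(python1, python2, obj):
+        python1.dumps(obj)
+        python2.load_and_is_true(f"obj == {obj!r}")
+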
+Indirect parametrization of optional implementations/imports
+--------------------------------------------------------------------
+
+If you want to compare the outcomes of several implementations of a given
+API, you can write test functions that receive the already imported implementations
+and get skipped in case the implementation is not importable/available. Let's
+say we have a "base" implementation and the others (possibly optimized versions)
+need to provide similar results:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+ import pytest
+
+
+ @pytest.fixture(scope="session")
+ def basemod(request):
+ return pytest.importorskip("base")
+
+
+ @pytest.fixture(scope="session", params=["opt1", "opt2"])
+ def optmod(request):
+ return pytest.importorskip(request.param)
+
+And then a base implementation of a simple function:
+
+.. code-block:: python
+
+ # content of base.py
+ def func1():
+ return 1
+
+And an optimized version:
+
+.. code-block:: python
+
+ # content of opt1.py
+ def func1():
+ return 1.0001
+
+And finally a little test module:
+
+.. code-block:: python
+
+ # content of test_module.py
+
+
+ def test_func1(basemod, optmod):
+ assert round(basemod.func1(), 3) == round(optmod.func1(), 3)
+
+
+If you run this with reporting for skips enabled:
+
+.. code-block:: pytest
+
+ $ pytest -rs test_module.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 2 items
+
+ test_module.py .s [100%]
+
+ ========================= short test summary info ==========================
+ SKIPPED [1] conftest.py:12: could not import 'opt2': No module named 'opt2'
+ ======================= 1 passed, 1 skipped in 0.12s =======================
+
+You'll see that we don't have an ``opt2`` module and thus the second test run
+of our ``test_func1`` was skipped. A few notes:
+
+- the fixture functions in the ``conftest.py`` file are "session-scoped" because we
+ don't need to import more than once
+
+- if you have multiple test functions and a skipped import, you will see
+ the ``[1]`` count increasing in the report
+
+- you can put :ref:`@pytest.mark.parametrize <@pytest.mark.parametrize>` style
+  parametrization on the test functions to parametrize input/output
+  values as well (see the sketch below).
+
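+For the last point, here is a minimal sketch (the test name and parameter
+values are made up) combining ``@pytest.mark.parametrize`` with the fixtures
+above:
+
+.. code-block:: python
+
+    # hypothetical addition to test_module.py
+    import pytest
+
+
+    @pytest.mark.parametrize("ndigits", [0, 1, 2])
+    def test_func1_rounded(basemod, optmod, ndigits):
+        # runs once per available optmod implementation and per ndigits value
+        assert round(basemod.func1(), ndigits) == round(optmod.func1(), ndigits)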
+
+Set marks or test ID for individual parametrized test
+--------------------------------------------------------------------
+
+Use ``pytest.param`` to apply marks or set a test ID for an individual parametrized test.
+For example:
+
+.. code-block:: python
+
+ # content of test_pytest_param_example.py
+ import pytest
+
+
+ @pytest.mark.parametrize(
+ "test_input,expected",
+ [
+ ("3+5", 8),
+ pytest.param("1+7", 8, marks=pytest.mark.basic),
+ pytest.param("2+4", 6, marks=pytest.mark.basic, id="basic_2+4"),
+ pytest.param(
+ "6*9", 42, marks=[pytest.mark.basic, pytest.mark.xfail], id="basic_6*9"
+ ),
+ ],
+ )
+ def test_eval(test_input, expected):
+ assert eval(test_input) == expected
+
+In this example, we have four parametrized tests. Except for the first test,
+we mark the remaining three parametrized tests with the custom marker ``basic``,
+and for the fourth test we also use the built-in mark ``xfail`` to indicate that
+this test is expected to fail. For explicitness, we set test IDs for some tests.
+
+Then run ``pytest`` with verbose mode and with only the ``basic`` marker:
+
+.. code-block:: pytest
+
+ $ pytest -v -m basic
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 24 items / 21 deselected / 3 selected
+
+ test_pytest_param_example.py::test_eval[1+7-8] PASSED [ 33%]
+ test_pytest_param_example.py::test_eval[basic_2+4] PASSED [ 66%]
+ test_pytest_param_example.py::test_eval[basic_6*9] XFAIL [100%]
+
+ =============== 2 passed, 21 deselected, 1 xfailed in 0.12s ================
+
+As a result:
+
+- Four tests from this module were collected.
+- One test was deselected because it doesn't have the ``basic`` mark.
+- Three tests with the ``basic`` mark were selected.
+- The test ``test_eval[1+7-8]`` passed, but the name is autogenerated and confusing.
+- The test ``test_eval[basic_2+4]`` passed.
+- The test ``test_eval[basic_6*9]`` was expected to fail and did fail.
+
+.. _`parametrizing_conditional_raising`:
+
+Parametrizing conditional raising
+--------------------------------------------------------------------
+
+Use :func:`pytest.raises` with the
+:ref:`pytest.mark.parametrize ref` decorator to write parametrized tests
+in which some tests raise exceptions and others do not.
+
+It is helpful to define a no-op context manager ``does_not_raise`` to serve
+as a complement to ``raises``. For example:
+
+.. code-block:: python
+
+ from contextlib import contextmanager
+ import pytest
+
+
+ @contextmanager
+ def does_not_raise():
+ yield
+
+
+ @pytest.mark.parametrize(
+ "example_input,expectation",
+ [
+ (3, does_not_raise()),
+ (2, does_not_raise()),
+ (1, does_not_raise()),
+ (0, pytest.raises(ZeroDivisionError)),
+ ],
+ )
+ def test_division(example_input, expectation):
+ """Test how much I know division."""
+ with expectation:
+ assert (6 / example_input) is not None
+
+In the example above, the first three test cases should run without raising
+an exception, while the fourth should raise ``ZeroDivisionError``.
+
+If you're only supporting Python 3.7+, you can simply use ``nullcontext``
+to define ``does_not_raise``:
+
+.. code-block:: python
+
+ from contextlib import nullcontext as does_not_raise
+
+Or, if you're supporting Python 3.3+ you can use:
+
+.. code-block:: python
+
+ from contextlib import ExitStack as does_not_raise
+
+Or, if desired, you can ``pip install contextlib2`` and use:
+
+.. code-block:: python
+
+ from contextlib2 import nullcontext as does_not_raise
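+
+Whichever definition you pick, a short sketch (assuming Python 3.7+ for
+``nullcontext``) can also check the computed value whenever no exception is
+expected:
+
+.. code-block:: python
+
+    from contextlib import nullcontext as does_not_raise
+
+    import pytest
+
+
+    @pytest.mark.parametrize(
+        "example_input,expectation,expected",
+        [
+            (3, does_not_raise(), 2),
+            (2, does_not_raise(), 3),
+            (0, pytest.raises(ZeroDivisionError), None),
+        ],
+    )
+    def test_division_value(example_input, expectation, expected):
+        """Check the quotient only when no exception is expected."""
+        with expectation:
+            assert (6 / example_input) == expected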
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/pythoncollection.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/pythoncollection.py
new file mode 100644
index 0000000000..8742526a19
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/pythoncollection.py
@@ -0,0 +1,14 @@
+# run this with $ pytest --collect-only test_collectonly.py
+#
+
+
+def test_function():
+ pass
+
+
+class TestClass:
+ def test_method(self):
+ pass
+
+ def test_anothermethod(self):
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/pythoncollection.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/pythoncollection.rst
new file mode 100644
index 0000000000..b9c2386ac5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/pythoncollection.rst
@@ -0,0 +1,321 @@
+Changing standard (Python) test discovery
+===============================================
+
+Ignore paths during test collection
+-----------------------------------
+
+You can easily ignore certain test directories and modules during collection
+by passing the ``--ignore=path`` option on the command line. ``pytest`` allows
+multiple ``--ignore`` options. Example:
+
+.. code-block:: text
+
+ tests/
+ |-- example
+ | |-- test_example_01.py
+ | |-- test_example_02.py
+ | '-- test_example_03.py
+ |-- foobar
+ | |-- test_foobar_01.py
+ | |-- test_foobar_02.py
+ | '-- test_foobar_03.py
+ '-- hello
+ '-- world
+ |-- test_world_01.py
+ |-- test_world_02.py
+ '-- test_world_03.py
+
+Now if you invoke ``pytest`` with ``--ignore=tests/foobar/test_foobar_03.py --ignore=tests/hello/``,
+you will see that ``pytest`` only collects the test modules that do not match the paths specified:
+
+.. code-block:: pytest
+
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-5.x.y, py-1.x.y, pluggy-0.x.y
+ rootdir: $REGENDOC_TMPDIR, inifile:
+ collected 5 items
+
+ tests/example/test_example_01.py . [ 20%]
+ tests/example/test_example_02.py . [ 40%]
+ tests/example/test_example_03.py . [ 60%]
+ tests/foobar/test_foobar_01.py . [ 80%]
+ tests/foobar/test_foobar_02.py . [100%]
+
+ ========================= 5 passed in 0.02 seconds =========================
+
+The ``--ignore-glob`` option allows ignoring test file paths based on Unix shell-style wildcards.
+If you want to exclude test modules that end with ``_01.py``, execute ``pytest`` with ``--ignore-glob='*_01.py'``.
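+
+For example, with the layout above:
+
+.. code-block:: bash
+
+    pytest --ignore-glob='*_01.py'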
+
+Deselect tests during test collection
+-------------------------------------
+
+Tests can individually be deselected during collection by passing the ``--deselect=item`` option.
+For example, say ``tests/foobar/test_foobar_01.py`` contains ``test_a`` and ``test_b``.
+You can run all of the tests within ``tests/`` *except* for ``tests/foobar/test_foobar_01.py::test_a``
+by invoking ``pytest`` with ``--deselect tests/foobar/test_foobar_01.py::test_a``.
+``pytest`` allows multiple ``--deselect`` options.
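+
+One possible invocation, using the example above:
+
+.. code-block:: bash
+
+    pytest tests/ --deselect tests/foobar/test_foobar_01.py::test_a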
+
+Keeping duplicate paths specified from command line
+----------------------------------------------------
+
+By default, ``pytest`` ignores duplicate paths specified on the command line.
+Example:
+
+.. code-block:: pytest
+
+ pytest path_a path_a
+
+ ...
+ collected 1 item
+ ...
+
+The tests are only collected once.
+
+To collect duplicate tests, use the ``--keep-duplicates`` option on the command line.
+Example:
+
+.. code-block:: pytest
+
+ pytest --keep-duplicates path_a path_a
+
+ ...
+ collected 2 items
+ ...
+
+As the de-duplication only applies to directories, if you specify a single test file twice, ``pytest`` will
+still collect it twice, even if ``--keep-duplicates`` is not specified.
+Example:
+
+.. code-block:: pytest
+
+ pytest test_a.py test_a.py
+
+ ...
+ collected 2 items
+ ...
+
+
+Changing directory recursion
+-----------------------------------------------------
+
+You can set the :confval:`norecursedirs` option in an ini-file, for example your ``pytest.ini`` in the project root directory:
+
+.. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ norecursedirs = .svn _build tmp*
+
+This would tell ``pytest`` to not recurse into typical subversion or sphinx-build directories or into any ``tmp`` prefixed directory.
+
+.. _`change naming conventions`:
+
+Changing naming conventions
+-----------------------------------------------------
+
+You can configure different naming conventions by setting
+the :confval:`python_files`, :confval:`python_classes` and
+:confval:`python_functions` in your :ref:`configuration file <config file formats>`.
+Here is an example:
+
+.. code-block:: ini
+
+ # content of pytest.ini
+ # Example 1: have pytest look for "check" instead of "test"
+ [pytest]
+ python_files = check_*.py
+ python_classes = Check
+ python_functions = *_check
+
+This would make ``pytest`` look for tests in files that match the ``check_*.py``
+glob pattern, classes whose names start with ``Check``, and functions and methods
+that match ``*_check``. For example, if we have:
+
+.. code-block:: python
+
+ # content of check_myapp.py
+ class CheckMyApp:
+ def simple_check(self):
+ pass
+
+ def complex_check(self):
+ pass
+
+The test collection would look like this:
+
+.. code-block:: pytest
+
+ $ pytest --collect-only
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project, configfile: pytest.ini
+ collected 2 items
+
+ <Module check_myapp.py>
+ <Class CheckMyApp>
+ <Function simple_check>
+ <Function complex_check>
+
+ ======================== 2 tests collected in 0.12s ========================
+
+You can check for multiple glob patterns by adding a space between the patterns:
+
+.. code-block:: ini
+
+ # Example 2: have pytest look for files with "test" and "example"
+ # content of pytest.ini
+ [pytest]
+ python_files = test_*.py example_*.py
+
+.. note::
+
+   The ``python_functions`` and ``python_classes`` options have no effect
+   for ``unittest.TestCase`` test discovery because pytest delegates
+   discovery of test case methods to unittest code.
+
+Interpreting cmdline arguments as Python packages
+-----------------------------------------------------
+
+You can use the ``--pyargs`` option to make ``pytest`` try
+interpreting arguments as Python package names, deriving
+their file system path and then running the tests. For
+example, if you have unittest2 installed you can type:
+
+.. code-block:: bash
+
+ pytest --pyargs unittest2.test.test_skipping -q
+
+which would run the respective test module. As with
+other options, you can make this change permanent through an ini-file
+and the :confval:`addopts` option:
+
+.. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ addopts = --pyargs
+
+Now a simple invocation of ``pytest NAME`` will check
+if NAME exists as an importable package/module and otherwise
+treat it as a filesystem path.
+
+Finding out what is collected
+-----------------------------------------------
+
+You can always peek at the collection tree without running tests like this:
+
+.. code-block:: pytest
+
+ . $ pytest --collect-only pythoncollection.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project, configfile: pytest.ini
+ collected 3 items
+
+ <Module CWD/pythoncollection.py>
+ <Function test_function>
+ <Class TestClass>
+ <Function test_method>
+ <Function test_anothermethod>
+
+ ======================== 3 tests collected in 0.12s ========================
+
+.. _customizing-test-collection:
+
+Customizing test collection
+---------------------------
+
+.. regendoc:wipe
+
+You can easily instruct ``pytest`` to discover tests from every Python file:
+
+.. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ python_files = *.py
+
+However, many projects will have a ``setup.py`` which they don't want to be
+imported. Moreover, there may be files that are only importable by a specific
+Python version. For such cases you can dynamically define files to be ignored by
+listing them in a ``conftest.py`` file:
+
+.. code-block:: python
+
+ # content of conftest.py
+ import sys
+
+ collect_ignore = ["setup.py"]
+ if sys.version_info[0] > 2:
+ collect_ignore.append("pkg/module_py2.py")
+
+and then if you have a module file like this:
+
+.. code-block:: python
+
+ # content of pkg/module_py2.py
+ def test_only_on_python2():
+ try:
+ assert 0
+ except Exception, e:
+ pass
+
+and a ``setup.py`` dummy file like this:
+
+.. code-block:: python
+
+ # content of setup.py
+ 0 / 0 # will raise exception if imported
+
+If you run with a Python 2 interpreter, ``pytest`` will find the one test and
+leave out the ``setup.py`` file:
+
+.. code-block:: pytest
+
+ #$ pytest --collect-only
+ ====== test session starts ======
+ platform linux2 -- Python 2.7.10, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
+ rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
+ collected 1 items
+ <Module 'pkg/module_py2.py'>
+ <Function 'test_only_on_python2'>
+
+ ====== 1 tests found in 0.04 seconds ======
+
+If you run with a Python 3 interpreter, both the one test and the ``setup.py``
+file will be left out:
+
+.. code-block:: pytest
+
+ $ pytest --collect-only
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project, configfile: pytest.ini
+ collected 0 items
+
+ ======================= no tests collected in 0.12s ========================
+
+It's also possible to ignore files based on Unix shell-style wildcards by adding
+patterns to :globalvar:`collect_ignore_glob`.
+
+The following example ``conftest.py`` ignores the file ``setup.py`` and in
+addition all files that end with ``*_py2.py`` when executed with a Python 3
+interpreter:
+
+.. code-block:: python
+
+ # content of conftest.py
+ import sys
+
+ collect_ignore = ["setup.py"]
+ if sys.version_info[0] > 2:
+ collect_ignore_glob = ["*_py2.py"]
+
+Since pytest 2.6, users can prevent pytest from discovering classes that start
+with ``Test`` by setting a boolean ``__test__`` attribute to ``False``.
+
+.. code-block:: python
+
+ # Will not be discovered as a test
+ class TestClass:
+ __test__ = False
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/reportingdemo.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/reportingdemo.rst
new file mode 100644
index 0000000000..cab9314361
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/reportingdemo.rst
@@ -0,0 +1,708 @@
+.. _`tbreportdemo`:
+
+Demo of Python failure reports with pytest
+==========================================
+
+Here is a nice run of several failures and how ``pytest`` presents things:
+
+.. code-block:: pytest
+
+ assertion $ pytest failure_demo.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project/assertion
+ collected 44 items
+
+ failure_demo.py FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF [100%]
+
+ ================================= FAILURES =================================
+ ___________________________ test_generative[3-6] ___________________________
+
+ param1 = 3, param2 = 6
+
+ @pytest.mark.parametrize("param1, param2", [(3, 6)])
+ def test_generative(param1, param2):
+ > assert param1 * 2 < param2
+ E assert (3 * 2) < 6
+
+ failure_demo.py:19: AssertionError
+ _________________________ TestFailing.test_simple __________________________
+
+ self = <failure_demo.TestFailing object at 0xdeadbeef0001>
+
+ def test_simple(self):
+ def f():
+ return 42
+
+ def g():
+ return 43
+
+ > assert f() == g()
+ E assert 42 == 43
+ E + where 42 = <function TestFailing.test_simple.<locals>.f at 0xdeadbeef0002>()
+ E + and 43 = <function TestFailing.test_simple.<locals>.g at 0xdeadbeef0003>()
+
+ failure_demo.py:30: AssertionError
+ ____________________ TestFailing.test_simple_multiline _____________________
+
+ self = <failure_demo.TestFailing object at 0xdeadbeef0004>
+
+ def test_simple_multiline(self):
+ > otherfunc_multi(42, 6 * 9)
+
+ failure_demo.py:33:
+ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
+
+ a = 42, b = 54
+
+ def otherfunc_multi(a, b):
+ > assert a == b
+ E assert 42 == 54
+
+ failure_demo.py:14: AssertionError
+ ___________________________ TestFailing.test_not ___________________________
+
+ self = <failure_demo.TestFailing object at 0xdeadbeef0005>
+
+ def test_not(self):
+ def f():
+ return 42
+
+ > assert not f()
+ E assert not 42
+ E + where 42 = <function TestFailing.test_not.<locals>.f at 0xdeadbeef0006>()
+
+ failure_demo.py:39: AssertionError
+ _________________ TestSpecialisedExplanations.test_eq_text _________________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef0007>
+
+ def test_eq_text(self):
+ > assert "spam" == "eggs"
+ E AssertionError: assert 'spam' == 'eggs'
+ E - eggs
+ E + spam
+
+ failure_demo.py:44: AssertionError
+ _____________ TestSpecialisedExplanations.test_eq_similar_text _____________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef0008>
+
+ def test_eq_similar_text(self):
+ > assert "foo 1 bar" == "foo 2 bar"
+ E AssertionError: assert 'foo 1 bar' == 'foo 2 bar'
+ E - foo 2 bar
+ E ? ^
+ E + foo 1 bar
+ E ? ^
+
+ failure_demo.py:47: AssertionError
+ ____________ TestSpecialisedExplanations.test_eq_multiline_text ____________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef0009>
+
+ def test_eq_multiline_text(self):
+ > assert "foo\nspam\nbar" == "foo\neggs\nbar"
+ E AssertionError: assert 'foo\nspam\nbar' == 'foo\neggs\nbar'
+ E foo
+ E - eggs
+ E + spam
+ E bar
+
+ failure_demo.py:50: AssertionError
+ ______________ TestSpecialisedExplanations.test_eq_long_text _______________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef000a>
+
+ def test_eq_long_text(self):
+ a = "1" * 100 + "a" + "2" * 100
+ b = "1" * 100 + "b" + "2" * 100
+ > assert a == b
+ E AssertionError: assert '111111111111...2222222222222' == '111111111111...2222222222222'
+ E Skipping 90 identical leading characters in diff, use -v to show
+ E Skipping 91 identical trailing characters in diff, use -v to show
+ E - 1111111111b222222222
+ E ? ^
+ E + 1111111111a222222222
+ E ? ^
+
+ failure_demo.py:55: AssertionError
+ _________ TestSpecialisedExplanations.test_eq_long_text_multiline __________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef000b>
+
+ def test_eq_long_text_multiline(self):
+ a = "1\n" * 100 + "a" + "2\n" * 100
+ b = "1\n" * 100 + "b" + "2\n" * 100
+ > assert a == b
+ E AssertionError: assert '1\n1\n1\n1\n...n2\n2\n2\n2\n' == '1\n1\n1\n1\n...n2\n2\n2\n2\n'
+ E Skipping 190 identical leading characters in diff, use -v to show
+ E Skipping 191 identical trailing characters in diff, use -v to show
+ E 1
+ E 1
+ E 1
+ E 1
+ E 1...
+ E
+ E ...Full output truncated (7 lines hidden), use '-vv' to show
+
+ failure_demo.py:60: AssertionError
+ _________________ TestSpecialisedExplanations.test_eq_list _________________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef000c>
+
+ def test_eq_list(self):
+ > assert [0, 1, 2] == [0, 1, 3]
+ E assert [0, 1, 2] == [0, 1, 3]
+ E At index 2 diff: 2 != 3
+ E Use -v to get the full diff
+
+ failure_demo.py:63: AssertionError
+ ______________ TestSpecialisedExplanations.test_eq_list_long _______________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef000d>
+
+ def test_eq_list_long(self):
+ a = [0] * 100 + [1] + [3] * 100
+ b = [0] * 100 + [2] + [3] * 100
+ > assert a == b
+ E assert [0, 0, 0, 0, 0, 0, ...] == [0, 0, 0, 0, 0, 0, ...]
+ E At index 100 diff: 1 != 2
+ E Use -v to get the full diff
+
+ failure_demo.py:68: AssertionError
+ _________________ TestSpecialisedExplanations.test_eq_dict _________________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef000e>
+
+ def test_eq_dict(self):
+ > assert {"a": 0, "b": 1, "c": 0} == {"a": 0, "b": 2, "d": 0}
+ E AssertionError: assert {'a': 0, 'b': 1, 'c': 0} == {'a': 0, 'b': 2, 'd': 0}
+ E Omitting 1 identical items, use -vv to show
+ E Differing items:
+ E {'b': 1} != {'b': 2}
+ E Left contains 1 more item:
+ E {'c': 0}
+ E Right contains 1 more item:
+ E {'d': 0}...
+ E
+ E ...Full output truncated (2 lines hidden), use '-vv' to show
+
+ failure_demo.py:71: AssertionError
+ _________________ TestSpecialisedExplanations.test_eq_set __________________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef000f>
+
+ def test_eq_set(self):
+ > assert {0, 10, 11, 12} == {0, 20, 21}
+ E AssertionError: assert {0, 10, 11, 12} == {0, 20, 21}
+ E Extra items in the left set:
+ E 10
+ E 11
+ E 12
+ E Extra items in the right set:
+ E 20
+ E 21...
+ E
+ E ...Full output truncated (2 lines hidden), use '-vv' to show
+
+ failure_demo.py:74: AssertionError
+ _____________ TestSpecialisedExplanations.test_eq_longer_list ______________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef0010>
+
+ def test_eq_longer_list(self):
+ > assert [1, 2] == [1, 2, 3]
+ E assert [1, 2] == [1, 2, 3]
+ E Right contains one more item: 3
+ E Use -v to get the full diff
+
+ failure_demo.py:77: AssertionError
+ _________________ TestSpecialisedExplanations.test_in_list _________________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef0011>
+
+ def test_in_list(self):
+ > assert 1 in [0, 2, 3, 4, 5]
+ E assert 1 in [0, 2, 3, 4, 5]
+
+ failure_demo.py:80: AssertionError
+ __________ TestSpecialisedExplanations.test_not_in_text_multiline __________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef0012>
+
+ def test_not_in_text_multiline(self):
+ text = "some multiline\ntext\nwhich\nincludes foo\nand a\ntail"
+ > assert "foo" not in text
+ E AssertionError: assert 'foo' not in 'some multil...nand a\ntail'
+ E 'foo' is contained here:
+ E some multiline
+ E text
+ E which
+ E includes foo
+ E ? +++
+ E and a...
+ E
+ E ...Full output truncated (2 lines hidden), use '-vv' to show
+
+ failure_demo.py:84: AssertionError
+ ___________ TestSpecialisedExplanations.test_not_in_text_single ____________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef0013>
+
+ def test_not_in_text_single(self):
+ text = "single foo line"
+ > assert "foo" not in text
+ E AssertionError: assert 'foo' not in 'single foo line'
+ E 'foo' is contained here:
+ E single foo line
+ E ? +++
+
+ failure_demo.py:88: AssertionError
+ _________ TestSpecialisedExplanations.test_not_in_text_single_long _________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef0014>
+
+ def test_not_in_text_single_long(self):
+ text = "head " * 50 + "foo " + "tail " * 20
+ > assert "foo" not in text
+ E AssertionError: assert 'foo' not in 'head head h...l tail tail '
+ E 'foo' is contained here:
+ E head head foo tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail
+ E ? +++
+
+ failure_demo.py:92: AssertionError
+ ______ TestSpecialisedExplanations.test_not_in_text_single_long_term _______
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef0015>
+
+ def test_not_in_text_single_long_term(self):
+ text = "head " * 50 + "f" * 70 + "tail " * 20
+ > assert "f" * 70 not in text
+ E AssertionError: assert 'fffffffffff...ffffffffffff' not in 'head head h...l tail tail '
+ E 'ffffffffffffffffff...fffffffffffffffffff' is contained here:
+ E head head fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffftail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail
+ E ? ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+ failure_demo.py:96: AssertionError
+ ______________ TestSpecialisedExplanations.test_eq_dataclass _______________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef0016>
+
+ def test_eq_dataclass(self):
+ from dataclasses import dataclass
+
+ @dataclass
+ class Foo:
+ a: int
+ b: str
+
+ left = Foo(1, "b")
+ right = Foo(1, "c")
+ > assert left == right
+ E AssertionError: assert TestSpecialis...oo(a=1, b='b') == TestSpecialis...oo(a=1, b='c')
+ E
+ E Omitting 1 identical items, use -vv to show
+ E Differing attributes:
+ E ['b']
+ E
+ E Drill down into differing attribute b:
+ E b: 'b' != 'c'...
+ E
+ E ...Full output truncated (3 lines hidden), use '-vv' to show
+
+ failure_demo.py:108: AssertionError
+ ________________ TestSpecialisedExplanations.test_eq_attrs _________________
+
+ self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef0017>
+
+ def test_eq_attrs(self):
+ import attr
+
+ @attr.s
+ class Foo:
+ a = attr.ib()
+ b = attr.ib()
+
+ left = Foo(1, "b")
+ right = Foo(1, "c")
+ > assert left == right
+ E AssertionError: assert Foo(a=1, b='b') == Foo(a=1, b='c')
+ E
+ E Omitting 1 identical items, use -vv to show
+ E Differing attributes:
+ E ['b']
+ E
+ E Drill down into differing attribute b:
+ E b: 'b' != 'c'...
+ E
+ E ...Full output truncated (3 lines hidden), use '-vv' to show
+
+ failure_demo.py:120: AssertionError
+ ______________________________ test_attribute ______________________________
+
+ def test_attribute():
+ class Foo:
+ b = 1
+
+ i = Foo()
+ > assert i.b == 2
+ E assert 1 == 2
+ E + where 1 = <failure_demo.test_attribute.<locals>.Foo object at 0xdeadbeef0018>.b
+
+ failure_demo.py:128: AssertionError
+ _________________________ test_attribute_instance __________________________
+
+ def test_attribute_instance():
+ class Foo:
+ b = 1
+
+ > assert Foo().b == 2
+ E AssertionError: assert 1 == 2
+ E + where 1 = <failure_demo.test_attribute_instance.<locals>.Foo object at 0xdeadbeef0019>.b
+ E + where <failure_demo.test_attribute_instance.<locals>.Foo object at 0xdeadbeef0019> = <class 'failure_demo.test_attribute_instance.<locals>.Foo'>()
+
+ failure_demo.py:135: AssertionError
+ __________________________ test_attribute_failure __________________________
+
+ def test_attribute_failure():
+ class Foo:
+ def _get_b(self):
+ raise Exception("Failed to get attrib")
+
+ b = property(_get_b)
+
+ i = Foo()
+ > assert i.b == 2
+
+ failure_demo.py:146:
+ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
+
+ self = <failure_demo.test_attribute_failure.<locals>.Foo object at 0xdeadbeef001a>
+
+ def _get_b(self):
+ > raise Exception("Failed to get attrib")
+ E Exception: Failed to get attrib
+
+ failure_demo.py:141: Exception
+ _________________________ test_attribute_multiple __________________________
+
+ def test_attribute_multiple():
+ class Foo:
+ b = 1
+
+ class Bar:
+ b = 2
+
+ > assert Foo().b == Bar().b
+ E AssertionError: assert 1 == 2
+ E + where 1 = <failure_demo.test_attribute_multiple.<locals>.Foo object at 0xdeadbeef001b>.b
+ E + where <failure_demo.test_attribute_multiple.<locals>.Foo object at 0xdeadbeef001b> = <class 'failure_demo.test_attribute_multiple.<locals>.Foo'>()
+ E + and 2 = <failure_demo.test_attribute_multiple.<locals>.Bar object at 0xdeadbeef001c>.b
+ E + where <failure_demo.test_attribute_multiple.<locals>.Bar object at 0xdeadbeef001c> = <class 'failure_demo.test_attribute_multiple.<locals>.Bar'>()
+
+ failure_demo.py:156: AssertionError
+ __________________________ TestRaises.test_raises __________________________
+
+ self = <failure_demo.TestRaises object at 0xdeadbeef001d>
+
+ def test_raises(self):
+ s = "qwe"
+ > raises(TypeError, int, s)
+ E ValueError: invalid literal for int() with base 10: 'qwe'
+
+ failure_demo.py:166: ValueError
+ ______________________ TestRaises.test_raises_doesnt _______________________
+
+ self = <failure_demo.TestRaises object at 0xdeadbeef001e>
+
+ def test_raises_doesnt(self):
+ > raises(OSError, int, "3")
+ E Failed: DID NOT RAISE <class 'OSError'>
+
+ failure_demo.py:169: Failed
+ __________________________ TestRaises.test_raise ___________________________
+
+ self = <failure_demo.TestRaises object at 0xdeadbeef001f>
+
+ def test_raise(self):
+ > raise ValueError("demo error")
+ E ValueError: demo error
+
+ failure_demo.py:172: ValueError
+ ________________________ TestRaises.test_tupleerror ________________________
+
+ self = <failure_demo.TestRaises object at 0xdeadbeef0020>
+
+ def test_tupleerror(self):
+ > a, b = [1] # NOQA
+ E ValueError: not enough values to unpack (expected 2, got 1)
+
+ failure_demo.py:175: ValueError
+ ______ TestRaises.test_reinterpret_fails_with_print_for_the_fun_of_it ______
+
+ self = <failure_demo.TestRaises object at 0xdeadbeef0021>
+
+ def test_reinterpret_fails_with_print_for_the_fun_of_it(self):
+ items = [1, 2, 3]
+ print(f"items is {items!r}")
+ > a, b = items.pop()
+ E TypeError: cannot unpack non-iterable int object
+
+ failure_demo.py:180: TypeError
+ --------------------------- Captured stdout call ---------------------------
+ items is [1, 2, 3]
+ ________________________ TestRaises.test_some_error ________________________
+
+ self = <failure_demo.TestRaises object at 0xdeadbeef0022>
+
+ def test_some_error(self):
+ > if namenotexi: # NOQA
+ E NameError: name 'namenotexi' is not defined
+
+ failure_demo.py:183: NameError
+ ____________________ test_dynamic_compile_shows_nicely _____________________
+
+ def test_dynamic_compile_shows_nicely():
+ import importlib.util
+ import sys
+
+ src = "def foo():\n assert 1 == 0\n"
+ name = "abc-123"
+ spec = importlib.util.spec_from_loader(name, loader=None)
+ module = importlib.util.module_from_spec(spec)
+ code = compile(src, name, "exec")
+ exec(code, module.__dict__)
+ sys.modules[name] = module
+ > module.foo()
+
+ failure_demo.py:202:
+ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
+
+ > ???
+ E AssertionError
+
+ abc-123:2: AssertionError
+ ____________________ TestMoreErrors.test_complex_error _____________________
+
+ self = <failure_demo.TestMoreErrors object at 0xdeadbeef0023>
+
+ def test_complex_error(self):
+ def f():
+ return 44
+
+ def g():
+ return 43
+
+ > somefunc(f(), g())
+
+ failure_demo.py:213:
+ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
+ failure_demo.py:10: in somefunc
+ otherfunc(x, y)
+ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
+
+ a = 44, b = 43
+
+ def otherfunc(a, b):
+ > assert a == b
+ E assert 44 == 43
+
+ failure_demo.py:6: AssertionError
+ ___________________ TestMoreErrors.test_z1_unpack_error ____________________
+
+ self = <failure_demo.TestMoreErrors object at 0xdeadbeef0024>
+
+ def test_z1_unpack_error(self):
+ items = []
+ > a, b = items
+ E ValueError: not enough values to unpack (expected 2, got 0)
+
+ failure_demo.py:217: ValueError
+ ____________________ TestMoreErrors.test_z2_type_error _____________________
+
+ self = <failure_demo.TestMoreErrors object at 0xdeadbeef0025>
+
+ def test_z2_type_error(self):
+ items = 3
+ > a, b = items
+ E TypeError: cannot unpack non-iterable int object
+
+ failure_demo.py:221: TypeError
+ ______________________ TestMoreErrors.test_startswith ______________________
+
+ self = <failure_demo.TestMoreErrors object at 0xdeadbeef0026>
+
+ def test_startswith(self):
+ s = "123"
+ g = "456"
+ > assert s.startswith(g)
+ E AssertionError: assert False
+ E + where False = <built-in method startswith of str object at 0xdeadbeef0027>('456')
+ E + where <built-in method startswith of str object at 0xdeadbeef0027> = '123'.startswith
+
+ failure_demo.py:226: AssertionError
+ __________________ TestMoreErrors.test_startswith_nested ___________________
+
+ self = <failure_demo.TestMoreErrors object at 0xdeadbeef0028>
+
+ def test_startswith_nested(self):
+ def f():
+ return "123"
+
+ def g():
+ return "456"
+
+ > assert f().startswith(g())
+ E AssertionError: assert False
+ E + where False = <built-in method startswith of str object at 0xdeadbeef0027>('456')
+ E + where <built-in method startswith of str object at 0xdeadbeef0027> = '123'.startswith
+ E + where '123' = <function TestMoreErrors.test_startswith_nested.<locals>.f at 0xdeadbeef0029>()
+ E + and '456' = <function TestMoreErrors.test_startswith_nested.<locals>.g at 0xdeadbeef002a>()
+
+ failure_demo.py:235: AssertionError
+ _____________________ TestMoreErrors.test_global_func ______________________
+
+ self = <failure_demo.TestMoreErrors object at 0xdeadbeef002b>
+
+ def test_global_func(self):
+ > assert isinstance(globf(42), float)
+ E assert False
+ E + where False = isinstance(43, float)
+ E + where 43 = globf(42)
+
+ failure_demo.py:238: AssertionError
+ _______________________ TestMoreErrors.test_instance _______________________
+
+ self = <failure_demo.TestMoreErrors object at 0xdeadbeef002c>
+
+ def test_instance(self):
+ self.x = 6 * 7
+ > assert self.x != 42
+ E assert 42 != 42
+ E + where 42 = <failure_demo.TestMoreErrors object at 0xdeadbeef002c>.x
+
+ failure_demo.py:242: AssertionError
+ _______________________ TestMoreErrors.test_compare ________________________
+
+ self = <failure_demo.TestMoreErrors object at 0xdeadbeef002d>
+
+ def test_compare(self):
+ > assert globf(10) < 5
+ E assert 11 < 5
+ E + where 11 = globf(10)
+
+ failure_demo.py:245: AssertionError
+ _____________________ TestMoreErrors.test_try_finally ______________________
+
+ self = <failure_demo.TestMoreErrors object at 0xdeadbeef002e>
+
+ def test_try_finally(self):
+ x = 1
+ try:
+ > assert x == 0
+ E assert 1 == 0
+
+ failure_demo.py:250: AssertionError
+ ___________________ TestCustomAssertMsg.test_single_line ___________________
+
+ self = <failure_demo.TestCustomAssertMsg object at 0xdeadbeef002f>
+
+ def test_single_line(self):
+ class A:
+ a = 1
+
+ b = 2
+ > assert A.a == b, "A.a appears not to be b"
+ E AssertionError: A.a appears not to be b
+ E assert 1 == 2
+ E + where 1 = <class 'failure_demo.TestCustomAssertMsg.test_single_line.<locals>.A'>.a
+
+ failure_demo.py:261: AssertionError
+ ____________________ TestCustomAssertMsg.test_multiline ____________________
+
+ self = <failure_demo.TestCustomAssertMsg object at 0xdeadbeef0030>
+
+ def test_multiline(self):
+ class A:
+ a = 1
+
+ b = 2
+ > assert (
+ A.a == b
+ ), "A.a appears not to be b\nor does not appear to be b\none of those"
+ E AssertionError: A.a appears not to be b
+ E or does not appear to be b
+ E one of those
+ E assert 1 == 2
+ E + where 1 = <class 'failure_demo.TestCustomAssertMsg.test_multiline.<locals>.A'>.a
+
+ failure_demo.py:268: AssertionError
+ ___________________ TestCustomAssertMsg.test_custom_repr ___________________
+
+ self = <failure_demo.TestCustomAssertMsg object at 0xdeadbeef0031>
+
+ def test_custom_repr(self):
+ class JSON:
+ a = 1
+
+ def __repr__(self):
+ return "This is JSON\n{\n 'foo': 'bar'\n}"
+
+ a = JSON()
+ b = 2
+ > assert a.a == b, a
+ E AssertionError: This is JSON
+ E {
+ E 'foo': 'bar'
+ E }
+ E assert 1 == 2
+ E + where 1 = This is JSON\n{\n 'foo': 'bar'\n}.a
+
+ failure_demo.py:281: AssertionError
+ ========================= short test summary info ==========================
+ FAILED failure_demo.py::test_generative[3-6] - assert (3 * 2) < 6
+ FAILED failure_demo.py::TestFailing::test_simple - assert 42 == 43
+ FAILED failure_demo.py::TestFailing::test_simple_multiline - assert 42 == 54
+ FAILED failure_demo.py::TestFailing::test_not - assert not 42
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_eq_text - Asser...
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_eq_similar_text
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_eq_multiline_text
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_eq_long_text - ...
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_eq_long_text_multiline
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_eq_list - asser...
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_eq_list_long - ...
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_eq_dict - Asser...
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_eq_set - Assert...
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_eq_longer_list
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_in_list - asser...
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_not_in_text_multiline
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_not_in_text_single
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_not_in_text_single_long
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_not_in_text_single_long_term
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_eq_dataclass - ...
+ FAILED failure_demo.py::TestSpecialisedExplanations::test_eq_attrs - Asse...
+ FAILED failure_demo.py::test_attribute - assert 1 == 2
+ FAILED failure_demo.py::test_attribute_instance - AssertionError: assert ...
+ FAILED failure_demo.py::test_attribute_failure - Exception: Failed to get...
+ FAILED failure_demo.py::test_attribute_multiple - AssertionError: assert ...
+ FAILED failure_demo.py::TestRaises::test_raises - ValueError: invalid lit...
+ FAILED failure_demo.py::TestRaises::test_raises_doesnt - Failed: DID NOT ...
+ FAILED failure_demo.py::TestRaises::test_raise - ValueError: demo error
+ FAILED failure_demo.py::TestRaises::test_tupleerror - ValueError: not eno...
+ FAILED failure_demo.py::TestRaises::test_reinterpret_fails_with_print_for_the_fun_of_it
+ FAILED failure_demo.py::TestRaises::test_some_error - NameError: name 'na...
+ FAILED failure_demo.py::test_dynamic_compile_shows_nicely - AssertionError
+ FAILED failure_demo.py::TestMoreErrors::test_complex_error - assert 44 == 43
+ FAILED failure_demo.py::TestMoreErrors::test_z1_unpack_error - ValueError...
+ FAILED failure_demo.py::TestMoreErrors::test_z2_type_error - TypeError: c...
+ FAILED failure_demo.py::TestMoreErrors::test_startswith - AssertionError:...
+ FAILED failure_demo.py::TestMoreErrors::test_startswith_nested - Assertio...
+ FAILED failure_demo.py::TestMoreErrors::test_global_func - assert False
+ FAILED failure_demo.py::TestMoreErrors::test_instance - assert 42 != 42
+ FAILED failure_demo.py::TestMoreErrors::test_compare - assert 11 < 5
+ FAILED failure_demo.py::TestMoreErrors::test_try_finally - assert 1 == 0
+ FAILED failure_demo.py::TestCustomAssertMsg::test_single_line - Assertion...
+ FAILED failure_demo.py::TestCustomAssertMsg::test_multiline - AssertionEr...
+ FAILED failure_demo.py::TestCustomAssertMsg::test_custom_repr - Assertion...
+ ============================ 44 failed in 0.12s ============================
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/simple.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/simple.rst
new file mode 100644
index 0000000000..a70f340499
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/simple.rst
@@ -0,0 +1,1086 @@
+
+
+Basic patterns and examples
+==========================================================
+
+How to change command line options defaults
+-------------------------------------------
+
+It can be tedious to type the same series of command line options
+every time you use ``pytest``. For example, if you always want to see
+detailed info on skipped and xfailed tests, as well as have terser "dot"
+progress output, you can write it into a configuration file:
+
+.. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ addopts = -ra -q
+
+
+Alternatively, you can set a ``PYTEST_ADDOPTS`` environment variable to add command
+line options while the environment is in use:
+
+.. code-block:: bash
+
+ export PYTEST_ADDOPTS="-v"
+
+Here's how the command-line is built in the presence of ``addopts`` or the environment variable:
+
+.. code-block:: text
+
+ <pytest.ini:addopts> $PYTEST_ADDOPTS <extra command-line arguments>
+
+So if the user executes in the command-line:
+
+.. code-block:: bash
+
+ pytest -m slow
+
+The actual command line executed is:
+
+.. code-block:: bash
+
+ pytest -ra -q -v -m slow
+
+Note that, as usual for command-line applications, in case of conflicting options the last one wins, so the example
+above will show verbose output because ``-v`` overrides ``-q``.
+
+
+.. _request example:
+
+Pass different values to a test function, depending on command line options
+----------------------------------------------------------------------------
+
+.. regendoc:wipe
+
+Suppose we want to write a test that depends on a command line option.
+Here is a basic pattern to achieve this:
+
+.. code-block:: python
+
+ # content of test_sample.py
+ def test_answer(cmdopt):
+ if cmdopt == "type1":
+ print("first")
+ elif cmdopt == "type2":
+ print("second")
+ assert 0 # to see what was printed
+
+
+For this to work we need to add a command line option and
+provide the ``cmdopt`` through a :ref:`fixture function <fixture>`:
+
+.. code-block:: python
+
+ # content of conftest.py
+ import pytest
+
+
+ def pytest_addoption(parser):
+ parser.addoption(
+ "--cmdopt", action="store", default="type1", help="my option: type1 or type2"
+ )
+
+
+ @pytest.fixture
+ def cmdopt(request):
+ return request.config.getoption("--cmdopt")
+
+Let's run this without supplying our new option:
+
+.. code-block:: pytest
+
+ $ pytest -q test_sample.py
+ F [100%]
+ ================================= FAILURES =================================
+ _______________________________ test_answer ________________________________
+
+ cmdopt = 'type1'
+
+ def test_answer(cmdopt):
+ if cmdopt == "type1":
+ print("first")
+ elif cmdopt == "type2":
+ print("second")
+ > assert 0 # to see what was printed
+ E assert 0
+
+ test_sample.py:6: AssertionError
+ --------------------------- Captured stdout call ---------------------------
+ first
+ ========================= short test summary info ==========================
+ FAILED test_sample.py::test_answer - assert 0
+ 1 failed in 0.12s
+
+And now with supplying a command line option:
+
+.. code-block:: pytest
+
+ $ pytest -q --cmdopt=type2
+ F [100%]
+ ================================= FAILURES =================================
+ _______________________________ test_answer ________________________________
+
+ cmdopt = 'type2'
+
+ def test_answer(cmdopt):
+ if cmdopt == "type1":
+ print("first")
+ elif cmdopt == "type2":
+ print("second")
+ > assert 0 # to see what was printed
+ E assert 0
+
+ test_sample.py:6: AssertionError
+ --------------------------- Captured stdout call ---------------------------
+ second
+ ========================= short test summary info ==========================
+ FAILED test_sample.py::test_answer - assert 0
+ 1 failed in 0.12s
+
+You can see that the command line option arrived in our test.
+
+We could add simple validation for the input by listing the choices:
+
+.. code-block:: python
+
+ # content of conftest.py
+ import pytest
+
+
+ def pytest_addoption(parser):
+ parser.addoption(
+ "--cmdopt",
+ action="store",
+ default="type1",
+ help="my option: type1 or type2",
+ choices=("type1", "type2"),
+ )
+
+Now we'll get feedback on a bad argument:
+
+.. code-block:: pytest
+
+ $ pytest -q --cmdopt=type3
+ ERROR: usage: pytest [options] [file_or_dir] [file_or_dir] [...]
+ pytest: error: argument --cmdopt: invalid choice: 'type3' (choose from 'type1', 'type2')
+
+
+If you need to provide more detailed error messages, you can use the
+``type`` parameter and raise ``pytest.UsageError``:
+
+.. code-block:: python
+
+ # content of conftest.py
+ import pytest
+
+
+ def type_checker(value):
+ msg = "cmdopt must specify a numeric type as typeNNN"
+ if not value.startswith("type"):
+ raise pytest.UsageError(msg)
+ try:
+ int(value[4:])
+ except ValueError:
+ raise pytest.UsageError(msg)
+
+ return value
+
+
+ def pytest_addoption(parser):
+ parser.addoption(
+ "--cmdopt",
+ action="store",
+ default="type1",
+ help="my option: type1 or type2",
+ type=type_checker,
+ )
+
+This completes the basic pattern. Often, however, you would rather
+process command line options outside of the test and pass in
+different or more complex objects.
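+
+A rough sketch of that idea, building a hypothetical ``Config`` object from the
+``--cmdopt`` value and handing it to tests instead of the raw string:
+
+.. code-block:: python
+
+    # content of conftest.py (sketch)
+    import dataclasses
+
+    import pytest
+
+
+    @dataclasses.dataclass
+    class Config:
+        # hypothetical richer object handed to tests instead of the raw option string
+        kind: str
+
+
+    def pytest_addoption(parser):
+        parser.addoption(
+            "--cmdopt", action="store", default="type1", help="my option: type1 or type2"
+        )
+
+
+    @pytest.fixture
+    def cmdopt(request):
+        return Config(kind=request.config.getoption("--cmdopt"))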
+
+Dynamically adding command line options
+--------------------------------------------------------------
+
+.. regendoc:wipe
+
+Through :confval:`addopts` you can statically add command line
+options for your project. You can also dynamically modify
+the command line arguments before they get processed:
+
+.. code-block:: python
+
+ # setuptools plugin
+ import sys
+
+
+ def pytest_load_initial_conftests(args):
+ if "xdist" in sys.modules: # pytest-xdist plugin
+ import multiprocessing
+
+            num = max(multiprocessing.cpu_count() // 2, 1)
+ args[:] = ["-n", str(num)] + args
+
+If you have the :pypi:`xdist plugin <pytest-xdist>` installed,
+test runs will now always use a number of subprocesses
+based on your CPU count. Running in an empty
+directory with the above conftest.py:
+
+.. code-block:: pytest
+
+ $ pytest
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 0 items
+
+ ========================== no tests ran in 0.12s ===========================
+
+.. _`excontrolskip`:
+
+Control skipping of tests according to command line option
+--------------------------------------------------------------
+
+.. regendoc:wipe
+
+Here is a ``conftest.py`` file adding a ``--runslow`` command
+line option to control skipping of ``pytest.mark.slow`` marked tests:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+ import pytest
+
+
+ def pytest_addoption(parser):
+ parser.addoption(
+ "--runslow", action="store_true", default=False, help="run slow tests"
+ )
+
+
+ def pytest_configure(config):
+ config.addinivalue_line("markers", "slow: mark test as slow to run")
+
+
+ def pytest_collection_modifyitems(config, items):
+ if config.getoption("--runslow"):
+ # --runslow given in cli: do not skip slow tests
+ return
+ skip_slow = pytest.mark.skip(reason="need --runslow option to run")
+ for item in items:
+ if "slow" in item.keywords:
+ item.add_marker(skip_slow)
+
+We can now write a test module like this:
+
+.. code-block:: python
+
+ # content of test_module.py
+ import pytest
+
+
+ def test_func_fast():
+ pass
+
+
+ @pytest.mark.slow
+ def test_func_slow():
+ pass
+
+and when running it we will see one skipped "slow" test:
+
+.. code-block:: pytest
+
+ $ pytest -rs # "-rs" means report details on the little 's'
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 2 items
+
+ test_module.py .s [100%]
+
+ ========================= short test summary info ==========================
+ SKIPPED [1] test_module.py:8: need --runslow option to run
+ ======================= 1 passed, 1 skipped in 0.12s =======================
+
+Or run it including the ``slow`` marked test:
+
+.. code-block:: pytest
+
+ $ pytest --runslow
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 2 items
+
+ test_module.py .. [100%]
+
+ ============================ 2 passed in 0.12s =============================
+
+.. _`__tracebackhide__`:
+
+Writing well integrated assertion helpers
+-----------------------------------------
+
+.. regendoc:wipe
+
+If you have a test helper function called from a test you can
+use ``pytest.fail`` to fail a test with a certain message.
+The test support function will not show up in the traceback if you
+set the ``__tracebackhide__`` option somewhere in the helper function.
+Example:
+
+.. code-block:: python
+
+ # content of test_checkconfig.py
+ import pytest
+
+
+ def checkconfig(x):
+ __tracebackhide__ = True
+ if not hasattr(x, "config"):
+ pytest.fail("not configured: {}".format(x))
+
+
+ def test_something():
+ checkconfig(42)
+
+The ``__tracebackhide__`` setting influences how ``pytest`` displays
+tracebacks: the ``checkconfig`` function will not be shown
+unless the ``--full-trace`` command line option is specified.
+Let's run our little function:
+
+.. code-block:: pytest
+
+ $ pytest -q test_checkconfig.py
+ F [100%]
+ ================================= FAILURES =================================
+ ______________________________ test_something ______________________________
+
+ def test_something():
+ > checkconfig(42)
+ E Failed: not configured: 42
+
+ test_checkconfig.py:11: Failed
+ ========================= short test summary info ==========================
+ FAILED test_checkconfig.py::test_something - Failed: not configured: 42
+ 1 failed in 0.12s
+
+If you only want to hide certain exceptions, you can set ``__tracebackhide__``
+to a callable which gets the ``ExceptionInfo`` object. You can for example use
+this to make sure unexpected exception types aren't hidden:
+
+.. code-block:: python
+
+ import operator
+ import pytest
+
+
+ class ConfigException(Exception):
+ pass
+
+
+ def checkconfig(x):
+ __tracebackhide__ = operator.methodcaller("errisinstance", ConfigException)
+ if not hasattr(x, "config"):
+ raise ConfigException("not configured: {}".format(x))
+
+
+ def test_something():
+ checkconfig(42)
+
+This will avoid hiding the exception traceback on unrelated exceptions (e.g.
+bugs in assertion helpers).
+
+
+Detect if running from within a pytest run
+--------------------------------------------------------------
+
+.. regendoc:wipe
+
+Usually it is a bad idea to make application code
+behave differently if called from a test. But if you
+absolutely must find out if your application code is
+running from a test you can do something like this:
+
+.. code-block:: python
+
+ # content of your_module.py
+
+
+ _called_from_test = False
+
+.. code-block:: python
+
+    # content of conftest.py
+
+    import your_module
+
+
+    def pytest_configure(config):
+        your_module._called_from_test = True
+
+and then check for the ``your_module._called_from_test`` flag accordingly in
+your application:
+
+.. code-block:: python
+
+ if your_module._called_from_test:
+ # called from within a test run
+ ...
+ else:
+ # called "normally"
+ ...
+
+
+Adding info to test report header
+--------------------------------------------------------------
+
+.. regendoc:wipe
+
+It's easy to present extra information in a ``pytest`` run:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+
+ def pytest_report_header(config):
+ return "project deps: mylib-1.1"
+
+which will add the string to the test header accordingly:
+
+.. code-block:: pytest
+
+ $ pytest
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ project deps: mylib-1.1
+ rootdir: /home/sweet/project
+ collected 0 items
+
+ ========================== no tests ran in 0.12s ===========================
+
+.. regendoc:wipe
+
+It is also possible to return a list of strings which will be considered as several
+lines of information. You may consider ``config.getoption('verbose')`` in order to
+display more information if applicable:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+
+ def pytest_report_header(config):
+ if config.getoption("verbose") > 0:
+ return ["info1: did you know that ...", "did you?"]
+
+which will add info only when run with ``-v``:
+
+.. code-block:: pytest
+
+ $ pytest -v
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ info1: did you know that ...
+ did you?
+ rootdir: /home/sweet/project
+ collecting ... collected 0 items
+
+ ========================== no tests ran in 0.12s ===========================
+
+and nothing when run plainly:
+
+.. code-block:: pytest
+
+ $ pytest
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 0 items
+
+ ========================== no tests ran in 0.12s ===========================
+
+Profiling test duration
+--------------------------
+
+.. regendoc:wipe
+
+.. versionadded:: 2.2
+
+If you have a large, slow-running test suite you might want to find
+out which tests are the slowest. Let's make an artificial test suite:
+
+.. code-block:: python
+
+ # content of test_some_are_slow.py
+ import time
+
+
+ def test_funcfast():
+ time.sleep(0.1)
+
+
+ def test_funcslow1():
+ time.sleep(0.2)
+
+
+ def test_funcslow2():
+ time.sleep(0.3)
+
+Now we can profile which test functions execute the slowest:
+
+.. code-block:: pytest
+
+ $ pytest --durations=3
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 3 items
+
+ test_some_are_slow.py ... [100%]
+
+ =========================== slowest 3 durations ============================
+ 0.30s call test_some_are_slow.py::test_funcslow2
+ 0.20s call test_some_are_slow.py::test_funcslow1
+ 0.10s call test_some_are_slow.py::test_funcfast
+ ============================ 3 passed in 0.12s =============================
+
+Incremental testing - test steps
+---------------------------------------------------
+
+.. regendoc:wipe
+
+Sometimes you may have a testing situation which consists of a series
+of test steps. If one step fails it makes no sense to execute further
+steps as they are all expected to fail anyway and their tracebacks
+add no insight. Here is a simple ``conftest.py`` file which introduces
+an ``incremental`` marker which is to be used on classes:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+ from typing import Dict, Tuple
+ import pytest
+
+ # store history of failures per test class name and per index in parametrize (if parametrize used)
+ _test_failed_incremental: Dict[str, Dict[Tuple[int, ...], str]] = {}
+
+
+ def pytest_runtest_makereport(item, call):
+ if "incremental" in item.keywords:
+ # incremental marker is used
+ if call.excinfo is not None:
+ # the test has failed
+ # retrieve the class name of the test
+ cls_name = str(item.cls)
+ # retrieve the index of the test (if parametrize is used in combination with incremental)
+ parametrize_index = (
+ tuple(item.callspec.indices.values())
+ if hasattr(item, "callspec")
+ else ()
+ )
+ # retrieve the name of the test function
+ test_name = item.originalname or item.name
+ # store in _test_failed_incremental the original name of the failed test
+ _test_failed_incremental.setdefault(cls_name, {}).setdefault(
+ parametrize_index, test_name
+ )
+
+
+ def pytest_runtest_setup(item):
+ if "incremental" in item.keywords:
+ # retrieve the class name of the test
+ cls_name = str(item.cls)
+ # check if a previous test has failed for this class
+ if cls_name in _test_failed_incremental:
+ # retrieve the index of the test (if parametrize is used in combination with incremental)
+ parametrize_index = (
+ tuple(item.callspec.indices.values())
+ if hasattr(item, "callspec")
+ else ()
+ )
+ # retrieve the name of the first test function to fail for this class name and index
+ test_name = _test_failed_incremental[cls_name].get(parametrize_index, None)
+ # if name found, test has failed for the combination of class name & test name
+ if test_name is not None:
+ pytest.xfail("previous test failed ({})".format(test_name))
+
+
+These two hook implementations work together to abort incremental-marked
+tests in a class. Here is a test module example:
+
+.. code-block:: python
+
+ # content of test_step.py
+
+ import pytest
+
+
+ @pytest.mark.incremental
+ class TestUserHandling:
+ def test_login(self):
+ pass
+
+ def test_modification(self):
+ assert 0
+
+ def test_deletion(self):
+ pass
+
+
+ def test_normal():
+ pass
+
+If we run this:
+
+.. code-block:: pytest
+
+ $ pytest -rx
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 4 items
+
+ test_step.py .Fx. [100%]
+
+ ================================= FAILURES =================================
+ ____________________ TestUserHandling.test_modification ____________________
+
+ self = <test_step.TestUserHandling object at 0xdeadbeef0001>
+
+ def test_modification(self):
+ > assert 0
+ E assert 0
+
+ test_step.py:11: AssertionError
+ ========================= short test summary info ==========================
+ XFAIL test_step.py::TestUserHandling::test_deletion
+ reason: previous test failed (test_modification)
+ ================== 1 failed, 2 passed, 1 xfailed in 0.12s ==================
+
+We'll see that ``test_deletion`` was not executed because ``test_modification``
+failed. It is reported as an "expected failure".
+
+
+Package/Directory-level fixtures (setups)
+-------------------------------------------------------
+
+If you have nested test directories, you can have per-directory fixture scopes
+by placing fixture functions in a ``conftest.py`` file in that directory.
+You can use all types of fixtures including :ref:`autouse fixtures
+<autouse fixtures>` which are the equivalent of xUnit's setup/teardown
+concept. It is, however, recommended to have explicit fixture references in your
+tests or test classes rather than relying on implicitly executing
+setup/teardown functions, especially if they are far away from the actual tests.
+
+Here is an example for making a ``db`` fixture available in a directory:
+
+.. code-block:: python
+
+ # content of a/conftest.py
+ import pytest
+
+
+ class DB:
+ pass
+
+
+ @pytest.fixture(scope="session")
+ def db():
+ return DB()
+
+and then a test module in that directory:
+
+.. code-block:: python
+
+ # content of a/test_db.py
+ def test_a1(db):
+ assert 0, db # to show value
+
+another test module:
+
+.. code-block:: python
+
+ # content of a/test_db2.py
+ def test_a2(db):
+ assert 0, db # to show value
+
+and then a module in a sister directory which will not see
+the ``db`` fixture:
+
+.. code-block:: python
+
+ # content of b/test_error.py
+ def test_root(db): # no db here, will error out
+ pass
+
+We can run this:
+
+.. code-block:: pytest
+
+ $ pytest
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 7 items
+
+ test_step.py .Fx. [ 57%]
+ a/test_db.py F [ 71%]
+ a/test_db2.py F [ 85%]
+ b/test_error.py E [100%]
+
+ ================================== ERRORS ==================================
+ _______________________ ERROR at setup of test_root ________________________
+ file /home/sweet/project/b/test_error.py, line 1
+ def test_root(db): # no db here, will error out
+ E fixture 'db' not found
+ > available fixtures: cache, capfd, capfdbinary, caplog, capsys, capsysbinary, doctest_namespace, monkeypatch, pytestconfig, record_property, record_testsuite_property, record_xml_attribute, recwarn, tmp_path, tmp_path_factory, tmpdir, tmpdir_factory
+ > use 'pytest --fixtures [testpath]' for help on them.
+
+ /home/sweet/project/b/test_error.py:1
+ ================================= FAILURES =================================
+ ____________________ TestUserHandling.test_modification ____________________
+
+ self = <test_step.TestUserHandling object at 0xdeadbeef0002>
+
+ def test_modification(self):
+ > assert 0
+ E assert 0
+
+ test_step.py:11: AssertionError
+ _________________________________ test_a1 __________________________________
+
+ db = <conftest.DB object at 0xdeadbeef0003>
+
+ def test_a1(db):
+ > assert 0, db # to show value
+ E AssertionError: <conftest.DB object at 0xdeadbeef0003>
+ E assert 0
+
+ a/test_db.py:2: AssertionError
+ _________________________________ test_a2 __________________________________
+
+ db = <conftest.DB object at 0xdeadbeef0003>
+
+ def test_a2(db):
+ > assert 0, db # to show value
+ E AssertionError: <conftest.DB object at 0xdeadbeef0003>
+ E assert 0
+
+ a/test_db2.py:2: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_step.py::TestUserHandling::test_modification - assert 0
+ FAILED a/test_db.py::test_a1 - AssertionError: <conftest.DB object at 0x7...
+ FAILED a/test_db2.py::test_a2 - AssertionError: <conftest.DB object at 0x...
+ ERROR b/test_error.py::test_root
+ ============= 3 failed, 2 passed, 1 xfailed, 1 error in 0.12s ==============
+
+The two test modules in the ``a`` directory see the same ``db`` fixture instance
+while the one test in the sister-directory ``b`` doesn't see it. We could of course
+also define a ``db`` fixture in that sister directory's ``conftest.py`` file.
+Note that each fixture is only instantiated if there is a test actually needing
+it (unless you use "autouse" fixtures, which are always executed ahead of the
+first test).
+
+
+Post-process test reports / failures
+---------------------------------------
+
+If you want to postprocess test reports and need access to the executing
+environment you can implement a hook that gets called when the test
+"report" object is about to be created. Here we write out all failing
+test calls and also access a fixture (if it was used by the test) in
+case you want to query/look at it during your post processing. In our
+case we just write some information out to a ``failures`` file:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+ import pytest
+ import os.path
+
+
+ @pytest.hookimpl(tryfirst=True, hookwrapper=True)
+ def pytest_runtest_makereport(item, call):
+ # execute all other hooks to obtain the report object
+ outcome = yield
+ rep = outcome.get_result()
+
+ # we only look at actual failing test calls, not setup/teardown
+ if rep.when == "call" and rep.failed:
+ mode = "a" if os.path.exists("failures") else "w"
+ with open("failures", mode) as f:
+ # let's also access a fixture for the fun of it
+ if "tmp_path" in item.fixturenames:
+ extra = " ({})".format(item.funcargs["tmp_path"])
+ else:
+ extra = ""
+
+ f.write(rep.nodeid + extra + "\n")
+
+
+if you then have failing tests:
+
+.. code-block:: python
+
+ # content of test_module.py
+ def test_fail1(tmp_path):
+ assert 0
+
+
+ def test_fail2():
+ assert 0
+
+and run them:
+
+.. code-block:: pytest
+
+ $ pytest test_module.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 2 items
+
+ test_module.py FF [100%]
+
+ ================================= FAILURES =================================
+ ________________________________ test_fail1 ________________________________
+
+ tmp_path = PosixPath('PYTEST_TMPDIR/test_fail10')
+
+ def test_fail1(tmp_path):
+ > assert 0
+ E assert 0
+
+ test_module.py:2: AssertionError
+ ________________________________ test_fail2 ________________________________
+
+ def test_fail2():
+ > assert 0
+ E assert 0
+
+ test_module.py:6: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_module.py::test_fail1 - assert 0
+ FAILED test_module.py::test_fail2 - assert 0
+ ============================ 2 failed in 0.12s =============================
+
+you will have a "failures" file which contains the failing test ids:
+
+.. code-block:: bash
+
+ $ cat failures
+ test_module.py::test_fail1 (PYTEST_TMPDIR/test_fail10)
+ test_module.py::test_fail2
+
+Making test result information available in fixtures
+-----------------------------------------------------------
+
+.. regendoc:wipe
+
+If you want to make test result reports available in fixture finalizers,
+here is a little example implemented via a local plugin:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+ import pytest
+
+
+ @pytest.hookimpl(tryfirst=True, hookwrapper=True)
+ def pytest_runtest_makereport(item, call):
+ # execute all other hooks to obtain the report object
+ outcome = yield
+ rep = outcome.get_result()
+
+ # set a report attribute for each phase of a call, which can
+ # be "setup", "call", "teardown"
+
+ setattr(item, "rep_" + rep.when, rep)
+
+
+ @pytest.fixture
+ def something(request):
+ yield
+ # request.node is an "item" because we use the default
+ # "function" scope
+ if request.node.rep_setup.failed:
+ print("setting up a test failed!", request.node.nodeid)
+ elif request.node.rep_setup.passed:
+ if request.node.rep_call.failed:
+ print("executing test failed", request.node.nodeid)
+
+
+if you then have failing tests:
+
+.. code-block:: python
+
+ # content of test_module.py
+
+ import pytest
+
+
+ @pytest.fixture
+ def other():
+ assert 0
+
+
+ def test_setup_fails(something, other):
+ pass
+
+
+ def test_call_fails(something):
+ assert 0
+
+
+ def test_fail2():
+ assert 0
+
+and run it:
+
+.. code-block:: pytest
+
+ $ pytest -s test_module.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 3 items
+
+ test_module.py Esetting up a test failed! test_module.py::test_setup_fails
+ Fexecuting test failed test_module.py::test_call_fails
+ F
+
+ ================================== ERRORS ==================================
+ ____________________ ERROR at setup of test_setup_fails ____________________
+
+ @pytest.fixture
+ def other():
+ > assert 0
+ E assert 0
+
+ test_module.py:7: AssertionError
+ ================================= FAILURES =================================
+ _____________________________ test_call_fails ______________________________
+
+ something = None
+
+ def test_call_fails(something):
+ > assert 0
+ E assert 0
+
+ test_module.py:15: AssertionError
+ ________________________________ test_fail2 ________________________________
+
+ def test_fail2():
+ > assert 0
+ E assert 0
+
+ test_module.py:19: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_module.py::test_call_fails - assert 0
+ FAILED test_module.py::test_fail2 - assert 0
+ ERROR test_module.py::test_setup_fails - assert 0
+ ======================== 2 failed, 1 error in 0.12s ========================
+
+You'll see that the fixture finalizers could use the precise reporting
+information.
+
+.. _pytest current test env:
+
+``PYTEST_CURRENT_TEST`` environment variable
+--------------------------------------------
+
+
+
+Sometimes a test session might get stuck and there might be no easy way to figure out
+which test got stuck, for example if pytest was run in quiet mode (``-q``) or you don't have access to the console
+output. This is particularly troublesome if the failure happens only sporadically, as with the famous "flaky" kind of tests.
+
+``pytest`` sets the :envvar:`PYTEST_CURRENT_TEST` environment variable when running tests, which can be inspected
+by process monitoring utilities or libraries like :pypi:`psutil` to discover which test got stuck if necessary:
+
+.. code-block:: python
+
+ import psutil
+
+ for pid in psutil.pids():
+ environ = psutil.Process(pid).environ()
+ if "PYTEST_CURRENT_TEST" in environ:
+ print(f'pytest process {pid} running: {environ["PYTEST_CURRENT_TEST"]}')
+
+During the test session pytest will set ``PYTEST_CURRENT_TEST`` to the current test
+:ref:`nodeid <nodeids>` and the current stage, which can be ``setup``, ``call``,
+or ``teardown``.
+
+For example, when running a single test function named ``test_foo`` from ``foo_module.py``,
+``PYTEST_CURRENT_TEST`` will be set to:
+
+#. ``foo_module.py::test_foo (setup)``
+#. ``foo_module.py::test_foo (call)``
+#. ``foo_module.py::test_foo (teardown)``
+
+In that order.
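+
+Since the variable is set in the environment of the test process itself, it can
+also be read from inside a test or fixture. A minimal sketch (the test name is
+illustrative):
+
+.. code-block:: python
+
+    import os
+
+
+    def test_show_current_test():
+        # read the variable pytest sets for the currently running test;
+        # inside the test body the stage is "(call)"
+        print(os.environ.get("PYTEST_CURRENT_TEST"))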
+
+.. note::
+
+    The content of ``PYTEST_CURRENT_TEST`` is meant to be human readable and the actual format
+    may change between releases (even in bug-fix releases), so it shouldn't be relied on for scripting
+    or automation.
+
+.. _freezing-pytest:
+
+Freezing pytest
+---------------
+
+If you freeze your application using a tool like
+`PyInstaller <https://pyinstaller.readthedocs.io>`_
+in order to distribute it to your end-users, it is a good idea to also package
+your test runner and run your tests using the frozen application. This way packaging
+errors such as dependencies not being included into the executable can be detected early
+while also allowing you to send test files to users so they can run them on their
+machines, which can be useful to obtain more information about a hard-to-reproduce bug.
+
+Fortunately recent ``PyInstaller`` releases already have a custom hook
+for pytest, but if you are using another tool to freeze executables
+such as ``cx_freeze`` or ``py2exe``, you can use ``pytest.freeze_includes()``
+to obtain the full list of internal pytest modules. How to configure the tools
+to find the internal modules varies from tool to tool, however.
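+
+As a rough sketch for ``cx_Freeze`` (the project name and script are
+illustrative, and the exact options depend on your setup):
+
+.. code-block:: python
+
+    # contents of setup.py
+    from cx_Freeze import Executable, setup
+
+    import pytest
+
+    setup(
+        name="app_main",
+        executables=[Executable("app_main.py")],
+        # make pytest's internal modules available in the frozen executable
+        options={"build_exe": {"includes": pytest.freeze_includes()}},
+    )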
+
+Instead of freezing the pytest runner as a separate executable, you can make
+your frozen program work as the pytest runner by some clever
+argument handling during program startup. This allows you to
+have a single executable, which is usually more convenient.
+Please note that the mechanism for plugin discovery used by pytest
+(setuptools entry points) doesn't work with frozen executables so pytest
+can't find any third party plugins automatically. To include third party plugins
+like ``pytest-timeout`` they must be imported explicitly and passed on to ``pytest.main``.
+
+.. code-block:: python
+
+ # contents of app_main.py
+ import sys
+ import pytest_timeout # Third party plugin
+
+ if len(sys.argv) > 1 and sys.argv[1] == "--pytest":
+ import pytest
+
+ sys.exit(pytest.main(sys.argv[2:], plugins=[pytest_timeout]))
+ else:
+ # normal application execution: at this point argv can be parsed
+ # by your argument-parsing library of choice as usual
+ ...
+
+
+This allows you to execute tests using the frozen
+application with standard ``pytest`` command-line options:
+
+.. code-block:: bash
+
+ ./app_main --pytest --verbose --tb=long --junitxml=results.xml test-suite/
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/special.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/special.rst
new file mode 100644
index 0000000000..ace37c7278
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/special.rst
@@ -0,0 +1,84 @@
+A session-fixture which can look at all collected tests
+----------------------------------------------------------------
+
+A session-scoped fixture effectively has access to all
+collected test items. Here is an example of a fixture
+function which walks all collected tests and checks whether
+their test class defines a ``callme`` method and, if so,
+calls it:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+ import pytest
+
+
+ @pytest.fixture(scope="session", autouse=True)
+ def callattr_ahead_of_alltests(request):
+ print("callattr_ahead_of_alltests called")
+ seen = {None}
+ session = request.node
+ for item in session.items:
+ cls = item.getparent(pytest.Class)
+ if cls not in seen:
+ if hasattr(cls.obj, "callme"):
+ cls.obj.callme()
+ seen.add(cls)
+
+Test classes may now define a ``callme`` method which
+will be called ahead of running any tests:
+
+.. code-block:: python
+
+ # content of test_module.py
+
+
+ class TestHello:
+ @classmethod
+ def callme(cls):
+ print("callme called!")
+
+ def test_method1(self):
+ print("test_method1 called")
+
+ def test_method2(self):
+ print("test_method2 called")
+
+
+ class TestOther:
+ @classmethod
+ def callme(cls):
+ print("callme other called")
+
+ def test_other(self):
+ print("test other")
+
+
+ # works with unittest as well ...
+ import unittest
+
+
+ class SomeTest(unittest.TestCase):
+ @classmethod
+        def callme(cls):
+ print("SomeTest callme called")
+
+ def test_unit1(self):
+ print("test_unit1 method called")
+
+If you run this without output capturing:
+
+.. code-block:: pytest
+
+ $ pytest -q -s test_module.py
+ callattr_ahead_of_alltests called
+ callme called!
+ callme other called
+ SomeTest callme called
+ test_method1 called
+ .test_method2 called
+ .test other
+ .test_unit1 method called
+ .
+ 4 passed in 0.12s
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/xfail_demo.py b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/xfail_demo.py
new file mode 100644
index 0000000000..01e6da1ad2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/example/xfail_demo.py
@@ -0,0 +1,38 @@
+import pytest
+
+xfail = pytest.mark.xfail
+
+
+@xfail
+def test_hello():
+ assert 0
+
+
+@xfail(run=False)
+def test_hello2():
+ assert 0
+
+
+@xfail("hasattr(os, 'sep')")
+def test_hello3():
+ assert 0
+
+
+@xfail(reason="bug 110")
+def test_hello4():
+ assert 0
+
+
+@xfail('pytest.__version__[0] != "17"')
+def test_hello5():
+ assert 0
+
+
+def test_hello6():
+ pytest.xfail("reason")
+
+
+@xfail(raises=IndexError)
+def test_hello7():
+ x = []
+ x[1] = 1
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/anatomy.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/anatomy.rst
new file mode 100644
index 0000000000..e86dd74251
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/anatomy.rst
@@ -0,0 +1,46 @@
+.. _test-anatomy:
+
+Anatomy of a test
+=================
+
+In the simplest terms, a test is meant to look at the result of a particular
+behavior, and make sure that result aligns with what you would expect.
+Behavior is not something that can be empirically measured, which is why writing
+tests can be challenging.
+
+"Behavior" is the way in which some system **acts in response** to a particular
+situation and/or stimuli. But exactly *how* or *why* something is done is not
+quite as important as *what* was done.
+
+You can think of a test as being broken down into four steps:
+
+1. **Arrange**
+2. **Act**
+3. **Assert**
+4. **Cleanup**
+
+**Arrange** is where we prepare everything for our test. This means pretty
+much everything except for the "**act**". It's lining up the dominoes so that
+the **act** can do its thing in one, state-changing step. This can mean
+preparing objects, starting/killing services, entering records into a database,
+or even things like defining a URL to query, generating some credentials for a
+user that doesn't exist yet, or just waiting for some process to finish.
+
+**Act** is the singular, state-changing action that kicks off the **behavior**
+we want to test. This behavior is what carries out the changing of the state of
+the system under test (SUT), and it's the resulting changed state that we can
+look at to make a judgement about the behavior. This typically takes the form of
+a function/method call.
+
+**Assert** is where we look at that resulting state and check if it looks how
+we'd expect after the dust has settled. It's where we gather evidence to say the
+behavior does or does not align with what we expect. The ``assert`` in our test
+is where we take that measurement/observation and apply our judgement to it. If
+something should be green, we'd say ``assert thing == "green"``.
+
+**Cleanup** is where the test picks up after itself, so other tests aren't being
+accidentally influenced by it.
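+
+As a minimal sketch (the function under test is just Python's ``str.upper``,
+chosen only for illustration), the four steps might look like this:
+
+.. code-block:: python
+
+    def test_uppercase_name():
+        # Arrange: prepare the input for the behavior under test
+        name = "alice"
+
+        # Act: the single, state-changing call we want to exercise
+        result = name.upper()
+
+        # Assert: check the resulting state against our expectation
+        assert result == "ALICE"
+
+        # Cleanup: nothing to do here, since no external state was touched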
+
+At its core, the test is ultimately the **act** and **assert** steps, with the
+**arrange** step only providing the context. **Behavior** exists between **act**
+and **assert**.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/fixtures.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/fixtures.rst
new file mode 100644
index 0000000000..194e576493
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/fixtures.rst
@@ -0,0 +1,174 @@
+.. _about-fixtures:
+
+About fixtures
+===============
+
+.. seealso:: :ref:`how-to-fixtures`
+.. seealso:: :ref:`Fixtures reference <reference-fixtures>`
+
+pytest fixtures are designed to be explicit, modular and scalable.
+
+What fixtures are
+-----------------
+
+In testing, a `fixture <https://en.wikipedia.org/wiki/Test_fixture#Software>`_
+provides a defined, reliable and consistent context for the tests. This could
+include environment (for example a database configured with known parameters)
+or content (such as a dataset).
+
+Fixtures define the steps and data that constitute the *arrange* phase of a
+test (see :ref:`test-anatomy`). In pytest, they are functions you define that
+serve this purpose. They can also be used to define a test's *act* phase; this
+is a powerful technique for designing more complex tests.
+
+The services, state, or other operating environments set up by fixtures are
+accessed by test functions through arguments. For each fixture used by a test
+function there is typically a parameter (named after the fixture) in the test
+function's definition.
+
+We can tell pytest that a particular function is a fixture by decorating it with
+:py:func:`@pytest.fixture <pytest.fixture>`. Here's a simple example of
+what a fixture in pytest might look like:
+
+.. code-block:: python
+
+ import pytest
+
+
+ class Fruit:
+ def __init__(self, name):
+ self.name = name
+
+ def __eq__(self, other):
+ return self.name == other.name
+
+
+ @pytest.fixture
+ def my_fruit():
+ return Fruit("apple")
+
+
+ @pytest.fixture
+ def fruit_basket(my_fruit):
+ return [Fruit("banana"), my_fruit]
+
+
+ def test_my_fruit_in_basket(my_fruit, fruit_basket):
+ assert my_fruit in fruit_basket
+
+Tests don't have to be limited to a single fixture, either. They can depend on
+as many fixtures as you want, and fixtures can use other fixtures, as well. This
+is where pytest's fixture system really shines.
+
+
+Improvements over xUnit-style setup/teardown functions
+-----------------------------------------------------------
+
+pytest fixtures offer dramatic improvements over the classic xUnit
+style of setup/teardown functions:
+
+* fixtures have explicit names and are activated by declaring their use
+ from test functions, modules, classes or whole projects.
+
+* fixtures are implemented in a modular manner, as each fixture name
+ triggers a *fixture function* which can itself use other fixtures.
+
+* fixture management scales from simple unit to complex
+  functional testing, allowing you to parametrize fixtures and tests according
+ to configuration and component options, or to re-use fixtures
+ across function, class, module or whole test session scopes.
+
+* teardown logic can be easily and safely managed, no matter how many fixtures
+  are used, without the need to carefully handle errors by hand or micromanage
+  the order in which cleanup steps are added (see the sketch below).
+
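+As a small sketch of that last point (the resource names are illustrative),
+a fixture can pair its setup and teardown around a ``yield``:
+
+.. code-block:: python
+
+    import pytest
+
+
+    @pytest.fixture
+    def connection():
+        conn = open("data.txt", "w")  # stand-in for an expensive resource
+        yield conn
+        # teardown runs after the test finishes, even if it failed
+        conn.close()
+
+
+    def test_write(connection):
+        connection.write("hello")
+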
+In addition, pytest continues to support :ref:`xunitsetup`. You can mix
+both styles, moving incrementally from classic to new style, as you
+prefer. You can also start out from existing :ref:`unittest.TestCase
+style <unittest.TestCase>` or :ref:`nose based <nosestyle>` projects.
+
+
+
+Fixture errors
+--------------
+
+pytest does its best to put all the fixtures for a given test in a linear order
+so that it can see which fixture happens first, second, third, and so on. If an
+earlier fixture has a problem, though, and raises an exception, pytest will stop
+executing fixtures for that test and mark the test as having an error.
+
+When a test is marked as having an error, it doesn't mean the test failed,
+though. It just means the test couldn't even be attempted because one of the
+things it depends on had a problem.
+
+This is one reason why it's a good idea to cut out as many unnecessary
+dependencies as possible for a given test. That way, a problem in something
+unrelated doesn't leave us with an incomplete picture of what may or may not
+have issues.
+
+Here's a quick example to help explain:
+
+.. code-block:: python
+
+ import pytest
+
+
+ @pytest.fixture
+ def order():
+ return []
+
+
+ @pytest.fixture
+ def append_first(order):
+ order.append(1)
+
+
+ @pytest.fixture
+ def append_second(order, append_first):
+ order.extend([2])
+
+
+ @pytest.fixture(autouse=True)
+ def append_third(order, append_second):
+ order += [3]
+
+
+ def test_order(order):
+ assert order == [1, 2, 3]
+
+
+If, for whatever reason, ``order.append(1)`` has a bug and raises an exception,
+we wouldn't be able to know if ``order.extend([2])`` or ``order += [3]`` would
+also have problems. After ``append_first`` throws an exception, pytest won't run
+any more fixtures for ``test_order``, and it won't even try to run
+``test_order`` itself. The only things that would've run would be ``order`` and
+``append_first``.
+
+
+Sharing test data
+-----------------
+
+If you want to make test data from files available to your tests, a good way
+to do this is to load the data in a fixture so your tests can use it.
+This makes use of the automatic caching mechanisms of pytest.
+
+Another good approach is to add the data files to the ``tests`` folder.
+There are also community plugins available to help manage this aspect of
+testing, e.g. :pypi:`pytest-datadir` and :pypi:`pytest-datafiles`.
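+
+A minimal sketch of the fixture approach, assuming a ``tests/data/users.json``
+file exists next to the ``conftest.py`` (both names are illustrative):
+
+.. code-block:: python
+
+    # content of tests/conftest.py
+    import json
+    import pathlib
+
+    import pytest
+
+    DATA_DIR = pathlib.Path(__file__).parent / "data"
+
+
+    @pytest.fixture(scope="session")
+    def users():
+        # load the shared test data once for the whole test session
+        with open(DATA_DIR / "users.json") as f:
+            return json.load(f)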
+
+.. _fixtures-signal-cleanup:
+
+A note about fixture cleanup
+----------------------------
+
+pytest does not do any special processing for :data:`SIGTERM <signal.SIGTERM>` and
+:data:`SIGQUIT <signal.SIGQUIT>` signals (:data:`SIGINT <signal.SIGINT>` is handled naturally
+by the Python runtime via :class:`KeyboardInterrupt`), so fixtures that manage external resources which need
+to be cleaned up when the Python process is terminated (by those signals) might leak resources.
+
+The reason pytest does not handle those signals to perform fixture cleanup is that signal handlers are global,
+and changing them might interfere with the code under execution.
+
+If fixtures in your suite need special care regarding termination in those scenarios,
+see :issue:`this comment <5243#issuecomment-491522595>` in the issue
+tracker for a possible workaround.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/flaky.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/flaky.rst
new file mode 100644
index 0000000000..50121c7a76
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/flaky.rst
@@ -0,0 +1,126 @@
+
+Flaky tests
+-----------
+
+A "flaky" test is one that exhibits intermittent or sporadic failure, that seems to have non-deterministic behaviour. Sometimes it passes, sometimes it fails, and it's not clear why. This page discusses pytest features that can help and other general strategies for identifying, fixing or mitigating them.
+
+Why flaky tests are a problem
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Flaky tests are particularly troublesome when a continuous integration (CI) server is being used, where all tests must pass before a new code change can be merged. If the test result is not a reliable signal (a test failure no longer reliably means the code change broke the test), developers can become mistrustful of the test results, which can lead to overlooking genuine failures. They are also a source of wasted time as developers must re-run test suites and investigate spurious failures.
+
+
+Potential root causes
+^^^^^^^^^^^^^^^^^^^^^
+
+System state
+~~~~~~~~~~~~
+
+Broadly speaking, a flaky test indicates that the test relies on some system state that is not being appropriately controlled - the test environment is not sufficiently isolated. Higher level tests are more likely to be flaky as they rely on more state.
+
+Flaky tests sometimes appear when a test suite is run in parallel (such as with pytest-xdist). This can indicate a test is reliant on test ordering.
+
+- Perhaps a different test is failing to clean up after itself and leaving behind data which causes the flaky test to fail.
+- Perhaps the flaky test relies on data from a previous test that doesn't clean up after itself, and in parallel runs that previous test is not always present.
+- Tests that modify global state typically cannot be run in parallel.
+
+
+Overly strict assertion
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Overly strict assertions can cause problems with floating point comparison as well as timing issues. :func:`pytest.approx` is useful here.
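+
+For example (a minimal sketch), comparing floats with a tolerance instead of
+exact equality avoids spurious failures from rounding:
+
+.. code-block:: python
+
+    import pytest
+
+
+    def test_sum_is_close():
+        # 0.1 + 0.2 is not exactly 0.3 in binary floating point
+        assert 0.1 + 0.2 == pytest.approx(0.3)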
+
+
+Pytest features
+^^^^^^^^^^^^^^^
+
+Xfail strict
+~~~~~~~~~~~~
+
+:ref:`pytest.mark.xfail ref` with ``strict=False`` can be used to mark a test so that its failure does not cause the whole build to break. This could be considered a form of manual quarantine, and is rather dangerous to use permanently.
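+
+A minimal sketch (the test and reason are illustrative):
+
+.. code-block:: python
+
+    import pytest
+
+
+    @pytest.mark.xfail(strict=False, reason="known flaky, under investigation")
+    def test_sometimes_fails():
+        ...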
+
+
+PYTEST_CURRENT_TEST
+~~~~~~~~~~~~~~~~~~~
+
+:envvar:`PYTEST_CURRENT_TEST` may be useful for figuring out "which test got stuck".
+See :ref:`pytest current test env` for more details.
+
+
+Plugins
+~~~~~~~
+
+Rerunning any failed tests can mitigate the negative effects of flaky tests by giving them additional chances to pass, so that the overall build does not fail. Several pytest plugins support this:
+
+* `flaky <https://github.com/box/flaky>`_
+* `pytest-flakefinder <https://github.com/dropbox/pytest-flakefinder>`_ - `blog post <https://blogs.dropbox.com/tech/2016/03/open-sourcing-pytest-tools/>`_
+* `pytest-rerunfailures <https://github.com/pytest-dev/pytest-rerunfailures>`_
+* `pytest-replay <https://github.com/ESSS/pytest-replay>`_: This plugin helps to reproduce locally crashes or flaky tests observed during CI runs.
+
+Plugins to deliberately randomize tests can help expose tests with state problems:
+
+* `pytest-random-order <https://github.com/jbasko/pytest-random-order>`_
+* `pytest-randomly <https://github.com/pytest-dev/pytest-randomly>`_
+
+
+Other general strategies
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+Split up test suites
+~~~~~~~~~~~~~~~~~~~~
+
+It is common to split a single test suite into two, such as unit vs integration, and only use the unit test suite as a CI gate. This also helps keep build times manageable as high level tests tend to be slower. However, it means it becomes possible for code that breaks the build to be merged, so extra vigilance is needed when monitoring the integration test results.
+
+
+Video/screenshot on failure
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For UI tests these are important for understanding what the state of the UI was when the test failed. pytest-splinter can be used with plugins like pytest-bdd and can `save a screenshot on test failure <https://pytest-splinter.readthedocs.io/en/latest/#automatic-screenshots-on-test-failure>`_, which can help to isolate the cause.
+
+
+Delete or rewrite the test
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If the functionality is covered by other tests, perhaps the test can be removed. If not, perhaps it can be rewritten at a lower level which will remove the flakiness or make its source more apparent.
+
+
+Quarantine
+~~~~~~~~~~
+
+Mark Lapierre discusses the `Pros and Cons of Quarantined Tests <https://dev.to/mlapierre/pros-and-cons-of-quarantined-tests-2emj>`_ in a post from 2018.
+
+
+
+CI tools that rerun on failure
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Azure Pipelines (the Azure cloud CI/CD tool, formerly Visual Studio Team Services or VSTS) has a feature to `identify flaky tests <https://docs.microsoft.com/en-us/azure/devops/release-notes/2017/dec-11-vsts#identify-flaky-tests>`_ and rerun failed tests.
+
+
+
+Research
+^^^^^^^^
+
+This is a limited list, please submit an issue or pull request to expand it!
+
+* Gao, Zebao, Yalan Liang, Myra B. Cohen, Atif M. Memon, and Zhen Wang. "Making system user interactive tests repeatable: When and what should we control?." In *Software Engineering (ICSE), 2015 IEEE/ACM 37th IEEE International Conference on*, vol. 1, pp. 55-65. IEEE, 2015. `PDF <http://www.cs.umd.edu/~atif/pubs/gao-icse15.pdf>`__
+* Palomba, Fabio, and Andy Zaidman. "Does refactoring of test smells induce fixing flaky tests?." In *Software Maintenance and Evolution (ICSME), 2017 IEEE International Conference on*, pp. 1-12. IEEE, 2017. `PDF in Google Drive <https://drive.google.com/file/d/10HdcCQiuQVgW3yYUJD-TSTq1NbYEprl0/view>`__
+* Bell, Jonathan, Owolabi Legunsen, Michael Hilton, Lamyaa Eloussi, Tifany Yung, and Darko Marinov. "DeFlaker: Automatically detecting flaky tests." In *Proceedings of the 2018 International Conference on Software Engineering*. 2018. `PDF <https://www.jonbell.net/icse18-deflaker.pdf>`__
+
+
+Resources
+^^^^^^^^^
+
+* `Eradicating Non-Determinism in Tests <https://martinfowler.com/articles/nonDeterminism.html>`_ by Martin Fowler, 2011
+* `No more flaky tests on the Go team <https://www.thoughtworks.com/insights/blog/no-more-flaky-tests-go-team>`_ by Pavan Sudarshan, 2012
+* `The Build That Cried Broken: Building Trust in your Continuous Integration Tests <https://www.youtube.com/embed/VotJqV4n8ig>`_ talk (video) by `Angie Jones <https://angiejones.tech/>`_ at SeleniumConf Austin 2017
+* `Test and Code Podcast: Flaky Tests and How to Deal with Them <https://testandcode.com/50>`_ by Brian Okken and Anthony Shaw, 2018
+* Microsoft:
+
+ * `How we approach testing VSTS to enable continuous delivery <https://blogs.msdn.microsoft.com/bharry/2017/06/28/testing-in-a-cloud-delivery-cadence/>`_ by Brian Harry MS, 2017
+ * `Eliminating Flaky Tests <https://docs.microsoft.com/en-us/azure/devops/learn/devops-at-microsoft/eliminating-flaky-tests>`_ blog and talk (video) by Munil Shah, 2017
+
+* Google:
+
+ * `Flaky Tests at Google and How We Mitigate Them <https://testing.googleblog.com/2016/05/flaky-tests-at-google-and-how-we.html>`_ by John Micco, 2016
+ * `Where do Google's flaky tests come from? <https://testing.googleblog.com/2017/04/where-do-our-flaky-tests-come-from.html>`_ by Jeff Listfield, 2017
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/goodpractices.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/goodpractices.rst
new file mode 100644
index 0000000000..32a14991ae
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/goodpractices.rst
@@ -0,0 +1,288 @@
+.. highlight:: python
+.. _`goodpractices`:
+
+Good Integration Practices
+=================================================
+
+Install package with pip
+-------------------------------------------------
+
+For development, we recommend you use :mod:`venv` for virtual environments and
+:doc:`pip:index` for installing your application and any dependencies,
+as well as the ``pytest`` package itself.
+This ensures your code and dependencies are isolated from your system Python installation.
+
+Next, place a ``pyproject.toml`` file in the root of your package:
+
+.. code-block:: toml
+
+ [build-system]
+ requires = ["setuptools>=42", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+and a ``setup.cfg`` file containing your package's metadata with the following minimum content:
+
+.. code-block:: ini
+
+ [metadata]
+ name = PACKAGENAME
+
+ [options]
+ packages = find:
+
+where ``PACKAGENAME`` is the name of your package.
+
+.. note::
+
+ If your pip version is older than ``21.3``, you'll also need a ``setup.py`` file:
+
+ .. code-block:: python
+
+ from setuptools import setup
+
+ setup()
+
+You can then install your package in "editable" mode by running from the same directory:
+
+.. code-block:: bash
+
+ pip install -e .
+
+which lets you change your source code (both tests and application) and rerun tests at will.
+
+.. _`test discovery`:
+.. _`Python test discovery`:
+
+Conventions for Python test discovery
+-------------------------------------------------
+
+``pytest`` implements the following standard test discovery:
+
+* If no arguments are specified then collection starts from :confval:`testpaths`
+ (if configured) or the current directory. Alternatively, command line arguments
+ can be used in any combination of directories, file names or node ids.
+* Recurse into directories, unless they match :confval:`norecursedirs`.
+* In those directories, search for ``test_*.py`` or ``*_test.py`` files, imported by their `test package name`_.
+* From those files, collect test items:
+
+ * ``test`` prefixed test functions or methods outside of class
+ * ``test`` prefixed test functions or methods inside ``Test`` prefixed test classes (without an ``__init__`` method)
+
+For examples of how to customize your test discovery, see :doc:`/example/pythoncollection`.
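+
+For instance, a minimal sketch of restricting collection to a ``tests``
+directory via :confval:`testpaths` (the directory name is illustrative):
+
+.. code-block:: ini
+
+    # content of pytest.ini
+    [pytest]
+    testpaths = tests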
+
+Within Python modules, ``pytest`` also discovers tests using the standard
+:ref:`unittest.TestCase <unittest.TestCase>` subclassing technique.
+
+
+Choosing a test layout / import rules
+-------------------------------------
+
+``pytest`` supports two common test layouts:
+
+Tests outside application code
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Putting tests into an extra directory outside your actual application code
+might be useful if you have many functional tests or for other reasons want
+to keep tests separate from actual application code (often a good idea):
+
+.. code-block:: text
+
+ pyproject.toml
+ setup.cfg
+ mypkg/
+ __init__.py
+ app.py
+ view.py
+ tests/
+ test_app.py
+ test_view.py
+ ...
+
+This has the following benefits:
+
+* Your tests can run against an installed version after executing ``pip install .``.
+* Your tests can run against the local copy with an editable install after executing ``pip install --editable .``.
+* If you don't use an editable install and are relying on the fact that Python by default puts the current
+ directory in ``sys.path`` to import your package, you can execute ``python -m pytest`` to execute the tests against the
+ local copy directly, without using ``pip``.
+
+.. note::
+
+ See :ref:`pytest vs python -m pytest` for more information about the difference between calling ``pytest`` and
+ ``python -m pytest``.
+
+Note that this scheme has a drawback if you are using ``prepend`` :ref:`import mode <import-modes>`
+(which is the default): your test files must have **unique names**, because
+``pytest`` will import them as *top-level* modules since there are no packages
+to derive a full package name from. In other words, the test files in the example above will
+be imported as ``test_app`` and ``test_view`` top-level modules by adding ``tests/`` to
+``sys.path``.
+
+If you need to have test modules with the same name, you might add ``__init__.py`` files to your
+``tests`` folder and subfolders, changing them to packages:
+
+.. code-block:: text
+
+ pyproject.toml
+ setup.cfg
+ mypkg/
+ ...
+ tests/
+ __init__.py
+ foo/
+ __init__.py
+ test_view.py
+ bar/
+ __init__.py
+ test_view.py
+
+Now pytest will load the modules as ``tests.foo.test_view`` and ``tests.bar.test_view``, allowing
+you to have modules with the same name. But now this introduces a subtle problem: in order to load
+the test modules from the ``tests`` directory, pytest prepends the root of the repository to
+``sys.path``, which adds the side-effect that now ``mypkg`` is also importable.
+
+This is problematic if you are using a tool like `tox`_ to test your package in a virtual environment,
+because you want to test the *installed* version of your package, not the local code from the repository.
+
+.. _`src-layout`:
+
+In this situation, it is **strongly** suggested to use a ``src`` layout where the application root package resides in a
+sub-directory of your root:
+
+.. code-block:: text
+
+ pyproject.toml
+ setup.cfg
+ src/
+ mypkg/
+ __init__.py
+ app.py
+ view.py
+ tests/
+ __init__.py
+ foo/
+ __init__.py
+ test_view.py
+ bar/
+ __init__.py
+ test_view.py
+
+
+This layout prevents a lot of common pitfalls and has many benefits, which are better explained in this excellent
+`blog post by Ionel Cristian Mărieș <https://blog.ionelmc.ro/2014/05/25/python-packaging/#the-structure>`_.
+
+.. note::
+ The new ``--import-mode=importlib`` (see :ref:`import-modes`) doesn't have
+ any of the drawbacks above because ``sys.path`` is not changed when importing
+ test modules, so users that run
+ into this issue are strongly encouraged to try it and report if the new option works well for them.
+
+ The ``src`` directory layout is still strongly recommended however.
+
+
+Tests as part of application code
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Inlining test directories into your application package
+is useful if you have a direct relation between tests and application modules and
+want to distribute them along with your application:
+
+.. code-block:: text
+
+ pyproject.toml
+ setup.cfg
+ mypkg/
+ __init__.py
+ app.py
+ view.py
+ test/
+ __init__.py
+ test_app.py
+ test_view.py
+ ...
+
+In this scheme, it is easy to run your tests using the ``--pyargs`` option:
+
+.. code-block:: bash
+
+ pytest --pyargs mypkg
+
+``pytest`` will discover where ``mypkg`` is installed and collect tests from there.
+
+Note that this layout also works in conjunction with the ``src`` layout mentioned in the previous section.
+
+
+.. note::
+
+ You can use namespace packages (PEP420) for your application
+ but pytest will still perform `test package name`_ discovery based on the
+ presence of ``__init__.py`` files. If you use one of the
+    two recommended file system layouts above but leave out the ``__init__.py``
+    files from your directories, it should just work. From
+    "inlined tests", however, you will need to use absolute imports for
+    accessing your application code.
+
+.. _`test package name`:
+
+.. note::
+
+ In ``prepend`` and ``append`` import-modes, if pytest finds a ``"a/b/test_module.py"``
+ test file while recursing into the filesystem it determines the import name
+ as follows:
+
+ * determine ``basedir``: this is the first "upward" (towards the root)
+ directory not containing an ``__init__.py``. If e.g. both ``a``
+ and ``b`` contain an ``__init__.py`` file then the parent directory
+ of ``a`` will become the ``basedir``.
+
+ * perform ``sys.path.insert(0, basedir)`` to make the test module
+ importable under the fully qualified import name.
+
+ * ``import a.b.test_module`` where the path is determined
+ by converting path separators ``/`` into "." characters. This means
+ you must follow the convention of having directory and file
+ names map directly to the import names.
+
+    The reason for this somewhat involved importing technique is
+ that in larger projects multiple test modules might import
+ from each other and thus deriving a canonical import name helps
+ to avoid surprises such as a test module getting imported twice.
+
+ With ``--import-mode=importlib`` things are less convoluted because
+ pytest doesn't need to change ``sys.path`` or ``sys.modules``, making things
+ much less surprising.
+
+
+.. _`buildout`: http://www.buildout.org/en/latest/
+
+.. _`use tox`:
+
+tox
+---
+
+Once you are done with your work and want to make sure that your actual
+package passes all tests you may want to look into :doc:`tox <tox:index>`, the
+virtualenv test automation tool and its :doc:`pytest support <tox:example/pytest>`.
+tox helps you set up virtualenv environments with pre-defined
+dependencies and then execute a pre-configured test command with
+options. It will run tests against the installed package and not
+against your source code checkout, helping to detect packaging
+glitches.
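+
+A minimal ``tox.ini`` sketch (the Python version and dependencies are
+illustrative, not a recommendation):
+
+.. code-block:: ini
+
+    # content of tox.ini
+    [tox]
+    envlist = py39
+
+    [testenv]
+    deps = pytest
+    commands = pytest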
+
+Do not run via setuptools
+-------------------------
+
+Integration with setuptools is **not recommended**:
+you should not be using ``python setup.py test`` or ``pytest-runner``,
+as they may stop working in the future.
+
+This is deprecated since it depends on deprecated features of setuptools
+and relies on features that break security mechanisms in pip.
+For example, ``setup_requires`` and ``tests_require`` bypass ``pip --require-hashes``.
+For more information and migration instructions,
+see the `pytest-runner notice <https://github.com/pytest-dev/pytest-runner#deprecation-notice>`_.
+See also `pypa/setuptools#1684 <https://github.com/pypa/setuptools/issues/1684>`_.
+
+setuptools intends to
+`remove the test command <https://github.com/pypa/setuptools/issues/931>`_.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/index.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/index.rst
new file mode 100644
index 0000000000..53910f1eb7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/index.rst
@@ -0,0 +1,15 @@
+:orphan:
+
+.. _explanation:
+
+Explanation
+================
+
+.. toctree::
+ :maxdepth: 1
+
+ anatomy
+ fixtures
+ goodpractices
+ flaky
+ pythonpath
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/pythonpath.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/pythonpath.rst
new file mode 100644
index 0000000000..2330356b86
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/explanation/pythonpath.rst
@@ -0,0 +1,133 @@
+.. _pythonpath:
+
+pytest import mechanisms and ``sys.path``/``PYTHONPATH``
+========================================================
+
+.. _`import-modes`:
+
+Import modes
+------------
+
+pytest as a testing framework needs to import test modules and ``conftest.py`` files for execution.
+
+Importing files in Python (at least until recently) is a non-trivial process, often requiring
+changing :data:`sys.path`. Some aspects of the
+import process can be controlled through the ``--import-mode`` command-line flag, which can assume
+these values:
+
+* ``prepend`` (default): the directory path containing each module will be inserted into the *beginning*
+ of :py:data:`sys.path` if not already there, and then imported with the :func:`__import__ <__import__>` builtin.
+
+  This requires test module names to be unique when the test directory tree is not arranged in
+  packages, because the modules will be put in :py:data:`sys.modules` after importing.
+
+  This is the classic mechanism, dating back to the time when Python 2 was still supported.
+
+* ``append``: the directory containing each module is appended to the end of :py:data:`sys.path` if not already
+ there, and imported with ``__import__``.
+
+  This makes it easier to run test modules against installed versions of a package even if the
+ package under test has the same import root. For example:
+
+ ::
+
+ testing/__init__.py
+ testing/test_pkg_under_test.py
+ pkg_under_test/
+
+ the tests will run against the installed version
+ of ``pkg_under_test`` when ``--import-mode=append`` is used whereas
+ with ``prepend`` they would pick up the local version. This kind of confusion is why
+ we advocate for using :ref:`src <src-layout>` layouts.
+
+  As with ``prepend``, this requires test module names to be unique when the test directory tree is
+  not arranged in packages, because the modules will be put in :py:data:`sys.modules` after importing.
+
+* ``importlib``: new in pytest-6.0, this mode uses :mod:`importlib` to import test modules. This gives full control over the import process, and doesn't require changing :py:data:`sys.path`.
+
+ For this reason, test module names do not need to be unique, but this also makes test
+ modules non-importable by each other.
+
+ We intend to make ``importlib`` the default in future releases, depending on feedback.
+
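+One way to opt into ``importlib`` mode project-wide is through an ini file; a minimal
+sketch (putting the flag in ``addopts`` is just one possible setup):
+
+.. code-block:: ini
+
+    # content of pytest.ini
+    [pytest]
+    addopts = --import-mode=importlib
+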
+``prepend`` and ``append`` import modes scenarios
+-------------------------------------------------
+
+Here's a list of scenarios when using ``prepend`` or ``append`` import modes where pytest needs to
+change ``sys.path`` in order to import test modules or ``conftest.py`` files, and the issues users
+might encounter because of that.
+
+Test modules / ``conftest.py`` files inside packages
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Consider this file and directory layout::
+
+ root/
+ |- foo/
+ |- __init__.py
+ |- conftest.py
+ |- bar/
+ |- __init__.py
+ |- tests/
+ |- __init__.py
+ |- test_foo.py
+
+
+When executing:
+
+.. code-block:: bash
+
+ pytest root/
+
+pytest will find ``foo/bar/tests/test_foo.py`` and realize it is part of a package given that
+there's an ``__init__.py`` file in the same folder. It will then search upwards until it can find the
+last folder which still contains an ``__init__.py`` file in order to find the package *root* (in
+this case ``foo/``). To load the module, it will insert ``root/`` to the front of
+``sys.path`` (if not there already) in order to load
+``test_foo.py`` as the *module* ``foo.bar.tests.test_foo``.
+
+The same logic applies to the ``conftest.py`` file: it will be imported as ``foo.conftest`` module.
+
+Preserving the full package name is important when tests live in a package to avoid problems
+and allow test modules to have duplicated names. This is also discussed in detail in
+:ref:`test discovery`.
+
+Standalone test modules / ``conftest.py`` files
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Consider this file and directory layout::
+
+ root/
+ |- foo/
+ |- conftest.py
+ |- bar/
+ |- tests/
+ |- test_foo.py
+
+
+When executing:
+
+.. code-block:: bash
+
+ pytest root/
+
+pytest will find ``foo/bar/tests/test_foo.py`` and realize it is NOT part of a package given that
+there's no ``__init__.py`` file in the same folder. It will then add ``root/foo/bar/tests`` to
+``sys.path`` in order to import ``test_foo.py`` as the *module* ``test_foo``. The same is done
+with the ``conftest.py`` file by adding ``root/foo`` to ``sys.path`` to import it as ``conftest``.
+
+For this reason this layout cannot have test modules with the same name, as they will all be
+imported into the global import namespace.
+
+This is also discussed in detail in :ref:`test discovery`.
+
+.. _`pytest vs python -m pytest`:
+
+Invoking ``pytest`` versus ``python -m pytest``
+-----------------------------------------------
+
+Running pytest with ``pytest [...]`` instead of ``python -m pytest [...]`` yields nearly
+equivalent behavior, except that the latter will add the current directory to ``sys.path``, which
+is standard ``python`` behavior.
+
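+For illustration, a minimal sketch (``mypkg`` stands for a hypothetical local package in
+the current directory that is not installed):
+
+.. code-block:: bash
+
+    python -m pytest   # the current directory ends up on sys.path, so "import mypkg" works
+    pytest             # the current directory is not added automatically
+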
+See also :ref:`invoke-python`.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/funcarg_compare.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/funcarg_compare.rst
new file mode 100644
index 0000000000..3bf4527cfb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/funcarg_compare.rst
@@ -0,0 +1,230 @@
+:orphan:
+
+.. _`funcargcompare`:
+
+pytest-2.3: reasoning for fixture/funcarg evolution
+=============================================================
+
+**Target audience**: Reading this document requires basic knowledge of
+python testing, xUnit setup methods and the (previous) basic pytest
+funcarg mechanism, see :ref:`historical funcargs and pytest.funcargs`.
+If you are new to pytest, then you can simply ignore this
+section and read the other sections.
+
+.. currentmodule:: _pytest
+
+Shortcomings of the previous ``pytest_funcarg__`` mechanism
+--------------------------------------------------------------
+
+The pre-pytest-2.3 funcarg mechanism calls a factory each time a
+funcarg for a test function is required. If a factory wants to
+re-use a resource across different scopes, it often uses
+the ``request.cached_setup()`` helper to manage caching of
+resources. Here is a basic example of how we could implement
+a per-session Database object:
+
+.. code-block:: python
+
+ # content of conftest.py
+ class Database:
+ def __init__(self):
+ print("database instance created")
+
+ def destroy(self):
+ print("database instance destroyed")
+
+
+ def pytest_funcarg__db(request):
+ return request.cached_setup(
+ setup=Database, teardown=lambda db: db.destroy(), scope="session"
+ )
+
+There are several limitations and difficulties with this approach:
+
+1. Scoping funcarg resource creation is not straightforward; instead one must
+ understand the intricate ``cached_setup()`` method mechanics.
+
+2. Parametrizing the "db" resource is not straightforward:
+ you need to apply a "parametrize" decorator or implement a
+ :py:func:`~hookspec.pytest_generate_tests` hook
+ calling :py:func:`~pytest.Metafunc.parametrize` which
+ performs parametrization at the places where the resource
+ is used. Moreover, you need to modify the factory to use an
+ ``extrakey`` parameter containing ``request.param`` to the
+ ``Request.cached_setup`` call.
+
+3. Multiple parametrized session-scoped resources will be active
+ at the same time, making it hard for them to affect global state
+ of the application under test.
+
+4. There is no way to make use of funcarg factories
+ in xUnit setup methods.
+
+5. A non-parametrized fixture function cannot use a parametrized
+ funcarg resource if it isn't stated in the test function signature.
+
+All of these limitations are addressed with pytest-2.3 and its
+improved :ref:`fixture mechanism <fixture>`.
+
+
+Direct scoping of fixture/funcarg factories
+--------------------------------------------------------
+
+Instead of calling cached_setup() with a cache scope, you can use the
+:ref:`@pytest.fixture <pytest.fixture>` decorator and directly state
+the scope:
+
+.. code-block:: python
+
+ @pytest.fixture(scope="session")
+ def db(request):
+ # the factory will only be invoked once per session
+ db = DataBase()
+ request.addfinalizer(db.destroy) # destroy when session is finished
+ return db
+
+This factory implementation does not need to call ``cached_setup()`` anymore
+because it will only be invoked once per session. Moreover, the
+``request.addfinalizer()`` registers a finalizer according to the specified
+resource scope on which the factory function is operating.
+
+
+Direct parametrization of funcarg resource factories
+----------------------------------------------------------
+
+Previously, funcarg factories could not directly cause parametrization.
+You needed to specify a ``@parametrize`` decorator on your test function
+or implement a ``pytest_generate_tests`` hook to perform
+parametrization, i.e. calling a test multiple times with different value
+sets. pytest-2.3 introduces a decorator for use on the factory itself:
+
+.. code-block:: python
+
+ @pytest.fixture(params=["mysql", "pg"])
+ def db(request):
+ ... # use request.param
+
+Here the factory will be invoked twice (with the respective "mysql"
+and "pg" values set as ``request.param`` attributes) and all of
+the tests requiring "db" will run twice as well. The "mysql" and
+"pg" values will also be used for reporting the test-invocation variants.
+
+This new way of parametrizing funcarg factories should in many cases
+allow re-using already written factories because effectively
+``request.param`` was already used when test functions/classes were
+parametrized via
+:py:func:`metafunc.parametrize(indirect=True) <pytest.Metafunc.parametrize>` calls.
+
+Of course it's perfectly fine to combine parametrization and scoping:
+
+.. code-block:: python
+
+ @pytest.fixture(scope="session", params=["mysql", "pg"])
+ def db(request):
+ if request.param == "mysql":
+ db = MySQL()
+ elif request.param == "pg":
+ db = PG()
+ request.addfinalizer(db.destroy) # destroy when session is finished
+ return db
+
+This would execute all tests requiring the per-session "db" resource twice,
+receiving the values created by the two respective invocations to the
+factory function.
+
+
+No ``pytest_funcarg__`` prefix when using @fixture decorator
+-------------------------------------------------------------------
+
+When using the ``@fixture`` decorator the name of the function
+denotes the name under which the resource can be accessed as a function
+argument:
+
+.. code-block:: python
+
+ @pytest.fixture()
+ def db(request):
+ ...
+
+The name under which the funcarg resource can be requested is ``db``.
+
+You can still use the "old" non-decorator way of specifying funcarg factories,
+for example:
+
+.. code-block:: python
+
+ def pytest_funcarg__db(request):
+ ...
+
+
+But it is then not possible to define scoping and parametrization.
+It is thus recommended to use the factory decorator.
+
+
+solving per-session setup / autouse fixtures
+--------------------------------------------------------------
+
+pytest for a long time offered a ``pytest_configure`` and a ``pytest_sessionstart``
+hook which are often used to set up global resources. This approach suffers from
+several problems:
+
+1. In distributed testing the managing process would set up test resources
+ that are never needed because it only co-ordinates the test run
+ activities of the worker processes.
+
+2. If you only perform a collection (with ``--collect-only``),
+ resource setup will still be executed.
+
+3. If a ``pytest_sessionstart`` hook is contained in some subdirectory's
+ ``conftest.py`` file, it will not be called. This stems from the
+ fact that this hook is actually used for reporting, in particular
+ the test-header with platform/custom information.
+
+Moreover, it was not easy to define a scoped setup from plugins or
+conftest files other than by implementing a ``pytest_runtest_setup()`` hook
+and taking care of scoping/caching yourself. And it's virtually impossible
+to do this with parametrization as ``pytest_runtest_setup()`` is called
+during test execution and parametrization happens at collection time.
+
+It follows that pytest_configure/session/runtest_setup are often not
+appropriate for implementing common fixture needs. Therefore,
+pytest-2.3 introduces :ref:`autouse fixtures` which fully
+integrate with the generic :ref:`fixture mechanism <fixture>`
+and obsolete many prior uses of pytest hooks.
+
+funcargs/fixture discovery now happens at collection time
+---------------------------------------------------------------------
+
+Since pytest-2.3, discovery of fixture/funcarg factories is taken care of
+at collection time. This is more efficient, especially for large test suites.
+Moreover, a call to "pytest --collect-only" should in the future be able to
+show a lot of setup information and thus present a nice way to get an
+overview of fixture management in your project.
+
+.. _`compatibility notes`:
+
+.. _`funcargscompat`:
+
+Conclusion and compatibility notes
+---------------------------------------------------------
+
+**funcargs** were originally introduced in pytest-2.0. In pytest-2.3
+the mechanism was extended and refined and is now described as
+fixtures:
+
+* previously funcarg factories were specified with a special
+ ``pytest_funcarg__NAME`` prefix instead of using the
+ ``@pytest.fixture`` decorator.
+
+* Factories received a ``request`` object which managed caching through
+ ``request.cached_setup()`` calls and allowed using other funcargs via
+ ``request.getfuncargvalue()`` calls. These intricate APIs made it hard
+ to do proper parametrization and implement resource caching. The
+ new :py:func:`pytest.fixture` decorator allows you to declare the scope
+ and lets pytest figure things out for you.
+
+* if you used parametrization and funcarg factories which made use of
+ ``request.cached_setup()``, it is recommended to invest a few minutes
+ and simplify your fixture function code to use the :ref:`@pytest.fixture`
+ decorator instead. This will also allow you to take advantage of
+ the automatic per-resource grouping of tests.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/funcargs.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/funcargs.rst
new file mode 100644
index 0000000000..4173675cdd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/funcargs.rst
@@ -0,0 +1,13 @@
+
+=======================================================
+funcargs: resource injection and parametrization
+=======================================================
+
+pytest-2.3 introduces major refinements to fixture management
+of which the funcarg mechanism introduced with pytest-2.0 remains
+a core part. The documentation has been refactored as well
+and you can read on here:
+
+- :ref:`fixtures`
+- :ref:`parametrize`
+- :ref:`funcargcompare`
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/getting-started.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/getting-started.rst
new file mode 100644
index 0000000000..5d13a76806
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/getting-started.rst
@@ -0,0 +1,257 @@
+.. _get-started:
+
+Get Started
+===================================
+
+.. _`getstarted`:
+.. _`installation`:
+
+Install ``pytest``
+----------------------------------------
+
+``pytest`` requires: Python 3.6, 3.7, 3.8, 3.9, or PyPy3.
+
+1. Run the following command in your command line:
+
+.. code-block:: bash
+
+ pip install -U pytest
+
+2. Check that you installed the correct version:
+
+.. code-block:: bash
+
+ $ pytest --version
+ pytest 7.0.1
+
+.. _`simpletest`:
+
+Create your first test
+----------------------------------------------------------
+
+Create a new file called ``test_sample.py``, containing a function, and a test:
+
+.. code-block:: python
+
+ # content of test_sample.py
+ def func(x):
+ return x + 1
+
+
+ def test_answer():
+ assert func(3) == 5
+
+Now run the test:
+
+.. code-block:: pytest
+
+ $ pytest
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 1 item
+
+ test_sample.py F [100%]
+
+ ================================= FAILURES =================================
+ _______________________________ test_answer ________________________________
+
+ def test_answer():
+ > assert func(3) == 5
+ E assert 4 == 5
+ E + where 4 = func(3)
+
+ test_sample.py:6: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_sample.py::test_answer - assert 4 == 5
+ ============================ 1 failed in 0.12s =============================
+
+The ``[100%]`` refers to the overall progress of running all test cases. After it finishes, pytest then shows a failure report because ``func(3)`` does not return ``5``.
+
+.. note::
+
+ You can use the ``assert`` statement to verify test expectations. pytest’s :ref:`Advanced assertion introspection <python:assert>` will intelligently report intermediate values of the assert expression so you can avoid the many names :ref:`of JUnit legacy methods <testcase-objects>`.
+
+Run multiple tests
+----------------------------------------------------------
+
+``pytest`` will run all files of the form test_*.py or \*_test.py in the current directory and its subdirectories. More generally, it follows :ref:`standard test discovery rules <test discovery>`.
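+
+For example, a run can be narrowed down to a directory, a file, or a single test
+(``tests/`` and ``test_mod.py`` below are placeholder names):
+
+.. code-block:: bash
+
+    pytest                            # run tests in the current directory and below
+    pytest tests/                     # run tests under a specific directory
+    pytest tests/test_mod.py          # run tests in a single module
+    pytest tests/test_mod.py::test_a  # run a single test by its node id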
+
+
+Assert that a certain exception is raised
+--------------------------------------------------------------
+
+Use the :ref:`raises <assertraises>` helper to assert that some code raises an exception:
+
+.. code-block:: python
+
+ # content of test_sysexit.py
+ import pytest
+
+
+ def f():
+ raise SystemExit(1)
+
+
+ def test_mytest():
+ with pytest.raises(SystemExit):
+ f()
+
+Execute the test function with “quiet” reporting mode:
+
+.. code-block:: pytest
+
+ $ pytest -q test_sysexit.py
+ . [100%]
+ 1 passed in 0.12s
+
+.. note::
+
+ The ``-q/--quiet`` flag keeps the output brief in this and following examples.
+
+Group multiple tests in a class
+--------------------------------------------------------------
+
+.. regendoc:wipe
+
+Once you develop multiple tests, you may want to group them into a class. pytest makes it easy to create a class containing more than one test:
+
+.. code-block:: python
+
+ # content of test_class.py
+ class TestClass:
+ def test_one(self):
+ x = "this"
+ assert "h" in x
+
+ def test_two(self):
+ x = "hello"
+ assert hasattr(x, "check")
+
+``pytest`` discovers all tests following its :ref:`Conventions for Python test discovery <test discovery>`, so it finds both ``test_`` prefixed functions. There is no need to subclass anything, but make sure to prefix your class with ``Test`` otherwise the class will be skipped. We can simply run the module by passing its filename:
+
+.. code-block:: pytest
+
+ $ pytest -q test_class.py
+ .F [100%]
+ ================================= FAILURES =================================
+ ____________________________ TestClass.test_two ____________________________
+
+ self = <test_class.TestClass object at 0xdeadbeef0001>
+
+ def test_two(self):
+ x = "hello"
+ > assert hasattr(x, "check")
+ E AssertionError: assert False
+ E + where False = hasattr('hello', 'check')
+
+ test_class.py:8: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_class.py::TestClass::test_two - AssertionError: assert False
+ 1 failed, 1 passed in 0.12s
+
+The first test passed and the second failed. You can easily see the intermediate values in the assertion to help you understand the reason for the failure.
+
+Grouping tests in classes can be beneficial for the following reasons:
+
+ * Test organization
+ * Sharing fixtures for tests only in that particular class
+ * Applying marks at the class level and having them implicitly apply to all tests
+
+Something to be aware of when grouping tests inside classes is that each test has a unique instance of the class.
+Having each test share the same class instance would be very detrimental to test isolation and would promote poor test practices.
+This is outlined below:
+
+.. regendoc:wipe
+
+.. code-block:: python
+
+ # content of test_class_demo.py
+ class TestClassDemoInstance:
+ value = 0
+
+ def test_one(self):
+ self.value = 1
+ assert self.value == 1
+
+ def test_two(self):
+ assert self.value == 1
+
+
+.. code-block:: pytest
+
+ $ pytest -k TestClassDemoInstance -q
+ .F [100%]
+ ================================= FAILURES =================================
+ ______________________ TestClassDemoInstance.test_two ______________________
+
+ self = <test_class_demo.TestClassDemoInstance object at 0xdeadbeef0002>
+
+ def test_two(self):
+ > assert self.value == 1
+ E assert 0 == 1
+ E + where 0 = <test_class_demo.TestClassDemoInstance object at 0xdeadbeef0002>.value
+
+ test_class_demo.py:9: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_class_demo.py::TestClassDemoInstance::test_two - assert 0 == 1
+ 1 failed, 1 passed in 0.12s
+
+Note that attributes added at class level are *class attributes*, so they will be shared between tests.
+
+Request a unique temporary directory for functional tests
+--------------------------------------------------------------
+
+``pytest`` provides :std:doc:`Builtin fixtures/function arguments <builtin>` to request arbitrary resources, like a unique temporary directory:
+
+.. code-block:: python
+
+ # content of test_tmp_path.py
+ def test_needsfiles(tmp_path):
+ print(tmp_path)
+ assert 0
+
+List the name ``tmp_path`` in the test function signature and ``pytest`` will look up and call a fixture factory to create the resource before performing the test function call. Before the test runs, ``pytest`` creates a unique-per-test-invocation temporary directory:
+
+.. code-block:: pytest
+
+ $ pytest -q test_tmp_path.py
+ F [100%]
+ ================================= FAILURES =================================
+ _____________________________ test_needsfiles ______________________________
+
+ tmp_path = PosixPath('PYTEST_TMPDIR/test_needsfiles0')
+
+ def test_needsfiles(tmp_path):
+ print(tmp_path)
+ > assert 0
+ E assert 0
+
+ test_tmp_path.py:3: AssertionError
+ --------------------------- Captured stdout call ---------------------------
+ PYTEST_TMPDIR/test_needsfiles0
+ ========================= short test summary info ==========================
+ FAILED test_tmp_path.py::test_needsfiles - assert 0
+ 1 failed in 0.12s
+
+More info on temporary directory handling is available at :ref:`Temporary directories and files <tmp_path handling>`.
+
+Find out what kind of builtin :ref:`pytest fixtures <fixtures>` exist with the command:
+
+.. code-block:: bash
+
+ pytest --fixtures # shows builtin and custom fixtures
+
+Note that this command omits fixtures with leading ``_`` unless the ``-v`` option is added.
+
+Continue reading
+-------------------------------------
+
+Check out additional pytest resources to help you customize tests for your unique workflow:
+
+* ":ref:`usage`" for command line invocation examples
+* ":ref:`existingtestsuite`" for working with pre-existing tests
+* ":ref:`mark`" for information on the ``pytest.mark`` mechanism
+* ":ref:`fixtures`" for providing a functional baseline to your tests
+* ":ref:`plugins`" for managing and writing plugins
+* ":ref:`goodpractices`" for virtualenv and test layouts
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/historical-notes.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/historical-notes.rst
new file mode 100644
index 0000000000..29ebbd5d19
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/historical-notes.rst
@@ -0,0 +1,312 @@
+Historical Notes
+================
+
+This page lists features or behavior from previous versions of pytest which have changed over the years. They are
+kept here as a historical note so users looking at old code can find documentation related to them.
+
+
+.. _marker-revamp:
+
+Marker revamp and iteration
+---------------------------
+
+.. versionchanged:: 3.6
+
+pytest's marker implementation traditionally worked by simply updating the ``__dict__`` attribute of functions to cumulatively add markers. As a result, markers would unintentionally be passed along class hierarchies in surprising ways. Further, the API for retrieving them was inconsistent, as markers from parameterization would be stored differently than markers applied using the ``@pytest.mark`` decorator and markers added via ``node.add_marker``.
+
+This state of things made it technically next to impossible to use data from markers correctly without having a deep understanding of the internals, leading to subtle and hard to understand bugs in more advanced usages.
+
+Depending on how a marker got declared/changed, one would get either a ``MarkerInfo`` which might contain markers from sibling classes,
+``MarkDecorators`` when marks came from parametrization or from a ``node.add_marker`` call, discarding prior marks. Also, ``MarkerInfo`` acted like a single mark, when it in fact represented a merged view on multiple marks with the same name.
+
+On top of that markers were not accessible in the same way for modules, classes, and functions/methods.
+In fact, markers were only accessible in functions, even if they were declared on classes/modules.
+
+A new API to access markers was introduced in pytest 3.6 in order to solve the problems with
+the initial design: it provides the :func:`_pytest.nodes.Node.iter_markers` method to iterate over
+markers in a consistent manner and reworks the internals, which solved a great deal of those
+problems.
+
+
+.. _update marker code:
+
+Updating code
+~~~~~~~~~~~~~
+
+The old ``Node.get_marker(name)`` function is considered deprecated because it returns an internal ``MarkerInfo`` object
+which contains the merged name, ``*args`` and ``**kwargs`` of all the markers which apply to that node.
+
+In general there are two scenarios on how markers should be handled:
+
+1. Marks overwrite each other. Order matters but you only want to think of your mark as a single item. E.g.
+``log_level('info')`` at a module level can be overwritten by ``log_level('debug')`` for a specific test.
+
+ In this case, use ``Node.get_closest_marker(name)``:
+
+ .. code-block:: python
+
+ # replace this:
+ marker = item.get_marker("log_level")
+ if marker:
+ level = marker.args[0]
+
+ # by this:
+ marker = item.get_closest_marker("log_level")
+ if marker:
+ level = marker.args[0]
+
+2. Marks compose in an additive manner. E.g. ``skipif(condition)`` marks mean you just want to evaluate all of them,
+order doesn't even matter. You probably want to think of your marks as a set here.
+
+ In this case iterate over each mark and handle their ``*args`` and ``**kwargs`` individually.
+
+ .. code-block:: python
+
+ # replace this
+ skipif = item.get_marker("skipif")
+ if skipif:
+ for condition in skipif.args:
+ # eval condition
+ ...
+
+ # by this:
+ for skipif in item.iter_markers("skipif"):
+ condition = skipif.args[0]
+ # eval condition
+
+
+If you are unsure or have any questions, please consider opening
+:issue:`an issue <new>`.
+
+Related issues
+~~~~~~~~~~~~~~
+
+Here is a non-exhaustive list of issues fixed by the new implementation:
+
+* Marks don't pick up nested classes (:issue:`199`).
+
+* Markers stain on all related classes (:issue:`568`).
+
+* Combining marks - args and kwargs calculation (:issue:`2897`).
+
+* ``request.node.get_marker('name')`` returns ``None`` for markers applied in classes (:issue:`902`).
+
+* Marks applied in parametrize are stored as markdecorator (:issue:`2400`).
+
+* Fix marker interaction in a backward incompatible way (:issue:`1670`).
+
+* Refactor marks to get rid of the current "marks transfer" mechanism (:issue:`2363`).
+
+* Introduce FunctionDefinition node, use it in generate_tests (:issue:`2522`).
+
+* Remove named marker attributes and collect markers in items (:issue:`891`).
+
+* skipif mark from parametrize hides module level skipif mark (:issue:`1540`).
+
+* skipif + parametrize not skipping tests (:issue:`1296`).
+
+* Marker transfer incompatible with inheritance (:issue:`535`).
+
+More details can be found in the :pull:`original PR <3317>`.
+
+.. note::
+
+ in a future major release of pytest we will introduce class based markers,
+ at which point markers will no longer be limited to instances of :py:class:`~_pytest.mark.Mark`.
+
+
+cache plugin integrated into the core
+-------------------------------------
+
+
+
+The functionality of the :ref:`core cache <cache>` plugin was previously distributed
+as a third party plugin named ``pytest-cache``. The core plugin
+is compatible regarding command line options and API usage except that you
+can only store/receive data between test runs that is json-serializable.
+
+.. _historical funcargs and pytest.funcargs:
+
+funcargs and ``pytest_funcarg__``
+---------------------------------
+
+
+
+In versions prior to 2.3 there was no ``@pytest.fixture`` marker
+and you had to use a magic ``pytest_funcarg__NAME`` prefix
+for the fixture factory. This remains and will remain supported
+but is no longer advertised as the primary means of declaring fixture
+functions.
+
+
+``@pytest.yield_fixture`` decorator
+-----------------------------------
+
+
+
+Prior to version 2.10, in order to use a ``yield`` statement to execute teardown code one
+had to mark a fixture using the ``yield_fixture`` marker. From 2.10 onward, normal
+fixtures can use ``yield`` directly so the ``yield_fixture`` decorator is no longer needed
+and considered deprecated.
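+
+A minimal sketch of the now-recommended plain style (the fixture name ``temp_file`` is
+made up for illustration):
+
+.. code-block:: python
+
+    import tempfile
+
+    import pytest
+
+
+    @pytest.fixture
+    def temp_file():
+        f = tempfile.TemporaryFile()
+        yield f  # the test receives the open file object
+        f.close()  # teardown code runs after the test finishes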
+
+
+``[pytest]`` header in ``setup.cfg``
+------------------------------------
+
+
+
+Prior to 3.0, the supported section name was ``[pytest]``. Due to how
+this may collide with some distutils commands, the recommended
+section name for ``setup.cfg`` files is now ``[tool:pytest]``.
+
+Note that for ``pytest.ini`` and ``tox.ini`` files the section
+name is ``[pytest]``.
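+
+For example, a minimal sketch of the recommended spelling in a ``setup.cfg`` file
+(the ``addopts = -ra`` line is just a placeholder option):
+
+.. code-block:: ini
+
+    # content of setup.cfg
+    [tool:pytest]
+    addopts = -ra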
+
+
+Applying marks to ``@pytest.mark.parametrize`` parameters
+---------------------------------------------------------
+
+
+
+Prior to version 3.1 the supported mechanism for marking values
+used the syntax:
+
+.. code-block:: python
+
+ import pytest
+
+
+ @pytest.mark.parametrize(
+ "test_input,expected", [("3+5", 8), ("2+4", 6), pytest.mark.xfail(("6*9", 42))]
+ )
+ def test_eval(test_input, expected):
+ assert eval(test_input) == expected
+
+
+This was an initial hack to support the feature, but it soon proved to be incomplete:
+it was broken for passing functions or for applying multiple marks with the same name but different parameters.
+
+The old syntax is planned to be removed in pytest-4.0.
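+
+For reference, the replacement on current pytest versions wraps the values in
+``pytest.param`` and attaches the mark via its ``marks`` argument; a sketch:
+
+.. code-block:: python
+
+    import pytest
+
+
+    @pytest.mark.parametrize(
+        "test_input,expected",
+        [("3+5", 8), ("2+4", 6), pytest.param("6*9", 42, marks=pytest.mark.xfail)],
+    )
+    def test_eval(test_input, expected):
+        assert eval(test_input) == expected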
+
+
+``@pytest.mark.parametrize`` argument names as a tuple
+------------------------------------------------------
+
+
+
+In versions prior to 2.4 one needed to specify the argument
+names as a tuple. This remains valid but the simpler ``"name1,name2,..."``
+comma-separated-string syntax is now advertised first because
+it's easier to write and produces less line noise.
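+
+Both spellings are equivalent; the test and argument names below are illustrative:
+
+.. code-block:: python
+
+    import pytest
+
+
+    @pytest.mark.parametrize(("n", "expected"), [(1, 2), (3, 4)])
+    def test_increment_tuple(n, expected):
+        assert n + 1 == expected
+
+
+    @pytest.mark.parametrize("n,expected", [(1, 2), (3, 4)])
+    def test_increment_string(n, expected):
+        assert n + 1 == expected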
+
+
+setup: is now an "autouse fixture"
+----------------------------------
+
+
+
+During development prior to the pytest-2.3 release the name
+``pytest.setup`` was used, but before the release it was renamed
+and moved to become part of the general fixture mechanism,
+namely :ref:`autouse fixtures`.
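+
+A minimal sketch of an autouse fixture (the fixture name ``run_in_tmp_dir`` is made up
+for illustration):
+
+.. code-block:: python
+
+    import pytest
+
+
+    @pytest.fixture(autouse=True)
+    def run_in_tmp_dir(tmp_path, monkeypatch):
+        # applied automatically to every test in its scope, without being
+        # requested in the test signature
+        monkeypatch.chdir(tmp_path)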
+
+
+.. _string conditions:
+
+Conditions as strings instead of booleans
+-----------------------------------------
+
+
+
+Prior to pytest-2.4 the only way to specify skipif/xfail conditions was
+to use strings:
+
+.. code-block:: python
+
+ import sys
+
+ import pytest
+
+
+ @pytest.mark.skipif("sys.version_info >= (3,3)")
+ def test_function():
+ ...
+
+During test function setup the skipif condition is evaluated by calling
+``eval('sys.version_info >= (3,3)', namespace)``. The namespace contains
+all the module globals, and ``os`` and ``sys`` as a minimum.
+
+Since pytest-2.4 :ref:`boolean conditions <condition booleans>` are considered preferable
+because markers can then be freely imported between test modules.
+With strings you need to import not only the marker but all variables
+used by the marker, which violates encapsulation.
+
+The reason for specifying the condition as a string was that ``pytest`` can
+report a summary of skip conditions based purely on the condition string.
+With conditions as booleans you are required to specify a ``reason`` string.
+
+Note that string conditions will remain fully supported and you are free
+to use them if you have no need for cross-importing markers.
+
+The evaluation of a condition string in ``pytest.mark.skipif(conditionstring)``
+or ``pytest.mark.xfail(conditionstring)`` takes place in a namespace
+dictionary which is constructed as follows:
+
+* the namespace is initialized by putting the ``sys`` and ``os`` modules
+ and the pytest ``config`` object into it.
+
+* updated with the module globals of the test function for which the
+ expression is applied.
+
+The pytest ``config`` object allows you to skip based on a test
+configuration value which you might have added:
+
+.. code-block:: python
+
+ @pytest.mark.skipif("not config.getvalue('db')")
+ def test_function():
+ ...
+
+The equivalent with "boolean conditions" is:
+
+.. code-block:: python
+
+ @pytest.mark.skipif(not pytest.config.getvalue("db"), reason="--db was not specified")
+ def test_function():
+ pass
+
+.. note::
+
+ You cannot use ``pytest.config.getvalue()`` in code
+ imported before pytest's argument parsing takes place. For example,
+ ``conftest.py`` files are imported before command line parsing and thus
+ ``config.getvalue()`` will not execute correctly.
+
+``pytest.set_trace()``
+----------------------
+
+
+
+Prior to version 2.4, to set a breakpoint in code, one needed to use ``pytest.set_trace()``:
+
+.. code-block:: python
+
+ import pytest
+
+
+ def test_function():
+ ...
+ pytest.set_trace() # invoke PDB debugger and tracing
+
+
+This is no longer needed and one can use the native ``import pdb; pdb.set_trace()`` call directly.
+
+For more details see :ref:`breakpoints`.
+
+"compat" properties
+-------------------
+
+
+
+Access of ``Module``, ``Function``, ``Class``, ``Instance``, ``File`` and ``Item`` through ``Node`` instances has long
+been documented as deprecated, but only started to emit warnings from pytest ``3.9`` onward.
+
+Users should just ``import pytest`` and access those objects using the ``pytest`` module.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/history.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/history.rst
new file mode 100644
index 0000000000..bb5aa49302
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/history.rst
@@ -0,0 +1,145 @@
+History
+=======
+
+pytest has a long and interesting history. The `first commit
+<https://github.com/pytest-dev/pytest/commit/5992a8ef21424d7571305a8d7e2a3431ee7e1e23>`__
+in this repository is from January 2007, and even that commit alone already
+tells a lot: The repository originally was from the :pypi:`py`
+library (later split off to pytest), and it
+originally was an SVN revision, migrated to Mercurial, and finally migrated to
+git.
+
+However, the commit says “create the new development trunk” and is
+already quite big: *435 files changed, 58640 insertions(+)*. This is because
+pytest originally was born as part of `PyPy <https://www.pypy.org/>`__, to make
+it easier to write tests for it. Here's how it evolved from there to its own
+project:
+
+
+- Late 2002 / early 2003, `PyPy was
+ born <https://morepypy.blogspot.com/2018/09/the-first-15-years-of-pypy.html>`__.
+- As that blog post mentioned, from very early on, there was a big
+ focus on testing. There were various ``testsupport`` files on top of
+ unittest.py, and as early as June 2003, Holger Krekel (:user:`hpk42`)
+ `refactored <https://mail.python.org/pipermail/pypy-dev/2003-June/000787.html>`__
+ its test framework to clean things up (``pypy.tool.test``, but still
+ on top of ``unittest.py``, with nothing pytest-like yet).
+- In December 2003, there was `another
+ iteration <https://foss.heptapod.net/pypy/pypy/-/commit/02752373e1b29d89c6bb0a97e5f940caa22bdd63>`__
+ at improving their testing situation, by Stefan Schwarzer, called
+ ``pypy.tool.newtest``.
+- However, it didn’t seem to be around for long, as around June/July
+ 2004, efforts started on a thing called ``utest``, offering plain
+ assertions. This seems like the start of something pytest-like, but
+ unfortunately, it's unclear where the test runner's code was at the time.
+ The closest thing still around is `this
+ file <https://foss.heptapod.net/pypy/pypy/-/commit/0735f9ed287ec20950a7dd0a16fc10810d4f6847>`__,
+ but that doesn’t seem like a complete test runner at all. What can be seen
+ is that there were `various
+ efforts <https://foss.heptapod.net/pypy/pypy/-/commits/branch/default?utf8=%E2%9C%93&search=utest>`__
+ by Laura Creighton and Samuele Pedroni (:user:`pedronis`) at automatically
+ converting existing tests to the new ``utest`` framework.
+- Around the same time, for Europython 2004, @hpk42 `started a
+ project <http://web.archive.org/web/20041020215353/http://codespeak.net/svn/user/hpk/talks/std-talk.txt>`__
+ originally called “std”, intended to be a “complementary standard
+ library” - already laying out the principles behind what later became
+ pytest:
+
+ - current “batteries included” are very useful, but
+
+ - some of them are written in a pretty much java-like style,
+ especially the unittest-framework
+ - […]
+ - the best API is one that doesn’t exist
+
+ […]
+
+ - a testing package should require as little boilerplate code as
+ possible and offer much flexibility
+ - it should provide premium quality tracebacks and debugging aid
+
+ […]
+
+ - first of all … forget about limited “assertXYZ APIs” and use the
+ real thing, e.g.::
+
+ assert x == y
+
+ - this works with plain python but you get unhelpful “assertion
+ failed” errors with no information
+
+ - std.utest (magic!) actually reinterprets the assertion expression
+ and offers detailed information about underlying values
+
+- In September 2004, the ``py-dev`` mailing list is born, which `is
+ now <https://mail.python.org/pipermail/pytest-dev/>`__ ``pytest-dev``,
+ but thankfully with all the original archives still intact.
+
+- Around September/October 2004, the ``std`` project `was renamed
+ <https://mail.python.org/pipermail/pypy-dev/2004-September/001565.html>`__ to
+ ``py`` and ``std.utest`` became ``py.test``. This is also the first time the
+ `entire source
+ code <https://foss.heptapod.net/pypy/pypy/-/commit/42cf50c412026028e20acd23d518bd92e623ac11>`__
+ seems to be available, with much of the API still being around today:
+
+ - ``py.path.local``, which is being phased out of pytest (in favour of
+ pathlib) some 16-17 years later
+ - The idea of the collection tree, including ``Collector``,
+ ``FSCollector``, ``Directory``, ``PyCollector``, ``Module``,
+ ``Class``
+ - Arguments like ``-x`` / ``--exitfirst``, ``-l`` /
+ ``--showlocals``, ``--fulltrace``, ``--pdb``, ``-S`` /
+ ``--nocapture`` (``-s`` / ``--capture=off`` today),
+ ``--collectonly`` (``--collect-only`` today)
+
+- In the same month, the ``py`` library `gets split off
+ <https://foss.heptapod.net/pypy/pypy/-/commit/6bdafe9203ad92eb259270b267189141c53bce33>`__
+ from ``PyPy``
+
+- It seemed to get rather quiet for a while, and little seemed to happen
+ between October 2004 (removing ``py`` from PyPy) and January
+ 2007 (first commit in the now-pytest repository). However, there were
+ various discussions about features/ideas on the mailing list, and
+ :pypi:`a couple of releases <py/0.8.0-alpha2/#history>` every
+ couple of months:
+
+ - March 2006: py 0.8.0-alpha2
+ - May 2007: py 0.9.0
+ - March 2008: py 0.9.1 (first release to be found `in the pytest
+ changelog <https://github.com/pytest-dev/pytest/blob/main/doc/en/changelog.rst#091>`__!)
+ - August 2008: py 0.9.2
+
+- In August 2009, py 1.0.0 was released, `introducing a lot of
+ fundamental
+ features <https://holgerkrekel.net/2009/08/04/pylib-1-0-0-released-the-testing-with-python-innovations-continue/>`__:
+
+ - funcargs/fixtures
+ - A `plugin
+ architecture <http://web.archive.org/web/20090629032718/https://codespeak.net/py/dist/test/extend.html>`__
+ which still looks very much the same today!
+ - Various `default
+ plugins <http://web.archive.org/web/20091005181132/https://codespeak.net/py/dist/test/plugin/index.html>`__,
+ including
+ `monkeypatch <http://web.archive.org/web/20091012022829/http://codespeak.net/py/dist/test/plugin/how-to/monkeypatch.html>`__
+
+- Even back then, the
+ `FAQ <http://web.archive.org/web/20091005222413/http://codespeak.net/py/dist/faq.html>`__
+ said:
+
+ Clearly, [a second standard library] was ambitious and the naming has
+ maybe haunted the project rather than helping it. There may be a
+ project name change and possibly a split up into different projects
+ sometime.
+
+ and that finally happened in November 2010, when pytest 2.0.0 `was
+ released <https://mail.python.org/pipermail/pytest-dev/2010-November/001687.html>`__
+ as a package separate from ``py`` (but still called ``py.test``).
+
+- In August 2016, pytest 3.0.0 :std:ref:`was released <release-3.0.0>`,
+ which adds ``pytest`` (rather than ``py.test``) as the recommended
+ command-line entry point.
+
+Due to this history, it's difficult to answer the question of when pytest was started.
+It depends on what point should really be seen as the start of it all. One
+possible interpretation is to pick Europython 2004, i.e. around June/July
+2004.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/assert.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/assert.rst
new file mode 100644
index 0000000000..cb70db6b8e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/assert.rst
@@ -0,0 +1,336 @@
+.. _`assert`:
+
+How to write and report assertions in tests
+==================================================
+
+.. _`assert with the assert statement`:
+
+Asserting with the ``assert`` statement
+---------------------------------------------------------
+
+``pytest`` allows you to use the standard Python ``assert`` for verifying
+expectations and values in Python tests. For example, you can write the
+following:
+
+.. code-block:: python
+
+ # content of test_assert1.py
+ def f():
+ return 3
+
+
+ def test_function():
+ assert f() == 4
+
+to assert that your function returns a certain value. If this assertion fails
+you will see the return value of the function call:
+
+.. code-block:: pytest
+
+ $ pytest test_assert1.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 1 item
+
+ test_assert1.py F [100%]
+
+ ================================= FAILURES =================================
+ ______________________________ test_function _______________________________
+
+ def test_function():
+ > assert f() == 4
+ E assert 3 == 4
+ E + where 3 = f()
+
+ test_assert1.py:6: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_assert1.py::test_function - assert 3 == 4
+ ============================ 1 failed in 0.12s =============================
+
+``pytest`` has support for showing the values of the most common subexpressions
+including calls, attributes, comparisons, and binary and unary
+operators. (See :ref:`tbreportdemo`). This allows you to use the
+idiomatic python constructs without boilerplate code while not losing
+introspection information.
+
+However, if you specify a message with the assertion like this:
+
+.. code-block:: python
+
+ assert a % 2 == 0, "value was odd, should be even"
+
+then no assertion introspection takes place at all and the message
+will simply be shown in the traceback.
+
+See :ref:`assert-details` for more information on assertion introspection.
+
+.. _`assertraises`:
+
+Assertions about expected exceptions
+------------------------------------------
+
+In order to write assertions about raised exceptions, you can use
+:func:`pytest.raises` as a context manager like this:
+
+.. code-block:: python
+
+ import pytest
+
+
+ def test_zero_division():
+ with pytest.raises(ZeroDivisionError):
+ 1 / 0
+
+and if you need to have access to the actual exception info you may use:
+
+.. code-block:: python
+
+ def test_recursion_depth():
+ with pytest.raises(RuntimeError) as excinfo:
+
+ def f():
+ f()
+
+ f()
+ assert "maximum recursion" in str(excinfo.value)
+
+``excinfo`` is an :class:`~pytest.ExceptionInfo` instance, which is a wrapper around
+the actual exception raised. The main attributes of interest are
+``.type``, ``.value`` and ``.traceback``.
+
+You can pass a ``match`` keyword parameter to the context-manager to test
+that a regular expression matches on the string representation of an exception
+(similar to the ``TestCase.assertRaisesRegex`` method from ``unittest``):
+
+.. code-block:: python
+
+ import pytest
+
+
+ def myfunc():
+ raise ValueError("Exception 123 raised")
+
+
+ def test_match():
+ with pytest.raises(ValueError, match=r".* 123 .*"):
+ myfunc()
+
+The regexp passed to the ``match`` parameter is matched with the ``re.search``
+function, so in the above example ``match='123'`` would have worked as
+well.
+
+There's an alternate form of the :func:`pytest.raises` function where you pass
+a function that will be executed with the given ``*args`` and ``**kwargs`` and
+assert that the given exception is raised:
+
+.. code-block:: python
+
+ pytest.raises(ExpectedException, func, *args, **kwargs)
+
+The reporter will provide you with helpful output in case of failures such as *no
+exception* or *wrong exception*.
+
+Note that it is also possible to specify a "raises" argument to
+``pytest.mark.xfail``, which checks that the test is failing in a more
+specific way than just having any exception raised:
+
+.. code-block:: python
+
+ @pytest.mark.xfail(raises=IndexError)
+ def test_f():
+ f()
+
+Using :func:`pytest.raises` is likely to be better for cases where you are
+testing exceptions your own code is deliberately raising, whereas using
+``@pytest.mark.xfail`` with a check function is probably better for something
+like documenting unfixed bugs (where the test describes what "should" happen)
+or bugs in dependencies.
+
+
+.. _`assertwarns`:
+
+Assertions about expected warnings
+-----------------------------------------
+
+
+
+You can check that code raises a particular warning using
+:ref:`pytest.warns <warns>`.
+
+
+.. _newreport:
+
+Making use of context-sensitive comparisons
+-------------------------------------------------
+
+
+
+``pytest`` has rich support for providing context-sensitive information
+when it encounters comparisons. For example:
+
+.. code-block:: python
+
+ # content of test_assert2.py
+ def test_set_comparison():
+ set1 = set("1308")
+ set2 = set("8035")
+ assert set1 == set2
+
+if you run this module:
+
+.. code-block:: pytest
+
+ $ pytest test_assert2.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 1 item
+
+ test_assert2.py F [100%]
+
+ ================================= FAILURES =================================
+ ___________________________ test_set_comparison ____________________________
+
+ def test_set_comparison():
+ set1 = set("1308")
+ set2 = set("8035")
+ > assert set1 == set2
+ E AssertionError: assert {'0', '1', '3', '8'} == {'0', '3', '5', '8'}
+ E Extra items in the left set:
+ E '1'
+ E Extra items in the right set:
+ E '5'
+ E Use -v to get the full diff
+
+ test_assert2.py:4: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_assert2.py::test_set_comparison - AssertionError: assert {'0'...
+ ============================ 1 failed in 0.12s =============================
+
+Special comparisons are done for a number of cases:
+
+* comparing long strings: a context diff is shown
+* comparing long sequences: first failing indices
+* comparing dicts: different entries
+
+See the :ref:`reporting demo <tbreportdemo>` for many more examples.
+
+Defining your own explanation for failed assertions
+---------------------------------------------------
+
+It is possible to add your own detailed explanations by implementing
+the ``pytest_assertrepr_compare`` hook.
+
+.. autofunction:: _pytest.hookspec.pytest_assertrepr_compare
+ :noindex:
+
+As an example consider adding the following hook in a :ref:`conftest.py <conftest.py>`
+file which provides an alternative explanation for ``Foo`` objects:
+
+.. code-block:: python
+
+ # content of conftest.py
+ from test_foocompare import Foo
+
+
+ def pytest_assertrepr_compare(op, left, right):
+ if isinstance(left, Foo) and isinstance(right, Foo) and op == "==":
+ return [
+ "Comparing Foo instances:",
+ " vals: {} != {}".format(left.val, right.val),
+ ]
+
+now, given this test module:
+
+.. code-block:: python
+
+ # content of test_foocompare.py
+ class Foo:
+ def __init__(self, val):
+ self.val = val
+
+ def __eq__(self, other):
+ return self.val == other.val
+
+
+ def test_compare():
+ f1 = Foo(1)
+ f2 = Foo(2)
+ assert f1 == f2
+
+you can run the test module and get the custom output defined in
+the conftest file:
+
+.. code-block:: pytest
+
+ $ pytest -q test_foocompare.py
+ F [100%]
+ ================================= FAILURES =================================
+ _______________________________ test_compare _______________________________
+
+ def test_compare():
+ f1 = Foo(1)
+ f2 = Foo(2)
+ > assert f1 == f2
+ E assert Comparing Foo instances:
+ E vals: 1 != 2
+
+ test_foocompare.py:12: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_foocompare.py::test_compare - assert Comparing Foo instances:
+ 1 failed in 0.12s
+
+.. _assert-details:
+.. _`assert introspection`:
+
+Assertion introspection details
+-------------------------------
+
+
+Reporting details about a failing assertion is achieved by rewriting assert
+statements before they are run. Rewritten assert statements put introspection
+information into the assertion failure message. ``pytest`` only rewrites test
+modules directly discovered by its test collection process, so **asserts in
+supporting modules which are not themselves test modules will not be rewritten**.
+
+You can manually enable assertion rewriting for an imported module by calling
+:ref:`register_assert_rewrite <assertion-rewriting>`
+before you import it (a good place to do that is in your root ``conftest.py``).
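+
+A minimal sketch (``helpers`` stands in for a support module of your own):
+
+.. code-block:: python
+
+    # content of conftest.py
+    import pytest
+
+    # must run before ``helpers`` is imported anywhere
+    pytest.register_assert_rewrite("helpers")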
+
+For further information, Benjamin Peterson wrote up `Behind the scenes of pytest's new assertion rewriting <http://pybites.blogspot.com/2011/07/behind-scenes-of-pytests-new-assertion.html>`_.
+
+Assertion rewriting caches files on disk
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``pytest`` will write back the rewritten modules to disk for caching. You can disable
+this behavior (for example to avoid leaving stale ``.pyc`` files around in projects that
+move files around a lot) by adding this to the top of your ``conftest.py`` file:
+
+.. code-block:: python
+
+ import sys
+
+ sys.dont_write_bytecode = True
+
+Note that you still get the benefits of assertion introspection; the only change is that
+the ``.pyc`` files won't be cached on disk.
+
+Additionally, rewriting will silently skip caching if it cannot write new ``.pyc`` files,
+e.g. in a read-only filesystem or a zipfile.
+
+
+Disabling assert rewriting
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``pytest`` rewrites test modules on import by using an import
+hook to write new ``pyc`` files. Most of the time this works transparently.
+However, if you are working with the import machinery yourself, the import hook may
+interfere.
+
+If this is the case you have two options:
+
+* Disable rewriting for a specific module by adding the string
+ ``PYTEST_DONT_REWRITE`` to its docstring (see the sketch below).
+
+* Disable rewriting for all modules by using ``--assert=plain``.
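+
+A minimal sketch of such an opt-out (``helpers.py`` is a hypothetical support module):
+
+.. code-block:: python
+
+    # content of helpers.py
+    """Assertion helpers used by tests. PYTEST_DONT_REWRITE"""
+
+
+    def close_enough(a, b, tolerance=1e-6):
+        return abs(a - b) <= tolerance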
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/bash-completion.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/bash-completion.rst
new file mode 100644
index 0000000000..245dfd6d9a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/bash-completion.rst
@@ -0,0 +1,33 @@
+
+.. _bash_completion:
+
+How to set up bash completion
+=============================
+
+When using bash as your shell, ``pytest`` can use argcomplete
+(https://argcomplete.readthedocs.io/) for auto-completion.
+For this, ``argcomplete`` needs to be installed **and** enabled.
+
+Install argcomplete using:
+
+.. code-block:: bash
+
+ sudo pip install 'argcomplete>=0.5.7'
+
+For global activation of all argcomplete enabled python applications run:
+
+.. code-block:: bash
+
+ sudo activate-global-python-argcomplete
+
+For permanent (but not global) ``pytest`` activation, use:
+
+.. code-block:: bash
+
+ register-python-argcomplete pytest >> ~/.bashrc
+
+For one-time activation of argcomplete for ``pytest`` only, use:
+
+.. code-block:: bash
+
+ eval "$(register-python-argcomplete pytest)"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/cache.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/cache.rst
new file mode 100644
index 0000000000..e7994645dd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/cache.rst
@@ -0,0 +1,329 @@
+.. _`cache_provider`:
+.. _cache:
+
+
+How to re-run failed tests and maintain state between test runs
+===============================================================
+
+
+
+Usage
+---------
+
+The plugin provides two command line options to rerun failures from the
+last ``pytest`` invocation:
+
+* ``--lf``, ``--last-failed`` - to only re-run the failures.
+* ``--ff``, ``--failed-first`` - to run the failures first and then the rest of
+ the tests.
+
+For cleanup (usually not needed), a ``--cache-clear`` option allows removing
+all cross-session cache contents ahead of a test run.
+
+Other plugins may access the `config.cache`_ object to set/get
+**json encodable** values between ``pytest`` invocations.
+
+.. note::
+
+ This plugin is enabled by default, but can be disabled if needed: see
+ :ref:`cmdunregister` (the internal name for this plugin is
+ ``cacheprovider``).
+
+
+Rerunning only failures or failures first
+-----------------------------------------------
+
+First, let's create 50 test invocations of which only 2 fail:
+
+.. code-block:: python
+
+ # content of test_50.py
+ import pytest
+
+
+ @pytest.mark.parametrize("i", range(50))
+ def test_num(i):
+ if i in (17, 25):
+ pytest.fail("bad luck")
+
+If you run this for the first time you will see two failures:
+
+.. code-block:: pytest
+
+ $ pytest -q
+ .................F.......F........................ [100%]
+ ================================= FAILURES =================================
+ _______________________________ test_num[17] _______________________________
+
+ i = 17
+
+ @pytest.mark.parametrize("i", range(50))
+ def test_num(i):
+ if i in (17, 25):
+ > pytest.fail("bad luck")
+ E Failed: bad luck
+
+ test_50.py:7: Failed
+ _______________________________ test_num[25] _______________________________
+
+ i = 25
+
+ @pytest.mark.parametrize("i", range(50))
+ def test_num(i):
+ if i in (17, 25):
+ > pytest.fail("bad luck")
+ E Failed: bad luck
+
+ test_50.py:7: Failed
+ ========================= short test summary info ==========================
+ FAILED test_50.py::test_num[17] - Failed: bad luck
+ FAILED test_50.py::test_num[25] - Failed: bad luck
+ 2 failed, 48 passed in 0.12s
+
+If you then run it with ``--lf``:
+
+.. code-block:: pytest
+
+ $ pytest --lf
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 2 items
+ run-last-failure: rerun previous 2 failures
+
+ test_50.py FF [100%]
+
+ ================================= FAILURES =================================
+ _______________________________ test_num[17] _______________________________
+
+ i = 17
+
+ @pytest.mark.parametrize("i", range(50))
+ def test_num(i):
+ if i in (17, 25):
+ > pytest.fail("bad luck")
+ E Failed: bad luck
+
+ test_50.py:7: Failed
+ _______________________________ test_num[25] _______________________________
+
+ i = 25
+
+ @pytest.mark.parametrize("i", range(50))
+ def test_num(i):
+ if i in (17, 25):
+ > pytest.fail("bad luck")
+ E Failed: bad luck
+
+ test_50.py:7: Failed
+ ========================= short test summary info ==========================
+ FAILED test_50.py::test_num[17] - Failed: bad luck
+ FAILED test_50.py::test_num[25] - Failed: bad luck
+ ============================ 2 failed in 0.12s =============================
+
+You have run only the two failing tests from the last run, while the 48 passing
+tests have not been run ("deselected").
+
+Now, if you run with the ``--ff`` option, all tests will be run, but the
+previous failures will be executed first (as can be seen from the series
+of ``FF`` and dots):
+
+.. code-block:: pytest
+
+ $ pytest --ff
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 50 items
+ run-last-failure: rerun previous 2 failures first
+
+ test_50.py FF................................................ [100%]
+
+ ================================= FAILURES =================================
+ _______________________________ test_num[17] _______________________________
+
+ i = 17
+
+ @pytest.mark.parametrize("i", range(50))
+ def test_num(i):
+ if i in (17, 25):
+ > pytest.fail("bad luck")
+ E Failed: bad luck
+
+ test_50.py:7: Failed
+ _______________________________ test_num[25] _______________________________
+
+ i = 25
+
+ @pytest.mark.parametrize("i", range(50))
+ def test_num(i):
+ if i in (17, 25):
+ > pytest.fail("bad luck")
+ E Failed: bad luck
+
+ test_50.py:7: Failed
+ ========================= short test summary info ==========================
+ FAILED test_50.py::test_num[17] - Failed: bad luck
+ FAILED test_50.py::test_num[25] - Failed: bad luck
+ ======================= 2 failed, 48 passed in 0.12s =======================
+
+.. _`config.cache`:
+
+The ``--nf``, ``--new-first`` options run new tests first, followed by the rest
+of the tests; in both groups, tests are also sorted by file modification time,
+with more recent files coming first.
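+
+For example, either spelling of the option can be used on the command line:
+
+.. code-block:: bash
+
+    pytest --nf          # run new tests first, then the remaining tests
+    pytest --new-first   # long form of the same option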
+
+Behavior when no tests failed in the last run
+---------------------------------------------
+
+When no tests failed in the last run, or when no cached ``lastfailed`` data was
+found, ``pytest`` can be configured either to run all of the tests or no tests,
+using the ``--last-failed-no-failures`` option, which takes one of the following values:
+
+.. code-block:: bash
+
+ pytest --last-failed --last-failed-no-failures all # run all tests (default behavior)
+ pytest --last-failed --last-failed-no-failures none # run no tests and exit
+
+The new config.cache object
+--------------------------------
+
+.. regendoc:wipe
+
+Plugins or conftest.py support code can get a cached value using the
+pytest ``config`` object. Here is a basic example plugin which
+implements a :ref:`fixture <fixture>` which re-uses previously created state
+across pytest invocations:
+
+.. code-block:: python
+
+ # content of test_caching.py
+ import pytest
+ import time
+
+
+ def expensive_computation():
+ print("running expensive computation...")
+
+
+ @pytest.fixture
+ def mydata(request):
+ val = request.config.cache.get("example/value", None)
+ if val is None:
+ expensive_computation()
+ val = 42
+ request.config.cache.set("example/value", val)
+ return val
+
+
+ def test_function(mydata):
+ assert mydata == 23
+
+If you run this command for the first time, you can see the print statement:
+
+.. code-block:: pytest
+
+ $ pytest -q
+ F [100%]
+ ================================= FAILURES =================================
+ ______________________________ test_function _______________________________
+
+ mydata = 42
+
+ def test_function(mydata):
+ > assert mydata == 23
+ E assert 42 == 23
+
+ test_caching.py:20: AssertionError
+ -------------------------- Captured stdout setup ---------------------------
+ running expensive computation...
+ ========================= short test summary info ==========================
+ FAILED test_caching.py::test_function - assert 42 == 23
+ 1 failed in 0.12s
+
+If you run it a second time, the value will be retrieved from
+the cache and nothing will be printed:
+
+.. code-block:: pytest
+
+ $ pytest -q
+ F [100%]
+ ================================= FAILURES =================================
+ ______________________________ test_function _______________________________
+
+ mydata = 42
+
+ def test_function(mydata):
+ > assert mydata == 23
+ E assert 42 == 23
+
+ test_caching.py:20: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_caching.py::test_function - assert 42 == 23
+ 1 failed in 0.12s
+
+See the :fixture:`config.cache fixture <cache>` for more details.
+
+
+Inspecting Cache content
+------------------------
+
+You can always peek at the content of the cache using the
+``--cache-show`` command line option:
+
+.. code-block:: pytest
+
+ $ pytest --cache-show
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ cachedir: /home/sweet/project/.pytest_cache
+ --------------------------- cache values for '*' ---------------------------
+ cache/lastfailed contains:
+ {'test_caching.py::test_function': True}
+ cache/nodeids contains:
+ ['test_caching.py::test_function']
+ cache/stepwise contains:
+ []
+ example/value contains:
+ 42
+
+ ========================== no tests ran in 0.12s ===========================
+
+``--cache-show`` takes an optional argument to specify a glob pattern for
+filtering:
+
+.. code-block:: pytest
+
+ $ pytest --cache-show example/*
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ cachedir: /home/sweet/project/.pytest_cache
+ ----------------------- cache values for 'example/*' -----------------------
+ example/value contains:
+ 42
+
+ ========================== no tests ran in 0.12s ===========================
+
+Clearing Cache content
+----------------------
+
+You can instruct pytest to clear all cache files and values
+by adding the ``--cache-clear`` option like this:
+
+.. code-block:: bash
+
+ pytest --cache-clear
+
+This is recommended for invocations from Continuous Integration
+servers where isolation and correctness is more important
+than speed.
+
+
+.. _cache stepwise:
+
+Stepwise
+--------
+
+As an alternative to ``--lf -x``, especially for cases where you expect a large part of
+the test suite to fail, ``--sw``, ``--stepwise`` allows you to fix the failures one at a
+time. The test suite will run until the first failure and then stop. At the next
+invocation, tests will continue from the last failing test and then run until the next
+failing test. You may use the ``--stepwise-skip`` option to ignore one failing test and
+stop the test execution on the second failing test instead. This is useful if you get
+stuck on a failing test and just want to ignore it until later. Providing
+``--stepwise-skip`` will also enable ``--stepwise`` implicitly.
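+
+For example (a minimal illustration of the flags described above):
+
+.. code-block:: bash
+
+    pytest --sw                  # stop at the first failure; the next run continues
+                                 # from that failing test
+    pytest --sw --stepwise-skip  # ignore one failing test and stop at the second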
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/capture-stdout-stderr.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/capture-stdout-stderr.rst
new file mode 100644
index 0000000000..9ccea719b6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/capture-stdout-stderr.rst
@@ -0,0 +1,170 @@
+
+.. _`captures`:
+
+How to capture stdout/stderr output
+=========================================================
+
+Default stdout/stderr/stdin capturing behaviour
+---------------------------------------------------------
+
+During test execution any output sent to ``stdout`` and ``stderr`` is
+captured. If a test or a setup method fails, its corresponding captured
+output will usually be shown along with the failure traceback (this
+behavior can be configured by the ``--show-capture`` command-line option).
+
+In addition, ``stdin`` is set to a "null" object which will
+fail on attempts to read from it because it is rarely desired
+to wait for interactive input when running automated tests.
+
+By default capturing is done by intercepting writes to low-level
+file descriptors. This allows capturing output from simple
+print statements as well as output from a subprocess started by
+a test.
+
+.. _capture-method:
+
+Setting capturing methods or disabling capturing
+-------------------------------------------------
+
+There are three ways in which ``pytest`` can perform capturing:
+
+* ``fd`` (file descriptor) level capturing (default): All writes going to the
+ operating system file descriptors 1 and 2 will be captured.
+
+* ``sys`` level capturing: Only writes to the Python-level ``sys.stdout``
+  and ``sys.stderr`` streams will be captured. No capturing of writes to
+  file descriptors is performed.
+
+* ``tee-sys`` capturing: Python writes to ``sys.stdout`` and ``sys.stderr``
+ will be captured, however the writes will also be passed-through to
+ the actual ``sys.stdout`` and ``sys.stderr``. This allows output to be
+ 'live printed' and captured for plugin use, such as junitxml (new in pytest 5.4).
+
+.. _`disable capturing`:
+
+You can influence output capturing mechanisms from the command line:
+
+.. code-block:: bash
+
+ pytest -s # disable all capturing
+ pytest --capture=sys # replace sys.stdout/stderr with in-mem files
+ pytest --capture=fd # also point filedescriptors 1 and 2 to temp file
+ pytest --capture=tee-sys # combines 'sys' and '-s', capturing sys.stdout/stderr
+ # and passing it along to the actual sys.stdout/stderr
+
+.. _printdebugging:
+
+Using print statements for debugging
+---------------------------------------------------
+
+One primary benefit of the default capturing of stdout/stderr output
+is that you can use print statements for debugging:
+
+.. code-block:: python
+
+ # content of test_module.py
+
+
+ def setup_function(function):
+ print("setting up", function)
+
+
+ def test_func1():
+ assert True
+
+
+ def test_func2():
+ assert False
+
+and running this module will show you precisely the output
+of the failing function and hide the other one:
+
+.. code-block:: pytest
+
+ $ pytest
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 2 items
+
+ test_module.py .F [100%]
+
+ ================================= FAILURES =================================
+ ________________________________ test_func2 ________________________________
+
+ def test_func2():
+ > assert False
+ E assert False
+
+ test_module.py:12: AssertionError
+ -------------------------- Captured stdout setup ---------------------------
+ setting up <function test_func2 at 0xdeadbeef0001>
+ ========================= short test summary info ==========================
+ FAILED test_module.py::test_func2 - assert False
+ ======================= 1 failed, 1 passed in 0.12s ========================
+
+Accessing captured output from a test function
+---------------------------------------------------
+
+The ``capsys``, ``capsysbinary``, ``capfd``, and ``capfdbinary`` fixtures
+allow access to stdout/stderr output created during test execution. Here is
+an example test function that performs some output related checks:
+
+.. code-block:: python
+
+    import sys
+
+
+    def test_myoutput(capsys):  # or use "capfd" for fd-level
+ print("hello")
+ sys.stderr.write("world\n")
+ captured = capsys.readouterr()
+ assert captured.out == "hello\n"
+ assert captured.err == "world\n"
+ print("next")
+ captured = capsys.readouterr()
+ assert captured.out == "next\n"
+
+The ``readouterr()`` call snapshots the output so far,
+and capturing continues. After the test
+function finishes, the original streams are
+restored. Using ``capsys`` this way frees your
+test from having to care about setting/resetting
+output streams and also interacts well with pytest's
+own per-test capturing.
+
+If you want to capture at the file descriptor level, you can use
+the ``capfd`` fixture, which offers the exact
+same interface but also allows capturing output from
+libraries or subprocesses that write directly to operating
+system level output streams (FD1 and FD2).
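+
+For example, a minimal sketch (assuming a POSIX shell is available for ``echo``):
+
+.. code-block:: python
+
+    import os
+
+
+    def test_system_echo(capfd):
+        # the subprocess writes straight to file descriptor 1,
+        # which capfd still captures
+        os.system('echo "hello"')
+        captured = capfd.readouterr()
+        assert captured.out == "hello\n"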
+
+
+
+The return value of ``readouterr`` is a ``namedtuple`` with two attributes, ``out`` and ``err``.
+
+
+
+If the code under test writes non-textual data, you can capture this using
+the ``capsysbinary`` fixture which instead returns ``bytes`` from
+the ``readouterr`` method.
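+
+For example, a minimal sketch:
+
+.. code-block:: python
+
+    def test_binary_output(capsysbinary):
+        print("hello")
+        captured = capsysbinary.readouterr()
+        # out and err are bytes rather than text here
+        assert captured.out == b"hello\n"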
+
+
+
+
+If the code under test writes non-textual data, you can capture this using
+the ``capfdbinary`` fixture which instead returns ``bytes`` from
+the ``readouterr`` method. The ``capfdbinary`` fixture operates on the
+filedescriptor level.
+
+
+
+
+To temporarily disable capture within a test, both ``capsys``
+and ``capfd`` have a ``disabled()`` method that can be used
+as a context manager, disabling capture inside the ``with`` block:
+
+.. code-block:: python
+
+ def test_disabling_capturing(capsys):
+ print("this output is captured")
+ with capsys.disabled():
+ print("output not captured, going directly to sys.stdout")
+ print("this output is also captured")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/capture-warnings.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/capture-warnings.rst
new file mode 100644
index 0000000000..065c11e610
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/capture-warnings.rst
@@ -0,0 +1,443 @@
+.. _`warnings`:
+
+How to capture warnings
+=======================
+
+
+
+Starting from version ``3.1``, pytest automatically catches warnings during test execution
+and displays them at the end of the session:
+
+.. code-block:: python
+
+ # content of test_show_warnings.py
+ import warnings
+
+
+ def api_v1():
+ warnings.warn(UserWarning("api v1, should use functions from v2"))
+ return 1
+
+
+ def test_one():
+ assert api_v1() == 1
+
+Running pytest now produces this output:
+
+.. code-block:: pytest
+
+ $ pytest test_show_warnings.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 1 item
+
+ test_show_warnings.py . [100%]
+
+ ============================= warnings summary =============================
+ test_show_warnings.py::test_one
+ /home/sweet/project/test_show_warnings.py:5: UserWarning: api v1, should use functions from v2
+ warnings.warn(UserWarning("api v1, should use functions from v2"))
+
+ -- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
+ ======================= 1 passed, 1 warning in 0.12s =======================
+
+Controlling warnings
+--------------------
+
+Similar to Python's `warning filter`_ and :option:`-W option <python:-W>` flag, pytest provides
+its own ``-W`` flag to control which warnings are ignored, displayed, or turned into
+errors. See the `warning filter`_ documentation for more
+advanced use-cases.
+
+.. _`warning filter`: https://docs.python.org/3/library/warnings.html#warning-filter
+
+This code sample shows how to treat any warning of the ``UserWarning`` category
+as an error:
+
+.. code-block:: pytest
+
+ $ pytest -q test_show_warnings.py -W error::UserWarning
+ F [100%]
+ ================================= FAILURES =================================
+ _________________________________ test_one _________________________________
+
+ def test_one():
+ > assert api_v1() == 1
+
+ test_show_warnings.py:10:
+ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
+
+ def api_v1():
+ > warnings.warn(UserWarning("api v1, should use functions from v2"))
+ E UserWarning: api v1, should use functions from v2
+
+ test_show_warnings.py:5: UserWarning
+ ========================= short test summary info ==========================
+ FAILED test_show_warnings.py::test_one - UserWarning: api v1, should use ...
+ 1 failed in 0.12s
+
+The same option can be set in the ``pytest.ini`` or ``pyproject.toml`` file using the
+``filterwarnings`` ini option. For example, the configuration below will ignore all
+user warnings and specific deprecation warnings matching a regex, but will transform
+all other warnings into errors.
+
+.. code-block:: ini
+
+ # pytest.ini
+ [pytest]
+ filterwarnings =
+ error
+ ignore::UserWarning
+ ignore:function ham\(\) is deprecated:DeprecationWarning
+
+.. code-block:: toml
+
+ # pyproject.toml
+ [tool.pytest.ini_options]
+ filterwarnings = [
+ "error",
+ "ignore::UserWarning",
+ # note the use of single quote below to denote "raw" strings in TOML
+ 'ignore:function ham\(\) is deprecated:DeprecationWarning',
+ ]
+
+
+When a warning matches more than one option in the list, the action for the last matching option
+is performed.
+
+
+.. _`filterwarnings`:
+
+``@pytest.mark.filterwarnings``
+-------------------------------
+
+
+
+You can use the ``@pytest.mark.filterwarnings`` mark to add warning filters to specific test items,
+allowing you finer control over which warnings should be captured at the test, class or
+even module level:
+
+.. code-block:: python
+
+ import warnings
+    import warnings
+
+    import pytest
+
+ def api_v1():
+ warnings.warn(UserWarning("api v1, should use functions from v2"))
+ return 1
+
+
+ @pytest.mark.filterwarnings("ignore:api v1")
+ def test_one():
+ assert api_v1() == 1
+
+
+Filters applied using a mark take precedence over filters passed on the command line or configured
+by the ``filterwarnings`` ini option.
+
+You may apply a filter to all tests of a class by using the ``filterwarnings`` mark as a class
+decorator or to all tests in a module by setting the :globalvar:`pytestmark` variable:
+
+.. code-block:: python
+
+ # turns all warnings into errors for this module
+ pytestmark = pytest.mark.filterwarnings("error")
+
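+For example, a minimal sketch using the mark as a class decorator (the ``api_v1`` helper
+repeats the function from the example above and the class name is illustrative):
+
+.. code-block:: python
+
+    import warnings
+
+    import pytest
+
+
+    def api_v1():
+        warnings.warn(UserWarning("api v1, should use functions from v2"))
+        return 1
+
+
+    # the class decorator applies the filter to every test method in the class
+    @pytest.mark.filterwarnings("ignore:api v1")
+    class TestApiV1:
+        def test_one(self):
+            assert api_v1() == 1
+
+        def test_two(self):
+            assert api_v1() == 1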
+
+
+*Credits go to Florian Schulze for the reference implementation in the* `pytest-warnings`_
+*plugin.*
+
+.. _`pytest-warnings`: https://github.com/fschulze/pytest-warnings
+
+Disabling warnings summary
+--------------------------
+
+Although not recommended, you can use the ``--disable-warnings`` command-line option to suppress the
+warning summary entirely from the test run output.
+
+Disabling warning capture entirely
+----------------------------------
+
+This plugin is enabled by default but can be disabled entirely in your ``pytest.ini`` file with:
+
+ .. code-block:: ini
+
+ [pytest]
+ addopts = -p no:warnings
+
+Alternatively, you can pass ``-p no:warnings`` on the command line. This might be useful if your test suite handles warnings
+using an external system.
+
+
+.. _`deprecation-warnings`:
+
+DeprecationWarning and PendingDeprecationWarning
+------------------------------------------------
+
+
+By default pytest will display ``DeprecationWarning`` and ``PendingDeprecationWarning`` warnings from
+user code and third-party libraries, as recommended by :pep:`565`.
+This helps users keep their code modern and avoid breakages when deprecated functionality is effectively removed.
+
+Sometimes it is useful to hide some specific deprecation warnings that happen in code that you have no control over
+(such as third-party libraries), in which case you might use the warning filters options (ini or marks) to ignore
+those warnings.
+
+For example:
+
+.. code-block:: ini
+
+ [pytest]
+ filterwarnings =
+ ignore:.*U.*mode is deprecated:DeprecationWarning
+
+
+This will ignore all warnings of type ``DeprecationWarning`` where the start of the message matches
+the regular expression ``".*U.*mode is deprecated"``.
+
+.. note::
+
+ If warnings are configured at the interpreter level, using
+ the :envvar:`python:PYTHONWARNINGS` environment variable or the
+ ``-W`` command-line option, pytest will not configure any filters by default.
+
+ Also pytest doesn't follow :pep:`506` suggestion of resetting all warning filters because
+ it might break test suites that configure warning filters themselves
+ by calling :func:`warnings.simplefilter` (see :issue:`2430` for an example of that).
+
+
+.. _`ensuring a function triggers a deprecation warning`:
+
+.. _ensuring_function_triggers:
+
+Ensuring code triggers a deprecation warning
+--------------------------------------------
+
+You can also use :func:`pytest.deprecated_call` for checking
+that a certain function call triggers a ``DeprecationWarning`` or
+``PendingDeprecationWarning``:
+
+.. code-block:: python
+
+ import pytest
+
+
+ def test_myfunction_deprecated():
+ with pytest.deprecated_call():
+ myfunction(17)
+
+This test will fail if ``myfunction`` does not issue a deprecation warning
+when called with a ``17`` argument.
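+
+For illustration, a minimal sketch of what such a (hypothetical) ``myfunction`` might look
+like for the test above to pass:
+
+.. code-block:: python
+
+    import warnings
+
+
+    def myfunction(arg):
+        # hypothetical implementation: warn about the deprecation, then keep working
+        warnings.warn("myfunction is deprecated, use mynewfunction", DeprecationWarning)
+        return arg * 2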
+
+
+
+
+.. _`asserting warnings`:
+
+.. _assertwarnings:
+
+.. _`asserting warnings with the warns function`:
+
+.. _warns:
+
+Asserting warnings with the warns function
+------------------------------------------
+
+
+
+You can check that code raises a particular warning using :func:`pytest.warns`,
+which works in a similar manner to :ref:`raises <assertraises>`:
+
+.. code-block:: python
+
+ import warnings
+ import pytest
+
+
+ def test_warning():
+ with pytest.warns(UserWarning):
+ warnings.warn("my warning", UserWarning)
+
+The test will fail if the warning in question is not raised. Use the keyword
+argument ``match`` to assert that the warning matches a text or regex::
+
+ >>> with warns(UserWarning, match='must be 0 or None'):
+ ... warnings.warn("value must be 0 or None", UserWarning)
+
+ >>> with warns(UserWarning, match=r'must be \d+$'):
+ ... warnings.warn("value must be 42", UserWarning)
+
+ >>> with warns(UserWarning, match=r'must be \d+$'):
+ ... warnings.warn("this is not here", UserWarning)
+ Traceback (most recent call last):
+ ...
+ Failed: DID NOT WARN. No warnings of type ...UserWarning... were emitted...
+
+You can also call :func:`pytest.warns` on a function or code string:
+
+.. code-block:: python
+
+ pytest.warns(expected_warning, func, *args, **kwargs)
+ pytest.warns(expected_warning, "func(*args, **kwargs)")
+
+The function also returns a list of all raised warnings (as
+``warnings.WarningMessage`` objects), which you can query for
+additional information:
+
+.. code-block:: python
+
+ with pytest.warns(RuntimeWarning) as record:
+ warnings.warn("another warning", RuntimeWarning)
+
+ # check that only one warning was raised
+ assert len(record) == 1
+ # check that the message matches
+ assert record[0].message.args[0] == "another warning"
+
+Alternatively, you can examine raised warnings in detail using the
+:ref:`recwarn <recwarn>` fixture (see below).
+
+
+The :ref:`recwarn <recwarn>` fixture automatically resets the warnings
+filter at the end of the test, so no global state is leaked.
+
+.. _`recording warnings`:
+
+.. _recwarn:
+
+Recording warnings
+------------------
+
+You can record raised warnings either using :func:`pytest.warns` or with
+the ``recwarn`` fixture.
+
+To record with :func:`pytest.warns` without asserting anything about the warnings,
+pass no arguments as the expected warning type and it will default to a generic Warning:
+
+.. code-block:: python
+
+ with pytest.warns() as record:
+ warnings.warn("user", UserWarning)
+ warnings.warn("runtime", RuntimeWarning)
+
+ assert len(record) == 2
+ assert str(record[0].message) == "user"
+ assert str(record[1].message) == "runtime"
+
+The ``recwarn`` fixture will record warnings for the whole function:
+
+.. code-block:: python
+
+ import warnings
+
+
+ def test_hello(recwarn):
+ warnings.warn("hello", UserWarning)
+ assert len(recwarn) == 1
+ w = recwarn.pop(UserWarning)
+ assert issubclass(w.category, UserWarning)
+ assert str(w.message) == "hello"
+ assert w.filename
+ assert w.lineno
+
+Both ``recwarn`` and :func:`pytest.warns` return the same interface for recorded
+warnings: a WarningsRecorder instance. To view the recorded warnings, you can
+iterate over this instance, call ``len`` on it to get the number of recorded
+warnings, or index into it to get a particular recorded warning.
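+
+For example, a small sketch (building on the snippets above) showing those three ways of
+inspecting the recorder:
+
+.. code-block:: python
+
+    import warnings
+
+    import pytest
+
+
+    def test_inspect_recorded_warnings():
+        with pytest.warns(UserWarning) as record:
+            warnings.warn("one", UserWarning)
+            warnings.warn("two", UserWarning)
+
+        # the recorder supports len(), iteration and indexing
+        assert len(record) == 2
+        assert all(issubclass(w.category, UserWarning) for w in record)
+        assert str(record[1].message) == "two"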
+
+.. currentmodule:: _pytest.warnings
+
+Full API: :class:`~_pytest.recwarn.WarningsRecorder`.
+
+.. _`warns use cases`:
+
+Additional use cases of warnings in tests
+-----------------------------------------
+
+Here are some use cases involving warnings that often come up in tests, and suggestions on how to deal with them:
+
+- To ensure that **any** warning is emitted, use:
+
+.. code-block:: python
+
+ with pytest.warns():
+ ...
+
+- To ensure that **no** warnings are emitted, use:
+
+.. code-block:: python
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ ...
+
+- To suppress warnings, use:
+
+.. code-block:: python
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ ...
+
+
+.. _custom_failure_messages:
+
+Custom failure messages
+-----------------------
+
+Recording warnings provides an opportunity to produce custom test
+failure messages for when no warnings are issued or other conditions
+are met.
+
+.. code-block:: python
+
+ def test():
+ with pytest.warns(Warning) as record:
+ f()
+ if not record:
+ pytest.fail("Expected a warning!")
+
+If no warnings are issued when calling ``f``, then ``not record`` will
+evaluate to ``True``. You can then call :func:`pytest.fail` with a
+custom error message.
+
+.. _internal-warnings:
+
+Internal pytest warnings
+------------------------
+
+pytest may generate its own warnings in some situations, such as improper usage or deprecated features.
+
+For example, pytest will emit a warning if it encounters a class that matches :confval:`python_classes` but also
+defines an ``__init__`` constructor, as this prevents the class from being collected:
+
+.. code-block:: python
+
+ # content of test_pytest_warnings.py
+ class Test:
+ def __init__(self):
+ pass
+
+ def test_foo(self):
+ assert 1 == 1
+
+.. code-block:: pytest
+
+ $ pytest test_pytest_warnings.py -q
+
+ ============================= warnings summary =============================
+ test_pytest_warnings.py:1
+ /home/sweet/project/test_pytest_warnings.py:1: PytestCollectionWarning: cannot collect test class 'Test' because it has a __init__ constructor (from: test_pytest_warnings.py)
+ class Test:
+
+ -- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
+ 1 warning in 0.12s
+
+These warnings might be filtered using the same builtin mechanisms used to filter other types of warnings.
+
+Please read our :ref:`backwards-compatibility` to learn how we proceed about deprecating and eventually removing
+features.
+
+The full list of warnings is listed in :ref:`the reference documentation <warnings ref>`.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/doctest.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/doctest.rst
new file mode 100644
index 0000000000..ce0b5a5f64
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/doctest.rst
@@ -0,0 +1,312 @@
+.. _doctest:
+
+How to run doctests
+=========================================================
+
+By default, all files matching the ``test*.txt`` pattern will
+be run through the python standard :mod:`doctest` module. You
+can change the pattern by issuing:
+
+.. code-block:: bash
+
+ pytest --doctest-glob="*.rst"
+
+on the command line. ``--doctest-glob`` can be given multiple times in the command-line.
+
+If you then have a text file like this:
+
+.. code-block:: text
+
+ # content of test_example.txt
+
+ hello this is a doctest
+ >>> x = 3
+ >>> x
+ 3
+
+then you can just invoke ``pytest`` directly:
+
+.. code-block:: pytest
+
+ $ pytest
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 1 item
+
+ test_example.txt . [100%]
+
+ ============================ 1 passed in 0.12s =============================
+
+By default, pytest will collect ``test*.txt`` files looking for doctest directives, but you
+can pass additional globs using the ``--doctest-glob`` option (multi-allowed).
+
+In addition to text files, you can also execute doctests directly from docstrings of your classes
+and functions, including from test modules:
+
+.. code-block:: python
+
+ # content of mymodule.py
+ def something():
+ """a doctest in a docstring
+ >>> something()
+ 42
+ """
+ return 42
+
+.. code-block:: pytest
+
+ $ pytest --doctest-modules
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 2 items
+
+ mymodule.py . [ 50%]
+ test_example.txt . [100%]
+
+ ============================ 2 passed in 0.12s =============================
+
+You can make these changes permanent in your project by
+putting them into a pytest.ini file like this:
+
+.. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ addopts = --doctest-modules
+
+
+Encoding
+--------
+
+The default encoding is **UTF-8**, but you can specify the encoding
+that will be used for those doctest files using the
+``doctest_encoding`` ini option:
+
+.. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ doctest_encoding = latin1
+
+.. _using doctest options:
+
+Using 'doctest' options
+-----------------------
+
+Python's standard :mod:`doctest` module provides some :ref:`options <python:option-flags-and-directives>`
+to configure the strictness of doctest tests. In pytest, you can enable those flags using the
+configuration file.
+
+For example, to make pytest ignore trailing whitespaces and ignore
+lengthy exception stack traces you can just write:
+
+.. code-block:: ini
+
+ [pytest]
+ doctest_optionflags = NORMALIZE_WHITESPACE IGNORE_EXCEPTION_DETAIL
+
+Alternatively, options can be enabled by an inline comment in the doctest
+itself:
+
+.. code-block:: rst
+
+ >>> something_that_raises() # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ValueError: ...
+
+pytest also introduces new options:
+
+* ``ALLOW_UNICODE``: when enabled, the ``u`` prefix is stripped from unicode
+ strings in expected doctest output. This allows doctests to run in Python 2
+ and Python 3 unchanged.
+
+* ``ALLOW_BYTES``: similarly, the ``b`` prefix is stripped from byte strings
+ in expected doctest output.
+
+* ``NUMBER``: when enabled, floating-point numbers only need to match as far as
+ the precision you have written in the expected doctest output. For example,
+ the following output would only need to match to 2 decimal places::
+
+ >>> math.pi
+ 3.14
+
+ If you wrote ``3.1416`` then the actual output would need to match to 4
+ decimal places; and so on.
+
+ This avoids false positives caused by limited floating-point precision, like
+ this::
+
+ Expected:
+ 0.233
+ Got:
+ 0.23300000000000001
+
+ ``NUMBER`` also supports lists of floating-point numbers -- in fact, it
+ matches floating-point numbers appearing anywhere in the output, even inside
+ a string! This means that it may not be appropriate to enable globally in
+ ``doctest_optionflags`` in your configuration file.
+
+ .. versionadded:: 5.1
+
+
+Continue on failure
+-------------------
+
+By default, pytest reports only the first failure for a given doctest. If
+you want to continue the test even when you have failures, do:
+
+.. code-block:: bash
+
+ pytest --doctest-modules --doctest-continue-on-failure
+
+
+Output format
+-------------
+
+You can change the diff output format on failure for your doctests
+by using one of the standard doctest module's report formats
+(see :data:`python:doctest.REPORT_UDIFF`, :data:`python:doctest.REPORT_CDIFF`,
+:data:`python:doctest.REPORT_NDIFF`, :data:`python:doctest.REPORT_ONLY_FIRST_FAILURE`):
+
+.. code-block:: bash
+
+ pytest --doctest-modules --doctest-report none
+ pytest --doctest-modules --doctest-report udiff
+ pytest --doctest-modules --doctest-report cdiff
+ pytest --doctest-modules --doctest-report ndiff
+ pytest --doctest-modules --doctest-report only_first_failure
+
+
+pytest-specific features
+------------------------
+
+Some features are provided to make writing doctests easier or better integrated with
+your existing test suite. Keep in mind, however, that by using those features you will make
+your doctests incompatible with the standard ``doctest`` module.
+
+Using fixtures
+^^^^^^^^^^^^^^
+
+It is possible to use fixtures using the ``getfixture`` helper:
+
+.. code-block:: text
+
+ # content of example.rst
+ >>> tmp = getfixture('tmp_path')
+ >>> ...
+ >>>
+
+Note that the fixture needs to be defined in a place visible to pytest, for example, a ``conftest.py``
+file or plugin; normal Python files containing docstrings are not scanned for fixtures
+unless explicitly configured by :confval:`python_files`.
+
+Also, the :ref:`usefixtures <usefixtures>` mark and fixtures marked as :ref:`autouse <autouse>` are supported
+when executing text doctest files.
+
+
+.. _`doctest_namespace`:
+
+'doctest_namespace' fixture
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``doctest_namespace`` fixture can be used to inject items into the
+namespace in which your doctests run. It is intended to be used within
+your own fixtures to provide the tests that use them with context.
+
+``doctest_namespace`` is a standard ``dict`` object into which you
+place the objects you want to appear in the doctest namespace:
+
+.. code-block:: python
+
+ # content of conftest.py
+ import numpy
+
+
+ @pytest.fixture(autouse=True)
+ def add_np(doctest_namespace):
+ doctest_namespace["np"] = numpy
+
+which can then be used in your doctests directly:
+
+.. code-block:: python
+
+ # content of numpy.py
+ def arange():
+ """
+ >>> a = np.arange(10)
+ >>> len(a)
+ 10
+ """
+ pass
+
+Note that, as with a normal ``conftest.py``, fixtures are discovered in the directory tree the conftest is in.
+This means that if you keep your doctests alongside your source code, the relevant ``conftest.py`` needs to be in the same directory tree.
+Fixtures will not be discovered in a sibling directory tree!
+
+Skipping tests
+^^^^^^^^^^^^^^
+
+For the same reasons one might want to skip normal tests, it is also possible to skip
+tests inside doctests.
+
+To skip a single check inside a doctest you can use the standard
+:data:`doctest.SKIP` directive:
+
+.. code-block:: python
+
+ def test_random(y):
+ """
+ >>> random.random() # doctest: +SKIP
+ 0.156231223
+
+ >>> 1 + 1
+ 2
+ """
+
+This will skip the first check, but not the second.
+
+pytest also allows using the standard pytest functions :func:`pytest.skip` and
+:func:`pytest.xfail` inside doctests, which might be useful because you can
+then skip/xfail tests based on external conditions:
+
+
+.. code-block:: text
+
+ >>> import sys, pytest
+ >>> if sys.platform.startswith('win'):
+ ... pytest.skip('this doctest does not work on Windows')
+ ...
+ >>> import fcntl
+ >>> ...
+
+However using those functions is discouraged because it reduces the readability of the
+docstring.
+
+.. note::
+
+ :func:`pytest.skip` and :func:`pytest.xfail` behave differently depending
+ if the doctests are in a Python file (in docstrings) or a text file containing
+ doctests intermingled with text:
+
+ * Python modules (docstrings): the functions only act in that specific docstring,
+ letting the other docstrings in the same module execute as normal.
+
+ * Text files: the functions will skip/xfail the checks for the rest of the entire
+ file.
+
+
+Alternatives
+------------
+
+While the built-in pytest support provides a good set of functionalities for using
+doctests, if you use them extensively you might be interested in those external packages
+which add many more features, and include pytest integration:
+
+* `pytest-doctestplus <https://github.com/astropy/pytest-doctestplus>`__: provides
+ advanced doctest support and enables the testing of reStructuredText (".rst") files.
+
+* `Sybil <https://sybil.readthedocs.io>`__: provides a way to test examples in
+ your documentation by parsing them from the documentation source and evaluating
+ the parsed examples as part of your normal test run.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/existingtestsuite.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/existingtestsuite.rst
new file mode 100644
index 0000000000..9909e7d113
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/existingtestsuite.rst
@@ -0,0 +1,34 @@
+.. _existingtestsuite:
+
+How to use pytest with an existing test suite
+==============================================
+
+Pytest can be used with most existing test suites, but its
+behavior differs from other test runners such as :ref:`nose <noseintegration>` or
+Python's default unittest framework.
+
+Before using this section you will want to :ref:`install pytest <getstarted>`.
+
+Running an existing test suite with pytest
+---------------------------------------------
+
+Say you want to contribute to an existing repository somewhere.
+After pulling the code into your development space using some
+flavor of version control and (optionally) setting up a virtualenv
+you will want to run:
+
+.. code-block:: bash
+
+ cd <repository>
+ pip install -e . # Environment dependent alternatives include
+ # 'python setup.py develop' and 'conda develop'
+
+in your project root. This will set up a symlink to your code in
+site-packages, allowing you to edit your code while your tests
+run against it as if it were installed.
+
+Setting up your project in development mode lets you avoid having to
+reinstall every time you want to run your tests, and is less brittle than
+mucking about with sys.path to point your tests at local code.
+
+Also consider using :ref:`tox <use tox>`.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/failures.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/failures.rst
new file mode 100644
index 0000000000..ef87550915
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/failures.rst
@@ -0,0 +1,160 @@
+.. _how-to-handle-failures:
+
+How to handle test failures
+=============================
+
+.. _maxfail:
+
+Stopping after the first (or N) failures
+---------------------------------------------------
+
+To stop the testing process after the first (N) failures:
+
+.. code-block:: bash
+
+ pytest -x # stop after first failure
+ pytest --maxfail=2 # stop after two failures
+
+
+.. _pdb-option:
+
+Using :doc:`python:library/pdb` with pytest
+-------------------------------------------
+
+Dropping to :doc:`pdb <python:library/pdb>` on failures
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Python comes with a builtin Python debugger called :doc:`pdb <python:library/pdb>`. ``pytest``
+allows one to drop into the :doc:`pdb <python:library/pdb>` prompt via a command line option:
+
+.. code-block:: bash
+
+ pytest --pdb
+
+This will invoke the Python debugger on every failure (or KeyboardInterrupt).
+Often you might only want to do this for the first failing test to understand
+a certain failure situation:
+
+.. code-block:: bash
+
+ pytest -x --pdb # drop to PDB on first failure, then end test session
+ pytest --pdb --maxfail=3 # drop to PDB for first three failures
+
+Note that on any failure the exception information is stored on
+``sys.last_value``, ``sys.last_type`` and ``sys.last_traceback``. In
+interactive use, this allows one to drop into postmortem debugging with
+any debug tool. One can also manually access the exception information,
+for example::
+
+ >>> import sys
+ >>> sys.last_traceback.tb_lineno
+ 42
+ >>> sys.last_value
+ AssertionError('assert result == "ok"',)
+
+
+.. _trace-option:
+
+Dropping to :doc:`pdb <python:library/pdb>` at the start of a test
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``pytest`` allows one to drop into the :doc:`pdb <python:library/pdb>` prompt immediately at the start of each test via a command line option:
+
+.. code-block:: bash
+
+ pytest --trace
+
+This will invoke the Python debugger at the start of every test.
+
+.. _breakpoints:
+
+Setting breakpoints
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionadded:: 2.4.0
+
+To set a breakpoint in your code, use the native Python ``import pdb;pdb.set_trace()`` call
+and pytest will automatically disable its output capture for that test (see the sketch
+after this list):
+
+* Output capture in other tests is not affected.
+* Any prior test output that has already been captured will be processed as
+  such.
+* Output capture is resumed when the debugger session ends (via the
+  ``continue`` command).
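+
+A minimal sketch (the test body and names are purely illustrative):
+
+.. code-block:: python
+
+    def test_with_breakpoint():
+        values = [1, 2, 3]
+        import pdb; pdb.set_trace()  # execution pauses here; capture is disabled
+        assert sum(values) == 6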
+
+
+.. _`breakpoint-builtin`:
+
+Using the builtin breakpoint function
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Python 3.7 introduces a builtin ``breakpoint()`` function.
+Pytest supports the use of ``breakpoint()`` with the following behaviours:
+
+ - When ``breakpoint()`` is called and ``PYTHONBREAKPOINT`` is set to the default value, pytest will use the custom internal PDB trace UI instead of the system default ``Pdb``.
+ - When tests are complete, the system will default back to the system ``Pdb`` trace UI.
+ - With ``--pdb`` passed to pytest, the custom internal Pdb trace UI is used with both ``breakpoint()`` and failed tests/unhandled exceptions.
+ - ``--pdbcls`` can be used to specify a custom debugger class.
+
+
+.. _faulthandler:
+
+Fault Handler
+-------------
+
+.. versionadded:: 5.0
+
+The :mod:`faulthandler` standard module
+can be used to dump Python tracebacks on a segfault or after a timeout.
+
+The module is automatically enabled for pytest runs, unless ``-p no:faulthandler`` is given
+on the command-line.
+
+Also the :confval:`faulthandler_timeout=X<faulthandler_timeout>` configuration option can be used
+to dump the traceback of all threads if a test takes longer than ``X``
+seconds to finish (not available on Windows).
+
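+For example (the ``60`` second threshold is only an illustration):
+
+.. code-block:: bash
+
+    pytest -p no:faulthandler           # disable the faulthandler plugin
+    pytest -o faulthandler_timeout=60   # dump tracebacks of all threads if a test
+                                        # runs longer than 60 seconds
+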
+.. note::
+
+ This functionality has been integrated from the external
+ `pytest-faulthandler <https://github.com/pytest-dev/pytest-faulthandler>`__ plugin, with two
+ small differences:
+
+ * To disable it, use ``-p no:faulthandler`` instead of ``--no-faulthandler``: the former
+ can be used with any plugin, so it saves one option.
+
+ * The ``--faulthandler-timeout`` command-line option has become the
+ :confval:`faulthandler_timeout` configuration option. It can still be configured from
+ the command-line using ``-o faulthandler_timeout=X``.
+
+
+.. _unraisable:
+
+Warning about unraisable exceptions and unhandled thread exceptions
+-------------------------------------------------------------------
+
+.. versionadded:: 6.2
+
+.. note::
+
+ These features only work on Python>=3.8.
+
+Unraisable exceptions are exceptions raised in a situation in which
+they cannot propagate to a caller. The most common case is an exception raised
+in a :meth:`__del__ <object.__del__>` implementation.
+
+Unhandled thread exceptions are exceptions raised in a :class:`~threading.Thread`
+but not handled, causing the thread to terminate uncleanly.
+
+Both types of exceptions are normally considered bugs, but may go unnoticed
+because they don't cause the program itself to crash. Pytest detects these
+conditions and issues a warning that is visible in the test run summary.
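+
+For illustration, a minimal sketch (the class and test names are hypothetical) of code
+that would be reported as an unraisable exception:
+
+.. code-block:: python
+
+    class BrokenDel:
+        def __del__(self):
+            # this exception cannot propagate to any caller
+            raise ValueError("cleanup failed")
+
+
+    def test_unraisable_exception():
+        obj = BrokenDel()
+        del obj  # __del__ raises; pytest reports a warning in the run summary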
+
+The plugins are automatically enabled for pytest runs, unless the
+``-p no:unraisableexception`` (for unraisable exceptions) and
+``-p no:threadexception`` (for thread exceptions) options are given on the
+command-line.
+
+The warnings may be silenced selectively using the :ref:`pytest.mark.filterwarnings ref`
+mark. The warning categories are :class:`pytest.PytestUnraisableExceptionWarning` and
+:class:`pytest.PytestUnhandledThreadExceptionWarning`.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/fixtures.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/fixtures.rst
new file mode 100644
index 0000000000..0801387745
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/fixtures.rst
@@ -0,0 +1,1887 @@
+.. _how-to-fixtures:
+
+How to use fixtures
+====================
+
+.. seealso:: :ref:`about-fixtures`
+.. seealso:: :ref:`Fixtures reference <reference-fixtures>`
+
+
+"Requesting" fixtures
+---------------------
+
+At a basic level, test functions request fixtures they require by declaring
+them as arguments.
+
+When pytest goes to run a test, it looks at the parameters in that test
+function's signature, and then searches for fixtures that have the same names as
+those parameters. Once pytest finds them, it runs those fixtures, captures what
+they returned (if anything), and passes those objects into the test function as
+arguments.
+
+
+Quick example
+^^^^^^^^^^^^^
+
+.. code-block:: python
+
+ import pytest
+
+
+ class Fruit:
+ def __init__(self, name):
+ self.name = name
+ self.cubed = False
+
+ def cube(self):
+ self.cubed = True
+
+
+ class FruitSalad:
+ def __init__(self, *fruit_bowl):
+ self.fruit = fruit_bowl
+ self._cube_fruit()
+
+ def _cube_fruit(self):
+ for fruit in self.fruit:
+ fruit.cube()
+
+
+ # Arrange
+ @pytest.fixture
+ def fruit_bowl():
+ return [Fruit("apple"), Fruit("banana")]
+
+
+ def test_fruit_salad(fruit_bowl):
+ # Act
+ fruit_salad = FruitSalad(*fruit_bowl)
+
+ # Assert
+ assert all(fruit.cubed for fruit in fruit_salad.fruit)
+
+In this example, ``test_fruit_salad`` "**requests**" ``fruit_bowl`` (i.e.
+``def test_fruit_salad(fruit_bowl):``), and when pytest sees this, it will
+execute the ``fruit_bowl`` fixture function and pass the object it returns into
+``test_fruit_salad`` as the ``fruit_bowl`` argument.
+
+Here's roughly
+what's happening if we were to do it by hand:
+
+.. code-block:: python
+
+ def fruit_bowl():
+ return [Fruit("apple"), Fruit("banana")]
+
+
+ def test_fruit_salad(fruit_bowl):
+ # Act
+ fruit_salad = FruitSalad(*fruit_bowl)
+
+ # Assert
+ assert all(fruit.cubed for fruit in fruit_salad.fruit)
+
+
+ # Arrange
+ bowl = fruit_bowl()
+ test_fruit_salad(fruit_bowl=bowl)
+
+
+Fixtures can **request** other fixtures
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+One of pytest's greatest strengths is its extremely flexible fixture system. It
+allows us to boil down complex requirements for tests into simpler and more
+organized functions, where each one only needs to describe the things it
+depends on. We'll get more into this further down, but for now,
+here's a quick example to demonstrate how fixtures can use other fixtures:
+
+.. code-block:: python
+
+ # contents of test_append.py
+ import pytest
+
+
+ # Arrange
+ @pytest.fixture
+ def first_entry():
+ return "a"
+
+
+ # Arrange
+ @pytest.fixture
+ def order(first_entry):
+ return [first_entry]
+
+
+ def test_string(order):
+ # Act
+ order.append("b")
+
+ # Assert
+ assert order == ["a", "b"]
+
+
+Notice that this is the same example from above, but very little changed. The
+fixtures in pytest **request** fixtures just like tests. All the same
+**requesting** rules that apply to tests apply to fixtures as well. Here's how this
+example would work if we did it by hand:
+
+.. code-block:: python
+
+ def first_entry():
+ return "a"
+
+
+ def order(first_entry):
+ return [first_entry]
+
+
+ def test_string(order):
+ # Act
+ order.append("b")
+
+ # Assert
+ assert order == ["a", "b"]
+
+
+ entry = first_entry()
+ the_list = order(first_entry=entry)
+ test_string(order=the_list)
+
+Fixtures are reusable
+^^^^^^^^^^^^^^^^^^^^^
+
+One of the things that makes pytest's fixture system so powerful is that it
+gives us the ability to define a generic setup step that can be reused over and
+over, just like a normal function would be. Two different tests can request
+the same fixture and have pytest give each test its own result from that
+fixture.
+
+This is extremely useful for making sure tests aren't affected by each other. We
+can use this system to make sure each test gets its own fresh batch of data and
+is starting from a clean state so it can provide consistent, repeatable results.
+
+Here's an example of how this can come in handy:
+
+.. code-block:: python
+
+ # contents of test_append.py
+ import pytest
+
+
+ # Arrange
+ @pytest.fixture
+ def first_entry():
+ return "a"
+
+
+ # Arrange
+ @pytest.fixture
+ def order(first_entry):
+ return [first_entry]
+
+
+ def test_string(order):
+ # Act
+ order.append("b")
+
+ # Assert
+ assert order == ["a", "b"]
+
+
+ def test_int(order):
+ # Act
+ order.append(2)
+
+ # Assert
+ assert order == ["a", 2]
+
+
+Each test here is being given its own copy of that ``list`` object,
+which means the ``order`` fixture is getting executed twice (the same
+is true for the ``first_entry`` fixture). If we were to do this by hand as
+well, it would look something like this:
+
+.. code-block:: python
+
+ def first_entry():
+ return "a"
+
+
+ def order(first_entry):
+ return [first_entry]
+
+
+ def test_string(order):
+ # Act
+ order.append("b")
+
+ # Assert
+ assert order == ["a", "b"]
+
+
+ def test_int(order):
+ # Act
+ order.append(2)
+
+ # Assert
+ assert order == ["a", 2]
+
+
+ entry = first_entry()
+ the_list = order(first_entry=entry)
+ test_string(order=the_list)
+
+ entry = first_entry()
+ the_list = order(first_entry=entry)
+ test_int(order=the_list)
+
+A test/fixture can **request** more than one fixture at a time
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Tests and fixtures aren't limited to **requesting** a single fixture at a time.
+They can request as many as they like. Here's another quick example to
+demonstrate:
+
+.. code-block:: python
+
+ # contents of test_append.py
+ import pytest
+
+
+ # Arrange
+ @pytest.fixture
+ def first_entry():
+ return "a"
+
+
+ # Arrange
+ @pytest.fixture
+ def second_entry():
+ return 2
+
+
+ # Arrange
+ @pytest.fixture
+ def order(first_entry, second_entry):
+ return [first_entry, second_entry]
+
+
+ # Arrange
+ @pytest.fixture
+ def expected_list():
+ return ["a", 2, 3.0]
+
+
+ def test_string(order, expected_list):
+ # Act
+ order.append(3.0)
+
+ # Assert
+ assert order == expected_list
+
+Fixtures can be **requested** more than once per test (return values are cached)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Fixtures can also be **requested** more than once during the same test, and
+pytest won't execute them again for that test. This means we can **request**
+fixtures in multiple fixtures that are dependent on them (and even again in the
+test itself) without those fixtures being executed more than once.
+
+.. code-block:: python
+
+ # contents of test_append.py
+ import pytest
+
+
+ # Arrange
+ @pytest.fixture
+ def first_entry():
+ return "a"
+
+
+ # Arrange
+ @pytest.fixture
+ def order():
+ return []
+
+
+ # Act
+ @pytest.fixture
+ def append_first(order, first_entry):
+ return order.append(first_entry)
+
+
+ def test_string_only(append_first, order, first_entry):
+ # Assert
+ assert order == [first_entry]
+
+If a **requested** fixture was executed once for every time it was **requested**
+during a test, then this test would fail because both ``append_first`` and
+``test_string_only`` would see ``order`` as an empty list (i.e. ``[]``), but
+since the return value of ``order`` was cached (along with any side effects
+executing it may have had) after the first time it was called, both the test and
+``append_first`` were referencing the same object, and the test saw the effect
+``append_first`` had on that object.
+
+.. _`autouse`:
+.. _`autouse fixtures`:
+
+Autouse fixtures (fixtures you don't have to request)
+-----------------------------------------------------
+
+Sometimes you may want to have a fixture (or even several) that you know all
+your tests will depend on. "Autouse" fixtures are a convenient way to make all
+tests automatically **request** them. This can cut out a
+lot of redundant **requests**, and can even provide more advanced fixture usage
+(more on that further down).
+
+We can make a fixture an autouse fixture by passing in ``autouse=True`` to the
+fixture's decorator. Here's a simple example for how they can be used:
+
+.. code-block:: python
+
+ # contents of test_append.py
+ import pytest
+
+
+ @pytest.fixture
+ def first_entry():
+ return "a"
+
+
+ @pytest.fixture
+ def order(first_entry):
+ return []
+
+
+ @pytest.fixture(autouse=True)
+ def append_first(order, first_entry):
+ return order.append(first_entry)
+
+
+ def test_string_only(order, first_entry):
+ assert order == [first_entry]
+
+
+ def test_string_and_int(order, first_entry):
+ order.append(2)
+ assert order == [first_entry, 2]
+
+In this example, the ``append_first`` fixture is an autouse fixture. Because it
+happens automatically, both tests are affected by it, even though neither test
+**requested** it. That doesn't mean they *can't* be **requested** though; just
+that it isn't *necessary*.
+
+.. _smtpshared:
+
+Scope: sharing fixtures across classes, modules, packages or session
+--------------------------------------------------------------------
+
+.. regendoc:wipe
+
+Fixtures requiring network access depend on connectivity and are
+usually time-expensive to create. Extending the previous example, we
+can add a ``scope="module"`` parameter to the
+:py:func:`@pytest.fixture <pytest.fixture>` invocation
+to cause a ``smtp_connection`` fixture function, responsible for creating a connection to a
+preexisting SMTP server, to only be invoked once per test *module* (the default is to invoke
+once per test *function*). Multiple test functions in a test module will thus
+each receive the same ``smtp_connection`` fixture instance, saving time.
+Possible values for ``scope`` are: ``function``, ``class``, ``module``, ``package`` or ``session``.
+
+The next example puts the fixture function into a separate ``conftest.py`` file
+so that tests from multiple test modules in the directory can
+access the fixture function:
+
+.. code-block:: python
+
+ # content of conftest.py
+ import pytest
+ import smtplib
+
+
+ @pytest.fixture(scope="module")
+ def smtp_connection():
+ return smtplib.SMTP("smtp.gmail.com", 587, timeout=5)
+
+
+.. code-block:: python
+
+ # content of test_module.py
+
+
+ def test_ehlo(smtp_connection):
+ response, msg = smtp_connection.ehlo()
+ assert response == 250
+ assert b"smtp.gmail.com" in msg
+ assert 0 # for demo purposes
+
+
+ def test_noop(smtp_connection):
+ response, msg = smtp_connection.noop()
+ assert response == 250
+ assert 0 # for demo purposes
+
+Here, the ``test_ehlo`` needs the ``smtp_connection`` fixture value. pytest
+will discover and call the :py:func:`@pytest.fixture <pytest.fixture>`
+marked ``smtp_connection`` fixture function. Running the test looks like this:
+
+.. code-block:: pytest
+
+ $ pytest test_module.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 2 items
+
+ test_module.py FF [100%]
+
+ ================================= FAILURES =================================
+ ________________________________ test_ehlo _________________________________
+
+ smtp_connection = <smtplib.SMTP object at 0xdeadbeef0001>
+
+ def test_ehlo(smtp_connection):
+ response, msg = smtp_connection.ehlo()
+ assert response == 250
+ assert b"smtp.gmail.com" in msg
+ > assert 0 # for demo purposes
+ E assert 0
+
+ test_module.py:7: AssertionError
+ ________________________________ test_noop _________________________________
+
+ smtp_connection = <smtplib.SMTP object at 0xdeadbeef0001>
+
+ def test_noop(smtp_connection):
+ response, msg = smtp_connection.noop()
+ assert response == 250
+ > assert 0 # for demo purposes
+ E assert 0
+
+ test_module.py:13: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_module.py::test_ehlo - assert 0
+ FAILED test_module.py::test_noop - assert 0
+ ============================ 2 failed in 0.12s =============================
+
+You see the two ``assert 0`` failures and, more importantly, you can also see
+that the **exact same** ``smtp_connection`` object was passed into the
+two test functions, because pytest shows the incoming argument values in the
+traceback. As a result, the two test functions using ``smtp_connection`` run
+as quickly as a single one because they reuse the same instance.
+
+If you decide that you rather want to have a session-scoped ``smtp_connection``
+instance, you can simply declare it:
+
+.. code-block:: python
+
+ @pytest.fixture(scope="session")
+ def smtp_connection():
+ # the returned fixture value will be shared for
+ # all tests requesting it
+ ...
+
+
+Fixture scopes
+^^^^^^^^^^^^^^
+
+Fixtures are created when first requested by a test, and are destroyed based on their ``scope``:
+
+* ``function``: the default scope, the fixture is destroyed at the end of the test.
+* ``class``: the fixture is destroyed during teardown of the last test in the class.
+* ``module``: the fixture is destroyed during teardown of the last test in the module.
+* ``package``: the fixture is destroyed during teardown of the last test in the package.
+* ``session``: the fixture is destroyed at the end of the test session.
+
+.. note::
+
+ Pytest only caches one instance of a fixture at a time, which
+ means that when using a parametrized fixture, pytest may invoke a fixture more than once in
+ the given scope.
+
+.. _dynamic scope:
+
+Dynamic scope
+^^^^^^^^^^^^^
+
+.. versionadded:: 5.2
+
+In some cases, you might want to change the scope of the fixture without changing the code.
+To do that, pass a callable to ``scope``. The callable must return a string with a valid scope
+and will be executed only once - during the fixture definition. It will be called with two
+keyword arguments - ``fixture_name`` as a string and ``config`` with a configuration object.
+
+This can be especially useful when dealing with fixtures that need time for setup, like spawning
+a docker container. You can use the command-line argument to control the scope of the spawned
+containers for different environments. See the example below.
+
+.. code-block:: python
+
+ def determine_scope(fixture_name, config):
+ if config.getoption("--keep-containers", None):
+ return "session"
+ return "function"
+
+
+ @pytest.fixture(scope=determine_scope)
+ def docker_container():
+ yield spawn_container()
+
+
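+For the sketch above to work, the ``--keep-containers`` option (part of this example, not
+a built-in pytest flag) would also need to be registered, for instance in a
+``conftest.py``:
+
+.. code-block:: python
+
+    # content of conftest.py
+    def pytest_addoption(parser):
+        parser.addoption(
+            "--keep-containers",
+            action="store_true",
+            default=False,
+            help="keep spawned containers alive for the whole session",
+        )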
+
+.. _`finalization`:
+
+Teardown/Cleanup (AKA Fixture finalization)
+-------------------------------------------
+
+When we run our tests, we'll want to make sure they clean up after themselves so
+they don't mess with any other tests (and also so that we don't leave behind a
+mountain of test data to bloat the system). Fixtures in pytest offer a very
+useful teardown system, which allows us to define the specific steps necessary
+for each fixture to clean up after itself.
+
+This system can be leveraged in two ways.
+
+.. _`yield fixtures`:
+
+1. ``yield`` fixtures (recommended)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. regendoc: wipe
+
+"Yield" fixtures ``yield`` instead of ``return``. With these
+fixtures, we can run some code and pass an object back to the requesting
+fixture/test, just like with the other fixtures. The only differences are:
+
+1. ``return`` is swapped out for ``yield``.
+2. Any teardown code for that fixture is placed *after* the ``yield``.
+
+Once pytest figures out a linear order for the fixtures, it will run each one up
+until it returns or yields, and then move on to the next fixture in the list to
+do the same thing.
+
+Once the test is finished, pytest will go back down the list of fixtures, but in
+the *reverse order*, taking each one that yielded, and running the code inside
+it that was *after* the ``yield`` statement.
+
+As a simple example, consider this basic email module:
+
+.. code-block:: python
+
+ # content of emaillib.py
+ class MailAdminClient:
+ def create_user(self):
+ return MailUser()
+
+ def delete_user(self, user):
+ # do some cleanup
+ pass
+
+
+ class MailUser:
+ def __init__(self):
+ self.inbox = []
+
+ def send_email(self, email, other):
+ other.inbox.append(email)
+
+ def clear_mailbox(self):
+ self.inbox.clear()
+
+
+ class Email:
+ def __init__(self, subject, body):
+ self.subject = subject
+ self.body = body
+
+Let's say we want to test sending email from one user to another. We'll have to
+first make each user, then send the email from one user to the other, and
+finally assert that the other user received that message in their inbox. If we
+want to clean up after the test runs, we'll likely have to make sure the other
+user's mailbox is emptied before deleting that user, otherwise the system may
+complain.
+
+Here's what that might look like:
+
+.. code-block:: python
+
+ # content of test_emaillib.py
+ import pytest
+
+ from emaillib import Email, MailAdminClient
+
+
+ @pytest.fixture
+ def mail_admin():
+ return MailAdminClient()
+
+
+ @pytest.fixture
+ def sending_user(mail_admin):
+ user = mail_admin.create_user()
+ yield user
+ mail_admin.delete_user(user)
+
+
+ @pytest.fixture
+ def receiving_user(mail_admin):
+ user = mail_admin.create_user()
+ yield user
+ mail_admin.delete_user(user)
+
+
+ def test_email_received(sending_user, receiving_user):
+ email = Email(subject="Hey!", body="How's it going?")
+ sending_user.send_email(email, receiving_user)
+ assert email in receiving_user.inbox
+
+Because ``receiving_user`` is the last fixture to run during setup, it's the first to run
+during teardown.
+
+There is a risk that even having the order right on the teardown side of things
+doesn't guarantee a safe cleanup. That's covered in a bit more detail in
+:ref:`safe teardowns`.
+
+.. code-block:: pytest
+
+ $ pytest -q test_emaillib.py
+ . [100%]
+ 1 passed in 0.12s
+
+Handling errors for yield fixture
+"""""""""""""""""""""""""""""""""
+
+If a yield fixture raises an exception before yielding, pytest won't try to run
+the teardown code after that yield fixture's ``yield`` statement. But, for every
+fixture that has already run successfully for that test, pytest will still
+attempt to tear them down as it normally would.
+
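+As a small illustrative sketch (separate from the email example above), the
+second fixture below raises before yielding, so its own post-``yield`` code and
+the test never run, but the fixture it depends on is still torn down normally:
+
+.. code-block:: python
+
+    import pytest
+
+
+    @pytest.fixture
+    def resource_a():
+        yield "a"
+        print("teardown a")  # still runs: this fixture finished its setup
+
+
+    @pytest.fixture
+    def resource_b(resource_a):
+        raise RuntimeError("failed before yielding")  # setup fails here
+        yield "b"
+        print("teardown b")  # never runs
+
+
+    def test_uses_both(resource_b):
+        pass  # never reached; reported as an error during setup
+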
+2. Adding finalizers directly
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+While yield fixtures are considered to be the cleaner and more straightforward
+option, there is another choice, and that is to add "finalizer" functions
+directly to the test's `request-context`_ object. It brings a similar result as
+yield fixtures, but requires a bit more verbosity.
+
+In order to use this approach, we have to request the `request-context`_ object
+(just like we would request another fixture) in the fixture we need to add
+teardown code for, and then pass a callable, containing that teardown code, to
+its ``addfinalizer`` method.
+
+We have to be careful though, because pytest will run that finalizer once it's
+been added, even if that fixture raises an exception after adding the finalizer.
+So to make sure we don't run the finalizer code when we wouldn't need to, we
+would only add the finalizer once the fixture has done something that we'd need
+to tear down.
+
+Here's how the previous example would look using the ``addfinalizer`` method:
+
+.. code-block:: python
+
+ # content of test_emaillib.py
+ import pytest
+
+ from emaillib import Email, MailAdminClient
+
+
+ @pytest.fixture
+ def mail_admin():
+ return MailAdminClient()
+
+
+ @pytest.fixture
+ def sending_user(mail_admin):
+ user = mail_admin.create_user()
+ yield user
+ mail_admin.delete_user(user)
+
+
+ @pytest.fixture
+ def receiving_user(mail_admin, request):
+ user = mail_admin.create_user()
+
+ def delete_user():
+ mail_admin.delete_user(user)
+
+ request.addfinalizer(delete_user)
+ return user
+
+
+ @pytest.fixture
+ def email(sending_user, receiving_user, request):
+ _email = Email(subject="Hey!", body="How's it going?")
+ sending_user.send_email(_email, receiving_user)
+
+ def empty_mailbox():
+ receiving_user.clear_mailbox()
+
+ request.addfinalizer(empty_mailbox)
+ return _email
+
+
+ def test_email_received(receiving_user, email):
+ assert email in receiving_user.inbox
+
+
+It's a bit longer than yield fixtures and a bit more complex, but it
+does offer some nuances for when you're in a pinch.
+
+.. code-block:: pytest
+
+ $ pytest -q test_emaillib.py
+ . [100%]
+ 1 passed in 0.12s
+
+.. _`safe teardowns`:
+
+Safe teardowns
+--------------
+
+The fixture system of pytest is *very* powerful, but it's still being run by a
+computer, so it isn't able to figure out how to safely tear down everything we
+throw at it. If we aren't careful, an error in the wrong spot might leave stuff
+from our tests behind, and that can cause further issues pretty quickly.
+
+For example, consider the following tests (based off of the mail example from
+above):
+
+.. code-block:: python
+
+ # content of test_emaillib.py
+ import pytest
+
+ from emaillib import Email, MailAdminClient
+
+
+ @pytest.fixture
+ def setup():
+ mail_admin = MailAdminClient()
+ sending_user = mail_admin.create_user()
+ receiving_user = mail_admin.create_user()
+ email = Email(subject="Hey!", body="How's it going?")
+ sending_user.send_email(email, receiving_user)
+ yield receiving_user, email
+ receiving_user.clear_mailbox()
+ mail_admin.delete_user(sending_user)
+ mail_admin.delete_user(receiving_user)
+
+
+ def test_email_received(setup):
+ receiving_user, email = setup
+ assert email in receiving_user.inbox
+
+This version is a lot more compact, but it's also harder to read, doesn't have a
+very descriptive fixture name, and none of the fixtures can be reused easily.
+
+There's also a more serious issue, which is that if any of those steps in the
+setup raise an exception, none of the teardown code will run.
+
+One option might be to go with the ``addfinalizer`` method instead of yield
+fixtures, but that might get pretty complex and difficult to maintain (and it
+wouldn't be compact anymore).
+
+.. code-block:: pytest
+
+ $ pytest -q test_emaillib.py
+ . [100%]
+ 1 passed in 0.12s
+
+.. _`safe fixture structure`:
+
+Safe fixture structure
+^^^^^^^^^^^^^^^^^^^^^^
+
+The safest and simplest fixture structure requires limiting fixtures to only
+making one state-changing action each, and then bundling them together with
+their teardown code, as :ref:`the email examples above <yield fixtures>` showed.
+
+The chance that a state-changing operation can fail but still modify state is
+negligible, as most of these operations tend to be `transaction
+<https://en.wikipedia.org/wiki/Transaction_processing>`_-based (at least at the
+level of testing where state could be left behind). So if we make sure that any
+successful state-changing action gets torn down by moving it to a separate
+fixture function and separating it from other, potentially failing
+state-changing actions, then our tests will stand the best chance at leaving
+the test environment the way they found it.
+
+For an example, let's say we have a website with a login page, and we have
+access to an admin API where we can generate users. For our test, we want to:
+
+1. Create a user through that admin API
+2. Launch a browser using Selenium
+3. Go to the login page of our site
+4. Log in as the user we created
+5. Assert that their name is in the header of the landing page
+
+We wouldn't want to leave that user in the system, nor would we want to leave
+that browser session running, so we'll want to make sure the fixtures that
+create those things clean up after themselves.
+
+Here's what that might look like:
+
+.. note::
+
+ For this example, certain fixtures (i.e. ``base_url`` and
+ ``admin_credentials``) are implied to exist elsewhere. So for now, let's
+ assume they exist, and we're just not looking at them.
+
+.. code-block:: python
+
+ from uuid import uuid4
+ from urllib.parse import urljoin
+
+ from selenium.webdriver import Chrome
+ import pytest
+
+ from src.utils.pages import LoginPage, LandingPage
+ from src.utils import AdminApiClient
+ from src.utils.data_types import User
+
+
+ @pytest.fixture
+ def admin_client(base_url, admin_credentials):
+ return AdminApiClient(base_url, **admin_credentials)
+
+
+ @pytest.fixture
+ def user(admin_client):
+ _user = User(name="Susan", username=f"testuser-{uuid4()}", password="P4$$word")
+ admin_client.create_user(_user)
+ yield _user
+ admin_client.delete_user(_user)
+
+
+ @pytest.fixture
+ def driver():
+ _driver = Chrome()
+ yield _driver
+ _driver.quit()
+
+
+ @pytest.fixture
+ def login(driver, base_url, user):
+ driver.get(urljoin(base_url, "/login"))
+ page = LoginPage(driver)
+ page.login(user)
+
+
+ @pytest.fixture
+ def landing_page(driver, login):
+ return LandingPage(driver)
+
+
+ def test_name_on_landing_page_after_login(landing_page, user):
+ assert landing_page.header == f"Welcome, {user.name}!"
+
+The way the dependencies are laid out means it's unclear if the ``user``
+fixture would execute before the ``driver`` fixture. But that's ok, because
+those are atomic operations, and so it doesn't matter which one runs first
+because the sequence of events for the test is still `linearizable
+<https://en.wikipedia.org/wiki/Linearizability>`_. But what *does* matter is
+that, no matter which one runs first, if one raises an exception while the
+other would not have, neither will have left anything behind. If ``driver``
+executes before ``user``, and ``user`` raises an exception, the driver will
+still quit, and the user was never made. And if ``driver`` was the one to raise
+the exception, then the driver would never have been started and the user would
+never have been made.
+
+.. note::
+
+ While the ``user`` fixture doesn't *actually* need to happen before the
+ ``driver`` fixture, if we made ``driver`` request ``user``, it might save
+ some time in the event that making the user raises an exception, since it
+ won't bother trying to start the driver, which is a fairly expensive
+ operation.
+
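+That optimization would be a one-line change to the example above, simply making
+``driver`` request ``user``:
+
+.. code-block:: python
+
+    @pytest.fixture
+    def driver(user):
+        # if creating the user fails, the expensive browser start-up is skipped
+        _driver = Chrome()
+        yield _driver
+        _driver.quit()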
+
+Running multiple ``assert`` statements safely
+---------------------------------------------
+
+Sometimes you may want to run multiple asserts after doing all that setup, which
+makes sense as, in more complex systems, a single action can kick off multiple
+behaviors. pytest has a convenient way of handling this and it combines a bunch
+of what we've gone over so far.
+
+All that's needed is stepping up to a larger scope, then having the **act**
+step defined as an autouse fixture, and finally, making sure all the fixtures
+are targeting that higher level scope.
+
+Let's pull :ref:`an example from above <safe fixture structure>`, and tweak it a
+bit. Let's say that in addition to checking for a welcome message in the header,
+we also want to check for a sign out button, and a link to the user's profile.
+
+Let's take a look at how we can structure that so we can run multiple asserts
+without having to repeat all those steps again.
+
+.. note::
+
+ For this example, certain fixtures (i.e. ``base_url`` and
+ ``admin_credentials``) are implied to exist elsewhere. So for now, let's
+ assume they exist, and we're just not looking at them.
+
+.. code-block:: python
+
+ # contents of tests/end_to_end/test_login.py
+ from uuid import uuid4
+ from urllib.parse import urljoin
+
+ from selenium.webdriver import Chrome
+ import pytest
+
+ from src.utils.pages import LoginPage, LandingPage
+ from src.utils import AdminApiClient
+ from src.utils.data_types import User
+
+
+ @pytest.fixture(scope="class")
+ def admin_client(base_url, admin_credentials):
+ return AdminApiClient(base_url, **admin_credentials)
+
+
+ @pytest.fixture(scope="class")
+ def user(admin_client):
+ _user = User(name="Susan", username=f"testuser-{uuid4()}", password="P4$$word")
+ admin_client.create_user(_user)
+ yield _user
+ admin_client.delete_user(_user)
+
+
+ @pytest.fixture(scope="class")
+ def driver():
+ _driver = Chrome()
+ yield _driver
+ _driver.quit()
+
+
+ @pytest.fixture(scope="class")
+ def landing_page(driver, login):
+ return LandingPage(driver)
+
+
+ class TestLandingPageSuccess:
+ @pytest.fixture(scope="class", autouse=True)
+ def login(self, driver, base_url, user):
+ driver.get(urljoin(base_url, "/login"))
+ page = LoginPage(driver)
+ page.login(user)
+
+ def test_name_in_header(self, landing_page, user):
+ assert landing_page.header == f"Welcome, {user.name}!"
+
+ def test_sign_out_button(self, landing_page):
+ assert landing_page.sign_out_button.is_displayed()
+
+        def test_profile_link(self, landing_page, user, base_url):
+ profile_href = urljoin(base_url, f"/profile?id={user.profile_id}")
+ assert landing_page.profile_link.get_attribute("href") == profile_href
+
+Notice that the methods are only referencing ``self`` in the signature as a
+formality. No state is tied to the actual test class as it might be in the
+``unittest.TestCase`` framework. Everything is managed by the pytest fixture
+system.
+
+Each method only has to request the fixtures that it actually needs without
+worrying about order. This is because the **act** fixture is an autouse fixture,
+and it made sure all the other fixtures executed before it. There are no more
+changes of state that need to take place, so the tests are free to make as many
+non-state-changing queries as they want without risking stepping on the toes of
+the other tests.
+
+The ``login`` fixture is defined inside the class as well, because not every one
+of the other tests in the module will be expecting a successful login, and the **act** may need to
+be handled a little differently for another test class. For example, if we
+wanted to write another test scenario around submitting bad credentials, we
+could handle it by adding something like this to the test file:
+
+.. note::
+
+    It's assumed that the page object for this (i.e. ``LoginPage``) raises a
+    custom exception, ``BadCredentialsException``, when it recognizes text on
+    the login form indicating a failed login attempt.
+
+.. code-block:: python
+
+    # ``deepcopy`` comes from the standard library ``copy`` module; the
+    # ``login_page`` fixture is assumed to be defined elsewhere in the suite.
+    from copy import deepcopy
+
+
+    class TestLandingPageBadCredentials:
+ @pytest.fixture(scope="class")
+ def faux_user(self, user):
+ _user = deepcopy(user)
+ _user.password = "badpass"
+ return _user
+
+ def test_raises_bad_credentials_exception(self, login_page, faux_user):
+ with pytest.raises(BadCredentialsException):
+ login_page.login(faux_user)
+
+
+.. _`request-context`:
+
+Fixtures can introspect the requesting test context
+-------------------------------------------------------------
+
+Fixture functions can accept the :py:class:`request <_pytest.fixtures.FixtureRequest>` object
+to introspect the "requesting" test function, class or module context.
+Further extending the previous ``smtp_connection`` fixture example, let's
+read an optional server URL from the test module which uses our fixture:
+
+.. code-block:: python
+
+ # content of conftest.py
+ import pytest
+ import smtplib
+
+
+ @pytest.fixture(scope="module")
+ def smtp_connection(request):
+ server = getattr(request.module, "smtpserver", "smtp.gmail.com")
+ smtp_connection = smtplib.SMTP(server, 587, timeout=5)
+ yield smtp_connection
+ print("finalizing {} ({})".format(smtp_connection, server))
+ smtp_connection.close()
+
+We use the ``request.module`` attribute to optionally obtain an
+``smtpserver`` attribute from the test module. If we just execute
+again, nothing much has changed:
+
+.. code-block:: pytest
+
+ $ pytest -s -q --tb=no test_module.py
+ FFfinalizing <smtplib.SMTP object at 0xdeadbeef0002> (smtp.gmail.com)
+
+ ========================= short test summary info ==========================
+ FAILED test_module.py::test_ehlo - assert 0
+ FAILED test_module.py::test_noop - assert 0
+ 2 failed in 0.12s
+
+Let's quickly create another test module that actually sets the
+server URL in its module namespace:
+
+.. code-block:: python
+
+ # content of test_anothersmtp.py
+
+ smtpserver = "mail.python.org" # will be read by smtp fixture
+
+
+ def test_showhelo(smtp_connection):
+ assert 0, smtp_connection.helo()
+
+Running it:
+
+.. code-block:: pytest
+
+ $ pytest -qq --tb=short test_anothersmtp.py
+ F [100%]
+ ================================= FAILURES =================================
+ ______________________________ test_showhelo _______________________________
+ test_anothersmtp.py:6: in test_showhelo
+ assert 0, smtp_connection.helo()
+ E AssertionError: (250, b'mail.python.org')
+ E assert 0
+ ------------------------- Captured stdout teardown -------------------------
+ finalizing <smtplib.SMTP object at 0xdeadbeef0003> (mail.python.org)
+ ========================= short test summary info ==========================
+ FAILED test_anothersmtp.py::test_showhelo - AssertionError: (250, b'mail....
+
+Voilà! The ``smtp_connection`` fixture function picked up our mail server name
+from the module namespace.
+
+.. _`using-markers`:
+
+Using markers to pass data to fixtures
+-------------------------------------------------------------
+
+Using the :py:class:`request <_pytest.fixtures.FixtureRequest>` object, a fixture can also access
+markers which are applied to a test function. This can be useful to pass data
+into a fixture from a test:
+
+.. code-block:: python
+
+ import pytest
+
+
+ @pytest.fixture
+ def fixt(request):
+ marker = request.node.get_closest_marker("fixt_data")
+ if marker is None:
+ # Handle missing marker in some way...
+ data = None
+ else:
+ data = marker.args[0]
+
+ # Do something with the data
+ return data
+
+
+ @pytest.mark.fixt_data(42)
+ def test_fixt(fixt):
+ assert fixt == 42
+
+.. _`fixture-factory`:
+
+Factories as fixtures
+-------------------------------------------------------------
+
+The "factory as fixture" pattern can help in situations where the result
+of a fixture is needed multiple times in a single test. Instead of returning
+data directly, the fixture instead returns a function which generates the data.
+This function can then be called multiple times in the test.
+
+Factories can have parameters as needed:
+
+.. code-block:: python
+
+ @pytest.fixture
+ def make_customer_record():
+ def _make_customer_record(name):
+ return {"name": name, "orders": []}
+
+ return _make_customer_record
+
+
+ def test_customer_records(make_customer_record):
+ customer_1 = make_customer_record("Lisa")
+ customer_2 = make_customer_record("Mike")
+ customer_3 = make_customer_record("Meredith")
+
+If the data created by the factory requires managing, the fixture can take care of that:
+
+.. code-block:: python
+
+ @pytest.fixture
+ def make_customer_record():
+
+ created_records = []
+
+ def _make_customer_record(name):
+ record = models.Customer(name=name, orders=[])
+ created_records.append(record)
+ return record
+
+ yield _make_customer_record
+
+ for record in created_records:
+ record.destroy()
+
+
+ def test_customer_records(make_customer_record):
+ customer_1 = make_customer_record("Lisa")
+ customer_2 = make_customer_record("Mike")
+ customer_3 = make_customer_record("Meredith")
+
+
+.. _`fixture-parametrize`:
+
+Parametrizing fixtures
+-----------------------------------------------------------------
+
+Fixture functions can be parametrized in which case they will be called
+multiple times, each time executing the set of dependent tests, i.e. the
+tests that depend on this fixture. Test functions usually do not need
+to be aware of their re-running. Fixture parametrization helps to
+write exhaustive functional tests for components which themselves can be
+configured in multiple ways.
+
+Extending the previous example, we can flag the fixture to create two
+``smtp_connection`` fixture instances which will cause all tests using the fixture
+to run twice. The fixture function gets access to each parameter
+through the special :py:class:`request <FixtureRequest>` object:
+
+.. code-block:: python
+
+ # content of conftest.py
+ import pytest
+ import smtplib
+
+
+ @pytest.fixture(scope="module", params=["smtp.gmail.com", "mail.python.org"])
+ def smtp_connection(request):
+ smtp_connection = smtplib.SMTP(request.param, 587, timeout=5)
+ yield smtp_connection
+ print("finalizing {}".format(smtp_connection))
+ smtp_connection.close()
+
+The main change is the declaration of ``params`` with
+:py:func:`@pytest.fixture <pytest.fixture>`: a list of values
+for each of which the fixture function will execute, accessing the
+current value via ``request.param``. No test function code needs to change.
+So let's just do another run:
+
+.. code-block:: pytest
+
+ $ pytest -q test_module.py
+ FFFF [100%]
+ ================================= FAILURES =================================
+ ________________________ test_ehlo[smtp.gmail.com] _________________________
+
+ smtp_connection = <smtplib.SMTP object at 0xdeadbeef0004>
+
+ def test_ehlo(smtp_connection):
+ response, msg = smtp_connection.ehlo()
+ assert response == 250
+ assert b"smtp.gmail.com" in msg
+ > assert 0 # for demo purposes
+ E assert 0
+
+ test_module.py:7: AssertionError
+ ________________________ test_noop[smtp.gmail.com] _________________________
+
+ smtp_connection = <smtplib.SMTP object at 0xdeadbeef0004>
+
+ def test_noop(smtp_connection):
+ response, msg = smtp_connection.noop()
+ assert response == 250
+ > assert 0 # for demo purposes
+ E assert 0
+
+ test_module.py:13: AssertionError
+ ________________________ test_ehlo[mail.python.org] ________________________
+
+ smtp_connection = <smtplib.SMTP object at 0xdeadbeef0005>
+
+ def test_ehlo(smtp_connection):
+ response, msg = smtp_connection.ehlo()
+ assert response == 250
+ > assert b"smtp.gmail.com" in msg
+ E AssertionError: assert b'smtp.gmail.com' in b'mail.python.org\nPIPELINING\nSIZE 51200000\nETRN\nSTARTTLS\nAUTH DIGEST-MD5 NTLM CRAM-MD5\nENHANCEDSTATUSCODES\n8BITMIME\nDSN\nSMTPUTF8\nCHUNKING'
+
+ test_module.py:6: AssertionError
+ -------------------------- Captured stdout setup ---------------------------
+ finalizing <smtplib.SMTP object at 0xdeadbeef0004>
+ ________________________ test_noop[mail.python.org] ________________________
+
+ smtp_connection = <smtplib.SMTP object at 0xdeadbeef0005>
+
+ def test_noop(smtp_connection):
+ response, msg = smtp_connection.noop()
+ assert response == 250
+ > assert 0 # for demo purposes
+ E assert 0
+
+ test_module.py:13: AssertionError
+ ------------------------- Captured stdout teardown -------------------------
+ finalizing <smtplib.SMTP object at 0xdeadbeef0005>
+ ========================= short test summary info ==========================
+ FAILED test_module.py::test_ehlo[smtp.gmail.com] - assert 0
+ FAILED test_module.py::test_noop[smtp.gmail.com] - assert 0
+ FAILED test_module.py::test_ehlo[mail.python.org] - AssertionError: asser...
+ FAILED test_module.py::test_noop[mail.python.org] - assert 0
+ 4 failed in 0.12s
+
+We see that our two test functions each ran twice, against the different
+``smtp_connection`` instances. Note also that, with the ``mail.python.org``
+connection, ``test_ehlo`` fails because it expects a different server string
+than the one that arrived.
+
+pytest will build a string that is the test ID for each fixture value
+in a parametrized fixture, e.g. ``test_ehlo[smtp.gmail.com]`` and
+``test_ehlo[mail.python.org]`` in the above examples. These IDs can
+be used with ``-k`` to select specific cases to run, and they will
+also identify the specific case when one is failing. Running pytest
+with ``--collect-only`` will show the generated IDs.
+
+Numbers, strings, booleans and ``None`` will have their usual string
+representation used in the test ID. For other objects, pytest will
+make a string based on the argument name. It is possible to customise
+the string used in a test ID for a certain fixture value by using the
+``ids`` keyword argument:
+
+.. code-block:: python
+
+ # content of test_ids.py
+ import pytest
+
+
+ @pytest.fixture(params=[0, 1], ids=["spam", "ham"])
+ def a(request):
+ return request.param
+
+
+ def test_a(a):
+ pass
+
+
+ def idfn(fixture_value):
+ if fixture_value == 0:
+ return "eggs"
+ else:
+ return None
+
+
+ @pytest.fixture(params=[0, 1], ids=idfn)
+ def b(request):
+ return request.param
+
+
+ def test_b(b):
+ pass
+
+The above shows how ``ids`` can be either a list of strings to use or
+a function which will be called with the fixture value and then
+has to return a string to use. In the latter case if the function
+returns ``None`` then pytest's auto-generated ID will be used.
+
+Running the above tests results in the following test IDs being used:
+
+.. code-block:: pytest
+
+ $ pytest --collect-only
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 11 items
+
+ <Module test_anothersmtp.py>
+ <Function test_showhelo[smtp.gmail.com]>
+ <Function test_showhelo[mail.python.org]>
+ <Module test_emaillib.py>
+ <Function test_email_received>
+ <Module test_ids.py>
+ <Function test_a[spam]>
+ <Function test_a[ham]>
+ <Function test_b[eggs]>
+ <Function test_b[1]>
+ <Module test_module.py>
+ <Function test_ehlo[smtp.gmail.com]>
+ <Function test_noop[smtp.gmail.com]>
+ <Function test_ehlo[mail.python.org]>
+ <Function test_noop[mail.python.org]>
+
+ ======================= 11 tests collected in 0.12s ========================
+
+.. _`fixture-parametrize-marks`:
+
+Using marks with parametrized fixtures
+--------------------------------------
+
+:func:`pytest.param` can be used to apply marks in value sets of parametrized fixtures in the same way
+that they can be used with :ref:`@pytest.mark.parametrize <@pytest.mark.parametrize>`.
+
+Example:
+
+.. code-block:: python
+
+ # content of test_fixture_marks.py
+ import pytest
+
+
+ @pytest.fixture(params=[0, 1, pytest.param(2, marks=pytest.mark.skip)])
+ def data_set(request):
+ return request.param
+
+
+ def test_data(data_set):
+ pass
+
+Running this test will *skip* the invocation of ``data_set`` with value ``2``:
+
+.. code-block:: pytest
+
+ $ pytest test_fixture_marks.py -v
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 3 items
+
+ test_fixture_marks.py::test_data[0] PASSED [ 33%]
+ test_fixture_marks.py::test_data[1] PASSED [ 66%]
+ test_fixture_marks.py::test_data[2] SKIPPED (unconditional skip) [100%]
+
+ ======================= 2 passed, 1 skipped in 0.12s =======================
+
+.. _`interdependent fixtures`:
+
+Modularity: using fixtures from a fixture function
+----------------------------------------------------------
+
+In addition to using fixtures in test functions, fixture functions
+can use other fixtures themselves. This contributes to a modular design
+of your fixtures and allows re-use of framework-specific fixtures across
+many projects. As a simple example, we can extend the previous example
+and instantiate an object ``app`` where we stick the already defined
+``smtp_connection`` resource into it:
+
+.. code-block:: python
+
+ # content of test_appsetup.py
+
+ import pytest
+
+
+ class App:
+ def __init__(self, smtp_connection):
+ self.smtp_connection = smtp_connection
+
+
+ @pytest.fixture(scope="module")
+ def app(smtp_connection):
+ return App(smtp_connection)
+
+
+ def test_smtp_connection_exists(app):
+ assert app.smtp_connection
+
+Here we declare an ``app`` fixture which receives the previously defined
+``smtp_connection`` fixture and instantiates an ``App`` object with it. Let's run it:
+
+.. code-block:: pytest
+
+ $ pytest -v test_appsetup.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 2 items
+
+ test_appsetup.py::test_smtp_connection_exists[smtp.gmail.com] PASSED [ 50%]
+ test_appsetup.py::test_smtp_connection_exists[mail.python.org] PASSED [100%]
+
+ ============================ 2 passed in 0.12s =============================
+
+Due to the parametrization of ``smtp_connection``, the test will run twice with two
+different ``App`` instances and respective smtp servers. There is no
+need for the ``app`` fixture to be aware of the ``smtp_connection``
+parametrization because pytest will fully analyse the fixture dependency graph.
+
+Note that the ``app`` fixture has a scope of ``module`` and uses a
+module-scoped ``smtp_connection`` fixture. The example would still work if
+``smtp_connection`` was cached on a ``session`` scope: it is fine for fixtures to use
+"broader" scoped fixtures but not the other way round: a session-scoped fixture
+could not use a module-scoped one in a meaningful way.
+
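+For instance, a sketch like the following (with made-up fixture names) is rejected
+by pytest with a ``ScopeMismatch`` error during setup:
+
+.. code-block:: python
+
+    import pytest
+
+
+    @pytest.fixture(scope="module")
+    def db_connection():
+        ...
+
+
+    @pytest.fixture(scope="session")
+    def cache(db_connection):
+        # ScopeMismatch: a session-scoped fixture cannot depend on a
+        # module-scoped one
+        ...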
+
+.. _`automatic per-resource grouping`:
+
+Automatic grouping of tests by fixture instances
+----------------------------------------------------------
+
+.. regendoc: wipe
+
+pytest minimizes the number of active fixtures during test runs.
+If you have a parametrized fixture, then all the tests using it will
+first execute with one instance and then finalizers are called
+before the next fixture instance is created. Among other things,
+this eases testing of applications which create and use global state.
+
+The following example uses two parametrized fixtures, one of which is
+scoped on a per-module basis, and all the functions perform ``print`` calls
+to show the setup/teardown flow:
+
+.. code-block:: python
+
+ # content of test_module.py
+ import pytest
+
+
+ @pytest.fixture(scope="module", params=["mod1", "mod2"])
+ def modarg(request):
+ param = request.param
+ print(" SETUP modarg", param)
+ yield param
+ print(" TEARDOWN modarg", param)
+
+
+ @pytest.fixture(scope="function", params=[1, 2])
+ def otherarg(request):
+ param = request.param
+ print(" SETUP otherarg", param)
+ yield param
+ print(" TEARDOWN otherarg", param)
+
+
+ def test_0(otherarg):
+ print(" RUN test0 with otherarg", otherarg)
+
+
+ def test_1(modarg):
+ print(" RUN test1 with modarg", modarg)
+
+
+ def test_2(otherarg, modarg):
+ print(" RUN test2 with otherarg {} and modarg {}".format(otherarg, modarg))
+
+
+Let's run the tests in verbose mode and look at the print output:
+
+.. code-block:: pytest
+
+ $ pytest -v -s test_module.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y -- $PYTHON_PREFIX/bin/python
+ cachedir: .pytest_cache
+ rootdir: /home/sweet/project
+ collecting ... collected 8 items
+
+ test_module.py::test_0[1] SETUP otherarg 1
+ RUN test0 with otherarg 1
+ PASSED TEARDOWN otherarg 1
+
+ test_module.py::test_0[2] SETUP otherarg 2
+ RUN test0 with otherarg 2
+ PASSED TEARDOWN otherarg 2
+
+ test_module.py::test_1[mod1] SETUP modarg mod1
+ RUN test1 with modarg mod1
+ PASSED
+ test_module.py::test_2[mod1-1] SETUP otherarg 1
+ RUN test2 with otherarg 1 and modarg mod1
+ PASSED TEARDOWN otherarg 1
+
+ test_module.py::test_2[mod1-2] SETUP otherarg 2
+ RUN test2 with otherarg 2 and modarg mod1
+ PASSED TEARDOWN otherarg 2
+
+ test_module.py::test_1[mod2] TEARDOWN modarg mod1
+ SETUP modarg mod2
+ RUN test1 with modarg mod2
+ PASSED
+ test_module.py::test_2[mod2-1] SETUP otherarg 1
+ RUN test2 with otherarg 1 and modarg mod2
+ PASSED TEARDOWN otherarg 1
+
+ test_module.py::test_2[mod2-2] SETUP otherarg 2
+ RUN test2 with otherarg 2 and modarg mod2
+ PASSED TEARDOWN otherarg 2
+ TEARDOWN modarg mod2
+
+
+ ============================ 8 passed in 0.12s =============================
+
+You can see that the parametrized module-scoped ``modarg`` resource caused an
+ordering of test execution that led to the fewest possible "active" resources.
+The finalizer for the ``mod1`` parametrized resource was executed before the
+``mod2`` resource was set up.
+
+In particular, notice that ``test_0`` is completely independent and finishes first.
+Then ``test_1`` is executed with ``mod1``, then ``test_2`` with ``mod1``, then
+``test_1`` with ``mod2`` and finally ``test_2`` with ``mod2``.
+
+The ``otherarg`` parametrized resource (having function scope) was set up before
+and torn down after every test that used it.
+
+
+.. _`usefixtures`:
+
+Use fixtures in classes and modules with ``usefixtures``
+--------------------------------------------------------
+
+.. regendoc:wipe
+
+Sometimes test functions do not directly need access to a fixture object.
+For example, tests may need to operate with an empty directory as the
+current working directory but otherwise do not care about the concrete
+directory. Here is how you can use the standard :mod:`tempfile`
+and pytest fixtures to
+achieve it. We separate the creation of the fixture into a :file:`conftest.py`
+file:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+ import os
+ import tempfile
+
+ import pytest
+
+
+ @pytest.fixture
+ def cleandir():
+ with tempfile.TemporaryDirectory() as newpath:
+ old_cwd = os.getcwd()
+ os.chdir(newpath)
+ yield
+ os.chdir(old_cwd)
+
+and declare its use in a test module via a ``usefixtures`` marker:
+
+.. code-block:: python
+
+ # content of test_setenv.py
+ import os
+ import pytest
+
+
+ @pytest.mark.usefixtures("cleandir")
+ class TestDirectoryInit:
+ def test_cwd_starts_empty(self):
+ assert os.listdir(os.getcwd()) == []
+ with open("myfile", "w") as f:
+ f.write("hello")
+
+ def test_cwd_again_starts_empty(self):
+ assert os.listdir(os.getcwd()) == []
+
+Due to the ``usefixtures`` marker, the ``cleandir`` fixture
+will be required for the execution of each test method, just as if
+you specified a "cleandir" function argument to each of them. Let's run it
+to verify our fixture is activated and the tests pass:
+
+.. code-block:: pytest
+
+ $ pytest -q
+ .. [100%]
+ 2 passed in 0.12s
+
+You can specify multiple fixtures like this:
+
+.. code-block:: python
+
+ @pytest.mark.usefixtures("cleandir", "anotherfixture")
+ def test():
+ ...
+
+and you may specify fixture usage at the test module level using :globalvar:`pytestmark`:
+
+.. code-block:: python
+
+ pytestmark = pytest.mark.usefixtures("cleandir")
+
+
+It is also possible to put fixtures required by all tests in your project
+into an ini-file:
+
+.. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ usefixtures = cleandir
+
+
+.. warning::
+
+ Note this mark has no effect in **fixture functions**. For example,
+ this **will not work as expected**:
+
+ .. code-block:: python
+
+ @pytest.mark.usefixtures("my_other_fixture")
+ @pytest.fixture
+ def my_fixture_that_sadly_wont_use_my_other_fixture():
+ ...
+
+ Currently this will not generate any error or warning, but this is intended
+ to be handled by :issue:`3664`.
+
+.. _`override fixtures`:
+
+Overriding fixtures on various levels
+-------------------------------------
+
+In a relatively large test suite, you most likely need to ``override`` a ``global`` or ``root`` fixture with a ``locally``
+defined one, keeping the test code readable and maintainable.
+
+Override a fixture on a folder (conftest) level
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Given the following test file structure:
+
+::
+
+ tests/
+ __init__.py
+
+ conftest.py
+ # content of tests/conftest.py
+ import pytest
+
+ @pytest.fixture
+ def username():
+ return 'username'
+
+ test_something.py
+ # content of tests/test_something.py
+ def test_username(username):
+ assert username == 'username'
+
+ subfolder/
+ __init__.py
+
+ conftest.py
+ # content of tests/subfolder/conftest.py
+ import pytest
+
+ @pytest.fixture
+ def username(username):
+ return 'overridden-' + username
+
+ test_something.py
+ # content of tests/subfolder/test_something.py
+ def test_username(username):
+ assert username == 'overridden-username'
+
+As you can see, a fixture with the same name can be overridden for a certain test folder level.
+Note that the ``base`` or ``super`` fixture can easily be accessed from the
+``overriding`` fixture, as done in the example above.
+
+Override a fixture on a test module level
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Given the following test file structure:
+
+::
+
+ tests/
+ __init__.py
+
+ conftest.py
+ # content of tests/conftest.py
+ import pytest
+
+ @pytest.fixture
+ def username():
+ return 'username'
+
+ test_something.py
+ # content of tests/test_something.py
+ import pytest
+
+ @pytest.fixture
+ def username(username):
+ return 'overridden-' + username
+
+ def test_username(username):
+ assert username == 'overridden-username'
+
+ test_something_else.py
+ # content of tests/test_something_else.py
+ import pytest
+
+ @pytest.fixture
+ def username(username):
+ return 'overridden-else-' + username
+
+ def test_username(username):
+ assert username == 'overridden-else-username'
+
+In the example above, a fixture with the same name can be overridden for a certain test module.
+
+
+Override a fixture with direct test parametrization
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Given the following test file structure:
+
+::
+
+ tests/
+ __init__.py
+
+ conftest.py
+ # content of tests/conftest.py
+ import pytest
+
+ @pytest.fixture
+ def username():
+ return 'username'
+
+ @pytest.fixture
+ def other_username(username):
+ return 'other-' + username
+
+ test_something.py
+ # content of tests/test_something.py
+ import pytest
+
+ @pytest.mark.parametrize('username', ['directly-overridden-username'])
+ def test_username(username):
+ assert username == 'directly-overridden-username'
+
+ @pytest.mark.parametrize('username', ['directly-overridden-username-other'])
+ def test_username_other(other_username):
+ assert other_username == 'other-directly-overridden-username-other'
+
+In the example above, a fixture value is overridden by the test parameter value. Note that the value of the fixture
+can be overridden this way even if the test doesn't use it directly (doesn't mention it in the function signature).
+
+
+Override a parametrized fixture with non-parametrized one and vice versa
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Given the following test file structure:
+
+::
+
+ tests/
+ __init__.py
+
+ conftest.py
+ # content of tests/conftest.py
+ import pytest
+
+ @pytest.fixture(params=['one', 'two', 'three'])
+ def parametrized_username(request):
+ return request.param
+
+ @pytest.fixture
+ def non_parametrized_username(request):
+ return 'username'
+
+ test_something.py
+ # content of tests/test_something.py
+ import pytest
+
+ @pytest.fixture
+ def parametrized_username():
+ return 'overridden-username'
+
+ @pytest.fixture(params=['one', 'two', 'three'])
+ def non_parametrized_username(request):
+ return request.param
+
+ def test_username(parametrized_username):
+ assert parametrized_username == 'overridden-username'
+
+ def test_parametrized_username(non_parametrized_username):
+ assert non_parametrized_username in ['one', 'two', 'three']
+
+ test_something_else.py
+ # content of tests/test_something_else.py
+ def test_username(parametrized_username):
+ assert parametrized_username in ['one', 'two', 'three']
+
+            def test_non_parametrized_username(non_parametrized_username):
+ assert non_parametrized_username == 'username'
+
+In the example above, a parametrized fixture is overridden with a non-parametrized version, and
+a non-parametrized fixture is overridden with a parametrized version for a certain test module.
+The same obviously applies to the test folder level.
+
+
+Using fixtures from other projects
+----------------------------------
+
+Usually projects that provide pytest support will use :ref:`entry points <setuptools entry points>`,
+so just installing those projects into an environment will make those fixtures available for use.
+
+In case you want to use fixtures from a project that does not use entry points, you can
+define :globalvar:`pytest_plugins` in your top ``conftest.py`` file to register that module
+as a plugin.
+
+Suppose you have some fixtures in ``mylibrary.fixtures`` and you want to reuse them in your
+``app/tests`` directory.
+
+All you need to do is to define :globalvar:`pytest_plugins` in ``app/tests/conftest.py``
+pointing to that module.
+
+.. code-block:: python
+
+ pytest_plugins = "mylibrary.fixtures"
+
+This effectively registers ``mylibrary.fixtures`` as a plugin, making all its fixtures and
+hooks available to tests in ``app/tests``.
+
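+:globalvar:`pytest_plugins` also accepts a sequence of module names, so several
+fixture modules (the second name here is made up) can be registered at once:
+
+.. code-block:: python
+
+    pytest_plugins = ["mylibrary.fixtures", "otherlibrary.fixtures"]
+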
+.. note::
+
+    Sometimes users will *import* fixtures from other projects for use; however, this is not
+    recommended: importing fixtures into a module will register them in pytest
+    as *defined* in that module.
+
+    This has minor consequences, such as the fixtures appearing multiple times in
+    ``pytest --help``, but it is not **recommended** because this behavior might
+    change or stop working in future versions.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/index.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/index.rst
new file mode 100644
index 0000000000..6f52aaecdc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/index.rst
@@ -0,0 +1,64 @@
+:orphan:
+
+.. _how-to:
+
+How-to guides
+================
+
+Core pytest functionality
+-------------------------
+
+.. toctree::
+ :maxdepth: 1
+
+ usage
+ assert
+ fixtures
+ mark
+ parametrize
+ tmp_path
+ monkeypatch
+ doctest
+ cache
+
+Test output and outcomes
+----------------------------
+
+.. toctree::
+ :maxdepth: 1
+
+ failures
+ output
+ logging
+ capture-stdout-stderr
+ capture-warnings
+ skipping
+
+Plugins
+----------------------------
+
+.. toctree::
+ :maxdepth: 1
+
+ plugins
+ writing_plugins
+ writing_hook_functions
+
+pytest and other test systems
+-----------------------------
+
+.. toctree::
+ :maxdepth: 1
+
+ existingtestsuite
+ unittest
+ nose
+ xunit_setup
+
+pytest development environment
+------------------------------
+
+.. toctree::
+ :maxdepth: 1
+
+ bash-completion
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/logging.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/logging.rst
new file mode 100644
index 0000000000..2e8734fa6a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/logging.rst
@@ -0,0 +1,292 @@
+.. _logging:
+
+How to manage logging
+---------------------
+
+pytest captures log messages of level ``WARNING`` or above automatically and displays them in their own section
+for each failed test in the same manner as captured stdout and stderr.
+
+Running without options:
+
+.. code-block:: bash
+
+ pytest
+
+Shows failed tests like so:
+
+.. code-block:: pytest
+
+ ----------------------- Captured stdlog call ----------------------
+ test_reporting.py 26 WARNING text going to logger
+ ----------------------- Captured stdout call ----------------------
+ text going to stdout
+ ----------------------- Captured stderr call ----------------------
+ text going to stderr
+ ==================== 2 failed in 0.02 seconds =====================
+
+By default each captured log message shows the module, line number, log level
+and message.
+
+If desired, the log and date format can be set to
+anything that the logging module supports by passing specific formatting options:
+
+.. code-block:: bash
+
+ pytest --log-format="%(asctime)s %(levelname)s %(message)s" \
+ --log-date-format="%Y-%m-%d %H:%M:%S"
+
+Shows failed tests like so:
+
+.. code-block:: pytest
+
+ ----------------------- Captured stdlog call ----------------------
+ 2010-04-10 14:48:44 WARNING text going to logger
+ ----------------------- Captured stdout call ----------------------
+ text going to stdout
+ ----------------------- Captured stderr call ----------------------
+ text going to stderr
+ ==================== 2 failed in 0.02 seconds =====================
+
+These options can also be customized through a ``pytest.ini`` file:
+
+.. code-block:: ini
+
+ [pytest]
+ log_format = %(asctime)s %(levelname)s %(message)s
+ log_date_format = %Y-%m-%d %H:%M:%S
+
+Furthermore, it is possible to completely disable reporting of captured content
+(stdout, stderr and logs) on failed tests with:
+
+.. code-block:: bash
+
+ pytest --show-capture=no
+
+
+caplog fixture
+^^^^^^^^^^^^^^
+
+Inside tests it is possible to change the log level for the captured log
+messages. This is supported by the ``caplog`` fixture:
+
+.. code-block:: python
+
+ def test_foo(caplog):
+ caplog.set_level(logging.INFO)
+ pass
+
+By default the level is set on the root logger; however, as a convenience, it is
+also possible to set the log level of any logger:
+
+.. code-block:: python
+
+ def test_foo(caplog):
+ caplog.set_level(logging.CRITICAL, logger="root.baz")
+ pass
+
+The log levels set are restored automatically at the end of the test.
+
+It is also possible to use a context manager to temporarily change the log
+level inside a ``with`` block:
+
+.. code-block:: python
+
+ def test_bar(caplog):
+ with caplog.at_level(logging.INFO):
+ pass
+
+Again, by default the level of the root logger is affected but the level of any
+logger can be changed instead with:
+
+.. code-block:: python
+
+ def test_bar(caplog):
+ with caplog.at_level(logging.CRITICAL, logger="root.baz"):
+ pass
+
+Lastly all the logs sent to the logger during the test run are made available on
+the fixture in the form of both the ``logging.LogRecord`` instances and the final log text.
+This is useful for when you want to assert on the contents of a message:
+
+.. code-block:: python
+
+ def test_baz(caplog):
+ func_under_test()
+ for record in caplog.records:
+ assert record.levelname != "CRITICAL"
+ assert "wally" not in caplog.text
+
+For all the available attributes of the log records see the
+``logging.LogRecord`` class.
+
+You can also resort to ``record_tuples`` if all you want to do is to ensure
+that certain messages have been logged under a given logger name with a given
+severity and message:
+
+.. code-block:: python
+
+ def test_foo(caplog):
+ logging.getLogger().info("boo %s", "arg")
+
+ assert caplog.record_tuples == [("root", logging.INFO, "boo arg")]
+
+You can call ``caplog.clear()`` to reset the captured log records in a test:
+
+.. code-block:: python
+
+ def test_something_with_clearing_records(caplog):
+ some_method_that_creates_log_records()
+ caplog.clear()
+ your_test_method()
+ assert ["Foo"] == [rec.message for rec in caplog.records]
+
+
+The ``caplog.records`` attribute contains records from the current stage only, so
+inside the ``setup`` phase it contains only setup logs, and likewise for the
+``call`` and ``teardown`` phases.
+
+To access logs from other stages, use the ``caplog.get_records(when)`` method. As an example,
+if you want to make sure that tests which use a certain fixture never log any warnings, you can inspect
+the records for the ``setup`` and ``call`` stages during teardown like so:
+
+.. code-block:: python
+
+ @pytest.fixture
+ def window(caplog):
+ window = create_window()
+ yield window
+ for when in ("setup", "call"):
+ messages = [
+ x.message for x in caplog.get_records(when) if x.levelno == logging.WARNING
+ ]
+ if messages:
+ pytest.fail(
+ "warning messages encountered during testing: {}".format(messages)
+ )
+
+
+
+The full API is available at :class:`pytest.LogCaptureFixture`.
+
+
+.. _live_logs:
+
+Live Logs
+^^^^^^^^^
+
+By setting the :confval:`log_cli` configuration option to ``true``, pytest will output
+logging records as they are emitted directly into the console.
+
+You can specify the logging level for which log records with equal or higher
+level are printed to the console by passing ``--log-cli-level``. This setting
+accepts the logging level names as seen in Python's documentation, or an integer
+as the logging level number.
+
+Additionally, you can also specify ``--log-cli-format`` and
+``--log-cli-date-format`` which mirror and default to ``--log-format`` and
+``--log-date-format`` if not provided, but are applied only to the console
+logging handler.
+
+All of the CLI log options can also be set in the configuration INI file. The
+option names are:
+
+* ``log_cli_level``
+* ``log_cli_format``
+* ``log_cli_date_format``
+
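+For example, a minimal configuration that enables live logging at ``INFO`` level
+with a custom format might look like this:
+
+.. code-block:: ini
+
+    [pytest]
+    log_cli = true
+    log_cli_level = INFO
+    log_cli_format = %(asctime)s %(levelname)s %(message)s
+    log_cli_date_format = %Y-%m-%d %H:%M:%S
+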
+If you need to record the logging calls of the whole test suite to a file, you can pass
+``--log-file=/path/to/log/file``. This log file is opened in write mode, which
+means that it will be overwritten at each test session run.
+
+You can also specify the logging level for the log file by passing
+``--log-file-level``. This setting accepts the logging level names as seen in
+Python's documentation (i.e. uppercased level names) or an integer as the logging
+level number.
+
+Additionally, you can also specify ``--log-file-format`` and
+``--log-file-date-format`` which are equal to ``--log-format`` and
+``--log-date-format`` but are applied to the log file logging handler.
+
+All of the log file options can also be set in the configuration INI file. The
+option names are:
+
+* ``log_file``
+* ``log_file_level``
+* ``log_file_format``
+* ``log_file_date_format``
+
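+For instance, to write all records at ``DEBUG`` level and above to a file (the
+path here is just an example):
+
+.. code-block:: ini
+
+    [pytest]
+    log_file = logs/pytest.log
+    log_file_level = DEBUG
+    log_file_format = %(asctime)s %(levelname)s %(message)s
+    log_file_date_format = %Y-%m-%d %H:%M:%S
+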
+You can call ``set_log_path()`` to customize the ``log_file`` path dynamically. This functionality
+is considered **experimental**.
+
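+A sketch of one way this could be used, assuming the internal logging plugin is
+registered under the name ``logging-plugin`` (the same name used in the color
+example below), writing one file per test:
+
+.. code-block:: python
+
+    # content of conftest.py (sketch)
+    import os
+
+    import pytest
+
+
+    @pytest.hookimpl(hookwrapper=True, tryfirst=True)
+    def pytest_runtest_setup(item):
+        logging_plugin = item.config.pluginmanager.get_plugin("logging-plugin")
+        # direct this test's records to a file named after the test
+        logging_plugin.set_log_path(os.path.join("logs", item.name + ".log"))
+        yield
+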
+.. _log_colors:
+
+Customizing Colors
+^^^^^^^^^^^^^^^^^^
+
+Log levels are colored if colored terminal output is enabled. Changing
+from default colors or putting color on custom log levels is supported
+through ``add_color_level()``. Example:
+
+.. code-block:: python
+
+ @pytest.hookimpl
+ def pytest_configure(config):
+ logging_plugin = config.pluginmanager.get_plugin("logging-plugin")
+
+ # Change color on existing log level
+ logging_plugin.log_cli_handler.formatter.add_color_level(logging.INFO, "cyan")
+
+ # Add color to a custom log level (a custom log level `SPAM` is already set up)
+    logging_plugin.log_cli_handler.formatter.add_color_level(logging.SPAM, "blue")
+
+.. warning::
+
+    This feature and its API are considered **experimental** and might change
+    between releases without a deprecation notice.
+
+.. _log_release_notes:
+
+Release notes
+^^^^^^^^^^^^^
+
+This feature was introduced as a drop-in replacement for the
+:pypi:`pytest-catchlog` plugin and the two conflict
+with each other. The backward compatibility API with ``pytest-capturelog``
+was dropped when this feature was introduced, so if for that reason you
+still need ``pytest-catchlog``, you can disable the internal feature by
+adding the following to your ``pytest.ini``:
+
+.. code-block:: ini
+
+ [pytest]
+ addopts=-p no:logging
+
+
+.. _log_changes_3_4:
+
+Incompatible changes in pytest 3.4
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This feature was introduced in ``3.3`` and some **incompatible changes** were
+made in ``3.4`` after community feedback:
+
+* Log levels are no longer changed unless explicitly requested by the :confval:`log_level` configuration
+ or ``--log-level`` command-line options. This allows users to configure logger objects themselves.
+ Setting :confval:`log_level` will set the level that is captured globally so if a specific test requires
+ a lower level than this, use the ``caplog.set_level()`` functionality otherwise that test will be prone to
+ failure.
+* :ref:`Live Logs <live_logs>` is now disabled by default and can be enabled by setting the
+  :confval:`log_cli` configuration option to ``true``. When enabled, the verbosity is increased so logging for each
+  test is visible.
+* :ref:`Live Logs <live_logs>` are now sent to ``sys.stdout`` and no longer require the ``-s`` command-line option
+ to work.
+
+If you want to partially restore the logging behavior of version ``3.3``, you can add these options to your ``ini``
+file:
+
+.. code-block:: ini
+
+ [pytest]
+ log_cli=true
+ log_level=NOTSET
+
+More details about the discussion that led to these changes can be read in :issue:`3013`.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/mark.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/mark.rst
new file mode 100644
index 0000000000..33f9d18bfe
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/mark.rst
@@ -0,0 +1,93 @@
+.. _mark:
+
+How to mark test functions with attributes
+===========================================
+
+By using the ``pytest.mark`` helper you can easily set
+metadata on your test functions. You can find the full list of builtin markers
+in the :ref:`API Reference<marks ref>`. Or you can list all the markers, including
+builtin and custom, using the CLI - :code:`pytest --markers`.
+
+Here are some of the builtin markers:
+
+* :ref:`usefixtures <usefixtures>` - use fixtures on a test function or class
+* :ref:`filterwarnings <filterwarnings>` - filter certain warnings of a test function
+* :ref:`skip <skip>` - always skip a test function
+* :ref:`skipif <skipif>` - skip a test function if a certain condition is met
+* :ref:`xfail <xfail>` - produce an "expected failure" outcome if a certain
+ condition is met
+* :ref:`parametrize <parametrizemark>` - perform multiple calls
+ to the same test function.
+
+It's easy to create custom markers or to apply markers
+to whole test classes or modules. Those markers can be used by plugins, and also
+are commonly used to :ref:`select tests <mark run>` on the command-line with the ``-m`` option.
+
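+For instance, once a (made-up) ``slow`` marker has been registered as shown in the
+next section, individual tests can be marked and then deselected on the command
+line with ``pytest -m "not slow"``:
+
+.. code-block:: python
+
+    # content of test_speed.py (illustrative)
+    import time
+
+    import pytest
+
+
+    @pytest.mark.slow
+    def test_long_running():
+        time.sleep(10)
+
+
+    def test_quick():
+        assert 1 + 1 == 2
+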
+See :ref:`mark examples` for examples which also serve as documentation.
+
+.. note::
+
+ Marks can only be applied to tests, having no effect on
+ :ref:`fixtures <fixtures>`.
+
+
+Registering marks
+-----------------
+
+You can register custom marks in your ``pytest.ini`` file like this:
+
+.. code-block:: ini
+
+ [pytest]
+ markers =
+ slow: marks tests as slow (deselect with '-m "not slow"')
+ serial
+
+or in your ``pyproject.toml`` file like this:
+
+.. code-block:: toml
+
+ [tool.pytest.ini_options]
+ markers = [
+ "slow: marks tests as slow (deselect with '-m \"not slow\"')",
+ "serial",
+ ]
+
+Note that everything past the ``:`` after the mark name is an optional description.
+
+Alternatively, you can register new markers programmatically in a
+:ref:`pytest_configure <initialization-hooks>` hook:
+
+.. code-block:: python
+
+ def pytest_configure(config):
+ config.addinivalue_line(
+ "markers", "env(name): mark test to run only on named environment"
+ )
+
+
+Registered marks appear in pytest's help text and do not emit warnings (see the next section). It
+is recommended that third-party plugins always :ref:`register their markers <registering-markers>`.
+
+.. _unknown-marks:
+
+Raising errors on unknown marks
+-------------------------------
+
+Unregistered marks applied with the ``@pytest.mark.name_of_the_mark`` decorator
+will always emit a warning in order to avoid silently doing something
+surprising due to mistyped names. As described in the previous section, you can disable
+the warning for custom marks by registering them in your ``pytest.ini`` file or
+using a custom ``pytest_configure`` hook.
+
+When the ``--strict-markers`` command-line flag is passed, any unknown marks applied
+with the ``@pytest.mark.name_of_the_mark`` decorator will trigger an error. You can
+enforce this validation in your project by adding ``--strict-markers`` to ``addopts``:
+
+.. code-block:: ini
+
+ [pytest]
+ addopts = --strict-markers
+ markers =
+ slow: marks tests as slow (deselect with '-m "not slow"')
+ serial
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/monkeypatch.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/monkeypatch.rst
new file mode 100644
index 0000000000..9c61233f7e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/monkeypatch.rst
@@ -0,0 +1,444 @@
+.. _monkeypatching:
+
+How to monkeypatch/mock modules and environments
+================================================================
+
+.. currentmodule:: _pytest.monkeypatch
+
+Sometimes tests need to invoke functionality which depends
+on global settings or which invokes code which cannot be easily
+tested such as network access. The ``monkeypatch`` fixture
+helps you to safely set/delete an attribute, dictionary item or
+environment variable, or to modify ``sys.path`` for importing.
+
+The ``monkeypatch`` fixture provides these helper methods for safely patching and mocking
+functionality in tests:
+
+.. code-block:: python
+
+ monkeypatch.setattr(obj, name, value, raising=True)
+ monkeypatch.setattr("somemodule.obj.name", value, raising=True)
+ monkeypatch.delattr(obj, name, raising=True)
+ monkeypatch.setitem(mapping, name, value)
+ monkeypatch.delitem(obj, name, raising=True)
+ monkeypatch.setenv(name, value, prepend=None)
+ monkeypatch.delenv(name, raising=True)
+ monkeypatch.syspath_prepend(path)
+ monkeypatch.chdir(path)
+
+All modifications will be undone after the requesting
+test function or fixture has finished. The ``raising``
+parameter determines if a ``KeyError`` or ``AttributeError``
+will be raised if the target of the set/deletion operation does not exist.
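+
+As a minimal sketch of the ``raising`` flag (the environment variable name
+below is made up for illustration):
+
+.. code-block:: python
+
+    def test_raising_flag(monkeypatch):
+        # with raising=True (the default) this would fail with a KeyError
+        # if the variable is not set; raising=False turns it into a no-op
+        monkeypatch.delenv("SOME_MADE_UP_VAR", raising=False)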
+
+Consider the following scenarios:
+
+1. Modifying the behavior of a function or the property of a class for a test e.g.
+there is an API call or database connection you will not make for a test but you know
+what the expected output should be. Use :py:meth:`monkeypatch.setattr <MonkeyPatch.setattr>` to patch the
+function or property with your desired testing behavior. This can include your own functions.
+Use :py:meth:`monkeypatch.delattr <MonkeyPatch.delattr>` to remove the function or property for the test.
+
+2. Modifying the values of dictionaries e.g. you have a global configuration that
+you want to modify for certain test cases. Use :py:meth:`monkeypatch.setitem <MonkeyPatch.setitem>` to patch the
+dictionary for the test. :py:meth:`monkeypatch.delitem <MonkeyPatch.delitem>` can be used to remove items.
+
+3. Modifying environment variables for a test e.g. to test program behavior if an
+environment variable is missing, or to set multiple values to a known variable.
+:py:meth:`monkeypatch.setenv <MonkeyPatch.setenv>` and :py:meth:`monkeypatch.delenv <MonkeyPatch.delenv>` can be used for
+these patches.
+
+4. Use ``monkeypatch.setenv("PATH", value, prepend=os.pathsep)`` to modify ``$PATH``, and
+:py:meth:`monkeypatch.chdir <MonkeyPatch.chdir>` to change the context of the current working directory
+during a test.
+
+5. Use :py:meth:`monkeypatch.syspath_prepend <MonkeyPatch.syspath_prepend>` to modify ``sys.path`` which will also
+call ``pkg_resources.fixup_namespace_packages`` and :py:func:`importlib.invalidate_caches`.
+
+See the `monkeypatch blog post`_ for some introduction material
+and a discussion of its motivation.
+
+.. _`monkeypatch blog post`: https://tetamap.wordpress.com//2009/03/03/monkeypatching-in-unit-tests-done-right/
+
+Simple example: monkeypatching functions
+----------------------------------------
+
+Consider a scenario where you are working with user directories. In the context of
+testing, you do not want your test to depend on the running user. ``monkeypatch``
+can be used to patch functions dependent on the user to always return a
+specific value.
+
+In this example, :py:meth:`monkeypatch.setattr <MonkeyPatch.setattr>` is used to patch ``Path.home``
+so that the known testing path ``Path("/abc")`` is always used when the test is run.
+This removes any dependency on the running user for testing purposes.
+:py:meth:`monkeypatch.setattr <MonkeyPatch.setattr>` must be called before the function which will use
+the patched function is called.
+After the test function finishes the ``Path.home`` modification will be undone.
+
+.. code-block:: python
+
+ # contents of test_module.py with source code and the test
+ from pathlib import Path
+
+
+ def getssh():
+ """Simple function to return expanded homedir ssh path."""
+ return Path.home() / ".ssh"
+
+
+ def test_getssh(monkeypatch):
+ # mocked return function to replace Path.home
+ # always return '/abc'
+ def mockreturn():
+ return Path("/abc")
+
+ # Application of the monkeypatch to replace Path.home
+ # with the behavior of mockreturn defined above.
+ monkeypatch.setattr(Path, "home", mockreturn)
+
+ # Calling getssh() will use mockreturn in place of Path.home
+ # for this test with the monkeypatch.
+ x = getssh()
+ assert x == Path("/abc/.ssh")
+
+Monkeypatching returned objects: building mock classes
+------------------------------------------------------
+
+:py:meth:`monkeypatch.setattr <MonkeyPatch.setattr>` can be used in conjunction with classes to mock returned
+objects from functions instead of values.
+Imagine a simple function to take an API URL and return the JSON response.
+
+.. code-block:: python
+
+ # contents of app.py, a simple API retrieval example
+ import requests
+
+
+ def get_json(url):
+ """Takes a URL, and returns the JSON."""
+ r = requests.get(url)
+ return r.json()
+
+We need to mock ``r``, the returned response object for testing purposes.
+The mock of ``r`` needs a ``.json()`` method which returns a dictionary.
+This can be done in our test file by defining a class to represent ``r``.
+
+.. code-block:: python
+
+ # contents of test_app.py, a simple test for our API retrieval
+ # import requests for the purposes of monkeypatching
+ import requests
+
+ # our app.py that includes the get_json() function
+ # this is the previous code block example
+ import app
+
+ # custom class to be the mock return value
+ # will override the requests.Response returned from requests.get
+ class MockResponse:
+
+ # mock json() method always returns a specific testing dictionary
+ @staticmethod
+ def json():
+ return {"mock_key": "mock_response"}
+
+
+ def test_get_json(monkeypatch):
+
+ # Any arguments may be passed and mock_get() will always return our
+ # mocked object, which only has the .json() method.
+ def mock_get(*args, **kwargs):
+ return MockResponse()
+
+ # apply the monkeypatch for requests.get to mock_get
+ monkeypatch.setattr(requests, "get", mock_get)
+
+ # app.get_json, which contains requests.get, uses the monkeypatch
+ result = app.get_json("https://fakeurl")
+ assert result["mock_key"] == "mock_response"
+
+
+``monkeypatch`` applies the mock for ``requests.get`` with our ``mock_get`` function.
+The ``mock_get`` function returns an instance of the ``MockResponse`` class, which
+has a ``json()`` method defined to return a known testing dictionary and does not
+require any outside API connection.
+
+You can build the ``MockResponse`` class with the appropriate degree of complexity for
+the scenario you are testing. For instance, it could include an ``ok`` property that
+always returns ``True``, or return different values from the ``json()`` mocked method
+based on input strings.
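+
+One possible sketch of such a richer mock (the ``ok`` attribute and the
+URL-dependent payloads are invented for illustration):
+
+.. code-block:: python
+
+    class MockResponse:
+        ok = True
+
+        def __init__(self, url):
+            self.url = url
+
+        def json(self):
+            # return a different canned payload depending on the requested URL
+            if "users" in self.url:
+                return {"mock_key": "mock_users_payload"}
+            return {"mock_key": "mock_response"}
+
+
+    def mock_get(url, *args, **kwargs):
+        return MockResponse(url)
+
+The examples below keep the simpler ``MockResponse`` for clarity.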
+
+This mock can be shared across tests using a ``fixture``:
+
+.. code-block:: python
+
+ # contents of test_app.py, a simple test for our API retrieval
+ import pytest
+ import requests
+
+ # app.py that includes the get_json() function
+ import app
+
+ # custom class to be the mock return value of requests.get()
+ class MockResponse:
+ @staticmethod
+ def json():
+ return {"mock_key": "mock_response"}
+
+
+ # monkeypatched requests.get moved to a fixture
+ @pytest.fixture
+ def mock_response(monkeypatch):
+ """Requests.get() mocked to return {'mock_key':'mock_response'}."""
+
+ def mock_get(*args, **kwargs):
+ return MockResponse()
+
+ monkeypatch.setattr(requests, "get", mock_get)
+
+
+ # notice our test uses the custom fixture instead of monkeypatch directly
+ def test_get_json(mock_response):
+ result = app.get_json("https://fakeurl")
+ assert result["mock_key"] == "mock_response"
+
+
+Furthermore, if the mock was designed to be applied to all tests, the ``fixture`` could
+be moved to a ``conftest.py`` file and use the ``autouse=True`` option.
+
+
+Global patch example: preventing "requests" from remote operations
+------------------------------------------------------------------
+
+If you want to prevent the "requests" library from performing HTTP
+requests in all your tests, you can do:
+
+.. code-block:: python
+
+ # contents of conftest.py
+ import pytest
+
+
+ @pytest.fixture(autouse=True)
+ def no_requests(monkeypatch):
+ """Remove requests.sessions.Session.request for all tests."""
+ monkeypatch.delattr("requests.sessions.Session.request")
+
+This autouse fixture will be executed for each test function and it
+will delete the method ``requests.sessions.Session.request``
+so that any attempts within tests to create HTTP requests will fail.
+
+
+.. note::
+
+ Be advised that it is not recommended to patch builtin functions such as ``open``,
+ ``compile``, etc., because it might break pytest's internals. If that's
+ unavoidable, passing ``--tb=native``, ``--assert=plain`` and ``--capture=no`` might
+ help although there's no guarantee.
+
+.. note::
+
+ Mind that patching ``stdlib`` functions and some third-party libraries used by pytest
+ might break pytest itself, therefore in those cases it is recommended to use
+ :meth:`MonkeyPatch.context` to limit the patching to the block you want tested:
+
+ .. code-block:: python
+
+ import functools
+
+
+ def test_partial(monkeypatch):
+ with monkeypatch.context() as m:
+ m.setattr(functools, "partial", 3)
+ assert functools.partial == 3
+
+ See :issue:`3290` for details.
+
+
+Monkeypatching environment variables
+------------------------------------
+
+If you are working with environment variables you often need to safely change the values
+or delete them from the system for testing purposes. ``monkeypatch`` provides a mechanism
+to do this using the ``setenv`` and ``delenv`` methods. Our example code to test:
+
+.. code-block:: python
+
+ # contents of our original code file e.g. code.py
+ import os
+
+
+ def get_os_user_lower():
+ """Simple retrieval function.
+ Returns lowercase USER or raises OSError."""
+ username = os.getenv("USER")
+
+ if username is None:
+ raise OSError("USER environment is not set.")
+
+ return username.lower()
+
+There are two potential paths. First, the ``USER`` environment variable is set to a
+value. Second, the ``USER`` environment variable does not exist. Using ``monkeypatch``
+both paths can be safely tested without impacting the running environment:
+
+.. code-block:: python
+
+ # contents of our test file e.g. test_code.py
+ import pytest
+
+
+ def test_upper_to_lower(monkeypatch):
+ """Set the USER env var to assert the behavior."""
+ monkeypatch.setenv("USER", "TestingUser")
+ assert get_os_user_lower() == "testinguser"
+
+
+ def test_raise_exception(monkeypatch):
+ """Remove the USER env var and assert OSError is raised."""
+ monkeypatch.delenv("USER", raising=False)
+
+ with pytest.raises(OSError):
+ _ = get_os_user_lower()
+
+This behavior can be moved into ``fixture`` structures and shared across tests:
+
+.. code-block:: python
+
+ # contents of our test file e.g. test_code.py
+ import pytest
+
+
+ @pytest.fixture
+ def mock_env_user(monkeypatch):
+ monkeypatch.setenv("USER", "TestingUser")
+
+
+ @pytest.fixture
+ def mock_env_missing(monkeypatch):
+ monkeypatch.delenv("USER", raising=False)
+
+
+ # notice the tests reference the fixtures for mocks
+ def test_upper_to_lower(mock_env_user):
+ assert get_os_user_lower() == "testinguser"
+
+
+ def test_raise_exception(mock_env_missing):
+ with pytest.raises(OSError):
+ _ = get_os_user_lower()
+
+
+Monkeypatching dictionaries
+---------------------------
+
+:py:meth:`monkeypatch.setitem <MonkeyPatch.setitem>` can be used to safely set the values of dictionaries
+to specific values during tests. Take this simplified connection string example:
+
+.. code-block:: python
+
+ # contents of app.py to generate a simple connection string
+ DEFAULT_CONFIG = {"user": "user1", "database": "db1"}
+
+
+ def create_connection_string(config=None):
+ """Creates a connection string from input or defaults."""
+ config = config or DEFAULT_CONFIG
+ return f"User Id={config['user']}; Location={config['database']};"
+
+For testing purposes we can patch the ``DEFAULT_CONFIG`` dictionary to specific values.
+
+.. code-block:: python
+
+ # contents of test_app.py
+ # app.py with the connection string function (prior code block)
+ import app
+
+
+ def test_connection(monkeypatch):
+
+ # Patch the values of DEFAULT_CONFIG to specific
+ # testing values only for this test.
+ monkeypatch.setitem(app.DEFAULT_CONFIG, "user", "test_user")
+ monkeypatch.setitem(app.DEFAULT_CONFIG, "database", "test_db")
+
+ # expected result based on the mocks
+ expected = "User Id=test_user; Location=test_db;"
+
+ # the test uses the monkeypatched dictionary settings
+ result = app.create_connection_string()
+ assert result == expected
+
+You can use :py:meth:`monkeypatch.delitem <MonkeyPatch.delitem>` to remove values.
+
+.. code-block:: python
+
+ # contents of test_app.py
+ import pytest
+
+ # app.py with the connection string function
+ import app
+
+
+ def test_missing_user(monkeypatch):
+
+        # patch the DEFAULT_CONFIG to be missing the 'user' key
+ monkeypatch.delitem(app.DEFAULT_CONFIG, "user", raising=False)
+
+        # KeyError expected because a config is not passed, and the
+ # default is now missing the 'user' entry.
+ with pytest.raises(KeyError):
+ _ = app.create_connection_string()
+
+
+The modularity of fixtures gives you the flexibility to define
+separate fixtures for each potential mock and reference them in the needed tests.
+
+.. code-block:: python
+
+ # contents of test_app.py
+ import pytest
+
+ # app.py with the connection string function
+ import app
+
+ # all of the mocks are moved into separated fixtures
+ @pytest.fixture
+ def mock_test_user(monkeypatch):
+ """Set the DEFAULT_CONFIG user to test_user."""
+ monkeypatch.setitem(app.DEFAULT_CONFIG, "user", "test_user")
+
+
+ @pytest.fixture
+ def mock_test_database(monkeypatch):
+ """Set the DEFAULT_CONFIG database to test_db."""
+ monkeypatch.setitem(app.DEFAULT_CONFIG, "database", "test_db")
+
+
+ @pytest.fixture
+ def mock_missing_default_user(monkeypatch):
+ """Remove the user key from DEFAULT_CONFIG"""
+ monkeypatch.delitem(app.DEFAULT_CONFIG, "user", raising=False)
+
+
+ # tests reference only the fixture mocks that are needed
+ def test_connection(mock_test_user, mock_test_database):
+
+ expected = "User Id=test_user; Location=test_db;"
+
+ result = app.create_connection_string()
+ assert result == expected
+
+
+ def test_missing_user(mock_missing_default_user):
+
+ with pytest.raises(KeyError):
+ _ = app.create_connection_string()
+
+
+.. currentmodule:: _pytest.monkeypatch
+
+API Reference
+-------------
+
+Consult the docs for the :class:`MonkeyPatch` class.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/nose.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/nose.rst
new file mode 100644
index 0000000000..4bf8b06c32
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/nose.rst
@@ -0,0 +1,79 @@
+.. _`noseintegration`:
+
+How to run tests written for nose
+=======================================
+
+``pytest`` has basic support for running tests written for nose_.
+
+.. _nosestyle:
+
+Usage
+-------------
+
+After :ref:`installation` type:
+
+.. code-block:: bash
+
+ python setup.py develop # make sure tests can import our package
+ pytest # instead of 'nosetests'
+
+and you should be able to run your nose style tests and
+make use of pytest's capabilities.
+
+Supported nose Idioms
+----------------------
+
+* setup and teardown at module/class/method level
+* SkipTest exceptions and markers
+* setup/teardown decorators
+* ``__test__`` attribute on modules/classes/functions
+* general usage of nose utilities
+
+Unsupported idioms / known issues
+----------------------------------
+
+- unittest-style ``setUp, tearDown, setUpClass, tearDownClass``
+ are recognized only on ``unittest.TestCase`` classes but not
+ on plain classes. ``nose`` supports these methods also on plain
+ classes but pytest deliberately does not. As nose and pytest already
+ both support ``setup_class, teardown_class, setup_method, teardown_method``
+ it doesn't seem useful to duplicate the unittest-API like nose does.
+ If you however rather think pytest should support the unittest-spelling on
+ plain classes please post to :issue:`377`.
+
+- nose imports test modules with the same import path (e.g.
+ ``tests.test_mode``) but different file system paths
+ (e.g. ``tests/test_mode.py`` and ``other/tests/test_mode.py``)
+ by extending sys.path/import semantics. pytest does not do that
+ but there is discussion in :issue:`268` for adding some support. Note that
+  `nose2 chose to avoid this sys.path/import hackery <https://nose2.readthedocs.io/en/latest/differences.html#test-discovery-and-loading>`_.
+
+ If you place a conftest.py file in the root directory of your project
+ (as determined by pytest) pytest will run tests "nose style" against
+ the code below that directory by adding it to your ``sys.path`` instead of
+ running against your installed code.
+
+ You may find yourself wanting to do this if you ran ``python setup.py install``
+ to set up your project, as opposed to ``python setup.py develop`` or any of
+ the package manager equivalents. Installing with develop in a
+ virtual environment like tox is recommended over this pattern.
+
+- nose-style doctests are not collected and executed correctly,
+ also doctest fixtures don't work.
+
+- no nose-configuration is recognized.
+
+- ``yield``-based methods are unsupported as of pytest 4.1.0. They are
+ fundamentally incompatible with pytest because they don't support fixtures
+ properly since collection and test execution are separated.
+
+Migrating from nose to pytest
+------------------------------
+
+`nose2pytest <https://github.com/pytest-dev/nose2pytest>`_ is a Python script
+and pytest plugin to help convert Nose-based tests into pytest-based tests.
+Specifically, the script transforms nose.tools.assert_* function calls into
+raw assert statements, while preserving the format of the original arguments
+as much as possible.
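+
+A rough before/after sketch of the kind of rewrite it performs (the exact
+output of the tool may differ):
+
+.. code-block:: python
+
+    # nose-style original
+    from nose.tools import assert_equal, assert_true
+
+
+    def test_math_nose():
+        assert_equal(2 + 2, 4)
+        assert_true(isinstance(4, int))
+
+
+    # roughly what nose2pytest produces
+    def test_math_pytest():
+        assert 2 + 2 == 4
+        assert isinstance(4, int)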
+
+.. _nose: https://nose.readthedocs.io/en/latest/
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/output.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/output.rst
new file mode 100644
index 0000000000..4b90988f49
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/output.rst
@@ -0,0 +1,710 @@
+.. _how-to-manage-output:
+
+Managing pytest's output
+=========================
+
+.. _how-to-modifying-python-tb-printing:
+
+Modifying Python traceback printing
+--------------------------------------------------
+
+Examples for modifying traceback printing:
+
+.. code-block:: bash
+
+ pytest --showlocals # show local variables in tracebacks
+ pytest -l # show local variables (shortcut)
+
+ pytest --tb=auto # (default) 'long' tracebacks for the first and last
+ # entry, but 'short' style for the other entries
+ pytest --tb=long # exhaustive, informative traceback formatting
+ pytest --tb=short # shorter traceback format
+ pytest --tb=line # only one line per failure
+ pytest --tb=native # Python standard library formatting
+ pytest --tb=no # no traceback at all
+
+The ``--full-trace`` causes very long traces to be printed on error (longer
+than ``--tb=long``). It also ensures that a stack trace is printed on
+**KeyboardInterrupt** (Ctrl+C).
+This is very useful if the tests are taking too long and you interrupt them
+with Ctrl+C to find out where the tests are *hanging*. By default no output
+will be shown (because KeyboardInterrupt is caught by pytest). By using this
+option you make sure a trace is shown.
+
+
+Verbosity
+--------------------------------------------------
+
+The ``-v`` flag controls the verbosity of pytest output in various aspects: test session progress, assertion
+details when tests fail, fixtures details with ``--fixtures``, etc.
+
+.. regendoc:wipe
+
+Consider this simple file:
+
+.. code-block:: python
+
+ # content of test_verbosity_example.py
+ def test_ok():
+ pass
+
+
+ def test_words_fail():
+ fruits1 = ["banana", "apple", "grapes", "melon", "kiwi"]
+ fruits2 = ["banana", "apple", "orange", "melon", "kiwi"]
+ assert fruits1 == fruits2
+
+
+ def test_numbers_fail():
+ number_to_text1 = {str(x): x for x in range(5)}
+ number_to_text2 = {str(x * 10): x * 10 for x in range(5)}
+ assert number_to_text1 == number_to_text2
+
+
+ def test_long_text_fail():
+ long_text = "Lorem ipsum dolor sit amet " * 10
+ assert "hello world" in long_text
+
+Executing pytest normally gives us this output (we are skipping the header to focus on the rest):
+
+.. code-block:: pytest
+
+ $ pytest --no-header
+ =========================== test session starts ============================
+ collected 4 items
+
+ test_verbosity_example.py .FFF [100%]
+
+ ================================= FAILURES =================================
+ _____________________________ test_words_fail ______________________________
+
+ def test_words_fail():
+ fruits1 = ["banana", "apple", "grapes", "melon", "kiwi"]
+ fruits2 = ["banana", "apple", "orange", "melon", "kiwi"]
+ > assert fruits1 == fruits2
+ E AssertionError: assert ['banana', 'a...elon', 'kiwi'] == ['banana', 'a...elon', 'kiwi']
+ E At index 2 diff: 'grapes' != 'orange'
+ E Use -v to get the full diff
+
+ test_verbosity_example.py:8: AssertionError
+ ____________________________ test_numbers_fail _____________________________
+
+ def test_numbers_fail():
+ number_to_text1 = {str(x): x for x in range(5)}
+ number_to_text2 = {str(x * 10): x * 10 for x in range(5)}
+ > assert number_to_text1 == number_to_text2
+ E AssertionError: assert {'0': 0, '1':..., '3': 3, ...} == {'0': 0, '10'...'30': 30, ...}
+ E Omitting 1 identical items, use -vv to show
+ E Left contains 4 more items:
+ E {'1': 1, '2': 2, '3': 3, '4': 4}
+ E Right contains 4 more items:
+ E {'10': 10, '20': 20, '30': 30, '40': 40}
+ E Use -v to get the full diff
+
+ test_verbosity_example.py:14: AssertionError
+ ___________________________ test_long_text_fail ____________________________
+
+ def test_long_text_fail():
+ long_text = "Lorem ipsum dolor sit amet " * 10
+ > assert "hello world" in long_text
+ E AssertionError: assert 'hello world' in 'Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ips... sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet '
+
+ test_verbosity_example.py:19: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_verbosity_example.py::test_words_fail - AssertionError: asser...
+ FAILED test_verbosity_example.py::test_numbers_fail - AssertionError: ass...
+ FAILED test_verbosity_example.py::test_long_text_fail - AssertionError: a...
+ ======================= 3 failed, 1 passed in 0.12s ========================
+
+Notice that:
+
+* Each test inside the file is shown by a single character in the output: ``.`` for passing, ``F`` for failure.
+* ``test_words_fail`` failed, and we are shown a short summary indicating that index 2 of the two lists differs.
+* ``test_numbers_fail`` failed, and we are shown a summary of left/right differences on dictionary items. Identical items are omitted.
+* ``test_long_text_fail`` failed, and the right hand side of the ``in`` statement is truncated using ``...``
+ because it is longer than an internal threshold (240 characters currently).
+
+Now we can increase pytest's verbosity:
+
+.. code-block:: pytest
+
+ $ pytest --no-header -v
+ =========================== test session starts ============================
+ collecting ... collected 4 items
+
+ test_verbosity_example.py::test_ok PASSED [ 25%]
+ test_verbosity_example.py::test_words_fail FAILED [ 50%]
+ test_verbosity_example.py::test_numbers_fail FAILED [ 75%]
+ test_verbosity_example.py::test_long_text_fail FAILED [100%]
+
+ ================================= FAILURES =================================
+ _____________________________ test_words_fail ______________________________
+
+ def test_words_fail():
+ fruits1 = ["banana", "apple", "grapes", "melon", "kiwi"]
+ fruits2 = ["banana", "apple", "orange", "melon", "kiwi"]
+ > assert fruits1 == fruits2
+ E AssertionError: assert ['banana', 'a...elon', 'kiwi'] == ['banana', 'a...elon', 'kiwi']
+ E At index 2 diff: 'grapes' != 'orange'
+ E Full diff:
+ E - ['banana', 'apple', 'orange', 'melon', 'kiwi']
+ E ? ^ ^^
+ E + ['banana', 'apple', 'grapes', 'melon', 'kiwi']
+ E ? ^ ^ +
+
+ test_verbosity_example.py:8: AssertionError
+ ____________________________ test_numbers_fail _____________________________
+
+ def test_numbers_fail():
+ number_to_text1 = {str(x): x for x in range(5)}
+ number_to_text2 = {str(x * 10): x * 10 for x in range(5)}
+ > assert number_to_text1 == number_to_text2
+ E AssertionError: assert {'0': 0, '1':..., '3': 3, ...} == {'0': 0, '10'...'30': 30, ...}
+ E Omitting 1 identical items, use -vv to show
+ E Left contains 4 more items:
+ E {'1': 1, '2': 2, '3': 3, '4': 4}
+ E Right contains 4 more items:
+ E {'10': 10, '20': 20, '30': 30, '40': 40}
+ E Full diff:
+ E - {'0': 0, '10': 10, '20': 20, '30': 30, '40': 40}...
+ E
+ E ...Full output truncated (3 lines hidden), use '-vv' to show
+
+ test_verbosity_example.py:14: AssertionError
+ ___________________________ test_long_text_fail ____________________________
+
+ def test_long_text_fail():
+ long_text = "Lorem ipsum dolor sit amet " * 10
+ > assert "hello world" in long_text
+ E AssertionError: assert 'hello world' in 'Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet '
+
+ test_verbosity_example.py:19: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_verbosity_example.py::test_words_fail - AssertionError: asser...
+ FAILED test_verbosity_example.py::test_numbers_fail - AssertionError: ass...
+ FAILED test_verbosity_example.py::test_long_text_fail - AssertionError: a...
+ ======================= 3 failed, 1 passed in 0.12s ========================
+
+Notice now that:
+
+* Each test inside the file gets its own line in the output.
+* ``test_words_fail`` now shows the two failing lists in full, in addition to which index differs.
+* ``test_numbers_fail`` now shows a text diff of the two dictionaries, truncated.
+* ``test_long_text_fail`` no longer truncates the right hand side of the ``in`` statement, because the internal
+ threshold for truncation is larger now (2400 characters currently).
+
+Now if we increase verbosity even more:
+
+.. code-block:: pytest
+
+ $ pytest --no-header -vv
+ =========================== test session starts ============================
+ collecting ... collected 4 items
+
+ test_verbosity_example.py::test_ok PASSED [ 25%]
+ test_verbosity_example.py::test_words_fail FAILED [ 50%]
+ test_verbosity_example.py::test_numbers_fail FAILED [ 75%]
+ test_verbosity_example.py::test_long_text_fail FAILED [100%]
+
+ ================================= FAILURES =================================
+ _____________________________ test_words_fail ______________________________
+
+ def test_words_fail():
+ fruits1 = ["banana", "apple", "grapes", "melon", "kiwi"]
+ fruits2 = ["banana", "apple", "orange", "melon", "kiwi"]
+ > assert fruits1 == fruits2
+ E AssertionError: assert ['banana', 'apple', 'grapes', 'melon', 'kiwi'] == ['banana', 'apple', 'orange', 'melon', 'kiwi']
+ E At index 2 diff: 'grapes' != 'orange'
+ E Full diff:
+ E - ['banana', 'apple', 'orange', 'melon', 'kiwi']
+ E ? ^ ^^
+ E + ['banana', 'apple', 'grapes', 'melon', 'kiwi']
+ E ? ^ ^ +
+
+ test_verbosity_example.py:8: AssertionError
+ ____________________________ test_numbers_fail _____________________________
+
+ def test_numbers_fail():
+ number_to_text1 = {str(x): x for x in range(5)}
+ number_to_text2 = {str(x * 10): x * 10 for x in range(5)}
+ > assert number_to_text1 == number_to_text2
+ E AssertionError: assert {'0': 0, '1': 1, '2': 2, '3': 3, '4': 4} == {'0': 0, '10': 10, '20': 20, '30': 30, '40': 40}
+ E Common items:
+ E {'0': 0}
+ E Left contains 4 more items:
+ E {'1': 1, '2': 2, '3': 3, '4': 4}
+ E Right contains 4 more items:
+ E {'10': 10, '20': 20, '30': 30, '40': 40}
+ E Full diff:
+ E - {'0': 0, '10': 10, '20': 20, '30': 30, '40': 40}
+ E ? - - - - - - - -
+ E + {'0': 0, '1': 1, '2': 2, '3': 3, '4': 4}
+
+ test_verbosity_example.py:14: AssertionError
+ ___________________________ test_long_text_fail ____________________________
+
+ def test_long_text_fail():
+ long_text = "Lorem ipsum dolor sit amet " * 10
+ > assert "hello world" in long_text
+ E AssertionError: assert 'hello world' in 'Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet Lorem ipsum dolor sit amet '
+
+ test_verbosity_example.py:19: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_verbosity_example.py::test_words_fail - AssertionError: asser...
+ FAILED test_verbosity_example.py::test_numbers_fail - AssertionError: ass...
+ FAILED test_verbosity_example.py::test_long_text_fail - AssertionError: a...
+ ======================= 3 failed, 1 passed in 0.12s ========================
+
+Notice now that:
+
+* Each test inside the file gets its own line in the output.
+* ``test_words_fail`` gives the same output as before in this case.
+* ``test_numbers_fail`` now shows a full text diff of the two dictionaries.
+* ``test_long_text_fail`` also doesn't truncate on the right hand side as before, but now pytest won't truncate any
+ text at all, regardless of its size.
+
+Those were examples of how verbosity affects normal test session output, but verbosity is also used in other
+situations; for example, ``pytest --fixtures -v`` even shows fixtures whose names start with ``_``.
+
+Using higher verbosity levels (``-vvv``, ``-vvvv``, ...) is supported, but currently has no effect in pytest itself;
+however, some plugins might make use of higher verbosity.
+
+.. _`pytest.detailed_failed_tests_usage`:
+
+Producing a detailed summary report
+--------------------------------------------------
+
+The ``-r`` flag can be used to display a "short test summary info" at the end of the test session,
+making it easy in large test suites to get a clear picture of all failures, skips, xfails, etc.
+
+It defaults to ``fE`` to list failures and errors.
+
+.. regendoc:wipe
+
+Example:
+
+.. code-block:: python
+
+ # content of test_example.py
+ import pytest
+
+
+ @pytest.fixture
+ def error_fixture():
+ assert 0
+
+
+ def test_ok():
+ print("ok")
+
+
+ def test_fail():
+ assert 0
+
+
+ def test_error(error_fixture):
+ pass
+
+
+ def test_skip():
+ pytest.skip("skipping this test")
+
+
+ def test_xfail():
+ pytest.xfail("xfailing this test")
+
+
+ @pytest.mark.xfail(reason="always xfail")
+ def test_xpass():
+ pass
+
+
+.. code-block:: pytest
+
+ $ pytest -ra
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 6 items
+
+ test_example.py .FEsxX [100%]
+
+ ================================== ERRORS ==================================
+ _______________________ ERROR at setup of test_error _______________________
+
+ @pytest.fixture
+ def error_fixture():
+ > assert 0
+ E assert 0
+
+ test_example.py:6: AssertionError
+ ================================= FAILURES =================================
+ ________________________________ test_fail _________________________________
+
+ def test_fail():
+ > assert 0
+ E assert 0
+
+ test_example.py:14: AssertionError
+ ========================= short test summary info ==========================
+ SKIPPED [1] test_example.py:22: skipping this test
+ XFAIL test_example.py::test_xfail
+ reason: xfailing this test
+ XPASS test_example.py::test_xpass always xfail
+ ERROR test_example.py::test_error - assert 0
+ FAILED test_example.py::test_fail - assert 0
+ == 1 failed, 1 passed, 1 skipped, 1 xfailed, 1 xpassed, 1 error in 0.12s ===
+
+The ``-r`` option accepts a number of characters after it, with ``a`` used
+above meaning "all except passes".
+
+Here is the full list of available characters that can be used:
+
+ - ``f`` - failed
+ - ``E`` - error
+ - ``s`` - skipped
+ - ``x`` - xfailed
+ - ``X`` - xpassed
+ - ``p`` - passed
+ - ``P`` - passed with output
+
+Special characters for (de)selection of groups:
+
+ - ``a`` - all except ``pP``
+ - ``A`` - all
+ - ``N`` - none, this can be used to display nothing (since ``fE`` is the default)
+
+More than one character can be used, so for example to only see failed and skipped tests, you can execute:
+
+.. code-block:: pytest
+
+ $ pytest -rfs
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 6 items
+
+ test_example.py .FEsxX [100%]
+
+ ================================== ERRORS ==================================
+ _______________________ ERROR at setup of test_error _______________________
+
+ @pytest.fixture
+ def error_fixture():
+ > assert 0
+ E assert 0
+
+ test_example.py:6: AssertionError
+ ================================= FAILURES =================================
+ ________________________________ test_fail _________________________________
+
+ def test_fail():
+ > assert 0
+ E assert 0
+
+ test_example.py:14: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_example.py::test_fail - assert 0
+ SKIPPED [1] test_example.py:22: skipping this test
+ == 1 failed, 1 passed, 1 skipped, 1 xfailed, 1 xpassed, 1 error in 0.12s ===
+
+Using ``p`` lists the passing tests, whilst ``P`` adds an extra section "PASSES" with those tests that passed but had
+captured output:
+
+.. code-block:: pytest
+
+ $ pytest -rpP
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 6 items
+
+ test_example.py .FEsxX [100%]
+
+ ================================== ERRORS ==================================
+ _______________________ ERROR at setup of test_error _______________________
+
+ @pytest.fixture
+ def error_fixture():
+ > assert 0
+ E assert 0
+
+ test_example.py:6: AssertionError
+ ================================= FAILURES =================================
+ ________________________________ test_fail _________________________________
+
+ def test_fail():
+ > assert 0
+ E assert 0
+
+ test_example.py:14: AssertionError
+ ================================== PASSES ==================================
+ _________________________________ test_ok __________________________________
+ --------------------------- Captured stdout call ---------------------------
+ ok
+ ========================= short test summary info ==========================
+ PASSED test_example.py::test_ok
+ == 1 failed, 1 passed, 1 skipped, 1 xfailed, 1 xpassed, 1 error in 0.12s ===
+
+Creating resultlog format files
+--------------------------------------------------
+
+To create plain-text machine-readable result files you can issue:
+
+.. code-block:: bash
+
+ pytest --resultlog=path
+
+and look at the content at the ``path`` location. Such files are used e.g.
+by the `PyPy-test`_ web page to show test results over several revisions.
+
+.. warning::
+
+ This option is rarely used and is scheduled for removal in pytest 6.0.
+
+ If you use this option, consider using the new `pytest-reportlog <https://github.com/pytest-dev/pytest-reportlog>`__ plugin instead.
+
+ See :ref:`the deprecation docs <resultlog deprecated>` for more information.
+
+
+.. _`PyPy-test`: http://buildbot.pypy.org/summary
+
+
+Creating JUnitXML format files
+----------------------------------------------------
+
+To create result files which can be read by Jenkins_ or other continuous
+integration servers, use this invocation:
+
+.. code-block:: bash
+
+ pytest --junitxml=path
+
+to create an XML file at ``path``.
+
+To set the name of the root test suite xml item, you can configure the ``junit_suite_name`` option in your config file:
+
+.. code-block:: ini
+
+ [pytest]
+ junit_suite_name = my_suite
+
+.. versionadded:: 4.0
+
+The JUnit XML specification seems to indicate that the ``"time"`` attribute
+should report total test execution times, including setup and teardown
+(`1 <http://windyroad.com.au/dl/Open%20Source/JUnit.xsd>`_, `2
+<https://www.ibm.com/support/knowledgecenter/en/SSQ2R2_14.1.0/com.ibm.rsar.analysis.codereview.cobol.doc/topics/cac_useresults_junit.html>`_).
+This is the default pytest behavior. To report just call durations
+instead, configure the ``junit_duration_report`` option like this:
+
+.. code-block:: ini
+
+ [pytest]
+ junit_duration_report = call
+
+.. _record_property example:
+
+record_property
+~~~~~~~~~~~~~~~~~
+
+If you want to log additional information for a test, you can use the
+``record_property`` fixture:
+
+.. code-block:: python
+
+ def test_function(record_property):
+ record_property("example_key", 1)
+ assert True
+
+This will add an extra property ``example_key="1"`` to the generated
+``testcase`` tag:
+
+.. code-block:: xml
+
+ <testcase classname="test_function" file="test_function.py" line="0" name="test_function" time="0.0009">
+ <properties>
+ <property name="example_key" value="1" />
+ </properties>
+ </testcase>
+
+Alternatively, you can integrate this functionality with custom markers:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+
+ def pytest_collection_modifyitems(session, config, items):
+ for item in items:
+ for marker in item.iter_markers(name="test_id"):
+ test_id = marker.args[0]
+ item.user_properties.append(("test_id", test_id))
+
+And in your tests:
+
+.. code-block:: python
+
+ # content of test_function.py
+ import pytest
+
+
+ @pytest.mark.test_id(1501)
+ def test_function():
+ assert True
+
+Will result in:
+
+.. code-block:: xml
+
+ <testcase classname="test_function" file="test_function.py" line="0" name="test_function" time="0.0009">
+ <properties>
+ <property name="test_id" value="1501" />
+ </properties>
+ </testcase>
+
+.. warning::
+
+ Please note that using this feature will break schema verifications for the latest JUnitXML schema.
+ This might be a problem when used with some CI servers.
+
+
+record_xml_attribute
+~~~~~~~~~~~~~~~~~~~~~~~
+
+To add an additional XML attribute to a testcase element, you can use the
+``record_xml_attribute`` fixture. This can also be used to override existing values:
+
+.. code-block:: python
+
+ def test_function(record_xml_attribute):
+ record_xml_attribute("assertions", "REQ-1234")
+ record_xml_attribute("classname", "custom_classname")
+ print("hello world")
+ assert True
+
+Unlike ``record_property``, this will not add a new child element.
+Instead, this will add an attribute ``assertions="REQ-1234"`` inside the generated
+``testcase`` tag and override the default ``classname`` with ``classname="custom_classname"``:
+
+.. code-block:: xml
+
+ <testcase classname="custom_classname" file="test_function.py" line="0" name="test_function" time="0.003" assertions="REQ-1234">
+ <system-out>
+ hello world
+ </system-out>
+ </testcase>
+
+.. warning::
+
+ ``record_xml_attribute`` is an experimental feature, and its interface might be replaced
+ by something more powerful and general in future versions. The
+ functionality per-se will be kept, however.
+
+   Using this over ``record_xml_property`` can help when using CI tools to parse the XML report.
+   However, some parsers are quite strict about the elements and attributes that are allowed.
+   Many tools use an XSD schema (like the example below) to validate incoming XML.
+ Make sure you are using attribute names that are allowed by your parser.
+
+   Below is the schema used by Jenkins to validate the XML report:
+
+ .. code-block:: xml
+
+ <xs:element name="testcase">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="skipped" minOccurs="0" maxOccurs="1"/>
+ <xs:element ref="error" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element ref="failure" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element ref="system-out" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element ref="system-err" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ <xs:attribute name="name" type="xs:string" use="required"/>
+ <xs:attribute name="assertions" type="xs:string" use="optional"/>
+ <xs:attribute name="time" type="xs:string" use="optional"/>
+ <xs:attribute name="classname" type="xs:string" use="optional"/>
+ <xs:attribute name="status" type="xs:string" use="optional"/>
+ </xs:complexType>
+ </xs:element>
+
+.. warning::
+
+ Please note that using this feature will break schema verifications for the latest JUnitXML schema.
+ This might be a problem when used with some CI servers.
+
+.. _record_testsuite_property example:
+
+record_testsuite_property
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. versionadded:: 4.5
+
+If you want to add a properties node at the test-suite level, which may contain properties
+that are relevant to all tests, you can use the ``record_testsuite_property`` session-scoped fixture:
+
+.. code-block:: python
+
+ import pytest
+
+
+ @pytest.fixture(scope="session", autouse=True)
+ def log_global_env_facts(record_testsuite_property):
+ record_testsuite_property("ARCH", "PPC")
+ record_testsuite_property("STORAGE_TYPE", "CEPH")
+
+
+ class TestMe:
+ def test_foo(self):
+ assert True
+
+The fixture is a callable which receives ``name`` and ``value`` of a ``<property>`` tag
+added at the test-suite level of the generated xml:
+
+.. code-block:: xml
+
+ <testsuite errors="0" failures="0" name="pytest" skipped="0" tests="1" time="0.006">
+ <properties>
+ <property name="ARCH" value="PPC"/>
+ <property name="STORAGE_TYPE" value="CEPH"/>
+ </properties>
+ <testcase classname="test_me.TestMe" file="test_me.py" line="16" name="test_foo" time="0.000243663787842"/>
+ </testsuite>
+
+``name`` must be a string; ``value`` will be converted to a string and properly XML-escaped.
+
+The generated XML is compatible with the latest ``xunit`` standard, unlike `record_property`_
+and `record_xml_attribute`_.
+
+
+Sending test report to an online pastebin service
+--------------------------------------------------
+
+**Creating a URL for each test failure**:
+
+.. code-block:: bash
+
+ pytest --pastebin=failed
+
+This will submit test run information to a remote Paste service and
+provide a URL for each failure. You may select tests as usual or add
+for example ``-x`` if you only want to send one particular failure.
+
+**Creating a URL for a whole test session log**:
+
+.. code-block:: bash
+
+ pytest --pastebin=all
+
+Currently only pasting to the https://bpaste.net/ service is implemented.
+
+.. versionchanged:: 5.2
+
+If creating the URL fails for any reason, a warning is generated instead of failing the
+entire test suite.
+
+.. _jenkins: https://jenkins-ci.org
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/parametrize.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/parametrize.rst
new file mode 100644
index 0000000000..a0c9968428
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/parametrize.rst
@@ -0,0 +1,298 @@
+
+.. _`test generators`:
+.. _`parametrizing-tests`:
+.. _`parametrized test functions`:
+.. _`parametrize`:
+
+.. _`parametrize-basics`:
+
+How to parametrize fixtures and test functions
+==========================================================================
+
+pytest enables test parametrization at several levels:
+
+- :py:func:`pytest.fixture` allows one to :ref:`parametrize fixture
+ functions <fixture-parametrize>`.
+
+* `@pytest.mark.parametrize`_ allows one to define multiple sets of
+ arguments and fixtures at the test function or class.
+
+* `pytest_generate_tests`_ allows one to define custom parametrization
+ schemes or extensions.
+
+.. _parametrizemark:
+.. _`@pytest.mark.parametrize`:
+
+
+``@pytest.mark.parametrize``: parametrizing test functions
+---------------------------------------------------------------------
+
+.. regendoc:wipe
+
+The builtin :ref:`pytest.mark.parametrize ref` decorator enables
+parametrization of arguments for a test function. Here is a typical example
+of a test function that implements checking that a certain input leads
+to an expected output:
+
+.. code-block:: python
+
+ # content of test_expectation.py
+ import pytest
+
+
+ @pytest.mark.parametrize("test_input,expected", [("3+5", 8), ("2+4", 6), ("6*9", 42)])
+ def test_eval(test_input, expected):
+ assert eval(test_input) == expected
+
+Here, the ``@parametrize`` decorator defines three different ``(test_input,expected)``
+tuples so that the ``test_eval`` function will run three times using
+them in turn:
+
+.. code-block:: pytest
+
+ $ pytest
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 3 items
+
+ test_expectation.py ..F [100%]
+
+ ================================= FAILURES =================================
+ ____________________________ test_eval[6*9-42] _____________________________
+
+ test_input = '6*9', expected = 42
+
+ @pytest.mark.parametrize("test_input,expected", [("3+5", 8), ("2+4", 6), ("6*9", 42)])
+ def test_eval(test_input, expected):
+ > assert eval(test_input) == expected
+ E AssertionError: assert 54 == 42
+ E + where 54 = eval('6*9')
+
+ test_expectation.py:6: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_expectation.py::test_eval[6*9-42] - AssertionError: assert 54...
+ ======================= 1 failed, 2 passed in 0.12s ========================
+
+.. note::
+
+ Parameter values are passed as-is to tests (no copy whatsoever).
+
+ For example, if you pass a list or a dict as a parameter value, and
+ the test case code mutates it, the mutations will be reflected in subsequent
+ test case calls.
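+
+   A minimal sketch of that pitfall:
+
+   .. code-block:: python
+
+      import pytest
+
+      same_list = []
+
+
+      @pytest.mark.parametrize("shared", [same_list, same_list])
+      def test_append(shared):
+          shared.append(1)
+          # passes for the first parameter set but fails for the second,
+          # because both parameter sets received the very same list object
+          assert len(shared) == 1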
+
+.. note::
+
+   pytest by default escapes any non-ASCII characters used in unicode strings
+   for the parametrization because displaying them directly has several downsides.
+   If however you would like to use unicode strings in parametrization
+   and see them in the terminal as is (non-escaped), use this option
+   in your ``pytest.ini``:
+
+ .. code-block:: ini
+
+ [pytest]
+ disable_test_id_escaping_and_forfeit_all_rights_to_community_support = True
+
+ Keep in mind however that this might cause unwanted side effects and
+ even bugs depending on the OS used and plugins currently installed,
+ so use it at your own risk.
+
+
+As designed in this example, only one pair of input/output values fails
+the simple test function. And as usual with test function arguments,
+you can see the ``test_input`` and ``expected`` values in the traceback.
+
+Note that you could also use the parametrize marker on a class or a module
+(see :ref:`mark`) which would invoke several functions with the argument sets,
+for instance:
+
+
+.. code-block:: python
+
+ import pytest
+
+
+ @pytest.mark.parametrize("n,expected", [(1, 2), (3, 4)])
+ class TestClass:
+ def test_simple_case(self, n, expected):
+ assert n + 1 == expected
+
+ def test_weird_simple_case(self, n, expected):
+ assert (n * 1) + 1 == expected
+
+
+To parametrize all tests in a module, you can assign to the :globalvar:`pytestmark` global variable:
+
+
+.. code-block:: python
+
+ import pytest
+
+ pytestmark = pytest.mark.parametrize("n,expected", [(1, 2), (3, 4)])
+
+
+ class TestClass:
+ def test_simple_case(self, n, expected):
+ assert n + 1 == expected
+
+ def test_weird_simple_case(self, n, expected):
+ assert (n * 1) + 1 == expected
+
+
+It is also possible to mark individual test instances within parametrize,
+for example with the builtin ``mark.xfail``:
+
+.. code-block:: python
+
+ # content of test_expectation.py
+ import pytest
+
+
+ @pytest.mark.parametrize(
+ "test_input,expected",
+ [("3+5", 8), ("2+4", 6), pytest.param("6*9", 42, marks=pytest.mark.xfail)],
+ )
+ def test_eval(test_input, expected):
+ assert eval(test_input) == expected
+
+Let's run this:
+
+.. code-block:: pytest
+
+ $ pytest
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 3 items
+
+ test_expectation.py ..x [100%]
+
+ ======================= 2 passed, 1 xfailed in 0.12s =======================
+
+The one parameter set which caused a failure previously now
+shows up as an "xfailed" (expected to fail) test.
+
+In case the values provided to ``parametrize`` result in an empty list - for
+example, if they're dynamically generated by some function - the behaviour of
+pytest is defined by the :confval:`empty_parameter_set_mark` option.
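+
+For example, a sketch where the parameter list may turn out empty
+(``load_cases`` is a hypothetical helper):
+
+.. code-block:: python
+
+    import pytest
+
+
+    def load_cases():
+        # imagine this collects cases from a file or service and finds none
+        return []
+
+
+    @pytest.mark.parametrize("case", load_cases())
+    def test_case(case):
+        assert case is not None
+
+With the default setting, ``test_case`` above is reported as skipped.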
+
+To get all combinations of multiple parametrized arguments you can stack
+``parametrize`` decorators:
+
+.. code-block:: python
+
+ import pytest
+
+
+ @pytest.mark.parametrize("x", [0, 1])
+ @pytest.mark.parametrize("y", [2, 3])
+ def test_foo(x, y):
+ pass
+
+This will run the test with the arguments set to ``x=0/y=2``, ``x=1/y=2``,
+``x=0/y=3``, and ``x=1/y=3`` exhausting parameters in the order of the decorators.
+
+.. _`pytest_generate_tests`:
+
+Basic ``pytest_generate_tests`` example
+---------------------------------------------
+
+Sometimes you may want to implement your own parametrization scheme
+or implement some dynamism for determining the parameters or scope
+of a fixture. For this, you can use the ``pytest_generate_tests`` hook
+which is called when collecting a test function. Through the passed in
+``metafunc`` object you can inspect the requesting test context and, most
+importantly, you can call ``metafunc.parametrize()`` to cause
+parametrization.
+
+For example, let's say we want to run a test taking string inputs which
+we want to set via a new ``pytest`` command line option. Let's first write
+a simple test accepting a ``stringinput`` fixture function argument:
+
+.. code-block:: python
+
+ # content of test_strings.py
+
+
+ def test_valid_string(stringinput):
+ assert stringinput.isalpha()
+
+Now we add a ``conftest.py`` file containing the addition of a
+command line option and the parametrization of our test function:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+
+ def pytest_addoption(parser):
+ parser.addoption(
+ "--stringinput",
+ action="append",
+ default=[],
+ help="list of stringinputs to pass to test functions",
+ )
+
+
+ def pytest_generate_tests(metafunc):
+ if "stringinput" in metafunc.fixturenames:
+ metafunc.parametrize("stringinput", metafunc.config.getoption("stringinput"))
+
+If we now pass two stringinput values, our test will run twice:
+
+.. code-block:: pytest
+
+ $ pytest -q --stringinput="hello" --stringinput="world" test_strings.py
+ .. [100%]
+ 2 passed in 0.12s
+
+Let's also run with a stringinput that will lead to a failing test:
+
+.. code-block:: pytest
+
+ $ pytest -q --stringinput="!" test_strings.py
+ F [100%]
+ ================================= FAILURES =================================
+ ___________________________ test_valid_string[!] ___________________________
+
+ stringinput = '!'
+
+ def test_valid_string(stringinput):
+ > assert stringinput.isalpha()
+ E AssertionError: assert False
+ E + where False = <built-in method isalpha of str object at 0xdeadbeef0001>()
+ E + where <built-in method isalpha of str object at 0xdeadbeef0001> = '!'.isalpha
+
+ test_strings.py:4: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_strings.py::test_valid_string[!] - AssertionError: assert False
+ 1 failed in 0.12s
+
+As expected our test function fails.
+
+If you don't specify a stringinput it will be skipped because
+``metafunc.parametrize()`` will be called with an empty parameter
+list:
+
+.. code-block:: pytest
+
+ $ pytest -q -rs test_strings.py
+ s [100%]
+ ========================= short test summary info ==========================
+ SKIPPED [1] test_strings.py: got empty parameter set ['stringinput'], function test_valid_string at /home/sweet/project/test_strings.py:2
+ 1 skipped in 0.12s
+
+Note that when calling ``metafunc.parametrize`` multiple times with different parameter sets, all parameter names across
+those sets must not be duplicated; otherwise an error will be raised.
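+
+For instance, a sketch of two calls with distinct parameter names (the
+fixture names are made up):
+
+.. code-block:: python
+
+    # content of conftest.py (sketch)
+    def pytest_generate_tests(metafunc):
+        if "letter" in metafunc.fixturenames:
+            metafunc.parametrize("letter", ["a", "b"])
+        if "number" in metafunc.fixturenames:
+            # a second call is fine because "number" was not parametrized above
+            metafunc.parametrize("number", [1, 2])
+
+Calling ``metafunc.parametrize("letter", ...)`` a second time for the same
+test would raise an error instead.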
+
+More examples
+-------------
+
+For further examples, you might want to look at :ref:`more
+parametrization examples <paramexamples>`.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/plugins.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/plugins.rst
new file mode 100644
index 0000000000..cae737e96e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/plugins.rst
@@ -0,0 +1,136 @@
+.. _`external plugins`:
+.. _`extplugins`:
+.. _`using plugins`:
+
+How to install and use plugins
+===============================
+
+This section talks about installing and using third party plugins.
+For writing your own plugins, please refer to :ref:`writing-plugins`.
+
+Installing a third party plugin can be easily done with ``pip``:
+
+.. code-block:: bash
+
+ pip install pytest-NAME
+ pip uninstall pytest-NAME
+
+If a plugin is installed, ``pytest`` automatically finds and integrates it;
+there is no need to activate it.
+
+Here is a little annotated list of some popular plugins:
+
+* :pypi:`pytest-django`: write tests
+ for :std:doc:`django <django:index>` apps, using pytest integration.
+
+* :pypi:`pytest-twisted`: write tests
+ for `twisted <https://twistedmatrix.com/>`_ apps, starting a reactor and
+ processing deferreds from test functions.
+
+* :pypi:`pytest-cov`:
+ coverage reporting, compatible with distributed testing
+
+* :pypi:`pytest-xdist`:
+  to distribute tests to CPUs and remote hosts, to run in boxed
+  mode which allows surviving segmentation faults, and to run in
+  looponfailing mode, automatically re-running failing tests
+  on file changes.
+
+* :pypi:`pytest-instafail`:
+ to report failures while the test run is happening.
+
+* :pypi:`pytest-bdd`:
+ to write tests using behaviour-driven testing.
+
+* :pypi:`pytest-timeout`:
+ to timeout tests based on function marks or global definitions.
+
+* :pypi:`pytest-pep8`:
+ a ``--pep8`` option to enable PEP8 compliance checking.
+
+* :pypi:`pytest-flakes`:
+ check source code with pyflakes.
+
+* :pypi:`oejskit`:
+  a plugin to run JavaScript unit tests in live browsers.
+
+To see a complete list of all plugins with their latest testing
+status against different pytest and Python versions, please visit
+:ref:`plugin-list`.
+
+You may also discover more plugins through a `pytest- pypi.org search`_.
+
+.. _`pytest- pypi.org search`: https://pypi.org/search/?q=pytest-
+
+
+.. _`available installable plugins`:
+
+Requiring/Loading plugins in a test module or conftest file
+-----------------------------------------------------------
+
+You can require plugins in a test module or a conftest file using :globalvar:`pytest_plugins`:
+
+.. code-block:: python
+
+ pytest_plugins = ("myapp.testsupport.myplugin",)
+
+When the test module or conftest plugin is loaded, the specified plugins
+will be loaded as well.
+
+.. note::
+
+ Requiring plugins using a ``pytest_plugins`` variable in non-root
+ ``conftest.py`` files is deprecated. See
+ :ref:`full explanation <requiring plugins in non-root conftests>`
+ in the Writing plugins section.
+
+.. note::
+ The name ``pytest_plugins`` is reserved and should not be used as a
+ name for a custom plugin module.
+
+
+.. _`findpluginname`:
+
+Finding out which plugins are active
+------------------------------------
+
+If you want to find out which plugins are active in your
+environment you can type:
+
+.. code-block:: bash
+
+ pytest --trace-config
+
+and you will get an extended test header which shows activated plugins
+and their names. It will also print local plugins, aka
+:ref:`conftest.py <conftest.py plugins>` files, when they are loaded.
+
+.. _`cmdunregister`:
+
+Deactivating / unregistering a plugin by name
+---------------------------------------------
+
+You can prevent plugins from loading or unregister them:
+
+.. code-block:: bash
+
+ pytest -p no:NAME
+
+This means that any subsequent attempt to activate/load the named
+plugin will not work.
+
+If you want to unconditionally disable a plugin for a project, you can add
+this option to your ``pytest.ini`` file:
+
+.. code-block:: ini
+
+ [pytest]
+ addopts = -p no:NAME
+
+Alternatively, to disable it only in certain environments (for example in a
+CI server), you can set the ``PYTEST_ADDOPTS`` environment variable to
+``-p no:name``.
+
+See :ref:`findpluginname` for how to obtain the name of a plugin.
+
+.. _`builtin plugins`:
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/skipping.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/skipping.rst
new file mode 100644
index 0000000000..e2f59c77ae
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/skipping.rst
@@ -0,0 +1,430 @@
+.. _`skip and xfail`:
+
+.. _skipping:
+
+How to use skip and xfail to deal with tests that cannot succeed
+=================================================================
+
+You can mark test functions that cannot be run on certain platforms
+or that you expect to fail so pytest can deal with them accordingly and
+present a summary of the test session, while keeping the test suite *green*.
+
+A **skip** means that you expect your test to pass only if some conditions are met,
+otherwise pytest should skip running the test altogether. Common examples are skipping
+windows-only tests on non-windows platforms, or skipping tests that depend on an external
+resource which is not available at the moment (for example a database).
+
+An **xfail** means that you expect a test to fail for some reason.
+A common example is a test for a feature not yet implemented, or a bug not yet fixed.
+When a test passes despite being expected to fail (marked with ``pytest.mark.xfail``),
+it's an **xpass** and will be reported in the test summary.
+
+``pytest`` counts and lists *skip* and *xfail* tests separately. Detailed
+information about skipped/xfailed tests is not shown by default to avoid
+cluttering the output. You can use the ``-r`` option to see details
+corresponding to the "short" letters shown in the test progress:
+
+.. code-block:: bash
+
+ pytest -rxXs # show extra info on xfailed, xpassed, and skipped tests
+
+More details on the ``-r`` option can be found by running ``pytest -h``.
+
+(See :ref:`how to change command line options defaults`)
+
+.. _skipif:
+.. _skip:
+.. _`condition booleans`:
+
+Skipping test functions
+-----------------------
+
+
+
+The simplest way to skip a test function is to mark it with the ``skip`` decorator
+which may be passed an optional ``reason``:
+
+.. code-block:: python
+
+ @pytest.mark.skip(reason="no way of currently testing this")
+ def test_the_unknown():
+ ...
+
+
+Alternatively, it is also possible to skip imperatively during test execution or setup
+by calling the ``pytest.skip(reason)`` function:
+
+.. code-block:: python
+
+ def test_function():
+ if not valid_config():
+ pytest.skip("unsupported configuration")
+
+The imperative method is useful when it is not possible to evaluate the skip condition
+during import time.
+
+It is also possible to skip the whole module using
+``pytest.skip(reason, allow_module_level=True)`` at the module level:
+
+.. code-block:: python
+
+ import sys
+ import pytest
+
+ if not sys.platform.startswith("win"):
+ pytest.skip("skipping windows-only tests", allow_module_level=True)
+
+
+**Reference**: :ref:`pytest.mark.skip ref`
+
+``skipif``
+~~~~~~~~~~
+
+
+
+If you wish to skip something conditionally then you can use ``skipif`` instead.
+Here is an example of marking a test function to be skipped
+when run on an interpreter earlier than Python 3.7:
+
+.. code-block:: python
+
+ import sys
+
+
+ @pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python3.7 or higher")
+ def test_function():
+ ...
+
+If the condition evaluates to ``True`` during collection, the test function will be skipped,
+with the specified reason appearing in the summary when using ``-rs``.
+
+You can share ``skipif`` markers between modules. Consider this test module:
+
+.. code-block:: python
+
+    # content of test_mymodule.py
+    import pytest
+
+    import mymodule
+
+ minversion = pytest.mark.skipif(
+ mymodule.__versioninfo__ < (1, 1), reason="at least mymodule-1.1 required"
+ )
+
+
+ @minversion
+ def test_function():
+ ...
+
+You can import the marker and reuse it in another test module:
+
+.. code-block:: python
+
+ # test_myothermodule.py
+ from test_mymodule import minversion
+
+
+ @minversion
+ def test_anotherfunction():
+ ...
+
+For larger test suites it's usually a good idea to have one file
+where you define the markers which you then consistently apply
+throughout your test suite.
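+
+As an illustration, such a shared file could look like the following minimal
+sketch (the module name ``markers.py`` and the concrete conditions are made up
+for this example):
+
+.. code-block:: python
+
+    # content of markers.py (hypothetical shared module)
+    import sys
+
+    import pytest
+
+    # define commonly used skip conditions in one place
+    win_only = pytest.mark.skipif(sys.platform != "win32", reason="windows-only test")
+    py38_plus = pytest.mark.skipif(sys.version_info < (3, 8), reason="requires python3.8 or higher")
+
+Test modules then import and apply these markers, e.g.
+``from markers import win_only`` followed by ``@win_only`` on a test function.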
+
+Alternatively, you can use :ref:`condition strings
+<string conditions>` instead of booleans, but they can't be shared between modules easily
+so they are supported mainly for backward compatibility reasons.
+
+**Reference**: :ref:`pytest.mark.skipif ref`
+
+
+Skip all test functions of a class or module
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can use the ``skipif`` marker (as any other marker) on classes:
+
+.. code-block:: python
+
+ @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+ class TestPosixCalls:
+ def test_function(self):
+ "will not be setup or run under 'win32' platform"
+
+If the condition is ``True``, this marker will produce a skip result for
+each of the test methods of that class.
+
+If you want to skip all test functions of a module, you may use the
+:globalvar:`pytestmark` global:
+
+.. code-block:: python
+
+ # test_module.py
+ pytestmark = pytest.mark.skipif(...)
+
+If multiple ``skipif`` decorators are applied to a test function, it
+will be skipped if any of the skip conditions is true.
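+
+For example, a test carrying both of these (purely illustrative) conditions is
+skipped as soon as either one of them holds:
+
+.. code-block:: python
+
+    import sys
+
+    import pytest
+
+
+    @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+    @pytest.mark.skipif(sys.version_info < (3, 8), reason="requires python3.8 or higher")
+    def test_function():
+        ...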
+
+.. _`whole class- or module level`: mark.html#scoped-marking
+
+
+Skipping files or directories
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Sometimes you may need to skip an entire file or directory, for example if the
+tests rely on Python version-specific features or contain code that you do not
+wish pytest to run. In this case, you must exclude the files and directories
+from collection. Refer to :ref:`customizing-test-collection` for more
+information.
+
+
+Skipping on a missing import dependency
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can skip tests on a missing import by using :ref:`pytest.importorskip ref`
+at module level, within a test, or in a test setup function.
+
+.. code-block:: python
+
+ docutils = pytest.importorskip("docutils")
+
+If ``docutils`` cannot be imported here, the test will be skipped.
+You can also skip based on the version number of a library:
+
+.. code-block:: python
+
+ docutils = pytest.importorskip("docutils", minversion="0.3")
+
+The version will be read from the specified
+module's ``__version__`` attribute.
+
+Summary
+~~~~~~~
+
+Here's a quick guide on how to skip tests in a module in different situations:
+
+1. Skip all tests in a module unconditionally:
+
+ .. code-block:: python
+
+ pytestmark = pytest.mark.skip("all tests still WIP")
+
+2. Skip all tests in a module based on some condition:
+
+ .. code-block:: python
+
+ pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="tests for linux only")
+
+3. Skip all tests in a module if some import is missing:
+
+ .. code-block:: python
+
+ pexpect = pytest.importorskip("pexpect")
+
+
+.. _xfail:
+
+XFail: mark test functions as expected to fail
+----------------------------------------------
+
+You can use the ``xfail`` marker to indicate that you
+expect a test to fail:
+
+.. code-block:: python
+
+ @pytest.mark.xfail
+ def test_function():
+ ...
+
+This test will run but no traceback will be reported when it fails. Instead, terminal
+reporting will list it in the "expected to fail" (``XFAIL``) or "unexpectedly
+passing" (``XPASS``) sections.
+
+Alternatively, you can also mark a test as ``XFAIL`` from within the test or its setup function
+imperatively:
+
+.. code-block:: python
+
+ def test_function():
+ if not valid_config():
+ pytest.xfail("failing configuration (but should work)")
+
+.. code-block:: python
+
+ def test_function2():
+ import slow_module
+
+ if slow_module.slow_function():
+ pytest.xfail("slow_module taking too long")
+
+These two examples illustrate situations where you don't want to check for a condition
+at the module level, which is when conditions for marks would otherwise be evaluated.
+
+This will make ``test_function`` ``XFAIL``. Note that, unlike the marker, no other code
+is executed after the :func:`pytest.xfail` call. That's because it is implemented
+internally by raising a known exception.
+
+**Reference**: :ref:`pytest.mark.xfail ref`
+
+
+``condition`` parameter
+~~~~~~~~~~~~~~~~~~~~~~~
+
+If a test is only expected to fail under a certain condition, you can pass
+that condition as the first parameter:
+
+.. code-block:: python
+
+ @pytest.mark.xfail(sys.platform == "win32", reason="bug in a 3rd party library")
+ def test_function():
+ ...
+
+Note that you have to pass a reason as well (see the parameter description at
+:ref:`pytest.mark.xfail ref`).
+
+``reason`` parameter
+~~~~~~~~~~~~~~~~~~~~
+
+You can specify the motive of an expected failure with the ``reason`` parameter:
+
+.. code-block:: python
+
+ @pytest.mark.xfail(reason="known parser issue")
+ def test_function():
+ ...
+
+
+``raises`` parameter
+~~~~~~~~~~~~~~~~~~~~
+
+If you want to be more specific as to why the test is failing, you can specify
+a single exception, or a tuple of exceptions, in the ``raises`` argument.
+
+.. code-block:: python
+
+ @pytest.mark.xfail(raises=RuntimeError)
+ def test_function():
+ ...
+
+Then the test will be reported as a regular failure if it fails with an
+exception not mentioned in ``raises``.
+
+``run`` parameter
+~~~~~~~~~~~~~~~~~
+
+If a test should be marked as xfail and reported as such but should not even
+be executed, set the ``run`` parameter to ``False``:
+
+.. code-block:: python
+
+ @pytest.mark.xfail(run=False)
+ def test_function():
+ ...
+
+This is especially useful for xfailing tests that crash the interpreter and should be
+investigated later.
+
+.. _`xfail strict tutorial`:
+
+``strict`` parameter
+~~~~~~~~~~~~~~~~~~~~
+
+Both ``XFAIL`` and ``XPASS`` don't fail the test suite by default.
+You can change this by setting the ``strict`` keyword-only parameter to ``True``:
+
+.. code-block:: python
+
+ @pytest.mark.xfail(strict=True)
+ def test_function():
+ ...
+
+
+This will make ``XPASS`` ("unexpectedly passing") results from this test fail the test suite.
+
+You can change the default value of the ``strict`` parameter using the
+``xfail_strict`` ini option:
+
+.. code-block:: ini
+
+ [pytest]
+ xfail_strict=true
+
+
+Ignoring xfail
+~~~~~~~~~~~~~~
+
+By specifying the following on the command line:
+
+.. code-block:: bash
+
+ pytest --runxfail
+
+you can force the running and reporting of an ``xfail`` marked test
+as if it weren't marked at all. This also causes :func:`pytest.xfail` to produce no effect.
+
+Examples
+~~~~~~~~
+
+Here is a simple test file demonstrating several of these usages:
+
+.. literalinclude:: /example/xfail_demo.py
+
+Running it with the report-on-xfail option gives this output:
+
+.. FIXME: Use $ instead of ! again to re-enable regendoc once it's fixed:
+ https://github.com/pytest-dev/pytest/issues/8807
+
+.. code-block:: pytest
+
+ ! pytest -rx xfail_demo.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-6.x.y, py-1.x.y, pluggy-1.x.y
+ cachedir: $PYTHON_PREFIX/.pytest_cache
+ rootdir: $REGENDOC_TMPDIR/example
+ collected 7 items
+
+ xfail_demo.py xxxxxxx [100%]
+
+ ========================= short test summary info ==========================
+ XFAIL xfail_demo.py::test_hello
+ XFAIL xfail_demo.py::test_hello2
+ reason: [NOTRUN]
+ XFAIL xfail_demo.py::test_hello3
+ condition: hasattr(os, 'sep')
+ XFAIL xfail_demo.py::test_hello4
+ bug 110
+ XFAIL xfail_demo.py::test_hello5
+ condition: pytest.__version__[0] != "17"
+ XFAIL xfail_demo.py::test_hello6
+ reason: reason
+ XFAIL xfail_demo.py::test_hello7
+ ============================ 7 xfailed in 0.12s ============================
+
+.. _`skip/xfail with parametrize`:
+
+Skip/xfail with parametrize
+---------------------------
+
+It is possible to apply markers like skip and xfail to individual
+test instances when using parametrize:
+
+.. code-block:: python
+
+ import sys
+ import pytest
+
+
+ @pytest.mark.parametrize(
+ ("n", "expected"),
+ [
+ (1, 2),
+ pytest.param(1, 0, marks=pytest.mark.xfail),
+ pytest.param(1, 3, marks=pytest.mark.xfail(reason="some bug")),
+ (2, 3),
+ (3, 4),
+ (4, 5),
+ pytest.param(
+ 10, 11, marks=pytest.mark.skipif(sys.version_info >= (3, 0), reason="py2k")
+ ),
+ ],
+ )
+ def test_increment(n, expected):
+ assert n + 1 == expected
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/tmp_path.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/tmp_path.rst
new file mode 100644
index 0000000000..ebd74d42e9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/tmp_path.rst
@@ -0,0 +1,139 @@
+
+.. _`tmp_path handling`:
+.. _tmp_path:
+
+How to use temporary directories and files in tests
+===================================================
+
+The ``tmp_path`` fixture
+------------------------
+
+You can use the ``tmp_path`` fixture which will
+provide a temporary directory unique to the test invocation,
+created in the `base temporary directory`_.
+
+``tmp_path`` is a :class:`pathlib.Path` object. Here is an example test usage:
+
+.. code-block:: python
+
+ # content of test_tmp_path.py
+ CONTENT = "content"
+
+
+ def test_create_file(tmp_path):
+ d = tmp_path / "sub"
+ d.mkdir()
+ p = d / "hello.txt"
+ p.write_text(CONTENT)
+ assert p.read_text() == CONTENT
+ assert len(list(tmp_path.iterdir())) == 1
+ assert 0
+
+Running this would result in a passed test except for the last
+``assert 0`` line, which we use to look at the values:
+
+.. code-block:: pytest
+
+ $ pytest test_tmp_path.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 1 item
+
+ test_tmp_path.py F [100%]
+
+ ================================= FAILURES =================================
+ _____________________________ test_create_file _____________________________
+
+ tmp_path = PosixPath('PYTEST_TMPDIR/test_create_file0')
+
+ def test_create_file(tmp_path):
+ d = tmp_path / "sub"
+ d.mkdir()
+ p = d / "hello.txt"
+ p.write_text(CONTENT)
+ assert p.read_text() == CONTENT
+ assert len(list(tmp_path.iterdir())) == 1
+ > assert 0
+ E assert 0
+
+ test_tmp_path.py:11: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_tmp_path.py::test_create_file - assert 0
+ ============================ 1 failed in 0.12s =============================
+
+.. _`tmp_path_factory example`:
+
+The ``tmp_path_factory`` fixture
+--------------------------------
+
+The ``tmp_path_factory`` is a session-scoped fixture which can be used
+to create arbitrary temporary directories from any other fixture or test.
+
+For example, suppose your test suite needs a large image on disk, which is
+generated procedurally. Instead of computing the same image for each test
+into that test's own ``tmp_path``, you can generate it once per session
+to save time:
+
+.. code-block:: python
+
+ # contents of conftest.py
+ import pytest
+
+
+ @pytest.fixture(scope="session")
+ def image_file(tmp_path_factory):
+ img = compute_expensive_image()
+ fn = tmp_path_factory.mktemp("data") / "img.png"
+ img.save(fn)
+ return fn
+
+
+ # contents of test_image.py
+ def test_histogram(image_file):
+ img = load_image(image_file)
+ # compute and test histogram
+
+See :ref:`tmp_path_factory API <tmp_path_factory factory api>` for details.
+
+.. _`tmpdir and tmpdir_factory`:
+.. _tmpdir:
+
+The ``tmpdir`` and ``tmpdir_factory`` fixtures
+---------------------------------------------------
+
+The ``tmpdir`` and ``tmpdir_factory`` fixtures are similar to ``tmp_path``
+and ``tmp_path_factory``, but use/return legacy `py.path.local`_ objects
+rather than standard :class:`pathlib.Path` objects. These days, prefer to
+use ``tmp_path`` and ``tmp_path_factory``.
+
+See the :fixture:`tmpdir <tmpdir>` and :fixture:`tmpdir_factory <tmpdir_factory>`
+API for details.
+
+
+.. _`base temporary directory`:
+
+The default base temporary directory
+-----------------------------------------------
+
+Temporary directories are by default created as sub-directories of
+the system temporary directory. The base name will be ``pytest-NUM``, where
+``NUM`` is incremented with each test run. Moreover, only the 3 most recent
+base temporary directories are kept; older entries are removed.
+
+You can override the default temporary directory setting like this:
+
+.. code-block:: bash
+
+ pytest --basetemp=mydir
+
+.. warning::
+
+ The contents of ``mydir`` will be completely removed, so make sure to use a directory
+ for that purpose only.
+
+When distributing tests on the local machine using ``pytest-xdist``, care is taken to
+automatically configure a basetemp directory for the subprocesses such that all temporary
+data lands below a single per-test-run basetemp directory.
+
+.. _`py.path.local`: https://py.readthedocs.io/en/latest/path.html
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/unittest.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/unittest.rst
new file mode 100644
index 0000000000..bff7511077
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/unittest.rst
@@ -0,0 +1,251 @@
+
+.. _`unittest.TestCase`:
+.. _`unittest`:
+
+How to use ``unittest``-based tests with pytest
+===============================================
+
+``pytest`` supports running Python ``unittest``-based tests out of the box.
+It's meant for leveraging existing ``unittest``-based test suites
+to use pytest as a test runner, and also allows incrementally adapting
+the test suite to take full advantage of pytest's features.
+
+To run an existing ``unittest``-style test suite using ``pytest``, type:
+
+.. code-block:: bash
+
+ pytest tests
+
+
+pytest will automatically collect ``unittest.TestCase`` subclasses and
+their ``test`` methods in ``test_*.py`` or ``*_test.py`` files.
+
+Almost all ``unittest`` features are supported:
+
+* ``@unittest.skip`` style decorators;
+* ``setUp/tearDown``;
+* ``setUpClass/tearDownClass``;
+* ``setUpModule/tearDownModule``;
+
+.. _`load_tests protocol`: https://docs.python.org/3/library/unittest.html#load-tests-protocol
+
+pytest does not yet have support for the following features:
+
+* `load_tests protocol`_;
+* :ref:`subtests <python:subtests>`;
+
+Benefits out of the box
+-----------------------
+
+By running your test suite with pytest you can make use of several features,
+in most cases without having to modify existing code:
+
+* Obtain :ref:`more informative tracebacks <tbreportdemo>`;
+* :ref:`stdout and stderr <captures>` capturing;
+* :ref:`Test selection options <select-tests>` using ``-k`` and ``-m`` flags;
+* :ref:`maxfail`;
+* :ref:`--pdb <pdb-option>` command-line option for debugging on test failures
+ (see :ref:`note <pdb-unittest-note>` below);
+* Distribute tests to multiple CPUs using the :pypi:`pytest-xdist` plugin;
+* Use :ref:`plain assert-statements <assert>` instead of ``self.assert*`` functions
+ (:pypi:`unittest2pytest` is immensely helpful in this);
+
+
+pytest features in ``unittest.TestCase`` subclasses
+---------------------------------------------------
+
+The following pytest features work in ``unittest.TestCase`` subclasses:
+
+* :ref:`Marks <mark>`: :ref:`skip <skip>`, :ref:`skipif <skipif>`, :ref:`xfail <xfail>`;
+* :ref:`Auto-use fixtures <mixing-fixtures>`;
+
+The following pytest features **do not** work, and probably
+never will due to different design philosophies:
+
+* :ref:`Fixtures <fixture>` (except for ``autouse`` fixtures, see :ref:`below <mixing-fixtures>`);
+* :ref:`Parametrization <parametrize>`;
+* :ref:`Custom hooks <writing-plugins>`;
+
+
+Third party plugins may or may not work well, depending on the plugin and the test suite.
+
+.. _mixing-fixtures:
+
+Mixing pytest fixtures into ``unittest.TestCase`` subclasses using marks
+------------------------------------------------------------------------
+
+Running your unittest with ``pytest`` allows you to use its
+:ref:`fixture mechanism <fixture>` with ``unittest.TestCase`` style
+tests. Assuming you have at least skimmed the pytest fixture features,
+let's jump-start into an example that integrates a pytest ``db_class``
+fixture, setting up a class-cached database object, and then referencing
+it from a unittest-style test:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+ # we define a fixture function below and it will be "used" by
+ # referencing its name from tests
+
+ import pytest
+
+
+ @pytest.fixture(scope="class")
+ def db_class(request):
+ class DummyDB:
+ pass
+
+ # set a class attribute on the invoking test context
+ request.cls.db = DummyDB()
+
+This defines a fixture function ``db_class`` which - if used - is
+called once for each test class and which sets the class-level
+``db`` attribute to a ``DummyDB`` instance. The fixture function
+achieves this by receiving a special ``request`` object which gives
+access to :ref:`the requesting test context <request-context>` such
+as the ``cls`` attribute, denoting the class from which the fixture
+is used. This architecture decouples fixture writing from actual test
+code and allows reuse of the fixture by a minimal reference, the fixture
+name. So let's write an actual ``unittest.TestCase`` class using our
+fixture definition:
+
+.. code-block:: python
+
+ # content of test_unittest_db.py
+
+ import unittest
+ import pytest
+
+
+ @pytest.mark.usefixtures("db_class")
+ class MyTest(unittest.TestCase):
+ def test_method1(self):
+ assert hasattr(self, "db")
+ assert 0, self.db # fail for demo purposes
+
+ def test_method2(self):
+ assert 0, self.db # fail for demo purposes
+
+The ``@pytest.mark.usefixtures("db_class")`` class-decorator makes sure that
+the pytest fixture function ``db_class`` is called once per class.
+Due to the deliberately failing assert statements, we can take a look at
+the ``self.db`` values in the traceback:
+
+.. code-block:: pytest
+
+ $ pytest test_unittest_db.py
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 2 items
+
+ test_unittest_db.py FF [100%]
+
+ ================================= FAILURES =================================
+ ___________________________ MyTest.test_method1 ____________________________
+
+ self = <test_unittest_db.MyTest testMethod=test_method1>
+
+ def test_method1(self):
+ assert hasattr(self, "db")
+ > assert 0, self.db # fail for demo purposes
+ E AssertionError: <conftest.db_class.<locals>.DummyDB object at 0xdeadbeef0001>
+ E assert 0
+
+ test_unittest_db.py:10: AssertionError
+ ___________________________ MyTest.test_method2 ____________________________
+
+ self = <test_unittest_db.MyTest testMethod=test_method2>
+
+ def test_method2(self):
+ > assert 0, self.db # fail for demo purposes
+ E AssertionError: <conftest.db_class.<locals>.DummyDB object at 0xdeadbeef0001>
+ E assert 0
+
+ test_unittest_db.py:13: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_unittest_db.py::MyTest::test_method1 - AssertionError: <conft...
+ FAILED test_unittest_db.py::MyTest::test_method2 - AssertionError: <conft...
+ ============================ 2 failed in 0.12s =============================
+
+This default pytest traceback shows that the two test methods
+share the same ``self.db`` instance which was our intention
+when writing the class-scoped fixture function above.
+
+
+Using autouse fixtures and accessing other fixtures
+---------------------------------------------------
+
+Although it's usually better to explicitly declare use of fixtures you need
+for a given test, you may sometimes want to have fixtures that are
+automatically used in a given context. After all, the traditional
+style of unittest setup mandates this kind of implicit fixture writing,
+and chances are you are used to it or like it.
+
+You can flag fixture functions with ``@pytest.fixture(autouse=True)``
+and define the fixture function in the context where you want it used.
+Let's look at an ``initdir`` fixture which makes all test methods of a
+``TestCase`` class execute in a temporary directory with a
+pre-initialized ``samplefile.ini``. Our ``initdir`` fixture itself uses
+the pytest builtin :fixture:`tmp_path` fixture to delegate the
+creation of a per-test temporary directory:
+
+.. code-block:: python
+
+ # content of test_unittest_cleandir.py
+ import os
+ import pytest
+ import unittest
+
+
+ class MyTest(unittest.TestCase):
+ @pytest.fixture(autouse=True)
+ def initdir(self, tmp_path, monkeypatch):
+ monkeypatch.chdir(tmp_path) # change to pytest-provided temporary directory
+ tmp_path.joinpath("samplefile.ini").write_text("# testdata")
+
+ def test_method(self):
+ with open("samplefile.ini") as f:
+ s = f.read()
+ assert "testdata" in s
+
+Due to the ``autouse`` flag the ``initdir`` fixture function will be
+used for all methods of the class where it is defined. This is a
+shortcut for using a ``@pytest.mark.usefixtures("initdir")`` marker
+on the class like in the previous example.
+
+Running this test module ...:
+
+.. code-block:: pytest
+
+ $ pytest -q test_unittest_cleandir.py
+ . [100%]
+ 1 passed in 0.12s
+
+... gives us one passed test because the ``initdir`` fixture function
+was executed ahead of the ``test_method``.
+
+.. note::
+
+   ``unittest.TestCase`` methods cannot directly receive fixture
+   arguments, as implementing that would likely impact the ability
+   to run general ``unittest.TestCase`` test suites.
+
+   The above ``usefixtures`` and ``autouse`` examples should help to mix
+   pytest fixtures into unittest suites.
+
+ You can also gradually move away from subclassing from ``unittest.TestCase`` to *plain asserts*
+ and then start to benefit from the full pytest feature set step by step.
+
+.. _pdb-unittest-note:
+
+.. note::
+
+ Due to architectural differences between the two frameworks, setup and
+ teardown for ``unittest``-based tests is performed during the ``call`` phase
+ of testing instead of in ``pytest``'s standard ``setup`` and ``teardown``
+ stages. This can be important to understand in some situations, particularly
+ when reasoning about errors. For example, if a ``unittest``-based suite
+ exhibits errors during setup, ``pytest`` will report no errors during its
+ ``setup`` phase and will instead raise the error during ``call``.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/usage.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/usage.rst
new file mode 100644
index 0000000000..3522b258dc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/usage.rst
@@ -0,0 +1,214 @@
+
+.. _usage:
+
+How to invoke pytest
+==========================================
+
+.. seealso:: :ref:`Complete pytest command-line flag reference <command-line-flags>`
+
+In general, pytest is invoked with the command ``pytest`` (see below for :ref:`other ways to invoke pytest
+<invoke-other>`). This will execute all tests in all files whose names follow the form ``test_*.py`` or ``*_test.py``
+in the current directory and its subdirectories. More generally, pytest follows :ref:`standard test discovery rules
+<test discovery>`.
+
+
+.. _select-tests:
+
+Specifying which tests to run
+------------------------------
+
+Pytest supports several ways to run and select tests from the command-line.
+
+**Run tests in a module**
+
+.. code-block:: bash
+
+ pytest test_mod.py
+
+**Run tests in a directory**
+
+.. code-block:: bash
+
+ pytest testing/
+
+**Run tests by keyword expressions**
+
+.. code-block:: bash
+
+ pytest -k "MyClass and not method"
+
+This will run tests which contain names that match the given *string expression* (case-insensitive),
+which can include Python operators that use filenames, class names and function names as variables.
+The example above will run ``TestMyClass.test_something`` but not ``TestMyClass.test_method_simple``.
+
+.. _nodeids:
+
+**Run tests by node ids**
+
+Each collected test is assigned a unique ``nodeid`` which consists of the module filename followed
+by specifiers like class names, function names and parameters from parametrization, separated by ``::`` characters.
+
+To run a specific test within a module:
+
+.. code-block:: bash
+
+ pytest test_mod.py::test_func
+
+
+Another example specifying a test method in the command line:
+
+.. code-block:: bash
+
+ pytest test_mod.py::TestClass::test_method
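+
+Since parameters from parametrization are part of the node id as well, a single
+parametrized test case can be selected by quoting its full id (the id suffix
+``[x0]`` here is only an illustration; use the ids reported by pytest itself,
+for example via ``--collect-only``):
+
+.. code-block:: bash
+
+    pytest "test_mod.py::test_func[x0]"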
+
+**Run tests by marker expressions**
+
+.. code-block:: bash
+
+ pytest -m slow
+
+This will run all tests which are decorated with the ``@pytest.mark.slow`` decorator.
+
+For more information see :ref:`marks <mark>`.
+
+**Run tests from packages**
+
+.. code-block:: bash
+
+ pytest --pyargs pkg.testing
+
+This will import ``pkg.testing`` and use its filesystem location to find and run tests from.
+
+
+Getting help on version, option names, environment variables
+--------------------------------------------------------------
+
+.. code-block:: bash
+
+ pytest --version # shows where pytest was imported from
+ pytest --fixtures # show available builtin function arguments
+ pytest -h | --help # show help on command line and config file options
+
+
+.. _durations:
+
+Profiling test execution duration
+-------------------------------------
+
+.. versionchanged:: 6.0
+
+To get a list of the slowest 10 test durations over 1.0s long:
+
+.. code-block:: bash
+
+ pytest --durations=10 --durations-min=1.0
+
+By default, pytest will not show test durations that are too small (<0.005s) unless ``-vv`` is passed on the command-line.
+
+
+Managing loading of plugins
+-------------------------------
+
+Early loading plugins
+~~~~~~~~~~~~~~~~~~~~~~~
+
+You can early-load plugins (internal and external) explicitly in the command-line with the ``-p`` option::
+
+ pytest -p mypluginmodule
+
+The option receives a ``name`` parameter, which can be:
+
+* A full module dotted name, for example ``myproject.plugins``. This dotted name must be importable.
+* The entry-point name of a plugin. This is the name passed to ``setuptools`` when the plugin is
+ registered. For example to early-load the :pypi:`pytest-cov` plugin you can use::
+
+ pytest -p pytest_cov
+
+
+Disabling plugins
+~~~~~~~~~~~~~~~~~~
+
+To disable loading specific plugins at invocation time, use the ``-p`` option
+together with the prefix ``no:``.
+
+Example: to disable loading the plugin ``doctest``, which is responsible for
+executing doctest tests from text files, invoke pytest like this:
+
+.. code-block:: bash
+
+ pytest -p no:doctest
+
+
+.. _invoke-other:
+
+Other ways of calling pytest
+-----------------------------------------------------
+
+.. _invoke-python:
+
+Calling pytest through ``python -m pytest``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can invoke testing through the Python interpreter from the command line:
+
+.. code-block:: text
+
+ python -m pytest [...]
+
+This is almost equivalent to invoking the command line script ``pytest [...]``
+directly, except that calling via ``python`` will also add the current directory to ``sys.path``.
+
+
+.. _`pytest.main-usage`:
+
+Calling pytest from Python code
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can invoke ``pytest`` from Python code directly:
+
+.. code-block:: python
+
+ retcode = pytest.main()
+
+This acts as if you were calling "pytest" from the command line.
+It will not raise :class:`SystemExit` but return the :ref:`exit code <exit-codes>` instead.
+You can pass in options and arguments:
+
+.. code-block:: python
+
+ retcode = pytest.main(["-x", "mytestdir"])
+
+You can specify additional plugins to ``pytest.main``:
+
+.. code-block:: python
+
+ # content of myinvoke.py
+ import pytest
+ import sys
+
+
+ class MyPlugin:
+ def pytest_sessionfinish(self):
+ print("*** test run reporting finishing")
+
+
+ if __name__ == "__main__":
+ sys.exit(pytest.main(["-qq"], plugins=[MyPlugin()]))
+
+Running it will show that ``MyPlugin`` was added and its
+hook was invoked:
+
+.. code-block:: pytest
+
+ $ python myinvoke.py
+ *** test run reporting finishing
+
+
+.. note::
+
+ Calling ``pytest.main()`` will result in importing your tests and any modules
+ that they import. Due to the caching mechanism of python's import system,
+ making subsequent calls to ``pytest.main()`` from the same process will not
+ reflect changes to those files between the calls. For this reason, making
+ multiple calls to ``pytest.main()`` from the same process (in order to re-run
+ tests, for example) is not recommended.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/writing_hook_functions.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/writing_hook_functions.rst
new file mode 100644
index 0000000000..f615fced86
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/writing_hook_functions.rst
@@ -0,0 +1,352 @@
+.. _`writinghooks`:
+
+Writing hook functions
+======================
+
+
+.. _validation:
+
+hook function validation and execution
+--------------------------------------
+
+pytest calls hook functions from registered plugins for any
+given hook specification. Let's look at a typical hook function
+for the ``pytest_collection_modifyitems(session, config,
+items)`` hook which pytest calls after collection of all test items is
+completed.
+
+When we implement a ``pytest_collection_modifyitems`` function in our plugin,
+pytest will verify during registration that the argument
+names match the specification, and bail out if they don't.
+
+Let's look at a possible implementation:
+
+.. code-block:: python
+
+ def pytest_collection_modifyitems(config, items):
+ # called after collection is completed
+ # you can modify the ``items`` list
+ ...
+
+Here, ``pytest`` will pass in ``config`` (the pytest config object)
+and ``items`` (the list of collected test items) but will not pass
+in the ``session`` argument because we didn't list it in the function
+signature. This dynamic "pruning" of arguments allows ``pytest`` to
+be "future-compatible": we can introduce new hook named parameters without
+breaking the signatures of existing hook implementations. It is one of
+the reasons for the general long-lived compatibility of pytest plugins.
+
+Note that hook functions other than ``pytest_runtest_*`` are not
+allowed to raise exceptions. Doing so will break the pytest run.
+
+
+
+.. _firstresult:
+
+firstresult: stop at first non-None result
+-------------------------------------------
+
+Most calls to ``pytest`` hooks result in a **list of results** which contains
+all non-None results of the called hook functions.
+
+Some hook specifications use the ``firstresult=True`` option so that the hook
+call only executes until the first of N registered functions returns a
+non-None result, which is then taken as the result of the overall hook call.
+The remaining hook functions will not be called in this case.
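+
+As a sketch of what such a specification looks like on the declaring side (the
+hook name ``pytest_my_value`` is made up; see also the "Declaring new hooks"
+section below for how specifications are registered):
+
+.. code-block:: python
+
+    # contents of a hypothetical hookspec module
+    from pluggy import HookspecMarker
+
+    hookspec = HookspecMarker("pytest")
+
+
+    @hookspec(firstresult=True)
+    def pytest_my_value(config):
+        """Return a value for this run; only the first non-None result is used."""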
+
+.. _`hookwrapper`:
+
+hookwrapper: executing around other hooks
+-------------------------------------------------
+
+.. currentmodule:: _pytest.core
+
+
+
+pytest plugins can implement hook wrappers which wrap the execution
+of other hook implementations. A hook wrapper is a generator function
+which yields exactly once. When pytest invokes hooks it first executes
+hook wrappers and passes the same arguments as to the regular hooks.
+
+At the yield point of the hook wrapper pytest will execute the next hook
+implementations and return their result to the yield point in the form of
+a :py:class:`Result <pluggy._Result>` instance which encapsulates a result or
+exception info. The yield point itself will thus typically not raise
+exceptions (unless there are bugs).
+
+Here is an example definition of a hook wrapper:
+
+.. code-block:: python
+
+ import pytest
+
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_pyfunc_call(pyfuncitem):
+ do_something_before_next_hook_executes()
+
+ outcome = yield
+ # outcome.excinfo may be None or a (cls, val, tb) tuple
+
+ res = outcome.get_result() # will raise if outcome was exception
+
+ post_process_result(res)
+
+ outcome.force_result(new_res) # to override the return value to the plugin system
+
+Note that hook wrappers don't return results themselves; they merely
+perform tracing or other side effects around the actual hook implementations.
+If the result of the underlying hook is a mutable object, they may modify
+that result but it's probably better to avoid it.
+
+For more information, consult the
+:ref:`pluggy documentation about hookwrappers <pluggy:hookwrappers>`.
+
+.. _plugin-hookorder:
+
+Hook function ordering / call example
+-------------------------------------
+
+For any given hook specification there may be more than one
+implementation and we thus generally view ``hook`` execution as a
+``1:N`` function call where ``N`` is the number of registered functions.
+There are ways to influence if a hook implementation comes before or
+after others, i.e. the position in the ``N``-sized list of functions:
+
+.. code-block:: python
+
+ # Plugin 1
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_collection_modifyitems(items):
+ # will execute as early as possible
+ ...
+
+
+ # Plugin 2
+ @pytest.hookimpl(trylast=True)
+ def pytest_collection_modifyitems(items):
+ # will execute as late as possible
+ ...
+
+
+ # Plugin 3
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_collection_modifyitems(items):
+ # will execute even before the tryfirst one above!
+ outcome = yield
+ # will execute after all non-hookwrappers executed
+
+Here is the order of execution:
+
+1. Plugin3's pytest_collection_modifyitems is called until the yield point
+ because it is a hook wrapper.
+
+2. Plugin1's pytest_collection_modifyitems is called because it is marked
+ with ``tryfirst=True``.
+
+3. Plugin2's pytest_collection_modifyitems is called because it is marked
+ with ``trylast=True`` (but even without this mark it would come after
+ Plugin1).
+
+4. Plugin3's pytest_collection_modifyitems then executes the code after the yield
+ point. The yield receives a :py:class:`Result <pluggy._Result>` instance which encapsulates
+ the result from calling the non-wrappers. Wrappers shall not modify the result.
+
+It's also possible to use ``tryfirst`` and ``trylast`` in conjunction with
+``hookwrapper=True``, in which case they influence the ordering of hookwrappers
+among each other.
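+
+For example, a sketch of such a combination (the body is elided):
+
+.. code-block:: python
+
+    @pytest.hookimpl(hookwrapper=True, tryfirst=True)
+    def pytest_collection_modifyitems(items):
+        # this wrapper is ordered before other hook wrappers for the same hook
+        outcome = yield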
+
+
+Declaring new hooks
+------------------------
+
+.. note::
+
+ This is a quick overview on how to add new hooks and how they work in general, but a more complete
+ overview can be found in `the pluggy documentation <https://pluggy.readthedocs.io/en/latest/>`__.
+
+.. currentmodule:: _pytest.hookspec
+
+Plugins and ``conftest.py`` files may declare new hooks that can then be
+implemented by other plugins in order to alter behaviour or interact with
+the new plugin:
+
+.. autofunction:: pytest_addhooks
+ :noindex:
+
+Hooks are usually declared as do-nothing functions that contain only
+documentation describing when the hook will be called and what return values
+are expected. The names of the functions must start with ``pytest_``, otherwise pytest won't recognize them.
+
+Here's an example. Let's assume this code is in the ``sample_hook.py`` module.
+
+.. code-block:: python
+
+ def pytest_my_hook(config):
+ """
+ Receives the pytest config and does things with it
+ """
+
+To register the hooks with pytest they need to be structured in their own module or class. This
+class or module can then be passed to the ``pluginmanager`` using the ``pytest_addhooks`` function
+(which itself is a hook exposed by pytest).
+
+.. code-block:: python
+
+ def pytest_addhooks(pluginmanager):
+ """ This example assumes the hooks are grouped in the 'sample_hook' module. """
+ from my_app.tests import sample_hook
+
+ pluginmanager.add_hookspecs(sample_hook)
+
+For a real world example, see `newhooks.py`_ from `xdist <https://github.com/pytest-dev/pytest-xdist>`_.
+
+.. _`newhooks.py`: https://github.com/pytest-dev/pytest-xdist/blob/974bd566c599dc6a9ea291838c6f226197208b46/xdist/newhooks.py
+
+Hooks may be called from fixtures as well as from other hooks. In both cases, hooks are called
+through the ``hook`` object, available in the ``config`` object. Most hooks receive a
+``config`` object directly, while fixtures may use the ``pytestconfig`` fixture which provides the same object.
+
+.. code-block:: python
+
+ @pytest.fixture()
+ def my_fixture(pytestconfig):
+ # call the hook called "pytest_my_hook"
+ # 'result' will be a list of return values from all registered functions.
+ result = pytestconfig.hook.pytest_my_hook(config=pytestconfig)
+
+.. note::
+ Hooks receive parameters using only keyword arguments.
+
+Now your hook is ready to be used. To provide an implementation for the hook, other plugins or users
+simply define the function ``pytest_my_hook`` with the correct signature in their ``conftest.py``.
+
+Example:
+
+.. code-block:: python
+
+ def pytest_my_hook(config):
+ """
+ Print all active hooks to the screen.
+ """
+ print(config.hook)
+
+
+.. _`addoptionhooks`:
+
+
+Using hooks in pytest_addoption
+-------------------------------
+
+Occasionally, it is necessary to change the way in which command line options
+are defined by one plugin based on hooks in another plugin. For example,
+a plugin may expose a command line option for which another plugin needs
+to define the default value. The pluginmanager can be used to install and
+use hooks to accomplish this. The plugin would define and add the hooks
+and use pytest_addoption as follows:
+
+.. code-block:: python
+
+ # contents of hooks.py
+
+ # Use firstresult=True because we only want one plugin to define this
+ # default value
+ @hookspec(firstresult=True)
+ def pytest_config_file_default_value():
+ """ Return the default value for the config file command line option. """
+
+
+ # contents of myplugin.py
+
+
+ def pytest_addhooks(pluginmanager):
+ """ This example assumes the hooks are grouped in the 'hooks' module. """
+ from . import hooks
+
+ pluginmanager.add_hookspecs(hooks)
+
+
+ def pytest_addoption(parser, pluginmanager):
+ default_value = pluginmanager.hook.pytest_config_file_default_value()
+ parser.addoption(
+ "--config-file",
+ help="Config file to use, defaults to %(default)s",
+ default=default_value,
+ )
+
+The conftest.py that is using myplugin would simply define the hook as follows:
+
+.. code-block:: python
+
+ def pytest_config_file_default_value():
+ return "config.yaml"
+
+
+Optionally using hooks from 3rd party plugins
+---------------------------------------------
+
+Using new hooks from plugins as explained above might be a little tricky
+because of the standard :ref:`validation mechanism <validation>`:
+if you depend on a plugin that is not installed, validation will fail and
+the error message will not make much sense to your users.
+
+One approach is to defer the hook implementation to a new plugin instead of
+declaring the hook functions directly in your plugin module, for example:
+
+.. code-block:: python
+
+ # contents of myplugin.py
+
+
+ class DeferPlugin:
+ """Simple plugin to defer pytest-xdist hook functions."""
+
+ def pytest_testnodedown(self, node, error):
+ """standard xdist hook function."""
+
+
+ def pytest_configure(config):
+ if config.pluginmanager.hasplugin("xdist"):
+ config.pluginmanager.register(DeferPlugin())
+
+This has the added benefit of allowing you to conditionally install hooks
+depending on which plugins are installed.
+
+.. _plugin-stash:
+
+Storing data on items across hook functions
+-------------------------------------------
+
+Plugins often need to store data on :class:`~pytest.Item`\s in one hook
+implementation, and access it in another. One common solution is to just
+assign some private attribute directly on the item, but type-checkers like
+mypy frown upon this, and it may also cause conflicts with other plugins.
+So pytest offers a better way to do this, :attr:`item.stash <_pytest.nodes.Node.stash>`.
+
+To use the "stash" in your plugins, first create "stash keys" somewhere at the
+top level of your plugin:
+
+.. code-block:: python
+
+ been_there_key = pytest.StashKey[bool]()
+ done_that_key = pytest.StashKey[str]()
+
+then use the keys to stash your data at some point:
+
+.. code-block:: python
+
+ def pytest_runtest_setup(item: pytest.Item) -> None:
+ item.stash[been_there_key] = True
+ item.stash[done_that_key] = "no"
+
+and retrieve them at another point:
+
+.. code-block:: python
+
+ def pytest_runtest_teardown(item: pytest.Item) -> None:
+ if not item.stash[been_there_key]:
+ print("Oh?")
+ item.stash[done_that_key] = "yes!"
+
+Stashes are available on all node types (like :class:`~pytest.Class`,
+:class:`~pytest.Session`) and also on :class:`~pytest.Config`, if needed.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/writing_plugins.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/writing_plugins.rst
new file mode 100644
index 0000000000..b2d2b6563d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/writing_plugins.rst
@@ -0,0 +1,458 @@
+.. _plugins:
+.. _`writing-plugins`:
+
+Writing plugins
+===============
+
+It is easy to implement `local conftest plugins`_ for your own project
+or `pip-installable plugins`_ that can be used throughout many projects,
+including third party projects. Please refer to :ref:`using plugins` if you
+only want to use but not write plugins.
+
+A plugin contains one or multiple hook functions. :ref:`Writing hooks <writinghooks>`
+explains the basics and details of how you can write a hook function yourself.
+``pytest`` implements all aspects of configuration, collection, running and
+reporting by calling :ref:`well specified hooks <hook-reference>` of the following plugins:
+
+* builtin plugins: loaded from pytest's internal ``_pytest`` directory.
+
+* :ref:`external plugins <extplugins>`: modules discovered through
+ `setuptools entry points`_
+
+* `conftest.py plugins`_: modules auto-discovered in test directories
+
+In principle, each hook call is a ``1:N`` Python function call where ``N`` is the
+number of registered implementation functions for a given specification.
+All specifications and implementations follow the ``pytest_`` prefix
+naming convention, making them easy to distinguish and find.
+
+.. _`pluginorder`:
+
+Plugin discovery order at tool startup
+--------------------------------------
+
+``pytest`` loads plugin modules at tool startup in the following way:
+
+1. by scanning the command line for the ``-p no:name`` option
+ and *blocking* that plugin from being loaded (even builtin plugins can
+ be blocked this way). This happens before normal command-line parsing.
+
+2. by loading all builtin plugins.
+
+3. by scanning the command line for the ``-p name`` option
+ and loading the specified plugin. This happens before normal command-line parsing.
+
+4. by loading all plugins registered through `setuptools entry points`_.
+
+5. by loading all plugins specified through the :envvar:`PYTEST_PLUGINS` environment variable.
+
+6. by loading all :file:`conftest.py` files as inferred by the command line
+ invocation:
+
+ - if no test paths are specified, use the current dir as a test path
+ - if exists, load ``conftest.py`` and ``test*/conftest.py`` relative
+ to the directory part of the first test path. After the ``conftest.py``
+ file is loaded, load all plugins specified in its
+ :globalvar:`pytest_plugins` variable if present.
+
+ Note that pytest does not find ``conftest.py`` files in deeper nested
+ sub directories at tool startup. It is usually a good idea to keep
+ your ``conftest.py`` file in the top level test or project root directory.
+
+7. by recursively loading all plugins specified by the
+ :globalvar:`pytest_plugins` variable in ``conftest.py`` files.
+
+
+.. _`pytest/plugin`: http://bitbucket.org/pytest-dev/pytest/src/tip/pytest/plugin/
+.. _`conftest.py plugins`:
+.. _`localplugin`:
+.. _`local conftest plugins`:
+
+conftest.py: local per-directory plugins
+----------------------------------------
+
+Local ``conftest.py`` plugins contain directory-specific hook
+implementations. Session and test running activities will
+invoke all hooks defined in ``conftest.py`` files closer to the
+root of the filesystem. Here is an example of implementing the
+``pytest_runtest_setup`` hook so that it is called for tests in the ``a``
+subdirectory but not for other directories::
+
+ a/conftest.py:
+ def pytest_runtest_setup(item):
+ # called for running each test in 'a' directory
+ print("setting up", item)
+
+ a/test_sub.py:
+ def test_sub():
+ pass
+
+ test_flat.py:
+ def test_flat():
+ pass
+
+Here is how you might run it::
+
+    pytest test_flat.py --capture=no  # will not show "setting up"
+    pytest a/test_sub.py --capture=no  # will show "setting up"
+
+.. note::
+ If you have ``conftest.py`` files which do not reside in a
+ python package directory (i.e. one containing an ``__init__.py``) then
+ "import conftest" can be ambiguous because there might be other
+ ``conftest.py`` files as well on your ``PYTHONPATH`` or ``sys.path``.
+ It is thus good practice for projects to either put ``conftest.py``
+ under a package scope or to never import anything from a
+ ``conftest.py`` file.
+
+ See also: :ref:`pythonpath`.
+
+.. note::
+ Some hooks should be implemented only in plugins or conftest.py files situated at the
+ tests root directory due to how pytest discovers plugins during startup,
+ see the documentation of each hook for details.
+
+Writing your own plugin
+-----------------------
+
+If you want to write a plugin, there are many real-life examples
+you can copy from:
+
+* a custom collection example plugin: :ref:`yaml plugin`
+* builtin plugins which provide pytest's own functionality
+* many :ref:`external plugins <plugin-list>` providing additional features
+
+All of these plugins implement :ref:`hooks <hook-reference>` and/or :ref:`fixtures <fixture>`
+to extend and add functionality.
+
+.. note::
+ Make sure to check out the excellent
+ `cookiecutter-pytest-plugin <https://github.com/pytest-dev/cookiecutter-pytest-plugin>`_
+ project, which is a `cookiecutter template <https://github.com/audreyr/cookiecutter>`_
+ for authoring plugins.
+
+ The template provides an excellent starting point with a working plugin,
+ tests running with tox, a comprehensive README file as well as a
+ pre-configured entry-point.
+
+Also consider :ref:`contributing your plugin to pytest-dev<submitplugin>`
+once it has some happy users other than yourself.
+
+
+.. _`setuptools entry points`:
+.. _`pip-installable plugins`:
+
+Making your plugin installable by others
+----------------------------------------
+
+If you want to make your plugin externally available, you
+may define a so-called entry point for your distribution so
+that ``pytest`` finds your plugin module. Entry points are
+a feature that is provided by :std:doc:`setuptools:index`. pytest looks up
+the ``pytest11`` entry point to discover its
+plugins, and you can thus make your plugin available by defining
+it in your setuptools invocation:
+
+.. sourcecode:: python
+
+ # sample ./setup.py file
+ from setuptools import setup
+
+ setup(
+ name="myproject",
+ packages=["myproject"],
+ # the following makes a plugin available to pytest
+ entry_points={"pytest11": ["name_of_plugin = myproject.pluginmodule"]},
+ # custom PyPI classifier for pytest plugins
+ classifiers=["Framework :: Pytest"],
+ )
+
+If a package is installed this way, ``pytest`` will load
+``myproject.pluginmodule`` as a plugin which can define
+:ref:`hooks <hook-reference>`.
+
+.. note::
+
+ Make sure to include ``Framework :: Pytest`` in your list of
+ `PyPI classifiers <https://pypi.org/classifiers/>`_
+ to make it easy for users to find your plugin.
+
+
+.. _assertion-rewriting:
+
+Assertion Rewriting
+-------------------
+
+One of the main features of ``pytest`` is the use of plain assert
+statements and the detailed introspection of expressions upon
+assertion failures. This is provided by "assertion rewriting" which
+modifies the parsed AST before it gets compiled to bytecode. This is
+done via a :pep:`302` import hook which gets installed early on when
+``pytest`` starts up and will perform this rewriting when modules get
+imported. However, since we do not want to test different bytecode
+from what you will run in production, this hook only rewrites test modules
+themselves (as defined by the :confval:`python_files` configuration option),
+and any modules which are part of plugins.
+Any other imported module will not be rewritten and normal assertion behaviour
+will happen.
+
+If you have assertion helpers in other modules where you would need
+assertion rewriting to be enabled, you need to ask ``pytest``
+explicitly to rewrite this module before it gets imported.
+
+.. autofunction:: pytest.register_assert_rewrite
+ :noindex:
+
+This is especially important when you write a pytest plugin which is
+created using a package. The import hook only treats ``conftest.py``
+files and any modules which are listed in the ``pytest11`` entrypoint
+as plugins. As an example consider the following package::
+
+ pytest_foo/__init__.py
+ pytest_foo/plugin.py
+ pytest_foo/helper.py
+
+With the following typical ``setup.py`` extract:
+
+.. code-block:: python
+
+ setup(..., entry_points={"pytest11": ["foo = pytest_foo.plugin"]}, ...)
+
+In this case only ``pytest_foo/plugin.py`` will be rewritten. If the
+helper module also contains assert statements which need to be
+rewritten it needs to be marked as such, before it gets imported.
+This is easiest by marking it for rewriting inside the
+``__init__.py`` module, which will always be imported first when a
+module inside a package is imported. This way ``plugin.py`` can still
+import ``helper.py`` normally. The contents of
+``pytest_foo/__init__.py`` will then need to look like this:
+
+.. code-block:: python
+
+ import pytest
+
+ pytest.register_assert_rewrite("pytest_foo.helper")
+
+
+Requiring/Loading plugins in a test module or conftest file
+-----------------------------------------------------------
+
+You can require plugins in a test module or a ``conftest.py`` file using :globalvar:`pytest_plugins`:
+
+.. code-block:: python
+
+ pytest_plugins = ["name1", "name2"]
+
+When the test module or conftest plugin is loaded the specified plugins
+will be loaded as well. Any module can be blessed as a plugin, including internal
+application modules:
+
+.. code-block:: python
+
+ pytest_plugins = "myapp.testsupport.myplugin"
+
+:globalvar:`pytest_plugins` are processed recursively, so note that in the example above
+if ``myapp.testsupport.myplugin`` also declares :globalvar:`pytest_plugins`, the contents
+of the variable will also be loaded as plugins, and so on.
+
+.. _`requiring plugins in non-root conftests`:
+
+.. note::
+ Requiring plugins using :globalvar:`pytest_plugins` variable in non-root
+ ``conftest.py`` files is deprecated.
+
+ This is important because ``conftest.py`` files implement per-directory
+ hook implementations, but once a plugin is imported, it will affect the
+ entire directory tree. In order to avoid confusion, defining
+ :globalvar:`pytest_plugins` in any ``conftest.py`` file which is not located in the
+ tests root directory is deprecated, and will raise a warning.
+
+This mechanism makes it easy to share fixtures within applications or even
+external applications without the need to create external plugins using
+the ``setuptools`` entry point technique.
+
+Plugins imported by :globalvar:`pytest_plugins` will also automatically be marked
+for assertion rewriting (see :func:`pytest.register_assert_rewrite`).
+However for this to have any effect the module must not be
+imported already; if it was already imported at the time the
+:globalvar:`pytest_plugins` statement is processed, a warning will result and
+assertions inside the plugin will not be rewritten. To fix this you
+can either call :func:`pytest.register_assert_rewrite` yourself before
+the module is imported, or you can arrange the code to delay the
+importing until after the plugin is registered.
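+
+For example, a minimal sketch of the first approach in a root
+``conftest.py`` (the module names are hypothetical):
+
+.. code-block:: python
+
+    # content of conftest.py
+    import pytest
+
+    # mark the helper module for rewriting *before* anything imports it
+    pytest.register_assert_rewrite("myapp.testsupport.helpers")
+
+    pytest_plugins = ["myapp.testsupport.myplugin"]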
+
+
+Accessing another plugin by name
+--------------------------------
+
+If a plugin wants to collaborate with code from
+another plugin it can obtain a reference through
+the plugin manager like this:
+
+.. code-block:: python
+
+ plugin = config.pluginmanager.get_plugin("name_of_plugin")
+
+If you want to look at the names of existing plugins, use
+the ``--trace-config`` option.
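+
+For example, a minimal sketch of a hook in your own plugin that checks
+for another (hypothetically named) plugin before collaborating with it:
+
+.. code-block:: python
+
+    def pytest_configure(config):
+        if config.pluginmanager.has_plugin("name_of_plugin"):
+            other = config.pluginmanager.get_plugin("name_of_plugin")
+            # collaborate with the other plugin's objects here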
+
+
+.. _registering-markers:
+
+Registering custom markers
+--------------------------
+
+If your plugin uses any markers, you should register them so that they appear in
+pytest's help text and do not :ref:`cause spurious warnings <unknown-marks>`.
+For example, the following plugin would register ``cool_marker`` and
+``mark_with`` for all users:
+
+.. code-block:: python
+
+ def pytest_configure(config):
+ config.addinivalue_line("markers", "cool_marker: this one is for cool tests.")
+ config.addinivalue_line(
+ "markers", "mark_with(arg, arg2): this marker takes arguments."
+ )
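+
+Once registered, the markers can be applied in test modules as usual; a
+minimal sketch (the test names are illustrative):
+
+.. code-block:: python
+
+    import pytest
+
+
+    @pytest.mark.cool_marker
+    def test_something_cool():
+        assert True
+
+
+    @pytest.mark.mark_with("fast", 3)
+    def test_with_arguments():
+        assert True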
+
+
+Testing plugins
+---------------
+
+pytest comes with a plugin named ``pytester`` that helps you write tests for
+your plugin code. The plugin is disabled by default, so you will have to enable
+it before you can use it.
+
+You can do so by adding the following line to a ``conftest.py`` file in your
+testing directory:
+
+.. code-block:: python
+
+ # content of conftest.py
+
+ pytest_plugins = ["pytester"]
+
+Alternatively you can invoke pytest with the ``-p pytester`` command line
+option.
+
+This will allow you to use the :py:class:`pytester <pytest.Pytester>`
+fixture for testing your plugin code.
+
+Let's demonstrate what you can do with the plugin using an example. Imagine we
+developed a plugin that provides a fixture ``hello`` which yields a function
+that we can invoke with one optional parameter. The function returns a
+string value of ``Hello World!`` if we do not supply a value, or ``Hello
+{value}!`` if we do supply a string value.
+
+.. code-block:: python
+
+ import pytest
+
+
+ def pytest_addoption(parser):
+ group = parser.getgroup("helloworld")
+ group.addoption(
+ "--name",
+ action="store",
+ dest="name",
+ default="World",
+ help='Default "name" for hello().',
+ )
+
+
+    @pytest.fixture
+    def hello(request):
+        default_name = request.config.getoption("name")
+
+        def _hello(name=None):
+            if not name:
+                name = default_name
+            return "Hello {name}!".format(name=name)
+
+        return _hello
+
+
+Now the ``pytester`` fixture provides a convenient API for creating temporary
+``conftest.py`` files and test files. It also allows us to run the tests and
+return a result object, with which we can assert the tests' outcomes.
+
+.. code-block:: python
+
+ def test_hello(pytester):
+ """Make sure that our plugin works."""
+
+ # create a temporary conftest.py file
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(params=[
+ "Brianna",
+ "Andreas",
+ "Floris",
+ ])
+ def name(request):
+ return request.param
+ """
+ )
+
+ # create a temporary pytest test file
+ pytester.makepyfile(
+ """
+ def test_hello_default(hello):
+ assert hello() == "Hello World!"
+
+ def test_hello_name(hello, name):
+ assert hello(name) == "Hello {0}!".format(name)
+ """
+ )
+
+ # run all tests with pytest
+ result = pytester.runpytest()
+
+ # check that all 4 tests passed
+ result.assert_outcomes(passed=4)
+
+
+Additionally it is possible to copy examples to the ``pytester``'s isolated environment
+before running pytest on it. This way we can abstract the tested logic to separate files,
+which is especially useful for longer tests and/or longer ``conftest.py`` files.
+
+Note that for ``pytester.copy_example`` to work we need to set ``pytester_example_dir``
+in our ``pytest.ini`` to tell pytest where to look for example files.
+
+.. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ pytester_example_dir = .
+
+
+.. code-block:: python
+
+ # content of test_example.py
+
+
+ def test_plugin(pytester):
+ pytester.copy_example("test_example.py")
+ pytester.runpytest("-k", "test_example")
+
+
+ def test_example():
+ pass
+
+.. code-block:: pytest
+
+ $ pytest
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project, configfile: pytest.ini
+ collected 2 items
+
+ test_example.py .. [100%]
+
+ ============================ 2 passed in 0.12s =============================
+
+For more information about the result object that ``runpytest()`` returns, and
+the methods that it provides please check out the :py:class:`RunResult
+<_pytest.pytester.RunResult>` documentation.
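+
+As a brief, illustrative sketch of that result object, assuming the same
+``pytester`` setup as above:
+
+.. code-block:: python
+
+    def test_run_result(pytester):
+        pytester.makepyfile(
+            """
+            def test_ok():
+                assert True
+            """
+        )
+        result = pytester.runpytest("-v")
+
+        # RunResult exposes the exit code and the captured output
+        assert result.ret == 0
+        result.stdout.fnmatch_lines(["*test_ok PASSED*"])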
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/xunit_setup.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/xunit_setup.rst
new file mode 100644
index 0000000000..5a97b2c85f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/how-to/xunit_setup.rst
@@ -0,0 +1,117 @@
+
+.. _`classic xunit`:
+.. _xunitsetup:
+
+How to implement xunit-style set-up
+========================================
+
+This section describes a classic and popular way to implement
+fixtures (setup and teardown test state) on a per-module/class/function basis.
+
+
+.. note::
+
+ While these setup/teardown methods are simple and familiar to those
+ coming from a ``unittest`` or ``nose`` background, you may also consider
+ using pytest's more powerful :ref:`fixture mechanism
+ <fixture>` which leverages the concept of dependency injection, allowing
+ for a more modular and more scalable approach for managing test state,
+ especially for larger projects and for functional testing. You can
+ mix both fixture mechanisms in the same file but
+ test methods of ``unittest.TestCase`` subclasses
+ cannot receive fixture arguments.
+
+
+Module level setup/teardown
+--------------------------------------
+
+If you have multiple test functions and test classes in a single
+module you can optionally implement the following fixture methods
+which will usually be called once for all the functions:
+
+.. code-block:: python
+
+ def setup_module(module):
+        """setup any state specific to the execution of the given module."""
+
+
+ def teardown_module(module):
+ """teardown any state that was previously setup with a setup_module
+ method.
+ """
+
+As of pytest-3.0, the ``module`` parameter is optional.
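+
+A short sketch of how these module-level hooks might be used (the shared
+resource here is purely illustrative):
+
+.. code-block:: python
+
+    connection = None
+
+
+    def setup_module(module):
+        global connection
+        connection = {"open": True}  # e.g. acquire a shared resource once
+
+
+    def teardown_module(module):
+        global connection
+        connection = None  # release it after the last test in the module
+
+
+    def test_uses_connection():
+        assert connection["open"]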
+
+Class level setup/teardown
+----------------------------------
+
+Similarly, the following methods are called at class level before
+and after all test methods of the class are called:
+
+.. code-block:: python
+
+ @classmethod
+ def setup_class(cls):
+ """setup any state specific to the execution of the given class (which
+ usually contains tests).
+ """
+
+
+ @classmethod
+ def teardown_class(cls):
+ """teardown any state that was previously setup with a call to
+ setup_class.
+ """
+
+Method and function level setup/teardown
+-----------------------------------------------
+
+Similarly, the following methods are called around each method invocation:
+
+.. code-block:: python
+
+ def setup_method(self, method):
+ """setup any state tied to the execution of the given method in a
+ class. setup_method is invoked for every test method of a class.
+ """
+
+
+ def teardown_method(self, method):
+ """teardown any state that was previously setup with a setup_method
+ call.
+ """
+
+As of pytest-3.0, the ``method`` parameter is optional.
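+
+A short sketch of these per-method hooks in a test class (the class and
+attribute names are illustrative):
+
+.. code-block:: python
+
+    class TestList:
+        def setup_method(self, method):
+            # runs before every test method in this class
+            self.items = []
+
+        def teardown_method(self, method):
+            # runs after every test method in this class
+            self.items = None
+
+        def test_append(self):
+            self.items.append(1)
+            assert self.items == [1]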
+
+If you would rather define test functions directly at module level
+you can also use the following functions to implement fixtures:
+
+.. code-block:: python
+
+ def setup_function(function):
+ """setup any state tied to the execution of the given function.
+ Invoked for every test function in the module.
+ """
+
+
+ def teardown_function(function):
+ """teardown any state that was previously setup with a setup_function
+ call.
+ """
+
+As of pytest-3.0, the ``function`` parameter is optional.
+
+Remarks:
+
+* It is possible for setup/teardown pairs to be invoked multiple times
+ per testing process.
+
+* teardown functions are not called if the corresponding setup function existed
+ and failed/was skipped.
+
+* Prior to pytest-4.2, xunit-style functions did not obey the scope rules of fixtures, so
+ it was possible, for example, for a ``setup_method`` to be called before a
+ session-scoped autouse fixture.
+
+ Now the xunit-style functions are integrated with the fixture mechanism and obey the proper
+ scope rules of fixtures involved in the call.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/cramer2.png b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/cramer2.png
new file mode 100644
index 0000000000..6bf0e92e20
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/cramer2.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/favicon.png b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/favicon.png
new file mode 100644
index 0000000000..5c8824d67d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/favicon.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/freiburg2.jpg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/freiburg2.jpg
new file mode 100644
index 0000000000..3383d3023d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/freiburg2.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/gaynor3.png b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/gaynor3.png
new file mode 100644
index 0000000000..a577c168b3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/gaynor3.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/keleshev.png b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/keleshev.png
new file mode 100644
index 0000000000..0d5e571e26
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/keleshev.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pullrequest.png b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pullrequest.png
new file mode 100644
index 0000000000..4af293b213
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pullrequest.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pylib.png b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pylib.png
new file mode 100644
index 0000000000..2e10d43886
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pylib.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pytest1.png b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pytest1.png
new file mode 100644
index 0000000000..e8064a694c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pytest1.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pytest_logo_curves.svg b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pytest_logo_curves.svg
new file mode 100644
index 0000000000..e05ceb1123
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/pytest_logo_curves.svg
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0" y="0" width="1500" height="1500" viewBox="0, 0, 1500, 1500">
+ <g id="pytest_logo">
+ <g id="graphics">
+ <path d="M521.576,213.75 L952.616,213.75 C964.283,213.75 973.741,223.208 973.741,234.875 L973.741,234.875 C973.741,246.542 964.283,256 952.616,256 L521.576,256 C509.909,256 500.451,246.542 500.451,234.875 L500.451,234.875 C500.451,223.208 509.909,213.75 521.576,213.75 z" fill="#696969" id="horizontal_bar"/>
+ <g id="top_bars">
+ <path d="M525.333,171 L612,171 L612,191 L525.333,191 L525.333,171 z" fill="#009FE3"/>
+ <path d="M638.667,171 L725.333,171 L725.333,191 L638.667,191 L638.667,171 z" fill="#C7D302"/>
+ <path d="M750.5,171 L837.167,171 L837.167,191 L750.5,191 L750.5,171 z" fill="#F07E16"/>
+ <path d="M861.861,171 L948.528,171 L948.528,191 L861.861,191 L861.861,171 z" fill="#DF2815"/>
+ </g>
+ <g id="bottom_bars">
+ <path d="M861.861,278 L948.528,278 L948.528,424.5 L861.861,424.5 L861.861,278 z" fill="#DF2815"/>
+ <path d="M750.5,278 L837.328,278 L837.328,516 L750.5,516 L750.5,278 z" fill="#F07E16"/>
+ <path d="M638.667,278 L725.328,278 L725.328,634.5 L638.667,634.5 L638.667,278 z" fill="#C7D302"/>
+ <path d="M525.333,278 L612,278 L612,712.5 L525.333,712.5 L525.333,278 z" fill="#009FE3"/>
+ </g>
+ </g>
+ <g id="pytest">
+ <path d="M252.959,1173.846 Q240.139,1173.846 229.71,1171.021 Q219.28,1168.196 210.914,1163.525 Q202.549,1158.853 196.139,1152.552 Q189.729,1146.25 184.732,1139.297 L182.124,1139.297 Q182.776,1146.685 183.428,1153.421 Q183.862,1159.07 184.297,1165.046 Q184.732,1171.021 184.732,1174.498 L184.732,1276.404 L145.186,1276.404 L145.186,930.921 L177.344,930.921 L182.993,963.079 L184.732,963.079 Q189.729,955.474 196.03,948.847 Q202.332,942.22 210.697,937.331 Q219.063,932.442 229.492,929.509 Q239.922,926.575 252.959,926.575 Q273.384,926.575 290.115,934.397 Q306.846,942.22 318.688,957.756 Q330.53,973.292 337.048,996.324 Q343.567,1019.356 343.567,1049.776 Q343.567,1080.413 337.048,1103.554 Q330.53,1126.695 318.688,1142.339 Q306.846,1157.984 290.115,1165.915 Q273.384,1173.846 252.959,1173.846 z M245.354,959.385 Q228.84,959.385 217.433,964.383 Q206.025,969.38 198.964,979.593 Q191.902,989.805 188.534,1005.015 Q185.166,1020.225 184.732,1040.867 L184.732,1049.776 Q184.732,1071.722 187.665,1088.779 Q190.598,1105.835 197.66,1117.46 Q204.722,1129.085 216.455,1135.06 Q228.189,1141.036 245.789,1141.036 Q275.122,1141.036 288.92,1117.352 Q302.717,1093.667 302.717,1049.341 Q302.717,1004.146 288.92,981.766 Q275.122,959.385 245.354,959.385 z" fill="#696969"/>
+ <path d="M370.293,930.921 L411.36,930.921 L458.076,1064.117 Q461.118,1072.808 464.269,1082.369 Q467.42,1091.929 470.136,1101.49 Q472.852,1111.05 474.807,1119.959 Q476.763,1128.868 477.632,1136.473 L478.936,1136.473 Q480.022,1131.041 482.412,1121.697 Q484.802,1112.354 487.736,1101.816 Q490.669,1091.277 493.82,1081.065 Q496.97,1070.853 499.36,1063.682 L542.6,930.921 L583.45,930.921 L489.148,1200.572 Q483.064,1218.172 476.002,1232.187 Q468.941,1246.202 459.597,1255.979 Q450.254,1265.757 437.651,1271.081 Q425.049,1276.404 407.666,1276.404 Q396.367,1276.404 388.11,1275.209 Q379.854,1274.014 373.987,1272.71 L373.987,1241.204 Q378.55,1242.291 385.503,1243.051 Q392.456,1243.812 400.061,1243.812 Q410.491,1243.812 418.096,1241.313 Q425.701,1238.814 431.35,1234.034 Q437,1229.253 441.019,1222.3 Q445.039,1215.347 448.298,1206.438 L460.684,1171.673 z" fill="#696969"/>
+ <path d="M695.568,1141.47 Q699.479,1141.47 704.368,1141.036 Q709.257,1140.601 713.82,1139.949 Q718.383,1139.297 722.186,1138.428 Q725.988,1137.559 727.944,1136.907 L727.944,1166.893 Q725.119,1168.196 720.773,1169.5 Q716.428,1170.804 711.213,1171.781 Q705.998,1172.759 700.349,1173.302 Q694.699,1173.846 689.267,1173.846 Q675.795,1173.846 664.279,1170.369 Q652.763,1166.893 644.398,1158.418 Q636.032,1149.944 631.252,1135.495 Q626.472,1121.045 626.472,1099.1 L626.472,960.689 L592.792,960.689 L592.792,943.089 L626.472,926.141 L643.42,876.165 L666.235,876.165 L666.235,930.921 L726.206,930.921 L726.206,960.689 L666.235,960.689 L666.235,1099.1 Q666.235,1120.176 673.079,1130.823 Q679.924,1141.47 695.568,1141.47 z" fill="#009FE3"/>
+ <path d="M868.527,1173.846 Q844.626,1173.846 824.853,1165.806 Q805.08,1157.767 790.848,1142.339 Q776.616,1126.912 768.793,1104.097 Q760.971,1081.282 760.971,1051.949 Q760.971,1022.398 768.142,999.148 Q775.312,975.899 788.349,959.711 Q801.386,943.523 819.529,935.049 Q837.673,926.575 859.619,926.575 Q881.13,926.575 898.295,934.289 Q915.461,942.002 927.412,956.017 Q939.362,970.032 945.772,989.697 Q952.182,1009.361 952.182,1033.262 L952.182,1057.815 L801.821,1057.815 Q802.907,1099.751 819.529,1119.524 Q836.152,1139.297 868.962,1139.297 Q880.043,1139.297 889.495,1138.211 Q898.947,1137.125 907.747,1135.06 Q916.547,1132.996 924.804,1129.845 Q933.061,1126.695 941.535,1122.784 L941.535,1157.984 Q932.844,1162.112 924.478,1165.154 Q916.113,1168.196 907.313,1170.152 Q898.513,1172.107 889.061,1172.977 Q879.609,1173.846 868.527,1173.846 z M858.749,959.385 Q833.979,959.385 819.529,976.333 Q805.08,993.282 802.69,1025.657 L909.594,1025.657 Q909.594,1010.882 906.661,998.605 Q903.727,986.329 897.535,977.637 Q891.342,968.946 881.782,964.166 Q872.221,959.385 858.749,959.385 z" fill="#009FE3"/>
+ <path d="M1155.126,1104.097 Q1155.126,1121.48 1148.825,1134.517 Q1142.524,1147.554 1130.682,1156.354 Q1118.84,1165.154 1102.109,1169.5 Q1085.378,1173.846 1064.518,1173.846 Q1040.834,1173.846 1023.886,1170.043 Q1006.938,1166.241 994.118,1158.853 L994.118,1122.784 Q1000.854,1126.26 1009.111,1129.628 Q1017.368,1132.996 1026.494,1135.604 Q1035.62,1138.211 1045.289,1139.841 Q1054.958,1141.47 1064.518,1141.47 Q1078.642,1141.47 1088.528,1139.08 Q1098.415,1136.69 1104.608,1132.236 Q1110.8,1127.781 1113.625,1121.371 Q1116.45,1114.961 1116.45,1107.139 Q1116.45,1100.403 1114.277,1094.971 Q1112.104,1089.539 1106.346,1084.216 Q1100.588,1078.892 1090.593,1073.46 Q1080.598,1068.028 1064.953,1061.292 Q1049.308,1054.556 1036.815,1048.038 Q1024.321,1041.519 1015.629,1033.479 Q1006.938,1025.44 1002.266,1014.902 Q997.595,1004.363 997.595,989.805 Q997.595,974.595 1003.57,962.753 Q1009.545,950.911 1020.41,942.872 Q1031.274,934.832 1046.484,930.704 Q1061.694,926.575 1080.38,926.575 Q1101.457,926.575 1118.948,931.138 Q1136.44,935.701 1152.084,943.089 L1138.395,975.03 Q1124.272,968.729 1109.388,964.057 Q1094.504,959.385 1079.077,959.385 Q1056.913,959.385 1046.266,966.664 Q1035.62,973.943 1035.62,987.415 Q1035.62,995.02 1038.118,1000.669 Q1040.617,1006.319 1046.701,1011.316 Q1052.785,1016.314 1062.997,1021.42 Q1073.21,1026.526 1088.42,1032.828 Q1104.064,1039.346 1116.341,1045.865 Q1128.618,1052.383 1137.309,1060.531 Q1146,1068.68 1150.563,1079.109 Q1155.126,1089.539 1155.126,1104.097 z" fill="#009FE3"/>
+ <path d="M1285.28,1141.47 Q1289.191,1141.47 1294.08,1141.036 Q1298.969,1140.601 1303.532,1139.949 Q1308.095,1139.297 1311.898,1138.428 Q1315.7,1137.559 1317.656,1136.907 L1317.656,1166.893 Q1314.831,1168.196 1310.485,1169.5 Q1306.14,1170.804 1300.925,1171.781 Q1295.71,1172.759 1290.06,1173.302 Q1284.411,1173.846 1278.979,1173.846 Q1265.507,1173.846 1253.991,1170.369 Q1242.475,1166.893 1234.109,1158.418 Q1225.744,1149.944 1220.964,1135.495 Q1216.183,1121.045 1216.183,1099.1 L1216.183,960.689 L1182.504,960.689 L1182.504,943.089 L1216.183,926.141 L1233.132,876.165 L1255.947,876.165 L1255.947,930.921 L1315.917,930.921 L1315.917,960.689 L1255.947,960.689 L1255.947,1099.1 Q1255.947,1120.176 1262.791,1130.823 Q1269.636,1141.47 1285.28,1141.47 z" fill="#009FE3"/>
+ </g>
+ </g>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/theuni.png b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/theuni.png
new file mode 100644
index 0000000000..abeb737e79
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/img/theuni.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/index.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/index.rst
new file mode 100644
index 0000000000..d1b3d2e8a0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/index.rst
@@ -0,0 +1,148 @@
+:orphan:
+
+..
+ .. sidebar:: Next Open Trainings
+
+ - `Professional Testing with Python <https://www.python-academy.com/courses/specialtopics/python_course_testing.html>`_, via `Python Academy <https://www.python-academy.com/>`_, February 1st to 3rd, 2022, Leipzig (Germany) and remote.
+
+ Also see `previous talks and blogposts <talks.html>`_.
+
+.. _features:
+
+pytest: helps you write better programs
+=======================================
+
+.. module:: pytest
+
+The ``pytest`` framework makes it easy to write small, readable tests, and can
+scale to support complex functional testing for applications and libraries.
+
+
+**Pythons**: ``pytest`` requires Python 3.6, 3.7, 3.8, 3.9, or PyPy3.
+
+**PyPI package name**: :pypi:`pytest`
+
+**Documentation as PDF**: `download latest <https://media.readthedocs.org/pdf/pytest/latest/pytest.pdf>`_
+
+
+A quick example
+---------------
+
+.. code-block:: python
+
+ # content of test_sample.py
+ def inc(x):
+ return x + 1
+
+
+ def test_answer():
+ assert inc(3) == 5
+
+
+To execute it:
+
+.. code-block:: pytest
+
+ $ pytest
+ =========================== test session starts ============================
+ platform linux -- Python 3.x.y, pytest-7.x.y, pluggy-1.x.y
+ rootdir: /home/sweet/project
+ collected 1 item
+
+ test_sample.py F [100%]
+
+ ================================= FAILURES =================================
+ _______________________________ test_answer ________________________________
+
+ def test_answer():
+ > assert inc(3) == 5
+ E assert 4 == 5
+ E + where 4 = inc(3)
+
+ test_sample.py:6: AssertionError
+ ========================= short test summary info ==========================
+ FAILED test_sample.py::test_answer - assert 4 == 5
+ ============================ 1 failed in 0.12s =============================
+
+Due to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used.
+See :ref:`Get started <getstarted>` for a basic introduction to using pytest.
+
+
+Features
+--------
+
+- Detailed info on failing :ref:`assert statements <assert>` (no need to remember ``self.assert*`` names)
+
+- :ref:`Auto-discovery <test discovery>` of test modules and functions
+
+- :ref:`Modular fixtures <fixture>` for managing small or parametrized long-lived test resources
+
+- Can run :ref:`unittest <unittest>` (including trial) and :ref:`nose <noseintegration>` test suites out of the box
+
+- Python 3.6+ and PyPy 3
+
+- Rich plugin architecture, with over 800 :ref:`external plugins <plugin-list>` and a thriving community
+
+
+Documentation
+-------------
+
+* :ref:`Get started <get-started>` - install pytest and grasp its basics in just twenty minutes
+* :ref:`How-to guides <how-to>` - step-by-step guides, covering a vast range of use-cases and needs
+* :ref:`Reference guides <reference>` - includes the complete pytest API reference, lists of plugins and more
+* :ref:`Explanation <explanation>` - background, discussion of key topics, answers to higher-level questions
+
+
+Bugs/Requests
+-------------
+
+Please use the `GitHub issue tracker <https://github.com/pytest-dev/pytest/issues>`_ to submit bugs or request features.
+
+
+Changelog
+---------
+
+Consult the :ref:`Changelog <changelog>` page for fixes and enhancements of each version.
+
+Support pytest
+--------------
+
+`Open Collective`_ is an online funding platform for open and transparent communities.
+It provides tools to raise money and share your finances in full transparency.
+
+It is the platform of choice for individuals and companies that want to make one-time or
+monthly donations directly to the project.
+
+See more details in the `pytest collective`_.
+
+.. _Open Collective: https://opencollective.com
+.. _pytest collective: https://opencollective.com/pytest
+
+
+pytest for enterprise
+---------------------
+
+Available as part of the Tidelift Subscription.
+
+The maintainers of pytest and thousands of other packages are working with Tidelift to deliver commercial support and
+maintenance for the open source dependencies you use to build your applications.
+Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use.
+
+`Learn more. <https://tidelift.com/subscription/pkg/pypi-pytest?utm_source=pypi-pytest&utm_medium=referral&utm_campaign=enterprise&utm_term=repo>`_
+
+Security
+~~~~~~~~
+
+pytest has never been associated with a security vulnerability, but in any case, to report a
+security vulnerability please use the `Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure.
+
+
+License
+-------
+
+Copyright Holger Krekel and others, 2004.
+
+Distributed under the terms of the `MIT`_ license, pytest is free and open source software.
+
+.. _`MIT`: https://github.com/pytest-dev/pytest/blob/main/LICENSE
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/license.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/license.rst
new file mode 100644
index 0000000000..acbfb8bdb1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/license.rst
@@ -0,0 +1,32 @@
+.. _license:
+
+License
+-------
+
+Distributed under the terms of the `MIT`_ license, pytest is free and open source software.
+
+.. code-block:: text
+
+ The MIT License (MIT)
+
+ Copyright (c) 2004 Holger Krekel and others
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
+ this software and associated documentation files (the "Software"), to deal in
+ the Software without restriction, including without limitation the rights to
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ of the Software, and to permit persons to whom the Software is furnished to do
+ so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+
+.. _`MIT`: https://github.com/pytest-dev/pytest/blob/main/LICENSE
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/naming20.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/naming20.rst
new file mode 100644
index 0000000000..5a81df2698
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/naming20.rst
@@ -0,0 +1,20 @@
+
+.. _naming20:
+
+New pytest names in 2.0 (flat is better than nested)
+----------------------------------------------------
+
+If you used an older version of the ``py`` distribution (which
+included the ``py.test`` command line tool and Python namespace)
+you accessed helpers and possibly collection classes through
+the ``py.test`` Python namespace. The new ``pytest``
+Python module flatly provides the same objects, following
+these renaming rules::
+
+ py.test.XYZ -> pytest.XYZ
+ py.test.collect.XYZ -> pytest.XYZ
+ py.test.cmdline.main -> pytest.main
+
+The old ``py.test.*`` ways to access functionality remain
+valid but you are encouraged to do global renaming according
+to the above rules in your test code.
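+
+As a brief sketch, a test module written against the new names might look
+like this (the test itself is illustrative):
+
+.. code-block:: python
+
+    import pytest
+
+
+    def test_renamed_helpers():
+        # was: py.test.raises(...)
+        with pytest.raises(ZeroDivisionError):
+            1 / 0
+
+
+    if __name__ == "__main__":
+        # was: py.test.cmdline.main([...])
+        pytest.main([__file__])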
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/proposals/parametrize_with_fixtures.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/proposals/parametrize_with_fixtures.rst
new file mode 100644
index 0000000000..f6814ec78d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/proposals/parametrize_with_fixtures.rst
@@ -0,0 +1,164 @@
+:orphan:
+
+===================================
+PROPOSAL: Parametrize with fixtures
+===================================
+
+.. warning::
+
+ This document outlines a proposal around using fixtures as input
+ of parametrized tests or fixtures.
+
+Problem
+-------
+
+As a user I have functional tests that I would like to run against various
+scenarios.
+
+In this particular example we want to generate a new project based on a
+cookiecutter template. We want to test default values but also data that
+emulates user input.
+
+- use default values
+
+- emulate user input
+
+ - specify 'author'
+
+ - specify 'project_slug'
+
+ - specify 'author' and 'project_slug'
+
+This is what a functional test could look like:
+
+.. code-block:: python
+
+ import pytest
+
+
+ @pytest.fixture
+ def default_context():
+ return {"extra_context": {}}
+
+
+ @pytest.fixture(
+ params=[
+ {"author": "alice"},
+ {"project_slug": "helloworld"},
+ {"author": "bob", "project_slug": "foobar"},
+ ]
+ )
+ def extra_context(request):
+ return {"extra_context": request.param}
+
+
+ @pytest.fixture(params=["default", "extra"])
+ def context(request):
+ if request.param == "default":
+ return request.getfuncargvalue("default_context")
+ else:
+ return request.getfuncargvalue("extra_context")
+
+
+ def test_generate_project(cookies, context):
+ """Call the cookiecutter API to generate a new project from a
+ template.
+ """
+ result = cookies.bake(extra_context=context)
+
+ assert result.exit_code == 0
+ assert result.exception is None
+ assert result.project.isdir()
+
+
+Issues
+------
+
+* By using ``request.getfuncargvalue()`` we rely on actual fixture function
+ execution to know what fixtures are involved, due to its dynamic nature
+* More importantly, ``request.getfuncargvalue()`` cannot be combined with
+ parametrized fixtures, such as ``extra_context``
+* This is very inconvenient if you wish to extend an existing test suite with
+  certain parameters for fixtures that are already used by tests
+
+pytest version 3.0 reports an error if you try to run the above code::
+
+ Failed: The requested fixture has no parameter defined for the current
+ test.
+
+ Requested fixture 'extra_context'
+
+
+Proposed solution
+-----------------
+
+A new function, usable in modules, could be used to dynamically define
+fixtures from existing ones.
+
+.. code-block:: python
+
+ pytest.define_combined_fixture(
+ name="context", fixtures=["default_context", "extra_context"]
+ )
+
+The new fixture ``context`` inherits the scope from the fixtures it combines and yields
+the following values.
+
+- ``{}``
+
+- ``{'author': 'alice'}``
+
+- ``{'project_slug': 'helloworld'}``
+
+- ``{'author': 'bob', 'project_slug': 'foobar'}``
+
+Alternative approach
+--------------------
+
+A new helper function named ``fixture_request`` would tell pytest to yield
+all parameters marked as a fixture.
+
+.. note::
+
+    The :pypi:`pytest-lazy-fixture` plugin implements a very
+    similar solution to the proposal below; make sure to check it out.
+
+.. code-block:: python
+
+ @pytest.fixture(
+ params=[
+ pytest.fixture_request("default_context"),
+ pytest.fixture_request("extra_context"),
+ ]
+ )
+ def context(request):
+ """Returns all values for ``default_context``, one-by-one before it
+ does the same for ``extra_context``.
+
+ request.param:
+ - {}
+ - {'author': 'alice'}
+ - {'project_slug': 'helloworld'}
+ - {'author': 'bob', 'project_slug': 'foobar'}
+ """
+ return request.param
+
+The same helper can be used in combination with ``pytest.mark.parametrize``.
+
+.. code-block:: python
+
+
+ @pytest.mark.parametrize(
+        "context, exit_code",
+ [
+ (pytest.fixture_request("default_context"), 0),
+ (pytest.fixture_request("extra_context"), 0),
+ ],
+ )
+ def test_generate_project(cookies, context, exit_code):
+ """Call the cookiecutter API to generate a new project from a
+ template.
+ """
+ result = cookies.bake(extra_context=context)
+
+ assert result.exit_code == exit_code
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/py27-py34-deprecation.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/py27-py34-deprecation.rst
new file mode 100644
index 0000000000..660b078e30
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/py27-py34-deprecation.rst
@@ -0,0 +1,99 @@
+Python 2.7 and 3.4 support
+==========================
+
+Supporting many Python versions is demanding on the maintainers of an open source project, as
+there is the extra cost of keeping code compatible between all of them, while holding back on
+features only made possible on newer Python versions.
+
+In the case of Python 2 and 3, the differences between the languages make this even more prominent,
+because many new Python 3 features cannot be used in a Python 2/3 compatible code base.
+
+Python 2.7 EOL has been reached :pep:`in 2020 <0373#maintenance-releases>`, with
+the last release made in April, 2020.
+
+Python 3.4 EOL has been reached :pep:`in 2019 <0429#release-schedule>`, with the last release made in March, 2019.
+
+For those reasons, in June 2019 it was decided that the **pytest 4.6** series would be the last to support Python 2.7 and 3.4.
+
+What this means for general users
+---------------------------------
+
+Thanks to the `python_requires`_ setuptools option,
+Python 2.7 and Python 3.4 users using a modern pip version
+will install the last pytest 4.6.X version automatically even if 5.0 or later versions
+are available on PyPI.
+
+Users should ensure they are using the latest pip and setuptools versions for this to work.
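+
+For illustration, a minimal sketch of the same mechanism in a project's own
+``setup.py`` (the project name and version bound are placeholders):
+
+.. code-block:: python
+
+    from setuptools import setup
+
+    setup(
+        name="myproject",
+        version="1.0",
+        # pip refuses to install this release on older interpreters, which is
+        # how pytest keeps 4.6.X available for Python 2.7/3.4 users
+        python_requires=">=3.6",
+    )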
+
+Maintenance of 4.6.X versions
+-----------------------------
+
+Until January 2020, the pytest core team ported many bug-fixes from the main release into the
+``4.6.x`` branch, with several 4.6.X releases being made throughout the year.
+
+From now on, the core team will **no longer actively backport patches**, but the ``4.6.x``
+branch will continue to exist so the community itself can contribute patches.
+
+The core team will be happy to accept those patches, and make new 4.6.X releases **until mid-2020**
+(consider that date a ballpark; after it, the team might still decide to make new releases
+for critical bugs).
+
+.. _`python_requires`: https://packaging.python.org/guides/distributing-packages-using-setuptools/#python-requires
+
+Technical aspects
+~~~~~~~~~~~~~~~~~
+
+(This section is a transcript from :issue:`5275`).
+
+In this section we describe the technical aspects of the Python 2.7 and 3.4 support plan.
+
+.. _what goes into 4.6.x releases:
+
+What goes into 4.6.X releases
++++++++++++++++++++++++++++++
+
+New 4.6.X releases will contain bug fixes only.
+
+When will 4.6.X releases happen
++++++++++++++++++++++++++++++++
+
+New 4.6.X releases will happen after we have a few bug fixes ready to release, or if a few weeks have
+passed (say, a single bug has been fixed a month after the latest 4.6.X release).
+
+There are no hard rules here, just a ballpark.
+
+Who will handle applying bug fixes
+++++++++++++++++++++++++++++++++++
+
+We core maintainers expect people still using Python 2.7/3.4 who are affected by
+bugs to step up and provide patches and/or port bug fixes from the active branches.
+
+We will be happy to guide users interested in doing so, so please don't hesitate to ask.
+
+**Backporting changes into 4.6**
+
+Please follow these instructions:
+
+#. ``git fetch --all --prune``
+
+#. ``git checkout origin/4.6.x -b backport-XXXX`` # use the PR number here
+
+#. Locate the merge commit on the PR, in the *merged* message, for example:
+
+ nicoddemus merged commit 0f8b462 into pytest-dev:features
+
+#. ``git cherry-pick -m1 REVISION`` # use the revision you found above (``0f8b462``).
+
+#. Open a PR targeting ``4.6.x``:
+
+ * Prefix the message with ``[4.6]`` so it is an obvious backport
+ * Delete the PR body, it usually contains a duplicate commit message.
+
+**Providing new PRs to 4.6**
+
+Fresh pull requests to ``4.6.x`` will be accepted provided that
+the equivalent code in the active branches does not contain that bug (for example, when the bug
+is specific to Python 2 only).
+
+Bug fixes that also apply to the mainline version should be fixed
+there first, and then backported as per the instructions above.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/pytest.ini b/testing/web-platform/tests/tools/third_party/pytest/doc/en/pytest.ini
new file mode 100644
index 0000000000..7604360561
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/pytest.ini
@@ -0,0 +1,2 @@
+[pytest]
+# just defined to prevent the root level tox.ini from kicking in
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/recwarn.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/recwarn.rst
new file mode 100644
index 0000000000..513af0d450
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/recwarn.rst
@@ -0,0 +1,3 @@
+:orphan:
+
+This page has been moved, please see :ref:`assertwarnings`.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/customize.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/customize.rst
new file mode 100644
index 0000000000..fe10ca066b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/customize.rst
@@ -0,0 +1,248 @@
+Configuration
+=============
+
+Command line options and configuration file settings
+-----------------------------------------------------------------
+
+You can get help on command line options and values in INI-style
+configuration files by using the general help option:
+
+.. code-block:: bash
+
+ pytest -h # prints options _and_ config file settings
+
+This will display command line and configuration file settings
+which were registered by installed plugins.
+
+.. _`config file formats`:
+
+Configuration file formats
+--------------------------
+
+Many :ref:`pytest settings <ini options ref>` can be set in a *configuration file*, which
+by convention resides in the root directory of your repository.
+
+A quick example of the configuration files supported by pytest:
+
+pytest.ini
+~~~~~~~~~~
+
+``pytest.ini`` files take precedence over other files, even when empty.
+
+.. code-block:: ini
+
+ # pytest.ini
+ [pytest]
+ minversion = 6.0
+ addopts = -ra -q
+ testpaths =
+ tests
+ integration
+
+
+pyproject.toml
+~~~~~~~~~~~~~~
+
+.. versionadded:: 6.0
+
+``pyproject.toml`` files are considered for configuration when they contain a ``tool.pytest.ini_options`` table.
+
+.. code-block:: toml
+
+ # pyproject.toml
+ [tool.pytest.ini_options]
+ minversion = "6.0"
+ addopts = "-ra -q"
+ testpaths = [
+ "tests",
+ "integration",
+ ]
+
+.. note::
+
+ One might wonder why ``[tool.pytest.ini_options]`` instead of ``[tool.pytest]`` as is the
+ case with other tools.
+
+ The reason is that the pytest team intends to fully utilize the rich TOML data format
+ for configuration in the future, reserving the ``[tool.pytest]`` table for that.
+ The ``ini_options`` table is being used, for now, as a bridge between the existing
+ ``.ini`` configuration system and the future configuration format.
+
+tox.ini
+~~~~~~~
+
+``tox.ini`` files are the configuration files of the `tox <https://tox.readthedocs.io>`__ project,
+and can also be used to hold pytest configuration if they have a ``[pytest]`` section.
+
+.. code-block:: ini
+
+ # tox.ini
+ [pytest]
+ minversion = 6.0
+ addopts = -ra -q
+ testpaths =
+ tests
+ integration
+
+
+setup.cfg
+~~~~~~~~~
+
+``setup.cfg`` files are general purpose configuration files, used originally by :doc:`distutils <distutils/configfile>`, and can also be used to hold pytest configuration
+if they have a ``[tool:pytest]`` section.
+
+.. code-block:: ini
+
+ # setup.cfg
+ [tool:pytest]
+ minversion = 6.0
+ addopts = -ra -q
+ testpaths =
+ tests
+ integration
+
+.. warning::
+
+    Usage of ``setup.cfg`` is not recommended except for very simple use cases. ``.cfg``
+    files use a different parser than ``pytest.ini`` and ``tox.ini``, which might cause
+    hard-to-track-down problems.
+    When possible, it is recommended to use the latter files, or ``pyproject.toml``, to hold your
+    pytest configuration.
+
+
+.. _rootdir:
+.. _configfiles:
+
+Initialization: determining rootdir and configfile
+--------------------------------------------------
+
+pytest determines a ``rootdir`` for each test run which depends on
+the command line arguments (specified test files, paths) and on
+the existence of configuration files. The determined ``rootdir`` and ``configfile`` are
+printed as part of the pytest header during startup.
+
+Here's a summary of what ``pytest`` uses ``rootdir`` for:
+
+* To construct *nodeids* during collection; each test is assigned
+  a unique *nodeid* which is rooted at the ``rootdir`` and takes into account
+  the full path, class name, function name and parametrization (if any).
+
+* As a stable location for plugins to store project/test run specific information;
+  for example, the internal :ref:`cache <cache>` plugin creates a ``.pytest_cache`` subdirectory
+  in ``rootdir`` to store its cross-test run state.
+
+``rootdir`` is **NOT** used to modify ``sys.path``/``PYTHONPATH`` or
+influence how modules are imported. See :ref:`pythonpath` for more details.
+
+The ``--rootdir=path`` command-line option can be used to force a specific directory.
+Note that contrary to other command-line options, ``--rootdir`` cannot be used with
+:confval:`addopts` inside ``pytest.ini`` because the ``rootdir`` is used to *find* ``pytest.ini``
+already.
+
+Finding the ``rootdir``
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Here is the algorithm which finds the rootdir from ``args``:
+
+- If ``-c`` is passed on the command line, use that as the configuration file, and its directory as ``rootdir``.
+
+- Determine the common ancestor directory for the specified ``args`` that are
+ recognised as paths that exist in the file system. If no such paths are
+ found, the common ancestor directory is set to the current working directory.
+
+- Look for ``pytest.ini``, ``pyproject.toml``, ``tox.ini``, and ``setup.cfg`` files in the ancestor
+ directory and upwards. If one is matched, it becomes the ``configfile`` and its
+ directory becomes the ``rootdir``.
+
+- If no configuration file was found, look for ``setup.py`` upwards from the common
+ ancestor directory to determine the ``rootdir``.
+
+- If no ``setup.py`` was found, look for ``pytest.ini``, ``pyproject.toml``, ``tox.ini``, and
+ ``setup.cfg`` in each of the specified ``args`` and upwards. If one is
+ matched, it becomes the ``configfile`` and its directory becomes the ``rootdir``.
+
+- If no ``configfile`` was found and no configuration argument is passed, use the already determined common ancestor as root
+ directory. This allows the use of pytest in structures that are not part of
+ a package and don't have any particular configuration file.
+
+If no ``args`` are given, pytest collects tests below the current working
+directory and also starts determining the ``rootdir`` from there.
+
+Files will only be matched for configuration if:
+
+* ``pytest.ini``: will always match and take precedence, even if empty.
+* ``pyproject.toml``: contains a ``[tool.pytest.ini_options]`` table.
+* ``tox.ini``: contains a ``[pytest]`` section.
+* ``setup.cfg``: contains a ``[tool:pytest]`` section.
+
+The files are considered in the order above. Options from multiple ``configfile`` candidates
+are never merged - the first match wins.
+
+The :class:`Config <pytest.Config>` object (accessible via hooks or through the :fixture:`pytestconfig` fixture)
+will subsequently carry these attributes:
+
+- :attr:`config.rootpath <pytest.Config.rootpath>`: the determined root directory, guaranteed to exist.
+
+- :attr:`config.inipath <pytest.Config.inipath>`: the determined ``configfile``, may be ``None``
+ (it is named ``inipath`` for historical reasons).
+
+.. versionadded:: 6.1
+ The ``config.rootpath`` and ``config.inipath`` properties. They are :class:`pathlib.Path`
+ versions of the older ``config.rootdir`` and ``config.inifile``, which have type
+ ``py.path.local``, and still exist for backward compatibility.
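+
+As a brief sketch, both properties can be inspected from a hook once the
+configuration has been parsed:
+
+.. code-block:: python
+
+    # content of conftest.py
+    def pytest_configure(config):
+        print("rootdir:", config.rootpath)
+        print("configfile:", config.inipath)  # may be None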
+
+The ``rootdir`` is used as a reference directory for constructing test
+addresses ("nodeids") and can be used also by plugins for storing
+per-testrun information.
+
+Example:
+
+.. code-block:: bash
+
+ pytest path/to/testdir path/other/
+
+will determine the common ancestor as ``path`` and then
+check for configuration files as follows:
+
+.. code-block:: text
+
+ # first look for pytest.ini files
+ path/pytest.ini
+ path/pyproject.toml # must contain a [tool.pytest.ini_options] table to match
+ path/tox.ini # must contain [pytest] section to match
+ path/setup.cfg # must contain [tool:pytest] section to match
+ pytest.ini
+ ... # all the way up to the root
+
+ # now look for setup.py
+ path/setup.py
+ setup.py
+ ... # all the way up to the root
+
+
+.. warning::
+
+    Custom pytest plugin command line arguments may include a path, as in
+    ``pytest --log-output ../../test.log args``. Then ``args`` is mandatory;
+    otherwise pytest uses the folder of ``test.log`` for rootdir determination
+    (see also :issue:`1435`).
+    A dot ``.`` for referencing the current working directory is also
+    possible.
+
+
+.. _`how to change command line options defaults`:
+.. _`adding default options`:
+
+
+Builtin configuration file options
+----------------------------------------------
+
+For the full list of options consult the :ref:`reference documentation <ini options ref>`.
+
+Syntax highlighting theme customization
+---------------------------------------
+
+The syntax highlighting themes used by pytest can be customized using two environment variables:
+
+- :envvar:`PYTEST_THEME` sets a `pygment style <https://pygments.org/docs/styles/>`_ to use.
+- :envvar:`PYTEST_THEME_MODE` sets this style to *light* or *dark*.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/exit-codes.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/exit-codes.rst
new file mode 100644
index 0000000000..b695ca3702
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/exit-codes.rst
@@ -0,0 +1,26 @@
+.. _exit-codes:
+
+Exit codes
+========================================================
+
+Running ``pytest`` can result in six different exit codes:
+
+:Exit code 0: All tests were collected and passed successfully
+:Exit code 1: Tests were collected and run but some of the tests failed
+:Exit code 2: Test execution was interrupted by the user
+:Exit code 3: Internal error happened while executing tests
+:Exit code 4: pytest command line usage error
+:Exit code 5: No tests were collected
+
+They are represented by the :class:`pytest.ExitCode` enum. The exit codes, being part of the public API, can be imported and accessed directly using:
+
+.. code-block:: python
+
+ from pytest import ExitCode
+
+.. note::
+
+    If you would like to customize the exit code in some scenarios, especially when
+    no tests are collected, consider using the
+    `pytest-custom_exit_code <https://github.com/yashtodi94/pytest-custom_exit_code>`__
+    plugin.
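+
+Since :func:`pytest.main` returns one of these codes when pytest is invoked
+programmatically, a minimal sketch of checking it (the test path here is
+hypothetical) could look like this:
+
+.. code-block:: python
+
+    import pytest
+    from pytest import ExitCode
+
+    ret = pytest.main(["tests/"])
+    if ret == ExitCode.NO_TESTS_COLLECTED:
+        print("no tests were found")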
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/fixtures.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/fixtures.rst
new file mode 100644
index 0000000000..d25979ab95
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/fixtures.rst
@@ -0,0 +1,455 @@
+.. _reference-fixtures:
+.. _fixture:
+.. _fixtures:
+.. _`@pytest.fixture`:
+.. _`pytest.fixture`:
+
+
+Fixtures reference
+========================================================
+
+.. seealso:: :ref:`about-fixtures`
+.. seealso:: :ref:`how-to-fixtures`
+
+
+.. currentmodule:: _pytest.python
+
+.. _`Dependency injection`: https://en.wikipedia.org/wiki/Dependency_injection
+
+
+Built-in fixtures
+-----------------
+
+:ref:`Fixtures <fixtures-api>` are defined using the :ref:`@pytest.fixture
+<pytest.fixture-api>` decorator. Pytest has several useful built-in fixtures:
+
+ :fixture:`capfd`
+ Capture, as text, output to file descriptors ``1`` and ``2``.
+
+ :fixture:`capfdbinary`
+ Capture, as bytes, output to file descriptors ``1`` and ``2``.
+
+ :fixture:`caplog`
+ Control logging and access log entries.
+
+ :fixture:`capsys`
+ Capture, as text, output to ``sys.stdout`` and ``sys.stderr``.
+
+ :fixture:`capsysbinary`
+ Capture, as bytes, output to ``sys.stdout`` and ``sys.stderr``.
+
+ :fixture:`cache`
+ Store and retrieve values across pytest runs.
+
+ :fixture:`doctest_namespace`
+        Provide a dict injected into the doctests namespace.
+
+ :fixture:`monkeypatch`
+ Temporarily modify classes, functions, dictionaries,
+ ``os.environ``, and other objects.
+
+ :fixture:`pytestconfig`
+ Access to configuration values, pluginmanager and plugin hooks.
+
+ :fixture:`record_property`
+ Add extra properties to the test.
+
+ :fixture:`record_testsuite_property`
+ Add extra properties to the test suite.
+
+ :fixture:`recwarn`
+ Record warnings emitted by test functions.
+
+ :fixture:`request`
+ Provide information on the executing test function.
+
+ :fixture:`testdir`
+ Provide a temporary test directory to aid in running, and
+ testing, pytest plugins.
+
+ :fixture:`tmp_path`
+ Provide a :class:`pathlib.Path` object to a temporary directory
+ which is unique to each test function.
+
+ :fixture:`tmp_path_factory`
+ Make session-scoped temporary directories and return
+ :class:`pathlib.Path` objects.
+
+ :fixture:`tmpdir`
+ Provide a :class:`py.path.local` object to a temporary
+ directory which is unique to each test function;
+ replaced by :fixture:`tmp_path`.
+
+ .. _`py.path.local`: https://py.readthedocs.io/en/latest/path.html
+
+ :fixture:`tmpdir_factory`
+ Make session-scoped temporary directories and return
+ :class:`py.path.local` objects;
+ replaced by :fixture:`tmp_path_factory`.
+
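+As a quick, illustrative sketch of a few of the fixtures listed above:
+
+.. code-block:: python
+
+    def test_builtin_fixtures(tmp_path, capsys):
+        # tmp_path is a pathlib.Path unique to this test
+        (tmp_path / "hello.txt").write_text("hi")
+
+        print("captured")
+        out, err = capsys.readouterr()
+
+        assert out == "captured\n"
+        assert (tmp_path / "hello.txt").read_text() == "hi"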
+
+.. _`conftest.py`:
+.. _`conftest`:
+
+Fixture availability
+---------------------
+
+Fixture availability is determined from the perspective of the test. A fixture
+is only available for tests to request if they are in the scope that fixture is
+defined in. If a fixture is defined inside a class, it can only be requested by
+tests inside that class. But if a fixture is defined inside the global scope of
+the module, than every test in that module, even if it's defined inside a class,
+can request it.
+
+Similarly, a test can also only be affected by an autouse fixture if that test
+is in the same scope that autouse fixture is defined in (see
+:ref:`autouse order`).
+
+A fixture can also request any other fixture, no matter where it's defined, so
+long as the test requesting them can see all fixtures involved.
+
+For example, here's a test file with a fixture (``outer``) that requests a
+fixture (``inner``) from a scope it wasn't defined in:
+
+.. literalinclude:: /example/fixtures/test_fixtures_request_different_scope.py
+
+From the tests' perspectives, they have no problem seeing each of the fixtures
+they're dependent on:
+
+.. image:: /example/fixtures/test_fixtures_request_different_scope.*
+ :align: center
+
+So when they run, ``outer`` will have no problem finding ``inner``, because
+pytest searched from the tests' perspectives.
+
+.. note::
+ The scope a fixture is defined in has no bearing on the order it will be
+ instantiated in: the order is mandated by the logic described
+ :ref:`here <fixture order>`.
+
+``conftest.py``: sharing fixtures across multiple files
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``conftest.py`` file serves as a means of providing fixtures for an entire
+directory. Fixtures defined in a ``conftest.py`` can be used by any test
+in that package without needing to import them (pytest will automatically
+discover them).
+
+You can have multiple nested directories/packages containing your tests, and
+each directory can have its own ``conftest.py`` with its own fixtures, adding on
+to the ones provided by the ``conftest.py`` files in parent directories.
+
+For example, given a test file structure like this:
+
+::
+
+ tests/
+ __init__.py
+
+ conftest.py
+ # content of tests/conftest.py
+ import pytest
+
+ @pytest.fixture
+ def order():
+ return []
+
+ @pytest.fixture
+ def top(order, innermost):
+ order.append("top")
+
+ test_top.py
+ # content of tests/test_top.py
+ import pytest
+
+ @pytest.fixture
+ def innermost(order):
+ order.append("innermost top")
+
+ def test_order(order, top):
+ assert order == ["innermost top", "top"]
+
+ subpackage/
+ __init__.py
+
+ conftest.py
+ # content of tests/subpackage/conftest.py
+ import pytest
+
+ @pytest.fixture
+ def mid(order):
+ order.append("mid subpackage")
+
+ test_subpackage.py
+ # content of tests/subpackage/test_subpackage.py
+ import pytest
+
+ @pytest.fixture
+ def innermost(order, mid):
+ order.append("innermost subpackage")
+
+ def test_order(order, top):
+ assert order == ["mid subpackage", "innermost subpackage", "top"]
+
+The boundaries of the scopes can be visualized like this:
+
+.. image:: /example/fixtures/fixture_availability.*
+ :align: center
+
+The directories become their own sort of scope where fixtures that are defined
+in a ``conftest.py`` file in that directory become available for that whole
+scope.
+
+Tests are allowed to search upward (stepping outside a circle) for fixtures, but
+can never go down (stepping inside a circle) to continue their search. So
+``tests/subpackage/test_subpackage.py::test_order`` would be able to find the
+``innermost`` fixture defined in ``tests/subpackage/test_subpackage.py``, but
+the one defined in ``tests/test_top.py`` would be unavailable to it because it
+would have to step down a level (step inside a circle) to find it.
+
+The first fixture the test finds is the one that will be used, so
+:ref:`fixtures can be overridden <override fixtures>` if you need to change or
+extend what one does for a particular scope.
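+
+A minimal sketch of such an override (the fixture name is illustrative): a
+``conftest.py`` deeper in the tree can redefine a fixture from a parent
+``conftest.py``, optionally reusing the original by requesting it by name.
+
+.. code-block:: python
+
+    # content of tests/conftest.py
+    import pytest
+
+
+    @pytest.fixture
+    def username():
+        return "base"
+
+.. code-block:: python
+
+    # content of tests/subpackage/conftest.py
+    import pytest
+
+
+    @pytest.fixture
+    def username(username):
+        # overrides the parent fixture for this directory only
+        return username + "-sub"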
+
+You can also use the ``conftest.py`` file to implement
+:ref:`local per-directory plugins <conftest.py plugins>`.
+
+Fixtures from third-party plugins
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Fixtures don't have to be defined in this structure to be available for tests,
+though. They can also be provided by third-party plugins that are installed, and
+this is how many pytest plugins operate. As long as those plugins are installed,
+the fixtures they provide can be requested from anywhere in your test suite.
+
+Because they're provided from outside the structure of your test suite,
+third-party plugins don't really provide a scope like ``conftest.py`` files and
+the directories in your test suite do. As a result, pytest will search for
+fixtures stepping out through scopes as explained previously, only reaching
+fixtures defined in plugins *last*.
+
+For example, given the following file structure:
+
+::
+
+ tests/
+ __init__.py
+
+ conftest.py
+ # content of tests/conftest.py
+ import pytest
+
+ @pytest.fixture
+ def order():
+ return []
+
+ subpackage/
+ __init__.py
+
+ conftest.py
+ # content of tests/subpackage/conftest.py
+ import pytest
+
+ @pytest.fixture(autouse=True)
+ def mid(order, b_fix):
+ order.append("mid subpackage")
+
+ test_subpackage.py
+ # content of tests/subpackage/test_subpackage.py
+ import pytest
+
+ @pytest.fixture
+ def inner(order, mid, a_fix):
+ order.append("inner subpackage")
+
+ def test_order(order, inner):
+ assert order == ["b_fix", "mid subpackage", "a_fix", "inner subpackage"]
+
+If ``plugin_a`` is installed and provides the fixture ``a_fix``, and
+``plugin_b`` is installed and provides the fixture ``b_fix``, then this is what
+the test's search for fixtures would look like:
+
+.. image:: /example/fixtures/fixture_availability_plugins.svg
+ :align: center
+
+pytest will only search for ``a_fix`` and ``b_fix`` in the plugins after
+searching for them first in the scopes inside ``tests/``.
+
+.. note::
+
+    pytest can tell you what fixtures are available for a given test if you call
+    ``pytest`` along with the test's name (or the scope it's in), and provide
+    the ``--fixtures`` flag, e.g. ``pytest --fixtures test_something.py``
+    (fixtures with names that start with ``_`` will only be shown if you also
+    provide the ``-v`` flag).
+
+
+.. _`fixture order`:
+
+Fixture instantiation order
+---------------------------
+
+When pytest wants to execute a test, once it knows what fixtures will be
+executed, it has to figure out the order they'll be executed in. To do this, it
+considers 3 factors:
+
+1. scope
+2. dependencies
+3. autouse
+
+Names of fixtures or tests, where they're defined, the order they're defined in,
+and the order fixtures are requested in have no bearing on execution order
+beyond coincidence. While pytest will try to make sure coincidences like these
+stay consistent from run to run, it's not something that should be depended on.
+If you want to control the order, it's safest to rely on these 3 things and make
+sure dependencies are clearly established.
+
+Higher-scoped fixtures are executed first
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Within a function request for fixtures, those of higher scopes (such as
+``session``) are executed before lower-scoped fixtures (such as ``function`` or
+``class``).
+
+Here's an example:
+
+.. literalinclude:: /example/fixtures/test_fixtures_order_scope.py
+
+The test will pass because the higher-scoped fixtures execute first.
+
+The order breaks down to this:
+
+.. image:: /example/fixtures/test_fixtures_order_scope.*
+ :align: center
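+
+As a minimal standalone sketch of the same idea (separate from the example file
+included above, and with made-up fixture names), a ``session``-scoped fixture
+is set up before a ``function``-scoped one requested by the same test, even if
+the test lists it last:
+
+.. code-block:: python
+
+    import pytest
+
+    order = []
+
+
+    @pytest.fixture(scope="session")
+    def sess():
+        order.append("sess")
+
+
+    @pytest.fixture
+    def func():
+        order.append("func")
+
+
+    def test_scope_order(func, sess):
+        # the session-scoped fixture is set up before the function-scoped
+        # one, regardless of the order they are requested in
+        assert order == ["sess", "func"]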
+
+Fixtures of the same order execute based on dependencies
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+When a fixture requests another fixture, the other fixture is executed first.
+So if fixture ``a`` requests fixture ``b``, fixture ``b`` will execute first,
+because ``a`` depends on ``b`` and can't operate without it. Even if ``a``
+doesn't need the result of ``b``, it can still request ``b`` if it needs to
+make sure that ``a`` is executed after ``b``.
+
+For example:
+
+.. literalinclude:: /example/fixtures/test_fixtures_order_dependencies.py
+
+If we map out what depends on what, we get something that looks like this:
+
+.. image:: /example/fixtures/test_fixtures_order_dependencies.*
+ :align: center
+
+The rules provided by each fixture (as to which fixture(s) each one has to come
+after) are comprehensive enough that the graph can be flattened to this:
+
+.. image:: /example/fixtures/test_fixtures_order_dependencies_flat.*
+ :align: center
+
+Enough information has to be provided through these requests in order for pytest
+to be able to figure out a clear, linear chain of dependencies, and as a result,
+an order of operations for a given test. If there's any ambiguity, and the order
+of operations can be interpreted more than one way, you should assume pytest
+could go with any one of those interpretations at any point.
+
+For example, if ``d`` didn't request ``c``, the graph would look like this:
+
+.. image:: /example/fixtures/test_fixtures_order_dependencies_unclear.*
+ :align: center
+
+Because nothing requested ``c`` other than ``g``, and ``g`` also requests ``f``,
+it's now unclear if ``c`` should go before/after ``f``, ``e``, or ``d``. The
+only rules that were set for ``c`` are that it must execute after ``b`` and
+before ``g``.
+
+pytest doesn't know where ``c`` should go in this case, so it should be assumed
+that it could go anywhere between ``g`` and ``b``.
+
+This isn't necessarily bad, but it's something to keep in mind. If the order
+the fixtures execute in could affect the behavior a test is targeting, or could
+otherwise influence the result of a test, then the order should be defined
+explicitly in a way that allows pytest to linearize/"flatten" that order.
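+
+When the order matters, the fix is to state the dependency explicitly. As a
+hypothetical sketch (these fixtures are illustrative, not the example files
+included above): if ``c`` must run before ``d``, have ``d`` request ``c``, even
+if ``d`` never uses its value:
+
+.. code-block:: python
+
+    import pytest
+
+    order = []
+
+
+    @pytest.fixture
+    def b():
+        order.append("b")
+
+
+    @pytest.fixture
+    def c(b):
+        order.append("c")
+
+
+    @pytest.fixture
+    def d(c):
+        # requesting c here (without using its value) is what guarantees
+        # that c is set up before d
+        order.append("d")
+
+
+    def test_order(d):
+        assert order == ["b", "c", "d"]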
+
+.. _`autouse order`:
+
+Autouse fixtures are executed first within their scope
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Autouse fixtures are assumed to apply to every test that could reference them,
+so they are executed before other fixtures in that scope. Fixtures that are
+requested by autouse fixtures effectively become autouse fixtures themselves for
+the tests that the real autouse fixture applies to.
+
+So if fixture ``a`` is autouse and fixture ``b`` is not, but fixture ``a``
+requests fixture ``b``, then fixture ``b`` will effectively be an autouse
+fixture as well, but only for the tests that ``a`` applies to.
+
+In the last example, the graph became unclear if ``d`` didn't request ``c``. But
+if ``c`` was autouse, then ``b`` and ``a`` would effectively also be autouse
+because ``c`` depends on them. As a result, they would all be shifted above
+non-autouse fixtures within that scope.
+
+So if the test file looked like this:
+
+.. literalinclude:: /example/fixtures/test_fixtures_order_autouse.py
+
+the graph would look like this:
+
+.. image:: /example/fixtures/test_fixtures_order_autouse.*
+ :align: center
+
+Because ``c`` can now be put above ``d`` in the graph, pytest can once again
+linearize the graph to this:
+
+.. image:: /example/fixtures/test_fixtures_order_autouse_flat.*
+ :align: center
+
+In this example, ``c`` makes ``b`` and ``a`` effectively autouse fixtures as
+well.
+
+Be careful with autouse, though, as an autouse fixture will automatically
+execute for every test that can reach it, even if they don't request it. For
+example, consider this file:
+
+.. literalinclude:: /example/fixtures/test_fixtures_order_autouse_multiple_scopes.py
+
+Even though nothing in ``TestClassWithoutC1Request`` is requesting ``c1``, it
+is still executed for the tests inside it:
+
+.. image:: /example/fixtures/test_fixtures_order_autouse_multiple_scopes.*
+ :align: center
+
+But just because one autouse fixture requested a non-autouse fixture, that
+doesn't mean the non-autouse fixture becomes an autouse fixture for all contexts
+that it can apply to. It only effectively becomes an autouse fixture for the
+contexts the real autouse fixture (the one that requested the non-autouse
+fixture) can apply to.
+
+For example, take a look at this test file:
+
+.. literalinclude:: /example/fixtures/test_fixtures_order_autouse_temp_effects.py
+
+It would break down to something like this:
+
+.. image:: /example/fixtures/test_fixtures_order_autouse_temp_effects.*
+ :align: center
+
+For ``test_req`` and ``test_no_req`` inside ``TestClassWithAutouse``, ``c3``
+effectively makes ``c2`` an autouse fixture, which is why ``c2`` and ``c3`` are
+executed for both tests, despite not being requested, and why ``c2`` and ``c3``
+are executed before ``c1`` for ``test_req``.
+
+If this made ``c2`` an *actual* autouse fixture, then ``c2`` would also execute
+for the tests inside ``TestClassWithoutAutouse``, since they can reference
+``c2`` if they wanted to. But it doesn't, because from the perspective of the
+``TestClassWithoutAutouse`` tests, ``c2`` isn't an autouse fixture, since they
+can't see ``c3``.
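+
+As a minimal standalone sketch of that principle (using made-up names rather
+than the example file included above), an autouse fixture defined in one test
+class only drags the fixtures it requests into the tests of that class:
+
+.. code-block:: python
+
+    import pytest
+
+    order = []
+
+
+    @pytest.fixture
+    def tracked():
+        order.append("tracked")
+
+
+    class TestWithAutouse:
+        @pytest.fixture(autouse=True)
+        def auto(self, tracked):
+            order.append("auto")
+
+        def test_pulled_in(self):
+            # "tracked" ran because the class's autouse fixture requested it
+            assert order == ["tracked", "auto"]
+
+
+    class TestWithoutAutouse:
+        def test_not_pulled_in(self):
+            # neither "auto" nor "tracked" runs for this test: the autouse
+            # fixture above only applies to tests inside TestWithAutouse
+            pass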
+
+
+.. note::
+
+    pytest can tell you what order the fixtures will execute in for a given
+    test if you run ``pytest`` along with the test's name (or the scope it's
+    in), and provide the ``--setup-plan`` flag, e.g.
+    ``pytest --setup-plan test_something.py`` (fixtures with names that start
+    with ``_`` will only be shown if you also provide the ``-v`` flag).
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/index.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/index.rst
new file mode 100644
index 0000000000..d964840031
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/index.rst
@@ -0,0 +1,15 @@
+:orphan:
+
+.. _reference:
+
+Reference guides
+================
+
+.. toctree::
+ :maxdepth: 1
+
+ fixtures
+ plugin_list
+ customize
+ reference
+ exit-codes
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/plugin_list.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/plugin_list.rst
new file mode 100644
index 0000000000..ebf4009136
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/plugin_list.rst
@@ -0,0 +1,7728 @@
+
+.. _plugin-list:
+
+Plugin List
+===========
+
+PyPI projects that match "pytest-\*" are considered plugins and are listed
+automatically. Packages classified as inactive are excluded.
+
+.. The following conditional uses a different format for this list when
+ creating a PDF, because otherwise the table gets far too wide for the
+ page.
+
+This list contains 963 plugins.
+
+.. only:: not latex
+
+ =============================================== ======================================================================================================================================================================== ============== ===================== ================================================
+ name summary last release status requires
+ =============================================== ======================================================================================================================================================================== ============== ===================== ================================================
+ :pypi:`pytest-accept` A pytest-plugin for updating doctest outputs Nov 22, 2021 N/A pytest (>=6,<7)
+ :pypi:`pytest-adaptavist` pytest plugin for generating test execution results within Jira Test Management (tm4j) Nov 30, 2021 N/A pytest (>=5.4.0)
+   :pypi:`pytest-addons-test` A plugin for testing pytest Aug 02, 2021 N/A pytest (>=6.2.4,<7.0.0)
+ :pypi:`pytest-adf` Pytest plugin for writing Azure Data Factory integration tests May 10, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-adf-azure-identity` Pytest plugin for writing Azure Data Factory integration tests Mar 06, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-agent` Service that exposes a REST API that can be used to interract remotely with Pytest. It is shipped with a dashboard that enables running tests in a more convenient way. Nov 25, 2021 N/A N/A
+ :pypi:`pytest-aggreport` pytest plugin for pytest-repeat that generate aggregate report of the same test cases with additional statistics details. Mar 07, 2021 4 - Beta pytest (>=6.2.2)
+ :pypi:`pytest-aio` Pytest plugin for testing async python code Oct 20, 2021 4 - Beta pytest
+ :pypi:`pytest-aiofiles` pytest fixtures for writing aiofiles tests with pyfakefs May 14, 2017 5 - Production/Stable N/A
+ :pypi:`pytest-aiohttp` pytest plugin for aiohttp support Dec 05, 2017 N/A pytest
+ :pypi:`pytest-aiohttp-client` Pytest \`client\` fixture for the Aiohttp Nov 01, 2020 N/A pytest (>=6)
+ :pypi:`pytest-aioresponses` py.test integration for aioresponses Jul 29, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-aioworkers` A plugin to test aioworkers project with pytest Dec 04, 2019 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-airflow` pytest support for airflow. Apr 03, 2019 3 - Alpha pytest (>=4.4.0)
+ :pypi:`pytest-airflow-utils` Nov 15, 2021 N/A N/A
+ :pypi:`pytest-alembic` A pytest plugin for verifying alembic migrations. Dec 02, 2021 N/A pytest (>=1.0)
+ :pypi:`pytest-allclose` Pytest fixture extending Numpy's allclose function Jul 30, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-allure-adaptor` Plugin for py.test to generate allure xml reports Jan 10, 2018 N/A pytest (>=2.7.3)
+ :pypi:`pytest-allure-adaptor2` Plugin for py.test to generate allure xml reports Oct 14, 2020 N/A pytest (>=2.7.3)
+ :pypi:`pytest-allure-dsl` pytest plugin to test case doc string dls instructions Oct 25, 2020 4 - Beta pytest
+ :pypi:`pytest-allure-spec-coverage` The pytest plugin aimed to display test coverage of the specs(requirements) in Allure Oct 26, 2021 N/A pytest
+ :pypi:`pytest-alphamoon` Static code checks used at Alphamoon Oct 21, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-android` This fixture provides a configured "driver" for Android Automated Testing, using uiautomator2. Feb 21, 2019 3 - Alpha pytest
+ :pypi:`pytest-anki` A pytest plugin for testing Anki add-ons Oct 14, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-annotate` pytest-annotate: Generate PyAnnotate annotations from your pytest tests. Nov 29, 2021 3 - Alpha pytest (<7.0.0,>=3.2.0)
+ :pypi:`pytest-ansible` Plugin for py.test to simplify calling ansible modules from tests or fixtures May 25, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-ansible-playbook` Pytest fixture which runs given ansible playbook file. Mar 08, 2019 4 - Beta N/A
+ :pypi:`pytest-ansible-playbook-runner` Pytest fixture which runs given ansible playbook file. Dec 02, 2020 4 - Beta pytest (>=3.1.0)
+ :pypi:`pytest-antilru` Bust functools.lru_cache when running pytest to avoid test pollution Apr 11, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-anyio` The pytest anyio plugin is built into anyio. You don't need this package. Jun 29, 2021 N/A pytest
+ :pypi:`pytest-anything` Pytest fixtures to assert anything and something Feb 18, 2021 N/A N/A
+ :pypi:`pytest-aoc` Downloads puzzle inputs for Advent of Code and synthesizes PyTest fixtures Nov 23, 2021 N/A pytest ; extra == 'test'
+ :pypi:`pytest-api` PyTest-API Python Web Framework built for testing purposes. May 04, 2021 N/A N/A
+ :pypi:`pytest-apistellar` apistellar plugin for pytest. Jun 18, 2019 N/A N/A
+ :pypi:`pytest-appengine` AppEngine integration that works well with pytest-django Feb 27, 2017 N/A N/A
+ :pypi:`pytest-appium` Pytest plugin for appium Dec 05, 2019 N/A N/A
+ :pypi:`pytest-approvaltests` A plugin to use approvaltests with pytest Feb 07, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-argus` pyest results colection plugin Jun 24, 2021 5 - Production/Stable pytest (>=6.2.4)
+ :pypi:`pytest-arraydiff` pytest plugin to help with comparing array output from tests Dec 06, 2018 4 - Beta pytest
+ :pypi:`pytest-asgi-server` Convenient ASGI client/server fixtures for Pytest Dec 12, 2020 N/A pytest (>=5.4.1)
+ :pypi:`pytest-asptest` test Answer Set Programming programs Apr 28, 2018 4 - Beta N/A
+ :pypi:`pytest-assertutil` pytest-assertutil May 10, 2019 N/A N/A
+ :pypi:`pytest-assert-utils` Useful assertion utilities for use with pytest Sep 21, 2021 3 - Alpha N/A
+ :pypi:`pytest-assume` A pytest plugin that allows multiple failures per test Jun 24, 2021 N/A pytest (>=2.7)
+ :pypi:`pytest-ast-back-to-python` A plugin for pytest devs to view how assertion rewriting recodes the AST Sep 29, 2019 4 - Beta N/A
+ :pypi:`pytest-astropy` Meta-package containing dependencies for testing Sep 21, 2021 5 - Production/Stable pytest (>=4.6)
+ :pypi:`pytest-astropy-header` pytest plugin to add diagnostic information to the header of the test output Dec 18, 2019 3 - Alpha pytest (>=2.8)
+ :pypi:`pytest-ast-transformer` May 04, 2019 3 - Alpha pytest
+ :pypi:`pytest-asyncio` Pytest support for asyncio. Oct 15, 2021 4 - Beta pytest (>=5.4.0)
+ :pypi:`pytest-asyncio-cooperative` Run all your asynchronous tests cooperatively. Oct 12, 2021 4 - Beta N/A
+ :pypi:`pytest-asyncio-network-simulator` pytest-asyncio-network-simulator: Plugin for pytest for simulator the network in tests Jul 31, 2018 3 - Alpha pytest (<3.7.0,>=3.3.2)
+ :pypi:`pytest-async-mongodb` pytest plugin for async MongoDB Oct 18, 2017 5 - Production/Stable pytest (>=2.5.2)
+ :pypi:`pytest-async-sqlalchemy` Database testing fixtures using the SQLAlchemy asyncio API Oct 07, 2021 4 - Beta pytest (>=6.0.0)
+ :pypi:`pytest-atomic` Skip rest of tests if previous test failed. Nov 24, 2018 4 - Beta N/A
+ :pypi:`pytest-attrib` pytest plugin to select tests based on attributes similar to the nose-attrib plugin May 24, 2016 4 - Beta N/A
+ :pypi:`pytest-austin` Austin plugin for pytest Oct 11, 2020 4 - Beta N/A
+ :pypi:`pytest-autochecklog` automatically check condition and log all the checks Apr 25, 2015 4 - Beta N/A
+ :pypi:`pytest-automation` pytest plugin for building a test suite, using YAML files to extend pytest parameterize functionality. Oct 01, 2021 N/A pytest
+ :pypi:`pytest-automock` Pytest plugin for automatical mocks creation Apr 22, 2020 N/A pytest ; extra == 'dev'
+ :pypi:`pytest-auto-parametrize` pytest plugin: avoid repeating arguments in parametrize Oct 02, 2016 3 - Alpha N/A
+ :pypi:`pytest-autotest` This fixture provides a configured "driver" for Android Automated Testing, using uiautomator2. Aug 25, 2021 N/A pytest
+ :pypi:`pytest-avoidance` Makes pytest skip tests that don not need rerunning May 23, 2019 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-aws` pytest plugin for testing AWS resource configurations Oct 04, 2017 4 - Beta N/A
+ :pypi:`pytest-aws-config` Protect your AWS credentials in unit tests May 28, 2021 N/A N/A
+ :pypi:`pytest-axe` pytest plugin for axe-selenium-python Nov 12, 2018 N/A pytest (>=3.0.0)
+ :pypi:`pytest-azurepipelines` Formatting PyTest output for Azure Pipelines UI Jul 23, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-bandit` A bandit plugin for pytest Feb 23, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-base-url` pytest plugin for URL based testing Jun 19, 2020 5 - Production/Stable pytest (>=2.7.3)
+ :pypi:`pytest-bdd` BDD for pytest Oct 25, 2021 6 - Mature pytest (>=4.3)
+ :pypi:`pytest-bdd-splinter` Common steps for pytest bdd and splinter integration Aug 12, 2019 5 - Production/Stable pytest (>=4.0.0)
+ :pypi:`pytest-bdd-web` A simple plugin to use with pytest Jan 02, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-bdd-wrappers` Feb 11, 2020 2 - Pre-Alpha N/A
+ :pypi:`pytest-beakerlib` A pytest plugin that reports test results to the BeakerLib framework Mar 17, 2017 5 - Production/Stable pytest
+ :pypi:`pytest-beds` Fixtures for testing Google Appengine (GAE) apps Jun 07, 2016 4 - Beta N/A
+ :pypi:`pytest-bench` Benchmark utility that plugs into pytest. Jul 21, 2014 3 - Alpha N/A
+ :pypi:`pytest-benchmark` A \`\`pytest\`\` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer. Apr 17, 2021 5 - Production/Stable pytest (>=3.8)
+ :pypi:`pytest-bg-process` Pytest plugin to initialize background process Aug 17, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-bigchaindb` A BigchainDB plugin for pytest. Aug 17, 2021 4 - Beta N/A
+ :pypi:`pytest-bigquery-mock` Provides a mock fixture for python bigquery client Aug 05, 2021 N/A pytest (>=5.0)
+ :pypi:`pytest-black` A pytest plugin to enable format checking with black Oct 05, 2020 4 - Beta N/A
+ :pypi:`pytest-black-multipy` Allow '--black' on older Pythons Jan 14, 2021 5 - Production/Stable pytest (!=3.7.3,>=3.5) ; extra == 'testing'
+ :pypi:`pytest-blame` A pytest plugin helps developers to debug by providing useful commits history. May 04, 2019 N/A pytest (>=4.4.0)
+ :pypi:`pytest-blender` Blender Pytest plugin. Oct 29, 2021 N/A pytest (==6.2.5) ; extra == 'dev'
+ :pypi:`pytest-blink1` Pytest plugin to emit notifications via the Blink(1) RGB LED Jan 07, 2018 4 - Beta N/A
+ :pypi:`pytest-blockage` Disable network requests during a test run. Feb 13, 2019 N/A pytest
+ :pypi:`pytest-blocker` pytest plugin to mark a test as blocker and skip all other tests Sep 07, 2015 4 - Beta N/A
+ :pypi:`pytest-board` Local continuous test runner with pytest and watchdog. Jan 20, 2019 N/A N/A
+ :pypi:`pytest-bpdb` A py.test plug-in to enable drop to bpdb debugger on test failure. Jan 19, 2015 2 - Pre-Alpha N/A
+ :pypi:`pytest-bravado` Pytest-bravado automatically generates from OpenAPI specification client fixtures. Jul 19, 2021 N/A N/A
+ :pypi:`pytest-breakword` Use breakword with pytest Aug 04, 2021 N/A pytest (>=6.2.4,<7.0.0)
+ :pypi:`pytest-breed-adapter` A simple plugin to connect with breed-server Nov 07, 2018 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-briefcase` A pytest plugin for running tests on a Briefcase project. Jun 14, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-browser` A pytest plugin for console based browser test selection just after the collection phase Dec 10, 2016 3 - Alpha N/A
+ :pypi:`pytest-browsermob-proxy` BrowserMob proxy plugin for py.test. Jun 11, 2013 4 - Beta N/A
+ :pypi:`pytest-browserstack-local` \`\`py.test\`\` plugin to run \`\`BrowserStackLocal\`\` in background. Feb 09, 2018 N/A N/A
+ :pypi:`pytest-bug` Pytest plugin for marking tests as a bug Jun 02, 2020 5 - Production/Stable pytest (>=3.6.0)
+ :pypi:`pytest-bugtong-tag` pytest-bugtong-tag is a plugin for pytest Apr 23, 2021 N/A N/A
+ :pypi:`pytest-bugzilla` py.test bugzilla integration plugin May 05, 2010 4 - Beta N/A
+ :pypi:`pytest-bugzilla-notifier` A plugin that allows you to execute create, update, and read information from BugZilla bugs Jun 15, 2018 4 - Beta pytest (>=2.9.2)
+ :pypi:`pytest-buildkite` Plugin for pytest that automatically publishes coverage and pytest report annotations to Buildkite. Jul 13, 2019 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-builtin-types` Nov 17, 2021 N/A pytest
+ :pypi:`pytest-bwrap` Run your tests in Bubblewrap sandboxes Oct 26, 2018 3 - Alpha N/A
+ :pypi:`pytest-cache` pytest plugin with mechanisms for caching across test runs Jun 04, 2013 3 - Alpha N/A
+ :pypi:`pytest-cache-assert` Cache assertion data to simplify regression testing of complex serializable data Nov 03, 2021 4 - Beta pytest (>=5)
+ :pypi:`pytest-cagoule` Pytest plugin to only run tests affected by changes Jan 01, 2020 3 - Alpha N/A
+ :pypi:`pytest-camel-collect` Enable CamelCase-aware pytest class collection Aug 02, 2020 N/A pytest (>=2.9)
+ :pypi:`pytest-canonical-data` A plugin which allows to compare results with canonical results, based on previous runs May 08, 2020 2 - Pre-Alpha pytest (>=3.5.0)
+ :pypi:`pytest-caprng` A plugin that replays pRNG state on failure. May 02, 2018 4 - Beta N/A
+ :pypi:`pytest-capture-deprecatedwarnings` pytest plugin to capture all deprecatedwarnings and put them in one file Apr 30, 2019 N/A N/A
+ :pypi:`pytest-capturelogs` A sample Python project Sep 11, 2021 3 - Alpha N/A
+ :pypi:`pytest-cases` Separate test code from test cases in pytest. Nov 08, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-cassandra` Cassandra CCM Test Fixtures for pytest Nov 04, 2017 1 - Planning N/A
+ :pypi:`pytest-catchlog` py.test plugin to catch log messages. This is a fork of pytest-capturelog. Jan 24, 2016 4 - Beta pytest (>=2.6)
+ :pypi:`pytest-catch-server` Pytest plugin with server for catching HTTP requests. Dec 12, 2019 5 - Production/Stable N/A
+ :pypi:`pytest-celery` pytest-celery a shim pytest plugin to enable celery.contrib.pytest May 06, 2021 N/A N/A
+ :pypi:`pytest-chainmaker` pytest plugin for chainmaker Oct 15, 2021 N/A N/A
+ :pypi:`pytest-chalice` A set of py.test fixtures for AWS Chalice Jul 01, 2020 4 - Beta N/A
+ :pypi:`pytest-change-report` turn . into √,turn F into x Sep 14, 2020 N/A pytest
+ :pypi:`pytest-chdir` A pytest fixture for changing current working directory Jan 28, 2020 N/A pytest (>=5.0.0,<6.0.0)
+ :pypi:`pytest-checkdocs` check the README when running tests Jul 31, 2021 5 - Production/Stable pytest (>=4.6) ; extra == 'testing'
+ :pypi:`pytest-checkipdb` plugin to check if there are ipdb debugs left Jul 22, 2020 5 - Production/Stable pytest (>=2.9.2)
+ :pypi:`pytest-check-links` Check links in files Jul 29, 2020 N/A pytest (>=4.6)
+ :pypi:`pytest-check-mk` pytest plugin to test Check_MK checks Nov 19, 2015 4 - Beta pytest
+ :pypi:`pytest-circleci` py.test plugin for CircleCI May 03, 2019 N/A N/A
+ :pypi:`pytest-circleci-parallelized` Parallelize pytest across CircleCI workers. Mar 26, 2019 N/A N/A
+ :pypi:`pytest-ckan` Backport of CKAN 2.9 pytest plugin and fixtures to CAKN 2.8 Apr 28, 2020 4 - Beta pytest
+ :pypi:`pytest-clarity` A plugin providing an alternative, colourful diff output for failing assertions. Jun 11, 2021 N/A N/A
+ :pypi:`pytest-cldf` Easy quality control for CLDF datasets using pytest May 06, 2019 N/A N/A
+ :pypi:`pytest-click` Py.test plugin for Click Aug 29, 2020 5 - Production/Stable pytest (>=5.0)
+ :pypi:`pytest-clld` Nov 29, 2021 N/A pytest (>=3.6)
+ :pypi:`pytest-cloud` Distributed tests planner plugin for pytest testing framework. Oct 05, 2020 6 - Mature N/A
+ :pypi:`pytest-cloudflare-worker` pytest plugin for testing cloudflare workers Mar 30, 2021 4 - Beta pytest (>=6.0.0)
+ :pypi:`pytest-cobra` PyTest plugin for testing Smart Contracts for Ethereum blockchain. Jun 29, 2019 3 - Alpha pytest (<4.0.0,>=3.7.1)
+ :pypi:`pytest-codeblocks` Test code blocks in your READMEs Oct 13, 2021 4 - Beta pytest (>=6)
+ :pypi:`pytest-codecheckers` pytest plugin to add source code sanity checks (pep8 and friends) Feb 13, 2010 N/A N/A
+ :pypi:`pytest-codecov` Pytest plugin for uploading pytest-cov results to codecov.io Oct 27, 2021 4 - Beta pytest (>=4.6.0)
+ :pypi:`pytest-codegen` Automatically create pytest test signatures Aug 23, 2020 2 - Pre-Alpha N/A
+ :pypi:`pytest-codestyle` pytest plugin to run pycodestyle Mar 23, 2020 3 - Alpha N/A
+ :pypi:`pytest-collect-formatter` Formatter for pytest collect output Mar 29, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-collect-formatter2` Formatter for pytest collect output May 31, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-colordots` Colorizes the progress indicators Oct 06, 2017 5 - Production/Stable N/A
+ :pypi:`pytest-commander` An interactive GUI test runner for PyTest Aug 17, 2021 N/A pytest (<7.0.0,>=6.2.4)
+ :pypi:`pytest-common-subject` pytest framework for testing different aspects of a common method Nov 12, 2020 N/A pytest (>=3.6,<7)
+ :pypi:`pytest-concurrent` Concurrently execute test cases with multithread, multiprocess and gevent Jan 12, 2019 4 - Beta pytest (>=3.1.1)
+ :pypi:`pytest-config` Base configurations and utilities for developing your Python project test suite with pytest. Nov 07, 2014 5 - Production/Stable N/A
+ :pypi:`pytest-confluence-report` Package stands for pytest plugin to upload results into Confluence page. Nov 06, 2020 N/A N/A
+ :pypi:`pytest-console-scripts` Pytest plugin for testing console scripts Sep 28, 2021 4 - Beta N/A
+ :pypi:`pytest-consul` pytest plugin with fixtures for testing consul aware apps Nov 24, 2018 3 - Alpha pytest
+ :pypi:`pytest-container` Pytest fixtures for writing container based tests Nov 19, 2021 3 - Alpha pytest (>=3.10)
+ :pypi:`pytest-contextfixture` Define pytest fixtures as context managers. Mar 12, 2013 4 - Beta N/A
+ :pypi:`pytest-contexts` A plugin to run tests written with the Contexts framework using pytest May 19, 2021 4 - Beta N/A
+ :pypi:`pytest-cookies` The pytest plugin for your Cookiecutter templates. 🪠May 24, 2021 5 - Production/Stable pytest (>=3.3.0)
+ :pypi:`pytest-couchdbkit` py.test extension for per-test couchdb databases using couchdbkit Apr 17, 2012 N/A N/A
+ :pypi:`pytest-count` count erros and send email Jan 12, 2018 4 - Beta N/A
+ :pypi:`pytest-cov` Pytest plugin for measuring coverage. Oct 04, 2021 5 - Production/Stable pytest (>=4.6)
+ :pypi:`pytest-cover` Pytest plugin for measuring coverage. Forked from \`pytest-cov\`. Aug 01, 2015 5 - Production/Stable N/A
+ :pypi:`pytest-coverage` Jun 17, 2015 N/A N/A
+ :pypi:`pytest-coverage-context` Coverage dynamic context support for PyTest, including sub-processes Jan 04, 2021 4 - Beta pytest (>=6.1.0)
+ :pypi:`pytest-cov-exclude` Pytest plugin for excluding tests based on coverage data Apr 29, 2016 4 - Beta pytest (>=2.8.0,<2.9.0); extra == 'dev'
+ :pypi:`pytest-cpp` Use pytest's runner to discover and execute C++ tests Dec 03, 2021 5 - Production/Stable pytest (!=5.4.0,!=5.4.1)
+ :pypi:`pytest-cram` Run cram tests with pytest. Aug 08, 2020 N/A N/A
+ :pypi:`pytest-crate` Manages CrateDB instances during your integration tests May 28, 2019 3 - Alpha pytest (>=4.0)
+ :pypi:`pytest-cricri` A Cricri plugin for pytest. Jan 27, 2018 N/A pytest
+ :pypi:`pytest-crontab` add crontab task in crontab Dec 09, 2019 N/A N/A
+ :pypi:`pytest-csv` CSV output for pytest. Apr 22, 2021 N/A pytest (>=6.0)
+ :pypi:`pytest-curio` Pytest support for curio. Oct 07, 2020 N/A N/A
+ :pypi:`pytest-curl-report` pytest plugin to generate curl command line report Dec 11, 2016 4 - Beta N/A
+ :pypi:`pytest-custom-concurrency` Custom grouping concurrence for pytest Feb 08, 2021 N/A N/A
+ :pypi:`pytest-custom-exit-code` Exit pytest test session with custom exit code in different scenarios Aug 07, 2019 4 - Beta pytest (>=4.0.2)
+ :pypi:`pytest-custom-nodeid` Custom grouping for pytest-xdist, rename test cases name and test cases nodeid, support allure report Mar 07, 2021 N/A N/A
+ :pypi:`pytest-custom-report` Configure the symbols displayed for test outcomes Jan 30, 2019 N/A pytest
+ :pypi:`pytest-custom-scheduling` Custom grouping for pytest-xdist, rename test cases name and test cases nodeid, support allure report Mar 01, 2021 N/A N/A
+ :pypi:`pytest-cython` A plugin for testing Cython extension modules Jan 26, 2021 4 - Beta pytest (>=2.7.3)
+ :pypi:`pytest-darker` A pytest plugin for checking of modified code using Darker Aug 16, 2020 N/A pytest (>=6.0.1) ; extra == 'test'
+ :pypi:`pytest-dash` pytest fixtures to run dash applications. Mar 18, 2019 N/A N/A
+ :pypi:`pytest-data` Useful functions for managing data for pytest fixtures Nov 01, 2016 5 - Production/Stable N/A
+ :pypi:`pytest-databricks` Pytest plugin for remote Databricks notebooks testing Jul 29, 2020 N/A pytest
+ :pypi:`pytest-datadir` pytest plugin for test data directories and files Oct 22, 2019 5 - Production/Stable pytest (>=2.7.0)
+ :pypi:`pytest-datadir-mgr` Manager for test data providing downloads, caching of generated files, and a context for temp directories. Aug 16, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-datadir-ng` Fixtures for pytest allowing test functions/methods to easily retrieve test resources from the local filesystem. Dec 25, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-data-file` Fixture "data" and "case_data" for test from yaml file Dec 04, 2019 N/A N/A
+ :pypi:`pytest-datafiles` py.test plugin to create a 'tmpdir' containing predefined files/directories. Oct 07, 2018 5 - Production/Stable pytest (>=3.6)
+ :pypi:`pytest-datafixtures` Data fixtures for pytest made simple Dec 05, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-data-from-files` pytest plugin to provide data from files loaded automatically Oct 13, 2021 4 - Beta pytest
+ :pypi:`pytest-dataplugin` A pytest plugin for managing an archive of test data. Sep 16, 2017 1 - Planning N/A
+ :pypi:`pytest-datarecorder` A py.test plugin recording and comparing test output. Apr 20, 2020 5 - Production/Stable pytest
+ :pypi:`pytest-datatest` A pytest plugin for test driven data-wrangling (this is the development version of datatest's pytest integration). Oct 15, 2020 4 - Beta pytest (>=3.3)
+ :pypi:`pytest-db` Session scope fixture "db" for mysql query or change Dec 04, 2019 N/A N/A
+ :pypi:`pytest-dbfixtures` Databases fixtures plugin for py.test. Dec 07, 2016 4 - Beta N/A
+ :pypi:`pytest-db-plugin` Nov 27, 2021 N/A pytest (>=5.0)
+ :pypi:`pytest-dbt-adapter` A pytest plugin for testing dbt adapter plugins Nov 24, 2021 N/A pytest (<7,>=6)
+ :pypi:`pytest-dbus-notification` D-BUS notifications for pytest results. Mar 05, 2014 5 - Production/Stable N/A
+ :pypi:`pytest-deadfixtures` A simple plugin to list unused fixtures in pytest Jul 23, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-deepcov` deepcov Mar 30, 2021 N/A N/A
+ :pypi:`pytest-defer` Aug 24, 2021 N/A N/A
+   :pypi:`pytest-demo-plugin` pytest demo plugin May 15, 2021 N/A N/A
+ :pypi:`pytest-dependency` Manage dependencies of tests Feb 14, 2020 4 - Beta N/A
+ :pypi:`pytest-depends` Tests that depend on other tests Apr 05, 2020 5 - Production/Stable pytest (>=3)
+ :pypi:`pytest-deprecate` Mark tests as testing a deprecated feature with a warning note. Jul 01, 2019 N/A N/A
+ :pypi:`pytest-describe` Describe-style plugin for pytest Nov 13, 2021 4 - Beta pytest (>=4.0.0)
+ :pypi:`pytest-describe-it` plugin for rich text descriptions Jul 19, 2019 4 - Beta pytest
+ :pypi:`pytest-devpi-server` DevPI server fixture for py.test May 28, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-diamond` pytest plugin for diamond Aug 31, 2015 4 - Beta N/A
+ :pypi:`pytest-dicom` pytest plugin to provide DICOM fixtures Dec 19, 2018 3 - Alpha pytest
+ :pypi:`pytest-dictsdiff` Jul 26, 2019 N/A N/A
+ :pypi:`pytest-diff` A simple plugin to use with pytest Mar 30, 2019 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-disable` pytest plugin to disable a test and skip it from testrun Sep 10, 2015 4 - Beta N/A
+ :pypi:`pytest-disable-plugin` Disable plugins per test Feb 28, 2019 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-discord` A pytest plugin to notify test results to a Discord channel. Mar 20, 2021 3 - Alpha pytest (!=6.0.0,<7,>=3.3.2)
+ :pypi:`pytest-django` A Django plugin for pytest. Dec 02, 2021 5 - Production/Stable pytest (>=5.4.0)
+ :pypi:`pytest-django-ahead` A Django plugin for pytest. Oct 27, 2016 5 - Production/Stable pytest (>=2.9)
+ :pypi:`pytest-djangoapp` Nice pytest plugin to help you with Django pluggable application testing. Aug 04, 2021 4 - Beta N/A
+ :pypi:`pytest-django-cache-xdist` A djangocachexdist plugin for pytest May 12, 2020 4 - Beta N/A
+ :pypi:`pytest-django-casperjs` Integrate CasperJS with your django tests as a pytest fixture. Mar 15, 2015 2 - Pre-Alpha N/A
+ :pypi:`pytest-django-dotenv` Pytest plugin used to setup environment variables with django-dotenv Nov 26, 2019 4 - Beta pytest (>=2.6.0)
+ :pypi:`pytest-django-factories` Factories for your Django models that can be used as Pytest fixtures. Nov 12, 2020 4 - Beta N/A
+ :pypi:`pytest-django-gcir` A Django plugin for pytest. Mar 06, 2018 5 - Production/Stable N/A
+ :pypi:`pytest-django-haystack` Cleanup your Haystack indexes between tests Sep 03, 2017 5 - Production/Stable pytest (>=2.3.4)
+ :pypi:`pytest-django-ifactory` A model instance factory for pytest-django Jan 13, 2021 3 - Alpha N/A
+ :pypi:`pytest-django-lite` The bare minimum to integrate py.test with Django. Jan 30, 2014 N/A N/A
+ :pypi:`pytest-django-liveserver-ssl` Jul 30, 2021 3 - Alpha N/A
+ :pypi:`pytest-django-model` A Simple Way to Test your Django Models Feb 14, 2019 4 - Beta N/A
+ :pypi:`pytest-django-ordering` A pytest plugin for preserving the order in which Django runs tests. Jul 25, 2019 5 - Production/Stable pytest (>=2.3.0)
+ :pypi:`pytest-django-queries` Generate performance reports from your django database performance tests. Mar 01, 2021 N/A N/A
+ :pypi:`pytest-djangorestframework` A djangorestframework plugin for pytest Aug 11, 2019 4 - Beta N/A
+ :pypi:`pytest-django-rq` A pytest plugin to help writing unit test for django-rq Apr 13, 2020 4 - Beta N/A
+ :pypi:`pytest-django-sqlcounts` py.test plugin for reporting the number of SQLs executed per django testcase. Jun 16, 2015 4 - Beta N/A
+ :pypi:`pytest-django-testing-postgresql` Use a temporary PostgreSQL database with pytest-django Dec 05, 2019 3 - Alpha N/A
+ :pypi:`pytest-doc` A documentation plugin for py.test. Jun 28, 2015 5 - Production/Stable N/A
+ :pypi:`pytest-docgen` An RST Documentation Generator for pytest-based test suites Apr 17, 2020 N/A N/A
+ :pypi:`pytest-docker` Simple pytest fixtures for Docker and docker-compose based tests Jun 14, 2021 N/A pytest (<7.0,>=4.0)
+ :pypi:`pytest-docker-butla` Jun 16, 2019 3 - Alpha N/A
+ :pypi:`pytest-dockerc` Run, manage and stop Docker Compose project from Docker API Oct 09, 2020 5 - Production/Stable pytest (>=3.0)
+ :pypi:`pytest-docker-compose` Manages Docker containers during your integration tests Jan 26, 2021 5 - Production/Stable pytest (>=3.3)
+ :pypi:`pytest-docker-db` A plugin to use docker databases for pytests Mar 20, 2021 5 - Production/Stable pytest (>=3.1.1)
+ :pypi:`pytest-docker-fixtures` pytest docker fixtures Nov 23, 2021 3 - Alpha N/A
+ :pypi:`pytest-docker-git-fixtures` Pytest fixtures for testing with git scm. Mar 11, 2021 4 - Beta pytest
+ :pypi:`pytest-docker-pexpect` pytest plugin for writing functional tests with pexpect and docker Jan 14, 2019 N/A pytest
+ :pypi:`pytest-docker-postgresql` A simple plugin to use with pytest Sep 24, 2019 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-docker-py` Easy to use, simple to extend, pytest plugin that minimally leverages docker-py. Nov 27, 2018 N/A pytest (==4.0.0)
+ :pypi:`pytest-docker-registry-fixtures` Pytest fixtures for testing with docker registries. Mar 04, 2021 4 - Beta pytest
+ :pypi:`pytest-docker-tools` Docker integration tests for pytest Jul 23, 2021 4 - Beta pytest (>=6.0.1,<7.0.0)
+ :pypi:`pytest-docs` Documentation tool for pytest Nov 11, 2018 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-docstyle` pytest plugin to run pydocstyle Mar 23, 2020 3 - Alpha N/A
+ :pypi:`pytest-doctest-custom` A py.test plugin for customizing string representations of doctest results. Jul 25, 2016 4 - Beta N/A
+ :pypi:`pytest-doctest-ellipsis-markers` Setup additional values for ELLIPSIS_MARKER for doctests Jan 12, 2018 4 - Beta N/A
+ :pypi:`pytest-doctest-import` A simple pytest plugin to import names and add them to the doctest namespace. Nov 13, 2018 4 - Beta pytest (>=3.3.0)
+ :pypi:`pytest-doctestplus` Pytest plugin with advanced doctest features. Nov 16, 2021 3 - Alpha pytest (>=4.6)
+ :pypi:`pytest-doctest-ufunc` A plugin to run doctests in docstrings of Numpy ufuncs Aug 02, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-dolphin` Some extra stuff that we use ininternally Nov 30, 2016 4 - Beta pytest (==3.0.4)
+ :pypi:`pytest-doorstop` A pytest plugin for adding test results into doorstop items. Jun 09, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-dotenv` A py.test plugin that parses environment files before running tests Jun 16, 2020 4 - Beta pytest (>=5.0.0)
+ :pypi:`pytest-drf` A Django REST framework plugin for pytest. Nov 12, 2020 5 - Production/Stable pytest (>=3.6)
+ :pypi:`pytest-drivings` Tool to allow webdriver automation to be ran locally or remotely Jan 13, 2021 N/A N/A
+ :pypi:`pytest-drop-dup-tests` A Pytest plugin to drop duplicated tests during collection May 23, 2020 4 - Beta pytest (>=2.7)
+ :pypi:`pytest-dummynet` A py.test plugin providing access to a dummynet. Oct 13, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-dump2json` A pytest plugin for dumping test results to json. Jun 29, 2015 N/A N/A
+ :pypi:`pytest-duration-insights` Jun 25, 2021 N/A N/A
+ :pypi:`pytest-dynamicrerun` A pytest plugin to rerun tests dynamically based off of test outcome and output. Aug 15, 2020 4 - Beta N/A
+ :pypi:`pytest-dynamodb` DynamoDB fixtures for pytest Jun 03, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-easy-addoption` pytest-easy-addoption: Easy way to work with pytest addoption Jan 22, 2020 N/A N/A
+ :pypi:`pytest-easy-api` Simple API testing with pytest Mar 26, 2018 N/A N/A
+ :pypi:`pytest-easyMPI` Package that supports mpi tests in pytest Oct 21, 2020 N/A N/A
+ :pypi:`pytest-easyread` pytest plugin that makes terminal printouts of the reports easier to read Nov 17, 2017 N/A N/A
+ :pypi:`pytest-easy-server` Pytest plugin for easy testing against servers May 01, 2021 4 - Beta pytest (<5.0.0,>=4.3.1) ; python_version < "3.5"
+ :pypi:`pytest-ec2` Pytest execution on EC2 instance Oct 22, 2019 3 - Alpha N/A
+ :pypi:`pytest-echo` pytest plugin with mechanisms for echoing environment variables, package version and generic attributes Jan 08, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-elasticsearch` Elasticsearch fixtures and fixture factories for Pytest. May 12, 2021 5 - Production/Stable pytest (>=3.0.0)
+ :pypi:`pytest-elements` Tool to help automate user interfaces Jan 13, 2021 N/A pytest (>=5.4,<6.0)
+ :pypi:`pytest-elk-reporter` A simple plugin to use with pytest Jan 24, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-email` Send execution result email Jul 08, 2020 N/A pytest
+ :pypi:`pytest-embedded` pytest embedded plugin Nov 29, 2021 N/A pytest (>=6.2.0)
+ :pypi:`pytest-embedded-idf` pytest embedded plugin for esp-idf project Nov 29, 2021 N/A N/A
+ :pypi:`pytest-embedded-jtag` pytest embedded plugin for testing with jtag Nov 29, 2021 N/A N/A
+ :pypi:`pytest-embedded-qemu` pytest embedded plugin for qemu, not target chip Nov 29, 2021 N/A N/A
+ :pypi:`pytest-embedded-qemu-idf` pytest embedded plugin for esp-idf project by qemu, not target chip Jun 29, 2021 N/A N/A
+ :pypi:`pytest-embedded-serial` pytest embedded plugin for testing serial ports Nov 29, 2021 N/A N/A
+ :pypi:`pytest-embedded-serial-esp` pytest embedded plugin for testing espressif boards via serial ports Nov 29, 2021 N/A N/A
+ :pypi:`pytest-emoji` A pytest plugin that adds emojis to your test result report Feb 19, 2019 4 - Beta pytest (>=4.2.1)
+ :pypi:`pytest-emoji-output` Pytest plugin to represent test output with emoji support Oct 10, 2021 4 - Beta pytest (==6.0.1)
+ :pypi:`pytest-enabler` Enable installed pytest plugins Nov 08, 2021 5 - Production/Stable pytest (>=6) ; extra == 'testing'
+ :pypi:`pytest-encode` set your encoding and logger Nov 06, 2021 N/A N/A
+ :pypi:`pytest-encode-kane` set your encoding and logger Nov 16, 2021 N/A pytest
+ :pypi:`pytest-enhancements` Improvements for pytest (rejected upstream) Oct 30, 2019 4 - Beta N/A
+ :pypi:`pytest-env` py.test plugin that allows you to add environment variables. Jun 16, 2017 4 - Beta N/A
+ :pypi:`pytest-envfiles` A py.test plugin that parses environment files before running tests Oct 08, 2015 3 - Alpha N/A
+ :pypi:`pytest-env-info` Push information about the running pytest into envvars Nov 25, 2017 4 - Beta pytest (>=3.1.1)
+ :pypi:`pytest-envraw` py.test plugin that allows you to add environment variables. Aug 27, 2020 4 - Beta pytest (>=2.6.0)
+ :pypi:`pytest-envvars` Pytest plugin to validate use of envvars on your tests Jun 13, 2020 5 - Production/Stable pytest (>=3.0.0)
+ :pypi:`pytest-env-yaml` Apr 02, 2019 N/A N/A
+ :pypi:`pytest-eradicate` pytest plugin to check for commented out code Sep 08, 2020 N/A pytest (>=2.4.2)
+ :pypi:`pytest-error-for-skips` Pytest plugin to treat skipped tests a test failure Dec 19, 2019 4 - Beta pytest (>=4.6)
+ :pypi:`pytest-eth` PyTest plugin for testing Smart Contracts for Ethereum Virtual Machine (EVM). Aug 14, 2020 1 - Planning N/A
+ :pypi:`pytest-ethereum` pytest-ethereum: Pytest library for ethereum projects. Jun 24, 2019 3 - Alpha pytest (==3.3.2); extra == 'dev'
+ :pypi:`pytest-eucalyptus` Pytest Plugin for BDD Aug 13, 2019 N/A pytest (>=4.2.0)
+ :pypi:`pytest-eventlet` Applies eventlet monkey-patch as a pytest plugin. Oct 04, 2021 N/A pytest ; extra == 'dev'
+ :pypi:`pytest-excel` pytest plugin for generating excel reports Oct 06, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-exceptional` Better exceptions Mar 16, 2017 4 - Beta N/A
+ :pypi:`pytest-exception-script` Walk your code through exception script to check it's resiliency to failures. Aug 04, 2020 3 - Alpha pytest
+ :pypi:`pytest-executable` pytest plugin for testing executables Nov 10, 2021 4 - Beta pytest (<6.3,>=4.3)
+ :pypi:`pytest-expect` py.test plugin to store test expectations and mark tests based on them Apr 21, 2016 4 - Beta N/A
+ :pypi:`pytest-expecter` Better testing with expecter and pytest. Jul 08, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-expectr` This plugin is used to expect multiple assert using pytest framework. Oct 05, 2018 N/A pytest (>=2.4.2)
+ :pypi:`pytest-explicit` A Pytest plugin to ignore certain marked tests by default Jun 15, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-exploratory` Interactive console for pytest. Aug 03, 2021 N/A pytest (>=5.3)
+ :pypi:`pytest-external-blockers` a special outcome for tests that are blocked for external reasons Oct 05, 2021 N/A pytest
+ :pypi:`pytest-extra-durations` A pytest plugin to get durations on a per-function basis and per module basis. Apr 21, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-fabric` Provides test utilities to run fabric task tests by using docker containers Sep 12, 2018 5 - Production/Stable N/A
+ :pypi:`pytest-factory` Use factories for test setup with py.test Sep 06, 2020 3 - Alpha pytest (>4.3)
+ :pypi:`pytest-factoryboy` Factory Boy support for pytest. Dec 30, 2020 6 - Mature pytest (>=4.6)
+ :pypi:`pytest-factoryboy-fixtures` Generates pytest fixtures that allow the use of type hinting Jun 25, 2020 N/A N/A
+ :pypi:`pytest-factoryboy-state` Simple factoryboy random state management Dec 11, 2020 4 - Beta pytest (>=5.0)
+ :pypi:`pytest-failed-screenshot` Test case fails,take a screenshot,save it,attach it to the allure Apr 21, 2021 N/A N/A
+ :pypi:`pytest-failed-to-verify` A pytest plugin that helps better distinguishing real test failures from setup flakiness. Aug 08, 2019 5 - Production/Stable pytest (>=4.1.0)
+ :pypi:`pytest-faker` Faker integration with the pytest framework. Dec 19, 2016 6 - Mature N/A
+ :pypi:`pytest-falcon` Pytest helpers for Falcon. Sep 07, 2016 4 - Beta N/A
+ :pypi:`pytest-falcon-client` Pytest \`client\` fixture for the Falcon Framework Mar 19, 2019 N/A N/A
+ :pypi:`pytest-fantasy` Pytest plugin for Flask Fantasy Framework Mar 14, 2019 N/A N/A
+ :pypi:`pytest-fastapi` Dec 27, 2020 N/A N/A
+ :pypi:`pytest-fastest` Use SCM and coverage to run only needed tests Mar 05, 2020 N/A N/A
+ :pypi:`pytest-fast-first` Pytest plugin that runs fast tests first Apr 02, 2021 3 - Alpha pytest
+ :pypi:`pytest-faulthandler` py.test plugin that activates the fault handler module for tests (dummy package) Jul 04, 2019 6 - Mature pytest (>=5.0)
+ :pypi:`pytest-fauxfactory` Integration of fauxfactory into pytest. Dec 06, 2017 5 - Production/Stable pytest (>=3.2)
+ :pypi:`pytest-figleaf` py.test figleaf coverage plugin Jan 18, 2010 5 - Production/Stable N/A
+ :pypi:`pytest-filecov` A pytest plugin to detect unused files Jun 27, 2021 4 - Beta pytest
+ :pypi:`pytest-filedata` easily load data from files Jan 17, 2019 4 - Beta N/A
+ :pypi:`pytest-filemarker` A pytest plugin that runs marked tests when files change. Dec 01, 2020 N/A pytest
+ :pypi:`pytest-filter-case` run test cases filter by mark Nov 05, 2020 N/A N/A
+ :pypi:`pytest-filter-subpackage` Pytest plugin for filtering based on sub-packages Jan 09, 2020 3 - Alpha pytest (>=3.0)
+ :pypi:`pytest-find-dependencies` A pytest plugin to find dependencies between tests Apr 21, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-finer-verdicts` A pytest plugin to treat non-assertion failures as test errors. Jun 18, 2020 N/A pytest (>=5.4.3)
+ :pypi:`pytest-firefox` pytest plugin to manipulate firefox Aug 08, 2017 3 - Alpha pytest (>=3.0.2)
+ :pypi:`pytest-fixture-config` Fixture configuration utils for py.test May 28, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-fixture-maker` Pytest plugin to load fixtures from YAML files Sep 21, 2021 N/A N/A
+ :pypi:`pytest-fixture-marker` A pytest plugin to add markers based on fixtures used. Oct 11, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-fixture-order` pytest plugin to control fixture evaluation order Aug 25, 2020 N/A pytest (>=3.0)
+ :pypi:`pytest-fixtures` Common fixtures for pytest May 01, 2019 5 - Production/Stable N/A
+ :pypi:`pytest-fixture-tools` Plugin for pytest which provides tools for fixtures Aug 18, 2020 6 - Mature pytest
+ :pypi:`pytest-fixture-typecheck` A pytest plugin to assert type annotations at runtime. Aug 24, 2021 N/A pytest
+ :pypi:`pytest-flake8` pytest plugin to check FLAKE8 requirements Dec 16, 2020 4 - Beta pytest (>=3.5)
+ :pypi:`pytest-flake8-path` A pytest fixture for testing flake8 plugins. Aug 11, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-flakefinder` Runs tests multiple times to expose flakiness. Jul 28, 2020 4 - Beta pytest (>=2.7.1)
+ :pypi:`pytest-flakes` pytest plugin to check source code with pyflakes Dec 02, 2021 5 - Production/Stable pytest (>=5)
+ :pypi:`pytest-flaptastic` Flaptastic py.test plugin Mar 17, 2019 N/A N/A
+ :pypi:`pytest-flask` A set of py.test fixtures to test Flask applications. Feb 27, 2021 5 - Production/Stable pytest (>=5.2)
+ :pypi:`pytest-flask-sqlalchemy` A pytest plugin for preserving test isolation in Flask-SQlAlchemy using database transactions. Apr 04, 2019 4 - Beta pytest (>=3.2.1)
+ :pypi:`pytest-flask-sqlalchemy-transactions` Run tests in transactions using pytest, Flask, and SQLalchemy. Aug 02, 2018 4 - Beta pytest (>=3.2.1)
+ :pypi:`pytest-flyte` Pytest fixtures for simplifying Flyte integration testing May 03, 2021 N/A pytest
+ :pypi:`pytest-focus` A pytest plugin that alerts user of failed test cases with screen notifications May 04, 2019 4 - Beta pytest
+ :pypi:`pytest-forcefail` py.test plugin to make the test failing regardless of pytest.mark.xfail May 15, 2018 4 - Beta N/A
+ :pypi:`pytest-forward-compatability` A name to avoid typosquating pytest-foward-compatibility Sep 06, 2020 N/A N/A
+ :pypi:`pytest-forward-compatibility` A pytest plugin to shim pytest commandline options for fowards compatibility Sep 29, 2020 N/A N/A
+ :pypi:`pytest-freezegun` Wrap tests with fixtures in freeze_time Jul 19, 2020 4 - Beta pytest (>=3.0.0)
+ :pypi:`pytest-freeze-reqs` Check if requirement files are frozen Apr 29, 2021 N/A N/A
+ :pypi:`pytest-frozen-uuids` Deterministically frozen UUID's for your tests Oct 19, 2021 N/A pytest (>=3.0)
+ :pypi:`pytest-func-cov` Pytest plugin for measuring function coverage Apr 15, 2021 3 - Alpha pytest (>=5)
+ :pypi:`pytest-funparam` An alternative way to parametrize test cases. Dec 02, 2021 4 - Beta pytest >=4.6.0
+ :pypi:`pytest-fxa` pytest plugin for Firefox Accounts Aug 28, 2018 5 - Production/Stable N/A
+ :pypi:`pytest-fxtest` Oct 27, 2020 N/A N/A
+ :pypi:`pytest-gc` The garbage collector plugin for py.test Feb 01, 2018 N/A N/A
+ :pypi:`pytest-gcov` Uses gcov to measure test coverage of a C library Feb 01, 2018 3 - Alpha N/A
+ :pypi:`pytest-gevent` Ensure that gevent is properly patched when invoking pytest Feb 25, 2020 N/A pytest
+ :pypi:`pytest-gherkin` A flexible framework for executing BDD gherkin tests Jul 27, 2019 3 - Alpha pytest (>=5.0.0)
+ :pypi:`pytest-ghostinspector` For finding/executing Ghost Inspector tests May 17, 2016 3 - Alpha N/A
+ :pypi:`pytest-girder` A set of pytest fixtures for testing Girder applications. Nov 30, 2021 N/A N/A
+ :pypi:`pytest-git` Git repository fixture for py.test May 28, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-gitcov` Pytest plugin for reporting on coverage of the last git commit. Jan 11, 2020 2 - Pre-Alpha N/A
+ :pypi:`pytest-git-fixtures` Pytest fixtures for testing with git. Mar 11, 2021 4 - Beta pytest
+ :pypi:`pytest-github` Plugin for py.test that associates tests with github issues using a marker. Mar 07, 2019 5 - Production/Stable N/A
+ :pypi:`pytest-github-actions-annotate-failures` pytest plugin to annotate failed tests with a workflow command for GitHub Actions Oct 24, 2021 N/A pytest (>=4.0.0)
+ :pypi:`pytest-gitignore` py.test plugin to ignore the same files as git Jul 17, 2015 4 - Beta N/A
+ :pypi:`pytest-glamor-allure` Extends allure-pytest functionality Nov 26, 2021 4 - Beta pytest
+ :pypi:`pytest-gnupg-fixtures` Pytest fixtures for testing with gnupg. Mar 04, 2021 4 - Beta pytest
+ :pypi:`pytest-golden` Plugin for pytest that offloads expected outputs to data files Nov 23, 2020 N/A pytest (>=6.1.2,<7.0.0)
+ :pypi:`pytest-graphql-schema` Get graphql schema as fixture for pytest Oct 18, 2019 N/A N/A
+ :pypi:`pytest-greendots` Green progress dots Feb 08, 2014 3 - Alpha N/A
+ :pypi:`pytest-growl` Growl notifications for pytest results. Jan 13, 2014 5 - Production/Stable N/A
+ :pypi:`pytest-grpc` pytest plugin for grpc May 01, 2020 N/A pytest (>=3.6.0)
+ :pypi:`pytest-hammertime` Display "🔨 " instead of "." for passed pytest tests. Jul 28, 2018 N/A pytest
+ :pypi:`pytest-harvest` Store data created during your pytest tests execution, and retrieve it at the end of the session, e.g. for applicative benchmarking purposes. Apr 01, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-helm-chart` A plugin to provide different types and configs of Kubernetes clusters that can be used for testing. Jun 15, 2020 4 - Beta pytest (>=5.4.2,<6.0.0)
+ :pypi:`pytest-helm-charts` A plugin to provide different types and configs of Kubernetes clusters that can be used for testing. Oct 26, 2021 4 - Beta pytest (>=6.1.2,<7.0.0)
+ :pypi:`pytest-helper` Functions to help in using the pytest testing framework May 31, 2019 5 - Production/Stable N/A
+ :pypi:`pytest-helpers` pytest helpers May 17, 2020 N/A pytest
+ :pypi:`pytest-helpers-namespace` Pytest Helpers Namespace Plugin Apr 29, 2021 5 - Production/Stable pytest (>=6.0.0)
+ :pypi:`pytest-hidecaptured` Hide captured output May 04, 2018 4 - Beta pytest (>=2.8.5)
+ :pypi:`pytest-historic` Custom report to display pytest historical execution records Apr 08, 2020 N/A pytest
+ :pypi:`pytest-historic-hook` Custom listener to store execution results into MYSQL DB, which is used for pytest-historic report Apr 08, 2020 N/A pytest
+ :pypi:`pytest-homeassistant` A pytest plugin for use with homeassistant custom components. Aug 12, 2020 4 - Beta N/A
+ :pypi:`pytest-homeassistant-custom-component` Experimental package to automatically extract test plugins for Home Assistant custom components Nov 20, 2021 3 - Alpha pytest (==6.2.5)
+ :pypi:`pytest-honors` Report on tests that honor constraints, and guard against regressions Mar 06, 2020 4 - Beta N/A
+ :pypi:`pytest-hoverfly` Simplify working with Hoverfly from pytest Jul 12, 2021 N/A pytest (>=5.0)
+ :pypi:`pytest-hoverfly-wrapper` Integrates the Hoverfly HTTP proxy into Pytest Aug 29, 2021 4 - Beta N/A
+ :pypi:`pytest-hpfeeds` Helpers for testing hpfeeds in your python project Aug 27, 2021 4 - Beta pytest (>=6.2.4,<7.0.0)
+ :pypi:`pytest-html` pytest plugin for generating HTML reports Dec 13, 2020 5 - Production/Stable pytest (!=6.0.0,>=5.0)
+ :pypi:`pytest-html-lee` optimized pytest plugin for generating HTML reports Jun 30, 2020 5 - Production/Stable pytest (>=5.0)
+ :pypi:`pytest-html-profiling` Pytest plugin for generating HTML reports with per-test profiling and optionally call graph visualizations. Based on pytest-html by Dave Hunt. Feb 11, 2020 5 - Production/Stable pytest (>=3.0)
+ :pypi:`pytest-html-reporter` Generates a static html report based on pytest framework Apr 25, 2021 N/A N/A
+ :pypi:`pytest-html-thread` pytest plugin for generating HTML reports Dec 29, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-http` Fixture "http" for http requests Dec 05, 2019 N/A N/A
+ :pypi:`pytest-httpbin` Easily test your HTTP library against a local copy of httpbin Feb 11, 2019 5 - Production/Stable N/A
+ :pypi:`pytest-http-mocker` Pytest plugin for http mocking (via https://github.com/vilus/mocker) Oct 20, 2019 N/A N/A
+ :pypi:`pytest-httpretty` A thin wrapper of HTTPretty for pytest Feb 16, 2014 3 - Alpha N/A
+ :pypi:`pytest-httpserver` pytest-httpserver is a httpserver for pytest Oct 18, 2021 3 - Alpha pytest ; extra == 'dev'
+ :pypi:`pytest-httpx` Send responses to httpx. Nov 16, 2021 5 - Production/Stable pytest (==6.*)
+ :pypi:`pytest-httpx-blockage` Disable httpx requests during a test run Nov 16, 2021 N/A pytest (>=6.2.5)
+ :pypi:`pytest-hue` Visualise PyTest status via your Phillips Hue lights May 09, 2019 N/A N/A
+ :pypi:`pytest-hylang` Pytest plugin to allow running tests written in hylang Mar 28, 2021 N/A pytest
+ :pypi:`pytest-hypo-25` help hypo module for pytest Jan 12, 2020 3 - Alpha N/A
+ :pypi:`pytest-ibutsu` A plugin to sent pytest results to an Ibutsu server Jun 16, 2021 4 - Beta pytest
+ :pypi:`pytest-icdiff` use icdiff for better error messages in pytest assertions Apr 08, 2020 4 - Beta N/A
+ :pypi:`pytest-idapro` A pytest plugin for idapython. Allows a pytest setup to run tests outside and inside IDA in an automated manner by runnig pytest inside IDA and by mocking idapython api Nov 03, 2018 N/A N/A
+ :pypi:`pytest-idempotent` Pytest plugin for testing function idempotence. Nov 26, 2021 N/A N/A
+ :pypi:`pytest-ignore-flaky` ignore failures from flaky tests (pytest plugin) Apr 23, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-image-diff` Jul 28, 2021 3 - Alpha pytest
+ :pypi:`pytest-incremental` an incremental test runner (pytest plugin) Apr 24, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-influxdb` Plugin for influxdb and pytest integration. Apr 20, 2021 N/A N/A
+ :pypi:`pytest-info-collector` pytest plugin to collect information from tests May 26, 2019 3 - Alpha N/A
+ :pypi:`pytest-informative-node` display more node ininformation. Apr 25, 2019 4 - Beta N/A
+ :pypi:`pytest-infrastructure` pytest stack validation prior to testing executing Apr 12, 2020 4 - Beta N/A
+ :pypi:`pytest-ini` Reuse pytest.ini to store env variables Sep 30, 2021 N/A N/A
+ :pypi:`pytest-inmanta` A py.test plugin providing fixtures to simplify inmanta modules testing. Aug 17, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-inmanta-extensions` Inmanta tests package May 27, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-Inomaly` A simple image diff plugin for pytest Feb 13, 2018 4 - Beta N/A
+ :pypi:`pytest-insta` A practical snapshot testing plugin for pytest Apr 07, 2021 N/A pytest (>=6.0.2,<7.0.0)
+ :pypi:`pytest-instafail` pytest plugin to show failures instantly Jun 14, 2020 4 - Beta pytest (>=2.9)
+ :pypi:`pytest-instrument` pytest plugin to instrument tests Apr 05, 2020 5 - Production/Stable pytest (>=5.1.0)
+ :pypi:`pytest-integration` Organizing pytests by integration or not Apr 16, 2020 N/A N/A
+ :pypi:`pytest-integration-mark` Automatic integration test marking and excluding plugin for pytest Jul 19, 2021 N/A pytest (>=5.2,<7.0)
+ :pypi:`pytest-interactive` A pytest plugin for console based interactive test selection just after the collection phase Nov 30, 2017 3 - Alpha N/A
+ :pypi:`pytest-intercept-remote` Pytest plugin for intercepting outgoing connection requests during pytest run. May 24, 2021 4 - Beta pytest (>=4.6)
+ :pypi:`pytest-invenio` Pytest fixtures for Invenio. May 11, 2021 5 - Production/Stable pytest (<7,>=6)
+ :pypi:`pytest-involve` Run tests covering a specific file or changeset Feb 02, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-ipdb` A py.test plug-in to enable drop to ipdb debugger on test failure. Sep 02, 2014 2 - Pre-Alpha N/A
+ :pypi:`pytest-ipynb` THIS PROJECT IS ABANDONED Jan 29, 2019 3 - Alpha N/A
+ :pypi:`pytest-isort` py.test plugin to check import ordering using isort Apr 27, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-it` Pytest plugin to display test reports as a plaintext spec, inspired by Rspec: https://github.com/mattduck/pytest-it. Jan 22, 2020 4 - Beta N/A
+ :pypi:`pytest-iterassert` Nicer list and iterable assertion messages for pytest May 11, 2020 3 - Alpha N/A
+ :pypi:`pytest-jasmine` Run jasmine tests from your pytest test suite Nov 04, 2017 1 - Planning N/A
+ :pypi:`pytest-jest` A custom jest-pytest oriented Pytest reporter May 22, 2018 4 - Beta pytest (>=3.3.2)
+ :pypi:`pytest-jira` py.test JIRA integration plugin, using markers Dec 02, 2021 3 - Alpha N/A
+ :pypi:`pytest-jira-xray` pytest plugin to integrate tests with JIRA XRAY Nov 28, 2021 3 - Alpha pytest
+ :pypi:`pytest-jobserver` Limit parallel tests with posix jobserver. May 15, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-joke` Test failures are better served with humor. Oct 08, 2019 4 - Beta pytest (>=4.2.1)
+ :pypi:`pytest-json` Generate JSON test reports Jan 18, 2016 4 - Beta N/A
+ :pypi:`pytest-jsonlint` UNKNOWN Aug 04, 2016 N/A N/A
+ :pypi:`pytest-json-report` A pytest plugin to report test results as JSON files Sep 24, 2021 4 - Beta pytest (>=3.8.0)
+ :pypi:`pytest-kafka` Zookeeper, Kafka server, and Kafka consumer fixtures for Pytest Aug 24, 2021 N/A pytest
+ :pypi:`pytest-kafkavents` A plugin to send pytest events to Kafka Sep 08, 2021 4 - Beta pytest
+ :pypi:`pytest-kind` Kubernetes test support with KIND for pytest Jan 24, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-kivy` Kivy GUI tests fixtures using pytest Jul 06, 2021 4 - Beta pytest (>=3.6)
+ :pypi:`pytest-knows` A pytest plugin that can automatically skip test cases based on dependence info calculated by trace Aug 22, 2014 N/A N/A
+ :pypi:`pytest-konira` Run Konira DSL tests with py.test Oct 09, 2011 N/A N/A
+ :pypi:`pytest-krtech-common` pytest krtech common library Nov 28, 2016 4 - Beta N/A
+ :pypi:`pytest-kwparametrize` Alternate syntax for @pytest.mark.parametrize with test cases as dictionaries and default value fallbacks Jan 22, 2021 N/A pytest (>=6)
+ :pypi:`pytest-lambda` Define pytest fixtures with lambda functions. Aug 23, 2021 3 - Alpha pytest (>=3.6,<7)
+ :pypi:`pytest-lamp` Jan 06, 2017 3 - Alpha N/A
+ :pypi:`pytest-layab` Pytest fixtures for layab. Oct 05, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-lazy-fixture` It helps to use fixtures in pytest.mark.parametrize Feb 01, 2020 4 - Beta pytest (>=3.2.5)
+ :pypi:`pytest-ldap` python-ldap fixtures for pytest Aug 18, 2020 N/A pytest
+ :pypi:`pytest-leaks` A pytest plugin to trace resource leaks. Nov 27, 2019 1 - Planning N/A
+ :pypi:`pytest-level` Select tests of a given level or lower Oct 21, 2019 N/A pytest
+ :pypi:`pytest-libfaketime` A python-libfaketime plugin for pytest. Dec 22, 2018 4 - Beta pytest (>=3.0.0)
+ :pypi:`pytest-libiio` A pytest plugin to manage interfacing with libiio contexts Oct 29, 2021 4 - Beta N/A
+ :pypi:`pytest-libnotify` Pytest plugin that shows notifications about the test run Apr 02, 2021 3 - Alpha pytest
+ :pypi:`pytest-ligo` Jan 16, 2020 4 - Beta N/A
+ :pypi:`pytest-lineno` A pytest plugin to show the line numbers of test functions Dec 04, 2020 N/A pytest
+ :pypi:`pytest-line-profiler` Profile code executed by pytest May 03, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-lisa` Pytest plugin for organizing tests. Jan 21, 2021 3 - Alpha pytest (>=6.1.2,<7.0.0)
+ :pypi:`pytest-listener` A simple network listener May 28, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-litf` A pytest plugin that stream output in LITF format Jan 18, 2021 4 - Beta pytest (>=3.1.1)
+ :pypi:`pytest-live` Live results for pytest Mar 08, 2020 N/A pytest
+ :pypi:`pytest-localftpserver` A PyTest plugin which provides an FTP fixture for your tests Aug 25, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-localserver` py.test plugin to test server connections locally. Nov 19, 2021 4 - Beta N/A
+ :pypi:`pytest-localstack` Pytest plugin for AWS integration tests Aug 22, 2019 4 - Beta pytest (>=3.3.0)
+ :pypi:`pytest-lockable` lockable resource plugin for pytest Nov 09, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-locker` Used to lock object during testing. Essentially changing assertions from being hard coded to asserting that nothing changed Oct 29, 2021 N/A pytest (>=5.4)
+ :pypi:`pytest-log` print log Aug 15, 2021 N/A pytest (>=3.8)
+ :pypi:`pytest-logbook` py.test plugin to capture logbook log messages Nov 23, 2015 5 - Production/Stable pytest (>=2.8)
+ :pypi:`pytest-logdog` Pytest plugin to test logging Jun 15, 2021 1 - Planning pytest (>=6.2.0)
+ :pypi:`pytest-logfest` Pytest plugin providing three logger fixtures with basic or full writing to log files Jul 21, 2019 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-logger` Plugin configuring handlers for loggers from Python logging module. Jul 25, 2019 4 - Beta pytest (>=3.2)
+ :pypi:`pytest-logging` Configures logging and allows tweaking the log level with a py.test flag Nov 04, 2015 4 - Beta N/A
+ :pypi:`pytest-log-report` Package for creating a pytest test run report Dec 26, 2019 N/A N/A
+ :pypi:`pytest-manual-marker` pytest marker for marking manual tests Oct 11, 2021 3 - Alpha pytest (>=6)
+ :pypi:`pytest-markdown` Test your markdown docs with pytest Jan 15, 2021 4 - Beta pytest (>=6.0.1,<7.0.0)
+ :pypi:`pytest-marker-bugzilla` py.test bugzilla integration plugin, using markers Jan 09, 2020 N/A N/A
+ :pypi:`pytest-markers-presence` A simple plugin to detect missed pytest tags and markers Feb 04, 2021 4 - Beta pytest (>=6.0)
+ :pypi:`pytest-markfiltration` UNKNOWN Nov 08, 2011 3 - Alpha N/A
+ :pypi:`pytest-mark-no-py3` pytest plugin and bowler codemod to help migrate tests to Python 3 May 17, 2019 N/A pytest
+ :pypi:`pytest-marks` UNKNOWN Nov 23, 2012 3 - Alpha N/A
+ :pypi:`pytest-matcher` Match test output against patterns stored in files Apr 23, 2020 5 - Production/Stable pytest (>=3.4)
+ :pypi:`pytest-match-skip` Skip matching marks. Matches partial marks using wildcards. May 15, 2019 4 - Beta pytest (>=4.4.1)
+ :pypi:`pytest-mat-report` this is report Jan 20, 2021 N/A N/A
+ :pypi:`pytest-matrix` Provide tools for generating tests from combinations of fixtures. Jun 24, 2020 5 - Production/Stable pytest (>=5.4.3,<6.0.0)
+ :pypi:`pytest-mccabe` pytest plugin to run the mccabe code complexity checker. Jul 22, 2020 3 - Alpha pytest (>=5.4.0)
+ :pypi:`pytest-md` Plugin for generating Markdown reports for pytest results Jul 11, 2019 3 - Alpha pytest (>=4.2.1)
+ :pypi:`pytest-md-report` A pytest plugin to make a test results report with Markdown table format. May 04, 2021 4 - Beta pytest (!=6.0.0,<7,>=3.3.2)
+ :pypi:`pytest-memprof` Estimates memory consumption of test functions Mar 29, 2019 4 - Beta N/A
+ :pypi:`pytest-menu` A pytest plugin for console based interactive test selection just after the collection phase Oct 04, 2017 3 - Alpha pytest (>=2.4.2)
+ :pypi:`pytest-mercurial` pytest plugin to write integration tests for projects using Mercurial Python internals Nov 21, 2020 1 - Planning N/A
+ :pypi:`pytest-message` Pytest plugin for sending report message of marked tests execution Nov 04, 2021 N/A pytest (>=6.2.5)
+ :pypi:`pytest-messenger` Pytest to Slack reporting plugin Dec 16, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-metadata` pytest plugin for test session metadata Nov 27, 2020 5 - Production/Stable pytest (>=2.9.0)
+ :pypi:`pytest-metrics` Custom metrics report for pytest Apr 04, 2020 N/A pytest
+ :pypi:`pytest-mimesis` Mimesis integration with the pytest test runner Mar 21, 2020 5 - Production/Stable pytest (>=4.2)
+ :pypi:`pytest-minecraft` A pytest plugin for running tests against Minecraft releases Sep 26, 2020 N/A pytest (>=6.0.1,<7.0.0)
+ :pypi:`pytest-missing-fixtures` Pytest plugin that creates missing fixtures Oct 14, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-ml` Test your machine learning! May 04, 2019 4 - Beta N/A
+ :pypi:`pytest-mocha` pytest plugin to display test execution output like a mochajs Apr 02, 2020 4 - Beta pytest (>=5.4.0)
+ :pypi:`pytest-mock` Thin-wrapper around the mock package for easier use with pytest May 06, 2021 5 - Production/Stable pytest (>=5.0)
+ :pypi:`pytest-mock-api` A mock API server with configurable routes and responses available as a fixture. Feb 13, 2019 1 - Planning pytest (>=4.0.0)
+ :pypi:`pytest-mock-generator` A pytest fixture wrapper for https://pypi.org/project/mock-generator Aug 10, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-mock-helper` Help you mock HTTP call and generate mock code Jan 24, 2018 N/A pytest
+ :pypi:`pytest-mockito` Base fixtures for mockito Jul 11, 2018 4 - Beta N/A
+ :pypi:`pytest-mockredis` An in-memory mock of a Redis server that runs in a separate thread. This is to be used for unit-tests that require a Redis database. Jan 02, 2018 2 - Pre-Alpha N/A
+ :pypi:`pytest-mock-resources` A pytest plugin for easily instantiating reproducible mock resources. Dec 03, 2021 N/A pytest (>=1.0)
+ :pypi:`pytest-mock-server` Mock server plugin for pytest Apr 06, 2020 4 - Beta N/A
+ :pypi:`pytest-mockservers` A set of fixtures to test your requests to HTTP/UDP servers Mar 31, 2020 N/A pytest (>=4.3.0)
+ :pypi:`pytest-modifyjunit` Utility for adding additional properties to junit xml for IDM QE Jan 10, 2019 N/A N/A
+ :pypi:`pytest-modifyscope` pytest plugin to modify fixture scope Apr 12, 2020 N/A pytest
+ :pypi:`pytest-molecule` PyTest Molecule Plugin :: discover and run molecule tests Oct 06, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-mongo` MongoDB process and client fixtures plugin for Pytest. Jun 07, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-mongodb` pytest plugin for MongoDB fixtures Dec 07, 2019 5 - Production/Stable pytest (>=2.5.2)
+ :pypi:`pytest-monitor` Pytest plugin for analyzing resource usage. Aug 24, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-monkeyplus` pytest's monkeypatch subclass with extra functionalities Sep 18, 2012 5 - Production/Stable N/A
+ :pypi:`pytest-monkeytype` pytest-monkeytype: Generate Monkeytype annotations from your pytest tests. Jul 29, 2020 4 - Beta N/A
+ :pypi:`pytest-moto` Fixtures for integration tests of AWS services, uses the moto mocking library. Aug 28, 2015 1 - Planning N/A
+ :pypi:`pytest-motor` A pytest plugin for motor, the non-blocking MongoDB driver. Jul 21, 2021 3 - Alpha pytest
+ :pypi:`pytest-mp` A test batcher for multiprocessed Pytest runs May 23, 2018 4 - Beta pytest
+ :pypi:`pytest-mpi` pytest plugin to collect information from tests Mar 14, 2021 3 - Alpha pytest
+ :pypi:`pytest-mpl` pytest plugin to help with testing figures output from Matplotlib Jul 02, 2021 4 - Beta pytest
+ :pypi:`pytest-mproc` low-startup-overhead, scalable, distributed-testing pytest plugin Mar 07, 2021 4 - Beta pytest
+ :pypi:`pytest-multi-check` Pytest plugin that implements multi-checks and soft checks Jun 03, 2021 N/A pytest
+ :pypi:`pytest-multihost` Utility for writing multi-host tests for pytest Apr 07, 2020 4 - Beta N/A
+ :pypi:`pytest-multilog` Multi-process logs handling and other helpers for pytest Jun 10, 2021 N/A N/A
+ :pypi:`pytest-multithreading` a pytest plugin for threaded and concurrent testing Aug 12, 2021 N/A pytest (>=3.6)
+ :pypi:`pytest-mutagen` Add the mutation testing feature to pytest Jul 24, 2020 N/A pytest (>=5.4)
+ :pypi:`pytest-mypy` Mypy static type checker plugin for Pytest Mar 21, 2021 4 - Beta pytest (>=3.5)
+ :pypi:`pytest-mypyd` Mypy static type checker plugin for Pytest Aug 20, 2019 4 - Beta pytest (<4.7,>=2.8) ; python_version < "3.5"
+ :pypi:`pytest-mypy-plugins` pytest plugin for writing tests for mypy plugins Oct 19, 2021 3 - Alpha pytest (>=6.0.0)
+ :pypi:`pytest-mypy-plugins-shim` Substitute for "pytest-mypy-plugins" for Python implementations which aren't supported by mypy. Apr 12, 2021 N/A N/A
+ :pypi:`pytest-mypy-testing` Pytest plugin to check mypy output. Jun 13, 2021 N/A pytest
+ :pypi:`pytest-mysql` MySQL process and client fixtures for pytest Nov 22, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-needle` pytest plugin for visual testing websites using selenium Dec 10, 2018 4 - Beta pytest (<5.0.0,>=3.0.0)
+ :pypi:`pytest-neo` pytest-neo is a plugin for pytest that shows tests like screen of Matrix. Apr 23, 2019 3 - Alpha pytest (>=3.7.2)
+ :pypi:`pytest-network` A simple plugin to disable network on socket level. May 07, 2020 N/A N/A
+ :pypi:`pytest-never-sleep` pytest plugin helps to avoid adding tests without mock \`time.sleep\` May 05, 2021 3 - Alpha pytest (>=3.5.1)
+ :pypi:`pytest-nginx` nginx fixture for pytest Aug 12, 2017 5 - Production/Stable N/A
+ :pypi:`pytest-nginx-iplweb` nginx fixture for pytest - iplweb temporary fork Mar 01, 2019 5 - Production/Stable N/A
+ :pypi:`pytest-ngrok` Jan 22, 2020 3 - Alpha N/A
+ :pypi:`pytest-ngsfixtures` pytest ngs fixtures Sep 06, 2019 2 - Pre-Alpha pytest (>=5.0.0)
+ :pypi:`pytest-nice` A pytest plugin that alerts user of failed test cases with screen notifications May 04, 2019 4 - Beta pytest
+ :pypi:`pytest-nice-parametrize` A small snippet for nicer PyTest's Parametrize Apr 17, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-nlcov` Pytest plugin to get the coverage of the new lines (based on git diff) only Jul 07, 2021 N/A N/A
+ :pypi:`pytest-nocustom` Run all tests without custom markers Jul 07, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-nodev` Test-driven source code search for Python. Jul 21, 2016 4 - Beta pytest (>=2.8.1)
+ :pypi:`pytest-nogarbage` Ensure a test produces no garbage Aug 29, 2021 5 - Production/Stable pytest (>=4.6.0)
+ :pypi:`pytest-notebook` A pytest plugin for testing Jupyter Notebooks Sep 16, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-notice` Send pytest execution result email Nov 05, 2020 N/A N/A
+ :pypi:`pytest-notification` A pytest plugin for sending a desktop notification and playing a sound upon completion of tests Jun 19, 2020 N/A pytest (>=4)
+ :pypi:`pytest-notifier` A pytest plugin to notify test result Jun 12, 2020 3 - Alpha pytest
+ :pypi:`pytest-notimplemented` Pytest markers for not implemented features and tests. Aug 27, 2019 N/A pytest (>=5.1,<6.0)
+ :pypi:`pytest-notion` A PyTest Reporter to send test runs to Notion.so Aug 07, 2019 N/A N/A
+ :pypi:`pytest-nunit` A pytest plugin for generating NUnit3 test result XML output Aug 04, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-ochrus` pytest results data-base and HTML reporter Feb 21, 2018 4 - Beta N/A
+ :pypi:`pytest-odoo` py.test plugin to run Odoo tests Nov 04, 2021 4 - Beta pytest (>=2.9)
+ :pypi:`pytest-odoo-fixtures` Project description Jun 25, 2019 N/A N/A
+ :pypi:`pytest-oerp` pytest plugin to test OpenERP modules Feb 28, 2012 3 - Alpha N/A
+ :pypi:`pytest-ok` The ultimate pytest output plugin Apr 01, 2019 4 - Beta N/A
+ :pypi:`pytest-only` Use @pytest.mark.only to run a single test Jan 19, 2020 N/A N/A
+ :pypi:`pytest-oot` Run object-oriented tests in a simple format Sep 18, 2016 4 - Beta N/A
+ :pypi:`pytest-openfiles` Pytest plugin for detecting inadvertent open file handles Apr 16, 2020 3 - Alpha pytest (>=4.6)
+ :pypi:`pytest-opentmi` pytest plugin for publish results to opentmi Nov 04, 2021 5 - Production/Stable pytest (>=5.0)
+ :pypi:`pytest-operator` Fixtures for Operators Oct 26, 2021 N/A N/A
+ :pypi:`pytest-optional` include/exclude values of fixtures in pytest Oct 07, 2015 N/A N/A
+ :pypi:`pytest-optional-tests` Easy declaration of optional tests (i.e., that are not run by default) Jul 09, 2019 4 - Beta pytest (>=4.5.0)
+ :pypi:`pytest-orchestration` A pytest plugin for orchestrating tests Jul 18, 2019 N/A N/A
+ :pypi:`pytest-order` pytest plugin to run your tests in a specific order May 30, 2021 4 - Beta pytest (>=5.0)
+ :pypi:`pytest-ordering` pytest plugin to run your tests in a specific order Nov 14, 2018 4 - Beta pytest
+ :pypi:`pytest-osxnotify` OS X notifications for py.test results. May 15, 2015 N/A N/A
+ :pypi:`pytest-otel` pytest-otel report OpenTelemetry traces about test executed Dec 03, 2021 N/A N/A
+ :pypi:`pytest-pact` A simple plugin to use with pytest Jan 07, 2019 4 - Beta N/A
+ :pypi:`pytest-pahrametahrize` Parametrize your tests with a Boston accent. Nov 24, 2021 4 - Beta pytest (>=6.0,<7.0)
+ :pypi:`pytest-parallel` a pytest plugin for parallel and concurrent testing Oct 10, 2021 3 - Alpha pytest (>=3.0.0)
+ :pypi:`pytest-parallel-39` a pytest plugin for parallel and concurrent testing Jul 12, 2021 3 - Alpha pytest (>=3.0.0)
+ :pypi:`pytest-param` pytest plugin to test all, first, last or random params Sep 11, 2016 4 - Beta pytest (>=2.6.0)
+ :pypi:`pytest-paramark` Configure pytest fixtures using a combination of "parametrize" and markers Jan 10, 2020 4 - Beta pytest (>=4.5.0)
+ :pypi:`pytest-parametrization` Simpler PyTest parametrization Nov 30, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-parametrize-cases` A more user-friendly way to write parametrized tests. Dec 12, 2020 N/A pytest (>=6.1.2,<7.0.0)
+ :pypi:`pytest-parametrized` Pytest plugin for parametrizing tests with default iterables. Oct 19, 2020 5 - Production/Stable pytest
+ :pypi:`pytest-parawtf` Finally spell paramete?ri[sz]e correctly Dec 03, 2018 4 - Beta pytest (>=3.6.0)
+ :pypi:`pytest-pass` Check out https://github.com/elilutsky/pytest-pass Dec 04, 2019 N/A N/A
+ :pypi:`pytest-passrunner` Pytest plugin providing the 'run_on_pass' marker Feb 10, 2021 5 - Production/Stable pytest (>=4.6.0)
+ :pypi:`pytest-paste-config` Allow setting the path to a paste config file Sep 18, 2013 3 - Alpha N/A
+ :pypi:`pytest-patches` A contextmanager pytest fixture for handling multiple mock patches Aug 30, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-pdb` pytest plugin which adds pdb helper commands related to pytest. Jul 31, 2018 N/A N/A
+ :pypi:`pytest-peach` pytest plugin for fuzzing with Peach API Security Apr 12, 2019 4 - Beta pytest (>=2.8.7)
+ :pypi:`pytest-pep257` py.test plugin for pep257 Jul 09, 2016 N/A N/A
+ :pypi:`pytest-pep8` pytest plugin to check PEP8 requirements Apr 27, 2014 N/A N/A
+ :pypi:`pytest-percent` Change the exit code of pytest test sessions when a required percent of tests pass. May 21, 2020 N/A pytest (>=5.2.0)
+ :pypi:`pytest-perf` pytest-perf Jun 27, 2021 5 - Production/Stable pytest (>=4.6) ; extra == 'testing'
+ :pypi:`pytest-performance` A simple plugin to ensure the execution of critical sections of code has not been impacted Sep 11, 2020 5 - Production/Stable pytest (>=3.7.0)
+ :pypi:`pytest-persistence` Pytest tool for persistent objects Nov 06, 2021 N/A N/A
+ :pypi:`pytest-pgsql` Pytest plugins and helpers for tests using a Postgres database. May 13, 2020 5 - Production/Stable pytest (>=3.0.0)
+ :pypi:`pytest-phmdoctest` pytest plugin to test Python examples in Markdown using phmdoctest. Nov 10, 2021 4 - Beta pytest (>=6.2) ; extra == 'test'
+ :pypi:`pytest-picked` Run the tests related to the changed files Dec 23, 2020 N/A pytest (>=3.5.0)
+ :pypi:`pytest-pigeonhole` Jun 25, 2018 5 - Production/Stable pytest (>=3.4)
+ :pypi:`pytest-pikachu` Show surprise when tests are passing Aug 05, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-pilot` Slice in your test base thanks to powerful markers. Oct 09, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-pings` 🦊 The pytest plugin for Firefox Telemetry 📊 Jun 29, 2019 3 - Alpha pytest (>=5.0.0)
+ :pypi:`pytest-pinned` A simple pytest plugin for pinning tests Sep 17, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-pinpoint` A pytest plugin which runs SBFL algorithms to detect faults. Sep 25, 2020 N/A pytest (>=4.4.0)
+ :pypi:`pytest-pipeline` Pytest plugin for functional testing of data analysis pipelines Jan 24, 2017 3 - Alpha N/A
+ :pypi:`pytest-platform-markers` Markers for pytest to skip tests on specific platforms Sep 09, 2019 4 - Beta pytest (>=3.6.0)
+ :pypi:`pytest-play` pytest plugin that lets you automate actions and assertions with test metrics reporting, executing plain YAML files Jun 12, 2019 5 - Production/Stable N/A
+ :pypi:`pytest-playbook` Pytest plugin for reading playbooks. Jan 21, 2021 3 - Alpha pytest (>=6.1.2,<7.0.0)
+ :pypi:`pytest-playwright` A pytest wrapper with fixtures for Playwright to automate web browsers Oct 28, 2021 N/A pytest
+ :pypi:`pytest-playwrights` A pytest wrapper with fixtures for Playwright to automate web browsers Dec 02, 2021 N/A N/A
+ :pypi:`pytest-playwright-snapshot` A pytest wrapper for snapshot testing with playwright Aug 19, 2021 N/A N/A
+ :pypi:`pytest-plt` Fixtures for quickly making Matplotlib plots in tests Aug 17, 2020 5 - Production/Stable pytest
+ :pypi:`pytest-plugin-helpers` A plugin to help developing and testing other plugins Nov 23, 2019 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-plus` PyTest Plus Plugin :: extends pytest functionality Mar 19, 2020 5 - Production/Stable pytest (>=3.50)
+ :pypi:`pytest-pmisc` Mar 21, 2019 5 - Production/Stable N/A
+ :pypi:`pytest-pointers` Pytest plugin to define functions you test with special marks for better navigation and reports Oct 14, 2021 N/A N/A
+ :pypi:`pytest-polarion-cfme` pytest plugin for collecting test cases and recording test results Nov 13, 2017 3 - Alpha N/A
+ :pypi:`pytest-polarion-collect` pytest plugin for collecting polarion test cases data Jun 18, 2020 3 - Alpha pytest
+ :pypi:`pytest-polecat` Provides Polecat pytest fixtures Aug 12, 2019 4 - Beta N/A
+ :pypi:`pytest-ponyorm` PonyORM in Pytest Oct 31, 2018 N/A pytest (>=3.1.1)
+ :pypi:`pytest-poo` Visualize your crappy tests Mar 25, 2021 5 - Production/Stable pytest (>=2.3.4)
+ :pypi:`pytest-poo-fail` Visualize your failed tests with poo Feb 12, 2015 5 - Production/Stable N/A
+ :pypi:`pytest-pop` A pytest plugin to help with testing pop projects Aug 19, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-portion` Select a portion of the collected tests Jan 28, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-postgres` Run PostgreSQL in Docker container in Pytest. Mar 22, 2020 N/A pytest
+ :pypi:`pytest-postgresql` Postgresql fixtures and fixture factories for Pytest. Nov 05, 2021 5 - Production/Stable pytest (>=3.0.0)
+ :pypi:`pytest-power` pytest plugin with powerful fixtures Dec 31, 2020 N/A pytest (>=5.4)
+ :pypi:`pytest-pretty-terminal` pytest plugin for generating prettier terminal output Nov 24, 2021 N/A pytest (>=3.4.1)
+ :pypi:`pytest-pride` Minitest-style test colors Apr 02, 2016 3 - Alpha N/A
+ :pypi:`pytest-print` pytest-print adds the printer fixture you can use to print messages to the user (directly to the pytest runner, not stdout) Jun 17, 2021 5 - Production/Stable pytest (>=6)
+ :pypi:`pytest-profiling` Profiling plugin for py.test May 28, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-progress` pytest plugin for instant test progress status Nov 09, 2021 5 - Production/Stable pytest (>=2.7)
+ :pypi:`pytest-prometheus` Report test pass / failures to a Prometheus PushGateway Oct 03, 2017 N/A N/A
+ :pypi:`pytest-prosper` Test helpers for Prosper projects Sep 24, 2018 N/A N/A
+ :pypi:`pytest-pspec` An rspec format reporter for pytest Jun 02, 2020 4 - Beta pytest (>=3.0.0)
+ :pypi:`pytest-psqlgraph` pytest plugin for testing applications that use psqlgraph Oct 19, 2021 4 - Beta pytest (>=6.0)
+ :pypi:`pytest-ptera` Use ptera probes in tests Oct 20, 2021 N/A pytest (>=6.2.4,<7.0.0)
+ :pypi:`pytest-pudb` Pytest PuDB debugger integration Oct 25, 2018 3 - Alpha pytest (>=2.0)
+ :pypi:`pytest-purkinje` py.test plugin for purkinje test runner Oct 28, 2017 2 - Pre-Alpha N/A
+ :pypi:`pytest-pycharm` Plugin for py.test to enter PyCharm debugger on uncaught exceptions Aug 13, 2020 5 - Production/Stable pytest (>=2.3)
+ :pypi:`pytest-pycodestyle` pytest plugin to run pycodestyle Aug 10, 2020 3 - Alpha N/A
+ :pypi:`pytest-pydev` py.test plugin to connect to a remote debug server with PyDev or PyCharm. Nov 15, 2017 3 - Alpha N/A
+ :pypi:`pytest-pydocstyle` pytest plugin to run pydocstyle Aug 10, 2020 3 - Alpha N/A
+ :pypi:`pytest-pylint` pytest plugin to check source code with pylint Nov 09, 2020 5 - Production/Stable pytest (>=5.4)
+ :pypi:`pytest-pypi` Easily test your HTTP library against a local copy of pypi Mar 04, 2018 3 - Alpha N/A
+ :pypi:`pytest-pypom-navigation` Core engine for cookiecutter-qa and pytest-play packages Feb 18, 2019 4 - Beta pytest (>=3.0.7)
+ :pypi:`pytest-pyppeteer` A plugin to run pyppeteer in pytest. Feb 16, 2021 4 - Beta pytest (>=6.0.2)
+ :pypi:`pytest-pyq` Pytest fixture "q" for pyq Mar 10, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-pyramid` pytest_pyramid - provides fixtures for testing pyramid applications with pytest test suite Oct 15, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-pyramid-server` Pyramid server fixture for py.test May 28, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-pyright` Pytest plugin for type checking code with Pyright Aug 16, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-pytestrail` Pytest plugin for interaction with TestRail Aug 27, 2020 4 - Beta pytest (>=3.8.0)
+ :pypi:`pytest-pythonpath` pytest plugin for adding to the PYTHONPATH from command line or configs. Aug 22, 2018 5 - Production/Stable N/A
+ :pypi:`pytest-pytorch` pytest plugin for a better developer experience when working with the PyTorch test suite May 25, 2021 4 - Beta pytest
+ :pypi:`pytest-qasync` Pytest support for qasync. Jul 12, 2021 4 - Beta pytest (>=5.4.0)
+ :pypi:`pytest-qatouch` Pytest plugin for uploading test results to your QA Touch Testrun. Jun 26, 2021 4 - Beta pytest (>=6.2.0)
+ :pypi:`pytest-qgis` A pytest plugin for testing QGIS python plugins Nov 25, 2021 5 - Production/Stable pytest (>=6.2.3)
+ :pypi:`pytest-qml` Run QML Tests with pytest Dec 02, 2020 4 - Beta pytest (>=6.0.0)
+ :pypi:`pytest-qr` pytest plugin to generate test result QR codes Nov 25, 2021 4 - Beta N/A
+ :pypi:`pytest-qt` pytest support for PyQt and PySide applications Jun 13, 2021 5 - Production/Stable pytest (>=3.0.0)
+ :pypi:`pytest-qt-app` QT app fixture for py.test Dec 23, 2015 5 - Production/Stable N/A
+ :pypi:`pytest-quarantine` A plugin for pytest to manage expected test failures Nov 24, 2019 5 - Production/Stable pytest (>=4.6)
+ :pypi:`pytest-quickcheck` pytest plugin to generate random data inspired by QuickCheck Nov 15, 2020 4 - Beta pytest (<6.0.0,>=4.0)
+ :pypi:`pytest-rabbitmq` RabbitMQ process and client fixtures for pytest Jun 02, 2021 5 - Production/Stable pytest (>=3.0.0)
+ :pypi:`pytest-race` Race conditions tester for pytest Nov 21, 2016 4 - Beta N/A
+ :pypi:`pytest-rage` pytest plugin to implement PEP712 Oct 21, 2011 3 - Alpha N/A
+ :pypi:`pytest-railflow-testrail-reporter` Generate json reports along with specified metadata defined in test markers. Dec 02, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-raises` An implementation of pytest.raises as a pytest.mark fixture Apr 23, 2020 N/A pytest (>=3.2.2)
+ :pypi:`pytest-raisesregexp` Simple pytest plugin to look for regex in Exceptions Dec 18, 2015 N/A N/A
+ :pypi:`pytest-raisin` Plugin enabling the use of exception instances with pytest.raises Jun 25, 2020 N/A pytest
+ :pypi:`pytest-random` py.test plugin to randomize tests Apr 28, 2013 3 - Alpha N/A
+ :pypi:`pytest-randomly` Pytest plugin to randomly order tests and control random.seed. Nov 30, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-randomness` Pytest plugin about random seed management May 30, 2019 3 - Alpha N/A
+ :pypi:`pytest-random-num` Randomise the order in which pytest tests are run with some control over the randomness Oct 19, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-random-order` Randomise the order in which pytest tests are run with some control over the randomness Nov 30, 2018 5 - Production/Stable pytest (>=3.0.0)
+ :pypi:`pytest-readme` Test your README.md file Dec 28, 2014 5 - Production/Stable N/A
+ :pypi:`pytest-reana` Pytest fixtures for REANA. Nov 22, 2021 3 - Alpha N/A
+ :pypi:`pytest-recording` A pytest plugin that allows you recording of network interactions via VCR.py Jul 08, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-recordings` Provides pytest plugins for reporting request/response traffic, screenshots, and more to ReportPortal Aug 13, 2020 N/A N/A
+ :pypi:`pytest-redis` Redis fixtures and fixture factories for Pytest. Nov 03, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-redislite` Pytest plugin for testing code using Redis Sep 19, 2021 4 - Beta pytest
+ :pypi:`pytest-redmine` Pytest plugin for redmine Mar 19, 2018 1 - Planning N/A
+ :pypi:`pytest-ref` A plugin to store reference files to ease regression testing Nov 23, 2019 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-reference-formatter` Conveniently run pytest with a dot-formatted test reference. Oct 01, 2019 4 - Beta N/A
+ :pypi:`pytest-regressions` Easy to use fixtures to write regression tests. Jan 27, 2021 5 - Production/Stable pytest (>=3.5.0)
+ :pypi:`pytest-regtest` pytest plugin for regression tests Jun 03, 2021 N/A N/A
+ :pypi:`pytest-relative-order` a pytest plugin that sorts tests using "before" and "after" markers May 17, 2021 4 - Beta N/A
+ :pypi:`pytest-relaxed` Relaxed test discovery/organization for pytest Jun 14, 2019 5 - Production/Stable pytest (<5,>=3)
+ :pypi:`pytest-remfiles` Pytest plugin to create a temporary directory with remote files Jul 01, 2019 5 - Production/Stable N/A
+ :pypi:`pytest-remotedata` Pytest plugin for controlling remote data access. Jul 20, 2019 3 - Alpha pytest (>=3.1)
+ :pypi:`pytest-remote-response` Pytest plugin for capturing and mocking connection requests. Jun 30, 2021 4 - Beta pytest (>=4.6)
+ :pypi:`pytest-remove-stale-bytecode` py.test plugin to remove stale byte code files. Mar 04, 2020 4 - Beta pytest
+ :pypi:`pytest-reorder` Reorder tests depending on their paths and names. May 31, 2018 4 - Beta pytest
+ :pypi:`pytest-repeat` pytest plugin for repeating tests Oct 31, 2020 5 - Production/Stable pytest (>=3.6)
+ :pypi:`pytest-replay` Saves previous test runs and allows re-executing previous pytest runs to reproduce crashes or flaky tests Jun 09, 2021 4 - Beta pytest (>=3.0.0)
+ :pypi:`pytest-repo-health` A pytest plugin to report on repository standards conformance Nov 23, 2021 3 - Alpha pytest
+ :pypi:`pytest-report` Creates json report that is compatible with atom.io's linter message format May 11, 2016 4 - Beta N/A
+ :pypi:`pytest-reporter` Generate Pytest reports with templates Jul 22, 2021 4 - Beta pytest
+ :pypi:`pytest-reporter-html1` A basic HTML report template for Pytest Jun 08, 2021 4 - Beta N/A
+ :pypi:`pytest-reportinfra` Pytest plugin for reportinfra Aug 11, 2019 3 - Alpha N/A
+ :pypi:`pytest-reporting` A plugin to report summarized results in a table format Oct 25, 2019 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-reportlog` Replacement for the --resultlog option, focused on simplicity and extensibility Dec 11, 2020 3 - Alpha pytest (>=5.2)
+ :pypi:`pytest-report-me` A pytest plugin to generate report. Dec 31, 2020 N/A pytest
+ :pypi:`pytest-report-parameters` pytest plugin for adding tests' parameters to junit report Jun 18, 2020 3 - Alpha pytest (>=2.4.2)
+ :pypi:`pytest-reportportal` Agent for Reporting results of tests to the Report Portal Jun 18, 2021 N/A pytest (>=3.8.0)
+ :pypi:`pytest-reqs` pytest plugin to check pinned requirements May 12, 2019 N/A pytest (>=2.4.2)
+ :pypi:`pytest-requests` A simple plugin to use with pytest Jun 24, 2019 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-reraise` Make multi-threaded pytest test cases fail when they should Jun 17, 2021 5 - Production/Stable pytest (>=4.6)
+ :pypi:`pytest-rerun` Re-run only changed files in specified branch Jul 08, 2019 N/A pytest (>=3.6)
+ :pypi:`pytest-rerunfailures` pytest plugin to re-run tests to eliminate flaky failures Sep 17, 2021 5 - Production/Stable pytest (>=5.3)
+ :pypi:`pytest-resilient-circuits` Resilient Circuits fixtures for PyTest. Nov 15, 2021 N/A N/A
+ :pypi:`pytest-resource` Load resource fixture plugin to use with pytest Nov 14, 2018 4 - Beta N/A
+ :pypi:`pytest-resource-path` Provides path for uniform access to test resources in isolated directory May 01, 2021 5 - Production/Stable pytest (>=3.5.0)
+ :pypi:`pytest-responsemock` Simplified requests calls mocking for pytest Oct 10, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-responses` py.test integration for responses Apr 26, 2021 N/A pytest (>=2.5)
+ :pypi:`pytest-restrict` Pytest plugin to restrict the test types allowed Aug 12, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-rethinkdb` A RethinkDB plugin for pytest. Jul 24, 2016 4 - Beta N/A
+ :pypi:`pytest-reverse` Pytest plugin to reverse test order. Aug 12, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-ringo` pytest plugin to test web applications using the Ringo web framework Sep 27, 2017 3 - Alpha N/A
+ :pypi:`pytest-rng` Fixtures for seeding tests and making randomness reproducible Aug 08, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-roast` pytest plugin for ROAST configuration override and fixtures Jul 29, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-rocketchat` Pytest to Rocket.Chat reporting plugin Apr 18, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-rotest` Pytest integration with rotest Sep 08, 2019 N/A pytest (>=3.5.0)
+ :pypi:`pytest-rpc` Extend py.test for RPC OpenStack testing. Feb 22, 2019 4 - Beta pytest (~=3.6)
+ :pypi:`pytest-rst` Test code from RST documents with pytest Sep 21, 2021 N/A pytest
+ :pypi:`pytest-rt` pytest data collector plugin for Testgr Sep 04, 2021 N/A N/A
+ :pypi:`pytest-rts` Coverage-based regression test selection (RTS) plugin for pytest May 17, 2021 N/A pytest
+ :pypi:`pytest-run-changed` Pytest plugin that runs changed tests only Apr 02, 2021 3 - Alpha pytest
+ :pypi:`pytest-runfailed` implement a --failed option for pytest Mar 24, 2016 N/A N/A
+ :pypi:`pytest-runner` Invoke py.test as distutils command with dependency resolution May 19, 2021 5 - Production/Stable pytest (>=4.6) ; extra == 'testing'
+ :pypi:`pytest-runtime-xfail` Call runtime_xfail() to mark running test as xfail. Aug 26, 2021 N/A N/A
+ :pypi:`pytest-salt` Pytest Salt Plugin Jan 27, 2020 4 - Beta N/A
+ :pypi:`pytest-salt-containers` A Pytest plugin that builds and creates docker containers Nov 09, 2016 4 - Beta N/A
+ :pypi:`pytest-salt-factories` Pytest Salt Plugin Sep 16, 2021 4 - Beta pytest (>=6.0.0)
+ :pypi:`pytest-salt-from-filenames` Simple PyTest Plugin For Salt's Test Suite Specifically Jan 29, 2019 4 - Beta pytest (>=4.1)
+ :pypi:`pytest-salt-runtests-bridge` Simple PyTest Plugin For Salt's Test Suite Specifically Dec 05, 2019 4 - Beta pytest (>=4.1)
+ :pypi:`pytest-sanic` a pytest plugin for Sanic Oct 25, 2021 N/A pytest (>=5.2)
+ :pypi:`pytest-sanity` Dec 07, 2020 N/A N/A
+ :pypi:`pytest-sa-pg` May 14, 2019 N/A N/A
+ :pypi:`pytest-sbase` A complete web automation framework for end-to-end testing. Dec 03, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-scenario` pytest plugin for test scenarios Feb 06, 2017 3 - Alpha N/A
+ :pypi:`pytest-schema` 👍 Validate return values against a schema-like object in testing Aug 31, 2020 5 - Production/Stable pytest (>=3.5.0)
+ :pypi:`pytest-securestore` An encrypted password store for use within pytest cases Nov 08, 2021 4 - Beta N/A
+ :pypi:`pytest-select` A pytest plugin which allows to (de-)select tests from a file. Jan 18, 2019 3 - Alpha pytest (>=3.0)
+ :pypi:`pytest-selenium` pytest plugin for Selenium Sep 19, 2020 5 - Production/Stable pytest (>=5.0.0)
+ :pypi:`pytest-seleniumbase` A complete web automation framework for end-to-end testing. Dec 03, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-selenium-enhancer` pytest plugin for Selenium Nov 26, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-selenium-pdiff` A pytest package implementing perceptualdiff for Selenium tests. Apr 06, 2017 2 - Pre-Alpha N/A
+ :pypi:`pytest-send-email` Send pytest execution result email Dec 04, 2019 N/A N/A
+ :pypi:`pytest-sentry` A pytest plugin to send testrun information to Sentry.io Apr 21, 2021 N/A pytest
+ :pypi:`pytest-server-fixtures` Extensible server fixtures for py.test May 28, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-serverless` Automatically mocks resources from serverless.yml in pytest using moto. Nov 27, 2021 4 - Beta N/A
+ :pypi:`pytest-services` Services plugin for pytest testing framework Oct 30, 2020 6 - Mature N/A
+ :pypi:`pytest-session2file` pytest-session2file (aka: pytest-session_to_file for v0.1.0 - v0.1.2) is a py.test plugin for capturing and saving to file the stdout of py.test. Jan 26, 2021 3 - Alpha pytest
+ :pypi:`pytest-session-fixture-globalize` py.test plugin to make session fixtures behave as if written in conftest, even if it is written in some modules May 15, 2018 4 - Beta N/A
+ :pypi:`pytest-session_to_file` pytest-session_to_file is a py.test plugin for capturing and saving to file the stdout of py.test. Oct 01, 2015 3 - Alpha N/A
+ :pypi:`pytest-sftpserver` py.test plugin to locally test sftp server connections. Sep 16, 2019 4 - Beta N/A
+ :pypi:`pytest-shard` Dec 11, 2020 4 - Beta pytest
+ :pypi:`pytest-shell` A pytest plugin to help with testing shell scripts / black box commands Nov 07, 2021 N/A N/A
+ :pypi:`pytest-sheraf` Versatile ZODB abstraction layer - pytest fixtures Feb 11, 2020 N/A pytest
+ :pypi:`pytest-sherlock` pytest plugin help to find coupled tests Nov 18, 2021 5 - Production/Stable pytest (>=3.5.1)
+ :pypi:`pytest-shortcuts` Expand command-line shortcuts listed in pytest configuration Oct 29, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-shutil` A goodie-bag of unix shell and environment tools for py.test May 28, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-simplehttpserver` Simple pytest fixture to spin up an HTTP server Jun 24, 2021 4 - Beta N/A
+ :pypi:`pytest-simple-plugin` Simple pytest plugin Nov 27, 2019 N/A N/A
+ :pypi:`pytest-simple-settings` simple-settings plugin for pytest Nov 17, 2020 4 - Beta pytest
+ :pypi:`pytest-single-file-logging` Allow for multiple processes to log to a single file May 05, 2016 4 - Beta pytest (>=2.8.1)
+ :pypi:`pytest-skip-markers` Pytest Salt Plugin Oct 04, 2021 4 - Beta pytest (>=6.0.0)
+ :pypi:`pytest-skipper` A plugin that selects only tests with changes in execution path Mar 26, 2017 3 - Alpha pytest (>=3.0.6)
+ :pypi:`pytest-skippy` Automatically skip tests that don't need to run! Jan 27, 2018 3 - Alpha pytest (>=2.3.4)
+ :pypi:`pytest-skip-slow` A pytest plugin to skip \`@pytest.mark.slow\` tests by default. Sep 28, 2021 N/A N/A
+ :pypi:`pytest-slack` Pytest to Slack reporting plugin Dec 15, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-slow` A pytest plugin to skip \`@pytest.mark.slow\` tests by default. Sep 28, 2021 N/A N/A
+ :pypi:`pytest-smartcollect` A plugin for collecting tests that touch changed code Oct 04, 2018 N/A pytest (>=3.5.0)
+ :pypi:`pytest-smartcov` Smart coverage plugin for pytest. Sep 30, 2017 3 - Alpha N/A
+ :pypi:`pytest-smtp` Send email with pytest execution result Feb 20, 2021 N/A pytest
+ :pypi:`pytest-snail` Plugin for adding a marker to slow running tests. 🐌 Nov 04, 2019 3 - Alpha pytest (>=5.0.1)
+ :pypi:`pytest-snapci` py.test plugin for Snap-CI Nov 12, 2015 N/A N/A
+ :pypi:`pytest-snapshot` A plugin for snapshot testing with pytest. Dec 02, 2021 4 - Beta pytest (>=3.0.0)
+ :pypi:`pytest-snmpserver` May 12, 2021 N/A N/A
+ :pypi:`pytest-socket` Pytest Plugin to disable socket calls during tests Aug 28, 2021 4 - Beta pytest (>=3.6.3)
+ :pypi:`pytest-soft-assertions` May 05, 2020 3 - Alpha pytest
+ :pypi:`pytest-solr` Solr process and client fixtures for py.test. May 11, 2020 3 - Alpha pytest (>=3.0.0)
+ :pypi:`pytest-sorter` A simple plugin to first execute tests that historically failed more Apr 20, 2021 4 - Beta pytest (>=3.1.1)
+ :pypi:`pytest-sourceorder` Test-ordering plugin for pytest Sep 01, 2021 4 - Beta pytest
+ :pypi:`pytest-spark` pytest plugin to run the tests with support of pyspark. Feb 23, 2020 4 - Beta pytest
+ :pypi:`pytest-spawner` py.test plugin to spawn process and communicate with them. Jul 31, 2015 4 - Beta N/A
+ :pypi:`pytest-spec` Library pytest-spec is a pytest plugin to display test execution output like a SPECIFICATION. May 04, 2021 N/A N/A
+ :pypi:`pytest-sphinx` Doctest plugin for pytest with support for Sphinx-specific doctest-directives Aug 05, 2020 4 - Beta N/A
+ :pypi:`pytest-spiratest` Exports unit tests as test runs in SpiraTest/Team/Plan Oct 13, 2021 N/A N/A
+ :pypi:`pytest-splinter` Splinter plugin for pytest testing framework Dec 25, 2020 6 - Mature N/A
+ :pypi:`pytest-split` Pytest plugin which splits the test suite to equally sized sub suites based on test execution time. Nov 09, 2021 4 - Beta pytest (>=5,<7)
+ :pypi:`pytest-splitio` Split.io SDK integration for e2e tests Sep 22, 2020 N/A pytest (<7,>=5.0)
+ :pypi:`pytest-split-tests` A Pytest plugin for running a subset of your tests by splitting them into equally sized groups. Forked from Mark Adams' original project pytest-test-groups. Jul 30, 2021 5 - Production/Stable pytest (>=2.5)
+ :pypi:`pytest-split-tests-tresorit` Feb 22, 2021 1 - Planning N/A
+ :pypi:`pytest-splunk-addon` A Dynamic test tool for Splunk Apps and Add-ons Nov 29, 2021 N/A pytest (>5.4.0,<6.3)
+ :pypi:`pytest-splunk-addon-ui-smartx` Library to support testing Splunk Add-on UX Oct 07, 2021 N/A N/A
+ :pypi:`pytest-splunk-env` pytest fixtures for interaction with Splunk Enterprise and Splunk Cloud Oct 22, 2020 N/A pytest (>=6.1.1,<7.0.0)
+ :pypi:`pytest-sqitch` sqitch for pytest Apr 06, 2020 4 - Beta N/A
+ :pypi:`pytest-sqlalchemy` pytest plugin with sqlalchemy related fixtures Mar 13, 2018 3 - Alpha N/A
+ :pypi:`pytest-sql-bigquery` Yet another SQL-testing framework for BigQuery provided by pytest plugin Dec 19, 2019 N/A pytest
+ :pypi:`pytest-srcpaths` Add paths to sys.path Oct 15, 2021 N/A N/A
+ :pypi:`pytest-ssh` pytest plugin for ssh command run May 27, 2019 N/A pytest
+ :pypi:`pytest-start-from` Start pytest run from a given point Apr 11, 2016 N/A N/A
+ :pypi:`pytest-statsd` pytest plugin for reporting to graphite Nov 30, 2018 5 - Production/Stable pytest (>=3.0.0)
+ :pypi:`pytest-stepfunctions` A small description May 08, 2021 4 - Beta pytest
+ :pypi:`pytest-steps` Create step-wise / incremental tests in pytest. Sep 23, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-stepwise` Run a test suite one failing test at a time. Dec 01, 2015 4 - Beta N/A
+ :pypi:`pytest-stoq` A plugin to pytest stoq Feb 09, 2021 4 - Beta N/A
+ :pypi:`pytest-stress` A Pytest plugin that allows you to loop tests for a user defined amount of time. Dec 07, 2019 4 - Beta pytest (>=3.6.0)
+ :pypi:`pytest-structlog` Structured logging assertions Sep 21, 2021 N/A pytest
+ :pypi:`pytest-structmpd` provide structured temporary directory Oct 17, 2018 N/A N/A
+ :pypi:`pytest-stub` Stub packages, modules and attributes. Apr 28, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-stubprocess` Provide stub implementations for subprocesses in Python tests Sep 17, 2018 3 - Alpha pytest (>=3.5.0)
+ :pypi:`pytest-study` A pytest plugin to organize long run tests (named studies) without interfering the regular tests Sep 26, 2017 3 - Alpha pytest (>=2.0)
+ :pypi:`pytest-subprocess` A plugin to fake subprocess for pytest Nov 07, 2021 5 - Production/Stable pytest (>=4.0.0)
+ :pypi:`pytest-subtesthack` A hack to explicitly set up and tear down fixtures. Mar 02, 2021 N/A N/A
+ :pypi:`pytest-subtests` unittest subTest() support and subtests fixture May 29, 2021 4 - Beta pytest (>=5.3.0)
+ :pypi:`pytest-subunit` pytest-subunit is a plugin for py.test which outputs test results in subunit format. Aug 29, 2017 N/A N/A
+ :pypi:`pytest-sugar` pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly). Jul 06, 2020 3 - Alpha N/A
+ :pypi:`pytest-sugar-bugfix159` Workaround for https://github.com/Frozenball/pytest-sugar/issues/159 Nov 07, 2018 5 - Production/Stable pytest (!=3.7.3,>=3.5); extra == 'testing'
+ :pypi:`pytest-super-check` Pytest plugin to check your TestCase classes call super in setUp, tearDown, etc. Aug 12, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-svn` SVN repository fixture for py.test May 28, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-symbols` pytest-symbols is a pytest plugin that adds support for passing test environment symbols into pytest tests. Nov 20, 2017 3 - Alpha N/A
+ :pypi:`pytest-takeltest` Fixtures for ansible, testinfra and molecule Oct 13, 2021 N/A N/A
+ :pypi:`pytest-talisker` Nov 28, 2021 N/A N/A
+ :pypi:`pytest-tap` Test Anything Protocol (TAP) reporting plugin for pytest Oct 27, 2021 5 - Production/Stable pytest (>=3.0)
+ :pypi:`pytest-tape` easy assertion with expected results saved to yaml files Mar 17, 2021 4 - Beta N/A
+ :pypi:`pytest-target` Pytest plugin for remote target orchestration. Jan 21, 2021 3 - Alpha pytest (>=6.1.2,<7.0.0)
+ :pypi:`pytest-tblineinfo` tblineinfo is a py.test plugin that inserts the node id in the final py.test report when the --tb=line option is used Dec 01, 2015 3 - Alpha pytest (>=2.0)
+ :pypi:`pytest-teamcity-logblock` py.test plugin to introduce block structure in teamcity build log, if output is not captured May 15, 2018 4 - Beta N/A
+ :pypi:`pytest-telegram` Pytest to Telegram reporting plugin Dec 10, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-tempdir` Predictable and repeatable tempdir support. Oct 11, 2019 4 - Beta pytest (>=2.8.1)
+ :pypi:`pytest-terraform` A pytest plugin for using terraform fixtures Nov 10, 2021 N/A pytest (>=6.0)
+ :pypi:`pytest-terraform-fixture` generate terraform resources to use with pytest Nov 14, 2018 4 - Beta N/A
+ :pypi:`pytest-testbook` A plugin to run tests written in Jupyter notebook Dec 11, 2016 3 - Alpha N/A
+ :pypi:`pytest-testconfig` Test configuration plugin for pytest. Jan 11, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-testdirectory` A py.test plugin providing temporary directories in unit tests. Nov 06, 2018 5 - Production/Stable pytest
+ :pypi:`pytest-testdox` A testdox format reporter for pytest Oct 13, 2020 5 - Production/Stable pytest (>=3.7.0)
+ :pypi:`pytest-test-groups` A Pytest plugin for running a subset of your tests by splitting them into equally sized groups. Oct 25, 2016 5 - Production/Stable N/A
+ :pypi:`pytest-testinfra` Test infrastructures Jun 20, 2021 5 - Production/Stable pytest (!=3.0.2)
+ :pypi:`pytest-testlink-adaptor` pytest reporting plugin for testlink Dec 20, 2018 4 - Beta pytest (>=2.6)
+ :pypi:`pytest-testmon` selects tests affected by changed files and methods Oct 22, 2021 4 - Beta N/A
+ :pypi:`pytest-testobject` Plugin to use TestObject Suites with Pytest Sep 24, 2019 4 - Beta pytest (>=3.1.1)
+ :pypi:`pytest-testrail` pytest plugin for creating TestRail runs and adding results Aug 27, 2020 N/A pytest (>=3.6)
+ :pypi:`pytest-testrail2` A small example package Nov 17, 2020 N/A pytest (>=5)
+ :pypi:`pytest-testrail-api` Pytest plugin for integration with TestRail Nov 30, 2021 N/A pytest (>=5.5)
+ :pypi:`pytest-testrail-api-client` TestRail Api Python Client Dec 03, 2021 N/A pytest
+ :pypi:`pytest-testrail-appetize` pytest plugin for creating TestRail runs and adding results Sep 29, 2021 N/A N/A
+ :pypi:`pytest-testrail-client` pytest plugin for Testrail Sep 29, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-testrail-e2e` pytest plugin for creating TestRail runs and adding results Oct 11, 2021 N/A pytest (>=3.6)
+ :pypi:`pytest-testrail-ns` pytest plugin for creating TestRail runs and adding results Oct 08, 2021 N/A pytest (>=3.6)
+ :pypi:`pytest-testrail-plugin` PyTest plugin for TestRail Apr 21, 2020 3 - Alpha pytest
+ :pypi:`pytest-testrail-reporter` Sep 10, 2018 N/A N/A
+ :pypi:`pytest-testreport` Nov 12, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-testslide` TestSlide fixture for pytest Jan 07, 2021 5 - Production/Stable pytest (~=6.2)
+ :pypi:`pytest-test-this` Plugin for py.test to run relevant tests, based on naively checking if a test contains a reference to the symbol you supply Sep 15, 2019 2 - Pre-Alpha pytest (>=2.3)
+ :pypi:`pytest-test-utils` Nov 30, 2021 N/A pytest (>=5)
+ :pypi:`pytest-tesults` Tesults plugin for pytest Jul 31, 2021 5 - Production/Stable pytest (>=3.5.0)
+ :pypi:`pytest-tezos` pytest-ligo Jan 16, 2020 4 - Beta N/A
+ :pypi:`pytest-thawgun` Pytest plugin for time travel May 26, 2020 3 - Alpha N/A
+ :pypi:`pytest-threadleak` Detects thread leaks Sep 08, 2017 4 - Beta N/A
+ :pypi:`pytest-tick` Ticking on tests Aug 31, 2021 5 - Production/Stable pytest (>=6.2.5,<7.0.0)
+ :pypi:`pytest-timeit` A pytest plugin to time test function runs Oct 13, 2016 4 - Beta N/A
+ :pypi:`pytest-timeout` pytest plugin to abort hanging tests Oct 11, 2021 5 - Production/Stable pytest (>=5.0.0)
+ :pypi:`pytest-timeouts` Linux-only Pytest plugin to control durations of various test case execution phases Sep 21, 2019 5 - Production/Stable N/A
+ :pypi:`pytest-timer` A timer plugin for pytest Jun 02, 2021 N/A N/A
+ :pypi:`pytest-timestamper` Pytest plugin to add a timestamp prefix to the pytest output Jun 06, 2021 N/A N/A
+ :pypi:`pytest-tipsi-django` Nov 17, 2021 4 - Beta pytest (>=6.0.0)
+ :pypi:`pytest-tipsi-testing` Better fixtures management. Various helpers Nov 04, 2020 4 - Beta pytest (>=3.3.0)
+ :pypi:`pytest-tldr` A pytest plugin that limits the output to just the things you need. Mar 12, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-tm4j-reporter` Cloud Jira Test Management (TM4J) PyTest reporter plugin Sep 01, 2020 N/A pytest
+ :pypi:`pytest-tmreport` this is a vue-element ui report for pytest Nov 17, 2021 N/A N/A
+ :pypi:`pytest-todo` A small plugin for the pytest testing framework, marking TODO comments as failure May 23, 2019 4 - Beta pytest
+ :pypi:`pytest-tomato` Mar 01, 2019 5 - Production/Stable N/A
+ :pypi:`pytest-toolbelt` This is just a collection of utilities for pytest that don't really belong in pytest proper. Aug 12, 2019 3 - Alpha N/A
+ :pypi:`pytest-toolbox` Numerous useful plugins for pytest. Apr 07, 2018 N/A pytest (>=3.5.0)
+ :pypi:`pytest-tornado` A py.test plugin providing fixtures and markers to simplify testing of asynchronous tornado applications. Jun 17, 2020 5 - Production/Stable pytest (>=3.6)
+ :pypi:`pytest-tornado5` A py.test plugin providing fixtures and markers to simplify testing of asynchronous tornado applications. Nov 16, 2018 5 - Production/Stable pytest (>=3.6)
+ :pypi:`pytest-tornado-yen3` A py.test plugin providing fixtures and markers to simplify testing of asynchronous tornado applications. Oct 15, 2018 5 - Production/Stable N/A
+ :pypi:`pytest-tornasync` py.test plugin for testing Python 3.5+ Tornado code Jul 15, 2019 3 - Alpha pytest (>=3.0)
+ :pypi:`pytest-track` Feb 26, 2021 3 - Alpha pytest (>=3.0)
+ :pypi:`pytest-translations` Test your translation files. Nov 05, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-travis-fold` Folds captured output sections in Travis CI build log Nov 29, 2017 4 - Beta pytest (>=2.6.0)
+ :pypi:`pytest-trello` Plugin for py.test that integrates trello using markers Nov 20, 2015 5 - Production/Stable N/A
+ :pypi:`pytest-trepan` Pytest plugin for trepan debugger. Jul 28, 2018 5 - Production/Stable N/A
+ :pypi:`pytest-trialtemp` py.test plugin for using the same _trial_temp working directory as trial Jun 08, 2015 N/A N/A
+ :pypi:`pytest-trio` Pytest plugin for trio Oct 16, 2020 N/A N/A
+ :pypi:`pytest-tspwplib` A simple plugin to use with tspwplib Jan 08, 2021 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-tstcls` Test Class Base Mar 23, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-twisted` A twisted plugin for pytest. Aug 30, 2021 5 - Production/Stable pytest (>=2.3)
+ :pypi:`pytest-typhoon-xray` Typhoon HIL plugin for pytest Nov 03, 2021 4 - Beta N/A
+ :pypi:`pytest-tytest` Typhoon HIL plugin for pytest May 25, 2020 4 - Beta pytest (>=5.4.2)
+ :pypi:`pytest-ubersmith` Easily mock calls to ubersmith at the \`requests\` level. Apr 13, 2015 N/A N/A
+ :pypi:`pytest-ui` Text User Interface for running python tests Jul 05, 2021 4 - Beta pytest
+ :pypi:`pytest-unhandled-exception-exit-code` Plugin for py.test to set a different exit code on uncaught exceptions Jun 22, 2020 5 - Production/Stable pytest (>=2.3)
+ :pypi:`pytest-unittest-filter` A pytest plugin for filtering unittest-based test classes Jan 12, 2019 4 - Beta pytest (>=3.1.0)
+ :pypi:`pytest-unmarked` Run only unmarked tests Aug 27, 2019 5 - Production/Stable N/A
+ :pypi:`pytest-unordered` Test equality of unordered collections in pytest Mar 28, 2021 4 - Beta N/A
+ :pypi:`pytest-upload-report` pytest-upload-report is a plugin for pytest that uploads your test report for test results. Jun 18, 2021 5 - Production/Stable N/A
+ :pypi:`pytest-utils` Some helpers for pytest. Dec 04, 2021 4 - Beta pytest (>=6.2.5,<7.0.0)
+ :pypi:`pytest-vagrant` A py.test plugin providing access to vagrant. Sep 07, 2021 5 - Production/Stable pytest
+ :pypi:`pytest-valgrind` May 19, 2021 N/A N/A
+ :pypi:`pytest-variables` pytest plugin for providing variables to tests/fixtures Oct 23, 2019 5 - Production/Stable pytest (>=2.4.2)
+ :pypi:`pytest-variant` Variant support for Pytest Jun 20, 2021 N/A N/A
+ :pypi:`pytest-vcr` Plugin for managing VCR.py cassettes Apr 26, 2019 5 - Production/Stable pytest (>=3.6.0)
+ :pypi:`pytest-vcr-delete-on-fail` A pytest plugin that automates vcrpy cassettes deletion on test failure. Aug 13, 2021 4 - Beta pytest (>=6.2.2,<7.0.0)
+ :pypi:`pytest-vcrpandas` Test from HTTP interactions to dataframe processed. Jan 12, 2019 4 - Beta pytest
+ :pypi:`pytest-venv` py.test fixture for creating a virtual environment Aug 04, 2020 4 - Beta pytest
+ :pypi:`pytest-ver` Pytest module with Verification Report Aug 30, 2021 2 - Pre-Alpha N/A
+ :pypi:`pytest-verbose-parametrize` More descriptive output for parametrized py.test tests May 28, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-vimqf` A simple pytest plugin that will shrink pytest output when specified, to fit vim quickfix window. Feb 08, 2021 4 - Beta pytest (>=6.2.2,<7.0.0)
+ :pypi:`pytest-virtualenv` Virtualenv fixture for py.test May 28, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-voluptuous` Pytest plugin for asserting data against voluptuous schema. Jun 09, 2020 N/A pytest
+ :pypi:`pytest-vscodedebug` A pytest plugin to easily enable debugging tests within Visual Studio Code Dec 04, 2020 4 - Beta N/A
+ :pypi:`pytest-vts` pytest plugin for automatic recording of http stubbed tests Jun 05, 2019 N/A pytest (>=2.3)
+ :pypi:`pytest-vw` pytest-vw makes your failing test cases succeed under CI tools scrutiny Oct 07, 2015 4 - Beta N/A
+ :pypi:`pytest-vyper` Plugin for the vyper smart contract language. May 28, 2020 2 - Pre-Alpha N/A
+ :pypi:`pytest-wa-e2e-plugin` Pytest plugin for testing whatsapp bots with end to end tests Feb 18, 2020 4 - Beta pytest (>=3.5.0)
+ :pypi:`pytest-watch` Local continuous test runner with pytest and watchdog. May 20, 2018 N/A N/A
+ :pypi:`pytest-watcher` Continuously runs pytest on changes in \*.py files Sep 18, 2021 3 - Alpha N/A
+ :pypi:`pytest-wdl` Pytest plugin for testing WDL workflows. Nov 17, 2020 5 - Production/Stable N/A
+ :pypi:`pytest-webdriver` Selenium webdriver fixture for py.test May 28, 2019 5 - Production/Stable pytest
+ :pypi:`pytest-wetest` Welian API Automation test framework pytest plugin Nov 10, 2018 4 - Beta N/A
+ :pypi:`pytest-whirlwind` Testing Tornado. Jun 12, 2020 N/A N/A
+ :pypi:`pytest-wholenodeid` pytest addon for displaying the whole node id for failures Aug 26, 2015 4 - Beta pytest (>=2.0)
+ :pypi:`pytest-win32consoletitle` Pytest progress in console title (Win32 only) Aug 08, 2021 N/A N/A
+ :pypi:`pytest-winnotify` Windows tray notifications for py.test results. Apr 22, 2016 N/A N/A
+ :pypi:`pytest-with-docker` pytest with docker helpers. Nov 09, 2021 N/A pytest
+ :pypi:`pytest-workflow` A pytest plugin for configuring workflow/pipeline tests using YAML files Dec 03, 2021 5 - Production/Stable pytest (>=5.4.0)
+ :pypi:`pytest-xdist` pytest xdist plugin for distributed testing and loop-on-failing modes Sep 21, 2021 5 - Production/Stable pytest (>=6.0.0)
+ :pypi:`pytest-xdist-debug-for-graingert` pytest xdist plugin for distributed testing and loop-on-failing modes Jul 24, 2019 5 - Production/Stable pytest (>=4.4.0)
+ :pypi:`pytest-xdist-forked` forked from pytest-xdist Feb 10, 2020 5 - Production/Stable pytest (>=4.4.0)
+ :pypi:`pytest-xdist-tracker` pytest plugin helps to reproduce failures for particular xdist node Nov 18, 2021 3 - Alpha pytest (>=3.5.1)
+ :pypi:`pytest-xfaillist` Maintain a xfaillist in an additional file to avoid merge-conflicts. Sep 17, 2021 N/A pytest (>=6.2.2,<7.0.0)
+ :pypi:`pytest-xfiles` Pytest fixtures providing data read from function, module or package related (x)files. Feb 27, 2018 N/A N/A
+ :pypi:`pytest-xlog` Extended logging for test and decorators May 31, 2020 4 - Beta N/A
+ :pypi:`pytest-xpara` An extended parametrizing plugin of pytest. Oct 30, 2017 3 - Alpha pytest
+ :pypi:`pytest-xprocess` A pytest plugin for managing processes across test runs. Jul 28, 2021 4 - Beta pytest (>=2.8)
+ :pypi:`pytest-xray` May 30, 2019 3 - Alpha N/A
+ :pypi:`pytest-xrayjira` Mar 17, 2020 3 - Alpha pytest (==4.3.1)
+ :pypi:`pytest-xray-server` Oct 27, 2021 3 - Alpha pytest (>=5.3.1)
+ :pypi:`pytest-xvfb` A pytest plugin to run Xvfb for tests. Jun 09, 2020 4 - Beta pytest (>=2.8.1)
+ :pypi:`pytest-yaml` This plugin is used to load yaml output to your test using pytest framework. Oct 05, 2018 N/A pytest
+ :pypi:`pytest-yamltree` Create or check file/directory trees described by YAML Mar 02, 2020 4 - Beta pytest (>=3.1.1)
+ :pypi:`pytest-yamlwsgi` Run tests against wsgi apps defined in yaml May 11, 2010 N/A N/A
+ :pypi:`pytest-yapf` Run yapf Jul 06, 2017 4 - Beta pytest (>=3.1.1)
+ :pypi:`pytest-yapf3` Validate your Python file format with yapf Aug 03, 2020 5 - Production/Stable pytest (>=5.4)
+ :pypi:`pytest-yield` PyTest plugin to run tests concurrently, each \`yield\` switch context to other one Jan 23, 2019 N/A N/A
+ :pypi:`pytest-yuk` Display tests you are uneasy with, using 🤢/🤮 for pass/fail of tests marked with yuk. Mar 26, 2021 N/A N/A
+ :pypi:`pytest-zafira` A Zafira plugin for pytest Sep 18, 2019 5 - Production/Stable pytest (==4.1.1)
+ :pypi:`pytest-zap` OWASP ZAP plugin for py.test. May 12, 2014 4 - Beta N/A
+ :pypi:`pytest-zebrunner` Pytest connector for Zebrunner reporting Dec 02, 2021 5 - Production/Stable pytest (>=4.5.0)
+ :pypi:`pytest-zigzag` Extend py.test for RPC OpenStack testing. Feb 27, 2019 4 - Beta pytest (~=3.6)
+ =============================================== ======================================================================================================================================================================== ============== ===================== ================================================
+
+.. only:: latex
+
+
+ :pypi:`pytest-accept`
+ *last release*: Nov 22, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=6,<7)
+
+ A pytest-plugin for updating doctest outputs
+
+ :pypi:`pytest-adaptavist`
+ *last release*: Nov 30, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=5.4.0)
+
+ pytest plugin for generating test execution results within Jira Test Management (tm4j)
+
+ :pypi:`pytest-addons-test`
+ *last release*: Aug 02, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=6.2.4,<7.0.0)
+
+ A plugin for testing pytest
+
+ :pypi:`pytest-adf`
+ *last release*: May 10, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Pytest plugin for writing Azure Data Factory integration tests
+
+ :pypi:`pytest-adf-azure-identity`
+ *last release*: Mar 06, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Pytest plugin for writing Azure Data Factory integration tests
+
+ :pypi:`pytest-agent`
+ *last release*: Nov 25, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Service that exposes a REST API that can be used to interact remotely with Pytest. It is shipped with a dashboard that enables running tests in a more convenient way.
+
+ :pypi:`pytest-aggreport`
+ *last release*: Mar 07, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.2.2)
+
+ pytest plugin for pytest-repeat that generates an aggregate report of the same test cases with additional statistics details.
+
+ :pypi:`pytest-aio`
+ *last release*: Oct 20, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Pytest plugin for testing async python code
+
+ :pypi:`pytest-aiofiles`
+ *last release*: May 14, 2017,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ pytest fixtures for writing aiofiles tests with pyfakefs
+
+ :pypi:`pytest-aiohttp`
+ *last release*: Dec 05, 2017,
+ *status*: N/A,
+ *requires*: pytest
+
+ pytest plugin for aiohttp support
+
+ :pypi:`pytest-aiohttp-client`
+ *last release*: Nov 01, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=6)
+
+ Pytest \`client\` fixture for the Aiohttp
+
+ :pypi:`pytest-aioresponses`
+ *last release*: Jul 29, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ py.test integration for aioresponses
+
+ :pypi:`pytest-aioworkers`
+ *last release*: Dec 04, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A plugin to test aioworkers project with pytest
+
+ :pypi:`pytest-airflow`
+ *last release*: Apr 03, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=4.4.0)
+
+ pytest support for airflow.
+
+ :pypi:`pytest-airflow-utils`
+ *last release*: Nov 15, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-alembic`
+ *last release*: Dec 02, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=1.0)
+
+ A pytest plugin for verifying alembic migrations.
+
+ :pypi:`pytest-allclose`
+ *last release*: Jul 30, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Pytest fixture extending Numpy's allclose function
+
+ :pypi:`pytest-allure-adaptor`
+ *last release*: Jan 10, 2018,
+ *status*: N/A,
+ *requires*: pytest (>=2.7.3)
+
+ Plugin for py.test to generate allure xml reports
+
+ :pypi:`pytest-allure-adaptor2`
+ *last release*: Oct 14, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=2.7.3)
+
+ Plugin for py.test to generate allure xml reports
+
+ :pypi:`pytest-allure-dsl`
+ *last release*: Oct 25, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ pytest plugin to test case doc string dsl instructions
+
+ :pypi:`pytest-allure-spec-coverage`
+ *last release*: Oct 26, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ The pytest plugin aimed to display test coverage of the specs (requirements) in Allure
+
+ :pypi:`pytest-alphamoon`
+ *last release*: Oct 21, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Static code checks used at Alphamoon
+
+ :pypi:`pytest-android`
+ *last release*: Feb 21, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ This fixture provides a configured "driver" for Android Automated Testing, using uiautomator2.
+
+ :pypi:`pytest-anki`
+ *last release*: Oct 14, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A pytest plugin for testing Anki add-ons
+
+ :pypi:`pytest-annotate`
+ *last release*: Nov 29, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (<7.0.0,>=3.2.0)
+
+ pytest-annotate: Generate PyAnnotate annotations from your pytest tests.
+
+ :pypi:`pytest-ansible`
+ *last release*: May 25, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Plugin for py.test to simplify calling ansible modules from tests or fixtures
+
+ :pypi:`pytest-ansible-playbook`
+ *last release*: Mar 08, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Pytest fixture which runs given ansible playbook file.
+
+ :pypi:`pytest-ansible-playbook-runner`
+ *last release*: Dec 02, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.1.0)
+
+ Pytest fixture which runs given ansible playbook file.
+
+ :pypi:`pytest-antilru`
+ *last release*: Apr 11, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Bust functools.lru_cache when running pytest to avoid test pollution
+
+ :pypi:`pytest-anyio`
+ *last release*: Jun 29, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ The pytest anyio plugin is built into anyio. You don't need this package.
+
+ :pypi:`pytest-anything`
+ *last release*: Feb 18, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest fixtures to assert anything and something
+
+ :pypi:`pytest-aoc`
+ *last release*: Nov 23, 2021,
+ *status*: N/A,
+ *requires*: pytest ; extra == 'test'
+
+ Downloads puzzle inputs for Advent of Code and synthesizes PyTest fixtures
+
+ :pypi:`pytest-api`
+ *last release*: May 04, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ PyTest-API Python Web Framework built for testing purposes.
+
+ :pypi:`pytest-apistellar`
+ *last release*: Jun 18, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ apistellar plugin for pytest.
+
+ :pypi:`pytest-appengine`
+ *last release*: Feb 27, 2017,
+ *status*: N/A,
+ *requires*: N/A
+
+ AppEngine integration that works well with pytest-django
+
+ :pypi:`pytest-appium`
+ *last release*: Dec 05, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest plugin for appium
+
+ :pypi:`pytest-approvaltests`
+ *last release*: Feb 07, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A plugin to use approvaltests with pytest
+
+ :pypi:`pytest-argus`
+ *last release*: Jun 24, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=6.2.4)
+
+ pytest results collection plugin
+
+ :pypi:`pytest-arraydiff`
+ *last release*: Dec 06, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ pytest plugin to help with comparing array output from tests
+
+ :pypi:`pytest-asgi-server`
+ *last release*: Dec 12, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=5.4.1)
+
+ Convenient ASGI client/server fixtures for Pytest
+
+ :pypi:`pytest-asptest`
+ *last release*: Apr 28, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ test Answer Set Programming programs
+
+ :pypi:`pytest-assertutil`
+ *last release*: May 10, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest-assertutil
+
+ :pypi:`pytest-assert-utils`
+ *last release*: Sep 21, 2021,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Useful assertion utilities for use with pytest
+
+ :pypi:`pytest-assume`
+ *last release*: Jun 24, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=2.7)
+
+ A pytest plugin that allows multiple failures per test
+
+ :pypi:`pytest-ast-back-to-python`
+ *last release*: Sep 29, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A plugin for pytest devs to view how assertion rewriting recodes the AST
+
+ :pypi:`pytest-astropy`
+ *last release*: Sep 21, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.6)
+
+ Meta-package containing dependencies for testing
+
+ :pypi:`pytest-astropy-header`
+ *last release*: Dec 18, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=2.8)
+
+ pytest plugin to add diagnostic information to the header of the test output
+
+ :pypi:`pytest-ast-transformer`
+ *last release*: May 04, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+
+
+ :pypi:`pytest-asyncio`
+ *last release*: Oct 15, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=5.4.0)
+
+ Pytest support for asyncio.
+
+ :pypi:`pytest-asyncio-cooperative`
+ *last release*: Oct 12, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Run all your asynchronous tests cooperatively.
+
+ :pypi:`pytest-asyncio-network-simulator`
+ *last release*: Jul 31, 2018,
+ *status*: 3 - Alpha,
+ *requires*: pytest (<3.7.0,>=3.3.2)
+
+ pytest-asyncio-network-simulator: Plugin for pytest for simulating the network in tests
+
+ :pypi:`pytest-async-mongodb`
+ *last release*: Oct 18, 2017,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.5.2)
+
+ pytest plugin for async MongoDB
+
+ :pypi:`pytest-async-sqlalchemy`
+ *last release*: Oct 07, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.0.0)
+
+ Database testing fixtures using the SQLAlchemy asyncio API
+
+ :pypi:`pytest-atomic`
+ *last release*: Nov 24, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Skip rest of tests if previous test failed.
+
+ :pypi:`pytest-attrib`
+ *last release*: May 24, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest plugin to select tests based on attributes similar to the nose-attrib plugin
+
+ :pypi:`pytest-austin`
+ *last release*: Oct 11, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Austin plugin for pytest
+
+ :pypi:`pytest-autochecklog`
+ *last release*: Apr 25, 2015,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ automatically check condition and log all the checks
+
+ :pypi:`pytest-automation`
+ *last release*: Oct 01, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ pytest plugin for building a test suite, using YAML files to extend pytest parameterize functionality.
+
+ :pypi:`pytest-automock`
+ *last release*: Apr 22, 2020,
+ *status*: N/A,
+ *requires*: pytest ; extra == 'dev'
+
+ Pytest plugin for automatic mock creation
+
+ :pypi:`pytest-auto-parametrize`
+ *last release*: Oct 02, 2016,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin: avoid repeating arguments in parametrize
+
+ :pypi:`pytest-autotest`
+ *last release*: Aug 25, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ This fixture provides a configured "driver" for Android Automated Testing, using uiautomator2.
+
+ :pypi:`pytest-avoidance`
+ *last release*: May 23, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Makes pytest skip tests that do not need rerunning
+
+ :pypi:`pytest-aws`
+ *last release*: Oct 04, 2017,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest plugin for testing AWS resource configurations
+
+ :pypi:`pytest-aws-config`
+ *last release*: May 28, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Protect your AWS credentials in unit tests
+
+ :pypi:`pytest-axe`
+ *last release*: Nov 12, 2018,
+ *status*: N/A,
+ *requires*: pytest (>=3.0.0)
+
+ pytest plugin for axe-selenium-python
+
+ :pypi:`pytest-azurepipelines`
+ *last release*: Jul 23, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Formatting PyTest output for Azure Pipelines UI
+
+ :pypi:`pytest-bandit`
+ *last release*: Feb 23, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A bandit plugin for pytest
+
+ :pypi:`pytest-base-url`
+ *last release*: Jun 19, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.7.3)
+
+ pytest plugin for URL based testing
+
+ :pypi:`pytest-bdd`
+ *last release*: Oct 25, 2021,
+ *status*: 6 - Mature,
+ *requires*: pytest (>=4.3)
+
+ BDD for pytest
+
+ :pypi:`pytest-bdd-splinter`
+ *last release*: Aug 12, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.0.0)
+
+ Common steps for pytest bdd and splinter integration
+
+ :pypi:`pytest-bdd-web`
+ *last release*: Jan 02, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A simple plugin to use with pytest
+
+ :pypi:`pytest-bdd-wrappers`
+ *last release*: Feb 11, 2020,
+ *status*: 2 - Pre-Alpha,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-beakerlib`
+ *last release*: Mar 17, 2017,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ A pytest plugin that reports test results to the BeakerLib framework
+
+ :pypi:`pytest-beds`
+ *last release*: Jun 07, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Fixtures for testing Google Appengine (GAE) apps
+
+ :pypi:`pytest-bench`
+ *last release*: Jul 21, 2014,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Benchmark utility that plugs into pytest.
+
+ :pypi:`pytest-benchmark`
+ *last release*: Apr 17, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.8)
+
+ A \`\`pytest\`\` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer.
+
+ :pypi:`pytest-bg-process`
+ *last release*: Aug 17, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Pytest plugin to initialize background process
+
+ :pypi:`pytest-bigchaindb`
+ *last release*: Aug 17, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A BigchainDB plugin for pytest.
+
+ :pypi:`pytest-bigquery-mock`
+ *last release*: Aug 05, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=5.0)
+
+ Provides a mock fixture for python bigquery client
+
+ :pypi:`pytest-black`
+ *last release*: Oct 05, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A pytest plugin to enable format checking with black
+
+ :pypi:`pytest-black-multipy`
+ *last release*: Jan 14, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (!=3.7.3,>=3.5) ; extra == 'testing'
+
+ Allow '--black' on older Pythons
+
+ :pypi:`pytest-blame`
+ *last release*: May 04, 2019,
+ *status*: N/A,
+ *requires*: pytest (>=4.4.0)
+
+ A pytest plugin that helps developers debug by providing useful commit history.
+
+ :pypi:`pytest-blender`
+ *last release*: Oct 29, 2021,
+ *status*: N/A,
+ *requires*: pytest (==6.2.5) ; extra == 'dev'
+
+ Blender Pytest plugin.
+
+ :pypi:`pytest-blink1`
+ *last release*: Jan 07, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Pytest plugin to emit notifications via the Blink(1) RGB LED
+
+ :pypi:`pytest-blockage`
+ *last release*: Feb 13, 2019,
+ *status*: N/A,
+ *requires*: pytest
+
+ Disable network requests during a test run.
+
+ :pypi:`pytest-blocker`
+ *last release*: Sep 07, 2015,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest plugin to mark a test as blocker and skip all other tests
+
+ :pypi:`pytest-board`
+ *last release*: Jan 20, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Local continuous test runner with pytest and watchdog.
+
+ :pypi:`pytest-bpdb`
+ *last release*: Jan 19, 2015,
+ *status*: 2 - Pre-Alpha,
+ *requires*: N/A
+
+ A py.test plug-in to enable drop to bpdb debugger on test failure.
+
+ :pypi:`pytest-bravado`
+ *last release*: Jul 19, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest-bravado automatically generates client fixtures from OpenAPI specifications.
+
+ :pypi:`pytest-breakword`
+ *last release*: Aug 04, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=6.2.4,<7.0.0)
+
+ Use breakword with pytest
+
+ :pypi:`pytest-breed-adapter`
+ *last release*: Nov 07, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A simple plugin to connect with breed-server
+
+ :pypi:`pytest-briefcase`
+ *last release*: Jun 14, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A pytest plugin for running tests on a Briefcase project.
+
+ :pypi:`pytest-browser`
+ *last release*: Dec 10, 2016,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ A pytest plugin for console based browser test selection just after the collection phase
+
+ :pypi:`pytest-browsermob-proxy`
+ *last release*: Jun 11, 2013,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ BrowserMob proxy plugin for py.test.
+
+ :pypi:`pytest-browserstack-local`
+ *last release*: Feb 09, 2018,
+ *status*: N/A,
+ *requires*: N/A
+
+ \`\`py.test\`\` plugin to run \`\`BrowserStackLocal\`\` in background.
+
+ :pypi:`pytest-bug`
+ *last release*: Jun 02, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.6.0)
+
+ Pytest plugin for marking tests as a bug
+
+ :pypi:`pytest-bugtong-tag`
+ *last release*: Apr 23, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest-bugtong-tag is a plugin for pytest
+
+ :pypi:`pytest-bugzilla`
+ *last release*: May 05, 2010,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ py.test bugzilla integration plugin
+
+ :pypi:`pytest-bugzilla-notifier`
+ *last release*: Jun 15, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.9.2)
+
+ A plugin that allows you to create, update, and read information from Bugzilla bugs
+
+ :pypi:`pytest-buildkite`
+ *last release*: Jul 13, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Plugin for pytest that automatically publishes coverage and pytest report annotations to Buildkite.
+
+ :pypi:`pytest-builtin-types`
+ *last release*: Nov 17, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+
+
+ :pypi:`pytest-bwrap`
+ *last release*: Oct 26, 2018,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Run your tests in Bubblewrap sandboxes
+
+ :pypi:`pytest-cache`
+ *last release*: Jun 04, 2013,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin with mechanisms for caching across test runs
+
+ :pypi:`pytest-cache-assert`
+ *last release*: Nov 03, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=5)
+
+ Cache assertion data to simplify regression testing of complex serializable data
+
+ :pypi:`pytest-cagoule`
+ *last release*: Jan 01, 2020,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Pytest plugin to only run tests affected by changes
+
+ :pypi:`pytest-camel-collect`
+ *last release*: Aug 02, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=2.9)
+
+ Enable CamelCase-aware pytest class collection
+
+ :pypi:`pytest-canonical-data`
+ *last release*: May 08, 2020,
+ *status*: 2 - Pre-Alpha,
+ *requires*: pytest (>=3.5.0)
+
+ A plugin which allows comparing results with canonical results, based on previous runs
+
+ :pypi:`pytest-caprng`
+ *last release*: May 02, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A plugin that replays pRNG state on failure.
+
+ :pypi:`pytest-capture-deprecatedwarnings`
+ *last release*: Apr 30, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest plugin to capture all deprecation warnings and put them in one file
+
+ :pypi:`pytest-capturelogs`
+ *last release*: Sep 11, 2021,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ A sample Python project
+
+ :pypi:`pytest-cases`
+ *last release*: Nov 08, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Separate test code from test cases in pytest.
+
+ :pypi:`pytest-cassandra`
+ *last release*: Nov 04, 2017,
+ *status*: 1 - Planning,
+ *requires*: N/A
+
+ Cassandra CCM Test Fixtures for pytest
+
+ :pypi:`pytest-catchlog`
+ *last release*: Jan 24, 2016,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.6)
+
+ py.test plugin to catch log messages. This is a fork of pytest-capturelog.
+
+ :pypi:`pytest-catch-server`
+ *last release*: Dec 12, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Pytest plugin with server for catching HTTP requests.
+
+ :pypi:`pytest-celery`
+ *last release*: May 06, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest-celery is a shim pytest plugin to enable celery.contrib.pytest
+
+ :pypi:`pytest-chainmaker`
+ *last release*: Oct 15, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest plugin for chainmaker
+
+ :pypi:`pytest-chalice`
+ *last release*: Jul 01, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A set of py.test fixtures for AWS Chalice
+
+ :pypi:`pytest-change-report`
+ *last release*: Sep 14, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ turn . into √, turn F into x
+
+ :pypi:`pytest-chdir`
+ *last release*: Jan 28, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=5.0.0,<6.0.0)
+
+ A pytest fixture for changing current working directory
+
+ :pypi:`pytest-checkdocs`
+ *last release*: Jul 31, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.6) ; extra == 'testing'
+
+ check the README when running tests
+
+ :pypi:`pytest-checkipdb`
+ *last release*: Jul 22, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.9.2)
+
+ plugin to check if there are ipdb debugs left
+
+ :pypi:`pytest-check-links`
+ *last release*: Jul 29, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=4.6)
+
+ Check links in files
+
+ :pypi:`pytest-check-mk`
+ *last release*: Nov 19, 2015,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ pytest plugin to test Check_MK checks
+
+ :pypi:`pytest-circleci`
+ *last release*: May 03, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ py.test plugin for CircleCI
+
+ :pypi:`pytest-circleci-parallelized`
+ *last release*: Mar 26, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Parallelize pytest across CircleCI workers.
+
+ :pypi:`pytest-ckan`
+ *last release*: Apr 28, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Backport of CKAN 2.9 pytest plugin and fixtures to CKAN 2.8
+
+ :pypi:`pytest-clarity`
+ *last release*: Jun 11, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ A plugin providing an alternative, colourful diff output for failing assertions.
+
+ :pypi:`pytest-cldf`
+ *last release*: May 06, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Easy quality control for CLDF datasets using pytest
+
+ :pypi:`pytest-click`
+ *last release*: Aug 29, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.0)
+
+ Py.test plugin for Click
+
+ :pypi:`pytest-clld`
+ *last release*: Nov 29, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=3.6)
+
+
+
+ :pypi:`pytest-cloud`
+ *last release*: Oct 05, 2020,
+ *status*: 6 - Mature,
+ *requires*: N/A
+
+ Distributed tests planner plugin for pytest testing framework.
+
+ :pypi:`pytest-cloudflare-worker`
+ *last release*: Mar 30, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.0.0)
+
+ pytest plugin for testing cloudflare workers
+
+ :pypi:`pytest-cobra`
+ *last release*: Jun 29, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (<4.0.0,>=3.7.1)
+
+ PyTest plugin for testing Smart Contracts for Ethereum blockchain.
+
+ :pypi:`pytest-codeblocks`
+ *last release*: Oct 13, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6)
+
+ Test code blocks in your READMEs
+
+ :pypi:`pytest-codecheckers`
+ *last release*: Feb 13, 2010,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest plugin to add source code sanity checks (pep8 and friends)
+
+ :pypi:`pytest-codecov`
+ *last release*: Oct 27, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.6.0)
+
+ Pytest plugin for uploading pytest-cov results to codecov.io
+
+ :pypi:`pytest-codegen`
+ *last release*: Aug 23, 2020,
+ *status*: 2 - Pre-Alpha,
+ *requires*: N/A
+
+ Automatically create pytest test signatures
+
+ :pypi:`pytest-codestyle`
+ *last release*: Mar 23, 2020,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin to run pycodestyle
+
+ :pypi:`pytest-collect-formatter`
+ *last release*: Mar 29, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Formatter for pytest collect output
+
+ :pypi:`pytest-collect-formatter2`
+ *last release*: May 31, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Formatter for pytest collect output
+
+ :pypi:`pytest-colordots`
+ *last release*: Oct 06, 2017,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Colorizes the progress indicators
+
+ :pypi:`pytest-commander`
+ *last release*: Aug 17, 2021,
+ *status*: N/A,
+ *requires*: pytest (<7.0.0,>=6.2.4)
+
+ An interactive GUI test runner for PyTest
+
+ :pypi:`pytest-common-subject`
+ *last release*: Nov 12, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=3.6,<7)
+
+ pytest framework for testing different aspects of a common method
+
+ :pypi:`pytest-concurrent`
+ *last release*: Jan 12, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.1.1)
+
+ Concurrently execute test cases with multithreading, multiprocessing and gevent
+
+ :pypi:`pytest-config`
+ *last release*: Nov 07, 2014,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Base configurations and utilities for developing your Python project test suite with pytest.
+
+ :pypi:`pytest-confluence-report`
+ *last release*: Nov 06, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ A pytest plugin to upload results into a Confluence page.
+
+ :pypi:`pytest-console-scripts`
+ *last release*: Sep 28, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Pytest plugin for testing console scripts
+
+ :pypi:`pytest-consul`
+ *last release*: Nov 24, 2018,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ pytest plugin with fixtures for testing consul aware apps
+
+ :pypi:`pytest-container`
+ *last release*: Nov 19, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.10)
+
+ Pytest fixtures for writing container based tests
+
+ :pypi:`pytest-contextfixture`
+ *last release*: Mar 12, 2013,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Define pytest fixtures as context managers.
+
+ :pypi:`pytest-contexts`
+ *last release*: May 19, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A plugin to run tests written with the Contexts framework using pytest
+
+ :pypi:`pytest-cookies`
+ *last release*: May 24, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.3.0)
+
+ The pytest plugin for your Cookiecutter templates. 🍪
+
+ :pypi:`pytest-couchdbkit`
+ *last release*: Apr 17, 2012,
+ *status*: N/A,
+ *requires*: N/A
+
+ py.test extension for per-test couchdb databases using couchdbkit
+
+ :pypi:`pytest-count`
+ *last release*: Jan 12, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ count errors and send email
+
+ :pypi:`pytest-cov`
+ *last release*: Oct 04, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.6)
+
+ Pytest plugin for measuring coverage.
+
+ :pypi:`pytest-cover`
+ *last release*: Aug 01, 2015,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Pytest plugin for measuring coverage. Forked from \`pytest-cov\`.
+
+ :pypi:`pytest-coverage`
+ *last release*: Jun 17, 2015,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-coverage-context`
+ *last release*: Jan 04, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.1.0)
+
+ Coverage dynamic context support for PyTest, including sub-processes
+
+ :pypi:`pytest-cov-exclude`
+ *last release*: Apr 29, 2016,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.8.0,<2.9.0); extra == 'dev'
+
+ Pytest plugin for excluding tests based on coverage data
+
+ :pypi:`pytest-cpp`
+ *last release*: Dec 03, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (!=5.4.0,!=5.4.1)
+
+ Use pytest's runner to discover and execute C++ tests
+
+ :pypi:`pytest-cram`
+ *last release*: Aug 08, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ Run cram tests with pytest.
+
+ :pypi:`pytest-crate`
+ *last release*: May 28, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=4.0)
+
+ Manages CrateDB instances during your integration tests
+
+ :pypi:`pytest-cricri`
+ *last release*: Jan 27, 2018,
+ *status*: N/A,
+ *requires*: pytest
+
+ A Cricri plugin for pytest.
+
+ :pypi:`pytest-crontab`
+ *last release*: Dec 09, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ add crontab task in crontab
+
+ :pypi:`pytest-csv`
+ *last release*: Apr 22, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=6.0)
+
+ CSV output for pytest.
+
+ :pypi:`pytest-curio`
+ *last release*: Oct 07, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest support for curio.
+
+ :pypi:`pytest-curl-report`
+ *last release*: Dec 11, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest plugin to generate curl command line report
+
+ :pypi:`pytest-custom-concurrency`
+ *last release*: Feb 08, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Custom grouping concurrency for pytest
+
+ :pypi:`pytest-custom-exit-code`
+ *last release*: Aug 07, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.0.2)
+
+ Exit pytest test session with custom exit code in different scenarios
+
+ :pypi:`pytest-custom-nodeid`
+ *last release*: Mar 07, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Custom grouping for pytest-xdist; renames test case names and nodeids; supports Allure reports
+
+ :pypi:`pytest-custom-report`
+ *last release*: Jan 30, 2019,
+ *status*: N/A,
+ *requires*: pytest
+
+ Configure the symbols displayed for test outcomes
+
+ :pypi:`pytest-custom-scheduling`
+ *last release*: Mar 01, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Custom grouping for pytest-xdist; renames test case names and nodeids; supports Allure reports
+
+ :pypi:`pytest-cython`
+ *last release*: Jan 26, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.7.3)
+
+ A plugin for testing Cython extension modules
+
+ :pypi:`pytest-darker`
+ *last release*: Aug 16, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=6.0.1) ; extra == 'test'
+
+ A pytest plugin for checking of modified code using Darker
+
+ :pypi:`pytest-dash`
+ *last release*: Mar 18, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest fixtures to run dash applications.
+
+ :pypi:`pytest-data`
+ *last release*: Nov 01, 2016,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Useful functions for managing data for pytest fixtures
+
+ :pypi:`pytest-databricks`
+ *last release*: Jul 29, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ Pytest plugin for remote Databricks notebooks testing
+
+ :pypi:`pytest-datadir`
+ *last release*: Oct 22, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.7.0)
+
+ pytest plugin for test data directories and files
+
+ :pypi:`pytest-datadir-mgr`
+ *last release*: Aug 16, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Manager for test data providing downloads, caching of generated files, and a context for temp directories.
+
+ :pypi:`pytest-datadir-ng`
+ *last release*: Dec 25, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Fixtures for pytest allowing test functions/methods to easily retrieve test resources from the local filesystem.
+
+ :pypi:`pytest-data-file`
+ *last release*: Dec 04, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Fixture "data" and "case_data" for test from yaml file
+
+ :pypi:`pytest-datafiles`
+ *last release*: Oct 07, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.6)
+
+ py.test plugin to create a 'tmpdir' containing predefined files/directories.
+
+ :pypi:`pytest-datafixtures`
+ *last release*: Dec 05, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Data fixtures for pytest made simple
+
+ :pypi:`pytest-data-from-files`
+ *last release*: Oct 13, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ pytest plugin to provide data from files loaded automatically
+
+ :pypi:`pytest-dataplugin`
+ *last release*: Sep 16, 2017,
+ *status*: 1 - Planning,
+ *requires*: N/A
+
+ A pytest plugin for managing an archive of test data.
+
+ :pypi:`pytest-datarecorder`
+ *last release*: Apr 20, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ A py.test plugin recording and comparing test output.
+
+ :pypi:`pytest-datatest`
+ *last release*: Oct 15, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.3)
+
+ A pytest plugin for test driven data-wrangling (this is the development version of datatest's pytest integration).
+
+ :pypi:`pytest-db`
+ *last release*: Dec 04, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Session scope fixture "db" for mysql query or change
+
+ :pypi:`pytest-dbfixtures`
+ *last release*: Dec 07, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Databases fixtures plugin for py.test.
+
+ :pypi:`pytest-db-plugin`
+ *last release*: Nov 27, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=5.0)
+
+
+
+ :pypi:`pytest-dbt-adapter`
+ *last release*: Nov 24, 2021,
+ *status*: N/A,
+ *requires*: pytest (<7,>=6)
+
+ A pytest plugin for testing dbt adapter plugins
+
+ :pypi:`pytest-dbus-notification`
+ *last release*: Mar 05, 2014,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ D-BUS notifications for pytest results.
+
+ :pypi:`pytest-deadfixtures`
+ *last release*: Jul 23, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ A simple plugin to list unused fixtures in pytest
+
+ :pypi:`pytest-deepcov`
+ *last release*: Mar 30, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ deepcov
+
+ :pypi:`pytest-defer`
+ *last release*: Aug 24, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-demo-plugin`
+ *last release*: May 15, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest example plugin
+
+ :pypi:`pytest-dependency`
+ *last release*: Feb 14, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Manage dependencies of tests
+
+ :pypi:`pytest-depends`
+ *last release*: Apr 05, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3)
+
+ Tests that depend on other tests
+
+ :pypi:`pytest-deprecate`
+ *last release*: Jul 01, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Mark tests as testing a deprecated feature with a warning note.
+
+ :pypi:`pytest-describe`
+ *last release*: Nov 13, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.0.0)
+
+ Describe-style plugin for pytest
+
+ :pypi:`pytest-describe-it`
+ *last release*: Jul 19, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ plugin for rich text descriptions
+
+ :pypi:`pytest-devpi-server`
+ *last release*: May 28, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ DevPI server fixture for py.test
+
+ :pypi:`pytest-diamond`
+ *last release*: Aug 31, 2015,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest plugin for diamond
+
+ :pypi:`pytest-dicom`
+ *last release*: Dec 19, 2018,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ pytest plugin to provide DICOM fixtures
+
+ :pypi:`pytest-dictsdiff`
+ *last release*: Jul 26, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-diff`
+ *last release*: Mar 30, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A simple plugin to use with pytest
+
+ :pypi:`pytest-disable`
+ *last release*: Sep 10, 2015,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest plugin to disable a test and skip it from the test run
+
+ :pypi:`pytest-disable-plugin`
+ *last release*: Feb 28, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Disable plugins per test
+
+ :pypi:`pytest-discord`
+ *last release*: Mar 20, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (!=6.0.0,<7,>=3.3.2)
+
+ A pytest plugin to notify test results to a Discord channel.
+
+ :pypi:`pytest-django`
+ *last release*: Dec 02, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.4.0)
+
+ A Django plugin for pytest.
+
+ :pypi:`pytest-django-ahead`
+ *last release*: Oct 27, 2016,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.9)
+
+ A Django plugin for pytest.
+
+ :pypi:`pytest-djangoapp`
+ *last release*: Aug 04, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Nice pytest plugin to help you with Django pluggable application testing.
+
+ :pypi:`pytest-django-cache-xdist`
+ *last release*: May 12, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A djangocachexdist plugin for pytest
+
+ :pypi:`pytest-django-casperjs`
+ *last release*: Mar 15, 2015,
+ *status*: 2 - Pre-Alpha,
+ *requires*: N/A
+
+ Integrate CasperJS with your django tests as a pytest fixture.
+
+ :pypi:`pytest-django-dotenv`
+ *last release*: Nov 26, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.6.0)
+
+ Pytest plugin used to setup environment variables with django-dotenv
+
+ :pypi:`pytest-django-factories`
+ *last release*: Nov 12, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Factories for your Django models that can be used as Pytest fixtures.
+
+ :pypi:`pytest-django-gcir`
+ *last release*: Mar 06, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ A Django plugin for pytest.
+
+ :pypi:`pytest-django-haystack`
+ *last release*: Sep 03, 2017,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.3.4)
+
+ Cleanup your Haystack indexes between tests
+
+ :pypi:`pytest-django-ifactory`
+ *last release*: Jan 13, 2021,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ A model instance factory for pytest-django
+
+ :pypi:`pytest-django-lite`
+ *last release*: Jan 30, 2014,
+ *status*: N/A,
+ *requires*: N/A
+
+ The bare minimum to integrate py.test with Django.
+
+ :pypi:`pytest-django-liveserver-ssl`
+ *last release*: Jul 30, 2021,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-django-model`
+ *last release*: Feb 14, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A Simple Way to Test your Django Models
+
+ :pypi:`pytest-django-ordering`
+ *last release*: Jul 25, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.3.0)
+
+ A pytest plugin for preserving the order in which Django runs tests.
+
+ :pypi:`pytest-django-queries`
+ *last release*: Mar 01, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Generate performance reports from your django database performance tests.
+
+ :pypi:`pytest-djangorestframework`
+ *last release*: Aug 11, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A djangorestframework plugin for pytest
+
+ :pypi:`pytest-django-rq`
+ *last release*: Apr 13, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A pytest plugin to help writing unit tests for django-rq
+
+ :pypi:`pytest-django-sqlcounts`
+ *last release*: Jun 16, 2015,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ py.test plugin for reporting the number of SQL queries executed per Django test case.
+
+ :pypi:`pytest-django-testing-postgresql`
+ *last release*: Dec 05, 2019,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Use a temporary PostgreSQL database with pytest-django
+
+ :pypi:`pytest-doc`
+ *last release*: Jun 28, 2015,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ A documentation plugin for py.test.
+
+ :pypi:`pytest-docgen`
+ *last release*: Apr 17, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ An RST Documentation Generator for pytest-based test suites
+
+ :pypi:`pytest-docker`
+ *last release*: Jun 14, 2021,
+ *status*: N/A,
+ *requires*: pytest (<7.0,>=4.0)
+
+ Simple pytest fixtures for Docker and docker-compose based tests
+
+ :pypi:`pytest-docker-butla`
+ *last release*: Jun 16, 2019,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-dockerc`
+ *last release*: Oct 09, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.0)
+
+ Run, manage and stop Docker Compose project from Docker API
+
+ :pypi:`pytest-docker-compose`
+ *last release*: Jan 26, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.3)
+
+ Manages Docker containers during your integration tests
+
+ :pypi:`pytest-docker-db`
+ *last release*: Mar 20, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.1.1)
+
+ A plugin to use docker databases for pytests
+
+ :pypi:`pytest-docker-fixtures`
+ *last release*: Nov 23, 2021,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest docker fixtures
+
+ :pypi:`pytest-docker-git-fixtures`
+ *last release*: Mar 11, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Pytest fixtures for testing with git scm.
+
+ :pypi:`pytest-docker-pexpect`
+ *last release*: Jan 14, 2019,
+ *status*: N/A,
+ *requires*: pytest
+
+ pytest plugin for writing functional tests with pexpect and docker
+
+ :pypi:`pytest-docker-postgresql`
+ *last release*: Sep 24, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A simple plugin to use with pytest
+
+ :pypi:`pytest-docker-py`
+ *last release*: Nov 27, 2018,
+ *status*: N/A,
+ *requires*: pytest (==4.0.0)
+
+ Easy to use, simple to extend, pytest plugin that minimally leverages docker-py.
+
+ :pypi:`pytest-docker-registry-fixtures`
+ *last release*: Mar 04, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Pytest fixtures for testing with docker registries.
+
+ :pypi:`pytest-docker-tools`
+ *last release*: Jul 23, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.0.1,<7.0.0)
+
+ Docker integration tests for pytest
+
+ :pypi:`pytest-docs`
+ *last release*: Nov 11, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Documentation tool for pytest
+
+ :pypi:`pytest-docstyle`
+ *last release*: Mar 23, 2020,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin to run pydocstyle
+
+ :pypi:`pytest-doctest-custom`
+ *last release*: Jul 25, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A py.test plugin for customizing string representations of doctest results.
+
+ :pypi:`pytest-doctest-ellipsis-markers`
+ *last release*: Jan 12, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Setup additional values for ELLIPSIS_MARKER for doctests
+
+ :pypi:`pytest-doctest-import`
+ *last release*: Nov 13, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.3.0)
+
+ A simple pytest plugin to import names and add them to the doctest namespace.
+
+ :pypi:`pytest-doctestplus`
+ *last release*: Nov 16, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=4.6)
+
+ Pytest plugin with advanced doctest features.
+
+ :pypi:`pytest-doctest-ufunc`
+ *last release*: Aug 02, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A plugin to run doctests in docstrings of Numpy ufuncs
+
+ :pypi:`pytest-dolphin`
+ *last release*: Nov 30, 2016,
+ *status*: 4 - Beta,
+ *requires*: pytest (==3.0.4)
+
+ Some extra stuff that we use internally
+
+ :pypi:`pytest-doorstop`
+ *last release*: Jun 09, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A pytest plugin for adding test results into doorstop items.
+
+ :pypi:`pytest-dotenv`
+ *last release*: Jun 16, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=5.0.0)
+
+ A py.test plugin that parses environment files before running tests
+
+ :pypi:`pytest-drf`
+ *last release*: Nov 12, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.6)
+
+ A Django REST framework plugin for pytest.
+
+ :pypi:`pytest-drivings`
+ *last release*: Jan 13, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Tool to allow webdriver automation to be run locally or remotely
+
+ :pypi:`pytest-drop-dup-tests`
+ *last release*: May 23, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.7)
+
+ A Pytest plugin to drop duplicated tests during collection
+
+ :pypi:`pytest-dummynet`
+ *last release*: Oct 13, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ A py.test plugin providing access to a dummynet.
+
+ :pypi:`pytest-dump2json`
+ *last release*: Jun 29, 2015,
+ *status*: N/A,
+ *requires*: N/A
+
+ A pytest plugin for dumping test results to json.
+
+ :pypi:`pytest-duration-insights`
+ *last release*: Jun 25, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-dynamicrerun`
+ *last release*: Aug 15, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A pytest plugin to rerun tests dynamically based off of test outcome and output.
+
+ :pypi:`pytest-dynamodb`
+ *last release*: Jun 03, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ DynamoDB fixtures for pytest
+
+ :pypi:`pytest-easy-addoption`
+ *last release*: Jan 22, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest-easy-addoption: Easy way to work with pytest addoption
+
+ :pypi:`pytest-easy-api`
+ *last release*: Mar 26, 2018,
+ *status*: N/A,
+ *requires*: N/A
+
+ Simple API testing with pytest
+
+ :pypi:`pytest-easyMPI`
+ *last release*: Oct 21, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ Package that supports mpi tests in pytest
+
+ :pypi:`pytest-easyread`
+ *last release*: Nov 17, 2017,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest plugin that makes terminal printouts of the reports easier to read
+
+ :pypi:`pytest-easy-server`
+ *last release*: May 01, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (<5.0.0,>=4.3.1) ; python_version < "3.5"
+
+ Pytest plugin for easy testing against servers
+
+ :pypi:`pytest-ec2`
+ *last release*: Oct 22, 2019,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Pytest execution on EC2 instance
+
+ :pypi:`pytest-echo`
+ *last release*: Jan 08, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ pytest plugin with mechanisms for echoing environment variables, package version and generic attributes
+
+ :pypi:`pytest-elasticsearch`
+ *last release*: May 12, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.0.0)
+
+ Elasticsearch fixtures and fixture factories for Pytest.
+
+ :pypi:`pytest-elements`
+ *last release*: Jan 13, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=5.4,<6.0)
+
+ Tool to help automate user interfaces
+
+ :pypi:`pytest-elk-reporter`
+ *last release*: Jan 24, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A simple plugin to use with pytest
+
+ :pypi:`pytest-email`
+ *last release*: Jul 08, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ Send execution result email
+
+ :pypi:`pytest-embedded`
+ *last release*: Nov 29, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=6.2.0)
+
+ pytest embedded plugin
+
+ :pypi:`pytest-embedded-idf`
+ *last release*: Nov 29, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest embedded plugin for esp-idf project
+
+ :pypi:`pytest-embedded-jtag`
+ *last release*: Nov 29, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest embedded plugin for testing with jtag
+
+ :pypi:`pytest-embedded-qemu`
+ *last release*: Nov 29, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest embedded plugin for qemu, not target chip
+
+ :pypi:`pytest-embedded-qemu-idf`
+ *last release*: Jun 29, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest embedded plugin for esp-idf project by qemu, not target chip
+
+ :pypi:`pytest-embedded-serial`
+ *last release*: Nov 29, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest embedded plugin for testing serial ports
+
+ :pypi:`pytest-embedded-serial-esp`
+ *last release*: Nov 29, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest embedded plugin for testing espressif boards via serial ports
+
+ :pypi:`pytest-emoji`
+ *last release*: Feb 19, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.2.1)
+
+ A pytest plugin that adds emojis to your test result report
+
+ :pypi:`pytest-emoji-output`
+ *last release*: Oct 10, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (==6.0.1)
+
+ Pytest plugin to represent test output with emoji support
+
+ :pypi:`pytest-enabler`
+ *last release*: Nov 08, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=6) ; extra == 'testing'
+
+ Enable installed pytest plugins
+
+ :pypi:`pytest-encode`
+ *last release*: Nov 06, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ set your encoding and logger
+
+ :pypi:`pytest-encode-kane`
+ *last release*: Nov 16, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ set your encoding and logger
+
+ :pypi:`pytest-enhancements`
+ *last release*: Oct 30, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Improvements for pytest (rejected upstream)
+
+ :pypi:`pytest-env`
+ *last release*: Jun 16, 2017,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ py.test plugin that allows you to add environment variables.
+
+ :pypi:`pytest-envfiles`
+ *last release*: Oct 08, 2015,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ A py.test plugin that parses environment files before running tests
+
+ :pypi:`pytest-env-info`
+ *last release*: Nov 25, 2017,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.1.1)
+
+ Push information about the running pytest into envvars
+
+ :pypi:`pytest-envraw`
+ *last release*: Aug 27, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.6.0)
+
+ py.test plugin that allows you to add environment variables.
+
+ :pypi:`pytest-envvars`
+ *last release*: Jun 13, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.0.0)
+
+ Pytest plugin to validate use of envvars on your tests
+
+ :pypi:`pytest-env-yaml`
+ *last release*: Apr 02, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-eradicate`
+ *last release*: Sep 08, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=2.4.2)
+
+ pytest plugin to check for commented out code
+
+ :pypi:`pytest-error-for-skips`
+ *last release*: Dec 19, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.6)
+
+ Pytest plugin to treat skipped tests as a test failure
+
+ :pypi:`pytest-eth`
+ *last release*: Aug 14, 2020,
+ *status*: 1 - Planning,
+ *requires*: N/A
+
+ PyTest plugin for testing Smart Contracts for Ethereum Virtual Machine (EVM).
+
+ :pypi:`pytest-ethereum`
+ *last release*: Jun 24, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (==3.3.2); extra == 'dev'
+
+ pytest-ethereum: Pytest library for ethereum projects.
+
+ :pypi:`pytest-eucalyptus`
+ *last release*: Aug 13, 2019,
+ *status*: N/A,
+ *requires*: pytest (>=4.2.0)
+
+ Pytest Plugin for BDD
+
+ :pypi:`pytest-eventlet`
+ *last release*: Oct 04, 2021,
+ *status*: N/A,
+ *requires*: pytest ; extra == 'dev'
+
+ Applies eventlet monkey-patch as a pytest plugin.
+
+ :pypi:`pytest-excel`
+ *last release*: Oct 06, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ pytest plugin for generating excel reports
+
+ :pypi:`pytest-exceptional`
+ *last release*: Mar 16, 2017,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Better exceptions
+
+ :pypi:`pytest-exception-script`
+ *last release*: Aug 04, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ Walk your code through an exception script to check its resiliency to failures.
+
+ :pypi:`pytest-executable`
+ *last release*: Nov 10, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (<6.3,>=4.3)
+
+ pytest plugin for testing executables
+
+ :pypi:`pytest-expect`
+ *last release*: Apr 21, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ py.test plugin to store test expectations and mark tests based on them
+
+ :pypi:`pytest-expecter`
+ *last release*: Jul 08, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Better testing with expecter and pytest.
+
+ :pypi:`pytest-expectr`
+ *last release*: Oct 05, 2018,
+ *status*: N/A,
+ *requires*: pytest (>=2.4.2)
+
+ This plugin is used to expect multiple asserts using the pytest framework.
+
+ :pypi:`pytest-explicit`
+ *last release*: Jun 15, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ A Pytest plugin to ignore certain marked tests by default
+
+ :pypi:`pytest-exploratory`
+ *last release*: Aug 03, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=5.3)
+
+ Interactive console for pytest.
+
+ :pypi:`pytest-external-blockers`
+ *last release*: Oct 05, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ a special outcome for tests that are blocked for external reasons
+
+ :pypi:`pytest-extra-durations`
+ *last release*: Apr 21, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A pytest plugin to get durations on a per-function and per-module basis.
+
+ :pypi:`pytest-fabric`
+ *last release*: Sep 12, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Provides test utilities to run fabric task tests by using docker containers
+
+ :pypi:`pytest-factory`
+ *last release*: Sep 06, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>4.3)
+
+ Use factories for test setup with py.test
+
+ :pypi:`pytest-factoryboy`
+ *last release*: Dec 30, 2020,
+ *status*: 6 - Mature,
+ *requires*: pytest (>=4.6)
+
+ Factory Boy support for pytest.
+
+ :pypi:`pytest-factoryboy-fixtures`
+ *last release*: Jun 25, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ Generates pytest fixtures that allow the use of type hinting
+
+ :pypi:`pytest-factoryboy-state`
+ *last release*: Dec 11, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=5.0)
+
+ Simple factoryboy random state management
+
+ :pypi:`pytest-failed-screenshot`
+ *last release*: Apr 21, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ When a test case fails, take a screenshot, save it, and attach it to the Allure report
+
+ :pypi:`pytest-failed-to-verify`
+ *last release*: Aug 08, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.1.0)
+
+ A pytest plugin that helps better distinguishing real test failures from setup flakiness.
+
+ :pypi:`pytest-faker`
+ *last release*: Dec 19, 2016,
+ *status*: 6 - Mature,
+ *requires*: N/A
+
+ Faker integration with the pytest framework.
+
+ :pypi:`pytest-falcon`
+ *last release*: Sep 07, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Pytest helpers for Falcon.
+
+ :pypi:`pytest-falcon-client`
+ *last release*: Mar 19, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest \`client\` fixture for the Falcon Framework
+
+ :pypi:`pytest-fantasy`
+ *last release*: Mar 14, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest plugin for Flask Fantasy Framework
+
+ :pypi:`pytest-fastapi`
+ *last release*: Dec 27, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-fastest`
+ *last release*: Mar 05, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ Use SCM and coverage to run only needed tests
+
+ :pypi:`pytest-fast-first`
+ *last release*: Apr 02, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ Pytest plugin that runs fast tests first
+
+ :pypi:`pytest-faulthandler`
+ *last release*: Jul 04, 2019,
+ *status*: 6 - Mature,
+ *requires*: pytest (>=5.0)
+
+ py.test plugin that activates the fault handler module for tests (dummy package)
+
+ :pypi:`pytest-fauxfactory`
+ *last release*: Dec 06, 2017,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.2)
+
+ Integration of fauxfactory into pytest.
+
+ :pypi:`pytest-figleaf`
+ *last release*: Jan 18, 2010,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ py.test figleaf coverage plugin
+
+ :pypi:`pytest-filecov`
+ *last release*: Jun 27, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ A pytest plugin to detect unused files
+
+ :pypi:`pytest-filedata`
+ *last release*: Jan 17, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ easily load data from files
+
+ :pypi:`pytest-filemarker`
+ *last release*: Dec 01, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ A pytest plugin that runs marked tests when files change.
+
+ :pypi:`pytest-filter-case`
+ *last release*: Nov 05, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ run test cases filtered by mark
+
+ :pypi:`pytest-filter-subpackage`
+ *last release*: Jan 09, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.0)
+
+ Pytest plugin for filtering based on sub-packages
+
+ :pypi:`pytest-find-dependencies`
+ *last release*: Apr 21, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A pytest plugin to find dependencies between tests
+
+ :pypi:`pytest-finer-verdicts`
+ *last release*: Jun 18, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=5.4.3)
+
+ A pytest plugin to treat non-assertion failures as test errors.
+
+ :pypi:`pytest-firefox`
+ *last release*: Aug 08, 2017,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.0.2)
+
+ pytest plugin to manipulate firefox
+
+ :pypi:`pytest-fixture-config`
+ *last release*: May 28, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Fixture configuration utils for py.test
+
+ :pypi:`pytest-fixture-maker`
+ *last release*: Sep 21, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest plugin to load fixtures from YAML files
+
+ :pypi:`pytest-fixture-marker`
+ *last release*: Oct 11, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ A pytest plugin to add markers based on fixtures used.
+
+ :pypi:`pytest-fixture-order`
+ *last release*: Aug 25, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=3.0)
+
+ pytest plugin to control fixture evaluation order
+
+ :pypi:`pytest-fixtures`
+ *last release*: May 01, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Common fixtures for pytest
+
+ :pypi:`pytest-fixture-tools`
+ *last release*: Aug 18, 2020,
+ *status*: 6 - Mature,
+ *requires*: pytest
+
+ Plugin for pytest which provides tools for fixtures
+
+ :pypi:`pytest-fixture-typecheck`
+ *last release*: Aug 24, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ A pytest plugin to assert type annotations at runtime.
+
+ :pypi:`pytest-flake8`
+ *last release*: Dec 16, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5)
+
+ pytest plugin to check FLAKE8 requirements
+
+ :pypi:`pytest-flake8-path`
+ *last release*: Aug 11, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ A pytest fixture for testing flake8 plugins.
+
+ :pypi:`pytest-flakefinder`
+ *last release*: Jul 28, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.7.1)
+
+ Runs tests multiple times to expose flakiness.
+
+ :pypi:`pytest-flakes`
+ *last release*: Dec 02, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5)
+
+ pytest plugin to check source code with pyflakes
+
+ :pypi:`pytest-flaptastic`
+ *last release*: Mar 17, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Flaptastic py.test plugin
+
+ :pypi:`pytest-flask`
+ *last release*: Feb 27, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.2)
+
+ A set of py.test fixtures to test Flask applications.
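+
+ A minimal sketch of how the plugin's ``client`` fixture is typically combined with a user-defined ``app`` fixture; the tiny ``/ping`` route is illustrative, not part of the plugin:
+
+ .. code-block:: python
+
+     import pytest
+     from flask import Flask
+
+     @pytest.fixture
+     def app():
+         # pytest-flask expects a fixture named "app" returning the Flask application
+         app = Flask(__name__)
+
+         @app.route("/ping")
+         def ping():
+             return {"ok": True}
+
+         return app
+
+     def test_ping(client):
+         # "client" is provided by pytest-flask and wraps app.test_client()
+         assert client.get("/ping").status_code == 200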
+
+ :pypi:`pytest-flask-sqlalchemy`
+ *last release*: Apr 04, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.2.1)
+
+ A pytest plugin for preserving test isolation in Flask-SQLAlchemy using database transactions.
+
+ :pypi:`pytest-flask-sqlalchemy-transactions`
+ *last release*: Aug 02, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.2.1)
+
+ Run tests in transactions using pytest, Flask, and SQLAlchemy.
+
+ :pypi:`pytest-flyte`
+ *last release*: May 03, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ Pytest fixtures for simplifying Flyte integration testing
+
+ :pypi:`pytest-focus`
+ *last release*: May 04, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ A pytest plugin that alerts user of failed test cases with screen notifications
+
+ :pypi:`pytest-forcefail`
+ *last release*: May 15, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ py.test plugin to make tests fail regardless of pytest.mark.xfail
+
+ :pypi:`pytest-forward-compatability`
+ *last release*: Sep 06, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ A name to avoid typosquatting pytest-forward-compatibility
+
+ :pypi:`pytest-forward-compatibility`
+ *last release*: Sep 29, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ A pytest plugin to shim pytest command line options for forwards compatibility
+
+ :pypi:`pytest-freezegun`
+ *last release*: Jul 19, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.0.0)
+
+ Wrap tests with fixtures in freeze_time
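+
+ A minimal sketch, assuming the ``freeze_time`` marker that pytest-freezegun documents; the date used here is arbitrary:
+
+ .. code-block:: python
+
+     import datetime
+     import pytest
+
+     @pytest.mark.freeze_time("2020-01-01")
+     def test_today_is_frozen():
+         # while the marker is active, "today" stays pinned to the frozen date
+         assert datetime.date.today() == datetime.date(2020, 1, 1)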
+
+ :pypi:`pytest-freeze-reqs`
+ *last release*: Apr 29, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Check if requirement files are frozen
+
+ :pypi:`pytest-frozen-uuids`
+ *last release*: Oct 19, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=3.0)
+
+ Deterministically frozen UUIDs for your tests
+
+ :pypi:`pytest-func-cov`
+ *last release*: Apr 15, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=5)
+
+ Pytest plugin for measuring function coverage
+
+ :pypi:`pytest-funparam`
+ *last release*: Dec 02, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest >=4.6.0
+
+ An alternative way to parametrize test cases.
+
+ :pypi:`pytest-fxa`
+ *last release*: Aug 28, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ pytest plugin for Firefox Accounts
+
+ :pypi:`pytest-fxtest`
+ *last release*: Oct 27, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-gc`
+ *last release*: Feb 01, 2018,
+ *status*: N/A,
+ *requires*: N/A
+
+ The garbage collector plugin for py.test
+
+ :pypi:`pytest-gcov`
+ *last release*: Feb 01, 2018,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Uses gcov to measure test coverage of a C library
+
+ :pypi:`pytest-gevent`
+ *last release*: Feb 25, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ Ensure that gevent is properly patched when invoking pytest
+
+ :pypi:`pytest-gherkin`
+ *last release*: Jul 27, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=5.0.0)
+
+ A flexible framework for executing BDD gherkin tests
+
+ :pypi:`pytest-ghostinspector`
+ *last release*: May 17, 2016,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ For finding/executing Ghost Inspector tests
+
+ :pypi:`pytest-girder`
+ *last release*: Nov 30, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ A set of pytest fixtures for testing Girder applications.
+
+ :pypi:`pytest-git`
+ *last release*: May 28, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Git repository fixture for py.test
+
+ :pypi:`pytest-gitcov`
+ *last release*: Jan 11, 2020,
+ *status*: 2 - Pre-Alpha,
+ *requires*: N/A
+
+ Pytest plugin for reporting on coverage of the last git commit.
+
+ :pypi:`pytest-git-fixtures`
+ *last release*: Mar 11, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Pytest fixtures for testing with git.
+
+ :pypi:`pytest-github`
+ *last release*: Mar 07, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Plugin for py.test that associates tests with github issues using a marker.
+
+ :pypi:`pytest-github-actions-annotate-failures`
+ *last release*: Oct 24, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=4.0.0)
+
+ pytest plugin to annotate failed tests with a workflow command for GitHub Actions
+
+ :pypi:`pytest-gitignore`
+ *last release*: Jul 17, 2015,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ py.test plugin to ignore the same files as git
+
+ :pypi:`pytest-glamor-allure`
+ *last release*: Nov 26, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Extends allure-pytest functionality
+
+ :pypi:`pytest-gnupg-fixtures`
+ *last release*: Mar 04, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Pytest fixtures for testing with gnupg.
+
+ :pypi:`pytest-golden`
+ *last release*: Nov 23, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=6.1.2,<7.0.0)
+
+ Plugin for pytest that offloads expected outputs to data files
+
+ :pypi:`pytest-graphql-schema`
+ *last release*: Oct 18, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Get graphql schema as fixture for pytest
+
+ :pypi:`pytest-greendots`
+ *last release*: Feb 08, 2014,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Green progress dots
+
+ :pypi:`pytest-growl`
+ *last release*: Jan 13, 2014,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Growl notifications for pytest results.
+
+ :pypi:`pytest-grpc`
+ *last release*: May 01, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=3.6.0)
+
+ pytest plugin for grpc
+
+ :pypi:`pytest-hammertime`
+ *last release*: Jul 28, 2018,
+ *status*: N/A,
+ *requires*: pytest
+
+ Display "🔨 " instead of "." for passed pytest tests.
+
+ :pypi:`pytest-harvest`
+ *last release*: Apr 01, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Store data created during your pytest tests execution, and retrieve it at the end of the session, e.g. for applicative benchmarking purposes.
+
+ :pypi:`pytest-helm-chart`
+ *last release*: Jun 15, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=5.4.2,<6.0.0)
+
+ A plugin to provide different types and configs of Kubernetes clusters that can be used for testing.
+
+ :pypi:`pytest-helm-charts`
+ *last release*: Oct 26, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.1.2,<7.0.0)
+
+ A plugin to provide different types and configs of Kubernetes clusters that can be used for testing.
+
+ :pypi:`pytest-helper`
+ *last release*: May 31, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Functions to help in using the pytest testing framework
+
+ :pypi:`pytest-helpers`
+ *last release*: May 17, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ pytest helpers
+
+ :pypi:`pytest-helpers-namespace`
+ *last release*: Apr 29, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=6.0.0)
+
+ Pytest Helpers Namespace Plugin
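+
+ A minimal sketch of the ``pytest.helpers`` namespace this plugin adds; the ``build_user`` helper is a made-up example:
+
+ .. code-block:: python
+
+     import pytest
+
+     # typically registered in conftest.py
+     @pytest.helpers.register
+     def build_user(name="alice"):
+         return {"name": name, "active": True}
+
+     def test_build_user():
+         # registered helpers become available as pytest.helpers.<name> in any test
+         assert pytest.helpers.build_user(name="bob")["name"] == "bob"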
+
+ :pypi:`pytest-hidecaptured`
+ *last release*: May 04, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.8.5)
+
+ Hide captured output
+
+ :pypi:`pytest-historic`
+ *last release*: Apr 08, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ Custom report to display pytest historical execution records
+
+ :pypi:`pytest-historic-hook`
+ *last release*: Apr 08, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ Custom listener to store execution results into a MySQL DB, which is used for the pytest-historic report
+
+ :pypi:`pytest-homeassistant`
+ *last release*: Aug 12, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A pytest plugin for use with homeassistant custom components.
+
+ :pypi:`pytest-homeassistant-custom-component`
+ *last release*: Nov 20, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (==6.2.5)
+
+ Experimental package to automatically extract test plugins for Home Assistant custom components
+
+ :pypi:`pytest-honors`
+ *last release*: Mar 06, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Report on tests that honor constraints, and guard against regressions
+
+ :pypi:`pytest-hoverfly`
+ *last release*: Jul 12, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=5.0)
+
+ Simplify working with Hoverfly from pytest
+
+ :pypi:`pytest-hoverfly-wrapper`
+ *last release*: Aug 29, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Integrates the Hoverfly HTTP proxy into Pytest
+
+ :pypi:`pytest-hpfeeds`
+ *last release*: Aug 27, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.2.4,<7.0.0)
+
+ Helpers for testing hpfeeds in your python project
+
+ :pypi:`pytest-html`
+ *last release*: Dec 13, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (!=6.0.0,>=5.0)
+
+ pytest plugin for generating HTML reports
+
+ :pypi:`pytest-html-lee`
+ *last release*: Jun 30, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.0)
+
+ optimized pytest plugin for generating HTML reports
+
+ :pypi:`pytest-html-profiling`
+ *last release*: Feb 11, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.0)
+
+ Pytest plugin for generating HTML reports with per-test profiling and optionally call graph visualizations. Based on pytest-html by Dave Hunt.
+
+ :pypi:`pytest-html-reporter`
+ *last release*: Apr 25, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Generates a static html report based on pytest framework
+
+ :pypi:`pytest-html-thread`
+ *last release*: Dec 29, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ pytest plugin for generating HTML reports
+
+ :pypi:`pytest-http`
+ *last release*: Dec 05, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Fixture "http" for http requests
+
+ :pypi:`pytest-httpbin`
+ *last release*: Feb 11, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Easily test your HTTP library against a local copy of httpbin
+
+ :pypi:`pytest-http-mocker`
+ *last release*: Oct 20, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest plugin for http mocking (via https://github.com/vilus/mocker)
+
+ :pypi:`pytest-httpretty`
+ *last release*: Feb 16, 2014,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ A thin wrapper of HTTPretty for pytest
+
+ :pypi:`pytest-httpserver`
+ *last release*: Oct 18, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest ; extra == 'dev'
+
+ pytest-httpserver is an HTTP server for pytest
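+
+ A minimal sketch using the ``httpserver`` fixture and the request/response API from the plugin's documentation; the ``/users`` path and payload are illustrative:
+
+ .. code-block:: python
+
+     import json
+     from urllib.request import urlopen
+
+     def test_users_endpoint(httpserver):
+         # register an expected request and its canned JSON response
+         httpserver.expect_request("/users").respond_with_json({"users": []})
+         with urlopen(httpserver.url_for("/users")) as resp:
+             assert json.loads(resp.read()) == {"users": []}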
+
+ :pypi:`pytest-httpx`
+ *last release*: Nov 16, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (==6.*)
+
+ Send responses to httpx.
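+
+ A minimal sketch of the ``httpx_mock`` fixture; the URL and payload are illustrative:
+
+ .. code-block:: python
+
+     import httpx
+
+     def test_api_call(httpx_mock):
+         # any httpx request matching this URL gets the canned JSON response
+         httpx_mock.add_response(url="https://example.test/api", json={"status": "ok"})
+         assert httpx.get("https://example.test/api").json() == {"status": "ok"}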
+
+ :pypi:`pytest-httpx-blockage`
+ *last release*: Nov 16, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=6.2.5)
+
+ Disable httpx requests during a test run
+
+ :pypi:`pytest-hue`
+ *last release*: May 09, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Visualise PyTest status via your Philips Hue lights
+
+ :pypi:`pytest-hylang`
+ *last release*: Mar 28, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ Pytest plugin to allow running tests written in hylang
+
+ :pypi:`pytest-hypo-25`
+ *last release*: Jan 12, 2020,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ help hypo module for pytest
+
+ :pypi:`pytest-ibutsu`
+ *last release*: Jun 16, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ A plugin to send pytest results to an Ibutsu server
+
+ :pypi:`pytest-icdiff`
+ *last release*: Apr 08, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ use icdiff for better error messages in pytest assertions
+
+ :pypi:`pytest-idapro`
+ *last release*: Nov 03, 2018,
+ *status*: N/A,
+ *requires*: N/A
+
+ A pytest plugin for idapython. Allows a pytest setup to run tests outside and inside IDA in an automated manner by running pytest inside IDA and by mocking the idapython API
+
+ :pypi:`pytest-idempotent`
+ *last release*: Nov 26, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest plugin for testing function idempotence.
+
+ :pypi:`pytest-ignore-flaky`
+ *last release*: Apr 23, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ ignore failures from flaky tests (pytest plugin)
+
+ :pypi:`pytest-image-diff`
+ *last release*: Jul 28, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+
+
+ :pypi:`pytest-incremental`
+ *last release*: Apr 24, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ an incremental test runner (pytest plugin)
+
+ :pypi:`pytest-influxdb`
+ *last release*: Apr 20, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Plugin for influxdb and pytest integration.
+
+ :pypi:`pytest-info-collector`
+ *last release*: May 26, 2019,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin to collect information from tests
+
+ :pypi:`pytest-informative-node`
+ *last release*: Apr 25, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ display more node information.
+
+ :pypi:`pytest-infrastructure`
+ *last release*: Apr 12, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest stack validation prior to test execution
+
+ :pypi:`pytest-ini`
+ *last release*: Sep 30, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Reuse pytest.ini to store env variables
+
+ :pypi:`pytest-inmanta`
+ *last release*: Aug 17, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ A py.test plugin providing fixtures to simplify inmanta modules testing.
+
+ :pypi:`pytest-inmanta-extensions`
+ *last release*: May 27, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Inmanta tests package
+
+ :pypi:`pytest-Inomaly`
+ *last release*: Feb 13, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A simple image diff plugin for pytest
+
+ :pypi:`pytest-insta`
+ *last release*: Apr 07, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=6.0.2,<7.0.0)
+
+ A practical snapshot testing plugin for pytest
+
+ :pypi:`pytest-instafail`
+ *last release*: Jun 14, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.9)
+
+ pytest plugin to show failures instantly
+
+ :pypi:`pytest-instrument`
+ *last release*: Apr 05, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.1.0)
+
+ pytest plugin to instrument tests
+
+ :pypi:`pytest-integration`
+ *last release*: Apr 16, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ Organizing pytests by integration or not
+
+ :pypi:`pytest-integration-mark`
+ *last release*: Jul 19, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=5.2,<7.0)
+
+ Automatic integration test marking and excluding plugin for pytest
+
+ :pypi:`pytest-interactive`
+ *last release*: Nov 30, 2017,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ A pytest plugin for console based interactive test selection just after the collection phase
+
+ :pypi:`pytest-intercept-remote`
+ *last release*: May 24, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.6)
+
+ Pytest plugin for intercepting outgoing connection requests during pytest run.
+
+ :pypi:`pytest-invenio`
+ *last release*: May 11, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (<7,>=6)
+
+ Pytest fixtures for Invenio.
+
+ :pypi:`pytest-involve`
+ *last release*: Feb 02, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Run tests covering a specific file or changeset
+
+ :pypi:`pytest-ipdb`
+ *last release*: Sep 02, 2014,
+ *status*: 2 - Pre-Alpha,
+ *requires*: N/A
+
+ A py.test plug-in to enable drop to ipdb debugger on test failure.
+
+ :pypi:`pytest-ipynb`
+ *last release*: Jan 29, 2019,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ THIS PROJECT IS ABANDONED
+
+ :pypi:`pytest-isort`
+ *last release*: Apr 27, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ py.test plugin to check import ordering using isort
+
+ :pypi:`pytest-it`
+ *last release*: Jan 22, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Pytest plugin to display test reports as a plaintext spec, inspired by Rspec: https://github.com/mattduck/pytest-it.
+
+ :pypi:`pytest-iterassert`
+ *last release*: May 11, 2020,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Nicer list and iterable assertion messages for pytest
+
+ :pypi:`pytest-jasmine`
+ *last release*: Nov 04, 2017,
+ *status*: 1 - Planning,
+ *requires*: N/A
+
+ Run jasmine tests from your pytest test suite
+
+ :pypi:`pytest-jest`
+ *last release*: May 22, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.3.2)
+
+ A custom jest-pytest oriented Pytest reporter
+
+ :pypi:`pytest-jira`
+ *last release*: Dec 02, 2021,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ py.test JIRA integration plugin, using markers
+
+ :pypi:`pytest-jira-xray`
+ *last release*: Nov 28, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ pytest plugin to integrate tests with JIRA XRAY
+
+ :pypi:`pytest-jobserver`
+ *last release*: May 15, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Limit parallel tests with posix jobserver.
+
+ :pypi:`pytest-joke`
+ *last release*: Oct 08, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.2.1)
+
+ Test failures are better served with humor.
+
+ :pypi:`pytest-json`
+ *last release*: Jan 18, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Generate JSON test reports
+
+ :pypi:`pytest-jsonlint`
+ *last release*: Aug 04, 2016,
+ *status*: N/A,
+ *requires*: N/A
+
+ UNKNOWN
+
+ :pypi:`pytest-json-report`
+ *last release*: Sep 24, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.8.0)
+
+ A pytest plugin to report test results as JSON files
+
+ :pypi:`pytest-kafka`
+ *last release*: Aug 24, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ Zookeeper, Kafka server, and Kafka consumer fixtures for Pytest
+
+ :pypi:`pytest-kafkavents`
+ *last release*: Sep 08, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ A plugin to send pytest events to Kafka
+
+ :pypi:`pytest-kind`
+ *last release*: Jan 24, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Kubernetes test support with KIND for pytest
+
+ :pypi:`pytest-kivy`
+ *last release*: Jul 06, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.6)
+
+ Kivy GUI tests fixtures using pytest
+
+ :pypi:`pytest-knows`
+ *last release*: Aug 22, 2014,
+ *status*: N/A,
+ *requires*: N/A
+
+ A pytest plugin that can automatically skip test cases based on dependency info calculated by trace
+
+ :pypi:`pytest-konira`
+ *last release*: Oct 09, 2011,
+ *status*: N/A,
+ *requires*: N/A
+
+ Run Konira DSL tests with py.test
+
+ :pypi:`pytest-krtech-common`
+ *last release*: Nov 28, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest krtech common library
+
+ :pypi:`pytest-kwparametrize`
+ *last release*: Jan 22, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=6)
+
+ Alternate syntax for @pytest.mark.parametrize with test cases as dictionaries and default value fallbacks
+
+ :pypi:`pytest-lambda`
+ *last release*: Aug 23, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.6,<7)
+
+ Define pytest fixtures with lambda functions.
+
+ :pypi:`pytest-lamp`
+ *last release*: Jan 06, 2017,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-layab`
+ *last release*: Oct 05, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Pytest fixtures for layab.
+
+ :pypi:`pytest-lazy-fixture`
+ *last release*: Feb 01, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.2.5)
+
+ It helps to use fixtures in pytest.mark.parametrize
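+
+ A minimal sketch of passing fixtures as parametrize values via ``pytest.lazy_fixture``; the two fixtures here are illustrative:
+
+ .. code-block:: python
+
+     import pytest
+
+     @pytest.fixture
+     def one():
+         return 1
+
+     @pytest.fixture
+     def two():
+         return 2
+
+     @pytest.mark.parametrize("value", [pytest.lazy_fixture("one"), pytest.lazy_fixture("two")])
+     def test_values(value):
+         # each parametrized case receives the resolved fixture value
+         assert value in (1, 2)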
+
+ :pypi:`pytest-ldap`
+ *last release*: Aug 18, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ python-ldap fixtures for pytest
+
+ :pypi:`pytest-leaks`
+ *last release*: Nov 27, 2019,
+ *status*: 1 - Planning,
+ *requires*: N/A
+
+ A pytest plugin to trace resource leaks.
+
+ :pypi:`pytest-level`
+ *last release*: Oct 21, 2019,
+ *status*: N/A,
+ *requires*: pytest
+
+ Select tests of a given level or lower
+
+ :pypi:`pytest-libfaketime`
+ *last release*: Dec 22, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.0.0)
+
+ A python-libfaketime plugin for pytest.
+
+ :pypi:`pytest-libiio`
+ *last release*: Oct 29, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A pytest plugin to manage interfacing with libiio contexts
+
+ :pypi:`pytest-libnotify`
+ *last release*: Apr 02, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ Pytest plugin that shows notifications about the test run
+
+ :pypi:`pytest-ligo`
+ *last release*: Jan 16, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-lineno`
+ *last release*: Dec 04, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ A pytest plugin to show the line numbers of test functions
+
+ :pypi:`pytest-line-profiler`
+ *last release*: May 03, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Profile code executed by pytest
+
+ :pypi:`pytest-lisa`
+ *last release*: Jan 21, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=6.1.2,<7.0.0)
+
+ Pytest plugin for organizing tests.
+
+ :pypi:`pytest-listener`
+ *last release*: May 28, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ A simple network listener
+
+ :pypi:`pytest-litf`
+ *last release*: Jan 18, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.1.1)
+
+ A pytest plugin that stream output in LITF format
+
+ :pypi:`pytest-live`
+ *last release*: Mar 08, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ Live results for pytest
+
+ :pypi:`pytest-localftpserver`
+ *last release*: Aug 25, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ A PyTest plugin which provides an FTP fixture for your tests
+
+ :pypi:`pytest-localserver`
+ *last release*: Nov 19, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ py.test plugin to test server connections locally.
+
+ :pypi:`pytest-localstack`
+ *last release*: Aug 22, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.3.0)
+
+ Pytest plugin for AWS integration tests
+
+ :pypi:`pytest-lockable`
+ *last release*: Nov 09, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ lockable resource plugin for pytest
+
+ :pypi:`pytest-locker`
+ *last release*: Oct 29, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=5.4)
+
+ Used to lock objects during testing, essentially changing assertions from being hard-coded to asserting that nothing changed
+
+ :pypi:`pytest-log`
+ *last release*: Aug 15, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=3.8)
+
+ print log
+
+ :pypi:`pytest-logbook`
+ *last release*: Nov 23, 2015,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.8)
+
+ py.test plugin to capture logbook log messages
+
+ :pypi:`pytest-logdog`
+ *last release*: Jun 15, 2021,
+ *status*: 1 - Planning,
+ *requires*: pytest (>=6.2.0)
+
+ Pytest plugin to test logging
+
+ :pypi:`pytest-logfest`
+ *last release*: Jul 21, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Pytest plugin providing three logger fixtures with basic or full writing to log files
+
+ :pypi:`pytest-logger`
+ *last release*: Jul 25, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.2)
+
+ Plugin configuring handlers for loggers from Python logging module.
+
+ :pypi:`pytest-logging`
+ *last release*: Nov 04, 2015,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Configures logging and allows tweaking the log level with a py.test flag
+
+ :pypi:`pytest-log-report`
+ *last release*: Dec 26, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Package for creating a pytest test run report
+
+ :pypi:`pytest-manual-marker`
+ *last release*: Oct 11, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=6)
+
+ pytest marker for marking manual tests
+
+ :pypi:`pytest-markdown`
+ *last release*: Jan 15, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.0.1,<7.0.0)
+
+ Test your markdown docs with pytest
+
+ :pypi:`pytest-marker-bugzilla`
+ *last release*: Jan 09, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ py.test bugzilla integration plugin, using markers
+
+ :pypi:`pytest-markers-presence`
+ *last release*: Feb 04, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.0)
+
+ A simple plugin to detect missed pytest tags and markers
+
+ :pypi:`pytest-markfiltration`
+ *last release*: Nov 08, 2011,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ UNKNOWN
+
+ :pypi:`pytest-mark-no-py3`
+ *last release*: May 17, 2019,
+ *status*: N/A,
+ *requires*: pytest
+
+ pytest plugin and bowler codemod to help migrate tests to Python 3
+
+ :pypi:`pytest-marks`
+ *last release*: Nov 23, 2012,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ UNKNOWN
+
+ :pypi:`pytest-matcher`
+ *last release*: Apr 23, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.4)
+
+ Match test output against patterns stored in files
+
+ :pypi:`pytest-match-skip`
+ *last release*: May 15, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.4.1)
+
+ Skip matching marks. Matches partial marks using wildcards.
+
+ :pypi:`pytest-mat-report`
+ *last release*: Jan 20, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ this is report
+
+ :pypi:`pytest-matrix`
+ *last release*: Jun 24, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.4.3,<6.0.0)
+
+ Provide tools for generating tests from combinations of fixtures.
+
+ :pypi:`pytest-mccabe`
+ *last release*: Jul 22, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=5.4.0)
+
+ pytest plugin to run the mccabe code complexity checker.
+
+ :pypi:`pytest-md`
+ *last release*: Jul 11, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=4.2.1)
+
+ Plugin for generating Markdown reports for pytest results
+
+ :pypi:`pytest-md-report`
+ *last release*: May 04, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (!=6.0.0,<7,>=3.3.2)
+
+ A pytest plugin to make a test results report with Markdown table format.
+
+ :pypi:`pytest-memprof`
+ *last release*: Mar 29, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Estimates memory consumption of test functions
+
+ :pypi:`pytest-menu`
+ *last release*: Oct 04, 2017,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=2.4.2)
+
+ A pytest plugin for console based interactive test selection just after the collection phase
+
+ :pypi:`pytest-mercurial`
+ *last release*: Nov 21, 2020,
+ *status*: 1 - Planning,
+ *requires*: N/A
+
+ pytest plugin to write integration tests for projects using Mercurial Python internals
+
+ :pypi:`pytest-message`
+ *last release*: Nov 04, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=6.2.5)
+
+ Pytest plugin for sending report message of marked tests execution
+
+ :pypi:`pytest-messenger`
+ *last release*: Dec 16, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Pytest to Slack reporting plugin
+
+ :pypi:`pytest-metadata`
+ *last release*: Nov 27, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.9.0)
+
+ pytest plugin for test session metadata
+
+ :pypi:`pytest-metrics`
+ *last release*: Apr 04, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ Custom metrics report for pytest
+
+ :pypi:`pytest-mimesis`
+ *last release*: Mar 21, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.2)
+
+ Mimesis integration with the pytest test runner
+
+ :pypi:`pytest-minecraft`
+ *last release*: Sep 26, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=6.0.1,<7.0.0)
+
+ A pytest plugin for running tests against Minecraft releases
+
+ :pypi:`pytest-missing-fixtures`
+ *last release*: Oct 14, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Pytest plugin that creates missing fixtures
+
+ :pypi:`pytest-ml`
+ *last release*: May 04, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Test your machine learning!
+
+ :pypi:`pytest-mocha`
+ *last release*: Apr 02, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=5.4.0)
+
+ pytest plugin to display test execution output like mochajs
+
+ :pypi:`pytest-mock`
+ *last release*: May 06, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.0)
+
+ Thin-wrapper around the mock package for easier use with pytest
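+
+ A minimal sketch of the ``mocker`` fixture; patching ``os.getcwd`` is just an example target:
+
+ .. code-block:: python
+
+     import os
+
+     def test_getcwd_is_patched(mocker):
+         # the patch is automatically undone when the test ends
+         mocker.patch("os.getcwd", return_value="/tmp/fake")
+         assert os.getcwd() == "/tmp/fake"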
+
+ :pypi:`pytest-mock-api`
+ *last release*: Feb 13, 2019,
+ *status*: 1 - Planning,
+ *requires*: pytest (>=4.0.0)
+
+ A mock API server with configurable routes and responses available as a fixture.
+
+ :pypi:`pytest-mock-generator`
+ *last release*: Aug 10, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ A pytest fixture wrapper for https://pypi.org/project/mock-generator
+
+ :pypi:`pytest-mock-helper`
+ *last release*: Jan 24, 2018,
+ *status*: N/A,
+ *requires*: pytest
+
+ Help you mock HTTP call and generate mock code
+
+ :pypi:`pytest-mockito`
+ *last release*: Jul 11, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Base fixtures for mockito
+
+ :pypi:`pytest-mockredis`
+ *last release*: Jan 02, 2018,
+ *status*: 2 - Pre-Alpha,
+ *requires*: N/A
+
+ An in-memory mock of a Redis server that runs in a separate thread. This is to be used for unit-tests that require a Redis database.
+
+ :pypi:`pytest-mock-resources`
+ *last release*: Dec 03, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=1.0)
+
+ A pytest plugin for easily instantiating reproducible mock resources.
+
+ :pypi:`pytest-mock-server`
+ *last release*: Apr 06, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Mock server plugin for pytest
+
+ :pypi:`pytest-mockservers`
+ *last release*: Mar 31, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=4.3.0)
+
+ A set of fixtures to test your requests to HTTP/UDP servers
+
+ :pypi:`pytest-modifyjunit`
+ *last release*: Jan 10, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Utility for adding additional properties to junit xml for IDM QE
+
+ :pypi:`pytest-modifyscope`
+ *last release*: Apr 12, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ pytest plugin to modify fixture scope
+
+ :pypi:`pytest-molecule`
+ *last release*: Oct 06, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ PyTest Molecule Plugin :: discover and run molecule tests
+
+ :pypi:`pytest-mongo`
+ *last release*: Jun 07, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ MongoDB process and client fixtures plugin for Pytest.
+
+ :pypi:`pytest-mongodb`
+ *last release*: Dec 07, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.5.2)
+
+ pytest plugin for MongoDB fixtures
+
+ :pypi:`pytest-monitor`
+ *last release*: Aug 24, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Pytest plugin for analyzing resource usage.
+
+ :pypi:`pytest-monkeyplus`
+ *last release*: Sep 18, 2012,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ pytest's monkeypatch subclass with extra functionalities
+
+ :pypi:`pytest-monkeytype`
+ *last release*: Jul 29, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest-monkeytype: Generate Monkeytype annotations from your pytest tests.
+
+ :pypi:`pytest-moto`
+ *last release*: Aug 28, 2015,
+ *status*: 1 - Planning,
+ *requires*: N/A
+
+ Fixtures for integration tests of AWS services; uses the moto mocking library.
+
+ :pypi:`pytest-motor`
+ *last release*: Jul 21, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ A pytest plugin for motor, the non-blocking MongoDB driver.
+
+ :pypi:`pytest-mp`
+ *last release*: May 23, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ A test batcher for multiprocessed Pytest runs
+
+ :pypi:`pytest-mpi`
+ *last release*: Mar 14, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ pytest plugin to collect information from tests
+
+ :pypi:`pytest-mpl`
+ *last release*: Jul 02, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ pytest plugin to help with testing figures output from Matplotlib
+
+ :pypi:`pytest-mproc`
+ *last release*: Mar 07, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ low-startup-overhead, scalable, distributed-testing pytest plugin
+
+ :pypi:`pytest-multi-check`
+ *last release*: Jun 03, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ Pytest plugin that implements multi-checks and soft checks
+
+ :pypi:`pytest-multihost`
+ *last release*: Apr 07, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Utility for writing multi-host tests for pytest
+
+ :pypi:`pytest-multilog`
+ *last release*: Jun 10, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Multi-process logs handling and other helpers for pytest
+
+ :pypi:`pytest-multithreading`
+ *last release*: Aug 12, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=3.6)
+
+ a pytest plugin for threaded and concurrent testing
+
+ :pypi:`pytest-mutagen`
+ *last release*: Jul 24, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=5.4)
+
+ Add the mutation testing feature to pytest
+
+ :pypi:`pytest-mypy`
+ *last release*: Mar 21, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5)
+
+ Mypy static type checker plugin for Pytest
+
+ :pypi:`pytest-mypyd`
+ *last release*: Aug 20, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (<4.7,>=2.8) ; python_version < "3.5"
+
+ Mypy static type checker plugin for Pytest
+
+ :pypi:`pytest-mypy-plugins`
+ *last release*: Oct 19, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=6.0.0)
+
+ pytest plugin for writing tests for mypy plugins
+
+ :pypi:`pytest-mypy-plugins-shim`
+ *last release*: Apr 12, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Substitute for "pytest-mypy-plugins" for Python implementations which aren't supported by mypy.
+
+ :pypi:`pytest-mypy-testing`
+ *last release*: Jun 13, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ Pytest plugin to check mypy output.
+
+ :pypi:`pytest-mysql`
+ *last release*: Nov 22, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ MySQL process and client fixtures for pytest
+
+ :pypi:`pytest-needle`
+ *last release*: Dec 10, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest (<5.0.0,>=3.0.0)
+
+ pytest plugin for visual testing websites using selenium
+
+ :pypi:`pytest-neo`
+ *last release*: Apr 23, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.7.2)
+
+ pytest-neo is a plugin for pytest that shows tests like the screen from The Matrix.
+
+ :pypi:`pytest-network`
+ *last release*: May 07, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ A simple plugin to disable network on socket level.
+
+ :pypi:`pytest-never-sleep`
+ *last release*: May 05, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.5.1)
+
+ pytest plugin that helps avoid adding tests without mocking \`time.sleep\`
+
+ :pypi:`pytest-nginx`
+ *last release*: Aug 12, 2017,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ nginx fixture for pytest
+
+ :pypi:`pytest-nginx-iplweb`
+ *last release*: Mar 01, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ nginx fixture for pytest - iplweb temporary fork
+
+ :pypi:`pytest-ngrok`
+ *last release*: Jan 22, 2020,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-ngsfixtures`
+ *last release*: Sep 06, 2019,
+ *status*: 2 - Pre-Alpha,
+ *requires*: pytest (>=5.0.0)
+
+ pytest ngs fixtures
+
+ :pypi:`pytest-nice`
+ *last release*: May 04, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ A pytest plugin that alerts user of failed test cases with screen notifications
+
+ :pypi:`pytest-nice-parametrize`
+ *last release*: Apr 17, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ A small snippet for nicer pytest parametrization
+
+ :pypi:`pytest-nlcov`
+ *last release*: Jul 07, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest plugin to get the coverage of the new lines (based on git diff) only
+
+ :pypi:`pytest-nocustom`
+ *last release*: Jul 07, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Run all tests without custom markers
+
+ :pypi:`pytest-nodev`
+ *last release*: Jul 21, 2016,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.8.1)
+
+ Test-driven source code search for Python.
+
+ :pypi:`pytest-nogarbage`
+ *last release*: Aug 29, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.6.0)
+
+ Ensure a test produces no garbage
+
+ :pypi:`pytest-notebook`
+ *last release*: Sep 16, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A pytest plugin for testing Jupyter Notebooks
+
+ :pypi:`pytest-notice`
+ *last release*: Nov 05, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ Send pytest execution result email
+
+ :pypi:`pytest-notification`
+ *last release*: Jun 19, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=4)
+
+ A pytest plugin for sending a desktop notification and playing a sound upon completion of tests
+
+ :pypi:`pytest-notifier`
+ *last release*: Jun 12, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ A pytest plugin to notify test result
+
+ :pypi:`pytest-notimplemented`
+ *last release*: Aug 27, 2019,
+ *status*: N/A,
+ *requires*: pytest (>=5.1,<6.0)
+
+ Pytest markers for not implemented features and tests.
+
+ :pypi:`pytest-notion`
+ *last release*: Aug 07, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ A PyTest Reporter to send test runs to Notion.so
+
+ :pypi:`pytest-nunit`
+ *last release*: Aug 04, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A pytest plugin for generating NUnit3 test result XML output
+
+ :pypi:`pytest-ochrus`
+ *last release*: Feb 21, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest results data-base and HTML reporter
+
+ :pypi:`pytest-odoo`
+ *last release*: Nov 04, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.9)
+
+ py.test plugin to run Odoo tests
+
+ :pypi:`pytest-odoo-fixtures`
+ *last release*: Jun 25, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Project description
+
+ :pypi:`pytest-oerp`
+ *last release*: Feb 28, 2012,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin to test OpenERP modules
+
+ :pypi:`pytest-ok`
+ *last release*: Apr 01, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ The ultimate pytest output plugin
+
+ :pypi:`pytest-only`
+ *last release*: Jan 19, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ Use @pytest.mark.only to run a single test
+
+ :pypi:`pytest-oot`
+ *last release*: Sep 18, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Run object-oriented tests in a simple format
+
+ :pypi:`pytest-openfiles`
+ *last release*: Apr 16, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=4.6)
+
+ Pytest plugin for detecting inadvertent open file handles
+
+ :pypi:`pytest-opentmi`
+ *last release*: Nov 04, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.0)
+
+ pytest plugin for publish results to opentmi
+
+ :pypi:`pytest-operator`
+ *last release*: Oct 26, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Fixtures for Operators
+
+ :pypi:`pytest-optional`
+ *last release*: Oct 07, 2015,
+ *status*: N/A,
+ *requires*: N/A
+
+ include/exclude values of fixtures in pytest
+
+ :pypi:`pytest-optional-tests`
+ *last release*: Jul 09, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.5.0)
+
+ Easy declaration of optional tests (i.e., that are not run by default)
+
+ :pypi:`pytest-orchestration`
+ *last release*: Jul 18, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ A pytest plugin for orchestrating tests
+
+ :pypi:`pytest-order`
+ *last release*: May 30, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=5.0)
+
+ pytest plugin to run your tests in a specific order
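+
+ A minimal sketch, assuming the ``order`` marker described in the plugin's documentation:
+
+ .. code-block:: python
+
+     import pytest
+
+     @pytest.mark.order(2)
+     def test_runs_second():
+         pass
+
+     @pytest.mark.order(1)
+     def test_runs_first():
+         # collected later in the file but executed first
+         pass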
+
+ :pypi:`pytest-ordering`
+ *last release*: Nov 14, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ pytest plugin to run your tests in a specific order
+
+ :pypi:`pytest-osxnotify`
+ *last release*: May 15, 2015,
+ *status*: N/A,
+ *requires*: N/A
+
+ OS X notifications for py.test results.
+
+ :pypi:`pytest-otel`
+ *last release*: Dec 03, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest-otel reports OpenTelemetry traces about executed tests
+
+ :pypi:`pytest-pact`
+ *last release*: Jan 07, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A simple plugin to use with pytest
+
+ :pypi:`pytest-pahrametahrize`
+ *last release*: Nov 24, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.0,<7.0)
+
+ Parametrize your tests with a Boston accent.
+
+ :pypi:`pytest-parallel`
+ *last release*: Oct 10, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.0.0)
+
+ a pytest plugin for parallel and concurrent testing
+
+ :pypi:`pytest-parallel-39`
+ *last release*: Jul 12, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.0.0)
+
+ a pytest plugin for parallel and concurrent testing
+
+ :pypi:`pytest-param`
+ *last release*: Sep 11, 2016,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.6.0)
+
+ pytest plugin to test all, first, last or random params
+
+ :pypi:`pytest-paramark`
+ *last release*: Jan 10, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.5.0)
+
+ Configure pytest fixtures using a combination of "parametrize" and markers
+
+ :pypi:`pytest-parametrization`
+ *last release*: Nov 30, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Simpler PyTest parametrization
+
+ :pypi:`pytest-parametrize-cases`
+ *last release*: Dec 12, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=6.1.2,<7.0.0)
+
+ A more user-friendly way to write parametrized tests.
+
+ :pypi:`pytest-parametrized`
+ *last release*: Oct 19, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Pytest plugin for parametrizing tests with default iterables.
+
+ :pypi:`pytest-parawtf`
+ *last release*: Dec 03, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.6.0)
+
+ Finally spell paramete?ri[sz]e correctly
+
+ :pypi:`pytest-pass`
+ *last release*: Dec 04, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Check out https://github.com/elilutsky/pytest-pass
+
+ :pypi:`pytest-passrunner`
+ *last release*: Feb 10, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.6.0)
+
+ Pytest plugin providing the 'run_on_pass' marker
+
+ :pypi:`pytest-paste-config`
+ *last release*: Sep 18, 2013,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Allow setting the path to a paste config file
+
+ :pypi:`pytest-patches`
+ *last release*: Aug 30, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A contextmanager pytest fixture for handling multiple mock patches
+
+ :pypi:`pytest-pdb`
+ *last release*: Jul 31, 2018,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest plugin which adds pdb helper commands related to pytest.
+
+ :pypi:`pytest-peach`
+ *last release*: Apr 12, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.8.7)
+
+ pytest plugin for fuzzing with Peach API Security
+
+ :pypi:`pytest-pep257`
+ *last release*: Jul 09, 2016,
+ *status*: N/A,
+ *requires*: N/A
+
+ py.test plugin for pep257
+
+ :pypi:`pytest-pep8`
+ *last release*: Apr 27, 2014,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest plugin to check PEP8 requirements
+
+ :pypi:`pytest-percent`
+ *last release*: May 21, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=5.2.0)
+
+ Change the exit code of pytest test sessions when a required percent of tests pass.
+
+ :pypi:`pytest-perf`
+ *last release*: Jun 27, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.6) ; extra == 'testing'
+
+ pytest-perf
+
+ :pypi:`pytest-performance`
+ *last release*: Sep 11, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.7.0)
+
+ A simple plugin to ensure the execution of critical sections of code has not been impacted
+
+ :pypi:`pytest-persistence`
+ *last release*: Nov 06, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest tool for persistent objects
+
+ :pypi:`pytest-pgsql`
+ *last release*: May 13, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.0.0)
+
+ Pytest plugins and helpers for tests using a Postgres database.
+
+ :pypi:`pytest-phmdoctest`
+ *last release*: Nov 10, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.2) ; extra == 'test'
+
+ pytest plugin to test Python examples in Markdown using phmdoctest.
+
+ :pypi:`pytest-picked`
+ *last release*: Dec 23, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=3.5.0)
+
+ Run the tests related to the changed files
+
+ :pypi:`pytest-pigeonhole`
+ *last release*: Jun 25, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.4)
+
+
+
+ :pypi:`pytest-pikachu`
+ *last release*: Aug 05, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Show surprise when tests are passing
+
+ :pypi:`pytest-pilot`
+ *last release*: Oct 09, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Slice in your test base thanks to powerful markers.
+
+ :pypi:`pytest-pings`
+ *last release*: Jun 29, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=5.0.0)
+
+ 🦊 The pytest plugin for Firefox Telemetry 📊
+
+ :pypi:`pytest-pinned`
+ *last release*: Sep 17, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A simple pytest plugin for pinning tests
+
+ :pypi:`pytest-pinpoint`
+ *last release*: Sep 25, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=4.4.0)
+
+ A pytest plugin which runs SBFL algorithms to detect faults.
+
+ :pypi:`pytest-pipeline`
+ *last release*: Jan 24, 2017,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Pytest plugin for functional testing of data analysis pipelines
+
+ :pypi:`pytest-platform-markers`
+ *last release*: Sep 09, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.6.0)
+
+ Markers for pytest to skip tests on specific platforms
+
+ :pypi:`pytest-play`
+ *last release*: Jun 12, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ pytest plugin that lets you automate actions and assertions with test metrics reporting by executing plain YAML files
+
+ :pypi:`pytest-playbook`
+ *last release*: Jan 21, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=6.1.2,<7.0.0)
+
+ Pytest plugin for reading playbooks.
+
+ :pypi:`pytest-playwright`
+ *last release*: Oct 28, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ A pytest wrapper with fixtures for Playwright to automate web browsers
+
+ :pypi:`pytest-playwrights`
+ *last release*: Dec 02, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ A pytest wrapper with fixtures for Playwright to automate web browsers
+
+ :pypi:`pytest-playwright-snapshot`
+ *last release*: Aug 19, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ A pytest wrapper for snapshot testing with playwright
+
+ :pypi:`pytest-plt`
+ *last release*: Aug 17, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Fixtures for quickly making Matplotlib plots in tests
+
+ :pypi:`pytest-plugin-helpers`
+ *last release*: Nov 23, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A plugin to help developing and testing other plugins
+
+ :pypi:`pytest-plus`
+ *last release*: Mar 19, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.50)
+
+ PyTest Plus Plugin :: extends pytest functionality
+
+ :pypi:`pytest-pmisc`
+ *last release*: Mar 21, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-pointers`
+ *last release*: Oct 14, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest plugin to define functions you test with special marks for better navigation and reports
+
+ :pypi:`pytest-polarion-cfme`
+ *last release*: Nov 13, 2017,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin for collecting test cases and recording test results
+
+ :pypi:`pytest-polarion-collect`
+ *last release*: Jun 18, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ pytest plugin for collecting polarion test cases data
+
+ :pypi:`pytest-polecat`
+ *last release*: Aug 12, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Provides Polecat pytest fixtures
+
+ :pypi:`pytest-ponyorm`
+ *last release*: Oct 31, 2018,
+ *status*: N/A,
+ *requires*: pytest (>=3.1.1)
+
+ PonyORM in Pytest
+
+ :pypi:`pytest-poo`
+ *last release*: Mar 25, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.3.4)
+
+ Visualize your crappy tests
+
+ :pypi:`pytest-poo-fail`
+ *last release*: Feb 12, 2015,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Visualize your failed tests with poo
+
+ :pypi:`pytest-pop`
+ *last release*: Aug 19, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ A pytest plugin to help with testing pop projects
+
+ :pypi:`pytest-portion`
+ *last release*: Jan 28, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Select a portion of the collected tests
+
+ :pypi:`pytest-postgres`
+ *last release*: Mar 22, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ Run PostgreSQL in Docker container in Pytest.
+
+ :pypi:`pytest-postgresql`
+ *last release*: Nov 05, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.0.0)
+
+ Postgresql fixtures and fixture factories for Pytest.
+
+ :pypi:`pytest-power`
+ *last release*: Dec 31, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=5.4)
+
+ pytest plugin with powerful fixtures
+
+ :pypi:`pytest-pretty-terminal`
+ *last release*: Nov 24, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=3.4.1)
+
+ pytest plugin for generating prettier terminal output
+
+ :pypi:`pytest-pride`
+ *last release*: Apr 02, 2016,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Minitest-style test colors
+
+ :pypi:`pytest-print`
+ *last release*: Jun 17, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=6)
+
+ pytest-print adds the printer fixture you can use to print messages to the user (directly to the pytest runner, not stdout)
+
+ :pypi:`pytest-profiling`
+ *last release*: May 28, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Profiling plugin for py.test
+
+ :pypi:`pytest-progress`
+ *last release*: Nov 09, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.7)
+
+ pytest plugin for instant test progress status
+
+ :pypi:`pytest-prometheus`
+ *last release*: Oct 03, 2017,
+ *status*: N/A,
+ *requires*: N/A
+
+ Report test pass / failures to a Prometheus PushGateway
+
+ :pypi:`pytest-prosper`
+ *last release*: Sep 24, 2018,
+ *status*: N/A,
+ *requires*: N/A
+
+ Test helpers for Prosper projects
+
+ :pypi:`pytest-pspec`
+ *last release*: Jun 02, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.0.0)
+
+ An rspec format reporter for Python pytest
+
+ :pypi:`pytest-psqlgraph`
+ *last release*: Oct 19, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.0)
+
+ pytest plugin for testing applications that use psqlgraph
+
+ :pypi:`pytest-ptera`
+ *last release*: Oct 20, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=6.2.4,<7.0.0)
+
+ Use ptera probes in tests
+
+ :pypi:`pytest-pudb`
+ *last release*: Oct 25, 2018,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=2.0)
+
+ Pytest PuDB debugger integration
+
+ :pypi:`pytest-purkinje`
+ *last release*: Oct 28, 2017,
+ *status*: 2 - Pre-Alpha,
+ *requires*: N/A
+
+ py.test plugin for purkinje test runner
+
+ :pypi:`pytest-pycharm`
+ *last release*: Aug 13, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.3)
+
+ Plugin for py.test to enter PyCharm debugger on uncaught exceptions
+
+ :pypi:`pytest-pycodestyle`
+ *last release*: Aug 10, 2020,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin to run pycodestyle
+
+ :pypi:`pytest-pydev`
+ *last release*: Nov 15, 2017,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ py.test plugin to connect to a remote debug server with PyDev or PyCharm.
+
+ :pypi:`pytest-pydocstyle`
+ *last release*: Aug 10, 2020,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin to run pydocstyle
+
+ :pypi:`pytest-pylint`
+ *last release*: Nov 09, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.4)
+
+ pytest plugin to check source code with pylint
+
+ :pypi:`pytest-pypi`
+ *last release*: Mar 04, 2018,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Easily test your HTTP library against a local copy of pypi
+
+ :pypi:`pytest-pypom-navigation`
+ *last release*: Feb 18, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.0.7)
+
+ Core engine for cookiecutter-qa and pytest-play packages
+
+ :pypi:`pytest-pyppeteer`
+ *last release*: Feb 16, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.0.2)
+
+ A plugin to run pyppeteer in pytest.
+
+ :pypi:`pytest-pyq`
+ *last release*: Mar 10, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Pytest fixture "q" for pyq
+
+ :pypi:`pytest-pyramid`
+ *last release*: Oct 15, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ pytest_pyramid - provides fixtures for testing pyramid applications with pytest test suite
+
+ :pypi:`pytest-pyramid-server`
+ *last release*: May 28, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Pyramid server fixture for py.test
+
+ :pypi:`pytest-pyright`
+ *last release*: Aug 16, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Pytest plugin for type checking code with Pyright
+
+ :pypi:`pytest-pytestrail`
+ *last release*: Aug 27, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.8.0)
+
+ Pytest plugin for interaction with TestRail
+
+ :pypi:`pytest-pythonpath`
+ *last release*: Aug 22, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ pytest plugin for adding to the PYTHONPATH from command line or configs.
+
+ :pypi:`pytest-pytorch`
+ *last release*: May 25, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ pytest plugin for a better developer experience when working with the PyTorch test suite
+
+ :pypi:`pytest-qasync`
+ *last release*: Jul 12, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=5.4.0)
+
+ Pytest support for qasync.
+
+ :pypi:`pytest-qatouch`
+ *last release*: Jun 26, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.2.0)
+
+ Pytest plugin for uploading test results to your QA Touch Testrun.
+
+ :pypi:`pytest-qgis`
+ *last release*: Nov 25, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=6.2.3)
+
+ A pytest plugin for testing QGIS python plugins
+
+ :pypi:`pytest-qml`
+ *last release*: Dec 02, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.0.0)
+
+ Run QML Tests with pytest
+
+ :pypi:`pytest-qr`
+ *last release*: Nov 25, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest plugin to generate test result QR codes
+
+ :pypi:`pytest-qt`
+ *last release*: Jun 13, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.0.0)
+
+ pytest support for PyQt and PySide applications
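+
+ A minimal sketch of the ``qtbot`` fixture, assuming PyQt5 is installed; the button widget is illustrative:
+
+ .. code-block:: python
+
+     from PyQt5 import QtCore, QtWidgets
+
+     def test_button_click(qtbot):
+         button = QtWidgets.QPushButton("Press me")
+         qtbot.addWidget(button)  # register the widget so it is cleaned up after the test
+         with qtbot.waitSignal(button.clicked, timeout=1000):
+             qtbot.mouseClick(button, QtCore.Qt.LeftButton)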
+
+ :pypi:`pytest-qt-app`
+ *last release*: Dec 23, 2015,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ QT app fixture for py.test
+
+ :pypi:`pytest-quarantine`
+ *last release*: Nov 24, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.6)
+
+ A plugin for pytest to manage expected test failures
+
+ :pypi:`pytest-quickcheck`
+ *last release*: Nov 15, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (<6.0.0,>=4.0)
+
+ pytest plugin to generate random data inspired by QuickCheck
+
+ :pypi:`pytest-rabbitmq`
+ *last release*: Jun 02, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.0.0)
+
+ RabbitMQ process and client fixtures for pytest
+
+ :pypi:`pytest-race`
+ *last release*: Nov 21, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Race conditions tester for pytest
+
+ :pypi:`pytest-rage`
+ *last release*: Oct 21, 2011,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin to implement PEP712
+
+ :pypi:`pytest-railflow-testrail-reporter`
+ *last release*: Dec 02, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Generate json reports along with specified metadata defined in test markers.
+
+ :pypi:`pytest-raises`
+ *last release*: Apr 23, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=3.2.2)
+
+ An implementation of pytest.raises as a pytest.mark fixture
+
+ :pypi:`pytest-raisesregexp`
+ *last release*: Dec 18, 2015,
+ *status*: N/A,
+ *requires*: N/A
+
+ Simple pytest plugin to look for regex in Exceptions
+
+ :pypi:`pytest-raisin`
+ *last release*: Jun 25, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ Plugin enabling the use of exception instances with pytest.raises
+
+ :pypi:`pytest-random`
+ *last release*: Apr 28, 2013,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ py.test plugin to randomize tests
+
+ :pypi:`pytest-randomly`
+ *last release*: Nov 30, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Pytest plugin to randomly order tests and control random.seed.
+
+ :pypi:`pytest-randomness`
+ *last release*: May 30, 2019,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Pytest plugin about random seed management
+
+ :pypi:`pytest-random-num`
+ *last release*: Oct 19, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Randomise the order in which pytest tests are run with some control over the randomness
+
+ :pypi:`pytest-random-order`
+ *last release*: Nov 30, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.0.0)
+
+ Randomise the order in which pytest tests are run with some control over the randomness
+
+ :pypi:`pytest-readme`
+ *last release*: Dec 28, 2014,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Test your README.md file
+
+ :pypi:`pytest-reana`
+ *last release*: Nov 22, 2021,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Pytest fixtures for REANA.
+
+ :pypi:`pytest-recording`
+ *last release*: Jul 08, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A pytest plugin that allows you to record network interactions via VCR.py
+
+ :pypi:`pytest-recordings`
+ *last release*: Aug 13, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ Provides pytest plugins for reporting request/response traffic, screenshots, and more to ReportPortal
+
+ :pypi:`pytest-redis`
+ *last release*: Nov 03, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Redis fixtures and fixture factories for Pytest.
+
+ :pypi:`pytest-redislite`
+ *last release*: Sep 19, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Pytest plugin for testing code using Redis
+
+ :pypi:`pytest-redmine`
+ *last release*: Mar 19, 2018,
+ *status*: 1 - Planning,
+ *requires*: N/A
+
+ Pytest plugin for redmine
+
+ :pypi:`pytest-ref`
+ *last release*: Nov 23, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A plugin to store reference files to ease regression testing
+
+ :pypi:`pytest-reference-formatter`
+ *last release*: Oct 01, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Conveniently run pytest with a dot-formatted test reference.
+
+ :pypi:`pytest-regressions`
+ *last release*: Jan 27, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.5.0)
+
+ Easy to use fixtures to write regression tests.
+
+ :pypi:`pytest-regtest`
+ *last release*: Jun 03, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest plugin for regression tests
+
+ :pypi:`pytest-relative-order`
+ *last release*: May 17, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ a pytest plugin that sorts tests using "before" and "after" markers
+
+ :pypi:`pytest-relaxed`
+ *last release*: Jun 14, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (<5,>=3)
+
+ Relaxed test discovery/organization for pytest
+
+ :pypi:`pytest-remfiles`
+ *last release*: Jul 01, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Pytest plugin to create a temporary directory with remote files
+
+ :pypi:`pytest-remotedata`
+ *last release*: Jul 20, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.1)
+
+ Pytest plugin for controlling remote data access.
+
+ :pypi:`pytest-remote-response`
+ *last release*: Jun 30, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.6)
+
+ Pytest plugin for capturing and mocking connection requests.
+
+ :pypi:`pytest-remove-stale-bytecode`
+ *last release*: Mar 04, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ py.test plugin to remove stale byte code files.
+
+ :pypi:`pytest-reorder`
+ *last release*: May 31, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Reorder tests depending on their paths and names.
+
+ :pypi:`pytest-repeat`
+ *last release*: Oct 31, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.6)
+
+ pytest plugin for repeating tests
+
+ :pypi:`pytest-replay`
+ *last release*: Jun 09, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.0.0)
+
+ Saves previous test runs and allows re-executing previous pytest runs to reproduce crashes or flaky tests
+
+ :pypi:`pytest-repo-health`
+ *last release*: Nov 23, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ A pytest plugin to report on repository standards conformance
+
+ :pypi:`pytest-report`
+ *last release*: May 11, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Creates json report that is compatible with atom.io's linter message format
+
+ :pypi:`pytest-reporter`
+ *last release*: Jul 22, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Generate Pytest reports with templates
+
+ :pypi:`pytest-reporter-html1`
+ *last release*: Jun 08, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A basic HTML report template for Pytest
+
+ :pypi:`pytest-reportinfra`
+ *last release*: Aug 11, 2019,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Pytest plugin for reportinfra
+
+ :pypi:`pytest-reporting`
+ *last release*: Oct 25, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A plugin to report summarized results in a table format
+
+ :pypi:`pytest-reportlog`
+ *last release*: Dec 11, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=5.2)
+
+ Replacement for the --resultlog option, focused on simplicity and extensibility
+
+ :pypi:`pytest-report-me`
+ *last release*: Dec 31, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ A pytest plugin to generate report.
+
+ :pypi:`pytest-report-parameters`
+ *last release*: Jun 18, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=2.4.2)
+
+ pytest plugin for adding tests' parameters to junit report
+
+ :pypi:`pytest-reportportal`
+ *last release*: Jun 18, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=3.8.0)
+
+ Agent for Reporting results of tests to the Report Portal
+
+ :pypi:`pytest-reqs`
+ *last release*: May 12, 2019,
+ *status*: N/A,
+ *requires*: pytest (>=2.4.2)
+
+ pytest plugin to check pinned requirements
+
+ :pypi:`pytest-requests`
+ *last release*: Jun 24, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A simple plugin to use with pytest
+
+ :pypi:`pytest-reraise`
+ *last release*: Jun 17, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.6)
+
+ Make multi-threaded pytest test cases fail when they should
+
+ :pypi:`pytest-rerun`
+ *last release*: Jul 08, 2019,
+ *status*: N/A,
+ *requires*: pytest (>=3.6)
+
+ Re-run only changed files in specified branch
+
+ :pypi:`pytest-rerunfailures`
+ *last release*: Sep 17, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.3)
+
+ pytest plugin to re-run tests to eliminate flaky failures
+
+ :pypi:`pytest-resilient-circuits`
+ *last release*: Nov 15, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Resilient Circuits fixtures for PyTest.
+
+ :pypi:`pytest-resource`
+ *last release*: Nov 14, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Load resource fixture plugin to use with pytest
+
+ :pypi:`pytest-resource-path`
+ *last release*: May 01, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.5.0)
+
+ Provides path for uniform access to test resources in isolated directory
+
+ :pypi:`pytest-responsemock`
+ *last release*: Oct 10, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Simplified requests calls mocking for pytest
+
+ :pypi:`pytest-responses`
+ *last release*: Apr 26, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=2.5)
+
+ py.test integration for responses
+
+ :pypi:`pytest-restrict`
+ *last release*: Aug 12, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Pytest plugin to restrict the test types allowed
+
+ :pypi:`pytest-rethinkdb`
+ *last release*: Jul 24, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A RethinkDB plugin for pytest.
+
+ :pypi:`pytest-reverse`
+ *last release*: Aug 12, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Pytest plugin to reverse test order.
+
+ :pypi:`pytest-ringo`
+ *last release*: Sep 27, 2017,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin to test webapplications using the Ringo webframework
+
+ :pypi:`pytest-rng`
+ *last release*: Aug 08, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Fixtures for seeding tests and making randomness reproducible
+
+ :pypi:`pytest-roast`
+ *last release*: Jul 29, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ pytest plugin for ROAST configuration override and fixtures
+
+ :pypi:`pytest-rocketchat`
+ *last release*: Apr 18, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Pytest to Rocket.Chat reporting plugin
+
+ :pypi:`pytest-rotest`
+ *last release*: Sep 08, 2019,
+ *status*: N/A,
+ *requires*: pytest (>=3.5.0)
+
+ Pytest integration with rotest
+
+ :pypi:`pytest-rpc`
+ *last release*: Feb 22, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (~=3.6)
+
+ Extend py.test for RPC OpenStack testing.
+
+ :pypi:`pytest-rst`
+ *last release*: Sep 21, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ Test code from RST documents with pytest
+
+ :pypi:`pytest-rt`
+ *last release*: Sep 04, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest data collector plugin for Testgr
+
+ :pypi:`pytest-rts`
+ *last release*: May 17, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ Coverage-based regression test selection (RTS) plugin for pytest
+
+ :pypi:`pytest-run-changed`
+ *last release*: Apr 02, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ Pytest plugin that runs changed tests only
+
+ :pypi:`pytest-runfailed`
+ *last release*: Mar 24, 2016,
+ *status*: N/A,
+ *requires*: N/A
+
+ implement a --failed option for pytest
+
+ :pypi:`pytest-runner`
+ *last release*: May 19, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.6) ; extra == 'testing'
+
+ Invoke py.test as distutils command with dependency resolution
+
+ :pypi:`pytest-runtime-xfail`
+ *last release*: Aug 26, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Call runtime_xfail() to mark running test as xfail.
+
+ :pypi:`pytest-salt`
+ *last release*: Jan 27, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Pytest Salt Plugin
+
+ :pypi:`pytest-salt-containers`
+ *last release*: Nov 09, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A Pytest plugin that builds and creates docker containers
+
+ :pypi:`pytest-salt-factories`
+ *last release*: Sep 16, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.0.0)
+
+ Pytest Salt Plugin
+
+ :pypi:`pytest-salt-from-filenames`
+ *last release*: Jan 29, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.1)
+
+ Simple PyTest Plugin For Salt's Test Suite Specifically
+
+ :pypi:`pytest-salt-runtests-bridge`
+ *last release*: Dec 05, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=4.1)
+
+ Simple PyTest Plugin For Salt's Test Suite Specifically
+
+ :pypi:`pytest-sanic`
+ *last release*: Oct 25, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=5.2)
+
+ a pytest plugin for Sanic
+
+ :pypi:`pytest-sanity`
+ *last release*: Dec 07, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-sa-pg`
+ *last release*: May 14, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-sbase`
+ *last release*: Dec 03, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ A complete web automation framework for end-to-end testing.
+
+ :pypi:`pytest-scenario`
+ *last release*: Feb 06, 2017,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin for test scenarios
+
+ :pypi:`pytest-schema`
+ *last release*: Aug 31, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.5.0)
+
+ 👠Validate return values against a schema-like object in testing
+
+ :pypi:`pytest-securestore`
+ *last release*: Nov 08, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ An encrypted password store for use within pytest cases
+
+ :pypi:`pytest-select`
+ *last release*: Jan 18, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.0)
+
+ A pytest plugin which allows to (de-)select tests from a file.
+
+ :pypi:`pytest-selenium`
+ *last release*: Sep 19, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.0.0)
+
+ pytest plugin for Selenium
+
+ :pypi:`pytest-seleniumbase`
+ *last release*: Dec 03, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ A complete web automation framework for end-to-end testing.
+
+ :pypi:`pytest-selenium-enhancer`
+ *last release*: Nov 26, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ pytest plugin for Selenium
+
+ :pypi:`pytest-selenium-pdiff`
+ *last release*: Apr 06, 2017,
+ *status*: 2 - Pre-Alpha,
+ *requires*: N/A
+
+ A pytest package implementing perceptualdiff for Selenium tests.
+
+ :pypi:`pytest-send-email`
+ *last release*: Dec 04, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Send pytest execution result email
+
+ :pypi:`pytest-sentry`
+ *last release*: Apr 21, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ A pytest plugin to send testrun information to Sentry.io
+
+ :pypi:`pytest-server-fixtures`
+ *last release*: May 28, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Extensible server fixtures for py.test
+
+ :pypi:`pytest-serverless`
+ *last release*: Nov 27, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Automatically mocks resources from serverless.yml in pytest using moto.
+
+ :pypi:`pytest-services`
+ *last release*: Oct 30, 2020,
+ *status*: 6 - Mature,
+ *requires*: N/A
+
+ Services plugin for pytest testing framework
+
+ :pypi:`pytest-session2file`
+ *last release*: Jan 26, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ pytest-session2file (aka: pytest-session_to_file for v0.1.0 - v0.1.2) is a py.test plugin for capturing and saving to file the stdout of py.test.
+
+ :pypi:`pytest-session-fixture-globalize`
+ *last release*: May 15, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ py.test plugin to make session fixtures behave as if written in conftest, even if they are written in some modules
+
+ :pypi:`pytest-session_to_file`
+ *last release*: Oct 01, 2015,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest-session_to_file is a py.test plugin for capturing and saving to file the stdout of py.test.
+
+ :pypi:`pytest-sftpserver`
+ *last release*: Sep 16, 2019,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ py.test plugin to locally test sftp server connections.
+
+ :pypi:`pytest-shard`
+ *last release*: Dec 11, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+
+
+ :pypi:`pytest-shell`
+ *last release*: Nov 07, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ A pytest plugin to help with testing shell scripts / black box commands
+
+ :pypi:`pytest-sheraf`
+ *last release*: Feb 11, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ Versatile ZODB abstraction layer - pytest fixtures
+
+ :pypi:`pytest-sherlock`
+ *last release*: Nov 18, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.5.1)
+
+ pytest plugin help to find coupled tests
+
+ :pypi:`pytest-shortcuts`
+ *last release*: Oct 29, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Expand command-line shortcuts listed in pytest configuration
+
+ :pypi:`pytest-shutil`
+ *last release*: May 28, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ A goodie-bag of unix shell and environment tools for py.test
+
+ :pypi:`pytest-simplehttpserver`
+ *last release*: Jun 24, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Simple pytest fixture to spin up an HTTP server
+
+ :pypi:`pytest-simple-plugin`
+ *last release*: Nov 27, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ Simple pytest plugin
+
+ :pypi:`pytest-simple-settings`
+ *last release*: Nov 17, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ simple-settings plugin for pytest
+
+ :pypi:`pytest-single-file-logging`
+ *last release*: May 05, 2016,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.8.1)
+
+ Allow for multiple processes to log to a single file
+
+ :pypi:`pytest-skip-markers`
+ *last release*: Oct 04, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.0.0)
+
+ Pytest Salt Plugin
+
+ :pypi:`pytest-skipper`
+ *last release*: Mar 26, 2017,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.0.6)
+
+ A plugin that selects only tests with changes in execution path
+
+ :pypi:`pytest-skippy`
+ *last release*: Jan 27, 2018,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=2.3.4)
+
+ Automatically skip tests that don't need to run!
+
+ :pypi:`pytest-skip-slow`
+ *last release*: Sep 28, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ A pytest plugin to skip \`@pytest.mark.slow\` tests by default.
+
+ :pypi:`pytest-slack`
+ *last release*: Dec 15, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Pytest to Slack reporting plugin
+
+ :pypi:`pytest-slow`
+ *last release*: Sep 28, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ A pytest plugin to skip \`@pytest.mark.slow\` tests by default.
+
+ :pypi:`pytest-smartcollect`
+ *last release*: Oct 04, 2018,
+ *status*: N/A,
+ *requires*: pytest (>=3.5.0)
+
+ A plugin for collecting tests that touch changed code
+
+ :pypi:`pytest-smartcov`
+ *last release*: Sep 30, 2017,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Smart coverage plugin for pytest.
+
+ :pypi:`pytest-smtp`
+ *last release*: Feb 20, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ Send email with pytest execution result
+
+ :pypi:`pytest-snail`
+ *last release*: Nov 04, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=5.0.1)
+
+ Plugin for adding a marker to slow running tests. 🐌
+
+ :pypi:`pytest-snapci`
+ *last release*: Nov 12, 2015,
+ *status*: N/A,
+ *requires*: N/A
+
+ py.test plugin for Snap-CI
+
+ :pypi:`pytest-snapshot`
+ *last release*: Dec 02, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.0.0)
+
+ A plugin for snapshot testing with pytest.
+
+ :pypi:`pytest-snmpserver`
+ *last release*: May 12, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-socket`
+ *last release*: Aug 28, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.6.3)
+
+ Pytest Plugin to disable socket calls during tests
+
+ :pypi:`pytest-soft-assertions`
+ *last release*: May 05, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+
+
+ :pypi:`pytest-solr`
+ *last release*: May 11, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.0.0)
+
+ Solr process and client fixtures for py.test.
+
+ :pypi:`pytest-sorter`
+ *last release*: Apr 20, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.1.1)
+
+ A simple plugin to first execute tests that historically failed more
+
+ :pypi:`pytest-sourceorder`
+ *last release*: Sep 01, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Test-ordering plugin for pytest
+
+ :pypi:`pytest-spark`
+ *last release*: Feb 23, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ pytest plugin to run the tests with support of pyspark.
+
+ :pypi:`pytest-spawner`
+ *last release*: Jul 31, 2015,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ py.test plugin to spawn processes and communicate with them.
+
+ :pypi:`pytest-spec`
+ *last release*: May 04, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Library pytest-spec is a pytest plugin to display test execution output like a SPECIFICATION.
+
+ :pypi:`pytest-sphinx`
+ *last release*: Aug 05, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Doctest plugin for pytest with support for Sphinx-specific doctest-directives
+
+ :pypi:`pytest-spiratest`
+ *last release*: Oct 13, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Exports unit tests as test runs in SpiraTest/Team/Plan
+
+ :pypi:`pytest-splinter`
+ *last release*: Dec 25, 2020,
+ *status*: 6 - Mature,
+ *requires*: N/A
+
+ Splinter plugin for pytest testing framework
+
+ :pypi:`pytest-split`
+ *last release*: Nov 09, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=5,<7)
+
+ Pytest plugin which splits the test suite to equally sized sub suites based on test execution time.
+
+ :pypi:`pytest-splitio`
+ *last release*: Sep 22, 2020,
+ *status*: N/A,
+ *requires*: pytest (<7,>=5.0)
+
+ Split.io SDK integration for e2e tests
+
+ :pypi:`pytest-split-tests`
+ *last release*: Jul 30, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.5)
+
+ A Pytest plugin for running a subset of your tests by splitting them in to equally sized groups. Forked from Mark Adams' original project pytest-test-groups.
+
+ :pypi:`pytest-split-tests-tresorit`
+ *last release*: Feb 22, 2021,
+ *status*: 1 - Planning,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-splunk-addon`
+ *last release*: Nov 29, 2021,
+ *status*: N/A,
+ *requires*: pytest (>5.4.0,<6.3)
+
+ A Dynamic test tool for Splunk Apps and Add-ons
+
+ :pypi:`pytest-splunk-addon-ui-smartx`
+ *last release*: Oct 07, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Library to support testing Splunk Add-on UX
+
+ :pypi:`pytest-splunk-env`
+ *last release*: Oct 22, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=6.1.1,<7.0.0)
+
+ pytest fixtures for interaction with Splunk Enterprise and Splunk Cloud
+
+ :pypi:`pytest-sqitch`
+ *last release*: Apr 06, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ sqitch for pytest
+
+ :pypi:`pytest-sqlalchemy`
+ *last release*: Mar 13, 2018,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest plugin with sqlalchemy related fixtures
+
+ :pypi:`pytest-sql-bigquery`
+ *last release*: Dec 19, 2019,
+ *status*: N/A,
+ *requires*: pytest
+
+ Yet another SQL-testing framework for BigQuery provided by pytest plugin
+
+ :pypi:`pytest-srcpaths`
+ *last release*: Oct 15, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Add paths to sys.path
+
+ :pypi:`pytest-ssh`
+ *last release*: May 27, 2019,
+ *status*: N/A,
+ *requires*: pytest
+
+ pytest plugin for ssh command run
+
+ :pypi:`pytest-start-from`
+ *last release*: Apr 11, 2016,
+ *status*: N/A,
+ *requires*: N/A
+
+ Start pytest run from a given point
+
+ :pypi:`pytest-statsd`
+ *last release*: Nov 30, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.0.0)
+
+ pytest plugin for reporting to graphite
+
+ :pypi:`pytest-stepfunctions`
+ *last release*: May 08, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ A small description
+
+ :pypi:`pytest-steps`
+ *last release*: Sep 23, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Create step-wise / incremental tests in pytest.
+
+ :pypi:`pytest-stepwise`
+ *last release*: Dec 01, 2015,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Run a test suite one failing test at a time.
+
+ :pypi:`pytest-stoq`
+ *last release*: Feb 09, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A plugin to pytest stoq
+
+ :pypi:`pytest-stress`
+ *last release*: Dec 07, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.6.0)
+
+ A Pytest plugin that allows you to loop tests for a user defined amount of time.
+
+ :pypi:`pytest-structlog`
+ *last release*: Sep 21, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ Structured logging assertions
+
+ :pypi:`pytest-structmpd`
+ *last release*: Oct 17, 2018,
+ *status*: N/A,
+ *requires*: N/A
+
+ provide structured temporary directory
+
+ :pypi:`pytest-stub`
+ *last release*: Apr 28, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Stub packages, modules and attributes.
+
+ :pypi:`pytest-stubprocess`
+ *last release*: Sep 17, 2018,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.5.0)
+
+ Provide stub implementations for subprocesses in Python tests
+
+ :pypi:`pytest-study`
+ *last release*: Sep 26, 2017,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=2.0)
+
+ A pytest plugin to organize long run tests (named studies) without interfering with the regular tests
+
+ :pypi:`pytest-subprocess`
+ *last release*: Nov 07, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.0.0)
+
+ A plugin to fake subprocess for pytest
+
+ :pypi:`pytest-subtesthack`
+ *last release*: Mar 02, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ A hack to explicitly set up and tear down fixtures.
+
+ :pypi:`pytest-subtests`
+ *last release*: May 29, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=5.3.0)
+
+ unittest subTest() support and subtests fixture
+
+ :pypi:`pytest-subunit`
+ *last release*: Aug 29, 2017,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest-subunit is a plugin for py.test which outputs test results in subunit format.
+
+ :pypi:`pytest-sugar`
+ *last release*: Jul 06, 2020,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly).
+
+ :pypi:`pytest-sugar-bugfix159`
+ *last release*: Nov 07, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (!=3.7.3,>=3.5); extra == 'testing'
+
+ Workaround for https://github.com/Frozenball/pytest-sugar/issues/159
+
+ :pypi:`pytest-super-check`
+ *last release*: Aug 12, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Pytest plugin to check your TestCase classes call super in setUp, tearDown, etc.
+
+ :pypi:`pytest-svn`
+ *last release*: May 28, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ SVN repository fixture for py.test
+
+ :pypi:`pytest-symbols`
+ *last release*: Nov 20, 2017,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ pytest-symbols is a pytest plugin that adds support for passing test environment symbols into pytest tests.
+
+ :pypi:`pytest-takeltest`
+ *last release*: Oct 13, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Fixtures for ansible, testinfra and molecule
+
+ :pypi:`pytest-talisker`
+ *last release*: Nov 28, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-tap`
+ *last release*: Oct 27, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.0)
+
+ Test Anything Protocol (TAP) reporting plugin for pytest
+
+ :pypi:`pytest-tape`
+ *last release*: Mar 17, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ easy assertion with expected results saved to yaml files
+
+ :pypi:`pytest-target`
+ *last release*: Jan 21, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=6.1.2,<7.0.0)
+
+ Pytest plugin for remote target orchestration.
+
+ :pypi:`pytest-tblineinfo`
+ *last release*: Dec 01, 2015,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=2.0)
+
+ tblineinfo is a py.test plugin that inserts the node id in the final py.test report when the --tb=line option is used
+
+ :pypi:`pytest-teamcity-logblock`
+ *last release*: May 15, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ py.test plugin to introduce block structure in teamcity build log, if output is not captured
+
+ :pypi:`pytest-telegram`
+ *last release*: Dec 10, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Pytest to Telegram reporting plugin
+
+ :pypi:`pytest-tempdir`
+ *last release*: Oct 11, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.8.1)
+
+ Predictable and repeatable tempdir support.
+
+ :pypi:`pytest-terraform`
+ *last release*: Nov 10, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=6.0)
+
+ A pytest plugin for using terraform fixtures
+
+ :pypi:`pytest-terraform-fixture`
+ *last release*: Nov 14, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ generate terraform resources to use with pytest
+
+ :pypi:`pytest-testbook`
+ *last release*: Dec 11, 2016,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ A plugin to run tests written in Jupyter notebook
+
+ :pypi:`pytest-testconfig`
+ *last release*: Jan 11, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Test configuration plugin for pytest.
+
+ :pypi:`pytest-testdirectory`
+ *last release*: Nov 06, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ A py.test plugin providing temporary directories in unit tests.
+
+ :pypi:`pytest-testdox`
+ *last release*: Oct 13, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.7.0)
+
+ A testdox format reporter for pytest
+
+ :pypi:`pytest-test-groups`
+ *last release*: Oct 25, 2016,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ A Pytest plugin for running a subset of your tests by splitting them in to equally sized groups.
+
+ :pypi:`pytest-testinfra`
+ *last release*: Jun 20, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (!=3.0.2)
+
+ Test infrastructures
+
+ :pypi:`pytest-testlink-adaptor`
+ *last release*: Dec 20, 2018,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.6)
+
+ pytest reporting plugin for testlink
+
+ :pypi:`pytest-testmon`
+ *last release*: Oct 22, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ selects tests affected by changed files and methods
+
+ :pypi:`pytest-testobject`
+ *last release*: Sep 24, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.1.1)
+
+ Plugin to use TestObject Suites with Pytest
+
+ :pypi:`pytest-testrail`
+ *last release*: Aug 27, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=3.6)
+
+ pytest plugin for creating TestRail runs and adding results
+
+ :pypi:`pytest-testrail2`
+ *last release*: Nov 17, 2020,
+ *status*: N/A,
+ *requires*: pytest (>=5)
+
+ A small example package
+
+ :pypi:`pytest-testrail-api`
+ *last release*: Nov 30, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=5.5)
+
+ Pytest plugin for integration with TestRail
+
+ :pypi:`pytest-testrail-api-client`
+ *last release*: Dec 03, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ TestRail Api Python Client
+
+ :pypi:`pytest-testrail-appetize`
+ *last release*: Sep 29, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ pytest plugin for creating TestRail runs and adding results
+
+ :pypi:`pytest-testrail-client`
+ *last release*: Sep 29, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ pytest plugin for Testrail
+
+ :pypi:`pytest-testrail-e2e`
+ *last release*: Oct 11, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=3.6)
+
+ pytest plugin for creating TestRail runs and adding results
+
+ :pypi:`pytest-testrail-ns`
+ *last release*: Oct 08, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=3.6)
+
+ pytest plugin for creating TestRail runs and adding results
+
+ :pypi:`pytest-testrail-plugin`
+ *last release*: Apr 21, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ PyTest plugin for TestRail
+
+ :pypi:`pytest-testrail-reporter`
+ *last release*: Sep 10, 2018,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-testreport`
+ *last release*: Nov 12, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+
+
+ :pypi:`pytest-testslide`
+ *last release*: Jan 07, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (~=6.2)
+
+ TestSlide fixture for pytest
+
+ :pypi:`pytest-test-this`
+ *last release*: Sep 15, 2019,
+ *status*: 2 - Pre-Alpha,
+ *requires*: pytest (>=2.3)
+
+ Plugin for py.test to run relevant tests, based on naively checking if a test contains a reference to the symbol you supply
+
+ :pypi:`pytest-test-utils`
+ *last release*: Nov 30, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=5)
+
+
+
+ :pypi:`pytest-tesults`
+ *last release*: Jul 31, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.5.0)
+
+ Tesults plugin for pytest
+
+ :pypi:`pytest-tezos`
+ *last release*: Jan 16, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest-ligo
+
+ :pypi:`pytest-thawgun`
+ *last release*: May 26, 2020,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Pytest plugin for time travel
+
+ :pypi:`pytest-threadleak`
+ *last release*: Sep 08, 2017,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Detects thread leaks
+
+ :pypi:`pytest-tick`
+ *last release*: Aug 31, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=6.2.5,<7.0.0)
+
+ Ticking on tests
+
+ :pypi:`pytest-timeit`
+ *last release*: Oct 13, 2016,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A pytest plugin to time test function runs
+
+ :pypi:`pytest-timeout`
+ *last release*: Oct 11, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.0.0)
+
+ pytest plugin to abort hanging tests
+
+ :pypi:`pytest-timeouts`
+ *last release*: Sep 21, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Linux-only Pytest plugin to control durations of various test case execution phases
+
+ :pypi:`pytest-timer`
+ *last release*: Jun 02, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ A timer plugin for pytest
+
+ :pypi:`pytest-timestamper`
+ *last release*: Jun 06, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest plugin to add a timestamp prefix to the pytest output
+
+ :pypi:`pytest-tipsi-django`
+ *last release*: Nov 17, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.0.0)
+
+
+
+ :pypi:`pytest-tipsi-testing`
+ *last release*: Nov 04, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.3.0)
+
+ Better fixtures management. Various helpers
+
+ :pypi:`pytest-tldr`
+ *last release*: Mar 12, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A pytest plugin that limits the output to just the things you need.
+
+ :pypi:`pytest-tm4j-reporter`
+ *last release*: Sep 01, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ Cloud Jira Test Management (TM4J) PyTest reporter plugin
+
+ :pypi:`pytest-tmreport`
+ *last release*: Nov 17, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ this is a vue-element ui report for pytest
+
+ :pypi:`pytest-todo`
+ *last release*: May 23, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ A small plugin for the pytest testing framework, marking TODO comments as failure
+
+ :pypi:`pytest-tomato`
+ *last release*: Mar 01, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-toolbelt`
+ *last release*: Aug 12, 2019,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ This is just a collection of utilities for pytest, but they don't really belong in pytest proper.
+
+ :pypi:`pytest-toolbox`
+ *last release*: Apr 07, 2018,
+ *status*: N/A,
+ *requires*: pytest (>=3.5.0)
+
+ Numerous useful plugins for pytest.
+
+ :pypi:`pytest-tornado`
+ *last release*: Jun 17, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.6)
+
+ A py.test plugin providing fixtures and markers to simplify testing of asynchronous tornado applications.
+
+ :pypi:`pytest-tornado5`
+ *last release*: Nov 16, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.6)
+
+ A py.test plugin providing fixtures and markers to simplify testing of asynchronous tornado applications.
+
+ :pypi:`pytest-tornado-yen3`
+ *last release*: Oct 15, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ A py.test plugin providing fixtures and markers to simplify testing of asynchronous tornado applications.
+
+ :pypi:`pytest-tornasync`
+ *last release*: Jul 15, 2019,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.0)
+
+ py.test plugin for testing Python 3.5+ Tornado code
+
+ :pypi:`pytest-track`
+ *last release*: Feb 26, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.0)
+
+
+
+ :pypi:`pytest-translations`
+ *last release*: Nov 05, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Test your translation files.
+
+ :pypi:`pytest-travis-fold`
+ *last release*: Nov 29, 2017,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.6.0)
+
+ Folds captured output sections in Travis CI build log
+
+ :pypi:`pytest-trello`
+ *last release*: Nov 20, 2015,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Plugin for py.test that integrates trello using markers
+
+ :pypi:`pytest-trepan`
+ *last release*: Jul 28, 2018,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Pytest plugin for trepan debugger.
+
+ :pypi:`pytest-trialtemp`
+ *last release*: Jun 08, 2015,
+ *status*: N/A,
+ *requires*: N/A
+
+ py.test plugin for using the same _trial_temp working directory as trial
+
+ :pypi:`pytest-trio`
+ *last release*: Oct 16, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest plugin for trio
+
+ :pypi:`pytest-tspwplib`
+ *last release*: Jan 08, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ A simple plugin to use with tspwplib
+
+ :pypi:`pytest-tstcls`
+ *last release*: Mar 23, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Test Class Base
+
+ :pypi:`pytest-twisted`
+ *last release*: Aug 30, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.3)
+
+ A twisted plugin for pytest.
+
+ :pypi:`pytest-typhoon-xray`
+ *last release*: Nov 03, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Typhoon HIL plugin for pytest
+
+ :pypi:`pytest-tytest`
+ *last release*: May 25, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=5.4.2)
+
+ Typhoon HIL plugin for pytest
+
+ :pypi:`pytest-ubersmith`
+ *last release*: Apr 13, 2015,
+ *status*: N/A,
+ *requires*: N/A
+
+ Easily mock calls to ubersmith at the \`requests\` level.
+
+ :pypi:`pytest-ui`
+ *last release*: Jul 05, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Text User Interface for running python tests
+
+ :pypi:`pytest-unhandled-exception-exit-code`
+ *last release*: Jun 22, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.3)
+
+ Plugin for py.test to set a different exit code on uncaught exceptions
+
+ :pypi:`pytest-unittest-filter`
+ *last release*: Jan 12, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.1.0)
+
+ A pytest plugin for filtering unittest-based test classes
+
+ :pypi:`pytest-unmarked`
+ *last release*: Aug 27, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Run only unmarked tests
+
+ :pypi:`pytest-unordered`
+ *last release*: Mar 28, 2021,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Test equality of unordered collections in pytest
+
+ :pypi:`pytest-upload-report`
+ *last release*: Jun 18, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ pytest-upload-report is a plugin for pytest that uploads your test report for test results.
+
+ :pypi:`pytest-utils`
+ *last release*: Dec 04, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.2.5,<7.0.0)
+
+ Some helpers for pytest.
+
+ :pypi:`pytest-vagrant`
+ *last release*: Sep 07, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ A py.test plugin providing access to vagrant.
+
+ :pypi:`pytest-valgrind`
+ *last release*: May 19, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-variables`
+ *last release*: Oct 23, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=2.4.2)
+
+ pytest plugin for providing variables to tests/fixtures
+
+ :pypi:`pytest-variant`
+ *last release*: Jun 20, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Variant support for Pytest
+
+ :pypi:`pytest-vcr`
+ *last release*: Apr 26, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=3.6.0)
+
+ Plugin for managing VCR.py cassettes
+
+ :pypi:`pytest-vcr-delete-on-fail`
+ *last release*: Aug 13, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.2.2,<7.0.0)
+
+ A pytest plugin that automates vcrpy cassettes deletion on test failure.
+
+ :pypi:`pytest-vcrpandas`
+ *last release*: Jan 12, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ Test from HTTP interactions to dataframe processed.
+
+ :pypi:`pytest-venv`
+ *last release*: Aug 04, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest
+
+ py.test fixture for creating a virtual environment
+
+ :pypi:`pytest-ver`
+ *last release*: Aug 30, 2021,
+ *status*: 2 - Pre-Alpha,
+ *requires*: N/A
+
+ Pytest module with Verification Report
+
+ :pypi:`pytest-verbose-parametrize`
+ *last release*: May 28, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ More descriptive output for parametrized py.test tests
+
+ :pypi:`pytest-vimqf`
+ *last release*: Feb 08, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=6.2.2,<7.0.0)
+
+ A simple pytest plugin that will shrink pytest output when specified, to fit vim quickfix window.
+
+ :pypi:`pytest-virtualenv`
+ *last release*: May 28, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Virtualenv fixture for py.test
+
+ :pypi:`pytest-voluptuous`
+ *last release*: Jun 09, 2020,
+ *status*: N/A,
+ *requires*: pytest
+
+ Pytest plugin for asserting data against voluptuous schema.
+
+ :pypi:`pytest-vscodedebug`
+ *last release*: Dec 04, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ A pytest plugin to easily enable debugging tests within Visual Studio Code
+
+ :pypi:`pytest-vts`
+ *last release*: Jun 05, 2019,
+ *status*: N/A,
+ *requires*: pytest (>=2.3)
+
+ pytest plugin for automatic recording of http stubbed tests
+
+ :pypi:`pytest-vw`
+ *last release*: Oct 07, 2015,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ pytest-vw makes your failing test cases succeed under CI tools scrutiny
+
+ :pypi:`pytest-vyper`
+ *last release*: May 28, 2020,
+ *status*: 2 - Pre-Alpha,
+ *requires*: N/A
+
+ Plugin for the vyper smart contract language.
+
+ :pypi:`pytest-wa-e2e-plugin`
+ *last release*: Feb 18, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.5.0)
+
+ Pytest plugin for testing whatsapp bots with end to end tests
+
+ :pypi:`pytest-watch`
+ *last release*: May 20, 2018,
+ *status*: N/A,
+ *requires*: N/A
+
+ Local continuous test runner with pytest and watchdog.
+
+ :pypi:`pytest-watcher`
+ *last release*: Sep 18, 2021,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+ Continuously runs pytest on changes in \*.py files
+
+ :pypi:`pytest-wdl`
+ *last release*: Nov 17, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: N/A
+
+ Pytest plugin for testing WDL workflows.
+
+ :pypi:`pytest-webdriver`
+ *last release*: May 28, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest
+
+ Selenium webdriver fixture for py.test
+
+ :pypi:`pytest-wetest`
+ *last release*: Nov 10, 2018,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Welian API Automation test framework pytest plugin
+
+ :pypi:`pytest-whirlwind`
+ *last release*: Jun 12, 2020,
+ *status*: N/A,
+ *requires*: N/A
+
+ Testing Tornado.
+
+ :pypi:`pytest-wholenodeid`
+ *last release*: Aug 26, 2015,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.0)
+
+ pytest addon for displaying the whole node id for failures
+
+ :pypi:`pytest-win32consoletitle`
+ *last release*: Aug 08, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest progress in console title (Win32 only)
+
+ :pypi:`pytest-winnotify`
+ *last release*: Apr 22, 2016,
+ *status*: N/A,
+ *requires*: N/A
+
+ Windows tray notifications for py.test results.
+
+ :pypi:`pytest-with-docker`
+ *last release*: Nov 09, 2021,
+ *status*: N/A,
+ *requires*: pytest
+
+ pytest with docker helpers.
+
+ :pypi:`pytest-workflow`
+ *last release*: Dec 03, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.4.0)
+
+ A pytest plugin for configuring workflow/pipeline tests using YAML files
+
+ :pypi:`pytest-xdist`
+ *last release*: Sep 21, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=6.0.0)
+
+ pytest xdist plugin for distributed testing and loop-on-failing modes
+
+ :pypi:`pytest-xdist-debug-for-graingert`
+ *last release*: Jul 24, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.4.0)
+
+ pytest xdist plugin for distributed testing and loop-on-failing modes
+
+ :pypi:`pytest-xdist-forked`
+ *last release*: Feb 10, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.4.0)
+
+ forked from pytest-xdist
+
+ :pypi:`pytest-xdist-tracker`
+ *last release*: Nov 18, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=3.5.1)
+
+ pytest plugin helps to reproduce failures for particular xdist node
+
+ :pypi:`pytest-xfaillist`
+ *last release*: Sep 17, 2021,
+ *status*: N/A,
+ *requires*: pytest (>=6.2.2,<7.0.0)
+
+ Maintain a xfaillist in an additional file to avoid merge-conflicts.
+
+ :pypi:`pytest-xfiles`
+ *last release*: Feb 27, 2018,
+ *status*: N/A,
+ *requires*: N/A
+
+ Pytest fixtures providing data read from function, module or package related (x)files.
+
+ :pypi:`pytest-xlog`
+ *last release*: May 31, 2020,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ Extended logging for test and decorators
+
+ :pypi:`pytest-xpara`
+ *last release*: Oct 30, 2017,
+ *status*: 3 - Alpha,
+ *requires*: pytest
+
+ An extended parametrizing plugin of pytest.
+
+ :pypi:`pytest-xprocess`
+ *last release*: Jul 28, 2021,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.8)
+
+ A pytest plugin for managing processes across test runs.
+
+ :pypi:`pytest-xray`
+ *last release*: May 30, 2019,
+ *status*: 3 - Alpha,
+ *requires*: N/A
+
+
+
+ :pypi:`pytest-xrayjira`
+ *last release*: Mar 17, 2020,
+ *status*: 3 - Alpha,
+ *requires*: pytest (==4.3.1)
+
+
+
+ :pypi:`pytest-xray-server`
+ *last release*: Oct 27, 2021,
+ *status*: 3 - Alpha,
+ *requires*: pytest (>=5.3.1)
+
+
+
+ :pypi:`pytest-xvfb`
+ *last release*: Jun 09, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=2.8.1)
+
+ A pytest plugin to run Xvfb for tests.
+
+ :pypi:`pytest-yaml`
+ *last release*: Oct 05, 2018,
+ *status*: N/A,
+ *requires*: pytest
+
+ This plugin is used to load yaml output to your test using pytest framework.
+
+ :pypi:`pytest-yamltree`
+ *last release*: Mar 02, 2020,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.1.1)
+
+ Create or check file/directory trees described by YAML
+
+ :pypi:`pytest-yamlwsgi`
+ *last release*: May 11, 2010,
+ *status*: N/A,
+ *requires*: N/A
+
+ Run tests against wsgi apps defined in yaml
+
+ :pypi:`pytest-yapf`
+ *last release*: Jul 06, 2017,
+ *status*: 4 - Beta,
+ *requires*: pytest (>=3.1.1)
+
+ Run yapf
+
+ :pypi:`pytest-yapf3`
+ *last release*: Aug 03, 2020,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=5.4)
+
+ Validate your Python file format with yapf
+
+ :pypi:`pytest-yield`
+ *last release*: Jan 23, 2019,
+ *status*: N/A,
+ *requires*: N/A
+
+ PyTest plugin to run tests concurrently; each \`yield\` switches context to another one
+
+ :pypi:`pytest-yuk`
+ *last release*: Mar 26, 2021,
+ *status*: N/A,
+ *requires*: N/A
+
+ Display tests you are uneasy with, using 🤢/🤮 for pass/fail of tests marked with yuk.
+
+ :pypi:`pytest-zafira`
+ *last release*: Sep 18, 2019,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (==4.1.1)
+
+ A Zafira plugin for pytest
+
+ :pypi:`pytest-zap`
+ *last release*: May 12, 2014,
+ *status*: 4 - Beta,
+ *requires*: N/A
+
+ OWASP ZAP plugin for py.test.
+
+ :pypi:`pytest-zebrunner`
+ *last release*: Dec 02, 2021,
+ *status*: 5 - Production/Stable,
+ *requires*: pytest (>=4.5.0)
+
+ Pytest connector for Zebrunner reporting
+
+ :pypi:`pytest-zigzag`
+ *last release*: Feb 27, 2019,
+ *status*: 4 - Beta,
+ *requires*: pytest (~=3.6)
+
+ Extend py.test for RPC OpenStack testing.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/reference.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/reference.rst
new file mode 100644
index 0000000000..0d80c80680
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/reference/reference.rst
@@ -0,0 +1,2101 @@
+.. _`api-reference`:
+
+API Reference
+=============
+
+This page contains the full reference to pytest's API.
+
+.. contents::
+ :depth: 3
+ :local:
+
+Constants
+---------
+
+pytest.__version__
+~~~~~~~~~~~~~~~~~~
+
+The current pytest version, as a string::
+
+ >>> import pytest
+ >>> pytest.__version__
+ '7.0.0'
+
+
+.. _`version-tuple`:
+
+pytest.version_tuple
+~~~~~~~~~~~~~~~~~~~~
+
+.. versionadded:: 7.0
+
+The current pytest version, as a tuple::
+
+ >>> import pytest
+ >>> pytest.version_tuple
+ (7, 0, 0)
+
+For pre-releases, the last component will be a string with the prerelease version::
+
+ >>> import pytest
+ >>> pytest.version_tuple
+ (7, 0, '0rc1')
+
+
+Functions
+---------
+
+pytest.approx
+~~~~~~~~~~~~~
+
+.. autofunction:: pytest.approx
+
+pytest.fail
+~~~~~~~~~~~
+
+**Tutorial**: :ref:`skipping`
+
+.. autofunction:: pytest.fail(reason, [pytrace=True, msg=None])
+
+pytest.skip
+~~~~~~~~~~~
+
+.. autofunction:: pytest.skip(reason, [allow_module_level=False, msg=None])
+
+.. _`pytest.importorskip ref`:
+
+pytest.importorskip
+~~~~~~~~~~~~~~~~~~~
+
+.. autofunction:: pytest.importorskip
+
+pytest.xfail
+~~~~~~~~~~~~
+
+.. autofunction:: pytest.xfail
+
+pytest.exit
+~~~~~~~~~~~
+
+.. autofunction:: pytest.exit(reason, [returncode=False, msg=None])
+
+pytest.main
+~~~~~~~~~~~
+
+.. autofunction:: pytest.main
+
+pytest.param
+~~~~~~~~~~~~
+
+.. autofunction:: pytest.param(*values, [id], [marks])
+
+pytest.raises
+~~~~~~~~~~~~~
+
+**Tutorial**: :ref:`assertraises`.
+
+.. autofunction:: pytest.raises(expected_exception: Exception [, *, match])
+ :with: excinfo
+
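+ An illustrative sketch of the context-manager form (the tested expression and ``match`` pattern are arbitrary):
+
+ .. code-block:: python
+
+     import pytest
+
+
+     def test_zero_division():
+         with pytest.raises(ZeroDivisionError, match="division"):
+             1 / 0
+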
+pytest.deprecated_call
+~~~~~~~~~~~~~~~~~~~~~~
+
+**Tutorial**: :ref:`ensuring_function_triggers`.
+
+.. autofunction:: pytest.deprecated_call()
+ :with:
+
+pytest.register_assert_rewrite
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+**Tutorial**: :ref:`assertion-rewriting`.
+
+.. autofunction:: pytest.register_assert_rewrite
+
+pytest.warns
+~~~~~~~~~~~~
+
+**Tutorial**: :ref:`assertwarnings`
+
+.. autofunction:: pytest.warns(expected_warning: Exception, [match])
+ :with:
+
+pytest.freeze_includes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+**Tutorial**: :ref:`freezing-pytest`.
+
+.. autofunction:: pytest.freeze_includes
+
+.. _`marks ref`:
+
+Marks
+-----
+
+ Marks can be used to apply metadata to *test functions* (but not fixtures), which can then be accessed by
+fixtures or plugins.
+
+
+
+
+.. _`pytest.mark.filterwarnings ref`:
+
+pytest.mark.filterwarnings
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+**Tutorial**: :ref:`filterwarnings`.
+
+Add warning filters to marked test items.
+
+.. py:function:: pytest.mark.filterwarnings(filter)
+
+ :keyword str filter:
+ A *warning specification string*, which is composed of contents of the tuple ``(action, message, category, module, lineno)``
+ as specified in :ref:`python:warning-filter` section of
+ the Python documentation, separated by ``":"``. Optional fields can be omitted.
+ Module names passed for filtering are not regex-escaped.
+
+ For example:
+
+ .. code-block:: python
+
+ @pytest.mark.filterwarnings("ignore:.*usage will be deprecated.*:DeprecationWarning")
+ def test_foo():
+ ...
+
+
+.. _`pytest.mark.parametrize ref`:
+
+pytest.mark.parametrize
+~~~~~~~~~~~~~~~~~~~~~~~
+
+:ref:`parametrize`.
+
+This mark has the same signature as :py:meth:`pytest.Metafunc.parametrize`; see there.
+
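+ A minimal illustrative sketch of the mark in use (test and parameter names are arbitrary):
+
+ .. code-block:: python
+
+     import pytest
+
+
+     @pytest.mark.parametrize("n,expected", [(1, 2), (3, 4)])
+     def test_increment(n, expected):
+         # Each (n, expected) pair becomes its own test item.
+         assert n + 1 == expected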
+
+.. _`pytest.mark.skip ref`:
+
+pytest.mark.skip
+~~~~~~~~~~~~~~~~
+
+:ref:`skip`.
+
+Unconditionally skip a test function.
+
+.. py:function:: pytest.mark.skip(reason=None)
+
+ :keyword str reason: Reason why the test function is being skipped.
+
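+ An illustrative sketch (the reason text is arbitrary):
+
+ .. code-block:: python
+
+     import pytest
+
+
+     @pytest.mark.skip(reason="feature not yet implemented")
+     def test_future_feature():
+         ...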
+
+.. _`pytest.mark.skipif ref`:
+
+pytest.mark.skipif
+~~~~~~~~~~~~~~~~~~
+
+:ref:`skipif`.
+
+Skip a test function if a condition is ``True``.
+
+.. py:function:: pytest.mark.skipif(condition, *, reason=None)
+
+ :type condition: bool or str
+ :param condition: ``True/False`` if the condition should be skipped or a :ref:`condition string <string conditions>`.
+ :keyword str reason: Reason why the test function is being skipped.
+
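+ An illustrative sketch using a platform check (condition and reason are arbitrary):
+
+ .. code-block:: python
+
+     import sys
+
+     import pytest
+
+
+     @pytest.mark.skipif(sys.platform == "win32", reason="does not run on Windows")
+     def test_posix_only():
+         ...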
+
+.. _`pytest.mark.usefixtures ref`:
+
+pytest.mark.usefixtures
+~~~~~~~~~~~~~~~~~~~~~~~
+
+**Tutorial**: :ref:`usefixtures`.
+
+Mark a test function as using the given fixture names.
+
+.. py:function:: pytest.mark.usefixtures(*names)
+
+ :param args: The names of the fixtures to use, as strings.
+
+.. note::
+
+ When using `usefixtures` in hooks, it can only load fixtures when applied to a test function before test setup
+ (for example in the `pytest_collection_modifyitems` hook).
+
+ Also note that this mark has no effect when applied to **fixtures**.
+
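+ An illustrative sketch (the ``cleandir`` fixture is hypothetical and assumed to be defined elsewhere):
+
+ .. code-block:: python
+
+     import pytest
+
+
+     @pytest.mark.usefixtures("cleandir")
+     def test_runs_in_clean_directory():
+         # The fixture is set up and torn down around the test even though
+         # the test body never references it directly.
+         ...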
+
+
+.. _`pytest.mark.xfail ref`:
+
+pytest.mark.xfail
+~~~~~~~~~~~~~~~~~~
+
+**Tutorial**: :ref:`xfail`.
+
+Marks a test function as *expected to fail*.
+
+.. py:function:: pytest.mark.xfail(condition=None, *, reason=None, raises=None, run=True, strict=False)
+
+ :type condition: bool or str
+ :param condition:
+ Condition for marking the test function as xfail (``True/False`` or a
+ :ref:`condition string <string conditions>`). If a bool, you also have
+ to specify ``reason`` (see :ref:`condition string <string conditions>`).
+ :keyword str reason:
+ Reason why the test function is marked as xfail.
+ :keyword Type[Exception] raises:
+ Exception subclass expected to be raised by the test function; other exceptions will fail the test.
+ :keyword bool run:
+ If the test function should actually be executed. If ``False``, the function will always xfail and will
+ not be executed (useful if a function is segfaulting).
+ :keyword bool strict:
+ * If ``False`` (the default) the function will be shown in the terminal output as ``xfailed`` if it fails
+ and as ``xpass`` if it passes. In both cases this will not cause the test suite to fail as a whole. This
+ is particularly useful to mark *flaky* tests (tests that fail at random) to be tackled later.
+ * If ``True``, the function will be shown in the terminal output as ``xfailed`` if it fails, but if it
+ unexpectedly passes then it will **fail** the test suite. This is particularly useful to mark functions
+ that are always failing and there should be a clear indication if they unexpectedly start to pass (for example
+ a new release of a library fixes a known bug).
+
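+ An illustrative sketch combining a condition with ``strict`` (condition and reason are arbitrary):
+
+ .. code-block:: python
+
+     import sys
+
+     import pytest
+
+
+     @pytest.mark.xfail(sys.version_info < (3, 8), reason="requires Python 3.8+", strict=True)
+     def test_new_syntax_feature():
+         ...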
+
+Custom marks
+~~~~~~~~~~~~
+
+Marks are created dynamically using the factory object ``pytest.mark`` and applied as a decorator.
+
+For example:
+
+.. code-block:: python
+
+ @pytest.mark.timeout(10, "slow", method="thread")
+ def test_function():
+ ...
+
+Will create and attach a :class:`Mark <pytest.Mark>` object to the collected
+:class:`Item <pytest.Item>`, which can then be accessed by fixtures or hooks with
+:meth:`Node.iter_markers <_pytest.nodes.Node.iter_markers>`. The ``mark`` object will have the following attributes:
+
+.. code-block:: python
+
+ mark.args == (10, "slow")
+ mark.kwargs == {"method": "thread"}
+
+Example for using multiple custom markers:
+
+.. code-block:: python
+
+ @pytest.mark.timeout(10, "slow", method="thread")
+ @pytest.mark.slow
+ def test_function():
+ ...
+
+ When :meth:`Node.iter_markers <_pytest.nodes.Node.iter_markers>` or :meth:`Node.iter_markers_with_node <_pytest.nodes.Node.iter_markers_with_node>` is used with multiple markers, the marker closest to the function will be iterated over first. The above example will result in ``@pytest.mark.slow`` followed by ``@pytest.mark.timeout(...)``.
+
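+ As a sketch of how a plugin might read these marks during collection (the hook body is illustrative only):
+
+ .. code-block:: python
+
+     # conftest.py
+     def pytest_collection_modifyitems(items):
+         for item in items:
+             for mark in item.iter_markers(name="timeout"):
+                 # mark.args and mark.kwargs hold what was passed to the decorator.
+                 print(item.nodeid, mark.args, mark.kwargs)
+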
+.. _`fixtures-api`:
+
+Fixtures
+--------
+
+**Tutorial**: :ref:`fixture`.
+
+Fixtures are requested by test functions or other fixtures by declaring them as argument names.
+
+
+Example of a test requiring a fixture:
+
+.. code-block:: python
+
+ def test_output(capsys):
+ print("hello")
+ out, err = capsys.readouterr()
+ assert out == "hello\n"
+
+
+Example of a fixture requiring another fixture:
+
+.. code-block:: python
+
+ @pytest.fixture
+ def db_session(tmp_path):
+ fn = tmp_path / "db.file"
+ return connect(fn)
+
+For more details, consult the full :ref:`fixtures docs <fixture>`.
+
+
+.. _`pytest.fixture-api`:
+
+@pytest.fixture
+~~~~~~~~~~~~~~~
+
+.. autofunction:: pytest.fixture
+ :decorator:
+
+
+.. fixture:: cache
+
+config.cache
+~~~~~~~~~~~~
+
+**Tutorial**: :ref:`cache`.
+
+The ``config.cache`` object allows other plugins and fixtures
+ to store and retrieve values across test runs. To access it from fixtures,
+request ``pytestconfig`` into your fixture and get it with ``pytestconfig.cache``.
+
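+ For example, a fixture might cache an expensive value between runs (a minimal sketch; the key name and value are arbitrary):
+
+ .. code-block:: python
+
+     import pytest
+
+
+     @pytest.fixture
+     def expensive_value(pytestconfig):
+         value = pytestconfig.cache.get("example/expensive_value", None)
+         if value is None:
+             value = 42  # stand-in for an expensive computation
+             pytestconfig.cache.set("example/expensive_value", value)
+         return value
+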
+Under the hood, the cache plugin uses the simple
+``dumps``/``loads`` API of the :py:mod:`json` stdlib module.
+
+``config.cache`` is an instance of :class:`pytest.Cache`:
+
+.. autoclass:: pytest.Cache()
+ :members:
+
+
+.. fixture:: capsys
+
+capsys
+~~~~~~
+
+:ref:`captures`.
+
+.. autofunction:: _pytest.capture.capsys()
+ :no-auto-options:
+
+ Returns an instance of :class:`CaptureFixture[str] <pytest.CaptureFixture>`.
+
+ Example:
+
+ .. code-block:: python
+
+ def test_output(capsys):
+ print("hello")
+ captured = capsys.readouterr()
+ assert captured.out == "hello\n"
+
+.. autoclass:: pytest.CaptureFixture()
+ :members:
+
+
+.. fixture:: capsysbinary
+
+capsysbinary
+~~~~~~~~~~~~
+
+:ref:`captures`.
+
+.. autofunction:: _pytest.capture.capsysbinary()
+ :no-auto-options:
+
+ Returns an instance of :class:`CaptureFixture[bytes] <pytest.CaptureFixture>`.
+
+ Example:
+
+ .. code-block:: python
+
+ def test_output(capsysbinary):
+ print("hello")
+ captured = capsysbinary.readouterr()
+ assert captured.out == b"hello\n"
+
+
+.. fixture:: capfd
+
+capfd
+~~~~~~
+
+:ref:`captures`.
+
+.. autofunction:: _pytest.capture.capfd()
+ :no-auto-options:
+
+ Returns an instance of :class:`CaptureFixture[str] <pytest.CaptureFixture>`.
+
+ Example:
+
+ .. code-block:: python
+
+ def test_system_echo(capfd):
+ os.system('echo "hello"')
+ captured = capfd.readouterr()
+ assert captured.out == "hello\n"
+
+
+.. fixture:: capfdbinary
+
+capfdbinary
+~~~~~~~~~~~~
+
+:ref:`captures`.
+
+.. autofunction:: _pytest.capture.capfdbinary()
+ :no-auto-options:
+
+ Returns an instance of :class:`CaptureFixture[bytes] <pytest.CaptureFixture>`.
+
+ Example:
+
+ .. code-block:: python
+
+ def test_system_echo(capfdbinary):
+ os.system('echo "hello"')
+ captured = capfdbinary.readouterr()
+ assert captured.out == b"hello\n"
+
+
+.. fixture:: doctest_namespace
+
+doctest_namespace
+~~~~~~~~~~~~~~~~~
+
+:ref:`doctest`.
+
+.. autofunction:: _pytest.doctest.doctest_namespace()
+
+ Usually this fixture is used in conjunction with another ``autouse`` fixture:
+
+ .. code-block:: python
+
+ @pytest.fixture(autouse=True)
+ def add_np(doctest_namespace):
+ doctest_namespace["np"] = numpy
+
+ For more details: :ref:`doctest_namespace`.
+
+
+.. fixture:: request
+
+request
+~~~~~~~
+
+:ref:`request example`.
+
+ The ``request`` fixture is a special fixture providing information about the requesting test function.
+
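+ For example, a fixture can inspect the test that requested it (a minimal sketch):
+
+ .. code-block:: python
+
+     import pytest
+
+
+     @pytest.fixture
+     def resource(request):
+         # request.node is the test item that requested this fixture.
+         print("setting up resource for", request.node.name)
+         yield object()
+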
+.. autoclass:: pytest.FixtureRequest()
+ :members:
+
+
+.. fixture:: pytestconfig
+
+pytestconfig
+~~~~~~~~~~~~
+
+.. autofunction:: _pytest.fixtures.pytestconfig()
+
+
+.. fixture:: record_property
+
+record_property
+~~~~~~~~~~~~~~~~~~~
+
+**Tutorial**: :ref:`record_property example`.
+
+.. autofunction:: _pytest.junitxml.record_property()
+
+
+.. fixture:: record_testsuite_property
+
+record_testsuite_property
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+**Tutorial**: :ref:`record_testsuite_property example`.
+
+.. autofunction:: _pytest.junitxml.record_testsuite_property()
+
+
+.. fixture:: caplog
+
+caplog
+~~~~~~
+
+:ref:`logging`.
+
+.. autofunction:: _pytest.logging.caplog()
+ :no-auto-options:
+
+ Returns a :class:`pytest.LogCaptureFixture` instance.
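+
+    A minimal sketch (assumes the test module imports :mod:`logging`):
+
+    .. code-block:: python
+
+        def test_warning_logged(caplog):
+            logging.getLogger().warning("boom")
+            # caplog.text holds the formatted text of all captured records.
+            assert "boom" in caplog.text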
+
+.. autoclass:: pytest.LogCaptureFixture()
+ :members:
+
+
+.. fixture:: monkeypatch
+
+monkeypatch
+~~~~~~~~~~~
+
+:ref:`monkeypatching`.
+
+.. autofunction:: _pytest.monkeypatch.monkeypatch()
+ :no-auto-options:
+
+ Returns a :class:`~pytest.MonkeyPatch` instance.
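+
+    A minimal sketch (assumes the test module imports :mod:`os`; the environment
+    variable is only illustrative):
+
+    .. code-block:: python
+
+        def test_fake_home(monkeypatch, tmp_path):
+            # The change is undone automatically when the test finishes.
+            monkeypatch.setenv("HOME", str(tmp_path))
+            assert os.environ["HOME"] == str(tmp_path)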
+
+.. autoclass:: pytest.MonkeyPatch
+ :members:
+
+
+.. fixture:: pytester
+
+pytester
+~~~~~~~~
+
+.. versionadded:: 6.2
+
+Provides a :class:`~pytest.Pytester` instance that can be used to run and test pytest itself.
+
+It provides an empty directory where pytest can be executed in isolation, and contains facilities
+to write tests and configuration files and to match against expected output.
+
+To use it, include in your topmost ``conftest.py`` file:
+
+.. code-block:: python
+
+ pytest_plugins = "pytester"
+
+
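+A minimal sketch of a test that uses :fixture:`pytester`, assuming ``pytest_plugins = "pytester"``
+is enabled as shown above (the generated test body is purely illustrative):
+
+.. code-block:: python
+
+    def test_passing_assertion(pytester):
+        # Write a throwaway test file into the isolated directory and run pytest on it.
+        pytester.makepyfile(
+            """
+            def test_foo():
+                assert True
+            """
+        )
+        result = pytester.runpytest()
+        result.assert_outcomes(passed=1)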
+
+.. autoclass:: pytest.Pytester()
+ :members:
+
+.. autoclass:: pytest.RunResult()
+ :members:
+
+.. autoclass:: pytest.LineMatcher()
+ :members:
+ :special-members: __str__
+
+.. autoclass:: pytest.HookRecorder()
+ :members:
+
+.. autoclass:: pytest.RecordedHookCall()
+ :members:
+
+.. fixture:: testdir
+
+testdir
+~~~~~~~
+
+Identical to :fixture:`pytester`, but provides an instance whose methods return
+legacy ``py.path.local`` objects instead when applicable.
+
+New code should avoid using :fixture:`testdir` in favor of :fixture:`pytester`.
+
+.. autoclass:: pytest.Testdir()
+ :members:
+
+
+.. fixture:: recwarn
+
+recwarn
+~~~~~~~
+
+**Tutorial**: :ref:`assertwarnings`
+
+.. autofunction:: _pytest.recwarn.recwarn()
+ :no-auto-options:
+
+.. autoclass:: pytest.WarningsRecorder()
+ :members:
+
+Each recorded warning is an instance of :class:`warnings.WarningMessage`.
+
+.. note::
+ ``DeprecationWarning`` and ``PendingDeprecationWarning`` are treated
+ differently; see :ref:`ensuring_function_triggers`.
+
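+A minimal sketch of using ``recwarn`` (assumes the test module imports :mod:`warnings`):
+
+.. code-block:: python
+
+    def test_hello_warning(recwarn):
+        warnings.warn("hello", UserWarning)
+        assert len(recwarn) == 1
+        w = recwarn.pop(UserWarning)
+        assert str(w.message) == "hello"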
+
+.. fixture:: tmp_path
+
+tmp_path
+~~~~~~~~
+
+:ref:`tmp_path`
+
+.. autofunction:: _pytest.tmpdir.tmp_path()
+ :no-auto-options:
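+
+    A minimal usage sketch (the file name is only illustrative):
+
+    .. code-block:: python
+
+        def test_create_file(tmp_path):
+            p = tmp_path / "hello.txt"
+            p.write_text("content")
+            assert p.read_text() == "content"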
+
+
+.. fixture:: tmp_path_factory
+
+tmp_path_factory
+~~~~~~~~~~~~~~~~
+
+:ref:`tmp_path_factory example`
+
+.. _`tmp_path_factory factory api`:
+
+``tmp_path_factory`` is an instance of :class:`~pytest.TempPathFactory`:
+
+.. autoclass:: pytest.TempPathFactory()
+ :members:
+
+
+.. fixture:: tmpdir
+
+tmpdir
+~~~~~~
+
+:ref:`tmpdir and tmpdir_factory`
+
+.. autofunction:: _pytest.legacypath.LegacyTmpdirPlugin.tmpdir()
+ :no-auto-options:
+
+
+.. fixture:: tmpdir_factory
+
+tmpdir_factory
+~~~~~~~~~~~~~~
+
+:ref:`tmpdir and tmpdir_factory`
+
+``tmpdir_factory`` is an instance of :class:`~pytest.TempdirFactory`:
+
+.. autoclass:: pytest.TempdirFactory()
+ :members:
+
+
+.. _`hook-reference`:
+
+Hooks
+-----
+
+:ref:`writing-plugins`.
+
+.. currentmodule:: _pytest.hookspec
+
+Reference to all hooks which can be implemented by :ref:`conftest.py files <localplugin>` and :ref:`plugins <plugins>`.
+
+Bootstrapping hooks
+~~~~~~~~~~~~~~~~~~~
+
+Bootstrapping hooks called for plugins registered early enough (internal and setuptools plugins).
+
+.. hook:: pytest_load_initial_conftests
+.. autofunction:: pytest_load_initial_conftests
+.. hook:: pytest_cmdline_preparse
+.. autofunction:: pytest_cmdline_preparse
+.. hook:: pytest_cmdline_parse
+.. autofunction:: pytest_cmdline_parse
+.. hook:: pytest_cmdline_main
+.. autofunction:: pytest_cmdline_main
+
+.. _`initialization-hooks`:
+
+Initialization hooks
+~~~~~~~~~~~~~~~~~~~~
+
+Initialization hooks called for plugins and ``conftest.py`` files.
+
+.. hook:: pytest_addoption
+.. autofunction:: pytest_addoption
+.. hook:: pytest_addhooks
+.. autofunction:: pytest_addhooks
+.. hook:: pytest_configure
+.. autofunction:: pytest_configure
+.. hook:: pytest_unconfigure
+.. autofunction:: pytest_unconfigure
+.. hook:: pytest_sessionstart
+.. autofunction:: pytest_sessionstart
+.. hook:: pytest_sessionfinish
+.. autofunction:: pytest_sessionfinish
+
+.. hook:: pytest_plugin_registered
+.. autofunction:: pytest_plugin_registered
+
+Collection hooks
+~~~~~~~~~~~~~~~~
+
+``pytest`` calls the following hooks for collecting files and directories:
+
+.. hook:: pytest_collection
+.. autofunction:: pytest_collection
+.. hook:: pytest_ignore_collect
+.. autofunction:: pytest_ignore_collect
+.. hook:: pytest_collect_file
+.. autofunction:: pytest_collect_file
+.. hook:: pytest_pycollect_makemodule
+.. autofunction:: pytest_pycollect_makemodule
+
+For influencing the collection of objects in Python modules
+you can use the following hooks:
+
+.. hook:: pytest_pycollect_makeitem
+.. autofunction:: pytest_pycollect_makeitem
+.. hook:: pytest_generate_tests
+.. autofunction:: pytest_generate_tests
+.. hook:: pytest_make_parametrize_id
+.. autofunction:: pytest_make_parametrize_id
+
+Hooks for influencing test skipping:
+
+.. hook:: pytest_markeval_namespace
+.. autofunction:: pytest_markeval_namespace
+
+After collection is complete, you can modify the order of
+the test items, delete them, or otherwise amend them:
+
+.. hook:: pytest_collection_modifyitems
+.. autofunction:: pytest_collection_modifyitems
+
+.. note::
+ If this hook is implemented in ``conftest.py`` files, it always receives all collected items, not only those
+ under the ``conftest.py`` where it is implemented.
+
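+A minimal ``conftest.py`` sketch that reverses the order of the collected items
+(purely illustrative; the hook must modify ``items`` in place):
+
+.. code-block:: python
+
+    def pytest_collection_modifyitems(session, config, items):
+        items.reverse()
+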
+.. autofunction:: pytest_collection_finish
+
+Test running (runtest) hooks
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+All runtest-related hooks receive a :py:class:`pytest.Item <pytest.Item>` object.
+
+.. hook:: pytest_runtestloop
+.. autofunction:: pytest_runtestloop
+.. hook:: pytest_runtest_protocol
+.. autofunction:: pytest_runtest_protocol
+.. hook:: pytest_runtest_logstart
+.. autofunction:: pytest_runtest_logstart
+.. hook:: pytest_runtest_logfinish
+.. autofunction:: pytest_runtest_logfinish
+.. hook:: pytest_runtest_setup
+.. autofunction:: pytest_runtest_setup
+.. hook:: pytest_runtest_call
+.. autofunction:: pytest_runtest_call
+.. hook:: pytest_runtest_teardown
+.. autofunction:: pytest_runtest_teardown
+.. hook:: pytest_runtest_makereport
+.. autofunction:: pytest_runtest_makereport
+
+For a deeper understanding, you may look at the default implementation of
+these hooks in ``_pytest.runner``, and perhaps also at ``_pytest.pdb``,
+which interacts with ``_pytest.capture`` and its input/output capturing
+in order to drop into interactive debugging immediately when a test
+failure occurs.
+
+.. hook:: pytest_pyfunc_call
+.. autofunction:: pytest_pyfunc_call
+
+Reporting hooks
+~~~~~~~~~~~~~~~
+
+Session related reporting hooks:
+
+.. hook:: pytest_collectstart
+.. autofunction:: pytest_collectstart
+.. hook:: pytest_make_collect_report
+.. autofunction:: pytest_make_collect_report
+.. hook:: pytest_itemcollected
+.. autofunction:: pytest_itemcollected
+.. hook:: pytest_collectreport
+.. autofunction:: pytest_collectreport
+.. hook:: pytest_deselected
+.. autofunction:: pytest_deselected
+.. hook:: pytest_report_header
+.. autofunction:: pytest_report_header
+.. hook:: pytest_report_collectionfinish
+.. autofunction:: pytest_report_collectionfinish
+.. hook:: pytest_report_teststatus
+.. autofunction:: pytest_report_teststatus
+.. hook:: pytest_report_to_serializable
+.. autofunction:: pytest_report_to_serializable
+.. hook:: pytest_report_from_serializable
+.. autofunction:: pytest_report_from_serializable
+.. hook:: pytest_terminal_summary
+.. autofunction:: pytest_terminal_summary
+.. hook:: pytest_fixture_setup
+.. autofunction:: pytest_fixture_setup
+.. hook:: pytest_fixture_post_finalizer
+.. autofunction:: pytest_fixture_post_finalizer
+.. hook:: pytest_warning_captured
+.. autofunction:: pytest_warning_captured
+.. hook:: pytest_warning_recorded
+.. autofunction:: pytest_warning_recorded
+
+Central hook for reporting about test execution:
+
+.. hook:: pytest_runtest_logreport
+.. autofunction:: pytest_runtest_logreport
+
+Assertion related hooks:
+
+.. hook:: pytest_assertrepr_compare
+.. autofunction:: pytest_assertrepr_compare
+.. hook:: pytest_assertion_pass
+.. autofunction:: pytest_assertion_pass
+
+
+Debugging/Interaction hooks
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+There are a few hooks which can be used for special
+reporting or interaction with exceptions:
+
+.. hook:: pytest_internalerror
+.. autofunction:: pytest_internalerror
+.. hook:: pytest_keyboard_interrupt
+.. autofunction:: pytest_keyboard_interrupt
+.. hook:: pytest_exception_interact
+.. autofunction:: pytest_exception_interact
+.. hook:: pytest_enter_pdb
+.. autofunction:: pytest_enter_pdb
+.. hook:: pytest_leave_pdb
+.. autofunction:: pytest_leave_pdb
+
+
+Objects
+-------
+
+Full reference to objects accessible from :ref:`fixtures <fixture>` or :ref:`hooks <hook-reference>`.
+
+
+CallInfo
+~~~~~~~~
+
+.. autoclass:: pytest.CallInfo()
+ :members:
+
+
+Class
+~~~~~
+
+.. autoclass:: pytest.Class()
+ :members:
+ :show-inheritance:
+
+Collector
+~~~~~~~~~
+
+.. autoclass:: pytest.Collector()
+ :members:
+ :show-inheritance:
+
+CollectReport
+~~~~~~~~~~~~~
+
+.. autoclass:: pytest.CollectReport()
+ :members:
+ :show-inheritance:
+ :inherited-members:
+
+Config
+~~~~~~
+
+.. autoclass:: pytest.Config()
+ :members:
+
+ExceptionInfo
+~~~~~~~~~~~~~
+
+.. autoclass:: pytest.ExceptionInfo()
+ :members:
+
+
+ExitCode
+~~~~~~~~
+
+.. autoclass:: pytest.ExitCode
+ :members:
+
+File
+~~~~
+
+.. autoclass:: pytest.File()
+ :members:
+ :show-inheritance:
+
+
+FixtureDef
+~~~~~~~~~~
+
+.. autoclass:: _pytest.fixtures.FixtureDef()
+ :members:
+ :show-inheritance:
+
+FSCollector
+~~~~~~~~~~~
+
+.. autoclass:: _pytest.nodes.FSCollector()
+ :members:
+ :show-inheritance:
+
+Function
+~~~~~~~~
+
+.. autoclass:: pytest.Function()
+ :members:
+ :show-inheritance:
+
+FunctionDefinition
+~~~~~~~~~~~~~~~~~~
+
+.. autoclass:: _pytest.python.FunctionDefinition()
+ :members:
+ :show-inheritance:
+
+Item
+~~~~
+
+.. autoclass:: pytest.Item()
+ :members:
+ :show-inheritance:
+
+MarkDecorator
+~~~~~~~~~~~~~
+
+.. autoclass:: pytest.MarkDecorator()
+ :members:
+
+
+MarkGenerator
+~~~~~~~~~~~~~
+
+.. autoclass:: pytest.MarkGenerator()
+ :members:
+
+
+Mark
+~~~~
+
+.. autoclass:: pytest.Mark()
+ :members:
+
+
+Metafunc
+~~~~~~~~
+
+.. autoclass:: pytest.Metafunc()
+ :members:
+
+Module
+~~~~~~
+
+.. autoclass:: pytest.Module()
+ :members:
+ :show-inheritance:
+
+Node
+~~~~
+
+.. autoclass:: _pytest.nodes.Node()
+ :members:
+
+Parser
+~~~~~~
+
+.. autoclass:: pytest.Parser()
+ :members:
+
+OptionGroup
+~~~~~~~~~~~
+
+.. autoclass:: pytest.OptionGroup()
+ :members:
+
+PytestPluginManager
+~~~~~~~~~~~~~~~~~~~
+
+.. autoclass:: pytest.PytestPluginManager()
+ :members:
+ :undoc-members:
+ :inherited-members:
+ :show-inheritance:
+
+Session
+~~~~~~~
+
+.. autoclass:: pytest.Session()
+ :members:
+ :show-inheritance:
+
+TestReport
+~~~~~~~~~~
+
+.. autoclass:: pytest.TestReport()
+ :members:
+ :show-inheritance:
+ :inherited-members:
+
+_Result
+~~~~~~~
+
+Result object used within :ref:`hook wrappers <hookwrapper>`, see :py:class:`_Result in the pluggy documentation <pluggy._callers._Result>` for more information.
+
+Stash
+~~~~~
+
+.. autoclass:: pytest.Stash
+ :special-members: __setitem__, __getitem__, __delitem__, __contains__, __len__
+ :members:
+
+.. autoclass:: pytest.StashKey
+ :show-inheritance:
+ :members:
+
+
+Global Variables
+----------------
+
+pytest treats some global variables in a special manner when defined in test modules or
+``conftest.py`` files.
+
+
+.. globalvar:: collect_ignore
+
+**Tutorial**: :ref:`customizing-test-collection`
+
+Can be declared in *conftest.py files* to exclude test directories or modules.
+Needs to be a list of paths (``str``, :class:`pathlib.Path` or any :class:`os.PathLike`).
+
+.. code-block:: python
+
+ collect_ignore = ["setup.py"]
+
+
+.. globalvar:: collect_ignore_glob
+
+**Tutorial**: :ref:`customizing-test-collection`
+
+Can be declared in *conftest.py files* to exclude test directories or modules
+with Unix shell-style wildcards. Needs to be ``list[str]`` where ``str`` can
+contain glob patterns.
+
+.. code-block:: python
+
+ collect_ignore_glob = ["*_ignore.py"]
+
+
+.. globalvar:: pytest_plugins
+
+**Tutorial**: :ref:`available installable plugins`
+
+Can be declared at the **global** level in *test modules* and *conftest.py files* to register additional plugins.
+Can be either a ``str`` or ``Sequence[str]``.
+
+.. code-block:: python
+
+ pytest_plugins = "myapp.testsupport.myplugin"
+
+.. code-block:: python
+
+ pytest_plugins = ("myapp.testsupport.tools", "myapp.testsupport.regression")
+
+
+.. globalvar:: pytestmark
+
+**Tutorial**: :ref:`scoped-marking`
+
+Can be declared at the **global** level in *test modules* to apply one or more :ref:`marks <marks ref>` to all
+test functions and methods. Can be either a single mark or a list of marks (applied in left-to-right order).
+
+.. code-block:: python
+
+ import pytest
+
+ pytestmark = pytest.mark.webtest
+
+
+.. code-block:: python
+
+ import pytest
+
+ pytestmark = [pytest.mark.integration, pytest.mark.slow]
+
+
+Environment Variables
+---------------------
+
+Environment variables that can be used to change pytest's behavior.
+
+.. envvar:: PYTEST_ADDOPTS
+
+This contains a command line (parsed by the :py:mod:`shlex` module) that will be **prepended** to the command line given
+by the user; see :ref:`adding default options` for more information.
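+
+For example (the options shown are only illustrative):
+
+.. code-block:: bash
+
+    export PYTEST_ADDOPTS="-v --maxfail=2"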
+
+.. envvar:: PYTEST_CURRENT_TEST
+
+This is not meant to be set by users, but is set by pytest internally with the name of the current test so other
+processes can inspect it, see :ref:`pytest current test env` for more information.
+
+.. envvar:: PYTEST_DEBUG
+
+When set, pytest will print tracing and debug information.
+
+.. envvar:: PYTEST_DISABLE_PLUGIN_AUTOLOAD
+
+When set, disables plugin auto-loading through setuptools entrypoints. Only explicitly specified plugins will be
+loaded.
+
+.. envvar:: PYTEST_PLUGINS
+
+Contains a comma-separated list of modules that should be loaded as plugins:
+
+.. code-block:: bash
+
+ export PYTEST_PLUGINS=mymodule.plugin,xdist
+
+.. envvar:: PYTEST_THEME
+
+Sets a `pygments style <https://pygments.org/docs/styles/>`_ to use for the code output.
+
+.. envvar:: PYTEST_THEME_MODE
+
+Sets the :envvar:`PYTEST_THEME` to be either *dark* or *light*.
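+
+For example (``solarized-dark`` is just one of the available pygments styles):
+
+.. code-block:: bash
+
+    export PYTEST_THEME=solarized-dark
+    export PYTEST_THEME_MODE=dark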
+
+.. envvar:: PY_COLORS
+
+When set to ``1``, pytest will use color in terminal output.
+When set to ``0``, pytest will not use color.
+``PY_COLORS`` takes precedence over ``NO_COLOR`` and ``FORCE_COLOR``.
+
+.. envvar:: NO_COLOR
+
+When set (regardless of value), pytest will not use color in terminal output.
+``PY_COLORS`` takes precedence over ``NO_COLOR``, which takes precedence over ``FORCE_COLOR``.
+See `no-color.org <https://no-color.org/>`__ for other libraries supporting this community standard.
+
+.. envvar:: FORCE_COLOR
+
+When set (regardless of value), pytest will use color in terminal output.
+``PY_COLORS`` and ``NO_COLOR`` take precedence over ``FORCE_COLOR``.
+
+Exceptions
+----------
+
+.. autoclass:: pytest.UsageError()
+ :show-inheritance:
+
+.. _`warnings ref`:
+
+Warnings
+--------
+
+Custom warnings generated in some situations such as improper usage or deprecated features.
+
+.. autoclass:: pytest.PytestWarning
+ :show-inheritance:
+
+.. autoclass:: pytest.PytestAssertRewriteWarning
+ :show-inheritance:
+
+.. autoclass:: pytest.PytestCacheWarning
+ :show-inheritance:
+
+.. autoclass:: pytest.PytestCollectionWarning
+ :show-inheritance:
+
+.. autoclass:: pytest.PytestConfigWarning
+ :show-inheritance:
+
+.. autoclass:: pytest.PytestDeprecationWarning
+ :show-inheritance:
+
+.. autoclass:: pytest.PytestExperimentalApiWarning
+ :show-inheritance:
+
+.. autoclass:: pytest.PytestUnhandledCoroutineWarning
+ :show-inheritance:
+
+.. autoclass:: pytest.PytestUnknownMarkWarning
+ :show-inheritance:
+
+.. autoclass:: pytest.PytestUnraisableExceptionWarning
+ :show-inheritance:
+
+.. autoclass:: pytest.PytestUnhandledThreadExceptionWarning
+ :show-inheritance:
+
+
+Consult the :ref:`internal-warnings` section in the documentation for more information.
+
+
+.. _`ini options ref`:
+
+Configuration Options
+---------------------
+
+Here is a list of builtin configuration options that may be written in a ``pytest.ini``, ``pyproject.toml``, ``tox.ini`` or ``setup.cfg``
+file, usually located at the root of your repository. To see each file format in detail, see
+:ref:`config file formats`.
+
+.. warning::
+    Usage of ``setup.cfg`` is not recommended except for very simple use cases. ``.cfg``
+    files use a different parser than ``pytest.ini`` and ``tox.ini``, which might cause
+    hard-to-track-down problems.
+    When possible, it is recommended to use the latter files, or ``pyproject.toml``, to hold your pytest configuration.
+
+Configuration options may be overridden on the command line by using ``-o/--override-ini``, which can also be
+passed multiple times. The expected format is ``name=value``. For example::
+
+ pytest -o console_output_style=classic -o cache_dir=/tmp/mycache
+
+
+.. confval:: addopts
+
+ Add the specified ``OPTS`` to the set of command line arguments as if they
+ had been specified by the user. Example: if you have this ini file content:
+
+ .. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ addopts = --maxfail=2 -rf # exit after 2 failures, report fail info
+
+ issuing ``pytest test_hello.py`` actually means:
+
+ .. code-block:: bash
+
+ pytest --maxfail=2 -rf test_hello.py
+
+ Default is to add no options.
+
+
+.. confval:: cache_dir
+
+    Sets the directory where the cache plugin stores its content. The default directory is
+    ``.pytest_cache``, which is created in the :ref:`rootdir <rootdir>`. The directory may be
+    a relative or an absolute path; a relative path is created relative to the
+    :ref:`rootdir <rootdir>`. Additionally, the path may contain environment
+    variables, which will be expanded. For more information about the cache plugin,
+    please refer to :ref:`cache_provider`.
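+
+    For example (the path shown is only illustrative):
+
+    .. code-block:: ini
+
+        [pytest]
+        cache_dir = .cache/pytest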
+
+.. confval:: console_output_style
+
+ Sets the console output style while running tests:
+
+ * ``classic``: classic pytest output.
+ * ``progress``: like classic pytest output, but with a progress indicator.
+ * ``count``: like progress, but shows progress as the number of tests completed instead of a percent.
+
+    The default is ``progress``, but you can fall back to ``classic`` if you prefer or if
+    the new mode is causing unexpected problems:
+
+ .. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ console_output_style = classic
+
+
+.. confval:: doctest_encoding
+
+
+
+ Default encoding to use to decode text files with docstrings.
+ :ref:`See how pytest handles doctests <doctest>`.
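+
+    For example (assuming your doctest files are stored as Latin-1):
+
+    .. code-block:: ini
+
+        [pytest]
+        doctest_encoding = latin1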
+
+
+.. confval:: doctest_optionflags
+
+ One or more doctest flag names from the standard ``doctest`` module.
+ :ref:`See how pytest handles doctests <doctest>`.
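+
+    For example (``ELLIPSIS`` and ``NORMALIZE_WHITESPACE`` are standard flags from the
+    :mod:`doctest` module):
+
+    .. code-block:: ini
+
+        [pytest]
+        doctest_optionflags = ELLIPSIS NORMALIZE_WHITESPACE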
+
+
+.. confval:: empty_parameter_set_mark
+
+
+
+    Allows picking the action for empty parameter sets during parametrization:
+
+    * ``skip`` skips tests with an empty parameter set (default)
+    * ``xfail`` marks tests with an empty parameter set as ``xfail(run=False)``
+    * ``fail_at_collect`` raises an exception if parametrize collects an empty parameter set
+
+ .. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ empty_parameter_set_mark = xfail
+
+ .. note::
+
+ The default value of this option is planned to change to ``xfail`` in future releases
+ as this is considered less error prone, see :issue:`3155` for more details.
+
+
+.. confval:: faulthandler_timeout
+
+ Dumps the tracebacks of all threads if a test takes longer than ``X`` seconds to run (including
+ fixture setup and teardown). Implemented using the :func:`faulthandler.dump_traceback_later` function,
+ so all caveats there apply.
+
+ .. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ faulthandler_timeout=5
+
+ For more information please refer to :ref:`faulthandler`.
+
+.. confval:: filterwarnings
+
+
+
+ Sets a list of filters and actions that should be taken for matched
+ warnings. By default all warnings emitted during the test session
+ will be displayed in a summary at the end of the test session.
+
+ .. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ filterwarnings =
+ error
+ ignore::DeprecationWarning
+
+ This tells pytest to ignore deprecation warnings and turn all other warnings
+ into errors. For more information please refer to :ref:`warnings`.
+
+
+.. confval:: junit_duration_report
+
+ .. versionadded:: 4.1
+
+ Configures how durations are recorded into the JUnit XML report:
+
+ * ``total`` (the default): duration times reported include setup, call, and teardown times.
+ * ``call``: duration times reported include only call times, excluding setup and teardown.
+
+ .. code-block:: ini
+
+ [pytest]
+ junit_duration_report = call
+
+
+.. confval:: junit_family
+
+ .. versionadded:: 4.2
+ .. versionchanged:: 6.1
+ Default changed to ``xunit2``.
+
+ Configures the format of the generated JUnit XML file. The possible options are:
+
+ * ``xunit1`` (or ``legacy``): produces old style output, compatible with the xunit 1.0 format.
+ * ``xunit2``: produces `xunit 2.0 style output <https://github.com/jenkinsci/xunit-plugin/blob/xunit-2.3.2/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd>`__, which should be more compatible with latest Jenkins versions. **This is the default**.
+
+ .. code-block:: ini
+
+ [pytest]
+ junit_family = xunit2
+
+
+.. confval:: junit_logging
+
+ .. versionadded:: 3.5
+ .. versionchanged:: 5.4
+ ``log``, ``all``, ``out-err`` options added.
+
+ Configures if captured output should be written to the JUnit XML file. Valid values are:
+
+ * ``log``: write only ``logging`` captured output.
+ * ``system-out``: write captured ``stdout`` contents.
+ * ``system-err``: write captured ``stderr`` contents.
+ * ``out-err``: write both captured ``stdout`` and ``stderr`` contents.
+ * ``all``: write captured ``logging``, ``stdout`` and ``stderr`` contents.
+ * ``no`` (the default): no captured output is written.
+
+ .. code-block:: ini
+
+ [pytest]
+ junit_logging = system-out
+
+
+.. confval:: junit_log_passing_tests
+
+ .. versionadded:: 4.6
+
+ If ``junit_logging != "no"``, configures if the captured output should be written
+ to the JUnit XML file for **passing** tests. Default is ``True``.
+
+ .. code-block:: ini
+
+ [pytest]
+ junit_log_passing_tests = False
+
+
+.. confval:: junit_suite_name
+
+ To set the name of the root test suite xml item, you can configure the ``junit_suite_name`` option in your config file:
+
+ .. code-block:: ini
+
+ [pytest]
+ junit_suite_name = my_suite
+
+.. confval:: log_auto_indent
+
+ Allow selective auto-indentation of multiline log messages.
+
+ Supports command line option ``--log-auto-indent [value]``
+ and config option ``log_auto_indent = [value]`` to set the
+ auto-indentation behavior for all logging.
+
+ ``[value]`` can be:
+ * True or "On" - Dynamically auto-indent multiline log messages
+ * False or "Off" or 0 - Do not auto-indent multiline log messages (the default behavior)
+ * [positive integer] - auto-indent multiline log messages by [value] spaces
+
+ .. code-block:: ini
+
+ [pytest]
+ log_auto_indent = False
+
+ Supports passing kwarg ``extra={"auto_indent": [value]}`` to
+ calls to ``logging.log()`` to specify auto-indentation behavior for
+    a specific entry in the log. The ``extra`` kwarg overrides the value specified
+ on the command line or in the config.
+
+.. confval:: log_cli
+
+ Enable log display during test run (also known as :ref:`"live logging" <live_logs>`).
+ The default is ``False``.
+
+ .. code-block:: ini
+
+ [pytest]
+ log_cli = True
+
+.. confval:: log_cli_date_format
+
+
+
+ Sets a :py:func:`time.strftime`-compatible string that will be used when formatting dates for live logging.
+
+ .. code-block:: ini
+
+ [pytest]
+ log_cli_date_format = %Y-%m-%d %H:%M:%S
+
+ For more information, see :ref:`live_logs`.
+
+.. confval:: log_cli_format
+
+
+
+ Sets a :py:mod:`logging`-compatible string used to format live logging messages.
+
+ .. code-block:: ini
+
+ [pytest]
+ log_cli_format = %(asctime)s %(levelname)s %(message)s
+
+ For more information, see :ref:`live_logs`.
+
+
+.. confval:: log_cli_level
+
+
+
+ Sets the minimum log message level that should be captured for live logging. The integer value or
+ the names of the levels can be used.
+
+ .. code-block:: ini
+
+ [pytest]
+ log_cli_level = INFO
+
+ For more information, see :ref:`live_logs`.
+
+
+.. confval:: log_date_format
+
+
+
+ Sets a :py:func:`time.strftime`-compatible string that will be used when formatting dates for logging capture.
+
+ .. code-block:: ini
+
+ [pytest]
+ log_date_format = %Y-%m-%d %H:%M:%S
+
+ For more information, see :ref:`logging`.
+
+
+.. confval:: log_file
+
+
+
+    Sets a file name, relative to the ``pytest.ini`` file, to which log messages should be written, in addition
+ to the other logging facilities that are active.
+
+ .. code-block:: ini
+
+ [pytest]
+ log_file = logs/pytest-logs.txt
+
+ For more information, see :ref:`logging`.
+
+
+.. confval:: log_file_date_format
+
+
+
+ Sets a :py:func:`time.strftime`-compatible string that will be used when formatting dates for the logging file.
+
+ .. code-block:: ini
+
+ [pytest]
+ log_file_date_format = %Y-%m-%d %H:%M:%S
+
+ For more information, see :ref:`logging`.
+
+.. confval:: log_file_format
+
+
+
+ Sets a :py:mod:`logging`-compatible string used to format logging messages redirected to the logging file.
+
+ .. code-block:: ini
+
+ [pytest]
+ log_file_format = %(asctime)s %(levelname)s %(message)s
+
+ For more information, see :ref:`logging`.
+
+.. confval:: log_file_level
+
+
+
+ Sets the minimum log message level that should be captured for the logging file. The integer value or
+ the names of the levels can be used.
+
+ .. code-block:: ini
+
+ [pytest]
+ log_file_level = INFO
+
+ For more information, see :ref:`logging`.
+
+
+.. confval:: log_format
+
+
+
+ Sets a :py:mod:`logging`-compatible string used to format captured logging messages.
+
+ .. code-block:: ini
+
+ [pytest]
+ log_format = %(asctime)s %(levelname)s %(message)s
+
+ For more information, see :ref:`logging`.
+
+
+.. confval:: log_level
+
+
+
+ Sets the minimum log message level that should be captured for logging capture. The integer value or
+ the names of the levels can be used.
+
+ .. code-block:: ini
+
+ [pytest]
+ log_level = INFO
+
+ For more information, see :ref:`logging`.
+
+
+.. confval:: markers
+
+ When the ``--strict-markers`` or ``--strict`` command-line arguments are used,
+ only known markers - defined in code by core pytest or some plugin - are allowed.
+
+ You can list additional markers in this setting to add them to the whitelist,
+ in which case you probably want to add ``--strict-markers`` to ``addopts``
+ to avoid future regressions:
+
+ .. code-block:: ini
+
+ [pytest]
+ addopts = --strict-markers
+ markers =
+ slow
+ serial
+
+ .. note::
+ The use of ``--strict-markers`` is highly preferred. ``--strict`` was kept for
+ backward compatibility only and may be confusing for others as it only applies to
+ markers and not to other options.
+
+.. confval:: minversion
+
+ Specifies a minimal pytest version required for running tests.
+
+ .. code-block:: ini
+
+ # content of pytest.ini
+ [pytest]
+ minversion = 3.0 # will fail if we run with pytest-2.8
+
+
+.. confval:: norecursedirs
+
+ Set the directory basename patterns to avoid when recursing
+ for test discovery. The individual (fnmatch-style) patterns are
+    applied to the basename of a directory to decide whether to recurse into it.
+ Pattern matching characters::
+
+ * matches everything
+ ? matches any single character
+ [seq] matches any character in seq
+ [!seq] matches any char not in seq
+
+ Default patterns are ``'*.egg'``, ``'.*'``, ``'_darcs'``, ``'build'``,
+ ``'CVS'``, ``'dist'``, ``'node_modules'``, ``'venv'``, ``'{arch}'``.
+    Setting ``norecursedirs`` replaces the default. Here is an example of
+ how to avoid certain directories:
+
+ .. code-block:: ini
+
+ [pytest]
+ norecursedirs = .svn _build tmp*
+
+ This would tell ``pytest`` to not look into typical subversion or
+ sphinx-build directories or into any ``tmp`` prefixed directory.
+
+    Additionally, ``pytest`` will attempt to intelligently identify and ignore a
+    virtualenv by the presence of an activation script. Any directory deemed to
+    be the root of a virtual environment will not be considered during test
+    collection unless ``--collect-in-virtualenv`` is given. Note also that
+    ``norecursedirs`` takes precedence over ``--collect-in-virtualenv``; e.g. if
+    you intend to run tests in a virtualenv with a base directory that matches
+    ``'.*'`` you *must* override ``norecursedirs`` in addition to using the
+    ``--collect-in-virtualenv`` flag.
+
+
+.. confval:: python_classes
+
+ One or more name prefixes or glob-style patterns determining which classes
+ are considered for test collection. Search for multiple glob patterns by
+ adding a space between patterns. By default, pytest will consider any
+    class prefixed with ``Test`` as a collection of tests. Here is an example of how
+ to collect tests from classes that end in ``Suite``:
+
+ .. code-block:: ini
+
+ [pytest]
+ python_classes = *Suite
+
+ Note that ``unittest.TestCase`` derived classes are always collected
+ regardless of this option, as ``unittest``'s own collection framework is used
+ to collect those tests.
+
+
+.. confval:: python_files
+
+    One or more glob-style file patterns determining which Python files
+ are considered as test modules. Search for multiple glob patterns by
+ adding a space between patterns:
+
+ .. code-block:: ini
+
+ [pytest]
+ python_files = test_*.py check_*.py example_*.py
+
+ Or one per line:
+
+ .. code-block:: ini
+
+ [pytest]
+ python_files =
+ test_*.py
+ check_*.py
+ example_*.py
+
+ By default, files matching ``test_*.py`` and ``*_test.py`` will be considered
+ test modules.
+
+
+.. confval:: python_functions
+
+ One or more name prefixes or glob-patterns determining which test functions
+ and methods are considered tests. Search for multiple glob patterns by
+ adding a space between patterns. By default, pytest will consider any
+ function prefixed with ``test`` as a test. Here is an example of how
+ to collect test functions and methods that end in ``_test``:
+
+ .. code-block:: ini
+
+ [pytest]
+ python_functions = *_test
+
+ Note that this has no effect on methods that live on a ``unittest.TestCase``
+ derived class, as ``unittest``'s own collection framework is used
+ to collect those tests.
+
+ See :ref:`change naming conventions` for more detailed examples.
+
+
+.. confval:: pythonpath
+
+    Sets a list of directories that should be added to the Python search path.
+    Directories will be added to the head of :data:`sys.path`.
+    Similar to the :envvar:`PYTHONPATH` environment variable, the directories will be
+    included where Python looks for imported modules.
+ Paths are relative to the :ref:`rootdir <rootdir>` directory.
+ Directories remain in path for the duration of the test session.
+
+ .. code-block:: ini
+
+ [pytest]
+ pythonpath = src1 src2
+
+
+.. confval:: required_plugins
+
+    A space-separated list of plugins that must be present for pytest to run.
+    Plugins can be listed with or without version specifiers directly following
+    their name. Whitespace between different version specifiers is not allowed.
+    If any one of the plugins is not found, pytest will emit an error.
+
+ .. code-block:: ini
+
+ [pytest]
+ required_plugins = pytest-django>=3.0.0,<4.0.0 pytest-html pytest-xdist>=1.0.0
+
+
+.. confval:: testpaths
+
+
+
+    Sets a list of directories that should be searched for tests when
+    no specific directories, files or test ids are given on the command line and
+    pytest is executed from the :ref:`rootdir <rootdir>` directory.
+    Useful when all project tests are in a known location, to speed up
+ test collection and to avoid picking up undesired tests by accident.
+
+ .. code-block:: ini
+
+ [pytest]
+ testpaths = testing doc
+
+ This tells pytest to only look for tests in ``testing`` and ``doc``
+ directories when executing from the root directory.
+
+
+.. confval:: usefixtures
+
+    List of fixtures that will be applied to all test functions; this is semantically the same as applying
+    the ``@pytest.mark.usefixtures`` marker to all test functions.
+
+
+ .. code-block:: ini
+
+ [pytest]
+ usefixtures =
+ clean_db
+
+
+.. confval:: xfail_strict
+
+ If set to ``True``, tests marked with ``@pytest.mark.xfail`` that actually succeed will by default fail the
+ test suite.
+ For more information, see :ref:`xfail strict tutorial`.
+
+
+ .. code-block:: ini
+
+ [pytest]
+ xfail_strict = True
+
+
+.. _`command-line-flags`:
+
+Command-line Flags
+------------------
+
+All the command-line flags can be obtained by running ``pytest --help``::
+
+ $ pytest --help
+ usage: pytest [options] [file_or_dir] [file_or_dir] [...]
+
+ positional arguments:
+ file_or_dir
+
+ general:
+ -k EXPRESSION only run tests which match the given substring
+ expression. An expression is a python evaluatable
+ expression where all names are substring-matched
+ against test names and their parent classes.
+ Example: -k 'test_method or test_other' matches all
+ test functions and classes whose name contains
+ 'test_method' or 'test_other', while -k 'not
+ test_method' matches those that don't contain
+ 'test_method' in their names. -k 'not test_method
+ and not test_other' will eliminate the matches.
+ Additionally keywords are matched to classes and
+ functions containing extra names in their
+ 'extra_keyword_matches' set, as well as functions
+ which have names assigned directly to them. The
+ matching is case-insensitive.
+ -m MARKEXPR only run tests matching given mark expression.
+ For example: -m 'mark1 and not mark2'.
+ --markers show markers (builtin, plugin and per-project ones).
+ -x, --exitfirst exit instantly on first error or failed test.
+ --fixtures, --funcargs
+ show available fixtures, sorted by plugin appearance
+ (fixtures with leading '_' are only shown with '-v')
+ --fixtures-per-test show fixtures per test
+ --pdb start the interactive Python debugger on errors or
+ KeyboardInterrupt.
+ --pdbcls=modulename:classname
+ specify a custom interactive Python debugger for use
+ with --pdb.For example:
+ --pdbcls=IPython.terminal.debugger:TerminalPdb
+ --trace Immediately break when running each test.
+ --capture=method per-test capturing method: one of fd|sys|no|tee-sys.
+ -s shortcut for --capture=no.
+ --runxfail report the results of xfail tests as if they were
+ not marked
+ --lf, --last-failed rerun only the tests that failed at the last run (or
+ all if none failed)
+ --ff, --failed-first run all tests, but run the last failures first.
+ This may re-order tests and thus lead to repeated
+ fixture setup/teardown.
+ --nf, --new-first run tests from new files first, then the rest of the
+ tests sorted by file mtime
+ --cache-show=[CACHESHOW]
+ show cache contents, don't perform collection or
+ tests. Optional argument: glob (default: '*').
+ --cache-clear remove all cache contents at start of test run.
+ --lfnf={all,none}, --last-failed-no-failures={all,none}
+ which tests to run with no previously (known)
+ failures.
+ --sw, --stepwise exit on test failure and continue from last failing
+ test next time
+ --sw-skip, --stepwise-skip
+ ignore the first failing test but stop on the next
+ failing test.
+ implicitly enables --stepwise.
+
+ reporting:
+ --durations=N show N slowest setup/test durations (N=0 for all).
+ --durations-min=N Minimal duration in seconds for inclusion in slowest
+ list. Default 0.005
+ -v, --verbose increase verbosity.
+ --no-header disable header
+ --no-summary disable summary
+ -q, --quiet decrease verbosity.
+ --verbosity=VERBOSE set verbosity. Default is 0.
+ -r chars show extra test summary info as specified by chars:
+ (f)ailed, (E)rror, (s)kipped, (x)failed, (X)passed,
+ (p)assed, (P)assed with output, (a)ll except passed
+ (p/P), or (A)ll. (w)arnings are enabled by default
+ (see --disable-warnings), 'N' can be used to reset
+ the list. (default: 'fE').
+ --disable-warnings, --disable-pytest-warnings
+ disable warnings summary
+ -l, --showlocals show locals in tracebacks (disabled by default).
+ --tb=style traceback print mode
+ (auto/long/short/line/native/no).
+ --show-capture={no,stdout,stderr,log,all}
+ Controls how captured stdout/stderr/log is shown on
+ failed tests. Default is 'all'.
+ --full-trace don't cut any tracebacks (default is to cut).
+ --color=color color terminal output (yes/no/auto).
+ --code-highlight={yes,no}
+ Whether code should be highlighted (only if --color
+ is also enabled)
+ --pastebin=mode send failed|all info to bpaste.net pastebin service.
+ --junit-xml=path create junit-xml style report file at given path.
+ --junit-prefix=str prepend prefix to classnames in junit-xml output
+
+ pytest-warnings:
+ -W PYTHONWARNINGS, --pythonwarnings=PYTHONWARNINGS
+ set which warnings to report, see -W option of
+ python itself.
+ --maxfail=num exit after first num failures or errors.
+ --strict-config any warnings encountered while parsing the `pytest`
+ section of the configuration file raise errors.
+ --strict-markers markers not registered in the `markers` section of
+ the configuration file raise errors.
+ --strict (deprecated) alias to --strict-markers.
+ -c file load configuration from `file` instead of trying to
+ locate one of the implicit configuration files.
+ --continue-on-collection-errors
+ Force test execution even if collection errors
+ occur.
+ --rootdir=ROOTDIR Define root directory for tests. Can be relative
+ path: 'root_dir', './root_dir',
+ 'root_dir/another_dir/'; absolute path:
+ '/home/user/root_dir'; path with variables:
+ '$HOME/root_dir'.
+
+ collection:
+ --collect-only, --co only collect tests, don't execute them.
+ --pyargs try to interpret all arguments as python packages.
+ --ignore=path ignore path during collection (multi-allowed).
+ --ignore-glob=path ignore path pattern during collection (multi-
+ allowed).
+ --deselect=nodeid_prefix
+ deselect item (via node id prefix) during collection
+ (multi-allowed).
+ --confcutdir=dir only load conftest.py's relative to specified dir.
+ --noconftest Don't load any conftest.py files.
+ --keep-duplicates Keep duplicate tests.
+ --collect-in-virtualenv
+ Don't ignore tests in a local virtualenv directory
+ --import-mode={prepend,append,importlib}
+ prepend/append to sys.path when importing test
+ modules and conftest files, default is to prepend.
+ --doctest-modules run doctests in all .py modules
+ --doctest-report={none,cdiff,ndiff,udiff,only_first_failure}
+ choose another output format for diffs on doctest
+ failure
+ --doctest-glob=pat doctests file matching pattern, default: test*.txt
+ --doctest-ignore-import-errors
+ ignore doctest ImportErrors
+ --doctest-continue-on-failure
+ for a given doctest, continue to run after the first
+ failure
+
+ test session debugging and configuration:
+ --basetemp=dir base temporary directory for this test run.(warning:
+ this directory is removed if it exists)
+ -V, --version display pytest version and information about
+ plugins. When given twice, also display information
+ about plugins.
+ -h, --help show help message and configuration info
+ -p name early-load given plugin module name or entry point
+ (multi-allowed).
+ To avoid loading of plugins, use the `no:` prefix,
+ e.g. `no:doctest`.
+ --trace-config trace considerations of conftest.py files.
+ --debug=[DEBUG_FILE_NAME]
+ store internal tracing debug information in this log
+ file.
+ This file is opened with 'w' and truncated as a
+ result, care advised.
+ Defaults to 'pytestdebug.log'.
+ -o OVERRIDE_INI, --override-ini=OVERRIDE_INI
+ override ini option with "option=value" style, e.g.
+ `-o xfail_strict=True -o cache_dir=cache`.
+ --assert=MODE Control assertion debugging tools.
+ 'plain' performs no assertion debugging.
+ 'rewrite' (the default) rewrites assert statements
+ in test modules on import to provide assert
+ expression information.
+ --setup-only only setup fixtures, do not execute tests.
+ --setup-show show setup of fixtures while executing tests.
+ --setup-plan show what fixtures and tests would be executed but
+ don't execute anything.
+
+ logging:
+ --log-level=LEVEL level of messages to catch/display.
+ Not set by default, so it depends on the root/parent
+ log handler's effective level, where it is "WARNING"
+ by default.
+ --log-format=LOG_FORMAT
+ log format as used by the logging module.
+ --log-date-format=LOG_DATE_FORMAT
+ log date format as used by the logging module.
+ --log-cli-level=LOG_CLI_LEVEL
+ cli logging level.
+ --log-cli-format=LOG_CLI_FORMAT
+ log format as used by the logging module.
+ --log-cli-date-format=LOG_CLI_DATE_FORMAT
+ log date format as used by the logging module.
+ --log-file=LOG_FILE path to a file when logging will be written to.
+ --log-file-level=LOG_FILE_LEVEL
+ log file logging level.
+ --log-file-format=LOG_FILE_FORMAT
+ log format as used by the logging module.
+ --log-file-date-format=LOG_FILE_DATE_FORMAT
+ log date format as used by the logging module.
+ --log-auto-indent=LOG_AUTO_INDENT
+ Auto-indent multiline messages passed to the logging
+ module. Accepts true|on, false|off or an integer.
+
+ [pytest] ini-options in the first pytest.ini|tox.ini|setup.cfg file found:
+
+ markers (linelist): markers for test functions
+ empty_parameter_set_mark (string):
+ default marker for empty parametersets
+ norecursedirs (args): directory patterns to avoid for recursion
+ testpaths (args): directories to search for tests when no files or
+ directories are given in the command line.
+ filterwarnings (linelist):
+ Each line specifies a pattern for
+ warnings.filterwarnings. Processed after
+ -W/--pythonwarnings.
+ usefixtures (args): list of default fixtures to be used with this
+ project
+ python_files (args): glob-style file patterns for Python test module
+ discovery
+ python_classes (args):
+ prefixes or glob names for Python test class
+ discovery
+ python_functions (args):
+ prefixes or glob names for Python test function and
+ method discovery
+ disable_test_id_escaping_and_forfeit_all_rights_to_community_support (bool):
+ disable string escape non-ascii characters, might
+ cause unwanted side effects(use at your own risk)
+ console_output_style (string):
+ console output: "classic", or with additional
+ progress information ("progress" (percentage) |
+ "count").
+ xfail_strict (bool): default for the strict parameter of xfail markers
+ when not given explicitly (default: False)
+ enable_assertion_pass_hook (bool):
+ Enables the pytest_assertion_pass hook.Make sure to
+ delete any previously generated pyc cache files.
+ junit_suite_name (string):
+ Test suite name for JUnit report
+ junit_logging (string):
+ Write captured log messages to JUnit report: one of
+ no|log|system-out|system-err|out-err|all
+ junit_log_passing_tests (bool):
+ Capture log information for passing tests to JUnit
+ report:
+ junit_duration_report (string):
+ Duration time to report: one of total|call
+ junit_family (string):
+ Emit XML for schema: one of legacy|xunit1|xunit2
+ doctest_optionflags (args):
+ option flags for doctests
+ doctest_encoding (string):
+ encoding used for doctest files
+ cache_dir (string): cache directory path.
+ log_level (string): default value for --log-level
+ log_format (string): default value for --log-format
+ log_date_format (string):
+ default value for --log-date-format
+ log_cli (bool): enable log display during test run (also known as
+ "live logging").
+ log_cli_level (string):
+ default value for --log-cli-level
+ log_cli_format (string):
+ default value for --log-cli-format
+ log_cli_date_format (string):
+ default value for --log-cli-date-format
+ log_file (string): default value for --log-file
+ log_file_level (string):
+ default value for --log-file-level
+ log_file_format (string):
+ default value for --log-file-format
+ log_file_date_format (string):
+ default value for --log-file-date-format
+ log_auto_indent (string):
+ default value for --log-auto-indent
+ pythonpath (paths): Add paths to sys.path
+ faulthandler_timeout (string):
+ Dump the traceback of all threads if a test takes
+ more than TIMEOUT seconds to finish.
+ addopts (args): extra command line options
+ minversion (string): minimally required pytest version
+ required_plugins (args):
+ plugins that must be present for pytest to run
+
+ environment variables:
+ PYTEST_ADDOPTS extra command line options
+ PYTEST_PLUGINS comma-separated plugins to load during startup
+ PYTEST_DISABLE_PLUGIN_AUTOLOAD set to disable plugin auto-loading
+ PYTEST_DEBUG set to enable debug tracing of pytest's internals
+
+
+ to see available markers type: pytest --markers
+ to see available fixtures type: pytest --fixtures
+ (shown according to specified file_or_dir or current dir if not specified; fixtures with leading '_' are only shown with the '-v' option
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/requirements.txt b/testing/web-platform/tests/tools/third_party/pytest/doc/en/requirements.txt
new file mode 100644
index 0000000000..5b49cb7fcc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/requirements.txt
@@ -0,0 +1,7 @@
+pallets-sphinx-themes
+pluggy>=1.0
+pygments-pytest>=2.2.0
+sphinx-removed-in>=0.2.0
+sphinx>=3.1,<4
+sphinxcontrib-trio
+sphinxcontrib-svg2pdfconverter
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/sponsor.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/sponsor.rst
new file mode 100644
index 0000000000..8362a7f0a3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/sponsor.rst
@@ -0,0 +1,26 @@
+Sponsor
+=======
+
+pytest is maintained by a team of volunteers from all around the world in their free time. While
+we work on pytest because we love the project and use it daily in our own jobs, monetary
+compensation, when possible, is welcome to justify time away from friends, family and personal interests.
+
+Money is also used to fund local sprints, merchandise (for example, stickers to distribute at conferences),
+and, every few years, a large sprint involving all members.
+
+OpenCollective
+--------------
+
+`Open Collective`_ is an online funding platform for open and transparent communities.
+It provides tools to raise money and share your finances in full transparency.
+
+It is the platform of choice for individuals and companies that want to make one-time or
+monthly donations directly to the project.
+
+See more details in the `pytest collective`_.
+
+
+.. _Tidelift: https://tidelift.com
+.. _Tidelift subscription: https://tidelift.com/subscription/pkg/pypi-pytest
+.. _Open Collective: https://opencollective.com
+.. _pytest collective: https://opencollective.com/pytest
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/talks.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/talks.rst
new file mode 100644
index 0000000000..6843c82bab
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/talks.rst
@@ -0,0 +1,109 @@
+
+Talks and Tutorials
+==========================
+
+Books
+---------------------------------------------
+
+- `pytest Quick Start Guide, by Bruno Oliveira (2018)
+ <https://www.packtpub.com/web-development/pytest-quick-start-guide>`_.
+
+- `Python Testing with pytest, by Brian Okken (2017)
+ <https://pragprog.com/book/bopytest/python-testing-with-pytest>`_.
+
+Talks and blog postings
+---------------------------------------------
+
+- Webinar: `pytest: Test Driven Development für Python (German) <https://bruhin.software/ins-pytest/>`_, Florian Bruhin, via mylearning.ch, 2020
+
+- Webinar: `Simplify Your Tests with Fixtures <https://blog.jetbrains.com/pycharm/2020/08/webinar-recording-simplify-your-tests-with-fixtures-with-oliver-bestwalter/>`_, Oliver Bestwalter, via JetBrains, 2020
+
+- Training: `Introduction to pytest - simple, rapid and fun testing with Python <https://www.youtube.com/watch?v=CMuSn9cofbI>`_, Florian Bruhin, PyConDE 2019
+
+- Abridged metaprogramming classics - this episode: pytest, Oliver Bestwalter, PyConDE 2019 (`repository <https://github.com/obestwalter/abridged-meta-programming-classics>`__, `recording <https://www.youtube.com/watch?v=zHpeMTJsBRk&feature=youtu.be>`__)
+
+- Testing PySide/PyQt code easily using the pytest framework, Florian Bruhin, Qt World Summit 2019 (`slides <https://bruhin.software/talks/qtws19.pdf>`__, `recording <https://www.youtube.com/watch?v=zdsBS5BXGqQ>`__)
+
+- `pytest: recommendations, basic packages for testing in Python and Django, Andreu Vallbona, PyBCN June 2019 <https://www.slideshare.net/AndreuVallbonaPlazas/pybcn-pytest-recomendaciones-paquetes-bsicos-para-testing-en-python-y-django>`_.
+
+- pytest: recommendations, basic packages for testing in Python and Django, Andreu Vallbona, PyconES 2017 (`slides in english <http://talks.apsl.io/testing-pycones-2017/>`_, `video in spanish <https://www.youtube.com/watch?v=K20GeR-lXDk>`_)
+
+- `pytest advanced, Andrew Svetlov (Russian, PyCon Russia, 2016)
+ <https://www.youtube.com/watch?v=7KgihdKTWY4>`_.
+
+- `Pythonic testing, Igor Starikov (Russian, PyNsk, November 2016)
+ <https://www.youtube.com/watch?v=_92nfdd5nK8>`_.
+
+- `pytest - Rapid Simple Testing, Florian Bruhin, Swiss Python Summit 2016
+ <https://www.youtube.com/watch?v=rCBHkQ_LVIs>`_.
+
+- `Improve your testing with Pytest and Mock, Gabe Hollombe, PyCon SG 2015
+ <https://www.youtube.com/watch?v=RcN26hznmk4>`_.
+
+- `Introduction to pytest, Andreas Pelme, EuroPython 2014
+ <https://www.youtube.com/watch?v=LdVJj65ikRY>`_.
+
+- `Advanced Uses of py.test Fixtures, Floris Bruynooghe, EuroPython
+ 2014 <https://www.youtube.com/watch?v=IBC_dxr-4ps>`_.
+
+- `Why i use py.test and maybe you should too, Andy Todd, Pycon AU 2013
+ <https://www.youtube.com/watch?v=P-AhpukDIik>`_
+
+- `3-part blog series about pytest from @pydanny alias Daniel Greenfeld (January
+ 2014) <https://daniel.roygreenfeld.com/pytest-no-boilerplate-testing.html>`_
+
+- `pytest: helps you write better Django apps, Andreas Pelme, DjangoCon
+ Europe 2014 <https://www.youtube.com/watch?v=aaArYVh6XSM>`_.
+
+- `Testing Django Applications with pytest, Andreas Pelme, EuroPython
+ 2013 <https://www.youtube.com/watch?v=aUf8Fkb7TaY>`_.
+
+- `Testes pythonics com py.test, Vinicius Belchior Assef Neto, Plone
+ Conf 2013, Brazil <https://www.youtube.com/watch?v=QUKoq2K7bis>`_.
+
+- `Introduction to py.test fixtures, FOSDEM 2013, Floris Bruynooghe
+ <https://www.youtube.com/watch?v=bJhRW4eZMco>`_.
+
+- `pytest feature and release highlights, Holger Krekel (GERMAN, October 2013)
+ <http://pyvideo.org/video/2429/pytest-feature-and-new-release-highlights>`_
+
+- `pytest introduction from Brian Okken (January 2013)
+ <http://pythontesting.net/framework/pytest-introduction/>`_
+
+- pycon australia 2012 pytest talk from Brianna Laugher (`video <https://www.youtube.com/watch?v=DTNejE9EraI>`_, `slides <https://www.slideshare.net/pfctdayelise/funcargs-other-fun-with-pytest>`_, `code <https://gist.github.com/3386951>`_)
+- `pycon 2012 US talk video from Holger Krekel <https://www.youtube.com/watch?v=9LVqBQcFmyw>`_
+
+- `monkey patching done right`_ (blog post, consult `monkeypatch plugin`_ for up-to-date API)
+
+Test parametrization:
+
+- `generating parametrized tests with fixtures`_.
+- `test generators and cached setup`_
+- `parametrizing tests, generalized`_ (blog post)
+- `putting test-hooks into local or global plugins`_ (blog post)
+
+Assertion introspection:
+
+- `(07/2011) Behind the scenes of pytest's new assertion rewriting
+ <http://pybites.blogspot.com/2011/07/behind-scenes-of-pytests-new-assertion.html>`_
+
+Distributed testing:
+
+- `simultaneously test your code on all platforms`_ (blog entry)
+
+Plugin specific examples:
+
+- `skipping slow tests by default in pytest`_ (blog entry)
+
+- `many examples in the docs for plugins`_
+
+.. _`skipping slow tests by default in pytest`: http://bruynooghe.blogspot.com/2009/12/skipping-slow-test-by-default-in-pytest.html
+.. _`many examples in the docs for plugins`: plugins.html
+.. _`monkeypatch plugin`: monkeypatch.html
+.. _`application setup in test functions with fixtures`: fixture.html#interdependent-fixtures
+.. _`simultaneously test your code on all platforms`: https://tetamap.wordpress.com//2009/03/23/new-simultanously-test-your-code-on-all-platforms/
+.. _`monkey patching done right`: https://tetamap.wordpress.com//2009/03/03/monkeypatching-in-unit-tests-done-right/
+.. _`putting test-hooks into local or global plugins`: https://tetamap.wordpress.com/2009/05/14/putting-test-hooks-into-local-and-global-plugins/
+.. _`parametrizing tests, generalized`: https://tetamap.wordpress.com/2009/05/13/parametrizing-python-tests-generalized/
+.. _`generating parametrized tests with fixtures`: parametrize.html#test-generators
+.. _`test generators and cached setup`: http://bruynooghe.blogspot.com/2010/06/pytest-test-generators-and-cached-setup.html
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/tidelift.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/tidelift.rst
new file mode 100644
index 0000000000..8ce55e97b3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/tidelift.rst
@@ -0,0 +1,45 @@
+pytest for enterprise
+=====================
+
+`Tidelift`_ is working with the maintainers of pytest and thousands of other
+open source projects to deliver commercial support and maintenance for the open source dependencies you use
+to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the
+exact dependencies you use.
+
+`Get more details <https://tidelift.com/subscription/pkg/pypi-pytest?utm_source=pypi-pytest&utm_medium=referral&utm_campaign=enterprise>`_
+
+The Tidelift Subscription is a managed open source subscription for application dependencies covering millions of open source projects across JavaScript, Python, Java, PHP, Ruby, .NET, and more.
+
+Your subscription includes:
+
+* **Security updates**
+
+ - Tidelift's security response team coordinates patches for new breaking security vulnerabilities and alerts immediately through a private channel, so your software supply chain is always secure.
+
+* **Licensing verification and indemnification**
+
+ - Tidelift verifies license information to enable easy policy enforcement and adds intellectual property indemnification to cover creators and users in case something goes wrong. You always have a 100% up-to-date bill of materials for your dependencies to share with your legal team, customers, or partners.
+
+* **Maintenance and code improvement**
+
+ - Tidelift ensures the software you rely on keeps working as long as you need it to work. Your managed dependencies are actively maintained and we recruit additional maintainers where required.
+
+* **Package selection and version guidance**
+
+  - Tidelift helps you choose the best open source packages from the start, and then guides you through updates to stay on the best releases as new issues arise.
+
+* **Roadmap input**
+
+ - Take a seat at the table with the creators behind the software you use. Tidelift's participating maintainers earn more income as their software is used by more subscribers, so they're interested in knowing what you need.
+
+* **Tooling and cloud integration**
+
+ - Tidelift works with GitHub, GitLab, BitBucket, and every cloud platform (and other deployment targets, too).
+
+The end result? All of the capabilities you expect from commercial-grade software, for the full breadth of open
+source you use. That means less time grappling with esoteric open source trivia, and more time building your own
+applications—and your business.
+
+`Request a demo <https://tidelift.com/subscription/request-a-demo?utm_source=pypi-pytest&utm_medium=referral&utm_campaign=enterprise>`_
+
+.. _Tidelift: https://tidelift.com
diff --git a/testing/web-platform/tests/tools/third_party/pytest/doc/en/yieldfixture.rst b/testing/web-platform/tests/tools/third_party/pytest/doc/en/yieldfixture.rst
new file mode 100644
index 0000000000..47590f9db9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/doc/en/yieldfixture.rst
@@ -0,0 +1,18 @@
+:orphan:
+
+.. _yieldfixture:
+
+"yield_fixture" functions
+---------------------------------------------------------------
+
+
+
+
+
+.. important::
+ Since pytest-3.0, fixtures using the normal ``fixture`` decorator can use a ``yield``
+ statement to provide fixture values and execute teardown code, exactly like ``yield_fixture``
+ in previous versions.
+
+ Marking functions as ``yield_fixture`` is still supported, but deprecated and should not
+ be used in new code.
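# Illustrative sketch (not part of the patch above) of the behaviour the note
# describes: since pytest 3.0 a plain @pytest.fixture can ``yield`` its value
# and run teardown code after the yield, replacing @pytest.yield_fixture.
# The "resource" fixture and test below are hypothetical.
import pytest


@pytest.fixture
def resource():
    handle = {"open": True}   # setup: acquire some resource
    yield handle              # the yielded value is injected into the test
    handle["open"] = False    # teardown: runs after the test completes


def test_resource_is_open(resource):
    assert resource["open"]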
diff --git a/testing/web-platform/tests/tools/third_party/pytest/extra/get_issues.py b/testing/web-platform/tests/tools/third_party/pytest/extra/get_issues.py
new file mode 100644
index 0000000000..4aaa3c3ec3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/extra/get_issues.py
@@ -0,0 +1,85 @@
+import json
+from pathlib import Path
+
+import requests
+
+issues_url = "https://api.github.com/repos/pytest-dev/pytest/issues"
+
+
+def get_issues():
+ issues = []
+ url = issues_url
+ while True:
+ get_data = {"state": "all"}
+ r = requests.get(url, params=get_data)
+ data = r.json()
+ if r.status_code == 403:
+ # API request limit exceeded
+ print(data["message"])
+ exit(1)
+ issues.extend(data)
+
+ # Look for next page
+ links = requests.utils.parse_header_links(r.headers["Link"])
+ another_page = False
+ for link in links:
+ if link["rel"] == "next":
+ url = link["url"]
+ another_page = True
+ if not another_page:
+ return issues
+
+
+def main(args):
+ cachefile = Path(args.cache)
+ if not cachefile.exists() or args.refresh:
+ issues = get_issues()
+ cachefile.write_text(json.dumps(issues), "utf-8")
+ else:
+ issues = json.loads(cachefile.read_text("utf-8"))
+
+ open_issues = [x for x in issues if x["state"] == "open"]
+
+ open_issues.sort(key=lambda x: x["number"])
+ report(open_issues)
+
+
+def _get_kind(issue):
+ labels = [label["name"] for label in issue["labels"]]
+ for key in ("bug", "enhancement", "proposal"):
+ if key in labels:
+ return key
+ return "issue"
+
+
+def report(issues):
+ for issue in issues:
+ title = issue["title"]
+ # body = issue["body"]
+ kind = _get_kind(issue)
+ status = issue["state"]
+ number = issue["number"]
+ link = "https://github.com/pytest-dev/pytest/issues/%s/" % number
+ print("----")
+ print(status, kind, link)
+ print(title)
+ # print()
+ # lines = body.split("\n")
+ # print("\n".join(lines[:3]))
+ # if len(lines) > 3 or len(body) > 240:
+ # print("...")
+ print("\n\nFound %s open issues" % len(issues))
+
+
+if __name__ == "__main__":
+ import argparse
+
+ parser = argparse.ArgumentParser("process GitHub issues")
+ parser.add_argument(
+ "--refresh", action="store_true", help="invalidate cache, refresh issues"
+ )
+ parser.add_argument(
+ "--cache", action="store", default="issues.json", help="cache file"
+ )
+ args = parser.parse_args()
+ main(args)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/extra/setup-py.test/setup.py b/testing/web-platform/tests/tools/third_party/pytest/extra/setup-py.test/setup.py
new file mode 100644
index 0000000000..d0560ce1f5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/extra/setup-py.test/setup.py
@@ -0,0 +1,11 @@
+import sys
+from distutils.core import setup
+
+if __name__ == "__main__":
+ if "sdist" not in sys.argv[1:]:
+ raise ValueError("please use 'pytest' pypi package instead of 'py.test'")
+ setup(
+ name="py.test",
+ version="0.0",
+ description="please use 'pytest' for installation",
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest/pyproject.toml b/testing/web-platform/tests/tools/third_party/pytest/pyproject.toml
new file mode 100644
index 0000000000..5d32b755c7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/pyproject.toml
@@ -0,0 +1,116 @@
+[build-system]
+requires = [
+ # sync with setup.py until we discard non-pep-517/518
+ "setuptools>=45.0",
+ "setuptools-scm[toml]>=6.2.3",
+ "wheel",
+]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools_scm]
+write_to = "src/_pytest/_version.py"
+
+[tool.pytest.ini_options]
+minversion = "2.0"
+addopts = "-rfEX -p pytester --strict-markers"
+python_files = ["test_*.py", "*_test.py", "testing/python/*.py"]
+python_classes = ["Test", "Acceptance"]
+python_functions = ["test"]
+# NOTE: "doc" is not included here, but gets tested explicitly via "doctesting".
+testpaths = ["testing"]
+norecursedirs = ["testing/example_scripts"]
+xfail_strict = true
+filterwarnings = [
+ "error",
+ "default:Using or importing the ABCs:DeprecationWarning:unittest2.*",
+ # produced by older pyparsing<=2.2.0.
+ "default:Using or importing the ABCs:DeprecationWarning:pyparsing.*",
+ "default:the imp module is deprecated in favour of importlib:DeprecationWarning:nose.*",
+ # distutils is deprecated in 3.10, scheduled for removal in 3.12
+ "ignore:The distutils package is deprecated:DeprecationWarning",
+ # produced by python3.6/site.py itself (3.6.7 on Travis, could not trigger it with 3.6.8).
+ "ignore:.*U.*mode is deprecated:DeprecationWarning:(?!(pytest|_pytest))",
+ # produced by pytest-xdist
+ "ignore:.*type argument to addoption.*:DeprecationWarning",
+ # produced on execnet (pytest-xdist)
+ "ignore:.*inspect.getargspec.*deprecated, use inspect.signature.*:DeprecationWarning",
+ # pytest's own futurewarnings
+ "ignore::pytest.PytestExperimentalApiWarning",
+ # Do not cause SyntaxError for invalid escape sequences in py37.
+ # Those are caught/handled by pyupgrade, and not easy to filter with the
+ # module being the filename (with .py removed).
+ "default:invalid escape sequence:DeprecationWarning",
+ # ignore use of unregistered marks, because we use many to test the implementation
+ "ignore::_pytest.warning_types.PytestUnknownMarkWarning",
+ # https://github.com/benjaminp/six/issues/341
+ "ignore:_SixMetaPathImporter\\.exec_module\\(\\) not found; falling back to load_module\\(\\):ImportWarning",
+ # https://github.com/benjaminp/six/pull/352
+ "ignore:_SixMetaPathImporter\\.find_spec\\(\\) not found; falling back to find_module\\(\\):ImportWarning",
+ # https://github.com/pypa/setuptools/pull/2517
+ "ignore:VendorImporter\\.find_spec\\(\\) not found; falling back to find_module\\(\\):ImportWarning",
+ # https://github.com/pytest-dev/execnet/pull/127
+ "ignore:isSet\\(\\) is deprecated, use is_set\\(\\) instead:DeprecationWarning",
+]
+pytester_example_dir = "testing/example_scripts"
+markers = [
+ # dummy markers for testing
+ "foo",
+ "bar",
+ "baz",
+ # conftest.py reorders tests moving slow ones to the end of the list
+ "slow",
+ # experimental mark for all tests using pexpect
+ "uses_pexpect",
+]
+
+
+[tool.towncrier]
+package = "pytest"
+package_dir = "src"
+filename = "doc/en/changelog.rst"
+directory = "changelog/"
+title_format = "pytest {version} ({project_date})"
+template = "changelog/_template.rst"
+
+ [[tool.towncrier.type]]
+ directory = "breaking"
+ name = "Breaking Changes"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "deprecation"
+ name = "Deprecations"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "feature"
+ name = "Features"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "improvement"
+ name = "Improvements"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "bugfix"
+ name = "Bug Fixes"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "vendor"
+ name = "Vendored Libraries"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "doc"
+ name = "Improved Documentation"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "trivial"
+ name = "Trivial/Internal Changes"
+ showcontent = true
+
+[tool.black]
+target-version = ['py36']
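# Illustrative sketch (an assumption, not part of the patch above) of what the
# ``filterwarnings = ["error", ...]`` setting in the pyproject.toml above does:
# any warning not matched by a later "default:"/"ignore:" entry is promoted to
# an error and fails the test unless it is asserted explicitly.  The helper and
# test names below are made up.
import warnings

import pytest


def deprecated_helper():
    warnings.warn("old API, use new_api() instead", DeprecationWarning)


def test_deprecated_helper_still_warns():
    # Capturing the warning with pytest.warns keeps the test green even when
    # warnings are turned into errors by the configuration above.
    with pytest.warns(DeprecationWarning, match="old API"):
        deprecated_helper()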
diff --git a/testing/web-platform/tests/tools/third_party/pytest/scripts/prepare-release-pr.py b/testing/web-platform/tests/tools/third_party/pytest/scripts/prepare-release-pr.py
new file mode 100644
index 0000000000..7a80de7eda
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/scripts/prepare-release-pr.py
@@ -0,0 +1,174 @@
+"""
+This script is part of the pytest release process which is triggered manually in the Actions
+tab of the repository.
+
+The user will need to enter the base branch to start the release from (for example
+``6.1.x`` or ``main``) and if it should be a major release.
+
+The appropriate version will be obtained based on the given branch automatically.
+
+After that, it will create a release using the `release` tox environment, and push a new PR.
+
+**Token**: currently the token from the GitHub Actions is used, pushed with
+`pytest bot <pytestbot@gmail.com>` commit author.
+"""
+import argparse
+import re
+from pathlib import Path
+from subprocess import check_call
+from subprocess import check_output
+from subprocess import run
+
+from colorama import Fore
+from colorama import init
+from github3.repos import Repository
+
+
+class InvalidFeatureRelease(Exception):
+ pass
+
+
+SLUG = "pytest-dev/pytest"
+
+PR_BODY = """\
+Created automatically from manual trigger.
+
+Once all builds pass and it has been **approved** by one or more maintainers, the build
+can be released by pushing a tag `{version}` to this repository.
+"""
+
+
+def login(token: str) -> Repository:
+ import github3
+
+ github = github3.login(token=token)
+ owner, repo = SLUG.split("/")
+ return github.repository(owner, repo)
+
+
+def prepare_release_pr(
+ base_branch: str, is_major: bool, token: str, prerelease: str
+) -> None:
+ print()
+ print(f"Processing release for branch {Fore.CYAN}{base_branch}")
+
+ check_call(["git", "checkout", f"origin/{base_branch}"])
+
+ changelog = Path("changelog")
+
+ features = list(changelog.glob("*.feature.rst"))
+ breaking = list(changelog.glob("*.breaking.rst"))
+ is_feature_release = bool(features or breaking)
+
+ try:
+ version = find_next_version(
+ base_branch, is_major, is_feature_release, prerelease
+ )
+ except InvalidFeatureRelease as e:
+ print(f"{Fore.RED}{e}")
+ raise SystemExit(1)
+
+ print(f"Version: {Fore.CYAN}{version}")
+
+ release_branch = f"release-{version}"
+
+ run(
+ ["git", "config", "user.name", "pytest bot"],
+ check=True,
+ )
+ run(
+ ["git", "config", "user.email", "pytestbot@gmail.com"],
+ check=True,
+ )
+
+ run(
+ ["git", "checkout", "-b", release_branch, f"origin/{base_branch}"],
+ check=True,
+ )
+
+ print(f"Branch {Fore.CYAN}{release_branch}{Fore.RESET} created.")
+
+ if is_major:
+ template_name = "release.major.rst"
+ elif prerelease:
+ template_name = "release.pre.rst"
+ elif is_feature_release:
+ template_name = "release.minor.rst"
+ else:
+ template_name = "release.patch.rst"
+
+ # important to use tox here because we have changed branches, so dependencies
+ # might have changed as well
+ cmdline = [
+ "tox",
+ "-e",
+ "release",
+ "--",
+ version,
+ template_name,
+ release_branch, # doc_version
+ "--skip-check-links",
+ ]
+ print("Running", " ".join(cmdline))
+ run(
+ cmdline,
+ check=True,
+ )
+
+ oauth_url = f"https://{token}:x-oauth-basic@github.com/{SLUG}.git"
+ run(
+ ["git", "push", oauth_url, f"HEAD:{release_branch}", "--force"],
+ check=True,
+ )
+ print(f"Branch {Fore.CYAN}{release_branch}{Fore.RESET} pushed.")
+
+ body = PR_BODY.format(version=version)
+ repo = login(token)
+ pr = repo.create_pull(
+ f"Prepare release {version}",
+ base=base_branch,
+ head=release_branch,
+ body=body,
+ )
+ print(f"Pull request {Fore.CYAN}{pr.url}{Fore.RESET} created.")
+
+
+def find_next_version(
+ base_branch: str, is_major: bool, is_feature_release: bool, prerelease: str
+) -> str:
+ output = check_output(["git", "tag"], encoding="UTF-8")
+ valid_versions = []
+ for v in output.splitlines():
+ m = re.match(r"\d.\d.\d+$", v.strip())
+ if m:
+ valid_versions.append(tuple(int(x) for x in v.split(".")))
+
+ valid_versions.sort()
+ last_version = valid_versions[-1]
+
+ if is_major:
+ return f"{last_version[0]+1}.0.0{prerelease}"
+ elif is_feature_release:
+ return f"{last_version[0]}.{last_version[1] + 1}.0{prerelease}"
+ else:
+ return f"{last_version[0]}.{last_version[1]}.{last_version[2] + 1}{prerelease}"
+
+
+def main() -> None:
+ init(autoreset=True)
+ parser = argparse.ArgumentParser()
+ parser.add_argument("base_branch")
+ parser.add_argument("token")
+ parser.add_argument("--major", action="store_true", default=False)
+ parser.add_argument("--prerelease", default="")
+ options = parser.parse_args()
+ prepare_release_pr(
+ base_branch=options.base_branch,
+ is_major=options.major,
+ token=options.token,
+ prerelease=options.prerelease,
+ )
+
+
+if __name__ == "__main__":
+ main()
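# Illustrative sketch (not part of the patch above) of the bump rule that
# find_next_version() implements, shown on hand-written example versions so
# the major/feature/patch and prerelease handling is easy to follow.
def next_version(last, is_major, is_feature_release, prerelease=""):
    major, minor, patch = last
    if is_major:
        return f"{major + 1}.0.0{prerelease}"
    if is_feature_release:
        return f"{major}.{minor + 1}.0{prerelease}"
    return f"{major}.{minor}.{patch + 1}{prerelease}"


assert next_version((7, 0, 1), is_major=False, is_feature_release=False) == "7.0.2"
assert next_version((7, 0, 1), is_major=False, is_feature_release=True) == "7.1.0"
assert next_version((7, 1, 0), is_major=True, is_feature_release=False, prerelease="rc1") == "8.0.0rc1"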
diff --git a/testing/web-platform/tests/tools/third_party/pytest/scripts/publish-gh-release-notes.py b/testing/web-platform/tests/tools/third_party/pytest/scripts/publish-gh-release-notes.py
new file mode 100644
index 0000000000..68cbd7adff
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/scripts/publish-gh-release-notes.py
@@ -0,0 +1,102 @@
+"""
+Script used to publish GitHub release notes extracted from CHANGELOG.rst.
+
+This script is meant to be executed after a successful deployment in GitHub actions.
+
+Uses the following environment variables:
+
+* GIT_TAG: the name of the tag of the current commit.
+* GH_RELEASE_NOTES_TOKEN: a personal access token with 'repo' permissions.
+
+ Create one at:
+
+ https://github.com/settings/tokens
+
+ This token should be set in a secret in the repository, which is exposed as an
+ environment variable in the main.yml workflow file.
+
+The script also requires ``pandoc`` to be previously installed in the system.
+
+Requires Python3.6+.
+"""
+import os
+import re
+import sys
+from pathlib import Path
+
+import github3
+import pypandoc
+
+
+def publish_github_release(slug, token, tag_name, body):
+ github = github3.login(token=token)
+ owner, repo = slug.split("/")
+ repo = github.repository(owner, repo)
+ return repo.create_release(tag_name=tag_name, body=body)
+
+
+def parse_changelog(tag_name):
+ p = Path(__file__).parent.parent / "doc/en/changelog.rst"
+ changelog_lines = p.read_text(encoding="UTF-8").splitlines()
+
+ title_regex = re.compile(r"pytest (\d+\.\d+\.\d+) \(\d{4}-\d{2}-\d{2}\)")
+ consuming_version = False
+ version_lines = []
+ for line in changelog_lines:
+ m = title_regex.match(line)
+ if m:
+ # found the version we want: start to consume lines until we find the next version title
+ if m.group(1) == tag_name:
+ consuming_version = True
+ # found a new version title while parsing the version we want: break out
+ elif consuming_version:
+ break
+ if consuming_version:
+ version_lines.append(line)
+
+ return "\n".join(version_lines)
+
+
+def convert_rst_to_md(text):
+ return pypandoc.convert_text(
+ text, "md", format="rst", extra_args=["--wrap=preserve"]
+ )
+
+
+def main(argv):
+ if len(argv) > 1:
+ tag_name = argv[1]
+ else:
+ tag_name = os.environ.get("GITHUB_REF")
+ if not tag_name:
+ print("tag_name not given and $GITHUB_REF not set", file=sys.stderr)
+ return 1
+ if tag_name.startswith("refs/tags/"):
+ tag_name = tag_name[len("refs/tags/") :]
+
+ token = os.environ.get("GH_RELEASE_NOTES_TOKEN")
+ if not token:
+ print("GH_RELEASE_NOTES_TOKEN not set", file=sys.stderr)
+ return 1
+
+ slug = os.environ.get("GITHUB_REPOSITORY")
+ if not slug:
+ print("GITHUB_REPOSITORY not set", file=sys.stderr)
+ return 1
+
+ rst_body = parse_changelog(tag_name)
+ md_body = convert_rst_to_md(rst_body)
+ if not publish_github_release(slug, token, tag_name, md_body):
+ print("Could not publish release notes:", file=sys.stderr)
+ print(md_body, file=sys.stderr)
+ return 5
+
+ print()
+ print(f"Release notes for {tag_name} published successfully:")
+ print(f"https://github.com/{slug}/releases/tag/{tag_name}")
+ print()
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv))
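# Illustrative sketch (not part of the patch above) of the changelog slicing
# that parse_changelog() performs, run against a tiny hand-written CHANGELOG
# fragment: lines are collected from the wanted version title up to (but not
# including) the next version title.
import re

SAMPLE = """\
pytest 7.0.1 (2022-02-11)
=========================

Bug Fixes
---------

- Fixed something.

pytest 7.0.0 (2022-02-03)
=========================
"""

title_regex = re.compile(r"pytest (\d+\.\d+\.\d+) \(\d{4}-\d{2}-\d{2}\)")
wanted, consuming, version_lines = "7.0.1", False, []
for line in SAMPLE.splitlines():
    m = title_regex.match(line)
    if m:
        if m.group(1) == wanted:
            consuming = True
        elif consuming:
            break
    if consuming:
        version_lines.append(line)

assert version_lines[0].startswith("pytest 7.0.1")
assert not any("7.0.0" in line for line in version_lines)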
diff --git a/testing/web-platform/tests/tools/third_party/pytest/scripts/release.major.rst b/testing/web-platform/tests/tools/third_party/pytest/scripts/release.major.rst
new file mode 100644
index 0000000000..76e447f0c6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/scripts/release.major.rst
@@ -0,0 +1,24 @@
+pytest-{version}
+=======================================
+
+The pytest team is proud to announce the {version} release!
+
+This release contains new features, improvements, bug fixes, and breaking changes, so users
+are encouraged to take a look at the CHANGELOG carefully:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all of the contributors to this release:
+
+{contributors}
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/scripts/release.minor.rst b/testing/web-platform/tests/tools/third_party/pytest/scripts/release.minor.rst
new file mode 100644
index 0000000000..9a06d3d414
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/scripts/release.minor.rst
@@ -0,0 +1,24 @@
+pytest-{version}
+=======================================
+
+The pytest team is proud to announce the {version} release!
+
+This release contains new features, improvements, and bug fixes;
+the full list of changes is available in the changelog:
+
+ https://docs.pytest.org/en/stable/changelog.html
+
+For complete documentation, please visit:
+
+ https://docs.pytest.org/en/stable/
+
+As usual, you can upgrade from PyPI via:
+
+ pip install -U pytest
+
+Thanks to all of the contributors to this release:
+
+{contributors}
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/scripts/release.patch.rst b/testing/web-platform/tests/tools/third_party/pytest/scripts/release.patch.rst
new file mode 100644
index 0000000000..59fbe50ce0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/scripts/release.patch.rst
@@ -0,0 +1,17 @@
+pytest-{version}
+=======================================
+
+pytest {version} has just been released to PyPI.
+
+This is a bug-fix release and a drop-in replacement. To upgrade::
+
+ pip install --upgrade pytest
+
+The full changelog is available at https://docs.pytest.org/en/stable/changelog.html.
+
+Thanks to all of the contributors to this release:
+
+{contributors}
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/scripts/release.pre.rst b/testing/web-platform/tests/tools/third_party/pytest/scripts/release.pre.rst
new file mode 100644
index 0000000000..960fae7e4f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/scripts/release.pre.rst
@@ -0,0 +1,29 @@
+pytest-{version}
+=======================================
+
+The pytest team is proud to announce the {version} prerelease!
+
+This is a prerelease, not intended for production use, but to test the upcoming features and improvements
+in order to catch any major problems before the final version is released to the general public.
+
+We appreciate your help testing this out before the final release, making sure to report any
+regressions to our issue tracker:
+
+https://github.com/pytest-dev/pytest/issues
+
+When doing so, please include the string ``[prerelease]`` in the title.
+
+You can upgrade from PyPI via:
+
+ pip install pytest=={version}
+
+Users are encouraged to take a look at the CHANGELOG carefully:
+
+ https://docs.pytest.org/en/{doc_version}/changelog.html
+
+Thanks to all the contributors to this release:
+
+{contributors}
+
+Happy testing,
+The pytest Development Team
diff --git a/testing/web-platform/tests/tools/third_party/pytest/scripts/release.py b/testing/web-platform/tests/tools/third_party/pytest/scripts/release.py
new file mode 100644
index 0000000000..19fef42842
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/scripts/release.py
@@ -0,0 +1,131 @@
+"""Invoke development tasks."""
+import argparse
+import os
+from pathlib import Path
+from subprocess import call
+from subprocess import check_call
+from subprocess import check_output
+
+from colorama import Fore
+from colorama import init
+
+
+def announce(version, template_name, doc_version):
+ """Generates a new release announcement entry in the docs."""
+ # Get our list of authors
+ stdout = check_output(["git", "describe", "--abbrev=0", "--tags"])
+ stdout = stdout.decode("utf-8")
+ last_version = stdout.strip()
+
+ stdout = check_output(["git", "log", f"{last_version}..HEAD", "--format=%aN"])
+ stdout = stdout.decode("utf-8")
+
+ contributors = {
+ name
+ for name in stdout.splitlines()
+ if not name.endswith("[bot]") and name != "pytest bot"
+ }
+
+ template_text = (
+ Path(__file__).parent.joinpath(template_name).read_text(encoding="UTF-8")
+ )
+
+ contributors_text = "\n".join(f"* {name}" for name in sorted(contributors)) + "\n"
+ text = template_text.format(
+ version=version, contributors=contributors_text, doc_version=doc_version
+ )
+
+ target = Path(__file__).parent.joinpath(f"../doc/en/announce/release-{version}.rst")
+ target.write_text(text, encoding="UTF-8")
+ print(f"{Fore.CYAN}[generate.announce] {Fore.RESET}Generated {target.name}")
+
+ # Update index with the new release entry
+ index_path = Path(__file__).parent.joinpath("../doc/en/announce/index.rst")
+ lines = index_path.read_text(encoding="UTF-8").splitlines()
+ indent = " "
+ for index, line in enumerate(lines):
+ if line.startswith(f"{indent}release-"):
+ new_line = indent + target.stem
+ if line != new_line:
+ lines.insert(index, new_line)
+ index_path.write_text("\n".join(lines) + "\n", encoding="UTF-8")
+ print(
+ f"{Fore.CYAN}[generate.announce] {Fore.RESET}Updated {index_path.name}"
+ )
+ else:
+ print(
+ f"{Fore.CYAN}[generate.announce] {Fore.RESET}Skip {index_path.name} (already contains release)"
+ )
+ break
+
+ check_call(["git", "add", str(target)])
+
+
+def regen(version):
+ """Call regendoc tool to update examples and pytest output in the docs."""
+ print(f"{Fore.CYAN}[generate.regen] {Fore.RESET}Updating docs")
+ check_call(
+ ["tox", "-e", "regen"],
+ env={**os.environ, "SETUPTOOLS_SCM_PRETEND_VERSION_FOR_PYTEST": version},
+ )
+
+
+def fix_formatting():
+ """Runs pre-commit in all files to ensure they are formatted correctly"""
+ print(
+ f"{Fore.CYAN}[generate.fix linting] {Fore.RESET}Fixing formatting using pre-commit"
+ )
+ call(["pre-commit", "run", "--all-files"])
+
+
+def check_links():
+ """Runs sphinx-build to check links"""
+ print(f"{Fore.CYAN}[generate.check_links] {Fore.RESET}Checking links")
+ check_call(["tox", "-e", "docs-checklinks"])
+
+
+def pre_release(version, template_name, doc_version, *, skip_check_links):
+ """Generates new docs, release announcements and creates a local tag."""
+ announce(version, template_name, doc_version)
+ regen(version)
+ changelog(version, write_out=True)
+ fix_formatting()
+ if not skip_check_links:
+ check_links()
+
+ msg = f"Prepare release version {version}"
+ check_call(["git", "commit", "-a", "-m", msg])
+
+ print()
+ print(f"{Fore.CYAN}[generate.pre_release] {Fore.GREEN}All done!")
+ print()
+ print("Please push your branch and open a PR.")
+
+
+def changelog(version, write_out=False):
+ addopts = [] if write_out else ["--draft"]
+ check_call(["towncrier", "--yes", "--version", version] + addopts)
+
+
+def main():
+ init(autoreset=True)
+ parser = argparse.ArgumentParser()
+ parser.add_argument("version", help="Release version")
+ parser.add_argument(
+ "template_name", help="Name of template file to use for release announcement"
+ )
+ parser.add_argument(
+ "doc_version", help="For prereleases, the version to link to in the docs"
+ )
+ parser.add_argument("--skip-check-links", action="store_true", default=False)
+ options = parser.parse_args()
+ pre_release(
+ options.version,
+ options.template_name,
+ options.doc_version,
+ skip_check_links=options.skip_check_links,
+ )
+
+
+if __name__ == "__main__":
+ main()
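# Illustrative sketch (an assumption, not part of the patch above) of invoking
# scripts/release.py with the positional arguments its argparse parser defines;
# the version, template, and doc_version values are placeholders, and in the
# actual workflow the script is expected to be run through the "release" tox
# environment triggered by prepare-release-pr.py.
from subprocess import check_call

check_call(
    [
        "python",
        "scripts/release.py",
        "7.1.0",              # version
        "release.minor.rst",  # template_name
        "7.1.x",              # doc_version (only used by the prerelease template)
        "--skip-check-links",
    ]
)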
diff --git a/testing/web-platform/tests/tools/third_party/pytest/scripts/towncrier-draft-to-file.py b/testing/web-platform/tests/tools/third_party/pytest/scripts/towncrier-draft-to-file.py
new file mode 100644
index 0000000000..81507b40b7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/scripts/towncrier-draft-to-file.py
@@ -0,0 +1,15 @@
+import sys
+from subprocess import call
+
+
+def main():
+ """
+ Platform agnostic wrapper script for towncrier.
+ Fixes the issue (#7251) where Windows users are unable to natively run tox -e docs to build pytest docs.
+ """
+ with open("doc/en/_changelog_towncrier_draft.rst", "w") as draft_file:
+ return call(("towncrier", "--draft"), stdout=draft_file)
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/testing/web-platform/tests/tools/third_party/pytest/scripts/update-plugin-list.py b/testing/web-platform/tests/tools/third_party/pytest/scripts/update-plugin-list.py
new file mode 100644
index 0000000000..c034c72420
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/scripts/update-plugin-list.py
@@ -0,0 +1,140 @@
+import datetime
+import pathlib
+import re
+from textwrap import dedent
+from textwrap import indent
+
+import packaging.version
+import requests
+import tabulate
+import wcwidth
+from tqdm import tqdm
+
+FILE_HEAD = r"""
+.. _plugin-list:
+
+Plugin List
+===========
+
+PyPI projects that match "pytest-\*" are considered plugins and are listed
+automatically. Packages classified as inactive are excluded.
+
+.. The following conditional uses a different format for this list when
+ creating a PDF, because otherwise the table gets far too wide for the
+ page.
+
+"""
+DEVELOPMENT_STATUS_CLASSIFIERS = (
+ "Development Status :: 1 - Planning",
+ "Development Status :: 2 - Pre-Alpha",
+ "Development Status :: 3 - Alpha",
+ "Development Status :: 4 - Beta",
+ "Development Status :: 5 - Production/Stable",
+ "Development Status :: 6 - Mature",
+ "Development Status :: 7 - Inactive",
+)
+
+
+def escape_rst(text: str) -> str:
+ """Rudimentary attempt to escape special RST characters to appear as
+ plain text."""
+ text = (
+ text.replace("*", "\\*")
+ .replace("<", "\\<")
+ .replace(">", "\\>")
+ .replace("`", "\\`")
+ )
+ text = re.sub(r"_\b", "", text)
+ return text
+
+
+def iter_plugins():
+ regex = r">([\d\w-]*)</a>"
+ response = requests.get("https://pypi.org/simple")
+
+ matches = list(
+ match
+ for match in re.finditer(regex, response.text)
+ if match.groups()[0].startswith("pytest-")
+ )
+
+ for match in tqdm(matches, smoothing=0):
+ name = match.groups()[0]
+ response = requests.get(f"https://pypi.org/pypi/{name}/json")
+ if response.status_code == 404:
+ # Some packages, like pytest-azurepipelines42, are included in https://pypi.org/simple but
+ # return 404 on the JSON API. Skip.
+ continue
+ response.raise_for_status()
+ info = response.json()["info"]
+ if "Development Status :: 7 - Inactive" in info["classifiers"]:
+ continue
+ for classifier in DEVELOPMENT_STATUS_CLASSIFIERS:
+ if classifier in info["classifiers"]:
+ status = classifier[22:]
+ break
+ else:
+ status = "N/A"
+ requires = "N/A"
+ if info["requires_dist"]:
+ for requirement in info["requires_dist"]:
+ if requirement == "pytest" or "pytest " in requirement:
+ requires = requirement
+ break
+ releases = response.json()["releases"]
+ for release in sorted(releases, key=packaging.version.parse, reverse=True):
+ if releases[release]:
+ release_date = datetime.date.fromisoformat(
+ releases[release][-1]["upload_time_iso_8601"].split("T")[0]
+ )
+ last_release = release_date.strftime("%b %d, %Y")
+ break
+ name = f':pypi:`{info["name"]}`'
+ summary = escape_rst(info["summary"].replace("\n", ""))
+ yield {
+ "name": name,
+ "summary": summary.strip(),
+ "last release": last_release,
+ "status": status,
+ "requires": requires,
+ }
+
+
+def plugin_definitions(plugins):
+ """Return RST for the plugin list that fits better on a vertical page."""
+
+ for plugin in plugins:
+ yield dedent(
+ f"""
+ {plugin['name']}
+ *last release*: {plugin["last release"]},
+ *status*: {plugin["status"]},
+ *requires*: {plugin["requires"]}
+
+ {plugin["summary"]}
+ """
+ )
+
+
+def main():
+ plugins = list(iter_plugins())
+
+ reference_dir = pathlib.Path("doc", "en", "reference")
+
+ plugin_list = reference_dir / "plugin_list.rst"
+ with plugin_list.open("w") as f:
+ f.write(FILE_HEAD)
+ f.write(f"This list contains {len(plugins)} plugins.\n\n")
+ f.write(".. only:: not latex\n\n")
+
+ wcwidth # reference library that must exist for tabulate to work
+ plugin_table = tabulate.tabulate(plugins, headers="keys", tablefmt="rst")
+ f.write(indent(plugin_table, " "))
+ f.write("\n\n")
+
+ f.write(".. only:: latex\n\n")
+ f.write(indent("".join(plugin_definitions(plugins)), " "))
+
+
+if __name__ == "__main__":
+ main()
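# Illustrative sketch (not part of the patch above) of the per-package lookup
# that iter_plugins() performs against the PyPI JSON API, shown for one example
# plugin name; any "pytest-*" project would be queried the same way.
import requests

name = "pytest-xdist"
response = requests.get(f"https://pypi.org/pypi/{name}/json", timeout=30)
response.raise_for_status()
info = response.json()["info"]

print(info["name"], "-", info["summary"])
print("requires_dist:", info["requires_dist"])
print("status classifiers:", [c for c in info["classifiers"] if c.startswith("Development Status")])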
diff --git a/testing/web-platform/tests/tools/third_party/pytest/setup.cfg b/testing/web-platform/tests/tools/third_party/pytest/setup.cfg
new file mode 100644
index 0000000000..26a5d2e63e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/setup.cfg
@@ -0,0 +1,105 @@
+[metadata]
+name = pytest
+description = pytest: simple powerful testing with Python
+long_description = file: README.rst
+long_description_content_type = text/x-rst
+url = https://docs.pytest.org/en/latest/
+author = Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, Floris Bruynooghe, Brianna Laugher, Florian Bruhin and others
+license = MIT
+license_file = LICENSE
+platforms = unix, linux, osx, cygwin, win32
+classifiers =
+ Development Status :: 6 - Mature
+ Intended Audience :: Developers
+ License :: OSI Approved :: MIT License
+ Operating System :: MacOS :: MacOS X
+ Operating System :: Microsoft :: Windows
+ Operating System :: POSIX
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3 :: Only
+ Programming Language :: Python :: 3.6
+ Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: 3.9
+ Programming Language :: Python :: 3.10
+ Topic :: Software Development :: Libraries
+ Topic :: Software Development :: Testing
+ Topic :: Utilities
+keywords = test, unittest
+project_urls =
+ Changelog=https://docs.pytest.org/en/stable/changelog.html
+ Twitter=https://twitter.com/pytestdotorg
+ Source=https://github.com/pytest-dev/pytest
+ Tracker=https://github.com/pytest-dev/pytest/issues
+
+[options]
+packages =
+ _pytest
+ _pytest._code
+ _pytest._io
+ _pytest.assertion
+ _pytest.config
+ _pytest.mark
+ pytest
+install_requires =
+ attrs>=19.2.0
+ iniconfig
+ packaging
+ pluggy>=0.12,<2.0
+ py>=1.8.2
+ tomli>=1.0.0
+ atomicwrites>=1.0;sys_platform=="win32"
+ colorama;sys_platform=="win32"
+ importlib-metadata>=0.12;python_version<"3.8"
+python_requires = >=3.6
+package_dir =
+ =src
+setup_requires =
+ setuptools
+ setuptools-scm>=6.0
+zip_safe = no
+
+[options.entry_points]
+console_scripts =
+ pytest=pytest:console_main
+ py.test=pytest:console_main
+
+[options.extras_require]
+testing =
+ argcomplete
+ hypothesis>=3.56
+ mock
+ nose
+ pygments>=2.7.2
+ requests
+ xmlschema
+
+[options.package_data]
+_pytest = py.typed
+pytest = py.typed
+
+[build_sphinx]
+source_dir = doc/en/
+build_dir = doc/build
+all_files = 1
+
+[check-manifest]
+ignore =
+ src/_pytest/_version.py
+
+[devpi:upload]
+formats = sdist.tgz,bdist_wheel
+
+[mypy]
+mypy_path = src
+check_untyped_defs = True
+disallow_any_generics = True
+ignore_missing_imports = True
+no_implicit_optional = True
+show_error_codes = True
+strict_equality = True
+warn_redundant_casts = True
+warn_return_any = True
+warn_unreachable = True
+warn_unused_configs = True
+no_implicit_reexport = True
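# Illustrative sketch (not part of the patch above) of what the
# ``pytest=pytest:console_main`` console_scripts entry in the setup.cfg above
# resolves to: the installed ``pytest`` executable simply imports the package
# and calls its ``console_main()`` function, exiting with its return code.
import sys


def main() -> int:
    import pytest

    return pytest.console_main()


if __name__ == "__main__":
    sys.exit(main())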
diff --git a/testing/web-platform/tests/tools/third_party/pytest/setup.py b/testing/web-platform/tests/tools/third_party/pytest/setup.py
new file mode 100644
index 0000000000..7f1a1763ca
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/setup.py
@@ -0,0 +1,4 @@
+from setuptools import setup
+
+if __name__ == "__main__":
+ setup()
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/__init__.py
new file mode 100644
index 0000000000..8a406c5c75
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/__init__.py
@@ -0,0 +1,9 @@
+__all__ = ["__version__", "version_tuple"]
+
+try:
+ from ._version import version as __version__, version_tuple
+except ImportError: # pragma: no cover
+ # broken installation, we don't even try
+ # "unknown" only works because we do a poor man's version compare
+ __version__ = "unknown"
+ version_tuple = (0, 0, "unknown") # type:ignore[assignment]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_argcomplete.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_argcomplete.py
new file mode 100644
index 0000000000..41d9d9407c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_argcomplete.py
@@ -0,0 +1,117 @@
+"""Allow bash-completion for argparse with argcomplete if installed.
+
+Needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail
+to find the magic string, so _ARGCOMPLETE env. var is never set, and
+this does not need special code).
+
+Function try_argcomplete(parser) should be called directly before
+the call to ArgumentParser.parse_args().
+
+The filescompleter is what you normally would use on the positional
+arguments specification, in order to get "dirname/" after "dirn<TAB>"
+instead of the default "dirname ":
+
+ optparser.add_argument(Config._file_or_dir, nargs='*').completer=filescompleter
+
+Other, application specific, completers should go in the file
+doing the add_argument calls as they need to be specified as .completer
+attributes as well. (If argcomplete is not installed, the function the
+attribute points to will not be used).
+
+SPEEDUP
+=======
+
+The generic argcomplete script for bash-completion
+(/etc/bash_completion.d/python-argcomplete.sh)
+uses a python program to determine the startup script generated by pip.
+You can speed up completion somewhat by changing this script to include
+ # PYTHON_ARGCOMPLETE_OK
+so the python-argcomplete-check-easy-install-script does not
+need to be called to find the entry point of the code and see if that is
+marked with PYTHON_ARGCOMPLETE_OK.
+
+INSTALL/DEBUGGING
+=================
+
+To include this support in another application that has setup.py generated
+scripts:
+
+- Add the line:
+ # PYTHON_ARGCOMPLETE_OK
+ near the top of the main python entry point.
+
+- Include in the file calling parse_args():
+ from _argcomplete import try_argcomplete, filescompleter
+ Call try_argcomplete just before parse_args(), and optionally add
+ filescompleter to the positional arguments' add_argument().
+
+If things do not work right away:
+
+- Switch on argcomplete debugging with (also helpful when doing custom
+ completers):
+ export _ARC_DEBUG=1
+
+- Run:
+ python-argcomplete-check-easy-install-script $(which appname)
+ echo $?
+ will echo 0 if the magic line has been found, 1 if not.
+
+- Sometimes it helps to find errors early on using:
+ _ARGCOMPLETE=1 _ARC_DEBUG=1 appname
+ which should throw a KeyError: 'COMPLINE' (which is properly set by the
+ global argcomplete script).
+"""
+import argparse
+import os
+import sys
+from glob import glob
+from typing import Any
+from typing import List
+from typing import Optional
+
+
+class FastFilesCompleter:
+ """Fast file completer class."""
+
+ def __init__(self, directories: bool = True) -> None:
+ self.directories = directories
+
+ def __call__(self, prefix: str, **kwargs: Any) -> List[str]:
+ # Only called on non option completions.
+ if os.path.sep in prefix[1:]:
+ prefix_dir = len(os.path.dirname(prefix) + os.path.sep)
+ else:
+ prefix_dir = 0
+ completion = []
+ globbed = []
+ if "*" not in prefix and "?" not in prefix:
+ # We are on unix, otherwise no bash.
+ if not prefix or prefix[-1] == os.path.sep:
+ globbed.extend(glob(prefix + ".*"))
+ prefix += "*"
+ globbed.extend(glob(prefix))
+ for x in sorted(globbed):
+ if os.path.isdir(x):
+ x += "/"
+ # Append stripping the prefix (like bash, not like compgen).
+ completion.append(x[prefix_dir:])
+ return completion
+
+
+if os.environ.get("_ARGCOMPLETE"):
+ try:
+ import argcomplete.completers
+ except ImportError:
+ sys.exit(-1)
+ filescompleter: Optional[FastFilesCompleter] = FastFilesCompleter()
+
+ def try_argcomplete(parser: argparse.ArgumentParser) -> None:
+ argcomplete.autocomplete(parser, always_complete_options=False)
+
+
+else:
+
+ def try_argcomplete(parser: argparse.ArgumentParser) -> None:
+ pass
+
+ filescompleter = None
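# Illustrative sketch (not part of the patch above) of the wiring described in
# the module docstring: attach ``filescompleter`` to a positional argument and
# call ``try_argcomplete()`` right before ``parse_args()``.  The "appname"
# parser and its argument are hypothetical.
import argparse

from _pytest._argcomplete import filescompleter, try_argcomplete

parser = argparse.ArgumentParser(prog="appname")
action = parser.add_argument("file_or_dir", nargs="*")
if filescompleter is not None:  # only non-None when _ARGCOMPLETE is set
    action.completer = filescompleter  # type: ignore[attr-defined]
try_argcomplete(parser)
args = parser.parse_args()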
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_code/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_code/__init__.py
new file mode 100644
index 0000000000..511d0dde66
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_code/__init__.py
@@ -0,0 +1,22 @@
+"""Python inspection/code generation API."""
+from .code import Code
+from .code import ExceptionInfo
+from .code import filter_traceback
+from .code import Frame
+from .code import getfslineno
+from .code import Traceback
+from .code import TracebackEntry
+from .source import getrawcode
+from .source import Source
+
+__all__ = [
+ "Code",
+ "ExceptionInfo",
+ "filter_traceback",
+ "Frame",
+ "getfslineno",
+ "getrawcode",
+ "Traceback",
+ "TracebackEntry",
+ "Source",
+]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_code/code.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_code/code.py
new file mode 100644
index 0000000000..5b758a8848
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_code/code.py
@@ -0,0 +1,1274 @@
+import ast
+import inspect
+import os
+import re
+import sys
+import traceback
+from inspect import CO_VARARGS
+from inspect import CO_VARKEYWORDS
+from io import StringIO
+from pathlib import Path
+from traceback import format_exception_only
+from types import CodeType
+from types import FrameType
+from types import TracebackType
+from typing import Any
+from typing import Callable
+from typing import ClassVar
+from typing import Dict
+from typing import Generic
+from typing import Iterable
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import overload
+from typing import Pattern
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+from weakref import ref
+
+import attr
+import pluggy
+
+import _pytest
+from _pytest._code.source import findsource
+from _pytest._code.source import getrawcode
+from _pytest._code.source import getstatementrange_ast
+from _pytest._code.source import Source
+from _pytest._io import TerminalWriter
+from _pytest._io.saferepr import safeformat
+from _pytest._io.saferepr import saferepr
+from _pytest.compat import final
+from _pytest.compat import get_real_func
+from _pytest.deprecated import check_ispytest
+from _pytest.pathlib import absolutepath
+from _pytest.pathlib import bestrelpath
+
+if TYPE_CHECKING:
+ from typing_extensions import Literal
+ from typing_extensions import SupportsIndex
+ from weakref import ReferenceType
+
+ _TracebackStyle = Literal["long", "short", "line", "no", "native", "value", "auto"]
+
+
+class Code:
+ """Wrapper around Python code objects."""
+
+ __slots__ = ("raw",)
+
+ def __init__(self, obj: CodeType) -> None:
+ self.raw = obj
+
+ @classmethod
+ def from_function(cls, obj: object) -> "Code":
+ return cls(getrawcode(obj))
+
+ def __eq__(self, other):
+ return self.raw == other.raw
+
+ # Ignore type because of https://github.com/python/mypy/issues/4266.
+ __hash__ = None # type: ignore
+
+ @property
+ def firstlineno(self) -> int:
+ return self.raw.co_firstlineno - 1
+
+ @property
+ def name(self) -> str:
+ return self.raw.co_name
+
+ @property
+ def path(self) -> Union[Path, str]:
+ """Return a path object pointing to source code, or an ``str`` in
+ case of ``OSError`` / non-existing file."""
+ if not self.raw.co_filename:
+ return ""
+ try:
+ p = absolutepath(self.raw.co_filename)
+ # maybe don't try this checking
+ if not p.exists():
+ raise OSError("path check failed.")
+ return p
+ except OSError:
+ # XXX maybe try harder like the weird logic
+ # in the standard lib [linecache.updatecache] does?
+ return self.raw.co_filename
+
+ @property
+ def fullsource(self) -> Optional["Source"]:
+ """Return a _pytest._code.Source object for the full source file of the code."""
+ full, _ = findsource(self.raw)
+ return full
+
+ def source(self) -> "Source":
+ """Return a _pytest._code.Source object for the code object's source only."""
+ # return source only for that part of code
+ return Source(self.raw)
+
+ def getargs(self, var: bool = False) -> Tuple[str, ...]:
+ """Return a tuple with the argument names for the code object.
+
+ If 'var' is set True also return the names of the variable and
+ keyword arguments when present.
+ """
+ # Handy shortcut for getting args.
+ raw = self.raw
+ argcount = raw.co_argcount
+ if var:
+ argcount += raw.co_flags & CO_VARARGS
+ argcount += raw.co_flags & CO_VARKEYWORDS
+ return raw.co_varnames[:argcount]
+
+
+class Frame:
+ """Wrapper around a Python frame holding f_locals and f_globals
+ in which expressions can be evaluated."""
+
+ __slots__ = ("raw",)
+
+ def __init__(self, frame: FrameType) -> None:
+ self.raw = frame
+
+ @property
+ def lineno(self) -> int:
+ return self.raw.f_lineno - 1
+
+ @property
+ def f_globals(self) -> Dict[str, Any]:
+ return self.raw.f_globals
+
+ @property
+ def f_locals(self) -> Dict[str, Any]:
+ return self.raw.f_locals
+
+ @property
+ def code(self) -> Code:
+ return Code(self.raw.f_code)
+
+ @property
+ def statement(self) -> "Source":
+ """Statement this frame is at."""
+ if self.code.fullsource is None:
+ return Source("")
+ return self.code.fullsource.getstatement(self.lineno)
+
+ def eval(self, code, **vars):
+ """Evaluate 'code' in the frame.
+
+ 'vars' are optional additional local variables.
+
+ Returns the result of the evaluation.
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ return eval(code, self.f_globals, f_locals)
+
+ def repr(self, object: object) -> str:
+ """Return a 'safe' (non-recursive, one-line) string repr for 'object'."""
+ return saferepr(object)
+
+ def getargs(self, var: bool = False):
+ """Return a list of tuples (name, value) for all arguments.
+
+ If 'var' is set True, also include the variable and keyword arguments
+ when present.
+ """
+ retval = []
+ for arg in self.code.getargs(var):
+ try:
+ retval.append((arg, self.f_locals[arg]))
+ except KeyError:
+ pass # this can occur when using Psyco
+ return retval
+
+
+class TracebackEntry:
+ """A single entry in a Traceback."""
+
+ __slots__ = ("_rawentry", "_excinfo", "_repr_style")
+
+ def __init__(
+ self,
+ rawentry: TracebackType,
+ excinfo: Optional["ReferenceType[ExceptionInfo[BaseException]]"] = None,
+ ) -> None:
+ self._rawentry = rawentry
+ self._excinfo = excinfo
+ self._repr_style: Optional['Literal["short", "long"]'] = None
+
+ @property
+ def lineno(self) -> int:
+ return self._rawentry.tb_lineno - 1
+
+ def set_repr_style(self, mode: "Literal['short', 'long']") -> None:
+ assert mode in ("short", "long")
+ self._repr_style = mode
+
+ @property
+ def frame(self) -> Frame:
+ return Frame(self._rawentry.tb_frame)
+
+ @property
+ def relline(self) -> int:
+ return self.lineno - self.frame.code.firstlineno
+
+ def __repr__(self) -> str:
+ return "<TracebackEntry %s:%d>" % (self.frame.code.path, self.lineno + 1)
+
+ @property
+ def statement(self) -> "Source":
+ """_pytest._code.Source object for the current statement."""
+ source = self.frame.code.fullsource
+ assert source is not None
+ return source.getstatement(self.lineno)
+
+ @property
+ def path(self) -> Union[Path, str]:
+ """Path to the source code."""
+ return self.frame.code.path
+
+ @property
+ def locals(self) -> Dict[str, Any]:
+ """Locals of underlying frame."""
+ return self.frame.f_locals
+
+ def getfirstlinesource(self) -> int:
+ return self.frame.code.firstlineno
+
+ def getsource(
+ self, astcache: Optional[Dict[Union[str, Path], ast.AST]] = None
+ ) -> Optional["Source"]:
+ """Return failing source code."""
+ # we use the passed in astcache to not reparse asttrees
+ # within exception info printing
+ source = self.frame.code.fullsource
+ if source is None:
+ return None
+ key = astnode = None
+ if astcache is not None:
+ key = self.frame.code.path
+ if key is not None:
+ astnode = astcache.get(key, None)
+ start = self.getfirstlinesource()
+ try:
+ astnode, _, end = getstatementrange_ast(
+ self.lineno, source, astnode=astnode
+ )
+ except SyntaxError:
+ end = self.lineno + 1
+ else:
+ if key is not None and astcache is not None:
+ astcache[key] = astnode
+ return source[start:end]
+
+ source = property(getsource)
+
+ def ishidden(self) -> bool:
+ """Return True if the current frame has a var __tracebackhide__
+ resolving to True.
+
+ If __tracebackhide__ is a callable, it gets called with the
+ ExceptionInfo instance and can decide whether to hide the traceback.
+
+ Mostly for internal use.
+ """
+ tbh: Union[
+ bool, Callable[[Optional[ExceptionInfo[BaseException]]], bool]
+ ] = False
+ for maybe_ns_dct in (self.frame.f_locals, self.frame.f_globals):
+ # in normal cases, f_locals and f_globals are dictionaries
+ # however via `exec(...)` / `eval(...)` they can be other types
+ # (even incorrect types!).
+ # as such, we suppress all exceptions while accessing __tracebackhide__
+ try:
+ tbh = maybe_ns_dct["__tracebackhide__"]
+ except Exception:
+ pass
+ else:
+ break
+ if tbh and callable(tbh):
+ return tbh(None if self._excinfo is None else self._excinfo())
+ return tbh
+
+ def __str__(self) -> str:
+ name = self.frame.code.name
+ try:
+ line = str(self.statement).lstrip()
+ except KeyboardInterrupt:
+ raise
+ except BaseException:
+ line = "???"
+ # This output does not quite match Python's repr for traceback entries,
+ # but changing it to do so would break certain plugins. See
+ # https://github.com/pytest-dev/pytest/pull/7535/ for details.
+ return " File %r:%d in %s\n %s\n" % (
+ str(self.path),
+ self.lineno + 1,
+ name,
+ line,
+ )
+
+ @property
+ def name(self) -> str:
+ """co_name of underlying code."""
+ return self.frame.code.raw.co_name
+
+
+class Traceback(List[TracebackEntry]):
+ """Traceback objects encapsulate and offer higher level access to Traceback entries."""
+
+ def __init__(
+ self,
+ tb: Union[TracebackType, Iterable[TracebackEntry]],
+ excinfo: Optional["ReferenceType[ExceptionInfo[BaseException]]"] = None,
+ ) -> None:
+ """Initialize from given python traceback object and ExceptionInfo."""
+ self._excinfo = excinfo
+ if isinstance(tb, TracebackType):
+
+ def f(cur: TracebackType) -> Iterable[TracebackEntry]:
+ cur_: Optional[TracebackType] = cur
+ while cur_ is not None:
+ yield TracebackEntry(cur_, excinfo=excinfo)
+ cur_ = cur_.tb_next
+
+ super().__init__(f(tb))
+ else:
+ super().__init__(tb)
+
+ def cut(
+ self,
+ path: Optional[Union["os.PathLike[str]", str]] = None,
+ lineno: Optional[int] = None,
+ firstlineno: Optional[int] = None,
+ excludepath: Optional["os.PathLike[str]"] = None,
+ ) -> "Traceback":
+ """Return a Traceback instance wrapping part of this Traceback.
+
+ By providing any combination of path, lineno and firstlineno, the
+ first frame to start the to-be-returned traceback is determined.
+
+ This allows cutting the first part of a Traceback instance e.g.
+ for formatting reasons (removing some uninteresting bits that deal
+ with handling of the exception/traceback).
+ """
+ path_ = None if path is None else os.fspath(path)
+ excludepath_ = None if excludepath is None else os.fspath(excludepath)
+ for x in self:
+ code = x.frame.code
+ codepath = code.path
+ if path is not None and str(codepath) != path_:
+ continue
+ if (
+ excludepath is not None
+ and isinstance(codepath, Path)
+ and excludepath_ in (str(p) for p in codepath.parents) # type: ignore[operator]
+ ):
+ continue
+ if lineno is not None and x.lineno != lineno:
+ continue
+ if firstlineno is not None and x.frame.code.firstlineno != firstlineno:
+ continue
+ return Traceback(x._rawentry, self._excinfo)
+ return self
+
+ @overload
+ def __getitem__(self, key: "SupportsIndex") -> TracebackEntry:
+ ...
+
+ @overload
+ def __getitem__(self, key: slice) -> "Traceback":
+ ...
+
+ def __getitem__(
+ self, key: Union["SupportsIndex", slice]
+ ) -> Union[TracebackEntry, "Traceback"]:
+ if isinstance(key, slice):
+ return self.__class__(super().__getitem__(key))
+ else:
+ return super().__getitem__(key)
+
+ def filter(
+ self, fn: Callable[[TracebackEntry], bool] = lambda x: not x.ishidden()
+ ) -> "Traceback":
+ """Return a Traceback instance with certain items removed
+
+ fn is a function that gets a single argument, a TracebackEntry
+ instance, and should return True when the item should be added
+ to the Traceback, False when not.
+
+ By default this removes all the TracebackEntries which are hidden
+ (see ishidden() above).
+ """
+ return Traceback(filter(fn, self), self._excinfo)
+
+ def getcrashentry(self) -> TracebackEntry:
+ """Return last non-hidden traceback entry that lead to the exception of a traceback."""
+ for i in range(-1, -len(self) - 1, -1):
+ entry = self[i]
+ if not entry.ishidden():
+ return entry
+ return self[-1]
+
+ def recursionindex(self) -> Optional[int]:
+ """Return the index of the frame/TracebackEntry where recursion originates if
+ appropriate, None if no recursion occurred."""
+ cache: Dict[Tuple[Any, int, int], List[Dict[str, Any]]] = {}
+ for i, entry in enumerate(self):
+ # id for the code.raw is needed to work around
+ # the strange metaprogramming in the decorator lib from pypi
+ # which generates code objects that have hash/value equality
+ # XXX needs a test
+ key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
+ # print "checking for recursion at", key
+ values = cache.setdefault(key, [])
+ if values:
+ f = entry.frame
+ loc = f.f_locals
+ for otherloc in values:
+ if otherloc == loc:
+ return i
+ values.append(entry.frame.f_locals)
+ return None
+
+
+E = TypeVar("E", bound=BaseException, covariant=True)
+
+
+@final
+@attr.s(repr=False, init=False, auto_attribs=True)
+class ExceptionInfo(Generic[E]):
+ """Wraps sys.exc_info() objects and offers help for navigating the traceback."""
+
+ _assert_start_repr: ClassVar = "AssertionError('assert "
+
+ _excinfo: Optional[Tuple[Type["E"], "E", TracebackType]]
+ _striptext: str
+ _traceback: Optional[Traceback]
+
+ def __init__(
+ self,
+ excinfo: Optional[Tuple[Type["E"], "E", TracebackType]],
+ striptext: str = "",
+ traceback: Optional[Traceback] = None,
+ *,
+ _ispytest: bool = False,
+ ) -> None:
+ check_ispytest(_ispytest)
+ self._excinfo = excinfo
+ self._striptext = striptext
+ self._traceback = traceback
+
+ @classmethod
+ def from_exc_info(
+ cls,
+ exc_info: Tuple[Type[E], E, TracebackType],
+ exprinfo: Optional[str] = None,
+ ) -> "ExceptionInfo[E]":
+ """Return an ExceptionInfo for an existing exc_info tuple.
+
+ .. warning::
+
+ Experimental API
+
+ :param exprinfo:
+ A text string helping to determine if we should strip
+ ``AssertionError`` from the output. Defaults to the exception
+ message/``__str__()``.
+ """
+ _striptext = ""
+ if exprinfo is None and isinstance(exc_info[1], AssertionError):
+ exprinfo = getattr(exc_info[1], "msg", None)
+ if exprinfo is None:
+ exprinfo = saferepr(exc_info[1])
+ if exprinfo and exprinfo.startswith(cls._assert_start_repr):
+ _striptext = "AssertionError: "
+
+ return cls(exc_info, _striptext, _ispytest=True)
+
+ @classmethod
+ def from_current(
+ cls, exprinfo: Optional[str] = None
+ ) -> "ExceptionInfo[BaseException]":
+ """Return an ExceptionInfo matching the current traceback.
+
+ .. warning::
+
+ Experimental API
+
+ :param exprinfo:
+ A text string helping to determine if we should strip
+ ``AssertionError`` from the output. Defaults to the exception
+ message/``__str__()``.
+ """
+ tup = sys.exc_info()
+ assert tup[0] is not None, "no current exception"
+ assert tup[1] is not None, "no current exception"
+ assert tup[2] is not None, "no current exception"
+ exc_info = (tup[0], tup[1], tup[2])
+ return ExceptionInfo.from_exc_info(exc_info, exprinfo)
+
+ @classmethod
+ def for_later(cls) -> "ExceptionInfo[E]":
+ """Return an unfilled ExceptionInfo."""
+ return cls(None, _ispytest=True)
+
+ def fill_unfilled(self, exc_info: Tuple[Type[E], E, TracebackType]) -> None:
+ """Fill an unfilled ExceptionInfo created with ``for_later()``."""
+ assert self._excinfo is None, "ExceptionInfo was already filled"
+ self._excinfo = exc_info
+
+ @property
+ def type(self) -> Type[E]:
+ """The exception class."""
+ assert (
+ self._excinfo is not None
+ ), ".type can only be used after the context manager exits"
+ return self._excinfo[0]
+
+ @property
+ def value(self) -> E:
+ """The exception value."""
+ assert (
+ self._excinfo is not None
+ ), ".value can only be used after the context manager exits"
+ return self._excinfo[1]
+
+ @property
+ def tb(self) -> TracebackType:
+ """The exception raw traceback."""
+ assert (
+ self._excinfo is not None
+ ), ".tb can only be used after the context manager exits"
+ return self._excinfo[2]
+
+ @property
+ def typename(self) -> str:
+ """The type name of the exception."""
+ assert (
+ self._excinfo is not None
+ ), ".typename can only be used after the context manager exits"
+ return self.type.__name__
+
+ @property
+ def traceback(self) -> Traceback:
+ """The traceback."""
+ if self._traceback is None:
+ self._traceback = Traceback(self.tb, excinfo=ref(self))
+ return self._traceback
+
+ @traceback.setter
+ def traceback(self, value: Traceback) -> None:
+ self._traceback = value
+
+ def __repr__(self) -> str:
+ if self._excinfo is None:
+ return "<ExceptionInfo for raises contextmanager>"
+ return "<{} {} tblen={}>".format(
+ self.__class__.__name__, saferepr(self._excinfo[1]), len(self.traceback)
+ )
+
+ def exconly(self, tryshort: bool = False) -> str:
+ """Return the exception as a string.
+
+ When 'tryshort' resolves to True, and the exception is an
+ AssertionError, only the actual exception part of the exception
+ representation is returned (so 'AssertionError: ' is removed from
+ the beginning).
+ """
+ lines = format_exception_only(self.type, self.value)
+ text = "".join(lines)
+ text = text.rstrip()
+ if tryshort:
+ if text.startswith(self._striptext):
+ text = text[len(self._striptext) :]
+ return text
+
+ def errisinstance(
+ self, exc: Union[Type[BaseException], Tuple[Type[BaseException], ...]]
+ ) -> bool:
+ """Return True if the exception is an instance of exc.
+
+ Consider using ``isinstance(excinfo.value, exc)`` instead.
+ """
+ return isinstance(self.value, exc)
+
+ def _getreprcrash(self) -> "ReprFileLocation":
+ exconly = self.exconly(tryshort=True)
+ entry = self.traceback.getcrashentry()
+ path, lineno = entry.frame.code.raw.co_filename, entry.lineno
+ return ReprFileLocation(path, lineno + 1, exconly)
+
+ def getrepr(
+ self,
+ showlocals: bool = False,
+ style: "_TracebackStyle" = "long",
+ abspath: bool = False,
+ tbfilter: bool = True,
+ funcargs: bool = False,
+ truncate_locals: bool = True,
+ chain: bool = True,
+ ) -> Union["ReprExceptionInfo", "ExceptionChainRepr"]:
+ """Return str()able representation of this exception info.
+
+ :param bool showlocals:
+ Show locals per traceback entry.
+ Ignored if ``style=="native"``.
+
+ :param str style:
+ long|short|no|native|value traceback style.
+
+ :param bool abspath:
+ If paths should be changed to absolute or left unchanged.
+
+ :param bool tbfilter:
+ Hide entries that contain a local variable ``__tracebackhide__==True``.
+ Ignored if ``style=="native"``.
+
+ :param bool funcargs:
+ Show fixtures ("funcargs" for legacy purposes) per traceback entry.
+
+ :param bool truncate_locals:
+ With ``showlocals==True``, make sure locals can be safely represented as strings.
+
+ :param bool chain:
+ If chained exceptions in Python 3 should be shown.
+
+ .. versionchanged:: 3.9
+
+ Added the ``chain`` parameter.
+ """
+ if style == "native":
+ return ReprExceptionInfo(
+ ReprTracebackNative(
+ traceback.format_exception(
+ self.type, self.value, self.traceback[0]._rawentry
+ )
+ ),
+ self._getreprcrash(),
+ )
+
+ fmt = FormattedExcinfo(
+ showlocals=showlocals,
+ style=style,
+ abspath=abspath,
+ tbfilter=tbfilter,
+ funcargs=funcargs,
+ truncate_locals=truncate_locals,
+ chain=chain,
+ )
+ return fmt.repr_excinfo(self)
+
+ def match(self, regexp: Union[str, Pattern[str]]) -> "Literal[True]":
+ """Check whether the regular expression `regexp` matches the string
+ representation of the exception using :func:`python:re.search`.
+
+ If it matches, `True` is returned; otherwise an `AssertionError` is raised.
+ """
+ __tracebackhide__ = True
+ msg = "Regex pattern {!r} does not match {!r}."
+ if regexp == str(self.value):
+ msg += " Did you mean to `re.escape()` the regex?"
+ assert re.search(regexp, str(self.value)), msg.format(regexp, str(self.value))
+ # Return True to allow for "assert excinfo.match()".
+ return True
+
+
+@attr.s(auto_attribs=True)
+class FormattedExcinfo:
+ """Presenting information about failing Functions and Generators."""
+
+ # for traceback entries
+ flow_marker: ClassVar = ">"
+ fail_marker: ClassVar = "E"
+
+ showlocals: bool = False
+ style: "_TracebackStyle" = "long"
+ abspath: bool = True
+ tbfilter: bool = True
+ funcargs: bool = False
+ truncate_locals: bool = True
+ chain: bool = True
+ astcache: Dict[Union[str, Path], ast.AST] = attr.ib(
+ factory=dict, init=False, repr=False
+ )
+
+ def _getindent(self, source: "Source") -> int:
+ # Figure out indent for the given source.
+ try:
+ s = str(source.getstatement(len(source) - 1))
+ except KeyboardInterrupt:
+ raise
+ except BaseException:
+ try:
+ s = str(source[-1])
+ except KeyboardInterrupt:
+ raise
+ except BaseException:
+ return 0
+ return 4 + (len(s) - len(s.lstrip()))
+
+ def _getentrysource(self, entry: TracebackEntry) -> Optional["Source"]:
+ source = entry.getsource(self.astcache)
+ if source is not None:
+ source = source.deindent()
+ return source
+
+ def repr_args(self, entry: TracebackEntry) -> Optional["ReprFuncArgs"]:
+ if self.funcargs:
+ args = []
+ for argname, argvalue in entry.frame.getargs(var=True):
+ args.append((argname, saferepr(argvalue)))
+ return ReprFuncArgs(args)
+ return None
+
+ def get_source(
+ self,
+ source: Optional["Source"],
+ line_index: int = -1,
+ excinfo: Optional[ExceptionInfo[BaseException]] = None,
+ short: bool = False,
+ ) -> List[str]:
+ """Return formatted and marked up source lines."""
+ lines = []
+ if source is None or line_index >= len(source.lines):
+ source = Source("???")
+ line_index = 0
+ if line_index < 0:
+ line_index += len(source)
+ space_prefix = " "
+ if short:
+ lines.append(space_prefix + source.lines[line_index].strip())
+ else:
+ for line in source.lines[:line_index]:
+ lines.append(space_prefix + line)
+ lines.append(self.flow_marker + " " + source.lines[line_index])
+ for line in source.lines[line_index + 1 :]:
+ lines.append(space_prefix + line)
+ if excinfo is not None:
+ indent = 4 if short else self._getindent(source)
+ lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
+ return lines
+
+ def get_exconly(
+ self,
+ excinfo: ExceptionInfo[BaseException],
+ indent: int = 4,
+ markall: bool = False,
+ ) -> List[str]:
+ lines = []
+ indentstr = " " * indent
+ # Get the real exception information out.
+ exlines = excinfo.exconly(tryshort=True).split("\n")
+ failindent = self.fail_marker + indentstr[1:]
+ for line in exlines:
+ lines.append(failindent + line)
+ if not markall:
+ failindent = indentstr
+ return lines
+
+ def repr_locals(self, locals: Mapping[str, object]) -> Optional["ReprLocals"]:
+ if self.showlocals:
+ lines = []
+ keys = [loc for loc in locals if loc[0] != "@"]
+ keys.sort()
+ for name in keys:
+ value = locals[name]
+ if name == "__builtins__":
+ lines.append("__builtins__ = <builtins>")
+ else:
+ # This formatting could all be handled by the
+ # _repr() function, which is only reprlib.Repr in
+ # disguise, so is very configurable.
+ if self.truncate_locals:
+ str_repr = saferepr(value)
+ else:
+ str_repr = safeformat(value)
+ # if len(str_repr) < 70 or not isinstance(value, (list, tuple, dict)):
+ lines.append(f"{name:<10} = {str_repr}")
+ # else:
+ # self._line("%-10s =\\" % (name,))
+ # # XXX
+ # pprint.pprint(value, stream=self.excinfowriter)
+ return ReprLocals(lines)
+ return None
+
+ def repr_traceback_entry(
+ self,
+ entry: TracebackEntry,
+ excinfo: Optional[ExceptionInfo[BaseException]] = None,
+ ) -> "ReprEntry":
+ lines: List[str] = []
+ style = entry._repr_style if entry._repr_style is not None else self.style
+ if style in ("short", "long"):
+ source = self._getentrysource(entry)
+ if source is None:
+ source = Source("???")
+ line_index = 0
+ else:
+ line_index = entry.lineno - entry.getfirstlinesource()
+ short = style == "short"
+ reprargs = self.repr_args(entry) if not short else None
+ s = self.get_source(source, line_index, excinfo, short=short)
+ lines.extend(s)
+ if short:
+ message = "in %s" % (entry.name)
+ else:
+ message = excinfo and excinfo.typename or ""
+ entry_path = entry.path
+ path = self._makepath(entry_path)
+ reprfileloc = ReprFileLocation(path, entry.lineno + 1, message)
+ localsrepr = self.repr_locals(entry.locals)
+ return ReprEntry(lines, reprargs, localsrepr, reprfileloc, style)
+ elif style == "value":
+ if excinfo:
+ lines.extend(str(excinfo.value).split("\n"))
+ return ReprEntry(lines, None, None, None, style)
+ else:
+ if excinfo:
+ lines.extend(self.get_exconly(excinfo, indent=4))
+ return ReprEntry(lines, None, None, None, style)
+
+ def _makepath(self, path: Union[Path, str]) -> str:
+ if not self.abspath and isinstance(path, Path):
+ try:
+ np = bestrelpath(Path.cwd(), path)
+ except OSError:
+ return str(path)
+ if len(np) < len(str(path)):
+ return np
+ return str(path)
+
+ def repr_traceback(self, excinfo: ExceptionInfo[BaseException]) -> "ReprTraceback":
+ traceback = excinfo.traceback
+ if self.tbfilter:
+ traceback = traceback.filter()
+
+ if isinstance(excinfo.value, RecursionError):
+ traceback, extraline = self._truncate_recursive_traceback(traceback)
+ else:
+ extraline = None
+
+ last = traceback[-1]
+ entries = []
+ if self.style == "value":
+ reprentry = self.repr_traceback_entry(last, excinfo)
+ entries.append(reprentry)
+ return ReprTraceback(entries, None, style=self.style)
+
+ for index, entry in enumerate(traceback):
+ einfo = (last == entry) and excinfo or None
+ reprentry = self.repr_traceback_entry(entry, einfo)
+ entries.append(reprentry)
+ return ReprTraceback(entries, extraline, style=self.style)
+
+ def _truncate_recursive_traceback(
+ self, traceback: Traceback
+ ) -> Tuple[Traceback, Optional[str]]:
+ """Truncate the given recursive traceback trying to find the starting
+ point of the recursion.
+
+ The detection is done by going through each traceback entry and
+ finding the point in which the locals of the frame are equal to the
+ locals of a previous frame (see ``recursionindex()``).
+
+ Handle the situation where the recursion process might raise an
+ exception (for example comparing numpy arrays using equality raises a
+ TypeError), in which case we do our best to warn the user of the
+ error and show a limited traceback.
+ """
+ try:
+ recursionindex = traceback.recursionindex()
+ except Exception as e:
+ max_frames = 10
+ extraline: Optional[str] = (
+ "!!! Recursion error detected, but an error occurred locating the origin of recursion.\n"
+ " The following exception happened when comparing locals in the stack frame:\n"
+ " {exc_type}: {exc_msg}\n"
+ " Displaying first and last {max_frames} stack frames out of {total}."
+ ).format(
+ exc_type=type(e).__name__,
+ exc_msg=str(e),
+ max_frames=max_frames,
+ total=len(traceback),
+ )
+ # Type ignored because adding two instances of a List subtype
+ # currently incorrectly has type List instead of the subtype.
+ traceback = traceback[:max_frames] + traceback[-max_frames:] # type: ignore
+ else:
+ if recursionindex is not None:
+ extraline = "!!! Recursion detected (same locals & position)"
+ traceback = traceback[: recursionindex + 1]
+ else:
+ extraline = None
+
+ return traceback, extraline
+
+ def repr_excinfo(
+ self, excinfo: ExceptionInfo[BaseException]
+ ) -> "ExceptionChainRepr":
+ repr_chain: List[
+ Tuple[ReprTraceback, Optional[ReprFileLocation], Optional[str]]
+ ] = []
+ e: Optional[BaseException] = excinfo.value
+ excinfo_: Optional[ExceptionInfo[BaseException]] = excinfo
+ descr = None
+ seen: Set[int] = set()
+ while e is not None and id(e) not in seen:
+ seen.add(id(e))
+ if excinfo_:
+ reprtraceback = self.repr_traceback(excinfo_)
+ reprcrash: Optional[ReprFileLocation] = (
+ excinfo_._getreprcrash() if self.style != "value" else None
+ )
+ else:
+ # Fallback to native repr if the exception doesn't have a traceback:
+ # ExceptionInfo objects require a full traceback to work.
+ reprtraceback = ReprTracebackNative(
+ traceback.format_exception(type(e), e, None)
+ )
+ reprcrash = None
+
+ repr_chain += [(reprtraceback, reprcrash, descr)]
+ if e.__cause__ is not None and self.chain:
+ e = e.__cause__
+ excinfo_ = (
+ ExceptionInfo.from_exc_info((type(e), e, e.__traceback__))
+ if e.__traceback__
+ else None
+ )
+ descr = "The above exception was the direct cause of the following exception:"
+ elif (
+ e.__context__ is not None and not e.__suppress_context__ and self.chain
+ ):
+ e = e.__context__
+ excinfo_ = (
+ ExceptionInfo.from_exc_info((type(e), e, e.__traceback__))
+ if e.__traceback__
+ else None
+ )
+ descr = "During handling of the above exception, another exception occurred:"
+ else:
+ e = None
+ repr_chain.reverse()
+ return ExceptionChainRepr(repr_chain)
+
+
+@attr.s(eq=False, auto_attribs=True)
+class TerminalRepr:
+ def __str__(self) -> str:
+ # FYI this is called from pytest-xdist's serialization of exception
+ # information.
+ io = StringIO()
+ tw = TerminalWriter(file=io)
+ self.toterminal(tw)
+ return io.getvalue().strip()
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__} instance at {id(self):0x}>"
+
+ def toterminal(self, tw: TerminalWriter) -> None:
+ raise NotImplementedError()
+
+
+# This class is abstract -- only subclasses are instantiated.
+@attr.s(eq=False)
+class ExceptionRepr(TerminalRepr):
+ # Provided by subclasses.
+ reprcrash: Optional["ReprFileLocation"]
+ reprtraceback: "ReprTraceback"
+
+ def __attrs_post_init__(self) -> None:
+ self.sections: List[Tuple[str, str, str]] = []
+
+ def addsection(self, name: str, content: str, sep: str = "-") -> None:
+ self.sections.append((name, content, sep))
+
+ def toterminal(self, tw: TerminalWriter) -> None:
+ for name, content, sep in self.sections:
+ tw.sep(sep, name)
+ tw.line(content)
+
+
+@attr.s(eq=False, auto_attribs=True)
+class ExceptionChainRepr(ExceptionRepr):
+ chain: Sequence[Tuple["ReprTraceback", Optional["ReprFileLocation"], Optional[str]]]
+
+ def __attrs_post_init__(self) -> None:
+ super().__attrs_post_init__()
+ # reprcrash and reprtraceback of the outermost (the newest) exception
+ # in the chain.
+ self.reprtraceback = self.chain[-1][0]
+ self.reprcrash = self.chain[-1][1]
+
+ def toterminal(self, tw: TerminalWriter) -> None:
+ for element in self.chain:
+ element[0].toterminal(tw)
+ if element[2] is not None:
+ tw.line("")
+ tw.line(element[2], yellow=True)
+ super().toterminal(tw)
+
+
+@attr.s(eq=False, auto_attribs=True)
+class ReprExceptionInfo(ExceptionRepr):
+ reprtraceback: "ReprTraceback"
+ reprcrash: "ReprFileLocation"
+
+ def toterminal(self, tw: TerminalWriter) -> None:
+ self.reprtraceback.toterminal(tw)
+ super().toterminal(tw)
+
+
+@attr.s(eq=False, auto_attribs=True)
+class ReprTraceback(TerminalRepr):
+ reprentries: Sequence[Union["ReprEntry", "ReprEntryNative"]]
+ extraline: Optional[str]
+ style: "_TracebackStyle"
+
+ entrysep: ClassVar = "_ "
+
+ def toterminal(self, tw: TerminalWriter) -> None:
+ # The entries might have different styles.
+ for i, entry in enumerate(self.reprentries):
+ if entry.style == "long":
+ tw.line("")
+ entry.toterminal(tw)
+ if i < len(self.reprentries) - 1:
+ next_entry = self.reprentries[i + 1]
+ if (
+ entry.style == "long"
+ or entry.style == "short"
+ and next_entry.style == "long"
+ ):
+ tw.sep(self.entrysep)
+
+ if self.extraline:
+ tw.line(self.extraline)
+
+
+class ReprTracebackNative(ReprTraceback):
+ def __init__(self, tblines: Sequence[str]) -> None:
+ self.style = "native"
+ self.reprentries = [ReprEntryNative(tblines)]
+ self.extraline = None
+
+
+@attr.s(eq=False, auto_attribs=True)
+class ReprEntryNative(TerminalRepr):
+ lines: Sequence[str]
+
+ style: ClassVar["_TracebackStyle"] = "native"
+
+ def toterminal(self, tw: TerminalWriter) -> None:
+ tw.write("".join(self.lines))
+
+
+@attr.s(eq=False, auto_attribs=True)
+class ReprEntry(TerminalRepr):
+ lines: Sequence[str]
+ reprfuncargs: Optional["ReprFuncArgs"]
+ reprlocals: Optional["ReprLocals"]
+ reprfileloc: Optional["ReprFileLocation"]
+ style: "_TracebackStyle"
+
+ def _write_entry_lines(self, tw: TerminalWriter) -> None:
+ """Write the source code portions of a list of traceback entries with syntax highlighting.
+
+ Usually entries are lines like these:
+
+ " x = 1"
+ "> assert x == 2"
+ "E assert 1 == 2"
+
+ This function takes care of rendering the "source" portions of it (the lines without
+ the "E" prefix) using syntax highlighting, taking care to not highlighting the ">"
+ character, as doing so might break line continuations.
+ """
+
+ if not self.lines:
+ return
+
+ # separate indents and source lines that are not failures: we want to
+ # highlight the code but not the indentation, which may contain markers
+ # such as "> assert 0"
+ fail_marker = f"{FormattedExcinfo.fail_marker} "
+ indent_size = len(fail_marker)
+ indents: List[str] = []
+ source_lines: List[str] = []
+ failure_lines: List[str] = []
+ for index, line in enumerate(self.lines):
+ is_failure_line = line.startswith(fail_marker)
+ if is_failure_line:
+ # from this point on all lines are considered part of the failure
+ failure_lines.extend(self.lines[index:])
+ break
+ else:
+ if self.style == "value":
+ source_lines.append(line)
+ else:
+ indents.append(line[:indent_size])
+ source_lines.append(line[indent_size:])
+
+ tw._write_source(source_lines, indents)
+
+ # failure lines are always completely red and bold
+ for line in failure_lines:
+ tw.line(line, bold=True, red=True)
+
+ def toterminal(self, tw: TerminalWriter) -> None:
+ if self.style == "short":
+ assert self.reprfileloc is not None
+ self.reprfileloc.toterminal(tw)
+ self._write_entry_lines(tw)
+ if self.reprlocals:
+ self.reprlocals.toterminal(tw, indent=" " * 8)
+ return
+
+ if self.reprfuncargs:
+ self.reprfuncargs.toterminal(tw)
+
+ self._write_entry_lines(tw)
+
+ if self.reprlocals:
+ tw.line("")
+ self.reprlocals.toterminal(tw)
+ if self.reprfileloc:
+ if self.lines:
+ tw.line("")
+ self.reprfileloc.toterminal(tw)
+
+ def __str__(self) -> str:
+ return "{}\n{}\n{}".format(
+ "\n".join(self.lines), self.reprlocals, self.reprfileloc
+ )
+
+
+@attr.s(eq=False, auto_attribs=True)
+class ReprFileLocation(TerminalRepr):
+ path: str = attr.ib(converter=str)
+ lineno: int
+ message: str
+
+ def toterminal(self, tw: TerminalWriter) -> None:
+ # Filename and lineno output for each entry, using an output format
+ # that most editors understand.
+ msg = self.message
+ i = msg.find("\n")
+ if i != -1:
+ msg = msg[:i]
+ tw.write(self.path, bold=True, red=True)
+ tw.line(f":{self.lineno}: {msg}")
+
+
+@attr.s(eq=False, auto_attribs=True)
+class ReprLocals(TerminalRepr):
+ lines: Sequence[str]
+
+ def toterminal(self, tw: TerminalWriter, indent="") -> None:
+ for line in self.lines:
+ tw.line(indent + line)
+
+
+@attr.s(eq=False, auto_attribs=True)
+class ReprFuncArgs(TerminalRepr):
+ args: Sequence[Tuple[str, object]]
+
+ def toterminal(self, tw: TerminalWriter) -> None:
+ if self.args:
+ linesofar = ""
+ for name, value in self.args:
+ ns = f"{name} = {value}"
+ if len(ns) + len(linesofar) + 2 > tw.fullwidth:
+ if linesofar:
+ tw.line(linesofar)
+ linesofar = ns
+ else:
+ if linesofar:
+ linesofar += ", " + ns
+ else:
+ linesofar = ns
+ if linesofar:
+ tw.line(linesofar)
+ tw.line("")
+
+
+def getfslineno(obj: object) -> Tuple[Union[str, Path], int]:
+ """Return source location (path, lineno) for the given object.
+
+ If the source cannot be determined, return ("", -1).
+
+ The line number is 0-based.
+ """
+ # xxx let decorators etc specify a sane ordering
+ # NOTE: this used to be done in _pytest.compat.getfslineno, initially added
+ # in 6ec13a2b9. It ("place_as") appears to be something very custom.
+ obj = get_real_func(obj)
+ if hasattr(obj, "place_as"):
+ obj = obj.place_as # type: ignore[attr-defined]
+
+ try:
+ code = Code.from_function(obj)
+ except TypeError:
+ try:
+ fn = inspect.getsourcefile(obj) or inspect.getfile(obj) # type: ignore[arg-type]
+ except TypeError:
+ return "", -1
+
+ fspath = fn and absolutepath(fn) or ""
+ lineno = -1
+ if fspath:
+ try:
+ _, lineno = findsource(obj)
+ except OSError:
+ pass
+ return fspath, lineno
+
+ return code.path, code.firstlineno
+
+
+# Relative paths that we use to filter traceback entries from appearing to the user;
+# see filter_traceback.
+# note: if we need to add more paths than what we have now we should probably use a list
+# for better maintenance.
+
+_PLUGGY_DIR = Path(pluggy.__file__.rstrip("oc"))
+# pluggy is either a package or a single module depending on the version
+if _PLUGGY_DIR.name == "__init__.py":
+ _PLUGGY_DIR = _PLUGGY_DIR.parent
+_PYTEST_DIR = Path(_pytest.__file__).parent
+
+
+def filter_traceback(entry: TracebackEntry) -> bool:
+ """Return True if a TracebackEntry instance should be included in tracebacks.
+
+ We hide traceback entries of:
+
+ * dynamically generated code (no source code to show for it);
+ * internal traceback from pytest or its internal libraries, py and pluggy.
+ """
+ # entry.path might sometimes return a str object when the entry
+ # points to dynamically generated code.
+ # See https://bitbucket.org/pytest-dev/py/issues/71.
+ raw_filename = entry.frame.code.raw.co_filename
+ is_generated = "<" in raw_filename and ">" in raw_filename
+ if is_generated:
+ return False
+
+ # entry.path might point to a non-existing file, in which case it will
+ # also return a str object. See #1133.
+ p = Path(entry.path)
+
+ parents = p.parents
+ if _PLUGGY_DIR in parents:
+ return False
+ if _PYTEST_DIR in parents:
+ return False
+
+ return True
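
The ExceptionInfo and FormattedExcinfo machinery above can be exercised end to end with a small driver. A minimal sketch, assuming the vendored package and its dependencies (attr, pluggy, and the rest of _pytest) are importable; the demo() function and its messages are illustrative only and are not part of the patch:

    import sys
    from _pytest._code.code import ExceptionInfo  # module added in this file

    def demo() -> None:
        try:
            assert (1 + 1) == 3, "arithmetic went wrong"
        except AssertionError:
            excinfo = ExceptionInfo.from_exc_info(sys.exc_info())
            # exconly(tryshort=True) strips the leading "AssertionError: ".
            print(excinfo.exconly(tryshort=True))
            # match() raises AssertionError itself if the pattern does not match.
            excinfo.match(r"arithmetic .* wrong")
            # getrepr() builds the multi-line report pytest prints for a failure.
            print(str(excinfo.getrepr(style="short", showlocals=True)))

    demo()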
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_code/source.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_code/source.py
new file mode 100644
index 0000000000..208cfb8003
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_code/source.py
@@ -0,0 +1,217 @@
+import ast
+import inspect
+import textwrap
+import tokenize
+import types
+import warnings
+from bisect import bisect_right
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import overload
+from typing import Tuple
+from typing import Union
+
+
+class Source:
+ """An immutable object holding a source code fragment.
+
+ When using Source(...), the source lines are deindented.
+ """
+
+ def __init__(self, obj: object = None) -> None:
+ if not obj:
+ self.lines: List[str] = []
+ elif isinstance(obj, Source):
+ self.lines = obj.lines
+ elif isinstance(obj, (tuple, list)):
+ self.lines = deindent(x.rstrip("\n") for x in obj)
+ elif isinstance(obj, str):
+ self.lines = deindent(obj.split("\n"))
+ else:
+ try:
+ rawcode = getrawcode(obj)
+ src = inspect.getsource(rawcode)
+ except TypeError:
+ src = inspect.getsource(obj) # type: ignore[arg-type]
+ self.lines = deindent(src.split("\n"))
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Source):
+ return NotImplemented
+ return self.lines == other.lines
+
+ # Ignore type because of https://github.com/python/mypy/issues/4266.
+ __hash__ = None # type: ignore
+
+ @overload
+ def __getitem__(self, key: int) -> str:
+ ...
+
+ @overload
+ def __getitem__(self, key: slice) -> "Source":
+ ...
+
+ def __getitem__(self, key: Union[int, slice]) -> Union[str, "Source"]:
+ if isinstance(key, int):
+ return self.lines[key]
+ else:
+ if key.step not in (None, 1):
+ raise IndexError("cannot slice a Source with a step")
+ newsource = Source()
+ newsource.lines = self.lines[key.start : key.stop]
+ return newsource
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self.lines)
+
+ def __len__(self) -> int:
+ return len(self.lines)
+
+ def strip(self) -> "Source":
+ """Return new Source object with trailing and leading blank lines removed."""
+ start, end = 0, len(self)
+ while start < end and not self.lines[start].strip():
+ start += 1
+ while end > start and not self.lines[end - 1].strip():
+ end -= 1
+ source = Source()
+ source.lines[:] = self.lines[start:end]
+ return source
+
+ def indent(self, indent: str = " " * 4) -> "Source":
+ """Return a copy of the source object with all lines indented by the
+ given indent-string."""
+ newsource = Source()
+ newsource.lines = [(indent + line) for line in self.lines]
+ return newsource
+
+ def getstatement(self, lineno: int) -> "Source":
+ """Return Source statement which contains the given linenumber
+ (counted from 0)."""
+ start, end = self.getstatementrange(lineno)
+ return self[start:end]
+
+ def getstatementrange(self, lineno: int) -> Tuple[int, int]:
+ """Return (start, end) tuple which spans the minimal statement region
+ which containing the given lineno."""
+ if not (0 <= lineno < len(self)):
+ raise IndexError("lineno out of range")
+ ast, start, end = getstatementrange_ast(lineno, self)
+ return start, end
+
+ def deindent(self) -> "Source":
+ """Return a new Source object deindented."""
+ newsource = Source()
+ newsource.lines[:] = deindent(self.lines)
+ return newsource
+
+ def __str__(self) -> str:
+ return "\n".join(self.lines)
+
+
+#
+# helper functions
+#
+
+
+def findsource(obj) -> Tuple[Optional[Source], int]:
+ try:
+ sourcelines, lineno = inspect.findsource(obj)
+ except Exception:
+ return None, -1
+ source = Source()
+ source.lines = [line.rstrip() for line in sourcelines]
+ return source, lineno
+
+
+def getrawcode(obj: object, trycall: bool = True) -> types.CodeType:
+ """Return code object for given function."""
+ try:
+ return obj.__code__ # type: ignore[attr-defined,no-any-return]
+ except AttributeError:
+ pass
+ if trycall:
+ call = getattr(obj, "__call__", None)
+ if call and not isinstance(obj, type):
+ return getrawcode(call, trycall=False)
+ raise TypeError(f"could not get code object for {obj!r}")
+
+
+def deindent(lines: Iterable[str]) -> List[str]:
+ return textwrap.dedent("\n".join(lines)).splitlines()
+
+
+def get_statement_startend2(lineno: int, node: ast.AST) -> Tuple[int, Optional[int]]:
+ # Flatten all statements and except handlers into one lineno-list.
+ # AST's line numbers start indexing at 1.
+ values: List[int] = []
+ for x in ast.walk(node):
+ if isinstance(x, (ast.stmt, ast.ExceptHandler)):
+ # Before Python 3.8, the lineno of a decorated class or function pointed at the decorator.
+ # Since Python 3.8, the lineno points to the class/def, so need to include the decorators.
+ if isinstance(x, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)):
+ for d in x.decorator_list:
+ values.append(d.lineno - 1)
+ values.append(x.lineno - 1)
+ for name in ("finalbody", "orelse"):
+ val: Optional[List[ast.stmt]] = getattr(x, name, None)
+ if val:
+ # Treat the finally/orelse part as its own statement.
+ values.append(val[0].lineno - 1 - 1)
+ values.sort()
+ insert_index = bisect_right(values, lineno)
+ start = values[insert_index - 1]
+ if insert_index >= len(values):
+ end = None
+ else:
+ end = values[insert_index]
+ return start, end
+
+
+def getstatementrange_ast(
+ lineno: int,
+ source: Source,
+ assertion: bool = False,
+ astnode: Optional[ast.AST] = None,
+) -> Tuple[ast.AST, int, int]:
+ if astnode is None:
+ content = str(source)
+ # See #4260:
+ # Don't produce duplicate warnings when compiling source to find AST.
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ astnode = ast.parse(content, "source", "exec")
+
+ start, end = get_statement_startend2(lineno, astnode)
+ # We need to correct the end:
+ # - ast-parsing strips comments
+ # - there might be empty lines
+ # - we might have lesser indented code blocks at the end
+ if end is None:
+ end = len(source.lines)
+
+ if end > start + 1:
+ # Make sure we don't span differently indented code blocks
+ # by using the BlockFinder helper which inspect.getsource() itself uses.
+ block_finder = inspect.BlockFinder()
+ # If we start with an indented line, put blockfinder to "started" mode.
+ block_finder.started = source.lines[start][0].isspace()
+ it = ((x + "\n") for x in source.lines[start:end])
+ try:
+ for tok in tokenize.generate_tokens(lambda: next(it)):
+ block_finder.tokeneater(*tok)
+ except (inspect.EndOfBlock, IndentationError):
+ end = block_finder.last + start
+ except Exception:
+ pass
+
+ # The end might still point to a comment or empty line, correct it.
+ while end:
+ line = source.lines[end - 1].lstrip()
+ if line.startswith("#") or not line:
+ end -= 1
+ else:
+ break
+ return astnode, start, end
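
For reference, a minimal sketch of how the Source helper above behaves, assuming the vendored module imports as _pytest._code.source; the example string and the values noted in the comments are illustrative:

    from _pytest._code.source import Source

    # Construct from a string; the input is split on newlines and,
    # if uniformly indented, deindented.
    src = Source("def f():\n    x = 1\n    return x + 1")
    print(len(src))                  # 3
    print(str(src.getstatement(2)))  # statement containing 0-based line 2
    print(str(src[0:2]))             # slicing returns a new Source object
    print(str(src.indent("  ")))     # every line re-indented by two spaces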
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/__init__.py
new file mode 100644
index 0000000000..db001e918c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/__init__.py
@@ -0,0 +1,8 @@
+from .terminalwriter import get_terminal_width
+from .terminalwriter import TerminalWriter
+
+
+__all__ = [
+ "TerminalWriter",
+ "get_terminal_width",
+]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/saferepr.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/saferepr.py
new file mode 100644
index 0000000000..e7ff5cab20
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/saferepr.py
@@ -0,0 +1,153 @@
+import pprint
+import reprlib
+from typing import Any
+from typing import Dict
+from typing import IO
+from typing import Optional
+
+
+def _try_repr_or_str(obj: object) -> str:
+ try:
+ return repr(obj)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except BaseException:
+ return f'{type(obj).__name__}("{obj}")'
+
+
+def _format_repr_exception(exc: BaseException, obj: object) -> str:
+ try:
+ exc_info = _try_repr_or_str(exc)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except BaseException as exc:
+ exc_info = f"unpresentable exception ({_try_repr_or_str(exc)})"
+ return "<[{} raised in repr()] {} object at 0x{:x}>".format(
+ exc_info, type(obj).__name__, id(obj)
+ )
+
+
+def _ellipsize(s: str, maxsize: int) -> str:
+ if len(s) > maxsize:
+ i = max(0, (maxsize - 3) // 2)
+ j = max(0, maxsize - 3 - i)
+ return s[:i] + "..." + s[len(s) - j :]
+ return s
+
+
+class SafeRepr(reprlib.Repr):
+ """
+ A ``reprlib.Repr`` subclass that limits the resulting size of repr() and includes
+ information on exceptions raised during the call.
+ """
+
+ def __init__(self, maxsize: Optional[int]) -> None:
+ """
+ :param maxsize:
+ If not None, will truncate the resulting repr to that specific size, using ellipsis
+ somewhere in the middle to hide the extra text.
+ If None, will not impose any size limits on the returned repr.
+ """
+ super().__init__()
+ # ``maxstring`` is used by the superclass, and needs to be an int; using a
+ # very large number in case maxsize is None, meaning we want to disable
+ # truncation.
+ self.maxstring = maxsize if maxsize is not None else 1_000_000_000
+ self.maxsize = maxsize
+
+ def repr(self, x: object) -> str:
+ try:
+ s = super().repr(x)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except BaseException as exc:
+ s = _format_repr_exception(exc, x)
+ if self.maxsize is not None:
+ s = _ellipsize(s, self.maxsize)
+ return s
+
+ def repr_instance(self, x: object, level: int) -> str:
+ try:
+ s = repr(x)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except BaseException as exc:
+ s = _format_repr_exception(exc, x)
+ if self.maxsize is not None:
+ s = _ellipsize(s, self.maxsize)
+ return s
+
+
+def safeformat(obj: object) -> str:
+ """Return a pretty printed string for the given object.
+
+ Failing __repr__ functions of user instances will be represented
+ with a short exception info.
+ """
+ try:
+ return pprint.pformat(obj)
+ except Exception as exc:
+ return _format_repr_exception(exc, obj)
+
+
+# Maximum size of overall repr of objects to display during assertion errors.
+DEFAULT_REPR_MAX_SIZE = 240
+
+
+def saferepr(obj: object, maxsize: Optional[int] = DEFAULT_REPR_MAX_SIZE) -> str:
+ """Return a size-limited safe repr-string for the given object.
+
+ Failing __repr__ functions of user instances will be represented
+ with a short exception info and 'saferepr' generally takes
+ care to never raise exceptions itself.
+
+ This function is a wrapper around the Repr/reprlib functionality of the
+ stdlib.
+ """
+ return SafeRepr(maxsize).repr(obj)
+
+
+class AlwaysDispatchingPrettyPrinter(pprint.PrettyPrinter):
+ """PrettyPrinter that always dispatches (regardless of width)."""
+
+ def _format(
+ self,
+ object: object,
+ stream: IO[str],
+ indent: int,
+ allowance: int,
+ context: Dict[int, Any],
+ level: int,
+ ) -> None:
+ # Type ignored because _dispatch is private.
+ p = self._dispatch.get(type(object).__repr__, None) # type: ignore[attr-defined]
+
+ objid = id(object)
+ if objid in context or p is None:
+ # Type ignored because _format is private.
+ super()._format( # type: ignore[misc]
+ object,
+ stream,
+ indent,
+ allowance,
+ context,
+ level,
+ )
+ return
+
+ context[objid] = 1
+ p(self, object, stream, indent, allowance, context, level + 1)
+ del context[objid]
+
+
+def _pformat_dispatch(
+ object: object,
+ indent: int = 1,
+ width: int = 80,
+ depth: Optional[int] = None,
+ *,
+ compact: bool = False,
+) -> str:
+ return AlwaysDispatchingPrettyPrinter(
+ indent=indent, width=width, depth=depth, compact=compact
+ ).pformat(object)
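
To make saferepr's two jobs concrete (limiting the repr size and surviving a broken __repr__), a small hedged example follows; the Broken class and the values shown are invented for illustration:

    from _pytest._io.saferepr import safeformat, saferepr

    class Broken:
        def __repr__(self) -> str:
            raise ValueError("boom")

    # A repr() that raises is reported instead of propagating, e.g.
    # "<[ValueError('boom') raised in repr()] Broken object at 0x...>"
    print(saferepr(Broken()))

    # Long reprs are ellipsized around the middle to respect maxsize.
    print(saferepr(list(range(1000)), maxsize=40))

    # safeformat() pretty-prints without a size limit, but is equally
    # defensive about exceptions raised by __repr__.
    print(safeformat({"numbers": list(range(5))}))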
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/terminalwriter.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/terminalwriter.py
new file mode 100644
index 0000000000..379035d858
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/terminalwriter.py
@@ -0,0 +1,233 @@
+"""Helper functions for writing to terminals and files."""
+import os
+import shutil
+import sys
+from typing import Optional
+from typing import Sequence
+from typing import TextIO
+
+from .wcwidth import wcswidth
+from _pytest.compat import final
+
+
+# This code was initially copied from py 1.8.1, file _io/terminalwriter.py.
+
+
+def get_terminal_width() -> int:
+ width, _ = shutil.get_terminal_size(fallback=(80, 24))
+
+ # The Windows get_terminal_size may be bogus; sanitize it a bit.
+ if width < 40:
+ width = 80
+
+ return width
+
+
+def should_do_markup(file: TextIO) -> bool:
+ if os.environ.get("PY_COLORS") == "1":
+ return True
+ if os.environ.get("PY_COLORS") == "0":
+ return False
+ if "NO_COLOR" in os.environ:
+ return False
+ if "FORCE_COLOR" in os.environ:
+ return True
+ return (
+ hasattr(file, "isatty") and file.isatty() and os.environ.get("TERM") != "dumb"
+ )
+
+
+@final
+class TerminalWriter:
+ _esctable = dict(
+ black=30,
+ red=31,
+ green=32,
+ yellow=33,
+ blue=34,
+ purple=35,
+ cyan=36,
+ white=37,
+ Black=40,
+ Red=41,
+ Green=42,
+ Yellow=43,
+ Blue=44,
+ Purple=45,
+ Cyan=46,
+ White=47,
+ bold=1,
+ light=2,
+ blink=5,
+ invert=7,
+ )
+
+ def __init__(self, file: Optional[TextIO] = None) -> None:
+ if file is None:
+ file = sys.stdout
+ if hasattr(file, "isatty") and file.isatty() and sys.platform == "win32":
+ try:
+ import colorama
+ except ImportError:
+ pass
+ else:
+ file = colorama.AnsiToWin32(file).stream
+ assert file is not None
+ self._file = file
+ self.hasmarkup = should_do_markup(file)
+ self._current_line = ""
+ self._terminal_width: Optional[int] = None
+ self.code_highlight = True
+
+ @property
+ def fullwidth(self) -> int:
+ if self._terminal_width is not None:
+ return self._terminal_width
+ return get_terminal_width()
+
+ @fullwidth.setter
+ def fullwidth(self, value: int) -> None:
+ self._terminal_width = value
+
+ @property
+ def width_of_current_line(self) -> int:
+ """Return an estimate of the width so far in the current line."""
+ return wcswidth(self._current_line)
+
+ def markup(self, text: str, **markup: bool) -> str:
+ for name in markup:
+ if name not in self._esctable:
+ raise ValueError(f"unknown markup: {name!r}")
+ if self.hasmarkup:
+ esc = [self._esctable[name] for name, on in markup.items() if on]
+ if esc:
+ text = "".join("\x1b[%sm" % cod for cod in esc) + text + "\x1b[0m"
+ return text
+
+ def sep(
+ self,
+ sepchar: str,
+ title: Optional[str] = None,
+ fullwidth: Optional[int] = None,
+ **markup: bool,
+ ) -> None:
+ if fullwidth is None:
+ fullwidth = self.fullwidth
+ # The goal is to have the line be as long as possible
+ # under the condition that len(line) <= fullwidth.
+ if sys.platform == "win32":
+ # If we print in the last column on windows we are on a
+ # new line but there is no way to verify/neutralize this
+ # (we may not know the exact line width).
+ # So let's be defensive to avoid empty lines in the output.
+ fullwidth -= 1
+ if title is not None:
+ # we want 2 + 2*len(fill) + len(title) <= fullwidth
+ # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth
+ # 2*len(sepchar)*N <= fullwidth - len(title) - 2
+ # N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
+ N = max((fullwidth - len(title) - 2) // (2 * len(sepchar)), 1)
+ fill = sepchar * N
+ line = f"{fill} {title} {fill}"
+ else:
+ # we want len(sepchar)*N <= fullwidth
+ # i.e. N <= fullwidth // len(sepchar)
+ line = sepchar * (fullwidth // len(sepchar))
+ # In some situations there is room for an extra sepchar at the right,
+ # in particular if we consider that with a sepchar like "_ " the
+ # trailing space is not important at the end of the line.
+ if len(line) + len(sepchar.rstrip()) <= fullwidth:
+ line += sepchar.rstrip()
+
+ self.line(line, **markup)
+
+ def write(self, msg: str, *, flush: bool = False, **markup: bool) -> None:
+ if msg:
+ current_line = msg.rsplit("\n", 1)[-1]
+ if "\n" in msg:
+ self._current_line = current_line
+ else:
+ self._current_line += current_line
+
+ msg = self.markup(msg, **markup)
+
+ try:
+ self._file.write(msg)
+ except UnicodeEncodeError:
+ # Some environments don't support printing general Unicode
+ # strings, due to misconfiguration or otherwise; in that case,
+ # print the string escaped to ASCII.
+ # When the Unicode situation improves we should consider
+ # letting the error propagate instead of masking it (see #7475
+ # for one brief attempt).
+ msg = msg.encode("unicode-escape").decode("ascii")
+ self._file.write(msg)
+
+ if flush:
+ self.flush()
+
+ def line(self, s: str = "", **markup: bool) -> None:
+ self.write(s, **markup)
+ self.write("\n")
+
+ def flush(self) -> None:
+ self._file.flush()
+
+ def _write_source(self, lines: Sequence[str], indents: Sequence[str] = ()) -> None:
+ """Write lines of source code possibly highlighted.
+
+ Keeping this private for now because the API is clunky. We should discuss how
+ to evolve the terminal writer so we can have more precise color support, for example
+ being able to write part of a line in one color and the rest in another, and so on.
+ """
+ if indents and len(indents) != len(lines):
+ raise ValueError(
+ "indents size ({}) should have same size as lines ({})".format(
+ len(indents), len(lines)
+ )
+ )
+ if not indents:
+ indents = [""] * len(lines)
+ source = "\n".join(lines)
+ new_lines = self._highlight(source).splitlines()
+ for indent, new_line in zip(indents, new_lines):
+ self.line(indent + new_line)
+
+ def _highlight(self, source: str) -> str:
+ """Highlight the given source code if we have markup support."""
+ from _pytest.config.exceptions import UsageError
+
+ if not self.hasmarkup or not self.code_highlight:
+ return source
+ try:
+ from pygments.formatters.terminal import TerminalFormatter
+ from pygments.lexers.python import PythonLexer
+ from pygments import highlight
+ import pygments.util
+ except ImportError:
+ return source
+ else:
+ try:
+ highlighted: str = highlight(
+ source,
+ PythonLexer(),
+ TerminalFormatter(
+ bg=os.getenv("PYTEST_THEME_MODE", "dark"),
+ style=os.getenv("PYTEST_THEME"),
+ ),
+ )
+ return highlighted
+ except pygments.util.ClassNotFound:
+ raise UsageError(
+ "PYTEST_THEME environment variable had an invalid value: '{}'. "
+ "Only valid pygment styles are allowed.".format(
+ os.getenv("PYTEST_THEME")
+ )
+ )
+ except pygments.util.OptionError:
+ raise UsageError(
+ "PYTEST_THEME_MODE environment variable had an invalid value: '{}'. "
+ "The only allowed values are 'dark' and 'light'.".format(
+ os.getenv("PYTEST_THEME_MODE")
+ )
+ )
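
A short, runnable sketch of TerminalWriter usage, writing into a StringIO so no real terminal is needed; markup keywords are accepted but become no-ops here because a StringIO is not a tty (the buffer and text are illustrative):

    import io
    from _pytest._io import TerminalWriter

    buf = io.StringIO()
    tw = TerminalWriter(file=buf)
    tw.fullwidth = 40                      # override the detected terminal width
    tw.sep("=", "demo section")            # centered title padded with "="
    tw.line("plain text")
    tw.line("would be red on a tty", red=True, bold=True)  # no-op markup here
    tw.write("current line width: ")
    tw.line(str(tw.width_of_current_line))

    print(buf.getvalue())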
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/wcwidth.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/wcwidth.py
new file mode 100644
index 0000000000..e5c7bf4d86
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_io/wcwidth.py
@@ -0,0 +1,55 @@
+import unicodedata
+from functools import lru_cache
+
+
+@lru_cache(100)
+def wcwidth(c: str) -> int:
+ """Determine how many columns are needed to display a character in a terminal.
+
+ Returns -1 if the character is not printable.
+ Returns 0, 1 or 2 for other characters.
+ """
+ o = ord(c)
+
+ # ASCII fast path.
+ if 0x20 <= o < 0x07F:
+ return 1
+
+ # Some Cf/Zp/Zl characters which should be zero-width.
+ if (
+ o == 0x0000
+ or 0x200B <= o <= 0x200F
+ or 0x2028 <= o <= 0x202E
+ or 0x2060 <= o <= 0x2063
+ ):
+ return 0
+
+ category = unicodedata.category(c)
+
+ # Control characters.
+ if category == "Cc":
+ return -1
+
+ # Combining characters with zero width.
+ if category in ("Me", "Mn"):
+ return 0
+
+ # Full/Wide east asian characters.
+ if unicodedata.east_asian_width(c) in ("F", "W"):
+ return 2
+
+ return 1
+
+
+def wcswidth(s: str) -> int:
+ """Determine how many columns are needed to display a string in a terminal.
+
+ Returns -1 if the string contains non-printable characters.
+ """
+ width = 0
+ for c in unicodedata.normalize("NFC", s):
+ wc = wcwidth(c)
+ if wc < 0:
+ return -1
+ width += wc
+ return width
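
The wcwidth helpers are self-contained, so the three cases distinguished above (ordinary, wide, and non-printable characters) can be checked directly; the expected values are noted in comments:

    from _pytest._io.wcwidth import wcswidth, wcwidth

    print(wcwidth("a"))        # 1: plain ASCII
    print(wcwidth("\u4f60"))   # 2: East Asian wide character
    print(wcwidth("\x07"))     # -1: control character, not printable
    print(wcswidth("ni\u4f60hao\u597d"))  # 9: mixed ASCII and wide characters
    print(wcswidth("tab\there"))          # -1: "\t" makes the string unprintable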
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_version.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_version.py
new file mode 100644
index 0000000000..5515abadad
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/_version.py
@@ -0,0 +1,5 @@
+# coding: utf-8
+# file generated by setuptools_scm
+# don't change, don't track in version control
+version = '7.0.1'
+version_tuple = (7, 0, 1)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/__init__.py
new file mode 100644
index 0000000000..480a26ad86
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/__init__.py
@@ -0,0 +1,181 @@
+"""Support for presenting detailed information in failing assertions."""
+import sys
+from typing import Any
+from typing import Generator
+from typing import List
+from typing import Optional
+from typing import TYPE_CHECKING
+
+from _pytest.assertion import rewrite
+from _pytest.assertion import truncate
+from _pytest.assertion import util
+from _pytest.assertion.rewrite import assertstate_key
+from _pytest.config import Config
+from _pytest.config import hookimpl
+from _pytest.config.argparsing import Parser
+from _pytest.nodes import Item
+
+if TYPE_CHECKING:
+ from _pytest.main import Session
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("debugconfig")
+ group.addoption(
+ "--assert",
+ action="store",
+ dest="assertmode",
+ choices=("rewrite", "plain"),
+ default="rewrite",
+ metavar="MODE",
+ help=(
+ "Control assertion debugging tools.\n"
+ "'plain' performs no assertion debugging.\n"
+ "'rewrite' (the default) rewrites assert statements in test modules"
+ " on import to provide assert expression information."
+ ),
+ )
+ parser.addini(
+ "enable_assertion_pass_hook",
+ type="bool",
+ default=False,
+ help="Enables the pytest_assertion_pass hook."
+ "Make sure to delete any previously generated pyc cache files.",
+ )
+
+
+def register_assert_rewrite(*names: str) -> None:
+ """Register one or more module names to be rewritten on import.
+
+ This function will make sure that this module or all modules inside
+ the package will get their assert statements rewritten.
+ Thus you should make sure to call this before the module is
+ actually imported, usually in your __init__.py if you are a plugin
+ using a package.
+
+ :raises TypeError: If the given module names are not strings.
+ """
+ for name in names:
+ if not isinstance(name, str):
+ msg = "expected module names as *args, got {0} instead" # type: ignore[unreachable]
+ raise TypeError(msg.format(repr(names)))
+ for hook in sys.meta_path:
+ if isinstance(hook, rewrite.AssertionRewritingHook):
+ importhook = hook
+ break
+ else:
+ # TODO(typing): Add a protocol for mark_rewrite() and use it
+ # for importhook and for PytestPluginManager.rewrite_hook.
+ importhook = DummyRewriteHook() # type: ignore
+ importhook.mark_rewrite(*names)
+
+
+class DummyRewriteHook:
+ """A no-op import hook for when rewriting is disabled."""
+
+ def mark_rewrite(self, *names: str) -> None:
+ pass
+
+
+class AssertionState:
+ """State for the assertion plugin."""
+
+ def __init__(self, config: Config, mode) -> None:
+ self.mode = mode
+ self.trace = config.trace.root.get("assertion")
+ self.hook: Optional[rewrite.AssertionRewritingHook] = None
+
+
+def install_importhook(config: Config) -> rewrite.AssertionRewritingHook:
+ """Try to install the rewrite hook, raise SystemError if it fails."""
+ config.stash[assertstate_key] = AssertionState(config, "rewrite")
+ config.stash[assertstate_key].hook = hook = rewrite.AssertionRewritingHook(config)
+ sys.meta_path.insert(0, hook)
+ config.stash[assertstate_key].trace("installed rewrite import hook")
+
+ def undo() -> None:
+ hook = config.stash[assertstate_key].hook
+ if hook is not None and hook in sys.meta_path:
+ sys.meta_path.remove(hook)
+
+ config.add_cleanup(undo)
+ return hook
+
+
+def pytest_collection(session: "Session") -> None:
+ # This hook is only called when test modules are collected
+ # so for example not in the managing process of pytest-xdist
+ # (which does not collect test modules).
+ assertstate = session.config.stash.get(assertstate_key, None)
+ if assertstate:
+ if assertstate.hook is not None:
+ assertstate.hook.set_session(session)
+
+
+@hookimpl(tryfirst=True, hookwrapper=True)
+def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:
+ """Setup the pytest_assertrepr_compare and pytest_assertion_pass hooks.
+
+ The rewrite module will use util._reprcompare if it exists to use custom
+ reporting via the pytest_assertrepr_compare hook. This sets up this custom
+ comparison for the test.
+ """
+
+ ihook = item.ihook
+
+ def callbinrepr(op, left: object, right: object) -> Optional[str]:
+ """Call the pytest_assertrepr_compare hook and prepare the result.
+
+ This uses the first result from the hook and then ensures the
+ following:
+ * Overly verbose explanations are truncated unless configured otherwise
+ (e.g. if running in verbose mode).
+ * Embedded newlines are escaped to help util.format_explanation()
+ later.
+ * If the rewrite mode is used embedded %-characters are replaced
+ to protect later % formatting.
+
+ The result can be formatted by util.format_explanation() for
+ pretty printing.
+ """
+ hook_result = ihook.pytest_assertrepr_compare(
+ config=item.config, op=op, left=left, right=right
+ )
+ for new_expl in hook_result:
+ if new_expl:
+ new_expl = truncate.truncate_if_required(new_expl, item)
+ new_expl = [line.replace("\n", "\\n") for line in new_expl]
+ res = "\n~".join(new_expl)
+ if item.config.getvalue("assertmode") == "rewrite":
+ res = res.replace("%", "%%")
+ return res
+ return None
+
+ saved_assert_hooks = util._reprcompare, util._assertion_pass
+ util._reprcompare = callbinrepr
+ util._config = item.config
+
+ if ihook.pytest_assertion_pass.get_hookimpls():
+
+ def call_assertion_pass_hook(lineno: int, orig: str, expl: str) -> None:
+ ihook.pytest_assertion_pass(item=item, lineno=lineno, orig=orig, expl=expl)
+
+ util._assertion_pass = call_assertion_pass_hook
+
+ yield
+
+ util._reprcompare, util._assertion_pass = saved_assert_hooks
+ util._config = None
+
+
+def pytest_sessionfinish(session: "Session") -> None:
+ assertstate = session.config.stash.get(assertstate_key, None)
+ if assertstate:
+ if assertstate.hook is not None:
+ assertstate.hook.set_session(None)
+
+
+def pytest_assertrepr_compare(
+ config: Config, op: str, left: Any, right: Any
+) -> Optional[List[str]]:
+ return util.assertrepr_compare(config=config, op=op, left=left, right=right)
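
As the register_assert_rewrite docstring above suggests, a plugin shipped as a package normally calls it from its own __init__.py before importing the submodules whose asserts should be rewritten. A hedged sketch with invented package and module names:

    # myplugin/__init__.py  (hypothetical plugin package)
    import pytest

    # Must run before the submodule is imported anywhere; otherwise its
    # asserts are compiled without rewriting.
    pytest.register_assert_rewrite("myplugin.helpers")

    from myplugin import helpers  # noqa: E402  -- import after registration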
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/rewrite.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/rewrite.py
new file mode 100644
index 0000000000..88ac6cab36
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/rewrite.py
@@ -0,0 +1,1136 @@
+"""Rewrite assertion AST to produce nice error messages."""
+import ast
+import errno
+import functools
+import importlib.abc
+import importlib.machinery
+import importlib.util
+import io
+import itertools
+import marshal
+import os
+import struct
+import sys
+import tokenize
+import types
+from pathlib import Path
+from pathlib import PurePath
+from typing import Callable
+from typing import Dict
+from typing import IO
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from _pytest._io.saferepr import DEFAULT_REPR_MAX_SIZE
+from _pytest._io.saferepr import saferepr
+from _pytest._version import version
+from _pytest.assertion import util
+from _pytest.assertion.util import ( # noqa: F401
+ format_explanation as _format_explanation,
+)
+from _pytest.config import Config
+from _pytest.main import Session
+from _pytest.pathlib import absolutepath
+from _pytest.pathlib import fnmatch_ex
+from _pytest.stash import StashKey
+
+if TYPE_CHECKING:
+ from _pytest.assertion import AssertionState
+
+
+assertstate_key = StashKey["AssertionState"]()
+
+
+# pytest caches rewritten pycs in pycache dirs
+PYTEST_TAG = f"{sys.implementation.cache_tag}-pytest-{version}"
+PYC_EXT = ".py" + (__debug__ and "c" or "o")
+PYC_TAIL = "." + PYTEST_TAG + PYC_EXT
+
+
+class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader):
+ """PEP302/PEP451 import hook which rewrites asserts."""
+
+ def __init__(self, config: Config) -> None:
+ self.config = config
+ try:
+ self.fnpats = config.getini("python_files")
+ except ValueError:
+ self.fnpats = ["test_*.py", "*_test.py"]
+ self.session: Optional[Session] = None
+ self._rewritten_names: Dict[str, Path] = {}
+ self._must_rewrite: Set[str] = set()
+ # flag to guard against trying to rewrite a pyc file while we are already writing another pyc file,
+ # which might result in infinite recursion (#3506)
+ self._writing_pyc = False
+ self._basenames_to_check_rewrite = {"conftest"}
+ self._marked_for_rewrite_cache: Dict[str, bool] = {}
+ self._session_paths_checked = False
+
+ def set_session(self, session: Optional[Session]) -> None:
+ self.session = session
+ self._session_paths_checked = False
+
+ # Indirection so we can mock calls to find_spec originated from the hook during testing
+ _find_spec = importlib.machinery.PathFinder.find_spec
+
+ def find_spec(
+ self,
+ name: str,
+ path: Optional[Sequence[Union[str, bytes]]] = None,
+ target: Optional[types.ModuleType] = None,
+ ) -> Optional[importlib.machinery.ModuleSpec]:
+ if self._writing_pyc:
+ return None
+ state = self.config.stash[assertstate_key]
+ if self._early_rewrite_bailout(name, state):
+ return None
+ state.trace("find_module called for: %s" % name)
+
+ # Type ignored because mypy is confused about the `self` binding here.
+ spec = self._find_spec(name, path) # type: ignore
+ if (
+ # the import machinery could not find a file to import
+ spec is None
+ # this is a namespace package (without `__init__.py`)
+ # there's nothing to rewrite there
+ # python3.6: `namespace`
+ # python3.7+: `None`
+ or spec.origin == "namespace"
+ or spec.origin is None
+ # we can only rewrite source files
+ or not isinstance(spec.loader, importlib.machinery.SourceFileLoader)
+ # if the file doesn't exist, we can't rewrite it
+ or not os.path.exists(spec.origin)
+ ):
+ return None
+ else:
+ fn = spec.origin
+
+ if not self._should_rewrite(name, fn, state):
+ return None
+
+ return importlib.util.spec_from_file_location(
+ name,
+ fn,
+ loader=self,
+ submodule_search_locations=spec.submodule_search_locations,
+ )
+
+ def create_module(
+ self, spec: importlib.machinery.ModuleSpec
+ ) -> Optional[types.ModuleType]:
+ return None # default behaviour is fine
+
+ def exec_module(self, module: types.ModuleType) -> None:
+ assert module.__spec__ is not None
+ assert module.__spec__.origin is not None
+ fn = Path(module.__spec__.origin)
+ state = self.config.stash[assertstate_key]
+
+ self._rewritten_names[module.__name__] = fn
+
+ # The requested module looks like a test file, so rewrite it. This is
+ # the most magical part of the process: load the source, rewrite the
+ # asserts, and load the rewritten source. We also cache the rewritten
+ # module code in a special pyc. We must be aware of the possibility of
+ # concurrent pytest processes rewriting and loading pycs. To avoid
+ # tricky race conditions, we maintain the following invariant: The
+ # cached pyc is always a complete, valid pyc. Operations on it must be
+ # atomic. POSIX's atomic rename comes in handy.
+ write = not sys.dont_write_bytecode
+ cache_dir = get_cache_dir(fn)
+ if write:
+ ok = try_makedirs(cache_dir)
+ if not ok:
+ write = False
+ state.trace(f"read only directory: {cache_dir}")
+
+ cache_name = fn.name[:-3] + PYC_TAIL
+ pyc = cache_dir / cache_name
+ # Notice that even if we're in a read-only directory, I'm going
+ # to check for a cached pyc. This may not be optimal...
+ co = _read_pyc(fn, pyc, state.trace)
+ if co is None:
+ state.trace(f"rewriting {fn!r}")
+ source_stat, co = _rewrite_test(fn, self.config)
+ if write:
+ self._writing_pyc = True
+ try:
+ _write_pyc(state, co, source_stat, pyc)
+ finally:
+ self._writing_pyc = False
+ else:
+ state.trace(f"found cached rewritten pyc for {fn}")
+ exec(co, module.__dict__)
+
+ def _early_rewrite_bailout(self, name: str, state: "AssertionState") -> bool:
+ """A fast way to get out of rewriting modules.
+
+ Profiling has shown that the call to PathFinder.find_spec (inside of
+ the find_spec from this class) is a major slowdown, so, this method
+ tries to filter what we're sure won't be rewritten before getting to
+ it.
+ """
+ if self.session is not None and not self._session_paths_checked:
+ self._session_paths_checked = True
+ for initial_path in self.session._initialpaths:
+ # Turn something like c:/projects/my_project/path.py into
+ # ['c:', 'projects', 'my_project', 'path.py']
+ parts = str(initial_path).split(os.path.sep)
+ # add 'path' to basenames to be checked.
+ self._basenames_to_check_rewrite.add(os.path.splitext(parts[-1])[0])
+
+ # Note: conftest is already in _basenames_to_check_rewrite by default.
+ parts = name.split(".")
+ if parts[-1] in self._basenames_to_check_rewrite:
+ return False
+
+ # For matching the name it must be as if it was a filename.
+ path = PurePath(os.path.sep.join(parts) + ".py")
+
+ for pat in self.fnpats:
+ # if the pattern contains subdirectories ("tests/**.py" for example) we can't bail out based
+ # on the name alone because we need to match against the full path
+ if os.path.dirname(pat):
+ return False
+ if fnmatch_ex(pat, path):
+ return False
+
+ if self._is_marked_for_rewrite(name, state):
+ return False
+
+ state.trace(f"early skip of rewriting module: {name}")
+ return True
+
+ def _should_rewrite(self, name: str, fn: str, state: "AssertionState") -> bool:
+ # always rewrite conftest files
+ if os.path.basename(fn) == "conftest.py":
+ state.trace(f"rewriting conftest file: {fn!r}")
+ return True
+
+ if self.session is not None:
+ if self.session.isinitpath(absolutepath(fn)):
+ state.trace(f"matched test file (was specified on cmdline): {fn!r}")
+ return True
+
+ # modules not passed explicitly on the command line are only
+ # rewritten if they match the naming convention for test files
+ fn_path = PurePath(fn)
+ for pat in self.fnpats:
+ if fnmatch_ex(pat, fn_path):
+ state.trace(f"matched test file {fn!r}")
+ return True
+
+ return self._is_marked_for_rewrite(name, state)
+
+ def _is_marked_for_rewrite(self, name: str, state: "AssertionState") -> bool:
+ try:
+ return self._marked_for_rewrite_cache[name]
+ except KeyError:
+ for marked in self._must_rewrite:
+ if name == marked or name.startswith(marked + "."):
+ state.trace(f"matched marked file {name!r} (from {marked!r})")
+ self._marked_for_rewrite_cache[name] = True
+ return True
+
+ self._marked_for_rewrite_cache[name] = False
+ return False
+
+ def mark_rewrite(self, *names: str) -> None:
+ """Mark import names as needing to be rewritten.
+
+ The named module or package as well as any nested modules will
+ be rewritten on import.
+ """
+ already_imported = (
+ set(names).intersection(sys.modules).difference(self._rewritten_names)
+ )
+ for name in already_imported:
+ mod = sys.modules[name]
+ if not AssertionRewriter.is_rewrite_disabled(
+ mod.__doc__ or ""
+ ) and not isinstance(mod.__loader__, type(self)):
+ self._warn_already_imported(name)
+ self._must_rewrite.update(names)
+ self._marked_for_rewrite_cache.clear()
+
+ def _warn_already_imported(self, name: str) -> None:
+ from _pytest.warning_types import PytestAssertRewriteWarning
+
+ self.config.issue_config_time_warning(
+ PytestAssertRewriteWarning(
+ "Module already imported so cannot be rewritten: %s" % name
+ ),
+ stacklevel=5,
+ )
+
+ def get_data(self, pathname: Union[str, bytes]) -> bytes:
+ """Optional PEP302 get_data API."""
+ with open(pathname, "rb") as f:
+ return f.read()
+
+ if sys.version_info >= (3, 10):
+
+ def get_resource_reader(self, name: str) -> importlib.abc.TraversableResources: # type: ignore
+ if sys.version_info < (3, 11):
+ from importlib.readers import FileReader
+ else:
+ from importlib.resources.readers import FileReader
+
+ return FileReader(types.SimpleNamespace(path=self._rewritten_names[name]))
+
+
+def _write_pyc_fp(
+ fp: IO[bytes], source_stat: os.stat_result, co: types.CodeType
+) -> None:
+ # Technically, we don't have to have the same pyc format as
+ # (C)Python, since these "pycs" should never be seen by builtin
+ # import. However, there's little reason to deviate.
+ fp.write(importlib.util.MAGIC_NUMBER)
+ # https://www.python.org/dev/peps/pep-0552/
+ if sys.version_info >= (3, 7):
+ flags = b"\x00\x00\x00\x00"
+ fp.write(flags)
+ # as of now, bytecode header expects 32-bit numbers for size and mtime (#4903)
+ mtime = int(source_stat.st_mtime) & 0xFFFFFFFF
+ size = source_stat.st_size & 0xFFFFFFFF
+ # "<LL" stands for 2 unsigned longs, little-endian.
+ fp.write(struct.pack("<LL", mtime, size))
+ fp.write(marshal.dumps(co))
+
+
+if sys.platform == "win32":
+ from atomicwrites import atomic_write
+
+ def _write_pyc(
+ state: "AssertionState",
+ co: types.CodeType,
+ source_stat: os.stat_result,
+ pyc: Path,
+ ) -> bool:
+ try:
+ with atomic_write(os.fspath(pyc), mode="wb", overwrite=True) as fp:
+ _write_pyc_fp(fp, source_stat, co)
+ except OSError as e:
+ state.trace(f"error writing pyc file at {pyc}: {e}")
+ # we ignore any failure to write the cache file
+ # there are many reasons, permission-denied, pycache dir being a
+ # file etc.
+ return False
+ return True
+
+
+else:
+
+ def _write_pyc(
+ state: "AssertionState",
+ co: types.CodeType,
+ source_stat: os.stat_result,
+ pyc: Path,
+ ) -> bool:
+ proc_pyc = f"{pyc}.{os.getpid()}"
+ try:
+ fp = open(proc_pyc, "wb")
+ except OSError as e:
+ state.trace(f"error writing pyc file at {proc_pyc}: errno={e.errno}")
+ return False
+
+ try:
+ _write_pyc_fp(fp, source_stat, co)
+ os.rename(proc_pyc, pyc)
+ except OSError as e:
+ state.trace(f"error writing pyc file at {pyc}: {e}")
+ # we ignore any failure to write the cache file
+ # there are many reasons, permission-denied, pycache dir being a
+ # file etc.
+ return False
+ finally:
+ fp.close()
+ return True
+
+
+def _rewrite_test(fn: Path, config: Config) -> Tuple[os.stat_result, types.CodeType]:
+ """Read and rewrite *fn* and return the code object."""
+ stat = os.stat(fn)
+ source = fn.read_bytes()
+ strfn = str(fn)
+ tree = ast.parse(source, filename=strfn)
+ rewrite_asserts(tree, source, strfn, config)
+ co = compile(tree, strfn, "exec", dont_inherit=True)
+ return stat, co
+
+
+def _read_pyc(
+ source: Path, pyc: Path, trace: Callable[[str], None] = lambda x: None
+) -> Optional[types.CodeType]:
+ """Possibly read a pytest pyc containing rewritten code.
+
+ Return rewritten code if successful or None if not.
+ """
+ try:
+ fp = open(pyc, "rb")
+ except OSError:
+ return None
+ with fp:
+ # https://www.python.org/dev/peps/pep-0552/
+ has_flags = sys.version_info >= (3, 7)
+ try:
+ stat_result = os.stat(source)
+ mtime = int(stat_result.st_mtime)
+ size = stat_result.st_size
+ data = fp.read(16 if has_flags else 12)
+ except OSError as e:
+ trace(f"_read_pyc({source}): OSError {e}")
+ return None
+ # Check for invalid or out of date pyc file.
+ if len(data) != (16 if has_flags else 12):
+ trace("_read_pyc(%s): invalid pyc (too short)" % source)
+ return None
+ if data[:4] != importlib.util.MAGIC_NUMBER:
+ trace("_read_pyc(%s): invalid pyc (bad magic number)" % source)
+ return None
+ if has_flags and data[4:8] != b"\x00\x00\x00\x00":
+ trace("_read_pyc(%s): invalid pyc (unsupported flags)" % source)
+ return None
+ mtime_data = data[8 if has_flags else 4 : 12 if has_flags else 8]
+ if int.from_bytes(mtime_data, "little") != mtime & 0xFFFFFFFF:
+ trace("_read_pyc(%s): out of date" % source)
+ return None
+ size_data = data[12 if has_flags else 8 : 16 if has_flags else 12]
+ if int.from_bytes(size_data, "little") != size & 0xFFFFFFFF:
+ trace("_read_pyc(%s): invalid pyc (incorrect size)" % source)
+ return None
+ try:
+ co = marshal.load(fp)
+ except Exception as e:
+ trace(f"_read_pyc({source}): marshal.load error {e}")
+ return None
+ if not isinstance(co, types.CodeType):
+ trace("_read_pyc(%s): not a code object" % source)
+ return None
+ return co
+
+
+def rewrite_asserts(
+ mod: ast.Module,
+ source: bytes,
+ module_path: Optional[str] = None,
+ config: Optional[Config] = None,
+) -> None:
+ """Rewrite the assert statements in mod."""
+ AssertionRewriter(module_path, config, source).run(mod)
+
+
+def _saferepr(obj: object) -> str:
+ r"""Get a safe repr of an object for assertion error messages.
+
+ The assertion formatting (util.format_explanation()) requires
+ newlines to be escaped since they are a special character for it.
+    Normally assertion.util.format_explanation() does this, but a
+    custom repr may itself contain one of the special escape
+    sequences; in particular, '\n{' and '\n}' are likely to be present
+    in JSON reprs.
+ """
+ maxsize = _get_maxsize_for_saferepr(util._config)
+ return saferepr(obj, maxsize=maxsize).replace("\n", "\\n")
+
+
+def _get_maxsize_for_saferepr(config: Optional[Config]) -> Optional[int]:
+ """Get `maxsize` configuration for saferepr based on the given config object."""
+ verbosity = config.getoption("verbose") if config is not None else 0
+ if verbosity >= 2:
+ return None
+ if verbosity >= 1:
+ return DEFAULT_REPR_MAX_SIZE * 10
+ return DEFAULT_REPR_MAX_SIZE
+
+
+def _format_assertmsg(obj: object) -> str:
+ r"""Format the custom assertion message given.
+
+ For strings this simply replaces newlines with '\n~' so that
+ util.format_explanation() will preserve them instead of escaping
+ newlines. For other objects saferepr() is used first.
+ """
+ # reprlib appears to have a bug which means that if a string
+    # contains a newline it gets escaped, whereas if an object has a
+    # .__repr__() which contains newlines it does not get escaped.
+    # In either case we want to preserve the newline.
+ replaces = [("\n", "\n~"), ("%", "%%")]
+ if not isinstance(obj, str):
+ obj = saferepr(obj)
+ replaces.append(("\\n", "\n~"))
+
+ for r1, r2 in replaces:
+ obj = obj.replace(r1, r2)
+
+ return obj
+
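+def _format_assertmsg_example() -> None:
+    # Editor's note: a hedged illustration of the helper above; this function
+    # is made up and never called. Newlines become "\n~" so they survive
+    # formatting, and "%" is escaped for the later %-formatting step.
+    assert _format_assertmsg("two\nlines") == "two\n~lines"
+    assert _format_assertmsg("50%") == "50%%"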
+
+def _should_repr_global_name(obj: object) -> bool:
+ if callable(obj):
+ return False
+
+ try:
+ return not hasattr(obj, "__name__")
+ except Exception:
+ return True
+
+
+def _format_boolop(explanations: Iterable[str], is_or: bool) -> str:
+ explanation = "(" + (is_or and " or " or " and ").join(explanations) + ")"
+ return explanation.replace("%", "%%")
+
+
+def _call_reprcompare(
+ ops: Sequence[str],
+ results: Sequence[bool],
+ expls: Sequence[str],
+ each_obj: Sequence[object],
+) -> str:
+ for i, res, expl in zip(range(len(ops)), results, expls):
+ try:
+ done = not res
+ except Exception:
+ done = True
+ if done:
+ break
+ if util._reprcompare is not None:
+ custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1])
+ if custom is not None:
+ return custom
+ return expl
+
+
+def _call_assertion_pass(lineno: int, orig: str, expl: str) -> None:
+ if util._assertion_pass is not None:
+ util._assertion_pass(lineno, orig, expl)
+
+
+def _check_if_assertion_pass_impl() -> bool:
+ """Check if any plugins implement the pytest_assertion_pass hook
+    in order to avoid generating explanations unnecessarily (which might be expensive)."""
+ return True if util._assertion_pass else False
+
+
+UNARY_MAP = {ast.Not: "not %s", ast.Invert: "~%s", ast.USub: "-%s", ast.UAdd: "+%s"}
+
+BINOP_MAP = {
+ ast.BitOr: "|",
+ ast.BitXor: "^",
+ ast.BitAnd: "&",
+ ast.LShift: "<<",
+ ast.RShift: ">>",
+ ast.Add: "+",
+ ast.Sub: "-",
+ ast.Mult: "*",
+ ast.Div: "/",
+ ast.FloorDiv: "//",
+ ast.Mod: "%%", # escaped for string formatting
+ ast.Eq: "==",
+ ast.NotEq: "!=",
+ ast.Lt: "<",
+ ast.LtE: "<=",
+ ast.Gt: ">",
+ ast.GtE: ">=",
+ ast.Pow: "**",
+ ast.Is: "is",
+ ast.IsNot: "is not",
+ ast.In: "in",
+ ast.NotIn: "not in",
+ ast.MatMult: "@",
+}
+
+
+def traverse_node(node: ast.AST) -> Iterator[ast.AST]:
+ """Recursively yield node and all its children in depth-first order."""
+ yield node
+ for child in ast.iter_child_nodes(node):
+ yield from traverse_node(child)
+
+
+@functools.lru_cache(maxsize=1)
+def _get_assertion_exprs(src: bytes) -> Dict[int, str]:
+ """Return a mapping from {lineno: "assertion test expression"}."""
+ ret: Dict[int, str] = {}
+
+ depth = 0
+ lines: List[str] = []
+ assert_lineno: Optional[int] = None
+ seen_lines: Set[int] = set()
+
+ def _write_and_reset() -> None:
+ nonlocal depth, lines, assert_lineno, seen_lines
+ assert assert_lineno is not None
+ ret[assert_lineno] = "".join(lines).rstrip().rstrip("\\")
+ depth = 0
+ lines = []
+ assert_lineno = None
+ seen_lines = set()
+
+ tokens = tokenize.tokenize(io.BytesIO(src).readline)
+ for tp, source, (lineno, offset), _, line in tokens:
+ if tp == tokenize.NAME and source == "assert":
+ assert_lineno = lineno
+ elif assert_lineno is not None:
+ # keep track of depth for the assert-message `,` lookup
+ if tp == tokenize.OP and source in "([{":
+ depth += 1
+ elif tp == tokenize.OP and source in ")]}":
+ depth -= 1
+
+ if not lines:
+ lines.append(line[offset:])
+ seen_lines.add(lineno)
+ # a non-nested comma separates the expression from the message
+ elif depth == 0 and tp == tokenize.OP and source == ",":
+ # one line assert with message
+ if lineno in seen_lines and len(lines) == 1:
+ offset_in_trimmed = offset + len(lines[-1]) - len(line)
+ lines[-1] = lines[-1][:offset_in_trimmed]
+ # multi-line assert with message
+ elif lineno in seen_lines:
+ lines[-1] = lines[-1][:offset]
+                # multi-line assert with escaped newline before message
+ else:
+ lines.append(line[:offset])
+ _write_and_reset()
+ elif tp in {tokenize.NEWLINE, tokenize.ENDMARKER}:
+ _write_and_reset()
+ elif lines and lineno not in seen_lines:
+ lines.append(line)
+ seen_lines.add(lineno)
+
+ return ret
+
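+def _get_assertion_exprs_example() -> None:
+    # Editor's note: a hedged illustration of the helper above; this function
+    # is made up and never called. For a one-line assert with a message, only
+    # the test expression (not the message) is recorded:
+    assert _get_assertion_exprs(b"assert x == 1, 'oops'\n") == {1: "x == 1"}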
+
+class AssertionRewriter(ast.NodeVisitor):
+ """Assertion rewriting implementation.
+
+ The main entrypoint is to call .run() with an ast.Module instance,
+ this will then find all the assert statements and rewrite them to
+ provide intermediate values and a detailed assertion error. See
+ http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html
+ for an overview of how this works.
+
+ The entry point here is .run() which will iterate over all the
+ statements in an ast.Module and for each ast.Assert statement it
+ finds call .visit() with it. Then .visit_Assert() takes over and
+ is responsible for creating new ast statements to replace the
+ original assert statement: it rewrites the test of an assertion
+ to provide intermediate values and replace it with an if statement
+ which raises an assertion error with a detailed explanation in
+ case the expression is false and calls pytest_assertion_pass hook
+ if expression is true.
+
+ For this .visit_Assert() uses the visitor pattern to visit all the
+ AST nodes of the ast.Assert.test field, each visit call returning
+    an AST node and the corresponding explanation string. During this,
+    state is kept in several instance attributes:
+
+ :statements: All the AST statements which will replace the assert
+ statement.
+
+ :variables: This is populated by .variable() with each variable
+ used by the statements so that they can all be set to None at
+ the end of the statements.
+
+ :variable_counter: Counter to create new unique variables needed
+ by statements. Variables are created using .variable() and
+ have the form of "@py_assert0".
+
+ :expl_stmts: The AST statements which will be executed to get
+ data from the assertion. This is the code which will construct
+ the detailed assertion message that is used in the AssertionError
+ or for the pytest_assertion_pass hook.
+
+ :explanation_specifiers: A dict filled by .explanation_param()
+ with %-formatting placeholders and their corresponding
+ expressions to use in the building of an assertion message.
+ This is used by .pop_format_context() to build a message.
+
+ :stack: A stack of the explanation_specifiers dicts maintained by
+       .push_format_context() and .pop_format_context() which allows
+       building another %-formatted string while one is already being built.
+
+ This state is reset on every new assert statement visited and used
+ by the other visitors.
+ """
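+
+    # Editor's note, not upstream pytest code: roughly speaking, a statement
+    # such as
+    #
+    #     assert a == b
+    #
+    # is rewritten into statements of the shape
+    #
+    #     @py_assertN = a == b
+    #     if not @py_assertN:
+    #         raise AssertionError(@pytest_ar._format_explanation(...))
+    #     @py_assertN = None
+    #
+    # where the "@py_..." names are the synthetic variables described above,
+    # and the real output also records intermediate values used to build the
+    # explanation message.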
+
+ def __init__(
+ self, module_path: Optional[str], config: Optional[Config], source: bytes
+ ) -> None:
+ super().__init__()
+ self.module_path = module_path
+ self.config = config
+ if config is not None:
+ self.enable_assertion_pass_hook = config.getini(
+ "enable_assertion_pass_hook"
+ )
+ else:
+ self.enable_assertion_pass_hook = False
+ self.source = source
+
+ def run(self, mod: ast.Module) -> None:
+ """Find all assert statements in *mod* and rewrite them."""
+ if not mod.body:
+ # Nothing to do.
+ return
+
+ # We'll insert some special imports at the top of the module, but after any
+ # docstrings and __future__ imports, so first figure out where that is.
+ doc = getattr(mod, "docstring", None)
+ expect_docstring = doc is None
+ if doc is not None and self.is_rewrite_disabled(doc):
+ return
+ pos = 0
+ lineno = 1
+ for item in mod.body:
+ if (
+ expect_docstring
+ and isinstance(item, ast.Expr)
+ and isinstance(item.value, ast.Str)
+ ):
+ doc = item.value.s
+ if self.is_rewrite_disabled(doc):
+ return
+ expect_docstring = False
+ elif (
+ isinstance(item, ast.ImportFrom)
+ and item.level == 0
+ and item.module == "__future__"
+ ):
+ pass
+ else:
+ break
+ pos += 1
+ # Special case: for a decorated function, set the lineno to that of the
+ # first decorator, not the `def`. Issue #4984.
+ if isinstance(item, ast.FunctionDef) and item.decorator_list:
+ lineno = item.decorator_list[0].lineno
+ else:
+ lineno = item.lineno
+ # Now actually insert the special imports.
+ if sys.version_info >= (3, 10):
+ aliases = [
+ ast.alias("builtins", "@py_builtins", lineno=lineno, col_offset=0),
+ ast.alias(
+ "_pytest.assertion.rewrite",
+ "@pytest_ar",
+ lineno=lineno,
+ col_offset=0,
+ ),
+ ]
+ else:
+ aliases = [
+ ast.alias("builtins", "@py_builtins"),
+ ast.alias("_pytest.assertion.rewrite", "@pytest_ar"),
+ ]
+ imports = [
+ ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases
+ ]
+ mod.body[pos:pos] = imports
+
+ # Collect asserts.
+ nodes: List[ast.AST] = [mod]
+ while nodes:
+ node = nodes.pop()
+ for name, field in ast.iter_fields(node):
+ if isinstance(field, list):
+ new: List[ast.AST] = []
+ for i, child in enumerate(field):
+ if isinstance(child, ast.Assert):
+ # Transform assert.
+ new.extend(self.visit(child))
+ else:
+ new.append(child)
+ if isinstance(child, ast.AST):
+ nodes.append(child)
+ setattr(node, name, new)
+ elif (
+ isinstance(field, ast.AST)
+ # Don't recurse into expressions as they can't contain
+ # asserts.
+ and not isinstance(field, ast.expr)
+ ):
+ nodes.append(field)
+
+ @staticmethod
+ def is_rewrite_disabled(docstring: str) -> bool:
+ return "PYTEST_DONT_REWRITE" in docstring
+
+ def variable(self) -> str:
+ """Get a new variable."""
+ # Use a character invalid in python identifiers to avoid clashing.
+ name = "@py_assert" + str(next(self.variable_counter))
+ self.variables.append(name)
+ return name
+
+ def assign(self, expr: ast.expr) -> ast.Name:
+ """Give *expr* a name."""
+ name = self.variable()
+ self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr))
+ return ast.Name(name, ast.Load())
+
+ def display(self, expr: ast.expr) -> ast.expr:
+ """Call saferepr on the expression."""
+ return self.helper("_saferepr", expr)
+
+ def helper(self, name: str, *args: ast.expr) -> ast.expr:
+ """Call a helper in this module."""
+ py_name = ast.Name("@pytest_ar", ast.Load())
+ attr = ast.Attribute(py_name, name, ast.Load())
+ return ast.Call(attr, list(args), [])
+
+ def builtin(self, name: str) -> ast.Attribute:
+ """Return the builtin called *name*."""
+ builtin_name = ast.Name("@py_builtins", ast.Load())
+ return ast.Attribute(builtin_name, name, ast.Load())
+
+ def explanation_param(self, expr: ast.expr) -> str:
+ """Return a new named %-formatting placeholder for expr.
+
+ This creates a %-formatting placeholder for expr in the
+ current formatting context, e.g. ``%(py0)s``. The placeholder
+        and expr are placed in the current format context so that they
+        can be used on the next call to .pop_format_context().
+ """
+ specifier = "py" + str(next(self.variable_counter))
+ self.explanation_specifiers[specifier] = expr
+ return "%(" + specifier + ")s"
+
+ def push_format_context(self) -> None:
+ """Create a new formatting context.
+
+ The format context is used for when an explanation wants to
+ have a variable value formatted in the assertion message. In
+ this case the value required can be added using
+ .explanation_param(). Finally .pop_format_context() is used
+ to format a string of %-formatted values as added by
+ .explanation_param().
+ """
+ self.explanation_specifiers: Dict[str, ast.expr] = {}
+ self.stack.append(self.explanation_specifiers)
+
+ def pop_format_context(self, expl_expr: ast.expr) -> ast.Name:
+ """Format the %-formatted string with current format context.
+
+        The expl_expr should be a str ast.expr instance constructed from
+ the %-placeholders created by .explanation_param(). This will
+ add the required code to format said string to .expl_stmts and
+ return the ast.Name instance of the formatted string.
+ """
+ current = self.stack.pop()
+ if self.stack:
+ self.explanation_specifiers = self.stack[-1]
+ keys = [ast.Str(key) for key in current.keys()]
+ format_dict = ast.Dict(keys, list(current.values()))
+ form = ast.BinOp(expl_expr, ast.Mod(), format_dict)
+ name = "@py_format" + str(next(self.variable_counter))
+ if self.enable_assertion_pass_hook:
+ self.format_variables.append(name)
+ self.expl_stmts.append(ast.Assign([ast.Name(name, ast.Store())], form))
+ return ast.Name(name, ast.Load())
+
+ def generic_visit(self, node: ast.AST) -> Tuple[ast.Name, str]:
+ """Handle expressions we don't have custom code for."""
+ assert isinstance(node, ast.expr)
+ res = self.assign(node)
+ return res, self.explanation_param(self.display(res))
+
+ def visit_Assert(self, assert_: ast.Assert) -> List[ast.stmt]:
+ """Return the AST statements to replace the ast.Assert instance.
+
+ This rewrites the test of an assertion to provide
+ intermediate values and replace it with an if statement which
+ raises an assertion error with a detailed explanation in case
+ the expression is false.
+ """
+ if isinstance(assert_.test, ast.Tuple) and len(assert_.test.elts) >= 1:
+ from _pytest.warning_types import PytestAssertRewriteWarning
+ import warnings
+
+ # TODO: This assert should not be needed.
+ assert self.module_path is not None
+ warnings.warn_explicit(
+ PytestAssertRewriteWarning(
+ "assertion is always true, perhaps remove parentheses?"
+ ),
+ category=None,
+ filename=self.module_path,
+ lineno=assert_.lineno,
+ )
+
+ self.statements: List[ast.stmt] = []
+ self.variables: List[str] = []
+ self.variable_counter = itertools.count()
+
+ if self.enable_assertion_pass_hook:
+ self.format_variables: List[str] = []
+
+ self.stack: List[Dict[str, ast.expr]] = []
+ self.expl_stmts: List[ast.stmt] = []
+ self.push_format_context()
+ # Rewrite assert into a bunch of statements.
+ top_condition, explanation = self.visit(assert_.test)
+
+ negation = ast.UnaryOp(ast.Not(), top_condition)
+
+ if self.enable_assertion_pass_hook: # Experimental pytest_assertion_pass hook
+ msg = self.pop_format_context(ast.Str(explanation))
+
+ # Failed
+ if assert_.msg:
+ assertmsg = self.helper("_format_assertmsg", assert_.msg)
+ gluestr = "\n>assert "
+ else:
+ assertmsg = ast.Str("")
+ gluestr = "assert "
+ err_explanation = ast.BinOp(ast.Str(gluestr), ast.Add(), msg)
+ err_msg = ast.BinOp(assertmsg, ast.Add(), err_explanation)
+ err_name = ast.Name("AssertionError", ast.Load())
+ fmt = self.helper("_format_explanation", err_msg)
+ exc = ast.Call(err_name, [fmt], [])
+ raise_ = ast.Raise(exc, None)
+ statements_fail = []
+ statements_fail.extend(self.expl_stmts)
+ statements_fail.append(raise_)
+
+ # Passed
+ fmt_pass = self.helper("_format_explanation", msg)
+ orig = _get_assertion_exprs(self.source)[assert_.lineno]
+ hook_call_pass = ast.Expr(
+ self.helper(
+ "_call_assertion_pass",
+ ast.Num(assert_.lineno),
+ ast.Str(orig),
+ fmt_pass,
+ )
+ )
+ # If any hooks implement assert_pass hook
+ hook_impl_test = ast.If(
+ self.helper("_check_if_assertion_pass_impl"),
+ self.expl_stmts + [hook_call_pass],
+ [],
+ )
+ statements_pass = [hook_impl_test]
+
+ # Test for assertion condition
+ main_test = ast.If(negation, statements_fail, statements_pass)
+ self.statements.append(main_test)
+ if self.format_variables:
+ variables = [
+ ast.Name(name, ast.Store()) for name in self.format_variables
+ ]
+ clear_format = ast.Assign(variables, ast.NameConstant(None))
+ self.statements.append(clear_format)
+
+ else: # Original assertion rewriting
+ # Create failure message.
+ body = self.expl_stmts
+ self.statements.append(ast.If(negation, body, []))
+ if assert_.msg:
+ assertmsg = self.helper("_format_assertmsg", assert_.msg)
+ explanation = "\n>assert " + explanation
+ else:
+ assertmsg = ast.Str("")
+ explanation = "assert " + explanation
+ template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation))
+ msg = self.pop_format_context(template)
+ fmt = self.helper("_format_explanation", msg)
+ err_name = ast.Name("AssertionError", ast.Load())
+ exc = ast.Call(err_name, [fmt], [])
+ raise_ = ast.Raise(exc, None)
+
+ body.append(raise_)
+
+ # Clear temporary variables by setting them to None.
+ if self.variables:
+ variables = [ast.Name(name, ast.Store()) for name in self.variables]
+ clear = ast.Assign(variables, ast.NameConstant(None))
+ self.statements.append(clear)
+ # Fix locations (line numbers/column offsets).
+ for stmt in self.statements:
+ for node in traverse_node(stmt):
+ ast.copy_location(node, assert_)
+ return self.statements
+
+ def visit_Name(self, name: ast.Name) -> Tuple[ast.Name, str]:
+ # Display the repr of the name if it's a local variable or
+ # _should_repr_global_name() thinks it's acceptable.
+ locs = ast.Call(self.builtin("locals"), [], [])
+ inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])
+ dorepr = self.helper("_should_repr_global_name", name)
+ test = ast.BoolOp(ast.Or(), [inlocs, dorepr])
+ expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
+ return name, self.explanation_param(expr)
+
+ def visit_BoolOp(self, boolop: ast.BoolOp) -> Tuple[ast.Name, str]:
+ res_var = self.variable()
+ expl_list = self.assign(ast.List([], ast.Load()))
+ app = ast.Attribute(expl_list, "append", ast.Load())
+ is_or = int(isinstance(boolop.op, ast.Or))
+ body = save = self.statements
+ fail_save = self.expl_stmts
+ levels = len(boolop.values) - 1
+ self.push_format_context()
+ # Process each operand, short-circuiting if needed.
+ for i, v in enumerate(boolop.values):
+ if i:
+ fail_inner: List[ast.stmt] = []
+ # cond is set in a prior loop iteration below
+ self.expl_stmts.append(ast.If(cond, fail_inner, [])) # noqa
+ self.expl_stmts = fail_inner
+ self.push_format_context()
+ res, expl = self.visit(v)
+ body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
+ expl_format = self.pop_format_context(ast.Str(expl))
+ call = ast.Call(app, [expl_format], [])
+ self.expl_stmts.append(ast.Expr(call))
+ if i < levels:
+ cond: ast.expr = res
+ if is_or:
+ cond = ast.UnaryOp(ast.Not(), cond)
+ inner: List[ast.stmt] = []
+ self.statements.append(ast.If(cond, inner, []))
+ self.statements = body = inner
+ self.statements = save
+ self.expl_stmts = fail_save
+ expl_template = self.helper("_format_boolop", expl_list, ast.Num(is_or))
+ expl = self.pop_format_context(expl_template)
+ return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
+
+ def visit_UnaryOp(self, unary: ast.UnaryOp) -> Tuple[ast.Name, str]:
+ pattern = UNARY_MAP[unary.op.__class__]
+ operand_res, operand_expl = self.visit(unary.operand)
+ res = self.assign(ast.UnaryOp(unary.op, operand_res))
+ return res, pattern % (operand_expl,)
+
+ def visit_BinOp(self, binop: ast.BinOp) -> Tuple[ast.Name, str]:
+ symbol = BINOP_MAP[binop.op.__class__]
+ left_expr, left_expl = self.visit(binop.left)
+ right_expr, right_expl = self.visit(binop.right)
+ explanation = f"({left_expl} {symbol} {right_expl})"
+ res = self.assign(ast.BinOp(left_expr, binop.op, right_expr))
+ return res, explanation
+
+ def visit_Call(self, call: ast.Call) -> Tuple[ast.Name, str]:
+ new_func, func_expl = self.visit(call.func)
+ arg_expls = []
+ new_args = []
+ new_kwargs = []
+ for arg in call.args:
+ res, expl = self.visit(arg)
+ arg_expls.append(expl)
+ new_args.append(res)
+ for keyword in call.keywords:
+ res, expl = self.visit(keyword.value)
+ new_kwargs.append(ast.keyword(keyword.arg, res))
+ if keyword.arg:
+ arg_expls.append(keyword.arg + "=" + expl)
+ else: # **args have `arg` keywords with an .arg of None
+ arg_expls.append("**" + expl)
+
+ expl = "{}({})".format(func_expl, ", ".join(arg_expls))
+ new_call = ast.Call(new_func, new_args, new_kwargs)
+ res = self.assign(new_call)
+ res_expl = self.explanation_param(self.display(res))
+ outer_expl = f"{res_expl}\n{{{res_expl} = {expl}\n}}"
+ return res, outer_expl
+
+ def visit_Starred(self, starred: ast.Starred) -> Tuple[ast.Starred, str]:
+ # A Starred node can appear in a function call.
+ res, expl = self.visit(starred.value)
+ new_starred = ast.Starred(res, starred.ctx)
+ return new_starred, "*" + expl
+
+ def visit_Attribute(self, attr: ast.Attribute) -> Tuple[ast.Name, str]:
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ value, value_expl = self.visit(attr.value)
+ res = self.assign(ast.Attribute(value, attr.attr, ast.Load()))
+ res_expl = self.explanation_param(self.display(res))
+ pat = "%s\n{%s = %s.%s\n}"
+ expl = pat % (res_expl, res_expl, value_expl, attr.attr)
+ return res, expl
+
+ def visit_Compare(self, comp: ast.Compare) -> Tuple[ast.expr, str]:
+ self.push_format_context()
+ left_res, left_expl = self.visit(comp.left)
+ if isinstance(comp.left, (ast.Compare, ast.BoolOp)):
+ left_expl = f"({left_expl})"
+ res_variables = [self.variable() for i in range(len(comp.ops))]
+ load_names = [ast.Name(v, ast.Load()) for v in res_variables]
+ store_names = [ast.Name(v, ast.Store()) for v in res_variables]
+ it = zip(range(len(comp.ops)), comp.ops, comp.comparators)
+ expls = []
+ syms = []
+ results = [left_res]
+ for i, op, next_operand in it:
+ next_res, next_expl = self.visit(next_operand)
+ if isinstance(next_operand, (ast.Compare, ast.BoolOp)):
+ next_expl = f"({next_expl})"
+ results.append(next_res)
+ sym = BINOP_MAP[op.__class__]
+ syms.append(ast.Str(sym))
+ expl = f"{left_expl} {sym} {next_expl}"
+ expls.append(ast.Str(expl))
+ res_expr = ast.Compare(left_res, [op], [next_res])
+ self.statements.append(ast.Assign([store_names[i]], res_expr))
+ left_res, left_expl = next_res, next_expl
+        # Use _pytest.assertion.util._reprcompare if that's available.
+ expl_call = self.helper(
+ "_call_reprcompare",
+ ast.Tuple(syms, ast.Load()),
+ ast.Tuple(load_names, ast.Load()),
+ ast.Tuple(expls, ast.Load()),
+ ast.Tuple(results, ast.Load()),
+ )
+ if len(comp.ops) > 1:
+ res: ast.expr = ast.BoolOp(ast.And(), load_names)
+ else:
+ res = load_names[0]
+ return res, self.explanation_param(self.pop_format_context(expl_call))
+
+
+def try_makedirs(cache_dir: Path) -> bool:
+    """Attempt to create the given directory and its sub-directories.
+
+ Returns True if successful or if it already exists.
+ """
+ try:
+ os.makedirs(cache_dir, exist_ok=True)
+ except (FileNotFoundError, NotADirectoryError, FileExistsError):
+ # One of the path components was not a directory:
+ # - we're in a zip file
+ # - it is a file
+ return False
+ except PermissionError:
+ return False
+ except OSError as e:
+ # as of now, EROFS doesn't have an equivalent OSError-subclass
+ if e.errno == errno.EROFS:
+ return False
+ raise
+ return True
+
+
+def get_cache_dir(file_path: Path) -> Path:
+ """Return the cache directory to write .pyc files for the given .py file path."""
+ if sys.version_info >= (3, 8) and sys.pycache_prefix:
+ # given:
+ # prefix = '/tmp/pycs'
+ # path = '/home/user/proj/test_app.py'
+ # we want:
+ # '/tmp/pycs/home/user/proj'
+ return Path(sys.pycache_prefix) / Path(*file_path.parts[1:-1])
+ else:
+ # classic pycache directory
+ return file_path.parent / "__pycache__"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/truncate.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/truncate.py
new file mode 100644
index 0000000000..ce148dca09
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/truncate.py
@@ -0,0 +1,94 @@
+"""Utilities for truncating assertion output.
+
+Current default behaviour is to truncate assertion explanations at
+~8 terminal lines, unless running in "-vv" mode or running on CI.
+"""
+from typing import List
+from typing import Optional
+
+from _pytest.assertion import util
+from _pytest.nodes import Item
+
+
+DEFAULT_MAX_LINES = 8
+DEFAULT_MAX_CHARS = 8 * 80
+USAGE_MSG = "use '-vv' to show"
+
+
+def truncate_if_required(
+ explanation: List[str], item: Item, max_length: Optional[int] = None
+) -> List[str]:
+ """Truncate this assertion explanation if the given test item is eligible."""
+ if _should_truncate_item(item):
+ return _truncate_explanation(explanation)
+ return explanation
+
+
+def _should_truncate_item(item: Item) -> bool:
+ """Whether or not this test item is eligible for truncation."""
+ verbose = item.config.option.verbose
+ return verbose < 2 and not util.running_on_ci()
+
+
+def _truncate_explanation(
+ input_lines: List[str],
+ max_lines: Optional[int] = None,
+ max_chars: Optional[int] = None,
+) -> List[str]:
+    """Truncate the given list of strings that make up the assertion explanation.
+
+    Truncates to either 8 lines or 640 characters, whichever the input reaches
+ first. The remaining lines will be replaced by a usage message.
+ """
+
+ if max_lines is None:
+ max_lines = DEFAULT_MAX_LINES
+ if max_chars is None:
+ max_chars = DEFAULT_MAX_CHARS
+
+ # Check if truncation required
+ input_char_count = len("".join(input_lines))
+ if len(input_lines) <= max_lines and input_char_count <= max_chars:
+ return input_lines
+
+ # Truncate first to max_lines, and then truncate to max_chars if max_chars
+ # is exceeded.
+ truncated_explanation = input_lines[:max_lines]
+ truncated_explanation = _truncate_by_char_count(truncated_explanation, max_chars)
+
+ # Add ellipsis to final line
+ truncated_explanation[-1] = truncated_explanation[-1] + "..."
+
+ # Append useful message to explanation
+ truncated_line_count = len(input_lines) - len(truncated_explanation)
+ truncated_line_count += 1 # Account for the part-truncated final line
+ msg = "...Full output truncated"
+ if truncated_line_count == 1:
+ msg += f" ({truncated_line_count} line hidden)"
+ else:
+ msg += f" ({truncated_line_count} lines hidden)"
+ msg += f", {USAGE_MSG}"
+ truncated_explanation.extend(["", str(msg)])
+ return truncated_explanation
+
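+def _truncate_explanation_example() -> None:
+    # Editor's note: a hedged illustration of the helper above; this function
+    # is made up and never called. With the defaults, a 20-line explanation
+    # keeps 8 lines, marks the last kept line with "..." and reports the rest
+    # as hidden.
+    truncated = _truncate_explanation(["line %d" % i for i in range(20)])
+    assert len(truncated) == 10
+    assert truncated[7] == "line 7..."
+    assert truncated[-1].endswith(USAGE_MSG)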
+
+def _truncate_by_char_count(input_lines: List[str], max_chars: int) -> List[str]:
+ # Check if truncation required
+ if len("".join(input_lines)) <= max_chars:
+ return input_lines
+
+ # Find point at which input length exceeds total allowed length
+ iterated_char_count = 0
+ for iterated_index, input_line in enumerate(input_lines):
+ if iterated_char_count + len(input_line) > max_chars:
+ break
+ iterated_char_count += len(input_line)
+
+ # Create truncated explanation with modified final line
+ truncated_result = input_lines[:iterated_index]
+ final_line = input_lines[iterated_index]
+ if final_line:
+ final_line_truncate_point = max_chars - iterated_char_count
+ final_line = final_line[:final_line_truncate_point]
+ truncated_result.append(final_line)
+ return truncated_result
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/util.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/util.py
new file mode 100644
index 0000000000..19f1089c20
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/assertion/util.py
@@ -0,0 +1,498 @@
+"""Utilities for assertion debugging."""
+import collections.abc
+import os
+import pprint
+from typing import AbstractSet
+from typing import Any
+from typing import Callable
+from typing import Iterable
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+
+import _pytest._code
+from _pytest import outcomes
+from _pytest._io.saferepr import _pformat_dispatch
+from _pytest._io.saferepr import safeformat
+from _pytest._io.saferepr import saferepr
+from _pytest.config import Config
+
+# The _reprcompare attribute on the util module is used by the new assertion
+# interpretation code and assertion rewriter to detect this plugin was
+# loaded and in turn call the hooks defined here as part of the
+# DebugInterpreter.
+_reprcompare: Optional[Callable[[str, object, object], Optional[str]]] = None
+
+# Works similarly as _reprcompare attribute. Is populated with the hook call
+# when pytest_runtest_setup is called.
+_assertion_pass: Optional[Callable[[int, str, str], None]] = None
+
+# Config object which is assigned during pytest_runtest_protocol.
+_config: Optional[Config] = None
+
+
+def format_explanation(explanation: str) -> str:
+ r"""Format an explanation.
+
+ Normally all embedded newlines are escaped, however there are
+ three exceptions: \n{, \n} and \n~. The first two are intended
+    three exceptions: \n{, \n} and \n~. The first two are intended to
+    cover nested explanations; see function and attribute explanations
+    for examples (.visit_Call(), .visit_Attribute()). The last one is
+ displaying diffs.
+ """
+ lines = _split_explanation(explanation)
+ result = _format_lines(lines)
+ return "\n".join(result)
+
+
+def _split_explanation(explanation: str) -> List[str]:
+ r"""Return a list of individual lines in the explanation.
+
+ This will return a list of lines split on '\n{', '\n}' and '\n~'.
+ Any other newlines will be escaped and appear in the line as the
+ literal '\n' characters.
+ """
+ raw_lines = (explanation or "").split("\n")
+ lines = [raw_lines[0]]
+ for values in raw_lines[1:]:
+ if values and values[0] in ["{", "}", "~", ">"]:
+ lines.append(values)
+ else:
+ lines[-1] += "\\n" + values
+ return lines
+
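+def _split_explanation_example() -> None:
+    # Editor's note: a hedged illustration of the helper above; this function
+    # is made up and never called. '\n{', '\n}' and '\n~' start new logical
+    # lines, while any other newline is kept escaped in the previous line.
+    assert _split_explanation("assert x\n{x = f()\n}") == ["assert x", "{x = f()", "}"]
+    assert _split_explanation("a\nb") == ["a\\nb"]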
+
+def _format_lines(lines: Sequence[str]) -> List[str]:
+ """Format the individual lines.
+
+ This will replace the '{', '}' and '~' characters of our mini formatting
+ language with the proper 'where ...', 'and ...' and ' + ...' text, taking
+ care of indentation along the way.
+
+ Return a list of formatted lines.
+ """
+ result = list(lines[:1])
+ stack = [0]
+ stackcnt = [0]
+ for line in lines[1:]:
+ if line.startswith("{"):
+ if stackcnt[-1]:
+ s = "and "
+ else:
+ s = "where "
+ stack.append(len(result))
+ stackcnt[-1] += 1
+ stackcnt.append(0)
+ result.append(" +" + " " * (len(stack) - 1) + s + line[1:])
+ elif line.startswith("}"):
+ stack.pop()
+ stackcnt.pop()
+ result[stack[-1]] += line[1:]
+ else:
+ assert line[0] in ["~", ">"]
+ stack[-1] += 1
+ indent = len(stack) if line.startswith("~") else len(stack) - 1
+ result.append(" " * indent + line[1:])
+ assert len(stack) == 1
+ return result
+
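+def _format_explanation_example() -> None:
+    # Editor's note: a hedged illustration of the two helpers above; this
+    # function is made up and never called. A '\n{ ... \n}' block turns into
+    # an indented "where ..." continuation line.
+    assert format_explanation("assert x\n{x = f()\n}") == "assert x\n + where x = f()"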
+
+def issequence(x: Any) -> bool:
+ return isinstance(x, collections.abc.Sequence) and not isinstance(x, str)
+
+
+def istext(x: Any) -> bool:
+ return isinstance(x, str)
+
+
+def isdict(x: Any) -> bool:
+ return isinstance(x, dict)
+
+
+def isset(x: Any) -> bool:
+ return isinstance(x, (set, frozenset))
+
+
+def isnamedtuple(obj: Any) -> bool:
+ return isinstance(obj, tuple) and getattr(obj, "_fields", None) is not None
+
+
+def isdatacls(obj: Any) -> bool:
+ return getattr(obj, "__dataclass_fields__", None) is not None
+
+
+def isattrs(obj: Any) -> bool:
+ return getattr(obj, "__attrs_attrs__", None) is not None
+
+
+def isiterable(obj: Any) -> bool:
+ try:
+ iter(obj)
+ return not istext(obj)
+ except TypeError:
+ return False
+
+
+def assertrepr_compare(config, op: str, left: Any, right: Any) -> Optional[List[str]]:
+ """Return specialised explanations for some operators/operands."""
+ verbose = config.getoption("verbose")
+ if verbose > 1:
+ left_repr = safeformat(left)
+ right_repr = safeformat(right)
+ else:
+ # XXX: "15 chars indentation" is wrong
+ # ("E AssertionError: assert "); should use term width.
+ maxsize = (
+ 80 - 15 - len(op) - 2
+ ) // 2 # 15 chars indentation, 1 space around op
+ left_repr = saferepr(left, maxsize=maxsize)
+ right_repr = saferepr(right, maxsize=maxsize)
+
+ summary = f"{left_repr} {op} {right_repr}"
+
+ explanation = None
+ try:
+ if op == "==":
+ explanation = _compare_eq_any(left, right, verbose)
+ elif op == "not in":
+ if istext(left) and istext(right):
+ explanation = _notin_text(left, right, verbose)
+ except outcomes.Exit:
+ raise
+ except Exception:
+ explanation = [
+ "(pytest_assertion plugin: representation of details failed: {}.".format(
+ _pytest._code.ExceptionInfo.from_current()._getreprcrash()
+ ),
+ " Probably an object has a faulty __repr__.)",
+ ]
+
+ if not explanation:
+ return None
+
+ return [summary] + explanation
+
+
+def _compare_eq_any(left: Any, right: Any, verbose: int = 0) -> List[str]:
+ explanation = []
+ if istext(left) and istext(right):
+ explanation = _diff_text(left, right, verbose)
+ else:
+ from _pytest.python_api import ApproxBase
+
+ if isinstance(left, ApproxBase) or isinstance(right, ApproxBase):
+            # Although the usual argument order is obtained == expected, this handles both orderings.
+ approx_side = left if isinstance(left, ApproxBase) else right
+ other_side = right if isinstance(left, ApproxBase) else left
+
+ explanation = approx_side._repr_compare(other_side)
+ elif type(left) == type(right) and (
+ isdatacls(left) or isattrs(left) or isnamedtuple(left)
+ ):
+ # Note: unlike dataclasses/attrs, namedtuples compare only the
+ # field values, not the type or field names. But this branch
+ # intentionally only handles the same-type case, which was often
+ # used in older code bases before dataclasses/attrs were available.
+ explanation = _compare_eq_cls(left, right, verbose)
+ elif issequence(left) and issequence(right):
+ explanation = _compare_eq_sequence(left, right, verbose)
+ elif isset(left) and isset(right):
+ explanation = _compare_eq_set(left, right, verbose)
+ elif isdict(left) and isdict(right):
+ explanation = _compare_eq_dict(left, right, verbose)
+ elif verbose > 0:
+ explanation = _compare_eq_verbose(left, right)
+
+ if isiterable(left) and isiterable(right):
+ expl = _compare_eq_iterable(left, right, verbose)
+ explanation.extend(expl)
+
+ return explanation
+
+
+def _diff_text(left: str, right: str, verbose: int = 0) -> List[str]:
+ """Return the explanation for the diff between text.
+
+ Unless --verbose is used this will skip leading and trailing
+ characters which are identical to keep the diff minimal.
+ """
+ from difflib import ndiff
+
+ explanation: List[str] = []
+
+ if verbose < 1:
+ i = 0 # just in case left or right has zero length
+ for i in range(min(len(left), len(right))):
+ if left[i] != right[i]:
+ break
+ if i > 42:
+ i -= 10 # Provide some context
+ explanation = [
+ "Skipping %s identical leading characters in diff, use -v to show" % i
+ ]
+ left = left[i:]
+ right = right[i:]
+ if len(left) == len(right):
+ for i in range(len(left)):
+ if left[-i] != right[-i]:
+ break
+ if i > 42:
+ i -= 10 # Provide some context
+ explanation += [
+ "Skipping {} identical trailing "
+ "characters in diff, use -v to show".format(i)
+ ]
+ left = left[:-i]
+ right = right[:-i]
+ keepends = True
+ if left.isspace() or right.isspace():
+ left = repr(str(left))
+ right = repr(str(right))
+ explanation += ["Strings contain only whitespace, escaping them using repr()"]
+ # "right" is the expected base against which we compare "left",
+ # see https://github.com/pytest-dev/pytest/issues/3333
+ explanation += [
+ line.strip("\n")
+ for line in ndiff(right.splitlines(keepends), left.splitlines(keepends))
+ ]
+ return explanation
+
+
+def _compare_eq_verbose(left: Any, right: Any) -> List[str]:
+ keepends = True
+ left_lines = repr(left).splitlines(keepends)
+ right_lines = repr(right).splitlines(keepends)
+
+ explanation: List[str] = []
+ explanation += ["+" + line for line in left_lines]
+ explanation += ["-" + line for line in right_lines]
+
+ return explanation
+
+
+def _surrounding_parens_on_own_lines(lines: List[str]) -> None:
+ """Move opening/closing parenthesis/bracket to own lines."""
+ opening = lines[0][:1]
+ if opening in ["(", "[", "{"]:
+ lines[0] = " " + lines[0][1:]
+ lines[:] = [opening] + lines
+ closing = lines[-1][-1:]
+ if closing in [")", "]", "}"]:
+ lines[-1] = lines[-1][:-1] + ","
+ lines[:] = lines + [closing]
+
+
+def _compare_eq_iterable(
+ left: Iterable[Any], right: Iterable[Any], verbose: int = 0
+) -> List[str]:
+ if not verbose and not running_on_ci():
+ return ["Use -v to get the full diff"]
+    # dynamic import to speed up pytest
+ import difflib
+
+ left_formatting = pprint.pformat(left).splitlines()
+ right_formatting = pprint.pformat(right).splitlines()
+
+ # Re-format for different output lengths.
+ lines_left = len(left_formatting)
+ lines_right = len(right_formatting)
+ if lines_left != lines_right:
+ left_formatting = _pformat_dispatch(left).splitlines()
+ right_formatting = _pformat_dispatch(right).splitlines()
+
+ if lines_left > 1 or lines_right > 1:
+ _surrounding_parens_on_own_lines(left_formatting)
+ _surrounding_parens_on_own_lines(right_formatting)
+
+ explanation = ["Full diff:"]
+ # "right" is the expected base against which we compare "left",
+ # see https://github.com/pytest-dev/pytest/issues/3333
+ explanation.extend(
+ line.rstrip() for line in difflib.ndiff(right_formatting, left_formatting)
+ )
+ return explanation
+
+
+def _compare_eq_sequence(
+ left: Sequence[Any], right: Sequence[Any], verbose: int = 0
+) -> List[str]:
+ comparing_bytes = isinstance(left, bytes) and isinstance(right, bytes)
+ explanation: List[str] = []
+ len_left = len(left)
+ len_right = len(right)
+ for i in range(min(len_left, len_right)):
+ if left[i] != right[i]:
+ if comparing_bytes:
+ # when comparing bytes, we want to see their ascii representation
+ # instead of their numeric values (#5260)
+ # using a slice gives us the ascii representation:
+ # >>> s = b'foo'
+ # >>> s[0]
+ # 102
+ # >>> s[0:1]
+ # b'f'
+ left_value = left[i : i + 1]
+ right_value = right[i : i + 1]
+ else:
+ left_value = left[i]
+ right_value = right[i]
+
+ explanation += [f"At index {i} diff: {left_value!r} != {right_value!r}"]
+ break
+
+ if comparing_bytes:
+ # when comparing bytes, it doesn't help to show the "sides contain one or more
+ # items" longer explanation, so skip it
+
+ return explanation
+
+ len_diff = len_left - len_right
+ if len_diff:
+ if len_diff > 0:
+ dir_with_more = "Left"
+ extra = saferepr(left[len_right])
+ else:
+ len_diff = 0 - len_diff
+ dir_with_more = "Right"
+ extra = saferepr(right[len_left])
+
+ if len_diff == 1:
+ explanation += [f"{dir_with_more} contains one more item: {extra}"]
+ else:
+ explanation += [
+ "%s contains %d more items, first extra item: %s"
+ % (dir_with_more, len_diff, extra)
+ ]
+ return explanation
+
+
+def _compare_eq_set(
+ left: AbstractSet[Any], right: AbstractSet[Any], verbose: int = 0
+) -> List[str]:
+ explanation = []
+ diff_left = left - right
+ diff_right = right - left
+ if diff_left:
+ explanation.append("Extra items in the left set:")
+ for item in diff_left:
+ explanation.append(saferepr(item))
+ if diff_right:
+ explanation.append("Extra items in the right set:")
+ for item in diff_right:
+ explanation.append(saferepr(item))
+ return explanation
+
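+def _compare_eq_set_example() -> None:
+    # Editor's note: a hedged illustration of the helper above; this function
+    # is made up and never called. Items unique to each side are listed under
+    # a matching heading.
+    assert _compare_eq_set({1, 2}, {2, 3}) == [
+        "Extra items in the left set:",
+        "1",
+        "Extra items in the right set:",
+        "3",
+    ]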
+
+def _compare_eq_dict(
+ left: Mapping[Any, Any], right: Mapping[Any, Any], verbose: int = 0
+) -> List[str]:
+ explanation: List[str] = []
+ set_left = set(left)
+ set_right = set(right)
+ common = set_left.intersection(set_right)
+ same = {k: left[k] for k in common if left[k] == right[k]}
+ if same and verbose < 2:
+ explanation += ["Omitting %s identical items, use -vv to show" % len(same)]
+ elif same:
+ explanation += ["Common items:"]
+ explanation += pprint.pformat(same).splitlines()
+ diff = {k for k in common if left[k] != right[k]}
+ if diff:
+ explanation += ["Differing items:"]
+ for k in diff:
+ explanation += [saferepr({k: left[k]}) + " != " + saferepr({k: right[k]})]
+ extra_left = set_left - set_right
+ len_extra_left = len(extra_left)
+ if len_extra_left:
+ explanation.append(
+ "Left contains %d more item%s:"
+ % (len_extra_left, "" if len_extra_left == 1 else "s")
+ )
+ explanation.extend(
+ pprint.pformat({k: left[k] for k in extra_left}).splitlines()
+ )
+ extra_right = set_right - set_left
+ len_extra_right = len(extra_right)
+ if len_extra_right:
+ explanation.append(
+ "Right contains %d more item%s:"
+ % (len_extra_right, "" if len_extra_right == 1 else "s")
+ )
+ explanation.extend(
+ pprint.pformat({k: right[k] for k in extra_right}).splitlines()
+ )
+ return explanation
+
+
+def _compare_eq_cls(left: Any, right: Any, verbose: int) -> List[str]:
+ if isdatacls(left):
+ all_fields = left.__dataclass_fields__
+ fields_to_check = [field for field, info in all_fields.items() if info.compare]
+ elif isattrs(left):
+ all_fields = left.__attrs_attrs__
+ fields_to_check = [field.name for field in all_fields if getattr(field, "eq")]
+ elif isnamedtuple(left):
+ fields_to_check = left._fields
+ else:
+ assert False
+
+ indent = " "
+ same = []
+ diff = []
+ for field in fields_to_check:
+ if getattr(left, field) == getattr(right, field):
+ same.append(field)
+ else:
+ diff.append(field)
+
+ explanation = []
+ if same or diff:
+ explanation += [""]
+ if same and verbose < 2:
+ explanation.append("Omitting %s identical items, use -vv to show" % len(same))
+ elif same:
+ explanation += ["Matching attributes:"]
+ explanation += pprint.pformat(same).splitlines()
+ if diff:
+ explanation += ["Differing attributes:"]
+ explanation += pprint.pformat(diff).splitlines()
+ for field in diff:
+ field_left = getattr(left, field)
+ field_right = getattr(right, field)
+ explanation += [
+ "",
+ "Drill down into differing attribute %s:" % field,
+ ("%s%s: %r != %r") % (indent, field, field_left, field_right),
+ ]
+ explanation += [
+ indent + line
+ for line in _compare_eq_any(field_left, field_right, verbose)
+ ]
+ return explanation
+
+
+def _notin_text(term: str, text: str, verbose: int = 0) -> List[str]:
+ index = text.find(term)
+ head = text[:index]
+ tail = text[index + len(term) :]
+ correct_text = head + tail
+ diff = _diff_text(text, correct_text, verbose)
+ newdiff = ["%s is contained here:" % saferepr(term, maxsize=42)]
+ for line in diff:
+ if line.startswith("Skipping"):
+ continue
+ if line.startswith("- "):
+ continue
+ if line.startswith("+ "):
+ newdiff.append(" " + line[2:])
+ else:
+ newdiff.append(line)
+ return newdiff
+
+
+def running_on_ci() -> bool:
+ """Check if we're currently running on a CI system."""
+ env_vars = ["CI", "BUILD_NUMBER"]
+ return any(var in os.environ for var in env_vars)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/cacheprovider.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/cacheprovider.py
new file mode 100644
index 0000000000..681d02b409
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/cacheprovider.py
@@ -0,0 +1,580 @@
+"""Implementation of the cache provider."""
+# This plugin was not named "cache" to avoid conflicts with the external
+# pytest-cache version.
+import json
+import os
+from pathlib import Path
+from typing import Dict
+from typing import Generator
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Set
+from typing import Union
+
+import attr
+
+from .pathlib import resolve_from_str
+from .pathlib import rm_rf
+from .reports import CollectReport
+from _pytest import nodes
+from _pytest._io import TerminalWriter
+from _pytest.compat import final
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.config import hookimpl
+from _pytest.config.argparsing import Parser
+from _pytest.deprecated import check_ispytest
+from _pytest.fixtures import fixture
+from _pytest.fixtures import FixtureRequest
+from _pytest.main import Session
+from _pytest.python import Module
+from _pytest.python import Package
+from _pytest.reports import TestReport
+
+
+README_CONTENT = """\
+# pytest cache directory #
+
+This directory contains data from pytest's cache plugin,
+which provides the `--lf` and `--ff` options, as well as the `cache` fixture.
+
+**Do not** commit this to version control.
+
+See [the docs](https://docs.pytest.org/en/stable/how-to/cache.html) for more information.
+"""
+
+CACHEDIR_TAG_CONTENT = b"""\
+Signature: 8a477f597d28d172789f06886806bc55
+# This file is a cache directory tag created by pytest.
+# For information about cache directory tags, see:
+# https://bford.info/cachedir/spec.html
+"""
+
+
+@final
+@attr.s(init=False, auto_attribs=True)
+class Cache:
+ _cachedir: Path = attr.ib(repr=False)
+ _config: Config = attr.ib(repr=False)
+
+ # Sub-directory under cache-dir for directories created by `mkdir()`.
+ _CACHE_PREFIX_DIRS = "d"
+
+ # Sub-directory under cache-dir for values created by `set()`.
+ _CACHE_PREFIX_VALUES = "v"
+
+ def __init__(
+ self, cachedir: Path, config: Config, *, _ispytest: bool = False
+ ) -> None:
+ check_ispytest(_ispytest)
+ self._cachedir = cachedir
+ self._config = config
+
+ @classmethod
+ def for_config(cls, config: Config, *, _ispytest: bool = False) -> "Cache":
+ """Create the Cache instance for a Config.
+
+ :meta private:
+ """
+ check_ispytest(_ispytest)
+ cachedir = cls.cache_dir_from_config(config, _ispytest=True)
+ if config.getoption("cacheclear") and cachedir.is_dir():
+ cls.clear_cache(cachedir, _ispytest=True)
+ return cls(cachedir, config, _ispytest=True)
+
+ @classmethod
+ def clear_cache(cls, cachedir: Path, _ispytest: bool = False) -> None:
+ """Clear the sub-directories used to hold cached directories and values.
+
+ :meta private:
+ """
+ check_ispytest(_ispytest)
+ for prefix in (cls._CACHE_PREFIX_DIRS, cls._CACHE_PREFIX_VALUES):
+ d = cachedir / prefix
+ if d.is_dir():
+ rm_rf(d)
+
+ @staticmethod
+ def cache_dir_from_config(config: Config, *, _ispytest: bool = False) -> Path:
+ """Get the path to the cache directory for a Config.
+
+ :meta private:
+ """
+ check_ispytest(_ispytest)
+ return resolve_from_str(config.getini("cache_dir"), config.rootpath)
+
+ def warn(self, fmt: str, *, _ispytest: bool = False, **args: object) -> None:
+ """Issue a cache warning.
+
+ :meta private:
+ """
+ check_ispytest(_ispytest)
+ import warnings
+ from _pytest.warning_types import PytestCacheWarning
+
+ warnings.warn(
+ PytestCacheWarning(fmt.format(**args) if args else fmt),
+ self._config.hook,
+ stacklevel=3,
+ )
+
+ def mkdir(self, name: str) -> Path:
+ """Return a directory path object with the given name.
+
+ If the directory does not yet exist, it will be created. You can use
+ it to manage files to e.g. store/retrieve database dumps across test
+ sessions.
+
+ .. versionadded:: 7.0
+
+ :param name:
+ Must be a string not containing a ``/`` separator.
+ Make sure the name contains your plugin or application
+ identifiers to prevent clashes with other cache users.
+ """
+ path = Path(name)
+ if len(path.parts) > 1:
+ raise ValueError("name is not allowed to contain path separators")
+ res = self._cachedir.joinpath(self._CACHE_PREFIX_DIRS, path)
+ res.mkdir(exist_ok=True, parents=True)
+ return res
+
+ def _getvaluepath(self, key: str) -> Path:
+ return self._cachedir.joinpath(self._CACHE_PREFIX_VALUES, Path(key))
+
+ def get(self, key: str, default):
+ """Return the cached value for the given key.
+
+ If no value was yet cached or the value cannot be read, the specified
+ default is returned.
+
+ :param key:
+ Must be a ``/`` separated value. Usually the first
+ name is the name of your plugin or your application.
+ :param default:
+ The value to return in case of a cache-miss or invalid cache value.
+ """
+ path = self._getvaluepath(key)
+ try:
+ with path.open("r") as f:
+ return json.load(f)
+ except (ValueError, OSError):
+ return default
+
+ def set(self, key: str, value: object) -> None:
+ """Save value for the given key.
+
+ :param key:
+ Must be a ``/`` separated value. Usually the first
+ name is the name of your plugin or your application.
+ :param value:
+            Must be of any combination of basic Python types,
+ including nested types like lists of dictionaries.
+ """
+ path = self._getvaluepath(key)
+ try:
+ if path.parent.is_dir():
+ cache_dir_exists_already = True
+ else:
+ cache_dir_exists_already = self._cachedir.exists()
+ path.parent.mkdir(exist_ok=True, parents=True)
+ except OSError:
+ self.warn("could not create cache path {path}", path=path, _ispytest=True)
+ return
+ if not cache_dir_exists_already:
+ self._ensure_supporting_files()
+ data = json.dumps(value, indent=2)
+ try:
+ f = path.open("w")
+ except OSError:
+ self.warn("cache could not write path {path}", path=path, _ispytest=True)
+ else:
+ with f:
+ f.write(data)
+
+ def _ensure_supporting_files(self) -> None:
+ """Create supporting files in the cache dir that are not really part of the cache."""
+ readme_path = self._cachedir / "README.md"
+ readme_path.write_text(README_CONTENT)
+
+ gitignore_path = self._cachedir.joinpath(".gitignore")
+ msg = "# Created by pytest automatically.\n*\n"
+ gitignore_path.write_text(msg, encoding="UTF-8")
+
+ cachedir_tag_path = self._cachedir.joinpath("CACHEDIR.TAG")
+ cachedir_tag_path.write_bytes(CACHEDIR_TAG_CONTENT)
+
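+# Editor's note: a hedged usage sketch, not upstream code. It shows the
+# Cache.get()/Cache.set() API above as it would typically be reached through
+# the `cache` fixture defined at the end of this module; the key name is made
+# up and the function is never called.
+def _cache_usage_example(cache: Cache) -> None:
+    cache.set("example/answer", 42)
+    assert cache.get("example/answer", None) == 42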
+
+class LFPluginCollWrapper:
+ def __init__(self, lfplugin: "LFPlugin") -> None:
+ self.lfplugin = lfplugin
+ self._collected_at_least_one_failure = False
+
+ @hookimpl(hookwrapper=True)
+ def pytest_make_collect_report(self, collector: nodes.Collector):
+ if isinstance(collector, Session):
+ out = yield
+ res: CollectReport = out.get_result()
+
+ # Sort any lf-paths to the beginning.
+ lf_paths = self.lfplugin._last_failed_paths
+
+ res.result = sorted(
+ res.result,
+                # use stable sort to prioritize last failed
+ key=lambda x: x.path in lf_paths,
+ reverse=True,
+ )
+ return
+
+ elif isinstance(collector, Module):
+ if collector.path in self.lfplugin._last_failed_paths:
+ out = yield
+ res = out.get_result()
+ result = res.result
+ lastfailed = self.lfplugin.lastfailed
+
+ # Only filter with known failures.
+ if not self._collected_at_least_one_failure:
+ if not any(x.nodeid in lastfailed for x in result):
+ return
+ self.lfplugin.config.pluginmanager.register(
+ LFPluginCollSkipfiles(self.lfplugin), "lfplugin-collskip"
+ )
+ self._collected_at_least_one_failure = True
+
+ session = collector.session
+ result[:] = [
+ x
+ for x in result
+ if x.nodeid in lastfailed
+ # Include any passed arguments (not trivial to filter).
+ or session.isinitpath(x.path)
+ # Keep all sub-collectors.
+ or isinstance(x, nodes.Collector)
+ ]
+ return
+ yield
+
+
+class LFPluginCollSkipfiles:
+ def __init__(self, lfplugin: "LFPlugin") -> None:
+ self.lfplugin = lfplugin
+
+ @hookimpl
+ def pytest_make_collect_report(
+ self, collector: nodes.Collector
+ ) -> Optional[CollectReport]:
+ # Packages are Modules, but _last_failed_paths only contains
+ # test-bearing paths and doesn't try to include the paths of their
+ # packages, so don't filter them.
+ if isinstance(collector, Module) and not isinstance(collector, Package):
+ if collector.path not in self.lfplugin._last_failed_paths:
+ self.lfplugin._skipped_files += 1
+
+ return CollectReport(
+ collector.nodeid, "passed", longrepr=None, result=[]
+ )
+ return None
+
+
+class LFPlugin:
+ """Plugin which implements the --lf (run last-failing) option."""
+
+ def __init__(self, config: Config) -> None:
+ self.config = config
+ active_keys = "lf", "failedfirst"
+ self.active = any(config.getoption(key) for key in active_keys)
+ assert config.cache
+ self.lastfailed: Dict[str, bool] = config.cache.get("cache/lastfailed", {})
+ self._previously_failed_count: Optional[int] = None
+ self._report_status: Optional[str] = None
+ self._skipped_files = 0 # count skipped files during collection due to --lf
+
+ if config.getoption("lf"):
+ self._last_failed_paths = self.get_last_failed_paths()
+ config.pluginmanager.register(
+ LFPluginCollWrapper(self), "lfplugin-collwrapper"
+ )
+
+ def get_last_failed_paths(self) -> Set[Path]:
+        """Return a set with the Path of each previously failed nodeid."""
+ rootpath = self.config.rootpath
+ result = {rootpath / nodeid.split("::")[0] for nodeid in self.lastfailed}
+ return {x for x in result if x.exists()}
+
+ def pytest_report_collectionfinish(self) -> Optional[str]:
+ if self.active and self.config.getoption("verbose") >= 0:
+ return "run-last-failure: %s" % self._report_status
+ return None
+
+ def pytest_runtest_logreport(self, report: TestReport) -> None:
+ if (report.when == "call" and report.passed) or report.skipped:
+ self.lastfailed.pop(report.nodeid, None)
+ elif report.failed:
+ self.lastfailed[report.nodeid] = True
+
+ def pytest_collectreport(self, report: CollectReport) -> None:
+ passed = report.outcome in ("passed", "skipped")
+ if passed:
+ if report.nodeid in self.lastfailed:
+ self.lastfailed.pop(report.nodeid)
+ self.lastfailed.update((item.nodeid, True) for item in report.result)
+ else:
+ self.lastfailed[report.nodeid] = True
+
+ @hookimpl(hookwrapper=True, tryfirst=True)
+ def pytest_collection_modifyitems(
+ self, config: Config, items: List[nodes.Item]
+ ) -> Generator[None, None, None]:
+ yield
+
+ if not self.active:
+ return
+
+ if self.lastfailed:
+ previously_failed = []
+ previously_passed = []
+ for item in items:
+ if item.nodeid in self.lastfailed:
+ previously_failed.append(item)
+ else:
+ previously_passed.append(item)
+ self._previously_failed_count = len(previously_failed)
+
+ if not previously_failed:
+                # Running a subset of all tests, and the recorded failures
+                # all lie outside of that subset.
+ self._report_status = "%d known failures not in selected tests" % (
+ len(self.lastfailed),
+ )
+ else:
+ if self.config.getoption("lf"):
+ items[:] = previously_failed
+ config.hook.pytest_deselected(items=previously_passed)
+ else: # --failedfirst
+ items[:] = previously_failed + previously_passed
+
+ noun = "failure" if self._previously_failed_count == 1 else "failures"
+ suffix = " first" if self.config.getoption("failedfirst") else ""
+ self._report_status = "rerun previous {count} {noun}{suffix}".format(
+ count=self._previously_failed_count, suffix=suffix, noun=noun
+ )
+
+ if self._skipped_files > 0:
+ files_noun = "file" if self._skipped_files == 1 else "files"
+ self._report_status += " (skipped {files} {files_noun})".format(
+ files=self._skipped_files, files_noun=files_noun
+ )
+ else:
+ self._report_status = "no previously failed tests, "
+ if self.config.getoption("last_failed_no_failures") == "none":
+ self._report_status += "deselecting all items."
+ config.hook.pytest_deselected(items=items[:])
+ items[:] = []
+ else:
+ self._report_status += "not deselecting items."
+
+ def pytest_sessionfinish(self, session: Session) -> None:
+ config = self.config
+ if config.getoption("cacheshow") or hasattr(config, "workerinput"):
+ return
+
+ assert config.cache is not None
+ saved_lastfailed = config.cache.get("cache/lastfailed", {})
+ if saved_lastfailed != self.lastfailed:
+ config.cache.set("cache/lastfailed", self.lastfailed)
+
+
+class NFPlugin:
+ """Plugin which implements the --nf (run new-first) option."""
+
+ def __init__(self, config: Config) -> None:
+ self.config = config
+ self.active = config.option.newfirst
+ assert config.cache is not None
+ self.cached_nodeids = set(config.cache.get("cache/nodeids", []))
+
+ @hookimpl(hookwrapper=True, tryfirst=True)
+ def pytest_collection_modifyitems(
+ self, items: List[nodes.Item]
+ ) -> Generator[None, None, None]:
+ yield
+
+ if self.active:
+ new_items: Dict[str, nodes.Item] = {}
+ other_items: Dict[str, nodes.Item] = {}
+ for item in items:
+ if item.nodeid not in self.cached_nodeids:
+ new_items[item.nodeid] = item
+ else:
+ other_items[item.nodeid] = item
+
+ items[:] = self._get_increasing_order(
+ new_items.values()
+ ) + self._get_increasing_order(other_items.values())
+ self.cached_nodeids.update(new_items)
+ else:
+ self.cached_nodeids.update(item.nodeid for item in items)
+
+ def _get_increasing_order(self, items: Iterable[nodes.Item]) -> List[nodes.Item]:
+ return sorted(items, key=lambda item: item.path.stat().st_mtime, reverse=True) # type: ignore[no-any-return]
+
+ def pytest_sessionfinish(self) -> None:
+ config = self.config
+ if config.getoption("cacheshow") or hasattr(config, "workerinput"):
+ return
+
+ if config.getoption("collectonly"):
+ return
+
+ assert config.cache is not None
+ config.cache.set("cache/nodeids", sorted(self.cached_nodeids))
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("general")
+ group.addoption(
+ "--lf",
+ "--last-failed",
+ action="store_true",
+ dest="lf",
+ help="rerun only the tests that failed "
+ "at the last run (or all if none failed)",
+ )
+ group.addoption(
+ "--ff",
+ "--failed-first",
+ action="store_true",
+ dest="failedfirst",
+ help="run all tests, but run the last failures first.\n"
+ "This may re-order tests and thus lead to "
+ "repeated fixture setup/teardown.",
+ )
+ group.addoption(
+ "--nf",
+ "--new-first",
+ action="store_true",
+ dest="newfirst",
+ help="run tests from new files first, then the rest of the tests "
+ "sorted by file mtime",
+ )
+ group.addoption(
+ "--cache-show",
+ action="append",
+ nargs="?",
+ dest="cacheshow",
+ help=(
+ "show cache contents, don't perform collection or tests. "
+ "Optional argument: glob (default: '*')."
+ ),
+ )
+ group.addoption(
+ "--cache-clear",
+ action="store_true",
+ dest="cacheclear",
+ help="remove all cache contents at start of test run.",
+ )
+ cache_dir_default = ".pytest_cache"
+ if "TOX_ENV_DIR" in os.environ:
+ cache_dir_default = os.path.join(os.environ["TOX_ENV_DIR"], cache_dir_default)
+ parser.addini("cache_dir", default=cache_dir_default, help="cache directory path.")
+ group.addoption(
+ "--lfnf",
+ "--last-failed-no-failures",
+ action="store",
+ dest="last_failed_no_failures",
+ choices=("all", "none"),
+ default="all",
+ help="which tests to run with no previously (known) failures.",
+ )
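+
+
+# Illustrative usage sketch (not part of the upstream module): typical
+# command lines combining the cache-related options registered above; the
+# "tests/" path and the glob are placeholders.
+#
+#   pytest --lf                    # rerun only the tests that failed last run
+#   pytest --ff                    # run all tests, previous failures first
+#   pytest --nf tests/             # run tests from new files first
+#   pytest --cache-show "cache/*"  # show cached values matching a glob
+#   pytest --cache-clear           # remove all cache contents before running
+#   pytest --lf --lfnf=none        # with no known failures, run nothing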
+
+
+def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:
+ if config.option.cacheshow:
+ from _pytest.main import wrap_session
+
+ return wrap_session(config, cacheshow)
+ return None
+
+
+@hookimpl(tryfirst=True)
+def pytest_configure(config: Config) -> None:
+ config.cache = Cache.for_config(config, _ispytest=True)
+ config.pluginmanager.register(LFPlugin(config), "lfplugin")
+ config.pluginmanager.register(NFPlugin(config), "nfplugin")
+
+
+@fixture
+def cache(request: FixtureRequest) -> Cache:
+ """Return a cache object that can persist state between testing sessions.
+
+ cache.get(key, default)
+ cache.set(key, value)
+
+ Keys must be ``/`` separated strings, where the first part is usually the
+ name of your plugin or application to avoid clashes with other cache users.
+
+ Values can be any object handled by the json stdlib module.
+ """
+ assert request.config.cache is not None
+ return request.config.cache
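+
+
+# Illustrative sketch (not part of the upstream module): a test using the
+# ``cache`` fixture above. The key follows the documented convention of a
+# "/"-separated string prefixed by the plugin or application name; the
+# "example/" prefix and the value are placeholders.
+#
+# def test_expensive_value_is_cached(cache):
+#     value = cache.get("example/answer", None)
+#     if value is None:
+#         value = 42  # stand-in for an expensive computation
+#         cache.set("example/answer", value)
+#     assert value == 42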
+
+
+def pytest_report_header(config: Config) -> Optional[str]:
+ """Display cachedir with --cache-show and if non-default."""
+ if config.option.verbose > 0 or config.getini("cache_dir") != ".pytest_cache":
+ assert config.cache is not None
+ cachedir = config.cache._cachedir
+ # TODO: evaluate generating upward relative paths
+ # starting with .., ../.. if sensible
+
+ try:
+ displaypath = cachedir.relative_to(config.rootpath)
+ except ValueError:
+ displaypath = cachedir
+ return f"cachedir: {displaypath}"
+ return None
+
+
+def cacheshow(config: Config, session: Session) -> int:
+ from pprint import pformat
+
+ assert config.cache is not None
+
+ tw = TerminalWriter()
+ tw.line("cachedir: " + str(config.cache._cachedir))
+ if not config.cache._cachedir.is_dir():
+ tw.line("cache is empty")
+ return 0
+
+ glob = config.option.cacheshow[0]
+ if glob is None:
+ glob = "*"
+
+ dummy = object()
+ basedir = config.cache._cachedir
+ vdir = basedir / Cache._CACHE_PREFIX_VALUES
+ tw.sep("-", "cache values for %r" % glob)
+ for valpath in sorted(x for x in vdir.rglob(glob) if x.is_file()):
+ key = str(valpath.relative_to(vdir))
+ val = config.cache.get(key, dummy)
+ if val is dummy:
+ tw.line("%s contains unreadable content, will be ignored" % key)
+ else:
+ tw.line("%s contains:" % key)
+ for line in pformat(val).splitlines():
+ tw.line(" " + line)
+
+ ddir = basedir / Cache._CACHE_PREFIX_DIRS
+ if ddir.is_dir():
+ contents = sorted(ddir.rglob(glob))
+ tw.sep("-", "cache directories for %r" % glob)
+ for p in contents:
+ # if p.is_dir():
+ # print("%s/" % p.relative_to(basedir))
+ if p.is_file():
+ key = str(p.relative_to(basedir))
+ tw.line(f"{key} is a file of length {p.stat().st_size:d}")
+ return 0
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/capture.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/capture.py
new file mode 100644
index 0000000000..884f035e29
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/capture.py
@@ -0,0 +1,942 @@
+"""Per-test stdout/stderr capturing mechanism."""
+import contextlib
+import functools
+import io
+import os
+import sys
+from io import UnsupportedOperation
+from tempfile import TemporaryFile
+from typing import Any
+from typing import AnyStr
+from typing import Generator
+from typing import Generic
+from typing import Iterator
+from typing import Optional
+from typing import TextIO
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from _pytest.compat import final
+from _pytest.config import Config
+from _pytest.config import hookimpl
+from _pytest.config.argparsing import Parser
+from _pytest.deprecated import check_ispytest
+from _pytest.fixtures import fixture
+from _pytest.fixtures import SubRequest
+from _pytest.nodes import Collector
+from _pytest.nodes import File
+from _pytest.nodes import Item
+
+if TYPE_CHECKING:
+ from typing_extensions import Literal
+
+ _CaptureMethod = Literal["fd", "sys", "no", "tee-sys"]
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("general")
+ group._addoption(
+ "--capture",
+ action="store",
+ default="fd",
+ metavar="method",
+ choices=["fd", "sys", "no", "tee-sys"],
+ help="per-test capturing method: one of fd|sys|no|tee-sys.",
+ )
+ group._addoption(
+ "-s",
+ action="store_const",
+ const="no",
+ dest="capture",
+ help="shortcut for --capture=no.",
+ )
+
+
+def _colorama_workaround() -> None:
+ """Ensure colorama is imported so that it attaches to the correct stdio
+ handles on Windows.
+
+ colorama uses the terminal at import time. So if something does the
+ first import of colorama while I/O capture is active, colorama will
+ fail in various ways.
+ """
+ if sys.platform.startswith("win32"):
+ try:
+ import colorama # noqa: F401
+ except ImportError:
+ pass
+
+
+def _py36_windowsconsoleio_workaround(stream: TextIO) -> None:
+ """Workaround for Windows Unicode console handling on Python>=3.6.
+
+ Python 3.6 implemented Unicode console handling for Windows. This works
+ by reading/writing to the raw console handle using
+ ``{Read,Write}ConsoleW``.
+
+ The problem is that we are going to ``dup2`` over the stdio file
+ descriptors when doing ``FDCapture`` and this will ``CloseHandle`` the
+ handles used by Python to write to the console. Though there is still some
+ weirdness and the console handle seems to only be closed randomly and not
+ on the first call to ``CloseHandle``, or maybe it gets reopened with the
+ same handle value when we suspend capturing.
+
+ The workaround in this case will reopen stdio with a different fd which
+ also means a different handle by replicating the logic in
+ "Py_lifecycle.c:initstdio/create_stdio".
+
+ :param stream:
+ In practice ``sys.stdout`` or ``sys.stderr``, but given
+ here as parameter for unittesting purposes.
+
+ See https://github.com/pytest-dev/py/issues/103.
+ """
+ if not sys.platform.startswith("win32") or hasattr(sys, "pypy_version_info"):
+ return
+
+ # Bail out if ``stream`` doesn't seem like a proper ``io`` stream (#2666).
+ if not hasattr(stream, "buffer"): # type: ignore[unreachable]
+ return
+
+ buffered = hasattr(stream.buffer, "raw")
+ raw_stdout = stream.buffer.raw if buffered else stream.buffer # type: ignore[attr-defined]
+
+ if not isinstance(raw_stdout, io._WindowsConsoleIO): # type: ignore[attr-defined]
+ return
+
+ def _reopen_stdio(f, mode):
+ if not buffered and mode[0] == "w":
+ buffering = 0
+ else:
+ buffering = -1
+
+ return io.TextIOWrapper(
+ open(os.dup(f.fileno()), mode, buffering), # type: ignore[arg-type]
+ f.encoding,
+ f.errors,
+ f.newlines,
+ f.line_buffering,
+ )
+
+ sys.stdin = _reopen_stdio(sys.stdin, "rb")
+ sys.stdout = _reopen_stdio(sys.stdout, "wb")
+ sys.stderr = _reopen_stdio(sys.stderr, "wb")
+
+
+@hookimpl(hookwrapper=True)
+def pytest_load_initial_conftests(early_config: Config):
+ ns = early_config.known_args_namespace
+ if ns.capture == "fd":
+ _py36_windowsconsoleio_workaround(sys.stdout)
+ _colorama_workaround()
+ pluginmanager = early_config.pluginmanager
+ capman = CaptureManager(ns.capture)
+ pluginmanager.register(capman, "capturemanager")
+
+ # Make sure that capturemanager is properly reset at final shutdown.
+ early_config.add_cleanup(capman.stop_global_capturing)
+
+ # Finally trigger conftest loading but while capturing (issue #93).
+ capman.start_global_capturing()
+ outcome = yield
+ capman.suspend_global_capture()
+ if outcome.excinfo is not None:
+ out, err = capman.read_global_capture()
+ sys.stdout.write(out)
+ sys.stderr.write(err)
+
+
+# IO Helpers.
+
+
+class EncodedFile(io.TextIOWrapper):
+ __slots__ = ()
+
+ @property
+ def name(self) -> str:
+ # Ensure that file.name is a string. Workaround for a Python bug
+ # fixed in >=3.7.4: https://bugs.python.org/issue36015
+ return repr(self.buffer)
+
+ @property
+ def mode(self) -> str:
+ # TextIOWrapper doesn't expose a mode, but at least some of our
+ # tests check it.
+ return self.buffer.mode.replace("b", "")
+
+
+class CaptureIO(io.TextIOWrapper):
+ def __init__(self) -> None:
+ super().__init__(io.BytesIO(), encoding="UTF-8", newline="", write_through=True)
+
+ def getvalue(self) -> str:
+ assert isinstance(self.buffer, io.BytesIO)
+ return self.buffer.getvalue().decode("UTF-8")
+
+
+class TeeCaptureIO(CaptureIO):
+ def __init__(self, other: TextIO) -> None:
+ self._other = other
+ super().__init__()
+
+ def write(self, s: str) -> int:
+ super().write(s)
+ return self._other.write(s)
+
+
+class DontReadFromInput:
+ encoding = None
+
+ def read(self, *args):
+ raise OSError(
+ "pytest: reading from stdin while output is captured! Consider using `-s`."
+ )
+
+ readline = read
+ readlines = read
+ __next__ = read
+
+ def __iter__(self):
+ return self
+
+ def fileno(self) -> int:
+ raise UnsupportedOperation("redirected stdin is pseudofile, has no fileno()")
+
+ def isatty(self) -> bool:
+ return False
+
+ def close(self) -> None:
+ pass
+
+ @property
+ def buffer(self):
+ return self
+
+
+# Capture classes.
+
+
+patchsysdict = {0: "stdin", 1: "stdout", 2: "stderr"}
+
+
+class NoCapture:
+ EMPTY_BUFFER = None
+ __init__ = start = done = suspend = resume = lambda *args: None
+
+
+class SysCaptureBinary:
+
+ EMPTY_BUFFER = b""
+
+ def __init__(self, fd: int, tmpfile=None, *, tee: bool = False) -> None:
+ name = patchsysdict[fd]
+ self._old = getattr(sys, name)
+ self.name = name
+ if tmpfile is None:
+ if name == "stdin":
+ tmpfile = DontReadFromInput()
+ else:
+ tmpfile = CaptureIO() if not tee else TeeCaptureIO(self._old)
+ self.tmpfile = tmpfile
+ self._state = "initialized"
+
+ def repr(self, class_name: str) -> str:
+ return "<{} {} _old={} _state={!r} tmpfile={!r}>".format(
+ class_name,
+ self.name,
+ hasattr(self, "_old") and repr(self._old) or "<UNSET>",
+ self._state,
+ self.tmpfile,
+ )
+
+ def __repr__(self) -> str:
+ return "<{} {} _old={} _state={!r} tmpfile={!r}>".format(
+ self.__class__.__name__,
+ self.name,
+ hasattr(self, "_old") and repr(self._old) or "<UNSET>",
+ self._state,
+ self.tmpfile,
+ )
+
+ def _assert_state(self, op: str, states: Tuple[str, ...]) -> None:
+ assert (
+ self._state in states
+ ), "cannot {} in state {!r}: expected one of {}".format(
+ op, self._state, ", ".join(states)
+ )
+
+ def start(self) -> None:
+ self._assert_state("start", ("initialized",))
+ setattr(sys, self.name, self.tmpfile)
+ self._state = "started"
+
+ def snap(self):
+ self._assert_state("snap", ("started", "suspended"))
+ self.tmpfile.seek(0)
+ res = self.tmpfile.buffer.read()
+ self.tmpfile.seek(0)
+ self.tmpfile.truncate()
+ return res
+
+ def done(self) -> None:
+ self._assert_state("done", ("initialized", "started", "suspended", "done"))
+ if self._state == "done":
+ return
+ setattr(sys, self.name, self._old)
+ del self._old
+ self.tmpfile.close()
+ self._state = "done"
+
+ def suspend(self) -> None:
+ self._assert_state("suspend", ("started", "suspended"))
+ setattr(sys, self.name, self._old)
+ self._state = "suspended"
+
+ def resume(self) -> None:
+ self._assert_state("resume", ("started", "suspended"))
+ if self._state == "started":
+ return
+ setattr(sys, self.name, self.tmpfile)
+ self._state = "started"
+
+ def writeorg(self, data) -> None:
+ self._assert_state("writeorg", ("started", "suspended"))
+ self._old.flush()
+ self._old.buffer.write(data)
+ self._old.buffer.flush()
+
+
+class SysCapture(SysCaptureBinary):
+ EMPTY_BUFFER = "" # type: ignore[assignment]
+
+ def snap(self):
+ res = self.tmpfile.getvalue()
+ self.tmpfile.seek(0)
+ self.tmpfile.truncate()
+ return res
+
+ def writeorg(self, data):
+ self._assert_state("writeorg", ("started", "suspended"))
+ self._old.write(data)
+ self._old.flush()
+
+
+class FDCaptureBinary:
+ """Capture IO to/from a given OS-level file descriptor.
+
+ snap() produces `bytes`.
+ """
+
+ EMPTY_BUFFER = b""
+
+ def __init__(self, targetfd: int) -> None:
+ self.targetfd = targetfd
+
+ try:
+ os.fstat(targetfd)
+ except OSError:
+ # FD capturing is conceptually simple -- create a temporary file,
+ # redirect the FD to it, redirect back when done. But when the
+ # target FD is invalid it throws a wrench into this lovely scheme.
+ #
+ # Tests themselves shouldn't care if the FD is valid, FD capturing
+ # should work regardless of external circumstances. So falling back
+ # to just sys capturing is not a good option.
+ #
+ # Further complications are the need to support suspend() and the
+ # possibility of FD reuse (e.g. the tmpfile getting the very same
+ # target FD). The following approach is robust, I believe.
+ self.targetfd_invalid: Optional[int] = os.open(os.devnull, os.O_RDWR)
+ os.dup2(self.targetfd_invalid, targetfd)
+ else:
+ self.targetfd_invalid = None
+ self.targetfd_save = os.dup(targetfd)
+
+ if targetfd == 0:
+ self.tmpfile = open(os.devnull)
+ self.syscapture = SysCapture(targetfd)
+ else:
+ self.tmpfile = EncodedFile(
+ TemporaryFile(buffering=0),
+ encoding="utf-8",
+ errors="replace",
+ newline="",
+ write_through=True,
+ )
+ if targetfd in patchsysdict:
+ self.syscapture = SysCapture(targetfd, self.tmpfile)
+ else:
+ self.syscapture = NoCapture()
+
+ self._state = "initialized"
+
+ def __repr__(self) -> str:
+ return "<{} {} oldfd={} _state={!r} tmpfile={!r}>".format(
+ self.__class__.__name__,
+ self.targetfd,
+ self.targetfd_save,
+ self._state,
+ self.tmpfile,
+ )
+
+ def _assert_state(self, op: str, states: Tuple[str, ...]) -> None:
+ assert (
+ self._state in states
+ ), "cannot {} in state {!r}: expected one of {}".format(
+ op, self._state, ", ".join(states)
+ )
+
+ def start(self) -> None:
+ """Start capturing on targetfd using memorized tmpfile."""
+ self._assert_state("start", ("initialized",))
+ os.dup2(self.tmpfile.fileno(), self.targetfd)
+ self.syscapture.start()
+ self._state = "started"
+
+ def snap(self):
+ self._assert_state("snap", ("started", "suspended"))
+ self.tmpfile.seek(0)
+ res = self.tmpfile.buffer.read()
+ self.tmpfile.seek(0)
+ self.tmpfile.truncate()
+ return res
+
+ def done(self) -> None:
+ """Stop capturing, restore streams, return original capture file,
+ seeked to position zero."""
+ self._assert_state("done", ("initialized", "started", "suspended", "done"))
+ if self._state == "done":
+ return
+ os.dup2(self.targetfd_save, self.targetfd)
+ os.close(self.targetfd_save)
+ if self.targetfd_invalid is not None:
+ if self.targetfd_invalid != self.targetfd:
+ os.close(self.targetfd)
+ os.close(self.targetfd_invalid)
+ self.syscapture.done()
+ self.tmpfile.close()
+ self._state = "done"
+
+ def suspend(self) -> None:
+ self._assert_state("suspend", ("started", "suspended"))
+ if self._state == "suspended":
+ return
+ self.syscapture.suspend()
+ os.dup2(self.targetfd_save, self.targetfd)
+ self._state = "suspended"
+
+ def resume(self) -> None:
+ self._assert_state("resume", ("started", "suspended"))
+ if self._state == "started":
+ return
+ self.syscapture.resume()
+ os.dup2(self.tmpfile.fileno(), self.targetfd)
+ self._state = "started"
+
+ def writeorg(self, data):
+ """Write to original file descriptor."""
+ self._assert_state("writeorg", ("started", "suspended"))
+ os.write(self.targetfd_save, data)
+
+
+class FDCapture(FDCaptureBinary):
+ """Capture IO to/from a given OS-level file descriptor.
+
+ snap() produces text.
+ """
+
+ # Ignore type because it doesn't match the type in the superclass (bytes).
+ EMPTY_BUFFER = "" # type: ignore
+
+ def snap(self):
+ self._assert_state("snap", ("started", "suspended"))
+ self.tmpfile.seek(0)
+ res = self.tmpfile.read()
+ self.tmpfile.seek(0)
+ self.tmpfile.truncate()
+ return res
+
+ def writeorg(self, data):
+ """Write to original file descriptor."""
+ super().writeorg(data.encode("utf-8")) # XXX use encoding of original stream
+
+
+# MultiCapture
+
+
+# This class was a namedtuple, but due to mypy limitation[0] it could not be
+# made generic, so was replaced by a regular class which tries to emulate the
+# pertinent parts of a namedtuple. If the mypy limitation is ever lifted, can
+# make it a namedtuple again.
+# [0]: https://github.com/python/mypy/issues/685
+@final
+@functools.total_ordering
+class CaptureResult(Generic[AnyStr]):
+ """The result of :method:`CaptureFixture.readouterr`."""
+
+ __slots__ = ("out", "err")
+
+ def __init__(self, out: AnyStr, err: AnyStr) -> None:
+ self.out: AnyStr = out
+ self.err: AnyStr = err
+
+ def __len__(self) -> int:
+ return 2
+
+ def __iter__(self) -> Iterator[AnyStr]:
+ return iter((self.out, self.err))
+
+ def __getitem__(self, item: int) -> AnyStr:
+ return tuple(self)[item]
+
+ def _replace(
+ self, *, out: Optional[AnyStr] = None, err: Optional[AnyStr] = None
+ ) -> "CaptureResult[AnyStr]":
+ return CaptureResult(
+ out=self.out if out is None else out, err=self.err if err is None else err
+ )
+
+ def count(self, value: AnyStr) -> int:
+ return tuple(self).count(value)
+
+ def index(self, value) -> int:
+ return tuple(self).index(value)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, (CaptureResult, tuple)):
+ return NotImplemented
+ return tuple(self) == tuple(other)
+
+ def __hash__(self) -> int:
+ return hash(tuple(self))
+
+ def __lt__(self, other: object) -> bool:
+ if not isinstance(other, (CaptureResult, tuple)):
+ return NotImplemented
+ return tuple(self) < tuple(other)
+
+ def __repr__(self) -> str:
+ return f"CaptureResult(out={self.out!r}, err={self.err!r})"
+
+
+class MultiCapture(Generic[AnyStr]):
+ _state = None
+ _in_suspended = False
+
+ def __init__(self, in_, out, err) -> None:
+ self.in_ = in_
+ self.out = out
+ self.err = err
+
+ def __repr__(self) -> str:
+ return "<MultiCapture out={!r} err={!r} in_={!r} _state={!r} _in_suspended={!r}>".format(
+ self.out,
+ self.err,
+ self.in_,
+ self._state,
+ self._in_suspended,
+ )
+
+ def start_capturing(self) -> None:
+ self._state = "started"
+ if self.in_:
+ self.in_.start()
+ if self.out:
+ self.out.start()
+ if self.err:
+ self.err.start()
+
+ def pop_outerr_to_orig(self) -> Tuple[AnyStr, AnyStr]:
+ """Pop current snapshot out/err capture and flush to orig streams."""
+ out, err = self.readouterr()
+ if out:
+ self.out.writeorg(out)
+ if err:
+ self.err.writeorg(err)
+ return out, err
+
+ def suspend_capturing(self, in_: bool = False) -> None:
+ self._state = "suspended"
+ if self.out:
+ self.out.suspend()
+ if self.err:
+ self.err.suspend()
+ if in_ and self.in_:
+ self.in_.suspend()
+ self._in_suspended = True
+
+ def resume_capturing(self) -> None:
+ self._state = "started"
+ if self.out:
+ self.out.resume()
+ if self.err:
+ self.err.resume()
+ if self._in_suspended:
+ self.in_.resume()
+ self._in_suspended = False
+
+ def stop_capturing(self) -> None:
+ """Stop capturing and reset capturing streams."""
+ if self._state == "stopped":
+ raise ValueError("was already stopped")
+ self._state = "stopped"
+ if self.out:
+ self.out.done()
+ if self.err:
+ self.err.done()
+ if self.in_:
+ self.in_.done()
+
+ def is_started(self) -> bool:
+ """Whether actively capturing -- not suspended or stopped."""
+ return self._state == "started"
+
+ def readouterr(self) -> CaptureResult[AnyStr]:
+ out = self.out.snap() if self.out else ""
+ err = self.err.snap() if self.err else ""
+ return CaptureResult(out, err)
+
+
+def _get_multicapture(method: "_CaptureMethod") -> MultiCapture[str]:
+ if method == "fd":
+ return MultiCapture(in_=FDCapture(0), out=FDCapture(1), err=FDCapture(2))
+ elif method == "sys":
+ return MultiCapture(in_=SysCapture(0), out=SysCapture(1), err=SysCapture(2))
+ elif method == "no":
+ return MultiCapture(in_=None, out=None, err=None)
+ elif method == "tee-sys":
+ return MultiCapture(
+ in_=None, out=SysCapture(1, tee=True), err=SysCapture(2, tee=True)
+ )
+ raise ValueError(f"unknown capturing method: {method!r}")
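+
+
+# Illustrative sketch (not part of the upstream module): how the --capture
+# method maps to the capture objects built above.
+#
+#   "fd"      -> FDCapture for file descriptors 0, 1 and 2 (OS-level redirection)
+#   "sys"     -> SysCapture patching sys.stdin/stdout/stderr
+#   "tee-sys" -> SysCapture for stdout/stderr that also writes through to the
+#                original streams
+#   "no"      -> no capture objects at all
+#
+# cap = _get_multicapture("sys")
+# cap.start_capturing()
+# print("hi")                              # lands in the capture buffer
+# assert cap.readouterr().out == "hi\n"
+# cap.stop_capturing()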
+
+
+# CaptureManager and CaptureFixture
+
+
+class CaptureManager:
+ """The capture plugin.
+
+ Ensures that the appropriate capture method is enabled/disabled during
+ collection and each test phase (setup, call, teardown). After each of
+ those points, the captured output is obtained and attached to the
+ collection/runtest report.
+
+ There are two levels of capture:
+
+ * global: enabled by default and can be suppressed by the ``-s``
+ option. This is always enabled/disabled during collection and each test
+ phase.
+
+ * fixture: when a test function or one of its fixtures depends on the
+ ``capsys`` or ``capfd`` fixtures. In this case special handling is
+ needed to ensure the fixtures take precedence over the global capture.
+ """
+
+ def __init__(self, method: "_CaptureMethod") -> None:
+ self._method = method
+ self._global_capturing: Optional[MultiCapture[str]] = None
+ self._capture_fixture: Optional[CaptureFixture[Any]] = None
+
+ def __repr__(self) -> str:
+ return "<CaptureManager _method={!r} _global_capturing={!r} _capture_fixture={!r}>".format(
+ self._method, self._global_capturing, self._capture_fixture
+ )
+
+ def is_capturing(self) -> Union[str, bool]:
+ if self.is_globally_capturing():
+ return "global"
+ if self._capture_fixture:
+ return "fixture %s" % self._capture_fixture.request.fixturename
+ return False
+
+ # Global capturing control
+
+ def is_globally_capturing(self) -> bool:
+ return self._method != "no"
+
+ def start_global_capturing(self) -> None:
+ assert self._global_capturing is None
+ self._global_capturing = _get_multicapture(self._method)
+ self._global_capturing.start_capturing()
+
+ def stop_global_capturing(self) -> None:
+ if self._global_capturing is not None:
+ self._global_capturing.pop_outerr_to_orig()
+ self._global_capturing.stop_capturing()
+ self._global_capturing = None
+
+ def resume_global_capture(self) -> None:
+ # During teardown of the python process, and on rare occasions, capture
+ # attributes can be `None` while trying to resume global capture.
+ if self._global_capturing is not None:
+ self._global_capturing.resume_capturing()
+
+ def suspend_global_capture(self, in_: bool = False) -> None:
+ if self._global_capturing is not None:
+ self._global_capturing.suspend_capturing(in_=in_)
+
+ def suspend(self, in_: bool = False) -> None:
+ # Need to undo local capsys-et-al if it exists before disabling global capture.
+ self.suspend_fixture()
+ self.suspend_global_capture(in_)
+
+ def resume(self) -> None:
+ self.resume_global_capture()
+ self.resume_fixture()
+
+ def read_global_capture(self) -> CaptureResult[str]:
+ assert self._global_capturing is not None
+ return self._global_capturing.readouterr()
+
+ # Fixture Control
+
+ def set_fixture(self, capture_fixture: "CaptureFixture[Any]") -> None:
+ if self._capture_fixture:
+ current_fixture = self._capture_fixture.request.fixturename
+ requested_fixture = capture_fixture.request.fixturename
+ capture_fixture.request.raiseerror(
+ "cannot use {} and {} at the same time".format(
+ requested_fixture, current_fixture
+ )
+ )
+ self._capture_fixture = capture_fixture
+
+ def unset_fixture(self) -> None:
+ self._capture_fixture = None
+
+ def activate_fixture(self) -> None:
+ """If the current item is using ``capsys`` or ``capfd``, activate
+ them so they take precedence over the global capture."""
+ if self._capture_fixture:
+ self._capture_fixture._start()
+
+ def deactivate_fixture(self) -> None:
+ """Deactivate the ``capsys`` or ``capfd`` fixture of this item, if any."""
+ if self._capture_fixture:
+ self._capture_fixture.close()
+
+ def suspend_fixture(self) -> None:
+ if self._capture_fixture:
+ self._capture_fixture._suspend()
+
+ def resume_fixture(self) -> None:
+ if self._capture_fixture:
+ self._capture_fixture._resume()
+
+ # Helper context managers
+
+ @contextlib.contextmanager
+ def global_and_fixture_disabled(self) -> Generator[None, None, None]:
+ """Context manager to temporarily disable global and current fixture capturing."""
+ do_fixture = self._capture_fixture and self._capture_fixture._is_started()
+ if do_fixture:
+ self.suspend_fixture()
+ do_global = self._global_capturing and self._global_capturing.is_started()
+ if do_global:
+ self.suspend_global_capture()
+ try:
+ yield
+ finally:
+ if do_global:
+ self.resume_global_capture()
+ if do_fixture:
+ self.resume_fixture()
+
+ @contextlib.contextmanager
+ def item_capture(self, when: str, item: Item) -> Generator[None, None, None]:
+ self.resume_global_capture()
+ self.activate_fixture()
+ try:
+ yield
+ finally:
+ self.deactivate_fixture()
+ self.suspend_global_capture(in_=False)
+
+ out, err = self.read_global_capture()
+ item.add_report_section(when, "stdout", out)
+ item.add_report_section(when, "stderr", err)
+
+ # Hooks
+
+ @hookimpl(hookwrapper=True)
+ def pytest_make_collect_report(self, collector: Collector):
+ if isinstance(collector, File):
+ self.resume_global_capture()
+ outcome = yield
+ self.suspend_global_capture()
+ out, err = self.read_global_capture()
+ rep = outcome.get_result()
+ if out:
+ rep.sections.append(("Captured stdout", out))
+ if err:
+ rep.sections.append(("Captured stderr", err))
+ else:
+ yield
+
+ @hookimpl(hookwrapper=True)
+ def pytest_runtest_setup(self, item: Item) -> Generator[None, None, None]:
+ with self.item_capture("setup", item):
+ yield
+
+ @hookimpl(hookwrapper=True)
+ def pytest_runtest_call(self, item: Item) -> Generator[None, None, None]:
+ with self.item_capture("call", item):
+ yield
+
+ @hookimpl(hookwrapper=True)
+ def pytest_runtest_teardown(self, item: Item) -> Generator[None, None, None]:
+ with self.item_capture("teardown", item):
+ yield
+
+ @hookimpl(tryfirst=True)
+ def pytest_keyboard_interrupt(self) -> None:
+ self.stop_global_capturing()
+
+ @hookimpl(tryfirst=True)
+ def pytest_internalerror(self) -> None:
+ self.stop_global_capturing()
+
+
+class CaptureFixture(Generic[AnyStr]):
+ """Object returned by the :fixture:`capsys`, :fixture:`capsysbinary`,
+ :fixture:`capfd` and :fixture:`capfdbinary` fixtures."""
+
+ def __init__(
+ self, captureclass, request: SubRequest, *, _ispytest: bool = False
+ ) -> None:
+ check_ispytest(_ispytest)
+ self.captureclass = captureclass
+ self.request = request
+ self._capture: Optional[MultiCapture[AnyStr]] = None
+ self._captured_out = self.captureclass.EMPTY_BUFFER
+ self._captured_err = self.captureclass.EMPTY_BUFFER
+
+ def _start(self) -> None:
+ if self._capture is None:
+ self._capture = MultiCapture(
+ in_=None,
+ out=self.captureclass(1),
+ err=self.captureclass(2),
+ )
+ self._capture.start_capturing()
+
+ def close(self) -> None:
+ if self._capture is not None:
+ out, err = self._capture.pop_outerr_to_orig()
+ self._captured_out += out
+ self._captured_err += err
+ self._capture.stop_capturing()
+ self._capture = None
+
+ def readouterr(self) -> CaptureResult[AnyStr]:
+ """Read and return the captured output so far, resetting the internal
+ buffer.
+
+ :returns:
+ The captured content as a namedtuple with ``out`` and ``err``
+ string attributes.
+ """
+ captured_out, captured_err = self._captured_out, self._captured_err
+ if self._capture is not None:
+ out, err = self._capture.readouterr()
+ captured_out += out
+ captured_err += err
+ self._captured_out = self.captureclass.EMPTY_BUFFER
+ self._captured_err = self.captureclass.EMPTY_BUFFER
+ return CaptureResult(captured_out, captured_err)
+
+ def _suspend(self) -> None:
+ """Suspend this fixture's own capturing temporarily."""
+ if self._capture is not None:
+ self._capture.suspend_capturing()
+
+ def _resume(self) -> None:
+ """Resume this fixture's own capturing temporarily."""
+ if self._capture is not None:
+ self._capture.resume_capturing()
+
+ def _is_started(self) -> bool:
+ """Whether actively capturing -- not disabled or closed."""
+ if self._capture is not None:
+ return self._capture.is_started()
+ return False
+
+ @contextlib.contextmanager
+ def disabled(self) -> Generator[None, None, None]:
+ """Temporarily disable capturing while inside the ``with`` block."""
+ capmanager = self.request.config.pluginmanager.getplugin("capturemanager")
+ with capmanager.global_and_fixture_disabled():
+ yield
+
+
+# The fixtures.
+
+
+@fixture
+def capsys(request: SubRequest) -> Generator[CaptureFixture[str], None, None]:
+ """Enable text capturing of writes to ``sys.stdout`` and ``sys.stderr``.
+
+ The captured output is made available via ``capsys.readouterr()`` method
+ calls, which return a ``(out, err)`` namedtuple.
+ ``out`` and ``err`` will be ``text`` objects.
+ """
+ capman = request.config.pluginmanager.getplugin("capturemanager")
+ capture_fixture = CaptureFixture[str](SysCapture, request, _ispytest=True)
+ capman.set_fixture(capture_fixture)
+ capture_fixture._start()
+ yield capture_fixture
+ capture_fixture.close()
+ capman.unset_fixture()
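+
+
+# Illustrative sketch (not part of the upstream module): a test using the
+# ``capsys`` fixture defined above; the test name is a placeholder.
+#
+# def test_greeting_is_printed(capsys):
+#     print("hello")
+#     captured = capsys.readouterr()
+#     assert captured.out == "hello\n"
+#     assert captured.err == ""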
+
+
+@fixture
+def capsysbinary(request: SubRequest) -> Generator[CaptureFixture[bytes], None, None]:
+ """Enable bytes capturing of writes to ``sys.stdout`` and ``sys.stderr``.
+
+ The captured output is made available via ``capsysbinary.readouterr()``
+ method calls, which return a ``(out, err)`` namedtuple.
+ ``out`` and ``err`` will be ``bytes`` objects.
+ """
+ capman = request.config.pluginmanager.getplugin("capturemanager")
+ capture_fixture = CaptureFixture[bytes](SysCaptureBinary, request, _ispytest=True)
+ capman.set_fixture(capture_fixture)
+ capture_fixture._start()
+ yield capture_fixture
+ capture_fixture.close()
+ capman.unset_fixture()
+
+
+@fixture
+def capfd(request: SubRequest) -> Generator[CaptureFixture[str], None, None]:
+ """Enable text capturing of writes to file descriptors ``1`` and ``2``.
+
+ The captured output is made available via ``capfd.readouterr()`` method
+ calls, which return a ``(out, err)`` namedtuple.
+ ``out`` and ``err`` will be ``text`` objects.
+ """
+ capman = request.config.pluginmanager.getplugin("capturemanager")
+ capture_fixture = CaptureFixture[str](FDCapture, request, _ispytest=True)
+ capman.set_fixture(capture_fixture)
+ capture_fixture._start()
+ yield capture_fixture
+ capture_fixture.close()
+ capman.unset_fixture()
+
+
+@fixture
+def capfdbinary(request: SubRequest) -> Generator[CaptureFixture[bytes], None, None]:
+ """Enable bytes capturing of writes to file descriptors ``1`` and ``2``.
+
+ The captured output is made available via ``capfdbinary.readouterr()``
+ method calls, which return a ``(out, err)`` namedtuple.
+ ``out`` and ``err`` will be ``bytes`` objects.
+ """
+ capman = request.config.pluginmanager.getplugin("capturemanager")
+ capture_fixture = CaptureFixture[bytes](FDCaptureBinary, request, _ispytest=True)
+ capman.set_fixture(capture_fixture)
+ capture_fixture._start()
+ yield capture_fixture
+ capture_fixture.close()
+ capman.unset_fixture()
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/compat.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/compat.py
new file mode 100644
index 0000000000..7703dee8c5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/compat.py
@@ -0,0 +1,417 @@
+"""Python version compatibility code."""
+import enum
+import functools
+import inspect
+import os
+import sys
+from contextlib import contextmanager
+from inspect import Parameter
+from inspect import signature
+from pathlib import Path
+from typing import Any
+from typing import Callable
+from typing import Generic
+from typing import Optional
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+import attr
+import py
+
+if TYPE_CHECKING:
+ from typing import NoReturn
+ from typing_extensions import Final
+
+
+_T = TypeVar("_T")
+_S = TypeVar("_S")
+
+#: Constant to prepare for replacing pylib paths with lazy proxies later on.
+# Intended for removal in pytest 8.0 or 9.0.
+
+# fmt: off
+# intentional space to create a fake difference for the verification
+LEGACY_PATH = py.path. local
+# fmt: on
+
+
+def legacy_path(path: Union[str, "os.PathLike[str]"]) -> LEGACY_PATH:
+ """Internal wrapper to prepare lazy proxies for legacy_path instances"""
+ return LEGACY_PATH(path)
+
+
+# fmt: off
+# Singleton type for NOTSET, as described in:
+# https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions
+class NotSetType(enum.Enum):
+ token = 0
+NOTSET: "Final" = NotSetType.token # noqa: E305
+# fmt: on
+
+if sys.version_info >= (3, 8):
+ from importlib import metadata as importlib_metadata
+else:
+ import importlib_metadata # noqa: F401
+
+
+def _format_args(func: Callable[..., Any]) -> str:
+ return str(signature(func))
+
+
+def is_generator(func: object) -> bool:
+ genfunc = inspect.isgeneratorfunction(func)
+ return genfunc and not iscoroutinefunction(func)
+
+
+def iscoroutinefunction(func: object) -> bool:
+ """Return True if func is a coroutine function (a function defined with async
+ def syntax, and doesn't contain yield), or a function decorated with
+ @asyncio.coroutine.
+
+ Note: copied and modified from Python 3.5's builtin coroutines.py to avoid
+ importing asyncio directly, which in turn also initializes the "logging"
+ module as a side effect (see issue #8).
+ """
+ return inspect.iscoroutinefunction(func) or getattr(func, "_is_coroutine", False)
+
+
+def is_async_function(func: object) -> bool:
+ """Return True if the given function seems to be an async function or
+ an async generator."""
+ return iscoroutinefunction(func) or inspect.isasyncgenfunction(func)
+
+
+def getlocation(function, curdir: Optional[str] = None) -> str:
+ function = get_real_func(function)
+ fn = Path(inspect.getfile(function))
+ lineno = function.__code__.co_firstlineno
+ if curdir is not None:
+ try:
+ relfn = fn.relative_to(curdir)
+ except ValueError:
+ pass
+ else:
+ return "%s:%d" % (relfn, lineno + 1)
+ return "%s:%d" % (fn, lineno + 1)
+
+
+def num_mock_patch_args(function) -> int:
+ """Return number of arguments used up by mock arguments (if any)."""
+ patchings = getattr(function, "patchings", None)
+ if not patchings:
+ return 0
+
+ mock_sentinel = getattr(sys.modules.get("mock"), "DEFAULT", object())
+ ut_mock_sentinel = getattr(sys.modules.get("unittest.mock"), "DEFAULT", object())
+
+ return len(
+ [
+ p
+ for p in patchings
+ if not p.attribute_name
+ and (p.new is mock_sentinel or p.new is ut_mock_sentinel)
+ ]
+ )
+
+
+def getfuncargnames(
+ function: Callable[..., Any],
+ *,
+ name: str = "",
+ is_method: bool = False,
+ cls: Optional[type] = None,
+) -> Tuple[str, ...]:
+ """Return the names of a function's mandatory arguments.
+
+ Should return the names of all function arguments that:
+ * Aren't bound to an instance or type as in instance or class methods.
+ * Don't have default values.
+ * Aren't bound with functools.partial.
+ * Aren't replaced with mocks.
+
+ The is_method and cls arguments indicate that the function should
+ be treated as a bound method even though it is not one, unless (in
+ the case of cls) the function is a static method.
+
+ The name parameter should be the original name in which the function was collected.
+ """
+ # TODO(RonnyPfannschmidt): This function should be refactored when we
+ # revisit fixtures. The fixture mechanism should ask the node for
+ # the fixture names, and not try to obtain directly from the
+ # function object well after collection has occurred.
+
+ # The parameters attribute of a Signature object contains an
+ # ordered mapping of parameter names to Parameter instances. This
+ # creates a tuple of the names of the parameters that don't have
+ # defaults.
+ try:
+ parameters = signature(function).parameters
+ except (ValueError, TypeError) as e:
+ from _pytest.outcomes import fail
+
+ fail(
+ f"Could not determine arguments of {function!r}: {e}",
+ pytrace=False,
+ )
+
+ arg_names = tuple(
+ p.name
+ for p in parameters.values()
+ if (
+ p.kind is Parameter.POSITIONAL_OR_KEYWORD
+ or p.kind is Parameter.KEYWORD_ONLY
+ )
+ and p.default is Parameter.empty
+ )
+ if not name:
+ name = function.__name__
+
+ # If this function should be treated as a bound method even though
+ # it's passed as an unbound method or function, remove the first
+ # parameter name.
+ if is_method or (
+ # Not using `getattr` because we don't want to resolve the staticmethod.
+ # Not using `cls.__dict__` because we want to check the entire MRO.
+ cls
+ and not isinstance(
+ inspect.getattr_static(cls, name, default=None), staticmethod
+ )
+ ):
+ arg_names = arg_names[1:]
+ # Remove any names that will be replaced with mocks.
+ if hasattr(function, "__wrapped__"):
+ arg_names = arg_names[num_mock_patch_args(function) :]
+ return arg_names
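+
+
+# Illustrative sketch (not part of the upstream module): what
+# getfuncargnames() returns; the function, class and argument names below
+# are placeholders.
+#
+# def use(tmp_path, monkeypatch, retries=3):
+#     ...
+#
+# getfuncargnames(use)  # -> ("tmp_path", "monkeypatch"); "retries" has a default
+#
+# class TestSomething:
+#     def test_it(self, cache):
+#         ...
+#
+# getfuncargnames(TestSomething.test_it, cls=TestSomething)  # -> ("cache",)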
+
+
+if sys.version_info < (3, 7):
+
+ @contextmanager
+ def nullcontext():
+ yield
+
+
+else:
+ from contextlib import nullcontext as nullcontext # noqa: F401
+
+
+def get_default_arg_names(function: Callable[..., Any]) -> Tuple[str, ...]:
+ # Note: this code intentionally mirrors the code at the beginning of
+ # getfuncargnames, to get the arguments which were excluded from its result
+ # because they had default values.
+ return tuple(
+ p.name
+ for p in signature(function).parameters.values()
+ if p.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY)
+ and p.default is not Parameter.empty
+ )
+
+
+_non_printable_ascii_translate_table = {
+ i: f"\\x{i:02x}" for i in range(128) if i not in range(32, 127)
+}
+_non_printable_ascii_translate_table.update(
+ {ord("\t"): "\\t", ord("\r"): "\\r", ord("\n"): "\\n"}
+)
+
+
+def _translate_non_printable(s: str) -> str:
+ return s.translate(_non_printable_ascii_translate_table)
+
+
+STRING_TYPES = bytes, str
+
+
+def _bytes_to_ascii(val: bytes) -> str:
+ return val.decode("ascii", "backslashreplace")
+
+
+def ascii_escaped(val: Union[bytes, str]) -> str:
+ r"""If val is pure ASCII, return it as an str, otherwise, escape
+ bytes objects into a sequence of escaped bytes:
+
+ b'\xc3\xb4\xc5\xd6' -> r'\xc3\xb4\xc5\xd6'
+
+ and escapes unicode objects into a sequence of escaped unicode
+ ids, e.g.:
+
+ r'4\nV\U00043efa\x0eMXWB\x1e\u3028\u15fd\xcd\U0007d944'
+
+ Note:
+ The obvious "v.decode('unicode-escape')" will return
+ valid UTF-8 unicode if it finds them in bytes, but we
+ want to return escaped bytes for any byte, even if they match
+ a UTF-8 string.
+ """
+ if isinstance(val, bytes):
+ ret = _bytes_to_ascii(val)
+ else:
+ ret = val.encode("unicode_escape").decode("ascii")
+ return _translate_non_printable(ret)
+
+
+@attr.s
+class _PytestWrapper:
+ """Dummy wrapper around a function object for internal use only.
+
+ Used to correctly unwrap the underlying function object when we are
+ creating fixtures, because we wrap the function object ourselves with a
+ decorator to issue warnings when the fixture function is called directly.
+ """
+
+ obj = attr.ib()
+
+
+def get_real_func(obj):
+ """Get the real function object of the (possibly) wrapped object by
+ functools.wraps or functools.partial."""
+ start_obj = obj
+ for i in range(100):
+ # __pytest_wrapped__ is set by @pytest.fixture when wrapping the fixture function
+ # to trigger a warning if it gets called directly instead of by pytest: we don't
+ # want to unwrap further than this otherwise we lose useful wrappings like @mock.patch (#3774)
+ new_obj = getattr(obj, "__pytest_wrapped__", None)
+ if isinstance(new_obj, _PytestWrapper):
+ obj = new_obj.obj
+ break
+ new_obj = getattr(obj, "__wrapped__", None)
+ if new_obj is None:
+ break
+ obj = new_obj
+ else:
+ from _pytest._io.saferepr import saferepr
+
+ raise ValueError(
+ ("could not find real function of {start}\nstopped at {current}").format(
+ start=saferepr(start_obj), current=saferepr(obj)
+ )
+ )
+ if isinstance(obj, functools.partial):
+ obj = obj.func
+ return obj
+
+
+def get_real_method(obj, holder):
+ """Attempt to obtain the real function object that might be wrapping
+ ``obj``, while at the same time returning a bound method to ``holder`` if
+ the original object was a bound method."""
+ try:
+ is_method = hasattr(obj, "__func__")
+ obj = get_real_func(obj)
+ except Exception: # pragma: no cover
+ return obj
+ if is_method and hasattr(obj, "__get__") and callable(obj.__get__):
+ obj = obj.__get__(holder)
+ return obj
+
+
+def getimfunc(func):
+ try:
+ return func.__func__
+ except AttributeError:
+ return func
+
+
+def safe_getattr(object: Any, name: str, default: Any) -> Any:
+ """Like getattr but return default upon any Exception or any OutcomeException.
+
+ Attribute access can potentially fail for 'evil' Python objects.
+ See issue #214.
+ It catches OutcomeException because of #2490 (issue #580): new outcomes
+ are derived from BaseException instead of Exception (for more details
+ check #2707).
+ """
+ from _pytest.outcomes import TEST_OUTCOME
+
+ try:
+ return getattr(object, name, default)
+ except TEST_OUTCOME:
+ return default
+
+
+def safe_isclass(obj: object) -> bool:
+ """Ignore any exception via isinstance on Python 3."""
+ try:
+ return inspect.isclass(obj)
+ except Exception:
+ return False
+
+
+if TYPE_CHECKING:
+ if sys.version_info >= (3, 8):
+ from typing import final as final
+ else:
+ from typing_extensions import final as final
+elif sys.version_info >= (3, 8):
+ from typing import final as final
+else:
+
+ def final(f):
+ return f
+
+
+if sys.version_info >= (3, 8):
+ from functools import cached_property as cached_property
+else:
+ from typing import overload
+ from typing import Type
+
+ class cached_property(Generic[_S, _T]):
+ __slots__ = ("func", "__doc__")
+
+ def __init__(self, func: Callable[[_S], _T]) -> None:
+ self.func = func
+ self.__doc__ = func.__doc__
+
+ @overload
+ def __get__(
+ self, instance: None, owner: Optional[Type[_S]] = ...
+ ) -> "cached_property[_S, _T]":
+ ...
+
+ @overload
+ def __get__(self, instance: _S, owner: Optional[Type[_S]] = ...) -> _T:
+ ...
+
+ def __get__(self, instance, owner=None):
+ if instance is None:
+ return self
+ value = instance.__dict__[self.func.__name__] = self.func(instance)
+ return value
+
+
+# Perform exhaustiveness checking.
+#
+# Consider this example:
+#
+# MyUnion = Union[int, str]
+#
+# def handle(x: MyUnion) -> int:
+# if isinstance(x, int):
+# return 1
+# elif isinstance(x, str):
+# return 2
+# else:
+# raise Exception('unreachable')
+#
+# Now suppose we add a new variant:
+#
+# MyUnion = Union[int, str, bytes]
+#
+# After doing this, we must remember to go and update the handle
+# function to handle the new variant.
+#
+# With `assert_never` we can do better:
+#
+# # instead of: raise Exception('unreachable')
+# return assert_never(x)
+#
+# Now, if we forget to handle the new variant, the type-checker will emit a
+# compile-time error, instead of the runtime error we would have gotten
+# previously.
+#
+# This also works for Enums (if you use `is` to compare) and Literals.
+def assert_never(value: "NoReturn") -> "NoReturn":
+ assert False, f"Unhandled value: {value} ({type(value).__name__})"
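+
+
+# Illustrative sketch (not part of the upstream module): exhaustiveness
+# checking with assert_never(), using a hypothetical two-variant union.
+#
+# def describe(value: Union[int, str]) -> str:
+#     if isinstance(value, int):
+#         return "an int"
+#     elif isinstance(value, str):
+#         return "a str"
+#     else:
+#         return assert_never(value)  # a type checker flags unhandled variants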
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/__init__.py
new file mode 100644
index 0000000000..ebf6e1b950
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/__init__.py
@@ -0,0 +1,1697 @@
+"""Command line options, ini-file and conftest.py processing."""
+import argparse
+import collections.abc
+import contextlib
+import copy
+import enum
+import inspect
+import os
+import re
+import shlex
+import sys
+import types
+import warnings
+from functools import lru_cache
+from pathlib import Path
+from textwrap import dedent
+from types import TracebackType
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Generator
+from typing import IO
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import TextIO
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+import attr
+from pluggy import HookimplMarker
+from pluggy import HookspecMarker
+from pluggy import PluginManager
+
+import _pytest._code
+import _pytest.deprecated
+import _pytest.hookspec
+from .exceptions import PrintHelp as PrintHelp
+from .exceptions import UsageError as UsageError
+from .findpaths import determine_setup
+from _pytest._code import ExceptionInfo
+from _pytest._code import filter_traceback
+from _pytest._io import TerminalWriter
+from _pytest.compat import final
+from _pytest.compat import importlib_metadata
+from _pytest.outcomes import fail
+from _pytest.outcomes import Skipped
+from _pytest.pathlib import absolutepath
+from _pytest.pathlib import bestrelpath
+from _pytest.pathlib import import_path
+from _pytest.pathlib import ImportMode
+from _pytest.pathlib import resolve_package_path
+from _pytest.stash import Stash
+from _pytest.warning_types import PytestConfigWarning
+
+if TYPE_CHECKING:
+
+ from _pytest._code.code import _TracebackStyle
+ from _pytest.terminal import TerminalReporter
+ from .argparsing import Argument
+
+
+_PluggyPlugin = object
+"""A type to represent plugin objects.
+
+Plugins can be any namespace, so we can't narrow it down much, but we use an
+alias to make the intent clear.
+
+Ideally this type would be provided by pluggy itself.
+"""
+
+
+hookimpl = HookimplMarker("pytest")
+hookspec = HookspecMarker("pytest")
+
+
+@final
+class ExitCode(enum.IntEnum):
+ """Encodes the valid exit codes by pytest.
+
+ Currently users and plugins may supply other exit codes as well.
+
+ .. versionadded:: 5.0
+ """
+
+ #: Tests passed.
+ OK = 0
+ #: Tests failed.
+ TESTS_FAILED = 1
+ #: pytest was interrupted.
+ INTERRUPTED = 2
+ #: An internal error got in the way.
+ INTERNAL_ERROR = 3
+ #: pytest was misused.
+ USAGE_ERROR = 4
+ #: pytest couldn't find tests.
+ NO_TESTS_COLLECTED = 5
+
+
+class ConftestImportFailure(Exception):
+ def __init__(
+ self,
+ path: Path,
+ excinfo: Tuple[Type[Exception], Exception, TracebackType],
+ ) -> None:
+ super().__init__(path, excinfo)
+ self.path = path
+ self.excinfo = excinfo
+
+ def __str__(self) -> str:
+ return "{}: {} (from {})".format(
+ self.excinfo[0].__name__, self.excinfo[1], self.path
+ )
+
+
+def filter_traceback_for_conftest_import_failure(
+ entry: _pytest._code.TracebackEntry,
+) -> bool:
+ """Filter tracebacks entries which point to pytest internals or importlib.
+
+ Make a special case for importlib because we use it to import test modules and conftest files
+ in _pytest.pathlib.import_path.
+ """
+ return filter_traceback(entry) and "importlib" not in str(entry.path).split(os.sep)
+
+
+def main(
+ args: Optional[Union[List[str], "os.PathLike[str]"]] = None,
+ plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] = None,
+) -> Union[int, ExitCode]:
+ """Perform an in-process test run.
+
+ :param args: List of command line arguments.
+ :param plugins: List of plugin objects to be auto-registered during initialization.
+
+ :returns: An exit code.
+ """
+ try:
+ try:
+ config = _prepareconfig(args, plugins)
+ except ConftestImportFailure as e:
+ exc_info = ExceptionInfo.from_exc_info(e.excinfo)
+ tw = TerminalWriter(sys.stderr)
+ tw.line(f"ImportError while loading conftest '{e.path}'.", red=True)
+ exc_info.traceback = exc_info.traceback.filter(
+ filter_traceback_for_conftest_import_failure
+ )
+ exc_repr = (
+ exc_info.getrepr(style="short", chain=False)
+ if exc_info.traceback
+ else exc_info.exconly()
+ )
+ formatted_tb = str(exc_repr)
+ for line in formatted_tb.splitlines():
+ tw.line(line.rstrip(), red=True)
+ return ExitCode.USAGE_ERROR
+ else:
+ try:
+ ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
+ config=config
+ )
+ try:
+ return ExitCode(ret)
+ except ValueError:
+ return ret
+ finally:
+ config._ensure_unconfigure()
+ except UsageError as e:
+ tw = TerminalWriter(sys.stderr)
+ for msg in e.args:
+ tw.line(f"ERROR: {msg}\n", red=True)
+ return ExitCode.USAGE_ERROR
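+
+
+# Illustrative sketch (not part of the upstream module): calling main()
+# programmatically and comparing its result against ExitCode; the test path
+# is a placeholder.
+#
+# exit_code = main(["tests/test_example.py", "-q"])
+# if exit_code == ExitCode.NO_TESTS_COLLECTED:
+#     print("nothing to run")
+# elif exit_code != ExitCode.OK:
+#     print(f"pytest failed with {exit_code!r}")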
+
+
+def console_main() -> int:
+ """The CLI entry point of pytest.
+
+ This function is not meant for programmable use; use `main()` instead.
+ """
+ # https://docs.python.org/3/library/signal.html#note-on-sigpipe
+ try:
+ code = main()
+ sys.stdout.flush()
+ return code
+ except BrokenPipeError:
+ # Python flushes standard streams on exit; redirect remaining output
+ # to devnull to avoid another BrokenPipeError at shutdown
+ devnull = os.open(os.devnull, os.O_WRONLY)
+ os.dup2(devnull, sys.stdout.fileno())
+ return 1 # Python exits with error code 1 on EPIPE
+
+
+class cmdline: # compatibility namespace
+ main = staticmethod(main)
+
+
+def filename_arg(path: str, optname: str) -> str:
+ """Argparse type validator for filename arguments.
+
+ :path: Path of filename.
+ :optname: Name of the option.
+ """
+ if os.path.isdir(path):
+ raise UsageError(f"{optname} must be a filename, given: {path}")
+ return path
+
+
+def directory_arg(path: str, optname: str) -> str:
+ """Argparse type validator for directory arguments.
+
+ :path: Path of directory.
+ :optname: Name of the option.
+ """
+ if not os.path.isdir(path):
+ raise UsageError(f"{optname} must be a directory, given: {path}")
+ return path
+
+
+# Plugins that cannot be disabled via "-p no:X" currently.
+essential_plugins = (
+ "mark",
+ "main",
+ "runner",
+ "fixtures",
+ "helpconfig", # Provides -p.
+)
+
+default_plugins = essential_plugins + (
+ "python",
+ "terminal",
+ "debugging",
+ "unittest",
+ "capture",
+ "skipping",
+ "legacypath",
+ "tmpdir",
+ "monkeypatch",
+ "recwarn",
+ "pastebin",
+ "nose",
+ "assertion",
+ "junitxml",
+ "doctest",
+ "cacheprovider",
+ "freeze_support",
+ "setuponly",
+ "setupplan",
+ "stepwise",
+ "warnings",
+ "logging",
+ "reports",
+ "python_path",
+ *(["unraisableexception", "threadexception"] if sys.version_info >= (3, 8) else []),
+ "faulthandler",
+)
+
+builtin_plugins = set(default_plugins)
+builtin_plugins.add("pytester")
+builtin_plugins.add("pytester_assertions")
+
+
+def get_config(
+ args: Optional[List[str]] = None,
+ plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] = None,
+) -> "Config":
+ # subsequent calls to main will create a fresh instance
+ pluginmanager = PytestPluginManager()
+ config = Config(
+ pluginmanager,
+ invocation_params=Config.InvocationParams(
+ args=args or (),
+ plugins=plugins,
+ dir=Path.cwd(),
+ ),
+ )
+
+ if args is not None:
+ # Handle any "-p no:plugin" args.
+ pluginmanager.consider_preparse(args, exclude_only=True)
+
+ for spec in default_plugins:
+ pluginmanager.import_plugin(spec)
+
+ return config
+
+
+def get_plugin_manager() -> "PytestPluginManager":
+ """Obtain a new instance of the
+ :py:class:`pytest.PytestPluginManager`, with default plugins
+ already loaded.
+
+ This function can be used by integration with other tools, like hooking
+ into pytest to run tests into an IDE.
+ """
+ return get_config().pluginmanager
+
+
+def _prepareconfig(
+ args: Optional[Union[List[str], "os.PathLike[str]"]] = None,
+ plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] = None,
+) -> "Config":
+ if args is None:
+ args = sys.argv[1:]
+ elif isinstance(args, os.PathLike):
+ args = [os.fspath(args)]
+ elif not isinstance(args, list):
+ msg = "`args` parameter expected to be a list of strings, got: {!r} (type: {})"
+ raise TypeError(msg.format(args, type(args)))
+
+ config = get_config(args, plugins)
+ pluginmanager = config.pluginmanager
+ try:
+ if plugins:
+ for plugin in plugins:
+ if isinstance(plugin, str):
+ pluginmanager.consider_pluginarg(plugin)
+ else:
+ pluginmanager.register(plugin)
+ config = pluginmanager.hook.pytest_cmdline_parse(
+ pluginmanager=pluginmanager, args=args
+ )
+ return config
+ except BaseException:
+ config._ensure_unconfigure()
+ raise
+
+
+def _get_directory(path: Path) -> Path:
+ """Get the directory of a path - itself if already a directory."""
+ if path.is_file():
+ return path.parent
+ else:
+ return path
+
+
+@final
+class PytestPluginManager(PluginManager):
+ """A :py:class:`pluggy.PluginManager <pluggy.PluginManager>` with
+ additional pytest-specific functionality:
+
+ * Loading plugins from the command line, ``PYTEST_PLUGINS`` env variable and
+ ``pytest_plugins`` global variables found in plugins being loaded.
+ * ``conftest.py`` loading during start-up.
+ """
+
+ def __init__(self) -> None:
+ import _pytest.assertion
+
+ super().__init__("pytest")
+ # The objects are module objects, only used generically.
+ self._conftest_plugins: Set[types.ModuleType] = set()
+
+ # State related to local conftest plugins.
+ self._dirpath2confmods: Dict[Path, List[types.ModuleType]] = {}
+ self._conftestpath2mod: Dict[Path, types.ModuleType] = {}
+ self._confcutdir: Optional[Path] = None
+ self._noconftest = False
+
+ # _getconftestmodules()'s call to _get_directory() causes a stat
+ # storm when it's called potentially thousands of times in a test
+ # session (#9478), often with the same path, so cache it.
+ self._get_directory = lru_cache(256)(_get_directory)
+
+ self._duplicatepaths: Set[Path] = set()
+
+ # plugins that were explicitly skipped with pytest.skip
+ # list of (module name, skip reason)
+ # previously we would issue a warning when a plugin was skipped, but
+ # since we refactored warnings as first-class citizens of Config, they are
+ # just stored here to be used later.
+ self.skipped_plugins: List[Tuple[str, str]] = []
+
+ self.add_hookspecs(_pytest.hookspec)
+ self.register(self)
+ if os.environ.get("PYTEST_DEBUG"):
+ err: IO[str] = sys.stderr
+ encoding: str = getattr(err, "encoding", "utf8")
+ try:
+ err = open(
+ os.dup(err.fileno()),
+ mode=err.mode,
+ buffering=1,
+ encoding=encoding,
+ )
+ except Exception:
+ pass
+ self.trace.root.setwriter(err.write)
+ self.enable_tracing()
+
+ # Config._consider_importhook will set a real object if required.
+ self.rewrite_hook = _pytest.assertion.DummyRewriteHook()
+ # Used to know when we are importing conftests after the pytest_configure stage.
+ self._configured = False
+
+ def parse_hookimpl_opts(self, plugin: _PluggyPlugin, name: str):
+ # pytest hooks are always prefixed with "pytest_",
+ # so we avoid accessing possibly non-readable attributes
+ # (see issue #1073).
+ if not name.startswith("pytest_"):
+ return
+ # Ignore names which can not be hooks.
+ if name == "pytest_plugins":
+ return
+
+ method = getattr(plugin, name)
+ opts = super().parse_hookimpl_opts(plugin, name)
+
+ # Consider only actual functions for hooks (#3775).
+ if not inspect.isroutine(method):
+ return
+
+ # Collect unmarked hooks as long as they have the "pytest_" prefix.
+ if opts is None and name.startswith("pytest_"):
+ opts = {}
+ if opts is not None:
+ # TODO: DeprecationWarning, people should use hookimpl
+ # https://github.com/pytest-dev/pytest/issues/4562
+ known_marks = {m.name for m in getattr(method, "pytestmark", [])}
+
+ for name in ("tryfirst", "trylast", "optionalhook", "hookwrapper"):
+ opts.setdefault(name, hasattr(method, name) or name in known_marks)
+ return opts
+
+ def parse_hookspec_opts(self, module_or_class, name: str):
+ opts = super().parse_hookspec_opts(module_or_class, name)
+ if opts is None:
+ method = getattr(module_or_class, name)
+
+ if name.startswith("pytest_"):
+ # todo: deprecate hookspec hacks
+ # https://github.com/pytest-dev/pytest/issues/4562
+ known_marks = {m.name for m in getattr(method, "pytestmark", [])}
+ opts = {
+ "firstresult": hasattr(method, "firstresult")
+ or "firstresult" in known_marks,
+ "historic": hasattr(method, "historic")
+ or "historic" in known_marks,
+ }
+ return opts
+
+ def register(
+ self, plugin: _PluggyPlugin, name: Optional[str] = None
+ ) -> Optional[str]:
+ if name in _pytest.deprecated.DEPRECATED_EXTERNAL_PLUGINS:
+ warnings.warn(
+ PytestConfigWarning(
+ "{} plugin has been merged into the core, "
+ "please remove it from your requirements.".format(
+ name.replace("_", "-")
+ )
+ )
+ )
+ return None
+ ret: Optional[str] = super().register(plugin, name)
+ if ret:
+ self.hook.pytest_plugin_registered.call_historic(
+ kwargs=dict(plugin=plugin, manager=self)
+ )
+
+ if isinstance(plugin, types.ModuleType):
+ self.consider_module(plugin)
+ return ret
+
+ def getplugin(self, name: str):
+ # Support the deprecated naming because plugins (e.g. xdist) still use it.
+ plugin: Optional[_PluggyPlugin] = self.get_plugin(name)
+ return plugin
+
+ def hasplugin(self, name: str) -> bool:
+ """Return whether a plugin with the given name is registered."""
+ return bool(self.get_plugin(name))
+
+ def pytest_configure(self, config: "Config") -> None:
+ """:meta private:"""
+ # XXX now that the pluginmanager exposes hookimpl(tryfirst...)
+ # we should remove tryfirst/trylast as markers.
+ config.addinivalue_line(
+ "markers",
+ "tryfirst: mark a hook implementation function such that the "
+ "plugin machinery will try to call it first/as early as possible.",
+ )
+ config.addinivalue_line(
+ "markers",
+ "trylast: mark a hook implementation function such that the "
+ "plugin machinery will try to call it last/as late as possible.",
+ )
+ self._configured = True
+
+ #
+ # Internal API for local conftest plugin handling.
+ #
+ def _set_initial_conftests(
+ self, namespace: argparse.Namespace, rootpath: Path
+ ) -> None:
+ """Load initial conftest files given a preparsed "namespace".
+
+ As conftest files may add their own command line options which have
+ arguments ('--my-opt somepath') we might get some false positives.
+ All builtin and 3rd party plugins will have been loaded, however, so
+ common options will not confuse our logic here.
+ """
+ current = Path.cwd()
+ self._confcutdir = (
+ absolutepath(current / namespace.confcutdir)
+ if namespace.confcutdir
+ else None
+ )
+ self._noconftest = namespace.noconftest
+ self._using_pyargs = namespace.pyargs
+ testpaths = namespace.file_or_dir
+ foundanchor = False
+ for testpath in testpaths:
+ path = str(testpath)
+ # remove node-id syntax
+ i = path.find("::")
+ if i != -1:
+ path = path[:i]
+ anchor = absolutepath(current / path)
+ if anchor.exists(): # we found some file object
+ self._try_load_conftest(anchor, namespace.importmode, rootpath)
+ foundanchor = True
+ if not foundanchor:
+ self._try_load_conftest(current, namespace.importmode, rootpath)
+
+ def _try_load_conftest(
+ self, anchor: Path, importmode: Union[str, ImportMode], rootpath: Path
+ ) -> None:
+ self._getconftestmodules(anchor, importmode, rootpath)
+ # let's also consider test* subdirs
+ if anchor.is_dir():
+ for x in anchor.glob("test*"):
+ if x.is_dir():
+ self._getconftestmodules(x, importmode, rootpath)
+
+ def _getconftestmodules(
+ self, path: Path, importmode: Union[str, ImportMode], rootpath: Path
+ ) -> List[types.ModuleType]:
+ if self._noconftest:
+ return []
+
+ directory = self._get_directory(path)
+
+ # Optimization: avoid repeated searches in the same directory.
+ # Assumes always called with same importmode and rootpath.
+ existing_clist = self._dirpath2confmods.get(directory)
+ if existing_clist is not None:
+ return existing_clist
+
+ # XXX these days we may rather want to use config.rootpath
+ # and allow users to opt into looking into the rootdir parent
+ # directories instead of requiring them to specify confcutdir.
+ clist = []
+ confcutdir_parents = self._confcutdir.parents if self._confcutdir else []
+ for parent in reversed((directory, *directory.parents)):
+ if parent in confcutdir_parents:
+ continue
+ conftestpath = parent / "conftest.py"
+ if conftestpath.is_file():
+ mod = self._importconftest(conftestpath, importmode, rootpath)
+ clist.append(mod)
+ self._dirpath2confmods[directory] = clist
+ return clist
+
+ def _rget_with_confmod(
+ self,
+ name: str,
+ path: Path,
+ importmode: Union[str, ImportMode],
+ rootpath: Path,
+ ) -> Tuple[types.ModuleType, Any]:
+ modules = self._getconftestmodules(path, importmode, rootpath=rootpath)
+ for mod in reversed(modules):
+ try:
+ return mod, getattr(mod, name)
+ except AttributeError:
+ continue
+ raise KeyError(name)
+
+ def _importconftest(
+ self, conftestpath: Path, importmode: Union[str, ImportMode], rootpath: Path
+ ) -> types.ModuleType:
+ # Use a resolved Path object as key to avoid loading the same conftest
+ # twice with build systems that create build directories containing
+ # symlinks to actual files.
+ # Using Path().resolve() is better than py.path.realpath because
+ # it resolves to the correct path/drive in case-insensitive file systems (#5792)
+ key = conftestpath.resolve()
+
+ with contextlib.suppress(KeyError):
+ return self._conftestpath2mod[key]
+
+ pkgpath = resolve_package_path(conftestpath)
+ if pkgpath is None:
+ _ensure_removed_sysmodule(conftestpath.stem)
+
+ try:
+ mod = import_path(conftestpath, mode=importmode, root=rootpath)
+ except Exception as e:
+ assert e.__traceback__ is not None
+ exc_info = (type(e), e, e.__traceback__)
+ raise ConftestImportFailure(conftestpath, exc_info) from e
+
+ self._check_non_top_pytest_plugins(mod, conftestpath)
+
+ self._conftest_plugins.add(mod)
+ self._conftestpath2mod[key] = mod
+ dirpath = conftestpath.parent
+ if dirpath in self._dirpath2confmods:
+ for path, mods in self._dirpath2confmods.items():
+ if (path and dirpath in path.parents) or path == dirpath:
+ assert mod not in mods
+ mods.append(mod)
+ self.trace(f"loading conftestmodule {mod!r}")
+ self.consider_conftest(mod)
+ return mod
+
+ def _check_non_top_pytest_plugins(
+ self,
+ mod: types.ModuleType,
+ conftestpath: Path,
+ ) -> None:
+ if (
+ hasattr(mod, "pytest_plugins")
+ and self._configured
+ and not self._using_pyargs
+ ):
+ msg = (
+ "Defining 'pytest_plugins' in a non-top-level conftest is no longer supported:\n"
+ "It affects the entire test suite instead of just below the conftest as expected.\n"
+ " {}\n"
+ "Please move it to a top level conftest file at the rootdir:\n"
+ " {}\n"
+ "For more information, visit:\n"
+ " https://docs.pytest.org/en/stable/deprecations.html#pytest-plugins-in-non-top-level-conftest-files"
+ )
+ fail(msg.format(conftestpath, self._confcutdir), pytrace=False)
+
+ #
+ # API for bootstrapping plugin loading
+ #
+ #
+
+ def consider_preparse(
+ self, args: Sequence[str], *, exclude_only: bool = False
+ ) -> None:
+ """:meta private:"""
+ i = 0
+ n = len(args)
+ while i < n:
+ opt = args[i]
+ i += 1
+ if isinstance(opt, str):
+ if opt == "-p":
+ try:
+ parg = args[i]
+ except IndexError:
+ return
+ i += 1
+ elif opt.startswith("-p"):
+ parg = opt[2:]
+ else:
+ continue
+ if exclude_only and not parg.startswith("no:"):
+ continue
+ self.consider_pluginarg(parg)
+
+ def consider_pluginarg(self, arg: str) -> None:
+ """:meta private:"""
+ if arg.startswith("no:"):
+ name = arg[3:]
+ if name in essential_plugins:
+ raise UsageError("plugin %s cannot be disabled" % name)
+
+ # PR #4304: remove stepwise if cacheprovider is blocked.
+ if name == "cacheprovider":
+ self.set_blocked("stepwise")
+ self.set_blocked("pytest_stepwise")
+
+ self.set_blocked(name)
+ if not name.startswith("pytest_"):
+ self.set_blocked("pytest_" + name)
+ else:
+ name = arg
+ # Unblock the plugin. None in the registry indicates that it has been
+ # blocked; pluggy does not expose an interface for undoing this.
+ if self._name2plugin.get(name, -1) is None:
+ del self._name2plugin[name]
+ if not name.startswith("pytest_"):
+ if self._name2plugin.get("pytest_" + name, -1) is None:
+ del self._name2plugin["pytest_" + name]
+ self.import_plugin(arg, consider_entry_points=True)
+
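+ # NOTE: illustrative examples of the ``-p`` plugin-argument syntax handled by
+ # consider_pluginarg() above ("myplugin" is a hypothetical name); not part of
+ # the upstream pytest sources.
+ #
+ #     pytest -p no:cacheprovider   # blocks "cacheprovider" (and "stepwise")
+ #     pytest -p myplugin           # imports "myplugin", also considering the
+ #                                  # "pytest11" entry point of that name
+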
+ def consider_conftest(self, conftestmodule: types.ModuleType) -> None:
+ """:meta private:"""
+ self.register(conftestmodule, name=conftestmodule.__file__)
+
+ def consider_env(self) -> None:
+ """:meta private:"""
+ self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS"))
+
+ def consider_module(self, mod: types.ModuleType) -> None:
+ """:meta private:"""
+ self._import_plugin_specs(getattr(mod, "pytest_plugins", []))
+
+ def _import_plugin_specs(
+ self, spec: Union[None, types.ModuleType, str, Sequence[str]]
+ ) -> None:
+ plugins = _get_plugin_specs_as_list(spec)
+ for import_spec in plugins:
+ self.import_plugin(import_spec)
+
+ def import_plugin(self, modname: str, consider_entry_points: bool = False) -> None:
+ """Import a plugin with ``modname``.
+
+ If ``consider_entry_points`` is True, entry point names are also
+ considered to find a plugin.
+ """
+ # Most often modname refers to builtin modules, e.g. "pytester",
+ # "terminal" or "capture". Those plugins are registered under their
+ # basename for historic purposes but must be imported with the
+ # _pytest prefix.
+ assert isinstance(modname, str), (
+ "module name as text required, got %r" % modname
+ )
+ if self.is_blocked(modname) or self.get_plugin(modname) is not None:
+ return
+
+ importspec = "_pytest." + modname if modname in builtin_plugins else modname
+ self.rewrite_hook.mark_rewrite(importspec)
+
+ if consider_entry_points:
+ loaded = self.load_setuptools_entrypoints("pytest11", name=modname)
+ if loaded:
+ return
+
+ try:
+ __import__(importspec)
+ except ImportError as e:
+ raise ImportError(
+ f'Error importing plugin "{modname}": {e.args[0]}'
+ ).with_traceback(e.__traceback__) from e
+
+ except Skipped as e:
+ self.skipped_plugins.append((modname, e.msg or ""))
+ else:
+ mod = sys.modules[importspec]
+ self.register(mod, modname)
+
+
+def _get_plugin_specs_as_list(
+ specs: Union[None, types.ModuleType, str, Sequence[str]]
+) -> List[str]:
+ """Parse a plugins specification into a list of plugin names."""
+ # None means empty.
+ if specs is None:
+ return []
+ # Workaround for #3899 - a submodule which happens to be called "pytest_plugins".
+ if isinstance(specs, types.ModuleType):
+ return []
+ # Comma-separated list.
+ if isinstance(specs, str):
+ return specs.split(",") if specs else []
+ # Direct specification.
+ if isinstance(specs, collections.abc.Sequence):
+ return list(specs)
+ raise UsageError(
+ "Plugins may be specified as a sequence or a ','-separated string of plugin names. Got: %r"
+ % specs
+ )
+
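+# NOTE: illustrative input/output pairs for _get_plugin_specs_as_list() above
+# ("myplug"/"other" are hypothetical names); not part of the upstream sources.
+#
+#     _get_plugin_specs_as_list(None)                -> []
+#     _get_plugin_specs_as_list("")                  -> []
+#     _get_plugin_specs_as_list("myplug,other")      -> ["myplug", "other"]
+#     _get_plugin_specs_as_list(["myplug", "other"]) -> ["myplug", "other"]
+#     _get_plugin_specs_as_list(42)                  -> raises UsageError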
+
+def _ensure_removed_sysmodule(modname: str) -> None:
+ try:
+ del sys.modules[modname]
+ except KeyError:
+ pass
+
+
+class Notset:
+ def __repr__(self):
+ return "<NOTSET>"
+
+
+notset = Notset()
+
+
+def _iter_rewritable_modules(package_files: Iterable[str]) -> Iterator[str]:
+ """Given an iterable of file names in a source distribution, return the "names" that should
+ be marked for assertion rewrite.
+
+ For example, the package "pytest_mock/__init__.py" should be added as "pytest_mock" in
+ the assertion rewrite mechanism.
+
+ This function has to deal with dist-info based distributions and egg based distributions
+ (which are still very much in use for "editable" installs).
+
+ Here are the file names as seen in a dist-info based distribution:
+
+ pytest_mock/__init__.py
+ pytest_mock/_version.py
+ pytest_mock/plugin.py
+ pytest_mock.egg-info/PKG-INFO
+
+ Here are the file names as seen in an egg based distribution:
+
+ src/pytest_mock/__init__.py
+ src/pytest_mock/_version.py
+ src/pytest_mock/plugin.py
+ src/pytest_mock.egg-info/PKG-INFO
+ LICENSE
+ setup.py
+
+ We have to take into account those two distribution flavors in order to determine which
+ names should be considered for assertion rewriting.
+
+ More information:
+ https://github.com/pytest-dev/pytest-mock/issues/167
+ """
+ package_files = list(package_files)
+ seen_some = False
+ for fn in package_files:
+ is_simple_module = "/" not in fn and fn.endswith(".py")
+ is_package = fn.count("/") == 1 and fn.endswith("__init__.py")
+ if is_simple_module:
+ module_name, _ = os.path.splitext(fn)
+ # we ignore "setup.py" at the root of the distribution
+ if module_name != "setup":
+ seen_some = True
+ yield module_name
+ elif is_package:
+ package_name = os.path.dirname(fn)
+ seen_some = True
+ yield package_name
+
+ if not seen_some:
+ # At this point we did not find any packages or modules suitable for assertion
+ # rewriting, so we try again by stripping the first path component (to account for
+ # "src" based source trees for example).
+ # This approach keeps the common case fast, as egg-based distributions
+ # are rarer.
+ new_package_files = []
+ for fn in package_files:
+ parts = fn.split("/")
+ new_fn = "/".join(parts[1:])
+ if new_fn:
+ new_package_files.append(new_fn)
+ if new_package_files:
+ yield from _iter_rewritable_modules(new_package_files)
+
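+# NOTE: illustrative expectations for _iter_rewritable_modules() above; not
+# part of the upstream sources.
+#
+#     list(_iter_rewritable_modules(
+#         ["pytest_mock/__init__.py", "pytest_mock/plugin.py"]))
+#     -> ["pytest_mock"]
+#
+#     list(_iter_rewritable_modules(
+#         ["src/pytest_mock/__init__.py", "setup.py"]))
+#     -> ["pytest_mock"]   # found on the second pass, after stripping "src/"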
+
+def _args_converter(args: Iterable[str]) -> Tuple[str, ...]:
+ return tuple(args)
+
+
+@final
+class Config:
+ """Access to configuration values, pluginmanager and plugin hooks.
+
+ :param PytestPluginManager pluginmanager:
+ A pytest PluginManager.
+
+ :param InvocationParams invocation_params:
+ Object containing parameters regarding the :func:`pytest.main`
+ invocation.
+ """
+
+ @final
+ @attr.s(frozen=True, auto_attribs=True)
+ class InvocationParams:
+ """Holds parameters passed during :func:`pytest.main`.
+
+ The object attributes are read-only.
+
+ .. versionadded:: 5.1
+
+ .. note::
+
+ Note that the environment variable ``PYTEST_ADDOPTS`` and the ``addopts``
+ ini option are handled by pytest and are not included in the ``args`` attribute.
+
+ Plugins accessing ``InvocationParams`` must be aware of that.
+ """
+
+ args: Tuple[str, ...] = attr.ib(converter=_args_converter)
+ """The command-line arguments as passed to :func:`pytest.main`."""
+ plugins: Optional[Sequence[Union[str, _PluggyPlugin]]]
+ """Extra plugins, might be `None`."""
+ dir: Path
+ """The directory from which :func:`pytest.main` was invoked."""
+
+ def __init__(
+ self,
+ pluginmanager: PytestPluginManager,
+ *,
+ invocation_params: Optional[InvocationParams] = None,
+ ) -> None:
+ from .argparsing import Parser, FILE_OR_DIR
+
+ if invocation_params is None:
+ invocation_params = self.InvocationParams(
+ args=(), plugins=None, dir=Path.cwd()
+ )
+
+ self.option = argparse.Namespace()
+ """Access to command line option as attributes.
+
+ :type: argparse.Namespace
+ """
+
+ self.invocation_params = invocation_params
+ """The parameters with which pytest was invoked.
+
+ :type: InvocationParams
+ """
+
+ _a = FILE_OR_DIR
+ self._parser = Parser(
+ usage=f"%(prog)s [options] [{_a}] [{_a}] [...]",
+ processopt=self._processopt,
+ _ispytest=True,
+ )
+ self.pluginmanager = pluginmanager
+ """The plugin manager handles plugin registration and hook invocation.
+
+ :type: PytestPluginManager
+ """
+
+ self.stash = Stash()
+ """A place where plugins can store information on the config for their
+ own use.
+
+ :type: Stash
+ """
+ # Deprecated alias. Was never public. Can be removed in a few releases.
+ self._store = self.stash
+
+ from .compat import PathAwareHookProxy
+
+ self.trace = self.pluginmanager.trace.root.get("config")
+ self.hook = PathAwareHookProxy(self.pluginmanager.hook)
+ self._inicache: Dict[str, Any] = {}
+ self._override_ini: Sequence[str] = ()
+ self._opt2dest: Dict[str, str] = {}
+ self._cleanup: List[Callable[[], None]] = []
+ self.pluginmanager.register(self, "pytestconfig")
+ self._configured = False
+ self.hook.pytest_addoption.call_historic(
+ kwargs=dict(parser=self._parser, pluginmanager=self.pluginmanager)
+ )
+
+ if TYPE_CHECKING:
+ from _pytest.cacheprovider import Cache
+
+ self.cache: Optional[Cache] = None
+
+ @property
+ def rootpath(self) -> Path:
+ """The path to the :ref:`rootdir <rootdir>`.
+
+ :type: pathlib.Path
+
+ .. versionadded:: 6.1
+ """
+ return self._rootpath
+
+ @property
+ def inipath(self) -> Optional[Path]:
+ """The path to the :ref:`configfile <configfiles>`.
+
+ :type: Optional[pathlib.Path]
+
+ .. versionadded:: 6.1
+ """
+ return self._inipath
+
+ def add_cleanup(self, func: Callable[[], None]) -> None:
+ """Add a function to be called when the config object gets out of
+ use (usually coinciding with pytest_unconfigure)."""
+ self._cleanup.append(func)
+
+ def _do_configure(self) -> None:
+ assert not self._configured
+ self._configured = True
+ with warnings.catch_warnings():
+ warnings.simplefilter("default")
+ self.hook.pytest_configure.call_historic(kwargs=dict(config=self))
+
+ def _ensure_unconfigure(self) -> None:
+ if self._configured:
+ self._configured = False
+ self.hook.pytest_unconfigure(config=self)
+ self.hook.pytest_configure._call_history = []
+ while self._cleanup:
+ fin = self._cleanup.pop()
+ fin()
+
+ def get_terminal_writer(self) -> TerminalWriter:
+ terminalreporter: TerminalReporter = self.pluginmanager.get_plugin(
+ "terminalreporter"
+ )
+ return terminalreporter._tw
+
+ def pytest_cmdline_parse(
+ self, pluginmanager: PytestPluginManager, args: List[str]
+ ) -> "Config":
+ try:
+ self.parse(args)
+ except UsageError:
+
+ # Handle --version and --help here in a minimal fashion.
+ # This gets done via helpconfig normally, but its
+ # pytest_cmdline_main is not called in case of errors.
+ if getattr(self.option, "version", False) or "--version" in args:
+ from _pytest.helpconfig import showversion
+
+ showversion(self)
+ elif (
+ getattr(self.option, "help", False) or "--help" in args or "-h" in args
+ ):
+ self._parser._getparser().print_help()
+ sys.stdout.write(
+ "\nNOTE: displaying only minimal help due to UsageError.\n\n"
+ )
+
+ raise
+
+ return self
+
+ def notify_exception(
+ self,
+ excinfo: ExceptionInfo[BaseException],
+ option: Optional[argparse.Namespace] = None,
+ ) -> None:
+ if option and getattr(option, "fulltrace", False):
+ style: _TracebackStyle = "long"
+ else:
+ style = "native"
+ excrepr = excinfo.getrepr(
+ funcargs=True, showlocals=getattr(option, "showlocals", False), style=style
+ )
+ res = self.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo)
+ if not any(res):
+ for line in str(excrepr).split("\n"):
+ sys.stderr.write("INTERNALERROR> %s\n" % line)
+ sys.stderr.flush()
+
+ def cwd_relative_nodeid(self, nodeid: str) -> str:
+ # nodeids are relative to the rootpath; compute them relative to the cwd.
+ if self.invocation_params.dir != self.rootpath:
+ fullpath = self.rootpath / nodeid
+ nodeid = bestrelpath(self.invocation_params.dir, fullpath)
+ return nodeid
+
+ @classmethod
+ def fromdictargs(cls, option_dict, args) -> "Config":
+ """Constructor usable for subprocesses."""
+ config = get_config(args)
+ config.option.__dict__.update(option_dict)
+ config.parse(args, addopts=False)
+ for x in config.option.plugins:
+ config.pluginmanager.consider_pluginarg(x)
+ return config
+
+ def _processopt(self, opt: "Argument") -> None:
+ for name in opt._short_opts + opt._long_opts:
+ self._opt2dest[name] = opt.dest
+
+ if hasattr(opt, "default"):
+ if not hasattr(self.option, opt.dest):
+ setattr(self.option, opt.dest, opt.default)
+
+ @hookimpl(trylast=True)
+ def pytest_load_initial_conftests(self, early_config: "Config") -> None:
+ self.pluginmanager._set_initial_conftests(
+ early_config.known_args_namespace, rootpath=early_config.rootpath
+ )
+
+ def _initini(self, args: Sequence[str]) -> None:
+ ns, unknown_args = self._parser.parse_known_and_unknown_args(
+ args, namespace=copy.copy(self.option)
+ )
+ rootpath, inipath, inicfg = determine_setup(
+ ns.inifilename,
+ ns.file_or_dir + unknown_args,
+ rootdir_cmd_arg=ns.rootdir or None,
+ config=self,
+ )
+ self._rootpath = rootpath
+ self._inipath = inipath
+ self.inicfg = inicfg
+ self._parser.extra_info["rootdir"] = str(self.rootpath)
+ self._parser.extra_info["inifile"] = str(self.inipath)
+ self._parser.addini("addopts", "extra command line options", "args")
+ self._parser.addini("minversion", "minimally required pytest version")
+ self._parser.addini(
+ "required_plugins",
+ "plugins that must be present for pytest to run",
+ type="args",
+ default=[],
+ )
+ self._override_ini = ns.override_ini or ()
+
+ def _consider_importhook(self, args: Sequence[str]) -> None:
+ """Install the PEP 302 import hook if using assertion rewriting.
+
+ Needs to parse the --assert=<mode> option from the commandline
+ and find all the installed plugins to mark them for rewriting
+ by the importhook.
+ """
+ ns, unknown_args = self._parser.parse_known_and_unknown_args(args)
+ mode = getattr(ns, "assertmode", "plain")
+ if mode == "rewrite":
+ import _pytest.assertion
+
+ try:
+ hook = _pytest.assertion.install_importhook(self)
+ except SystemError:
+ mode = "plain"
+ else:
+ self._mark_plugins_for_rewrite(hook)
+ self._warn_about_missing_assertion(mode)
+
+ def _mark_plugins_for_rewrite(self, hook) -> None:
+ """Given an importhook, mark for rewrite any top-level
+ modules or packages in the distribution package for
+ all pytest plugins."""
+ self.pluginmanager.rewrite_hook = hook
+
+ if os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"):
+ # We don't autoload from setuptools entry points, no need to continue.
+ return
+
+ package_files = (
+ str(file)
+ for dist in importlib_metadata.distributions()
+ if any(ep.group == "pytest11" for ep in dist.entry_points)
+ for file in dist.files or []
+ )
+
+ for name in _iter_rewritable_modules(package_files):
+ hook.mark_rewrite(name)
+
+ def _validate_args(self, args: List[str], via: str) -> List[str]:
+ """Validate known args."""
+ self._parser._config_source_hint = via # type: ignore
+ try:
+ self._parser.parse_known_and_unknown_args(
+ args, namespace=copy.copy(self.option)
+ )
+ finally:
+ del self._parser._config_source_hint # type: ignore
+
+ return args
+
+ def _preparse(self, args: List[str], addopts: bool = True) -> None:
+ if addopts:
+ env_addopts = os.environ.get("PYTEST_ADDOPTS", "")
+ if len(env_addopts):
+ args[:] = (
+ self._validate_args(shlex.split(env_addopts), "via PYTEST_ADDOPTS")
+ + args
+ )
+ self._initini(args)
+ if addopts:
+ args[:] = (
+ self._validate_args(self.getini("addopts"), "via addopts config") + args
+ )
+
+ self.known_args_namespace = self._parser.parse_known_args(
+ args, namespace=copy.copy(self.option)
+ )
+ self._checkversion()
+ self._consider_importhook(args)
+ self.pluginmanager.consider_preparse(args, exclude_only=False)
+ if not os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"):
+ # Autoloading from setuptools entry points is enabled; when
+ # PYTEST_DISABLE_PLUGIN_AUTOLOAD is set, only explicitly specified
+ # plugins are loaded instead.
+ self.pluginmanager.load_setuptools_entrypoints("pytest11")
+ self.pluginmanager.consider_env()
+
+ self.known_args_namespace = self._parser.parse_known_args(
+ args, namespace=copy.copy(self.known_args_namespace)
+ )
+
+ self._validate_plugins()
+ self._warn_about_skipped_plugins()
+
+ if self.known_args_namespace.strict:
+ self.issue_config_time_warning(
+ _pytest.deprecated.STRICT_OPTION, stacklevel=2
+ )
+
+ if self.known_args_namespace.confcutdir is None and self.inipath is not None:
+ confcutdir = str(self.inipath.parent)
+ self.known_args_namespace.confcutdir = confcutdir
+ try:
+ self.hook.pytest_load_initial_conftests(
+ early_config=self, args=args, parser=self._parser
+ )
+ except ConftestImportFailure as e:
+ if self.known_args_namespace.help or self.known_args_namespace.version:
+ # We don't want to prevent --help/--version from working,
+ # so just let it pass and print a warning at the end.
+ self.issue_config_time_warning(
+ PytestConfigWarning(f"could not load initial conftests: {e.path}"),
+ stacklevel=2,
+ )
+ else:
+ raise
+
+ @hookimpl(hookwrapper=True)
+ def pytest_collection(self) -> Generator[None, None, None]:
+ # Check for unknown ini keys after collection is done, so we take into
+ # account options added by late-loading conftest files.
+ yield
+ self._validate_config_options()
+
+ def _checkversion(self) -> None:
+ import pytest
+
+ minver = self.inicfg.get("minversion", None)
+ if minver:
+ # Imported lazily to improve start-up time.
+ from packaging.version import Version
+
+ if not isinstance(minver, str):
+ raise pytest.UsageError(
+ "%s: 'minversion' must be a single value" % self.inipath
+ )
+
+ if Version(minver) > Version(pytest.__version__):
+ raise pytest.UsageError(
+ "%s: 'minversion' requires pytest-%s, actual pytest-%s'"
+ % (
+ self.inipath,
+ minver,
+ pytest.__version__,
+ )
+ )
+
+ def _validate_config_options(self) -> None:
+ for key in sorted(self._get_unknown_ini_keys()):
+ self._warn_or_fail_if_strict(f"Unknown config option: {key}\n")
+
+ def _validate_plugins(self) -> None:
+ required_plugins = sorted(self.getini("required_plugins"))
+ if not required_plugins:
+ return
+
+ # Imported lazily to improve start-up time.
+ from packaging.version import Version
+ from packaging.requirements import InvalidRequirement, Requirement
+
+ plugin_info = self.pluginmanager.list_plugin_distinfo()
+ plugin_dist_info = {dist.project_name: dist.version for _, dist in plugin_info}
+
+ missing_plugins = []
+ for required_plugin in required_plugins:
+ try:
+ req = Requirement(required_plugin)
+ except InvalidRequirement:
+ missing_plugins.append(required_plugin)
+ continue
+
+ if req.name not in plugin_dist_info:
+ missing_plugins.append(required_plugin)
+ elif not req.specifier.contains(
+ Version(plugin_dist_info[req.name]), prereleases=True
+ ):
+ missing_plugins.append(required_plugin)
+
+ if missing_plugins:
+ raise UsageError(
+ "Missing required plugins: {}".format(", ".join(missing_plugins)),
+ )
+
+ def _warn_or_fail_if_strict(self, message: str) -> None:
+ if self.known_args_namespace.strict_config:
+ raise UsageError(message)
+
+ self.issue_config_time_warning(PytestConfigWarning(message), stacklevel=3)
+
+ def _get_unknown_ini_keys(self) -> List[str]:
+ parser_inicfg = self._parser._inidict
+ return [name for name in self.inicfg if name not in parser_inicfg]
+
+ def parse(self, args: List[str], addopts: bool = True) -> None:
+ # Parse given cmdline arguments into this config object.
+ assert not hasattr(
+ self, "args"
+ ), "can only parse cmdline args at most once per Config object"
+ self.hook.pytest_addhooks.call_historic(
+ kwargs=dict(pluginmanager=self.pluginmanager)
+ )
+ self._preparse(args, addopts=addopts)
+ # XXX deprecated hook:
+ self.hook.pytest_cmdline_preparse(config=self, args=args)
+ self._parser.after_preparse = True # type: ignore
+ try:
+ args = self._parser.parse_setoption(
+ args, self.option, namespace=self.option
+ )
+ if not args:
+ if self.invocation_params.dir == self.rootpath:
+ args = self.getini("testpaths")
+ if not args:
+ args = [str(self.invocation_params.dir)]
+ self.args = args
+ except PrintHelp:
+ pass
+
+ def issue_config_time_warning(self, warning: Warning, stacklevel: int) -> None:
+ """Issue and handle a warning during the "configure" stage.
+
+ During ``pytest_configure`` we can't capture warnings using the ``catch_warnings_for_item``
+ function because it is not possible to have hookwrappers around ``pytest_configure``.
+
+ This function is mainly intended for plugins that need to issue warnings during
+ ``pytest_configure`` (or similar stages).
+
+ :param warning: The warning instance.
+ :param stacklevel: stacklevel forwarded to warnings.warn.
+ """
+ if self.pluginmanager.is_blocked("warnings"):
+ return
+
+ cmdline_filters = self.known_args_namespace.pythonwarnings or []
+ config_filters = self.getini("filterwarnings")
+
+ with warnings.catch_warnings(record=True) as records:
+ warnings.simplefilter("always", type(warning))
+ apply_warning_filters(config_filters, cmdline_filters)
+ warnings.warn(warning, stacklevel=stacklevel)
+
+ if records:
+ frame = sys._getframe(stacklevel - 1)
+ location = frame.f_code.co_filename, frame.f_lineno, frame.f_code.co_name
+ self.hook.pytest_warning_captured.call_historic(
+ kwargs=dict(
+ warning_message=records[0],
+ when="config",
+ item=None,
+ location=location,
+ )
+ )
+ self.hook.pytest_warning_recorded.call_historic(
+ kwargs=dict(
+ warning_message=records[0],
+ when="config",
+ nodeid="",
+ location=location,
+ )
+ )
+
+ def addinivalue_line(self, name: str, line: str) -> None:
+ """Add a line to an ini-file option. The option must have been
+ declared but might not yet be set, in which case the line becomes
+ the first line in its value."""
+ x = self.getini(name)
+ assert isinstance(x, list)
+ x.append(line) # modifies the cached list in place
+
+ def getini(self, name: str):
+ """Return configuration value from an :ref:`ini file <configfiles>`.
+
+ If the specified name hasn't been registered through a prior
+ :func:`parser.addini <pytest.Parser.addini>` call (usually from a
+ plugin), a ValueError is raised.
+ """
+ try:
+ return self._inicache[name]
+ except KeyError:
+ self._inicache[name] = val = self._getini(name)
+ return val
+
+ # Meant for easy monkeypatching by legacypath plugin.
+ # Can be inlined back (with no cover removed) once legacypath is gone.
+ def _getini_unknown_type(self, name: str, type: str, value: Union[str, List[str]]):
+ msg = f"unknown configuration type: {type}"
+ raise ValueError(msg, value) # pragma: no cover
+
+ def _getini(self, name: str):
+ try:
+ description, type, default = self._parser._inidict[name]
+ except KeyError as e:
+ raise ValueError(f"unknown configuration value: {name!r}") from e
+ override_value = self._get_override_ini_value(name)
+ if override_value is None:
+ try:
+ value = self.inicfg[name]
+ except KeyError:
+ if default is not None:
+ return default
+ if type is None:
+ return ""
+ return []
+ else:
+ value = override_value
+ # Coerce the values based on types.
+ #
+ # Note: some coercions are only required if we are reading from .ini files, because
+ # the file format doesn't contain type information, but when reading from toml we will
+ # get either str or list of str values (see _parse_ini_config_from_pyproject_toml).
+ # For example:
+ #
+ # ini:
+ # a_line_list = "tests acceptance"
+ # in this case, we need to split the string to obtain a list of strings.
+ #
+ # toml:
+ # a_line_list = ["tests", "acceptance"]
+ # in this case, we already have a list ready to use.
+ #
+ if type == "paths":
+ # TODO: This assert is probably not valid in all cases.
+ assert self.inipath is not None
+ dp = self.inipath.parent
+ input_values = shlex.split(value) if isinstance(value, str) else value
+ return [dp / x for x in input_values]
+ elif type == "args":
+ return shlex.split(value) if isinstance(value, str) else value
+ elif type == "linelist":
+ if isinstance(value, str):
+ return [t for t in map(lambda x: x.strip(), value.split("\n")) if t]
+ else:
+ return value
+ elif type == "bool":
+ return _strtobool(str(value).strip())
+ elif type == "string":
+ return value
+ elif type is None:
+ return value
+ else:
+ return self._getini_unknown_type(name, type, value)
+
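+ # NOTE: illustrative coercion examples for _getini() above, using the real
+ # "testpaths" option (registered with type="args"); not part of the upstream
+ # sources.
+ #
+ #     # pytest.ini:   testpaths = tests acceptance
+ #     config.getini("testpaths")   # -> ["tests", "acceptance"]
+ #
+ #     # pyproject.toml:   testpaths = ["tests", "acceptance"]
+ #     config.getini("testpaths")   # -> ["tests", "acceptance"]  (passed through)
+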
+ def _getconftest_pathlist(
+ self, name: str, path: Path, rootpath: Path
+ ) -> Optional[List[Path]]:
+ try:
+ mod, relroots = self.pluginmanager._rget_with_confmod(
+ name, path, self.getoption("importmode"), rootpath
+ )
+ except KeyError:
+ return None
+ modpath = Path(mod.__file__).parent
+ values: List[Path] = []
+ for relroot in relroots:
+ if isinstance(relroot, os.PathLike):
+ relroot = Path(relroot)
+ else:
+ relroot = relroot.replace("/", os.sep)
+ relroot = absolutepath(modpath / relroot)
+ values.append(relroot)
+ return values
+
+ def _get_override_ini_value(self, name: str) -> Optional[str]:
+ value = None
+ # override_ini is a list of "ini=value" options.
+ # Always use the last item if multiple values are set for same ini-name,
+ # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2.
+ for ini_config in self._override_ini:
+ try:
+ key, user_ini_value = ini_config.split("=", 1)
+ except ValueError as e:
+ raise UsageError(
+ "-o/--override-ini expects option=value style (got: {!r}).".format(
+ ini_config
+ )
+ ) from e
+ else:
+ if key == name:
+ value = user_ini_value
+ return value
+
+ def getoption(self, name: str, default=notset, skip: bool = False):
+ """Return command line option value.
+
+ :param name: Name of the option. You may also specify
+ the literal ``--OPT`` option instead of the "dest" option name.
+ :param default: Default value if no option of that name exists.
+ :param skip: If True, raise pytest.skip if option does not exist
+ or has a None value.
+ """
+ name = self._opt2dest.get(name, name)
+ try:
+ val = getattr(self.option, name)
+ if val is None and skip:
+ raise AttributeError(name)
+ return val
+ except AttributeError as e:
+ if default is not notset:
+ return default
+ if skip:
+ import pytest
+
+ pytest.skip(f"no {name!r} option found")
+ raise ValueError(f"no option named {name!r}") from e
+
+ def getvalue(self, name: str, path=None):
+ """Deprecated, use getoption() instead."""
+ return self.getoption(name)
+
+ def getvalueorskip(self, name: str, path=None):
+ """Deprecated, use getoption(skip=True) instead."""
+ return self.getoption(name, skip=True)
+
+ def _warn_about_missing_assertion(self, mode: str) -> None:
+ if not _assertion_supported():
+ if mode == "plain":
+ warning_text = (
+ "ASSERTIONS ARE NOT EXECUTED"
+ " and FAILING TESTS WILL PASS. Are you"
+ " using python -O?"
+ )
+ else:
+ warning_text = (
+ "assertions not in test modules or"
+ " plugins will be ignored"
+ " because assert statements are not executed "
+ "by the underlying Python interpreter "
+ "(are you using python -O?)\n"
+ )
+ self.issue_config_time_warning(
+ PytestConfigWarning(warning_text),
+ stacklevel=3,
+ )
+
+ def _warn_about_skipped_plugins(self) -> None:
+ for module_name, msg in self.pluginmanager.skipped_plugins:
+ self.issue_config_time_warning(
+ PytestConfigWarning(f"skipped plugin {module_name!r}: {msg}"),
+ stacklevel=2,
+ )
+
+
+def _assertion_supported() -> bool:
+ try:
+ assert False
+ except AssertionError:
+ return True
+ else:
+ return False # type: ignore[unreachable]
+
+
+def create_terminal_writer(
+ config: Config, file: Optional[TextIO] = None
+) -> TerminalWriter:
+ """Create a TerminalWriter instance configured according to the options
+ in the config object.
+
+ All code that requires a TerminalWriter object and has access to a
+ config object should use this function.
+ """
+ tw = TerminalWriter(file=file)
+
+ if config.option.color == "yes":
+ tw.hasmarkup = True
+ elif config.option.color == "no":
+ tw.hasmarkup = False
+
+ if config.option.code_highlight == "yes":
+ tw.code_highlight = True
+ elif config.option.code_highlight == "no":
+ tw.code_highlight = False
+
+ return tw
+
+
+def _strtobool(val: str) -> bool:
+ """Convert a string representation of truth to True or False.
+
+ True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
+ are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
+ 'val' is anything else.
+
+ .. note:: Copied from distutils.util.
+ """
+ val = val.lower()
+ if val in ("y", "yes", "t", "true", "on", "1"):
+ return True
+ elif val in ("n", "no", "f", "false", "off", "0"):
+ return False
+ else:
+ raise ValueError(f"invalid truth value {val!r}")
+
+
+@lru_cache(maxsize=50)
+def parse_warning_filter(
+ arg: str, *, escape: bool
+) -> Tuple[str, str, Type[Warning], str, int]:
+ """Parse a warnings filter string.
+
+ This is copied from warnings._setoption with the following changes:
+
+ * Does not apply the filter.
+ * Escaping is optional.
+ * Raises UsageError so we get nice error messages on failure.
+ """
+ __tracebackhide__ = True
+ error_template = dedent(
+ f"""\
+ while parsing the following warning configuration:
+
+ {arg}
+
+ This error occurred:
+
+ {{error}}
+ """
+ )
+
+ parts = arg.split(":")
+ if len(parts) > 5:
+ doc_url = (
+ "https://docs.python.org/3/library/warnings.html#describing-warning-filters"
+ )
+ error = dedent(
+ f"""\
+ Too many fields ({len(parts)}), expected at most 5 separated by colons:
+
+ action:message:category:module:line
+
+ For more information please consult: {doc_url}
+ """
+ )
+ raise UsageError(error_template.format(error=error))
+
+ while len(parts) < 5:
+ parts.append("")
+ action_, message, category_, module, lineno_ = (s.strip() for s in parts)
+ try:
+ action: str = warnings._getaction(action_) # type: ignore[attr-defined]
+ except warnings._OptionError as e:
+ raise UsageError(error_template.format(error=str(e)))
+ try:
+ category: Type[Warning] = _resolve_warning_category(category_)
+ except Exception:
+ exc_info = ExceptionInfo.from_current()
+ exception_text = exc_info.getrepr(style="native")
+ raise UsageError(error_template.format(error=exception_text))
+ if message and escape:
+ message = re.escape(message)
+ if module and escape:
+ module = re.escape(module) + r"\Z"
+ if lineno_:
+ try:
+ lineno = int(lineno_)
+ if lineno < 0:
+ raise ValueError("number is negative")
+ except ValueError as e:
+ raise UsageError(
+ error_template.format(error=f"invalid lineno {lineno_!r}: {e}")
+ )
+ else:
+ lineno = 0
+ return action, message, category, module, lineno
+
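+# NOTE: illustrative results for parse_warning_filter() above ("mymod" is a
+# hypothetical module name); not part of the upstream sources.
+#
+#     parse_warning_filter("error::DeprecationWarning", escape=False)
+#     -> ("error", "", DeprecationWarning, "", 0)
+#
+#     parse_warning_filter("ignore:ba[rd]:UserWarning:mymod:3", escape=True)
+#     -> ("ignore", r"ba\[rd\]", UserWarning, r"mymod\Z", 3)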
+
+def _resolve_warning_category(category: str) -> Type[Warning]:
+ """
+ Copied from warnings._getcategory, but changed so that it lets exceptions
+ (especially ImportErrors) propagate, so we can get access to their tracebacks (#9218).
+ """
+ __tracebackhide__ = True
+ if not category:
+ return Warning
+
+ if "." not in category:
+ import builtins as m
+
+ klass = category
+ else:
+ module, _, klass = category.rpartition(".")
+ m = __import__(module, None, None, [klass])
+ cat = getattr(m, klass)
+ if not issubclass(cat, Warning):
+ raise UsageError(f"{cat} is not a Warning subclass")
+ return cast(Type[Warning], cat)
+
+
+def apply_warning_filters(
+ config_filters: Iterable[str], cmdline_filters: Iterable[str]
+) -> None:
+ """Applies pytest-configured filters to the warnings module"""
+ # Filters should have this precedence: cmdline options, config.
+ # Filters should be applied in the inverse order of precedence.
+ for arg in config_filters:
+ warnings.filterwarnings(*parse_warning_filter(arg, escape=False))
+
+ for arg in cmdline_filters:
+ warnings.filterwarnings(*parse_warning_filter(arg, escape=True))
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/argparsing.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/argparsing.py
new file mode 100644
index 0000000000..b0bb3f168f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/argparsing.py
@@ -0,0 +1,535 @@
+import argparse
+import os
+import sys
+import warnings
+from gettext import gettext
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+import _pytest._io
+from _pytest.compat import final
+from _pytest.config.exceptions import UsageError
+from _pytest.deprecated import ARGUMENT_PERCENT_DEFAULT
+from _pytest.deprecated import ARGUMENT_TYPE_STR
+from _pytest.deprecated import ARGUMENT_TYPE_STR_CHOICE
+from _pytest.deprecated import check_ispytest
+
+if TYPE_CHECKING:
+ from typing import NoReturn
+ from typing_extensions import Literal
+
+FILE_OR_DIR = "file_or_dir"
+
+
+@final
+class Parser:
+ """Parser for command line arguments and ini-file values.
+
+ :ivar extra_info: Dict of generic param -> value to display in case
+ there's an error processing the command line arguments.
+ """
+
+ prog: Optional[str] = None
+
+ def __init__(
+ self,
+ usage: Optional[str] = None,
+ processopt: Optional[Callable[["Argument"], None]] = None,
+ *,
+ _ispytest: bool = False,
+ ) -> None:
+ check_ispytest(_ispytest)
+ self._anonymous = OptionGroup("custom options", parser=self, _ispytest=True)
+ self._groups: List[OptionGroup] = []
+ self._processopt = processopt
+ self._usage = usage
+ self._inidict: Dict[str, Tuple[str, Optional[str], Any]] = {}
+ self._ininames: List[str] = []
+ self.extra_info: Dict[str, Any] = {}
+
+ def processoption(self, option: "Argument") -> None:
+ if self._processopt:
+ if option.dest:
+ self._processopt(option)
+
+ def getgroup(
+ self, name: str, description: str = "", after: Optional[str] = None
+ ) -> "OptionGroup":
+ """Get (or create) a named option Group.
+
+ :name: Name of the option group.
+ :description: Long description for --help output.
+ :after: Name of another group, used for ordering --help output.
+
+ The returned group object has an ``addoption`` method with the same
+ signature as :func:`parser.addoption <pytest.Parser.addoption>` but
+ will be shown in the respective group in the output of
+ ``pytest --help``.
+ """
+ for group in self._groups:
+ if group.name == name:
+ return group
+ group = OptionGroup(name, description, parser=self, _ispytest=True)
+ i = 0
+ for i, grp in enumerate(self._groups):
+ if grp.name == after:
+ break
+ self._groups.insert(i + 1, group)
+ return group
+
+ def addoption(self, *opts: str, **attrs: Any) -> None:
+ """Register a command line option.
+
+ :opts: Option names, can be short or long options.
+ :attrs: Same attributes which the ``add_argument()`` function of the
+ `argparse library <https://docs.python.org/library/argparse.html>`_
+ accepts.
+
+ After command line parsing, options are available on the pytest config
+ object via ``config.option.NAME`` where ``NAME`` is usually set
+ by passing a ``dest`` attribute, for example
+ ``addoption("--long", dest="NAME", ...)``.
+ """
+ self._anonymous.addoption(*opts, **attrs)
+
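+ # NOTE: a minimal conftest.py sketch using addoption() above; the option and
+ # dest names are hypothetical and this is not part of the upstream sources.
+ #
+ #     def pytest_addoption(parser):
+ #         parser.addoption(
+ #             "--run-slow", action="store_true", default=False,
+ #             dest="run_slow", help="also run tests marked as slow",
+ #         )
+ #
+ #     # later, e.g. in a fixture or hook:
+ #     #     config.getoption("run_slow")  or  config.option.run_slow
+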
+ def parse(
+ self,
+ args: Sequence[Union[str, "os.PathLike[str]"]],
+ namespace: Optional[argparse.Namespace] = None,
+ ) -> argparse.Namespace:
+ from _pytest._argcomplete import try_argcomplete
+
+ self.optparser = self._getparser()
+ try_argcomplete(self.optparser)
+ strargs = [os.fspath(x) for x in args]
+ return self.optparser.parse_args(strargs, namespace=namespace)
+
+ def _getparser(self) -> "MyOptionParser":
+ from _pytest._argcomplete import filescompleter
+
+ optparser = MyOptionParser(self, self.extra_info, prog=self.prog)
+ groups = self._groups + [self._anonymous]
+ for group in groups:
+ if group.options:
+ desc = group.description or group.name
+ arggroup = optparser.add_argument_group(desc)
+ for option in group.options:
+ n = option.names()
+ a = option.attrs()
+ arggroup.add_argument(*n, **a)
+ file_or_dir_arg = optparser.add_argument(FILE_OR_DIR, nargs="*")
+ # bash-like autocompletion for dirs (appending '/')
+ # Type ignored because typeshed doesn't know about argcomplete.
+ file_or_dir_arg.completer = filescompleter # type: ignore
+ return optparser
+
+ def parse_setoption(
+ self,
+ args: Sequence[Union[str, "os.PathLike[str]"]],
+ option: argparse.Namespace,
+ namespace: Optional[argparse.Namespace] = None,
+ ) -> List[str]:
+ parsedoption = self.parse(args, namespace=namespace)
+ for name, value in parsedoption.__dict__.items():
+ setattr(option, name, value)
+ return cast(List[str], getattr(parsedoption, FILE_OR_DIR))
+
+ def parse_known_args(
+ self,
+ args: Sequence[Union[str, "os.PathLike[str]"]],
+ namespace: Optional[argparse.Namespace] = None,
+ ) -> argparse.Namespace:
+ """Parse and return a namespace object with known arguments at this point."""
+ return self.parse_known_and_unknown_args(args, namespace=namespace)[0]
+
+ def parse_known_and_unknown_args(
+ self,
+ args: Sequence[Union[str, "os.PathLike[str]"]],
+ namespace: Optional[argparse.Namespace] = None,
+ ) -> Tuple[argparse.Namespace, List[str]]:
+ """Parse and return a namespace object with known arguments, and
+ the remaining arguments unknown at this point."""
+ optparser = self._getparser()
+ strargs = [os.fspath(x) for x in args]
+ return optparser.parse_known_args(strargs, namespace=namespace)
+
+ def addini(
+ self,
+ name: str,
+ help: str,
+ type: Optional[
+ "Literal['string', 'paths', 'pathlist', 'args', 'linelist', 'bool']"
+ ] = None,
+ default=None,
+ ) -> None:
+ """Register an ini-file option.
+
+ :name:
+ Name of the ini-variable.
+ :type:
+ Type of the variable. Can be:
+
+ * ``string``: a string
+ * ``bool``: a boolean
+ * ``args``: a list of strings, separated as in a shell
+ * ``linelist``: a list of strings, separated by line breaks
+ * ``paths``: a list of :class:`pathlib.Path`, separated as in a shell
+ * ``pathlist``: a list of ``py.path``, separated as in a shell
+
+ .. versionadded:: 7.0
+ The ``paths`` variable type.
+
+ Defaults to ``string`` if ``None`` or not passed.
+ :default:
+ Default value if no ini-file option exists but is queried.
+
+ The value of ini-variables can be retrieved via a call to
+ :py:func:`config.getini(name) <pytest.Config.getini>`.
+ """
+ assert type in (None, "string", "paths", "pathlist", "args", "linelist", "bool")
+ self._inidict[name] = (help, type, default)
+ self._ininames.append(name)
+
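+# NOTE: an illustrative ini registration against the Parser API above; the
+# "acceptance_paths" name is hypothetical and this is not part of the upstream
+# sources.
+#
+#     def pytest_addoption(parser):
+#         parser.addini(
+#             "acceptance_paths",
+#             help="directories searched by a (hypothetical) acceptance plugin",
+#             type="paths",
+#             default=[],
+#         )
+#
+#     # config.getini("acceptance_paths") then returns pathlib.Path objects
+#     # resolved relative to the directory of the ini file.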
+
+class ArgumentError(Exception):
+ """Raised if an Argument instance is created with invalid or
+ inconsistent arguments."""
+
+ def __init__(self, msg: str, option: Union["Argument", str]) -> None:
+ self.msg = msg
+ self.option_id = str(option)
+
+ def __str__(self) -> str:
+ if self.option_id:
+ return f"option {self.option_id}: {self.msg}"
+ else:
+ return self.msg
+
+
+class Argument:
+ """Class that mimics the necessary behaviour of optparse.Option.
+
+ It's currently a least-effort implementation which ignores choices
+ and integer prefixes.
+
+ https://docs.python.org/3/library/optparse.html#optparse-standard-option-types
+ """
+
+ _typ_map = {"int": int, "string": str, "float": float, "complex": complex}
+
+ def __init__(self, *names: str, **attrs: Any) -> None:
+ """Store parms in private vars for use in add_argument."""
+ self._attrs = attrs
+ self._short_opts: List[str] = []
+ self._long_opts: List[str] = []
+ if "%default" in (attrs.get("help") or ""):
+ warnings.warn(ARGUMENT_PERCENT_DEFAULT, stacklevel=3)
+ try:
+ typ = attrs["type"]
+ except KeyError:
+ pass
+ else:
+ # This might raise a KeyError as well; we don't want to catch that.
+ if isinstance(typ, str):
+ if typ == "choice":
+ warnings.warn(
+ ARGUMENT_TYPE_STR_CHOICE.format(typ=typ, names=names),
+ stacklevel=4,
+ )
+ # argparse expects a type here; take it from
+ # the type of the first element.
+ attrs["type"] = type(attrs["choices"][0])
+ else:
+ warnings.warn(
+ ARGUMENT_TYPE_STR.format(typ=typ, names=names), stacklevel=4
+ )
+ attrs["type"] = Argument._typ_map[typ]
+ # Used in test_parseopt -> test_parse_defaultgetter.
+ self.type = attrs["type"]
+ else:
+ self.type = typ
+ try:
+ # Attribute existence is tested in Config._processopt.
+ self.default = attrs["default"]
+ except KeyError:
+ pass
+ self._set_opt_strings(names)
+ dest: Optional[str] = attrs.get("dest")
+ if dest:
+ self.dest = dest
+ elif self._long_opts:
+ self.dest = self._long_opts[0][2:].replace("-", "_")
+ else:
+ try:
+ self.dest = self._short_opts[0][1:]
+ except IndexError as e:
+ self.dest = "???" # Needed for the error repr.
+ raise ArgumentError("need a long or short option", self) from e
+
+ def names(self) -> List[str]:
+ return self._short_opts + self._long_opts
+
+ def attrs(self) -> Mapping[str, Any]:
+ # Update any attributes set by processopt.
+ attrs = "default dest help".split()
+ attrs.append(self.dest)
+ for attr in attrs:
+ try:
+ self._attrs[attr] = getattr(self, attr)
+ except AttributeError:
+ pass
+ if self._attrs.get("help"):
+ a = self._attrs["help"]
+ a = a.replace("%default", "%(default)s")
+ # a = a.replace('%prog', '%(prog)s')
+ self._attrs["help"] = a
+ return self._attrs
+
+ def _set_opt_strings(self, opts: Sequence[str]) -> None:
+ """Directly from optparse.
+
+ Might not be necessary as this is passed to argparse later on.
+ """
+ for opt in opts:
+ if len(opt) < 2:
+ raise ArgumentError(
+ "invalid option string %r: "
+ "must be at least two characters long" % opt,
+ self,
+ )
+ elif len(opt) == 2:
+ if not (opt[0] == "-" and opt[1] != "-"):
+ raise ArgumentError(
+ "invalid short option string %r: "
+ "must be of the form -x, (x any non-dash char)" % opt,
+ self,
+ )
+ self._short_opts.append(opt)
+ else:
+ if not (opt[0:2] == "--" and opt[2] != "-"):
+ raise ArgumentError(
+ "invalid long option string %r: "
+ "must start with --, followed by non-dash" % opt,
+ self,
+ )
+ self._long_opts.append(opt)
+
+ def __repr__(self) -> str:
+ args: List[str] = []
+ if self._short_opts:
+ args += ["_short_opts: " + repr(self._short_opts)]
+ if self._long_opts:
+ args += ["_long_opts: " + repr(self._long_opts)]
+ args += ["dest: " + repr(self.dest)]
+ if hasattr(self, "type"):
+ args += ["type: " + repr(self.type)]
+ if hasattr(self, "default"):
+ args += ["default: " + repr(self.default)]
+ return "Argument({})".format(", ".join(args))
+
+
+class OptionGroup:
+ """A group of options shown in its own section."""
+
+ def __init__(
+ self,
+ name: str,
+ description: str = "",
+ parser: Optional[Parser] = None,
+ *,
+ _ispytest: bool = False,
+ ) -> None:
+ check_ispytest(_ispytest)
+ self.name = name
+ self.description = description
+ self.options: List[Argument] = []
+ self.parser = parser
+
+ def addoption(self, *optnames: str, **attrs: Any) -> None:
+ """Add an option to this group.
+
+ If a shortened version of a long option is specified, it will
+ be suppressed in the help. ``addoption('--twowords', '--two-words')``
+ results in help showing ``--two-words`` only, but ``--twowords`` gets
+ accepted **and** the automatic destination is in ``args.twowords``.
+ """
+ conflict = set(optnames).intersection(
+ name for opt in self.options for name in opt.names()
+ )
+ if conflict:
+ raise ValueError("option names %s already added" % conflict)
+ option = Argument(*optnames, **attrs)
+ self._addoption_instance(option, shortupper=False)
+
+ def _addoption(self, *optnames: str, **attrs: Any) -> None:
+ option = Argument(*optnames, **attrs)
+ self._addoption_instance(option, shortupper=True)
+
+ def _addoption_instance(self, option: "Argument", shortupper: bool = False) -> None:
+ if not shortupper:
+ for opt in option._short_opts:
+ if opt[0] == "-" and opt[1].islower():
+ raise ValueError("lowercase shortoptions reserved")
+ if self.parser:
+ self.parser.processoption(option)
+ self.options.append(option)
+
+
+class MyOptionParser(argparse.ArgumentParser):
+ def __init__(
+ self,
+ parser: Parser,
+ extra_info: Optional[Dict[str, Any]] = None,
+ prog: Optional[str] = None,
+ ) -> None:
+ self._parser = parser
+ super().__init__(
+ prog=prog,
+ usage=parser._usage,
+ add_help=False,
+ formatter_class=DropShorterLongHelpFormatter,
+ allow_abbrev=False,
+ )
+ # extra_info is a dict of (param -> value) to display if there's
+ # a usage error, to provide more contextual information to the user.
+ self.extra_info = extra_info if extra_info else {}
+
+ def error(self, message: str) -> "NoReturn":
+ """Transform argparse error message into UsageError."""
+ msg = f"{self.prog}: error: {message}"
+
+ if hasattr(self._parser, "_config_source_hint"):
+ # Type ignored because the attribute is set dynamically.
+ msg = f"{msg} ({self._parser._config_source_hint})" # type: ignore
+
+ raise UsageError(self.format_usage() + msg)
+
+ # Type ignored because typeshed has a very complex type in the superclass.
+ def parse_args( # type: ignore
+ self,
+ args: Optional[Sequence[str]] = None,
+ namespace: Optional[argparse.Namespace] = None,
+ ) -> argparse.Namespace:
+ """Allow splitting of positional arguments."""
+ parsed, unrecognized = self.parse_known_args(args, namespace)
+ if unrecognized:
+ for arg in unrecognized:
+ if arg and arg[0] == "-":
+ lines = ["unrecognized arguments: %s" % (" ".join(unrecognized))]
+ for k, v in sorted(self.extra_info.items()):
+ lines.append(f" {k}: {v}")
+ self.error("\n".join(lines))
+ getattr(parsed, FILE_OR_DIR).extend(unrecognized)
+ return parsed
+
+ if sys.version_info[:2] < (3, 9): # pragma: no cover
+ # Backport of https://github.com/python/cpython/pull/14316 so we can
+ # disable long --argument abbreviations without breaking short flags.
+ def _parse_optional(
+ self, arg_string: str
+ ) -> Optional[Tuple[Optional[argparse.Action], str, Optional[str]]]:
+ if not arg_string:
+ return None
+ if not arg_string[0] in self.prefix_chars:
+ return None
+ if arg_string in self._option_string_actions:
+ action = self._option_string_actions[arg_string]
+ return action, arg_string, None
+ if len(arg_string) == 1:
+ return None
+ if "=" in arg_string:
+ option_string, explicit_arg = arg_string.split("=", 1)
+ if option_string in self._option_string_actions:
+ action = self._option_string_actions[option_string]
+ return action, option_string, explicit_arg
+ if self.allow_abbrev or not arg_string.startswith("--"):
+ option_tuples = self._get_option_tuples(arg_string)
+ if len(option_tuples) > 1:
+ msg = gettext(
+ "ambiguous option: %(option)s could match %(matches)s"
+ )
+ options = ", ".join(option for _, option, _ in option_tuples)
+ self.error(msg % {"option": arg_string, "matches": options})
+ elif len(option_tuples) == 1:
+ (option_tuple,) = option_tuples
+ return option_tuple
+ if self._negative_number_matcher.match(arg_string):
+ if not self._has_negative_number_optionals:
+ return None
+ if " " in arg_string:
+ return None
+ return None, arg_string, None
+
+
+class DropShorterLongHelpFormatter(argparse.HelpFormatter):
+ """Shorten help for long options that differ only in extra hyphens.
+
+ - Collapse **long** options that are the same except for extra hyphens.
+ - Shortcut if there are only two options and one of them is a short one.
+ - Cache result on the action object as this is called at least 2 times.
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ # Use more accurate terminal width.
+ if "width" not in kwargs:
+ kwargs["width"] = _pytest._io.get_terminal_width()
+ super().__init__(*args, **kwargs)
+
+ def _format_action_invocation(self, action: argparse.Action) -> str:
+ orgstr = super()._format_action_invocation(action)
+ if orgstr and orgstr[0] != "-": # only optional arguments
+ return orgstr
+ res: Optional[str] = getattr(action, "_formatted_action_invocation", None)
+ if res:
+ return res
+ options = orgstr.split(", ")
+ if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2):
+ # a shortcut for '-h, --help' or '--abc', '-a'
+ action._formatted_action_invocation = orgstr # type: ignore
+ return orgstr
+ return_list = []
+ short_long: Dict[str, str] = {}
+ for option in options:
+ if len(option) == 2 or option[2] == " ":
+ continue
+ if not option.startswith("--"):
+ raise ArgumentError(
+ 'long optional argument without "--": [%s]' % (option), option
+ )
+ xxoption = option[2:]
+ shortened = xxoption.replace("-", "")
+ if shortened not in short_long or len(short_long[shortened]) < len(
+ xxoption
+ ):
+ short_long[shortened] = xxoption
+ # now short_long has been filled out to the longest with dashes
+ # **and** we keep the right option ordering from add_argument
+ for option in options:
+ if len(option) == 2 or option[2] == " ":
+ return_list.append(option)
+ if option[2:] == short_long.get(option.replace("-", "")):
+ return_list.append(option.replace(" ", "=", 1))
+ formatted_action_invocation = ", ".join(return_list)
+ action._formatted_action_invocation = formatted_action_invocation # type: ignore
+ return formatted_action_invocation
+
+ def _split_lines(self, text, width):
+ """Wrap lines after splitting on original newlines.
+
+ This allows having explicit line breaks in the help text.
+ """
+ import textwrap
+
+ lines = []
+ for line in text.splitlines():
+ lines.extend(textwrap.wrap(line.strip(), width))
+ return lines
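
A rough standalone illustration of the _split_lines override above (this sketch is not pytest's formatter; the class name and option are invented): wrapping each original line separately keeps explicit newlines in help text.

    import argparse
    import textwrap

    class NewlinePreservingFormatter(argparse.HelpFormatter):
        def _split_lines(self, text, width):
            # Wrap each source line on its own so explicit "\n" breaks survive.
            lines = []
            for line in text.splitlines():
                lines.extend(textwrap.wrap(line.strip(), width))
            return lines

    parser = argparse.ArgumentParser(formatter_class=NewlinePreservingFormatter)
    parser.add_argument("--mode", help="first line of help\nsecond line kept separate")
    parser.print_help()
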
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/compat.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/compat.py
new file mode 100644
index 0000000000..ba267d2150
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/compat.py
@@ -0,0 +1,71 @@
+import functools
+import warnings
+from pathlib import Path
+from typing import Optional
+
+from ..compat import LEGACY_PATH
+from ..compat import legacy_path
+from ..deprecated import HOOK_LEGACY_PATH_ARG
+from _pytest.nodes import _check_path
+
+# hookname: (Path, LEGACY_PATH)
+imply_paths_hooks = {
+ "pytest_ignore_collect": ("collection_path", "path"),
+ "pytest_collect_file": ("file_path", "path"),
+ "pytest_pycollect_makemodule": ("module_path", "path"),
+ "pytest_report_header": ("start_path", "startdir"),
+ "pytest_report_collectionfinish": ("start_path", "startdir"),
+}
+
+
+class PathAwareHookProxy:
+ """
+ this helper wraps around hook callers
+ until pluggy supports fixingcalls, this one will do
+
+ it currently doesn't return full hook caller proxies for fixed hooks,
+ this may have to be changed later depending on bugs
+ """
+
+ def __init__(self, hook_caller):
+ self.__hook_caller = hook_caller
+
+ def __dir__(self):
+ return dir(self.__hook_caller)
+
+ def __getattr__(self, key, _wraps=functools.wraps):
+ hook = getattr(self.__hook_caller, key)
+ if key not in imply_paths_hooks:
+ self.__dict__[key] = hook
+ return hook
+ else:
+ path_var, fspath_var = imply_paths_hooks[key]
+
+ @_wraps(hook)
+ def fixed_hook(**kw):
+
+ path_value: Optional[Path] = kw.pop(path_var, None)
+ fspath_value: Optional[LEGACY_PATH] = kw.pop(fspath_var, None)
+ if fspath_value is not None:
+ warnings.warn(
+ HOOK_LEGACY_PATH_ARG.format(
+ pylib_path_arg=fspath_var, pathlib_path_arg=path_var
+ ),
+ stacklevel=2,
+ )
+ if path_value is not None:
+ if fspath_value is not None:
+ _check_path(path_value, fspath_value)
+ else:
+ fspath_value = legacy_path(path_value)
+ else:
+ assert fspath_value is not None
+ path_value = Path(fspath_value)
+
+ kw[path_var] = path_value
+ kw[fspath_var] = fspath_value
+ return hook(**kw)
+
+ fixed_hook.__name__ = key
+ self.__dict__[key] = fixed_hook
+ return fixed_hook
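
A toy sketch of the argument-translation idea behind PathAwareHookProxy, independent of pytest and pluggy (imply_path and collect_file are invented for illustration): accept either the new Path keyword or the legacy one and derive the missing value.

    import functools
    from pathlib import Path

    def imply_path(path_var: str, fspath_var: str):
        def decorator(func):
            @functools.wraps(func)
            def wrapper(**kw):
                path_value = kw.pop(path_var, None)
                fspath_value = kw.pop(fspath_var, None)
                if path_value is None:
                    assert fspath_value is not None
                    path_value = Path(fspath_value)
                elif fspath_value is None:
                    fspath_value = str(path_value)  # stand-in for legacy_path()
                kw[path_var] = path_value
                kw[fspath_var] = fspath_value
                return func(**kw)
            return wrapper
        return decorator

    @imply_path("file_path", "path")
    def collect_file(file_path: Path, path: str) -> None:
        print(file_path, path)

    collect_file(file_path=Path("tests/test_x.py"))  # legacy value derived
    collect_file(path="tests/test_x.py")             # new value derived
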
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/exceptions.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/exceptions.py
new file mode 100644
index 0000000000..4f1320e758
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/exceptions.py
@@ -0,0 +1,11 @@
+from _pytest.compat import final
+
+
+@final
+class UsageError(Exception):
+ """Error in pytest usage or invocation."""
+
+
+class PrintHelp(Exception):
+ """Raised when pytest should print its help to skip the rest of the
+ argument parsing and validation."""
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/findpaths.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/findpaths.py
new file mode 100644
index 0000000000..89ade5f23b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/config/findpaths.py
@@ -0,0 +1,213 @@
+import os
+from pathlib import Path
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+import iniconfig
+
+from .exceptions import UsageError
+from _pytest.outcomes import fail
+from _pytest.pathlib import absolutepath
+from _pytest.pathlib import commonpath
+
+if TYPE_CHECKING:
+ from . import Config
+
+
+def _parse_ini_config(path: Path) -> iniconfig.IniConfig:
+ """Parse the given generic '.ini' file using legacy IniConfig parser, returning
+ the parsed object.
+
+ Raise UsageError if the file cannot be parsed.
+ """
+ try:
+ return iniconfig.IniConfig(str(path))
+ except iniconfig.ParseError as exc:
+ raise UsageError(str(exc)) from exc
+
+
+def load_config_dict_from_file(
+ filepath: Path,
+) -> Optional[Dict[str, Union[str, List[str]]]]:
+ """Load pytest configuration from the given file path, if supported.
+
+ Return None if the file does not contain valid pytest configuration.
+ """
+
+    # Configuration from ini files is obtained from the [pytest] section, if present.
+ if filepath.suffix == ".ini":
+ iniconfig = _parse_ini_config(filepath)
+
+ if "pytest" in iniconfig:
+ return dict(iniconfig["pytest"].items())
+ else:
+ # "pytest.ini" files are always the source of configuration, even if empty.
+ if filepath.name == "pytest.ini":
+ return {}
+
+ # '.cfg' files are considered if they contain a "[tool:pytest]" section.
+ elif filepath.suffix == ".cfg":
+ iniconfig = _parse_ini_config(filepath)
+
+ if "tool:pytest" in iniconfig.sections:
+ return dict(iniconfig["tool:pytest"].items())
+ elif "pytest" in iniconfig.sections:
+            # If a setup.cfg contains a "[pytest]" section, we raise a failure to indicate to users
+            # that plain "[pytest]" sections in setup.cfg files are no longer supported (#3086).
+ fail(CFG_PYTEST_SECTION.format(filename="setup.cfg"), pytrace=False)
+
+ # '.toml' files are considered if they contain a [tool.pytest.ini_options] table.
+ elif filepath.suffix == ".toml":
+ import tomli
+
+ toml_text = filepath.read_text(encoding="utf-8")
+ try:
+ config = tomli.loads(toml_text)
+ except tomli.TOMLDecodeError as exc:
+ raise UsageError(str(exc)) from exc
+
+ result = config.get("tool", {}).get("pytest", {}).get("ini_options", None)
+ if result is not None:
+ # TOML supports richer data types than ini files (strings, arrays, floats, ints, etc),
+ # however we need to convert all scalar values to str for compatibility with the rest
+ # of the configuration system, which expects strings only.
+ def make_scalar(v: object) -> Union[str, List[str]]:
+ return v if isinstance(v, list) else str(v)
+
+ return {k: make_scalar(v) for k, v in result.items()}
+
+ return None
+
+
+def locate_config(
+ args: Iterable[Path],
+) -> Tuple[Optional[Path], Optional[Path], Dict[str, Union[str, List[str]]]]:
+ """Search in the list of arguments for a valid ini-file for pytest,
+ and return a tuple of (rootdir, inifile, cfg-dict)."""
+ config_names = [
+ "pytest.ini",
+ "pyproject.toml",
+ "tox.ini",
+ "setup.cfg",
+ ]
+ args = [x for x in args if not str(x).startswith("-")]
+ if not args:
+ args = [Path.cwd()]
+ for arg in args:
+ argpath = absolutepath(arg)
+ for base in (argpath, *argpath.parents):
+ for config_name in config_names:
+ p = base / config_name
+ if p.is_file():
+ ini_config = load_config_dict_from_file(p)
+ if ini_config is not None:
+ return base, p, ini_config
+ return None, None, {}
+
+
+def get_common_ancestor(paths: Iterable[Path]) -> Path:
+ common_ancestor: Optional[Path] = None
+ for path in paths:
+ if not path.exists():
+ continue
+ if common_ancestor is None:
+ common_ancestor = path
+ else:
+ if common_ancestor in path.parents or path == common_ancestor:
+ continue
+ elif path in common_ancestor.parents:
+ common_ancestor = path
+ else:
+ shared = commonpath(path, common_ancestor)
+ if shared is not None:
+ common_ancestor = shared
+ if common_ancestor is None:
+ common_ancestor = Path.cwd()
+ elif common_ancestor.is_file():
+ common_ancestor = common_ancestor.parent
+ return common_ancestor
+
+
+def get_dirs_from_args(args: Iterable[str]) -> List[Path]:
+ def is_option(x: str) -> bool:
+ return x.startswith("-")
+
+ def get_file_part_from_node_id(x: str) -> str:
+ return x.split("::")[0]
+
+ def get_dir_from_path(path: Path) -> Path:
+ if path.is_dir():
+ return path
+ return path.parent
+
+ def safe_exists(path: Path) -> bool:
+ # This can throw on paths that contain characters unrepresentable at the OS level,
+ # or with invalid syntax on Windows (https://bugs.python.org/issue35306)
+ try:
+ return path.exists()
+ except OSError:
+ return False
+
+ # These look like paths but may not exist
+ possible_paths = (
+ absolutepath(get_file_part_from_node_id(arg))
+ for arg in args
+ if not is_option(arg)
+ )
+
+ return [get_dir_from_path(path) for path in possible_paths if safe_exists(path)]
+
+
+CFG_PYTEST_SECTION = "[pytest] section in {filename} files is no longer supported, change to [tool:pytest] instead."
+
+
+def determine_setup(
+ inifile: Optional[str],
+ args: Sequence[str],
+ rootdir_cmd_arg: Optional[str] = None,
+ config: Optional["Config"] = None,
+) -> Tuple[Path, Optional[Path], Dict[str, Union[str, List[str]]]]:
+ rootdir = None
+ dirs = get_dirs_from_args(args)
+ if inifile:
+ inipath_ = absolutepath(inifile)
+ inipath: Optional[Path] = inipath_
+ inicfg = load_config_dict_from_file(inipath_) or {}
+ if rootdir_cmd_arg is None:
+ rootdir = inipath_.parent
+ else:
+ ancestor = get_common_ancestor(dirs)
+ rootdir, inipath, inicfg = locate_config([ancestor])
+ if rootdir is None and rootdir_cmd_arg is None:
+ for possible_rootdir in (ancestor, *ancestor.parents):
+ if (possible_rootdir / "setup.py").is_file():
+ rootdir = possible_rootdir
+ break
+ else:
+ if dirs != [ancestor]:
+ rootdir, inipath, inicfg = locate_config(dirs)
+ if rootdir is None:
+ if config is not None:
+ cwd = config.invocation_params.dir
+ else:
+ cwd = Path.cwd()
+ rootdir = get_common_ancestor([cwd, ancestor])
+ is_fs_root = os.path.splitdrive(str(rootdir))[1] == "/"
+ if is_fs_root:
+ rootdir = ancestor
+ if rootdir_cmd_arg:
+ rootdir = absolutepath(os.path.expandvars(rootdir_cmd_arg))
+ if not rootdir.is_dir():
+ raise UsageError(
+ "Directory '{}' not found. Check your '--rootdir' option.".format(
+ rootdir
+ )
+ )
+ assert rootdir is not None
+ return rootdir, inipath, inicfg or {}
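
A hypothetical usage sketch of load_config_dict_from_file (assuming _pytest is importable, e.g. from inside this checkout; the file contents below are made up):

    from pathlib import Path
    from tempfile import TemporaryDirectory

    from _pytest.config.findpaths import load_config_dict_from_file

    with TemporaryDirectory() as tmp:
        ini = Path(tmp) / "pytest.ini"
        ini.write_text("[pytest]\naddopts = -q\n", encoding="utf-8")
        print(load_config_dict_from_file(ini))    # {'addopts': '-q'}

        tox = Path(tmp) / "tox.ini"
        tox.write_text("[flake8]\nmax-line-length = 88\n", encoding="utf-8")
        print(load_config_dict_from_file(tox))    # None: no [pytest] section
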
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/debugging.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/debugging.py
new file mode 100644
index 0000000000..452fb18ac3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/debugging.py
@@ -0,0 +1,388 @@
+"""Interactive debugging with PDB, the Python Debugger."""
+import argparse
+import functools
+import sys
+import types
+from typing import Any
+from typing import Callable
+from typing import Generator
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+from _pytest import outcomes
+from _pytest._code import ExceptionInfo
+from _pytest.config import Config
+from _pytest.config import ConftestImportFailure
+from _pytest.config import hookimpl
+from _pytest.config import PytestPluginManager
+from _pytest.config.argparsing import Parser
+from _pytest.config.exceptions import UsageError
+from _pytest.nodes import Node
+from _pytest.reports import BaseReport
+
+if TYPE_CHECKING:
+ from _pytest.capture import CaptureManager
+ from _pytest.runner import CallInfo
+
+
+def _validate_usepdb_cls(value: str) -> Tuple[str, str]:
+ """Validate syntax of --pdbcls option."""
+ try:
+ modname, classname = value.split(":")
+ except ValueError as e:
+ raise argparse.ArgumentTypeError(
+ f"{value!r} is not in the format 'modname:classname'"
+ ) from e
+ return (modname, classname)
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("general")
+ group._addoption(
+ "--pdb",
+ dest="usepdb",
+ action="store_true",
+ help="start the interactive Python debugger on errors or KeyboardInterrupt.",
+ )
+ group._addoption(
+ "--pdbcls",
+ dest="usepdb_cls",
+ metavar="modulename:classname",
+ type=_validate_usepdb_cls,
+ help="specify a custom interactive Python debugger for use with --pdb."
+ "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb",
+ )
+ group._addoption(
+ "--trace",
+ dest="trace",
+ action="store_true",
+ help="Immediately break when running each test.",
+ )
+
+
+def pytest_configure(config: Config) -> None:
+ import pdb
+
+ if config.getvalue("trace"):
+ config.pluginmanager.register(PdbTrace(), "pdbtrace")
+ if config.getvalue("usepdb"):
+ config.pluginmanager.register(PdbInvoke(), "pdbinvoke")
+
+ pytestPDB._saved.append(
+ (pdb.set_trace, pytestPDB._pluginmanager, pytestPDB._config)
+ )
+ pdb.set_trace = pytestPDB.set_trace
+ pytestPDB._pluginmanager = config.pluginmanager
+ pytestPDB._config = config
+
+ # NOTE: not using pytest_unconfigure, since it might get called although
+ # pytest_configure was not (if another plugin raises UsageError).
+ def fin() -> None:
+ (
+ pdb.set_trace,
+ pytestPDB._pluginmanager,
+ pytestPDB._config,
+ ) = pytestPDB._saved.pop()
+
+ config.add_cleanup(fin)
+
+
+class pytestPDB:
+ """Pseudo PDB that defers to the real pdb."""
+
+ _pluginmanager: Optional[PytestPluginManager] = None
+ _config: Optional[Config] = None
+ _saved: List[
+ Tuple[Callable[..., None], Optional[PytestPluginManager], Optional[Config]]
+ ] = []
+ _recursive_debug = 0
+ _wrapped_pdb_cls: Optional[Tuple[Type[Any], Type[Any]]] = None
+
+ @classmethod
+ def _is_capturing(cls, capman: Optional["CaptureManager"]) -> Union[str, bool]:
+ if capman:
+ return capman.is_capturing()
+ return False
+
+ @classmethod
+ def _import_pdb_cls(cls, capman: Optional["CaptureManager"]):
+ if not cls._config:
+ import pdb
+
+ # Happens when using pytest.set_trace outside of a test.
+ return pdb.Pdb
+
+ usepdb_cls = cls._config.getvalue("usepdb_cls")
+
+ if cls._wrapped_pdb_cls and cls._wrapped_pdb_cls[0] == usepdb_cls:
+ return cls._wrapped_pdb_cls[1]
+
+ if usepdb_cls:
+ modname, classname = usepdb_cls
+
+ try:
+ __import__(modname)
+ mod = sys.modules[modname]
+
+ # Handle --pdbcls=pdb:pdb.Pdb (useful e.g. with pdbpp).
+ parts = classname.split(".")
+ pdb_cls = getattr(mod, parts[0])
+ for part in parts[1:]:
+ pdb_cls = getattr(pdb_cls, part)
+ except Exception as exc:
+ value = ":".join((modname, classname))
+ raise UsageError(
+ f"--pdbcls: could not import {value!r}: {exc}"
+ ) from exc
+ else:
+ import pdb
+
+ pdb_cls = pdb.Pdb
+
+ wrapped_cls = cls._get_pdb_wrapper_class(pdb_cls, capman)
+ cls._wrapped_pdb_cls = (usepdb_cls, wrapped_cls)
+ return wrapped_cls
+
+ @classmethod
+ def _get_pdb_wrapper_class(cls, pdb_cls, capman: Optional["CaptureManager"]):
+ import _pytest.config
+
+ # Type ignored because mypy doesn't support "dynamic"
+ # inheritance like this.
+ class PytestPdbWrapper(pdb_cls): # type: ignore[valid-type,misc]
+ _pytest_capman = capman
+ _continued = False
+
+ def do_debug(self, arg):
+ cls._recursive_debug += 1
+ ret = super().do_debug(arg)
+ cls._recursive_debug -= 1
+ return ret
+
+ def do_continue(self, arg):
+ ret = super().do_continue(arg)
+ if cls._recursive_debug == 0:
+ assert cls._config is not None
+ tw = _pytest.config.create_terminal_writer(cls._config)
+ tw.line()
+
+ capman = self._pytest_capman
+ capturing = pytestPDB._is_capturing(capman)
+ if capturing:
+ if capturing == "global":
+ tw.sep(">", "PDB continue (IO-capturing resumed)")
+ else:
+ tw.sep(
+ ">",
+ "PDB continue (IO-capturing resumed for %s)"
+ % capturing,
+ )
+ assert capman is not None
+ capman.resume()
+ else:
+ tw.sep(">", "PDB continue")
+ assert cls._pluginmanager is not None
+ cls._pluginmanager.hook.pytest_leave_pdb(config=cls._config, pdb=self)
+ self._continued = True
+ return ret
+
+ do_c = do_cont = do_continue
+
+ def do_quit(self, arg):
+ """Raise Exit outcome when quit command is used in pdb.
+
+                This is a bit of a hack - it would be better if BdbQuit
+                could be handled, but this would require wrapping the
+                whole pytest run and adjusting the report, etc.
+ """
+ ret = super().do_quit(arg)
+
+ if cls._recursive_debug == 0:
+ outcomes.exit("Quitting debugger")
+
+ return ret
+
+ do_q = do_quit
+ do_exit = do_quit
+
+ def setup(self, f, tb):
+ """Suspend on setup().
+
+                Needed after do_continue has resumed and another
+                breakpoint is entered again.
+ """
+ ret = super().setup(f, tb)
+ if not ret and self._continued:
+ # pdb.setup() returns True if the command wants to exit
+ # from the interaction: do not suspend capturing then.
+ if self._pytest_capman:
+ self._pytest_capman.suspend_global_capture(in_=True)
+ return ret
+
+ def get_stack(self, f, t):
+ stack, i = super().get_stack(f, t)
+ if f is None:
+ # Find last non-hidden frame.
+ i = max(0, len(stack) - 1)
+ while i and stack[i][0].f_locals.get("__tracebackhide__", False):
+ i -= 1
+ return stack, i
+
+ return PytestPdbWrapper
+
+ @classmethod
+ def _init_pdb(cls, method, *args, **kwargs):
+ """Initialize PDB debugging, dropping any IO capturing."""
+ import _pytest.config
+
+ if cls._pluginmanager is None:
+ capman: Optional[CaptureManager] = None
+ else:
+ capman = cls._pluginmanager.getplugin("capturemanager")
+ if capman:
+ capman.suspend(in_=True)
+
+ if cls._config:
+ tw = _pytest.config.create_terminal_writer(cls._config)
+ tw.line()
+
+ if cls._recursive_debug == 0:
+ # Handle header similar to pdb.set_trace in py37+.
+ header = kwargs.pop("header", None)
+ if header is not None:
+ tw.sep(">", header)
+ else:
+ capturing = cls._is_capturing(capman)
+ if capturing == "global":
+ tw.sep(">", f"PDB {method} (IO-capturing turned off)")
+ elif capturing:
+ tw.sep(
+ ">",
+ "PDB %s (IO-capturing turned off for %s)"
+ % (method, capturing),
+ )
+ else:
+ tw.sep(">", f"PDB {method}")
+
+ _pdb = cls._import_pdb_cls(capman)(**kwargs)
+
+ if cls._pluginmanager:
+ cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config, pdb=_pdb)
+ return _pdb
+
+ @classmethod
+ def set_trace(cls, *args, **kwargs) -> None:
+ """Invoke debugging via ``Pdb.set_trace``, dropping any IO capturing."""
+ frame = sys._getframe().f_back
+ _pdb = cls._init_pdb("set_trace", *args, **kwargs)
+ _pdb.set_trace(frame)
+
+
+class PdbInvoke:
+ def pytest_exception_interact(
+ self, node: Node, call: "CallInfo[Any]", report: BaseReport
+ ) -> None:
+ capman = node.config.pluginmanager.getplugin("capturemanager")
+ if capman:
+ capman.suspend_global_capture(in_=True)
+ out, err = capman.read_global_capture()
+ sys.stdout.write(out)
+ sys.stdout.write(err)
+ assert call.excinfo is not None
+ _enter_pdb(node, call.excinfo, report)
+
+ def pytest_internalerror(self, excinfo: ExceptionInfo[BaseException]) -> None:
+ tb = _postmortem_traceback(excinfo)
+ post_mortem(tb)
+
+
+class PdbTrace:
+ @hookimpl(hookwrapper=True)
+ def pytest_pyfunc_call(self, pyfuncitem) -> Generator[None, None, None]:
+ wrap_pytest_function_for_tracing(pyfuncitem)
+ yield
+
+
+def wrap_pytest_function_for_tracing(pyfuncitem):
+ """Change the Python function object of the given Function item by a
+ wrapper which actually enters pdb before calling the python function
+ itself, effectively leaving the user in the pdb prompt in the first
+ statement of the function."""
+ _pdb = pytestPDB._init_pdb("runcall")
+ testfunction = pyfuncitem.obj
+
+ # we can't just return `partial(pdb.runcall, testfunction)` because (on
+ # python < 3.7.4) runcall's first param is `func`, which means we'd get
+ # an exception if one of the kwargs to testfunction was called `func`.
+ @functools.wraps(testfunction)
+ def wrapper(*args, **kwargs):
+ func = functools.partial(testfunction, *args, **kwargs)
+ _pdb.runcall(func)
+
+ pyfuncitem.obj = wrapper
+
+
+def maybe_wrap_pytest_function_for_tracing(pyfuncitem):
+ """Wrap the given pytestfunct item for tracing support if --trace was given in
+ the command line."""
+ if pyfuncitem.config.getvalue("trace"):
+ wrap_pytest_function_for_tracing(pyfuncitem)
+
+
+def _enter_pdb(
+ node: Node, excinfo: ExceptionInfo[BaseException], rep: BaseReport
+) -> BaseReport:
+ # XXX we re-use the TerminalReporter's terminalwriter
+ # because this seems to avoid some encoding related troubles
+ # for not completely clear reasons.
+ tw = node.config.pluginmanager.getplugin("terminalreporter")._tw
+ tw.line()
+
+ showcapture = node.config.option.showcapture
+
+ for sectionname, content in (
+ ("stdout", rep.capstdout),
+ ("stderr", rep.capstderr),
+ ("log", rep.caplog),
+ ):
+ if showcapture in (sectionname, "all") and content:
+ tw.sep(">", "captured " + sectionname)
+ if content[-1:] == "\n":
+ content = content[:-1]
+ tw.line(content)
+
+ tw.sep(">", "traceback")
+ rep.toterminal(tw)
+ tw.sep(">", "entering PDB")
+ tb = _postmortem_traceback(excinfo)
+ rep._pdbshown = True # type: ignore[attr-defined]
+ post_mortem(tb)
+ return rep
+
+
+def _postmortem_traceback(excinfo: ExceptionInfo[BaseException]) -> types.TracebackType:
+ from doctest import UnexpectedException
+
+ if isinstance(excinfo.value, UnexpectedException):
+ # A doctest.UnexpectedException is not useful for post_mortem.
+ # Use the underlying exception instead:
+ return excinfo.value.exc_info[2]
+ elif isinstance(excinfo.value, ConftestImportFailure):
+ # A config.ConftestImportFailure is not useful for post_mortem.
+ # Use the underlying exception instead:
+ return excinfo.value.excinfo[2]
+ else:
+ assert excinfo._excinfo is not None
+ return excinfo._excinfo[2]
+
+
+def post_mortem(t: types.TracebackType) -> None:
+ p = pytestPDB._init_pdb("post_mortem")
+ p.reset()
+ p.interaction(None, t)
+ if p.quitting:
+ outcomes.exit("Quitting debugger")
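
A small sketch of how a --pdbcls "modname:classname" value gets resolved, mirroring _validate_usepdb_cls and the import logic in _import_pdb_cls above (resolve_pdb_cls is a made-up helper, not pytest API):

    import importlib

    def resolve_pdb_cls(value: str):
        # Same "modname:classname" contract that --pdbcls validates.
        modname, classname = value.split(":")
        obj = importlib.import_module(modname)
        for part in classname.split("."):  # supports dotted names like "pdb.Pdb"
            obj = getattr(obj, part)
        return obj

    print(resolve_pdb_cls("pdb:Pdb"))  # <class 'pdb.Pdb'>
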
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/deprecated.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/deprecated.py
new file mode 100644
index 0000000000..5248927113
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/deprecated.py
@@ -0,0 +1,155 @@
+"""Deprecation messages and bits of code used elsewhere in the codebase that
+are planned to be removed in the next pytest release.
+
+Keeping it in a central location makes it easy to track what is deprecated and should
+be removed when the time comes.
+
+All constants defined in this module should be either instances of
+:class:`PytestWarning`, or :class:`UnformattedWarning`
+in case of warnings which need to format their messages.
+"""
+from warnings import warn
+
+from _pytest.warning_types import PytestDeprecationWarning
+from _pytest.warning_types import PytestRemovedIn7Warning
+from _pytest.warning_types import PytestRemovedIn8Warning
+from _pytest.warning_types import UnformattedWarning
+
+# set of plugins which have been integrated into the core; we use this list to ignore
+# them during registration to avoid conflicts
+DEPRECATED_EXTERNAL_PLUGINS = {
+ "pytest_catchlog",
+ "pytest_capturelog",
+ "pytest_faulthandler",
+}
+
+
+FILLFUNCARGS = UnformattedWarning(
+ PytestRemovedIn7Warning,
+ "{name} is deprecated, use "
+ "function._request._fillfixtures() instead if you cannot avoid reaching into internals.",
+)
+
+PYTEST_COLLECT_MODULE = UnformattedWarning(
+ PytestRemovedIn7Warning,
+ "pytest.collect.{name} was moved to pytest.{name}\n"
+ "Please update to the new name.",
+)
+
+# This can be* removed in pytest 8, but it's harmless and common, so no rush to remove.
+# * If you're in the future: "could have been".
+YIELD_FIXTURE = PytestDeprecationWarning(
+ "@pytest.yield_fixture is deprecated.\n"
+ "Use @pytest.fixture instead; they are the same."
+)
+
+MINUS_K_DASH = PytestRemovedIn7Warning(
+ "The `-k '-expr'` syntax to -k is deprecated.\nUse `-k 'not expr'` instead."
+)
+
+MINUS_K_COLON = PytestRemovedIn7Warning(
+ "The `-k 'expr:'` syntax to -k is deprecated.\n"
+ "Please open an issue if you use this and want a replacement."
+)
+
+WARNING_CAPTURED_HOOK = PytestRemovedIn7Warning(
+ "The pytest_warning_captured is deprecated and will be removed in a future release.\n"
+ "Please use pytest_warning_recorded instead."
+)
+
+WARNING_CMDLINE_PREPARSE_HOOK = PytestRemovedIn8Warning(
+ "The pytest_cmdline_preparse hook is deprecated and will be removed in a future release. \n"
+ "Please use pytest_load_initial_conftests hook instead."
+)
+
+FSCOLLECTOR_GETHOOKPROXY_ISINITPATH = PytestRemovedIn8Warning(
+ "The gethookproxy() and isinitpath() methods of FSCollector and Package are deprecated; "
+ "use self.session.gethookproxy() and self.session.isinitpath() instead. "
+)
+
+STRICT_OPTION = PytestRemovedIn8Warning(
+ "The --strict option is deprecated, use --strict-markers instead."
+)
+
+# This deprecation is never really meant to be removed.
+PRIVATE = PytestDeprecationWarning("A private pytest class or function was used.")
+
+UNITTEST_SKIP_DURING_COLLECTION = PytestRemovedIn8Warning(
+ "Raising unittest.SkipTest to skip tests during collection is deprecated. "
+ "Use pytest.skip() instead."
+)
+
+ARGUMENT_PERCENT_DEFAULT = PytestRemovedIn8Warning(
+ 'pytest now uses argparse. "%default" should be changed to "%(default)s"',
+)
+
+ARGUMENT_TYPE_STR_CHOICE = UnformattedWarning(
+ PytestRemovedIn8Warning,
+ "`type` argument to addoption() is the string {typ!r}."
+ " For choices this is optional and can be omitted, "
+ " but when supplied should be a type (for example `str` or `int`)."
+ " (options: {names})",
+)
+
+ARGUMENT_TYPE_STR = UnformattedWarning(
+ PytestRemovedIn8Warning,
+ "`type` argument to addoption() is the string {typ!r}, "
+ " but when supplied should be a type (for example `str` or `int`)."
+ " (options: {names})",
+)
+
+
+HOOK_LEGACY_PATH_ARG = UnformattedWarning(
+ PytestRemovedIn8Warning,
+ "The ({pylib_path_arg}: py.path.local) argument is deprecated, please use ({pathlib_path_arg}: pathlib.Path)\n"
+ "see https://docs.pytest.org/en/latest/deprecations.html"
+ "#py-path-local-arguments-for-hooks-replaced-with-pathlib-path",
+)
+
+NODE_CTOR_FSPATH_ARG = UnformattedWarning(
+ PytestRemovedIn8Warning,
+ "The (fspath: py.path.local) argument to {node_type_name} is deprecated. "
+ "Please use the (path: pathlib.Path) argument instead.\n"
+ "See https://docs.pytest.org/en/latest/deprecations.html"
+ "#fspath-argument-for-node-constructors-replaced-with-pathlib-path",
+)
+
+WARNS_NONE_ARG = PytestRemovedIn8Warning(
+ "Passing None has been deprecated.\n"
+ "See https://docs.pytest.org/en/latest/how-to/capture-warnings.html"
+ "#additional-use-cases-of-warnings-in-tests"
+ " for alternatives in common use cases."
+)
+
+KEYWORD_MSG_ARG = UnformattedWarning(
+ PytestRemovedIn8Warning,
+ "pytest.{func}(msg=...) is now deprecated, use pytest.{func}(reason=...) instead",
+)
+
+INSTANCE_COLLECTOR = PytestRemovedIn8Warning(
+ "The pytest.Instance collector type is deprecated and is no longer used. "
+ "See https://docs.pytest.org/en/latest/deprecations.html#the-pytest-instance-collector",
+)
+
+# You want to make some `__init__` or function "private".
+#
+# def my_private_function(some, args):
+# ...
+#
+# Do this:
+#
+# def my_private_function(some, args, *, _ispytest: bool = False):
+# check_ispytest(_ispytest)
+# ...
+#
+# Change all internal/allowed calls to
+#
+# my_private_function(some, args, _ispytest=True)
+#
+# All other calls will get the default _ispytest=False and trigger
+# the warning (possibly error in the future).
+
+
+def check_ispytest(ispytest: bool) -> None:
+ if not ispytest:
+ warn(PRIVATE, stacklevel=3)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/doctest.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/doctest.py
new file mode 100644
index 0000000000..0784f431b8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/doctest.py
@@ -0,0 +1,734 @@
+"""Discover and run doctests in modules and test files."""
+import bdb
+import inspect
+import os
+import platform
+import sys
+import traceback
+import types
+import warnings
+from contextlib import contextmanager
+from pathlib import Path
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generator
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Pattern
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+import pytest
+from _pytest import outcomes
+from _pytest._code.code import ExceptionInfo
+from _pytest._code.code import ReprFileLocation
+from _pytest._code.code import TerminalRepr
+from _pytest._io import TerminalWriter
+from _pytest.compat import safe_getattr
+from _pytest.config import Config
+from _pytest.config.argparsing import Parser
+from _pytest.fixtures import FixtureRequest
+from _pytest.nodes import Collector
+from _pytest.outcomes import OutcomeException
+from _pytest.pathlib import fnmatch_ex
+from _pytest.pathlib import import_path
+from _pytest.python_api import approx
+from _pytest.warning_types import PytestWarning
+
+if TYPE_CHECKING:
+ import doctest
+
+DOCTEST_REPORT_CHOICE_NONE = "none"
+DOCTEST_REPORT_CHOICE_CDIFF = "cdiff"
+DOCTEST_REPORT_CHOICE_NDIFF = "ndiff"
+DOCTEST_REPORT_CHOICE_UDIFF = "udiff"
+DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = "only_first_failure"
+
+DOCTEST_REPORT_CHOICES = (
+ DOCTEST_REPORT_CHOICE_NONE,
+ DOCTEST_REPORT_CHOICE_CDIFF,
+ DOCTEST_REPORT_CHOICE_NDIFF,
+ DOCTEST_REPORT_CHOICE_UDIFF,
+ DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE,
+)
+
+# Lazy definition of runner class
+RUNNER_CLASS = None
+# Lazy definition of output checker class
+CHECKER_CLASS: Optional[Type["doctest.OutputChecker"]] = None
+
+
+def pytest_addoption(parser: Parser) -> None:
+ parser.addini(
+ "doctest_optionflags",
+ "option flags for doctests",
+ type="args",
+ default=["ELLIPSIS"],
+ )
+ parser.addini(
+ "doctest_encoding", "encoding used for doctest files", default="utf-8"
+ )
+ group = parser.getgroup("collect")
+ group.addoption(
+ "--doctest-modules",
+ action="store_true",
+ default=False,
+ help="run doctests in all .py modules",
+ dest="doctestmodules",
+ )
+ group.addoption(
+ "--doctest-report",
+ type=str.lower,
+ default="udiff",
+ help="choose another output format for diffs on doctest failure",
+ choices=DOCTEST_REPORT_CHOICES,
+ dest="doctestreport",
+ )
+ group.addoption(
+ "--doctest-glob",
+ action="append",
+ default=[],
+ metavar="pat",
+ help="doctests file matching pattern, default: test*.txt",
+ dest="doctestglob",
+ )
+ group.addoption(
+ "--doctest-ignore-import-errors",
+ action="store_true",
+ default=False,
+ help="ignore doctest ImportErrors",
+ dest="doctest_ignore_import_errors",
+ )
+ group.addoption(
+ "--doctest-continue-on-failure",
+ action="store_true",
+ default=False,
+ help="for a given doctest, continue to run after the first failure",
+ dest="doctest_continue_on_failure",
+ )
+
+
+def pytest_unconfigure() -> None:
+ global RUNNER_CLASS
+
+ RUNNER_CLASS = None
+
+
+def pytest_collect_file(
+ file_path: Path,
+ parent: Collector,
+) -> Optional[Union["DoctestModule", "DoctestTextfile"]]:
+ config = parent.config
+ if file_path.suffix == ".py":
+ if config.option.doctestmodules and not any(
+ (_is_setup_py(file_path), _is_main_py(file_path))
+ ):
+ mod: DoctestModule = DoctestModule.from_parent(parent, path=file_path)
+ return mod
+ elif _is_doctest(config, file_path, parent):
+ txt: DoctestTextfile = DoctestTextfile.from_parent(parent, path=file_path)
+ return txt
+ return None
+
+
+def _is_setup_py(path: Path) -> bool:
+ if path.name != "setup.py":
+ return False
+ contents = path.read_bytes()
+ return b"setuptools" in contents or b"distutils" in contents
+
+
+def _is_doctest(config: Config, path: Path, parent: Collector) -> bool:
+ if path.suffix in (".txt", ".rst") and parent.session.isinitpath(path):
+ return True
+ globs = config.getoption("doctestglob") or ["test*.txt"]
+ return any(fnmatch_ex(glob, path) for glob in globs)
+
+
+def _is_main_py(path: Path) -> bool:
+ return path.name == "__main__.py"
+
+
+class ReprFailDoctest(TerminalRepr):
+ def __init__(
+ self, reprlocation_lines: Sequence[Tuple[ReprFileLocation, Sequence[str]]]
+ ) -> None:
+ self.reprlocation_lines = reprlocation_lines
+
+ def toterminal(self, tw: TerminalWriter) -> None:
+ for reprlocation, lines in self.reprlocation_lines:
+ for line in lines:
+ tw.line(line)
+ reprlocation.toterminal(tw)
+
+
+class MultipleDoctestFailures(Exception):
+ def __init__(self, failures: Sequence["doctest.DocTestFailure"]) -> None:
+ super().__init__()
+ self.failures = failures
+
+
+def _init_runner_class() -> Type["doctest.DocTestRunner"]:
+ import doctest
+
+ class PytestDoctestRunner(doctest.DebugRunner):
+ """Runner to collect failures.
+
+ Note that the out variable in this case is a list instead of a
+ stdout-like object.
+ """
+
+ def __init__(
+ self,
+ checker: Optional["doctest.OutputChecker"] = None,
+ verbose: Optional[bool] = None,
+ optionflags: int = 0,
+ continue_on_failure: bool = True,
+ ) -> None:
+ super().__init__(checker=checker, verbose=verbose, optionflags=optionflags)
+ self.continue_on_failure = continue_on_failure
+
+ def report_failure(
+ self,
+ out,
+ test: "doctest.DocTest",
+ example: "doctest.Example",
+ got: str,
+ ) -> None:
+ failure = doctest.DocTestFailure(test, example, got)
+ if self.continue_on_failure:
+ out.append(failure)
+ else:
+ raise failure
+
+ def report_unexpected_exception(
+ self,
+ out,
+ test: "doctest.DocTest",
+ example: "doctest.Example",
+ exc_info: Tuple[Type[BaseException], BaseException, types.TracebackType],
+ ) -> None:
+ if isinstance(exc_info[1], OutcomeException):
+ raise exc_info[1]
+ if isinstance(exc_info[1], bdb.BdbQuit):
+ outcomes.exit("Quitting debugger")
+ failure = doctest.UnexpectedException(test, example, exc_info)
+ if self.continue_on_failure:
+ out.append(failure)
+ else:
+ raise failure
+
+ return PytestDoctestRunner
+
+
+def _get_runner(
+ checker: Optional["doctest.OutputChecker"] = None,
+ verbose: Optional[bool] = None,
+ optionflags: int = 0,
+ continue_on_failure: bool = True,
+) -> "doctest.DocTestRunner":
+ # We need this in order to do a lazy import on doctest
+ global RUNNER_CLASS
+ if RUNNER_CLASS is None:
+ RUNNER_CLASS = _init_runner_class()
+ # Type ignored because the continue_on_failure argument is only defined on
+ # PytestDoctestRunner, which is lazily defined so can't be used as a type.
+ return RUNNER_CLASS( # type: ignore
+ checker=checker,
+ verbose=verbose,
+ optionflags=optionflags,
+ continue_on_failure=continue_on_failure,
+ )
+
+
+class DoctestItem(pytest.Item):
+ def __init__(
+ self,
+ name: str,
+ parent: "Union[DoctestTextfile, DoctestModule]",
+ runner: Optional["doctest.DocTestRunner"] = None,
+ dtest: Optional["doctest.DocTest"] = None,
+ ) -> None:
+ super().__init__(name, parent)
+ self.runner = runner
+ self.dtest = dtest
+ self.obj = None
+ self.fixture_request: Optional[FixtureRequest] = None
+
+ @classmethod
+ def from_parent( # type: ignore
+ cls,
+ parent: "Union[DoctestTextfile, DoctestModule]",
+ *,
+ name: str,
+ runner: "doctest.DocTestRunner",
+ dtest: "doctest.DocTest",
+ ):
+ # incompatible signature due to imposed limits on subclass
+ """The public named constructor."""
+ return super().from_parent(name=name, parent=parent, runner=runner, dtest=dtest)
+
+ def setup(self) -> None:
+ if self.dtest is not None:
+ self.fixture_request = _setup_fixtures(self)
+ globs = dict(getfixture=self.fixture_request.getfixturevalue)
+ for name, value in self.fixture_request.getfixturevalue(
+ "doctest_namespace"
+ ).items():
+ globs[name] = value
+ self.dtest.globs.update(globs)
+
+ def runtest(self) -> None:
+ assert self.dtest is not None
+ assert self.runner is not None
+ _check_all_skipped(self.dtest)
+ self._disable_output_capturing_for_darwin()
+ failures: List["doctest.DocTestFailure"] = []
+ # Type ignored because we change the type of `out` from what
+ # doctest expects.
+ self.runner.run(self.dtest, out=failures) # type: ignore[arg-type]
+ if failures:
+ raise MultipleDoctestFailures(failures)
+
+ def _disable_output_capturing_for_darwin(self) -> None:
+ """Disable output capturing. Otherwise, stdout is lost to doctest (#985)."""
+ if platform.system() != "Darwin":
+ return
+ capman = self.config.pluginmanager.getplugin("capturemanager")
+ if capman:
+ capman.suspend_global_capture(in_=True)
+ out, err = capman.read_global_capture()
+ sys.stdout.write(out)
+ sys.stderr.write(err)
+
+ # TODO: Type ignored -- breaks Liskov Substitution.
+ def repr_failure( # type: ignore[override]
+ self,
+ excinfo: ExceptionInfo[BaseException],
+ ) -> Union[str, TerminalRepr]:
+ import doctest
+
+ failures: Optional[
+ Sequence[Union[doctest.DocTestFailure, doctest.UnexpectedException]]
+ ] = None
+ if isinstance(
+ excinfo.value, (doctest.DocTestFailure, doctest.UnexpectedException)
+ ):
+ failures = [excinfo.value]
+ elif isinstance(excinfo.value, MultipleDoctestFailures):
+ failures = excinfo.value.failures
+
+ if failures is None:
+ return super().repr_failure(excinfo)
+
+ reprlocation_lines = []
+ for failure in failures:
+ example = failure.example
+ test = failure.test
+ filename = test.filename
+ if test.lineno is None:
+ lineno = None
+ else:
+ lineno = test.lineno + example.lineno + 1
+ message = type(failure).__name__
+ # TODO: ReprFileLocation doesn't expect a None lineno.
+ reprlocation = ReprFileLocation(filename, lineno, message) # type: ignore[arg-type]
+ checker = _get_checker()
+ report_choice = _get_report_choice(self.config.getoption("doctestreport"))
+ if lineno is not None:
+ assert failure.test.docstring is not None
+ lines = failure.test.docstring.splitlines(False)
+ # add line numbers to the left of the error message
+ assert test.lineno is not None
+ lines = [
+ "%03d %s" % (i + test.lineno + 1, x) for (i, x) in enumerate(lines)
+ ]
+ # trim docstring error lines to 10
+ lines = lines[max(example.lineno - 9, 0) : example.lineno + 1]
+ else:
+ lines = [
+ "EXAMPLE LOCATION UNKNOWN, not showing all tests of that example"
+ ]
+ indent = ">>>"
+ for line in example.source.splitlines():
+ lines.append(f"??? {indent} {line}")
+ indent = "..."
+ if isinstance(failure, doctest.DocTestFailure):
+ lines += checker.output_difference(
+ example, failure.got, report_choice
+ ).split("\n")
+ else:
+ inner_excinfo = ExceptionInfo.from_exc_info(failure.exc_info)
+ lines += ["UNEXPECTED EXCEPTION: %s" % repr(inner_excinfo.value)]
+ lines += [
+ x.strip("\n") for x in traceback.format_exception(*failure.exc_info)
+ ]
+ reprlocation_lines.append((reprlocation, lines))
+ return ReprFailDoctest(reprlocation_lines)
+
+ def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]:
+ assert self.dtest is not None
+ return self.path, self.dtest.lineno, "[doctest] %s" % self.name
+
+
+def _get_flag_lookup() -> Dict[str, int]:
+ import doctest
+
+ return dict(
+ DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1,
+ DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE,
+ NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE,
+ ELLIPSIS=doctest.ELLIPSIS,
+ IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL,
+ COMPARISON_FLAGS=doctest.COMPARISON_FLAGS,
+ ALLOW_UNICODE=_get_allow_unicode_flag(),
+ ALLOW_BYTES=_get_allow_bytes_flag(),
+ NUMBER=_get_number_flag(),
+ )
+
+
+def get_optionflags(parent):
+ optionflags_str = parent.config.getini("doctest_optionflags")
+ flag_lookup_table = _get_flag_lookup()
+ flag_acc = 0
+ for flag in optionflags_str:
+ flag_acc |= flag_lookup_table[flag]
+ return flag_acc
+
+
+def _get_continue_on_failure(config):
+ continue_on_failure = config.getvalue("doctest_continue_on_failure")
+ if continue_on_failure:
+ # We need to turn off this if we use pdb since we should stop at
+ # the first failure.
+ if config.getvalue("usepdb"):
+ continue_on_failure = False
+ return continue_on_failure
+
+
+class DoctestTextfile(pytest.Module):
+ obj = None
+
+ def collect(self) -> Iterable[DoctestItem]:
+ import doctest
+
+ # Inspired by doctest.testfile; ideally we would use it directly,
+ # but it doesn't support passing a custom checker.
+ encoding = self.config.getini("doctest_encoding")
+ text = self.path.read_text(encoding)
+ filename = str(self.path)
+ name = self.path.name
+ globs = {"__name__": "__main__"}
+
+ optionflags = get_optionflags(self)
+
+ runner = _get_runner(
+ verbose=False,
+ optionflags=optionflags,
+ checker=_get_checker(),
+ continue_on_failure=_get_continue_on_failure(self.config),
+ )
+
+ parser = doctest.DocTestParser()
+ test = parser.get_doctest(text, globs, name, filename, 0)
+ if test.examples:
+ yield DoctestItem.from_parent(
+ self, name=test.name, runner=runner, dtest=test
+ )
+
+
+def _check_all_skipped(test: "doctest.DocTest") -> None:
+ """Raise pytest.skip() if all examples in the given DocTest have the SKIP
+ option set."""
+ import doctest
+
+ all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples)
+ if all_skipped:
+ pytest.skip("all tests skipped by +SKIP option")
+
+
+def _is_mocked(obj: object) -> bool:
+ """Return if an object is possibly a mock object by checking the
+ existence of a highly improbable attribute."""
+ return (
+ safe_getattr(obj, "pytest_mock_example_attribute_that_shouldnt_exist", None)
+ is not None
+ )
+
+
+@contextmanager
+def _patch_unwrap_mock_aware() -> Generator[None, None, None]:
+ """Context manager which replaces ``inspect.unwrap`` with a version
+ that's aware of mock objects and doesn't recurse into them."""
+ real_unwrap = inspect.unwrap
+
+ def _mock_aware_unwrap(
+ func: Callable[..., Any], *, stop: Optional[Callable[[Any], Any]] = None
+ ) -> Any:
+ try:
+ if stop is None or stop is _is_mocked:
+ return real_unwrap(func, stop=_is_mocked)
+ _stop = stop
+ return real_unwrap(func, stop=lambda obj: _is_mocked(obj) or _stop(func))
+ except Exception as e:
+ warnings.warn(
+ "Got %r when unwrapping %r. This is usually caused "
+ "by a violation of Python's object protocol; see e.g. "
+ "https://github.com/pytest-dev/pytest/issues/5080" % (e, func),
+ PytestWarning,
+ )
+ raise
+
+ inspect.unwrap = _mock_aware_unwrap
+ try:
+ yield
+ finally:
+ inspect.unwrap = real_unwrap
+
+
+class DoctestModule(pytest.Module):
+ def collect(self) -> Iterable[DoctestItem]:
+ import doctest
+
+ class MockAwareDocTestFinder(doctest.DocTestFinder):
+ """A hackish doctest finder that overrides stdlib internals to fix a stdlib bug.
+
+ https://github.com/pytest-dev/pytest/issues/3456
+ https://bugs.python.org/issue25532
+ """
+
+ def _find_lineno(self, obj, source_lines):
+ """Doctest code does not take into account `@property`, this
+ is a hackish way to fix it. https://bugs.python.org/issue17446
+
+ Wrapped Doctests will need to be unwrapped so the correct
+ line number is returned. This will be reported upstream. #8796
+ """
+ if isinstance(obj, property):
+ obj = getattr(obj, "fget", obj)
+
+ if hasattr(obj, "__wrapped__"):
+ # Get the main obj in case of it being wrapped
+ obj = inspect.unwrap(obj)
+
+ # Type ignored because this is a private function.
+ return super()._find_lineno( # type:ignore[misc]
+ obj,
+ source_lines,
+ )
+
+ def _find(
+ self, tests, obj, name, module, source_lines, globs, seen
+ ) -> None:
+ if _is_mocked(obj):
+ return
+ with _patch_unwrap_mock_aware():
+
+ # Type ignored because this is a private function.
+ super()._find( # type:ignore[misc]
+ tests, obj, name, module, source_lines, globs, seen
+ )
+
+ if self.path.name == "conftest.py":
+ module = self.config.pluginmanager._importconftest(
+ self.path,
+ self.config.getoption("importmode"),
+ rootpath=self.config.rootpath,
+ )
+ else:
+ try:
+ module = import_path(self.path, root=self.config.rootpath)
+ except ImportError:
+ if self.config.getvalue("doctest_ignore_import_errors"):
+ pytest.skip("unable to import module %r" % self.path)
+ else:
+ raise
+ # Uses internal doctest module parsing mechanism.
+ finder = MockAwareDocTestFinder()
+ optionflags = get_optionflags(self)
+ runner = _get_runner(
+ verbose=False,
+ optionflags=optionflags,
+ checker=_get_checker(),
+ continue_on_failure=_get_continue_on_failure(self.config),
+ )
+
+ for test in finder.find(module, module.__name__):
+ if test.examples: # skip empty doctests
+ yield DoctestItem.from_parent(
+ self, name=test.name, runner=runner, dtest=test
+ )
+
+
+def _setup_fixtures(doctest_item: DoctestItem) -> FixtureRequest:
+ """Used by DoctestTextfile and DoctestItem to setup fixture information."""
+
+ def func() -> None:
+ pass
+
+ doctest_item.funcargs = {} # type: ignore[attr-defined]
+ fm = doctest_item.session._fixturemanager
+ doctest_item._fixtureinfo = fm.getfixtureinfo( # type: ignore[attr-defined]
+ node=doctest_item, func=func, cls=None, funcargs=False
+ )
+ fixture_request = FixtureRequest(doctest_item, _ispytest=True)
+ fixture_request._fillfixtures()
+ return fixture_request
+
+
+def _init_checker_class() -> Type["doctest.OutputChecker"]:
+ import doctest
+ import re
+
+ class LiteralsOutputChecker(doctest.OutputChecker):
+ # Based on doctest_nose_plugin.py from the nltk project
+ # (https://github.com/nltk/nltk) and on the "numtest" doctest extension
+ # by Sebastien Boisgerault (https://github.com/boisgera/numtest).
+
+ _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
+ _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE)
+ _number_re = re.compile(
+ r"""
+ (?P<number>
+ (?P<mantissa>
+ (?P<integer1> [+-]?\d*)\.(?P<fraction>\d+)
+ |
+ (?P<integer2> [+-]?\d+)\.
+ )
+ (?:
+ [Ee]
+ (?P<exponent1> [+-]?\d+)
+ )?
+ |
+ (?P<integer3> [+-]?\d+)
+ (?:
+ [Ee]
+ (?P<exponent2> [+-]?\d+)
+ )
+ )
+ """,
+ re.VERBOSE,
+ )
+
+ def check_output(self, want: str, got: str, optionflags: int) -> bool:
+ if super().check_output(want, got, optionflags):
+ return True
+
+ allow_unicode = optionflags & _get_allow_unicode_flag()
+ allow_bytes = optionflags & _get_allow_bytes_flag()
+ allow_number = optionflags & _get_number_flag()
+
+ if not allow_unicode and not allow_bytes and not allow_number:
+ return False
+
+ def remove_prefixes(regex: Pattern[str], txt: str) -> str:
+ return re.sub(regex, r"\1\2", txt)
+
+ if allow_unicode:
+ want = remove_prefixes(self._unicode_literal_re, want)
+ got = remove_prefixes(self._unicode_literal_re, got)
+
+ if allow_bytes:
+ want = remove_prefixes(self._bytes_literal_re, want)
+ got = remove_prefixes(self._bytes_literal_re, got)
+
+ if allow_number:
+ got = self._remove_unwanted_precision(want, got)
+
+ return super().check_output(want, got, optionflags)
+
+ def _remove_unwanted_precision(self, want: str, got: str) -> str:
+ wants = list(self._number_re.finditer(want))
+ gots = list(self._number_re.finditer(got))
+ if len(wants) != len(gots):
+ return got
+ offset = 0
+ for w, g in zip(wants, gots):
+ fraction: Optional[str] = w.group("fraction")
+ exponent: Optional[str] = w.group("exponent1")
+ if exponent is None:
+ exponent = w.group("exponent2")
+ precision = 0 if fraction is None else len(fraction)
+ if exponent is not None:
+ precision -= int(exponent)
+ if float(w.group()) == approx(float(g.group()), abs=10 ** -precision):
+ # They're close enough. Replace the text we actually
+ # got with the text we want, so that it will match when we
+ # check the string literally.
+ got = (
+ got[: g.start() + offset] + w.group() + got[g.end() + offset :]
+ )
+ offset += w.end() - w.start() - (g.end() - g.start())
+ return got
+
+ return LiteralsOutputChecker
+
+
+def _get_checker() -> "doctest.OutputChecker":
+ """Return a doctest.OutputChecker subclass that supports some
+ additional options:
+
+ * ALLOW_UNICODE and ALLOW_BYTES options to ignore u'' and b''
+ prefixes (respectively) in string literals. Useful when the same
+ doctest should run in Python 2 and Python 3.
+
+ * NUMBER to ignore floating-point differences smaller than the
+ precision of the literal number in the doctest.
+
+ An inner class is used to avoid importing "doctest" at the module
+ level.
+ """
+ global CHECKER_CLASS
+ if CHECKER_CLASS is None:
+ CHECKER_CLASS = _init_checker_class()
+ return CHECKER_CLASS()
+
+
+def _get_allow_unicode_flag() -> int:
+ """Register and return the ALLOW_UNICODE flag."""
+ import doctest
+
+ return doctest.register_optionflag("ALLOW_UNICODE")
+
+
+def _get_allow_bytes_flag() -> int:
+ """Register and return the ALLOW_BYTES flag."""
+ import doctest
+
+ return doctest.register_optionflag("ALLOW_BYTES")
+
+
+def _get_number_flag() -> int:
+ """Register and return the NUMBER flag."""
+ import doctest
+
+ return doctest.register_optionflag("NUMBER")
+
+
+def _get_report_choice(key: str) -> int:
+ """Return the actual `doctest` module flag value.
+
+ We want to do it as late as possible to avoid importing `doctest` and all
+ its dependencies when parsing options, as it adds overhead and breaks tests.
+ """
+ import doctest
+
+ return {
+ DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF,
+ DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF,
+ DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF,
+ DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE,
+ DOCTEST_REPORT_CHOICE_NONE: 0,
+ }[key]
+
+
+@pytest.fixture(scope="session")
+def doctest_namespace() -> Dict[str, Any]:
+ """Fixture that returns a :py:class:`dict` that will be injected into the
+ namespace of doctests."""
+ return dict()
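
A hypothetical doctest showing the NUMBER flag registered above in action; collect it with `pytest --doctest-modules` (the function is made up, the flag name is the real one):

    def approximate_third() -> float:
        """
        >>> approximate_third()  # doctest: +NUMBER
        0.333
        """
        return 1 / 3

With +NUMBER, the checker compares at the precision written in the expected output (three decimal places here), so the actual 0.3333333333333333 still matches.
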
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/faulthandler.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/faulthandler.py
new file mode 100644
index 0000000000..aaee307ff2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/faulthandler.py
@@ -0,0 +1,97 @@
+import io
+import os
+import sys
+from typing import Generator
+from typing import TextIO
+
+import pytest
+from _pytest.config import Config
+from _pytest.config.argparsing import Parser
+from _pytest.nodes import Item
+from _pytest.stash import StashKey
+
+
+fault_handler_stderr_key = StashKey[TextIO]()
+fault_handler_originally_enabled_key = StashKey[bool]()
+
+
+def pytest_addoption(parser: Parser) -> None:
+ help = (
+ "Dump the traceback of all threads if a test takes "
+ "more than TIMEOUT seconds to finish."
+ )
+ parser.addini("faulthandler_timeout", help, default=0.0)
+
+
+def pytest_configure(config: Config) -> None:
+ import faulthandler
+
+ stderr_fd_copy = os.dup(get_stderr_fileno())
+ config.stash[fault_handler_stderr_key] = open(stderr_fd_copy, "w")
+ config.stash[fault_handler_originally_enabled_key] = faulthandler.is_enabled()
+ faulthandler.enable(file=config.stash[fault_handler_stderr_key])
+
+
+def pytest_unconfigure(config: Config) -> None:
+ import faulthandler
+
+ faulthandler.disable()
+ # Close the dup file installed during pytest_configure.
+ if fault_handler_stderr_key in config.stash:
+ config.stash[fault_handler_stderr_key].close()
+ del config.stash[fault_handler_stderr_key]
+ if config.stash.get(fault_handler_originally_enabled_key, False):
+ # Re-enable the faulthandler if it was originally enabled.
+ faulthandler.enable(file=get_stderr_fileno())
+
+
+def get_stderr_fileno() -> int:
+ try:
+ fileno = sys.stderr.fileno()
+ # The Twisted Logger will return an invalid file descriptor since it is not backed
+ # by an FD. So, let's also forward this to the same code path as with pytest-xdist.
+ if fileno == -1:
+ raise AttributeError()
+ return fileno
+ except (AttributeError, io.UnsupportedOperation):
+ # pytest-xdist monkeypatches sys.stderr with an object that is not an actual file.
+ # https://docs.python.org/3/library/faulthandler.html#issue-with-file-descriptors
+ # This is potentially dangerous, but the best we can do.
+ return sys.__stderr__.fileno()
+
+
+def get_timeout_config_value(config: Config) -> float:
+ return float(config.getini("faulthandler_timeout") or 0.0)
+
+
+@pytest.hookimpl(hookwrapper=True, trylast=True)
+def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:
+ timeout = get_timeout_config_value(item.config)
+ stderr = item.config.stash[fault_handler_stderr_key]
+ if timeout > 0 and stderr is not None:
+ import faulthandler
+
+ faulthandler.dump_traceback_later(timeout, file=stderr)
+ try:
+ yield
+ finally:
+ faulthandler.cancel_dump_traceback_later()
+ else:
+ yield
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_enter_pdb() -> None:
+ """Cancel any traceback dumping due to timeout before entering pdb."""
+ import faulthandler
+
+ faulthandler.cancel_dump_traceback_later()
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_exception_interact() -> None:
+ """Cancel any traceback dumping due to an interactive exception being
+ raised."""
+ import faulthandler
+
+ faulthandler.cancel_dump_traceback_later()
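
The plugin above drives faulthandler with a configurable timeout (e.g. `faulthandler_timeout = 5` in an ini file). A standalone sketch of the same arm-and-cancel pattern, outside pytest:

    import faulthandler
    import sys
    import time

    faulthandler.dump_traceback_later(2.0, file=sys.stderr)  # dump all threads after 2s
    try:
        time.sleep(0.1)  # stand-in for the test body; make it longer than 2s to see the dump
    finally:
        faulthandler.cancel_dump_traceback_later()
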
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/fixtures.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/fixtures.py
new file mode 100644
index 0000000000..fddff931c5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/fixtures.py
@@ -0,0 +1,1686 @@
+import functools
+import inspect
+import os
+import sys
+import warnings
+from collections import defaultdict
+from collections import deque
+from contextlib import suppress
+from pathlib import Path
+from types import TracebackType
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Generator
+from typing import Generic
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import MutableMapping
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+import attr
+
+import _pytest
+from _pytest import nodes
+from _pytest._code import getfslineno
+from _pytest._code.code import FormattedExcinfo
+from _pytest._code.code import TerminalRepr
+from _pytest._io import TerminalWriter
+from _pytest.compat import _format_args
+from _pytest.compat import _PytestWrapper
+from _pytest.compat import assert_never
+from _pytest.compat import final
+from _pytest.compat import get_real_func
+from _pytest.compat import get_real_method
+from _pytest.compat import getfuncargnames
+from _pytest.compat import getimfunc
+from _pytest.compat import getlocation
+from _pytest.compat import is_generator
+from _pytest.compat import NOTSET
+from _pytest.compat import safe_getattr
+from _pytest.config import _PluggyPlugin
+from _pytest.config import Config
+from _pytest.config.argparsing import Parser
+from _pytest.deprecated import check_ispytest
+from _pytest.deprecated import FILLFUNCARGS
+from _pytest.deprecated import YIELD_FIXTURE
+from _pytest.mark import Mark
+from _pytest.mark import ParameterSet
+from _pytest.mark.structures import MarkDecorator
+from _pytest.outcomes import fail
+from _pytest.outcomes import TEST_OUTCOME
+from _pytest.pathlib import absolutepath
+from _pytest.pathlib import bestrelpath
+from _pytest.scope import HIGH_SCOPES
+from _pytest.scope import Scope
+from _pytest.stash import StashKey
+
+
+if TYPE_CHECKING:
+ from typing import Deque
+ from typing import NoReturn
+
+ from _pytest.scope import _ScopeName
+ from _pytest.main import Session
+ from _pytest.python import CallSpec2
+ from _pytest.python import Function
+ from _pytest.python import Metafunc
+
+
+# The value of the fixture -- return/yield of the fixture function (type variable).
+FixtureValue = TypeVar("FixtureValue")
+# The type of the fixture function (type variable).
+FixtureFunction = TypeVar("FixtureFunction", bound=Callable[..., object])
+# The type of a fixture function (type alias generic in fixture value).
+_FixtureFunc = Union[
+ Callable[..., FixtureValue], Callable[..., Generator[FixtureValue, None, None]]
+]
+# The type of FixtureDef.cached_result (type alias generic in fixture value).
+_FixtureCachedResult = Union[
+ Tuple[
+ # The result.
+ FixtureValue,
+ # Cache key.
+ object,
+ None,
+ ],
+ Tuple[
+ None,
+ # Cache key.
+ object,
+ # Exc info if raised.
+ Tuple[Type[BaseException], BaseException, TracebackType],
+ ],
+]
+
+
+@attr.s(frozen=True, auto_attribs=True)
+class PseudoFixtureDef(Generic[FixtureValue]):
+ cached_result: "_FixtureCachedResult[FixtureValue]"
+ _scope: Scope
+
+
+def pytest_sessionstart(session: "Session") -> None:
+ session._fixturemanager = FixtureManager(session)
+
+
+def get_scope_package(node, fixturedef: "FixtureDef[object]"):
+ import pytest
+
+ cls = pytest.Package
+ current = node
+ fixture_package_name = "{}/{}".format(fixturedef.baseid, "__init__.py")
+ while current and (
+ type(current) is not cls or fixture_package_name != current.nodeid
+ ):
+ current = current.parent
+ if current is None:
+ return node.session
+ return current
+
+
+def get_scope_node(
+ node: nodes.Node, scope: Scope
+) -> Optional[Union[nodes.Item, nodes.Collector]]:
+ import _pytest.python
+
+ if scope is Scope.Function:
+ return node.getparent(nodes.Item)
+ elif scope is Scope.Class:
+ return node.getparent(_pytest.python.Class)
+ elif scope is Scope.Module:
+ return node.getparent(_pytest.python.Module)
+ elif scope is Scope.Package:
+ return node.getparent(_pytest.python.Package)
+ elif scope is Scope.Session:
+ return node.getparent(_pytest.main.Session)
+ else:
+ assert_never(scope)
+
+
+# Used for storing artificial fixturedefs for direct parametrization.
+name2pseudofixturedef_key = StashKey[Dict[str, "FixtureDef[Any]"]]()
+
+
+def add_funcarg_pseudo_fixture_def(
+ collector: nodes.Collector, metafunc: "Metafunc", fixturemanager: "FixtureManager"
+) -> None:
+ # This function will transform all collected calls to functions
+ # if they use direct funcargs (i.e. direct parametrization)
+ # because we want later test execution to be able to rely on
+ # an existing FixtureDef structure for all arguments.
+ # XXX we can probably avoid this algorithm if we modify CallSpec2
+ # to directly care for creating the fixturedefs within its methods.
+ if not metafunc._calls[0].funcargs:
+ # This function call does not have direct parametrization.
+ return
+ # Collect funcargs of all callspecs into a list of values.
+ arg2params: Dict[str, List[object]] = {}
+ arg2scope: Dict[str, Scope] = {}
+ for callspec in metafunc._calls:
+ for argname, argvalue in callspec.funcargs.items():
+ assert argname not in callspec.params
+ callspec.params[argname] = argvalue
+ arg2params_list = arg2params.setdefault(argname, [])
+ callspec.indices[argname] = len(arg2params_list)
+ arg2params_list.append(argvalue)
+ if argname not in arg2scope:
+ scope = callspec._arg2scope.get(argname, Scope.Function)
+ arg2scope[argname] = scope
+ callspec.funcargs.clear()
+
+ # Register artificial FixtureDef's so that later at test execution
+ # time we can rely on a proper FixtureDef to exist for fixture setup.
+ arg2fixturedefs = metafunc._arg2fixturedefs
+ for argname, valuelist in arg2params.items():
+ # If we have a scope that is higher than function, we need
+ # to make sure we only ever create an according fixturedef on
+ # a per-scope basis. We thus store and cache the fixturedef on the
+ # node related to the scope.
+ scope = arg2scope[argname]
+ node = None
+ if scope is not Scope.Function:
+ node = get_scope_node(collector, scope)
+ if node is None:
+ assert scope is Scope.Class and isinstance(
+ collector, _pytest.python.Module
+ )
+ # Use module-level collector for class-scope (for now).
+ node = collector
+ if node is None:
+ name2pseudofixturedef = None
+ else:
+ default: Dict[str, FixtureDef[Any]] = {}
+ name2pseudofixturedef = node.stash.setdefault(
+ name2pseudofixturedef_key, default
+ )
+ if name2pseudofixturedef is not None and argname in name2pseudofixturedef:
+ arg2fixturedefs[argname] = [name2pseudofixturedef[argname]]
+ else:
+ fixturedef = FixtureDef(
+ fixturemanager=fixturemanager,
+ baseid="",
+ argname=argname,
+ func=get_direct_param_fixture_func,
+ scope=arg2scope[argname],
+ params=valuelist,
+ unittest=False,
+ ids=None,
+ )
+ arg2fixturedefs[argname] = [fixturedef]
+ if name2pseudofixturedef is not None:
+ name2pseudofixturedef[argname] = fixturedef
+
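+# For illustration, a minimal sketch of the "direct parametrization" case
+# handled above (the test and argument names are hypothetical, not part of
+# this module). Here "n" is not a fixture, so an artificial FixtureDef is
+# registered for it:
+#
+#     import pytest
+#
+#     @pytest.mark.parametrize("n", [1, 2, 3])
+#     def test_square_is_non_negative(n):
+#         assert n * n >= 0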
+
+def getfixturemarker(obj: object) -> Optional["FixtureFunctionMarker"]:
+ """Return fixturemarker or None if it doesn't exist or raised
+ exceptions."""
+ try:
+ fixturemarker: Optional[FixtureFunctionMarker] = getattr(
+ obj, "_pytestfixturefunction", None
+ )
+ except TEST_OUTCOME:
+        # Some objects raise errors on attribute access, e.g. `request`
+        # (from `flask import request`); we don't expect them to be fixture
+        # functions.
+ return None
+ return fixturemarker
+
+
+# Parametrized fixture key, helper alias for code below.
+_Key = Tuple[object, ...]
+
+
+def get_parametrized_fixture_keys(item: nodes.Item, scope: Scope) -> Iterator[_Key]:
+ """Return list of keys for all parametrized arguments which match
+ the specified scope."""
+ assert scope is not Scope.Function
+ try:
+ callspec = item.callspec # type: ignore[attr-defined]
+ except AttributeError:
+ pass
+ else:
+ cs: CallSpec2 = callspec
+        # cs.indices.items() yields argnames in an arbitrary order, so sort
+        # them to make different calls to get_parametrized_fixture_keys
+        # deterministic.
+ for argname, param_index in sorted(cs.indices.items()):
+ if cs._arg2scope[argname] != scope:
+ continue
+ if scope is Scope.Session:
+ key: _Key = (argname, param_index)
+ elif scope is Scope.Package:
+ key = (argname, param_index, item.path.parent)
+ elif scope is Scope.Module:
+ key = (argname, param_index, item.path)
+ elif scope is Scope.Class:
+ item_cls = item.cls # type: ignore[attr-defined]
+ key = (argname, param_index, item.path, item_cls)
+ else:
+ assert_never(scope)
+ yield key
+
+
+# Algorithm for sorting on a per-parametrized-resource setup basis.
+# It is called for the Session scope first and works its way down to the
+# lower scopes, so as to minimize the number of "high scope" setups and
+# teardowns.
+
+
+def reorder_items(items: Sequence[nodes.Item]) -> List[nodes.Item]:
+ argkeys_cache: Dict[Scope, Dict[nodes.Item, Dict[_Key, None]]] = {}
+ items_by_argkey: Dict[Scope, Dict[_Key, Deque[nodes.Item]]] = {}
+ for scope in HIGH_SCOPES:
+ d: Dict[nodes.Item, Dict[_Key, None]] = {}
+ argkeys_cache[scope] = d
+ item_d: Dict[_Key, Deque[nodes.Item]] = defaultdict(deque)
+ items_by_argkey[scope] = item_d
+ for item in items:
+ keys = dict.fromkeys(get_parametrized_fixture_keys(item, scope), None)
+ if keys:
+ d[item] = keys
+ for key in keys:
+ item_d[key].append(item)
+ items_dict = dict.fromkeys(items, None)
+ return list(
+ reorder_items_atscope(items_dict, argkeys_cache, items_by_argkey, Scope.Session)
+ )
+
+
+def fix_cache_order(
+ item: nodes.Item,
+ argkeys_cache: Dict[Scope, Dict[nodes.Item, Dict[_Key, None]]],
+ items_by_argkey: Dict[Scope, Dict[_Key, "Deque[nodes.Item]"]],
+) -> None:
+ for scope in HIGH_SCOPES:
+ for key in argkeys_cache[scope].get(item, []):
+ items_by_argkey[scope][key].appendleft(item)
+
+
+def reorder_items_atscope(
+ items: Dict[nodes.Item, None],
+ argkeys_cache: Dict[Scope, Dict[nodes.Item, Dict[_Key, None]]],
+ items_by_argkey: Dict[Scope, Dict[_Key, "Deque[nodes.Item]"]],
+ scope: Scope,
+) -> Dict[nodes.Item, None]:
+ if scope is Scope.Function or len(items) < 3:
+ return items
+ ignore: Set[Optional[_Key]] = set()
+ items_deque = deque(items)
+ items_done: Dict[nodes.Item, None] = {}
+ scoped_items_by_argkey = items_by_argkey[scope]
+ scoped_argkeys_cache = argkeys_cache[scope]
+ while items_deque:
+ no_argkey_group: Dict[nodes.Item, None] = {}
+ slicing_argkey = None
+ while items_deque:
+ item = items_deque.popleft()
+ if item in items_done or item in no_argkey_group:
+ continue
+ argkeys = dict.fromkeys(
+ (k for k in scoped_argkeys_cache.get(item, []) if k not in ignore), None
+ )
+ if not argkeys:
+ no_argkey_group[item] = None
+ else:
+ slicing_argkey, _ = argkeys.popitem()
+ # We don't have to remove relevant items from later in the
+ # deque because they'll just be ignored.
+ matching_items = [
+ i for i in scoped_items_by_argkey[slicing_argkey] if i in items
+ ]
+ for i in reversed(matching_items):
+ fix_cache_order(i, argkeys_cache, items_by_argkey)
+ items_deque.appendleft(i)
+ break
+ if no_argkey_group:
+ no_argkey_group = reorder_items_atscope(
+ no_argkey_group, argkeys_cache, items_by_argkey, scope.next_lower()
+ )
+ for item in no_argkey_group:
+ items_done[item] = None
+ ignore.add(slicing_argkey)
+ return items_done
+
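+# A rough sketch of the effect of the reordering above, using hypothetical
+# names. With a module-scoped parametrized fixture, items are grouped by
+# parameter value so each value is set up and torn down only once:
+#
+#     import pytest
+#
+#     @pytest.fixture(scope="module", params=["sqlite", "postgres"])
+#     def db(request):
+#         return request.param
+#
+#     def test_read(db):
+#         assert db
+#
+#     def test_write(db):
+#         assert db
+#
+# Expected execution order after reordering: test_read[sqlite],
+# test_write[sqlite], test_read[postgres], test_write[postgres].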
+
+def _fillfuncargs(function: "Function") -> None:
+ """Fill missing fixtures for a test function, old public API (deprecated)."""
+ warnings.warn(FILLFUNCARGS.format(name="pytest._fillfuncargs()"), stacklevel=2)
+ _fill_fixtures_impl(function)
+
+
+def fillfixtures(function: "Function") -> None:
+ """Fill missing fixtures for a test function (deprecated)."""
+ warnings.warn(
+ FILLFUNCARGS.format(name="_pytest.fixtures.fillfixtures()"), stacklevel=2
+ )
+ _fill_fixtures_impl(function)
+
+
+def _fill_fixtures_impl(function: "Function") -> None:
+ """Internal implementation to fill fixtures on the given function object."""
+ try:
+ request = function._request
+ except AttributeError:
+ # XXX this special code path is only expected to execute
+ # with the oejskit plugin. It uses classes with funcargs
+ # and we thus have to work a bit to allow this.
+ fm = function.session._fixturemanager
+ assert function.parent is not None
+ fi = fm.getfixtureinfo(function.parent, function.obj, None)
+ function._fixtureinfo = fi
+ request = function._request = FixtureRequest(function, _ispytest=True)
+ fm.session._setupstate.setup(function)
+ request._fillfixtures()
+ # Prune out funcargs for jstests.
+ function.funcargs = {name: function.funcargs[name] for name in fi.argnames}
+ else:
+ request._fillfixtures()
+
+
+def get_direct_param_fixture_func(request):
+ return request.param
+
+
+@attr.s(slots=True, auto_attribs=True)
+class FuncFixtureInfo:
+ # Original function argument names.
+ argnames: Tuple[str, ...]
+ # Argnames that function immediately requires. These include argnames +
+ # fixture names specified via usefixtures and via autouse=True in fixture
+ # definitions.
+ initialnames: Tuple[str, ...]
+ names_closure: List[str]
+ name2fixturedefs: Dict[str, Sequence["FixtureDef[Any]"]]
+
+ def prune_dependency_tree(self) -> None:
+ """Recompute names_closure from initialnames and name2fixturedefs.
+
+ Can only reduce names_closure, which means that the new closure will
+ always be a subset of the old one. The order is preserved.
+
+ This method is needed because direct parametrization may shadow some
+ of the fixtures that were included in the originally built dependency
+ tree. In this way the dependency tree can get pruned, and the closure
+ of argnames may get reduced.
+ """
+ closure: Set[str] = set()
+ working_set = set(self.initialnames)
+ while working_set:
+ argname = working_set.pop()
+            # Argname may be something not included in the original
+            # names_closure, in which case we ignore it. This currently
+            # happens with pseudo FixtureDefs which wrap
+            # 'get_direct_param_fixture_func(request)': they introduce the
+            # new dependency 'request', which might have been missing in the
+            # original tree (closure).
+ if argname not in closure and argname in self.names_closure:
+ closure.add(argname)
+ if argname in self.name2fixturedefs:
+ working_set.update(self.name2fixturedefs[argname][-1].argnames)
+
+ self.names_closure[:] = sorted(closure, key=self.names_closure.index)
+
+
+class FixtureRequest:
+ """A request for a fixture from a test or fixture function.
+
+ A request object gives access to the requesting test context and has
+ an optional ``param`` attribute in case the fixture is parametrized
+ indirectly.
+ """
+
+ def __init__(self, pyfuncitem, *, _ispytest: bool = False) -> None:
+ check_ispytest(_ispytest)
+ self._pyfuncitem = pyfuncitem
+ #: Fixture for which this request is being performed.
+ self.fixturename: Optional[str] = None
+ self._scope = Scope.Function
+ self._fixture_defs: Dict[str, FixtureDef[Any]] = {}
+ fixtureinfo: FuncFixtureInfo = pyfuncitem._fixtureinfo
+ self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy()
+ self._arg2index: Dict[str, int] = {}
+ self._fixturemanager: FixtureManager = pyfuncitem.session._fixturemanager
+
+ @property
+ def scope(self) -> "_ScopeName":
+ """Scope string, one of "function", "class", "module", "package", "session"."""
+ return self._scope.value
+
+ @property
+ def fixturenames(self) -> List[str]:
+ """Names of all active fixtures in this request."""
+ result = list(self._pyfuncitem._fixtureinfo.names_closure)
+ result.extend(set(self._fixture_defs).difference(result))
+ return result
+
+ @property
+ def node(self):
+ """Underlying collection node (depends on current request scope)."""
+ return self._getscopeitem(self._scope)
+
+ def _getnextfixturedef(self, argname: str) -> "FixtureDef[Any]":
+ fixturedefs = self._arg2fixturedefs.get(argname, None)
+ if fixturedefs is None:
+            # We arrive here because of a dynamic call to
+            # getfixturevalue(argname), which was naturally
+            # not known at parsing/collection time.
+ assert self._pyfuncitem.parent is not None
+ parentid = self._pyfuncitem.parent.nodeid
+ fixturedefs = self._fixturemanager.getfixturedefs(argname, parentid)
+ # TODO: Fix this type ignore. Either add assert or adjust types.
+ # Can this be None here?
+ self._arg2fixturedefs[argname] = fixturedefs # type: ignore[assignment]
+        # The fixturedefs list is immutable, so we maintain a decreasing index.
+ index = self._arg2index.get(argname, 0) - 1
+ if fixturedefs is None or (-index > len(fixturedefs)):
+ raise FixtureLookupError(argname, self)
+ self._arg2index[argname] = index
+ return fixturedefs[index]
+
+ @property
+ def config(self) -> Config:
+ """The pytest config object associated with this request."""
+ return self._pyfuncitem.config # type: ignore[no-any-return]
+
+ @property
+ def function(self):
+ """Test function object if the request has a per-function scope."""
+ if self.scope != "function":
+ raise AttributeError(
+ f"function not available in {self.scope}-scoped context"
+ )
+ return self._pyfuncitem.obj
+
+ @property
+ def cls(self):
+ """Class (can be None) where the test function was collected."""
+ if self.scope not in ("class", "function"):
+ raise AttributeError(f"cls not available in {self.scope}-scoped context")
+ clscol = self._pyfuncitem.getparent(_pytest.python.Class)
+ if clscol:
+ return clscol.obj
+
+ @property
+ def instance(self):
+ """Instance (can be None) on which test function was collected."""
+ # unittest support hack, see _pytest.unittest.TestCaseFunction.
+ try:
+ return self._pyfuncitem._testcase
+ except AttributeError:
+ function = getattr(self, "function", None)
+ return getattr(function, "__self__", None)
+
+ @property
+ def module(self):
+ """Python module object where the test function was collected."""
+ if self.scope not in ("function", "class", "module"):
+ raise AttributeError(f"module not available in {self.scope}-scoped context")
+ return self._pyfuncitem.getparent(_pytest.python.Module).obj
+
+ @property
+ def path(self) -> Path:
+ if self.scope not in ("function", "class", "module", "package"):
+ raise AttributeError(f"path not available in {self.scope}-scoped context")
+ # TODO: Remove ignore once _pyfuncitem is properly typed.
+ return self._pyfuncitem.path # type: ignore
+
+ @property
+ def keywords(self) -> MutableMapping[str, Any]:
+ """Keywords/markers dictionary for the underlying node."""
+ node: nodes.Node = self.node
+ return node.keywords
+
+ @property
+ def session(self) -> "Session":
+ """Pytest session object."""
+ return self._pyfuncitem.session # type: ignore[no-any-return]
+
+ def addfinalizer(self, finalizer: Callable[[], object]) -> None:
+ """Add finalizer/teardown function to be called after the last test
+ within the requesting test context finished execution."""
+ # XXX usually this method is shadowed by fixturedef specific ones.
+ self._addfinalizer(finalizer, scope=self.scope)
+
+ def _addfinalizer(self, finalizer: Callable[[], object], scope) -> None:
+ node = self._getscopeitem(scope)
+ node.addfinalizer(finalizer)
+
+ def applymarker(self, marker: Union[str, MarkDecorator]) -> None:
+ """Apply a marker to a single test function invocation.
+
+ This method is useful if you don't want to have a keyword/marker
+ on all function invocations.
+
+ :param marker:
+ A :class:`pytest.MarkDecorator` object created by a call
+ to ``pytest.mark.NAME(...)``.
+ """
+ self.node.add_marker(marker)
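+
+    # For example (a hedged sketch; "condition" stands for any runtime check):
+    #
+    #     def test_function(request):
+    #         if condition:
+    #             request.applymarker(pytest.mark.xfail(reason="known issue"))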
+
+ def raiseerror(self, msg: Optional[str]) -> "NoReturn":
+ """Raise a FixtureLookupError with the given message."""
+ raise self._fixturemanager.FixtureLookupError(None, self, msg)
+
+ def _fillfixtures(self) -> None:
+ item = self._pyfuncitem
+ fixturenames = getattr(item, "fixturenames", self.fixturenames)
+ for argname in fixturenames:
+ if argname not in item.funcargs:
+ item.funcargs[argname] = self.getfixturevalue(argname)
+
+ def getfixturevalue(self, argname: str) -> Any:
+ """Dynamically run a named fixture function.
+
+ Declaring fixtures via function argument is recommended where possible.
+ But if you can only decide whether to use another fixture at test
+ setup time, you may use this function to retrieve it inside a fixture
+ or test function body.
+
+ :raises pytest.FixtureLookupError:
+ If the given fixture could not be found.
+ """
+ fixturedef = self._get_active_fixturedef(argname)
+ assert fixturedef.cached_result is not None
+ return fixturedef.cached_result[0]
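+
+    # A minimal sketch of the dynamic lookup described above (hypothetical
+    # fixture names; "fast_backend" and "slow_backend" are assumed to be
+    # fixtures defined elsewhere):
+    #
+    #     @pytest.fixture
+    #     def backend(request, backend_name):
+    #         # backend_name is itself a fixture returning "fast" or "slow".
+    #         return request.getfixturevalue(f"{backend_name}_backend")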
+
+ def _get_active_fixturedef(
+ self, argname: str
+ ) -> Union["FixtureDef[object]", PseudoFixtureDef[object]]:
+ try:
+ return self._fixture_defs[argname]
+ except KeyError:
+ try:
+ fixturedef = self._getnextfixturedef(argname)
+ except FixtureLookupError:
+ if argname == "request":
+ cached_result = (self, [0], None)
+ return PseudoFixtureDef(cached_result, Scope.Function)
+ raise
+            # The fixture value is computed outside the except block above so
+            # that the FixtureLookupError does not leak into exceptions raised
+            # by the call.
+ self._compute_fixture_value(fixturedef)
+ self._fixture_defs[argname] = fixturedef
+ return fixturedef
+
+ def _get_fixturestack(self) -> List["FixtureDef[Any]"]:
+ current = self
+ values: List[FixtureDef[Any]] = []
+ while isinstance(current, SubRequest):
+ values.append(current._fixturedef) # type: ignore[has-type]
+ current = current._parent_request
+ values.reverse()
+ return values
+
+ def _compute_fixture_value(self, fixturedef: "FixtureDef[object]") -> None:
+ """Create a SubRequest based on "self" and call the execute method
+ of the given FixtureDef object.
+
+ This will force the FixtureDef object to throw away any previous
+ results and compute a new fixture value, which will be stored into
+ the FixtureDef object itself.
+ """
+        # Prepare a subrequest object before calling the fixture function
+        # (the latter is managed by fixturedef).
+ argname = fixturedef.argname
+ funcitem = self._pyfuncitem
+ scope = fixturedef._scope
+ try:
+ param = funcitem.callspec.getparam(argname)
+ except (AttributeError, ValueError):
+ param = NOTSET
+ param_index = 0
+ has_params = fixturedef.params is not None
+ fixtures_not_supported = getattr(funcitem, "nofuncargs", False)
+ if has_params and fixtures_not_supported:
+ msg = (
+ "{name} does not support fixtures, maybe unittest.TestCase subclass?\n"
+ "Node id: {nodeid}\n"
+ "Function type: {typename}"
+ ).format(
+ name=funcitem.name,
+ nodeid=funcitem.nodeid,
+ typename=type(funcitem).__name__,
+ )
+ fail(msg, pytrace=False)
+ if has_params:
+ frame = inspect.stack()[3]
+ frameinfo = inspect.getframeinfo(frame[0])
+ source_path = absolutepath(frameinfo.filename)
+ source_lineno = frameinfo.lineno
+ try:
+ source_path_str = str(
+ source_path.relative_to(funcitem.config.rootpath)
+ )
+ except ValueError:
+ source_path_str = str(source_path)
+ msg = (
+ "The requested fixture has no parameter defined for test:\n"
+ " {}\n\n"
+ "Requested fixture '{}' defined in:\n{}"
+ "\n\nRequested here:\n{}:{}".format(
+ funcitem.nodeid,
+ fixturedef.argname,
+ getlocation(fixturedef.func, funcitem.config.rootpath),
+ source_path_str,
+ source_lineno,
+ )
+ )
+ fail(msg, pytrace=False)
+ else:
+ param_index = funcitem.callspec.indices[argname]
+ # If a parametrize invocation set a scope it will override
+ # the static scope defined with the fixture function.
+ with suppress(KeyError):
+ scope = funcitem.callspec._arg2scope[argname]
+
+ subrequest = SubRequest(
+ self, scope, param, param_index, fixturedef, _ispytest=True
+ )
+
+ # Check if a higher-level scoped fixture accesses a lower level one.
+ subrequest._check_scope(argname, self._scope, scope)
+ try:
+ # Call the fixture function.
+ fixturedef.execute(request=subrequest)
+ finally:
+ self._schedule_finalizers(fixturedef, subrequest)
+
+ def _schedule_finalizers(
+ self, fixturedef: "FixtureDef[object]", subrequest: "SubRequest"
+ ) -> None:
+        # Even if the fixture function failed, it might have registered finalizers.
+ subrequest.node.addfinalizer(lambda: fixturedef.finish(request=subrequest))
+
+ def _check_scope(
+ self,
+ argname: str,
+ invoking_scope: Scope,
+ requested_scope: Scope,
+ ) -> None:
+ if argname == "request":
+ return
+ if invoking_scope > requested_scope:
+ # Try to report something helpful.
+ text = "\n".join(self._factorytraceback())
+ fail(
+ f"ScopeMismatch: You tried to access the {requested_scope.value} scoped "
+ f"fixture {argname} with a {invoking_scope.value} scoped request object, "
+ f"involved factories:\n{text}",
+ pytrace=False,
+ )
+
+ def _factorytraceback(self) -> List[str]:
+ lines = []
+ for fixturedef in self._get_fixturestack():
+ factory = fixturedef.func
+ fs, lineno = getfslineno(factory)
+ if isinstance(fs, Path):
+ session: Session = self._pyfuncitem.session
+ p = bestrelpath(session.path, fs)
+ else:
+ p = fs
+ args = _format_args(factory)
+ lines.append("%s:%d: def %s%s" % (p, lineno + 1, factory.__name__, args))
+ return lines
+
+ def _getscopeitem(
+ self, scope: Union[Scope, "_ScopeName"]
+ ) -> Union[nodes.Item, nodes.Collector]:
+ if isinstance(scope, str):
+ scope = Scope(scope)
+ if scope is Scope.Function:
+ # This might also be a non-function Item despite its attribute name.
+ node: Optional[Union[nodes.Item, nodes.Collector]] = self._pyfuncitem
+ elif scope is Scope.Package:
+            # FIXME: _fixturedef is not defined on FixtureRequest (this class),
+            # but on SubRequest (a subclass).
+ node = get_scope_package(self._pyfuncitem, self._fixturedef) # type: ignore[attr-defined]
+ else:
+ node = get_scope_node(self._pyfuncitem, scope)
+ if node is None and scope is Scope.Class:
+ # Fallback to function item itself.
+ node = self._pyfuncitem
+ assert node, 'Could not obtain a node for scope "{}" for function {!r}'.format(
+ scope, self._pyfuncitem
+ )
+ return node
+
+ def __repr__(self) -> str:
+ return "<FixtureRequest for %r>" % (self.node)
+
+
+@final
+class SubRequest(FixtureRequest):
+ """A sub request for handling getting a fixture from a test function/fixture."""
+
+ def __init__(
+ self,
+ request: "FixtureRequest",
+ scope: Scope,
+ param: Any,
+ param_index: int,
+ fixturedef: "FixtureDef[object]",
+ *,
+ _ispytest: bool = False,
+ ) -> None:
+ check_ispytest(_ispytest)
+ self._parent_request = request
+ self.fixturename = fixturedef.argname
+ if param is not NOTSET:
+ self.param = param
+ self.param_index = param_index
+ self._scope = scope
+ self._fixturedef = fixturedef
+ self._pyfuncitem = request._pyfuncitem
+ self._fixture_defs = request._fixture_defs
+ self._arg2fixturedefs = request._arg2fixturedefs
+ self._arg2index = request._arg2index
+ self._fixturemanager = request._fixturemanager
+
+ def __repr__(self) -> str:
+ return f"<SubRequest {self.fixturename!r} for {self._pyfuncitem!r}>"
+
+ def addfinalizer(self, finalizer: Callable[[], object]) -> None:
+ """Add finalizer/teardown function to be called after the last test
+ within the requesting test context finished execution."""
+ self._fixturedef.addfinalizer(finalizer)
+
+ def _schedule_finalizers(
+ self, fixturedef: "FixtureDef[object]", subrequest: "SubRequest"
+ ) -> None:
+        # If the executing fixturedef was not explicitly requested in the argument list
+        # (i.e. it was requested dynamically via getfixturevalue inside the fixture call),
+        # then ensure this fixturedef will be finished first.
+ if fixturedef.argname not in self.fixturenames:
+ fixturedef.addfinalizer(
+ functools.partial(self._fixturedef.finish, request=self)
+ )
+ super()._schedule_finalizers(fixturedef, subrequest)
+
+
+@final
+class FixtureLookupError(LookupError):
+ """Could not return a requested fixture (missing or invalid)."""
+
+ def __init__(
+ self, argname: Optional[str], request: FixtureRequest, msg: Optional[str] = None
+ ) -> None:
+ self.argname = argname
+ self.request = request
+ self.fixturestack = request._get_fixturestack()
+ self.msg = msg
+
+ def formatrepr(self) -> "FixtureLookupErrorRepr":
+ tblines: List[str] = []
+ addline = tblines.append
+ stack = [self.request._pyfuncitem.obj]
+ stack.extend(map(lambda x: x.func, self.fixturestack))
+ msg = self.msg
+ if msg is not None:
+            # The last fixture raised an error; present
+            # it at the requesting side.
+ stack = stack[:-1]
+ for function in stack:
+ fspath, lineno = getfslineno(function)
+ try:
+ lines, _ = inspect.getsourcelines(get_real_func(function))
+ except (OSError, IndexError, TypeError):
+ error_msg = "file %s, line %s: source code not available"
+ addline(error_msg % (fspath, lineno + 1))
+ else:
+ addline(f"file {fspath}, line {lineno + 1}")
+ for i, line in enumerate(lines):
+ line = line.rstrip()
+ addline(" " + line)
+ if line.lstrip().startswith("def"):
+ break
+
+ if msg is None:
+ fm = self.request._fixturemanager
+ available = set()
+ parentid = self.request._pyfuncitem.parent.nodeid
+ for name, fixturedefs in fm._arg2fixturedefs.items():
+ faclist = list(fm._matchfactories(fixturedefs, parentid))
+ if faclist:
+ available.add(name)
+ if self.argname in available:
+ msg = " recursive dependency involving fixture '{}' detected".format(
+ self.argname
+ )
+ else:
+ msg = f"fixture '{self.argname}' not found"
+ msg += "\n available fixtures: {}".format(", ".join(sorted(available)))
+ msg += "\n use 'pytest --fixtures [testpath]' for help on them."
+
+ return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname)
+
+
+class FixtureLookupErrorRepr(TerminalRepr):
+ def __init__(
+ self,
+ filename: Union[str, "os.PathLike[str]"],
+ firstlineno: int,
+ tblines: Sequence[str],
+ errorstring: str,
+ argname: Optional[str],
+ ) -> None:
+ self.tblines = tblines
+ self.errorstring = errorstring
+ self.filename = filename
+ self.firstlineno = firstlineno
+ self.argname = argname
+
+ def toterminal(self, tw: TerminalWriter) -> None:
+ # tw.line("FixtureLookupError: %s" %(self.argname), red=True)
+ for tbline in self.tblines:
+ tw.line(tbline.rstrip())
+ lines = self.errorstring.split("\n")
+ if lines:
+ tw.line(
+ f"{FormattedExcinfo.fail_marker} {lines[0].strip()}",
+ red=True,
+ )
+ for line in lines[1:]:
+ tw.line(
+ f"{FormattedExcinfo.flow_marker} {line.strip()}",
+ red=True,
+ )
+ tw.line()
+ tw.line("%s:%d" % (os.fspath(self.filename), self.firstlineno + 1))
+
+
+def fail_fixturefunc(fixturefunc, msg: str) -> "NoReturn":
+ fs, lineno = getfslineno(fixturefunc)
+ location = f"{fs}:{lineno + 1}"
+ source = _pytest._code.Source(fixturefunc)
+ fail(msg + ":\n\n" + str(source.indent()) + "\n" + location, pytrace=False)
+
+
+def call_fixture_func(
+ fixturefunc: "_FixtureFunc[FixtureValue]", request: FixtureRequest, kwargs
+) -> FixtureValue:
+ if is_generator(fixturefunc):
+ fixturefunc = cast(
+ Callable[..., Generator[FixtureValue, None, None]], fixturefunc
+ )
+ generator = fixturefunc(**kwargs)
+ try:
+ fixture_result = next(generator)
+ except StopIteration:
+ raise ValueError(f"{request.fixturename} did not yield a value") from None
+ finalizer = functools.partial(_teardown_yield_fixture, fixturefunc, generator)
+ request.addfinalizer(finalizer)
+ else:
+ fixturefunc = cast(Callable[..., FixtureValue], fixturefunc)
+ fixture_result = fixturefunc(**kwargs)
+ return fixture_result
+
+
+def _teardown_yield_fixture(fixturefunc, it) -> None:
+ """Execute the teardown of a fixture function by advancing the iterator
+ after the yield and ensure the iteration ends (if not it means there is
+ more than one yield in the function)."""
+ try:
+ next(it)
+ except StopIteration:
+ pass
+ else:
+ fail_fixturefunc(fixturefunc, "fixture function has more than one 'yield'")
+
+
+def _eval_scope_callable(
+ scope_callable: "Callable[[str, Config], _ScopeName]",
+ fixture_name: str,
+ config: Config,
+) -> "_ScopeName":
+ try:
+ # Type ignored because there is no typing mechanism to specify
+ # keyword arguments, currently.
+ result = scope_callable(fixture_name=fixture_name, config=config) # type: ignore[call-arg]
+ except Exception as e:
+ raise TypeError(
+ "Error evaluating {} while defining fixture '{}'.\n"
+ "Expected a function with the signature (*, fixture_name, config)".format(
+ scope_callable, fixture_name
+ )
+ ) from e
+ if not isinstance(result, str):
+ fail(
+ "Expected {} to return a 'str' while defining fixture '{}', but it returned:\n"
+ "{!r}".format(scope_callable, fixture_name, result),
+ pytrace=False,
+ )
+ return result
+
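+# A small sketch of the kind of dynamic-scope callable validated above
+# (the option and fixture names are hypothetical):
+#
+#     def determine_scope(fixture_name, config):
+#         if config.getoption("--keep-db", default=False):
+#             return "session"
+#         return "function"
+#
+#     @pytest.fixture(scope=determine_scope)
+#     def db():
+#         ...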
+
+@final
+class FixtureDef(Generic[FixtureValue]):
+ """A container for a factory definition."""
+
+ def __init__(
+ self,
+ fixturemanager: "FixtureManager",
+ baseid: Optional[str],
+ argname: str,
+ func: "_FixtureFunc[FixtureValue]",
+ scope: Union[Scope, "_ScopeName", Callable[[str, Config], "_ScopeName"], None],
+ params: Optional[Sequence[object]],
+ unittest: bool = False,
+ ids: Optional[
+ Union[
+ Tuple[Union[None, str, float, int, bool], ...],
+ Callable[[Any], Optional[object]],
+ ]
+ ] = None,
+ ) -> None:
+ self._fixturemanager = fixturemanager
+ self.baseid = baseid or ""
+ self.has_location = baseid is not None
+ self.func = func
+ self.argname = argname
+ if scope is None:
+ scope = Scope.Function
+ elif callable(scope):
+ scope = _eval_scope_callable(scope, argname, fixturemanager.config)
+
+ if isinstance(scope, str):
+ scope = Scope.from_user(
+ scope, descr=f"Fixture '{func.__name__}'", where=baseid
+ )
+ self._scope = scope
+ self.params: Optional[Sequence[object]] = params
+ self.argnames: Tuple[str, ...] = getfuncargnames(
+ func, name=argname, is_method=unittest
+ )
+ self.unittest = unittest
+ self.ids = ids
+ self.cached_result: Optional[_FixtureCachedResult[FixtureValue]] = None
+ self._finalizers: List[Callable[[], object]] = []
+
+ @property
+ def scope(self) -> "_ScopeName":
+ """Scope string, one of "function", "class", "module", "package", "session"."""
+ return self._scope.value
+
+ def addfinalizer(self, finalizer: Callable[[], object]) -> None:
+ self._finalizers.append(finalizer)
+
+ def finish(self, request: SubRequest) -> None:
+ exc = None
+ try:
+ while self._finalizers:
+ try:
+ func = self._finalizers.pop()
+ func()
+ except BaseException as e:
+                    # XXX Only the first exception will be seen by the user;
+                    # ideally all should be reported.
+ if exc is None:
+ exc = e
+ if exc:
+ raise exc
+ finally:
+ hook = self._fixturemanager.session.gethookproxy(request.node.path)
+ hook.pytest_fixture_post_finalizer(fixturedef=self, request=request)
+ # Even if finalization fails, we invalidate the cached fixture
+ # value and remove all finalizers because they may be bound methods
+ # which will keep instances alive.
+ self.cached_result = None
+ self._finalizers = []
+
+ def execute(self, request: SubRequest) -> FixtureValue:
+ # Get required arguments and register our own finish()
+ # with their finalization.
+ for argname in self.argnames:
+ fixturedef = request._get_active_fixturedef(argname)
+ if argname != "request":
+ # PseudoFixtureDef is only for "request".
+ assert isinstance(fixturedef, FixtureDef)
+ fixturedef.addfinalizer(functools.partial(self.finish, request=request))
+
+ my_cache_key = self.cache_key(request)
+ if self.cached_result is not None:
+ # note: comparison with `==` can fail (or be expensive) for e.g.
+ # numpy arrays (#6497).
+ cache_key = self.cached_result[1]
+ if my_cache_key is cache_key:
+ if self.cached_result[2] is not None:
+ _, val, tb = self.cached_result[2]
+ raise val.with_traceback(tb)
+ else:
+ result = self.cached_result[0]
+ return result
+ # We have a previous but differently parametrized fixture instance
+ # so we need to tear it down before creating a new one.
+ self.finish(request)
+ assert self.cached_result is None
+
+ hook = self._fixturemanager.session.gethookproxy(request.node.path)
+ result = hook.pytest_fixture_setup(fixturedef=self, request=request)
+ return result
+
+ def cache_key(self, request: SubRequest) -> object:
+ return request.param_index if not hasattr(request, "param") else request.param
+
+ def __repr__(self) -> str:
+ return "<FixtureDef argname={!r} scope={!r} baseid={!r}>".format(
+ self.argname, self.scope, self.baseid
+ )
+
+
+def resolve_fixture_function(
+ fixturedef: FixtureDef[FixtureValue], request: FixtureRequest
+) -> "_FixtureFunc[FixtureValue]":
+ """Get the actual callable that can be called to obtain the fixture
+ value, dealing with unittest-specific instances and bound methods."""
+ fixturefunc = fixturedef.func
+ if fixturedef.unittest:
+ if request.instance is not None:
+ # Bind the unbound method to the TestCase instance.
+ fixturefunc = fixturedef.func.__get__(request.instance) # type: ignore[union-attr]
+ else:
+ # The fixture function needs to be bound to the actual
+ # request.instance so that code working with "fixturedef" behaves
+ # as expected.
+ if request.instance is not None:
+            # Handle the case where the fixture is defined not in a test class but
+            # in some other class (for example a plugin class with a fixture), see #2270.
+ if hasattr(fixturefunc, "__self__") and not isinstance(
+ request.instance, fixturefunc.__self__.__class__ # type: ignore[union-attr]
+ ):
+ return fixturefunc
+ fixturefunc = getimfunc(fixturedef.func)
+ if fixturefunc != fixturedef.func:
+ fixturefunc = fixturefunc.__get__(request.instance) # type: ignore[union-attr]
+ return fixturefunc
+
+
+def pytest_fixture_setup(
+ fixturedef: FixtureDef[FixtureValue], request: SubRequest
+) -> FixtureValue:
+ """Execution of fixture setup."""
+ kwargs = {}
+ for argname in fixturedef.argnames:
+ fixdef = request._get_active_fixturedef(argname)
+ assert fixdef.cached_result is not None
+ result, arg_cache_key, exc = fixdef.cached_result
+ request._check_scope(argname, request._scope, fixdef._scope)
+ kwargs[argname] = result
+
+ fixturefunc = resolve_fixture_function(fixturedef, request)
+ my_cache_key = fixturedef.cache_key(request)
+ try:
+ result = call_fixture_func(fixturefunc, request, kwargs)
+ except TEST_OUTCOME:
+ exc_info = sys.exc_info()
+ assert exc_info[0] is not None
+ fixturedef.cached_result = (None, my_cache_key, exc_info)
+ raise
+ fixturedef.cached_result = (result, my_cache_key, None)
+ return result
+
+
+def _ensure_immutable_ids(
+ ids: Optional[
+ Union[
+ Iterable[Union[None, str, float, int, bool]],
+ Callable[[Any], Optional[object]],
+ ]
+ ],
+) -> Optional[
+ Union[
+ Tuple[Union[None, str, float, int, bool], ...],
+ Callable[[Any], Optional[object]],
+ ]
+]:
+ if ids is None:
+ return None
+ if callable(ids):
+ return ids
+ return tuple(ids)
+
+
+def _params_converter(
+ params: Optional[Iterable[object]],
+) -> Optional[Tuple[object, ...]]:
+ return tuple(params) if params is not None else None
+
+
+def wrap_function_to_error_out_if_called_directly(
+ function: FixtureFunction,
+ fixture_marker: "FixtureFunctionMarker",
+) -> FixtureFunction:
+ """Wrap the given fixture function so we can raise an error about it being called directly,
+ instead of used as an argument in a test function."""
+ message = (
+ 'Fixture "{name}" called directly. Fixtures are not meant to be called directly,\n'
+ "but are created automatically when test functions request them as parameters.\n"
+ "See https://docs.pytest.org/en/stable/explanation/fixtures.html for more information about fixtures, and\n"
+ "https://docs.pytest.org/en/stable/deprecations.html#calling-fixtures-directly about how to update your code."
+ ).format(name=fixture_marker.name or function.__name__)
+
+ @functools.wraps(function)
+ def result(*args, **kwargs):
+ fail(message, pytrace=False)
+
+ # Keep reference to the original function in our own custom attribute so we don't unwrap
+ # further than this point and lose useful wrappings like @mock.patch (#3774).
+ result.__pytest_wrapped__ = _PytestWrapper(function) # type: ignore[attr-defined]
+
+ return cast(FixtureFunction, result)
+
+
+@final
+@attr.s(frozen=True, auto_attribs=True)
+class FixtureFunctionMarker:
+ scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]"
+ params: Optional[Tuple[object, ...]] = attr.ib(converter=_params_converter)
+ autouse: bool = False
+ ids: Union[
+ Tuple[Union[None, str, float, int, bool], ...],
+ Callable[[Any], Optional[object]],
+ ] = attr.ib(
+ default=None,
+ converter=_ensure_immutable_ids,
+ )
+ name: Optional[str] = None
+
+ def __call__(self, function: FixtureFunction) -> FixtureFunction:
+ if inspect.isclass(function):
+ raise ValueError("class fixtures not supported (maybe in the future)")
+
+ if getattr(function, "_pytestfixturefunction", False):
+ raise ValueError(
+ "fixture is being applied more than once to the same function"
+ )
+
+ function = wrap_function_to_error_out_if_called_directly(function, self)
+
+ name = self.name or function.__name__
+ if name == "request":
+ location = getlocation(function)
+ fail(
+ "'request' is a reserved word for fixtures, use another name:\n {}".format(
+ location
+ ),
+ pytrace=False,
+ )
+
+ # Type ignored because https://github.com/python/mypy/issues/2087.
+ function._pytestfixturefunction = self # type: ignore[attr-defined]
+ return function
+
+
+@overload
+def fixture(
+ fixture_function: FixtureFunction,
+ *,
+ scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = ...,
+ params: Optional[Iterable[object]] = ...,
+ autouse: bool = ...,
+ ids: Optional[
+ Union[
+ Iterable[Union[None, str, float, int, bool]],
+ Callable[[Any], Optional[object]],
+ ]
+ ] = ...,
+ name: Optional[str] = ...,
+) -> FixtureFunction:
+ ...
+
+
+@overload
+def fixture(
+ fixture_function: None = ...,
+ *,
+ scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = ...,
+ params: Optional[Iterable[object]] = ...,
+ autouse: bool = ...,
+ ids: Optional[
+ Union[
+ Iterable[Union[None, str, float, int, bool]],
+ Callable[[Any], Optional[object]],
+ ]
+ ] = ...,
+ name: Optional[str] = None,
+) -> FixtureFunctionMarker:
+ ...
+
+
+def fixture(
+ fixture_function: Optional[FixtureFunction] = None,
+ *,
+ scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = "function",
+ params: Optional[Iterable[object]] = None,
+ autouse: bool = False,
+ ids: Optional[
+ Union[
+ Iterable[Union[None, str, float, int, bool]],
+ Callable[[Any], Optional[object]],
+ ]
+ ] = None,
+ name: Optional[str] = None,
+) -> Union[FixtureFunctionMarker, FixtureFunction]:
+ """Decorator to mark a fixture factory function.
+
+ This decorator can be used, with or without parameters, to define a
+ fixture function.
+
+ The name of the fixture function can later be referenced to cause its
+ invocation ahead of running tests: test modules or classes can use the
+ ``pytest.mark.usefixtures(fixturename)`` marker.
+
+ Test functions can directly use fixture names as input arguments in which
+ case the fixture instance returned from the fixture function will be
+ injected.
+
+ Fixtures can provide their values to test functions using ``return`` or
+ ``yield`` statements. When using ``yield`` the code block after the
+ ``yield`` statement is executed as teardown code regardless of the test
+ outcome, and must yield exactly once.
+
+ :param scope:
+ The scope for which this fixture is shared; one of ``"function"``
+ (default), ``"class"``, ``"module"``, ``"package"`` or ``"session"``.
+
+ This parameter may also be a callable which receives ``(fixture_name, config)``
+ as parameters, and must return a ``str`` with one of the values mentioned above.
+
+ See :ref:`dynamic scope` in the docs for more information.
+
+ :param params:
+ An optional list of parameters which will cause multiple invocations
+ of the fixture function and all of the tests using it. The current
+ parameter is available in ``request.param``.
+
+ :param autouse:
+ If True, the fixture func is activated for all tests that can see it.
+ If False (the default), an explicit reference is needed to activate
+ the fixture.
+
+ :param ids:
+ List of string ids each corresponding to the params so that they are
+ part of the test id. If no ids are provided they will be generated
+ automatically from the params.
+
+ :param name:
+ The name of the fixture. This defaults to the name of the decorated
+ function. If a fixture is used in the same module in which it is
+ defined, the function name of the fixture will be shadowed by the
+ function arg that requests the fixture; one way to resolve this is to
+ name the decorated function ``fixture_<fixturename>`` and then use
+ ``@pytest.fixture(name='<fixturename>')``.
+ """
+ fixture_marker = FixtureFunctionMarker(
+ scope=scope,
+ params=params,
+ autouse=autouse,
+ ids=ids,
+ name=name,
+ )
+
+ # Direct decoration.
+ if fixture_function:
+ return fixture_marker(fixture_function)
+
+ return fixture_marker
+
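+# A brief usage sketch for the decorator above (hypothetical names; the
+# open_connection() helper is assumed for illustration). The fixture is
+# parametrized, gives each parameter a readable id, and uses ``yield`` so the
+# code after it runs as teardown:
+#
+#     import pytest
+#
+#     @pytest.fixture(scope="module", params=[8080, 8081], ids=["a", "b"])
+#     def connection(request):
+#         conn = open_connection(port=request.param)
+#         yield conn
+#         conn.close()
+#
+#     def test_connection_is_open(connection):
+#         assert connection is not None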
+
+def yield_fixture(
+ fixture_function=None,
+ *args,
+ scope="function",
+ params=None,
+ autouse=False,
+ ids=None,
+ name=None,
+):
+ """(Return a) decorator to mark a yield-fixture factory function.
+
+ .. deprecated:: 3.0
+ Use :py:func:`pytest.fixture` directly instead.
+ """
+ warnings.warn(YIELD_FIXTURE, stacklevel=2)
+ return fixture(
+ fixture_function,
+ *args,
+ scope=scope,
+ params=params,
+ autouse=autouse,
+ ids=ids,
+ name=name,
+ )
+
+
+@fixture(scope="session")
+def pytestconfig(request: FixtureRequest) -> Config:
+ """Session-scoped fixture that returns the session's :class:`pytest.Config`
+ object.
+
+ Example::
+
+ def test_foo(pytestconfig):
+ if pytestconfig.getoption("verbose") > 0:
+ ...
+
+ """
+ return request.config
+
+
+def pytest_addoption(parser: Parser) -> None:
+ parser.addini(
+ "usefixtures",
+ type="args",
+ default=[],
+ help="list of default fixtures to be used with this project",
+ )
+
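+# The ini option registered above is typically set in a project's config file;
+# a minimal sketch (the fixture name "cleandir" is an example only):
+#
+#     # pytest.ini
+#     [pytest]
+#     usefixtures = cleandir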
+
+class FixtureManager:
+ """pytest fixture definitions and information is stored and managed
+ from this class.
+
+ During collection fm.parsefactories() is called multiple times to parse
+ fixture function definitions into FixtureDef objects and internal
+ data structures.
+
+ During collection of test functions, metafunc-mechanics instantiate
+ a FuncFixtureInfo object which is cached per node/func-name.
+ This FuncFixtureInfo object is later retrieved by Function nodes
+ which themselves offer a fixturenames attribute.
+
+ The FuncFixtureInfo object holds information about fixtures and FixtureDefs
+ relevant for a particular function. An initial list of fixtures is
+ assembled like this:
+
+ - ini-defined usefixtures
+ - autouse-marked fixtures along the collection chain up from the function
+ - usefixtures markers at module/class/function level
+ - test function funcargs
+
+    Subsequently the funcfixtureinfo.fixturenames attribute is computed
+    as the closure of the fixtures needed to set up the initial fixtures,
+    i.e. fixtures needed by fixture functions themselves are appended
+    to the fixturenames list.
+
+    During the test-setup phase all fixtures in fixturenames are instantiated,
+    retrieved via a lookup of their FuncFixtureInfo.
+ """
+
+ FixtureLookupError = FixtureLookupError
+ FixtureLookupErrorRepr = FixtureLookupErrorRepr
+
+ def __init__(self, session: "Session") -> None:
+ self.session = session
+ self.config: Config = session.config
+ self._arg2fixturedefs: Dict[str, List[FixtureDef[Any]]] = {}
+ self._holderobjseen: Set[object] = set()
+ # A mapping from a nodeid to a list of autouse fixtures it defines.
+ self._nodeid_autousenames: Dict[str, List[str]] = {
+ "": self.config.getini("usefixtures"),
+ }
+ session.config.pluginmanager.register(self, "funcmanage")
+
+ def _get_direct_parametrize_args(self, node: nodes.Node) -> List[str]:
+ """Return all direct parametrization arguments of a node, so we don't
+ mistake them for fixtures.
+
+ Check https://github.com/pytest-dev/pytest/issues/5036.
+
+ These things are done later as well when dealing with parametrization
+ so this could be improved.
+ """
+ parametrize_argnames: List[str] = []
+ for marker in node.iter_markers(name="parametrize"):
+ if not marker.kwargs.get("indirect", False):
+ p_argnames, _ = ParameterSet._parse_parametrize_args(
+ *marker.args, **marker.kwargs
+ )
+ parametrize_argnames.extend(p_argnames)
+
+ return parametrize_argnames
+
+ def getfixtureinfo(
+ self, node: nodes.Node, func, cls, funcargs: bool = True
+ ) -> FuncFixtureInfo:
+ if funcargs and not getattr(node, "nofuncargs", False):
+ argnames = getfuncargnames(func, name=node.name, cls=cls)
+ else:
+ argnames = ()
+
+ usefixtures = tuple(
+ arg for mark in node.iter_markers(name="usefixtures") for arg in mark.args
+ )
+ initialnames = usefixtures + argnames
+ fm = node.session._fixturemanager
+ initialnames, names_closure, arg2fixturedefs = fm.getfixtureclosure(
+ initialnames, node, ignore_args=self._get_direct_parametrize_args(node)
+ )
+ return FuncFixtureInfo(argnames, initialnames, names_closure, arg2fixturedefs)
+
+ def pytest_plugin_registered(self, plugin: _PluggyPlugin) -> None:
+ nodeid = None
+ try:
+ p = absolutepath(plugin.__file__) # type: ignore[attr-defined]
+ except AttributeError:
+ pass
+ else:
+ # Construct the base nodeid which is later used to check
+ # what fixtures are visible for particular tests (as denoted
+ # by their test id).
+ if p.name.startswith("conftest.py"):
+ try:
+ nodeid = str(p.parent.relative_to(self.config.rootpath))
+ except ValueError:
+ nodeid = ""
+ if nodeid == ".":
+ nodeid = ""
+ if os.sep != nodes.SEP:
+ nodeid = nodeid.replace(os.sep, nodes.SEP)
+
+ self.parsefactories(plugin, nodeid)
+
+ def _getautousenames(self, nodeid: str) -> Iterator[str]:
+ """Return the names of autouse fixtures applicable to nodeid."""
+ for parentnodeid in nodes.iterparentnodeids(nodeid):
+ basenames = self._nodeid_autousenames.get(parentnodeid)
+ if basenames:
+ yield from basenames
+
+ def getfixtureclosure(
+ self,
+ fixturenames: Tuple[str, ...],
+ parentnode: nodes.Node,
+ ignore_args: Sequence[str] = (),
+ ) -> Tuple[Tuple[str, ...], List[str], Dict[str, Sequence[FixtureDef[Any]]]]:
+ # Collect the closure of all fixtures, starting with the given
+ # fixturenames as the initial set. As we have to visit all
+ # factory definitions anyway, we also return an arg2fixturedefs
+ # mapping so that the caller can reuse it and does not have
+ # to re-discover fixturedefs again for each fixturename
+ # (discovering matching fixtures for a given name/node is expensive).
+
+ parentid = parentnode.nodeid
+ fixturenames_closure = list(self._getautousenames(parentid))
+
+ def merge(otherlist: Iterable[str]) -> None:
+ for arg in otherlist:
+ if arg not in fixturenames_closure:
+ fixturenames_closure.append(arg)
+
+ merge(fixturenames)
+
+ # At this point, fixturenames_closure contains what we call "initialnames",
+ # which is a set of fixturenames the function immediately requests. We
+ # need to return it as well, so save this.
+ initialnames = tuple(fixturenames_closure)
+
+ arg2fixturedefs: Dict[str, Sequence[FixtureDef[Any]]] = {}
+ lastlen = -1
+ while lastlen != len(fixturenames_closure):
+ lastlen = len(fixturenames_closure)
+ for argname in fixturenames_closure:
+ if argname in ignore_args:
+ continue
+ if argname in arg2fixturedefs:
+ continue
+ fixturedefs = self.getfixturedefs(argname, parentid)
+ if fixturedefs:
+ arg2fixturedefs[argname] = fixturedefs
+ merge(fixturedefs[-1].argnames)
+
+ def sort_by_scope(arg_name: str) -> Scope:
+ try:
+ fixturedefs = arg2fixturedefs[arg_name]
+ except KeyError:
+ return Scope.Function
+ else:
+ return fixturedefs[-1]._scope
+
+ fixturenames_closure.sort(key=sort_by_scope, reverse=True)
+ return initialnames, fixturenames_closure, arg2fixturedefs
+
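+    # A rough sketch of the closure computed above, with hypothetical fixtures:
+    #
+    #     @pytest.fixture(scope="session")
+    #     def config(): ...
+    #
+    #     @pytest.fixture
+    #     def client(config): ...
+    #
+    #     def test_request(client): ...
+    #
+    # For test_request the initialnames are ("client",) plus any applicable
+    # autouse fixtures; merging client's argnames pulls in "config", and the
+    # final closure is sorted so the session-scoped "config" comes before the
+    # function-scoped "client".
+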
+ def pytest_generate_tests(self, metafunc: "Metafunc") -> None:
+ """Generate new tests based on parametrized fixtures used by the given metafunc"""
+
+ def get_parametrize_mark_argnames(mark: Mark) -> Sequence[str]:
+ args, _ = ParameterSet._parse_parametrize_args(*mark.args, **mark.kwargs)
+ return args
+
+ for argname in metafunc.fixturenames:
+ # Get the FixtureDefs for the argname.
+ fixture_defs = metafunc._arg2fixturedefs.get(argname)
+ if not fixture_defs:
+                # Will raise FixtureLookupError at setup time if not parametrized somewhere
+                # else (e.g. @pytest.mark.parametrize).
+ continue
+
+ # If the test itself parametrizes using this argname, give it
+ # precedence.
+ if any(
+ argname in get_parametrize_mark_argnames(mark)
+ for mark in metafunc.definition.iter_markers("parametrize")
+ ):
+ continue
+
+ # In the common case we only look at the fixture def with the
+ # closest scope (last in the list). But if the fixture overrides
+ # another fixture, while requesting the super fixture, keep going
+ # in case the super fixture is parametrized (#1953).
+ for fixturedef in reversed(fixture_defs):
+ # Fixture is parametrized, apply it and stop.
+ if fixturedef.params is not None:
+ metafunc.parametrize(
+ argname,
+ fixturedef.params,
+ indirect=True,
+ scope=fixturedef.scope,
+ ids=fixturedef.ids,
+ )
+ break
+
+ # Not requesting the overridden super fixture, stop.
+ if argname not in fixturedef.argnames:
+ break
+
+ # Try next super fixture, if any.
+
+ def pytest_collection_modifyitems(self, items: List[nodes.Item]) -> None:
+ # Separate parametrized setups.
+ items[:] = reorder_items(items)
+
+ def parsefactories(
+ self, node_or_obj, nodeid=NOTSET, unittest: bool = False
+ ) -> None:
+ if nodeid is not NOTSET:
+ holderobj = node_or_obj
+ else:
+ holderobj = node_or_obj.obj
+ nodeid = node_or_obj.nodeid
+ if holderobj in self._holderobjseen:
+ return
+
+ self._holderobjseen.add(holderobj)
+ autousenames = []
+ for name in dir(holderobj):
+            # Ugly workaround for the deprecated fspath property of Node.
+            # TODO: safely generalize.
+ if isinstance(holderobj, nodes.Node) and name == "fspath":
+ continue
+
+ # The attribute can be an arbitrary descriptor, so the attribute
+            # access below can raise. safe_getattr() ignores such exceptions.
+ obj = safe_getattr(holderobj, name, None)
+ marker = getfixturemarker(obj)
+ if not isinstance(marker, FixtureFunctionMarker):
+ # Magic globals with __getattr__ might have got us a wrong
+ # fixture attribute.
+ continue
+
+ if marker.name:
+ name = marker.name
+
+ # During fixture definition we wrap the original fixture function
+ # to issue a warning if called directly, so here we unwrap it in
+ # order to not emit the warning when pytest itself calls the
+ # fixture function.
+ obj = get_real_method(obj, holderobj)
+
+ fixture_def = FixtureDef(
+ fixturemanager=self,
+ baseid=nodeid,
+ argname=name,
+ func=obj,
+ scope=marker.scope,
+ params=marker.params,
+ unittest=unittest,
+ ids=marker.ids,
+ )
+
+ faclist = self._arg2fixturedefs.setdefault(name, [])
+ if fixture_def.has_location:
+ faclist.append(fixture_def)
+ else:
+ # fixturedefs with no location are at the front
+ # so this inserts the current fixturedef after the
+ # existing fixturedefs from external plugins but
+ # before the fixturedefs provided in conftests.
+ i = len([f for f in faclist if not f.has_location])
+ faclist.insert(i, fixture_def)
+ if marker.autouse:
+ autousenames.append(name)
+
+ if autousenames:
+ self._nodeid_autousenames.setdefault(nodeid or "", []).extend(autousenames)
+
+ def getfixturedefs(
+ self, argname: str, nodeid: str
+ ) -> Optional[Sequence[FixtureDef[Any]]]:
+ """Get a list of fixtures which are applicable to the given node id.
+
+ :param str argname: Name of the fixture to search for.
+ :param str nodeid: Full node id of the requesting test.
+ :rtype: Sequence[FixtureDef]
+ """
+ try:
+ fixturedefs = self._arg2fixturedefs[argname]
+ except KeyError:
+ return None
+ return tuple(self._matchfactories(fixturedefs, nodeid))
+
+ def _matchfactories(
+ self, fixturedefs: Iterable[FixtureDef[Any]], nodeid: str
+ ) -> Iterator[FixtureDef[Any]]:
+ parentnodeids = set(nodes.iterparentnodeids(nodeid))
+ for fixturedef in fixturedefs:
+ if fixturedef.baseid in parentnodeids:
+ yield fixturedef
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/freeze_support.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/freeze_support.py
new file mode 100644
index 0000000000..9f8ea231fe
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/freeze_support.py
@@ -0,0 +1,44 @@
+"""Provides a function to report all internal modules for using freezing
+tools."""
+import types
+from typing import Iterator
+from typing import List
+from typing import Union
+
+
+def freeze_includes() -> List[str]:
+ """Return a list of module names used by pytest that should be
+ included by cx_freeze."""
+ import _pytest
+
+ result = list(_iter_all_modules(_pytest))
+ return result
+
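+# A sketch of the intended use in a cx_Freeze setup script, following the
+# pattern from the pytest docs (script and project names are hypothetical):
+#
+#     from cx_Freeze import setup, Executable
+#     import pytest
+#
+#     setup(
+#         name="app_main",
+#         executables=[Executable("app_main.py")],
+#         options={"build_exe": {"includes": pytest.freeze_includes()}},
+#     )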
+
+def _iter_all_modules(
+ package: Union[str, types.ModuleType],
+ prefix: str = "",
+) -> Iterator[str]:
+ """Iterate over the names of all modules that can be found in the given
+ package, recursively.
+
+ >>> import _pytest
+ >>> list(_iter_all_modules(_pytest))
+ ['_pytest._argcomplete', '_pytest._code.code', ...]
+ """
+ import os
+ import pkgutil
+
+ if isinstance(package, str):
+ path = package
+ else:
+ # Type ignored because typeshed doesn't define ModuleType.__path__
+ # (only defined on packages).
+ package_path = package.__path__ # type: ignore[attr-defined]
+ path, prefix = package_path[0], package.__name__ + "."
+ for _, name, is_package in pkgutil.iter_modules([path]):
+ if is_package:
+ for m in _iter_all_modules(os.path.join(path, name), prefix=name + "."):
+ yield prefix + m
+ else:
+ yield prefix + name
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/helpconfig.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/helpconfig.py
new file mode 100644
index 0000000000..aca2cd391e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/helpconfig.py
@@ -0,0 +1,264 @@
+"""Version info, help messages, tracing configuration."""
+import os
+import sys
+from argparse import Action
+from typing import List
+from typing import Optional
+from typing import Union
+
+import pytest
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.config import PrintHelp
+from _pytest.config.argparsing import Parser
+
+
+class HelpAction(Action):
+ """An argparse Action that will raise an exception in order to skip the
+ rest of the argument parsing when --help is passed.
+
+ This prevents argparse from quitting due to missing required arguments
+ when any are defined, for example by ``pytest_addoption``.
+ This is similar to the way that the builtin argparse --help option is
+ implemented by raising SystemExit.
+ """
+
+ def __init__(self, option_strings, dest=None, default=False, help=None):
+ super().__init__(
+ option_strings=option_strings,
+ dest=dest,
+ const=True,
+ default=default,
+ nargs=0,
+ help=help,
+ )
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ setattr(namespace, self.dest, self.const)
+
+ # We should only skip the rest of the parsing after preparse is done.
+ if getattr(parser._parser, "after_preparse", False):
+ raise PrintHelp
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("debugconfig")
+ group.addoption(
+ "--version",
+ "-V",
+ action="count",
+ default=0,
+ dest="version",
+ help="display pytest version and information about plugins. "
+ "When given twice, also display information about plugins.",
+ )
+ group._addoption(
+ "-h",
+ "--help",
+ action=HelpAction,
+ dest="help",
+ help="show help message and configuration info",
+ )
+ group._addoption(
+ "-p",
+ action="append",
+ dest="plugins",
+ default=[],
+ metavar="name",
+ help="early-load given plugin module name or entry point (multi-allowed).\n"
+ "To avoid loading of plugins, use the `no:` prefix, e.g. "
+ "`no:doctest`.",
+ )
+ group.addoption(
+ "--traceconfig",
+ "--trace-config",
+ action="store_true",
+ default=False,
+ help="trace considerations of conftest.py files.",
+ )
+ group.addoption(
+ "--debug",
+ action="store",
+ nargs="?",
+ const="pytestdebug.log",
+ dest="debug",
+ metavar="DEBUG_FILE_NAME",
+ help="store internal tracing debug information in this log file.\n"
+ "This file is opened with 'w' and truncated as a result, care advised.\n"
+ "Defaults to 'pytestdebug.log'.",
+ )
+ group._addoption(
+ "-o",
+ "--override-ini",
+ dest="override_ini",
+ action="append",
+ help='override ini option with "option=value" style, e.g. `-o xfail_strict=True -o cache_dir=cache`.',
+ )
+
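+# For illustration, typical command lines exercising the options registered
+# above (values are examples only):
+#
+#     pytest -p no:doctest                       # skip loading a plugin
+#     pytest --trace-config                      # trace conftest.py handling
+#     pytest --debug                             # write pytestdebug.log
+#     pytest -o xfail_strict=True -o cache_dir=cache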
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_cmdline_parse():
+ outcome = yield
+ config: Config = outcome.get_result()
+
+ if config.option.debug:
+ # --debug | --debug <file.log> was provided.
+ path = config.option.debug
+ debugfile = open(path, "w")
+ debugfile.write(
+ "versions pytest-%s, "
+ "python-%s\ncwd=%s\nargs=%s\n\n"
+ % (
+ pytest.__version__,
+ ".".join(map(str, sys.version_info)),
+ os.getcwd(),
+ config.invocation_params.args,
+ )
+ )
+ config.trace.root.setwriter(debugfile.write)
+ undo_tracing = config.pluginmanager.enable_tracing()
+ sys.stderr.write("writing pytest debug information to %s\n" % path)
+
+ def unset_tracing() -> None:
+ debugfile.close()
+ sys.stderr.write("wrote pytest debug information to %s\n" % debugfile.name)
+ config.trace.root.setwriter(None)
+ undo_tracing()
+
+ config.add_cleanup(unset_tracing)
+
+
+def showversion(config: Config) -> None:
+ if config.option.version > 1:
+ sys.stdout.write(
+ "This is pytest version {}, imported from {}\n".format(
+ pytest.__version__, pytest.__file__
+ )
+ )
+ plugininfo = getpluginversioninfo(config)
+ if plugininfo:
+ for line in plugininfo:
+ sys.stdout.write(line + "\n")
+ else:
+ sys.stdout.write(f"pytest {pytest.__version__}\n")
+
+
+def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:
+ if config.option.version > 0:
+ showversion(config)
+ return 0
+ elif config.option.help:
+ config._do_configure()
+ showhelp(config)
+ config._ensure_unconfigure()
+ return 0
+ return None
+
+
+def showhelp(config: Config) -> None:
+ import textwrap
+
+ reporter = config.pluginmanager.get_plugin("terminalreporter")
+ tw = reporter._tw
+ tw.write(config._parser.optparser.format_help())
+ tw.line()
+ tw.line(
+ "[pytest] ini-options in the first pytest.ini|tox.ini|setup.cfg file found:"
+ )
+ tw.line()
+
+ columns = tw.fullwidth # costly call
+ indent_len = 24 # based on argparse's max_help_position=24
+ indent = " " * indent_len
+ for name in config._parser._ininames:
+ help, type, default = config._parser._inidict[name]
+ if type is None:
+ type = "string"
+ if help is None:
+ raise TypeError(f"help argument cannot be None for {name}")
+ spec = f"{name} ({type}):"
+ tw.write(" %s" % spec)
+ spec_len = len(spec)
+ if spec_len > (indent_len - 3):
+ # Display help starting at a new line.
+ tw.line()
+ helplines = textwrap.wrap(
+ help,
+ columns,
+ initial_indent=indent,
+ subsequent_indent=indent,
+ break_on_hyphens=False,
+ )
+
+ for line in helplines:
+ tw.line(line)
+ else:
+ # Display help starting after the spec, following lines indented.
+ tw.write(" " * (indent_len - spec_len - 2))
+ wrapped = textwrap.wrap(help, columns - indent_len, break_on_hyphens=False)
+
+ if wrapped:
+ tw.line(wrapped[0])
+ for line in wrapped[1:]:
+ tw.line(indent + line)
+
+ tw.line()
+ tw.line("environment variables:")
+ vars = [
+ ("PYTEST_ADDOPTS", "extra command line options"),
+ ("PYTEST_PLUGINS", "comma-separated plugins to load during startup"),
+ ("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "set to disable plugin auto-loading"),
+ ("PYTEST_DEBUG", "set to enable debug tracing of pytest's internals"),
+ ]
+ for name, help in vars:
+ tw.line(f" {name:<24} {help}")
+ tw.line()
+ tw.line()
+
+ tw.line("to see available markers type: pytest --markers")
+ tw.line("to see available fixtures type: pytest --fixtures")
+ tw.line(
+ "(shown according to specified file_or_dir or current dir "
+ "if not specified; fixtures with leading '_' are only shown "
+ "with the '-v' option"
+ )
+
+ for warningreport in reporter.stats.get("warnings", []):
+ tw.line("warning : " + warningreport.message, red=True)
+ return
+
+
+conftest_options = [("pytest_plugins", "list of plugin names to load")]
+
+
+def getpluginversioninfo(config: Config) -> List[str]:
+ lines = []
+ plugininfo = config.pluginmanager.list_plugin_distinfo()
+ if plugininfo:
+ lines.append("setuptools registered plugins:")
+ for plugin, dist in plugininfo:
+ loc = getattr(plugin, "__file__", repr(plugin))
+ content = f"{dist.project_name}-{dist.version} at {loc}"
+ lines.append(" " + content)
+ return lines
+
+
+def pytest_report_header(config: Config) -> List[str]:
+ lines = []
+ if config.option.debug or config.option.traceconfig:
+ lines.append(f"using: pytest-{pytest.__version__}")
+
+ verinfo = getpluginversioninfo(config)
+ if verinfo:
+ lines.extend(verinfo)
+
+ if config.option.traceconfig:
+ lines.append("active plugins:")
+ items = config.pluginmanager.list_name_plugin()
+ for name, plugin in items:
+ if hasattr(plugin, "__file__"):
+ r = plugin.__file__
+ else:
+ r = repr(plugin)
+ lines.append(f" {name:<20}: {r}")
+ return lines
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/hookspec.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/hookspec.py
new file mode 100644
index 0000000000..79251315d8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/hookspec.py
@@ -0,0 +1,928 @@
+"""Hook specifications for pytest plugins which are invoked by pytest itself
+and by builtin plugins."""
+from pathlib import Path
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from pluggy import HookspecMarker
+
+from _pytest.deprecated import WARNING_CAPTURED_HOOK
+from _pytest.deprecated import WARNING_CMDLINE_PREPARSE_HOOK
+
+if TYPE_CHECKING:
+ import pdb
+ import warnings
+ from typing_extensions import Literal
+
+ from _pytest._code.code import ExceptionRepr
+ from _pytest.code import ExceptionInfo
+ from _pytest.config import Config
+ from _pytest.config import ExitCode
+ from _pytest.config import PytestPluginManager
+ from _pytest.config import _PluggyPlugin
+ from _pytest.config.argparsing import Parser
+ from _pytest.fixtures import FixtureDef
+ from _pytest.fixtures import SubRequest
+ from _pytest.main import Session
+ from _pytest.nodes import Collector
+ from _pytest.nodes import Item
+ from _pytest.outcomes import Exit
+ from _pytest.python import Function
+ from _pytest.python import Metafunc
+ from _pytest.python import Module
+ from _pytest.python import PyCollector
+ from _pytest.reports import CollectReport
+ from _pytest.reports import TestReport
+ from _pytest.runner import CallInfo
+ from _pytest.terminal import TerminalReporter
+ from _pytest.compat import LEGACY_PATH
+
+
+hookspec = HookspecMarker("pytest")
+
+# -------------------------------------------------------------------------
+# Initialization hooks called for every plugin
+# -------------------------------------------------------------------------
+
+
+@hookspec(historic=True)
+def pytest_addhooks(pluginmanager: "PytestPluginManager") -> None:
+ """Called at plugin registration time to allow adding new hooks via a call to
+ ``pluginmanager.add_hookspecs(module_or_class, prefix)``.
+
+ :param pytest.PytestPluginManager pluginmanager: The pytest plugin manager.
+
+ .. note::
+ This hook is incompatible with ``hookwrapper=True``.
+ """
+
+
+@hookspec(historic=True)
+def pytest_plugin_registered(
+ plugin: "_PluggyPlugin", manager: "PytestPluginManager"
+) -> None:
+ """A new pytest plugin got registered.
+
+ :param plugin: The plugin module or instance.
+ :param pytest.PytestPluginManager manager: pytest plugin manager.
+
+ .. note::
+ This hook is incompatible with ``hookwrapper=True``.
+ """
+
+
+@hookspec(historic=True)
+def pytest_addoption(parser: "Parser", pluginmanager: "PytestPluginManager") -> None:
+ """Register argparse-style options and ini-style config values,
+ called once at the beginning of a test run.
+
+ .. note::
+
+ This function should be implemented only in plugins or ``conftest.py``
+ files situated at the tests root directory due to how pytest
+ :ref:`discovers plugins during startup <pluginorder>`.
+
+ :param pytest.Parser parser:
+ To add command line options, call
+ :py:func:`parser.addoption(...) <pytest.Parser.addoption>`.
+ To add ini-file values call :py:func:`parser.addini(...)
+ <pytest.Parser.addini>`.
+
+ :param pytest.PytestPluginManager pluginmanager:
+ The pytest plugin manager, which can be used to install :py:func:`hookspec`'s
+ or :py:func:`hookimpl`'s and allow one plugin to call another plugin's hooks
+ to change how command line options are added.
+
+ Options can later be accessed through the
+ :py:class:`config <pytest.Config>` object, respectively:
+
+ - :py:func:`config.getoption(name) <pytest.Config.getoption>` to
+ retrieve the value of a command line option.
+
+ - :py:func:`config.getini(name) <pytest.Config.getini>` to retrieve
+ a value read from an ini-style file.
+
+ The config object is passed around on many internal objects via the ``.config``
+ attribute or can be retrieved as the ``pytestconfig`` fixture.
+
+ .. note::
+ This hook is incompatible with ``hookwrapper=True``.
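+
+ A minimal illustrative sketch of a ``conftest.py`` implementation (the
+ ``--slow`` flag and ``slow_timeout`` ini value are hypothetical names, not
+ part of pytest):
+
+ .. code-block:: python
+
+ def pytest_addoption(parser):
+ parser.addoption(
+ "--slow", action="store_true", default=False, help="also run slow tests"
+ )
+ parser.addini("slow_timeout", "timeout for slow tests", default="30")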
+ """
+
+
+@hookspec(historic=True)
+def pytest_configure(config: "Config") -> None:
+ """Allow plugins and conftest files to perform initial configuration.
+
+ This hook is called for every plugin and initial conftest file
+ after command line options have been parsed.
+
+ After that, the hook is called for other conftest files as they are
+ imported.
+
+ .. note::
+ This hook is incompatible with ``hookwrapper=True``.
+
+ :param pytest.Config config: The pytest config object.
+ """
+
+
+# -------------------------------------------------------------------------
+# Bootstrapping hooks called for plugins registered early enough:
+# internal and 3rd party plugins.
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_cmdline_parse(
+ pluginmanager: "PytestPluginManager", args: List[str]
+) -> Optional["Config"]:
+ """Return an initialized config object, parsing the specified args.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+
+ .. note::
+ This hook will only be called for plugin classes passed to the
+ ``plugins`` arg when using `pytest.main`_ to perform an in-process
+ test run.
+
+ :param pytest.PytestPluginManager pluginmanager: The pytest plugin manager.
+ :param List[str] args: List of arguments passed on the command line.
+ """
+
+
+@hookspec(warn_on_impl=WARNING_CMDLINE_PREPARSE_HOOK)
+def pytest_cmdline_preparse(config: "Config", args: List[str]) -> None:
+ """(**Deprecated**) modify command line arguments before option parsing.
+
+ This hook is considered deprecated and will be removed in a future pytest version. Consider
+ using :hook:`pytest_load_initial_conftests` instead.
+
+ .. note::
+ This hook will not be called for ``conftest.py`` files, only for setuptools plugins.
+
+ :param pytest.Config config: The pytest config object.
+ :param List[str] args: Arguments passed on the command line.
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_cmdline_main(config: "Config") -> Optional[Union["ExitCode", int]]:
+ """Called for performing the main command line action. The default
+ implementation will invoke the configure hooks and runtest_mainloop.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+
+ :param pytest.Config config: The pytest config object.
+ """
+
+
+def pytest_load_initial_conftests(
+ early_config: "Config", parser: "Parser", args: List[str]
+) -> None:
+ """Called to implement the loading of initial conftest files ahead
+ of command line option parsing.
+
+ .. note::
+ This hook will not be called for ``conftest.py`` files, only for setuptools plugins.
+
+ :param pytest.Config early_config: The pytest config object.
+ :param List[str] args: Arguments passed on the command line.
+ :param pytest.Parser parser: To add command line options.
+ """
+
+
+# -------------------------------------------------------------------------
+# collection hooks
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_collection(session: "Session") -> Optional[object]:
+ """Perform the collection phase for the given session.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+ The return value is not used, but only stops further processing.
+
+ The default collection phase is this (see individual hooks for full details):
+
+ 1. Starting from ``session`` as the initial collector:
+
+ 1. ``pytest_collectstart(collector)``
+ 2. ``report = pytest_make_collect_report(collector)``
+ 3. ``pytest_exception_interact(collector, call, report)`` if an interactive exception occurred
+ 4. For each collected node:
+
+ 1. If an item, ``pytest_itemcollected(item)``
+ 2. If a collector, recurse into it.
+
+ 5. ``pytest_collectreport(report)``
+
+ 2. ``pytest_collection_modifyitems(session, config, items)``
+
+ 1. ``pytest_deselected(items)`` for any deselected items (may be called multiple times)
+
+ 3. ``pytest_collection_finish(session)``
+ 4. Set ``session.items`` to the list of collected items
+ 5. Set ``session.testscollected`` to the number of collected items
+
+ You can implement this hook to only perform some action before collection,
+ for example the terminal plugin uses it to start displaying the collection
+ counter (and returns `None`).
+
+ :param pytest.Session session: The pytest session object.
+ """
+
+
+def pytest_collection_modifyitems(
+ session: "Session", config: "Config", items: List["Item"]
+) -> None:
+ """Called after collection has been performed. May filter or re-order
+ the items in-place.
+
+ :param pytest.Session session: The pytest session object.
+ :param pytest.Config config: The pytest config object.
+ :param List[pytest.Item] items: List of item objects.
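+
+ An illustrative sketch (the ``slow`` marker name is hypothetical) that
+ re-orders the collected items so that slow-marked tests run last:
+
+ .. code-block:: python
+
+ def pytest_collection_modifyitems(config, items):
+ items.sort(key=lambda item: item.get_closest_marker("slow") is not None)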
+ """
+
+
+def pytest_collection_finish(session: "Session") -> None:
+ """Called after collection has been performed and modified.
+
+ :param pytest.Session session: The pytest session object.
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_ignore_collect(
+ collection_path: Path, path: "LEGACY_PATH", config: "Config"
+) -> Optional[bool]:
+ """Return True to prevent considering this path for collection.
+
+ This hook is consulted for all files and directories prior to calling
+ more specific hooks.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+
+ :param pathlib.Path collection_path: The path to analyze.
+ :param LEGACY_PATH path: The path to analyze (deprecated).
+ :param pytest.Config config: The pytest config object.
+
+ .. versionchanged:: 7.0.0
+ The ``collection_path`` parameter was added as a :class:`pathlib.Path`
+ equivalent of the ``path`` parameter. The ``path`` parameter
+ has been deprecated.
+ """
+
+
+def pytest_collect_file(
+ file_path: Path, path: "LEGACY_PATH", parent: "Collector"
+) -> "Optional[Collector]":
+ """Create a Collector for the given path, or None if not relevant.
+
+ The new node needs to have the specified ``parent`` as a parent.
+
+ :param pathlib.Path file_path: The path to analyze.
+ :param LEGACY_PATH path: The path to collect (deprecated).
+
+ .. versionchanged:: 7.0.0
+ The ``file_path`` parameter was added as a :class:`pathlib.Path`
+ equivalent of the ``path`` parameter. The ``path`` parameter
+ has been deprecated.
+ """
+
+
+# logging hooks for collection
+
+
+def pytest_collectstart(collector: "Collector") -> None:
+ """Collector starts collecting."""
+
+
+def pytest_itemcollected(item: "Item") -> None:
+ """We just collected a test item."""
+
+
+def pytest_collectreport(report: "CollectReport") -> None:
+ """Collector finished collecting."""
+
+
+def pytest_deselected(items: Sequence["Item"]) -> None:
+ """Called for deselected test items, e.g. by keyword.
+
+ May be called multiple times.
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_make_collect_report(collector: "Collector") -> "Optional[CollectReport]":
+ """Perform :func:`collector.collect() <pytest.Collector.collect>` and return
+ a :class:`~pytest.CollectReport`.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+ """
+
+
+# -------------------------------------------------------------------------
+# Python test function related hooks
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_pycollect_makemodule(
+ module_path: Path, path: "LEGACY_PATH", parent
+) -> Optional["Module"]:
+ """Return a Module collector or None for the given path.
+
+ This hook will be called for each matching test module path.
+ The pytest_collect_file hook needs to be used if you want to
+ create test modules for files that do not match as a test module.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+
+ :param pathlib.Path module_path: The path of the module to collect.
+ :param LEGACY_PATH path: The path of the module to collect (deprecated).
+
+ .. versionchanged:: 7.0.0
+ The ``module_path`` parameter was added as a :class:`pathlib.Path`
+ equivalent of the ``path`` parameter.
+
+ The ``path`` parameter has been deprecated in favor of ``fspath``.
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_pycollect_makeitem(
+ collector: "PyCollector", name: str, obj: object
+) -> Union[None, "Item", "Collector", List[Union["Item", "Collector"]]]:
+ """Return a custom item/collector for a Python object in a module, or None.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_pyfunc_call(pyfuncitem: "Function") -> Optional[object]:
+ """Call underlying test function.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+ """
+
+
+def pytest_generate_tests(metafunc: "Metafunc") -> None:
+ """Generate (multiple) parametrized calls to a test function."""
+
+
+@hookspec(firstresult=True)
+def pytest_make_parametrize_id(
+ config: "Config", val: object, argname: str
+) -> Optional[str]:
+ """Return a user-friendly string representation of the given ``val``
+ that will be used by @pytest.mark.parametrize calls, or None if the hook
+ doesn't know about ``val``.
+
+ The parameter name is available as ``argname``, if required.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+
+ :param pytest.Config config: The pytest config object.
+ :param val: The parametrized value.
+ :param str argname: The automatic parameter name produced by pytest.
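+
+ An illustrative sketch (the ``Currency`` class is hypothetical):
+
+ .. code-block:: python
+
+ def pytest_make_parametrize_id(config, val, argname):
+ if isinstance(val, Currency):
+ return f"{argname}={val.code}"
+ return None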
+ """
+
+
+# -------------------------------------------------------------------------
+# runtest related hooks
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_runtestloop(session: "Session") -> Optional[object]:
+ """Perform the main runtest loop (after collection finished).
+
+ The default hook implementation performs the runtest protocol for all items
+ collected in the session (``session.items``), unless the collection failed
+ or the ``collectonly`` pytest option is set.
+
+ If at any point :py:func:`pytest.exit` is called, the loop is
+ terminated immediately.
+
+ If at any point ``session.shouldfail`` or ``session.shouldstop`` are set, the
+ loop is terminated after the runtest protocol for the current item is finished.
+
+ :param pytest.Session session: The pytest session object.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+ The return value is not used, but only stops further processing.
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_runtest_protocol(
+ item: "Item", nextitem: "Optional[Item]"
+) -> Optional[object]:
+ """Perform the runtest protocol for a single test item.
+
+ The default runtest protocol is this (see individual hooks for full details):
+
+ - ``pytest_runtest_logstart(nodeid, location)``
+
+ - Setup phase:
+ - ``call = pytest_runtest_setup(item)`` (wrapped in ``CallInfo(when="setup")``)
+ - ``report = pytest_runtest_makereport(item, call)``
+ - ``pytest_runtest_logreport(report)``
+ - ``pytest_exception_interact(call, report)`` if an interactive exception occurred
+
+ - Call phase, if the setup passed and the ``setuponly`` pytest option is not set:
+ - ``call = pytest_runtest_call(item)`` (wrapped in ``CallInfo(when="call")``)
+ - ``report = pytest_runtest_makereport(item, call)``
+ - ``pytest_runtest_logreport(report)``
+ - ``pytest_exception_interact(call, report)`` if an interactive exception occurred
+
+ - Teardown phase:
+ - ``call = pytest_runtest_teardown(item, nextitem)`` (wrapped in ``CallInfo(when="teardown")``)
+ - ``report = pytest_runtest_makereport(item, call)``
+ - ``pytest_runtest_logreport(report)``
+ - ``pytest_exception_interact(call, report)`` if an interactive exception occurred
+
+ - ``pytest_runtest_logfinish(nodeid, location)``
+
+ :param item: Test item for which the runtest protocol is performed.
+ :param nextitem: The scheduled-to-be-next test item (or None if this is the end my friend).
+
+ Stops at first non-None result, see :ref:`firstresult`.
+ The return value is not used, but only stops further processing.
+ """
+
+
+def pytest_runtest_logstart(
+ nodeid: str, location: Tuple[str, Optional[int], str]
+) -> None:
+ """Called at the start of running the runtest protocol for a single item.
+
+ See :hook:`pytest_runtest_protocol` for a description of the runtest protocol.
+
+ :param str nodeid: Full node ID of the item.
+ :param location: A tuple of ``(filename, lineno, testname)``.
+ """
+
+
+def pytest_runtest_logfinish(
+ nodeid: str, location: Tuple[str, Optional[int], str]
+) -> None:
+ """Called at the end of running the runtest protocol for a single item.
+
+ See :hook:`pytest_runtest_protocol` for a description of the runtest protocol.
+
+ :param str nodeid: Full node ID of the item.
+ :param location: A tuple of ``(filename, lineno, testname)``.
+ """
+
+
+def pytest_runtest_setup(item: "Item") -> None:
+ """Called to perform the setup phase for a test item.
+
+ The default implementation runs ``setup()`` on ``item`` and all of its
+ parents (which haven't been setup yet). This includes obtaining the
+ values of fixtures required by the item (which haven't been obtained
+ yet).
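+
+ An illustrative sketch from a ``conftest.py`` that skips items carrying a
+ hypothetical ``needs_network`` marker:
+
+ .. code-block:: python
+
+ import pytest
+
+ def pytest_runtest_setup(item):
+ if item.get_closest_marker("needs_network"):
+ pytest.skip("network access is not available here")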
+ """
+
+
+def pytest_runtest_call(item: "Item") -> None:
+ """Called to run the test for test item (the call phase).
+
+ The default implementation calls ``item.runtest()``.
+ """
+
+
+def pytest_runtest_teardown(item: "Item", nextitem: Optional["Item"]) -> None:
+ """Called to perform the teardown phase for a test item.
+
+ The default implementation runs the finalizers and calls ``teardown()``
+ on ``item`` and all of its parents (which need to be torn down). This
+ includes running the teardown phase of fixtures required by the item (if
+ they go out of scope).
+
+ :param nextitem:
+ The scheduled-to-be-next test item (None if no further test item is
+ scheduled). This argument is used to perform exact teardowns, i.e.
+ calling just enough finalizers so that nextitem only needs to call
+ setup functions.
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_runtest_makereport(
+ item: "Item", call: "CallInfo[None]"
+) -> Optional["TestReport"]:
+ """Called to create a :class:`~pytest.TestReport` for each of
+ the setup, call and teardown runtest phases of a test item.
+
+ See :hook:`pytest_runtest_protocol` for a description of the runtest protocol.
+
+ :param call: The :class:`~pytest.CallInfo` for the phase.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+ """
+
+
+def pytest_runtest_logreport(report: "TestReport") -> None:
+ """Process the :class:`~pytest.TestReport` produced for each
+ of the setup, call and teardown runtest phases of an item.
+
+ See :hook:`pytest_runtest_protocol` for a description of the runtest protocol.
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_report_to_serializable(
+ config: "Config",
+ report: Union["CollectReport", "TestReport"],
+) -> Optional[Dict[str, Any]]:
+ """Serialize the given report object into a data structure suitable for
+ sending over the wire, e.g. converted to JSON."""
+
+
+@hookspec(firstresult=True)
+def pytest_report_from_serializable(
+ config: "Config",
+ data: Dict[str, Any],
+) -> Optional[Union["CollectReport", "TestReport"]]:
+ """Restore a report object previously serialized with
+ :hook:`pytest_report_to_serializable`."""
+
+
+# -------------------------------------------------------------------------
+# Fixture related hooks
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_fixture_setup(
+ fixturedef: "FixtureDef[Any]", request: "SubRequest"
+) -> Optional[object]:
+ """Perform fixture setup execution.
+
+ :returns: The return value of the call to the fixture function.
+
+ Stops at first non-None result, see :ref:`firstresult`.
+
+ .. note::
+ If the fixture function returns None, other implementations of
+ this hook function will continue to be called, according to the
+ behavior of the :ref:`firstresult` option.
+ """
+
+
+def pytest_fixture_post_finalizer(
+ fixturedef: "FixtureDef[Any]", request: "SubRequest"
+) -> None:
+ """Called after fixture teardown, but before the cache is cleared, so
+ the fixture result ``fixturedef.cached_result`` is still available (not
+ ``None``)."""
+
+
+# -------------------------------------------------------------------------
+# test session related hooks
+# -------------------------------------------------------------------------
+
+
+def pytest_sessionstart(session: "Session") -> None:
+ """Called after the ``Session`` object has been created and before performing collection
+ and entering the run test loop.
+
+ :param pytest.Session session: The pytest session object.
+ """
+
+
+def pytest_sessionfinish(
+ session: "Session",
+ exitstatus: Union[int, "ExitCode"],
+) -> None:
+ """Called after whole test run finished, right before returning the exit status to the system.
+
+ :param pytest.Session session: The pytest session object.
+ :param int exitstatus: The status which pytest will return to the system.
+ """
+
+
+def pytest_unconfigure(config: "Config") -> None:
+ """Called before test process is exited.
+
+ :param pytest.Config config: The pytest config object.
+ """
+
+
+# -------------------------------------------------------------------------
+# hooks for customizing the assert methods
+# -------------------------------------------------------------------------
+
+
+def pytest_assertrepr_compare(
+ config: "Config", op: str, left: object, right: object
+) -> Optional[List[str]]:
+ """Return explanation for comparisons in failing assert expressions.
+
+ Return None for no custom explanation, otherwise return a list
+ of strings. The strings will be joined by newlines but any newlines
+ *in* a string will be escaped. Note that all but the first line will
+ be indented slightly, the intention is for the first line to be a summary.
+
+ :param pytest.Config config: The pytest config object.
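+
+ An illustrative sketch for a hypothetical ``Money`` class:
+
+ .. code-block:: python
+
+ def pytest_assertrepr_compare(config, op, left, right):
+ if isinstance(left, Money) and isinstance(right, Money) and op == "==":
+ return [
+ "Comparing Money instances:",
+ f" amounts: {left.amount} != {right.amount}",
+ ]
+ return None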
+ """
+
+
+def pytest_assertion_pass(item: "Item", lineno: int, orig: str, expl: str) -> None:
+ """Called whenever an assertion passes.
+
+ .. versionadded:: 5.0
+
+ Use this hook to do some processing after a passing assertion.
+ The original assertion information is available in the `orig` string
+ and the pytest introspected assertion information is available in the
+ `expl` string.
+
+ This hook must be explicitly enabled by the ``enable_assertion_pass_hook``
+ ini-file option:
+
+ .. code-block:: ini
+
+ [pytest]
+ enable_assertion_pass_hook=true
+
+ You need to **clean the .pyc** files in your project directory and interpreter libraries
+ when enabling this option, as assertions will need to be re-written.
+
+ :param pytest.Item item: pytest item object of current test.
+ :param int lineno: Line number of the assert statement.
+ :param str orig: String with the original assertion.
+ :param str expl: String with the assert explanation.
+ """
+
+
+# -------------------------------------------------------------------------
+# Hooks for influencing reporting (invoked from _pytest_terminal).
+# -------------------------------------------------------------------------
+
+
+def pytest_report_header(
+ config: "Config", start_path: Path, startdir: "LEGACY_PATH"
+) -> Union[str, List[str]]:
+ """Return a string or list of strings to be displayed as header info for terminal reporting.
+
+ :param pytest.Config config: The pytest config object.
+ :param Path start_path: The starting dir.
+ :param LEGACY_PATH startdir: The starting dir (deprecated).
+
+ .. note::
+
+ Lines returned by a plugin are displayed before those of plugins which
+ ran before it.
+ If you want to have your line(s) displayed first, use
+ :ref:`trylast=True <plugin-hookorder>`.
+
+ .. note::
+
+ This function should be implemented only in plugins or ``conftest.py``
+ files situated at the tests root directory due to how pytest
+ :ref:`discovers plugins during startup <pluginorder>`.
+
+ .. versionchanged:: 7.0.0
+ The ``start_path`` parameter was added as a :class:`pathlib.Path`
+ equivalent of the ``startdir`` parameter. The ``startdir`` parameter
+ has been deprecated.
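+
+ A minimal illustrative sketch (the reported text is made up):
+
+ .. code-block:: python
+
+ def pytest_report_header(config):
+ return ["project deps: example-lib 1.2.3"]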
+ """
+
+
+def pytest_report_collectionfinish(
+ config: "Config",
+ start_path: Path,
+ startdir: "LEGACY_PATH",
+ items: Sequence["Item"],
+) -> Union[str, List[str]]:
+ """Return a string or list of strings to be displayed after collection
+ has finished successfully.
+
+ These strings will be displayed after the standard "collected X items" message.
+
+ .. versionadded:: 3.2
+
+ :param pytest.Config config: The pytest config object.
+ :param Path start_path: The starting dir.
+ :param LEGACY_PATH startdir: The starting dir (deprecated).
+ :param items: List of pytest items that are going to be executed; this list should not be modified.
+
+ .. note::
+
+ Lines returned by a plugin are displayed before those of plugins which
+ ran before it.
+ If you want to have your line(s) displayed first, use
+ :ref:`trylast=True <plugin-hookorder>`.
+
+ .. versionchanged:: 7.0.0
+ The ``start_path`` parameter was added as a :class:`pathlib.Path`
+ equivalent of the ``startdir`` parameter. The ``startdir`` parameter
+ has been deprecated.
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_report_teststatus(
+ report: Union["CollectReport", "TestReport"], config: "Config"
+) -> Tuple[str, str, Union[str, Mapping[str, bool]]]:
+ """Return result-category, shortletter and verbose word for status
+ reporting.
+
+ The result-category is a category in which to count the result, for
+ example "passed", "skipped", "error" or the empty string.
+
+ The shortletter is shown as testing progresses, for example ".", "s",
+ "E" or the empty string.
+
+ The verbose word is shown as testing progresses in verbose mode, for
+ example "PASSED", "SKIPPED", "ERROR" or the empty string.
+
+ pytest may style these implicitly according to the report outcome.
+ To provide explicit styling, return a tuple for the verbose word,
+ for example ``"rerun", "R", ("RERUN", {"yellow": True})``.
+
+ :param report: The report object whose status is to be returned.
+ :param config: The pytest config object.
+
+ Stops at first non-None result, see :ref:`firstresult`.
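+
+ An illustrative sketch that reports xfailed calls with a custom verbose word:
+
+ .. code-block:: python
+
+ def pytest_report_teststatus(report, config):
+ if report.when == "call" and hasattr(report, "wasxfail"):
+ return "xfailed", "x", ("XFAIL", {"yellow": True})
+ return None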
+ """
+
+
+def pytest_terminal_summary(
+ terminalreporter: "TerminalReporter",
+ exitstatus: "ExitCode",
+ config: "Config",
+) -> None:
+ """Add a section to terminal summary reporting.
+
+ :param _pytest.terminal.TerminalReporter terminalreporter: The internal terminal reporter object.
+ :param int exitstatus: The exit status that will be reported back to the OS.
+ :param pytest.Config config: The pytest config object.
+
+ .. versionadded:: 4.2
+ The ``config`` parameter.
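+
+ An illustrative sketch that appends a custom section to the summary:
+
+ .. code-block:: python
+
+ def pytest_terminal_summary(terminalreporter, exitstatus, config):
+ terminalreporter.section("custom summary")
+ terminalreporter.line(f"run finished with exit status {exitstatus!r}")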
+ """
+
+
+@hookspec(historic=True, warn_on_impl=WARNING_CAPTURED_HOOK)
+def pytest_warning_captured(
+ warning_message: "warnings.WarningMessage",
+ when: "Literal['config', 'collect', 'runtest']",
+ item: Optional["Item"],
+ location: Optional[Tuple[str, int, str]],
+) -> None:
+ """(**Deprecated**) Process a warning captured by the internal pytest warnings plugin.
+
+ .. deprecated:: 6.0
+
+ This hook is considered deprecated and will be removed in a future pytest version.
+ Use :func:`pytest_warning_recorded` instead.
+
+ :param warnings.WarningMessage warning_message:
+ The captured warning. This is the same object produced by :py:func:`warnings.catch_warnings`, and contains
+ the same attributes as the parameters of :py:func:`warnings.showwarning`.
+
+ :param str when:
+ Indicates when the warning was captured. Possible values:
+
+ * ``"config"``: during pytest configuration/initialization stage.
+ * ``"collect"``: during test collection.
+ * ``"runtest"``: during test execution.
+
+ :param pytest.Item|None item:
+ The item being executed if ``when`` is ``"runtest"``, otherwise ``None``.
+
+ :param tuple location:
+ When available, holds information about the execution context of the captured
+ warning (filename, linenumber, function). ``function`` evaluates to <module>
+ when the execution context is at the module level.
+ """
+
+
+@hookspec(historic=True)
+def pytest_warning_recorded(
+ warning_message: "warnings.WarningMessage",
+ when: "Literal['config', 'collect', 'runtest']",
+ nodeid: str,
+ location: Optional[Tuple[str, int, str]],
+) -> None:
+ """Process a warning captured by the internal pytest warnings plugin.
+
+ :param warnings.WarningMessage warning_message:
+ The captured warning. This is the same object produced by :py:func:`warnings.catch_warnings`, and contains
+ the same attributes as the parameters of :py:func:`warnings.showwarning`.
+
+ :param str when:
+ Indicates when the warning was captured. Possible values:
+
+ * ``"config"``: during pytest configuration/initialization stage.
+ * ``"collect"``: during test collection.
+ * ``"runtest"``: during test execution.
+
+ :param str nodeid:
+ Full id of the item.
+
+ :param tuple|None location:
+ When available, holds information about the execution context of the captured
+ warning (filename, linenumber, function). ``function`` evaluates to <module>
+ when the execution context is at the module level.
+
+ .. versionadded:: 6.0
+ """
+
+
+# -------------------------------------------------------------------------
+# Hooks for influencing skipping
+# -------------------------------------------------------------------------
+
+
+def pytest_markeval_namespace(config: "Config") -> Dict[str, Any]:
+ """Called when constructing the globals dictionary used for
+ evaluating string conditions in xfail/skipif markers.
+
+ This is useful when the condition for a marker requires
+ objects that are expensive or impossible to obtain during
+ collection time, which is required by normal boolean
+ conditions.
+
+ .. versionadded:: 6.2
+
+ :param pytest.Config config: The pytest config object.
+ :returns: A dictionary of additional globals to add.
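+
+ An illustrative sketch exposing a hypothetical ``build_flags`` mapping to
+ ``skipif``/``xfail`` condition strings:
+
+ .. code-block:: python
+
+ def pytest_markeval_namespace(config):
+ return {"build_flags": {"has_gpu": False}}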
+ """
+
+
+# -------------------------------------------------------------------------
+# error handling and internal debugging hooks
+# -------------------------------------------------------------------------
+
+
+def pytest_internalerror(
+ excrepr: "ExceptionRepr",
+ excinfo: "ExceptionInfo[BaseException]",
+) -> Optional[bool]:
+ """Called for internal errors.
+
+ Return True to suppress the fallback handling of printing an
+ INTERNALERROR message directly to sys.stderr.
+ """
+
+
+def pytest_keyboard_interrupt(
+ excinfo: "ExceptionInfo[Union[KeyboardInterrupt, Exit]]",
+) -> None:
+ """Called for keyboard interrupt."""
+
+
+def pytest_exception_interact(
+ node: Union["Item", "Collector"],
+ call: "CallInfo[Any]",
+ report: Union["CollectReport", "TestReport"],
+) -> None:
+ """Called when an exception was raised which can potentially be
+ interactively handled.
+
+ May be called during collection (see :hook:`pytest_make_collect_report`),
+ in which case ``report`` is a :class:`CollectReport`.
+
+ May be called during runtest of an item (see :hook:`pytest_runtest_protocol`),
+ in which case ``report`` is a :class:`TestReport`.
+
+ This hook is not called if the exception that was raised is an internal
+ exception like ``skip.Exception``.
+ """
+
+
+def pytest_enter_pdb(config: "Config", pdb: "pdb.Pdb") -> None:
+ """Called upon pdb.set_trace().
+
+ Can be used by plugins to take special action just before the python
+ debugger enters interactive mode.
+
+ :param pytest.Config config: The pytest config object.
+ :param pdb.Pdb pdb: The Pdb instance.
+ """
+
+
+def pytest_leave_pdb(config: "Config", pdb: "pdb.Pdb") -> None:
+ """Called when leaving pdb (e.g. with continue after pdb.set_trace()).
+
+ Can be used by plugins to take special action just after the python
+ debugger leaves interactive mode.
+
+ :param pytest.Config config: The pytest config object.
+ :param pdb.Pdb pdb: The Pdb instance.
+ """
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/junitxml.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/junitxml.py
new file mode 100644
index 0000000000..4af5fbab0c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/junitxml.py
@@ -0,0 +1,696 @@
+"""Report test results in JUnit-XML format, for use with Jenkins and build
+integration servers.
+
+Based on initial code from Ross Lawley.
+
+Output conforms to
+https://github.com/jenkinsci/xunit-plugin/blob/master/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd
+"""
+import functools
+import os
+import platform
+import re
+import xml.etree.ElementTree as ET
+from datetime import datetime
+from typing import Callable
+from typing import Dict
+from typing import List
+from typing import Match
+from typing import Optional
+from typing import Tuple
+from typing import Union
+
+import pytest
+from _pytest import nodes
+from _pytest import timing
+from _pytest._code.code import ExceptionRepr
+from _pytest._code.code import ReprFileLocation
+from _pytest.config import Config
+from _pytest.config import filename_arg
+from _pytest.config.argparsing import Parser
+from _pytest.fixtures import FixtureRequest
+from _pytest.reports import TestReport
+from _pytest.stash import StashKey
+from _pytest.terminal import TerminalReporter
+
+
+xml_key = StashKey["LogXML"]()
+
+
+def bin_xml_escape(arg: object) -> str:
+ r"""Visually escape invalid XML characters.
+
+ For example, transforms
+ 'hello\aworld\b'
+ into
+ 'hello#x07world#x08'
+ Note that the #xABs are *not* XML escapes - missing the ampersand &#xAB.
+ The idea is to escape visually for the user rather than for XML itself.
+ """
+
+ def repl(matchobj: Match[str]) -> str:
+ i = ord(matchobj.group())
+ if i <= 0xFF:
+ return "#x%02X" % i
+ else:
+ return "#x%04X" % i
+
+ # The spec range of valid chars is:
+ # Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
+ # For an unknown(?) reason, we disallow #x7F (DEL) as well.
+ illegal_xml_re = (
+ "[^\u0009\u000A\u000D\u0020-\u007E\u0080-\uD7FF\uE000-\uFFFD\u10000-\u10FFFF]"
+ )
+ return re.sub(illegal_xml_re, repl, str(arg))
+
+
+def merge_family(left, right) -> None:
+ result = {}
+ for kl, vl in left.items():
+ for kr, vr in right.items():
+ if not isinstance(vl, list):
+ raise TypeError(type(vl))
+ result[kl] = vl + vr
+ left.update(result)
+
+
+families = {}
+families["_base"] = {"testcase": ["classname", "name"]}
+families["_base_legacy"] = {"testcase": ["file", "line", "url"]}
+
+# xUnit 1.x inherits legacy attributes.
+families["xunit1"] = families["_base"].copy()
+merge_family(families["xunit1"], families["_base_legacy"])
+
+# xUnit 2.x uses strict base attributes.
+families["xunit2"] = families["_base"]
+
+
+class _NodeReporter:
+ def __init__(self, nodeid: Union[str, TestReport], xml: "LogXML") -> None:
+ self.id = nodeid
+ self.xml = xml
+ self.add_stats = self.xml.add_stats
+ self.family = self.xml.family
+ self.duration = 0
+ self.properties: List[Tuple[str, str]] = []
+ self.nodes: List[ET.Element] = []
+ self.attrs: Dict[str, str] = {}
+
+ def append(self, node: ET.Element) -> None:
+ self.xml.add_stats(node.tag)
+ self.nodes.append(node)
+
+ def add_property(self, name: str, value: object) -> None:
+ self.properties.append((str(name), bin_xml_escape(value)))
+
+ def add_attribute(self, name: str, value: object) -> None:
+ self.attrs[str(name)] = bin_xml_escape(value)
+
+ def make_properties_node(self) -> Optional[ET.Element]:
+ """Return a Junit node containing custom properties, if any."""
+ if self.properties:
+ properties = ET.Element("properties")
+ for name, value in self.properties:
+ properties.append(ET.Element("property", name=name, value=value))
+ return properties
+ return None
+
+ def record_testreport(self, testreport: TestReport) -> None:
+ names = mangle_test_address(testreport.nodeid)
+ existing_attrs = self.attrs
+ classnames = names[:-1]
+ if self.xml.prefix:
+ classnames.insert(0, self.xml.prefix)
+ attrs: Dict[str, str] = {
+ "classname": ".".join(classnames),
+ "name": bin_xml_escape(names[-1]),
+ "file": testreport.location[0],
+ }
+ if testreport.location[1] is not None:
+ attrs["line"] = str(testreport.location[1])
+ if hasattr(testreport, "url"):
+ attrs["url"] = testreport.url
+ self.attrs = attrs
+ self.attrs.update(existing_attrs) # Restore any user-defined attributes.
+
+ # Preserve legacy testcase behavior.
+ if self.family == "xunit1":
+ return
+
+ # Filter out attributes not permitted by this test family.
+ # This also filters out custom attributes, because they are not valid here.
+ temp_attrs = {}
+ for key in self.attrs.keys():
+ if key in families[self.family]["testcase"]:
+ temp_attrs[key] = self.attrs[key]
+ self.attrs = temp_attrs
+
+ def to_xml(self) -> ET.Element:
+ testcase = ET.Element("testcase", self.attrs, time="%.3f" % self.duration)
+ properties = self.make_properties_node()
+ if properties is not None:
+ testcase.append(properties)
+ testcase.extend(self.nodes)
+ return testcase
+
+ def _add_simple(self, tag: str, message: str, data: Optional[str] = None) -> None:
+ node = ET.Element(tag, message=message)
+ node.text = bin_xml_escape(data)
+ self.append(node)
+
+ def write_captured_output(self, report: TestReport) -> None:
+ if not self.xml.log_passing_tests and report.passed:
+ return
+
+ content_out = report.capstdout
+ content_log = report.caplog
+ content_err = report.capstderr
+ if self.xml.logging == "no":
+ return
+ content_all = ""
+ if self.xml.logging in ["log", "all"]:
+ content_all = self._prepare_content(content_log, " Captured Log ")
+ if self.xml.logging in ["system-out", "out-err", "all"]:
+ content_all += self._prepare_content(content_out, " Captured Out ")
+ self._write_content(report, content_all, "system-out")
+ content_all = ""
+ if self.xml.logging in ["system-err", "out-err", "all"]:
+ content_all += self._prepare_content(content_err, " Captured Err ")
+ self._write_content(report, content_all, "system-err")
+ content_all = ""
+ if content_all:
+ self._write_content(report, content_all, "system-out")
+
+ def _prepare_content(self, content: str, header: str) -> str:
+ return "\n".join([header.center(80, "-"), content, ""])
+
+ def _write_content(self, report: TestReport, content: str, jheader: str) -> None:
+ tag = ET.Element(jheader)
+ tag.text = bin_xml_escape(content)
+ self.append(tag)
+
+ def append_pass(self, report: TestReport) -> None:
+ self.add_stats("passed")
+
+ def append_failure(self, report: TestReport) -> None:
+ # msg = str(report.longrepr.reprtraceback.extraline)
+ if hasattr(report, "wasxfail"):
+ self._add_simple("skipped", "xfail-marked test passes unexpectedly")
+ else:
+ assert report.longrepr is not None
+ reprcrash: Optional[ReprFileLocation] = getattr(
+ report.longrepr, "reprcrash", None
+ )
+ if reprcrash is not None:
+ message = reprcrash.message
+ else:
+ message = str(report.longrepr)
+ message = bin_xml_escape(message)
+ self._add_simple("failure", message, str(report.longrepr))
+
+ def append_collect_error(self, report: TestReport) -> None:
+ # msg = str(report.longrepr.reprtraceback.extraline)
+ assert report.longrepr is not None
+ self._add_simple("error", "collection failure", str(report.longrepr))
+
+ def append_collect_skipped(self, report: TestReport) -> None:
+ self._add_simple("skipped", "collection skipped", str(report.longrepr))
+
+ def append_error(self, report: TestReport) -> None:
+ assert report.longrepr is not None
+ reprcrash: Optional[ReprFileLocation] = getattr(
+ report.longrepr, "reprcrash", None
+ )
+ if reprcrash is not None:
+ reason = reprcrash.message
+ else:
+ reason = str(report.longrepr)
+
+ if report.when == "teardown":
+ msg = f'failed on teardown with "{reason}"'
+ else:
+ msg = f'failed on setup with "{reason}"'
+ self._add_simple("error", msg, str(report.longrepr))
+
+ def append_skipped(self, report: TestReport) -> None:
+ if hasattr(report, "wasxfail"):
+ xfailreason = report.wasxfail
+ if xfailreason.startswith("reason: "):
+ xfailreason = xfailreason[8:]
+ xfailreason = bin_xml_escape(xfailreason)
+ skipped = ET.Element("skipped", type="pytest.xfail", message=xfailreason)
+ self.append(skipped)
+ else:
+ assert isinstance(report.longrepr, tuple)
+ filename, lineno, skipreason = report.longrepr
+ if skipreason.startswith("Skipped: "):
+ skipreason = skipreason[9:]
+ details = f"{filename}:{lineno}: {skipreason}"
+
+ skipped = ET.Element("skipped", type="pytest.skip", message=skipreason)
+ skipped.text = bin_xml_escape(details)
+ self.append(skipped)
+ self.write_captured_output(report)
+
+ def finalize(self) -> None:
+ data = self.to_xml()
+ self.__dict__.clear()
+ # Type ignored because mypy doesn't like overriding a method.
+ # Also the return value doesn't match...
+ self.to_xml = lambda: data # type: ignore[assignment]
+
+
+def _warn_incompatibility_with_xunit2(
+ request: FixtureRequest, fixture_name: str
+) -> None:
+ """Emit a PytestWarning about the given fixture being incompatible with newer xunit revisions."""
+ from _pytest.warning_types import PytestWarning
+
+ xml = request.config.stash.get(xml_key, None)
+ if xml is not None and xml.family not in ("xunit1", "legacy"):
+ request.node.warn(
+ PytestWarning(
+ "{fixture_name} is incompatible with junit_family '{family}' (use 'legacy' or 'xunit1')".format(
+ fixture_name=fixture_name, family=xml.family
+ )
+ )
+ )
+
+
+@pytest.fixture
+def record_property(request: FixtureRequest) -> Callable[[str, object], None]:
+ """Add extra properties to the calling test.
+
+ User properties become part of the test report and are available to the
+ configured reporters, like JUnit XML.
+
+ The fixture is callable with ``name, value``. The value is automatically
+ XML-encoded.
+
+ Example::
+
+ def test_function(record_property):
+ record_property("example_key", 1)
+ """
+ _warn_incompatibility_with_xunit2(request, "record_property")
+
+ def append_property(name: str, value: object) -> None:
+ request.node.user_properties.append((name, value))
+
+ return append_property
+
+
+@pytest.fixture
+def record_xml_attribute(request: FixtureRequest) -> Callable[[str, object], None]:
+ """Add extra xml attributes to the tag for the calling test.
+
+ The fixture is callable with ``name, value``. The value is
+ automatically XML-encoded.
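+
+ Example (illustrative; the attribute name and value are arbitrary)::
+
+ def test_function(record_xml_attribute):
+ record_xml_attribute("assertions", "REQ-1234")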
+ """
+ from _pytest.warning_types import PytestExperimentalApiWarning
+
+ request.node.warn(
+ PytestExperimentalApiWarning("record_xml_attribute is an experimental feature")
+ )
+
+ _warn_incompatibility_with_xunit2(request, "record_xml_attribute")
+
+ # Declare noop
+ def add_attr_noop(name: str, value: object) -> None:
+ pass
+
+ attr_func = add_attr_noop
+
+ xml = request.config.stash.get(xml_key, None)
+ if xml is not None:
+ node_reporter = xml.node_reporter(request.node.nodeid)
+ attr_func = node_reporter.add_attribute
+
+ return attr_func
+
+
+def _check_record_param_type(param: str, v: str) -> None:
+ """Used by record_testsuite_property to check that the given parameter name is of the proper
+ type."""
+ __tracebackhide__ = True
+ if not isinstance(v, str):
+ msg = "{param} parameter needs to be a string, but {g} given" # type: ignore[unreachable]
+ raise TypeError(msg.format(param=param, g=type(v).__name__))
+
+
+@pytest.fixture(scope="session")
+def record_testsuite_property(request: FixtureRequest) -> Callable[[str, object], None]:
+ """Record a new ``<property>`` tag as child of the root ``<testsuite>``.
+
+ This is suitable for writing global information regarding the entire test
+ suite, and is compatible with the ``xunit2`` JUnit family.
+
+ This is a ``session``-scoped fixture which is called with ``(name, value)``. Example:
+
+ .. code-block:: python
+
+ def test_foo(record_testsuite_property):
+ record_testsuite_property("ARCH", "PPC")
+ record_testsuite_property("STORAGE_TYPE", "CEPH")
+
+ ``name`` must be a string, ``value`` will be converted to a string and properly xml-escaped.
+
+ .. warning::
+
+ Currently this fixture **does not work** with the
+ `pytest-xdist <https://github.com/pytest-dev/pytest-xdist>`__ plugin. See
+ :issue:`7767` for details.
+ """
+
+ __tracebackhide__ = True
+
+ def record_func(name: str, value: object) -> None:
+ """No-op function in case --junitxml was not passed in the command-line."""
+ __tracebackhide__ = True
+ _check_record_param_type("name", name)
+
+ xml = request.config.stash.get(xml_key, None)
+ if xml is not None:
+ record_func = xml.add_global_property # noqa
+ return record_func
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("terminal reporting")
+ group.addoption(
+ "--junitxml",
+ "--junit-xml",
+ action="store",
+ dest="xmlpath",
+ metavar="path",
+ type=functools.partial(filename_arg, optname="--junitxml"),
+ default=None,
+ help="create junit-xml style report file at given path.",
+ )
+ group.addoption(
+ "--junitprefix",
+ "--junit-prefix",
+ action="store",
+ metavar="str",
+ default=None,
+ help="prepend prefix to classnames in junit-xml output",
+ )
+ parser.addini(
+ "junit_suite_name", "Test suite name for JUnit report", default="pytest"
+ )
+ parser.addini(
+ "junit_logging",
+ "Write captured log messages to JUnit report: "
+ "one of no|log|system-out|system-err|out-err|all",
+ default="no",
+ )
+ parser.addini(
+ "junit_log_passing_tests",
+ "Capture log information for passing tests to JUnit report: ",
+ type="bool",
+ default=True,
+ )
+ parser.addini(
+ "junit_duration_report",
+ "Duration time to report: one of total|call",
+ default="total",
+ ) # choices=['total', 'call'])
+ parser.addini(
+ "junit_family",
+ "Emit XML for schema: one of legacy|xunit1|xunit2",
+ default="xunit2",
+ )
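+
+# For illustration, a typical run might use:
+# pytest --junitxml=reports/results.xml --junit-prefix=ci
+# together with ini settings such as junit_family = xunit1 or
+# junit_logging = system-out (the path and values above are examples only).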
+
+
+def pytest_configure(config: Config) -> None:
+ xmlpath = config.option.xmlpath
+ # Prevent opening xmllog on worker nodes (xdist).
+ if xmlpath and not hasattr(config, "workerinput"):
+ junit_family = config.getini("junit_family")
+ config.stash[xml_key] = LogXML(
+ xmlpath,
+ config.option.junitprefix,
+ config.getini("junit_suite_name"),
+ config.getini("junit_logging"),
+ config.getini("junit_duration_report"),
+ junit_family,
+ config.getini("junit_log_passing_tests"),
+ )
+ config.pluginmanager.register(config.stash[xml_key])
+
+
+def pytest_unconfigure(config: Config) -> None:
+ xml = config.stash.get(xml_key, None)
+ if xml:
+ del config.stash[xml_key]
+ config.pluginmanager.unregister(xml)
+
+
+def mangle_test_address(address: str) -> List[str]:
+ path, possible_open_bracket, params = address.partition("[")
+ names = path.split("::")
+ # Convert file path to dotted path.
+ names[0] = names[0].replace(nodes.SEP, ".")
+ names[0] = re.sub(r"\.py$", "", names[0])
+ # Put any params back.
+ names[-1] += possible_open_bracket + params
+ return names
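+
+# For illustration (hypothetical node id):
+# mangle_test_address("tests/test_demo.py::TestBox::test_fits[small]")
+# returns ["tests.test_demo", "TestBox", "test_fits[small]"]: the file path
+# becomes a dotted classname prefix and the parametrize suffix stays on the
+# test name.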
+
+
+class LogXML:
+ def __init__(
+ self,
+ logfile,
+ prefix: Optional[str],
+ suite_name: str = "pytest",
+ logging: str = "no",
+ report_duration: str = "total",
+ family="xunit1",
+ log_passing_tests: bool = True,
+ ) -> None:
+ logfile = os.path.expanduser(os.path.expandvars(logfile))
+ self.logfile = os.path.normpath(os.path.abspath(logfile))
+ self.prefix = prefix
+ self.suite_name = suite_name
+ self.logging = logging
+ self.log_passing_tests = log_passing_tests
+ self.report_duration = report_duration
+ self.family = family
+ self.stats: Dict[str, int] = dict.fromkeys(
+ ["error", "passed", "failure", "skipped"], 0
+ )
+ self.node_reporters: Dict[
+ Tuple[Union[str, TestReport], object], _NodeReporter
+ ] = {}
+ self.node_reporters_ordered: List[_NodeReporter] = []
+ self.global_properties: List[Tuple[str, str]] = []
+
+ # List of reports that failed on call but teardown is pending.
+ self.open_reports: List[TestReport] = []
+ self.cnt_double_fail_tests = 0
+
+ # Replaces convenience family with real family.
+ if self.family == "legacy":
+ self.family = "xunit1"
+
+ def finalize(self, report: TestReport) -> None:
+ nodeid = getattr(report, "nodeid", report)
+ # Local hack to handle xdist report order.
+ workernode = getattr(report, "node", None)
+ reporter = self.node_reporters.pop((nodeid, workernode))
+ if reporter is not None:
+ reporter.finalize()
+
+ def node_reporter(self, report: Union[TestReport, str]) -> _NodeReporter:
+ nodeid: Union[str, TestReport] = getattr(report, "nodeid", report)
+ # Local hack to handle xdist report order.
+ workernode = getattr(report, "node", None)
+
+ key = nodeid, workernode
+
+ if key in self.node_reporters:
+ # TODO: breaks for --dist=each
+ return self.node_reporters[key]
+
+ reporter = _NodeReporter(nodeid, self)
+
+ self.node_reporters[key] = reporter
+ self.node_reporters_ordered.append(reporter)
+
+ return reporter
+
+ def add_stats(self, key: str) -> None:
+ if key in self.stats:
+ self.stats[key] += 1
+
+ def _opentestcase(self, report: TestReport) -> _NodeReporter:
+ reporter = self.node_reporter(report)
+ reporter.record_testreport(report)
+ return reporter
+
+ def pytest_runtest_logreport(self, report: TestReport) -> None:
+ """Handle a setup/call/teardown report, generating the appropriate
+ XML tags as necessary.
+
+ Note: due to plugins like xdist, this hook may be called in interlaced
+ order with reports from other nodes. For example:
+
+ Usual call order:
+ -> setup node1
+ -> call node1
+ -> teardown node1
+ -> setup node2
+ -> call node2
+ -> teardown node2
+
+ Possible call order in xdist:
+ -> setup node1
+ -> call node1
+ -> setup node2
+ -> call node2
+ -> teardown node2
+ -> teardown node1
+ """
+ close_report = None
+ if report.passed:
+ if report.when == "call": # ignore setup/teardown
+ reporter = self._opentestcase(report)
+ reporter.append_pass(report)
+ elif report.failed:
+ if report.when == "teardown":
+ # The following vars are needed when xdist plugin is used.
+ report_wid = getattr(report, "worker_id", None)
+ report_ii = getattr(report, "item_index", None)
+ close_report = next(
+ (
+ rep
+ for rep in self.open_reports
+ if (
+ rep.nodeid == report.nodeid
+ and getattr(rep, "item_index", None) == report_ii
+ and getattr(rep, "worker_id", None) == report_wid
+ )
+ ),
+ None,
+ )
+ if close_report:
+ # We need to open a new testcase in case we have a failure in
+ # call and an error in teardown, in order to follow the junit
+ # schema.
+ self.finalize(close_report)
+ self.cnt_double_fail_tests += 1
+ reporter = self._opentestcase(report)
+ if report.when == "call":
+ reporter.append_failure(report)
+ self.open_reports.append(report)
+ if not self.log_passing_tests:
+ reporter.write_captured_output(report)
+ else:
+ reporter.append_error(report)
+ elif report.skipped:
+ reporter = self._opentestcase(report)
+ reporter.append_skipped(report)
+ self.update_testcase_duration(report)
+ if report.when == "teardown":
+ reporter = self._opentestcase(report)
+ reporter.write_captured_output(report)
+
+ for propname, propvalue in report.user_properties:
+ reporter.add_property(propname, str(propvalue))
+
+ self.finalize(report)
+ report_wid = getattr(report, "worker_id", None)
+ report_ii = getattr(report, "item_index", None)
+ close_report = next(
+ (
+ rep
+ for rep in self.open_reports
+ if (
+ rep.nodeid == report.nodeid
+ and getattr(rep, "item_index", None) == report_ii
+ and getattr(rep, "worker_id", None) == report_wid
+ )
+ ),
+ None,
+ )
+ if close_report:
+ self.open_reports.remove(close_report)
+
+ def update_testcase_duration(self, report: TestReport) -> None:
+ """Accumulate total duration for nodeid from given report and update
+ the Junit.testcase with the new total if already created."""
+ if self.report_duration == "total" or report.when == self.report_duration:
+ reporter = self.node_reporter(report)
+ reporter.duration += getattr(report, "duration", 0.0)
+
+ def pytest_collectreport(self, report: TestReport) -> None:
+ if not report.passed:
+ reporter = self._opentestcase(report)
+ if report.failed:
+ reporter.append_collect_error(report)
+ else:
+ reporter.append_collect_skipped(report)
+
+ def pytest_internalerror(self, excrepr: ExceptionRepr) -> None:
+ reporter = self.node_reporter("internal")
+ reporter.attrs.update(classname="pytest", name="internal")
+ reporter._add_simple("error", "internal error", str(excrepr))
+
+ def pytest_sessionstart(self) -> None:
+ self.suite_start_time = timing.time()
+
+ def pytest_sessionfinish(self) -> None:
+ dirname = os.path.dirname(os.path.abspath(self.logfile))
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+
+ with open(self.logfile, "w", encoding="utf-8") as logfile:
+ suite_stop_time = timing.time()
+ suite_time_delta = suite_stop_time - self.suite_start_time
+
+ numtests = (
+ self.stats["passed"]
+ + self.stats["failure"]
+ + self.stats["skipped"]
+ + self.stats["error"]
+ - self.cnt_double_fail_tests
+ )
+ logfile.write('<?xml version="1.0" encoding="utf-8"?>')
+
+ suite_node = ET.Element(
+ "testsuite",
+ name=self.suite_name,
+ errors=str(self.stats["error"]),
+ failures=str(self.stats["failure"]),
+ skipped=str(self.stats["skipped"]),
+ tests=str(numtests),
+ time="%.3f" % suite_time_delta,
+ timestamp=datetime.fromtimestamp(self.suite_start_time).isoformat(),
+ hostname=platform.node(),
+ )
+ global_properties = self._get_global_properties_node()
+ if global_properties is not None:
+ suite_node.append(global_properties)
+ for node_reporter in self.node_reporters_ordered:
+ suite_node.append(node_reporter.to_xml())
+ testsuites = ET.Element("testsuites")
+ testsuites.append(suite_node)
+ logfile.write(ET.tostring(testsuites, encoding="unicode"))
+
+ def pytest_terminal_summary(self, terminalreporter: TerminalReporter) -> None:
+ terminalreporter.write_sep("-", f"generated xml file: {self.logfile}")
+
+ def add_global_property(self, name: str, value: object) -> None:
+ __tracebackhide__ = True
+ _check_record_param_type("name", name)
+ self.global_properties.append((name, bin_xml_escape(value)))
+
+ def _get_global_properties_node(self) -> Optional[ET.Element]:
+ """Return a Junit node containing custom properties, if any."""
+ if self.global_properties:
+ properties = ET.Element("properties")
+ for name, value in self.global_properties:
+ properties.append(ET.Element("property", name=name, value=value))
+ return properties
+ return None
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/legacypath.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/legacypath.py
new file mode 100644
index 0000000000..37e8c24220
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/legacypath.py
@@ -0,0 +1,467 @@
+"""Add backward compatibility support for the legacy py path type."""
+import shlex
+import subprocess
+from pathlib import Path
+from typing import List
+from typing import Optional
+from typing import TYPE_CHECKING
+from typing import Union
+
+import attr
+from iniconfig import SectionWrapper
+
+from _pytest.cacheprovider import Cache
+from _pytest.compat import final
+from _pytest.compat import LEGACY_PATH
+from _pytest.compat import legacy_path
+from _pytest.config import Config
+from _pytest.config import hookimpl
+from _pytest.config import PytestPluginManager
+from _pytest.deprecated import check_ispytest
+from _pytest.fixtures import fixture
+from _pytest.fixtures import FixtureRequest
+from _pytest.main import Session
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.nodes import Collector
+from _pytest.nodes import Item
+from _pytest.nodes import Node
+from _pytest.pytester import HookRecorder
+from _pytest.pytester import Pytester
+from _pytest.pytester import RunResult
+from _pytest.terminal import TerminalReporter
+from _pytest.tmpdir import TempPathFactory
+
+if TYPE_CHECKING:
+ from typing_extensions import Final
+
+ import pexpect
+
+
+@final
+class Testdir:
+ """
+    Similar to :class:`Pytester`, but this class works with ``legacy_path`` objects instead.
+
+ All methods just forward to an internal :class:`Pytester` instance, converting results
+ to `legacy_path` objects as necessary.
+ """
+
+ __test__ = False
+
+ CLOSE_STDIN: "Final" = Pytester.CLOSE_STDIN
+ TimeoutExpired: "Final" = Pytester.TimeoutExpired
+
+ def __init__(self, pytester: Pytester, *, _ispytest: bool = False) -> None:
+ check_ispytest(_ispytest)
+ self._pytester = pytester
+
+ @property
+ def tmpdir(self) -> LEGACY_PATH:
+ """Temporary directory where tests are executed."""
+ return legacy_path(self._pytester.path)
+
+ @property
+ def test_tmproot(self) -> LEGACY_PATH:
+ return legacy_path(self._pytester._test_tmproot)
+
+ @property
+ def request(self):
+ return self._pytester._request
+
+ @property
+ def plugins(self):
+ return self._pytester.plugins
+
+ @plugins.setter
+ def plugins(self, plugins):
+ self._pytester.plugins = plugins
+
+ @property
+ def monkeypatch(self) -> MonkeyPatch:
+ return self._pytester._monkeypatch
+
+ def make_hook_recorder(self, pluginmanager) -> HookRecorder:
+ """See :meth:`Pytester.make_hook_recorder`."""
+ return self._pytester.make_hook_recorder(pluginmanager)
+
+ def chdir(self) -> None:
+ """See :meth:`Pytester.chdir`."""
+ return self._pytester.chdir()
+
+ def finalize(self) -> None:
+ """See :meth:`Pytester._finalize`."""
+ return self._pytester._finalize()
+
+ def makefile(self, ext, *args, **kwargs) -> LEGACY_PATH:
+ """See :meth:`Pytester.makefile`."""
+ if ext and not ext.startswith("."):
+            # pytester.makefile is going to throw a ValueError in a way that
+            # testdir.makefile did not, because pathlib.Path is stricter about
+            # suffixes than py.path. This ext argument is likely a user error,
+            # but since testdir has allowed it, we prepend "." as a workaround
+            # to avoid breaking testdir usage that worked before.
+ ext = "." + ext
+ return legacy_path(self._pytester.makefile(ext, *args, **kwargs))
+
+ def makeconftest(self, source) -> LEGACY_PATH:
+ """See :meth:`Pytester.makeconftest`."""
+ return legacy_path(self._pytester.makeconftest(source))
+
+ def makeini(self, source) -> LEGACY_PATH:
+ """See :meth:`Pytester.makeini`."""
+ return legacy_path(self._pytester.makeini(source))
+
+ def getinicfg(self, source: str) -> SectionWrapper:
+ """See :meth:`Pytester.getinicfg`."""
+ return self._pytester.getinicfg(source)
+
+ def makepyprojecttoml(self, source) -> LEGACY_PATH:
+ """See :meth:`Pytester.makepyprojecttoml`."""
+ return legacy_path(self._pytester.makepyprojecttoml(source))
+
+ def makepyfile(self, *args, **kwargs) -> LEGACY_PATH:
+ """See :meth:`Pytester.makepyfile`."""
+ return legacy_path(self._pytester.makepyfile(*args, **kwargs))
+
+ def maketxtfile(self, *args, **kwargs) -> LEGACY_PATH:
+ """See :meth:`Pytester.maketxtfile`."""
+ return legacy_path(self._pytester.maketxtfile(*args, **kwargs))
+
+ def syspathinsert(self, path=None) -> None:
+ """See :meth:`Pytester.syspathinsert`."""
+ return self._pytester.syspathinsert(path)
+
+ def mkdir(self, name) -> LEGACY_PATH:
+ """See :meth:`Pytester.mkdir`."""
+ return legacy_path(self._pytester.mkdir(name))
+
+ def mkpydir(self, name) -> LEGACY_PATH:
+ """See :meth:`Pytester.mkpydir`."""
+ return legacy_path(self._pytester.mkpydir(name))
+
+ def copy_example(self, name=None) -> LEGACY_PATH:
+ """See :meth:`Pytester.copy_example`."""
+ return legacy_path(self._pytester.copy_example(name))
+
+ def getnode(self, config: Config, arg) -> Optional[Union[Item, Collector]]:
+ """See :meth:`Pytester.getnode`."""
+ return self._pytester.getnode(config, arg)
+
+ def getpathnode(self, path):
+ """See :meth:`Pytester.getpathnode`."""
+ return self._pytester.getpathnode(path)
+
+ def genitems(self, colitems: List[Union[Item, Collector]]) -> List[Item]:
+ """See :meth:`Pytester.genitems`."""
+ return self._pytester.genitems(colitems)
+
+ def runitem(self, source):
+ """See :meth:`Pytester.runitem`."""
+ return self._pytester.runitem(source)
+
+ def inline_runsource(self, source, *cmdlineargs):
+ """See :meth:`Pytester.inline_runsource`."""
+ return self._pytester.inline_runsource(source, *cmdlineargs)
+
+ def inline_genitems(self, *args):
+ """See :meth:`Pytester.inline_genitems`."""
+ return self._pytester.inline_genitems(*args)
+
+ def inline_run(self, *args, plugins=(), no_reraise_ctrlc: bool = False):
+ """See :meth:`Pytester.inline_run`."""
+ return self._pytester.inline_run(
+ *args, plugins=plugins, no_reraise_ctrlc=no_reraise_ctrlc
+ )
+
+ def runpytest_inprocess(self, *args, **kwargs) -> RunResult:
+ """See :meth:`Pytester.runpytest_inprocess`."""
+ return self._pytester.runpytest_inprocess(*args, **kwargs)
+
+ def runpytest(self, *args, **kwargs) -> RunResult:
+ """See :meth:`Pytester.runpytest`."""
+ return self._pytester.runpytest(*args, **kwargs)
+
+ def parseconfig(self, *args) -> Config:
+ """See :meth:`Pytester.parseconfig`."""
+ return self._pytester.parseconfig(*args)
+
+ def parseconfigure(self, *args) -> Config:
+ """See :meth:`Pytester.parseconfigure`."""
+ return self._pytester.parseconfigure(*args)
+
+ def getitem(self, source, funcname="test_func"):
+ """See :meth:`Pytester.getitem`."""
+ return self._pytester.getitem(source, funcname)
+
+ def getitems(self, source):
+ """See :meth:`Pytester.getitems`."""
+ return self._pytester.getitems(source)
+
+ def getmodulecol(self, source, configargs=(), withinit=False):
+ """See :meth:`Pytester.getmodulecol`."""
+ return self._pytester.getmodulecol(
+ source, configargs=configargs, withinit=withinit
+ )
+
+ def collect_by_name(
+ self, modcol: Collector, name: str
+ ) -> Optional[Union[Item, Collector]]:
+ """See :meth:`Pytester.collect_by_name`."""
+ return self._pytester.collect_by_name(modcol, name)
+
+ def popen(
+ self,
+ cmdargs,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ stdin=CLOSE_STDIN,
+ **kw,
+ ):
+ """See :meth:`Pytester.popen`."""
+ return self._pytester.popen(cmdargs, stdout, stderr, stdin, **kw)
+
+ def run(self, *cmdargs, timeout=None, stdin=CLOSE_STDIN) -> RunResult:
+ """See :meth:`Pytester.run`."""
+ return self._pytester.run(*cmdargs, timeout=timeout, stdin=stdin)
+
+ def runpython(self, script) -> RunResult:
+ """See :meth:`Pytester.runpython`."""
+ return self._pytester.runpython(script)
+
+ def runpython_c(self, command):
+ """See :meth:`Pytester.runpython_c`."""
+ return self._pytester.runpython_c(command)
+
+ def runpytest_subprocess(self, *args, timeout=None) -> RunResult:
+ """See :meth:`Pytester.runpytest_subprocess`."""
+ return self._pytester.runpytest_subprocess(*args, timeout=timeout)
+
+ def spawn_pytest(
+ self, string: str, expect_timeout: float = 10.0
+ ) -> "pexpect.spawn":
+ """See :meth:`Pytester.spawn_pytest`."""
+ return self._pytester.spawn_pytest(string, expect_timeout=expect_timeout)
+
+ def spawn(self, cmd: str, expect_timeout: float = 10.0) -> "pexpect.spawn":
+ """See :meth:`Pytester.spawn`."""
+ return self._pytester.spawn(cmd, expect_timeout=expect_timeout)
+
+ def __repr__(self) -> str:
+ return f"<Testdir {self.tmpdir!r}>"
+
+ def __str__(self) -> str:
+ return str(self.tmpdir)
+
+
+class LegacyTestdirPlugin:
+ @staticmethod
+ @fixture
+ def testdir(pytester: Pytester) -> Testdir:
+ """
+ Identical to :fixture:`pytester`, and provides an instance whose methods return
+ legacy ``LEGACY_PATH`` objects instead when applicable.
+
+ New code should avoid using :fixture:`testdir` in favor of :fixture:`pytester`.
+ """
+ return Testdir(pytester, _ispytest=True)
+
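+# A minimal usage sketch of the ``testdir`` fixture defined above (test and
+# file names are illustrative): it mirrors the ``pytester`` fixture but hands
+# back legacy ``py.path`` objects.
+#
+#     def test_example(testdir):
+#         testdir.makepyfile("def test_ok(): pass")
+#         result = testdir.runpytest()
+#         result.assert_outcomes(passed=1)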
+
+@final
+@attr.s(init=False, auto_attribs=True)
+class TempdirFactory:
+    """Backward compatibility wrapper that implements :class:`_pytest.compat.LEGACY_PATH`
+    for :class:`TempPathFactory`."""
+
+ _tmppath_factory: TempPathFactory
+
+ def __init__(
+ self, tmppath_factory: TempPathFactory, *, _ispytest: bool = False
+ ) -> None:
+ check_ispytest(_ispytest)
+ self._tmppath_factory = tmppath_factory
+
+ def mktemp(self, basename: str, numbered: bool = True) -> LEGACY_PATH:
+ """Same as :meth:`TempPathFactory.mktemp`, but returns a ``_pytest.compat.LEGACY_PATH`` object."""
+ return legacy_path(self._tmppath_factory.mktemp(basename, numbered).resolve())
+
+ def getbasetemp(self) -> LEGACY_PATH:
+ """Backward compat wrapper for ``_tmppath_factory.getbasetemp``."""
+ return legacy_path(self._tmppath_factory.getbasetemp().resolve())
+
+
+class LegacyTmpdirPlugin:
+ @staticmethod
+ @fixture(scope="session")
+ def tmpdir_factory(request: FixtureRequest) -> TempdirFactory:
+ """Return a :class:`pytest.TempdirFactory` instance for the test session."""
+ # Set dynamically by pytest_configure().
+ return request.config._tmpdirhandler # type: ignore
+
+ @staticmethod
+ @fixture
+ def tmpdir(tmp_path: Path) -> LEGACY_PATH:
+        """Return a temporary directory path object which is unique to each test
+        function invocation, created as a subdirectory of the base temporary
+        directory.
+
+ By default, a new base temporary directory is created each test session,
+ and old bases are removed after 3 sessions, to aid in debugging. If
+ ``--basetemp`` is used then it is cleared each session. See :ref:`base
+ temporary directory`.
+
+ The returned object is a `legacy_path`_ object.
+
+ .. _legacy_path: https://py.readthedocs.io/en/latest/path.html
+ """
+ return legacy_path(tmp_path)
+
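+# A minimal usage sketch of the ``tmpdir`` fixture defined above (file name is
+# illustrative); the returned object supports the legacy ``py.path.local`` API:
+#
+#     def test_create_file(tmpdir):
+#         p = tmpdir.join("hello.txt")
+#         p.write("content")
+#         assert p.read() == "content"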
+
+def Cache_makedir(self: Cache, name: str) -> LEGACY_PATH:
+ """Return a directory path object with the given name.
+
+ Same as :func:`mkdir`, but returns a legacy py path instance.
+ """
+ return legacy_path(self.mkdir(name))
+
+
+def FixtureRequest_fspath(self: FixtureRequest) -> LEGACY_PATH:
+ """(deprecated) The file system path of the test module which collected this test."""
+ return legacy_path(self.path)
+
+
+def TerminalReporter_startdir(self: TerminalReporter) -> LEGACY_PATH:
+ """The directory from which pytest was invoked.
+
+ Prefer to use ``startpath`` which is a :class:`pathlib.Path`.
+
+ :type: LEGACY_PATH
+ """
+ return legacy_path(self.startpath)
+
+
+def Config_invocation_dir(self: Config) -> LEGACY_PATH:
+ """The directory from which pytest was invoked.
+
+ Prefer to use :attr:`invocation_params.dir <InvocationParams.dir>`,
+ which is a :class:`pathlib.Path`.
+
+ :type: LEGACY_PATH
+ """
+ return legacy_path(str(self.invocation_params.dir))
+
+
+def Config_rootdir(self: Config) -> LEGACY_PATH:
+ """The path to the :ref:`rootdir <rootdir>`.
+
+ Prefer to use :attr:`rootpath`, which is a :class:`pathlib.Path`.
+
+ :type: LEGACY_PATH
+ """
+ return legacy_path(str(self.rootpath))
+
+
+def Config_inifile(self: Config) -> Optional[LEGACY_PATH]:
+ """The path to the :ref:`configfile <configfiles>`.
+
+ Prefer to use :attr:`inipath`, which is a :class:`pathlib.Path`.
+
+ :type: Optional[LEGACY_PATH]
+ """
+ return legacy_path(str(self.inipath)) if self.inipath else None
+
+
+def Session_startdir(self: Session) -> LEGACY_PATH:
+ """The path from which pytest was invoked.
+
+ Prefer to use ``startpath`` which is a :class:`pathlib.Path`.
+
+ :type: LEGACY_PATH
+ """
+ return legacy_path(self.startpath)
+
+
+def Config__getini_unknown_type(
+ self, name: str, type: str, value: Union[str, List[str]]
+):
+ if type == "pathlist":
+ # TODO: This assert is probably not valid in all cases.
+ assert self.inipath is not None
+ dp = self.inipath.parent
+ input_values = shlex.split(value) if isinstance(value, str) else value
+ return [legacy_path(str(dp / x)) for x in input_values]
+ else:
+ raise ValueError(f"unknown configuration type: {type}", value)
+
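+# Illustrative sketch of the "pathlist" ini type handled above (the ini option
+# name ``mypaths`` is hypothetical): values are split with shlex and resolved
+# relative to the directory containing the ini file, then returned as legacy
+# path objects.
+#
+#     parser.addini("mypaths", type="pathlist", help="demo path list")
+#     # with ``mypaths = a.py sub/b.py`` in the ini file:
+#     paths = config.getini("mypaths")  # e.g. [local(".../a.py"), local(".../sub/b.py")]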
+
+def Node_fspath(self: Node) -> LEGACY_PATH:
+ """(deprecated) returns a legacy_path copy of self.path"""
+ return legacy_path(self.path)
+
+
+def Node_fspath_set(self: Node, value: LEGACY_PATH) -> None:
+ self.path = Path(value)
+
+
+@hookimpl(tryfirst=True)
+def pytest_load_initial_conftests(early_config: Config) -> None:
+ """Monkeypatch legacy path attributes in several classes, as early as possible."""
+ mp = MonkeyPatch()
+ early_config.add_cleanup(mp.undo)
+
+ # Add Cache.makedir().
+ mp.setattr(Cache, "makedir", Cache_makedir, raising=False)
+
+ # Add FixtureRequest.fspath property.
+ mp.setattr(FixtureRequest, "fspath", property(FixtureRequest_fspath), raising=False)
+
+ # Add TerminalReporter.startdir property.
+ mp.setattr(
+ TerminalReporter, "startdir", property(TerminalReporter_startdir), raising=False
+ )
+
+ # Add Config.{invocation_dir,rootdir,inifile} properties.
+ mp.setattr(Config, "invocation_dir", property(Config_invocation_dir), raising=False)
+ mp.setattr(Config, "rootdir", property(Config_rootdir), raising=False)
+ mp.setattr(Config, "inifile", property(Config_inifile), raising=False)
+
+ # Add Session.startdir property.
+ mp.setattr(Session, "startdir", property(Session_stardir), raising=False)
+
+ # Add pathlist configuration type.
+ mp.setattr(Config, "_getini_unknown_type", Config__getini_unknown_type)
+
+ # Add Node.fspath property.
+ mp.setattr(Node, "fspath", property(Node_fspath, Node_fspath_set), raising=False)
+
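+# After this hook runs, the patched attributes mirror their pathlib
+# counterparts as legacy paths; a rough sketch of what a test could observe:
+#
+#     def test_legacy_attrs(request):
+#         assert str(request.config.rootdir) == str(request.config.rootpath)
+#         assert str(request.node.fspath) == str(request.node.path)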
+
+@hookimpl
+def pytest_configure(config: Config) -> None:
+ """Installs the LegacyTmpdirPlugin if the ``tmpdir`` plugin is also installed."""
+ if config.pluginmanager.has_plugin("tmpdir"):
+ mp = MonkeyPatch()
+ config.add_cleanup(mp.undo)
+ # Create TmpdirFactory and attach it to the config object.
+ #
+ # This is to comply with existing plugins which expect the handler to be
+ # available at pytest_configure time, but ideally should be moved entirely
+ # to the tmpdir_factory session fixture.
+ try:
+ tmp_path_factory = config._tmp_path_factory # type: ignore[attr-defined]
+ except AttributeError:
+ # tmpdir plugin is blocked.
+ pass
+ else:
+ _tmpdirhandler = TempdirFactory(tmp_path_factory, _ispytest=True)
+ mp.setattr(config, "_tmpdirhandler", _tmpdirhandler, raising=False)
+
+ config.pluginmanager.register(LegacyTmpdirPlugin, "legacypath-tmpdir")
+
+
+@hookimpl
+def pytest_plugin_registered(plugin: object, manager: PytestPluginManager) -> None:
+ # pytester is not loaded by default and is commonly loaded from a conftest,
+ # so checking for it in `pytest_configure` is not enough.
+ is_pytester = plugin is manager.get_plugin("pytester")
+ if is_pytester and not manager.is_registered(LegacyTestdirPlugin):
+ manager.register(LegacyTestdirPlugin, "legacypath-pytester")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/logging.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/logging.py
new file mode 100644
index 0000000000..31ad830107
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/logging.py
@@ -0,0 +1,831 @@
+"""Access and control log capturing."""
+import logging
+import os
+import re
+import sys
+from contextlib import contextmanager
+from io import StringIO
+from pathlib import Path
+from typing import AbstractSet
+from typing import Dict
+from typing import Generator
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Tuple
+from typing import TypeVar
+from typing import Union
+
+from _pytest import nodes
+from _pytest._io import TerminalWriter
+from _pytest.capture import CaptureManager
+from _pytest.compat import final
+from _pytest.compat import nullcontext
+from _pytest.config import _strtobool
+from _pytest.config import Config
+from _pytest.config import create_terminal_writer
+from _pytest.config import hookimpl
+from _pytest.config import UsageError
+from _pytest.config.argparsing import Parser
+from _pytest.deprecated import check_ispytest
+from _pytest.fixtures import fixture
+from _pytest.fixtures import FixtureRequest
+from _pytest.main import Session
+from _pytest.stash import StashKey
+from _pytest.terminal import TerminalReporter
+
+
+DEFAULT_LOG_FORMAT = "%(levelname)-8s %(name)s:%(filename)s:%(lineno)d %(message)s"
+DEFAULT_LOG_DATE_FORMAT = "%H:%M:%S"
+_ANSI_ESCAPE_SEQ = re.compile(r"\x1b\[[\d;]+m")
+caplog_handler_key = StashKey["LogCaptureHandler"]()
+caplog_records_key = StashKey[Dict[str, List[logging.LogRecord]]]()
+
+
+def _remove_ansi_escape_sequences(text: str) -> str:
+ return _ANSI_ESCAPE_SEQ.sub("", text)
+
+
+class ColoredLevelFormatter(logging.Formatter):
+ """A logging formatter which colorizes the %(levelname)..s part of the
+ log format passed to __init__."""
+
+ LOGLEVEL_COLOROPTS: Mapping[int, AbstractSet[str]] = {
+ logging.CRITICAL: {"red"},
+ logging.ERROR: {"red", "bold"},
+ logging.WARNING: {"yellow"},
+ logging.WARN: {"yellow"},
+ logging.INFO: {"green"},
+ logging.DEBUG: {"purple"},
+ logging.NOTSET: set(),
+ }
+ LEVELNAME_FMT_REGEX = re.compile(r"%\(levelname\)([+-.]?\d*(?:\.\d+)?s)")
+
+ def __init__(self, terminalwriter: TerminalWriter, *args, **kwargs) -> None:
+ super().__init__(*args, **kwargs)
+ self._terminalwriter = terminalwriter
+ self._original_fmt = self._style._fmt
+ self._level_to_fmt_mapping: Dict[int, str] = {}
+
+ for level, color_opts in self.LOGLEVEL_COLOROPTS.items():
+ self.add_color_level(level, *color_opts)
+
+ def add_color_level(self, level: int, *color_opts: str) -> None:
+ """Add or update color opts for a log level.
+
+ :param level:
+ Log level to apply a style to, e.g. ``logging.INFO``.
+ :param color_opts:
+            ANSI escape sequence color options. Capitalized colors indicate
+            background color, e.g. ``'green', 'Yellow', 'bold'`` gives bold
+            green text on a yellow background.
+
+ .. warning::
+ This is an experimental API.
+ """
+
+ assert self._fmt is not None
+ levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt)
+ if not levelname_fmt_match:
+ return
+ levelname_fmt = levelname_fmt_match.group()
+
+ formatted_levelname = levelname_fmt % {"levelname": logging.getLevelName(level)}
+
+ # add ANSI escape sequences around the formatted levelname
+ color_kwargs = {name: True for name in color_opts}
+ colorized_formatted_levelname = self._terminalwriter.markup(
+ formatted_levelname, **color_kwargs
+ )
+ self._level_to_fmt_mapping[level] = self.LEVELNAME_FMT_REGEX.sub(
+ colorized_formatted_levelname, self._fmt
+ )
+
+ def format(self, record: logging.LogRecord) -> str:
+ fmt = self._level_to_fmt_mapping.get(record.levelno, self._original_fmt)
+ self._style._fmt = fmt
+ return super().format(record)
+
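+# A minimal sketch of the experimental ``add_color_level`` API above (the
+# level/color choice is illustrative); it only has an effect when the
+# formatter in use is a ColoredLevelFormatter, i.e. when color output is
+# enabled:
+#
+#     formatter.add_color_level(logging.INFO, "cyan")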
+
+class PercentStyleMultiline(logging.PercentStyle):
+ """A logging style with special support for multiline messages.
+
+ If the message of a record consists of multiple lines, this style
+ formats the message as if each line were logged separately.
+ """
+
+ def __init__(self, fmt: str, auto_indent: Union[int, str, bool, None]) -> None:
+ super().__init__(fmt)
+ self._auto_indent = self._get_auto_indent(auto_indent)
+
+ @staticmethod
+ def _get_auto_indent(auto_indent_option: Union[int, str, bool, None]) -> int:
+ """Determine the current auto indentation setting.
+
+        Specify auto indent behavior (on/off/fixed) by passing in
+        extra={"auto_indent": [value]} to the call to logging.log(), or by
+        using the --log-auto-indent [value] command line option or the
+        log_auto_indent [value] config option.
+
+ Default behavior is auto-indent off.
+
+ Using the string "True" or "on" or the boolean True as the value
+ turns auto indent on, using the string "False" or "off" or the
+ boolean False or the int 0 turns it off, and specifying a
+ positive integer fixes the indentation position to the value
+ specified.
+
+ Any other values for the option are invalid, and will silently be
+ converted to the default.
+
+ :param None|bool|int|str auto_indent_option:
+ User specified option for indentation from command line, config
+ or extra kwarg. Accepts int, bool or str. str option accepts the
+ same range of values as boolean config options, as well as
+ positive integers represented in str form.
+
+ :returns:
+ Indentation value, which can be
+ -1 (automatically determine indentation) or
+ 0 (auto-indent turned off) or
+ >0 (explicitly set indentation position).
+ """
+
+ if auto_indent_option is None:
+ return 0
+ elif isinstance(auto_indent_option, bool):
+ if auto_indent_option:
+ return -1
+ else:
+ return 0
+ elif isinstance(auto_indent_option, int):
+ return int(auto_indent_option)
+ elif isinstance(auto_indent_option, str):
+ try:
+ return int(auto_indent_option)
+ except ValueError:
+ pass
+ try:
+ if _strtobool(auto_indent_option):
+ return -1
+ except ValueError:
+ return 0
+
+ return 0
+
+ def format(self, record: logging.LogRecord) -> str:
+ if "\n" in record.message:
+ if hasattr(record, "auto_indent"):
+ # Passed in from the "extra={}" kwarg on the call to logging.log().
+ auto_indent = self._get_auto_indent(record.auto_indent) # type: ignore[attr-defined]
+ else:
+ auto_indent = self._auto_indent
+
+ if auto_indent:
+ lines = record.message.splitlines()
+ formatted = self._fmt % {**record.__dict__, "message": lines[0]}
+
+ if auto_indent < 0:
+ indentation = _remove_ansi_escape_sequences(formatted).find(
+ lines[0]
+ )
+ else:
+ # Optimizes logging by allowing a fixed indentation.
+ indentation = auto_indent
+ lines[0] = formatted
+ return ("\n" + " " * indentation).join(lines)
+ return self._fmt % record.__dict__
+
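+# A small sketch of the auto-indent behaviour described above (logger name is
+# illustrative): continuation lines are aligned under the first message line.
+#
+#     logging.getLogger("demo").info(
+#         "line one\nline two", extra={"auto_indent": True}
+#     )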
+
+def get_option_ini(config: Config, *names: str):
+ for name in names:
+ ret = config.getoption(name) # 'default' arg won't work as expected
+ if ret is None:
+ ret = config.getini(name)
+ if ret:
+ return ret
+
+
+def pytest_addoption(parser: Parser) -> None:
+ """Add options to control log capturing."""
+ group = parser.getgroup("logging")
+
+ def add_option_ini(option, dest, default=None, type=None, **kwargs):
+ parser.addini(
+ dest, default=default, type=type, help="default value for " + option
+ )
+ group.addoption(option, dest=dest, **kwargs)
+
+ add_option_ini(
+ "--log-level",
+ dest="log_level",
+ default=None,
+ metavar="LEVEL",
+ help=(
+ "level of messages to catch/display.\n"
+ "Not set by default, so it depends on the root/parent log handler's"
+ ' effective level, where it is "WARNING" by default.'
+ ),
+ )
+ add_option_ini(
+ "--log-format",
+ dest="log_format",
+ default=DEFAULT_LOG_FORMAT,
+ help="log format as used by the logging module.",
+ )
+ add_option_ini(
+ "--log-date-format",
+ dest="log_date_format",
+ default=DEFAULT_LOG_DATE_FORMAT,
+ help="log date format as used by the logging module.",
+ )
+ parser.addini(
+ "log_cli",
+ default=False,
+ type="bool",
+ help='enable log display during test run (also known as "live logging").',
+ )
+ add_option_ini(
+ "--log-cli-level", dest="log_cli_level", default=None, help="cli logging level."
+ )
+ add_option_ini(
+ "--log-cli-format",
+ dest="log_cli_format",
+ default=None,
+ help="log format as used by the logging module.",
+ )
+ add_option_ini(
+ "--log-cli-date-format",
+ dest="log_cli_date_format",
+ default=None,
+ help="log date format as used by the logging module.",
+ )
+ add_option_ini(
+ "--log-file",
+ dest="log_file",
+ default=None,
+        help="path to a file to which logging will be written.",
+ )
+ add_option_ini(
+ "--log-file-level",
+ dest="log_file_level",
+ default=None,
+ help="log file logging level.",
+ )
+ add_option_ini(
+ "--log-file-format",
+ dest="log_file_format",
+ default=DEFAULT_LOG_FORMAT,
+ help="log format as used by the logging module.",
+ )
+ add_option_ini(
+ "--log-file-date-format",
+ dest="log_file_date_format",
+ default=DEFAULT_LOG_DATE_FORMAT,
+ help="log date format as used by the logging module.",
+ )
+ add_option_ini(
+ "--log-auto-indent",
+ dest="log_auto_indent",
+ default=None,
+ help="Auto-indent multiline messages passed to the logging module. Accepts true|on, false|off or an integer.",
+ )
+
+
+_HandlerType = TypeVar("_HandlerType", bound=logging.Handler)
+
+
+# Not using @contextmanager for performance reasons.
+class catching_logs:
+ """Context manager that prepares the whole logging machinery properly."""
+
+ __slots__ = ("handler", "level", "orig_level")
+
+ def __init__(self, handler: _HandlerType, level: Optional[int] = None) -> None:
+ self.handler = handler
+ self.level = level
+
+ def __enter__(self):
+ root_logger = logging.getLogger()
+ if self.level is not None:
+ self.handler.setLevel(self.level)
+ root_logger.addHandler(self.handler)
+ if self.level is not None:
+ self.orig_level = root_logger.level
+ root_logger.setLevel(min(self.orig_level, self.level))
+ return self.handler
+
+ def __exit__(self, type, value, traceback):
+ root_logger = logging.getLogger()
+ if self.level is not None:
+ root_logger.setLevel(self.orig_level)
+ root_logger.removeHandler(self.handler)
+
+
+class LogCaptureHandler(logging.StreamHandler):
+ """A logging handler that stores log records and the log text."""
+
+ stream: StringIO
+
+ def __init__(self) -> None:
+ """Create a new log handler."""
+ super().__init__(StringIO())
+ self.records: List[logging.LogRecord] = []
+
+ def emit(self, record: logging.LogRecord) -> None:
+ """Keep the log records in a list in addition to the log text."""
+ self.records.append(record)
+ super().emit(record)
+
+ def reset(self) -> None:
+ self.records = []
+ self.stream = StringIO()
+
+ def handleError(self, record: logging.LogRecord) -> None:
+ if logging.raiseExceptions:
+ # Fail the test if the log message is bad (emit failed).
+ # The default behavior of logging is to print "Logging error"
+ # to stderr with the call stack and some extra details.
+ # pytest wants to make such mistakes visible during testing.
+ raise
+
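+# A minimal sketch combining ``catching_logs`` and ``LogCaptureHandler`` from
+# above: the handler is temporarily attached to the root logger and records
+# everything at or above the given level.
+#
+#     handler = LogCaptureHandler()
+#     with catching_logs(handler, level=logging.INFO):
+#         logging.getLogger("demo").info("hello")
+#     assert handler.records[0].getMessage() == "hello"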
+
+@final
+class LogCaptureFixture:
+ """Provides access and control of log capturing."""
+
+ def __init__(self, item: nodes.Node, *, _ispytest: bool = False) -> None:
+ check_ispytest(_ispytest)
+ self._item = item
+ self._initial_handler_level: Optional[int] = None
+ # Dict of log name -> log level.
+ self._initial_logger_levels: Dict[Optional[str], int] = {}
+
+ def _finalize(self) -> None:
+ """Finalize the fixture.
+
+ This restores the log levels changed by :meth:`set_level`.
+ """
+ # Restore log levels.
+ if self._initial_handler_level is not None:
+ self.handler.setLevel(self._initial_handler_level)
+ for logger_name, level in self._initial_logger_levels.items():
+ logger = logging.getLogger(logger_name)
+ logger.setLevel(level)
+
+ @property
+ def handler(self) -> LogCaptureHandler:
+ """Get the logging handler used by the fixture.
+
+ :rtype: LogCaptureHandler
+ """
+ return self._item.stash[caplog_handler_key]
+
+ def get_records(self, when: str) -> List[logging.LogRecord]:
+ """Get the logging records for one of the possible test phases.
+
+ :param str when:
+ Which test phase to obtain the records from. Valid values are: "setup", "call" and "teardown".
+
+ :returns: The list of captured records at the given stage.
+ :rtype: List[logging.LogRecord]
+
+ .. versionadded:: 3.4
+ """
+ return self._item.stash[caplog_records_key].get(when, [])
+
+ @property
+ def text(self) -> str:
+ """The formatted log text."""
+ return _remove_ansi_escape_sequences(self.handler.stream.getvalue())
+
+ @property
+ def records(self) -> List[logging.LogRecord]:
+ """The list of log records."""
+ return self.handler.records
+
+ @property
+ def record_tuples(self) -> List[Tuple[str, int, str]]:
+        """A list of stripped-down versions of the log records, intended
+        for use in assertion comparisons.
+
+ The format of the tuple is:
+
+ (logger_name, log_level, message)
+ """
+ return [(r.name, r.levelno, r.getMessage()) for r in self.records]
+
+ @property
+ def messages(self) -> List[str]:
+ """A list of format-interpolated log messages.
+
+ Unlike 'records', which contains the format string and parameters for
+ interpolation, log messages in this list are all interpolated.
+
+ Unlike 'text', which contains the output from the handler, log
+ messages in this list are unadorned with levels, timestamps, etc,
+ making exact comparisons more reliable.
+
+ Note that traceback or stack info (from :func:`logging.exception` or
+ the `exc_info` or `stack_info` arguments to the logging functions) is
+ not included, as this is added by the formatter in the handler.
+
+ .. versionadded:: 3.7
+ """
+ return [r.getMessage() for r in self.records]
+
+ def clear(self) -> None:
+ """Reset the list of log records and the captured log text."""
+ self.handler.reset()
+
+ def set_level(self, level: Union[int, str], logger: Optional[str] = None) -> None:
+ """Set the level of a logger for the duration of a test.
+
+ .. versionchanged:: 3.4
+ The levels of the loggers changed by this function will be
+ restored to their initial values at the end of the test.
+
+ :param int level: The level.
+ :param str logger: The logger to update. If not given, the root logger.
+ """
+ logger_obj = logging.getLogger(logger)
+ # Save the original log-level to restore it during teardown.
+ self._initial_logger_levels.setdefault(logger, logger_obj.level)
+ logger_obj.setLevel(level)
+ if self._initial_handler_level is None:
+ self._initial_handler_level = self.handler.level
+ self.handler.setLevel(level)
+
+ @contextmanager
+ def at_level(
+ self, level: Union[int, str], logger: Optional[str] = None
+ ) -> Generator[None, None, None]:
+ """Context manager that sets the level for capturing of logs. After
+ the end of the 'with' statement the level is restored to its original
+ value.
+
+ :param int level: The level.
+ :param str logger: The logger to update. If not given, the root logger.
+ """
+ logger_obj = logging.getLogger(logger)
+ orig_level = logger_obj.level
+ logger_obj.setLevel(level)
+ handler_orig_level = self.handler.level
+ self.handler.setLevel(level)
+ try:
+ yield
+ finally:
+ logger_obj.setLevel(orig_level)
+ self.handler.setLevel(handler_orig_level)
+
+
+@fixture
+def caplog(request: FixtureRequest) -> Generator[LogCaptureFixture, None, None]:
+ """Access and control log capturing.
+
+ Captured logs are available through the following properties/methods::
+
+ * caplog.messages -> list of format-interpolated log messages
+ * caplog.text -> string containing formatted log output
+ * caplog.records -> list of logging.LogRecord instances
+ * caplog.record_tuples -> list of (logger_name, level, message) tuples
+ * caplog.clear() -> clear captured records and formatted log output string
+ """
+ result = LogCaptureFixture(request.node, _ispytest=True)
+ yield result
+ result._finalize()
+
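+# A minimal usage sketch of the ``caplog`` fixture defined above (logger name
+# and message are illustrative):
+#
+#     def test_logging(caplog):
+#         caplog.set_level(logging.INFO)
+#         logging.getLogger("app").info("boom")
+#         assert caplog.record_tuples == [("app", logging.INFO, "boom")]
+#         assert "boom" in caplog.text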
+
+def get_log_level_for_setting(config: Config, *setting_names: str) -> Optional[int]:
+ for setting_name in setting_names:
+ log_level = config.getoption(setting_name)
+ if log_level is None:
+ log_level = config.getini(setting_name)
+ if log_level:
+ break
+ else:
+ return None
+
+ if isinstance(log_level, str):
+ log_level = log_level.upper()
+ try:
+ return int(getattr(logging, log_level, log_level))
+ except ValueError as e:
+ # Python logging does not recognise this as a logging level
+ raise UsageError(
+            "'{}' is not recognized as a logging level name for "
+            "'{}'. Please consider passing the "
+            "numeric logging level instead.".format(log_level, setting_name)
+ ) from e
+
+
+# run after terminalreporter/capturemanager are configured
+@hookimpl(trylast=True)
+def pytest_configure(config: Config) -> None:
+ config.pluginmanager.register(LoggingPlugin(config), "logging-plugin")
+
+
+class LoggingPlugin:
+ """Attaches to the logging module and captures log messages for each test."""
+
+ def __init__(self, config: Config) -> None:
+ """Create a new plugin to capture log messages.
+
+ The formatter can be safely shared across all handlers so
+ create a single one for the entire test session here.
+ """
+ self._config = config
+
+ # Report logging.
+ self.formatter = self._create_formatter(
+ get_option_ini(config, "log_format"),
+ get_option_ini(config, "log_date_format"),
+ get_option_ini(config, "log_auto_indent"),
+ )
+ self.log_level = get_log_level_for_setting(config, "log_level")
+ self.caplog_handler = LogCaptureHandler()
+ self.caplog_handler.setFormatter(self.formatter)
+ self.report_handler = LogCaptureHandler()
+ self.report_handler.setFormatter(self.formatter)
+
+ # File logging.
+ self.log_file_level = get_log_level_for_setting(config, "log_file_level")
+ log_file = get_option_ini(config, "log_file") or os.devnull
+ if log_file != os.devnull:
+ directory = os.path.dirname(os.path.abspath(log_file))
+ if not os.path.isdir(directory):
+ os.makedirs(directory)
+
+ self.log_file_handler = _FileHandler(log_file, mode="w", encoding="UTF-8")
+ log_file_format = get_option_ini(config, "log_file_format", "log_format")
+ log_file_date_format = get_option_ini(
+ config, "log_file_date_format", "log_date_format"
+ )
+
+ log_file_formatter = logging.Formatter(
+ log_file_format, datefmt=log_file_date_format
+ )
+ self.log_file_handler.setFormatter(log_file_formatter)
+
+ # CLI/live logging.
+ self.log_cli_level = get_log_level_for_setting(
+ config, "log_cli_level", "log_level"
+ )
+ if self._log_cli_enabled():
+ terminal_reporter = config.pluginmanager.get_plugin("terminalreporter")
+ capture_manager = config.pluginmanager.get_plugin("capturemanager")
+ # if capturemanager plugin is disabled, live logging still works.
+ self.log_cli_handler: Union[
+ _LiveLoggingStreamHandler, _LiveLoggingNullHandler
+ ] = _LiveLoggingStreamHandler(terminal_reporter, capture_manager)
+ else:
+ self.log_cli_handler = _LiveLoggingNullHandler()
+ log_cli_formatter = self._create_formatter(
+ get_option_ini(config, "log_cli_format", "log_format"),
+ get_option_ini(config, "log_cli_date_format", "log_date_format"),
+ get_option_ini(config, "log_auto_indent"),
+ )
+ self.log_cli_handler.setFormatter(log_cli_formatter)
+
+ def _create_formatter(self, log_format, log_date_format, auto_indent):
+ # Color option doesn't exist if terminal plugin is disabled.
+ color = getattr(self._config.option, "color", "no")
+ if color != "no" and ColoredLevelFormatter.LEVELNAME_FMT_REGEX.search(
+ log_format
+ ):
+ formatter: logging.Formatter = ColoredLevelFormatter(
+ create_terminal_writer(self._config), log_format, log_date_format
+ )
+ else:
+ formatter = logging.Formatter(log_format, log_date_format)
+
+ formatter._style = PercentStyleMultiline(
+ formatter._style._fmt, auto_indent=auto_indent
+ )
+
+ return formatter
+
+ def set_log_path(self, fname: str) -> None:
+ """Set the filename parameter for Logging.FileHandler().
+
+ Creates parent directory if it does not exist.
+
+ .. warning::
+ This is an experimental API.
+ """
+ fpath = Path(fname)
+
+ if not fpath.is_absolute():
+ fpath = self._config.rootpath / fpath
+
+ if not fpath.parent.exists():
+ fpath.parent.mkdir(exist_ok=True, parents=True)
+
+ stream = fpath.open(mode="w", encoding="UTF-8")
+ if sys.version_info >= (3, 7):
+ old_stream = self.log_file_handler.setStream(stream)
+ else:
+ old_stream = self.log_file_handler.stream
+ self.log_file_handler.acquire()
+ try:
+ self.log_file_handler.flush()
+ self.log_file_handler.stream = stream
+ finally:
+ self.log_file_handler.release()
+ if old_stream:
+ # https://github.com/python/typeshed/pull/5663
+ old_stream.close() # type:ignore[attr-defined]
+
+ def _log_cli_enabled(self):
+ """Return whether live logging is enabled."""
+ enabled = self._config.getoption(
+ "--log-cli-level"
+ ) is not None or self._config.getini("log_cli")
+ if not enabled:
+ return False
+
+ terminal_reporter = self._config.pluginmanager.get_plugin("terminalreporter")
+ if terminal_reporter is None:
+ # terminal reporter is disabled e.g. by pytest-xdist.
+ return False
+
+ return True
+
+ @hookimpl(hookwrapper=True, tryfirst=True)
+ def pytest_sessionstart(self) -> Generator[None, None, None]:
+ self.log_cli_handler.set_when("sessionstart")
+
+ with catching_logs(self.log_cli_handler, level=self.log_cli_level):
+ with catching_logs(self.log_file_handler, level=self.log_file_level):
+ yield
+
+ @hookimpl(hookwrapper=True, tryfirst=True)
+ def pytest_collection(self) -> Generator[None, None, None]:
+ self.log_cli_handler.set_when("collection")
+
+ with catching_logs(self.log_cli_handler, level=self.log_cli_level):
+ with catching_logs(self.log_file_handler, level=self.log_file_level):
+ yield
+
+ @hookimpl(hookwrapper=True)
+ def pytest_runtestloop(self, session: Session) -> Generator[None, None, None]:
+ if session.config.option.collectonly:
+ yield
+ return
+
+ if self._log_cli_enabled() and self._config.getoption("verbose") < 1:
+ # The verbose flag is needed to avoid messy test progress output.
+ self._config.option.verbose = 1
+
+ with catching_logs(self.log_cli_handler, level=self.log_cli_level):
+ with catching_logs(self.log_file_handler, level=self.log_file_level):
+ yield # Run all the tests.
+
+ @hookimpl
+ def pytest_runtest_logstart(self) -> None:
+ self.log_cli_handler.reset()
+ self.log_cli_handler.set_when("start")
+
+ @hookimpl
+ def pytest_runtest_logreport(self) -> None:
+ self.log_cli_handler.set_when("logreport")
+
+ def _runtest_for(self, item: nodes.Item, when: str) -> Generator[None, None, None]:
+ """Implement the internals of the pytest_runtest_xxx() hooks."""
+ with catching_logs(
+ self.caplog_handler,
+ level=self.log_level,
+ ) as caplog_handler, catching_logs(
+ self.report_handler,
+ level=self.log_level,
+ ) as report_handler:
+ caplog_handler.reset()
+ report_handler.reset()
+ item.stash[caplog_records_key][when] = caplog_handler.records
+ item.stash[caplog_handler_key] = caplog_handler
+
+ yield
+
+ log = report_handler.stream.getvalue().strip()
+ item.add_report_section(when, "log", log)
+
+ @hookimpl(hookwrapper=True)
+ def pytest_runtest_setup(self, item: nodes.Item) -> Generator[None, None, None]:
+ self.log_cli_handler.set_when("setup")
+
+ empty: Dict[str, List[logging.LogRecord]] = {}
+ item.stash[caplog_records_key] = empty
+ yield from self._runtest_for(item, "setup")
+
+ @hookimpl(hookwrapper=True)
+ def pytest_runtest_call(self, item: nodes.Item) -> Generator[None, None, None]:
+ self.log_cli_handler.set_when("call")
+
+ yield from self._runtest_for(item, "call")
+
+ @hookimpl(hookwrapper=True)
+ def pytest_runtest_teardown(self, item: nodes.Item) -> Generator[None, None, None]:
+ self.log_cli_handler.set_when("teardown")
+
+ yield from self._runtest_for(item, "teardown")
+ del item.stash[caplog_records_key]
+ del item.stash[caplog_handler_key]
+
+ @hookimpl
+ def pytest_runtest_logfinish(self) -> None:
+ self.log_cli_handler.set_when("finish")
+
+ @hookimpl(hookwrapper=True, tryfirst=True)
+ def pytest_sessionfinish(self) -> Generator[None, None, None]:
+ self.log_cli_handler.set_when("sessionfinish")
+
+ with catching_logs(self.log_cli_handler, level=self.log_cli_level):
+ with catching_logs(self.log_file_handler, level=self.log_file_level):
+ yield
+
+ @hookimpl
+ def pytest_unconfigure(self) -> None:
+ # Close the FileHandler explicitly.
+ # (logging.shutdown might have lost the weakref?!)
+ self.log_file_handler.close()
+
+
+class _FileHandler(logging.FileHandler):
+ """A logging FileHandler with pytest tweaks."""
+
+ def handleError(self, record: logging.LogRecord) -> None:
+ # Handled by LogCaptureHandler.
+ pass
+
+
+class _LiveLoggingStreamHandler(logging.StreamHandler):
+ """A logging StreamHandler used by the live logging feature: it will
+ write a newline before the first log message in each test.
+
+ During live logging we must also explicitly disable stdout/stderr
+ capturing otherwise it will get captured and won't appear in the
+ terminal.
+ """
+
+    # Officially stream needs to be an IO[str], but TerminalReporter
+    # isn't. So force it.
+ stream: TerminalReporter = None # type: ignore
+
+ def __init__(
+ self,
+ terminal_reporter: TerminalReporter,
+ capture_manager: Optional[CaptureManager],
+ ) -> None:
+ super().__init__(stream=terminal_reporter) # type: ignore[arg-type]
+ self.capture_manager = capture_manager
+ self.reset()
+ self.set_when(None)
+ self._test_outcome_written = False
+
+ def reset(self) -> None:
+ """Reset the handler; should be called before the start of each test."""
+ self._first_record_emitted = False
+
+ def set_when(self, when: Optional[str]) -> None:
+ """Prepare for the given test phase (setup/call/teardown)."""
+ self._when = when
+ self._section_name_shown = False
+ if when == "start":
+ self._test_outcome_written = False
+
+ def emit(self, record: logging.LogRecord) -> None:
+ ctx_manager = (
+ self.capture_manager.global_and_fixture_disabled()
+ if self.capture_manager
+ else nullcontext()
+ )
+ with ctx_manager:
+ if not self._first_record_emitted:
+ self.stream.write("\n")
+ self._first_record_emitted = True
+ elif self._when in ("teardown", "finish"):
+ if not self._test_outcome_written:
+ self._test_outcome_written = True
+ self.stream.write("\n")
+ if not self._section_name_shown and self._when:
+ self.stream.section("live log " + self._when, sep="-", bold=True)
+ self._section_name_shown = True
+ super().emit(record)
+
+ def handleError(self, record: logging.LogRecord) -> None:
+ # Handled by LogCaptureHandler.
+ pass
+
+
+class _LiveLoggingNullHandler(logging.NullHandler):
+ """A logging handler used when live logging is disabled."""
+
+ def reset(self) -> None:
+ pass
+
+ def set_when(self, when: str) -> None:
+ pass
+
+ def handleError(self, record: logging.LogRecord) -> None:
+ # Handled by LogCaptureHandler.
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/main.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/main.py
new file mode 100644
index 0000000000..fea8179ca7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/main.py
@@ -0,0 +1,896 @@
+"""Core implementation of the testing process: init, session, runtest loop."""
+import argparse
+import fnmatch
+import functools
+import importlib
+import os
+import sys
+from pathlib import Path
+from typing import Callable
+from typing import Dict
+from typing import FrozenSet
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+import attr
+
+import _pytest._code
+from _pytest import nodes
+from _pytest.compat import final
+from _pytest.config import Config
+from _pytest.config import directory_arg
+from _pytest.config import ExitCode
+from _pytest.config import hookimpl
+from _pytest.config import PytestPluginManager
+from _pytest.config import UsageError
+from _pytest.config.argparsing import Parser
+from _pytest.fixtures import FixtureManager
+from _pytest.outcomes import exit
+from _pytest.pathlib import absolutepath
+from _pytest.pathlib import bestrelpath
+from _pytest.pathlib import fnmatch_ex
+from _pytest.pathlib import visit
+from _pytest.reports import CollectReport
+from _pytest.reports import TestReport
+from _pytest.runner import collect_one_node
+from _pytest.runner import SetupState
+
+
+if TYPE_CHECKING:
+ from typing_extensions import Literal
+
+
+def pytest_addoption(parser: Parser) -> None:
+ parser.addini(
+ "norecursedirs",
+ "directory patterns to avoid for recursion",
+ type="args",
+ default=[
+ "*.egg",
+ ".*",
+ "_darcs",
+ "build",
+ "CVS",
+ "dist",
+ "node_modules",
+ "venv",
+ "{arch}",
+ ],
+ )
+ parser.addini(
+ "testpaths",
+        "directories to search for tests when no files or directories are given on the "
+        "command line.",
+ type="args",
+ default=[],
+ )
+ group = parser.getgroup("general", "running and selection options")
+ group._addoption(
+ "-x",
+ "--exitfirst",
+ action="store_const",
+ dest="maxfail",
+ const=1,
+ help="exit instantly on first error or failed test.",
+ )
+ group = parser.getgroup("pytest-warnings")
+ group.addoption(
+ "-W",
+ "--pythonwarnings",
+ action="append",
+ help="set which warnings to report, see -W option of python itself.",
+ )
+ parser.addini(
+ "filterwarnings",
+ type="linelist",
+ help="Each line specifies a pattern for "
+ "warnings.filterwarnings. "
+ "Processed after -W/--pythonwarnings.",
+ )
+ group._addoption(
+ "--maxfail",
+ metavar="num",
+ action="store",
+ type=int,
+ dest="maxfail",
+ default=0,
+ help="exit after first num failures or errors.",
+ )
+ group._addoption(
+ "--strict-config",
+ action="store_true",
+ help="any warnings encountered while parsing the `pytest` section of the configuration file raise errors.",
+ )
+ group._addoption(
+ "--strict-markers",
+ action="store_true",
+ help="markers not registered in the `markers` section of the configuration file raise errors.",
+ )
+ group._addoption(
+ "--strict",
+    mp.setattr(Session, "startdir", property(Session_startdir), raising=False)
+ help="(deprecated) alias to --strict-markers.",
+ )
+ group._addoption(
+ "-c",
+ metavar="file",
+ type=str,
+ dest="inifilename",
+ help="load configuration from `file` instead of trying to locate one of the implicit "
+ "configuration files.",
+ )
+ group._addoption(
+ "--continue-on-collection-errors",
+ action="store_true",
+ default=False,
+ dest="continue_on_collection_errors",
+ help="Force test execution even if collection errors occur.",
+ )
+ group._addoption(
+ "--rootdir",
+ action="store",
+ dest="rootdir",
+ help="Define root directory for tests. Can be relative path: 'root_dir', './root_dir', "
+ "'root_dir/another_dir/'; absolute path: '/home/user/root_dir'; path with variables: "
+ "'$HOME/root_dir'.",
+ )
+
+ group = parser.getgroup("collect", "collection")
+ group.addoption(
+ "--collectonly",
+ "--collect-only",
+ "--co",
+ action="store_true",
+ help="only collect tests, don't execute them.",
+ )
+ group.addoption(
+ "--pyargs",
+ action="store_true",
+ help="try to interpret all arguments as python packages.",
+ )
+ group.addoption(
+ "--ignore",
+ action="append",
+ metavar="path",
+ help="ignore path during collection (multi-allowed).",
+ )
+ group.addoption(
+ "--ignore-glob",
+ action="append",
+ metavar="path",
+ help="ignore path pattern during collection (multi-allowed).",
+ )
+ group.addoption(
+ "--deselect",
+ action="append",
+ metavar="nodeid_prefix",
+ help="deselect item (via node id prefix) during collection (multi-allowed).",
+ )
+ group.addoption(
+ "--confcutdir",
+ dest="confcutdir",
+ default=None,
+ metavar="dir",
+ type=functools.partial(directory_arg, optname="--confcutdir"),
+ help="only load conftest.py's relative to specified dir.",
+ )
+ group.addoption(
+ "--noconftest",
+ action="store_true",
+ dest="noconftest",
+ default=False,
+ help="Don't load any conftest.py files.",
+ )
+ group.addoption(
+ "--keepduplicates",
+ "--keep-duplicates",
+ action="store_true",
+ dest="keepduplicates",
+ default=False,
+ help="Keep duplicate tests.",
+ )
+ group.addoption(
+ "--collect-in-virtualenv",
+ action="store_true",
+ dest="collect_in_virtualenv",
+ default=False,
+ help="Don't ignore tests in a local virtualenv directory",
+ )
+ group.addoption(
+ "--import-mode",
+ default="prepend",
+ choices=["prepend", "append", "importlib"],
+ dest="importmode",
+ help="prepend/append to sys.path when importing test modules and conftest files, "
+ "default is to prepend.",
+ )
+
+ group = parser.getgroup("debugconfig", "test session debugging and configuration")
+ group.addoption(
+ "--basetemp",
+ dest="basetemp",
+ default=None,
+ type=validate_basetemp,
+ metavar="dir",
+ help=(
+            "base temporary directory for this test run. "
+            "(warning: this directory is removed if it exists)"
+ ),
+ )
+
+
+def validate_basetemp(path: str) -> str:
+ # GH 7119
+ msg = "basetemp must not be empty, the current working directory or any parent directory of it"
+
+ # empty path
+ if not path:
+ raise argparse.ArgumentTypeError(msg)
+
+ def is_ancestor(base: Path, query: Path) -> bool:
+ """Return whether query is an ancestor of base."""
+ if base == query:
+ return True
+ return query in base.parents
+
+ # check if path is an ancestor of cwd
+ if is_ancestor(Path.cwd(), Path(path).absolute()):
+ raise argparse.ArgumentTypeError(msg)
+
+ # check symlinks for ancestors
+ if is_ancestor(Path.cwd().resolve(), Path(path).resolve()):
+ raise argparse.ArgumentTypeError(msg)
+
+ return path
+
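+# Illustrative behaviour of ``validate_basetemp`` above: empty values and the
+# current working directory (or any of its parents) are rejected.
+#
+#     validate_basetemp("")           # raises argparse.ArgumentTypeError
+#     validate_basetemp(os.getcwd())  # raises argparse.ArgumentTypeError
+#     validate_basetemp("/tmp/pytest-basetemp")  # returned unchanged (assuming
+#                                                # the cwd is not inside it)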
+
+def wrap_session(
+ config: Config, doit: Callable[[Config, "Session"], Optional[Union[int, ExitCode]]]
+) -> Union[int, ExitCode]:
+ """Skeleton command line program."""
+ session = Session.from_config(config)
+ session.exitstatus = ExitCode.OK
+ initstate = 0
+ try:
+ try:
+ config._do_configure()
+ initstate = 1
+ config.hook.pytest_sessionstart(session=session)
+ initstate = 2
+ session.exitstatus = doit(config, session) or 0
+ except UsageError:
+ session.exitstatus = ExitCode.USAGE_ERROR
+ raise
+ except Failed:
+ session.exitstatus = ExitCode.TESTS_FAILED
+ except (KeyboardInterrupt, exit.Exception):
+ excinfo = _pytest._code.ExceptionInfo.from_current()
+ exitstatus: Union[int, ExitCode] = ExitCode.INTERRUPTED
+ if isinstance(excinfo.value, exit.Exception):
+ if excinfo.value.returncode is not None:
+ exitstatus = excinfo.value.returncode
+ if initstate < 2:
+ sys.stderr.write(f"{excinfo.typename}: {excinfo.value.msg}\n")
+ config.hook.pytest_keyboard_interrupt(excinfo=excinfo)
+ session.exitstatus = exitstatus
+ except BaseException:
+ session.exitstatus = ExitCode.INTERNAL_ERROR
+ excinfo = _pytest._code.ExceptionInfo.from_current()
+ try:
+ config.notify_exception(excinfo, config.option)
+ except exit.Exception as exc:
+ if exc.returncode is not None:
+ session.exitstatus = exc.returncode
+ sys.stderr.write(f"{type(exc).__name__}: {exc}\n")
+ else:
+ if isinstance(excinfo.value, SystemExit):
+ sys.stderr.write("mainloop: caught unexpected SystemExit!\n")
+
+ finally:
+ # Explicitly break reference cycle.
+ excinfo = None # type: ignore
+ os.chdir(session.startpath)
+ if initstate >= 2:
+ try:
+ config.hook.pytest_sessionfinish(
+ session=session, exitstatus=session.exitstatus
+ )
+ except exit.Exception as exc:
+ if exc.returncode is not None:
+ session.exitstatus = exc.returncode
+ sys.stderr.write(f"{type(exc).__name__}: {exc}\n")
+ config._ensure_unconfigure()
+ return session.exitstatus
+
+
+def pytest_cmdline_main(config: Config) -> Union[int, ExitCode]:
+ return wrap_session(config, _main)
+
+
+def _main(config: Config, session: "Session") -> Optional[Union[int, ExitCode]]:
+ """Default command line protocol for initialization, session,
+ running tests and reporting."""
+ config.hook.pytest_collection(session=session)
+ config.hook.pytest_runtestloop(session=session)
+
+ if session.testsfailed:
+ return ExitCode.TESTS_FAILED
+ elif session.testscollected == 0:
+ return ExitCode.NO_TESTS_COLLECTED
+ return None
+
+
+def pytest_collection(session: "Session") -> None:
+ session.perform_collect()
+
+
+def pytest_runtestloop(session: "Session") -> bool:
+ if session.testsfailed and not session.config.option.continue_on_collection_errors:
+ raise session.Interrupted(
+ "%d error%s during collection"
+ % (session.testsfailed, "s" if session.testsfailed != 1 else "")
+ )
+
+ if session.config.option.collectonly:
+ return True
+
+ for i, item in enumerate(session.items):
+ nextitem = session.items[i + 1] if i + 1 < len(session.items) else None
+ item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
+ if session.shouldfail:
+ raise session.Failed(session.shouldfail)
+ if session.shouldstop:
+ raise session.Interrupted(session.shouldstop)
+ return True
+
+
+def _in_venv(path: Path) -> bool:
+ """Attempt to detect if ``path`` is the root of a Virtual Environment by
+ checking for the existence of the appropriate activate script."""
+ bindir = path.joinpath("Scripts" if sys.platform.startswith("win") else "bin")
+ try:
+ if not bindir.is_dir():
+ return False
+ except OSError:
+ return False
+ activates = (
+ "activate",
+ "activate.csh",
+ "activate.fish",
+ "Activate",
+ "Activate.bat",
+ "Activate.ps1",
+ )
+ return any(fname.name in activates for fname in bindir.iterdir())
+
+
+def pytest_ignore_collect(collection_path: Path, config: Config) -> Optional[bool]:
+ ignore_paths = config._getconftest_pathlist(
+ "collect_ignore", path=collection_path.parent, rootpath=config.rootpath
+ )
+ ignore_paths = ignore_paths or []
+ excludeopt = config.getoption("ignore")
+ if excludeopt:
+ ignore_paths.extend(absolutepath(x) for x in excludeopt)
+
+ if collection_path in ignore_paths:
+ return True
+
+ ignore_globs = config._getconftest_pathlist(
+ "collect_ignore_glob", path=collection_path.parent, rootpath=config.rootpath
+ )
+ ignore_globs = ignore_globs or []
+ excludeglobopt = config.getoption("ignore_glob")
+ if excludeglobopt:
+ ignore_globs.extend(absolutepath(x) for x in excludeglobopt)
+
+ if any(fnmatch.fnmatch(str(collection_path), str(glob)) for glob in ignore_globs):
+ return True
+
+ allow_in_venv = config.getoption("collect_in_virtualenv")
+ if not allow_in_venv and _in_venv(collection_path):
+ return True
+ return None
+
+
+def pytest_collection_modifyitems(items: List[nodes.Item], config: Config) -> None:
+ deselect_prefixes = tuple(config.getoption("deselect") or [])
+ if not deselect_prefixes:
+ return
+
+ remaining = []
+ deselected = []
+ for colitem in items:
+ if colitem.nodeid.startswith(deselect_prefixes):
+ deselected.append(colitem)
+ else:
+ remaining.append(colitem)
+
+ if deselected:
+ config.hook.pytest_deselected(items=deselected)
+ items[:] = remaining
+
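+# The hook above implements the ``--deselect`` option; an illustrative
+# invocation (paths are hypothetical):
+#
+#     pytest --deselect tests/test_mod.py::test_flaky --deselect tests/test_other.py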
+
+class FSHookProxy:
+ def __init__(self, pm: PytestPluginManager, remove_mods) -> None:
+ self.pm = pm
+ self.remove_mods = remove_mods
+
+ def __getattr__(self, name: str):
+ x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods)
+ self.__dict__[name] = x
+ return x
+
+
+class Interrupted(KeyboardInterrupt):
+ """Signals that the test run was interrupted."""
+
+ __module__ = "builtins" # For py3.
+
+
+class Failed(Exception):
+ """Signals a stop as failed test run."""
+
+
+@attr.s(slots=True, auto_attribs=True)
+class _bestrelpath_cache(Dict[Path, str]):
+ path: Path
+
+ def __missing__(self, path: Path) -> str:
+ r = bestrelpath(self.path, path)
+ self[path] = r
+ return r
+
+
+@final
+class Session(nodes.FSCollector):
+ Interrupted = Interrupted
+ Failed = Failed
+ # Set on the session by runner.pytest_sessionstart.
+ _setupstate: SetupState
+ # Set on the session by fixtures.pytest_sessionstart.
+ _fixturemanager: FixtureManager
+ exitstatus: Union[int, ExitCode]
+
+ def __init__(self, config: Config) -> None:
+ super().__init__(
+ path=config.rootpath,
+ fspath=None,
+ parent=None,
+ config=config,
+ session=self,
+ nodeid="",
+ )
+ self.testsfailed = 0
+ self.testscollected = 0
+ self.shouldstop: Union[bool, str] = False
+ self.shouldfail: Union[bool, str] = False
+ self.trace = config.trace.root.get("collection")
+ self._initialpaths: FrozenSet[Path] = frozenset()
+
+ self._bestrelpathcache: Dict[Path, str] = _bestrelpath_cache(config.rootpath)
+
+ self.config.pluginmanager.register(self, name="session")
+
+ @classmethod
+ def from_config(cls, config: Config) -> "Session":
+ session: Session = cls._create(config=config)
+ return session
+
+ def __repr__(self) -> str:
+ return "<%s %s exitstatus=%r testsfailed=%d testscollected=%d>" % (
+ self.__class__.__name__,
+ self.name,
+ getattr(self, "exitstatus", "<UNSET>"),
+ self.testsfailed,
+ self.testscollected,
+ )
+
+ @property
+ def startpath(self) -> Path:
+ """The path from which pytest was invoked.
+
+ .. versionadded:: 7.0.0
+ """
+ return self.config.invocation_params.dir
+
+ def _node_location_to_relpath(self, node_path: Path) -> str:
+ # bestrelpath is a quite slow function.
+ return self._bestrelpathcache[node_path]
+
+ @hookimpl(tryfirst=True)
+ def pytest_collectstart(self) -> None:
+ if self.shouldfail:
+ raise self.Failed(self.shouldfail)
+ if self.shouldstop:
+ raise self.Interrupted(self.shouldstop)
+
+ @hookimpl(tryfirst=True)
+ def pytest_runtest_logreport(
+ self, report: Union[TestReport, CollectReport]
+ ) -> None:
+ if report.failed and not hasattr(report, "wasxfail"):
+ self.testsfailed += 1
+ maxfail = self.config.getvalue("maxfail")
+ if maxfail and self.testsfailed >= maxfail:
+ self.shouldfail = "stopping after %d failures" % (self.testsfailed)
+
+ pytest_collectreport = pytest_runtest_logreport
+
+ def isinitpath(self, path: Union[str, "os.PathLike[str]"]) -> bool:
+ # Optimization: Path(Path(...)) is much slower than isinstance.
+ path_ = path if isinstance(path, Path) else Path(path)
+ return path_ in self._initialpaths
+
+ def gethookproxy(self, fspath: "os.PathLike[str]"):
+ # Optimization: Path(Path(...)) is much slower than isinstance.
+ path = fspath if isinstance(fspath, Path) else Path(fspath)
+ pm = self.config.pluginmanager
+ # Check if we have the common case of running
+ # hooks with all conftest.py files.
+ my_conftestmodules = pm._getconftestmodules(
+ path,
+ self.config.getoption("importmode"),
+ rootpath=self.config.rootpath,
+ )
+ remove_mods = pm._conftest_plugins.difference(my_conftestmodules)
+ if remove_mods:
+ # One or more conftests are not in use at this fspath.
+ from .config.compat import PathAwareHookProxy
+
+ proxy = PathAwareHookProxy(FSHookProxy(pm, remove_mods))
+ else:
+ # All plugins are active for this fspath.
+ proxy = self.config.hook
+ return proxy
+
+ def _recurse(self, direntry: "os.DirEntry[str]") -> bool:
+ if direntry.name == "__pycache__":
+ return False
+ fspath = Path(direntry.path)
+ ihook = self.gethookproxy(fspath.parent)
+ if ihook.pytest_ignore_collect(collection_path=fspath, config=self.config):
+ return False
+ norecursepatterns = self.config.getini("norecursedirs")
+ if any(fnmatch_ex(pat, fspath) for pat in norecursepatterns):
+ return False
+ return True
+
+ def _collectfile(
+ self, fspath: Path, handle_dupes: bool = True
+ ) -> Sequence[nodes.Collector]:
+ assert (
+ fspath.is_file()
+ ), "{!r} is not a file (isdir={!r}, exists={!r}, islink={!r})".format(
+ fspath, fspath.is_dir(), fspath.exists(), fspath.is_symlink()
+ )
+ ihook = self.gethookproxy(fspath)
+ if not self.isinitpath(fspath):
+ if ihook.pytest_ignore_collect(collection_path=fspath, config=self.config):
+ return ()
+
+ if handle_dupes:
+ keepduplicates = self.config.getoption("keepduplicates")
+ if not keepduplicates:
+ duplicate_paths = self.config.pluginmanager._duplicatepaths
+ if fspath in duplicate_paths:
+ return ()
+ else:
+ duplicate_paths.add(fspath)
+
+ return ihook.pytest_collect_file(file_path=fspath, parent=self) # type: ignore[no-any-return]
+
+ @overload
+ def perform_collect(
+ self, args: Optional[Sequence[str]] = ..., genitems: "Literal[True]" = ...
+ ) -> Sequence[nodes.Item]:
+ ...
+
+ @overload
+ def perform_collect(
+ self, args: Optional[Sequence[str]] = ..., genitems: bool = ...
+ ) -> Sequence[Union[nodes.Item, nodes.Collector]]:
+ ...
+
+ def perform_collect(
+ self, args: Optional[Sequence[str]] = None, genitems: bool = True
+ ) -> Sequence[Union[nodes.Item, nodes.Collector]]:
+ """Perform the collection phase for this session.
+
+ This is called by the default :hook:`pytest_collection` hook
+ implementation; see the documentation of this hook for more details.
+ For testing purposes, it may also be called directly on a fresh
+ ``Session``.
+
+ This function normally recursively expands any collectors collected
+ from the session to their items, and only items are returned. For
+ testing purposes, this may be suppressed by passing ``genitems=False``,
+ in which case the return value contains these collectors unexpanded,
+ and ``session.items`` is empty.
+ """
+ if args is None:
+ args = self.config.args
+
+ self.trace("perform_collect", self, args)
+ self.trace.root.indent += 1
+
+ self._notfound: List[Tuple[str, Sequence[nodes.Collector]]] = []
+ self._initial_parts: List[Tuple[Path, List[str]]] = []
+ self.items: List[nodes.Item] = []
+
+ hook = self.config.hook
+
+ items: Sequence[Union[nodes.Item, nodes.Collector]] = self.items
+ try:
+ initialpaths: List[Path] = []
+ for arg in args:
+ fspath, parts = resolve_collection_argument(
+ self.config.invocation_params.dir,
+ arg,
+ as_pypath=self.config.option.pyargs,
+ )
+ self._initial_parts.append((fspath, parts))
+ initialpaths.append(fspath)
+ self._initialpaths = frozenset(initialpaths)
+ rep = collect_one_node(self)
+ self.ihook.pytest_collectreport(report=rep)
+ self.trace.root.indent -= 1
+ if self._notfound:
+ errors = []
+ for arg, cols in self._notfound:
+ line = f"(no name {arg!r} in any of {cols!r})"
+ errors.append(f"not found: {arg}\n{line}")
+ raise UsageError(*errors)
+ if not genitems:
+ items = rep.result
+ else:
+ if rep.passed:
+ for node in rep.result:
+ self.items.extend(self.genitems(node))
+
+ self.config.pluginmanager.check_pending()
+ hook.pytest_collection_modifyitems(
+ session=self, config=self.config, items=items
+ )
+ finally:
+ hook.pytest_collection_finish(session=self)
+
+ self.testscollected = len(items)
+ return items
+
+ def collect(self) -> Iterator[Union[nodes.Item, nodes.Collector]]:
+ from _pytest.python import Package
+
+ # Keep track of any collected nodes in here, so we don't duplicate fixtures.
+ node_cache1: Dict[Path, Sequence[nodes.Collector]] = {}
+ node_cache2: Dict[Tuple[Type[nodes.Collector], Path], nodes.Collector] = {}
+
+ # Keep track of any collected collectors in matchnodes paths, so they
+ # are not collected more than once.
+ matchnodes_cache: Dict[Tuple[Type[nodes.Collector], str], CollectReport] = {}
+
+ # Dirnames of pkgs with dunder-init files.
+ pkg_roots: Dict[str, Package] = {}
+
+ for argpath, names in self._initial_parts:
+ self.trace("processing argument", (argpath, names))
+ self.trace.root.indent += 1
+
+ # Start with a Session root, and delve to argpath item (dir or file)
+ # and stack all Packages found on the way.
+ # No point in finding packages when collecting doctests.
+ if not self.config.getoption("doctestmodules", False):
+ pm = self.config.pluginmanager
+ confcutdir = pm._confcutdir
+ for parent in (argpath, *argpath.parents):
+ if confcutdir and parent in confcutdir.parents:
+ break
+
+ if parent.is_dir():
+ pkginit = parent / "__init__.py"
+ if pkginit.is_file() and pkginit not in node_cache1:
+ col = self._collectfile(pkginit, handle_dupes=False)
+ if col:
+ if isinstance(col[0], Package):
+ pkg_roots[str(parent)] = col[0]
+ node_cache1[col[0].path] = [col[0]]
+
+ # If it's a directory argument, recurse and look for any Subpackages.
+ # Let the Package collector deal with subnodes, don't collect here.
+ if argpath.is_dir():
+ assert not names, f"invalid arg {(argpath, names)!r}"
+
+ seen_dirs: Set[Path] = set()
+ for direntry in visit(str(argpath), self._recurse):
+ if not direntry.is_file():
+ continue
+
+ path = Path(direntry.path)
+ dirpath = path.parent
+
+ if dirpath not in seen_dirs:
+ # Collect packages first.
+ seen_dirs.add(dirpath)
+ pkginit = dirpath / "__init__.py"
+ if pkginit.exists():
+ for x in self._collectfile(pkginit):
+ yield x
+ if isinstance(x, Package):
+ pkg_roots[str(dirpath)] = x
+ if str(dirpath) in pkg_roots:
+ # Do not collect packages here.
+ continue
+
+ for x in self._collectfile(path):
+ key2 = (type(x), x.path)
+ if key2 in node_cache2:
+ yield node_cache2[key2]
+ else:
+ node_cache2[key2] = x
+ yield x
+ else:
+ assert argpath.is_file()
+
+ if argpath in node_cache1:
+ col = node_cache1[argpath]
+ else:
+ collect_root = pkg_roots.get(str(argpath.parent), self)
+ col = collect_root._collectfile(argpath, handle_dupes=False)
+ if col:
+ node_cache1[argpath] = col
+
+ matching = []
+ work: List[
+ Tuple[Sequence[Union[nodes.Item, nodes.Collector]], Sequence[str]]
+ ] = [(col, names)]
+ while work:
+ self.trace("matchnodes", col, names)
+ self.trace.root.indent += 1
+
+ matchnodes, matchnames = work.pop()
+ for node in matchnodes:
+ if not matchnames:
+ matching.append(node)
+ continue
+ if not isinstance(node, nodes.Collector):
+ continue
+ key = (type(node), node.nodeid)
+ if key in matchnodes_cache:
+ rep = matchnodes_cache[key]
+ else:
+ rep = collect_one_node(node)
+ matchnodes_cache[key] = rep
+ if rep.passed:
+ submatchnodes = []
+ for r in rep.result:
+ # TODO: Remove parametrized workaround once collection structure contains
+ # parametrization.
+ if (
+ r.name == matchnames[0]
+ or r.name.split("[")[0] == matchnames[0]
+ ):
+ submatchnodes.append(r)
+ if submatchnodes:
+ work.append((submatchnodes, matchnames[1:]))
+ else:
+ # Report collection failures here to avoid failing to run some test
+ # specified in the command line because the module could not be
+ # imported (#134).
+ node.ihook.pytest_collectreport(report=rep)
+
+ self.trace("matchnodes finished -> ", len(matching), "nodes")
+ self.trace.root.indent -= 1
+
+ if not matching:
+ report_arg = "::".join((str(argpath), *names))
+ self._notfound.append((report_arg, col))
+ continue
+
+ # If __init__.py was the only file requested, then the matched
+ # node will be the corresponding Package (by default), and the
+ # first yielded item will be the __init__ Module itself, so
+ # just use that. If this special case isn't taken, then all the
+ # files in the package will be yielded.
+ if argpath.name == "__init__.py" and isinstance(matching[0], Package):
+ try:
+ yield next(iter(matching[0].collect()))
+ except StopIteration:
+ # The package collects nothing with only an __init__.py
+ # file in it, which gets ignored by the default
+ # "python_files" option.
+ pass
+ continue
+
+ yield from matching
+
+ self.trace.root.indent -= 1
+
+ def genitems(
+ self, node: Union[nodes.Item, nodes.Collector]
+ ) -> Iterator[nodes.Item]:
+ self.trace("genitems", node)
+ if isinstance(node, nodes.Item):
+ node.ihook.pytest_itemcollected(item=node)
+ yield node
+ else:
+ assert isinstance(node, nodes.Collector)
+ rep = collect_one_node(node)
+ if rep.passed:
+ for subnode in rep.result:
+ yield from self.genitems(subnode)
+ node.ihook.pytest_collectreport(report=rep)
+
+
+def search_pypath(module_name: str) -> str:
+ """Search sys.path for the given a dotted module name, and return its file system path."""
+ try:
+ spec = importlib.util.find_spec(module_name)
+ # AttributeError: looks like package module, but actually filename
+ # ImportError: module does not exist
+ # ValueError: not a module name
+ except (AttributeError, ImportError, ValueError):
+ return module_name
+ if spec is None or spec.origin is None or spec.origin == "namespace":
+ return module_name
+ elif spec.submodule_search_locations:
+ return os.path.dirname(spec.origin)
+ else:
+ return spec.origin
+
+
+def resolve_collection_argument(
+ invocation_path: Path, arg: str, *, as_pypath: bool = False
+) -> Tuple[Path, List[str]]:
+ """Parse path arguments optionally containing selection parts and return (fspath, names).
+
+ Command-line arguments can point to files and/or directories, and optionally contain
+ parts for specific tests selection, for example:
+
+ "pkg/tests/test_foo.py::TestClass::test_foo"
+
+ This function ensures the path exists, and returns a tuple:
+
+ (Path("/full/path/to/pkg/tests/test_foo.py"), ["TestClass", "test_foo"])
+
+ When as_pypath is True, expects that the command-line argument actually contains
+ module paths instead of file-system paths:
+
+ "pkg.tests.test_foo::TestClass::test_foo"
+
+ In which case we search sys.path for a matching module, and then return the *path* to the
+ found module.
+
+ If the path doesn't exist, raise UsageError.
+ If the path is a directory and selection parts are present, raise UsageError.
+ """
+ base, squacket, rest = str(arg).partition("[")
+ strpath, *parts = base.split("::")
+ if parts:
+ parts[-1] = f"{parts[-1]}{squacket}{rest}"
+ if as_pypath:
+ strpath = search_pypath(strpath)
+ fspath = invocation_path / strpath
+ fspath = absolutepath(fspath)
+ if not fspath.exists():
+ msg = (
+ "module or package not found: {arg} (missing __init__.py?)"
+ if as_pypath
+ else "file or directory not found: {arg}"
+ )
+ raise UsageError(msg.format(arg=arg))
+ if parts and fspath.is_dir():
+ msg = (
+ "package argument cannot contain :: selection parts: {arg}"
+ if as_pypath
+ else "directory argument cannot contain :: selection parts: {arg}"
+ )
+ raise UsageError(msg.format(arg=arg))
+ return fspath, parts
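A rough illustration of `resolve_collection_argument` behaviour (note how the `partition("[")` above keeps a parametrization suffix attached to the last selection part). This sketch assumes the function is imported from the private `_pytest.main` module and uses a throwaway temporary directory:

    import tempfile
    from pathlib import Path
    from _pytest.main import resolve_collection_argument  # private API

    with tempfile.TemporaryDirectory() as tmp:
        Path(tmp, "test_foo.py").write_text("def test_bar(): pass\n")
        fspath, parts = resolve_collection_argument(
            Path(tmp), "test_foo.py::test_bar[1-2]"
        )
        assert fspath == Path(tmp, "test_foo.py")
        assert parts == ["test_bar[1-2]"]  # the "[1-2]" suffix stays attached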
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/mark/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/mark/__init__.py
new file mode 100644
index 0000000000..7e082f2e6e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/mark/__init__.py
@@ -0,0 +1,282 @@
+"""Generic mechanism for marking and selecting python functions."""
+import warnings
+from typing import AbstractSet
+from typing import Collection
+from typing import List
+from typing import Optional
+from typing import TYPE_CHECKING
+from typing import Union
+
+import attr
+
+from .expression import Expression
+from .expression import ParseError
+from .structures import EMPTY_PARAMETERSET_OPTION
+from .structures import get_empty_parameterset_mark
+from .structures import Mark
+from .structures import MARK_GEN
+from .structures import MarkDecorator
+from .structures import MarkGenerator
+from .structures import ParameterSet
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.config import hookimpl
+from _pytest.config import UsageError
+from _pytest.config.argparsing import Parser
+from _pytest.deprecated import MINUS_K_COLON
+from _pytest.deprecated import MINUS_K_DASH
+from _pytest.stash import StashKey
+
+if TYPE_CHECKING:
+ from _pytest.nodes import Item
+
+
+__all__ = [
+ "MARK_GEN",
+ "Mark",
+ "MarkDecorator",
+ "MarkGenerator",
+ "ParameterSet",
+ "get_empty_parameterset_mark",
+]
+
+
+old_mark_config_key = StashKey[Optional[Config]]()
+
+
+def param(
+ *values: object,
+ marks: Union[MarkDecorator, Collection[Union[MarkDecorator, Mark]]] = (),
+ id: Optional[str] = None,
+) -> ParameterSet:
+ """Specify a parameter in `pytest.mark.parametrize`_ calls or
+ :ref:`parametrized fixtures <fixture-parametrize-marks>`.
+
+ .. code-block:: python
+
+ @pytest.mark.parametrize(
+ "test_input,expected",
+ [
+ ("3+5", 8),
+ pytest.param("6*9", 42, marks=pytest.mark.xfail),
+ ],
+ )
+ def test_eval(test_input, expected):
+ assert eval(test_input) == expected
+
+ :param values: Variable args of the values of the parameter set, in order.
+ :keyword marks: A single mark or a list of marks to be applied to this parameter set.
+ :keyword str id: The id to attribute to this parameter set.
+ """
+ return ParameterSet.param(*values, marks=marks, id=id)
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("general")
+ group._addoption(
+ "-k",
+ action="store",
+ dest="keyword",
+ default="",
+ metavar="EXPRESSION",
+ help="only run tests which match the given substring expression. "
+ "An expression is a python evaluatable expression "
+ "where all names are substring-matched against test names "
+ "and their parent classes. Example: -k 'test_method or test_"
+ "other' matches all test functions and classes whose name "
+ "contains 'test_method' or 'test_other', while -k 'not test_method' "
+ "matches those that don't contain 'test_method' in their names. "
+ "-k 'not test_method and not test_other' will eliminate the matches. "
+ "Additionally keywords are matched to classes and functions "
+ "containing extra names in their 'extra_keyword_matches' set, "
+ "as well as functions which have names assigned directly to them. "
+ "The matching is case-insensitive.",
+ )
+
+ group._addoption(
+ "-m",
+ action="store",
+ dest="markexpr",
+ default="",
+ metavar="MARKEXPR",
+ help="only run tests matching given mark expression.\n"
+ "For example: -m 'mark1 and not mark2'.",
+ )
+
+ group.addoption(
+ "--markers",
+ action="store_true",
+ help="show markers (builtin, plugin and per-project ones).",
+ )
+
+ parser.addini("markers", "markers for test functions", "linelist")
+ parser.addini(EMPTY_PARAMETERSET_OPTION, "default marker for empty parametersets")
+
+
+@hookimpl(tryfirst=True)
+def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:
+ import _pytest.config
+
+ if config.option.markers:
+ config._do_configure()
+ tw = _pytest.config.create_terminal_writer(config)
+ for line in config.getini("markers"):
+ parts = line.split(":", 1)
+ name = parts[0]
+ rest = parts[1] if len(parts) == 2 else ""
+ tw.write("@pytest.mark.%s:" % name, bold=True)
+ tw.line(rest)
+ tw.line()
+ config._ensure_unconfigure()
+ return 0
+
+ return None
+
+
+@attr.s(slots=True, auto_attribs=True)
+class KeywordMatcher:
+ """A matcher for keywords.
+
+ Given a list of names, matches any substring of one of these names. The
+ string inclusion check is case-insensitive.
+
+ Will match on the name of colitem, including the names of its parents.
+ Only matches names of items which are either a :class:`Class` or a
+ :class:`Function`.
+
+ Additionally, matches on names in the 'extra_keyword_matches' set of
+ any item, as well as names directly assigned to test functions.
+ """
+
+ _names: AbstractSet[str]
+
+ @classmethod
+ def from_item(cls, item: "Item") -> "KeywordMatcher":
+ mapped_names = set()
+
+ # Add the names of the current item and any parent items.
+ import pytest
+
+ for node in item.listchain():
+ if not isinstance(node, pytest.Session):
+ mapped_names.add(node.name)
+
+ # Add the names added as extra keywords to current or parent items.
+ mapped_names.update(item.listextrakeywords())
+
+ # Add the names attached to the current function through direct assignment.
+ function_obj = getattr(item, "function", None)
+ if function_obj:
+ mapped_names.update(function_obj.__dict__)
+
+ # Add the markers to the keywords as we no longer handle them correctly.
+ mapped_names.update(mark.name for mark in item.iter_markers())
+
+ return cls(mapped_names)
+
+ def __call__(self, subname: str) -> bool:
+ subname = subname.lower()
+ names = (name.lower() for name in self._names)
+
+ for name in names:
+ if subname in name:
+ return True
+ return False
+
+
+def deselect_by_keyword(items: "List[Item]", config: Config) -> None:
+ keywordexpr = config.option.keyword.lstrip()
+ if not keywordexpr:
+ return
+
+ if keywordexpr.startswith("-"):
+ # To be removed in pytest 8.0.0.
+ warnings.warn(MINUS_K_DASH, stacklevel=2)
+ keywordexpr = "not " + keywordexpr[1:]
+ selectuntil = False
+ if keywordexpr[-1:] == ":":
+ # To be removed in pytest 8.0.0.
+ warnings.warn(MINUS_K_COLON, stacklevel=2)
+ selectuntil = True
+ keywordexpr = keywordexpr[:-1]
+
+ expr = _parse_expression(keywordexpr, "Wrong expression passed to '-k'")
+
+ remaining = []
+ deselected = []
+ for colitem in items:
+ if keywordexpr and not expr.evaluate(KeywordMatcher.from_item(colitem)):
+ deselected.append(colitem)
+ else:
+ if selectuntil:
+ keywordexpr = None
+ remaining.append(colitem)
+
+ if deselected:
+ config.hook.pytest_deselected(items=deselected)
+ items[:] = remaining
+
+
+@attr.s(slots=True, auto_attribs=True)
+class MarkMatcher:
+ """A matcher for markers which are present.
+
+ Tries to match on any marker names, attached to the given colitem.
+ """
+
+ own_mark_names: AbstractSet[str]
+
+ @classmethod
+ def from_item(cls, item: "Item") -> "MarkMatcher":
+ mark_names = {mark.name for mark in item.iter_markers()}
+ return cls(mark_names)
+
+ def __call__(self, name: str) -> bool:
+ return name in self.own_mark_names
+
+
+def deselect_by_mark(items: "List[Item]", config: Config) -> None:
+ matchexpr = config.option.markexpr
+ if not matchexpr:
+ return
+
+ expr = _parse_expression(matchexpr, "Wrong expression passed to '-m'")
+ remaining: List[Item] = []
+ deselected: List[Item] = []
+ for item in items:
+ if expr.evaluate(MarkMatcher.from_item(item)):
+ remaining.append(item)
+ else:
+ deselected.append(item)
+ if deselected:
+ config.hook.pytest_deselected(items=deselected)
+ items[:] = remaining
+
+
+def _parse_expression(expr: str, exc_message: str) -> Expression:
+ try:
+ return Expression.compile(expr)
+ except ParseError as e:
+ raise UsageError(f"{exc_message}: {expr}: {e}") from None
+
+
+def pytest_collection_modifyitems(items: "List[Item]", config: Config) -> None:
+ deselect_by_keyword(items, config)
+ deselect_by_mark(items, config)
+
+
+def pytest_configure(config: Config) -> None:
+ config.stash[old_mark_config_key] = MARK_GEN._config
+ MARK_GEN._config = config
+
+ empty_parameterset = config.getini(EMPTY_PARAMETERSET_OPTION)
+
+ if empty_parameterset not in ("skip", "xfail", "fail_at_collect", None, ""):
+ raise UsageError(
+ "{!s} must be one of skip, xfail or fail_at_collect"
+ " but it is {!r}".format(EMPTY_PARAMETERSET_OPTION, empty_parameterset)
+ )
+
+
+def pytest_unconfigure(config: Config) -> None:
+ MARK_GEN._config = config.stash.get(old_mark_config_key, None)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/mark/expression.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/mark/expression.py
new file mode 100644
index 0000000000..92220d7723
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/mark/expression.py
@@ -0,0 +1,225 @@
+r"""Evaluate match expressions, as used by `-k` and `-m`.
+
+The grammar is:
+
+expression: expr? EOF
+expr: and_expr ('or' and_expr)*
+and_expr: not_expr ('and' not_expr)*
+not_expr: 'not' not_expr | '(' expr ')' | ident
+ident: (\w|:|\+|-|\.|\[|\]|\\|/)+
+
+The semantics are:
+
+- Empty expression evaluates to False.
+- ident evaluates to True or False according to a provided matcher function.
+- or/and/not evaluate according to the usual boolean semantics.
+"""
+import ast
+import enum
+import re
+import types
+from typing import Callable
+from typing import Iterator
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+from typing import TYPE_CHECKING
+
+import attr
+
+if TYPE_CHECKING:
+ from typing import NoReturn
+
+
+__all__ = [
+ "Expression",
+ "ParseError",
+]
+
+
+class TokenType(enum.Enum):
+ LPAREN = "left parenthesis"
+ RPAREN = "right parenthesis"
+ OR = "or"
+ AND = "and"
+ NOT = "not"
+ IDENT = "identifier"
+ EOF = "end of input"
+
+
+@attr.s(frozen=True, slots=True, auto_attribs=True)
+class Token:
+ type: TokenType
+ value: str
+ pos: int
+
+
+class ParseError(Exception):
+ """The expression contains invalid syntax.
+
+ :param column: The column in the line where the error occurred (1-based).
+ :param message: A description of the error.
+ """
+
+ def __init__(self, column: int, message: str) -> None:
+ self.column = column
+ self.message = message
+
+ def __str__(self) -> str:
+ return f"at column {self.column}: {self.message}"
+
+
+class Scanner:
+ __slots__ = ("tokens", "current")
+
+ def __init__(self, input: str) -> None:
+ self.tokens = self.lex(input)
+ self.current = next(self.tokens)
+
+ def lex(self, input: str) -> Iterator[Token]:
+ pos = 0
+ while pos < len(input):
+ if input[pos] in (" ", "\t"):
+ pos += 1
+ elif input[pos] == "(":
+ yield Token(TokenType.LPAREN, "(", pos)
+ pos += 1
+ elif input[pos] == ")":
+ yield Token(TokenType.RPAREN, ")", pos)
+ pos += 1
+ else:
+ match = re.match(r"(?:\w|:|\+|-|\.|\[|\]|\\|/)+", input[pos:])
+ if match:
+ value = match.group(0)
+ if value == "or":
+ yield Token(TokenType.OR, value, pos)
+ elif value == "and":
+ yield Token(TokenType.AND, value, pos)
+ elif value == "not":
+ yield Token(TokenType.NOT, value, pos)
+ else:
+ yield Token(TokenType.IDENT, value, pos)
+ pos += len(value)
+ else:
+ raise ParseError(
+ pos + 1,
+ f'unexpected character "{input[pos]}"',
+ )
+ yield Token(TokenType.EOF, "", pos)
+
+ def accept(self, type: TokenType, *, reject: bool = False) -> Optional[Token]:
+ if self.current.type is type:
+ token = self.current
+ if token.type is not TokenType.EOF:
+ self.current = next(self.tokens)
+ return token
+ if reject:
+ self.reject((type,))
+ return None
+
+ def reject(self, expected: Sequence[TokenType]) -> "NoReturn":
+ raise ParseError(
+ self.current.pos + 1,
+ "expected {}; got {}".format(
+ " OR ".join(type.value for type in expected),
+ self.current.type.value,
+ ),
+ )
+
+
+# True, False and None are legal match expression identifiers,
+# but illegal as Python identifiers. To fix this, this prefix
+# is added to identifiers in the conversion to Python AST.
+IDENT_PREFIX = "$"
+
+
+def expression(s: Scanner) -> ast.Expression:
+ if s.accept(TokenType.EOF):
+ ret: ast.expr = ast.NameConstant(False)
+ else:
+ ret = expr(s)
+ s.accept(TokenType.EOF, reject=True)
+ return ast.fix_missing_locations(ast.Expression(ret))
+
+
+def expr(s: Scanner) -> ast.expr:
+ ret = and_expr(s)
+ while s.accept(TokenType.OR):
+ rhs = and_expr(s)
+ ret = ast.BoolOp(ast.Or(), [ret, rhs])
+ return ret
+
+
+def and_expr(s: Scanner) -> ast.expr:
+ ret = not_expr(s)
+ while s.accept(TokenType.AND):
+ rhs = not_expr(s)
+ ret = ast.BoolOp(ast.And(), [ret, rhs])
+ return ret
+
+
+def not_expr(s: Scanner) -> ast.expr:
+ if s.accept(TokenType.NOT):
+ return ast.UnaryOp(ast.Not(), not_expr(s))
+ if s.accept(TokenType.LPAREN):
+ ret = expr(s)
+ s.accept(TokenType.RPAREN, reject=True)
+ return ret
+ ident = s.accept(TokenType.IDENT)
+ if ident:
+ return ast.Name(IDENT_PREFIX + ident.value, ast.Load())
+ s.reject((TokenType.NOT, TokenType.LPAREN, TokenType.IDENT))
+
+
+class MatcherAdapter(Mapping[str, bool]):
+ """Adapts a matcher function to a locals mapping as required by eval()."""
+
+ def __init__(self, matcher: Callable[[str], bool]) -> None:
+ self.matcher = matcher
+
+ def __getitem__(self, key: str) -> bool:
+ return self.matcher(key[len(IDENT_PREFIX) :])
+
+ def __iter__(self) -> Iterator[str]:
+ raise NotImplementedError()
+
+ def __len__(self) -> int:
+ raise NotImplementedError()
+
+
+class Expression:
+ """A compiled match expression as used by -k and -m.
+
+ The expression can be evaluated against different matchers.
+ """
+
+ __slots__ = ("code",)
+
+ def __init__(self, code: types.CodeType) -> None:
+ self.code = code
+
+ @classmethod
+ def compile(cls, input: str) -> "Expression":
+ """Compile a match expression.
+
+ :param input: The input expression - one line.
+ """
+ astexpr = expression(Scanner(input))
+ code: types.CodeType = compile(
+ astexpr,
+ filename="<pytest match expression>",
+ mode="eval",
+ )
+ return Expression(code)
+
+ def evaluate(self, matcher: Callable[[str], bool]) -> bool:
+ """Evaluate the match expression.
+
+ :param matcher:
+ Given an identifier, should return whether it matches or not.
+ Should be prepared to handle arbitrary strings as input.
+
+ :returns: Whether the expression matches or not.
+ """
+ ret: bool = eval(self.code, {"__builtins__": {}}, MatcherAdapter(matcher))
+ return ret
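Putting the pieces of this module together: `Expression.compile` turns a `-k`/`-m` style string into a code object, and `evaluate` runs it against any matcher callable. A small sketch using a hard-coded name set instead of real test items:

    from _pytest.mark.expression import Expression  # the class shown above

    names = {"slow", "network"}              # hypothetical matcher data
    matcher = lambda ident: ident in names

    assert Expression.compile("slow and not network").evaluate(matcher) is False
    assert Expression.compile("slow or serial").evaluate(matcher) is True
    assert Expression.compile("").evaluate(matcher) is False  # empty -> False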
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/mark/structures.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/mark/structures.py
new file mode 100644
index 0000000000..0e42cd8de5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/mark/structures.py
@@ -0,0 +1,595 @@
+import collections.abc
+import inspect
+import warnings
+from typing import Any
+from typing import Callable
+from typing import Collection
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import MutableMapping
+from typing import NamedTuple
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+import attr
+
+from .._code import getfslineno
+from ..compat import ascii_escaped
+from ..compat import final
+from ..compat import NOTSET
+from ..compat import NotSetType
+from _pytest.config import Config
+from _pytest.deprecated import check_ispytest
+from _pytest.outcomes import fail
+from _pytest.warning_types import PytestUnknownMarkWarning
+
+if TYPE_CHECKING:
+ from ..nodes import Node
+
+
+EMPTY_PARAMETERSET_OPTION = "empty_parameter_set_mark"
+
+
+def istestfunc(func) -> bool:
+ return callable(func) and getattr(func, "__name__", "<lambda>") != "<lambda>"
+
+
+def get_empty_parameterset_mark(
+ config: Config, argnames: Sequence[str], func
+) -> "MarkDecorator":
+ from ..nodes import Collector
+
+ fs, lineno = getfslineno(func)
+ reason = "got empty parameter set %r, function %s at %s:%d" % (
+ argnames,
+ func.__name__,
+ fs,
+ lineno,
+ )
+
+ requested_mark = config.getini(EMPTY_PARAMETERSET_OPTION)
+ if requested_mark in ("", None, "skip"):
+ mark = MARK_GEN.skip(reason=reason)
+ elif requested_mark == "xfail":
+ mark = MARK_GEN.xfail(reason=reason, run=False)
+ elif requested_mark == "fail_at_collect":
+ f_name = func.__name__
+ _, lineno = getfslineno(func)
+ raise Collector.CollectError(
+ "Empty parameter set in '%s' at line %d" % (f_name, lineno + 1)
+ )
+ else:
+ raise LookupError(requested_mark)
+ return mark
+
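The `empty_parameter_set_mark` ini option read above decides what happens when `@pytest.mark.parametrize` is handed an empty list of values (for instance one computed at runtime). A hedged sketch of the three supported settings; the test module below is hypothetical:

    # pytest.ini (hypothetical):
    #   [pytest]
    #   empty_parameter_set_mark = xfail   # or "skip" (the default) or "fail_at_collect"
    import pytest

    @pytest.mark.parametrize("value", [])  # empty parameter set
    def test_empty(value):
        # "skip": the test is skipped; "xfail": it is xfailed with run=False;
        # "fail_at_collect": collection raises Collector.CollectError.
        assert value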
+
+class ParameterSet(
+ NamedTuple(
+ "ParameterSet",
+ [
+ ("values", Sequence[Union[object, NotSetType]]),
+ ("marks", Collection[Union["MarkDecorator", "Mark"]]),
+ ("id", Optional[str]),
+ ],
+ )
+):
+ @classmethod
+ def param(
+ cls,
+ *values: object,
+ marks: Union["MarkDecorator", Collection[Union["MarkDecorator", "Mark"]]] = (),
+ id: Optional[str] = None,
+ ) -> "ParameterSet":
+ if isinstance(marks, MarkDecorator):
+ marks = (marks,)
+ else:
+ assert isinstance(marks, collections.abc.Collection)
+
+ if id is not None:
+ if not isinstance(id, str):
+ raise TypeError(f"Expected id to be a string, got {type(id)}: {id!r}")
+ id = ascii_escaped(id)
+ return cls(values, marks, id)
+
+ @classmethod
+ def extract_from(
+ cls,
+ parameterset: Union["ParameterSet", Sequence[object], object],
+ force_tuple: bool = False,
+ ) -> "ParameterSet":
+ """Extract from an object or objects.
+
+ :param parameterset:
+ A legacy style parameterset that may or may not be a tuple,
+ and may or may not be wrapped into a mess of mark objects.
+
+ :param force_tuple:
+ Enforce tuple wrapping so single argument tuple values
+ don't get decomposed and break tests.
+ """
+
+ if isinstance(parameterset, cls):
+ return parameterset
+ if force_tuple:
+ return cls.param(parameterset)
+ else:
+ # TODO: Refactor to fix this type-ignore. Currently the following
+ # passes type-checking but crashes:
+ #
+ # @pytest.mark.parametrize(('x', 'y'), [1, 2])
+ # def test_foo(x, y): pass
+ return cls(parameterset, marks=[], id=None) # type: ignore[arg-type]
+
+ @staticmethod
+ def _parse_parametrize_args(
+ argnames: Union[str, List[str], Tuple[str, ...]],
+ argvalues: Iterable[Union["ParameterSet", Sequence[object], object]],
+ *args,
+ **kwargs,
+ ) -> Tuple[Union[List[str], Tuple[str, ...]], bool]:
+ if not isinstance(argnames, (tuple, list)):
+ argnames = [x.strip() for x in argnames.split(",") if x.strip()]
+ force_tuple = len(argnames) == 1
+ else:
+ force_tuple = False
+ return argnames, force_tuple
+
+ @staticmethod
+ def _parse_parametrize_parameters(
+ argvalues: Iterable[Union["ParameterSet", Sequence[object], object]],
+ force_tuple: bool,
+ ) -> List["ParameterSet"]:
+ return [
+ ParameterSet.extract_from(x, force_tuple=force_tuple) for x in argvalues
+ ]
+
+ @classmethod
+ def _for_parametrize(
+ cls,
+ argnames: Union[str, List[str], Tuple[str, ...]],
+ argvalues: Iterable[Union["ParameterSet", Sequence[object], object]],
+ func,
+ config: Config,
+ nodeid: str,
+ ) -> Tuple[Union[List[str], Tuple[str, ...]], List["ParameterSet"]]:
+ argnames, force_tuple = cls._parse_parametrize_args(argnames, argvalues)
+ parameters = cls._parse_parametrize_parameters(argvalues, force_tuple)
+ del argvalues
+
+ if parameters:
+ # Check all parameter sets have the correct number of values.
+ for param in parameters:
+ if len(param.values) != len(argnames):
+ msg = (
+ '{nodeid}: in "parametrize" the number of names ({names_len}):\n'
+ " {names}\n"
+ "must be equal to the number of values ({values_len}):\n"
+ " {values}"
+ )
+ fail(
+ msg.format(
+ nodeid=nodeid,
+ values=param.values,
+ names=argnames,
+ names_len=len(argnames),
+ values_len=len(param.values),
+ ),
+ pytrace=False,
+ )
+ else:
+ # Empty parameter set (likely computed at runtime): create a single
+ # parameter set with NOTSET values, with the "empty parameter set" mark applied to it.
+ mark = get_empty_parameterset_mark(config, argnames, func)
+ parameters.append(
+ ParameterSet(values=(NOTSET,) * len(argnames), marks=[mark], id=None)
+ )
+ return argnames, parameters
+
+
+@final
+@attr.s(frozen=True, init=False, auto_attribs=True)
+class Mark:
+ #: Name of the mark.
+ name: str
+ #: Positional arguments of the mark decorator.
+ args: Tuple[Any, ...]
+ #: Keyword arguments of the mark decorator.
+ kwargs: Mapping[str, Any]
+
+ #: Source Mark for ids with parametrize Marks.
+ _param_ids_from: Optional["Mark"] = attr.ib(default=None, repr=False)
+ #: Resolved/generated ids with parametrize Marks.
+ _param_ids_generated: Optional[Sequence[str]] = attr.ib(default=None, repr=False)
+
+ def __init__(
+ self,
+ name: str,
+ args: Tuple[Any, ...],
+ kwargs: Mapping[str, Any],
+ param_ids_from: Optional["Mark"] = None,
+ param_ids_generated: Optional[Sequence[str]] = None,
+ *,
+ _ispytest: bool = False,
+ ) -> None:
+ """:meta private:"""
+ check_ispytest(_ispytest)
+ # Weirdness to bypass frozen=True.
+ object.__setattr__(self, "name", name)
+ object.__setattr__(self, "args", args)
+ object.__setattr__(self, "kwargs", kwargs)
+ object.__setattr__(self, "_param_ids_from", param_ids_from)
+ object.__setattr__(self, "_param_ids_generated", param_ids_generated)
+
+ def _has_param_ids(self) -> bool:
+ return "ids" in self.kwargs or len(self.args) >= 4
+
+ def combined_with(self, other: "Mark") -> "Mark":
+ """Return a new Mark which is a combination of this
+ Mark and another Mark.
+
+ Combines by appending args and merging kwargs.
+
+ :param Mark other: The mark to combine with.
+ :rtype: Mark
+ """
+ assert self.name == other.name
+
+ # Remember source of ids with parametrize Marks.
+ param_ids_from: Optional[Mark] = None
+ if self.name == "parametrize":
+ if other._has_param_ids():
+ param_ids_from = other
+ elif self._has_param_ids():
+ param_ids_from = self
+
+ return Mark(
+ self.name,
+ self.args + other.args,
+ dict(self.kwargs, **other.kwargs),
+ param_ids_from=param_ids_from,
+ _ispytest=True,
+ )
+
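`combined_with` is what runs when a `MarkDecorator` is extended with further arguments (for example via `with_args`): positional args are appended and keyword args merged. A small sketch with a hypothetical `flaky` mark:

    import pytest

    m = pytest.mark.flaky(reruns=2)              # Mark("flaky", (), {"reruns": 2})
    m2 = m.with_args(reason="network timeouts")  # merged via Mark.combined_with()
    assert m2.kwargs == {"reruns": 2, "reason": "network timeouts"}
    assert m2.args == ()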
+
+# A generic parameter designating an object to which a Mark may
+# be applied -- a test function (callable) or class.
+# Note: a lambda is not allowed, but this can't be represented.
+Markable = TypeVar("Markable", bound=Union[Callable[..., object], type])
+
+
+@attr.s(init=False, auto_attribs=True)
+class MarkDecorator:
+ """A decorator for applying a mark on test functions and classes.
+
+ ``MarkDecorators`` are created with ``pytest.mark``::
+
+ mark1 = pytest.mark.NAME # Simple MarkDecorator
+ mark2 = pytest.mark.NAME(name1=value) # Parametrized MarkDecorator
+
+ and can then be applied as decorators to test functions::
+
+ @mark2
+ def test_function():
+ pass
+
+ When a ``MarkDecorator`` is called, it does the following:
+
+ 1. If called with a single class as its only positional argument and no
+ additional keyword arguments, it attaches the mark to the class so it
+ gets applied automatically to all test cases found in that class.
+
+ 2. If called with a single function as its only positional argument and
+ no additional keyword arguments, it attaches the mark to the function,
+ containing all the arguments already stored internally in the
+ ``MarkDecorator``.
+
+ 3. When called in any other case, it returns a new ``MarkDecorator``
+ instance with the original ``MarkDecorator``'s content updated with
+ the arguments passed to this call.
+
+ Note: The rules above prevent a ``MarkDecorator`` from storing only a
+ single function or class reference as its positional argument with no
+ additional keyword or positional arguments. You can work around this by
+ using `with_args()`.
+ """
+
+ mark: Mark
+
+ def __init__(self, mark: Mark, *, _ispytest: bool = False) -> None:
+ """:meta private:"""
+ check_ispytest(_ispytest)
+ self.mark = mark
+
+ @property
+ def name(self) -> str:
+ """Alias for mark.name."""
+ return self.mark.name
+
+ @property
+ def args(self) -> Tuple[Any, ...]:
+ """Alias for mark.args."""
+ return self.mark.args
+
+ @property
+ def kwargs(self) -> Mapping[str, Any]:
+ """Alias for mark.kwargs."""
+ return self.mark.kwargs
+
+ @property
+ def markname(self) -> str:
+ """:meta private:"""
+ return self.name # for backward-compat (2.4.1 had this attr)
+
+ def with_args(self, *args: object, **kwargs: object) -> "MarkDecorator":
+ """Return a MarkDecorator with extra arguments added.
+
+ Unlike calling the MarkDecorator, with_args() can be used even
+ if the sole argument is a callable/class.
+ """
+ mark = Mark(self.name, args, kwargs, _ispytest=True)
+ return MarkDecorator(self.mark.combined_with(mark), _ispytest=True)
+
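As the note in the class docstring says, passing a single callable to a mark applies the mark to it; `with_args()` is the workaround when the callable itself should be stored as the mark's argument. A brief sketch (the `factory` mark name and callable are hypothetical):

    import pytest

    def default_factory():  # hypothetical callable we want as a mark argument
        return 42

    # pytest.mark.factory(default_factory) would *decorate* default_factory,
    # so use with_args() to keep it as an argument instead:
    factory_mark = pytest.mark.factory.with_args(default_factory)

    @factory_mark
    def test_uses_factory():
        pass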
+ # Type ignored because the overloads overlap with an incompatible
+ # return type. Not much we can do about that. Thankfully mypy picks
+ # the first match so it works out even if we break the rules.
+ @overload
+ def __call__(self, arg: Markable) -> Markable: # type: ignore[misc]
+ pass
+
+ @overload
+ def __call__(self, *args: object, **kwargs: object) -> "MarkDecorator":
+ pass
+
+ def __call__(self, *args: object, **kwargs: object):
+ """Call the MarkDecorator."""
+ if args and not kwargs:
+ func = args[0]
+ is_class = inspect.isclass(func)
+ if len(args) == 1 and (istestfunc(func) or is_class):
+ store_mark(func, self.mark)
+ return func
+ return self.with_args(*args, **kwargs)
+
+
+def get_unpacked_marks(obj: object) -> Iterable[Mark]:
+ """Obtain the unpacked marks that are stored on an object."""
+ mark_list = getattr(obj, "pytestmark", [])
+ if not isinstance(mark_list, list):
+ mark_list = [mark_list]
+ return normalize_mark_list(mark_list)
+
+
+def normalize_mark_list(
+ mark_list: Iterable[Union[Mark, MarkDecorator]]
+) -> Iterable[Mark]:
+ """
+ Normalize an iterable of Mark or MarkDecorator objects into an iterable of
+ Mark objects by retrieving the `mark` attribute on MarkDecorator instances.
+
+ :param mark_list: Marks to normalize.
+ :returns: The extracted Mark objects, yielded one by one.
+ """
+ for mark in mark_list:
+ mark_obj = getattr(mark, "mark", mark)
+ if not isinstance(mark_obj, Mark):
+ raise TypeError(f"got {repr(mark_obj)} instead of Mark")
+ yield mark_obj
+
+
+def store_mark(obj, mark: Mark) -> None:
+ """Store a Mark on an object.
+
+ This is used to implement the Mark declarations/decorators correctly.
+ """
+ assert isinstance(mark, Mark), mark
+ # Always reassign name to avoid updating pytestmark in a reference that
+ # was only borrowed.
+ obj.pytestmark = [*get_unpacked_marks(obj), mark]
+
+
+# Typing for builtin pytest marks. This is cheating; it gives builtin marks
+# special privilege, and breaks modularity. But practicality beats purity...
+if TYPE_CHECKING:
+ from _pytest.scope import _ScopeName
+
+ class _SkipMarkDecorator(MarkDecorator):
+ @overload # type: ignore[override,misc]
+ def __call__(self, arg: Markable) -> Markable:
+ ...
+
+ @overload
+ def __call__(self, reason: str = ...) -> "MarkDecorator":
+ ...
+
+ class _SkipifMarkDecorator(MarkDecorator):
+ def __call__( # type: ignore[override]
+ self,
+ condition: Union[str, bool] = ...,
+ *conditions: Union[str, bool],
+ reason: str = ...,
+ ) -> MarkDecorator:
+ ...
+
+ class _XfailMarkDecorator(MarkDecorator):
+ @overload # type: ignore[override,misc]
+ def __call__(self, arg: Markable) -> Markable:
+ ...
+
+ @overload
+ def __call__(
+ self,
+ condition: Union[str, bool] = ...,
+ *conditions: Union[str, bool],
+ reason: str = ...,
+ run: bool = ...,
+ raises: Union[Type[BaseException], Tuple[Type[BaseException], ...]] = ...,
+ strict: bool = ...,
+ ) -> MarkDecorator:
+ ...
+
+ class _ParametrizeMarkDecorator(MarkDecorator):
+ def __call__( # type: ignore[override]
+ self,
+ argnames: Union[str, List[str], Tuple[str, ...]],
+ argvalues: Iterable[Union[ParameterSet, Sequence[object], object]],
+ *,
+ indirect: Union[bool, Sequence[str]] = ...,
+ ids: Optional[
+ Union[
+ Iterable[Union[None, str, float, int, bool]],
+ Callable[[Any], Optional[object]],
+ ]
+ ] = ...,
+ scope: Optional[_ScopeName] = ...,
+ ) -> MarkDecorator:
+ ...
+
+ class _UsefixturesMarkDecorator(MarkDecorator):
+ def __call__(self, *fixtures: str) -> MarkDecorator: # type: ignore[override]
+ ...
+
+ class _FilterwarningsMarkDecorator(MarkDecorator):
+ def __call__(self, *filters: str) -> MarkDecorator: # type: ignore[override]
+ ...
+
+
+@final
+class MarkGenerator:
+ """Factory for :class:`MarkDecorator` objects - exposed as
+ a ``pytest.mark`` singleton instance.
+
+ Example::
+
+ import pytest
+
+ @pytest.mark.slowtest
+ def test_function():
+ pass
+
+ applies a 'slowtest' :class:`Mark` on ``test_function``.
+ """
+
+ # See TYPE_CHECKING above.
+ if TYPE_CHECKING:
+ skip: _SkipMarkDecorator
+ skipif: _SkipifMarkDecorator
+ xfail: _XfailMarkDecorator
+ parametrize: _ParametrizeMarkDecorator
+ usefixtures: _UsefixturesMarkDecorator
+ filterwarnings: _FilterwarningsMarkDecorator
+
+ def __init__(self, *, _ispytest: bool = False) -> None:
+ check_ispytest(_ispytest)
+ self._config: Optional[Config] = None
+ self._markers: Set[str] = set()
+
+ def __getattr__(self, name: str) -> MarkDecorator:
+ """Generate a new :class:`MarkDecorator` with the given name."""
+ if name[0] == "_":
+ raise AttributeError("Marker name must NOT start with underscore")
+
+ if self._config is not None:
+ # We store a set of markers as a performance optimisation - if a mark
+ # name is in the set we definitely know it, but a mark may be known and
+ # not in the set. We therefore start by updating the set!
+ if name not in self._markers:
+ for line in self._config.getini("markers"):
+ # example lines: "skipif(condition): skip the given test if..."
+ # or "hypothesis: tests which use Hypothesis", so to get the
+ # marker name we split on both `:` and `(`.
+ marker = line.split(":")[0].split("(")[0].strip()
+ self._markers.add(marker)
+
+ # If the name is not in the set of known marks after updating,
+ # then it really is time to issue a warning or an error.
+ if name not in self._markers:
+ if self._config.option.strict_markers or self._config.option.strict:
+ fail(
+ f"{name!r} not found in `markers` configuration option",
+ pytrace=False,
+ )
+
+ # Raise a specific error for common misspellings of "parametrize".
+ if name in ["parameterize", "parametrise", "parameterise"]:
+ __tracebackhide__ = True
+ fail(f"Unknown '{name}' mark, did you mean 'parametrize'?")
+
+ warnings.warn(
+ "Unknown pytest.mark.%s - is this a typo? You can register "
+ "custom marks to avoid this warning - for details, see "
+ "https://docs.pytest.org/en/stable/how-to/mark.html" % name,
+ PytestUnknownMarkWarning,
+ 2,
+ )
+
+ return MarkDecorator(Mark(name, (), {}, _ispytest=True), _ispytest=True)
+
+
+MARK_GEN = MarkGenerator(_ispytest=True)
+
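`MarkGenerator.__getattr__` above is where an unknown mark name produces `PytestUnknownMarkWarning`, or a hard failure under `--strict-markers`; names listed in the `markers` ini value (split on `:` and `(` as shown) are considered known. A minimal registration sketch with hypothetical mark names:

    # pytest.ini (hypothetical):
    #   [pytest]
    #   addopts = --strict-markers
    #   markers =
    #       slow: marks tests as slow (deselect with -m "not slow")
    #       network(host): tests that talk to the given host
    import pytest

    @pytest.mark.slow  # registered above, so no PytestUnknownMarkWarning
    def test_big_dataset():
        pass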
+
+@final
+class NodeKeywords(MutableMapping[str, Any]):
+ __slots__ = ("node", "parent", "_markers")
+
+ def __init__(self, node: "Node") -> None:
+ self.node = node
+ self.parent = node.parent
+ self._markers = {node.name: True}
+
+ def __getitem__(self, key: str) -> Any:
+ try:
+ return self._markers[key]
+ except KeyError:
+ if self.parent is None:
+ raise
+ return self.parent.keywords[key]
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ self._markers[key] = value
+
+ # Note: we could've avoided explicitly implementing some of the methods
+ # below and use the collections.abc fallback, but that would be slow.
+
+ def __contains__(self, key: object) -> bool:
+ return (
+ key in self._markers
+ or self.parent is not None
+ and key in self.parent.keywords
+ )
+
+ def update( # type: ignore[override]
+ self,
+ other: Union[Mapping[str, Any], Iterable[Tuple[str, Any]]] = (),
+ **kwds: Any,
+ ) -> None:
+ self._markers.update(other)
+ self._markers.update(kwds)
+
+ def __delitem__(self, key: str) -> None:
+ raise ValueError("cannot delete key in keywords dict")
+
+ def __iter__(self) -> Iterator[str]:
+ # Doesn't need to be fast.
+ yield from self._markers
+ if self.parent is not None:
+ for keyword in self.parent.keywords:
+ # self._markers and self.parent.keywords can have duplicates.
+ if keyword not in self._markers:
+ yield keyword
+
+ def __len__(self) -> int:
+ # Doesn't need to be fast.
+ return sum(1 for keyword in self)
+
+ def __repr__(self) -> str:
+ return f"<NodeKeywords for node {self.node}>"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/monkeypatch.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/monkeypatch.py
new file mode 100644
index 0000000000..31f95a95ab
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/monkeypatch.py
@@ -0,0 +1,383 @@
+"""Monkeypatching and mocking functionality."""
+import os
+import re
+import sys
+import warnings
+from contextlib import contextmanager
+from typing import Any
+from typing import Generator
+from typing import List
+from typing import MutableMapping
+from typing import Optional
+from typing import overload
+from typing import Tuple
+from typing import TypeVar
+from typing import Union
+
+from _pytest.compat import final
+from _pytest.fixtures import fixture
+from _pytest.warning_types import PytestWarning
+
+RE_IMPORT_ERROR_NAME = re.compile(r"^No module named (.*)$")
+
+
+K = TypeVar("K")
+V = TypeVar("V")
+
+
+@fixture
+def monkeypatch() -> Generator["MonkeyPatch", None, None]:
+ """A convenient fixture for monkey-patching.
+
+ The fixture provides these methods to modify objects, dictionaries or
+ os.environ::
+
+ monkeypatch.setattr(obj, name, value, raising=True)
+ monkeypatch.delattr(obj, name, raising=True)
+ monkeypatch.setitem(mapping, name, value)
+ monkeypatch.delitem(obj, name, raising=True)
+ monkeypatch.setenv(name, value, prepend=None)
+ monkeypatch.delenv(name, raising=True)
+ monkeypatch.syspath_prepend(path)
+ monkeypatch.chdir(path)
+
+ All modifications will be undone after the requesting test function or
+ fixture has finished. The ``raising`` parameter determines if a KeyError
+ or AttributeError will be raised if the set/deletion operation has no target.
+ """
+ mpatch = MonkeyPatch()
+ yield mpatch
+ mpatch.undo()
+
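A short example of the fixture above in use; the patched attribute and environment variable are arbitrary choices for illustration, and everything is rolled back by `mpatch.undo()` when the test ends:

    import os

    def test_uses_home_dir(monkeypatch):
        monkeypatch.setattr(os.path, "expanduser", lambda p: "/abc")
        monkeypatch.setenv("APP_CONFIG", "/abc/config.toml")

        assert os.path.expanduser("~/data") == "/abc"
        assert os.environ["APP_CONFIG"] == "/abc/config.toml"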
+
+def resolve(name: str) -> object:
+ # Simplified from zope.dottedname.
+ parts = name.split(".")
+
+ used = parts.pop(0)
+ found = __import__(used)
+ for part in parts:
+ used += "." + part
+ try:
+ found = getattr(found, part)
+ except AttributeError:
+ pass
+ else:
+ continue
+ # We use explicit un-nesting of the handling block in order
+ # to avoid nested exceptions.
+ try:
+ __import__(used)
+ except ImportError as ex:
+ expected = str(ex).split()[-1]
+ if expected == used:
+ raise
+ else:
+ raise ImportError(f"import error in {used}: {ex}") from ex
+ found = annotated_getattr(found, part, used)
+ return found
+
+
+def annotated_getattr(obj: object, name: str, ann: str) -> object:
+ try:
+ obj = getattr(obj, name)
+ except AttributeError as e:
+ raise AttributeError(
+ "{!r} object at {} has no attribute {!r}".format(
+ type(obj).__name__, ann, name
+ )
+ ) from e
+ return obj
+
+
+def derive_importpath(import_path: str, raising: bool) -> Tuple[str, object]:
+ if not isinstance(import_path, str) or "." not in import_path:
+ raise TypeError(f"must be absolute import path string, not {import_path!r}")
+ module, attr = import_path.rsplit(".", 1)
+ target = resolve(module)
+ if raising:
+ annotated_getattr(target, attr, ann=module)
+ return attr, target
+
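`resolve()` and `derive_importpath()` back the dotted-string form of `setattr`/`delattr`: the last dot separates the attribute name from the object that owns it. A rough sketch, importing the helper from the private `_pytest.monkeypatch` module:

    import os
    from _pytest.monkeypatch import derive_importpath  # private API

    attr, target = derive_importpath("os.path.join", raising=True)
    assert attr == "join"
    assert target is os.path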
+
+class Notset:
+ def __repr__(self) -> str:
+ return "<notset>"
+
+
+notset = Notset()
+
+
+@final
+class MonkeyPatch:
+ """Helper to conveniently monkeypatch attributes/items/environment
+ variables/syspath.
+
+ Returned by the :fixture:`monkeypatch` fixture.
+
+ .. versionchanged:: 6.2
+ Can now also be used directly as `pytest.MonkeyPatch()`, for when
+ the fixture is not available. In this case, use
+ :meth:`with MonkeyPatch.context() as mp: <context>` or remember to call
+ :meth:`undo` explicitly.
+ """
+
+ def __init__(self) -> None:
+ self._setattr: List[Tuple[object, str, object]] = []
+ self._setitem: List[Tuple[MutableMapping[Any, Any], object, object]] = []
+ self._cwd: Optional[str] = None
+ self._savesyspath: Optional[List[str]] = None
+
+ @classmethod
+ @contextmanager
+ def context(cls) -> Generator["MonkeyPatch", None, None]:
+ """Context manager that returns a new :class:`MonkeyPatch` object
+ which undoes any patching done inside the ``with`` block upon exit.
+
+ Example:
+
+ .. code-block:: python
+
+ import functools
+
+
+ def test_partial(monkeypatch):
+ with monkeypatch.context() as m:
+ m.setattr(functools, "partial", 3)
+
+ Useful in situations where it is desired to undo some patches before the test ends,
+ such as mocking ``stdlib`` functions that might break pytest itself if mocked (for examples
+ of this see :issue:`3290`).
+ """
+ m = cls()
+ try:
+ yield m
+ finally:
+ m.undo()
+
+ @overload
+ def setattr(
+ self,
+ target: str,
+ name: object,
+ value: Notset = ...,
+ raising: bool = ...,
+ ) -> None:
+ ...
+
+ @overload
+ def setattr(
+ self,
+ target: object,
+ name: str,
+ value: object,
+ raising: bool = ...,
+ ) -> None:
+ ...
+
+ def setattr(
+ self,
+ target: Union[str, object],
+ name: Union[object, str],
+ value: object = notset,
+ raising: bool = True,
+ ) -> None:
+ """Set attribute value on target, memorizing the old value.
+
+ For convenience you can specify a string as ``target`` which
+ will be interpreted as a dotted import path, with the last part
+ being the attribute name. For example,
+ ``monkeypatch.setattr("os.getcwd", lambda: "/")``
+ would set the ``getcwd`` function of the ``os`` module.
+
+ Raises AttributeError if the attribute does not exist, unless
+ ``raising`` is set to False.
+ """
+ __tracebackhide__ = True
+ import inspect
+
+ if isinstance(value, Notset):
+ if not isinstance(target, str):
+ raise TypeError(
+ "use setattr(target, name, value) or "
+ "setattr(target, value) with target being a dotted "
+ "import string"
+ )
+ value = name
+ name, target = derive_importpath(target, raising)
+ else:
+ if not isinstance(name, str):
+ raise TypeError(
+ "use setattr(target, name, value) with name being a string or "
+ "setattr(target, value) with target being a dotted "
+ "import string"
+ )
+
+ oldval = getattr(target, name, notset)
+ if raising and oldval is notset:
+ raise AttributeError(f"{target!r} has no attribute {name!r}")
+
+ # avoid class descriptors like staticmethod/classmethod
+ if inspect.isclass(target):
+ oldval = target.__dict__.get(name, notset)
+ self._setattr.append((target, name, oldval))
+ setattr(target, name, value)
+
+ def delattr(
+ self,
+ target: Union[object, str],
+ name: Union[str, Notset] = notset,
+ raising: bool = True,
+ ) -> None:
+ """Delete attribute ``name`` from ``target``.
+
+ If no ``name`` is specified and ``target`` is a string
+ it will be interpreted as a dotted import path with the
+ last part being the attribute name.
+
+ Raises AttributeError if the attribute does not exist, unless
+ ``raising`` is set to False.
+ """
+ __tracebackhide__ = True
+ import inspect
+
+ if isinstance(name, Notset):
+ if not isinstance(target, str):
+ raise TypeError(
+ "use delattr(target, name) or "
+ "delattr(target) with target being a dotted "
+ "import string"
+ )
+ name, target = derive_importpath(target, raising)
+
+ if not hasattr(target, name):
+ if raising:
+ raise AttributeError(name)
+ else:
+ oldval = getattr(target, name, notset)
+ # Avoid class descriptors like staticmethod/classmethod.
+ if inspect.isclass(target):
+ oldval = target.__dict__.get(name, notset)
+ self._setattr.append((target, name, oldval))
+ delattr(target, name)
+
+ def setitem(self, dic: MutableMapping[K, V], name: K, value: V) -> None:
+ """Set dictionary entry ``name`` to value."""
+ self._setitem.append((dic, name, dic.get(name, notset)))
+ dic[name] = value
+
+ def delitem(self, dic: MutableMapping[K, V], name: K, raising: bool = True) -> None:
+ """Delete ``name`` from dict.
+
+ Raises ``KeyError`` if it doesn't exist, unless ``raising`` is set to
+ False.
+ """
+ if name not in dic:
+ if raising:
+ raise KeyError(name)
+ else:
+ self._setitem.append((dic, name, dic.get(name, notset)))
+ del dic[name]
+
+ def setenv(self, name: str, value: str, prepend: Optional[str] = None) -> None:
+ """Set environment variable ``name`` to ``value``.
+
+ If ``prepend`` is a character, the ``value`` is prepended to the current
+ value of the environment variable, joined by the ``prepend``
+ character.
+ """
+ if not isinstance(value, str):
+ warnings.warn( # type: ignore[unreachable]
+ PytestWarning(
+ "Value of environment variable {name} type should be str, but got "
+ "{value!r} (type: {type}); converted to str implicitly".format(
+ name=name, value=value, type=type(value).__name__
+ )
+ ),
+ stacklevel=2,
+ )
+ value = str(value)
+ if prepend and name in os.environ:
+ value = value + prepend + os.environ[name]
+ self.setitem(os.environ, name, value)
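The `prepend` parameter above is intended for PATH-like variables: the new value goes in front of the existing one, joined by the given separator. A one-line sketch with a hypothetical directory:

    import os

    def test_extra_tool_on_path(monkeypatch):
        # Results in "/opt/tools/bin" + os.pathsep + <previous PATH value>.
        monkeypatch.setenv("PATH", "/opt/tools/bin", prepend=os.pathsep)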
+
+ def delenv(self, name: str, raising: bool = True) -> None:
+ """Delete ``name`` from the environment.
+
+ Raises ``KeyError`` if it does not exist, unless ``raising`` is set to
+ False.
+ """
+ environ: MutableMapping[str, str] = os.environ
+ self.delitem(environ, name, raising=raising)
+
+ def syspath_prepend(self, path) -> None:
+ """Prepend ``path`` to ``sys.path`` list of import locations."""
+
+ if self._savesyspath is None:
+ self._savesyspath = sys.path[:]
+ sys.path.insert(0, str(path))
+
+ # https://github.com/pypa/setuptools/blob/d8b901bc/docs/pkg_resources.txt#L162-L171
+ # this is only needed when pkg_resources was already loaded by the namespace package
+ if "pkg_resources" in sys.modules:
+ from pkg_resources import fixup_namespace_packages
+
+ fixup_namespace_packages(str(path))
+
+ # A call to syspath_prepend() usually means that the caller wants to
+ # import some dynamically created files, so we invalidate Python's
+ # import caches.
+ # This is especially important when any namespace package is in use,
+ # since then the mtime-based FileFinder cache (which already exists
+ # in that case) is not invalidated when the new files are written
+ # shortly afterwards.
+ from importlib import invalidate_caches
+
+ invalidate_caches()
+
+ def chdir(self, path: Union[str, "os.PathLike[str]"]) -> None:
+ """Change the current working directory to the specified path.
+
+ Path can be a string or a path object.
+ """
+ if self._cwd is None:
+ self._cwd = os.getcwd()
+ os.chdir(path)
+
+ def undo(self) -> None:
+ """Undo previous changes.
+
+ This call consumes the undo stack. Calling it a second time has no
+ effect unless you do more monkeypatching after the undo call.
+
+ There is generally no need to call `undo()`, since it is
+ called automatically during tear-down.
+
+ Note that the same `monkeypatch` fixture is used across a
+ single test function invocation. If `monkeypatch` is used both by
+ the test function itself and one of the test fixtures,
+ calling `undo()` will undo all of the changes made in
+ both functions.
+ """
+ for obj, name, value in reversed(self._setattr):
+ if value is not notset:
+ setattr(obj, name, value)
+ else:
+ delattr(obj, name)
+ self._setattr[:] = []
+ for dictionary, key, value in reversed(self._setitem):
+ if value is notset:
+ try:
+ del dictionary[key]
+ except KeyError:
+ pass # Was already deleted, so we have the desired state.
+ else:
+ dictionary[key] = value
+ self._setitem[:] = []
+ if self._savesyspath is not None:
+ sys.path[:] = self._savesyspath
+ self._savesyspath = None
+
+ if self._cwd is not None:
+ os.chdir(self._cwd)
+ self._cwd = None
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/nodes.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/nodes.py
new file mode 100644
index 0000000000..e49c1b003e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/nodes.py
@@ -0,0 +1,762 @@
+import os
+import warnings
+from inspect import signature
+from pathlib import Path
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import MutableMapping
+from typing import Optional
+from typing import overload
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+import _pytest._code
+from _pytest._code import getfslineno
+from _pytest._code.code import ExceptionInfo
+from _pytest._code.code import TerminalRepr
+from _pytest.compat import cached_property
+from _pytest.compat import LEGACY_PATH
+from _pytest.config import Config
+from _pytest.config import ConftestImportFailure
+from _pytest.deprecated import FSCOLLECTOR_GETHOOKPROXY_ISINITPATH
+from _pytest.deprecated import NODE_CTOR_FSPATH_ARG
+from _pytest.mark.structures import Mark
+from _pytest.mark.structures import MarkDecorator
+from _pytest.mark.structures import NodeKeywords
+from _pytest.outcomes import fail
+from _pytest.pathlib import absolutepath
+from _pytest.pathlib import commonpath
+from _pytest.stash import Stash
+from _pytest.warning_types import PytestWarning
+
+if TYPE_CHECKING:
+ # Imported here due to circular import.
+ from _pytest.main import Session
+ from _pytest._code.code import _TracebackStyle
+
+
+SEP = "/"
+
+tracebackcutdir = Path(_pytest.__file__).parent
+
+
+def iterparentnodeids(nodeid: str) -> Iterator[str]:
+ """Return the parent node IDs of a given node ID, inclusive.
+
+ For the node ID
+
+ "testing/code/test_excinfo.py::TestFormattedExcinfo::test_repr_source"
+
+ the result would be
+
+ ""
+ "testing"
+ "testing/code"
+ "testing/code/test_excinfo.py"
+ "testing/code/test_excinfo.py::TestFormattedExcinfo"
+ "testing/code/test_excinfo.py::TestFormattedExcinfo::test_repr_source"
+
+ Note that / components are only considered until the first ::.
+ """
+ pos = 0
+ first_colons: Optional[int] = nodeid.find("::")
+ if first_colons == -1:
+ first_colons = None
+ # The root Session node - always present.
+ yield ""
+ # Eagerly consume SEP parts until first colons.
+ while True:
+ at = nodeid.find(SEP, pos, first_colons)
+ if at == -1:
+ break
+ if at > 0:
+ yield nodeid[:at]
+ pos = at + len(SEP)
+ # Eagerly consume :: parts.
+ while True:
+ at = nodeid.find("::", pos)
+ if at == -1:
+ break
+ if at > 0:
+ yield nodeid[:at]
+ pos = at + len("::")
+ # The node ID itself.
+ if nodeid:
+ yield nodeid
+
+
+def _check_path(path: Path, fspath: LEGACY_PATH) -> None:
+ if Path(fspath) != path:
+ raise ValueError(
+ f"Path({fspath!r}) != {path!r}\n"
+ "if both path and fspath are given they need to be equal"
+ )
+
+
+def _imply_path(
+ node_type: Type["Node"],
+ path: Optional[Path],
+ fspath: Optional[LEGACY_PATH],
+) -> Path:
+ if fspath is not None:
+ warnings.warn(
+ NODE_CTOR_FSPATH_ARG.format(
+ node_type_name=node_type.__name__,
+ ),
+ stacklevel=3,
+ )
+ if path is not None:
+ if fspath is not None:
+ _check_path(path, fspath)
+ return path
+ else:
+ assert fspath is not None
+ return Path(fspath)
+
+
+_NodeType = TypeVar("_NodeType", bound="Node")
+
+
+class NodeMeta(type):
+ def __call__(self, *k, **kw):
+ msg = (
+ "Direct construction of {name} has been deprecated, please use {name}.from_parent.\n"
+ "See "
+ "https://docs.pytest.org/en/stable/deprecations.html#node-construction-changed-to-node-from-parent"
+ " for more details."
+ ).format(name=f"{self.__module__}.{self.__name__}")
+ fail(msg, pytrace=False)
+
+ def _create(self, *k, **kw):
+ try:
+ return super().__call__(*k, **kw)
+ except TypeError:
+ sig = signature(getattr(self, "__init__"))
+ known_kw = {k: v for k, v in kw.items() if k in sig.parameters}
+ from .warning_types import PytestDeprecationWarning
+
+ warnings.warn(
+ PytestDeprecationWarning(
+ f"{self} is not using a cooperative constructor and only takes {set(known_kw)}.\n"
+ "See https://docs.pytest.org/en/stable/deprecations.html"
+ "#constructors-of-custom-pytest-node-subclasses-should-take-kwargs "
+ "for more details."
+ )
+ )
+
+ return super().__call__(*k, **known_kw)
+
+
+class Node(metaclass=NodeMeta):
+ """Base class for Collector and Item, the components of the test
+ collection tree.
+
+ Collector subclasses have children; Items are leaf nodes.
+ """
+
+ # Implemented in the legacypath plugin.
+ #: A ``LEGACY_PATH`` copy of the :attr:`path` attribute. Intended for usage
+ #: for methods not migrated to ``pathlib.Path`` yet, such as
+ #: :meth:`Item.reportinfo`. Will be deprecated in a future release, prefer
+ #: using :attr:`path` instead.
+ fspath: LEGACY_PATH
+
+ # Use __slots__ to make attribute access faster.
+ # Note that __dict__ is still available.
+ __slots__ = (
+ "name",
+ "parent",
+ "config",
+ "session",
+ "path",
+ "_nodeid",
+ "_store",
+ "__dict__",
+ )
+
+ def __init__(
+ self,
+ name: str,
+ parent: "Optional[Node]" = None,
+ config: Optional[Config] = None,
+ session: "Optional[Session]" = None,
+ fspath: Optional[LEGACY_PATH] = None,
+ path: Optional[Path] = None,
+ nodeid: Optional[str] = None,
+ ) -> None:
+ #: A unique name within the scope of the parent node.
+ self.name = name
+
+ #: The parent collector node.
+ self.parent = parent
+
+ if config:
+ #: The pytest config object.
+ self.config: Config = config
+ else:
+ if not parent:
+ raise TypeError("config or parent must be provided")
+ self.config = parent.config
+
+ if session:
+ #: The pytest session this node is part of.
+ self.session = session
+ else:
+ if not parent:
+ raise TypeError("session or parent must be provided")
+ self.session = parent.session
+
+ if path is None and fspath is None:
+ path = getattr(parent, "path", None)
+ #: Filesystem path where this node was collected from (can be None).
+ self.path: Path = _imply_path(type(self), path, fspath=fspath)
+
+ # The explicit annotation is to avoid publicly exposing NodeKeywords.
+ #: Keywords/markers collected from all scopes.
+ self.keywords: MutableMapping[str, Any] = NodeKeywords(self)
+
+ #: The marker objects belonging to this node.
+ self.own_markers: List[Mark] = []
+
+ #: Allow adding of extra keywords to use for matching.
+ self.extra_keyword_matches: Set[str] = set()
+
+ if nodeid is not None:
+ assert "::()" not in nodeid
+ self._nodeid = nodeid
+ else:
+ if not self.parent:
+ raise TypeError("nodeid or parent must be provided")
+ self._nodeid = self.parent.nodeid + "::" + self.name
+
+ #: A place where plugins can store information on the node for their
+ #: own use.
+ #:
+ #: :type: Stash
+ self.stash = Stash()
+ # Deprecated alias. Was never public. Can be removed in a few releases.
+ self._store = self.stash
+
+ @classmethod
+ def from_parent(cls, parent: "Node", **kw):
+ """Public constructor for Nodes.
+
+ This indirection was introduced to make it possible to remove
+ the fragile logic from the node constructors.
+
+ Subclasses can use ``super().from_parent(...)`` when overriding the
+ construction.
+
+ :param parent: The parent node of this Node.
+ """
+ if "config" in kw:
+ raise TypeError("config is not a valid argument for from_parent")
+ if "session" in kw:
+ raise TypeError("session is not a valid argument for from_parent")
+ return cls._create(parent=parent, **kw)
+
+ @property
+ def ihook(self):
+ """fspath-sensitive hook proxy used to call pytest hooks."""
+ return self.session.gethookproxy(self.path)
+
+ def __repr__(self) -> str:
+ return "<{} {}>".format(self.__class__.__name__, getattr(self, "name", None))
+
+ def warn(self, warning: Warning) -> None:
+ """Issue a warning for this Node.
+
+ Warnings will be displayed after the test session, unless explicitly suppressed.
+
+ :param Warning warning:
+ The warning instance to issue.
+
+ :raises ValueError: If ``warning`` instance is not a subclass of Warning.
+
+ Example usage:
+
+ .. code-block:: python
+
+ node.warn(PytestWarning("some message"))
+ node.warn(UserWarning("some message"))
+
+ .. versionchanged:: 6.2
+ Any subclass of :class:`Warning` is now accepted, rather than only
+ :class:`PytestWarning <pytest.PytestWarning>` subclasses.
+ """
+ # enforce type checks here to avoid getting a generic type error later otherwise.
+ if not isinstance(warning, Warning):
+ raise ValueError(
+ "warning must be an instance of Warning or subclass, got {!r}".format(
+ warning
+ )
+ )
+ path, lineno = get_fslocation_from_item(self)
+ assert lineno is not None
+ warnings.warn_explicit(
+ warning,
+ category=None,
+ filename=str(path),
+ lineno=lineno + 1,
+ )
+
+ # Methods for ordering nodes.
+
+ @property
+ def nodeid(self) -> str:
+ """A ::-separated string denoting its collection tree address."""
+ return self._nodeid
+
+ def __hash__(self) -> int:
+ return hash(self._nodeid)
+
+ def setup(self) -> None:
+ pass
+
+ def teardown(self) -> None:
+ pass
+
+ def listchain(self) -> List["Node"]:
+ """Return list of all parent collectors up to self, starting from
+ the root of collection tree."""
+ chain = []
+ item: Optional[Node] = self
+ while item is not None:
+ chain.append(item)
+ item = item.parent
+ chain.reverse()
+ return chain
+
+ def add_marker(
+ self, marker: Union[str, MarkDecorator], append: bool = True
+ ) -> None:
+ """Dynamically add a marker object to the node.
+
+ :param append:
+ Whether to append the marker, or prepend it.
+ """
+ from _pytest.mark import MARK_GEN
+
+ if isinstance(marker, MarkDecorator):
+ marker_ = marker
+ elif isinstance(marker, str):
+ marker_ = getattr(MARK_GEN, marker)
+ else:
+ raise ValueError("is not a string or pytest.mark.* Marker")
+ self.keywords[marker_.name] = marker_
+ if append:
+ self.own_markers.append(marker_.mark)
+ else:
+ self.own_markers.insert(0, marker_.mark)
+
+ def iter_markers(self, name: Optional[str] = None) -> Iterator[Mark]:
+ """Iterate over all markers of the node.
+
+ :param name: If given, filter the results by the name attribute.
+ """
+ return (x[1] for x in self.iter_markers_with_node(name=name))
+
+ def iter_markers_with_node(
+ self, name: Optional[str] = None
+ ) -> Iterator[Tuple["Node", Mark]]:
+ """Iterate over all markers of the node.
+
+ :param name: If given, filter the results by the name attribute.
+ :returns: An iterator of (node, mark) tuples.
+ """
+ for node in reversed(self.listchain()):
+ for mark in node.own_markers:
+ if name is None or getattr(mark, "name", None) == name:
+ yield node, mark
+
+ @overload
+ def get_closest_marker(self, name: str) -> Optional[Mark]:
+ ...
+
+ @overload
+ def get_closest_marker(self, name: str, default: Mark) -> Mark:
+ ...
+
+ def get_closest_marker(
+ self, name: str, default: Optional[Mark] = None
+ ) -> Optional[Mark]:
+ """Return the first marker matching the name, from closest (for
+ example function) to farther level (for example module level).
+
+ :param default: Fallback return value if no marker was found.
+ :param name: Name to filter by.
+ """
+ return next(self.iter_markers(name=name), default)
+
+ def listextrakeywords(self) -> Set[str]:
+ """Return a set of all extra keywords in self and any parents."""
+ extra_keywords: Set[str] = set()
+ for item in self.listchain():
+ extra_keywords.update(item.extra_keyword_matches)
+ return extra_keywords
+
+ def listnames(self) -> List[str]:
+ return [x.name for x in self.listchain()]
+
+ def addfinalizer(self, fin: Callable[[], object]) -> None:
+ """Register a function to be called when this node is finalized.
+
+ This method can only be called when this node is active
+ in a setup chain, for example during self.setup().
+ """
+ self.session._setupstate.addfinalizer(fin, self)
+
+ def getparent(self, cls: Type[_NodeType]) -> Optional[_NodeType]:
+ """Get the next parent node (including self) which is an instance of
+ the given class."""
+ current: Optional[Node] = self
+ while current and not isinstance(current, cls):
+ current = current.parent
+ assert current is None or isinstance(current, cls)
+ return current
+
+ def _prunetraceback(self, excinfo: ExceptionInfo[BaseException]) -> None:
+ pass
+
+ def _repr_failure_py(
+ self,
+ excinfo: ExceptionInfo[BaseException],
+ style: "Optional[_TracebackStyle]" = None,
+ ) -> TerminalRepr:
+ from _pytest.fixtures import FixtureLookupError
+
+ if isinstance(excinfo.value, ConftestImportFailure):
+ excinfo = ExceptionInfo.from_exc_info(excinfo.value.excinfo)
+ if isinstance(excinfo.value, fail.Exception):
+ if not excinfo.value.pytrace:
+ style = "value"
+ if isinstance(excinfo.value, FixtureLookupError):
+ return excinfo.value.formatrepr()
+ if self.config.getoption("fulltrace", False):
+ style = "long"
+ else:
+ tb = _pytest._code.Traceback([excinfo.traceback[-1]])
+ self._prunetraceback(excinfo)
+ if len(excinfo.traceback) == 0:
+ excinfo.traceback = tb
+ if style == "auto":
+ style = "long"
+ # XXX should excinfo.getrepr record all data and toterminal() process it?
+ if style is None:
+ if self.config.getoption("tbstyle", "auto") == "short":
+ style = "short"
+ else:
+ style = "long"
+
+ if self.config.getoption("verbose", 0) > 1:
+ truncate_locals = False
+ else:
+ truncate_locals = True
+
+ # excinfo.getrepr() formats paths relative to the CWD if `abspath` is False.
+ # It is possible for a fixture/test to change the CWD while this code runs, which
+ # would then result in the user seeing confusing paths in the failure message.
+ # To fix this, if the CWD changed, always display the full absolute path.
+ # It will be better to just always display paths relative to invocation_dir, but
+ # this requires a lot of plumbing (#6428).
+ try:
+ abspath = Path(os.getcwd()) != self.config.invocation_params.dir
+ except OSError:
+ abspath = True
+
+ return excinfo.getrepr(
+ funcargs=True,
+ abspath=abspath,
+ showlocals=self.config.getoption("showlocals", False),
+ style=style,
+ tbfilter=False, # pruned already, or in --fulltrace mode.
+ truncate_locals=truncate_locals,
+ )
+
+ def repr_failure(
+ self,
+ excinfo: ExceptionInfo[BaseException],
+ style: "Optional[_TracebackStyle]" = None,
+ ) -> Union[str, TerminalRepr]:
+ """Return a representation of a collection or test failure.
+
+ .. seealso:: :ref:`non-python tests`
+
+ :param excinfo: Exception information for the failure.
+ """
+ return self._repr_failure_py(excinfo, style)
+
+
+def get_fslocation_from_item(node: "Node") -> Tuple[Union[str, Path], Optional[int]]:
+ """Try to extract the actual location from a node, depending on available attributes:
+
+ * "location": a pair (path, lineno)
+ * "obj": a Python object that the node wraps.
+ * "fspath": just a path
+
+ :rtype: A tuple of (str|Path, int) with filename and line number.
+ """
+ # See Item.location.
+ location: Optional[Tuple[str, Optional[int], str]] = getattr(node, "location", None)
+ if location is not None:
+ return location[:2]
+ obj = getattr(node, "obj", None)
+ if obj is not None:
+ return getfslineno(obj)
+ return getattr(node, "fspath", "unknown location"), -1
+
+
+class Collector(Node):
+ """Collector instances create children through collect() and thus
+ iteratively build a tree."""
+
+ class CollectError(Exception):
+ """An error during collection, contains a custom message."""
+
+ def collect(self) -> Iterable[Union["Item", "Collector"]]:
+ """Return a list of children (items and collectors) for this
+ collection node."""
+ raise NotImplementedError("abstract")
+
+ # TODO: This omits the style= parameter which breaks Liskov Substitution.
+ def repr_failure( # type: ignore[override]
+ self, excinfo: ExceptionInfo[BaseException]
+ ) -> Union[str, TerminalRepr]:
+ """Return a representation of a collection failure.
+
+ :param excinfo: Exception information for the failure.
+ """
+ if isinstance(excinfo.value, self.CollectError) and not self.config.getoption(
+ "fulltrace", False
+ ):
+ exc = excinfo.value
+ return str(exc.args[0])
+
+ # Respect explicit tbstyle option, but default to "short"
+ # (_repr_failure_py uses "long" with "fulltrace" option always).
+ tbstyle = self.config.getoption("tbstyle", "auto")
+ if tbstyle == "auto":
+ tbstyle = "short"
+
+ return self._repr_failure_py(excinfo, style=tbstyle)
+
+ def _prunetraceback(self, excinfo: ExceptionInfo[BaseException]) -> None:
+ if hasattr(self, "path"):
+ traceback = excinfo.traceback
+ ntraceback = traceback.cut(path=self.path)
+ if ntraceback == traceback:
+ ntraceback = ntraceback.cut(excludepath=tracebackcutdir)
+ excinfo.traceback = ntraceback.filter()
+
+
+def _check_initialpaths_for_relpath(session: "Session", path: Path) -> Optional[str]:
+ for initial_path in session._initialpaths:
+ if commonpath(path, initial_path) == initial_path:
+ rel = str(path.relative_to(initial_path))
+ return "" if rel == "." else rel
+ return None
+
+
+class FSCollector(Collector):
+ def __init__(
+ self,
+ fspath: Optional[LEGACY_PATH] = None,
+ path_or_parent: Optional[Union[Path, Node]] = None,
+ path: Optional[Path] = None,
+ name: Optional[str] = None,
+ parent: Optional[Node] = None,
+ config: Optional[Config] = None,
+ session: Optional["Session"] = None,
+ nodeid: Optional[str] = None,
+ ) -> None:
+ if path_or_parent:
+ if isinstance(path_or_parent, Node):
+ assert parent is None
+ parent = cast(FSCollector, path_or_parent)
+ elif isinstance(path_or_parent, Path):
+ assert path is None
+ path = path_or_parent
+
+ path = _imply_path(type(self), path, fspath=fspath)
+ if name is None:
+ name = path.name
+ if parent is not None and parent.path != path:
+ try:
+ rel = path.relative_to(parent.path)
+ except ValueError:
+ pass
+ else:
+ name = str(rel)
+ name = name.replace(os.sep, SEP)
+ self.path = path
+
+ if session is None:
+ assert parent is not None
+ session = parent.session
+
+ if nodeid is None:
+ try:
+ nodeid = str(self.path.relative_to(session.config.rootpath))
+ except ValueError:
+ nodeid = _check_initialpaths_for_relpath(session, path)
+
+ if nodeid and os.sep != SEP:
+ nodeid = nodeid.replace(os.sep, SEP)
+
+ super().__init__(
+ name=name,
+ parent=parent,
+ config=config,
+ session=session,
+ nodeid=nodeid,
+ path=path,
+ )
+
+ @classmethod
+ def from_parent(
+ cls,
+ parent,
+ *,
+ fspath: Optional[LEGACY_PATH] = None,
+ path: Optional[Path] = None,
+ **kw,
+ ):
+ """The public constructor."""
+ return super().from_parent(parent=parent, fspath=fspath, path=path, **kw)
+
+ def gethookproxy(self, fspath: "os.PathLike[str]"):
+ warnings.warn(FSCOLLECTOR_GETHOOKPROXY_ISINITPATH, stacklevel=2)
+ return self.session.gethookproxy(fspath)
+
+ def isinitpath(self, path: Union[str, "os.PathLike[str]"]) -> bool:
+ warnings.warn(FSCOLLECTOR_GETHOOKPROXY_ISINITPATH, stacklevel=2)
+ return self.session.isinitpath(path)
+
+
+class File(FSCollector):
+ """Base class for collecting tests from a file.
+
+ :ref:`non-python tests`.
+ """
+
+
+class Item(Node):
+ """A basic test invocation item.
+
+ Note that for a single function there might be multiple test invocation items.
+ """
+
+ nextitem = None
+
+ def __init__(
+ self,
+ name,
+ parent=None,
+ config: Optional[Config] = None,
+ session: Optional["Session"] = None,
+ nodeid: Optional[str] = None,
+ **kw,
+ ) -> None:
+ # The first two arguments are intentionally passed positionally,
+ # to keep plugins that define a node type which inherits from
+ # (pytest.Item, pytest.File) working (see issue #8435).
+ # They can be made kwargs when the deprecation above is done.
+ super().__init__(
+ name,
+ parent,
+ config=config,
+ session=session,
+ nodeid=nodeid,
+ **kw,
+ )
+ self._report_sections: List[Tuple[str, str, str]] = []
+
+ #: A list of tuples (name, value) that holds user defined properties
+ #: for this test.
+ self.user_properties: List[Tuple[str, object]] = []
+
+ self._check_item_and_collector_diamond_inheritance()
+
+ def _check_item_and_collector_diamond_inheritance(self) -> None:
+ """
+ Check if the current type inherits from both Item and Collector
+ at the same time, emitting a warning accordingly (#8447).
+ """
+ cls = type(self)
+
+ # We inject an attribute in the type to avoid issuing this warning
+ # for the same class more than once, which is not helpful.
+ # It is a hack, but was deemed acceptable in order to avoid
+ # flooding the user in the common case.
+ attr_name = "_pytest_diamond_inheritance_warning_shown"
+ if getattr(cls, attr_name, False):
+ return
+ setattr(cls, attr_name, True)
+
+ problems = ", ".join(
+ base.__name__ for base in cls.__bases__ if issubclass(base, Collector)
+ )
+ if problems:
+ warnings.warn(
+ f"{cls.__name__} is an Item subclass and should not be a collector, "
+ f"however its bases {problems} are collectors.\n"
+ "Please split the Collectors and the Item into separate node types.\n"
+ "Pytest Doc example: https://docs.pytest.org/en/latest/example/nonpython.html\n"
+ "example pull request on a plugin: https://github.com/asmeurer/pytest-flakes/pull/40/",
+ PytestWarning,
+ )
+
+ def runtest(self) -> None:
+ """Run the test case for this item.
+
+ Must be implemented by subclasses.
+
+ .. seealso:: :ref:`non-python tests`
+ """
+ raise NotImplementedError("runtest must be implemented by Item subclass")
+
+ def add_report_section(self, when: str, key: str, content: str) -> None:
+ """Add a new report section, similar to what's done internally to add
+ stdout and stderr captured output::
+
+ item.add_report_section("call", "stdout", "report section contents")
+
+ :param str when:
+ One of the possible capture states, ``"setup"``, ``"call"``, ``"teardown"``.
+ :param str key:
+ Name of the section, can be customized at will. Pytest uses ``"stdout"`` and
+ ``"stderr"`` internally.
+ :param str content:
+ The full contents as a string.
+ """
+ if content:
+ self._report_sections.append((when, key, content))
+
+ def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]:
+ """Get location information for this item for test reports.
+
+ Returns a tuple with three elements:
+
+ - The path of the test (default ``self.path``)
+ - The line number of the test (default ``None``)
+ - A name of the test to be shown (default ``""``)
+
+ .. seealso:: :ref:`non-python tests`
+ """
+ return self.path, None, ""
+
+ @cached_property
+ def location(self) -> Tuple[str, Optional[int], str]:
+ location = self.reportinfo()
+ path = absolutepath(os.fspath(location[0]))
+ relfspath = self.session._node_location_to_relpath(path)
+ assert type(location[2]) is str
+ return (relfspath, location[1], location[2])
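
An illustrative sketch of the ``from_parent``/``Item`` API documented above, in the shape of a hypothetical conftest.py; the YAML file type and all names below are assumptions, not part of the patch:

    import pytest


    class YamlFile(pytest.File):
        def collect(self):
            # Nodes are created through from_parent(); direct construction
            # is blocked by NodeMeta.__call__ above.
            yield YamlItem.from_parent(self, name="smoke")


    class YamlItem(pytest.Item):
        def runtest(self):
            # get_closest_marker() walks own_markers from this node up the
            # listchain(), closest scope first.
            if self.get_closest_marker("skip_me") is not None:
                pytest.skip("marked for skipping")

        def reportinfo(self):
            # (path, lineno, name) shown in test reports.
            return self.path, None, f"usecase: {self.name}"


    def pytest_collect_file(parent, file_path):
        if file_path.suffix == ".yaml":
            return YamlFile.from_parent(parent, path=file_path)
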
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/nose.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/nose.py
new file mode 100644
index 0000000000..b0699d22bd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/nose.py
@@ -0,0 +1,42 @@
+"""Run testsuites written for nose."""
+from _pytest.config import hookimpl
+from _pytest.fixtures import getfixturemarker
+from _pytest.nodes import Item
+from _pytest.python import Function
+from _pytest.unittest import TestCaseFunction
+
+
+@hookimpl(trylast=True)
+def pytest_runtest_setup(item: Item) -> None:
+ if not isinstance(item, Function):
+ return
+ # Don't do nose style setup/teardown on direct unittest style classes.
+ if isinstance(item, TestCaseFunction):
+ return
+
+ # Capture the narrowed type of item for the teardown closure,
+ # see https://github.com/python/mypy/issues/2608
+ func = item
+
+ call_optional(func.obj, "setup")
+ func.addfinalizer(lambda: call_optional(func.obj, "teardown"))
+
+ # NOTE: Module- and class-level fixtures are handled in python.py
+ # with `pluginmanager.has_plugin("nose")` checks.
+ # It would have been nicer to implement them outside of core, but
+ # it's not straightforward.
+
+
+def call_optional(obj: object, name: str) -> bool:
+ method = getattr(obj, name, None)
+ if method is None:
+ return False
+ is_fixture = getfixturemarker(method) is not None
+ if is_fixture:
+ return False
+ if not callable(method):
+ return False
+ # If there are any problems allow the exception to raise rather than
+ # silently ignoring it.
+ method()
+ return True
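
A minimal sketch of the nose style this hook supports, assuming a plain test module (not part of the patch): ``setup``/``teardown`` attached as attributes on a test function are picked up by ``call_optional()``:

    events = []


    def _setup():
        events.append("setup")


    def _teardown():
        events.append("teardown")


    def test_nose_style_function():
        # pytest_runtest_setup() above has already run _setup via call_optional().
        assert events == ["setup"]


    # nose's with_setup-style attributes on the test function.
    test_nose_style_function.setup = _setup
    test_nose_style_function.teardown = _teardown
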
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/outcomes.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/outcomes.py
new file mode 100644
index 0000000000..25206fe0e8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/outcomes.py
@@ -0,0 +1,307 @@
+"""Exception classes and constants handling test outcomes as well as
+functions creating them."""
+import sys
+import warnings
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Optional
+from typing import Type
+from typing import TypeVar
+
+from _pytest.deprecated import KEYWORD_MSG_ARG
+
+TYPE_CHECKING = False # Avoid circular import through compat.
+
+if TYPE_CHECKING:
+ from typing import NoReturn
+ from typing_extensions import Protocol
+else:
+ # typing.Protocol is only available starting from Python 3.8. It is also
+ # available from typing_extensions, but we don't want a runtime dependency
+ # on that. So use a dummy runtime implementation.
+ from typing import Generic
+
+ Protocol = Generic
+
+
+class OutcomeException(BaseException):
+ """OutcomeException and its subclass instances indicate and contain info
+ about test and collection outcomes."""
+
+ def __init__(self, msg: Optional[str] = None, pytrace: bool = True) -> None:
+ if msg is not None and not isinstance(msg, str):
+ error_msg = ( # type: ignore[unreachable]
+ "{} expected string as 'msg' parameter, got '{}' instead.\n"
+ "Perhaps you meant to use a mark?"
+ )
+ raise TypeError(error_msg.format(type(self).__name__, type(msg).__name__))
+ super().__init__(msg)
+ self.msg = msg
+ self.pytrace = pytrace
+
+ def __repr__(self) -> str:
+ if self.msg is not None:
+ return self.msg
+ return f"<{self.__class__.__name__} instance>"
+
+ __str__ = __repr__
+
+
+TEST_OUTCOME = (OutcomeException, Exception)
+
+
+class Skipped(OutcomeException):
+ # XXX hackish: pretend to live in builtins so that the Skipped
+ # exception prints shorter/nicer.
+ __module__ = "builtins"
+
+ def __init__(
+ self,
+ msg: Optional[str] = None,
+ pytrace: bool = True,
+ allow_module_level: bool = False,
+ *,
+ _use_item_location: bool = False,
+ ) -> None:
+ super().__init__(msg=msg, pytrace=pytrace)
+ self.allow_module_level = allow_module_level
+ # If true, the skip location is reported as the item's location,
+ # instead of the place that raises the exception/calls skip().
+ self._use_item_location = _use_item_location
+
+
+class Failed(OutcomeException):
+ """Raised from an explicit call to pytest.fail()."""
+
+ __module__ = "builtins"
+
+
+class Exit(Exception):
+ """Raised for immediate program exits (no tracebacks/summaries)."""
+
+ def __init__(
+ self, msg: str = "unknown reason", returncode: Optional[int] = None
+ ) -> None:
+ self.msg = msg
+ self.returncode = returncode
+ super().__init__(msg)
+
+
+# Elaborate hack to work around https://github.com/python/mypy/issues/2087.
+# Ideally would just be `exit.Exception = Exit` etc.
+
+_F = TypeVar("_F", bound=Callable[..., object])
+_ET = TypeVar("_ET", bound=Type[BaseException])
+
+
+class _WithException(Protocol[_F, _ET]):
+ Exception: _ET
+ __call__: _F
+
+
+def _with_exception(exception_type: _ET) -> Callable[[_F], _WithException[_F, _ET]]:
+ def decorate(func: _F) -> _WithException[_F, _ET]:
+ func_with_exception = cast(_WithException[_F, _ET], func)
+ func_with_exception.Exception = exception_type
+ return func_with_exception
+
+ return decorate
+
+
+# Exposed helper methods.
+
+
+@_with_exception(Exit)
+def exit(
+ reason: str = "", returncode: Optional[int] = None, *, msg: Optional[str] = None
+) -> "NoReturn":
+ """Exit testing process.
+
+ :param reason:
+ The message to show as the reason for exiting pytest. reason has a default value
+ only because `msg` is deprecated.
+
+ :param returncode:
+ Return code to be used when exiting pytest.
+
+ :param msg:
+ Same as ``reason``, but deprecated. Will be removed in a future version, use ``reason`` instead.
+ """
+ __tracebackhide__ = True
+ from _pytest.config import UsageError
+
+ if reason and msg:
+ raise UsageError(
+ "cannot pass reason and msg to exit(), `msg` is deprecated, use `reason`."
+ )
+ if not reason:
+ if msg is None:
+ raise UsageError("exit() requires a reason argument")
+ warnings.warn(KEYWORD_MSG_ARG.format(func="exit"), stacklevel=2)
+ reason = msg
+ raise Exit(reason, returncode)
+
+
+@_with_exception(Skipped)
+def skip(
+ reason: str = "", *, allow_module_level: bool = False, msg: Optional[str] = None
+) -> "NoReturn":
+ """Skip an executing test with the given message.
+
+ This function should be called only during testing (setup, call or teardown) or
+ during collection by using the ``allow_module_level`` flag. This function can
+ be called in doctests as well.
+
+ :param reason:
+ The message to show the user as reason for the skip.
+
+ :param allow_module_level:
+ Allows this function to be called at module level, skipping the rest
+ of the module. Defaults to False.
+
+ :param msg:
+ Same as ``reason``, but deprecated. Will be removed in a future version, use ``reason`` instead.
+
+ .. note::
+ It is better to use the :ref:`pytest.mark.skipif ref` marker when
+ possible to declare a test to be skipped under certain conditions
+ like mismatching platforms or dependencies.
+ Similarly, use the ``# doctest: +SKIP`` directive (see :py:data:`doctest.SKIP`)
+ to skip a doctest statically.
+ """
+ __tracebackhide__ = True
+ reason = _resolve_msg_to_reason("skip", reason, msg)
+ raise Skipped(msg=reason, allow_module_level=allow_module_level)
+
+
+@_with_exception(Failed)
+def fail(
+ reason: str = "", pytrace: bool = True, msg: Optional[str] = None
+) -> "NoReturn":
+ """Explicitly fail an executing test with the given message.
+
+ :param reason:
+ The message to show the user as reason for the failure.
+
+ :param pytrace:
+ If False, the message represents the full failure information and no
+ Python traceback will be reported.
+
+ :param msg:
+ Same as ``reason``, but deprecated. Will be removed in a future version, use ``reason`` instead.
+ """
+ __tracebackhide__ = True
+ reason = _resolve_msg_to_reason("fail", reason, msg)
+ raise Failed(msg=reason, pytrace=pytrace)
+
+
+def _resolve_msg_to_reason(
+ func_name: str, reason: str, msg: Optional[str] = None
+) -> str:
+ """
+ Handle converting the deprecated ``msg`` parameter, if provided, into
+ ``reason``, raising a deprecation warning. This function will be removed
+ when the optional ``msg`` argument itself is removed in the future.
+
+ :param str func_name:
+ The name of the offending function, this is formatted into the deprecation message.
+
+ :param str reason:
+ The reason= passed into either pytest.fail() or pytest.skip()
+
+ :param str msg:
+ The msg= passed into either pytest.fail() or pytest.skip(). This will
+ be converted into reason if it is provided to allow pytest.skip(msg=) or
+ pytest.fail(msg=) to continue working in the interim period.
+
+ :returns:
+ The value to use as reason.
+
+ """
+ __tracebackhide__ = True
+ if msg is not None:
+
+ if reason:
+ from pytest import UsageError
+
+ raise UsageError(
+ f"Passing both ``reason`` and ``msg`` to pytest.{func_name}(...) is not permitted."
+ )
+ warnings.warn(KEYWORD_MSG_ARG.format(func=func_name), stacklevel=3)
+ reason = msg
+ return reason
+
+
+class XFailed(Failed):
+ """Raised from an explicit call to pytest.xfail()."""
+
+
+@_with_exception(XFailed)
+def xfail(reason: str = "") -> "NoReturn":
+ """Imperatively xfail an executing test or setup function with the given reason.
+
+ This function should be called only during testing (setup, call or teardown).
+
+ .. note::
+ It is better to use the :ref:`pytest.mark.xfail ref` marker when
+ possible to declare a test to be xfailed under certain conditions
+ like known bugs or missing features.
+ """
+ __tracebackhide__ = True
+ raise XFailed(reason)
+
+
+def importorskip(
+ modname: str, minversion: Optional[str] = None, reason: Optional[str] = None
+) -> Any:
+ """Import and return the requested module ``modname``, or skip the
+ current test if the module cannot be imported.
+
+ :param str modname:
+ The name of the module to import.
+ :param str minversion:
+ If given, the imported module's ``__version__`` attribute must be at
+ least this minimal version, otherwise the test is still skipped.
+ :param str reason:
+ If given, this reason is shown as the message when the module cannot
+ be imported.
+
+ :returns:
+ The imported module. This should be assigned to its canonical name.
+
+ Example::
+
+ docutils = pytest.importorskip("docutils")
+ """
+ import warnings
+
+ __tracebackhide__ = True
+ compile(modname, "", "eval") # to catch syntaxerrors
+
+ with warnings.catch_warnings():
+ # Make sure to ignore ImportWarnings that might happen because
+ # of existing directories with the same name we're trying to
+ # import but without a __init__.py file.
+ warnings.simplefilter("ignore")
+ try:
+ __import__(modname)
+ except ImportError as exc:
+ if reason is None:
+ reason = f"could not import {modname!r}: {exc}"
+ raise Skipped(reason, allow_module_level=True) from None
+ mod = sys.modules[modname]
+ if minversion is None:
+ return mod
+ verattr = getattr(mod, "__version__", None)
+ if minversion is not None:
+ # Imported lazily to improve start-up time.
+ from packaging.version import Version
+
+ if verattr is None or Version(verattr) < Version(minversion):
+ raise Skipped(
+ "module %r has __version__ %r, required is: %r"
+ % (modname, verattr, minversion),
+ allow_module_level=True,
+ )
+ return mod
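
An illustrative sketch of the helpers above in a hypothetical test module (not part of the patch); ``yaml`` is only a stand-in for any optional dependency:

    import pytest

    # Skip the whole module if PyYAML is missing or too old.
    yaml = pytest.importorskip("yaml", minversion="5.0")


    def test_optional_feature():
        if yaml.safe_load("a: 1") != {"a": 1}:
            pytest.fail("unexpected parse result", pytrace=False)


    def test_known_limitation():
        # Imperative xfail, as opposed to the pytest.mark.xfail marker.
        pytest.xfail("negative values not supported yet")
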
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pastebin.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pastebin.py
new file mode 100644
index 0000000000..385b3022cc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pastebin.py
@@ -0,0 +1,110 @@
+"""Submit failure or test session information to a pastebin service."""
+import tempfile
+from io import StringIO
+from typing import IO
+from typing import Union
+
+import pytest
+from _pytest.config import Config
+from _pytest.config import create_terminal_writer
+from _pytest.config.argparsing import Parser
+from _pytest.stash import StashKey
+from _pytest.terminal import TerminalReporter
+
+
+pastebinfile_key = StashKey[IO[bytes]]()
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("terminal reporting")
+ group._addoption(
+ "--pastebin",
+ metavar="mode",
+ action="store",
+ dest="pastebin",
+ default=None,
+ choices=["failed", "all"],
+ help="send failed|all info to bpaste.net pastebin service.",
+ )
+
+
+@pytest.hookimpl(trylast=True)
+def pytest_configure(config: Config) -> None:
+ if config.option.pastebin == "all":
+ tr = config.pluginmanager.getplugin("terminalreporter")
+ # If no terminal reporter plugin is present, there is nothing we can do here;
+ # this can happen when this function executes in a worker node
+ # when using pytest-xdist, for example.
+ if tr is not None:
+ # The pastebin file will be a UTF-8 encoded binary file.
+ config.stash[pastebinfile_key] = tempfile.TemporaryFile("w+b")
+ oldwrite = tr._tw.write
+
+ def tee_write(s, **kwargs):
+ oldwrite(s, **kwargs)
+ if isinstance(s, str):
+ s = s.encode("utf-8")
+ config.stash[pastebinfile_key].write(s)
+
+ tr._tw.write = tee_write
+
+
+def pytest_unconfigure(config: Config) -> None:
+ if pastebinfile_key in config.stash:
+ pastebinfile = config.stash[pastebinfile_key]
+ # Get terminal contents and delete file.
+ pastebinfile.seek(0)
+ sessionlog = pastebinfile.read()
+ pastebinfile.close()
+ del config.stash[pastebinfile_key]
+ # Undo our patching in the terminal reporter.
+ tr = config.pluginmanager.getplugin("terminalreporter")
+ del tr._tw.__dict__["write"]
+ # Write summary.
+ tr.write_sep("=", "Sending information to Paste Service")
+ pastebinurl = create_new_paste(sessionlog)
+ tr.write_line("pastebin session-log: %s\n" % pastebinurl)
+
+
+def create_new_paste(contents: Union[str, bytes]) -> str:
+ """Create a new paste using the bpaste.net service.
+
+ :contents: Paste contents string.
+ :returns: URL to the pasted contents, or an error message.
+ """
+ import re
+ from urllib.request import urlopen
+ from urllib.parse import urlencode
+
+ params = {"code": contents, "lexer": "text", "expiry": "1week"}
+ url = "https://bpa.st"
+ try:
+ response: str = (
+ urlopen(url, data=urlencode(params).encode("ascii")).read().decode("utf-8")
+ )
+ except OSError as exc_info: # urllib errors
+ return "bad response: %s" % exc_info
+ m = re.search(r'href="/raw/(\w+)"', response)
+ if m:
+ return f"{url}/show/{m.group(1)}"
+ else:
+ return "bad response: invalid format ('" + response + "')"
+
+
+def pytest_terminal_summary(terminalreporter: TerminalReporter) -> None:
+ if terminalreporter.config.option.pastebin != "failed":
+ return
+ if "failed" in terminalreporter.stats:
+ terminalreporter.write_sep("=", "Sending information to Paste Service")
+ for rep in terminalreporter.stats["failed"]:
+ try:
+ msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc
+ except AttributeError:
+ msg = terminalreporter._getfailureheadline(rep)
+ file = StringIO()
+ tw = create_terminal_writer(terminalreporter.config, file)
+ rep.toterminal(tw)
+ s = file.getvalue()
+ assert len(s)
+ pastebinurl = create_new_paste(s)
+ terminalreporter.write_line(f"{msg} --> {pastebinurl}")
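
Typical usage of the option wired up above (the command lines are illustrative):

    pytest --pastebin=failed    # paste one report per failed test
    pytest --pastebin=all       # paste the whole terminal session log

Both modes post through create_new_paste() and print the resulting URL(s) at the end of the run.
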
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pathlib.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pathlib.py
new file mode 100644
index 0000000000..b44753e1a4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pathlib.py
@@ -0,0 +1,724 @@
+import atexit
+import contextlib
+import fnmatch
+import importlib.util
+import itertools
+import os
+import shutil
+import sys
+import uuid
+import warnings
+from enum import Enum
+from errno import EBADF
+from errno import ELOOP
+from errno import ENOENT
+from errno import ENOTDIR
+from functools import partial
+from os.path import expanduser
+from os.path import expandvars
+from os.path import isabs
+from os.path import sep
+from pathlib import Path
+from pathlib import PurePath
+from posixpath import sep as posix_sep
+from types import ModuleType
+from typing import Callable
+from typing import Dict
+from typing import Iterable
+from typing import Iterator
+from typing import Optional
+from typing import Set
+from typing import TypeVar
+from typing import Union
+
+from _pytest.compat import assert_never
+from _pytest.outcomes import skip
+from _pytest.warning_types import PytestWarning
+
+LOCK_TIMEOUT = 60 * 60 * 24 * 3
+
+
+_AnyPurePath = TypeVar("_AnyPurePath", bound=PurePath)
+
+# The following function, variables and comments were
+# copied from the CPython 3.9 Lib/pathlib.py file.
+
+# EBADF - guard against macOS `stat` throwing EBADF
+_IGNORED_ERRORS = (ENOENT, ENOTDIR, EBADF, ELOOP)
+
+_IGNORED_WINERRORS = (
+ 21, # ERROR_NOT_READY - drive exists but is not accessible
+ 1921, # ERROR_CANT_RESOLVE_FILENAME - fix for broken symlink pointing to itself
+)
+
+
+def _ignore_error(exception):
+ return (
+ getattr(exception, "errno", None) in _IGNORED_ERRORS
+ or getattr(exception, "winerror", None) in _IGNORED_WINERRORS
+ )
+
+
+def get_lock_path(path: _AnyPurePath) -> _AnyPurePath:
+ return path.joinpath(".lock")
+
+
+def on_rm_rf_error(func, path: str, exc, *, start_path: Path) -> bool:
+ """Handle known read-only errors during rmtree.
+
+ The returned value is used only by our own tests.
+ """
+ exctype, excvalue = exc[:2]
+
+ # Another process removed the file in the middle of the "rm_rf" (xdist for example).
+ # More context: https://github.com/pytest-dev/pytest/issues/5974#issuecomment-543799018
+ if isinstance(excvalue, FileNotFoundError):
+ return False
+
+ if not isinstance(excvalue, PermissionError):
+ warnings.warn(
+ PytestWarning(f"(rm_rf) error removing {path}\n{exctype}: {excvalue}")
+ )
+ return False
+
+ if func not in (os.rmdir, os.remove, os.unlink):
+ if func not in (os.open,):
+ warnings.warn(
+ PytestWarning(
+ "(rm_rf) unknown function {} when removing {}:\n{}: {}".format(
+ func, path, exctype, excvalue
+ )
+ )
+ )
+ return False
+
+ # Chmod + retry.
+ import stat
+
+ def chmod_rw(p: str) -> None:
+ mode = os.stat(p).st_mode
+ os.chmod(p, mode | stat.S_IRUSR | stat.S_IWUSR)
+
+ # For files, we need to recursively go upwards in the directories to
+ # ensure they all are also writable.
+ p = Path(path)
+ if p.is_file():
+ for parent in p.parents:
+ chmod_rw(str(parent))
+ # Stop when we reach the original path passed to rm_rf.
+ if parent == start_path:
+ break
+ chmod_rw(str(path))
+
+ func(path)
+ return True
+
+
+def ensure_extended_length_path(path: Path) -> Path:
+ """Get the extended-length version of a path (Windows).
+
+ On Windows, by default, the maximum length of a path (MAX_PATH) is 260
+ characters, and operations on paths longer than that fail. But it is possible
+ to overcome this by converting the path to "extended-length" form before
+ performing the operation:
+ https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#maximum-path-length-limitation
+
+ On Windows, this function returns the extended-length absolute version of path.
+ On other platforms it returns path unchanged.
+ """
+ if sys.platform.startswith("win32"):
+ path = path.resolve()
+ path = Path(get_extended_length_path_str(str(path)))
+ return path
+
+
+def get_extended_length_path_str(path: str) -> str:
+ """Convert a path to a Windows extended length path."""
+ long_path_prefix = "\\\\?\\"
+ unc_long_path_prefix = "\\\\?\\UNC\\"
+ if path.startswith((long_path_prefix, unc_long_path_prefix)):
+ return path
+ # UNC
+ if path.startswith("\\\\"):
+ return unc_long_path_prefix + path[2:]
+ return long_path_prefix + path
+
+
+def rm_rf(path: Path) -> None:
+ """Remove the path contents recursively, even if some elements
+ are read-only."""
+ path = ensure_extended_length_path(path)
+ onerror = partial(on_rm_rf_error, start_path=path)
+ shutil.rmtree(str(path), onerror=onerror)
+
+
+def find_prefixed(root: Path, prefix: str) -> Iterator[Path]:
+ """Find all elements in root that begin with the prefix, case insensitive."""
+ l_prefix = prefix.lower()
+ for x in root.iterdir():
+ if x.name.lower().startswith(l_prefix):
+ yield x
+
+
+def extract_suffixes(iter: Iterable[PurePath], prefix: str) -> Iterator[str]:
+ """Return the parts of the paths following the prefix.
+
+ :param iter: Iterator over path names.
+ :param prefix: Expected prefix of the path names.
+ """
+ p_len = len(prefix)
+ for p in iter:
+ yield p.name[p_len:]
+
+
+def find_suffixes(root: Path, prefix: str) -> Iterator[str]:
+ """Combine find_prefixes and extract_suffixes."""
+ return extract_suffixes(find_prefixed(root, prefix), prefix)
+
+
+def parse_num(maybe_num) -> int:
+ """Parse number path suffixes, returns -1 on error."""
+ try:
+ return int(maybe_num)
+ except ValueError:
+ return -1
+
+
+def _force_symlink(
+ root: Path, target: Union[str, PurePath], link_to: Union[str, Path]
+) -> None:
+ """Helper to create the current symlink.
+
+ It is full of race conditions that are reasonably OK to ignore
+ in the context of best-effort linking to the latest test run.
+
+ The presumption is that, with heavy parallelism, the resulting
+ inaccuracy is acceptable.
+ """
+ current_symlink = root.joinpath(target)
+ try:
+ current_symlink.unlink()
+ except OSError:
+ pass
+ try:
+ current_symlink.symlink_to(link_to)
+ except Exception:
+ pass
+
+
+def make_numbered_dir(root: Path, prefix: str, mode: int = 0o700) -> Path:
+ """Create a directory with an increased number as suffix for the given prefix."""
+ for i in range(10):
+ # try up to 10 times to create the folder
+ max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1)
+ new_number = max_existing + 1
+ new_path = root.joinpath(f"{prefix}{new_number}")
+ try:
+ new_path.mkdir(mode=mode)
+ except Exception:
+ pass
+ else:
+ _force_symlink(root, prefix + "current", new_path)
+ return new_path
+ else:
+ raise OSError(
+ "could not create numbered dir with prefix "
+ "{prefix} in {root} after 10 tries".format(prefix=prefix, root=root)
+ )
+
+
+def create_cleanup_lock(p: Path) -> Path:
+ """Create a lock to prevent premature folder cleanup."""
+ lock_path = get_lock_path(p)
+ try:
+ fd = os.open(str(lock_path), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644)
+ except FileExistsError as e:
+ raise OSError(f"cannot create lockfile in {p}") from e
+ else:
+ pid = os.getpid()
+ spid = str(pid).encode()
+ os.write(fd, spid)
+ os.close(fd)
+ if not lock_path.is_file():
+ raise OSError("lock path got renamed after successful creation")
+ return lock_path
+
+
+def register_cleanup_lock_removal(lock_path: Path, register=atexit.register):
+ """Register a cleanup function for removing a lock, by default on atexit."""
+ pid = os.getpid()
+
+ def cleanup_on_exit(lock_path: Path = lock_path, original_pid: int = pid) -> None:
+ current_pid = os.getpid()
+ if current_pid != original_pid:
+ # fork
+ return
+ try:
+ lock_path.unlink()
+ except OSError:
+ pass
+
+ return register(cleanup_on_exit)
+
+
+def maybe_delete_a_numbered_dir(path: Path) -> None:
+ """Remove a numbered directory if its lock can be obtained and it does
+ not seem to be in use."""
+ path = ensure_extended_length_path(path)
+ lock_path = None
+ try:
+ lock_path = create_cleanup_lock(path)
+ parent = path.parent
+
+ garbage = parent.joinpath(f"garbage-{uuid.uuid4()}")
+ path.rename(garbage)
+ rm_rf(garbage)
+ except OSError:
+ # known races:
+ # * other process did a cleanup at the same time
+ # * deletable folder was found
+ # * process cwd (Windows)
+ return
+ finally:
+ # If we created the lock, ensure we remove it even if we failed
+ # to properly remove the numbered dir.
+ if lock_path is not None:
+ try:
+ lock_path.unlink()
+ except OSError:
+ pass
+
+
+def ensure_deletable(path: Path, consider_lock_dead_if_created_before: float) -> bool:
+ """Check if `path` is deletable based on whether the lock file is expired."""
+ if path.is_symlink():
+ return False
+ lock = get_lock_path(path)
+ try:
+ if not lock.is_file():
+ return True
+ except OSError:
+ # we might not have access to the lock file at all, in this case assume
+ # we don't have access to the entire directory (#7491).
+ return False
+ try:
+ lock_time = lock.stat().st_mtime
+ except Exception:
+ return False
+ else:
+ if lock_time < consider_lock_dead_if_created_before:
+ # We want to ignore any errors while trying to remove the lock such as:
+ # - PermissionDenied, like the file permissions have changed since the lock creation;
+ # - FileNotFoundError, in case another pytest process got here first;
+ # and any other cause of failure.
+ with contextlib.suppress(OSError):
+ lock.unlink()
+ return True
+ return False
+
+
+def try_cleanup(path: Path, consider_lock_dead_if_created_before: float) -> None:
+ """Try to cleanup a folder if we can ensure it's deletable."""
+ if ensure_deletable(path, consider_lock_dead_if_created_before):
+ maybe_delete_a_numbered_dir(path)
+
+
+def cleanup_candidates(root: Path, prefix: str, keep: int) -> Iterator[Path]:
+ """List candidates for numbered directories to be removed - follows py.path."""
+ max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1)
+ max_delete = max_existing - keep
+ paths = find_prefixed(root, prefix)
+ paths, paths2 = itertools.tee(paths)
+ numbers = map(parse_num, extract_suffixes(paths2, prefix))
+ for path, number in zip(paths, numbers):
+ if number <= max_delete:
+ yield path
+
+
+def cleanup_numbered_dir(
+ root: Path, prefix: str, keep: int, consider_lock_dead_if_created_before: float
+) -> None:
+ """Cleanup for lock driven numbered directories."""
+ for path in cleanup_candidates(root, prefix, keep):
+ try_cleanup(path, consider_lock_dead_if_created_before)
+ for path in root.glob("garbage-*"):
+ try_cleanup(path, consider_lock_dead_if_created_before)
+
+
+def make_numbered_dir_with_cleanup(
+ root: Path,
+ prefix: str,
+ keep: int,
+ lock_timeout: float,
+ mode: int,
+) -> Path:
+ """Create a numbered dir with a cleanup lock and remove old ones."""
+ e = None
+ for i in range(10):
+ try:
+ p = make_numbered_dir(root, prefix, mode)
+ lock_path = create_cleanup_lock(p)
+ register_cleanup_lock_removal(lock_path)
+ except Exception as exc:
+ e = exc
+ else:
+ consider_lock_dead_if_created_before = p.stat().st_mtime - lock_timeout
+ # Register a cleanup for program exit
+ atexit.register(
+ cleanup_numbered_dir,
+ root,
+ prefix,
+ keep,
+ consider_lock_dead_if_created_before,
+ )
+ return p
+ assert e is not None
+ raise e
+
+
+def resolve_from_str(input: str, rootpath: Path) -> Path:
+ input = expanduser(input)
+ input = expandvars(input)
+ if isabs(input):
+ return Path(input)
+ else:
+ return rootpath.joinpath(input)
+
+
+def fnmatch_ex(pattern: str, path: Union[str, "os.PathLike[str]"]) -> bool:
+ """A port of FNMatcher from py.path.common which works with PurePath() instances.
+
+ The difference between this algorithm and PurePath.match() is that the
+ latter matches "**" glob expressions for each part of the path, while
+ this algorithm uses the whole path instead.
+
+ For example:
+ "tests/foo/bar/doc/test_foo.py" matches pattern "tests/**/doc/test*.py"
+ with this algorithm, but not with PurePath.match().
+
+ This algorithm was ported to keep backward compatibility with existing
+ settings which assume paths match according to this logic.
+
+ References:
+ * https://bugs.python.org/issue29249
+ * https://bugs.python.org/issue34731
+ """
+ path = PurePath(path)
+ iswin32 = sys.platform.startswith("win")
+
+ if iswin32 and sep not in pattern and posix_sep in pattern:
+ # Running on Windows, the pattern has no Windows path separators,
+ # and the pattern has one or more Posix path separators. Replace
+ # the Posix path separators with the Windows path separator.
+ pattern = pattern.replace(posix_sep, sep)
+
+ if sep not in pattern:
+ name = path.name
+ else:
+ name = str(path)
+ if path.is_absolute() and not os.path.isabs(pattern):
+ pattern = f"*{os.sep}{pattern}"
+ return fnmatch.fnmatch(name, pattern)
+
+
+def parts(s: str) -> Set[str]:
+ parts = s.split(sep)
+ return {sep.join(parts[: i + 1]) or sep for i in range(len(parts))}
+
+
+def symlink_or_skip(src, dst, **kwargs):
+ """Make a symlink, or skip the test in case symlinks are not supported."""
+ try:
+ os.symlink(str(src), str(dst), **kwargs)
+ except OSError as e:
+ skip(f"symlinks not supported: {e}")
+
+
+class ImportMode(Enum):
+ """Possible values for `mode` parameter of `import_path`."""
+
+ prepend = "prepend"
+ append = "append"
+ importlib = "importlib"
+
+
+class ImportPathMismatchError(ImportError):
+ """Raised on import_path() if there is a mismatch of __file__'s.
+
+ This can happen when `import_path` is called multiple times with different filenames that have
+ the same basename but reside in different packages
+ (for example "/tests1/test_foo.py" and "/tests2/test_foo.py").
+ """
+
+
+def import_path(
+ p: Union[str, "os.PathLike[str]"],
+ *,
+ mode: Union[str, ImportMode] = ImportMode.prepend,
+ root: Path,
+) -> ModuleType:
+ """Import and return a module from the given path, which can be a file (a module) or
+ a directory (a package).
+
+ The import mechanism used is controlled by the `mode` parameter:
+
+ * `mode == ImportMode.prepend`: the directory containing the module (or package, taking
+ `__init__.py` files into account) will be put at the *start* of `sys.path` before
+ being imported with `__import__`.
+
+ * `mode == ImportMode.append`: same as `prepend`, but the directory will be appended
+ to the end of `sys.path`, if not already in `sys.path`.
+
+ * `mode == ImportMode.importlib`: uses more fine control mechanisms provided by `importlib`
+ to import the module, which avoids having to use `__import__` and muck with `sys.path`
+ at all. It effectively allows having same-named test modules in different places.
+
+ :param root:
+ Used as an anchor when mode == ImportMode.importlib to obtain
+ a unique name for the module being imported so it can safely be stored
+ into ``sys.modules``.
+
+ :raises ImportPathMismatchError:
+ If after importing the given `path` and the module `__file__`
+ are different. Only raised in `prepend` and `append` modes.
+ """
+ mode = ImportMode(mode)
+
+ path = Path(p)
+
+ if not path.exists():
+ raise ImportError(path)
+
+ if mode is ImportMode.importlib:
+ module_name = module_name_from_path(path, root)
+
+ for meta_importer in sys.meta_path:
+ spec = meta_importer.find_spec(module_name, [str(path.parent)])
+ if spec is not None:
+ break
+ else:
+ spec = importlib.util.spec_from_file_location(module_name, str(path))
+
+ if spec is None:
+ raise ImportError(f"Can't find module {module_name} at location {path}")
+ mod = importlib.util.module_from_spec(spec)
+ sys.modules[module_name] = mod
+ spec.loader.exec_module(mod) # type: ignore[union-attr]
+ insert_missing_modules(sys.modules, module_name)
+ return mod
+
+ pkg_path = resolve_package_path(path)
+ if pkg_path is not None:
+ pkg_root = pkg_path.parent
+ names = list(path.with_suffix("").relative_to(pkg_root).parts)
+ if names[-1] == "__init__":
+ names.pop()
+ module_name = ".".join(names)
+ else:
+ pkg_root = path.parent
+ module_name = path.stem
+
+ # Change sys.path permanently: restoring it at the end of this function would cause surprising
+ # problems because of delayed imports: for example, a conftest.py file imported by this function
+ # might have local imports, which would fail at runtime if we restored sys.path.
+ if mode is ImportMode.append:
+ if str(pkg_root) not in sys.path:
+ sys.path.append(str(pkg_root))
+ elif mode is ImportMode.prepend:
+ if str(pkg_root) != sys.path[0]:
+ sys.path.insert(0, str(pkg_root))
+ else:
+ assert_never(mode)
+
+ importlib.import_module(module_name)
+
+ mod = sys.modules[module_name]
+ if path.name == "__init__.py":
+ return mod
+
+ ignore = os.environ.get("PY_IGNORE_IMPORTMISMATCH", "")
+ if ignore != "1":
+ module_file = mod.__file__
+ if module_file.endswith((".pyc", ".pyo")):
+ module_file = module_file[:-1]
+ if module_file.endswith(os.path.sep + "__init__.py"):
+ module_file = module_file[: -(len(os.path.sep + "__init__.py"))]
+
+ try:
+ is_same = _is_same(str(path), module_file)
+ except FileNotFoundError:
+ is_same = False
+
+ if not is_same:
+ raise ImportPathMismatchError(module_name, module_file, path)
+
+ return mod
+
+
+# Implement a special _is_same function on Windows which returns True if the two filenames
+# compare equal, to circumvent os.path.samefile returning False for mounts in UNC (#7678).
+if sys.platform.startswith("win"):
+
+ def _is_same(f1: str, f2: str) -> bool:
+ return Path(f1) == Path(f2) or os.path.samefile(f1, f2)
+
+
+else:
+
+ def _is_same(f1: str, f2: str) -> bool:
+ return os.path.samefile(f1, f2)
+
+
+def module_name_from_path(path: Path, root: Path) -> str:
+ """
+ Return a dotted module name based on the given path, anchored on root.
+
+ For example: given path="/projects/src/tests/test_foo.py" and root="/projects", the
+ resulting module name will be "src.tests.test_foo".
+ """
+ path = path.with_suffix("")
+ try:
+ relative_path = path.relative_to(root)
+ except ValueError:
+ # If we can't get a relative path to root, use the full path, except
+ # for the first part ("d:\\" or "/" depending on the platform, for example).
+ path_parts = path.parts[1:]
+ else:
+ # Use the parts for the relative path to the root path.
+ path_parts = relative_path.parts
+
+ return ".".join(path_parts)
+
+
+def insert_missing_modules(modules: Dict[str, ModuleType], module_name: str) -> None:
+ """
+ Used by ``import_path`` to create intermediate modules when using mode=importlib.
+
+ When we want to import a module as "src.tests.test_foo" for example, we need
+ to create empty modules "src" and "src.tests" after inserting "src.tests.test_foo",
+ otherwise "src.tests.test_foo" is not importable by ``__import__``.
+ """
+ module_parts = module_name.split(".")
+ while module_name:
+ if module_name not in modules:
+ module = ModuleType(
+ module_name,
+ doc="Empty module created by pytest's importmode=importlib.",
+ )
+ modules[module_name] = module
+ module_parts.pop(-1)
+ module_name = ".".join(module_parts)
+
+
+def resolve_package_path(path: Path) -> Optional[Path]:
+ """Return the Python package path by looking for the last
+ directory upwards which still contains an __init__.py.
+
+ Returns None if it cannot be determined.
+ """
+ result = None
+ for parent in itertools.chain((path,), path.parents):
+ if parent.is_dir():
+ if not parent.joinpath("__init__.py").is_file():
+ break
+ if not parent.name.isidentifier():
+ break
+ result = parent
+ return result
+
+
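+# Illustrative sketch (hypothetical helper and layout, not part of this module):
+# given tmp/pkg/__init__.py and tmp/pkg/sub/__init__.py, the package root is
+# tmp/pkg, because tmp itself has no __init__.py.
+def _example_resolve_package_path(tmp: Path) -> Optional[Path]:
+    (tmp / "pkg" / "sub").mkdir(parents=True)
+    (tmp / "pkg" / "__init__.py").touch()
+    (tmp / "pkg" / "sub" / "__init__.py").touch()
+    return resolve_package_path(tmp / "pkg" / "sub" / "__init__.py")  # -> tmp / "pkg"
+
+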
+def visit(
+ path: Union[str, "os.PathLike[str]"], recurse: Callable[["os.DirEntry[str]"], bool]
+) -> Iterator["os.DirEntry[str]"]:
+ """Walk a directory recursively, in breadth-first order.
+
+ Entries at each directory level are sorted.
+ """
+
+ # Skip entries with symlink loops and other brokenness, so the caller doesn't
+ # have to deal with it.
+ entries = []
+ for entry in os.scandir(path):
+ try:
+ entry.is_file()
+ except OSError as err:
+ if _ignore_error(err):
+ continue
+ raise
+ entries.append(entry)
+
+ entries.sort(key=lambda entry: entry.name)
+
+ yield from entries
+
+ for entry in entries:
+ if entry.is_dir() and recurse(entry):
+ yield from visit(entry.path, recurse)
+
+
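+# Illustrative sketch (hypothetical helper, not part of this module): yield every
+# regular file below `root`, skipping any "__pycache__" directories via the
+# `recurse` predicate.
+def _example_visit(root: Path) -> Iterator[Path]:
+    for entry in visit(root, recurse=lambda e: e.name != "__pycache__"):
+        if entry.is_file():
+            yield Path(entry.path)
+
+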
+def absolutepath(path: Union[Path, str]) -> Path:
+ """Convert a path to an absolute path using os.path.abspath.
+
+ Prefer this over Path.resolve() (see #6523).
+ Prefer this over Path.absolute() (not public, doesn't normalize).
+ """
+ return Path(os.path.abspath(str(path)))
+
+
+def commonpath(path1: Path, path2: Path) -> Optional[Path]:
+ """Return the common part shared with the other path, or None if there is
+ no common part.
+
+ If one path is relative and one is absolute, returns None.
+ """
+ try:
+ return Path(os.path.commonpath((str(path1), str(path2))))
+ except ValueError:
+ return None
+
+
+def bestrelpath(directory: Path, dest: Path) -> str:
+ """Return a string which is a relative path from directory to dest such
+ that directory/bestrelpath == dest.
+
+ The paths must be either both absolute or both relative.
+
+ If no such path can be determined, returns dest.
+ """
+ assert isinstance(directory, Path)
+ assert isinstance(dest, Path)
+ if dest == directory:
+ return os.curdir
+ # Find the longest common directory.
+ base = commonpath(directory, dest)
+ # Can be the case on Windows for two absolute paths on different drives.
+ # Can be the case for two relative paths without common prefix.
+ # Can be the case for a relative path and an absolute path.
+ if not base:
+ return str(dest)
+ reldirectory = directory.relative_to(base)
+ reldest = dest.relative_to(base)
+ return os.path.join(
+ # Back from directory to base.
+ *([os.pardir] * len(reldirectory.parts)),
+ # Forward from base to dest.
+ *reldest.parts,
+ )
+
+
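+# Illustrative sketch (hypothetical helper, not part of this module): bestrelpath
+# walks up from `directory` to the common base and then down to `dest`.
+def _example_bestrelpath() -> None:
+    assert bestrelpath(Path("/a/b/c"), Path("/a/x/y")) == os.path.join("..", "..", "x", "y")
+    assert bestrelpath(Path("/a/b"), Path("/a/b")) == os.curdir
+
+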
+# Originates from py.path.local.copy(), with significant trims and adjustments.
+# TODO(py38): Replace with shutil.copytree(..., symlinks=True, dirs_exist_ok=True)
+def copytree(source: Path, target: Path) -> None:
+ """Recursively copy a source directory to target."""
+ assert source.is_dir()
+ for entry in visit(source, recurse=lambda entry: not entry.is_symlink()):
+ x = Path(entry)
+ relpath = x.relative_to(source)
+ newx = target / relpath
+ newx.parent.mkdir(exist_ok=True)
+ if x.is_symlink():
+ newx.symlink_to(os.readlink(x))
+ elif x.is_file():
+ shutil.copyfile(x, newx)
+ elif x.is_dir():
+ newx.mkdir(exist_ok=True)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/py.typed b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pytester.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pytester.py
new file mode 100644
index 0000000000..363a372744
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pytester.py
@@ -0,0 +1,1748 @@
+"""(Disabled by default) support for testing pytest and pytest plugins.
+
+PYTEST_DONT_REWRITE
+"""
+import collections.abc
+import contextlib
+import gc
+import importlib
+import os
+import platform
+import re
+import shutil
+import subprocess
+import sys
+import traceback
+from fnmatch import fnmatch
+from io import StringIO
+from pathlib import Path
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generator
+from typing import IO
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import TextIO
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+from weakref import WeakKeyDictionary
+
+from iniconfig import IniConfig
+from iniconfig import SectionWrapper
+
+from _pytest import timing
+from _pytest._code import Source
+from _pytest.capture import _get_multicapture
+from _pytest.compat import final
+from _pytest.compat import NOTSET
+from _pytest.compat import NotSetType
+from _pytest.config import _PluggyPlugin
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.config import hookimpl
+from _pytest.config import main
+from _pytest.config import PytestPluginManager
+from _pytest.config.argparsing import Parser
+from _pytest.deprecated import check_ispytest
+from _pytest.fixtures import fixture
+from _pytest.fixtures import FixtureRequest
+from _pytest.main import Session
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.nodes import Collector
+from _pytest.nodes import Item
+from _pytest.outcomes import fail
+from _pytest.outcomes import importorskip
+from _pytest.outcomes import skip
+from _pytest.pathlib import bestrelpath
+from _pytest.pathlib import copytree
+from _pytest.pathlib import make_numbered_dir
+from _pytest.reports import CollectReport
+from _pytest.reports import TestReport
+from _pytest.tmpdir import TempPathFactory
+from _pytest.warning_types import PytestWarning
+
+
+if TYPE_CHECKING:
+ from typing_extensions import Final
+ from typing_extensions import Literal
+
+ import pexpect
+
+
+pytest_plugins = ["pytester_assertions"]
+
+
+IGNORE_PAM = [ # filenames added when obtaining details about the current user
+ "/var/lib/sss/mc/passwd"
+]
+
+
+def pytest_addoption(parser: Parser) -> None:
+ parser.addoption(
+ "--lsof",
+ action="store_true",
+ dest="lsof",
+ default=False,
+ help="run FD checks if lsof is available",
+ )
+
+ parser.addoption(
+ "--runpytest",
+ default="inprocess",
+ dest="runpytest",
+ choices=("inprocess", "subprocess"),
+ help=(
+ "run pytest sub runs in tests using an 'inprocess' "
+ "or 'subprocess' (python -m main) method"
+ ),
+ )
+
+ parser.addini(
+ "pytester_example_dir", help="directory to take the pytester example files from"
+ )
+
+
+def pytest_configure(config: Config) -> None:
+ if config.getvalue("lsof"):
+ checker = LsofFdLeakChecker()
+ if checker.matching_platform():
+ config.pluginmanager.register(checker)
+
+ config.addinivalue_line(
+ "markers",
+ "pytester_example_path(*path_segments): join the given path "
+ "segments to `pytester_example_dir` for this test.",
+ )
+
+
+class LsofFdLeakChecker:
+ def get_open_files(self) -> List[Tuple[str, str]]:
+ out = subprocess.run(
+ ("lsof", "-Ffn0", "-p", str(os.getpid())),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ check=True,
+ universal_newlines=True,
+ ).stdout
+
+ def isopen(line: str) -> bool:
+ return line.startswith("f") and (
+ "deleted" not in line
+ and "mem" not in line
+ and "txt" not in line
+ and "cwd" not in line
+ )
+
+ open_files = []
+
+ for line in out.split("\n"):
+ if isopen(line):
+ fields = line.split("\0")
+ fd = fields[0][1:]
+ filename = fields[1][1:]
+ if filename in IGNORE_PAM:
+ continue
+ if filename.startswith("/"):
+ open_files.append((fd, filename))
+
+ return open_files
+
+ def matching_platform(self) -> bool:
+ try:
+ subprocess.run(("lsof", "-v"), check=True)
+ except (OSError, subprocess.CalledProcessError):
+ return False
+ else:
+ return True
+
+ @hookimpl(hookwrapper=True, tryfirst=True)
+ def pytest_runtest_protocol(self, item: Item) -> Generator[None, None, None]:
+ lines1 = self.get_open_files()
+ yield
+ if hasattr(sys, "pypy_version_info"):
+ gc.collect()
+ lines2 = self.get_open_files()
+
+ new_fds = {t[0] for t in lines2} - {t[0] for t in lines1}
+ leaked_files = [t for t in lines2 if t[0] in new_fds]
+ if leaked_files:
+ error = [
+ "***** %s FD leakage detected" % len(leaked_files),
+ *(str(f) for f in leaked_files),
+ "*** Before:",
+ *(str(f) for f in lines1),
+ "*** After:",
+ *(str(f) for f in lines2),
+ "***** %s FD leakage detected" % len(leaked_files),
+ "*** function %s:%s: %s " % item.location,
+ "See issue #2366",
+ ]
+ item.warn(PytestWarning("\n".join(error)))
+
+
+# used at least by pytest-xdist plugin
+
+
+@fixture
+def _pytest(request: FixtureRequest) -> "PytestArg":
+ """Return a helper which offers a gethookrecorder(hook) method which
+ returns a HookRecorder instance which helps to make assertions about called
+ hooks."""
+ return PytestArg(request)
+
+
+class PytestArg:
+ def __init__(self, request: FixtureRequest) -> None:
+ self._request = request
+
+ def gethookrecorder(self, hook) -> "HookRecorder":
+ hookrecorder = HookRecorder(hook._pm)
+ self._request.addfinalizer(hookrecorder.finish_recording)
+ return hookrecorder
+
+
+def get_public_names(values: Iterable[str]) -> List[str]:
+ """Only return names from iterator values without a leading underscore."""
+ return [x for x in values if x[0] != "_"]
+
+
+@final
+class RecordedHookCall:
+ """A recorded call to a hook.
+
+ The arguments to the hook call are set as attributes.
+ For example:
+
+ .. code-block:: python
+
+ calls = hook_recorder.getcalls("pytest_runtest_setup")
+ # Suppose pytest_runtest_setup was called once with `item=an_item`.
+ assert calls[0].item is an_item
+ """
+
+ def __init__(self, name: str, kwargs) -> None:
+ self.__dict__.update(kwargs)
+ self._name = name
+
+ def __repr__(self) -> str:
+ d = self.__dict__.copy()
+ del d["_name"]
+ return f"<RecordedHookCall {self._name!r}(**{d!r})>"
+
+ if TYPE_CHECKING:
+ # The class has undetermined attributes, this tells mypy about it.
+ def __getattr__(self, key: str):
+ ...
+
+
+@final
+class HookRecorder:
+ """Record all hooks called in a plugin manager.
+
+ Hook recorders are created by :class:`Pytester`.
+
+ This wraps all the hook calls in the plugin manager, recording each call
+ before propagating the normal calls.
+ """
+
+ def __init__(
+ self, pluginmanager: PytestPluginManager, *, _ispytest: bool = False
+ ) -> None:
+ check_ispytest(_ispytest)
+
+ self._pluginmanager = pluginmanager
+ self.calls: List[RecordedHookCall] = []
+ self.ret: Optional[Union[int, ExitCode]] = None
+
+ def before(hook_name: str, hook_impls, kwargs) -> None:
+ self.calls.append(RecordedHookCall(hook_name, kwargs))
+
+ def after(outcome, hook_name: str, hook_impls, kwargs) -> None:
+ pass
+
+ self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after)
+
+ def finish_recording(self) -> None:
+ self._undo_wrapping()
+
+ def getcalls(self, names: Union[str, Iterable[str]]) -> List[RecordedHookCall]:
+ """Get all recorded calls to hooks with the given names (or name)."""
+ if isinstance(names, str):
+ names = names.split()
+ return [call for call in self.calls if call._name in names]
+
+ def assert_contains(self, entries: Sequence[Tuple[str, str]]) -> None:
+ __tracebackhide__ = True
+ i = 0
+ entries = list(entries)
+ backlocals = sys._getframe(1).f_locals
+ while entries:
+ name, check = entries.pop(0)
+ for ind, call in enumerate(self.calls[i:]):
+ if call._name == name:
+ print("NAMEMATCH", name, call)
+ if eval(check, backlocals, call.__dict__):
+ print("CHECKERMATCH", repr(check), "->", call)
+ else:
+ print("NOCHECKERMATCH", repr(check), "-", call)
+ continue
+ i += ind + 1
+ break
+ print("NONAMEMATCH", name, "with", call)
+ else:
+ fail(f"could not find {name!r} check {check!r}")
+
+ def popcall(self, name: str) -> RecordedHookCall:
+ __tracebackhide__ = True
+ for i, call in enumerate(self.calls):
+ if call._name == name:
+ del self.calls[i]
+ return call
+ lines = [f"could not find call {name!r}, in:"]
+ lines.extend([" %s" % x for x in self.calls])
+ fail("\n".join(lines))
+
+ def getcall(self, name: str) -> RecordedHookCall:
+ values = self.getcalls(name)
+ assert len(values) == 1, (name, values)
+ return values[0]
+
+ # functionality for test reports
+
+ @overload
+ def getreports(
+ self,
+ names: "Literal['pytest_collectreport']",
+ ) -> Sequence[CollectReport]:
+ ...
+
+ @overload
+ def getreports(
+ self,
+ names: "Literal['pytest_runtest_logreport']",
+ ) -> Sequence[TestReport]:
+ ...
+
+ @overload
+ def getreports(
+ self,
+ names: Union[str, Iterable[str]] = (
+ "pytest_collectreport",
+ "pytest_runtest_logreport",
+ ),
+ ) -> Sequence[Union[CollectReport, TestReport]]:
+ ...
+
+ def getreports(
+ self,
+ names: Union[str, Iterable[str]] = (
+ "pytest_collectreport",
+ "pytest_runtest_logreport",
+ ),
+ ) -> Sequence[Union[CollectReport, TestReport]]:
+ return [x.report for x in self.getcalls(names)]
+
+ def matchreport(
+ self,
+ inamepart: str = "",
+ names: Union[str, Iterable[str]] = (
+ "pytest_runtest_logreport",
+ "pytest_collectreport",
+ ),
+ when: Optional[str] = None,
+ ) -> Union[CollectReport, TestReport]:
+ """Return a testreport whose dotted import path matches."""
+ values = []
+ for rep in self.getreports(names=names):
+ if not when and rep.when != "call" and rep.passed:
+ # setup/teardown passing reports - let's ignore those
+ continue
+ if when and rep.when != when:
+ continue
+ if not inamepart or inamepart in rep.nodeid.split("::"):
+ values.append(rep)
+ if not values:
+ raise ValueError(
+ "could not find test report matching %r: "
+ "no test reports at all!" % (inamepart,)
+ )
+ if len(values) > 1:
+ raise ValueError(
+ "found 2 or more testreports matching {!r}: {}".format(
+ inamepart, values
+ )
+ )
+ return values[0]
+
+ @overload
+ def getfailures(
+ self,
+ names: "Literal['pytest_collectreport']",
+ ) -> Sequence[CollectReport]:
+ ...
+
+ @overload
+ def getfailures(
+ self,
+ names: "Literal['pytest_runtest_logreport']",
+ ) -> Sequence[TestReport]:
+ ...
+
+ @overload
+ def getfailures(
+ self,
+ names: Union[str, Iterable[str]] = (
+ "pytest_collectreport",
+ "pytest_runtest_logreport",
+ ),
+ ) -> Sequence[Union[CollectReport, TestReport]]:
+ ...
+
+ def getfailures(
+ self,
+ names: Union[str, Iterable[str]] = (
+ "pytest_collectreport",
+ "pytest_runtest_logreport",
+ ),
+ ) -> Sequence[Union[CollectReport, TestReport]]:
+ return [rep for rep in self.getreports(names) if rep.failed]
+
+ def getfailedcollections(self) -> Sequence[CollectReport]:
+ return self.getfailures("pytest_collectreport")
+
+ def listoutcomes(
+ self,
+ ) -> Tuple[
+ Sequence[TestReport],
+ Sequence[Union[CollectReport, TestReport]],
+ Sequence[Union[CollectReport, TestReport]],
+ ]:
+ passed = []
+ skipped = []
+ failed = []
+ for rep in self.getreports(
+ ("pytest_collectreport", "pytest_runtest_logreport")
+ ):
+ if rep.passed:
+ if rep.when == "call":
+ assert isinstance(rep, TestReport)
+ passed.append(rep)
+ elif rep.skipped:
+ skipped.append(rep)
+ else:
+ assert rep.failed, f"Unexpected outcome: {rep!r}"
+ failed.append(rep)
+ return passed, skipped, failed
+
+ def countoutcomes(self) -> List[int]:
+ return [len(x) for x in self.listoutcomes()]
+
+ def assertoutcome(self, passed: int = 0, skipped: int = 0, failed: int = 0) -> None:
+ __tracebackhide__ = True
+ from _pytest.pytester_assertions import assertoutcome
+
+ outcomes = self.listoutcomes()
+ assertoutcome(
+ outcomes,
+ passed=passed,
+ skipped=skipped,
+ failed=failed,
+ )
+
+ def clear(self) -> None:
+ self.calls[:] = []
+
+
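+# Illustrative sketch (hypothetical helper, not part of this module): running
+# pytest in-process and asserting on the hooks recorded by the returned
+# HookRecorder (see Pytester.inline_run below).
+def _example_hook_recorder(pytester: "Pytester") -> None:
+    pytester.makepyfile("def test_ok(): assert True")
+    reprec = pytester.inline_run()
+    reprec.assertoutcome(passed=1)
+    # One pytest_runtest_logreport call per phase: setup, call and teardown.
+    assert len(reprec.getcalls("pytest_runtest_logreport")) == 3
+
+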
+@fixture
+def linecomp() -> "LineComp":
+ """A :class: `LineComp` instance for checking that an input linearly
+ contains a sequence of strings."""
+ return LineComp()
+
+
+@fixture(name="LineMatcher")
+def LineMatcher_fixture(request: FixtureRequest) -> Type["LineMatcher"]:
+ """A reference to the :class: `LineMatcher`.
+
+ This is instantiable with a list of lines (without their trailing newlines).
+ This is useful for testing large texts, such as the output of commands.
+ """
+ return LineMatcher
+
+
+@fixture
+def pytester(request: FixtureRequest, tmp_path_factory: TempPathFactory) -> "Pytester":
+ """
+ Facilities to write tests/configuration files, execute pytest in isolation, and match
+ against expected output, perfect for black-box testing of pytest plugins.
+
+ It attempts to isolate the test run from external factors as much as possible, modifying
+ the current working directory to ``path`` and environment variables during initialization.
+
+ It is particularly useful for testing plugins. It is similar to the :fixture:`tmp_path`
+ fixture but provides methods which aid in testing pytest itself.
+ """
+ return Pytester(request, tmp_path_factory, _ispytest=True)
+
+
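+# Illustrative sketch (hypothetical helper, not part of this module): a typical
+# black-box plugin test written against the `pytester` fixture returned above.
+def _example_pytester_usage(pytester: "Pytester") -> None:
+    pytester.makepyfile("def test_ok(): assert True")
+    result = pytester.runpytest()
+    result.assert_outcomes(passed=1)
+
+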
+@fixture
+def _sys_snapshot() -> Generator[None, None, None]:
+ snappaths = SysPathsSnapshot()
+ snapmods = SysModulesSnapshot()
+ yield
+ snapmods.restore()
+ snappaths.restore()
+
+
+@fixture
+def _config_for_test() -> Generator[Config, None, None]:
+ from _pytest.config import get_config
+
+ config = get_config()
+ yield config
+ config._ensure_unconfigure() # cleanup, e.g. capman closing tmpfiles.
+
+
+# Regex to match the session duration string in the summary: "74.34s".
+rex_session_duration = re.compile(r"\d+\.\d\ds")
+# Regex to match all the counts and phrases in the summary line: "34 passed, 111 skipped".
+rex_outcome = re.compile(r"(\d+) (\w+)")
+
+
+@final
+class RunResult:
+ """The result of running a command from :class:`~pytest.Pytester`."""
+
+ def __init__(
+ self,
+ ret: Union[int, ExitCode],
+ outlines: List[str],
+ errlines: List[str],
+ duration: float,
+ ) -> None:
+ try:
+ self.ret: Union[int, ExitCode] = ExitCode(ret)
+ """The return value."""
+ except ValueError:
+ self.ret = ret
+ self.outlines = outlines
+ """List of lines captured from stdout."""
+ self.errlines = errlines
+ """List of lines captured from stderr."""
+ self.stdout = LineMatcher(outlines)
+ """:class:`~pytest.LineMatcher` of stdout.
+
+ Use e.g. :func:`str(stdout) <pytest.LineMatcher.__str__()>` to reconstruct stdout, or the commonly used
+ :func:`stdout.fnmatch_lines() <pytest.LineMatcher.fnmatch_lines()>` method.
+ """
+ self.stderr = LineMatcher(errlines)
+ """:class:`~pytest.LineMatcher` of stderr."""
+ self.duration = duration
+ """Duration in seconds."""
+
+ def __repr__(self) -> str:
+ return (
+ "<RunResult ret=%s len(stdout.lines)=%d len(stderr.lines)=%d duration=%.2fs>"
+ % (self.ret, len(self.stdout.lines), len(self.stderr.lines), self.duration)
+ )
+
+ def parseoutcomes(self) -> Dict[str, int]:
+ """Return a dictionary of outcome noun -> count from parsing the terminal
+ output that the test process produced.
+
+ The returned nouns will always be in plural form::
+
+ ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ====
+
+ Will return ``{"failed": 1, "passed": 1, "warnings": 1, "errors": 1}``.
+ """
+ return self.parse_summary_nouns(self.outlines)
+
+ @classmethod
+ def parse_summary_nouns(cls, lines) -> Dict[str, int]:
+ """Extract the nouns from a pytest terminal summary line.
+
+ It always returns the plural noun for consistency::
+
+ ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ====
+
+ Will return ``{"failed": 1, "passed": 1, "warnings": 1, "errors": 1}``.
+ """
+ for line in reversed(lines):
+ if rex_session_duration.search(line):
+ outcomes = rex_outcome.findall(line)
+ ret = {noun: int(count) for (count, noun) in outcomes}
+ break
+ else:
+ raise ValueError("Pytest terminal summary report not found")
+
+ to_plural = {
+ "warning": "warnings",
+ "error": "errors",
+ }
+ return {to_plural.get(k, k): v for k, v in ret.items()}
+
+ def assert_outcomes(
+ self,
+ passed: int = 0,
+ skipped: int = 0,
+ failed: int = 0,
+ errors: int = 0,
+ xpassed: int = 0,
+ xfailed: int = 0,
+ warnings: Optional[int] = None,
+ deselected: Optional[int] = None,
+ ) -> None:
+ """
+ Assert that the specified outcomes appear with the respective
+ numbers (0 means it didn't occur) in the text output from a test run.
+
+ ``warnings`` and ``deselected`` are only checked if not None.
+ """
+ __tracebackhide__ = True
+ from _pytest.pytester_assertions import assert_outcomes
+
+ outcomes = self.parseoutcomes()
+ assert_outcomes(
+ outcomes,
+ passed=passed,
+ skipped=skipped,
+ failed=failed,
+ errors=errors,
+ xpassed=xpassed,
+ xfailed=xfailed,
+ warnings=warnings,
+ deselected=deselected,
+ )
+
+
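+# Illustrative sketch (hypothetical helper, not part of this module): how a
+# terminal summary line maps onto the dict returned by parse_summary_nouns();
+# singular nouns such as "warning" are normalized to their plural form.
+def _example_parse_summary_nouns() -> Dict[str, int]:
+    line = "=== 1 failed, 2 passed, 1 warning in 0.12s ==="
+    return RunResult.parse_summary_nouns([line])  # {"failed": 1, "passed": 2, "warnings": 1}
+
+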
+class CwdSnapshot:
+ def __init__(self) -> None:
+ self.__saved = os.getcwd()
+
+ def restore(self) -> None:
+ os.chdir(self.__saved)
+
+
+class SysModulesSnapshot:
+ def __init__(self, preserve: Optional[Callable[[str], bool]] = None) -> None:
+ self.__preserve = preserve
+ self.__saved = dict(sys.modules)
+
+ def restore(self) -> None:
+ if self.__preserve:
+ self.__saved.update(
+ (k, m) for k, m in sys.modules.items() if self.__preserve(k)
+ )
+ sys.modules.clear()
+ sys.modules.update(self.__saved)
+
+
+class SysPathsSnapshot:
+ def __init__(self) -> None:
+ self.__saved = list(sys.path), list(sys.meta_path)
+
+ def restore(self) -> None:
+ sys.path[:], sys.meta_path[:] = self.__saved
+
+
+@final
+class Pytester:
+ """
+ Facilities to write tests/configuration files, execute pytest in isolation, and match
+ against expected output, perfect for black-box testing of pytest plugins.
+
+ It attempts to isolate the test run from external factors as much as possible, modifying
+ the current working directory to ``path`` and environment variables during initialization.
+
+ Attributes:
+
+ :ivar Path path: temporary directory path used to create files/run tests from, etc.
+
+ :ivar plugins:
+ A list of plugins to use with :py:meth:`parseconfig` and
+ :py:meth:`runpytest`. Initially this is an empty list but plugins can
+ be added to the list. The type of items to add to the list depends on
+ the method using them so refer to them for details.
+ """
+
+ __test__ = False
+
+ CLOSE_STDIN: "Final" = NOTSET
+
+ class TimeoutExpired(Exception):
+ pass
+
+ def __init__(
+ self,
+ request: FixtureRequest,
+ tmp_path_factory: TempPathFactory,
+ *,
+ _ispytest: bool = False,
+ ) -> None:
+ check_ispytest(_ispytest)
+ self._request = request
+ self._mod_collections: WeakKeyDictionary[
+ Collector, List[Union[Item, Collector]]
+ ] = WeakKeyDictionary()
+ if request.function:
+ name: str = request.function.__name__
+ else:
+ name = request.node.name
+ self._name = name
+ self._path: Path = tmp_path_factory.mktemp(name, numbered=True)
+ self.plugins: List[Union[str, _PluggyPlugin]] = []
+ self._cwd_snapshot = CwdSnapshot()
+ self._sys_path_snapshot = SysPathsSnapshot()
+ self._sys_modules_snapshot = self.__take_sys_modules_snapshot()
+ self.chdir()
+ self._request.addfinalizer(self._finalize)
+ self._method = self._request.config.getoption("--runpytest")
+ self._test_tmproot = tmp_path_factory.mktemp(f"tmp-{name}", numbered=True)
+
+ self._monkeypatch = mp = MonkeyPatch()
+ mp.setenv("PYTEST_DEBUG_TEMPROOT", str(self._test_tmproot))
+ # Ensure no unexpected caching via tox.
+ mp.delenv("TOX_ENV_DIR", raising=False)
+ # Discard outer pytest options.
+ mp.delenv("PYTEST_ADDOPTS", raising=False)
+ # Ensure no user config is used.
+ tmphome = str(self.path)
+ mp.setenv("HOME", tmphome)
+ mp.setenv("USERPROFILE", tmphome)
+ # Do not use colors for inner runs by default.
+ mp.setenv("PY_COLORS", "0")
+
+ @property
+ def path(self) -> Path:
+ """Temporary directory where files are created and pytest is executed."""
+ return self._path
+
+ def __repr__(self) -> str:
+ return f"<Pytester {self.path!r}>"
+
+ def _finalize(self) -> None:
+ """
+ Clean up global state artifacts.
+
+ Some methods modify the global interpreter state and this tries to
+ clean this up. It does not remove the temporary directory however so
+ it can be looked at after the test run has finished.
+ """
+ self._sys_modules_snapshot.restore()
+ self._sys_path_snapshot.restore()
+ self._cwd_snapshot.restore()
+ self._monkeypatch.undo()
+
+ def __take_sys_modules_snapshot(self) -> SysModulesSnapshot:
+ # Some zope modules used by twisted-related tests keep internal state
+ # and can't be deleted; we had some trouble in the past with
+ # `zope.interface` for example.
+ #
+ # Preserve readline due to https://bugs.python.org/issue41033.
+ # pexpect issues a SIGWINCH.
+ def preserve_module(name):
+ return name.startswith(("zope", "readline"))
+
+ return SysModulesSnapshot(preserve=preserve_module)
+
+ def make_hook_recorder(self, pluginmanager: PytestPluginManager) -> HookRecorder:
+ """Create a new :py:class:`HookRecorder` for a PluginManager."""
+ pluginmanager.reprec = reprec = HookRecorder(pluginmanager, _ispytest=True)
+ self._request.addfinalizer(reprec.finish_recording)
+ return reprec
+
+ def chdir(self) -> None:
+ """Cd into the temporary directory.
+
+ This is done automatically upon instantiation.
+ """
+ os.chdir(self.path)
+
+ def _makefile(
+ self,
+ ext: str,
+ lines: Sequence[Union[Any, bytes]],
+ files: Dict[str, str],
+ encoding: str = "utf-8",
+ ) -> Path:
+ items = list(files.items())
+
+ if ext and not ext.startswith("."):
+ raise ValueError(
+ f"pytester.makefile expects a file extension, try .{ext} instead of {ext}"
+ )
+
+ def to_text(s: Union[Any, bytes]) -> str:
+ return s.decode(encoding) if isinstance(s, bytes) else str(s)
+
+ if lines:
+ source = "\n".join(to_text(x) for x in lines)
+ basename = self._name
+ items.insert(0, (basename, source))
+
+ ret = None
+ for basename, value in items:
+ p = self.path.joinpath(basename).with_suffix(ext)
+ p.parent.mkdir(parents=True, exist_ok=True)
+ source_ = Source(value)
+ source = "\n".join(to_text(line) for line in source_.lines)
+ p.write_text(source.strip(), encoding=encoding)
+ if ret is None:
+ ret = p
+ assert ret is not None
+ return ret
+
+ def makefile(self, ext: str, *args: str, **kwargs: str) -> Path:
+ r"""Create new text file(s) in the test directory.
+
+ :param str ext:
+ The extension the file(s) should use, including the dot, e.g. `.py`.
+ :param args:
+ All args are treated as strings and joined using newlines.
+ The result is written as contents to the file. The name of the
+ file is based on the test function requesting this fixture.
+ :param kwargs:
+ Each keyword is the name of a file, while the value of it will
+ be written as contents of the file.
+
+ Examples:
+
+ .. code-block:: python
+
+ pytester.makefile(".txt", "line1", "line2")
+
+ pytester.makefile(".ini", pytest="[pytest]\naddopts=-rs\n")
+
+ To create binary files, use :meth:`pathlib.Path.write_bytes` directly:
+
+ .. code-block:: python
+
+ filename = pytester.path.joinpath("foo.bin")
+ filename.write_bytes(b"...")
+ """
+ return self._makefile(ext, args, kwargs)
+
+ def makeconftest(self, source: str) -> Path:
+ """Write a contest.py file with 'source' as contents."""
+ return self.makepyfile(conftest=source)
+
+ def makeini(self, source: str) -> Path:
+ """Write a tox.ini file with 'source' as contents."""
+ return self.makefile(".ini", tox=source)
+
+ def getinicfg(self, source: str) -> SectionWrapper:
+ """Return the pytest section from the tox.ini config file."""
+ p = self.makeini(source)
+ return IniConfig(str(p))["pytest"]
+
+ def makepyprojecttoml(self, source: str) -> Path:
+ """Write a pyproject.toml file with 'source' as contents.
+
+ .. versionadded:: 6.0
+ """
+ return self.makefile(".toml", pyproject=source)
+
+ def makepyfile(self, *args, **kwargs) -> Path:
+ r"""Shortcut for .makefile() with a .py extension.
+
+ Defaults to the test name with a '.py' extension, e.g. test_foobar.py, overwriting
+ existing files.
+
+ Examples:
+
+ .. code-block:: python
+
+ def test_something(pytester):
+ # Initial file is created test_something.py.
+ pytester.makepyfile("foobar")
+ # To create multiple files, pass kwargs accordingly.
+ pytester.makepyfile(custom="foobar")
+ # At this point, both 'test_something.py' & 'custom.py' exist in the test directory.
+
+ """
+ return self._makefile(".py", args, kwargs)
+
+ def maketxtfile(self, *args, **kwargs) -> Path:
+ r"""Shortcut for .makefile() with a .txt extension.
+
+ Defaults to the test name with a '.txt' extension, e.g. test_foobar.txt, overwriting
+ existing files.
+
+ Examples:
+
+ .. code-block:: python
+
+ def test_something(pytester):
+ # Initial file is created test_something.txt.
+ pytester.maketxtfile("foobar")
+ # To create multiple files, pass kwargs accordingly.
+ pytester.maketxtfile(custom="foobar")
+ # At this point, both 'test_something.txt' & 'custom.txt' exist in the test directory.
+
+ """
+ return self._makefile(".txt", args, kwargs)
+
+ def syspathinsert(
+ self, path: Optional[Union[str, "os.PathLike[str]"]] = None
+ ) -> None:
+ """Prepend a directory to sys.path, defaults to :attr:`path`.
+
+ This is undone automatically when this object dies at the end of each
+ test.
+ """
+ if path is None:
+ path = self.path
+
+ self._monkeypatch.syspath_prepend(str(path))
+
+ def mkdir(self, name: str) -> Path:
+ """Create a new (sub)directory."""
+ p = self.path / name
+ p.mkdir()
+ return p
+
+ def mkpydir(self, name: str) -> Path:
+ """Create a new python package.
+
+ This creates a (sub)directory with an empty ``__init__.py`` file so it
+ gets recognised as a Python package.
+ """
+ p = self.path / name
+ p.mkdir()
+ p.joinpath("__init__.py").touch()
+ return p
+
+ def copy_example(self, name: Optional[str] = None) -> Path:
+ """Copy file from project's directory into the testdir.
+
+ :param str name: The name of the file to copy.
+ :return: path to the copied file or directory (inside ``self.path``).
+
+ """
+ example_dir = self._request.config.getini("pytester_example_dir")
+ if example_dir is None:
+ raise ValueError("pytester_example_dir is unset, can't copy examples")
+ example_dir = self._request.config.rootpath / example_dir
+
+ for extra_element in self._request.node.iter_markers("pytester_example_path"):
+ assert extra_element.args
+ example_dir = example_dir.joinpath(*extra_element.args)
+
+ if name is None:
+ func_name = self._name
+ maybe_dir = example_dir / func_name
+ maybe_file = example_dir / (func_name + ".py")
+
+ if maybe_dir.is_dir():
+ example_path = maybe_dir
+ elif maybe_file.is_file():
+ example_path = maybe_file
+ else:
+ raise LookupError(
+ f"{func_name} can't be found as module or package in {example_dir}"
+ )
+ else:
+ example_path = example_dir.joinpath(name)
+
+ if example_path.is_dir() and not example_path.joinpath("__init__.py").is_file():
+ copytree(example_path, self.path)
+ return self.path
+ elif example_path.is_file():
+ result = self.path.joinpath(example_path.name)
+ shutil.copy(example_path, result)
+ return result
+ else:
+ raise LookupError(
+ f'example "{example_path}" is not found as a file or directory'
+ )
+
+ def getnode(
+ self, config: Config, arg: Union[str, "os.PathLike[str]"]
+ ) -> Optional[Union[Collector, Item]]:
+ """Return the collection node of a file.
+
+ :param pytest.Config config:
+ A pytest config.
+ See :py:meth:`parseconfig` and :py:meth:`parseconfigure` for creating it.
+ :param os.PathLike[str] arg:
+ Path to the file.
+ """
+ session = Session.from_config(config)
+ assert "::" not in str(arg)
+ p = Path(os.path.abspath(arg))
+ config.hook.pytest_sessionstart(session=session)
+ res = session.perform_collect([str(p)], genitems=False)[0]
+ config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK)
+ return res
+
+ def getpathnode(self, path: Union[str, "os.PathLike[str]"]):
+ """Return the collection node of a file.
+
+ This is like :py:meth:`getnode` but uses :py:meth:`parseconfigure` to
+ create the (configured) pytest Config instance.
+
+ :param os.PathLike[str] path: Path to the file.
+ """
+ path = Path(path)
+ config = self.parseconfigure(path)
+ session = Session.from_config(config)
+ x = bestrelpath(session.path, path)
+ config.hook.pytest_sessionstart(session=session)
+ res = session.perform_collect([x], genitems=False)[0]
+ config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK)
+ return res
+
+ def genitems(self, colitems: Sequence[Union[Item, Collector]]) -> List[Item]:
+ """Generate all test items from a collection node.
+
+ This recurses into the collection node and returns a list of all the
+ test items contained within.
+ """
+ session = colitems[0].session
+ result: List[Item] = []
+ for colitem in colitems:
+ result.extend(session.genitems(colitem))
+ return result
+
+ def runitem(self, source: str) -> Any:
+ """Run the "test_func" Item.
+
+ The calling test instance (class containing the test method) must
+ provide a ``.getrunner()`` method which should return a runner which
+ can run the test protocol for a single item, e.g.
+ :py:func:`_pytest.runner.runtestprotocol`.
+ """
+ # used from runner functional tests
+ item = self.getitem(source)
+ # the test class where we are called from wants to provide the runner
+ testclassinstance = self._request.instance
+ runner = testclassinstance.getrunner()
+ return runner(item)
+
+ def inline_runsource(self, source: str, *cmdlineargs) -> HookRecorder:
+ """Run a test module in process using ``pytest.main()``.
+
+ This run writes "source" into a temporary file and runs
+ ``pytest.main()`` on it, returning a :py:class:`HookRecorder` instance
+ for the result.
+
+ :param source: The source code of the test module.
+ :param cmdlineargs: Any extra command line arguments to use.
+ """
+ p = self.makepyfile(source)
+ values = list(cmdlineargs) + [p]
+ return self.inline_run(*values)
+
+ def inline_genitems(self, *args) -> Tuple[List[Item], HookRecorder]:
+ """Run ``pytest.main(['--collectonly'])`` in-process.
+
+ Runs the :py:func:`pytest.main` function to run all of pytest inside
+ the test process itself like :py:meth:`inline_run`, but returns a
+ tuple of the collected items and a :py:class:`HookRecorder` instance.
+ """
+ rec = self.inline_run("--collect-only", *args)
+ items = [x.item for x in rec.getcalls("pytest_itemcollected")]
+ return items, rec
+
+ def inline_run(
+ self,
+ *args: Union[str, "os.PathLike[str]"],
+ plugins=(),
+ no_reraise_ctrlc: bool = False,
+ ) -> HookRecorder:
+ """Run ``pytest.main()`` in-process, returning a HookRecorder.
+
+ Runs the :py:func:`pytest.main` function to run all of pytest inside
+ the test process itself. This means it can return a
+ :py:class:`HookRecorder` instance which gives more detailed results
+ from that run than can be done by matching stdout/stderr from
+ :py:meth:`runpytest`.
+
+ :param args:
+ Command line arguments to pass to :py:func:`pytest.main`.
+ :param plugins:
+ Extra plugin instances the ``pytest.main()`` instance should use.
+ :param no_reraise_ctrlc:
+ Typically we reraise keyboard interrupts from the child run. If
+ True, the KeyboardInterrupt exception is captured.
+ """
+ # (maybe a cpython bug?) the importlib cache sometimes isn't updated
+ # properly between file creation and inline_run (especially if imports
+ # are interspersed with file creation)
+ importlib.invalidate_caches()
+
+ plugins = list(plugins)
+ finalizers = []
+ try:
+ # Any sys.module or sys.path changes done while running pytest
+ # inline should be reverted after the test run completes to avoid
+ # clashing with later inline tests run within the same pytest test,
+ # e.g. just because they use matching test module names.
+ finalizers.append(self.__take_sys_modules_snapshot().restore)
+ finalizers.append(SysPathsSnapshot().restore)
+
+ # Important note:
+ # - our tests should not leave any other references/registrations
+ # laying around other than possibly loaded test modules
+ # referenced from sys.modules, as nothing will clean those up
+ # automatically
+
+ rec = []
+
+ class Collect:
+ def pytest_configure(x, config: Config) -> None:
+ rec.append(self.make_hook_recorder(config.pluginmanager))
+
+ plugins.append(Collect())
+ ret = main([str(x) for x in args], plugins=plugins)
+ if len(rec) == 1:
+ reprec = rec.pop()
+ else:
+
+ class reprec: # type: ignore
+ pass
+
+ reprec.ret = ret
+
+ # Typically we reraise keyboard interrupts from the child run
+ # because it's our user requesting interruption of the testing.
+ if ret == ExitCode.INTERRUPTED and not no_reraise_ctrlc:
+ calls = reprec.getcalls("pytest_keyboard_interrupt")
+ if calls and calls[-1].excinfo.type == KeyboardInterrupt:
+ raise KeyboardInterrupt()
+ return reprec
+ finally:
+ for finalizer in finalizers:
+ finalizer()
+
+ def runpytest_inprocess(
+ self, *args: Union[str, "os.PathLike[str]"], **kwargs: Any
+ ) -> RunResult:
+ """Return result of running pytest in-process, providing a similar
+ interface to what self.runpytest() provides."""
+ syspathinsert = kwargs.pop("syspathinsert", False)
+
+ if syspathinsert:
+ self.syspathinsert()
+ now = timing.time()
+ capture = _get_multicapture("sys")
+ capture.start_capturing()
+ try:
+ try:
+ reprec = self.inline_run(*args, **kwargs)
+ except SystemExit as e:
+ ret = e.args[0]
+ try:
+ ret = ExitCode(e.args[0])
+ except ValueError:
+ pass
+
+ class reprec: # type: ignore
+ ret = ret
+
+ except Exception:
+ traceback.print_exc()
+
+ class reprec: # type: ignore
+ ret = ExitCode(3)
+
+ finally:
+ out, err = capture.readouterr()
+ capture.stop_capturing()
+ sys.stdout.write(out)
+ sys.stderr.write(err)
+
+ assert reprec.ret is not None
+ res = RunResult(
+ reprec.ret, out.splitlines(), err.splitlines(), timing.time() - now
+ )
+ res.reprec = reprec # type: ignore
+ return res
+
+ def runpytest(
+ self, *args: Union[str, "os.PathLike[str]"], **kwargs: Any
+ ) -> RunResult:
+ """Run pytest inline or in a subprocess, depending on the command line
+ option "--runpytest" and return a :py:class:`~pytest.RunResult`."""
+ new_args = self._ensure_basetemp(args)
+ if self._method == "inprocess":
+ return self.runpytest_inprocess(*new_args, **kwargs)
+ elif self._method == "subprocess":
+ return self.runpytest_subprocess(*new_args, **kwargs)
+ raise RuntimeError(f"Unrecognized runpytest option: {self._method}")
+
+ def _ensure_basetemp(
+ self, args: Sequence[Union[str, "os.PathLike[str]"]]
+ ) -> List[Union[str, "os.PathLike[str]"]]:
+ new_args = list(args)
+ for x in new_args:
+ if str(x).startswith("--basetemp"):
+ break
+ else:
+ new_args.append("--basetemp=%s" % self.path.parent.joinpath("basetemp"))
+ return new_args
+
+ def parseconfig(self, *args: Union[str, "os.PathLike[str]"]) -> Config:
+ """Return a new pytest Config instance from given commandline args.
+
+ This invokes the pytest bootstrapping code in _pytest.config to create
+ a new :py:class:`_pytest.core.PluginManager` and call the
+ pytest_cmdline_parse hook to create a new
+ :py:class:`pytest.Config` instance.
+
+ If :py:attr:`plugins` has been populated they should be plugin modules
+ to be registered with the PluginManager.
+ """
+ import _pytest.config
+
+ new_args = self._ensure_basetemp(args)
+ new_args = [str(x) for x in new_args]
+
+ config = _pytest.config._prepareconfig(new_args, self.plugins) # type: ignore[arg-type]
+ # we don't know what the test will do with this half-setup config
+ # object and thus we make sure it gets unconfigured properly in any
+ # case (otherwise capturing could still be active, for example)
+ self._request.addfinalizer(config._ensure_unconfigure)
+ return config
+
+ def parseconfigure(self, *args: Union[str, "os.PathLike[str]"]) -> Config:
+ """Return a new pytest configured Config instance.
+
+ Returns a new :py:class:`pytest.Config` instance like
+ :py:meth:`parseconfig`, but also calls the pytest_configure hook.
+ """
+ config = self.parseconfig(*args)
+ config._do_configure()
+ return config
+
+ def getitem(
+ self, source: Union[str, "os.PathLike[str]"], funcname: str = "test_func"
+ ) -> Item:
+ """Return the test item for a test function.
+
+ Writes the source to a python file and runs pytest's collection on
+ the resulting module, returning the test item for the requested
+ function name.
+
+ :param source:
+ The module source.
+ :param funcname:
+ The name of the test function for which to return a test item.
+ """
+ items = self.getitems(source)
+ for item in items:
+ if item.name == funcname:
+ return item
+ assert 0, "{!r} item not found in module:\n{}\nitems: {}".format(
+ funcname, source, items
+ )
+
+ def getitems(self, source: Union[str, "os.PathLike[str]"]) -> List[Item]:
+ """Return all test items collected from the module.
+
+ Writes the source to a Python file and runs pytest's collection on
+ the resulting module, returning all test items contained within.
+ """
+ modcol = self.getmodulecol(source)
+ return self.genitems([modcol])
+
+ def getmodulecol(
+ self,
+ source: Union[str, "os.PathLike[str]"],
+ configargs=(),
+ *,
+ withinit: bool = False,
+ ):
+ """Return the module collection node for ``source``.
+
+ Writes ``source`` to a file using :py:meth:`makepyfile` and then
+ runs the pytest collection on it, returning the collection node for the
+ test module.
+
+ :param source:
+ The source code of the module to collect.
+
+ :param configargs:
+ Any extra arguments to pass to :py:meth:`parseconfigure`.
+
+ :param withinit:
+ Whether to also write an ``__init__.py`` file to the same
+ directory to ensure it is a package.
+ """
+ if isinstance(source, os.PathLike):
+ path = self.path.joinpath(source)
+ assert not withinit, "not supported for paths"
+ else:
+ kw = {self._name: str(source)}
+ path = self.makepyfile(**kw)
+ if withinit:
+ self.makepyfile(__init__="#")
+ self.config = config = self.parseconfigure(path, *configargs)
+ return self.getnode(config, path)
+
+ def collect_by_name(
+ self, modcol: Collector, name: str
+ ) -> Optional[Union[Item, Collector]]:
+ """Return the collection node for name from the module collection.
+
+ Searches a module collection node for a collection node matching the
+ given name.
+
+ :param modcol: A module collection node; see :py:meth:`getmodulecol`.
+ :param name: The name of the node to return.
+ """
+ if modcol not in self._mod_collections:
+ self._mod_collections[modcol] = list(modcol.collect())
+ for colitem in self._mod_collections[modcol]:
+ if colitem.name == name:
+ return colitem
+ return None
+
+ def popen(
+ self,
+ cmdargs: Sequence[Union[str, "os.PathLike[str]"]],
+ stdout: Union[int, TextIO] = subprocess.PIPE,
+ stderr: Union[int, TextIO] = subprocess.PIPE,
+ stdin: Union[NotSetType, bytes, IO[Any], int] = CLOSE_STDIN,
+ **kw,
+ ):
+ """Invoke :py:class:`subprocess.Popen`.
+
+ Calls :py:class:`subprocess.Popen` making sure the current working
+ directory is in ``PYTHONPATH``.
+
+ You probably want to use :py:meth:`run` instead.
+ """
+ env = os.environ.copy()
+ env["PYTHONPATH"] = os.pathsep.join(
+ filter(None, [os.getcwd(), env.get("PYTHONPATH", "")])
+ )
+ kw["env"] = env
+
+ if stdin is self.CLOSE_STDIN:
+ kw["stdin"] = subprocess.PIPE
+ elif isinstance(stdin, bytes):
+ kw["stdin"] = subprocess.PIPE
+ else:
+ kw["stdin"] = stdin
+
+ popen = subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw)
+ if stdin is self.CLOSE_STDIN:
+ assert popen.stdin is not None
+ popen.stdin.close()
+ elif isinstance(stdin, bytes):
+ assert popen.stdin is not None
+ popen.stdin.write(stdin)
+
+ return popen
+
+ def run(
+ self,
+ *cmdargs: Union[str, "os.PathLike[str]"],
+ timeout: Optional[float] = None,
+ stdin: Union[NotSetType, bytes, IO[Any], int] = CLOSE_STDIN,
+ ) -> RunResult:
+ """Run a command with arguments.
+
+ Run a process using :py:class:`subprocess.Popen` saving the stdout and
+ stderr.
+
+ :param cmdargs:
+ The sequence of arguments to pass to :py:class:`subprocess.Popen`,
+ with path-like objects being converted to :py:class:`str`
+ automatically.
+ :param timeout:
+ The period in seconds after which to timeout and raise
+ :py:class:`Pytester.TimeoutExpired`.
+ :param stdin:
+ Optional standard input.
+
+ - If it is :py:attr:`CLOSE_STDIN` (Default), then this method calls
+ :py:class:`subprocess.Popen` with ``stdin=subprocess.PIPE``, and
+ the standard input is closed immediately after the new command is
+ started.
+
+ - If it is of type :py:class:`bytes`, these bytes are sent to the
+ standard input of the command.
+
+ - Otherwise, it is passed through to :py:class:`subprocess.Popen`.
+ For further information in this case, consult the document of the
+ ``stdin`` parameter in :py:class:`subprocess.Popen`.
+ """
+ __tracebackhide__ = True
+
+ cmdargs = tuple(os.fspath(arg) for arg in cmdargs)
+ p1 = self.path.joinpath("stdout")
+ p2 = self.path.joinpath("stderr")
+ print("running:", *cmdargs)
+ print(" in:", Path.cwd())
+
+ with p1.open("w", encoding="utf8") as f1, p2.open("w", encoding="utf8") as f2:
+ now = timing.time()
+ popen = self.popen(
+ cmdargs,
+ stdin=stdin,
+ stdout=f1,
+ stderr=f2,
+ close_fds=(sys.platform != "win32"),
+ )
+ if popen.stdin is not None:
+ popen.stdin.close()
+
+ def handle_timeout() -> None:
+ __tracebackhide__ = True
+
+ timeout_message = (
+ "{seconds} second timeout expired running:"
+ " {command}".format(seconds=timeout, command=cmdargs)
+ )
+
+ popen.kill()
+ popen.wait()
+ raise self.TimeoutExpired(timeout_message)
+
+ if timeout is None:
+ ret = popen.wait()
+ else:
+ try:
+ ret = popen.wait(timeout)
+ except subprocess.TimeoutExpired:
+ handle_timeout()
+
+ with p1.open(encoding="utf8") as f1, p2.open(encoding="utf8") as f2:
+ out = f1.read().splitlines()
+ err = f2.read().splitlines()
+
+ self._dump_lines(out, sys.stdout)
+ self._dump_lines(err, sys.stderr)
+
+ with contextlib.suppress(ValueError):
+ ret = ExitCode(ret)
+ return RunResult(ret, out, err, timing.time() - now)
+
+ def _dump_lines(self, lines, fp):
+ try:
+ for line in lines:
+ print(line, file=fp)
+ except UnicodeEncodeError:
+ print(f"couldn't print to {fp} because of encoding")
+
+ def _getpytestargs(self) -> Tuple[str, ...]:
+ return sys.executable, "-mpytest"
+
+ def runpython(self, script: "os.PathLike[str]") -> RunResult:
+ """Run a python script using sys.executable as interpreter."""
+ return self.run(sys.executable, script)
+
+ def runpython_c(self, command: str) -> RunResult:
+ """Run ``python -c "command"``."""
+ return self.run(sys.executable, "-c", command)
+
+ def runpytest_subprocess(
+ self, *args: Union[str, "os.PathLike[str]"], timeout: Optional[float] = None
+ ) -> RunResult:
+ """Run pytest as a subprocess with given arguments.
+
+ Any plugins added to the :py:attr:`plugins` list will be added using the
+ ``-p`` command line option. Additionally ``--basetemp`` is used to put
+ any temporary files and directories in a numbered directory prefixed
+ with "runpytest-" to not conflict with the normal numbered pytest
+ location for temporary files and directories.
+
+ :param args:
+ The sequence of arguments to pass to the pytest subprocess.
+ :param timeout:
+ The period in seconds after which to timeout and raise
+ :py:class:`Pytester.TimeoutExpired`.
+ """
+ __tracebackhide__ = True
+ p = make_numbered_dir(root=self.path, prefix="runpytest-", mode=0o700)
+ args = ("--basetemp=%s" % p,) + args
+ plugins = [x for x in self.plugins if isinstance(x, str)]
+ if plugins:
+ args = ("-p", plugins[0]) + args
+ args = self._getpytestargs() + args
+ return self.run(*args, timeout=timeout)
+
+ def spawn_pytest(
+ self, string: str, expect_timeout: float = 10.0
+ ) -> "pexpect.spawn":
+ """Run pytest using pexpect.
+
+ This makes sure to use the right pytest and sets up the temporary
+ directory locations.
+
+ The pexpect child is returned.
+ """
+ basetemp = self.path / "temp-pexpect"
+ basetemp.mkdir(mode=0o700)
+ invoke = " ".join(map(str, self._getpytestargs()))
+ cmd = f"{invoke} --basetemp={basetemp} {string}"
+ return self.spawn(cmd, expect_timeout=expect_timeout)
+
+ def spawn(self, cmd: str, expect_timeout: float = 10.0) -> "pexpect.spawn":
+ """Run a command using pexpect.
+
+ The pexpect child is returned.
+ """
+ pexpect = importorskip("pexpect", "3.0")
+ if hasattr(sys, "pypy_version_info") and "64" in platform.machine():
+ skip("pypy-64 bit not supported")
+ if not hasattr(pexpect, "spawn"):
+ skip("pexpect.spawn not available")
+ logfile = self.path.joinpath("spawn.out").open("wb")
+
+ child = pexpect.spawn(cmd, logfile=logfile, timeout=expect_timeout)
+ self._request.addfinalizer(logfile.close)
+ return child
+
+
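+# Illustrative sketch (hypothetical helper, not part of this module): running an
+# arbitrary command through Pytester.run() and matching its captured stdout.
+def _example_run_command(pytester: Pytester) -> None:
+    result = pytester.run(sys.executable, "-c", "print('hello')")
+    assert result.ret == 0
+    result.stdout.fnmatch_lines(["hello"])
+
+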
+class LineComp:
+ def __init__(self) -> None:
+ self.stringio = StringIO()
+ """:class:`python:io.StringIO()` instance used for input."""
+
+ def assert_contains_lines(self, lines2: Sequence[str]) -> None:
+ """Assert that ``lines2`` are contained (linearly) in :attr:`stringio`'s value.
+
+ Lines are matched using :func:`LineMatcher.fnmatch_lines <pytest.LineMatcher.fnmatch_lines>`.
+ """
+ __tracebackhide__ = True
+ val = self.stringio.getvalue()
+ self.stringio.truncate(0)
+ self.stringio.seek(0)
+ lines1 = val.split("\n")
+ LineMatcher(lines1).fnmatch_lines(lines2)
+
+
+class LineMatcher:
+ """Flexible matching of text.
+
+ This is a convenience class to test large texts like the output of
+ commands.
+
+ The constructor takes a list of lines without their trailing newlines, i.e.
+ ``text.splitlines()``.
+ """
+
+ def __init__(self, lines: List[str]) -> None:
+ self.lines = lines
+ self._log_output: List[str] = []
+
+ def __str__(self) -> str:
+ """Return the entire original text.
+
+ .. versionadded:: 6.2
+ You can use :meth:`str` in older versions.
+ """
+ return "\n".join(self.lines)
+
+ def _getlines(self, lines2: Union[str, Sequence[str], Source]) -> Sequence[str]:
+ if isinstance(lines2, str):
+ lines2 = Source(lines2)
+ if isinstance(lines2, Source):
+ lines2 = lines2.strip().lines
+ return lines2
+
+ def fnmatch_lines_random(self, lines2: Sequence[str]) -> None:
+ """Check lines exist in the output in any order (using :func:`python:fnmatch.fnmatch`)."""
+ __tracebackhide__ = True
+ self._match_lines_random(lines2, fnmatch)
+
+ def re_match_lines_random(self, lines2: Sequence[str]) -> None:
+ """Check lines exist in the output in any order (using :func:`python:re.match`)."""
+ __tracebackhide__ = True
+ self._match_lines_random(lines2, lambda name, pat: bool(re.match(pat, name)))
+
+ def _match_lines_random(
+ self, lines2: Sequence[str], match_func: Callable[[str, str], bool]
+ ) -> None:
+ __tracebackhide__ = True
+ lines2 = self._getlines(lines2)
+ for line in lines2:
+ for x in self.lines:
+ if line == x or match_func(x, line):
+ self._log("matched: ", repr(line))
+ break
+ else:
+ msg = "line %r not found in output" % line
+ self._log(msg)
+ self._fail(msg)
+
+ def get_lines_after(self, fnline: str) -> Sequence[str]:
+ """Return all lines following the given line in the text.
+
+ The given line can contain glob wildcards.
+ """
+ for i, line in enumerate(self.lines):
+ if fnline == line or fnmatch(line, fnline):
+ return self.lines[i + 1 :]
+ raise ValueError("line %r not found in output" % fnline)
+
+ def _log(self, *args) -> None:
+ self._log_output.append(" ".join(str(x) for x in args))
+
+ @property
+ def _log_text(self) -> str:
+ return "\n".join(self._log_output)
+
+ def fnmatch_lines(
+ self, lines2: Sequence[str], *, consecutive: bool = False
+ ) -> None:
+ """Check lines exist in the output (using :func:`python:fnmatch.fnmatch`).
+
+ The argument is a list of lines which have to match and can use glob
+ wildcards. If they do not match a pytest.fail() is called. The
+ matches and non-matches are also shown as part of the error message.
+
+ :param lines2: String patterns to match.
+ :param consecutive: Match lines consecutively?
+ """
+ __tracebackhide__ = True
+ self._match_lines(lines2, fnmatch, "fnmatch", consecutive=consecutive)
+
+ def re_match_lines(
+ self, lines2: Sequence[str], *, consecutive: bool = False
+ ) -> None:
+ """Check lines exist in the output (using :func:`python:re.match`).
+
+ The argument is a list of lines which have to match using ``re.match``.
+ If they do not match a pytest.fail() is called.
+
+ The matches and non-matches are also shown as part of the error message.
+
+ :param lines2: string patterns to match.
+ :param consecutive: match lines consecutively?
+ """
+ __tracebackhide__ = True
+ self._match_lines(
+ lines2,
+ lambda name, pat: bool(re.match(pat, name)),
+ "re.match",
+ consecutive=consecutive,
+ )
+
+ def _match_lines(
+ self,
+ lines2: Sequence[str],
+ match_func: Callable[[str, str], bool],
+ match_nickname: str,
+ *,
+ consecutive: bool = False,
+ ) -> None:
+ """Underlying implementation of ``fnmatch_lines`` and ``re_match_lines``.
+
+ :param Sequence[str] lines2:
+ List of string patterns to match. The actual format depends on
+ ``match_func``.
+ :param match_func:
+ A callable ``match_func(line, pattern)`` where line is the
+ captured line from stdout/stderr and pattern is the matching
+ pattern.
+ :param str match_nickname:
+ The nickname for the match function that will be logged to stdout
+ when a match occurs.
+ :param consecutive:
+ Match lines consecutively?
+ """
+ if not isinstance(lines2, collections.abc.Sequence):
+ raise TypeError(f"invalid type for lines2: {type(lines2).__name__}")
+ lines2 = self._getlines(lines2)
+ lines1 = self.lines[:]
+ extralines = []
+ __tracebackhide__ = True
+ wnick = len(match_nickname) + 1
+ started = False
+ for line in lines2:
+ nomatchprinted = False
+ while lines1:
+ nextline = lines1.pop(0)
+ if line == nextline:
+ self._log("exact match:", repr(line))
+ started = True
+ break
+ elif match_func(nextline, line):
+ self._log("%s:" % match_nickname, repr(line))
+ self._log(
+ "{:>{width}}".format("with:", width=wnick), repr(nextline)
+ )
+ started = True
+ break
+ else:
+ if consecutive and started:
+ msg = f"no consecutive match: {line!r}"
+ self._log(msg)
+ self._log(
+ "{:>{width}}".format("with:", width=wnick), repr(nextline)
+ )
+ self._fail(msg)
+ if not nomatchprinted:
+ self._log(
+ "{:>{width}}".format("nomatch:", width=wnick), repr(line)
+ )
+ nomatchprinted = True
+ self._log("{:>{width}}".format("and:", width=wnick), repr(nextline))
+ extralines.append(nextline)
+ else:
+ msg = f"remains unmatched: {line!r}"
+ self._log(msg)
+ self._fail(msg)
+ self._log_output = []
+
+ def no_fnmatch_line(self, pat: str) -> None:
+ """Ensure captured lines do not match the given pattern, using ``fnmatch.fnmatch``.
+
+ :param str pat: The pattern to match lines.
+ """
+ __tracebackhide__ = True
+ self._no_match_line(pat, fnmatch, "fnmatch")
+
+ def no_re_match_line(self, pat: str) -> None:
+ """Ensure captured lines do not match the given pattern, using ``re.match``.
+
+ :param str pat: The regular expression to match lines.
+ """
+ __tracebackhide__ = True
+ self._no_match_line(
+ pat, lambda name, pat: bool(re.match(pat, name)), "re.match"
+ )
+
+ def _no_match_line(
+ self, pat: str, match_func: Callable[[str, str], bool], match_nickname: str
+ ) -> None:
+ """Ensure captured lines does not have a the given pattern, using ``fnmatch.fnmatch``.
+
+ :param str pat: The pattern to match lines.
+ """
+ __tracebackhide__ = True
+ nomatch_printed = False
+ wnick = len(match_nickname) + 1
+ for line in self.lines:
+ if match_func(line, pat):
+ msg = f"{match_nickname}: {pat!r}"
+ self._log(msg)
+ self._log("{:>{width}}".format("with:", width=wnick), repr(line))
+ self._fail(msg)
+ else:
+ if not nomatch_printed:
+ self._log("{:>{width}}".format("nomatch:", width=wnick), repr(pat))
+ nomatch_printed = True
+ self._log("{:>{width}}".format("and:", width=wnick), repr(line))
+ self._log_output = []
+
+ def _fail(self, msg: str) -> None:
+ __tracebackhide__ = True
+ log_text = self._log_text
+ self._log_output = []
+ fail(log_text)
+
+ def str(self) -> str:
+ """Return the entire original text."""
+ return str(self)
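+
+
+# Illustrative sketch (hypothetical helper, not part of this module): matching
+# captured output against glob-style patterns with LineMatcher.
+def _example_line_matcher() -> None:
+    matcher = LineMatcher(["collected 3 items", "test_a.py ...", "3 passed in 0.01s"])
+    matcher.fnmatch_lines(["collected * items", "* passed in *"])
+    matcher.no_fnmatch_line("*failed*")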
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pytester_assertions.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pytester_assertions.py
new file mode 100644
index 0000000000..657e4db5fc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/pytester_assertions.py
@@ -0,0 +1,75 @@
+"""Helper plugin for pytester; should not be loaded on its own."""
+# This plugin contains assertions used by pytester. pytester cannot
+# contain them itself, since it is imported by the `pytest` module,
+# hence cannot be subject to assertion rewriting, which requires a
+# module to not be already imported.
+from typing import Dict
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+from _pytest.reports import CollectReport
+from _pytest.reports import TestReport
+
+
+def assertoutcome(
+ outcomes: Tuple[
+ Sequence[TestReport],
+ Sequence[Union[CollectReport, TestReport]],
+ Sequence[Union[CollectReport, TestReport]],
+ ],
+ passed: int = 0,
+ skipped: int = 0,
+ failed: int = 0,
+) -> None:
+ __tracebackhide__ = True
+
+ realpassed, realskipped, realfailed = outcomes
+ obtained = {
+ "passed": len(realpassed),
+ "skipped": len(realskipped),
+ "failed": len(realfailed),
+ }
+ expected = {"passed": passed, "skipped": skipped, "failed": failed}
+ assert obtained == expected, outcomes
+
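+# Illustrative usage sketch (editor's addition, not part of upstream pytest):
+# HookRecorder.assertoutcome() delegates to the helper above, passing the tuple
+# returned by listoutcomes(), roughly:
+#
+#     reports = recorder.listoutcomes()   # (passed, skipped, failed) report lists
+#     assertoutcome(reports, passed=2, failed=1)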
+
+def assert_outcomes(
+ outcomes: Dict[str, int],
+ passed: int = 0,
+ skipped: int = 0,
+ failed: int = 0,
+ errors: int = 0,
+ xpassed: int = 0,
+ xfailed: int = 0,
+ warnings: Optional[int] = None,
+ deselected: Optional[int] = None,
+) -> None:
+ """Assert that the specified outcomes appear with the respective
+    numbers (0 means it didn't occur) in the given outcomes mapping."""
+ __tracebackhide__ = True
+
+ obtained = {
+ "passed": outcomes.get("passed", 0),
+ "skipped": outcomes.get("skipped", 0),
+ "failed": outcomes.get("failed", 0),
+ "errors": outcomes.get("errors", 0),
+ "xpassed": outcomes.get("xpassed", 0),
+ "xfailed": outcomes.get("xfailed", 0),
+ }
+ expected = {
+ "passed": passed,
+ "skipped": skipped,
+ "failed": failed,
+ "errors": errors,
+ "xpassed": xpassed,
+ "xfailed": xfailed,
+ }
+ if warnings is not None:
+ obtained["warnings"] = outcomes.get("warnings", 0)
+ expected["warnings"] = warnings
+ if deselected is not None:
+ obtained["deselected"] = outcomes.get("deselected", 0)
+ expected["deselected"] = deselected
+ assert obtained == expected
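+
+
+# Illustrative usage sketch (editor's addition, not part of upstream pytest):
+# RunResult.assert_outcomes() parses the terminal summary into a dict of counts
+# and forwards it here, so a direct call looks like:
+#
+#     assert_outcomes({"passed": 3, "failed": 1}, passed=3, failed=1)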
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/python.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/python.py
new file mode 100644
index 0000000000..0fd5702a5c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/python.py
@@ -0,0 +1,1764 @@
+"""Python test discovery, setup and run of test functions."""
+import enum
+import fnmatch
+import inspect
+import itertools
+import os
+import sys
+import types
+import warnings
+from collections import Counter
+from collections import defaultdict
+from functools import partial
+from pathlib import Path
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generator
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Pattern
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+import attr
+
+import _pytest
+from _pytest import fixtures
+from _pytest import nodes
+from _pytest._code import filter_traceback
+from _pytest._code import getfslineno
+from _pytest._code.code import ExceptionInfo
+from _pytest._code.code import TerminalRepr
+from _pytest._io import TerminalWriter
+from _pytest._io.saferepr import saferepr
+from _pytest.compat import ascii_escaped
+from _pytest.compat import assert_never
+from _pytest.compat import final
+from _pytest.compat import get_default_arg_names
+from _pytest.compat import get_real_func
+from _pytest.compat import getimfunc
+from _pytest.compat import getlocation
+from _pytest.compat import is_async_function
+from _pytest.compat import is_generator
+from _pytest.compat import LEGACY_PATH
+from _pytest.compat import NOTSET
+from _pytest.compat import safe_getattr
+from _pytest.compat import safe_isclass
+from _pytest.compat import STRING_TYPES
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.config import hookimpl
+from _pytest.config.argparsing import Parser
+from _pytest.deprecated import check_ispytest
+from _pytest.deprecated import FSCOLLECTOR_GETHOOKPROXY_ISINITPATH
+from _pytest.deprecated import INSTANCE_COLLECTOR
+from _pytest.fixtures import FuncFixtureInfo
+from _pytest.main import Session
+from _pytest.mark import MARK_GEN
+from _pytest.mark import ParameterSet
+from _pytest.mark.structures import get_unpacked_marks
+from _pytest.mark.structures import Mark
+from _pytest.mark.structures import MarkDecorator
+from _pytest.mark.structures import normalize_mark_list
+from _pytest.outcomes import fail
+from _pytest.outcomes import skip
+from _pytest.pathlib import bestrelpath
+from _pytest.pathlib import fnmatch_ex
+from _pytest.pathlib import import_path
+from _pytest.pathlib import ImportPathMismatchError
+from _pytest.pathlib import parts
+from _pytest.pathlib import visit
+from _pytest.scope import Scope
+from _pytest.warning_types import PytestCollectionWarning
+from _pytest.warning_types import PytestUnhandledCoroutineWarning
+
+if TYPE_CHECKING:
+ from typing_extensions import Literal
+ from _pytest.scope import _ScopeName
+
+
+_PYTEST_DIR = Path(_pytest.__file__).parent
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("general")
+ group.addoption(
+ "--fixtures",
+ "--funcargs",
+ action="store_true",
+ dest="showfixtures",
+ default=False,
+ help="show available fixtures, sorted by plugin appearance "
+ "(fixtures with leading '_' are only shown with '-v')",
+ )
+ group.addoption(
+ "--fixtures-per-test",
+ action="store_true",
+ dest="show_fixtures_per_test",
+ default=False,
+ help="show fixtures per test",
+ )
+ parser.addini(
+ "python_files",
+ type="args",
+ # NOTE: default is also used in AssertionRewritingHook.
+ default=["test_*.py", "*_test.py"],
+ help="glob-style file patterns for Python test module discovery",
+ )
+ parser.addini(
+ "python_classes",
+ type="args",
+ default=["Test"],
+ help="prefixes or glob names for Python test class discovery",
+ )
+ parser.addini(
+ "python_functions",
+ type="args",
+ default=["test"],
+ help="prefixes or glob names for Python test function and method discovery",
+ )
+ parser.addini(
+ "disable_test_id_escaping_and_forfeit_all_rights_to_community_support",
+ type="bool",
+ default=False,
+        help="disable escaping of non-ascii characters in test IDs; might cause "
+        "unwanted side effects (use at your own risk)",
+ )
+
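+# Illustrative sketch (editor's addition, not part of upstream pytest): the ini
+# options registered above can be overridden in a project's configuration, e.g.
+#
+#     # pytest.ini
+#     [pytest]
+#     python_files = check_*.py
+#     python_classes = Check*
+#     python_functions = check_*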
+
+def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:
+ if config.option.showfixtures:
+ showfixtures(config)
+ return 0
+ if config.option.show_fixtures_per_test:
+ show_fixtures_per_test(config)
+ return 0
+ return None
+
+
+def pytest_generate_tests(metafunc: "Metafunc") -> None:
+ for marker in metafunc.definition.iter_markers(name="parametrize"):
+ metafunc.parametrize(*marker.args, **marker.kwargs, _param_mark=marker)
+
+
+def pytest_configure(config: Config) -> None:
+ config.addinivalue_line(
+ "markers",
+ "parametrize(argnames, argvalues): call a test function multiple "
+ "times passing in different arguments in turn. argvalues generally "
+ "needs to be a list of values if argnames specifies only one name "
+ "or a list of tuples of values if argnames specifies multiple names. "
+ "Example: @parametrize('arg1', [1,2]) would lead to two calls of the "
+ "decorated test function, one with arg1=1 and another with arg1=2."
+ "see https://docs.pytest.org/en/stable/how-to/parametrize.html for more info "
+ "and examples.",
+ )
+ config.addinivalue_line(
+ "markers",
+ "usefixtures(fixturename1, fixturename2, ...): mark tests as needing "
+ "all of the specified fixtures. see "
+ "https://docs.pytest.org/en/stable/explanation/fixtures.html#usefixtures ",
+ )
+
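+# Illustrative sketch (editor's addition, not part of upstream pytest): the two
+# markers documented above are typically applied like this:
+#
+#     @pytest.mark.usefixtures("tmp_path")
+#     @pytest.mark.parametrize("arg1", [1, 2])
+#     def test_something(arg1):
+#         assert arg1 in (1, 2)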
+
+def async_warn_and_skip(nodeid: str) -> None:
+ msg = "async def functions are not natively supported and have been skipped.\n"
+ msg += (
+ "You need to install a suitable plugin for your async framework, for example:\n"
+ )
+ msg += " - anyio\n"
+ msg += " - pytest-asyncio\n"
+ msg += " - pytest-tornasync\n"
+ msg += " - pytest-trio\n"
+ msg += " - pytest-twisted"
+ warnings.warn(PytestUnhandledCoroutineWarning(msg.format(nodeid)))
+ skip(reason="async def function and no async plugin installed (see warnings)")
+
+
+@hookimpl(trylast=True)
+def pytest_pyfunc_call(pyfuncitem: "Function") -> Optional[object]:
+ testfunction = pyfuncitem.obj
+ if is_async_function(testfunction):
+ async_warn_and_skip(pyfuncitem.nodeid)
+ funcargs = pyfuncitem.funcargs
+ testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames}
+ result = testfunction(**testargs)
+ if hasattr(result, "__await__") or hasattr(result, "__aiter__"):
+ async_warn_and_skip(pyfuncitem.nodeid)
+ return True
+
+
+def pytest_collect_file(file_path: Path, parent: nodes.Collector) -> Optional["Module"]:
+ if file_path.suffix == ".py":
+ if not parent.session.isinitpath(file_path):
+ if not path_matches_patterns(
+ file_path, parent.config.getini("python_files") + ["__init__.py"]
+ ):
+ return None
+ ihook = parent.session.gethookproxy(file_path)
+ module: Module = ihook.pytest_pycollect_makemodule(
+ module_path=file_path, parent=parent
+ )
+ return module
+ return None
+
+
+def path_matches_patterns(path: Path, patterns: Iterable[str]) -> bool:
+ """Return whether path matches any of the patterns in the list of globs given."""
+ return any(fnmatch_ex(pattern, path) for pattern in patterns)
+
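+# Illustrative sketch (editor's addition, not part of upstream pytest): with the
+# default "python_files" patterns,
+#
+#     path_matches_patterns(Path("tests/test_foo.py"), ["test_*.py", "*_test.py"])
+#
+# is True, while "tests/helpers.py" matches neither pattern and is skipped.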
+
+def pytest_pycollect_makemodule(module_path: Path, parent) -> "Module":
+ if module_path.name == "__init__.py":
+ pkg: Package = Package.from_parent(parent, path=module_path)
+ return pkg
+ mod: Module = Module.from_parent(parent, path=module_path)
+ return mod
+
+
+@hookimpl(trylast=True)
+def pytest_pycollect_makeitem(collector: "PyCollector", name: str, obj: object):
+ # Nothing was collected elsewhere, let's do it here.
+ if safe_isclass(obj):
+ if collector.istestclass(obj, name):
+ return Class.from_parent(collector, name=name, obj=obj)
+ elif collector.istestfunction(obj, name):
+ # mock seems to store unbound methods (issue473), normalize it.
+ obj = getattr(obj, "__func__", obj)
+ # We need to try and unwrap the function if it's a functools.partial
+ # or a functools.wrapped.
+ # We mustn't if it's been wrapped with mock.patch (python 2 only).
+ if not (inspect.isfunction(obj) or inspect.isfunction(get_real_func(obj))):
+ filename, lineno = getfslineno(obj)
+ warnings.warn_explicit(
+ message=PytestCollectionWarning(
+ "cannot collect %r because it is not a function." % name
+ ),
+ category=None,
+ filename=str(filename),
+ lineno=lineno + 1,
+ )
+ elif getattr(obj, "__test__", True):
+ if is_generator(obj):
+ res = Function.from_parent(collector, name=name)
+ reason = "yield tests were removed in pytest 4.0 - {name} will be ignored".format(
+ name=name
+ )
+ res.add_marker(MARK_GEN.xfail(run=False, reason=reason))
+ res.warn(PytestCollectionWarning(reason))
+ else:
+ res = list(collector._genfunctions(name, obj))
+ return res
+
+
+class PyobjMixin(nodes.Node):
+    """This mix-in inherits from Node to carry over the typing information.
+
+    Since it is intended to always be mixed in before a Node subclass,
+    its position in the MRO is unaffected."""
+
+ _ALLOW_MARKERS = True
+
+ @property
+ def module(self):
+ """Python module object this node was collected from (can be None)."""
+ node = self.getparent(Module)
+ return node.obj if node is not None else None
+
+ @property
+ def cls(self):
+ """Python class object this node was collected from (can be None)."""
+ node = self.getparent(Class)
+ return node.obj if node is not None else None
+
+ @property
+ def instance(self):
+ """Python instance object the function is bound to.
+
+ Returns None if not a test method, e.g. for a standalone test function,
+ a staticmethod, a class or a module.
+ """
+ node = self.getparent(Function)
+ return getattr(node.obj, "__self__", None) if node is not None else None
+
+ @property
+ def obj(self):
+ """Underlying Python object."""
+ obj = getattr(self, "_obj", None)
+ if obj is None:
+ self._obj = obj = self._getobj()
+ # XXX evil hack
+ # used to avoid Function marker duplication
+ if self._ALLOW_MARKERS:
+ self.own_markers.extend(get_unpacked_marks(self.obj))
+ return obj
+
+ @obj.setter
+ def obj(self, value):
+ self._obj = value
+
+ def _getobj(self):
+ """Get the underlying Python object. May be overwritten by subclasses."""
+ # TODO: Improve the type of `parent` such that assert/ignore aren't needed.
+ assert self.parent is not None
+ obj = self.parent.obj # type: ignore[attr-defined]
+ return getattr(obj, self.name)
+
+ def getmodpath(self, stopatmodule: bool = True, includemodule: bool = False) -> str:
+ """Return Python path relative to the containing module."""
+ chain = self.listchain()
+ chain.reverse()
+ parts = []
+ for node in chain:
+ name = node.name
+ if isinstance(node, Module):
+ name = os.path.splitext(name)[0]
+ if stopatmodule:
+ if includemodule:
+ parts.append(name)
+ break
+ parts.append(name)
+ parts.reverse()
+ return ".".join(parts)
+
+ def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]:
+ # XXX caching?
+ obj = self.obj
+ compat_co_firstlineno = getattr(obj, "compat_co_firstlineno", None)
+ if isinstance(compat_co_firstlineno, int):
+ # nose compatibility
+ file_path = sys.modules[obj.__module__].__file__
+ if file_path.endswith(".pyc"):
+ file_path = file_path[:-1]
+ path: Union["os.PathLike[str]", str] = file_path
+ lineno = compat_co_firstlineno
+ else:
+ path, lineno = getfslineno(obj)
+ modpath = self.getmodpath()
+ assert isinstance(lineno, int)
+ return path, lineno, modpath
+
+
+# As an optimization, these builtin attribute names are pre-ignored when
+# iterating over an object during collection -- the pytest_pycollect_makeitem
+# hook is not called for them.
+# fmt: off
+class _EmptyClass: pass # noqa: E701
+IGNORED_ATTRIBUTES = frozenset.union( # noqa: E305
+ frozenset(),
+ # Module.
+ dir(types.ModuleType("empty_module")),
+ # Some extra module attributes the above doesn't catch.
+ {"__builtins__", "__file__", "__cached__"},
+ # Class.
+ dir(_EmptyClass),
+ # Instance.
+ dir(_EmptyClass()),
+)
+del _EmptyClass
+# fmt: on
+
+
+class PyCollector(PyobjMixin, nodes.Collector):
+ def funcnamefilter(self, name: str) -> bool:
+ return self._matches_prefix_or_glob_option("python_functions", name)
+
+ def isnosetest(self, obj: object) -> bool:
+ """Look for the __test__ attribute, which is applied by the
+ @nose.tools.istest decorator.
+ """
+ # We explicitly check for "is True" here to not mistakenly treat
+ # classes with a custom __getattr__ returning something truthy (like a
+ # function) as test classes.
+ return safe_getattr(obj, "__test__", False) is True
+
+ def classnamefilter(self, name: str) -> bool:
+ return self._matches_prefix_or_glob_option("python_classes", name)
+
+ def istestfunction(self, obj: object, name: str) -> bool:
+ if self.funcnamefilter(name) or self.isnosetest(obj):
+ if isinstance(obj, staticmethod):
+ # staticmethods need to be unwrapped.
+ obj = safe_getattr(obj, "__func__", False)
+ return callable(obj) and fixtures.getfixturemarker(obj) is None
+ else:
+ return False
+
+ def istestclass(self, obj: object, name: str) -> bool:
+ return self.classnamefilter(name) or self.isnosetest(obj)
+
+ def _matches_prefix_or_glob_option(self, option_name: str, name: str) -> bool:
+ """Check if the given name matches the prefix or glob-pattern defined
+ in ini configuration."""
+ for option in self.config.getini(option_name):
+ if name.startswith(option):
+ return True
+ # Check that name looks like a glob-string before calling fnmatch
+ # because this is called for every name in each collected module,
+ # and fnmatch is somewhat expensive to call.
+ elif ("*" in option or "?" in option or "[" in option) and fnmatch.fnmatch(
+ name, option
+ ):
+ return True
+ return False
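+
+    # Illustrative sketch (editor's addition, not part of upstream pytest): with
+    # ``python_classes = ["Test", "*Suite"]``, "TestLogin" matches via the prefix
+    # check and "LoginSuite" via fnmatch, while "Helper" matches neither.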
+
+ def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]:
+ if not getattr(self.obj, "__test__", True):
+ return []
+
+ # Avoid random getattrs and peek in the __dict__ instead.
+ dicts = [getattr(self.obj, "__dict__", {})]
+ if isinstance(self.obj, type):
+ for basecls in self.obj.__mro__:
+ dicts.append(basecls.__dict__)
+
+ # In each class, nodes should be definition ordered. Since Python 3.6,
+ # __dict__ is definition ordered.
+ seen: Set[str] = set()
+ dict_values: List[List[Union[nodes.Item, nodes.Collector]]] = []
+ ihook = self.ihook
+ for dic in dicts:
+ values: List[Union[nodes.Item, nodes.Collector]] = []
+ # Note: seems like the dict can change during iteration -
+ # be careful not to remove the list() without consideration.
+ for name, obj in list(dic.items()):
+ if name in IGNORED_ATTRIBUTES:
+ continue
+ if name in seen:
+ continue
+ seen.add(name)
+ res = ihook.pytest_pycollect_makeitem(
+ collector=self, name=name, obj=obj
+ )
+ if res is None:
+ continue
+ elif isinstance(res, list):
+ values.extend(res)
+ else:
+ values.append(res)
+ dict_values.append(values)
+
+ # Between classes in the class hierarchy, reverse-MRO order -- nodes
+ # inherited from base classes should come before subclasses.
+ result = []
+ for values in reversed(dict_values):
+ result.extend(values)
+ return result
+
+ def _genfunctions(self, name: str, funcobj) -> Iterator["Function"]:
+ modulecol = self.getparent(Module)
+ assert modulecol is not None
+ module = modulecol.obj
+ clscol = self.getparent(Class)
+ cls = clscol and clscol.obj or None
+
+ definition = FunctionDefinition.from_parent(self, name=name, callobj=funcobj)
+ fixtureinfo = definition._fixtureinfo
+
+ # pytest_generate_tests impls call metafunc.parametrize() which fills
+ # metafunc._calls, the outcome of the hook.
+ metafunc = Metafunc(
+ definition=definition,
+ fixtureinfo=fixtureinfo,
+ config=self.config,
+ cls=cls,
+ module=module,
+ _ispytest=True,
+ )
+ methods = []
+ if hasattr(module, "pytest_generate_tests"):
+ methods.append(module.pytest_generate_tests)
+ if cls is not None and hasattr(cls, "pytest_generate_tests"):
+ methods.append(cls().pytest_generate_tests)
+ self.ihook.pytest_generate_tests.call_extra(methods, dict(metafunc=metafunc))
+
+ if not metafunc._calls:
+ yield Function.from_parent(self, name=name, fixtureinfo=fixtureinfo)
+ else:
+ # Add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs.
+ fm = self.session._fixturemanager
+ fixtures.add_funcarg_pseudo_fixture_def(self, metafunc, fm)
+
+ # Add_funcarg_pseudo_fixture_def may have shadowed some fixtures
+ # with direct parametrization, so make sure we update what the
+ # function really needs.
+ fixtureinfo.prune_dependency_tree()
+
+ for callspec in metafunc._calls:
+ subname = f"{name}[{callspec.id}]"
+ yield Function.from_parent(
+ self,
+ name=subname,
+ callspec=callspec,
+ fixtureinfo=fixtureinfo,
+ keywords={callspec.id: True},
+ originalname=name,
+ )
+
+
+class Module(nodes.File, PyCollector):
+ """Collector for test classes and functions."""
+
+ def _getobj(self):
+ return self._importtestmodule()
+
+ def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]:
+ self._inject_setup_module_fixture()
+ self._inject_setup_function_fixture()
+ self.session._fixturemanager.parsefactories(self)
+ return super().collect()
+
+ def _inject_setup_module_fixture(self) -> None:
+ """Inject a hidden autouse, module scoped fixture into the collected module object
+ that invokes setUpModule/tearDownModule if either or both are available.
+
+        Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with
+ other fixtures (#517).
+ """
+ has_nose = self.config.pluginmanager.has_plugin("nose")
+ setup_module = _get_first_non_fixture_func(
+ self.obj, ("setUpModule", "setup_module")
+ )
+ if setup_module is None and has_nose:
+ # The name "setup" is too common - only treat as fixture if callable.
+ setup_module = _get_first_non_fixture_func(self.obj, ("setup",))
+ if not callable(setup_module):
+ setup_module = None
+ teardown_module = _get_first_non_fixture_func(
+ self.obj, ("tearDownModule", "teardown_module")
+ )
+ if teardown_module is None and has_nose:
+ teardown_module = _get_first_non_fixture_func(self.obj, ("teardown",))
+ # Same as "setup" above - only treat as fixture if callable.
+ if not callable(teardown_module):
+ teardown_module = None
+
+ if setup_module is None and teardown_module is None:
+ return
+
+ @fixtures.fixture(
+ autouse=True,
+ scope="module",
+ # Use a unique name to speed up lookup.
+ name=f"_xunit_setup_module_fixture_{self.obj.__name__}",
+ )
+ def xunit_setup_module_fixture(request) -> Generator[None, None, None]:
+ if setup_module is not None:
+ _call_with_optional_argument(setup_module, request.module)
+ yield
+ if teardown_module is not None:
+ _call_with_optional_argument(teardown_module, request.module)
+
+ self.obj.__pytest_setup_module = xunit_setup_module_fixture
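+        # Illustrative sketch (editor's addition, not part of upstream pytest): the
+        # fixture injected above lets plain xunit-style module hooks participate in
+        # fixture setup/teardown ordering, e.g. a test module containing
+        #
+        #     def setup_module(module):
+        #         module.state = {}
+        #
+        #     def teardown_module(module):
+        #         module.state.clear()
+        #
+        # gets both hooks invoked around the module's tests.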
+
+ def _inject_setup_function_fixture(self) -> None:
+ """Inject a hidden autouse, function scoped fixture into the collected module object
+ that invokes setup_function/teardown_function if either or both are available.
+
+        Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with
+ other fixtures (#517).
+ """
+ setup_function = _get_first_non_fixture_func(self.obj, ("setup_function",))
+ teardown_function = _get_first_non_fixture_func(
+ self.obj, ("teardown_function",)
+ )
+ if setup_function is None and teardown_function is None:
+ return
+
+ @fixtures.fixture(
+ autouse=True,
+ scope="function",
+ # Use a unique name to speed up lookup.
+ name=f"_xunit_setup_function_fixture_{self.obj.__name__}",
+ )
+ def xunit_setup_function_fixture(request) -> Generator[None, None, None]:
+ if request.instance is not None:
+ # in this case we are bound to an instance, so we need to let
+ # setup_method handle this
+ yield
+ return
+ if setup_function is not None:
+ _call_with_optional_argument(setup_function, request.function)
+ yield
+ if teardown_function is not None:
+ _call_with_optional_argument(teardown_function, request.function)
+
+ self.obj.__pytest_setup_function = xunit_setup_function_fixture
+
+ def _importtestmodule(self):
+ # We assume we are only called once per module.
+ importmode = self.config.getoption("--import-mode")
+ try:
+ mod = import_path(self.path, mode=importmode, root=self.config.rootpath)
+ except SyntaxError as e:
+ raise self.CollectError(
+ ExceptionInfo.from_current().getrepr(style="short")
+ ) from e
+ except ImportPathMismatchError as e:
+ raise self.CollectError(
+ "import file mismatch:\n"
+ "imported module %r has this __file__ attribute:\n"
+ " %s\n"
+ "which is not the same as the test file we want to collect:\n"
+ " %s\n"
+ "HINT: remove __pycache__ / .pyc files and/or use a "
+ "unique basename for your test file modules" % e.args
+ ) from e
+ except ImportError as e:
+ exc_info = ExceptionInfo.from_current()
+ if self.config.getoption("verbose") < 2:
+ exc_info.traceback = exc_info.traceback.filter(filter_traceback)
+ exc_repr = (
+ exc_info.getrepr(style="short")
+ if exc_info.traceback
+ else exc_info.exconly()
+ )
+ formatted_tb = str(exc_repr)
+ raise self.CollectError(
+ "ImportError while importing test module '{path}'.\n"
+ "Hint: make sure your test modules/packages have valid Python names.\n"
+ "Traceback:\n"
+ "{traceback}".format(path=self.path, traceback=formatted_tb)
+ ) from e
+ except skip.Exception as e:
+ if e.allow_module_level:
+ raise
+ raise self.CollectError(
+ "Using pytest.skip outside of a test will skip the entire module. "
+ "If that's your intention, pass `allow_module_level=True`. "
+ "If you want to skip a specific test or an entire class, "
+ "use the @pytest.mark.skip or @pytest.mark.skipif decorators."
+ ) from e
+ self.config.pluginmanager.consider_module(mod)
+ return mod
+
+
+class Package(Module):
+ def __init__(
+ self,
+ fspath: Optional[LEGACY_PATH],
+ parent: nodes.Collector,
+ # NOTE: following args are unused:
+ config=None,
+ session=None,
+ nodeid=None,
+ path=Optional[Path],
+ ) -> None:
+ # NOTE: Could be just the following, but kept as-is for compat.
+ # nodes.FSCollector.__init__(self, fspath, parent=parent)
+ session = parent.session
+ nodes.FSCollector.__init__(
+ self,
+ fspath=fspath,
+ path=path,
+ parent=parent,
+ config=config,
+ session=session,
+ nodeid=nodeid,
+ )
+ self.name = self.path.parent.name
+
+ def setup(self) -> None:
+ # Not using fixtures to call setup_module here because autouse fixtures
+ # from packages are not called automatically (#4085).
+ setup_module = _get_first_non_fixture_func(
+ self.obj, ("setUpModule", "setup_module")
+ )
+ if setup_module is not None:
+ _call_with_optional_argument(setup_module, self.obj)
+
+ teardown_module = _get_first_non_fixture_func(
+ self.obj, ("tearDownModule", "teardown_module")
+ )
+ if teardown_module is not None:
+ func = partial(_call_with_optional_argument, teardown_module, self.obj)
+ self.addfinalizer(func)
+
+ def gethookproxy(self, fspath: "os.PathLike[str]"):
+ warnings.warn(FSCOLLECTOR_GETHOOKPROXY_ISINITPATH, stacklevel=2)
+ return self.session.gethookproxy(fspath)
+
+ def isinitpath(self, path: Union[str, "os.PathLike[str]"]) -> bool:
+ warnings.warn(FSCOLLECTOR_GETHOOKPROXY_ISINITPATH, stacklevel=2)
+ return self.session.isinitpath(path)
+
+ def _recurse(self, direntry: "os.DirEntry[str]") -> bool:
+ if direntry.name == "__pycache__":
+ return False
+ fspath = Path(direntry.path)
+ ihook = self.session.gethookproxy(fspath.parent)
+ if ihook.pytest_ignore_collect(collection_path=fspath, config=self.config):
+ return False
+ norecursepatterns = self.config.getini("norecursedirs")
+ if any(fnmatch_ex(pat, fspath) for pat in norecursepatterns):
+ return False
+ return True
+
+ def _collectfile(
+ self, fspath: Path, handle_dupes: bool = True
+ ) -> Sequence[nodes.Collector]:
+ assert (
+ fspath.is_file()
+ ), "{!r} is not a file (isdir={!r}, exists={!r}, islink={!r})".format(
+ fspath, fspath.is_dir(), fspath.exists(), fspath.is_symlink()
+ )
+ ihook = self.session.gethookproxy(fspath)
+ if not self.session.isinitpath(fspath):
+ if ihook.pytest_ignore_collect(collection_path=fspath, config=self.config):
+ return ()
+
+ if handle_dupes:
+ keepduplicates = self.config.getoption("keepduplicates")
+ if not keepduplicates:
+ duplicate_paths = self.config.pluginmanager._duplicatepaths
+ if fspath in duplicate_paths:
+ return ()
+ else:
+ duplicate_paths.add(fspath)
+
+ return ihook.pytest_collect_file(file_path=fspath, parent=self) # type: ignore[no-any-return]
+
+ def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]:
+ this_path = self.path.parent
+ init_module = this_path / "__init__.py"
+ if init_module.is_file() and path_matches_patterns(
+ init_module, self.config.getini("python_files")
+ ):
+ yield Module.from_parent(self, path=init_module)
+ pkg_prefixes: Set[Path] = set()
+ for direntry in visit(str(this_path), recurse=self._recurse):
+ path = Path(direntry.path)
+
+ # We will visit our own __init__.py file, in which case we skip it.
+ if direntry.is_file():
+ if direntry.name == "__init__.py" and path.parent == this_path:
+ continue
+
+ parts_ = parts(direntry.path)
+ if any(
+ str(pkg_prefix) in parts_ and pkg_prefix / "__init__.py" != path
+ for pkg_prefix in pkg_prefixes
+ ):
+ continue
+
+ if direntry.is_file():
+ yield from self._collectfile(path)
+ elif not direntry.is_dir():
+ # Broken symlink or invalid/missing file.
+ continue
+ elif path.joinpath("__init__.py").is_file():
+ pkg_prefixes.add(path)
+
+
+def _call_with_optional_argument(func, arg) -> None:
+    """Call the given function with the given argument if func accepts one argument;
+    otherwise call func without arguments."""
+ arg_count = func.__code__.co_argcount
+ if inspect.ismethod(func):
+ arg_count -= 1
+ if arg_count:
+ func(arg)
+ else:
+ func()
+
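+# Illustrative sketch (editor's addition, not part of upstream pytest): both
+# xunit-style signatures are accepted by the helper above,
+#
+#     def setup_module(module): ...   # called as setup_module(request.module)
+#     def setup_module(): ...         # called with no argument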
+
+def _get_first_non_fixture_func(obj: object, names: Iterable[str]) -> Optional[object]:
+ """Return the attribute from the given object to be used as a setup/teardown
+ xunit-style function, but only if not marked as a fixture to avoid calling it twice."""
+ for name in names:
+ meth: Optional[object] = getattr(obj, name, None)
+ if meth is not None and fixtures.getfixturemarker(meth) is None:
+ return meth
+ return None
+
+
+class Class(PyCollector):
+ """Collector for test methods."""
+
+ @classmethod
+ def from_parent(cls, parent, *, name, obj=None, **kw):
+ """The public constructor."""
+ return super().from_parent(name=name, parent=parent, **kw)
+
+ def newinstance(self):
+ return self.obj()
+
+ def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]:
+ if not safe_getattr(self.obj, "__test__", True):
+ return []
+ if hasinit(self.obj):
+ assert self.parent is not None
+ self.warn(
+ PytestCollectionWarning(
+ "cannot collect test class %r because it has a "
+ "__init__ constructor (from: %s)"
+ % (self.obj.__name__, self.parent.nodeid)
+ )
+ )
+ return []
+ elif hasnew(self.obj):
+ assert self.parent is not None
+ self.warn(
+ PytestCollectionWarning(
+ "cannot collect test class %r because it has a "
+ "__new__ constructor (from: %s)"
+ % (self.obj.__name__, self.parent.nodeid)
+ )
+ )
+ return []
+
+ self._inject_setup_class_fixture()
+ self._inject_setup_method_fixture()
+
+ self.session._fixturemanager.parsefactories(self.newinstance(), self.nodeid)
+
+ return super().collect()
+
+ def _inject_setup_class_fixture(self) -> None:
+ """Inject a hidden autouse, class scoped fixture into the collected class object
+ that invokes setup_class/teardown_class if either or both are available.
+
+        Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with
+ other fixtures (#517).
+ """
+ setup_class = _get_first_non_fixture_func(self.obj, ("setup_class",))
+ teardown_class = getattr(self.obj, "teardown_class", None)
+ if setup_class is None and teardown_class is None:
+ return
+
+ @fixtures.fixture(
+ autouse=True,
+ scope="class",
+ # Use a unique name to speed up lookup.
+ name=f"_xunit_setup_class_fixture_{self.obj.__qualname__}",
+ )
+ def xunit_setup_class_fixture(cls) -> Generator[None, None, None]:
+ if setup_class is not None:
+ func = getimfunc(setup_class)
+ _call_with_optional_argument(func, self.obj)
+ yield
+ if teardown_class is not None:
+ func = getimfunc(teardown_class)
+ _call_with_optional_argument(func, self.obj)
+
+ self.obj.__pytest_setup_class = xunit_setup_class_fixture
+
+ def _inject_setup_method_fixture(self) -> None:
+ """Inject a hidden autouse, function scoped fixture into the collected class object
+ that invokes setup_method/teardown_method if either or both are available.
+
+        Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with
+ other fixtures (#517).
+ """
+ has_nose = self.config.pluginmanager.has_plugin("nose")
+ setup_name = "setup_method"
+ setup_method = _get_first_non_fixture_func(self.obj, (setup_name,))
+ if setup_method is None and has_nose:
+ setup_name = "setup"
+ setup_method = _get_first_non_fixture_func(self.obj, (setup_name,))
+ teardown_name = "teardown_method"
+ teardown_method = getattr(self.obj, teardown_name, None)
+ if teardown_method is None and has_nose:
+ teardown_name = "teardown"
+ teardown_method = getattr(self.obj, teardown_name, None)
+ if setup_method is None and teardown_method is None:
+ return
+
+ @fixtures.fixture(
+ autouse=True,
+ scope="function",
+ # Use a unique name to speed up lookup.
+ name=f"_xunit_setup_method_fixture_{self.obj.__qualname__}",
+ )
+ def xunit_setup_method_fixture(self, request) -> Generator[None, None, None]:
+ method = request.function
+ if setup_method is not None:
+ func = getattr(self, setup_name)
+ _call_with_optional_argument(func, method)
+ yield
+ if teardown_method is not None:
+ func = getattr(self, teardown_name)
+ _call_with_optional_argument(func, method)
+
+ self.obj.__pytest_setup_method = xunit_setup_method_fixture
+
+
+class InstanceDummy:
+ """Instance used to be a node type between Class and Function. It has been
+ removed in pytest 7.0. Some plugins exist which reference `pytest.Instance`
+ only to ignore it; this dummy class keeps them working. This will be removed
+ in pytest 8."""
+
+ pass
+
+
+# Note: module __getattr__ only works on Python>=3.7. Unfortunately
+# we can't provide this deprecation warning on Python 3.6.
+def __getattr__(name: str) -> object:
+ if name == "Instance":
+        warnings.warn(INSTANCE_COLLECTOR, stacklevel=2)
+ return InstanceDummy
+ raise AttributeError(f"module {__name__} has no attribute {name}")
+
+
+def hasinit(obj: object) -> bool:
+ init: object = getattr(obj, "__init__", None)
+ if init:
+ return init != object.__init__
+ return False
+
+
+def hasnew(obj: object) -> bool:
+ new: object = getattr(obj, "__new__", None)
+ if new:
+ return new != object.__new__
+ return False
+
+
+@final
+@attr.s(frozen=True, slots=True, auto_attribs=True)
+class CallSpec2:
+    """A planned parametrized invocation of a test function.
+
+ Calculated during collection for a given test function's Metafunc.
+ Once collection is over, each callspec is turned into a single Item
+ and stored in item.callspec.
+ """
+
+ # arg name -> arg value which will be passed to the parametrized test
+ # function (direct parameterization).
+ funcargs: Dict[str, object] = attr.Factory(dict)
+ # arg name -> arg value which will be passed to a fixture of the same name
+ # (indirect parametrization).
+ params: Dict[str, object] = attr.Factory(dict)
+ # arg name -> arg index.
+ indices: Dict[str, int] = attr.Factory(dict)
+ # Used for sorting parametrized resources.
+ _arg2scope: Dict[str, Scope] = attr.Factory(dict)
+ # Parts which will be added to the item's name in `[..]` separated by "-".
+ _idlist: List[str] = attr.Factory(list)
+ # Marks which will be applied to the item.
+ marks: List[Mark] = attr.Factory(list)
+
+ def setmulti(
+ self,
+ *,
+ valtypes: Mapping[str, "Literal['params', 'funcargs']"],
+ argnames: Iterable[str],
+ valset: Iterable[object],
+ id: str,
+ marks: Iterable[Union[Mark, MarkDecorator]],
+ scope: Scope,
+ param_index: int,
+ ) -> "CallSpec2":
+ funcargs = self.funcargs.copy()
+ params = self.params.copy()
+ indices = self.indices.copy()
+ arg2scope = self._arg2scope.copy()
+ for arg, val in zip(argnames, valset):
+ if arg in params or arg in funcargs:
+ raise ValueError(f"duplicate {arg!r}")
+ valtype_for_arg = valtypes[arg]
+ if valtype_for_arg == "params":
+ params[arg] = val
+ elif valtype_for_arg == "funcargs":
+ funcargs[arg] = val
+ else:
+ assert_never(valtype_for_arg)
+ indices[arg] = param_index
+ arg2scope[arg] = scope
+ return CallSpec2(
+ funcargs=funcargs,
+ params=params,
+ arg2scope=arg2scope,
+ indices=indices,
+ idlist=[*self._idlist, id],
+ marks=[*self.marks, *normalize_mark_list(marks)],
+ )
+
+ def getparam(self, name: str) -> object:
+ try:
+ return self.params[name]
+ except KeyError as e:
+ raise ValueError(name) from e
+
+ @property
+ def id(self) -> str:
+ return "-".join(self._idlist)
+
+
+@final
+class Metafunc:
+ """Objects passed to the :hook:`pytest_generate_tests` hook.
+
+ They help to inspect a test function and to generate tests according to
+ test configuration or values specified in the class or module where a
+ test function is defined.
+ """
+
+ def __init__(
+ self,
+ definition: "FunctionDefinition",
+ fixtureinfo: fixtures.FuncFixtureInfo,
+ config: Config,
+ cls=None,
+ module=None,
+ *,
+ _ispytest: bool = False,
+ ) -> None:
+ check_ispytest(_ispytest)
+
+ #: Access to the underlying :class:`_pytest.python.FunctionDefinition`.
+ self.definition = definition
+
+ #: Access to the :class:`pytest.Config` object for the test session.
+ self.config = config
+
+ #: The module object where the test function is defined in.
+ self.module = module
+
+ #: Underlying Python test function.
+ self.function = definition.obj
+
+ #: Set of fixture names required by the test function.
+ self.fixturenames = fixtureinfo.names_closure
+
+ #: Class object where the test function is defined in or ``None``.
+ self.cls = cls
+
+ self._arg2fixturedefs = fixtureinfo.name2fixturedefs
+
+ # Result of parametrize().
+ self._calls: List[CallSpec2] = []
+
+ def parametrize(
+ self,
+ argnames: Union[str, List[str], Tuple[str, ...]],
+ argvalues: Iterable[Union[ParameterSet, Sequence[object], object]],
+ indirect: Union[bool, Sequence[str]] = False,
+ ids: Optional[
+ Union[
+ Iterable[Union[None, str, float, int, bool]],
+ Callable[[Any], Optional[object]],
+ ]
+ ] = None,
+ scope: "Optional[_ScopeName]" = None,
+ *,
+ _param_mark: Optional[Mark] = None,
+ ) -> None:
+ """Add new invocations to the underlying test function using the list
+ of argvalues for the given argnames. Parametrization is performed
+        during the collection phase. If you need to set up expensive resources,
+        consider passing ``indirect`` so the setup runs during test setup rather
+        than at collection time.
+
+ Can be called multiple times, in which case each call parametrizes all
+ previous parametrizations, e.g.
+
+ ::
+
+ unparametrized: t
+ parametrize ["x", "y"]: t[x], t[y]
+ parametrize [1, 2]: t[x-1], t[x-2], t[y-1], t[y-2]
+
+ :param argnames:
+ A comma-separated string denoting one or more argument names, or
+ a list/tuple of argument strings.
+
+ :param argvalues:
+ The list of argvalues determines how often a test is invoked with
+ different argument values.
+
+ If only one argname was specified argvalues is a list of values.
+ If N argnames were specified, argvalues must be a list of
+ N-tuples, where each tuple-element specifies a value for its
+ respective argname.
+
+ :param indirect:
+ A list of arguments' names (subset of argnames) or a boolean.
+ If True the list contains all names from the argnames. Each
+ argvalue corresponding to an argname in this list will
+ be passed as request.param to its respective argname fixture
+ function so that it can perform more expensive setups during the
+ setup phase of a test rather than at collection time.
+
+ :param ids:
+ Sequence of (or generator for) ids for ``argvalues``,
+ or a callable to return part of the id for each argvalue.
+
+ With sequences (and generators like ``itertools.count()``) the
+ returned ids should be of type ``string``, ``int``, ``float``,
+ ``bool``, or ``None``.
+ They are mapped to the corresponding index in ``argvalues``.
+ ``None`` means to use the auto-generated id.
+
+ If it is a callable it will be called for each entry in
+ ``argvalues``, and the return value is used as part of the
+ auto-generated id for the whole set (where parts are joined with
+ dashes ("-")).
+ This is useful to provide more specific ids for certain items, e.g.
+ dates. Returning ``None`` will use an auto-generated id.
+
+ If no ids are provided they will be generated automatically from
+ the argvalues.
+
+ :param scope:
+ If specified it denotes the scope of the parameters.
+ The scope is used for grouping tests by parameter instances.
+ It will also override any fixture-function defined scope, allowing
+ to set a dynamic scope using test context or configuration.
+ """
+ argnames, parameters = ParameterSet._for_parametrize(
+ argnames,
+ argvalues,
+ self.function,
+ self.config,
+ nodeid=self.definition.nodeid,
+ )
+ del argvalues
+
+ if "request" in argnames:
+ fail(
+ "'request' is a reserved name and cannot be used in @pytest.mark.parametrize",
+ pytrace=False,
+ )
+
+ if scope is not None:
+ scope_ = Scope.from_user(
+ scope, descr=f"parametrize() call in {self.function.__name__}"
+ )
+ else:
+ scope_ = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect)
+
+ self._validate_if_using_arg_names(argnames, indirect)
+
+ arg_values_types = self._resolve_arg_value_types(argnames, indirect)
+
+ # Use any already (possibly) generated ids with parametrize Marks.
+ if _param_mark and _param_mark._param_ids_from:
+ generated_ids = _param_mark._param_ids_from._param_ids_generated
+ if generated_ids is not None:
+ ids = generated_ids
+
+ ids = self._resolve_arg_ids(
+ argnames, ids, parameters, nodeid=self.definition.nodeid
+ )
+
+ # Store used (possibly generated) ids with parametrize Marks.
+ if _param_mark and _param_mark._param_ids_from and generated_ids is None:
+ object.__setattr__(_param_mark._param_ids_from, "_param_ids_generated", ids)
+
+ # Create the new calls: if we are parametrize() multiple times (by applying the decorator
+ # more than once) then we accumulate those calls generating the cartesian product
+ # of all calls.
+ newcalls = []
+ for callspec in self._calls or [CallSpec2()]:
+ for param_index, (param_id, param_set) in enumerate(zip(ids, parameters)):
+ newcallspec = callspec.setmulti(
+ valtypes=arg_values_types,
+ argnames=argnames,
+ valset=param_set.values,
+ id=param_id,
+ marks=param_set.marks,
+ scope=scope_,
+ param_index=param_index,
+ )
+ newcalls.append(newcallspec)
+ self._calls = newcalls
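+
+        # Illustrative sketch (editor's addition, not part of upstream pytest):
+        # stacking parametrize as described in the docstring above yields the
+        # cartesian product of calls, e.g.
+        #
+        #     @pytest.mark.parametrize("x", [0, 1])
+        #     @pytest.mark.parametrize("y", ["a", "b"])
+        #     def test_combo(x, y): ...
+        #
+        # collects four items, one per (x, y) combination.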
+
+ def _resolve_arg_ids(
+ self,
+ argnames: Sequence[str],
+ ids: Optional[
+ Union[
+ Iterable[Union[None, str, float, int, bool]],
+ Callable[[Any], Optional[object]],
+ ]
+ ],
+ parameters: Sequence[ParameterSet],
+ nodeid: str,
+ ) -> List[str]:
+ """Resolve the actual ids for the given argnames, based on the ``ids`` parameter given
+ to ``parametrize``.
+
+ :param List[str] argnames: List of argument names passed to ``parametrize()``.
+ :param ids: The ids parameter of the parametrized call (see docs).
+ :param List[ParameterSet] parameters: The list of parameter values, same size as ``argnames``.
+        :param str nodeid: The nodeid of the item that generated this parametrized call.
+ :rtype: List[str]
+ :returns: The list of ids for each argname given.
+ """
+ if ids is None:
+ idfn = None
+ ids_ = None
+ elif callable(ids):
+ idfn = ids
+ ids_ = None
+ else:
+ idfn = None
+ ids_ = self._validate_ids(ids, parameters, self.function.__name__)
+ return idmaker(argnames, parameters, idfn, ids_, self.config, nodeid=nodeid)
+
+ def _validate_ids(
+ self,
+ ids: Iterable[Union[None, str, float, int, bool]],
+ parameters: Sequence[ParameterSet],
+ func_name: str,
+ ) -> List[Union[None, str]]:
+ try:
+ num_ids = len(ids) # type: ignore[arg-type]
+ except TypeError:
+ try:
+ iter(ids)
+ except TypeError as e:
+ raise TypeError("ids must be a callable or an iterable") from e
+ num_ids = len(parameters)
+
+ # num_ids == 0 is a special case: https://github.com/pytest-dev/pytest/issues/1849
+ if num_ids != len(parameters) and num_ids != 0:
+ msg = "In {}: {} parameter sets specified, with different number of ids: {}"
+ fail(msg.format(func_name, len(parameters), num_ids), pytrace=False)
+
+ new_ids = []
+ for idx, id_value in enumerate(itertools.islice(ids, num_ids)):
+ if id_value is None or isinstance(id_value, str):
+ new_ids.append(id_value)
+ elif isinstance(id_value, (float, int, bool)):
+ new_ids.append(str(id_value))
+ else:
+ msg = ( # type: ignore[unreachable]
+ "In {}: ids must be list of string/float/int/bool, "
+ "found: {} (type: {!r}) at index {}"
+ )
+ fail(
+ msg.format(func_name, saferepr(id_value), type(id_value), idx),
+ pytrace=False,
+ )
+ return new_ids
+
+ def _resolve_arg_value_types(
+ self,
+ argnames: Sequence[str],
+ indirect: Union[bool, Sequence[str]],
+ ) -> Dict[str, "Literal['params', 'funcargs']"]:
+ """Resolve if each parametrized argument must be considered a
+ parameter to a fixture or a "funcarg" to the function, based on the
+        ``indirect`` parameter of the ``parametrize()`` call.
+
+ :param List[str] argnames: List of argument names passed to ``parametrize()``.
+ :param indirect: Same as the ``indirect`` parameter of ``parametrize()``.
+ :rtype: Dict[str, str]
+ A dict mapping each arg name to either:
+ * "params" if the argname should be the parameter of a fixture of the same name.
+ * "funcargs" if the argname should be a parameter to the parametrized test function.
+ """
+ if isinstance(indirect, bool):
+ valtypes: Dict[str, Literal["params", "funcargs"]] = dict.fromkeys(
+ argnames, "params" if indirect else "funcargs"
+ )
+ elif isinstance(indirect, Sequence):
+ valtypes = dict.fromkeys(argnames, "funcargs")
+ for arg in indirect:
+ if arg not in argnames:
+ fail(
+ "In {}: indirect fixture '{}' doesn't exist".format(
+ self.function.__name__, arg
+ ),
+ pytrace=False,
+ )
+ valtypes[arg] = "params"
+ else:
+ fail(
+ "In {func}: expected Sequence or boolean for indirect, got {type}".format(
+ type=type(indirect).__name__, func=self.function.__name__
+ ),
+ pytrace=False,
+ )
+ return valtypes
+
+ def _validate_if_using_arg_names(
+ self,
+ argnames: Sequence[str],
+ indirect: Union[bool, Sequence[str]],
+ ) -> None:
+        """Check that every argname is actually used by the function (directly or
+        indirectly) and is not merely an argument with a default value.
+
+ :param List[str] argnames: List of argument names passed to ``parametrize()``.
+ :param indirect: Same as the ``indirect`` parameter of ``parametrize()``.
+ :raises ValueError: If validation fails.
+ """
+ default_arg_names = set(get_default_arg_names(self.function))
+ func_name = self.function.__name__
+ for arg in argnames:
+ if arg not in self.fixturenames:
+ if arg in default_arg_names:
+ fail(
+ "In {}: function already takes an argument '{}' with a default value".format(
+ func_name, arg
+ ),
+ pytrace=False,
+ )
+ else:
+ if isinstance(indirect, Sequence):
+ name = "fixture" if arg in indirect else "argument"
+ else:
+ name = "fixture" if indirect else "argument"
+ fail(
+ f"In {func_name}: function uses no {name} '{arg}'",
+ pytrace=False,
+ )
+
+
+def _find_parametrized_scope(
+ argnames: Sequence[str],
+ arg2fixturedefs: Mapping[str, Sequence[fixtures.FixtureDef[object]]],
+ indirect: Union[bool, Sequence[str]],
+) -> Scope:
+ """Find the most appropriate scope for a parametrized call based on its arguments.
+
+ When there's at least one direct argument, always use "function" scope.
+
+ When a test function is parametrized and all its arguments are indirect
+ (e.g. fixtures), return the most narrow scope based on the fixtures used.
+
+ Related to issue #1832, based on code posted by @Kingdread.
+ """
+ if isinstance(indirect, Sequence):
+ all_arguments_are_fixtures = len(indirect) == len(argnames)
+ else:
+ all_arguments_are_fixtures = bool(indirect)
+
+ if all_arguments_are_fixtures:
+ fixturedefs = arg2fixturedefs or {}
+ used_scopes = [
+ fixturedef[0]._scope
+ for name, fixturedef in fixturedefs.items()
+ if name in argnames
+ ]
+ # Takes the most narrow scope from used fixtures.
+ return min(used_scopes, default=Scope.Function)
+
+ return Scope.Function
+
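+# Illustrative sketch (editor's addition, not part of upstream pytest): for
+#
+#     @pytest.mark.parametrize("db", ["sqlite", "postgres"], indirect=True)
+#     def test_query(db): ...
+#
+# where ``db`` is a module-scoped fixture, the parametrization is grouped at
+# "module" scope; any direct (non-indirect) argument forces "function" scope.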
+
+def _ascii_escaped_by_config(val: Union[str, bytes], config: Optional[Config]) -> str:
+ if config is None:
+ escape_option = False
+ else:
+ escape_option = config.getini(
+ "disable_test_id_escaping_and_forfeit_all_rights_to_community_support"
+ )
+ # TODO: If escaping is turned off and the user passes bytes,
+ # will return a bytes. For now we ignore this but the
+ # code *probably* doesn't handle this case.
+ return val if escape_option else ascii_escaped(val) # type: ignore
+
+
+def _idval(
+ val: object,
+ argname: str,
+ idx: int,
+ idfn: Optional[Callable[[Any], Optional[object]]],
+ nodeid: Optional[str],
+ config: Optional[Config],
+) -> str:
+ if idfn:
+ try:
+ generated_id = idfn(val)
+ if generated_id is not None:
+ val = generated_id
+ except Exception as e:
+ prefix = f"{nodeid}: " if nodeid is not None else ""
+ msg = "error raised while trying to determine id of parameter '{}' at position {}"
+ msg = prefix + msg.format(argname, idx)
+ raise ValueError(msg) from e
+ elif config:
+ hook_id: Optional[str] = config.hook.pytest_make_parametrize_id(
+ config=config, val=val, argname=argname
+ )
+ if hook_id:
+ return hook_id
+
+ if isinstance(val, STRING_TYPES):
+ return _ascii_escaped_by_config(val, config)
+ elif val is None or isinstance(val, (float, int, bool, complex)):
+ return str(val)
+ elif isinstance(val, Pattern):
+ return ascii_escaped(val.pattern)
+ elif val is NOTSET:
+ # Fallback to default. Note that NOTSET is an enum.Enum.
+ pass
+ elif isinstance(val, enum.Enum):
+ return str(val)
+ elif isinstance(getattr(val, "__name__", None), str):
+ # Name of a class, function, module, etc.
+ name: str = getattr(val, "__name__")
+ return name
+ return str(argname) + str(idx)
+
+
+def _idvalset(
+ idx: int,
+ parameterset: ParameterSet,
+ argnames: Iterable[str],
+ idfn: Optional[Callable[[Any], Optional[object]]],
+ ids: Optional[List[Union[None, str]]],
+ nodeid: Optional[str],
+ config: Optional[Config],
+) -> str:
+ if parameterset.id is not None:
+ return parameterset.id
+ id = None if ids is None or idx >= len(ids) else ids[idx]
+ if id is None:
+ this_id = [
+ _idval(val, argname, idx, idfn, nodeid=nodeid, config=config)
+ for val, argname in zip(parameterset.values, argnames)
+ ]
+ return "-".join(this_id)
+ else:
+ return _ascii_escaped_by_config(id, config)
+
+
+def idmaker(
+ argnames: Iterable[str],
+ parametersets: Iterable[ParameterSet],
+ idfn: Optional[Callable[[Any], Optional[object]]] = None,
+ ids: Optional[List[Union[None, str]]] = None,
+ config: Optional[Config] = None,
+ nodeid: Optional[str] = None,
+) -> List[str]:
+ resolved_ids = [
+ _idvalset(
+ valindex, parameterset, argnames, idfn, ids, config=config, nodeid=nodeid
+ )
+ for valindex, parameterset in enumerate(parametersets)
+ ]
+
+ # All IDs must be unique!
+ unique_ids = set(resolved_ids)
+ if len(unique_ids) != len(resolved_ids):
+
+ # Record the number of occurrences of each test ID.
+ test_id_counts = Counter(resolved_ids)
+
+ # Map the test ID to its next suffix.
+ test_id_suffixes: Dict[str, int] = defaultdict(int)
+
+ # Suffix non-unique IDs to make them unique.
+ for index, test_id in enumerate(resolved_ids):
+ if test_id_counts[test_id] > 1:
+ resolved_ids[index] = f"{test_id}{test_id_suffixes[test_id]}"
+ test_id_suffixes[test_id] += 1
+
+ return resolved_ids
+
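+# Illustrative sketch (editor's addition, not part of upstream pytest): duplicate
+# ids are disambiguated by appending an increasing counter, so two parameter sets
+# that both resolve to the id "x" come out as
+#
+#     ["x0", "x1"]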
+
+def _pretty_fixture_path(func) -> str:
+ cwd = Path.cwd()
+ loc = Path(getlocation(func, str(cwd)))
+ prefix = Path("...", "_pytest")
+ try:
+ return str(prefix / loc.relative_to(_PYTEST_DIR))
+ except ValueError:
+ return bestrelpath(cwd, loc)
+
+
+def show_fixtures_per_test(config):
+ from _pytest.main import wrap_session
+
+ return wrap_session(config, _show_fixtures_per_test)
+
+
+def _show_fixtures_per_test(config: Config, session: Session) -> None:
+ import _pytest.config
+
+ session.perform_collect()
+ curdir = Path.cwd()
+ tw = _pytest.config.create_terminal_writer(config)
+ verbose = config.getvalue("verbose")
+
+ def get_best_relpath(func) -> str:
+ loc = getlocation(func, str(curdir))
+ return bestrelpath(curdir, Path(loc))
+
+ def write_fixture(fixture_def: fixtures.FixtureDef[object]) -> None:
+ argname = fixture_def.argname
+ if verbose <= 0 and argname.startswith("_"):
+ return
+ prettypath = _pretty_fixture_path(fixture_def.func)
+ tw.write(f"{argname}", green=True)
+ tw.write(f" -- {prettypath}", yellow=True)
+ tw.write("\n")
+ fixture_doc = inspect.getdoc(fixture_def.func)
+ if fixture_doc:
+ write_docstring(
+ tw, fixture_doc.split("\n\n")[0] if verbose <= 0 else fixture_doc
+ )
+ else:
+ tw.line(" no docstring available", red=True)
+
+ def write_item(item: nodes.Item) -> None:
+ # Not all items have _fixtureinfo attribute.
+ info: Optional[FuncFixtureInfo] = getattr(item, "_fixtureinfo", None)
+ if info is None or not info.name2fixturedefs:
+ # This test item does not use any fixtures.
+ return
+ tw.line()
+ tw.sep("-", f"fixtures used by {item.name}")
+ # TODO: Fix this type ignore.
+ tw.sep("-", f"({get_best_relpath(item.function)})") # type: ignore[attr-defined]
+ # dict key not used in loop but needed for sorting.
+ for _, fixturedefs in sorted(info.name2fixturedefs.items()):
+ assert fixturedefs is not None
+ if not fixturedefs:
+ continue
+ # Last item is expected to be the one used by the test item.
+ write_fixture(fixturedefs[-1])
+
+ for session_item in session.items:
+ write_item(session_item)
+
+
+def showfixtures(config: Config) -> Union[int, ExitCode]:
+ from _pytest.main import wrap_session
+
+ return wrap_session(config, _showfixtures_main)
+
+
+def _showfixtures_main(config: Config, session: Session) -> None:
+ import _pytest.config
+
+ session.perform_collect()
+ curdir = Path.cwd()
+ tw = _pytest.config.create_terminal_writer(config)
+ verbose = config.getvalue("verbose")
+
+ fm = session._fixturemanager
+
+ available = []
+ seen: Set[Tuple[str, str]] = set()
+
+ for argname, fixturedefs in fm._arg2fixturedefs.items():
+ assert fixturedefs is not None
+ if not fixturedefs:
+ continue
+ for fixturedef in fixturedefs:
+ loc = getlocation(fixturedef.func, str(curdir))
+ if (fixturedef.argname, loc) in seen:
+ continue
+ seen.add((fixturedef.argname, loc))
+ available.append(
+ (
+ len(fixturedef.baseid),
+ fixturedef.func.__module__,
+ _pretty_fixture_path(fixturedef.func),
+ fixturedef.argname,
+ fixturedef,
+ )
+ )
+
+ available.sort()
+ currentmodule = None
+ for baseid, module, prettypath, argname, fixturedef in available:
+ if currentmodule != module:
+ if not module.startswith("_pytest."):
+ tw.line()
+ tw.sep("-", f"fixtures defined from {module}")
+ currentmodule = module
+ if verbose <= 0 and argname.startswith("_"):
+ continue
+ tw.write(f"{argname}", green=True)
+ if fixturedef.scope != "function":
+ tw.write(" [%s scope]" % fixturedef.scope, cyan=True)
+ tw.write(f" -- {prettypath}", yellow=True)
+ tw.write("\n")
+ doc = inspect.getdoc(fixturedef.func)
+ if doc:
+ write_docstring(tw, doc.split("\n\n")[0] if verbose <= 0 else doc)
+ else:
+ tw.line(" no docstring available", red=True)
+ tw.line()
+
+
+def write_docstring(tw: TerminalWriter, doc: str, indent: str = " ") -> None:
+ for line in doc.split("\n"):
+ tw.line(indent + line)
+
+
+class Function(PyobjMixin, nodes.Item):
+ """An Item responsible for setting up and executing a Python test function.
+
+ :param name:
+ The full function name, including any decorations like those
+ added by parametrization (``my_func[my_param]``).
+ :param parent:
+ The parent Node.
+ :param config:
+ The pytest Config object.
+ :param callspec:
+        If given, this function has been parametrized and the callspec contains
+ meta information about the parametrization.
+ :param callobj:
+ If given, the object which will be called when the Function is invoked,
+ otherwise the callobj will be obtained from ``parent`` using ``originalname``.
+ :param keywords:
+ Keywords bound to the function object for "-k" matching.
+ :param session:
+ The pytest Session object.
+ :param fixtureinfo:
+        Fixture information already resolved at this fixture node.
+ :param originalname:
+ The attribute name to use for accessing the underlying function object.
+ Defaults to ``name``. Set this if name is different from the original name,
+ for example when it contains decorations like those added by parametrization
+ (``my_func[my_param]``).
+ """
+
+ # Disable since functions handle it themselves.
+ _ALLOW_MARKERS = False
+
+ def __init__(
+ self,
+ name: str,
+ parent,
+ config: Optional[Config] = None,
+ callspec: Optional[CallSpec2] = None,
+ callobj=NOTSET,
+ keywords=None,
+ session: Optional[Session] = None,
+ fixtureinfo: Optional[FuncFixtureInfo] = None,
+ originalname: Optional[str] = None,
+ ) -> None:
+ super().__init__(name, parent, config=config, session=session)
+
+ if callobj is not NOTSET:
+ self.obj = callobj
+
+ #: Original function name, without any decorations (for example
+ #: parametrization adds a ``"[...]"`` suffix to function names), used to access
+ #: the underlying function object from ``parent`` (in case ``callobj`` is not given
+ #: explicitly).
+ #:
+ #: .. versionadded:: 3.0
+ self.originalname = originalname or name
+
+ # Note: when FunctionDefinition is introduced, we should change ``originalname``
+ # to a readonly property that returns FunctionDefinition.name.
+
+ self.keywords.update(self.obj.__dict__)
+ self.own_markers.extend(get_unpacked_marks(self.obj))
+ if callspec:
+ self.callspec = callspec
+            # this is totally hostile and a mess
+ # keywords are broken by design by now
+ # this will be redeemed later
+ for mark in callspec.marks:
+ # feel free to cry, this was broken for years before
+ # and keywords can't fix it per design
+ self.keywords[mark.name] = mark
+ self.own_markers.extend(normalize_mark_list(callspec.marks))
+ if keywords:
+ self.keywords.update(keywords)
+
+ # todo: this is a hell of a hack
+ # https://github.com/pytest-dev/pytest/issues/4569
+
+ self.keywords.update(
+ {
+ mark.name: True
+ for mark in self.iter_markers()
+ if mark.name not in self.keywords
+ }
+ )
+
+ if fixtureinfo is None:
+ fixtureinfo = self.session._fixturemanager.getfixtureinfo(
+ self, self.obj, self.cls, funcargs=True
+ )
+ self._fixtureinfo: FuncFixtureInfo = fixtureinfo
+ self.fixturenames = fixtureinfo.names_closure
+ self._initrequest()
+
+ @classmethod
+ def from_parent(cls, parent, **kw): # todo: determine sound type limitations
+ """The public constructor."""
+ return super().from_parent(parent=parent, **kw)
+
+ def _initrequest(self) -> None:
+ self.funcargs: Dict[str, object] = {}
+ self._request = fixtures.FixtureRequest(self, _ispytest=True)
+
+ @property
+ def function(self):
+ """Underlying python 'function' object."""
+ return getimfunc(self.obj)
+
+ def _getobj(self):
+ assert self.parent is not None
+ if isinstance(self.parent, Class):
+ # Each Function gets a fresh class instance.
+ parent_obj = self.parent.newinstance()
+ else:
+ parent_obj = self.parent.obj # type: ignore[attr-defined]
+ return getattr(parent_obj, self.originalname)
+
+ @property
+ def _pyfuncitem(self):
+ """(compatonly) for code expecting pytest-2.2 style request objects."""
+ return self
+
+ def runtest(self) -> None:
+ """Execute the underlying test function."""
+ self.ihook.pytest_pyfunc_call(pyfuncitem=self)
+
+ def setup(self) -> None:
+ self._request._fillfixtures()
+
+ def _prunetraceback(self, excinfo: ExceptionInfo[BaseException]) -> None:
+ if hasattr(self, "_obj") and not self.config.getoption("fulltrace", False):
+ code = _pytest._code.Code.from_function(get_real_func(self.obj))
+ path, firstlineno = code.path, code.firstlineno
+ traceback = excinfo.traceback
+ ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
+ if ntraceback == traceback:
+ ntraceback = ntraceback.cut(path=path)
+ if ntraceback == traceback:
+ ntraceback = ntraceback.filter(filter_traceback)
+ if not ntraceback:
+ ntraceback = traceback
+
+ excinfo.traceback = ntraceback.filter()
+ # issue364: mark all but first and last frames to
+ # only show a single-line message for each frame.
+ if self.config.getoption("tbstyle", "auto") == "auto":
+ if len(excinfo.traceback) > 2:
+ for entry in excinfo.traceback[1:-1]:
+ entry.set_repr_style("short")
+
+ # TODO: Type ignored -- breaks Liskov Substitution.
+ def repr_failure( # type: ignore[override]
+ self,
+ excinfo: ExceptionInfo[BaseException],
+ ) -> Union[str, TerminalRepr]:
+ style = self.config.getoption("tbstyle", "auto")
+ if style == "auto":
+ style = "long"
+ return self._repr_failure_py(excinfo, style=style)
+
+
+class FunctionDefinition(Function):
+ """
+ This class is a stopgap solution until we evolve to have actual function definition nodes
+ and manage to get rid of ``metafunc``.
+ """
+
+ def runtest(self) -> None:
+ raise RuntimeError("function definitions are not supposed to be run as tests")
+
+ setup = runtest
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/python_api.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/python_api.py
new file mode 100644
index 0000000000..cb72fde1e1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/python_api.py
@@ -0,0 +1,961 @@
+import math
+import pprint
+from collections.abc import Sized
+from decimal import Decimal
+from numbers import Complex
+from types import TracebackType
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Generic
+from typing import Iterable
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import overload
+from typing import Pattern
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+if TYPE_CHECKING:
+ from numpy import ndarray
+
+
+import _pytest._code
+from _pytest.compat import final
+from _pytest.compat import STRING_TYPES
+from _pytest.outcomes import fail
+
+
+def _non_numeric_type_error(value, at: Optional[str]) -> TypeError:
+ at_str = f" at {at}" if at else ""
+ return TypeError(
+ "cannot make approximate comparisons to non-numeric values: {!r} {}".format(
+ value, at_str
+ )
+ )
+
+
+def _compare_approx(
+ full_object: object,
+ message_data: Sequence[Tuple[str, str, str]],
+ number_of_elements: int,
+ different_ids: Sequence[object],
+ max_abs_diff: float,
+ max_rel_diff: float,
+) -> List[str]:
+ message_list = list(message_data)
+ message_list.insert(0, ("Index", "Obtained", "Expected"))
+ max_sizes = [0, 0, 0]
+ for index, obtained, expected in message_list:
+ max_sizes[0] = max(max_sizes[0], len(index))
+ max_sizes[1] = max(max_sizes[1], len(obtained))
+ max_sizes[2] = max(max_sizes[2], len(expected))
+ explanation = [
+ f"comparison failed. Mismatched elements: {len(different_ids)} / {number_of_elements}:",
+ f"Max absolute difference: {max_abs_diff}",
+ f"Max relative difference: {max_rel_diff}",
+ ] + [
+ f"{indexes:<{max_sizes[0]}} | {obtained:<{max_sizes[1]}} | {expected:<{max_sizes[2]}}"
+ for indexes, obtained, expected in message_list
+ ]
+ return explanation
+
+
+# builtin pytest.approx helper
+
+
+class ApproxBase:
+ """Provide shared utilities for making approximate comparisons between
+ numbers or sequences of numbers."""
+
+ # Tell numpy to use our `__eq__` operator instead of its own.
+ __array_ufunc__ = None
+ __array_priority__ = 100
+
+ def __init__(self, expected, rel=None, abs=None, nan_ok: bool = False) -> None:
+ __tracebackhide__ = True
+ self.expected = expected
+ self.abs = abs
+ self.rel = rel
+ self.nan_ok = nan_ok
+ self._check_type()
+
+ def __repr__(self) -> str:
+ raise NotImplementedError
+
+ def _repr_compare(self, other_side: Any) -> List[str]:
+ return [
+ "comparison failed",
+ f"Obtained: {other_side}",
+ f"Expected: {self}",
+ ]
+
+ def __eq__(self, actual) -> bool:
+ return all(
+ a == self._approx_scalar(x) for a, x in self._yield_comparisons(actual)
+ )
+
+ def __bool__(self):
+ __tracebackhide__ = True
+ raise AssertionError(
+ "approx() is not supported in a boolean context.\nDid you mean: `assert a == approx(b)`?"
+ )
+
+ # Ignore type because of https://github.com/python/mypy/issues/4266.
+ __hash__ = None # type: ignore
+
+ def __ne__(self, actual) -> bool:
+ return not (actual == self)
+
+ def _approx_scalar(self, x) -> "ApproxScalar":
+ if isinstance(x, Decimal):
+ return ApproxDecimal(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok)
+ return ApproxScalar(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok)
+
+ def _yield_comparisons(self, actual):
+ """Yield all the pairs of numbers to be compared.
+
+ This is used to implement the `__eq__` method.
+ """
+ raise NotImplementedError
+
+ def _check_type(self) -> None:
+ """Raise a TypeError if the expected value is not a valid type."""
+ # This is only a concern if the expected value is a sequence. In every
+ # other case, the approx() function ensures that the expected value has
+ # a numeric type. For this reason, the default is to do nothing. The
+ # classes that deal with sequences should reimplement this method to
+ # raise if there are any non-numeric elements in the sequence.
+ pass
+
+
+def _recursive_list_map(f, x):
+ if isinstance(x, list):
+ return [_recursive_list_map(f, xi) for xi in x]
+ else:
+ return f(x)
+
+
+class ApproxNumpy(ApproxBase):
+ """Perform approximate comparisons where the expected value is numpy array."""
+
+ def __repr__(self) -> str:
+ list_scalars = _recursive_list_map(self._approx_scalar, self.expected.tolist())
+ return f"approx({list_scalars!r})"
+
+ def _repr_compare(self, other_side: "ndarray") -> List[str]:
+ import itertools
+ import math
+
+ def get_value_from_nested_list(
+ nested_list: List[Any], nd_index: Tuple[Any, ...]
+ ) -> Any:
+ """
+ Helper function to get the value out of a nested list, given an n-dimensional index.
+ This mimics numpy's indexing, but for raw nested python lists.
+ """
+ value: Any = nested_list
+ for i in nd_index:
+ value = value[i]
+ return value
+
+ np_array_shape = self.expected.shape
+ approx_side_as_list = _recursive_list_map(
+ self._approx_scalar, self.expected.tolist()
+ )
+
+ if np_array_shape != other_side.shape:
+ return [
+ "Impossible to compare arrays with different shapes.",
+ f"Shapes: {np_array_shape} and {other_side.shape}",
+ ]
+
+ number_of_elements = self.expected.size
+ max_abs_diff = -math.inf
+ max_rel_diff = -math.inf
+ different_ids = []
+ for index in itertools.product(*(range(i) for i in np_array_shape)):
+ approx_value = get_value_from_nested_list(approx_side_as_list, index)
+ other_value = get_value_from_nested_list(other_side, index)
+ if approx_value != other_value:
+ abs_diff = abs(approx_value.expected - other_value)
+ max_abs_diff = max(max_abs_diff, abs_diff)
+ if other_value == 0.0:
+ max_rel_diff = math.inf
+ else:
+ max_rel_diff = max(max_rel_diff, abs_diff / abs(other_value))
+ different_ids.append(index)
+
+ message_data = [
+ (
+ str(index),
+ str(get_value_from_nested_list(other_side, index)),
+ str(get_value_from_nested_list(approx_side_as_list, index)),
+ )
+ for index in different_ids
+ ]
+ return _compare_approx(
+ self.expected,
+ message_data,
+ number_of_elements,
+ different_ids,
+ max_abs_diff,
+ max_rel_diff,
+ )
+
+ def __eq__(self, actual) -> bool:
+ import numpy as np
+
+ # self.expected is supposed to always be an array here.
+
+ if not np.isscalar(actual):
+ try:
+ actual = np.asarray(actual)
+ except Exception as e:
+ raise TypeError(f"cannot compare '{actual}' to numpy.ndarray") from e
+
+ if not np.isscalar(actual) and actual.shape != self.expected.shape:
+ return False
+
+ return super().__eq__(actual)
+
+ def _yield_comparisons(self, actual):
+ import numpy as np
+
+ # `actual` can either be a numpy array or a scalar, it is treated in
+ # `__eq__` before being passed to `ApproxBase.__eq__`, which is the
+ # only method that calls this one.
+
+ if np.isscalar(actual):
+ for i in np.ndindex(self.expected.shape):
+ yield actual, self.expected[i].item()
+ else:
+ for i in np.ndindex(self.expected.shape):
+ yield actual[i].item(), self.expected[i].item()
+
+
+class ApproxMapping(ApproxBase):
+ """Perform approximate comparisons where the expected value is a mapping
+ with numeric values (the keys can be anything)."""
+
+ def __repr__(self) -> str:
+ return "approx({!r})".format(
+ {k: self._approx_scalar(v) for k, v in self.expected.items()}
+ )
+
+ def _repr_compare(self, other_side: Mapping[object, float]) -> List[str]:
+ import math
+
+ approx_side_as_map = {
+ k: self._approx_scalar(v) for k, v in self.expected.items()
+ }
+
+ number_of_elements = len(approx_side_as_map)
+ max_abs_diff = -math.inf
+ max_rel_diff = -math.inf
+ different_ids = []
+ for (approx_key, approx_value), other_value in zip(
+ approx_side_as_map.items(), other_side.values()
+ ):
+ if approx_value != other_value:
+ max_abs_diff = max(
+ max_abs_diff, abs(approx_value.expected - other_value)
+ )
+ max_rel_diff = max(
+ max_rel_diff,
+ abs((approx_value.expected - other_value) / approx_value.expected),
+ )
+ different_ids.append(approx_key)
+
+ message_data = [
+ (str(key), str(other_side[key]), str(approx_side_as_map[key]))
+ for key in different_ids
+ ]
+
+ return _compare_approx(
+ self.expected,
+ message_data,
+ number_of_elements,
+ different_ids,
+ max_abs_diff,
+ max_rel_diff,
+ )
+
+ def __eq__(self, actual) -> bool:
+ try:
+ if set(actual.keys()) != set(self.expected.keys()):
+ return False
+ except AttributeError:
+ return False
+
+ return super().__eq__(actual)
+
+ def _yield_comparisons(self, actual):
+ for k in self.expected.keys():
+ yield actual[k], self.expected[k]
+
+ def _check_type(self) -> None:
+ __tracebackhide__ = True
+ for key, value in self.expected.items():
+ if isinstance(value, type(self.expected)):
+ msg = "pytest.approx() does not support nested dictionaries: key={!r} value={!r}\n full mapping={}"
+ raise TypeError(msg.format(key, value, pprint.pformat(self.expected)))
+
+
+class ApproxSequencelike(ApproxBase):
+ """Perform approximate comparisons where the expected value is a sequence of numbers."""
+
+ def __repr__(self) -> str:
+ seq_type = type(self.expected)
+ if seq_type not in (tuple, list, set):
+ seq_type = list
+ return "approx({!r})".format(
+ seq_type(self._approx_scalar(x) for x in self.expected)
+ )
+
+ def _repr_compare(self, other_side: Sequence[float]) -> List[str]:
+ import math
+ import numpy as np
+
+ if len(self.expected) != len(other_side):
+ return [
+ "Impossible to compare lists with different sizes.",
+ f"Lengths: {len(self.expected)} and {len(other_side)}",
+ ]
+
+ approx_side_as_map = _recursive_list_map(self._approx_scalar, self.expected)
+
+ number_of_elements = len(approx_side_as_map)
+ max_abs_diff = -math.inf
+ max_rel_diff = -math.inf
+ different_ids = []
+ for i, (approx_value, other_value) in enumerate(
+ zip(approx_side_as_map, other_side)
+ ):
+ if approx_value != other_value:
+ abs_diff = abs(approx_value.expected - other_value)
+ max_abs_diff = max(max_abs_diff, abs_diff)
+ if other_value == 0.0:
+ max_rel_diff = np.inf
+ else:
+ max_rel_diff = max(max_rel_diff, abs_diff / abs(other_value))
+ different_ids.append(i)
+
+ message_data = [
+ (str(i), str(other_side[i]), str(approx_side_as_map[i]))
+ for i in different_ids
+ ]
+
+ return _compare_approx(
+ self.expected,
+ message_data,
+ number_of_elements,
+ different_ids,
+ max_abs_diff,
+ max_rel_diff,
+ )
+
+ def __eq__(self, actual) -> bool:
+ try:
+ if len(actual) != len(self.expected):
+ return False
+ except TypeError:
+ return False
+ return super().__eq__(actual)
+
+ def _yield_comparisons(self, actual):
+ return zip(actual, self.expected)
+
+ def _check_type(self) -> None:
+ __tracebackhide__ = True
+ for index, x in enumerate(self.expected):
+ if isinstance(x, type(self.expected)):
+ msg = "pytest.approx() does not support nested data structures: {!r} at index {}\n full sequence: {}"
+ raise TypeError(msg.format(x, index, pprint.pformat(self.expected)))
+
+
+class ApproxScalar(ApproxBase):
+ """Perform approximate comparisons where the expected value is a single number."""
+
+ # Using Real should be better than this Union, but not possible yet:
+ # https://github.com/python/typeshed/pull/3108
+ DEFAULT_ABSOLUTE_TOLERANCE: Union[float, Decimal] = 1e-12
+ DEFAULT_RELATIVE_TOLERANCE: Union[float, Decimal] = 1e-6
+
+ def __repr__(self) -> str:
+ """Return a string communicating both the expected value and the
+ tolerance for the comparison being made.
+
+ For example, ``1.0 ± 1e-6``, ``(3+4j) ± 5e-6 ∠ ±180°``.
+ """
+ # Don't show a tolerance for values that aren't compared using
+ # tolerances, i.e. non-numerics and infinities. Need to call abs to
+ # handle complex numbers, e.g. (inf + 1j).
+ if (not isinstance(self.expected, (Complex, Decimal))) or math.isinf(
+ abs(self.expected) # type: ignore[arg-type]
+ ):
+ return str(self.expected)
+
+ # If a sensible tolerance can't be calculated, self.tolerance will
+ # raise a ValueError. In this case, display '???'.
+ try:
+ vetted_tolerance = f"{self.tolerance:.1e}"
+ if (
+ isinstance(self.expected, Complex)
+ and self.expected.imag
+ and not math.isinf(self.tolerance)
+ ):
+ vetted_tolerance += " ∠ ±180°"
+ except ValueError:
+ vetted_tolerance = "???"
+
+ return f"{self.expected} ± {vetted_tolerance}"
+
+ def __eq__(self, actual) -> bool:
+ """Return whether the given value is equal to the expected value
+ within the pre-specified tolerance."""
+ asarray = _as_numpy_array(actual)
+ if asarray is not None:
+ # Call ``__eq__()`` manually to prevent infinite-recursion with
+ # numpy<1.13. See #3748.
+ return all(self.__eq__(a) for a in asarray.flat)
+
+ # Short-circuit exact equality.
+ if actual == self.expected:
+ return True
+
+ # If either type is non-numeric, fall back to strict equality.
+ # NB: we need Complex, rather than just Number, to ensure that __abs__,
+ # __sub__, and __float__ are defined.
+ if not (
+ isinstance(self.expected, (Complex, Decimal))
+ and isinstance(actual, (Complex, Decimal))
+ ):
+ return False
+
+ # Allow the user to control whether NaNs are considered equal to each
+ # other or not. The abs() calls are for compatibility with complex
+ # numbers.
+ if math.isnan(abs(self.expected)): # type: ignore[arg-type]
+ return self.nan_ok and math.isnan(abs(actual)) # type: ignore[arg-type]
+
+ # Infinity shouldn't be approximately equal to anything but itself, but
+ # if there's a relative tolerance, it will be infinite and infinity
+ # will seem approximately equal to everything. The equal-to-itself
+ # case would have been short circuited above, so here we can just
+ # return false if the expected value is infinite. The abs() call is
+ # for compatibility with complex numbers.
+ if math.isinf(abs(self.expected)): # type: ignore[arg-type]
+ return False
+
+ # Return true if the two numbers are within the tolerance.
+ result: bool = abs(self.expected - actual) <= self.tolerance
+ return result
+
+ # Ignore type because of https://github.com/python/mypy/issues/4266.
+ __hash__ = None # type: ignore
+
+ @property
+ def tolerance(self):
+ """Return the tolerance for the comparison.
+
+ This could be either an absolute tolerance or a relative tolerance,
+ depending on what the user specified or which would be larger.
+ """
+
+ def set_default(x, default):
+ return x if x is not None else default
+
+ # Figure out what the absolute tolerance should be. ``self.abs`` is
+ # either None or a value specified by the user.
+ absolute_tolerance = set_default(self.abs, self.DEFAULT_ABSOLUTE_TOLERANCE)
+
+ if absolute_tolerance < 0:
+ raise ValueError(
+ f"absolute tolerance can't be negative: {absolute_tolerance}"
+ )
+ if math.isnan(absolute_tolerance):
+ raise ValueError("absolute tolerance can't be NaN.")
+
+ # If the user specified an absolute tolerance but not a relative one,
+ # just return the absolute tolerance.
+ if self.rel is None:
+ if self.abs is not None:
+ return absolute_tolerance
+
+ # Figure out what the relative tolerance should be. ``self.rel`` is
+ # either None or a value specified by the user. This is done after
+ # we've made sure the user didn't ask for an absolute tolerance only,
+ # because we don't want to raise errors about the relative tolerance if
+ # we aren't even going to use it.
+ relative_tolerance = set_default(
+ self.rel, self.DEFAULT_RELATIVE_TOLERANCE
+ ) * abs(self.expected)
+
+ if relative_tolerance < 0:
+ raise ValueError(
+ f"relative tolerance can't be negative: {relative_tolerance}"
+ )
+ if math.isnan(relative_tolerance):
+ raise ValueError("relative tolerance can't be NaN.")
+
+ # Return the larger of the relative and absolute tolerances.
+ return max(relative_tolerance, absolute_tolerance)
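+
+ # Editor's illustrative sketch (not part of the vendored module): with the
+ # defaults above (rel=1e-6, abs=1e-12), an expected value of 100 gives a
+ # relative tolerance of 1e-4, which wins over the absolute one, while an
+ # expected value of 0.0 falls back to the absolute tolerance:
+ #
+ # >>> ApproxScalar(100).tolerance # doctest: +SKIP
+ # 0.0001
+ # >>> ApproxScalar(0.0).tolerance # doctest: +SKIP
+ # 1e-12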
+
+
+class ApproxDecimal(ApproxScalar):
+ """Perform approximate comparisons where the expected value is a Decimal."""
+
+ DEFAULT_ABSOLUTE_TOLERANCE = Decimal("1e-12")
+ DEFAULT_RELATIVE_TOLERANCE = Decimal("1e-6")
+
+
+def approx(expected, rel=None, abs=None, nan_ok: bool = False) -> ApproxBase:
+ """Assert that two numbers (or two sets of numbers) are equal to each other
+ within some tolerance.
+
+ Due to the limits of floating-point arithmetic (see :std:doc:`tutorial/floatingpoint`), numbers that we
+ would intuitively expect to be equal are not always so::
+
+ >>> 0.1 + 0.2 == 0.3
+ False
+
+ This problem is commonly encountered when writing tests, e.g. when making
+ sure that floating-point values are what you expect them to be. One way to
+ deal with this problem is to assert that two floating-point numbers are
+ equal to within some appropriate tolerance::
+
+ >>> abs((0.1 + 0.2) - 0.3) < 1e-6
+ True
+
+ However, comparisons like this are tedious to write and difficult to
+ understand. Furthermore, absolute comparisons like the one above are
+ usually discouraged because there's no tolerance that works well for all
+ situations. ``1e-6`` is good for numbers around ``1``, but too small for
+ very big numbers and too big for very small ones. It's better to express
+ the tolerance as a fraction of the expected value, but relative comparisons
+ like that are even more difficult to write correctly and concisely.
+
+ The ``approx`` class performs floating-point comparisons using a syntax
+ that's as intuitive as possible::
+
+ >>> from pytest import approx
+ >>> 0.1 + 0.2 == approx(0.3)
+ True
+
+ The same syntax also works for sequences of numbers::
+
+ >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6))
+ True
+
+ Dictionary *values*::
+
+ >>> {'a': 0.1 + 0.2, 'b': 0.2 + 0.4} == approx({'a': 0.3, 'b': 0.6})
+ True
+
+ ``numpy`` arrays::
+
+ >>> import numpy as np # doctest: +SKIP
+ >>> np.array([0.1, 0.2]) + np.array([0.2, 0.4]) == approx(np.array([0.3, 0.6])) # doctest: +SKIP
+ True
+
+ And for a ``numpy`` array against a scalar::
+
+ >>> import numpy as np # doctest: +SKIP
+ >>> np.array([0.1, 0.2]) + np.array([0.2, 0.1]) == approx(0.3) # doctest: +SKIP
+ True
+
+ By default, ``approx`` considers numbers within a relative tolerance of
+ ``1e-6`` (i.e. one part in a million) of its expected value to be equal.
+ This treatment would lead to surprising results if the expected value was
+ ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``.
+ To handle this case less surprisingly, ``approx`` also considers numbers
+ within an absolute tolerance of ``1e-12`` of its expected value to be
+ equal. Infinity and NaN are special cases. Infinity is only considered
+ equal to itself, regardless of the relative tolerance. NaN is not
+ considered equal to anything by default, but you can make it be equal to
+ itself by setting the ``nan_ok`` argument to True. (This is meant to
+ facilitate comparing arrays that use NaN to mean "no data".)
+
+ Both the relative and absolute tolerances can be changed by passing
+ arguments to the ``approx`` constructor::
+
+ >>> 1.0001 == approx(1)
+ False
+ >>> 1.0001 == approx(1, rel=1e-3)
+ True
+ >>> 1.0001 == approx(1, abs=1e-3)
+ True
+
+ If you specify ``abs`` but not ``rel``, the comparison will not consider
+ the relative tolerance at all. In other words, two numbers that are within
+ the default relative tolerance of ``1e-6`` will still be considered unequal
+ if they exceed the specified absolute tolerance. If you specify both
+ ``abs`` and ``rel``, the numbers will be considered equal if either
+ tolerance is met::
+
+ >>> 1 + 1e-8 == approx(1)
+ True
+ >>> 1 + 1e-8 == approx(1, abs=1e-12)
+ False
+ >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12)
+ True
+
+ You can also use ``approx`` to compare nonnumeric types, or dicts and
+ sequences containing nonnumeric types, in which case it falls back to
+ strict equality. This can be useful for comparing dicts and sequences that
+ can contain optional values::
+
+ >>> {"required": 1.0000005, "optional": None} == approx({"required": 1, "optional": None})
+ True
+ >>> [None, 1.0000005] == approx([None,1])
+ True
+ >>> ["foo", 1.0000005] == approx([None,1])
+ False
+
+ If you're thinking about using ``approx``, then you might want to know how
+ it compares to other good ways of comparing floating-point numbers. All of
+ these algorithms are based on relative and absolute tolerances and should
+ agree for the most part, but they do have meaningful differences:
+
+ - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative
+ tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute
+ tolerance is met. Because the relative tolerance is calculated w.r.t.
+ both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor
+ ``b`` is a "reference value"). You have to specify an absolute tolerance
+ if you want to compare to ``0.0`` because there is no tolerance by
+ default. More information: :py:func:`math.isclose`.
+
+ - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference
+ between ``a`` and ``b`` is less than the sum of the relative tolerance
+ w.r.t. ``b`` and the absolute tolerance. Because the relative tolerance
+ is only calculated w.r.t. ``b``, this test is asymmetric and you can
+ think of ``b`` as the reference value. Support for comparing sequences
+ is provided by :py:func:`numpy.allclose`. More information:
+ :std:doc:`numpy:reference/generated/numpy.isclose`.
+
+ - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b``
+ are within an absolute tolerance of ``1e-7``. No relative tolerance is
+ considered, so this function is not appropriate for very large or very
+ small numbers. Also, it's only available in subclasses of ``unittest.TestCase``
+ and it's ugly because it doesn't follow PEP8. More information:
+ :py:meth:`unittest.TestCase.assertAlmostEqual`.
+
+ - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative
+ tolerance is met w.r.t. ``b`` or if the absolute tolerance is met.
+ Because the relative tolerance is only calculated w.r.t. ``b``, this test
+ is asymmetric and you can think of ``b`` as the reference value. In the
+ special case that you explicitly specify an absolute tolerance but not a
+ relative tolerance, only the absolute tolerance is considered.
+
+ .. note::
+
+ ``approx`` can handle numpy arrays, but we recommend the
+ specialised test helpers in :std:doc:`numpy:reference/routines.testing`
+ if you need support for comparisons, NaNs, or ULP-based tolerances.
+
+ .. warning::
+
+ .. versionchanged:: 3.2
+
+ In order to avoid inconsistent behavior, :py:exc:`TypeError` is
+ raised for ``>``, ``>=``, ``<`` and ``<=`` comparisons.
+ The example below illustrates the problem::
+
+ assert approx(0.1) > 0.1 + 1e-10 # calls approx(0.1).__gt__(0.1 + 1e-10)
+ assert 0.1 + 1e-10 > approx(0.1) # calls approx(0.1).__lt__(0.1 + 1e-10)
+
+ In the second example one expects ``approx(0.1).__le__(0.1 + 1e-10)``
+ to be called. But instead, ``approx(0.1).__lt__(0.1 + 1e-10)`` is used for
+ the comparison. This is because the call hierarchy of rich comparisons
+ follows a fixed behavior. More information: :py:meth:`object.__ge__`
+
+ .. versionchanged:: 3.7.1
+ ``approx`` raises ``TypeError`` when it encounters a dict value or
+ sequence element of nonnumeric type.
+
+ .. versionchanged:: 6.1.0
+ ``approx`` falls back to strict equality for nonnumeric types instead
+ of raising ``TypeError``.
+ """
+
+ # Delegate the comparison to a class that knows how to deal with the type
+ # of the expected value (e.g. int, float, list, dict, numpy.array, etc).
+ #
+ # The primary responsibility of these classes is to implement ``__eq__()``
+ # and ``__repr__()``. The former is used to actually check if some
+ # "actual" value is equivalent to the given expected value within the
+ # allowed tolerance. The latter is used to show the user the expected
+ # value and tolerance, in the case that a test failed.
+ #
+ # The actual logic for making approximate comparisons can be found in
+ # ApproxScalar, which is used to compare individual numbers. All of the
+ # other Approx classes eventually delegate to this class. The ApproxBase
+ # class provides some convenient methods and overloads, but isn't really
+ # essential.
+
+ __tracebackhide__ = True
+
+ if isinstance(expected, Decimal):
+ cls: Type[ApproxBase] = ApproxDecimal
+ elif isinstance(expected, Mapping):
+ cls = ApproxMapping
+ elif _is_numpy_array(expected):
+ expected = _as_numpy_array(expected)
+ cls = ApproxNumpy
+ elif (
+ isinstance(expected, Iterable)
+ and isinstance(expected, Sized)
+ # Type ignored because the error is wrong -- not unreachable.
+ and not isinstance(expected, STRING_TYPES) # type: ignore[unreachable]
+ ):
+ cls = ApproxSequencelike
+ else:
+ cls = ApproxScalar
+
+ return cls(expected, rel, abs, nan_ok)
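+
+ # Editor's illustrative sketch (not part of the vendored module): the
+ # dispatch above maps each expected type onto one of the helper classes
+ # defined earlier in this file.
+ #
+ # >>> type(approx(1.0)).__name__ # doctest: +SKIP
+ # 'ApproxScalar'
+ # >>> type(approx({"a": 0.3})).__name__ # doctest: +SKIP
+ # 'ApproxMapping'
+ # >>> type(approx([0.3, 0.6])).__name__ # doctest: +SKIP
+ # 'ApproxSequencelike'
+ # >>> type(approx(Decimal("0.3"))).__name__ # doctest: +SKIP
+ # 'ApproxDecimal'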
+
+
+def _is_numpy_array(obj: object) -> bool:
+ """
+ Return true if the given object is implicitly convertible to ndarray,
+ and numpy is already imported.
+ """
+ return _as_numpy_array(obj) is not None
+
+
+def _as_numpy_array(obj: object) -> Optional["ndarray"]:
+ """
+ Return an ndarray if the given object is implicitly convertible to ndarray,
+ and numpy is already imported, otherwise None.
+ """
+ import sys
+
+ np: Any = sys.modules.get("numpy")
+ if np is not None:
+ # avoid infinite recursion on numpy scalars, which have __array__
+ if np.isscalar(obj):
+ return None
+ elif isinstance(obj, np.ndarray):
+ return obj
+ elif hasattr(obj, "__array__") or hasattr(obj, "__array_interface__"):
+ return np.asarray(obj)
+ return None
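+
+ # Editor's illustrative note (not part of the vendored module): numpy is
+ # looked up in sys.modules rather than imported, so approx() never pulls
+ # numpy in for plain Python data.
+ #
+ # >>> _is_numpy_array([0.1, 0.2]) # doctest: +SKIP
+ # False
+ # >>> import numpy as np # doctest: +SKIP
+ # >>> _is_numpy_array(np.array([0.1, 0.2])) # doctest: +SKIP
+ # True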
+
+
+# builtin pytest.raises helper
+
+E = TypeVar("E", bound=BaseException)
+
+
+@overload
+def raises(
+ expected_exception: Union[Type[E], Tuple[Type[E], ...]],
+ *,
+ match: Optional[Union[str, Pattern[str]]] = ...,
+) -> "RaisesContext[E]":
+ ...
+
+
+@overload
+def raises(
+ expected_exception: Union[Type[E], Tuple[Type[E], ...]],
+ func: Callable[..., Any],
+ *args: Any,
+ **kwargs: Any,
+) -> _pytest._code.ExceptionInfo[E]:
+ ...
+
+
+def raises(
+ expected_exception: Union[Type[E], Tuple[Type[E], ...]], *args: Any, **kwargs: Any
+) -> Union["RaisesContext[E]", _pytest._code.ExceptionInfo[E]]:
+ r"""Assert that a code block/function call raises ``expected_exception``
+ or raise a failure exception otherwise.
+
+ :kwparam match:
+ If specified, a string containing a regular expression,
+ or a regular expression object, that is tested against the string
+ representation of the exception using :py:func:`re.search`. To match a literal
+ string that may contain :std:ref:`special characters <re-syntax>`, the pattern can
+ first be escaped with :py:func:`re.escape`.
+
+ (This is only used when :py:func:`pytest.raises` is used as a context manager,
+ and passed through to the function otherwise.
+ When using :py:func:`pytest.raises` as a function, you can use:
+ ``pytest.raises(Exc, func, match="passed on").match("my pattern")``.)
+
+ .. currentmodule:: _pytest._code
+
+ Use ``pytest.raises`` as a context manager, which will capture the exception of the given
+ type::
+
+ >>> import pytest
+ >>> with pytest.raises(ZeroDivisionError):
+ ... 1/0
+
+ If the code block does not raise the expected exception (``ZeroDivisionError`` in the example
+ above), or no exception at all, the check will fail instead.
+
+ You can also use the keyword argument ``match`` to assert that the
+ exception matches a text or regex::
+
+ >>> with pytest.raises(ValueError, match='must be 0 or None'):
+ ... raise ValueError("value must be 0 or None")
+
+ >>> with pytest.raises(ValueError, match=r'must be \d+$'):
+ ... raise ValueError("value must be 42")
+
+ The context manager produces an :class:`ExceptionInfo` object which can be used to inspect the
+ details of the captured exception::
+
+ >>> with pytest.raises(ValueError) as exc_info:
+ ... raise ValueError("value must be 42")
+ >>> assert exc_info.type is ValueError
+ >>> assert exc_info.value.args[0] == "value must be 42"
+
+ .. note::
+
+ When using ``pytest.raises`` as a context manager, it's worthwhile to
+ note that normal context manager rules apply and that the exception
+ raised *must* be the final line in the scope of the context manager.
+ Lines of code after that, within the scope of the context manager, will
+ not be executed. For example::
+
+ >>> value = 15
+ >>> with pytest.raises(ValueError) as exc_info:
+ ... if value > 10:
+ ... raise ValueError("value must be <= 10")
+ ... assert exc_info.type is ValueError # this will not execute
+
+ Instead, the following approach must be taken (note the difference in
+ scope)::
+
+ >>> with pytest.raises(ValueError) as exc_info:
+ ... if value > 10:
+ ... raise ValueError("value must be <= 10")
+ ...
+ >>> assert exc_info.type is ValueError
+
+ **Using with** ``pytest.mark.parametrize``
+
+ When using :ref:`pytest.mark.parametrize ref`
+ it is possible to parametrize tests such that
+ some runs raise an exception and others do not.
+
+ See :ref:`parametrizing_conditional_raising` for an example.
+
+ **Legacy form**
+
+ It is possible to specify a callable by passing a to-be-called lambda::
+
+ >>> raises(ZeroDivisionError, lambda: 1/0)
+ <ExceptionInfo ...>
+
+ or you can specify an arbitrary callable with arguments::
+
+ >>> def f(x): return 1/x
+ ...
+ >>> raises(ZeroDivisionError, f, 0)
+ <ExceptionInfo ...>
+ >>> raises(ZeroDivisionError, f, x=0)
+ <ExceptionInfo ...>
+
+ The form above is fully supported but discouraged for new code because the
+ context manager form is regarded as more readable and less error-prone.
+
+ .. note::
+ Similar to caught exception objects in Python, explicitly clearing
+ local references to returned ``ExceptionInfo`` objects can
+ help the Python interpreter speed up its garbage collection.
+
+ Clearing those references breaks a reference cycle
+ (``ExceptionInfo`` --> caught exception --> frame stack raising
+ the exception --> current frame stack --> local variables -->
+ ``ExceptionInfo``) which makes Python keep all objects referenced
+ from that cycle (including all local variables in the current
+ frame) alive until the next cyclic garbage collection run.
+ More detailed information can be found in the official Python
+ documentation for :ref:`the try statement <python:try>`.
+ """
+ __tracebackhide__ = True
+
+ if isinstance(expected_exception, type):
+ expected_exceptions: Tuple[Type[E], ...] = (expected_exception,)
+ else:
+ expected_exceptions = expected_exception
+ for exc in expected_exceptions:
+ if not isinstance(exc, type) or not issubclass(exc, BaseException):
+ msg = "expected exception must be a BaseException type, not {}" # type: ignore[unreachable]
+ not_a = exc.__name__ if isinstance(exc, type) else type(exc).__name__
+ raise TypeError(msg.format(not_a))
+
+ message = f"DID NOT RAISE {expected_exception}"
+
+ if not args:
+ match: Optional[Union[str, Pattern[str]]] = kwargs.pop("match", None)
+ if kwargs:
+ msg = "Unexpected keyword arguments passed to pytest.raises: "
+ msg += ", ".join(sorted(kwargs))
+ msg += "\nUse context-manager form instead?"
+ raise TypeError(msg)
+ return RaisesContext(expected_exception, message, match)
+ else:
+ func = args[0]
+ if not callable(func):
+ raise TypeError(f"{func!r} object (type: {type(func)}) must be callable")
+ try:
+ func(*args[1:], **kwargs)
+ except expected_exception as e:
+ # We just caught the exception - there is a traceback.
+ assert e.__traceback__ is not None
+ return _pytest._code.ExceptionInfo.from_exc_info(
+ (type(e), e, e.__traceback__)
+ )
+ fail(message)
+
+
+# This doesn't work with mypy for now. Use fail.Exception instead.
+raises.Exception = fail.Exception # type: ignore
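+
+ # Editor's illustrative sketch (not part of the vendored module): the two
+ # call forms handled above. Without positional arguments a RaisesContext is
+ # returned for use as a context manager; with a callable, the call happens
+ # immediately and an ExceptionInfo is returned (or fail() is triggered).
+ #
+ # >>> import pytest # doctest: +SKIP
+ # >>> with pytest.raises(KeyError): # doctest: +SKIP
+ # ... {}["missing"]
+ # >>> excinfo = pytest.raises(KeyError, lambda: {}["missing"]) # doctest: +SKIP
+ # >>> excinfo.type is KeyError # doctest: +SKIP
+ # True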
+
+
+@final
+class RaisesContext(Generic[E]):
+ def __init__(
+ self,
+ expected_exception: Union[Type[E], Tuple[Type[E], ...]],
+ message: str,
+ match_expr: Optional[Union[str, Pattern[str]]] = None,
+ ) -> None:
+ self.expected_exception = expected_exception
+ self.message = message
+ self.match_expr = match_expr
+ self.excinfo: Optional[_pytest._code.ExceptionInfo[E]] = None
+
+ def __enter__(self) -> _pytest._code.ExceptionInfo[E]:
+ self.excinfo = _pytest._code.ExceptionInfo.for_later()
+ return self.excinfo
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> bool:
+ __tracebackhide__ = True
+ if exc_type is None:
+ fail(self.message)
+ assert self.excinfo is not None
+ if not issubclass(exc_type, self.expected_exception):
+ return False
+ # Cast to narrow the exception type now that it's verified.
+ exc_info = cast(Tuple[Type[E], E, TracebackType], (exc_type, exc_val, exc_tb))
+ self.excinfo.fill_unfilled(exc_info)
+ if self.match_expr is not None:
+ self.excinfo.match(self.match_expr)
+ return True
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/python_path.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/python_path.py
new file mode 100644
index 0000000000..cceabbca12
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/python_path.py
@@ -0,0 +1,24 @@
+import sys
+
+import pytest
+from pytest import Config
+from pytest import Parser
+
+
+def pytest_addoption(parser: Parser) -> None:
+ parser.addini("pythonpath", type="paths", help="Add paths to sys.path", default=[])
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_load_initial_conftests(early_config: Config) -> None:
+ # `pythonpath = a b` will set `sys.path` to `[a, b, x, y, z, ...]`
+ for path in reversed(early_config.getini("pythonpath")):
+ sys.path.insert(0, str(path))
+
+
+@pytest.hookimpl(trylast=True)
+def pytest_unconfigure(config: Config) -> None:
+ for path in config.getini("pythonpath"):
+ path_str = str(path)
+ if path_str in sys.path:
+ sys.path.remove(path_str)
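+
+ # Editor's illustrative note (not part of the vendored module): given an ini
+ # file such as
+ #
+ # [pytest]
+ # pythonpath = src tests/helpers
+ #
+ # the hook above prepends "src" and then "tests/helpers" to sys.path (so the
+ # resulting order is ["src", "tests/helpers", ...]) before conftest files are
+ # imported, and pytest_unconfigure removes them again after the run.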
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/recwarn.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/recwarn.py
new file mode 100644
index 0000000000..175b571a80
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/recwarn.py
@@ -0,0 +1,296 @@
+"""Record warnings during test function execution."""
+import re
+import warnings
+from types import TracebackType
+from typing import Any
+from typing import Callable
+from typing import Generator
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import overload
+from typing import Pattern
+from typing import Tuple
+from typing import Type
+from typing import TypeVar
+from typing import Union
+
+from _pytest.compat import final
+from _pytest.deprecated import check_ispytest
+from _pytest.deprecated import WARNS_NONE_ARG
+from _pytest.fixtures import fixture
+from _pytest.outcomes import fail
+
+
+T = TypeVar("T")
+
+
+@fixture
+def recwarn() -> Generator["WarningsRecorder", None, None]:
+ """Return a :class:`WarningsRecorder` instance that records all warnings emitted by test functions.
+
+ See https://docs.python.org/library/how-to/capture-warnings.html for information
+ on warning categories.
+ """
+ wrec = WarningsRecorder(_ispytest=True)
+ with wrec:
+ warnings.simplefilter("default")
+ yield wrec
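+
+ # Editor's illustrative sketch (not part of the vendored module): typical use
+ # of the fixture from a test function.
+ #
+ # def test_deprecation_is_recorded(recwarn):
+ # warnings.warn("old api", DeprecationWarning)
+ # assert len(recwarn) == 1
+ # assert recwarn.pop(DeprecationWarning).category is DeprecationWarning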
+
+
+@overload
+def deprecated_call(
+ *, match: Optional[Union[str, Pattern[str]]] = ...
+) -> "WarningsRecorder":
+ ...
+
+
+@overload
+def deprecated_call(func: Callable[..., T], *args: Any, **kwargs: Any) -> T:
+ ...
+
+
+def deprecated_call(
+ func: Optional[Callable[..., Any]] = None, *args: Any, **kwargs: Any
+) -> Union["WarningsRecorder", Any]:
+ """Assert that code produces a ``DeprecationWarning`` or ``PendingDeprecationWarning``.
+
+ This function can be used as a context manager::
+
+ >>> import warnings
+ >>> def api_call_v2():
+ ... warnings.warn('use v3 of this api', DeprecationWarning)
+ ... return 200
+
+ >>> import pytest
+ >>> with pytest.deprecated_call():
+ ... assert api_call_v2() == 200
+
+ It can also be used by passing a function and ``*args`` and ``**kwargs``,
+ in which case it will ensure calling ``func(*args, **kwargs)`` produces one of
+ the warning types above. The return value is the return value of the function.
+
+ In the context manager form you may use the keyword argument ``match`` to assert
+ that the warning matches a text or regex.
+
+ The context manager produces a list of :class:`warnings.WarningMessage` objects,
+ one for each warning raised.
+ """
+ __tracebackhide__ = True
+ if func is not None:
+ args = (func,) + args
+ return warns((DeprecationWarning, PendingDeprecationWarning), *args, **kwargs)
+
+
+@overload
+def warns(
+ expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]] = ...,
+ *,
+ match: Optional[Union[str, Pattern[str]]] = ...,
+) -> "WarningsChecker":
+ ...
+
+
+@overload
+def warns(
+ expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]],
+ func: Callable[..., T],
+ *args: Any,
+ **kwargs: Any,
+) -> T:
+ ...
+
+
+def warns(
+ expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]] = Warning,
+ *args: Any,
+ match: Optional[Union[str, Pattern[str]]] = None,
+ **kwargs: Any,
+) -> Union["WarningsChecker", Any]:
+ r"""Assert that code raises a particular class of warning.
+
+ Specifically, the parameter ``expected_warning`` can be a warning class or
+ sequence of warning classes, and the code inside the ``with`` block must issue a warning of that class or
+ classes.
+
+ This helper produces a list of :class:`warnings.WarningMessage` objects,
+ one for each warning raised.
+
+ This function can be used as a context manager, or any of the other ways
+ :func:`pytest.raises` can be used::
+
+ >>> import pytest
+ >>> with pytest.warns(RuntimeWarning):
+ ... warnings.warn("my warning", RuntimeWarning)
+
+ In the context manager form you may use the keyword argument ``match`` to assert
+ that the warning matches a text or regex::
+
+ >>> with pytest.warns(UserWarning, match='must be 0 or None'):
+ ... warnings.warn("value must be 0 or None", UserWarning)
+
+ >>> with pytest.warns(UserWarning, match=r'must be \d+$'):
+ ... warnings.warn("value must be 42", UserWarning)
+
+ >>> with pytest.warns(UserWarning, match=r'must be \d+$'):
+ ... warnings.warn("this is not here", UserWarning)
+ Traceback (most recent call last):
+ ...
+ Failed: DID NOT WARN. No warnings of type ...UserWarning... were emitted...
+
+ """
+ __tracebackhide__ = True
+ if not args:
+ if kwargs:
+ msg = "Unexpected keyword arguments passed to pytest.warns: "
+ msg += ", ".join(sorted(kwargs))
+ msg += "\nUse context-manager form instead?"
+ raise TypeError(msg)
+ return WarningsChecker(expected_warning, match_expr=match, _ispytest=True)
+ else:
+ func = args[0]
+ if not callable(func):
+ raise TypeError(f"{func!r} object (type: {type(func)}) must be callable")
+ with WarningsChecker(expected_warning, _ispytest=True):
+ return func(*args[1:], **kwargs)
+
+
+class WarningsRecorder(warnings.catch_warnings):
+ """A context manager to record raised warnings.
+
+ Adapted from `warnings.catch_warnings`.
+ """
+
+ def __init__(self, *, _ispytest: bool = False) -> None:
+ check_ispytest(_ispytest)
+ # Type ignored due to the way typeshed handles warnings.catch_warnings.
+ super().__init__(record=True) # type: ignore[call-arg]
+ self._entered = False
+ self._list: List[warnings.WarningMessage] = []
+
+ @property
+ def list(self) -> List["warnings.WarningMessage"]:
+ """The list of recorded warnings."""
+ return self._list
+
+ def __getitem__(self, i: int) -> "warnings.WarningMessage":
+ """Get a recorded warning by index."""
+ return self._list[i]
+
+ def __iter__(self) -> Iterator["warnings.WarningMessage"]:
+ """Iterate through the recorded warnings."""
+ return iter(self._list)
+
+ def __len__(self) -> int:
+ """The number of recorded warnings."""
+ return len(self._list)
+
+ def pop(self, cls: Type[Warning] = Warning) -> "warnings.WarningMessage":
+ """Pop the first recorded warning, raise exception if not exists."""
+ for i, w in enumerate(self._list):
+ if issubclass(w.category, cls):
+ return self._list.pop(i)
+ __tracebackhide__ = True
+ raise AssertionError("%r not found in warning list" % cls)
+
+ def clear(self) -> None:
+ """Clear the list of recorded warnings."""
+ self._list[:] = []
+
+ # Type ignored because it doesn't exactly match warnings.catch_warnings.__enter__
+ # -- it returns a List but we only emulate one.
+ def __enter__(self) -> "WarningsRecorder": # type: ignore
+ if self._entered:
+ __tracebackhide__ = True
+ raise RuntimeError("Cannot enter %r twice" % self)
+ _list = super().__enter__()
+ # record=True means it's not None.
+ assert _list is not None
+ self._list = _list
+ warnings.simplefilter("always")
+ return self
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ if not self._entered:
+ __tracebackhide__ = True
+ raise RuntimeError("Cannot exit %r without entering first" % self)
+
+ super().__exit__(exc_type, exc_val, exc_tb)
+
+ # Built-in catch_warnings does not reset entered state so we do it
+ # manually here for this context manager to become reusable.
+ self._entered = False
+
+
+@final
+class WarningsChecker(WarningsRecorder):
+ def __init__(
+ self,
+ expected_warning: Optional[
+ Union[Type[Warning], Tuple[Type[Warning], ...]]
+ ] = Warning,
+ match_expr: Optional[Union[str, Pattern[str]]] = None,
+ *,
+ _ispytest: bool = False,
+ ) -> None:
+ check_ispytest(_ispytest)
+ super().__init__(_ispytest=True)
+
+ msg = "exceptions must be derived from Warning, not %s"
+ if expected_warning is None:
+ warnings.warn(WARNS_NONE_ARG, stacklevel=4)
+ expected_warning_tup = None
+ elif isinstance(expected_warning, tuple):
+ for exc in expected_warning:
+ if not issubclass(exc, Warning):
+ raise TypeError(msg % type(exc))
+ expected_warning_tup = expected_warning
+ elif issubclass(expected_warning, Warning):
+ expected_warning_tup = (expected_warning,)
+ else:
+ raise TypeError(msg % type(expected_warning))
+
+ self.expected_warning = expected_warning_tup
+ self.match_expr = match_expr
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ super().__exit__(exc_type, exc_val, exc_tb)
+
+ __tracebackhide__ = True
+
+ # only check if we're not currently handling an exception
+ if exc_type is None and exc_val is None and exc_tb is None:
+ if self.expected_warning is not None:
+ if not any(issubclass(r.category, self.expected_warning) for r in self):
+ __tracebackhide__ = True
+ fail(
+ "DID NOT WARN. No warnings of type {} were emitted. "
+ "The list of emitted warnings is: {}.".format(
+ self.expected_warning, [each.message for each in self]
+ )
+ )
+ elif self.match_expr is not None:
+ for r in self:
+ if issubclass(r.category, self.expected_warning):
+ if re.compile(self.match_expr).search(str(r.message)):
+ break
+ else:
+ fail(
+ "DID NOT WARN. No warnings of type {} matching"
+ " ('{}') were emitted. The list of emitted warnings"
+ " is: {}.".format(
+ self.expected_warning,
+ self.match_expr,
+ [each.message for each in self],
+ )
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/reports.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/reports.py
new file mode 100644
index 0000000000..a68e68bc52
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/reports.py
@@ -0,0 +1,598 @@
+import os
+from io import StringIO
+from pprint import pprint
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+import attr
+
+from _pytest._code.code import ExceptionChainRepr
+from _pytest._code.code import ExceptionInfo
+from _pytest._code.code import ExceptionRepr
+from _pytest._code.code import ReprEntry
+from _pytest._code.code import ReprEntryNative
+from _pytest._code.code import ReprExceptionInfo
+from _pytest._code.code import ReprFileLocation
+from _pytest._code.code import ReprFuncArgs
+from _pytest._code.code import ReprLocals
+from _pytest._code.code import ReprTraceback
+from _pytest._code.code import TerminalRepr
+from _pytest._io import TerminalWriter
+from _pytest.compat import final
+from _pytest.config import Config
+from _pytest.nodes import Collector
+from _pytest.nodes import Item
+from _pytest.outcomes import skip
+
+if TYPE_CHECKING:
+ from typing import NoReturn
+ from typing_extensions import Literal
+
+ from _pytest.runner import CallInfo
+
+
+def getworkerinfoline(node):
+ try:
+ return node._workerinfocache
+ except AttributeError:
+ d = node.workerinfo
+ ver = "%s.%s.%s" % d["version_info"][:3]
+ node._workerinfocache = s = "[{}] {} -- Python {} {}".format(
+ d["id"], d["sysplatform"], ver, d["executable"]
+ )
+ return s
+
+
+_R = TypeVar("_R", bound="BaseReport")
+
+
+class BaseReport:
+ when: Optional[str]
+ location: Optional[Tuple[str, Optional[int], str]]
+ longrepr: Union[
+ None, ExceptionInfo[BaseException], Tuple[str, int, str], str, TerminalRepr
+ ]
+ sections: List[Tuple[str, str]]
+ nodeid: str
+ outcome: "Literal['passed', 'failed', 'skipped']"
+
+ def __init__(self, **kw: Any) -> None:
+ self.__dict__.update(kw)
+
+ if TYPE_CHECKING:
+ # Can have arbitrary fields given to __init__().
+ def __getattr__(self, key: str) -> Any:
+ ...
+
+ def toterminal(self, out: TerminalWriter) -> None:
+ if hasattr(self, "node"):
+ worker_info = getworkerinfoline(self.node)
+ if worker_info:
+ out.line(worker_info)
+
+ longrepr = self.longrepr
+ if longrepr is None:
+ return
+
+ if hasattr(longrepr, "toterminal"):
+ longrepr_terminal = cast(TerminalRepr, longrepr)
+ longrepr_terminal.toterminal(out)
+ else:
+ try:
+ s = str(longrepr)
+ except UnicodeEncodeError:
+ s = "<unprintable longrepr>"
+ out.line(s)
+
+ def get_sections(self, prefix: str) -> Iterator[Tuple[str, str]]:
+ for name, content in self.sections:
+ if name.startswith(prefix):
+ yield prefix, content
+
+ @property
+ def longreprtext(self) -> str:
+ """Read-only property that returns the full string representation of
+ ``longrepr``.
+
+ .. versionadded:: 3.0
+ """
+ file = StringIO()
+ tw = TerminalWriter(file)
+ tw.hasmarkup = False
+ self.toterminal(tw)
+ exc = file.getvalue()
+ return exc.strip()
+
+ @property
+ def caplog(self) -> str:
+ """Return captured log lines, if log capturing is enabled.
+
+ .. versionadded:: 3.5
+ """
+ return "\n".join(
+ content for (prefix, content) in self.get_sections("Captured log")
+ )
+
+ @property
+ def capstdout(self) -> str:
+ """Return captured text from stdout, if capturing is enabled.
+
+ .. versionadded:: 3.0
+ """
+ return "".join(
+ content for (prefix, content) in self.get_sections("Captured stdout")
+ )
+
+ @property
+ def capstderr(self) -> str:
+ """Return captured text from stderr, if capturing is enabled.
+
+ .. versionadded:: 3.0
+ """
+ return "".join(
+ content for (prefix, content) in self.get_sections("Captured stderr")
+ )
+
+ @property
+ def passed(self) -> bool:
+ """Whether the outcome is passed."""
+ return self.outcome == "passed"
+
+ @property
+ def failed(self) -> bool:
+ """Whether the outcome is failed."""
+ return self.outcome == "failed"
+
+ @property
+ def skipped(self) -> bool:
+ """Whether the outcome is skipped."""
+ return self.outcome == "skipped"
+
+ @property
+ def fspath(self) -> str:
+ """The path portion of the reported node, as a string."""
+ return self.nodeid.split("::")[0]
+
+ @property
+ def count_towards_summary(self) -> bool:
+ """**Experimental** Whether this report should be counted towards the
+ totals shown at the end of the test session: "1 passed, 1 failure, etc".
+
+ .. note::
+
+ This function is considered **experimental**, so beware that it is subject to changes
+ even in patch releases.
+ """
+ return True
+
+ @property
+ def head_line(self) -> Optional[str]:
+ """**Experimental** The head line shown with longrepr output for this
+ report, most commonly during the traceback representation of
+ failures::
+
+ ________ Test.foo ________
+
+
+ In the example above, the head_line is "Test.foo".
+
+ .. note::
+
+ This function is considered **experimental**, so beware that it is subject to changes
+ even in patch releases.
+ """
+ if self.location is not None:
+ fspath, lineno, domain = self.location
+ return domain
+ return None
+
+ def _get_verbose_word(self, config: Config):
+ _category, _short, verbose = config.hook.pytest_report_teststatus(
+ report=self, config=config
+ )
+ return verbose
+
+ def _to_json(self) -> Dict[str, Any]:
+ """Return the contents of this report as a dict of builtin entries,
+ suitable for serialization.
+
+ This was originally the serialize_report() function from xdist (ca03269).
+
+ Experimental method.
+ """
+ return _report_to_json(self)
+
+ @classmethod
+ def _from_json(cls: Type[_R], reportdict: Dict[str, object]) -> _R:
+ """Create either a TestReport or CollectReport, depending on the calling class.
+
+ It is the caller's responsibility to know which class to pass here.
+
+ This was originally the serialize_report() function from xdist (ca03269).
+
+ Experimental method.
+ """
+ kwargs = _report_kwargs_from_json(reportdict)
+ return cls(**kwargs)
+
+
+def _report_unserialization_failure(
+ type_name: str, report_class: Type[BaseReport], reportdict
+) -> "NoReturn":
+ url = "https://github.com/pytest-dev/pytest/issues"
+ stream = StringIO()
+ pprint("-" * 100, stream=stream)
+ pprint("INTERNALERROR: Unknown entry type returned: %s" % type_name, stream=stream)
+ pprint("report_name: %s" % report_class, stream=stream)
+ pprint(reportdict, stream=stream)
+ pprint("Please report this bug at %s" % url, stream=stream)
+ pprint("-" * 100, stream=stream)
+ raise RuntimeError(stream.getvalue())
+
+
+@final
+class TestReport(BaseReport):
+ """Basic test report object (also used for setup and teardown calls if
+ they fail).
+
+ Reports can contain arbitrary extra attributes.
+ """
+
+ __test__ = False
+
+ def __init__(
+ self,
+ nodeid: str,
+ location: Tuple[str, Optional[int], str],
+ keywords,
+ outcome: "Literal['passed', 'failed', 'skipped']",
+ longrepr: Union[
+ None, ExceptionInfo[BaseException], Tuple[str, int, str], str, TerminalRepr
+ ],
+ when: "Literal['setup', 'call', 'teardown']",
+ sections: Iterable[Tuple[str, str]] = (),
+ duration: float = 0,
+ user_properties: Optional[Iterable[Tuple[str, object]]] = None,
+ **extra,
+ ) -> None:
+ #: Normalized collection nodeid.
+ self.nodeid = nodeid
+
+ #: A (filesystempath, lineno, domaininfo) tuple indicating the
+ #: actual location of a test item - it might be different from the
+ #: collected one e.g. if a method is inherited from a different module.
+ self.location: Tuple[str, Optional[int], str] = location
+
+ #: A name -> value dictionary containing all keywords and
+ #: markers associated with a test invocation.
+ self.keywords = keywords
+
+ #: Test outcome, always one of "passed", "failed", "skipped".
+ self.outcome = outcome
+
+ #: None or a failure representation.
+ self.longrepr = longrepr
+
+ #: One of 'setup', 'call', 'teardown' to indicate runtest phase.
+ self.when = when
+
+ #: User properties is a list of (name, value) tuples that holds
+ #: user-defined properties of the test.
+ self.user_properties = list(user_properties or [])
+
+ #: Tuples of str ``(heading, content)`` with extra information
+ #: for the test report. Used by pytest to add text captured
+ #: from ``stdout``, ``stderr``, and intercepted logging events. May
+ #: be used by other plugins to add arbitrary information to reports.
+ self.sections = list(sections)
+
+ #: Time it took to run just the test.
+ self.duration = duration
+
+ self.__dict__.update(extra)
+
+ def __repr__(self) -> str:
+ return "<{} {!r} when={!r} outcome={!r}>".format(
+ self.__class__.__name__, self.nodeid, self.when, self.outcome
+ )
+
+ @classmethod
+ def from_item_and_call(cls, item: Item, call: "CallInfo[None]") -> "TestReport":
+ """Create and fill a TestReport with standard item and call info."""
+ when = call.when
+ # Remove "collect" from the Literal type -- only for collection calls.
+ assert when != "collect"
+ duration = call.duration
+ keywords = {x: 1 for x in item.keywords}
+ excinfo = call.excinfo
+ sections = []
+ if not call.excinfo:
+ outcome: Literal["passed", "failed", "skipped"] = "passed"
+ longrepr: Union[
+ None,
+ ExceptionInfo[BaseException],
+ Tuple[str, int, str],
+ str,
+ TerminalRepr,
+ ] = None
+ else:
+ if not isinstance(excinfo, ExceptionInfo):
+ outcome = "failed"
+ longrepr = excinfo
+ elif isinstance(excinfo.value, skip.Exception):
+ outcome = "skipped"
+ r = excinfo._getreprcrash()
+ if excinfo.value._use_item_location:
+ path, line = item.reportinfo()[:2]
+ assert line is not None
+ longrepr = os.fspath(path), line + 1, r.message
+ else:
+ longrepr = (str(r.path), r.lineno, r.message)
+ else:
+ outcome = "failed"
+ if call.when == "call":
+ longrepr = item.repr_failure(excinfo)
+ else: # exception in setup or teardown
+ longrepr = item._repr_failure_py(
+ excinfo, style=item.config.getoption("tbstyle", "auto")
+ )
+ for rwhen, key, content in item._report_sections:
+ sections.append((f"Captured {key} {rwhen}", content))
+ return cls(
+ item.nodeid,
+ item.location,
+ keywords,
+ outcome,
+ longrepr,
+ when,
+ sections,
+ duration,
+ user_properties=item.user_properties,
+ )
+
+
+@final
+class CollectReport(BaseReport):
+ """Collection report object.
+
+ Reports can contain arbitrary extra attributes.
+ """
+
+ when = "collect"
+
+ def __init__(
+ self,
+ nodeid: str,
+ outcome: "Literal['passed', 'failed', 'skipped']",
+ longrepr: Union[
+ None, ExceptionInfo[BaseException], Tuple[str, int, str], str, TerminalRepr
+ ],
+ result: Optional[List[Union[Item, Collector]]],
+ sections: Iterable[Tuple[str, str]] = (),
+ **extra,
+ ) -> None:
+ #: Normalized collection nodeid.
+ self.nodeid = nodeid
+
+ #: Test outcome, always one of "passed", "failed", "skipped".
+ self.outcome = outcome
+
+ #: None or a failure representation.
+ self.longrepr = longrepr
+
+ #: The collected items and collection nodes.
+ self.result = result or []
+
+ #: Tuples of str ``(heading, content)`` with extra information
+ #: for the test report. Used by pytest to add text captured
+ #: from ``stdout``, ``stderr``, and intercepted logging events. May
+ #: be used by other plugins to add arbitrary information to reports.
+ self.sections = list(sections)
+
+ self.__dict__.update(extra)
+
+ @property
+ def location(self):
+ return (self.fspath, None, self.fspath)
+
+ def __repr__(self) -> str:
+ return "<CollectReport {!r} lenresult={} outcome={!r}>".format(
+ self.nodeid, len(self.result), self.outcome
+ )
+
+
+class CollectErrorRepr(TerminalRepr):
+ def __init__(self, msg: str) -> None:
+ self.longrepr = msg
+
+ def toterminal(self, out: TerminalWriter) -> None:
+ out.line(self.longrepr, red=True)
+
+
+def pytest_report_to_serializable(
+ report: Union[CollectReport, TestReport]
+) -> Optional[Dict[str, Any]]:
+ if isinstance(report, (TestReport, CollectReport)):
+ data = report._to_json()
+ data["$report_type"] = report.__class__.__name__
+ return data
+ # TODO: Check if this is actually reachable.
+ return None # type: ignore[unreachable]
+
+
+def pytest_report_from_serializable(
+ data: Dict[str, Any],
+) -> Optional[Union[CollectReport, TestReport]]:
+ if "$report_type" in data:
+ if data["$report_type"] == "TestReport":
+ return TestReport._from_json(data)
+ elif data["$report_type"] == "CollectReport":
+ return CollectReport._from_json(data)
+ assert False, "Unknown report_type when unserializing data: {}".format(
+ data["$report_type"]
+ )
+ return None
+
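+# Illustrative sketch, not part of upstream pytest: the two hooks above are what
+# plugins such as pytest-xdist use to ship reports between processes. Assuming a
+# `config` object and an existing TestReport `rep` (placeholder names), a round
+# trip could look like:
+#
+#     data = config.hook.pytest_report_to_serializable(config=config, report=rep)
+#     restored = config.hook.pytest_report_from_serializable(
+#         config=config, data=json.loads(json.dumps(data))
+#     )
+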
+
+def _report_to_json(report: BaseReport) -> Dict[str, Any]:
+ """Return the contents of this report as a dict of builtin entries,
+ suitable for serialization.
+
+ This was originally the serialize_report() function from xdist (ca03269).
+ """
+
+ def serialize_repr_entry(
+ entry: Union[ReprEntry, ReprEntryNative]
+ ) -> Dict[str, Any]:
+ data = attr.asdict(entry)
+ for key, value in data.items():
+ if hasattr(value, "__dict__"):
+ data[key] = attr.asdict(value)
+ entry_data = {"type": type(entry).__name__, "data": data}
+ return entry_data
+
+ def serialize_repr_traceback(reprtraceback: ReprTraceback) -> Dict[str, Any]:
+ result = attr.asdict(reprtraceback)
+ result["reprentries"] = [
+ serialize_repr_entry(x) for x in reprtraceback.reprentries
+ ]
+ return result
+
+ def serialize_repr_crash(
+ reprcrash: Optional[ReprFileLocation],
+ ) -> Optional[Dict[str, Any]]:
+ if reprcrash is not None:
+ return attr.asdict(reprcrash)
+ else:
+ return None
+
+ def serialize_exception_longrepr(rep: BaseReport) -> Dict[str, Any]:
+ assert rep.longrepr is not None
+ # TODO: Investigate whether the duck typing is really necessary here.
+ longrepr = cast(ExceptionRepr, rep.longrepr)
+ result: Dict[str, Any] = {
+ "reprcrash": serialize_repr_crash(longrepr.reprcrash),
+ "reprtraceback": serialize_repr_traceback(longrepr.reprtraceback),
+ "sections": longrepr.sections,
+ }
+ if isinstance(longrepr, ExceptionChainRepr):
+ result["chain"] = []
+ for repr_traceback, repr_crash, description in longrepr.chain:
+ result["chain"].append(
+ (
+ serialize_repr_traceback(repr_traceback),
+ serialize_repr_crash(repr_crash),
+ description,
+ )
+ )
+ else:
+ result["chain"] = None
+ return result
+
+ d = report.__dict__.copy()
+ if hasattr(report.longrepr, "toterminal"):
+ if hasattr(report.longrepr, "reprtraceback") and hasattr(
+ report.longrepr, "reprcrash"
+ ):
+ d["longrepr"] = serialize_exception_longrepr(report)
+ else:
+ d["longrepr"] = str(report.longrepr)
+ else:
+ d["longrepr"] = report.longrepr
+ for name in d:
+ if isinstance(d[name], os.PathLike):
+ d[name] = os.fspath(d[name])
+ elif name == "result":
+ d[name] = None # for now
+ return d
+
+
+def _report_kwargs_from_json(reportdict: Dict[str, Any]) -> Dict[str, Any]:
+ """Return **kwargs that can be used to construct a TestReport or
+ CollectReport instance.
+
+ This was originally the serialize_report() function from xdist (ca03269).
+ """
+
+ def deserialize_repr_entry(entry_data):
+ data = entry_data["data"]
+ entry_type = entry_data["type"]
+ if entry_type == "ReprEntry":
+ reprfuncargs = None
+ reprfileloc = None
+ reprlocals = None
+ if data["reprfuncargs"]:
+ reprfuncargs = ReprFuncArgs(**data["reprfuncargs"])
+ if data["reprfileloc"]:
+ reprfileloc = ReprFileLocation(**data["reprfileloc"])
+ if data["reprlocals"]:
+ reprlocals = ReprLocals(data["reprlocals"]["lines"])
+
+ reprentry: Union[ReprEntry, ReprEntryNative] = ReprEntry(
+ lines=data["lines"],
+ reprfuncargs=reprfuncargs,
+ reprlocals=reprlocals,
+ reprfileloc=reprfileloc,
+ style=data["style"],
+ )
+ elif entry_type == "ReprEntryNative":
+ reprentry = ReprEntryNative(data["lines"])
+ else:
+ _report_unserialization_failure(entry_type, TestReport, reportdict)
+ return reprentry
+
+ def deserialize_repr_traceback(repr_traceback_dict):
+ repr_traceback_dict["reprentries"] = [
+ deserialize_repr_entry(x) for x in repr_traceback_dict["reprentries"]
+ ]
+ return ReprTraceback(**repr_traceback_dict)
+
+ def deserialize_repr_crash(repr_crash_dict: Optional[Dict[str, Any]]):
+ if repr_crash_dict is not None:
+ return ReprFileLocation(**repr_crash_dict)
+ else:
+ return None
+
+ if (
+ reportdict["longrepr"]
+ and "reprcrash" in reportdict["longrepr"]
+ and "reprtraceback" in reportdict["longrepr"]
+ ):
+
+ reprtraceback = deserialize_repr_traceback(
+ reportdict["longrepr"]["reprtraceback"]
+ )
+ reprcrash = deserialize_repr_crash(reportdict["longrepr"]["reprcrash"])
+ if reportdict["longrepr"]["chain"]:
+ chain = []
+ for repr_traceback_data, repr_crash_data, description in reportdict[
+ "longrepr"
+ ]["chain"]:
+ chain.append(
+ (
+ deserialize_repr_traceback(repr_traceback_data),
+ deserialize_repr_crash(repr_crash_data),
+ description,
+ )
+ )
+ exception_info: Union[
+ ExceptionChainRepr, ReprExceptionInfo
+ ] = ExceptionChainRepr(chain)
+ else:
+ exception_info = ReprExceptionInfo(reprtraceback, reprcrash)
+
+ for section in reportdict["longrepr"]["sections"]:
+ exception_info.addsection(*section)
+ reportdict["longrepr"] = exception_info
+
+ return reportdict
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/runner.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/runner.py
new file mode 100644
index 0000000000..e43dd2dc81
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/runner.py
@@ -0,0 +1,548 @@
+"""Basic collect and runtest protocol implementations."""
+import bdb
+import os
+import sys
+import warnings
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Generic
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+import attr
+
+from .reports import BaseReport
+from .reports import CollectErrorRepr
+from .reports import CollectReport
+from .reports import TestReport
+from _pytest import timing
+from _pytest._code.code import ExceptionChainRepr
+from _pytest._code.code import ExceptionInfo
+from _pytest._code.code import TerminalRepr
+from _pytest.compat import final
+from _pytest.config.argparsing import Parser
+from _pytest.deprecated import check_ispytest
+from _pytest.deprecated import UNITTEST_SKIP_DURING_COLLECTION
+from _pytest.nodes import Collector
+from _pytest.nodes import Item
+from _pytest.nodes import Node
+from _pytest.outcomes import Exit
+from _pytest.outcomes import OutcomeException
+from _pytest.outcomes import Skipped
+from _pytest.outcomes import TEST_OUTCOME
+
+if TYPE_CHECKING:
+ from typing_extensions import Literal
+
+ from _pytest.main import Session
+ from _pytest.terminal import TerminalReporter
+
+#
+# pytest plugin hooks.
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("terminal reporting", "reporting", after="general")
+ group.addoption(
+ "--durations",
+ action="store",
+ type=int,
+ default=None,
+ metavar="N",
+ help="show N slowest setup/test durations (N=0 for all).",
+ )
+ group.addoption(
+ "--durations-min",
+ action="store",
+ type=float,
+ default=0.005,
+ metavar="N",
+ help="Minimal duration in seconds for inclusion in slowest list. Default 0.005",
+ )
+
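+# Illustrative command lines for the options above, not part of upstream code:
+#
+#     pytest --durations=10                      # ten slowest setup/call/teardown phases
+#     pytest --durations=0 --durations-min=1.0   # every phase taking at least one second
+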
+
+def pytest_terminal_summary(terminalreporter: "TerminalReporter") -> None:
+ durations = terminalreporter.config.option.durations
+ durations_min = terminalreporter.config.option.durations_min
+ verbose = terminalreporter.config.getvalue("verbose")
+ if durations is None:
+ return
+ tr = terminalreporter
+ dlist = []
+ for replist in tr.stats.values():
+ for rep in replist:
+ if hasattr(rep, "duration"):
+ dlist.append(rep)
+ if not dlist:
+ return
+ dlist.sort(key=lambda x: x.duration, reverse=True) # type: ignore[no-any-return]
+ if not durations:
+ tr.write_sep("=", "slowest durations")
+ else:
+ tr.write_sep("=", "slowest %s durations" % durations)
+ dlist = dlist[:durations]
+
+ for i, rep in enumerate(dlist):
+ if verbose < 2 and rep.duration < durations_min:
+ tr.write_line("")
+ tr.write_line(
+ "(%s durations < %gs hidden. Use -vv to show these durations.)"
+ % (len(dlist) - i, durations_min)
+ )
+ break
+ tr.write_line(f"{rep.duration:02.2f}s {rep.when:<8} {rep.nodeid}")
+
+
+def pytest_sessionstart(session: "Session") -> None:
+ session._setupstate = SetupState()
+
+
+def pytest_sessionfinish(session: "Session") -> None:
+ session._setupstate.teardown_exact(None)
+
+
+def pytest_runtest_protocol(item: Item, nextitem: Optional[Item]) -> bool:
+ ihook = item.ihook
+ ihook.pytest_runtest_logstart(nodeid=item.nodeid, location=item.location)
+ runtestprotocol(item, nextitem=nextitem)
+ ihook.pytest_runtest_logfinish(nodeid=item.nodeid, location=item.location)
+ return True
+
+
+def runtestprotocol(
+ item: Item, log: bool = True, nextitem: Optional[Item] = None
+) -> List[TestReport]:
+ hasrequest = hasattr(item, "_request")
+ if hasrequest and not item._request: # type: ignore[attr-defined]
+ # This only happens if the item is re-run, as is done by
+ # pytest-rerunfailures.
+ item._initrequest() # type: ignore[attr-defined]
+ rep = call_and_report(item, "setup", log)
+ reports = [rep]
+ if rep.passed:
+ if item.config.getoption("setupshow", False):
+ show_test_item(item)
+ if not item.config.getoption("setuponly", False):
+ reports.append(call_and_report(item, "call", log))
+ reports.append(call_and_report(item, "teardown", log, nextitem=nextitem))
+ # After all teardown hooks have been called,
+ # we want funcargs and request info to go away.
+ if hasrequest:
+ item._request = False # type: ignore[attr-defined]
+ item.funcargs = None # type: ignore[attr-defined]
+ return reports
+
+
+def show_test_item(item: Item) -> None:
+ """Show test function, parameters and the fixtures of the test item."""
+ tw = item.config.get_terminal_writer()
+ tw.line()
+ tw.write(" " * 8)
+ tw.write(item.nodeid)
+ used_fixtures = sorted(getattr(item, "fixturenames", []))
+ if used_fixtures:
+ tw.write(" (fixtures used: {})".format(", ".join(used_fixtures)))
+ tw.flush()
+
+
+def pytest_runtest_setup(item: Item) -> None:
+ _update_current_test_var(item, "setup")
+ item.session._setupstate.setup(item)
+
+
+def pytest_runtest_call(item: Item) -> None:
+ _update_current_test_var(item, "call")
+ try:
+ del sys.last_type
+ del sys.last_value
+ del sys.last_traceback
+ except AttributeError:
+ pass
+ try:
+ item.runtest()
+ except Exception as e:
+ # Store trace info to allow postmortem debugging
+ sys.last_type = type(e)
+ sys.last_value = e
+ assert e.__traceback__ is not None
+ # Skip *this* frame
+ sys.last_traceback = e.__traceback__.tb_next
+ raise e
+
+
+def pytest_runtest_teardown(item: Item, nextitem: Optional[Item]) -> None:
+ _update_current_test_var(item, "teardown")
+ item.session._setupstate.teardown_exact(nextitem)
+ _update_current_test_var(item, None)
+
+
+def _update_current_test_var(
+ item: Item, when: Optional["Literal['setup', 'call', 'teardown']"]
+) -> None:
+ """Update :envvar:`PYTEST_CURRENT_TEST` to reflect the current item and stage.
+
+ If ``when`` is None, delete ``PYTEST_CURRENT_TEST`` from the environment.
+ """
+ var_name = "PYTEST_CURRENT_TEST"
+ if when:
+ value = f"{item.nodeid} ({when})"
+ # don't allow null bytes on environment variables (see #2644, #2957)
+ value = value.replace("\x00", "(null)")
+ os.environ[var_name] = value
+ else:
+ os.environ.pop(var_name)
+
+
+def pytest_report_teststatus(report: BaseReport) -> Optional[Tuple[str, str, str]]:
+ if report.when in ("setup", "teardown"):
+ if report.failed:
+ # category, shortletter, verbose-word
+ return "error", "E", "ERROR"
+ elif report.skipped:
+ return "skipped", "s", "SKIPPED"
+ else:
+ return "", "", ""
+ return None
+
+
+#
+# Implementation
+
+
+def call_and_report(
+ item: Item, when: "Literal['setup', 'call', 'teardown']", log: bool = True, **kwds
+) -> TestReport:
+ call = call_runtest_hook(item, when, **kwds)
+ hook = item.ihook
+ report: TestReport = hook.pytest_runtest_makereport(item=item, call=call)
+ if log:
+ hook.pytest_runtest_logreport(report=report)
+ if check_interactive_exception(call, report):
+ hook.pytest_exception_interact(node=item, call=call, report=report)
+ return report
+
+
+def check_interactive_exception(call: "CallInfo[object]", report: BaseReport) -> bool:
+ """Check whether the call raised an exception that should be reported as
+ interactive."""
+ if call.excinfo is None:
+ # Didn't raise.
+ return False
+ if hasattr(report, "wasxfail"):
+ # Exception was expected.
+ return False
+ if isinstance(call.excinfo.value, (Skipped, bdb.BdbQuit)):
+ # Special control flow exception.
+ return False
+ return True
+
+
+def call_runtest_hook(
+ item: Item, when: "Literal['setup', 'call', 'teardown']", **kwds
+) -> "CallInfo[None]":
+ if when == "setup":
+ ihook: Callable[..., None] = item.ihook.pytest_runtest_setup
+ elif when == "call":
+ ihook = item.ihook.pytest_runtest_call
+ elif when == "teardown":
+ ihook = item.ihook.pytest_runtest_teardown
+ else:
+ assert False, f"Unhandled runtest hook case: {when}"
+ reraise: Tuple[Type[BaseException], ...] = (Exit,)
+ if not item.config.getoption("usepdb", False):
+ reraise += (KeyboardInterrupt,)
+ return CallInfo.from_call(
+ lambda: ihook(item=item, **kwds), when=when, reraise=reraise
+ )
+
+
+TResult = TypeVar("TResult", covariant=True)
+
+
+@final
+@attr.s(repr=False, init=False, auto_attribs=True)
+class CallInfo(Generic[TResult]):
+ """Result/Exception info of a function invocation."""
+
+ _result: Optional[TResult]
+ #: The captured exception of the call, if it raised.
+ excinfo: Optional[ExceptionInfo[BaseException]]
+ #: The system time when the call started, in seconds since the epoch.
+ start: float
+ #: The system time when the call ended, in seconds since the epoch.
+ stop: float
+ #: The call duration, in seconds.
+ duration: float
+ #: The context of invocation: "collect", "setup", "call" or "teardown".
+ when: "Literal['collect', 'setup', 'call', 'teardown']"
+
+ def __init__(
+ self,
+ result: Optional[TResult],
+ excinfo: Optional[ExceptionInfo[BaseException]],
+ start: float,
+ stop: float,
+ duration: float,
+ when: "Literal['collect', 'setup', 'call', 'teardown']",
+ *,
+ _ispytest: bool = False,
+ ) -> None:
+ check_ispytest(_ispytest)
+ self._result = result
+ self.excinfo = excinfo
+ self.start = start
+ self.stop = stop
+ self.duration = duration
+ self.when = when
+
+ @property
+ def result(self) -> TResult:
+ """The return value of the call, if it didn't raise.
+
+ Can only be accessed if excinfo is None.
+ """
+ if self.excinfo is not None:
+ raise AttributeError(f"{self!r} has no valid result")
+ # The cast is safe because an exception wasn't raised, hence
+ # _result has the expected function return type (which may be
+ # None, that's why a cast and not an assert).
+ return cast(TResult, self._result)
+
+ @classmethod
+ def from_call(
+ cls,
+ func: "Callable[[], TResult]",
+ when: "Literal['collect', 'setup', 'call', 'teardown']",
+ reraise: Optional[
+ Union[Type[BaseException], Tuple[Type[BaseException], ...]]
+ ] = None,
+ ) -> "CallInfo[TResult]":
+ """Call func, wrapping the result in a CallInfo.
+
+ :param func:
+ The function to call. Called without arguments.
+ :param when:
+ The phase in which the function is called.
+ :param reraise:
+ Exception or exceptions that shall propagate if raised by the
+ function, instead of being wrapped in the CallInfo.
+ """
+ excinfo = None
+ start = timing.time()
+ precise_start = timing.perf_counter()
+ try:
+ result: Optional[TResult] = func()
+ except BaseException:
+ excinfo = ExceptionInfo.from_current()
+ if reraise is not None and isinstance(excinfo.value, reraise):
+ raise
+ result = None
+ # use the perf counter
+ precise_stop = timing.perf_counter()
+ duration = precise_stop - precise_start
+ stop = timing.time()
+ return cls(
+ start=start,
+ stop=stop,
+ duration=duration,
+ when=when,
+ result=result,
+ excinfo=excinfo,
+ _ispytest=True,
+ )
+
+ def __repr__(self) -> str:
+ if self.excinfo is None:
+ return f"<CallInfo when={self.when!r} result: {self._result!r}>"
+ return f"<CallInfo when={self.when!r} excinfo={self.excinfo!r}>"
+
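+# Minimal sketch, not upstream code: CallInfo.from_call captures either the
+# result or the exception of a zero-argument callable.
+#
+#     ok = CallInfo.from_call(lambda: 42, when="call")
+#     assert ok.result == 42 and ok.excinfo is None
+#
+#     bad = CallInfo.from_call(lambda: 1 / 0, when="call")
+#     assert bad.excinfo is not None  # accessing bad.result raises AttributeError
+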
+
+def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> TestReport:
+ return TestReport.from_item_and_call(item, call)
+
+
+def pytest_make_collect_report(collector: Collector) -> CollectReport:
+ call = CallInfo.from_call(lambda: list(collector.collect()), "collect")
+ longrepr: Union[None, Tuple[str, int, str], str, TerminalRepr] = None
+ if not call.excinfo:
+ outcome: Literal["passed", "skipped", "failed"] = "passed"
+ else:
+ skip_exceptions = [Skipped]
+ unittest = sys.modules.get("unittest")
+ if unittest is not None:
+ # Type ignored because unittest is loaded dynamically.
+ skip_exceptions.append(unittest.SkipTest) # type: ignore
+ if isinstance(call.excinfo.value, tuple(skip_exceptions)):
+ if unittest is not None and isinstance(
+ call.excinfo.value, unittest.SkipTest # type: ignore[attr-defined]
+ ):
+ warnings.warn(UNITTEST_SKIP_DURING_COLLECTION, stacklevel=2)
+
+ outcome = "skipped"
+ r_ = collector._repr_failure_py(call.excinfo, "line")
+ assert isinstance(r_, ExceptionChainRepr), repr(r_)
+ r = r_.reprcrash
+ assert r
+ longrepr = (str(r.path), r.lineno, r.message)
+ else:
+ outcome = "failed"
+ errorinfo = collector.repr_failure(call.excinfo)
+ if not hasattr(errorinfo, "toterminal"):
+ assert isinstance(errorinfo, str)
+ errorinfo = CollectErrorRepr(errorinfo)
+ longrepr = errorinfo
+ result = call.result if not call.excinfo else None
+ rep = CollectReport(collector.nodeid, outcome, longrepr, result)
+ rep.call = call # type: ignore # see collect_one_node
+ return rep
+
+
+class SetupState:
+ """Shared state for setting up/tearing down test items or collectors
+ in a session.
+
+ Suppose we have a collection tree as follows:
+
+ <Session session>
+ <Module mod1>
+ <Function item1>
+ <Module mod2>
+ <Function item2>
+
+ The SetupState maintains a stack. The stack starts out empty:
+
+ []
+
+ During the setup phase of item1, setup(item1) is called. What it does
+ is:
+
+ push session to stack, run session.setup()
+ push mod1 to stack, run mod1.setup()
+ push item1 to stack, run item1.setup()
+
+ The stack is:
+
+ [session, mod1, item1]
+
+ While the stack is in this shape, it is allowed to add finalizers to
+ each of session, mod1, item1 using addfinalizer().
+
+ During the teardown phase of item1, teardown_exact(item2) is called,
+ where item2 is the item that follows item1. What it does is:
+
+ pop item1 from stack, run its teardowns
+ pop mod1 from stack, run its teardowns
+
+ mod1 was popped because it ended its purpose with item1. The stack is:
+
+ [session]
+
+ During the setup phase of item2, setup(item2) is called. What it does
+ is:
+
+ push mod2 to stack, run mod2.setup()
+ push item2 to stack, run item2.setup()
+
+ Stack:
+
+ [session, mod2, item2]
+
+ During the teardown phase of item2, teardown_exact(None) is called,
+ because item2 is the last item. What it does is:
+
+ pop item2 from stack, run its teardowns
+ pop mod2 from stack, run its teardowns
+ pop session from stack, run its teardowns
+
+ Stack:
+
+ []
+
+ The end!
+ """
+
+ def __init__(self) -> None:
+ # The stack is in the dict insertion order.
+ self.stack: Dict[
+ Node,
+ Tuple[
+ # Node's finalizers.
+ List[Callable[[], object]],
+ # Node's exception, if its setup raised.
+ Optional[Union[OutcomeException, Exception]],
+ ],
+ ] = {}
+
+ def setup(self, item: Item) -> None:
+ """Setup objects along the collector chain to the item."""
+ needed_collectors = item.listchain()
+
+ # If a collector fails its setup, fail its entire subtree of items.
+ # The setup is not retried for each item - the same exception is used.
+ for col, (finalizers, exc) in self.stack.items():
+ assert col in needed_collectors, "previous item was not torn down properly"
+ if exc:
+ raise exc
+
+ for col in needed_collectors[len(self.stack) :]:
+ assert col not in self.stack
+ # Push onto the stack.
+ self.stack[col] = ([col.teardown], None)
+ try:
+ col.setup()
+ except TEST_OUTCOME as exc:
+ self.stack[col] = (self.stack[col][0], exc)
+ raise exc
+
+ def addfinalizer(self, finalizer: Callable[[], object], node: Node) -> None:
+ """Attach a finalizer to the given node.
+
+ The node must be currently active in the stack.
+ """
+ assert node and not isinstance(node, tuple)
+ assert callable(finalizer)
+ assert node in self.stack, (node, self.stack)
+ self.stack[node][0].append(finalizer)
+
+ def teardown_exact(self, nextitem: Optional[Item]) -> None:
+ """Teardown the current stack up until reaching nodes that nextitem
+ also descends from.
+
+ When nextitem is None (meaning we're at the last item), the entire
+ stack is torn down.
+ """
+ needed_collectors = nextitem and nextitem.listchain() or []
+ exc = None
+ while self.stack:
+ if list(self.stack.keys()) == needed_collectors[: len(self.stack)]:
+ break
+ node, (finalizers, _) = self.stack.popitem()
+ while finalizers:
+ fin = finalizers.pop()
+ try:
+ fin()
+ except TEST_OUTCOME as e:
+ # XXX Only first exception will be seen by user,
+ # ideally all should be reported.
+ if exc is None:
+ exc = e
+ if exc:
+ raise exc
+ if nextitem is None:
+ assert not self.stack
+
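+# Illustrative sketch, not upstream code, of the life cycle described in the
+# SetupState docstring, assuming a collected `item` whose chain is
+# [session, module, item]:
+#
+#     state = SetupState()
+#     state.setup(item)                               # pushes session, module, item
+#     state.addfinalizer(lambda: print("bye"), item)  # runs when item is popped
+#     state.teardown_exact(None)                      # pops everything, prints "bye"
+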
+
+def collect_one_node(collector: Collector) -> CollectReport:
+ ihook = collector.ihook
+ ihook.pytest_collectstart(collector=collector)
+ rep: CollectReport = ihook.pytest_make_collect_report(collector=collector)
+ call = rep.__dict__.pop("call", None)
+ if call and check_interactive_exception(call, rep):
+ ihook.pytest_exception_interact(node=collector, call=call, report=rep)
+ return rep
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/scope.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/scope.py
new file mode 100644
index 0000000000..7a746fb9fa
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/scope.py
@@ -0,0 +1,91 @@
+"""
+Scope definition and related utilities.
+
+These are defined here instead of in the 'fixtures' module because their use
+is spread across many other pytest modules, and centralizing them in
+'fixtures' would cause circular references.
+
+This also keeps the module light to import, as it should be.
+"""
+from enum import Enum
+from functools import total_ordering
+from typing import Optional
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from typing_extensions import Literal
+
+ _ScopeName = Literal["session", "package", "module", "class", "function"]
+
+
+@total_ordering
+class Scope(Enum):
+ """
+ Represents one of the possible fixture scopes in pytest.
+
+ Scopes are ordered from lower to higher, that is:
+
+ ->>> higher ->>>
+
+ Function < Class < Module < Package < Session
+
+ <<<- lower <<<-
+ """
+
+ # Scopes need to be listed from lower to higher.
+ Function: "_ScopeName" = "function"
+ Class: "_ScopeName" = "class"
+ Module: "_ScopeName" = "module"
+ Package: "_ScopeName" = "package"
+ Session: "_ScopeName" = "session"
+
+ def next_lower(self) -> "Scope":
+ """Return the next lower scope."""
+ index = _SCOPE_INDICES[self]
+ if index == 0:
+ raise ValueError(f"{self} is the lower-most scope")
+ return _ALL_SCOPES[index - 1]
+
+ def next_higher(self) -> "Scope":
+ """Return the next higher scope."""
+ index = _SCOPE_INDICES[self]
+ if index == len(_SCOPE_INDICES) - 1:
+ raise ValueError(f"{self} is the upper-most scope")
+ return _ALL_SCOPES[index + 1]
+
+ def __lt__(self, other: "Scope") -> bool:
+ self_index = _SCOPE_INDICES[self]
+ other_index = _SCOPE_INDICES[other]
+ return self_index < other_index
+
+ @classmethod
+ def from_user(
+ cls, scope_name: "_ScopeName", descr: str, where: Optional[str] = None
+ ) -> "Scope":
+ """
+ Given a scope name from the user, return the equivalent Scope enum. Should be used
+ whenever we want to convert a user-provided scope name to its enum object.
+
+ If the scope name is invalid, construct a user-friendly message and call pytest.fail.
+ """
+ from _pytest.outcomes import fail
+
+ try:
+ # Holding this reference is necessary for mypy at the moment.
+ scope = Scope(scope_name)
+ except ValueError:
+ fail(
+ "{} {}got an unexpected scope value '{}'".format(
+ descr, f"from {where} " if where else "", scope_name
+ ),
+ pytrace=False,
+ )
+ return scope
+
+
+_ALL_SCOPES = list(Scope)
+_SCOPE_INDICES = {scope: index for index, scope in enumerate(_ALL_SCOPES)}
+
+
+# Ordered list of scopes which can contain many tests (in practice all except Function).
+HIGH_SCOPES = [x for x in Scope if x is not Scope.Function]
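+
+
+# Illustrative usage, not part of this module: Scope values compare by their
+# position in the ordering above, for example:
+#
+#     assert Scope.Function < Scope.Session
+#     assert Scope.Module.next_higher() is Scope.Package
+#     assert Scope.from_user("class", "fixture 'db'") is Scope.Class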
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/setuponly.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/setuponly.py
new file mode 100644
index 0000000000..531131ce72
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/setuponly.py
@@ -0,0 +1,97 @@
+from typing import Generator
+from typing import Optional
+from typing import Union
+
+import pytest
+from _pytest._io.saferepr import saferepr
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.config.argparsing import Parser
+from _pytest.fixtures import FixtureDef
+from _pytest.fixtures import SubRequest
+from _pytest.scope import Scope
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("debugconfig")
+ group.addoption(
+ "--setuponly",
+ "--setup-only",
+ action="store_true",
+ help="only setup fixtures, do not execute tests.",
+ )
+ group.addoption(
+ "--setupshow",
+ "--setup-show",
+ action="store_true",
+ help="show setup of fixtures while executing tests.",
+ )
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_fixture_setup(
+ fixturedef: FixtureDef[object], request: SubRequest
+) -> Generator[None, None, None]:
+ yield
+ if request.config.option.setupshow:
+ if hasattr(request, "param"):
+ # Save the fixture parameter so ._show_fixture_action() can
+ # display it now and during the teardown (in .finish()).
+ if fixturedef.ids:
+ if callable(fixturedef.ids):
+ param = fixturedef.ids(request.param)
+ else:
+ param = fixturedef.ids[request.param_index]
+ else:
+ param = request.param
+ fixturedef.cached_param = param # type: ignore[attr-defined]
+ _show_fixture_action(fixturedef, "SETUP")
+
+
+def pytest_fixture_post_finalizer(fixturedef: FixtureDef[object]) -> None:
+ if fixturedef.cached_result is not None:
+ config = fixturedef._fixturemanager.config
+ if config.option.setupshow:
+ _show_fixture_action(fixturedef, "TEARDOWN")
+ if hasattr(fixturedef, "cached_param"):
+ del fixturedef.cached_param # type: ignore[attr-defined]
+
+
+def _show_fixture_action(fixturedef: FixtureDef[object], msg: str) -> None:
+ config = fixturedef._fixturemanager.config
+ capman = config.pluginmanager.getplugin("capturemanager")
+ if capman:
+ capman.suspend_global_capture()
+
+ tw = config.get_terminal_writer()
+ tw.line()
+ # Use smaller indentation the higher the scope: Session = 0, Package = 1, etc.
+ scope_indent = list(reversed(Scope)).index(fixturedef._scope)
+ tw.write(" " * 2 * scope_indent)
+ tw.write(
+ "{step} {scope} {fixture}".format(
+ step=msg.ljust(8), # align the output to TEARDOWN
+ scope=fixturedef.scope[0].upper(),
+ fixture=fixturedef.argname,
+ )
+ )
+
+ if msg == "SETUP":
+ deps = sorted(arg for arg in fixturedef.argnames if arg != "request")
+ if deps:
+ tw.write(" (fixtures used: {})".format(", ".join(deps)))
+
+ if hasattr(fixturedef, "cached_param"):
+ tw.write(f"[{saferepr(fixturedef.cached_param, maxsize=42)}]") # type: ignore[attr-defined]
+
+ tw.flush()
+
+ if capman:
+ capman.resume_global_capture()
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:
+ if config.option.setuponly:
+ config.option.setupshow = True
+ return None
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/setupplan.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/setupplan.py
new file mode 100644
index 0000000000..9ba81ccaf0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/setupplan.py
@@ -0,0 +1,40 @@
+from typing import Optional
+from typing import Union
+
+import pytest
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.config.argparsing import Parser
+from _pytest.fixtures import FixtureDef
+from _pytest.fixtures import SubRequest
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("debugconfig")
+ group.addoption(
+ "--setupplan",
+ "--setup-plan",
+ action="store_true",
+ help="show what fixtures and tests would be executed but "
+ "don't execute anything.",
+ )
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_fixture_setup(
+ fixturedef: FixtureDef[object], request: SubRequest
+) -> Optional[object]:
+ # Will return a dummy fixture if the setupplan option is provided.
+ if request.config.option.setupplan:
+ my_cache_key = fixturedef.cache_key(request)
+ fixturedef.cached_result = (None, my_cache_key, None)
+ return fixturedef.cached_result
+ return None
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:
+ if config.option.setupplan:
+ config.option.setuponly = True
+ config.option.setupshow = True
+ return None
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/skipping.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/skipping.py
new file mode 100644
index 0000000000..ac7216f838
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/skipping.py
@@ -0,0 +1,296 @@
+"""Support for skip/xfail functions and markers."""
+import os
+import platform
+import sys
+import traceback
+from collections.abc import Mapping
+from typing import Generator
+from typing import Optional
+from typing import Tuple
+from typing import Type
+
+import attr
+
+from _pytest.config import Config
+from _pytest.config import hookimpl
+from _pytest.config.argparsing import Parser
+from _pytest.mark.structures import Mark
+from _pytest.nodes import Item
+from _pytest.outcomes import fail
+from _pytest.outcomes import skip
+from _pytest.outcomes import xfail
+from _pytest.reports import BaseReport
+from _pytest.runner import CallInfo
+from _pytest.stash import StashKey
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("general")
+ group.addoption(
+ "--runxfail",
+ action="store_true",
+ dest="runxfail",
+ default=False,
+ help="report the results of xfail tests as if they were not marked",
+ )
+
+ parser.addini(
+ "xfail_strict",
+ "default for the strict parameter of xfail "
+ "markers when not given explicitly (default: False)",
+ default=False,
+ type="bool",
+ )
+
+
+def pytest_configure(config: Config) -> None:
+ if config.option.runxfail:
+ # yay a hack
+ import pytest
+
+ old = pytest.xfail
+ config.add_cleanup(lambda: setattr(pytest, "xfail", old))
+
+ def nop(*args, **kwargs):
+ pass
+
+ nop.Exception = xfail.Exception # type: ignore[attr-defined]
+ setattr(pytest, "xfail", nop)
+
+ config.addinivalue_line(
+ "markers",
+ "skip(reason=None): skip the given test function with an optional reason. "
+ 'Example: skip(reason="no way of currently testing this") skips the '
+ "test.",
+ )
+ config.addinivalue_line(
+ "markers",
+ "skipif(condition, ..., *, reason=...): "
+ "skip the given test function if any of the conditions evaluate to True. "
+ "Example: skipif(sys.platform == 'win32') skips the test if we are on the win32 platform. "
+ "See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-skipif",
+ )
+ config.addinivalue_line(
+ "markers",
+ "xfail(condition, ..., *, reason=..., run=True, raises=None, strict=xfail_strict): "
+ "mark the test function as an expected failure if any of the conditions "
+ "evaluate to True. Optionally specify a reason for better reporting "
+ "and run=False if you don't even want to execute the test function. "
+ "If only specific exception(s) are expected, you can list them in "
+ "raises, and if the test fails in other ways, it will be reported as "
+ "a true failure. See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-xfail",
+ )
+
+
+def evaluate_condition(item: Item, mark: Mark, condition: object) -> Tuple[bool, str]:
+ """Evaluate a single skipif/xfail condition.
+
+ If an old-style string condition is given, it is eval()'d, otherwise the
+ condition is bool()'d. If this fails, an appropriately formatted pytest.fail
+ is raised.
+
+ Returns (result, reason). The reason is only relevant if the result is True.
+ """
+ # String condition.
+ if isinstance(condition, str):
+ globals_ = {
+ "os": os,
+ "sys": sys,
+ "platform": platform,
+ "config": item.config,
+ }
+ for dictionary in reversed(
+ item.ihook.pytest_markeval_namespace(config=item.config)
+ ):
+ if not isinstance(dictionary, Mapping):
+ raise ValueError(
+ "pytest_markeval_namespace() needs to return a dict, got {!r}".format(
+ dictionary
+ )
+ )
+ globals_.update(dictionary)
+ if hasattr(item, "obj"):
+ globals_.update(item.obj.__globals__) # type: ignore[attr-defined]
+ try:
+ filename = f"<{mark.name} condition>"
+ condition_code = compile(condition, filename, "eval")
+ result = eval(condition_code, globals_)
+ except SyntaxError as exc:
+ msglines = [
+ "Error evaluating %r condition" % mark.name,
+ " " + condition,
+ " " + " " * (exc.offset or 0) + "^",
+ "SyntaxError: invalid syntax",
+ ]
+ fail("\n".join(msglines), pytrace=False)
+ except Exception as exc:
+ msglines = [
+ "Error evaluating %r condition" % mark.name,
+ " " + condition,
+ *traceback.format_exception_only(type(exc), exc),
+ ]
+ fail("\n".join(msglines), pytrace=False)
+
+ # Boolean condition.
+ else:
+ try:
+ result = bool(condition)
+ except Exception as exc:
+ msglines = [
+ "Error evaluating %r condition as a boolean" % mark.name,
+ *traceback.format_exception_only(type(exc), exc),
+ ]
+ fail("\n".join(msglines), pytrace=False)
+
+ reason = mark.kwargs.get("reason", None)
+ if reason is None:
+ if isinstance(condition, str):
+ reason = "condition: " + condition
+ else:
+ # XXX better be checked at collection time
+ msg = (
+ "Error evaluating %r: " % mark.name
+ + "you need to specify reason=STRING when using booleans as conditions."
+ )
+ fail(msg, pytrace=False)
+
+ return result, reason
+
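+# Illustrative marker forms handled by evaluate_condition(), not upstream code:
+#
+#     @pytest.mark.skipif("sys.platform == 'win32'")      # string: compiled and eval()'d
+#     @pytest.mark.skipif(sys.version_info < (3, 8),
+#                         reason="needs Python 3.8+")      # boolean: reason= is required
+#     def test_something(): ...
+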
+
+@attr.s(slots=True, frozen=True, auto_attribs=True)
+class Skip:
+ """The result of evaluate_skip_marks()."""
+
+ reason: str = "unconditional skip"
+
+
+def evaluate_skip_marks(item: Item) -> Optional[Skip]:
+ """Evaluate skip and skipif marks on item, returning Skip if triggered."""
+ for mark in item.iter_markers(name="skipif"):
+ if "condition" not in mark.kwargs:
+ conditions = mark.args
+ else:
+ conditions = (mark.kwargs["condition"],)
+
+ # Unconditional.
+ if not conditions:
+ reason = mark.kwargs.get("reason", "")
+ return Skip(reason)
+
+ # If any of the conditions are true.
+ for condition in conditions:
+ result, reason = evaluate_condition(item, mark, condition)
+ if result:
+ return Skip(reason)
+
+ for mark in item.iter_markers(name="skip"):
+ try:
+ return Skip(*mark.args, **mark.kwargs)
+ except TypeError as e:
+ raise TypeError(str(e) + " - maybe you meant pytest.mark.skipif?") from None
+
+ return None
+
+
+@attr.s(slots=True, frozen=True, auto_attribs=True)
+class Xfail:
+ """The result of evaluate_xfail_marks()."""
+
+ reason: str
+ run: bool
+ strict: bool
+ raises: Optional[Tuple[Type[BaseException], ...]]
+
+
+def evaluate_xfail_marks(item: Item) -> Optional[Xfail]:
+ """Evaluate xfail marks on item, returning Xfail if triggered."""
+ for mark in item.iter_markers(name="xfail"):
+ run = mark.kwargs.get("run", True)
+ strict = mark.kwargs.get("strict", item.config.getini("xfail_strict"))
+ raises = mark.kwargs.get("raises", None)
+ if "condition" not in mark.kwargs:
+ conditions = mark.args
+ else:
+ conditions = (mark.kwargs["condition"],)
+
+ # Unconditional.
+ if not conditions:
+ reason = mark.kwargs.get("reason", "")
+ return Xfail(reason, run, strict, raises)
+
+ # If any of the conditions are true.
+ for condition in conditions:
+ result, reason = evaluate_condition(item, mark, condition)
+ if result:
+ return Xfail(reason, run, strict, raises)
+
+ return None
+
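+# Illustrative xfail forms handled by evaluate_xfail_marks(), not upstream code:
+#
+#     @pytest.mark.xfail(reason="known flaky")                        # unconditional
+#     @pytest.mark.xfail(sys.platform == "win32", reason="win bug",
+#                        raises=OSError, strict=True, run=True)       # conditional
+#     def test_example(): ...
+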
+
+# Saves the xfail mark evaluation. Can be refreshed during call if None.
+xfailed_key = StashKey[Optional[Xfail]]()
+
+
+@hookimpl(tryfirst=True)
+def pytest_runtest_setup(item: Item) -> None:
+ skipped = evaluate_skip_marks(item)
+ if skipped:
+ raise skip.Exception(skipped.reason, _use_item_location=True)
+
+ item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item)
+ if xfailed and not item.config.option.runxfail and not xfailed.run:
+ xfail("[NOTRUN] " + xfailed.reason)
+
+
+@hookimpl(hookwrapper=True)
+def pytest_runtest_call(item: Item) -> Generator[None, None, None]:
+ xfailed = item.stash.get(xfailed_key, None)
+ if xfailed is None:
+ item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item)
+
+ if xfailed and not item.config.option.runxfail and not xfailed.run:
+ xfail("[NOTRUN] " + xfailed.reason)
+
+ yield
+
+ # The test run may have added an xfail mark dynamically.
+ xfailed = item.stash.get(xfailed_key, None)
+ if xfailed is None:
+ item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item)
+
+
+@hookimpl(hookwrapper=True)
+def pytest_runtest_makereport(item: Item, call: CallInfo[None]):
+ outcome = yield
+ rep = outcome.get_result()
+ xfailed = item.stash.get(xfailed_key, None)
+ if item.config.option.runxfail:
+ pass # don't interfere
+ elif call.excinfo and isinstance(call.excinfo.value, xfail.Exception):
+ assert call.excinfo.value.msg is not None
+ rep.wasxfail = "reason: " + call.excinfo.value.msg
+ rep.outcome = "skipped"
+ elif not rep.skipped and xfailed:
+ if call.excinfo:
+ raises = xfailed.raises
+ if raises is not None and not isinstance(call.excinfo.value, raises):
+ rep.outcome = "failed"
+ else:
+ rep.outcome = "skipped"
+ rep.wasxfail = xfailed.reason
+ elif call.when == "call":
+ if xfailed.strict:
+ rep.outcome = "failed"
+ rep.longrepr = "[XPASS(strict)] " + xfailed.reason
+ else:
+ rep.outcome = "passed"
+ rep.wasxfail = xfailed.reason
+
+
+def pytest_report_teststatus(report: BaseReport) -> Optional[Tuple[str, str, str]]:
+ if hasattr(report, "wasxfail"):
+ if report.skipped:
+ return "xfailed", "x", "XFAIL"
+ elif report.passed:
+ return "xpassed", "X", "XPASS"
+ return None
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/stash.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/stash.py
new file mode 100644
index 0000000000..e61d75b95f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/stash.py
@@ -0,0 +1,112 @@
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Generic
+from typing import TypeVar
+from typing import Union
+
+
+__all__ = ["Stash", "StashKey"]
+
+
+T = TypeVar("T")
+D = TypeVar("D")
+
+
+class StashKey(Generic[T]):
+ """``StashKey`` is an object used as a key to a :class:`Stash`.
+
+ A ``StashKey`` is associated with the type ``T`` of the value of the key.
+
+ A ``StashKey`` is unique and cannot conflict with another key.
+ """
+
+ __slots__ = ()
+
+
+class Stash:
+ r"""``Stash`` is a type-safe heterogeneous mutable mapping that
+ allows keys and value types to be defined separately from
+ where it (the ``Stash``) is created.
+
+ Usually you will be given an object which has a ``Stash``, for example
+ :class:`~pytest.Config` or a :class:`~_pytest.nodes.Node`:
+
+ .. code-block:: python
+
+ stash: Stash = some_object.stash
+
+ If a module or plugin wants to store data in this ``Stash``, it creates
+ :class:`StashKey`\s for its keys (at the module level):
+
+ .. code-block:: python
+
+ # At the top-level of the module
+ some_str_key = StashKey[str]()
+ some_bool_key = StashKey[bool]()
+
+ To store information:
+
+ .. code-block:: python
+
+ # Value type must match the key.
+ stash[some_str_key] = "value"
+ stash[some_bool_key] = True
+
+ To retrieve the information:
+
+ .. code-block:: python
+
+ # The static type of some_str is str.
+ some_str = stash[some_str_key]
+ # The static type of some_bool is bool.
+ some_bool = stash[some_bool_key]
+ """
+
+ __slots__ = ("_storage",)
+
+ def __init__(self) -> None:
+ self._storage: Dict[StashKey[Any], object] = {}
+
+ def __setitem__(self, key: StashKey[T], value: T) -> None:
+ """Set a value for key."""
+ self._storage[key] = value
+
+ def __getitem__(self, key: StashKey[T]) -> T:
+ """Get the value for key.
+
+ Raises ``KeyError`` if the key wasn't set before.
+ """
+ return cast(T, self._storage[key])
+
+ def get(self, key: StashKey[T], default: D) -> Union[T, D]:
+ """Get the value for key, or return default if the key wasn't set
+ before."""
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def setdefault(self, key: StashKey[T], default: T) -> T:
+ """Return the value of key if already set, otherwise set the value
+ of key to default and return default."""
+ try:
+ return self[key]
+ except KeyError:
+ self[key] = default
+ return default
+
+ def __delitem__(self, key: StashKey[T]) -> None:
+ """Delete the value for key.
+
+ Raises ``KeyError`` if the key wasn't set before.
+ """
+ del self._storage[key]
+
+ def __contains__(self, key: StashKey[T]) -> bool:
+ """Return whether key was set."""
+ return key in self._storage
+
+ def __len__(self) -> int:
+ """Return how many items exist in the stash."""
+ return len(self._storage)
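+
+
+# Illustrative sketch, not upstream code: get() and setdefault() follow dict
+# semantics while keeping the key's static value type:
+#
+#     retries_key = StashKey[int]()
+#     stash = Stash()
+#     assert stash.get(retries_key, 0) == 0          # default when unset
+#     assert stash.setdefault(retries_key, 3) == 3   # stores and returns default
+#     assert retries_key in stash and len(stash) == 1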
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/stepwise.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/stepwise.py
new file mode 100644
index 0000000000..4d95a96b87
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/stepwise.py
@@ -0,0 +1,122 @@
+from typing import List
+from typing import Optional
+from typing import TYPE_CHECKING
+
+import pytest
+from _pytest import nodes
+from _pytest.config import Config
+from _pytest.config.argparsing import Parser
+from _pytest.main import Session
+from _pytest.reports import TestReport
+
+if TYPE_CHECKING:
+ from _pytest.cacheprovider import Cache
+
+STEPWISE_CACHE_DIR = "cache/stepwise"
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("general")
+ group.addoption(
+ "--sw",
+ "--stepwise",
+ action="store_true",
+ default=False,
+ dest="stepwise",
+ help="exit on test failure and continue from last failing test next time",
+ )
+ group.addoption(
+ "--sw-skip",
+ "--stepwise-skip",
+ action="store_true",
+ default=False,
+ dest="stepwise_skip",
+ help="ignore the first failing test but stop on the next failing test.\n"
+ "implicitly enables --stepwise.",
+ )
+
+
+@pytest.hookimpl
+def pytest_configure(config: Config) -> None:
+ if config.option.stepwise_skip:
+ # allow --stepwise-skip to work on its own merits.
+ config.option.stepwise = True
+ if config.getoption("stepwise"):
+ config.pluginmanager.register(StepwisePlugin(config), "stepwiseplugin")
+
+
+def pytest_sessionfinish(session: Session) -> None:
+ if not session.config.getoption("stepwise"):
+ assert session.config.cache is not None
+ # Clear the list of failing tests if the plugin is not active.
+ session.config.cache.set(STEPWISE_CACHE_DIR, [])
+
+
+class StepwisePlugin:
+ def __init__(self, config: Config) -> None:
+ self.config = config
+ self.session: Optional[Session] = None
+ self.report_status = ""
+ assert config.cache is not None
+ self.cache: Cache = config.cache
+ self.lastfailed: Optional[str] = self.cache.get(STEPWISE_CACHE_DIR, None)
+ self.skip: bool = config.getoption("stepwise_skip")
+
+ def pytest_sessionstart(self, session: Session) -> None:
+ self.session = session
+
+ def pytest_collection_modifyitems(
+ self, config: Config, items: List[nodes.Item]
+ ) -> None:
+ if not self.lastfailed:
+ self.report_status = "no previously failed tests, not skipping."
+ return
+
+ # check all item nodes until we find a match on last failed
+ failed_index = None
+ for index, item in enumerate(items):
+ if item.nodeid == self.lastfailed:
+ failed_index = index
+ break
+
+ # If the previously failed test was not found among the test items,
+ # do not skip any tests.
+ if failed_index is None:
+ self.report_status = "previously failed test not found, not skipping."
+ else:
+ self.report_status = f"skipping {failed_index} already passed items."
+ deselected = items[:failed_index]
+ del items[:failed_index]
+ config.hook.pytest_deselected(items=deselected)
+
+ def pytest_runtest_logreport(self, report: TestReport) -> None:
+ if report.failed:
+ if self.skip:
+ # Remove test from the failed ones (if it exists) and unset the skip option
+ # to make sure the following tests will not be skipped.
+ if report.nodeid == self.lastfailed:
+ self.lastfailed = None
+
+ self.skip = False
+ else:
+ # Mark test as the last failing and interrupt the test session.
+ self.lastfailed = report.nodeid
+ assert self.session is not None
+ self.session.shouldstop = (
+ "Test failed, continuing from this test next run."
+ )
+
+ else:
+ # If the test was actually run and did pass.
+ if report.when == "call":
+ # Remove test from the failed ones, if it exists.
+ if report.nodeid == self.lastfailed:
+ self.lastfailed = None
+
+ def pytest_report_collectionfinish(self) -> Optional[str]:
+ if self.config.getoption("verbose") >= 0 and self.report_status:
+ return f"stepwise: {self.report_status}"
+ return None
+
+ def pytest_sessionfinish(self) -> None:
+ self.cache.set(STEPWISE_CACHE_DIR, self.lastfailed)
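+
+
+# Illustrative workflow, not part of this module:
+#
+#     pytest --sw            # stop at the first failure; its nodeid is cached
+#     pytest --sw            # rerun, deselecting items before the cached failure
+#     pytest --sw --sw-skip  # tolerate that one failure, stop on the next one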
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/terminal.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/terminal.py
new file mode 100644
index 0000000000..ccbd84d7d7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/terminal.py
@@ -0,0 +1,1394 @@
+"""Terminal reporting of the full testing process.
+
+This is a good source for looking at the various reporting hooks.
+"""
+import argparse
+import datetime
+import inspect
+import platform
+import sys
+import warnings
+from collections import Counter
+from functools import partial
+from pathlib import Path
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import ClassVar
+from typing import Dict
+from typing import Generator
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import TextIO
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+import attr
+import pluggy
+
+import _pytest._version
+from _pytest import nodes
+from _pytest import timing
+from _pytest._code import ExceptionInfo
+from _pytest._code.code import ExceptionRepr
+from _pytest._io.wcwidth import wcswidth
+from _pytest.compat import final
+from _pytest.config import _PluggyPlugin
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.config import hookimpl
+from _pytest.config.argparsing import Parser
+from _pytest.nodes import Item
+from _pytest.nodes import Node
+from _pytest.pathlib import absolutepath
+from _pytest.pathlib import bestrelpath
+from _pytest.reports import BaseReport
+from _pytest.reports import CollectReport
+from _pytest.reports import TestReport
+
+if TYPE_CHECKING:
+ from typing_extensions import Literal
+
+ from _pytest.main import Session
+
+
+REPORT_COLLECTING_RESOLUTION = 0.5
+
+KNOWN_TYPES = (
+ "failed",
+ "passed",
+ "skipped",
+ "deselected",
+ "xfailed",
+ "xpassed",
+ "warnings",
+ "error",
+)
+
+_REPORTCHARS_DEFAULT = "fE"
+
+
+class MoreQuietAction(argparse.Action):
+ """A modified copy of the argparse count action which counts down and updates
+ the legacy quiet attribute at the same time.
+
+ Used to unify verbosity handling.
+ """
+
+ def __init__(
+ self,
+ option_strings: Sequence[str],
+ dest: str,
+ default: object = None,
+ required: bool = False,
+ help: Optional[str] = None,
+ ) -> None:
+ super().__init__(
+ option_strings=option_strings,
+ dest=dest,
+ nargs=0,
+ default=default,
+ required=required,
+ help=help,
+ )
+
+ def __call__(
+ self,
+ parser: argparse.ArgumentParser,
+ namespace: argparse.Namespace,
+ values: Union[str, Sequence[object], None],
+ option_string: Optional[str] = None,
+ ) -> None:
+ new_count = getattr(namespace, self.dest, 0) - 1
+ setattr(namespace, self.dest, new_count)
+ # todo Deprecate config.quiet
+ namespace.quiet = getattr(namespace, "quiet", 0) + 1
+
+
+def pytest_addoption(parser: Parser) -> None:
+ group = parser.getgroup("terminal reporting", "reporting", after="general")
+ group._addoption(
+ "-v",
+ "--verbose",
+ action="count",
+ default=0,
+ dest="verbose",
+ help="increase verbosity.",
+ )
+ group._addoption(
+ "--no-header",
+ action="store_true",
+ default=False,
+ dest="no_header",
+ help="disable header",
+ )
+ group._addoption(
+ "--no-summary",
+ action="store_true",
+ default=False,
+ dest="no_summary",
+ help="disable summary",
+ )
+ group._addoption(
+ "-q",
+ "--quiet",
+ action=MoreQuietAction,
+ default=0,
+ dest="verbose",
+ help="decrease verbosity.",
+ )
+ group._addoption(
+ "--verbosity",
+ dest="verbose",
+ type=int,
+ default=0,
+ help="set verbosity. Default is 0.",
+ )
+ group._addoption(
+ "-r",
+ action="store",
+ dest="reportchars",
+ default=_REPORTCHARS_DEFAULT,
+ metavar="chars",
+ help="show extra test summary info as specified by chars: (f)ailed, "
+ "(E)rror, (s)kipped, (x)failed, (X)passed, "
+ "(p)assed, (P)assed with output, (a)ll except passed (p/P), or (A)ll. "
+ "(w)arnings are enabled by default (see --disable-warnings), "
+ "'N' can be used to reset the list. (default: 'fE').",
+ )
+ group._addoption(
+ "--disable-warnings",
+ "--disable-pytest-warnings",
+ default=False,
+ dest="disable_warnings",
+ action="store_true",
+ help="disable warnings summary",
+ )
+ group._addoption(
+ "-l",
+ "--showlocals",
+ action="store_true",
+ dest="showlocals",
+ default=False,
+ help="show locals in tracebacks (disabled by default).",
+ )
+ group._addoption(
+ "--tb",
+ metavar="style",
+ action="store",
+ dest="tbstyle",
+ default="auto",
+ choices=["auto", "long", "short", "no", "line", "native"],
+ help="traceback print mode (auto/long/short/line/native/no).",
+ )
+ group._addoption(
+ "--show-capture",
+ action="store",
+ dest="showcapture",
+ choices=["no", "stdout", "stderr", "log", "all"],
+ default="all",
+ help="Controls how captured stdout/stderr/log is shown on failed tests. "
+ "Default is 'all'.",
+ )
+ group._addoption(
+ "--fulltrace",
+ "--full-trace",
+ action="store_true",
+ default=False,
+ help="don't cut any tracebacks (default is to cut).",
+ )
+ group._addoption(
+ "--color",
+ metavar="color",
+ action="store",
+ dest="color",
+ default="auto",
+ choices=["yes", "no", "auto"],
+ help="color terminal output (yes/no/auto).",
+ )
+ group._addoption(
+ "--code-highlight",
+ default="yes",
+ choices=["yes", "no"],
+ help="Whether code should be highlighted (only if --color is also enabled)",
+ )
+
+ parser.addini(
+ "console_output_style",
+ help='console output: "classic", or with additional progress information ("progress" (percentage) | "count").',
+ default="progress",
+ )
+
+
+def pytest_configure(config: Config) -> None:
+ reporter = TerminalReporter(config, sys.stdout)
+ config.pluginmanager.register(reporter, "terminalreporter")
+ if config.option.debug or config.option.traceconfig:
+
+ def mywriter(tags, args):
+ msg = " ".join(map(str, args))
+ reporter.write_line("[traceconfig] " + msg)
+
+ config.trace.root.setprocessor("pytest:config", mywriter)
+
+
+def getreportopt(config: Config) -> str:
+ reportchars: str = config.option.reportchars
+
+ old_aliases = {"F", "S"}
+ reportopts = ""
+ for char in reportchars:
+ if char in old_aliases:
+ char = char.lower()
+ if char == "a":
+ reportopts = "sxXEf"
+ elif char == "A":
+ reportopts = "PpsxXEf"
+ elif char == "N":
+ reportopts = ""
+ elif char not in reportopts:
+ reportopts += char
+
+ if not config.option.disable_warnings and "w" not in reportopts:
+ reportopts = "w" + reportopts
+ elif config.option.disable_warnings and "w" in reportopts:
+ reportopts = reportopts.replace("w", "")
+
+ return reportopts
+
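+# Worked examples for getreportopt(), illustrative only:
+#
+#     "-ra"  ->  "sxXEf"    (plus a leading "w" unless --disable-warnings is given)
+#     "-rA"  ->  "PpsxXEf"  (likewise)
+#     "-rN"  ->  ""         (clears any previously accumulated chars)
+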
+
+@hookimpl(trylast=True) # after _pytest.runner
+def pytest_report_teststatus(report: BaseReport) -> Tuple[str, str, str]:
+ letter = "F"
+ if report.passed:
+ letter = "."
+ elif report.skipped:
+ letter = "s"
+
+ outcome: str = report.outcome
+ if report.when in ("collect", "setup", "teardown") and outcome == "failed":
+ outcome = "error"
+ letter = "E"
+
+ return outcome, letter, outcome.upper()
+
+
+@attr.s(auto_attribs=True)
+class WarningReport:
+ """Simple structure to hold warnings information captured by ``pytest_warning_recorded``.
+
+ :ivar str message:
+ User friendly message about the warning.
+ :ivar str|None nodeid:
+ nodeid that generated the warning (see ``get_location``).
+ :ivar tuple fslocation:
+ File system location of the source of the warning (see ``get_location``).
+ """
+
+ message: str
+ nodeid: Optional[str] = None
+ fslocation: Optional[Tuple[str, int]] = None
+
+ count_towards_summary: ClassVar = True
+
+ def get_location(self, config: Config) -> Optional[str]:
+ """Return the more user-friendly information about the location of a warning, or None."""
+ if self.nodeid:
+ return self.nodeid
+ if self.fslocation:
+ filename, linenum = self.fslocation
+ relpath = bestrelpath(config.invocation_params.dir, absolutepath(filename))
+ return f"{relpath}:{linenum}"
+ return None
+
+
+@final
+class TerminalReporter:
+ def __init__(self, config: Config, file: Optional[TextIO] = None) -> None:
+ import _pytest.config
+
+ self.config = config
+ self._numcollected = 0
+ self._session: Optional[Session] = None
+ self._showfspath: Optional[bool] = None
+
+ self.stats: Dict[str, List[Any]] = {}
+ self._main_color: Optional[str] = None
+ self._known_types: Optional[List[str]] = None
+ self.startpath = config.invocation_params.dir
+ if file is None:
+ file = sys.stdout
+ self._tw = _pytest.config.create_terminal_writer(config, file)
+ self._screen_width = self._tw.fullwidth
+ self.currentfspath: Union[None, Path, str, int] = None
+ self.reportchars = getreportopt(config)
+ self.hasmarkup = self._tw.hasmarkup
+ self.isatty = file.isatty()
+ self._progress_nodeids_reported: Set[str] = set()
+ self._show_progress_info = self._determine_show_progress_info()
+ self._collect_report_last_write: Optional[float] = None
+ self._already_displayed_warnings: Optional[int] = None
+ self._keyboardinterrupt_memo: Optional[ExceptionRepr] = None
+
+ def _determine_show_progress_info(self) -> "Literal['progress', 'count', False]":
+ """Return whether we should display progress information based on the current config."""
+ # do not show progress if we are not capturing output (#3038)
+ if self.config.getoption("capture", "no") == "no":
+ return False
+ # do not show progress if we are showing fixture setup/teardown
+ if self.config.getoption("setupshow", False):
+ return False
+ cfg: str = self.config.getini("console_output_style")
+ if cfg == "progress":
+ return "progress"
+ elif cfg == "count":
+ return "count"
+ else:
+ return False
+
+ @property
+ def verbosity(self) -> int:
+ verbosity: int = self.config.option.verbose
+ return verbosity
+
+ @property
+ def showheader(self) -> bool:
+ return self.verbosity >= 0
+
+ @property
+ def no_header(self) -> bool:
+ return bool(self.config.option.no_header)
+
+ @property
+ def no_summary(self) -> bool:
+ return bool(self.config.option.no_summary)
+
+ @property
+ def showfspath(self) -> bool:
+ if self._showfspath is None:
+ return self.verbosity >= 0
+ return self._showfspath
+
+ @showfspath.setter
+ def showfspath(self, value: Optional[bool]) -> None:
+ self._showfspath = value
+
+ @property
+ def showlongtestinfo(self) -> bool:
+ return self.verbosity > 0
+
+ def hasopt(self, char: str) -> bool:
+ char = {"xfailed": "x", "skipped": "s"}.get(char, char)
+ return char in self.reportchars
+
+ def write_fspath_result(self, nodeid: str, res, **markup: bool) -> None:
+ fspath = self.config.rootpath / nodeid.split("::")[0]
+ if self.currentfspath is None or fspath != self.currentfspath:
+ if self.currentfspath is not None and self._show_progress_info:
+ self._write_progress_information_filling_space()
+ self.currentfspath = fspath
+ relfspath = bestrelpath(self.startpath, fspath)
+ self._tw.line()
+ self._tw.write(relfspath + " ")
+ self._tw.write(res, flush=True, **markup)
+
+ def write_ensure_prefix(self, prefix: str, extra: str = "", **kwargs) -> None:
+ if self.currentfspath != prefix:
+ self._tw.line()
+ self.currentfspath = prefix
+ self._tw.write(prefix)
+ if extra:
+ self._tw.write(extra, **kwargs)
+ self.currentfspath = -2
+
+ def ensure_newline(self) -> None:
+ if self.currentfspath:
+ self._tw.line()
+ self.currentfspath = None
+
+ def write(self, content: str, *, flush: bool = False, **markup: bool) -> None:
+ self._tw.write(content, flush=flush, **markup)
+
+ def flush(self) -> None:
+ self._tw.flush()
+
+ def write_line(self, line: Union[str, bytes], **markup: bool) -> None:
+ if not isinstance(line, str):
+ line = str(line, errors="replace")
+ self.ensure_newline()
+ self._tw.line(line, **markup)
+
+ def rewrite(self, line: str, **markup: bool) -> None:
+ """Rewinds the terminal cursor to the beginning and writes the given line.
+
+ :param erase:
+ If True, will also add spaces until the full terminal width to ensure
+ previous lines are properly erased.
+
+ The rest of the keyword arguments are markup instructions.
+ """
+ erase = markup.pop("erase", False)
+ if erase:
+ fill_count = self._tw.fullwidth - len(line) - 1
+ fill = " " * fill_count
+ else:
+ fill = ""
+ line = str(line)
+ self._tw.write("\r" + line + fill, **markup)
+
+ def write_sep(
+ self,
+ sep: str,
+ title: Optional[str] = None,
+ fullwidth: Optional[int] = None,
+ **markup: bool,
+ ) -> None:
+ self.ensure_newline()
+ self._tw.sep(sep, title, fullwidth, **markup)
+
+ def section(self, title: str, sep: str = "=", **kw: bool) -> None:
+ self._tw.sep(sep, title, **kw)
+
+ def line(self, msg: str, **kw: bool) -> None:
+ self._tw.line(msg, **kw)
+
+ def _add_stats(self, category: str, items: Sequence[Any]) -> None:
+ set_main_color = category not in self.stats
+ self.stats.setdefault(category, []).extend(items)
+ if set_main_color:
+ self._set_main_color()
+
+ def pytest_internalerror(self, excrepr: ExceptionRepr) -> bool:
+ for line in str(excrepr).split("\n"):
+ self.write_line("INTERNALERROR> " + line)
+ return True
+
+ def pytest_warning_recorded(
+ self,
+ warning_message: warnings.WarningMessage,
+ nodeid: str,
+ ) -> None:
+ from _pytest.warnings import warning_record_to_str
+
+ fslocation = warning_message.filename, warning_message.lineno
+ message = warning_record_to_str(warning_message)
+
+ warning_report = WarningReport(
+ fslocation=fslocation, message=message, nodeid=nodeid
+ )
+ self._add_stats("warnings", [warning_report])
+
+ def pytest_plugin_registered(self, plugin: _PluggyPlugin) -> None:
+ if self.config.option.traceconfig:
+ msg = f"PLUGIN registered: {plugin}"
+ # XXX This event may happen during setup/teardown time
+ # which unfortunately captures our output here
+ # which garbles our output if we use self.write_line.
+ self.write_line(msg)
+
+ def pytest_deselected(self, items: Sequence[Item]) -> None:
+ self._add_stats("deselected", items)
+
+ def pytest_runtest_logstart(
+ self, nodeid: str, location: Tuple[str, Optional[int], str]
+ ) -> None:
+ # Ensure that the path is printed before the
+ # 1st test of a module starts running.
+ if self.showlongtestinfo:
+ line = self._locationline(nodeid, *location)
+ self.write_ensure_prefix(line, "")
+ self.flush()
+ elif self.showfspath:
+ self.write_fspath_result(nodeid, "")
+ self.flush()
+
+ def pytest_runtest_logreport(self, report: TestReport) -> None:
+ self._tests_ran = True
+ rep = report
+ res: Tuple[
+ str, str, Union[str, Tuple[str, Mapping[str, bool]]]
+ ] = self.config.hook.pytest_report_teststatus(report=rep, config=self.config)
+ category, letter, word = res
+ if not isinstance(word, tuple):
+ markup = None
+ else:
+ word, markup = word
+ self._add_stats(category, [rep])
+ if not letter and not word:
+ # Probably passed setup/teardown.
+ return
+ running_xdist = hasattr(rep, "node")
+ if markup is None:
+ was_xfail = hasattr(report, "wasxfail")
+ if rep.passed and not was_xfail:
+ markup = {"green": True}
+ elif rep.passed and was_xfail:
+ markup = {"yellow": True}
+ elif rep.failed:
+ markup = {"red": True}
+ elif rep.skipped:
+ markup = {"yellow": True}
+ else:
+ markup = {}
+ if self.verbosity <= 0:
+ self._tw.write(letter, **markup)
+ else:
+ self._progress_nodeids_reported.add(rep.nodeid)
+ line = self._locationline(rep.nodeid, *rep.location)
+ if not running_xdist:
+ self.write_ensure_prefix(line, word, **markup)
+ if rep.skipped or hasattr(report, "wasxfail"):
+ available_width = (
+ (self._tw.fullwidth - self._tw.width_of_current_line)
+ - len(" [100%]")
+ - 1
+ )
+ reason = _get_raw_skip_reason(rep)
+ reason_ = _format_trimmed(" ({})", reason, available_width)
+ if reason and reason_ is not None:
+ self._tw.write(reason_)
+ if self._show_progress_info:
+ self._write_progress_information_filling_space()
+ else:
+ self.ensure_newline()
+ self._tw.write("[%s]" % rep.node.gateway.id)
+ if self._show_progress_info:
+ self._tw.write(
+ self._get_progress_information_message() + " ", cyan=True
+ )
+ else:
+ self._tw.write(" ")
+ self._tw.write(word, **markup)
+ self._tw.write(" " + line)
+ self.currentfspath = -2
+ self.flush()
+
+ @property
+ def _is_last_item(self) -> bool:
+ assert self._session is not None
+ return len(self._progress_nodeids_reported) == self._session.testscollected
+
+ def pytest_runtest_logfinish(self, nodeid: str) -> None:
+ assert self._session
+ if self.verbosity <= 0 and self._show_progress_info:
+ if self._show_progress_info == "count":
+ num_tests = self._session.testscollected
+ progress_length = len(f" [{num_tests}/{num_tests}]")
+ else:
+ progress_length = len(" [100%]")
+
+ self._progress_nodeids_reported.add(nodeid)
+
+ if self._is_last_item:
+ self._write_progress_information_filling_space()
+ else:
+ main_color, _ = self._get_main_color()
+ w = self._width_of_current_line
+ past_edge = w + progress_length + 1 >= self._screen_width
+ if past_edge:
+ msg = self._get_progress_information_message()
+ self._tw.write(msg + "\n", **{main_color: True})
+
+ def _get_progress_information_message(self) -> str:
+ assert self._session
+ collected = self._session.testscollected
+ if self._show_progress_info == "count":
+ if collected:
+ progress = self._progress_nodeids_reported
+ counter_format = f"{{:{len(str(collected))}d}}"
+ format_string = f" [{counter_format}/{{}}]"
+ return format_string.format(len(progress), collected)
+ return f" [ {collected} / {collected} ]"
+ else:
+ if collected:
+ return " [{:3d}%]".format(
+ len(self._progress_nodeids_reported) * 100 // collected
+ )
+ return " [100%]"
+
+ def _write_progress_information_filling_space(self) -> None:
+ color, _ = self._get_main_color()
+ msg = self._get_progress_information_message()
+ w = self._width_of_current_line
+ fill = self._tw.fullwidth - w - 1
+ self.write(msg.rjust(fill), flush=True, **{color: True})
+
+ @property
+ def _width_of_current_line(self) -> int:
+ """Return the width of the current line."""
+ return self._tw.width_of_current_line
+
+ def pytest_collection(self) -> None:
+ if self.isatty:
+ if self.config.option.verbose >= 0:
+ self.write("collecting ... ", flush=True, bold=True)
+ self._collect_report_last_write = timing.time()
+ elif self.config.option.verbose >= 1:
+ self.write("collecting ... ", flush=True, bold=True)
+
+ def pytest_collectreport(self, report: CollectReport) -> None:
+ if report.failed:
+ self._add_stats("error", [report])
+ elif report.skipped:
+ self._add_stats("skipped", [report])
+ items = [x for x in report.result if isinstance(x, Item)]
+ self._numcollected += len(items)
+ if self.isatty:
+ self.report_collect()
+
+ def report_collect(self, final: bool = False) -> None:
+ if self.config.option.verbose < 0:
+ return
+
+ if not final:
+ # Only write "collecting" report every 0.5s.
+ t = timing.time()
+ if (
+ self._collect_report_last_write is not None
+ and self._collect_report_last_write > t - REPORT_COLLECTING_RESOLUTION
+ ):
+ return
+ self._collect_report_last_write = t
+
+ errors = len(self.stats.get("error", []))
+ skipped = len(self.stats.get("skipped", []))
+ deselected = len(self.stats.get("deselected", []))
+ selected = self._numcollected - errors - skipped - deselected
+ line = "collected " if final else "collecting "
+ line += (
+ str(self._numcollected) + " item" + ("" if self._numcollected == 1 else "s")
+ )
+ if errors:
+ line += " / %d error%s" % (errors, "s" if errors != 1 else "")
+ if deselected:
+ line += " / %d deselected" % deselected
+ if skipped:
+ line += " / %d skipped" % skipped
+ if self._numcollected > selected > 0:
+ line += " / %d selected" % selected
+ if self.isatty:
+ self.rewrite(line, bold=True, erase=True)
+ if final:
+ self.write("\n")
+ else:
+ self.write_line(line)
+
+ @hookimpl(trylast=True)
+ def pytest_sessionstart(self, session: "Session") -> None:
+ self._session = session
+ self._sessionstarttime = timing.time()
+ if not self.showheader:
+ return
+ self.write_sep("=", "test session starts", bold=True)
+ verinfo = platform.python_version()
+ if not self.no_header:
+ msg = f"platform {sys.platform} -- Python {verinfo}"
+ pypy_version_info = getattr(sys, "pypy_version_info", None)
+ if pypy_version_info:
+ verinfo = ".".join(map(str, pypy_version_info[:3]))
+ msg += f"[pypy-{verinfo}-{pypy_version_info[3]}]"
+ msg += ", pytest-{}, pluggy-{}".format(
+ _pytest._version.version, pluggy.__version__
+ )
+ if (
+ self.verbosity > 0
+ or self.config.option.debug
+ or getattr(self.config.option, "pastebin", None)
+ ):
+ msg += " -- " + str(sys.executable)
+ self.write_line(msg)
+ lines = self.config.hook.pytest_report_header(
+ config=self.config, start_path=self.startpath
+ )
+ self._write_report_lines_from_hooks(lines)
+
+ def _write_report_lines_from_hooks(
+ self, lines: Sequence[Union[str, Sequence[str]]]
+ ) -> None:
+ for line_or_lines in reversed(lines):
+ if isinstance(line_or_lines, str):
+ self.write_line(line_or_lines)
+ else:
+ for line in line_or_lines:
+ self.write_line(line)
+
+ def pytest_report_header(self, config: Config) -> List[str]:
+ line = "rootdir: %s" % config.rootpath
+
+ if config.inipath:
+ line += ", configfile: " + bestrelpath(config.rootpath, config.inipath)
+
+ testpaths: List[str] = config.getini("testpaths")
+ if config.invocation_params.dir == config.rootpath and config.args == testpaths:
+ line += ", testpaths: {}".format(", ".join(testpaths))
+
+ result = [line]
+
+ plugininfo = config.pluginmanager.list_plugin_distinfo()
+ if plugininfo:
+ result.append("plugins: %s" % ", ".join(_plugin_nameversions(plugininfo)))
+ return result
+
+ def pytest_collection_finish(self, session: "Session") -> None:
+ self.report_collect(True)
+
+ lines = self.config.hook.pytest_report_collectionfinish(
+ config=self.config,
+ start_path=self.startpath,
+ items=session.items,
+ )
+ self._write_report_lines_from_hooks(lines)
+
+ if self.config.getoption("collectonly"):
+ if session.items:
+ if self.config.option.verbose > -1:
+ self._tw.line("")
+ self._printcollecteditems(session.items)
+
+ failed = self.stats.get("failed")
+ if failed:
+ self._tw.sep("!", "collection failures")
+ for rep in failed:
+ rep.toterminal(self._tw)
+
+ def _printcollecteditems(self, items: Sequence[Item]) -> None:
+ if self.config.option.verbose < 0:
+ if self.config.option.verbose < -1:
+ counts = Counter(item.nodeid.split("::", 1)[0] for item in items)
+ for name, count in sorted(counts.items()):
+ self._tw.line("%s: %d" % (name, count))
+ else:
+ for item in items:
+ self._tw.line(item.nodeid)
+ return
+ stack: List[Node] = []
+ indent = ""
+ for item in items:
+ needed_collectors = item.listchain()[1:] # strip root node
+ while stack:
+ if stack == needed_collectors[: len(stack)]:
+ break
+ stack.pop()
+ for col in needed_collectors[len(stack) :]:
+ stack.append(col)
+ indent = (len(stack) - 1) * " "
+ self._tw.line(f"{indent}{col}")
+ if self.config.option.verbose >= 1:
+ obj = getattr(col, "obj", None)
+ doc = inspect.getdoc(obj) if obj else None
+ if doc:
+ for line in doc.splitlines():
+ self._tw.line("{}{}".format(indent + " ", line))
+
+ @hookimpl(hookwrapper=True)
+ def pytest_sessionfinish(
+ self, session: "Session", exitstatus: Union[int, ExitCode]
+ ):
+ outcome = yield
+ outcome.get_result()
+ self._tw.line("")
+ summary_exit_codes = (
+ ExitCode.OK,
+ ExitCode.TESTS_FAILED,
+ ExitCode.INTERRUPTED,
+ ExitCode.USAGE_ERROR,
+ ExitCode.NO_TESTS_COLLECTED,
+ )
+ if exitstatus in summary_exit_codes and not self.no_summary:
+ self.config.hook.pytest_terminal_summary(
+ terminalreporter=self, exitstatus=exitstatus, config=self.config
+ )
+ if session.shouldfail:
+ self.write_sep("!", str(session.shouldfail), red=True)
+ if exitstatus == ExitCode.INTERRUPTED:
+ self._report_keyboardinterrupt()
+ self._keyboardinterrupt_memo = None
+ elif session.shouldstop:
+ self.write_sep("!", str(session.shouldstop), red=True)
+ self.summary_stats()
+
+ @hookimpl(hookwrapper=True)
+ def pytest_terminal_summary(self) -> Generator[None, None, None]:
+ self.summary_errors()
+ self.summary_failures()
+ self.summary_warnings()
+ self.summary_passes()
+ yield
+ self.short_test_summary()
+ # Display any extra warnings from teardown here (if any).
+ self.summary_warnings()
+
+ def pytest_keyboard_interrupt(self, excinfo: ExceptionInfo[BaseException]) -> None:
+ self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)
+
+ def pytest_unconfigure(self) -> None:
+ if self._keyboardinterrupt_memo is not None:
+ self._report_keyboardinterrupt()
+
+ def _report_keyboardinterrupt(self) -> None:
+ excrepr = self._keyboardinterrupt_memo
+ assert excrepr is not None
+ assert excrepr.reprcrash is not None
+ msg = excrepr.reprcrash.message
+ self.write_sep("!", msg)
+ if "KeyboardInterrupt" in msg:
+ if self.config.option.fulltrace:
+ excrepr.toterminal(self._tw)
+ else:
+ excrepr.reprcrash.toterminal(self._tw)
+ self._tw.line(
+ "(to show a full traceback on KeyboardInterrupt use --full-trace)",
+ yellow=True,
+ )
+
+ def _locationline(
+ self, nodeid: str, fspath: str, lineno: Optional[int], domain: str
+ ) -> str:
+ def mkrel(nodeid: str) -> str:
+ line = self.config.cwd_relative_nodeid(nodeid)
+ if domain and line.endswith(domain):
+ line = line[: -len(domain)]
+ values = domain.split("[")
+ values[0] = values[0].replace(".", "::") # don't replace '.' in params
+ line += "[".join(values)
+ return line
+
+ # fspath comes from the test id, which has a "/"-normalized path.
+ if fspath:
+ res = mkrel(nodeid)
+ if self.verbosity >= 2 and nodeid.split("::")[0] != fspath.replace(
+ "\\", nodes.SEP
+ ):
+ res += " <- " + bestrelpath(self.startpath, Path(fspath))
+ else:
+ res = "[location]"
+ return res + " "
+
+ def _getfailureheadline(self, rep):
+ head_line = rep.head_line
+ if head_line:
+ return head_line
+ return "test session" # XXX?
+
+ def _getcrashline(self, rep):
+ try:
+ return str(rep.longrepr.reprcrash)
+ except AttributeError:
+ try:
+ return str(rep.longrepr)[:50]
+ except AttributeError:
+ return ""
+
+ #
+ # Summaries for sessionfinish.
+ #
+ def getreports(self, name: str):
+ return [x for x in self.stats.get(name, ()) if not hasattr(x, "_pdbshown")]
+
+ def summary_warnings(self) -> None:
+ if self.hasopt("w"):
+ all_warnings: Optional[List[WarningReport]] = self.stats.get("warnings")
+ if not all_warnings:
+ return
+
+ final = self._already_displayed_warnings is not None
+ if final:
+ warning_reports = all_warnings[self._already_displayed_warnings :]
+ else:
+ warning_reports = all_warnings
+ self._already_displayed_warnings = len(warning_reports)
+ if not warning_reports:
+ return
+
+ reports_grouped_by_message: Dict[str, List[WarningReport]] = {}
+ for wr in warning_reports:
+ reports_grouped_by_message.setdefault(wr.message, []).append(wr)
+
+ def collapsed_location_report(reports: List[WarningReport]) -> str:
+ locations = []
+ for w in reports:
+ location = w.get_location(self.config)
+ if location:
+ locations.append(location)
+
+ if len(locations) < 10:
+ return "\n".join(map(str, locations))
+
+ counts_by_filename = Counter(
+ str(loc).split("::", 1)[0] for loc in locations
+ )
+ return "\n".join(
+ "{}: {} warning{}".format(k, v, "s" if v > 1 else "")
+ for k, v in counts_by_filename.items()
+ )
+
+ title = "warnings summary (final)" if final else "warnings summary"
+ self.write_sep("=", title, yellow=True, bold=False)
+ for message, message_reports in reports_grouped_by_message.items():
+ maybe_location = collapsed_location_report(message_reports)
+ if maybe_location:
+ self._tw.line(maybe_location)
+ lines = message.splitlines()
+ indented = "\n".join(" " + x for x in lines)
+ message = indented.rstrip()
+ else:
+ message = message.rstrip()
+ self._tw.line(message)
+ self._tw.line()
+ self._tw.line(
+ "-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html"
+ )
+
+ def summary_passes(self) -> None:
+ if self.config.option.tbstyle != "no":
+ if self.hasopt("P"):
+ reports: List[TestReport] = self.getreports("passed")
+ if not reports:
+ return
+ self.write_sep("=", "PASSES")
+ for rep in reports:
+ if rep.sections:
+ msg = self._getfailureheadline(rep)
+ self.write_sep("_", msg, green=True, bold=True)
+ self._outrep_summary(rep)
+ self._handle_teardown_sections(rep.nodeid)
+
+ def _get_teardown_reports(self, nodeid: str) -> List[TestReport]:
+ reports = self.getreports("")
+ return [
+ report
+ for report in reports
+ if report.when == "teardown" and report.nodeid == nodeid
+ ]
+
+ def _handle_teardown_sections(self, nodeid: str) -> None:
+ for report in self._get_teardown_reports(nodeid):
+ self.print_teardown_sections(report)
+
+ def print_teardown_sections(self, rep: TestReport) -> None:
+ showcapture = self.config.option.showcapture
+ if showcapture == "no":
+ return
+ for secname, content in rep.sections:
+ if showcapture != "all" and showcapture not in secname:
+ continue
+ if "teardown" in secname:
+ self._tw.sep("-", secname)
+ if content[-1:] == "\n":
+ content = content[:-1]
+ self._tw.line(content)
+
+ def summary_failures(self) -> None:
+ if self.config.option.tbstyle != "no":
+ reports: List[BaseReport] = self.getreports("failed")
+ if not reports:
+ return
+ self.write_sep("=", "FAILURES")
+ if self.config.option.tbstyle == "line":
+ for rep in reports:
+ line = self._getcrashline(rep)
+ self.write_line(line)
+ else:
+ for rep in reports:
+ msg = self._getfailureheadline(rep)
+ self.write_sep("_", msg, red=True, bold=True)
+ self._outrep_summary(rep)
+ self._handle_teardown_sections(rep.nodeid)
+
+ def summary_errors(self) -> None:
+ if self.config.option.tbstyle != "no":
+ reports: List[BaseReport] = self.getreports("error")
+ if not reports:
+ return
+ self.write_sep("=", "ERRORS")
+ for rep in self.stats["error"]:
+ msg = self._getfailureheadline(rep)
+ if rep.when == "collect":
+ msg = "ERROR collecting " + msg
+ else:
+ msg = f"ERROR at {rep.when} of {msg}"
+ self.write_sep("_", msg, red=True, bold=True)
+ self._outrep_summary(rep)
+
+ def _outrep_summary(self, rep: BaseReport) -> None:
+ rep.toterminal(self._tw)
+ showcapture = self.config.option.showcapture
+ if showcapture == "no":
+ return
+ for secname, content in rep.sections:
+ if showcapture != "all" and showcapture not in secname:
+ continue
+ self._tw.sep("-", secname)
+ if content[-1:] == "\n":
+ content = content[:-1]
+ self._tw.line(content)
+
+ def summary_stats(self) -> None:
+ if self.verbosity < -1:
+ return
+
+ session_duration = timing.time() - self._sessionstarttime
+ (parts, main_color) = self.build_summary_stats_line()
+ line_parts = []
+
+ display_sep = self.verbosity >= 0
+ if display_sep:
+ fullwidth = self._tw.fullwidth
+ for text, markup in parts:
+ with_markup = self._tw.markup(text, **markup)
+ if display_sep:
+ fullwidth += len(with_markup) - len(text)
+ line_parts.append(with_markup)
+ msg = ", ".join(line_parts)
+
+ main_markup = {main_color: True}
+ duration = f" in {format_session_duration(session_duration)}"
+ duration_with_markup = self._tw.markup(duration, **main_markup)
+ if display_sep:
+ fullwidth += len(duration_with_markup) - len(duration)
+ msg += duration_with_markup
+
+ if display_sep:
+ markup_for_end_sep = self._tw.markup("", **main_markup)
+ if markup_for_end_sep.endswith("\x1b[0m"):
+ markup_for_end_sep = markup_for_end_sep[:-4]
+ fullwidth += len(markup_for_end_sep)
+ msg += markup_for_end_sep
+
+ if display_sep:
+ self.write_sep("=", msg, fullwidth=fullwidth, **main_markup)
+ else:
+ self.write_line(msg, **main_markup)
+
+ def short_test_summary(self) -> None:
+ if not self.reportchars:
+ return
+
+ def show_simple(stat, lines: List[str]) -> None:
+ failed = self.stats.get(stat, [])
+ if not failed:
+ return
+ termwidth = self._tw.fullwidth
+ config = self.config
+ for rep in failed:
+ line = _get_line_with_reprcrash_message(config, rep, termwidth)
+ lines.append(line)
+
+ def show_xfailed(lines: List[str]) -> None:
+ xfailed = self.stats.get("xfailed", [])
+ for rep in xfailed:
+ verbose_word = rep._get_verbose_word(self.config)
+ pos = _get_pos(self.config, rep)
+ lines.append(f"{verbose_word} {pos}")
+ reason = rep.wasxfail
+ if reason:
+ lines.append(" " + str(reason))
+
+ def show_xpassed(lines: List[str]) -> None:
+ xpassed = self.stats.get("xpassed", [])
+ for rep in xpassed:
+ verbose_word = rep._get_verbose_word(self.config)
+ pos = _get_pos(self.config, rep)
+ reason = rep.wasxfail
+ lines.append(f"{verbose_word} {pos} {reason}")
+
+ def show_skipped(lines: List[str]) -> None:
+ skipped: List[CollectReport] = self.stats.get("skipped", [])
+ fskips = _folded_skips(self.startpath, skipped) if skipped else []
+ if not fskips:
+ return
+ verbose_word = skipped[0]._get_verbose_word(self.config)
+ for num, fspath, lineno, reason in fskips:
+ if reason.startswith("Skipped: "):
+ reason = reason[9:]
+ if lineno is not None:
+ lines.append(
+ "%s [%d] %s:%d: %s"
+ % (verbose_word, num, fspath, lineno, reason)
+ )
+ else:
+ lines.append("%s [%d] %s: %s" % (verbose_word, num, fspath, reason))
+
+ REPORTCHAR_ACTIONS: Mapping[str, Callable[[List[str]], None]] = {
+ "x": show_xfailed,
+ "X": show_xpassed,
+ "f": partial(show_simple, "failed"),
+ "s": show_skipped,
+ "p": partial(show_simple, "passed"),
+ "E": partial(show_simple, "error"),
+ }
+
+ lines: List[str] = []
+ for char in self.reportchars:
+ action = REPORTCHAR_ACTIONS.get(char)
+ if action: # skipping e.g. "P" (passed with output) here.
+ action(lines)
+
+ if lines:
+ self.write_sep("=", "short test summary info")
+ for line in lines:
+ self.write_line(line)
+
+ def _get_main_color(self) -> Tuple[str, List[str]]:
+ if self._main_color is None or self._known_types is None or self._is_last_item:
+ self._set_main_color()
+ assert self._main_color
+ assert self._known_types
+ return self._main_color, self._known_types
+
+ def _determine_main_color(self, unknown_type_seen: bool) -> str:
+ stats = self.stats
+ if "failed" in stats or "error" in stats:
+ main_color = "red"
+ elif "warnings" in stats or "xpassed" in stats or unknown_type_seen:
+ main_color = "yellow"
+ elif "passed" in stats or not self._is_last_item:
+ main_color = "green"
+ else:
+ main_color = "yellow"
+ return main_color
+
+ def _set_main_color(self) -> None:
+ unknown_types: List[str] = []
+ for found_type in self.stats.keys():
+ if found_type: # setup/teardown reports have an empty key, ignore them
+ if found_type not in KNOWN_TYPES and found_type not in unknown_types:
+ unknown_types.append(found_type)
+ self._known_types = list(KNOWN_TYPES) + unknown_types
+ self._main_color = self._determine_main_color(bool(unknown_types))
+
+ def build_summary_stats_line(self) -> Tuple[List[Tuple[str, Dict[str, bool]]], str]:
+ """
+ Build the parts used in the last summary stats line.
+
+ The summary stats line is the line shown at the end, "=== 12 passed, 2 errors in Xs ===".
+
+ This function builds a list of the "parts" that make up the text in that line; in
+ the example above it would be:
+
+ [
+ ("12 passed", {"green": True}),
+ ("2 errors", {"red": True}),
+ ]
+
+ That last dict for each line is a "markup dictionary", used by TerminalWriter to
+ color output.
+
+ The final color of the line is also determined by this function, and is the second
+ element of the returned tuple.
+ """
+ if self.config.getoption("collectonly"):
+ return self._build_collect_only_summary_stats_line()
+ else:
+ return self._build_normal_summary_stats_line()
+
+ def _get_reports_to_display(self, key: str) -> List[Any]:
+ """Get test/collection reports for the given status key, such as `passed` or `error`."""
+ reports = self.stats.get(key, [])
+ return [x for x in reports if getattr(x, "count_towards_summary", True)]
+
+ def _build_normal_summary_stats_line(
+ self,
+ ) -> Tuple[List[Tuple[str, Dict[str, bool]]], str]:
+ main_color, known_types = self._get_main_color()
+ parts = []
+
+ for key in known_types:
+ reports = self._get_reports_to_display(key)
+ if reports:
+ count = len(reports)
+ color = _color_for_type.get(key, _color_for_type_default)
+ markup = {color: True, "bold": color == main_color}
+ parts.append(("%d %s" % pluralize(count, key), markup))
+
+ if not parts:
+ parts = [("no tests ran", {_color_for_type_default: True})]
+
+ return parts, main_color
+
+ def _build_collect_only_summary_stats_line(
+ self,
+ ) -> Tuple[List[Tuple[str, Dict[str, bool]]], str]:
+ deselected = len(self._get_reports_to_display("deselected"))
+ errors = len(self._get_reports_to_display("error"))
+
+ if self._numcollected == 0:
+ parts = [("no tests collected", {"yellow": True})]
+ main_color = "yellow"
+
+ elif deselected == 0:
+ main_color = "green"
+ collected_output = "%d %s collected" % pluralize(self._numcollected, "test")
+ parts = [(collected_output, {main_color: True})]
+ else:
+ all_tests_were_deselected = self._numcollected == deselected
+ if all_tests_were_deselected:
+ main_color = "yellow"
+ collected_output = f"no tests collected ({deselected} deselected)"
+ else:
+ main_color = "green"
+ selected = self._numcollected - deselected
+ collected_output = f"{selected}/{self._numcollected} tests collected ({deselected} deselected)"
+
+ parts = [(collected_output, {main_color: True})]
+
+ if errors:
+ main_color = _color_for_type["error"]
+ parts += [("%d %s" % pluralize(errors, "error"), {main_color: True})]
+
+ return parts, main_color
+
+
+def _get_pos(config: Config, rep: BaseReport):
+ nodeid = config.cwd_relative_nodeid(rep.nodeid)
+ return nodeid
+
+
+def _format_trimmed(format: str, msg: str, available_width: int) -> Optional[str]:
+ """Format msg into format, ellipsizing it if doesn't fit in available_width.
+
+ Returns None if even the ellipsis can't fit.
+ """
+ # Only use the first line.
+ i = msg.find("\n")
+ if i != -1:
+ msg = msg[:i]
+
+ ellipsis = "..."
+ format_width = wcswidth(format.format(""))
+ if format_width + len(ellipsis) > available_width:
+ return None
+
+ if format_width + wcswidth(msg) > available_width:
+ available_width -= len(ellipsis)
+ msg = msg[:available_width]
+ while format_width + wcswidth(msg) > available_width:
+ msg = msg[:-1]
+ msg += ellipsis
+
+ return format.format(msg)
+
+
+def _get_line_with_reprcrash_message(
+ config: Config, rep: BaseReport, termwidth: int
+) -> str:
+ """Get summary line for a report, trying to add reprcrash message."""
+ verbose_word = rep._get_verbose_word(config)
+ pos = _get_pos(config, rep)
+
+ line = f"{verbose_word} {pos}"
+ line_width = wcswidth(line)
+
+ try:
+ # Type ignored intentionally -- possible AttributeError expected.
+ msg = rep.longrepr.reprcrash.message # type: ignore[union-attr]
+ except AttributeError:
+ pass
+ else:
+ available_width = termwidth - line_width
+ msg = _format_trimmed(" - {}", msg, available_width)
+ if msg is not None:
+ line += msg
+
+ return line
+
+
+def _folded_skips(
+ startpath: Path,
+ skipped: Sequence[CollectReport],
+) -> List[Tuple[int, str, Optional[int], str]]:
+ d: Dict[Tuple[str, Optional[int], str], List[CollectReport]] = {}
+ for event in skipped:
+ assert event.longrepr is not None
+ assert isinstance(event.longrepr, tuple), (event, event.longrepr)
+ assert len(event.longrepr) == 3, (event, event.longrepr)
+ fspath, lineno, reason = event.longrepr
+ # For consistency, report all fspaths in relative form.
+ fspath = bestrelpath(startpath, Path(fspath))
+ keywords = getattr(event, "keywords", {})
+ # Fold reports that use a global pytestmark variable.
+ # This is a workaround, because for now we cannot identify the scope of a skip marker.
+ # TODO: Revisit once the scope of marks is fixed.
+ if (
+ event.when == "setup"
+ and "skip" in keywords
+ and "pytestmark" not in keywords
+ ):
+ key: Tuple[str, Optional[int], str] = (fspath, None, reason)
+ else:
+ key = (fspath, lineno, reason)
+ d.setdefault(key, []).append(event)
+ values: List[Tuple[int, str, Optional[int], str]] = []
+ for key, events in d.items():
+ values.append((len(events), *key))
+ return values
+
+
+_color_for_type = {
+ "failed": "red",
+ "error": "red",
+ "warnings": "yellow",
+ "passed": "green",
+}
+_color_for_type_default = "yellow"
+
+
+def pluralize(count: int, noun: str) -> Tuple[int, str]:
+ # No need to pluralize words such as `failed` or `passed`.
+ if noun not in ["error", "warnings", "test"]:
+ return count, noun
+
+ # The `warnings` key is plural. To avoid API breakage, we keep it that way but
+ # set it to singular here so we can determine plurality in the same way as we do
+ # for `error`.
+ noun = noun.replace("warnings", "warning")
+
+ return count, (noun + "s" if count != 1 else noun)
+
+
+def _plugin_nameversions(plugininfo) -> List[str]:
+ values: List[str] = []
+ for plugin, dist in plugininfo:
+ # Gets us name and version!
+ name = "{dist.project_name}-{dist.version}".format(dist=dist)
+ # Questionable convenience, but it keeps things short.
+ if name.startswith("pytest-"):
+ name = name[7:]
+ # We decided to print Python package names; they can have more than one plugin.
+ if name not in values:
+ values.append(name)
+ return values
+
+
+def format_session_duration(seconds: float) -> str:
+ """Format the given seconds in a human readable manner to show in the final summary."""
+ if seconds < 60:
+ return f"{seconds:.2f}s"
+ else:
+ dt = datetime.timedelta(seconds=int(seconds))
+ return f"{seconds:.2f}s ({dt})"
+
+
+def _get_raw_skip_reason(report: TestReport) -> str:
+ """Get the reason string of a skip/xfail/xpass test report.
+
+ The string is just the part given by the user.
+ """
+ if hasattr(report, "wasxfail"):
+ reason = cast(str, report.wasxfail)
+ if reason.startswith("reason: "):
+ reason = reason[len("reason: ") :]
+ return reason
+ else:
+ assert report.skipped
+ assert isinstance(report.longrepr, tuple)
+ _, _, reason = report.longrepr
+ if reason.startswith("Skipped: "):
+ reason = reason[len("Skipped: ") :]
+ elif reason == "Skipped":
+ reason = ""
+ return reason
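
As a rough illustration of how the summary helpers above behave, the following sketch assumes pytest is importable and pokes at the private _pytest.terminal helpers (_format_trimmed, format_session_duration, pluralize); it is not part of the vendored source:

    from _pytest.terminal import _format_trimmed, format_session_duration, pluralize

    # _format_trimmed() ellipsizes the message so the formatted result fits the width.
    assert _format_trimmed(" ({})", "short reason", 80) == " (short reason)"
    trimmed = _format_trimmed(" ({})", "a very long skip reason indeed", 16)
    assert trimmed is not None and trimmed.endswith("...)")

    # format_session_duration() adds an H:MM:SS form once a session passes one minute.
    assert format_session_duration(12.34) == "12.34s"
    assert format_session_duration(75.0) == "75.00s (0:01:15)"

    # pluralize() only touches nouns that need it ("error", "warnings", "test").
    assert pluralize(2, "error") == (2, "errors")
    assert pluralize(3, "failed") == (3, "failed")
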
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/threadexception.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/threadexception.py
new file mode 100644
index 0000000000..43341e739a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/threadexception.py
@@ -0,0 +1,88 @@
+import threading
+import traceback
+import warnings
+from types import TracebackType
+from typing import Any
+from typing import Callable
+from typing import Generator
+from typing import Optional
+from typing import Type
+
+import pytest
+
+
+# Copied from cpython/Lib/test/support/threading_helper.py, with modifications.
+class catch_threading_exception:
+ """Context manager catching threading.Thread exception using
+ threading.excepthook.
+
+ Storing exc_value using a custom hook can create a reference cycle. The
+ reference cycle is broken explicitly when the context manager exits.
+
+ Storing thread using a custom hook can resurrect it if it is set to an
+ object which is being finalized. Exiting the context manager clears the
+ stored object.
+
+ Usage:
+ with threading_helper.catch_threading_exception() as cm:
+ # code spawning a thread which raises an exception
+ ...
+ # check the thread exception: use cm.args
+ ...
+ # cm.args attribute no longer exists at this point
+ # (to break a reference cycle)
+ """
+
+ def __init__(self) -> None:
+ self.args: Optional["threading.ExceptHookArgs"] = None
+ self._old_hook: Optional[Callable[["threading.ExceptHookArgs"], Any]] = None
+
+ def _hook(self, args: "threading.ExceptHookArgs") -> None:
+ self.args = args
+
+ def __enter__(self) -> "catch_threading_exception":
+ self._old_hook = threading.excepthook
+ threading.excepthook = self._hook
+ return self
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ assert self._old_hook is not None
+ threading.excepthook = self._old_hook
+ self._old_hook = None
+ del self.args
+
+
+def thread_exception_runtest_hook() -> Generator[None, None, None]:
+ with catch_threading_exception() as cm:
+ yield
+ if cm.args:
+ thread_name = "<unknown>" if cm.args.thread is None else cm.args.thread.name
+ msg = f"Exception in thread {thread_name}\n\n"
+ msg += "".join(
+ traceback.format_exception(
+ cm.args.exc_type,
+ cm.args.exc_value,
+ cm.args.exc_traceback,
+ )
+ )
+ warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))
+
+
+@pytest.hookimpl(hookwrapper=True, trylast=True)
+def pytest_runtest_setup() -> Generator[None, None, None]:
+ yield from thread_exception_runtest_hook()
+
+
+@pytest.hookimpl(hookwrapper=True, tryfirst=True)
+def pytest_runtest_call() -> Generator[None, None, None]:
+ yield from thread_exception_runtest_hook()
+
+
+@pytest.hookimpl(hookwrapper=True, tryfirst=True)
+def pytest_runtest_teardown() -> Generator[None, None, None]:
+ yield from thread_exception_runtest_hook()
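
The context manager above can also be exercised directly, outside the pytest hooks. A minimal sketch, assuming Python 3.8+ (where threading.excepthook exists); not part of the vendored source:

    import threading
    from _pytest.threadexception import catch_threading_exception

    def boom() -> None:
        raise RuntimeError("crash in worker thread")

    with catch_threading_exception() as cm:
        t = threading.Thread(target=boom, name="worker")
        t.start()
        t.join()
        # The exception did not propagate; the installed excepthook recorded it.
        assert cm.args is not None
        assert cm.args.exc_type is RuntimeError
        assert cm.args.thread is t
    # On exit the stored args are deleted to break the reference cycle.
    assert not hasattr(cm, "args")
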
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/timing.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/timing.py
new file mode 100644
index 0000000000..925163a585
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/timing.py
@@ -0,0 +1,12 @@
+"""Indirection for time functions.
+
+We intentionally grab some "time" functions internally so that tests which mock "time" do not
+affect pytest's own runtime information (issue #185).
+
+Fixture "mock_timing" also interacts with this module for pytest's own tests.
+"""
+from time import perf_counter
+from time import sleep
+from time import time
+
+__all__ = ["perf_counter", "sleep", "time"]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/tmpdir.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/tmpdir.py
new file mode 100644
index 0000000000..f901fd5727
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/tmpdir.py
@@ -0,0 +1,211 @@
+"""Support for providing temporary directories to test functions."""
+import os
+import re
+import sys
+import tempfile
+from pathlib import Path
+from typing import Optional
+
+import attr
+
+from .pathlib import LOCK_TIMEOUT
+from .pathlib import make_numbered_dir
+from .pathlib import make_numbered_dir_with_cleanup
+from .pathlib import rm_rf
+from _pytest.compat import final
+from _pytest.config import Config
+from _pytest.deprecated import check_ispytest
+from _pytest.fixtures import fixture
+from _pytest.fixtures import FixtureRequest
+from _pytest.monkeypatch import MonkeyPatch
+
+
+@final
+@attr.s(init=False)
+class TempPathFactory:
+ """Factory for temporary directories under the common base temp directory.
+
+ The base directory can be configured using the ``--basetemp`` option.
+ """
+
+ _given_basetemp = attr.ib(type=Optional[Path])
+ _trace = attr.ib()
+ _basetemp = attr.ib(type=Optional[Path])
+
+ def __init__(
+ self,
+ given_basetemp: Optional[Path],
+ trace,
+ basetemp: Optional[Path] = None,
+ *,
+ _ispytest: bool = False,
+ ) -> None:
+ check_ispytest(_ispytest)
+ if given_basetemp is None:
+ self._given_basetemp = None
+ else:
+ # Use os.path.abspath() to get absolute path instead of resolve() as it
+ # does not behave the same on all platforms (see #4427).
+ # Path.absolute() exists, but it is not public (see https://bugs.python.org/issue25012).
+ self._given_basetemp = Path(os.path.abspath(str(given_basetemp)))
+ self._trace = trace
+ self._basetemp = basetemp
+
+ @classmethod
+ def from_config(
+ cls,
+ config: Config,
+ *,
+ _ispytest: bool = False,
+ ) -> "TempPathFactory":
+ """Create a factory according to pytest configuration.
+
+ :meta private:
+ """
+ check_ispytest(_ispytest)
+ return cls(
+ given_basetemp=config.option.basetemp,
+ trace=config.trace.get("tmpdir"),
+ _ispytest=True,
+ )
+
+ def _ensure_relative_to_basetemp(self, basename: str) -> str:
+ basename = os.path.normpath(basename)
+ if (self.getbasetemp() / basename).resolve().parent != self.getbasetemp():
+ raise ValueError(f"{basename} is not a normalized and relative path")
+ return basename
+
+ def mktemp(self, basename: str, numbered: bool = True) -> Path:
+ """Create a new temporary directory managed by the factory.
+
+ :param basename:
+ Directory base name, must be a relative path.
+
+ :param numbered:
+ If ``True``, ensure the directory is unique by adding a numbered
+ suffix greater than any existing one: ``basename="foo-"`` and ``numbered=True``
+ means that this function will create directories named ``"foo-0"``,
+ ``"foo-1"``, ``"foo-2"`` and so on.
+
+ :returns:
+ The path to the new directory.
+ """
+ basename = self._ensure_relative_to_basetemp(basename)
+ if not numbered:
+ p = self.getbasetemp().joinpath(basename)
+ p.mkdir(mode=0o700)
+ else:
+ p = make_numbered_dir(root=self.getbasetemp(), prefix=basename, mode=0o700)
+ self._trace("mktemp", p)
+ return p
+
+ def getbasetemp(self) -> Path:
+ """Return the base temporary directory, creating it if needed."""
+ if self._basetemp is not None:
+ return self._basetemp
+
+ if self._given_basetemp is not None:
+ basetemp = self._given_basetemp
+ if basetemp.exists():
+ rm_rf(basetemp)
+ basetemp.mkdir(mode=0o700)
+ basetemp = basetemp.resolve()
+ else:
+ from_env = os.environ.get("PYTEST_DEBUG_TEMPROOT")
+ temproot = Path(from_env or tempfile.gettempdir()).resolve()
+ user = get_user() or "unknown"
+ # use a sub-directory in the temproot to speed-up
+ # make_numbered_dir() call
+ rootdir = temproot.joinpath(f"pytest-of-{user}")
+ try:
+ rootdir.mkdir(mode=0o700, exist_ok=True)
+ except OSError:
+ # getuser() likely returned characters that are illegal for this platform; fall back to the "unknown" user.
+ rootdir = temproot.joinpath("pytest-of-unknown")
+ rootdir.mkdir(mode=0o700, exist_ok=True)
+ # Because we use exist_ok=True with a predictable name, make sure
+ # we are the owners, to prevent any funny business (on unix, where
+ # temproot is usually shared).
+ # Also, to keep things private, fixup any world-readable temp
+ # rootdir's permissions. Historically 0o755 was used, so we can't
+ # just error out on this, at least for a while.
+ if sys.platform != "win32":
+ uid = os.getuid()
+ rootdir_stat = rootdir.stat()
+ # getuid shouldn't fail, but cpython defines such a case.
+ # Let's hope for the best.
+ if uid != -1:
+ if rootdir_stat.st_uid != uid:
+ raise OSError(
+ f"The temporary directory {rootdir} is not owned by the current user. "
+ "Fix this and try again."
+ )
+ if (rootdir_stat.st_mode & 0o077) != 0:
+ os.chmod(rootdir, rootdir_stat.st_mode & ~0o077)
+ basetemp = make_numbered_dir_with_cleanup(
+ prefix="pytest-",
+ root=rootdir,
+ keep=3,
+ lock_timeout=LOCK_TIMEOUT,
+ mode=0o700,
+ )
+ assert basetemp is not None, basetemp
+ self._basetemp = basetemp
+ self._trace("new basetemp", basetemp)
+ return basetemp
+
+
+def get_user() -> Optional[str]:
+ """Return the current user name, or None if getuser() does not work
+ in the current environment (see #1010)."""
+ import getpass
+
+ try:
+ return getpass.getuser()
+ except (ImportError, KeyError):
+ return None
+
+
+def pytest_configure(config: Config) -> None:
+ """Create a TempPathFactory and attach it to the config object.
+
+ This is to comply with existing plugins which expect the handler to be
+ available at pytest_configure time, but ideally should be moved entirely
+ to the tmp_path_factory session fixture.
+ """
+ mp = MonkeyPatch()
+ config.add_cleanup(mp.undo)
+ _tmp_path_factory = TempPathFactory.from_config(config, _ispytest=True)
+ mp.setattr(config, "_tmp_path_factory", _tmp_path_factory, raising=False)
+
+
+@fixture(scope="session")
+def tmp_path_factory(request: FixtureRequest) -> TempPathFactory:
+ """Return a :class:`pytest.TempPathFactory` instance for the test session."""
+ # Set dynamically by pytest_configure() above.
+ return request.config._tmp_path_factory # type: ignore
+
+
+def _mk_tmp(request: FixtureRequest, factory: TempPathFactory) -> Path:
+ name = request.node.name
+ name = re.sub(r"[\W]", "_", name)
+ MAXVAL = 30
+ name = name[:MAXVAL]
+ return factory.mktemp(name, numbered=True)
+
+
+@fixture
+def tmp_path(request: FixtureRequest, tmp_path_factory: TempPathFactory) -> Path:
+ """Return a temporary directory path object which is unique to each test
+ function invocation, created as a sub directory of the base temporary
+ directory.
+
+ By default, a new base temporary directory is created each test session,
+ and old bases are removed after 3 sessions, to aid in debugging. If
+ ``--basetemp`` is used then it is cleared each session. See :ref:`base
+ temporary directory`.
+
+ The returned object is a :class:`pathlib.Path` object.
+ """
+
+ return _mk_tmp(request, tmp_path_factory)
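
A minimal sketch of the fixtures above in use; the test names are hypothetical and the code is not part of the vendored source:

    def test_writes_into_unique_dir(tmp_path):
        # tmp_path is a pathlib.Path unique to this test invocation.
        target = tmp_path / "output.txt"
        target.write_text("hello")
        assert target.read_text() == "hello"

    def test_session_scoped_factory(tmp_path_factory):
        # mktemp() adds a numbered suffix, so repeated calls never collide.
        first = tmp_path_factory.mktemp("data")
        second = tmp_path_factory.mktemp("data")
        assert first != second and first.name.startswith("data")
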
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/unittest.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/unittest.py
new file mode 100644
index 0000000000..0315168b04
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/unittest.py
@@ -0,0 +1,414 @@
+"""Discover and run std-library "unittest" style tests."""
+import sys
+import traceback
+import types
+from typing import Any
+from typing import Callable
+from typing import Generator
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+import _pytest._code
+import pytest
+from _pytest.compat import getimfunc
+from _pytest.compat import is_async_function
+from _pytest.config import hookimpl
+from _pytest.fixtures import FixtureRequest
+from _pytest.nodes import Collector
+from _pytest.nodes import Item
+from _pytest.outcomes import exit
+from _pytest.outcomes import fail
+from _pytest.outcomes import skip
+from _pytest.outcomes import xfail
+from _pytest.python import Class
+from _pytest.python import Function
+from _pytest.python import PyCollector
+from _pytest.runner import CallInfo
+from _pytest.scope import Scope
+
+if TYPE_CHECKING:
+ import unittest
+ import twisted.trial.unittest
+
+ _SysExcInfoType = Union[
+ Tuple[Type[BaseException], BaseException, types.TracebackType],
+ Tuple[None, None, None],
+ ]
+
+
+def pytest_pycollect_makeitem(
+ collector: PyCollector, name: str, obj: object
+) -> Optional["UnitTestCase"]:
+ # Has unittest been imported and is obj a subclass of its TestCase?
+ try:
+ ut = sys.modules["unittest"]
+ # Type ignored because `ut` is an opaque module.
+ if not issubclass(obj, ut.TestCase): # type: ignore
+ return None
+ except Exception:
+ return None
+ # Yes, so let's collect it.
+ item: UnitTestCase = UnitTestCase.from_parent(collector, name=name, obj=obj)
+ return item
+
+
+class UnitTestCase(Class):
+ # Marker for fixturemanager.getfixtureinfo()
+ # to declare that our children do not support funcargs.
+ nofuncargs = True
+
+ def collect(self) -> Iterable[Union[Item, Collector]]:
+ from unittest import TestLoader
+
+ cls = self.obj
+ if not getattr(cls, "__test__", True):
+ return
+
+ skipped = _is_skipped(cls)
+ if not skipped:
+ self._inject_setup_teardown_fixtures(cls)
+ self._inject_setup_class_fixture()
+
+ self.session._fixturemanager.parsefactories(self, unittest=True)
+ loader = TestLoader()
+ foundsomething = False
+ for name in loader.getTestCaseNames(self.obj):
+ x = getattr(self.obj, name)
+ if not getattr(x, "__test__", True):
+ continue
+ funcobj = getimfunc(x)
+ yield TestCaseFunction.from_parent(self, name=name, callobj=funcobj)
+ foundsomething = True
+
+ if not foundsomething:
+ runtest = getattr(self.obj, "runTest", None)
+ if runtest is not None:
+ ut = sys.modules.get("twisted.trial.unittest", None)
+ # Type ignored because `ut` is an opaque module.
+ if ut is None or runtest != ut.TestCase.runTest: # type: ignore
+ yield TestCaseFunction.from_parent(self, name="runTest")
+
+ def _inject_setup_teardown_fixtures(self, cls: type) -> None:
+ """Injects a hidden auto-use fixture to invoke setUpClass/setup_method and corresponding
+ teardown functions (#517)."""
+ class_fixture = _make_xunit_fixture(
+ cls,
+ "setUpClass",
+ "tearDownClass",
+ "doClassCleanups",
+ scope=Scope.Class,
+ pass_self=False,
+ )
+ if class_fixture:
+ cls.__pytest_class_setup = class_fixture # type: ignore[attr-defined]
+
+ method_fixture = _make_xunit_fixture(
+ cls,
+ "setup_method",
+ "teardown_method",
+ None,
+ scope=Scope.Function,
+ pass_self=True,
+ )
+ if method_fixture:
+ cls.__pytest_method_setup = method_fixture # type: ignore[attr-defined]
+
+
+def _make_xunit_fixture(
+ obj: type,
+ setup_name: str,
+ teardown_name: str,
+ cleanup_name: Optional[str],
+ scope: Scope,
+ pass_self: bool,
+):
+ setup = getattr(obj, setup_name, None)
+ teardown = getattr(obj, teardown_name, None)
+ if setup is None and teardown is None:
+ return None
+
+ if cleanup_name:
+ cleanup = getattr(obj, cleanup_name, lambda *args: None)
+ else:
+
+ def cleanup(*args):
+ pass
+
+ @pytest.fixture(
+ scope=scope.value,
+ autouse=True,
+ # Use a unique name to speed up lookup.
+ name=f"_unittest_{setup_name}_fixture_{obj.__qualname__}",
+ )
+ def fixture(self, request: FixtureRequest) -> Generator[None, None, None]:
+ if _is_skipped(self):
+ reason = self.__unittest_skip_why__
+ raise pytest.skip.Exception(reason, _use_item_location=True)
+ if setup is not None:
+ try:
+ if pass_self:
+ setup(self, request.function)
+ else:
+ setup()
+ # unittest does not call the cleanup function for every BaseException, so we
+ # follow suit here.
+ except Exception:
+ if pass_self:
+ cleanup(self)
+ else:
+ cleanup()
+
+ raise
+ yield
+ try:
+ if teardown is not None:
+ if pass_self:
+ teardown(self, request.function)
+ else:
+ teardown()
+ finally:
+ if pass_self:
+ cleanup(self)
+ else:
+ cleanup()
+
+ return fixture
+
+
+class TestCaseFunction(Function):
+ nofuncargs = True
+ _excinfo: Optional[List[_pytest._code.ExceptionInfo[BaseException]]] = None
+ _testcase: Optional["unittest.TestCase"] = None
+
+ def _getobj(self):
+ assert self.parent is not None
+ # Unlike a regular Function in a Class, where `item.obj` returns
+ # a *bound* method (attached to an instance), TestCaseFunction's
+ # `obj` returns an *unbound* method (not attached to an instance).
+ # This inconsistency is probably not desirable, but needs some
+ # consideration before changing.
+ return getattr(self.parent.obj, self.originalname) # type: ignore[attr-defined]
+
+ def setup(self) -> None:
+ # A bound method to be called during teardown() if set (see 'runtest()').
+ self._explicit_tearDown: Optional[Callable[[], None]] = None
+ assert self.parent is not None
+ self._testcase = self.parent.obj(self.name) # type: ignore[attr-defined]
+ self._obj = getattr(self._testcase, self.name)
+ if hasattr(self, "_request"):
+ self._request._fillfixtures()
+
+ def teardown(self) -> None:
+ if self._explicit_tearDown is not None:
+ self._explicit_tearDown()
+ self._explicit_tearDown = None
+ self._testcase = None
+ self._obj = None
+
+ def startTest(self, testcase: "unittest.TestCase") -> None:
+ pass
+
+ def _addexcinfo(self, rawexcinfo: "_SysExcInfoType") -> None:
+ # Unwrap potential exception info (see twisted trial support below).
+ rawexcinfo = getattr(rawexcinfo, "_rawexcinfo", rawexcinfo)
+ try:
+ excinfo = _pytest._code.ExceptionInfo[BaseException].from_exc_info(rawexcinfo) # type: ignore[arg-type]
+ # Invoke the attributes to trigger storing the traceback;
+ # trial causes some issues there.
+ excinfo.value
+ excinfo.traceback
+ except TypeError:
+ try:
+ try:
+ values = traceback.format_exception(*rawexcinfo)
+ values.insert(
+ 0,
+ "NOTE: Incompatible Exception Representation, "
+ "displaying natively:\n\n",
+ )
+ fail("".join(values), pytrace=False)
+ except (fail.Exception, KeyboardInterrupt):
+ raise
+ except BaseException:
+ fail(
+ "ERROR: Unknown Incompatible Exception "
+ "representation:\n%r" % (rawexcinfo,),
+ pytrace=False,
+ )
+ except KeyboardInterrupt:
+ raise
+ except fail.Exception:
+ excinfo = _pytest._code.ExceptionInfo.from_current()
+ self.__dict__.setdefault("_excinfo", []).append(excinfo)
+
+ def addError(
+ self, testcase: "unittest.TestCase", rawexcinfo: "_SysExcInfoType"
+ ) -> None:
+ try:
+ if isinstance(rawexcinfo[1], exit.Exception):
+ exit(rawexcinfo[1].msg)
+ except TypeError:
+ pass
+ self._addexcinfo(rawexcinfo)
+
+ def addFailure(
+ self, testcase: "unittest.TestCase", rawexcinfo: "_SysExcInfoType"
+ ) -> None:
+ self._addexcinfo(rawexcinfo)
+
+ def addSkip(self, testcase: "unittest.TestCase", reason: str) -> None:
+ try:
+ raise pytest.skip.Exception(reason, _use_item_location=True)
+ except skip.Exception:
+ self._addexcinfo(sys.exc_info())
+
+ def addExpectedFailure(
+ self,
+ testcase: "unittest.TestCase",
+ rawexcinfo: "_SysExcInfoType",
+ reason: str = "",
+ ) -> None:
+ try:
+ xfail(str(reason))
+ except xfail.Exception:
+ self._addexcinfo(sys.exc_info())
+
+ def addUnexpectedSuccess(
+ self,
+ testcase: "unittest.TestCase",
+ reason: Optional["twisted.trial.unittest.Todo"] = None,
+ ) -> None:
+ msg = "Unexpected success"
+ if reason:
+ msg += f": {reason.reason}"
+ # Preserve unittest behaviour - fail the test. Explicitly not an XPASS.
+ try:
+ fail(msg, pytrace=False)
+ except fail.Exception:
+ self._addexcinfo(sys.exc_info())
+
+ def addSuccess(self, testcase: "unittest.TestCase") -> None:
+ pass
+
+ def stopTest(self, testcase: "unittest.TestCase") -> None:
+ pass
+
+ def runtest(self) -> None:
+ from _pytest.debugging import maybe_wrap_pytest_function_for_tracing
+
+ assert self._testcase is not None
+
+ maybe_wrap_pytest_function_for_tracing(self)
+
+ # Let the unittest framework handle async functions.
+ if is_async_function(self.obj):
+ # Type ignored because self acts as the TestResult, but is not actually one.
+ self._testcase(result=self) # type: ignore[arg-type]
+ else:
+ # When --pdb is given, we want to postpone calling tearDown() otherwise
+ # when entering the pdb prompt, tearDown() would have probably cleaned up
+ # instance variables, which makes it difficult to debug.
+ # Arguably we could always postpone tearDown(), but this changes the moment where the
+ # TestCase instance interacts with the results object, so better to only do it
+ # when absolutely needed.
+ if self.config.getoption("usepdb") and not _is_skipped(self.obj):
+ self._explicit_tearDown = self._testcase.tearDown
+ setattr(self._testcase, "tearDown", lambda *args: None)
+
+ # We need to update the actual bound method with self.obj, because
+ # wrap_pytest_function_for_tracing replaces self.obj by a wrapper.
+ setattr(self._testcase, self.name, self.obj)
+ try:
+ self._testcase(result=self) # type: ignore[arg-type]
+ finally:
+ delattr(self._testcase, self.name)
+
+ def _prunetraceback(
+ self, excinfo: _pytest._code.ExceptionInfo[BaseException]
+ ) -> None:
+ super()._prunetraceback(excinfo)
+ traceback = excinfo.traceback.filter(
+ lambda x: not x.frame.f_globals.get("__unittest")
+ )
+ if traceback:
+ excinfo.traceback = traceback
+
+
+@hookimpl(tryfirst=True)
+def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> None:
+ if isinstance(item, TestCaseFunction):
+ if item._excinfo:
+ call.excinfo = item._excinfo.pop(0)
+ try:
+ del call.result
+ except AttributeError:
+ pass
+
+ # Convert unittest.SkipTest to pytest.skip.
+ # This is actually only needed for nose, which reuses unittest.SkipTest for
+ # its own nose.SkipTest. For unittest TestCases, SkipTest is already
+ # handled internally, and doesn't reach here.
+ unittest = sys.modules.get("unittest")
+ if (
+ unittest
+ and call.excinfo
+ and isinstance(call.excinfo.value, unittest.SkipTest) # type: ignore[attr-defined]
+ ):
+ excinfo = call.excinfo
+ call2 = CallInfo[None].from_call(
+ lambda: pytest.skip(str(excinfo.value)), call.when
+ )
+ call.excinfo = call2.excinfo
+
+
+# Twisted trial support.
+
+
+@hookimpl(hookwrapper=True)
+def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:
+ if isinstance(item, TestCaseFunction) and "twisted.trial.unittest" in sys.modules:
+ ut: Any = sys.modules["twisted.python.failure"]
+ Failure__init__ = ut.Failure.__init__
+ check_testcase_implements_trial_reporter()
+
+ def excstore(
+ self, exc_value=None, exc_type=None, exc_tb=None, captureVars=None
+ ):
+ if exc_value is None:
+ self._rawexcinfo = sys.exc_info()
+ else:
+ if exc_type is None:
+ exc_type = type(exc_value)
+ self._rawexcinfo = (exc_type, exc_value, exc_tb)
+ try:
+ Failure__init__(
+ self, exc_value, exc_type, exc_tb, captureVars=captureVars
+ )
+ except TypeError:
+ Failure__init__(self, exc_value, exc_type, exc_tb)
+
+ ut.Failure.__init__ = excstore
+ yield
+ ut.Failure.__init__ = Failure__init__
+ else:
+ yield
+
+
+def check_testcase_implements_trial_reporter(done: List[int] = []) -> None:
+ if done:
+ return
+ from zope.interface import classImplements
+ from twisted.trial.itrial import IReporter
+
+ classImplements(TestCaseFunction, IReporter)
+ done.append(1)
+
+
+def _is_skipped(obj) -> bool:
+ """Return True if the given object has been marked with @unittest.skip."""
+ return bool(getattr(obj, "__unittest_skip__", False))
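
A minimal sketch of the kind of std-library test class the collection code above picks up; the class and test names are hypothetical and the code is not part of the vendored source:

    import unittest

    class TestMath(unittest.TestCase):
        def setUp(self) -> None:
            self.values = [1, 2, 3]

        def test_sum(self) -> None:
            self.assertEqual(sum(self.values), 6)

        @unittest.skip("routed through addSkip() and reported as a pytest skip")
        def test_skipped(self) -> None:
            self.fail("never runs")

Running pytest against such a module collects TestMath through UnitTestCase and yields one TestCaseFunction item per test method; the skipped method is routed through addSkip().
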
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/unraisableexception.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/unraisableexception.py
new file mode 100644
index 0000000000..fcb5d8237c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/unraisableexception.py
@@ -0,0 +1,93 @@
+import sys
+import traceback
+import warnings
+from types import TracebackType
+from typing import Any
+from typing import Callable
+from typing import Generator
+from typing import Optional
+from typing import Type
+
+import pytest
+
+
+# Copied from cpython/Lib/test/support/__init__.py, with modifications.
+class catch_unraisable_exception:
+ """Context manager catching unraisable exception using sys.unraisablehook.
+
+ Storing the exception value (cm.unraisable.exc_value) creates a reference
+ cycle. The reference cycle is broken explicitly when the context manager
+ exits.
+
+ Storing the object (cm.unraisable.object) can resurrect it if it is set to
+ an object which is being finalized. Exiting the context manager clears the
+ stored object.
+
+ Usage:
+ with catch_unraisable_exception() as cm:
+ # code creating an "unraisable exception"
+ ...
+ # check the unraisable exception: use cm.unraisable
+ ...
+ # cm.unraisable attribute no longer exists at this point
+ # (to break a reference cycle)
+ """
+
+ def __init__(self) -> None:
+ self.unraisable: Optional["sys.UnraisableHookArgs"] = None
+ self._old_hook: Optional[Callable[["sys.UnraisableHookArgs"], Any]] = None
+
+ def _hook(self, unraisable: "sys.UnraisableHookArgs") -> None:
+ # Storing unraisable.object can resurrect an object which is being
+ # finalized. Storing unraisable.exc_value creates a reference cycle.
+ self.unraisable = unraisable
+
+ def __enter__(self) -> "catch_unraisable_exception":
+ self._old_hook = sys.unraisablehook
+ sys.unraisablehook = self._hook
+ return self
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ assert self._old_hook is not None
+ sys.unraisablehook = self._old_hook
+ self._old_hook = None
+ del self.unraisable
+
+
+def unraisable_exception_runtest_hook() -> Generator[None, None, None]:
+ with catch_unraisable_exception() as cm:
+ yield
+ if cm.unraisable:
+ if cm.unraisable.err_msg is not None:
+ err_msg = cm.unraisable.err_msg
+ else:
+ err_msg = "Exception ignored in"
+ msg = f"{err_msg}: {cm.unraisable.object!r}\n\n"
+ msg += "".join(
+ traceback.format_exception(
+ cm.unraisable.exc_type,
+ cm.unraisable.exc_value,
+ cm.unraisable.exc_traceback,
+ )
+ )
+ warnings.warn(pytest.PytestUnraisableExceptionWarning(msg))
+
+
+@pytest.hookimpl(hookwrapper=True, tryfirst=True)
+def pytest_runtest_setup() -> Generator[None, None, None]:
+ yield from unraisable_exception_runtest_hook()
+
+
+@pytest.hookimpl(hookwrapper=True, tryfirst=True)
+def pytest_runtest_call() -> Generator[None, None, None]:
+ yield from unraisable_exception_runtest_hook()
+
+
+@pytest.hookimpl(hookwrapper=True, tryfirst=True)
+def pytest_runtest_teardown() -> Generator[None, None, None]:
+ yield from unraisable_exception_runtest_hook()
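
A minimal sketch of a test that trips the hook wrappers above; names are hypothetical and the code is not part of the vendored source. The __del__ failure cannot propagate, so it is captured during the call phase and re-reported afterwards as a PytestUnraisableExceptionWarning, which a "filterwarnings = error::pytest.PytestUnraisableExceptionWarning" ini entry can promote to a hard failure:

    import gc

    class Leaky:
        def __del__(self) -> None:
            raise ValueError("boom during finalization")

    def test_creates_an_unraisable_exception() -> None:
        Leaky()       # the instance is collected immediately...
        gc.collect()  # ...and __del__ raises, which cannot propagate normally
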
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/warning_types.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/warning_types.py
new file mode 100644
index 0000000000..2a97a31978
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/warning_types.py
@@ -0,0 +1,145 @@
+from typing import Any
+from typing import Generic
+from typing import Type
+from typing import TypeVar
+
+import attr
+
+from _pytest.compat import final
+
+
+class PytestWarning(UserWarning):
+ """Base class for all warnings emitted by pytest."""
+
+ __module__ = "pytest"
+
+
+@final
+class PytestAssertRewriteWarning(PytestWarning):
+ """Warning emitted by the pytest assert rewrite module."""
+
+ __module__ = "pytest"
+
+
+@final
+class PytestCacheWarning(PytestWarning):
+ """Warning emitted by the cache plugin in various situations."""
+
+ __module__ = "pytest"
+
+
+@final
+class PytestConfigWarning(PytestWarning):
+ """Warning emitted for configuration issues."""
+
+ __module__ = "pytest"
+
+
+@final
+class PytestCollectionWarning(PytestWarning):
+ """Warning emitted when pytest is not able to collect a file or symbol in a module."""
+
+ __module__ = "pytest"
+
+
+class PytestDeprecationWarning(PytestWarning, DeprecationWarning):
+ """Warning class for features that will be removed in a future version."""
+
+ __module__ = "pytest"
+
+
+@final
+class PytestRemovedIn7Warning(PytestDeprecationWarning):
+ """Warning class for features that will be removed in pytest 7."""
+
+ __module__ = "pytest"
+
+
+@final
+class PytestRemovedIn8Warning(PytestDeprecationWarning):
+ """Warning class for features that will be removed in pytest 8."""
+
+ __module__ = "pytest"
+
+
+@final
+class PytestExperimentalApiWarning(PytestWarning, FutureWarning):
+ """Warning category used to denote experiments in pytest.
+
+ Use sparingly as the API might change or even be removed completely in a
+ future version.
+ """
+
+ __module__ = "pytest"
+
+ @classmethod
+ def simple(cls, apiname: str) -> "PytestExperimentalApiWarning":
+ return cls(
+ "{apiname} is an experimental api that may change over time".format(
+ apiname=apiname
+ )
+ )
+
+
+@final
+class PytestUnhandledCoroutineWarning(PytestWarning):
+ """Warning emitted for an unhandled coroutine.
+
+ A coroutine was encountered when collecting test functions, but was not
+ handled by any async-aware plugin.
+ Coroutine test functions are not natively supported.
+ """
+
+ __module__ = "pytest"
+
+
+@final
+class PytestUnknownMarkWarning(PytestWarning):
+ """Warning emitted on use of unknown markers.
+
+ See :ref:`mark` for details.
+ """
+
+ __module__ = "pytest"
+
+
+@final
+class PytestUnraisableExceptionWarning(PytestWarning):
+ """An unraisable exception was reported.
+
+ Unraisable exceptions are exceptions raised in :meth:`__del__ <object.__del__>`
+ implementations and similar situations when the exception cannot be raised
+ as normal.
+ """
+
+ __module__ = "pytest"
+
+
+@final
+class PytestUnhandledThreadExceptionWarning(PytestWarning):
+ """An unhandled exception occurred in a :class:`~threading.Thread`.
+
+ Such exceptions don't propagate normally.
+ """
+
+ __module__ = "pytest"
+
+
+_W = TypeVar("_W", bound=PytestWarning)
+
+
+@final
+@attr.s(auto_attribs=True)
+class UnformattedWarning(Generic[_W]):
+ """A warning meant to be formatted during runtime.
+
+ This is used to hold warnings that need to format their message at runtime,
+ as opposed to a direct message.
+ """
+
+ category: Type["_W"]
+ template: str
+
+ def format(self, **kwargs: Any) -> _W:
+ """Return an instance of the warning category, formatted with given kwargs."""
+ return self.category(self.template.format(**kwargs))
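
# Editor's note: illustrative sketch only, not part of the vendored patch; it
# assumes a pytest that ships the module above. UnformattedWarning defers
# message formatting until the warning is actually emitted.
import warnings

import pytest
from _pytest.warning_types import UnformattedWarning

# Hypothetical deferred warning; the template is filled in at emit time.
UNKNOWN_THING = UnformattedWarning(
    pytest.PytestWarning, "unknown thing {name!r}, did you mean {suggestion!r}?"
)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    warnings.warn(UNKNOWN_THING.format(name="parms", suggestion="params"))

assert isinstance(caught[0].message, pytest.PytestWarning)
assert "did you mean 'params'" in str(caught[0].message)
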
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/warnings.py b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/warnings.py
new file mode 100644
index 0000000000..c0c946cbde
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/_pytest/warnings.py
@@ -0,0 +1,141 @@
+import sys
+import warnings
+from contextlib import contextmanager
+from typing import Generator
+from typing import Optional
+from typing import TYPE_CHECKING
+
+import pytest
+from _pytest.config import apply_warning_filters
+from _pytest.config import Config
+from _pytest.config import parse_warning_filter
+from _pytest.main import Session
+from _pytest.nodes import Item
+from _pytest.terminal import TerminalReporter
+
+if TYPE_CHECKING:
+ from typing_extensions import Literal
+
+
+def pytest_configure(config: Config) -> None:
+ config.addinivalue_line(
+ "markers",
+ "filterwarnings(warning): add a warning filter to the given test. "
+ "see https://docs.pytest.org/en/stable/how-to/capture-warnings.html#pytest-mark-filterwarnings ",
+ )
+
+
+@contextmanager
+def catch_warnings_for_item(
+ config: Config,
+ ihook,
+ when: "Literal['config', 'collect', 'runtest']",
+ item: Optional[Item],
+) -> Generator[None, None, None]:
+ """Context manager that catches warnings generated in the contained execution block.
+
+ ``item`` can be None if we are not in the context of an item execution.
+
+ Each warning captured triggers the ``pytest_warning_recorded`` hook.
+ """
+ config_filters = config.getini("filterwarnings")
+ cmdline_filters = config.known_args_namespace.pythonwarnings or []
+ with warnings.catch_warnings(record=True) as log:
+ # mypy can't infer that record=True means log is not None; help it.
+ assert log is not None
+
+ if not sys.warnoptions:
+ # If user is not explicitly configuring warning filters, show deprecation warnings by default (#2908).
+ warnings.filterwarnings("always", category=DeprecationWarning)
+ warnings.filterwarnings("always", category=PendingDeprecationWarning)
+
+ warnings.filterwarnings("error", category=pytest.PytestRemovedIn7Warning)
+
+ apply_warning_filters(config_filters, cmdline_filters)
+
+ # apply filters from "filterwarnings" marks
+ nodeid = "" if item is None else item.nodeid
+ if item is not None:
+ for mark in item.iter_markers(name="filterwarnings"):
+ for arg in mark.args:
+ warnings.filterwarnings(*parse_warning_filter(arg, escape=False))
+
+ yield
+
+ for warning_message in log:
+ ihook.pytest_warning_captured.call_historic(
+ kwargs=dict(
+ warning_message=warning_message,
+ when=when,
+ item=item,
+ location=None,
+ )
+ )
+ ihook.pytest_warning_recorded.call_historic(
+ kwargs=dict(
+ warning_message=warning_message,
+ nodeid=nodeid,
+ when=when,
+ location=None,
+ )
+ )
+
+
+def warning_record_to_str(warning_message: warnings.WarningMessage) -> str:
+ """Convert a warnings.WarningMessage to a string."""
+ warn_msg = warning_message.message
+ msg = warnings.formatwarning(
+ str(warn_msg),
+ warning_message.category,
+ warning_message.filename,
+ warning_message.lineno,
+ warning_message.line,
+ )
+ return msg
+
+
+@pytest.hookimpl(hookwrapper=True, tryfirst=True)
+def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:
+ with catch_warnings_for_item(
+ config=item.config, ihook=item.ihook, when="runtest", item=item
+ ):
+ yield
+
+
+@pytest.hookimpl(hookwrapper=True, tryfirst=True)
+def pytest_collection(session: Session) -> Generator[None, None, None]:
+ config = session.config
+ with catch_warnings_for_item(
+ config=config, ihook=config.hook, when="collect", item=None
+ ):
+ yield
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_terminal_summary(
+ terminalreporter: TerminalReporter,
+) -> Generator[None, None, None]:
+ config = terminalreporter.config
+ with catch_warnings_for_item(
+ config=config, ihook=config.hook, when="config", item=None
+ ):
+ yield
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_sessionfinish(session: Session) -> Generator[None, None, None]:
+ config = session.config
+ with catch_warnings_for_item(
+ config=config, ihook=config.hook, when="config", item=None
+ ):
+ yield
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_load_initial_conftests(
+ early_config: "Config",
+) -> Generator[None, None, None]:
+ with catch_warnings_for_item(
+ config=early_config, ihook=early_config.hook, when="config", item=None
+ ):
+ yield
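
# Editor's note: illustrative sketch only, not part of the vendored patch. It
# shows the "filterwarnings" mark registered by pytest_configure() above; each
# mark argument goes through parse_warning_filter() and is applied only while
# the marked test runs.
import warnings

import pytest

def legacy() -> None:
    warnings.warn("old API, please migrate", DeprecationWarning)

@pytest.mark.filterwarnings("error::DeprecationWarning")
def test_legacy_call_is_rejected() -> None:
    # catch_warnings_for_item() installs this filter for this item only,
    # so the DeprecationWarning becomes an error just for this test.
    with pytest.raises(DeprecationWarning):
        legacy()
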
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/pytest/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/src/pytest/__init__.py
new file mode 100644
index 0000000000..6050fd1124
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/pytest/__init__.py
@@ -0,0 +1,171 @@
+# PYTHON_ARGCOMPLETE_OK
+"""pytest: unit and functional testing with Python."""
+from . import collect
+from _pytest import __version__
+from _pytest import version_tuple
+from _pytest._code import ExceptionInfo
+from _pytest.assertion import register_assert_rewrite
+from _pytest.cacheprovider import Cache
+from _pytest.capture import CaptureFixture
+from _pytest.config import cmdline
+from _pytest.config import Config
+from _pytest.config import console_main
+from _pytest.config import ExitCode
+from _pytest.config import hookimpl
+from _pytest.config import hookspec
+from _pytest.config import main
+from _pytest.config import PytestPluginManager
+from _pytest.config import UsageError
+from _pytest.config.argparsing import OptionGroup
+from _pytest.config.argparsing import Parser
+from _pytest.debugging import pytestPDB as __pytestPDB
+from _pytest.fixtures import _fillfuncargs
+from _pytest.fixtures import fixture
+from _pytest.fixtures import FixtureLookupError
+from _pytest.fixtures import FixtureRequest
+from _pytest.fixtures import yield_fixture
+from _pytest.freeze_support import freeze_includes
+from _pytest.legacypath import TempdirFactory
+from _pytest.legacypath import Testdir
+from _pytest.logging import LogCaptureFixture
+from _pytest.main import Session
+from _pytest.mark import Mark
+from _pytest.mark import MARK_GEN as mark
+from _pytest.mark import MarkDecorator
+from _pytest.mark import MarkGenerator
+from _pytest.mark import param
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.nodes import Collector
+from _pytest.nodes import File
+from _pytest.nodes import Item
+from _pytest.outcomes import exit
+from _pytest.outcomes import fail
+from _pytest.outcomes import importorskip
+from _pytest.outcomes import skip
+from _pytest.outcomes import xfail
+from _pytest.pytester import HookRecorder
+from _pytest.pytester import LineMatcher
+from _pytest.pytester import Pytester
+from _pytest.pytester import RecordedHookCall
+from _pytest.pytester import RunResult
+from _pytest.python import Class
+from _pytest.python import Function
+from _pytest.python import Metafunc
+from _pytest.python import Module
+from _pytest.python import Package
+from _pytest.python_api import approx
+from _pytest.python_api import raises
+from _pytest.recwarn import deprecated_call
+from _pytest.recwarn import WarningsRecorder
+from _pytest.recwarn import warns
+from _pytest.reports import CollectReport
+from _pytest.reports import TestReport
+from _pytest.runner import CallInfo
+from _pytest.stash import Stash
+from _pytest.stash import StashKey
+from _pytest.tmpdir import TempPathFactory
+from _pytest.warning_types import PytestAssertRewriteWarning
+from _pytest.warning_types import PytestCacheWarning
+from _pytest.warning_types import PytestCollectionWarning
+from _pytest.warning_types import PytestConfigWarning
+from _pytest.warning_types import PytestDeprecationWarning
+from _pytest.warning_types import PytestExperimentalApiWarning
+from _pytest.warning_types import PytestRemovedIn7Warning
+from _pytest.warning_types import PytestRemovedIn8Warning
+from _pytest.warning_types import PytestUnhandledCoroutineWarning
+from _pytest.warning_types import PytestUnhandledThreadExceptionWarning
+from _pytest.warning_types import PytestUnknownMarkWarning
+from _pytest.warning_types import PytestUnraisableExceptionWarning
+from _pytest.warning_types import PytestWarning
+
+set_trace = __pytestPDB.set_trace
+
+
+__all__ = [
+ "__version__",
+ "_fillfuncargs",
+ "approx",
+ "Cache",
+ "CallInfo",
+ "CaptureFixture",
+ "Class",
+ "cmdline",
+ "collect",
+ "Collector",
+ "CollectReport",
+ "Config",
+ "console_main",
+ "deprecated_call",
+ "exit",
+ "ExceptionInfo",
+ "ExitCode",
+ "fail",
+ "File",
+ "fixture",
+ "FixtureLookupError",
+ "FixtureRequest",
+ "freeze_includes",
+ "Function",
+ "hookimpl",
+ "HookRecorder",
+ "hookspec",
+ "importorskip",
+ "Item",
+ "LineMatcher",
+ "LogCaptureFixture",
+ "main",
+ "mark",
+ "Mark",
+ "MarkDecorator",
+ "MarkGenerator",
+ "Metafunc",
+ "Module",
+ "MonkeyPatch",
+ "OptionGroup",
+ "Package",
+ "param",
+ "Parser",
+ "PytestAssertRewriteWarning",
+ "PytestCacheWarning",
+ "PytestCollectionWarning",
+ "PytestConfigWarning",
+ "PytestDeprecationWarning",
+ "PytestExperimentalApiWarning",
+ "PytestRemovedIn7Warning",
+ "PytestRemovedIn8Warning",
+ "Pytester",
+ "PytestPluginManager",
+ "PytestUnhandledCoroutineWarning",
+ "PytestUnhandledThreadExceptionWarning",
+ "PytestUnknownMarkWarning",
+ "PytestUnraisableExceptionWarning",
+ "PytestWarning",
+ "raises",
+ "RecordedHookCall",
+ "register_assert_rewrite",
+ "RunResult",
+ "Session",
+ "set_trace",
+ "skip",
+ "Stash",
+ "StashKey",
+ "version_tuple",
+ "TempdirFactory",
+ "TempPathFactory",
+ "Testdir",
+ "TestReport",
+ "UsageError",
+ "WarningsRecorder",
+ "warns",
+ "xfail",
+ "yield_fixture",
+]
+
+
+def __getattr__(name: str) -> object:
+ if name == "Instance":
+ # The import emits a deprecation warning.
+ from _pytest.python import Instance
+
+ return Instance
+ raise AttributeError(f"module {__name__} has no attribute {name}")
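
# Editor's note: illustrative sketch only, not part of the vendored patch. It
# shows the PEP 562 module __getattr__ defined above: pytest.Instance is
# resolved lazily (the underlying import emits a deprecation warning), while
# any other unknown name raises AttributeError.
import warnings

import pytest

with warnings.catch_warnings():
    warnings.simplefilter("ignore")  # silence the deprecation warning on import
    lazily_resolved = pytest.Instance  # goes through the module __getattr__

try:
    pytest.DoesNotExist
except AttributeError as exc:
    assert "has no attribute" in str(exc)
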
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/pytest/__main__.py b/testing/web-platform/tests/tools/third_party/pytest/src/pytest/__main__.py
new file mode 100644
index 0000000000..b170152937
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/pytest/__main__.py
@@ -0,0 +1,5 @@
+"""The pytest entry point."""
+import pytest
+
+if __name__ == "__main__":
+ raise SystemExit(pytest.console_main())
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/pytest/collect.py b/testing/web-platform/tests/tools/third_party/pytest/src/pytest/collect.py
new file mode 100644
index 0000000000..4b2b581806
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/pytest/collect.py
@@ -0,0 +1,38 @@
+import sys
+import warnings
+from types import ModuleType
+from typing import Any
+from typing import List
+
+import pytest
+from _pytest.deprecated import PYTEST_COLLECT_MODULE
+
+COLLECT_FAKEMODULE_ATTRIBUTES = [
+ "Collector",
+ "Module",
+ "Function",
+ "Session",
+ "Item",
+ "Class",
+ "File",
+ "_fillfuncargs",
+]
+
+
+class FakeCollectModule(ModuleType):
+ def __init__(self) -> None:
+ super().__init__("pytest.collect")
+ self.__all__ = list(COLLECT_FAKEMODULE_ATTRIBUTES)
+ self.__pytest = pytest
+
+ def __dir__(self) -> List[str]:
+ return dir(super()) + self.__all__
+
+ def __getattr__(self, name: str) -> Any:
+ if name not in self.__all__:
+ raise AttributeError(name)
+ warnings.warn(PYTEST_COLLECT_MODULE.format(name=name), stacklevel=2)
+ return getattr(pytest, name)
+
+
+sys.modules["pytest.collect"] = FakeCollectModule()
diff --git a/testing/web-platform/tests/tools/third_party/pytest/src/pytest/py.typed b/testing/web-platform/tests/tools/third_party/pytest/src/pytest/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/src/pytest/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/acceptance_test.py b/testing/web-platform/tests/tools/third_party/pytest/testing/acceptance_test.py
new file mode 100644
index 0000000000..8b8d4a4a6e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/acceptance_test.py
@@ -0,0 +1,1297 @@
+import os
+import sys
+import types
+
+import attr
+
+import pytest
+from _pytest.compat import importlib_metadata
+from _pytest.config import ExitCode
+from _pytest.pathlib import symlink_or_skip
+from _pytest.pytester import Pytester
+
+
+def prepend_pythonpath(*dirs) -> str:
+ cur = os.getenv("PYTHONPATH")
+ if cur:
+ dirs += (cur,)
+ return os.pathsep.join(str(p) for p in dirs)
+
+
+class TestGeneralUsage:
+ def test_config_error(self, pytester: Pytester) -> None:
+ pytester.copy_example("conftest_usageerror/conftest.py")
+ result = pytester.runpytest(pytester.path)
+ assert result.ret == ExitCode.USAGE_ERROR
+ result.stderr.fnmatch_lines(["*ERROR: hello"])
+ result.stdout.fnmatch_lines(["*pytest_unconfigure_called"])
+
+ def test_root_conftest_syntax_error(self, pytester: Pytester) -> None:
+ pytester.makepyfile(conftest="raise SyntaxError\n")
+ result = pytester.runpytest()
+ result.stderr.fnmatch_lines(["*raise SyntaxError*"])
+ assert result.ret != 0
+
+ def test_early_hook_error_issue38_1(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_sessionstart():
+ 0 / 0
+ """
+ )
+ result = pytester.runpytest(pytester.path)
+ assert result.ret != 0
+ # tracestyle is native by default for hook failures
+ result.stdout.fnmatch_lines(
+ ["*INTERNALERROR*File*conftest.py*line 2*", "*0 / 0*"]
+ )
+ result = pytester.runpytest(pytester.path, "--fulltrace")
+ assert result.ret != 0
+ # tracestyle is native by default for hook failures
+ result.stdout.fnmatch_lines(
+ ["*INTERNALERROR*def pytest_sessionstart():*", "*INTERNALERROR*0 / 0*"]
+ )
+
+ def test_early_hook_configure_error_issue38(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_configure():
+ 0 / 0
+ """
+ )
+ result = pytester.runpytest(pytester.path)
+ assert result.ret != 0
+ # here we get it on stderr
+ result.stderr.fnmatch_lines(
+ ["*INTERNALERROR*File*conftest.py*line 2*", "*0 / 0*"]
+ )
+
+ def test_file_not_found(self, pytester: Pytester) -> None:
+ result = pytester.runpytest("asd")
+ assert result.ret != 0
+ result.stderr.fnmatch_lines(["ERROR: file or directory not found: asd"])
+
+ def test_file_not_found_unconfigure_issue143(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_configure():
+ print("---configure")
+ def pytest_unconfigure():
+ print("---unconfigure")
+ """
+ )
+ result = pytester.runpytest("-s", "asd")
+ assert result.ret == ExitCode.USAGE_ERROR
+ result.stderr.fnmatch_lines(["ERROR: file or directory not found: asd"])
+ result.stdout.fnmatch_lines(["*---configure", "*---unconfigure"])
+
+ def test_config_preparse_plugin_option(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ pytest_xyz="""
+ def pytest_addoption(parser):
+ parser.addoption("--xyz", dest="xyz", action="store")
+ """
+ )
+ pytester.makepyfile(
+ test_one="""
+ def test_option(pytestconfig):
+ assert pytestconfig.option.xyz == "123"
+ """
+ )
+ result = pytester.runpytest("-p", "pytest_xyz", "--xyz=123", syspathinsert=True)
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ @pytest.mark.parametrize("load_cov_early", [True, False])
+ def test_early_load_setuptools_name(
+ self, pytester: Pytester, monkeypatch, load_cov_early
+ ) -> None:
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD")
+
+ pytester.makepyfile(mytestplugin1_module="")
+ pytester.makepyfile(mytestplugin2_module="")
+ pytester.makepyfile(mycov_module="")
+ pytester.syspathinsert()
+
+ loaded = []
+
+ @attr.s
+ class DummyEntryPoint:
+ name = attr.ib()
+ module = attr.ib()
+ group = "pytest11"
+
+ def load(self):
+ __import__(self.module)
+ loaded.append(self.name)
+ return sys.modules[self.module]
+
+ entry_points = [
+ DummyEntryPoint("myplugin1", "mytestplugin1_module"),
+ DummyEntryPoint("myplugin2", "mytestplugin2_module"),
+ DummyEntryPoint("mycov", "mycov_module"),
+ ]
+
+ @attr.s
+ class DummyDist:
+ entry_points = attr.ib()
+ files = ()
+
+ def my_dists():
+ return (DummyDist(entry_points),)
+
+ monkeypatch.setattr(importlib_metadata, "distributions", my_dists)
+ params = ("-p", "mycov") if load_cov_early else ()
+ pytester.runpytest_inprocess(*params)
+ if load_cov_early:
+ assert loaded == ["mycov", "myplugin1", "myplugin2"]
+ else:
+ assert loaded == ["myplugin1", "myplugin2", "mycov"]
+
+ @pytest.mark.parametrize("import_mode", ["prepend", "append", "importlib"])
+ def test_assertion_rewrite(self, pytester: Pytester, import_mode) -> None:
+ p = pytester.makepyfile(
+ """
+ def test_this():
+ x = 0
+ assert x
+ """
+ )
+ result = pytester.runpytest(p, f"--import-mode={import_mode}")
+ result.stdout.fnmatch_lines(["> assert x", "E assert 0"])
+ assert result.ret == 1
+
+ def test_nested_import_error(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import import_fails
+ def test_this():
+ assert import_fails.a == 1
+ """
+ )
+ pytester.makepyfile(import_fails="import does_not_work")
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "ImportError while importing test module*",
+ "*No module named *does_not_work*",
+ ]
+ )
+ assert result.ret == 2
+
+ def test_not_collectable_arguments(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile("")
+ p2 = pytester.makefile(".pyc", "123")
+ result = pytester.runpytest(p1, p2)
+ assert result.ret == ExitCode.USAGE_ERROR
+ result.stderr.fnmatch_lines(
+ [
+ f"ERROR: not found: {p2}",
+ f"(no name {str(p2)!r} in any of [[][]])",
+ "",
+ ]
+ )
+
+ @pytest.mark.filterwarnings("default")
+ def test_better_reporting_on_conftest_load_failure(
+ self, pytester: Pytester
+ ) -> None:
+ """Show a user-friendly traceback on conftest import failures (#486, #3332)"""
+ pytester.makepyfile("")
+ conftest = pytester.makeconftest(
+ """
+ def foo():
+ import qwerty
+ foo()
+ """
+ )
+ result = pytester.runpytest("--help")
+ result.stdout.fnmatch_lines(
+ """
+ *--version*
+ *warning*conftest.py*
+ """
+ )
+ result = pytester.runpytest()
+ assert result.stdout.lines == []
+ assert result.stderr.lines == [
+ f"ImportError while loading conftest '{conftest}'.",
+ "conftest.py:3: in <module>",
+ " foo()",
+ "conftest.py:2: in foo",
+ " import qwerty",
+ "E ModuleNotFoundError: No module named 'qwerty'",
+ ]
+
+ def test_early_skip(self, pytester: Pytester) -> None:
+ pytester.mkdir("xyz")
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_collect_file():
+ pytest.skip("early")
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ result.stdout.fnmatch_lines(["*1 skip*"])
+
+ def test_issue88_initial_file_multinodes(self, pytester: Pytester) -> None:
+ pytester.copy_example("issue88_initial_file_multinodes")
+ p = pytester.makepyfile("def test_hello(): pass")
+ result = pytester.runpytest(p, "--collect-only")
+ result.stdout.fnmatch_lines(["*MyFile*test_issue88*", "*Module*test_issue88*"])
+
+ def test_issue93_initialnode_importing_capturing(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import sys
+ print("should not be seen")
+ sys.stderr.write("stder42\\n")
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ result.stdout.no_fnmatch_line("*should not be seen*")
+ assert "stderr42" not in result.stderr.str()
+
+ def test_conftest_printing_shows_if_error(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ print("should be seen")
+ assert 0
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret != 0
+ assert "should be seen" in result.stdout.str()
+
+ def test_issue109_sibling_conftests_not_loaded(self, pytester: Pytester) -> None:
+ sub1 = pytester.mkdir("sub1")
+ sub2 = pytester.mkdir("sub2")
+ sub1.joinpath("conftest.py").write_text("assert 0")
+ result = pytester.runpytest(sub2)
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ sub2.joinpath("__init__.py").touch()
+ p = sub2.joinpath("test_hello.py")
+ p.touch()
+ result = pytester.runpytest(p)
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ result = pytester.runpytest(sub1)
+ assert result.ret == ExitCode.USAGE_ERROR
+
+ def test_directory_skipped(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_ignore_collect():
+ pytest.skip("intentional")
+ """
+ )
+ pytester.makepyfile("def test_hello(): pass")
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ result.stdout.fnmatch_lines(["*1 skipped*"])
+
+ def test_multiple_items_per_collector_byid(self, pytester: Pytester) -> None:
+ c = pytester.makeconftest(
+ """
+ import pytest
+ class MyItem(pytest.Item):
+ def runtest(self):
+ pass
+ class MyCollector(pytest.File):
+ def collect(self):
+ return [MyItem.from_parent(name="xyz", parent=self)]
+ def pytest_collect_file(file_path, parent):
+ if file_path.name.startswith("conftest"):
+ return MyCollector.from_parent(path=file_path, parent=parent)
+ """
+ )
+ result = pytester.runpytest(c.name + "::" + "xyz")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 pass*"])
+
+ def test_skip_on_generated_funcarg_id(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize('x', [3], ids=['hello-123'])
+ def pytest_runtest_setup(item):
+ print(item.keywords)
+ if 'hello-123' in item.keywords:
+ pytest.skip("hello")
+ assert 0
+ """
+ )
+ p = pytester.makepyfile("""def test_func(x): pass""")
+ res = pytester.runpytest(p)
+ assert res.ret == 0
+ res.stdout.fnmatch_lines(["*1 skipped*"])
+
+ def test_direct_addressing_selects(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize('i', [1, 2], ids=["1", "2"])
+ def test_func(i):
+ pass
+ """
+ )
+ res = pytester.runpytest(p.name + "::" + "test_func[1]")
+ assert res.ret == 0
+ res.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_direct_addressing_notfound(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def test_func():
+ pass
+ """
+ )
+ res = pytester.runpytest(p.name + "::" + "test_notfound")
+ assert res.ret
+ res.stderr.fnmatch_lines(["*ERROR*not found*"])
+
+ def test_docstring_on_hookspec(self) -> None:
+ from _pytest import hookspec
+
+ for name, value in vars(hookspec).items():
+ if name.startswith("pytest_"):
+ assert value.__doc__, "no docstring for %s" % name
+
+ def test_initialization_error_issue49(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_configure():
+ x
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 3 # internal error
+ result.stderr.fnmatch_lines(["INTERNAL*pytest_configure*", "INTERNAL*x*"])
+ assert "sessionstarttime" not in result.stderr.str()
+
+ @pytest.mark.parametrize("lookfor", ["test_fun.py::test_a"])
+ def test_issue134_report_error_when_collecting_member(
+ self, pytester: Pytester, lookfor
+ ) -> None:
+ pytester.makepyfile(
+ test_fun="""
+ def test_a():
+ pass
+ def"""
+ )
+ result = pytester.runpytest(lookfor)
+ result.stdout.fnmatch_lines(["*SyntaxError*"])
+ if "::" in lookfor:
+ result.stderr.fnmatch_lines(["*ERROR*"])
+ assert result.ret == 4 # usage error only if item not found
+
+ def test_report_all_failed_collections_initargs(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ from _pytest.config import ExitCode
+
+ def pytest_sessionfinish(exitstatus):
+ assert exitstatus == ExitCode.USAGE_ERROR
+ print("pytest_sessionfinish_called")
+ """
+ )
+ pytester.makepyfile(test_a="def", test_b="def")
+ result = pytester.runpytest("test_a.py::a", "test_b.py::b")
+ result.stderr.fnmatch_lines(["*ERROR*test_a.py::a*", "*ERROR*test_b.py::b*"])
+ result.stdout.fnmatch_lines(["pytest_sessionfinish_called"])
+ assert result.ret == ExitCode.USAGE_ERROR
+
+ def test_namespace_import_doesnt_confuse_import_hook(
+ self, pytester: Pytester
+ ) -> None:
+ """Ref #383.
+
+ Python 3.3's namespace packages interfered with our import hooks.
+ Importing a module that didn't exist would make our test crash, even
+ if the ImportError was handled gracefully.
+ """
+ pytester.mkdir("not_a_package")
+ p = pytester.makepyfile(
+ """
+ try:
+ from not_a_package import doesnt_exist
+ except ImportError:
+ # We handle the import error gracefully here
+ pass
+
+ def test_whatever():
+ pass
+ """
+ )
+ res = pytester.runpytest(p.name)
+ assert res.ret == 0
+
+ def test_unknown_option(self, pytester: Pytester) -> None:
+ result = pytester.runpytest("--qwlkej")
+ result.stderr.fnmatch_lines(
+ """
+ *unrecognized*
+ """
+ )
+
+ def test_getsourcelines_error_issue553(
+ self, pytester: Pytester, monkeypatch
+ ) -> None:
+ monkeypatch.setattr("inspect.getsourcelines", None)
+ p = pytester.makepyfile(
+ """
+ def raise_error(obj):
+ raise OSError('source code not available')
+
+ import inspect
+ inspect.getsourcelines = raise_error
+
+ def test_foo(invalid_fixture):
+ pass
+ """
+ )
+ res = pytester.runpytest(p)
+ res.stdout.fnmatch_lines(
+ ["*source code not available*", "E*fixture 'invalid_fixture' not found"]
+ )
+
+ def test_plugins_given_as_strings(
+ self, pytester: Pytester, monkeypatch, _sys_snapshot
+ ) -> None:
+ """Test that str values passed to main() as `plugins` arg are
+ interpreted as module names to be imported and registered (#855)."""
+ with pytest.raises(ImportError) as excinfo:
+ pytest.main([str(pytester.path)], plugins=["invalid.module"])
+ assert "invalid" in str(excinfo.value)
+
+ p = pytester.path.joinpath("test_test_plugins_given_as_strings.py")
+ p.write_text("def test_foo(): pass")
+ mod = types.ModuleType("myplugin")
+ monkeypatch.setitem(sys.modules, "myplugin", mod)
+ assert pytest.main(args=[str(pytester.path)], plugins=["myplugin"]) == 0
+
+ def test_parametrized_with_bytes_regex(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import re
+ import pytest
+ @pytest.mark.parametrize('r', [re.compile(b'foo')])
+ def test_stuff(r):
+ pass
+ """
+ )
+ res = pytester.runpytest(p)
+ res.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_parametrized_with_null_bytes(self, pytester: Pytester) -> None:
+ """Test parametrization with values that contain null bytes and unicode characters (#2644, #2957)"""
+ p = pytester.makepyfile(
+ """\
+ import pytest
+
+ @pytest.mark.parametrize("data", [b"\\x00", "\\x00", 'ação'])
+ def test_foo(data):
+ assert data
+ """
+ )
+ res = pytester.runpytest(p)
+ res.assert_outcomes(passed=3)
+
+
+class TestInvocationVariants:
+ def test_earlyinit(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ assert hasattr(pytest, 'mark')
+ """
+ )
+ result = pytester.runpython(p)
+ assert result.ret == 0
+
+ def test_pydoc(self, pytester: Pytester) -> None:
+ result = pytester.runpython_c("import pytest;help(pytest)")
+ assert result.ret == 0
+ s = result.stdout.str()
+ assert "MarkGenerator" in s
+
+ def test_import_star_pytest(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ from pytest import *
+ #Item
+ #File
+ main
+ skip
+ xfail
+ """
+ )
+ result = pytester.runpython(p)
+ assert result.ret == 0
+
+ def test_double_pytestcmdline(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ run="""
+ import pytest
+ pytest.main()
+ pytest.main()
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_hello():
+ pass
+ """
+ )
+ result = pytester.runpython(p)
+ result.stdout.fnmatch_lines(["*1 passed*", "*1 passed*"])
+
+ def test_python_minus_m_invocation_ok(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile("def test_hello(): pass")
+ res = pytester.run(sys.executable, "-m", "pytest", str(p1))
+ assert res.ret == 0
+
+ def test_python_minus_m_invocation_fail(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile("def test_fail(): 0/0")
+ res = pytester.run(sys.executable, "-m", "pytest", str(p1))
+ assert res.ret == 1
+
+ def test_python_pytest_package(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile("def test_pass(): pass")
+ res = pytester.run(sys.executable, "-m", "pytest", str(p1))
+ assert res.ret == 0
+ res.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_invoke_with_invalid_type(self) -> None:
+ with pytest.raises(
+ TypeError, match="expected to be a list of strings, got: '-h'"
+ ):
+ pytest.main("-h") # type: ignore[arg-type]
+
+ def test_invoke_with_path(self, pytester: Pytester, capsys) -> None:
+ retcode = pytest.main([str(pytester.path)])
+ assert retcode == ExitCode.NO_TESTS_COLLECTED
+ out, err = capsys.readouterr()
+
+ def test_invoke_plugin_api(self, capsys) -> None:
+ class MyPlugin:
+ def pytest_addoption(self, parser):
+ parser.addoption("--myopt")
+
+ pytest.main(["-h"], plugins=[MyPlugin()])
+ out, err = capsys.readouterr()
+ assert "--myopt" in out
+
+ def test_pyargs_importerror(self, pytester: Pytester, monkeypatch) -> None:
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", False)
+ path = pytester.mkpydir("tpkg")
+ path.joinpath("test_hello.py").write_text("raise ImportError")
+
+ result = pytester.runpytest("--pyargs", "tpkg.test_hello", syspathinsert=True)
+ assert result.ret != 0
+
+ result.stdout.fnmatch_lines(["collected*0*items*/*1*error"])
+
+ def test_pyargs_only_imported_once(self, pytester: Pytester) -> None:
+ pkg = pytester.mkpydir("foo")
+ pkg.joinpath("test_foo.py").write_text(
+ "print('hello from test_foo')\ndef test(): pass"
+ )
+ pkg.joinpath("conftest.py").write_text(
+ "def pytest_configure(config): print('configuring')"
+ )
+
+ result = pytester.runpytest(
+ "--pyargs", "foo.test_foo", "-s", syspathinsert=True
+ )
+ # should only import once
+ assert result.outlines.count("hello from test_foo") == 1
+ # should only configure once
+ assert result.outlines.count("configuring") == 1
+
+ def test_pyargs_filename_looks_like_module(self, pytester: Pytester) -> None:
+ pytester.path.joinpath("conftest.py").touch()
+ pytester.path.joinpath("t.py").write_text("def test(): pass")
+ result = pytester.runpytest("--pyargs", "t.py")
+ assert result.ret == ExitCode.OK
+
+ def test_cmdline_python_package(self, pytester: Pytester, monkeypatch) -> None:
+ import warnings
+
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", False)
+ path = pytester.mkpydir("tpkg")
+ path.joinpath("test_hello.py").write_text("def test_hello(): pass")
+ path.joinpath("test_world.py").write_text("def test_world(): pass")
+ result = pytester.runpytest("--pyargs", "tpkg")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*2 passed*"])
+ result = pytester.runpytest("--pyargs", "tpkg.test_hello", syspathinsert=True)
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ empty_package = pytester.mkpydir("empty_package")
+ monkeypatch.setenv("PYTHONPATH", str(empty_package), prepend=os.pathsep)
+ # the path which is not a package raises a warning on pypy;
+ # no idea why only pypy and not normal python warn about it here
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", ImportWarning)
+ result = pytester.runpytest("--pyargs", ".")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ monkeypatch.setenv("PYTHONPATH", str(pytester), prepend=os.pathsep)
+ result = pytester.runpytest("--pyargs", "tpkg.test_missing", syspathinsert=True)
+ assert result.ret != 0
+ result.stderr.fnmatch_lines(["*not*found*test_missing*"])
+
+ def test_cmdline_python_namespace_package(
+ self, pytester: Pytester, monkeypatch
+ ) -> None:
+ """Test --pyargs option with namespace packages (#1567).
+
+ Ref: https://packaging.python.org/guides/packaging-namespace-packages/
+ """
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", raising=False)
+
+ search_path = []
+ for dirname in "hello", "world":
+ d = pytester.mkdir(dirname)
+ search_path.append(d)
+ ns = d.joinpath("ns_pkg")
+ ns.mkdir()
+ ns.joinpath("__init__.py").write_text(
+ "__import__('pkg_resources').declare_namespace(__name__)"
+ )
+ lib = ns.joinpath(dirname)
+ lib.mkdir()
+ lib.joinpath("__init__.py").touch()
+ lib.joinpath(f"test_{dirname}.py").write_text(
+ f"def test_{dirname}(): pass\ndef test_other():pass"
+ )
+
+ # The structure of the test directory is now:
+ # .
+ # ├── hello
+ # │ └── ns_pkg
+ # │ ├── __init__.py
+ # │ └── hello
+ # │ ├── __init__.py
+ # │ └── test_hello.py
+ # └── world
+ # └── ns_pkg
+ # ├── __init__.py
+ # └── world
+ # ├── __init__.py
+ # └── test_world.py
+
+ # NOTE: the different/reversed ordering is intentional here.
+ monkeypatch.setenv("PYTHONPATH", prepend_pythonpath(*search_path))
+ for p in search_path:
+ monkeypatch.syspath_prepend(p)
+
+ # mixed module and filenames:
+ monkeypatch.chdir("world")
+ result = pytester.runpytest("--pyargs", "-v", "ns_pkg.hello", "ns_pkg/world")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "test_hello.py::test_hello*PASSED*",
+ "test_hello.py::test_other*PASSED*",
+ "ns_pkg/world/test_world.py::test_world*PASSED*",
+ "ns_pkg/world/test_world.py::test_other*PASSED*",
+ "*4 passed in*",
+ ]
+ )
+
+ # specify tests within a module
+ pytester.chdir()
+ result = pytester.runpytest(
+ "--pyargs", "-v", "ns_pkg.world.test_world::test_other"
+ )
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ ["*test_world.py::test_other*PASSED*", "*1 passed*"]
+ )
+
+ def test_invoke_test_and_doctestmodules(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def test():
+ pass
+ """
+ )
+ result = pytester.runpytest(str(p) + "::test", "--doctest-modules")
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_cmdline_python_package_symlink(
+ self, pytester: Pytester, monkeypatch
+ ) -> None:
+ """
+ --pyargs with a package whose path contains a symlink can still have a
+ conftest.py inside that package (#2985).
+ """
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", raising=False)
+
+ dirname = "lib"
+ d = pytester.mkdir(dirname)
+ foo = d.joinpath("foo")
+ foo.mkdir()
+ foo.joinpath("__init__.py").touch()
+ lib = foo.joinpath("bar")
+ lib.mkdir()
+ lib.joinpath("__init__.py").touch()
+ lib.joinpath("test_bar.py").write_text(
+ "def test_bar(): pass\ndef test_other(a_fixture):pass"
+ )
+ lib.joinpath("conftest.py").write_text(
+ "import pytest\n@pytest.fixture\ndef a_fixture():pass"
+ )
+
+ d_local = pytester.mkdir("symlink_root")
+ symlink_location = d_local / "lib"
+ symlink_or_skip(d, symlink_location, target_is_directory=True)
+
+ # The structure of the test directory is now:
+ # .
+ # ├── symlink_root
+ # │ └── lib -> ../lib
+ # └── lib
+ # └── foo
+ # ├── __init__.py
+ # └── bar
+ # ├── __init__.py
+ # ├── conftest.py
+ # └── test_bar.py
+
+ # NOTE: the different/reversed ordering is intentional here.
+ search_path = ["lib", os.path.join("symlink_root", "lib")]
+ monkeypatch.setenv("PYTHONPATH", prepend_pythonpath(*search_path))
+ for p in search_path:
+ monkeypatch.syspath_prepend(p)
+
+ # module picked up in symlink-ed directory:
+ # It picks up symlink_root/lib/foo/bar (symlink) via sys.path.
+ result = pytester.runpytest("--pyargs", "-v", "foo.bar")
+ pytester.chdir()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "symlink_root/lib/foo/bar/test_bar.py::test_bar PASSED*",
+ "symlink_root/lib/foo/bar/test_bar.py::test_other PASSED*",
+ "*2 passed*",
+ ]
+ )
+
+ def test_cmdline_python_package_not_exists(self, pytester: Pytester) -> None:
+ result = pytester.runpytest("--pyargs", "tpkgwhatv")
+ assert result.ret
+ result.stderr.fnmatch_lines(["ERROR*module*or*package*not*found*"])
+
+ @pytest.mark.xfail(reason="decide: feature or bug")
+ def test_noclass_discovery_if_not_testcase(self, pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class TestHello(object):
+ def test_hello(self):
+ assert self.attr
+
+ class RealTest(unittest.TestCase, TestHello):
+ attr = 42
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ reprec.assertoutcome(passed=1)
+
+ def test_doctest_id(self, pytester: Pytester) -> None:
+ pytester.makefile(
+ ".txt",
+ """
+ >>> x=3
+ >>> x
+ 4
+ """,
+ )
+ testid = "test_doctest_id.txt::test_doctest_id.txt"
+ expected_lines = [
+ "*= FAILURES =*",
+ "*_ ?doctest? test_doctest_id.txt _*",
+ "FAILED test_doctest_id.txt::test_doctest_id.txt",
+ "*= 1 failed in*",
+ ]
+ result = pytester.runpytest(testid, "-rf", "--tb=short")
+ result.stdout.fnmatch_lines(expected_lines)
+
+ # Ensure that re-running it will still handle it as
+ # doctest.DocTestFailure, which was not the case before when
+ # re-importing doctest, but not creating a new RUNNER_CLASS.
+ result = pytester.runpytest(testid, "-rf", "--tb=short")
+ result.stdout.fnmatch_lines(expected_lines)
+
+ def test_core_backward_compatibility(self) -> None:
+ """Test backward compatibility for get_plugin_manager function. See #787."""
+ import _pytest.config
+
+ assert (
+ type(_pytest.config.get_plugin_manager())
+ is _pytest.config.PytestPluginManager
+ )
+
+ def test_has_plugin(self, request) -> None:
+ """Test hasplugin function of the plugin manager (#932)."""
+ assert request.config.pluginmanager.hasplugin("python")
+
+
+class TestDurations:
+ source = """
+ from _pytest import timing
+ def test_something():
+ pass
+ def test_2():
+ timing.sleep(0.010)
+ def test_1():
+ timing.sleep(0.002)
+ def test_3():
+ timing.sleep(0.020)
+ """
+
+ def test_calls(self, pytester: Pytester, mock_timing) -> None:
+ pytester.makepyfile(self.source)
+ result = pytester.runpytest_inprocess("--durations=10")
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines_random(
+ ["*durations*", "*call*test_3*", "*call*test_2*"]
+ )
+
+ result.stdout.fnmatch_lines(
+ ["(8 durations < 0.005s hidden. Use -vv to show these durations.)"]
+ )
+
+ def test_calls_show_2(self, pytester: Pytester, mock_timing) -> None:
+
+ pytester.makepyfile(self.source)
+ result = pytester.runpytest_inprocess("--durations=2")
+ assert result.ret == 0
+
+ lines = result.stdout.get_lines_after("*slowest*durations*")
+ assert "4 passed" in lines[2]
+
+ def test_calls_showall(self, pytester: Pytester, mock_timing) -> None:
+ pytester.makepyfile(self.source)
+ result = pytester.runpytest_inprocess("--durations=0")
+ assert result.ret == 0
+
+ tested = "3"
+ for x in tested:
+ for y in ("call",): # 'setup', 'call', 'teardown':
+ for line in result.stdout.lines:
+ if ("test_%s" % x) in line and y in line:
+ break
+ else:
+ raise AssertionError(f"not found {x} {y}")
+
+ def test_calls_showall_verbose(self, pytester: Pytester, mock_timing) -> None:
+ pytester.makepyfile(self.source)
+ result = pytester.runpytest_inprocess("--durations=0", "-vv")
+ assert result.ret == 0
+
+ for x in "123":
+ for y in ("call",): # 'setup', 'call', 'teardown':
+ for line in result.stdout.lines:
+ if ("test_%s" % x) in line and y in line:
+ break
+ else:
+ raise AssertionError(f"not found {x} {y}")
+
+ def test_with_deselected(self, pytester: Pytester, mock_timing) -> None:
+ pytester.makepyfile(self.source)
+ result = pytester.runpytest_inprocess("--durations=2", "-k test_3")
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(["*durations*", "*call*test_3*"])
+
+ def test_with_failing_collection(self, pytester: Pytester, mock_timing) -> None:
+ pytester.makepyfile(self.source)
+ pytester.makepyfile(test_collecterror="""xyz""")
+ result = pytester.runpytest_inprocess("--durations=2", "-k test_1")
+ assert result.ret == 2
+
+ result.stdout.fnmatch_lines(["*Interrupted: 1 error during collection*"])
+ # Collection errors abort test execution, therefore no duration is
+ # output
+ result.stdout.no_fnmatch_line("*duration*")
+
+ def test_with_not(self, pytester: Pytester, mock_timing) -> None:
+ pytester.makepyfile(self.source)
+ result = pytester.runpytest_inprocess("-k not 1")
+ assert result.ret == 0
+
+
+class TestDurationsWithFixture:
+ source = """
+ import pytest
+ from _pytest import timing
+
+ @pytest.fixture
+ def setup_fixt():
+ timing.sleep(2)
+
+ def test_1(setup_fixt):
+ timing.sleep(5)
+ """
+
+ def test_setup_function(self, pytester: Pytester, mock_timing) -> None:
+ pytester.makepyfile(self.source)
+ result = pytester.runpytest_inprocess("--durations=10")
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines_random(
+ """
+ *durations*
+ 5.00s call *test_1*
+ 2.00s setup *test_1*
+ """
+ )
+
+
+def test_zipimport_hook(pytester: Pytester) -> None:
+ """Test package loader is being used correctly (see #1837)."""
+ zipapp = pytest.importorskip("zipapp")
+ pytester.path.joinpath("app").mkdir()
+ pytester.makepyfile(
+ **{
+ "app/foo.py": """
+ import pytest
+ def main():
+ pytest.main(['--pyargs', 'foo'])
+ """
+ }
+ )
+ target = pytester.path.joinpath("foo.zip")
+ zipapp.create_archive(
+ str(pytester.path.joinpath("app")), str(target), main="foo:main"
+ )
+ result = pytester.runpython(target)
+ assert result.ret == 0
+ result.stderr.fnmatch_lines(["*not found*foo*"])
+ result.stdout.no_fnmatch_line("*INTERNALERROR>*")
+
+
+def test_import_plugin_unicode_name(pytester: Pytester) -> None:
+ pytester.makepyfile(myplugin="")
+ pytester.makepyfile("def test(): pass")
+ pytester.makeconftest("pytest_plugins = ['myplugin']")
+ r = pytester.runpytest()
+ assert r.ret == 0
+
+
+def test_pytest_plugins_as_module(pytester: Pytester) -> None:
+ """Do not raise an error if pytest_plugins attribute is a module (#3899)"""
+ pytester.makepyfile(
+ **{
+ "__init__.py": "",
+ "pytest_plugins.py": "",
+ "conftest.py": "from . import pytest_plugins",
+ "test_foo.py": "def test(): pass",
+ }
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 1 passed in *"])
+
+
+def test_deferred_hook_checking(pytester: Pytester) -> None:
+ """Check hooks as late as possible (#1821)."""
+ pytester.syspathinsert()
+ pytester.makepyfile(
+ **{
+ "plugin.py": """
+ class Hooks(object):
+ def pytest_my_hook(self, config):
+ pass
+
+ def pytest_configure(config):
+ config.pluginmanager.add_hookspecs(Hooks)
+ """,
+ "conftest.py": """
+ pytest_plugins = ['plugin']
+ def pytest_my_hook(config):
+ return 40
+ """,
+ "test_foo.py": """
+ def test(request):
+ assert request.config.hook.pytest_my_hook(config=request.config) == [40]
+ """,
+ }
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 1 passed *"])
+
+
+def test_fixture_values_leak(pytester: Pytester) -> None:
+ """Ensure that fixture objects are properly destroyed by the garbage collector at the end of their expected
+ life-times (#2981).
+ """
+ pytester.makepyfile(
+ """
+ import attr
+ import gc
+ import pytest
+ import weakref
+
+ @attr.s
+ class SomeObj(object):
+ name = attr.ib()
+
+ fix_of_test1_ref = None
+ session_ref = None
+
+ @pytest.fixture(scope='session')
+ def session_fix():
+ global session_ref
+ obj = SomeObj(name='session-fixture')
+ session_ref = weakref.ref(obj)
+ return obj
+
+ @pytest.fixture
+ def fix(session_fix):
+ global fix_of_test1_ref
+ obj = SomeObj(name='local-fixture')
+ fix_of_test1_ref = weakref.ref(obj)
+ return obj
+
+ def test1(fix):
+ assert fix_of_test1_ref() is fix
+
+ def test2():
+ gc.collect()
+ # fixture "fix" created during test1 must have been destroyed by now
+ assert fix_of_test1_ref() is None
+ """
+ )
+ # Running in a subprocess does not activate the HookRecorder, which
+ # itself holds references to objects passed to the
+ # pytest_assertrepr_compare hook.
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["* 2 passed *"])
+
+
+def test_fixture_order_respects_scope(pytester: Pytester) -> None:
+ """Ensure that fixtures are created according to scope order (#2405)."""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ data = {}
+
+ @pytest.fixture(scope='module')
+ def clean_data():
+ data.clear()
+
+ @pytest.fixture(autouse=True)
+ def add_data():
+ data.update(value=True)
+
+ @pytest.mark.usefixtures('clean_data')
+ def test_value():
+ assert data.get('value')
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+
+
+def test_frame_leak_on_failing_test(pytester: Pytester) -> None:
+ """Pytest would leak garbage referencing the frames of tests that failed
+ that could never be reclaimed (#2798).
+
+ Unfortunately it was not possible to remove the actual circles because most of them
+ are made of traceback objects which cannot be weakly referenced. Those objects at least
+ can be eventually claimed by the garbage collector.
+ """
+ pytester.makepyfile(
+ """
+ import gc
+ import weakref
+
+ class Obj:
+ pass
+
+ ref = None
+
+ def test1():
+ obj = Obj()
+ global ref
+ ref = weakref.ref(obj)
+ assert 0
+
+ def test2():
+ gc.collect()
+ assert ref() is None
+ """
+ )
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*1 failed, 1 passed in*"])
+
+
+def test_fixture_mock_integration(pytester: Pytester) -> None:
+ """Test that decorators applied to fixture are left working (#3774)"""
+ p = pytester.copy_example("acceptance/fixture_mock_integration.py")
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_usage_error_code(pytester: Pytester) -> None:
+ result = pytester.runpytest("-unknown-option-")
+ assert result.ret == ExitCode.USAGE_ERROR
+
+
+@pytest.mark.filterwarnings("default::pytest.PytestUnhandledCoroutineWarning")
+def test_warn_on_async_function(pytester: Pytester) -> None:
+ # In the below we .close() the coroutine only to avoid
+ # "RuntimeWarning: coroutine 'test_2' was never awaited"
+ # which messes with other tests.
+ pytester.makepyfile(
+ test_async="""
+ async def test_1():
+ pass
+ async def test_2():
+ pass
+ def test_3():
+ coro = test_2()
+ coro.close()
+ return coro
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "test_async.py::test_1",
+ "test_async.py::test_2",
+ "test_async.py::test_3",
+ "*async def functions are not natively supported*",
+ "*3 skipped, 3 warnings in*",
+ ]
+ )
+ # ensure our warning message appears only once
+ assert (
+ result.stdout.str().count("async def functions are not natively supported") == 1
+ )
+
+
+@pytest.mark.filterwarnings("default::pytest.PytestUnhandledCoroutineWarning")
+def test_warn_on_async_gen_function(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_async="""
+ async def test_1():
+ yield
+ async def test_2():
+ yield
+ def test_3():
+ return test_2()
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "test_async.py::test_1",
+ "test_async.py::test_2",
+ "test_async.py::test_3",
+ "*async def functions are not natively supported*",
+ "*3 skipped, 3 warnings in*",
+ ]
+ )
+ # ensure our warning message appears only once
+ assert (
+ result.stdout.str().count("async def functions are not natively supported") == 1
+ )
+
+
+def test_pdb_can_be_rewritten(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ **{
+ "conftest.py": """
+ import pytest
+ pytest.register_assert_rewrite("pdb")
+ """,
+ "__init__.py": "",
+ "pdb.py": """
+ def check():
+ assert 1 == 2
+ """,
+ "test_pdb.py": """
+ def test():
+ import pdb
+ assert pdb.check()
+ """,
+ }
+ )
+ # Disable debugging plugin itself to avoid:
+ # > INTERNALERROR> AttributeError: module 'pdb' has no attribute 'set_trace'
+ result = pytester.runpytest_subprocess("-p", "no:debugging", "-vv")
+ result.stdout.fnmatch_lines(
+ [
+ " def check():",
+ "> assert 1 == 2",
+ "E assert 1 == 2",
+ "E +1",
+ "E -2",
+ "",
+ "pdb.py:2: AssertionError",
+ "*= 1 failed in *",
+ ]
+ )
+ assert result.ret == 1
+
+
+def test_tee_stdio_captures_and_live_prints(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import sys
+ def test_simple():
+ print ("@this is stdout@")
+ print ("@this is stderr@", file=sys.stderr)
+ """
+ )
+ result = pytester.runpytest_subprocess(
+ testpath,
+ "--capture=tee-sys",
+ "--junitxml=output.xml",
+ "-o",
+ "junit_logging=all",
+ )
+
+ # ensure stdout/stderr were 'live printed'
+ result.stdout.fnmatch_lines(["*@this is stdout@*"])
+ result.stderr.fnmatch_lines(["*@this is stderr@*"])
+
+ # now ensure the output is in the junitxml
+ with open(pytester.path.joinpath("output.xml")) as f:
+ fullXml = f.read()
+ assert "@this is stdout@\n" in fullXml
+ assert "@this is stderr@\n" in fullXml
+
+
+@pytest.mark.skipif(
+ sys.platform == "win32",
+ reason="Windows raises `OSError: [Errno 22] Invalid argument` instead",
+)
+def test_no_brokenpipeerror_message(pytester: Pytester) -> None:
+ """Ensure that the broken pipe error message is suppressed.
+
+ In some Python versions, it reaches sys.unraisablehook, in others
+ a BrokenPipeError exception is propagated, but either way it prints
+ to stderr on shutdown, so checking nothing is printed is enough.
+ """
+ popen = pytester.popen((*pytester._getpytestargs(), "--help"))
+ popen.stdout.close()
+ ret = popen.wait()
+ assert popen.stderr.read() == b""
+ assert ret == 1
+
+ # Cleanup.
+ popen.stderr.close()
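
# Editor's note: illustrative sketch only, not part of the vendored patch. It
# shows the Pytester pattern the acceptance tests above rely on: enable the
# "pytester" plugin, write a throwaway test file, run pytest on it in-process,
# and assert on the recorded outcomes and exit code.
import pytest

pytest_plugins = ["pytester"]  # makes the `pytester` fixture available

def test_sketch_of_the_pattern(pytester: pytest.Pytester) -> None:
    pytester.makepyfile(
        """
        def test_ok():
            assert 1 + 1 == 2
        """
    )
    result = pytester.runpytest_inprocess()
    result.assert_outcomes(passed=1)
    assert result.ret == pytest.ExitCode.OK
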
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/code/test_code.py b/testing/web-platform/tests/tools/third_party/pytest/testing/code/test_code.py
new file mode 100644
index 0000000000..33809528a0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/code/test_code.py
@@ -0,0 +1,212 @@
+import re
+import sys
+from types import FrameType
+from unittest import mock
+
+import pytest
+from _pytest._code import Code
+from _pytest._code import ExceptionInfo
+from _pytest._code import Frame
+from _pytest._code import Source
+from _pytest._code.code import ExceptionChainRepr
+from _pytest._code.code import ReprFuncArgs
+
+
+def test_ne() -> None:
+ code1 = Code(compile('foo = "bar"', "", "exec"))
+ assert code1 == code1
+ code2 = Code(compile('foo = "baz"', "", "exec"))
+ assert code2 != code1
+
+
+def test_code_gives_back_name_for_not_existing_file() -> None:
+ name = "abc-123"
+ co_code = compile("pass\n", name, "exec")
+ assert co_code.co_filename == name
+ code = Code(co_code)
+ assert str(code.path) == name
+ assert code.fullsource is None
+
+
+def test_code_from_function_with_class() -> None:
+ class A:
+ pass
+
+ with pytest.raises(TypeError):
+ Code.from_function(A)
+
+
+def x() -> None:
+ raise NotImplementedError()
+
+
+def test_code_fullsource() -> None:
+ code = Code.from_function(x)
+ full = code.fullsource
+ assert "test_code_fullsource()" in str(full)
+
+
+def test_code_source() -> None:
+ code = Code.from_function(x)
+ src = code.source()
+ expected = """def x() -> None:
+ raise NotImplementedError()"""
+ assert str(src) == expected
+
+
+def test_frame_getsourcelineno_myself() -> None:
+ def func() -> FrameType:
+ return sys._getframe(0)
+
+ f = Frame(func())
+ source, lineno = f.code.fullsource, f.lineno
+ assert source is not None
+ assert source[lineno].startswith(" return sys._getframe(0)")
+
+
+def test_getstatement_empty_fullsource() -> None:
+ def func() -> FrameType:
+ return sys._getframe(0)
+
+ f = Frame(func())
+ with mock.patch.object(f.code.__class__, "fullsource", None):
+ assert f.statement == Source("")
+
+
+def test_code_from_func() -> None:
+ co = Code.from_function(test_frame_getsourcelineno_myself)
+ assert co.firstlineno
+ assert co.path
+
+
+def test_unicode_handling() -> None:
+ value = "ąć".encode()
+
+ def f() -> None:
+ raise Exception(value)
+
+ excinfo = pytest.raises(Exception, f)
+ str(excinfo)
+
+
+def test_code_getargs() -> None:
+ def f1(x):
+ raise NotImplementedError()
+
+ c1 = Code.from_function(f1)
+ assert c1.getargs(var=True) == ("x",)
+
+ def f2(x, *y):
+ raise NotImplementedError()
+
+ c2 = Code.from_function(f2)
+ assert c2.getargs(var=True) == ("x", "y")
+
+ def f3(x, **z):
+ raise NotImplementedError()
+
+ c3 = Code.from_function(f3)
+ assert c3.getargs(var=True) == ("x", "z")
+
+ def f4(x, *y, **z):
+ raise NotImplementedError()
+
+ c4 = Code.from_function(f4)
+ assert c4.getargs(var=True) == ("x", "y", "z")
+
+
+def test_frame_getargs() -> None:
+ def f1(x) -> FrameType:
+ return sys._getframe(0)
+
+ fr1 = Frame(f1("a"))
+ assert fr1.getargs(var=True) == [("x", "a")]
+
+ def f2(x, *y) -> FrameType:
+ return sys._getframe(0)
+
+ fr2 = Frame(f2("a", "b", "c"))
+ assert fr2.getargs(var=True) == [("x", "a"), ("y", ("b", "c"))]
+
+ def f3(x, **z) -> FrameType:
+ return sys._getframe(0)
+
+ fr3 = Frame(f3("a", b="c"))
+ assert fr3.getargs(var=True) == [("x", "a"), ("z", {"b": "c"})]
+
+ def f4(x, *y, **z) -> FrameType:
+ return sys._getframe(0)
+
+ fr4 = Frame(f4("a", "b", c="d"))
+ assert fr4.getargs(var=True) == [("x", "a"), ("y", ("b",)), ("z", {"c": "d"})]
+
+
+class TestExceptionInfo:
+ def test_bad_getsource(self) -> None:
+ try:
+ if False:
+ pass
+ else:
+ assert False
+ except AssertionError:
+ exci = ExceptionInfo.from_current()
+ assert exci.getrepr()
+
+ def test_from_current_with_missing(self) -> None:
+ with pytest.raises(AssertionError, match="no current exception"):
+ ExceptionInfo.from_current()
+
+
+class TestTracebackEntry:
+ def test_getsource(self) -> None:
+ try:
+ if False:
+ pass
+ else:
+ assert False
+ except AssertionError:
+ exci = ExceptionInfo.from_current()
+ entry = exci.traceback[0]
+ source = entry.getsource()
+ assert source is not None
+ assert len(source) == 6
+ assert "assert False" in source[5]
+
+ def test_tb_entry_str(self):
+ try:
+ assert False
+ except AssertionError:
+ exci = ExceptionInfo.from_current()
+ pattern = r" File '.*test_code.py':\d+ in test_tb_entry_str\n assert False"
+ entry = str(exci.traceback[0])
+ assert re.match(pattern, entry)
+
+
+class TestReprFuncArgs:
+ def test_not_raise_exception_with_mixed_encoding(self, tw_mock) -> None:
+ args = [("unicode_string", "São Paulo"), ("utf8_string", b"S\xc3\xa3o Paulo")]
+
+ r = ReprFuncArgs(args)
+ r.toterminal(tw_mock)
+
+ assert (
+ tw_mock.lines[0]
+ == r"unicode_string = São Paulo, utf8_string = b'S\xc3\xa3o Paulo'"
+ )
+
+
+def test_ExceptionChainRepr():
+ """Test ExceptionChainRepr, especially with regard to being hashable."""
+ try:
+ raise ValueError()
+ except ValueError:
+ excinfo1 = ExceptionInfo.from_current()
+ excinfo2 = ExceptionInfo.from_current()
+
+ repr1 = excinfo1.getrepr()
+ repr2 = excinfo2.getrepr()
+ assert repr1 != repr2
+
+ assert isinstance(repr1, ExceptionChainRepr)
+ assert hash(repr1) != hash(repr2)
+ assert repr1 is not excinfo1.getrepr()
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/code/test_excinfo.py b/testing/web-platform/tests/tools/third_party/pytest/testing/code/test_excinfo.py
new file mode 100644
index 0000000000..61aa4406ad
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/code/test_excinfo.py
@@ -0,0 +1,1470 @@
+import importlib
+import io
+import operator
+import queue
+import sys
+import textwrap
+from pathlib import Path
+from typing import Any
+from typing import Dict
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+import _pytest
+import pytest
+from _pytest._code.code import ExceptionChainRepr
+from _pytest._code.code import ExceptionInfo
+from _pytest._code.code import FormattedExcinfo
+from _pytest._io import TerminalWriter
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pathlib import bestrelpath
+from _pytest.pathlib import import_path
+from _pytest.pytester import LineMatcher
+from _pytest.pytester import Pytester
+
+
+if TYPE_CHECKING:
+ from _pytest._code.code import _TracebackStyle
+
+
+@pytest.fixture
+def limited_recursion_depth():
+ before = sys.getrecursionlimit()
+ sys.setrecursionlimit(150)
+ yield
+ sys.setrecursionlimit(before)
+
+
+def test_excinfo_simple() -> None:
+ try:
+ raise ValueError
+ except ValueError:
+ info = _pytest._code.ExceptionInfo.from_current()
+ assert info.type == ValueError
+
+
+def test_excinfo_from_exc_info_simple() -> None:
+ try:
+ raise ValueError
+ except ValueError as e:
+ assert e.__traceback__ is not None
+ info = _pytest._code.ExceptionInfo.from_exc_info((type(e), e, e.__traceback__))
+ assert info.type == ValueError
+
+
+def test_excinfo_getstatement():
+ def g():
+ raise ValueError
+
+ def f():
+ g()
+
+ try:
+ f()
+ except ValueError:
+ excinfo = _pytest._code.ExceptionInfo.from_current()
+ linenumbers = [
+ f.__code__.co_firstlineno - 1 + 4,
+ f.__code__.co_firstlineno - 1 + 1,
+ g.__code__.co_firstlineno - 1 + 1,
+ ]
+ values = list(excinfo.traceback)
+ foundlinenumbers = [x.lineno for x in values]
+ assert foundlinenumbers == linenumbers
+ # for x in info:
+ # print "%s:%d %s" %(x.path.relto(root), x.lineno, x.statement)
+ # xxx
+
+
+# testchain for getentries test below
+
+
+def f():
+ #
+ raise ValueError
+ #
+
+
+def g():
+ #
+ __tracebackhide__ = True
+ f()
+ #
+
+
+def h():
+ #
+ g()
+ #
+
+
+class TestTraceback_f_g_h:
+ def setup_method(self, method):
+ try:
+ h()
+ except ValueError:
+ self.excinfo = _pytest._code.ExceptionInfo.from_current()
+
+ def test_traceback_entries(self):
+ tb = self.excinfo.traceback
+ entries = list(tb)
+ assert len(tb) == 4 # maybe fragile test
+ assert len(entries) == 4 # maybe fragile test
+ names = ["f", "g", "h"]
+ for entry in entries:
+ try:
+ names.remove(entry.frame.code.name)
+ except ValueError:
+ pass
+ assert not names
+
+ def test_traceback_entry_getsource(self):
+ tb = self.excinfo.traceback
+ s = str(tb[-1].getsource())
+ assert s.startswith("def f():")
+ assert s.endswith("raise ValueError")
+
+ def test_traceback_entry_getsource_in_construct(self):
+ def xyz():
+ try:
+ raise ValueError
+ except somenoname: # type: ignore[name-defined] # noqa: F821
+ pass # pragma: no cover
+
+ try:
+ xyz()
+ except NameError:
+ excinfo = _pytest._code.ExceptionInfo.from_current()
+ else:
+ assert False, "did not raise NameError"
+
+ tb = excinfo.traceback
+ source = tb[-1].getsource()
+ assert source is not None
+ assert source.deindent().lines == [
+ "def xyz():",
+ " try:",
+ " raise ValueError",
+ " except somenoname: # type: ignore[name-defined] # noqa: F821",
+ ]
+
+ def test_traceback_cut(self) -> None:
+ co = _pytest._code.Code.from_function(f)
+ path, firstlineno = co.path, co.firstlineno
+ assert isinstance(path, Path)
+ traceback = self.excinfo.traceback
+ newtraceback = traceback.cut(path=path, firstlineno=firstlineno)
+ assert len(newtraceback) == 1
+ newtraceback = traceback.cut(path=path, lineno=firstlineno + 2)
+ assert len(newtraceback) == 1
+
+ def test_traceback_cut_excludepath(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile("def f(): raise ValueError")
+ with pytest.raises(ValueError) as excinfo:
+ import_path(p, root=pytester.path).f() # type: ignore[attr-defined]
+ basedir = Path(pytest.__file__).parent
+ newtraceback = excinfo.traceback.cut(excludepath=basedir)
+ for x in newtraceback:
+ assert isinstance(x.path, Path)
+ assert basedir not in x.path.parents
+ assert newtraceback[-1].frame.code.path == p
+
+ def test_traceback_filter(self):
+ traceback = self.excinfo.traceback
+ ntraceback = traceback.filter()
+ assert len(ntraceback) == len(traceback) - 1
+
+ @pytest.mark.parametrize(
+ "tracebackhide, matching",
+ [
+ (lambda info: True, True),
+ (lambda info: False, False),
+ (operator.methodcaller("errisinstance", ValueError), True),
+ (operator.methodcaller("errisinstance", IndexError), False),
+ ],
+ )
+ def test_traceback_filter_selective(self, tracebackhide, matching):
+ def f():
+ #
+ raise ValueError
+ #
+
+ def g():
+ #
+ __tracebackhide__ = tracebackhide
+ f()
+ #
+
+ def h():
+ #
+ g()
+ #
+
+ excinfo = pytest.raises(ValueError, h)
+ traceback = excinfo.traceback
+ ntraceback = traceback.filter()
+ print(f"old: {traceback!r}")
+ print(f"new: {ntraceback!r}")
+
+ if matching:
+ assert len(ntraceback) == len(traceback) - 2
+ else:
+ # -1 because of the __tracebackhide__ in pytest.raises
+ assert len(ntraceback) == len(traceback) - 1
+
+ def test_traceback_recursion_index(self):
+ def f(n):
+ if n < 10:
+ n += 1
+ f(n)
+
+ excinfo = pytest.raises(RuntimeError, f, 8)
+ traceback = excinfo.traceback
+ recindex = traceback.recursionindex()
+ assert recindex == 3
+
+ def test_traceback_only_specific_recursion_errors(self, monkeypatch):
+ def f(n):
+ if n == 0:
+ raise RuntimeError("hello")
+ f(n - 1)
+
+ excinfo = pytest.raises(RuntimeError, f, 25)
+ monkeypatch.delattr(excinfo.traceback.__class__, "recursionindex")
+ repr = excinfo.getrepr()
+ assert "RuntimeError: hello" in str(repr.reprcrash)
+
+ def test_traceback_no_recursion_index(self) -> None:
+ def do_stuff() -> None:
+ raise RuntimeError
+
+ def reraise_me() -> None:
+ import sys
+
+ exc, val, tb = sys.exc_info()
+ assert val is not None
+ raise val.with_traceback(tb)
+
+ def f(n: int) -> None:
+ try:
+ do_stuff()
+ except BaseException:
+ reraise_me()
+
+ excinfo = pytest.raises(RuntimeError, f, 8)
+ assert excinfo is not None
+ traceback = excinfo.traceback
+ recindex = traceback.recursionindex()
+ assert recindex is None
+
+ def test_traceback_messy_recursion(self):
+ # XXX: simplified locally testable version
+ decorator = pytest.importorskip("decorator").decorator
+
+ def log(f, *k, **kw):
+ print(f"{k} {kw}")
+ f(*k, **kw)
+
+ log = decorator(log)
+
+ def fail():
+ raise ValueError("")
+
+ fail = log(log(fail))
+
+ excinfo = pytest.raises(ValueError, fail)
+ assert excinfo.traceback.recursionindex() is None
+
+ def test_traceback_getcrashentry(self):
+ def i():
+ __tracebackhide__ = True
+ raise ValueError
+
+ def h():
+ i()
+
+ def g():
+ __tracebackhide__ = True
+ h()
+
+ def f():
+ g()
+
+ excinfo = pytest.raises(ValueError, f)
+ tb = excinfo.traceback
+ entry = tb.getcrashentry()
+ co = _pytest._code.Code.from_function(h)
+ assert entry.frame.code.path == co.path
+ assert entry.lineno == co.firstlineno + 1
+ assert entry.frame.code.name == "h"
+
+ def test_traceback_getcrashentry_empty(self):
+ def g():
+ __tracebackhide__ = True
+ raise ValueError
+
+ def f():
+ __tracebackhide__ = True
+ g()
+
+ excinfo = pytest.raises(ValueError, f)
+ tb = excinfo.traceback
+ entry = tb.getcrashentry()
+ co = _pytest._code.Code.from_function(g)
+ assert entry.frame.code.path == co.path
+ assert entry.lineno == co.firstlineno + 2
+ assert entry.frame.code.name == "g"
+
+
+def test_excinfo_exconly():
+ excinfo = pytest.raises(ValueError, h)
+ assert excinfo.exconly().startswith("ValueError")
+ with pytest.raises(ValueError) as excinfo:
+ raise ValueError("hello\nworld")
+ msg = excinfo.exconly(tryshort=True)
+ assert msg.startswith("ValueError")
+ assert msg.endswith("world")
+
+
+def test_excinfo_repr_str() -> None:
+ excinfo1 = pytest.raises(ValueError, h)
+ assert repr(excinfo1) == "<ExceptionInfo ValueError() tblen=4>"
+ assert str(excinfo1) == "<ExceptionInfo ValueError() tblen=4>"
+
+ class CustomException(Exception):
+ def __repr__(self):
+ return "custom_repr"
+
+ def raises() -> None:
+ raise CustomException()
+
+ excinfo2 = pytest.raises(CustomException, raises)
+ assert repr(excinfo2) == "<ExceptionInfo custom_repr tblen=2>"
+ assert str(excinfo2) == "<ExceptionInfo custom_repr tblen=2>"
+
+
+def test_excinfo_for_later() -> None:
+ e = ExceptionInfo[BaseException].for_later()
+ assert "for raises" in repr(e)
+ assert "for raises" in str(e)
+
+
+def test_excinfo_errisinstance():
+ excinfo = pytest.raises(ValueError, h)
+ assert excinfo.errisinstance(ValueError)
+
+
+def test_excinfo_no_sourcecode():
+ try:
+ exec("raise ValueError()")
+ except ValueError:
+ excinfo = _pytest._code.ExceptionInfo.from_current()
+ s = str(excinfo.traceback[-1])
+ assert s == " File '<string>':1 in <module>\n ???\n"
+
+
+def test_excinfo_no_python_sourcecode(tmp_path: Path) -> None:
+ # XXX: simplified locally testable version
+ tmp_path.joinpath("test.txt").write_text("{{ h()}}:")
+
+ jinja2 = pytest.importorskip("jinja2")
+ loader = jinja2.FileSystemLoader(str(tmp_path))
+ env = jinja2.Environment(loader=loader)
+ template = env.get_template("test.txt")
+ excinfo = pytest.raises(ValueError, template.render, h=h)
+ for item in excinfo.traceback:
+ print(item) # XXX: for some reason jinja.Template.render is printed in full
+ item.source # shouldn't fail
+ if isinstance(item.path, Path) and item.path.name == "test.txt":
+ assert str(item.source) == "{{ h()}}:"
+
+
+def test_entrysource_Queue_example():
+ try:
+ queue.Queue().get(timeout=0.001)
+ except queue.Empty:
+ excinfo = _pytest._code.ExceptionInfo.from_current()
+ entry = excinfo.traceback[-1]
+ source = entry.getsource()
+ assert source is not None
+ s = str(source).strip()
+ assert s.startswith("def get")
+
+
+def test_codepath_Queue_example() -> None:
+ try:
+ queue.Queue().get(timeout=0.001)
+ except queue.Empty:
+ excinfo = _pytest._code.ExceptionInfo.from_current()
+ entry = excinfo.traceback[-1]
+ path = entry.path
+ assert isinstance(path, Path)
+ assert path.name.lower() == "queue.py"
+ assert path.exists()
+
+
+def test_match_succeeds():
+ with pytest.raises(ZeroDivisionError) as excinfo:
+ 0 // 0
+ excinfo.match(r".*zero.*")
+
+
+def test_match_raises_error(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_division_zero():
+ with pytest.raises(ZeroDivisionError) as excinfo:
+ 0 / 0
+ excinfo.match(r'[123]+')
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret != 0
+
+ exc_msg = "Regex pattern '[[]123[]]+' does not match 'division by zero'."
+ result.stdout.fnmatch_lines([f"E * AssertionError: {exc_msg}"])
+ result.stdout.no_fnmatch_line("*__tracebackhide__ = True*")
+
+ result = pytester.runpytest("--fulltrace")
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(
+ ["*__tracebackhide__ = True*", f"E * AssertionError: {exc_msg}"]
+ )
+
+
+class TestFormattedExcinfo:
+ @pytest.fixture
+ def importasmod(self, tmp_path: Path, _sys_snapshot):
+ def importasmod(source):
+ source = textwrap.dedent(source)
+ modpath = tmp_path.joinpath("mod.py")
+ tmp_path.joinpath("__init__.py").touch()
+ modpath.write_text(source)
+ importlib.invalidate_caches()
+ return import_path(modpath, root=tmp_path)
+
+ return importasmod
+
+ def test_repr_source(self):
+ pr = FormattedExcinfo()
+ source = _pytest._code.Source(
+ """\
+ def f(x):
+ pass
+ """
+ ).strip()
+ pr.flow_marker = "|" # type: ignore[misc]
+ lines = pr.get_source(source, 0)
+ assert len(lines) == 2
+ assert lines[0] == "| def f(x):"
+ assert lines[1] == " pass"
+
+ def test_repr_source_excinfo(self) -> None:
+ """Check if indentation is right."""
+ try:
+
+ def f():
+ 1 / 0
+
+ f()
+
+ except BaseException:
+ excinfo = _pytest._code.ExceptionInfo.from_current()
+ else:
+ assert False, "did not raise"
+
+ pr = FormattedExcinfo()
+ source = pr._getentrysource(excinfo.traceback[-1])
+ assert source is not None
+ lines = pr.get_source(source, 1, excinfo)
+ for line in lines:
+ print(line)
+ assert lines == [
+ " def f():",
+ "> 1 / 0",
+ "E ZeroDivisionError: division by zero",
+ ]
+
+ def test_repr_source_not_existing(self):
+ pr = FormattedExcinfo()
+ co = compile("raise ValueError()", "", "exec")
+ try:
+ exec(co)
+ except ValueError:
+ excinfo = _pytest._code.ExceptionInfo.from_current()
+ repr = pr.repr_excinfo(excinfo)
+ assert repr.reprtraceback.reprentries[1].lines[0] == "> ???"
+ assert repr.chain[0][0].reprentries[1].lines[0] == "> ???"
+
+ def test_repr_many_line_source_not_existing(self):
+ pr = FormattedExcinfo()
+ co = compile(
+ """
+a = 1
+raise ValueError()
+""",
+ "",
+ "exec",
+ )
+ try:
+ exec(co)
+ except ValueError:
+ excinfo = _pytest._code.ExceptionInfo.from_current()
+ repr = pr.repr_excinfo(excinfo)
+ assert repr.reprtraceback.reprentries[1].lines[0] == "> ???"
+ assert repr.chain[0][0].reprentries[1].lines[0] == "> ???"
+
+ def test_repr_source_failing_fullsource(self, monkeypatch) -> None:
+ pr = FormattedExcinfo()
+
+ try:
+ 1 / 0
+ except ZeroDivisionError:
+ excinfo = ExceptionInfo.from_current()
+
+ with monkeypatch.context() as m:
+ m.setattr(_pytest._code.Code, "fullsource", property(lambda self: None))
+ repr = pr.repr_excinfo(excinfo)
+
+ assert repr.reprtraceback.reprentries[0].lines[0] == "> ???"
+ assert repr.chain[0][0].reprentries[0].lines[0] == "> ???"
+
+ def test_repr_local(self) -> None:
+ p = FormattedExcinfo(showlocals=True)
+ loc = {"y": 5, "z": 7, "x": 3, "@x": 2, "__builtins__": {}}
+ reprlocals = p.repr_locals(loc)
+ assert reprlocals is not None
+ assert reprlocals.lines
+ assert reprlocals.lines[0] == "__builtins__ = <builtins>"
+ assert reprlocals.lines[1] == "x = 3"
+ assert reprlocals.lines[2] == "y = 5"
+ assert reprlocals.lines[3] == "z = 7"
+
+ def test_repr_local_with_error(self) -> None:
+ class ObjWithErrorInRepr:
+ def __repr__(self):
+ raise NotImplementedError
+
+ p = FormattedExcinfo(showlocals=True, truncate_locals=False)
+ loc = {"x": ObjWithErrorInRepr(), "__builtins__": {}}
+ reprlocals = p.repr_locals(loc)
+ assert reprlocals is not None
+ assert reprlocals.lines
+ assert reprlocals.lines[0] == "__builtins__ = <builtins>"
+ assert "[NotImplementedError() raised in repr()]" in reprlocals.lines[1]
+
+ def test_repr_local_with_exception_in_class_property(self) -> None:
+ class ExceptionWithBrokenClass(Exception):
+ # Type ignored because it's bypassed intentionally.
+ @property # type: ignore
+ def __class__(self):
+ raise TypeError("boom!")
+
+ class ObjWithErrorInRepr:
+ def __repr__(self):
+ raise ExceptionWithBrokenClass()
+
+ p = FormattedExcinfo(showlocals=True, truncate_locals=False)
+ loc = {"x": ObjWithErrorInRepr(), "__builtins__": {}}
+ reprlocals = p.repr_locals(loc)
+ assert reprlocals is not None
+ assert reprlocals.lines
+ assert reprlocals.lines[0] == "__builtins__ = <builtins>"
+ assert "[ExceptionWithBrokenClass() raised in repr()]" in reprlocals.lines[1]
+
+ def test_repr_local_truncated(self) -> None:
+ loc = {"l": [i for i in range(10)]}
+ p = FormattedExcinfo(showlocals=True)
+ truncated_reprlocals = p.repr_locals(loc)
+ assert truncated_reprlocals is not None
+ assert truncated_reprlocals.lines
+ assert truncated_reprlocals.lines[0] == "l = [0, 1, 2, 3, 4, 5, ...]"
+
+ q = FormattedExcinfo(showlocals=True, truncate_locals=False)
+ full_reprlocals = q.repr_locals(loc)
+ assert full_reprlocals is not None
+ assert full_reprlocals.lines
+ assert full_reprlocals.lines[0] == "l = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"
+
+ def test_repr_tracebackentry_lines(self, importasmod) -> None:
+ mod = importasmod(
+ """
+ def func1():
+ raise ValueError("hello\\nworld")
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.func1)
+ excinfo.traceback = excinfo.traceback.filter()
+ p = FormattedExcinfo()
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-1])
+
+ # test as intermediate entry
+ lines = reprtb.lines
+ assert lines[0] == " def func1():"
+ assert lines[1] == '> raise ValueError("hello\\nworld")'
+
+ # test as last entry
+ p = FormattedExcinfo(showlocals=True)
+ repr_entry = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = repr_entry.lines
+ assert lines[0] == " def func1():"
+ assert lines[1] == '> raise ValueError("hello\\nworld")'
+ assert lines[2] == "E ValueError: hello"
+ assert lines[3] == "E world"
+ assert not lines[4:]
+
+ loc = repr_entry.reprfileloc
+ assert loc is not None
+ assert loc.path == mod.__file__
+ assert loc.lineno == 3
+ # assert loc.message == "ValueError: hello"
+
+ def test_repr_tracebackentry_lines2(self, importasmod, tw_mock) -> None:
+ mod = importasmod(
+ """
+ def func1(m, x, y, z):
+ raise ValueError("hello\\nworld")
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.func1, "m" * 90, 5, 13, "z" * 120)
+ excinfo.traceback = excinfo.traceback.filter()
+ entry = excinfo.traceback[-1]
+ p = FormattedExcinfo(funcargs=True)
+ reprfuncargs = p.repr_args(entry)
+ assert reprfuncargs is not None
+ assert reprfuncargs.args[0] == ("m", repr("m" * 90))
+ assert reprfuncargs.args[1] == ("x", "5")
+ assert reprfuncargs.args[2] == ("y", "13")
+ assert reprfuncargs.args[3] == ("z", repr("z" * 120))
+
+ p = FormattedExcinfo(funcargs=True)
+ repr_entry = p.repr_traceback_entry(entry)
+ assert repr_entry.reprfuncargs is not None
+ assert repr_entry.reprfuncargs.args == reprfuncargs.args
+ repr_entry.toterminal(tw_mock)
+ assert tw_mock.lines[0] == "m = " + repr("m" * 90)
+ assert tw_mock.lines[1] == "x = 5, y = 13"
+ assert tw_mock.lines[2] == "z = " + repr("z" * 120)
+
+ def test_repr_tracebackentry_lines_var_kw_args(self, importasmod, tw_mock) -> None:
+ mod = importasmod(
+ """
+ def func1(x, *y, **z):
+ raise ValueError("hello\\nworld")
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.func1, "a", "b", c="d")
+ excinfo.traceback = excinfo.traceback.filter()
+ entry = excinfo.traceback[-1]
+ p = FormattedExcinfo(funcargs=True)
+ reprfuncargs = p.repr_args(entry)
+ assert reprfuncargs is not None
+ assert reprfuncargs.args[0] == ("x", repr("a"))
+ assert reprfuncargs.args[1] == ("y", repr(("b",)))
+ assert reprfuncargs.args[2] == ("z", repr({"c": "d"}))
+
+ p = FormattedExcinfo(funcargs=True)
+ repr_entry = p.repr_traceback_entry(entry)
+ assert repr_entry.reprfuncargs
+ assert repr_entry.reprfuncargs.args == reprfuncargs.args
+ repr_entry.toterminal(tw_mock)
+ assert tw_mock.lines[0] == "x = 'a', y = ('b',), z = {'c': 'd'}"
+
+ def test_repr_tracebackentry_short(self, importasmod) -> None:
+ mod = importasmod(
+ """
+ def func1():
+ raise ValueError("hello")
+ def entry():
+ func1()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-2])
+ lines = reprtb.lines
+ basename = Path(mod.__file__).name
+ assert lines[0] == " func1()"
+ assert reprtb.reprfileloc is not None
+ assert basename in str(reprtb.reprfileloc.path)
+ assert reprtb.reprfileloc.lineno == 5
+
+ # test last entry
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = reprtb.lines
+ assert lines[0] == ' raise ValueError("hello")'
+ assert lines[1] == "E ValueError: hello"
+ assert reprtb.reprfileloc is not None
+ assert basename in str(reprtb.reprfileloc.path)
+ assert reprtb.reprfileloc.lineno == 3
+
+ def test_repr_tracebackentry_no(self, importasmod):
+ mod = importasmod(
+ """
+ def func1():
+ raise ValueError("hello")
+ def entry():
+ func1()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+ p = FormattedExcinfo(style="no")
+ p.repr_traceback_entry(excinfo.traceback[-2])
+
+ p = FormattedExcinfo(style="no")
+ reprentry = p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ lines = reprentry.lines
+ assert lines[0] == "E ValueError: hello"
+ assert not lines[1:]
+
+ def test_repr_traceback_tbfilter(self, importasmod):
+ mod = importasmod(
+ """
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+ p = FormattedExcinfo(tbfilter=True)
+ reprtb = p.repr_traceback(excinfo)
+ assert len(reprtb.reprentries) == 2
+ p = FormattedExcinfo(tbfilter=False)
+ reprtb = p.repr_traceback(excinfo)
+ assert len(reprtb.reprentries) == 3
+
+ def test_traceback_short_no_source(self, importasmod, monkeypatch) -> None:
+ mod = importasmod(
+ """
+ def func1():
+ raise ValueError("hello")
+ def entry():
+ func1()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+ from _pytest._code.code import Code
+
+ monkeypatch.setattr(Code, "path", "bogus")
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback_entry(excinfo.traceback[-2])
+ lines = reprtb.lines
+ last_p = FormattedExcinfo(style="short")
+ last_reprtb = last_p.repr_traceback_entry(excinfo.traceback[-1], excinfo)
+ last_lines = last_reprtb.lines
+ monkeypatch.undo()
+ assert lines[0] == " func1()"
+
+ assert last_lines[0] == ' raise ValueError("hello")'
+ assert last_lines[1] == "E ValueError: hello"
+
+ def test_repr_traceback_and_excinfo(self, importasmod) -> None:
+ mod = importasmod(
+ """
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+
+ styles: Tuple[_TracebackStyle, ...] = ("long", "short")
+ for style in styles:
+ p = FormattedExcinfo(style=style)
+ reprtb = p.repr_traceback(excinfo)
+ assert len(reprtb.reprentries) == 2
+ assert reprtb.style == style
+ assert not reprtb.extraline
+ repr = p.repr_excinfo(excinfo)
+ assert repr.reprtraceback
+ assert len(repr.reprtraceback.reprentries) == len(reprtb.reprentries)
+
+ assert repr.chain[0][0]
+ assert len(repr.chain[0][0].reprentries) == len(reprtb.reprentries)
+ assert repr.reprcrash is not None
+ assert repr.reprcrash.path.endswith("mod.py")
+ assert repr.reprcrash.message == "ValueError: 0"
+
+ def test_repr_traceback_with_invalid_cwd(self, importasmod, monkeypatch) -> None:
+ mod = importasmod(
+ """
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+
+ p = FormattedExcinfo(abspath=False)
+
+ raised = 0
+
+ orig_path_cwd = Path.cwd
+
+ def raiseos():
+ nonlocal raised
+ upframe = sys._getframe().f_back
+ assert upframe is not None
+ if upframe.f_code.co_name == "_makepath":
+ # Only raise with expected calls, but not via e.g. inspect for
+ # py38-windows.
+ raised += 1
+ raise OSError(2, "custom_oserror")
+ return orig_path_cwd()
+
+ monkeypatch.setattr(Path, "cwd", raiseos)
+ assert p._makepath(Path(__file__)) == __file__
+ assert raised == 1
+ repr_tb = p.repr_traceback(excinfo)
+
+ matcher = LineMatcher(str(repr_tb).splitlines())
+ matcher.fnmatch_lines(
+ [
+ "def entry():",
+ "> f(0)",
+ "",
+ f"{mod.__file__}:5: ",
+ "_ _ *",
+ "",
+ " def f(x):",
+ "> raise ValueError(x)",
+ "E ValueError: 0",
+ "",
+ f"{mod.__file__}:3: ValueError",
+ ]
+ )
+ assert raised == 3
+
+ def test_repr_excinfo_addouterr(self, importasmod, tw_mock):
+ mod = importasmod(
+ """
+ def entry():
+ raise ValueError()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+ repr = excinfo.getrepr()
+ repr.addsection("title", "content")
+ repr.toterminal(tw_mock)
+ assert tw_mock.lines[-1] == "content"
+ assert tw_mock.lines[-2] == ("-", "title")
+
+ def test_repr_excinfo_reprcrash(self, importasmod) -> None:
+ mod = importasmod(
+ """
+ def entry():
+ raise ValueError()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+ repr = excinfo.getrepr()
+ assert repr.reprcrash is not None
+ assert repr.reprcrash.path.endswith("mod.py")
+ assert repr.reprcrash.lineno == 3
+ assert repr.reprcrash.message == "ValueError"
+ assert str(repr.reprcrash).endswith("mod.py:3: ValueError")
+
+ def test_repr_traceback_recursion(self, importasmod):
+ mod = importasmod(
+ """
+ def rec2(x):
+ return rec1(x+1)
+ def rec1(x):
+ return rec2(x-1)
+ def entry():
+ rec1(42)
+ """
+ )
+ excinfo = pytest.raises(RuntimeError, mod.entry)
+
+ for style in ("short", "long", "no"):
+ p = FormattedExcinfo(style="short")
+ reprtb = p.repr_traceback(excinfo)
+ assert reprtb.extraline == "!!! Recursion detected (same locals & position)"
+ assert str(reprtb)
+
+ def test_reprexcinfo_getrepr(self, importasmod) -> None:
+ mod = importasmod(
+ """
+ def f(x):
+ raise ValueError(x)
+ def entry():
+ f(0)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.entry)
+
+ styles: Tuple[_TracebackStyle, ...] = ("short", "long", "no")
+ for style in styles:
+ for showlocals in (True, False):
+ repr = excinfo.getrepr(style=style, showlocals=showlocals)
+ assert repr.reprtraceback.style == style
+
+ assert isinstance(repr, ExceptionChainRepr)
+ for r in repr.chain:
+ assert r[0].style == style
+
+ def test_reprexcinfo_unicode(self):
+ from _pytest._code.code import TerminalRepr
+
+ class MyRepr(TerminalRepr):
+ def toterminal(self, tw: TerminalWriter) -> None:
+ tw.line("Ñ")
+
+ x = str(MyRepr())
+ assert x == "Ñ"
+
+ def test_toterminal_long(self, importasmod, tw_mock):
+ mod = importasmod(
+ """
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.f)
+ excinfo.traceback = excinfo.traceback.filter()
+ repr = excinfo.getrepr()
+ repr.toterminal(tw_mock)
+ assert tw_mock.lines[0] == ""
+ tw_mock.lines.pop(0)
+ assert tw_mock.lines[0] == " def f():"
+ assert tw_mock.lines[1] == "> g(3)"
+ assert tw_mock.lines[2] == ""
+ line = tw_mock.get_write_msg(3)
+ assert line.endswith("mod.py")
+ assert tw_mock.lines[4] == (":5: ")
+ assert tw_mock.lines[5] == ("_ ", None)
+ assert tw_mock.lines[6] == ""
+ assert tw_mock.lines[7] == " def g(x):"
+ assert tw_mock.lines[8] == "> raise ValueError(x)"
+ assert tw_mock.lines[9] == "E ValueError: 3"
+ assert tw_mock.lines[10] == ""
+ line = tw_mock.get_write_msg(11)
+ assert line.endswith("mod.py")
+ assert tw_mock.lines[12] == ":3: ValueError"
+
+ def test_toterminal_long_missing_source(
+ self, importasmod, tmp_path: Path, tw_mock
+ ) -> None:
+ mod = importasmod(
+ """
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.f)
+ tmp_path.joinpath("mod.py").unlink()
+ excinfo.traceback = excinfo.traceback.filter()
+ repr = excinfo.getrepr()
+ repr.toterminal(tw_mock)
+ assert tw_mock.lines[0] == ""
+ tw_mock.lines.pop(0)
+ assert tw_mock.lines[0] == "> ???"
+ assert tw_mock.lines[1] == ""
+ line = tw_mock.get_write_msg(2)
+ assert line.endswith("mod.py")
+ assert tw_mock.lines[3] == ":5: "
+ assert tw_mock.lines[4] == ("_ ", None)
+ assert tw_mock.lines[5] == ""
+ assert tw_mock.lines[6] == "> ???"
+ assert tw_mock.lines[7] == "E ValueError: 3"
+ assert tw_mock.lines[8] == ""
+ line = tw_mock.get_write_msg(9)
+ assert line.endswith("mod.py")
+ assert tw_mock.lines[10] == ":3: ValueError"
+
+ def test_toterminal_long_incomplete_source(
+ self, importasmod, tmp_path: Path, tw_mock
+ ) -> None:
+ mod = importasmod(
+ """
+ def g(x):
+ raise ValueError(x)
+ def f():
+ g(3)
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.f)
+ tmp_path.joinpath("mod.py").write_text("asdf")
+ excinfo.traceback = excinfo.traceback.filter()
+ repr = excinfo.getrepr()
+ repr.toterminal(tw_mock)
+ assert tw_mock.lines[0] == ""
+ tw_mock.lines.pop(0)
+ assert tw_mock.lines[0] == "> ???"
+ assert tw_mock.lines[1] == ""
+ line = tw_mock.get_write_msg(2)
+ assert line.endswith("mod.py")
+ assert tw_mock.lines[3] == ":5: "
+ assert tw_mock.lines[4] == ("_ ", None)
+ assert tw_mock.lines[5] == ""
+ assert tw_mock.lines[6] == "> ???"
+ assert tw_mock.lines[7] == "E ValueError: 3"
+ assert tw_mock.lines[8] == ""
+ line = tw_mock.get_write_msg(9)
+ assert line.endswith("mod.py")
+ assert tw_mock.lines[10] == ":3: ValueError"
+
+ def test_toterminal_long_filenames(
+ self, importasmod, tw_mock, monkeypatch: MonkeyPatch
+ ) -> None:
+ mod = importasmod(
+ """
+ def f():
+ raise ValueError()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.f)
+ path = Path(mod.__file__)
+ monkeypatch.chdir(path.parent)
+ repr = excinfo.getrepr(abspath=False)
+ repr.toterminal(tw_mock)
+ x = bestrelpath(Path.cwd(), path)
+ if len(x) < len(str(path)):
+ msg = tw_mock.get_write_msg(-2)
+ assert msg == "mod.py"
+ assert tw_mock.lines[-1] == ":3: ValueError"
+
+ repr = excinfo.getrepr(abspath=True)
+ repr.toterminal(tw_mock)
+ msg = tw_mock.get_write_msg(-2)
+ assert msg == str(path)
+ line = tw_mock.lines[-1]
+ assert line == ":3: ValueError"
+
+ @pytest.mark.parametrize(
+ "reproptions",
+ [
+ pytest.param(
+ {
+ "style": style,
+ "showlocals": showlocals,
+ "funcargs": funcargs,
+ "tbfilter": tbfilter,
+ },
+ id="style={},showlocals={},funcargs={},tbfilter={}".format(
+ style, showlocals, funcargs, tbfilter
+ ),
+ )
+ for style in ["long", "short", "line", "no", "native", "value", "auto"]
+ for showlocals in (True, False)
+ for tbfilter in (True, False)
+ for funcargs in (True, False)
+ ],
+ )
+ def test_format_excinfo(self, reproptions: Dict[str, Any]) -> None:
+ def bar():
+ assert False, "some error"
+
+ def foo():
+ bar()
+
+ # using inline functions as opposed to importasmod so we get source code lines
+ # in the tracebacks (otherwise inspect.getsource doesn't find the source code).
+ with pytest.raises(AssertionError) as excinfo:
+ foo()
+ file = io.StringIO()
+ tw = TerminalWriter(file=file)
+ repr = excinfo.getrepr(**reproptions)
+ repr.toterminal(tw)
+ assert file.getvalue()
+
+ def test_traceback_repr_style(self, importasmod, tw_mock):
+ mod = importasmod(
+ """
+ def f():
+ g()
+ def g():
+ h()
+ def h():
+ i()
+ def i():
+ raise ValueError()
+ """
+ )
+ excinfo = pytest.raises(ValueError, mod.f)
+ excinfo.traceback = excinfo.traceback.filter()
+ excinfo.traceback[1].set_repr_style("short")
+ excinfo.traceback[2].set_repr_style("short")
+ r = excinfo.getrepr(style="long")
+ r.toterminal(tw_mock)
+ for line in tw_mock.lines:
+ print(line)
+ assert tw_mock.lines[0] == ""
+ assert tw_mock.lines[1] == " def f():"
+ assert tw_mock.lines[2] == "> g()"
+ assert tw_mock.lines[3] == ""
+ msg = tw_mock.get_write_msg(4)
+ assert msg.endswith("mod.py")
+ assert tw_mock.lines[5] == ":3: "
+ assert tw_mock.lines[6] == ("_ ", None)
+ tw_mock.get_write_msg(7)
+ assert tw_mock.lines[8].endswith("in g")
+ assert tw_mock.lines[9] == " h()"
+ tw_mock.get_write_msg(10)
+ assert tw_mock.lines[11].endswith("in h")
+ assert tw_mock.lines[12] == " i()"
+ assert tw_mock.lines[13] == ("_ ", None)
+ assert tw_mock.lines[14] == ""
+ assert tw_mock.lines[15] == " def i():"
+ assert tw_mock.lines[16] == "> raise ValueError()"
+ assert tw_mock.lines[17] == "E ValueError"
+ assert tw_mock.lines[18] == ""
+ msg = tw_mock.get_write_msg(19)
+ msg.endswith("mod.py")
+ assert tw_mock.lines[20] == ":9: ValueError"
+
+ def test_exc_chain_repr(self, importasmod, tw_mock):
+ mod = importasmod(
+ """
+ class Err(Exception):
+ pass
+ def f():
+ try:
+ g()
+ except Exception as e:
+ raise Err() from e
+ finally:
+ h()
+ def g():
+ raise ValueError()
+
+ def h():
+ raise AttributeError()
+ """
+ )
+ excinfo = pytest.raises(AttributeError, mod.f)
+ r = excinfo.getrepr(style="long")
+ r.toterminal(tw_mock)
+ for line in tw_mock.lines:
+ print(line)
+ assert tw_mock.lines[0] == ""
+ assert tw_mock.lines[1] == " def f():"
+ assert tw_mock.lines[2] == " try:"
+ assert tw_mock.lines[3] == "> g()"
+ assert tw_mock.lines[4] == ""
+ line = tw_mock.get_write_msg(5)
+ assert line.endswith("mod.py")
+ assert tw_mock.lines[6] == ":6: "
+ assert tw_mock.lines[7] == ("_ ", None)
+ assert tw_mock.lines[8] == ""
+ assert tw_mock.lines[9] == " def g():"
+ assert tw_mock.lines[10] == "> raise ValueError()"
+ assert tw_mock.lines[11] == "E ValueError"
+ assert tw_mock.lines[12] == ""
+ line = tw_mock.get_write_msg(13)
+ assert line.endswith("mod.py")
+ assert tw_mock.lines[14] == ":12: ValueError"
+ assert tw_mock.lines[15] == ""
+ assert (
+ tw_mock.lines[16]
+ == "The above exception was the direct cause of the following exception:"
+ )
+ assert tw_mock.lines[17] == ""
+ assert tw_mock.lines[18] == " def f():"
+ assert tw_mock.lines[19] == " try:"
+ assert tw_mock.lines[20] == " g()"
+ assert tw_mock.lines[21] == " except Exception as e:"
+ assert tw_mock.lines[22] == "> raise Err() from e"
+ assert tw_mock.lines[23] == "E test_exc_chain_repr0.mod.Err"
+ assert tw_mock.lines[24] == ""
+ line = tw_mock.get_write_msg(25)
+ assert line.endswith("mod.py")
+ assert tw_mock.lines[26] == ":8: Err"
+ assert tw_mock.lines[27] == ""
+ assert (
+ tw_mock.lines[28]
+ == "During handling of the above exception, another exception occurred:"
+ )
+ assert tw_mock.lines[29] == ""
+ assert tw_mock.lines[30] == " def f():"
+ assert tw_mock.lines[31] == " try:"
+ assert tw_mock.lines[32] == " g()"
+ assert tw_mock.lines[33] == " except Exception as e:"
+ assert tw_mock.lines[34] == " raise Err() from e"
+ assert tw_mock.lines[35] == " finally:"
+ assert tw_mock.lines[36] == "> h()"
+ assert tw_mock.lines[37] == ""
+ line = tw_mock.get_write_msg(38)
+ assert line.endswith("mod.py")
+ assert tw_mock.lines[39] == ":10: "
+ assert tw_mock.lines[40] == ("_ ", None)
+ assert tw_mock.lines[41] == ""
+ assert tw_mock.lines[42] == " def h():"
+ assert tw_mock.lines[43] == "> raise AttributeError()"
+ assert tw_mock.lines[44] == "E AttributeError"
+ assert tw_mock.lines[45] == ""
+ line = tw_mock.get_write_msg(46)
+ assert line.endswith("mod.py")
+ assert tw_mock.lines[47] == ":15: AttributeError"
+
+ @pytest.mark.parametrize("mode", ["from_none", "explicit_suppress"])
+ def test_exc_repr_chain_suppression(self, importasmod, mode, tw_mock):
+ """Check that exc repr does not show chained exceptions in Python 3.
+ - When the exception is raised with "from None"
+ - Explicitly suppressed with "chain=False" to ExceptionInfo.getrepr().
+ """
+ raise_suffix = " from None" if mode == "from_none" else ""
+ mod = importasmod(
+ """
+ def f():
+ try:
+ g()
+ except Exception:
+ raise AttributeError(){raise_suffix}
+ def g():
+ raise ValueError()
+ """.format(
+ raise_suffix=raise_suffix
+ )
+ )
+ excinfo = pytest.raises(AttributeError, mod.f)
+ r = excinfo.getrepr(style="long", chain=mode != "explicit_suppress")
+ r.toterminal(tw_mock)
+ for line in tw_mock.lines:
+ print(line)
+ assert tw_mock.lines[0] == ""
+ assert tw_mock.lines[1] == " def f():"
+ assert tw_mock.lines[2] == " try:"
+ assert tw_mock.lines[3] == " g()"
+ assert tw_mock.lines[4] == " except Exception:"
+ assert tw_mock.lines[5] == "> raise AttributeError(){}".format(
+ raise_suffix
+ )
+ assert tw_mock.lines[6] == "E AttributeError"
+ assert tw_mock.lines[7] == ""
+ line = tw_mock.get_write_msg(8)
+ assert line.endswith("mod.py")
+ assert tw_mock.lines[9] == ":6: AttributeError"
+ assert len(tw_mock.lines) == 10
+
+ @pytest.mark.parametrize(
+ "reason, description",
+ [
+ pytest.param(
+ "cause",
+ "The above exception was the direct cause of the following exception:",
+ id="cause",
+ ),
+ pytest.param(
+ "context",
+ "During handling of the above exception, another exception occurred:",
+ id="context",
+ ),
+ ],
+ )
+ def test_exc_chain_repr_without_traceback(self, importasmod, reason, description):
+ """
+ Handle representation of exception chains where one of the exceptions doesn't have a
+ real traceback, such as those raised in a subprocess submitted by the multiprocessing
+ module (#1984).
+ """
+ exc_handling_code = " from e" if reason == "cause" else ""
+ mod = importasmod(
+ """
+ def f():
+ try:
+ g()
+ except Exception as e:
+ raise RuntimeError('runtime problem'){exc_handling_code}
+ def g():
+ raise ValueError('invalid value')
+ """.format(
+ exc_handling_code=exc_handling_code
+ )
+ )
+
+ with pytest.raises(RuntimeError) as excinfo:
+ mod.f()
+
+ # emulate the issue described in #1984
+ attr = "__%s__" % reason
+ getattr(excinfo.value, attr).__traceback__ = None
+
+ r = excinfo.getrepr()
+ file = io.StringIO()
+ tw = TerminalWriter(file=file)
+ tw.hasmarkup = False
+ r.toterminal(tw)
+
+ matcher = LineMatcher(file.getvalue().splitlines())
+ matcher.fnmatch_lines(
+ [
+ "ValueError: invalid value",
+ description,
+ "* except Exception as e:",
+ "> * raise RuntimeError('runtime problem')" + exc_handling_code,
+ "E *RuntimeError: runtime problem",
+ ]
+ )
+
+ def test_exc_chain_repr_cycle(self, importasmod, tw_mock):
+ mod = importasmod(
+ """
+ class Err(Exception):
+ pass
+ def fail():
+ return 0 / 0
+ def reraise():
+ try:
+ fail()
+ except ZeroDivisionError as e:
+ raise Err() from e
+ def unreraise():
+ try:
+ reraise()
+ except Err as e:
+ raise e.__cause__
+ """
+ )
+ excinfo = pytest.raises(ZeroDivisionError, mod.unreraise)
+ r = excinfo.getrepr(style="short")
+ r.toterminal(tw_mock)
+ out = "\n".join(line for line in tw_mock.lines if isinstance(line, str))
+ expected_out = textwrap.dedent(
+ """\
+ :13: in unreraise
+ reraise()
+ :10: in reraise
+ raise Err() from e
+ E test_exc_chain_repr_cycle0.mod.Err
+
+ During handling of the above exception, another exception occurred:
+ :15: in unreraise
+ raise e.__cause__
+ :8: in reraise
+ fail()
+ :5: in fail
+ return 0 / 0
+ E ZeroDivisionError: division by zero"""
+ )
+ assert out == expected_out
+
+ def test_exec_type_error_filter(self, importasmod):
+ """See #7742"""
+ mod = importasmod(
+ """\
+ def f():
+ exec("a = 1", {}, [])
+ """
+ )
+ with pytest.raises(TypeError) as excinfo:
+ mod.f()
+ # previously crashed with `AttributeError: list has no attribute get`
+ excinfo.traceback.filter()
+
+
+@pytest.mark.parametrize("style", ["short", "long"])
+@pytest.mark.parametrize("encoding", [None, "utf8", "utf16"])
+def test_repr_traceback_with_unicode(style, encoding):
+ if encoding is None:
+ msg: Union[str, bytes] = "☹"
+ else:
+ msg = "☹".encode(encoding)
+ try:
+ raise RuntimeError(msg)
+ except RuntimeError:
+ e_info = ExceptionInfo.from_current()
+ formatter = FormattedExcinfo(style=style)
+ repr_traceback = formatter.repr_traceback(e_info)
+ assert repr_traceback is not None
+
+
+def test_cwd_deleted(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import os
+
+ def test(tmp_path):
+ os.chdir(tmp_path)
+ tmp_path.unlink()
+ assert False
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 1 failed in *"])
+ result.stdout.no_fnmatch_line("*INTERNALERROR*")
+ result.stderr.no_fnmatch_line("*INTERNALERROR*")
+
+
+def test_regression_negative_line_index(pytester: Pytester) -> None:
+ """
+ With Python 3.10 alphas, there was an INTERNALERROR reported in
+ https://github.com/pytest-dev/pytest/pull/8227
+ This test ensures it does not regress.
+ """
+ pytester.makepyfile(
+ """
+ import ast
+ import pytest
+
+
+ def test_literal_eval():
+ with pytest.raises(ValueError, match="^$"):
+ ast.literal_eval("pytest")
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 1 failed in *"])
+ result.stdout.no_fnmatch_line("*INTERNALERROR*")
+ result.stderr.no_fnmatch_line("*INTERNALERROR*")
+
+
+@pytest.mark.usefixtures("limited_recursion_depth")
+def test_exception_repr_extraction_error_on_recursion():
+ """
+ Ensure we can properly detect a recursion error even
+ if some locals raise an error on comparison (#2459).
+ """
+
+ class numpy_like:
+ def __eq__(self, other):
+ if type(other) is numpy_like:
+ raise ValueError(
+ "The truth value of an array "
+ "with more than one element is ambiguous."
+ )
+
+ def a(x):
+ return b(numpy_like())
+
+ def b(x):
+ return a(numpy_like())
+
+ with pytest.raises(RuntimeError) as excinfo:
+ a(numpy_like())
+
+ matcher = LineMatcher(str(excinfo.getrepr()).splitlines())
+ matcher.fnmatch_lines(
+ [
+ "!!! Recursion error detected, but an error occurred locating the origin of recursion.",
+ "*The following exception happened*",
+ "*ValueError: The truth value of an array*",
+ ]
+ )
+
+
+@pytest.mark.usefixtures("limited_recursion_depth")
+def test_no_recursion_index_on_recursion_error():
+ """
+ Ensure that we don't break in case we can't find the recursion index
+ during a recursion error (#2486).
+ """
+
+ class RecursionDepthError:
+ def __getattr__(self, attr):
+ return getattr(self, "_" + attr)
+
+ with pytest.raises(RuntimeError) as excinfo:
+ RecursionDepthError().trigger
+ assert "maximum recursion" in str(excinfo.getrepr())
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/code/test_source.py b/testing/web-platform/tests/tools/third_party/pytest/testing/code/test_source.py
new file mode 100644
index 0000000000..9f7be5e245
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/code/test_source.py
@@ -0,0 +1,656 @@
+# flake8: noqa
+# disable flake8 checks on this file because some constructs are strange
+# or redundant on purpose and can't be disabled on a line-by-line basis
+import ast
+import inspect
+import linecache
+import sys
+import textwrap
+from pathlib import Path
+from types import CodeType
+from typing import Any
+from typing import Dict
+from typing import Optional
+
+import pytest
+from _pytest._code import Code
+from _pytest._code import Frame
+from _pytest._code import getfslineno
+from _pytest._code import Source
+from _pytest.pathlib import import_path
+
+
+def test_source_str_function() -> None:
+ x = Source("3")
+ assert str(x) == "3"
+
+ x = Source(" 3")
+ assert str(x) == "3"
+
+ x = Source(
+ """
+ 3
+ """
+ )
+ assert str(x) == "\n3"
+
+
+def test_source_from_function() -> None:
+ source = Source(test_source_str_function)
+ assert str(source).startswith("def test_source_str_function() -> None:")
+
+
+def test_source_from_method() -> None:
+ class TestClass:
+ def test_method(self):
+ pass
+
+ source = Source(TestClass().test_method)
+ assert source.lines == ["def test_method(self):", " pass"]
+
+
+def test_source_from_lines() -> None:
+ lines = ["a \n", "b\n", "c"]
+ source = Source(lines)
+ assert source.lines == ["a ", "b", "c"]
+
+
+def test_source_from_inner_function() -> None:
+ def f():
+ raise NotImplementedError()
+
+ source = Source(f)
+ assert str(source).startswith("def f():")
+
+
+def test_source_strips() -> None:
+ source = Source("")
+ assert source == Source()
+ assert str(source) == ""
+ assert source.strip() == source
+
+
+def test_source_strip_multiline() -> None:
+ source = Source()
+ source.lines = ["", " hello", " "]
+ source2 = source.strip()
+ assert source2.lines == [" hello"]
+
+
+class TestAccesses:
+ def setup_class(self) -> None:
+ self.source = Source(
+ """\
+ def f(x):
+ pass
+ def g(x):
+ pass
+ """
+ )
+
+ def test_getrange(self) -> None:
+ x = self.source[0:2]
+ assert len(x.lines) == 2
+ assert str(x) == "def f(x):\n pass"
+
+ def test_getrange_step_not_supported(self) -> None:
+ with pytest.raises(IndexError, match=r"step"):
+ self.source[::2]
+
+ def test_getline(self) -> None:
+ x = self.source[0]
+ assert x == "def f(x):"
+
+ def test_len(self) -> None:
+ assert len(self.source) == 4
+
+ def test_iter(self) -> None:
+ values = [x for x in self.source]
+ assert len(values) == 4
+
+
+class TestSourceParsing:
+ def setup_class(self) -> None:
+ self.source = Source(
+ """\
+ def f(x):
+ assert (x ==
+ 3 +
+ 4)
+ """
+ ).strip()
+
+ def test_getstatement(self) -> None:
+ # print str(self.source)
+ ass = str(self.source[1:])
+ for i in range(1, 4):
+ # print "trying start in line %r" % self.source[i]
+ s = self.source.getstatement(i)
+ # x = s.deindent()
+ assert str(s) == ass
+
+ def test_getstatementrange_triple_quoted(self) -> None:
+ # print str(self.source)
+ source = Source(
+ """hello('''
+ ''')"""
+ )
+ s = source.getstatement(0)
+ assert s == source
+ s = source.getstatement(1)
+ assert s == source
+
+ def test_getstatementrange_within_constructs(self) -> None:
+ source = Source(
+ """\
+ try:
+ try:
+ raise ValueError
+ except SomeThing:
+ pass
+ finally:
+ 42
+ """
+ )
+ assert len(source) == 7
+ # check all lineno's that could occur in a traceback
+ # assert source.getstatementrange(0) == (0, 7)
+ # assert source.getstatementrange(1) == (1, 5)
+ assert source.getstatementrange(2) == (2, 3)
+ assert source.getstatementrange(3) == (3, 4)
+ assert source.getstatementrange(4) == (4, 5)
+ # assert source.getstatementrange(5) == (0, 7)
+ assert source.getstatementrange(6) == (6, 7)
+
+ def test_getstatementrange_bug(self) -> None:
+ source = Source(
+ """\
+ try:
+ x = (
+ y +
+ z)
+ except:
+ pass
+ """
+ )
+ assert len(source) == 6
+ assert source.getstatementrange(2) == (1, 4)
+
+ def test_getstatementrange_bug2(self) -> None:
+ source = Source(
+ """\
+ assert (
+ 33
+ ==
+ [
+ X(3,
+ b=1, c=2
+ ),
+ ]
+ )
+ """
+ )
+ assert len(source) == 9
+ assert source.getstatementrange(5) == (0, 9)
+
+ def test_getstatementrange_ast_issue58(self) -> None:
+ source = Source(
+ """\
+
+ def test_some():
+ for a in [a for a in
+ CAUSE_ERROR]: pass
+
+ x = 3
+ """
+ )
+ assert getstatement(2, source).lines == source.lines[2:3]
+ assert getstatement(3, source).lines == source.lines[3:4]
+
+ def test_getstatementrange_out_of_bounds_py3(self) -> None:
+ source = Source("if xxx:\n from .collections import something")
+ r = source.getstatementrange(1)
+ assert r == (1, 2)
+
+ def test_getstatementrange_with_syntaxerror_issue7(self) -> None:
+ source = Source(":")
+ pytest.raises(SyntaxError, lambda: source.getstatementrange(0))
+
+
+def test_getstartingblock_singleline() -> None:
+ class A:
+ def __init__(self, *args) -> None:
+ frame = sys._getframe(1)
+ self.source = Frame(frame).statement
+
+ x = A("x", "y")
+
+ values = [i for i in x.source.lines if i.strip()]
+ assert len(values) == 1
+
+
+def test_getline_finally() -> None:
+ def c() -> None:
+ pass
+
+ with pytest.raises(TypeError) as excinfo:
+ teardown = None
+ try:
+ c(1) # type: ignore
+ finally:
+ if teardown:
+ teardown() # type: ignore[unreachable]
+ source = excinfo.traceback[-1].statement
+ assert str(source).strip() == "c(1) # type: ignore"
+
+
+def test_getfuncsource_dynamic() -> None:
+ def f():
+ raise NotImplementedError()
+
+ def g():
+ pass # pragma: no cover
+
+ f_source = Source(f)
+ g_source = Source(g)
+ assert str(f_source).strip() == "def f():\n raise NotImplementedError()"
+ assert str(g_source).strip() == "def g():\n pass # pragma: no cover"
+
+
+def test_getfuncsource_with_multiline_string() -> None:
+ def f():
+ c = """while True:
+ pass
+"""
+
+ expected = '''\
+ def f():
+ c = """while True:
+ pass
+"""
+'''
+ assert str(Source(f)) == expected.rstrip()
+
+
+def test_deindent() -> None:
+ from _pytest._code.source import deindent as deindent
+
+ assert deindent(["\tfoo", "\tbar"]) == ["foo", "bar"]
+
+ source = """\
+ def f():
+ def g():
+ pass
+ """
+ lines = deindent(source.splitlines())
+ assert lines == ["def f():", " def g():", " pass"]
+
+
+def test_source_of_class_at_eof_without_newline(_sys_snapshot, tmp_path: Path) -> None:
+ # this test fails because the implicit inspect.getsource(A) below
+ # does not return the "x = 1" last line.
+ source = Source(
+ """
+ class A:
+ def method(self):
+ x = 1
+ """
+ )
+ path = tmp_path.joinpath("a.py")
+ path.write_text(str(source))
+ mod: Any = import_path(path, root=tmp_path)
+ s2 = Source(mod.A)
+ assert str(source).strip() == str(s2).strip()
+
+
+if True:
+
+ def x():
+ pass
+
+
+def test_source_fallback() -> None:
+ src = Source(x)
+ expected = """def x():
+ pass"""
+ assert str(src) == expected
+
+
+def test_findsource_fallback() -> None:
+ from _pytest._code.source import findsource
+
+ src, lineno = findsource(x)
+ assert src is not None
+ assert "test_findsource_simple" in str(src)
+ assert src[lineno] == " def x():"
+
+
+def test_findsource(monkeypatch) -> None:
+ from _pytest._code.source import findsource
+
+ filename = "<pytest-test_findsource>"
+ lines = ["if 1:\n", " def x():\n", " pass\n"]
+ co = compile("".join(lines), filename, "exec")
+
+ # Type ignored because linecache.cache is private.
+ monkeypatch.setitem(linecache.cache, filename, (1, None, lines, filename)) # type: ignore[attr-defined]
+
+ src, lineno = findsource(co)
+ assert src is not None
+ assert "if 1:" in str(src)
+
+ d: Dict[str, Any] = {}
+ eval(co, d)
+ src, lineno = findsource(d["x"])
+ assert src is not None
+ assert "if 1:" in str(src)
+ assert src[lineno] == " def x():"
+
+
+def test_getfslineno() -> None:
+ def f(x) -> None:
+ raise NotImplementedError()
+
+ fspath, lineno = getfslineno(f)
+
+ assert isinstance(fspath, Path)
+ assert fspath.name == "test_source.py"
+ assert lineno == f.__code__.co_firstlineno - 1 # see findsource
+
+ class A:
+ pass
+
+ fspath, lineno = getfslineno(A)
+
+ _, A_lineno = inspect.findsource(A)
+ assert isinstance(fspath, Path)
+ assert fspath.name == "test_source.py"
+ assert lineno == A_lineno
+
+ assert getfslineno(3) == ("", -1)
+
+ class B:
+ pass
+
+ B.__name__ = B.__qualname__ = "B2"
+ assert getfslineno(B)[1] == -1
+
+
+def test_code_of_object_instance_with_call() -> None:
+ class A:
+ pass
+
+ pytest.raises(TypeError, lambda: Source(A()))
+
+ class WithCall:
+ def __call__(self) -> None:
+ pass
+
+ code = Code.from_function(WithCall())
+ assert "pass" in str(code.source())
+
+ class Hello:
+ def __call__(self) -> None:
+ pass
+
+ pytest.raises(TypeError, lambda: Code.from_function(Hello))
+
+
+def getstatement(lineno: int, source) -> Source:
+ from _pytest._code.source import getstatementrange_ast
+
+ src = Source(source)
+ ast, start, end = getstatementrange_ast(lineno, src)
+ return src[start:end]
+
+
+def test_oneline() -> None:
+ source = getstatement(0, "raise ValueError")
+ assert str(source) == "raise ValueError"
+
+
+def test_comment_and_no_newline_at_end() -> None:
+ from _pytest._code.source import getstatementrange_ast
+
+ source = Source(
+ [
+ "def test_basic_complex():",
+ " assert 1 == 2",
+ "# vim: filetype=pyopencl:fdm=marker",
+ ]
+ )
+ ast, start, end = getstatementrange_ast(1, source)
+ assert end == 2
+
+
+def test_oneline_and_comment() -> None:
+ source = getstatement(0, "raise ValueError\n#hello")
+ assert str(source) == "raise ValueError"
+
+
+def test_comments() -> None:
+ source = '''def test():
+ "comment 1"
+ x = 1
+ # comment 2
+ # comment 3
+
+ assert False
+
+"""
+comment 4
+"""
+'''
+ for line in range(2, 6):
+ assert str(getstatement(line, source)) == " x = 1"
+ if sys.version_info >= (3, 8) or hasattr(sys, "pypy_version_info"):
+ tqs_start = 8
+ else:
+ tqs_start = 10
+ assert str(getstatement(10, source)) == '"""'
+ for line in range(6, tqs_start):
+ assert str(getstatement(line, source)) == " assert False"
+ for line in range(tqs_start, 10):
+ assert str(getstatement(line, source)) == '"""\ncomment 4\n"""'
+
+
+def test_comment_in_statement() -> None:
+ source = """test(foo=1,
+ # comment 1
+ bar=2)
+"""
+ for line in range(1, 3):
+ assert (
+ str(getstatement(line, source))
+ == "test(foo=1,\n # comment 1\n bar=2)"
+ )
+
+
+def test_source_with_decorator() -> None:
+ """Test behavior with Source / Code().source with regard to decorators."""
+ from _pytest.compat import get_real_func
+
+ @pytest.mark.foo
+ def deco_mark():
+ assert False
+
+ src = inspect.getsource(deco_mark)
+ assert textwrap.indent(str(Source(deco_mark)), " ") + "\n" == src
+ assert src.startswith(" @pytest.mark.foo")
+
+ @pytest.fixture
+ def deco_fixture():
+ assert False
+
+ src = inspect.getsource(deco_fixture)
+ assert src == " @pytest.fixture\n def deco_fixture():\n assert False\n"
+ # currently Source does not unwrap decorators, testing the
+ # existing behavior here for explicitness, but perhaps we should revisit/change this
+ # in the future
+ assert str(Source(deco_fixture)).startswith("@functools.wraps(function)")
+ assert (
+ textwrap.indent(str(Source(get_real_func(deco_fixture))), " ") + "\n" == src
+ )
+
+
+def test_single_line_else() -> None:
+ source = getstatement(1, "if False: 2\nelse: 3")
+ assert str(source) == "else: 3"
+
+
+def test_single_line_finally() -> None:
+ source = getstatement(1, "try: 1\nfinally: 3")
+ assert str(source) == "finally: 3"
+
+
+def test_issue55() -> None:
+ source = (
+ "def round_trip(dinp):\n assert 1 == dinp\n"
+ 'def test_rt():\n round_trip("""\n""")\n'
+ )
+ s = getstatement(3, source)
+ assert str(s) == ' round_trip("""\n""")'
+
+
+def test_multiline() -> None:
+ source = getstatement(
+ 0,
+ """\
+raise ValueError(
+ 23
+)
+x = 3
+""",
+ )
+ assert str(source) == "raise ValueError(\n 23\n)"
+
+
+class TestTry:
+ def setup_class(self) -> None:
+ self.source = """\
+try:
+ raise ValueError
+except Something:
+ raise IndexError(1)
+else:
+ raise KeyError()
+"""
+
+ def test_body(self) -> None:
+ source = getstatement(1, self.source)
+ assert str(source) == " raise ValueError"
+
+ def test_except_line(self) -> None:
+ source = getstatement(2, self.source)
+ assert str(source) == "except Something:"
+
+ def test_except_body(self) -> None:
+ source = getstatement(3, self.source)
+ assert str(source) == " raise IndexError(1)"
+
+ def test_else(self) -> None:
+ source = getstatement(5, self.source)
+ assert str(source) == " raise KeyError()"
+
+
+class TestTryFinally:
+ def setup_class(self) -> None:
+ self.source = """\
+try:
+ raise ValueError
+finally:
+ raise IndexError(1)
+"""
+
+ def test_body(self) -> None:
+ source = getstatement(1, self.source)
+ assert str(source) == " raise ValueError"
+
+ def test_finally(self) -> None:
+ source = getstatement(3, self.source)
+ assert str(source) == " raise IndexError(1)"
+
+
+class TestIf:
+ def setup_class(self) -> None:
+ self.source = """\
+if 1:
+ y = 3
+elif False:
+ y = 5
+else:
+ y = 7
+"""
+
+ def test_body(self) -> None:
+ source = getstatement(1, self.source)
+ assert str(source) == " y = 3"
+
+ def test_elif_clause(self) -> None:
+ source = getstatement(2, self.source)
+ assert str(source) == "elif False:"
+
+ def test_elif(self) -> None:
+ source = getstatement(3, self.source)
+ assert str(source) == " y = 5"
+
+ def test_else(self) -> None:
+ source = getstatement(5, self.source)
+ assert str(source) == " y = 7"
+
+
+def test_semicolon() -> None:
+ s = """\
+hello ; pytest.skip()
+"""
+ source = getstatement(0, s)
+ assert str(source) == s.strip()
+
+
+def test_def_oneline() -> None:
+ s = """\
+def func(): raise ValueError(42)
+
+def something():
+ pass
+"""
+ source = getstatement(0, s)
+ assert str(source) == "def func(): raise ValueError(42)"
+
+
+def test_decorator() -> None:
+ s = """\
+def foo(f):
+ pass
+
+@foo
+def bar():
+ pass
+ """
+ source = getstatement(3, s)
+ assert "@foo" in str(source)
+
+
+def XXX_test_expression_multiline() -> None:
+ source = """\
+something
+'''
+'''"""
+ result = getstatement(1, source)
+ assert str(result) == "'''\n'''"
+
+
+def test_getstartingblock_multiline() -> None:
+ class A:
+ def __init__(self, *args):
+ frame = sys._getframe(1)
+ self.source = Frame(frame).statement
+
+ # fmt: off
+ x = A('x',
+ 'y'
+ ,
+ 'z')
+ # fmt: on
+ values = [i for i in x.source.lines if i.strip()]
+ assert len(values) == 4
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/conftest.py
new file mode 100644
index 0000000000..107aad86b2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/conftest.py
@@ -0,0 +1,216 @@
+import re
+import sys
+from typing import List
+
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+
+if sys.gettrace():
+
+ @pytest.fixture(autouse=True)
+ def restore_tracing():
+ """Restore tracing function (when run with Coverage.py).
+
+ https://bugs.python.org/issue37011
+ """
+ orig_trace = sys.gettrace()
+ yield
+ if sys.gettrace() != orig_trace:
+ sys.settrace(orig_trace)
+
+
+@pytest.hookimpl(hookwrapper=True, tryfirst=True)
+def pytest_collection_modifyitems(items):
+ """Prefer faster tests.
+
+ Use a hookwrapper to do this in the beginning, so e.g. --ff still works
+ correctly.
+ """
+ fast_items = []
+ slow_items = []
+ slowest_items = []
+ neutral_items = []
+
+ spawn_names = {"spawn_pytest", "spawn"}
+
+ for item in items:
+ try:
+ fixtures = item.fixturenames
+ except AttributeError:
+ # doctest at least
+ # (https://github.com/pytest-dev/pytest/issues/5070)
+ neutral_items.append(item)
+ else:
+ if "pytester" in fixtures:
+ co_names = item.function.__code__.co_names
+ if spawn_names.intersection(co_names):
+ item.add_marker(pytest.mark.uses_pexpect)
+ slowest_items.append(item)
+ elif "runpytest_subprocess" in co_names:
+ slowest_items.append(item)
+ else:
+ slow_items.append(item)
+ item.add_marker(pytest.mark.slow)
+ else:
+ marker = item.get_closest_marker("slow")
+ if marker:
+ slowest_items.append(item)
+ else:
+ fast_items.append(item)
+
+ items[:] = fast_items + neutral_items + slow_items + slowest_items
+
+ yield
+
+
+@pytest.fixture
+def tw_mock():
+ """Returns a mock terminal writer"""
+
+ class TWMock:
+ WRITE = object()
+
+ def __init__(self):
+ self.lines = []
+ self.is_writing = False
+
+ def sep(self, sep, line=None):
+ self.lines.append((sep, line))
+
+ def write(self, msg, **kw):
+ self.lines.append((TWMock.WRITE, msg))
+
+ def _write_source(self, lines, indents=()):
+ if not indents:
+ indents = [""] * len(lines)
+ for indent, line in zip(indents, lines):
+ self.line(indent + line)
+
+ def line(self, line, **kw):
+ self.lines.append(line)
+
+ def markup(self, text, **kw):
+ return text
+
+ def get_write_msg(self, idx):
+ flag, msg = self.lines[idx]
+ assert flag == TWMock.WRITE
+ return msg
+
+ fullwidth = 80
+
+ return TWMock()
+
+
+@pytest.fixture
+def dummy_yaml_custom_test(pytester: Pytester):
+ """Writes a conftest file that collects and executes a dummy yaml test.
+
+ Taken from the docs, but stripped down to the bare minimum, useful for
+    tests which need custom items collected.
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+
+ def pytest_collect_file(parent, file_path):
+ if file_path.suffix == ".yaml" and file_path.name.startswith("test"):
+ return YamlFile.from_parent(path=file_path, parent=parent)
+
+ class YamlFile(pytest.File):
+ def collect(self):
+ yield YamlItem.from_parent(name=self.path.name, parent=self)
+
+ class YamlItem(pytest.Item):
+ def runtest(self):
+ pass
+ """
+ )
+ pytester.makefile(".yaml", test1="")
+
+
+@pytest.fixture
+def pytester(pytester: Pytester, monkeypatch: MonkeyPatch) -> Pytester:
+ monkeypatch.setenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "1")
+ return pytester
+
+
+@pytest.fixture(scope="session")
+def color_mapping():
+ """Returns a utility class which can replace keys in strings in the form "{NAME}"
+    by their equivalent ANSI escape codes in the terminal.
+
+ Used by tests which check the actual colors output by pytest.
+ """
+
+ class ColorMapping:
+ COLORS = {
+ "red": "\x1b[31m",
+ "green": "\x1b[32m",
+ "yellow": "\x1b[33m",
+ "bold": "\x1b[1m",
+ "reset": "\x1b[0m",
+ "kw": "\x1b[94m",
+ "hl-reset": "\x1b[39;49;00m",
+ "function": "\x1b[92m",
+ "number": "\x1b[94m",
+ "str": "\x1b[33m",
+ "print": "\x1b[96m",
+ }
+ RE_COLORS = {k: re.escape(v) for k, v in COLORS.items()}
+
+ @classmethod
+ def format(cls, lines: List[str]) -> List[str]:
+            """Straightforward replacement of color names with their ANSI escape codes."""
+ return [line.format(**cls.COLORS) for line in lines]
+
+ @classmethod
+ def format_for_fnmatch(cls, lines: List[str]) -> List[str]:
+ """Replace color names for use with LineMatcher.fnmatch_lines"""
+ return [line.format(**cls.COLORS).replace("[", "[[]") for line in lines]
+
+ @classmethod
+ def format_for_rematch(cls, lines: List[str]) -> List[str]:
+ """Replace color names for use with LineMatcher.re_match_lines"""
+ return [line.format(**cls.RE_COLORS) for line in lines]
+
+ return ColorMapping
+
+
+@pytest.fixture
+def mock_timing(monkeypatch: MonkeyPatch):
+ """Mocks _pytest.timing with a known object that can be used to control timing in tests
+ deterministically.
+
+ pytest itself should always use functions from `_pytest.timing` instead of `time` directly.
+
+ This then allows us more control over time during testing, if testing code also
+ uses `_pytest.timing` functions.
+
+    Time is static, and only advances through `sleep` calls, so tests can sleep over large
+    spans of time and still obtain accurate time() readings at the end, making them reliable and instant.
+ """
+ import attr
+
+ @attr.s
+ class MockTiming:
+
+ _current_time = attr.ib(default=1590150050.0)
+
+ def sleep(self, seconds):
+ self._current_time += seconds
+
+ def time(self):
+ return self._current_time
+
+ def patch(self):
+ from _pytest import timing
+
+ monkeypatch.setattr(timing, "sleep", self.sleep)
+ monkeypatch.setattr(timing, "time", self.time)
+ monkeypatch.setattr(timing, "perf_counter", self.time)
+
+ result = MockTiming()
+ result.patch()
+ return result
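A minimal usage sketch for the mock_timing fixture defined above (not part of the patch; the test name is hypothetical). Time only moves when the fake sleep() is called, so timing-sensitive tests finish instantly:

def test_clock_advances_only_via_sleep(mock_timing) -> None:
    # sleep() returns immediately but bumps the fake clock by the requested amount
    start = mock_timing.time()
    mock_timing.sleep(100)
    assert mock_timing.time() - start == 100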
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/deprecated_test.py b/testing/web-platform/tests/tools/third_party/pytest/testing/deprecated_test.py
new file mode 100644
index 0000000000..9ac7fe1cac
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/deprecated_test.py
@@ -0,0 +1,310 @@
+import re
+import sys
+import warnings
+from pathlib import Path
+from unittest import mock
+
+import pytest
+from _pytest import deprecated
+from _pytest.compat import legacy_path
+from _pytest.pytester import Pytester
+from pytest import PytestDeprecationWarning
+
+
+@pytest.mark.parametrize("attribute", pytest.collect.__all__) # type: ignore
+# false positive due to dynamic attribute
+def test_pytest_collect_module_deprecated(attribute) -> None:
+ with pytest.warns(DeprecationWarning, match=attribute):
+ getattr(pytest.collect, attribute)
+
+
+@pytest.mark.parametrize("plugin", sorted(deprecated.DEPRECATED_EXTERNAL_PLUGINS))
+@pytest.mark.filterwarnings("default")
+def test_external_plugins_integrated(pytester: Pytester, plugin) -> None:
+ pytester.syspathinsert()
+ pytester.makepyfile(**{plugin: ""})
+
+ with pytest.warns(pytest.PytestConfigWarning):
+ pytester.parseconfig("-p", plugin)
+
+
+def test_fillfuncargs_is_deprecated() -> None:
+ with pytest.warns(
+ pytest.PytestDeprecationWarning,
+ match=re.escape(
+ "pytest._fillfuncargs() is deprecated, use "
+ "function._request._fillfixtures() instead if you cannot avoid reaching into internals."
+ ),
+ ):
+ pytest._fillfuncargs(mock.Mock())
+
+
+def test_fillfixtures_is_deprecated() -> None:
+ import _pytest.fixtures
+
+ with pytest.warns(
+ pytest.PytestDeprecationWarning,
+ match=re.escape(
+ "_pytest.fixtures.fillfixtures() is deprecated, use "
+ "function._request._fillfixtures() instead if you cannot avoid reaching into internals."
+ ),
+ ):
+ _pytest.fixtures.fillfixtures(mock.Mock())
+
+
+def test_minus_k_dash_is_deprecated(pytester: Pytester) -> None:
+ threepass = pytester.makepyfile(
+ test_threepass="""
+ def test_one(): assert 1
+ def test_two(): assert 1
+ def test_three(): assert 1
+ """
+ )
+ result = pytester.runpytest("-k=-test_two", threepass)
+ result.stdout.fnmatch_lines(["*The `-k '-expr'` syntax*deprecated*"])
+
+
+def test_minus_k_colon_is_deprecated(pytester: Pytester) -> None:
+ threepass = pytester.makepyfile(
+ test_threepass="""
+ def test_one(): assert 1
+ def test_two(): assert 1
+ def test_three(): assert 1
+ """
+ )
+ result = pytester.runpytest("-k", "test_two:", threepass)
+ result.stdout.fnmatch_lines(["*The `-k 'expr:'` syntax*deprecated*"])
+
+
+def test_fscollector_gethookproxy_isinitpath(pytester: Pytester) -> None:
+ module = pytester.getmodulecol(
+ """
+ def test_foo(): pass
+ """,
+ withinit=True,
+ )
+ assert isinstance(module, pytest.Module)
+ package = module.parent
+ assert isinstance(package, pytest.Package)
+
+ with pytest.warns(pytest.PytestDeprecationWarning, match="gethookproxy"):
+ package.gethookproxy(pytester.path)
+
+ with pytest.warns(pytest.PytestDeprecationWarning, match="isinitpath"):
+ package.isinitpath(pytester.path)
+
+ # The methods on Session are *not* deprecated.
+ session = module.session
+ with warnings.catch_warnings(record=True) as rec:
+ session.gethookproxy(pytester.path)
+ session.isinitpath(pytester.path)
+ assert len(rec) == 0
+
+
+def test_strict_option_is_deprecated(pytester: Pytester) -> None:
+ """--strict is a deprecated alias to --strict-markers (#7530)."""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.unknown
+ def test_foo(): pass
+ """
+ )
+ result = pytester.runpytest("--strict")
+ result.stdout.fnmatch_lines(
+ [
+ "'unknown' not found in `markers` configuration option",
+ "*PytestRemovedIn8Warning: The --strict option is deprecated, use --strict-markers instead.",
+ ]
+ )
+
+
+def test_yield_fixture_is_deprecated() -> None:
+ with pytest.warns(DeprecationWarning, match=r"yield_fixture is deprecated"):
+
+ @pytest.yield_fixture
+ def fix():
+ assert False
+
+
+def test_private_is_deprecated() -> None:
+ class PrivateInit:
+ def __init__(self, foo: int, *, _ispytest: bool = False) -> None:
+ deprecated.check_ispytest(_ispytest)
+
+ with pytest.warns(
+ pytest.PytestDeprecationWarning, match="private pytest class or function"
+ ):
+ PrivateInit(10)
+
+ # Doesn't warn.
+ PrivateInit(10, _ispytest=True)
+
+
+def test_raising_unittest_skiptest_during_collection_is_deprecated(
+ pytester: Pytester,
+) -> None:
+ pytester.makepyfile(
+ """
+ import unittest
+ raise unittest.SkipTest()
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*PytestRemovedIn8Warning: Raising unittest.SkipTest*",
+ ]
+ )
+
+
+@pytest.mark.parametrize("hooktype", ["hook", "ihook"])
+def test_hookproxy_warnings_for_pathlib(tmp_path, hooktype, request):
+ path = legacy_path(tmp_path)
+
+ PATH_WARN_MATCH = r".*path: py\.path\.local\) argument is deprecated, please use \(collection_path: pathlib\.Path.*"
+ if hooktype == "ihook":
+ hooks = request.node.ihook
+ else:
+ hooks = request.config.hook
+
+ with pytest.warns(PytestDeprecationWarning, match=PATH_WARN_MATCH) as r:
+ l1 = sys._getframe().f_lineno
+ hooks.pytest_ignore_collect(
+ config=request.config, path=path, collection_path=tmp_path
+ )
+ l2 = sys._getframe().f_lineno
+
+ (record,) = r
+ assert record.filename == __file__
+ assert l1 < record.lineno < l2
+
+ hooks.pytest_ignore_collect(config=request.config, collection_path=tmp_path)
+
+ # Passing entirely *different* paths is an outright error.
+ with pytest.raises(ValueError, match=r"path.*fspath.*need to be equal"):
+ with pytest.warns(PytestDeprecationWarning, match=PATH_WARN_MATCH) as r:
+ hooks.pytest_ignore_collect(
+ config=request.config, path=path, collection_path=Path("/bla/bla")
+ )
+
+
+def test_warns_none_is_deprecated():
+ with pytest.warns(
+ PytestDeprecationWarning,
+ match=re.escape(
+ "Passing None has been deprecated.\n"
+ "See https://docs.pytest.org/en/latest/how-to/capture-warnings.html"
+ "#additional-use-cases-of-warnings-in-tests"
+ " for alternatives in common use cases."
+ ),
+ ):
+ with pytest.warns(None): # type: ignore[call-overload]
+ pass
+
+
+class TestSkipMsgArgumentDeprecated:
+ def test_skip_with_msg_is_deprecated(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ def test_skipping_msg():
+ pytest.skip(msg="skippedmsg")
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*PytestRemovedIn8Warning: pytest.skip(msg=...) is now deprecated, "
+ "use pytest.skip(reason=...) instead",
+ '*pytest.skip(msg="skippedmsg")*',
+ ]
+ )
+ result.assert_outcomes(skipped=1, warnings=1)
+
+ def test_fail_with_msg_is_deprecated(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ def test_failing_msg():
+ pytest.fail(msg="failedmsg")
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*PytestRemovedIn8Warning: pytest.fail(msg=...) is now deprecated, "
+ "use pytest.fail(reason=...) instead",
+ '*pytest.fail(msg="failedmsg")',
+ ]
+ )
+ result.assert_outcomes(failed=1, warnings=1)
+
+ def test_exit_with_msg_is_deprecated(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ def test_exit_msg():
+ pytest.exit(msg="exitmsg")
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*PytestRemovedIn8Warning: pytest.exit(msg=...) is now deprecated, "
+ "use pytest.exit(reason=...) instead",
+ ]
+ )
+ result.assert_outcomes(warnings=1)
+
+
+def test_deprecation_of_cmdline_preparse(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_cmdline_preparse(config, args):
+ ...
+
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*PytestRemovedIn8Warning: The pytest_cmdline_preparse hook is deprecated*",
+ "*Please use pytest_load_initial_conftests hook instead.*",
+ ]
+ )
+
+
+def test_node_ctor_fspath_argument_is_deprecated(pytester: Pytester) -> None:
+ mod = pytester.getmodulecol("")
+
+ with pytest.warns(
+ pytest.PytestDeprecationWarning,
+ match=re.escape("The (fspath: py.path.local) argument to File is deprecated."),
+ ):
+ pytest.File.from_parent(
+ parent=mod.parent,
+ fspath=legacy_path("bla"),
+ )
+
+
+@pytest.mark.skipif(
+ sys.version_info < (3, 7),
+ reason="This deprecation can only be emitted on python>=3.7",
+)
+def test_importing_instance_is_deprecated(pytester: Pytester) -> None:
+ with pytest.warns(
+ pytest.PytestDeprecationWarning,
+ match=re.escape("The pytest.Instance collector type is deprecated"),
+ ):
+ pytest.Instance
+
+ with pytest.warns(
+ pytest.PytestDeprecationWarning,
+ match=re.escape("The pytest.Instance collector type is deprecated"),
+ ):
+ from _pytest.python import Instance # noqa: F401
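The TestSkipMsgArgumentDeprecated cases above exercise the rename of the msg= keyword to reason=; a minimal before/after sketch (hypothetical tests, not part of the patch):

import pytest

def test_old_spelling():
    pytest.skip(msg="flaky on CI")     # deprecated: emits PytestRemovedIn8Warning

def test_new_spelling():
    pytest.skip(reason="flaky on CI")  # preferred replacement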
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/README.rst b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/README.rst
new file mode 100644
index 0000000000..97d0fda5c5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/README.rst
@@ -0,0 +1,9 @@
+Example test scripts
+=====================
+
+
+The files in this folder are not direct tests, but rather example test suites that demonstrate certain issues/behaviours.
+
+In the future we will move part of the content of the acceptance tests here, so that the code is directly testable instead of being written out and then run in nested pytest sessions/subprocesses.
+
+This will aid debugging and comprehension.
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/acceptance/fixture_mock_integration.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/acceptance/fixture_mock_integration.py
new file mode 100644
index 0000000000..5b00ac90e1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/acceptance/fixture_mock_integration.py
@@ -0,0 +1,16 @@
+"""Reproduces issue #3774"""
+from unittest import mock
+
+import pytest
+
+config = {"mykey": "ORIGINAL"}
+
+
+@pytest.fixture(scope="function")
+@mock.patch.dict(config, {"mykey": "MOCKED"})
+def my_fixture():
+ return config["mykey"]
+
+
+def test_foobar(my_fixture):
+ assert my_fixture == "MOCKED"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/collect_init_tests/pytest.ini b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/collect_init_tests/pytest.ini
new file mode 100644
index 0000000000..7c47955402
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/collect_init_tests/pytest.ini
@@ -0,0 +1,2 @@
+[pytest]
+python_files = *.py
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/collect_init_tests/tests/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/collect_init_tests/tests/__init__.py
new file mode 100644
index 0000000000..9cd366295e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/collect_init_tests/tests/__init__.py
@@ -0,0 +1,2 @@
+def test_init():
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/collect_init_tests/tests/test_foo.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/collect_init_tests/tests/test_foo.py
new file mode 100644
index 0000000000..8f2d73cfa4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/collect_init_tests/tests/test_foo.py
@@ -0,0 +1,2 @@
+def test_foo():
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/__init__.pyi b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/__init__.pyi
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/__init__.pyi
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/conftest.py
new file mode 100644
index 0000000000..973ccc0c03
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/conftest.py
@@ -0,0 +1,2 @@
+def pytest_ignore_collect(collection_path):
+ return False
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/tests/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/tests/test_basic.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/tests/test_basic.py
new file mode 100644
index 0000000000..f174823854
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_infinite_recursion/tests/test_basic.py
@@ -0,0 +1,2 @@
+def test():
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_init_given_as_arg/pkg/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_init_given_as_arg/pkg/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_init_given_as_arg/pkg/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_init_given_as_arg/pkg/test_foo.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_init_given_as_arg/pkg/test_foo.py
new file mode 100644
index 0000000000..f174823854
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/collect/package_init_given_as_arg/pkg/test_foo.py
@@ -0,0 +1,2 @@
+def test():
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/config/collect_pytest_prefix/__init__.pyi b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/config/collect_pytest_prefix/__init__.pyi
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/config/collect_pytest_prefix/__init__.pyi
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/config/collect_pytest_prefix/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/config/collect_pytest_prefix/conftest.py
new file mode 100644
index 0000000000..2da4ffe2fe
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/config/collect_pytest_prefix/conftest.py
@@ -0,0 +1,2 @@
+class pytest_something:
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/config/collect_pytest_prefix/test_foo.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/config/collect_pytest_prefix/test_foo.py
new file mode 100644
index 0000000000..8f2d73cfa4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/config/collect_pytest_prefix/test_foo.py
@@ -0,0 +1,2 @@
+def test_foo():
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/conftest_usageerror/__init__.pyi b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/conftest_usageerror/__init__.pyi
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/conftest_usageerror/__init__.pyi
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/conftest_usageerror/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/conftest_usageerror/conftest.py
new file mode 100644
index 0000000000..8973e4252d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/conftest_usageerror/conftest.py
@@ -0,0 +1,8 @@
+def pytest_configure(config):
+ import pytest
+
+ raise pytest.UsageError("hello")
+
+
+def pytest_unconfigure(config):
+ print("pytest_unconfigure_called")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_dataclasses.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_dataclasses.py
new file mode 100644
index 0000000000..d96c90a91b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_dataclasses.py
@@ -0,0 +1,14 @@
+from dataclasses import dataclass
+from dataclasses import field
+
+
+def test_dataclasses() -> None:
+ @dataclass
+ class SimpleDataObject:
+ field_a: int = field()
+ field_b: str = field()
+
+ left = SimpleDataObject(1, "b")
+ right = SimpleDataObject(1, "c")
+
+ assert left == right
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_field_comparison_off.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_field_comparison_off.py
new file mode 100644
index 0000000000..7479c66c1b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_field_comparison_off.py
@@ -0,0 +1,14 @@
+from dataclasses import dataclass
+from dataclasses import field
+
+
+def test_dataclasses_with_attribute_comparison_off() -> None:
+ @dataclass
+ class SimpleDataObject:
+ field_a: int = field()
+ field_b: str = field(compare=False)
+
+ left = SimpleDataObject(1, "b")
+ right = SimpleDataObject(1, "c")
+
+ assert left == right
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_verbose.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_verbose.py
new file mode 100644
index 0000000000..4737ef904e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_dataclasses_verbose.py
@@ -0,0 +1,14 @@
+from dataclasses import dataclass
+from dataclasses import field
+
+
+def test_dataclasses_verbose() -> None:
+ @dataclass
+ class SimpleDataObject:
+ field_a: int = field()
+ field_b: str = field()
+
+ left = SimpleDataObject(1, "b")
+ right = SimpleDataObject(1, "c")
+
+ assert left == right
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_recursive_dataclasses.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_recursive_dataclasses.py
new file mode 100644
index 0000000000..0945790f00
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_recursive_dataclasses.py
@@ -0,0 +1,44 @@
+from dataclasses import dataclass
+
+
+@dataclass
+class S:
+ a: int
+ b: str
+
+
+@dataclass
+class C:
+ c: S
+ d: S
+
+
+@dataclass
+class C2:
+ e: C
+ f: S
+
+
+@dataclass
+class C3:
+ g: S
+ h: C2
+ i: str
+ j: str
+
+
+def test_recursive_dataclasses():
+ left = C3(
+ S(10, "ten"),
+ C2(C(S(1, "one"), S(2, "two")), S(2, "three")),
+ "equal",
+ "left",
+ )
+ right = C3(
+ S(20, "xxx"),
+ C2(C(S(1, "one"), S(2, "yyy")), S(3, "three")),
+ "equal",
+ "right",
+ )
+
+ assert left == right
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_two_different_dataclasses.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_two_different_dataclasses.py
new file mode 100644
index 0000000000..0a4820c69b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/dataclasses/test_compare_two_different_dataclasses.py
@@ -0,0 +1,19 @@
+from dataclasses import dataclass
+from dataclasses import field
+
+
+def test_comparing_two_different_data_classes() -> None:
+ @dataclass
+ class SimpleDataObjectOne:
+ field_a: int = field()
+ field_b: str = field()
+
+ @dataclass
+ class SimpleDataObjectTwo:
+ field_a: int = field()
+ field_b: str = field()
+
+ left = SimpleDataObjectOne(1, "b")
+ right = SimpleDataObjectTwo(1, "c")
+
+ assert left != right # type: ignore[comparison-overlap]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/doctest/main_py/__main__.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/doctest/main_py/__main__.py
new file mode 100644
index 0000000000..e471d06d64
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/doctest/main_py/__main__.py
@@ -0,0 +1,2 @@
+def test_this_is_ignored():
+ assert True
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/doctest/main_py/test_normal_module.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/doctest/main_py/test_normal_module.py
new file mode 100644
index 0000000000..700cc9750c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/doctest/main_py/test_normal_module.py
@@ -0,0 +1,6 @@
+def test_doc():
+ """
+ >>> 10 > 5
+ True
+ """
+ assert False
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/__init__.pyi b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/__init__.pyi
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/__init__.pyi
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/conftest.py
new file mode 100644
index 0000000000..a7a5e9db80
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/conftest.py
@@ -0,0 +1,15 @@
+import pytest
+
+
+class CustomItem(pytest.Item):
+ def runtest(self):
+ pass
+
+
+class CustomFile(pytest.File):
+ def collect(self):
+ yield CustomItem.from_parent(name="foo", parent=self)
+
+
+def pytest_collect_file(file_path, parent):
+ return CustomFile.from_parent(path=file_path, parent=parent)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/foo/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/foo/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/foo/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/foo/test_foo.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/foo/test_foo.py
new file mode 100644
index 0000000000..f174823854
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/custom_item/foo/test_foo.py
@@ -0,0 +1,2 @@
+def test():
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/conftest.py
new file mode 100644
index 0000000000..be5adbeb6e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/conftest.py
@@ -0,0 +1,7 @@
+import pytest
+
+
+@pytest.fixture
+def arg1(request):
+ with pytest.raises(pytest.FixtureLookupError):
+ request.getfixturevalue("arg2")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/test_in_sub1.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/test_in_sub1.py
new file mode 100644
index 0000000000..df36da1369
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub1/test_in_sub1.py
@@ -0,0 +1,2 @@
+def test_1(arg1):
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/conftest.py
new file mode 100644
index 0000000000..00981c5dc1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/conftest.py
@@ -0,0 +1,6 @@
+import pytest
+
+
+@pytest.fixture
+def arg2(request):
+ pytest.raises(Exception, request.getfixturevalue, "arg1")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/test_in_sub2.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/test_in_sub2.py
new file mode 100644
index 0000000000..1c34f94acc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_conftest_funcargs_only_available_in_subdir/sub2/test_in_sub2.py
@@ -0,0 +1,2 @@
+def test_2(arg2):
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_detect_recursive_dependency_error.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_detect_recursive_dependency_error.py
new file mode 100644
index 0000000000..d1efcbb338
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_detect_recursive_dependency_error.py
@@ -0,0 +1,15 @@
+import pytest
+
+
+@pytest.fixture
+def fix1(fix2):
+ return 1
+
+
+@pytest.fixture
+def fix2(fix1):
+ return 1
+
+
+def test(fix1):
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/__init__.pyi b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/__init__.pyi
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/__init__.pyi
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/conftest.py
new file mode 100644
index 0000000000..5dfd2f7795
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/conftest.py
@@ -0,0 +1,6 @@
+import pytest
+
+
+@pytest.fixture
+def spam():
+ return "spam"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/__init__.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/conftest.py
new file mode 100644
index 0000000000..4e22ce5a13
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/conftest.py
@@ -0,0 +1,6 @@
+import pytest
+
+
+@pytest.fixture
+def spam(spam):
+ return spam * 2
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/test_spam.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/test_spam.py
new file mode 100644
index 0000000000..0d891fbb50
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_conftest/pkg/test_spam.py
@@ -0,0 +1,2 @@
+def test_spam(spam):
+ assert spam == "spamspam"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/__init__.pyi b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/__init__.pyi
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/__init__.pyi
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/conftest.py
new file mode 100644
index 0000000000..5dfd2f7795
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/conftest.py
@@ -0,0 +1,6 @@
+import pytest
+
+
+@pytest.fixture
+def spam():
+ return "spam"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/test_extend_fixture_conftest_module.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/test_extend_fixture_conftest_module.py
new file mode 100644
index 0000000000..46d1446f47
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_conftest_module/test_extend_fixture_conftest_module.py
@@ -0,0 +1,10 @@
+import pytest
+
+
+@pytest.fixture
+def spam(spam):
+ return spam * 2
+
+
+def test_spam(spam):
+ assert spam == "spamspam"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_module_class.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_module_class.py
new file mode 100644
index 0000000000..87a0c89411
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_extend_fixture_module_class.py
@@ -0,0 +1,15 @@
+import pytest
+
+
+@pytest.fixture
+def spam():
+ return "spam"
+
+
+class TestSpam:
+ @pytest.fixture
+ def spam(self, spam):
+ return spam * 2
+
+ def test_spam(self, spam):
+ assert spam == "spamspam"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_basic.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_basic.py
new file mode 100644
index 0000000000..0661cb301f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_basic.py
@@ -0,0 +1,15 @@
+import pytest
+
+
+@pytest.fixture
+def some(request):
+ return request.function.__name__
+
+
+@pytest.fixture
+def other(request):
+ return 42
+
+
+def test_func(some, other):
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_classlevel.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_classlevel.py
new file mode 100644
index 0000000000..256b92a17d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_classlevel.py
@@ -0,0 +1,10 @@
+import pytest
+
+
+class TestClass:
+ @pytest.fixture
+ def something(self, request):
+ return request.instance
+
+ def test_method(self, something):
+ assert something is self
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_modulelevel.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_modulelevel.py
new file mode 100644
index 0000000000..e15dbd2ca4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookup_modulelevel.py
@@ -0,0 +1,15 @@
+import pytest
+
+
+@pytest.fixture
+def something(request):
+ return request.function.__name__
+
+
+class TestClass:
+ def test_method(self, something):
+ assert something == "test_method"
+
+
+def test_func(something):
+ assert something == "test_func"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookupfails.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookupfails.py
new file mode 100644
index 0000000000..b775203231
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/fill_fixtures/test_funcarg_lookupfails.py
@@ -0,0 +1,10 @@
+import pytest
+
+
+@pytest.fixture
+def xyzsomething(request):
+ return 42
+
+
+def test_func(some):
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/test_fixture_named_request.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/test_fixture_named_request.py
new file mode 100644
index 0000000000..75514bf8b8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/test_fixture_named_request.py
@@ -0,0 +1,10 @@
+import pytest
+
+
+@pytest.fixture
+def request():
+ pass
+
+
+def test():
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/test_getfixturevalue_dynamic.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/test_getfixturevalue_dynamic.py
new file mode 100644
index 0000000000..055a1220b1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/fixtures/test_getfixturevalue_dynamic.py
@@ -0,0 +1,20 @@
+import pytest
+
+
+@pytest.fixture
+def dynamic():
+ pass
+
+
+@pytest.fixture
+def a(request):
+ request.getfixturevalue("dynamic")
+
+
+@pytest.fixture
+def b(a):
+ pass
+
+
+def test(b, request):
+ assert request.fixturenames == ["b", "request", "a", "dynamic"]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue88_initial_file_multinodes/__init__.pyi b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue88_initial_file_multinodes/__init__.pyi
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue88_initial_file_multinodes/__init__.pyi
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue88_initial_file_multinodes/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue88_initial_file_multinodes/conftest.py
new file mode 100644
index 0000000000..cb8f5d671e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue88_initial_file_multinodes/conftest.py
@@ -0,0 +1,14 @@
+import pytest
+
+
+class MyFile(pytest.File):
+ def collect(self):
+ return [MyItem.from_parent(name="hello", parent=self)]
+
+
+def pytest_collect_file(file_path, parent):
+ return MyFile.from_parent(path=file_path, parent=parent)
+
+
+class MyItem(pytest.Item):
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue88_initial_file_multinodes/test_hello.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue88_initial_file_multinodes/test_hello.py
new file mode 100644
index 0000000000..56444d1474
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue88_initial_file_multinodes/test_hello.py
@@ -0,0 +1,2 @@
+def test_hello():
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue_519.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue_519.py
new file mode 100644
index 0000000000..e44367fca0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/issue_519.py
@@ -0,0 +1,53 @@
+import pprint
+from typing import List
+from typing import Tuple
+
+import pytest
+
+
+def pytest_generate_tests(metafunc):
+ if "arg1" in metafunc.fixturenames:
+ metafunc.parametrize("arg1", ["arg1v1", "arg1v2"], scope="module")
+
+ if "arg2" in metafunc.fixturenames:
+ metafunc.parametrize("arg2", ["arg2v1", "arg2v2"], scope="function")
+
+
+@pytest.fixture(scope="session")
+def checked_order():
+ order: List[Tuple[str, str, str]] = []
+
+ yield order
+ pprint.pprint(order)
+ assert order == [
+ ("issue_519.py", "fix1", "arg1v1"),
+ ("test_one[arg1v1-arg2v1]", "fix2", "arg2v1"),
+ ("test_two[arg1v1-arg2v1]", "fix2", "arg2v1"),
+ ("test_one[arg1v1-arg2v2]", "fix2", "arg2v2"),
+ ("test_two[arg1v1-arg2v2]", "fix2", "arg2v2"),
+ ("issue_519.py", "fix1", "arg1v2"),
+ ("test_one[arg1v2-arg2v1]", "fix2", "arg2v1"),
+ ("test_two[arg1v2-arg2v1]", "fix2", "arg2v1"),
+ ("test_one[arg1v2-arg2v2]", "fix2", "arg2v2"),
+ ("test_two[arg1v2-arg2v2]", "fix2", "arg2v2"),
+ ]
+
+
+@pytest.fixture(scope="module")
+def fix1(request, arg1, checked_order):
+ checked_order.append((request.node.name, "fix1", arg1))
+ yield "fix1-" + arg1
+
+
+@pytest.fixture(scope="function")
+def fix2(request, fix1, arg2, checked_order):
+ checked_order.append((request.node.name, "fix2", arg2))
+ yield "fix2-" + arg2 + fix1
+
+
+def test_one(fix2):
+ pass
+
+
+def test_two(fix2):
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/junit-10.xsd b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/junit-10.xsd
new file mode 100644
index 0000000000..286fbf7c87
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/junit-10.xsd
@@ -0,0 +1,147 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+The MIT License (MIT)
+
+Copyright (c) 2014, Gregory Boissinot
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+-->
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
+ <xs:simpleType name="SUREFIRE_TIME">
+ <xs:restriction base="xs:string">
+ <xs:pattern value="(([0-9]{0,3},)*[0-9]{3}|[0-9]{0,3})*(\.[0-9]{0,3})?"/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <xs:complexType name="rerunType" mixed="true"> <!-- mixed (XML contains text) to be compatible with version previous than 2.22.1 -->
+ <xs:sequence>
+ <xs:element name="stackTrace" type="xs:string" minOccurs="0" /> <!-- optional to be compatible with version previous than 2.22.1 -->
+ <xs:element name="system-out" type="xs:string" minOccurs="0" />
+ <xs:element name="system-err" type="xs:string" minOccurs="0" />
+ </xs:sequence>
+ <xs:attribute name="message" type="xs:string" />
+ <xs:attribute name="type" type="xs:string" use="required" />
+ </xs:complexType>
+
+ <xs:element name="failure">
+ <xs:complexType mixed="true">
+ <xs:attribute name="type" type="xs:string"/>
+ <xs:attribute name="message" type="xs:string"/>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="error">
+ <xs:complexType mixed="true">
+ <xs:attribute name="type" type="xs:string"/>
+ <xs:attribute name="message" type="xs:string"/>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="skipped">
+ <xs:complexType mixed="true">
+ <xs:attribute name="type" type="xs:string"/>
+ <xs:attribute name="message" type="xs:string"/>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="properties">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="property" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="property">
+ <xs:complexType>
+ <xs:attribute name="name" type="xs:string" use="required"/>
+ <xs:attribute name="value" type="xs:string" use="required"/>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="system-err" type="xs:string"/>
+ <xs:element name="system-out" type="xs:string"/>
+ <xs:element name="rerunFailure" type="rerunType"/>
+ <xs:element name="rerunError" type="rerunType"/>
+ <xs:element name="flakyFailure" type="rerunType"/>
+ <xs:element name="flakyError" type="rerunType"/>
+
+ <xs:element name="testcase">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:choice minOccurs="0" maxOccurs="unbounded">
+ <xs:element ref="skipped"/>
+ <xs:element ref="error"/>
+ <xs:element ref="failure"/>
+ <xs:element ref="rerunFailure" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element ref="rerunError" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element ref="flakyFailure" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element ref="flakyError" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element ref="system-out"/>
+ <xs:element ref="system-err"/>
+ </xs:choice>
+ </xs:sequence>
+ <xs:attribute name="name" type="xs:string" use="required"/>
+ <xs:attribute name="time" type="xs:string"/>
+ <xs:attribute name="classname" type="xs:string"/>
+ <xs:attribute name="group" type="xs:string"/>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="testsuite">
+ <xs:complexType>
+ <xs:choice minOccurs="0" maxOccurs="unbounded">
+ <xs:element ref="testsuite"/>
+ <xs:element ref="properties"/>
+ <xs:element ref="testcase"/>
+ <xs:element ref="system-out"/>
+ <xs:element ref="system-err"/>
+ </xs:choice>
+ <xs:attribute name="name" type="xs:string" use="required"/>
+ <xs:attribute name="tests" type="xs:string" use="required"/>
+ <xs:attribute name="failures" type="xs:string" use="required"/>
+ <xs:attribute name="errors" type="xs:string" use="required"/>
+ <xs:attribute name="group" type="xs:string" />
+ <xs:attribute name="time" type="SUREFIRE_TIME"/>
+ <xs:attribute name="skipped" type="xs:string" />
+ <xs:attribute name="timestamp" type="xs:string" />
+ <xs:attribute name="hostname" type="xs:string" />
+ <xs:attribute name="id" type="xs:string" />
+ <xs:attribute name="package" type="xs:string" />
+ <xs:attribute name="file" type="xs:string"/>
+ <xs:attribute name="log" type="xs:string"/>
+ <xs:attribute name="url" type="xs:string"/>
+ <xs:attribute name="version" type="xs:string"/>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="testsuites">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="testsuite" minOccurs="0" maxOccurs="unbounded" />
+ </xs:sequence>
+ <xs:attribute name="name" type="xs:string" />
+ <xs:attribute name="time" type="SUREFIRE_TIME"/>
+ <xs:attribute name="tests" type="xs:string" />
+ <xs:attribute name="failures" type="xs:string" />
+ <xs:attribute name="errors" type="xs:string" />
+ </xs:complexType>
+ </xs:element>
+
+</xs:schema>
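junit-10.xsd above describes the JUnit/xunit report format that pytest's test suite checks --junitxml output against; a minimal validation sketch, assuming the third-party xmlschema package is installed and a hypothetical report.xml was produced by `pytest --junitxml=report.xml`:

import xmlschema  # third-party package, assumed to be installed

schema = xmlschema.XMLSchema("junit-10.xsd")
schema.validate("report.xml")  # raises an XMLSchemaValidationError if the report does not conform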
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/marks/marks_considered_keywords/__init__.pyi b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/marks/marks_considered_keywords/__init__.pyi
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/marks/marks_considered_keywords/__init__.pyi
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/marks/marks_considered_keywords/conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/marks/marks_considered_keywords/conftest.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/marks/marks_considered_keywords/conftest.py
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/marks/marks_considered_keywords/test_marks_as_keywords.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/marks/marks_considered_keywords/test_marks_as_keywords.py
new file mode 100644
index 0000000000..35a2c7b762
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/marks/marks_considered_keywords/test_marks_as_keywords.py
@@ -0,0 +1,6 @@
+import pytest
+
+
+@pytest.mark.foo
+def test_mark():
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/perf_examples/collect_stats/.gitignore b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/perf_examples/collect_stats/.gitignore
new file mode 100644
index 0000000000..1c45c2ea35
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/perf_examples/collect_stats/.gitignore
@@ -0,0 +1 @@
+foo_*
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/perf_examples/collect_stats/generate_folders.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/perf_examples/collect_stats/generate_folders.py
new file mode 100644
index 0000000000..ff1eaf7d6b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/perf_examples/collect_stats/generate_folders.py
@@ -0,0 +1,27 @@
+import argparse
+import pathlib
+
+HERE = pathlib.Path(__file__).parent
+TEST_CONTENT = (HERE / "template_test.py").read_bytes()
+
+parser = argparse.ArgumentParser()
+parser.add_argument("numbers", nargs="*", type=int)
+
+
+def generate_folders(root, elements, *more_numbers):
+ fill_len = len(str(elements))
+ if more_numbers:
+ for i in range(elements):
+ new_folder = root.joinpath(f"foo_{i:0>{fill_len}}")
+ new_folder.mkdir()
+ new_folder.joinpath("__init__.py").write_bytes(TEST_CONTENT)
+ generate_folders(new_folder, *more_numbers)
+ else:
+ for i in range(elements):
+ new_test = root.joinpath(f"test_{i:0<{fill_len}}.py")
+ new_test.write_bytes(TEST_CONTENT)
+
+
+if __name__ == "__main__":
+ args = parser.parse_args()
+ generate_folders(HERE, *(args.numbers or (10, 100)))
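A minimal invocation sketch for generate_folders.py above (not part of the patch); run from the script's own directory, the call below mirrors `python generate_folders.py 3 2`:

import pathlib
from generate_folders import generate_folders  # assumes the script's directory is on sys.path

# Creates foo_0 .. foo_2, each holding __init__.py plus test_0.py and test_1.py
# copied from template_test.py, which is handy for benchmarking collection speed.
generate_folders(pathlib.Path("."), 3, 2)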
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/perf_examples/collect_stats/template_test.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/perf_examples/collect_stats/template_test.py
new file mode 100644
index 0000000000..064ade190a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/perf_examples/collect_stats/template_test.py
@@ -0,0 +1,2 @@
+def test_x():
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/pytest.ini b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/pytest.ini
new file mode 100644
index 0000000000..ec5fe0e83a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/pytest.ini
@@ -0,0 +1,2 @@
+[pytest]
+# dummy pytest.ini to ease direct running of example scripts
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/tmpdir/tmp_path_fixture.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/tmpdir/tmp_path_fixture.py
new file mode 100644
index 0000000000..8675eb2fa6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/tmpdir/tmp_path_fixture.py
@@ -0,0 +1,7 @@
+import pytest
+
+
+@pytest.mark.parametrize("a", [r"qwe/\abc"])
+def test_fixture(tmp_path, a):
+ assert tmp_path.is_dir()
+ assert list(tmp_path.iterdir()) == []
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_parametrized_fixture_error_message.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_parametrized_fixture_error_message.py
new file mode 100644
index 0000000000..d421ce927c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_parametrized_fixture_error_message.py
@@ -0,0 +1,14 @@
+import unittest
+
+import pytest
+
+
+@pytest.fixture(params=[1, 2])
+def two(request):
+ return request.param
+
+
+@pytest.mark.usefixtures("two")
+class TestSomethingElse(unittest.TestCase):
+ def test_two(self):
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_setup_skip.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_setup_skip.py
new file mode 100644
index 0000000000..93f79bb3b2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_setup_skip.py
@@ -0,0 +1,13 @@
+"""Skipping an entire subclass with unittest.skip() should *not* call setUp from a base class."""
+import unittest
+
+
+class Base(unittest.TestCase):
+ def setUp(self):
+ assert 0
+
+
+@unittest.skip("skip all tests")
+class Test(Base):
+ def test_foo(self):
+ assert 0
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_setup_skip_class.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_setup_skip_class.py
new file mode 100644
index 0000000000..4f251dcba1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_setup_skip_class.py
@@ -0,0 +1,14 @@
+"""Skipping an entire subclass with unittest.skip() should *not* call setUpClass from a base class."""
+import unittest
+
+
+class Base(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ assert 0
+
+
+@unittest.skip("skip all tests")
+class Test(Base):
+ def test_foo(self):
+ assert 0
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_setup_skip_module.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_setup_skip_module.py
new file mode 100644
index 0000000000..98befbe510
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_setup_skip_module.py
@@ -0,0 +1,12 @@
+"""setUpModule is always called, even if all tests in the module are skipped"""
+import unittest
+
+
+def setUpModule():
+ assert 0
+
+
+@unittest.skip("skip all tests")
+class Base(unittest.TestCase):
+ def test(self):
+ assert 0
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_unittest_asyncio.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_unittest_asyncio.py
new file mode 100644
index 0000000000..1cd2168604
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_unittest_asyncio.py
@@ -0,0 +1,25 @@
+from typing import List
+from unittest import IsolatedAsyncioTestCase
+
+
+teardowns: List[None] = []
+
+
+class AsyncArguments(IsolatedAsyncioTestCase):
+ async def asyncTearDown(self):
+ teardowns.append(None)
+
+ async def test_something_async(self):
+ async def addition(x, y):
+ return x + y
+
+ self.assertEqual(await addition(2, 2), 4)
+
+ async def test_something_async_fails(self):
+ async def addition(x, y):
+ return x + y
+
+ self.assertEqual(await addition(2, 2), 3)
+
+ def test_teardowns(self):
+ assert len(teardowns) == 2
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_unittest_asynctest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_unittest_asynctest.py
new file mode 100644
index 0000000000..fb26617067
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_unittest_asynctest.py
@@ -0,0 +1,23 @@
+"""Issue #7110"""
+import asyncio
+from typing import List
+
+import asynctest
+
+
+teardowns: List[None] = []
+
+
+class Test(asynctest.TestCase):
+ async def tearDown(self):
+ teardowns.append(None)
+
+ async def test_error(self):
+ await asyncio.sleep(0)
+ self.fail("failing on purpose")
+
+ async def test_ok(self):
+ await asyncio.sleep(0)
+
+ def test_teardowns(self):
+ assert len(teardowns) == 2
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_unittest_plain_async.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_unittest_plain_async.py
new file mode 100644
index 0000000000..78dfece684
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/unittest/test_unittest_plain_async.py
@@ -0,0 +1,6 @@
+import unittest
+
+
+class Test(unittest.TestCase):
+ async def test_foo(self):
+ assert False
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/warnings/test_group_warnings_by_message.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/warnings/test_group_warnings_by_message.py
new file mode 100644
index 0000000000..6985caa440
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/warnings/test_group_warnings_by_message.py
@@ -0,0 +1,21 @@
+import warnings
+
+import pytest
+
+
+def func(msg):
+ warnings.warn(UserWarning(msg))
+
+
+@pytest.mark.parametrize("i", range(5))
+def test_foo(i):
+ func("foo")
+
+
+def test_foo_1():
+ func("foo")
+
+
+@pytest.mark.parametrize("i", range(5))
+def test_bar(i):
+ func("bar")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_1.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_1.py
new file mode 100644
index 0000000000..b8c11cb71c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_1.py
@@ -0,0 +1,21 @@
+import warnings
+
+import pytest
+
+
+def func(msg):
+ warnings.warn(UserWarning(msg))
+
+
+@pytest.mark.parametrize("i", range(20))
+def test_foo(i):
+ func("foo")
+
+
+def test_foo_1():
+ func("foo")
+
+
+@pytest.mark.parametrize("i", range(20))
+def test_bar(i):
+ func("bar")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_2.py b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_2.py
new file mode 100644
index 0000000000..636d04a550
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/example_scripts/warnings/test_group_warnings_by_message_summary/test_2.py
@@ -0,0 +1,5 @@
+from test_1 import func
+
+
+def test_2():
+ func("foo")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/examples/test_issue519.py b/testing/web-platform/tests/tools/third_party/pytest/testing/examples/test_issue519.py
new file mode 100644
index 0000000000..7b9c109889
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/examples/test_issue519.py
@@ -0,0 +1,7 @@
+from _pytest.pytester import Pytester
+
+
+def test_519(pytester: Pytester) -> None:
+ pytester.copy_example("issue_519.py")
+ res = pytester.runpytest("issue_519.py")
+ res.assert_outcomes(passed=8)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/.gitignore b/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/.gitignore
new file mode 100644
index 0000000000..b533190872
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/.gitignore
@@ -0,0 +1,3 @@
+build/
+dist/
+*.spec
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/create_executable.py b/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/create_executable.py
new file mode 100644
index 0000000000..998df7b1ca
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/create_executable.py
@@ -0,0 +1,11 @@
+"""Generate an executable with pytest runner embedded using PyInstaller."""
+if __name__ == "__main__":
+ import pytest
+ import subprocess
+
+ hidden = []
+ for x in pytest.freeze_includes():
+ hidden.extend(["--hidden-import", x])
+ hidden.extend(["--hidden-import", "distutils"])
+ args = ["pyinstaller", "--noconfirm"] + hidden + ["runtests_script.py"]
+ subprocess.check_call(" ".join(args), shell=True)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/runtests_script.py b/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/runtests_script.py
new file mode 100644
index 0000000000..591863016a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/runtests_script.py
@@ -0,0 +1,10 @@
+"""
+This is the script that is actually frozen into an executable: simply executes
+pytest main().
+"""
+
+if __name__ == "__main__":
+ import sys
+ import pytest
+
+ sys.exit(pytest.main())
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/tests/test_doctest.txt b/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/tests/test_doctest.txt
new file mode 100644
index 0000000000..e18a4b68cc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/tests/test_doctest.txt
@@ -0,0 +1,6 @@
+
+
+Testing doctest::
+
+ >>> 1 + 1
+ 2
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/tests/test_trivial.py b/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/tests/test_trivial.py
new file mode 100644
index 0000000000..08a55552ab
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/tests/test_trivial.py
@@ -0,0 +1,6 @@
+def test_upper():
+ assert "foo".upper() == "FOO"
+
+
+def test_lower():
+ assert "FOO".lower() == "foo"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/tox_run.py b/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/tox_run.py
new file mode 100644
index 0000000000..678a69c858
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/freeze/tox_run.py
@@ -0,0 +1,12 @@
+"""
+Called by tox.ini: uses the generated executable to run the tests in the ./tests/
+directory.
+"""
+if __name__ == "__main__":
+ import os
+ import sys
+
+ executable = os.path.join(os.getcwd(), "dist", "runtests_script", "runtests_script")
+ if sys.platform.startswith("win"):
+ executable += ".exe"
+ sys.exit(os.system("%s tests" % executable))
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/io/test_saferepr.py b/testing/web-platform/tests/tools/third_party/pytest/testing/io/test_saferepr.py
new file mode 100644
index 0000000000..63d3af822b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/io/test_saferepr.py
@@ -0,0 +1,181 @@
+import pytest
+from _pytest._io.saferepr import _pformat_dispatch
+from _pytest._io.saferepr import DEFAULT_REPR_MAX_SIZE
+from _pytest._io.saferepr import saferepr
+
+
+def test_simple_repr():
+ assert saferepr(1) == "1"
+ assert saferepr(None) == "None"
+
+
+def test_maxsize():
+ s = saferepr("x" * 50, maxsize=25)
+ assert len(s) == 25
+ expected = repr("x" * 10 + "..." + "x" * 10)
+ assert s == expected
+
+
+def test_no_maxsize():
+ text = "x" * DEFAULT_REPR_MAX_SIZE * 10
+ s = saferepr(text, maxsize=None)
+ expected = repr(text)
+ assert s == expected
+
+
+def test_maxsize_error_on_instance():
+ class A:
+ def __repr__(self):
+ raise ValueError("...")
+
+ s = saferepr(("*" * 50, A()), maxsize=25)
+ assert len(s) == 25
+ assert s[0] == "(" and s[-1] == ")"
+
+
+def test_exceptions() -> None:
+ class BrokenRepr:
+ def __init__(self, ex):
+ self.ex = ex
+
+ def __repr__(self):
+ raise self.ex
+
+ class BrokenReprException(Exception):
+ __str__ = None # type: ignore[assignment]
+ __repr__ = None # type: ignore[assignment]
+
+ assert "Exception" in saferepr(BrokenRepr(Exception("broken")))
+ s = saferepr(BrokenReprException("really broken"))
+ assert "TypeError" in s
+ assert "TypeError" in saferepr(BrokenRepr("string"))
+
+ none = None
+ try:
+ none() # type: ignore[misc]
+ except BaseException as exc:
+ exp_exc = repr(exc)
+ obj = BrokenRepr(BrokenReprException("omg even worse"))
+ s2 = saferepr(obj)
+ assert s2 == (
+ "<[unpresentable exception ({!s}) raised in repr()] BrokenRepr object at 0x{:x}>".format(
+ exp_exc, id(obj)
+ )
+ )
+
+
+def test_baseexception():
+ """Test saferepr() with BaseExceptions, which includes pytest outcomes."""
+
+ class RaisingOnStrRepr(BaseException):
+ def __init__(self, exc_types):
+ self.exc_types = exc_types
+
+ def raise_exc(self, *args):
+ try:
+ self.exc_type = self.exc_types.pop(0)
+ except IndexError:
+ pass
+ if hasattr(self.exc_type, "__call__"):
+ raise self.exc_type(*args)
+ raise self.exc_type
+
+ def __str__(self):
+ self.raise_exc("__str__")
+
+ def __repr__(self):
+ self.raise_exc("__repr__")
+
+ class BrokenObj:
+ def __init__(self, exc):
+ self.exc = exc
+
+ def __repr__(self):
+ raise self.exc
+
+ __str__ = __repr__
+
+ baseexc_str = BaseException("__str__")
+ obj = BrokenObj(RaisingOnStrRepr([BaseException]))
+ assert saferepr(obj) == (
+ "<[unpresentable exception ({!r}) "
+ "raised in repr()] BrokenObj object at 0x{:x}>".format(baseexc_str, id(obj))
+ )
+ obj = BrokenObj(RaisingOnStrRepr([RaisingOnStrRepr([BaseException])]))
+ assert saferepr(obj) == (
+ "<[{!r} raised in repr()] BrokenObj object at 0x{:x}>".format(
+ baseexc_str, id(obj)
+ )
+ )
+
+ with pytest.raises(KeyboardInterrupt):
+ saferepr(BrokenObj(KeyboardInterrupt()))
+
+ with pytest.raises(SystemExit):
+ saferepr(BrokenObj(SystemExit()))
+
+ with pytest.raises(KeyboardInterrupt):
+ saferepr(BrokenObj(RaisingOnStrRepr([KeyboardInterrupt])))
+
+ with pytest.raises(SystemExit):
+ saferepr(BrokenObj(RaisingOnStrRepr([SystemExit])))
+
+ with pytest.raises(KeyboardInterrupt):
+ print(saferepr(BrokenObj(RaisingOnStrRepr([BaseException, KeyboardInterrupt]))))
+
+ with pytest.raises(SystemExit):
+ saferepr(BrokenObj(RaisingOnStrRepr([BaseException, SystemExit])))
+
+
+def test_buggy_builtin_repr():
+ # Simulate a case where a repr for a builtin raises.
+ # reprlib dispatches by type name, so use "int".
+
+ class int:
+ def __repr__(self):
+ raise ValueError("Buggy repr!")
+
+ assert "Buggy" in saferepr(int())
+
+
+def test_big_repr():
+ from _pytest._io.saferepr import SafeRepr
+
+ assert len(saferepr(range(1000))) <= len("[" + SafeRepr(0).maxlist * "1000" + "]")
+
+
+def test_repr_on_newstyle() -> None:
+ class Function:
+ def __repr__(self):
+ return "<%s>" % (self.name) # type: ignore[attr-defined]
+
+ assert saferepr(Function())
+
+
+def test_unicode():
+ val = "£€"
+ reprval = "'£€'"
+ assert saferepr(val) == reprval
+
+
+def test_pformat_dispatch():
+ assert _pformat_dispatch("a") == "'a'"
+ assert _pformat_dispatch("a" * 10, width=5) == "'aaaaaaaaaa'"
+ assert _pformat_dispatch("foo bar", width=5) == "('foo '\n 'bar')"
+
+
+def test_broken_getattribute():
+ """saferepr() can create proper representations of classes with
+ broken __getattribute__ (#7145)
+ """
+
+ class SomeClass:
+ def __getattribute__(self, attr):
+ raise RuntimeError
+
+ def __repr__(self):
+ raise RuntimeError
+
+ assert saferepr(SomeClass()).startswith(
+ "<[RuntimeError() raised in repr()] SomeClass object at 0x"
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/io/test_terminalwriter.py b/testing/web-platform/tests/tools/third_party/pytest/testing/io/test_terminalwriter.py
new file mode 100644
index 0000000000..4866c94a55
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/io/test_terminalwriter.py
@@ -0,0 +1,293 @@
+import io
+import os
+import re
+import shutil
+import sys
+from pathlib import Path
+from typing import Generator
+from unittest import mock
+
+import pytest
+from _pytest._io import terminalwriter
+from _pytest.monkeypatch import MonkeyPatch
+
+
+# These tests were initially copied from py 1.8.1.
+
+
+def test_terminal_width_COLUMNS(monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.setenv("COLUMNS", "42")
+ assert terminalwriter.get_terminal_width() == 42
+ monkeypatch.delenv("COLUMNS", raising=False)
+
+
+def test_terminalwriter_width_bogus(monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.setattr(shutil, "get_terminal_size", mock.Mock(return_value=(10, 10)))
+ monkeypatch.delenv("COLUMNS", raising=False)
+ tw = terminalwriter.TerminalWriter()
+ assert tw.fullwidth == 80
+
+
+def test_terminalwriter_computes_width(monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.setattr(terminalwriter, "get_terminal_width", lambda: 42)
+ tw = terminalwriter.TerminalWriter()
+ assert tw.fullwidth == 42
+
+
+def test_terminalwriter_dumb_term_no_markup(monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.setattr(os, "environ", {"TERM": "dumb", "PATH": ""})
+
+ class MyFile:
+ closed = False
+
+ def isatty(self):
+ return True
+
+ with monkeypatch.context() as m:
+ m.setattr(sys, "stdout", MyFile())
+ assert sys.stdout.isatty()
+ tw = terminalwriter.TerminalWriter()
+ assert not tw.hasmarkup
+
+
+def test_terminalwriter_not_unicode() -> None:
+ """If the file doesn't support Unicode, the string is unicode-escaped (#7475)."""
+ buffer = io.BytesIO()
+ file = io.TextIOWrapper(buffer, encoding="cp1252")
+ tw = terminalwriter.TerminalWriter(file)
+ tw.write("hello 🌀 wôrld ×בג", flush=True)
+ assert buffer.getvalue() == br"hello \U0001f300 w\xf4rld \u05d0\u05d1\u05d2"
+
+
+win32 = int(sys.platform == "win32")
+
+
+class TestTerminalWriter:
+ @pytest.fixture(params=["path", "stringio"])
+ def tw(
+ self, request, tmp_path: Path
+ ) -> Generator[terminalwriter.TerminalWriter, None, None]:
+ if request.param == "path":
+ p = tmp_path.joinpath("tmpfile")
+ f = open(str(p), "w+", encoding="utf8")
+ tw = terminalwriter.TerminalWriter(f)
+
+ def getlines():
+ f.flush()
+ with open(str(p), encoding="utf8") as fp:
+ return fp.readlines()
+
+ elif request.param == "stringio":
+ f = io.StringIO()
+ tw = terminalwriter.TerminalWriter(f)
+
+ def getlines():
+ f.seek(0)
+ return f.readlines()
+
+ tw.getlines = getlines # type: ignore
+ tw.getvalue = lambda: "".join(getlines()) # type: ignore
+
+ with f:
+ yield tw
+
+ def test_line(self, tw) -> None:
+ tw.line("hello")
+ lines = tw.getlines()
+ assert len(lines) == 1
+ assert lines[0] == "hello\n"
+
+ def test_line_unicode(self, tw) -> None:
+ msg = "b\u00f6y"
+ tw.line(msg)
+ lines = tw.getlines()
+ assert lines[0] == msg + "\n"
+
+ def test_sep_no_title(self, tw) -> None:
+ tw.sep("-", fullwidth=60)
+ lines = tw.getlines()
+ assert len(lines) == 1
+ assert lines[0] == "-" * (60 - win32) + "\n"
+
+ def test_sep_with_title(self, tw) -> None:
+ tw.sep("-", "hello", fullwidth=60)
+ lines = tw.getlines()
+ assert len(lines) == 1
+ assert lines[0] == "-" * 26 + " hello " + "-" * (27 - win32) + "\n"
+
+ def test_sep_longer_than_width(self, tw) -> None:
+ tw.sep("-", "a" * 10, fullwidth=5)
+ (line,) = tw.getlines()
+ # even though the string is wider than the line, we still get a separator
+ assert line == "- aaaaaaaaaa -\n"
+
+ @pytest.mark.skipif(sys.platform == "win32", reason="win32 has no native ansi")
+ @pytest.mark.parametrize("bold", (True, False))
+ @pytest.mark.parametrize("color", ("red", "green"))
+ def test_markup(self, tw, bold: bool, color: str) -> None:
+ text = tw.markup("hello", **{color: True, "bold": bold})
+ assert "hello" in text
+
+ def test_markup_bad(self, tw) -> None:
+ with pytest.raises(ValueError):
+ tw.markup("x", wronkw=3)
+ with pytest.raises(ValueError):
+ tw.markup("x", wronkw=0)
+
+ def test_line_write_markup(self, tw) -> None:
+ tw.hasmarkup = True
+ tw.line("x", bold=True)
+ tw.write("x\n", red=True)
+ lines = tw.getlines()
+ if sys.platform != "win32":
+ assert len(lines[0]) >= 2, lines
+ assert len(lines[1]) >= 2, lines
+
+ def test_attr_fullwidth(self, tw) -> None:
+ tw.sep("-", "hello", fullwidth=70)
+ tw.fullwidth = 70
+ tw.sep("-", "hello")
+ lines = tw.getlines()
+ assert len(lines[0]) == len(lines[1])
+
+
+@pytest.mark.skipif(sys.platform == "win32", reason="win32 has no native ansi")
+def test_attr_hasmarkup() -> None:
+ file = io.StringIO()
+ tw = terminalwriter.TerminalWriter(file)
+ assert not tw.hasmarkup
+ tw.hasmarkup = True
+ tw.line("hello", bold=True)
+ s = file.getvalue()
+ assert len(s) > len("hello\n")
+ assert "\x1b[1m" in s
+ assert "\x1b[0m" in s
+
+
+def assert_color_set():
+ file = io.StringIO()
+ tw = terminalwriter.TerminalWriter(file)
+ assert tw.hasmarkup
+ tw.line("hello", bold=True)
+ s = file.getvalue()
+ assert len(s) > len("hello\n")
+ assert "\x1b[1m" in s
+ assert "\x1b[0m" in s
+
+
+def assert_color_not_set():
+ f = io.StringIO()
+ f.isatty = lambda: True # type: ignore
+ tw = terminalwriter.TerminalWriter(file=f)
+ assert not tw.hasmarkup
+ tw.line("hello", bold=True)
+ s = f.getvalue()
+ assert s == "hello\n"
+
+
+def test_should_do_markup_PY_COLORS_eq_1(monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.setitem(os.environ, "PY_COLORS", "1")
+ assert_color_set()
+
+
+def test_should_not_do_markup_PY_COLORS_eq_0(monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.setitem(os.environ, "PY_COLORS", "0")
+ assert_color_not_set()
+
+
+def test_should_not_do_markup_NO_COLOR(monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.setitem(os.environ, "NO_COLOR", "1")
+ assert_color_not_set()
+
+
+def test_should_do_markup_FORCE_COLOR(monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.setitem(os.environ, "FORCE_COLOR", "1")
+ assert_color_set()
+
+
+def test_should_not_do_markup_NO_COLOR_and_FORCE_COLOR(
+ monkeypatch: MonkeyPatch,
+) -> None:
+ monkeypatch.setitem(os.environ, "NO_COLOR", "1")
+ monkeypatch.setitem(os.environ, "FORCE_COLOR", "1")
+ assert_color_not_set()
+
+
+class TestTerminalWriterLineWidth:
+ def test_init(self) -> None:
+ tw = terminalwriter.TerminalWriter()
+ assert tw.width_of_current_line == 0
+
+ def test_update(self) -> None:
+ tw = terminalwriter.TerminalWriter()
+ tw.write("hello world")
+ assert tw.width_of_current_line == 11
+
+ def test_update_with_newline(self) -> None:
+ tw = terminalwriter.TerminalWriter()
+ tw.write("hello\nworld")
+ assert tw.width_of_current_line == 5
+
+ def test_update_with_wide_text(self) -> None:
+ tw = terminalwriter.TerminalWriter()
+ tw.write("ä¹‡ä¹‚ã„’å°ºå‚ ã„’å„丨匚匚")
+ assert tw.width_of_current_line == 21 # 5*2 + 1 + 5*2
+
+ def test_composed(self) -> None:
+ tw = terminalwriter.TerminalWriter()
+ text = "café food"
+ assert len(text) == 9
+ tw.write(text)
+ assert tw.width_of_current_line == 9
+
+ def test_combining(self) -> None:
+ tw = terminalwriter.TerminalWriter()
+ text = "cafeÌ food"
+ assert len(text) == 10
+ tw.write(text)
+ assert tw.width_of_current_line == 9
+
+
+@pytest.mark.parametrize(
+ ("has_markup", "code_highlight", "expected"),
+ [
+ pytest.param(
+ True,
+ True,
+ "{kw}assert{hl-reset} {number}0{hl-reset}\n",
+ id="with markup and code_highlight",
+ ),
+ pytest.param(
+ True,
+ False,
+ "assert 0\n",
+ id="with markup but no code_highlight",
+ ),
+ pytest.param(
+ False,
+ True,
+ "assert 0\n",
+ id="without markup but with code_highlight",
+ ),
+ pytest.param(
+ False,
+ False,
+ "assert 0\n",
+ id="neither markup nor code_highlight",
+ ),
+ ],
+)
+def test_code_highlight(has_markup, code_highlight, expected, color_mapping):
+ f = io.StringIO()
+ tw = terminalwriter.TerminalWriter(f)
+ tw.hasmarkup = has_markup
+ tw.code_highlight = code_highlight
+ tw._write_source(["assert 0"])
+
+ assert f.getvalue().splitlines(keepends=True) == color_mapping.format([expected])
+
+ with pytest.raises(
+ ValueError,
+ match=re.escape("indents size (2) should have same size as lines (1)"),
+ ):
+ tw._write_source(["assert 0"], [" ", " "])
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/io/test_wcwidth.py b/testing/web-platform/tests/tools/third_party/pytest/testing/io/test_wcwidth.py
new file mode 100644
index 0000000000..7cc74df5d0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/io/test_wcwidth.py
@@ -0,0 +1,38 @@
+import pytest
+from _pytest._io.wcwidth import wcswidth
+from _pytest._io.wcwidth import wcwidth
+
+
+@pytest.mark.parametrize(
+ ("c", "expected"),
+ [
+ ("\0", 0),
+ ("\n", -1),
+ ("a", 1),
+ ("1", 1),
+ ("×", 1),
+ ("\u200B", 0),
+ ("\u1ABE", 0),
+ ("\u0591", 0),
+ ("ðŸ‰", 2),
+ ("$", 2),
+ ],
+)
+def test_wcwidth(c: str, expected: int) -> None:
+ assert wcwidth(c) == expected
+
+
+@pytest.mark.parametrize(
+ ("s", "expected"),
+ [
+ ("", 0),
+ ("hello, world!", 13),
+ ("hello, world!\n", -1),
+ ("0123456789", 10),
+ ("שלו×, עול×!", 11),
+ ("שְבֻעָיי×", 6),
+ ("ðŸ‰ðŸ‰ðŸ‰", 6),
+ ],
+)
+def test_wcswidth(s: str, expected: int) -> None:
+ assert wcswidth(s) == expected
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/logging/test_fixture.py b/testing/web-platform/tests/tools/third_party/pytest/testing/logging/test_fixture.py
new file mode 100644
index 0000000000..bcb20de580
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/logging/test_fixture.py
@@ -0,0 +1,310 @@
+import logging
+
+import pytest
+from _pytest.logging import caplog_records_key
+from _pytest.pytester import Pytester
+
+logger = logging.getLogger(__name__)
+sublogger = logging.getLogger(__name__ + ".baz")
+
+
+def test_fixture_help(pytester: Pytester) -> None:
+ result = pytester.runpytest("--fixtures")
+ result.stdout.fnmatch_lines(["*caplog*"])
+
+
+def test_change_level(caplog):
+ caplog.set_level(logging.INFO)
+ logger.debug("handler DEBUG level")
+ logger.info("handler INFO level")
+
+ caplog.set_level(logging.CRITICAL, logger=sublogger.name)
+ sublogger.warning("logger WARNING level")
+ sublogger.critical("logger CRITICAL level")
+
+ assert "DEBUG" not in caplog.text
+ assert "INFO" in caplog.text
+ assert "WARNING" not in caplog.text
+ assert "CRITICAL" in caplog.text
+
+
+def test_change_level_undo(pytester: Pytester) -> None:
+ """Ensure that 'set_level' is undone after the end of the test.
+
+ Tests the logging output itself (affected by both logger and handler levels).
+ """
+ pytester.makepyfile(
+ """
+ import logging
+
+ def test1(caplog):
+ caplog.set_level(logging.INFO)
+ # using + operator here so fnmatch_lines doesn't match the code in the traceback
+ logging.info('log from ' + 'test1')
+ assert 0
+
+ def test2(caplog):
+ # using + operator here so fnmatch_lines doesn't match the code in the traceback
+ logging.info('log from ' + 'test2')
+ assert 0
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*log from test1*", "*2 failed in *"])
+ result.stdout.no_fnmatch_line("*log from test2*")
+
+
+def test_change_level_undos_handler_level(pytester: Pytester) -> None:
+ """Ensure that 'set_level' is undone after the end of the test (handler).
+
+ Issue #7569. Tests the handler level specifically.
+ """
+ pytester.makepyfile(
+ """
+ import logging
+
+ def test1(caplog):
+ assert caplog.handler.level == 0
+ caplog.set_level(9999)
+ caplog.set_level(41)
+ assert caplog.handler.level == 41
+
+ def test2(caplog):
+ assert caplog.handler.level == 0
+
+ def test3(caplog):
+ assert caplog.handler.level == 0
+ caplog.set_level(43)
+ assert caplog.handler.level == 43
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(passed=3)
+
+
+def test_with_statement(caplog):
+ with caplog.at_level(logging.INFO):
+ logger.debug("handler DEBUG level")
+ logger.info("handler INFO level")
+
+ with caplog.at_level(logging.CRITICAL, logger=sublogger.name):
+ sublogger.warning("logger WARNING level")
+ sublogger.critical("logger CRITICAL level")
+
+ assert "DEBUG" not in caplog.text
+ assert "INFO" in caplog.text
+ assert "WARNING" not in caplog.text
+ assert "CRITICAL" in caplog.text
+
+
+def test_log_access(caplog):
+ caplog.set_level(logging.INFO)
+ logger.info("boo %s", "arg")
+ assert caplog.records[0].levelname == "INFO"
+ assert caplog.records[0].msg == "boo %s"
+ assert "boo arg" in caplog.text
+
+
+def test_messages(caplog):
+ caplog.set_level(logging.INFO)
+ logger.info("boo %s", "arg")
+ logger.info("bar %s\nbaz %s", "arg1", "arg2")
+ assert "boo arg" == caplog.messages[0]
+ assert "bar arg1\nbaz arg2" == caplog.messages[1]
+ assert caplog.text.count("\n") > len(caplog.messages)
+ assert len(caplog.text.splitlines()) > len(caplog.messages)
+
+ try:
+ raise Exception("test")
+ except Exception:
+ logger.exception("oops")
+
+ assert "oops" in caplog.text
+ assert "oops" in caplog.messages[-1]
+ # Tracebacks are stored in the record and not added until the formatter or handler.
+ assert "Exception" in caplog.text
+ assert "Exception" not in caplog.messages[-1]
+
+
+def test_record_tuples(caplog):
+ caplog.set_level(logging.INFO)
+ logger.info("boo %s", "arg")
+
+ assert caplog.record_tuples == [(__name__, logging.INFO, "boo arg")]
+
+
+def test_unicode(caplog):
+ caplog.set_level(logging.INFO)
+ logger.info("bū")
+ assert caplog.records[0].levelname == "INFO"
+ assert caplog.records[0].msg == "bū"
+ assert "bū" in caplog.text
+
+
+def test_clear(caplog):
+ caplog.set_level(logging.INFO)
+ logger.info("bū")
+ assert len(caplog.records)
+ assert caplog.text
+ caplog.clear()
+ assert not len(caplog.records)
+ assert not caplog.text
+
+
+@pytest.fixture
+def logging_during_setup_and_teardown(caplog):
+ caplog.set_level("INFO")
+ logger.info("a_setup_log")
+ yield
+ logger.info("a_teardown_log")
+ assert [x.message for x in caplog.get_records("teardown")] == ["a_teardown_log"]
+
+
+def test_caplog_captures_for_all_stages(caplog, logging_during_setup_and_teardown):
+ assert not caplog.records
+ assert not caplog.get_records("call")
+ logger.info("a_call_log")
+ assert [x.message for x in caplog.get_records("call")] == ["a_call_log"]
+
+ assert [x.message for x in caplog.get_records("setup")] == ["a_setup_log"]
+
+ # This reaches into private API, don't use this type of thing in real tests!
+ assert set(caplog._item.stash[caplog_records_key]) == {"setup", "call"}
+
+
+def test_ini_controls_global_log_level(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_level_override(request, caplog):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_level == logging.ERROR
+ logger = logging.getLogger('catchlog')
+ logger.warning("WARNING message won't be shown")
+ logger.error("ERROR message will be shown")
+ assert 'WARNING' not in caplog.text
+ assert 'ERROR' in caplog.text
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ log_level=ERROR
+ """
+ )
+
+ result = pytester.runpytest()
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+
+
+def test_caplog_can_override_global_log_level(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_level_override(request, caplog):
+ logger = logging.getLogger('catchlog')
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_level == logging.WARNING
+
+ logger.info("INFO message won't be shown")
+
+ caplog.set_level(logging.INFO, logger.name)
+
+ with caplog.at_level(logging.DEBUG, logger.name):
+ logger.debug("DEBUG message will be shown")
+
+ logger.debug("DEBUG message won't be shown")
+
+ with caplog.at_level(logging.CRITICAL, logger.name):
+ logger.warning("WARNING message won't be shown")
+
+ logger.debug("DEBUG message won't be shown")
+ logger.info("INFO message will be shown")
+
+ assert "message won't be shown" not in caplog.text
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ log_level=WARNING
+ """
+ )
+
+ result = pytester.runpytest()
+ assert result.ret == 0
+
+
+def test_caplog_captures_despite_exception(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_level_override(request, caplog):
+ logger = logging.getLogger('catchlog')
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_level == logging.WARNING
+
+ logger.error("ERROR message " + "will be shown")
+
+ with caplog.at_level(logging.DEBUG, logger.name):
+ logger.debug("DEBUG message " + "won't be shown")
+ raise Exception()
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ log_level=WARNING
+ """
+ )
+
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*ERROR message will be shown*"])
+ result.stdout.no_fnmatch_line("*DEBUG message won't be shown*")
+ assert result.ret == 1
+
+
+def test_log_report_captures_according_to_config_option_upon_failure(
+ pytester: Pytester,
+) -> None:
+ """Test that upon failure:
+ (1) `caplog` succeeds in capturing the DEBUG message and asserting on it => no `Exception` is raised.
+ (2) The `DEBUG` message does NOT appear in the `Captured log call` report.
+ (3) The stdout, `INFO`, and `WARNING` messages DO appear in the test reports due to `--log-level=INFO`.
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+
+ def function_that_logs():
+ logging.debug('DEBUG log ' + 'message')
+ logging.info('INFO log ' + 'message')
+ logging.warning('WARNING log ' + 'message')
+ print('Print ' + 'message')
+
+ def test_that_fails(request, caplog):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_level == logging.INFO
+
+ with caplog.at_level(logging.DEBUG):
+ function_that_logs()
+
+ if 'DEBUG log ' + 'message' not in caplog.text:
+ raise Exception('caplog failed to ' + 'capture DEBUG')
+
+ assert False
+ """
+ )
+
+ result = pytester.runpytest("--log-level=INFO")
+ result.stdout.no_fnmatch_line("*Exception: caplog failed to capture DEBUG*")
+ result.stdout.no_fnmatch_line("*DEBUG log message*")
+ result.stdout.fnmatch_lines(
+ ["*Print message*", "*INFO log message*", "*WARNING log message*"]
+ )
+ assert result.ret == 1
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/logging/test_formatter.py b/testing/web-platform/tests/tools/third_party/pytest/testing/logging/test_formatter.py
new file mode 100644
index 0000000000..3797129372
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/logging/test_formatter.py
@@ -0,0 +1,173 @@
+import logging
+from typing import Any
+
+from _pytest._io import TerminalWriter
+from _pytest.logging import ColoredLevelFormatter
+
+
+def test_coloredlogformatter() -> None:
+ logfmt = "%(filename)-25s %(lineno)4d %(levelname)-8s %(message)s"
+
+ record = logging.LogRecord(
+ name="dummy",
+ level=logging.INFO,
+ pathname="dummypath",
+ lineno=10,
+ msg="Test Message",
+ args=(),
+ exc_info=None,
+ )
+
+ tw = TerminalWriter()
+ tw.hasmarkup = True
+ formatter = ColoredLevelFormatter(tw, logfmt)
+ output = formatter.format(record)
+ assert output == (
+ "dummypath 10 \x1b[32mINFO \x1b[0m Test Message"
+ )
+
+ tw.hasmarkup = False
+ formatter = ColoredLevelFormatter(tw, logfmt)
+ output = formatter.format(record)
+ assert output == ("dummypath 10 INFO Test Message")
+
+
+def test_coloredlogformatter_with_width_precision() -> None:
+ logfmt = "%(filename)-25s %(lineno)4d %(levelname)-8.8s %(message)s"
+
+ record = logging.LogRecord(
+ name="dummy",
+ level=logging.INFO,
+ pathname="dummypath",
+ lineno=10,
+ msg="Test Message",
+ args=(),
+ exc_info=None,
+ )
+
+ tw = TerminalWriter()
+ tw.hasmarkup = True
+ formatter = ColoredLevelFormatter(tw, logfmt)
+ output = formatter.format(record)
+ assert output == (
+ "dummypath 10 \x1b[32mINFO \x1b[0m Test Message"
+ )
+
+ tw.hasmarkup = False
+ formatter = ColoredLevelFormatter(tw, logfmt)
+ output = formatter.format(record)
+ assert output == ("dummypath 10 INFO Test Message")
+
+
+def test_multiline_message() -> None:
+ from _pytest.logging import PercentStyleMultiline
+
+ logfmt = "%(filename)-25s %(lineno)4d %(levelname)-8s %(message)s"
+
+ record: Any = logging.LogRecord(
+ name="dummy",
+ level=logging.INFO,
+ pathname="dummypath",
+ lineno=10,
+ msg="Test Message line1\nline2",
+ args=(),
+ exc_info=None,
+ )
+ # this is called by logging.Formatter.format
+ record.message = record.getMessage()
+
+ ai_on_style = PercentStyleMultiline(logfmt, True)
+ output = ai_on_style.format(record)
+ assert output == (
+ "dummypath 10 INFO Test Message line1\n"
+ " line2"
+ )
+
+ ai_off_style = PercentStyleMultiline(logfmt, False)
+ output = ai_off_style.format(record)
+ assert output == (
+ "dummypath 10 INFO Test Message line1\nline2"
+ )
+
+ ai_none_style = PercentStyleMultiline(logfmt, None)
+ output = ai_none_style.format(record)
+ assert output == (
+ "dummypath 10 INFO Test Message line1\nline2"
+ )
+
+ record.auto_indent = False
+ output = ai_on_style.format(record)
+ assert output == (
+ "dummypath 10 INFO Test Message line1\nline2"
+ )
+
+ record.auto_indent = True
+ output = ai_off_style.format(record)
+ assert output == (
+ "dummypath 10 INFO Test Message line1\n"
+ " line2"
+ )
+
+ record.auto_indent = "False"
+ output = ai_on_style.format(record)
+ assert output == (
+ "dummypath 10 INFO Test Message line1\nline2"
+ )
+
+ record.auto_indent = "True"
+ output = ai_off_style.format(record)
+ assert output == (
+ "dummypath 10 INFO Test Message line1\n"
+ " line2"
+ )
+
+ # bad string values default to False
+ record.auto_indent = "junk"
+ output = ai_off_style.format(record)
+ assert output == (
+ "dummypath 10 INFO Test Message line1\nline2"
+ )
+
+ # anything other than string or int will default to False
+ record.auto_indent = dict()
+ output = ai_off_style.format(record)
+ assert output == (
+ "dummypath 10 INFO Test Message line1\nline2"
+ )
+
+ record.auto_indent = "5"
+ output = ai_off_style.format(record)
+ assert output == (
+ "dummypath 10 INFO Test Message line1\n line2"
+ )
+
+ record.auto_indent = 5
+ output = ai_off_style.format(record)
+ assert output == (
+ "dummypath 10 INFO Test Message line1\n line2"
+ )
+
+
+def test_colored_short_level() -> None:
+ logfmt = "%(levelname).1s %(message)s"
+
+ record = logging.LogRecord(
+ name="dummy",
+ level=logging.INFO,
+ pathname="dummypath",
+ lineno=10,
+ msg="Test Message",
+ args=(),
+ exc_info=None,
+ )
+
+ class ColorConfig:
+ class option:
+ pass
+
+ tw = TerminalWriter()
+ tw.hasmarkup = True
+ formatter = ColoredLevelFormatter(tw, logfmt)
+ output = formatter.format(record)
+ # the I (of INFO) is colored
+ assert output == ("\x1b[32mI\x1b[0m Test Message")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/logging/test_reporting.py b/testing/web-platform/tests/tools/third_party/pytest/testing/logging/test_reporting.py
new file mode 100644
index 0000000000..323ff7b244
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/logging/test_reporting.py
@@ -0,0 +1,1167 @@
+import io
+import os
+import re
+from typing import cast
+
+import pytest
+from _pytest.capture import CaptureManager
+from _pytest.config import ExitCode
+from _pytest.fixtures import FixtureRequest
+from _pytest.pytester import Pytester
+from _pytest.terminal import TerminalReporter
+
+
+def test_nothing_logged(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import sys
+
+ def test_foo():
+ sys.stdout.write('text going to stdout')
+ sys.stderr.write('text going to stderr')
+ assert False
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*- Captured stdout call -*", "text going to stdout"])
+ result.stdout.fnmatch_lines(["*- Captured stderr call -*", "text going to stderr"])
+ with pytest.raises(pytest.fail.Exception):
+ result.stdout.fnmatch_lines(["*- Captured *log call -*"])
+
+
+def test_messages_logged(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import sys
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ def test_foo():
+ sys.stdout.write('text going to stdout')
+ sys.stderr.write('text going to stderr')
+ logger.info('text going to logger')
+ assert False
+ """
+ )
+ result = pytester.runpytest("--log-level=INFO")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*- Captured *log call -*", "*text going to logger*"])
+ result.stdout.fnmatch_lines(["*- Captured stdout call -*", "text going to stdout"])
+ result.stdout.fnmatch_lines(["*- Captured stderr call -*", "text going to stderr"])
+
+
+def test_root_logger_affected(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import logging
+ logger = logging.getLogger()
+
+ def test_foo():
+ logger.info('info text ' + 'going to logger')
+ logger.warning('warning text ' + 'going to logger')
+ logger.error('error text ' + 'going to logger')
+
+ assert 0
+ """
+ )
+ log_file = str(pytester.path.joinpath("pytest.log"))
+ result = pytester.runpytest("--log-level=ERROR", "--log-file=pytest.log")
+ assert result.ret == 1
+
+ # The captured log calls in the stdout section only contain the
+ # logger.error msg, because of --log-level=ERROR.
+ result.stdout.fnmatch_lines(["*error text going to logger*"])
+ stdout = result.stdout.str()
+ assert "warning text going to logger" not in stdout
+ assert "info text going to logger" not in stdout
+
+ # The log file should contain the warning and the error log messages and
+ # not the info one, because the default level of the root logger is
+ # WARNING.
+ assert os.path.isfile(log_file)
+ with open(log_file) as rfh:
+ contents = rfh.read()
+ assert "info text going to logger" not in contents
+ assert "warning text going to logger" in contents
+ assert "error text going to logger" in contents
+
+
+def test_log_cli_level_log_level_interaction(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import logging
+ logger = logging.getLogger()
+
+ def test_foo():
+ logger.debug('debug text ' + 'going to logger')
+ logger.info('info text ' + 'going to logger')
+ logger.warning('warning text ' + 'going to logger')
+ logger.error('error text ' + 'going to logger')
+ assert 0
+ """
+ )
+
+ result = pytester.runpytest("--log-cli-level=INFO", "--log-level=ERROR")
+ assert result.ret == 1
+
+ result.stdout.fnmatch_lines(
+ [
+ "*-- live log call --*",
+ "*INFO*info text going to logger",
+ "*WARNING*warning text going to logger",
+ "*ERROR*error text going to logger",
+ "=* 1 failed in *=",
+ ]
+ )
+ result.stdout.no_re_match_line("DEBUG")
+
+
+def test_setup_logging(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ def setup_function(function):
+ logger.info('text going to logger from setup')
+
+ def test_foo():
+ logger.info('text going to logger from call')
+ assert False
+ """
+ )
+ result = pytester.runpytest("--log-level=INFO")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ [
+ "*- Captured *log setup -*",
+ "*text going to logger from setup*",
+ "*- Captured *log call -*",
+ "*text going to logger from call*",
+ ]
+ )
+
+
+def test_teardown_logging(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ def test_foo():
+ logger.info('text going to logger from call')
+
+ def teardown_function(function):
+ logger.info('text going to logger from teardown')
+ assert False
+ """
+ )
+ result = pytester.runpytest("--log-level=INFO")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ [
+ "*- Captured *log call -*",
+ "*text going to logger from call*",
+ "*- Captured *log teardown -*",
+ "*text going to logger from teardown*",
+ ]
+ )
+
+
+@pytest.mark.parametrize("enabled", [True, False])
+def test_log_cli_enabled_disabled(pytester: Pytester, enabled: bool) -> None:
+ msg = "critical message logged by test"
+ pytester.makepyfile(
+ """
+ import logging
+ def test_log_cli():
+ logging.critical("{}")
+ """.format(
+ msg
+ )
+ )
+ if enabled:
+ pytester.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+ result = pytester.runpytest()
+ if enabled:
+ result.stdout.fnmatch_lines(
+ [
+ "test_log_cli_enabled_disabled.py::test_log_cli ",
+ "*-- live log call --*",
+ "CRITICAL *test_log_cli_enabled_disabled.py* critical message logged by test",
+ "PASSED*",
+ ]
+ )
+ else:
+ assert msg not in result.stdout.str()
+
+
+def test_log_cli_default_level(pytester: Pytester) -> None:
+ # Default log CLI level
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_cli(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_cli_handler.level == logging.NOTSET
+ logging.getLogger('catchlog').info("INFO message won't be shown")
+ logging.getLogger('catchlog').warning("WARNING message will be shown")
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+
+ result = pytester.runpytest()
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(
+ [
+ "test_log_cli_default_level.py::test_log_cli ",
+ "WARNING*test_log_cli_default_level.py* message will be shown*",
+ ]
+ )
+ result.stdout.no_fnmatch_line("*INFO message won't be shown*")
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+
+
+def test_log_cli_default_level_multiple_tests(
+ pytester: Pytester, request: FixtureRequest
+) -> None:
+ """Ensure we reset the first newline added by the live logger between tests"""
+ filename = request.node.name + ".py"
+ pytester.makepyfile(
+ """
+ import logging
+
+ def test_log_1():
+ logging.warning("log message from test_log_1")
+
+ def test_log_2():
+ logging.warning("log message from test_log_2")
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ f"{filename}::test_log_1 ",
+ "*WARNING*log message from test_log_1*",
+ "PASSED *50%*",
+ f"{filename}::test_log_2 ",
+ "*WARNING*log message from test_log_2*",
+ "PASSED *100%*",
+ "=* 2 passed in *=",
+ ]
+ )
+
+
+def test_log_cli_default_level_sections(
+ pytester: Pytester, request: FixtureRequest
+) -> None:
+ """Check that with live logging enable we are printing the correct headers during
+ start/setup/call/teardown/finish."""
+ filename = request.node.name + ".py"
+ pytester.makeconftest(
+ """
+ import pytest
+ import logging
+
+ def pytest_runtest_logstart():
+ logging.warning('>>>>> START >>>>>')
+
+ def pytest_runtest_logfinish():
+ logging.warning('<<<<< END <<<<<<<')
+ """
+ )
+
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+
+ @pytest.fixture
+ def fix(request):
+ logging.warning("log message from setup of {}".format(request.node.name))
+ yield
+ logging.warning("log message from teardown of {}".format(request.node.name))
+
+ def test_log_1(fix):
+ logging.warning("log message from test_log_1")
+
+ def test_log_2(fix):
+ logging.warning("log message from test_log_2")
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ f"{filename}::test_log_1 ",
+ "*-- live log start --*",
+ "*WARNING* >>>>> START >>>>>*",
+ "*-- live log setup --*",
+ "*WARNING*log message from setup of test_log_1*",
+ "*-- live log call --*",
+ "*WARNING*log message from test_log_1*",
+ "PASSED *50%*",
+ "*-- live log teardown --*",
+ "*WARNING*log message from teardown of test_log_1*",
+ "*-- live log finish --*",
+ "*WARNING* <<<<< END <<<<<<<*",
+ f"{filename}::test_log_2 ",
+ "*-- live log start --*",
+ "*WARNING* >>>>> START >>>>>*",
+ "*-- live log setup --*",
+ "*WARNING*log message from setup of test_log_2*",
+ "*-- live log call --*",
+ "*WARNING*log message from test_log_2*",
+ "PASSED *100%*",
+ "*-- live log teardown --*",
+ "*WARNING*log message from teardown of test_log_2*",
+ "*-- live log finish --*",
+ "*WARNING* <<<<< END <<<<<<<*",
+ "=* 2 passed in *=",
+ ]
+ )
+
+
+def test_live_logs_unknown_sections(
+ pytester: Pytester, request: FixtureRequest
+) -> None:
+ """Check that with live logging enable we are printing the correct headers during
+ start/setup/call/teardown/finish."""
+ filename = request.node.name + ".py"
+ pytester.makeconftest(
+ """
+ import pytest
+ import logging
+
+ def pytest_runtest_protocol(item, nextitem):
+ logging.warning('Unknown Section!')
+
+ def pytest_runtest_logstart():
+ logging.warning('>>>>> START >>>>>')
+
+ def pytest_runtest_logfinish():
+ logging.warning('<<<<< END <<<<<<<')
+ """
+ )
+
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+
+ @pytest.fixture
+ def fix(request):
+ logging.warning("log message from setup of {}".format(request.node.name))
+ yield
+ logging.warning("log message from teardown of {}".format(request.node.name))
+
+ def test_log_1(fix):
+ logging.warning("log message from test_log_1")
+
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*WARNING*Unknown Section*",
+ f"{filename}::test_log_1 ",
+ "*WARNING* >>>>> START >>>>>*",
+ "*-- live log setup --*",
+ "*WARNING*log message from setup of test_log_1*",
+ "*-- live log call --*",
+ "*WARNING*log message from test_log_1*",
+ "PASSED *100%*",
+ "*-- live log teardown --*",
+ "*WARNING*log message from teardown of test_log_1*",
+ "*WARNING* <<<<< END <<<<<<<*",
+ "=* 1 passed in *=",
+ ]
+ )
+
+
+def test_sections_single_new_line_after_test_outcome(
+ pytester: Pytester, request: FixtureRequest
+) -> None:
+ """Check that only a single new line is written between log messages during
+ teardown/finish."""
+ filename = request.node.name + ".py"
+ pytester.makeconftest(
+ """
+ import pytest
+ import logging
+
+ def pytest_runtest_logstart():
+ logging.warning('>>>>> START >>>>>')
+
+ def pytest_runtest_logfinish():
+ logging.warning('<<<<< END <<<<<<<')
+ logging.warning('<<<<< END <<<<<<<')
+ """
+ )
+
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+
+ @pytest.fixture
+ def fix(request):
+ logging.warning("log message from setup of {}".format(request.node.name))
+ yield
+ logging.warning("log message from teardown of {}".format(request.node.name))
+ logging.warning("log message from teardown of {}".format(request.node.name))
+
+ def test_log_1(fix):
+ logging.warning("log message from test_log_1")
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ f"{filename}::test_log_1 ",
+ "*-- live log start --*",
+ "*WARNING* >>>>> START >>>>>*",
+ "*-- live log setup --*",
+ "*WARNING*log message from setup of test_log_1*",
+ "*-- live log call --*",
+ "*WARNING*log message from test_log_1*",
+ "PASSED *100%*",
+ "*-- live log teardown --*",
+ "*WARNING*log message from teardown of test_log_1*",
+ "*-- live log finish --*",
+ "*WARNING* <<<<< END <<<<<<<*",
+ "*WARNING* <<<<< END <<<<<<<*",
+ "=* 1 passed in *=",
+ ]
+ )
+ assert (
+ re.search(
+ r"(.+)live log teardown(.+)\nWARNING(.+)\nWARNING(.+)",
+ result.stdout.str(),
+ re.MULTILINE,
+ )
+ is not None
+ )
+ assert (
+ re.search(
+ r"(.+)live log finish(.+)\nWARNING(.+)\nWARNING(.+)",
+ result.stdout.str(),
+ re.MULTILINE,
+ )
+ is not None
+ )
+
+
+def test_log_cli_level(pytester: Pytester) -> None:
+ # Default log file level
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_cli(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_cli_handler.level == logging.INFO
+ logging.getLogger('catchlog').debug("This log message won't be shown")
+ logging.getLogger('catchlog').info("This log message will be shown")
+ print('PASSED')
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ log_cli=true
+ """
+ )
+
+ result = pytester.runpytest("-s", "--log-cli-level=INFO")
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(
+ [
+ "*test_log_cli_level.py*This log message will be shown",
+ "PASSED", # 'PASSED' on its own line because the log message prints a new line
+ ]
+ )
+ result.stdout.no_fnmatch_line("*This log message won't be shown*")
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+
+ result = pytester.runpytest("-s", "--log-level=INFO")
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(
+ [
+ "*test_log_cli_level.py* This log message will be shown",
+ "PASSED", # 'PASSED' on its own line because the log message prints a new line
+ ]
+ )
+ result.stdout.no_fnmatch_line("*This log message won't be shown*")
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+
+
+def test_log_cli_ini_level(pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ log_cli=true
+ log_cli_level = INFO
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_cli(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_cli_handler.level == logging.INFO
+ logging.getLogger('catchlog').debug("This log message won't be shown")
+ logging.getLogger('catchlog').info("This log message will be shown")
+ print('PASSED')
+ """
+ )
+
+ result = pytester.runpytest("-s")
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(
+ [
+ "*test_log_cli_ini_level.py* This log message will be shown",
+ "PASSED", # 'PASSED' on its own line because the log message prints a new line
+ ]
+ )
+ result.stdout.no_fnmatch_line("*This log message won't be shown*")
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+
+
+@pytest.mark.parametrize(
+ "cli_args",
+ ["", "--log-level=WARNING", "--log-file-level=WARNING", "--log-cli-level=WARNING"],
+)
+def test_log_cli_auto_enable(pytester: Pytester, cli_args: str) -> None:
+ """Check that live logs are enabled if --log-level or --log-cli-level is passed on the CLI.
+ It should not be auto-enabled if the same settings are only present in the INI file.
+ """
+ pytester.makepyfile(
+ """
+ import logging
+
+ def test_log_1():
+ logging.info("log message from test_log_1 not to be shown")
+ logging.warning("log message from test_log_1")
+
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ log_level=INFO
+ log_cli_level=INFO
+ """
+ )
+
+ result = pytester.runpytest(cli_args)
+ stdout = result.stdout.str()
+ if cli_args == "--log-cli-level=WARNING":
+ result.stdout.fnmatch_lines(
+ [
+ "*::test_log_1 ",
+ "*-- live log call --*",
+ "*WARNING*log message from test_log_1*",
+ "PASSED *100%*",
+ "=* 1 passed in *=",
+ ]
+ )
+ assert "INFO" not in stdout
+ else:
+ result.stdout.fnmatch_lines(
+ ["*test_log_cli_auto_enable*100%*", "=* 1 passed in *="]
+ )
+ assert "INFO" not in stdout
+ assert "WARNING" not in stdout
+
+
+def test_log_file_cli(pytester: Pytester) -> None:
+ # Default log file level
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_file(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_file_handler.level == logging.WARNING
+ logging.getLogger('catchlog').info("This log message won't be shown")
+ logging.getLogger('catchlog').warning("This log message will be shown")
+ print('PASSED')
+ """
+ )
+
+ log_file = str(pytester.path.joinpath("pytest.log"))
+
+ result = pytester.runpytest(
+ "-s", f"--log-file={log_file}", "--log-file-level=WARNING"
+ )
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(["test_log_file_cli.py PASSED"])
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+ assert os.path.isfile(log_file)
+ with open(log_file) as rfh:
+ contents = rfh.read()
+ assert "This log message will be shown" in contents
+ assert "This log message won't be shown" not in contents
+
+
+def test_log_file_cli_level(pytester: Pytester) -> None:
+ # Default log file level
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_file(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_file_handler.level == logging.INFO
+ logging.getLogger('catchlog').debug("This log message won't be shown")
+ logging.getLogger('catchlog').info("This log message will be shown")
+ print('PASSED')
+ """
+ )
+
+ log_file = str(pytester.path.joinpath("pytest.log"))
+
+ result = pytester.runpytest("-s", f"--log-file={log_file}", "--log-file-level=INFO")
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(["test_log_file_cli_level.py PASSED"])
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+ assert os.path.isfile(log_file)
+ with open(log_file) as rfh:
+ contents = rfh.read()
+ assert "This log message will be shown" in contents
+ assert "This log message won't be shown" not in contents
+
+
+def test_log_level_not_changed_by_default(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import logging
+ def test_log_file():
+ assert logging.getLogger().level == logging.WARNING
+ """
+ )
+ result = pytester.runpytest("-s")
+ result.stdout.fnmatch_lines(["* 1 passed in *"])
+
+
+def test_log_file_ini(pytester: Pytester) -> None:
+ log_file = str(pytester.path.joinpath("pytest.log"))
+
+ pytester.makeini(
+ """
+ [pytest]
+ log_file={}
+ log_file_level=WARNING
+ """.format(
+ log_file
+ )
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_file(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_file_handler.level == logging.WARNING
+ logging.getLogger('catchlog').info("This log message won't be shown")
+ logging.getLogger('catchlog').warning("This log message will be shown")
+ print('PASSED')
+ """
+ )
+
+ result = pytester.runpytest("-s")
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(["test_log_file_ini.py PASSED"])
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+ assert os.path.isfile(log_file)
+ with open(log_file) as rfh:
+ contents = rfh.read()
+ assert "This log message will be shown" in contents
+ assert "This log message won't be shown" not in contents
+
+
+def test_log_file_ini_level(pytester: Pytester) -> None:
+ log_file = str(pytester.path.joinpath("pytest.log"))
+
+ pytester.makeini(
+ """
+ [pytest]
+ log_file={}
+ log_file_level = INFO
+ """.format(
+ log_file
+ )
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+ def test_log_file(request):
+ plugin = request.config.pluginmanager.getplugin('logging-plugin')
+ assert plugin.log_file_handler.level == logging.INFO
+ logging.getLogger('catchlog').debug("This log message won't be shown")
+ logging.getLogger('catchlog').info("This log message will be shown")
+ print('PASSED')
+ """
+ )
+
+ result = pytester.runpytest("-s")
+
+ # fnmatch_lines does an assertion internally
+ result.stdout.fnmatch_lines(["test_log_file_ini_level.py PASSED"])
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+ assert os.path.isfile(log_file)
+ with open(log_file) as rfh:
+ contents = rfh.read()
+ assert "This log message will be shown" in contents
+ assert "This log message won't be shown" not in contents
+
+
+def test_log_file_unicode(pytester: Pytester) -> None:
+ log_file = str(pytester.path.joinpath("pytest.log"))
+
+ pytester.makeini(
+ """
+ [pytest]
+ log_file={}
+ log_file_level = INFO
+ """.format(
+ log_file
+ )
+ )
+ pytester.makepyfile(
+ """\
+ import logging
+
+ def test_log_file():
+ logging.getLogger('catchlog').info("Normal message")
+ logging.getLogger('catchlog').info("├")
+ logging.getLogger('catchlog').info("Another normal message")
+ """
+ )
+
+ result = pytester.runpytest()
+
+ # make sure that we get a '0' exit code for the testsuite
+ assert result.ret == 0
+ assert os.path.isfile(log_file)
+ with open(log_file, encoding="utf-8") as rfh:
+ contents = rfh.read()
+ assert "Normal message" in contents
+ assert "├" in contents
+ assert "Another normal message" in contents
+
+
+@pytest.mark.parametrize("has_capture_manager", [True, False])
+def test_live_logging_suspends_capture(
+ has_capture_manager: bool, request: FixtureRequest
+) -> None:
+ """Test that capture manager is suspended when we emitting messages for live logging.
+
+ This tests the implementation calls instead of behavior because it is difficult/impossible to do it using
+ ``pytester`` facilities because they do their own capturing.
+
+ We parametrize the test to also make sure _LiveLoggingStreamHandler works correctly if no capture manager plugin
+ is installed.
+ """
+ import logging
+ import contextlib
+ from functools import partial
+ from _pytest.logging import _LiveLoggingStreamHandler
+
+ class MockCaptureManager:
+ calls = []
+
+ @contextlib.contextmanager
+ def global_and_fixture_disabled(self):
+ self.calls.append("enter disabled")
+ yield
+ self.calls.append("exit disabled")
+
+ class DummyTerminal(io.StringIO):
+ def section(self, *args, **kwargs):
+ pass
+
+ out_file = cast(TerminalReporter, DummyTerminal())
+ capture_manager = (
+ cast(CaptureManager, MockCaptureManager()) if has_capture_manager else None
+ )
+ handler = _LiveLoggingStreamHandler(out_file, capture_manager)
+ handler.set_when("call")
+
+ logger = logging.getLogger(__name__ + ".test_live_logging_suspends_capture")
+ logger.addHandler(handler)
+ request.addfinalizer(partial(logger.removeHandler, handler))
+
+ logger.critical("some message")
+ if has_capture_manager:
+ assert MockCaptureManager.calls == ["enter disabled", "exit disabled"]
+ else:
+ assert MockCaptureManager.calls == []
+ assert cast(io.StringIO, out_file).getvalue() == "\nsome message\n"
+
+
+def test_collection_live_logging(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import logging
+
+ logging.getLogger().info("Normal message")
+ """
+ )
+
+ result = pytester.runpytest("--log-cli-level=INFO")
+ result.stdout.fnmatch_lines(
+ ["*--- live log collection ---*", "*Normal message*", "collected 0 items"]
+ )
+
+
+@pytest.mark.parametrize("verbose", ["", "-q", "-qq"])
+def test_collection_collect_only_live_logging(pytester: Pytester, verbose: str) -> None:
+ pytester.makepyfile(
+ """
+ def test_simple():
+ pass
+ """
+ )
+
+ result = pytester.runpytest("--collect-only", "--log-cli-level=INFO", verbose)
+
+ expected_lines = []
+
+ if not verbose:
+ expected_lines.extend(
+ [
+ "*collected 1 item*",
+ "*<Module test_collection_collect_only_live_logging.py>*",
+ "*1 test collected*",
+ ]
+ )
+ elif verbose == "-q":
+ result.stdout.no_fnmatch_line("*collected 1 item**")
+ expected_lines.extend(
+ [
+ "*test_collection_collect_only_live_logging.py::test_simple*",
+ "1 test collected in [0-9].[0-9][0-9]s",
+ ]
+ )
+ elif verbose == "-qq":
+ result.stdout.no_fnmatch_line("*collected 1 item**")
+ expected_lines.extend(["*test_collection_collect_only_live_logging.py: 1*"])
+
+ result.stdout.fnmatch_lines(expected_lines)
+
+
+def test_collection_logging_to_file(pytester: Pytester) -> None:
+ log_file = str(pytester.path.joinpath("pytest.log"))
+
+ pytester.makeini(
+ """
+ [pytest]
+ log_file={}
+ log_file_level = INFO
+ """.format(
+ log_file
+ )
+ )
+
+ pytester.makepyfile(
+ """
+ import logging
+
+ logging.getLogger().info("Normal message")
+
+ def test_simple():
+ logging.getLogger().debug("debug message in test_simple")
+ logging.getLogger().info("info message in test_simple")
+ """
+ )
+
+ result = pytester.runpytest()
+
+ result.stdout.no_fnmatch_line("*--- live log collection ---*")
+
+ assert result.ret == 0
+ assert os.path.isfile(log_file)
+ with open(log_file, encoding="utf-8") as rfh:
+ contents = rfh.read()
+ assert "Normal message" in contents
+ assert "debug message in test_simple" not in contents
+ assert "info message in test_simple" in contents
+
+
+def test_log_in_hooks(pytester: Pytester) -> None:
+ log_file = str(pytester.path.joinpath("pytest.log"))
+
+ pytester.makeini(
+ """
+ [pytest]
+ log_file={}
+ log_file_level = INFO
+ log_cli=true
+ """.format(
+ log_file
+ )
+ )
+ pytester.makeconftest(
+ """
+ import logging
+
+ def pytest_runtestloop(session):
+ logging.info('runtestloop')
+
+ def pytest_sessionstart(session):
+ logging.info('sessionstart')
+
+ def pytest_sessionfinish(session, exitstatus):
+ logging.info('sessionfinish')
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*sessionstart*", "*runtestloop*", "*sessionfinish*"])
+ with open(log_file) as rfh:
+ contents = rfh.read()
+ assert "sessionstart" in contents
+ assert "runtestloop" in contents
+ assert "sessionfinish" in contents
+
+
+def test_log_in_runtest_logreport(pytester: Pytester) -> None:
+ log_file = str(pytester.path.joinpath("pytest.log"))
+
+ pytester.makeini(
+ """
+ [pytest]
+ log_file={}
+ log_file_level = INFO
+ log_cli=true
+ """.format(
+ log_file
+ )
+ )
+ pytester.makeconftest(
+ """
+ import logging
+ logger = logging.getLogger(__name__)
+
+ def pytest_runtest_logreport(report):
+ logger.info("logreport")
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_first():
+ assert True
+ """
+ )
+ pytester.runpytest()
+ with open(log_file) as rfh:
+ contents = rfh.read()
+ assert contents.count("logreport") == 3
+
+
+def test_log_set_path(pytester: Pytester) -> None:
+ report_dir_base = str(pytester.path)
+
+ pytester.makeini(
+ """
+ [pytest]
+ log_file_level = DEBUG
+ log_cli=true
+ """
+ )
+ pytester.makeconftest(
+ """
+ import os
+ import pytest
+ @pytest.hookimpl(hookwrapper=True, tryfirst=True)
+ def pytest_runtest_setup(item):
+ config = item.config
+ logging_plugin = config.pluginmanager.get_plugin("logging-plugin")
+ report_file = os.path.join({}, item._request.node.name)
+ logging_plugin.set_log_path(report_file)
+ yield
+ """.format(
+ repr(report_dir_base)
+ )
+ )
+ pytester.makepyfile(
+ """
+ import logging
+ logger = logging.getLogger("testcase-logger")
+ def test_first():
+ logger.info("message from test 1")
+ assert True
+
+ def test_second():
+ logger.debug("message from test 2")
+ assert True
+ """
+ )
+ pytester.runpytest()
+ with open(os.path.join(report_dir_base, "test_first")) as rfh:
+ content = rfh.read()
+ assert "message from test 1" in content
+
+ with open(os.path.join(report_dir_base, "test_second")) as rfh:
+ content = rfh.read()
+ assert "message from test 2" in content
+
+
+def test_colored_captured_log(pytester: Pytester) -> None:
+ """Test that the level names of captured log messages of a failing test
+ are colored."""
+ pytester.makepyfile(
+ """
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ def test_foo():
+ logger.info('text going to logger from call')
+ assert False
+ """
+ )
+ result = pytester.runpytest("--log-level=INFO", "--color=yes")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ [
+ "*-- Captured log call --*",
+ "\x1b[32mINFO \x1b[0m*text going to logger from call",
+ ]
+ )
+
+
+def test_colored_ansi_esc_caplogtext(pytester: Pytester) -> None:
+ """Make sure that caplog.text does not contain ANSI escape sequences."""
+ pytester.makepyfile(
+ """
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ def test_foo(caplog):
+ logger.info('text going to logger from call')
+ assert '\x1b' not in caplog.text
+ """
+ )
+ result = pytester.runpytest("--log-level=INFO", "--color=yes")
+ assert result.ret == 0
+
+
+def test_logging_emit_error(pytester: Pytester) -> None:
+ """An exception raised during emit() should fail the test.
+
+ The default behavior of logging is to print "Logging error"
+ to stderr with the call stack and some extra details.
+
+ pytest overrides this behavior to propagate the exception.
+ """
+ pytester.makepyfile(
+ """
+ import logging
+
+ def test_bad_log():
+ logging.warning('oops', 'first', 2)
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(failed=1)
+ result.stdout.fnmatch_lines(
+ [
+ "====* FAILURES *====",
+ "*not all arguments converted during string formatting*",
+ ]
+ )
+
+
+def test_logging_emit_error_supressed(pytester: Pytester) -> None:
+ """If logging is configured to silently ignore errors, pytest
+ doesn't propagate errors either."""
+ pytester.makepyfile(
+ """
+ import logging
+
+ def test_bad_log(monkeypatch):
+ monkeypatch.setattr(logging, 'raiseExceptions', False)
+ logging.warning('oops', 'first', 2)
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(passed=1)
+
+
+def test_log_file_cli_subdirectories_are_successfully_created(
+ pytester: Pytester,
+) -> None:
+ path = pytester.makepyfile(""" def test_logger(): pass """)
+ expected = os.path.join(os.path.dirname(str(path)), "foo", "bar")
+ result = pytester.runpytest("--log-file=foo/bar/logf.log")
+ assert "logf.log" in os.listdir(expected)
+ assert result.ret == ExitCode.OK
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/.gitignore b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/.gitignore
new file mode 100644
index 0000000000..d934447a03
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/.gitignore
@@ -0,0 +1,2 @@
+*.html
+assets/
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/README.rst b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/README.rst
new file mode 100644
index 0000000000..8f027c3bd3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/README.rst
@@ -0,0 +1,13 @@
+This folder contains tests and support files for smoke testing popular plugins against the current pytest version.
+
+The objective is to gauge if any intentional or unintentional changes in pytest break plugins.
+
+As a rule of thumb, we should add plugins here:
+
+1. That are widely used. This might be subjective in some cases, but the answer should be yes to
+ the question: *if a new release of pytest causes pytest-X to break, will this break a ton of test suites out there?*
+2. That don't have large external dependencies, such as external services.
+
+Besides adding the plugin as a dependency, we should also add a quick test which exercises some
+minimal part of the plugin: a smoke test. Also consider reusing one of the existing tests if that's
+possible.
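For illustration only, such a smoke test typically just touches one feature of the plugin, mirroring the shape of the existing pytest_mock_integration.py file added below. A minimal sketch, assuming a hypothetical plugin ``pytest-foo`` that provides a ``foo`` fixture (both names are invented here and are not part of this suite or its requirements)::

    # pytest_foo_integration.py -- illustrative sketch only; mirrors the
    # pattern of the other *_integration.py smoke tests in this directory.
    def test_foo_smoke(foo):
        # Exercise one minimal piece of the hypothetical plugin's API: the
        # assumed 'foo' fixture would be injected by pytest-foo when installed.
        assert foo is not None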
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/bdd_wallet.feature b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/bdd_wallet.feature
new file mode 100644
index 0000000000..e404c4948e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/bdd_wallet.feature
@@ -0,0 +1,9 @@
+Feature: Buy things with apple
+
+ Scenario: Buy fruits
+ Given A wallet with 50
+
+ When I buy some apples for 1
+ And I buy some bananas for 2
+
+ Then I have 47 left
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/bdd_wallet.py b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/bdd_wallet.py
new file mode 100644
index 0000000000..35927ea587
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/bdd_wallet.py
@@ -0,0 +1,39 @@
+from pytest_bdd import given
+from pytest_bdd import scenario
+from pytest_bdd import then
+from pytest_bdd import when
+
+import pytest
+
+
+@scenario("bdd_wallet.feature", "Buy fruits")
+def test_publish():
+ pass
+
+
+@pytest.fixture
+def wallet():
+ class Wallet:
+ amount = 0
+
+ return Wallet()
+
+
+@given("A wallet with 50")
+def fill_wallet(wallet):
+ wallet.amount = 50
+
+
+@when("I buy some apples for 1")
+def buy_apples(wallet):
+ wallet.amount -= 1
+
+
+@when("I buy some bananas for 2")
+def buy_bananas(wallet):
+ wallet.amount -= 2
+
+
+@then("I have 47 left")
+def check(wallet):
+ assert wallet.amount == 47
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/django_settings.py b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/django_settings.py
new file mode 100644
index 0000000000..0715f47653
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/django_settings.py
@@ -0,0 +1 @@
+SECRET_KEY = "mysecret"
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest.ini b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest.ini
new file mode 100644
index 0000000000..b42b07d145
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest.ini
@@ -0,0 +1,5 @@
+[pytest]
+addopts = --strict-markers
+filterwarnings =
+ error::pytest.PytestWarning
+ ignore:.*.fspath is deprecated and will be replaced by .*.path.*:pytest.PytestDeprecationWarning
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_anyio_integration.py b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_anyio_integration.py
new file mode 100644
index 0000000000..65c2f59366
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_anyio_integration.py
@@ -0,0 +1,8 @@
+import anyio
+
+import pytest
+
+
+@pytest.mark.anyio
+async def test_sleep():
+ await anyio.sleep(0)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_asyncio_integration.py b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_asyncio_integration.py
new file mode 100644
index 0000000000..5d2a3faccf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_asyncio_integration.py
@@ -0,0 +1,8 @@
+import asyncio
+
+import pytest
+
+
+@pytest.mark.asyncio
+async def test_sleep():
+ await asyncio.sleep(0)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_mock_integration.py b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_mock_integration.py
new file mode 100644
index 0000000000..740469d00f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_mock_integration.py
@@ -0,0 +1,2 @@
+def test_mocker(mocker):
+ mocker.MagicMock()
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_trio_integration.py b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_trio_integration.py
new file mode 100644
index 0000000000..199f7850bc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_trio_integration.py
@@ -0,0 +1,8 @@
+import trio
+
+import pytest
+
+
+@pytest.mark.trio
+async def test_sleep():
+ await trio.sleep(0)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_twisted_integration.py b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_twisted_integration.py
new file mode 100644
index 0000000000..94748d036e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/pytest_twisted_integration.py
@@ -0,0 +1,18 @@
+import pytest_twisted
+from twisted.internet.task import deferLater
+
+
+def sleep():
+ import twisted.internet.reactor
+
+ return deferLater(clock=twisted.internet.reactor, delay=0)
+
+
+@pytest_twisted.inlineCallbacks
+def test_inlineCallbacks():
+ yield sleep()
+
+
+@pytest_twisted.ensureDeferred
+async def test_inlineCallbacks_async():
+ await sleep()
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/requirements.txt b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/requirements.txt
new file mode 100644
index 0000000000..90b253cc6d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/requirements.txt
@@ -0,0 +1,15 @@
+anyio[curio,trio]==3.4.0
+django==3.2.9
+pytest-asyncio==0.16.0
+pytest-bdd==5.0.0
+pytest-cov==3.0.0
+pytest-django==4.5.1
+pytest-flakes==4.0.5
+pytest-html==3.1.1
+pytest-mock==3.6.1
+pytest-rerunfailures==10.2
+pytest-sugar==0.9.4
+pytest-trio==0.7.0
+pytest-twisted==1.13.4
+twisted==21.7.0
+pytest-xvfb==2.0.0
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/simple_integration.py b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/simple_integration.py
new file mode 100644
index 0000000000..20b2fc4b5b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/plugins_integration/simple_integration.py
@@ -0,0 +1,10 @@
+import pytest
+
+
+def test_foo():
+ assert True
+
+
+@pytest.mark.parametrize("i", range(3))
+def test_bar(i):
+ assert True
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/python/approx.py b/testing/web-platform/tests/tools/third_party/pytest/testing/python/approx.py
new file mode 100644
index 0000000000..0d411d8a6d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/python/approx.py
@@ -0,0 +1,872 @@
+import operator
+import sys
+from contextlib import contextmanager
+from decimal import Decimal
+from fractions import Fraction
+from operator import eq
+from operator import ne
+from typing import Optional
+
+import pytest
+from _pytest.pytester import Pytester
+from pytest import approx
+
+inf, nan = float("inf"), float("nan")
+
+
+@pytest.fixture
+def mocked_doctest_runner(monkeypatch):
+ import doctest
+
+ class MockedPdb:
+ def __init__(self, out):
+ pass
+
+ def set_trace(self):
+ raise NotImplementedError("not used")
+
+ def reset(self):
+ pass
+
+ def set_continue(self):
+ pass
+
+ monkeypatch.setattr("doctest._OutputRedirectingPdb", MockedPdb)
+
+ class MyDocTestRunner(doctest.DocTestRunner):
+ def report_failure(self, out, test, example, got):
+ raise AssertionError(
+ "'{}' evaluates to '{}', not '{}'".format(
+ example.source.strip(), got.strip(), example.want.strip()
+ )
+ )
+
+ return MyDocTestRunner()
+
+
+@contextmanager
+def temporary_verbosity(config, verbosity=0):
+ original_verbosity = config.getoption("verbose")
+ config.option.verbose = verbosity
+ try:
+ yield
+ finally:
+ config.option.verbose = original_verbosity
+
+
+@pytest.fixture
+def assert_approx_raises_regex(pytestconfig):
+ def do_assert(lhs, rhs, expected_message, verbosity_level=0):
+ import re
+
+ with temporary_verbosity(pytestconfig, verbosity_level):
+ with pytest.raises(AssertionError) as e:
+ assert lhs == approx(rhs)
+
+ nl = "\n"
+ obtained_message = str(e.value).splitlines()[1:]
+ assert len(obtained_message) == len(expected_message), (
+ "Regex message length doesn't match obtained.\n"
+ "Obtained:\n"
+ f"{nl.join(obtained_message)}\n\n"
+ "Expected regex:\n"
+ f"{nl.join(expected_message)}\n\n"
+ )
+
+ for i, (obtained_line, expected_line) in enumerate(
+ zip(obtained_message, expected_message)
+ ):
+ regex = re.compile(expected_line)
+ assert regex.match(obtained_line) is not None, (
+ "Unexpected error message:\n"
+ f"{nl.join(obtained_message)}\n\n"
+ "Did not match regex:\n"
+ f"{nl.join(expected_message)}\n\n"
+ f"With verbosity level = {verbosity_level}, on line {i}"
+ )
+
+ return do_assert
+
+
+SOME_FLOAT = r"[+-]?([0-9]*[.])?[0-9]+\s*"
+SOME_INT = r"[0-9]+\s*"
+
+
+class TestApprox:
+ def test_error_messages(self, assert_approx_raises_regex):
+ np = pytest.importorskip("numpy")
+
+ assert_approx_raises_regex(
+ 2.0,
+ 1.0,
+ [
+ " comparison failed",
+ f" Obtained: {SOME_FLOAT}",
+ f" Expected: {SOME_FLOAT} ± {SOME_FLOAT}",
+ ],
+ )
+
+ assert_approx_raises_regex(
+ {"a": 1.0, "b": 1000.0, "c": 1000000.0},
+ {
+ "a": 2.0,
+ "b": 1000.0,
+ "c": 3000000.0,
+ },
+ [
+ r" comparison failed. Mismatched elements: 2 / 3:",
+ rf" Max absolute difference: {SOME_FLOAT}",
+ rf" Max relative difference: {SOME_FLOAT}",
+ r" Index \| Obtained\s+\| Expected ",
+ rf" a \| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}",
+ rf" c \| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}",
+ ],
+ )
+
+ assert_approx_raises_regex(
+ [1.0, 2.0, 3.0, 4.0],
+ [1.0, 3.0, 3.0, 5.0],
+ [
+ r" comparison failed. Mismatched elements: 2 / 4:",
+ rf" Max absolute difference: {SOME_FLOAT}",
+ rf" Max relative difference: {SOME_FLOAT}",
+ r" Index \| Obtained\s+\| Expected ",
+ rf" 1 \| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}",
+ rf" 3 \| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}",
+ ],
+ )
+
+ a = np.linspace(0, 100, 20)
+ b = np.linspace(0, 100, 20)
+ a[10] += 0.5
+ assert_approx_raises_regex(
+ a,
+ b,
+ [
+ r" comparison failed. Mismatched elements: 1 / 20:",
+ rf" Max absolute difference: {SOME_FLOAT}",
+ rf" Max relative difference: {SOME_FLOAT}",
+ r" Index \| Obtained\s+\| Expected",
+ rf" \(10,\) \| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}",
+ ],
+ )
+
+ assert_approx_raises_regex(
+ np.array(
+ [
+ [[1.1987311, 12412342.3], [3.214143244, 1423412423415.677]],
+ [[1, 2], [3, 219371297321973]],
+ ]
+ ),
+ np.array(
+ [
+ [[1.12313, 12412342.3], [3.214143244, 534523542345.677]],
+ [[1, 2], [3, 7]],
+ ]
+ ),
+ [
+ r" comparison failed. Mismatched elements: 3 / 8:",
+ rf" Max absolute difference: {SOME_FLOAT}",
+ rf" Max relative difference: {SOME_FLOAT}",
+ r" Index\s+\| Obtained\s+\| Expected\s+",
+ rf" \(0, 0, 0\) \| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}",
+ rf" \(0, 1, 1\) \| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}",
+ rf" \(1, 1, 1\) \| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}",
+ ],
+ )
+
+ # Specific test for comparison with 0.0 (relative diff will be 'inf')
+ assert_approx_raises_regex(
+ [0.0],
+ [1.0],
+ [
+ r" comparison failed. Mismatched elements: 1 / 1:",
+ rf" Max absolute difference: {SOME_FLOAT}",
+ r" Max relative difference: inf",
+ r" Index \| Obtained\s+\| Expected ",
+ rf"\s*0\s*\| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}",
+ ],
+ )
+
+ assert_approx_raises_regex(
+ np.array([0.0]),
+ np.array([1.0]),
+ [
+ r" comparison failed. Mismatched elements: 1 / 1:",
+ rf" Max absolute difference: {SOME_FLOAT}",
+ r" Max relative difference: inf",
+ r" Index \| Obtained\s+\| Expected ",
+ rf"\s*\(0,\)\s*\| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}",
+ ],
+ )
+
+ def test_error_messages_invalid_args(self, assert_approx_raises_regex):
+ np = pytest.importorskip("numpy")
+ with pytest.raises(AssertionError) as e:
+ assert np.array([[1.2, 3.4], [4.0, 5.0]]) == pytest.approx(
+ np.array([[4.0], [5.0]])
+ )
+ message = "\n".join(str(e.value).split("\n")[1:])
+ assert message == "\n".join(
+ [
+ " Impossible to compare arrays with different shapes.",
+ " Shapes: (2, 1) and (2, 2)",
+ ]
+ )
+
+ with pytest.raises(AssertionError) as e:
+ assert [1.0, 2.0, 3.0] == pytest.approx([4.0, 5.0])
+ message = "\n".join(str(e.value).split("\n")[1:])
+ assert message == "\n".join(
+ [
+ " Impossible to compare lists with different sizes.",
+ " Lengths: 2 and 3",
+ ]
+ )
+
+ def test_error_messages_with_different_verbosity(self, assert_approx_raises_regex):
+ np = pytest.importorskip("numpy")
+ for v in [0, 1, 2]:
+ # Verbosity level doesn't affect the error message for scalars
+ assert_approx_raises_regex(
+ 2.0,
+ 1.0,
+ [
+ " comparison failed",
+ f" Obtained: {SOME_FLOAT}",
+ f" Expected: {SOME_FLOAT} ± {SOME_FLOAT}",
+ ],
+ verbosity_level=v,
+ )
+
+ a = np.linspace(1, 101, 20)
+ b = np.linspace(2, 102, 20)
+ assert_approx_raises_regex(
+ a,
+ b,
+ [
+ r" comparison failed. Mismatched elements: 20 / 20:",
+ rf" Max absolute difference: {SOME_FLOAT}",
+ rf" Max relative difference: {SOME_FLOAT}",
+ r" Index \| Obtained\s+\| Expected",
+ rf" \(0,\)\s+\| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}",
+ rf" \(1,\)\s+\| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}",
+ rf" \(2,\)\s+\| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}...",
+ "",
+ rf"\s*...Full output truncated \({SOME_INT} lines hidden\), use '-vv' to show",
+ ],
+ verbosity_level=0,
+ )
+
+ assert_approx_raises_regex(
+ a,
+ b,
+ [
+ r" comparison failed. Mismatched elements: 20 / 20:",
+ rf" Max absolute difference: {SOME_FLOAT}",
+ rf" Max relative difference: {SOME_FLOAT}",
+ r" Index \| Obtained\s+\| Expected",
+ ]
+ + [
+ rf" \({i},\)\s+\| {SOME_FLOAT} \| {SOME_FLOAT} ± {SOME_FLOAT}"
+ for i in range(20)
+ ],
+ verbosity_level=2,
+ )
+
+ def test_repr_string(self):
+ assert repr(approx(1.0)) == "1.0 ± 1.0e-06"
+ assert repr(approx([1.0, 2.0])) == "approx([1.0 ± 1.0e-06, 2.0 ± 2.0e-06])"
+ assert repr(approx((1.0, 2.0))) == "approx((1.0 ± 1.0e-06, 2.0 ± 2.0e-06))"
+ assert repr(approx(inf)) == "inf"
+ assert repr(approx(1.0, rel=nan)) == "1.0 ± ???"
+ assert repr(approx(1.0, rel=inf)) == "1.0 ± inf"
+
+ # Dictionaries aren't ordered, so we need to check both orders.
+ assert repr(approx({"a": 1.0, "b": 2.0})) in (
+ "approx({'a': 1.0 ± 1.0e-06, 'b': 2.0 ± 2.0e-06})",
+ "approx({'b': 2.0 ± 2.0e-06, 'a': 1.0 ± 1.0e-06})",
+ )
+
+ def test_repr_complex_numbers(self):
+ assert repr(approx(inf + 1j)) == "(inf+1j)"
+ assert repr(approx(1.0j, rel=inf)) == "1j ± inf"
+
+ # can't compute a sensible tolerance
+ assert repr(approx(nan + 1j)) == "(nan+1j) ± ???"
+
+ assert repr(approx(1.0j)) == "1j ± 1.0e-06 ∠ ±180°"
+
+ # relative tolerance is scaled to |3+4j| = 5
+ assert repr(approx(3 + 4 * 1j)) == "(3+4j) ± 5.0e-06 ∠ ±180°"
+
+ # absolute tolerance is not scaled
+ assert repr(approx(3.3 + 4.4 * 1j, abs=0.02)) == "(3.3+4.4j) ± 2.0e-02 ∠ ±180°"
+
+ @pytest.mark.parametrize(
+ "value, expected_repr_string",
+ [
+ (5.0, "approx(5.0 ± 5.0e-06)"),
+ ([5.0], "approx([5.0 ± 5.0e-06])"),
+ ([[5.0]], "approx([[5.0 ± 5.0e-06]])"),
+ ([[5.0, 6.0]], "approx([[5.0 ± 5.0e-06, 6.0 ± 6.0e-06]])"),
+ ([[5.0], [6.0]], "approx([[5.0 ± 5.0e-06], [6.0 ± 6.0e-06]])"),
+ ],
+ )
+ def test_repr_nd_array(self, value, expected_repr_string):
+ """Make sure that arrays of all different dimensions are repr'd correctly."""
+ np = pytest.importorskip("numpy")
+ np_array = np.array(value)
+ assert repr(approx(np_array)) == expected_repr_string
+
+ def test_bool(self):
+ with pytest.raises(AssertionError) as err:
+ assert approx(1)
+
+ assert err.match(r"approx\(\) is not supported in a boolean context")
+
+ def test_operator_overloading(self):
+ assert 1 == approx(1, rel=1e-6, abs=1e-12)
+ assert not (1 != approx(1, rel=1e-6, abs=1e-12))
+ assert 10 != approx(1, rel=1e-6, abs=1e-12)
+ assert not (10 == approx(1, rel=1e-6, abs=1e-12))
+
+ def test_exactly_equal(self):
+ examples = [
+ (2.0, 2.0),
+ (0.1e200, 0.1e200),
+ (1.123e-300, 1.123e-300),
+ (12345, 12345.0),
+ (0.0, -0.0),
+ (345678, 345678),
+ (Decimal("1.0001"), Decimal("1.0001")),
+ (Fraction(1, 3), Fraction(-1, -3)),
+ ]
+ for a, x in examples:
+ assert a == approx(x)
+
+ def test_opposite_sign(self):
+ examples = [(eq, 1e-100, -1e-100), (ne, 1e100, -1e100)]
+ for op, a, x in examples:
+ assert op(a, approx(x))
+
+ def test_zero_tolerance(self):
+ within_1e10 = [(1.1e-100, 1e-100), (-1.1e-100, -1e-100)]
+ for a, x in within_1e10:
+ assert x == approx(x, rel=0.0, abs=0.0)
+ assert a != approx(x, rel=0.0, abs=0.0)
+ assert a == approx(x, rel=0.0, abs=5e-101)
+ assert a != approx(x, rel=0.0, abs=5e-102)
+ assert a == approx(x, rel=5e-1, abs=0.0)
+ assert a != approx(x, rel=5e-2, abs=0.0)
+
+ @pytest.mark.parametrize(
+ ("rel", "abs"),
+ [
+ (-1e100, None),
+ (None, -1e100),
+ (1e100, -1e100),
+ (-1e100, 1e100),
+ (-1e100, -1e100),
+ ],
+ )
+ def test_negative_tolerance(
+ self, rel: Optional[float], abs: Optional[float]
+ ) -> None:
+ # Negative tolerances are not allowed.
+ with pytest.raises(ValueError):
+ 1.1 == approx(1, rel, abs)
+
+ def test_negative_tolerance_message(self):
+ # Error message for negative tolerance should include the value.
+ with pytest.raises(ValueError, match="-3"):
+ 0 == approx(1, abs=-3)
+ with pytest.raises(ValueError, match="-3"):
+ 0 == approx(1, rel=-3)
+
+ def test_inf_tolerance(self):
+ # Everything should be equal if the tolerance is infinite.
+ large_diffs = [(1, 1000), (1e-50, 1e50), (-1.0, -1e300), (0.0, 10)]
+ for a, x in large_diffs:
+ assert a != approx(x, rel=0.0, abs=0.0)
+ assert a == approx(x, rel=inf, abs=0.0)
+ assert a == approx(x, rel=0.0, abs=inf)
+ assert a == approx(x, rel=inf, abs=inf)
+
+ def test_inf_tolerance_expecting_zero(self) -> None:
+ # If the relative tolerance is zero but the expected value is infinite,
+ # the actual tolerance is a NaN, which should be an error.
+ with pytest.raises(ValueError):
+ 1 == approx(0, rel=inf, abs=0.0)
+ with pytest.raises(ValueError):
+ 1 == approx(0, rel=inf, abs=inf)
+
+ def test_nan_tolerance(self) -> None:
+ with pytest.raises(ValueError):
+ 1.1 == approx(1, rel=nan)
+ with pytest.raises(ValueError):
+ 1.1 == approx(1, abs=nan)
+ with pytest.raises(ValueError):
+ 1.1 == approx(1, rel=nan, abs=nan)
+
+ def test_reasonable_defaults(self):
+ # Whatever the defaults are, they should work for numbers close to 1
+ # that have a small amount of floating-point error.
+ assert 0.1 + 0.2 == approx(0.3)
+
+ def test_default_tolerances(self):
+ # This tests the defaults as they are currently set. If you change the
+ # defaults, this test will fail but you should feel free to change it.
+ # None of the other tests (except the doctests) should be affected by
+ # the choice of defaults.
+ examples = [
+ # Relative tolerance used.
+ (eq, 1e100 + 1e94, 1e100),
+ (ne, 1e100 + 2e94, 1e100),
+ (eq, 1e0 + 1e-6, 1e0),
+ (ne, 1e0 + 2e-6, 1e0),
+ # Absolute tolerance used.
+ (eq, 1e-100, +1e-106),
+ (eq, 1e-100, +2e-106),
+ (eq, 1e-100, 0),
+ ]
+ for op, a, x in examples:
+ assert op(a, approx(x))
+
+ def test_custom_tolerances(self):
+ assert 1e8 + 1e0 == approx(1e8, rel=5e-8, abs=5e0)
+ assert 1e8 + 1e0 == approx(1e8, rel=5e-9, abs=5e0)
+ assert 1e8 + 1e0 == approx(1e8, rel=5e-8, abs=5e-1)
+ assert 1e8 + 1e0 != approx(1e8, rel=5e-9, abs=5e-1)
+
+ assert 1e0 + 1e-8 == approx(1e0, rel=5e-8, abs=5e-8)
+ assert 1e0 + 1e-8 == approx(1e0, rel=5e-9, abs=5e-8)
+ assert 1e0 + 1e-8 == approx(1e0, rel=5e-8, abs=5e-9)
+ assert 1e0 + 1e-8 != approx(1e0, rel=5e-9, abs=5e-9)
+
+ assert 1e-8 + 1e-16 == approx(1e-8, rel=5e-8, abs=5e-16)
+ assert 1e-8 + 1e-16 == approx(1e-8, rel=5e-9, abs=5e-16)
+ assert 1e-8 + 1e-16 == approx(1e-8, rel=5e-8, abs=5e-17)
+ assert 1e-8 + 1e-16 != approx(1e-8, rel=5e-9, abs=5e-17)
+
+ def test_relative_tolerance(self):
+ within_1e8_rel = [(1e8 + 1e0, 1e8), (1e0 + 1e-8, 1e0), (1e-8 + 1e-16, 1e-8)]
+ for a, x in within_1e8_rel:
+ assert a == approx(x, rel=5e-8, abs=0.0)
+ assert a != approx(x, rel=5e-9, abs=0.0)
+
+ def test_absolute_tolerance(self):
+ within_1e8_abs = [(1e8 + 9e-9, 1e8), (1e0 + 9e-9, 1e0), (1e-8 + 9e-9, 1e-8)]
+ for a, x in within_1e8_abs:
+ assert a == approx(x, rel=0, abs=5e-8)
+ assert a != approx(x, rel=0, abs=5e-9)
+
+ def test_expecting_zero(self):
+ examples = [
+ (ne, 1e-6, 0.0),
+ (ne, -1e-6, 0.0),
+ (eq, 1e-12, 0.0),
+ (eq, -1e-12, 0.0),
+ (ne, 2e-12, 0.0),
+ (ne, -2e-12, 0.0),
+ (ne, inf, 0.0),
+ (ne, nan, 0.0),
+ ]
+ for op, a, x in examples:
+ assert op(a, approx(x, rel=0.0, abs=1e-12))
+ assert op(a, approx(x, rel=1e-6, abs=1e-12))
+
+ def test_expecting_inf(self):
+ examples = [
+ (eq, inf, inf),
+ (eq, -inf, -inf),
+ (ne, inf, -inf),
+ (ne, 0.0, inf),
+ (ne, nan, inf),
+ ]
+ for op, a, x in examples:
+ assert op(a, approx(x))
+
+ def test_expecting_nan(self):
+ examples = [
+ (eq, nan, nan),
+ (eq, -nan, -nan),
+ (eq, nan, -nan),
+ (ne, 0.0, nan),
+ (ne, inf, nan),
+ ]
+ for op, a, x in examples:
+ # Nothing is equal to NaN by default.
+ assert a != approx(x)
+
+ # If ``nan_ok=True``, then NaN is equal to NaN.
+ assert op(a, approx(x, nan_ok=True))
+
+ def test_int(self):
+ within_1e6 = [(1000001, 1000000), (-1000001, -1000000)]
+ for a, x in within_1e6:
+ assert a == approx(x, rel=5e-6, abs=0)
+ assert a != approx(x, rel=5e-7, abs=0)
+ assert approx(x, rel=5e-6, abs=0) == a
+ assert approx(x, rel=5e-7, abs=0) != a
+
+ def test_decimal(self):
+ within_1e6 = [
+ (Decimal("1.000001"), Decimal("1.0")),
+ (Decimal("-1.000001"), Decimal("-1.0")),
+ ]
+ for a, x in within_1e6:
+ assert a == approx(x)
+ assert a == approx(x, rel=Decimal("5e-6"), abs=0)
+ assert a != approx(x, rel=Decimal("5e-7"), abs=0)
+ assert approx(x, rel=Decimal("5e-6"), abs=0) == a
+ assert approx(x, rel=Decimal("5e-7"), abs=0) != a
+
+ def test_fraction(self):
+ within_1e6 = [
+ (1 + Fraction(1, 1000000), Fraction(1)),
+ (-1 - Fraction(-1, 1000000), Fraction(-1)),
+ ]
+ for a, x in within_1e6:
+ assert a == approx(x, rel=5e-6, abs=0)
+ assert a != approx(x, rel=5e-7, abs=0)
+ assert approx(x, rel=5e-6, abs=0) == a
+ assert approx(x, rel=5e-7, abs=0) != a
+
+ def test_complex(self):
+ within_1e6 = [
+ (1.000001 + 1.0j, 1.0 + 1.0j),
+ (1.0 + 1.000001j, 1.0 + 1.0j),
+ (-1.000001 + 1.0j, -1.0 + 1.0j),
+ (1.0 - 1.000001j, 1.0 - 1.0j),
+ ]
+ for a, x in within_1e6:
+ assert a == approx(x, rel=5e-6, abs=0)
+ assert a != approx(x, rel=5e-7, abs=0)
+ assert approx(x, rel=5e-6, abs=0) == a
+ assert approx(x, rel=5e-7, abs=0) != a
+
+ def test_list(self):
+ actual = [1 + 1e-7, 2 + 1e-8]
+ expected = [1, 2]
+
+ # Return false if any element is outside the tolerance.
+ assert actual == approx(expected, rel=5e-7, abs=0)
+ assert actual != approx(expected, rel=5e-8, abs=0)
+ assert approx(expected, rel=5e-7, abs=0) == actual
+ assert approx(expected, rel=5e-8, abs=0) != actual
+
+ def test_list_decimal(self):
+ actual = [Decimal("1.000001"), Decimal("2.000001")]
+ expected = [Decimal("1"), Decimal("2")]
+
+ assert actual == approx(expected)
+
+ def test_list_wrong_len(self):
+ assert [1, 2] != approx([1])
+ assert [1, 2] != approx([1, 2, 3])
+
+ def test_tuple(self):
+ actual = (1 + 1e-7, 2 + 1e-8)
+ expected = (1, 2)
+
+ # Return false if any element is outside the tolerance.
+ assert actual == approx(expected, rel=5e-7, abs=0)
+ assert actual != approx(expected, rel=5e-8, abs=0)
+ assert approx(expected, rel=5e-7, abs=0) == actual
+ assert approx(expected, rel=5e-8, abs=0) != actual
+
+ def test_tuple_wrong_len(self):
+ assert (1, 2) != approx((1,))
+ assert (1, 2) != approx((1, 2, 3))
+
+ def test_tuple_vs_other(self):
+ assert 1 != approx((1,))
+
+ def test_dict(self):
+ actual = {"a": 1 + 1e-7, "b": 2 + 1e-8}
+ # Dictionaries became ordered in python3.6, so switch up the order here
+ # to make sure it doesn't matter.
+ expected = {"b": 2, "a": 1}
+
+ # Return false if any element is outside the tolerance.
+ assert actual == approx(expected, rel=5e-7, abs=0)
+ assert actual != approx(expected, rel=5e-8, abs=0)
+ assert approx(expected, rel=5e-7, abs=0) == actual
+ assert approx(expected, rel=5e-8, abs=0) != actual
+
+ def test_dict_decimal(self):
+ actual = {"a": Decimal("1.000001"), "b": Decimal("2.000001")}
+ # Dictionaries became ordered in python3.6, so switch up the order here
+ # to make sure it doesn't matter.
+ expected = {"b": Decimal("2"), "a": Decimal("1")}
+
+ assert actual == approx(expected)
+
+ def test_dict_wrong_len(self):
+ assert {"a": 1, "b": 2} != approx({"a": 1})
+ assert {"a": 1, "b": 2} != approx({"a": 1, "c": 2})
+ assert {"a": 1, "b": 2} != approx({"a": 1, "b": 2, "c": 3})
+
+ def test_dict_nonnumeric(self):
+ assert {"a": 1.0, "b": None} == pytest.approx({"a": 1.0, "b": None})
+ assert {"a": 1.0, "b": 1} != pytest.approx({"a": 1.0, "b": None})
+
+ def test_dict_vs_other(self):
+ assert 1 != approx({"a": 0})
+
+ def test_numpy_array(self):
+ np = pytest.importorskip("numpy")
+
+ actual = np.array([1 + 1e-7, 2 + 1e-8])
+ expected = np.array([1, 2])
+
+ # Return false if any element is outside the tolerance.
+ assert actual == approx(expected, rel=5e-7, abs=0)
+ assert actual != approx(expected, rel=5e-8, abs=0)
+ assert approx(expected, rel=5e-7, abs=0) == expected
+ assert approx(expected, rel=5e-8, abs=0) != actual
+
+ # Should be able to compare lists with numpy arrays.
+ assert list(actual) == approx(expected, rel=5e-7, abs=0)
+ assert list(actual) != approx(expected, rel=5e-8, abs=0)
+ assert actual == approx(list(expected), rel=5e-7, abs=0)
+ assert actual != approx(list(expected), rel=5e-8, abs=0)
+
+ def test_numpy_tolerance_args(self):
+ """
+ Check that numpy rel/abs args are handled correctly
+ for comparison against an np.array.
+ Check both sides of the operator; hopefully it doesn't impact things.
+ Test all permutations of where the approx and np.array() can show up.
+ """
+ np = pytest.importorskip("numpy")
+ expected = 100.0
+ actual = 99.0
+ abs_diff = expected - actual
+ rel_diff = (expected - actual) / expected
+
+ tests = [
+ (eq, abs_diff, 0),
+ (eq, 0, rel_diff),
+ (ne, 0, rel_diff / 2.0), # rel diff fail
+ (ne, abs_diff / 2.0, 0), # abs diff fail
+ ]
+
+ for op, _abs, _rel in tests:
+ assert op(np.array(actual), approx(expected, abs=_abs, rel=_rel)) # a, b
+ assert op(approx(expected, abs=_abs, rel=_rel), np.array(actual)) # b, a
+
+ assert op(actual, approx(np.array(expected), abs=_abs, rel=_rel)) # a, b
+ assert op(approx(np.array(expected), abs=_abs, rel=_rel), actual) # b, a
+
+ assert op(np.array(actual), approx(np.array(expected), abs=_abs, rel=_rel))
+ assert op(approx(np.array(expected), abs=_abs, rel=_rel), np.array(actual))
+
+ def test_numpy_expecting_nan(self):
+ np = pytest.importorskip("numpy")
+ examples = [
+ (eq, nan, nan),
+ (eq, -nan, -nan),
+ (eq, nan, -nan),
+ (ne, 0.0, nan),
+ (ne, inf, nan),
+ ]
+ for op, a, x in examples:
+ # Nothing is equal to NaN by default.
+ assert np.array(a) != approx(x)
+ assert a != approx(np.array(x))
+
+ # If ``nan_ok=True``, then NaN is equal to NaN.
+ assert op(np.array(a), approx(x, nan_ok=True))
+ assert op(a, approx(np.array(x), nan_ok=True))
+
+ def test_numpy_expecting_inf(self):
+ np = pytest.importorskip("numpy")
+ examples = [
+ (eq, inf, inf),
+ (eq, -inf, -inf),
+ (ne, inf, -inf),
+ (ne, 0.0, inf),
+ (ne, nan, inf),
+ ]
+ for op, a, x in examples:
+ assert op(np.array(a), approx(x))
+ assert op(a, approx(np.array(x)))
+ assert op(np.array(a), approx(np.array(x)))
+
+ def test_numpy_array_wrong_shape(self):
+ np = pytest.importorskip("numpy")
+
+ a12 = np.array([[1, 2]])
+ a21 = np.array([[1], [2]])
+
+ assert a12 != approx(a21)
+ assert a21 != approx(a12)
+
+ def test_numpy_array_protocol(self):
+ """
+ array-like objects such as tensorflow's DeviceArray are handled like ndarray.
+ See issue #8132
+ """
+ np = pytest.importorskip("numpy")
+
+ class DeviceArray:
+ def __init__(self, value, size):
+ self.value = value
+ self.size = size
+
+ def __array__(self):
+ return self.value * np.ones(self.size)
+
+ class DeviceScalar:
+ def __init__(self, value):
+ self.value = value
+
+ def __array__(self):
+ return np.array(self.value)
+
+ expected = 1
+ actual = 1 + 1e-6
+ assert approx(expected) == DeviceArray(actual, size=1)
+ assert approx(expected) == DeviceArray(actual, size=2)
+ assert approx(expected) == DeviceScalar(actual)
+ assert approx(DeviceScalar(expected)) == actual
+ assert approx(DeviceScalar(expected)) == DeviceScalar(actual)
+
+ def test_doctests(self, mocked_doctest_runner) -> None:
+ import doctest
+
+ parser = doctest.DocTestParser()
+ assert approx.__doc__ is not None
+ test = parser.get_doctest(
+ approx.__doc__, {"approx": approx}, approx.__name__, None, None
+ )
+ mocked_doctest_runner.run(test)
+
+ def test_unicode_plus_minus(self, pytester: Pytester) -> None:
+ """
+ Comparing approx instances inside lists should not produce an error in the detailed diff.
+ Integration test for issue #2111.
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_foo():
+ assert [3] == [pytest.approx(4)]
+ """
+ )
+ expected = "4.0e-06"
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [f"*At index 0 diff: 3 != 4 ± {expected}", "=* 1 failed in *="]
+ )
+
+ @pytest.mark.parametrize(
+ "x, name",
+ [
+ pytest.param([[1]], "data structures", id="nested-list"),
+ pytest.param({"key": {"key": 1}}, "dictionaries", id="nested-dict"),
+ ],
+ )
+ def test_expected_value_type_error(self, x, name):
+ with pytest.raises(
+ TypeError,
+ match=fr"pytest.approx\(\) does not support nested {name}:",
+ ):
+ approx(x)
+
+ @pytest.mark.parametrize(
+ "x",
+ [
+ pytest.param(None),
+ pytest.param("string"),
+ pytest.param(["string"], id="nested-str"),
+ pytest.param({"key": "string"}, id="dict-with-string"),
+ ],
+ )
+ def test_nonnumeric_okay_if_equal(self, x):
+ assert x == approx(x)
+
+ @pytest.mark.parametrize(
+ "x",
+ [
+ pytest.param("string"),
+ pytest.param(["string"], id="nested-str"),
+ pytest.param({"key": "string"}, id="dict-with-string"),
+ ],
+ )
+ def test_nonnumeric_false_if_unequal(self, x):
+ """For nonnumeric types, x != pytest.approx(y) reduces to x != y"""
+ assert "ab" != approx("abc")
+ assert ["ab"] != approx(["abc"])
+ # in particular, both of these should return False
+ assert {"a": 1.0} != approx({"a": None})
+ assert {"a": None} != approx({"a": 1.0})
+
+ assert 1.0 != approx(None)
+ assert None != approx(1.0) # noqa: E711
+
+ assert 1.0 != approx([None])
+ assert None != approx([1.0]) # noqa: E711
+
+ @pytest.mark.skipif(sys.version_info < (3, 7), reason="requires ordered dicts")
+ def test_nonnumeric_dict_repr(self):
+ """Dicts with non-numerics and infinites have no tolerances"""
+ x1 = {"foo": 1.0000005, "bar": None, "foobar": inf}
+ assert (
+ repr(approx(x1))
+ == "approx({'foo': 1.0000005 ± 1.0e-06, 'bar': None, 'foobar': inf})"
+ )
+
+ def test_nonnumeric_list_repr(self):
+ """Lists with non-numerics and infinites have no tolerances"""
+ x1 = [1.0000005, None, inf]
+ assert repr(approx(x1)) == "approx([1.0000005 ± 1.0e-06, None, inf])"
+
+ @pytest.mark.parametrize(
+ "op",
+ [
+ pytest.param(operator.le, id="<="),
+ pytest.param(operator.lt, id="<"),
+ pytest.param(operator.ge, id=">="),
+ pytest.param(operator.gt, id=">"),
+ ],
+ )
+ def test_comparison_operator_type_error(self, op):
+ """pytest.approx should raise TypeError for operators other than == and != (#2003)."""
+ with pytest.raises(TypeError):
+ op(1, approx(1, rel=1e-6, abs=1e-12))
+
+ def test_numpy_array_with_scalar(self):
+ np = pytest.importorskip("numpy")
+
+ actual = np.array([1 + 1e-7, 1 - 1e-8])
+ expected = 1.0
+
+ assert actual == approx(expected, rel=5e-7, abs=0)
+ assert actual != approx(expected, rel=5e-8, abs=0)
+ assert approx(expected, rel=5e-7, abs=0) == actual
+ assert approx(expected, rel=5e-8, abs=0) != actual
+
+ def test_numpy_scalar_with_array(self):
+ np = pytest.importorskip("numpy")
+
+ actual = 1.0
+ expected = np.array([1 + 1e-7, 1 - 1e-8])
+
+ assert actual == approx(expected, rel=5e-7, abs=0)
+ assert actual != approx(expected, rel=5e-8, abs=0)
+ assert approx(expected, rel=5e-7, abs=0) == actual
+ assert approx(expected, rel=5e-8, abs=0) != actual
+
+ def test_generic_sized_iterable_object(self):
+ class MySizedIterable:
+ def __iter__(self):
+ return iter([1, 2, 3, 4])
+
+ def __len__(self):
+ return 4
+
+ expected = MySizedIterable()
+ assert [1, 2, 3, 4] == approx(expected)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/python/collect.py b/testing/web-platform/tests/tools/third_party/pytest/testing/python/collect.py
new file mode 100644
index 0000000000..ac3edd395a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/python/collect.py
@@ -0,0 +1,1493 @@
+import os
+import sys
+import textwrap
+from typing import Any
+from typing import Dict
+
+import _pytest._code
+import pytest
+from _pytest.config import ExitCode
+from _pytest.main import Session
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.nodes import Collector
+from _pytest.pytester import Pytester
+from _pytest.python import Class
+from _pytest.python import Function
+
+
+class TestModule:
+ def test_failing_import(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol("import alksdjalskdjalkjals")
+ pytest.raises(Collector.CollectError, modcol.collect)
+
+ def test_import_duplicate(self, pytester: Pytester) -> None:
+ a = pytester.mkdir("a")
+ b = pytester.mkdir("b")
+ p1 = a.joinpath("test_whatever.py")
+ p1.touch()
+ p2 = b.joinpath("test_whatever.py")
+ p2.touch()
+ # ensure we don't have it imported already
+ sys.modules.pop(p1.stem, None)
+
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*import*mismatch*",
+ "*imported*test_whatever*",
+ "*%s*" % p1,
+ "*not the same*",
+ "*%s*" % p2,
+ "*HINT*",
+ ]
+ )
+
+ def test_import_prepend_append(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ root1 = pytester.mkdir("root1")
+ root2 = pytester.mkdir("root2")
+ root1.joinpath("x456.py").touch()
+ root2.joinpath("x456.py").touch()
+ p = root2.joinpath("test_x456.py")
+ monkeypatch.syspath_prepend(str(root1))
+ p.write_text(
+ textwrap.dedent(
+ """\
+ import x456
+ def test():
+ assert x456.__file__.startswith({!r})
+ """.format(
+ str(root2)
+ )
+ )
+ )
+ with monkeypatch.context() as mp:
+ mp.chdir(root2)
+ reprec = pytester.inline_run("--import-mode=append")
+ reprec.assertoutcome(passed=0, failed=1)
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_syntax_error_in_module(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol("this is a syntax error")
+ pytest.raises(modcol.CollectError, modcol.collect)
+ pytest.raises(modcol.CollectError, modcol.collect)
+
+ def test_module_considers_pluginmanager_at_import(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol("pytest_plugins='xasdlkj',")
+ pytest.raises(ImportError, lambda: modcol.obj)
+
+ def test_invalid_test_module_name(self, pytester: Pytester) -> None:
+ a = pytester.mkdir("a")
+ a.joinpath("test_one.part1.py").touch()
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "ImportError while importing test module*test_one.part1*",
+ "Hint: make sure your test modules/packages have valid Python names.",
+ ]
+ )
+
+ @pytest.mark.parametrize("verbose", [0, 1, 2])
+ def test_show_traceback_import_error(
+ self, pytester: Pytester, verbose: int
+ ) -> None:
+ """Import errors when collecting modules should display the traceback (#1976).
+
+ With low verbosity we omit pytest and internal modules, otherwise show all traceback entries.
+ """
+ pytester.makepyfile(
+ foo_traceback_import_error="""
+ from bar_traceback_import_error import NOT_AVAILABLE
+ """,
+ bar_traceback_import_error="",
+ )
+ pytester.makepyfile(
+ """
+ import foo_traceback_import_error
+ """
+ )
+ args = ("-v",) * verbose
+ result = pytester.runpytest(*args)
+ result.stdout.fnmatch_lines(
+ [
+ "ImportError while importing test module*",
+ "Traceback:",
+ "*from bar_traceback_import_error import NOT_AVAILABLE",
+ "*cannot import name *NOT_AVAILABLE*",
+ ]
+ )
+ assert result.ret == 2
+
+ stdout = result.stdout.str()
+ if verbose == 2:
+ assert "_pytest" in stdout
+ else:
+ assert "_pytest" not in stdout
+
+ def test_show_traceback_import_error_unicode(self, pytester: Pytester) -> None:
+ """Check test modules collected which raise ImportError with unicode messages
+ are handled properly (#2336).
+ """
+ pytester.makepyfile("raise ImportError('Something bad happened ☺')")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "ImportError while importing test module*",
+ "Traceback:",
+ "*raise ImportError*Something bad happened*",
+ ]
+ )
+ assert result.ret == 2
+
+
+class TestClass:
+ def test_class_with_init_warning(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ class TestClass1(object):
+ def __init__(self):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*cannot collect test class 'TestClass1' because it has "
+ "a __init__ constructor (from: test_class_with_init_warning.py)"
+ ]
+ )
+
+ def test_class_with_new_warning(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ class TestClass1(object):
+ def __new__(self):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*cannot collect test class 'TestClass1' because it has "
+ "a __new__ constructor (from: test_class_with_new_warning.py)"
+ ]
+ )
+
+ def test_class_subclassobject(self, pytester: Pytester) -> None:
+ pytester.getmodulecol(
+ """
+ class test(object):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*collected 0*"])
+
+ def test_static_method(self, pytester: Pytester) -> None:
+ """Support for collecting staticmethod tests (#2528, #2699)"""
+ pytester.getmodulecol(
+ """
+ import pytest
+ class Test(object):
+ @staticmethod
+ def test_something():
+ pass
+
+ @pytest.fixture
+ def fix(self):
+ return 1
+
+ @staticmethod
+ def test_fix(fix):
+ assert fix == 1
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*collected 2 items*", "*2 passed in*"])
+
+ def test_setup_teardown_class_as_classmethod(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_mod1="""
+ class TestClassMethod(object):
+ @classmethod
+ def setup_class(cls):
+ pass
+ def test_1(self):
+ pass
+ @classmethod
+ def teardown_class(cls):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_issue1035_obj_has_getattr(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol(
+ """
+ class Chameleon(object):
+ def __getattr__(self, name):
+ return True
+ chameleon = Chameleon()
+ """
+ )
+ colitems = modcol.collect()
+ assert len(colitems) == 0
+
+ def test_issue1579_namedtuple(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import collections
+
+ TestCase = collections.namedtuple('TestCase', ['a'])
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ "*cannot collect test class 'TestCase' "
+ "because it has a __new__ constructor*"
+ )
+
+ def test_issue2234_property(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ class TestCase(object):
+ @property
+ def prop(self):
+ raise NotImplementedError()
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+class TestFunction:
+ def test_getmodulecollector(self, pytester: Pytester) -> None:
+ item = pytester.getitem("def test_func(): pass")
+ modcol = item.getparent(pytest.Module)
+ assert isinstance(modcol, pytest.Module)
+ assert hasattr(modcol.obj, "test_func")
+
+ @pytest.mark.filterwarnings("default")
+ def test_function_as_object_instance_ignored(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ class A(object):
+ def __call__(self, tmp_path):
+ 0/0
+
+ test_a = A()
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "collected 0 items",
+ "*test_function_as_object_instance_ignored.py:2: "
+ "*cannot collect 'test_a' because it is not a function.",
+ ]
+ )
+
+ @staticmethod
+ def make_function(pytester: Pytester, **kwargs: Any) -> Any:
+ from _pytest.fixtures import FixtureManager
+
+ config = pytester.parseconfigure()
+ session = Session.from_config(config)
+ session._fixturemanager = FixtureManager(session)
+
+ return pytest.Function.from_parent(parent=session, **kwargs)
+
+ def test_function_equality(self, pytester: Pytester) -> None:
+ def func1():
+ pass
+
+ def func2():
+ pass
+
+ f1 = self.make_function(pytester, name="name", callobj=func1)
+ assert f1 == f1
+ f2 = self.make_function(
+ pytester, name="name", callobj=func2, originalname="foobar"
+ )
+ assert f1 != f2
+
+ def test_repr_produces_actual_test_id(self, pytester: Pytester) -> None:
+ f = self.make_function(
+ pytester, name=r"test[\xe5]", callobj=self.test_repr_produces_actual_test_id
+ )
+ assert repr(f) == r"<Function test[\xe5]>"
+
+ def test_issue197_parametrize_emptyset(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('arg', [])
+ def test_function(arg):
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(skipped=1)
+
+ def test_single_tuple_unwraps_values(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize(('arg',), [(1,)])
+ def test_function(arg):
+ assert arg == 1
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_issue213_parametrize_value_no_equal(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ class A(object):
+ def __eq__(self, other):
+ raise ValueError("not possible")
+ @pytest.mark.parametrize('arg', [A()])
+ def test_function(arg):
+ assert arg.__class__.__name__ == "A"
+ """
+ )
+ reprec = pytester.inline_run("--fulltrace")
+ reprec.assertoutcome(passed=1)
+
+ def test_parametrize_with_non_hashable_values(self, pytester: Pytester) -> None:
+ """Test parametrization with non-hashable values."""
+ pytester.makepyfile(
+ """
+ archival_mapping = {
+ '1.0': {'tag': '1.0'},
+ '1.2.2a1': {'tag': 'release-1.2.2a1'},
+ }
+
+ import pytest
+ @pytest.mark.parametrize('key value'.split(),
+ archival_mapping.items())
+ def test_archival_to_version(key, value):
+ assert key in archival_mapping
+ assert value == archival_mapping[key]
+ """
+ )
+ rec = pytester.inline_run()
+ rec.assertoutcome(passed=2)
+
+ def test_parametrize_with_non_hashable_values_indirect(
+ self, pytester: Pytester
+ ) -> None:
+ """Test parametrization with non-hashable values with indirect parametrization."""
+ pytester.makepyfile(
+ """
+ archival_mapping = {
+ '1.0': {'tag': '1.0'},
+ '1.2.2a1': {'tag': 'release-1.2.2a1'},
+ }
+
+ import pytest
+
+ @pytest.fixture
+ def key(request):
+ return request.param
+
+ @pytest.fixture
+ def value(request):
+ return request.param
+
+ @pytest.mark.parametrize('key value'.split(),
+ archival_mapping.items(), indirect=True)
+ def test_archival_to_version(key, value):
+ assert key in archival_mapping
+ assert value == archival_mapping[key]
+ """
+ )
+ rec = pytester.inline_run()
+ rec.assertoutcome(passed=2)
+
+ def test_parametrize_overrides_fixture(self, pytester: Pytester) -> None:
+ """Test parametrization when parameter overrides existing fixture with same name."""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def value():
+ return 'value'
+
+ @pytest.mark.parametrize('value',
+ ['overridden'])
+ def test_overridden_via_param(value):
+ assert value == 'overridden'
+
+ @pytest.mark.parametrize('somevalue', ['overridden'])
+ def test_not_overridden(value, somevalue):
+ assert value == 'value'
+ assert somevalue == 'overridden'
+
+ @pytest.mark.parametrize('other,value', [('foo', 'overridden')])
+ def test_overridden_via_multiparam(other, value):
+ assert other == 'foo'
+ assert value == 'overridden'
+ """
+ )
+ rec = pytester.inline_run()
+ rec.assertoutcome(passed=3)
+
+ def test_parametrize_overrides_parametrized_fixture(
+ self, pytester: Pytester
+ ) -> None:
+ """Test parametrization when parameter overrides existing parametrized fixture with same name."""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2])
+ def value(request):
+ return request.param
+
+ @pytest.mark.parametrize('value',
+ ['overridden'])
+ def test_overridden_via_param(value):
+ assert value == 'overridden'
+ """
+ )
+ rec = pytester.inline_run()
+ rec.assertoutcome(passed=1)
+
+ def test_parametrize_overrides_indirect_dependency_fixture(
+ self, pytester: Pytester
+ ) -> None:
+ """Test parametrization when parameter overrides a fixture that a test indirectly depends on"""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ fix3_instantiated = False
+
+ @pytest.fixture
+ def fix1(fix2):
+ return fix2 + '1'
+
+ @pytest.fixture
+ def fix2(fix3):
+ return fix3 + '2'
+
+ @pytest.fixture
+ def fix3():
+ global fix3_instantiated
+ fix3_instantiated = True
+ return '3'
+
+ @pytest.mark.parametrize('fix2', ['2'])
+ def test_it(fix1):
+ assert fix1 == '21'
+ assert not fix3_instantiated
+ """
+ )
+ rec = pytester.inline_run()
+ rec.assertoutcome(passed=1)
+
+ def test_parametrize_with_mark(self, pytester: Pytester) -> None:
+ items = pytester.getitems(
+ """
+ import pytest
+ @pytest.mark.foo
+ @pytest.mark.parametrize('arg', [
+ 1,
+ pytest.param(2, marks=[pytest.mark.baz, pytest.mark.bar])
+ ])
+ def test_function(arg):
+ pass
+ """
+ )
+ keywords = [item.keywords for item in items]
+ assert (
+ "foo" in keywords[0]
+ and "bar" not in keywords[0]
+ and "baz" not in keywords[0]
+ )
+ assert "foo" in keywords[1] and "bar" in keywords[1] and "baz" in keywords[1]
+
+ def test_parametrize_with_empty_string_arguments(self, pytester: Pytester) -> None:
+ items = pytester.getitems(
+ """\
+ import pytest
+
+ @pytest.mark.parametrize('v', ('', ' '))
+ @pytest.mark.parametrize('w', ('', ' '))
+ def test(v, w): ...
+ """
+ )
+ names = {item.name for item in items}
+ assert names == {"test[-]", "test[ -]", "test[- ]", "test[ - ]"}
+
+ def test_function_equality_with_callspec(self, pytester: Pytester) -> None:
+ items = pytester.getitems(
+ """
+ import pytest
+ @pytest.mark.parametrize('arg', [1,2])
+ def test_function(arg):
+ pass
+ """
+ )
+ assert items[0] != items[1]
+ assert not (items[0] == items[1])
+
+ def test_pyfunc_call(self, pytester: Pytester) -> None:
+ item = pytester.getitem("def test_func(): raise ValueError")
+ config = item.config
+
+ class MyPlugin1:
+ def pytest_pyfunc_call(self):
+ raise ValueError
+
+ class MyPlugin2:
+ def pytest_pyfunc_call(self):
+ return True
+
+ config.pluginmanager.register(MyPlugin1())
+ config.pluginmanager.register(MyPlugin2())
+ config.hook.pytest_runtest_setup(item=item)
+ config.hook.pytest_pyfunc_call(pyfuncitem=item)
+
+ def test_multiple_parametrize(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol(
+ """
+ import pytest
+ @pytest.mark.parametrize('x', [0, 1])
+ @pytest.mark.parametrize('y', [2, 3])
+ def test1(x, y):
+ pass
+ """
+ )
+ colitems = modcol.collect()
+ assert colitems[0].name == "test1[2-0]"
+ assert colitems[1].name == "test1[2-1]"
+ assert colitems[2].name == "test1[3-0]"
+ assert colitems[3].name == "test1[3-1]"
+
+ def test_issue751_multiple_parametrize_with_ids(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol(
+ """
+ import pytest
+ @pytest.mark.parametrize('x', [0], ids=['c'])
+ @pytest.mark.parametrize('y', [0, 1], ids=['a', 'b'])
+ class Test(object):
+ def test1(self, x, y):
+ pass
+ def test2(self, x, y):
+ pass
+ """
+ )
+ colitems = modcol.collect()[0].collect()
+ assert colitems[0].name == "test1[a-c]"
+ assert colitems[1].name == "test1[b-c]"
+ assert colitems[2].name == "test2[a-c]"
+ assert colitems[3].name == "test2[b-c]"
+
+ def test_parametrize_skipif(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ m = pytest.mark.skipif('True')
+
+ @pytest.mark.parametrize('x', [0, 1, pytest.param(2, marks=m)])
+ def test_skip_if(x):
+ assert x < 2
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 2 passed, 1 skipped in *"])
+
+ def test_parametrize_skip(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ m = pytest.mark.skip('')
+
+ @pytest.mark.parametrize('x', [0, 1, pytest.param(2, marks=m)])
+ def test_skip(x):
+ assert x < 2
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 2 passed, 1 skipped in *"])
+
+ def test_parametrize_skipif_no_skip(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ m = pytest.mark.skipif('False')
+
+ @pytest.mark.parametrize('x', [0, 1, m(2)])
+ def test_skipif_no_skip(x):
+ assert x < 2
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 1 failed, 2 passed in *"])
+
+ def test_parametrize_xfail(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ m = pytest.mark.xfail('True')
+
+ @pytest.mark.parametrize('x', [0, 1, pytest.param(2, marks=m)])
+ def test_xfail(x):
+ assert x < 2
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 2 passed, 1 xfailed in *"])
+
+ def test_parametrize_passed(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ m = pytest.mark.xfail('True')
+
+ @pytest.mark.parametrize('x', [0, 1, pytest.param(2, marks=m)])
+ def test_xfail(x):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 2 passed, 1 xpassed in *"])
+
+ def test_parametrize_xfail_passed(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ m = pytest.mark.xfail('False')
+
+ @pytest.mark.parametrize('x', [0, 1, m(2)])
+ def test_passed(x):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 3 passed in *"])
+
+ def test_function_originalname(self, pytester: Pytester) -> None:
+ items = pytester.getitems(
+ """
+ import pytest
+
+ @pytest.mark.parametrize('arg', [1,2])
+ def test_func(arg):
+ pass
+
+ def test_no_param():
+ pass
+ """
+ )
+ originalnames = []
+ for x in items:
+ assert isinstance(x, pytest.Function)
+ originalnames.append(x.originalname)
+ assert originalnames == [
+ "test_func",
+ "test_func",
+ "test_no_param",
+ ]
+
+ def test_function_with_square_brackets(self, pytester: Pytester) -> None:
+ """Check that functions with square brackets don't cause trouble."""
+ p1 = pytester.makepyfile(
+ """
+ locals()["test_foo[name]"] = lambda: None
+ """
+ )
+ result = pytester.runpytest("-v", str(p1))
+ result.stdout.fnmatch_lines(
+ [
+ "test_function_with_square_brackets.py::test_foo[[]name[]] PASSED *",
+ "*= 1 passed in *",
+ ]
+ )
+
+
+class TestSorting:
+ def test_check_equality(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol(
+ """
+ def test_pass(): pass
+ def test_fail(): assert 0
+ """
+ )
+ fn1 = pytester.collect_by_name(modcol, "test_pass")
+ assert isinstance(fn1, pytest.Function)
+ fn2 = pytester.collect_by_name(modcol, "test_pass")
+ assert isinstance(fn2, pytest.Function)
+
+ assert fn1 == fn2
+ assert fn1 != modcol
+ assert hash(fn1) == hash(fn2)
+
+ fn3 = pytester.collect_by_name(modcol, "test_fail")
+ assert isinstance(fn3, pytest.Function)
+ assert not (fn1 == fn3)
+ assert fn1 != fn3
+
+ for fn in fn1, fn2, fn3:
+ assert fn != 3 # type: ignore[comparison-overlap]
+ assert fn != modcol
+ assert fn != [1, 2, 3] # type: ignore[comparison-overlap]
+ assert [1, 2, 3] != fn # type: ignore[comparison-overlap]
+ assert modcol != fn
+
+ def test_allow_sane_sorting_for_decorators(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol(
+ """
+ def dec(f):
+ g = lambda: f(2)
+ g.place_as = f
+ return g
+
+
+ def test_b(y):
+ pass
+ test_b = dec(test_b)
+
+ def test_a(y):
+ pass
+ test_a = dec(test_a)
+ """
+ )
+ colitems = modcol.collect()
+ assert len(colitems) == 2
+ assert [item.name for item in colitems] == ["test_b", "test_a"]
+
+ def test_ordered_by_definition_order(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """\
+ class Test1:
+ def test_foo(): pass
+ def test_bar(): pass
+ class Test2:
+ def test_foo(): pass
+ test_bar = Test1.test_bar
+ class Test3(Test2):
+ def test_baz(): pass
+ """
+ )
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(
+ [
+ "*Class Test1*",
+ "*Function test_foo*",
+ "*Function test_bar*",
+ "*Class Test2*",
+ # previously the order was flipped due to Test1.test_bar reference
+ "*Function test_foo*",
+ "*Function test_bar*",
+ "*Class Test3*",
+ "*Function test_foo*",
+ "*Function test_bar*",
+ "*Function test_baz*",
+ ]
+ )
+
+
+class TestConftestCustomization:
+ def test_pytest_pycollect_module(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ class MyModule(pytest.Module):
+ pass
+ def pytest_pycollect_makemodule(module_path, parent):
+ if module_path.name == "test_xyz.py":
+ return MyModule.from_parent(path=module_path, parent=parent)
+ """
+ )
+ pytester.makepyfile("def test_some(): pass")
+ pytester.makepyfile(test_xyz="def test_func(): pass")
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*<Module*test_pytest*", "*<MyModule*xyz*"])
+
+ def test_customized_pymakemodule_issue205_subdir(self, pytester: Pytester) -> None:
+ b = pytester.path.joinpath("a", "b")
+ b.mkdir(parents=True)
+ b.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_pycollect_makemodule():
+ outcome = yield
+ mod = outcome.get_result()
+ mod.obj.hello = "world"
+ """
+ )
+ )
+ b.joinpath("test_module.py").write_text(
+ textwrap.dedent(
+ """\
+ def test_hello():
+ assert hello == "world"
+ """
+ )
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_customized_pymakeitem(self, pytester: Pytester) -> None:
+ b = pytester.path.joinpath("a", "b")
+ b.mkdir(parents=True)
+ b.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_pycollect_makeitem():
+ outcome = yield
+ if outcome.excinfo is None:
+ result = outcome.get_result()
+ if result:
+ for func in result:
+ func._some123 = "world"
+ """
+ )
+ )
+ b.joinpath("test_module.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ @pytest.fixture()
+ def obj(request):
+ return request.node._some123
+ def test_hello(obj):
+ assert obj == "world"
+ """
+ )
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_pytest_pycollect_makeitem(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ class MyFunction(pytest.Function):
+ pass
+ def pytest_pycollect_makeitem(collector, name, obj):
+ if name == "some":
+ return MyFunction.from_parent(name=name, parent=collector)
+ """
+ )
+ pytester.makepyfile("def some(): pass")
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*MyFunction*some*"])
+
+ def test_issue2369_collect_module_fileext(self, pytester: Pytester) -> None:
+ """Ensure we can collect files with weird file extensions as Python
+ modules (#2369)"""
+ # We'll implement a little finder and loader to import files containing
+ # Python source code whose file extension is ".narf".
+ pytester.makeconftest(
+ """
+ import sys, os, imp
+ from _pytest.python import Module
+
+ class Loader(object):
+ def load_module(self, name):
+ return imp.load_source(name, name + ".narf")
+ class Finder(object):
+ def find_module(self, name, path=None):
+ if os.path.exists(name + ".narf"):
+ return Loader()
+ sys.meta_path.append(Finder())
+
+ def pytest_collect_file(file_path, parent):
+ if file_path.suffix == ".narf":
+ return Module.from_parent(path=file_path, parent=parent)"""
+ )
+ pytester.makefile(
+ ".narf",
+ """\
+ def test_something():
+ assert 1 + 1 == 2""",
+ )
+ # Use runpytest_subprocess, since we're futzing with sys.meta_path.
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_early_ignored_attributes(self, pytester: Pytester) -> None:
+ """Builtin attributes should be ignored early on, even if
+ configuration would otherwise allow them.
+
+        This tests a performance optimization rather than correctness,
+        although it also verifies that PytestCollectionWarning is not
+        raised where it otherwise would have been.
+ """
+ pytester.makeini(
+ """
+ [pytest]
+ python_classes=*
+ python_functions=*
+ """
+ )
+ pytester.makepyfile(
+ """
+ class TestEmpty:
+ pass
+ test_empty = TestEmpty()
+ def test_real():
+ pass
+ """
+ )
+ items, rec = pytester.inline_genitems()
+ assert rec.ret == 0
+ assert len(items) == 1
+
+
+def test_setup_only_available_in_subdir(pytester: Pytester) -> None:
+ sub1 = pytester.mkpydir("sub1")
+ sub2 = pytester.mkpydir("sub2")
+ sub1.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ def pytest_runtest_setup(item):
+ assert item.path.stem == "test_in_sub1"
+ def pytest_runtest_call(item):
+ assert item.path.stem == "test_in_sub1"
+ def pytest_runtest_teardown(item):
+ assert item.path.stem == "test_in_sub1"
+ """
+ )
+ )
+ sub2.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ def pytest_runtest_setup(item):
+ assert item.path.stem == "test_in_sub2"
+ def pytest_runtest_call(item):
+ assert item.path.stem == "test_in_sub2"
+ def pytest_runtest_teardown(item):
+ assert item.path.stem == "test_in_sub2"
+ """
+ )
+ )
+ sub1.joinpath("test_in_sub1.py").write_text("def test_1(): pass")
+ sub2.joinpath("test_in_sub2.py").write_text("def test_2(): pass")
+ result = pytester.runpytest("-v", "-s")
+ result.assert_outcomes(passed=2)
+
+
+def test_modulecol_roundtrip(pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol("pass", withinit=False)
+ trail = modcol.nodeid
+ newcol = modcol.session.perform_collect([trail], genitems=0)[0]
+ assert modcol.name == newcol.name
+
+
+class TestTracebackCutting:
+ def test_skip_simple(self):
+ with pytest.raises(pytest.skip.Exception) as excinfo:
+ pytest.skip("xxx")
+ assert excinfo.traceback[-1].frame.code.name == "skip"
+ assert excinfo.traceback[-1].ishidden()
+ assert excinfo.traceback[-2].frame.code.name == "test_skip_simple"
+ assert not excinfo.traceback[-2].ishidden()
+
+ def test_traceback_argsetup(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def hello(request):
+ raise ValueError("xyz")
+ """
+ )
+ p = pytester.makepyfile("def test(hello): pass")
+ result = pytester.runpytest(p)
+ assert result.ret != 0
+ out = result.stdout.str()
+ assert "xyz" in out
+ assert "conftest.py:5: ValueError" in out
+ numentries = out.count("_ _ _") # separator for traceback entries
+ assert numentries == 0
+
+ result = pytester.runpytest("--fulltrace", p)
+ out = result.stdout.str()
+ assert "conftest.py:5: ValueError" in out
+ numentries = out.count("_ _ _ _") # separator for traceback entries
+ assert numentries > 3
+
+ def test_traceback_error_during_import(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ x = 1
+ x = 2
+ x = 17
+ asd
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret != 0
+ out = result.stdout.str()
+ assert "x = 1" not in out
+ assert "x = 2" not in out
+ result.stdout.fnmatch_lines([" *asd*", "E*NameError*"])
+ result = pytester.runpytest("--fulltrace")
+ out = result.stdout.str()
+ assert "x = 1" in out
+ assert "x = 2" in out
+ result.stdout.fnmatch_lines([">*asd*", "E*NameError*"])
+
+ def test_traceback_filter_error_during_fixture_collection(
+ self, pytester: Pytester
+ ) -> None:
+ """Integration test for issue #995."""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ def fail_me(func):
+ ns = {}
+ exec('def w(): raise ValueError("fail me")', ns)
+ return ns['w']
+
+ @pytest.fixture(scope='class')
+ @fail_me
+ def fail_fixture():
+ pass
+
+ def test_failing_fixture(fail_fixture):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret != 0
+ out = result.stdout.str()
+ assert "INTERNALERROR>" not in out
+ result.stdout.fnmatch_lines(["*ValueError: fail me*", "* 1 error in *"])
+
+ def test_filter_traceback_generated_code(self) -> None:
+ """Test that filter_traceback() works with the fact that
+        _pytest._code.code.Code.path attribute might return a str object.
+
+ In this case, one of the entries on the traceback was produced by
+ dynamically generated code.
+ See: https://bitbucket.org/pytest-dev/py/issues/71
+ This fixes #995.
+ """
+ from _pytest._code import filter_traceback
+
+ tb = None
+ try:
+ ns: Dict[str, Any] = {}
+ exec("def foo(): raise ValueError", ns)
+ ns["foo"]()
+ except ValueError:
+ _, _, tb = sys.exc_info()
+
+ assert tb is not None
+ traceback = _pytest._code.Traceback(tb)
+ assert isinstance(traceback[-1].path, str)
+ assert not filter_traceback(traceback[-1])
+
+ def test_filter_traceback_path_no_longer_valid(self, pytester: Pytester) -> None:
+ """Test that filter_traceback() works with the fact that
+        _pytest._code.code.Code.path attribute might return a str object.
+
+ In this case, one of the files in the traceback no longer exists.
+ This fixes #1133.
+ """
+ from _pytest._code import filter_traceback
+
+ pytester.syspathinsert()
+ pytester.makepyfile(
+ filter_traceback_entry_as_str="""
+ def foo():
+ raise ValueError
+ """
+ )
+ tb = None
+ try:
+ import filter_traceback_entry_as_str
+
+ filter_traceback_entry_as_str.foo()
+ except ValueError:
+ _, _, tb = sys.exc_info()
+
+ assert tb is not None
+ pytester.path.joinpath("filter_traceback_entry_as_str.py").unlink()
+ traceback = _pytest._code.Traceback(tb)
+ assert isinstance(traceback[-1].path, str)
+ assert filter_traceback(traceback[-1])
+
+
+class TestReportInfo:
+ def test_itemreport_reportinfo(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ class MyFunction(pytest.Function):
+ def reportinfo(self):
+ return "ABCDE", 42, "custom"
+ def pytest_pycollect_makeitem(collector, name, obj):
+ if name == "test_func":
+ return MyFunction.from_parent(name=name, parent=collector)
+ """
+ )
+ item = pytester.getitem("def test_func(): pass")
+ item.config.pluginmanager.getplugin("runner")
+ assert item.location == ("ABCDE", 42, "custom")
+
+ def test_func_reportinfo(self, pytester: Pytester) -> None:
+ item = pytester.getitem("def test_func(): pass")
+ path, lineno, modpath = item.reportinfo()
+ assert os.fspath(path) == str(item.path)
+ assert lineno == 0
+ assert modpath == "test_func"
+
+ def test_class_reportinfo(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol(
+ """
+ # lineno 0
+ class TestClass(object):
+ def test_hello(self): pass
+ """
+ )
+ classcol = pytester.collect_by_name(modcol, "TestClass")
+ assert isinstance(classcol, Class)
+ path, lineno, msg = classcol.reportinfo()
+ assert os.fspath(path) == str(modcol.path)
+ assert lineno == 1
+ assert msg == "TestClass"
+
+ @pytest.mark.filterwarnings(
+ "ignore:usage of Generator.Function is deprecated, please use pytest.Function instead"
+ )
+ def test_reportinfo_with_nasty_getattr(self, pytester: Pytester) -> None:
+ # https://github.com/pytest-dev/pytest/issues/1204
+ modcol = pytester.getmodulecol(
+ """
+ # lineno 0
+ class TestClass:
+ def __getattr__(self, name):
+ return "this is not an int"
+
+ def __class_getattr__(cls, name):
+ return "this is not an int"
+
+ def intest_foo(self):
+ pass
+
+ def test_bar(self):
+ pass
+ """
+ )
+ classcol = pytester.collect_by_name(modcol, "TestClass")
+ assert isinstance(classcol, Class)
+ path, lineno, msg = classcol.reportinfo()
+ func = list(classcol.collect())[0]
+ assert isinstance(func, Function)
+ path, lineno, msg = func.reportinfo()
+
+
+def test_customized_python_discovery(pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ python_files=check_*.py
+ python_classes=Check
+ python_functions=check
+ """
+ )
+ p = pytester.makepyfile(
+ """
+ def check_simple():
+ pass
+ class CheckMyApp(object):
+ def check_meth(self):
+ pass
+ """
+ )
+ p2 = p.with_name(p.name.replace("test", "check"))
+ p.rename(p2)
+ result = pytester.runpytest("--collect-only", "-s")
+ result.stdout.fnmatch_lines(
+ ["*check_customized*", "*check_simple*", "*CheckMyApp*", "*check_meth*"]
+ )
+
+ result = pytester.runpytest()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_customized_python_discovery_functions(pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ python_functions=_test
+ """
+ )
+ pytester.makepyfile(
+ """
+ def _test_underscore():
+ pass
+ """
+ )
+ result = pytester.runpytest("--collect-only", "-s")
+ result.stdout.fnmatch_lines(["*_test_underscore*"])
+
+ result = pytester.runpytest()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_unorderable_types(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ class TestJoinEmpty(object):
+ pass
+
+ def make_test():
+ class Test(object):
+ pass
+ Test.__name__ = "TestFoo"
+ return Test
+ TestFoo = make_test()
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.no_fnmatch_line("*TypeError*")
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+@pytest.mark.filterwarnings("default::pytest.PytestCollectionWarning")
+def test_dont_collect_non_function_callable(pytester: Pytester) -> None:
+ """Test for issue https://github.com/pytest-dev/pytest/issues/331
+
+ In this case an INTERNALERROR occurred trying to report the failure of
+ a test like this one because pytest failed to get the source lines.
+ """
+ pytester.makepyfile(
+ """
+ class Oh(object):
+ def __call__(self):
+ pass
+
+ test_a = Oh()
+
+ def test_real():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*collected 1 item*",
+ "*test_dont_collect_non_function_callable.py:2: *cannot collect 'test_a' because it is not a function*",
+ "*1 passed, 1 warning in *",
+ ]
+ )
+
+
+def test_class_injection_does_not_break_collection(pytester: Pytester) -> None:
+ """Tests whether injection during collection time will terminate testing.
+
+ In this case the error should not occur if the TestClass itself
+ is modified during collection time, and the original method list
+ is still used for collection.
+ """
+ pytester.makeconftest(
+ """
+ from test_inject import TestClass
+ def pytest_generate_tests(metafunc):
+ TestClass.changed_var = {}
+ """
+ )
+ pytester.makepyfile(
+ test_inject='''
+ class TestClass(object):
+ def test_injection(self):
+ """Test being parametrized."""
+ pass
+ '''
+ )
+ result = pytester.runpytest()
+ assert (
+ "RuntimeError: dictionary changed size during iteration"
+ not in result.stdout.str()
+ )
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_syntax_error_with_non_ascii_chars(pytester: Pytester) -> None:
+ """Fix decoding issue while formatting SyntaxErrors during collection (#578)."""
+ pytester.makepyfile("☃")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*ERROR collecting*", "*SyntaxError*", "*1 error in*"])
+
+
+def test_collect_error_with_fulltrace(pytester: Pytester) -> None:
+ pytester.makepyfile("assert 0")
+ result = pytester.runpytest("--fulltrace")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 0 items / 1 error",
+ "",
+ "*= ERRORS =*",
+ "*_ ERROR collecting test_collect_error_with_fulltrace.py _*",
+ "",
+ "> assert 0",
+ "E assert 0",
+ "",
+ "test_collect_error_with_fulltrace.py:1: AssertionError",
+ "*! Interrupted: 1 error during collection !*",
+ ]
+ )
+
+
+def test_skip_duplicates_by_default(pytester: Pytester) -> None:
+ """Test for issue https://github.com/pytest-dev/pytest/issues/1609 (#1609)
+
+ Ignore duplicate directories.
+ """
+ a = pytester.mkdir("a")
+ fh = a.joinpath("test_a.py")
+ fh.write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ def test_real():
+ pass
+ """
+ )
+ )
+ result = pytester.runpytest(str(a), str(a))
+ result.stdout.fnmatch_lines(["*collected 1 item*"])
+
+
+def test_keep_duplicates(pytester: Pytester) -> None:
+ """Test for issue https://github.com/pytest-dev/pytest/issues/1609 (#1609)
+
+ Use --keep-duplicates to collect tests from duplicate directories.
+ """
+ a = pytester.mkdir("a")
+ fh = a.joinpath("test_a.py")
+ fh.write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ def test_real():
+ pass
+ """
+ )
+ )
+ result = pytester.runpytest("--keep-duplicates", str(a), str(a))
+ result.stdout.fnmatch_lines(["*collected 2 item*"])
+
+
+def test_package_collection_infinite_recursion(pytester: Pytester) -> None:
+ pytester.copy_example("collect/package_infinite_recursion")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_package_collection_init_given_as_argument(pytester: Pytester) -> None:
+ """Regression test for #3749"""
+ p = pytester.copy_example("collect/package_init_given_as_arg")
+ result = pytester.runpytest(p / "pkg" / "__init__.py")
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_package_with_modules(pytester: Pytester) -> None:
+ """
+ .
+ └── root
+ ├── __init__.py
+ ├── sub1
+ │ ├── __init__.py
+ │ └── sub1_1
+ │ ├── __init__.py
+ │ └── test_in_sub1.py
+ └── sub2
+ └── test
+ └── test_in_sub2.py
+
+ """
+ root = pytester.mkpydir("root")
+ sub1 = root.joinpath("sub1")
+ sub1_test = sub1.joinpath("sub1_1")
+ sub1_test.mkdir(parents=True)
+ for d in (sub1, sub1_test):
+ d.joinpath("__init__.py").touch()
+
+ sub2 = root.joinpath("sub2")
+ sub2_test = sub2.joinpath("test")
+ sub2_test.mkdir(parents=True)
+
+ sub1_test.joinpath("test_in_sub1.py").write_text("def test_1(): pass")
+ sub2_test.joinpath("test_in_sub2.py").write_text("def test_2(): pass")
+
+ # Execute from .
+ result = pytester.runpytest("-v", "-s")
+ result.assert_outcomes(passed=2)
+
+ # Execute from . with one argument "root"
+ result = pytester.runpytest("-v", "-s", "root")
+ result.assert_outcomes(passed=2)
+
+ # Chdir into package's root and execute with no args
+ os.chdir(root)
+ result = pytester.runpytest("-v", "-s")
+ result.assert_outcomes(passed=2)
+
+
+def test_package_ordering(pytester: Pytester) -> None:
+ """
+ .
+ └── root
+ ├── Test_root.py
+ ├── __init__.py
+ ├── sub1
+ │ ├── Test_sub1.py
+ │ └── __init__.py
+ └── sub2
+ └── test
+ └── test_sub2.py
+
+ """
+ pytester.makeini(
+ """
+ [pytest]
+ python_files=*.py
+ """
+ )
+ root = pytester.mkpydir("root")
+ sub1 = root.joinpath("sub1")
+ sub1.mkdir()
+ sub1.joinpath("__init__.py").touch()
+ sub2 = root.joinpath("sub2")
+ sub2_test = sub2.joinpath("test")
+ sub2_test.mkdir(parents=True)
+
+ root.joinpath("Test_root.py").write_text("def test_1(): pass")
+ sub1.joinpath("Test_sub1.py").write_text("def test_2(): pass")
+ sub2_test.joinpath("test_sub2.py").write_text("def test_3(): pass")
+
+ # Execute from .
+ result = pytester.runpytest("-v", "-s")
+ result.assert_outcomes(passed=3)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/python/fixtures.py b/testing/web-platform/tests/tools/third_party/pytest/testing/python/fixtures.py
new file mode 100644
index 0000000000..f29ca1dfa5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/python/fixtures.py
@@ -0,0 +1,4474 @@
+import os
+import sys
+import textwrap
+from pathlib import Path
+
+import pytest
+from _pytest import fixtures
+from _pytest.compat import getfuncargnames
+from _pytest.config import ExitCode
+from _pytest.fixtures import FixtureRequest
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import get_public_names
+from _pytest.pytester import Pytester
+from _pytest.python import Function
+
+
+def test_getfuncargnames_functions():
+ """Test getfuncargnames for normal functions"""
+
+ def f():
+ raise NotImplementedError()
+
+ assert not getfuncargnames(f)
+
+ def g(arg):
+ raise NotImplementedError()
+
+ assert getfuncargnames(g) == ("arg",)
+
+ def h(arg1, arg2="hello"):
+ raise NotImplementedError()
+
+ assert getfuncargnames(h) == ("arg1",)
+
+ def j(arg1, arg2, arg3="hello"):
+ raise NotImplementedError()
+
+ assert getfuncargnames(j) == ("arg1", "arg2")
+
+
+def test_getfuncargnames_methods():
+ """Test getfuncargnames for normal methods"""
+
+ class A:
+ def f(self, arg1, arg2="hello"):
+ raise NotImplementedError()
+
+ assert getfuncargnames(A().f) == ("arg1",)
+
+
+def test_getfuncargnames_staticmethod():
+ """Test getfuncargnames for staticmethods"""
+
+ class A:
+ @staticmethod
+ def static(arg1, arg2, x=1):
+ raise NotImplementedError()
+
+ assert getfuncargnames(A.static, cls=A) == ("arg1", "arg2")
+
+
+def test_getfuncargnames_staticmethod_inherited() -> None:
+ """Test getfuncargnames for inherited staticmethods (#8061)"""
+
+ class A:
+ @staticmethod
+ def static(arg1, arg2, x=1):
+ raise NotImplementedError()
+
+ class B(A):
+ pass
+
+ assert getfuncargnames(B.static, cls=B) == ("arg1", "arg2")
+
+
+def test_getfuncargnames_partial():
+ """Check getfuncargnames for methods defined with functools.partial (#5701)"""
+ import functools
+
+ def check(arg1, arg2, i):
+ raise NotImplementedError()
+
+ class T:
+ test_ok = functools.partial(check, i=2)
+
+ values = getfuncargnames(T().test_ok, name="test_ok")
+ assert values == ("arg1", "arg2")
+
+
+def test_getfuncargnames_staticmethod_partial():
+ """Check getfuncargnames for staticmethods defined with functools.partial (#5701)"""
+ import functools
+
+ def check(arg1, arg2, i):
+ raise NotImplementedError()
+
+ class T:
+ test_ok = staticmethod(functools.partial(check, i=2))
+
+ values = getfuncargnames(T().test_ok, name="test_ok")
+ assert values == ("arg1", "arg2")
+
+
+@pytest.mark.pytester_example_path("fixtures/fill_fixtures")
+class TestFillFixtures:
+ def test_fillfuncargs_exposed(self):
+ # used by oejskit, kept for compatibility
+ assert pytest._fillfuncargs == fixtures._fillfuncargs
+
+ def test_funcarg_lookupfails(self, pytester: Pytester) -> None:
+ pytester.copy_example()
+ result = pytester.runpytest() # "--collect-only")
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(
+ """
+ *def test_func(some)*
+ *fixture*some*not found*
+ *xyzsomething*
+ """
+ )
+
+ def test_detect_recursive_dependency_error(self, pytester: Pytester) -> None:
+ pytester.copy_example()
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*recursive dependency involving fixture 'fix1' detected*"]
+ )
+
+ def test_funcarg_basic(self, pytester: Pytester) -> None:
+ pytester.copy_example()
+ item = pytester.getitem(Path("test_funcarg_basic.py"))
+ assert isinstance(item, Function)
+        # Execute the item's setup, which fills fixtures.
+ item.session._setupstate.setup(item)
+ del item.funcargs["request"]
+ assert len(get_public_names(item.funcargs)) == 2
+ assert item.funcargs["some"] == "test_func"
+ assert item.funcargs["other"] == 42
+
+ def test_funcarg_lookup_modulelevel(self, pytester: Pytester) -> None:
+ pytester.copy_example()
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_funcarg_lookup_classlevel(self, pytester: Pytester) -> None:
+ p = pytester.copy_example()
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_conftest_funcargs_only_available_in_subdir(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.copy_example()
+ result = pytester.runpytest("-v")
+ result.assert_outcomes(passed=2)
+
+ def test_extend_fixture_module_class(self, pytester: Pytester) -> None:
+ testfile = pytester.copy_example()
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = pytester.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_extend_fixture_conftest_module(self, pytester: Pytester) -> None:
+ p = pytester.copy_example()
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = pytester.runpytest(str(next(Path(str(p)).rglob("test_*.py"))))
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_extend_fixture_conftest_conftest(self, pytester: Pytester) -> None:
+ p = pytester.copy_example()
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = pytester.runpytest(str(next(Path(str(p)).rglob("test_*.py"))))
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_extend_fixture_conftest_plugin(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ testplugin="""
+ import pytest
+
+ @pytest.fixture
+ def foo():
+ return 7
+ """
+ )
+ pytester.syspathinsert()
+ pytester.makeconftest(
+ """
+ import pytest
+
+ pytest_plugins = 'testplugin'
+
+ @pytest.fixture
+ def foo(foo):
+ return foo + 7
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_foo(foo):
+ assert foo == 14
+ """
+ )
+ result = pytester.runpytest("-s")
+ assert result.ret == 0
+
+ def test_extend_fixture_plugin_plugin(self, pytester: Pytester) -> None:
+        # Two plugins should extend each other in loading order
+ pytester.makepyfile(
+ testplugin0="""
+ import pytest
+
+ @pytest.fixture
+ def foo():
+ return 7
+ """
+ )
+ pytester.makepyfile(
+ testplugin1="""
+ import pytest
+
+ @pytest.fixture
+ def foo(foo):
+ return foo + 7
+ """
+ )
+ pytester.syspathinsert()
+ pytester.makepyfile(
+ """
+ pytest_plugins = ['testplugin0', 'testplugin1']
+
+ def test_foo(foo):
+ assert foo == 14
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+
+ def test_override_parametrized_fixture_conftest_module(
+ self, pytester: Pytester
+ ) -> None:
+ """Test override of the parametrized fixture with non-parametrized one on the test module level."""
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2, 3])
+ def spam(request):
+ return request.param
+ """
+ )
+ testfile = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def spam():
+ return 'spam'
+
+ def test_spam(spam):
+ assert spam == 'spam'
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = pytester.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_override_parametrized_fixture_conftest_conftest(
+ self, pytester: Pytester
+ ) -> None:
+ """Test override of the parametrized fixture with non-parametrized one on the conftest level."""
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2, 3])
+ def spam(request):
+ return request.param
+ """
+ )
+ subdir = pytester.mkpydir("subdir")
+ subdir.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ @pytest.fixture
+ def spam():
+ return 'spam'
+ """
+ )
+ )
+ testfile = subdir.joinpath("test_spam.py")
+ testfile.write_text(
+ textwrap.dedent(
+ """\
+ def test_spam(spam):
+ assert spam == "spam"
+ """
+ )
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = pytester.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_override_non_parametrized_fixture_conftest_module(
+ self, pytester: Pytester
+ ) -> None:
+ """Test override of the non-parametrized fixture with parametrized one on the test module level."""
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def spam():
+ return 'spam'
+ """
+ )
+ testfile = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2, 3])
+ def spam(request):
+ return request.param
+
+ params = {'spam': 1}
+
+ def test_spam(spam):
+ assert spam == params['spam']
+ params['spam'] += 1
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*3 passed*"])
+ result = pytester.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+ def test_override_non_parametrized_fixture_conftest_conftest(
+ self, pytester: Pytester
+ ) -> None:
+ """Test override of the non-parametrized fixture with parametrized one on the conftest level."""
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def spam():
+ return 'spam'
+ """
+ )
+ subdir = pytester.mkpydir("subdir")
+ subdir.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ @pytest.fixture(params=[1, 2, 3])
+ def spam(request):
+ return request.param
+ """
+ )
+ )
+ testfile = subdir.joinpath("test_spam.py")
+ testfile.write_text(
+ textwrap.dedent(
+ """\
+ params = {'spam': 1}
+
+ def test_spam(spam):
+ assert spam == params['spam']
+ params['spam'] += 1
+ """
+ )
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*3 passed*"])
+ result = pytester.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+ def test_override_autouse_fixture_with_parametrized_fixture_conftest_conftest(
+ self, pytester: Pytester
+ ) -> None:
+ """Test override of the autouse fixture with parametrized one on the conftest level.
+        This test covers the issue explained in #1601.
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(autouse=True)
+ def spam():
+ return 'spam'
+ """
+ )
+ subdir = pytester.mkpydir("subdir")
+ subdir.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ @pytest.fixture(params=[1, 2, 3])
+ def spam(request):
+ return request.param
+ """
+ )
+ )
+ testfile = subdir.joinpath("test_spam.py")
+ testfile.write_text(
+ textwrap.dedent(
+ """\
+ params = {'spam': 1}
+
+ def test_spam(spam):
+ assert spam == params['spam']
+ params['spam'] += 1
+ """
+ )
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*3 passed*"])
+ result = pytester.runpytest(testfile)
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+ def test_override_fixture_reusing_super_fixture_parametrization(
+ self, pytester: Pytester
+ ) -> None:
+ """Override a fixture at a lower level, reusing the higher-level fixture that
+ is parametrized (#1953).
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2])
+ def foo(request):
+ return request.param
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def foo(foo):
+ return foo * 2
+
+ def test_spam(foo):
+ assert foo in (2, 4)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ def test_override_parametrize_fixture_and_indirect(
+ self, pytester: Pytester
+ ) -> None:
+ """Override a fixture at a lower level, reusing the higher-level fixture that
+ is parametrized, while also using indirect parametrization.
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2])
+ def foo(request):
+ return request.param
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def foo(foo):
+ return foo * 2
+
+ @pytest.fixture
+ def bar(request):
+ return request.param * 100
+
+ @pytest.mark.parametrize("bar", [42], indirect=True)
+ def test_spam(bar, foo):
+ assert bar == 4200
+ assert foo in (2, 4)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ def test_override_top_level_fixture_reusing_super_fixture_parametrization(
+ self, pytester: Pytester
+ ) -> None:
+ """Same as the above test, but with another level of overwriting."""
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(params=['unused', 'unused'])
+ def foo(request):
+ return request.param
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2])
+ def foo(request):
+ return request.param
+
+ class Test:
+
+ @pytest.fixture
+ def foo(self, foo):
+ return foo * 2
+
+ def test_spam(self, foo):
+ assert foo in (2, 4)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ def test_override_parametrized_fixture_with_new_parametrized_fixture(
+ self, pytester: Pytester
+ ) -> None:
+ """Overriding a parametrized fixture, while also parametrizing the new fixture and
+ simultaneously requesting the overwritten fixture as parameter, yields the same value
+ as ``request.param``.
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(params=['ignored', 'ignored'])
+ def foo(request):
+ return request.param
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[10, 20])
+ def foo(foo, request):
+ assert request.param == foo
+ return foo * 2
+
+ def test_spam(foo):
+ assert foo in (20, 40)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ def test_autouse_fixture_plugin(self, pytester: Pytester) -> None:
+ # A fixture from a plugin has no baseid set, which screwed up
+ # the autouse fixture handling.
+ pytester.makepyfile(
+ testplugin="""
+ import pytest
+
+ @pytest.fixture(autouse=True)
+ def foo(request):
+ request.function.foo = 7
+ """
+ )
+ pytester.syspathinsert()
+ pytester.makepyfile(
+ """
+ pytest_plugins = 'testplugin'
+
+ def test_foo(request):
+ assert request.function.foo == 7
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+
+ def test_funcarg_lookup_error(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def a_fixture(): pass
+
+ @pytest.fixture
+ def b_fixture(): pass
+
+ @pytest.fixture
+ def c_fixture(): pass
+
+ @pytest.fixture
+ def d_fixture(): pass
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_lookup_error(unknown):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR at setup of test_lookup_error*",
+ " def test_lookup_error(unknown):*",
+ "E fixture 'unknown' not found",
+ "> available fixtures:*a_fixture,*b_fixture,*c_fixture,*d_fixture*monkeypatch,*",
+ # sorted
+ "> use 'py*test --fixtures *' for help on them.",
+ "*1 error*",
+ ]
+ )
+ result.stdout.no_fnmatch_line("*INTERNAL*")
+
+ def test_fixture_excinfo_leak(self, pytester: Pytester) -> None:
+        # on Python 2, sys.exc_info() would leak into fixture executions
+ pytester.makepyfile(
+ """
+ import sys
+ import traceback
+ import pytest
+
+ @pytest.fixture
+ def leak():
+ if sys.exc_info()[0]: # python3 bug :)
+ traceback.print_exc()
+ #fails
+ assert sys.exc_info() == (None, None, None)
+
+ def test_leak(leak):
+ if sys.exc_info()[0]: # python3 bug :)
+ traceback.print_exc()
+ assert sys.exc_info() == (None, None, None)
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+
+
+class TestRequestBasic:
+ def test_request_attributes(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+
+ @pytest.fixture
+ def something(request): pass
+ def test_func(something): pass
+ """
+ )
+ assert isinstance(item, Function)
+ req = fixtures.FixtureRequest(item, _ispytest=True)
+ assert req.function == item.obj
+ assert req.keywords == item.keywords
+ assert hasattr(req.module, "test_func")
+ assert req.cls is None
+ assert req.function.__name__ == "test_func"
+ assert req.config == item.config
+ assert repr(req).find(req.function.__name__) != -1
+
+ def test_request_attributes_method(self, pytester: Pytester) -> None:
+ (item,) = pytester.getitems(
+ """
+ import pytest
+ class TestB(object):
+
+ @pytest.fixture
+ def something(self, request):
+ return 1
+ def test_func(self, something):
+ pass
+ """
+ )
+ assert isinstance(item, Function)
+ req = item._request
+ assert req.cls.__name__ == "TestB"
+ assert req.instance.__class__ == req.cls
+
+ def test_request_contains_funcarg_arg2fixturedefs(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol(
+ """
+ import pytest
+ @pytest.fixture
+ def something(request):
+ pass
+ class TestClass(object):
+ def test_method(self, something):
+ pass
+ """
+ )
+ (item1,) = pytester.genitems([modcol])
+ assert item1.name == "test_method"
+ arg2fixturedefs = fixtures.FixtureRequest(
+ item1, _ispytest=True
+ )._arg2fixturedefs
+ assert len(arg2fixturedefs) == 1
+ assert arg2fixturedefs["something"][0].argname == "something"
+
+ @pytest.mark.skipif(
+ hasattr(sys, "pypy_version_info"),
+ reason="this method of test doesn't work on pypy",
+ )
+ def test_request_garbage(self, pytester: Pytester) -> None:
+ try:
+ import xdist # noqa
+ except ImportError:
+ pass
+ else:
+ pytest.xfail("this test is flaky when executed with xdist")
+ pytester.makepyfile(
+ """
+ import sys
+ import pytest
+ from _pytest.fixtures import PseudoFixtureDef
+ import gc
+
+ @pytest.fixture(autouse=True)
+ def something(request):
+ original = gc.get_debug()
+ gc.set_debug(gc.DEBUG_SAVEALL)
+ gc.collect()
+
+ yield
+
+ try:
+ gc.collect()
+                    leaked = [x for x in gc.garbage if isinstance(x, PseudoFixtureDef)]
+ assert leaked == []
+ finally:
+ gc.set_debug(original)
+
+ def test_func():
+ pass
+ """
+ )
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["* 1 passed in *"])
+
+ def test_getfixturevalue_recursive(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def something(request):
+ return 1
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def something(request):
+ return request.getfixturevalue("something") + 1
+ def test_func(something):
+ assert something == 2
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_getfixturevalue_teardown(self, pytester: Pytester) -> None:
+ """
+ Issue #1895
+
+ `test_inner` requests `inner` fixture, which in turn requests `resource`
+ using `getfixturevalue`. `test_func` then requests `resource`.
+
+        `resource` is torn down before `inner` because the fixture mechanism won't consider
+ `inner` dependent on `resource` when it is used via `getfixturevalue`: `test_func`
+ will then cause the `resource`'s finalizer to be called first because of this.
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='session')
+ def resource():
+ r = ['value']
+ yield r
+ r.pop()
+
+ @pytest.fixture(scope='session')
+ def inner(request):
+ resource = request.getfixturevalue('resource')
+ assert resource == ['value']
+ yield
+ assert resource == ['value']
+
+ def test_inner(inner):
+ pass
+
+ def test_func(resource):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 2 passed in *"])
+
+ def test_getfixturevalue(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+
+ @pytest.fixture
+ def something(request):
+ return 1
+
+ values = [2]
+ @pytest.fixture
+ def other(request):
+ return values.pop()
+
+ def test_func(something): pass
+ """
+ )
+ assert isinstance(item, Function)
+ req = item._request
+
+ # Execute item's setup.
+ item.session._setupstate.setup(item)
+
+ with pytest.raises(pytest.FixtureLookupError):
+ req.getfixturevalue("notexists")
+ val = req.getfixturevalue("something")
+ assert val == 1
+ val = req.getfixturevalue("something")
+ assert val == 1
+ val2 = req.getfixturevalue("other")
+ assert val2 == 2
+ val2 = req.getfixturevalue("other") # see about caching
+ assert val2 == 2
+ assert item.funcargs["something"] == 1
+ assert len(get_public_names(item.funcargs)) == 2
+ assert "request" in item.funcargs
+
+ def test_request_addfinalizer(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ teardownlist = []
+ @pytest.fixture
+ def something(request):
+ request.addfinalizer(lambda: teardownlist.append(1))
+ def test_func(something): pass
+ """
+ )
+ assert isinstance(item, Function)
+ item.session._setupstate.setup(item)
+ item._request._fillfixtures()
+ # successively check finalization calls
+ parent = item.getparent(pytest.Module)
+ assert parent is not None
+ teardownlist = parent.obj.teardownlist
+ ss = item.session._setupstate
+ assert not teardownlist
+ ss.teardown_exact(None)
+ print(ss.stack)
+ assert teardownlist == [1]
+
+ def test_request_addfinalizer_failing_setup(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = [1]
+ @pytest.fixture
+ def myfix(request):
+ request.addfinalizer(values.pop)
+ assert 0
+ def test_fix(myfix):
+ pass
+ def test_finalizer_ran():
+ assert not values
+ """
+ )
+ reprec = pytester.inline_run("-s")
+ reprec.assertoutcome(failed=1, passed=1)
+
+ def test_request_addfinalizer_failing_setup_module(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = [1, 2]
+ @pytest.fixture(scope="module")
+ def myfix(request):
+ request.addfinalizer(values.pop)
+ request.addfinalizer(values.pop)
+ assert 0
+ def test_fix(myfix):
+ pass
+ """
+ )
+ reprec = pytester.inline_run("-s")
+ mod = reprec.getcalls("pytest_runtest_setup")[0].item.module
+ assert not mod.values
+
+ def test_request_addfinalizer_partial_setup_failure(
+ self, pytester: Pytester
+ ) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture
+ def something(request):
+ request.addfinalizer(lambda: values.append(None))
+ def test_func(something, missingarg):
+ pass
+ def test_second():
+ assert len(values) == 1
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ ["*1 error*"] # XXX the whole module collection fails
+ )
+
+ def test_request_subrequest_addfinalizer_exceptions(
+ self, pytester: Pytester
+ ) -> None:
+ """
+ Ensure exceptions raised during teardown by a finalizer are suppressed
+ until all finalizers are called, re-raising the first exception (#2440)
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ def _excepts(where):
+ raise Exception('Error in %s fixture' % where)
+ @pytest.fixture
+ def subrequest(request):
+ return request
+ @pytest.fixture
+ def something(subrequest):
+ subrequest.addfinalizer(lambda: values.append(1))
+ subrequest.addfinalizer(lambda: values.append(2))
+ subrequest.addfinalizer(lambda: _excepts('something'))
+ @pytest.fixture
+ def excepts(subrequest):
+ subrequest.addfinalizer(lambda: _excepts('excepts'))
+ subrequest.addfinalizer(lambda: values.append(3))
+ def test_first(something, excepts):
+ pass
+ def test_second():
+ assert values == [3, 2, 1]
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*Exception: Error in excepts fixture", "* 2 passed, 1 error in *"]
+ )
+
+ def test_request_getmodulepath(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol("def test_somefunc(): pass")
+ (item,) = pytester.genitems([modcol])
+ req = fixtures.FixtureRequest(item, _ispytest=True)
+ assert req.path == modcol.path
+
+ def test_request_fixturenames(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ from _pytest.pytester import get_public_names
+ @pytest.fixture()
+ def arg1():
+ pass
+ @pytest.fixture()
+ def farg(arg1):
+ pass
+ @pytest.fixture(autouse=True)
+ def sarg(tmp_path):
+ pass
+ def test_function(request, farg):
+ assert set(get_public_names(request.fixturenames)) == \
+ set(["sarg", "arg1", "request", "farg",
+ "tmp_path", "tmp_path_factory"])
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_request_fixturenames_dynamic_fixture(self, pytester: Pytester) -> None:
+ """Regression test for #3057"""
+ pytester.copy_example("fixtures/test_getfixturevalue_dynamic.py")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_setupdecorator_and_xunit(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(scope='module', autouse=True)
+ def setup_module():
+ values.append("module")
+ @pytest.fixture(autouse=True)
+ def setup_function():
+ values.append("function")
+
+ def test_func():
+ pass
+
+ class TestClass(object):
+ @pytest.fixture(scope="class", autouse=True)
+ def setup_class(self):
+ values.append("class")
+ @pytest.fixture(autouse=True)
+ def setup_method(self):
+ values.append("method")
+ def test_method(self):
+ pass
+ def test_all():
+ assert values == ["module", "function", "class",
+ "function", "method", "function"]
+ """
+ )
+ reprec = pytester.inline_run("-v")
+ reprec.assertoutcome(passed=3)
+
+ def test_fixtures_sub_subdir_normalize_sep(self, pytester: Pytester) -> None:
+ # this tests that normalization of nodeids takes place
+ b = pytester.path.joinpath("tests", "unit")
+ b.mkdir(parents=True)
+ b.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def arg1():
+ pass
+ """
+ )
+ )
+ p = b.joinpath("test_module.py")
+ p.write_text("def test_func(arg1): pass")
+ result = pytester.runpytest(p, "--fixtures")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ """
+ *fixtures defined*conftest*
+ *arg1*
+ """
+ )
+
+ def test_show_fixtures_color_yes(self, pytester: Pytester) -> None:
+ pytester.makepyfile("def test_this(): assert 1")
+ result = pytester.runpytest("--color=yes", "--fixtures")
+ assert "\x1b[32mtmp_path" in result.stdout.str()
+
+ def test_newstyle_with_request(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture()
+ def arg(request):
+ pass
+ def test_1(arg):
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_setupcontext_no_param(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(params=[1,2])
+ def arg(request):
+ return request.param
+
+ @pytest.fixture(autouse=True)
+ def mysetup(request, arg):
+ assert not hasattr(request, "param")
+ def test_1(arg):
+ assert arg in (1,2)
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+
+class TestRequestSessionScoped:
+ @pytest.fixture(scope="session")
+ def session_request(self, request):
+ return request
+
+ @pytest.mark.parametrize("name", ["path", "module"])
+ def test_session_scoped_unavailable_attributes(self, session_request, name):
+ with pytest.raises(
+ AttributeError,
+ match=f"{name} not available in session-scoped context",
+ ):
+ getattr(session_request, name)
+
+
+class TestRequestMarking:
+ def test_applymarker(self, pytester: Pytester) -> None:
+ item1, item2 = pytester.getitems(
+ """
+ import pytest
+
+ @pytest.fixture
+ def something(request):
+ pass
+ class TestClass(object):
+ def test_func1(self, something):
+ pass
+ def test_func2(self, something):
+ pass
+ """
+ )
+ req1 = fixtures.FixtureRequest(item1, _ispytest=True)
+ assert "xfail" not in item1.keywords
+ req1.applymarker(pytest.mark.xfail)
+ assert "xfail" in item1.keywords
+ assert "skipif" not in item1.keywords
+ req1.applymarker(pytest.mark.skipif)
+ assert "skipif" in item1.keywords
+ with pytest.raises(ValueError):
+ req1.applymarker(42) # type: ignore[arg-type]
+
+ def test_accesskeywords(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture()
+ def keywords(request):
+ return request.keywords
+ @pytest.mark.XYZ
+ def test_function(keywords):
+ assert keywords["XYZ"]
+ assert "abc" not in keywords
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_accessmarker_dynamic(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ @pytest.fixture()
+ def keywords(request):
+ return request.keywords
+
+ @pytest.fixture(scope="class", autouse=True)
+ def marking(request):
+ request.applymarker(pytest.mark.XYZ("hello"))
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_fun1(keywords):
+ assert keywords["XYZ"] is not None
+ assert "abc" not in keywords
+ def test_fun2(keywords):
+ assert keywords["XYZ"] is not None
+ assert "abc" not in keywords
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+
+class TestFixtureUsages:
+ def test_noargfixturedec(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def arg1():
+ return 1
+
+ def test_func(arg1):
+ assert arg1 == 1
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_receives_funcargs(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture()
+ def arg1():
+ return 1
+
+ @pytest.fixture()
+ def arg2(arg1):
+ return arg1 + 1
+
+ def test_add(arg2):
+ assert arg2 == 2
+ def test_all(arg1, arg2):
+ assert arg1 == 1
+ assert arg2 == 2
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_receives_funcargs_scope_mismatch(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="function")
+ def arg1():
+ return 1
+
+ @pytest.fixture(scope="module")
+ def arg2(arg1):
+ return arg1 + 1
+
+ def test_add(arg2):
+ assert arg2 == 2
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*ScopeMismatch*involved factories*",
+ "test_receives_funcargs_scope_mismatch.py:6: def arg2(arg1)",
+ "test_receives_funcargs_scope_mismatch.py:2: def arg1()",
+ "*1 error*",
+ ]
+ )
+
+ def test_receives_funcargs_scope_mismatch_issue660(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="function")
+ def arg1():
+ return 1
+
+ @pytest.fixture(scope="module")
+ def arg2(arg1):
+ return arg1 + 1
+
+ def test_add(arg1, arg2):
+ assert arg2 == 2
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*ScopeMismatch*involved factories*", "* def arg2*", "*1 error*"]
+ )
+
+ def test_invalid_scope(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="functions")
+ def badscope():
+ pass
+
+ def test_nothing(badscope):
+ pass
+ """
+ )
+ result = pytester.runpytest_inprocess()
+ result.stdout.fnmatch_lines(
+ "*Fixture 'badscope' from test_invalid_scope.py got an unexpected scope value 'functions'"
+ )
+
+ @pytest.mark.parametrize("scope", ["function", "session"])
+ def test_parameters_without_eq_semantics(self, scope, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ class NoEq1: # fails on `a == b` statement
+ def __eq__(self, _):
+ raise RuntimeError
+
+ class NoEq2: # fails on `if a == b:` statement
+ def __eq__(self, _):
+ class NoBool:
+ def __bool__(self):
+ raise RuntimeError
+ return NoBool()
+
+ import pytest
+ @pytest.fixture(params=[NoEq1(), NoEq2()], scope={scope!r})
+ def no_eq(request):
+ return request.param
+
+ def test1(no_eq):
+ pass
+
+ def test2(no_eq):
+ pass
+ """.format(
+ scope=scope
+ )
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*4 passed*"])
+
+ def test_funcarg_parametrized_and_used_twice(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(params=[1,2])
+ def arg1(request):
+ values.append(1)
+ return request.param
+
+ @pytest.fixture()
+ def arg2(arg1):
+ return arg1 + 1
+
+ def test_add(arg1, arg2):
+ assert arg2 == arg1 + 1
+ assert len(values) == arg1
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ def test_factory_uses_unknown_funcarg_as_dependency_error(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture()
+ def fail(missing):
+ return
+
+ @pytest.fixture()
+ def call_fail(fail):
+ return
+
+ def test_missing(call_fail):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *pytest.fixture()*
+ *def call_fail(fail)*
+ *pytest.fixture()*
+ *def fail*
+ *fixture*'missing'*not found*
+ """
+ )
+
+ def test_factory_setup_as_classes_fails(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ class arg1(object):
+ def __init__(self, request):
+ self.x = 1
+ arg1 = pytest.fixture()(arg1)
+
+ """
+ )
+ reprec = pytester.inline_run()
+ values = reprec.getfailedcollections()
+ assert len(values) == 1
+
+ def test_usefixtures_marker(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ values = []
+
+ @pytest.fixture(scope="class")
+ def myfix(request):
+ request.cls.hello = "world"
+ values.append(1)
+
+ class TestClass(object):
+ def test_one(self):
+ assert self.hello == "world"
+ assert len(values) == 1
+ def test_two(self):
+ assert self.hello == "world"
+ assert len(values) == 1
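+            # calling the mark on the class applies it, equivalent to
+            # decorating TestClass with @pytest.mark.usefixtures("myfix")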
+ pytest.mark.usefixtures("myfix")(TestClass)
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_usefixtures_ini(self, pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ usefixtures = myfix
+ """
+ )
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(scope="class")
+ def myfix(request):
+ request.cls.hello = "world"
+
+ """
+ )
+ pytester.makepyfile(
+ """
+ class TestClass(object):
+ def test_one(self):
+ assert self.hello == "world"
+ def test_two(self):
+ assert self.hello == "world"
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_usefixtures_seen_in_showmarkers(self, pytester: Pytester) -> None:
+ result = pytester.runpytest("--markers")
+ result.stdout.fnmatch_lines(
+ """
+ *usefixtures(fixturename1*mark tests*fixtures*
+ """
+ )
+
+ def test_request_instance_issue203(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ class TestClass(object):
+ @pytest.fixture
+ def setup1(self, request):
+ assert self == request.instance
+ self.arg1 = 1
+ def test_hello(self, setup1):
+ assert self.arg1 == 1
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_fixture_parametrized_with_iterator(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ values = []
+ def f():
+ yield 1
+ yield 2
+ dec = pytest.fixture(scope="module", params=f())
+
+ @dec
+ def arg(request):
+ return request.param
+ @dec
+ def arg2(request):
+ return request.param
+
+ def test_1(arg):
+ values.append(arg)
+ def test_2(arg2):
+ values.append(arg2*10)
+ """
+ )
+ reprec = pytester.inline_run("-v")
+ reprec.assertoutcome(passed=4)
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
+ assert values == [1, 2, 10, 20]
+
+ def test_setup_functions_as_fixtures(self, pytester: Pytester) -> None:
+ """Ensure setup_* methods obey fixture scope rules (#517, #3094)."""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ DB_INITIALIZED = None
+
+ @pytest.fixture(scope="session", autouse=True)
+ def db():
+ global DB_INITIALIZED
+ DB_INITIALIZED = True
+ yield
+ DB_INITIALIZED = False
+
+ def setup_module():
+ assert DB_INITIALIZED
+
+ def teardown_module():
+ assert DB_INITIALIZED
+
+ class TestClass(object):
+
+ def setup_method(self, method):
+ assert DB_INITIALIZED
+
+ def teardown_method(self, method):
+ assert DB_INITIALIZED
+
+ def test_printer_1(self):
+ pass
+
+ def test_printer_2(self):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 2 passed in *"])
+
+
+class TestFixtureManagerParseFactories:
+ @pytest.fixture
+ def pytester(self, pytester: Pytester) -> Pytester:
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def hello(request):
+ return "conftest"
+
+ @pytest.fixture
+ def fm(request):
+ return request._fixturemanager
+
+ @pytest.fixture
+ def item(request):
+ return request._pyfuncitem
+ """
+ )
+ return pytester
+
+ def test_parsefactories_evil_objects_issue214(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ class A(object):
+ def __call__(self):
+ pass
+ def __getattr__(self, name):
+ raise RuntimeError()
+ a = A()
+ def test_hello():
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1, failed=0)
+
+ def test_parsefactories_conftest(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_hello(item, fm):
+ for name in ("fm", "hello", "item"):
+ faclist = fm.getfixturedefs(name, item.nodeid)
+ assert len(faclist) == 1
+ fac = faclist[0]
+ assert fac.func.__name__ == name
+ """
+ )
+ reprec = pytester.inline_run("-s")
+ reprec.assertoutcome(passed=1)
+
+ def test_parsefactories_conftest_and_module_and_class(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ """\
+ import pytest
+
+ @pytest.fixture
+ def hello(request):
+ return "module"
+ class TestClass(object):
+ @pytest.fixture
+ def hello(self, request):
+ return "class"
+ def test_hello(self, item, fm):
+ faclist = fm.getfixturedefs("hello", item.nodeid)
+ print(faclist)
+ assert len(faclist) == 3
+
+ assert faclist[0].func(item._request) == "conftest"
+ assert faclist[1].func(item._request) == "module"
+ assert faclist[2].func(item._request) == "class"
+ """
+ )
+ reprec = pytester.inline_run("-s")
+ reprec.assertoutcome(passed=1)
+
+ def test_parsefactories_relative_node_ids(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ # example mostly taken from:
+ # https://mail.python.org/pipermail/pytest-dev/2014-September/002617.html
+ runner = pytester.mkdir("runner")
+ package = pytester.mkdir("package")
+ package.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def one():
+ return 1
+ """
+ )
+ )
+ package.joinpath("test_x.py").write_text(
+ textwrap.dedent(
+ """\
+ def test_x(one):
+ assert one == 1
+ """
+ )
+ )
+ sub = package.joinpath("sub")
+ sub.mkdir()
+ sub.joinpath("__init__.py").touch()
+ sub.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def one():
+ return 2
+ """
+ )
+ )
+ sub.joinpath("test_y.py").write_text(
+ textwrap.dedent(
+ """\
+ def test_x(one):
+ assert one == 2
+ """
+ )
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+ with monkeypatch.context() as mp:
+ mp.chdir(runner)
+ reprec = pytester.inline_run("..")
+ reprec.assertoutcome(passed=2)
+
+ def test_package_xunit_fixture(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ __init__="""\
+ values = []
+ """
+ )
+ package = pytester.mkdir("package")
+ package.joinpath("__init__.py").write_text(
+ textwrap.dedent(
+ """\
+ from .. import values
+ def setup_module():
+ values.append("package")
+ def teardown_module():
+ values[:] = []
+ """
+ )
+ )
+ package.joinpath("test_x.py").write_text(
+ textwrap.dedent(
+ """\
+ from .. import values
+ def test_x():
+ assert values == ["package"]
+ """
+ )
+ )
+ package = pytester.mkdir("package2")
+ package.joinpath("__init__.py").write_text(
+ textwrap.dedent(
+ """\
+ from .. import values
+ def setup_module():
+ values.append("package2")
+ def teardown_module():
+ values[:] = []
+ """
+ )
+ )
+ package.joinpath("test_x.py").write_text(
+ textwrap.dedent(
+ """\
+ from .. import values
+ def test_x():
+ assert values == ["package2"]
+ """
+ )
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_package_fixture_complex(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ __init__="""\
+ values = []
+ """
+ )
+ pytester.syspathinsert(pytester.path.name)
+ package = pytester.mkdir("package")
+ package.joinpath("__init__.py").write_text("")
+ package.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ from .. import values
+ @pytest.fixture(scope="package")
+ def one():
+ values.append("package")
+ yield values
+ values.pop()
+ @pytest.fixture(scope="package", autouse=True)
+ def two():
+ values.append("package-auto")
+ yield values
+ values.pop()
+ """
+ )
+ )
+ package.joinpath("test_x.py").write_text(
+ textwrap.dedent(
+ """\
+ from .. import values
+ def test_package_autouse():
+ assert values == ["package-auto"]
+ def test_package(one):
+ assert values == ["package-auto", "package"]
+ """
+ )
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_collect_custom_items(self, pytester: Pytester) -> None:
+ pytester.copy_example("fixtures/custom_item")
+ result = pytester.runpytest("foo")
+ result.stdout.fnmatch_lines(["*passed*"])
+
+
+class TestAutouseDiscovery:
+ @pytest.fixture
+ def pytester(self, pytester: Pytester) -> Pytester:
+ pytester.makeconftest(
+ """
+ import pytest
+ @pytest.fixture(autouse=True)
+ def perfunction(request, tmp_path):
+ pass
+
+ @pytest.fixture()
+ def arg1(tmp_path):
+ pass
+ @pytest.fixture(autouse=True)
+ def perfunction2(arg1):
+ pass
+
+ @pytest.fixture
+ def fm(request):
+ return request._fixturemanager
+
+ @pytest.fixture
+ def item(request):
+ return request._pyfuncitem
+ """
+ )
+ return pytester
+
+ def test_parsefactories_conftest(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ from _pytest.pytester import get_public_names
+ def test_check_setup(item, fm):
+ autousenames = list(fm._getautousenames(item.nodeid))
+ assert len(get_public_names(autousenames)) == 2
+ assert "perfunction2" in autousenames
+ assert "perfunction" in autousenames
+ """
+ )
+ reprec = pytester.inline_run("-s")
+ reprec.assertoutcome(passed=1)
+
+ def test_two_classes_separated_autouse(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ class TestA(object):
+ values = []
+ @pytest.fixture(autouse=True)
+ def setup1(self):
+ self.values.append(1)
+ def test_setup1(self):
+ assert self.values == [1]
+ class TestB(object):
+ values = []
+ @pytest.fixture(autouse=True)
+ def setup2(self):
+ self.values.append(1)
+ def test_setup2(self):
+ assert self.values == [1]
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_setup_at_classlevel(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ class TestClass(object):
+ @pytest.fixture(autouse=True)
+ def permethod(self, request):
+ request.instance.funcname = request.function.__name__
+ def test_method1(self):
+ assert self.funcname == "test_method1"
+ def test_method2(self):
+ assert self.funcname == "test_method2"
+ """
+ )
+ reprec = pytester.inline_run("-s")
+ reprec.assertoutcome(passed=2)
+
+ @pytest.mark.xfail(reason="'enabled' feature not implemented")
+ def test_setup_enabled_functionnode(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ def enabled(parentnode, markers):
+ return "needsdb" in markers
+
+ @pytest.fixture(params=[1,2])
+ def db(request):
+ return request.param
+
+ @pytest.fixture(enabled=enabled, autouse=True)
+ def createdb(db):
+ pass
+
+ def test_func1(request):
+ assert "db" not in request.fixturenames
+
+ @pytest.mark.needsdb
+ def test_func2(request):
+ assert "db" in request.fixturenames
+ """
+ )
+ reprec = pytester.inline_run("-s")
+ reprec.assertoutcome(passed=2)
+
+ def test_callables_nocode(self, pytester: Pytester) -> None:
+ """An imported mock.call would break setup/factory discovery due to
+ it being callable and __code__ not being a code object."""
+ pytester.makepyfile(
+ """
+ class _call(tuple):
+ def __call__(self, *k, **kw):
+ pass
+ def __getattr__(self, k):
+ return self
+
+ call = _call()
+ """
+ )
+ reprec = pytester.inline_run("-s")
+ reprec.assertoutcome(failed=0, passed=0)
+
+ def test_autouse_in_conftests(self, pytester: Pytester) -> None:
+ a = pytester.mkdir("a")
+ b = pytester.mkdir("a1")
+ conftest = pytester.makeconftest(
+ """
+ import pytest
+ @pytest.fixture(autouse=True)
+ def hello():
+ xxx
+ """
+ )
+ conftest.rename(a.joinpath(conftest.name))
+ a.joinpath("test_something.py").write_text("def test_func(): pass")
+ b.joinpath("test_otherthing.py").write_text("def test_func(): pass")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *1 passed*1 error*
+ """
+ )
+
+ def test_autouse_in_module_and_two_classes(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(autouse=True)
+ def append1():
+ values.append("module")
+ def test_x():
+ assert values == ["module"]
+
+ class TestA(object):
+ @pytest.fixture(autouse=True)
+ def append2(self):
+ values.append("A")
+ def test_hello(self):
+ assert values == ["module", "module", "A"], values
+ class TestA2(object):
+ def test_world(self):
+ assert values == ["module", "module", "A", "module"], values
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=3)
+
+
+class TestAutouseManagement:
+ def test_autouse_conftest_mid_directory(self, pytester: Pytester) -> None:
+ pkgdir = pytester.mkpydir("xyz123")
+ pkgdir.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ @pytest.fixture(autouse=True)
+ def app():
+ import sys
+ sys._myapp = "hello"
+ """
+ )
+ )
+ sub = pkgdir.joinpath("tests")
+ sub.mkdir()
+ t = sub.joinpath("test_app.py")
+ t.touch()
+ t.write_text(
+ textwrap.dedent(
+ """\
+ import sys
+ def test_app():
+ assert sys._myapp == "hello"
+ """
+ )
+ )
+ reprec = pytester.inline_run("-s")
+ reprec.assertoutcome(passed=1)
+
+ def test_funcarg_and_setup(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(scope="module")
+ def arg():
+ values.append(1)
+ return 0
+ @pytest.fixture(scope="module", autouse=True)
+ def something(arg):
+ values.append(2)
+
+ def test_hello(arg):
+ assert len(values) == 2
+ assert values == [1,2]
+ assert arg == 0
+
+ def test_hello2(arg):
+ assert len(values) == 2
+ assert values == [1,2]
+ assert arg == 0
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_uses_parametrized_resource(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(params=[1,2])
+ def arg(request):
+ return request.param
+
+ @pytest.fixture(autouse=True)
+ def something(arg):
+ values.append(arg)
+
+ def test_hello():
+ if len(values) == 1:
+ assert values == [1]
+ elif len(values) == 2:
+ assert values == [1, 2]
+ else:
+ 0/0
+
+ """
+ )
+ reprec = pytester.inline_run("-s")
+ reprec.assertoutcome(passed=2)
+
+ def test_session_parametrized_function(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ values = []
+
+ @pytest.fixture(scope="session", params=[1,2])
+ def arg(request):
+ return request.param
+
+ @pytest.fixture(scope="function", autouse=True)
+ def append(request, arg):
+ if request.function.__name__ == "test_some":
+ values.append(arg)
+
+ def test_some():
+ pass
+
+ def test_result(arg):
+ assert len(values) == arg
+ assert values[:arg] == [1,2][:arg]
+ """
+ )
+ reprec = pytester.inline_run("-v", "-s")
+ reprec.assertoutcome(passed=4)
+
+ def test_class_function_parametrization_finalization(
+ self, pytester: Pytester
+ ) -> None:
+ p = pytester.makeconftest(
+ """
+ import pytest
+ import pprint
+
+ values = []
+
+ @pytest.fixture(scope="function", params=[1,2])
+ def farg(request):
+ return request.param
+
+ @pytest.fixture(scope="class", params=list("ab"))
+ def carg(request):
+ return request.param
+
+ @pytest.fixture(scope="function", autouse=True)
+ def append(request, farg, carg):
+ def fin():
+ values.append("fin_%s%s" % (carg, farg))
+ request.addfinalizer(fin)
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+
+ class TestClass(object):
+ def test_1(self):
+ pass
+ class TestClass2(object):
+ def test_2(self):
+ pass
+ """
+ )
+ reprec = pytester.inline_run("-v", "-s", "--confcutdir", pytester.path)
+ reprec.assertoutcome(passed=8)
+ config = reprec.getcalls("pytest_unconfigure")[0].config
+ values = config.pluginmanager._getconftestmodules(
+ p, importmode="prepend", rootpath=pytester.path
+ )[0].values
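+        # for each test class, the class-scoped 'carg' params nest the
+        # function-scoped 'farg' params, so the finalizers fire as
+        # fin_a1, fin_a2, fin_b1, fin_b2 once per class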
+ assert values == ["fin_a1", "fin_a2", "fin_b1", "fin_b2"] * 2
+
+ def test_scope_ordering(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(scope="function", autouse=True)
+ def fappend2():
+ values.append(2)
+ @pytest.fixture(scope="class", autouse=True)
+ def classappend3():
+ values.append(3)
+ @pytest.fixture(scope="module", autouse=True)
+ def mappend():
+ values.append(1)
+
+ class TestHallo(object):
+ def test_method(self):
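+                    # autouse fixtures run from broadest to narrowest scope:
+                    # module (1), then class (3), then function (2)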
+ assert values == [1,3,2]
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_parametrization_setup_teardown_ordering(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ def pytest_generate_tests(metafunc):
+ if metafunc.cls is None:
+ assert metafunc.function is test_finish
+ if metafunc.cls is not None:
+ metafunc.parametrize("item", [1,2], scope="class")
+ class TestClass(object):
+ @pytest.fixture(scope="class", autouse=True)
+ def addteardown(self, item, request):
+ values.append("setup-%d" % item)
+ request.addfinalizer(lambda: values.append("teardown-%d" % item))
+ def test_step1(self, item):
+ values.append("step1-%d" % item)
+ def test_step2(self, item):
+ values.append("step2-%d" % item)
+
+ def test_finish():
+ print(values)
+ assert values == ["setup-1", "step1-1", "step2-1", "teardown-1",
+ "setup-2", "step1-2", "step2-2", "teardown-2",]
+ """
+ )
+ reprec = pytester.inline_run("-s")
+ reprec.assertoutcome(passed=5)
+
+ def test_ordering_autouse_before_explicit(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ values = []
+ @pytest.fixture(autouse=True)
+ def fix1():
+ values.append(1)
+ @pytest.fixture()
+ def arg1():
+ values.append(2)
+ def test_hello(arg1):
+ assert values == [1,2]
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ @pytest.mark.parametrize("param1", ["", "params=[1]"], ids=["p00", "p01"])
+ @pytest.mark.parametrize("param2", ["", "params=[1]"], ids=["p10", "p11"])
+ def test_ordering_dependencies_torndown_first(
+ self, pytester: Pytester, param1, param2
+ ) -> None:
+ """#226"""
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(%(param1)s)
+ def arg1(request):
+ request.addfinalizer(lambda: values.append("fin1"))
+ values.append("new1")
+ @pytest.fixture(%(param2)s)
+ def arg2(request, arg1):
+ request.addfinalizer(lambda: values.append("fin2"))
+ values.append("new2")
+
+ def test_arg(arg2):
+ pass
+ def test_check():
+ assert values == ["new1", "new2", "fin2", "fin1"]
+ """
+ % locals()
+ )
+ reprec = pytester.inline_run("-s")
+ reprec.assertoutcome(passed=2)
+
+
+class TestFixtureMarker:
+ def test_parametrize(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(params=["a", "b", "c"])
+ def arg(request):
+ return request.param
+ values = []
+ def test_param(arg):
+ values.append(arg)
+ def test_result():
+ assert values == list("abc")
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=4)
+
+ def test_multiple_parametrization_issue_736(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1,2,3])
+ def foo(request):
+ return request.param
+
+ @pytest.mark.parametrize('foobar', [4,5,6])
+ def test_issue(foo, foobar):
+ assert foo in [1,2,3]
+ assert foobar in [4,5,6]
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=9)
+
+ @pytest.mark.parametrize(
+ "param_args",
+ ["'fixt, val'", "'fixt,val'", "['fixt', 'val']", "('fixt', 'val')"],
+ )
+ def test_override_parametrized_fixture_issue_979(
+ self, pytester: Pytester, param_args
+ ) -> None:
+ """Make sure a parametrized argument can override a parametrized fixture.
+
+ This was a regression introduced in the fix for #736.
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1, 2])
+ def fixt(request):
+ return request.param
+
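+            # parametrizing 'fixt' directly overrides the fixture's own
+            # params, so only the (3, 'x') and (4, 'x') cases run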
+ @pytest.mark.parametrize(%s, [(3, 'x'), (4, 'x')])
+ def test_foo(fixt, val):
+ pass
+ """
+ % param_args
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_scope_session(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(scope="module")
+ def arg():
+ values.append(1)
+ return 1
+
+ def test_1(arg):
+ assert arg == 1
+ def test_2(arg):
+ assert arg == 1
+ assert len(values) == 1
+ class TestClass(object):
+ def test3(self, arg):
+ assert arg == 1
+ assert len(values) == 1
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=3)
+
+ def test_scope_session_exc(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(scope="session")
+ def fix():
+ values.append(1)
+ pytest.skip('skipping')
+
+ def test_1(fix):
+ pass
+ def test_2(fix):
+ pass
+ def test_last():
+ assert values == [1]
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(skipped=2, passed=1)
+
+ def test_scope_session_exc_two_fix(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ m = []
+ @pytest.fixture(scope="session")
+ def a():
+ values.append(1)
+ pytest.skip('skipping')
+ @pytest.fixture(scope="session")
+ def b(a):
+ m.append(1)
+
+ def test_1(b):
+ pass
+ def test_2(b):
+ pass
+ def test_last():
+ assert values == [1]
+ assert m == []
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(skipped=2, passed=1)
+
+ def test_scope_exc(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_foo="""
+ def test_foo(fix):
+ pass
+ """,
+ test_bar="""
+ def test_bar(fix):
+ pass
+ """,
+ conftest="""
+ import pytest
+ reqs = []
+ @pytest.fixture(scope="session")
+ def fix(request):
+ reqs.append(1)
+ pytest.skip()
+ @pytest.fixture
+ def req_list():
+ return reqs
+ """,
+ test_real="""
+ def test_last(req_list):
+ assert req_list == [1]
+ """,
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(skipped=2, passed=1)
+
+ def test_scope_module_uses_session(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(scope="module")
+ def arg():
+ values.append(1)
+ return 1
+
+ def test_1(arg):
+ assert arg == 1
+ def test_2(arg):
+ assert arg == 1
+ assert len(values) == 1
+ class TestClass(object):
+ def test3(self, arg):
+ assert arg == 1
+ assert len(values) == 1
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=3)
+
+ def test_scope_module_and_finalizer(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ finalized_list = []
+ created_list = []
+ @pytest.fixture(scope="module")
+ def arg(request):
+ created_list.append(1)
+ assert request.scope == "module"
+ request.addfinalizer(lambda: finalized_list.append(1))
+ @pytest.fixture
+ def created(request):
+ return len(created_list)
+ @pytest.fixture
+ def finalized(request):
+ return len(finalized_list)
+ """
+ )
+ pytester.makepyfile(
+ test_mod1="""
+ def test_1(arg, created, finalized):
+ assert created == 1
+ assert finalized == 0
+ def test_2(arg, created, finalized):
+ assert created == 1
+ assert finalized == 0""",
+ test_mod2="""
+ def test_3(arg, created, finalized):
+ assert created == 2
+ assert finalized == 1""",
+ test_mode3="""
+ def test_4(arg, created, finalized):
+ assert created == 3
+ assert finalized == 2
+ """,
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=4)
+
+ def test_scope_mismatch_various(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ finalized = []
+ created = []
+ @pytest.fixture(scope="function")
+ def arg(request):
+ pass
+ """
+ )
+ pytester.makepyfile(
+ test_mod1="""
+ import pytest
+ @pytest.fixture(scope="session")
+ def arg(request):
+ request.getfixturevalue("arg")
+ def test_1(arg):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(
+ ["*ScopeMismatch*You tried*function*session*request*"]
+ )
+
+ def test_dynamic_scope(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+
+
+ def pytest_addoption(parser):
+ parser.addoption("--extend-scope", action="store_true", default=False)
+
+
+ def dynamic_scope(fixture_name, config):
+ if config.getoption("--extend-scope"):
+ return "session"
+ return "function"
+
+
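+            # 'scope' may also be a callable; pytest calls it with the
+            # fixture name and the config object to resolve the scope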
+ @pytest.fixture(scope=dynamic_scope)
+ def dynamic_fixture(calls=[]):
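+                # the mutable default is intentional: it persists across
+                # calls and acts as a simple call counter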
+ calls.append("call")
+ return len(calls)
+
+ """
+ )
+
+ pytester.makepyfile(
+ """
+ def test_first(dynamic_fixture):
+ assert dynamic_fixture == 1
+
+
+ def test_second(dynamic_fixture):
+ assert dynamic_fixture == 2
+
+ """
+ )
+
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ reprec = pytester.inline_run("--extend-scope")
+ reprec.assertoutcome(passed=1, failed=1)
+
+ def test_dynamic_scope_bad_return(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ def dynamic_scope(**_):
+ return "wrong-scope"
+
+ @pytest.fixture(scope=dynamic_scope)
+ def fixture():
+ pass
+
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ "Fixture 'fixture' from test_dynamic_scope_bad_return.py "
+ "got an unexpected scope value 'wrong-scope'"
+ )
+
+ def test_register_only_with_mark(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ @pytest.fixture()
+ def arg():
+ return 1
+ """
+ )
+ pytester.makepyfile(
+ test_mod1="""
+ import pytest
+ @pytest.fixture()
+ def arg(arg):
+ return arg + 1
+ def test_1(arg):
+ assert arg == 2
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_parametrize_and_scope(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="module", params=["a", "b", "c"])
+ def arg(request):
+ return request.param
+ values = []
+ def test_param(arg):
+ values.append(arg)
+ """
+ )
+ reprec = pytester.inline_run("-v")
+ reprec.assertoutcome(passed=3)
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
+ assert len(values) == 3
+ assert "a" in values
+ assert "b" in values
+ assert "c" in values
+
+ def test_scope_mismatch(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ @pytest.fixture(scope="function")
+ def arg(request):
+ pass
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="session")
+ def arg(arg):
+ pass
+ def test_mismatch(arg):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*ScopeMismatch*", "*1 error*"])
+
+ def test_parametrize_separated_order(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope="module", params=[1, 2])
+ def arg(request):
+ return request.param
+
+ values = []
+ def test_1(arg):
+ values.append(arg)
+ def test_2(arg):
+ values.append(arg)
+ """
+ )
+ reprec = pytester.inline_run("-v")
+ reprec.assertoutcome(passed=4)
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
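+        # the module-scoped parametrization groups both tests per param
+        # value, so the order is [1, 1, 2, 2] rather than interleaved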
+ assert values == [1, 1, 2, 2]
+
+ def test_module_parametrized_ordering(self, pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ console_output_style=classic
+ """
+ )
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(scope="session", params="s1 s2".split())
+ def sarg():
+ pass
+ @pytest.fixture(scope="module", params="m1 m2".split())
+ def marg():
+ pass
+ """
+ )
+ pytester.makepyfile(
+ test_mod1="""
+ def test_func(sarg):
+ pass
+ def test_func1(marg):
+ pass
+ """,
+ test_mod2="""
+ def test_func2(sarg):
+ pass
+ def test_func3(sarg, marg):
+ pass
+ def test_func3b(sarg, marg):
+ pass
+ def test_func4(marg):
+ pass
+ """,
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ """
+ test_mod1.py::test_func[s1] PASSED
+ test_mod2.py::test_func2[s1] PASSED
+ test_mod2.py::test_func3[s1-m1] PASSED
+ test_mod2.py::test_func3b[s1-m1] PASSED
+ test_mod2.py::test_func3[s1-m2] PASSED
+ test_mod2.py::test_func3b[s1-m2] PASSED
+ test_mod1.py::test_func[s2] PASSED
+ test_mod2.py::test_func2[s2] PASSED
+ test_mod2.py::test_func3[s2-m1] PASSED
+ test_mod2.py::test_func3b[s2-m1] PASSED
+ test_mod2.py::test_func4[m1] PASSED
+ test_mod2.py::test_func3[s2-m2] PASSED
+ test_mod2.py::test_func3b[s2-m2] PASSED
+ test_mod2.py::test_func4[m2] PASSED
+ test_mod1.py::test_func1[m1] PASSED
+ test_mod1.py::test_func1[m2] PASSED
+ """
+ )
+
+ def test_dynamic_parametrized_ordering(self, pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ console_output_style=classic
+ """
+ )
+ pytester.makeconftest(
+ """
+ import pytest
+
+ def pytest_configure(config):
+ class DynamicFixturePlugin(object):
+ @pytest.fixture(scope='session', params=['flavor1', 'flavor2'])
+ def flavor(self, request):
+ return request.param
+ config.pluginmanager.register(DynamicFixturePlugin(), 'flavor-fixture')
+
+ @pytest.fixture(scope='session', params=['vxlan', 'vlan'])
+ def encap(request):
+ return request.param
+
+            @pytest.fixture(scope='session', autouse=True)
+ def reprovision(request, flavor, encap):
+ pass
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test(reprovision):
+ pass
+ def test2(reprovision):
+ pass
+ """
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ """
+ test_dynamic_parametrized_ordering.py::test[flavor1-vxlan] PASSED
+ test_dynamic_parametrized_ordering.py::test2[flavor1-vxlan] PASSED
+ test_dynamic_parametrized_ordering.py::test[flavor2-vxlan] PASSED
+ test_dynamic_parametrized_ordering.py::test2[flavor2-vxlan] PASSED
+ test_dynamic_parametrized_ordering.py::test[flavor2-vlan] PASSED
+ test_dynamic_parametrized_ordering.py::test2[flavor2-vlan] PASSED
+ test_dynamic_parametrized_ordering.py::test[flavor1-vlan] PASSED
+ test_dynamic_parametrized_ordering.py::test2[flavor1-vlan] PASSED
+ """
+ )
+
+ def test_class_ordering(self, pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ console_output_style=classic
+ """
+ )
+ pytester.makeconftest(
+ """
+ import pytest
+
+ values = []
+
+ @pytest.fixture(scope="function", params=[1,2])
+ def farg(request):
+ return request.param
+
+ @pytest.fixture(scope="class", params=list("ab"))
+ def carg(request):
+ return request.param
+
+ @pytest.fixture(scope="function", autouse=True)
+ def append(request, farg, carg):
+ def fin():
+ values.append("fin_%s%s" % (carg, farg))
+ request.addfinalizer(fin)
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+
+ class TestClass2(object):
+ def test_1(self):
+ pass
+ def test_2(self):
+ pass
+ class TestClass(object):
+ def test_3(self):
+ pass
+ """
+ )
+ result = pytester.runpytest("-vs")
+ result.stdout.re_match_lines(
+ r"""
+ test_class_ordering.py::TestClass2::test_1\[a-1\] PASSED
+ test_class_ordering.py::TestClass2::test_1\[a-2\] PASSED
+ test_class_ordering.py::TestClass2::test_2\[a-1\] PASSED
+ test_class_ordering.py::TestClass2::test_2\[a-2\] PASSED
+ test_class_ordering.py::TestClass2::test_1\[b-1\] PASSED
+ test_class_ordering.py::TestClass2::test_1\[b-2\] PASSED
+ test_class_ordering.py::TestClass2::test_2\[b-1\] PASSED
+ test_class_ordering.py::TestClass2::test_2\[b-2\] PASSED
+ test_class_ordering.py::TestClass::test_3\[a-1\] PASSED
+ test_class_ordering.py::TestClass::test_3\[a-2\] PASSED
+ test_class_ordering.py::TestClass::test_3\[b-1\] PASSED
+ test_class_ordering.py::TestClass::test_3\[b-2\] PASSED
+ """
+ )
+
+ def test_parametrize_separated_order_higher_scope_first(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope="function", params=[1, 2])
+ def arg(request):
+ param = request.param
+ request.addfinalizer(lambda: values.append("fin:%s" % param))
+ values.append("create:%s" % param)
+ return request.param
+
+ @pytest.fixture(scope="module", params=["mod1", "mod2"])
+ def modarg(request):
+ param = request.param
+ request.addfinalizer(lambda: values.append("fin:%s" % param))
+ values.append("create:%s" % param)
+ return request.param
+
+ values = []
+ def test_1(arg):
+ values.append("test1")
+ def test_2(modarg):
+ values.append("test2")
+ def test_3(arg, modarg):
+ values.append("test3")
+ def test_4(modarg, arg):
+ values.append("test4")
+ """
+ )
+ reprec = pytester.inline_run("-v")
+ reprec.assertoutcome(passed=12)
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
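+        # function-scoped 'arg' is created and torn down around each test,
+        # while module-scoped 'modarg' persists across all tests grouped
+        # under each of its params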
+ expected = [
+ "create:1",
+ "test1",
+ "fin:1",
+ "create:2",
+ "test1",
+ "fin:2",
+ "create:mod1",
+ "test2",
+ "create:1",
+ "test3",
+ "fin:1",
+ "create:2",
+ "test3",
+ "fin:2",
+ "create:1",
+ "test4",
+ "fin:1",
+ "create:2",
+ "test4",
+ "fin:2",
+ "fin:mod1",
+ "create:mod2",
+ "test2",
+ "create:1",
+ "test3",
+ "fin:1",
+ "create:2",
+ "test3",
+ "fin:2",
+ "create:1",
+ "test4",
+ "fin:1",
+ "create:2",
+ "test4",
+ "fin:2",
+ "fin:mod2",
+ ]
+ import pprint
+
+ pprint.pprint(list(zip(values, expected)))
+ assert values == expected
+
+ def test_parametrized_fixture_teardown_order(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(params=[1,2], scope="class")
+ def param1(request):
+ return request.param
+
+ values = []
+
+ class TestClass(object):
+ @classmethod
+ @pytest.fixture(scope="class", autouse=True)
+ def setup1(self, request, param1):
+ values.append(1)
+ request.addfinalizer(self.teardown1)
+ @classmethod
+ def teardown1(self):
+ assert values.pop() == 1
+ @pytest.fixture(scope="class", autouse=True)
+ def setup2(self, request, param1):
+ values.append(2)
+ request.addfinalizer(self.teardown2)
+ @classmethod
+ def teardown2(self):
+ assert values.pop() == 2
+ def test(self):
+ pass
+
+ def test_finish():
+ assert not values
+ """
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ """
+ *3 passed*
+ """
+ )
+ result.stdout.no_fnmatch_line("*error*")
+
+ def test_fixture_finalizer(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ import sys
+
+ @pytest.fixture
+ def browser(request):
+
+ def finalize():
+                    sys.stdout.write('Finalized')
+ request.addfinalizer(finalize)
+ return {}
+ """
+ )
+ b = pytester.mkdir("subdir")
+ b.joinpath("test_overridden_fixture_finalizer.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def browser(browser):
+ browser['visited'] = True
+ return browser
+
+ def test_browser(browser):
+ assert browser['visited'] is True
+ """
+ )
+ )
+        result = pytester.runpytest("-s")
+        result.stdout.fnmatch_lines(["*Finalized*"])
+
+ def test_class_scope_with_normal_tests(self, pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import pytest
+
+ class Box(object):
+ value = 0
+
+ @pytest.fixture(scope='class')
+ def a(request):
+ Box.value += 1
+ return Box.value
+
+ def test_a(a):
+ assert a == 1
+
+ class Test1(object):
+ def test_b(self, a):
+ assert a == 2
+
+ class Test2(object):
+ def test_c(self, a):
+ assert a == 3"""
+ )
+ reprec = pytester.inline_run(testpath)
+ for test in ["test_a", "test_b", "test_c"]:
+ assert reprec.matchreport(test).passed
+
+ def test_request_is_clean(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(params=[1, 2])
+ def fix(request):
+ request.addfinalizer(lambda: values.append(request.param))
+ def test_fix(fix):
+ pass
+ """
+ )
+ reprec = pytester.inline_run("-s")
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
+ assert values == [1, 2]
+
+ def test_parametrize_separated_lifecycle(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ values = []
+ @pytest.fixture(scope="module", params=[1, 2])
+ def arg(request):
+ x = request.param
+ request.addfinalizer(lambda: values.append("fin%s" % x))
+ return request.param
+ def test_1(arg):
+ values.append(arg)
+ def test_2(arg):
+ values.append(arg)
+ """
+ )
+ reprec = pytester.inline_run("-vs")
+ reprec.assertoutcome(passed=4)
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
+ import pprint
+
+ pprint.pprint(values)
+ # assert len(values) == 6
+ assert values[0] == values[1] == 1
+ assert values[2] == "fin1"
+ assert values[3] == values[4] == 2
+ assert values[5] == "fin2"
+
+ def test_parametrize_function_scoped_finalizers_called(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope="function", params=[1, 2])
+ def arg(request):
+ x = request.param
+ request.addfinalizer(lambda: values.append("fin%s" % x))
+ return request.param
+
+ values = []
+ def test_1(arg):
+ values.append(arg)
+ def test_2(arg):
+ values.append(arg)
+ def test_3():
+ assert len(values) == 8
+ assert values == [1, "fin1", 2, "fin2", 1, "fin1", 2, "fin2"]
+ """
+ )
+ reprec = pytester.inline_run("-v")
+ reprec.assertoutcome(passed=5)
+
+ @pytest.mark.parametrize("scope", ["session", "function", "module"])
+ def test_finalizer_order_on_parametrization(
+ self, scope, pytester: Pytester
+ ) -> None:
+ """#246"""
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+
+ @pytest.fixture(scope=%(scope)r, params=["1"])
+ def fix1(request):
+ return request.param
+
+ @pytest.fixture(scope=%(scope)r)
+ def fix2(request, base):
+ def cleanup_fix2():
+ assert not values, "base should not have been finalized"
+ request.addfinalizer(cleanup_fix2)
+
+ @pytest.fixture(scope=%(scope)r)
+ def base(request, fix1):
+ def cleanup_base():
+ values.append("fin_base")
+ print("finalizing base")
+ request.addfinalizer(cleanup_base)
+
+ def test_begin():
+ pass
+ def test_baz(base, fix2):
+ pass
+ def test_other():
+ pass
+ """
+ % {"scope": scope}
+ )
+ reprec = pytester.inline_run("-lvs")
+ reprec.assertoutcome(passed=3)
+
+ def test_class_scope_parametrization_ordering(self, pytester: Pytester) -> None:
+ """#396"""
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ @pytest.fixture(params=["John", "Doe"], scope="class")
+ def human(request):
+ request.addfinalizer(lambda: values.append("fin %s" % request.param))
+ return request.param
+
+ class TestGreetings(object):
+ def test_hello(self, human):
+ values.append("test_hello")
+
+ class TestMetrics(object):
+ def test_name(self, human):
+ values.append("test_name")
+
+ def test_population(self, human):
+ values.append("test_population")
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=6)
+ values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
+ assert values == [
+ "test_hello",
+ "fin John",
+ "test_hello",
+ "fin Doe",
+ "test_name",
+ "test_population",
+ "fin John",
+ "test_name",
+ "test_population",
+ "fin Doe",
+ ]
+
+ def test_parametrize_setup_function(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope="module", params=[1, 2])
+ def arg(request):
+ return request.param
+
+ @pytest.fixture(scope="module", autouse=True)
+ def mysetup(request, arg):
+ request.addfinalizer(lambda: values.append("fin%s" % arg))
+ values.append("setup%s" % arg)
+
+ values = []
+ def test_1(arg):
+ values.append(arg)
+ def test_2(arg):
+ values.append(arg)
+ def test_3():
+ import pprint
+ pprint.pprint(values)
+ if arg == 1:
+ assert values == ["setup1", 1, 1, ]
+ elif arg == 2:
+ assert values == ["setup1", 1, 1, "fin1",
+ "setup2", 2, 2, ]
+
+ """
+ )
+ reprec = pytester.inline_run("-v")
+ reprec.assertoutcome(passed=6)
+
+ def test_fixture_marked_function_not_collected_as_test(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def test_app():
+ return 1
+
+ def test_something(test_app):
+ assert test_app == 1
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_params_and_ids(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[object(), object()],
+ ids=['alpha', 'beta'])
+ def fix(request):
+ return request.param
+
+ def test_foo(fix):
+ assert 1
+ """
+ )
+ res = pytester.runpytest("-v")
+ res.stdout.fnmatch_lines(["*test_foo*alpha*", "*test_foo*beta*"])
+
+ def test_params_and_ids_yieldfixture(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[object(), object()], ids=['alpha', 'beta'])
+ def fix(request):
+ yield request.param
+
+ def test_foo(fix):
+ assert 1
+ """
+ )
+ res = pytester.runpytest("-v")
+ res.stdout.fnmatch_lines(["*test_foo*alpha*", "*test_foo*beta*"])
+
+ def test_deterministic_fixture_collection(
+ self, pytester: Pytester, monkeypatch
+ ) -> None:
+ """#920"""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope="module",
+ params=["A",
+ "B",
+ "C"])
+ def A(request):
+ return request.param
+
+ @pytest.fixture(scope="module",
+ params=["DDDDDDDDD", "EEEEEEEEEEEE", "FFFFFFFFFFF", "banansda"])
+ def B(request, A):
+ return request.param
+
+ def test_foo(B):
+                # Something funky is going on here: despite the fixed
+                # PYTHONHASHSEED values, what gets collected sometimes
+                # produced unexpected passes; hashing B seems to help.
+ assert hash(B) or True
+ """
+ )
+ monkeypatch.setenv("PYTHONHASHSEED", "1")
+ out1 = pytester.runpytest_subprocess("-v")
+ monkeypatch.setenv("PYTHONHASHSEED", "2")
+ out2 = pytester.runpytest_subprocess("-v")
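+        # a different hash seed would perturb any ordering that relied on
+        # hash randomization; identical -v output for both runs shows the
+        # parametrized fixture collection order is deterministic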
+ output1 = [
+ line
+ for line in out1.outlines
+ if line.startswith("test_deterministic_fixture_collection.py::test_foo")
+ ]
+ output2 = [
+ line
+ for line in out2.outlines
+ if line.startswith("test_deterministic_fixture_collection.py::test_foo")
+ ]
+ assert len(output1) == 12
+ assert output1 == output2
+
+
+class TestRequestScopeAccess:
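+    # each row gives: fixture scope, request attributes expected to be
+    # present, and attributes whose access should raise AttributeError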
+ pytestmark = pytest.mark.parametrize(
+ ("scope", "ok", "error"),
+ [
+ ["session", "", "path class function module"],
+ ["module", "module path", "cls function"],
+ ["class", "module path cls", "function"],
+ ["function", "module path cls function", ""],
+ ],
+ )
+
+ def test_setup(self, pytester: Pytester, scope, ok, error) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope=%r, autouse=True)
+ def myscoped(request):
+ for x in %r:
+ assert hasattr(request, x)
+ for x in %r:
+ pytest.raises(AttributeError, lambda:
+ getattr(request, x))
+ assert request.session
+ assert request.config
+ def test_func():
+ pass
+ """
+ % (scope, ok.split(), error.split())
+ )
+ reprec = pytester.inline_run("-l")
+ reprec.assertoutcome(passed=1)
+
+ def test_funcarg(self, pytester: Pytester, scope, ok, error) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope=%r)
+ def arg(request):
+ for x in %r:
+ assert hasattr(request, x)
+ for x in %r:
+ pytest.raises(AttributeError, lambda:
+ getattr(request, x))
+ assert request.session
+ assert request.config
+ def test_func(arg):
+ pass
+ """
+ % (scope, ok.split(), error.split())
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+class TestErrors:
+ def test_subfactory_missing_funcarg(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture()
+ def gen(qwe123):
+ return 1
+ def test_something(gen):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(
+ ["*def gen(qwe123):*", "*fixture*qwe123*not found*", "*1 error*"]
+ )
+
+ def test_issue498_fixture_finalizer_failing(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def fix1(request):
+ def f():
+ raise KeyError
+ request.addfinalizer(f)
+ return object()
+
+ values = []
+ def test_1(fix1):
+ values.append(fix1)
+ def test_2(fix1):
+ values.append(fix1)
+ def test_3():
+ assert values[0] != values[1]
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *ERROR*teardown*test_1*
+ *KeyError*
+ *ERROR*teardown*test_2*
+ *KeyError*
+ *3 pass*2 errors*
+ """
+ )
+
+ def test_setupfunc_missing_funcarg(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(autouse=True)
+ def gen(qwe123):
+ return 1
+ def test_something():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(
+ ["*def gen(qwe123):*", "*fixture*qwe123*not found*", "*1 error*"]
+ )
+
+
+class TestShowFixtures:
+ def test_funcarg_compat(self, pytester: Pytester) -> None:
+ config = pytester.parseconfigure("--funcargs")
+ assert config.option.showfixtures
+
+ def test_show_fixtures(self, pytester: Pytester) -> None:
+ result = pytester.runpytest("--fixtures")
+ result.stdout.fnmatch_lines(
+ [
+ "tmp_path_factory [[]session scope[]] -- .../_pytest/tmpdir.py:*",
+ "*for the test session*",
+ "tmp_path -- .../_pytest/tmpdir.py:*",
+ "*temporary directory*",
+ ]
+ )
+
+ def test_show_fixtures_verbose(self, pytester: Pytester) -> None:
+ result = pytester.runpytest("--fixtures", "-v")
+ result.stdout.fnmatch_lines(
+ [
+ "tmp_path_factory [[]session scope[]] -- .../_pytest/tmpdir.py:*",
+ "*for the test session*",
+ "tmp_path -- .../_pytest/tmpdir.py:*",
+ "*temporary directory*",
+ ]
+ )
+
+ def test_show_fixtures_testmodule(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def _arg0():
+ """ hidden """
+ @pytest.fixture
+ def arg1():
+ """ hello world """
+ '''
+ )
+ result = pytester.runpytest("--fixtures", p)
+ result.stdout.fnmatch_lines(
+ """
+ *tmp_path -- *
+ *fixtures defined from*
+ *arg1 -- test_show_fixtures_testmodule.py:6*
+ *hello world*
+ """
+ )
+ result.stdout.no_fnmatch_line("*arg0*")
+
+ @pytest.mark.parametrize("testmod", [True, False])
+ def test_show_fixtures_conftest(self, pytester: Pytester, testmod) -> None:
+ pytester.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """ hello world """
+ '''
+ )
+ if testmod:
+ pytester.makepyfile(
+ """
+ def test_hello():
+ pass
+ """
+ )
+ result = pytester.runpytest("--fixtures")
+ result.stdout.fnmatch_lines(
+ """
+ *tmp_path*
+ *fixtures defined from*conftest*
+ *arg1*
+ *hello world*
+ """
+ )
+
+ def test_show_fixtures_trimmed_doc(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ textwrap.dedent(
+ '''\
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """
+ line1
+ line2
+
+ """
+ @pytest.fixture
+ def arg2():
+ """
+ line1
+ line2
+
+ """
+ '''
+ )
+ )
+ result = pytester.runpytest("--fixtures", p)
+ result.stdout.fnmatch_lines(
+ textwrap.dedent(
+ """\
+ * fixtures defined from test_show_fixtures_trimmed_doc *
+ arg2 -- test_show_fixtures_trimmed_doc.py:10
+ line1
+ line2
+ arg1 -- test_show_fixtures_trimmed_doc.py:3
+ line1
+ line2
+ """
+ )
+ )
+
+ def test_show_fixtures_indented_doc(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ textwrap.dedent(
+ '''\
+ import pytest
+ @pytest.fixture
+ def fixture1():
+ """
+ line1
+ indented line
+ """
+ '''
+ )
+ )
+ result = pytester.runpytest("--fixtures", p)
+ result.stdout.fnmatch_lines(
+ textwrap.dedent(
+ """\
+ * fixtures defined from test_show_fixtures_indented_doc *
+ fixture1 -- test_show_fixtures_indented_doc.py:3
+ line1
+ indented line
+ """
+ )
+ )
+
+ def test_show_fixtures_indented_doc_first_line_unindented(
+ self, pytester: Pytester
+ ) -> None:
+ p = pytester.makepyfile(
+ textwrap.dedent(
+ '''\
+ import pytest
+ @pytest.fixture
+ def fixture1():
+ """line1
+ line2
+ indented line
+ """
+ '''
+ )
+ )
+ result = pytester.runpytest("--fixtures", p)
+ result.stdout.fnmatch_lines(
+ textwrap.dedent(
+ """\
+ * fixtures defined from test_show_fixtures_indented_doc_first_line_unindented *
+ fixture1 -- test_show_fixtures_indented_doc_first_line_unindented.py:3
+ line1
+ line2
+ indented line
+ """
+ )
+ )
+
+ def test_show_fixtures_indented_in_class(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ textwrap.dedent(
+ '''\
+ import pytest
+ class TestClass(object):
+ @pytest.fixture
+ def fixture1(self):
+ """line1
+ line2
+ indented line
+ """
+ '''
+ )
+ )
+ result = pytester.runpytest("--fixtures", p)
+ result.stdout.fnmatch_lines(
+ textwrap.dedent(
+ """\
+ * fixtures defined from test_show_fixtures_indented_in_class *
+ fixture1 -- test_show_fixtures_indented_in_class.py:4
+ line1
+ line2
+ indented line
+ """
+ )
+ )
+
+ def test_show_fixtures_different_files(self, pytester: Pytester) -> None:
+ """`--fixtures` only shows fixtures from first file (#833)."""
+ pytester.makepyfile(
+ test_a='''
+ import pytest
+
+ @pytest.fixture
+ def fix_a():
+ """Fixture A"""
+ pass
+
+ def test_a(fix_a):
+ pass
+ '''
+ )
+ pytester.makepyfile(
+ test_b='''
+ import pytest
+
+ @pytest.fixture
+ def fix_b():
+ """Fixture B"""
+ pass
+
+ def test_b(fix_b):
+ pass
+ '''
+ )
+ result = pytester.runpytest("--fixtures")
+ result.stdout.fnmatch_lines(
+ """
+ * fixtures defined from test_a *
+ fix_a -- test_a.py:4
+ Fixture A
+
+ * fixtures defined from test_b *
+ fix_b -- test_b.py:4
+ Fixture B
+ """
+ )
+
+ def test_show_fixtures_with_same_name(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """Hello World in conftest.py"""
+ return "Hello World"
+ '''
+ )
+ pytester.makepyfile(
+ """
+ def test_foo(arg1):
+ assert arg1 == "Hello World"
+ """
+ )
+ pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """Hi from test module"""
+ return "Hi"
+ def test_bar(arg1):
+ assert arg1 == "Hi"
+ '''
+ )
+ result = pytester.runpytest("--fixtures")
+ result.stdout.fnmatch_lines(
+ """
+ * fixtures defined from conftest *
+ arg1 -- conftest.py:3
+ Hello World in conftest.py
+
+ * fixtures defined from test_show_fixtures_with_same_name *
+ arg1 -- test_show_fixtures_with_same_name.py:3
+ Hi from test module
+ """
+ )
+
+ def test_fixture_disallow_twice(self):
+ """Test that applying @pytest.fixture twice generates an error (#2334)."""
+ with pytest.raises(ValueError):
+
+ @pytest.fixture
+ @pytest.fixture
+ def foo():
+ raise NotImplementedError()
+
+
+class TestContextManagerFixtureFuncs:
+ def test_simple(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def arg1():
+ print("setup")
+ yield 1
+ print("teardown")
+ def test_1(arg1):
+ print("test1", arg1)
+ def test_2(arg1):
+ print("test2", arg1)
+ assert 0
+ """
+ )
+ result = pytester.runpytest("-s")
+ result.stdout.fnmatch_lines(
+ """
+ *setup*
+ *test1 1*
+ *teardown*
+ *setup*
+ *test2 1*
+ *teardown*
+ """
+ )
+
+ def test_scoped(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="module")
+ def arg1():
+ print("setup")
+ yield 1
+ print("teardown")
+ def test_1(arg1):
+ print("test1", arg1)
+ def test_2(arg1):
+ print("test2", arg1)
+ """
+ )
+ result = pytester.runpytest("-s")
+ result.stdout.fnmatch_lines(
+ """
+ *setup*
+ *test1 1*
+ *test2 1*
+ *teardown*
+ """
+ )
+
+ def test_setup_exception(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="module")
+ def arg1():
+ pytest.fail("setup")
+ yield 1
+ def test_1(arg1):
+ pass
+ """
+ )
+ result = pytester.runpytest("-s")
+ result.stdout.fnmatch_lines(
+ """
+ *pytest.fail*setup*
+ *1 error*
+ """
+ )
+
+ def test_teardown_exception(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="module")
+ def arg1():
+ yield 1
+ pytest.fail("teardown")
+ def test_1(arg1):
+ pass
+ """
+ )
+ result = pytester.runpytest("-s")
+ result.stdout.fnmatch_lines(
+ """
+ *pytest.fail*teardown*
+ *1 passed*1 error*
+ """
+ )
+
+ def test_yields_more_than_one(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="module")
+ def arg1():
+ yield 1
+ yield 2
+ def test_1(arg1):
+ pass
+ """
+ )
+ result = pytester.runpytest("-s")
+ result.stdout.fnmatch_lines(
+ """
+ *fixture function*
+ *test_yields*:2*
+ """
+ )
+
+ def test_custom_name(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(name='meow')
+ def arg1():
+ return 'mew'
+ def test_1(meow):
+ print(meow)
+ """
+ )
+ result = pytester.runpytest("-s")
+ result.stdout.fnmatch_lines(["*mew*"])
+
+
+class TestParameterizedSubRequest:
+ def test_call_from_fixture(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_call_from_fixture="""
+ import pytest
+
+ @pytest.fixture(params=[0, 1, 2])
+ def fix_with_param(request):
+ return request.param
+
+ @pytest.fixture
+ def get_named_fixture(request):
+ return request.getfixturevalue('fix_with_param')
+
+ def test_foo(request, get_named_fixture):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "The requested fixture has no parameter defined for test:",
+ " test_call_from_fixture.py::test_foo",
+ "Requested fixture 'fix_with_param' defined in:",
+ "test_call_from_fixture.py:4",
+ "Requested here:",
+ "test_call_from_fixture.py:9",
+ "*1 error in*",
+ ]
+ )
+
+ def test_call_from_test(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_call_from_test="""
+ import pytest
+
+ @pytest.fixture(params=[0, 1, 2])
+ def fix_with_param(request):
+ return request.param
+
+ def test_foo(request):
+ request.getfixturevalue('fix_with_param')
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "The requested fixture has no parameter defined for test:",
+ " test_call_from_test.py::test_foo",
+ "Requested fixture 'fix_with_param' defined in:",
+ "test_call_from_test.py:4",
+ "Requested here:",
+ "test_call_from_test.py:8",
+ "*1 failed*",
+ ]
+ )
+
+ def test_external_fixture(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(params=[0, 1, 2])
+ def fix_with_param(request):
+ return request.param
+ """
+ )
+
+ pytester.makepyfile(
+ test_external_fixture="""
+ def test_foo(request):
+ request.getfixturevalue('fix_with_param')
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "The requested fixture has no parameter defined for test:",
+ " test_external_fixture.py::test_foo",
+ "",
+ "Requested fixture 'fix_with_param' defined in:",
+ "conftest.py:4",
+ "Requested here:",
+ "test_external_fixture.py:2",
+ "*1 failed*",
+ ]
+ )
+
+ def test_non_relative_path(self, pytester: Pytester) -> None:
+ tests_dir = pytester.mkdir("tests")
+ fixdir = pytester.mkdir("fixtures")
+ fixfile = fixdir.joinpath("fix.py")
+ fixfile.write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ @pytest.fixture(params=[0, 1, 2])
+ def fix_with_param(request):
+ return request.param
+ """
+ )
+ )
+
+ testfile = tests_dir.joinpath("test_foos.py")
+ testfile.write_text(
+ textwrap.dedent(
+ """\
+ from fix import fix_with_param
+
+ def test_foo(request):
+ request.getfixturevalue('fix_with_param')
+ """
+ )
+ )
+
+ os.chdir(tests_dir)
+ pytester.syspathinsert(fixdir)
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "The requested fixture has no parameter defined for test:",
+ " test_foos.py::test_foo",
+ "",
+ "Requested fixture 'fix_with_param' defined in:",
+ f"{fixfile}:4",
+ "Requested here:",
+ "test_foos.py:4",
+ "*1 failed*",
+ ]
+ )
+
+ # With non-overlapping rootdir, passing tests_dir.
+ rootdir = pytester.mkdir("rootdir")
+ os.chdir(rootdir)
+ result = pytester.runpytest("--rootdir", rootdir, tests_dir)
+ result.stdout.fnmatch_lines(
+ [
+ "The requested fixture has no parameter defined for test:",
+ " test_foos.py::test_foo",
+ "",
+ "Requested fixture 'fix_with_param' defined in:",
+ f"{fixfile}:4",
+ "Requested here:",
+ f"{testfile}:4",
+ "*1 failed*",
+ ]
+ )
+
+
+def test_pytest_fixture_setup_and_post_finalizer_hook(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_fixture_setup(fixturedef, request):
+ print('ROOT setup hook called for {0} from {1}'.format(fixturedef.argname, request.node.name))
+ def pytest_fixture_post_finalizer(fixturedef, request):
+ print('ROOT finalizer hook called for {0} from {1}'.format(fixturedef.argname, request.node.name))
+ """
+ )
+ pytester.makepyfile(
+ **{
+ "tests/conftest.py": """
+ def pytest_fixture_setup(fixturedef, request):
+ print('TESTS setup hook called for {0} from {1}'.format(fixturedef.argname, request.node.name))
+ def pytest_fixture_post_finalizer(fixturedef, request):
+ print('TESTS finalizer hook called for {0} from {1}'.format(fixturedef.argname, request.node.name))
+ """,
+ "tests/test_hooks.py": """
+ import pytest
+
+ @pytest.fixture()
+ def my_fixture():
+ return 'some'
+
+ def test_func(my_fixture):
+ print('TEST test_func')
+ assert my_fixture == 'some'
+ """,
+ }
+ )
+ result = pytester.runpytest("-s")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "*TESTS setup hook called for my_fixture from test_func*",
+ "*ROOT setup hook called for my_fixture from test_func*",
+ "*TEST test_func*",
+ "*TESTS finalizer hook called for my_fixture from test_func*",
+ "*ROOT finalizer hook called for my_fixture from test_func*",
+ ]
+ )
+
+
+class TestScopeOrdering:
+ """Class of tests that ensure fixtures are ordered based on their scopes (#2405)"""
+
+ @pytest.mark.parametrize("variant", ["mark", "autouse"])
+ def test_func_closure_module_auto(
+ self, pytester: Pytester, variant, monkeypatch
+ ) -> None:
+ """Semantically identical to the example posted in #2405 when ``use_mark=True``"""
+ monkeypatch.setenv("FIXTURE_ACTIVATION_VARIANT", variant)
+ pytester.makepyfile(
+ """
+ import warnings
+ import os
+ import pytest
+ VAR = 'FIXTURE_ACTIVATION_VARIANT'
+ VALID_VARS = ('autouse', 'mark')
+
+ VARIANT = os.environ.get(VAR)
+ if VARIANT is None or VARIANT not in VALID_VARS:
+                warnings.warn("{!r} is not in {}, assuming 'mark'".format(VARIANT, VALID_VARS))
+                VARIANT = 'mark'
+
+ @pytest.fixture(scope='module', autouse=VARIANT == 'autouse')
+ def m1(): pass
+
+            if VARIANT == 'mark':
+ pytestmark = pytest.mark.usefixtures('m1')
+
+ @pytest.fixture(scope='function', autouse=True)
+ def f1(): pass
+
+ def test_func(m1):
+ pass
+ """
+ )
+ items, _ = pytester.inline_genitems()
+ request = FixtureRequest(items[0], _ispytest=True)
+ assert request.fixturenames == "m1 f1".split()
+
+ def test_func_closure_with_native_fixtures(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ """Sanity check that verifies the order returned by the closures and the actual fixture execution order:
+ The execution order may differ because of fixture inter-dependencies.
+ """
+ monkeypatch.setattr(pytest, "FIXTURE_ORDER", [], raising=False)
+ pytester.makepyfile(
+ """
+ import pytest
+
+ FIXTURE_ORDER = pytest.FIXTURE_ORDER
+
+ @pytest.fixture(scope="session")
+ def s1():
+ FIXTURE_ORDER.append('s1')
+
+ @pytest.fixture(scope="package")
+ def p1():
+ FIXTURE_ORDER.append('p1')
+
+ @pytest.fixture(scope="module")
+ def m1():
+ FIXTURE_ORDER.append('m1')
+
+ @pytest.fixture(scope='session')
+ def my_tmp_path_factory():
+ FIXTURE_ORDER.append('my_tmp_path_factory')
+
+ @pytest.fixture
+ def my_tmp_path(my_tmp_path_factory):
+ FIXTURE_ORDER.append('my_tmp_path')
+
+ @pytest.fixture
+ def f1(my_tmp_path):
+ FIXTURE_ORDER.append('f1')
+
+ @pytest.fixture
+ def f2():
+ FIXTURE_ORDER.append('f2')
+
+ def test_foo(f1, p1, m1, f2, s1): pass
+ """
+ )
+ items, _ = pytester.inline_genitems()
+ request = FixtureRequest(items[0], _ispytest=True)
+ # order of fixtures based on their scope and position in the parameter list
+ assert (
+ request.fixturenames
+ == "s1 my_tmp_path_factory p1 m1 f1 f2 my_tmp_path".split()
+ )
+ pytester.runpytest()
+        # the actual execution order differs: a fixture's dependencies are created before it ("my_tmp_path" runs before "f1")
+ FIXTURE_ORDER = pytest.FIXTURE_ORDER # type: ignore[attr-defined]
+ assert FIXTURE_ORDER == "s1 my_tmp_path_factory p1 m1 my_tmp_path f1 f2".split()
+
+ def test_func_closure_module(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='module')
+ def m1(): pass
+
+ @pytest.fixture(scope='function')
+ def f1(): pass
+
+ def test_func(f1, m1):
+ pass
+ """
+ )
+ items, _ = pytester.inline_genitems()
+ request = FixtureRequest(items[0], _ispytest=True)
+ assert request.fixturenames == "m1 f1".split()
+
+ def test_func_closure_scopes_reordered(self, pytester: Pytester) -> None:
+ """Test ensures that fixtures are ordered by scope regardless of the order of the parameters, although
+ fixtures of same scope keep the declared order
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='session')
+ def s1(): pass
+
+ @pytest.fixture(scope='module')
+ def m1(): pass
+
+ @pytest.fixture(scope='function')
+ def f1(): pass
+
+ @pytest.fixture(scope='function')
+ def f2(): pass
+
+ class Test:
+
+ @pytest.fixture(scope='class')
+ def c1(cls): pass
+
+ def test_func(self, f2, f1, c1, m1, s1):
+ pass
+ """
+ )
+ items, _ = pytester.inline_genitems()
+ request = FixtureRequest(items[0], _ispytest=True)
+ assert request.fixturenames == "s1 m1 c1 f2 f1".split()
+
+ def test_func_closure_same_scope_closer_root_first(
+ self, pytester: Pytester
+ ) -> None:
+ """Auto-use fixtures of same scope are ordered by closer-to-root first"""
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(scope='module', autouse=True)
+ def m_conf(): pass
+ """
+ )
+ pytester.makepyfile(
+ **{
+ "sub/conftest.py": """
+ import pytest
+
+ @pytest.fixture(scope='package', autouse=True)
+ def p_sub(): pass
+
+ @pytest.fixture(scope='module', autouse=True)
+ def m_sub(): pass
+ """,
+ "sub/__init__.py": "",
+ "sub/test_func.py": """
+ import pytest
+
+ @pytest.fixture(scope='module', autouse=True)
+ def m_test(): pass
+
+ @pytest.fixture(scope='function')
+ def f1(): pass
+
+ def test_func(m_test, f1):
+ pass
+ """,
+ }
+ )
+ items, _ = pytester.inline_genitems()
+ request = FixtureRequest(items[0], _ispytest=True)
+ assert request.fixturenames == "p_sub m_conf m_sub m_test f1".split()
+
+ def test_func_closure_all_scopes_complex(self, pytester: Pytester) -> None:
+ """Complex test involving all scopes and mixing autouse with normal fixtures"""
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(scope='session')
+ def s1(): pass
+
+ @pytest.fixture(scope='package', autouse=True)
+ def p1(): pass
+ """
+ )
+ pytester.makepyfile(**{"__init__.py": ""})
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='module', autouse=True)
+ def m1(): pass
+
+ @pytest.fixture(scope='module')
+ def m2(s1): pass
+
+ @pytest.fixture(scope='function')
+ def f1(): pass
+
+ @pytest.fixture(scope='function')
+ def f2(): pass
+
+ class Test:
+
+ @pytest.fixture(scope='class', autouse=True)
+ def c1(self):
+ pass
+
+ def test_func(self, f2, f1, m2):
+ pass
+ """
+ )
+ items, _ = pytester.inline_genitems()
+ request = FixtureRequest(items[0], _ispytest=True)
+ assert request.fixturenames == "s1 p1 m1 m2 c1 f2 f1".split()
+
+ def test_multiple_packages(self, pytester: Pytester) -> None:
+ """Complex test involving multiple package fixtures. Make sure teardowns
+ are executed in order.
+ .
+ └── root
+ ├── __init__.py
+ ├── sub1
+ │ ├── __init__.py
+ │ ├── conftest.py
+ │ └── test_1.py
+ └── sub2
+ ├── __init__.py
+ ├── conftest.py
+ └── test_2.py
+ """
+ root = pytester.mkdir("root")
+ root.joinpath("__init__.py").write_text("values = []")
+ sub1 = root.joinpath("sub1")
+ sub1.mkdir()
+ sub1.joinpath("__init__.py").touch()
+ sub1.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ from .. import values
+ @pytest.fixture(scope="package")
+ def fix():
+ values.append("pre-sub1")
+ yield values
+ assert values.pop() == "pre-sub1"
+ """
+ )
+ )
+ sub1.joinpath("test_1.py").write_text(
+ textwrap.dedent(
+ """\
+ from .. import values
+ def test_1(fix):
+ assert values == ["pre-sub1"]
+ """
+ )
+ )
+ sub2 = root.joinpath("sub2")
+ sub2.mkdir()
+ sub2.joinpath("__init__.py").touch()
+ sub2.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ from .. import values
+ @pytest.fixture(scope="package")
+ def fix():
+ values.append("pre-sub2")
+ yield values
+ assert values.pop() == "pre-sub2"
+ """
+ )
+ )
+ sub2.joinpath("test_2.py").write_text(
+ textwrap.dedent(
+ """\
+ from .. import values
+ def test_2(fix):
+ assert values == ["pre-sub2"]
+ """
+ )
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_class_fixture_self_instance(self, pytester: Pytester) -> None:
+ """Check that plugin classes which implement fixtures receive the plugin instance
+ as self (see #2270).
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+
+ def pytest_configure(config):
+ config.pluginmanager.register(MyPlugin())
+
+ class MyPlugin():
+ def __init__(self):
+ self.arg = 1
+
+ @pytest.fixture(scope='function')
+ def myfix(self):
+ assert isinstance(self, MyPlugin)
+ return self.arg
+ """
+ )
+
+ pytester.makepyfile(
+ """
+ class TestClass(object):
+ def test_1(self, myfix):
+ assert myfix == 1
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+def test_call_fixture_function_error():
+ """Check if an error is raised if a fixture function is called directly (#4545)"""
+
+ @pytest.fixture
+ def fix():
+ raise NotImplementedError()
+
+ with pytest.raises(pytest.fail.Exception):
+ assert fix() == 1
+
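+
+# NOTE (editorial sketch, not part of the upstream suite): if the value is also
+# needed outside of fixtures, a common pattern is to keep the implementation in a
+# plain function and let the fixture merely call it; all names below are
+# illustrative only.
+def _compute_value_sketch():
+    return 1
+
+
+@pytest.fixture(name="computed_value_sketch")
+def computed_value_sketch_fixture():
+    return _compute_value_sketch()
+
+
+def test_computed_value_sketch(computed_value_sketch) -> None:
+    # The fixture is requested by its registered name, while _compute_value_sketch()
+    # remains an ordinary, directly callable function.
+    assert computed_value_sketch == _compute_value_sketch() == 1
+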
+
+def test_fixture_param_shadowing(pytester: Pytester) -> None:
+ """Parametrized arguments would be shadowed if a fixture with the same name also exists (#5036)"""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=['a', 'b'])
+ def argroot(request):
+ return request.param
+
+ @pytest.fixture
+ def arg(argroot):
+ return argroot
+
+ # This should only be parametrized directly
+ @pytest.mark.parametrize("arg", [1])
+ def test_direct(arg):
+ assert arg == 1
+
+ # This should be parametrized based on the fixtures
+ def test_normal_fixture(arg):
+ assert isinstance(arg, str)
+
+ # Indirect should still work:
+
+ @pytest.fixture
+ def arg2(request):
+ return 2*request.param
+
+ @pytest.mark.parametrize("arg2", [1], indirect=True)
+ def test_indirect(arg2):
+ assert arg2 == 2
+ """
+ )
+    # test_direct must run only once, with its directly supplied parameter; the other tests are parametrized by the fixtures.
+ result = pytester.runpytest("-v")
+ result.assert_outcomes(passed=4)
+ result.stdout.fnmatch_lines(["*::test_direct[[]1[]]*"])
+ result.stdout.fnmatch_lines(["*::test_normal_fixture[[]a[]]*"])
+ result.stdout.fnmatch_lines(["*::test_normal_fixture[[]b[]]*"])
+ result.stdout.fnmatch_lines(["*::test_indirect[[]1[]]*"])
+
+
+def test_fixture_named_request(pytester: Pytester) -> None:
+ pytester.copy_example("fixtures/test_fixture_named_request.py")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*'request' is a reserved word for fixtures, use another name:",
+ " *test_fixture_named_request.py:5",
+ ]
+ )
+
+
+def test_indirect_fixture_does_not_break_scope(pytester: Pytester) -> None:
+ """Ensure that fixture scope is respected when using indirect fixtures (#570)"""
+ pytester.makepyfile(
+ """
+ import pytest
+ instantiated = []
+
+ @pytest.fixture(scope="session")
+ def fixture_1(request):
+ instantiated.append(("fixture_1", request.param))
+
+
+ @pytest.fixture(scope="session")
+ def fixture_2(request):
+ instantiated.append(("fixture_2", request.param))
+
+
+ scenarios = [
+ ("A", "a1"),
+ ("A", "a2"),
+ ("B", "b1"),
+ ("B", "b2"),
+ ("C", "c1"),
+ ("C", "c2"),
+ ]
+
+ @pytest.mark.parametrize(
+ "fixture_1,fixture_2", scenarios, indirect=["fixture_1", "fixture_2"]
+ )
+ def test_create_fixtures(fixture_1, fixture_2):
+ pass
+
+
+ def test_check_fixture_instantiations():
+ assert instantiated == [
+ ('fixture_1', 'A'),
+ ('fixture_2', 'a1'),
+ ('fixture_2', 'a2'),
+ ('fixture_1', 'B'),
+ ('fixture_2', 'b1'),
+ ('fixture_2', 'b2'),
+ ('fixture_1', 'C'),
+ ('fixture_2', 'c1'),
+ ('fixture_2', 'c2'),
+ ]
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(passed=7)
+
+
+def test_fixture_parametrization_nparray(pytester: Pytester) -> None:
+ pytest.importorskip("numpy")
+
+ pytester.makepyfile(
+ """
+ from numpy import linspace
+ from pytest import fixture
+
+ @fixture(params=linspace(1, 10, 10))
+ def value(request):
+ return request.param
+
+ def test_bug(value):
+ assert value == value
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(passed=10)
+
+
+def test_fixture_arg_ordering(pytester: Pytester) -> None:
+ """
+    This test documents the creation order of fixtures that share a scope but have no
+    explicit dependencies between them. Users should make such dependencies explicit,
+    but they often rely on this order, so this test exists to catch regressions in it.
+    See #6540 and #6492.
+ """
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+
+ suffixes = []
+
+ @pytest.fixture
+ def fix_1(): suffixes.append("fix_1")
+ @pytest.fixture
+ def fix_2(): suffixes.append("fix_2")
+ @pytest.fixture
+ def fix_3(): suffixes.append("fix_3")
+ @pytest.fixture
+ def fix_4(): suffixes.append("fix_4")
+ @pytest.fixture
+ def fix_5(): suffixes.append("fix_5")
+
+ @pytest.fixture
+ def fix_combined(fix_1, fix_2, fix_3, fix_4, fix_5): pass
+
+ def test_suffix(fix_combined):
+ assert suffixes == ["fix_1", "fix_2", "fix_3", "fix_4", "fix_5"]
+ """
+ )
+ result = pytester.runpytest("-vv", str(p1))
+ assert result.ret == 0
+
+
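+# NOTE (editorial sketch, not part of the upstream suite): the docstring of
+# test_fixture_arg_ordering recommends making ordering dependencies explicit;
+# that simply means requesting one fixture from the other, as sketched below
+# (all names are illustrative only).
+def test_fixture_arg_ordering_explicit_sketch(pytester: Pytester) -> None:
+    p1 = pytester.makepyfile(
+        """
+        import pytest
+
+        order = []
+
+        @pytest.fixture
+        def first(): order.append("first")
+
+        @pytest.fixture
+        def second(first): order.append("second")
+
+        def test_order(second):
+            assert order == ["first", "second"]
+        """
+    )
+    result = pytester.runpytest(str(p1))
+    assert result.ret == 0
+
+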
+def test_yield_fixture_with_no_value(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(name='custom')
+ def empty_yield():
+ if False:
+ yield
+
+ def test_fixt(custom):
+ pass
+ """
+ )
+ expected = "E ValueError: custom did not yield a value"
+ result = pytester.runpytest()
+ result.assert_outcomes(errors=1)
+ result.stdout.fnmatch_lines([expected])
+ assert result.ret == ExitCode.TESTS_FAILED
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/python/integration.py b/testing/web-platform/tests/tools/third_party/pytest/testing/python/integration.py
new file mode 100644
index 0000000000..d138b72663
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/python/integration.py
@@ -0,0 +1,503 @@
+from typing import Any
+
+import pytest
+from _pytest import runner
+from _pytest._code import getfslineno
+from _pytest.fixtures import getfixturemarker
+from _pytest.pytester import Pytester
+from _pytest.python import Function
+
+
+class TestOEJSKITSpecials:
+ def test_funcarg_non_pycollectobj(
+ self, pytester: Pytester, recwarn
+ ) -> None: # rough jstests usage
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_pycollect_makeitem(collector, name, obj):
+ if name == "MyClass":
+ return MyCollector.from_parent(collector, name=name)
+ class MyCollector(pytest.Collector):
+ def reportinfo(self):
+ return self.path, 3, "xyz"
+ """
+ )
+ modcol = pytester.getmodulecol(
+ """
+ import pytest
+ @pytest.fixture
+ def arg1(request):
+ return 42
+ class MyClass(object):
+ pass
+ """
+ )
+ # this hook finds funcarg factories
+ rep = runner.collect_one_node(collector=modcol)
+ # TODO: Don't treat as Any.
+ clscol: Any = rep.result[0]
+ clscol.obj = lambda arg1: None
+ clscol.funcargs = {}
+ pytest._fillfuncargs(clscol)
+ assert clscol.funcargs["arg1"] == 42
+
+ def test_autouse_fixture(
+ self, pytester: Pytester, recwarn
+ ) -> None: # rough jstests usage
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_pycollect_makeitem(collector, name, obj):
+ if name == "MyClass":
+ return MyCollector.from_parent(collector, name=name)
+ class MyCollector(pytest.Collector):
+ def reportinfo(self):
+ return self.path, 3, "xyz"
+ """
+ )
+ modcol = pytester.getmodulecol(
+ """
+ import pytest
+ @pytest.fixture(autouse=True)
+ def hello():
+ pass
+ @pytest.fixture
+ def arg1(request):
+ return 42
+ class MyClass(object):
+ pass
+ """
+ )
+ # this hook finds funcarg factories
+ rep = runner.collect_one_node(modcol)
+ # TODO: Don't treat as Any.
+ clscol: Any = rep.result[0]
+ clscol.obj = lambda: None
+ clscol.funcargs = {}
+ pytest._fillfuncargs(clscol)
+ assert not clscol.funcargs
+
+
+def test_wrapped_getfslineno() -> None:
+ def func():
+ pass
+
+ def wrap(f):
+ func.__wrapped__ = f # type: ignore
+ func.patchings = ["qwe"] # type: ignore
+ return func
+
+ @wrap
+ def wrapped_func(x, y, z):
+ pass
+
+ fs, lineno = getfslineno(wrapped_func)
+ fs2, lineno2 = getfslineno(wrap)
+ assert lineno > lineno2, "getfslineno does not unwrap correctly"
+
+
+class TestMockDecoration:
+ def test_wrapped_getfuncargnames(self) -> None:
+ from _pytest.compat import getfuncargnames
+
+ def wrap(f):
+ def func():
+ pass
+
+ func.__wrapped__ = f # type: ignore
+ return func
+
+ @wrap
+ def f(x):
+ pass
+
+ values = getfuncargnames(f)
+ assert values == ("x",)
+
+ def test_getfuncargnames_patching(self):
+ from _pytest.compat import getfuncargnames
+ from unittest.mock import patch
+
+ class T:
+ def original(self, x, y, z):
+ pass
+
+ @patch.object(T, "original")
+ def f(x, y, z):
+ pass
+
+ values = getfuncargnames(f)
+ assert values == ("y", "z")
+
+ def test_unittest_mock(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import unittest.mock
+ class T(unittest.TestCase):
+ @unittest.mock.patch("os.path.abspath")
+ def test_hello(self, abspath):
+ import os
+ os.path.abspath("hello")
+ abspath.assert_any_call("hello")
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_unittest_mock_and_fixture(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import os.path
+ import unittest.mock
+ import pytest
+
+ @pytest.fixture
+ def inject_me():
+ pass
+
+ @unittest.mock.patch.object(os.path, "abspath",
+ new=unittest.mock.MagicMock)
+ def test_hello(inject_me):
+ import os
+ os.path.abspath("hello")
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_unittest_mock_and_pypi_mock(self, pytester: Pytester) -> None:
+ pytest.importorskip("mock", "1.0.1")
+ pytester.makepyfile(
+ """
+ import mock
+ import unittest.mock
+ class TestBoth(object):
+ @unittest.mock.patch("os.path.abspath")
+ def test_hello(self, abspath):
+ import os
+ os.path.abspath("hello")
+ abspath.assert_any_call("hello")
+
+ @mock.patch("os.path.abspath")
+ def test_hello_mock(self, abspath):
+ import os
+ os.path.abspath("hello")
+ abspath.assert_any_call("hello")
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_mock_sentinel_check_against_numpy_like(self, pytester: Pytester) -> None:
+ """Ensure our function that detects mock arguments compares against sentinels using
+ identity to circumvent objects which can't be compared with equality against others
+ in a truth context, like with numpy arrays (#5606).
+ """
+ pytester.makepyfile(
+ dummy="""
+ class NumpyLike:
+ def __init__(self, value):
+ self.value = value
+ def __eq__(self, other):
+ raise ValueError("like numpy, cannot compare against others for truth")
+ FOO = NumpyLike(10)
+ """
+ )
+ pytester.makepyfile(
+ """
+ from unittest.mock import patch
+ import dummy
+ class Test(object):
+ @patch("dummy.FOO", new=dummy.NumpyLike(50))
+ def test_hello(self):
+ assert dummy.FOO.value == 50
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_mock(self, pytester: Pytester) -> None:
+ pytest.importorskip("mock", "1.0.1")
+ pytester.makepyfile(
+ """
+ import os
+ import unittest
+ import mock
+
+ class T(unittest.TestCase):
+ @mock.patch("os.path.abspath")
+ def test_hello(self, abspath):
+ os.path.abspath("hello")
+ abspath.assert_any_call("hello")
+ def mock_basename(path):
+ return "mock_basename"
+ @mock.patch("os.path.abspath")
+ @mock.patch("os.path.normpath")
+ @mock.patch("os.path.basename", new=mock_basename)
+            def test_something(normpath, abspath, tmp_path):
+ abspath.return_value = "this"
+ os.path.normpath(os.path.abspath("hello"))
+ normpath.assert_any_call("this")
+ assert os.path.basename("123") == "mock_basename"
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+ calls = reprec.getcalls("pytest_runtest_logreport")
+ funcnames = [
+ call.report.location[2] for call in calls if call.report.when == "call"
+ ]
+        assert funcnames == ["T.test_hello", "test_something"]
+
+ def test_mock_sorting(self, pytester: Pytester) -> None:
+ pytest.importorskip("mock", "1.0.1")
+ pytester.makepyfile(
+ """
+ import os
+ import mock
+
+ @mock.patch("os.path.abspath")
+ def test_one(abspath):
+ pass
+ @mock.patch("os.path.abspath")
+ def test_two(abspath):
+ pass
+ @mock.patch("os.path.abspath")
+ def test_three(abspath):
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ calls = reprec.getreports("pytest_runtest_logreport")
+ calls = [x for x in calls if x.when == "call"]
+ names = [x.nodeid.split("::")[-1] for x in calls]
+ assert names == ["test_one", "test_two", "test_three"]
+
+ def test_mock_double_patch_issue473(self, pytester: Pytester) -> None:
+ pytest.importorskip("mock", "1.0.1")
+ pytester.makepyfile(
+ """
+ from mock import patch
+ from pytest import mark
+
+ @patch('os.getcwd')
+ @patch('os.path')
+ @mark.slow
+ class TestSimple(object):
+ def test_simple_thing(self, mock_path, mock_getcwd):
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+class TestReRunTests:
+ def test_rerun(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ from _pytest.runner import runtestprotocol
+ def pytest_runtest_protocol(item, nextitem):
+ runtestprotocol(item, log=False, nextitem=nextitem)
+ runtestprotocol(item, log=True, nextitem=nextitem)
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+ count = 0
+ req = None
+ @pytest.fixture
+ def fix(request):
+ global count, req
+ assert request != req
+ req = request
+ print("fix count %s" % count)
+ count += 1
+ def test_fix(fix):
+ pass
+ """
+ )
+ result = pytester.runpytest("-s")
+ result.stdout.fnmatch_lines(
+ """
+ *fix count 0*
+ *fix count 1*
+ """
+ )
+ result.stdout.fnmatch_lines(
+ """
+ *2 passed*
+ """
+ )
+
+
+def test_pytestconfig_is_session_scoped() -> None:
+ from _pytest.fixtures import pytestconfig
+
+ marker = getfixturemarker(pytestconfig)
+ assert marker is not None
+ assert marker.scope == "session"
+
+
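+# NOTE (editorial sketch, not part of the upstream suite): getfixturemarker() can be
+# used in the same way on a user-defined fixture; the fixture below exists only for
+# this illustration.
+@pytest.fixture(scope="module")
+def _editor_sketch_fixture():
+    return None
+
+
+def test_getfixturemarker_on_user_fixture() -> None:
+    marker = getfixturemarker(_editor_sketch_fixture)
+    assert marker is not None
+    assert marker.scope == "module"
+
+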
+class TestNoselikeTestAttribute:
+ def test_module_with_global_test(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ __test__ = False
+ def test_hello():
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ assert not reprec.getfailedcollections()
+ calls = reprec.getreports("pytest_runtest_logreport")
+ assert not calls
+
+ def test_class_and_method(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ __test__ = True
+ def test_func():
+ pass
+ test_func.__test__ = False
+
+ class TestSome(object):
+ __test__ = False
+ def test_method(self):
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ assert not reprec.getfailedcollections()
+ calls = reprec.getreports("pytest_runtest_logreport")
+ assert not calls
+
+ def test_unittest_class(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import unittest
+ class TC(unittest.TestCase):
+ def test_1(self):
+ pass
+ class TC2(unittest.TestCase):
+ __test__ = False
+ def test_2(self):
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ assert not reprec.getfailedcollections()
+ call = reprec.getcalls("pytest_collection_modifyitems")[0]
+ assert len(call.items) == 1
+ assert call.items[0].cls.__name__ == "TC"
+
+ def test_class_with_nasty_getattr(self, pytester: Pytester) -> None:
+ """Make sure we handle classes with a custom nasty __getattr__ right.
+
+ With a custom __getattr__ which e.g. returns a function (like with a
+ RPC wrapper), we shouldn't assume this meant "__test__ = True".
+ """
+ # https://github.com/pytest-dev/pytest/issues/1204
+ pytester.makepyfile(
+ """
+ class MetaModel(type):
+
+ def __getattr__(cls, key):
+ return lambda: None
+
+
+ BaseModel = MetaModel('Model', (), {})
+
+
+ class Model(BaseModel):
+
+ __metaclass__ = MetaModel
+
+ def test_blah(self):
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ assert not reprec.getfailedcollections()
+ call = reprec.getcalls("pytest_collection_modifyitems")[0]
+ assert not call.items
+
+
+class TestParameterize:
+ """#351"""
+
+ def test_idfn_marker(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ def idfn(param):
+ if param == 0:
+ return 'spam'
+ elif param == 1:
+ return 'ham'
+ else:
+ return None
+
+ @pytest.mark.parametrize('a,b', [(0, 2), (1, 2)], ids=idfn)
+ def test_params(a, b):
+ pass
+ """
+ )
+ res = pytester.runpytest("--collect-only")
+ res.stdout.fnmatch_lines(["*spam-2*", "*ham-2*"])
+
+ def test_idfn_fixture(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ def idfn(param):
+ if param == 0:
+ return 'spam'
+ elif param == 1:
+ return 'ham'
+ else:
+ return None
+
+ @pytest.fixture(params=[0, 1], ids=idfn)
+ def a(request):
+ return request.param
+
+ @pytest.fixture(params=[1, 2], ids=idfn)
+ def b(request):
+ return request.param
+
+ def test_params(a, b):
+ pass
+ """
+ )
+ res = pytester.runpytest("--collect-only")
+ res.stdout.fnmatch_lines(["*spam-2*", "*ham-2*"])
+
+
+def test_function_instance(pytester: Pytester) -> None:
+ items = pytester.getitems(
+ """
+ def test_func(): pass
+ class TestIt:
+ def test_method(self): pass
+ @classmethod
+ def test_class(cls): pass
+ @staticmethod
+ def test_static(): pass
+ """
+ )
+ assert len(items) == 3
+ assert isinstance(items[0], Function)
+ assert items[0].name == "test_func"
+ assert items[0].instance is None
+ assert isinstance(items[1], Function)
+ assert items[1].name == "test_method"
+ assert items[1].instance is not None
+ assert items[1].instance.__class__.__name__ == "TestIt"
+ assert isinstance(items[2], Function)
+ assert items[2].name == "test_static"
+ assert items[2].instance is None
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/python/metafunc.py b/testing/web-platform/tests/tools/third_party/pytest/testing/python/metafunc.py
new file mode 100644
index 0000000000..fc0082eb6b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/python/metafunc.py
@@ -0,0 +1,1907 @@
+import itertools
+import re
+import sys
+import textwrap
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+import attr
+import hypothesis
+from hypothesis import strategies
+
+import pytest
+from _pytest import fixtures
+from _pytest import python
+from _pytest.compat import _format_args
+from _pytest.compat import getfuncargnames
+from _pytest.compat import NOTSET
+from _pytest.outcomes import fail
+from _pytest.pytester import Pytester
+from _pytest.python import _idval
+from _pytest.python import idmaker
+from _pytest.scope import Scope
+
+
+class TestMetafunc:
+ def Metafunc(self, func, config=None) -> python.Metafunc:
+        # The unit tests in this class check behavior at the funcarg level,
+        # so we don't need a full-blown initialization.
+ class FuncFixtureInfoMock:
+ name2fixturedefs = None
+
+ def __init__(self, names):
+ self.names_closure = names
+
+ @attr.s
+ class DefinitionMock(python.FunctionDefinition):
+ obj = attr.ib()
+ _nodeid = attr.ib()
+
+ names = getfuncargnames(func)
+ fixtureinfo: Any = FuncFixtureInfoMock(names)
+ definition: Any = DefinitionMock._create(func, "mock::nodeid")
+ return python.Metafunc(definition, fixtureinfo, config, _ispytest=True)
+
+ def test_no_funcargs(self) -> None:
+ def function():
+ pass
+
+ metafunc = self.Metafunc(function)
+ assert not metafunc.fixturenames
+ repr(metafunc._calls)
+
+ def test_function_basic(self) -> None:
+ def func(arg1, arg2="qwe"):
+ pass
+
+ metafunc = self.Metafunc(func)
+ assert len(metafunc.fixturenames) == 1
+ assert "arg1" in metafunc.fixturenames
+ assert metafunc.function is func
+ assert metafunc.cls is None
+
+ def test_parametrize_error(self) -> None:
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x", [1, 2])
+ pytest.raises(ValueError, lambda: metafunc.parametrize("x", [5, 6]))
+ pytest.raises(ValueError, lambda: metafunc.parametrize("x", [5, 6]))
+ metafunc.parametrize("y", [1, 2])
+ pytest.raises(ValueError, lambda: metafunc.parametrize("y", [5, 6]))
+ pytest.raises(ValueError, lambda: metafunc.parametrize("y", [5, 6]))
+
+ with pytest.raises(TypeError, match="^ids must be a callable or an iterable$"):
+ metafunc.parametrize("y", [5, 6], ids=42) # type: ignore[arg-type]
+
+ def test_parametrize_error_iterator(self) -> None:
+ def func(x):
+ raise NotImplementedError()
+
+ class Exc(Exception):
+ def __repr__(self):
+ return "Exc(from_gen)"
+
+ def gen() -> Iterator[Union[int, None, Exc]]:
+ yield 0
+ yield None
+ yield Exc()
+
+ metafunc = self.Metafunc(func)
+        # When the ids are given as an iterator, only len(argvalues) of them are
+        # consumed, so the bad Exc is never reached.
+ metafunc.parametrize("x", [1, 2], ids=gen()) # type: ignore[arg-type]
+ assert [(x.funcargs, x.id) for x in metafunc._calls] == [
+ ({"x": 1}, "0"),
+ ({"x": 2}, "2"),
+ ]
+ with pytest.raises(
+ fail.Exception,
+ match=(
+ r"In func: ids must be list of string/float/int/bool, found:"
+ r" Exc\(from_gen\) \(type: <class .*Exc'>\) at index 2"
+ ),
+ ):
+ metafunc.parametrize("x", [1, 2, 3], ids=gen()) # type: ignore[arg-type]
+
+ def test_parametrize_bad_scope(self) -> None:
+ def func(x):
+ pass
+
+ metafunc = self.Metafunc(func)
+ with pytest.raises(
+ fail.Exception,
+ match=r"parametrize\(\) call in func got an unexpected scope value 'doggy'",
+ ):
+ metafunc.parametrize("x", [1], scope="doggy") # type: ignore[arg-type]
+
+ def test_parametrize_request_name(self, pytester: Pytester) -> None:
+ """Show proper error when 'request' is used as a parameter name in parametrize (#6183)"""
+
+ def func(request):
+ raise NotImplementedError()
+
+ metafunc = self.Metafunc(func)
+ with pytest.raises(
+ fail.Exception,
+ match=r"'request' is a reserved name and cannot be used in @pytest.mark.parametrize",
+ ):
+ metafunc.parametrize("request", [1])
+
+ def test_find_parametrized_scope(self) -> None:
+ """Unit test for _find_parametrized_scope (#3941)."""
+ from _pytest.python import _find_parametrized_scope
+
+ @attr.s
+ class DummyFixtureDef:
+ _scope = attr.ib()
+
+ fixtures_defs = cast(
+ Dict[str, Sequence[fixtures.FixtureDef[object]]],
+ dict(
+ session_fix=[DummyFixtureDef(Scope.Session)],
+ package_fix=[DummyFixtureDef(Scope.Package)],
+ module_fix=[DummyFixtureDef(Scope.Module)],
+ class_fix=[DummyFixtureDef(Scope.Class)],
+ func_fix=[DummyFixtureDef(Scope.Function)],
+ ),
+ )
+
+        # use the arguments to determine the narrowest scope; the bug was that it
+        # looked at all fixture defs given to the method
+ def find_scope(argnames, indirect):
+ return _find_parametrized_scope(argnames, fixtures_defs, indirect=indirect)
+
+ assert find_scope(["func_fix"], indirect=True) == Scope.Function
+ assert find_scope(["class_fix"], indirect=True) == Scope.Class
+ assert find_scope(["module_fix"], indirect=True) == Scope.Module
+ assert find_scope(["package_fix"], indirect=True) == Scope.Package
+ assert find_scope(["session_fix"], indirect=True) == Scope.Session
+
+ assert find_scope(["class_fix", "func_fix"], indirect=True) == Scope.Function
+ assert find_scope(["func_fix", "session_fix"], indirect=True) == Scope.Function
+ assert find_scope(["session_fix", "class_fix"], indirect=True) == Scope.Class
+ assert (
+ find_scope(["package_fix", "session_fix"], indirect=True) == Scope.Package
+ )
+ assert find_scope(["module_fix", "session_fix"], indirect=True) == Scope.Module
+
+ # when indirect is False or is not for all scopes, always use function
+ assert (
+ find_scope(["session_fix", "module_fix"], indirect=False) == Scope.Function
+ )
+ assert (
+ find_scope(["session_fix", "module_fix"], indirect=["module_fix"])
+ == Scope.Function
+ )
+ assert (
+ find_scope(
+ ["session_fix", "module_fix"], indirect=["session_fix", "module_fix"]
+ )
+ == Scope.Module
+ )
+
+ def test_parametrize_and_id(self) -> None:
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+
+ metafunc.parametrize("x", [1, 2], ids=["basic", "advanced"])
+ metafunc.parametrize("y", ["abc", "def"])
+ ids = [x.id for x in metafunc._calls]
+ assert ids == ["basic-abc", "basic-def", "advanced-abc", "advanced-def"]
+
+ def test_parametrize_and_id_unicode(self) -> None:
+ """Allow unicode strings for "ids" parameter in Python 2 (##1905)"""
+
+ def func(x):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x", [1, 2], ids=["basic", "advanced"])
+ ids = [x.id for x in metafunc._calls]
+ assert ids == ["basic", "advanced"]
+
+ def test_parametrize_with_wrong_number_of_ids(self) -> None:
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+
+ with pytest.raises(fail.Exception):
+ metafunc.parametrize("x", [1, 2], ids=["basic"])
+
+ with pytest.raises(fail.Exception):
+ metafunc.parametrize(
+ ("x", "y"), [("abc", "def"), ("ghi", "jkl")], ids=["one"]
+ )
+
+ def test_parametrize_ids_iterator_without_mark(self) -> None:
+ def func(x, y):
+ pass
+
+ it = itertools.count()
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x", [1, 2], ids=it)
+ metafunc.parametrize("y", [3, 4], ids=it)
+ ids = [x.id for x in metafunc._calls]
+ assert ids == ["0-2", "0-3", "1-2", "1-3"]
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x", [1, 2], ids=it)
+ metafunc.parametrize("y", [3, 4], ids=it)
+ ids = [x.id for x in metafunc._calls]
+ assert ids == ["4-6", "4-7", "5-6", "5-7"]
+
+ def test_parametrize_empty_list(self) -> None:
+ """#510"""
+
+ def func(y):
+ pass
+
+ class MockConfig:
+ def getini(self, name):
+ return ""
+
+ @property
+ def hook(self):
+ return self
+
+ def pytest_make_parametrize_id(self, **kw):
+ pass
+
+ metafunc = self.Metafunc(func, MockConfig())
+ metafunc.parametrize("y", [])
+ assert "skip" == metafunc._calls[0].marks[0].name
+
+ def test_parametrize_with_userobjects(self) -> None:
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+
+ class A:
+ pass
+
+ metafunc.parametrize("x", [A(), A()])
+ metafunc.parametrize("y", list("ab"))
+ assert metafunc._calls[0].id == "x0-a"
+ assert metafunc._calls[1].id == "x0-b"
+ assert metafunc._calls[2].id == "x1-a"
+ assert metafunc._calls[3].id == "x1-b"
+
+ @hypothesis.given(strategies.text() | strategies.binary())
+ @hypothesis.settings(
+ deadline=400.0
+    )  # very close to the default deadline, and CI machines have unreliable CPU performance
+ def test_idval_hypothesis(self, value) -> None:
+ escaped = _idval(value, "a", 6, None, nodeid=None, config=None)
+ assert isinstance(escaped, str)
+ escaped.encode("ascii")
+
+ def test_unicode_idval(self) -> None:
+ """Test that Unicode strings outside the ASCII character set get
+ escaped, using byte escapes if they're in that range or unicode
+ escapes if they're not.
+
+ """
+ values = [
+ ("", r""),
+ ("ascii", r"ascii"),
+ ("ação", r"a\xe7\xe3o"),
+ ("josé@blah.com", r"jos\xe9@blah.com"),
+ (
+ r"δοκ.ιμή@παÏάδειγμα.δοκιμή",
+ r"\u03b4\u03bf\u03ba.\u03b9\u03bc\u03ae@\u03c0\u03b1\u03c1\u03ac\u03b4\u03b5\u03b9\u03b3"
+ r"\u03bc\u03b1.\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae",
+ ),
+ ]
+ for val, expected in values:
+ assert _idval(val, "a", 6, None, nodeid=None, config=None) == expected
+
+ def test_unicode_idval_with_config(self) -> None:
+ """Unit test for expected behavior to obtain ids with
+ disable_test_id_escaping_and_forfeit_all_rights_to_community_support
+ option (#5294)."""
+
+ class MockConfig:
+ def __init__(self, config):
+ self.config = config
+
+ @property
+ def hook(self):
+ return self
+
+ def pytest_make_parametrize_id(self, **kw):
+ pass
+
+ def getini(self, name):
+ return self.config[name]
+
+ option = "disable_test_id_escaping_and_forfeit_all_rights_to_community_support"
+
+ values: List[Tuple[str, Any, str]] = [
+ ("ação", MockConfig({option: True}), "ação"),
+ ("ação", MockConfig({option: False}), "a\\xe7\\xe3o"),
+ ]
+ for val, config, expected in values:
+ actual = _idval(val, "a", 6, None, nodeid=None, config=config)
+ assert actual == expected
+
+ def test_bytes_idval(self) -> None:
+ """Unit test for the expected behavior to obtain ids for parametrized
+ bytes values: bytes objects are always escaped using "binary escape"."""
+ values = [
+ (b"", r""),
+ (b"\xc3\xb4\xff\xe4", r"\xc3\xb4\xff\xe4"),
+ (b"ascii", r"ascii"),
+ ("αÏά".encode(), r"\xce\xb1\xcf\x81\xce\xac"),
+ ]
+ for val, expected in values:
+ assert _idval(val, "a", 6, idfn=None, nodeid=None, config=None) == expected
+
+ def test_class_or_function_idval(self) -> None:
+ """Unit test for the expected behavior to obtain ids for parametrized
+ values that are classes or functions: their __name__."""
+
+ class TestClass:
+ pass
+
+ def test_function():
+ pass
+
+ values = [(TestClass, "TestClass"), (test_function, "test_function")]
+ for val, expected in values:
+ assert _idval(val, "a", 6, None, nodeid=None, config=None) == expected
+
+ def test_notset_idval(self) -> None:
+ """Test that a NOTSET value (used by an empty parameterset) generates
+ a proper ID.
+
+ Regression test for #7686.
+ """
+ assert _idval(NOTSET, "a", 0, None, nodeid=None, config=None) == "a0"
+
+ def test_idmaker_autoname(self) -> None:
+ """#250"""
+ result = idmaker(
+ ("a", "b"), [pytest.param("string", 1.0), pytest.param("st-ring", 2.0)]
+ )
+ assert result == ["string-1.0", "st-ring-2.0"]
+
+ result = idmaker(
+ ("a", "b"), [pytest.param(object(), 1.0), pytest.param(object(), object())]
+ )
+ assert result == ["a0-1.0", "a1-b1"]
+ # unicode mixing, issue250
+ result = idmaker(("a", "b"), [pytest.param({}, b"\xc3\xb4")])
+ assert result == ["a0-\\xc3\\xb4"]
+
+ def test_idmaker_with_bytes_regex(self) -> None:
+ result = idmaker(("a"), [pytest.param(re.compile(b"foo"), 1.0)])
+ assert result == ["foo"]
+
+ def test_idmaker_native_strings(self) -> None:
+ result = idmaker(
+ ("a", "b"),
+ [
+ pytest.param(1.0, -1.1),
+ pytest.param(2, -202),
+ pytest.param("three", "three hundred"),
+ pytest.param(True, False),
+ pytest.param(None, None),
+ pytest.param(re.compile("foo"), re.compile("bar")),
+ pytest.param(str, int),
+ pytest.param(list("six"), [66, 66]),
+ pytest.param({7}, set("seven")),
+ pytest.param(tuple("eight"), (8, -8, 8)),
+ pytest.param(b"\xc3\xb4", b"name"),
+ pytest.param(b"\xc3\xb4", "other"),
+ pytest.param(1.0j, -2.0j),
+ ],
+ )
+ assert result == [
+ "1.0--1.1",
+ "2--202",
+ "three-three hundred",
+ "True-False",
+ "None-None",
+ "foo-bar",
+ "str-int",
+ "a7-b7",
+ "a8-b8",
+ "a9-b9",
+ "\\xc3\\xb4-name",
+ "\\xc3\\xb4-other",
+ "1j-(-0-2j)",
+ ]
+
+ def test_idmaker_non_printable_characters(self) -> None:
+ result = idmaker(
+ ("s", "n"),
+ [
+ pytest.param("\x00", 1),
+ pytest.param("\x05", 2),
+ pytest.param(b"\x00", 3),
+ pytest.param(b"\x05", 4),
+ pytest.param("\t", 5),
+ pytest.param(b"\t", 6),
+ ],
+ )
+ assert result == ["\\x00-1", "\\x05-2", "\\x00-3", "\\x05-4", "\\t-5", "\\t-6"]
+
+ def test_idmaker_manual_ids_must_be_printable(self) -> None:
+ result = idmaker(
+ ("s",),
+ [
+ pytest.param("x00", id="hello \x00"),
+ pytest.param("x05", id="hello \x05"),
+ ],
+ )
+ assert result == ["hello \\x00", "hello \\x05"]
+
+ def test_idmaker_enum(self) -> None:
+ enum = pytest.importorskip("enum")
+ e = enum.Enum("Foo", "one, two")
+ result = idmaker(("a", "b"), [pytest.param(e.one, e.two)])
+ assert result == ["Foo.one-Foo.two"]
+
+ def test_idmaker_idfn(self) -> None:
+ """#351"""
+
+ def ids(val: object) -> Optional[str]:
+ if isinstance(val, Exception):
+ return repr(val)
+ return None
+
+ result = idmaker(
+ ("a", "b"),
+ [
+ pytest.param(10.0, IndexError()),
+ pytest.param(20, KeyError()),
+ pytest.param("three", [1, 2, 3]),
+ ],
+ idfn=ids,
+ )
+ assert result == ["10.0-IndexError()", "20-KeyError()", "three-b2"]
+
+ def test_idmaker_idfn_unique_names(self) -> None:
+ """#351"""
+
+ def ids(val: object) -> str:
+ return "a"
+
+ result = idmaker(
+ ("a", "b"),
+ [
+ pytest.param(10.0, IndexError()),
+ pytest.param(20, KeyError()),
+ pytest.param("three", [1, 2, 3]),
+ ],
+ idfn=ids,
+ )
+ assert result == ["a-a0", "a-a1", "a-a2"]
+
+ def test_idmaker_with_idfn_and_config(self) -> None:
+ """Unit test for expected behavior to create ids with idfn and
+ disable_test_id_escaping_and_forfeit_all_rights_to_community_support
+ option (#5294).
+ """
+
+ class MockConfig:
+ def __init__(self, config):
+ self.config = config
+
+ @property
+ def hook(self):
+ return self
+
+ def pytest_make_parametrize_id(self, **kw):
+ pass
+
+ def getini(self, name):
+ return self.config[name]
+
+ option = "disable_test_id_escaping_and_forfeit_all_rights_to_community_support"
+
+ values: List[Tuple[Any, str]] = [
+ (MockConfig({option: True}), "ação"),
+ (MockConfig({option: False}), "a\\xe7\\xe3o"),
+ ]
+ for config, expected in values:
+ result = idmaker(
+ ("a",),
+ [pytest.param("string")],
+ idfn=lambda _: "ação",
+ config=config,
+ )
+ assert result == [expected]
+
+ def test_idmaker_with_ids_and_config(self) -> None:
+ """Unit test for expected behavior to create ids with ids and
+ disable_test_id_escaping_and_forfeit_all_rights_to_community_support
+ option (#5294).
+ """
+
+ class MockConfig:
+ def __init__(self, config):
+ self.config = config
+
+ @property
+ def hook(self):
+ return self
+
+ def pytest_make_parametrize_id(self, **kw):
+ pass
+
+ def getini(self, name):
+ return self.config[name]
+
+ option = "disable_test_id_escaping_and_forfeit_all_rights_to_community_support"
+
+ values: List[Tuple[Any, str]] = [
+ (MockConfig({option: True}), "ação"),
+ (MockConfig({option: False}), "a\\xe7\\xe3o"),
+ ]
+ for config, expected in values:
+ result = idmaker(
+ ("a",),
+ [pytest.param("string")],
+ ids=["ação"],
+ config=config,
+ )
+ assert result == [expected]
+
+ def test_parametrize_ids_exception(self, pytester: Pytester) -> None:
+ """
+        :param pytester: the Pytester instance, which provides a temporary
+            test directory.
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+
+ def ids(arg):
+ raise Exception("bad ids")
+
+ @pytest.mark.parametrize("arg", ["a", "b"], ids=ids)
+ def test_foo(arg):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*Exception: bad ids",
+ "*test_foo: error raised while trying to determine id of parameter 'arg' at position 0",
+ ]
+ )
+
+ def test_parametrize_ids_returns_non_string(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """\
+ import pytest
+
+ def ids(d):
+ return d
+
+ @pytest.mark.parametrize("arg", ({1: 2}, {3, 4}), ids=ids)
+ def test(arg):
+ assert arg
+
+ @pytest.mark.parametrize("arg", (1, 2.0, True), ids=ids)
+ def test_int(arg):
+ assert arg
+ """
+ )
+ result = pytester.runpytest("-vv", "-s")
+ result.stdout.fnmatch_lines(
+ [
+ "test_parametrize_ids_returns_non_string.py::test[arg0] PASSED",
+ "test_parametrize_ids_returns_non_string.py::test[arg1] PASSED",
+ "test_parametrize_ids_returns_non_string.py::test_int[1] PASSED",
+ "test_parametrize_ids_returns_non_string.py::test_int[2.0] PASSED",
+ "test_parametrize_ids_returns_non_string.py::test_int[True] PASSED",
+ ]
+ )
+
+ def test_idmaker_with_ids(self) -> None:
+ result = idmaker(
+ ("a", "b"), [pytest.param(1, 2), pytest.param(3, 4)], ids=["a", None]
+ )
+ assert result == ["a", "3-4"]
+
+ def test_idmaker_with_paramset_id(self) -> None:
+ result = idmaker(
+ ("a", "b"),
+ [pytest.param(1, 2, id="me"), pytest.param(3, 4, id="you")],
+ ids=["a", None],
+ )
+ assert result == ["me", "you"]
+
+ def test_idmaker_with_ids_unique_names(self) -> None:
+ result = idmaker(
+ ("a"), map(pytest.param, [1, 2, 3, 4, 5]), ids=["a", "a", "b", "c", "b"]
+ )
+ assert result == ["a0", "a1", "b0", "c", "b1"]
+
+ def test_parametrize_indirect(self) -> None:
+ """#714"""
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x", [1], indirect=True)
+ metafunc.parametrize("y", [2, 3], indirect=True)
+ assert len(metafunc._calls) == 2
+ assert metafunc._calls[0].funcargs == {}
+ assert metafunc._calls[1].funcargs == {}
+ assert metafunc._calls[0].params == dict(x=1, y=2)
+ assert metafunc._calls[1].params == dict(x=1, y=3)
+
+ def test_parametrize_indirect_list(self) -> None:
+ """#714"""
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x, y", [("a", "b")], indirect=["x"])
+ assert metafunc._calls[0].funcargs == dict(y="b")
+ assert metafunc._calls[0].params == dict(x="a")
+
+ def test_parametrize_indirect_list_all(self) -> None:
+ """#714"""
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x, y", [("a", "b")], indirect=["x", "y"])
+ assert metafunc._calls[0].funcargs == {}
+ assert metafunc._calls[0].params == dict(x="a", y="b")
+
+ def test_parametrize_indirect_list_empty(self) -> None:
+ """#714"""
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ metafunc.parametrize("x, y", [("a", "b")], indirect=[])
+ assert metafunc._calls[0].funcargs == dict(x="a", y="b")
+ assert metafunc._calls[0].params == {}
+
+ def test_parametrize_indirect_wrong_type(self) -> None:
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ with pytest.raises(
+ fail.Exception,
+ match="In func: expected Sequence or boolean for indirect, got dict",
+ ):
+ metafunc.parametrize("x, y", [("a", "b")], indirect={}) # type: ignore[arg-type]
+
+ def test_parametrize_indirect_list_functional(self, pytester: Pytester) -> None:
+ """
+ #714
+        Test parametrization with the 'indirect' parameter applied to
+        particular arguments only. As y is not indirect, its value should
+        be used directly rather than being passed to the fixture y.
+
+        :param pytester: the Pytester instance, which provides a temporary
+            test directory.
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope='function')
+ def x(request):
+ return request.param * 3
+ @pytest.fixture(scope='function')
+ def y(request):
+ return request.param * 2
+ @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=['x'])
+ def test_simple(x,y):
+ assert len(x) == 3
+ assert len(y) == 1
+ """
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(["*test_simple*a-b*", "*1 passed*"])
+
+ def test_parametrize_indirect_list_error(self) -> None:
+ """#714"""
+
+ def func(x, y):
+ pass
+
+ metafunc = self.Metafunc(func)
+ with pytest.raises(fail.Exception):
+ metafunc.parametrize("x, y", [("a", "b")], indirect=["x", "z"])
+
+ def test_parametrize_uses_no_fixture_error_indirect_false(
+ self, pytester: Pytester
+ ) -> None:
+ """The 'uses no fixture' error tells the user at collection time
+ that the parametrize data they've set up doesn't correspond to the
+ fixtures in their test function, rather than silently ignoring this
+ and letting the test potentially pass.
+
+ #714
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=False)
+ def test_simple(x):
+ assert len(x) == 3
+ """
+ )
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*uses no argument 'y'*"])
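+
+    # NOTE (editorial sketch, not part of the upstream suite): the corrected counterpart
+    # of the test above: once the test signature uses both parametrized names,
+    # collection succeeds and the test runs.
+    def test_parametrize_all_arguments_used_sketch(self, pytester: Pytester) -> None:
+        pytester.makepyfile(
+            """
+            import pytest
+
+            @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=False)
+            def test_simple(x, y):
+                assert (x, y) == ('a', 'b')
+            """
+        )
+        result = pytester.runpytest()
+        result.assert_outcomes(passed=1)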
+
+ def test_parametrize_uses_no_fixture_error_indirect_true(
+ self, pytester: Pytester
+ ) -> None:
+ """#714"""
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope='function')
+ def x(request):
+ return request.param * 3
+ @pytest.fixture(scope='function')
+ def y(request):
+ return request.param * 2
+
+ @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=True)
+ def test_simple(x):
+ assert len(x) == 3
+ """
+ )
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*uses no fixture 'y'*"])
+
+ def test_parametrize_indirect_uses_no_fixture_error_indirect_string(
+ self, pytester: Pytester
+ ) -> None:
+ """#714"""
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope='function')
+ def x(request):
+ return request.param * 3
+
+ @pytest.mark.parametrize('x, y', [('a', 'b')], indirect='y')
+ def test_simple(x):
+ assert len(x) == 3
+ """
+ )
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*uses no fixture 'y'*"])
+
+ def test_parametrize_indirect_uses_no_fixture_error_indirect_list(
+ self, pytester: Pytester
+ ) -> None:
+ """#714"""
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope='function')
+ def x(request):
+ return request.param * 3
+
+ @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=['y'])
+ def test_simple(x):
+ assert len(x) == 3
+ """
+ )
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*uses no fixture 'y'*"])
+
+ def test_parametrize_argument_not_in_indirect_list(
+ self, pytester: Pytester
+ ) -> None:
+ """#714"""
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope='function')
+ def x(request):
+ return request.param * 3
+
+ @pytest.mark.parametrize('x, y', [('a', 'b')], indirect=['x'])
+ def test_simple(x):
+ assert len(x) == 3
+ """
+ )
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*uses no argument 'y'*"])
+
+ def test_parametrize_gives_indicative_error_on_function_with_default_argument(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize('x, y', [('a', 'b')])
+ def test_simple(x, y=1):
+ assert len(x) == 1
+ """
+ )
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(
+ ["*already takes an argument 'y' with a default value"]
+ )
+
+ def test_parametrize_functional(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize('x', [1,2], indirect=True)
+ metafunc.parametrize('y', [2])
+ @pytest.fixture
+ def x(request):
+ return request.param * 10
+
+ def test_simple(x,y):
+ assert x in (10,20)
+ assert y == 2
+ """
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ ["*test_simple*1-2*", "*test_simple*2-2*", "*2 passed*"]
+ )
+
+ def test_parametrize_onearg(self) -> None:
+ metafunc = self.Metafunc(lambda x: None)
+ metafunc.parametrize("x", [1, 2])
+ assert len(metafunc._calls) == 2
+ assert metafunc._calls[0].funcargs == dict(x=1)
+ assert metafunc._calls[0].id == "1"
+ assert metafunc._calls[1].funcargs == dict(x=2)
+ assert metafunc._calls[1].id == "2"
+
+ def test_parametrize_onearg_indirect(self) -> None:
+ metafunc = self.Metafunc(lambda x: None)
+ metafunc.parametrize("x", [1, 2], indirect=True)
+ assert metafunc._calls[0].params == dict(x=1)
+ assert metafunc._calls[0].id == "1"
+ assert metafunc._calls[1].params == dict(x=2)
+ assert metafunc._calls[1].id == "2"
+
+ def test_parametrize_twoargs(self) -> None:
+ metafunc = self.Metafunc(lambda x, y: None)
+ metafunc.parametrize(("x", "y"), [(1, 2), (3, 4)])
+ assert len(metafunc._calls) == 2
+ assert metafunc._calls[0].funcargs == dict(x=1, y=2)
+ assert metafunc._calls[0].id == "1-2"
+ assert metafunc._calls[1].funcargs == dict(x=3, y=4)
+ assert metafunc._calls[1].id == "3-4"
+
+ def test_parametrize_multiple_times(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ pytestmark = pytest.mark.parametrize("x", [1,2])
+ def test_func(x):
+ assert 0, x
+ class TestClass(object):
+ pytestmark = pytest.mark.parametrize("y", [3,4])
+ def test_meth(self, x, y):
+ assert 0, x
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 1
+ result.assert_outcomes(failed=6)
+
+ def test_parametrize_CSV(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize("x, y,", [(1,2), (2,3)])
+ def test_func(x, y):
+ assert x+1 == y
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ def test_parametrize_class_scenarios(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ # same as doc/en/example/parametrize scenario example
+ def pytest_generate_tests(metafunc):
+ idlist = []
+ argvalues = []
+ for scenario in metafunc.cls.scenarios:
+ idlist.append(scenario[0])
+ items = scenario[1].items()
+ argnames = [x[0] for x in items]
+ argvalues.append(([x[1] for x in items]))
+ metafunc.parametrize(argnames, argvalues, ids=idlist, scope="class")
+
+ class Test(object):
+ scenarios = [['1', {'arg': {1: 2}, "arg2": "value2"}],
+ ['2', {'arg':'value2', "arg2": "value2"}]]
+
+ def test_1(self, arg, arg2):
+ pass
+
+ def test_2(self, arg2, arg):
+ pass
+
+ def test_3(self, arg, arg2):
+ pass
+ """
+ )
+ result = pytester.runpytest("-v")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ """
+ *test_1*1*
+ *test_2*1*
+ *test_3*1*
+ *test_1*2*
+ *test_2*2*
+ *test_3*2*
+ *6 passed*
+ """
+ )
+
+ def test_format_args(self) -> None:
+ def function1():
+ pass
+
+ assert _format_args(function1) == "()"
+
+ def function2(arg1):
+ pass
+
+ assert _format_args(function2) == "(arg1)"
+
+ def function3(arg1, arg2="qwe"):
+ pass
+
+ assert _format_args(function3) == "(arg1, arg2='qwe')"
+
+ def function4(arg1, *args, **kwargs):
+ pass
+
+ assert _format_args(function4) == "(arg1, *args, **kwargs)"
+
+
+class TestMetafuncFunctional:
+ def test_attributes(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ # assumes that generate/provide runs in the same process
+ import sys, pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize('metafunc', [metafunc])
+
+ @pytest.fixture
+ def metafunc(request):
+ return request.param
+
+ def test_function(metafunc, pytestconfig):
+ assert metafunc.config == pytestconfig
+ assert metafunc.module.__name__ == __name__
+ assert metafunc.function == test_function
+ assert metafunc.cls is None
+
+ class TestClass(object):
+ def test_method(self, metafunc, pytestconfig):
+ assert metafunc.config == pytestconfig
+ assert metafunc.module.__name__ == __name__
+ unbound = TestClass.test_method
+ assert metafunc.function == unbound
+ assert metafunc.cls == TestClass
+ """
+ )
+ result = pytester.runpytest(p, "-v")
+ result.assert_outcomes(passed=2)
+
+ def test_two_functions(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize('arg1', [10, 20], ids=['0', '1'])
+
+ def test_func1(arg1):
+ assert arg1 == 10
+
+ def test_func2(arg1):
+ assert arg1 in (10, 20)
+ """
+ )
+ result = pytester.runpytest("-v", p)
+ result.stdout.fnmatch_lines(
+ [
+ "*test_func1*0*PASS*",
+ "*test_func1*1*FAIL*",
+ "*test_func2*PASS*",
+ "*test_func2*PASS*",
+ "*1 failed, 3 passed*",
+ ]
+ )
+
+ def test_noself_in_method(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ assert 'xyz' not in metafunc.fixturenames
+
+ class TestHello(object):
+ def test_hello(xyz):
+ pass
+ """
+ )
+ result = pytester.runpytest(p)
+ result.assert_outcomes(passed=1)
+
+ def test_generate_tests_in_class(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ class TestClass(object):
+ def pytest_generate_tests(self, metafunc):
+ metafunc.parametrize('hello', ['world'], ids=['hellow'])
+
+ def test_myfunc(self, hello):
+ assert hello == "world"
+ """
+ )
+ result = pytester.runpytest("-v", p)
+ result.stdout.fnmatch_lines(["*test_myfunc*hello*PASS*", "*1 passed*"])
+
+ def test_two_functions_not_same_instance(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize('arg1', [10, 20], ids=["0", "1"])
+
+ class TestClass(object):
+ def test_func(self, arg1):
+ assert not hasattr(self, 'x')
+ self.x = 1
+ """
+ )
+ result = pytester.runpytest("-v", p)
+ result.stdout.fnmatch_lines(
+ ["*test_func*0*PASS*", "*test_func*1*PASS*", "*2 pass*"]
+ )
+
+ def test_issue28_setup_method_in_generate_tests(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize('arg1', [1])
+
+ class TestClass(object):
+ def test_method(self, arg1):
+ assert arg1 == self.val
+ def setup_method(self, func):
+ self.val = 1
+ """
+ )
+ result = pytester.runpytest(p)
+ result.assert_outcomes(passed=1)
+
+ def test_parametrize_functional2(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize("arg1", [1,2])
+ metafunc.parametrize("arg2", [4,5])
+ def test_hello(arg1, arg2):
+ assert 0, (arg1, arg2)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*(1, 4)*", "*(1, 5)*", "*(2, 4)*", "*(2, 5)*", "*4 failed*"]
+ )
+
+ def test_parametrize_and_inner_getfixturevalue(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize("arg1", [1], indirect=True)
+ metafunc.parametrize("arg2", [10], indirect=True)
+
+ import pytest
+ @pytest.fixture
+ def arg1(request):
+ x = request.getfixturevalue("arg2")
+ return x + request.param
+
+ @pytest.fixture
+ def arg2(request):
+ return request.param
+
+ def test_func1(arg1, arg2):
+ assert arg1 == 11
+ """
+ )
+ result = pytester.runpytest("-v", p)
+ result.stdout.fnmatch_lines(["*test_func1*1*PASS*", "*1 passed*"])
+
+ def test_parametrize_on_setup_arg(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def pytest_generate_tests(metafunc):
+ assert "arg1" in metafunc.fixturenames
+ metafunc.parametrize("arg1", [1], indirect=True)
+
+ import pytest
+ @pytest.fixture
+ def arg1(request):
+ return request.param
+
+ @pytest.fixture
+ def arg2(request, arg1):
+ return 10 * arg1
+
+ def test_func(arg2):
+ assert arg2 == 10
+ """
+ )
+ result = pytester.runpytest("-v", p)
+ result.stdout.fnmatch_lines(["*test_func*1*PASS*", "*1 passed*"])
+
+ def test_parametrize_with_ids(self, pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ console_output_style=classic
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize(("a", "b"), [(1,1), (1,2)],
+ ids=["basic", "advanced"])
+
+ def test_function(a, b):
+ assert a == b
+ """
+ )
+ result = pytester.runpytest("-v")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines_random(
+ ["*test_function*basic*PASSED", "*test_function*advanced*FAILED"]
+ )
+
+ def test_parametrize_without_ids(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize(("a", "b"),
+ [(1,object()), (1.3,object())])
+
+ def test_function(a, b):
+ assert 1
+ """
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ """
+ *test_function*1-b0*
+ *test_function*1.3-b1*
+ """
+ )
+
+ def test_parametrize_with_None_in_ids(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize(("a", "b"), [(1,1), (1,1), (1,2)],
+ ids=["basic", None, "advanced"])
+
+ def test_function(a, b):
+ assert a == b
+ """
+ )
+ result = pytester.runpytest("-v")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines_random(
+ [
+ "*test_function*basic*PASSED*",
+ "*test_function*1-1*PASSED*",
+ "*test_function*advanced*FAILED*",
+ ]
+ )
+
+ def test_fixture_parametrized_empty_ids(self, pytester: Pytester) -> None:
+ """Fixtures parametrized with empty ids cause an internal error (#1849)."""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope="module", ids=[], params=[])
+ def temp(request):
+ return request.param
+
+ def test_temp(temp):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 1 skipped *"])
+
+ def test_parametrized_empty_ids(self, pytester: Pytester) -> None:
+ """Tests parametrized with empty ids cause an internal error (#1849)."""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize('temp', [], ids=list())
+ def test_temp(temp):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 1 skipped *"])
+
+ def test_parametrized_ids_invalid_type(self, pytester: Pytester) -> None:
+ """Test error with non-strings/non-ints, without generator (#1857)."""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize("x, expected", [(1, 2), (3, 4), (5, 6)], ids=(None, 2, type))
+ def test_ids_numbers(x,expected):
+ assert x * 2 == expected
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "In test_ids_numbers: ids must be list of string/float/int/bool,"
+ " found: <class 'type'> (type: <class 'type'>) at index 2"
+ ]
+ )
+
+ def test_parametrize_with_identical_ids_get_unique_names(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ metafunc.parametrize(("a", "b"), [(1,1), (1,2)],
+ ids=["a", "a"])
+
+ def test_function(a, b):
+ assert a == b
+ """
+ )
+ result = pytester.runpytest("-v")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines_random(
+ ["*test_function*a0*PASSED*", "*test_function*a1*FAILED*"]
+ )
+
+ @pytest.mark.parametrize(("scope", "length"), [("module", 2), ("function", 4)])
+ def test_parametrize_scope_overrides(
+ self, pytester: Pytester, scope: str, length: int
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ values = []
+ def pytest_generate_tests(metafunc):
+ if "arg" in metafunc.fixturenames:
+ metafunc.parametrize("arg", [1,2], indirect=True,
+ scope=%r)
+ @pytest.fixture
+ def arg(request):
+ values.append(request.param)
+ return request.param
+ def test_hello(arg):
+ assert arg in (1,2)
+ def test_world(arg):
+ assert arg in (1,2)
+ def test_checklength():
+ assert len(values) == %d
+ """
+ % (scope, length)
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=5)
+
+ def test_parametrize_issue323(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='module', params=range(966))
+ def foo(request):
+ return request.param
+
+ def test_it(foo):
+ pass
+ def test_it2(foo):
+ pass
+ """
+ )
+ reprec = pytester.inline_run("--collect-only")
+ assert not reprec.getcalls("pytest_internalerror")
+
+ def test_usefixtures_seen_in_generate_tests(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ assert "abc" in metafunc.fixturenames
+ metafunc.parametrize("abc", [1])
+
+ @pytest.mark.usefixtures("abc")
+ def test_function():
+ pass
+ """
+ )
+ reprec = pytester.runpytest()
+ reprec.assert_outcomes(passed=1)
+
+ def test_generate_tests_only_done_in_subdir(self, pytester: Pytester) -> None:
+ sub1 = pytester.mkpydir("sub1")
+ sub2 = pytester.mkpydir("sub2")
+ sub1.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ def pytest_generate_tests(metafunc):
+ assert metafunc.function.__name__ == "test_1"
+ """
+ )
+ )
+ sub2.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ def pytest_generate_tests(metafunc):
+ assert metafunc.function.__name__ == "test_2"
+ """
+ )
+ )
+ sub1.joinpath("test_in_sub1.py").write_text("def test_1(): pass")
+ sub2.joinpath("test_in_sub2.py").write_text("def test_2(): pass")
+ result = pytester.runpytest("--keep-duplicates", "-v", "-s", sub1, sub2, sub1)
+ result.assert_outcomes(passed=3)
+
+ def test_generate_same_function_names_issue403(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ def make_tests():
+ @pytest.mark.parametrize("x", range(2))
+ def test_foo(x):
+ pass
+ return test_foo
+
+ test_x = make_tests()
+ test_y = make_tests()
+ """
+ )
+ reprec = pytester.runpytest()
+ reprec.assert_outcomes(passed=4)
+
+ def test_parametrize_misspelling(self, pytester: Pytester) -> None:
+ """#463"""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrise("x", range(2))
+ def test_foo(x):
+ pass
+ """
+ )
+ result = pytester.runpytest("--collectonly")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 0 items / 1 error",
+ "",
+ "*= ERRORS =*",
+ "*_ ERROR collecting test_parametrize_misspelling.py _*",
+ "test_parametrize_misspelling.py:3: in <module>",
+ ' @pytest.mark.parametrise("x", range(2))',
+ "E Failed: Unknown 'parametrise' mark, did you mean 'parametrize'?",
+ "*! Interrupted: 1 error during collection !*",
+ "*= no tests collected, 1 error in *",
+ ]
+ )
+
+
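The TestMetafuncFunctional cases above all drive parametrization through the pytest_generate_tests hook rather than the @pytest.mark.parametrize decorator. For orientation, a minimal sketch of that pattern; the module, argument, and value names here are illustrative and do not come from the suite:

    # conftest.py (or the test module itself), a hypothetical example
    def pytest_generate_tests(metafunc):
        # Only parametrize tests that actually request the "backend" argument.
        if "backend" in metafunc.fixturenames:
            metafunc.parametrize("backend", ["sqlite", "postgres"], ids=["lite", "pg"])

    # test_backend.py
    def test_connect(backend):
        assert backend in ("sqlite", "postgres")
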
+class TestMetafuncFunctionalAuto:
+ """Tests related to automatically find out the correct scope for
+ parametrized tests (#1832)."""
+
+ def test_parametrize_auto_scope(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='session', autouse=True)
+ def fixture():
+ return 1
+
+ @pytest.mark.parametrize('animal', ["dog", "cat"])
+ def test_1(animal):
+ assert animal in ('dog', 'cat')
+
+ @pytest.mark.parametrize('animal', ['fish'])
+ def test_2(animal):
+ assert animal == 'fish'
+
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 3 passed *"])
+
+ def test_parametrize_auto_scope_indirect(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='session')
+ def echo(request):
+ return request.param
+
+ @pytest.mark.parametrize('animal, echo', [("dog", 1), ("cat", 2)], indirect=['echo'])
+ def test_1(animal, echo):
+ assert animal in ('dog', 'cat')
+ assert echo in (1, 2, 3)
+
+ @pytest.mark.parametrize('animal, echo', [('fish', 3)], indirect=['echo'])
+ def test_2(animal, echo):
+ assert animal == 'fish'
+ assert echo in (1, 2, 3)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 3 passed *"])
+
+ def test_parametrize_auto_scope_override_fixture(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='session', autouse=True)
+ def animal():
+ return 'fox'
+
+ @pytest.mark.parametrize('animal', ["dog", "cat"])
+ def test_1(animal):
+ assert animal in ('dog', 'cat')
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 2 passed *"])
+
+ def test_parametrize_all_indirects(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture()
+ def animal(request):
+ return request.param
+
+ @pytest.fixture(scope='session')
+ def echo(request):
+ return request.param
+
+ @pytest.mark.parametrize('animal, echo', [("dog", 1), ("cat", 2)], indirect=True)
+ def test_1(animal, echo):
+ assert animal in ('dog', 'cat')
+ assert echo in (1, 2, 3)
+
+ @pytest.mark.parametrize('animal, echo', [("fish", 3)], indirect=True)
+ def test_2(animal, echo):
+ assert animal == 'fish'
+ assert echo in (1, 2, 3)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 3 passed *"])
+
+ def test_parametrize_some_arguments_auto_scope(
+ self, pytester: Pytester, monkeypatch
+ ) -> None:
+ """Integration test for (#3941)"""
+ class_fix_setup: List[object] = []
+ monkeypatch.setattr(sys, "class_fix_setup", class_fix_setup, raising=False)
+ func_fix_setup: List[object] = []
+ monkeypatch.setattr(sys, "func_fix_setup", func_fix_setup, raising=False)
+
+ pytester.makepyfile(
+ """
+ import pytest
+ import sys
+
+ @pytest.fixture(scope='class', autouse=True)
+ def class_fix(request):
+ sys.class_fix_setup.append(request.param)
+
+ @pytest.fixture(autouse=True)
+ def func_fix():
+ sys.func_fix_setup.append(True)
+
+ @pytest.mark.parametrize('class_fix', [10, 20], indirect=True)
+ class Test:
+ def test_foo(self):
+ pass
+ def test_bar(self):
+ pass
+ """
+ )
+ result = pytester.runpytest_inprocess()
+ result.stdout.fnmatch_lines(["* 4 passed in *"])
+ assert func_fix_setup == [True] * 4
+ assert class_fix_setup == [10, 20]
+
+ def test_parametrize_issue634(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(scope='module')
+ def foo(request):
+ print('preparing foo-%d' % request.param)
+ return 'foo-%d' % request.param
+
+ def test_one(foo):
+ pass
+
+ def test_two(foo):
+ pass
+
+ test_two.test_with = (2, 3)
+
+ def pytest_generate_tests(metafunc):
+ params = (1, 2, 3, 4)
+ if not 'foo' in metafunc.fixturenames:
+ return
+
+ test_with = getattr(metafunc.function, 'test_with', None)
+ if test_with:
+ params = test_with
+ metafunc.parametrize('foo', params, indirect=True)
+ """
+ )
+ result = pytester.runpytest("-s")
+ output = result.stdout.str()
+ assert output.count("preparing foo-2") == 1
+ assert output.count("preparing foo-3") == 1
+
+
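The auto-scope tests above hinge on indirect parametrization, where the parametrized value is routed through a fixture via request.param instead of being handed to the test directly, so the fixture's scope controls how often setup runs. A compact sketch of that mechanism, using illustrative names:

    import pytest

    @pytest.fixture(scope="module")
    def database(request):
        # request.param carries the value supplied by parametrize(..., indirect=True).
        return {"dsn": request.param}

    @pytest.mark.parametrize("database", ["sqlite://", "postgres://"], indirect=True)
    def test_dsn(database):
        assert database["dsn"].endswith("://")
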
+class TestMarkersWithParametrization:
+ """#308"""
+
+ def test_simple_mark(self, pytester: Pytester) -> None:
+ s = """
+ import pytest
+
+ @pytest.mark.foo
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.param(1, 3, marks=pytest.mark.bar),
+ (2, 3),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ items = pytester.getitems(s)
+ assert len(items) == 3
+ for item in items:
+ assert "foo" in item.keywords
+ assert "bar" not in items[0].keywords
+ assert "bar" in items[1].keywords
+ assert "bar" not in items[2].keywords
+
+ def test_select_based_on_mark(self, pytester: Pytester) -> None:
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.param(2, 3, marks=pytest.mark.foo),
+ (3, 4),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ pytester.makepyfile(s)
+ rec = pytester.inline_run("-m", "foo")
+ passed, skipped, fail = rec.listoutcomes()
+ assert len(passed) == 1
+ assert len(skipped) == 0
+ assert len(fail) == 0
+
+ def test_simple_xfail(self, pytester: Pytester) -> None:
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.param(1, 3, marks=pytest.mark.xfail),
+ (2, 3),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ pytester.makepyfile(s)
+ reprec = pytester.inline_run()
+        # a non-strict xfail outcome is counted as skipped by assertoutcome()
+ reprec.assertoutcome(passed=2, skipped=1)
+
+ def test_simple_xfail_single_argname(self, pytester: Pytester) -> None:
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize("n", [
+ 2,
+ pytest.param(3, marks=pytest.mark.xfail),
+ 4,
+ ])
+ def test_isEven(n):
+ assert n % 2 == 0
+ """
+ pytester.makepyfile(s)
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2, skipped=1)
+
+ def test_xfail_with_arg(self, pytester: Pytester) -> None:
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.param(1, 3, marks=pytest.mark.xfail("True")),
+ (2, 3),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ pytester.makepyfile(s)
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2, skipped=1)
+
+ def test_xfail_with_kwarg(self, pytester: Pytester) -> None:
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.param(1, 3, marks=pytest.mark.xfail(reason="some bug")),
+ (2, 3),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ pytester.makepyfile(s)
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2, skipped=1)
+
+ def test_xfail_with_arg_and_kwarg(self, pytester: Pytester) -> None:
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.param(1, 3, marks=pytest.mark.xfail("True", reason="some bug")),
+ (2, 3),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ pytester.makepyfile(s)
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2, skipped=1)
+
+ @pytest.mark.parametrize("strict", [True, False])
+ def test_xfail_passing_is_xpass(self, pytester: Pytester, strict: bool) -> None:
+ s = """
+ import pytest
+
+ m = pytest.mark.xfail("sys.version_info > (0, 0, 0)", reason="some bug", strict={strict})
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ (1, 2),
+ pytest.param(2, 3, marks=m),
+ (3, 4),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """.format(
+ strict=strict
+ )
+ pytester.makepyfile(s)
+ reprec = pytester.inline_run()
+ passed, failed = (2, 1) if strict else (3, 0)
+ reprec.assertoutcome(passed=passed, failed=failed)
+
+ def test_parametrize_called_in_generate_tests(self, pytester: Pytester) -> None:
+ s = """
+ import pytest
+
+
+ def pytest_generate_tests(metafunc):
+ passingTestData = [(1, 2),
+ (2, 3)]
+ failingTestData = [(1, 3),
+ (2, 2)]
+
+ testData = passingTestData + [pytest.param(*d, marks=pytest.mark.xfail)
+ for d in failingTestData]
+ metafunc.parametrize(("n", "expected"), testData)
+
+
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """
+ pytester.makepyfile(s)
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2, skipped=2)
+
+ def test_parametrize_ID_generation_string_int_works(
+ self, pytester: Pytester
+ ) -> None:
+ """#290"""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def myfixture():
+ return 'example'
+ @pytest.mark.parametrize(
+ 'limit', (0, '0'))
+ def test_limit(limit, myfixture):
+ return
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=2)
+
+ @pytest.mark.parametrize("strict", [True, False])
+ def test_parametrize_marked_value(self, pytester: Pytester, strict: bool) -> None:
+ s = """
+ import pytest
+
+ @pytest.mark.parametrize(("n", "expected"), [
+ pytest.param(
+ 2,3,
+ marks=pytest.mark.xfail("sys.version_info > (0, 0, 0)", reason="some bug", strict={strict}),
+ ),
+ pytest.param(
+ 2,3,
+ marks=[pytest.mark.xfail("sys.version_info > (0, 0, 0)", reason="some bug", strict={strict})],
+ ),
+ ])
+ def test_increment(n, expected):
+ assert n + 1 == expected
+ """.format(
+ strict=strict
+ )
+ pytester.makepyfile(s)
+ reprec = pytester.inline_run()
+ passed, failed = (0, 2) if strict else (2, 0)
+ reprec.assertoutcome(passed=passed, failed=failed)
+
+ def test_pytest_make_parametrize_id(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_make_parametrize_id(config, val):
+ return str(val * 2)
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize("x", range(2))
+ def test_func(x):
+ pass
+ """
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(["*test_func*0*PASS*", "*test_func*2*PASS*"])
+
+ def test_pytest_make_parametrize_id_with_argname(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_make_parametrize_id(config, val, argname):
+ return str(val * 2 if argname == 'x' else val * 10)
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize("x", range(2))
+ def test_func_a(x):
+ pass
+
+ @pytest.mark.parametrize("y", [1])
+ def test_func_b(y):
+ pass
+ """
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ ["*test_func_a*0*PASS*", "*test_func_a*2*PASS*", "*test_func_b*10*PASS*"]
+ )
+
+ def test_parametrize_positional_args(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize("a", [1], False)
+ def test_foo(a):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(passed=1)
+
+ def test_parametrize_iterator(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import itertools
+ import pytest
+
+ id_parametrize = pytest.mark.parametrize(
+ ids=("param%d" % i for i in itertools.count())
+ )
+
+ @id_parametrize('y', ['a', 'b'])
+ def test1(y):
+ pass
+
+ @id_parametrize('y', ['a', 'b'])
+ def test2(y):
+ pass
+
+ @pytest.mark.parametrize("a, b", [(1, 2), (3, 4)], ids=itertools.count())
+ def test_converted_to_str(a, b):
+ pass
+ """
+ )
+ result = pytester.runpytest("-vv", "-s")
+ result.stdout.fnmatch_lines(
+ [
+ "test_parametrize_iterator.py::test1[param0] PASSED",
+ "test_parametrize_iterator.py::test1[param1] PASSED",
+ "test_parametrize_iterator.py::test2[param0] PASSED",
+ "test_parametrize_iterator.py::test2[param1] PASSED",
+ "test_parametrize_iterator.py::test_converted_to_str[0] PASSED",
+ "test_parametrize_iterator.py::test_converted_to_str[1] PASSED",
+ "*= 6 passed in *",
+ ]
+ )
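TestMarkersWithParametrization above centers on pytest.param, which attaches marks or an explicit id to a single parameter set. A short sketch of that API; the values and the reason string are illustrative:

    import pytest

    @pytest.mark.parametrize(
        ("n", "expected"),
        [
            (1, 2),
            pytest.param(1, 0, marks=pytest.mark.xfail(reason="known off-by-one")),
            pytest.param(10, 11, id="ten"),
        ],
    )
    def test_increment(n, expected):
        assert n + 1 == expected
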
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/python/raises.py b/testing/web-platform/tests/tools/third_party/pytest/testing/python/raises.py
new file mode 100644
index 0000000000..2d62e91091
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/python/raises.py
@@ -0,0 +1,298 @@
+import re
+import sys
+
+import pytest
+from _pytest.outcomes import Failed
+from _pytest.pytester import Pytester
+
+
+class TestRaises:
+ def test_check_callable(self) -> None:
+ with pytest.raises(TypeError, match=r".* must be callable"):
+ pytest.raises(RuntimeError, "int('qwe')") # type: ignore[call-overload]
+
+ def test_raises(self):
+ excinfo = pytest.raises(ValueError, int, "qwe")
+ assert "invalid literal" in str(excinfo.value)
+
+ def test_raises_function(self):
+ excinfo = pytest.raises(ValueError, int, "hello")
+ assert "invalid literal" in str(excinfo.value)
+
+ def test_raises_callable_no_exception(self) -> None:
+ class A:
+ def __call__(self):
+ pass
+
+ try:
+ pytest.raises(ValueError, A())
+ except pytest.fail.Exception:
+ pass
+
+ def test_raises_falsey_type_error(self) -> None:
+ with pytest.raises(TypeError):
+ with pytest.raises(AssertionError, match=0): # type: ignore[call-overload]
+ raise AssertionError("ohai")
+
+ def test_raises_repr_inflight(self):
+ """Ensure repr() on an exception info inside a pytest.raises with block works (#4386)"""
+
+ class E(Exception):
+ pass
+
+ with pytest.raises(E) as excinfo:
+            # Print the in-flight, not-yet-populated ExceptionInfo with str,
+            # repr and pprint to demonstrate that all three work while the
+            # with-block is still active.
+ print(str(excinfo))
+ print(repr(excinfo))
+ import pprint
+
+ pprint.pprint(excinfo)
+ raise E()
+
+ def test_raises_as_contextmanager(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ import _pytest._code
+
+ def test_simple():
+ with pytest.raises(ZeroDivisionError) as excinfo:
+ assert isinstance(excinfo, _pytest._code.ExceptionInfo)
+ 1/0
+ print(excinfo)
+ assert excinfo.type == ZeroDivisionError
+ assert isinstance(excinfo.value, ZeroDivisionError)
+
+ def test_noraise():
+ with pytest.raises(pytest.raises.Exception):
+ with pytest.raises(ValueError):
+ int()
+
+ def test_raise_wrong_exception_passes_by():
+ with pytest.raises(ZeroDivisionError):
+ with pytest.raises(ValueError):
+ 1/0
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+ def test_does_not_raise(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ from contextlib import contextmanager
+ import pytest
+
+ @contextmanager
+ def does_not_raise():
+ yield
+
+ @pytest.mark.parametrize('example_input,expectation', [
+ (3, does_not_raise()),
+ (2, does_not_raise()),
+ (1, does_not_raise()),
+ (0, pytest.raises(ZeroDivisionError)),
+ ])
+ def test_division(example_input, expectation):
+ '''Test how much I know division.'''
+ with expectation:
+ assert (6 / example_input) is not None
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*4 passed*"])
+
+ def test_does_not_raise_does_raise(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ from contextlib import contextmanager
+ import pytest
+
+ @contextmanager
+ def does_not_raise():
+ yield
+
+ @pytest.mark.parametrize('example_input,expectation', [
+ (0, does_not_raise()),
+ (1, pytest.raises(ZeroDivisionError)),
+ ])
+ def test_division(example_input, expectation):
+ '''Test how much I know division.'''
+ with expectation:
+ assert (6 / example_input) is not None
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*2 failed*"])
+
+ def test_noclass(self) -> None:
+ with pytest.raises(TypeError):
+ pytest.raises("wrong", lambda: None) # type: ignore[call-overload]
+
+ def test_invalid_arguments_to_raises(self) -> None:
+ with pytest.raises(TypeError, match="unknown"):
+ with pytest.raises(TypeError, unknown="bogus"): # type: ignore[call-overload]
+ raise ValueError()
+
+ def test_tuple(self):
+ with pytest.raises((KeyError, ValueError)):
+ raise KeyError("oops")
+
+ def test_no_raise_message(self) -> None:
+ try:
+ pytest.raises(ValueError, int, "0")
+ except pytest.fail.Exception as e:
+ assert e.msg == f"DID NOT RAISE {repr(ValueError)}"
+ else:
+ assert False, "Expected pytest.raises.Exception"
+
+ try:
+ with pytest.raises(ValueError):
+ pass
+ except pytest.fail.Exception as e:
+ assert e.msg == f"DID NOT RAISE {repr(ValueError)}"
+ else:
+ assert False, "Expected pytest.raises.Exception"
+
+ @pytest.mark.parametrize("method", ["function", "function_match", "with"])
+ def test_raises_cyclic_reference(self, method):
+ """Ensure pytest.raises does not leave a reference cycle (#1965)."""
+ import gc
+
+ class T:
+ def __call__(self):
+ raise ValueError
+
+ t = T()
+ refcount = len(gc.get_referrers(t))
+
+ if method == "function":
+ pytest.raises(ValueError, t)
+ elif method == "function_match":
+ pytest.raises(ValueError, t).match("^$")
+ else:
+ with pytest.raises(ValueError):
+ t()
+
+ # ensure both forms of pytest.raises don't leave exceptions in sys.exc_info()
+ assert sys.exc_info() == (None, None, None)
+
+ assert refcount == len(gc.get_referrers(t))
+
+ def test_raises_match(self) -> None:
+ msg = r"with base \d+"
+ with pytest.raises(ValueError, match=msg):
+ int("asdf")
+
+ msg = "with base 10"
+ with pytest.raises(ValueError, match=msg):
+ int("asdf")
+
+ msg = "with base 16"
+ expr = "Regex pattern {!r} does not match \"invalid literal for int() with base 10: 'asdf'\".".format(
+ msg
+ )
+ with pytest.raises(AssertionError, match=re.escape(expr)):
+ with pytest.raises(ValueError, match=msg):
+ int("asdf", base=10)
+
+ # "match" without context manager.
+ pytest.raises(ValueError, int, "asdf").match("invalid literal")
+ with pytest.raises(AssertionError) as excinfo:
+ pytest.raises(ValueError, int, "asdf").match(msg)
+ assert str(excinfo.value) == expr
+
+ pytest.raises(TypeError, int, match="invalid")
+
+ def tfunc(match):
+ raise ValueError(f"match={match}")
+
+ pytest.raises(ValueError, tfunc, match="asdf").match("match=asdf")
+ pytest.raises(ValueError, tfunc, match="").match("match=")
+
+ def test_match_failure_string_quoting(self):
+ with pytest.raises(AssertionError) as excinfo:
+ with pytest.raises(AssertionError, match="'foo"):
+ raise AssertionError("'bar")
+ (msg,) = excinfo.value.args
+ assert msg == 'Regex pattern "\'foo" does not match "\'bar".'
+
+ def test_match_failure_exact_string_message(self):
+ message = "Oh here is a message with (42) numbers in parameters"
+ with pytest.raises(AssertionError) as excinfo:
+ with pytest.raises(AssertionError, match=message):
+ raise AssertionError(message)
+ (msg,) = excinfo.value.args
+ assert msg == (
+ "Regex pattern 'Oh here is a message with (42) numbers in "
+ "parameters' does not match 'Oh here is a message with (42) "
+ "numbers in parameters'. Did you mean to `re.escape()` the regex?"
+ )
+
+ def test_raises_match_wrong_type(self):
+ """Raising an exception with the wrong type and match= given.
+
+        pytest should propagate the unexpected exception; the pattern match is not
+        really relevant if a different exception was raised.
+ """
+ with pytest.raises(ValueError):
+ with pytest.raises(IndexError, match="nomatch"):
+ int("asdf")
+
+ def test_raises_exception_looks_iterable(self):
+ class Meta(type):
+ def __getitem__(self, item):
+ return 1 / 0
+
+ def __len__(self):
+ return 1
+
+ class ClassLooksIterableException(Exception, metaclass=Meta):
+ pass
+
+ with pytest.raises(
+ Failed,
+ match=r"DID NOT RAISE <class 'raises(\..*)*ClassLooksIterableException'>",
+ ):
+ pytest.raises(ClassLooksIterableException, lambda: None)
+
+ def test_raises_with_raising_dunder_class(self) -> None:
+ """Test current behavior with regard to exceptions via __class__ (#4284)."""
+
+ class CrappyClass(Exception):
+ # Type ignored because it's bypassed intentionally.
+ @property # type: ignore
+ def __class__(self):
+ assert False, "via __class__"
+
+ with pytest.raises(AssertionError) as excinfo:
+ with pytest.raises(CrappyClass()): # type: ignore[call-overload]
+ pass
+ assert "via __class__" in excinfo.value.args[0]
+
+ def test_raises_context_manager_with_kwargs(self):
+ with pytest.raises(TypeError) as excinfo:
+ with pytest.raises(Exception, foo="bar"): # type: ignore[call-overload]
+ pass
+ assert "Unexpected keyword arguments" in str(excinfo.value)
+
+ def test_expected_exception_is_not_a_baseexception(self) -> None:
+ with pytest.raises(TypeError) as excinfo:
+ with pytest.raises("hello"): # type: ignore[call-overload]
+ pass # pragma: no cover
+ assert "must be a BaseException type, not str" in str(excinfo.value)
+
+ class NotAnException:
+ pass
+
+ with pytest.raises(TypeError) as excinfo:
+ with pytest.raises(NotAnException): # type: ignore[type-var]
+ pass # pragma: no cover
+ assert "must be a BaseException type, not NotAnException" in str(excinfo.value)
+
+ with pytest.raises(TypeError) as excinfo:
+ with pytest.raises(("hello", NotAnException)): # type: ignore[arg-type]
+ pass # pragma: no cover
+ assert "must be a BaseException type, not str" in str(excinfo.value)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/python/show_fixtures_per_test.py b/testing/web-platform/tests/tools/third_party/pytest/testing/python/show_fixtures_per_test.py
new file mode 100644
index 0000000000..f756dca41c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/python/show_fixtures_per_test.py
@@ -0,0 +1,254 @@
+from _pytest.pytester import Pytester
+
+
+def test_no_items_should_not_show_output(pytester: Pytester) -> None:
+ result = pytester.runpytest("--fixtures-per-test")
+ result.stdout.no_fnmatch_line("*fixtures used by*")
+ assert result.ret == 0
+
+
+def test_fixtures_in_module(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def _arg0():
+ """hidden arg0 fixture"""
+ @pytest.fixture
+ def arg1():
+ """arg1 docstring"""
+ def test_arg1(arg1):
+ pass
+ '''
+ )
+
+ result = pytester.runpytest("--fixtures-per-test", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "*fixtures used by test_arg1*",
+ "*(test_fixtures_in_module.py:9)*",
+ "arg1 -- test_fixtures_in_module.py:6",
+ " arg1 docstring",
+ ]
+ )
+ result.stdout.no_fnmatch_line("*_arg0*")
+
+
+def test_fixtures_in_conftest(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """arg1 docstring"""
+ @pytest.fixture
+ def arg2():
+ """arg2 docstring"""
+ @pytest.fixture
+ def arg3(arg1, arg2):
+ """arg3
+ docstring
+ """
+ '''
+ )
+ p = pytester.makepyfile(
+ """
+ def test_arg2(arg2):
+ pass
+ def test_arg3(arg3):
+ pass
+ """
+ )
+ result = pytester.runpytest("--fixtures-per-test", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "*fixtures used by test_arg2*",
+ "*(test_fixtures_in_conftest.py:2)*",
+ "arg2 -- conftest.py:6",
+ " arg2 docstring",
+ "*fixtures used by test_arg3*",
+ "*(test_fixtures_in_conftest.py:4)*",
+ "arg1 -- conftest.py:3",
+ " arg1 docstring",
+ "arg2 -- conftest.py:6",
+ " arg2 docstring",
+ "arg3 -- conftest.py:9",
+ " arg3",
+ ]
+ )
+
+
+def test_should_show_fixtures_used_by_test(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """arg1 from conftest"""
+ @pytest.fixture
+ def arg2():
+ """arg2 from conftest"""
+ '''
+ )
+ p = pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """arg1 from testmodule"""
+ def test_args(arg1, arg2):
+ pass
+ '''
+ )
+ result = pytester.runpytest("--fixtures-per-test", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "*fixtures used by test_args*",
+ "*(test_should_show_fixtures_used_by_test.py:6)*",
+ "arg1 -- test_should_show_fixtures_used_by_test.py:3",
+ " arg1 from testmodule",
+ "arg2 -- conftest.py:6",
+ " arg2 from conftest",
+ ]
+ )
+
+
+def test_verbose_include_private_fixtures_and_loc(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture
+ def _arg1():
+ """_arg1 from conftest"""
+ @pytest.fixture
+ def arg2(_arg1):
+ """arg2 from conftest"""
+ '''
+ )
+ p = pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg3():
+ """arg3 from testmodule"""
+ def test_args(arg2, arg3):
+ pass
+ '''
+ )
+ result = pytester.runpytest("--fixtures-per-test", "-v", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "*fixtures used by test_args*",
+ "*(test_verbose_include_private_fixtures_and_loc.py:6)*",
+ "_arg1 -- conftest.py:3",
+ " _arg1 from conftest",
+ "arg2 -- conftest.py:6",
+ " arg2 from conftest",
+ "arg3 -- test_verbose_include_private_fixtures_and_loc.py:3",
+ " arg3 from testmodule",
+ ]
+ )
+
+
+def test_doctest_items(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ '''
+ def foo():
+ """
+ >>> 1 + 1
+ 2
+ """
+ '''
+ )
+ pytester.maketxtfile(
+ """
+ >>> 1 + 1
+ 2
+ """
+ )
+ result = pytester.runpytest(
+ "--fixtures-per-test", "--doctest-modules", "--doctest-glob=*.txt", "-v"
+ )
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(["*collected 2 items*"])
+
+
+def test_multiline_docstring_in_module(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """Docstring content that spans across multiple lines,
+ through second line,
+ and through third line.
+
+ Docstring content that extends into a second paragraph.
+
+ Docstring content that extends into a third paragraph.
+ """
+ def test_arg1(arg1):
+ pass
+ '''
+ )
+
+ result = pytester.runpytest("--fixtures-per-test", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "*fixtures used by test_arg1*",
+ "*(test_multiline_docstring_in_module.py:13)*",
+ "arg1 -- test_multiline_docstring_in_module.py:3",
+ " Docstring content that spans across multiple lines,",
+ " through second line,",
+ " and through third line.",
+ ]
+ )
+
+
+def test_verbose_include_multiline_docstring(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg1():
+ """Docstring content that spans across multiple lines,
+ through second line,
+ and through third line.
+
+ Docstring content that extends into a second paragraph.
+
+ Docstring content that extends into a third paragraph.
+ """
+ def test_arg1(arg1):
+ pass
+ '''
+ )
+
+ result = pytester.runpytest("--fixtures-per-test", "-v", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "*fixtures used by test_arg1*",
+ "*(test_verbose_include_multiline_docstring.py:13)*",
+ "arg1 -- test_verbose_include_multiline_docstring.py:3",
+ " Docstring content that spans across multiple lines,",
+ " through second line,",
+ " and through third line.",
+ " ",
+ " Docstring content that extends into a second paragraph.",
+ " ",
+ " Docstring content that extends into a third paragraph.",
+ ]
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_argcomplete.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_argcomplete.py
new file mode 100644
index 0000000000..8c10e230b0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_argcomplete.py
@@ -0,0 +1,95 @@
+import subprocess
+import sys
+from pathlib import Path
+
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+
+# Tests for _argcomplete, not specific to any particular application.
+
+
+def equal_with_bash(prefix, ffc, fc, out=None):
+ res = ffc(prefix)
+ res_bash = set(fc(prefix))
+ retval = set(res) == res_bash
+ if out:
+ out.write(f"equal_with_bash({prefix}) {retval} {res}\n")
+ if not retval:
+ out.write(" python - bash: %s\n" % (set(res) - res_bash))
+ out.write(" bash - python: %s\n" % (res_bash - set(res)))
+ return retval
+
+
+# Copied from argcomplete.completers rather than imported from there, because
+# importing it also pulls in argcomplete.__init__, which opens file descriptor 9
+# and causes an OSError at the end of the test run.
+
+
+def _wrapcall(*args, **kargs):
+ try:
+ return subprocess.check_output(*args, **kargs).decode().splitlines()
+ except subprocess.CalledProcessError:
+ return []
+
+
+class FilesCompleter:
+ """File completer class, optionally takes a list of allowed extensions."""
+
+ def __init__(self, allowednames=(), directories=True):
+ # Fix if someone passes in a string instead of a list
+ if type(allowednames) is str:
+ allowednames = [allowednames]
+
+ self.allowednames = [x.lstrip("*").lstrip(".") for x in allowednames]
+ self.directories = directories
+
+ def __call__(self, prefix, **kwargs):
+ completion = []
+ if self.allowednames:
+ if self.directories:
+ files = _wrapcall(["bash", "-c", f"compgen -A directory -- '{prefix}'"])
+ completion += [f + "/" for f in files]
+ for x in self.allowednames:
+ completion += _wrapcall(
+ ["bash", "-c", f"compgen -A file -X '!*.{x}' -- '{prefix}'"]
+ )
+ else:
+ completion += _wrapcall(["bash", "-c", f"compgen -A file -- '{prefix}'"])
+
+ anticomp = _wrapcall(["bash", "-c", f"compgen -A directory -- '{prefix}'"])
+
+ completion = list(set(completion) - set(anticomp))
+
+ if self.directories:
+ completion += [f + "/" for f in anticomp]
+ return completion
+
+
+class TestArgComplete:
+ @pytest.mark.skipif("sys.platform in ('win32', 'darwin')")
+ def test_compare_with_compgen(
+ self, tmp_path: Path, monkeypatch: MonkeyPatch
+ ) -> None:
+ from _pytest._argcomplete import FastFilesCompleter
+
+ ffc = FastFilesCompleter()
+ fc = FilesCompleter()
+
+ monkeypatch.chdir(tmp_path)
+
+ assert equal_with_bash("", ffc, fc, out=sys.stdout)
+
+ tmp_path.cwd().joinpath("data").touch()
+
+ for x in ["d", "data", "doesnotexist", ""]:
+ assert equal_with_bash(x, ffc, fc, out=sys.stdout)
+
+ @pytest.mark.skipif("sys.platform in ('win32', 'darwin')")
+ def test_remove_dir_prefix(self):
+ """This is not compatible with compgen but it is with bash itself: ls /usr/<TAB>."""
+ from _pytest._argcomplete import FastFilesCompleter
+
+ ffc = FastFilesCompleter()
+ fc = FilesCompleter()
+ for x in "/usr/".split():
+ assert not equal_with_bash(x, ffc, fc, out=sys.stdout)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_assertion.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_assertion.py
new file mode 100644
index 0000000000..2516ff1629
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_assertion.py
@@ -0,0 +1,1685 @@
+import collections
+import sys
+import textwrap
+from typing import Any
+from typing import List
+from typing import MutableSequence
+from typing import Optional
+
+import attr
+
+import _pytest.assertion as plugin
+import pytest
+from _pytest import outcomes
+from _pytest.assertion import truncate
+from _pytest.assertion import util
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+
+
+def mock_config(verbose=0):
+ class Config:
+ def getoption(self, name):
+ if name == "verbose":
+ return verbose
+ raise KeyError("Not mocked out: %s" % name)
+
+ return Config()
+
+
+class TestImportHookInstallation:
+ @pytest.mark.parametrize("initial_conftest", [True, False])
+ @pytest.mark.parametrize("mode", ["plain", "rewrite"])
+ def test_conftest_assertion_rewrite(
+ self, pytester: Pytester, initial_conftest, mode
+ ) -> None:
+ """Test that conftest files are using assertion rewrite on import (#1619)."""
+ pytester.mkdir("foo")
+ pytester.mkdir("foo/tests")
+ conftest_path = "conftest.py" if initial_conftest else "foo/conftest.py"
+ contents = {
+ conftest_path: """
+ import pytest
+ @pytest.fixture
+ def check_first():
+ def check(values, value):
+ assert values.pop(0) == value
+ return check
+ """,
+ "foo/tests/test_foo.py": """
+ def test(check_first):
+ check_first([10, 30], 30)
+ """,
+ }
+ pytester.makepyfile(**contents)
+ result = pytester.runpytest_subprocess("--assert=%s" % mode)
+ if mode == "plain":
+ expected = "E AssertionError"
+ elif mode == "rewrite":
+ expected = "*assert 10 == 30*"
+ else:
+ assert 0
+ result.stdout.fnmatch_lines([expected])
+
+ def test_rewrite_assertions_pytester_plugin(self, pytester: Pytester) -> None:
+ """
+ Assertions in the pytester plugin must also benefit from assertion
+ rewriting (#1920).
+ """
+ pytester.makepyfile(
+ """
+ pytest_plugins = ['pytester']
+ def test_dummy_failure(pytester): # how meta!
+ pytester.makepyfile('def test(): assert 0')
+ r = pytester.inline_run()
+ r.assertoutcome(passed=1)
+ """
+ )
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(
+ [
+ "> r.assertoutcome(passed=1)",
+ "E AssertionError: ([[][]], [[][]], [[]<TestReport *>[]])*",
+ "E assert {'failed': 1,... 'skipped': 0} == {'failed': 0,... 'skipped': 0}",
+ "E Omitting 1 identical items, use -vv to show",
+ "E Differing items:",
+ "E Use -v to get the full diff",
+ ]
+ )
+ # XXX: unstable output.
+ result.stdout.fnmatch_lines_random(
+ [
+ "E {'failed': 1} != {'failed': 0}",
+ "E {'passed': 0} != {'passed': 1}",
+ ]
+ )
+
+ @pytest.mark.parametrize("mode", ["plain", "rewrite"])
+ def test_pytest_plugins_rewrite(self, pytester: Pytester, mode) -> None:
+ contents = {
+ "conftest.py": """
+ pytest_plugins = ['ham']
+ """,
+ "ham.py": """
+ import pytest
+ @pytest.fixture
+ def check_first():
+ def check(values, value):
+ assert values.pop(0) == value
+ return check
+ """,
+ "test_foo.py": """
+ def test_foo(check_first):
+ check_first([10, 30], 30)
+ """,
+ }
+ pytester.makepyfile(**contents)
+ result = pytester.runpytest_subprocess("--assert=%s" % mode)
+ if mode == "plain":
+ expected = "E AssertionError"
+ elif mode == "rewrite":
+ expected = "*assert 10 == 30*"
+ else:
+ assert 0
+ result.stdout.fnmatch_lines([expected])
+
+ @pytest.mark.parametrize("mode", ["str", "list"])
+ def test_pytest_plugins_rewrite_module_names(
+ self, pytester: Pytester, mode
+ ) -> None:
+ """Test that pluginmanager correct marks pytest_plugins variables
+ for assertion rewriting if they are defined as plain strings or
+ list of strings (#1888).
+ """
+ plugins = '"ham"' if mode == "str" else '["ham"]'
+ contents = {
+ "conftest.py": """
+ pytest_plugins = {plugins}
+ """.format(
+ plugins=plugins
+ ),
+ "ham.py": """
+ import pytest
+ """,
+ "test_foo.py": """
+ def test_foo(pytestconfig):
+ assert 'ham' in pytestconfig.pluginmanager.rewrite_hook._must_rewrite
+ """,
+ }
+ pytester.makepyfile(**contents)
+ result = pytester.runpytest_subprocess("--assert=rewrite")
+ assert result.ret == 0
+
+ def test_pytest_plugins_rewrite_module_names_correctly(
+ self, pytester: Pytester
+ ) -> None:
+ """Test that we match files correctly when they are marked for rewriting (#2939)."""
+ contents = {
+ "conftest.py": """\
+ pytest_plugins = "ham"
+ """,
+ "ham.py": "",
+ "hamster.py": "",
+ "test_foo.py": """\
+ def test_foo(pytestconfig):
+ assert pytestconfig.pluginmanager.rewrite_hook.find_spec('ham') is not None
+ assert pytestconfig.pluginmanager.rewrite_hook.find_spec('hamster') is None
+ """,
+ }
+ pytester.makepyfile(**contents)
+ result = pytester.runpytest_subprocess("--assert=rewrite")
+ assert result.ret == 0
+
+ @pytest.mark.parametrize("mode", ["plain", "rewrite"])
+ def test_installed_plugin_rewrite(
+ self, pytester: Pytester, mode, monkeypatch
+ ) -> None:
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
+ # Make sure the hook is installed early enough so that plugins
+ # installed via setuptools are rewritten.
+ pytester.mkdir("hampkg")
+ contents = {
+ "hampkg/__init__.py": """\
+ import pytest
+
+ @pytest.fixture
+ def check_first2():
+ def check(values, value):
+ assert values.pop(0) == value
+ return check
+ """,
+ "spamplugin.py": """\
+ import pytest
+ from hampkg import check_first2
+
+ @pytest.fixture
+ def check_first():
+ def check(values, value):
+ assert values.pop(0) == value
+ return check
+ """,
+ "mainwrapper.py": """\
+ import pytest
+ from _pytest.compat import importlib_metadata
+
+ class DummyEntryPoint(object):
+ name = 'spam'
+ module_name = 'spam.py'
+ group = 'pytest11'
+
+ def load(self):
+ import spamplugin
+ return spamplugin
+
+ class DummyDistInfo(object):
+ version = '1.0'
+ files = ('spamplugin.py', 'hampkg/__init__.py')
+ entry_points = (DummyEntryPoint(),)
+ metadata = {'name': 'foo'}
+
+ def distributions():
+ return (DummyDistInfo(),)
+
+ importlib_metadata.distributions = distributions
+ pytest.main()
+ """,
+ "test_foo.py": """\
+ def test(check_first):
+ check_first([10, 30], 30)
+
+ def test2(check_first2):
+ check_first([10, 30], 30)
+ """,
+ }
+ pytester.makepyfile(**contents)
+ result = pytester.run(
+ sys.executable, "mainwrapper.py", "-s", "--assert=%s" % mode
+ )
+ if mode == "plain":
+ expected = "E AssertionError"
+ elif mode == "rewrite":
+ expected = "*assert 10 == 30*"
+ else:
+ assert 0
+ result.stdout.fnmatch_lines([expected])
+
+ def test_rewrite_ast(self, pytester: Pytester) -> None:
+ pytester.mkdir("pkg")
+ contents = {
+ "pkg/__init__.py": """
+ import pytest
+ pytest.register_assert_rewrite('pkg.helper')
+ """,
+ "pkg/helper.py": """
+ def tool():
+ a, b = 2, 3
+ assert a == b
+ """,
+ "pkg/plugin.py": """
+ import pytest, pkg.helper
+ @pytest.fixture
+ def tool():
+ return pkg.helper.tool
+ """,
+ "pkg/other.py": """
+ values = [3, 2]
+ def tool():
+ assert values.pop() == 3
+ """,
+ "conftest.py": """
+ pytest_plugins = ['pkg.plugin']
+ """,
+ "test_pkg.py": """
+ import pkg.other
+ def test_tool(tool):
+ tool()
+ def test_other():
+ pkg.other.tool()
+ """,
+ }
+ pytester.makepyfile(**contents)
+ result = pytester.runpytest_subprocess("--assert=rewrite")
+ result.stdout.fnmatch_lines(
+ [
+ ">*assert a == b*",
+ "E*assert 2 == 3*",
+ ">*assert values.pop() == 3*",
+ "E*AssertionError",
+ ]
+ )
+
+ def test_register_assert_rewrite_checks_types(self) -> None:
+ with pytest.raises(TypeError):
+ pytest.register_assert_rewrite(["pytest_tests_internal_non_existing"]) # type: ignore
+ pytest.register_assert_rewrite(
+ "pytest_tests_internal_non_existing", "pytest_tests_internal_non_existing2"
+ )
+
+
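TestImportHookInstallation above is concerned with which modules receive assertion rewriting; the public way to opt a helper module in is pytest.register_assert_rewrite, sketched here with illustrative package and module names:

    # pkg/__init__.py
    import pytest

    # Must run before pkg.checks is imported anywhere, hence the package __init__.
    pytest.register_assert_rewrite("pkg.checks")

    # pkg/checks.py
    def check_payload(payload):
        # Plain asserts in a registered module get the rewritten, detailed failure output.
        assert payload["status"] == "ok"
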
+class TestBinReprIntegration:
+ def test_pytest_assertrepr_compare_called(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ values = []
+ def pytest_assertrepr_compare(op, left, right):
+ values.append((op, left, right))
+
+ @pytest.fixture
+ def list(request):
+ return values
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_hello():
+ assert 0 == 1
+ def test_check(list):
+ assert list == [("==", 0, 1)]
+ """
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(["*test_hello*FAIL*", "*test_check*PASS*"])
+
+
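TestBinReprIntegration above and the callop/callequal helpers below revolve around the pytest_assertrepr_compare hook, which lets a conftest or plugin supply its own explanation lines for a failed comparison. A minimal sketch with an illustrative value type:

    # conftest.py
    class Money:
        def __init__(self, cents):
            self.cents = cents

    def pytest_assertrepr_compare(config, op, left, right):
        # Return a list of lines to replace the default explanation, or None to decline.
        if isinstance(left, Money) and isinstance(right, Money) and op == "==":
            return [
                "Comparing Money instances:",
                f"   {left.cents} cents != {right.cents} cents",
            ]
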
+def callop(op: str, left: Any, right: Any, verbose: int = 0) -> Optional[List[str]]:
+ config = mock_config(verbose=verbose)
+ return plugin.pytest_assertrepr_compare(config, op, left, right)
+
+
+def callequal(left: Any, right: Any, verbose: int = 0) -> Optional[List[str]]:
+ return callop("==", left, right, verbose)
+
+
+class TestAssert_reprcompare:
+ def test_different_types(self) -> None:
+ assert callequal([0, 1], "foo") is None
+
+ def test_summary(self) -> None:
+ lines = callequal([0, 1], [0, 2])
+ assert lines is not None
+ summary = lines[0]
+ assert len(summary) < 65
+
+ def test_text_diff(self) -> None:
+ assert callequal("spam", "eggs") == [
+ "'spam' == 'eggs'",
+ "- eggs",
+ "+ spam",
+ ]
+
+ def test_text_skipping(self) -> None:
+ lines = callequal("a" * 50 + "spam", "a" * 50 + "eggs")
+ assert lines is not None
+ assert "Skipping" in lines[1]
+ for line in lines:
+ assert "a" * 50 not in line
+
+ def test_text_skipping_verbose(self) -> None:
+ lines = callequal("a" * 50 + "spam", "a" * 50 + "eggs", verbose=1)
+ assert lines is not None
+ assert "- " + "a" * 50 + "eggs" in lines
+ assert "+ " + "a" * 50 + "spam" in lines
+
+ def test_multiline_text_diff(self) -> None:
+ left = "foo\nspam\nbar"
+ right = "foo\neggs\nbar"
+ diff = callequal(left, right)
+ assert diff is not None
+ assert "- eggs" in diff
+ assert "+ spam" in diff
+
+ def test_bytes_diff_normal(self) -> None:
+ """Check special handling for bytes diff (#5260)"""
+ diff = callequal(b"spam", b"eggs")
+
+ assert diff == [
+ "b'spam' == b'eggs'",
+ "At index 0 diff: b's' != b'e'",
+ "Use -v to get the full diff",
+ ]
+
+ def test_bytes_diff_verbose(self) -> None:
+ """Check special handling for bytes diff (#5260)"""
+ diff = callequal(b"spam", b"eggs", verbose=1)
+ assert diff == [
+ "b'spam' == b'eggs'",
+ "At index 0 diff: b's' != b'e'",
+ "Full diff:",
+ "- b'eggs'",
+ "+ b'spam'",
+ ]
+
+ def test_list(self) -> None:
+ expl = callequal([0, 1], [0, 2])
+ assert expl is not None
+ assert len(expl) > 1
+
+ @pytest.mark.parametrize(
+ ["left", "right", "expected"],
+ [
+ pytest.param(
+ [0, 1],
+ [0, 2],
+ """
+ Full diff:
+ - [0, 2]
+ ? ^
+ + [0, 1]
+ ? ^
+ """,
+ id="lists",
+ ),
+ pytest.param(
+ {0: 1},
+ {0: 2},
+ """
+ Full diff:
+ - {0: 2}
+ ? ^
+ + {0: 1}
+ ? ^
+ """,
+ id="dicts",
+ ),
+ pytest.param(
+ {0, 1},
+ {0, 2},
+ """
+ Full diff:
+ - {0, 2}
+ ? ^
+ + {0, 1}
+ ? ^
+ """,
+ id="sets",
+ ),
+ ],
+ )
+ def test_iterable_full_diff(self, left, right, expected) -> None:
+ """Test the full diff assertion failure explanation.
+
+        When verbose is False, only a notice to re-run with -v for the full diff
+        is rendered; when verbose is True, an ndiff of the pprinted values is returned.
+ """
+ expl = callequal(left, right, verbose=0)
+ assert expl is not None
+ assert expl[-1] == "Use -v to get the full diff"
+ verbose_expl = callequal(left, right, verbose=1)
+ assert verbose_expl is not None
+ assert "\n".join(verbose_expl).endswith(textwrap.dedent(expected).strip())
+
+ def test_iterable_full_diff_ci(
+ self, monkeypatch: MonkeyPatch, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ r"""
+ def test_full_diff():
+ left = [0, 1]
+ right = [0, 2]
+ assert left == right
+ """
+ )
+ monkeypatch.setenv("CI", "true")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["E Full diff:"])
+
+ monkeypatch.delenv("CI", raising=False)
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["E Use -v to get the full diff"])
+
+ def test_list_different_lengths(self) -> None:
+ expl = callequal([0, 1], [0, 1, 2])
+ assert expl is not None
+ assert len(expl) > 1
+ expl = callequal([0, 1, 2], [0, 1])
+ assert expl is not None
+ assert len(expl) > 1
+
+ def test_list_wrap_for_multiple_lines(self) -> None:
+ long_d = "d" * 80
+ l1 = ["a", "b", "c"]
+ l2 = ["a", "b", "c", long_d]
+ diff = callequal(l1, l2, verbose=True)
+ assert diff == [
+ "['a', 'b', 'c'] == ['a', 'b', 'c...dddddddddddd']",
+ "Right contains one more item: '" + long_d + "'",
+ "Full diff:",
+ " [",
+ " 'a',",
+ " 'b',",
+ " 'c',",
+ "- '" + long_d + "',",
+ " ]",
+ ]
+
+ diff = callequal(l2, l1, verbose=True)
+ assert diff == [
+ "['a', 'b', 'c...dddddddddddd'] == ['a', 'b', 'c']",
+ "Left contains one more item: '" + long_d + "'",
+ "Full diff:",
+ " [",
+ " 'a',",
+ " 'b',",
+ " 'c',",
+ "+ '" + long_d + "',",
+ " ]",
+ ]
+
+ def test_list_wrap_for_width_rewrap_same_length(self) -> None:
+ long_a = "a" * 30
+ long_b = "b" * 30
+ long_c = "c" * 30
+ l1 = [long_a, long_b, long_c]
+ l2 = [long_b, long_c, long_a]
+ diff = callequal(l1, l2, verbose=True)
+ assert diff == [
+ "['aaaaaaaaaaa...cccccccccccc'] == ['bbbbbbbbbbb...aaaaaaaaaaaa']",
+ "At index 0 diff: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' != 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'",
+ "Full diff:",
+ " [",
+ "+ 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',",
+ " 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',",
+ " 'cccccccccccccccccccccccccccccc',",
+ "- 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',",
+ " ]",
+ ]
+
+ def test_list_dont_wrap_strings(self) -> None:
+ long_a = "a" * 10
+ l1 = ["a"] + [long_a for _ in range(0, 7)]
+ l2 = ["should not get wrapped"]
+ diff = callequal(l1, l2, verbose=True)
+ assert diff == [
+ "['a', 'aaaaaa...aaaaaaa', ...] == ['should not get wrapped']",
+ "At index 0 diff: 'a' != 'should not get wrapped'",
+ "Left contains 7 more items, first extra item: 'aaaaaaaaaa'",
+ "Full diff:",
+ " [",
+ "- 'should not get wrapped',",
+ "+ 'a',",
+ "+ 'aaaaaaaaaa',",
+ "+ 'aaaaaaaaaa',",
+ "+ 'aaaaaaaaaa',",
+ "+ 'aaaaaaaaaa',",
+ "+ 'aaaaaaaaaa',",
+ "+ 'aaaaaaaaaa',",
+ "+ 'aaaaaaaaaa',",
+ " ]",
+ ]
+
+ def test_dict_wrap(self) -> None:
+ d1 = {"common": 1, "env": {"env1": 1, "env2": 2}}
+ d2 = {"common": 1, "env": {"env1": 1}}
+
+ diff = callequal(d1, d2, verbose=True)
+ assert diff == [
+ "{'common': 1,...1, 'env2': 2}} == {'common': 1,...: {'env1': 1}}",
+ "Omitting 1 identical items, use -vv to show",
+ "Differing items:",
+ "{'env': {'env1': 1, 'env2': 2}} != {'env': {'env1': 1}}",
+ "Full diff:",
+ "- {'common': 1, 'env': {'env1': 1}}",
+ "+ {'common': 1, 'env': {'env1': 1, 'env2': 2}}",
+ "? +++++++++++",
+ ]
+
+ long_a = "a" * 80
+ sub = {"long_a": long_a, "sub1": {"long_a": "substring that gets wrapped " * 2}}
+ d1 = {"env": {"sub": sub}}
+ d2 = {"env": {"sub": sub}, "new": 1}
+ diff = callequal(d1, d2, verbose=True)
+ assert diff == [
+ "{'env': {'sub... wrapped '}}}} == {'env': {'sub...}}}, 'new': 1}",
+ "Omitting 1 identical items, use -vv to show",
+ "Right contains 1 more item:",
+ "{'new': 1}",
+ "Full diff:",
+ " {",
+ " 'env': {'sub': {'long_a': '" + long_a + "',",
+ " 'sub1': {'long_a': 'substring that gets wrapped substring '",
+ " 'that gets wrapped '}}},",
+ "- 'new': 1,",
+ " }",
+ ]
+
+ def test_dict(self) -> None:
+ expl = callequal({"a": 0}, {"a": 1})
+ assert expl is not None
+ assert len(expl) > 1
+
+ def test_dict_omitting(self) -> None:
+ lines = callequal({"a": 0, "b": 1}, {"a": 1, "b": 1})
+ assert lines is not None
+ assert lines[1].startswith("Omitting 1 identical item")
+ assert "Common items" not in lines
+ for line in lines[1:]:
+ assert "b" not in line
+
+ def test_dict_omitting_with_verbosity_1(self) -> None:
+ """Ensure differing items are visible for verbosity=1 (#1512)."""
+ lines = callequal({"a": 0, "b": 1}, {"a": 1, "b": 1}, verbose=1)
+ assert lines is not None
+ assert lines[1].startswith("Omitting 1 identical item")
+ assert lines[2].startswith("Differing items")
+ assert lines[3] == "{'a': 0} != {'a': 1}"
+ assert "Common items" not in lines
+
+ def test_dict_omitting_with_verbosity_2(self) -> None:
+ lines = callequal({"a": 0, "b": 1}, {"a": 1, "b": 1}, verbose=2)
+ assert lines is not None
+ assert lines[1].startswith("Common items:")
+ assert "Omitting" not in lines[1]
+ assert lines[2] == "{'b': 1}"
+
+ def test_dict_different_items(self) -> None:
+ lines = callequal({"a": 0}, {"b": 1, "c": 2}, verbose=2)
+ assert lines == [
+ "{'a': 0} == {'b': 1, 'c': 2}",
+ "Left contains 1 more item:",
+ "{'a': 0}",
+ "Right contains 2 more items:",
+ "{'b': 1, 'c': 2}",
+ "Full diff:",
+ "- {'b': 1, 'c': 2}",
+ "+ {'a': 0}",
+ ]
+ lines = callequal({"b": 1, "c": 2}, {"a": 0}, verbose=2)
+ assert lines == [
+ "{'b': 1, 'c': 2} == {'a': 0}",
+ "Left contains 2 more items:",
+ "{'b': 1, 'c': 2}",
+ "Right contains 1 more item:",
+ "{'a': 0}",
+ "Full diff:",
+ "- {'a': 0}",
+ "+ {'b': 1, 'c': 2}",
+ ]
+
+ def test_sequence_different_items(self) -> None:
+ lines = callequal((1, 2), (3, 4, 5), verbose=2)
+ assert lines == [
+ "(1, 2) == (3, 4, 5)",
+ "At index 0 diff: 1 != 3",
+ "Right contains one more item: 5",
+ "Full diff:",
+ "- (3, 4, 5)",
+ "+ (1, 2)",
+ ]
+ lines = callequal((1, 2, 3), (4,), verbose=2)
+ assert lines == [
+ "(1, 2, 3) == (4,)",
+ "At index 0 diff: 1 != 4",
+ "Left contains 2 more items, first extra item: 2",
+ "Full diff:",
+ "- (4,)",
+ "+ (1, 2, 3)",
+ ]
+
+ def test_set(self) -> None:
+ expl = callequal({0, 1}, {0, 2})
+ assert expl is not None
+ assert len(expl) > 1
+
+    def test_frozenset(self) -> None:
+ expl = callequal(frozenset([0, 1]), {0, 2})
+ assert expl is not None
+ assert len(expl) > 1
+
+ def test_Sequence(self) -> None:
+ # Test comparing with a Sequence subclass.
+ class TestSequence(MutableSequence[int]):
+ def __init__(self, iterable):
+ self.elements = list(iterable)
+
+ def __getitem__(self, item):
+ return self.elements[item]
+
+ def __len__(self):
+ return len(self.elements)
+
+ def __setitem__(self, item, value):
+ pass
+
+ def __delitem__(self, item):
+ pass
+
+ def insert(self, item, index):
+ pass
+
+ expl = callequal(TestSequence([0, 1]), list([0, 2]))
+ assert expl is not None
+ assert len(expl) > 1
+
+ def test_list_tuples(self) -> None:
+ expl = callequal([], [(1, 2)])
+ assert expl is not None
+ assert len(expl) > 1
+ expl = callequal([(1, 2)], [])
+ assert expl is not None
+ assert len(expl) > 1
+
+ def test_repr_verbose(self) -> None:
+ class Nums:
+ def __init__(self, nums):
+ self.nums = nums
+
+ def __repr__(self):
+ return str(self.nums)
+
+ list_x = list(range(5000))
+ list_y = list(range(5000))
+ list_y[len(list_y) // 2] = 3
+ nums_x = Nums(list_x)
+ nums_y = Nums(list_y)
+
+ assert callequal(nums_x, nums_y) is None
+
+ expl = callequal(nums_x, nums_y, verbose=1)
+ assert expl is not None
+ assert "+" + repr(nums_x) in expl
+ assert "-" + repr(nums_y) in expl
+
+ expl = callequal(nums_x, nums_y, verbose=2)
+ assert expl is not None
+ assert "+" + repr(nums_x) in expl
+ assert "-" + repr(nums_y) in expl
+
+ def test_list_bad_repr(self) -> None:
+ class A:
+ def __repr__(self):
+ raise ValueError(42)
+
+ expl = callequal([], [A()])
+ assert expl is not None
+ assert "ValueError" in "".join(expl)
+ expl = callequal({}, {"1": A()}, verbose=2)
+ assert expl is not None
+ assert expl[0].startswith("{} == <[ValueError")
+ assert "raised in repr" in expl[0]
+ assert expl[1:] == [
+ "(pytest_assertion plugin: representation of details failed:"
+ " {}:{}: ValueError: 42.".format(
+ __file__, A.__repr__.__code__.co_firstlineno + 1
+ ),
+ " Probably an object has a faulty __repr__.)",
+ ]
+
+ def test_one_repr_empty(self) -> None:
+ """The faulty empty string repr did trigger an unbound local error in _diff_text."""
+
+ class A(str):
+ def __repr__(self):
+ return ""
+
+ expl = callequal(A(), "")
+ assert not expl
+
+ def test_repr_no_exc(self) -> None:
+ expl = callequal("foo", "bar")
+ assert expl is not None
+ assert "raised in repr()" not in " ".join(expl)
+
+ def test_unicode(self) -> None:
+ assert callequal("£€", "£") == [
+ "'£€' == '£'",
+ "- £",
+ "+ £€",
+ ]
+
+ def test_nonascii_text(self) -> None:
+ """
+ :issue: 877
+        A non-ASCII Python 2 str caused a UnicodeDecodeError.
+ """
+
+ class A(str):
+ def __repr__(self):
+ return "\xff"
+
+ expl = callequal(A(), "1")
+ assert expl == ["ÿ == '1'", "- 1"]
+
+ def test_format_nonascii_explanation(self) -> None:
+ assert util.format_explanation("λ")
+
+ def test_mojibake(self) -> None:
+ # issue 429
+ left = b"e"
+ right = b"\xc3\xa9"
+ expl = callequal(left, right)
+ assert expl is not None
+ for line in expl:
+ assert isinstance(line, str)
+ msg = "\n".join(expl)
+ assert msg
+
+
+class TestAssert_reprcompare_dataclass:
+ @pytest.mark.skipif(sys.version_info < (3, 7), reason="Dataclasses in Python3.7+")
+ def test_dataclasses(self, pytester: Pytester) -> None:
+ p = pytester.copy_example("dataclasses/test_compare_dataclasses.py")
+ result = pytester.runpytest(p)
+ result.assert_outcomes(failed=1, passed=0)
+ result.stdout.fnmatch_lines(
+ [
+ "E Omitting 1 identical items, use -vv to show",
+ "E Differing attributes:",
+ "E ['field_b']",
+ "E ",
+ "E Drill down into differing attribute field_b:",
+ "E field_b: 'b' != 'c'...",
+ "E ",
+ "E ...Full output truncated (3 lines hidden), use '-vv' to show",
+ ],
+ consecutive=True,
+ )
+
+ @pytest.mark.skipif(sys.version_info < (3, 7), reason="Dataclasses in Python3.7+")
+ def test_recursive_dataclasses(self, pytester: Pytester) -> None:
+ p = pytester.copy_example("dataclasses/test_compare_recursive_dataclasses.py")
+ result = pytester.runpytest(p)
+ result.assert_outcomes(failed=1, passed=0)
+ result.stdout.fnmatch_lines(
+ [
+ "E Omitting 1 identical items, use -vv to show",
+ "E Differing attributes:",
+ "E ['g', 'h', 'j']",
+ "E ",
+ "E Drill down into differing attribute g:",
+ "E g: S(a=10, b='ten') != S(a=20, b='xxx')...",
+ "E ",
+ "E ...Full output truncated (52 lines hidden), use '-vv' to show",
+ ],
+ consecutive=True,
+ )
+
+ @pytest.mark.skipif(sys.version_info < (3, 7), reason="Dataclasses in Python3.7+")
+ def test_recursive_dataclasses_verbose(self, pytester: Pytester) -> None:
+ p = pytester.copy_example("dataclasses/test_compare_recursive_dataclasses.py")
+ result = pytester.runpytest(p, "-vv")
+ result.assert_outcomes(failed=1, passed=0)
+ result.stdout.fnmatch_lines(
+ [
+ "E Matching attributes:",
+ "E ['i']",
+ "E Differing attributes:",
+ "E ['g', 'h', 'j']",
+ "E ",
+ "E Drill down into differing attribute g:",
+ "E g: S(a=10, b='ten') != S(a=20, b='xxx')",
+ "E ",
+ "E Differing attributes:",
+ "E ['a', 'b']",
+ "E ",
+ "E Drill down into differing attribute a:",
+ "E a: 10 != 20",
+ "E +10",
+ "E -20",
+ "E ",
+ "E Drill down into differing attribute b:",
+ "E b: 'ten' != 'xxx'",
+ "E - xxx",
+ "E + ten",
+ "E ",
+ "E Drill down into differing attribute h:",
+ ],
+ consecutive=True,
+ )
+
+ @pytest.mark.skipif(sys.version_info < (3, 7), reason="Dataclasses in Python3.7+")
+ def test_dataclasses_verbose(self, pytester: Pytester) -> None:
+ p = pytester.copy_example("dataclasses/test_compare_dataclasses_verbose.py")
+ result = pytester.runpytest(p, "-vv")
+ result.assert_outcomes(failed=1, passed=0)
+ result.stdout.fnmatch_lines(
+ [
+ "*Matching attributes:*",
+ "*['field_a']*",
+ "*Differing attributes:*",
+ "*field_b: 'b' != 'c'*",
+ ]
+ )
+
+ @pytest.mark.skipif(sys.version_info < (3, 7), reason="Dataclasses in Python3.7+")
+ def test_dataclasses_with_attribute_comparison_off(
+ self, pytester: Pytester
+ ) -> None:
+ p = pytester.copy_example(
+ "dataclasses/test_compare_dataclasses_field_comparison_off.py"
+ )
+ result = pytester.runpytest(p, "-vv")
+ result.assert_outcomes(failed=0, passed=1)
+
+ @pytest.mark.skipif(sys.version_info < (3, 7), reason="Dataclasses in Python3.7+")
+ def test_comparing_two_different_data_classes(self, pytester: Pytester) -> None:
+ p = pytester.copy_example(
+ "dataclasses/test_compare_two_different_dataclasses.py"
+ )
+ result = pytester.runpytest(p, "-vv")
+ result.assert_outcomes(failed=0, passed=1)
+
+
+class TestAssert_reprcompare_attrsclass:
+ def test_attrs(self) -> None:
+ @attr.s
+ class SimpleDataObject:
+ field_a = attr.ib()
+ field_b = attr.ib()
+
+ left = SimpleDataObject(1, "b")
+ right = SimpleDataObject(1, "c")
+
+ lines = callequal(left, right)
+ assert lines is not None
+ assert lines[2].startswith("Omitting 1 identical item")
+ assert "Matching attributes" not in lines
+ for line in lines[2:]:
+ assert "field_a" not in line
+
+ def test_attrs_recursive(self) -> None:
+ @attr.s
+ class OtherDataObject:
+ field_c = attr.ib()
+ field_d = attr.ib()
+
+ @attr.s
+ class SimpleDataObject:
+ field_a = attr.ib()
+ field_b = attr.ib()
+
+ left = SimpleDataObject(OtherDataObject(1, "a"), "b")
+ right = SimpleDataObject(OtherDataObject(1, "b"), "b")
+
+ lines = callequal(left, right)
+ assert lines is not None
+ assert "Matching attributes" not in lines
+ for line in lines[1:]:
+ assert "field_b:" not in line
+ assert "field_c:" not in line
+
+ def test_attrs_recursive_verbose(self) -> None:
+ @attr.s
+ class OtherDataObject:
+ field_c = attr.ib()
+ field_d = attr.ib()
+
+ @attr.s
+ class SimpleDataObject:
+ field_a = attr.ib()
+ field_b = attr.ib()
+
+ left = SimpleDataObject(OtherDataObject(1, "a"), "b")
+ right = SimpleDataObject(OtherDataObject(1, "b"), "b")
+
+ lines = callequal(left, right)
+ assert lines is not None
+ # indentation in output because of nested object structure
+ assert " field_d: 'a' != 'b'" in lines
+
+ def test_attrs_verbose(self) -> None:
+ @attr.s
+ class SimpleDataObject:
+ field_a = attr.ib()
+ field_b = attr.ib()
+
+ left = SimpleDataObject(1, "b")
+ right = SimpleDataObject(1, "c")
+
+ lines = callequal(left, right, verbose=2)
+ assert lines is not None
+ assert lines[2].startswith("Matching attributes:")
+ assert "Omitting" not in lines[2]
+ assert lines[3] == "['field_a']"
+
+ def test_attrs_with_attribute_comparison_off(self) -> None:
+ @attr.s
+ class SimpleDataObject:
+ field_a = attr.ib()
+ field_b = attr.ib(eq=False)
+
+ left = SimpleDataObject(1, "b")
+ right = SimpleDataObject(1, "b")
+
+ lines = callequal(left, right, verbose=2)
+ print(lines)
+ assert lines is not None
+ assert lines[2].startswith("Matching attributes:")
+ assert "Omitting" not in lines[1]
+ assert lines[3] == "['field_a']"
+ for line in lines[3:]:
+ assert "field_b" not in line
+
+ def test_comparing_two_different_attrs_classes(self) -> None:
+ @attr.s
+ class SimpleDataObjectOne:
+ field_a = attr.ib()
+ field_b = attr.ib()
+
+ @attr.s
+ class SimpleDataObjectTwo:
+ field_a = attr.ib()
+ field_b = attr.ib()
+
+ left = SimpleDataObjectOne(1, "b")
+ right = SimpleDataObjectTwo(1, "c")
+
+ lines = callequal(left, right)
+ assert lines is None
+
+
+class TestAssert_reprcompare_namedtuple:
+ def test_namedtuple(self) -> None:
+ NT = collections.namedtuple("NT", ["a", "b"])
+
+ left = NT(1, "b")
+ right = NT(1, "c")
+
+ lines = callequal(left, right)
+ assert lines == [
+ "NT(a=1, b='b') == NT(a=1, b='c')",
+ "",
+ "Omitting 1 identical items, use -vv to show",
+ "Differing attributes:",
+ "['b']",
+ "",
+ "Drill down into differing attribute b:",
+ " b: 'b' != 'c'",
+ " - c",
+ " + b",
+ "Use -v to get the full diff",
+ ]
+
+ def test_comparing_two_different_namedtuple(self) -> None:
+ NT1 = collections.namedtuple("NT1", ["a", "b"])
+ NT2 = collections.namedtuple("NT2", ["a", "b"])
+
+ left = NT1(1, "b")
+ right = NT2(2, "b")
+
+ lines = callequal(left, right)
+        # Because the types are different, the generic sequence matcher is used.
+ assert lines == [
+ "NT1(a=1, b='b') == NT2(a=2, b='b')",
+ "At index 0 diff: 1 != 2",
+ "Use -v to get the full diff",
+ ]
+
+
+class TestFormatExplanation:
+ def test_special_chars_full(self, pytester: Pytester) -> None:
+        # Issue 453: with the bug present, this would raise an IndexError.
+ pytester.makepyfile(
+ """
+ def test_foo():
+ assert '\\n}' == ''
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*AssertionError*"])
+
+ def test_fmt_simple(self) -> None:
+ expl = "assert foo"
+ assert util.format_explanation(expl) == "assert foo"
+
+ def test_fmt_where(self) -> None:
+ expl = "\n".join(["assert 1", "{1 = foo", "} == 2"])
+ res = "\n".join(["assert 1 == 2", " + where 1 = foo"])
+ assert util.format_explanation(expl) == res
+
+ def test_fmt_and(self) -> None:
+ expl = "\n".join(["assert 1", "{1 = foo", "} == 2", "{2 = bar", "}"])
+ res = "\n".join(["assert 1 == 2", " + where 1 = foo", " + and 2 = bar"])
+ assert util.format_explanation(expl) == res
+
+ def test_fmt_where_nested(self) -> None:
+ expl = "\n".join(["assert 1", "{1 = foo", "{foo = bar", "}", "} == 2"])
+ res = "\n".join(["assert 1 == 2", " + where 1 = foo", " + where foo = bar"])
+ assert util.format_explanation(expl) == res
+
+ def test_fmt_newline(self) -> None:
+ expl = "\n".join(['assert "foo" == "bar"', "~- foo", "~+ bar"])
+ res = "\n".join(['assert "foo" == "bar"', " - foo", " + bar"])
+ assert util.format_explanation(expl) == res
+
+ def test_fmt_newline_escaped(self) -> None:
+ expl = "\n".join(["assert foo == bar", "baz"])
+ res = "assert foo == bar\\nbaz"
+ assert util.format_explanation(expl) == res
+
+ def test_fmt_newline_before_where(self) -> None:
+ expl = "\n".join(
+ [
+ "the assertion message here",
+ ">assert 1",
+ "{1 = foo",
+ "} == 2",
+ "{2 = bar",
+ "}",
+ ]
+ )
+ res = "\n".join(
+ [
+ "the assertion message here",
+ "assert 1 == 2",
+ " + where 1 = foo",
+ " + and 2 = bar",
+ ]
+ )
+ assert util.format_explanation(expl) == res
+
+ def test_fmt_multi_newline_before_where(self) -> None:
+ expl = "\n".join(
+ [
+ "the assertion",
+ "~message here",
+ ">assert 1",
+ "{1 = foo",
+ "} == 2",
+ "{2 = bar",
+ "}",
+ ]
+ )
+ res = "\n".join(
+ [
+ "the assertion",
+ " message here",
+ "assert 1 == 2",
+ " + where 1 = foo",
+ " + and 2 = bar",
+ ]
+ )
+ assert util.format_explanation(expl) == res
+
+
+class TestTruncateExplanation:
+ # The number of lines in the truncation explanation message. Used
+    # to check that results have the expected length.
+ LINES_IN_TRUNCATION_MSG = 2
+
+ def test_doesnt_truncate_when_input_is_empty_list(self) -> None:
+ expl: List[str] = []
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=100)
+ assert result == expl
+
+    def test_doesnt_truncate_when_input_is_5_lines_and_LT_max_chars(self) -> None:
+ expl = ["a" * 100 for x in range(5)]
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=8 * 80)
+ assert result == expl
+
+ def test_truncates_at_8_lines_when_given_list_of_empty_strings(self) -> None:
+ expl = ["" for x in range(50)]
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=100)
+ assert result != expl
+ assert len(result) == 8 + self.LINES_IN_TRUNCATION_MSG
+ assert "Full output truncated" in result[-1]
+ assert "43 lines hidden" in result[-1]
+ last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]
+ assert last_line_before_trunc_msg.endswith("...")
+
+ def test_truncates_at_8_lines_when_first_8_lines_are_LT_max_chars(self) -> None:
+ expl = ["a" for x in range(100)]
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=8 * 80)
+ assert result != expl
+ assert len(result) == 8 + self.LINES_IN_TRUNCATION_MSG
+ assert "Full output truncated" in result[-1]
+ assert "93 lines hidden" in result[-1]
+ last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]
+ assert last_line_before_trunc_msg.endswith("...")
+
+ def test_truncates_at_8_lines_when_first_8_lines_are_EQ_max_chars(self) -> None:
+ expl = ["a" * 80 for x in range(16)]
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=8 * 80)
+ assert result != expl
+ assert len(result) == 8 + self.LINES_IN_TRUNCATION_MSG
+ assert "Full output truncated" in result[-1]
+ assert "9 lines hidden" in result[-1]
+ last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]
+ assert last_line_before_trunc_msg.endswith("...")
+
+ def test_truncates_at_4_lines_when_first_4_lines_are_GT_max_chars(self) -> None:
+ expl = ["a" * 250 for x in range(10)]
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=999)
+ assert result != expl
+ assert len(result) == 4 + self.LINES_IN_TRUNCATION_MSG
+ assert "Full output truncated" in result[-1]
+ assert "7 lines hidden" in result[-1]
+ last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]
+ assert last_line_before_trunc_msg.endswith("...")
+
+ def test_truncates_at_1_line_when_first_line_is_GT_max_chars(self) -> None:
+ expl = ["a" * 250 for x in range(1000)]
+ result = truncate._truncate_explanation(expl, max_lines=8, max_chars=100)
+ assert result != expl
+ assert len(result) == 1 + self.LINES_IN_TRUNCATION_MSG
+ assert "Full output truncated" in result[-1]
+ assert "1000 lines hidden" in result[-1]
+ last_line_before_trunc_msg = result[-self.LINES_IN_TRUNCATION_MSG - 1]
+ assert last_line_before_trunc_msg.endswith("...")
+
+ def test_full_output_truncated(self, monkeypatch, pytester: Pytester) -> None:
+ """Test against full runpytest() output."""
+
+ line_count = 7
+ line_len = 100
+ expected_truncated_lines = 2
+ pytester.makepyfile(
+ r"""
+ def test_many_lines():
+ a = list([str(i)[0] * %d for i in range(%d)])
+ b = a[::2]
+ a = '\n'.join(map(str, a))
+ b = '\n'.join(map(str, b))
+ assert a == b
+ """
+ % (line_len, line_count)
+ )
+ monkeypatch.delenv("CI", raising=False)
+
+ result = pytester.runpytest()
+        # without -vv, the message is truncated, showing only a few diff lines
+ result.stdout.fnmatch_lines(
+ [
+ "*+ 1*",
+ "*+ 3*",
+ "*+ 5*",
+ "*truncated (%d lines hidden)*use*-vv*" % expected_truncated_lines,
+ ]
+ )
+
+ result = pytester.runpytest("-vv")
+ result.stdout.fnmatch_lines(["* 6*"])
+
+ monkeypatch.setenv("CI", "1")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 6*"])
+
+
+def test_python25_compile_issue257(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_rewritten():
+ assert 1 == 2
+ # some comment
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ """
+ *E*assert 1 == 2*
+ *1 failed*
+ """
+ )
+
+
+def test_rewritten(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_rewritten():
+ assert "@py_builtins" in globals()
+ """
+ )
+ assert pytester.runpytest().ret == 0
+
+
+def test_reprcompare_notin() -> None:
+ assert callop("not in", "foo", "aaafoobbb") == [
+ "'foo' not in 'aaafoobbb'",
+ "'foo' is contained here:",
+ " aaafoobbb",
+ "? +++",
+ ]
+
+
+def test_reprcompare_whitespaces() -> None:
+ assert callequal("\r\n", "\n") == [
+ r"'\r\n' == '\n'",
+ r"Strings contain only whitespace, escaping them using repr()",
+ r"- '\n'",
+ r"+ '\r\n'",
+ r"? ++",
+ ]
+
+
+def test_pytest_assertrepr_compare_integration(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_hello():
+ x = set(range(100))
+ y = x.copy()
+ y.remove(50)
+ assert x == y
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*def test_hello():*",
+ "*assert x == y*",
+ "*E*Extra items*left*",
+ "*E*50*",
+ "*= 1 failed in*",
+ ]
+ )
+
+
+def test_sequence_comparison_uses_repr(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_hello():
+ x = set("hello x")
+ y = set("hello y")
+ assert x == y
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*def test_hello():*",
+ "*assert x == y*",
+ "*E*Extra items*left*",
+ "*E*'x'*",
+ "*E*Extra items*right*",
+ "*E*'y'*",
+ ]
+ )
+
+
+def test_assertrepr_loaded_per_dir(pytester: Pytester) -> None:
+ pytester.makepyfile(test_base=["def test_base(): assert 1 == 2"])
+ a = pytester.mkdir("a")
+ a.joinpath("test_a.py").write_text("def test_a(): assert 1 == 2")
+ a.joinpath("conftest.py").write_text(
+ 'def pytest_assertrepr_compare(): return ["summary a"]'
+ )
+ b = pytester.mkdir("b")
+ b.joinpath("test_b.py").write_text("def test_b(): assert 1 == 2")
+ b.joinpath("conftest.py").write_text(
+ 'def pytest_assertrepr_compare(): return ["summary b"]'
+ )
+
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*def test_base():*",
+ "*E*assert 1 == 2*",
+ "*def test_a():*",
+ "*E*assert summary a*",
+ "*def test_b():*",
+ "*E*assert summary b*",
+ ]
+ )
+
+
+def test_assertion_options(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_hello():
+ x = 3
+ assert x == 4
+ """
+ )
+ result = pytester.runpytest()
+ assert "3 == 4" in result.stdout.str()
+ result = pytester.runpytest_subprocess("--assert=plain")
+ result.stdout.no_fnmatch_line("*3 == 4*")
+
+
+def test_triple_quoted_string_issue113(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_hello():
+ assert "" == '''
+ '''"""
+ )
+ result = pytester.runpytest("--fulltrace")
+ result.stdout.fnmatch_lines(["*1 failed*"])
+ result.stdout.no_fnmatch_line("*SyntaxError*")
+
+
+def test_traceback_failure(pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def g():
+ return 2
+ def f(x):
+ assert x == g()
+ def test_onefails():
+ f(3)
+ """
+ )
+ result = pytester.runpytest(p1, "--tb=long")
+ result.stdout.fnmatch_lines(
+ [
+ "*test_traceback_failure.py F*",
+ "====* FAILURES *====",
+ "____*____",
+ "",
+ " def test_onefails():",
+ "> f(3)",
+ "",
+ "*test_*.py:6: ",
+ "_ _ _ *",
+ # "",
+ " def f(x):",
+ "> assert x == g()",
+ "E assert 3 == 2",
+ "E + where 2 = g()",
+ "",
+ "*test_traceback_failure.py:4: AssertionError",
+ ]
+ )
+
+ result = pytester.runpytest(p1) # "auto"
+ result.stdout.fnmatch_lines(
+ [
+ "*test_traceback_failure.py F*",
+ "====* FAILURES *====",
+ "____*____",
+ "",
+ " def test_onefails():",
+ "> f(3)",
+ "",
+ "*test_*.py:6: ",
+ "",
+ " def f(x):",
+ "> assert x == g()",
+ "E assert 3 == 2",
+ "E + where 2 = g()",
+ "",
+ "*test_traceback_failure.py:4: AssertionError",
+ ]
+ )
+
+
+def test_exception_handling_no_traceback(pytester: Pytester) -> None:
+ """Handle chain exceptions in tasks submitted by the multiprocess module (#1984)."""
+ p1 = pytester.makepyfile(
+ """
+ from multiprocessing import Pool
+
+ def process_task(n):
+ assert n == 10
+
+ def multitask_job():
+ tasks = [1]
+ with Pool(processes=1) as pool:
+ pool.map(process_task, tasks)
+
+ def test_multitask_job():
+ multitask_job()
+ """
+ )
+ pytester.syspathinsert()
+ result = pytester.runpytest(p1, "--tb=long")
+ result.stdout.fnmatch_lines(
+ [
+ "====* FAILURES *====",
+ "*multiprocessing.pool.RemoteTraceback:*",
+ "Traceback (most recent call last):",
+ "*assert n == 10",
+ "The above exception was the direct cause of the following exception:",
+ "> * multitask_job()",
+ ]
+ )
+
+
+@pytest.mark.skipif("'__pypy__' in sys.builtin_module_names")
+@pytest.mark.parametrize(
+ "cmdline_args, warning_output",
+ [
+ (
+ ["-OO", "-m", "pytest", "-h"],
+ ["warning :*PytestConfigWarning:*assert statements are not executed*"],
+ ),
+ (
+ ["-OO", "-m", "pytest"],
+ [
+ "=*= warnings summary =*=",
+ "*PytestConfigWarning:*assert statements are not executed*",
+ ],
+ ),
+ (
+ ["-OO", "-m", "pytest", "--assert=plain"],
+ [
+ "=*= warnings summary =*=",
+ "*PytestConfigWarning: ASSERTIONS ARE NOT EXECUTED and FAILING TESTS WILL PASS. "
+ "Are you using python -O?",
+ ],
+ ),
+ ],
+)
+def test_warn_missing(pytester: Pytester, cmdline_args, warning_output) -> None:
+ pytester.makepyfile("")
+
+ result = pytester.run(sys.executable, *cmdline_args)
+ result.stdout.fnmatch_lines(warning_output)
+
+
+def test_recursion_source_decode(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_something():
+ pass
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ python_files = *.py
+ """
+ )
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(
+ """
+ <Module*>
+ """
+ )
+
+
+def test_AssertionError_message(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_hello():
+ x,y = 1,2
+ assert 0, (x,y)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *def test_hello*
+ *assert 0, (x,y)*
+ *AssertionError: (1, 2)*
+ """
+ )
+
+
+def test_diff_newline_at_end(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ r"""
+ def test_diff():
+ assert 'asdf' == 'asdf\n'
+ """
+ )
+
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ r"""
+ *assert 'asdf' == 'asdf\n'
+ * - asdf
+ * ? -
+ * + asdf
+ """
+ )
+
+
+@pytest.mark.filterwarnings("default")
+def test_assert_tuple_warning(pytester: Pytester) -> None:
+ msg = "assertion is always true"
+ pytester.makepyfile(
+ """
+ def test_tuple():
+ assert(False, 'you shall not pass')
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines([f"*test_assert_tuple_warning.py:2:*{msg}*"])
+
+ # tuples with size != 2 should not trigger the warning
+ pytester.makepyfile(
+ """
+ def test_tuple():
+ assert ()
+ """
+ )
+ result = pytester.runpytest()
+ assert msg not in result.stdout.str()
+
+
+def test_assert_indirect_tuple_no_warning(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_tuple():
+ tpl = ('foo', 'bar')
+ assert tpl
+ """
+ )
+ result = pytester.runpytest()
+ output = "\n".join(result.stdout.lines)
+ assert "WR1" not in output
+
+
+def test_assert_with_unicode(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """\
+ def test_unicode():
+ assert '유니코드' == 'Unicode'
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*AssertionError*"])
+
+
+def test_raise_unprintable_assertion_error(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ r"""
+ def test_raise_assertion_error():
+ raise AssertionError('\xff')
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [r"> raise AssertionError('\xff')", "E AssertionError: *"]
+ )
+
+
+def test_raise_assertion_error_raising_repr(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ class RaisingRepr(object):
+ def __repr__(self):
+ raise Exception()
+ def test_raising_repr():
+ raise AssertionError(RaisingRepr())
+ """
+ )
+ result = pytester.runpytest()
+ if sys.version_info >= (3, 11):
+ # python 3.11 has native support for un-str-able exceptions
+ result.stdout.fnmatch_lines(
+ ["E AssertionError: <exception str() failed>"]
+ )
+ else:
+ result.stdout.fnmatch_lines(
+ ["E AssertionError: <unprintable AssertionError object>"]
+ )
+
+
+def test_issue_1944(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def f():
+ return
+
+ assert f() == 10
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 error*"])
+ assert (
+ "AttributeError: 'Module' object has no attribute '_obj'"
+ not in result.stdout.str()
+ )
+
+
+def test_exit_from_assertrepr_compare(monkeypatch) -> None:
+ def raise_exit(obj):
+ outcomes.exit("Quitting debugger")
+
+ monkeypatch.setattr(util, "istext", raise_exit)
+
+ with pytest.raises(outcomes.Exit, match="Quitting debugger"):
+ callequal(1, 1)
+
+
+def test_assertion_location_with_coverage(pytester: Pytester) -> None:
+ """This used to report the wrong location when run with coverage (#5754)."""
+ p = pytester.makepyfile(
+ """
+ def test():
+ assert False, 1
+ assert False, 2
+ """
+ )
+ result = pytester.runpytest(str(p))
+ result.stdout.fnmatch_lines(
+ [
+ "> assert False, 1",
+ "E AssertionError: 1",
+ "E assert False",
+ "*= 1 failed in*",
+ ]
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_assertrewrite.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_assertrewrite.py
new file mode 100644
index 0000000000..4417eb4350
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_assertrewrite.py
@@ -0,0 +1,1841 @@
+import ast
+import errno
+import glob
+import importlib
+import marshal
+import os
+import py_compile
+import stat
+import sys
+import textwrap
+import zipfile
+from functools import partial
+from pathlib import Path
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Set
+
+import _pytest._code
+import pytest
+from _pytest._io.saferepr import DEFAULT_REPR_MAX_SIZE
+from _pytest.assertion import util
+from _pytest.assertion.rewrite import _get_assertion_exprs
+from _pytest.assertion.rewrite import _get_maxsize_for_saferepr
+from _pytest.assertion.rewrite import AssertionRewritingHook
+from _pytest.assertion.rewrite import get_cache_dir
+from _pytest.assertion.rewrite import PYC_TAIL
+from _pytest.assertion.rewrite import PYTEST_TAG
+from _pytest.assertion.rewrite import rewrite_asserts
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.pathlib import make_numbered_dir
+from _pytest.pytester import Pytester
+
+
+def rewrite(src: str) -> ast.Module:
+ tree = ast.parse(src)
+ rewrite_asserts(tree, src.encode())
+ return tree
+
+
+def getmsg(
+ f, extra_ns: Optional[Mapping[str, object]] = None, *, must_pass: bool = False
+) -> Optional[str]:
+ """Rewrite the assertions in f, run it, and get the failure message."""
+ src = "\n".join(_pytest._code.Code.from_function(f).source().lines)
+ mod = rewrite(src)
+ code = compile(mod, "<test>", "exec")
+ ns: Dict[str, object] = {}
+ if extra_ns is not None:
+ ns.update(extra_ns)
+ exec(code, ns)
+ func = ns[f.__name__]
+ try:
+ func() # type: ignore[operator]
+ except AssertionError:
+ if must_pass:
+ pytest.fail("shouldn't have raised")
+ s = str(sys.exc_info()[1])
+ if not s.startswith("assert"):
+ return "AssertionError: " + s
+ return s
+ else:
+ if not must_pass:
+ pytest.fail("function didn't raise at all")
+ return None
+
+
+class TestAssertionRewrite:
+ def test_place_initial_imports(self) -> None:
+ s = """'Doc string'\nother = stuff"""
+ m = rewrite(s)
+ assert isinstance(m.body[0], ast.Expr)
+ for imp in m.body[1:3]:
+ assert isinstance(imp, ast.Import)
+ assert imp.lineno == 2
+ assert imp.col_offset == 0
+ assert isinstance(m.body[3], ast.Assign)
+ s = """from __future__ import division\nother_stuff"""
+ m = rewrite(s)
+ assert isinstance(m.body[0], ast.ImportFrom)
+ for imp in m.body[1:3]:
+ assert isinstance(imp, ast.Import)
+ assert imp.lineno == 2
+ assert imp.col_offset == 0
+ assert isinstance(m.body[3], ast.Expr)
+ s = """'doc string'\nfrom __future__ import division"""
+ m = rewrite(s)
+ assert isinstance(m.body[0], ast.Expr)
+ assert isinstance(m.body[1], ast.ImportFrom)
+ for imp in m.body[2:4]:
+ assert isinstance(imp, ast.Import)
+ assert imp.lineno == 2
+ assert imp.col_offset == 0
+ s = """'doc string'\nfrom __future__ import division\nother"""
+ m = rewrite(s)
+ assert isinstance(m.body[0], ast.Expr)
+ assert isinstance(m.body[1], ast.ImportFrom)
+ for imp in m.body[2:4]:
+ assert isinstance(imp, ast.Import)
+ assert imp.lineno == 3
+ assert imp.col_offset == 0
+ assert isinstance(m.body[4], ast.Expr)
+ s = """from . import relative\nother_stuff"""
+ m = rewrite(s)
+ for imp in m.body[:2]:
+ assert isinstance(imp, ast.Import)
+ assert imp.lineno == 1
+ assert imp.col_offset == 0
+ assert isinstance(m.body[3], ast.Expr)
+
+ def test_location_is_set(self) -> None:
+ s = textwrap.dedent(
+ """
+
+ assert False, (
+
+ "Ouch"
+ )
+
+ """
+ )
+ m = rewrite(s)
+ for node in m.body:
+ if isinstance(node, ast.Import):
+ continue
+ for n in [node, *ast.iter_child_nodes(node)]:
+ assert n.lineno == 3
+ assert n.col_offset == 0
+ if sys.version_info >= (3, 8):
+ assert n.end_lineno == 6
+ assert n.end_col_offset == 3
+
+ def test_dont_rewrite(self) -> None:
+ s = """'PYTEST_DONT_REWRITE'\nassert 14"""
+ m = rewrite(s)
+ assert len(m.body) == 2
+ assert isinstance(m.body[1], ast.Assert)
+ assert m.body[1].msg is None
+
+ def test_dont_rewrite_plugin(self, pytester: Pytester) -> None:
+ contents = {
+ "conftest.py": "pytest_plugins = 'plugin'; import plugin",
+ "plugin.py": "'PYTEST_DONT_REWRITE'",
+ "test_foo.py": "def test_foo(): pass",
+ }
+ pytester.makepyfile(**contents)
+ result = pytester.runpytest_subprocess()
+ assert "warning" not in "".join(result.outlines)
+
+ def test_rewrites_plugin_as_a_package(self, pytester: Pytester) -> None:
+ pkgdir = pytester.mkpydir("plugin")
+ pkgdir.joinpath("__init__.py").write_text(
+ "import pytest\n"
+ "@pytest.fixture\n"
+ "def special_asserter():\n"
+ " def special_assert(x, y):\n"
+ " assert x == y\n"
+ " return special_assert\n"
+ )
+ pytester.makeconftest('pytest_plugins = ["plugin"]')
+ pytester.makepyfile("def test(special_asserter): special_asserter(1, 2)\n")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*assert 1 == 2*"])
+
+ def test_honors_pep_235(self, pytester: Pytester, monkeypatch) -> None:
+ # note: couldn't make it fail on macos with a single `sys.path` entry
+ # note: these modules are named `test_*` to trigger rewriting
+ pytester.makepyfile(test_y="x = 1")
+ xdir = pytester.mkdir("x")
+ pytester.mkpydir(str(xdir.joinpath("test_Y")))
+ xdir.joinpath("test_Y").joinpath("__init__.py").write_text("x = 2")
+ pytester.makepyfile(
+ "import test_y\n"
+ "import test_Y\n"
+ "def test():\n"
+ " assert test_y.x == 1\n"
+ " assert test_Y.x == 2\n"
+ )
+ monkeypatch.syspath_prepend(str(xdir))
+ pytester.runpytest().assert_outcomes(passed=1)
+
+ def test_name(self, request) -> None:
+ def f1() -> None:
+ assert False
+
+ assert getmsg(f1) == "assert False"
+
+ def f2() -> None:
+ f = False
+ assert f
+
+ assert getmsg(f2) == "assert False"
+
+ def f3() -> None:
+ assert a_global # type: ignore[name-defined] # noqa
+
+ assert getmsg(f3, {"a_global": False}) == "assert False"
+
+ def f4() -> None:
+ assert sys == 42 # type: ignore[comparison-overlap]
+
+ verbose = request.config.getoption("verbose")
+ msg = getmsg(f4, {"sys": sys})
+ if verbose > 0:
+ assert msg == (
+ "assert <module 'sys' (built-in)> == 42\n"
+ " +<module 'sys' (built-in)>\n"
+ " -42"
+ )
+ else:
+ assert msg == "assert sys == 42"
+
+ def f5() -> None:
+ assert cls == 42 # type: ignore[name-defined] # noqa: F821
+
+ class X:
+ pass
+
+ msg = getmsg(f5, {"cls": X})
+ assert msg is not None
+ lines = msg.splitlines()
+ if verbose > 1:
+ assert lines == [
+ f"assert {X!r} == 42",
+ f" +{X!r}",
+ " -42",
+ ]
+ elif verbose > 0:
+ assert lines == [
+ "assert <class 'test_...e.<locals>.X'> == 42",
+ f" +{X!r}",
+ " -42",
+ ]
+ else:
+ assert lines == ["assert cls == 42"]
+
+ def test_assertrepr_compare_same_width(self, request) -> None:
+ """Should use same width/truncation with same initial width."""
+
+ def f() -> None:
+ assert "1234567890" * 5 + "A" == "1234567890" * 5 + "B"
+
+ msg = getmsg(f)
+ assert msg is not None
+ line = msg.splitlines()[0]
+ if request.config.getoption("verbose") > 1:
+ assert line == (
+ "assert '12345678901234567890123456789012345678901234567890A' "
+ "== '12345678901234567890123456789012345678901234567890B'"
+ )
+ else:
+ assert line == (
+ "assert '123456789012...901234567890A' "
+ "== '123456789012...901234567890B'"
+ )
+
+ def test_dont_rewrite_if_hasattr_fails(self, request) -> None:
+ class Y:
+ """A class whose getattr fails, but not with `AttributeError`."""
+
+ def __getattr__(self, attribute_name):
+ raise KeyError()
+
+ def __repr__(self) -> str:
+ return "Y"
+
+ def __init__(self) -> None:
+ self.foo = 3
+
+ def f() -> None:
+ assert cls().foo == 2 # type: ignore[name-defined] # noqa: F821
+
+ # XXX: looks like the "where" should also be there in verbose mode?!
+ msg = getmsg(f, {"cls": Y})
+ assert msg is not None
+ lines = msg.splitlines()
+ if request.config.getoption("verbose") > 0:
+ assert lines == ["assert 3 == 2", " +3", " -2"]
+ else:
+ assert lines == [
+ "assert 3 == 2",
+ " + where 3 = Y.foo",
+ " + where Y = cls()",
+ ]
+
+ def test_assert_already_has_message(self) -> None:
+ def f():
+ assert False, "something bad!"
+
+ assert getmsg(f) == "AssertionError: something bad!\nassert False"
+
+ def test_assertion_message(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 2, "The failure message"
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ ["*AssertionError*The failure message*", "*assert 1 == 2*"]
+ )
+
+ def test_assertion_message_multiline(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 2, "A multiline\\nfailure message"
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ ["*AssertionError*A multiline*", "*failure message*", "*assert 1 == 2*"]
+ )
+
+ def test_assertion_message_tuple(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 2, (1, 2)
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ ["*AssertionError*%s*" % repr((1, 2)), "*assert 1 == 2*"]
+ )
+
+ def test_assertion_message_expr(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 2, 1 + 2
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*AssertionError*3*", "*assert 1 == 2*"])
+
+ def test_assertion_message_escape(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 2, 'To be escaped: %'
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ ["*AssertionError: To be escaped: %", "*assert 1 == 2"]
+ )
+
+ def test_assertion_messages_bytes(self, pytester: Pytester) -> None:
+ pytester.makepyfile("def test_bytes_assertion():\n assert False, b'ohai!'\n")
+ result = pytester.runpytest()
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*AssertionError: b'ohai!'", "*assert False"])
+
+ def test_boolop(self) -> None:
+ def f1() -> None:
+ f = g = False
+ assert f and g
+
+ assert getmsg(f1) == "assert (False)"
+
+ def f2() -> None:
+ f = True
+ g = False
+ assert f and g
+
+ assert getmsg(f2) == "assert (True and False)"
+
+ def f3() -> None:
+ f = False
+ g = True
+ assert f and g
+
+ assert getmsg(f3) == "assert (False)"
+
+ def f4() -> None:
+ f = g = False
+ assert f or g
+
+ assert getmsg(f4) == "assert (False or False)"
+
+ def f5() -> None:
+ f = g = False
+ assert not f and not g
+
+ getmsg(f5, must_pass=True)
+
+ def x() -> bool:
+ return False
+
+ def f6() -> None:
+ assert x() and x()
+
+ assert (
+ getmsg(f6, {"x": x})
+ == """assert (False)
+ + where False = x()"""
+ )
+
+ def f7() -> None:
+ assert False or x()
+
+ assert (
+ getmsg(f7, {"x": x})
+ == """assert (False or False)
+ + where False = x()"""
+ )
+
+ def f8() -> None:
+ assert 1 in {} and 2 in {}
+
+ assert getmsg(f8) == "assert (1 in {})"
+
+ def f9() -> None:
+ x = 1
+ y = 2
+ assert x in {1: None} and y in {}
+
+ assert getmsg(f9) == "assert (1 in {1: None} and 2 in {})"
+
+ def f10() -> None:
+ f = True
+ g = False
+ assert f or g
+
+ getmsg(f10, must_pass=True)
+
+ def f11() -> None:
+ f = g = h = lambda: True
+ assert f() and g() and h()
+
+ getmsg(f11, must_pass=True)
+
+ def test_short_circuit_evaluation(self) -> None:
+ def f1() -> None:
+ assert True or explode # type: ignore[name-defined,unreachable] # noqa: F821
+
+ getmsg(f1, must_pass=True)
+
+ def f2() -> None:
+ x = 1
+ assert x == 1 or x == 2
+
+ getmsg(f2, must_pass=True)
+
+ def test_unary_op(self) -> None:
+ def f1() -> None:
+ x = True
+ assert not x
+
+ assert getmsg(f1) == "assert not True"
+
+ def f2() -> None:
+ x = 0
+ assert ~x + 1
+
+ assert getmsg(f2) == "assert (~0 + 1)"
+
+ def f3() -> None:
+ x = 3
+ assert -x + x
+
+ assert getmsg(f3) == "assert (-3 + 3)"
+
+ def f4() -> None:
+ x = 0
+ assert +x + x
+
+ assert getmsg(f4) == "assert (+0 + 0)"
+
+ def test_binary_op(self) -> None:
+ def f1() -> None:
+ x = 1
+ y = -1
+ assert x + y
+
+ assert getmsg(f1) == "assert (1 + -1)"
+
+ def f2() -> None:
+ assert not 5 % 4
+
+ assert getmsg(f2) == "assert not (5 % 4)"
+
+ def test_boolop_percent(self) -> None:
+ def f1() -> None:
+ assert 3 % 2 and False
+
+ assert getmsg(f1) == "assert ((3 % 2) and False)"
+
+ def f2() -> None:
+ assert False or 4 % 2
+
+ assert getmsg(f2) == "assert (False or (4 % 2))"
+
+ def test_at_operator_issue1290(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ class Matrix(object):
+ def __init__(self, num):
+ self.num = num
+ def __matmul__(self, other):
+ return self.num * other.num
+
+ def test_multmat_operator():
+ assert Matrix(2) @ Matrix(3) == 6"""
+ )
+ pytester.runpytest().assert_outcomes(passed=1)
+
+ def test_starred_with_side_effect(self, pytester: Pytester) -> None:
+ """See #4412"""
+ pytester.makepyfile(
+ """\
+ def test():
+ f = lambda x: x
+ x = iter([1, 2, 3])
+ assert 2 * next(x) == f(*[next(x)])
+ """
+ )
+ pytester.runpytest().assert_outcomes(passed=1)
+
+ def test_call(self) -> None:
+ def g(a=42, *args, **kwargs) -> bool:
+ return False
+
+ ns = {"g": g}
+
+ def f1() -> None:
+ assert g()
+
+ assert (
+ getmsg(f1, ns)
+ == """assert False
+ + where False = g()"""
+ )
+
+ def f2() -> None:
+ assert g(1)
+
+ assert (
+ getmsg(f2, ns)
+ == """assert False
+ + where False = g(1)"""
+ )
+
+ def f3() -> None:
+ assert g(1, 2)
+
+ assert (
+ getmsg(f3, ns)
+ == """assert False
+ + where False = g(1, 2)"""
+ )
+
+ def f4() -> None:
+ assert g(1, g=42)
+
+ assert (
+ getmsg(f4, ns)
+ == """assert False
+ + where False = g(1, g=42)"""
+ )
+
+ def f5() -> None:
+ assert g(1, 3, g=23)
+
+ assert (
+ getmsg(f5, ns)
+ == """assert False
+ + where False = g(1, 3, g=23)"""
+ )
+
+ def f6() -> None:
+ seq = [1, 2, 3]
+ assert g(*seq)
+
+ assert (
+ getmsg(f6, ns)
+ == """assert False
+ + where False = g(*[1, 2, 3])"""
+ )
+
+ def f7() -> None:
+ x = "a"
+ assert g(**{x: 2})
+
+ assert (
+ getmsg(f7, ns)
+ == """assert False
+ + where False = g(**{'a': 2})"""
+ )
+
+ def test_attribute(self) -> None:
+ class X:
+ g = 3
+
+ ns = {"x": X}
+
+ def f1() -> None:
+ assert not x.g # type: ignore[name-defined] # noqa: F821
+
+ assert (
+ getmsg(f1, ns)
+ == """assert not 3
+ + where 3 = x.g"""
+ )
+
+ def f2() -> None:
+ x.a = False # type: ignore[name-defined] # noqa: F821
+ assert x.a # type: ignore[name-defined] # noqa: F821
+
+ assert (
+ getmsg(f2, ns)
+ == """assert False
+ + where False = x.a"""
+ )
+
+ def test_comparisons(self) -> None:
+ def f1() -> None:
+ a, b = range(2)
+ assert b < a
+
+ assert getmsg(f1) == """assert 1 < 0"""
+
+ def f2() -> None:
+ a, b, c = range(3)
+ assert a > b > c
+
+ assert getmsg(f2) == """assert 0 > 1"""
+
+ def f3() -> None:
+ a, b, c = range(3)
+ assert a < b > c
+
+ assert getmsg(f3) == """assert 1 > 2"""
+
+ def f4() -> None:
+ a, b, c = range(3)
+ assert a < b <= c
+
+ getmsg(f4, must_pass=True)
+
+ def f5() -> None:
+ a, b, c = range(3)
+ assert a < b
+ assert b < c
+
+ getmsg(f5, must_pass=True)
+
+ def test_len(self, request) -> None:
+ def f():
+ values = list(range(10))
+ assert len(values) == 11
+
+ msg = getmsg(f)
+ if request.config.getoption("verbose") > 0:
+ assert msg == "assert 10 == 11\n +10\n -11"
+ else:
+ assert msg == "assert 10 == 11\n + where 10 = len([0, 1, 2, 3, 4, 5, ...])"
+
+ def test_custom_reprcompare(self, monkeypatch) -> None:
+ def my_reprcompare1(op, left, right) -> str:
+ return "42"
+
+ monkeypatch.setattr(util, "_reprcompare", my_reprcompare1)
+
+ def f1() -> None:
+ assert 42 < 3
+
+ assert getmsg(f1) == "assert 42"
+
+ def my_reprcompare2(op, left, right) -> str:
+ return f"{left} {op} {right}"
+
+ monkeypatch.setattr(util, "_reprcompare", my_reprcompare2)
+
+ def f2() -> None:
+ assert 1 < 3 < 5 <= 4 < 7
+
+ assert getmsg(f2) == "assert 5 <= 4"
+
+ def test_assert_raising__bool__in_comparison(self) -> None:
+ def f() -> None:
+ class A:
+ def __bool__(self):
+ raise ValueError(42)
+
+ def __lt__(self, other):
+ return A()
+
+ def __repr__(self):
+ return "<MY42 object>"
+
+ def myany(x) -> bool:
+ return False
+
+ assert myany(A() < 0)
+
+ msg = getmsg(f)
+ assert msg is not None
+ assert "<MY42 object> < 0" in msg
+
+ def test_formatchar(self) -> None:
+ def f() -> None:
+ assert "%test" == "test" # type: ignore[comparison-overlap]
+
+ msg = getmsg(f)
+ assert msg is not None
+ assert msg.startswith("assert '%test' == 'test'")
+
+ def test_custom_repr(self, request) -> None:
+ def f() -> None:
+ class Foo:
+ a = 1
+
+ def __repr__(self):
+ return "\n{ \n~ \n}"
+
+ f = Foo()
+ assert 0 == f.a
+
+ msg = getmsg(f)
+ assert msg is not None
+ lines = util._format_lines([msg])
+ if request.config.getoption("verbose") > 0:
+ assert lines == ["assert 0 == 1\n +0\n -1"]
+ else:
+ assert lines == ["assert 0 == 1\n + where 1 = \\n{ \\n~ \\n}.a"]
+
+ def test_custom_repr_non_ascii(self) -> None:
+ def f() -> None:
+ class A:
+ name = "ä"
+
+ def __repr__(self):
+ return self.name.encode("UTF-8") # only legal in python2
+
+ a = A()
+ assert not a.name
+
+ msg = getmsg(f)
+ assert msg is not None
+ assert "UnicodeDecodeError" not in msg
+ assert "UnicodeEncodeError" not in msg
+
+
+class TestRewriteOnImport:
+ def test_pycache_is_a_file(self, pytester: Pytester) -> None:
+ pytester.path.joinpath("__pycache__").write_text("Hello")
+ pytester.makepyfile(
+ """
+ def test_rewritten():
+ assert "@py_builtins" in globals()"""
+ )
+ assert pytester.runpytest().ret == 0
+
+ def test_pycache_is_readonly(self, pytester: Pytester) -> None:
+ cache = pytester.mkdir("__pycache__")
+ old_mode = cache.stat().st_mode
+ cache.chmod(old_mode ^ stat.S_IWRITE)
+ pytester.makepyfile(
+ """
+ def test_rewritten():
+ assert "@py_builtins" in globals()"""
+ )
+ try:
+ assert pytester.runpytest().ret == 0
+ finally:
+ cache.chmod(old_mode)
+
+ def test_zipfile(self, pytester: Pytester) -> None:
+ z = pytester.path.joinpath("myzip.zip")
+ z_fn = str(z)
+ f = zipfile.ZipFile(z_fn, "w")
+ try:
+ f.writestr("test_gum/__init__.py", "")
+ f.writestr("test_gum/test_lizard.py", "")
+ finally:
+ f.close()
+ z.chmod(256)
+ pytester.makepyfile(
+ """
+ import sys
+ sys.path.append(%r)
+ import test_gum.test_lizard"""
+ % (z_fn,)
+ )
+ assert pytester.runpytest().ret == ExitCode.NO_TESTS_COLLECTED
+
+ @pytest.mark.skipif(
+ sys.version_info < (3, 9),
+ reason="importlib.resources.files was introduced in 3.9",
+ )
+ def test_load_resource_via_files_with_rewrite(self, pytester: Pytester) -> None:
+ example = pytester.path.joinpath("demo") / "example"
+ init = pytester.path.joinpath("demo") / "__init__.py"
+ pytester.makepyfile(
+ **{
+ "demo/__init__.py": """
+ from importlib.resources import files
+
+ def load():
+ return files(__name__)
+ """,
+ "test_load": f"""
+ pytest_plugins = ["demo"]
+
+ def test_load():
+ from demo import load
+ found = {{str(i) for i in load().iterdir() if i.name != "__pycache__"}}
+ assert found == {{{str(example)!r}, {str(init)!r}}}
+ """,
+ }
+ )
+ example.mkdir()
+
+ assert pytester.runpytest("-vv").ret == ExitCode.OK
+
+ def test_readonly(self, pytester: Pytester) -> None:
+ sub = pytester.mkdir("testing")
+ sub.joinpath("test_readonly.py").write_bytes(
+ b"""
+def test_rewritten():
+ assert "@py_builtins" in globals()
+ """,
+ )
+ old_mode = sub.stat().st_mode
+ sub.chmod(320)
+ try:
+ assert pytester.runpytest().ret == 0
+ finally:
+ sub.chmod(old_mode)
+
+ def test_dont_write_bytecode(self, pytester: Pytester, monkeypatch) -> None:
+ monkeypatch.delenv("PYTHONPYCACHEPREFIX", raising=False)
+
+ pytester.makepyfile(
+ """
+ import os
+ def test_no_bytecode():
+ assert "__pycache__" in __cached__
+ assert not os.path.exists(__cached__)
+ assert not os.path.exists(os.path.dirname(__cached__))"""
+ )
+ monkeypatch.setenv("PYTHONDONTWRITEBYTECODE", "1")
+ assert pytester.runpytest_subprocess().ret == 0
+
+ def test_orphaned_pyc_file(self, pytester: Pytester, monkeypatch) -> None:
+ monkeypatch.delenv("PYTHONPYCACHEPREFIX", raising=False)
+ monkeypatch.setattr(sys, "pycache_prefix", None, raising=False)
+
+ pytester.makepyfile(
+ """
+ import orphan
+ def test_it():
+ assert orphan.value == 17
+ """
+ )
+ pytester.makepyfile(
+ orphan="""
+ value = 17
+ """
+ )
+ py_compile.compile("orphan.py")
+ os.remove("orphan.py")
+
+ # Python 3 puts the .pyc files in a __pycache__ directory, and will
+ # not import from there without source. It will import a .pyc from
+ # the source location though.
+ if not os.path.exists("orphan.pyc"):
+ pycs = glob.glob("__pycache__/orphan.*.pyc")
+ assert len(pycs) == 1
+ os.rename(pycs[0], "orphan.pyc")
+
+ assert pytester.runpytest().ret == 0
+
+ def test_cached_pyc_includes_pytest_version(
+ self, pytester: Pytester, monkeypatch
+ ) -> None:
+ """Avoid stale caches (#1671)"""
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", raising=False)
+ monkeypatch.delenv("PYTHONPYCACHEPREFIX", raising=False)
+ pytester.makepyfile(
+ test_foo="""
+ def test_foo():
+ assert True
+ """
+ )
+ result = pytester.runpytest_subprocess()
+ assert result.ret == 0
+ found_names = glob.glob(f"__pycache__/*-pytest-{pytest.__version__}.pyc")
+ assert found_names, "pyc with expected tag not found in names: {}".format(
+ glob.glob("__pycache__/*.pyc")
+ )
+
+ @pytest.mark.skipif('"__pypy__" in sys.modules')
+ def test_pyc_vs_pyo(self, pytester: Pytester, monkeypatch) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_optimized():
+ "hello"
+ assert test_optimized.__doc__ is None"""
+ )
+ p = make_numbered_dir(root=Path(pytester.path), prefix="runpytest-")
+ tmp = "--basetemp=%s" % p
+ monkeypatch.setenv("PYTHONOPTIMIZE", "2")
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", raising=False)
+ monkeypatch.delenv("PYTHONPYCACHEPREFIX", raising=False)
+ assert pytester.runpytest_subprocess(tmp).ret == 0
+ tagged = "test_pyc_vs_pyo." + PYTEST_TAG
+ assert tagged + ".pyo" in os.listdir("__pycache__")
+ monkeypatch.undo()
+ monkeypatch.delenv("PYTHONDONTWRITEBYTECODE", raising=False)
+ monkeypatch.delenv("PYTHONPYCACHEPREFIX", raising=False)
+ assert pytester.runpytest_subprocess(tmp).ret == 1
+ assert tagged + ".pyc" in os.listdir("__pycache__")
+
+ def test_package(self, pytester: Pytester) -> None:
+ pkg = pytester.path.joinpath("pkg")
+ pkg.mkdir()
+ pkg.joinpath("__init__.py")
+ pkg.joinpath("test_blah.py").write_text(
+ """
+def test_rewritten():
+ assert "@py_builtins" in globals()"""
+ )
+ assert pytester.runpytest().ret == 0
+
+ def test_translate_newlines(self, pytester: Pytester) -> None:
+ content = "def test_rewritten():\r\n assert '@py_builtins' in globals()"
+ b = content.encode("utf-8")
+ pytester.path.joinpath("test_newlines.py").write_bytes(b)
+ assert pytester.runpytest().ret == 0
+
+ def test_package_without__init__py(self, pytester: Pytester) -> None:
+ pkg = pytester.mkdir("a_package_without_init_py")
+ pkg.joinpath("module.py").touch()
+ pytester.makepyfile("import a_package_without_init_py.module")
+ assert pytester.runpytest().ret == ExitCode.NO_TESTS_COLLECTED
+
+ def test_rewrite_warning(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ pytest.register_assert_rewrite("_pytest")
+ """
+ )
+ # needs to be a subprocess because pytester explicitly disables this warning
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*Module already imported*: _pytest"])
+
+ def test_rewrite_module_imported_from_conftest(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import test_rewrite_module_imported
+ """
+ )
+ pytester.makepyfile(
+ test_rewrite_module_imported="""
+ def test_rewritten():
+ assert "@py_builtins" in globals()
+ """
+ )
+ assert pytester.runpytest_subprocess().ret == 0
+
+ def test_remember_rewritten_modules(
+ self, pytestconfig, pytester: Pytester, monkeypatch
+ ) -> None:
+ """`AssertionRewriteHook` should remember rewritten modules so it
+ doesn't give false positives (#2005)."""
+ monkeypatch.syspath_prepend(pytester.path)
+ pytester.makepyfile(test_remember_rewritten_modules="")
+ warnings = []
+ hook = AssertionRewritingHook(pytestconfig)
+ monkeypatch.setattr(
+ hook, "_warn_already_imported", lambda code, msg: warnings.append(msg)
+ )
+ spec = hook.find_spec("test_remember_rewritten_modules")
+ assert spec is not None
+ module = importlib.util.module_from_spec(spec)
+ hook.exec_module(module)
+ hook.mark_rewrite("test_remember_rewritten_modules")
+ hook.mark_rewrite("test_remember_rewritten_modules")
+ assert warnings == []
+
+ def test_rewrite_warning_using_pytest_plugins(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ **{
+ "conftest.py": "pytest_plugins = ['core', 'gui', 'sci']",
+ "core.py": "",
+ "gui.py": "pytest_plugins = ['core', 'sci']",
+ "sci.py": "pytest_plugins = ['core']",
+ "test_rewrite_warning_pytest_plugins.py": "def test(): pass",
+ }
+ )
+ pytester.chdir()
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*= 1 passed in *=*"])
+ result.stdout.no_fnmatch_line("*pytest-warning summary*")
+
+ def test_rewrite_warning_using_pytest_plugins_env_var(
+ self, pytester: Pytester, monkeypatch
+ ) -> None:
+ monkeypatch.setenv("PYTEST_PLUGINS", "plugin")
+ pytester.makepyfile(
+ **{
+ "plugin.py": "",
+ "test_rewrite_warning_using_pytest_plugins_env_var.py": """
+ import plugin
+ pytest_plugins = ['plugin']
+ def test():
+ pass
+ """,
+ }
+ )
+ pytester.chdir()
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*= 1 passed in *=*"])
+ result.stdout.no_fnmatch_line("*pytest-warning summary*")
+
+
+class TestAssertionRewriteHookDetails:
+ def test_sys_meta_path_munged(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_meta_path():
+ import sys; sys.meta_path = []"""
+ )
+ assert pytester.runpytest().ret == 0
+
+ def test_write_pyc(self, pytester: Pytester, tmp_path, monkeypatch) -> None:
+ from _pytest.assertion.rewrite import _write_pyc
+ from _pytest.assertion import AssertionState
+
+ config = pytester.parseconfig()
+ state = AssertionState(config, "rewrite")
+ tmp_path.joinpath("source.py").touch()
+ source_path = str(tmp_path)
+ pycpath = tmp_path.joinpath("pyc")
+ co = compile("1", "f.py", "single")
+ assert _write_pyc(state, co, os.stat(source_path), pycpath)
+
+ if sys.platform == "win32":
+ from contextlib import contextmanager
+
+ @contextmanager
+ def atomic_write_failed(fn, mode="r", overwrite=False):
+ e = OSError()
+ e.errno = 10
+ raise e
+ yield # type:ignore[unreachable]
+
+ monkeypatch.setattr(
+ _pytest.assertion.rewrite, "atomic_write", atomic_write_failed
+ )
+ else:
+
+ def raise_oserror(*args):
+ raise OSError()
+
+ monkeypatch.setattr("os.rename", raise_oserror)
+
+ assert not _write_pyc(state, co, os.stat(source_path), pycpath)
+
+ def test_resources_provider_for_loader(self, pytester: Pytester) -> None:
+ """
+ Attempts to load resources from a package should succeed normally,
+        even when the AssertionRewritingHook is used to load the modules.
+
+ See #366 for details.
+ """
+ pytest.importorskip("pkg_resources")
+
+ pytester.mkpydir("testpkg")
+ contents = {
+ "testpkg/test_pkg": """
+ import pkg_resources
+
+ import pytest
+ from _pytest.assertion.rewrite import AssertionRewritingHook
+
+ def test_load_resource():
+ assert isinstance(__loader__, AssertionRewritingHook)
+ res = pkg_resources.resource_string(__name__, 'resource.txt')
+ res = res.decode('ascii')
+ assert res == 'Load me please.'
+ """
+ }
+ pytester.makepyfile(**contents)
+ pytester.maketxtfile(**{"testpkg/resource": "Load me please."})
+
+ result = pytester.runpytest_subprocess()
+ result.assert_outcomes(passed=1)
+
+ def test_read_pyc(self, tmp_path: Path) -> None:
+ """
+        Ensure that `_read_pyc` can properly deal with corrupted pyc files.
+ In those circumstances it should just give up instead of generating
+ an exception that is propagated to the caller.
+ """
+ import py_compile
+ from _pytest.assertion.rewrite import _read_pyc
+
+ source = tmp_path / "source.py"
+ pyc = Path(str(source) + "c")
+
+ source.write_text("def test(): pass")
+ py_compile.compile(str(source), str(pyc))
+
+ contents = pyc.read_bytes()
+ strip_bytes = 20 # header is around 16 bytes, strip a little more
+ assert len(contents) > strip_bytes
+ pyc.write_bytes(contents[:strip_bytes])
+
+ assert _read_pyc(source, pyc) is None # no error
+
+ def test_read_pyc_success(self, tmp_path: Path, pytester: Pytester) -> None:
+ """
+        Ensure that _rewrite_test() -> _write_pyc() produces a pyc file
+        that can be properly read back with _read_pyc().
+ """
+ from _pytest.assertion import AssertionState
+ from _pytest.assertion.rewrite import _read_pyc
+ from _pytest.assertion.rewrite import _rewrite_test
+ from _pytest.assertion.rewrite import _write_pyc
+
+ config = pytester.parseconfig()
+ state = AssertionState(config, "rewrite")
+
+ fn = tmp_path / "source.py"
+ pyc = Path(str(fn) + "c")
+
+ fn.write_text("def test(): assert True")
+
+ source_stat, co = _rewrite_test(fn, config)
+ _write_pyc(state, co, source_stat, pyc)
+ assert _read_pyc(fn, pyc, state.trace) is not None
+
+ @pytest.mark.skipif(
+ sys.version_info < (3, 7), reason="Only the Python 3.7 format for simplicity"
+ )
+ def test_read_pyc_more_invalid(self, tmp_path: Path) -> None:
+ from _pytest.assertion.rewrite import _read_pyc
+
+ source = tmp_path / "source.py"
+ pyc = tmp_path / "source.pyc"
+
+ source_bytes = b"def test(): pass\n"
+ source.write_bytes(source_bytes)
+
+ magic = importlib.util.MAGIC_NUMBER
+
+ flags = b"\x00\x00\x00\x00"
+
+ mtime = b"\x58\x3c\xb0\x5f"
+ mtime_int = int.from_bytes(mtime, "little")
+ os.utime(source, (mtime_int, mtime_int))
+
+ size = len(source_bytes).to_bytes(4, "little")
+
+ code = marshal.dumps(compile(source_bytes, str(source), "exec"))
+
+ # Good header.
+ pyc.write_bytes(magic + flags + mtime + size + code)
+ assert _read_pyc(source, pyc, print) is not None
+
+ # Too short.
+ pyc.write_bytes(magic + flags + mtime)
+ assert _read_pyc(source, pyc, print) is None
+
+ # Bad magic.
+ pyc.write_bytes(b"\x12\x34\x56\x78" + flags + mtime + size + code)
+ assert _read_pyc(source, pyc, print) is None
+
+ # Unsupported flags.
+ pyc.write_bytes(magic + b"\x00\xff\x00\x00" + mtime + size + code)
+ assert _read_pyc(source, pyc, print) is None
+
+ # Bad mtime.
+ pyc.write_bytes(magic + flags + b"\x58\x3d\xb0\x5f" + size + code)
+ assert _read_pyc(source, pyc, print) is None
+
+ # Bad size.
+ pyc.write_bytes(magic + flags + mtime + b"\x99\x00\x00\x00" + code)
+ assert _read_pyc(source, pyc, print) is None
+
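The header bytes assembled field by field above follow the PEP 552 pyc layout that _read_pyc validates: 4 bytes each of magic, flags, source mtime and source size, followed by the marshalled code object. A standalone sketch of producing an equivalent timestamp-based pyc with just the stdlib (illustrative only; pytest's own _write_pyc is the real implementation), assuming CPython 3.7+:

# Sketch: hand-roll the same header layout the test above builds byte by byte.
import importlib.util
import marshal
import os

def write_timestamp_pyc(source_path: str, pyc_path: str) -> None:
    """Write a flags == 0 (timestamp-invalidated) pyc file for source_path."""
    with open(source_path, "rb") as f:
        source_bytes = f.read()
    code = compile(source_bytes, source_path, "exec")
    mtime = int(os.stat(source_path).st_mtime) & 0xFFFFFFFF  # low 32 bits, as in the test
    header = (
        importlib.util.MAGIC_NUMBER                       # 4-byte magic
        + (0).to_bytes(4, "little")                       # flags: timestamp invalidation
        + mtime.to_bytes(4, "little")                     # source mtime
        + (len(source_bytes) & 0xFFFFFFFF).to_bytes(4, "little")  # source size
    )
    with open(pyc_path, "wb") as fp:
        fp.write(header + marshal.dumps(code))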
+ def test_reload_is_same_and_reloads(self, pytester: Pytester) -> None:
+ """Reloading a (collected) module after change picks up the change."""
+ pytester.makeini(
+ """
+ [pytest]
+ python_files = *.py
+ """
+ )
+ pytester.makepyfile(
+ file="""
+ def reloaded():
+ return False
+
+ def rewrite_self():
+ with open(__file__, 'w') as self:
+ self.write('def reloaded(): return True')
+ """,
+ test_fun="""
+ import sys
+ from importlib import reload
+
+ def test_loader():
+ import file
+ assert not file.reloaded()
+ file.rewrite_self()
+ assert sys.modules["file"] is reload(file)
+ assert file.reloaded()
+ """,
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 1 passed*"])
+
+ def test_get_data_support(self, pytester: Pytester) -> None:
+ """Implement optional PEP302 api (#808)."""
+ path = pytester.mkpydir("foo")
+ path.joinpath("test_foo.py").write_text(
+ textwrap.dedent(
+ """\
+ class Test(object):
+ def test_foo(self):
+ import pkgutil
+ data = pkgutil.get_data('foo.test_foo', 'data.txt')
+ assert data == b'Hey'
+ """
+ )
+ )
+ path.joinpath("data.txt").write_text("Hey")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_issue731(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ class LongReprWithBraces(object):
+ def __repr__(self):
+ return 'LongReprWithBraces({' + ('a' * 80) + '}' + ('a' * 120) + ')'
+
+ def some_method(self):
+ return False
+
+ def test_long_repr():
+ obj = LongReprWithBraces()
+ assert obj.some_method()
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.no_fnmatch_line("*unbalanced braces*")
+
+
+class TestIssue925:
+ def test_simple_case(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_ternary_display():
+ assert (False == False) == False
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*E*assert (False == False) == False"])
+
+ def test_long_case(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_ternary_display():
+ assert False == (False == True) == True
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*E*assert (False == True) == True"])
+
+ def test_many_brackets(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_ternary_display():
+ assert True == ((False == True) == True)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*E*assert True == ((False == True) == True)"])
+
+
+class TestIssue2121:
+ def test_rewrite_python_files_contain_subdirs(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ **{
+ "tests/file.py": """
+ def test_simple_failure():
+ assert 1 + 1 == 3
+ """
+ }
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ python_files = tests/**.py
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*E*assert (1 + 1) == 3"])
+
+
+@pytest.mark.skipif(
+ sys.maxsize <= (2 ** 31 - 1), reason="Causes OverflowError on 32bit systems"
+)
+@pytest.mark.parametrize("offset", [-1, +1])
+def test_source_mtime_long_long(pytester: Pytester, offset) -> None:
+ """Support modification dates after 2038 in rewritten files (#4903).
+
+ pytest would crash with:
+
+ fp.write(struct.pack("<ll", mtime, size))
+ E struct.error: argument out of range
+ """
+ p = pytester.makepyfile(
+ """
+ def test(): pass
+ """
+ )
+ # use unsigned long timestamp which overflows signed long,
+ # which was the cause of the bug
+ # +1 offset also tests masking of 0xFFFFFFFF
+ timestamp = 2 ** 32 + offset
+ os.utime(str(p), (timestamp, timestamp))
+ result = pytester.runpytest()
+ assert result.ret == 0
+
+
+def test_rewrite_infinite_recursion(
+ pytester: Pytester, pytestconfig, monkeypatch
+) -> None:
+ """Fix infinite recursion when writing pyc files: if an import happens to be triggered when writing the pyc
+ file, this would cause another call to the hook, which would trigger another pyc writing, which could
+ trigger another import, and so on. (#3506)"""
+ from _pytest.assertion import rewrite as rewritemod
+
+ pytester.syspathinsert()
+ pytester.makepyfile(test_foo="def test_foo(): pass")
+ pytester.makepyfile(test_bar="def test_bar(): pass")
+
+ original_write_pyc = rewritemod._write_pyc
+
+ write_pyc_called = []
+
+ def spy_write_pyc(*args, **kwargs):
+ # make a note that we have called _write_pyc
+ write_pyc_called.append(True)
+ # try to import a module at this point: we should not try to rewrite this module
+ assert hook.find_spec("test_bar") is None
+ return original_write_pyc(*args, **kwargs)
+
+ monkeypatch.setattr(rewritemod, "_write_pyc", spy_write_pyc)
+ monkeypatch.setattr(sys, "dont_write_bytecode", False)
+
+ hook = AssertionRewritingHook(pytestconfig)
+ spec = hook.find_spec("test_foo")
+ assert spec is not None
+ module = importlib.util.module_from_spec(spec)
+ hook.exec_module(module)
+ assert len(write_pyc_called) == 1
+
+
+class TestEarlyRewriteBailout:
+ @pytest.fixture
+ def hook(
+ self, pytestconfig, monkeypatch, pytester: Pytester
+ ) -> AssertionRewritingHook:
+ """Returns a patched AssertionRewritingHook instance so we can configure its initial paths and track
+ if PathFinder.find_spec has been called.
+ """
+ import importlib.machinery
+
+ self.find_spec_calls: List[str] = []
+ self.initial_paths: Set[Path] = set()
+
+ class StubSession:
+ _initialpaths = self.initial_paths
+
+ def isinitpath(self, p):
+ return p in self._initialpaths
+
+ def spy_find_spec(name, path):
+ self.find_spec_calls.append(name)
+ return importlib.machinery.PathFinder.find_spec(name, path)
+
+ hook = AssertionRewritingHook(pytestconfig)
+ # use default patterns, otherwise we inherit pytest's testing config
+ hook.fnpats[:] = ["test_*.py", "*_test.py"]
+ monkeypatch.setattr(hook, "_find_spec", spy_find_spec)
+ hook.set_session(StubSession()) # type: ignore[arg-type]
+ pytester.syspathinsert()
+ return hook
+
+ def test_basic(self, pytester: Pytester, hook: AssertionRewritingHook) -> None:
+ """
+ Ensure we avoid calling PathFinder.find_spec when we know for sure a certain
+ module will not be rewritten to optimize assertion rewriting (#3918).
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+ @pytest.fixture
+ def fix(): return 1
+ """
+ )
+ pytester.makepyfile(test_foo="def test_foo(): pass")
+ pytester.makepyfile(bar="def bar(): pass")
+ foobar_path = pytester.makepyfile(foobar="def foobar(): pass")
+ self.initial_paths.add(foobar_path)
+
+ # conftest files should always be rewritten
+ assert hook.find_spec("conftest") is not None
+ assert self.find_spec_calls == ["conftest"]
+
+ # files matching "python_files" mask should always be rewritten
+ assert hook.find_spec("test_foo") is not None
+ assert self.find_spec_calls == ["conftest", "test_foo"]
+
+ # file does not match "python_files": early bailout
+ assert hook.find_spec("bar") is None
+ assert self.find_spec_calls == ["conftest", "test_foo"]
+
+ # file is an initial path (passed on the command-line): should be rewritten
+ assert hook.find_spec("foobar") is not None
+ assert self.find_spec_calls == ["conftest", "test_foo", "foobar"]
+
+ def test_pattern_contains_subdirectories(
+ self, pytester: Pytester, hook: AssertionRewritingHook
+ ) -> None:
+ """If one of the python_files patterns contains subdirectories ("tests/**.py") we can't bail out early
+ because we need to match against the full path, which can only be found by calling PathFinder.find_spec.
+ """
+ pytester.makepyfile(
+ **{
+ "tests/file.py": """\
+ def test_simple_failure():
+ assert 1 + 1 == 3
+ """
+ }
+ )
+ pytester.syspathinsert("tests")
+ hook.fnpats[:] = ["tests/**.py"]
+ assert hook.find_spec("file") is not None
+ assert self.find_spec_calls == ["file"]
+
+ @pytest.mark.skipif(
+ sys.platform.startswith("win32"), reason="cannot remove cwd on Windows"
+ )
+ @pytest.mark.skipif(
+ sys.platform.startswith("sunos5"), reason="cannot remove cwd on Solaris"
+ )
+ def test_cwd_changed(self, pytester: Pytester, monkeypatch) -> None:
+ # Setup conditions for py's fspath trying to import pathlib on py34
+ # always (previously triggered via xdist only).
+ # Ref: https://github.com/pytest-dev/py/pull/207
+ monkeypatch.syspath_prepend("")
+ monkeypatch.delitem(sys.modules, "pathlib", raising=False)
+
+ pytester.makepyfile(
+ **{
+ "test_setup_nonexisting_cwd.py": """\
+ import os
+ import tempfile
+
+ with tempfile.TemporaryDirectory() as d:
+ os.chdir(d)
+ """,
+ "test_test.py": """\
+ def test():
+ pass
+ """,
+ }
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 1 passed in *"])
+
+
+class TestAssertionPass:
+ def test_option_default(self, pytester: Pytester) -> None:
+ config = pytester.parseconfig()
+ assert config.getini("enable_assertion_pass_hook") is False
+
+ @pytest.fixture
+ def flag_on(self, pytester: Pytester):
+ pytester.makeini("[pytest]\nenable_assertion_pass_hook = True\n")
+
+ @pytest.fixture
+ def hook_on(self, pytester: Pytester):
+ pytester.makeconftest(
+ """\
+ def pytest_assertion_pass(item, lineno, orig, expl):
+ raise Exception("Assertion Passed: {} {} at line {}".format(orig, expl, lineno))
+ """
+ )
+
+ def test_hook_call(self, pytester: Pytester, flag_on, hook_on) -> None:
+ pytester.makepyfile(
+ """\
+ def test_simple():
+ a=1
+ b=2
+ c=3
+ d=0
+
+ assert a+b == c+d
+
+ # cover failing assertions with a message
+ def test_fails():
+ assert False, "assert with message"
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ "*Assertion Passed: a+b == c+d (1 + 2) == (3 + 0) at line 7*"
+ )
+
+ def test_hook_call_with_parens(self, pytester: Pytester, flag_on, hook_on) -> None:
+ pytester.makepyfile(
+ """\
+ def f(): return 1
+ def test():
+ assert f()
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines("*Assertion Passed: f() 1")
+
+ def test_hook_not_called_without_hookimpl(
+ self, pytester: Pytester, monkeypatch, flag_on
+ ) -> None:
+ """Assertion pass should not be called (and hence formatting should
+ not occur) if there is no hook declared for pytest_assertion_pass"""
+
+ def raise_on_assertionpass(*_, **__):
+ raise Exception("Assertion passed called when it shouldn't!")
+
+ monkeypatch.setattr(
+ _pytest.assertion.rewrite, "_call_assertion_pass", raise_on_assertionpass
+ )
+
+ pytester.makepyfile(
+ """\
+ def test_simple():
+ a=1
+ b=2
+ c=3
+ d=0
+
+ assert a+b == c+d
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(passed=1)
+
+ def test_hook_not_called_without_cmd_option(
+ self, pytester: Pytester, monkeypatch
+ ) -> None:
+ """Assertion pass should not be called (and hence formatting should
+ not occur) if the enable_assertion_pass_hook ini option is not set,
+ even though a pytest_assertion_pass hook is declared."""
+
+ def raise_on_assertionpass(*_, **__):
+ raise Exception("Assertion passed called when it shouldn't!")
+
+ monkeypatch.setattr(
+ _pytest.assertion.rewrite, "_call_assertion_pass", raise_on_assertionpass
+ )
+
+ pytester.makeconftest(
+ """\
+ def pytest_assertion_pass(item, lineno, orig, expl):
+ raise Exception("Assertion Passed: {} {} at line {}".format(orig, expl, lineno))
+ """
+ )
+
+ pytester.makepyfile(
+ """\
+ def test_simple():
+ a=1
+ b=2
+ c=3
+ d=0
+
+ assert a+b == c+d
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(passed=1)
+
+
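The TestAssertionPass cases above exercise the two pieces the pass-hook needs: the enable_assertion_pass_hook ini option and a pytest_assertion_pass implementation in a conftest. As a rough sketch (hook signature as exercised above; the logging destination is only an illustration), a project could record passing assertions like this:

# conftest.py -- sketch: log passing assertions instead of raising, unlike the
# conftest used in the tests above, which raises so the output can be matched.
import logging

def pytest_assertion_pass(item, lineno, orig, expl):
    # item: the test item, lineno: line of the assert,
    # orig: assertion source text, expl: rewritten explanation
    logging.getLogger("assertions").info(
        "%s:%d passed: %s -> %s", item.nodeid, lineno, orig, expl
    )

The ini file must also set enable_assertion_pass_hook = True; without it the hook is never called, as test_hook_not_called_without_cmd_option verifies.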
+@pytest.mark.parametrize(
+ ("src", "expected"),
+ (
+ # fmt: off
+ pytest.param(b"", {}, id="trivial"),
+ pytest.param(
+ b"def x(): assert 1\n",
+ {1: "1"},
+ id="assert statement not on own line",
+ ),
+ pytest.param(
+ b"def x():\n"
+ b" assert 1\n"
+ b" assert 1+2\n",
+ {2: "1", 3: "1+2"},
+ id="multiple assertions",
+ ),
+ pytest.param(
+ # changes in encoding cause the byte offsets to be different
+ "# -*- coding: latin1\n"
+ "def ÀÀÀÀÀ(): assert 1\n".encode("latin1"),
+ {2: "1"},
+ id="latin1 encoded on first line\n",
+ ),
+ pytest.param(
+ # using the default utf-8 encoding
+ "def ÀÀÀÀÀ(): assert 1\n".encode(),
+ {1: "1"},
+ id="utf-8 encoded on first line",
+ ),
+ pytest.param(
+ b"def x():\n"
+ b" assert (\n"
+ b" 1 + 2 # comment\n"
+ b" )\n",
+ {2: "(\n 1 + 2 # comment\n )"},
+ id="multi-line assertion",
+ ),
+ pytest.param(
+ b"def x():\n"
+ b" assert y == [\n"
+ b" 1, 2, 3\n"
+ b" ]\n",
+ {2: "y == [\n 1, 2, 3\n ]"},
+ id="multi line assert with list continuation",
+ ),
+ pytest.param(
+ b"def x():\n"
+ b" assert 1 + \\\n"
+ b" 2\n",
+ {2: "1 + \\\n 2"},
+ id="backslash continuation",
+ ),
+ pytest.param(
+ b"def x():\n"
+ b" assert x, y\n",
+ {2: "x"},
+ id="assertion with message",
+ ),
+ pytest.param(
+ b"def x():\n"
+ b" assert (\n"
+ b" f(1, 2, 3)\n"
+ b" ), 'f did not work!'\n",
+ {2: "(\n f(1, 2, 3)\n )"},
+ id="assertion with message, test spanning multiple lines",
+ ),
+ pytest.param(
+ b"def x():\n"
+ b" assert \\\n"
+ b" x\\\n"
+ b" , 'failure message'\n",
+ {2: "x"},
+ id="escaped newlines plus message",
+ ),
+ pytest.param(
+ b"def x(): assert 5",
+ {1: "5"},
+ id="no newline at end of file",
+ ),
+ # fmt: on
+ ),
+)
+def test_get_assertion_exprs(src, expected) -> None:
+ assert _get_assertion_exprs(src) == expected
+
+
+def test_try_makedirs(monkeypatch, tmp_path: Path) -> None:
+ from _pytest.assertion.rewrite import try_makedirs
+
+ p = tmp_path / "foo"
+
+ # create
+ assert try_makedirs(p)
+ assert p.is_dir()
+
+ # already exist
+ assert try_makedirs(p)
+
+ # monkeypatch to simulate all error situations
+ def fake_mkdir(p, exist_ok=False, *, exc):
+ assert isinstance(p, Path)
+ raise exc
+
+ monkeypatch.setattr(os, "makedirs", partial(fake_mkdir, exc=FileNotFoundError()))
+ assert not try_makedirs(p)
+
+ monkeypatch.setattr(os, "makedirs", partial(fake_mkdir, exc=NotADirectoryError()))
+ assert not try_makedirs(p)
+
+ monkeypatch.setattr(os, "makedirs", partial(fake_mkdir, exc=PermissionError()))
+ assert not try_makedirs(p)
+
+ err = OSError()
+ err.errno = errno.EROFS
+ monkeypatch.setattr(os, "makedirs", partial(fake_mkdir, exc=err))
+ assert not try_makedirs(p)
+
+ # unhandled OSError should raise
+ err = OSError()
+ err.errno = errno.ECHILD
+ monkeypatch.setattr(os, "makedirs", partial(fake_mkdir, exc=err))
+ with pytest.raises(OSError) as exc_info:
+ try_makedirs(p)
+ assert exc_info.value.errno == errno.ECHILD
+
+
+class TestPyCacheDir:
+ @pytest.mark.parametrize(
+ "prefix, source, expected",
+ [
+ ("c:/tmp/pycs", "d:/projects/src/foo.py", "c:/tmp/pycs/projects/src"),
+ (None, "d:/projects/src/foo.py", "d:/projects/src/__pycache__"),
+ ("/tmp/pycs", "/home/projects/src/foo.py", "/tmp/pycs/home/projects/src"),
+ (None, "/home/projects/src/foo.py", "/home/projects/src/__pycache__"),
+ ],
+ )
+ def test_get_cache_dir(self, monkeypatch, prefix, source, expected) -> None:
+ monkeypatch.delenv("PYTHONPYCACHEPREFIX", raising=False)
+
+ if prefix is not None and sys.version_info < (3, 8):
+ pytest.skip("pycache_prefix not available in py<38")
+ monkeypatch.setattr(sys, "pycache_prefix", prefix, raising=False)
+
+ assert get_cache_dir(Path(source)) == Path(expected)
+
+ @pytest.mark.skipif(
+ sys.version_info < (3, 8), reason="pycache_prefix not available in py<38"
+ )
+ @pytest.mark.skipif(
+ sys.version_info[:2] == (3, 9) and sys.platform.startswith("win"),
+ reason="#9298",
+ )
+ def test_sys_pycache_prefix_integration(
+ self, tmp_path, monkeypatch, pytester: Pytester
+ ) -> None:
+ """Integration test for sys.pycache_prefix (#4730)."""
+ pycache_prefix = tmp_path / "my/pycs"
+ monkeypatch.setattr(sys, "pycache_prefix", str(pycache_prefix))
+ monkeypatch.setattr(sys, "dont_write_bytecode", False)
+
+ pytester.makepyfile(
+ **{
+ "src/test_foo.py": """
+ import bar
+ def test_foo():
+ pass
+ """,
+ "src/bar/__init__.py": "",
+ }
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+
+ test_foo = pytester.path.joinpath("src/test_foo.py")
+ bar_init = pytester.path.joinpath("src/bar/__init__.py")
+ assert test_foo.is_file()
+ assert bar_init.is_file()
+
+ # test file: rewritten, custom pytest cache tag
+ test_foo_pyc = get_cache_dir(test_foo) / ("test_foo" + PYC_TAIL)
+ assert test_foo_pyc.is_file()
+
+ # normal file: not touched by pytest, normal cache tag
+ bar_init_pyc = get_cache_dir(bar_init) / "__init__.{cache_tag}.pyc".format(
+ cache_tag=sys.implementation.cache_tag
+ )
+ assert bar_init_pyc.is_file()
+
+
+class TestReprSizeVerbosity:
+ """
+ Check that verbosity also controls the string length threshold above which
+ reprs are shortened using an ellipsis.
+ """
+
+ @pytest.mark.parametrize(
+ "verbose, expected_size",
+ [
+ (0, DEFAULT_REPR_MAX_SIZE),
+ (1, DEFAULT_REPR_MAX_SIZE * 10),
+ (2, None),
+ (3, None),
+ ],
+ )
+ def test_get_maxsize_for_saferepr(self, verbose: int, expected_size) -> None:
+ class FakeConfig:
+ def getoption(self, name: str) -> int:
+ assert name == "verbose"
+ return verbose
+
+ config = FakeConfig()
+ assert _get_maxsize_for_saferepr(cast(Config, config)) == expected_size
+
+ def create_test_file(self, pytester: Pytester, size: int) -> None:
+ pytester.makepyfile(
+ f"""
+ def test_very_long_string():
+ text = "x" * {size}
+ assert "hello world" in text
+ """
+ )
+
+ def test_default_verbosity(self, pytester: Pytester) -> None:
+ self.create_test_file(pytester, DEFAULT_REPR_MAX_SIZE)
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*xxx...xxx*"])
+
+ def test_increased_verbosity(self, pytester: Pytester) -> None:
+ self.create_test_file(pytester, DEFAULT_REPR_MAX_SIZE)
+ result = pytester.runpytest("-v")
+ result.stdout.no_fnmatch_line("*xxx...xxx*")
+
+ def test_max_increased_verbosity(self, pytester: Pytester) -> None:
+ self.create_test_file(pytester, DEFAULT_REPR_MAX_SIZE * 10)
+ result = pytester.runpytest("-vv")
+ result.stdout.no_fnmatch_line("*xxx...xxx*")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_cacheprovider.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_cacheprovider.py
new file mode 100644
index 0000000000..cc6d547dfb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_cacheprovider.py
@@ -0,0 +1,1251 @@
+import os
+import shutil
+from pathlib import Path
+from typing import Generator
+from typing import List
+
+import pytest
+from _pytest.config import ExitCode
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+from _pytest.tmpdir import TempPathFactory
+
+pytest_plugins = ("pytester",)
+
+
+class TestNewAPI:
+ def test_config_cache_mkdir(self, pytester: Pytester) -> None:
+ pytester.makeini("[pytest]")
+ config = pytester.parseconfigure()
+ assert config.cache is not None
+ with pytest.raises(ValueError):
+ config.cache.mkdir("key/name")
+
+ p = config.cache.mkdir("name")
+ assert p.is_dir()
+
+ def test_config_cache_dataerror(self, pytester: Pytester) -> None:
+ pytester.makeini("[pytest]")
+ config = pytester.parseconfigure()
+ assert config.cache is not None
+ cache = config.cache
+ pytest.raises(TypeError, lambda: cache.set("key/name", cache))
+ config.cache.set("key/name", 0)
+ config.cache._getvaluepath("key/name").write_bytes(b"123invalid")
+ val = config.cache.get("key/name", -2)
+ assert val == -2
+
+ @pytest.mark.filterwarnings("ignore:could not create cache path")
+ def test_cache_writefail_cachfile_silent(self, pytester: Pytester) -> None:
+ pytester.makeini("[pytest]")
+ pytester.path.joinpath(".pytest_cache").write_text("gone wrong")
+ config = pytester.parseconfigure()
+ cache = config.cache
+ assert cache is not None
+ cache.set("test/broken", [])
+
+ @pytest.fixture
+ def unwritable_cache_dir(self, pytester: Pytester) -> Generator[Path, None, None]:
+ cache_dir = pytester.path.joinpath(".pytest_cache")
+ cache_dir.mkdir()
+ mode = cache_dir.stat().st_mode
+ cache_dir.chmod(0)
+ if os.access(cache_dir, os.W_OK):
+ pytest.skip("Failed to make cache dir unwritable")
+
+ yield cache_dir
+ cache_dir.chmod(mode)
+
+ @pytest.mark.filterwarnings(
+ "ignore:could not create cache path:pytest.PytestWarning"
+ )
+ def test_cache_writefail_permissions(
+ self, unwritable_cache_dir: Path, pytester: Pytester
+ ) -> None:
+ pytester.makeini("[pytest]")
+ config = pytester.parseconfigure()
+ cache = config.cache
+ assert cache is not None
+ cache.set("test/broken", [])
+
+ @pytest.mark.filterwarnings("default")
+ def test_cache_failure_warns(
+ self,
+ pytester: Pytester,
+ monkeypatch: MonkeyPatch,
+ unwritable_cache_dir: Path,
+ ) -> None:
+ monkeypatch.setenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "1")
+
+ pytester.makepyfile("def test_error(): raise Exception")
+ result = pytester.runpytest()
+ assert result.ret == 1
+ # warnings from nodeids, lastfailed, and stepwise
+ result.stdout.fnmatch_lines(
+ [
+ # Validate location/stacklevel of warning from cacheprovider.
+ "*= warnings summary =*",
+ "*/cacheprovider.py:*",
+ " */cacheprovider.py:*: PytestCacheWarning: could not create cache path "
+ f"{unwritable_cache_dir}/v/cache/nodeids",
+ ' config.cache.set("cache/nodeids", sorted(self.cached_nodeids))',
+ "*1 failed, 3 warnings in*",
+ ]
+ )
+
+ def test_config_cache(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_configure(config):
+ # see that we get cache information early on
+ assert hasattr(config, "cache")
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_session(pytestconfig):
+ assert hasattr(pytestconfig, "cache")
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_cachefuncarg(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_cachefuncarg(cache):
+ val = cache.get("some/thing", None)
+ assert val is None
+ cache.set("some/thing", [1])
+ pytest.raises(TypeError, lambda: cache.get("some/thing"))
+ val = cache.get("some/thing", [])
+ assert val == [1]
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_custom_rel_cache_dir(self, pytester: Pytester) -> None:
+ rel_cache_dir = os.path.join("custom_cache_dir", "subdir")
+ pytester.makeini(
+ """
+ [pytest]
+ cache_dir = {cache_dir}
+ """.format(
+ cache_dir=rel_cache_dir
+ )
+ )
+ pytester.makepyfile(test_errored="def test_error():\n assert False")
+ pytester.runpytest()
+ assert pytester.path.joinpath(rel_cache_dir).is_dir()
+
+ def test_custom_abs_cache_dir(
+ self, pytester: Pytester, tmp_path_factory: TempPathFactory
+ ) -> None:
+ tmp = tmp_path_factory.mktemp("tmp")
+ abs_cache_dir = tmp / "custom_cache_dir"
+ pytester.makeini(
+ """
+ [pytest]
+ cache_dir = {cache_dir}
+ """.format(
+ cache_dir=abs_cache_dir
+ )
+ )
+ pytester.makepyfile(test_errored="def test_error():\n assert False")
+ pytester.runpytest()
+ assert abs_cache_dir.is_dir()
+
+ def test_custom_cache_dir_with_env_var(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ monkeypatch.setenv("env_var", "custom_cache_dir")
+ pytester.makeini(
+ """
+ [pytest]
+ cache_dir = {cache_dir}
+ """.format(
+ cache_dir="$env_var"
+ )
+ )
+ pytester.makepyfile(test_errored="def test_error():\n assert False")
+ pytester.runpytest()
+ assert pytester.path.joinpath("custom_cache_dir").is_dir()
+
+
+@pytest.mark.parametrize("env", ((), ("TOX_ENV_DIR", "/tox_env_dir")))
+def test_cache_reportheader(env, pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
+ pytester.makepyfile("""def test_foo(): pass""")
+ if env:
+ monkeypatch.setenv(*env)
+ expected = os.path.join(env[1], ".pytest_cache")
+ else:
+ monkeypatch.delenv("TOX_ENV_DIR", raising=False)
+ expected = ".pytest_cache"
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(["cachedir: %s" % expected])
+
+
+def test_cache_reportheader_external_abspath(
+ pytester: Pytester, tmp_path_factory: TempPathFactory
+) -> None:
+ external_cache = tmp_path_factory.mktemp(
+ "test_cache_reportheader_external_abspath_abs"
+ )
+
+ pytester.makepyfile("def test_hello(): pass")
+ pytester.makeini(
+ """
+ [pytest]
+ cache_dir = {abscache}
+ """.format(
+ abscache=external_cache
+ )
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines([f"cachedir: {external_cache}"])
+
+
+def test_cache_show(pytester: Pytester) -> None:
+ result = pytester.runpytest("--cache-show")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*cache is empty*"])
+ pytester.makeconftest(
+ """
+ def pytest_configure(config):
+ config.cache.set("my/name", [1,2,3])
+ config.cache.set("my/hello", "world")
+ config.cache.set("other/some", {1:2})
+ dp = config.cache.mkdir("mydb")
+ dp.joinpath("hello").touch()
+ dp.joinpath("world").touch()
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 5 # no tests executed
+
+ result = pytester.runpytest("--cache-show")
+ result.stdout.fnmatch_lines(
+ [
+ "*cachedir:*",
+ "*- cache values for '[*]' -*",
+ "cache/nodeids contains:",
+ "my/name contains:",
+ " [1, 2, 3]",
+ "other/some contains:",
+ " {*'1': 2}",
+ "*- cache directories for '[*]' -*",
+ "*mydb/hello*length 0*",
+ "*mydb/world*length 0*",
+ ]
+ )
+ assert result.ret == 0
+
+ result = pytester.runpytest("--cache-show", "*/hello")
+ result.stdout.fnmatch_lines(
+ [
+ "*cachedir:*",
+ "*- cache values for '[*]/hello' -*",
+ "my/hello contains:",
+ " *'world'",
+ "*- cache directories for '[*]/hello' -*",
+ "d/mydb/hello*length 0*",
+ ]
+ )
+ stdout = result.stdout.str()
+ assert "other/some" not in stdout
+ assert "d/mydb/world" not in stdout
+ assert result.ret == 0
+
+
+class TestLastFailed:
+ def test_lastfailed_usecase(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ monkeypatch.setattr("sys.dont_write_bytecode", True)
+ p = pytester.makepyfile(
+ """
+ def test_1(): assert 0
+ def test_2(): assert 0
+ def test_3(): assert 1
+ """
+ )
+ result = pytester.runpytest(str(p))
+ result.stdout.fnmatch_lines(["*2 failed*"])
+ p = pytester.makepyfile(
+ """
+ def test_1(): assert 1
+ def test_2(): assert 1
+ def test_3(): assert 0
+ """
+ )
+ result = pytester.runpytest(str(p), "--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 3 items / 1 deselected / 2 selected",
+ "run-last-failure: rerun previous 2 failures",
+ "*= 2 passed, 1 deselected in *",
+ ]
+ )
+ result = pytester.runpytest(str(p), "--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 3 items",
+ "run-last-failure: no previously failed tests, not deselecting items.",
+ "*1 failed*2 passed*",
+ ]
+ )
+ pytester.path.joinpath(".pytest_cache", ".git").mkdir(parents=True)
+ result = pytester.runpytest(str(p), "--lf", "--cache-clear")
+ result.stdout.fnmatch_lines(["*1 failed*2 passed*"])
+ assert pytester.path.joinpath(".pytest_cache", "README.md").is_file()
+ assert pytester.path.joinpath(".pytest_cache", ".git").is_dir()
+
+ # Run this again to make sure clear-cache is robust
+ if os.path.isdir(".pytest_cache"):
+ shutil.rmtree(".pytest_cache")
+ result = pytester.runpytest("--lf", "--cache-clear")
+ result.stdout.fnmatch_lines(["*1 failed*2 passed*"])
+
+ def test_failedfirst_order(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_a="def test_always_passes(): pass",
+ test_b="def test_always_fails(): assert 0",
+ )
+ result = pytester.runpytest()
+ # Test order will be collection order; alphabetical
+ result.stdout.fnmatch_lines(["test_a.py*", "test_b.py*"])
+ result = pytester.runpytest("--ff")
+ # Test order will be failing tests first
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "run-last-failure: rerun previous 1 failure first",
+ "test_b.py*",
+ "test_a.py*",
+ ]
+ )
+
+ def test_lastfailed_failedfirst_order(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_a="def test_always_passes(): assert 1",
+ test_b="def test_always_fails(): assert 0",
+ )
+ result = pytester.runpytest()
+ # Test order will be collection order; alphabetical
+ result.stdout.fnmatch_lines(["test_a.py*", "test_b.py*"])
+ result = pytester.runpytest("--lf", "--ff")
+ # Test order will be failing tests first
+ result.stdout.fnmatch_lines(["test_b.py*"])
+ result.stdout.no_fnmatch_line("*test_a.py*")
+
+ def test_lastfailed_difference_invocations(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ monkeypatch.setattr("sys.dont_write_bytecode", True)
+ pytester.makepyfile(
+ test_a="""
+ def test_a1(): assert 0
+ def test_a2(): assert 1
+ """,
+ test_b="def test_b1(): assert 0",
+ )
+ p = pytester.path.joinpath("test_a.py")
+ p2 = pytester.path.joinpath("test_b.py")
+
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*2 failed*"])
+ result = pytester.runpytest("--lf", p2)
+ result.stdout.fnmatch_lines(["*1 failed*"])
+
+ pytester.makepyfile(test_b="def test_b1(): assert 1")
+ result = pytester.runpytest("--lf", p2)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = pytester.runpytest("--lf", p)
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items / 1 deselected / 1 selected",
+ "run-last-failure: rerun previous 1 failure",
+ "*= 1 failed, 1 deselected in *",
+ ]
+ )
+
+ def test_lastfailed_usecase_splice(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ monkeypatch.setattr("sys.dont_write_bytecode", True)
+ pytester.makepyfile(
+ "def test_1(): assert 0", test_something="def test_2(): assert 0"
+ )
+ p2 = pytester.path.joinpath("test_something.py")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*2 failed*"])
+ result = pytester.runpytest("--lf", p2)
+ result.stdout.fnmatch_lines(["*1 failed*"])
+ result = pytester.runpytest("--lf")
+ result.stdout.fnmatch_lines(["*2 failed*"])
+
+ def test_lastfailed_xpass(self, pytester: Pytester) -> None:
+ pytester.inline_runsource(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_hello():
+ assert 1
+ """
+ )
+ config = pytester.parseconfigure()
+ assert config.cache is not None
+ lastfailed = config.cache.get("cache/lastfailed", -1)
+ assert lastfailed == -1
+
+ def test_non_serializable_parametrize(self, pytester: Pytester) -> None:
+ """Test that failed parametrized tests with unmarshallable parameters
+ don't break pytest-cache.
+ """
+ pytester.makepyfile(
+ r"""
+ import pytest
+
+ @pytest.mark.parametrize('val', [
+ b'\xac\x10\x02G',
+ ])
+ def test_fail(val):
+ assert False
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 failed in*"])
+
+ def test_terminal_report_lastfailed(self, pytester: Pytester) -> None:
+ test_a = pytester.makepyfile(
+ test_a="""
+ def test_a1(): pass
+ def test_a2(): pass
+ """
+ )
+ test_b = pytester.makepyfile(
+ test_b="""
+ def test_b1(): assert 0
+ def test_b2(): assert 0
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["collected 4 items", "*2 failed, 2 passed in*"])
+
+ result = pytester.runpytest("--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "run-last-failure: rerun previous 2 failures (skipped 1 file)",
+ "*2 failed in*",
+ ]
+ )
+
+ result = pytester.runpytest(test_a, "--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "run-last-failure: 2 known failures not in selected tests",
+ "*2 passed in*",
+ ]
+ )
+
+ result = pytester.runpytest(test_b, "--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "run-last-failure: rerun previous 2 failures",
+ "*2 failed in*",
+ ]
+ )
+
+ result = pytester.runpytest("test_b.py::test_b1", "--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 1 item",
+ "run-last-failure: rerun previous 1 failure",
+ "*1 failed in*",
+ ]
+ )
+
+ def test_terminal_report_failedfirst(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_a="""
+ def test_a1(): assert 0
+ def test_a2(): pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["collected 2 items", "*1 failed, 1 passed in*"])
+
+ result = pytester.runpytest("--ff")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "run-last-failure: rerun previous 1 failure first",
+ "*1 failed, 1 passed in*",
+ ]
+ )
+
+ def test_lastfailed_collectfailure(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+
+ pytester.makepyfile(
+ test_maybe="""
+ import os
+ env = os.environ
+ if '1' == env['FAILIMPORT']:
+ raise ImportError('fail')
+ def test_hello():
+ assert '0' == env['FAILTEST']
+ """
+ )
+
+ def rlf(fail_import, fail_run):
+ monkeypatch.setenv("FAILIMPORT", str(fail_import))
+ monkeypatch.setenv("FAILTEST", str(fail_run))
+
+ pytester.runpytest("-q")
+ config = pytester.parseconfigure()
+ assert config.cache is not None
+ lastfailed = config.cache.get("cache/lastfailed", -1)
+ return lastfailed
+
+ lastfailed = rlf(fail_import=0, fail_run=0)
+ assert lastfailed == -1
+
+ lastfailed = rlf(fail_import=1, fail_run=0)
+ assert list(lastfailed) == ["test_maybe.py"]
+
+ lastfailed = rlf(fail_import=0, fail_run=1)
+ assert list(lastfailed) == ["test_maybe.py::test_hello"]
+
+ def test_lastfailed_failure_subset(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ pytester.makepyfile(
+ test_maybe="""
+ import os
+ env = os.environ
+ if '1' == env['FAILIMPORT']:
+ raise ImportError('fail')
+ def test_hello():
+ assert '0' == env['FAILTEST']
+ """
+ )
+
+ pytester.makepyfile(
+ test_maybe2="""
+ import os
+ env = os.environ
+ if '1' == env['FAILIMPORT']:
+ raise ImportError('fail')
+
+ def test_hello():
+ assert '0' == env['FAILTEST']
+
+ def test_pass():
+ pass
+ """
+ )
+
+ def rlf(fail_import, fail_run, args=()):
+ monkeypatch.setenv("FAILIMPORT", str(fail_import))
+ monkeypatch.setenv("FAILTEST", str(fail_run))
+
+ result = pytester.runpytest("-q", "--lf", *args)
+ config = pytester.parseconfigure()
+ assert config.cache is not None
+ lastfailed = config.cache.get("cache/lastfailed", -1)
+ return result, lastfailed
+
+ result, lastfailed = rlf(fail_import=0, fail_run=0)
+ assert lastfailed == -1
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+ result, lastfailed = rlf(fail_import=1, fail_run=0)
+ assert sorted(list(lastfailed)) == ["test_maybe.py", "test_maybe2.py"]
+
+ result, lastfailed = rlf(fail_import=0, fail_run=0, args=("test_maybe2.py",))
+ assert list(lastfailed) == ["test_maybe.py"]
+
+ # edge case of test selection - even if we remember failures
+ # from other tests we still need to run all tests if no test
+ # matches the failures
+ result, lastfailed = rlf(fail_import=0, fail_run=0, args=("test_maybe2.py",))
+ assert list(lastfailed) == ["test_maybe.py"]
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ def test_lastfailed_creates_cache_when_needed(self, pytester: Pytester) -> None:
+ # Issue #1342
+ pytester.makepyfile(test_empty="")
+ pytester.runpytest("-q", "--lf")
+ assert not os.path.exists(".pytest_cache/v/cache/lastfailed")
+
+ pytester.makepyfile(test_successful="def test_success():\n assert True")
+ pytester.runpytest("-q", "--lf")
+ assert not os.path.exists(".pytest_cache/v/cache/lastfailed")
+
+ pytester.makepyfile(test_errored="def test_error():\n assert False")
+ pytester.runpytest("-q", "--lf")
+ assert os.path.exists(".pytest_cache/v/cache/lastfailed")
+
+ def test_xfail_not_considered_failure(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test(): assert 0
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 xfailed*"])
+ assert self.get_cached_last_failed(pytester) == []
+
+ def test_xfail_strict_considered_failure(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(strict=True)
+ def test(): pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 failed*"])
+ assert self.get_cached_last_failed(pytester) == [
+ "test_xfail_strict_considered_failure.py::test"
+ ]
+
+ @pytest.mark.parametrize("mark", ["mark.xfail", "mark.skip"])
+ def test_failed_changed_to_xfail_or_skip(
+ self, pytester: Pytester, mark: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def test(): assert 0
+ """
+ )
+ result = pytester.runpytest()
+ assert self.get_cached_last_failed(pytester) == [
+ "test_failed_changed_to_xfail_or_skip.py::test"
+ ]
+ assert result.ret == 1
+
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.{mark}
+ def test(): assert 0
+ """.format(
+ mark=mark
+ )
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+ assert self.get_cached_last_failed(pytester) == []
+ assert result.ret == 0
+
+ @pytest.mark.parametrize("quiet", [True, False])
+ @pytest.mark.parametrize("opt", ["--ff", "--lf"])
+ def test_lf_and_ff_prints_no_needless_message(
+ self, quiet: bool, opt: str, pytester: Pytester
+ ) -> None:
+ # Issue 3853
+ pytester.makepyfile("def test(): assert 0")
+ args = [opt]
+ if quiet:
+ args.append("-q")
+ result = pytester.runpytest(*args)
+ result.stdout.no_fnmatch_line("*run all*")
+
+ result = pytester.runpytest(*args)
+ if quiet:
+ result.stdout.no_fnmatch_line("*run all*")
+ else:
+ assert "rerun previous" in result.stdout.str()
+
+ def get_cached_last_failed(self, pytester: Pytester) -> List[str]:
+ config = pytester.parseconfigure()
+ assert config.cache is not None
+ return sorted(config.cache.get("cache/lastfailed", {}))
+
+ def test_cache_cumulative(self, pytester: Pytester) -> None:
+ """Test workflow where user fixes errors gradually file by file using --lf."""
+ # 1. initial run
+ test_bar = pytester.makepyfile(
+ test_bar="""
+ def test_bar_1(): pass
+ def test_bar_2(): assert 0
+ """
+ )
+ test_foo = pytester.makepyfile(
+ test_foo="""
+ def test_foo_3(): pass
+ def test_foo_4(): assert 0
+ """
+ )
+ pytester.runpytest()
+ assert self.get_cached_last_failed(pytester) == [
+ "test_bar.py::test_bar_2",
+ "test_foo.py::test_foo_4",
+ ]
+
+ # 2. fix test_bar_2, run only test_bar.py
+ pytester.makepyfile(
+ test_bar="""
+ def test_bar_1(): pass
+ def test_bar_2(): pass
+ """
+ )
+ result = pytester.runpytest(test_bar)
+ result.stdout.fnmatch_lines(["*2 passed*"])
+ # ensure cache does not forget that test_foo_4 failed once before
+ assert self.get_cached_last_failed(pytester) == ["test_foo.py::test_foo_4"]
+
+ result = pytester.runpytest("--last-failed")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 1 item",
+ "run-last-failure: rerun previous 1 failure (skipped 1 file)",
+ "*= 1 failed in *",
+ ]
+ )
+ assert self.get_cached_last_failed(pytester) == ["test_foo.py::test_foo_4"]
+
+ # 3. fix test_foo_4, run only test_foo.py
+ test_foo = pytester.makepyfile(
+ test_foo="""
+ def test_foo_3(): pass
+ def test_foo_4(): pass
+ """
+ )
+ result = pytester.runpytest(test_foo, "--last-failed")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items / 1 deselected / 1 selected",
+ "run-last-failure: rerun previous 1 failure",
+ "*= 1 passed, 1 deselected in *",
+ ]
+ )
+ assert self.get_cached_last_failed(pytester) == []
+
+ result = pytester.runpytest("--last-failed")
+ result.stdout.fnmatch_lines(["*4 passed*"])
+ assert self.get_cached_last_failed(pytester) == []
+
+ def test_lastfailed_no_failures_behavior_all_passed(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_1(): pass
+ def test_2(): pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*2 passed*"])
+ result = pytester.runpytest("--lf")
+ result.stdout.fnmatch_lines(["*2 passed*"])
+ result = pytester.runpytest("--lf", "--lfnf", "all")
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ # Ensure the list passed to pytest_deselected is a copy,
+ # and not a reference which is cleared right after.
+ pytester.makeconftest(
+ """
+ deselected = []
+
+ def pytest_deselected(items):
+ global deselected
+ deselected = items
+
+ def pytest_sessionfinish():
+ print("\\ndeselected={}".format(len(deselected)))
+ """
+ )
+
+ result = pytester.runpytest("--lf", "--lfnf", "none")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items / 2 deselected",
+ "run-last-failure: no previously failed tests, deselecting all items.",
+ "deselected=2",
+ "* 2 deselected in *",
+ ]
+ )
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+ def test_lastfailed_no_failures_behavior_empty_cache(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_1(): pass
+ def test_2(): assert 0
+ """
+ )
+ result = pytester.runpytest("--lf", "--cache-clear")
+ result.stdout.fnmatch_lines(["*1 failed*1 passed*"])
+ result = pytester.runpytest("--lf", "--cache-clear", "--lfnf", "all")
+ result.stdout.fnmatch_lines(["*1 failed*1 passed*"])
+ result = pytester.runpytest("--lf", "--cache-clear", "--lfnf", "none")
+ result.stdout.fnmatch_lines(["*2 desel*"])
+
+ def test_lastfailed_skip_collection(self, pytester: Pytester) -> None:
+ """
+ Test --lf behavior regarding skipping collection of files that are not marked as
+ failed in the cache (#5172).
+ """
+ pytester.makepyfile(
+ **{
+ "pkg1/test_1.py": """
+ import pytest
+
+ @pytest.mark.parametrize('i', range(3))
+ def test_1(i): pass
+ """,
+ "pkg2/test_2.py": """
+ import pytest
+
+ @pytest.mark.parametrize('i', range(5))
+ def test_1(i):
+ assert i not in (1, 3)
+ """,
+ }
+ )
+ # first run: collects 8 items (test_1: 3, test_2: 5)
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["collected 8 items", "*2 failed*6 passed*"])
+ # second run: collects only the 2 failures from test_2; test_1 is skipped entirely because all of its tests passed
+ result = pytester.runpytest("--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "run-last-failure: rerun previous 2 failures (skipped 1 file)",
+ "*= 2 failed in *",
+ ]
+ )
+
+ # add another file and check if message is correct when skipping more than 1 file
+ pytester.makepyfile(
+ **{
+ "pkg1/test_3.py": """
+ def test_3(): pass
+ """
+ }
+ )
+ result = pytester.runpytest("--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "run-last-failure: rerun previous 2 failures (skipped 2 files)",
+ "*= 2 failed in *",
+ ]
+ )
+
+ def test_lastfailed_with_known_failures_not_being_selected(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ **{
+ "pkg1/test_1.py": """def test_1(): assert 0""",
+ "pkg1/test_2.py": """def test_2(): pass""",
+ }
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["collected 2 items", "* 1 failed, 1 passed in *"])
+
+ Path("pkg1/test_1.py").unlink()
+ result = pytester.runpytest("--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 1 item",
+ "run-last-failure: 1 known failures not in selected tests",
+ "* 1 passed in *",
+ ]
+ )
+
+ # Recreate file with known failure.
+ pytester.makepyfile(**{"pkg1/test_1.py": """def test_1(): assert 0"""})
+ result = pytester.runpytest("--lf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 1 item",
+ "run-last-failure: rerun previous 1 failure (skipped 1 file)",
+ "* 1 failed in *",
+ ]
+ )
+
+ # Remove/rename test: collects the file again.
+ pytester.makepyfile(**{"pkg1/test_1.py": """def test_renamed(): assert 0"""})
+ result = pytester.runpytest("--lf", "-rf")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "run-last-failure: 1 known failures not in selected tests",
+ "pkg1/test_1.py F *",
+ "pkg1/test_2.py . *",
+ "FAILED pkg1/test_1.py::test_renamed - assert 0",
+ "* 1 failed, 1 passed in *",
+ ]
+ )
+
+ result = pytester.runpytest("--lf", "--co")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 1 item",
+ "run-last-failure: rerun previous 1 failure (skipped 1 file)",
+ "",
+ "<Module pkg1/test_1.py>",
+ " <Function test_renamed>",
+ ]
+ )
+
+ def test_lastfailed_args_with_deselected(self, pytester: Pytester) -> None:
+ """Test regression with --lf running into NoMatch error.
+
+ This was caused by it not collecting (non-failed) nodes given as
+ arguments.
+ """
+ pytester.makepyfile(
+ **{
+ "pkg1/test_1.py": """
+ def test_pass(): pass
+ def test_fail(): assert 0
+ """,
+ }
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["collected 2 items", "* 1 failed, 1 passed in *"])
+ assert result.ret == 1
+
+ result = pytester.runpytest("pkg1/test_1.py::test_pass", "--lf", "--co")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "*collected 1 item",
+ "run-last-failure: 1 known failures not in selected tests",
+ "",
+ "<Module pkg1/test_1.py>",
+ " <Function test_pass>",
+ ],
+ consecutive=True,
+ )
+
+ result = pytester.runpytest(
+ "pkg1/test_1.py::test_pass", "pkg1/test_1.py::test_fail", "--lf", "--co"
+ )
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items / 1 deselected / 1 selected",
+ "run-last-failure: rerun previous 1 failure",
+ "",
+ "<Module pkg1/test_1.py>",
+ " <Function test_fail>",
+ "*= 1/2 tests collected (1 deselected) in *",
+ ],
+ )
+
+ def test_lastfailed_with_class_items(self, pytester: Pytester) -> None:
+ """Test regression with --lf deselecting whole classes."""
+ pytester.makepyfile(
+ **{
+ "pkg1/test_1.py": """
+ class TestFoo:
+ def test_pass(self): pass
+ def test_fail(self): assert 0
+
+ def test_other(): assert 0
+ """,
+ }
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["collected 3 items", "* 2 failed, 1 passed in *"])
+ assert result.ret == 1
+
+ result = pytester.runpytest("--lf", "--co")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "collected 3 items / 1 deselected / 2 selected",
+ "run-last-failure: rerun previous 2 failures",
+ "",
+ "<Module pkg1/test_1.py>",
+ " <Class TestFoo>",
+ " <Function test_fail>",
+ " <Function test_other>",
+ "",
+ "*= 2/3 tests collected (1 deselected) in *",
+ ],
+ consecutive=True,
+ )
+
+ def test_lastfailed_with_all_filtered(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ **{
+ "pkg1/test_1.py": """
+ def test_fail(): assert 0
+ def test_pass(): pass
+ """,
+ }
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["collected 2 items", "* 1 failed, 1 passed in *"])
+ assert result.ret == 1
+
+ # Remove known failure.
+ pytester.makepyfile(
+ **{
+ "pkg1/test_1.py": """
+ def test_pass(): pass
+ """,
+ }
+ )
+ result = pytester.runpytest("--lf", "--co")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 1 item",
+ "run-last-failure: 1 known failures not in selected tests",
+ "",
+ "<Module pkg1/test_1.py>",
+ " <Function test_pass>",
+ "",
+ "*= 1 test collected in*",
+ ],
+ consecutive=True,
+ )
+ assert result.ret == 0
+
+ def test_packages(self, pytester: Pytester) -> None:
+ """Regression test for #7758.
+
+ The particular issue here was that Package nodes were included in the
+ filtering, being themselves Modules for the __init__.py, even if they
+ had failed Modules in them.
+
+ The test includes a test in an __init__.py file just to make sure the
+ fix doesn't somehow regress that; it is not critical for the issue.
+ """
+ pytester.makepyfile(
+ **{
+ "__init__.py": "",
+ "a/__init__.py": "def test_a_init(): assert False",
+ "a/test_one.py": "def test_1(): assert False",
+ "b/__init__.py": "",
+ "b/test_two.py": "def test_2(): assert False",
+ },
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ python_files = *.py
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(failed=3)
+ result = pytester.runpytest("--lf")
+ result.assert_outcomes(failed=3)
+
+
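Everything TestLastFailed asserts about --lf and --ff ultimately reads and writes a single cache key, "cache/lastfailed", whose value is a JSON object keyed by the node ids (or file paths, for collection errors) of the last failures. A small sketch of inspecting it from outside pytest, assuming the default cache_dir of .pytest_cache:

import json
from pathlib import Path
from typing import List

def last_failed_nodeids(rootdir: str = ".") -> List[str]:
    """Return what a subsequent "pytest --lf" would try to rerun."""
    path = Path(rootdir, ".pytest_cache", "v", "cache", "lastfailed")
    if not path.is_file():
        # nothing recorded yet; see test_lastfailed_creates_cache_when_needed above
        return []
    return sorted(json.loads(path.read_text(encoding="utf-8")))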
+class TestNewFirst:
+ def test_newfirst_usecase(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ **{
+ "test_1/test_1.py": """
+ def test_1(): assert 1
+ """,
+ "test_2/test_2.py": """
+ def test_1(): assert 1
+ """,
+ }
+ )
+
+ p1 = pytester.path.joinpath("test_1/test_1.py")
+ os.utime(p1, ns=(p1.stat().st_atime_ns, int(1e9)))
+
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ ["*test_1/test_1.py::test_1 PASSED*", "*test_2/test_2.py::test_1 PASSED*"]
+ )
+
+ result = pytester.runpytest("-v", "--nf")
+ result.stdout.fnmatch_lines(
+ ["*test_2/test_2.py::test_1 PASSED*", "*test_1/test_1.py::test_1 PASSED*"]
+ )
+
+ p1.write_text("def test_1(): assert 1\n" "def test_2(): assert 1\n")
+ os.utime(p1, ns=(p1.stat().st_atime_ns, int(1e9)))
+
+ result = pytester.runpytest("--nf", "--collect-only", "-q")
+ result.stdout.fnmatch_lines(
+ [
+ "test_1/test_1.py::test_2",
+ "test_2/test_2.py::test_1",
+ "test_1/test_1.py::test_1",
+ ]
+ )
+
+ # Newest first with (plugin) pytest_collection_modifyitems hook.
+ pytester.makepyfile(
+ myplugin="""
+ def pytest_collection_modifyitems(items):
+ items[:] = sorted(items, key=lambda item: item.nodeid)
+ print("new_items:", [x.nodeid for x in items])
+ """
+ )
+ pytester.syspathinsert()
+ result = pytester.runpytest("--nf", "-p", "myplugin", "--collect-only", "-q")
+ result.stdout.fnmatch_lines(
+ [
+ "new_items: *test_1.py*test_1.py*test_2.py*",
+ "test_1/test_1.py::test_2",
+ "test_2/test_2.py::test_1",
+ "test_1/test_1.py::test_1",
+ ]
+ )
+
+ def test_newfirst_parametrize(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ **{
+ "test_1/test_1.py": """
+ import pytest
+ @pytest.mark.parametrize('num', [1, 2])
+ def test_1(num): assert num
+ """,
+ "test_2/test_2.py": """
+ import pytest
+ @pytest.mark.parametrize('num', [1, 2])
+ def test_1(num): assert num
+ """,
+ }
+ )
+
+ p1 = pytester.path.joinpath("test_1/test_1.py")
+ os.utime(p1, ns=(p1.stat().st_atime_ns, int(1e9)))
+
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ [
+ "*test_1/test_1.py::test_1[1*",
+ "*test_1/test_1.py::test_1[2*",
+ "*test_2/test_2.py::test_1[1*",
+ "*test_2/test_2.py::test_1[2*",
+ ]
+ )
+
+ result = pytester.runpytest("-v", "--nf")
+ result.stdout.fnmatch_lines(
+ [
+ "*test_2/test_2.py::test_1[1*",
+ "*test_2/test_2.py::test_1[2*",
+ "*test_1/test_1.py::test_1[1*",
+ "*test_1/test_1.py::test_1[2*",
+ ]
+ )
+
+ p1.write_text(
+ "import pytest\n"
+ "@pytest.mark.parametrize('num', [1, 2, 3])\n"
+ "def test_1(num): assert num\n"
+ )
+ os.utime(p1, ns=(p1.stat().st_atime_ns, int(1e9)))
+
+ # Running only a subset does not forget about existing ones.
+ result = pytester.runpytest("-v", "--nf", "test_2/test_2.py")
+ result.stdout.fnmatch_lines(
+ ["*test_2/test_2.py::test_1[1*", "*test_2/test_2.py::test_1[2*"]
+ )
+
+ result = pytester.runpytest("-v", "--nf")
+ result.stdout.fnmatch_lines(
+ [
+ "*test_1/test_1.py::test_1[3*",
+ "*test_2/test_2.py::test_1[1*",
+ "*test_2/test_2.py::test_1[2*",
+ "*test_1/test_1.py::test_1[1*",
+ "*test_1/test_1.py::test_1[2*",
+ ]
+ )
+
+
+class TestReadme:
+ def check_readme(self, pytester: Pytester) -> bool:
+ config = pytester.parseconfigure()
+ assert config.cache is not None
+ readme = config.cache._cachedir.joinpath("README.md")
+ return readme.is_file()
+
+ def test_readme_passed(self, pytester: Pytester) -> None:
+ pytester.makepyfile("def test_always_passes(): pass")
+ pytester.runpytest()
+ assert self.check_readme(pytester) is True
+
+ def test_readme_failed(self, pytester: Pytester) -> None:
+ pytester.makepyfile("def test_always_fails(): assert 0")
+ pytester.runpytest()
+ assert self.check_readme(pytester) is True
+
+
+def test_gitignore(pytester: Pytester) -> None:
+ """Ensure we automatically create .gitignore file in the pytest_cache directory (#3286)."""
+ from _pytest.cacheprovider import Cache
+
+ config = pytester.parseconfig()
+ cache = Cache.for_config(config, _ispytest=True)
+ cache.set("foo", "bar")
+ msg = "# Created by pytest automatically.\n*\n"
+ gitignore_path = cache._cachedir.joinpath(".gitignore")
+ assert gitignore_path.read_text(encoding="UTF-8") == msg
+
+ # Does not overwrite existing/custom one.
+ gitignore_path.write_text("custom")
+ cache.set("something", "else")
+ assert gitignore_path.read_text(encoding="UTF-8") == "custom"
+
+
+def test_preserve_keys_order(pytester: Pytester) -> None:
+ """Ensure keys order is preserved when saving dicts (#9205)."""
+ from _pytest.cacheprovider import Cache
+
+ config = pytester.parseconfig()
+ cache = Cache.for_config(config, _ispytest=True)
+ cache.set("foo", {"z": 1, "b": 2, "a": 3, "d": 10})
+ read_back = cache.get("foo", None)
+ assert list(read_back.items()) == [("z", 1), ("b", 2), ("a", 3), ("d", 10)]
+
+
+def test_does_not_create_boilerplate_in_existing_dirs(pytester: Pytester) -> None:
+ from _pytest.cacheprovider import Cache
+
+ pytester.makeini(
+ """
+ [pytest]
+ cache_dir = .
+ """
+ )
+ config = pytester.parseconfig()
+ cache = Cache.for_config(config, _ispytest=True)
+ cache.set("foo", "bar")
+
+ assert os.path.isdir("v") # cache contents
+ assert not os.path.exists(".gitignore")
+ assert not os.path.exists("README.md")
+
+
+def test_cachedir_tag(pytester: Pytester) -> None:
+ """Ensure we automatically create CACHEDIR.TAG file in the pytest_cache directory (#4278)."""
+ from _pytest.cacheprovider import Cache
+ from _pytest.cacheprovider import CACHEDIR_TAG_CONTENT
+
+ config = pytester.parseconfig()
+ cache = Cache.for_config(config, _ispytest=True)
+ cache.set("foo", "bar")
+ cachedir_tag_path = cache._cachedir.joinpath("CACHEDIR.TAG")
+ assert cachedir_tag_path.read_bytes() == CACHEDIR_TAG_CONTENT
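The boilerplate tests above (README.md, .gitignore, CACHEDIR.TAG, and the v/ and d/ entries seen in --cache-show) all describe the same on-disk layout: JSON values under <cache_dir>/v/ and mkdir() directories under <cache_dir>/d/. A rough, read-only sketch of what "pytest --cache-show" reports, assuming the default cache_dir:

import json
from pathlib import Path

def show_cache(cachedir: str = ".pytest_cache") -> None:
    """Print cached values and directory entries, roughly like "pytest --cache-show"."""
    root = Path(cachedir)
    values, dirs = root / "v", root / "d"
    if values.is_dir():
        for value_file in sorted(p for p in values.rglob("*") if p.is_file()):
            key = value_file.relative_to(values).as_posix()
            print(f"{key} contains: {json.loads(value_file.read_text(encoding='utf-8'))!r}")
    if dirs.is_dir():
        for entry in sorted(p for p in dirs.rglob("*") if p.is_file()):
            print(f"{entry.relative_to(root).as_posix()} (length {entry.stat().st_size})")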
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_capture.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_capture.py
new file mode 100644
index 0000000000..1bc1f2f8db
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_capture.py
@@ -0,0 +1,1666 @@
+import contextlib
+import io
+import os
+import subprocess
+import sys
+import textwrap
+from io import UnsupportedOperation
+from typing import BinaryIO
+from typing import cast
+from typing import Generator
+from typing import TextIO
+
+import pytest
+from _pytest import capture
+from _pytest.capture import _get_multicapture
+from _pytest.capture import CaptureFixture
+from _pytest.capture import CaptureManager
+from _pytest.capture import CaptureResult
+from _pytest.capture import MultiCapture
+from _pytest.config import ExitCode
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+
+# note: py.io capture tests were copied from
+# pylib 1.4.20.dev2 (rev 13d9af95547e)
+
+
+def StdCaptureFD(
+ out: bool = True, err: bool = True, in_: bool = True
+) -> MultiCapture[str]:
+ return capture.MultiCapture(
+ in_=capture.FDCapture(0) if in_ else None,
+ out=capture.FDCapture(1) if out else None,
+ err=capture.FDCapture(2) if err else None,
+ )
+
+
+def StdCapture(
+ out: bool = True, err: bool = True, in_: bool = True
+) -> MultiCapture[str]:
+ return capture.MultiCapture(
+ in_=capture.SysCapture(0) if in_ else None,
+ out=capture.SysCapture(1) if out else None,
+ err=capture.SysCapture(2) if err else None,
+ )
+
+
+def TeeStdCapture(
+ out: bool = True, err: bool = True, in_: bool = True
+) -> MultiCapture[str]:
+ return capture.MultiCapture(
+ in_=capture.SysCapture(0, tee=True) if in_ else None,
+ out=capture.SysCapture(1, tee=True) if out else None,
+ err=capture.SysCapture(2, tee=True) if err else None,
+ )
+
+
+class TestCaptureManager:
+ @pytest.mark.parametrize("method", ["no", "sys", "fd"])
+ def test_capturing_basic_api(self, method) -> None:
+ capouter = StdCaptureFD()
+ old = sys.stdout, sys.stderr, sys.stdin
+ try:
+ capman = CaptureManager(method)
+ capman.start_global_capturing()
+ capman.suspend_global_capture()
+ outerr = capman.read_global_capture()
+ assert outerr == ("", "")
+ capman.suspend_global_capture()
+ outerr = capman.read_global_capture()
+ assert outerr == ("", "")
+ print("hello")
+ capman.suspend_global_capture()
+ out, err = capman.read_global_capture()
+ if method == "no":
+ assert old == (sys.stdout, sys.stderr, sys.stdin)
+ else:
+ assert not out
+ capman.resume_global_capture()
+ print("hello")
+ capman.suspend_global_capture()
+ out, err = capman.read_global_capture()
+ if method != "no":
+ assert out == "hello\n"
+ capman.stop_global_capturing()
+ finally:
+ capouter.stop_capturing()
+
+ def test_init_capturing(self):
+ capouter = StdCaptureFD()
+ try:
+ capman = CaptureManager("fd")
+ capman.start_global_capturing()
+ pytest.raises(AssertionError, capman.start_global_capturing)
+ capman.stop_global_capturing()
+ finally:
+ capouter.stop_capturing()
+
+
+@pytest.mark.parametrize("method", ["fd", "sys"])
+def test_capturing_unicode(pytester: Pytester, method: str) -> None:
+ obj = "'b\u00f6y'"
+ pytester.makepyfile(
+ """\
+ # taken from issue 227 from nosetests
+ def test_unicode():
+ import sys
+ print(sys.stdout)
+ print(%s)
+ """
+ % obj
+ )
+ result = pytester.runpytest("--capture=%s" % method)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+@pytest.mark.parametrize("method", ["fd", "sys"])
+def test_capturing_bytes_in_utf8_encoding(pytester: Pytester, method: str) -> None:
+ pytester.makepyfile(
+ """\
+ def test_unicode():
+ print('b\\u00f6y')
+ """
+ )
+ result = pytester.runpytest("--capture=%s" % method)
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_collect_capturing(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import sys
+
+ print("collect %s failure" % 13)
+ sys.stderr.write("collect %s_stderr failure" % 13)
+ import xyz42123
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*Captured stdout*",
+ "collect 13 failure",
+ "*Captured stderr*",
+ "collect 13_stderr failure",
+ ]
+ )
+
+
+class TestPerTestCapturing:
+ def test_capture_and_fixtures(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def setup_module(mod):
+ print("setup module")
+ def setup_function(function):
+ print("setup " + function.__name__)
+ def test_func1():
+ print("in func1")
+ assert 0
+ def test_func2():
+ print("in func2")
+ assert 0
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "setup module*",
+ "setup test_func1*",
+ "in func1*",
+ "setup test_func2*",
+ "in func2*",
+ ]
+ )
+
+ @pytest.mark.xfail(reason="unimplemented feature")
+ def test_capture_scope_cache(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import sys
+ def setup_module(func):
+ print("module-setup")
+ def setup_function(func):
+ print("function-setup")
+ def test_func():
+ print("in function")
+ assert 0
+ def teardown_function(func):
+ print("in teardown")
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*test_func():*",
+ "*Captured stdout during setup*",
+ "module-setup*",
+ "function-setup*",
+ "*Captured stdout*",
+ "in teardown*",
+ ]
+ )
+
+ def test_no_carry_over(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def test_func1():
+ print("in func1")
+ def test_func2():
+ print("in func2")
+ assert 0
+ """
+ )
+ result = pytester.runpytest(p)
+ s = result.stdout.str()
+ assert "in func1" not in s
+ assert "in func2" in s
+
+ def test_teardown_capturing(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def setup_function(function):
+ print("setup func1")
+ def teardown_function(function):
+ print("teardown func1")
+ assert 0
+ def test_func1():
+ print("in func1")
+ pass
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*teardown_function*",
+ "*Captured stdout*",
+ "setup func1*",
+ "in func1*",
+ "teardown func1*",
+ # "*1 fixture failure*"
+ ]
+ )
+
+ def test_teardown_capturing_final(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def teardown_module(mod):
+ print("teardown module")
+ assert 0
+ def test_func():
+ pass
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*def teardown_module(mod):*",
+ "*Captured stdout*",
+ "*teardown module*",
+ "*1 error*",
+ ]
+ )
+
+ def test_capturing_outerr(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """\
+ import sys
+ def test_capturing():
+ print(42)
+ sys.stderr.write(str(23))
+ def test_capturing_error():
+ print(1)
+ sys.stderr.write(str(2))
+ raise ValueError
+ """
+ )
+ result = pytester.runpytest(p1)
+ result.stdout.fnmatch_lines(
+ [
+ "*test_capturing_outerr.py .F*",
+ "====* FAILURES *====",
+ "____*____",
+ "*test_capturing_outerr.py:8: ValueError",
+ "*--- Captured stdout *call*",
+ "1",
+ "*--- Captured stderr *call*",
+ "2",
+ ]
+ )
+
+
+class TestLoggingInteraction:
+ def test_logging_stream_ownership(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """\
+ def test_logging():
+ import logging
+ import pytest
+ stream = capture.CaptureIO()
+ logging.basicConfig(stream=stream)
+ stream.close() # to free memory/release resources
+ """
+ )
+ result = pytester.runpytest_subprocess(p)
+ assert result.stderr.str().find("atexit") == -1
+
+ def test_logging_and_immediate_setupteardown(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """\
+ import logging
+ def setup_function(function):
+ logging.warning("hello1")
+
+ def test_logging():
+ logging.warning("hello2")
+ assert 0
+
+ def teardown_function(function):
+ logging.warning("hello3")
+ assert 0
+ """
+ )
+ for optargs in (("--capture=sys",), ("--capture=fd",)):
+ print(optargs)
+ result = pytester.runpytest_subprocess(p, *optargs)
+ s = result.stdout.str()
+ result.stdout.fnmatch_lines(
+ ["*WARN*hello3", "*WARN*hello1", "*WARN*hello2"] # errors show first!
+ )
+ # verify proper termination
+ assert "closed" not in s
+
+ def test_logging_and_crossscope_fixtures(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """\
+ import logging
+ def setup_module(function):
+ logging.warning("hello1")
+
+ def test_logging():
+ logging.warning("hello2")
+ assert 0
+
+ def teardown_module(function):
+ logging.warning("hello3")
+ assert 0
+ """
+ )
+ for optargs in (("--capture=sys",), ("--capture=fd",)):
+ print(optargs)
+ result = pytester.runpytest_subprocess(p, *optargs)
+ s = result.stdout.str()
+ result.stdout.fnmatch_lines(
+ ["*WARN*hello3", "*WARN*hello1", "*WARN*hello2"] # errors come first
+ )
+ # verify proper termination
+ assert "closed" not in s
+
+ def test_conftestlogging_is_shown(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """\
+ import logging
+ logging.basicConfig()
+ logging.warning("hello435")
+ """
+ )
+ # make sure that logging is still captured in tests
+ result = pytester.runpytest_subprocess("-s", "-p", "no:capturelog")
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ result.stderr.fnmatch_lines(["WARNING*hello435*"])
+ assert "operation on closed file" not in result.stderr.str()
+
+ def test_conftestlogging_and_test_logging(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """\
+ import logging
+ logging.basicConfig()
+ """
+ )
+ # make sure that logging is still captured in tests
+ p = pytester.makepyfile(
+ """\
+ def test_hello():
+ import logging
+ logging.warning("hello433")
+ assert 0
+ """
+ )
+ result = pytester.runpytest_subprocess(p, "-p", "no:capturelog")
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(["WARNING*hello433*"])
+ assert "something" not in result.stderr.str()
+ assert "operation on closed file" not in result.stderr.str()
+
+ def test_logging_after_cap_stopped(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """\
+ import pytest
+ import logging
+
+ log = logging.getLogger(__name__)
+
+ @pytest.fixture
+ def log_on_teardown():
+ yield
+ log.warning('Logging on teardown')
+ """
+ )
+ # make sure that logging is still captured in tests
+ p = pytester.makepyfile(
+ """\
+ def test_hello(log_on_teardown):
+ import logging
+ logging.warning("hello433")
+ assert 1
+ raise KeyboardInterrupt()
+ """
+ )
+ result = pytester.runpytest_subprocess(p, "--log-cli-level", "info")
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(
+ ["*WARNING*hello433*", "*WARNING*Logging on teardown*"]
+ )
+ assert (
+ "AttributeError: 'NoneType' object has no attribute 'resume_capturing'"
+ not in result.stderr.str()
+ )
+
+
+class TestCaptureFixture:
+ @pytest.mark.parametrize("opt", [[], ["-s"]])
+ def test_std_functional(self, pytester: Pytester, opt) -> None:
+ reprec = pytester.inline_runsource(
+ """\
+ def test_hello(capsys):
+ print(42)
+ out, err = capsys.readouterr()
+ assert out.startswith("42")
+ """,
+ *opt,
+ )
+ reprec.assertoutcome(passed=1)
+
+ def test_capsyscapfd(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """\
+ def test_one(capsys, capfd):
+ pass
+ def test_two(capfd, capsys):
+ pass
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR*setup*test_one*",
+ "E*capfd*capsys*same*time*",
+ "*ERROR*setup*test_two*",
+ "E*capsys*capfd*same*time*",
+ "*2 errors*",
+ ]
+ )
+
+ def test_capturing_getfixturevalue(self, pytester: Pytester) -> None:
+ """Test that asking for "capfd" and "capsys" using request.getfixturevalue
+ in the same test is an error.
+ """
+ pytester.makepyfile(
+ """\
+ def test_one(capsys, request):
+ request.getfixturevalue("capfd")
+ def test_two(capfd, request):
+ request.getfixturevalue("capsys")
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*test_one*",
+ "E * cannot use capfd and capsys at the same time",
+ "*test_two*",
+ "E * cannot use capsys and capfd at the same time",
+ "*2 failed in*",
+ ]
+ )
+
+ def test_capsyscapfdbinary(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """\
+ def test_one(capsys, capfdbinary):
+ pass
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ ["*ERROR*setup*test_one*", "E*capfdbinary*capsys*same*time*", "*1 error*"]
+ )
+
+ @pytest.mark.parametrize("method", ["sys", "fd"])
+ def test_capture_is_represented_on_failure_issue128(
+ self, pytester: Pytester, method
+ ) -> None:
+ p = pytester.makepyfile(
+ """\
+ def test_hello(cap{}):
+ print("xxx42xxx")
+ assert 0
+ """.format(
+ method
+ )
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(["xxx42xxx"])
+
+ def test_stdfd_functional(self, pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """\
+ def test_hello(capfd):
+ import os
+ os.write(1, b"42")
+ out, err = capfd.readouterr()
+ assert out.startswith("42")
+ capfd.close()
+ """
+ )
+ reprec.assertoutcome(passed=1)
+
+ @pytest.mark.parametrize("nl", ("\n", "\r\n", "\r"))
+    def test_capfd_preserves_newlines(self, capfd, nl) -> None:
+ print("test", end=nl)
+ out, err = capfd.readouterr()
+ assert out.endswith(nl)
+
+ def test_capfdbinary(self, pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """\
+ def test_hello(capfdbinary):
+ import os
+ # some likely un-decodable bytes
+ os.write(1, b'\\xfe\\x98\\x20')
+ out, err = capfdbinary.readouterr()
+ assert out == b'\\xfe\\x98\\x20'
+ assert err == b''
+ """
+ )
+ reprec.assertoutcome(passed=1)
+
+ def test_capsysbinary(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ r"""
+ def test_hello(capsysbinary):
+ import sys
+
+ sys.stdout.buffer.write(b'hello')
+
+ # Some likely un-decodable bytes.
+ sys.stdout.buffer.write(b'\xfe\x98\x20')
+
+ sys.stdout.buffer.flush()
+
+ # Ensure writing in text mode still works and is captured.
+ # https://github.com/pytest-dev/pytest/issues/6871
+ print("world", flush=True)
+
+ out, err = capsysbinary.readouterr()
+ assert out == b'hello\xfe\x98\x20world\n'
+ assert err == b''
+
+ print("stdout after")
+ print("stderr after", file=sys.stderr)
+ """
+ )
+ result = pytester.runpytest(str(p1), "-rA")
+ result.stdout.fnmatch_lines(
+ [
+ "*- Captured stdout call -*",
+ "stdout after",
+ "*- Captured stderr call -*",
+ "stderr after",
+ "*= 1 passed in *",
+ ]
+ )
+
+ def test_partial_setup_failure(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """\
+ def test_hello(capsys, missingarg):
+ pass
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(["*test_partial_setup_failure*", "*1 error*"])
+
+ def test_keyboardinterrupt_disables_capturing(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """\
+ def test_hello(capfd):
+ import os
+ os.write(1, b'42')
+ raise KeyboardInterrupt()
+ """
+ )
+ result = pytester.runpytest_subprocess(p)
+ result.stdout.fnmatch_lines(["*KeyboardInterrupt*"])
+ assert result.ret == 2
+
+ def test_capture_and_logging(self, pytester: Pytester) -> None:
+        """Issue #14: capturing combined with logging must not leave behind a closed-file error."""
+ p = pytester.makepyfile(
+ """\
+ import logging
+ def test_log(capsys):
+ logging.error('x')
+ """
+ )
+ result = pytester.runpytest_subprocess(p)
+ assert "closed" not in result.stderr.str()
+
+ @pytest.mark.parametrize("fixture", ["capsys", "capfd"])
+ @pytest.mark.parametrize("no_capture", [True, False])
+ def test_disabled_capture_fixture(
+ self, pytester: Pytester, fixture: str, no_capture: bool
+ ) -> None:
+ pytester.makepyfile(
+ """\
+ def test_disabled({fixture}):
+ print('captured before')
+ with {fixture}.disabled():
+ print('while capture is disabled')
+ print('captured after')
+ assert {fixture}.readouterr() == ('captured before\\ncaptured after\\n', '')
+
+ def test_normal():
+ print('test_normal executed')
+ """.format(
+ fixture=fixture
+ )
+ )
+ args = ("-s",) if no_capture else ()
+ result = pytester.runpytest_subprocess(*args)
+ result.stdout.fnmatch_lines(["*while capture is disabled*", "*= 2 passed in *"])
+ result.stdout.no_fnmatch_line("*captured before*")
+ result.stdout.no_fnmatch_line("*captured after*")
+ if no_capture:
+ assert "test_normal executed" in result.stdout.str()
+ else:
+ result.stdout.no_fnmatch_line("*test_normal executed*")
+
+ def test_disabled_capture_fixture_twice(self, pytester: Pytester) -> None:
+ """Test that an inner disabled() exit doesn't undo an outer disabled().
+
+ Issue #7148.
+ """
+ pytester.makepyfile(
+ """
+ def test_disabled(capfd):
+ print('captured before')
+ with capfd.disabled():
+ print('while capture is disabled 1')
+ with capfd.disabled():
+ print('while capture is disabled 2')
+ print('while capture is disabled 1 after')
+ print('captured after')
+ assert capfd.readouterr() == ('captured before\\ncaptured after\\n', '')
+ """
+ )
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(
+ [
+ "*while capture is disabled 1",
+ "*while capture is disabled 2",
+ "*while capture is disabled 1 after",
+ ],
+ consecutive=True,
+ )
+
+ @pytest.mark.parametrize("fixture", ["capsys", "capfd"])
+ def test_fixture_use_by_other_fixtures(self, pytester: Pytester, fixture) -> None:
+ """Ensure that capsys and capfd can be used by other fixtures during
+ setup and teardown."""
+ pytester.makepyfile(
+ """\
+ import sys
+ import pytest
+
+ @pytest.fixture
+ def captured_print({fixture}):
+ print('stdout contents begin')
+ print('stderr contents begin', file=sys.stderr)
+ out, err = {fixture}.readouterr()
+
+ yield out, err
+
+ print('stdout contents end')
+ print('stderr contents end', file=sys.stderr)
+ out, err = {fixture}.readouterr()
+ assert out == 'stdout contents end\\n'
+ assert err == 'stderr contents end\\n'
+
+ def test_captured_print(captured_print):
+ out, err = captured_print
+ assert out == 'stdout contents begin\\n'
+ assert err == 'stderr contents begin\\n'
+ """.format(
+ fixture=fixture
+ )
+ )
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result.stdout.no_fnmatch_line("*stdout contents begin*")
+ result.stdout.no_fnmatch_line("*stderr contents begin*")
+
+ @pytest.mark.parametrize("cap", ["capsys", "capfd"])
+ def test_fixture_use_by_other_fixtures_teardown(
+ self, pytester: Pytester, cap
+ ) -> None:
+        """Ensure that output captured during setup and call is accessible from teardown when using capsys/capfd (#3033)."""
+ pytester.makepyfile(
+ """\
+ import sys
+ import pytest
+ import os
+
+ @pytest.fixture()
+ def fix({cap}):
+ print("setup out")
+ sys.stderr.write("setup err\\n")
+ yield
+ out, err = {cap}.readouterr()
+ assert out == 'setup out\\ncall out\\n'
+ assert err == 'setup err\\ncall err\\n'
+
+ def test_a(fix):
+ print("call out")
+ sys.stderr.write("call err\\n")
+ """.format(
+ cap=cap
+ )
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+def test_setup_failure_does_not_kill_capturing(pytester: Pytester) -> None:
+ sub1 = pytester.mkpydir("sub1")
+ sub1.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ def pytest_runtest_setup(item):
+ raise ValueError(42)
+ """
+ )
+ )
+ sub1.joinpath("test_mod.py").write_text("def test_func1(): pass")
+ result = pytester.runpytest(pytester.path, "--traceconfig")
+ result.stdout.fnmatch_lines(["*ValueError(42)*", "*1 error*"])
+
+
+def test_capture_conftest_runtest_setup(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_runtest_setup():
+ print("hello19")
+ """
+ )
+ pytester.makepyfile("def test_func(): pass")
+ result = pytester.runpytest()
+ assert result.ret == 0
+ result.stdout.no_fnmatch_line("*hello19*")
+
+
+def test_capture_badoutput_issue412(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import os
+
+ def test_func():
+ omg = bytearray([1,129,1])
+ os.write(1, omg)
+ assert 0
+ """
+ )
+ result = pytester.runpytest("--capture=fd")
+ result.stdout.fnmatch_lines(
+ """
+ *def test_func*
+ *assert 0*
+ *Captured*
+ *1 failed*
+ """
+ )
+
+
+def test_capture_early_option_parsing(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_runtest_setup():
+ print("hello19")
+ """
+ )
+ pytester.makepyfile("def test_func(): pass")
+ result = pytester.runpytest("-vs")
+ assert result.ret == 0
+ assert "hello19" in result.stdout.str()
+
+
+def test_capture_binary_output(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ r"""
+ import pytest
+
+ def test_a():
+ import sys
+ import subprocess
+ subprocess.call([sys.executable, __file__])
+
+ def test_foo():
+ import os;os.write(1, b'\xc3')
+
+ if __name__ == '__main__':
+ test_foo()
+ """
+ )
+ result = pytester.runpytest("--assert=plain")
+ result.assert_outcomes(passed=2)
+
+
+def test_error_during_readouterr(pytester: Pytester) -> None:
+ """Make sure we suspend capturing if errors occur during readouterr"""
+ pytester.makepyfile(
+ pytest_xyz="""
+ from _pytest.capture import FDCapture
+
+ def bad_snap(self):
+ raise Exception('boom')
+
+ assert FDCapture.snap
+ FDCapture.snap = bad_snap
+ """
+ )
+ result = pytester.runpytest_subprocess("-p", "pytest_xyz", "--version")
+ result.stderr.fnmatch_lines(
+ ["*in bad_snap", " raise Exception('boom')", "Exception: boom"]
+ )
+
+
+class TestCaptureIO:
+ def test_text(self) -> None:
+ f = capture.CaptureIO()
+ f.write("hello")
+ s = f.getvalue()
+ assert s == "hello"
+ f.close()
+
+ def test_unicode_and_str_mixture(self) -> None:
+ f = capture.CaptureIO()
+ f.write("\u00f6")
+ pytest.raises(TypeError, f.write, b"hello")
+
+ def test_write_bytes_to_buffer(self) -> None:
+        """In Python 3, stdout/stderr are text IO wrappers (exposing a buffer
+        property of the underlying byte stream). See issue #1407
+ """
+ f = capture.CaptureIO()
+ f.buffer.write(b"foo\r\n")
+ assert f.getvalue() == "foo\r\n"
+
+
+class TestTeeCaptureIO(TestCaptureIO):
+ def test_text(self) -> None:
+ sio = io.StringIO()
+ f = capture.TeeCaptureIO(sio)
+ f.write("hello")
+ s1 = f.getvalue()
+ assert s1 == "hello"
+ s2 = sio.getvalue()
+ assert s2 == s1
+ f.close()
+ sio.close()
+
+ def test_unicode_and_str_mixture(self) -> None:
+ sio = io.StringIO()
+ f = capture.TeeCaptureIO(sio)
+ f.write("\u00f6")
+ pytest.raises(TypeError, f.write, b"hello")
+
+
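+# DontReadFromInput is the object pytest substitutes for sys.stdin while
+# capturing; any attempt to read from it should fail loudly rather than block.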
+def test_dontreadfrominput() -> None:
+ from _pytest.capture import DontReadFromInput
+
+ f = DontReadFromInput()
+ assert f.buffer is f
+ assert not f.isatty()
+ pytest.raises(OSError, f.read)
+ pytest.raises(OSError, f.readlines)
+ iter_f = iter(f)
+ pytest.raises(OSError, next, iter_f)
+ pytest.raises(UnsupportedOperation, f.fileno)
+ f.close() # just for completeness
+
+
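+# CaptureResult behaves like an (out, err) named tuple: unpacking, indexing,
+# comparison with plain tuples, hashing and _replace() are all supported.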
+def test_captureresult() -> None:
+ cr = CaptureResult("out", "err")
+ assert len(cr) == 2
+ assert cr.out == "out"
+ assert cr.err == "err"
+ out, err = cr
+ assert out == "out"
+ assert err == "err"
+ assert cr[0] == "out"
+ assert cr[1] == "err"
+ assert cr == cr
+ assert cr == CaptureResult("out", "err")
+ assert cr != CaptureResult("wrong", "err")
+ assert cr == ("out", "err")
+ assert cr != ("out", "wrong")
+ assert hash(cr) == hash(CaptureResult("out", "err"))
+ assert hash(cr) == hash(("out", "err"))
+ assert hash(cr) != hash(("out", "wrong"))
+ assert cr < ("z",)
+ assert cr < ("z", "b")
+ assert cr < ("z", "b", "c")
+ assert cr.count("err") == 1
+ assert cr.count("wrong") == 0
+ assert cr.index("err") == 1
+ with pytest.raises(ValueError):
+ assert cr.index("wrong") == 0
+ assert next(iter(cr)) == "out"
+ assert cr._replace(err="replaced") == ("out", "replaced")
+
+
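+# Fixture yielding a writable binary temporary file created via pytester;
+# it is closed on teardown if the test left it open.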
+@pytest.fixture
+def tmpfile(pytester: Pytester) -> Generator[BinaryIO, None, None]:
+ f = pytester.makepyfile("").open("wb+")
+ yield f
+ if not f.closed:
+ f.close()
+
+
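+# Compare the number of open regular files ("REG" lines in lsof output) before
+# and after the wrapped block to catch file-descriptor leaks; skips when lsof
+# is unavailable.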
+@contextlib.contextmanager
+def lsof_check():
+ pid = os.getpid()
+ try:
+ out = subprocess.check_output(("lsof", "-p", str(pid))).decode()
+ except (OSError, subprocess.CalledProcessError, UnicodeDecodeError) as exc:
+ # about UnicodeDecodeError, see note on pytester
+ pytest.skip(f"could not run 'lsof' ({exc!r})")
+ yield
+ out2 = subprocess.check_output(("lsof", "-p", str(pid))).decode()
+ len1 = len([x for x in out.split("\n") if "REG" in x])
+ len2 = len([x for x in out2.split("\n") if "REG" in x])
+ assert len2 < len1 + 3, out2
+
+
+class TestFDCapture:
+ def test_simple(self, tmpfile: BinaryIO) -> None:
+ fd = tmpfile.fileno()
+ cap = capture.FDCapture(fd)
+ data = b"hello"
+ os.write(fd, data)
+ pytest.raises(AssertionError, cap.snap)
+ cap.done()
+ cap = capture.FDCapture(fd)
+ cap.start()
+ os.write(fd, data)
+ s = cap.snap()
+ cap.done()
+ assert s == "hello"
+
+ def test_simple_many(self, tmpfile: BinaryIO) -> None:
+ for i in range(10):
+ self.test_simple(tmpfile)
+
+ def test_simple_many_check_open_files(self, pytester: Pytester) -> None:
+ with lsof_check():
+ with pytester.makepyfile("").open("wb+") as tmpfile:
+ self.test_simple_many(tmpfile)
+
+ def test_simple_fail_second_start(self, tmpfile: BinaryIO) -> None:
+ fd = tmpfile.fileno()
+ cap = capture.FDCapture(fd)
+ cap.done()
+ pytest.raises(AssertionError, cap.start)
+
+ def test_stderr(self) -> None:
+ cap = capture.FDCapture(2)
+ cap.start()
+ print("hello", file=sys.stderr)
+ s = cap.snap()
+ cap.done()
+ assert s == "hello\n"
+
+ def test_stdin(self) -> None:
+ cap = capture.FDCapture(0)
+ cap.start()
+ x = os.read(0, 100).strip()
+ cap.done()
+ assert x == b""
+
+ def test_writeorg(self, tmpfile: BinaryIO) -> None:
+ data1, data2 = b"foo", b"bar"
+ cap = capture.FDCapture(tmpfile.fileno())
+ cap.start()
+ tmpfile.write(data1)
+ tmpfile.flush()
+ cap.writeorg(data2.decode("ascii"))
+ scap = cap.snap()
+ cap.done()
+ assert scap == data1.decode("ascii")
+ with open(tmpfile.name, "rb") as stmp_file:
+ stmp = stmp_file.read()
+ assert stmp == data2
+
+ def test_simple_resume_suspend(self) -> None:
+ with saved_fd(1):
+ cap = capture.FDCapture(1)
+ cap.start()
+ data = b"hello"
+ os.write(1, data)
+ sys.stdout.write("whatever")
+ s = cap.snap()
+ assert s == "hellowhatever"
+ cap.suspend()
+ os.write(1, b"world")
+ sys.stdout.write("qlwkej")
+ assert not cap.snap()
+ cap.resume()
+ os.write(1, b"but now")
+ sys.stdout.write(" yes\n")
+ s = cap.snap()
+ assert s == "but now yes\n"
+ cap.suspend()
+ cap.done()
+ pytest.raises(AssertionError, cap.suspend)
+
+ assert repr(cap) == (
+ "<FDCapture 1 oldfd={} _state='done' tmpfile={!r}>".format(
+ cap.targetfd_save, cap.tmpfile
+ )
+ )
+ # Should not crash with missing "_old".
+ assert repr(cap.syscapture) == (
+ "<SysCapture stdout _old=<UNSET> _state='done' tmpfile={!r}>".format(
+ cap.syscapture.tmpfile
+ )
+ )
+
+ def test_capfd_sys_stdout_mode(self, capfd) -> None:
+ assert "b" not in sys.stdout.mode
+
+
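+# Duplicate a file descriptor and restore it on exit, so tests can freely
+# close or redirect fds 0-2 without affecting the rest of the session.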
+@contextlib.contextmanager
+def saved_fd(fd):
+ new_fd = os.dup(fd)
+ try:
+ yield
+ finally:
+ os.dup2(new_fd, fd)
+ os.close(new_fd)
+
+
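+# TestStdCapture exercises sys-level capturing; TestTeeStdCapture and
+# TestStdCaptureFD below rerun the same tests with a different captureclass
+# to cover tee and fd-based capturing.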
+class TestStdCapture:
+ captureclass = staticmethod(StdCapture)
+
+ @contextlib.contextmanager
+ def getcapture(self, **kw):
+ cap = self.__class__.captureclass(**kw)
+ cap.start_capturing()
+ try:
+ yield cap
+ finally:
+ cap.stop_capturing()
+
+ def test_capturing_done_simple(self) -> None:
+ with self.getcapture() as cap:
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ out, err = cap.readouterr()
+ assert out == "hello"
+ assert err == "world"
+
+ def test_capturing_reset_simple(self) -> None:
+ with self.getcapture() as cap:
+ print("hello world")
+ sys.stderr.write("hello error\n")
+ out, err = cap.readouterr()
+ assert out == "hello world\n"
+ assert err == "hello error\n"
+
+ def test_capturing_readouterr(self) -> None:
+ with self.getcapture() as cap:
+ print("hello world")
+ sys.stderr.write("hello error\n")
+ out, err = cap.readouterr()
+ assert out == "hello world\n"
+ assert err == "hello error\n"
+ sys.stderr.write("error2")
+ out, err = cap.readouterr()
+ assert err == "error2"
+
+ def test_capture_results_accessible_by_attribute(self) -> None:
+ with self.getcapture() as cap:
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ capture_result = cap.readouterr()
+ assert capture_result.out == "hello"
+ assert capture_result.err == "world"
+
+ def test_capturing_readouterr_unicode(self) -> None:
+ with self.getcapture() as cap:
+ print("hxąć")
+ out, err = cap.readouterr()
+ assert out == "hxąć\n"
+
+ def test_reset_twice_error(self) -> None:
+ with self.getcapture() as cap:
+ print("hello")
+ out, err = cap.readouterr()
+ pytest.raises(ValueError, cap.stop_capturing)
+ assert out == "hello\n"
+ assert not err
+
+ def test_capturing_modify_sysouterr_in_between(self) -> None:
+ oldout = sys.stdout
+ olderr = sys.stderr
+ with self.getcapture() as cap:
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ sys.stdout = capture.CaptureIO()
+ sys.stderr = capture.CaptureIO()
+ print("not seen")
+ sys.stderr.write("not seen\n")
+ out, err = cap.readouterr()
+ assert out == "hello"
+ assert err == "world"
+ assert sys.stdout == oldout
+ assert sys.stderr == olderr
+
+ def test_capturing_error_recursive(self) -> None:
+ with self.getcapture() as cap1:
+ print("cap1")
+ with self.getcapture() as cap2:
+ print("cap2")
+ out2, err2 = cap2.readouterr()
+ out1, err1 = cap1.readouterr()
+ assert out1 == "cap1\n"
+ assert out2 == "cap2\n"
+
+ def test_just_out_capture(self) -> None:
+ with self.getcapture(out=True, err=False) as cap:
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ out, err = cap.readouterr()
+ assert out == "hello"
+ assert not err
+
+ def test_just_err_capture(self) -> None:
+ with self.getcapture(out=False, err=True) as cap:
+ sys.stdout.write("hello")
+ sys.stderr.write("world")
+ out, err = cap.readouterr()
+ assert err == "world"
+ assert not out
+
+ def test_stdin_restored(self) -> None:
+ old = sys.stdin
+ with self.getcapture(in_=True):
+ newstdin = sys.stdin
+ assert newstdin != sys.stdin
+ assert sys.stdin is old
+
+ def test_stdin_nulled_by_default(self) -> None:
+ print("XXX this test may well hang instead of crashing")
+ print("XXX which indicates an error in the underlying capturing")
+ print("XXX mechanisms")
+ with self.getcapture():
+ pytest.raises(OSError, sys.stdin.read)
+
+
+class TestTeeStdCapture(TestStdCapture):
+ captureclass = staticmethod(TeeStdCapture)
+
+ def test_capturing_error_recursive(self) -> None:
+        r"""For TeeStdCapture, since stdout/stderr are passed through, cap1
+        should get all output, while cap2 should only get "cap2\n"."""
+
+ with self.getcapture() as cap1:
+ print("cap1")
+ with self.getcapture() as cap2:
+ print("cap2")
+ out2, err2 = cap2.readouterr()
+ out1, err1 = cap1.readouterr()
+ assert out1 == "cap1\ncap2\n"
+ assert out2 == "cap2\n"
+
+
+class TestStdCaptureFD(TestStdCapture):
+ captureclass = staticmethod(StdCaptureFD)
+
+ def test_simple_only_fd(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """\
+ import os
+ def test_x():
+ os.write(1, b"hello\\n")
+ assert 0
+ """
+ )
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(
+ """
+ *test_x*
+ *assert 0*
+ *Captured stdout*
+ """
+ )
+
+ def test_intermingling(self):
+ with self.getcapture() as cap:
+ os.write(1, b"1")
+ sys.stdout.write(str(2))
+ sys.stdout.flush()
+ os.write(1, b"3")
+ os.write(2, b"a")
+ sys.stderr.write("b")
+ sys.stderr.flush()
+ os.write(2, b"c")
+ out, err = cap.readouterr()
+ assert out == "123"
+ assert err == "abc"
+
+ def test_many(self, capfd):
+ with lsof_check():
+ for i in range(10):
+ cap = StdCaptureFD()
+ cap.start_capturing()
+ cap.stop_capturing()
+
+
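+# Capturing must keep working even when the target file descriptors (0, 1, 2)
+# have already been closed before capture starts.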
+class TestStdCaptureFDinvalidFD:
+ def test_stdcapture_fd_invalid_fd(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import os
+ from fnmatch import fnmatch
+ from _pytest import capture
+
+ def StdCaptureFD(out=True, err=True, in_=True):
+ return capture.MultiCapture(
+ in_=capture.FDCapture(0) if in_ else None,
+ out=capture.FDCapture(1) if out else None,
+ err=capture.FDCapture(2) if err else None,
+ )
+
+ def test_stdout():
+ os.close(1)
+ cap = StdCaptureFD(out=True, err=False, in_=False)
+ assert fnmatch(repr(cap.out), "<FDCapture 1 oldfd=* _state='initialized' tmpfile=*>")
+ cap.start_capturing()
+ os.write(1, b"stdout")
+ assert cap.readouterr() == ("stdout", "")
+ cap.stop_capturing()
+
+ def test_stderr():
+ os.close(2)
+ cap = StdCaptureFD(out=False, err=True, in_=False)
+ assert fnmatch(repr(cap.err), "<FDCapture 2 oldfd=* _state='initialized' tmpfile=*>")
+ cap.start_capturing()
+ os.write(2, b"stderr")
+ assert cap.readouterr() == ("", "stderr")
+ cap.stop_capturing()
+
+ def test_stdin():
+ os.close(0)
+ cap = StdCaptureFD(out=False, err=False, in_=True)
+ assert fnmatch(repr(cap.in_), "<FDCapture 0 oldfd=* _state='initialized' tmpfile=*>")
+ cap.stop_capturing()
+ """
+ )
+ result = pytester.runpytest_subprocess("--capture=fd")
+ assert result.ret == 0
+ assert result.parseoutcomes()["passed"] == 3
+
+ def test_fdcapture_invalid_fd_with_fd_reuse(self, pytester: Pytester) -> None:
+ with saved_fd(1):
+ os.close(1)
+ cap = capture.FDCaptureBinary(1)
+ cap.start()
+ os.write(1, b"started")
+ cap.suspend()
+ os.write(1, b" suspended")
+ cap.resume()
+ os.write(1, b" resumed")
+ assert cap.snap() == b"started resumed"
+ cap.done()
+ with pytest.raises(OSError):
+ os.write(1, b"done")
+
+ def test_fdcapture_invalid_fd_without_fd_reuse(self, pytester: Pytester) -> None:
+ with saved_fd(1), saved_fd(2):
+ os.close(1)
+ os.close(2)
+ cap = capture.FDCaptureBinary(2)
+ cap.start()
+ os.write(2, b"started")
+ cap.suspend()
+ os.write(2, b" suspended")
+ cap.resume()
+ os.write(2, b" resumed")
+ assert cap.snap() == b"started resumed"
+ cap.done()
+ with pytest.raises(OSError):
+ os.write(2, b"done")
+
+
+def test_capture_not_started_but_reset() -> None:
+ capsys = StdCapture()
+ capsys.stop_capturing()
+
+
+def test_using_capsys_fixture_works_with_sys_stdout_encoding(
+ capsys: CaptureFixture[str],
+) -> None:
+ test_text = "test text"
+
+ print(test_text.encode(sys.stdout.encoding, "replace"))
+ (out, err) = capsys.readouterr()
+ assert out
+ assert err == ""
+
+
+def test_capsys_results_accessible_by_attribute(capsys: CaptureFixture[str]) -> None:
+ sys.stdout.write("spam")
+ sys.stderr.write("eggs")
+ capture_result = capsys.readouterr()
+ assert capture_result.out == "spam"
+ assert capture_result.err == "eggs"
+
+
+def test_fdcapture_tmpfile_remains_the_same() -> None:
+ cap = StdCaptureFD(out=False, err=True)
+ try:
+ cap.start_capturing()
+ capfile = cap.err.tmpfile
+ cap.readouterr()
+ finally:
+ cap.stop_capturing()
+ capfile2 = cap.err.tmpfile
+ assert capfile2 == capfile
+
+
+def test_close_and_capture_again(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import os
+ def test_close():
+ os.close(1)
+ def test_capture_again():
+ os.write(1, b"hello\\n")
+ assert 0
+ """
+ )
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(
+ """
+ *test_capture_again*
+ *assert 0*
+ *stdout*
+ *hello*
+ """
+ )
+
+
+@pytest.mark.parametrize(
+ "method", ["SysCapture(2)", "SysCapture(2, tee=True)", "FDCapture(2)"]
+)
+def test_capturing_and_logging_fundamentals(pytester: Pytester, method: str) -> None:
+    # Check the fundamental interaction between MultiCapture and the logging module.
+ p = pytester.makepyfile(
+ """
+ import sys, os, logging
+ from _pytest import capture
+ cap = capture.MultiCapture(
+ in_=None,
+ out=None,
+ err=capture.%s,
+ )
+ cap.start_capturing()
+
+ logging.warning("hello1")
+ outerr = cap.readouterr()
+ print("suspend, captured %%s" %%(outerr,))
+ logging.warning("hello2")
+
+ cap.pop_outerr_to_orig()
+ logging.warning("hello3")
+
+ outerr = cap.readouterr()
+ print("suspend2, captured %%s" %% (outerr,))
+ """
+ % (method,)
+ )
+ result = pytester.runpython(p)
+ result.stdout.fnmatch_lines(
+ """
+ suspend, captured*hello1*
+ suspend2, captured*WARNING:root:hello3*
+ """
+ )
+ result.stderr.fnmatch_lines(
+ """
+ WARNING:root:hello2
+ """
+ )
+ assert "atexit" not in result.stderr.str()
+
+
+def test_error_attribute_issue555(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import sys
+ def test_capattr():
+ assert sys.stdout.errors == "replace"
+ assert sys.stderr.errors == "replace"
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+@pytest.mark.skipif(
+ not sys.platform.startswith("win"),
+ reason="only on windows",
+)
+def test_py36_windowsconsoleio_workaround_non_standard_streams() -> None:
+ """
+    Ensure the _py36_windowsconsoleio_workaround function works with objects that
+ do not implement the full ``io``-based stream protocol, for example execnet channels (#2666).
+ """
+ from _pytest.capture import _py36_windowsconsoleio_workaround
+
+ class DummyStream:
+ def write(self, s):
+ pass
+
+ stream = cast(TextIO, DummyStream())
+ _py36_windowsconsoleio_workaround(stream)
+
+
+def test_dontreadfrominput_has_encoding(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import sys
+ def test_capattr():
+ # should not raise AttributeError
+ assert sys.stdout.encoding
+ assert sys.stderr.encoding
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+def test_crash_on_closing_tmpfile_py27(
+ pytester: Pytester, monkeypatch: MonkeyPatch
+) -> None:
+ p = pytester.makepyfile(
+ """
+ import threading
+ import sys
+
+ printing = threading.Event()
+
+ def spam():
+ f = sys.stderr
+ print('SPAMBEFORE', end='', file=f)
+ printing.set()
+
+ while True:
+ try:
+ f.flush()
+ except (OSError, ValueError):
+ break
+
+ def test_spam_in_thread():
+ t = threading.Thread(target=spam)
+ t.daemon = True
+ t.start()
+
+ printing.wait()
+ """
+ )
+ # Do not consider plugins like hypothesis, which might output to stderr.
+ monkeypatch.setenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "1")
+ result = pytester.runpytest_subprocess(str(p))
+ assert result.ret == 0
+ assert result.stderr.str() == ""
+ result.stdout.no_fnmatch_line("*OSError*")
+
+
+def test_global_capture_with_live_logging(pytester: Pytester) -> None:
+ # Issue 3819
+ # capture should work with live cli logging
+
+    # Teardown report seems to have the capture for the whole process (setup, call, teardown)
+ pytester.makeconftest(
+ """
+ def pytest_runtest_logreport(report):
+ if "test_global" in report.nodeid:
+ if report.when == "teardown":
+ with open("caplog", "w") as f:
+ f.write(report.caplog)
+ with open("capstdout", "w") as f:
+ f.write(report.capstdout)
+ """
+ )
+
+ pytester.makepyfile(
+ """
+ import logging
+ import sys
+ import pytest
+
+ logger = logging.getLogger(__name__)
+
+ @pytest.fixture
+ def fix1():
+ print("fix setup")
+ logging.info("fix setup")
+ yield
+ logging.info("fix teardown")
+ print("fix teardown")
+
+ def test_global(fix1):
+ print("begin test")
+ logging.info("something in test")
+ print("end test")
+ """
+ )
+ result = pytester.runpytest_subprocess("--log-cli-level=INFO")
+ assert result.ret == 0
+
+ with open("caplog") as f:
+ caplog = f.read()
+
+ assert "fix setup" in caplog
+ assert "something in test" in caplog
+ assert "fix teardown" in caplog
+
+ with open("capstdout") as f:
+ capstdout = f.read()
+
+ assert "fix setup" in capstdout
+ assert "begin test" in capstdout
+ assert "end test" in capstdout
+ assert "fix teardown" in capstdout
+
+
+@pytest.mark.parametrize("capture_fixture", ["capsys", "capfd"])
+def test_capture_with_live_logging(
+ pytester: Pytester, capture_fixture: CaptureFixture[str]
+) -> None:
+ # Issue 3819
+ # capture should work with live cli logging
+
+ pytester.makepyfile(
+ """
+ import logging
+ import sys
+
+ logger = logging.getLogger(__name__)
+
+ def test_capture({0}):
+ print("hello")
+ sys.stderr.write("world\\n")
+ captured = {0}.readouterr()
+ assert captured.out == "hello\\n"
+ assert captured.err == "world\\n"
+
+ logging.info("something")
+ print("next")
+ logging.info("something")
+
+ captured = {0}.readouterr()
+ assert captured.out == "next\\n"
+ """.format(
+ capture_fixture
+ )
+ )
+
+ result = pytester.runpytest_subprocess("--log-cli-level=INFO")
+ assert result.ret == 0
+
+
+def test_typeerror_encodedfile_write(pytester: Pytester) -> None:
+ """It should behave the same with and without output capturing (#4861)."""
+ p = pytester.makepyfile(
+ """
+ def test_fails():
+ import sys
+ sys.stdout.write(b"foo")
+ """
+ )
+ result_without_capture = pytester.runpytest("-s", str(p))
+ result_with_capture = pytester.runpytest(str(p))
+
+ assert result_with_capture.ret == result_without_capture.ret
+ out = result_with_capture.stdout.str()
+ assert ("TypeError: write() argument must be str, not bytes" in out) or (
+ "TypeError: unicode argument expected, got 'bytes'" in out
+ )
+
+
+def test_stderr_write_returns_len(capsys: CaptureFixture[str]) -> None:
+    """Writes to EncodedFile objects, such as captured stderr, should return the number of characters written."""
+ assert sys.stderr.write("Foo") == 3
+
+
+def test_encodedfile_writelines(tmpfile: BinaryIO) -> None:
+ ef = capture.EncodedFile(tmpfile, encoding="utf-8")
+ with pytest.raises(TypeError):
+ ef.writelines([b"line1", b"line2"]) # type: ignore[list-item]
+ assert ef.writelines(["line3", "line4"]) is None # type: ignore[func-returns-value]
+ ef.flush()
+ tmpfile.seek(0)
+ assert tmpfile.read() == b"line3line4"
+ tmpfile.close()
+ with pytest.raises(ValueError):
+ ef.read()
+
+
+def test__get_multicapture() -> None:
+ assert isinstance(_get_multicapture("no"), MultiCapture)
+ pytest.raises(ValueError, _get_multicapture, "unknown").match(
+ r"^unknown capturing method: 'unknown'"
+ )
+
+
+def test_logging_while_collecting(pytester: Pytester) -> None:
+    """Issue #6240: calls to logging.xxx() during collection cause all logging calls to be duplicated to stderr."""
+ p = pytester.makepyfile(
+ """\
+ import logging
+
+ logging.warning("during collection")
+
+ def test_logging():
+ logging.warning("during call")
+ assert False
+ """
+ )
+ result = pytester.runpytest_subprocess(p)
+ assert result.ret == ExitCode.TESTS_FAILED
+ result.stdout.fnmatch_lines(
+ [
+ "*test_*.py F*",
+ "====* FAILURES *====",
+ "____*____",
+ "*--- Captured log call*",
+ "WARNING * during call",
+ "*1 failed*",
+ ]
+ )
+ result.stdout.no_fnmatch_line("*Captured stderr call*")
+ result.stdout.no_fnmatch_line("*during collection*")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_collection.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_collection.py
new file mode 100644
index 0000000000..6a8a5c1cef
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_collection.py
@@ -0,0 +1,1506 @@
+import os
+import pprint
+import shutil
+import sys
+import textwrap
+from pathlib import Path
+from typing import List
+
+import pytest
+from _pytest.config import ExitCode
+from _pytest.fixtures import FixtureRequest
+from _pytest.main import _in_venv
+from _pytest.main import Session
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.nodes import Item
+from _pytest.pathlib import symlink_or_skip
+from _pytest.pytester import HookRecorder
+from _pytest.pytester import Pytester
+
+
+def ensure_file(file_path: Path) -> Path:
+    """Ensure that the file exists, creating parent directories as needed."""
+ file_path.parent.mkdir(parents=True, exist_ok=True)
+ file_path.touch(exist_ok=True)
+ return file_path
+
+
+class TestCollector:
+ def test_collect_versus_item(self) -> None:
+ from pytest import Collector
+ from pytest import Item
+
+ assert not issubclass(Collector, Item)
+ assert not issubclass(Item, Collector)
+
+ def test_check_equality(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol(
+ """
+ def test_pass(): pass
+ def test_fail(): assert 0
+ """
+ )
+ fn1 = pytester.collect_by_name(modcol, "test_pass")
+ assert isinstance(fn1, pytest.Function)
+ fn2 = pytester.collect_by_name(modcol, "test_pass")
+ assert isinstance(fn2, pytest.Function)
+
+ assert fn1 == fn2
+ assert fn1 != modcol
+ assert hash(fn1) == hash(fn2)
+
+ fn3 = pytester.collect_by_name(modcol, "test_fail")
+ assert isinstance(fn3, pytest.Function)
+ assert not (fn1 == fn3)
+ assert fn1 != fn3
+
+ for fn in fn1, fn2, fn3:
+ assert isinstance(fn, pytest.Function)
+ assert fn != 3 # type: ignore[comparison-overlap]
+ assert fn != modcol
+ assert fn != [1, 2, 3] # type: ignore[comparison-overlap]
+ assert [1, 2, 3] != fn # type: ignore[comparison-overlap]
+ assert modcol != fn
+
+ assert pytester.collect_by_name(modcol, "doesnotexist") is None
+
+ def test_getparent_and_accessors(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol(
+ """
+ class TestClass:
+ def test_foo(self):
+ pass
+ """
+ )
+ cls = pytester.collect_by_name(modcol, "TestClass")
+ assert isinstance(cls, pytest.Class)
+ fn = pytester.collect_by_name(cls, "test_foo")
+ assert isinstance(fn, pytest.Function)
+
+ assert fn.getparent(pytest.Module) is modcol
+ assert modcol.module is not None
+ assert modcol.cls is None
+ assert modcol.instance is None
+
+ assert fn.getparent(pytest.Class) is cls
+ assert cls.module is not None
+ assert cls.cls is not None
+ assert cls.instance is None
+
+ assert fn.getparent(pytest.Function) is fn
+ assert fn.module is not None
+ assert fn.cls is not None
+ assert fn.instance is not None
+ assert fn.function is not None
+
+ def test_getcustomfile_roundtrip(self, pytester: Pytester) -> None:
+ hello = pytester.makefile(".xxx", hello="world")
+ pytester.makepyfile(
+ conftest="""
+ import pytest
+ class CustomFile(pytest.File):
+ pass
+ def pytest_collect_file(file_path, parent):
+ if file_path.suffix == ".xxx":
+ return CustomFile.from_parent(path=file_path, parent=parent)
+ """
+ )
+ node = pytester.getpathnode(hello)
+ assert isinstance(node, pytest.File)
+ assert node.name == "hello.xxx"
+ nodes = node.session.perform_collect([node.nodeid], genitems=False)
+ assert len(nodes) == 1
+ assert isinstance(nodes[0], pytest.File)
+
+ def test_can_skip_class_with_test_attr(self, pytester: Pytester) -> None:
+        """Ensure the test class is skipped when using `__test__=False` (see #2007)."""
+ pytester.makepyfile(
+ """
+ class TestFoo(object):
+ __test__ = False
+ def __init__(self):
+ pass
+ def test_foo():
+ assert True
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["collected 0 items", "*no tests ran in*"])
+
+
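+# Filesystem-level collection rules: which directories are ignored by default
+# (build/dist/VCS/virtualenv dirs) and how norecursedirs and testpaths interact
+# with explicit command-line arguments.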
+class TestCollectFS:
+ def test_ignored_certain_directories(self, pytester: Pytester) -> None:
+ tmp_path = pytester.path
+ ensure_file(tmp_path / "build" / "test_notfound.py")
+ ensure_file(tmp_path / "dist" / "test_notfound.py")
+ ensure_file(tmp_path / "_darcs" / "test_notfound.py")
+ ensure_file(tmp_path / "CVS" / "test_notfound.py")
+ ensure_file(tmp_path / "{arch}" / "test_notfound.py")
+ ensure_file(tmp_path / ".whatever" / "test_notfound.py")
+ ensure_file(tmp_path / ".bzr" / "test_notfound.py")
+ ensure_file(tmp_path / "normal" / "test_found.py")
+ for x in tmp_path.rglob("test_*.py"):
+ x.write_text("def test_hello(): pass", "utf-8")
+
+ result = pytester.runpytest("--collect-only")
+ s = result.stdout.str()
+ assert "test_notfound" not in s
+ assert "test_found" in s
+
+ @pytest.mark.parametrize(
+ "fname",
+ (
+ "activate",
+ "activate.csh",
+ "activate.fish",
+ "Activate",
+ "Activate.bat",
+ "Activate.ps1",
+ ),
+ )
+ def test_ignored_virtualenvs(self, pytester: Pytester, fname: str) -> None:
+ bindir = "Scripts" if sys.platform.startswith("win") else "bin"
+ ensure_file(pytester.path / "virtual" / bindir / fname)
+ testfile = ensure_file(pytester.path / "virtual" / "test_invenv.py")
+ testfile.write_text("def test_hello(): pass")
+
+ # by default, ignore tests inside a virtualenv
+ result = pytester.runpytest()
+ result.stdout.no_fnmatch_line("*test_invenv*")
+ # allow test collection if user insists
+ result = pytester.runpytest("--collect-in-virtualenv")
+ assert "test_invenv" in result.stdout.str()
+ # allow test collection if user directly passes in the directory
+ result = pytester.runpytest("virtual")
+ assert "test_invenv" in result.stdout.str()
+
+ @pytest.mark.parametrize(
+ "fname",
+ (
+ "activate",
+ "activate.csh",
+ "activate.fish",
+ "Activate",
+ "Activate.bat",
+ "Activate.ps1",
+ ),
+ )
+ def test_ignored_virtualenvs_norecursedirs_precedence(
+ self, pytester: Pytester, fname: str
+ ) -> None:
+ bindir = "Scripts" if sys.platform.startswith("win") else "bin"
+ # norecursedirs takes priority
+ ensure_file(pytester.path / ".virtual" / bindir / fname)
+ testfile = ensure_file(pytester.path / ".virtual" / "test_invenv.py")
+ testfile.write_text("def test_hello(): pass")
+ result = pytester.runpytest("--collect-in-virtualenv")
+ result.stdout.no_fnmatch_line("*test_invenv*")
+ # ...unless the virtualenv is explicitly given on the CLI
+ result = pytester.runpytest("--collect-in-virtualenv", ".virtual")
+ assert "test_invenv" in result.stdout.str()
+
+ @pytest.mark.parametrize(
+ "fname",
+ (
+ "activate",
+ "activate.csh",
+ "activate.fish",
+ "Activate",
+ "Activate.bat",
+ "Activate.ps1",
+ ),
+ )
+ def test__in_venv(self, pytester: Pytester, fname: str) -> None:
+ """Directly test the virtual env detection function"""
+ bindir = "Scripts" if sys.platform.startswith("win") else "bin"
+ # no bin/activate, not a virtualenv
+ base_path = pytester.mkdir("venv")
+ assert _in_venv(base_path) is False
+ # with bin/activate, totally a virtualenv
+ bin_path = base_path.joinpath(bindir)
+ bin_path.mkdir()
+ bin_path.joinpath(fname).touch()
+ assert _in_venv(base_path) is True
+
+ def test_custom_norecursedirs(self, pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ norecursedirs = mydir xyz*
+ """
+ )
+ tmp_path = pytester.path
+ ensure_file(tmp_path / "mydir" / "test_hello.py").write_text(
+ "def test_1(): pass"
+ )
+ ensure_file(tmp_path / "xyz123" / "test_2.py").write_text("def test_2(): 0/0")
+ ensure_file(tmp_path / "xy" / "test_ok.py").write_text("def test_3(): pass")
+ rec = pytester.inline_run()
+ rec.assertoutcome(passed=1)
+ rec = pytester.inline_run("xyz123/test_2.py")
+ rec.assertoutcome(failed=1)
+
+ def test_testpaths_ini(self, pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ testpaths = gui uts
+ """
+ )
+ tmp_path = pytester.path
+ ensure_file(tmp_path / "env" / "test_1.py").write_text("def test_env(): pass")
+ ensure_file(tmp_path / "gui" / "test_2.py").write_text("def test_gui(): pass")
+ ensure_file(tmp_path / "uts" / "test_3.py").write_text("def test_uts(): pass")
+
+ # executing from rootdir only tests from `testpaths` directories
+ # are collected
+ items, reprec = pytester.inline_genitems("-v")
+ assert [x.name for x in items] == ["test_gui", "test_uts"]
+
+ # check that explicitly passing directories in the command-line
+ # collects the tests
+ for dirname in ("env", "gui", "uts"):
+ items, reprec = pytester.inline_genitems(tmp_path.joinpath(dirname))
+ assert [x.name for x in items] == ["test_%s" % dirname]
+
+ # changing cwd to each subdirectory and running pytest without
+ # arguments collects the tests in that directory normally
+ for dirname in ("env", "gui", "uts"):
+ monkeypatch.chdir(pytester.path.joinpath(dirname))
+ items, reprec = pytester.inline_genitems()
+ assert [x.name for x in items] == ["test_%s" % dirname]
+
+
+class TestCollectPluginHookRelay:
+ def test_pytest_collect_file(self, pytester: Pytester) -> None:
+ wascalled = []
+
+ class Plugin:
+ def pytest_collect_file(self, file_path: Path) -> None:
+ if not file_path.name.startswith("."):
+ # Ignore hidden files, e.g. .testmondata.
+ wascalled.append(file_path)
+
+ pytester.makefile(".abc", "xyz")
+ pytest.main(pytester.path, plugins=[Plugin()])
+ assert len(wascalled) == 1
+ assert wascalled[0].suffix == ".abc"
+
+
+class TestPrunetraceback:
+ def test_custom_repr_failure(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import not_exists
+ """
+ )
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_collect_file(file_path, parent):
+ return MyFile.from_parent(path=file_path, parent=parent)
+ class MyError(Exception):
+ pass
+ class MyFile(pytest.File):
+ def collect(self):
+ raise MyError()
+ def repr_failure(self, excinfo):
+ if isinstance(excinfo.value, MyError):
+ return "hello world"
+ return pytest.File.repr_failure(self, excinfo)
+ """
+ )
+
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(["*ERROR collecting*", "*hello world*"])
+
+ @pytest.mark.xfail(reason="other mechanism for adding to reporting needed")
+ def test_collect_report_postprocessing(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import not_exists
+ """
+ )
+ pytester.makeconftest(
+ """
+ import pytest
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_make_collect_report():
+ outcome = yield
+ rep = outcome.get_result()
+ rep.headerlines += ["header1"]
+ outcome.force_result(rep)
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(["*ERROR collecting*", "*header1*"])
+
+
+class TestCustomConftests:
+ def test_ignore_collect_path(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_ignore_collect(collection_path, config):
+ return collection_path.name.startswith("x") or collection_path.name == "test_one.py"
+ """
+ )
+ sub = pytester.mkdir("xy123")
+ ensure_file(sub / "test_hello.py").write_text("syntax error")
+ sub.joinpath("conftest.py").write_text("syntax error")
+ pytester.makepyfile("def test_hello(): pass")
+ pytester.makepyfile(test_one="syntax error")
+ result = pytester.runpytest("--fulltrace")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_ignore_collect_not_called_on_argument(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_ignore_collect(collection_path, config):
+ return True
+ """
+ )
+ p = pytester.makepyfile("def test_hello(): pass")
+ result = pytester.runpytest(p)
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ result.stdout.fnmatch_lines(["*collected 0 items*"])
+
+ def test_collectignore_exclude_on_option(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ from pathlib import Path
+
+ class MyPathLike:
+ def __init__(self, path):
+ self.path = path
+ def __fspath__(self):
+ return "path"
+
+ collect_ignore = [MyPathLike('hello'), 'test_world.py', Path('bye')]
+
+ def pytest_addoption(parser):
+ parser.addoption("--XX", action="store_true", default=False)
+
+ def pytest_configure(config):
+ if config.getvalue("XX"):
+ collect_ignore[:] = []
+ """
+ )
+ pytester.mkdir("hello")
+ pytester.makepyfile(test_world="def test_hello(): pass")
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ result.stdout.no_fnmatch_line("*passed*")
+ result = pytester.runpytest("--XX")
+ assert result.ret == 0
+ assert "passed" in result.stdout.str()
+
+ def test_collectignoreglob_exclude_on_option(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ collect_ignore_glob = ['*w*l[dt]*']
+ def pytest_addoption(parser):
+ parser.addoption("--XX", action="store_true", default=False)
+ def pytest_configure(config):
+ if config.getvalue("XX"):
+ collect_ignore_glob[:] = []
+ """
+ )
+ pytester.makepyfile(test_world="def test_hello(): pass")
+ pytester.makepyfile(test_welt="def test_hallo(): pass")
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ result.stdout.fnmatch_lines(["*collected 0 items*"])
+ result = pytester.runpytest("--XX")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ def test_pytest_fs_collect_hooks_are_seen(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ class MyModule(pytest.Module):
+ pass
+ def pytest_collect_file(file_path, parent):
+ if file_path.suffix == ".py":
+ return MyModule.from_parent(path=file_path, parent=parent)
+ """
+ )
+ pytester.mkdir("sub")
+ pytester.makepyfile("def test_x(): pass")
+ result = pytester.runpytest("--co")
+ result.stdout.fnmatch_lines(["*MyModule*", "*test_x*"])
+
+ def test_pytest_collect_file_from_sister_dir(self, pytester: Pytester) -> None:
+ sub1 = pytester.mkpydir("sub1")
+ sub2 = pytester.mkpydir("sub2")
+ conf1 = pytester.makeconftest(
+ """
+ import pytest
+ class MyModule1(pytest.Module):
+ pass
+ def pytest_collect_file(file_path, parent):
+ if file_path.suffix == ".py":
+ return MyModule1.from_parent(path=file_path, parent=parent)
+ """
+ )
+ conf1.replace(sub1.joinpath(conf1.name))
+ conf2 = pytester.makeconftest(
+ """
+ import pytest
+ class MyModule2(pytest.Module):
+ pass
+ def pytest_collect_file(file_path, parent):
+ if file_path.suffix == ".py":
+ return MyModule2.from_parent(path=file_path, parent=parent)
+ """
+ )
+ conf2.replace(sub2.joinpath(conf2.name))
+ p = pytester.makepyfile("def test_x(): pass")
+ shutil.copy(p, sub1.joinpath(p.name))
+ shutil.copy(p, sub2.joinpath(p.name))
+ result = pytester.runpytest("--co")
+ result.stdout.fnmatch_lines(["*MyModule1*", "*MyModule2*", "*test_x*"])
+
+
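+# Collection-protocol tests: drive collection through inline_genitems/Session
+# and assert on the hook call sequence recorded by HookRecorder.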
+class TestSession:
+ def test_collect_topdir(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile("def test_func(): pass")
+ id = "::".join([p.name, "test_func"])
+ # XXX migrate to collectonly? (see below)
+ config = pytester.parseconfig(id)
+ topdir = pytester.path
+ rcol = Session.from_config(config)
+ assert topdir == rcol.path
+ # rootid = rcol.nodeid
+ # root2 = rcol.perform_collect([rcol.nodeid], genitems=False)[0]
+ # assert root2 == rcol, rootid
+ colitems = rcol.perform_collect([rcol.nodeid], genitems=False)
+ assert len(colitems) == 1
+ assert colitems[0].path == p
+
+ def get_reported_items(self, hookrec: HookRecorder) -> List[Item]:
+ """Return pytest.Item instances reported by the pytest_collectreport hook"""
+ calls = hookrec.getcalls("pytest_collectreport")
+ return [
+ x
+ for call in calls
+ for x in call.report.result
+ if isinstance(x, pytest.Item)
+ ]
+
+ def test_collect_protocol_single_function(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile("def test_func(): pass")
+ id = "::".join([p.name, "test_func"])
+ items, hookrec = pytester.inline_genitems(id)
+ (item,) = items
+ assert item.name == "test_func"
+ newid = item.nodeid
+ assert newid == id
+ pprint.pprint(hookrec.calls)
+ topdir = pytester.path # noqa
+ hookrec.assert_contains(
+ [
+ ("pytest_collectstart", "collector.path == topdir"),
+ ("pytest_make_collect_report", "collector.path == topdir"),
+ ("pytest_collectstart", "collector.path == p"),
+ ("pytest_make_collect_report", "collector.path == p"),
+ ("pytest_pycollect_makeitem", "name == 'test_func'"),
+ ("pytest_collectreport", "report.result[0].name == 'test_func'"),
+ ]
+ )
+ # ensure we are reporting the collection of the single test item (#2464)
+ assert [x.name for x in self.get_reported_items(hookrec)] == ["test_func"]
+
+ def test_collect_protocol_method(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+ )
+ normid = p.name + "::TestClass::test_method"
+ for id in [p.name, p.name + "::TestClass", normid]:
+ items, hookrec = pytester.inline_genitems(id)
+ assert len(items) == 1
+ assert items[0].name == "test_method"
+ newid = items[0].nodeid
+ assert newid == normid
+ # ensure we are reporting the collection of the single test item (#2464)
+ assert [x.name for x in self.get_reported_items(hookrec)] == ["test_method"]
+
+ def test_collect_custom_nodes_multi_id(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile("def test_func(): pass")
+ pytester.makeconftest(
+ """
+ import pytest
+ class SpecialItem(pytest.Item):
+ def runtest(self):
+ return # ok
+ class SpecialFile(pytest.File):
+ def collect(self):
+ return [SpecialItem.from_parent(name="check", parent=self)]
+ def pytest_collect_file(file_path, parent):
+ if file_path.name == %r:
+ return SpecialFile.from_parent(path=file_path, parent=parent)
+ """
+ % p.name
+ )
+ id = p.name
+
+ items, hookrec = pytester.inline_genitems(id)
+ pprint.pprint(hookrec.calls)
+ assert len(items) == 2
+ hookrec.assert_contains(
+ [
+ ("pytest_collectstart", "collector.path == collector.session.path"),
+ (
+ "pytest_collectstart",
+ "collector.__class__.__name__ == 'SpecialFile'",
+ ),
+ ("pytest_collectstart", "collector.__class__.__name__ == 'Module'"),
+ ("pytest_pycollect_makeitem", "name == 'test_func'"),
+ ("pytest_collectreport", "report.nodeid.startswith(p.name)"),
+ ]
+ )
+ assert len(self.get_reported_items(hookrec)) == 2
+
+ def test_collect_subdir_event_ordering(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile("def test_func(): pass")
+ aaa = pytester.mkpydir("aaa")
+ test_aaa = aaa.joinpath("test_aaa.py")
+ p.replace(test_aaa)
+
+ items, hookrec = pytester.inline_genitems()
+ assert len(items) == 1
+ pprint.pprint(hookrec.calls)
+ hookrec.assert_contains(
+ [
+ ("pytest_collectstart", "collector.path == test_aaa"),
+ ("pytest_pycollect_makeitem", "name == 'test_func'"),
+ ("pytest_collectreport", "report.nodeid.startswith('aaa/test_aaa.py')"),
+ ]
+ )
+
+ def test_collect_two_commandline_args(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile("def test_func(): pass")
+ aaa = pytester.mkpydir("aaa")
+ bbb = pytester.mkpydir("bbb")
+ test_aaa = aaa.joinpath("test_aaa.py")
+ shutil.copy(p, test_aaa)
+ test_bbb = bbb.joinpath("test_bbb.py")
+ p.replace(test_bbb)
+
+ id = "."
+
+ items, hookrec = pytester.inline_genitems(id)
+ assert len(items) == 2
+ pprint.pprint(hookrec.calls)
+ hookrec.assert_contains(
+ [
+ ("pytest_collectstart", "collector.path == test_aaa"),
+ ("pytest_pycollect_makeitem", "name == 'test_func'"),
+ ("pytest_collectreport", "report.nodeid == 'aaa/test_aaa.py'"),
+ ("pytest_collectstart", "collector.path == test_bbb"),
+ ("pytest_pycollect_makeitem", "name == 'test_func'"),
+ ("pytest_collectreport", "report.nodeid == 'bbb/test_bbb.py'"),
+ ]
+ )
+
+ def test_serialization_byid(self, pytester: Pytester) -> None:
+ pytester.makepyfile("def test_func(): pass")
+ items, hookrec = pytester.inline_genitems()
+ assert len(items) == 1
+ (item,) = items
+ items2, hookrec = pytester.inline_genitems(item.nodeid)
+ (item2,) = items2
+ assert item2.name == item.name
+ assert item2.path == item.path
+
+ def test_find_byid_without_instance_parents(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+ )
+ arg = p.name + "::TestClass::test_method"
+ items, hookrec = pytester.inline_genitems(arg)
+ assert len(items) == 1
+ (item,) = items
+ assert item.nodeid.endswith("TestClass::test_method")
+ # ensure we are reporting the collection of the single test item (#2464)
+ assert [x.name for x in self.get_reported_items(hookrec)] == ["test_method"]
+
+
+class Test_getinitialnodes:
+ def test_global_file(self, pytester: Pytester) -> None:
+ tmp_path = pytester.path
+ x = ensure_file(tmp_path / "x.py")
+ config = pytester.parseconfigure(x)
+ col = pytester.getnode(config, x)
+ assert isinstance(col, pytest.Module)
+ assert col.name == "x.py"
+ assert col.parent is not None
+ assert col.parent.parent is None
+ for parent in col.listchain():
+ assert parent.config is config
+
+ def test_pkgfile(self, pytester: Pytester) -> None:
+ """Verify nesting when a module is within a package.
+ The parent chain should match: Module<x.py> -> Package<subdir> -> Session.
+ Session's parent should always be None.
+ """
+ tmp_path = pytester.path
+ subdir = tmp_path.joinpath("subdir")
+ x = ensure_file(subdir / "x.py")
+ ensure_file(subdir / "__init__.py")
+ with subdir.cwd():
+ config = pytester.parseconfigure(x)
+ col = pytester.getnode(config, x)
+ assert col is not None
+ assert col.name == "x.py"
+ assert isinstance(col, pytest.Module)
+ assert isinstance(col.parent, pytest.Package)
+ assert isinstance(col.parent.parent, pytest.Session)
+ # session is batman (has no parents)
+ assert col.parent.parent.parent is None
+ for parent in col.listchain():
+ assert parent.config is config
+
+
+class Test_genitems:
+ def test_check_collect_hashes(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def test_1():
+ pass
+
+ def test_2():
+ pass
+ """
+ )
+ shutil.copy(p, p.parent / (p.stem + "2" + ".py"))
+ items, reprec = pytester.inline_genitems(p.parent)
+ assert len(items) == 4
+ for numi, i in enumerate(items):
+ for numj, j in enumerate(items):
+ if numj != numi:
+ assert hash(i) != hash(j)
+ assert i != j
+
+ def test_example_items1(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ def testone():
+ pass
+
+ class TestX(object):
+ def testmethod_one(self):
+ pass
+
+ class TestY(TestX):
+ @pytest.mark.parametrize("arg0", [".["])
+ def testmethod_two(self, arg0):
+ pass
+ """
+ )
+ items, reprec = pytester.inline_genitems(p)
+ assert len(items) == 4
+ assert items[0].name == "testone"
+ assert items[1].name == "testmethod_one"
+ assert items[2].name == "testmethod_one"
+ assert items[3].name == "testmethod_two[.[]"
+
+ # let's also test getmodpath here
+ assert items[0].getmodpath() == "testone" # type: ignore[attr-defined]
+ assert items[1].getmodpath() == "TestX.testmethod_one" # type: ignore[attr-defined]
+ assert items[2].getmodpath() == "TestY.testmethod_one" # type: ignore[attr-defined]
+ # PR #6202: Fix incorrect result of getmodpath method. (Resolves issue #6189)
+ assert items[3].getmodpath() == "TestY.testmethod_two[.[]" # type: ignore[attr-defined]
+
+ s = items[0].getmodpath(stopatmodule=False) # type: ignore[attr-defined]
+ assert s.endswith("test_example_items1.testone")
+ print(s)
+
+ def test_class_and_functions_discovery_using_glob(self, pytester: Pytester) -> None:
+ """Test that Python_classes and Python_functions config options work
+ as prefixes and glob-like patterns (#600)."""
+ pytester.makeini(
+ """
+ [pytest]
+ python_classes = *Suite Test
+ python_functions = *_test test
+ """
+ )
+ p = pytester.makepyfile(
+ """
+ class MyTestSuite(object):
+ def x_test(self):
+ pass
+
+ class TestCase(object):
+ def test_y(self):
+ pass
+ """
+ )
+ items, reprec = pytester.inline_genitems(p)
+ ids = [x.getmodpath() for x in items] # type: ignore[attr-defined]
+ assert ids == ["MyTestSuite.x_test", "TestCase.test_y"]
+
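+
+# Note (editorial, not in the upstream pytest file): in the test above,
+# "python_classes = *Suite Test" collects classes whose names match the glob
+# "*Suite" or start with the prefix "Test", and "python_functions = *_test test"
+# does the same for function/method names, which is why both MyTestSuite.x_test
+# and TestCase.test_y end up collected.
+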
+
+def test_matchnodes_two_collections_same_file(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_configure(config):
+ config.pluginmanager.register(Plugin2())
+
+ class Plugin2(object):
+ def pytest_collect_file(self, file_path, parent):
+ if file_path.suffix == ".abc":
+ return MyFile2.from_parent(path=file_path, parent=parent)
+
+ def pytest_collect_file(file_path, parent):
+ if file_path.suffix == ".abc":
+ return MyFile1.from_parent(path=file_path, parent=parent)
+
+ class MyFile1(pytest.File):
+ def collect(self):
+ yield Item1.from_parent(name="item1", parent=self)
+
+ class MyFile2(pytest.File):
+ def collect(self):
+ yield Item2.from_parent(name="item2", parent=self)
+
+ class Item1(pytest.Item):
+ def runtest(self):
+ pass
+
+ class Item2(pytest.Item):
+ def runtest(self):
+ pass
+ """
+ )
+ p = pytester.makefile(".abc", "")
+ result = pytester.runpytest()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*2 passed*"])
+ res = pytester.runpytest("%s::item2" % p.name)
+ res.stdout.fnmatch_lines(["*1 passed*"])
+
+
+class TestNodeKeywords:
+ def test_no_under(self, pytester: Pytester) -> None:
+ modcol = pytester.getmodulecol(
+ """
+ def test_pass(): pass
+ def test_fail(): assert 0
+ """
+ )
+ values = list(modcol.keywords)
+ assert modcol.name in values
+ for x in values:
+ assert not x.startswith("_")
+ assert modcol.name in repr(modcol.keywords)
+
+ def test_issue345(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_should_not_be_selected():
+ assert False, 'I should not have been selected to run'
+
+ def test___repr__():
+ pass
+ """
+ )
+ reprec = pytester.inline_run("-k repr")
+ reprec.assertoutcome(passed=1, failed=0)
+
+ def test_keyword_matching_is_case_insensitive_by_default(
+ self, pytester: Pytester
+ ) -> None:
+ """Check that selection via -k EXPRESSION is case-insensitive.
+
+ Since markers are also added to the node keywords, they too can
+ be matched without having to think about case sensitivity.
+
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+
+ def test_sPeCiFiCToPiC_1():
+ assert True
+
+ class TestSpecificTopic_2:
+ def test(self):
+ assert True
+
+ @pytest.mark.sPeCiFiCToPic_3
+ def test():
+ assert True
+
+ @pytest.mark.sPeCiFiCToPic_4
+ class Test:
+ def test(self):
+ assert True
+
+ def test_failing_5():
+ assert False, "This should not match"
+
+ """
+ )
+ num_matching_tests = 4
+ for expression in ("specifictopic", "SPECIFICTOPIC", "SpecificTopic"):
+ reprec = pytester.inline_run("-k " + expression)
+ reprec.assertoutcome(passed=num_matching_tests, failed=0)
+
+ def test_duplicates_handled_correctly(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ pytestmark = pytest.mark.kw
+ class TestClass:
+ pytestmark = pytest.mark.kw
+ def test_method(self): pass
+ test_method.kw = 'method'
+ """,
+ "test_method",
+ )
+ assert item.parent is not None and item.parent.parent is not None
+ item.parent.parent.keywords["kw"] = "class"
+
+ assert item.keywords["kw"] == "method"
+ assert len(item.keywords) == len(set(item.keywords))
+
+
+COLLECTION_ERROR_PY_FILES = dict(
+ test_01_failure="""
+ def test_1():
+ assert False
+ """,
+ test_02_import_error="""
+ import asdfasdfasdf
+ def test_2():
+ assert True
+ """,
+ test_03_import_error="""
+ import asdfasdfasdf
+ def test_3():
+ assert True
+ """,
+ test_04_success="""
+ def test_4():
+ assert True
+ """,
+)
+
+
+def test_exit_on_collection_error(pytester: Pytester) -> None:
+ """Verify that all collection errors are collected and no tests executed"""
+ pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
+
+ res = pytester.runpytest()
+ assert res.ret == 2
+
+ res.stdout.fnmatch_lines(
+ [
+ "collected 2 items / 2 errors",
+ "*ERROR collecting test_02_import_error.py*",
+ "*No module named *asdfa*",
+ "*ERROR collecting test_03_import_error.py*",
+ "*No module named *asdfa*",
+ ]
+ )
+
+
+def test_exit_on_collection_with_maxfail_smaller_than_n_errors(
+ pytester: Pytester,
+) -> None:
+ """
+    Verify collection is aborted once maxfail errors are encountered, ignoring
+    further modules that would cause more collection errors.
+ """
+ pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
+
+ res = pytester.runpytest("--maxfail=1")
+ assert res.ret == 1
+ res.stdout.fnmatch_lines(
+ [
+ "collected 1 item / 1 error",
+ "*ERROR collecting test_02_import_error.py*",
+ "*No module named *asdfa*",
+ "*! stopping after 1 failures !*",
+ "*= 1 error in *",
+ ]
+ )
+ res.stdout.no_fnmatch_line("*test_03*")
+
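+
+# Note (editorial, not in the upstream pytest file): --maxfail counts collection
+# errors as failures, so with --maxfail=1 the session stops at the first
+# collection error above and test_03 is never even collected.
+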
+
+def test_exit_on_collection_with_maxfail_bigger_than_n_errors(
+ pytester: Pytester,
+) -> None:
+ """
+    Verify the test run aborts due to collection errors even if the maxfail count
+    of errors was not reached.
+ """
+ pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
+
+ res = pytester.runpytest("--maxfail=4")
+ assert res.ret == 2
+ res.stdout.fnmatch_lines(
+ [
+ "collected 2 items / 2 errors",
+ "*ERROR collecting test_02_import_error.py*",
+ "*No module named *asdfa*",
+ "*ERROR collecting test_03_import_error.py*",
+ "*No module named *asdfa*",
+ "*! Interrupted: 2 errors during collection !*",
+ "*= 2 errors in *",
+ ]
+ )
+
+
+def test_continue_on_collection_errors(pytester: Pytester) -> None:
+ """
+    Verify tests are executed even when collection errors occur, provided the
+    --continue-on-collection-errors flag is set.
+ """
+ pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
+
+ res = pytester.runpytest("--continue-on-collection-errors")
+ assert res.ret == 1
+
+ res.stdout.fnmatch_lines(
+ ["collected 2 items / 2 errors", "*1 failed, 1 passed, 2 errors*"]
+ )
+
+
+def test_continue_on_collection_errors_maxfail(pytester: Pytester) -> None:
+ """
+    Verify tests are executed even when collection errors occur and that maxfail
+    is honoured (collection errors count towards it).
+    4 tests: 2 collection errors + 1 failure + 1 success.
+    test_4 is never executed because the run uses --maxfail=3, which means it is
+    interrupted after the 2 collection errors + 1 failure.
+ """
+ pytester.makepyfile(**COLLECTION_ERROR_PY_FILES)
+
+ res = pytester.runpytest("--continue-on-collection-errors", "--maxfail=3")
+ assert res.ret == 1
+
+ res.stdout.fnmatch_lines(["collected 2 items / 2 errors", "*1 failed, 2 errors*"])
+
+
+def test_fixture_scope_sibling_conftests(pytester: Pytester) -> None:
+ """Regression test case for https://github.com/pytest-dev/pytest/issues/2836"""
+ foo_path = pytester.mkdir("foo")
+ foo_path.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def fix():
+ return 1
+ """
+ )
+ )
+ foo_path.joinpath("test_foo.py").write_text("def test_foo(fix): assert fix == 1")
+
+ # Tests in `food/` should not see the conftest fixture from `foo/`
+ food_path = pytester.mkpydir("food")
+ food_path.joinpath("test_food.py").write_text("def test_food(fix): assert fix == 1")
+
+ res = pytester.runpytest()
+ assert res.ret == 1
+
+ res.stdout.fnmatch_lines(
+ [
+ "*ERROR at setup of test_food*",
+ "E*fixture 'fix' not found",
+ "*1 passed, 1 error*",
+ ]
+ )
+
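+
+# Note (editorial, not in the upstream pytest file): the layout built by the test
+# above is
+#
+#   foo/conftest.py    <- defines the `fix` fixture
+#   foo/test_foo.py    <- may use `fix`
+#   food/test_food.py  <- sibling directory, must NOT see `fix`
+#
+# conftest.py fixtures are only visible to tests in the same directory or below,
+# hence the expected "1 passed, 1 error" outcome.
+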
+
+def test_collect_init_tests(pytester: Pytester) -> None:
+ """Check that we collect files from __init__.py files when they patch the 'python_files' (#3773)"""
+ p = pytester.copy_example("collect/collect_init_tests")
+ result = pytester.runpytest(p, "--collect-only")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "<Package tests>",
+ " <Module __init__.py>",
+ " <Function test_init>",
+ " <Module test_foo.py>",
+ " <Function test_foo>",
+ ]
+ )
+ result = pytester.runpytest("./tests", "--collect-only")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "<Package tests>",
+ " <Module __init__.py>",
+ " <Function test_init>",
+ " <Module test_foo.py>",
+ " <Function test_foo>",
+ ]
+ )
+ # Ignores duplicates with "." and pkginit (#4310).
+ result = pytester.runpytest("./tests", ".", "--collect-only")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "<Package tests>",
+ " <Module __init__.py>",
+ " <Function test_init>",
+ " <Module test_foo.py>",
+ " <Function test_foo>",
+ ]
+ )
+ # Same as before, but different order.
+ result = pytester.runpytest(".", "tests", "--collect-only")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 2 items",
+ "<Package tests>",
+ " <Module __init__.py>",
+ " <Function test_init>",
+ " <Module test_foo.py>",
+ " <Function test_foo>",
+ ]
+ )
+ result = pytester.runpytest("./tests/test_foo.py", "--collect-only")
+ result.stdout.fnmatch_lines(
+ ["<Package tests>", " <Module test_foo.py>", " <Function test_foo>"]
+ )
+ result.stdout.no_fnmatch_line("*test_init*")
+ result = pytester.runpytest("./tests/__init__.py", "--collect-only")
+ result.stdout.fnmatch_lines(
+ ["<Package tests>", " <Module __init__.py>", " <Function test_init>"]
+ )
+ result.stdout.no_fnmatch_line("*test_foo*")
+
+
+def test_collect_invalid_signature_message(pytester: Pytester) -> None:
+ """Check that we issue a proper message when we can't determine the signature of a test
+ function (#4026).
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+
+ class TestCase:
+ @pytest.fixture
+ def fix():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ ["Could not determine arguments of *.fix *: invalid method signature"]
+ )
+
+
+def test_collect_handles_raising_on_dunder_class(pytester: Pytester) -> None:
+ """Handle proxy classes like Django's LazySettings that might raise on
+ ``isinstance`` (#4266).
+ """
+ pytester.makepyfile(
+ """
+ class ImproperlyConfigured(Exception):
+ pass
+
+ class RaisesOnGetAttr(object):
+ def raises(self):
+ raise ImproperlyConfigured
+
+ __class__ = property(raises)
+
+ raises = RaisesOnGetAttr()
+
+
+ def test_1():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed in*"])
+ assert result.ret == 0
+
+
+def test_collect_with_chdir_during_import(pytester: Pytester) -> None:
+ subdir = pytester.mkdir("sub")
+ pytester.path.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """
+ import os
+ os.chdir(%r)
+ """
+ % (str(subdir),)
+ )
+ )
+ pytester.makepyfile(
+ """
+ def test_1():
+ import os
+ assert os.getcwd() == %r
+ """
+ % (str(subdir),)
+ )
+ with pytester.path.cwd():
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed in*"])
+ assert result.ret == 0
+
+ # Handles relative testpaths.
+ pytester.makeini(
+ """
+ [pytest]
+ testpaths = .
+ """
+ )
+ with pytester.path.cwd():
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["collected 1 item"])
+
+
+def test_collect_pyargs_with_testpaths(
+ pytester: Pytester, monkeypatch: MonkeyPatch
+) -> None:
+ testmod = pytester.mkdir("testmod")
+ # NOTE: __init__.py is not collected since it does not match python_files.
+ testmod.joinpath("__init__.py").write_text("def test_func(): pass")
+ testmod.joinpath("test_file.py").write_text("def test_func(): pass")
+
+ root = pytester.mkdir("root")
+ root.joinpath("pytest.ini").write_text(
+ textwrap.dedent(
+ """
+ [pytest]
+ addopts = --pyargs
+ testpaths = testmod
+ """
+ )
+ )
+ monkeypatch.setenv("PYTHONPATH", str(pytester.path), prepend=os.pathsep)
+ with root.cwd():
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*1 passed in*"])
+
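+
+# Note (editorial, not in the upstream pytest file): with "addopts = --pyargs",
+# the "testpaths" entry "testmod" is resolved as an importable module name (found
+# through the PYTHONPATH set above) rather than as a path relative to the ini
+# file, which is what the test above relies on.
+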
+
+def test_collect_symlink_file_arg(pytester: Pytester) -> None:
+ """Collect a direct symlink works even if it does not match python_files (#4325)."""
+ real = pytester.makepyfile(
+ real="""
+ def test_nodeid(request):
+ assert request.node.nodeid == "symlink.py::test_nodeid"
+ """
+ )
+ symlink = pytester.path.joinpath("symlink.py")
+ symlink_or_skip(real, symlink)
+ result = pytester.runpytest("-v", symlink)
+ result.stdout.fnmatch_lines(["symlink.py::test_nodeid PASSED*", "*1 passed in*"])
+ assert result.ret == 0
+
+
+def test_collect_symlink_out_of_tree(pytester: Pytester) -> None:
+ """Test collection of symlink via out-of-tree rootdir."""
+ sub = pytester.mkdir("sub")
+ real = sub.joinpath("test_real.py")
+ real.write_text(
+ textwrap.dedent(
+ """
+ def test_nodeid(request):
+ # Should not contain sub/ prefix.
+ assert request.node.nodeid == "test_real.py::test_nodeid"
+ """
+ ),
+ )
+
+ out_of_tree = pytester.mkdir("out_of_tree")
+ symlink_to_sub = out_of_tree.joinpath("symlink_to_sub")
+ symlink_or_skip(sub, symlink_to_sub)
+ os.chdir(sub)
+ result = pytester.runpytest("-vs", "--rootdir=%s" % sub, symlink_to_sub)
+ result.stdout.fnmatch_lines(
+ [
+ # Should not contain "sub/"!
+ "test_real.py::test_nodeid PASSED"
+ ]
+ )
+ assert result.ret == 0
+
+
+def test_collect_symlink_dir(pytester: Pytester) -> None:
+ """A symlinked directory is collected."""
+ dir = pytester.mkdir("dir")
+ dir.joinpath("test_it.py").write_text("def test_it(): pass", "utf-8")
+    # Create "symlink_dir" as a link pointing at "dir" (target first, link second).
+    symlink_or_skip(dir, pytester.path.joinpath("symlink_dir"))
+ result = pytester.runpytest()
+ result.assert_outcomes(passed=2)
+
+
+def test_collectignore_via_conftest(pytester: Pytester) -> None:
+ """collect_ignore in parent conftest skips importing child (issue #4592)."""
+ tests = pytester.mkpydir("tests")
+ tests.joinpath("conftest.py").write_text("collect_ignore = ['ignore_me']")
+
+ ignore_me = tests.joinpath("ignore_me")
+ ignore_me.mkdir()
+ ignore_me.joinpath("__init__.py").touch()
+ ignore_me.joinpath("conftest.py").write_text("assert 0, 'should_not_be_called'")
+
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+def test_collect_pkg_init_and_file_in_args(pytester: Pytester) -> None:
+ subdir = pytester.mkdir("sub")
+ init = subdir.joinpath("__init__.py")
+ init.write_text("def test_init(): pass")
+ p = subdir.joinpath("test_file.py")
+ p.write_text("def test_file(): pass")
+
+ # NOTE: without "-o python_files=*.py" this collects test_file.py twice.
+ # This changed/broke with "Add package scoped fixtures #2283" (2b1410895)
+ # initially (causing a RecursionError).
+ result = pytester.runpytest("-v", str(init), str(p))
+ result.stdout.fnmatch_lines(
+ [
+ "sub/test_file.py::test_file PASSED*",
+ "sub/test_file.py::test_file PASSED*",
+ "*2 passed in*",
+ ]
+ )
+
+ result = pytester.runpytest("-v", "-o", "python_files=*.py", str(init), str(p))
+ result.stdout.fnmatch_lines(
+ [
+ "sub/__init__.py::test_init PASSED*",
+ "sub/test_file.py::test_file PASSED*",
+ "*2 passed in*",
+ ]
+ )
+
+
+def test_collect_pkg_init_only(pytester: Pytester) -> None:
+ subdir = pytester.mkdir("sub")
+ init = subdir.joinpath("__init__.py")
+ init.write_text("def test_init(): pass")
+
+ result = pytester.runpytest(str(init))
+ result.stdout.fnmatch_lines(["*no tests ran in*"])
+
+ result = pytester.runpytest("-v", "-o", "python_files=*.py", str(init))
+ result.stdout.fnmatch_lines(["sub/__init__.py::test_init PASSED*", "*1 passed in*"])
+
+
+@pytest.mark.parametrize("use_pkg", (True, False))
+def test_collect_sub_with_symlinks(use_pkg: bool, pytester: Pytester) -> None:
+ """Collection works with symlinked files and broken symlinks"""
+ sub = pytester.mkdir("sub")
+ if use_pkg:
+ sub.joinpath("__init__.py").touch()
+ sub.joinpath("test_file.py").write_text("def test_file(): pass")
+
+ # Create a broken symlink.
+ symlink_or_skip("test_doesnotexist.py", sub.joinpath("test_broken.py"))
+
+ # Symlink that gets collected.
+ symlink_or_skip("test_file.py", sub.joinpath("test_symlink.py"))
+
+ result = pytester.runpytest("-v", str(sub))
+ result.stdout.fnmatch_lines(
+ [
+ "sub/test_file.py::test_file PASSED*",
+ "sub/test_symlink.py::test_file PASSED*",
+ "*2 passed in*",
+ ]
+ )
+
+
+def test_collector_respects_tbstyle(pytester: Pytester) -> None:
+ p1 = pytester.makepyfile("assert 0")
+ result = pytester.runpytest(p1, "--tb=native")
+ assert result.ret == ExitCode.INTERRUPTED
+ result.stdout.fnmatch_lines(
+ [
+ "*_ ERROR collecting test_collector_respects_tbstyle.py _*",
+ "Traceback (most recent call last):",
+ ' File "*/test_collector_respects_tbstyle.py", line 1, in <module>',
+ " assert 0",
+ "AssertionError: assert 0",
+ "*! Interrupted: 1 error during collection !*",
+ "*= 1 error in *",
+ ]
+ )
+
+
+def test_does_not_eagerly_collect_packages(pytester: Pytester) -> None:
+ pytester.makepyfile("def test(): pass")
+ pydir = pytester.mkpydir("foopkg")
+ pydir.joinpath("__init__.py").write_text("assert False")
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.OK
+
+
+def test_does_not_put_src_on_path(pytester: Pytester) -> None:
+ # `src` is not on sys.path so it should not be importable
+ ensure_file(pytester.path / "src/nope/__init__.py")
+ pytester.makepyfile(
+ "import pytest\n"
+ "def test():\n"
+ " with pytest.raises(ImportError):\n"
+ " import nope\n"
+ )
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.OK
+
+
+def test_fscollector_from_parent(pytester: Pytester, request: FixtureRequest) -> None:
+ """Ensure File.from_parent can forward custom arguments to the constructor.
+
+ Context: https://github.com/pytest-dev/pytest-cpp/pull/47
+ """
+
+ class MyCollector(pytest.File):
+ def __init__(self, *k, x, **kw):
+ super().__init__(*k, **kw)
+ self.x = x
+
+ collector = MyCollector.from_parent(
+ parent=request.session, path=pytester.path / "foo", x=10
+ )
+ assert collector.x == 10
+
+
+def test_class_from_parent(pytester: Pytester, request: FixtureRequest) -> None:
+ """Ensure Class.from_parent can forward custom arguments to the constructor."""
+
+ class MyCollector(pytest.Class):
+ def __init__(self, name, parent, x):
+ super().__init__(name, parent)
+ self.x = x
+
+ @classmethod
+ def from_parent(cls, parent, *, name, x):
+ return super().from_parent(parent=parent, name=name, x=x)
+
+ collector = MyCollector.from_parent(parent=request.session, name="foo", x=10)
+ assert collector.x == 10
+
+
+class TestImportModeImportlib:
+ def test_collect_duplicate_names(self, pytester: Pytester) -> None:
+ """--import-mode=importlib can import modules with same names that are not in packages."""
+ pytester.makepyfile(
+ **{
+ "tests_a/test_foo.py": "def test_foo1(): pass",
+ "tests_b/test_foo.py": "def test_foo2(): pass",
+ }
+ )
+ result = pytester.runpytest("-v", "--import-mode=importlib")
+ result.stdout.fnmatch_lines(
+ [
+ "tests_a/test_foo.py::test_foo1 *",
+ "tests_b/test_foo.py::test_foo2 *",
+ "* 2 passed in *",
+ ]
+ )
+
+ def test_conftest(self, pytester: Pytester) -> None:
+ """Directory containing conftest modules are not put in sys.path as a side-effect of
+ importing them."""
+ tests_dir = pytester.path.joinpath("tests")
+ pytester.makepyfile(
+ **{
+ "tests/conftest.py": "",
+ "tests/test_foo.py": """
+ import sys
+ def test_check():
+ assert r"{tests_dir}" not in sys.path
+ """.format(
+ tests_dir=tests_dir
+ ),
+ }
+ )
+ result = pytester.runpytest("-v", "--import-mode=importlib")
+ result.stdout.fnmatch_lines(["* 1 passed in *"])
+
+ def setup_conftest_and_foo(self, pytester: Pytester) -> None:
+ """Setup a tests folder to be used to test if modules in that folder can be imported
+ due to side-effects of --import-mode or not."""
+ pytester.makepyfile(
+ **{
+ "tests/conftest.py": "",
+ "tests/foo.py": """
+ def foo(): return 42
+ """,
+ "tests/test_foo.py": """
+ def test_check():
+ from foo import foo
+ assert foo() == 42
+ """,
+ }
+ )
+
+ def test_modules_importable_as_side_effect(self, pytester: Pytester) -> None:
+ """In import-modes `prepend` and `append`, we are able to import modules from folders
+ containing conftest.py files due to the side effect of changing sys.path."""
+ self.setup_conftest_and_foo(pytester)
+ result = pytester.runpytest("-v", "--import-mode=prepend")
+ result.stdout.fnmatch_lines(["* 1 passed in *"])
+
+ def test_modules_not_importable_as_side_effect(self, pytester: Pytester) -> None:
+ """In import-mode `importlib`, modules in folders containing conftest.py are not
+ importable, as don't change sys.path or sys.modules as side effect of importing
+ the conftest.py file.
+ """
+ self.setup_conftest_and_foo(pytester)
+ result = pytester.runpytest("-v", "--import-mode=importlib")
+ result.stdout.fnmatch_lines(
+ [
+ "*ModuleNotFoundError: No module named 'foo'",
+ "tests?test_foo.py:2: ModuleNotFoundError",
+ "* 1 failed in *",
+ ]
+ )
+
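+
+# Note (editorial, not in the upstream pytest file): the mode exercised by
+# TestImportModeImportlib is what users select with "--import-mode=importlib" on
+# the command line or, equivalently, via configuration such as
+#
+#   [pytest]
+#   addopts = --import-mode=importlib
+#
+# unlike the default "prepend"/"append" modes, it does not modify sys.path as a
+# side effect of importing test modules and conftest files.
+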
+
+def test_does_not_crash_on_error_from_decorated_function(pytester: Pytester) -> None:
+ """Regression test for an issue around bad exception formatting due to
+    assertion rewriting mangling line numbers (#4984)."""
+ pytester.makepyfile(
+ """
+ @pytest.fixture
+ def a(): return 4
+ """
+ )
+ result = pytester.runpytest()
+ # Not INTERNAL_ERROR
+ assert result.ret == ExitCode.INTERRUPTED
+
+
+def test_does_not_crash_on_recursive_symlink(pytester: Pytester) -> None:
+ """Regression test for an issue around recursive symlinks (#7951)."""
+ symlink_or_skip("recursive", pytester.path.joinpath("recursive"))
+ pytester.makepyfile(
+ """
+ def test_foo(): assert True
+ """
+ )
+ result = pytester.runpytest()
+
+ assert result.ret == ExitCode.OK
+ assert result.parseoutcomes() == {"passed": 1}
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_compat.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_compat.py
new file mode 100644
index 0000000000..37cf4a077d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_compat.py
@@ -0,0 +1,265 @@
+import enum
+import sys
+from functools import partial
+from functools import wraps
+from typing import TYPE_CHECKING
+from typing import Union
+
+import pytest
+from _pytest.compat import _PytestWrapper
+from _pytest.compat import assert_never
+from _pytest.compat import cached_property
+from _pytest.compat import get_real_func
+from _pytest.compat import is_generator
+from _pytest.compat import safe_getattr
+from _pytest.compat import safe_isclass
+from _pytest.outcomes import OutcomeException
+from _pytest.pytester import Pytester
+
+if TYPE_CHECKING:
+ from typing_extensions import Literal
+
+
+def test_is_generator() -> None:
+ def zap():
+ yield # pragma: no cover
+
+ def foo():
+ pass # pragma: no cover
+
+ assert is_generator(zap)
+ assert not is_generator(foo)
+
+
+def test_real_func_loop_limit() -> None:
+ class Evil:
+ def __init__(self):
+ self.left = 1000
+
+ def __repr__(self):
+ return f"<Evil left={self.left}>"
+
+ def __getattr__(self, attr):
+ if not self.left:
+ raise RuntimeError("it's over") # pragma: no cover
+ self.left -= 1
+ return self
+
+ evil = Evil()
+
+ with pytest.raises(
+ ValueError,
+ match=(
+ "could not find real function of <Evil left=800>\n"
+ "stopped at <Evil left=800>"
+ ),
+ ):
+ get_real_func(evil)
+
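+
+# Note (editorial, not in the upstream pytest file): the "left=800" in the
+# expected message reflects get_real_func giving up after a bounded number of
+# unwrapping attempts; by that point 200 attribute lookups have been made on the
+# Evil object, so `left` has dropped from 1000 to 800 when the ValueError is
+# raised.
+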
+
+def test_get_real_func() -> None:
+ """Check that get_real_func correctly unwraps decorators until reaching the real function"""
+
+ def decorator(f):
+ @wraps(f)
+ def inner():
+ pass # pragma: no cover
+
+ return inner
+
+ def func():
+ pass # pragma: no cover
+
+ wrapped_func = decorator(decorator(func))
+ assert get_real_func(wrapped_func) is func
+
+ wrapped_func2 = decorator(decorator(wrapped_func))
+ assert get_real_func(wrapped_func2) is func
+
+ # special case for __pytest_wrapped__ attribute: used to obtain the function up until the point
+ # a function was wrapped by pytest itself
+ wrapped_func2.__pytest_wrapped__ = _PytestWrapper(wrapped_func)
+ assert get_real_func(wrapped_func2) is wrapped_func
+
+
+def test_get_real_func_partial() -> None:
+ """Test get_real_func handles partial instances correctly"""
+
+ def foo(x):
+ return x
+
+ assert get_real_func(foo) is foo
+ assert get_real_func(partial(foo)) is foo
+
+
+@pytest.mark.skipif(
+    sys.version_info >= (3, 11), reason="asyncio.coroutine was removed in Python 3.11"
+)
+def test_is_generator_asyncio(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ from _pytest.compat import is_generator
+ import asyncio
+ @asyncio.coroutine
+ def baz():
+ yield from [1,2,3]
+
+ def test_is_generator_asyncio():
+ assert not is_generator(baz)
+ """
+ )
+ # avoid importing asyncio into pytest's own process,
+ # which in turn imports logging (#8)
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_is_generator_async_syntax(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ from _pytest.compat import is_generator
+ def test_is_generator_py35():
+ async def foo():
+ await foo()
+
+ async def bar():
+ pass
+
+ assert not is_generator(foo)
+ assert not is_generator(bar)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_is_generator_async_gen_syntax(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ from _pytest.compat import is_generator
+ def test_is_generator_py36():
+ async def foo():
+ yield
+ await foo()
+
+ async def bar():
+ yield
+
+ assert not is_generator(foo)
+ assert not is_generator(bar)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+class ErrorsHelper:
+ @property
+ def raise_baseexception(self):
+ raise BaseException("base exception should be raised")
+
+ @property
+ def raise_exception(self):
+ raise Exception("exception should be caught")
+
+ @property
+ def raise_fail_outcome(self):
+ pytest.fail("fail should be caught")
+
+
+def test_helper_failures() -> None:
+ helper = ErrorsHelper()
+ with pytest.raises(Exception):
+ helper.raise_exception
+ with pytest.raises(OutcomeException):
+ helper.raise_fail_outcome
+
+
+def test_safe_getattr() -> None:
+ helper = ErrorsHelper()
+ assert safe_getattr(helper, "raise_exception", "default") == "default"
+ assert safe_getattr(helper, "raise_fail_outcome", "default") == "default"
+ with pytest.raises(BaseException):
+ assert safe_getattr(helper, "raise_baseexception", "default")
+
+
+def test_safe_isclass() -> None:
+ assert safe_isclass(type) is True
+
+ class CrappyClass(Exception):
+ # Type ignored because it's bypassed intentionally.
+ @property # type: ignore
+ def __class__(self):
+ assert False, "Should be ignored"
+
+ assert safe_isclass(CrappyClass()) is False
+
+
+def test_cached_property() -> None:
+ ncalls = 0
+
+ class Class:
+ @cached_property
+ def prop(self) -> int:
+ nonlocal ncalls
+ ncalls += 1
+ return ncalls
+
+ c1 = Class()
+ assert ncalls == 0
+ assert c1.prop == 1
+ assert c1.prop == 1
+ c2 = Class()
+ assert ncalls == 1
+ assert c2.prop == 2
+ assert c1.prop == 1
+
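+
+# Note (editorial, not in the upstream pytest file): as the assertions above
+# encode, cached_property computes the value on first access and stores it per
+# instance -- c1 keeps seeing 1 while c2 gets its own value 2, and the getter is
+# never re-invoked for the same instance.
+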
+
+def test_assert_never_union() -> None:
+ x: Union[int, str] = 10
+
+ if isinstance(x, int):
+ pass
+ else:
+ with pytest.raises(AssertionError):
+ assert_never(x) # type: ignore[arg-type]
+
+ if isinstance(x, int):
+ pass
+ elif isinstance(x, str):
+ pass
+ else:
+ assert_never(x)
+
+
+def test_assert_never_enum() -> None:
+ E = enum.Enum("E", "a b")
+ x: E = E.a
+
+ if x is E.a:
+ pass
+ else:
+ with pytest.raises(AssertionError):
+ assert_never(x) # type: ignore[arg-type]
+
+ if x is E.a:
+ pass
+ elif x is E.b:
+ pass
+ else:
+ assert_never(x)
+
+
+def test_assert_never_literal() -> None:
+ x: Literal["a", "b"] = "a"
+
+ if x == "a":
+ pass
+ else:
+ with pytest.raises(AssertionError):
+ assert_never(x) # type: ignore[arg-type]
+
+ if x == "a":
+ pass
+ elif x == "b":
+ pass
+ else:
+ assert_never(x)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_config.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_config.py
new file mode 100644
index 0000000000..8013966f07
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_config.py
@@ -0,0 +1,2115 @@
+import os
+import re
+import sys
+import textwrap
+from pathlib import Path
+from typing import Dict
+from typing import List
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import Union
+
+import attr
+
+import _pytest._code
+import pytest
+from _pytest.compat import importlib_metadata
+from _pytest.config import _get_plugin_specs_as_list
+from _pytest.config import _iter_rewritable_modules
+from _pytest.config import _strtobool
+from _pytest.config import Config
+from _pytest.config import ConftestImportFailure
+from _pytest.config import ExitCode
+from _pytest.config import parse_warning_filter
+from _pytest.config.exceptions import UsageError
+from _pytest.config.findpaths import determine_setup
+from _pytest.config.findpaths import get_common_ancestor
+from _pytest.config.findpaths import locate_config
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pathlib import absolutepath
+from _pytest.pytester import Pytester
+
+
+class TestParseIni:
+ @pytest.mark.parametrize(
+ "section, filename", [("pytest", "pytest.ini"), ("tool:pytest", "setup.cfg")]
+ )
+ def test_getcfg_and_config(
+ self,
+ pytester: Pytester,
+ tmp_path: Path,
+ section: str,
+ filename: str,
+ monkeypatch: MonkeyPatch,
+ ) -> None:
+ sub = tmp_path / "sub"
+ sub.mkdir()
+ monkeypatch.chdir(sub)
+ (tmp_path / filename).write_text(
+ textwrap.dedent(
+ """\
+ [{section}]
+ name = value
+ """.format(
+ section=section
+ )
+ ),
+ encoding="utf-8",
+ )
+ _, _, cfg = locate_config([sub])
+ assert cfg["name"] == "value"
+ config = pytester.parseconfigure(str(sub))
+ assert config.inicfg["name"] == "value"
+
+ def test_setupcfg_uses_toolpytest_with_pytest(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile("def test(): pass")
+ pytester.makefile(
+ ".cfg",
+ setup="""
+ [tool:pytest]
+ testpaths=%s
+ [pytest]
+ testpaths=ignored
+ """
+ % p1.name,
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*, configfile: setup.cfg, *", "* 1 passed in *"])
+ assert result.ret == 0
+
+ def test_append_parse_args(
+ self, pytester: Pytester, tmp_path: Path, monkeypatch: MonkeyPatch
+ ) -> None:
+ monkeypatch.setenv("PYTEST_ADDOPTS", '--color no -rs --tb="short"')
+ tmp_path.joinpath("pytest.ini").write_text(
+ textwrap.dedent(
+ """\
+ [pytest]
+ addopts = --verbose
+ """
+ )
+ )
+ config = pytester.parseconfig(tmp_path)
+ assert config.option.color == "no"
+ assert config.option.reportchars == "s"
+ assert config.option.tbstyle == "short"
+ assert config.option.verbose
+
+ def test_tox_ini_wrong_version(self, pytester: Pytester) -> None:
+ pytester.makefile(
+ ".ini",
+ tox="""
+ [pytest]
+ minversion=999.0
+ """,
+ )
+ result = pytester.runpytest()
+ assert result.ret != 0
+ result.stderr.fnmatch_lines(
+ ["*tox.ini: 'minversion' requires pytest-999.0, actual pytest-*"]
+ )
+
+ @pytest.mark.parametrize(
+ "section, name",
+ [("tool:pytest", "setup.cfg"), ("pytest", "tox.ini"), ("pytest", "pytest.ini")],
+ )
+ def test_ini_names(self, pytester: Pytester, name, section) -> None:
+ pytester.path.joinpath(name).write_text(
+ textwrap.dedent(
+ """
+ [{section}]
+ minversion = 1.0
+ """.format(
+ section=section
+ )
+ )
+ )
+ config = pytester.parseconfig()
+ assert config.getini("minversion") == "1.0"
+
+ def test_pyproject_toml(self, pytester: Pytester) -> None:
+ pytester.makepyprojecttoml(
+ """
+ [tool.pytest.ini_options]
+ minversion = "1.0"
+ """
+ )
+ config = pytester.parseconfig()
+ assert config.getini("minversion") == "1.0"
+
+ def test_toxini_before_lower_pytestini(self, pytester: Pytester) -> None:
+ sub = pytester.mkdir("sub")
+ sub.joinpath("tox.ini").write_text(
+ textwrap.dedent(
+ """
+ [pytest]
+ minversion = 2.0
+ """
+ )
+ )
+ pytester.path.joinpath("pytest.ini").write_text(
+ textwrap.dedent(
+ """
+ [pytest]
+ minversion = 1.5
+ """
+ )
+ )
+ config = pytester.parseconfigure(sub)
+ assert config.getini("minversion") == "2.0"
+
+ def test_ini_parse_error(self, pytester: Pytester) -> None:
+ pytester.path.joinpath("pytest.ini").write_text("addopts = -x")
+ result = pytester.runpytest()
+ assert result.ret != 0
+ result.stderr.fnmatch_lines(["ERROR: *pytest.ini:1: no section header defined"])
+
+ @pytest.mark.xfail(reason="probably not needed")
+ def test_confcutdir(self, pytester: Pytester) -> None:
+ sub = pytester.mkdir("sub")
+ os.chdir(sub)
+ pytester.makeini(
+ """
+ [pytest]
+ addopts = --qwe
+ """
+ )
+ result = pytester.inline_run("--confcutdir=.")
+ assert result.ret == 0
+
+ @pytest.mark.parametrize(
+ "ini_file_text, invalid_keys, warning_output, exception_text",
+ [
+ pytest.param(
+ """
+ [pytest]
+ unknown_ini = value1
+ another_unknown_ini = value2
+ """,
+ ["unknown_ini", "another_unknown_ini"],
+ [
+ "=*= warnings summary =*=",
+ "*PytestConfigWarning:*Unknown config option: another_unknown_ini",
+ "*PytestConfigWarning:*Unknown config option: unknown_ini",
+ ],
+ "Unknown config option: another_unknown_ini",
+ id="2-unknowns",
+ ),
+ pytest.param(
+ """
+ [pytest]
+ unknown_ini = value1
+ minversion = 5.0.0
+ """,
+ ["unknown_ini"],
+ [
+ "=*= warnings summary =*=",
+ "*PytestConfigWarning:*Unknown config option: unknown_ini",
+ ],
+ "Unknown config option: unknown_ini",
+ id="1-unknown",
+ ),
+ pytest.param(
+ """
+ [some_other_header]
+ unknown_ini = value1
+ [pytest]
+ minversion = 5.0.0
+ """,
+ [],
+ [],
+ "",
+ id="unknown-in-other-header",
+ ),
+ pytest.param(
+ """
+ [pytest]
+ minversion = 5.0.0
+ """,
+ [],
+ [],
+ "",
+ id="no-unknowns",
+ ),
+ pytest.param(
+ """
+ [pytest]
+ conftest_ini_key = 1
+ """,
+ [],
+ [],
+ "",
+ id="1-known",
+ ),
+ ],
+ )
+ @pytest.mark.filterwarnings("default")
+ def test_invalid_config_options(
+ self,
+ pytester: Pytester,
+ ini_file_text,
+ invalid_keys,
+ warning_output,
+ exception_text,
+ ) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("conftest_ini_key", "")
+ """
+ )
+ pytester.makepyfile("def test(): pass")
+ pytester.makeini(ini_file_text)
+
+ config = pytester.parseconfig()
+ assert sorted(config._get_unknown_ini_keys()) == sorted(invalid_keys)
+
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(warning_output)
+
+ result = pytester.runpytest("--strict-config")
+ if exception_text:
+ result.stderr.fnmatch_lines("ERROR: " + exception_text)
+ assert result.ret == pytest.ExitCode.USAGE_ERROR
+ else:
+ result.stderr.no_fnmatch_line(exception_text)
+ assert result.ret == pytest.ExitCode.OK
+
+ @pytest.mark.filterwarnings("default")
+ def test_silence_unknown_key_warning(self, pytester: Pytester) -> None:
+ """Unknown config key warnings can be silenced using filterwarnings (#7620)"""
+ pytester.makeini(
+ """
+ [pytest]
+ filterwarnings =
+ ignore:Unknown config option:pytest.PytestConfigWarning
+ foobar=1
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.no_fnmatch_line("*PytestConfigWarning*")
+
+ @pytest.mark.filterwarnings("default::pytest.PytestConfigWarning")
+ def test_disable_warnings_plugin_disables_config_warnings(
+ self, pytester: Pytester
+ ) -> None:
+ """Disabling 'warnings' plugin also disables config time warnings"""
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_configure(config):
+ config.issue_config_time_warning(
+ pytest.PytestConfigWarning("custom config warning"),
+ stacklevel=2,
+ )
+ """
+ )
+ result = pytester.runpytest("-pno:warnings")
+ result.stdout.no_fnmatch_line("*PytestConfigWarning*")
+
+ @pytest.mark.parametrize(
+ "ini_file_text, plugin_version, exception_text",
+ [
+ pytest.param(
+ """
+ [pytest]
+ required_plugins = a z
+ """,
+ "1.5",
+ "Missing required plugins: a, z",
+ id="2-missing",
+ ),
+ pytest.param(
+ """
+ [pytest]
+ required_plugins = a z myplugin
+ """,
+ "1.5",
+ "Missing required plugins: a, z",
+ id="2-missing-1-ok",
+ ),
+ pytest.param(
+ """
+ [pytest]
+ required_plugins = myplugin
+ """,
+ "1.5",
+ None,
+ id="1-ok",
+ ),
+ pytest.param(
+ """
+ [pytest]
+ required_plugins = myplugin==1.5
+ """,
+ "1.5",
+ None,
+ id="1-ok-pin-exact",
+ ),
+ pytest.param(
+ """
+ [pytest]
+ required_plugins = myplugin>1.0,<2.0
+ """,
+ "1.5",
+ None,
+ id="1-ok-pin-loose",
+ ),
+ pytest.param(
+ """
+ [pytest]
+ required_plugins = myplugin
+ """,
+ "1.5a1",
+ None,
+ id="1-ok-prerelease",
+ ),
+ pytest.param(
+ """
+ [pytest]
+ required_plugins = myplugin==1.6
+ """,
+ "1.5",
+ "Missing required plugins: myplugin==1.6",
+ id="missing-version",
+ ),
+ pytest.param(
+ """
+ [pytest]
+ required_plugins = myplugin==1.6 other==1.0
+ """,
+ "1.5",
+ "Missing required plugins: myplugin==1.6, other==1.0",
+ id="missing-versions",
+ ),
+ pytest.param(
+ """
+ [some_other_header]
+ required_plugins = won't be triggered
+ [pytest]
+ """,
+ "1.5",
+ None,
+ id="invalid-header",
+ ),
+ ],
+ )
+ def test_missing_required_plugins(
+ self,
+ pytester: Pytester,
+ monkeypatch: MonkeyPatch,
+ ini_file_text: str,
+ plugin_version: str,
+ exception_text: str,
+ ) -> None:
+ """Check 'required_plugins' option with various settings.
+
+ This test installs a mock "myplugin-1.5" which is used in the parametrized test cases.
+ """
+
+ @attr.s
+ class DummyEntryPoint:
+ name = attr.ib()
+ module = attr.ib()
+ group = "pytest11"
+
+ def load(self):
+ __import__(self.module)
+ return sys.modules[self.module]
+
+ entry_points = [
+ DummyEntryPoint("myplugin1", "myplugin1_module"),
+ ]
+
+ @attr.s
+ class DummyDist:
+ entry_points = attr.ib()
+ files = ()
+ version = plugin_version
+
+ @property
+ def metadata(self):
+ return {"name": "myplugin"}
+
+ def my_dists():
+ return [DummyDist(entry_points)]
+
+ pytester.makepyfile(myplugin1_module="# my plugin module")
+ pytester.syspathinsert()
+
+ monkeypatch.setattr(importlib_metadata, "distributions", my_dists)
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
+
+ pytester.makeini(ini_file_text)
+
+ if exception_text:
+ with pytest.raises(pytest.UsageError, match=exception_text):
+ pytester.parseconfig()
+ else:
+ pytester.parseconfig()
+
+ def test_early_config_cmdline(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ """early_config contains options registered by third-party plugins.
+
+ This is a regression involving pytest-cov (and possibly others) introduced in #7700.
+ """
+ pytester.makepyfile(
+ myplugin="""
+ def pytest_addoption(parser):
+ parser.addoption('--foo', default=None, dest='foo')
+
+ def pytest_load_initial_conftests(early_config, parser, args):
+ assert early_config.known_args_namespace.foo == "1"
+ """
+ )
+ monkeypatch.setenv("PYTEST_PLUGINS", "myplugin")
+ pytester.syspathinsert()
+ result = pytester.runpytest("--foo=1")
+ result.stdout.fnmatch_lines("* no tests ran in *")
+
+
+class TestConfigCmdlineParsing:
+ def test_parsing_again_fails(self, pytester: Pytester) -> None:
+ config = pytester.parseconfig()
+ pytest.raises(AssertionError, lambda: config.parse([]))
+
+ def test_explicitly_specified_config_file_is_loaded(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("custom", "")
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ custom = 0
+ """
+ )
+ pytester.makefile(
+ ".ini",
+ custom="""
+ [pytest]
+ custom = 1
+ """,
+ )
+ config = pytester.parseconfig("-c", "custom.ini")
+ assert config.getini("custom") == "1"
+
+ pytester.makefile(
+ ".cfg",
+ custom_tool_pytest_section="""
+ [tool:pytest]
+ custom = 1
+ """,
+ )
+ config = pytester.parseconfig("-c", "custom_tool_pytest_section.cfg")
+ assert config.getini("custom") == "1"
+
+ pytester.makefile(
+ ".toml",
+ custom="""
+ [tool.pytest.ini_options]
+ custom = 1
+ value = [
+ ] # this is here on purpose, as it makes this an invalid '.ini' file
+ """,
+ )
+ config = pytester.parseconfig("-c", "custom.toml")
+ assert config.getini("custom") == "1"
+
+ def test_absolute_win32_path(self, pytester: Pytester) -> None:
+ temp_ini_file = pytester.makefile(
+ ".ini",
+ custom="""
+ [pytest]
+ addopts = --version
+ """,
+ )
+ from os.path import normpath
+
+ temp_ini_file_norm = normpath(str(temp_ini_file))
+ ret = pytest.main(["-c", temp_ini_file_norm])
+ assert ret == ExitCode.OK
+
+
+class TestConfigAPI:
+ def test_config_trace(self, pytester: Pytester) -> None:
+ config = pytester.parseconfig()
+ values: List[str] = []
+ config.trace.root.setwriter(values.append)
+ config.trace("hello")
+ assert len(values) == 1
+ assert values[0] == "hello [config]\n"
+
+ def test_config_getoption(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addoption("--hello", "-X", dest="hello")
+ """
+ )
+ config = pytester.parseconfig("--hello=this")
+ for x in ("hello", "--hello", "-X"):
+ assert config.getoption(x) == "this"
+ pytest.raises(ValueError, config.getoption, "qweqwe")
+
+ def test_config_getoption_unicode(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addoption('--hello', type=str)
+ """
+ )
+ config = pytester.parseconfig("--hello=this")
+ assert config.getoption("hello") == "this"
+
+ def test_config_getvalueorskip(self, pytester: Pytester) -> None:
+ config = pytester.parseconfig()
+ pytest.raises(pytest.skip.Exception, config.getvalueorskip, "hello")
+ verbose = config.getvalueorskip("verbose")
+ assert verbose == config.option.verbose
+
+ def test_config_getvalueorskip_None(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addoption("--hello")
+ """
+ )
+ config = pytester.parseconfig()
+ with pytest.raises(pytest.skip.Exception):
+ config.getvalueorskip("hello")
+
+ def test_getoption(self, pytester: Pytester) -> None:
+ config = pytester.parseconfig()
+ with pytest.raises(ValueError):
+ config.getvalue("x")
+ assert config.getoption("x", 1) == 1
+
+ def test_getconftest_pathlist(self, pytester: Pytester, tmp_path: Path) -> None:
+ somepath = tmp_path.joinpath("x", "y", "z")
+ p = tmp_path.joinpath("conftest.py")
+ p.write_text(f"mylist = {['.', str(somepath)]}")
+ config = pytester.parseconfigure(p)
+ assert (
+ config._getconftest_pathlist("notexist", path=tmp_path, rootpath=tmp_path)
+ is None
+ )
+ pl = (
+ config._getconftest_pathlist("mylist", path=tmp_path, rootpath=tmp_path)
+ or []
+ )
+ print(pl)
+ assert len(pl) == 2
+ assert pl[0] == tmp_path
+ assert pl[1] == somepath
+
+ @pytest.mark.parametrize("maybe_type", ["not passed", "None", '"string"'])
+ def test_addini(self, pytester: Pytester, maybe_type: str) -> None:
+ if maybe_type == "not passed":
+ type_string = ""
+ else:
+ type_string = f", {maybe_type}"
+
+ pytester.makeconftest(
+ f"""
+ def pytest_addoption(parser):
+ parser.addini("myname", "my new ini value"{type_string})
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ myname=hello
+ """
+ )
+ config = pytester.parseconfig()
+ val = config.getini("myname")
+ assert val == "hello"
+ pytest.raises(ValueError, config.getini, "other")
+
+ @pytest.mark.parametrize("config_type", ["ini", "pyproject"])
+ def test_addini_paths(self, pytester: Pytester, config_type: str) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("paths", "my new ini value", type="paths")
+ parser.addini("abc", "abc value")
+ """
+ )
+ if config_type == "ini":
+ inipath = pytester.makeini(
+ """
+ [pytest]
+ paths=hello world/sub.py
+ """
+ )
+ elif config_type == "pyproject":
+ inipath = pytester.makepyprojecttoml(
+ """
+ [tool.pytest.ini_options]
+ paths=["hello", "world/sub.py"]
+ """
+ )
+ config = pytester.parseconfig()
+ values = config.getini("paths")
+ assert len(values) == 2
+ assert values[0] == inipath.parent.joinpath("hello")
+ assert values[1] == inipath.parent.joinpath("world/sub.py")
+ pytest.raises(ValueError, config.getini, "other")
+
+ def make_conftest_for_args(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("args", "new args", type="args")
+ parser.addini("a2", "", "args", default="1 2 3".split())
+ """
+ )
+
+ def test_addini_args_ini_files(self, pytester: Pytester) -> None:
+ self.make_conftest_for_args(pytester)
+ pytester.makeini(
+ """
+ [pytest]
+ args=123 "123 hello" "this"
+ """
+ )
+ self.check_config_args(pytester)
+
+ def test_addini_args_pyproject_toml(self, pytester: Pytester) -> None:
+ self.make_conftest_for_args(pytester)
+ pytester.makepyprojecttoml(
+ """
+ [tool.pytest.ini_options]
+ args = ["123", "123 hello", "this"]
+ """
+ )
+ self.check_config_args(pytester)
+
+ def check_config_args(self, pytester: Pytester) -> None:
+ config = pytester.parseconfig()
+ values = config.getini("args")
+ assert values == ["123", "123 hello", "this"]
+ values = config.getini("a2")
+ assert values == list("123")
+
+ def make_conftest_for_linelist(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("xy", "", type="linelist")
+ parser.addini("a2", "", "linelist")
+ """
+ )
+
+ def test_addini_linelist_ini_files(self, pytester: Pytester) -> None:
+ self.make_conftest_for_linelist(pytester)
+ pytester.makeini(
+ """
+ [pytest]
+ xy= 123 345
+ second line
+ """
+ )
+ self.check_config_linelist(pytester)
+
+ def test_addini_linelist_pprojecttoml(self, pytester: Pytester) -> None:
+ self.make_conftest_for_linelist(pytester)
+ pytester.makepyprojecttoml(
+ """
+ [tool.pytest.ini_options]
+ xy = ["123 345", "second line"]
+ """
+ )
+ self.check_config_linelist(pytester)
+
+ def check_config_linelist(self, pytester: Pytester) -> None:
+ config = pytester.parseconfig()
+ values = config.getini("xy")
+ assert len(values) == 2
+ assert values == ["123 345", "second line"]
+ values = config.getini("a2")
+ assert values == []
+
+ @pytest.mark.parametrize(
+ "str_val, bool_val", [("True", True), ("no", False), ("no-ini", True)]
+ )
+ def test_addini_bool(
+ self, pytester: Pytester, str_val: str, bool_val: bool
+ ) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("strip", "", type="bool", default=True)
+ """
+ )
+ if str_val != "no-ini":
+ pytester.makeini(
+ """
+ [pytest]
+ strip=%s
+ """
+ % str_val
+ )
+ config = pytester.parseconfig()
+ assert config.getini("strip") is bool_val
+
+ def test_addinivalue_line_existing(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("xy", "", type="linelist")
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ xy= 123
+ """
+ )
+ config = pytester.parseconfig()
+ values = config.getini("xy")
+ assert len(values) == 1
+ assert values == ["123"]
+ config.addinivalue_line("xy", "456")
+ values = config.getini("xy")
+ assert len(values) == 2
+ assert values == ["123", "456"]
+
+ def test_addinivalue_line_new(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("xy", "", type="linelist")
+ """
+ )
+ config = pytester.parseconfig()
+ assert not config.getini("xy")
+ config.addinivalue_line("xy", "456")
+ values = config.getini("xy")
+ assert len(values) == 1
+ assert values == ["456"]
+ config.addinivalue_line("xy", "123")
+ values = config.getini("xy")
+ assert len(values) == 2
+ assert values == ["456", "123"]
+
+ def test_confcutdir_check_isdir(self, pytester: Pytester) -> None:
+ """Give an error if --confcutdir is not a valid directory (#2078)"""
+ exp_match = r"^--confcutdir must be a directory, given: "
+ with pytest.raises(pytest.UsageError, match=exp_match):
+ pytester.parseconfig("--confcutdir", pytester.path.joinpath("file"))
+ with pytest.raises(pytest.UsageError, match=exp_match):
+ pytester.parseconfig("--confcutdir", pytester.path.joinpath("nonexistent"))
+
+ p = pytester.mkdir("dir")
+ config = pytester.parseconfig("--confcutdir", p)
+ assert config.getoption("confcutdir") == str(p)
+
+ @pytest.mark.parametrize(
+ "names, expected",
+ [
+            # For dist-info based distributions, the root entries are files,
+            # as they will be put on PYTHONPATH.
+ (["bar.py"], ["bar"]),
+ (["foo/bar.py"], ["bar"]),
+ (["foo/bar.pyc"], []),
+ (["foo/__init__.py"], ["foo"]),
+ (["bar/__init__.py", "xz.py"], ["bar", "xz"]),
+ (["setup.py"], []),
+            # For egg based distributions, the root contains the files from the dist root.
+ (["src/bar/__init__.py"], ["bar"]),
+ (["src/bar/__init__.py", "setup.py"], ["bar"]),
+ (["source/python/bar/__init__.py", "setup.py"], ["bar"]),
+ ],
+ )
+ def test_iter_rewritable_modules(self, names, expected) -> None:
+ assert list(_iter_rewritable_modules(names)) == expected
+
+
+class TestConfigFromdictargs:
+ def test_basic_behavior(self, _sys_snapshot) -> None:
+ option_dict = {"verbose": 444, "foo": "bar", "capture": "no"}
+ args = ["a", "b"]
+
+ config = Config.fromdictargs(option_dict, args)
+ with pytest.raises(AssertionError):
+ config.parse(["should refuse to parse again"])
+ assert config.option.verbose == 444
+ assert config.option.foo == "bar"
+ assert config.option.capture == "no"
+ assert config.args == args
+
+ def test_invocation_params_args(self, _sys_snapshot) -> None:
+ """Show that fromdictargs can handle args in their "orig" format"""
+ option_dict: Dict[str, object] = {}
+ args = ["-vvvv", "-s", "a", "b"]
+
+ config = Config.fromdictargs(option_dict, args)
+ assert config.args == ["a", "b"]
+ assert config.invocation_params.args == tuple(args)
+ assert config.option.verbose == 4
+ assert config.option.capture == "no"
+
+ def test_inifilename(self, tmp_path: Path) -> None:
+ d1 = tmp_path.joinpath("foo")
+ d1.mkdir()
+ p1 = d1.joinpath("bar.ini")
+ p1.touch()
+ p1.write_text(
+ textwrap.dedent(
+ """\
+ [pytest]
+ name = value
+ """
+ )
+ )
+
+ inifilename = "../../foo/bar.ini"
+ option_dict = {"inifilename": inifilename, "capture": "no"}
+
+ cwd = tmp_path.joinpath("a/b")
+ cwd.mkdir(parents=True)
+ p2 = cwd.joinpath("pytest.ini")
+ p2.touch()
+ p2.write_text(
+ textwrap.dedent(
+ """\
+ [pytest]
+ name = wrong-value
+ should_not_be_set = true
+ """
+ )
+ )
+ with MonkeyPatch.context() as mp:
+ mp.chdir(cwd)
+ config = Config.fromdictargs(option_dict, ())
+ inipath = absolutepath(inifilename)
+
+ assert config.args == [str(cwd)]
+ assert config.option.inifilename == inifilename
+ assert config.option.capture == "no"
+
+ # this indicates this is the file used for getting configuration values
+ assert config.inipath == inipath
+ assert config.inicfg.get("name") == "value"
+ assert config.inicfg.get("should_not_be_set") is None
+
+
+def test_options_on_small_file_do_not_blow_up(pytester: Pytester) -> None:
+ def runfiletest(opts: Sequence[str]) -> None:
+ reprec = pytester.inline_run(*opts)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 2
+ assert skipped == passed == 0
+
+ path = str(
+ pytester.makepyfile(
+ """
+ def test_f1(): assert 0
+ def test_f2(): assert 0
+ """
+ )
+ )
+
+ runfiletest([path])
+ runfiletest(["-l", path])
+ runfiletest(["-s", path])
+ runfiletest(["--tb=no", path])
+ runfiletest(["--tb=short", path])
+ runfiletest(["--tb=long", path])
+ runfiletest(["--fulltrace", path])
+ runfiletest(["--traceconfig", path])
+ runfiletest(["-v", path])
+ runfiletest(["-v", "-v", path])
+
+
+def test_preparse_ordering_with_setuptools(
+ pytester: Pytester, monkeypatch: MonkeyPatch
+) -> None:
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
+
+ class EntryPoint:
+ name = "mytestplugin"
+ group = "pytest11"
+
+ def load(self):
+ class PseudoPlugin:
+ x = 42
+
+ return PseudoPlugin()
+
+ class Dist:
+ files = ()
+ metadata = {"name": "foo"}
+ entry_points = (EntryPoint(),)
+
+ def my_dists():
+ return (Dist,)
+
+ monkeypatch.setattr(importlib_metadata, "distributions", my_dists)
+ pytester.makeconftest(
+ """
+ pytest_plugins = "mytestplugin",
+ """
+ )
+ monkeypatch.setenv("PYTEST_PLUGINS", "mytestplugin")
+ config = pytester.parseconfig()
+ plugin = config.pluginmanager.getplugin("mytestplugin")
+ assert plugin.x == 42
+
+
+def test_setuptools_importerror_issue1479(
+ pytester: Pytester, monkeypatch: MonkeyPatch
+) -> None:
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
+
+ class DummyEntryPoint:
+ name = "mytestplugin"
+ group = "pytest11"
+
+ def load(self):
+ raise ImportError("Don't hide me!")
+
+ class Distribution:
+ version = "1.0"
+ files = ("foo.txt",)
+ metadata = {"name": "foo"}
+ entry_points = (DummyEntryPoint(),)
+
+ def distributions():
+ return (Distribution(),)
+
+ monkeypatch.setattr(importlib_metadata, "distributions", distributions)
+ with pytest.raises(ImportError):
+ pytester.parseconfig()
+
+
+def test_importlib_metadata_broken_distribution(
+ pytester: Pytester, monkeypatch: MonkeyPatch
+) -> None:
+ """Integration test for broken distributions with 'files' metadata being None (#5389)"""
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
+
+ class DummyEntryPoint:
+ name = "mytestplugin"
+ group = "pytest11"
+
+ def load(self):
+ return object()
+
+ class Distribution:
+ version = "1.0"
+ files = None
+ metadata = {"name": "foo"}
+ entry_points = (DummyEntryPoint(),)
+
+ def distributions():
+ return (Distribution(),)
+
+ monkeypatch.setattr(importlib_metadata, "distributions", distributions)
+ pytester.parseconfig()
+
+
+@pytest.mark.parametrize("block_it", [True, False])
+def test_plugin_preparse_prevents_setuptools_loading(
+ pytester: Pytester, monkeypatch: MonkeyPatch, block_it: bool
+) -> None:
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
+
+ plugin_module_placeholder = object()
+
+ class DummyEntryPoint:
+ name = "mytestplugin"
+ group = "pytest11"
+
+ def load(self):
+ return plugin_module_placeholder
+
+ class Distribution:
+ version = "1.0"
+ files = ("foo.txt",)
+ metadata = {"name": "foo"}
+ entry_points = (DummyEntryPoint(),)
+
+ def distributions():
+ return (Distribution(),)
+
+ monkeypatch.setattr(importlib_metadata, "distributions", distributions)
+ args = ("-p", "no:mytestplugin") if block_it else ()
+ config = pytester.parseconfig(*args)
+ config.pluginmanager.import_plugin("mytestplugin")
+ if block_it:
+ assert "mytestplugin" not in sys.modules
+ assert config.pluginmanager.get_plugin("mytestplugin") is None
+ else:
+ assert (
+ config.pluginmanager.get_plugin("mytestplugin") is plugin_module_placeholder
+ )
+
+
+@pytest.mark.parametrize(
+ "parse_args,should_load", [(("-p", "mytestplugin"), True), ((), False)]
+)
+def test_disable_plugin_autoload(
+ pytester: Pytester,
+ monkeypatch: MonkeyPatch,
+ parse_args: Union[Tuple[str, str], Tuple[()]],
+ should_load: bool,
+) -> None:
+ class DummyEntryPoint:
+ project_name = name = "mytestplugin"
+ group = "pytest11"
+ version = "1.0"
+
+ def load(self):
+ return sys.modules[self.name]
+
+ class Distribution:
+ metadata = {"name": "foo"}
+ entry_points = (DummyEntryPoint(),)
+ files = ()
+
+ class PseudoPlugin:
+ x = 42
+
+ attrs_used = []
+
+ def __getattr__(self, name):
+ assert name == "__loader__"
+ self.attrs_used.append(name)
+ return object()
+
+ def distributions():
+ return (Distribution(),)
+
+ monkeypatch.setenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "1")
+ monkeypatch.setattr(importlib_metadata, "distributions", distributions)
+ monkeypatch.setitem(sys.modules, "mytestplugin", PseudoPlugin()) # type: ignore[misc]
+ config = pytester.parseconfig(*parse_args)
+ has_loaded = config.pluginmanager.get_plugin("mytestplugin") is not None
+ assert has_loaded == should_load
+ if should_load:
+ assert PseudoPlugin.attrs_used == ["__loader__"]
+ else:
+ assert PseudoPlugin.attrs_used == []
+
+
+def test_plugin_loading_order(pytester: Pytester) -> None:
+ """Test order of plugin loading with `-p`."""
+ p1 = pytester.makepyfile(
+ """
+ def test_terminal_plugin(request):
+ import myplugin
+ assert myplugin.terminal_plugin == [False, True]
+ """,
+ **{
+ "myplugin": """
+ terminal_plugin = []
+
+ def pytest_configure(config):
+ terminal_plugin.append(bool(config.pluginmanager.get_plugin("terminalreporter")))
+
+ def pytest_sessionstart(session):
+ config = session.config
+ terminal_plugin.append(bool(config.pluginmanager.get_plugin("terminalreporter")))
+ """
+ },
+ )
+ pytester.syspathinsert()
+ result = pytester.runpytest("-p", "myplugin", str(p1))
+ assert result.ret == 0
+
+
+def test_cmdline_processargs_simple(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_cmdline_preparse(args):
+ args.append("-h")
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*pytest*", "*-h*"])
+
+
+def test_invalid_options_show_extra_information(pytester: Pytester) -> None:
+    """Display extra information when pytest exits due to unrecognized
+    options on the command line."""
+ pytester.makeini(
+ """
+ [pytest]
+ addopts = --invalid-option
+ """
+ )
+ result = pytester.runpytest()
+ result.stderr.fnmatch_lines(
+ [
+ "*error: unrecognized arguments: --invalid-option*",
+ "* inifile: %s*" % pytester.path.joinpath("tox.ini"),
+ "* rootdir: %s*" % pytester.path,
+ ]
+ )
+
+
+@pytest.mark.parametrize(
+ "args",
+ [
+ ["dir1", "dir2", "-v"],
+ ["dir1", "-v", "dir2"],
+ ["dir2", "-v", "dir1"],
+ ["-v", "dir2", "dir1"],
+ ],
+)
+def test_consider_args_after_options_for_rootdir(
+ pytester: Pytester, args: List[str]
+) -> None:
+    """
+    Consider all command-line arguments for rootdir discovery, even if they
+    happen to occur after an option. #949
+    """
+    # Replace "dir1" and "dir2" in "args" with their real directory paths.
+ root = pytester.mkdir("myroot")
+ d1 = root.joinpath("dir1")
+ d1.mkdir()
+ d2 = root.joinpath("dir2")
+ d2.mkdir()
+ for i, arg in enumerate(args):
+ if arg == "dir1":
+ args[i] = str(d1)
+ elif arg == "dir2":
+ args[i] = str(d2)
+ with MonkeyPatch.context() as mp:
+ mp.chdir(root)
+ result = pytester.runpytest(*args)
+ result.stdout.fnmatch_lines(["*rootdir: *myroot"])
+
+
+def test_toolongargs_issue224(pytester: Pytester) -> None:
+ result = pytester.runpytest("-m", "hello" * 500)
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+def test_config_in_subdirectory_colon_command_line_issue2148(
+ pytester: Pytester,
+) -> None:
+ conftest_source = """
+ def pytest_addoption(parser):
+ parser.addini('foo', 'foo')
+ """
+
+ pytester.makefile(
+ ".ini",
+ **{"pytest": "[pytest]\nfoo = root", "subdir/pytest": "[pytest]\nfoo = subdir"},
+ )
+
+ pytester.makepyfile(
+ **{
+ "conftest": conftest_source,
+ "subdir/conftest": conftest_source,
+ "subdir/test_foo": """\
+ def test_foo(pytestconfig):
+ assert pytestconfig.getini('foo') == 'subdir'
+ """,
+ }
+ )
+
+ result = pytester.runpytest("subdir/test_foo.py::test_foo")
+ assert result.ret == 0
+
+
+def test_notify_exception(pytester: Pytester, capfd) -> None:
+ config = pytester.parseconfig()
+ with pytest.raises(ValueError) as excinfo:
+ raise ValueError(1)
+ config.notify_exception(excinfo, config.option)
+ _, err = capfd.readouterr()
+ assert "ValueError" in err
+
+ class A:
+ def pytest_internalerror(self):
+ return True
+
+ config.pluginmanager.register(A())
+ config.notify_exception(excinfo, config.option)
+ _, err = capfd.readouterr()
+ assert not err
+
+ config = pytester.parseconfig("-p", "no:terminal")
+ with pytest.raises(ValueError) as excinfo:
+ raise ValueError(1)
+ config.notify_exception(excinfo, config.option)
+ _, err = capfd.readouterr()
+ assert "ValueError" in err
+
+
+def test_no_terminal_discovery_error(pytester: Pytester) -> None:
+ pytester.makepyfile("raise TypeError('oops!')")
+ result = pytester.runpytest("-p", "no:terminal", "--collect-only")
+ assert result.ret == ExitCode.INTERRUPTED
+
+
+def test_load_initial_conftest_last_ordering(_config_for_test):
+ pm = _config_for_test.pluginmanager
+
+ class My:
+ def pytest_load_initial_conftests(self):
+ pass
+
+ m = My()
+ pm.register(m)
+ hc = pm.hook.pytest_load_initial_conftests
+ hookimpls = [
+ (
+ hookimpl.function.__module__,
+ "wrapper" if hookimpl.hookwrapper else "nonwrapper",
+ )
+ for hookimpl in hc.get_hookimpls()
+ ]
+ assert hookimpls == [
+ ("_pytest.config", "nonwrapper"),
+ (m.__module__, "nonwrapper"),
+ ("_pytest.legacypath", "nonwrapper"),
+ ("_pytest.python_path", "nonwrapper"),
+ ("_pytest.capture", "wrapper"),
+ ("_pytest.warnings", "wrapper"),
+ ]
+
+
+def test_get_plugin_specs_as_list() -> None:
+ def exp_match(val: object) -> str:
+ return (
+ "Plugins may be specified as a sequence or a ','-separated string of plugin names. Got: %s"
+ % re.escape(repr(val))
+ )
+
+ with pytest.raises(pytest.UsageError, match=exp_match({"foo"})):
+ _get_plugin_specs_as_list({"foo"}) # type: ignore[arg-type]
+ with pytest.raises(pytest.UsageError, match=exp_match({})):
+ _get_plugin_specs_as_list(dict()) # type: ignore[arg-type]
+
+ assert _get_plugin_specs_as_list(None) == []
+ assert _get_plugin_specs_as_list("") == []
+ assert _get_plugin_specs_as_list("foo") == ["foo"]
+ assert _get_plugin_specs_as_list("foo,bar") == ["foo", "bar"]
+ assert _get_plugin_specs_as_list(["foo", "bar"]) == ["foo", "bar"]
+ assert _get_plugin_specs_as_list(("foo", "bar")) == ["foo", "bar"]
+
+
+def test_collect_pytest_prefix_bug_integration(pytester: Pytester) -> None:
+ """Integration test for issue #3775"""
+ p = pytester.copy_example("config/collect_pytest_prefix")
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(["* 1 passed *"])
+
+
+def test_collect_pytest_prefix_bug(pytestconfig):
+ """Ensure we collect only actual functions from conftest files (#3775)"""
+
+ class Dummy:
+ class pytest_something:
+ pass
+
+ pm = pytestconfig.pluginmanager
+ assert pm.parse_hookimpl_opts(Dummy(), "pytest_something") is None
+
+
+class TestRootdir:
+ def test_simple_noini(self, tmp_path: Path, monkeypatch: MonkeyPatch) -> None:
+ assert get_common_ancestor([tmp_path]) == tmp_path
+ a = tmp_path / "a"
+ a.mkdir()
+ assert get_common_ancestor([a, tmp_path]) == tmp_path
+ assert get_common_ancestor([tmp_path, a]) == tmp_path
+ monkeypatch.chdir(tmp_path)
+ assert get_common_ancestor([]) == tmp_path
+ no_path = tmp_path / "does-not-exist"
+ assert get_common_ancestor([no_path]) == tmp_path
+ assert get_common_ancestor([no_path / "a"]) == tmp_path
+
+ @pytest.mark.parametrize(
+ "name, contents",
+ [
+ pytest.param("pytest.ini", "[pytest]\nx=10", id="pytest.ini"),
+ pytest.param(
+ "pyproject.toml", "[tool.pytest.ini_options]\nx=10", id="pyproject.toml"
+ ),
+ pytest.param("tox.ini", "[pytest]\nx=10", id="tox.ini"),
+ pytest.param("setup.cfg", "[tool:pytest]\nx=10", id="setup.cfg"),
+ ],
+ )
+ def test_with_ini(self, tmp_path: Path, name: str, contents: str) -> None:
+ inipath = tmp_path / name
+ inipath.write_text(contents, "utf-8")
+
+ a = tmp_path / "a"
+ a.mkdir()
+ b = a / "b"
+ b.mkdir()
+ for args in ([str(tmp_path)], [str(a)], [str(b)]):
+ rootpath, parsed_inipath, _ = determine_setup(None, args)
+ assert rootpath == tmp_path
+ assert parsed_inipath == inipath
+ rootpath, parsed_inipath, ini_config = determine_setup(None, [str(b), str(a)])
+ assert rootpath == tmp_path
+ assert parsed_inipath == inipath
+ assert ini_config == {"x": "10"}
+
+ @pytest.mark.parametrize("name", ["setup.cfg", "tox.ini"])
+ def test_pytestini_overrides_empty_other(self, tmp_path: Path, name: str) -> None:
+ inipath = tmp_path / "pytest.ini"
+ inipath.touch()
+ a = tmp_path / "a"
+ a.mkdir()
+ (a / name).touch()
+ rootpath, parsed_inipath, _ = determine_setup(None, [str(a)])
+ assert rootpath == tmp_path
+ assert parsed_inipath == inipath
+
+ def test_setuppy_fallback(self, tmp_path: Path) -> None:
+ a = tmp_path / "a"
+ a.mkdir()
+ (a / "setup.cfg").touch()
+ (tmp_path / "setup.py").touch()
+ rootpath, inipath, inicfg = determine_setup(None, [str(a)])
+ assert rootpath == tmp_path
+ assert inipath is None
+ assert inicfg == {}
+
+ def test_nothing(self, tmp_path: Path, monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.chdir(tmp_path)
+ rootpath, inipath, inicfg = determine_setup(None, [str(tmp_path)])
+ assert rootpath == tmp_path
+ assert inipath is None
+ assert inicfg == {}
+
+ @pytest.mark.parametrize(
+ "name, contents",
+ [
+ # pytest.param("pytest.ini", "[pytest]\nx=10", id="pytest.ini"),
+ pytest.param(
+ "pyproject.toml", "[tool.pytest.ini_options]\nx=10", id="pyproject.toml"
+ ),
+ # pytest.param("tox.ini", "[pytest]\nx=10", id="tox.ini"),
+ # pytest.param("setup.cfg", "[tool:pytest]\nx=10", id="setup.cfg"),
+ ],
+ )
+ def test_with_specific_inifile(
+ self, tmp_path: Path, name: str, contents: str
+ ) -> None:
+ p = tmp_path / name
+ p.touch()
+ p.write_text(contents, "utf-8")
+ rootpath, inipath, ini_config = determine_setup(str(p), [str(tmp_path)])
+ assert rootpath == tmp_path
+ assert inipath == p
+ assert ini_config == {"x": "10"}
+
+ def test_explicit_config_file_sets_rootdir(
+ self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+ ) -> None:
+ tests_dir = tmp_path / "tests"
+ tests_dir.mkdir()
+
+ monkeypatch.chdir(tmp_path)
+
+ # No config file is explicitly given: rootdir is determined to be cwd.
+ rootpath, found_inipath, *_ = determine_setup(None, [str(tests_dir)])
+ assert rootpath == tmp_path
+ assert found_inipath is None
+
+ # Config file is explicitly given: rootdir is determined to be inifile's directory.
+ inipath = tmp_path / "pytest.ini"
+ inipath.touch()
+ rootpath, found_inipath, *_ = determine_setup(str(inipath), [str(tests_dir)])
+ assert rootpath == tmp_path
+ assert found_inipath == inipath
+
+ def test_with_arg_outside_cwd_without_inifile(
+ self, tmp_path: Path, monkeypatch: MonkeyPatch
+ ) -> None:
+ monkeypatch.chdir(tmp_path)
+ a = tmp_path / "a"
+ a.mkdir()
+ b = tmp_path / "b"
+ b.mkdir()
+ rootpath, inifile, _ = determine_setup(None, [str(a), str(b)])
+ assert rootpath == tmp_path
+ assert inifile is None
+
+ def test_with_arg_outside_cwd_with_inifile(self, tmp_path: Path) -> None:
+ a = tmp_path / "a"
+ a.mkdir()
+ b = tmp_path / "b"
+ b.mkdir()
+ inipath = a / "pytest.ini"
+ inipath.touch()
+ rootpath, parsed_inipath, _ = determine_setup(None, [str(a), str(b)])
+ assert rootpath == a
+ assert inipath == parsed_inipath
+
+ @pytest.mark.parametrize("dirs", ([], ["does-not-exist"], ["a/does-not-exist"]))
+ def test_with_non_dir_arg(
+ self, dirs: Sequence[str], tmp_path: Path, monkeypatch: MonkeyPatch
+ ) -> None:
+ monkeypatch.chdir(tmp_path)
+ rootpath, inipath, _ = determine_setup(None, dirs)
+ assert rootpath == tmp_path
+ assert inipath is None
+
+ def test_with_existing_file_in_subdir(
+ self, tmp_path: Path, monkeypatch: MonkeyPatch
+ ) -> None:
+ a = tmp_path / "a"
+ a.mkdir()
+ (a / "exists").touch()
+ monkeypatch.chdir(tmp_path)
+        rootpath, inipath, _ = determine_setup(None, ["a/exists"])
+ assert rootpath == tmp_path
+ assert inipath is None
+
+ def test_with_config_also_in_parent_directory(
+ self, tmp_path: Path, monkeypatch: MonkeyPatch
+ ) -> None:
+ """Regression test for #7807."""
+ (tmp_path / "setup.cfg").write_text("[tool:pytest]\n", "utf-8")
+ (tmp_path / "myproject").mkdir()
+ (tmp_path / "myproject" / "setup.cfg").write_text("[tool:pytest]\n", "utf-8")
+ (tmp_path / "myproject" / "tests").mkdir()
+ monkeypatch.chdir(tmp_path / "myproject")
+
+ rootpath, inipath, _ = determine_setup(None, ["tests/"])
+
+ assert rootpath == tmp_path / "myproject"
+ assert inipath == tmp_path / "myproject" / "setup.cfg"
+
+
+class TestOverrideIniArgs:
+ @pytest.mark.parametrize("name", "setup.cfg tox.ini pytest.ini".split())
+ def test_override_ini_names(self, pytester: Pytester, name: str) -> None:
+ section = "[pytest]" if name != "setup.cfg" else "[tool:pytest]"
+ pytester.path.joinpath(name).write_text(
+ textwrap.dedent(
+ """
+ {section}
+ custom = 1.0""".format(
+ section=section
+ )
+ )
+ )
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("custom", "")"""
+ )
+ pytester.makepyfile(
+ """
+ def test_pass(pytestconfig):
+ ini_val = pytestconfig.getini("custom")
+ print('\\ncustom_option:%s\\n' % ini_val)"""
+ )
+
+ result = pytester.runpytest("--override-ini", "custom=2.0", "-s")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["custom_option:2.0"])
+
+ result = pytester.runpytest(
+ "--override-ini", "custom=2.0", "--override-ini=custom=3.0", "-s"
+ )
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["custom_option:3.0"])
+
+ def test_override_ini_paths(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("paths", "my new ini value", type="paths")"""
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ paths=blah.py"""
+ )
+ pytester.makepyfile(
+ r"""
+            def test_overridden(pytestconfig):
+ config_paths = pytestconfig.getini("paths")
+ print(config_paths)
+ for cpf in config_paths:
+ print('\nuser_path:%s' % cpf.name)
+ """
+ )
+ result = pytester.runpytest(
+ "--override-ini", "paths=foo/bar1.py foo/bar2.py", "-s"
+ )
+ result.stdout.fnmatch_lines(["user_path:bar1.py", "user_path:bar2.py"])
+
+ def test_override_multiple_and_default(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ addini = parser.addini
+ addini("custom_option_1", "", default="o1")
+ addini("custom_option_2", "", default="o2")
+ addini("custom_option_3", "", default=False, type="bool")
+ addini("custom_option_4", "", default=True, type="bool")"""
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ custom_option_1=custom_option_1
+ custom_option_2=custom_option_2
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_multiple_options(pytestconfig):
+ prefix = "custom_option"
+ for x in range(1, 5):
+ ini_value=pytestconfig.getini("%s_%d" % (prefix, x))
+ print('\\nini%d:%s' % (x, ini_value))
+ """
+ )
+ result = pytester.runpytest(
+ "--override-ini",
+ "custom_option_1=fulldir=/tmp/user1",
+ "-o",
+ "custom_option_2=url=/tmp/user2?a=b&d=e",
+ "-o",
+ "custom_option_3=True",
+ "-o",
+ "custom_option_4=no",
+ "-s",
+ )
+ result.stdout.fnmatch_lines(
+ [
+ "ini1:fulldir=/tmp/user1",
+ "ini2:url=/tmp/user2?a=b&d=e",
+ "ini3:True",
+ "ini4:False",
+ ]
+ )
+
+ def test_override_ini_usage_error_bad_style(self, pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ xdist_strict=False
+ """
+ )
+ result = pytester.runpytest("--override-ini", "xdist_strict", "True")
+ result.stderr.fnmatch_lines(
+ [
+ "ERROR: -o/--override-ini expects option=value style (got: 'xdist_strict').",
+ ]
+ )
+
+ @pytest.mark.parametrize("with_ini", [True, False])
+ def test_override_ini_handled_asap(
+ self, pytester: Pytester, with_ini: bool
+ ) -> None:
+ """-o should be handled as soon as possible and always override what's in ini files (#2238)"""
+ if with_ini:
+ pytester.makeini(
+ """
+ [pytest]
+ python_files=test_*.py
+ """
+ )
+ pytester.makepyfile(
+ unittest_ini_handle="""
+ def test():
+ pass
+ """
+ )
+ result = pytester.runpytest("--override-ini", "python_files=unittest_*.py")
+ result.stdout.fnmatch_lines(["*1 passed in*"])
+
+ def test_addopts_before_initini(
+ self, monkeypatch: MonkeyPatch, _config_for_test, _sys_snapshot
+ ) -> None:
+ cache_dir = ".custom_cache"
+ monkeypatch.setenv("PYTEST_ADDOPTS", "-o cache_dir=%s" % cache_dir)
+ config = _config_for_test
+ config._preparse([], addopts=True)
+ assert config._override_ini == ["cache_dir=%s" % cache_dir]
+
+ def test_addopts_from_env_not_concatenated(
+ self, monkeypatch: MonkeyPatch, _config_for_test
+ ) -> None:
+ """PYTEST_ADDOPTS should not take values from normal args (#4265)."""
+ monkeypatch.setenv("PYTEST_ADDOPTS", "-o")
+ config = _config_for_test
+ with pytest.raises(UsageError) as excinfo:
+ config._preparse(["cache_dir=ignored"], addopts=True)
+ assert (
+ "error: argument -o/--override-ini: expected one argument (via PYTEST_ADDOPTS)"
+ in excinfo.value.args[0]
+ )
+
+ def test_addopts_from_ini_not_concatenated(self, pytester: Pytester) -> None:
+ """`addopts` from ini should not take values from normal args (#4265)."""
+ pytester.makeini(
+ """
+ [pytest]
+ addopts=-o
+ """
+ )
+ result = pytester.runpytest("cache_dir=ignored")
+ result.stderr.fnmatch_lines(
+ [
+ "%s: error: argument -o/--override-ini: expected one argument (via addopts config)"
+ % (pytester._request.config._parser.optparser.prog,)
+ ]
+ )
+ assert result.ret == _pytest.config.ExitCode.USAGE_ERROR
+
+ def test_override_ini_does_not_contain_paths(
+ self, _config_for_test, _sys_snapshot
+ ) -> None:
+ """Check that -o no longer swallows all options after it (#3103)"""
+ config = _config_for_test
+ config._preparse(["-o", "cache_dir=/cache", "/some/test/path"])
+ assert config._override_ini == ["cache_dir=/cache"]
+
+ def test_multiple_override_ini_options(self, pytester: Pytester) -> None:
+ """Ensure a file path following a '-o' option does not generate an error (#3103)"""
+ pytester.makepyfile(
+ **{
+ "conftest.py": """
+ def pytest_addoption(parser):
+ parser.addini('foo', default=None, help='some option')
+ parser.addini('bar', default=None, help='some option')
+ """,
+ "test_foo.py": """
+ def test(pytestconfig):
+ assert pytestconfig.getini('foo') == '1'
+ assert pytestconfig.getini('bar') == '0'
+ """,
+ "test_bar.py": """
+ def test():
+ assert False
+ """,
+ }
+ )
+ result = pytester.runpytest("-o", "foo=1", "-o", "bar=0", "test_foo.py")
+ assert "ERROR:" not in result.stderr.str()
+ result.stdout.fnmatch_lines(["collected 1 item", "*= 1 passed in *="])
+
+
+def test_help_via_addopts(pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ addopts = --unknown-option-should-allow-for-help --help
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "usage: *",
+ "positional arguments:",
+ # Displays full/default help.
+ "to see available markers type: pytest --markers",
+ ]
+ )
+
+
+def test_help_and_version_after_argument_error(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def validate(arg):
+ raise argparse.ArgumentTypeError("argerror")
+
+ def pytest_addoption(parser):
+ group = parser.getgroup('cov')
+ group.addoption(
+ "--invalid-option-should-allow-for-help",
+ type=validate,
+ )
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ addopts = --invalid-option-should-allow-for-help
+ """
+ )
+ result = pytester.runpytest("--help")
+ result.stdout.fnmatch_lines(
+ [
+ "usage: *",
+ "positional arguments:",
+ "NOTE: displaying only minimal help due to UsageError.",
+ ]
+ )
+ result.stderr.fnmatch_lines(
+ [
+ "ERROR: usage: *",
+ "%s: error: argument --invalid-option-should-allow-for-help: expected one argument"
+ % (pytester._request.config._parser.optparser.prog,),
+ ]
+ )
+ # Does not display full/default help.
+ assert "to see available markers type: pytest --markers" not in result.stdout.lines
+ assert result.ret == ExitCode.USAGE_ERROR
+
+ result = pytester.runpytest("--version")
+ result.stdout.fnmatch_lines([f"pytest {pytest.__version__}"])
+ assert result.ret == ExitCode.USAGE_ERROR
+
+
+def test_help_formatter_uses_py_get_terminal_width(monkeypatch: MonkeyPatch) -> None:
+ from _pytest.config.argparsing import DropShorterLongHelpFormatter
+
+ monkeypatch.setenv("COLUMNS", "90")
+ formatter = DropShorterLongHelpFormatter("prog")
+ assert formatter._width == 90
+
+ monkeypatch.setattr("_pytest._io.get_terminal_width", lambda: 160)
+ formatter = DropShorterLongHelpFormatter("prog")
+ assert formatter._width == 160
+
+ formatter = DropShorterLongHelpFormatter("prog", width=42)
+ assert formatter._width == 42
+
+
+def test_config_does_not_load_blocked_plugin_from_args(pytester: Pytester) -> None:
+ """This tests that pytest's config setup handles "-p no:X"."""
+ p = pytester.makepyfile("def test(capfd): pass")
+ result = pytester.runpytest(str(p), "-pno:capture")
+ result.stdout.fnmatch_lines(["E fixture 'capfd' not found"])
+ assert result.ret == ExitCode.TESTS_FAILED
+
+ result = pytester.runpytest(str(p), "-pno:capture", "-s")
+ result.stderr.fnmatch_lines(["*: error: unrecognized arguments: -s"])
+ assert result.ret == ExitCode.USAGE_ERROR
+
+
+def test_invocation_args(pytester: Pytester) -> None:
+ """Ensure that Config.invocation_* arguments are correctly defined"""
+
+ class DummyPlugin:
+ pass
+
+ p = pytester.makepyfile("def test(): pass")
+ plugin = DummyPlugin()
+ rec = pytester.inline_run(p, "-v", plugins=[plugin])
+ calls = rec.getcalls("pytest_runtest_protocol")
+ assert len(calls) == 1
+ call = calls[0]
+ config = call.item.config
+
+ assert config.invocation_params.args == (str(p), "-v")
+ assert config.invocation_params.dir == pytester.path
+
+ plugins = config.invocation_params.plugins
+ assert len(plugins) == 2
+ assert plugins[0] is plugin
+ assert type(plugins[1]).__name__ == "Collect" # installed by pytester.inline_run()
+
+ # args cannot be None
+ with pytest.raises(TypeError):
+ Config.InvocationParams(args=None, plugins=None, dir=Path()) # type: ignore[arg-type]
+
+
+@pytest.mark.parametrize(
+ "plugin",
+ [
+ x
+ for x in _pytest.config.default_plugins
+ if x not in _pytest.config.essential_plugins
+ ],
+)
+def test_config_blocked_default_plugins(pytester: Pytester, plugin: str) -> None:
+ if plugin == "debugging":
+ # Fixed in xdist (after 1.27.0).
+ # https://github.com/pytest-dev/pytest-xdist/pull/422
+ try:
+ import xdist # noqa: F401
+ except ImportError:
+ pass
+ else:
+ pytest.skip("does not work with xdist currently")
+
+ p = pytester.makepyfile("def test(): pass")
+ result = pytester.runpytest(str(p), "-pno:%s" % plugin)
+
+ if plugin == "python":
+ assert result.ret == ExitCode.USAGE_ERROR
+ result.stderr.fnmatch_lines(
+ [
+ "ERROR: not found: */test_config_blocked_default_plugins.py",
+ "(no name '*/test_config_blocked_default_plugins.py' in any of [])",
+ ]
+ )
+ return
+
+ assert result.ret == ExitCode.OK
+ if plugin != "terminal":
+ result.stdout.fnmatch_lines(["* 1 passed in *"])
+
+ p = pytester.makepyfile("def test(): assert 0")
+ result = pytester.runpytest(str(p), "-pno:%s" % plugin)
+ assert result.ret == ExitCode.TESTS_FAILED
+ if plugin != "terminal":
+ result.stdout.fnmatch_lines(["* 1 failed in *"])
+ else:
+ assert result.stdout.lines == []
+
+
+class TestSetupCfg:
+ def test_pytest_setup_cfg_unsupported(self, pytester: Pytester) -> None:
+ pytester.makefile(
+ ".cfg",
+ setup="""
+ [pytest]
+ addopts = --verbose
+ """,
+ )
+ with pytest.raises(pytest.fail.Exception):
+ pytester.runpytest()
+
+ def test_pytest_custom_cfg_unsupported(self, pytester: Pytester) -> None:
+ pytester.makefile(
+ ".cfg",
+ custom="""
+ [pytest]
+ addopts = --verbose
+ """,
+ )
+ with pytest.raises(pytest.fail.Exception):
+ pytester.runpytest("-c", "custom.cfg")
+
+
+class TestPytestPluginsVariable:
+ def test_pytest_plugins_in_non_top_level_conftest_unsupported(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ **{
+ "subdirectory/conftest.py": """
+ pytest_plugins=['capture']
+ """
+ }
+ )
+ pytester.makepyfile(
+ """
+ def test_func():
+ pass
+ """
+ )
+ res = pytester.runpytest()
+ assert res.ret == 2
+ msg = "Defining 'pytest_plugins' in a non-top-level conftest is no longer supported"
+ res.stdout.fnmatch_lines([f"*{msg}*", f"*subdirectory{os.sep}conftest.py*"])
+
+ @pytest.mark.parametrize("use_pyargs", [True, False])
+ def test_pytest_plugins_in_non_top_level_conftest_unsupported_pyargs(
+ self, pytester: Pytester, use_pyargs: bool
+ ) -> None:
+        """When using --pyargs, do not emit the non-top-level conftest warning (#4039, #4044)."""
+
+ files = {
+ "src/pkg/__init__.py": "",
+ "src/pkg/conftest.py": "",
+ "src/pkg/test_root.py": "def test(): pass",
+ "src/pkg/sub/__init__.py": "",
+ "src/pkg/sub/conftest.py": "pytest_plugins=['capture']",
+ "src/pkg/sub/test_bar.py": "def test(): pass",
+ }
+ pytester.makepyfile(**files)
+ pytester.syspathinsert(pytester.path.joinpath("src"))
+
+ args = ("--pyargs", "pkg") if use_pyargs else ()
+ res = pytester.runpytest(*args)
+ assert res.ret == (0 if use_pyargs else 2)
+        msg = (
+            "Defining 'pytest_plugins' in a non-top-level conftest is no longer supported"
+        )
+ if use_pyargs:
+ assert msg not in res.stdout.str()
+ else:
+ res.stdout.fnmatch_lines([f"*{msg}*"])
+
+ def test_pytest_plugins_in_non_top_level_conftest_unsupported_no_top_level_conftest(
+ self, pytester: Pytester
+ ) -> None:
+ subdirectory = pytester.path.joinpath("subdirectory")
+ subdirectory.mkdir()
+ pytester.makeconftest(
+ """
+ pytest_plugins=['capture']
+ """
+ )
+ pytester.path.joinpath("conftest.py").rename(
+ subdirectory.joinpath("conftest.py")
+ )
+
+ pytester.makepyfile(
+ """
+ def test_func():
+ pass
+ """
+ )
+
+ res = pytester.runpytest_subprocess()
+ assert res.ret == 2
+ msg = "Defining 'pytest_plugins' in a non-top-level conftest is no longer supported"
+ res.stdout.fnmatch_lines([f"*{msg}*", f"*subdirectory{os.sep}conftest.py*"])
+
+ def test_pytest_plugins_in_non_top_level_conftest_unsupported_no_false_positives(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ "def test_func(): pass",
+ **{
+ "subdirectory/conftest": "pass",
+ "conftest": """
+ import warnings
+ warnings.filterwarnings('always', category=DeprecationWarning)
+ pytest_plugins=['capture']
+ """,
+ },
+ )
+ res = pytester.runpytest_subprocess()
+ assert res.ret == 0
+ msg = "Defining 'pytest_plugins' in a non-top-level conftest is no longer supported"
+ assert msg not in res.stdout.str()
+
+
+def test_conftest_import_error_repr(tmp_path: Path) -> None:
+ """`ConftestImportFailure` should use a short error message and readable
+ path to the failed conftest.py file."""
+ path = tmp_path.joinpath("foo/conftest.py")
+ with pytest.raises(
+ ConftestImportFailure,
+ match=re.escape(f"RuntimeError: some error (from {path})"),
+ ):
+ try:
+ raise RuntimeError("some error")
+ except Exception as exc:
+ assert exc.__traceback__ is not None
+ exc_info = (type(exc), exc, exc.__traceback__)
+ raise ConftestImportFailure(path, exc_info) from exc
+
+
+def test_strtobool() -> None:
+ assert _strtobool("YES")
+ assert not _strtobool("NO")
+ with pytest.raises(ValueError):
+ _strtobool("unknown")
+
+
+@pytest.mark.parametrize(
+ "arg, escape, expected",
+ [
+ ("ignore", False, ("ignore", "", Warning, "", 0)),
+ (
+ "ignore::DeprecationWarning",
+ False,
+ ("ignore", "", DeprecationWarning, "", 0),
+ ),
+ (
+ "ignore:some msg:DeprecationWarning",
+ False,
+ ("ignore", "some msg", DeprecationWarning, "", 0),
+ ),
+ (
+ "ignore::DeprecationWarning:mod",
+ False,
+ ("ignore", "", DeprecationWarning, "mod", 0),
+ ),
+ (
+ "ignore::DeprecationWarning:mod:42",
+ False,
+ ("ignore", "", DeprecationWarning, "mod", 42),
+ ),
+ ("error:some\\msg:::", True, ("error", "some\\\\msg", Warning, "", 0)),
+ ("error:::mod\\foo:", True, ("error", "", Warning, "mod\\\\foo\\Z", 0)),
+ ],
+)
+def test_parse_warning_filter(
+ arg: str, escape: bool, expected: Tuple[str, str, Type[Warning], str, int]
+) -> None:
+ assert parse_warning_filter(arg, escape=escape) == expected
+
+
+@pytest.mark.parametrize(
+ "arg",
+ [
+        # Too many parts.
+ ":" * 5,
+ # Invalid action.
+ "FOO::",
+ # ImportError when importing the warning class.
+ "::test_parse_warning_filter_failure.NonExistentClass::",
+ # Class is not a Warning subclass.
+ "::list::",
+ # Negative line number.
+ "::::-1",
+ # Not a line number.
+ "::::not-a-number",
+ ],
+)
+def test_parse_warning_filter_failure(arg: str) -> None:
+ with pytest.raises(pytest.UsageError):
+ parse_warning_filter(arg, escape=True)
+
+
+class TestDebugOptions:
+ def test_without_debug_does_not_write_log(self, pytester: Pytester) -> None:
+ result = pytester.runpytest()
+ result.stderr.no_fnmatch_line(
+ "*writing pytest debug information to*pytestdebug.log"
+ )
+ result.stderr.no_fnmatch_line(
+ "*wrote pytest debug information to*pytestdebug.log"
+ )
+ assert not [f.name for f in pytester.path.glob("**/*.log")]
+
+ def test_with_only_debug_writes_pytestdebug_log(self, pytester: Pytester) -> None:
+ result = pytester.runpytest("--debug")
+ result.stderr.fnmatch_lines(
+ [
+ "*writing pytest debug information to*pytestdebug.log",
+ "*wrote pytest debug information to*pytestdebug.log",
+ ]
+ )
+ assert "pytestdebug.log" in [f.name for f in pytester.path.glob("**/*.log")]
+
+ def test_multiple_custom_debug_logs(self, pytester: Pytester) -> None:
+ result = pytester.runpytest("--debug", "bar.log")
+ result.stderr.fnmatch_lines(
+ [
+ "*writing pytest debug information to*bar.log",
+ "*wrote pytest debug information to*bar.log",
+ ]
+ )
+ result = pytester.runpytest("--debug", "foo.log")
+ result.stderr.fnmatch_lines(
+ [
+ "*writing pytest debug information to*foo.log",
+ "*wrote pytest debug information to*foo.log",
+ ]
+ )
+
+ assert {"bar.log", "foo.log"} == {
+ f.name for f in pytester.path.glob("**/*.log")
+ }
+
+ def test_debug_help(self, pytester: Pytester) -> None:
+ result = pytester.runpytest("-h")
+ result.stdout.fnmatch_lines(
+ [
+ "*store internal tracing debug information in this log*",
+ "*This file is opened with 'w' and truncated as a result*",
+ "*Defaults to 'pytestdebug.log'.",
+ ]
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_conftest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_conftest.py
new file mode 100644
index 0000000000..64c1014a53
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_conftest.py
@@ -0,0 +1,696 @@
+import argparse
+import os
+import textwrap
+from pathlib import Path
+from typing import cast
+from typing import Dict
+from typing import Generator
+from typing import List
+from typing import Optional
+
+import pytest
+from _pytest.config import ExitCode
+from _pytest.config import PytestPluginManager
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pathlib import symlink_or_skip
+from _pytest.pytester import Pytester
+from _pytest.tmpdir import TempPathFactory
+
+
+def ConftestWithSetinitial(path) -> PytestPluginManager:
+ conftest = PytestPluginManager()
+ conftest_setinitial(conftest, [path])
+ return conftest
+
+
+def conftest_setinitial(
+ conftest: PytestPluginManager, args, confcutdir: Optional["os.PathLike[str]"] = None
+) -> None:
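+    # Minimal stand-in for the parsed argparse namespace that Config normally
+    # passes to PytestPluginManager._set_initial_conftests.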
+ class Namespace:
+ def __init__(self) -> None:
+ self.file_or_dir = args
+ self.confcutdir = os.fspath(confcutdir) if confcutdir is not None else None
+ self.noconftest = False
+ self.pyargs = False
+ self.importmode = "prepend"
+
+ namespace = cast(argparse.Namespace, Namespace())
+ conftest._set_initial_conftests(namespace, rootpath=Path(args[0]))
+
+
+@pytest.mark.usefixtures("_sys_snapshot")
+class TestConftestValueAccessGlobal:
+ @pytest.fixture(scope="module", params=["global", "inpackage"])
+ def basedir(
+ self, request, tmp_path_factory: TempPathFactory
+ ) -> Generator[Path, None, None]:
+ tmp_path = tmp_path_factory.mktemp("basedir", numbered=True)
+ tmp_path.joinpath("adir/b").mkdir(parents=True)
+ tmp_path.joinpath("adir/conftest.py").write_text("a=1 ; Directory = 3")
+ tmp_path.joinpath("adir/b/conftest.py").write_text("b=2 ; a = 1.5")
+ if request.param == "inpackage":
+ tmp_path.joinpath("adir/__init__.py").touch()
+ tmp_path.joinpath("adir/b/__init__.py").touch()
+
+ yield tmp_path
+
+ def test_basic_init(self, basedir: Path) -> None:
+ conftest = PytestPluginManager()
+ p = basedir / "adir"
+ assert (
+ conftest._rget_with_confmod("a", p, importmode="prepend", rootpath=basedir)[
+ 1
+ ]
+ == 1
+ )
+
+    def test_immediate_initialization_and_incremental_are_the_same(
+ self, basedir: Path
+ ) -> None:
+ conftest = PytestPluginManager()
+ assert not len(conftest._dirpath2confmods)
+ conftest._getconftestmodules(
+ basedir, importmode="prepend", rootpath=Path(basedir)
+ )
+ snap1 = len(conftest._dirpath2confmods)
+ assert snap1 == 1
+ conftest._getconftestmodules(
+ basedir / "adir", importmode="prepend", rootpath=basedir
+ )
+ assert len(conftest._dirpath2confmods) == snap1 + 1
+ conftest._getconftestmodules(
+ basedir / "b", importmode="prepend", rootpath=basedir
+ )
+ assert len(conftest._dirpath2confmods) == snap1 + 2
+
+ def test_value_access_not_existing(self, basedir: Path) -> None:
+ conftest = ConftestWithSetinitial(basedir)
+ with pytest.raises(KeyError):
+ conftest._rget_with_confmod(
+ "a", basedir, importmode="prepend", rootpath=Path(basedir)
+ )
+
+ def test_value_access_by_path(self, basedir: Path) -> None:
+ conftest = ConftestWithSetinitial(basedir)
+ adir = basedir / "adir"
+ assert (
+ conftest._rget_with_confmod(
+ "a", adir, importmode="prepend", rootpath=basedir
+ )[1]
+ == 1
+ )
+ assert (
+ conftest._rget_with_confmod(
+ "a", adir / "b", importmode="prepend", rootpath=basedir
+ )[1]
+ == 1.5
+ )
+
+ def test_value_access_with_confmod(self, basedir: Path) -> None:
+ startdir = basedir / "adir" / "b"
+ startdir.joinpath("xx").mkdir()
+ conftest = ConftestWithSetinitial(startdir)
+ mod, value = conftest._rget_with_confmod(
+ "a", startdir, importmode="prepend", rootpath=Path(basedir)
+ )
+ assert value == 1.5
+ path = Path(mod.__file__)
+ assert path.parent == basedir / "adir" / "b"
+ assert path.stem == "conftest"
+
+
+def test_conftest_in_nonpkg_with_init(tmp_path: Path, _sys_snapshot) -> None:
+ tmp_path.joinpath("adir-1.0/b").mkdir(parents=True)
+ tmp_path.joinpath("adir-1.0/conftest.py").write_text("a=1 ; Directory = 3")
+ tmp_path.joinpath("adir-1.0/b/conftest.py").write_text("b=2 ; a = 1.5")
+ tmp_path.joinpath("adir-1.0/b/__init__.py").touch()
+ tmp_path.joinpath("adir-1.0/__init__.py").touch()
+ ConftestWithSetinitial(tmp_path.joinpath("adir-1.0", "b"))
+
+
+def test_doubledash_considered(pytester: Pytester) -> None:
+ conf = pytester.mkdir("--option")
+ conf.joinpath("conftest.py").touch()
+ conftest = PytestPluginManager()
+ conftest_setinitial(conftest, [conf.name, conf.name])
+ values = conftest._getconftestmodules(
+ conf, importmode="prepend", rootpath=pytester.path
+ )
+ assert len(values) == 1
+
+
+def test_issue151_load_all_conftests(pytester: Pytester) -> None:
+ names = "code proj src".split()
+ for name in names:
+ p = pytester.mkdir(name)
+ p.joinpath("conftest.py").touch()
+
+ conftest = PytestPluginManager()
+ conftest_setinitial(conftest, names)
+ d = list(conftest._conftestpath2mod.values())
+ assert len(d) == len(names)
+
+
+def test_conftest_global_import(pytester: Pytester) -> None:
+ pytester.makeconftest("x=3")
+ p = pytester.makepyfile(
+ """
+ from pathlib import Path
+ import pytest
+ from _pytest.config import PytestPluginManager
+ conf = PytestPluginManager()
+ mod = conf._importconftest(Path("conftest.py"), importmode="prepend", rootpath=Path.cwd())
+ assert mod.x == 3
+ import conftest
+ assert conftest is mod, (conftest, mod)
+ sub = Path("sub")
+ sub.mkdir()
+ subconf = sub / "conftest.py"
+ subconf.write_text("y=4")
+ mod2 = conf._importconftest(subconf, importmode="prepend", rootpath=Path.cwd())
+ assert mod != mod2
+ assert mod2.y == 4
+ import conftest
+ assert conftest is mod2, (conftest, mod)
+ """
+ )
+ res = pytester.runpython(p)
+ assert res.ret == 0
+
+
+def test_conftestcutdir(pytester: Pytester) -> None:
+ conf = pytester.makeconftest("")
+ p = pytester.mkdir("x")
+ conftest = PytestPluginManager()
+ conftest_setinitial(conftest, [pytester.path], confcutdir=p)
+ values = conftest._getconftestmodules(
+ p, importmode="prepend", rootpath=pytester.path
+ )
+ assert len(values) == 0
+ values = conftest._getconftestmodules(
+ conf.parent, importmode="prepend", rootpath=pytester.path
+ )
+ assert len(values) == 0
+ assert Path(conf) not in conftest._conftestpath2mod
+ # but we can still import a conftest directly
+ conftest._importconftest(conf, importmode="prepend", rootpath=pytester.path)
+ values = conftest._getconftestmodules(
+ conf.parent, importmode="prepend", rootpath=pytester.path
+ )
+ assert values[0].__file__.startswith(str(conf))
+ # and all sub paths get updated properly
+ values = conftest._getconftestmodules(
+ p, importmode="prepend", rootpath=pytester.path
+ )
+ assert len(values) == 1
+ assert values[0].__file__.startswith(str(conf))
+
+
+def test_conftestcutdir_inplace_considered(pytester: Pytester) -> None:
+ conf = pytester.makeconftest("")
+ conftest = PytestPluginManager()
+ conftest_setinitial(conftest, [conf.parent], confcutdir=conf.parent)
+ values = conftest._getconftestmodules(
+ conf.parent, importmode="prepend", rootpath=pytester.path
+ )
+ assert len(values) == 1
+ assert values[0].__file__.startswith(str(conf))
+
+
+@pytest.mark.parametrize("name", "test tests whatever .dotdir".split())
+def test_setinitial_conftest_subdirs(pytester: Pytester, name: str) -> None:
+ sub = pytester.mkdir(name)
+ subconftest = sub.joinpath("conftest.py")
+ subconftest.touch()
+ conftest = PytestPluginManager()
+ conftest_setinitial(conftest, [sub.parent], confcutdir=pytester.path)
+ key = subconftest.resolve()
+ if name not in ("whatever", ".dotdir"):
+ assert key in conftest._conftestpath2mod
+ assert len(conftest._conftestpath2mod) == 1
+ else:
+ assert key not in conftest._conftestpath2mod
+ assert len(conftest._conftestpath2mod) == 0
+
+
+def test_conftest_confcutdir(pytester: Pytester) -> None:
+ pytester.makeconftest("assert 0")
+ x = pytester.mkdir("x")
+ x.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ def pytest_addoption(parser):
+ parser.addoption("--xyz", action="store_true")
+ """
+ )
+ )
+ result = pytester.runpytest("-h", "--confcutdir=%s" % x, x)
+ result.stdout.fnmatch_lines(["*--xyz*"])
+ result.stdout.no_fnmatch_line("*warning: could not load initial*")
+
+
+def test_conftest_symlink(pytester: Pytester) -> None:
+ """`conftest.py` discovery follows normal path resolution and does not resolve symlinks."""
+ # Structure:
+ # /real
+ # /real/conftest.py
+ # /real/app
+ # /real/app/tests
+ # /real/app/tests/test_foo.py
+
+ # Links:
+ # /symlinktests -> /real/app/tests (running at symlinktests should fail)
+ # /symlink -> /real (running at /symlink should work)
+
+ real = pytester.mkdir("real")
+ realtests = real.joinpath("app/tests")
+ realtests.mkdir(parents=True)
+ symlink_or_skip(realtests, pytester.path.joinpath("symlinktests"))
+ symlink_or_skip(real, pytester.path.joinpath("symlink"))
+ pytester.makepyfile(
+ **{
+ "real/app/tests/test_foo.py": "def test1(fixture): pass",
+ "real/conftest.py": textwrap.dedent(
+ """
+ import pytest
+
+ print("conftest_loaded")
+
+ @pytest.fixture
+ def fixture():
+ print("fixture_used")
+ """
+ ),
+ }
+ )
+
+ # Should fail because conftest cannot be found from the link structure.
+ result = pytester.runpytest("-vs", "symlinktests")
+ result.stdout.fnmatch_lines(["*fixture 'fixture' not found*"])
+ assert result.ret == ExitCode.TESTS_FAILED
+
+ # Should not cause "ValueError: Plugin already registered" (#4174).
+ result = pytester.runpytest("-vs", "symlink")
+ assert result.ret == ExitCode.OK
+
+
+def test_conftest_symlink_files(pytester: Pytester) -> None:
+    """Symlinked conftest.py files are found when pytest is executed in a
+    directory with symlinked files."""
+ real = pytester.mkdir("real")
+ source = {
+ "app/test_foo.py": "def test1(fixture): pass",
+ "app/__init__.py": "",
+ "app/conftest.py": textwrap.dedent(
+ """
+ import pytest
+
+ print("conftest_loaded")
+
+ @pytest.fixture
+ def fixture():
+ print("fixture_used")
+ """
+ ),
+ }
+ pytester.makepyfile(**{"real/%s" % k: v for k, v in source.items()})
+
+ # Create a build directory that contains symlinks to actual files
+ # but doesn't symlink actual directories.
+ build = pytester.mkdir("build")
+ build.joinpath("app").mkdir()
+ for f in source:
+ symlink_or_skip(real.joinpath(f), build.joinpath(f))
+ os.chdir(build)
+ result = pytester.runpytest("-vs", "app/test_foo.py")
+ result.stdout.fnmatch_lines(["*conftest_loaded*", "PASSED"])
+ assert result.ret == ExitCode.OK
+
+
+@pytest.mark.skipif(
+ os.path.normcase("x") != os.path.normcase("X"),
+ reason="only relevant for case insensitive file systems",
+)
+def test_conftest_badcase(pytester: Pytester) -> None:
+ """Check conftest.py loading when directory casing is wrong (#5792)."""
+ pytester.path.joinpath("JenkinsRoot/test").mkdir(parents=True)
+ source = {"setup.py": "", "test/__init__.py": "", "test/conftest.py": ""}
+ pytester.makepyfile(**{"JenkinsRoot/%s" % k: v for k, v in source.items()})
+
+ os.chdir(pytester.path.joinpath("jenkinsroot/test"))
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+def test_conftest_uppercase(pytester: Pytester) -> None:
+ """Check conftest.py whose qualified name contains uppercase characters (#5819)"""
+ source = {"__init__.py": "", "Foo/conftest.py": "", "Foo/__init__.py": ""}
+ pytester.makepyfile(**source)
+
+ os.chdir(pytester.path)
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+def test_no_conftest(pytester: Pytester) -> None:
+ pytester.makeconftest("assert 0")
+ result = pytester.runpytest("--noconftest")
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.USAGE_ERROR
+
+
+def test_conftest_existing_junitxml(pytester: Pytester) -> None:
+ x = pytester.mkdir("tests")
+ x.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ def pytest_addoption(parser):
+ parser.addoption("--xyz", action="store_true")
+ """
+ )
+ )
+ pytester.makefile(ext=".xml", junit="") # Writes junit.xml
+ result = pytester.runpytest("-h", "--junitxml", "junit.xml")
+ result.stdout.fnmatch_lines(["*--xyz*"])
+
+
+def test_conftest_import_order(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
+ ct1 = pytester.makeconftest("")
+ sub = pytester.mkdir("sub")
+ ct2 = sub / "conftest.py"
+ ct2.write_text("")
+
+ def impct(p, importmode, root):
+ return p
+
+ conftest = PytestPluginManager()
+ conftest._confcutdir = pytester.path
+ monkeypatch.setattr(conftest, "_importconftest", impct)
+ mods = cast(
+ List[Path],
+ conftest._getconftestmodules(sub, importmode="prepend", rootpath=pytester.path),
+ )
+ expected = [ct1, ct2]
+ assert mods == expected
+
+
+def test_fixture_dependency(pytester: Pytester) -> None:
+ pytester.makeconftest("")
+ pytester.path.joinpath("__init__.py").touch()
+ sub = pytester.mkdir("sub")
+ sub.joinpath("__init__.py").touch()
+ sub.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ @pytest.fixture
+ def not_needed():
+ assert False, "Should not be called!"
+
+ @pytest.fixture
+ def foo():
+ assert False, "Should not be called!"
+
+ @pytest.fixture
+ def bar(foo):
+ return 'bar'
+ """
+ )
+ )
+ subsub = sub.joinpath("subsub")
+ subsub.mkdir()
+ subsub.joinpath("__init__.py").touch()
+ subsub.joinpath("test_bar.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ @pytest.fixture
+ def bar():
+ return 'sub bar'
+
+ def test_event_fixture(bar):
+ assert bar == 'sub bar'
+ """
+ )
+ )
+ result = pytester.runpytest("sub")
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_conftest_found_with_double_dash(pytester: Pytester) -> None:
+ sub = pytester.mkdir("sub")
+ sub.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ def pytest_addoption(parser):
+ parser.addoption("--hello-world", action="store_true")
+ """
+ )
+ )
+ p = sub.joinpath("test_hello.py")
+ p.write_text("def test_hello(): pass")
+ result = pytester.runpytest(str(p) + "::test_hello", "-h")
+ result.stdout.fnmatch_lines(
+ """
+ *--hello-world*
+ """
+ )
+
+
+class TestConftestVisibility:
+ def _setup_tree(self, pytester: Pytester) -> Dict[str, Path]: # for issue616
+ # example mostly taken from:
+ # https://mail.python.org/pipermail/pytest-dev/2014-September/002617.html
+ runner = pytester.mkdir("empty")
+ package = pytester.mkdir("package")
+
+ package.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def fxtr():
+ return "from-package"
+ """
+ )
+ )
+ package.joinpath("test_pkgroot.py").write_text(
+ textwrap.dedent(
+ """\
+ def test_pkgroot(fxtr):
+ assert fxtr == "from-package"
+ """
+ )
+ )
+
+ swc = package.joinpath("swc")
+ swc.mkdir()
+ swc.joinpath("__init__.py").touch()
+ swc.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def fxtr():
+ return "from-swc"
+ """
+ )
+ )
+ swc.joinpath("test_with_conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ def test_with_conftest(fxtr):
+ assert fxtr == "from-swc"
+ """
+ )
+ )
+
+ snc = package.joinpath("snc")
+ snc.mkdir()
+ snc.joinpath("__init__.py").touch()
+ snc.joinpath("test_no_conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ def test_no_conftest(fxtr):
+                    # No local conftest.py: should use the parent dir's conftest.py value.
+                    assert fxtr == "from-package"
+ """
+ )
+ )
+ print("created directory structure:")
+ for x in pytester.path.rglob(""):
+ print(" " + str(x.relative_to(pytester.path)))
+
+ return {"runner": runner, "package": package, "swc": swc, "snc": snc}
+
+ # N.B.: "swc" stands for "subdir with conftest.py"
+ # "snc" stands for "subdir no [i.e. without] conftest.py"
+ @pytest.mark.parametrize(
+ "chdir,testarg,expect_ntests_passed",
+ [
+ # Effective target: package/..
+ ("runner", "..", 3),
+ ("package", "..", 3),
+ ("swc", "../..", 3),
+ ("snc", "../..", 3),
+ # Effective target: package
+ ("runner", "../package", 3),
+ ("package", ".", 3),
+ ("swc", "..", 3),
+ ("snc", "..", 3),
+ # Effective target: package/swc
+ ("runner", "../package/swc", 1),
+ ("package", "./swc", 1),
+ ("swc", ".", 1),
+ ("snc", "../swc", 1),
+ # Effective target: package/snc
+ ("runner", "../package/snc", 1),
+ ("package", "./snc", 1),
+ ("swc", "../snc", 1),
+ ("snc", ".", 1),
+ ],
+ )
+ def test_parsefactories_relative_node_ids(
+ self, pytester: Pytester, chdir: str, testarg: str, expect_ntests_passed: int
+ ) -> None:
+ """#616"""
+ dirs = self._setup_tree(pytester)
+ print("pytest run in cwd: %s" % (dirs[chdir].relative_to(pytester.path)))
+ print("pytestarg : %s" % testarg)
+ print("expected pass : %s" % expect_ntests_passed)
+ os.chdir(dirs[chdir])
+ reprec = pytester.inline_run(testarg, "-q", "--traceconfig")
+ reprec.assertoutcome(passed=expect_ntests_passed)
+
+
+@pytest.mark.parametrize(
+ "confcutdir,passed,error", [(".", 2, 0), ("src", 1, 1), (None, 1, 1)]
+)
+def test_search_conftest_up_to_inifile(
+ pytester: Pytester, confcutdir: str, passed: int, error: int
+) -> None:
+ """Test that conftest files are detected only up to an ini file, unless
+ an explicit --confcutdir option is given.
+ """
+ root = pytester.path
+ src = root.joinpath("src")
+ src.mkdir()
+ src.joinpath("pytest.ini").write_text("[pytest]")
+ src.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def fix1(): pass
+ """
+ )
+ )
+ src.joinpath("test_foo.py").write_text(
+ textwrap.dedent(
+ """\
+ def test_1(fix1):
+ pass
+ def test_2(out_of_reach):
+ pass
+ """
+ )
+ )
+ root.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+ @pytest.fixture
+ def out_of_reach(): pass
+ """
+ )
+ )
+
+ args = [str(src)]
+ if confcutdir:
+ args = ["--confcutdir=%s" % root.joinpath(confcutdir)]
+ result = pytester.runpytest(*args)
+ match = ""
+ if passed:
+ match += "*%d passed*" % passed
+ if error:
+ match += "*%d error*" % error
+ result.stdout.fnmatch_lines(match)
+
+
+def test_issue1073_conftest_special_objects(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """\
+ class DontTouchMe(object):
+ def __getattr__(self, x):
+ raise Exception('cant touch me')
+
+ x = DontTouchMe()
+ """
+ )
+ pytester.makepyfile(
+ """\
+ def test_some():
+ pass
+ """
+ )
+ res = pytester.runpytest()
+ assert res.ret == 0
+
+
+def test_conftest_exception_handling(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """\
+ raise ValueError()
+ """
+ )
+ pytester.makepyfile(
+ """\
+ def test_some():
+ pass
+ """
+ )
+ res = pytester.runpytest()
+ assert res.ret == 4
+ assert "raise ValueError()" in [line.strip() for line in res.errlines]
+
+
+def test_hook_proxy(pytester: Pytester) -> None:
+ """Session's gethookproxy() would cache conftests incorrectly (#2016).
+ It was decided to remove the cache altogether.
+ """
+ pytester.makepyfile(
+ **{
+ "root/demo-0/test_foo1.py": "def test1(): pass",
+ "root/demo-a/test_foo2.py": "def test1(): pass",
+ "root/demo-a/conftest.py": """\
+ def pytest_ignore_collect(collection_path, config):
+ return True
+ """,
+ "root/demo-b/test_foo3.py": "def test1(): pass",
+ "root/demo-c/test_foo4.py": "def test1(): pass",
+ }
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*test_foo1.py*", "*test_foo3.py*", "*test_foo4.py*", "*3 passed*"]
+ )
+
+
+def test_required_option_help(pytester: Pytester) -> None:
+ pytester.makeconftest("assert 0")
+ x = pytester.mkdir("x")
+ x.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ def pytest_addoption(parser):
+ parser.addoption("--xyz", action="store_true", required=True)
+ """
+ )
+ )
+ result = pytester.runpytest("-h", x)
+ result.stdout.no_fnmatch_line("*argument --xyz is required*")
+ assert "general:" in result.stdout.str()
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_debugging.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_debugging.py
new file mode 100644
index 0000000000..a822bb57f5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_debugging.py
@@ -0,0 +1,1327 @@
+import os
+import sys
+from typing import List
+
+import _pytest._code
+import pytest
+from _pytest.debugging import _validate_usepdb_cls
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+
+try:
+ # Type ignored for Python <= 3.6.
+ breakpoint # type: ignore
+except NameError:
+ SUPPORTS_BREAKPOINT_BUILTIN = False
+else:
+ SUPPORTS_BREAKPOINT_BUILTIN = True
+
+
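+# Snapshot of the PYTHONBREAKPOINT environment variable taken at import time.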
+_ENVIRON_PYTHONBREAKPOINT = os.environ.get("PYTHONBREAKPOINT", "")
+
+
+@pytest.fixture(autouse=True)
+def pdb_env(request):
+ if "pytester" in request.fixturenames:
+ # Disable pdb++ with inner tests.
+ pytester = request.getfixturevalue("pytester")
+ pytester._monkeypatch.setenv("PDBPP_HIJACK_PDB", "0")
+
+
+def runpdb_and_get_report(pytester: Pytester, source: str):
+ p = pytester.makepyfile(source)
+ result = pytester.runpytest_inprocess("--pdb", p)
+ reports = result.reprec.getreports("pytest_runtest_logreport") # type: ignore[attr-defined]
+ assert len(reports) == 3, reports # setup/call/teardown
+ return reports[1]
+
+
+@pytest.fixture
+def custom_pdb_calls() -> List[str]:
+ called = []
+
+ # install dummy debugger class and track which methods were called on it
+ class _CustomPdb:
+ quitting = False
+
+ def __init__(self, *args, **kwargs):
+ called.append("init")
+
+ def reset(self):
+ called.append("reset")
+
+ def interaction(self, *args):
+ called.append("interaction")
+
+ _pytest._CustomPdb = _CustomPdb # type: ignore
+ return called
+
+
+@pytest.fixture
+def custom_debugger_hook():
+ called = []
+
+ # install dummy debugger class and track which methods were called on it
+ class _CustomDebugger:
+ def __init__(self, *args, **kwargs):
+ called.append("init")
+
+ def reset(self):
+ called.append("reset")
+
+ def interaction(self, *args):
+ called.append("interaction")
+
+ def set_trace(self, frame):
+ print("**CustomDebugger**")
+ called.append("set_trace")
+
+ _pytest._CustomDebugger = _CustomDebugger # type: ignore
+ yield called
+ del _pytest._CustomDebugger # type: ignore
+
+
+class TestPDB:
+ @pytest.fixture
+ def pdblist(self, request):
+ monkeypatch = request.getfixturevalue("monkeypatch")
+ pdblist = []
+
+ def mypdb(*args):
+ pdblist.append(args)
+
+ plugin = request.config.pluginmanager.getplugin("debugging")
+ monkeypatch.setattr(plugin, "post_mortem", mypdb)
+ return pdblist
+
+ def test_pdb_on_fail(self, pytester: Pytester, pdblist) -> None:
+ rep = runpdb_and_get_report(
+ pytester,
+ """
+ def test_func():
+ assert 0
+ """,
+ )
+ assert rep.failed
+ assert len(pdblist) == 1
+ tb = _pytest._code.Traceback(pdblist[0][0])
+ assert tb[-1].name == "test_func"
+
+ def test_pdb_on_xfail(self, pytester: Pytester, pdblist) -> None:
+ rep = runpdb_and_get_report(
+ pytester,
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_func():
+ assert 0
+ """,
+ )
+ assert "xfail" in rep.keywords
+ assert not pdblist
+
+ def test_pdb_on_skip(self, pytester, pdblist) -> None:
+ rep = runpdb_and_get_report(
+ pytester,
+ """
+ import pytest
+ def test_func():
+ pytest.skip("hello")
+ """,
+ )
+ assert rep.skipped
+ assert len(pdblist) == 0
+
+ def test_pdb_on_BdbQuit(self, pytester, pdblist) -> None:
+ rep = runpdb_and_get_report(
+ pytester,
+ """
+ import bdb
+ def test_func():
+ raise bdb.BdbQuit
+ """,
+ )
+ assert rep.failed
+ assert len(pdblist) == 0
+
+ def test_pdb_on_KeyboardInterrupt(self, pytester, pdblist) -> None:
+ rep = runpdb_and_get_report(
+ pytester,
+ """
+ def test_func():
+ raise KeyboardInterrupt
+ """,
+ )
+ assert rep.failed
+ assert len(pdblist) == 1
+
+ @staticmethod
+ def flush(child):
+ if child.isalive():
+            # Read remaining output if the test has not already done so (e.g. test_pdb_unittest_skip).
+ child.read()
+ child.wait()
+ assert not child.isalive()
+
+ def test_pdb_unittest_postmortem(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import unittest
+ class Blub(unittest.TestCase):
+ def tearDown(self):
+ self.filename = None
+ def test_false(self):
+ self.filename = 'debug' + '.me'
+ assert 0
+ """
+ )
+ child = pytester.spawn_pytest(f"--pdb {p1}")
+ child.expect("Pdb")
+ child.sendline("p self.filename")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "debug.me" in rest
+ self.flush(child)
+
+ def test_pdb_unittest_skip(self, pytester: Pytester) -> None:
+ """Test for issue #2137"""
+ p1 = pytester.makepyfile(
+ """
+ import unittest
+ @unittest.skipIf(True, 'Skipping also with pdb active')
+ class MyTestCase(unittest.TestCase):
+ def test_one(self):
+ assert 0
+ """
+ )
+ child = pytester.spawn_pytest(f"-rs --pdb {p1}")
+ child.expect("Skipping also with pdb active")
+ child.expect_exact("= 1 skipped in")
+ child.sendeof()
+ self.flush(child)
+
+ def test_pdb_print_captured_stdout_and_stderr(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test_1():
+ import sys
+ sys.stderr.write("get\\x20rekt")
+ print("get\\x20rekt")
+ assert False
+
+ def test_not_called_due_to_quit():
+ pass
+ """
+ )
+ child = pytester.spawn_pytest("--pdb %s" % p1)
+ child.expect("captured stdout")
+ child.expect("get rekt")
+ child.expect("captured stderr")
+ child.expect("get rekt")
+ child.expect("traceback")
+ child.expect("def test_1")
+ child.expect("Pdb")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "Exit: Quitting debugger" in rest
+ assert "= 1 failed in" in rest
+ assert "def test_1" not in rest
+ assert "get rekt" not in rest
+ self.flush(child)
+
+ def test_pdb_dont_print_empty_captured_stdout_and_stderr(
+ self, pytester: Pytester
+ ) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test_1():
+ assert False
+ """
+ )
+ child = pytester.spawn_pytest("--pdb %s" % p1)
+ child.expect("Pdb")
+ output = child.before.decode("utf8")
+ child.sendeof()
+ assert "captured stdout" not in output
+ assert "captured stderr" not in output
+ self.flush(child)
+
+ @pytest.mark.parametrize("showcapture", ["all", "no", "log"])
+ def test_pdb_print_captured_logs(self, pytester, showcapture: str) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test_1():
+ import logging
+ logging.warn("get " + "rekt")
+ assert False
+ """
+ )
+ child = pytester.spawn_pytest(f"--show-capture={showcapture} --pdb {p1}")
+ if showcapture in ("all", "log"):
+ child.expect("captured log")
+ child.expect("get rekt")
+ child.expect("Pdb")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ self.flush(child)
+
+ def test_pdb_print_captured_logs_nologging(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test_1():
+ import logging
+ logging.warn("get " + "rekt")
+ assert False
+ """
+ )
+ child = pytester.spawn_pytest("--show-capture=all --pdb -p no:logging %s" % p1)
+ child.expect("get rekt")
+ output = child.before.decode("utf8")
+ assert "captured log" not in output
+ child.expect("Pdb")
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ self.flush(child)
+
+ def test_pdb_interaction_exception(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+ def globalfunc():
+ pass
+ def test_1():
+ pytest.raises(ValueError, globalfunc)
+ """
+ )
+ child = pytester.spawn_pytest("--pdb %s" % p1)
+ child.expect(".*def test_1")
+ child.expect(".*pytest.raises.*globalfunc")
+ child.expect("Pdb")
+ child.sendline("globalfunc")
+ child.expect(".*function")
+ child.sendeof()
+ child.expect("1 failed")
+ self.flush(child)
+
+ def test_pdb_interaction_on_collection_issue181(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+ xxx
+ """
+ )
+ child = pytester.spawn_pytest("--pdb %s" % p1)
+ # child.expect(".*import pytest.*")
+ child.expect("Pdb")
+ child.sendline("c")
+ child.expect("1 error")
+ self.flush(child)
+
+ def test_pdb_interaction_on_internal_error(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_runtest_protocol():
+ 0/0
+ """
+ )
+ p1 = pytester.makepyfile("def test_func(): pass")
+ child = pytester.spawn_pytest("--pdb %s" % p1)
+ child.expect("Pdb")
+
+ # INTERNALERROR is only displayed once via terminal reporter.
+ assert (
+ len(
+ [
+ x
+ for x in child.before.decode().splitlines()
+ if x.startswith("INTERNALERROR> Traceback")
+ ]
+ )
+ == 1
+ )
+
+ child.sendeof()
+ self.flush(child)
+
+ def test_pdb_prevent_ConftestImportFailure_hiding_exception(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile("def test_func(): pass")
+ sub_dir = pytester.path.joinpath("ns")
+ sub_dir.mkdir()
+ sub_dir.joinpath("conftest").with_suffix(".py").write_text(
+ "import unknown", "utf-8"
+ )
+ sub_dir.joinpath("test_file").with_suffix(".py").write_text(
+ "def test_func(): pass", "utf-8"
+ )
+
+ result = pytester.runpytest_subprocess("--pdb", ".")
+ result.stdout.fnmatch_lines(["-> import unknown"])
+
+ def test_pdb_interaction_capturing_simple(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+ def test_1():
+ i = 0
+ print("hello17")
+ pytest.set_trace()
+ i == 1
+ assert 0
+ """
+ )
+ child = pytester.spawn_pytest(str(p1))
+ child.expect(r"test_1\(\)")
+ child.expect("i == 1")
+ child.expect("Pdb")
+ child.sendline("c")
+ rest = child.read().decode("utf-8")
+ assert "AssertionError" in rest
+ assert "1 failed" in rest
+ assert "def test_1" in rest
+ assert "hello17" in rest # out is captured
+ self.flush(child)
+
+ def test_pdb_set_trace_kwargs(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+ def test_1():
+ i = 0
+ print("hello17")
+ pytest.set_trace(header="== my_header ==")
+ x = 3
+ assert 0
+ """
+ )
+ child = pytester.spawn_pytest(str(p1))
+ child.expect("== my_header ==")
+ assert "PDB set_trace" not in child.before.decode()
+ child.expect("Pdb")
+ child.sendline("c")
+ rest = child.read().decode("utf-8")
+ assert "1 failed" in rest
+ assert "def test_1" in rest
+ assert "hello17" in rest # out is captured
+ self.flush(child)
+
+ def test_pdb_set_trace_interception(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pdb
+ def test_1():
+ pdb.set_trace()
+ """
+ )
+ child = pytester.spawn_pytest(str(p1))
+ child.expect("test_1")
+ child.expect("Pdb")
+ child.sendline("q")
+ rest = child.read().decode("utf8")
+ assert "no tests ran" in rest
+ assert "reading from stdin while output" not in rest
+ assert "BdbQuit" not in rest
+ self.flush(child)
+
+ def test_pdb_and_capsys(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+ def test_1(capsys):
+ print("hello1")
+ pytest.set_trace()
+ """
+ )
+ child = pytester.spawn_pytest(str(p1))
+ child.expect("test_1")
+ child.send("capsys.readouterr()\n")
+ child.expect("hello1")
+ child.sendeof()
+ child.read()
+ self.flush(child)
+
+ def test_pdb_with_caplog_on_pdb_invocation(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test_1(capsys, caplog):
+ import logging
+ logging.getLogger(__name__).warning("some_warning")
+ assert 0
+ """
+ )
+ child = pytester.spawn_pytest("--pdb %s" % str(p1))
+ child.send("caplog.record_tuples\n")
+ child.expect_exact(
+ "[('test_pdb_with_caplog_on_pdb_invocation', 30, 'some_warning')]"
+ )
+ child.sendeof()
+ child.read()
+ self.flush(child)
+
+ def test_set_trace_capturing_afterwards(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pdb
+ def test_1():
+ pdb.set_trace()
+ def test_2():
+ print("hello")
+ assert 0
+ """
+ )
+ child = pytester.spawn_pytest(str(p1))
+ child.expect("test_1")
+ child.send("c\n")
+ child.expect("test_2")
+ child.expect("Captured")
+ child.expect("hello")
+ child.sendeof()
+ child.read()
+ self.flush(child)
+
+ def test_pdb_interaction_doctest(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def function_1():
+ '''
+ >>> i = 0
+ >>> assert i == 1
+ '''
+ """
+ )
+ child = pytester.spawn_pytest("--doctest-modules --pdb %s" % p1)
+ child.expect("Pdb")
+
+ assert "UNEXPECTED EXCEPTION: AssertionError()" in child.before.decode("utf8")
+
+ child.sendline("'i=%i.' % i")
+ child.expect("Pdb")
+ assert "\r\n'i=0.'\r\n" in child.before.decode("utf8")
+
+ child.sendeof()
+ rest = child.read().decode("utf8")
+ assert "! _pytest.outcomes.Exit: Quitting debugger !" in rest
+ assert "BdbQuit" not in rest
+ assert "1 failed" in rest
+ self.flush(child)
+
+ def test_doctest_set_trace_quit(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def function_1():
+ '''
+ >>> __import__('pdb').set_trace()
+ '''
+ """
+ )
+ # NOTE: does not use pytest.set_trace, but Python's patched pdb,
+ # therefore "-s" is required.
+ child = pytester.spawn_pytest("--doctest-modules --pdb -s %s" % p1)
+ child.expect("Pdb")
+ child.sendline("q")
+ rest = child.read().decode("utf8")
+
+ assert "! _pytest.outcomes.Exit: Quitting debugger !" in rest
+ assert "= no tests ran in" in rest
+ assert "BdbQuit" not in rest
+ assert "UNEXPECTED EXCEPTION" not in rest
+
+ def test_pdb_interaction_capturing_twice(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+ def test_1():
+ i = 0
+ print("hello17")
+ pytest.set_trace()
+ x = 3
+ print("hello18")
+ pytest.set_trace()
+ x = 4
+ assert 0
+ """
+ )
+ child = pytester.spawn_pytest(str(p1))
+ child.expect(r"PDB set_trace \(IO-capturing turned off\)")
+ child.expect("test_1")
+ child.expect("x = 3")
+ child.expect("Pdb")
+ child.sendline("c")
+ child.expect(r"PDB continue \(IO-capturing resumed\)")
+ child.expect(r"PDB set_trace \(IO-capturing turned off\)")
+ child.expect("x = 4")
+ child.expect("Pdb")
+ child.sendline("c")
+ child.expect("_ test_1 _")
+ child.expect("def test_1")
+ rest = child.read().decode("utf8")
+ assert "Captured stdout call" in rest
+ assert "hello17" in rest # out is captured
+ assert "hello18" in rest # out is captured
+ assert "1 failed" in rest
+ self.flush(child)
+
+ def test_pdb_with_injected_do_debug(self, pytester: Pytester) -> None:
+ """Simulates pdbpp, which injects Pdb into do_debug, and uses
+ self.__class__ in do_continue.
+ """
+ p1 = pytester.makepyfile(
+ mytest="""
+ import pdb
+ import pytest
+
+ count_continue = 0
+
+ class CustomPdb(pdb.Pdb, object):
+ def do_debug(self, arg):
+ import sys
+ import types
+
+ do_debug_func = pdb.Pdb.do_debug
+
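+                    # Rebuild do_debug from its original code object with a copy of its
+                    # globals in which "Pdb" is replaced by self.__class__, so the
+                    # recursive debugger it creates uses this class (as pdbpp does).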
+ newglobals = do_debug_func.__globals__.copy()
+ newglobals['Pdb'] = self.__class__
+ orig_do_debug = types.FunctionType(
+ do_debug_func.__code__, newglobals,
+ do_debug_func.__name__, do_debug_func.__defaults__,
+ )
+ return orig_do_debug(self, arg)
+ do_debug.__doc__ = pdb.Pdb.do_debug.__doc__
+
+ def do_continue(self, *args, **kwargs):
+ global count_continue
+ count_continue += 1
+ return super(CustomPdb, self).do_continue(*args, **kwargs)
+
+ def foo():
+ print("print_from_foo")
+
+ def test_1():
+ i = 0
+ print("hello17")
+ pytest.set_trace()
+ x = 3
+ print("hello18")
+
+ assert count_continue == 2, "unexpected_failure: %d != 2" % count_continue
+ pytest.fail("expected_failure")
+ """
+ )
+ child = pytester.spawn_pytest("--pdbcls=mytest:CustomPdb %s" % str(p1))
+ child.expect(r"PDB set_trace \(IO-capturing turned off\)")
+ child.expect(r"\n\(Pdb")
+ child.sendline("debug foo()")
+ child.expect("ENTERING RECURSIVE DEBUGGER")
+ child.expect(r"\n\(\(Pdb")
+ child.sendline("c")
+ child.expect("LEAVING RECURSIVE DEBUGGER")
+ assert b"PDB continue" not in child.before
+ # No extra newline.
+ assert child.before.endswith(b"c\r\nprint_from_foo\r\n")
+
+        # do_debug should not raise outcomes.Exit when used recursively.
+ child.sendline("debug 42")
+ child.sendline("q")
+ child.expect("LEAVING RECURSIVE DEBUGGER")
+ assert b"ENTERING RECURSIVE DEBUGGER" in child.before
+ assert b"Quitting debugger" not in child.before
+
+ child.sendline("c")
+ child.expect(r"PDB continue \(IO-capturing resumed\)")
+ rest = child.read().decode("utf8")
+ assert "hello17" in rest # out is captured
+ assert "hello18" in rest # out is captured
+ assert "1 failed" in rest
+ assert "Failed: expected_failure" in rest
+ assert "AssertionError: unexpected_failure" not in rest
+ self.flush(child)
+
+ def test_pdb_without_capture(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+ def test_1():
+ pytest.set_trace()
+ """
+ )
+ child = pytester.spawn_pytest("-s %s" % p1)
+ child.expect(r">>> PDB set_trace >>>")
+ child.expect("Pdb")
+ child.sendline("c")
+ child.expect(r">>> PDB continue >>>")
+ child.expect("1 passed")
+ self.flush(child)
+
+ @pytest.mark.parametrize("capture_arg", ("", "-s", "-p no:capture"))
+ def test_pdb_continue_with_recursive_debug(
+ self, capture_arg, pytester: Pytester
+ ) -> None:
+ """Full coverage for do_debug without capturing.
+
+ This is very similar to test_pdb_interaction_continue_recursive in general,
+        but mocks out ``pdb.set_trace`` to provide more coverage.
+ """
+ p1 = pytester.makepyfile(
+ """
+ try:
+ input = raw_input
+ except NameError:
+ pass
+
+ def set_trace():
+ __import__('pdb').set_trace()
+
+ def test_1(monkeypatch):
+ import _pytest.debugging
+
+ class pytestPDBTest(_pytest.debugging.pytestPDB):
+ @classmethod
+ def set_trace(cls, *args, **kwargs):
+ # Init PytestPdbWrapper to handle capturing.
+ _pdb = cls._init_pdb("set_trace", *args, **kwargs)
+
+ # Mock out pdb.Pdb.do_continue.
+ import pdb
+ pdb.Pdb.do_continue = lambda self, arg: None
+
+ print("===" + " SET_TRACE ===")
+ assert input() == "debug set_trace()"
+
+ # Simulate PytestPdbWrapper.do_debug
+ cls._recursive_debug += 1
+ print("ENTERING RECURSIVE DEBUGGER")
+ print("===" + " SET_TRACE_2 ===")
+
+ assert input() == "c"
+ _pdb.do_continue("")
+ print("===" + " SET_TRACE_3 ===")
+
+ # Simulate PytestPdbWrapper.do_debug
+ print("LEAVING RECURSIVE DEBUGGER")
+ cls._recursive_debug -= 1
+
+ print("===" + " SET_TRACE_4 ===")
+ assert input() == "c"
+ _pdb.do_continue("")
+
+ def do_continue(self, arg):
+ print("=== do_continue")
+
+ monkeypatch.setattr(_pytest.debugging, "pytestPDB", pytestPDBTest)
+
+ import pdb
+ monkeypatch.setattr(pdb, "set_trace", pytestPDBTest.set_trace)
+
+ set_trace()
+ """
+ )
+ child = pytester.spawn_pytest(f"--tb=short {p1} {capture_arg}")
+ child.expect("=== SET_TRACE ===")
+ before = child.before.decode("utf8")
+ if not capture_arg:
+ assert ">>> PDB set_trace (IO-capturing turned off) >>>" in before
+ else:
+ assert ">>> PDB set_trace >>>" in before
+ child.sendline("debug set_trace()")
+ child.expect("=== SET_TRACE_2 ===")
+ before = child.before.decode("utf8")
+ assert "\r\nENTERING RECURSIVE DEBUGGER\r\n" in before
+ child.sendline("c")
+ child.expect("=== SET_TRACE_3 ===")
+
+ # No continue message with recursive debugging.
+ before = child.before.decode("utf8")
+ assert ">>> PDB continue " not in before
+
+ child.sendline("c")
+ child.expect("=== SET_TRACE_4 ===")
+ before = child.before.decode("utf8")
+ assert "\r\nLEAVING RECURSIVE DEBUGGER\r\n" in before
+ child.sendline("c")
+ rest = child.read().decode("utf8")
+ if not capture_arg:
+ assert "> PDB continue (IO-capturing resumed) >" in rest
+ else:
+ assert "> PDB continue >" in rest
+ assert "= 1 passed in" in rest
+
+ def test_pdb_used_outside_test(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+ pytest.set_trace()
+ x = 5
+ """
+ )
+ child = pytester.spawn(f"{sys.executable} {p1}")
+ child.expect("x = 5")
+ child.expect("Pdb")
+ child.sendeof()
+ self.flush(child)
+
+ def test_pdb_used_in_generate_tests(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+ def pytest_generate_tests(metafunc):
+ pytest.set_trace()
+ x = 5
+ def test_foo(a):
+ pass
+ """
+ )
+ child = pytester.spawn_pytest(str(p1))
+ child.expect("x = 5")
+ child.expect("Pdb")
+ child.sendeof()
+ self.flush(child)
+
+ def test_pdb_collection_failure_is_shown(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile("xxx")
+ result = pytester.runpytest_subprocess("--pdb", p1)
+ result.stdout.fnmatch_lines(
+ ["E NameError: *xxx*", "*! *Exit: Quitting debugger !*"] # due to EOF
+ )
+
+ @pytest.mark.parametrize("post_mortem", (False, True))
+ def test_enter_leave_pdb_hooks_are_called(
+ self, post_mortem, pytester: Pytester
+ ) -> None:
+ pytester.makeconftest(
+ """
+ mypdb = None
+
+ def pytest_configure(config):
+ config.testing_verification = 'configured'
+
+ def pytest_enter_pdb(config, pdb):
+ assert config.testing_verification == 'configured'
+ print('enter_pdb_hook')
+
+ global mypdb
+ mypdb = pdb
+ mypdb.set_attribute = "bar"
+
+ def pytest_leave_pdb(config, pdb):
+ assert config.testing_verification == 'configured'
+ print('leave_pdb_hook')
+
+ global mypdb
+ assert mypdb is pdb
+ assert mypdb.set_attribute == "bar"
+ """
+ )
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+
+ def test_set_trace():
+ pytest.set_trace()
+ assert 0
+
+ def test_post_mortem():
+ assert 0
+ """
+ )
+ if post_mortem:
+ child = pytester.spawn_pytest(str(p1) + " --pdb -s -k test_post_mortem")
+ else:
+ child = pytester.spawn_pytest(str(p1) + " -k test_set_trace")
+ child.expect("enter_pdb_hook")
+ child.sendline("c")
+ if post_mortem:
+ child.expect(r"PDB continue")
+ else:
+ child.expect(r"PDB continue \(IO-capturing resumed\)")
+ child.expect("Captured stdout call")
+ rest = child.read().decode("utf8")
+ assert "leave_pdb_hook" in rest
+ assert "1 failed" in rest
+ self.flush(child)
+
+ def test_pdb_custom_cls(
+ self, pytester: Pytester, custom_pdb_calls: List[str]
+ ) -> None:
+ p1 = pytester.makepyfile("""xxx """)
+ result = pytester.runpytest_inprocess(
+ "--pdb", "--pdbcls=_pytest:_CustomPdb", p1
+ )
+ result.stdout.fnmatch_lines(["*NameError*xxx*", "*1 error*"])
+ assert custom_pdb_calls == ["init", "reset", "interaction"]
+
+ def test_pdb_custom_cls_invalid(self, pytester: Pytester) -> None:
+ result = pytester.runpytest_inprocess("--pdbcls=invalid")
+ result.stderr.fnmatch_lines(
+ [
+ "*: error: argument --pdbcls: 'invalid' is not in the format 'modname:classname'"
+ ]
+ )
+
+ def test_pdb_validate_usepdb_cls(self):
+ assert _validate_usepdb_cls("os.path:dirname.__name__") == (
+ "os.path",
+ "dirname.__name__",
+ )
+
+ assert _validate_usepdb_cls("pdb:DoesNotExist") == ("pdb", "DoesNotExist")
+
+ def test_pdb_custom_cls_without_pdb(
+ self, pytester: Pytester, custom_pdb_calls: List[str]
+ ) -> None:
+ p1 = pytester.makepyfile("""xxx """)
+ result = pytester.runpytest_inprocess("--pdbcls=_pytest:_CustomPdb", p1)
+ result.stdout.fnmatch_lines(["*NameError*xxx*", "*1 error*"])
+ assert custom_pdb_calls == []
+
+ def test_pdb_custom_cls_with_set_trace(
+ self,
+ pytester: Pytester,
+ monkeypatch: MonkeyPatch,
+ ) -> None:
+ pytester.makepyfile(
+ custom_pdb="""
+ class CustomPdb(object):
+ def __init__(self, *args, **kwargs):
+ skip = kwargs.pop("skip")
+ assert skip == ["foo.*"]
+ print("__init__")
+ super(CustomPdb, self).__init__(*args, **kwargs)
+
+ def set_trace(*args, **kwargs):
+ print('custom set_trace>')
+ """
+ )
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+
+ def test_foo():
+ pytest.set_trace(skip=['foo.*'])
+ """
+ )
+ monkeypatch.setenv("PYTHONPATH", str(pytester.path))
+ child = pytester.spawn_pytest("--pdbcls=custom_pdb:CustomPdb %s" % str(p1))
+
+ child.expect("__init__")
+ child.expect("custom set_trace>")
+ self.flush(child)
+
+
+class TestDebuggingBreakpoints:
+ def test_supports_breakpoint_module_global(self) -> None:
+ """Test that supports breakpoint global marks on Python 3.7+."""
+ if sys.version_info >= (3, 7):
+ assert SUPPORTS_BREAKPOINT_BUILTIN is True
+
+ @pytest.mark.skipif(
+ not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
+ )
+ @pytest.mark.parametrize("arg", ["--pdb", ""])
+ def test_sys_breakpointhook_configure_and_unconfigure(
+ self, pytester: Pytester, arg: str
+ ) -> None:
+ """
+        Test that sys.breakpointhook is set to pytest's custom Pdb class once
+        configured, and that the hook is reset to the system value once pytest
+        has been unconfigured.
+ """
+ pytester.makeconftest(
+ """
+ import sys
+ from pytest import hookimpl
+ from _pytest.debugging import pytestPDB
+
+ def pytest_configure(config):
+ config.add_cleanup(check_restored)
+
+ def check_restored():
+ assert sys.breakpointhook == sys.__breakpointhook__
+
+ def test_check():
+ assert sys.breakpointhook == pytestPDB.set_trace
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_nothing(): pass
+ """
+ )
+ args = (arg,) if arg else ()
+ result = pytester.runpytest_subprocess(*args)
+ result.stdout.fnmatch_lines(["*1 passed in *"])
+
+ @pytest.mark.skipif(
+ not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
+ )
+ def test_pdb_custom_cls(self, pytester: Pytester, custom_debugger_hook) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test_nothing():
+ breakpoint()
+ """
+ )
+ result = pytester.runpytest_inprocess(
+ "--pdb", "--pdbcls=_pytest:_CustomDebugger", p1
+ )
+ result.stdout.fnmatch_lines(["*CustomDebugger*", "*1 passed*"])
+ assert custom_debugger_hook == ["init", "set_trace"]
+
+ @pytest.mark.parametrize("arg", ["--pdb", ""])
+ @pytest.mark.skipif(
+ not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
+ )
+ def test_environ_custom_class(
+ self, pytester: Pytester, custom_debugger_hook, arg: str
+ ) -> None:
+ pytester.makeconftest(
+ """
+ import os
+ import sys
+
+ os.environ['PYTHONBREAKPOINT'] = '_pytest._CustomDebugger.set_trace'
+
+ def pytest_configure(config):
+ config.add_cleanup(check_restored)
+
+ def check_restored():
+ assert sys.breakpointhook == sys.__breakpointhook__
+
+ def test_check():
+ import _pytest
+ assert sys.breakpointhook is _pytest._CustomDebugger.set_trace
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_nothing(): pass
+ """
+ )
+ args = (arg,) if arg else ()
+ result = pytester.runpytest_subprocess(*args)
+ result.stdout.fnmatch_lines(["*1 passed in *"])
+
+ @pytest.mark.skipif(
+ not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
+ )
+ @pytest.mark.skipif(
+        _ENVIRON_PYTHONBREAKPOINT != "",
+ reason="Requires breakpoint() default value",
+ )
+ def test_sys_breakpoint_interception(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test_1():
+ breakpoint()
+ """
+ )
+ child = pytester.spawn_pytest(str(p1))
+ child.expect("test_1")
+ child.expect("Pdb")
+ child.sendline("quit")
+ rest = child.read().decode("utf8")
+ assert "Quitting debugger" in rest
+ assert "reading from stdin while output" not in rest
+ TestPDB.flush(child)
+
+ @pytest.mark.skipif(
+ not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
+ )
+ def test_pdb_not_altered(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pdb
+ def test_1():
+ pdb.set_trace()
+ assert 0
+ """
+ )
+ child = pytester.spawn_pytest(str(p1))
+ child.expect("test_1")
+ child.expect("Pdb")
+ child.sendline("c")
+ rest = child.read().decode("utf8")
+ assert "1 failed" in rest
+ assert "reading from stdin while output" not in rest
+ TestPDB.flush(child)
+
+
+class TestTraceOption:
+ def test_trace_sets_breakpoint(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test_1():
+ assert True
+
+ def test_2():
+ pass
+
+ def test_3():
+ pass
+ """
+ )
+ child = pytester.spawn_pytest("--trace " + str(p1))
+ child.expect("test_1")
+ child.expect("Pdb")
+ child.sendline("c")
+ child.expect("test_2")
+ child.expect("Pdb")
+ child.sendline("c")
+ child.expect("test_3")
+ child.expect("Pdb")
+ child.sendline("q")
+ child.expect_exact("Exit: Quitting debugger")
+ rest = child.read().decode("utf8")
+ assert "= 2 passed in" in rest
+ assert "reading from stdin while output" not in rest
+ # Only printed once - not on stderr.
+ assert "Exit: Quitting debugger" not in child.before.decode("utf8")
+ TestPDB.flush(child)
+
+ def test_trace_with_parametrize_handles_shared_fixtureinfo(
+ self, pytester: Pytester
+ ) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('myparam', [1,2])
+ def test_1(myparam, request):
+ assert myparam in (1, 2)
+ assert request.function.__name__ == "test_1"
+ @pytest.mark.parametrize('func', [1,2])
+ def test_func(func, request):
+ assert func in (1, 2)
+ assert request.function.__name__ == "test_func"
+ @pytest.mark.parametrize('myparam', [1,2])
+ def test_func_kw(myparam, request, func="func_kw"):
+ assert myparam in (1, 2)
+ assert func == "func_kw"
+ assert request.function.__name__ == "test_func_kw"
+ """
+ )
+ child = pytester.spawn_pytest("--trace " + str(p1))
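+        # --trace drops into the debugger at the start of every test; for each
+        # function, "args" must report the parametrized argument under its own
+        # name for both parameter values.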
+ for func, argname in [
+ ("test_1", "myparam"),
+ ("test_func", "func"),
+ ("test_func_kw", "myparam"),
+ ]:
+ child.expect_exact("> PDB runcall (IO-capturing turned off) >")
+ child.expect_exact(func)
+ child.expect_exact("Pdb")
+ child.sendline("args")
+ child.expect_exact(f"{argname} = 1\r\n")
+ child.expect_exact("Pdb")
+ child.sendline("c")
+ child.expect_exact("Pdb")
+ child.sendline("args")
+ child.expect_exact(f"{argname} = 2\r\n")
+ child.expect_exact("Pdb")
+ child.sendline("c")
+ child.expect_exact("> PDB continue (IO-capturing resumed) >")
+ rest = child.read().decode("utf8")
+ assert "= 6 passed in" in rest
+ assert "reading from stdin while output" not in rest
+ # Only printed once - not on stderr.
+ assert "Exit: Quitting debugger" not in child.before.decode("utf8")
+ TestPDB.flush(child)
+
+
+def test_trace_after_runpytest(pytester: Pytester) -> None:
+ """Test that debugging's pytest_configure is re-entrant."""
+ p1 = pytester.makepyfile(
+ """
+ from _pytest.debugging import pytestPDB
+
+ def test_outer(pytester) -> None:
+ assert len(pytestPDB._saved) == 1
+
+ pytester.makepyfile(
+ \"""
+ from _pytest.debugging import pytestPDB
+
+ def test_inner():
+ assert len(pytestPDB._saved) == 2
+ print()
+ print("test_inner_" + "end")
+ \"""
+ )
+
+ result = pytester.runpytest("-s", "-k", "test_inner")
+ assert result.ret == 0
+
+ assert len(pytestPDB._saved) == 1
+ """
+ )
+ result = pytester.runpytest_subprocess("-s", "-p", "pytester", str(p1))
+ result.stdout.fnmatch_lines(["test_inner_end"])
+ assert result.ret == 0
+
+
+def test_quit_with_swallowed_SystemExit(pytester: Pytester) -> None:
+ """Test that debugging's pytest_configure is re-entrant."""
+ p1 = pytester.makepyfile(
+ """
+ def call_pdb_set_trace():
+ __import__('pdb').set_trace()
+
+
+ def test_1():
+ try:
+ call_pdb_set_trace()
+ except SystemExit:
+ pass
+
+
+ def test_2():
+ pass
+ """
+ )
+ child = pytester.spawn_pytest(str(p1))
+ child.expect("Pdb")
+ child.sendline("q")
+ child.expect_exact("Exit: Quitting debugger")
+ rest = child.read().decode("utf8")
+ assert "no tests ran" in rest
+ TestPDB.flush(child)
+
+
+@pytest.mark.parametrize("fixture", ("capfd", "capsys"))
+def test_pdb_suspends_fixture_capturing(pytester: Pytester, fixture: str) -> None:
+ """Using "-s" with pytest should suspend/resume fixture capturing."""
+ p1 = pytester.makepyfile(
+ """
+ def test_inner({fixture}):
+ import sys
+
+ print("out_inner_before")
+ sys.stderr.write("err_inner_before\\n")
+
+ __import__("pdb").set_trace()
+
+ print("out_inner_after")
+ sys.stderr.write("err_inner_after\\n")
+
+ out, err = {fixture}.readouterr()
+ assert out =="out_inner_before\\nout_inner_after\\n"
+ assert err =="err_inner_before\\nerr_inner_after\\n"
+ """.format(
+ fixture=fixture
+ )
+ )
+
+ child = pytester.spawn_pytest(str(p1) + " -s")
+
+ child.expect("Pdb")
+ before = child.before.decode("utf8")
+ assert (
+ "> PDB set_trace (IO-capturing turned off for fixture %s) >" % (fixture)
+ in before
+ )
+
+ # Test that capturing is really suspended.
+ child.sendline("p 40 + 2")
+ child.expect("Pdb")
+ assert "\r\n42\r\n" in child.before.decode("utf8")
+
+ child.sendline("c")
+ rest = child.read().decode("utf8")
+ assert "out_inner" not in rest
+ assert "err_inner" not in rest
+
+ TestPDB.flush(child)
+ assert child.exitstatus == 0
+ assert "= 1 passed in" in rest
+ assert "> PDB continue (IO-capturing resumed for fixture %s) >" % (fixture) in rest
+
+
+def test_pdbcls_via_local_module(pytester: Pytester) -> None:
+ """It should be imported in pytest_configure or later only."""
+ p1 = pytester.makepyfile(
+ """
+ def test():
+ print("before_set_trace")
+ __import__("pdb").set_trace()
+ """,
+ mypdb="""
+ class Wrapped:
+ class MyPdb:
+ def set_trace(self, *args):
+ print("set_trace_called", args)
+
+ def runcall(self, *args, **kwds):
+ print("runcall_called", args, kwds)
+ """,
+ )
+ result = pytester.runpytest(
+ str(p1), "--pdbcls=really.invalid:Value", syspathinsert=True
+ )
+ result.stdout.fnmatch_lines(
+ [
+ "*= FAILURES =*",
+ "E * --pdbcls: could not import 'really.invalid:Value': No module named *really*",
+ ]
+ )
+ assert result.ret == 1
+
+ result = pytester.runpytest(
+ str(p1), "--pdbcls=mypdb:Wrapped.MyPdb", syspathinsert=True
+ )
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*set_trace_called*", "* 1 passed in *"])
+
+ # Ensure that it also works with --trace.
+ result = pytester.runpytest(
+ str(p1), "--pdbcls=mypdb:Wrapped.MyPdb", "--trace", syspathinsert=True
+ )
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*runcall_called*", "* 1 passed in *"])
+
+
+def test_raises_bdbquit_with_eoferror(pytester: Pytester) -> None:
+ """It is not guaranteed that DontReadFromInput's read is called."""
+
+ p1 = pytester.makepyfile(
+ """
+ def input_without_read(*args, **kwargs):
+ raise EOFError()
+
+ def test(monkeypatch):
+ import builtins
+ monkeypatch.setattr(builtins, "input", input_without_read)
+ __import__('pdb').set_trace()
+ """
+ )
+ result = pytester.runpytest(str(p1))
+ result.stdout.fnmatch_lines(["E *BdbQuit", "*= 1 failed in*"])
+ assert result.ret == 1
+
+
+def test_pdb_wrapper_class_is_reused(pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test():
+ __import__("pdb").set_trace()
+ __import__("pdb").set_trace()
+
+ import mypdb
+ instances = mypdb.instances
+ assert len(instances) == 2
+ assert instances[0].__class__ is instances[1].__class__
+ """,
+ mypdb="""
+ instances = []
+
+ class MyPdb:
+ def __init__(self, *args, **kwargs):
+ instances.append(self)
+
+ def set_trace(self, *args):
+ print("set_trace_called", args)
+ """,
+ )
+ result = pytester.runpytest(str(p1), "--pdbcls=mypdb:MyPdb", syspathinsert=True)
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ ["*set_trace_called*", "*set_trace_called*", "* 1 passed in *"]
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_doctest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_doctest.py
new file mode 100644
index 0000000000..67b8ccdb7e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_doctest.py
@@ -0,0 +1,1572 @@
+import inspect
+import sys
+import textwrap
+from pathlib import Path
+from typing import Callable
+from typing import Optional
+
+import pytest
+from _pytest.doctest import _get_checker
+from _pytest.doctest import _is_main_py
+from _pytest.doctest import _is_mocked
+from _pytest.doctest import _is_setup_py
+from _pytest.doctest import _patch_unwrap_mock_aware
+from _pytest.doctest import DoctestItem
+from _pytest.doctest import DoctestModule
+from _pytest.doctest import DoctestTextfile
+from _pytest.pytester import Pytester
+
+
+class TestDoctests:
+ def test_collect_testtextfile(self, pytester: Pytester):
+ w = pytester.maketxtfile(whatever="")
+ checkfile = pytester.maketxtfile(
+ test_something="""
+ alskdjalsdk
+ >>> i = 5
+ >>> i-1
+ 4
+ """
+ )
+
+ for x in (pytester.path, checkfile):
+ # print "checking that %s returns custom items" % (x,)
+ items, reprec = pytester.inline_genitems(x)
+ assert len(items) == 1
+ assert isinstance(items[0], DoctestItem)
+ assert isinstance(items[0].parent, DoctestTextfile)
+ # Empty file has no items.
+ items, reprec = pytester.inline_genitems(w)
+ assert len(items) == 0
+
+ def test_collect_module_empty(self, pytester: Pytester):
+ path = pytester.makepyfile(whatever="#")
+ for p in (path, pytester.path):
+ items, reprec = pytester.inline_genitems(p, "--doctest-modules")
+ assert len(items) == 0
+
+ def test_collect_module_single_modulelevel_doctest(self, pytester: Pytester):
+ path = pytester.makepyfile(whatever='""">>> pass"""')
+ for p in (path, pytester.path):
+ items, reprec = pytester.inline_genitems(p, "--doctest-modules")
+ assert len(items) == 1
+ assert isinstance(items[0], DoctestItem)
+ assert isinstance(items[0].parent, DoctestModule)
+
+ def test_collect_module_two_doctest_one_modulelevel(self, pytester: Pytester):
+ path = pytester.makepyfile(
+ whatever="""
+ '>>> x = None'
+ def my_func():
+ ">>> magic = 42 "
+ """
+ )
+ for p in (path, pytester.path):
+ items, reprec = pytester.inline_genitems(p, "--doctest-modules")
+ assert len(items) == 2
+ assert isinstance(items[0], DoctestItem)
+ assert isinstance(items[1], DoctestItem)
+ assert isinstance(items[0].parent, DoctestModule)
+ assert items[0].parent is items[1].parent
+
+ @pytest.mark.parametrize("filename", ["__init__", "whatever"])
+ def test_collect_module_two_doctest_no_modulelevel(
+ self,
+ pytester: Pytester,
+ filename: str,
+ ) -> None:
+ path = pytester.makepyfile(
+ **{
+ filename: """
+ '# Empty'
+ def my_func():
+ ">>> magic = 42 "
+ def useless():
+ '''
+ # This is a function
+ # >>> # it doesn't have any doctest
+ '''
+ def another():
+ '''
+ # This is another function
+ >>> import os # this one does have a doctest
+ '''
+ """,
+ },
+ )
+ for p in (path, pytester.path):
+ items, reprec = pytester.inline_genitems(p, "--doctest-modules")
+ assert len(items) == 2
+ assert isinstance(items[0], DoctestItem)
+ assert isinstance(items[1], DoctestItem)
+ assert isinstance(items[0].parent, DoctestModule)
+ assert items[0].parent is items[1].parent
+
+ def test_simple_doctestfile(self, pytester: Pytester):
+ p = pytester.maketxtfile(
+ test_doc="""
+ >>> x = 1
+ >>> x == 1
+ False
+ """
+ )
+ reprec = pytester.inline_run(p)
+ reprec.assertoutcome(failed=1)
+
+ def test_new_pattern(self, pytester: Pytester):
+ p = pytester.maketxtfile(
+ xdoc="""
+ >>> x = 1
+ >>> x == 1
+ False
+ """
+ )
+ reprec = pytester.inline_run(p, "--doctest-glob=x*.txt")
+ reprec.assertoutcome(failed=1)
+
+ def test_multiple_patterns(self, pytester: Pytester):
+ """Test support for multiple --doctest-glob arguments (#1255)."""
+ pytester.maketxtfile(
+ xdoc="""
+ >>> 1
+ 1
+ """
+ )
+ pytester.makefile(
+ ".foo",
+ test="""
+ >>> 1
+ 1
+ """,
+ )
+ pytester.maketxtfile(
+ test_normal="""
+ >>> 1
+ 1
+ """
+ )
+ expected = {"xdoc.txt", "test.foo", "test_normal.txt"}
+ assert {x.name for x in pytester.path.iterdir()} == expected
+ args = ["--doctest-glob=xdoc*.txt", "--doctest-glob=*.foo"]
+ result = pytester.runpytest(*args)
+ result.stdout.fnmatch_lines(["*test.foo *", "*xdoc.txt *", "*2 passed*"])
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*test_normal.txt *", "*1 passed*"])
+
+ @pytest.mark.parametrize(
+ " test_string, encoding",
+ [("foo", "ascii"), ("öäü", "latin1"), ("öäü", "utf-8")],
+ )
+ def test_encoding(self, pytester, test_string, encoding):
+ """Test support for doctest_encoding ini option."""
+ pytester.makeini(
+ """
+ [pytest]
+ doctest_encoding={}
+ """.format(
+ encoding
+ )
+ )
+ doctest = """
+ >>> "{}"
+ {}
+ """.format(
+ test_string, repr(test_string)
+ )
+ fn = pytester.path / "test_encoding.txt"
+ fn.write_text(doctest, encoding=encoding)
+
+ result = pytester.runpytest()
+
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_doctest_unexpected_exception(self, pytester: Pytester):
+ pytester.maketxtfile(
+ """
+ >>> i = 0
+ >>> 0 / i
+ 2
+ """
+ )
+ result = pytester.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(
+ [
+ "test_doctest_unexpected_exception.txt F *",
+ "",
+ "*= FAILURES =*",
+ "*_ [[]doctest[]] test_doctest_unexpected_exception.txt _*",
+ "001 >>> i = 0",
+ "002 >>> 0 / i",
+ "UNEXPECTED EXCEPTION: ZeroDivisionError*",
+ "Traceback (most recent call last):",
+ ' File "*/doctest.py", line *, in __run',
+ " *",
+ *((" *^^^^*",) if sys.version_info >= (3, 11) else ()),
+ ' File "<doctest test_doctest_unexpected_exception.txt[1]>", line 1, in <module>',
+ "ZeroDivisionError: division by zero",
+ "*/test_doctest_unexpected_exception.txt:2: UnexpectedException",
+ ],
+ consecutive=True,
+ )
+
+ def test_doctest_outcomes(self, pytester: Pytester):
+ pytester.maketxtfile(
+ test_skip="""
+ >>> 1
+ 1
+ >>> import pytest
+ >>> pytest.skip("")
+ >>> 2
+ 3
+ """,
+ test_xfail="""
+ >>> import pytest
+ >>> pytest.xfail("xfail_reason")
+ >>> foo
+ bar
+ """,
+ test_importorskip="""
+ >>> import pytest
+ >>> pytest.importorskip("doesnotexist")
+ >>> foo
+ bar
+ """,
+ )
+ result = pytester.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 3 items",
+ "",
+ "test_importorskip.txt s *",
+ "test_skip.txt s *",
+ "test_xfail.txt x *",
+ "",
+ "*= 2 skipped, 1 xfailed in *",
+ ]
+ )
+
+ def test_docstring_partial_context_around_error(self, pytester: Pytester):
+ """Test that we show some context before the actual line of a failing
+ doctest.
+ """
+ pytester.makepyfile(
+ '''
+ def foo():
+ """
+ text-line-1
+ text-line-2
+ text-line-3
+ text-line-4
+ text-line-5
+ text-line-6
+ text-line-7
+ text-line-8
+ text-line-9
+ text-line-10
+ text-line-11
+ >>> 1 + 1
+ 3
+
+ text-line-after
+ """
+ '''
+ )
+ result = pytester.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(
+ [
+ "*docstring_partial_context_around_error*",
+ "005*text-line-3",
+ "006*text-line-4",
+ "013*text-line-11",
+ "014*>>> 1 + 1",
+ "Expected:",
+ " 3",
+ "Got:",
+ " 2",
+ ]
+ )
+ # lines below should be trimmed out
+ result.stdout.no_fnmatch_line("*text-line-2*")
+ result.stdout.no_fnmatch_line("*text-line-after*")
+
+ def test_docstring_full_context_around_error(self, pytester: Pytester):
+ """Test that we show the whole context before the actual line of a failing
+ doctest, provided that the context is up to 10 lines long.
+ """
+ pytester.makepyfile(
+ '''
+ def foo():
+ """
+ text-line-1
+ text-line-2
+
+ >>> 1 + 1
+ 3
+ """
+ '''
+ )
+ result = pytester.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(
+ [
+ "*docstring_full_context_around_error*",
+ "003*text-line-1",
+ "004*text-line-2",
+ "006*>>> 1 + 1",
+ "Expected:",
+ " 3",
+ "Got:",
+ " 2",
+ ]
+ )
+
+ def test_doctest_linedata_missing(self, pytester: Pytester):
+ pytester.path.joinpath("hello.py").write_text(
+ textwrap.dedent(
+ """\
+ class Fun(object):
+ @property
+ def test(self):
+ '''
+ >>> a = 1
+ >>> 1/0
+ '''
+ """
+ )
+ )
+ result = pytester.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(
+ ["*hello*", "006*>>> 1/0*", "*UNEXPECTED*ZeroDivision*", "*1 failed*"]
+ )
+
+ def test_doctest_linedata_on_property(self, pytester: Pytester):
+ pytester.makepyfile(
+ """
+ class Sample(object):
+ @property
+ def some_property(self):
+ '''
+ >>> Sample().some_property
+ 'another thing'
+ '''
+ return 'something'
+ """
+ )
+ result = pytester.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(
+ [
+ "*= FAILURES =*",
+ "*_ [[]doctest[]] test_doctest_linedata_on_property.Sample.some_property _*",
+ "004 ",
+ "005 >>> Sample().some_property",
+ "Expected:",
+ " 'another thing'",
+ "Got:",
+ " 'something'",
+ "",
+ "*/test_doctest_linedata_on_property.py:5: DocTestFailure",
+ "*= 1 failed in *",
+ ]
+ )
+
+ def test_doctest_no_linedata_on_overriden_property(self, pytester: Pytester):
+ pytester.makepyfile(
+ """
+ class Sample(object):
+ @property
+ def some_property(self):
+ '''
+ >>> Sample().some_property
+ 'another thing'
+ '''
+ return 'something'
+ some_property = property(some_property.__get__, None, None, some_property.__doc__)
+ """
+ )
+ result = pytester.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(
+ [
+ "*= FAILURES =*",
+ "*_ [[]doctest[]] test_doctest_no_linedata_on_overriden_property.Sample.some_property _*",
+ "EXAMPLE LOCATION UNKNOWN, not showing all tests of that example",
+ "[?][?][?] >>> Sample().some_property",
+ "Expected:",
+ " 'another thing'",
+ "Got:",
+ " 'something'",
+ "",
+ "*/test_doctest_no_linedata_on_overriden_property.py:None: DocTestFailure",
+ "*= 1 failed in *",
+ ]
+ )
+
+ def test_doctest_unex_importerror_only_txt(self, pytester: Pytester):
+ pytester.maketxtfile(
+ """
+ >>> import asdalsdkjaslkdjasd
+ >>>
+ """
+ )
+ result = pytester.runpytest()
+        # The doctest is executed and fails with an unexpected ModuleNotFoundError.
+ result.stdout.fnmatch_lines(
+ [
+ "*>>> import asdals*",
+ "*UNEXPECTED*ModuleNotFoundError*",
+ "ModuleNotFoundError: No module named *asdal*",
+ ]
+ )
+
+ def test_doctest_unex_importerror_with_module(self, pytester: Pytester):
+ pytester.path.joinpath("hello.py").write_text(
+ textwrap.dedent(
+ """\
+ import asdalsdkjaslkdjasd
+ """
+ )
+ )
+ pytester.maketxtfile(
+ """
+ >>> import hello
+ >>>
+ """
+ )
+ result = pytester.runpytest("--doctest-modules")
+ # doctest is never executed because of error during hello.py collection
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR collecting hello.py*",
+ "*ModuleNotFoundError: No module named *asdals*",
+ "*Interrupted: 1 error during collection*",
+ ]
+ )
+
+ def test_doctestmodule(self, pytester: Pytester):
+ p = pytester.makepyfile(
+ """
+ '''
+ >>> x = 1
+ >>> x == 1
+ False
+
+ '''
+ """
+ )
+ reprec = pytester.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(failed=1)
+
+ def test_doctestmodule_external_and_issue116(self, pytester: Pytester):
+ p = pytester.mkpydir("hello")
+ p.joinpath("__init__.py").write_text(
+ textwrap.dedent(
+ """\
+ def somefunc():
+ '''
+ >>> i = 0
+ >>> i + 1
+ 2
+ '''
+ """
+ )
+ )
+ result = pytester.runpytest(p, "--doctest-modules")
+ result.stdout.fnmatch_lines(
+ [
+ "003 *>>> i = 0",
+ "004 *>>> i + 1",
+ "*Expected:",
+ "* 2",
+ "*Got:",
+ "* 1",
+ "*:4: DocTestFailure",
+ ]
+ )
+
+ def test_txtfile_failing(self, pytester: Pytester):
+ p = pytester.maketxtfile(
+ """
+ >>> i = 0
+ >>> i + 1
+ 2
+ """
+ )
+ result = pytester.runpytest(p, "-s")
+ result.stdout.fnmatch_lines(
+ [
+ "001 >>> i = 0",
+ "002 >>> i + 1",
+ "Expected:",
+ " 2",
+ "Got:",
+ " 1",
+ "*test_txtfile_failing.txt:2: DocTestFailure",
+ ]
+ )
+
+ def test_txtfile_with_fixtures(self, pytester: Pytester):
+ p = pytester.maketxtfile(
+ """
+ >>> p = getfixture('tmp_path')
+ >>> p.is_dir()
+ True
+ """
+ )
+ reprec = pytester.inline_run(p)
+ reprec.assertoutcome(passed=1)
+
+ def test_txtfile_with_usefixtures_in_ini(self, pytester: Pytester):
+ pytester.makeini(
+ """
+ [pytest]
+ usefixtures = myfixture
+ """
+ )
+ pytester.makeconftest(
+ """
+ import pytest
+ @pytest.fixture
+ def myfixture(monkeypatch):
+ monkeypatch.setenv("HELLO", "WORLD")
+ """
+ )
+
+ p = pytester.maketxtfile(
+ """
+ >>> import os
+ >>> os.environ["HELLO"]
+ 'WORLD'
+ """
+ )
+ reprec = pytester.inline_run(p)
+ reprec.assertoutcome(passed=1)
+
+ def test_doctestmodule_with_fixtures(self, pytester: Pytester):
+ p = pytester.makepyfile(
+ """
+ '''
+ >>> p = getfixture('tmp_path')
+ >>> p.is_dir()
+ True
+ '''
+ """
+ )
+ reprec = pytester.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(passed=1)
+
+ def test_doctestmodule_three_tests(self, pytester: Pytester):
+ p = pytester.makepyfile(
+ """
+ '''
+ >>> p = getfixture('tmp_path')
+ >>> p.is_dir()
+ True
+ '''
+ def my_func():
+ '''
+ >>> magic = 42
+ >>> magic - 42
+ 0
+ '''
+ def useless():
+ pass
+ def another():
+ '''
+ >>> import os
+ >>> os is os
+ True
+ '''
+ """
+ )
+ reprec = pytester.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(passed=3)
+
+ def test_doctestmodule_two_tests_one_fail(self, pytester: Pytester):
+ p = pytester.makepyfile(
+ """
+ class MyClass(object):
+ def bad_meth(self):
+ '''
+ >>> magic = 42
+ >>> magic
+ 0
+ '''
+ def nice_meth(self):
+ '''
+ >>> magic = 42
+ >>> magic - 42
+ 0
+ '''
+ """
+ )
+ reprec = pytester.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(failed=1, passed=1)
+
+ def test_ignored_whitespace(self, pytester: Pytester):
+ pytester.makeini(
+ """
+ [pytest]
+ doctest_optionflags = ELLIPSIS NORMALIZE_WHITESPACE
+ """
+ )
+ p = pytester.makepyfile(
+ """
+ class MyClass(object):
+ '''
+ >>> a = "foo "
+ >>> print(a)
+ foo
+ '''
+ pass
+ """
+ )
+ reprec = pytester.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(passed=1)
+
+ def test_non_ignored_whitespace(self, pytester: Pytester):
+ pytester.makeini(
+ """
+ [pytest]
+ doctest_optionflags = ELLIPSIS
+ """
+ )
+ p = pytester.makepyfile(
+ """
+ class MyClass(object):
+ '''
+ >>> a = "foo "
+ >>> print(a)
+ foo
+ '''
+ pass
+ """
+ )
+ reprec = pytester.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(failed=1, passed=0)
+
+ def test_ignored_whitespace_glob(self, pytester: Pytester):
+ pytester.makeini(
+ """
+ [pytest]
+ doctest_optionflags = ELLIPSIS NORMALIZE_WHITESPACE
+ """
+ )
+ p = pytester.maketxtfile(
+ xdoc="""
+ >>> a = "foo "
+ >>> print(a)
+ foo
+ """
+ )
+ reprec = pytester.inline_run(p, "--doctest-glob=x*.txt")
+ reprec.assertoutcome(passed=1)
+
+ def test_non_ignored_whitespace_glob(self, pytester: Pytester):
+ pytester.makeini(
+ """
+ [pytest]
+ doctest_optionflags = ELLIPSIS
+ """
+ )
+ p = pytester.maketxtfile(
+ xdoc="""
+ >>> a = "foo "
+ >>> print(a)
+ foo
+ """
+ )
+ reprec = pytester.inline_run(p, "--doctest-glob=x*.txt")
+ reprec.assertoutcome(failed=1, passed=0)
+
+ def test_contains_unicode(self, pytester: Pytester):
+ """Fix internal error with docstrings containing non-ascii characters."""
+ pytester.makepyfile(
+ '''\
+ def foo():
+ """
+                >>> name = 'с' # not letter 'c' but instead Cyrillic 's'.
+ 'anything'
+ """
+ '''
+ )
+ result = pytester.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(["Got nothing", "* 1 failed in*"])
+
+ def test_ignore_import_errors_on_doctest(self, pytester: Pytester):
+ p = pytester.makepyfile(
+ """
+ import asdf
+
+ def add_one(x):
+ '''
+ >>> add_one(1)
+ 2
+ '''
+ return x + 1
+ """
+ )
+
+ reprec = pytester.inline_run(
+ p, "--doctest-modules", "--doctest-ignore-import-errors"
+ )
+ reprec.assertoutcome(skipped=1, failed=1, passed=0)
+
+ def test_junit_report_for_doctest(self, pytester: Pytester):
+ """#713: Fix --junit-xml option when used with --doctest-modules."""
+ p = pytester.makepyfile(
+ """
+ def foo():
+ '''
+ >>> 1 + 1
+ 3
+ '''
+ pass
+ """
+ )
+ reprec = pytester.inline_run(p, "--doctest-modules", "--junit-xml=junit.xml")
+ reprec.assertoutcome(failed=1)
+
+ def test_unicode_doctest(self, pytester: Pytester):
+ """
+ Test case for issue 2434: DecodeError on Python 2 when doctest contains non-ascii
+ characters.
+ """
+ p = pytester.maketxtfile(
+ test_unicode_doctest="""
+ .. doctest::
+
+ >>> print("Hi\\n\\nByé")
+ Hi
+ ...
+ Byé
+ >>> 1 / 0 # Byé
+ 1
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ ["*UNEXPECTED EXCEPTION: ZeroDivisionError*", "*1 failed*"]
+ )
+
+ def test_unicode_doctest_module(self, pytester: Pytester):
+ """
+ Test case for issue 2434: DecodeError on Python 2 when doctest docstring
+ contains non-ascii characters.
+ """
+ p = pytester.makepyfile(
+ test_unicode_doctest_module="""
+ def fix_bad_unicode(text):
+ '''
+ >>> print(fix_bad_unicode('único'))
+ único
+ '''
+ return "único"
+ """
+ )
+ result = pytester.runpytest(p, "--doctest-modules")
+ result.stdout.fnmatch_lines(["* 1 passed *"])
+
+ def test_print_unicode_value(self, pytester: Pytester):
+ """
+ Test case for issue 3583: Printing Unicode in doctest under Python 2.7
+ doesn't work
+ """
+ p = pytester.maketxtfile(
+ test_print_unicode_value=r"""
+ Here is a doctest::
+
+ >>> print('\xE5\xE9\xEE\xF8\xFC')
+ åéîøü
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(["* 1 passed *"])
+
+ def test_reportinfo(self, pytester: Pytester):
+ """Make sure that DoctestItem.reportinfo() returns lineno."""
+ p = pytester.makepyfile(
+ test_reportinfo="""
+ def foo(x):
+ '''
+ >>> foo('a')
+ 'b'
+ '''
+ return 'c'
+ """
+ )
+ items, reprec = pytester.inline_genitems(p, "--doctest-modules")
+ reportinfo = items[0].reportinfo()
+ assert reportinfo[1] == 1
+
+ def test_valid_setup_py(self, pytester: Pytester):
+ """
+        Test to make sure that pytest ignores valid setup.py files when run
+        with --doctest-modules.
+ """
+ p = pytester.makepyfile(
+ setup="""
+ from setuptools import setup, find_packages
+ if __name__ == '__main__':
+ setup(name='sample',
+ version='0.0',
+ description='description',
+ packages=find_packages()
+ )
+ """
+ )
+ result = pytester.runpytest(p, "--doctest-modules")
+ result.stdout.fnmatch_lines(["*collected 0 items*"])
+
+ def test_main_py_does_not_cause_import_errors(self, pytester: Pytester):
+ p = pytester.copy_example("doctest/main_py")
+ result = pytester.runpytest(p, "--doctest-modules")
+ result.stdout.fnmatch_lines(["*collected 2 items*", "*1 failed, 1 passed*"])
+
+ def test_invalid_setup_py(self, pytester: Pytester):
+ """
+        Test to make sure that pytest collects setup.py files that are not used
+        for Python packaging when run with --doctest-modules.
+ """
+ p = pytester.makepyfile(
+ setup="""
+ def test_foo():
+ return 'bar'
+ """
+ )
+ result = pytester.runpytest(p, "--doctest-modules")
+ result.stdout.fnmatch_lines(["*collected 1 item*"])
+
+
+class TestLiterals:
+ @pytest.mark.parametrize("config_mode", ["ini", "comment"])
+ def test_allow_unicode(self, pytester, config_mode):
+ """Test that doctests which output unicode work in all python versions
+ tested by pytest when the ALLOW_UNICODE option is used (either in
+ the ini file or by an inline comment).
+ """
+ if config_mode == "ini":
+ pytester.makeini(
+ """
+ [pytest]
+ doctest_optionflags = ALLOW_UNICODE
+ """
+ )
+ comment = ""
+ else:
+ comment = "#doctest: +ALLOW_UNICODE"
+
+ pytester.maketxtfile(
+ test_doc="""
+ >>> b'12'.decode('ascii') {comment}
+ '12'
+ """.format(
+ comment=comment
+ )
+ )
+ pytester.makepyfile(
+ foo="""
+ def foo():
+ '''
+ >>> b'12'.decode('ascii') {comment}
+ '12'
+ '''
+ """.format(
+ comment=comment
+ )
+ )
+ reprec = pytester.inline_run("--doctest-modules")
+ reprec.assertoutcome(passed=2)
+
+ @pytest.mark.parametrize("config_mode", ["ini", "comment"])
+ def test_allow_bytes(self, pytester, config_mode):
+ """Test that doctests which output bytes work in all python versions
+ tested by pytest when the ALLOW_BYTES option is used (either in
+        the ini file or by an inline comment) (#1287).
+ """
+ if config_mode == "ini":
+ pytester.makeini(
+ """
+ [pytest]
+ doctest_optionflags = ALLOW_BYTES
+ """
+ )
+ comment = ""
+ else:
+ comment = "#doctest: +ALLOW_BYTES"
+
+ pytester.maketxtfile(
+ test_doc="""
+ >>> b'foo' {comment}
+ 'foo'
+ """.format(
+ comment=comment
+ )
+ )
+ pytester.makepyfile(
+ foo="""
+ def foo():
+ '''
+ >>> b'foo' {comment}
+ 'foo'
+ '''
+ """.format(
+ comment=comment
+ )
+ )
+ reprec = pytester.inline_run("--doctest-modules")
+ reprec.assertoutcome(passed=2)
+
+ def test_unicode_string(self, pytester: Pytester):
+ """Test that doctests which output unicode fail in Python 2 when
+ the ALLOW_UNICODE option is not used. The same test should pass
+ in Python 3.
+ """
+ pytester.maketxtfile(
+ test_doc="""
+ >>> b'12'.decode('ascii')
+ '12'
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_bytes_literal(self, pytester: Pytester):
+ """Test that doctests which output bytes fail in Python 3 when
+ the ALLOW_BYTES option is not used. (#1287).
+ """
+ pytester.maketxtfile(
+ test_doc="""
+ >>> b'foo'
+ 'foo'
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(failed=1)
+
+ def test_number_re(self) -> None:
+ _number_re = _get_checker()._number_re # type: ignore
+ for s in [
+ "1.",
+ "+1.",
+ "-1.",
+ ".1",
+ "+.1",
+ "-.1",
+ "0.1",
+ "+0.1",
+ "-0.1",
+ "1e5",
+ "+1e5",
+ "1e+5",
+ "+1e+5",
+ "1e-5",
+ "+1e-5",
+ "-1e-5",
+ "1.2e3",
+ "-1.2e-3",
+ ]:
+ print(s)
+ m = _number_re.match(s)
+ assert m is not None
+ assert float(m.group()) == pytest.approx(float(s))
+ for s in ["1", "abc"]:
+ print(s)
+ assert _number_re.match(s) is None
+
+ @pytest.mark.parametrize("config_mode", ["ini", "comment"])
+ def test_number_precision(self, pytester, config_mode):
+ """Test the NUMBER option."""
+ if config_mode == "ini":
+ pytester.makeini(
+ """
+ [pytest]
+ doctest_optionflags = NUMBER
+ """
+ )
+ comment = ""
+ else:
+ comment = "#doctest: +NUMBER"
+
+ pytester.maketxtfile(
+ test_doc="""
+
+ Scalars:
+
+ >>> import math
+ >>> math.pi {comment}
+ 3.141592653589793
+ >>> math.pi {comment}
+ 3.1416
+ >>> math.pi {comment}
+ 3.14
+ >>> -math.pi {comment}
+ -3.14
+ >>> math.pi {comment}
+ 3.
+ >>> 3. {comment}
+ 3.0
+ >>> 3. {comment}
+ 3.
+ >>> 3. {comment}
+ 3.01
+ >>> 3. {comment}
+ 2.99
+ >>> .299 {comment}
+ .3
+ >>> .301 {comment}
+ .3
+ >>> 951. {comment}
+ 1e3
+ >>> 1049. {comment}
+ 1e3
+ >>> -1049. {comment}
+ -1e3
+ >>> 1e3 {comment}
+ 1e3
+ >>> 1e3 {comment}
+ 1000.
+
+ Lists:
+
+ >>> [3.1415, 0.097, 13.1, 7, 8.22222e5, 0.598e-2] {comment}
+ [3.14, 0.1, 13., 7, 8.22e5, 6.0e-3]
+ >>> [[0.333, 0.667], [0.999, 1.333]] {comment}
+ [[0.33, 0.667], [0.999, 1.333]]
+ >>> [[[0.101]]] {comment}
+ [[[0.1]]]
+
+ Doesn't barf on non-numbers:
+
+ >>> 'abc' {comment}
+ 'abc'
+ >>> None {comment}
+ """.format(
+ comment=comment
+ )
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ @pytest.mark.parametrize(
+ "expression,output",
+ [
+ # ints shouldn't match floats:
+ ("3.0", "3"),
+ ("3e0", "3"),
+ ("1e3", "1000"),
+ ("3", "3.0"),
+ # Rounding:
+ ("3.1", "3.0"),
+ ("3.1", "3.2"),
+ ("3.1", "4.0"),
+ ("8.22e5", "810000.0"),
+ # Only the actual output is rounded up, not the expected output:
+ ("3.0", "2.98"),
+ ("1e3", "999"),
+ # The current implementation doesn't understand that numbers inside
+ # strings shouldn't be treated as numbers:
+ pytest.param("'3.1416'", "'3.14'", marks=pytest.mark.xfail),
+ ],
+ )
+ def test_number_non_matches(self, pytester, expression, output):
+ pytester.maketxtfile(
+ test_doc="""
+ >>> {expression} #doctest: +NUMBER
+ {output}
+ """.format(
+ expression=expression, output=output
+ )
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=0, failed=1)
+
+ def test_number_and_allow_unicode(self, pytester: Pytester):
+ pytester.maketxtfile(
+ test_doc="""
+ >>> from collections import namedtuple
+ >>> T = namedtuple('T', 'a b c')
+ >>> T(a=0.2330000001, b=u'str', c=b'bytes') # doctest: +ALLOW_UNICODE, +ALLOW_BYTES, +NUMBER
+ T(a=0.233, b=u'str', c='bytes')
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+class TestDoctestSkips:
+ """
+ If all examples in a doctest are skipped due to the SKIP option, then
+ the tests should be SKIPPED rather than PASSED. (#957)
+ """
+
+ @pytest.fixture(params=["text", "module"])
+ def makedoctest(self, pytester, request):
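+        # Parametrized over "text" and "module": the doctest is written either
+        # to a standalone .txt file or as a module-level docstring of a .py file.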
+ def makeit(doctest):
+ mode = request.param
+ if mode == "text":
+ pytester.maketxtfile(doctest)
+ else:
+ assert mode == "module"
+ pytester.makepyfile('"""\n%s"""' % doctest)
+
+ return makeit
+
+ def test_one_skipped(self, pytester, makedoctest):
+ makedoctest(
+ """
+ >>> 1 + 1 # doctest: +SKIP
+ 2
+ >>> 2 + 2
+ 4
+ """
+ )
+ reprec = pytester.inline_run("--doctest-modules")
+ reprec.assertoutcome(passed=1)
+
+ def test_one_skipped_failed(self, pytester, makedoctest):
+ makedoctest(
+ """
+ >>> 1 + 1 # doctest: +SKIP
+ 2
+ >>> 2 + 2
+ 200
+ """
+ )
+ reprec = pytester.inline_run("--doctest-modules")
+ reprec.assertoutcome(failed=1)
+
+ def test_all_skipped(self, pytester, makedoctest):
+ makedoctest(
+ """
+ >>> 1 + 1 # doctest: +SKIP
+ 2
+ >>> 2 + 2 # doctest: +SKIP
+ 200
+ """
+ )
+ reprec = pytester.inline_run("--doctest-modules")
+ reprec.assertoutcome(skipped=1)
+
+ def test_vacuous_all_skipped(self, pytester, makedoctest):
+ makedoctest("")
+ reprec = pytester.inline_run("--doctest-modules")
+ reprec.assertoutcome(passed=0, skipped=0)
+
+ def test_continue_on_failure(self, pytester: Pytester):
+ pytester.maketxtfile(
+ test_something="""
+ >>> i = 5
+ >>> def foo():
+ ... raise ValueError('error1')
+ >>> foo()
+ >>> i
+ >>> i + 2
+ 7
+ >>> i + 1
+ """
+ )
+ result = pytester.runpytest(
+ "--doctest-modules", "--doctest-continue-on-failure"
+ )
+ result.assert_outcomes(passed=0, failed=1)
+        # The lines that contain the failure are 4, 5, and 8. The first one
+ # is a stack trace and the other two are mismatches.
+ result.stdout.fnmatch_lines(
+ ["*4: UnexpectedException*", "*5: DocTestFailure*", "*8: DocTestFailure*"]
+ )
+
+ def test_skipping_wrapped_test(self, pytester):
+ """
+ Issue 8796: INTERNALERROR raised when skipping a decorated DocTest
+ through pytest_collection_modifyitems.
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+ from _pytest.doctest import DoctestItem
+
+ def pytest_collection_modifyitems(config, items):
+ skip_marker = pytest.mark.skip()
+
+ for item in items:
+ if isinstance(item, DoctestItem):
+ item.add_marker(skip_marker)
+ """
+ )
+
+ pytester.makepyfile(
+ """
+ from contextlib import contextmanager
+
+ @contextmanager
+ def my_config_context():
+ '''
+ >>> import os
+ '''
+ """
+ )
+
+ result = pytester.runpytest("--doctest-modules")
+ assert "INTERNALERROR" not in result.stdout.str()
+ result.assert_outcomes(skipped=1)
+
+
+class TestDoctestAutoUseFixtures:
+
+ SCOPES = ["module", "session", "class", "function"]
+
+ def test_doctest_module_session_fixture(self, pytester: Pytester):
+ """Test that session fixtures are initialized for doctest modules (#768)."""
+ # session fixture which changes some global data, which will
+ # be accessed by doctests in a module
+ pytester.makeconftest(
+ """
+ import pytest
+ import sys
+
+ @pytest.fixture(autouse=True, scope='session')
+ def myfixture():
+ assert not hasattr(sys, 'pytest_session_data')
+ sys.pytest_session_data = 1
+ yield
+ del sys.pytest_session_data
+ """
+ )
+ pytester.makepyfile(
+ foo="""
+ import sys
+
+ def foo():
+ '''
+ >>> assert sys.pytest_session_data == 1
+ '''
+
+ def bar():
+ '''
+ >>> assert sys.pytest_session_data == 1
+ '''
+ """
+ )
+ result = pytester.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+ @pytest.mark.parametrize("scope", SCOPES)
+ @pytest.mark.parametrize("enable_doctest", [True, False])
+ def test_fixture_scopes(self, pytester, scope, enable_doctest):
+ """Test that auto-use fixtures work properly with doctest modules.
+ See #1057 and #1100.
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(autouse=True, scope="{scope}")
+ def auto(request):
+ return 99
+ """.format(
+ scope=scope
+ )
+ )
+ pytester.makepyfile(
+ test_1='''
+ def test_foo():
+ """
+ >>> getfixture('auto') + 1
+ 100
+ """
+ def test_bar():
+ assert 1
+ '''
+ )
+ params = ("--doctest-modules",) if enable_doctest else ()
+ passes = 3 if enable_doctest else 2
+ result = pytester.runpytest(*params)
+ result.stdout.fnmatch_lines(["*=== %d passed in *" % passes])
+
+ @pytest.mark.parametrize("scope", SCOPES)
+ @pytest.mark.parametrize("autouse", [True, False])
+ @pytest.mark.parametrize("use_fixture_in_doctest", [True, False])
+ def test_fixture_module_doctest_scopes(
+ self, pytester, scope, autouse, use_fixture_in_doctest
+ ):
+ """Test that auto-use fixtures work properly with doctest files.
+ See #1057 and #1100.
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(autouse={autouse}, scope="{scope}")
+ def auto(request):
+ return 99
+ """.format(
+ scope=scope, autouse=autouse
+ )
+ )
+ if use_fixture_in_doctest:
+ pytester.maketxtfile(
+ test_doc="""
+ >>> getfixture('auto')
+ 99
+ """
+ )
+ else:
+ pytester.maketxtfile(
+ test_doc="""
+ >>> 1 + 1
+ 2
+ """
+ )
+ result = pytester.runpytest("--doctest-modules")
+ result.stdout.no_fnmatch_line("*FAILURES*")
+ result.stdout.fnmatch_lines(["*=== 1 passed in *"])
+
+ @pytest.mark.parametrize("scope", SCOPES)
+ def test_auto_use_request_attributes(self, pytester, scope):
+ """Check that all attributes of a request in an autouse fixture
+ behave as expected when requested for a doctest item.
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture(autouse=True, scope="{scope}")
+ def auto(request):
+ if "{scope}" == 'module':
+ assert request.module is None
+ if "{scope}" == 'class':
+ assert request.cls is None
+ if "{scope}" == 'function':
+ assert request.function is None
+ return 99
+ """.format(
+ scope=scope
+ )
+ )
+ pytester.maketxtfile(
+ test_doc="""
+ >>> 1 + 1
+ 2
+ """
+ )
+ result = pytester.runpytest("--doctest-modules")
+        result.stdout.no_fnmatch_line("*FAILURES*")
+ result.stdout.fnmatch_lines(["*=== 1 passed in *"])
+
+
+class TestDoctestNamespaceFixture:
+
+ SCOPES = ["module", "session", "class", "function"]
+
+ @pytest.mark.parametrize("scope", SCOPES)
+ def test_namespace_doctestfile(self, pytester, scope):
+ """
+ Check that inserting something into the namespace works in a
+ simple text file doctest
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+ import contextlib
+
+ @pytest.fixture(autouse=True, scope="{scope}")
+ def add_contextlib(doctest_namespace):
+ doctest_namespace['cl'] = contextlib
+ """.format(
+ scope=scope
+ )
+ )
+ p = pytester.maketxtfile(
+ """
+ >>> print(cl.__name__)
+ contextlib
+ """
+ )
+ reprec = pytester.inline_run(p)
+ reprec.assertoutcome(passed=1)
+
+ @pytest.mark.parametrize("scope", SCOPES)
+ def test_namespace_pyfile(self, pytester, scope):
+ """
+ Check that inserting something into the namespace works in a
+ simple Python file docstring doctest
+ """
+ pytester.makeconftest(
+ """
+ import pytest
+ import contextlib
+
+ @pytest.fixture(autouse=True, scope="{scope}")
+ def add_contextlib(doctest_namespace):
+ doctest_namespace['cl'] = contextlib
+ """.format(
+ scope=scope
+ )
+ )
+ p = pytester.makepyfile(
+ """
+ def foo():
+ '''
+ >>> print(cl.__name__)
+ contextlib
+ '''
+ """
+ )
+ reprec = pytester.inline_run(p, "--doctest-modules")
+ reprec.assertoutcome(passed=1)
+
+
+class TestDoctestReportingOption:
+ def _run_doctest_report(self, pytester, format):
+ pytester.makepyfile(
+ """
+ def foo():
+ '''
+ >>> foo()
+ a b
+ 0 1 4
+ 1 2 4
+ 2 3 6
+ '''
+ print(' a b\\n'
+ '0 1 4\\n'
+ '1 2 5\\n'
+ '2 3 6')
+ """
+ )
+ return pytester.runpytest("--doctest-modules", "--doctest-report", format)
+
+ @pytest.mark.parametrize("format", ["udiff", "UDIFF", "uDiFf"])
+ def test_doctest_report_udiff(self, pytester, format):
+ result = self._run_doctest_report(pytester, format)
+ result.stdout.fnmatch_lines(
+ [" 0 1 4", " -1 2 4", " +1 2 5", " 2 3 6"]
+ )
+
+ def test_doctest_report_cdiff(self, pytester: Pytester):
+ result = self._run_doctest_report(pytester, "cdiff")
+ result.stdout.fnmatch_lines(
+ [
+ " a b",
+ " 0 1 4",
+ " ! 1 2 4",
+ " 2 3 6",
+ " --- 1,4 ----",
+ " a b",
+ " 0 1 4",
+ " ! 1 2 5",
+ " 2 3 6",
+ ]
+ )
+
+ def test_doctest_report_ndiff(self, pytester: Pytester):
+ result = self._run_doctest_report(pytester, "ndiff")
+ result.stdout.fnmatch_lines(
+ [
+ " a b",
+ " 0 1 4",
+ " - 1 2 4",
+ " ? ^",
+ " + 1 2 5",
+ " ? ^",
+ " 2 3 6",
+ ]
+ )
+
+ @pytest.mark.parametrize("format", ["none", "only_first_failure"])
+ def test_doctest_report_none_or_only_first_failure(self, pytester, format):
+ result = self._run_doctest_report(pytester, format)
+ result.stdout.fnmatch_lines(
+ [
+ "Expected:",
+ " a b",
+ " 0 1 4",
+ " 1 2 4",
+ " 2 3 6",
+ "Got:",
+ " a b",
+ " 0 1 4",
+ " 1 2 5",
+ " 2 3 6",
+ ]
+ )
+
+ def test_doctest_report_invalid(self, pytester: Pytester):
+ result = self._run_doctest_report(pytester, "obviously_invalid_format")
+ result.stderr.fnmatch_lines(
+ [
+ "*error: argument --doctest-report: invalid choice: 'obviously_invalid_format' (choose from*"
+ ]
+ )
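+
+    # A usage sketch for the option exercised by this class (illustrative; the
+    # accepted values and their case-insensitivity are exactly what the tests
+    # above assert):
+    #
+    #     pytest --doctest-modules --doctest-report ndiff
+    #     pytest --doctest-modules --doctest-report only_first_failure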
+
+
+@pytest.mark.parametrize("mock_module", ["mock", "unittest.mock"])
+def test_doctest_mock_objects_dont_recurse_missbehaved(mock_module, pytester: Pytester):
+ pytest.importorskip(mock_module)
+ pytester.makepyfile(
+ """
+ from {mock_module} import call
+ class Example(object):
+ '''
+ >>> 1 + 1
+ 2
+ '''
+ """.format(
+ mock_module=mock_module
+ )
+ )
+ result = pytester.runpytest("--doctest-modules")
+ result.stdout.fnmatch_lines(["* 1 passed *"])
+
+
+class Broken:
+ def __getattr__(self, _):
+ raise KeyError("This should be an AttributeError")
+
+
+@pytest.mark.parametrize( # pragma: no branch (lambdas are not called)
+ "stop", [None, _is_mocked, lambda f: None, lambda f: False, lambda f: True]
+)
+def test_warning_on_unwrap_of_broken_object(
+ stop: Optional[Callable[[object], object]]
+) -> None:
+ bad_instance = Broken()
+ assert inspect.unwrap.__module__ == "inspect"
+ with _patch_unwrap_mock_aware():
+ assert inspect.unwrap.__module__ != "inspect"
+ with pytest.warns(
+ pytest.PytestWarning, match="^Got KeyError.* when unwrapping"
+ ):
+ with pytest.raises(KeyError):
+ inspect.unwrap(bad_instance, stop=stop) # type: ignore[arg-type]
+ assert inspect.unwrap.__module__ == "inspect"
+
+
+def test_is_setup_py_not_named_setup_py(tmp_path: Path) -> None:
+ not_setup_py = tmp_path.joinpath("not_setup.py")
+ not_setup_py.write_text('from setuptools import setup; setup(name="foo")')
+ assert not _is_setup_py(not_setup_py)
+
+
+@pytest.mark.parametrize("mod", ("setuptools", "distutils.core"))
+def test_is_setup_py_is_a_setup_py(tmp_path: Path, mod: str) -> None:
+ setup_py = tmp_path.joinpath("setup.py")
+ setup_py.write_text(f'from {mod} import setup; setup(name="foo")', "utf-8")
+ assert _is_setup_py(setup_py)
+
+
+@pytest.mark.parametrize("mod", ("setuptools", "distutils.core"))
+def test_is_setup_py_different_encoding(tmp_path: Path, mod: str) -> None:
+ setup_py = tmp_path.joinpath("setup.py")
+ contents = (
+ "# -*- coding: cp1252 -*-\n"
+ 'from {} import setup; setup(name="foo", description="€")\n'.format(mod)
+ )
+ setup_py.write_bytes(contents.encode("cp1252"))
+ assert _is_setup_py(setup_py)
+
+
+@pytest.mark.parametrize(
+ "name, expected", [("__main__.py", True), ("__init__.py", False)]
+)
+def test_is_main_py(tmp_path: Path, name: str, expected: bool) -> None:
+ dunder_main = tmp_path.joinpath(name)
+ assert _is_main_py(dunder_main) == expected
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_entry_points.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_entry_points.py
new file mode 100644
index 0000000000..5d00312736
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_entry_points.py
@@ -0,0 +1,7 @@
+from _pytest.compat import importlib_metadata
+
+
+def test_pytest_entry_points_are_identical():
+ dist = importlib_metadata.distribution("pytest")
+ entry_map = {ep.name: ep for ep in dist.entry_points}
+ assert entry_map["pytest"].value == entry_map["py.test"].value
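+
+
+# A rough stdlib equivalent of the check above (a sketch, assuming Python >= 3.8
+# where ``importlib.metadata`` is available directly):
+#
+#     from importlib.metadata import distribution
+#     eps = {ep.name: ep.value for ep in distribution("pytest").entry_points}
+#     assert eps["pytest"] == eps["py.test"]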
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_error_diffs.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_error_diffs.py
new file mode 100644
index 0000000000..1668e929ab
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_error_diffs.py
@@ -0,0 +1,283 @@
+"""
+Tests and examples for correct "+/-" usage in error diffs.
+
+See https://github.com/pytest-dev/pytest/issues/3333 for details.
+
+"""
+import sys
+
+import pytest
+from _pytest.pytester import Pytester
+
+
+TESTCASES = [
+ pytest.param(
+ """
+ def test_this():
+ result = [1, 4, 3]
+ expected = [1, 2, 3]
+ assert result == expected
+ """,
+ """
+ > assert result == expected
+ E assert [1, 4, 3] == [1, 2, 3]
+ E At index 1 diff: 4 != 2
+ E Full diff:
+ E - [1, 2, 3]
+ E ? ^
+ E + [1, 4, 3]
+ E ? ^
+ """,
+ id="Compare lists, one item differs",
+ ),
+ pytest.param(
+ """
+ def test_this():
+ result = [1, 2, 3]
+ expected = [1, 2]
+ assert result == expected
+ """,
+ """
+ > assert result == expected
+ E assert [1, 2, 3] == [1, 2]
+ E Left contains one more item: 3
+ E Full diff:
+ E - [1, 2]
+ E + [1, 2, 3]
+ E ? +++
+ """,
+ id="Compare lists, one extra item",
+ ),
+ pytest.param(
+ """
+ def test_this():
+ result = [1, 3]
+ expected = [1, 2, 3]
+ assert result == expected
+ """,
+ """
+ > assert result == expected
+ E assert [1, 3] == [1, 2, 3]
+ E At index 1 diff: 3 != 2
+ E Right contains one more item: 3
+ E Full diff:
+ E - [1, 2, 3]
+ E ? ---
+ E + [1, 3]
+ """,
+ id="Compare lists, one item missing",
+ ),
+ pytest.param(
+ """
+ def test_this():
+ result = (1, 4, 3)
+ expected = (1, 2, 3)
+ assert result == expected
+ """,
+ """
+ > assert result == expected
+ E assert (1, 4, 3) == (1, 2, 3)
+ E At index 1 diff: 4 != 2
+ E Full diff:
+ E - (1, 2, 3)
+ E ? ^
+ E + (1, 4, 3)
+ E ? ^
+ """,
+ id="Compare tuples",
+ ),
+ pytest.param(
+ """
+ def test_this():
+ result = {1, 3, 4}
+ expected = {1, 2, 3}
+ assert result == expected
+ """,
+ """
+ > assert result == expected
+ E assert {1, 3, 4} == {1, 2, 3}
+ E Extra items in the left set:
+ E 4
+ E Extra items in the right set:
+ E 2
+ E Full diff:
+ E - {1, 2, 3}
+ E ? ^ ^
+ E + {1, 3, 4}
+ E ? ^ ^
+ """,
+ id="Compare sets",
+ ),
+ pytest.param(
+ """
+ def test_this():
+ result = {1: 'spam', 3: 'eggs'}
+ expected = {1: 'spam', 2: 'eggs'}
+ assert result == expected
+ """,
+ """
+ > assert result == expected
+ E AssertionError: assert {1: 'spam', 3: 'eggs'} == {1: 'spam', 2: 'eggs'}
+ E Common items:
+ E {1: 'spam'}
+ E Left contains 1 more item:
+ E {3: 'eggs'}
+ E Right contains 1 more item:
+ E {2: 'eggs'}
+ E Full diff:
+ E - {1: 'spam', 2: 'eggs'}
+ E ? ^
+ E + {1: 'spam', 3: 'eggs'}
+ E ? ^
+ """,
+ id="Compare dicts with differing keys",
+ ),
+ pytest.param(
+ """
+ def test_this():
+ result = {1: 'spam', 2: 'eggs'}
+ expected = {1: 'spam', 2: 'bacon'}
+ assert result == expected
+ """,
+ """
+ > assert result == expected
+ E AssertionError: assert {1: 'spam', 2: 'eggs'} == {1: 'spam', 2: 'bacon'}
+ E Common items:
+ E {1: 'spam'}
+ E Differing items:
+ E {2: 'eggs'} != {2: 'bacon'}
+ E Full diff:
+ E - {1: 'spam', 2: 'bacon'}
+ E ? ^^^^^
+ E + {1: 'spam', 2: 'eggs'}
+ E ? ^^^^
+ """,
+ id="Compare dicts with differing values",
+ ),
+ pytest.param(
+ """
+ def test_this():
+ result = {1: 'spam', 2: 'eggs'}
+ expected = {1: 'spam', 3: 'bacon'}
+ assert result == expected
+ """,
+ """
+ > assert result == expected
+ E AssertionError: assert {1: 'spam', 2: 'eggs'} == {1: 'spam', 3: 'bacon'}
+ E Common items:
+ E {1: 'spam'}
+ E Left contains 1 more item:
+ E {2: 'eggs'}
+ E Right contains 1 more item:
+ E {3: 'bacon'}
+ E Full diff:
+ E - {1: 'spam', 3: 'bacon'}
+ E ? ^ ^^^^^
+ E + {1: 'spam', 2: 'eggs'}
+ E ? ^ ^^^^
+ """,
+ id="Compare dicts with differing items",
+ ),
+ pytest.param(
+ """
+ def test_this():
+ result = "spmaeggs"
+ expected = "spameggs"
+ assert result == expected
+ """,
+ """
+ > assert result == expected
+ E AssertionError: assert 'spmaeggs' == 'spameggs'
+ E - spameggs
+ E ? -
+ E + spmaeggs
+ E ? +
+ """,
+ id="Compare strings",
+ ),
+ pytest.param(
+ """
+ def test_this():
+ result = "spam bacon eggs"
+ assert "bacon" not in result
+ """,
+ """
+ > assert "bacon" not in result
+ E AssertionError: assert 'bacon' not in 'spam bacon eggs'
+ E 'bacon' is contained here:
+ E spam bacon eggs
+ E ? +++++
+ """,
+ id='Test "not in" string',
+ ),
+]
+if sys.version_info[:2] >= (3, 7):
+ TESTCASES.extend(
+ [
+ pytest.param(
+ """
+ from dataclasses import dataclass
+
+ @dataclass
+ class A:
+ a: int
+ b: str
+
+ def test_this():
+ result = A(1, 'spam')
+ expected = A(2, 'spam')
+ assert result == expected
+ """,
+ """
+ > assert result == expected
+ E AssertionError: assert A(a=1, b='spam') == A(a=2, b='spam')
+ E Matching attributes:
+ E ['b']
+ E Differing attributes:
+ E ['a']
+ E Drill down into differing attribute a:
+ E a: 1 != 2
+ E +1
+ E -2
+ """,
+ id="Compare data classes",
+ ),
+ pytest.param(
+ """
+ import attr
+
+ @attr.s(auto_attribs=True)
+ class A:
+ a: int
+ b: str
+
+ def test_this():
+ result = A(1, 'spam')
+ expected = A(1, 'eggs')
+ assert result == expected
+ """,
+ """
+ > assert result == expected
+ E AssertionError: assert A(a=1, b='spam') == A(a=1, b='eggs')
+ E Matching attributes:
+ E ['a']
+ E Differing attributes:
+ E ['b']
+ E Drill down into differing attribute b:
+ E b: 'spam' != 'eggs'
+ E - eggs
+ E + spam
+ """,
+ id="Compare attrs classes",
+ ),
+ ]
+ )
+
+
+@pytest.mark.parametrize("code, expected", TESTCASES)
+def test_error_diff(code: str, expected: str, pytester: Pytester) -> None:
+ expected_lines = [line.lstrip() for line in expected.splitlines()]
+ p = pytester.makepyfile(code)
+ result = pytester.runpytest(p, "-vv")
+ result.stdout.fnmatch_lines(expected_lines)
+ assert result.ret == 1
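+
+
+# Outside this suite, the full "+/-" diffs asserted above only appear at high
+# verbosity; a minimal reproduction would be something like:
+#
+#     pytest -vv test_something.py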
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_faulthandler.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_faulthandler.py
new file mode 100644
index 0000000000..5b7911f21f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_faulthandler.py
@@ -0,0 +1,172 @@
+import io
+import sys
+
+import pytest
+from _pytest.pytester import Pytester
+
+
+def test_enabled(pytester: Pytester) -> None:
+ """Test single crashing test displays a traceback."""
+ pytester.makepyfile(
+ """
+ import faulthandler
+ def test_crash():
+ faulthandler._sigabrt()
+ """
+ )
+ result = pytester.runpytest_subprocess()
+ result.stderr.fnmatch_lines(["*Fatal Python error*"])
+ assert result.ret != 0
+
+
+def setup_crashing_test(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import faulthandler
+ import atexit
+ def test_ok():
+ atexit.register(faulthandler._sigabrt)
+ """
+ )
+
+
+def test_crash_during_shutdown_captured(pytester: Pytester) -> None:
+ """
+    Re-enable faulthandler if pytest found it already enabled during configure.
+    We should then be able to see crashes during interpreter shutdown.
+ """
+ setup_crashing_test(pytester)
+ args = (sys.executable, "-Xfaulthandler", "-mpytest")
+ result = pytester.run(*args)
+ result.stderr.fnmatch_lines(["*Fatal Python error*"])
+ assert result.ret != 0
+
+
+def test_crash_during_shutdown_not_captured(pytester: Pytester) -> None:
+ """
+ Check that pytest leaves faulthandler disabled if it was not enabled during configure.
+ This prevents us from seeing crashes during interpreter shutdown (see #8260).
+ """
+ setup_crashing_test(pytester)
+ args = (sys.executable, "-mpytest")
+ result = pytester.run(*args)
+ result.stderr.no_fnmatch_line("*Fatal Python error*")
+ assert result.ret != 0
+
+
+def test_disabled(pytester: Pytester) -> None:
+ """Test option to disable fault handler in the command line."""
+ pytester.makepyfile(
+ """
+ import faulthandler
+ def test_disabled():
+ assert not faulthandler.is_enabled()
+ """
+ )
+ result = pytester.runpytest_subprocess("-p", "no:faulthandler")
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ assert result.ret == 0
+
+
+@pytest.mark.parametrize(
+ "enabled",
+ [
+ pytest.param(
+ True, marks=pytest.mark.skip(reason="sometimes crashes on CI (#7022)")
+ ),
+ False,
+ ],
+)
+def test_timeout(pytester: Pytester, enabled: bool) -> None:
+ """Test option to dump tracebacks after a certain timeout.
+
+ If faulthandler is disabled, no traceback will be dumped.
+ """
+ pytester.makepyfile(
+ """
+ import os, time
+ def test_timeout():
+ time.sleep(1 if "CI" in os.environ else 0.1)
+ """
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ faulthandler_timeout = 0.01
+ """
+ )
+ args = ["-p", "no:faulthandler"] if not enabled else []
+
+ result = pytester.runpytest_subprocess(*args)
+ tb_output = "most recent call first"
+ if enabled:
+ result.stderr.fnmatch_lines(["*%s*" % tb_output])
+ else:
+ assert tb_output not in result.stderr.str()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ assert result.ret == 0
+
+
+@pytest.mark.parametrize("hook_name", ["pytest_enter_pdb", "pytest_exception_interact"])
+def test_cancel_timeout_on_hook(monkeypatch, hook_name) -> None:
+ """Make sure that we are cancelling any scheduled traceback dumping due
+ to timeout before entering pdb (pytest-dev/pytest-faulthandler#12) or any
+ other interactive exception (pytest-dev/pytest-faulthandler#14)."""
+ import faulthandler
+ from _pytest import faulthandler as faulthandler_plugin
+
+ called = []
+
+ monkeypatch.setattr(
+ faulthandler, "cancel_dump_traceback_later", lambda: called.append(1)
+ )
+
+    # Call our hook explicitly here; in a real run we can trust that pytest
+    # will call it for us at the appropriate moment.
+ hook_func = getattr(faulthandler_plugin, hook_name)
+ hook_func()
+ assert called == [1]
+
+
+def test_already_initialized_crash(pytester: Pytester) -> None:
+ """Even if faulthandler is already initialized, we still dump tracebacks on crashes (#8258)."""
+ pytester.makepyfile(
+ """
+ def test():
+ import faulthandler
+ faulthandler._sigabrt()
+ """
+ )
+ result = pytester.run(
+ sys.executable,
+ "-X",
+ "faulthandler",
+ "-mpytest",
+ pytester.path,
+ )
+ result.stderr.fnmatch_lines(["*Fatal Python error*"])
+ assert result.ret != 0
+
+
+def test_get_stderr_fileno_invalid_fd() -> None:
+ """Test for faulthandler being able to handle invalid file descriptors for stderr (#8249)."""
+ from _pytest.faulthandler import get_stderr_fileno
+
+ class StdErrWrapper(io.StringIO):
+ """
+ Mimic ``twisted.logger.LoggingFile`` to simulate returning an invalid file descriptor.
+
+ https://github.com/twisted/twisted/blob/twisted-20.3.0/src/twisted/logger/_io.py#L132-L139
+ """
+
+ def fileno(self):
+ return -1
+
+ wrapper = StdErrWrapper()
+
+ with pytest.MonkeyPatch.context() as mp:
+ mp.setattr("sys.stderr", wrapper)
+
+ # Even when the stderr wrapper signals an invalid file descriptor,
+        # ``get_stderr_fileno()`` should return the real one.
+ assert get_stderr_fileno() == 2
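+
+
+# A sketch of the invocations the tests above exercise (illustrative only):
+#
+#     python -X faulthandler -m pytest   # keep an externally enabled faulthandler active
+#     pytest -p no:faulthandler          # disable the plugin entirely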
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_findpaths.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_findpaths.py
new file mode 100644
index 0000000000..3a2917261a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_findpaths.py
@@ -0,0 +1,135 @@
+from pathlib import Path
+from textwrap import dedent
+
+import pytest
+from _pytest.config import UsageError
+from _pytest.config.findpaths import get_common_ancestor
+from _pytest.config.findpaths import get_dirs_from_args
+from _pytest.config.findpaths import load_config_dict_from_file
+
+
+class TestLoadConfigDictFromFile:
+ def test_empty_pytest_ini(self, tmp_path: Path) -> None:
+ """pytest.ini files are always considered for configuration, even if empty"""
+ fn = tmp_path / "pytest.ini"
+ fn.write_text("", encoding="utf-8")
+ assert load_config_dict_from_file(fn) == {}
+
+ def test_pytest_ini(self, tmp_path: Path) -> None:
+ """[pytest] section in pytest.ini files is read correctly"""
+ fn = tmp_path / "pytest.ini"
+ fn.write_text("[pytest]\nx=1", encoding="utf-8")
+ assert load_config_dict_from_file(fn) == {"x": "1"}
+
+ def test_custom_ini(self, tmp_path: Path) -> None:
+ """[pytest] section in any .ini file is read correctly"""
+ fn = tmp_path / "custom.ini"
+ fn.write_text("[pytest]\nx=1", encoding="utf-8")
+ assert load_config_dict_from_file(fn) == {"x": "1"}
+
+ def test_custom_ini_without_section(self, tmp_path: Path) -> None:
+ """Custom .ini files without [pytest] section are not considered for configuration"""
+ fn = tmp_path / "custom.ini"
+ fn.write_text("[custom]", encoding="utf-8")
+ assert load_config_dict_from_file(fn) is None
+
+ def test_custom_cfg_file(self, tmp_path: Path) -> None:
+ """Custom .cfg files without [tool:pytest] section are not considered for configuration"""
+ fn = tmp_path / "custom.cfg"
+ fn.write_text("[custom]", encoding="utf-8")
+ assert load_config_dict_from_file(fn) is None
+
+ def test_valid_cfg_file(self, tmp_path: Path) -> None:
+ """Custom .cfg files with [tool:pytest] section are read correctly"""
+ fn = tmp_path / "custom.cfg"
+ fn.write_text("[tool:pytest]\nx=1", encoding="utf-8")
+ assert load_config_dict_from_file(fn) == {"x": "1"}
+
+ def test_unsupported_pytest_section_in_cfg_file(self, tmp_path: Path) -> None:
+ """.cfg files with [pytest] section are no longer supported and should fail to alert users"""
+ fn = tmp_path / "custom.cfg"
+ fn.write_text("[pytest]", encoding="utf-8")
+ with pytest.raises(pytest.fail.Exception):
+ load_config_dict_from_file(fn)
+
+ def test_invalid_toml_file(self, tmp_path: Path) -> None:
+ """Invalid .toml files should raise `UsageError`."""
+ fn = tmp_path / "myconfig.toml"
+ fn.write_text("]invalid toml[", encoding="utf-8")
+ with pytest.raises(UsageError):
+ load_config_dict_from_file(fn)
+
+ def test_custom_toml_file(self, tmp_path: Path) -> None:
+ """.toml files without [tool.pytest.ini_options] are not considered for configuration."""
+ fn = tmp_path / "myconfig.toml"
+ fn.write_text(
+ dedent(
+ """
+ [build_system]
+ x = 1
+ """
+ ),
+ encoding="utf-8",
+ )
+ assert load_config_dict_from_file(fn) is None
+
+ def test_valid_toml_file(self, tmp_path: Path) -> None:
+ """.toml files with [tool.pytest.ini_options] are read correctly, including changing
+ data types to str/list for compatibility with other configuration options."""
+ fn = tmp_path / "myconfig.toml"
+ fn.write_text(
+ dedent(
+ """
+ [tool.pytest.ini_options]
+ x = 1
+ y = 20.0
+ values = ["tests", "integration"]
+ name = "foo"
+ heterogeneous_array = [1, "str"]
+ """
+ ),
+ encoding="utf-8",
+ )
+ assert load_config_dict_from_file(fn) == {
+ "x": "1",
+ "y": "20.0",
+ "values": ["tests", "integration"],
+ "name": "foo",
+ "heterogeneous_array": [1, "str"],
+ }
+
+
+class TestCommonAncestor:
+ def test_has_ancestor(self, tmp_path: Path) -> None:
+ fn1 = tmp_path / "foo" / "bar" / "test_1.py"
+ fn1.parent.mkdir(parents=True)
+ fn1.touch()
+ fn2 = tmp_path / "foo" / "zaz" / "test_2.py"
+ fn2.parent.mkdir(parents=True)
+ fn2.touch()
+ assert get_common_ancestor([fn1, fn2]) == tmp_path / "foo"
+ assert get_common_ancestor([fn1.parent, fn2]) == tmp_path / "foo"
+ assert get_common_ancestor([fn1.parent, fn2.parent]) == tmp_path / "foo"
+ assert get_common_ancestor([fn1, fn2.parent]) == tmp_path / "foo"
+
+ def test_single_dir(self, tmp_path: Path) -> None:
+ assert get_common_ancestor([tmp_path]) == tmp_path
+
+ def test_single_file(self, tmp_path: Path) -> None:
+ fn = tmp_path / "foo.py"
+ fn.touch()
+ assert get_common_ancestor([fn]) == tmp_path
+
+
+def test_get_dirs_from_args(tmp_path):
+ """get_dirs_from_args() skips over non-existing directories and files"""
+ fn = tmp_path / "foo.py"
+ fn.touch()
+ d = tmp_path / "tests"
+ d.mkdir()
+ option = "--foobar=/foo.txt"
+ # xdist uses options in this format for its rsync feature (#7638)
+ xdist_rsync_option = "popen=c:/dest"
+ assert get_dirs_from_args(
+ [str(fn), str(tmp_path / "does_not_exist"), str(d), option, xdist_rsync_option]
+ ) == [fn.parent, d]
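+
+
+# A consolidated sketch of the section names accepted by each configuration
+# format, as exercised by TestLoadConfigDictFromFile above (illustrative only):
+#
+#     # pytest.ini (or any .ini file)
+#     [pytest]
+#     x = 1
+#
+#     # .cfg files (setup.cfg style)
+#     [tool:pytest]
+#     x = 1
+#
+#     # pyproject.toml-style .toml files
+#     [tool.pytest.ini_options]
+#     x = 1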
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_helpconfig.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_helpconfig.py
new file mode 100644
index 0000000000..44c2c9295b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_helpconfig.py
@@ -0,0 +1,124 @@
+import pytest
+from _pytest.config import ExitCode
+from _pytest.pytester import Pytester
+
+
+def test_version_verbose(pytester: Pytester, pytestconfig, monkeypatch) -> None:
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD")
+ result = pytester.runpytest("--version", "--version")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines([f"*pytest*{pytest.__version__}*imported from*"])
+ if pytestconfig.pluginmanager.list_plugin_distinfo():
+ result.stdout.fnmatch_lines(["*setuptools registered plugins:", "*at*"])
+
+
+def test_version_less_verbose(pytester: Pytester, pytestconfig, monkeypatch) -> None:
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD")
+ result = pytester.runpytest("--version")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines([f"pytest {pytest.__version__}"])
+
+
+def test_versions():
+ """Regression check for the public version attributes in pytest."""
+ assert isinstance(pytest.__version__, str)
+ assert isinstance(pytest.version_tuple, tuple)
+
+
+def test_help(pytester: Pytester) -> None:
+ result = pytester.runpytest("--help")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ """
+ -m MARKEXPR only run tests matching given mark expression.
+ For example: -m 'mark1 and not mark2'.
+ reporting:
+ --durations=N *
+ -V, --version display pytest version and information about plugins.
+ When given twice, also display information about
+ plugins.
+ *setup.cfg*
+ *minversion*
+ *to see*markers*pytest --markers*
+ *to see*fixtures*pytest --fixtures*
+ """
+ )
+
+
+def test_none_help_param_raises_exception(pytester: Pytester) -> None:
+ """Test that a None help param raises a TypeError."""
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("test_ini", None, default=True, type="bool")
+ """
+ )
+ result = pytester.runpytest("--help")
+ result.stderr.fnmatch_lines(
+ ["*TypeError: help argument cannot be None for test_ini*"]
+ )
+
+
+def test_empty_help_param(pytester: Pytester) -> None:
+ """Test that an empty help param is displayed correctly."""
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("test_ini", "", default=True, type="bool")
+ """
+ )
+ result = pytester.runpytest("--help")
+ assert result.ret == 0
+ lines = [
+ " required_plugins (args):",
+ " plugins that must be present for pytest to run*",
+ " test_ini (bool):*",
+ "environment variables:",
+ ]
+ result.stdout.fnmatch_lines(lines, consecutive=True)
+
+
+def test_hookvalidation_unknown(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_hello(xyz):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(["*unknown hook*pytest_hello*"])
+
+
+def test_hookvalidation_optional(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ @pytest.hookimpl(optionalhook=True)
+ def pytest_hello(xyz):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+def test_traceconfig(pytester: Pytester) -> None:
+ result = pytester.runpytest("--traceconfig")
+ result.stdout.fnmatch_lines(["*using*pytest*", "*active plugins*"])
+
+
+def test_debug(pytester: Pytester) -> None:
+ result = pytester.runpytest_subprocess("--debug")
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ p = pytester.path.joinpath("pytestdebug.log")
+ assert "pytest_sessionstart" in p.read_text("utf-8")
+
+
+def test_PYTEST_DEBUG(pytester: Pytester, monkeypatch) -> None:
+ monkeypatch.setenv("PYTEST_DEBUG", "1")
+ result = pytester.runpytest_subprocess()
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ result.stderr.fnmatch_lines(
+ ["*pytest_plugin_registered*", "*manager*PluginManager*"]
+ )
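+
+
+# The debugging aids exercised above can be reproduced from a shell (a sketch;
+# the exact output varies by environment):
+#
+#     pytest --debug          # writes a pytestdebug.log trace file
+#     PYTEST_DEBUG=1 pytest   # traces plugin-manager activity to stderr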
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_junitxml.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_junitxml.py
new file mode 100644
index 0000000000..02531e8143
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_junitxml.py
@@ -0,0 +1,1703 @@
+import os
+import platform
+from datetime import datetime
+from pathlib import Path
+from typing import cast
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+from xml.dom import minidom
+
+import xmlschema
+
+import pytest
+from _pytest.config import Config
+from _pytest.junitxml import bin_xml_escape
+from _pytest.junitxml import LogXML
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+from _pytest.pytester import RunResult
+from _pytest.reports import BaseReport
+from _pytest.reports import TestReport
+from _pytest.stash import Stash
+
+
+@pytest.fixture(scope="session")
+def schema() -> xmlschema.XMLSchema:
+ """Return an xmlschema.XMLSchema object for the junit-10.xsd file."""
+ fn = Path(__file__).parent / "example_scripts/junit-10.xsd"
+ with fn.open() as f:
+ return xmlschema.XMLSchema(f)
+
+
+class RunAndParse:
+ def __init__(self, pytester: Pytester, schema: xmlschema.XMLSchema) -> None:
+ self.pytester = pytester
+ self.schema = schema
+
+ def __call__(
+ self, *args: Union[str, "os.PathLike[str]"], family: Optional[str] = "xunit1"
+ ) -> Tuple[RunResult, "DomNode"]:
+ if family:
+ args = ("-o", "junit_family=" + family) + args
+ xml_path = self.pytester.path.joinpath("junit.xml")
+ result = self.pytester.runpytest("--junitxml=%s" % xml_path, *args)
+ if family == "xunit2":
+ with xml_path.open() as f:
+ self.schema.validate(f)
+ xmldoc = minidom.parse(str(xml_path))
+ return result, DomNode(xmldoc)
+
+
+@pytest.fixture
+def run_and_parse(pytester: Pytester, schema: xmlschema.XMLSchema) -> RunAndParse:
+ """Fixture that returns a function that can be used to execute pytest and
+ return the parsed ``DomNode`` of the root xml node.
+
+ The ``family`` parameter is used to configure the ``junit_family`` of the written report.
+ "xunit2" is also automatically validated against the schema.
+ """
+ return RunAndParse(pytester, schema)
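+
+# A rough sketch of the command line this fixture wraps (illustrative; the
+# fixture passes ``--junitxml`` plus ``-o junit_family=...`` and any extra
+# arguments before parsing the resulting junit.xml):
+#
+#     pytest --junitxml=junit.xml -o junit_family=xunit2 [extra args]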
+
+
+def assert_attr(node, **kwargs):
+ __tracebackhide__ = True
+
+ def nodeval(node, name):
+ anode = node.getAttributeNode(name)
+ if anode is not None:
+ return anode.value
+
+ expected = {name: str(value) for name, value in kwargs.items()}
+ on_node = {name: nodeval(node, name) for name in expected}
+ assert on_node == expected
+
+
+class DomNode:
+ def __init__(self, dom):
+ self.__node = dom
+
+ def __repr__(self):
+ return self.__node.toxml()
+
+ def find_first_by_tag(self, tag):
+ return self.find_nth_by_tag(tag, 0)
+
+ def _by_tag(self, tag):
+ return self.__node.getElementsByTagName(tag)
+
+ @property
+ def children(self):
+ return [type(self)(x) for x in self.__node.childNodes]
+
+ @property
+ def get_unique_child(self):
+ children = self.children
+ assert len(children) == 1
+ return children[0]
+
+ def find_nth_by_tag(self, tag, n):
+ items = self._by_tag(tag)
+ try:
+ nth = items[n]
+ except IndexError:
+ pass
+ else:
+ return type(self)(nth)
+
+ def find_by_tag(self, tag):
+ t = type(self)
+ return [t(x) for x in self.__node.getElementsByTagName(tag)]
+
+ def __getitem__(self, key):
+ node = self.__node.getAttributeNode(key)
+ if node is not None:
+ return node.value
+
+ def assert_attr(self, **kwargs):
+ __tracebackhide__ = True
+ return assert_attr(self.__node, **kwargs)
+
+ def toxml(self):
+ return self.__node.toxml()
+
+ @property
+ def text(self):
+ return self.__node.childNodes[0].wholeText
+
+ @property
+ def tag(self):
+ return self.__node.tagName
+
+ @property
+ def next_sibling(self):
+ return type(self)(self.__node.nextSibling)
+
+
+parametrize_families = pytest.mark.parametrize("xunit_family", ["xunit1", "xunit2"])
+
+
+class TestPython:
+ @parametrize_families
+ def test_summing_simple(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_pass():
+ pass
+ def test_fail():
+ assert 0
+ def test_skip():
+ pytest.skip("")
+ @pytest.mark.xfail
+ def test_xfail():
+ assert 0
+ @pytest.mark.xfail
+ def test_xpass():
+ assert 1
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(name="pytest", errors=0, failures=1, skipped=2, tests=5)
+
+ @parametrize_families
+ def test_summing_simple_with_errors(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def fixture():
+ raise Exception()
+ def test_pass():
+ pass
+ def test_fail():
+ assert 0
+ def test_error(fixture):
+ pass
+ @pytest.mark.xfail
+ def test_xfail():
+ assert False
+ @pytest.mark.xfail(strict=True)
+ def test_xpass():
+ assert True
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(name="pytest", errors=1, failures=2, skipped=1, tests=5)
+
+ @parametrize_families
+ def test_hostname_in_xml(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_pass():
+ pass
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(hostname=platform.node())
+
+ @parametrize_families
+ def test_timestamp_in_xml(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_pass():
+ pass
+ """
+ )
+ start_time = datetime.now()
+ result, dom = run_and_parse(family=xunit_family)
+ node = dom.find_first_by_tag("testsuite")
+ timestamp = datetime.strptime(node["timestamp"], "%Y-%m-%dT%H:%M:%S.%f")
+ assert start_time <= timestamp < datetime.now()
+
+ def test_timing_function(
+ self, pytester: Pytester, run_and_parse: RunAndParse, mock_timing
+ ) -> None:
+ pytester.makepyfile(
+ """
+ from _pytest import timing
+ def setup_module():
+ timing.sleep(1)
+ def teardown_module():
+ timing.sleep(2)
+ def test_sleep():
+ timing.sleep(4)
+ """
+ )
+ result, dom = run_and_parse()
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ val = tnode["time"]
+ assert float(val) == 7.0
+
+ @pytest.mark.parametrize("duration_report", ["call", "total"])
+ def test_junit_duration_report(
+ self,
+ pytester: Pytester,
+ monkeypatch: MonkeyPatch,
+ duration_report: str,
+ run_and_parse: RunAndParse,
+ ) -> None:
+
+        # Mock LogXML.node_reporter so that it always sets a known duration on each test report object.
+ original_node_reporter = LogXML.node_reporter
+
+ def node_reporter_wrapper(s, report):
+ report.duration = 1.0
+ reporter = original_node_reporter(s, report)
+ return reporter
+
+ monkeypatch.setattr(LogXML, "node_reporter", node_reporter_wrapper)
+
+ pytester.makepyfile(
+ """
+ def test_foo():
+ pass
+ """
+ )
+ result, dom = run_and_parse("-o", f"junit_duration_report={duration_report}")
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ val = float(tnode["time"])
+ if duration_report == "total":
+ assert val == 3.0
+ else:
+ assert duration_report == "call"
+ assert val == 1.0
+
+ @parametrize_families
+ def test_setup_error(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def arg(request):
+ raise ValueError("Error reason")
+ def test_function(arg):
+ pass
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(errors=1, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(classname="test_setup_error", name="test_function")
+ fnode = tnode.find_first_by_tag("error")
+ fnode.assert_attr(message='failed on setup with "ValueError: Error reason"')
+ assert "ValueError" in fnode.toxml()
+
+ @parametrize_families
+ def test_teardown_error(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def arg():
+ yield
+ raise ValueError('Error reason')
+ def test_function(arg):
+ pass
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(classname="test_teardown_error", name="test_function")
+ fnode = tnode.find_first_by_tag("error")
+ fnode.assert_attr(message='failed on teardown with "ValueError: Error reason"')
+ assert "ValueError" in fnode.toxml()
+
+ @parametrize_families
+ def test_call_failure_teardown_error(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def arg():
+ yield
+ raise Exception("Teardown Exception")
+ def test_function(arg):
+ raise Exception("Call Exception")
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(errors=1, failures=1, tests=1)
+ first, second = dom.find_by_tag("testcase")
+ assert first
+ assert second
+ assert first != second
+ fnode = first.find_first_by_tag("failure")
+ fnode.assert_attr(message="Exception: Call Exception")
+ snode = second.find_first_by_tag("error")
+ snode.assert_attr(
+ message='failed on teardown with "Exception: Teardown Exception"'
+ )
+
+ @parametrize_families
+ def test_skip_contains_name_reason(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_skip():
+ pytest.skip("hello23")
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skipped=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(classname="test_skip_contains_name_reason", name="test_skip")
+ snode = tnode.find_first_by_tag("skipped")
+ snode.assert_attr(type="pytest.skip", message="hello23")
+
+ @parametrize_families
+ def test_mark_skip_contains_name_reason(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip(reason="hello24")
+ def test_skip():
+ assert True
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skipped=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ classname="test_mark_skip_contains_name_reason", name="test_skip"
+ )
+ snode = tnode.find_first_by_tag("skipped")
+ snode.assert_attr(type="pytest.skip", message="hello24")
+
+ @parametrize_families
+ def test_mark_skipif_contains_name_reason(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ GLOBAL_CONDITION = True
+ @pytest.mark.skipif(GLOBAL_CONDITION, reason="hello25")
+ def test_skip():
+ assert True
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skipped=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ classname="test_mark_skipif_contains_name_reason", name="test_skip"
+ )
+ snode = tnode.find_first_by_tag("skipped")
+ snode.assert_attr(type="pytest.skip", message="hello25")
+
+ @parametrize_families
+ def test_mark_skip_doesnt_capture_output(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip(reason="foo")
+ def test_skip():
+ print("bar!")
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret == 0
+ node_xml = dom.find_first_by_tag("testsuite").toxml()
+ assert "bar!" not in node_xml
+
+ @parametrize_families
+ def test_classname_instance(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ class TestClass(object):
+ def test_method(self):
+ assert 0
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(failures=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(
+ classname="test_classname_instance.TestClass", name="test_method"
+ )
+
+ @parametrize_families
+ def test_classname_nested_dir(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ p = pytester.mkdir("sub").joinpath("test_hello.py")
+ p.write_text("def test_func(): 0/0")
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(failures=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(classname="sub.test_hello", name="test_func")
+
+ @parametrize_families
+ def test_internal_error(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makeconftest("def pytest_runtest_protocol(): 0 / 0")
+ pytester.makepyfile("def test_function(): pass")
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(errors=1, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(classname="pytest", name="internal")
+ fnode = tnode.find_first_by_tag("error")
+ fnode.assert_attr(message="internal error")
+ assert "Division" in fnode.toxml()
+
+ @pytest.mark.parametrize(
+ "junit_logging", ["no", "log", "system-out", "system-err", "out-err", "all"]
+ )
+ @parametrize_families
+ def test_failure_function(
+ self,
+ pytester: Pytester,
+ junit_logging,
+ run_and_parse: RunAndParse,
+ xunit_family,
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import logging
+ import sys
+
+ def test_fail():
+ print("hello-stdout")
+ sys.stderr.write("hello-stderr\\n")
+ logging.info('info msg')
+ logging.warning('warning msg')
+ raise ValueError(42)
+ """
+ )
+
+ result, dom = run_and_parse(
+ "-o", "junit_logging=%s" % junit_logging, family=xunit_family
+ )
+ assert result.ret, "Expected ret > 0"
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(failures=1, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(classname="test_failure_function", name="test_fail")
+ fnode = tnode.find_first_by_tag("failure")
+ fnode.assert_attr(message="ValueError: 42")
+ assert "ValueError" in fnode.toxml(), "ValueError not included"
+
+ if junit_logging in ["log", "all"]:
+ logdata = tnode.find_first_by_tag("system-out")
+ log_xml = logdata.toxml()
+ assert logdata.tag == "system-out", "Expected tag: system-out"
+ assert "info msg" not in log_xml, "Unexpected INFO message"
+ assert "warning msg" in log_xml, "Missing WARN message"
+ if junit_logging in ["system-out", "out-err", "all"]:
+ systemout = tnode.find_first_by_tag("system-out")
+ systemout_xml = systemout.toxml()
+ assert systemout.tag == "system-out", "Expected tag: system-out"
+ assert "info msg" not in systemout_xml, "INFO message found in system-out"
+ assert (
+ "hello-stdout" in systemout_xml
+ ), "Missing 'hello-stdout' in system-out"
+ if junit_logging in ["system-err", "out-err", "all"]:
+ systemerr = tnode.find_first_by_tag("system-err")
+ systemerr_xml = systemerr.toxml()
+ assert systemerr.tag == "system-err", "Expected tag: system-err"
+ assert "info msg" not in systemerr_xml, "INFO message found in system-err"
+ assert (
+ "hello-stderr" in systemerr_xml
+ ), "Missing 'hello-stderr' in system-err"
+ assert (
+ "warning msg" not in systemerr_xml
+ ), "WARN message found in system-err"
+ if junit_logging == "no":
+ assert not tnode.find_by_tag("log"), "Found unexpected content: log"
+ assert not tnode.find_by_tag(
+ "system-out"
+ ), "Found unexpected content: system-out"
+ assert not tnode.find_by_tag(
+ "system-err"
+ ), "Found unexpected content: system-err"
+
+ @parametrize_families
+ def test_failure_verbose_message(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import sys
+ def test_fail():
+ assert 0, "An error"
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ fnode = tnode.find_first_by_tag("failure")
+ fnode.assert_attr(message="AssertionError: An error\nassert 0")
+
+ @parametrize_families
+ def test_failure_escape(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('arg1', "<&'", ids="<&'")
+ def test_func(arg1):
+ print(arg1)
+ assert 0
+ """
+ )
+ result, dom = run_and_parse(
+ "-o", "junit_logging=system-out", family=xunit_family
+ )
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(failures=3, tests=3)
+
+ for index, char in enumerate("<&'"):
+
+ tnode = node.find_nth_by_tag("testcase", index)
+ tnode.assert_attr(
+ classname="test_failure_escape", name="test_func[%s]" % char
+ )
+ sysout = tnode.find_first_by_tag("system-out")
+ text = sysout.text
+ assert "%s\n" % char in text
+
+ @parametrize_families
+ def test_junit_prefixing(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_func():
+ assert 0
+ class TestHello(object):
+ def test_hello(self):
+ pass
+ """
+ )
+ result, dom = run_and_parse("--junitprefix=xyz", family=xunit_family)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(failures=1, tests=2)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(classname="xyz.test_junit_prefixing", name="test_func")
+ tnode = node.find_nth_by_tag("testcase", 1)
+ tnode.assert_attr(
+ classname="xyz.test_junit_prefixing.TestHello", name="test_hello"
+ )
+
+ @parametrize_families
+ def test_xfailure_function(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_xfail():
+ pytest.xfail("42")
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert not result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skipped=1, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(classname="test_xfailure_function", name="test_xfail")
+ fnode = tnode.find_first_by_tag("skipped")
+ fnode.assert_attr(type="pytest.xfail", message="42")
+
+ @parametrize_families
+ def test_xfailure_marker(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(reason="42")
+ def test_xfail():
+ assert False
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert not result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skipped=1, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(classname="test_xfailure_marker", name="test_xfail")
+ fnode = tnode.find_first_by_tag("skipped")
+ fnode.assert_attr(type="pytest.xfail", message="42")
+
+ @pytest.mark.parametrize(
+ "junit_logging", ["no", "log", "system-out", "system-err", "out-err", "all"]
+ )
+ def test_xfail_captures_output_once(
+ self, pytester: Pytester, junit_logging: str, run_and_parse: RunAndParse
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import sys
+ import pytest
+
+ @pytest.mark.xfail()
+ def test_fail():
+ sys.stdout.write('XFAIL This is stdout')
+ sys.stderr.write('XFAIL This is stderr')
+ assert 0
+ """
+ )
+ result, dom = run_and_parse("-o", "junit_logging=%s" % junit_logging)
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ if junit_logging in ["system-err", "out-err", "all"]:
+ assert len(tnode.find_by_tag("system-err")) == 1
+ else:
+ assert len(tnode.find_by_tag("system-err")) == 0
+
+ if junit_logging in ["log", "system-out", "out-err", "all"]:
+ assert len(tnode.find_by_tag("system-out")) == 1
+ else:
+ assert len(tnode.find_by_tag("system-out")) == 0
+
+ @parametrize_families
+ def test_xfailure_xpass(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_xpass():
+ pass
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ # assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skipped=0, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(classname="test_xfailure_xpass", name="test_xpass")
+
+ @parametrize_families
+ def test_xfailure_xpass_strict(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(strict=True, reason="This needs to fail!")
+ def test_xpass():
+ pass
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ # assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(skipped=0, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(classname="test_xfailure_xpass_strict", name="test_xpass")
+ fnode = tnode.find_first_by_tag("failure")
+ fnode.assert_attr(message="[XPASS(strict)] This needs to fail!")
+
+ @parametrize_families
+ def test_collect_error(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makepyfile("syntax error")
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(errors=1, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ fnode = tnode.find_first_by_tag("error")
+ fnode.assert_attr(message="collection failure")
+ assert "SyntaxError" in fnode.toxml()
+
+ def test_unicode(self, pytester: Pytester, run_and_parse: RunAndParse) -> None:
+ value = "hx\xc4\x85\xc4\x87\n"
+ pytester.makepyfile(
+ """\
+ # coding: latin1
+ def test_hello():
+ print(%r)
+ assert 0
+ """
+ % value
+ )
+ result, dom = run_and_parse()
+ assert result.ret == 1
+ tnode = dom.find_first_by_tag("testcase")
+ fnode = tnode.find_first_by_tag("failure")
+ assert "hx" in fnode.toxml()
+
+ def test_assertion_binchars(
+ self, pytester: Pytester, run_and_parse: RunAndParse
+ ) -> None:
+ """This test did fail when the escaping wasn't strict."""
+ pytester.makepyfile(
+ """
+
+ M1 = '\x01\x02\x03\x04'
+ M2 = '\x01\x02\x03\x05'
+
+ def test_str_compare():
+ assert M1 == M2
+ """
+ )
+ result, dom = run_and_parse()
+ print(dom.toxml())
+
+ @pytest.mark.parametrize("junit_logging", ["no", "system-out"])
+ def test_pass_captures_stdout(
+ self, pytester: Pytester, run_and_parse: RunAndParse, junit_logging: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_pass():
+ print('hello-stdout')
+ """
+ )
+ result, dom = run_and_parse("-o", "junit_logging=%s" % junit_logging)
+ node = dom.find_first_by_tag("testsuite")
+ pnode = node.find_first_by_tag("testcase")
+ if junit_logging == "no":
+ assert not node.find_by_tag(
+ "system-out"
+ ), "system-out should not be generated"
+ if junit_logging == "system-out":
+ systemout = pnode.find_first_by_tag("system-out")
+ assert (
+ "hello-stdout" in systemout.toxml()
+ ), "'hello-stdout' should be in system-out"
+
+ @pytest.mark.parametrize("junit_logging", ["no", "system-err"])
+ def test_pass_captures_stderr(
+ self, pytester: Pytester, run_and_parse: RunAndParse, junit_logging: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import sys
+ def test_pass():
+ sys.stderr.write('hello-stderr')
+ """
+ )
+ result, dom = run_and_parse("-o", "junit_logging=%s" % junit_logging)
+ node = dom.find_first_by_tag("testsuite")
+ pnode = node.find_first_by_tag("testcase")
+ if junit_logging == "no":
+ assert not node.find_by_tag(
+ "system-err"
+ ), "system-err should not be generated"
+ if junit_logging == "system-err":
+ systemerr = pnode.find_first_by_tag("system-err")
+ assert (
+ "hello-stderr" in systemerr.toxml()
+ ), "'hello-stderr' should be in system-err"
+
+ @pytest.mark.parametrize("junit_logging", ["no", "system-out"])
+ def test_setup_error_captures_stdout(
+ self, pytester: Pytester, run_and_parse: RunAndParse, junit_logging: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def arg(request):
+ print('hello-stdout')
+ raise ValueError()
+ def test_function(arg):
+ pass
+ """
+ )
+ result, dom = run_and_parse("-o", "junit_logging=%s" % junit_logging)
+ node = dom.find_first_by_tag("testsuite")
+ pnode = node.find_first_by_tag("testcase")
+ if junit_logging == "no":
+ assert not node.find_by_tag(
+ "system-out"
+ ), "system-out should not be generated"
+ if junit_logging == "system-out":
+ systemout = pnode.find_first_by_tag("system-out")
+ assert (
+ "hello-stdout" in systemout.toxml()
+ ), "'hello-stdout' should be in system-out"
+
+ @pytest.mark.parametrize("junit_logging", ["no", "system-err"])
+ def test_setup_error_captures_stderr(
+ self, pytester: Pytester, run_and_parse: RunAndParse, junit_logging: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import sys
+ import pytest
+
+ @pytest.fixture
+ def arg(request):
+ sys.stderr.write('hello-stderr')
+ raise ValueError()
+ def test_function(arg):
+ pass
+ """
+ )
+ result, dom = run_and_parse("-o", "junit_logging=%s" % junit_logging)
+ node = dom.find_first_by_tag("testsuite")
+ pnode = node.find_first_by_tag("testcase")
+ if junit_logging == "no":
+ assert not node.find_by_tag(
+ "system-err"
+ ), "system-err should not be generated"
+ if junit_logging == "system-err":
+ systemerr = pnode.find_first_by_tag("system-err")
+ assert (
+ "hello-stderr" in systemerr.toxml()
+ ), "'hello-stderr' should be in system-err"
+
+ @pytest.mark.parametrize("junit_logging", ["no", "system-out"])
+ def test_avoid_double_stdout(
+ self, pytester: Pytester, run_and_parse: RunAndParse, junit_logging: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ import sys
+ import pytest
+
+ @pytest.fixture
+ def arg(request):
+ yield
+ sys.stdout.write('hello-stdout teardown')
+ raise ValueError()
+ def test_function(arg):
+ sys.stdout.write('hello-stdout call')
+ """
+ )
+ result, dom = run_and_parse("-o", "junit_logging=%s" % junit_logging)
+ node = dom.find_first_by_tag("testsuite")
+ pnode = node.find_first_by_tag("testcase")
+ if junit_logging == "no":
+ assert not node.find_by_tag(
+ "system-out"
+ ), "system-out should not be generated"
+ if junit_logging == "system-out":
+ systemout = pnode.find_first_by_tag("system-out")
+ assert "hello-stdout call" in systemout.toxml()
+ assert "hello-stdout teardown" in systemout.toxml()
+
+
+def test_mangle_test_address() -> None:
+ from _pytest.junitxml import mangle_test_address
+
+ address = "::".join(["a/my.py.thing.py", "Class", "method", "[a-1-::]"])
+ newnames = mangle_test_address(address)
+ assert newnames == ["a.my.py.thing", "Class", "method", "[a-1-::]"]
+
+
+def test_dont_configure_on_workers(tmp_path: Path) -> None:
+ gotten: List[object] = []
+
+ class FakeConfig:
+ if TYPE_CHECKING:
+ workerinput = None
+
+ def __init__(self):
+ self.pluginmanager = self
+ self.option = self
+ self.stash = Stash()
+
+ def getini(self, name):
+ return "pytest"
+
+ junitprefix = None
+ # XXX: shouldn't need tmp_path ?
+ xmlpath = str(tmp_path.joinpath("junix.xml"))
+ register = gotten.append
+
+ fake_config = cast(Config, FakeConfig())
+ from _pytest import junitxml
+
+ junitxml.pytest_configure(fake_config)
+ assert len(gotten) == 1
+ FakeConfig.workerinput = None
+ junitxml.pytest_configure(fake_config)
+ assert len(gotten) == 1
+
+
+class TestNonPython:
+ @parametrize_families
+ def test_summing_simple(
+ self, pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+ ) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_collect_file(file_path, parent):
+ if file_path.suffix == ".xyz":
+ return MyItem.from_parent(name=file_path.name, parent=parent)
+ class MyItem(pytest.Item):
+ def runtest(self):
+ raise ValueError(42)
+ def repr_failure(self, excinfo):
+ return "custom item runtest failed"
+ """
+ )
+ pytester.path.joinpath("myfile.xyz").write_text("hello")
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(errors=0, failures=1, skipped=0, tests=1)
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(name="myfile.xyz")
+ fnode = tnode.find_first_by_tag("failure")
+ fnode.assert_attr(message="custom item runtest failed")
+ assert "custom item runtest failed" in fnode.toxml()
+
+
+@pytest.mark.parametrize("junit_logging", ["no", "system-out"])
+def test_nullbyte(pytester: Pytester, junit_logging: str) -> None:
+ # A null byte can not occur in XML (see section 2.2 of the spec)
+ pytester.makepyfile(
+ """
+ import sys
+ def test_print_nullbyte():
+ sys.stdout.write('Here the null -->' + chr(0) + '<--')
+ sys.stdout.write('In repr form -->' + repr(chr(0)) + '<--')
+ assert False
+ """
+ )
+ xmlf = pytester.path.joinpath("junit.xml")
+ pytester.runpytest("--junitxml=%s" % xmlf, "-o", "junit_logging=%s" % junit_logging)
+ text = xmlf.read_text()
+ assert "\x00" not in text
+ if junit_logging == "system-out":
+ assert "#x00" in text
+ if junit_logging == "no":
+ assert "#x00" not in text
+
+
+@pytest.mark.parametrize("junit_logging", ["no", "system-out"])
+def test_nullbyte_replace(pytester: Pytester, junit_logging: str) -> None:
+ # Check if the null byte gets replaced
+ pytester.makepyfile(
+ """
+ import sys
+ def test_print_nullbyte():
+ sys.stdout.write('Here the null -->' + chr(0) + '<--')
+ sys.stdout.write('In repr form -->' + repr(chr(0)) + '<--')
+ assert False
+ """
+ )
+ xmlf = pytester.path.joinpath("junit.xml")
+ pytester.runpytest("--junitxml=%s" % xmlf, "-o", "junit_logging=%s" % junit_logging)
+ text = xmlf.read_text()
+ if junit_logging == "system-out":
+ assert "#x0" in text
+ if junit_logging == "no":
+ assert "#x0" not in text
+
+
+def test_invalid_xml_escape() -> None:
+    # Test some more invalid xml chars; the full range should really be
+    # tested, but let's just test the edges of the ranges instead.
+ # XXX This only tests low unicode character points for now as
+ # there are some issues with the testing infrastructure for
+ # the higher ones.
+ # XXX Testing 0xD (\r) is tricky as it overwrites the just written
+ # line in the output, so we skip it too.
+ invalid = (
+ 0x00,
+ 0x1,
+ 0xB,
+ 0xC,
+ 0xE,
+ 0x19,
+ 27, # issue #126
+ 0xD800,
+ 0xDFFF,
+ 0xFFFE,
+ 0x0FFFF,
+ ) # , 0x110000)
+ valid = (0x9, 0xA, 0x20)
+ # 0xD, 0xD7FF, 0xE000, 0xFFFD, 0x10000, 0x10FFFF)
+
+ for i in invalid:
+ got = bin_xml_escape(chr(i))
+ if i <= 0xFF:
+ expected = "#x%02X" % i
+ else:
+ expected = "#x%04X" % i
+ assert got == expected
+ for i in valid:
+ assert chr(i) == bin_xml_escape(chr(i))
+
+
+def test_logxml_path_expansion(tmp_path: Path, monkeypatch: MonkeyPatch) -> None:
+ home_tilde = Path(os.path.expanduser("~")).joinpath("test.xml")
+ xml_tilde = LogXML(Path("~", "test.xml"), None)
+ assert xml_tilde.logfile == str(home_tilde)
+
+ monkeypatch.setenv("HOME", str(tmp_path))
+ home_var = os.path.normpath(os.path.expandvars("$HOME/test.xml"))
+ xml_var = LogXML(Path("$HOME", "test.xml"), None)
+ assert xml_var.logfile == str(home_var)
+
+
+def test_logxml_changingdir(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_func():
+ import os
+ os.chdir("a")
+ """
+ )
+ pytester.mkdir("a")
+ result = pytester.runpytest("--junitxml=a/x.xml")
+ assert result.ret == 0
+ assert pytester.path.joinpath("a/x.xml").exists()
+
+
+def test_logxml_makedir(pytester: Pytester) -> None:
+ """--junitxml should automatically create directories for the xml file"""
+ pytester.makepyfile(
+ """
+ def test_pass():
+ pass
+ """
+ )
+ result = pytester.runpytest("--junitxml=path/to/results.xml")
+ assert result.ret == 0
+ assert pytester.path.joinpath("path/to/results.xml").exists()
+
+
+def test_logxml_check_isdir(pytester: Pytester) -> None:
+ """Give an error if --junit-xml is a directory (#2089)"""
+ result = pytester.runpytest("--junit-xml=.")
+ result.stderr.fnmatch_lines(["*--junitxml must be a filename*"])
+
+
+def test_escaped_parametrized_names_xml(
+ pytester: Pytester, run_and_parse: RunAndParse
+) -> None:
+ pytester.makepyfile(
+ """\
+ import pytest
+ @pytest.mark.parametrize('char', ["\\x00"])
+ def test_func(char):
+ assert char
+ """
+ )
+ result, dom = run_and_parse()
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testcase")
+ node.assert_attr(name="test_func[\\x00]")
+
+
+def test_double_colon_split_function_issue469(
+ pytester: Pytester, run_and_parse: RunAndParse
+) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('param', ["double::colon"])
+ def test_func(param):
+ pass
+ """
+ )
+ result, dom = run_and_parse()
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testcase")
+ node.assert_attr(classname="test_double_colon_split_function_issue469")
+ node.assert_attr(name="test_func[double::colon]")
+
+
+def test_double_colon_split_method_issue469(
+ pytester: Pytester, run_and_parse: RunAndParse
+) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ class TestClass(object):
+ @pytest.mark.parametrize('param', ["double::colon"])
+ def test_func(self, param):
+ pass
+ """
+ )
+ result, dom = run_and_parse()
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testcase")
+ node.assert_attr(classname="test_double_colon_split_method_issue469.TestClass")
+ node.assert_attr(name="test_func[double::colon]")
+
+
+def test_unicode_issue368(pytester: Pytester) -> None:
+ path = pytester.path.joinpath("test.xml")
+ log = LogXML(str(path), None)
+    ustr = "ВНИ!"
+
+ class Report(BaseReport):
+ longrepr = ustr
+ sections: List[Tuple[str, str]] = []
+ nodeid = "something"
+ location = "tests/filename.py", 42, "TestClass.method"
+ when = "teardown"
+
+ test_report = cast(TestReport, Report())
+
+ # hopefully this is not too brittle ...
+ log.pytest_sessionstart()
+ node_reporter = log._opentestcase(test_report)
+ node_reporter.append_failure(test_report)
+ node_reporter.append_collect_error(test_report)
+ node_reporter.append_collect_skipped(test_report)
+ node_reporter.append_error(test_report)
+ test_report.longrepr = "filename", 1, ustr
+ node_reporter.append_skipped(test_report)
+ test_report.longrepr = "filename", 1, "Skipped: å¡å˜£å˜£"
+ node_reporter.append_skipped(test_report)
+ test_report.wasxfail = ustr # type: ignore[attr-defined]
+ node_reporter.append_skipped(test_report)
+ log.pytest_sessionfinish()
+
+
+def test_record_property(pytester: Pytester, run_and_parse: RunAndParse) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def other(record_property):
+ record_property("bar", 1)
+ def test_record(record_property, other):
+ record_property("foo", "<1");
+ """
+ )
+ result, dom = run_and_parse()
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ psnode = tnode.find_first_by_tag("properties")
+ pnodes = psnode.find_by_tag("property")
+ pnodes[0].assert_attr(name="bar", value="1")
+ pnodes[1].assert_attr(name="foo", value="<1")
+ result.stdout.fnmatch_lines(["*= 1 passed in *"])
+
+
+def test_record_property_same_name(
+ pytester: Pytester, run_and_parse: RunAndParse
+) -> None:
+ pytester.makepyfile(
+ """
+ def test_record_with_same_name(record_property):
+ record_property("foo", "bar")
+ record_property("foo", "baz")
+ """
+ )
+ result, dom = run_and_parse()
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ psnode = tnode.find_first_by_tag("properties")
+ pnodes = psnode.find_by_tag("property")
+ pnodes[0].assert_attr(name="foo", value="bar")
+ pnodes[1].assert_attr(name="foo", value="baz")
+
+
+@pytest.mark.parametrize("fixture_name", ["record_property", "record_xml_attribute"])
+def test_record_fixtures_without_junitxml(
+ pytester: Pytester, fixture_name: str
+) -> None:
+ pytester.makepyfile(
+ """
+ def test_record({fixture_name}):
+ {fixture_name}("foo", "bar")
+ """.format(
+ fixture_name=fixture_name
+ )
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+
+
+@pytest.mark.filterwarnings("default")
+def test_record_attribute(pytester: Pytester, run_and_parse: RunAndParse) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ junit_family = xunit1
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def other(record_xml_attribute):
+ record_xml_attribute("bar", 1)
+ def test_record(record_xml_attribute, other):
+ record_xml_attribute("foo", "<1");
+ """
+ )
+ result, dom = run_and_parse()
+ node = dom.find_first_by_tag("testsuite")
+ tnode = node.find_first_by_tag("testcase")
+ tnode.assert_attr(bar="1")
+ tnode.assert_attr(foo="<1")
+ result.stdout.fnmatch_lines(
+ ["*test_record_attribute.py:6:*record_xml_attribute is an experimental feature"]
+ )
+
+
+@pytest.mark.filterwarnings("default")
+@pytest.mark.parametrize("fixture_name", ["record_xml_attribute", "record_property"])
+def test_record_fixtures_xunit2(
+ pytester: Pytester, fixture_name: str, run_and_parse: RunAndParse
+) -> None:
+ """Ensure record_xml_attribute and record_property drop values when outside of legacy family."""
+ pytester.makeini(
+ """
+ [pytest]
+ junit_family = xunit2
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def other({fixture_name}):
+ {fixture_name}("bar", 1)
+ def test_record({fixture_name}, other):
+ {fixture_name}("foo", "<1");
+ """.format(
+ fixture_name=fixture_name
+ )
+ )
+
+ result, dom = run_and_parse(family=None)
+ expected_lines = []
+ if fixture_name == "record_xml_attribute":
+ expected_lines.append(
+ "*test_record_fixtures_xunit2.py:6:*record_xml_attribute is an experimental feature"
+ )
+ expected_lines = [
+ "*test_record_fixtures_xunit2.py:6:*{fixture_name} is incompatible "
+ "with junit_family 'xunit2' (use 'legacy' or 'xunit1')".format(
+ fixture_name=fixture_name
+ )
+ ]
+ result.stdout.fnmatch_lines(expected_lines)
+
+
+def test_random_report_log_xdist(
+ pytester: Pytester, monkeypatch: MonkeyPatch, run_and_parse: RunAndParse
+) -> None:
+ """`xdist` calls pytest_runtest_logreport as they are executed by the workers,
+ with nodes from several nodes overlapping, so junitxml must cope with that
+ to produce correct reports (#1064)."""
+ pytest.importorskip("xdist")
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
+ pytester.makepyfile(
+ """
+ import pytest, time
+ @pytest.mark.parametrize('i', list(range(30)))
+ def test_x(i):
+ assert i != 22
+ """
+ )
+ _, dom = run_and_parse("-n2")
+ suite_node = dom.find_first_by_tag("testsuite")
+ failed = []
+ for case_node in suite_node.find_by_tag("testcase"):
+ if case_node.find_first_by_tag("failure"):
+ failed.append(case_node["name"])
+
+ assert failed == ["test_x[22]"]
+
+
+@parametrize_families
+def test_root_testsuites_tag(
+ pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+) -> None:
+ pytester.makepyfile(
+ """
+ def test_x():
+ pass
+ """
+ )
+ _, dom = run_and_parse(family=xunit_family)
+ root = dom.get_unique_child
+ assert root.tag == "testsuites"
+ suite_node = root.get_unique_child
+ assert suite_node.tag == "testsuite"
+
+
+def test_runs_twice(pytester: Pytester, run_and_parse: RunAndParse) -> None:
+ f = pytester.makepyfile(
+ """
+ def test_pass():
+ pass
+ """
+ )
+
+ result, dom = run_and_parse(f, f)
+ result.stdout.no_fnmatch_line("*INTERNALERROR*")
+ first, second = (x["classname"] for x in dom.find_by_tag("testcase"))
+ assert first == second
+
+
+def test_runs_twice_xdist(
+ pytester: Pytester, monkeypatch: MonkeyPatch, run_and_parse: RunAndParse
+) -> None:
+ pytest.importorskip("xdist")
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD")
+ f = pytester.makepyfile(
+ """
+ def test_pass():
+ pass
+ """
+ )
+
+ result, dom = run_and_parse(f, "--dist", "each", "--tx", "2*popen")
+ result.stdout.no_fnmatch_line("*INTERNALERROR*")
+ first, second = (x["classname"] for x in dom.find_by_tag("testcase"))
+ assert first == second
+
+
+def test_fancy_items_regression(pytester: Pytester, run_and_parse: RunAndParse) -> None:
+ # issue 1259
+ pytester.makeconftest(
+ """
+ import pytest
+ class FunItem(pytest.Item):
+ def runtest(self):
+ pass
+ class NoFunItem(pytest.Item):
+ def runtest(self):
+ pass
+
+ class FunCollector(pytest.File):
+ def collect(self):
+ return [
+ FunItem.from_parent(name='a', parent=self),
+ NoFunItem.from_parent(name='a', parent=self),
+ NoFunItem.from_parent(name='b', parent=self),
+ ]
+
+ def pytest_collect_file(file_path, parent):
+ if file_path.suffix == '.py':
+ return FunCollector.from_parent(path=file_path, parent=parent)
+ """
+ )
+
+ pytester.makepyfile(
+ """
+ def test_pass():
+ pass
+ """
+ )
+
+ result, dom = run_and_parse()
+
+ result.stdout.no_fnmatch_line("*INTERNALERROR*")
+
+ items = sorted("%(classname)s %(name)s" % x for x in dom.find_by_tag("testcase"))
+ import pprint
+
+ pprint.pprint(items)
+ assert items == [
+ "conftest a",
+ "conftest a",
+ "conftest b",
+ "test_fancy_items_regression a",
+ "test_fancy_items_regression a",
+ "test_fancy_items_regression b",
+ "test_fancy_items_regression test_pass",
+ ]
+
+
+@parametrize_families
+def test_global_properties(pytester: Pytester, xunit_family: str) -> None:
+ path = pytester.path.joinpath("test_global_properties.xml")
+ log = LogXML(str(path), None, family=xunit_family)
+
+ class Report(BaseReport):
+ sections: List[Tuple[str, str]] = []
+ nodeid = "test_node_id"
+
+ log.pytest_sessionstart()
+ log.add_global_property("foo", "1")
+ log.add_global_property("bar", "2")
+ log.pytest_sessionfinish()
+
+ dom = minidom.parse(str(path))
+
+ properties = dom.getElementsByTagName("properties")
+
+ assert properties.length == 1, "There must be one <properties> node"
+
+ property_list = dom.getElementsByTagName("property")
+
+    assert property_list.length == 2, "There must be exactly 2 property nodes"
+
+ expected = {"foo": "1", "bar": "2"}
+ actual = {}
+
+ for p in property_list:
+ k = str(p.getAttribute("name"))
+ v = str(p.getAttribute("value"))
+ actual[k] = v
+
+ assert actual == expected
+
+
+def test_url_property(pytester: Pytester) -> None:
+ test_url = "http://www.github.com/pytest-dev"
+ path = pytester.path.joinpath("test_url_property.xml")
+ log = LogXML(str(path), None)
+
+ class Report(BaseReport):
+ longrepr = "FooBarBaz"
+ sections: List[Tuple[str, str]] = []
+ nodeid = "something"
+ location = "tests/filename.py", 42, "TestClass.method"
+ url = test_url
+
+ test_report = cast(TestReport, Report())
+
+ log.pytest_sessionstart()
+ node_reporter = log._opentestcase(test_report)
+ node_reporter.append_failure(test_report)
+ log.pytest_sessionfinish()
+
+ test_case = minidom.parse(str(path)).getElementsByTagName("testcase")[0]
+
+ assert (
+ test_case.getAttribute("url") == test_url
+ ), "The URL did not get written to the xml"
+
+
+@parametrize_families
+def test_record_testsuite_property(
+ pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+) -> None:
+ pytester.makepyfile(
+ """
+ def test_func1(record_testsuite_property):
+ record_testsuite_property("stats", "all good")
+
+ def test_func2(record_testsuite_property):
+ record_testsuite_property("stats", 10)
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testsuite")
+ properties_node = node.find_first_by_tag("properties")
+ p1_node = properties_node.find_nth_by_tag("property", 0)
+ p2_node = properties_node.find_nth_by_tag("property", 1)
+ p1_node.assert_attr(name="stats", value="all good")
+ p2_node.assert_attr(name="stats", value="10")
+
+
+def test_record_testsuite_property_junit_disabled(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_func1(record_testsuite_property):
+ record_testsuite_property("stats", "all good")
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+
+
+@pytest.mark.parametrize("junit", [True, False])
+def test_record_testsuite_property_type_checking(
+ pytester: Pytester, junit: bool
+) -> None:
+ pytester.makepyfile(
+ """
+ def test_func1(record_testsuite_property):
+ record_testsuite_property(1, 2)
+ """
+ )
+ args = ("--junitxml=tests.xml",) if junit else ()
+ result = pytester.runpytest(*args)
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ ["*TypeError: name parameter needs to be a string, but int given"]
+ )
+
+
+@pytest.mark.parametrize("suite_name", ["my_suite", ""])
+@parametrize_families
+def test_set_suite_name(
+ pytester: Pytester, suite_name: str, run_and_parse: RunAndParse, xunit_family: str
+) -> None:
+ if suite_name:
+ pytester.makeini(
+ """
+ [pytest]
+ junit_suite_name={suite_name}
+ junit_family={family}
+ """.format(
+ suite_name=suite_name, family=xunit_family
+ )
+ )
+ expected = suite_name
+ else:
+ expected = "pytest"
+ pytester.makepyfile(
+ """
+ import pytest
+
+ def test_func():
+ pass
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testsuite")
+ node.assert_attr(name=expected)
+
+
+def test_escaped_skipreason_issue3533(
+ pytester: Pytester, run_and_parse: RunAndParse
+) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip(reason='1 <> 2')
+ def test_skip():
+ pass
+ """
+ )
+ _, dom = run_and_parse()
+ node = dom.find_first_by_tag("testcase")
+ snode = node.find_first_by_tag("skipped")
+ assert "1 <> 2" in snode.text
+ snode.assert_attr(message="1 <> 2")
+
+
+@parametrize_families
+def test_logging_passing_tests_disabled_does_not_log_test_output(
+ pytester: Pytester, run_and_parse: RunAndParse, xunit_family: str
+) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ junit_log_passing_tests=False
+ junit_logging=system-out
+ junit_family={family}
+ """.format(
+ family=xunit_family
+ )
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+ import sys
+
+ def test_func():
+ sys.stdout.write('This is stdout')
+ sys.stderr.write('This is stderr')
+ logging.warning('hello')
+ """
+ )
+ result, dom = run_and_parse(family=xunit_family)
+ assert result.ret == 0
+ node = dom.find_first_by_tag("testcase")
+ assert len(node.find_by_tag("system-err")) == 0
+ assert len(node.find_by_tag("system-out")) == 0
+
+
+@parametrize_families
+@pytest.mark.parametrize("junit_logging", ["no", "system-out", "system-err"])
+def test_logging_passing_tests_disabled_logs_output_for_failing_test_issue5430(
+ pytester: Pytester,
+ junit_logging: str,
+ run_and_parse: RunAndParse,
+ xunit_family: str,
+) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ junit_log_passing_tests=False
+ junit_family={family}
+ """.format(
+ family=xunit_family
+ )
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+ import logging
+ import sys
+
+ def test_func():
+ logging.warning('hello')
+ assert 0
+ """
+ )
+ result, dom = run_and_parse(
+ "-o", "junit_logging=%s" % junit_logging, family=xunit_family
+ )
+ assert result.ret == 1
+ node = dom.find_first_by_tag("testcase")
+ if junit_logging == "system-out":
+ assert len(node.find_by_tag("system-err")) == 0
+ assert len(node.find_by_tag("system-out")) == 1
+ elif junit_logging == "system-err":
+ assert len(node.find_by_tag("system-err")) == 1
+ assert len(node.find_by_tag("system-out")) == 0
+ else:
+ assert junit_logging == "no"
+ assert len(node.find_by_tag("system-err")) == 0
+ assert len(node.find_by_tag("system-out")) == 0
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_legacypath.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_legacypath.py
new file mode 100644
index 0000000000..8acafe98e7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_legacypath.py
@@ -0,0 +1,180 @@
+from pathlib import Path
+
+import pytest
+from _pytest.compat import LEGACY_PATH
+from _pytest.legacypath import TempdirFactory
+from _pytest.legacypath import Testdir
+
+
+def test_item_fspath(pytester: pytest.Pytester) -> None:
+ pytester.makepyfile("def test_func(): pass")
+ items, hookrec = pytester.inline_genitems()
+ assert len(items) == 1
+ (item,) = items
+ items2, hookrec = pytester.inline_genitems(item.nodeid)
+ (item2,) = items2
+ assert item2.name == item.name
+ assert item2.fspath == item.fspath # type: ignore[attr-defined]
+ assert item2.path == item.path
+
+
+def test_testdir_testtmproot(testdir: Testdir) -> None:
+ """Check test_tmproot is a py.path attribute for backward compatibility."""
+ assert testdir.test_tmproot.check(dir=1)
+
+
+def test_testdir_makefile_dot_prefixes_extension_silently(
+ testdir: Testdir,
+) -> None:
+ """For backwards compat #8192"""
+ p1 = testdir.makefile("foo.bar", "")
+ assert ".foo.bar" in str(p1)
+
+
+def test_testdir_makefile_ext_none_raises_type_error(testdir: Testdir) -> None:
+ """For backwards compat #8192"""
+ with pytest.raises(TypeError):
+ testdir.makefile(None, "")
+
+
+def test_testdir_makefile_ext_empty_string_makes_file(testdir: Testdir) -> None:
+ """For backwards compat #8192"""
+ p1 = testdir.makefile("", "")
+ assert "test_testdir_makefile" in str(p1)
+
+
+def attempt_symlink_to(path: str, to_path: str) -> None:
+ """Try to make a symlink from "path" to "to_path", skipping in case this platform
+ does not support it or we don't have sufficient privileges (common on Windows)."""
+ try:
+ Path(path).symlink_to(Path(to_path))
+ except OSError:
+ pytest.skip("could not create symbolic link")
+
+
+def test_tmpdir_factory(
+ tmpdir_factory: TempdirFactory,
+ tmp_path_factory: pytest.TempPathFactory,
+) -> None:
+ assert str(tmpdir_factory.getbasetemp()) == str(tmp_path_factory.getbasetemp())
+ dir = tmpdir_factory.mktemp("foo")
+ assert dir.exists()
+
+
+def test_tmpdir_equals_tmp_path(tmpdir: LEGACY_PATH, tmp_path: Path) -> None:
+ assert Path(tmpdir) == tmp_path
+
+
+def test_tmpdir_always_is_realpath(pytester: pytest.Pytester) -> None:
+ # See test_tmp_path_always_is_realpath.
+ realtemp = pytester.mkdir("myrealtemp")
+ linktemp = pytester.path.joinpath("symlinktemp")
+ attempt_symlink_to(str(linktemp), str(realtemp))
+ p = pytester.makepyfile(
+ """
+ def test_1(tmpdir):
+ import os
+ assert os.path.realpath(str(tmpdir)) == str(tmpdir)
+ """
+ )
+ result = pytester.runpytest("-s", p, "--basetemp=%s/bt" % linktemp)
+ assert not result.ret
+
+
+def test_cache_makedir(cache: pytest.Cache) -> None:
+ dir = cache.makedir("foo") # type: ignore[attr-defined]
+ assert dir.exists()
+ dir.remove()
+
+
+def test_fixturerequest_getmodulepath(pytester: pytest.Pytester) -> None:
+ modcol = pytester.getmodulecol("def test_somefunc(): pass")
+ (item,) = pytester.genitems([modcol])
+ req = pytest.FixtureRequest(item, _ispytest=True)
+ assert req.path == modcol.path
+ assert req.fspath == modcol.fspath # type: ignore[attr-defined]
+
+
+class TestFixtureRequestSessionScoped:
+ @pytest.fixture(scope="session")
+ def session_request(self, request):
+ return request
+
+ def test_session_scoped_unavailable_attributes(self, session_request):
+ with pytest.raises(
+ AttributeError,
+ match="path not available in session-scoped context",
+ ):
+ session_request.fspath
+
+
+@pytest.mark.parametrize("config_type", ["ini", "pyproject"])
+def test_addini_paths(pytester: pytest.Pytester, config_type: str) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("paths", "my new ini value", type="pathlist")
+ parser.addini("abc", "abc value")
+ """
+ )
+ if config_type == "ini":
+ inipath = pytester.makeini(
+ """
+ [pytest]
+ paths=hello world/sub.py
+ """
+ )
+ elif config_type == "pyproject":
+ inipath = pytester.makepyprojecttoml(
+ """
+ [tool.pytest.ini_options]
+ paths=["hello", "world/sub.py"]
+ """
+ )
+ config = pytester.parseconfig()
+ values = config.getini("paths")
+ assert len(values) == 2
+ assert values[0] == inipath.parent.joinpath("hello")
+ assert values[1] == inipath.parent.joinpath("world/sub.py")
+ pytest.raises(ValueError, config.getini, "other")
+
+
+def test_override_ini_paths(pytester: pytest.Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_addoption(parser):
+ parser.addini("paths", "my new ini value", type="pathlist")"""
+ )
+ pytester.makeini(
+ """
+ [pytest]
+ paths=blah.py"""
+ )
+ pytester.makepyfile(
+ r"""
+ def test_overriden(pytestconfig):
+ config_paths = pytestconfig.getini("paths")
+ print(config_paths)
+ for cpf in config_paths:
+ print('\nuser_path:%s' % cpf.basename)
+ """
+ )
+ result = pytester.runpytest("--override-ini", "paths=foo/bar1.py foo/bar2.py", "-s")
+ result.stdout.fnmatch_lines(["user_path:bar1.py", "user_path:bar2.py"])
+
+
+def test_inifile_from_cmdline_main_hook(pytester: pytest.Pytester) -> None:
+ """Ensure Config.inifile is available during pytest_cmdline_main (#9396)."""
+ p = pytester.makeini(
+ """
+ [pytest]
+ """
+ )
+ pytester.makeconftest(
+ """
+ def pytest_cmdline_main(config):
+ print("pytest_cmdline_main inifile =", config.inifile)
+ """
+ )
+ result = pytester.runpytest_subprocess("-s")
+ result.stdout.fnmatch_lines(f"*pytest_cmdline_main inifile = {p}")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_link_resolve.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_link_resolve.py
new file mode 100644
index 0000000000..60a86ada36
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_link_resolve.py
@@ -0,0 +1,80 @@
+import os.path
+import subprocess
+import sys
+import textwrap
+from contextlib import contextmanager
+from pathlib import Path
+from string import ascii_lowercase
+
+from _pytest.pytester import Pytester
+
+
+@contextmanager
+def subst_path_windows(filepath: Path):
+ for c in ascii_lowercase[7:]: # Create a subst drive from H-Z.
+ c += ":"
+ if not os.path.exists(c):
+ drive = c
+ break
+ else:
+ raise AssertionError("Unable to find suitable drive letter for subst.")
+
+ directory = filepath.parent
+ basename = filepath.name
+
+ args = ["subst", drive, str(directory)]
+ subprocess.check_call(args)
+ assert os.path.exists(drive)
+ try:
+ filename = Path(drive, os.sep, basename)
+ yield filename
+ finally:
+ args = ["subst", "/D", drive]
+ subprocess.check_call(args)
+
+
+@contextmanager
+def subst_path_linux(filepath: Path):
+ directory = filepath.parent
+ basename = filepath.name
+
+ target = directory / ".." / "sub2"
+ os.symlink(str(directory), str(target), target_is_directory=True)
+ try:
+ filename = target / basename
+ yield filename
+ finally:
+ # We don't need to unlink (it's all in the tempdir).
+ pass
+
+
+def test_link_resolve(pytester: Pytester) -> None:
+ """See: https://github.com/pytest-dev/pytest/issues/5965."""
+ sub1 = pytester.mkpydir("sub1")
+ p = sub1.joinpath("test_foo.py")
+ p.write_text(
+ textwrap.dedent(
+ """
+ import pytest
+ def test_foo():
+ raise AssertionError()
+ """
+ )
+ )
+
+ subst = subst_path_linux
+ if sys.platform == "win32":
+ subst = subst_path_windows
+
+ with subst(p) as subst_p:
+ result = pytester.runpytest(str(subst_p), "-v")
+ # i.e.: Make sure that the error is reported as a relative path, not as a
+ # resolved path.
+ # See: https://github.com/pytest-dev/pytest/issues/5965
+ stdout = result.stdout.str()
+ assert "sub1/test_foo.py" not in stdout
+
+ # i.e.: Expect drive on windows because we just have drive:filename, whereas
+ # we expect a relative path on Linux.
+ expect = f"*{subst_p}*" if sys.platform == "win32" else "*sub2/test_foo.py*"
+ result.stdout.fnmatch_lines([expect])
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_main.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_main.py
new file mode 100644
index 0000000000..2df51bb7bb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_main.py
@@ -0,0 +1,264 @@
+import argparse
+import os
+import re
+import sys
+from pathlib import Path
+from typing import Optional
+
+import pytest
+from _pytest.config import ExitCode
+from _pytest.config import UsageError
+from _pytest.main import resolve_collection_argument
+from _pytest.main import validate_basetemp
+from _pytest.pytester import Pytester
+
+
+@pytest.mark.parametrize(
+ "ret_exc",
+ (
+ pytest.param((None, ValueError)),
+ pytest.param((42, SystemExit)),
+ pytest.param((False, SystemExit)),
+ ),
+)
+def test_wrap_session_notify_exception(ret_exc, pytester: Pytester) -> None:
+ returncode, exc = ret_exc
+ c1 = pytester.makeconftest(
+ """
+ import pytest
+
+ def pytest_sessionstart():
+ raise {exc}("boom")
+
+ def pytest_internalerror(excrepr, excinfo):
+ returncode = {returncode!r}
+ if returncode is not False:
+ pytest.exit("exiting after %s..." % excinfo.typename, returncode={returncode!r})
+ """.format(
+ returncode=returncode, exc=exc.__name__
+ )
+ )
+ result = pytester.runpytest()
+ if returncode:
+ assert result.ret == returncode
+ else:
+ assert result.ret == ExitCode.INTERNAL_ERROR
+ assert result.stdout.lines[0] == "INTERNALERROR> Traceback (most recent call last):"
+
+ end_lines = (
+ result.stdout.lines[-4:]
+ if sys.version_info >= (3, 11)
+ else result.stdout.lines[-3:]
+ )
+
+ if exc == SystemExit:
+ assert end_lines == [
+ f'INTERNALERROR> File "{c1}", line 4, in pytest_sessionstart',
+ 'INTERNALERROR> raise SystemExit("boom")',
+ *(
+ ("INTERNALERROR> ^^^^^^^^^^^^^^^^^^^^^^^^",)
+ if sys.version_info >= (3, 11)
+ else ()
+ ),
+ "INTERNALERROR> SystemExit: boom",
+ ]
+ else:
+ assert end_lines == [
+ f'INTERNALERROR> File "{c1}", line 4, in pytest_sessionstart',
+ 'INTERNALERROR> raise ValueError("boom")',
+ *(
+ ("INTERNALERROR> ^^^^^^^^^^^^^^^^^^^^^^^^",)
+ if sys.version_info >= (3, 11)
+ else ()
+ ),
+ "INTERNALERROR> ValueError: boom",
+ ]
+ if returncode is False:
+ assert result.stderr.lines == ["mainloop: caught unexpected SystemExit!"]
+ else:
+ assert result.stderr.lines == [f"Exit: exiting after {exc.__name__}..."]
+
+
+@pytest.mark.parametrize("returncode", (None, 42))
+def test_wrap_session_exit_sessionfinish(
+ returncode: Optional[int], pytester: Pytester
+) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_sessionfinish():
+ pytest.exit(reason="exit_pytest_sessionfinish", returncode={returncode})
+ """.format(
+ returncode=returncode
+ )
+ )
+ result = pytester.runpytest()
+ if returncode:
+ assert result.ret == returncode
+ else:
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ assert result.stdout.lines[-1] == "collected 0 items"
+ assert result.stderr.lines == ["Exit: exit_pytest_sessionfinish"]
+
+
+@pytest.mark.parametrize("basetemp", ["foo", "foo/bar"])
+def test_validate_basetemp_ok(tmp_path, basetemp, monkeypatch):
+ monkeypatch.chdir(str(tmp_path))
+ validate_basetemp(tmp_path / basetemp)
+
+
+@pytest.mark.parametrize("basetemp", ["", ".", ".."])
+def test_validate_basetemp_fails(tmp_path, basetemp, monkeypatch):
+ monkeypatch.chdir(str(tmp_path))
+ msg = "basetemp must not be empty, the current working directory or any parent directory of it"
+ with pytest.raises(argparse.ArgumentTypeError, match=msg):
+ if basetemp:
+ basetemp = tmp_path / basetemp
+ validate_basetemp(basetemp)
+
+
+def test_validate_basetemp_integration(pytester: Pytester) -> None:
+ result = pytester.runpytest("--basetemp=.")
+ result.stderr.fnmatch_lines("*basetemp must not be*")
+
+
+class TestResolveCollectionArgument:
+ @pytest.fixture
+ def invocation_path(self, pytester: Pytester) -> Path:
+ pytester.syspathinsert(pytester.path / "src")
+ pytester.chdir()
+
+ pkg = pytester.path.joinpath("src/pkg")
+ pkg.mkdir(parents=True)
+ pkg.joinpath("__init__.py").touch()
+ pkg.joinpath("test.py").touch()
+ return pytester.path
+
+ def test_file(self, invocation_path: Path) -> None:
+ """File and parts."""
+ assert resolve_collection_argument(invocation_path, "src/pkg/test.py") == (
+ invocation_path / "src/pkg/test.py",
+ [],
+ )
+ assert resolve_collection_argument(invocation_path, "src/pkg/test.py::") == (
+ invocation_path / "src/pkg/test.py",
+ [""],
+ )
+ assert resolve_collection_argument(
+ invocation_path, "src/pkg/test.py::foo::bar"
+ ) == (invocation_path / "src/pkg/test.py", ["foo", "bar"])
+ assert resolve_collection_argument(
+ invocation_path, "src/pkg/test.py::foo::bar::"
+ ) == (invocation_path / "src/pkg/test.py", ["foo", "bar", ""])
+
+ def test_dir(self, invocation_path: Path) -> None:
+ """Directory and parts."""
+ assert resolve_collection_argument(invocation_path, "src/pkg") == (
+ invocation_path / "src/pkg",
+ [],
+ )
+
+ with pytest.raises(
+ UsageError, match=r"directory argument cannot contain :: selection parts"
+ ):
+ resolve_collection_argument(invocation_path, "src/pkg::")
+
+ with pytest.raises(
+ UsageError, match=r"directory argument cannot contain :: selection parts"
+ ):
+ resolve_collection_argument(invocation_path, "src/pkg::foo::bar")
+
+ def test_pypath(self, invocation_path: Path) -> None:
+ """Dotted name and parts."""
+ assert resolve_collection_argument(
+ invocation_path, "pkg.test", as_pypath=True
+ ) == (invocation_path / "src/pkg/test.py", [])
+ assert resolve_collection_argument(
+ invocation_path, "pkg.test::foo::bar", as_pypath=True
+ ) == (invocation_path / "src/pkg/test.py", ["foo", "bar"])
+ assert resolve_collection_argument(invocation_path, "pkg", as_pypath=True) == (
+ invocation_path / "src/pkg",
+ [],
+ )
+
+ with pytest.raises(
+ UsageError, match=r"package argument cannot contain :: selection parts"
+ ):
+ resolve_collection_argument(
+ invocation_path, "pkg::foo::bar", as_pypath=True
+ )
+
+ def test_parametrized_name_with_colons(self, invocation_path: Path) -> None:
+ ret = resolve_collection_argument(
+ invocation_path, "src/pkg/test.py::test[a::b]"
+ )
+ assert ret == (invocation_path / "src/pkg/test.py", ["test[a::b]"])
+
+ def test_does_not_exist(self, invocation_path: Path) -> None:
+ """Given a file/module that does not exist raises UsageError."""
+ with pytest.raises(
+ UsageError, match=re.escape("file or directory not found: foobar")
+ ):
+ resolve_collection_argument(invocation_path, "foobar")
+
+ with pytest.raises(
+ UsageError,
+ match=re.escape(
+ "module or package not found: foobar (missing __init__.py?)"
+ ),
+ ):
+ resolve_collection_argument(invocation_path, "foobar", as_pypath=True)
+
+ def test_absolute_paths_are_resolved_correctly(self, invocation_path: Path) -> None:
+ """Absolute paths resolve back to absolute paths."""
+ full_path = str(invocation_path / "src")
+ assert resolve_collection_argument(invocation_path, full_path) == (
+ Path(os.path.abspath("src")),
+ [],
+ )
+
+ # ensure full paths given in the command-line without the drive letter resolve
+ # to the full path correctly (#7628)
+ drive, full_path_without_drive = os.path.splitdrive(full_path)
+ assert resolve_collection_argument(
+ invocation_path, full_path_without_drive
+ ) == (Path(os.path.abspath("src")), [])
+
+
+def test_module_full_path_without_drive(pytester: Pytester) -> None:
+ """Collect and run test using full path except for the drive letter (#7628).
+
+ Passing a full path without a drive letter would trigger a bug in legacy_path
+ where it would keep the full path without the drive letter around, instead of resolving
+ to the full path, resulting in fixtures node ids not matching against test node ids correctly.
+ """
+ pytester.makepyfile(
+ **{
+ "project/conftest.py": """
+ import pytest
+ @pytest.fixture
+ def fix(): return 1
+ """,
+ }
+ )
+
+ pytester.makepyfile(
+ **{
+ "project/tests/dummy_test.py": """
+ def test(fix):
+ assert fix == 1
+ """
+ }
+ )
+ fn = pytester.path.joinpath("project/tests/dummy_test.py")
+ assert fn.is_file()
+
+ drive, path = os.path.splitdrive(str(fn))
+
+ result = pytester.runpytest(path, "-v")
+ result.stdout.fnmatch_lines(
+ [
+ os.path.join("project", "tests", "dummy_test.py") + "::test PASSED *",
+ "* 1 passed in *",
+ ]
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_mark.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_mark.py
new file mode 100644
index 0000000000..da67d1ea7b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_mark.py
@@ -0,0 +1,1130 @@
+import os
+import sys
+from typing import List
+from typing import Optional
+from unittest import mock
+
+import pytest
+from _pytest.config import ExitCode
+from _pytest.mark import MarkGenerator
+from _pytest.mark.structures import EMPTY_PARAMETERSET_OPTION
+from _pytest.nodes import Collector
+from _pytest.nodes import Node
+from _pytest.pytester import Pytester
+
+
+class TestMark:
+ @pytest.mark.parametrize("attr", ["mark", "param"])
+ def test_pytest_exists_in_namespace_all(self, attr: str) -> None:
+ module = sys.modules["pytest"]
+ assert attr in module.__all__ # type: ignore
+
+ def test_pytest_mark_notcallable(self) -> None:
+ mark = MarkGenerator(_ispytest=True)
+ with pytest.raises(TypeError):
+ mark() # type: ignore[operator]
+
+ def test_mark_with_param(self):
+ def some_function(abc):
+ pass
+
+ class SomeClass:
+ pass
+
+ assert pytest.mark.foo(some_function) is some_function
+ marked_with_args = pytest.mark.foo.with_args(some_function)
+ assert marked_with_args is not some_function # type: ignore[comparison-overlap]
+
+ assert pytest.mark.foo(SomeClass) is SomeClass
+ assert pytest.mark.foo.with_args(SomeClass) is not SomeClass # type: ignore[comparison-overlap]
+
+ def test_pytest_mark_name_starts_with_underscore(self) -> None:
+ mark = MarkGenerator(_ispytest=True)
+ with pytest.raises(AttributeError):
+ mark._some_name
+
+
+def test_marked_class_run_twice(pytester: Pytester) -> None:
+ """Test fails file is run twice that contains marked class.
+ See issue#683.
+ """
+ py_file = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('abc', [1, 2, 3])
+ class Test1(object):
+ def test_1(self, abc):
+ assert abc in [1, 2, 3]
+ """
+ )
+ file_name = os.path.basename(py_file)
+ rec = pytester.inline_run(file_name, file_name)
+ rec.assertoutcome(passed=6)
+
+
+def test_ini_markers(pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ markers =
+ a1: this is a webtest marker
+ a2: this is a smoke marker
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_markers(pytestconfig):
+ markers = pytestconfig.getini("markers")
+ print(markers)
+ assert len(markers) >= 2
+ assert markers[0].startswith("a1:")
+ assert markers[1].startswith("a2:")
+ """
+ )
+ rec = pytester.inline_run()
+ rec.assertoutcome(passed=1)
+
+
+def test_markers_option(pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ markers =
+ a1: this is a webtest marker
+ a1some: another marker
+ nodescription
+ """
+ )
+ result = pytester.runpytest("--markers")
+ result.stdout.fnmatch_lines(
+ ["*a1*this is a webtest*", "*a1some*another marker", "*nodescription*"]
+ )
+
+
+def test_ini_markers_whitespace(pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ markers =
+ a1 : this is a whitespace marker
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.a1
+ def test_markers():
+ assert True
+ """
+ )
+ rec = pytester.inline_run("--strict-markers", "-m", "a1")
+ rec.assertoutcome(passed=1)
+
+
+def test_marker_without_description(pytester: Pytester) -> None:
+ pytester.makefile(
+ ".cfg",
+ setup="""
+ [tool:pytest]
+ markers=slow
+ """,
+ )
+ pytester.makeconftest(
+ """
+ import pytest
+ pytest.mark.xfail('FAIL')
+ """
+ )
+ ftdir = pytester.mkdir("ft1_dummy")
+ pytester.path.joinpath("conftest.py").replace(ftdir.joinpath("conftest.py"))
+ rec = pytester.runpytest("--strict-markers")
+ rec.assert_outcomes()
+
+
+def test_markers_option_with_plugin_in_current_dir(pytester: Pytester) -> None:
+ pytester.makeconftest('pytest_plugins = "flip_flop"')
+ pytester.makepyfile(
+ flip_flop="""\
+ def pytest_configure(config):
+ config.addinivalue_line("markers", "flip:flop")
+
+ def pytest_generate_tests(metafunc):
+ try:
+ mark = metafunc.function.flipper
+ except AttributeError:
+ return
+ metafunc.parametrize("x", (10, 20))"""
+ )
+ pytester.makepyfile(
+ """\
+ import pytest
+ @pytest.mark.flipper
+ def test_example(x):
+ assert x"""
+ )
+
+ result = pytester.runpytest("--markers")
+ result.stdout.fnmatch_lines(["*flip*flop*"])
+
+
+def test_mark_on_pseudo_function(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.r(lambda x: 0/0)
+ def test_hello():
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+@pytest.mark.parametrize("option_name", ["--strict-markers", "--strict"])
+def test_strict_prohibits_unregistered_markers(
+ pytester: Pytester, option_name: str
+) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.unregisteredmark
+ def test_hello():
+ pass
+ """
+ )
+ result = pytester.runpytest(option_name)
+ assert result.ret != 0
+ result.stdout.fnmatch_lines(
+ ["'unregisteredmark' not found in `markers` configuration option"]
+ )
+
+
+@pytest.mark.parametrize(
+ ("expr", "expected_passed"),
+ [
+ ("xyz", ["test_one"]),
+ ("((( xyz)) )", ["test_one"]),
+ ("not not xyz", ["test_one"]),
+ ("xyz and xyz2", []),
+ ("xyz2", ["test_two"]),
+ ("xyz or xyz2", ["test_one", "test_two"]),
+ ],
+)
+def test_mark_option(
+ expr: str, expected_passed: List[Optional[str]], pytester: Pytester
+) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xyz
+ def test_one():
+ pass
+ @pytest.mark.xyz2
+ def test_two():
+ pass
+ """
+ )
+ rec = pytester.inline_run("-m", expr)
+ passed, skipped, fail = rec.listoutcomes()
+ passed_str = [x.nodeid.split("::")[-1] for x in passed]
+ assert passed_str == expected_passed
+
+
+@pytest.mark.parametrize(
+ ("expr", "expected_passed"),
+ [("interface", ["test_interface"]), ("not interface", ["test_nointer"])],
+)
+def test_mark_option_custom(
+ expr: str, expected_passed: List[str], pytester: Pytester
+) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_collection_modifyitems(items):
+ for item in items:
+ if "interface" in item.nodeid:
+ item.add_marker(pytest.mark.interface)
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_interface():
+ pass
+ def test_nointer():
+ pass
+ """
+ )
+ rec = pytester.inline_run("-m", expr)
+ passed, skipped, fail = rec.listoutcomes()
+ passed_str = [x.nodeid.split("::")[-1] for x in passed]
+ assert passed_str == expected_passed
+
+
+@pytest.mark.parametrize(
+ ("expr", "expected_passed"),
+ [
+ ("interface", ["test_interface"]),
+ ("not interface", ["test_nointer", "test_pass", "test_1", "test_2"]),
+ ("pass", ["test_pass"]),
+ ("not pass", ["test_interface", "test_nointer", "test_1", "test_2"]),
+ ("not not not (pass)", ["test_interface", "test_nointer", "test_1", "test_2"]),
+ ("1 or 2", ["test_1", "test_2"]),
+ ("not (1 or 2)", ["test_interface", "test_nointer", "test_pass"]),
+ ],
+)
+def test_keyword_option_custom(
+ expr: str, expected_passed: List[str], pytester: Pytester
+) -> None:
+ pytester.makepyfile(
+ """
+ def test_interface():
+ pass
+ def test_nointer():
+ pass
+ def test_pass():
+ pass
+ def test_1():
+ pass
+ def test_2():
+ pass
+ """
+ )
+ rec = pytester.inline_run("-k", expr)
+ passed, skipped, fail = rec.listoutcomes()
+ passed_str = [x.nodeid.split("::")[-1] for x in passed]
+ assert passed_str == expected_passed
+
+
+def test_keyword_option_considers_mark(pytester: Pytester) -> None:
+ pytester.copy_example("marks/marks_considered_keywords")
+ rec = pytester.inline_run("-k", "foo")
+ passed = rec.listoutcomes()[0]
+ assert len(passed) == 1
+
+
+@pytest.mark.parametrize(
+ ("expr", "expected_passed"),
+ [
+ ("None", ["test_func[None]"]),
+ ("[1.3]", ["test_func[1.3]"]),
+ ("2-3", ["test_func[2-3]"]),
+ ],
+)
+def test_keyword_option_parametrize(
+ expr: str, expected_passed: List[str], pytester: Pytester
+) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize("arg", [None, 1.3, "2-3"])
+ def test_func(arg):
+ pass
+ """
+ )
+ rec = pytester.inline_run("-k", expr)
+ passed, skipped, fail = rec.listoutcomes()
+ passed_str = [x.nodeid.split("::")[-1] for x in passed]
+ assert passed_str == expected_passed
+
+
+def test_parametrize_with_module(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize("arg", [pytest,])
+ def test_func(arg):
+ pass
+ """
+ )
+ rec = pytester.inline_run()
+ passed, skipped, fail = rec.listoutcomes()
+ expected_id = "test_func[" + pytest.__name__ + "]"
+ assert passed[0].nodeid.split("::")[-1] == expected_id
+
+
+@pytest.mark.parametrize(
+ ("expr", "expected_error"),
+ [
+ (
+ "foo or",
+ "at column 7: expected not OR left parenthesis OR identifier; got end of input",
+ ),
+ (
+ "foo or or",
+ "at column 8: expected not OR left parenthesis OR identifier; got or",
+ ),
+ (
+ "(foo",
+ "at column 5: expected right parenthesis; got end of input",
+ ),
+ (
+ "foo bar",
+ "at column 5: expected end of input; got identifier",
+ ),
+ (
+ "or or",
+ "at column 1: expected not OR left parenthesis OR identifier; got or",
+ ),
+ (
+ "not or",
+ "at column 5: expected not OR left parenthesis OR identifier; got or",
+ ),
+ ],
+)
+def test_keyword_option_wrong_arguments(
+ expr: str, expected_error: str, pytester: Pytester, capsys
+) -> None:
+ pytester.makepyfile(
+ """
+ def test_func(arg):
+ pass
+ """
+ )
+ pytester.inline_run("-k", expr)
+ err = capsys.readouterr().err
+ assert expected_error in err
+
+
+def test_parametrized_collected_from_command_line(pytester: Pytester) -> None:
+ """Parametrized test not collected if test named specified in command
+ line issue#649."""
+ py_file = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize("arg", [None, 1.3, "2-3"])
+ def test_func(arg):
+ pass
+ """
+ )
+ file_name = os.path.basename(py_file)
+ rec = pytester.inline_run(file_name + "::" + "test_func")
+ rec.assertoutcome(passed=3)
+
+
+def test_parametrized_collect_with_wrong_args(pytester: Pytester) -> None:
+ """Test collect parametrized func with wrong number of args."""
+ py_file = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize('foo, bar', [(1, 2, 3)])
+ def test_func(foo, bar):
+ pass
+ """
+ )
+
+ result = pytester.runpytest(py_file)
+ result.stdout.fnmatch_lines(
+ [
+ 'test_parametrized_collect_with_wrong_args.py::test_func: in "parametrize" the number of names (2):',
+ " ['foo', 'bar']",
+ "must be equal to the number of values (3):",
+ " (1, 2, 3)",
+ ]
+ )
+
+
+def test_parametrized_with_kwargs(pytester: Pytester) -> None:
+ """Test collect parametrized func with wrong number of args."""
+ py_file = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture(params=[1,2])
+ def a(request):
+ return request.param
+
+ @pytest.mark.parametrize(argnames='b', argvalues=[1, 2])
+ def test_func(a, b):
+ pass
+ """
+ )
+
+ result = pytester.runpytest(py_file)
+ assert result.ret == 0
+
+
+def test_parametrize_iterator(pytester: Pytester) -> None:
+ """`parametrize` should work with generators (#5354)."""
+ py_file = pytester.makepyfile(
+ """\
+ import pytest
+
+ def gen():
+ yield 1
+ yield 2
+ yield 3
+
+ @pytest.mark.parametrize('a', gen())
+ def test(a):
+ assert a >= 1
+ """
+ )
+ result = pytester.runpytest(py_file)
+ assert result.ret == 0
+ # should not skip any tests
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+
+class TestFunctional:
+ def test_merging_markers_deep(self, pytester: Pytester) -> None:
+ # issue 199 - propagate markers into nested classes
+ p = pytester.makepyfile(
+ """
+ import pytest
+ class TestA(object):
+ pytestmark = pytest.mark.a
+ def test_b(self):
+ assert True
+ class TestC(object):
+ # this one didn't get marked
+ def test_d(self):
+ assert True
+ """
+ )
+ items, rec = pytester.inline_genitems(p)
+ for item in items:
+ print(item, item.keywords)
+ assert [x for x in item.iter_markers() if x.name == "a"]
+
+ def test_mark_decorator_subclass_does_not_propagate_to_base(
+ self, pytester: Pytester
+ ) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.a
+ class Base(object): pass
+
+ @pytest.mark.b
+ class Test1(Base):
+ def test_foo(self): pass
+
+ class Test2(Base):
+ def test_bar(self): pass
+ """
+ )
+ items, rec = pytester.inline_genitems(p)
+ self.assert_markers(items, test_foo=("a", "b"), test_bar=("a",))
+
+ def test_mark_should_not_pass_to_siebling_class(self, pytester: Pytester) -> None:
+ """#568"""
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ class TestBase(object):
+ def test_foo(self):
+ pass
+
+ @pytest.mark.b
+ class TestSub(TestBase):
+ pass
+
+
+ class TestOtherSub(TestBase):
+ pass
+
+ """
+ )
+ items, rec = pytester.inline_genitems(p)
+ base_item, sub_item, sub_item_other = items
+ print(items, [x.nodeid for x in items])
+ # new api segregates
+ assert not list(base_item.iter_markers(name="b"))
+ assert not list(sub_item_other.iter_markers(name="b"))
+ assert list(sub_item.iter_markers(name="b"))
+
+ def test_mark_decorator_baseclasses_merged(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.a
+ class Base(object): pass
+
+ @pytest.mark.b
+ class Base2(Base): pass
+
+ @pytest.mark.c
+ class Test1(Base2):
+ def test_foo(self): pass
+
+ class Test2(Base2):
+ @pytest.mark.d
+ def test_bar(self): pass
+ """
+ )
+ items, rec = pytester.inline_genitems(p)
+ self.assert_markers(items, test_foo=("a", "b", "c"), test_bar=("a", "b", "d"))
+
+ def test_mark_closest(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.c(location="class")
+ class Test:
+ @pytest.mark.c(location="function")
+ def test_has_own(self):
+ pass
+
+ def test_has_inherited(self):
+ pass
+
+ """
+ )
+ items, rec = pytester.inline_genitems(p)
+ has_own, has_inherited = items
+ has_own_marker = has_own.get_closest_marker("c")
+ has_inherited_marker = has_inherited.get_closest_marker("c")
+ assert has_own_marker is not None
+ assert has_inherited_marker is not None
+ assert has_own_marker.kwargs == {"location": "function"}
+ assert has_inherited_marker.kwargs == {"location": "class"}
+ assert has_own.get_closest_marker("missing") is None
+
+ def test_mark_with_wrong_marker(self, pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ import pytest
+ class pytestmark(object):
+ pass
+ def test_func():
+ pass
+ """
+ )
+ values = reprec.getfailedcollections()
+ assert len(values) == 1
+ assert "TypeError" in str(values[0].longrepr)
+
+ def test_mark_dynamically_in_funcarg(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ @pytest.fixture
+ def arg(request):
+ request.applymarker(pytest.mark.hello)
+ def pytest_terminal_summary(terminalreporter):
+ values = terminalreporter.stats['passed']
+ terminalreporter._tw.line("keyword: %s" % values[0].keywords)
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_func(arg):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["keyword: *hello*"])
+
+ def test_no_marker_match_on_unmarked_names(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.shouldmatch
+ def test_marked():
+ assert 1
+
+ def test_unmarked():
+ assert 1
+ """
+ )
+ reprec = pytester.inline_run("-m", "test_unmarked", p)
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(passed) + len(skipped) + len(failed) == 0
+ dlist = reprec.getcalls("pytest_deselected")
+ deselected_tests = dlist[0].items
+ assert len(deselected_tests) == 2
+
+ def test_keywords_at_node_level(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope="session", autouse=True)
+ def some(request):
+ request.keywords["hello"] = 42
+ assert "world" not in request.keywords
+
+ @pytest.fixture(scope="function", autouse=True)
+ def funcsetup(request):
+ assert "world" in request.keywords
+ assert "hello" in request.keywords
+
+ @pytest.mark.world
+ def test_function():
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+ def test_keyword_added_for_session(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_collection_modifyitems(session):
+ session.add_marker("mark1")
+ session.add_marker(pytest.mark.mark2)
+ session.add_marker(pytest.mark.mark3)
+ pytest.raises(ValueError, lambda:
+ session.add_marker(10))
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_some(request):
+ assert "mark1" in request.keywords
+ assert "mark2" in request.keywords
+ assert "mark3" in request.keywords
+ assert 10 not in request.keywords
+ marker = request.node.get_closest_marker("mark1")
+ assert marker.name == "mark1"
+ assert marker.args == ()
+ assert marker.kwargs == {}
+ """
+ )
+ reprec = pytester.inline_run("-m", "mark1")
+ reprec.assertoutcome(passed=1)
+
+ def assert_markers(self, items, **expected) -> None:
+ """Assert that given items have expected marker names applied to them.
+ expected should be a dict of (item name -> seq of expected marker names).
+
+ Note: this could be moved to ``pytester`` if proven to be useful
+ to other modules.
+ """
+ items = {x.name: x for x in items}
+ for name, expected_markers in expected.items():
+ markers = {m.name for m in items[name].iter_markers()}
+ assert markers == set(expected_markers)
+
+ @pytest.mark.filterwarnings("ignore")
+ def test_mark_from_parameters(self, pytester: Pytester) -> None:
+ """#1540"""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ pytestmark = pytest.mark.skipif(True, reason='skip all')
+
+ # skipifs inside fixture params
+ params = [pytest.mark.skipif(False, reason='dont skip')('parameter')]
+
+
+ @pytest.fixture(params=params)
+ def parameter(request):
+ return request.param
+
+
+ def test_1(parameter):
+ assert True
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(skipped=1)
+
+ def test_reevaluate_dynamic_expr(self, pytester: Pytester) -> None:
+ """#7360"""
+ py_file1 = pytester.makepyfile(
+ test_reevaluate_dynamic_expr1="""
+ import pytest
+
+ skip = True
+
+ @pytest.mark.skipif("skip")
+ def test_should_skip():
+ assert True
+ """
+ )
+ py_file2 = pytester.makepyfile(
+ test_reevaluate_dynamic_expr2="""
+ import pytest
+
+ skip = False
+
+ @pytest.mark.skipif("skip")
+ def test_should_not_skip():
+ assert True
+ """
+ )
+
+ file_name1 = os.path.basename(py_file1)
+ file_name2 = os.path.basename(py_file2)
+ reprec = pytester.inline_run(file_name1, file_name2)
+ reprec.assertoutcome(passed=1, skipped=1)
+
+
+class TestKeywordSelection:
+ def test_select_simple(self, pytester: Pytester) -> None:
+ file_test = pytester.makepyfile(
+ """
+ def test_one():
+ assert 0
+ class TestClass(object):
+ def test_method_one(self):
+ assert 42 == 43
+ """
+ )
+
+ def check(keyword, name):
+ reprec = pytester.inline_run("-s", "-k", keyword, file_test)
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(failed) == 1
+ assert failed[0].nodeid.split("::")[-1] == name
+ assert len(reprec.getcalls("pytest_deselected")) == 1
+
+ for keyword in ["test_one", "est_on"]:
+ check(keyword, "test_one")
+ check("TestClass and test", "test_method_one")
+
+ @pytest.mark.parametrize(
+ "keyword",
+ [
+ "xxx",
+ "xxx and test_2",
+ "TestClass",
+ "xxx and not test_1",
+ "TestClass and test_2",
+ "xxx and TestClass and test_2",
+ ],
+ )
+ def test_select_extra_keywords(self, pytester: Pytester, keyword) -> None:
+ p = pytester.makepyfile(
+ test_select="""
+ def test_1():
+ pass
+ class TestClass(object):
+ def test_2(self):
+ pass
+ """
+ )
+ pytester.makepyfile(
+ conftest="""
+ import pytest
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_pycollect_makeitem(name):
+ outcome = yield
+ if name == "TestClass":
+ item = outcome.get_result()
+ item.extra_keyword_matches.add("xxx")
+ """
+ )
+ reprec = pytester.inline_run(p.parent, "-s", "-k", keyword)
+ print("keyword", repr(keyword))
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(passed) == 1
+ assert passed[0].nodeid.endswith("test_2")
+ dlist = reprec.getcalls("pytest_deselected")
+ assert len(dlist) == 1
+ assert dlist[0].items[0].name == "test_1"
+
+ def test_select_starton(self, pytester: Pytester) -> None:
+ threepass = pytester.makepyfile(
+ test_threepass="""
+ def test_one(): assert 1
+ def test_two(): assert 1
+ def test_three(): assert 1
+ """
+ )
+ reprec = pytester.inline_run(
+ "-Wignore::pytest.PytestRemovedIn7Warning", "-k", "test_two:", threepass
+ )
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(passed) == 2
+ assert not failed
+ dlist = reprec.getcalls("pytest_deselected")
+ assert len(dlist) == 1
+ item = dlist[0].items[0]
+ assert item.name == "test_one"
+
+ def test_keyword_extra(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def test_one():
+ assert 0
+ test_one.mykeyword = True
+ """
+ )
+ reprec = pytester.inline_run("-k", "mykeyword", p)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 1
+
+ @pytest.mark.xfail
+ def test_keyword_extra_dash(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def test_one():
+ assert 0
+ test_one.mykeyword = True
+ """
+ )
+ # with argparse the argument to an option cannot
+ # start with '-'
+ reprec = pytester.inline_run("-k", "-mykeyword", p)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert passed + skipped + failed == 0
+
+ @pytest.mark.parametrize(
+ "keyword",
+ ["__", "+", ".."],
+ )
+ def test_no_magic_values(self, pytester: Pytester, keyword: str) -> None:
+ """Make sure the tests do not match on magic values,
+ no double underscored values, like '__dict__' and '+'.
+ """
+ p = pytester.makepyfile(
+ """
+ def test_one(): assert 1
+ """
+ )
+
+ reprec = pytester.inline_run("-k", keyword, p)
+ passed, skipped, failed = reprec.countoutcomes()
+ dlist = reprec.getcalls("pytest_deselected")
+ assert passed + skipped + failed == 0
+ deselected_tests = dlist[0].items
+ assert len(deselected_tests) == 1
+
+ def test_no_match_directories_outside_the_suite(self, pytester: Pytester) -> None:
+ """`-k` should not match against directories containing the test suite (#7040)."""
+ test_contents = """
+ def test_aaa(): pass
+ def test_ddd(): pass
+ """
+ pytester.makepyfile(
+ **{"ddd/tests/__init__.py": "", "ddd/tests/test_foo.py": test_contents}
+ )
+
+ def get_collected_names(*args):
+ _, rec = pytester.inline_genitems(*args)
+ calls = rec.getcalls("pytest_collection_finish")
+ assert len(calls) == 1
+ return [x.name for x in calls[0].session.items]
+
+ # sanity check: collect both tests in normal runs
+ assert get_collected_names() == ["test_aaa", "test_ddd"]
+
+ # do not collect anything based on names outside the collection tree
+ assert get_collected_names("-k", pytester._name) == []
+
+ # "-k ddd" should only collect "test_ddd", but not
+ # 'test_aaa' just because one of its parent directories is named "ddd";
+ # this was matched previously because Package.name would contain the full path
+ # to the package
+ assert get_collected_names("-k", "ddd") == ["test_ddd"]
+
+
+class TestMarkDecorator:
+ @pytest.mark.parametrize(
+ "lhs, rhs, expected",
+ [
+ (pytest.mark.foo(), pytest.mark.foo(), True),
+ (pytest.mark.foo(), pytest.mark.bar(), False),
+ (pytest.mark.foo(), "bar", False),
+ ("foo", pytest.mark.bar(), False),
+ ],
+ )
+ def test__eq__(self, lhs, rhs, expected) -> None:
+ assert (lhs == rhs) == expected
+
+ def test_aliases(self) -> None:
+ md = pytest.mark.foo(1, "2", three=3)
+ assert md.name == "foo"
+ assert md.args == (1, "2")
+ assert md.kwargs == {"three": 3}
+
+
+@pytest.mark.parametrize("mark", [None, "", "skip", "xfail"])
+def test_parameterset_for_parametrize_marks(
+ pytester: Pytester, mark: Optional[str]
+) -> None:
+ if mark is not None:
+ pytester.makeini(
+ """
+ [pytest]
+ {}={}
+ """.format(
+ EMPTY_PARAMETERSET_OPTION, mark
+ )
+ )
+
+ config = pytester.parseconfig()
+ from _pytest.mark import pytest_configure, get_empty_parameterset_mark
+
+ pytest_configure(config)
+ result_mark = get_empty_parameterset_mark(config, ["a"], all)
+ if mark in (None, ""):
+ # normalize to the requested name
+ mark = "skip"
+ assert result_mark.name == mark
+ assert result_mark.kwargs["reason"].startswith("got empty parameter set ")
+ if mark == "xfail":
+ assert result_mark.kwargs.get("run") is False
+
+
+def test_parameterset_for_fail_at_collect(pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ {}=fail_at_collect
+ """.format(
+ EMPTY_PARAMETERSET_OPTION
+ )
+ )
+
+ config = pytester.parseconfig()
+ from _pytest.mark import pytest_configure, get_empty_parameterset_mark
+
+ pytest_configure(config)
+
+ with pytest.raises(
+ Collector.CollectError,
+ match=r"Empty parameter set in 'pytest_configure' at line \d\d+",
+ ):
+ get_empty_parameterset_mark(config, ["a"], pytest_configure)
+
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize("empty", [])
+ def test():
+ pass
+ """
+ )
+ result = pytester.runpytest(str(p1))
+ result.stdout.fnmatch_lines(
+ [
+ "collected 0 items / 1 error",
+ "* ERROR collecting test_parameterset_for_fail_at_collect.py *",
+ "Empty parameter set in 'test' at line 3",
+ "*= 1 error in *",
+ ]
+ )
+ assert result.ret == ExitCode.INTERRUPTED
+
+
+def test_parameterset_for_parametrize_bad_markname(pytester: Pytester) -> None:
+ with pytest.raises(pytest.UsageError):
+ test_parameterset_for_parametrize_marks(pytester, "bad")
+
+
+def test_mark_expressions_no_smear(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ class BaseTests(object):
+ def test_something(self):
+ pass
+
+ @pytest.mark.FOO
+ class TestFooClass(BaseTests):
+ pass
+
+ @pytest.mark.BAR
+ class TestBarClass(BaseTests):
+ pass
+ """
+ )
+
+ reprec = pytester.inline_run("-m", "FOO")
+ passed, skipped, failed = reprec.countoutcomes()
+ dlist = reprec.getcalls("pytest_deselected")
+ assert passed == 1
+ assert skipped == failed == 0
+ deselected_tests = dlist[0].items
+ assert len(deselected_tests) == 1
+
+ # todo: fixed
+ # keywords smear - expected behaviour
+ # reprec_keywords = pytester.inline_run("-k", "FOO")
+ # passed_k, skipped_k, failed_k = reprec_keywords.countoutcomes()
+ # assert passed_k == 2
+ # assert skipped_k == failed_k == 0
+
+
+def test_addmarker_order(pytester) -> None:
+ session = mock.Mock()
+ session.own_markers = []
+ session.parent = None
+ session.nodeid = ""
+ session.path = pytester.path
+ node = Node.from_parent(session, name="Test")
+ node.add_marker("foo")
+ node.add_marker("bar")
+ node.add_marker("baz", append=False)
+ extracted = [x.name for x in node.iter_markers()]
+ assert extracted == ["baz", "foo", "bar"]
+
+
+@pytest.mark.filterwarnings("ignore")
+def test_markers_from_parametrize(pytester: Pytester) -> None:
+ """#3605"""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ first_custom_mark = pytest.mark.custom_marker
+ custom_mark = pytest.mark.custom_mark
+ @pytest.fixture(autouse=True)
+ def trigger(request):
+ custom_mark = list(request.node.iter_markers('custom_mark'))
+ print("Custom mark %s" % custom_mark)
+
+ @custom_mark("custom mark non parametrized")
+ def test_custom_mark_non_parametrized():
+ print("Hey from test")
+
+ @pytest.mark.parametrize(
+ "obj_type",
+ [
+ first_custom_mark("first custom mark")("template"),
+ pytest.param( # Think this should be recommended way?
+ "disk",
+ marks=custom_mark('custom mark1')
+ ),
+ custom_mark("custom mark2")("vm"), # Tried also this
+ ]
+ )
+ def test_custom_mark_parametrized(obj_type):
+ print("obj_type is:", obj_type)
+ """
+ )
+
+ result = pytester.runpytest()
+ result.assert_outcomes(passed=4)
+
+
+def test_pytest_param_id_requires_string() -> None:
+ with pytest.raises(TypeError) as excinfo:
+ pytest.param(id=True) # type: ignore[arg-type]
+ (msg,) = excinfo.value.args
+ assert msg == "Expected id to be a string, got <class 'bool'>: True"
+
+
+@pytest.mark.parametrize("s", (None, "hello world"))
+def test_pytest_param_id_allows_none_or_string(s) -> None:
+ assert pytest.param(id=s)
+
+
+@pytest.mark.parametrize("expr", ("NOT internal_err", "NOT (internal_err)", "bogus="))
+def test_marker_expr_eval_failure_handling(pytester: Pytester, expr) -> None:
+ foo = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.internal_err
+ def test_foo():
+ pass
+ """
+ )
+ expected = f"ERROR: Wrong expression passed to '-m': {expr}: *"
+ result = pytester.runpytest(foo, "-m", expr)
+ result.stderr.fnmatch_lines([expected])
+ assert result.ret == ExitCode.USAGE_ERROR
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_mark_expression.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_mark_expression.py
new file mode 100644
index 0000000000..f3643e7b40
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_mark_expression.py
@@ -0,0 +1,195 @@
+from typing import Callable
+
+import pytest
+from _pytest.mark.expression import Expression
+from _pytest.mark.expression import ParseError
+
+
+def evaluate(input: str, matcher: Callable[[str], bool]) -> bool:
+ return Expression.compile(input).evaluate(matcher)
+
+
+def test_empty_is_false() -> None:
+ assert not evaluate("", lambda ident: False)
+ assert not evaluate("", lambda ident: True)
+ assert not evaluate(" ", lambda ident: False)
+ assert not evaluate("\t", lambda ident: False)
+
+
+@pytest.mark.parametrize(
+ ("expr", "expected"),
+ (
+ ("true", True),
+ ("true", True),
+ ("false", False),
+ ("not true", False),
+ ("not false", True),
+ ("not not true", True),
+ ("not not false", False),
+ ("true and true", True),
+ ("true and false", False),
+ ("false and true", False),
+ ("true and true and true", True),
+ ("true and true and false", False),
+ ("true and true and not true", False),
+ ("false or false", False),
+ ("false or true", True),
+ ("true or true", True),
+ ("true or true or false", True),
+ ("true and true or false", True),
+ ("not true or true", True),
+ ("(not true) or true", True),
+ ("not (true or true)", False),
+ ("true and true or false and false", True),
+ ("true and (true or false) and false", False),
+ ("true and (true or (not (not false))) and false", False),
+ ),
+)
+def test_basic(expr: str, expected: bool) -> None:
+ matcher = {"true": True, "false": False}.__getitem__
+ assert evaluate(expr, matcher) is expected
+
+
+@pytest.mark.parametrize(
+ ("expr", "expected"),
+ (
+ (" true ", True),
+ (" ((((((true)))))) ", True),
+ (" ( ((\t (((true))))) \t \t)", True),
+ ("( true and (((false))))", False),
+ ("not not not not true", True),
+ ("not not not not not true", False),
+ ),
+)
+def test_syntax_oddeties(expr: str, expected: bool) -> None:
+ matcher = {"true": True, "false": False}.__getitem__
+ assert evaluate(expr, matcher) is expected
+
+
+def test_backslash_not_treated_specially() -> None:
+ r"""When generating nodeids, if the source name contains special characters
+ like a newline, they are escaped into two characters like \n. Therefore, a
+ user will never need to insert a literal newline, only \n (two chars). So
+ mark expressions themselves do not support escaping, instead they treat
+ backslashes as regular identifier characters."""
+ matcher = {r"\nfoo\n"}.__contains__
+
+ assert evaluate(r"\nfoo\n", matcher)
+ assert not evaluate(r"foo", matcher)
+ with pytest.raises(ParseError):
+ evaluate("\nfoo\n", matcher)
+
+
+@pytest.mark.parametrize(
+ ("expr", "column", "message"),
+ (
+ ("(", 2, "expected not OR left parenthesis OR identifier; got end of input"),
+ (
+ " (",
+ 3,
+ "expected not OR left parenthesis OR identifier; got end of input",
+ ),
+ (
+ ")",
+ 1,
+ "expected not OR left parenthesis OR identifier; got right parenthesis",
+ ),
+ (
+ ") ",
+ 1,
+ "expected not OR left parenthesis OR identifier; got right parenthesis",
+ ),
+ (
+ "not",
+ 4,
+ "expected not OR left parenthesis OR identifier; got end of input",
+ ),
+ (
+ "not not",
+ 8,
+ "expected not OR left parenthesis OR identifier; got end of input",
+ ),
+ (
+ "(not)",
+ 5,
+ "expected not OR left parenthesis OR identifier; got right parenthesis",
+ ),
+ ("and", 1, "expected not OR left parenthesis OR identifier; got and"),
+ (
+ "ident and",
+ 10,
+ "expected not OR left parenthesis OR identifier; got end of input",
+ ),
+ (
+ "ident and or",
+ 11,
+ "expected not OR left parenthesis OR identifier; got or",
+ ),
+ ("ident ident", 7, "expected end of input; got identifier"),
+ ),
+)
+def test_syntax_errors(expr: str, column: int, message: str) -> None:
+ with pytest.raises(ParseError) as excinfo:
+ evaluate(expr, lambda ident: True)
+ assert excinfo.value.column == column
+ assert excinfo.value.message == message
+
+
+@pytest.mark.parametrize(
+ "ident",
+ (
+ ".",
+ "...",
+ ":::",
+ "a:::c",
+ "a+-b",
+ r"\nhe\\l\lo\n\t\rbye",
+ "a/b",
+ "×בגד",
+ "aa×בגדcc",
+ "a[bcd]",
+ "1234",
+ "1234abcd",
+ "1234and",
+ "notandor",
+ "not_and_or",
+ "not[and]or",
+ "1234+5678",
+ "123.232",
+ "True",
+ "False",
+ "None",
+ "if",
+ "else",
+ "while",
+ ),
+)
+def test_valid_idents(ident: str) -> None:
+ assert evaluate(ident, {ident: True}.__getitem__)
+
+
+@pytest.mark.parametrize(
+ "ident",
+ (
+ "^",
+ "*",
+ "=",
+ "&",
+ "%",
+ "$",
+ "#",
+ "@",
+ "!",
+ "~",
+ "{",
+ "}",
+ '"',
+ "'",
+ "|",
+ ";",
+ "â†",
+ ),
+)
+def test_invalid_idents(ident: str) -> None:
+ with pytest.raises(ParseError):
+ evaluate(ident, lambda ident: True)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_meta.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_meta.py
new file mode 100644
index 0000000000..9201bd2161
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_meta.py
@@ -0,0 +1,32 @@
+"""Test importing of all internal packages and modules.
+
+This ensures all internal packages can be imported without needing the pytest
+namespace to be set, which is critical for the initialization of xdist.
+"""
+import pkgutil
+import subprocess
+import sys
+from typing import List
+
+import _pytest
+import pytest
+
+
+def _modules() -> List[str]:
+ pytest_pkg: str = _pytest.__path__ # type: ignore
+ return sorted(
+ n
+ for _, n, _ in pkgutil.walk_packages(pytest_pkg, prefix=_pytest.__name__ + ".")
+ )
+
+
+@pytest.mark.slow
+@pytest.mark.parametrize("module", _modules())
+def test_no_warnings(module: str) -> None:
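+ # Import the module in a fresh interpreter with warnings turned into
+ # errors, so any warning emitted at import time fails the test.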
+ # fmt: off
+ subprocess.check_call((
+ sys.executable,
+ "-W", "error",
+ "-c", f"__import__({module!r})",
+ ))
+ # fmt: on
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_monkeypatch.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_monkeypatch.py
new file mode 100644
index 0000000000..9552181802
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_monkeypatch.py
@@ -0,0 +1,455 @@
+import os
+import re
+import sys
+import textwrap
+from pathlib import Path
+from typing import Dict
+from typing import Generator
+from typing import Type
+
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+
+
+@pytest.fixture
+def mp() -> Generator[MonkeyPatch, None, None]:
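+ # Restore cwd and sys.path manually; the yielded MonkeyPatch is not undone
+ # here, the tests call undo() themselves.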
+ cwd = os.getcwd()
+ sys_path = list(sys.path)
+ yield MonkeyPatch()
+ sys.path[:] = sys_path
+ os.chdir(cwd)
+
+
+def test_setattr() -> None:
+ class A:
+ x = 1
+
+ monkeypatch = MonkeyPatch()
+ pytest.raises(AttributeError, monkeypatch.setattr, A, "notexists", 2)
+ monkeypatch.setattr(A, "y", 2, raising=False)
+ assert A.y == 2 # type: ignore
+ monkeypatch.undo()
+ assert not hasattr(A, "y")
+
+ monkeypatch = MonkeyPatch()
+ monkeypatch.setattr(A, "x", 2)
+ assert A.x == 2
+ monkeypatch.setattr(A, "x", 3)
+ assert A.x == 3
+ monkeypatch.undo()
+ assert A.x == 1
+
+ A.x = 5
+ monkeypatch.undo() # double-undo makes no modification
+ assert A.x == 5
+
+ with pytest.raises(TypeError):
+ monkeypatch.setattr(A, "y") # type: ignore[call-overload]
+
+
+class TestSetattrWithImportPath:
+ def test_string_expression(self, monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.setattr("os.path.abspath", lambda x: "hello2")
+ assert os.path.abspath("123") == "hello2"
+
+ def test_string_expression_class(self, monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.setattr("_pytest.config.Config", 42)
+ import _pytest
+
+ assert _pytest.config.Config == 42 # type: ignore
+
+ def test_unicode_string(self, monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.setattr("_pytest.config.Config", 42)
+ import _pytest
+
+ assert _pytest.config.Config == 42 # type: ignore
+ monkeypatch.delattr("_pytest.config.Config")
+
+ def test_wrong_target(self, monkeypatch: MonkeyPatch) -> None:
+ with pytest.raises(TypeError):
+ monkeypatch.setattr(None, None) # type: ignore[call-overload]
+
+ def test_unknown_import(self, monkeypatch: MonkeyPatch) -> None:
+ with pytest.raises(ImportError):
+ monkeypatch.setattr("unkn123.classx", None)
+
+ def test_unknown_attr(self, monkeypatch: MonkeyPatch) -> None:
+ with pytest.raises(AttributeError):
+ monkeypatch.setattr("os.path.qweqwe", None)
+
+ def test_unknown_attr_non_raising(self, monkeypatch: MonkeyPatch) -> None:
+ # https://github.com/pytest-dev/pytest/issues/746
+ monkeypatch.setattr("os.path.qweqwe", 42, raising=False)
+ assert os.path.qweqwe == 42 # type: ignore
+
+ def test_delattr(self, monkeypatch: MonkeyPatch) -> None:
+ monkeypatch.delattr("os.path.abspath")
+ assert not hasattr(os.path, "abspath")
+ monkeypatch.undo()
+ assert os.path.abspath
+
+
+def test_delattr() -> None:
+ class A:
+ x = 1
+
+ monkeypatch = MonkeyPatch()
+ monkeypatch.delattr(A, "x")
+ assert not hasattr(A, "x")
+ monkeypatch.undo()
+ assert A.x == 1
+
+ monkeypatch = MonkeyPatch()
+ monkeypatch.delattr(A, "x")
+ pytest.raises(AttributeError, monkeypatch.delattr, A, "y")
+ monkeypatch.delattr(A, "y", raising=False)
+ monkeypatch.setattr(A, "x", 5, raising=False)
+ assert A.x == 5
+ monkeypatch.undo()
+ assert A.x == 1
+
+
+def test_setitem() -> None:
+ d = {"x": 1}
+ monkeypatch = MonkeyPatch()
+ monkeypatch.setitem(d, "x", 2)
+ monkeypatch.setitem(d, "y", 1700)
+ monkeypatch.setitem(d, "y", 1700)
+ assert d["x"] == 2
+ assert d["y"] == 1700
+ monkeypatch.setitem(d, "x", 3)
+ assert d["x"] == 3
+ monkeypatch.undo()
+ assert d["x"] == 1
+ assert "y" not in d
+ d["x"] = 5
+ monkeypatch.undo()
+ assert d["x"] == 5
+
+
+def test_setitem_deleted_meanwhile() -> None:
+ d: Dict[str, object] = {}
+ monkeypatch = MonkeyPatch()
+ monkeypatch.setitem(d, "x", 2)
+ del d["x"]
+ monkeypatch.undo()
+ assert not d
+
+
+@pytest.mark.parametrize("before", [True, False])
+def test_setenv_deleted_meanwhile(before: bool) -> None:
+ key = "qwpeoip123"
+ if before:
+ os.environ[key] = "world"
+ monkeypatch = MonkeyPatch()
+ monkeypatch.setenv(key, "hello")
+ del os.environ[key]
+ monkeypatch.undo()
+ if before:
+ assert os.environ[key] == "world"
+ del os.environ[key]
+ else:
+ assert key not in os.environ
+
+
+def test_delitem() -> None:
+ d: Dict[str, object] = {"x": 1}
+ monkeypatch = MonkeyPatch()
+ monkeypatch.delitem(d, "x")
+ assert "x" not in d
+ monkeypatch.delitem(d, "y", raising=False)
+ pytest.raises(KeyError, monkeypatch.delitem, d, "y")
+ assert not d
+ monkeypatch.setitem(d, "y", 1700)
+ assert d["y"] == 1700
+ d["hello"] = "world"
+ monkeypatch.setitem(d, "x", 1500)
+ assert d["x"] == 1500
+ monkeypatch.undo()
+ assert d == {"hello": "world", "x": 1}
+
+
+def test_setenv() -> None:
+ monkeypatch = MonkeyPatch()
+ with pytest.warns(pytest.PytestWarning):
+ monkeypatch.setenv("XYZ123", 2) # type: ignore[arg-type]
+ import os
+
+ assert os.environ["XYZ123"] == "2"
+ monkeypatch.undo()
+ assert "XYZ123" not in os.environ
+
+
+def test_delenv() -> None:
+ name = "xyz1234"
+ assert name not in os.environ
+ monkeypatch = MonkeyPatch()
+ pytest.raises(KeyError, monkeypatch.delenv, name, raising=True)
+ monkeypatch.delenv(name, raising=False)
+ monkeypatch.undo()
+ os.environ[name] = "1"
+ try:
+ monkeypatch = MonkeyPatch()
+ monkeypatch.delenv(name)
+ assert name not in os.environ
+ monkeypatch.setenv(name, "3")
+ assert os.environ[name] == "3"
+ monkeypatch.undo()
+ assert os.environ[name] == "1"
+ finally:
+ if name in os.environ:
+ del os.environ[name]
+
+
+class TestEnvironWarnings:
+ """
+ os.environ keys and values should be native strings, otherwise they will cause problems with other modules
+ (notably subprocess). On Python 2 os.environ accepted anything without complaining, while Python 3 does the
+ right thing and raises an error.
+ """
+
+ VAR_NAME = "PYTEST_INTERNAL_MY_VAR"
+
+ def test_setenv_non_str_warning(self, monkeypatch: MonkeyPatch) -> None:
+ value = 2
+ msg = (
+ "Value of environment variable PYTEST_INTERNAL_MY_VAR type should be str, "
+ "but got 2 (type: int); converted to str implicitly"
+ )
+ with pytest.warns(pytest.PytestWarning, match=re.escape(msg)):
+ monkeypatch.setenv(str(self.VAR_NAME), value) # type: ignore[arg-type]
+
+
+def test_setenv_prepend() -> None:
+ import os
+
+ monkeypatch = MonkeyPatch()
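+ # prepend joins the new value in front of any existing value using the given separator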
+ monkeypatch.setenv("XYZ123", "2", prepend="-")
+ monkeypatch.setenv("XYZ123", "3", prepend="-")
+ assert os.environ["XYZ123"] == "3-2"
+ monkeypatch.undo()
+ assert "XYZ123" not in os.environ
+
+
+def test_monkeypatch_plugin(pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ def test_method(monkeypatch):
+ assert monkeypatch.__class__.__name__ == "MonkeyPatch"
+ """
+ )
+ res = reprec.countoutcomes()
+ assert tuple(res) == (1, 0, 0), res
+
+
+def test_syspath_prepend(mp: MonkeyPatch) -> None:
+ old = list(sys.path)
+ mp.syspath_prepend("world")
+ mp.syspath_prepend("hello")
+ assert sys.path[0] == "hello"
+ assert sys.path[1] == "world"
+ mp.undo()
+ assert sys.path == old
+ mp.undo()
+ assert sys.path == old
+
+
+def test_syspath_prepend_double_undo(mp: MonkeyPatch) -> None:
+ old_syspath = sys.path[:]
+ try:
+ mp.syspath_prepend("hello world")
+ mp.undo()
+ sys.path.append("more hello world")
+ mp.undo()
+ assert sys.path[-1] == "more hello world"
+ finally:
+ sys.path[:] = old_syspath
+
+
+def test_chdir_with_path_local(mp: MonkeyPatch, tmp_path: Path) -> None:
+ mp.chdir(tmp_path)
+ assert os.getcwd() == str(tmp_path)
+
+
+def test_chdir_with_str(mp: MonkeyPatch, tmp_path: Path) -> None:
+ mp.chdir(str(tmp_path))
+ assert os.getcwd() == str(tmp_path)
+
+
+def test_chdir_undo(mp: MonkeyPatch, tmp_path: Path) -> None:
+ cwd = os.getcwd()
+ mp.chdir(tmp_path)
+ mp.undo()
+ assert os.getcwd() == cwd
+
+
+def test_chdir_double_undo(mp: MonkeyPatch, tmp_path: Path) -> None:
+ mp.chdir(str(tmp_path))
+ mp.undo()
+ os.chdir(tmp_path)
+ mp.undo()
+ assert os.getcwd() == str(tmp_path)
+
+
+def test_issue185_time_breaks(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import time
+ def test_m(monkeypatch):
+ def f():
+ raise Exception
+ monkeypatch.setattr(time, "time", f)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *1 passed*
+ """
+ )
+
+
+def test_importerror(pytester: Pytester) -> None:
+ p = pytester.mkpydir("package")
+ p.joinpath("a.py").write_text(
+ textwrap.dedent(
+ """\
+ import doesnotexist
+
+ x = 1
+ """
+ )
+ )
+ pytester.path.joinpath("test_importerror.py").write_text(
+ textwrap.dedent(
+ """\
+ def test_importerror(monkeypatch):
+ monkeypatch.setattr('package.a.x', 2)
+ """
+ )
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *import error in package.a: No module named 'doesnotexist'*
+ """
+ )
+
+
+class Sample:
+ @staticmethod
+ def hello() -> bool:
+ return True
+
+
+class SampleInherit(Sample):
+ pass
+
+
+@pytest.mark.parametrize(
+ "Sample",
+ [Sample, SampleInherit],
+ ids=["new", "new-inherit"],
+)
+def test_issue156_undo_staticmethod(Sample: Type[Sample]) -> None:
+ monkeypatch = MonkeyPatch()
+
+ monkeypatch.setattr(Sample, "hello", None)
+ assert Sample.hello is None
+
+ monkeypatch.undo() # type: ignore[unreachable]
+ assert Sample.hello()
+
+
+def test_undo_class_descriptors_delattr() -> None:
+ class SampleParent:
+ @classmethod
+ def hello(_cls):
+ pass
+
+ @staticmethod
+ def world():
+ pass
+
+ class SampleChild(SampleParent):
+ pass
+
+ monkeypatch = MonkeyPatch()
+
+ original_hello = SampleChild.hello
+ original_world = SampleChild.world
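+ # undo() must restore the classmethod/staticmethod descriptors on the parent
+ # so that lookups through the child class resolve to the originals again.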
+ monkeypatch.delattr(SampleParent, "hello")
+ monkeypatch.delattr(SampleParent, "world")
+ assert getattr(SampleParent, "hello", None) is None
+ assert getattr(SampleParent, "world", None) is None
+
+ monkeypatch.undo()
+ assert original_hello == SampleChild.hello
+ assert original_world == SampleChild.world
+
+
+def test_issue1338_name_resolving() -> None:
+ pytest.importorskip("requests")
+ monkeypatch = MonkeyPatch()
+ try:
+ monkeypatch.delattr("requests.sessions.Session.request")
+ finally:
+ monkeypatch.undo()
+
+
+def test_context() -> None:
+ monkeypatch = MonkeyPatch()
+
+ import functools
+ import inspect
+
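+ # Attribute changes made inside the context manager are undone automatically on exit.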
+ with monkeypatch.context() as m:
+ m.setattr(functools, "partial", 3)
+ assert not inspect.isclass(functools.partial)
+ assert inspect.isclass(functools.partial)
+
+
+def test_context_classmethod() -> None:
+ class A:
+ x = 1
+
+ with MonkeyPatch.context() as m:
+ m.setattr(A, "x", 2)
+ assert A.x == 2
+ assert A.x == 1
+
+
+def test_syspath_prepend_with_namespace_packages(
+ pytester: Pytester, monkeypatch: MonkeyPatch
+) -> None:
+ for dirname in "hello", "world":
+ d = pytester.mkdir(dirname)
+ ns = d.joinpath("ns_pkg")
+ ns.mkdir()
+ ns.joinpath("__init__.py").write_text(
+ "__import__('pkg_resources').declare_namespace(__name__)"
+ )
+ lib = ns.joinpath(dirname)
+ lib.mkdir()
+ lib.joinpath("__init__.py").write_text("def check(): return %r" % dirname)
+
+ monkeypatch.syspath_prepend("hello")
+ import ns_pkg.hello
+
+ assert ns_pkg.hello.check() == "hello"
+
+ with pytest.raises(ImportError):
+ import ns_pkg.world
+
+ # Prepending should call fixup_namespace_packages.
+ monkeypatch.syspath_prepend("world")
+ import ns_pkg.world
+
+ assert ns_pkg.world.check() == "world"
+
+ # Should invalidate caches via importlib.invalidate_caches.
+ modules_tmpdir = pytester.mkdir("modules_tmpdir")
+ monkeypatch.syspath_prepend(str(modules_tmpdir))
+ modules_tmpdir.joinpath("main_app.py").write_text("app = True")
+ from main_app import app # noqa: F401
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_nodes.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_nodes.py
new file mode 100644
index 0000000000..df1439e1c4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_nodes.py
@@ -0,0 +1,167 @@
+import re
+import warnings
+from pathlib import Path
+from typing import cast
+from typing import List
+from typing import Type
+
+import pytest
+from _pytest import nodes
+from _pytest.compat import legacy_path
+from _pytest.outcomes import OutcomeException
+from _pytest.pytester import Pytester
+from _pytest.warning_types import PytestWarning
+
+
+@pytest.mark.parametrize(
+ ("nodeid", "expected"),
+ (
+ ("", [""]),
+ ("a", ["", "a"]),
+ ("aa/b", ["", "aa", "aa/b"]),
+ ("a/b/c", ["", "a", "a/b", "a/b/c"]),
+ ("a/bbb/c::D", ["", "a", "a/bbb", "a/bbb/c", "a/bbb/c::D"]),
+ ("a/b/c::D::eee", ["", "a", "a/b", "a/b/c", "a/b/c::D", "a/b/c::D::eee"]),
+ ("::xx", ["", "::xx"]),
+ # / only considered until first ::
+ ("a/b/c::D/d::e", ["", "a", "a/b", "a/b/c", "a/b/c::D/d", "a/b/c::D/d::e"]),
+ # : alone is not a separator.
+ ("a/b::D:e:f::g", ["", "a", "a/b", "a/b::D:e:f", "a/b::D:e:f::g"]),
+ # / not considered if a part of a test name
+ ("a/b::c/d::e[/test]", ["", "a", "a/b", "a/b::c/d", "a/b::c/d::e[/test]"]),
+ ),
+)
+def test_iterparentnodeids(nodeid: str, expected: List[str]) -> None:
+ result = list(nodes.iterparentnodeids(nodeid))
+ assert result == expected
+
+
+def test_node_from_parent_disallowed_arguments() -> None:
+ with pytest.raises(TypeError, match="session is"):
+ nodes.Node.from_parent(None, session=None) # type: ignore[arg-type]
+ with pytest.raises(TypeError, match="config is"):
+ nodes.Node.from_parent(None, config=None) # type: ignore[arg-type]
+
+
+def test_node_direct_construction_deprecated() -> None:
+ with pytest.raises(
+ OutcomeException,
+ match=(
+ "Direct construction of _pytest.nodes.Node has been deprecated, please "
+ "use _pytest.nodes.Node.from_parent.\nSee "
+ "https://docs.pytest.org/en/stable/deprecations.html#node-construction-changed-to-node-from-parent"
+ " for more details."
+ ),
+ ):
+ nodes.Node(None, session=None) # type: ignore[arg-type]
+
+
+def test_subclassing_both_item_and_collector_deprecated(
+ request, tmp_path: Path
+) -> None:
+ """
+ Verify that we warn on diamond inheritance and that legacy inheritance
+ constructors with missing args, as found in plugins, are handled correctly.
+ """
+
+ # We do not expect any warning messages to be issued during class definition.
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+
+ class SoWrong(nodes.Item, nodes.File):
+ def __init__(self, fspath, parent):
+ """Legacy ctor with legacy call # don't wana see"""
+ super().__init__(fspath, parent)
+
+ with pytest.warns(PytestWarning) as rec:
+ SoWrong.from_parent(
+ request.session, fspath=legacy_path(tmp_path / "broken.txt")
+ )
+ messages = [str(x.message) for x in rec]
+ assert any(
+ re.search(".*SoWrong.* not using a cooperative constructor.*", x)
+ for x in messages
+ )
+ assert any(
+ re.search("(?m)SoWrong .* should not be a collector", x) for x in messages
+ )
+
+
+@pytest.mark.parametrize(
+ "warn_type, msg", [(DeprecationWarning, "deprecated"), (PytestWarning, "pytest")]
+)
+def test_node_warn_is_no_longer_only_pytest_warnings(
+ pytester: Pytester, warn_type: Type[Warning], msg: str
+) -> None:
+ items = pytester.getitems(
+ """
+ def test():
+ pass
+ """
+ )
+ with pytest.warns(warn_type, match=msg):
+ items[0].warn(warn_type(msg))
+
+
+def test_node_warning_enforces_warning_types(pytester: Pytester) -> None:
+ items = pytester.getitems(
+ """
+ def test():
+ pass
+ """
+ )
+ with pytest.raises(
+ ValueError, match="warning must be an instance of Warning or subclass"
+ ):
+ items[0].warn(Exception("ok")) # type: ignore[arg-type]
+
+
+def test__check_initialpaths_for_relpath() -> None:
+ """Ensure that it handles dirs, and does not always use dirname."""
+ cwd = Path.cwd()
+
+ class FakeSession1:
+ _initialpaths = frozenset({cwd})
+
+ session = cast(pytest.Session, FakeSession1)
+
+ assert nodes._check_initialpaths_for_relpath(session, cwd) == ""
+
+ sub = cwd / "file"
+
+ class FakeSession2:
+ _initialpaths = frozenset({cwd})
+
+ session = cast(pytest.Session, FakeSession2)
+
+ assert nodes._check_initialpaths_for_relpath(session, sub) == "file"
+
+ outside = Path("/outside-this-does-not-exist")
+ assert nodes._check_initialpaths_for_relpath(session, outside) is None
+
+
+def test_failure_with_changed_cwd(pytester: Pytester) -> None:
+ """
+ Test failure lines should use absolute paths if cwd has changed since
+ invocation, so the path is correct (#6428).
+ """
+ p = pytester.makepyfile(
+ """
+ import os
+ import pytest
+
+ @pytest.fixture
+ def private_dir():
+ out_dir = 'ddd'
+ os.mkdir(out_dir)
+ old_dir = os.getcwd()
+ os.chdir(out_dir)
+ yield out_dir
+ os.chdir(old_dir)
+
+ def test_show_wrong_path(private_dir):
+ assert False
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines([str(p) + ":*: AssertionError", "*1 failed in *"])
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_nose.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_nose.py
new file mode 100644
index 0000000000..1ded8854bb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_nose.py
@@ -0,0 +1,498 @@
+import pytest
+from _pytest.pytester import Pytester
+
+
+def setup_module(mod):
+ mod.nose = pytest.importorskip("nose")
+
+
+def test_nose_setup(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ values = []
+ from nose.tools import with_setup
+
+ @with_setup(lambda: values.append(1), lambda: values.append(2))
+ def test_hello():
+ assert values == [1]
+
+ def test_world():
+ assert values == [1,2]
+
+ test_hello.setup = lambda: values.append(1)
+ test_hello.teardown = lambda: values.append(2)
+ """
+ )
+ result = pytester.runpytest(p, "-p", "nose")
+ result.assert_outcomes(passed=2)
+
+
+def test_setup_func_with_setup_decorator() -> None:
+ from _pytest.nose import call_optional
+
+ values = []
+
+ class A:
+ @pytest.fixture(autouse=True)
+ def f(self):
+ values.append(1)
+
+ call_optional(A(), "f")
+ assert not values
+
+
+def test_setup_func_not_callable() -> None:
+ from _pytest.nose import call_optional
+
+ class A:
+ f = 1
+
+ call_optional(A(), "f")
+
+
+def test_nose_setup_func(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ from nose.tools import with_setup
+
+ values = []
+
+ def my_setup():
+ a = 1
+ values.append(a)
+
+ def my_teardown():
+ b = 2
+ values.append(b)
+
+ @with_setup(my_setup, my_teardown)
+ def test_hello():
+ print(values)
+ assert values == [1]
+
+ def test_world():
+ print(values)
+ assert values == [1,2]
+
+ """
+ )
+ result = pytester.runpytest(p, "-p", "nose")
+ result.assert_outcomes(passed=2)
+
+
+def test_nose_setup_func_failure(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ from nose.tools import with_setup
+
+ values = []
+ my_setup = lambda x: 1
+ my_teardown = lambda x: 2
+
+ @with_setup(my_setup, my_teardown)
+ def test_hello():
+ print(values)
+ assert values == [1]
+
+ def test_world():
+ print(values)
+ assert values == [1,2]
+
+ """
+ )
+ result = pytester.runpytest(p, "-p", "nose")
+ result.stdout.fnmatch_lines(["*TypeError: <lambda>()*"])
+
+
+def test_nose_setup_func_failure_2(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ values = []
+
+ my_setup = 1
+ my_teardown = 2
+
+ def test_hello():
+ assert values == []
+
+ test_hello.setup = my_setup
+ test_hello.teardown = my_teardown
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+def test_nose_setup_partial(pytester: Pytester) -> None:
+ pytest.importorskip("functools")
+ p = pytester.makepyfile(
+ """
+ from functools import partial
+
+ values = []
+
+ def my_setup(x):
+ a = x
+ values.append(a)
+
+ def my_teardown(x):
+ b = x
+ values.append(b)
+
+ my_setup_partial = partial(my_setup, 1)
+ my_teardown_partial = partial(my_teardown, 2)
+
+ def test_hello():
+ print(values)
+ assert values == [1]
+
+ def test_world():
+ print(values)
+ assert values == [1,2]
+
+ test_hello.setup = my_setup_partial
+ test_hello.teardown = my_teardown_partial
+ """
+ )
+ result = pytester.runpytest(p, "-p", "nose")
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_module_level_setup(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ from nose.tools import with_setup
+ items = {}
+
+ def setup():
+ items.setdefault("setup", []).append("up")
+
+ def teardown():
+ items.setdefault("setup", []).append("down")
+
+ def setup2():
+ items.setdefault("setup2", []).append("up")
+
+ def teardown2():
+ items.setdefault("setup2", []).append("down")
+
+ def test_setup_module_setup():
+ assert items["setup"] == ["up"]
+
+ def test_setup_module_setup_again():
+ assert items["setup"] == ["up"]
+
+ @with_setup(setup2, teardown2)
+ def test_local_setup():
+ assert items["setup"] == ["up"]
+ assert items["setup2"] == ["up"]
+
+ @with_setup(setup2, teardown2)
+ def test_local_setup_again():
+ assert items["setup"] == ["up"]
+ assert items["setup2"] == ["up", "down", "up"]
+ """
+ )
+ result = pytester.runpytest("-p", "nose")
+ result.stdout.fnmatch_lines(["*4 passed*"])
+
+
+def test_nose_style_setup_teardown(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ values = []
+
+ def setup_module():
+ values.append(1)
+
+ def teardown_module():
+ del values[0]
+
+ def test_hello():
+ assert values == [1]
+
+ def test_world():
+ assert values == [1]
+ """
+ )
+ result = pytester.runpytest("-p", "nose")
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_fixtures_nose_setup_issue8394(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def setup_module():
+ pass
+
+ def teardown_module():
+ pass
+
+ def setup_function(func):
+ pass
+
+ def teardown_function(func):
+ pass
+
+ def test_world():
+ pass
+
+ class Test(object):
+ def setup_class(cls):
+ pass
+
+ def teardown_class(cls):
+ pass
+
+ def setup_method(self, meth):
+ pass
+
+ def teardown_method(self, meth):
+ pass
+
+ def test_method(self): pass
+ """
+ )
+ match = "*no docstring available*"
+ result = pytester.runpytest("--fixtures")
+ assert result.ret == 0
+ result.stdout.no_fnmatch_line(match)
+
+ result = pytester.runpytest("--fixtures", "-v")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines([match, match, match, match])
+
+
+def test_nose_setup_ordering(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def setup_module(mod):
+ mod.visited = True
+
+ class TestClass(object):
+ def setup(self):
+ assert visited
+ self.visited_cls = True
+ def test_first(self):
+ assert visited
+ assert self.visited_cls
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_apiwrapper_problem_issue260(pytester: Pytester) -> None:
+ # this would end up trying to call an optional teardown on the class;
+ # for plain unittests we don't want nose behaviour
+ pytester.makepyfile(
+ """
+ import unittest
+ class TestCase(unittest.TestCase):
+ def setup(self):
+ #should not be called in unittest testcases
+ assert 0, 'setup'
+ def teardown(self):
+ #should not be called in unittest testcases
+ assert 0, 'teardown'
+ def setUp(self):
+ print('setup')
+ def tearDown(self):
+ print('teardown')
+ def test_fun(self):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(passed=1)
+
+
+def test_setup_teardown_linking_issue265(pytester: Pytester) -> None:
+ # we accidentally didn't integrate nose setupstate with normal setupstate
+ # this test ensures that won't happen again
+ pytester.makepyfile(
+ '''
+ import pytest
+
+ class TestGeneric(object):
+ def test_nothing(self):
+ """Tests the API of the implementation (for generic and specialized)."""
+
+ @pytest.mark.skipif("True", reason=
+ "Skip tests to check if teardown is skipped as well.")
+ class TestSkipTeardown(TestGeneric):
+
+ def setup(self):
+ """Sets up my specialized implementation for $COOL_PLATFORM."""
+ raise Exception("should not call setup for skipped tests")
+
+ def teardown(self):
+ """Undoes the setup."""
+ raise Exception("should not call teardown for skipped tests")
+ '''
+ )
+ reprec = pytester.runpytest()
+ reprec.assert_outcomes(passed=1, skipped=1)
+
+
+def test_SkipTest_during_collection(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import nose
+ raise nose.SkipTest("during collection")
+ def test_failing():
+ assert False
+ """
+ )
+ result = pytester.runpytest(p)
+ result.assert_outcomes(skipped=1, warnings=1)
+
+
+def test_SkipTest_in_test(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import nose
+
+ def test_skipping():
+ raise nose.SkipTest("in test")
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(skipped=1)
+
+
+def test_istest_function_decorator(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import nose.tools
+ @nose.tools.istest
+ def not_test_prefix():
+ pass
+ """
+ )
+ result = pytester.runpytest(p)
+ result.assert_outcomes(passed=1)
+
+
+def test_nottest_function_decorator(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import nose.tools
+ @nose.tools.nottest
+ def test_prefix():
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ assert not reprec.getfailedcollections()
+ calls = reprec.getreports("pytest_runtest_logreport")
+ assert not calls
+
+
+def test_istest_class_decorator(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import nose.tools
+ @nose.tools.istest
+ class NotTestPrefix(object):
+ def test_method(self):
+ pass
+ """
+ )
+ result = pytester.runpytest(p)
+ result.assert_outcomes(passed=1)
+
+
+def test_nottest_class_decorator(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import nose.tools
+ @nose.tools.nottest
+ class TestPrefix(object):
+ def test_method(self):
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ assert not reprec.getfailedcollections()
+ calls = reprec.getreports("pytest_runtest_logreport")
+ assert not calls
+
+
+def test_skip_test_with_unicode(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """\
+ import unittest
+ class TestClass():
+ def test_io(self):
+ raise unittest.SkipTest('😊')
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["* 1 skipped *"])
+
+
+def test_raises(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ from nose.tools import raises
+
+ @raises(RuntimeError)
+ def test_raises_runtimeerror():
+ raise RuntimeError
+
+ @raises(Exception)
+ def test_raises_baseexception_not_caught():
+ raise BaseException
+
+ @raises(BaseException)
+ def test_raises_baseexception_caught():
+ raise BaseException
+ """
+ )
+ result = pytester.runpytest("-vv")
+ result.stdout.fnmatch_lines(
+ [
+ "test_raises.py::test_raises_runtimeerror PASSED*",
+ "test_raises.py::test_raises_baseexception_not_caught FAILED*",
+ "test_raises.py::test_raises_baseexception_caught PASSED*",
+ "*= FAILURES =*",
+ "*_ test_raises_baseexception_not_caught _*",
+ "",
+ "arg = (), kw = {}",
+ "",
+ " def newfunc(*arg, **kw):",
+ " try:",
+ "> func(*arg, **kw)",
+ "",
+ "*/nose/*: ",
+ "_ _ *",
+ "",
+ " @raises(Exception)",
+ " def test_raises_baseexception_not_caught():",
+ "> raise BaseException",
+ "E BaseException",
+ "",
+ "test_raises.py:9: BaseException",
+ "* 1 failed, 2 passed *",
+ ]
+ )
+
+
+def test_nose_setup_skipped_if_non_callable(pytester: Pytester) -> None:
+ """Regression test for #9391."""
+ p = pytester.makepyfile(
+ __init__="",
+ setup="""
+ """,
+ teardown="""
+ """,
+ test_it="""
+ from . import setup, teardown
+
+ def test_it():
+ pass
+ """,
+ )
+ result = pytester.runpytest(p, "-p", "nose")
+ assert result.ret == 0
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_parseopt.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_parseopt.py
new file mode 100644
index 0000000000..28529d0437
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_parseopt.py
@@ -0,0 +1,344 @@
+import argparse
+import os
+import shlex
+import subprocess
+import sys
+from pathlib import Path
+
+import pytest
+from _pytest.config import argparsing as parseopt
+from _pytest.config.exceptions import UsageError
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+
+
+@pytest.fixture
+def parser() -> parseopt.Parser:
+ return parseopt.Parser(_ispytest=True)
+
+
+class TestParser:
+ def test_no_help_by_default(self) -> None:
+ parser = parseopt.Parser(usage="xyz", _ispytest=True)
+ pytest.raises(UsageError, lambda: parser.parse(["-h"]))
+
+ def test_custom_prog(self, parser: parseopt.Parser) -> None:
+ """Custom prog can be set for `argparse.ArgumentParser`."""
+ assert parser._getparser().prog == os.path.basename(sys.argv[0])
+ parser.prog = "custom-prog"
+ assert parser._getparser().prog == "custom-prog"
+
+ def test_argument(self) -> None:
+ with pytest.raises(parseopt.ArgumentError):
+ # need a short or long option
+ argument = parseopt.Argument()
+ argument = parseopt.Argument("-t")
+ assert argument._short_opts == ["-t"]
+ assert argument._long_opts == []
+ assert argument.dest == "t"
+ argument = parseopt.Argument("-t", "--test")
+ assert argument._short_opts == ["-t"]
+ assert argument._long_opts == ["--test"]
+ assert argument.dest == "test"
+ argument = parseopt.Argument("-t", "--test", dest="abc")
+ assert argument.dest == "abc"
+ assert str(argument) == (
+ "Argument(_short_opts: ['-t'], _long_opts: ['--test'], dest: 'abc')"
+ )
+
+ def test_argument_type(self) -> None:
+ argument = parseopt.Argument("-t", dest="abc", type=int)
+ assert argument.type is int
+ argument = parseopt.Argument("-t", dest="abc", type=str)
+ assert argument.type is str
+ argument = parseopt.Argument("-t", dest="abc", type=float)
+ assert argument.type is float
+ with pytest.warns(DeprecationWarning):
+ with pytest.raises(KeyError):
+ argument = parseopt.Argument("-t", dest="abc", type="choice")
+ argument = parseopt.Argument(
+ "-t", dest="abc", type=str, choices=["red", "blue"]
+ )
+ assert argument.type is str
+
+ def test_argument_processopt(self) -> None:
+ argument = parseopt.Argument("-t", type=int)
+ argument.default = 42
+ argument.dest = "abc"
+ res = argument.attrs()
+ assert res["default"] == 42
+ assert res["dest"] == "abc"
+
+ def test_group_add_and_get(self, parser: parseopt.Parser) -> None:
+ group = parser.getgroup("hello", description="desc")
+ assert group.name == "hello"
+ assert group.description == "desc"
+
+ def test_getgroup_simple(self, parser: parseopt.Parser) -> None:
+ group = parser.getgroup("hello", description="desc")
+ assert group.name == "hello"
+ assert group.description == "desc"
+ group2 = parser.getgroup("hello")
+ assert group2 is group
+
+ def test_group_ordering(self, parser: parseopt.Parser) -> None:
+ parser.getgroup("1")
+ parser.getgroup("2")
+ parser.getgroup("3", after="1")
+ groups = parser._groups
+ groups_names = [x.name for x in groups]
+ assert groups_names == list("132")
+
+ def test_group_addoption(self) -> None:
+ group = parseopt.OptionGroup("hello", _ispytest=True)
+ group.addoption("--option1", action="store_true")
+ assert len(group.options) == 1
+ assert isinstance(group.options[0], parseopt.Argument)
+
+ def test_group_addoption_conflict(self) -> None:
+ group = parseopt.OptionGroup("hello again", _ispytest=True)
+ group.addoption("--option1", "--option-1", action="store_true")
+ with pytest.raises(ValueError) as err:
+ group.addoption("--option1", "--option-one", action="store_true")
+ assert str({"--option1"}) in str(err.value)
+
+ def test_group_shortopt_lowercase(self, parser: parseopt.Parser) -> None:
+ group = parser.getgroup("hello")
+ with pytest.raises(ValueError):
+ group.addoption("-x", action="store_true")
+ assert len(group.options) == 0
+ group._addoption("-x", action="store_true")
+ assert len(group.options) == 1
+
+ def test_parser_addoption(self, parser: parseopt.Parser) -> None:
+ group = parser.getgroup("custom options")
+ assert len(group.options) == 0
+ group.addoption("--option1", action="store_true")
+ assert len(group.options) == 1
+
+ def test_parse(self, parser: parseopt.Parser) -> None:
+ parser.addoption("--hello", dest="hello", action="store")
+ args = parser.parse(["--hello", "world"])
+ assert args.hello == "world"
+ assert not getattr(args, parseopt.FILE_OR_DIR)
+
+ def test_parse2(self, parser: parseopt.Parser) -> None:
+ args = parser.parse([Path(".")])
+ assert getattr(args, parseopt.FILE_OR_DIR)[0] == "."
+
+ def test_parse_known_args(self, parser: parseopt.Parser) -> None:
+ parser.parse_known_args([Path(".")])
+ parser.addoption("--hello", action="store_true")
+ ns = parser.parse_known_args(["x", "--y", "--hello", "this"])
+ assert ns.hello
+ assert ns.file_or_dir == ["x"]
+
+ def test_parse_known_and_unknown_args(self, parser: parseopt.Parser) -> None:
+ parser.addoption("--hello", action="store_true")
+ ns, unknown = parser.parse_known_and_unknown_args(
+ ["x", "--y", "--hello", "this"]
+ )
+ assert ns.hello
+ assert ns.file_or_dir == ["x"]
+ assert unknown == ["--y", "this"]
+
+ def test_parse_will_set_default(self, parser: parseopt.Parser) -> None:
+ parser.addoption("--hello", dest="hello", default="x", action="store")
+ option = parser.parse([])
+ assert option.hello == "x"
+ del option.hello
+ parser.parse_setoption([], option)
+ assert option.hello == "x"
+
+ def test_parse_setoption(self, parser: parseopt.Parser) -> None:
+ parser.addoption("--hello", dest="hello", action="store")
+ parser.addoption("--world", dest="world", default=42)
+
+ option = argparse.Namespace()
+ args = parser.parse_setoption(["--hello", "world"], option)
+ assert option.hello == "world"
+ assert option.world == 42
+ assert not args
+
+ def test_parse_special_destination(self, parser: parseopt.Parser) -> None:
+ parser.addoption("--ultimate-answer", type=int)
+ args = parser.parse(["--ultimate-answer", "42"])
+ assert args.ultimate_answer == 42
+
+ def test_parse_split_positional_arguments(self, parser: parseopt.Parser) -> None:
+ parser.addoption("-R", action="store_true")
+ parser.addoption("-S", action="store_false")
+ args = parser.parse(["-R", "4", "2", "-S"])
+ assert getattr(args, parseopt.FILE_OR_DIR) == ["4", "2"]
+ args = parser.parse(["-R", "-S", "4", "2", "-R"])
+ assert getattr(args, parseopt.FILE_OR_DIR) == ["4", "2"]
+ assert args.R is True
+ assert args.S is False
+ args = parser.parse(["-R", "4", "-S", "2"])
+ assert getattr(args, parseopt.FILE_OR_DIR) == ["4", "2"]
+ assert args.R is True
+ assert args.S is False
+
+ def test_parse_defaultgetter(self) -> None:
+ def defaultget(option):
+ if not hasattr(option, "type"):
+ return
+ if option.type is int:
+ option.default = 42
+ elif option.type is str:
+ option.default = "world"
+
+ parser = parseopt.Parser(processopt=defaultget, _ispytest=True)
+ parser.addoption("--this", dest="this", type=int, action="store")
+ parser.addoption("--hello", dest="hello", type=str, action="store")
+ parser.addoption("--no", dest="no", action="store_true")
+ option = parser.parse([])
+ assert option.hello == "world"
+ assert option.this == 42
+ assert option.no is False
+
+ def test_drop_short_helper(self) -> None:
+ parser = argparse.ArgumentParser(
+ formatter_class=parseopt.DropShorterLongHelpFormatter, allow_abbrev=False
+ )
+ parser.add_argument(
+ "-t", "--twoword", "--duo", "--two-word", "--two", help="foo"
+ )
+ # throws error on --deux only!
+ parser.add_argument(
+ "-d", "--deuxmots", "--deux-mots", action="store_true", help="foo"
+ )
+ parser.add_argument("-s", action="store_true", help="single short")
+ parser.add_argument("--abc", "-a", action="store_true", help="bar")
+ parser.add_argument("--klm", "-k", "--kl-m", action="store_true", help="bar")
+ parser.add_argument(
+ "-P", "--pq-r", "-p", "--pqr", action="store_true", help="bar"
+ )
+ parser.add_argument(
+ "--zwei-wort", "--zweiwort", "--zweiwort", action="store_true", help="bar"
+ )
+ parser.add_argument(
+ "-x", "--exit-on-first", "--exitfirst", action="store_true", help="spam"
+ )
+ parser.add_argument("files_and_dirs", nargs="*")
+ args = parser.parse_args(["-k", "--duo", "hallo", "--exitfirst"])
+ assert args.twoword == "hallo"
+ assert args.klm is True
+ assert args.zwei_wort is False
+ assert args.exit_on_first is True
+ assert args.s is False
+ args = parser.parse_args(["--deux-mots"])
+ with pytest.raises(AttributeError):
+ assert args.deux_mots is True
+ assert args.deuxmots is True
+ args = parser.parse_args(["file", "dir"])
+ assert "|".join(args.files_and_dirs) == "file|dir"
+
+ def test_drop_short_0(self, parser: parseopt.Parser) -> None:
+ parser.addoption("--funcarg", "--func-arg", action="store_true")
+ parser.addoption("--abc-def", "--abc-def", action="store_true")
+ parser.addoption("--klm-hij", action="store_true")
+ with pytest.raises(UsageError):
+ parser.parse(["--funcarg", "--k"])
+
+ def test_drop_short_2(self, parser: parseopt.Parser) -> None:
+ parser.addoption("--func-arg", "--doit", action="store_true")
+ args = parser.parse(["--doit"])
+ assert args.func_arg is True
+
+ def test_drop_short_3(self, parser: parseopt.Parser) -> None:
+ parser.addoption("--func-arg", "--funcarg", "--doit", action="store_true")
+ args = parser.parse(["abcd"])
+ assert args.func_arg is False
+ assert args.file_or_dir == ["abcd"]
+
+ def test_drop_short_help0(self, parser: parseopt.Parser) -> None:
+ parser.addoption("--func-args", "--doit", help="foo", action="store_true")
+ parser.parse([])
+ help = parser.optparser.format_help()
+ assert "--func-args, --doit foo" in help
+
+ # testing would be more helpful if the full help output were generated
+ def test_drop_short_help1(self, parser: parseopt.Parser) -> None:
+ group = parser.getgroup("general")
+ group.addoption("--doit", "--func-args", action="store_true", help="foo")
+ group._addoption(
+ "-h",
+ "--help",
+ action="store_true",
+ dest="help",
+ help="show help message and configuration info",
+ )
+ parser.parse(["-h"])
+ help = parser.optparser.format_help()
+ assert "-doit, --func-args foo" in help
+
+ def test_multiple_metavar_help(self, parser: parseopt.Parser) -> None:
+ """
+ Help text for options with a metavar tuple should display help
+ in the form "--preferences=value1 value2 value3" (#2004).
+ """
+ group = parser.getgroup("general")
+ group.addoption(
+ "--preferences", metavar=("value1", "value2", "value3"), nargs=3
+ )
+ group._addoption("-h", "--help", action="store_true", dest="help")
+ parser.parse(["-h"])
+ help = parser.optparser.format_help()
+ assert "--preferences=value1 value2 value3" in help
+
+
+def test_argcomplete(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
+ try:
+ bash_version = subprocess.run(
+ ["bash", "--version"],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ check=True,
+ universal_newlines=True,
+ ).stdout
+ except (OSError, subprocess.CalledProcessError):
+ pytest.skip("bash is not available")
+ if "GNU bash" not in bash_version:
+ # See #7518.
+ pytest.skip("not a real bash")
+
+ script = str(pytester.path.joinpath("test_argcomplete"))
+
+ with open(str(script), "w") as fp:
+ # redirecting the output from argcomplete (fds 8 and 9) to stdout and stderr is not trivial
+ # http://stackoverflow.com/q/12589419/1307905
+ # so we use bash
+ fp.write(
+ 'COMP_WORDBREAKS="$COMP_WORDBREAKS" {} -m pytest 8>&1 9>&2'.format(
+ shlex.quote(sys.executable)
+ )
+ )
+ # an alternative would be to extend Pytester.{run(),_run(),popen()} to accept
+ # a keyword argument env that replaces os.environ in popen or extends a copy
+ # of it; the advantage is that restoring it could not be forgotten
+ monkeypatch.setenv("_ARGCOMPLETE", "1")
+ monkeypatch.setenv("_ARGCOMPLETE_IFS", "\x0b")
+ monkeypatch.setenv("COMP_WORDBREAKS", " \\t\\n\"\\'><=;|&(:")
+
+ arg = "--fu"
+ monkeypatch.setenv("COMP_LINE", "pytest " + arg)
+ monkeypatch.setenv("COMP_POINT", str(len("pytest " + arg)))
+ result = pytester.run("bash", str(script), arg)
+ if result.ret == 255:
+ # argcomplete not found
+ pytest.skip("argcomplete not available")
+ elif not result.stdout.str():
+ pytest.skip(
+ "bash provided no output on stdout, argcomplete not available? (stderr={!r})".format(
+ result.stderr.str()
+ )
+ )
+ else:
+ result.stdout.fnmatch_lines(["--funcargs", "--fulltrace"])
+ os.mkdir("test_argcomplete.d")
+ arg = "test_argc"
+ monkeypatch.setenv("COMP_LINE", "pytest " + arg)
+ monkeypatch.setenv("COMP_POINT", str(len("pytest " + arg)))
+ result = pytester.run("bash", str(script), arg)
+ result.stdout.fnmatch_lines(["test_argcomplete", "test_argcomplete.d/"])
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_pastebin.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_pastebin.py
new file mode 100644
index 0000000000..b338519ae1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_pastebin.py
@@ -0,0 +1,184 @@
+import io
+from typing import List
+from typing import Union
+
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+
+
+class TestPasteCapture:
+ @pytest.fixture
+ def pastebinlist(self, monkeypatch, request) -> List[Union[str, bytes]]:
+ pastebinlist: List[Union[str, bytes]] = []
+ plugin = request.config.pluginmanager.getplugin("pastebin")
+ monkeypatch.setattr(plugin, "create_new_paste", pastebinlist.append)
+ return pastebinlist
+
+ def test_failed(self, pytester: Pytester, pastebinlist) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import pytest
+ def test_pass() -> None:
+ pass
+ def test_fail():
+ assert 0
+ def test_skip():
+ pytest.skip("")
+ """
+ )
+ reprec = pytester.inline_run(testpath, "--pastebin=failed")
+ assert len(pastebinlist) == 1
+ s = pastebinlist[0]
+ assert s.find("def test_fail") != -1
+ assert reprec.countoutcomes() == [1, 1, 1]
+
+ def test_all(self, pytester: Pytester, pastebinlist) -> None:
+ from _pytest.pytester import LineMatcher
+
+ testpath = pytester.makepyfile(
+ """
+ import pytest
+ def test_pass():
+ pass
+ def test_fail():
+ assert 0
+ def test_skip():
+ pytest.skip("")
+ """
+ )
+ reprec = pytester.inline_run(testpath, "--pastebin=all", "-v")
+ assert reprec.countoutcomes() == [1, 1, 1]
+ assert len(pastebinlist) == 1
+ contents = pastebinlist[0].decode("utf-8")
+ matcher = LineMatcher(contents.splitlines())
+ matcher.fnmatch_lines(
+ [
+ "*test_pass PASSED*",
+ "*test_fail FAILED*",
+ "*test_skip SKIPPED*",
+ "*== 1 failed, 1 passed, 1 skipped in *",
+ ]
+ )
+
+ def test_non_ascii_paste_text(self, pytester: Pytester, pastebinlist) -> None:
+ """Make sure that text which contains non-ascii characters is pasted
+ correctly. See #1219.
+ """
+ pytester.makepyfile(
+ test_unicode="""\
+ def test():
+ assert '☺' == 1
+ """
+ )
+ result = pytester.runpytest("--pastebin=all")
+ expected_msg = "*assert '☺' == 1*"
+ result.stdout.fnmatch_lines(
+ [
+ expected_msg,
+ "*== 1 failed in *",
+ "*Sending information to Paste Service*",
+ ]
+ )
+ assert len(pastebinlist) == 1
+
+
+class TestPaste:
+ @pytest.fixture
+ def pastebin(self, request):
+ return request.config.pluginmanager.getplugin("pastebin")
+
+ @pytest.fixture
+ def mocked_urlopen_fail(self, monkeypatch: MonkeyPatch):
+ """Monkeypatch the actual urlopen call to emulate an HTTP Error 400."""
+ calls = []
+
+ import urllib.error
+ import urllib.request
+
+ def mocked(url, data):
+ calls.append((url, data))
+ raise urllib.error.HTTPError(url, 400, "Bad request", {}, io.BytesIO())
+
+ monkeypatch.setattr(urllib.request, "urlopen", mocked)
+ return calls
+
+ @pytest.fixture
+ def mocked_urlopen_invalid(self, monkeypatch: MonkeyPatch):
+ """Monkeypatch the actual urlopen calls done by the internal plugin
+ function that connects to the bpaste service, but return a URL in an
+ unexpected format."""
+ calls = []
+
+ def mocked(url, data):
+ calls.append((url, data))
+
+ class DummyFile:
+ def read(self):
+ # part of the HTML of a normal response
+ return b'View <a href="/invalid/3c0c6750bd">raw</a>.'
+
+ return DummyFile()
+
+ import urllib.request
+
+ monkeypatch.setattr(urllib.request, "urlopen", mocked)
+ return calls
+
+ @pytest.fixture
+ def mocked_urlopen(self, monkeypatch: MonkeyPatch):
+ """Monkeypatch the actual urlopen calls done by the internal plugin
+ function that connects to the bpaste service."""
+ calls = []
+
+ def mocked(url, data):
+ calls.append((url, data))
+
+ class DummyFile:
+ def read(self):
+ # part of the HTML of a normal response
+ return b'View <a href="/raw/3c0c6750bd">raw</a>.'
+
+ return DummyFile()
+
+ import urllib.request
+
+ monkeypatch.setattr(urllib.request, "urlopen", mocked)
+ return calls
+
+ def test_pastebin_invalid_url(self, pastebin, mocked_urlopen_invalid) -> None:
+ result = pastebin.create_new_paste(b"full-paste-contents")
+ assert (
+ result
+ == "bad response: invalid format ('View <a href=\"/invalid/3c0c6750bd\">raw</a>.')"
+ )
+ assert len(mocked_urlopen_invalid) == 1
+
+ def test_pastebin_http_error(self, pastebin, mocked_urlopen_fail) -> None:
+ result = pastebin.create_new_paste(b"full-paste-contents")
+ assert result == "bad response: HTTP Error 400: Bad request"
+ assert len(mocked_urlopen_fail) == 1
+
+ def test_create_new_paste(self, pastebin, mocked_urlopen) -> None:
+ result = pastebin.create_new_paste(b"full-paste-contents")
+ assert result == "https://bpa.st/show/3c0c6750bd"
+ assert len(mocked_urlopen) == 1
+ url, data = mocked_urlopen[0]
+ assert type(data) is bytes
+ lexer = "text"
+ assert url == "https://bpa.st"
+ assert "lexer=%s" % lexer in data.decode()
+ assert "code=full-paste-contents" in data.decode()
+ assert "expiry=1week" in data.decode()
+
+ def test_create_new_paste_failure(self, pastebin, monkeypatch: MonkeyPatch) -> None:
+ import io
+ import urllib.request
+
+ def response(url, data):
+ stream = io.BytesIO(b"something bad occurred")
+ return stream
+
+ monkeypatch.setattr(urllib.request, "urlopen", response)
+ result = pastebin.create_new_paste(b"full-paste-contents")
+ assert result == "bad response: invalid format ('something bad occurred')"
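
The assertions above pin down the observable contract of the pastebin plugin: it POSTs form-encoded code/lexer/expiry fields to https://bpa.st, rewrites the "/raw/<id>" link found in the HTML response into a "https://bpa.st/show/<id>" URL, and returns a "bad response: ..." string on HTTP errors or unexpected markup. A rough sketch consistent with those assertions (an illustration of the behaviour being tested, not necessarily the plugin's exact code):

    import re
    from urllib.parse import urlencode
    from urllib.request import urlopen

    def create_new_paste_sketch(contents: bytes) -> str:
        # Hypothetical helper mirroring the behaviour asserted by TestPaste.
        params = {"code": contents, "lexer": "text", "expiry": "1week"}
        url = "https://bpa.st"
        try:
            html = urlopen(url, data=urlencode(params).encode("ascii")).read().decode("utf-8")
        except OSError as exc:  # urllib.error.HTTPError is an OSError subclass
            return "bad response: %s" % exc
        match = re.search(r'href="/raw/(\w+)"', html)
        if match is None:
            return "bad response: invalid format ('" + html + "')"
        return f"{url}/show/{match.group(1)}"
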
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_pathlib.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_pathlib.py
new file mode 100644
index 0000000000..5eb153e847
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_pathlib.py
@@ -0,0 +1,574 @@
+import os.path
+import pickle
+import sys
+import unittest.mock
+from pathlib import Path
+from textwrap import dedent
+from types import ModuleType
+from typing import Any
+from typing import Generator
+
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pathlib import bestrelpath
+from _pytest.pathlib import commonpath
+from _pytest.pathlib import ensure_deletable
+from _pytest.pathlib import fnmatch_ex
+from _pytest.pathlib import get_extended_length_path_str
+from _pytest.pathlib import get_lock_path
+from _pytest.pathlib import import_path
+from _pytest.pathlib import ImportPathMismatchError
+from _pytest.pathlib import insert_missing_modules
+from _pytest.pathlib import maybe_delete_a_numbered_dir
+from _pytest.pathlib import module_name_from_path
+from _pytest.pathlib import resolve_package_path
+from _pytest.pathlib import symlink_or_skip
+from _pytest.pathlib import visit
+from _pytest.tmpdir import TempPathFactory
+
+
+class TestFNMatcherPort:
+ """Test our port of py.common.FNMatcher (fnmatch_ex)."""
+
+ if sys.platform == "win32":
+ drv1 = "c:"
+ drv2 = "d:"
+ else:
+ drv1 = "/c"
+ drv2 = "/d"
+
+ @pytest.mark.parametrize(
+ "pattern, path",
+ [
+ ("*.py", "foo.py"),
+ ("*.py", "bar/foo.py"),
+ ("test_*.py", "foo/test_foo.py"),
+ ("tests/*.py", "tests/foo.py"),
+ (f"{drv1}/*.py", f"{drv1}/foo.py"),
+ (f"{drv1}/foo/*.py", f"{drv1}/foo/foo.py"),
+ ("tests/**/test*.py", "tests/foo/test_foo.py"),
+ ("tests/**/doc/test*.py", "tests/foo/bar/doc/test_foo.py"),
+ ("tests/**/doc/**/test*.py", "tests/foo/doc/bar/test_foo.py"),
+ ],
+ )
+ def test_matching(self, pattern: str, path: str) -> None:
+ assert fnmatch_ex(pattern, path)
+
+ def test_matching_abspath(self) -> None:
+ abspath = os.path.abspath(os.path.join("tests/foo.py"))
+ assert fnmatch_ex("tests/foo.py", abspath)
+
+ @pytest.mark.parametrize(
+ "pattern, path",
+ [
+ ("*.py", "foo.pyc"),
+ ("*.py", "foo/foo.pyc"),
+ ("tests/*.py", "foo/foo.py"),
+ (f"{drv1}/*.py", f"{drv2}/foo.py"),
+ (f"{drv1}/foo/*.py", f"{drv2}/foo/foo.py"),
+ ("tests/**/test*.py", "tests/foo.py"),
+ ("tests/**/test*.py", "foo/test_foo.py"),
+ ("tests/**/doc/test*.py", "tests/foo/bar/doc/foo.py"),
+ ("tests/**/doc/test*.py", "tests/foo/bar/test_foo.py"),
+ ],
+ )
+ def test_not_matching(self, pattern: str, path: str) -> None:
+ assert not fnmatch_ex(pattern, path)
+
+
+class TestImportPath:
+ """Most of the tests here were copied from py lib's tests for "py.local.path.pyimport".
+
+ Having our own pyimport-like function is in line with removing the py.path
+ dependency in the future.
+ """
+
+ @pytest.fixture(scope="session")
+ def path1(self, tmp_path_factory: TempPathFactory) -> Generator[Path, None, None]:
+ path = tmp_path_factory.mktemp("path")
+ self.setuptestfs(path)
+ yield path
+ assert path.joinpath("samplefile").exists()
+
+ def setuptestfs(self, path: Path) -> None:
+ # print "setting up test fs for", repr(path)
+ samplefile = path / "samplefile"
+ samplefile.write_text("samplefile\n")
+
+ execfile = path / "execfile"
+ execfile.write_text("x=42")
+
+ execfilepy = path / "execfile.py"
+ execfilepy.write_text("x=42")
+
+ d = {1: 2, "hello": "world", "answer": 42}
+ path.joinpath("samplepickle").write_bytes(pickle.dumps(d, 1))
+
+ sampledir = path / "sampledir"
+ sampledir.mkdir()
+ sampledir.joinpath("otherfile").touch()
+
+ otherdir = path / "otherdir"
+ otherdir.mkdir()
+ otherdir.joinpath("__init__.py").touch()
+
+ module_a = otherdir / "a.py"
+ module_a.write_text("from .b import stuff as result\n")
+ module_b = otherdir / "b.py"
+ module_b.write_text('stuff="got it"\n')
+ module_c = otherdir / "c.py"
+ module_c.write_text(
+ dedent(
+ """
+ import pluggy;
+ import otherdir.a
+ value = otherdir.a.result
+ """
+ )
+ )
+ module_d = otherdir / "d.py"
+ module_d.write_text(
+ dedent(
+ """
+ import pluggy;
+ from otherdir import a
+ value2 = a.result
+ """
+ )
+ )
+
+ def test_smoke_test(self, path1: Path) -> None:
+ obj = import_path(path1 / "execfile.py", root=path1)
+ assert obj.x == 42 # type: ignore[attr-defined]
+ assert obj.__name__ == "execfile"
+
+ def test_renamed_dir_creates_mismatch(
+ self, tmp_path: Path, monkeypatch: MonkeyPatch
+ ) -> None:
+ tmp_path.joinpath("a").mkdir()
+ p = tmp_path.joinpath("a", "test_x123.py")
+ p.touch()
+ import_path(p, root=tmp_path)
+ tmp_path.joinpath("a").rename(tmp_path.joinpath("b"))
+ with pytest.raises(ImportPathMismatchError):
+ import_path(tmp_path.joinpath("b", "test_x123.py"), root=tmp_path)
+
+ # Errors can be ignored.
+ monkeypatch.setenv("PY_IGNORE_IMPORTMISMATCH", "1")
+ import_path(tmp_path.joinpath("b", "test_x123.py"), root=tmp_path)
+
+ # PY_IGNORE_IMPORTMISMATCH=0 does not ignore error.
+ monkeypatch.setenv("PY_IGNORE_IMPORTMISMATCH", "0")
+ with pytest.raises(ImportPathMismatchError):
+ import_path(tmp_path.joinpath("b", "test_x123.py"), root=tmp_path)
+
+ def test_messy_name(self, tmp_path: Path) -> None:
+ # https://bitbucket.org/hpk42/py-trunk/issue/129
+ path = tmp_path / "foo__init__.py"
+ path.touch()
+ module = import_path(path, root=tmp_path)
+ assert module.__name__ == "foo__init__"
+
+ def test_dir(self, tmp_path: Path) -> None:
+ p = tmp_path / "hello_123"
+ p.mkdir()
+ p_init = p / "__init__.py"
+ p_init.touch()
+ m = import_path(p, root=tmp_path)
+ assert m.__name__ == "hello_123"
+ m = import_path(p_init, root=tmp_path)
+ assert m.__name__ == "hello_123"
+
+ def test_a(self, path1: Path) -> None:
+ otherdir = path1 / "otherdir"
+ mod = import_path(otherdir / "a.py", root=path1)
+ assert mod.result == "got it" # type: ignore[attr-defined]
+ assert mod.__name__ == "otherdir.a"
+
+ def test_b(self, path1: Path) -> None:
+ otherdir = path1 / "otherdir"
+ mod = import_path(otherdir / "b.py", root=path1)
+ assert mod.stuff == "got it" # type: ignore[attr-defined]
+ assert mod.__name__ == "otherdir.b"
+
+ def test_c(self, path1: Path) -> None:
+ otherdir = path1 / "otherdir"
+ mod = import_path(otherdir / "c.py", root=path1)
+ assert mod.value == "got it" # type: ignore[attr-defined]
+
+ def test_d(self, path1: Path) -> None:
+ otherdir = path1 / "otherdir"
+ mod = import_path(otherdir / "d.py", root=path1)
+ assert mod.value2 == "got it" # type: ignore[attr-defined]
+
+ def test_import_after(self, tmp_path: Path) -> None:
+ tmp_path.joinpath("xxxpackage").mkdir()
+ tmp_path.joinpath("xxxpackage", "__init__.py").touch()
+ mod1path = tmp_path.joinpath("xxxpackage", "module1.py")
+ mod1path.touch()
+ mod1 = import_path(mod1path, root=tmp_path)
+ assert mod1.__name__ == "xxxpackage.module1"
+ from xxxpackage import module1
+
+ assert module1 is mod1
+
+ def test_check_filepath_consistency(
+ self, monkeypatch: MonkeyPatch, tmp_path: Path
+ ) -> None:
+ name = "pointsback123"
+ p = tmp_path.joinpath(name + ".py")
+ p.touch()
+ for ending in (".pyc", ".pyo"):
+ mod = ModuleType(name)
+ pseudopath = tmp_path.joinpath(name + ending)
+ pseudopath.touch()
+ mod.__file__ = str(pseudopath)
+ monkeypatch.setitem(sys.modules, name, mod)
+ newmod = import_path(p, root=tmp_path)
+ assert mod == newmod
+ monkeypatch.undo()
+ mod = ModuleType(name)
+ pseudopath = tmp_path.joinpath(name + "123.py")
+ pseudopath.touch()
+ mod.__file__ = str(pseudopath)
+ monkeypatch.setitem(sys.modules, name, mod)
+ with pytest.raises(ImportPathMismatchError) as excinfo:
+ import_path(p, root=tmp_path)
+ modname, modfile, orig = excinfo.value.args
+ assert modname == name
+ assert modfile == str(pseudopath)
+ assert orig == p
+ assert issubclass(ImportPathMismatchError, ImportError)
+
+ def test_issue131_on__init__(self, tmp_path: Path) -> None:
+ # __init__.py files may be namespace packages, and thus the __file__ of an
+ # imported module may not be the __init__.py it was imported from
+ # see issue
+ tmp_path.joinpath("proja").mkdir()
+ p1 = tmp_path.joinpath("proja", "__init__.py")
+ p1.touch()
+ tmp_path.joinpath("sub", "proja").mkdir(parents=True)
+ p2 = tmp_path.joinpath("sub", "proja", "__init__.py")
+ p2.touch()
+ m1 = import_path(p1, root=tmp_path)
+ m2 = import_path(p2, root=tmp_path)
+ assert m1 == m2
+
+ def test_ensuresyspath_append(self, tmp_path: Path) -> None:
+ root1 = tmp_path / "root1"
+ root1.mkdir()
+ file1 = root1 / "x123.py"
+ file1.touch()
+ assert str(root1) not in sys.path
+ import_path(file1, mode="append", root=tmp_path)
+ assert str(root1) == sys.path[-1]
+ assert str(root1) not in sys.path[:-1]
+
+ def test_invalid_path(self, tmp_path: Path) -> None:
+ with pytest.raises(ImportError):
+ import_path(tmp_path / "invalid.py", root=tmp_path)
+
+ @pytest.fixture
+ def simple_module(self, tmp_path: Path) -> Path:
+ fn = tmp_path / "_src/tests/mymod.py"
+ fn.parent.mkdir(parents=True)
+ fn.write_text("def foo(x): return 40 + x")
+ return fn
+
+ def test_importmode_importlib(self, simple_module: Path, tmp_path: Path) -> None:
+ """`importlib` mode does not change sys.path."""
+ module = import_path(simple_module, mode="importlib", root=tmp_path)
+ assert module.foo(2) == 42 # type: ignore[attr-defined]
+ assert str(simple_module.parent) not in sys.path
+ assert module.__name__ in sys.modules
+ assert module.__name__ == "_src.tests.mymod"
+ assert "_src" in sys.modules
+ assert "_src.tests" in sys.modules
+
+ def test_importmode_twice_is_different_module(
+ self, simple_module: Path, tmp_path: Path
+ ) -> None:
+ """`importlib` mode always returns a new module."""
+ module1 = import_path(simple_module, mode="importlib", root=tmp_path)
+ module2 = import_path(simple_module, mode="importlib", root=tmp_path)
+ assert module1 is not module2
+
+ def test_no_meta_path_found(
+ self, simple_module: Path, monkeypatch: MonkeyPatch, tmp_path: Path
+ ) -> None:
+ """Even without any meta_path, the module should still be imported."""
+ monkeypatch.setattr(sys, "meta_path", [])
+ module = import_path(simple_module, mode="importlib", root=tmp_path)
+ assert module.foo(2) == 42 # type: ignore[attr-defined]
+
+ # mode='importlib' fails if no spec is found to load the module
+ import importlib.util
+
+ monkeypatch.setattr(
+ importlib.util, "spec_from_file_location", lambda *args: None
+ )
+ with pytest.raises(ImportError):
+ import_path(simple_module, mode="importlib", root=tmp_path)
+
+
+def test_resolve_package_path(tmp_path: Path) -> None:
+ pkg = tmp_path / "pkg1"
+ pkg.mkdir()
+ (pkg / "__init__.py").touch()
+ (pkg / "subdir").mkdir()
+ (pkg / "subdir/__init__.py").touch()
+ assert resolve_package_path(pkg) == pkg
+ assert resolve_package_path(pkg.joinpath("subdir", "__init__.py")) == pkg
+
+
+def test_package_unimportable(tmp_path: Path) -> None:
+ pkg = tmp_path / "pkg1-1"
+ pkg.mkdir()
+ pkg.joinpath("__init__.py").touch()
+ subdir = pkg.joinpath("subdir")
+ subdir.mkdir()
+ pkg.joinpath("subdir/__init__.py").touch()
+ assert resolve_package_path(subdir) == subdir
+ xyz = subdir.joinpath("xyz.py")
+ xyz.touch()
+ assert resolve_package_path(xyz) == subdir
+ assert not resolve_package_path(pkg)
+
+
+def test_access_denied_during_cleanup(tmp_path: Path, monkeypatch: MonkeyPatch) -> None:
+ """Ensure that deleting a numbered dir does not fail because of OSErrors (#4262)."""
+ path = tmp_path / "temp-1"
+ path.mkdir()
+
+ def renamed_failed(*args):
+ raise OSError("access denied")
+
+ monkeypatch.setattr(Path, "rename", renamed_failed)
+
+ lock_path = get_lock_path(path)
+ maybe_delete_a_numbered_dir(path)
+ assert not lock_path.is_file()
+
+
+def test_long_path_during_cleanup(tmp_path: Path) -> None:
+ """Ensure that deleting long path works (particularly on Windows (#6775))."""
+ path = (tmp_path / ("a" * 250)).resolve()
+ if sys.platform == "win32":
+ # make sure that the full path is > 260 characters without any
+ # component being over 260 characters
+ assert len(str(path)) > 260
+ extended_path = "\\\\?\\" + str(path)
+ else:
+ extended_path = str(path)
+ os.mkdir(extended_path)
+ assert os.path.isdir(extended_path)
+ maybe_delete_a_numbered_dir(path)
+ assert not os.path.isdir(extended_path)
+
+
+def test_get_extended_length_path_str() -> None:
+ assert get_extended_length_path_str(r"c:\foo") == r"\\?\c:\foo"
+ assert get_extended_length_path_str(r"\\share\foo") == r"\\?\UNC\share\foo"
+ assert get_extended_length_path_str(r"\\?\UNC\share\foo") == r"\\?\UNC\share\foo"
+ assert get_extended_length_path_str(r"\\?\c:\foo") == r"\\?\c:\foo"
+
+
+def test_suppress_error_removing_lock(tmp_path: Path) -> None:
+ """ensure_deletable should be resilient if the lock file cannot be removed (#5456, #7491)."""
+ path = tmp_path / "dir"
+ path.mkdir()
+ lock = get_lock_path(path)
+ lock.touch()
+ mtime = lock.stat().st_mtime
+
+ with unittest.mock.patch.object(Path, "unlink", side_effect=OSError) as m:
+ assert not ensure_deletable(
+ path, consider_lock_dead_if_created_before=mtime + 30
+ )
+ assert m.call_count == 1
+ assert lock.is_file()
+
+ with unittest.mock.patch.object(Path, "is_file", side_effect=OSError) as m:
+ assert not ensure_deletable(
+ path, consider_lock_dead_if_created_before=mtime + 30
+ )
+ assert m.call_count == 1
+ assert lock.is_file()
+
+ # check now that we can remove the lock file in normal circumstances
+ assert ensure_deletable(path, consider_lock_dead_if_created_before=mtime + 30)
+ assert not lock.is_file()
+
+
+def test_bestrelpath() -> None:
+ curdir = Path("/foo/bar/baz/path")
+ assert bestrelpath(curdir, curdir) == "."
+ assert bestrelpath(curdir, curdir / "hello" / "world") == "hello" + os.sep + "world"
+ assert bestrelpath(curdir, curdir.parent / "sister") == ".." + os.sep + "sister"
+ assert bestrelpath(curdir, curdir.parent) == ".."
+ assert bestrelpath(curdir, Path("hello")) == "hello"
+
+
+def test_commonpath() -> None:
+ path = Path("/foo/bar/baz/path")
+ subpath = path / "sampledir"
+ assert commonpath(path, subpath) == path
+ assert commonpath(subpath, path) == path
+ assert commonpath(Path(str(path) + "suffix"), path) == path.parent
+ assert commonpath(path, path.parent.parent) == path.parent.parent
+
+
+def test_visit_ignores_errors(tmp_path: Path) -> None:
+ symlink_or_skip("recursive", tmp_path / "recursive")
+ tmp_path.joinpath("foo").write_bytes(b"")
+ tmp_path.joinpath("bar").write_bytes(b"")
+
+ assert [
+ entry.name for entry in visit(str(tmp_path), recurse=lambda entry: False)
+ ] == ["bar", "foo"]
+
+
+@pytest.mark.skipif(not sys.platform.startswith("win"), reason="Windows only")
+def test_samefile_false_negatives(tmp_path: Path, monkeypatch: MonkeyPatch) -> None:
+ """
+ import_path() should not raise ImportPathMismatchError if the paths are exactly
+ equal on Windows. It seems directories mounted as UNC paths make os.path.samefile
+ return False, even when they are clearly equal.
+ """
+ module_path = tmp_path.joinpath("my_module.py")
+ module_path.write_text("def foo(): return 42")
+ monkeypatch.syspath_prepend(tmp_path)
+
+ with monkeypatch.context() as mp:
+ # Forcibly make os.path.samefile() return False here to ensure we are comparing
+ # the paths too. Using a context to narrow the patch as much as possible given
+ # this is an important system function.
+ mp.setattr(os.path, "samefile", lambda x, y: False)
+ module = import_path(module_path, root=tmp_path)
+ assert getattr(module, "foo")() == 42
+
+
+class TestImportLibMode:
+ @pytest.mark.skipif(sys.version_info < (3, 7), reason="Dataclasses in Python3.7+")
+ def test_importmode_importlib_with_dataclass(self, tmp_path: Path) -> None:
+ """Ensure that importlib mode works with a module containing dataclasses (#7856)."""
+ fn = tmp_path.joinpath("_src/tests/test_dataclass.py")
+ fn.parent.mkdir(parents=True)
+ fn.write_text(
+ dedent(
+ """
+ from dataclasses import dataclass
+
+ @dataclass
+ class Data:
+ value: str
+ """
+ )
+ )
+
+ module = import_path(fn, mode="importlib", root=tmp_path)
+ Data: Any = getattr(module, "Data")
+ data = Data(value="foo")
+ assert data.value == "foo"
+ assert data.__module__ == "_src.tests.test_dataclass"
+
+ def test_importmode_importlib_with_pickle(self, tmp_path: Path) -> None:
+ """Ensure that importlib mode works with pickle (#7859)."""
+ fn = tmp_path.joinpath("_src/tests/test_pickle.py")
+ fn.parent.mkdir(parents=True)
+ fn.write_text(
+ dedent(
+ """
+ import pickle
+
+ def _action():
+ return 42
+
+ def round_trip():
+ s = pickle.dumps(_action)
+ return pickle.loads(s)
+ """
+ )
+ )
+
+ module = import_path(fn, mode="importlib", root=tmp_path)
+ round_trip = getattr(module, "round_trip")
+ action = round_trip()
+ assert action() == 42
+
+ def test_importmode_importlib_with_pickle_separate_modules(
+ self, tmp_path: Path
+ ) -> None:
+ """
+ Ensure that importlib mode can load pickles that look similar but are
+ defined in separate modules.
+ """
+ fn1 = tmp_path.joinpath("_src/m1/tests/test.py")
+ fn1.parent.mkdir(parents=True)
+ fn1.write_text(
+ dedent(
+ """
+ import attr
+ import pickle
+
+ @attr.s(auto_attribs=True)
+ class Data:
+ x: int = 42
+ """
+ )
+ )
+
+ fn2 = tmp_path.joinpath("_src/m2/tests/test.py")
+ fn2.parent.mkdir(parents=True)
+ fn2.write_text(
+ dedent(
+ """
+ import attr
+ import pickle
+
+ @attr.s(auto_attribs=True)
+ class Data:
+ x: str = ""
+ """
+ )
+ )
+
+ import pickle
+
+ def round_trip(obj):
+ s = pickle.dumps(obj)
+ return pickle.loads(s)
+
+ module = import_path(fn1, mode="importlib", root=tmp_path)
+ Data1 = getattr(module, "Data")
+
+ module = import_path(fn2, mode="importlib", root=tmp_path)
+ Data2 = getattr(module, "Data")
+
+ assert round_trip(Data1(20)) == Data1(20)
+ assert round_trip(Data2("hello")) == Data2("hello")
+ assert Data1.__module__ == "_src.m1.tests.test"
+ assert Data2.__module__ == "_src.m2.tests.test"
+
+ def test_module_name_from_path(self, tmp_path: Path) -> None:
+ result = module_name_from_path(tmp_path / "src/tests/test_foo.py", tmp_path)
+ assert result == "src.tests.test_foo"
+
+ # Path is not relative to root dir: use the full path to obtain the module name.
+ result = module_name_from_path(Path("/home/foo/test_foo.py"), Path("/bar"))
+ assert result == "home.foo.test_foo"
+
+ def test_insert_missing_modules(self) -> None:
+ modules = {"src.tests.foo": ModuleType("src.tests.foo")}
+ insert_missing_modules(modules, "src.tests.foo")
+ assert sorted(modules) == ["src", "src.tests", "src.tests.foo"]
+
+ mod = ModuleType("mod", doc="My Module")
+ modules = {"src": mod}
+ insert_missing_modules(modules, "src")
+ assert modules == {"src": mod}
+
+ modules = {}
+ insert_missing_modules(modules, "")
+ assert modules == {}
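
The last two tests above describe how importlib-mode module names are derived from file paths and how missing parent packages are filled in. A simplified sketch of both behaviours (hedged approximations for illustration; the real _pytest.pathlib helpers handle additional cases such as __init__.py files):

    from pathlib import Path
    from types import ModuleType
    from typing import Dict

    def module_name_from_path_sketch(path: Path, root: Path) -> str:
        # Derive a dotted module name, falling back to the full path (minus the
        # filesystem anchor) when `path` does not live under `root`.
        path = path.with_suffix("")
        try:
            parts = path.relative_to(root).parts
        except ValueError:
            parts = path.parts[1:] if path.is_absolute() else path.parts
        return ".".join(parts)

    def insert_missing_modules_sketch(modules: Dict[str, ModuleType], name: str) -> None:
        # Register an empty placeholder module for every missing parent package.
        parts = name.split(".")
        while name:
            modules.setdefault(name, ModuleType(name))
            parts.pop()
            name = ".".join(parts)

    root = Path("/proj")  # example root directory
    assert module_name_from_path_sketch(root / "src/tests/test_foo.py", root) == "src.tests.test_foo"
    mods: Dict[str, ModuleType] = {"src.tests.foo": ModuleType("src.tests.foo")}
    insert_missing_modules_sketch(mods, "src.tests.foo")
    assert sorted(mods) == ["src", "src.tests", "src.tests.foo"]
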
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_pluginmanager.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_pluginmanager.py
new file mode 100644
index 0000000000..9fe23d1779
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_pluginmanager.py
@@ -0,0 +1,427 @@
+import os
+import shutil
+import sys
+import types
+from typing import List
+
+import pytest
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.config import PytestPluginManager
+from _pytest.config.exceptions import UsageError
+from _pytest.main import Session
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pathlib import import_path
+from _pytest.pytester import Pytester
+
+
+@pytest.fixture
+def pytestpm() -> PytestPluginManager:
+ return PytestPluginManager()
+
+
+class TestPytestPluginInteractions:
+ def test_addhooks_conftestplugin(
+ self, pytester: Pytester, _config_for_test: Config
+ ) -> None:
+ pytester.makepyfile(
+ newhooks="""
+ def pytest_myhook(xyz):
+ "new hook"
+ """
+ )
+ conf = pytester.makeconftest(
+ """
+ import newhooks
+ def pytest_addhooks(pluginmanager):
+ pluginmanager.add_hookspecs(newhooks)
+ def pytest_myhook(xyz):
+ return xyz + 1
+ """
+ )
+ config = _config_for_test
+ pm = config.pluginmanager
+ pm.hook.pytest_addhooks.call_historic(
+ kwargs=dict(pluginmanager=config.pluginmanager)
+ )
+ config.pluginmanager._importconftest(
+ conf, importmode="prepend", rootpath=pytester.path
+ )
+ # print(config.pluginmanager.get_plugins())
+ res = config.hook.pytest_myhook(xyz=10)
+ assert res == [11]
+
+ def test_addhooks_nohooks(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import sys
+ def pytest_addhooks(pluginmanager):
+ pluginmanager.add_hookspecs(sys)
+ """
+ )
+ res = pytester.runpytest()
+ assert res.ret != 0
+ res.stderr.fnmatch_lines(["*did not find*sys*"])
+
+ def test_do_option_postinitialize(self, pytester: Pytester) -> None:
+ config = pytester.parseconfigure()
+ assert not hasattr(config.option, "test123")
+ p = pytester.makepyfile(
+ """
+ def pytest_addoption(parser):
+ parser.addoption('--test123', action="store_true",
+ default=True)
+ """
+ )
+ config.pluginmanager._importconftest(
+ p, importmode="prepend", rootpath=pytester.path
+ )
+ assert config.option.test123
+
+ def test_configure(self, pytester: Pytester) -> None:
+ config = pytester.parseconfig()
+ values = []
+
+ class A:
+ def pytest_configure(self):
+ values.append(self)
+
+ config.pluginmanager.register(A())
+ assert len(values) == 0
+ config._do_configure()
+ assert len(values) == 1
+ config.pluginmanager.register(A()) # leads to a configured() plugin
+ assert len(values) == 2
+ assert values[0] != values[1]
+
+ config._ensure_unconfigure()
+ config.pluginmanager.register(A())
+ assert len(values) == 2
+
+ def test_hook_tracing(self, _config_for_test: Config) -> None:
+ pytestpm = _config_for_test.pluginmanager # fully initialized with plugins
+ saveindent = []
+
+ class api1:
+ def pytest_plugin_registered(self):
+ saveindent.append(pytestpm.trace.root.indent)
+
+ class api2:
+ def pytest_plugin_registered(self):
+ saveindent.append(pytestpm.trace.root.indent)
+ raise ValueError()
+
+ values: List[str] = []
+ pytestpm.trace.root.setwriter(values.append)
+ undo = pytestpm.enable_tracing()
+ try:
+ indent = pytestpm.trace.root.indent
+ p = api1()
+ pytestpm.register(p)
+ assert pytestpm.trace.root.indent == indent
+ assert len(values) >= 2
+ assert "pytest_plugin_registered" in values[0]
+ assert "finish" in values[1]
+
+ values[:] = []
+ with pytest.raises(ValueError):
+ pytestpm.register(api2())
+ assert pytestpm.trace.root.indent == indent
+ assert saveindent[0] > indent
+ finally:
+ undo()
+
+ def test_hook_proxy(self, pytester: Pytester) -> None:
+ """Test the gethookproxy function (#2016)."""
+ config = pytester.parseconfig()
+ session = Session.from_config(config)
+ pytester.makepyfile(**{"tests/conftest.py": "", "tests/subdir/conftest.py": ""})
+
+ conftest1 = pytester.path.joinpath("tests/conftest.py")
+ conftest2 = pytester.path.joinpath("tests/subdir/conftest.py")
+
+ config.pluginmanager._importconftest(
+ conftest1, importmode="prepend", rootpath=pytester.path
+ )
+ ihook_a = session.gethookproxy(pytester.path / "tests")
+ assert ihook_a is not None
+ config.pluginmanager._importconftest(
+ conftest2, importmode="prepend", rootpath=pytester.path
+ )
+ ihook_b = session.gethookproxy(pytester.path / "tests")
+ assert ihook_a is not ihook_b
+
+ def test_hook_with_addoption(self, pytester: Pytester) -> None:
+ """Test that hooks can be used in a call to pytest_addoption"""
+ pytester.makepyfile(
+ newhooks="""
+ import pytest
+ @pytest.hookspec(firstresult=True)
+ def pytest_default_value():
+ pass
+ """
+ )
+ pytester.makepyfile(
+ myplugin="""
+ import newhooks
+ def pytest_addhooks(pluginmanager):
+ pluginmanager.add_hookspecs(newhooks)
+ def pytest_addoption(parser, pluginmanager):
+ default_value = pluginmanager.hook.pytest_default_value()
+ parser.addoption("--config", help="Config, defaults to %(default)s", default=default_value)
+ """
+ )
+ pytester.makeconftest(
+ """
+ pytest_plugins=("myplugin",)
+ def pytest_default_value():
+ return "default_value"
+ """
+ )
+ res = pytester.runpytest("--help")
+ res.stdout.fnmatch_lines(["*--config=CONFIG*default_value*"])
+
+
+def test_default_markers(pytester: Pytester) -> None:
+ result = pytester.runpytest("--markers")
+ result.stdout.fnmatch_lines(["*tryfirst*first*", "*trylast*last*"])
+
+
+def test_importplugin_error_message(
+ pytester: Pytester, pytestpm: PytestPluginManager
+) -> None:
+ """Don't hide import errors when importing plugins and provide
+ an easy-to-debug message.
+
+ See #375 and #1998.
+ """
+ pytester.syspathinsert(pytester.path)
+ pytester.makepyfile(
+ qwe="""\
+ def test_traceback():
+ raise ImportError('Not possible to import: ☺')
+ test_traceback()
+ """
+ )
+ with pytest.raises(ImportError) as excinfo:
+ pytestpm.import_plugin("qwe")
+
+ assert str(excinfo.value).endswith(
+ 'Error importing plugin "qwe": Not possible to import: ☺'
+ )
+ assert "in test_traceback" in str(excinfo.traceback[-1])
+
+
+class TestPytestPluginManager:
+ def test_register_imported_modules(self) -> None:
+ pm = PytestPluginManager()
+ mod = types.ModuleType("x.y.pytest_hello")
+ pm.register(mod)
+ assert pm.is_registered(mod)
+ values = pm.get_plugins()
+ assert mod in values
+ pytest.raises(ValueError, pm.register, mod)
+ pytest.raises(ValueError, lambda: pm.register(mod))
+ # assert not pm.is_registered(mod2)
+ assert pm.get_plugins() == values
+
+ def test_canonical_import(self, monkeypatch):
+ mod = types.ModuleType("pytest_xyz")
+ monkeypatch.setitem(sys.modules, "pytest_xyz", mod)
+ pm = PytestPluginManager()
+ pm.import_plugin("pytest_xyz")
+ assert pm.get_plugin("pytest_xyz") == mod
+ assert pm.is_registered(mod)
+
+ def test_consider_module(
+ self, pytester: Pytester, pytestpm: PytestPluginManager
+ ) -> None:
+ pytester.syspathinsert()
+ pytester.makepyfile(pytest_p1="#")
+ pytester.makepyfile(pytest_p2="#")
+ mod = types.ModuleType("temp")
+ mod.__dict__["pytest_plugins"] = ["pytest_p1", "pytest_p2"]
+ pytestpm.consider_module(mod)
+ assert pytestpm.get_plugin("pytest_p1").__name__ == "pytest_p1"
+ assert pytestpm.get_plugin("pytest_p2").__name__ == "pytest_p2"
+
+ def test_consider_module_import_module(
+ self, pytester: Pytester, _config_for_test: Config
+ ) -> None:
+ pytestpm = _config_for_test.pluginmanager
+ mod = types.ModuleType("x")
+ mod.__dict__["pytest_plugins"] = "pytest_a"
+ aplugin = pytester.makepyfile(pytest_a="#")
+ reprec = pytester.make_hook_recorder(pytestpm)
+ pytester.syspathinsert(aplugin.parent)
+ pytestpm.consider_module(mod)
+ call = reprec.getcall(pytestpm.hook.pytest_plugin_registered.name)
+ assert call.plugin.__name__ == "pytest_a"
+
+ # check that it is not registered twice
+ pytestpm.consider_module(mod)
+ values = reprec.getcalls("pytest_plugin_registered")
+ assert len(values) == 1
+
+ def test_consider_env_fails_to_import(
+ self, monkeypatch: MonkeyPatch, pytestpm: PytestPluginManager
+ ) -> None:
+ monkeypatch.setenv("PYTEST_PLUGINS", "nonexisting", prepend=",")
+ with pytest.raises(ImportError):
+ pytestpm.consider_env()
+
+ @pytest.mark.filterwarnings("always")
+ def test_plugin_skip(self, pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
+ p = pytester.makepyfile(
+ skipping1="""
+ import pytest
+ pytest.skip("hello", allow_module_level=True)
+ """
+ )
+ shutil.copy(p, p.with_name("skipping2.py"))
+ monkeypatch.setenv("PYTEST_PLUGINS", "skipping2")
+ result = pytester.runpytest("-p", "skipping1", syspathinsert=True)
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+ result.stdout.fnmatch_lines(
+ ["*skipped plugin*skipping1*hello*", "*skipped plugin*skipping2*hello*"]
+ )
+
+ def test_consider_env_plugin_instantiation(
+ self,
+ pytester: Pytester,
+ monkeypatch: MonkeyPatch,
+ pytestpm: PytestPluginManager,
+ ) -> None:
+ pytester.syspathinsert()
+ pytester.makepyfile(xy123="#")
+ monkeypatch.setitem(os.environ, "PYTEST_PLUGINS", "xy123")
+ l1 = len(pytestpm.get_plugins())
+ pytestpm.consider_env()
+ l2 = len(pytestpm.get_plugins())
+ assert l2 == l1 + 1
+ assert pytestpm.get_plugin("xy123")
+ pytestpm.consider_env()
+ l3 = len(pytestpm.get_plugins())
+ assert l2 == l3
+
+ def test_pluginmanager_ENV_startup(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ pytester.makepyfile(pytest_x500="#")
+ p = pytester.makepyfile(
+ """
+ import pytest
+ def test_hello(pytestconfig):
+ plugin = pytestconfig.pluginmanager.get_plugin('pytest_x500')
+ assert plugin is not None
+ """
+ )
+ monkeypatch.setenv("PYTEST_PLUGINS", "pytest_x500", prepend=",")
+ result = pytester.runpytest(p, syspathinsert=True)
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_import_plugin_importname(
+ self, pytester: Pytester, pytestpm: PytestPluginManager
+ ) -> None:
+ pytest.raises(ImportError, pytestpm.import_plugin, "qweqwex.y")
+ pytest.raises(ImportError, pytestpm.import_plugin, "pytest_qweqwx.y")
+
+ pytester.syspathinsert()
+ pluginname = "pytest_hello"
+ pytester.makepyfile(**{pluginname: ""})
+ pytestpm.import_plugin("pytest_hello")
+ len1 = len(pytestpm.get_plugins())
+ pytestpm.import_plugin("pytest_hello")
+ len2 = len(pytestpm.get_plugins())
+ assert len1 == len2
+ plugin1 = pytestpm.get_plugin("pytest_hello")
+ assert plugin1.__name__.endswith("pytest_hello")
+ plugin2 = pytestpm.get_plugin("pytest_hello")
+ assert plugin2 is plugin1
+
+ def test_import_plugin_dotted_name(
+ self, pytester: Pytester, pytestpm: PytestPluginManager
+ ) -> None:
+ pytest.raises(ImportError, pytestpm.import_plugin, "qweqwex.y")
+ pytest.raises(ImportError, pytestpm.import_plugin, "pytest_qweqwex.y")
+
+ pytester.syspathinsert()
+ pytester.mkpydir("pkg").joinpath("plug.py").write_text("x=3")
+ pluginname = "pkg.plug"
+ pytestpm.import_plugin(pluginname)
+ mod = pytestpm.get_plugin("pkg.plug")
+ assert mod.x == 3
+
+ def test_consider_conftest_deps(
+ self,
+ pytester: Pytester,
+ pytestpm: PytestPluginManager,
+ ) -> None:
+ mod = import_path(
+ pytester.makepyfile("pytest_plugins='xyz'"), root=pytester.path
+ )
+ with pytest.raises(ImportError):
+ pytestpm.consider_conftest(mod)
+
+
+class TestPytestPluginManagerBootstrapping:
+ def test_preparse_args(self, pytestpm: PytestPluginManager) -> None:
+ pytest.raises(
+ ImportError, lambda: pytestpm.consider_preparse(["xyz", "-p", "hello123"])
+ )
+
+ # Handles -p without space (#3532).
+ with pytest.raises(ImportError) as excinfo:
+ pytestpm.consider_preparse(["-phello123"])
+ assert '"hello123"' in excinfo.value.args[0]
+ pytestpm.consider_preparse(["-pno:hello123"])
+
+ # Handles -p without following arg (when used without argparse).
+ pytestpm.consider_preparse(["-p"])
+
+ with pytest.raises(UsageError, match="^plugin main cannot be disabled$"):
+ pytestpm.consider_preparse(["-p", "no:main"])
+
+ def test_plugin_prevent_register(self, pytestpm: PytestPluginManager) -> None:
+ pytestpm.consider_preparse(["xyz", "-p", "no:abc"])
+ l1 = pytestpm.get_plugins()
+ pytestpm.register(42, name="abc")
+ l2 = pytestpm.get_plugins()
+ assert len(l2) == len(l1)
+ assert 42 not in l2
+
+ def test_plugin_prevent_register_unregistered_already_registered(
+ self, pytestpm: PytestPluginManager
+ ) -> None:
+ pytestpm.register(42, name="abc")
+ l1 = pytestpm.get_plugins()
+ assert 42 in l1
+ pytestpm.consider_preparse(["xyz", "-p", "no:abc"])
+ l2 = pytestpm.get_plugins()
+ assert 42 not in l2
+
+ def test_plugin_prevent_register_stepwise_on_cacheprovider_unregister(
+ self, pytestpm: PytestPluginManager
+ ) -> None:
+ """From PR #4304: The only way to unregister a module is documented at
+ the end of https://docs.pytest.org/en/stable/how-to/plugins.html.
+
+ When cacheprovider is unregistered, stepwise should be unregistered too.
+ """
+ pytestpm.register(42, name="cacheprovider")
+ pytestpm.register(43, name="stepwise")
+ l1 = pytestpm.get_plugins()
+ assert 42 in l1
+ assert 43 in l1
+ pytestpm.consider_preparse(["xyz", "-p", "no:cacheprovider"])
+ l2 = pytestpm.get_plugins()
+ assert 42 not in l2
+ assert 43 not in l2
+
+ def test_blocked_plugin_can_be_used(self, pytestpm: PytestPluginManager) -> None:
+ pytestpm.consider_preparse(["xyz", "-p", "no:abc", "-p", "abc"])
+
+ assert pytestpm.has_plugin("abc")
+ assert not pytestpm.is_blocked("abc")
+ assert not pytestpm.is_blocked("pytest_abc")
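
Several of the tests above wire a custom hookspec and hookimpl together through conftest files; the same round trip can be shown directly against the plugin manager API they exercise. A hedged, self-contained sketch (NewHooks and MyPlugin are invented names for illustration):

    import pytest
    from _pytest.config import PytestPluginManager

    class NewHooks:
        # Hook specification: firstresult=True makes the call return a single value.
        @pytest.hookspec(firstresult=True)
        def pytest_default_value(self):
            """Return a default value for some option."""

    class MyPlugin:
        # Hook implementation: picked up by its pytest_ name, like conftest hooks.
        def pytest_default_value(self):
            return "default_value"

    pm = PytestPluginManager()
    pm.add_hookspecs(NewHooks)
    pm.register(MyPlugin())
    assert pm.hook.pytest_default_value() == "default_value"
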
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_pytester.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_pytester.py
new file mode 100644
index 0000000000..049f8b22d8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_pytester.py
@@ -0,0 +1,855 @@
+import os
+import subprocess
+import sys
+import time
+from pathlib import Path
+from types import ModuleType
+from typing import List
+
+import _pytest.pytester as pytester_mod
+import pytest
+from _pytest.config import ExitCode
+from _pytest.config import PytestPluginManager
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import CwdSnapshot
+from _pytest.pytester import HookRecorder
+from _pytest.pytester import LineMatcher
+from _pytest.pytester import Pytester
+from _pytest.pytester import SysModulesSnapshot
+from _pytest.pytester import SysPathsSnapshot
+
+
+def test_make_hook_recorder(pytester: Pytester) -> None:
+ item = pytester.getitem("def test_func(): pass")
+ recorder = pytester.make_hook_recorder(item.config.pluginmanager)
+ assert not recorder.getfailures()
+
+ # (The silly condition is to fool mypy that the code below this is reachable)
+ if 1 + 1 == 2:
+ pytest.xfail("internal reportrecorder tests need refactoring")
+
+ class rep:
+ excinfo = None
+ passed = False
+ failed = True
+ skipped = False
+ when = "call"
+
+ recorder.hook.pytest_runtest_logreport(report=rep) # type: ignore[attr-defined]
+ failures = recorder.getfailures()
+ assert failures == [rep] # type: ignore[comparison-overlap]
+ failures = recorder.getfailures()
+ assert failures == [rep] # type: ignore[comparison-overlap]
+
+ class rep2:
+ excinfo = None
+ passed = False
+ failed = False
+ skipped = True
+ when = "call"
+
+ rep2.passed = False
+ rep2.skipped = True
+ recorder.hook.pytest_runtest_logreport(report=rep2) # type: ignore[attr-defined]
+
+ modcol = pytester.getmodulecol("")
+ rep3 = modcol.config.hook.pytest_make_collect_report(collector=modcol)
+ rep3.passed = False
+ rep3.failed = True
+ rep3.skipped = False
+ recorder.hook.pytest_collectreport(report=rep3) # type: ignore[attr-defined]
+
+ passed, skipped, failed = recorder.listoutcomes()
+ assert not passed and skipped and failed
+
+ numpassed, numskipped, numfailed = recorder.countoutcomes()
+ assert numpassed == 0
+ assert numskipped == 1
+ assert numfailed == 1
+ assert len(recorder.getfailedcollections()) == 1
+
+ recorder.unregister() # type: ignore[attr-defined]
+ recorder.clear()
+ recorder.hook.pytest_runtest_logreport(report=rep3) # type: ignore[attr-defined]
+ pytest.raises(ValueError, recorder.getfailures)
+
+
+def test_parseconfig(pytester: Pytester) -> None:
+ config1 = pytester.parseconfig()
+ config2 = pytester.parseconfig()
+ assert config2 is not config1
+
+
+def test_pytester_runs_with_plugin(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ pytest_plugins = "pytester"
+ def test_hello(pytester):
+ assert 1
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(passed=1)
+
+
+def test_pytester_with_doctest(pytester: Pytester) -> None:
+ """Check that pytester can be used within doctests.
+
+ It used to use `request.function`, which is `None` with doctests."""
+ pytester.makepyfile(
+ **{
+ "sub/t-doctest.py": """
+ '''
+ >>> import os
+ >>> pytester = getfixture("pytester")
+ >>> str(pytester.makepyfile("content")).replace(os.sep, '/')
+ '.../basetemp/sub.t-doctest0/sub.py'
+ '''
+ """,
+ "sub/__init__.py": "",
+ }
+ )
+ result = pytester.runpytest(
+ "-p", "pytester", "--doctest-modules", "sub/t-doctest.py"
+ )
+ assert result.ret == 0
+
+
+def test_runresult_assertion_on_xfail(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ pytest_plugins = "pytester"
+
+ @pytest.mark.xfail
+ def test_potato():
+ assert False
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(xfailed=1)
+ assert result.ret == 0
+
+
+def test_runresult_assertion_on_xpassed(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ pytest_plugins = "pytester"
+
+ @pytest.mark.xfail
+ def test_potato():
+ assert True
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(xpassed=1)
+ assert result.ret == 0
+
+
+def test_xpassed_with_strict_is_considered_a_failure(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ pytest_plugins = "pytester"
+
+ @pytest.mark.xfail(strict=True)
+ def test_potato():
+ assert True
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(failed=1)
+ assert result.ret != 0
+
+
+def make_holder():
+ class apiclass:
+ def pytest_xyz(self, arg):
+ """X"""
+
+ def pytest_xyz_noarg(self):
+ """X"""
+
+ apimod = type(os)("api")
+
+ def pytest_xyz(arg):
+ """X"""
+
+ def pytest_xyz_noarg():
+ """X"""
+
+ apimod.pytest_xyz = pytest_xyz # type: ignore
+ apimod.pytest_xyz_noarg = pytest_xyz_noarg # type: ignore
+ return apiclass, apimod
+
+
+@pytest.mark.parametrize("holder", make_holder())
+def test_hookrecorder_basic(holder) -> None:
+ pm = PytestPluginManager()
+ pm.add_hookspecs(holder)
+ rec = HookRecorder(pm, _ispytest=True)
+ pm.hook.pytest_xyz(arg=123)
+ call = rec.popcall("pytest_xyz")
+ assert call.arg == 123
+ assert call._name == "pytest_xyz"
+ pytest.raises(pytest.fail.Exception, rec.popcall, "abc")
+ pm.hook.pytest_xyz_noarg()
+ call = rec.popcall("pytest_xyz_noarg")
+ assert call._name == "pytest_xyz_noarg"
+
+
+def test_makepyfile_unicode(pytester: Pytester) -> None:
+ pytester.makepyfile(chr(0xFFFD))
+
+
+def test_makepyfile_utf8(pytester: Pytester) -> None:
+ """Ensure makepyfile accepts utf-8 bytes as input (#2738)"""
+ utf8_contents = """
+ def setup_function(function):
+ mixed_encoding = 'São Paulo'
+ """.encode()
+ p = pytester.makepyfile(utf8_contents)
+ assert "mixed_encoding = 'São Paulo'".encode() in p.read_bytes()
+
+
+class TestInlineRunModulesCleanup:
+ def test_inline_run_test_module_not_cleaned_up(self, pytester: Pytester) -> None:
+ test_mod = pytester.makepyfile("def test_foo(): assert True")
+ result = pytester.inline_run(str(test_mod))
+ assert result.ret == ExitCode.OK
+ # rewrite module, now test should fail if module was re-imported
+ test_mod.write_text("def test_foo(): assert False")
+ result2 = pytester.inline_run(str(test_mod))
+ assert result2.ret == ExitCode.TESTS_FAILED
+
+ def spy_factory(self):
+ class SysModulesSnapshotSpy:
+ instances: List["SysModulesSnapshotSpy"] = [] # noqa: F821
+
+ def __init__(self, preserve=None) -> None:
+ SysModulesSnapshotSpy.instances.append(self)
+ self._spy_restore_count = 0
+ self._spy_preserve = preserve
+ self.__snapshot = SysModulesSnapshot(preserve=preserve)
+
+ def restore(self):
+ self._spy_restore_count += 1
+ return self.__snapshot.restore()
+
+ return SysModulesSnapshotSpy
+
+ def test_inline_run_taking_and_restoring_a_sys_modules_snapshot(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ spy_factory = self.spy_factory()
+ monkeypatch.setattr(pytester_mod, "SysModulesSnapshot", spy_factory)
+ pytester.syspathinsert()
+ original = dict(sys.modules)
+ pytester.makepyfile(import1="# you son of a silly person")
+ pytester.makepyfile(import2="# my hovercraft is full of eels")
+ test_mod = pytester.makepyfile(
+ """
+ import import1
+ def test_foo(): import import2"""
+ )
+ pytester.inline_run(str(test_mod))
+ assert len(spy_factory.instances) == 1
+ spy = spy_factory.instances[0]
+ assert spy._spy_restore_count == 1
+ assert sys.modules == original
+ assert all(sys.modules[x] is original[x] for x in sys.modules)
+
+ def test_inline_run_sys_modules_snapshot_restore_preserving_modules(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ spy_factory = self.spy_factory()
+ monkeypatch.setattr(pytester_mod, "SysModulesSnapshot", spy_factory)
+ test_mod = pytester.makepyfile("def test_foo(): pass")
+ pytester.inline_run(str(test_mod))
+ spy = spy_factory.instances[0]
+ assert not spy._spy_preserve("black_knight")
+ assert spy._spy_preserve("zope")
+ assert spy._spy_preserve("zope.interface")
+ assert spy._spy_preserve("zopelicious")
+
+ def test_external_test_module_imports_not_cleaned_up(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.syspathinsert()
+ pytester.makepyfile(imported="data = 'you son of a silly person'")
+ import imported
+
+ test_mod = pytester.makepyfile(
+ """
+ def test_foo():
+ import imported
+ imported.data = 42"""
+ )
+ pytester.inline_run(str(test_mod))
+ assert imported.data == 42
+
+
+def test_assert_outcomes_after_pytest_error(pytester: Pytester) -> None:
+ pytester.makepyfile("def test_foo(): assert True")
+
+ result = pytester.runpytest("--unexpected-argument")
+ with pytest.raises(ValueError, match="Pytest terminal summary report not found"):
+ result.assert_outcomes(passed=0)
+
+
+def test_cwd_snapshot(pytester: Pytester) -> None:
+ foo = pytester.mkdir("foo")
+ bar = pytester.mkdir("bar")
+ os.chdir(foo)
+ snapshot = CwdSnapshot()
+ os.chdir(bar)
+ assert Path().absolute() == bar
+ snapshot.restore()
+ assert Path().absolute() == foo
+
+
+class TestSysModulesSnapshot:
+ key = "my-test-module"
+
+ def test_remove_added(self) -> None:
+ original = dict(sys.modules)
+ assert self.key not in sys.modules
+ snapshot = SysModulesSnapshot()
+ sys.modules[self.key] = ModuleType("something")
+ assert self.key in sys.modules
+ snapshot.restore()
+ assert sys.modules == original
+
+ def test_add_removed(self, monkeypatch: MonkeyPatch) -> None:
+ assert self.key not in sys.modules
+ monkeypatch.setitem(sys.modules, self.key, ModuleType("something"))
+ assert self.key in sys.modules
+ original = dict(sys.modules)
+ snapshot = SysModulesSnapshot()
+ del sys.modules[self.key]
+ assert self.key not in sys.modules
+ snapshot.restore()
+ assert sys.modules == original
+
+ def test_restore_reloaded(self, monkeypatch: MonkeyPatch) -> None:
+ assert self.key not in sys.modules
+ monkeypatch.setitem(sys.modules, self.key, ModuleType("something"))
+ assert self.key in sys.modules
+ original = dict(sys.modules)
+ snapshot = SysModulesSnapshot()
+ sys.modules[self.key] = ModuleType("something else")
+ snapshot.restore()
+ assert sys.modules == original
+
+ def test_preserve_modules(self, monkeypatch: MonkeyPatch) -> None:
+ key = [self.key + str(i) for i in range(3)]
+ assert not any(k in sys.modules for k in key)
+ for i, k in enumerate(key):
+ mod = ModuleType("something" + str(i))
+ monkeypatch.setitem(sys.modules, k, mod)
+ original = dict(sys.modules)
+
+ def preserve(name):
+ return name in (key[0], key[1], "some-other-key")
+
+ snapshot = SysModulesSnapshot(preserve=preserve)
+ sys.modules[key[0]] = original[key[0]] = ModuleType("something else0")
+ sys.modules[key[1]] = original[key[1]] = ModuleType("something else1")
+ sys.modules[key[2]] = ModuleType("something else2")
+ snapshot.restore()
+ assert sys.modules == original
+
+ def test_preserve_container(self, monkeypatch: MonkeyPatch) -> None:
+ original = dict(sys.modules)
+ assert self.key not in original
+ replacement = dict(sys.modules)
+ replacement[self.key] = ModuleType("life of brian")
+ snapshot = SysModulesSnapshot()
+ monkeypatch.setattr(sys, "modules", replacement)
+ snapshot.restore()
+ assert sys.modules is replacement
+ assert sys.modules == original
+
+
+@pytest.mark.parametrize("path_type", ("path", "meta_path"))
+class TestSysPathsSnapshot:
+ other_path = {"path": "meta_path", "meta_path": "path"}
+
+ @staticmethod
+ def path(n: int) -> str:
+ return "my-dirty-little-secret-" + str(n)
+
+ def test_restore(self, monkeypatch: MonkeyPatch, path_type) -> None:
+ other_path_type = self.other_path[path_type]
+ for i in range(10):
+ assert self.path(i) not in getattr(sys, path_type)
+ sys_path = [self.path(i) for i in range(6)]
+ monkeypatch.setattr(sys, path_type, sys_path)
+ original = list(sys_path)
+ original_other = list(getattr(sys, other_path_type))
+ snapshot = SysPathsSnapshot()
+ transformation = {"source": (0, 1, 2, 3, 4, 5), "target": (6, 2, 9, 7, 5, 8)}
+ assert sys_path == [self.path(x) for x in transformation["source"]]
+ sys_path[1] = self.path(6)
+ sys_path[3] = self.path(7)
+ sys_path.append(self.path(8))
+ del sys_path[4]
+ sys_path[3:3] = [self.path(9)]
+ del sys_path[0]
+ assert sys_path == [self.path(x) for x in transformation["target"]]
+ snapshot.restore()
+ assert getattr(sys, path_type) is sys_path
+ assert getattr(sys, path_type) == original
+ assert getattr(sys, other_path_type) == original_other
+
+ def test_preserve_container(self, monkeypatch: MonkeyPatch, path_type) -> None:
+ other_path_type = self.other_path[path_type]
+ original_data = list(getattr(sys, path_type))
+ original_other = getattr(sys, other_path_type)
+ original_other_data = list(original_other)
+ new: List[object] = []
+ snapshot = SysPathsSnapshot()
+ monkeypatch.setattr(sys, path_type, new)
+ snapshot.restore()
+ assert getattr(sys, path_type) is new
+ assert getattr(sys, path_type) == original_data
+ assert getattr(sys, other_path_type) is original_other
+ assert getattr(sys, other_path_type) == original_other_data
+
+
+def test_pytester_subprocess(pytester: Pytester) -> None:
+ testfile = pytester.makepyfile("def test_one(): pass")
+ assert pytester.runpytest_subprocess(testfile).ret == 0
+
+
+def test_pytester_subprocess_via_runpytest_arg(pytester: Pytester) -> None:
+ testfile = pytester.makepyfile(
+ """
+ def test_pytester_subprocess(pytester):
+ import os
+ testfile = pytester.makepyfile(
+ \"""
+ import os
+ def test_one():
+ assert {} != os.getpid()
+ \""".format(os.getpid())
+ )
+ assert pytester.runpytest(testfile).ret == 0
+ """
+ )
+ result = pytester.runpytest_inprocess(
+ "-p", "pytester", "--runpytest", "subprocess", testfile
+ )
+ assert result.ret == 0
+
+
+def test_unicode_args(pytester: Pytester) -> None:
+ result = pytester.runpytest("-k", "×בג")
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+def test_pytester_run_no_timeout(pytester: Pytester) -> None:
+ testfile = pytester.makepyfile("def test_no_timeout(): pass")
+ assert pytester.runpytest_subprocess(testfile).ret == ExitCode.OK
+
+
+def test_pytester_run_with_timeout(pytester: Pytester) -> None:
+ testfile = pytester.makepyfile("def test_no_timeout(): pass")
+
+ timeout = 120
+
+ start = time.time()
+ result = pytester.runpytest_subprocess(testfile, timeout=timeout)
+ end = time.time()
+ duration = end - start
+
+ assert result.ret == ExitCode.OK
+ assert duration < timeout
+
+
+def test_pytester_run_timeout_expires(pytester: Pytester) -> None:
+ testfile = pytester.makepyfile(
+ """
+ import time
+
+ def test_timeout():
+ time.sleep(10)"""
+ )
+ with pytest.raises(pytester.TimeoutExpired):
+ pytester.runpytest_subprocess(testfile, timeout=1)
+
+
+def test_linematcher_with_nonlist() -> None:
+ """Test LineMatcher with regard to passing in a set (accidentally)."""
+ from _pytest._code.source import Source
+
+ lm = LineMatcher([])
+ with pytest.raises(TypeError, match="invalid type for lines2: set"):
+ lm.fnmatch_lines(set()) # type: ignore[arg-type]
+ with pytest.raises(TypeError, match="invalid type for lines2: dict"):
+ lm.fnmatch_lines({}) # type: ignore[arg-type]
+ with pytest.raises(TypeError, match="invalid type for lines2: set"):
+ lm.re_match_lines(set()) # type: ignore[arg-type]
+ with pytest.raises(TypeError, match="invalid type for lines2: dict"):
+ lm.re_match_lines({}) # type: ignore[arg-type]
+ with pytest.raises(TypeError, match="invalid type for lines2: Source"):
+ lm.fnmatch_lines(Source()) # type: ignore[arg-type]
+ lm.fnmatch_lines([])
+ lm.fnmatch_lines(())
+ lm.fnmatch_lines("")
+ assert lm._getlines({}) == {} # type: ignore[arg-type,comparison-overlap]
+ assert lm._getlines(set()) == set() # type: ignore[arg-type,comparison-overlap]
+ assert lm._getlines(Source()) == []
+ assert lm._getlines(Source("pass\npass")) == ["pass", "pass"]
+
+
+def test_linematcher_match_failure() -> None:
+ lm = LineMatcher(["foo", "foo", "bar"])
+ with pytest.raises(pytest.fail.Exception) as e:
+ lm.fnmatch_lines(["foo", "f*", "baz"])
+ assert e.value.msg is not None
+ assert e.value.msg.splitlines() == [
+ "exact match: 'foo'",
+ "fnmatch: 'f*'",
+ " with: 'foo'",
+ "nomatch: 'baz'",
+ " and: 'bar'",
+ "remains unmatched: 'baz'",
+ ]
+
+ lm = LineMatcher(["foo", "foo", "bar"])
+ with pytest.raises(pytest.fail.Exception) as e:
+ lm.re_match_lines(["foo", "^f.*", "baz"])
+ assert e.value.msg is not None
+ assert e.value.msg.splitlines() == [
+ "exact match: 'foo'",
+ "re.match: '^f.*'",
+ " with: 'foo'",
+ " nomatch: 'baz'",
+ " and: 'bar'",
+ "remains unmatched: 'baz'",
+ ]
+
+
+def test_linematcher_consecutive() -> None:
+ lm = LineMatcher(["1", "", "2"])
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ lm.fnmatch_lines(["1", "2"], consecutive=True)
+ assert str(excinfo.value).splitlines() == [
+ "exact match: '1'",
+ "no consecutive match: '2'",
+ " with: ''",
+ ]
+
+ lm.re_match_lines(["1", r"\d?", "2"], consecutive=True)
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ lm.re_match_lines(["1", r"\d", "2"], consecutive=True)
+ assert str(excinfo.value).splitlines() == [
+ "exact match: '1'",
+ r"no consecutive match: '\\d'",
+ " with: ''",
+ ]
+
+
+@pytest.mark.parametrize("function", ["no_fnmatch_line", "no_re_match_line"])
+def test_linematcher_no_matching(function: str) -> None:
+ if function == "no_fnmatch_line":
+ good_pattern = "*.py OK*"
+ bad_pattern = "*X.py OK*"
+ else:
+ assert function == "no_re_match_line"
+ good_pattern = r".*py OK"
+ bad_pattern = r".*Xpy OK"
+
+ lm = LineMatcher(
+ [
+ "cachedir: .pytest_cache",
+ "collecting ... collected 1 item",
+ "",
+ "show_fixtures_per_test.py OK",
+ "=== elapsed 1s ===",
+ ]
+ )
+
+ # check the function twice to ensure we don't accumulate the internal buffer
+ for i in range(2):
+ with pytest.raises(pytest.fail.Exception) as e:
+ func = getattr(lm, function)
+ func(good_pattern)
+ obtained = str(e.value).splitlines()
+ if function == "no_fnmatch_line":
+ assert obtained == [
+ f"nomatch: '{good_pattern}'",
+ " and: 'cachedir: .pytest_cache'",
+ " and: 'collecting ... collected 1 item'",
+ " and: ''",
+ f"fnmatch: '{good_pattern}'",
+ " with: 'show_fixtures_per_test.py OK'",
+ ]
+ else:
+ assert obtained == [
+ f" nomatch: '{good_pattern}'",
+ " and: 'cachedir: .pytest_cache'",
+ " and: 'collecting ... collected 1 item'",
+ " and: ''",
+ f"re.match: '{good_pattern}'",
+ " with: 'show_fixtures_per_test.py OK'",
+ ]
+
+ func = getattr(lm, function)
+ func(bad_pattern) # bad pattern does not match any line: passes
+
+
+def test_linematcher_no_matching_after_match() -> None:
+ lm = LineMatcher(["1", "2", "3"])
+ lm.fnmatch_lines(["1", "3"])
+ with pytest.raises(pytest.fail.Exception) as e:
+ lm.no_fnmatch_line("*")
+ assert str(e.value).splitlines() == ["fnmatch: '*'", " with: '1'"]
+
+
+def test_linematcher_string_api() -> None:
+ lm = LineMatcher(["foo", "bar"])
+ assert str(lm) == "foo\nbar"
+
+
+def test_pytest_addopts_before_pytester(request, monkeypatch: MonkeyPatch) -> None:
+ orig = os.environ.get("PYTEST_ADDOPTS", None)
+ monkeypatch.setenv("PYTEST_ADDOPTS", "--orig-unused")
+ pytester: Pytester = request.getfixturevalue("pytester")
+ assert "PYTEST_ADDOPTS" not in os.environ
+ pytester._finalize()
+ assert os.environ.get("PYTEST_ADDOPTS") == "--orig-unused"
+ monkeypatch.undo()
+ assert os.environ.get("PYTEST_ADDOPTS") == orig
+
+
+def test_run_stdin(pytester: Pytester) -> None:
+ with pytest.raises(pytester.TimeoutExpired):
+ pytester.run(
+ sys.executable,
+ "-c",
+ "import sys, time; time.sleep(1); print(sys.stdin.read())",
+ stdin=subprocess.PIPE,
+ timeout=0.1,
+ )
+
+ with pytest.raises(pytester.TimeoutExpired):
+ result = pytester.run(
+ sys.executable,
+ "-c",
+ "import sys, time; time.sleep(1); print(sys.stdin.read())",
+ stdin=b"input\n2ndline",
+ timeout=0.1,
+ )
+
+ result = pytester.run(
+ sys.executable,
+ "-c",
+ "import sys; print(sys.stdin.read())",
+ stdin=b"input\n2ndline",
+ )
+ assert result.stdout.lines == ["input", "2ndline"]
+ assert result.stderr.str() == ""
+ assert result.ret == 0
+
+
+def test_popen_stdin_pipe(pytester: Pytester) -> None:
+ proc = pytester.popen(
+ [sys.executable, "-c", "import sys; print(sys.stdin.read())"],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ stdin=subprocess.PIPE,
+ )
+ stdin = b"input\n2ndline"
+ stdout, stderr = proc.communicate(input=stdin)
+ assert stdout.decode("utf8").splitlines() == ["input", "2ndline"]
+ assert stderr == b""
+ assert proc.returncode == 0
+
+
+def test_popen_stdin_bytes(pytester: Pytester) -> None:
+ proc = pytester.popen(
+ [sys.executable, "-c", "import sys; print(sys.stdin.read())"],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ stdin=b"input\n2ndline",
+ )
+ stdout, stderr = proc.communicate()
+ assert stdout.decode("utf8").splitlines() == ["input", "2ndline"]
+ assert stderr == b""
+ assert proc.returncode == 0
+
+
+def test_popen_default_stdin_stderr_and_stdin_None(pytester: Pytester) -> None:
+ # stdout and stderr default to pipes; stdin can be None so that the pipe
+ # is not closed, avoiding "ValueError: flush of closed file" with
+ # `communicate()`.
+ #
+ # The test is wrapped so that it does not hang when run with "-s".
+ p1 = pytester.makepyfile(
+ '''
+ import sys
+
+ def test_inner(pytester):
+ p1 = pytester.makepyfile(
+ """
+ import sys
+ print(sys.stdin.read()) # empty
+ print('stdout')
+ sys.stderr.write('stderr')
+ """
+ )
+ proc = pytester.popen([sys.executable, str(p1)], stdin=None)
+ stdout, stderr = proc.communicate(b"ignored")
+ assert stdout.splitlines() == [b"", b"stdout"]
+ assert stderr.splitlines() == [b"stderr"]
+ assert proc.returncode == 0
+ '''
+ )
+ result = pytester.runpytest("-p", "pytester", str(p1))
+ assert result.ret == 0
+
+
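+# Illustrative sketch (an addition for clarity, not part of the upstream suite): the same
+# stdin distinction with plain subprocess. With stdin=subprocess.PIPE, communicate(input=...)
+# writes the bytes and then closes the pipe; with stdin left as None there is no pipe for
+# communicate() to flush, which is what test_popen_default_stdin_stderr_and_stdin_None above
+# relies on. The helper is deliberately not named test_* so it is never collected.
+def _sketch_subprocess_stdin_pipe() -> None:
+    proc = subprocess.Popen(
+        [sys.executable, "-c", "import sys; print(sys.stdin.read())"],
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+    )
+    out, err = proc.communicate(input=b"input\n2ndline")
+    assert out.splitlines() == [b"input", b"2ndline"]
+    assert err == b""
+
+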
+def test_spawn_uses_tmphome(pytester: Pytester) -> None:
+ tmphome = str(pytester.path)
+ assert os.environ.get("HOME") == tmphome
+
+ pytester._monkeypatch.setenv("CUSTOMENV", "42")
+
+ p1 = pytester.makepyfile(
+ """
+ import os
+
+ def test():
+ assert os.environ["HOME"] == {tmphome!r}
+ assert os.environ["CUSTOMENV"] == "42"
+ """.format(
+ tmphome=tmphome
+ )
+ )
+ child = pytester.spawn_pytest(str(p1))
+ out = child.read()
+ assert child.wait() == 0, out.decode("utf8")
+
+
+def test_run_result_repr() -> None:
+ outlines = ["some", "normal", "output"]
+ errlines = ["some", "nasty", "errors", "happened"]
+
+ # known exit code
+ r = pytester_mod.RunResult(1, outlines, errlines, duration=0.5)
+ assert repr(r) == (
+ f"<RunResult ret={str(pytest.ExitCode.TESTS_FAILED)} len(stdout.lines)=3"
+ " len(stderr.lines)=4 duration=0.50s>"
+ )
+
+ # unknown exit code: just the number
+ r = pytester_mod.RunResult(99, outlines, errlines, duration=0.5)
+ assert (
+ repr(r) == "<RunResult ret=99 len(stdout.lines)=3"
+ " len(stderr.lines)=4 duration=0.50s>"
+ )
+
+
+def test_pytester_outcomes_with_multiple_errors(pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def bad_fixture():
+ raise Exception("bad")
+
+ def test_error1(bad_fixture):
+ pass
+
+ def test_error2(bad_fixture):
+ pass
+ """
+ )
+ result = pytester.runpytest(str(p1))
+ result.assert_outcomes(errors=2)
+
+ assert result.parseoutcomes() == {"errors": 2}
+
+
+def test_parse_summary_line_always_plural() -> None:
+ """Parsing summaries always returns plural nouns (#6505)"""
+ lines = [
+ "some output 1",
+ "some output 2",
+ "======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ====",
+ "done.",
+ ]
+ assert pytester_mod.RunResult.parse_summary_nouns(lines) == {
+ "errors": 1,
+ "failed": 1,
+ "passed": 1,
+ "warnings": 1,
+ }
+
+ lines = [
+ "some output 1",
+ "some output 2",
+ "======= 1 failed, 1 passed, 2 warnings, 2 errors in 0.13s ====",
+ "done.",
+ ]
+ assert pytester_mod.RunResult.parse_summary_nouns(lines) == {
+ "errors": 2,
+ "failed": 1,
+ "passed": 1,
+ "warnings": 2,
+ }
+
+
+def test_makefile_joins_absolute_path(pytester: Pytester) -> None:
+ absfile = pytester.path / "absfile"
+ p1 = pytester.makepyfile(**{str(absfile): ""})
+ assert str(p1) == str(pytester.path / "absfile.py")
+
+
+def test_pytester_makefile_dot_prefixes_extension_with_warning(
+ pytester: Pytester,
+) -> None:
+ with pytest.raises(
+ ValueError,
+ match="pytester.makefile expects a file extension, try .foo.bar instead of foo.bar",
+ ):
+ pytester.makefile("foo.bar", "")
+
+
+@pytest.mark.filterwarnings("default")
+def test_pytester_assert_outcomes_warnings(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import warnings
+
+ def test_with_warning():
+ warnings.warn(UserWarning("some custom warning"))
+ """
+ )
+ result = pytester.runpytest()
+ result.assert_outcomes(passed=1, warnings=1)
+ # If warnings is not passed, it is not checked at all.
+ result.assert_outcomes(passed=1)
+
+
+def test_pytester_outcomes_deselected(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_one():
+ pass
+
+ def test_two():
+ pass
+ """
+ )
+ result = pytester.runpytest("-k", "test_one")
+ result.assert_outcomes(passed=1, deselected=1)
+ # If deselected is not passed, it is not checked at all.
+ result.assert_outcomes(passed=1)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_python_path.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_python_path.py
new file mode 100644
index 0000000000..5ee0f55e36
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_python_path.py
@@ -0,0 +1,110 @@
+import sys
+from textwrap import dedent
+from typing import Generator
+from typing import List
+from typing import Optional
+
+import pytest
+from _pytest.pytester import Pytester
+
+
+@pytest.fixture()
+def file_structure(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_foo="""
+ from foo import foo
+
+ def test_foo():
+ assert foo() == 1
+ """
+ )
+
+ pytester.makepyfile(
+ test_bar="""
+ from bar import bar
+
+ def test_bar():
+ assert bar() == 2
+ """
+ )
+
+ foo_py = pytester.mkdir("sub") / "foo.py"
+ content = dedent(
+ """
+ def foo():
+ return 1
+ """
+ )
+ foo_py.write_text(content, encoding="utf-8")
+
+ bar_py = pytester.mkdir("sub2") / "bar.py"
+ content = dedent(
+ """
+ def bar():
+ return 2
+ """
+ )
+ bar_py.write_text(content, encoding="utf-8")
+
+
+def test_one_dir(pytester: Pytester, file_structure) -> None:
+ pytester.makefile(".ini", pytest="[pytest]\npythonpath=sub\n")
+ result = pytester.runpytest("test_foo.py")
+ assert result.ret == 0
+ result.assert_outcomes(passed=1)
+
+
+def test_two_dirs(pytester: Pytester, file_structure) -> None:
+ pytester.makefile(".ini", pytest="[pytest]\npythonpath=sub sub2\n")
+ result = pytester.runpytest("test_foo.py", "test_bar.py")
+ assert result.ret == 0
+ result.assert_outcomes(passed=2)
+
+
+def test_module_not_found(pytester: Pytester, file_structure) -> None:
+ """Without the pythonpath setting, the module should not be found."""
+ pytester.makefile(".ini", pytest="[pytest]\n")
+ result = pytester.runpytest("test_foo.py")
+ assert result.ret == pytest.ExitCode.INTERRUPTED
+ result.assert_outcomes(errors=1)
+ expected_error = "E ModuleNotFoundError: No module named 'foo'"
+ result.stdout.fnmatch_lines([expected_error])
+
+
+def test_no_ini(pytester: Pytester, file_structure) -> None:
+ """If no ini file, test should error."""
+ result = pytester.runpytest("test_foo.py")
+ assert result.ret == pytest.ExitCode.INTERRUPTED
+ result.assert_outcomes(errors=1)
+ expected_error = "E ModuleNotFoundError: No module named 'foo'"
+ result.stdout.fnmatch_lines([expected_error])
+
+
+def test_clean_up(pytester: Pytester) -> None:
+ """Test that the plugin cleans up after itself."""
+ # This is tough to test behaviorally because the cleanup really runs last.
+ # So the test makes several implementation assumptions:
+ # - Cleanup is done in pytest_unconfigure().
+ # - Not a hookwrapper.
+ # So we can add a hookwrapper ourselves to test what it does.
+ pytester.makefile(".ini", pytest="[pytest]\npythonpath=I_SHALL_BE_REMOVED\n")
+ pytester.makepyfile(test_foo="""def test_foo(): pass""")
+
+ before: Optional[List[str]] = None
+ after: Optional[List[str]] = None
+
+ class Plugin:
+ @pytest.hookimpl(hookwrapper=True, tryfirst=True)
+ def pytest_unconfigure(self) -> Generator[None, None, None]:
+ nonlocal before, after
+ before = sys.path.copy()
+ yield
+ after = sys.path.copy()
+
+ result = pytester.runpytest_inprocess(plugins=[Plugin()])
+ assert result.ret == 0
+
+ assert before is not None
+ assert after is not None
+ assert any("I_SHALL_BE_REMOVED" in entry for entry in before)
+ assert not any("I_SHALL_BE_REMOVED" in entry for entry in after)
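+
+
+# Conceptual sketch (an assumption about the expected behaviour, not the real
+# _pytest.python_path implementation): the tests above expect roughly "prepend the
+# configured entries to sys.path for the session, then drop them again during
+# pytest_unconfigure", which is what test_clean_up observes. Not collected as a test.
+def _sketch_pythonpath_roundtrip() -> None:
+    original = list(sys.path)
+    entries = ["sub", "sub2"]  # hypothetical ini values
+    for entry in reversed(entries):
+        sys.path.insert(0, entry)
+    assert sys.path[:2] == entries
+    # the cleanup step observed by test_clean_up above:
+    del sys.path[:2]
+    assert sys.path == original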
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_recwarn.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_recwarn.py
new file mode 100644
index 0000000000..d3f218f166
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_recwarn.py
@@ -0,0 +1,410 @@
+import re
+import warnings
+from typing import Optional
+
+import pytest
+from _pytest.pytester import Pytester
+from _pytest.recwarn import WarningsRecorder
+
+
+def test_recwarn_stacklevel(recwarn: WarningsRecorder) -> None:
+ warnings.warn("hello")
+ warn = recwarn.pop()
+ assert warn.filename == __file__
+
+
+def test_recwarn_functional(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import warnings
+ def test_method(recwarn):
+ warnings.warn("hello")
+ warn = recwarn.pop()
+ assert isinstance(warn.message, UserWarning)
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+@pytest.mark.filterwarnings("")
+def test_recwarn_captures_deprecation_warning(recwarn: WarningsRecorder) -> None:
+ """
+ Check that recwarn can capture DeprecationWarning by default
+ without custom filterwarnings (see #8666).
+ """
+ warnings.warn(DeprecationWarning("some deprecation"))
+ assert len(recwarn) == 1
+ assert recwarn.pop(DeprecationWarning)
+
+
+class TestWarningsRecorderChecker:
+ def test_recording(self) -> None:
+ rec = WarningsRecorder(_ispytest=True)
+ with rec:
+ assert not rec.list
+ warnings.warn_explicit("hello", UserWarning, "xyz", 13)
+ assert len(rec.list) == 1
+ warnings.warn(DeprecationWarning("hello"))
+ assert len(rec.list) == 2
+ warn = rec.pop()
+ assert str(warn.message) == "hello"
+ values = rec.list
+ rec.clear()
+ assert len(rec.list) == 0
+ assert values is rec.list
+ pytest.raises(AssertionError, rec.pop)
+
+ def test_warn_stacklevel(self) -> None:
+ """#4243"""
+ rec = WarningsRecorder(_ispytest=True)
+ with rec:
+ warnings.warn("test", DeprecationWarning, 2)
+
+ def test_typechecking(self) -> None:
+ from _pytest.recwarn import WarningsChecker
+
+ with pytest.raises(TypeError):
+ WarningsChecker(5, _ispytest=True) # type: ignore[arg-type]
+ with pytest.raises(TypeError):
+ WarningsChecker(("hi", RuntimeWarning), _ispytest=True) # type: ignore[arg-type]
+ with pytest.raises(TypeError):
+ WarningsChecker([DeprecationWarning, RuntimeWarning], _ispytest=True) # type: ignore[arg-type]
+
+ def test_invalid_enter_exit(self) -> None:
+ # wrap this test in WarningsRecorder to ensure warning state gets reset
+ with WarningsRecorder(_ispytest=True):
+ with pytest.raises(RuntimeError):
+ rec = WarningsRecorder(_ispytest=True)
+ rec.__exit__(None, None, None) # can't exit before entering
+
+ with pytest.raises(RuntimeError):
+ rec = WarningsRecorder(_ispytest=True)
+ with rec:
+ with rec:
+ pass # can't enter twice
+
+
+class TestDeprecatedCall:
+ """test pytest.deprecated_call()"""
+
+ def dep(self, i: int, j: Optional[int] = None) -> int:
+ if i == 0:
+ warnings.warn("is deprecated", DeprecationWarning, stacklevel=1)
+ return 42
+
+ def dep_explicit(self, i: int) -> None:
+ if i == 0:
+ warnings.warn_explicit(
+ "dep_explicit", category=DeprecationWarning, filename="hello", lineno=3
+ )
+
+ def test_deprecated_call_raises(self) -> None:
+ with pytest.raises(pytest.fail.Exception, match="No warnings of type"):
+ pytest.deprecated_call(self.dep, 3, 5)
+
+ def test_deprecated_call(self) -> None:
+ pytest.deprecated_call(self.dep, 0, 5)
+
+ def test_deprecated_call_ret(self) -> None:
+ ret = pytest.deprecated_call(self.dep, 0)
+ assert ret == 42
+
+ def test_deprecated_call_preserves(self) -> None:
+ # Type ignored because `onceregistry` and `filters` are not
+ # documented API.
+ onceregistry = warnings.onceregistry.copy() # type: ignore
+ filters = warnings.filters[:] # type: ignore
+ warn = warnings.warn
+ warn_explicit = warnings.warn_explicit
+ self.test_deprecated_call_raises()
+ self.test_deprecated_call()
+ assert onceregistry == warnings.onceregistry # type: ignore
+ assert filters == warnings.filters # type: ignore
+ assert warn is warnings.warn
+ assert warn_explicit is warnings.warn_explicit
+
+ def test_deprecated_explicit_call_raises(self) -> None:
+ with pytest.raises(pytest.fail.Exception):
+ pytest.deprecated_call(self.dep_explicit, 3)
+
+ def test_deprecated_explicit_call(self) -> None:
+ pytest.deprecated_call(self.dep_explicit, 0)
+ pytest.deprecated_call(self.dep_explicit, 0)
+
+ @pytest.mark.parametrize("mode", ["context_manager", "call"])
+ def test_deprecated_call_no_warning(self, mode) -> None:
+ """Ensure deprecated_call() raises the expected failure when its block/function does
+ not raise a deprecation warning.
+ """
+
+ def f():
+ pass
+
+ msg = "No warnings of type (.*DeprecationWarning.*, .*PendingDeprecationWarning.*)"
+ with pytest.raises(pytest.fail.Exception, match=msg):
+ if mode == "call":
+ pytest.deprecated_call(f)
+ else:
+ with pytest.deprecated_call():
+ f()
+
+ @pytest.mark.parametrize(
+ "warning_type", [PendingDeprecationWarning, DeprecationWarning]
+ )
+ @pytest.mark.parametrize("mode", ["context_manager", "call"])
+ @pytest.mark.parametrize("call_f_first", [True, False])
+ @pytest.mark.filterwarnings("ignore")
+ def test_deprecated_call_modes(self, warning_type, mode, call_f_first) -> None:
+ """Ensure deprecated_call() captures a deprecation warning as expected inside its
+ block/function.
+ """
+
+ def f():
+ warnings.warn(warning_type("hi"))
+ return 10
+
+ # ensure deprecated_call() can capture the warning even if it has already been triggered
+ if call_f_first:
+ assert f() == 10
+ if mode == "call":
+ assert pytest.deprecated_call(f) == 10
+ else:
+ with pytest.deprecated_call():
+ assert f() == 10
+
+ @pytest.mark.parametrize("mode", ["context_manager", "call"])
+ def test_deprecated_call_exception_is_raised(self, mode) -> None:
+ """If the block of the code being tested by deprecated_call() raises an exception,
+ it must raise the exception undisturbed.
+ """
+
+ def f():
+ raise ValueError("some exception")
+
+ with pytest.raises(ValueError, match="some exception"):
+ if mode == "call":
+ pytest.deprecated_call(f)
+ else:
+ with pytest.deprecated_call():
+ f()
+
+ def test_deprecated_call_specificity(self) -> None:
+ other_warnings = [
+ Warning,
+ UserWarning,
+ SyntaxWarning,
+ RuntimeWarning,
+ FutureWarning,
+ ImportWarning,
+ UnicodeWarning,
+ ]
+ for warning in other_warnings:
+
+ def f():
+ warnings.warn(warning("hi"))
+
+ with pytest.raises(pytest.fail.Exception):
+ pytest.deprecated_call(f)
+ with pytest.raises(pytest.fail.Exception):
+ with pytest.deprecated_call():
+ f()
+
+ def test_deprecated_call_supports_match(self) -> None:
+ with pytest.deprecated_call(match=r"must be \d+$"):
+ warnings.warn("value must be 42", DeprecationWarning)
+
+ with pytest.raises(pytest.fail.Exception):
+ with pytest.deprecated_call(match=r"must be \d+$"):
+ warnings.warn("this is not here", DeprecationWarning)
+
+
+class TestWarns:
+ def test_check_callable(self) -> None:
+ source = "warnings.warn('w1', RuntimeWarning)"
+ with pytest.raises(TypeError, match=r".* must be callable"):
+ pytest.warns(RuntimeWarning, source) # type: ignore
+
+ def test_several_messages(self) -> None:
+ # different messages, b/c Python suppresses multiple identical warnings
+ pytest.warns(RuntimeWarning, lambda: warnings.warn("w1", RuntimeWarning))
+ with pytest.raises(pytest.fail.Exception):
+ pytest.warns(UserWarning, lambda: warnings.warn("w2", RuntimeWarning))
+ pytest.warns(RuntimeWarning, lambda: warnings.warn("w3", RuntimeWarning))
+
+ def test_function(self) -> None:
+ pytest.warns(
+ SyntaxWarning, lambda msg: warnings.warn(msg, SyntaxWarning), "syntax"
+ )
+
+ def test_warning_tuple(self) -> None:
+ pytest.warns(
+ (RuntimeWarning, SyntaxWarning), lambda: warnings.warn("w1", RuntimeWarning)
+ )
+ pytest.warns(
+ (RuntimeWarning, SyntaxWarning), lambda: warnings.warn("w2", SyntaxWarning)
+ )
+ pytest.raises(
+ pytest.fail.Exception,
+ lambda: pytest.warns(
+ (RuntimeWarning, SyntaxWarning),
+ lambda: warnings.warn("w3", UserWarning),
+ ),
+ )
+
+ def test_as_contextmanager(self) -> None:
+ with pytest.warns(RuntimeWarning):
+ warnings.warn("runtime", RuntimeWarning)
+
+ with pytest.warns(UserWarning):
+ warnings.warn("user", UserWarning)
+
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ with pytest.warns(RuntimeWarning):
+ warnings.warn("user", UserWarning)
+ excinfo.match(
+ r"DID NOT WARN. No warnings of type \(.+RuntimeWarning.+,\) were emitted. "
+ r"The list of emitted warnings is: \[UserWarning\('user',?\)\]."
+ )
+
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ with pytest.warns(UserWarning):
+ warnings.warn("runtime", RuntimeWarning)
+ excinfo.match(
+ r"DID NOT WARN. No warnings of type \(.+UserWarning.+,\) were emitted. "
+ r"The list of emitted warnings is: \[RuntimeWarning\('runtime',?\)\]."
+ )
+
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ with pytest.warns(UserWarning):
+ pass
+ excinfo.match(
+ r"DID NOT WARN. No warnings of type \(.+UserWarning.+,\) were emitted. "
+ r"The list of emitted warnings is: \[\]."
+ )
+
+ warning_classes = (UserWarning, FutureWarning)
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ with pytest.warns(warning_classes) as warninfo:
+ warnings.warn("runtime", RuntimeWarning)
+ warnings.warn("import", ImportWarning)
+
+ message_template = (
+ "DID NOT WARN. No warnings of type {0} were emitted. "
+ "The list of emitted warnings is: {1}."
+ )
+ excinfo.match(
+ re.escape(
+ message_template.format(
+ warning_classes, [each.message for each in warninfo]
+ )
+ )
+ )
+
+ def test_record(self) -> None:
+ with pytest.warns(UserWarning) as record:
+ warnings.warn("user", UserWarning)
+
+ assert len(record) == 1
+ assert str(record[0].message) == "user"
+
+ def test_record_only(self) -> None:
+ with pytest.warns() as record:
+ warnings.warn("user", UserWarning)
+ warnings.warn("runtime", RuntimeWarning)
+
+ assert len(record) == 2
+ assert str(record[0].message) == "user"
+ assert str(record[1].message) == "runtime"
+
+ def test_record_only_none_deprecated_warn(self) -> None:
+ # This should become an error when WARNS_NONE_ARG is removed in pytest 8.0.
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ with pytest.warns(None) as record: # type: ignore[call-overload]
+ warnings.warn("user", UserWarning)
+ warnings.warn("runtime", RuntimeWarning)
+
+ assert len(record) == 2
+ assert str(record[0].message) == "user"
+ assert str(record[1].message) == "runtime"
+
+ def test_record_by_subclass(self) -> None:
+ with pytest.warns(Warning) as record:
+ warnings.warn("user", UserWarning)
+ warnings.warn("runtime", RuntimeWarning)
+
+ assert len(record) == 2
+ assert str(record[0].message) == "user"
+ assert str(record[1].message) == "runtime"
+
+ class MyUserWarning(UserWarning):
+ pass
+
+ class MyRuntimeWarning(RuntimeWarning):
+ pass
+
+ with pytest.warns((UserWarning, RuntimeWarning)) as record:
+ warnings.warn("user", MyUserWarning)
+ warnings.warn("runtime", MyRuntimeWarning)
+
+ assert len(record) == 2
+ assert str(record[0].message) == "user"
+ assert str(record[1].message) == "runtime"
+
+ def test_double_test(self, pytester: Pytester) -> None:
+ """If a test is run again, the warning should still be raised"""
+ pytester.makepyfile(
+ """
+ import pytest
+ import warnings
+
+ @pytest.mark.parametrize('run', [1, 2])
+ def test(run):
+ with pytest.warns(RuntimeWarning):
+ warnings.warn("runtime", RuntimeWarning)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*2 passed in*"])
+
+ def test_match_regex(self) -> None:
+ with pytest.warns(UserWarning, match=r"must be \d+$"):
+ warnings.warn("value must be 42", UserWarning)
+
+ with pytest.raises(pytest.fail.Exception):
+ with pytest.warns(UserWarning, match=r"must be \d+$"):
+ warnings.warn("this is not here", UserWarning)
+
+ with pytest.raises(pytest.fail.Exception):
+ with pytest.warns(FutureWarning, match=r"must be \d+$"):
+ warnings.warn("value must be 42", UserWarning)
+
+ def test_one_from_multiple_warns(self) -> None:
+ with pytest.warns(UserWarning, match=r"aaa"):
+ warnings.warn("cccccccccc", UserWarning)
+ warnings.warn("bbbbbbbbbb", UserWarning)
+ warnings.warn("aaaaaaaaaa", UserWarning)
+
+ def test_none_of_multiple_warns(self) -> None:
+ with pytest.raises(pytest.fail.Exception):
+ with pytest.warns(UserWarning, match=r"aaa"):
+ warnings.warn("bbbbbbbbbb", UserWarning)
+ warnings.warn("cccccccccc", UserWarning)
+
+ @pytest.mark.filterwarnings("ignore")
+ def test_can_capture_previously_warned(self) -> None:
+ def f() -> int:
+ warnings.warn(UserWarning("ohai"))
+ return 10
+
+ assert f() == 10
+ assert pytest.warns(UserWarning, f) == 10
+ assert pytest.warns(UserWarning, f) == 10
+ assert pytest.warns(UserWarning, f) != "10" # type: ignore[comparison-overlap]
+
+ def test_warns_context_manager_with_kwargs(self) -> None:
+ with pytest.raises(TypeError) as excinfo:
+ with pytest.warns(UserWarning, foo="bar"): # type: ignore
+ pass
+ assert "Unexpected keyword arguments" in str(excinfo.value)
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_reports.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_reports.py
new file mode 100644
index 0000000000..31b6cf1afc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_reports.py
@@ -0,0 +1,488 @@
+from typing import Sequence
+from typing import Union
+
+import pytest
+from _pytest._code.code import ExceptionChainRepr
+from _pytest._code.code import ExceptionRepr
+from _pytest.config import Config
+from _pytest.pytester import Pytester
+from _pytest.reports import CollectReport
+from _pytest.reports import TestReport
+
+
+class TestReportSerialization:
+ def test_xdist_longrepr_to_str_issue_241(self, pytester: Pytester) -> None:
+ """Regarding issue pytest-xdist#241.
+
+ This test came originally from test_remote.py in xdist (ca03269).
+ """
+ pytester.makepyfile(
+ """
+ def test_a(): assert False
+ def test_b(): pass
+ """
+ )
+ reprec = pytester.inline_run()
+ reports = reprec.getreports("pytest_runtest_logreport")
+ assert len(reports) == 6
+ test_a_call = reports[1]
+ assert test_a_call.when == "call"
+ assert test_a_call.outcome == "failed"
+ assert test_a_call._to_json()["longrepr"]["reprtraceback"]["style"] == "long"
+ test_b_call = reports[4]
+ assert test_b_call.when == "call"
+ assert test_b_call.outcome == "passed"
+ assert test_b_call._to_json()["longrepr"] is None
+
+ def test_xdist_report_longrepr_reprcrash_130(self, pytester: Pytester) -> None:
+ """Regarding issue pytest-xdist#130
+
+ This test came originally from test_remote.py in xdist (ca03269).
+ """
+ reprec = pytester.inline_runsource(
+ """
+ def test_fail():
+ assert False, 'Expected Message'
+ """
+ )
+ reports = reprec.getreports("pytest_runtest_logreport")
+ assert len(reports) == 3
+ rep = reports[1]
+ added_section = ("Failure Metadata", "metadata metadata", "*")
+ assert isinstance(rep.longrepr, ExceptionRepr)
+ rep.longrepr.sections.append(added_section)
+ d = rep._to_json()
+ a = TestReport._from_json(d)
+ assert isinstance(a.longrepr, ExceptionRepr)
+ # Check assembled == rep
+ assert a.__dict__.keys() == rep.__dict__.keys()
+ for key in rep.__dict__.keys():
+ if key != "longrepr":
+ assert getattr(a, key) == getattr(rep, key)
+ assert rep.longrepr.reprcrash is not None
+ assert a.longrepr.reprcrash is not None
+ assert rep.longrepr.reprcrash.lineno == a.longrepr.reprcrash.lineno
+ assert rep.longrepr.reprcrash.message == a.longrepr.reprcrash.message
+ assert rep.longrepr.reprcrash.path == a.longrepr.reprcrash.path
+ assert rep.longrepr.reprtraceback.entrysep == a.longrepr.reprtraceback.entrysep
+ assert (
+ rep.longrepr.reprtraceback.extraline == a.longrepr.reprtraceback.extraline
+ )
+ assert rep.longrepr.reprtraceback.style == a.longrepr.reprtraceback.style
+ assert rep.longrepr.sections == a.longrepr.sections
+ # Missing section attribute PR171
+ assert added_section in a.longrepr.sections
+
+ def test_reprentries_serialization_170(self, pytester: Pytester) -> None:
+ """Regarding issue pytest-xdist#170
+
+ This test came originally from test_remote.py in xdist (ca03269).
+ """
+ from _pytest._code.code import ReprEntry
+
+ reprec = pytester.inline_runsource(
+ """
+ def test_repr_entry():
+ x = 0
+ assert x
+ """,
+ "--showlocals",
+ )
+ reports = reprec.getreports("pytest_runtest_logreport")
+ assert len(reports) == 3
+ rep = reports[1]
+ assert isinstance(rep.longrepr, ExceptionRepr)
+ d = rep._to_json()
+ a = TestReport._from_json(d)
+ assert isinstance(a.longrepr, ExceptionRepr)
+
+ rep_entries = rep.longrepr.reprtraceback.reprentries
+ a_entries = a.longrepr.reprtraceback.reprentries
+ for i in range(len(a_entries)):
+ rep_entry = rep_entries[i]
+ assert isinstance(rep_entry, ReprEntry)
+ assert rep_entry.reprfileloc is not None
+ assert rep_entry.reprfuncargs is not None
+ assert rep_entry.reprlocals is not None
+
+ a_entry = a_entries[i]
+ assert isinstance(a_entry, ReprEntry)
+ assert a_entry.reprfileloc is not None
+ assert a_entry.reprfuncargs is not None
+ assert a_entry.reprlocals is not None
+
+ assert rep_entry.lines == a_entry.lines
+ assert rep_entry.reprfileloc.lineno == a_entry.reprfileloc.lineno
+ assert rep_entry.reprfileloc.message == a_entry.reprfileloc.message
+ assert rep_entry.reprfileloc.path == a_entry.reprfileloc.path
+ assert rep_entry.reprfuncargs.args == a_entry.reprfuncargs.args
+ assert rep_entry.reprlocals.lines == a_entry.reprlocals.lines
+ assert rep_entry.style == a_entry.style
+
+ def test_reprentries_serialization_196(self, pytester: Pytester) -> None:
+ """Regarding issue pytest-xdist#196
+
+ This test came originally from test_remote.py in xdist (ca03269).
+ """
+ from _pytest._code.code import ReprEntryNative
+
+ reprec = pytester.inline_runsource(
+ """
+ def test_repr_entry_native():
+ x = 0
+ assert x
+ """,
+ "--tb=native",
+ )
+ reports = reprec.getreports("pytest_runtest_logreport")
+ assert len(reports) == 3
+ rep = reports[1]
+ assert isinstance(rep.longrepr, ExceptionRepr)
+ d = rep._to_json()
+ a = TestReport._from_json(d)
+ assert isinstance(a.longrepr, ExceptionRepr)
+
+ rep_entries = rep.longrepr.reprtraceback.reprentries
+ a_entries = a.longrepr.reprtraceback.reprentries
+ for i in range(len(a_entries)):
+ assert isinstance(rep_entries[i], ReprEntryNative)
+ assert rep_entries[i].lines == a_entries[i].lines
+
+ def test_itemreport_outcomes(self, pytester: Pytester) -> None:
+ # This test came originally from test_remote.py in xdist (ca03269).
+ reprec = pytester.inline_runsource(
+ """
+ import pytest
+ def test_pass(): pass
+ def test_fail(): 0/0
+ @pytest.mark.skipif("True")
+ def test_skip(): pass
+ def test_skip_imperative():
+ pytest.skip("hello")
+ @pytest.mark.xfail("True")
+ def test_xfail(): 0/0
+ def test_xfail_imperative():
+ pytest.xfail("hello")
+ """
+ )
+ reports = reprec.getreports("pytest_runtest_logreport")
+ assert len(reports) == 17 # with setup/teardown "passed" reports
+ for rep in reports:
+ d = rep._to_json()
+ newrep = TestReport._from_json(d)
+ assert newrep.passed == rep.passed
+ assert newrep.failed == rep.failed
+ assert newrep.skipped == rep.skipped
+ if newrep.skipped and not hasattr(newrep, "wasxfail"):
+ assert isinstance(newrep.longrepr, tuple)
+ assert len(newrep.longrepr) == 3
+ assert newrep.outcome == rep.outcome
+ assert newrep.when == rep.when
+ assert newrep.keywords == rep.keywords
+ if rep.failed:
+ assert newrep.longreprtext == rep.longreprtext
+
+ def test_collectreport_passed(self, pytester: Pytester) -> None:
+ """This test came originally from test_remote.py in xdist (ca03269)."""
+ reprec = pytester.inline_runsource("def test_func(): pass")
+ reports = reprec.getreports("pytest_collectreport")
+ for rep in reports:
+ d = rep._to_json()
+ newrep = CollectReport._from_json(d)
+ assert newrep.passed == rep.passed
+ assert newrep.failed == rep.failed
+ assert newrep.skipped == rep.skipped
+
+ def test_collectreport_fail(self, pytester: Pytester) -> None:
+ """This test came originally from test_remote.py in xdist (ca03269)."""
+ reprec = pytester.inline_runsource("qwe abc")
+ reports = reprec.getreports("pytest_collectreport")
+ assert reports
+ for rep in reports:
+ d = rep._to_json()
+ newrep = CollectReport._from_json(d)
+ assert newrep.passed == rep.passed
+ assert newrep.failed == rep.failed
+ assert newrep.skipped == rep.skipped
+ if rep.failed:
+ assert newrep.longrepr == str(rep.longrepr)
+
+ def test_extended_report_deserialization(self, pytester: Pytester) -> None:
+ """This test came originally from test_remote.py in xdist (ca03269)."""
+ reprec = pytester.inline_runsource("qwe abc")
+ reports = reprec.getreports("pytest_collectreport")
+ assert reports
+ for rep in reports:
+ rep.extra = True # type: ignore[attr-defined]
+ d = rep._to_json()
+ newrep = CollectReport._from_json(d)
+ assert newrep.extra
+ assert newrep.passed == rep.passed
+ assert newrep.failed == rep.failed
+ assert newrep.skipped == rep.skipped
+ if rep.failed:
+ assert newrep.longrepr == str(rep.longrepr)
+
+ def test_paths_support(self, pytester: Pytester) -> None:
+ """Report attributes which are path-like should become strings."""
+ pytester.makepyfile(
+ """
+ def test_a():
+ assert False
+ """
+ )
+
+ class MyPathLike:
+ def __init__(self, path: str) -> None:
+ self.path = path
+
+ def __fspath__(self) -> str:
+ return self.path
+
+ reprec = pytester.inline_run()
+ reports = reprec.getreports("pytest_runtest_logreport")
+ assert len(reports) == 3
+ test_a_call = reports[1]
+ test_a_call.path1 = MyPathLike(str(pytester.path)) # type: ignore[attr-defined]
+ test_a_call.path2 = pytester.path # type: ignore[attr-defined]
+ data = test_a_call._to_json()
+ assert data["path1"] == str(pytester.path)
+ assert data["path2"] == str(pytester.path)
+
+ def test_deserialization_failure(self, pytester: Pytester) -> None:
+ """Check handling of failure during deserialization of report types."""
+ pytester.makepyfile(
+ """
+ def test_a():
+ assert False
+ """
+ )
+ reprec = pytester.inline_run()
+ reports = reprec.getreports("pytest_runtest_logreport")
+ assert len(reports) == 3
+ test_a_call = reports[1]
+ data = test_a_call._to_json()
+ entry = data["longrepr"]["reprtraceback"]["reprentries"][0]
+ assert entry["type"] == "ReprEntry"
+
+ entry["type"] = "Unknown"
+ with pytest.raises(
+ RuntimeError, match="INTERNALERROR: Unknown entry type returned: Unknown"
+ ):
+ TestReport._from_json(data)
+
+ @pytest.mark.parametrize("report_class", [TestReport, CollectReport])
+ def test_chained_exceptions(
+ self, pytester: Pytester, tw_mock, report_class
+ ) -> None:
+ """Check serialization/deserialization of report objects containing chained exceptions (#5786)"""
+ pytester.makepyfile(
+ """
+ def foo():
+ raise ValueError('value error')
+ def test_a():
+ try:
+ foo()
+ except ValueError as e:
+ raise RuntimeError('runtime error') from e
+ if {error_during_import}:
+ test_a()
+ """.format(
+ error_during_import=report_class is CollectReport
+ )
+ )
+
+ reprec = pytester.inline_run()
+ if report_class is TestReport:
+ reports: Union[
+ Sequence[TestReport], Sequence[CollectReport]
+ ] = reprec.getreports("pytest_runtest_logreport")
+ # we have 3 reports: setup/call/teardown
+ assert len(reports) == 3
+ # get the call report
+ report = reports[1]
+ else:
+ assert report_class is CollectReport
+ # two collection reports: session and test file
+ reports = reprec.getreports("pytest_collectreport")
+ assert len(reports) == 2
+ report = reports[1]
+
+ def check_longrepr(longrepr: ExceptionChainRepr) -> None:
+ """Check the attributes of the given longrepr object according to the test file.
+
+ We can get away with testing both CollectReport and TestReport with this function because
+ the longrepr objects are very similar.
+ """
+ assert isinstance(longrepr, ExceptionChainRepr)
+ assert longrepr.sections == [("title", "contents", "=")]
+ assert len(longrepr.chain) == 2
+ entry1, entry2 = longrepr.chain
+ tb1, fileloc1, desc1 = entry1
+ tb2, fileloc2, desc2 = entry2
+
+ assert "ValueError('value error')" in str(tb1)
+ assert "RuntimeError('runtime error')" in str(tb2)
+
+ assert (
+ desc1
+ == "The above exception was the direct cause of the following exception:"
+ )
+ assert desc2 is None
+
+ assert report.failed
+ assert len(report.sections) == 0
+ assert isinstance(report.longrepr, ExceptionChainRepr)
+ report.longrepr.addsection("title", "contents", "=")
+ check_longrepr(report.longrepr)
+
+ data = report._to_json()
+ loaded_report = report_class._from_json(data)
+
+ assert loaded_report.failed
+ check_longrepr(loaded_report.longrepr)
+
+ # make sure we don't blow up on ``toterminal`` call; we don't test the actual output because it is very
+ # brittle and hard to maintain, but we can assume it is correct because ``toterminal`` is already tested
+ # elsewhere and we do check the contents of the longrepr object after loading it.
+ loaded_report.longrepr.toterminal(tw_mock)
+
+ def test_chained_exceptions_no_reprcrash(self, pytester: Pytester, tw_mock) -> None:
+ """Regression test for tracebacks without a reprcrash (#5971)
+
+ This happens notably with exceptions raised by multiprocessing.pool: the exception transfer
+ from the subprocess to the main process creates an artificial exception, which ExceptionInfo
+ can't obtain the ReprFileLocation from.
+ """
+ pytester.makepyfile(
+ """
+ from concurrent.futures import ProcessPoolExecutor
+
+ def func():
+ raise ValueError('value error')
+
+ def test_a():
+ with ProcessPoolExecutor() as p:
+ p.submit(func).result()
+ """
+ )
+
+ pytester.syspathinsert()
+ reprec = pytester.inline_run()
+
+ reports = reprec.getreports("pytest_runtest_logreport")
+
+ def check_longrepr(longrepr: object) -> None:
+ assert isinstance(longrepr, ExceptionChainRepr)
+ assert len(longrepr.chain) == 2
+ entry1, entry2 = longrepr.chain
+ tb1, fileloc1, desc1 = entry1
+ tb2, fileloc2, desc2 = entry2
+
+ assert "RemoteTraceback" in str(tb1)
+ assert "ValueError: value error" in str(tb2)
+
+ assert fileloc1 is None
+ assert fileloc2 is not None
+ assert fileloc2.message == "ValueError: value error"
+
+ # 3 reports: setup/call/teardown: get the call report
+ assert len(reports) == 3
+ report = reports[1]
+
+ assert report.failed
+ check_longrepr(report.longrepr)
+
+ data = report._to_json()
+ loaded_report = TestReport._from_json(data)
+
+ assert loaded_report.failed
+ check_longrepr(loaded_report.longrepr)
+
+ # for the same reasons as the previous test, ensure we don't blow up here
+ assert loaded_report.longrepr is not None
+ assert isinstance(loaded_report.longrepr, ExceptionChainRepr)
+ loaded_report.longrepr.toterminal(tw_mock)
+
+ def test_report_prevent_ConftestImportFailure_hiding_exception(
+ self, pytester: Pytester
+ ) -> None:
+ sub_dir = pytester.path.joinpath("ns")
+ sub_dir.mkdir()
+ sub_dir.joinpath("conftest.py").write_text("import unknown")
+
+ result = pytester.runpytest_subprocess(".")
+ result.stdout.fnmatch_lines(["E *Error: No module named 'unknown'"])
+ result.stdout.no_fnmatch_line("ERROR - *ConftestImportFailure*")
+
+
+class TestHooks:
+ """Test that the hooks are working correctly for plugins"""
+
+ def test_test_report(self, pytester: Pytester, pytestconfig: Config) -> None:
+ pytester.makepyfile(
+ """
+ def test_a(): assert False
+ def test_b(): pass
+ """
+ )
+ reprec = pytester.inline_run()
+ reports = reprec.getreports("pytest_runtest_logreport")
+ assert len(reports) == 6
+ for rep in reports:
+ data = pytestconfig.hook.pytest_report_to_serializable(
+ config=pytestconfig, report=rep
+ )
+ assert data["$report_type"] == "TestReport"
+ new_rep = pytestconfig.hook.pytest_report_from_serializable(
+ config=pytestconfig, data=data
+ )
+ assert new_rep.nodeid == rep.nodeid
+ assert new_rep.when == rep.when
+ assert new_rep.outcome == rep.outcome
+
+ def test_collect_report(self, pytester: Pytester, pytestconfig: Config) -> None:
+ pytester.makepyfile(
+ """
+ def test_a(): assert False
+ def test_b(): pass
+ """
+ )
+ reprec = pytester.inline_run()
+ reports = reprec.getreports("pytest_collectreport")
+ assert len(reports) == 2
+ for rep in reports:
+ data = pytestconfig.hook.pytest_report_to_serializable(
+ config=pytestconfig, report=rep
+ )
+ assert data["$report_type"] == "CollectReport"
+ new_rep = pytestconfig.hook.pytest_report_from_serializable(
+ config=pytestconfig, data=data
+ )
+ assert new_rep.nodeid == rep.nodeid
+ assert new_rep.when == "collect"
+ assert new_rep.outcome == rep.outcome
+
+ @pytest.mark.parametrize(
+ "hook_name", ["pytest_runtest_logreport", "pytest_collectreport"]
+ )
+ def test_invalid_report_types(
+ self, pytester: Pytester, pytestconfig: Config, hook_name: str
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_a(): pass
+ """
+ )
+ reprec = pytester.inline_run()
+ reports = reprec.getreports(hook_name)
+ assert reports
+ rep = reports[0]
+ data = pytestconfig.hook.pytest_report_to_serializable(
+ config=pytestconfig, report=rep
+ )
+ data["$report_type"] = "Unknown"
+ with pytest.raises(AssertionError):
+ _ = pytestconfig.hook.pytest_report_from_serializable(
+ config=pytestconfig, data=data
+ )
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_runner.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_runner.py
new file mode 100644
index 0000000000..2e2c462d97
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_runner.py
@@ -0,0 +1,1061 @@
+import inspect
+import os
+import sys
+import types
+from pathlib import Path
+from typing import Dict
+from typing import List
+from typing import Tuple
+from typing import Type
+
+import pytest
+from _pytest import outcomes
+from _pytest import reports
+from _pytest import runner
+from _pytest._code import ExceptionInfo
+from _pytest._code.code import ExceptionChainRepr
+from _pytest.config import ExitCode
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.outcomes import OutcomeException
+from _pytest.pytester import Pytester
+
+
+class TestSetupState:
+ def test_setup(self, pytester: Pytester) -> None:
+ item = pytester.getitem("def test_func(): pass")
+ ss = item.session._setupstate
+ values = [1]
+ ss.setup(item)
+ ss.addfinalizer(values.pop, item)
+ assert values
+ ss.teardown_exact(None)
+ assert not values
+
+ def test_teardown_exact_stack_empty(self, pytester: Pytester) -> None:
+ item = pytester.getitem("def test_func(): pass")
+ ss = item.session._setupstate
+ ss.setup(item)
+ ss.teardown_exact(None)
+ ss.teardown_exact(None)
+ ss.teardown_exact(None)
+
+ def test_setup_fails_and_failure_is_cached(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ def setup_module(mod):
+ raise ValueError(42)
+ def test_func(): pass
+ """
+ )
+ ss = item.session._setupstate
+ with pytest.raises(ValueError):
+ ss.setup(item)
+ with pytest.raises(ValueError):
+ ss.setup(item)
+
+ def test_teardown_multiple_one_fails(self, pytester: Pytester) -> None:
+ r = []
+
+ def fin1():
+ r.append("fin1")
+
+ def fin2():
+ raise Exception("oops")
+
+ def fin3():
+ r.append("fin3")
+
+ item = pytester.getitem("def test_func(): pass")
+ ss = item.session._setupstate
+ ss.setup(item)
+ ss.addfinalizer(fin1, item)
+ ss.addfinalizer(fin2, item)
+ ss.addfinalizer(fin3, item)
+ with pytest.raises(Exception) as err:
+ ss.teardown_exact(None)
+ assert err.value.args == ("oops",)
+ assert r == ["fin3", "fin1"]
+
+ def test_teardown_multiple_fail(self, pytester: Pytester) -> None:
+ # Ensure the first exception is the one which is re-raised.
+ # Ideally both would be reported however.
+ def fin1():
+ raise Exception("oops1")
+
+ def fin2():
+ raise Exception("oops2")
+
+ item = pytester.getitem("def test_func(): pass")
+ ss = item.session._setupstate
+ ss.setup(item)
+ ss.addfinalizer(fin1, item)
+ ss.addfinalizer(fin2, item)
+ with pytest.raises(Exception) as err:
+ ss.teardown_exact(None)
+ assert err.value.args == ("oops2",)
+
+ def test_teardown_multiple_scopes_one_fails(self, pytester: Pytester) -> None:
+ module_teardown = []
+
+ def fin_func():
+ raise Exception("oops1")
+
+ def fin_module():
+ module_teardown.append("fin_module")
+
+ item = pytester.getitem("def test_func(): pass")
+ mod = item.listchain()[-2]
+ ss = item.session._setupstate
+ ss.setup(item)
+ ss.addfinalizer(fin_module, mod)
+ ss.addfinalizer(fin_func, item)
+ with pytest.raises(Exception, match="oops1"):
+ ss.teardown_exact(None)
+ assert module_teardown == ["fin_module"]
+
+
+class BaseFunctionalTests:
+ def test_passfunction(self, pytester: Pytester) -> None:
+ reports = pytester.runitem(
+ """
+ def test_func():
+ pass
+ """
+ )
+ rep = reports[1]
+ assert rep.passed
+ assert not rep.failed
+ assert rep.outcome == "passed"
+ assert not rep.longrepr
+
+ def test_failfunction(self, pytester: Pytester) -> None:
+ reports = pytester.runitem(
+ """
+ def test_func():
+ assert 0
+ """
+ )
+ rep = reports[1]
+ assert not rep.passed
+ assert not rep.skipped
+ assert rep.failed
+ assert rep.when == "call"
+ assert rep.outcome == "failed"
+ # assert isinstance(rep.longrepr, ReprExceptionInfo)
+
+ def test_skipfunction(self, pytester: Pytester) -> None:
+ reports = pytester.runitem(
+ """
+ import pytest
+ def test_func():
+ pytest.skip("hello")
+ """
+ )
+ rep = reports[1]
+ assert not rep.failed
+ assert not rep.passed
+ assert rep.skipped
+ assert rep.outcome == "skipped"
+ # assert rep.skipped.when == "call"
+ # assert rep.skipped.reason == "hello"
+ # assert rep.skipped.location.lineno == 3
+ # assert rep.skipped.location.path
+ # assert not rep.skipped.failurerepr
+
+ def test_skip_in_setup_function(self, pytester: Pytester) -> None:
+ reports = pytester.runitem(
+ """
+ import pytest
+ def setup_function(func):
+ pytest.skip("hello")
+ def test_func():
+ pass
+ """
+ )
+ print(reports)
+ rep = reports[0]
+ assert not rep.failed
+ assert not rep.passed
+ assert rep.skipped
+ # assert rep.skipped.reason == "hello"
+ # assert rep.skipped.location.lineno == 3
+ assert len(reports) == 2
+ assert reports[1].passed # teardown
+
+ def test_failure_in_setup_function(self, pytester: Pytester) -> None:
+ reports = pytester.runitem(
+ """
+ import pytest
+ def setup_function(func):
+ raise ValueError(42)
+ def test_func():
+ pass
+ """
+ )
+ rep = reports[0]
+ assert not rep.skipped
+ assert not rep.passed
+ assert rep.failed
+ assert rep.when == "setup"
+ assert len(reports) == 2
+
+ def test_failure_in_teardown_function(self, pytester: Pytester) -> None:
+ reports = pytester.runitem(
+ """
+ import pytest
+ def teardown_function(func):
+ raise ValueError(42)
+ def test_func():
+ pass
+ """
+ )
+ print(reports)
+ assert len(reports) == 3
+ rep = reports[2]
+ assert not rep.skipped
+ assert not rep.passed
+ assert rep.failed
+ assert rep.when == "teardown"
+ # assert rep.longrepr.reprcrash.lineno == 3
+ # assert rep.longrepr.reprtraceback.reprentries
+
+ def test_custom_failure_repr(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ conftest="""
+ import pytest
+ class Function(pytest.Function):
+ def repr_failure(self, excinfo):
+ return "hello"
+ """
+ )
+ reports = pytester.runitem(
+ """
+ import pytest
+ def test_func():
+ assert 0
+ """
+ )
+ rep = reports[1]
+ assert not rep.skipped
+ assert not rep.passed
+ assert rep.failed
+ # assert rep.outcome.when == "call"
+ # assert rep.failed.where.lineno == 3
+ # assert rep.failed.where.path.basename == "test_func.py"
+ # assert rep.failed.failurerepr == "hello"
+
+ def test_teardown_final_returncode(self, pytester: Pytester) -> None:
+ rec = pytester.inline_runsource(
+ """
+ def test_func():
+ pass
+ def teardown_function(func):
+ raise ValueError(42)
+ """
+ )
+ assert rec.ret == 1
+
+ def test_logstart_logfinish_hooks(self, pytester: Pytester) -> None:
+ rec = pytester.inline_runsource(
+ """
+ import pytest
+ def test_func():
+ pass
+ """
+ )
+ reps = rec.getcalls("pytest_runtest_logstart pytest_runtest_logfinish")
+ assert [x._name for x in reps] == [
+ "pytest_runtest_logstart",
+ "pytest_runtest_logfinish",
+ ]
+ for rep in reps:
+ assert rep.nodeid == "test_logstart_logfinish_hooks.py::test_func"
+ assert rep.location == ("test_logstart_logfinish_hooks.py", 1, "test_func")
+
+ def test_exact_teardown_issue90(self, pytester: Pytester) -> None:
+ rec = pytester.inline_runsource(
+ """
+ import pytest
+
+ class TestClass(object):
+ def test_method(self):
+ pass
+ def teardown_class(cls):
+ raise Exception()
+
+ def test_func():
+ import sys
+ # on python2 exc_info is kept till a function exits
+ # so we would end up calling test functions while
+ # sys.exc_info would return the indexerror
+ # from guessing the lastitem
+ excinfo = sys.exc_info()
+ import traceback
+ assert excinfo[0] is None, \
+ traceback.format_exception(*excinfo)
+ def teardown_function(func):
+ raise ValueError(42)
+ """
+ )
+ reps = rec.getreports("pytest_runtest_logreport")
+ print(reps)
+ for i in range(2):
+ assert reps[i].nodeid.endswith("test_method")
+ assert reps[i].passed
+ assert reps[2].when == "teardown"
+ assert reps[2].failed
+ assert len(reps) == 6
+ for i in range(3, 5):
+ assert reps[i].nodeid.endswith("test_func")
+ assert reps[i].passed
+ assert reps[5].when == "teardown"
+ assert reps[5].nodeid.endswith("test_func")
+ assert reps[5].failed
+
+ def test_exact_teardown_issue1206(self, pytester: Pytester) -> None:
+ """Issue shadowing error with wrong number of arguments on teardown_method."""
+ rec = pytester.inline_runsource(
+ """
+ import pytest
+
+ class TestClass(object):
+ def teardown_method(self, x, y, z):
+ pass
+
+ def test_method(self):
+ assert True
+ """
+ )
+ reps = rec.getreports("pytest_runtest_logreport")
+ print(reps)
+ assert len(reps) == 3
+ #
+ assert reps[0].nodeid.endswith("test_method")
+ assert reps[0].passed
+ assert reps[0].when == "setup"
+ #
+ assert reps[1].nodeid.endswith("test_method")
+ assert reps[1].passed
+ assert reps[1].when == "call"
+ #
+ assert reps[2].nodeid.endswith("test_method")
+ assert reps[2].failed
+ assert reps[2].when == "teardown"
+ longrepr = reps[2].longrepr
+ assert isinstance(longrepr, ExceptionChainRepr)
+ assert longrepr.reprcrash
+ assert longrepr.reprcrash.message in (
+ "TypeError: teardown_method() missing 2 required positional arguments: 'y' and 'z'",
+ # Python >= 3.10
+ "TypeError: TestClass.teardown_method() missing 2 required positional arguments: 'y' and 'z'",
+ )
+
+ def test_failure_in_setup_function_ignores_custom_repr(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ conftest="""
+ import pytest
+ class Function(pytest.Function):
+ def repr_failure(self, excinfo):
+ assert 0
+ """
+ )
+ reports = pytester.runitem(
+ """
+ def setup_function(func):
+ raise ValueError(42)
+ def test_func():
+ pass
+ """
+ )
+ assert len(reports) == 2
+ rep = reports[0]
+ print(rep)
+ assert not rep.skipped
+ assert not rep.passed
+ assert rep.failed
+ # assert rep.outcome.when == "setup"
+ # assert rep.outcome.where.lineno == 3
+ # assert rep.outcome.where.path.basename == "test_func.py"
+ # assert isinstance(rep.failed.failurerepr, PythonFailureRepr)
+
+ def test_systemexit_does_not_bail_out(self, pytester: Pytester) -> None:
+ try:
+ reports = pytester.runitem(
+ """
+ def test_func():
+ raise SystemExit(42)
+ """
+ )
+ except SystemExit:
+ assert False, "runner did not catch SystemExit"
+ rep = reports[1]
+ assert rep.failed
+ assert rep.when == "call"
+
+ def test_exit_propagates(self, pytester: Pytester) -> None:
+ try:
+ pytester.runitem(
+ """
+ import pytest
+ def test_func():
+ raise pytest.exit.Exception()
+ """
+ )
+ except pytest.exit.Exception:
+ pass
+ else:
+ assert False, "did not raise"
+
+
+class TestExecutionNonForked(BaseFunctionalTests):
+ def getrunner(self):
+ def f(item):
+ return runner.runtestprotocol(item, log=False)
+
+ return f
+
+ def test_keyboardinterrupt_propagates(self, pytester: Pytester) -> None:
+ try:
+ pytester.runitem(
+ """
+ def test_func():
+ raise KeyboardInterrupt("fake")
+ """
+ )
+ except KeyboardInterrupt:
+ pass
+ else:
+ assert False, "did not raise"
+
+
+class TestSessionReports:
+ def test_collect_result(self, pytester: Pytester) -> None:
+ col = pytester.getmodulecol(
+ """
+ def test_func1():
+ pass
+ class TestClass(object):
+ pass
+ """
+ )
+ rep = runner.collect_one_node(col)
+ assert not rep.failed
+ assert not rep.skipped
+ assert rep.passed
+ locinfo = rep.location
+ assert locinfo[0] == col.path.name
+ assert not locinfo[1]
+ assert locinfo[2] == col.path.name
+ res = rep.result
+ assert len(res) == 2
+ assert res[0].name == "test_func1"
+ assert res[1].name == "TestClass"
+
+
+reporttypes: List[Type[reports.BaseReport]] = [
+ reports.BaseReport,
+ reports.TestReport,
+ reports.CollectReport,
+]
+
+
+@pytest.mark.parametrize(
+ "reporttype", reporttypes, ids=[x.__name__ for x in reporttypes]
+)
+def test_report_extra_parameters(reporttype: Type[reports.BaseReport]) -> None:
+ args = list(inspect.signature(reporttype.__init__).parameters.keys())[1:]
+ basekw: Dict[str, List[object]] = dict.fromkeys(args, [])
+ report = reporttype(newthing=1, **basekw)
+ assert report.newthing == 1
+
+
+def test_callinfo() -> None:
+ ci = runner.CallInfo.from_call(lambda: 0, "collect")
+ assert ci.when == "collect"
+ assert ci.result == 0
+ assert "result" in repr(ci)
+ assert repr(ci) == "<CallInfo when='collect' result: 0>"
+ assert str(ci) == "<CallInfo when='collect' result: 0>"
+
+ ci2 = runner.CallInfo.from_call(lambda: 0 / 0, "collect")
+ assert ci2.when == "collect"
+ assert not hasattr(ci2, "result")
+ assert repr(ci2) == f"<CallInfo when='collect' excinfo={ci2.excinfo!r}>"
+ assert str(ci2) == repr(ci2)
+ assert ci2.excinfo
+
+ # Newlines are escaped.
+ def raise_assertion():
+ assert 0, "assert_msg"
+
+ ci3 = runner.CallInfo.from_call(raise_assertion, "call")
+ assert repr(ci3) == f"<CallInfo when='call' excinfo={ci3.excinfo!r}>"
+ assert "\n" not in repr(ci3)
+
+
+# Design question: do we want general hooks in Python files?
+# If so, something like the following functional test makes sense.
+
+
+@pytest.mark.xfail
+def test_runtest_in_module_ordering(pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ import pytest
+ def pytest_runtest_setup(item): # runs after class-level!
+ item.function.mylist.append("module")
+ class TestClass(object):
+ def pytest_runtest_setup(self, item):
+ assert not hasattr(item.function, 'mylist')
+ item.function.mylist = ['class']
+ @pytest.fixture
+ def mylist(self, request):
+ return request.function.mylist
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_call(self, item):
+ try:
+ (yield).get_result()
+ except ValueError:
+ pass
+ def test_hello1(self, mylist):
+ assert mylist == ['class', 'module'], mylist
+ raise ValueError()
+ def test_hello2(self, mylist):
+ assert mylist == ['class', 'module'], mylist
+ def pytest_runtest_teardown(item):
+ del item.function.mylist
+ """
+ )
+ result = pytester.runpytest(p1)
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_outcomeexception_exceptionattributes() -> None:
+ outcome = outcomes.OutcomeException("test")
+ assert outcome.args[0] == outcome.msg
+
+
+def test_outcomeexception_passes_except_Exception() -> None:
+ with pytest.raises(outcomes.OutcomeException):
+ try:
+ raise outcomes.OutcomeException("test")
+ except Exception as e:
+ raise NotImplementedError from e
+
+
+def test_pytest_exit() -> None:
+ with pytest.raises(pytest.exit.Exception) as excinfo:
+ pytest.exit("hello")
+ assert excinfo.errisinstance(pytest.exit.Exception)
+
+
+def test_pytest_fail() -> None:
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ pytest.fail("hello")
+ s = excinfo.exconly(tryshort=True)
+ assert s.startswith("Failed")
+
+
+def test_pytest_exit_msg(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+
+ def pytest_configure(config):
+ pytest.exit('oh noes')
+ """
+ )
+ result = pytester.runpytest()
+ result.stderr.fnmatch_lines(["Exit: oh noes"])
+
+
+def _strip_resource_warnings(lines):
+ # Assert no output on stderr, except for unreliable ResourceWarnings.
+ # (https://github.com/pytest-dev/pytest/issues/5088)
+ return [
+ x
+ for x in lines
+ if not x.startswith(("Exception ignored in:", "ResourceWarning"))
+ ]
+
+
+def test_pytest_exit_returncode(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """\
+ import pytest
+ def test_foo():
+ pytest.exit("some exit msg", 99)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*! *Exit: some exit msg !*"])
+
+ assert _strip_resource_warnings(result.stderr.lines) == []
+ assert result.ret == 99
+
+ # It also prints to stderr in case of an exit during pytest_sessionstart.
+ pytester.makeconftest(
+ """\
+ import pytest
+
+ def pytest_sessionstart():
+ pytest.exit("during_sessionstart", 98)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*! *Exit: during_sessionstart !*"])
+ assert _strip_resource_warnings(result.stderr.lines) == [
+ "Exit: during_sessionstart"
+ ]
+ assert result.ret == 98
+
+
+def test_pytest_fail_notrace_runtest(pytester: Pytester) -> None:
+ """Test pytest.fail(..., pytrace=False) does not show tracebacks during test run."""
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_hello():
+ pytest.fail("hello", pytrace=False)
+ def teardown_function(function):
+ pytest.fail("world", pytrace=False)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["world", "hello"])
+ result.stdout.no_fnmatch_line("*def teardown_function*")
+
+
+def test_pytest_fail_notrace_collection(pytester: Pytester) -> None:
+ """Test pytest.fail(..., pytrace=False) does not show tracebacks during collection."""
+ pytester.makepyfile(
+ """
+ import pytest
+ def some_internal_function():
+ pytest.fail("hello", pytrace=False)
+ some_internal_function()
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["hello"])
+ result.stdout.no_fnmatch_line("*def some_internal_function()*")
+
+
+def test_pytest_fail_notrace_non_ascii(pytester: Pytester) -> None:
+ """Fix pytest.fail with pytrace=False with non-ascii characters (#1178).
+
+ This tests with native and unicode strings containing non-ascii chars.
+ """
+ pytester.makepyfile(
+ """\
+ import pytest
+
+ def test_hello():
+ pytest.fail('oh oh: ☺', pytrace=False)
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*test_hello*", "oh oh: ☺"])
+ result.stdout.no_fnmatch_line("*def test_hello*")
+
+
+def test_pytest_no_tests_collected_exit_status(pytester: Pytester) -> None:
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*collected 0 items*"])
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+ pytester.makepyfile(
+ test_foo="""
+ def test_foo():
+ assert 1
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*collected 1 item*"])
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ assert result.ret == ExitCode.OK
+
+ result = pytester.runpytest("-k nonmatch")
+ result.stdout.fnmatch_lines(["*collected 1 item*"])
+ result.stdout.fnmatch_lines(["*1 deselected*"])
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+def test_exception_printing_skip() -> None:
+ assert pytest.skip.Exception == pytest.skip.Exception
+ try:
+ pytest.skip("hello")
+ except pytest.skip.Exception:
+ excinfo = ExceptionInfo.from_current()
+ s = excinfo.exconly(tryshort=True)
+ assert s.startswith("Skipped")
+
+
+def test_importorskip(monkeypatch) -> None:
+ importorskip = pytest.importorskip
+
+ def f():
+ importorskip("asdlkj")
+
+ try:
+ sysmod = importorskip("sys")
+ assert sysmod is sys
+ # path = pytest.importorskip("os.path")
+ # assert path == os.path
+ excinfo = pytest.raises(pytest.skip.Exception, f)
+ assert excinfo is not None
+ excrepr = excinfo.getrepr()
+ assert excrepr is not None
+ assert excrepr.reprcrash is not None
+ path = Path(excrepr.reprcrash.path)
+ # check that importorskip reports the actual call site,
+ # which in this test is the test_runner.py file
+ assert path.stem == "test_runner"
+ pytest.raises(SyntaxError, pytest.importorskip, "x y z")
+ pytest.raises(SyntaxError, pytest.importorskip, "x=y")
+ mod = types.ModuleType("hello123")
+ mod.__version__ = "1.3" # type: ignore
+ monkeypatch.setitem(sys.modules, "hello123", mod)
+ with pytest.raises(pytest.skip.Exception):
+ pytest.importorskip("hello123", minversion="1.3.1")
+ mod2 = pytest.importorskip("hello123", minversion="1.3")
+ assert mod2 == mod
+ except pytest.skip.Exception: # pragma: no cover
+ assert False, f"spurious skip: {ExceptionInfo.from_current()}"
+
+
+def test_importorskip_imports_last_module_part() -> None:
+ ospath = pytest.importorskip("os.path")
+ assert os.path == ospath
+
+
+def test_importorskip_dev_module(monkeypatch) -> None:
+ try:
+ mod = types.ModuleType("mockmodule")
+ mod.__version__ = "0.13.0.dev-43290" # type: ignore
+ monkeypatch.setitem(sys.modules, "mockmodule", mod)
+ mod2 = pytest.importorskip("mockmodule", minversion="0.12.0")
+ assert mod2 == mod
+ with pytest.raises(pytest.skip.Exception):
+ pytest.importorskip("mockmodule1", minversion="0.14.0")
+ except pytest.skip.Exception: # pragma: no cover
+ assert False, f"spurious skip: {ExceptionInfo.from_current()}"
+
+
+def test_importorskip_module_level(pytester: Pytester) -> None:
+ """`importorskip` must be able to skip entire modules when used at module level."""
+ pytester.makepyfile(
+ """
+ import pytest
+ foobarbaz = pytest.importorskip("foobarbaz")
+
+ def test_foo():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*collected 0 items / 1 skipped*"])
+
+
+def test_importorskip_custom_reason(pytester: Pytester) -> None:
+ """Make sure custom reasons are used."""
+ pytester.makepyfile(
+ """
+ import pytest
+ foobarbaz = pytest.importorskip("foobarbaz2", reason="just because")
+
+ def test_foo():
+ pass
+ """
+ )
+ result = pytester.runpytest("-ra")
+ result.stdout.fnmatch_lines(["*just because*"])
+ result.stdout.fnmatch_lines(["*collected 0 items / 1 skipped*"])
+
+
+def test_pytest_cmdline_main(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ def test_hello():
+ assert 1
+ if __name__ == '__main__':
+ pytest.cmdline.main([__file__])
+ """
+ )
+ import subprocess
+
+ popen = subprocess.Popen([sys.executable, str(p)], stdout=subprocess.PIPE)
+ popen.communicate()
+ ret = popen.wait()
+ assert ret == 0
+
+
+def test_unicode_in_longrepr(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """\
+ import pytest
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_makereport():
+ outcome = yield
+ rep = outcome.get_result()
+ if rep.when == "call":
+ rep.longrepr = 'ä'
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_out():
+ assert 0
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 1
+ assert "UnicodeEncodeError" not in result.stderr.str()
+
+
+def test_failure_in_setup(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def setup_module():
+ 0/0
+ def test_func():
+ pass
+ """
+ )
+ result = pytester.runpytest("--tb=line")
+ result.stdout.no_fnmatch_line("*def setup_module*")
+
+
+def test_makereport_getsource(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo():
+ if False: pass
+ else: assert False
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.no_fnmatch_line("*INTERNALERROR*")
+ result.stdout.fnmatch_lines(["*else: assert False*"])
+
+
+def test_makereport_getsource_dynamic_code(
+ pytester: Pytester, monkeypatch: MonkeyPatch
+) -> None:
+ """Test that exception in dynamically generated code doesn't break getting the source line."""
+ import inspect
+
+ original_findsource = inspect.findsource
+
+ def findsource(obj):
+ # Can be triggered by dynamically created functions
+ if obj.__name__ == "foo":
+ raise IndexError()
+ return original_findsource(obj)
+
+ monkeypatch.setattr(inspect, "findsource", findsource)
+
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def foo(missing):
+ pass
+
+ def test_fix(foo):
+ assert False
+ """
+ )
+ result = pytester.runpytest("-vv")
+ result.stdout.no_fnmatch_line("*INTERNALERROR*")
+ result.stdout.fnmatch_lines(["*test_fix*", "*fixture*'missing'*not found*"])
+
+
+def test_store_except_info_on_error() -> None:
+ """Test that upon test failure, the exception info is stored on
+ sys.last_traceback and friends."""
+ # Simulate item that might raise a specific exception, depending on `raise_error` class var
+ class ItemMightRaise:
+ nodeid = "item_that_raises"
+ raise_error = True
+
+ def runtest(self):
+ if self.raise_error:
+ raise IndexError("TEST")
+
+ try:
+ runner.pytest_runtest_call(ItemMightRaise()) # type: ignore[arg-type]
+ except IndexError:
+ pass
+ # Check that exception info is stored on sys
+ assert sys.last_type is IndexError
+ assert isinstance(sys.last_value, IndexError)
+ assert sys.last_value.args[0] == "TEST"
+ assert sys.last_traceback
+
+ # The next run should clear the exception info stored by the previous run
+ ItemMightRaise.raise_error = False
+ runner.pytest_runtest_call(ItemMightRaise()) # type: ignore[arg-type]
+ assert not hasattr(sys, "last_type")
+ assert not hasattr(sys, "last_value")
+ assert not hasattr(sys, "last_traceback")
+
+
+def test_current_test_env_var(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
+ pytest_current_test_vars: List[Tuple[str, str]] = []
+ monkeypatch.setattr(
+ sys, "pytest_current_test_vars", pytest_current_test_vars, raising=False
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+ import sys
+ import os
+
+ @pytest.fixture
+ def fix():
+ sys.pytest_current_test_vars.append(('setup', os.environ['PYTEST_CURRENT_TEST']))
+ yield
+ sys.pytest_current_test_vars.append(('teardown', os.environ['PYTEST_CURRENT_TEST']))
+
+ def test(fix):
+ sys.pytest_current_test_vars.append(('call', os.environ['PYTEST_CURRENT_TEST']))
+ """
+ )
+ result = pytester.runpytest_inprocess()
+ assert result.ret == 0
+ test_id = "test_current_test_env_var.py::test"
+ assert pytest_current_test_vars == [
+ ("setup", test_id + " (setup)"),
+ ("call", test_id + " (call)"),
+ ("teardown", test_id + " (teardown)"),
+ ]
+ assert "PYTEST_CURRENT_TEST" not in os.environ
+
+
+class TestReportContents:
+ """Test user-level API of ``TestReport`` objects."""
+
+ def getrunner(self):
+ return lambda item: runner.runtestprotocol(item, log=False)
+
+ def test_longreprtext_pass(self, pytester: Pytester) -> None:
+ reports = pytester.runitem(
+ """
+ def test_func():
+ pass
+ """
+ )
+ rep = reports[1]
+ assert rep.longreprtext == ""
+
+ def test_longreprtext_skip(self, pytester: Pytester) -> None:
+ """TestReport.longreprtext can handle non-str ``longrepr`` attributes (#7559)"""
+ reports = pytester.runitem(
+ """
+ import pytest
+ def test_func():
+ pytest.skip()
+ """
+ )
+ _, call_rep, _ = reports
+ assert isinstance(call_rep.longrepr, tuple)
+ assert "Skipped" in call_rep.longreprtext
+
+ def test_longreprtext_collect_skip(self, pytester: Pytester) -> None:
+ """CollectReport.longreprtext can handle non-str ``longrepr`` attributes (#7559)"""
+ pytester.makepyfile(
+ """
+ import pytest
+ pytest.skip(allow_module_level=True)
+ """
+ )
+ rec = pytester.inline_run()
+ calls = rec.getcalls("pytest_collectreport")
+ _, call = calls
+ assert isinstance(call.report.longrepr, tuple)
+ assert "Skipped" in call.report.longreprtext
+
+ def test_longreprtext_failure(self, pytester: Pytester) -> None:
+ reports = pytester.runitem(
+ """
+ def test_func():
+ x = 1
+ assert x == 4
+ """
+ )
+ rep = reports[1]
+ assert "assert 1 == 4" in rep.longreprtext
+
+ def test_captured_text(self, pytester: Pytester) -> None:
+ reports = pytester.runitem(
+ """
+ import pytest
+ import sys
+
+ @pytest.fixture
+ def fix():
+ sys.stdout.write('setup: stdout\\n')
+ sys.stderr.write('setup: stderr\\n')
+ yield
+ sys.stdout.write('teardown: stdout\\n')
+ sys.stderr.write('teardown: stderr\\n')
+ assert 0
+
+ def test_func(fix):
+ sys.stdout.write('call: stdout\\n')
+ sys.stderr.write('call: stderr\\n')
+ assert 0
+ """
+ )
+ setup, call, teardown = reports
+ assert setup.capstdout == "setup: stdout\n"
+ assert call.capstdout == "setup: stdout\ncall: stdout\n"
+ assert teardown.capstdout == "setup: stdout\ncall: stdout\nteardown: stdout\n"
+
+ assert setup.capstderr == "setup: stderr\n"
+ assert call.capstderr == "setup: stderr\ncall: stderr\n"
+ assert teardown.capstderr == "setup: stderr\ncall: stderr\nteardown: stderr\n"
+
+ def test_no_captured_text(self, pytester: Pytester) -> None:
+ reports = pytester.runitem(
+ """
+ def test_func():
+ pass
+ """
+ )
+ rep = reports[1]
+ assert rep.capstdout == ""
+ assert rep.capstderr == ""
+
+ def test_longrepr_type(self, pytester: Pytester) -> None:
+ reports = pytester.runitem(
+ """
+ import pytest
+ def test_func():
+ pytest.fail(pytrace=False)
+ """
+ )
+ rep = reports[1]
+ assert isinstance(rep.longrepr, ExceptionChainRepr)
+
+
+def test_outcome_exception_bad_msg() -> None:
+ """Check that OutcomeExceptions validate their input to prevent confusing errors (#5578)"""
+
+ def func() -> None:
+ raise NotImplementedError()
+
+ expected = (
+ "OutcomeException expected string as 'msg' parameter, got 'function' instead.\n"
+ "Perhaps you meant to use a mark?"
+ )
+ with pytest.raises(TypeError) as excinfo:
+ OutcomeException(func) # type: ignore
+ assert str(excinfo.value) == expected
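
The runner tests above exercise the documented `pytest.importorskip` API. As a quick reference, a minimal sketch of module-level usage, assuming a hypothetical optional dependency named "numpy":

    import pytest

    # Skip the whole module at collection time if the dependency is missing or
    # older than the requested version; otherwise the imported module is returned.
    np = pytest.importorskip("numpy", minversion="1.20", reason="numpy required")

    def test_mean():
        assert np.mean([1, 2, 3]) == 2
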
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_runner_xunit.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_runner_xunit.py
new file mode 100644
index 0000000000..e077ac41e2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_runner_xunit.py
@@ -0,0 +1,297 @@
+"""Test correct setup/teardowns at module, class, and instance level."""
+from typing import List
+
+import pytest
+from _pytest.pytester import Pytester
+
+
+def test_module_and_function_setup(pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ modlevel = []
+ def setup_module(module):
+ assert not modlevel
+ module.modlevel.append(42)
+
+ def teardown_module(module):
+ modlevel.pop()
+
+ def setup_function(function):
+ function.answer = 17
+
+ def teardown_function(function):
+ del function.answer
+
+ def test_modlevel():
+ assert modlevel[0] == 42
+ assert test_modlevel.answer == 17
+
+ class TestFromClass(object):
+ def test_module(self):
+ assert modlevel[0] == 42
+ assert not hasattr(test_modlevel, 'answer')
+ """
+ )
+ rep = reprec.matchreport("test_modlevel")
+ assert rep.passed
+ rep = reprec.matchreport("test_module")
+ assert rep.passed
+
+
+def test_module_setup_failure_no_teardown(pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ values = []
+ def setup_module(module):
+ values.append(1)
+ 0/0
+
+ def test_nothing():
+ pass
+
+ def teardown_module(module):
+ values.append(2)
+ """
+ )
+ reprec.assertoutcome(failed=1)
+ calls = reprec.getcalls("pytest_runtest_setup")
+ assert calls[0].item.module.values == [1]
+
+
+def test_setup_function_failure_no_teardown(pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ modlevel = []
+ def setup_function(function):
+ modlevel.append(1)
+ 0/0
+
+ def teardown_function(module):
+ modlevel.append(2)
+
+ def test_func():
+ pass
+ """
+ )
+ calls = reprec.getcalls("pytest_runtest_setup")
+ assert calls[0].item.module.modlevel == [1]
+
+
+def test_class_setup(pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ class TestSimpleClassSetup(object):
+ clslevel = []
+ def setup_class(cls):
+ cls.clslevel.append(23)
+
+ def teardown_class(cls):
+ cls.clslevel.pop()
+
+ def test_classlevel(self):
+ assert self.clslevel[0] == 23
+
+ class TestInheritedClassSetupStillWorks(TestSimpleClassSetup):
+ def test_classlevel_anothertime(self):
+ assert self.clslevel == [23]
+
+ def test_cleanup():
+ assert not TestSimpleClassSetup.clslevel
+ assert not TestInheritedClassSetupStillWorks.clslevel
+ """
+ )
+ reprec.assertoutcome(passed=1 + 2 + 1)
+
+
+def test_class_setup_failure_no_teardown(pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ class TestSimpleClassSetup(object):
+ clslevel = []
+ def setup_class(cls):
+ 0/0
+
+ def teardown_class(cls):
+ cls.clslevel.append(1)
+
+ def test_classlevel(self):
+ pass
+
+ def test_cleanup():
+ assert not TestSimpleClassSetup.clslevel
+ """
+ )
+ reprec.assertoutcome(failed=1, passed=1)
+
+
+def test_method_setup(pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ class TestSetupMethod(object):
+ def setup_method(self, meth):
+ self.methsetup = meth
+ def teardown_method(self, meth):
+ del self.methsetup
+
+ def test_some(self):
+ assert self.methsetup == self.test_some
+
+ def test_other(self):
+ assert self.methsetup == self.test_other
+ """
+ )
+ reprec.assertoutcome(passed=2)
+
+
+def test_method_setup_failure_no_teardown(pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ class TestMethodSetup(object):
+ clslevel = []
+ def setup_method(self, method):
+ self.clslevel.append(1)
+ 0/0
+
+ def teardown_method(self, method):
+ self.clslevel.append(2)
+
+ def test_method(self):
+ pass
+
+ def test_cleanup():
+ assert TestMethodSetup.clslevel == [1]
+ """
+ )
+ reprec.assertoutcome(failed=1, passed=1)
+
+
+def test_method_setup_uses_fresh_instances(pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ class TestSelfState1(object):
+ memory = []
+ def test_hello(self):
+ self.memory.append(self)
+
+ def test_afterhello(self):
+ assert self != self.memory[0]
+ """
+ )
+ reprec.assertoutcome(passed=2, failed=0)
+
+
+def test_setup_that_skips_calledagain(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ def setup_module(mod):
+ pytest.skip("x")
+ def test_function1():
+ pass
+ def test_function2():
+ pass
+ """
+ )
+ reprec = pytester.inline_run(p)
+ reprec.assertoutcome(skipped=2)
+
+
+def test_setup_fails_again_on_all_tests(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ def setup_module(mod):
+ raise ValueError(42)
+ def test_function1():
+ pass
+ def test_function2():
+ pass
+ """
+ )
+ reprec = pytester.inline_run(p)
+ reprec.assertoutcome(failed=2)
+
+
+def test_setup_funcarg_setup_when_outer_scope_fails(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ def setup_module(mod):
+ raise ValueError(42)
+ @pytest.fixture
+ def hello(request):
+ raise ValueError("xyz43")
+ def test_function1(hello):
+ pass
+ def test_function2(hello):
+ pass
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ [
+ "*function1*",
+ "*ValueError*42*",
+ "*function2*",
+ "*ValueError*42*",
+ "*2 errors*",
+ ]
+ )
+ result.stdout.no_fnmatch_line("*xyz43*")
+
+
+@pytest.mark.parametrize("arg", ["", "arg"])
+def test_setup_teardown_function_level_with_optional_argument(
+ pytester: Pytester,
+ monkeypatch,
+ arg: str,
+) -> None:
+ """Parameter to setup/teardown xunit-style functions parameter is now optional (#1728)."""
+ import sys
+
+ trace_setups_teardowns: List[str] = []
+ monkeypatch.setattr(
+ sys, "trace_setups_teardowns", trace_setups_teardowns, raising=False
+ )
+ p = pytester.makepyfile(
+ """
+ import pytest
+ import sys
+
+ trace = sys.trace_setups_teardowns.append
+
+ def setup_module({arg}): trace('setup_module')
+ def teardown_module({arg}): trace('teardown_module')
+
+ def setup_function({arg}): trace('setup_function')
+ def teardown_function({arg}): trace('teardown_function')
+
+ def test_function_1(): pass
+ def test_function_2(): pass
+
+ class Test(object):
+ def setup_method(self, {arg}): trace('setup_method')
+ def teardown_method(self, {arg}): trace('teardown_method')
+
+ def test_method_1(self): pass
+ def test_method_2(self): pass
+ """.format(
+ arg=arg
+ )
+ )
+ result = pytester.inline_run(p)
+ result.assertoutcome(passed=4)
+
+ expected = [
+ "setup_module",
+ "setup_function",
+ "teardown_function",
+ "setup_function",
+ "teardown_function",
+ "setup_method",
+ "teardown_method",
+ "setup_method",
+ "teardown_method",
+ "teardown_module",
+ ]
+ assert trace_setups_teardowns == expected
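
As context for the xunit-style hooks exercised above, a minimal sketch of the hook names pytest recognizes at module, function, and method level; the `calls` list is illustrative, and the argument-less module hooks show the optional-parameter form (#1728) that the last test checks:

    calls = []

    def setup_module():               # the module argument may be omitted (#1728)
        calls.append("setup_module")

    def teardown_module():
        calls.append("teardown_module")

    def setup_function(function):     # receives the test function when declared
        calls.append("setup_function")

    def teardown_function(function):
        calls.append("teardown_function")

    class TestMethods:
        def setup_method(self, method):
            calls.append("setup_method")

        def teardown_method(self, method):
            calls.append("teardown_method")

        def test_one(self):
            assert calls[-1] == "setup_method"

    def test_two():
        assert calls[-1] == "setup_function"
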
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_scope.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_scope.py
new file mode 100644
index 0000000000..09ee1343a8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_scope.py
@@ -0,0 +1,39 @@
+import re
+
+import pytest
+from _pytest.scope import Scope
+
+
+def test_ordering() -> None:
+ assert Scope.Session > Scope.Package
+ assert Scope.Package > Scope.Module
+ assert Scope.Module > Scope.Class
+ assert Scope.Class > Scope.Function
+
+
+def test_next_lower() -> None:
+ assert Scope.Session.next_lower() is Scope.Package
+ assert Scope.Package.next_lower() is Scope.Module
+ assert Scope.Module.next_lower() is Scope.Class
+ assert Scope.Class.next_lower() is Scope.Function
+
+ with pytest.raises(ValueError, match="Function is the lower-most scope"):
+ Scope.Function.next_lower()
+
+
+def test_next_higher() -> None:
+ assert Scope.Function.next_higher() is Scope.Class
+ assert Scope.Class.next_higher() is Scope.Module
+ assert Scope.Module.next_higher() is Scope.Package
+ assert Scope.Package.next_higher() is Scope.Session
+
+ with pytest.raises(ValueError, match="Session is the upper-most scope"):
+ Scope.Session.next_higher()
+
+
+def test_from_user() -> None:
+ assert Scope.from_user("module", "for parametrize", "some::id") is Scope.Module
+
+ expected_msg = "for parametrize from some::id got an unexpected scope value 'foo'"
+ with pytest.raises(pytest.fail.Exception, match=re.escape(expected_msg)):
+ Scope.from_user("foo", "for parametrize", "some::id") # type:ignore[arg-type]
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_session.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_session.py
new file mode 100644
index 0000000000..3ca6d39038
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_session.py
@@ -0,0 +1,369 @@
+import pytest
+from _pytest.config import ExitCode
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+
+
+class SessionTests:
+ def test_basic_testitem_events(self, pytester: Pytester) -> None:
+ tfile = pytester.makepyfile(
+ """
+ def test_one():
+ pass
+ def test_one_one():
+ assert 0
+ def test_other():
+ raise ValueError(23)
+ class TestClass(object):
+ def test_two(self, someargs):
+ pass
+ """
+ )
+ reprec = pytester.inline_run(tfile)
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(skipped) == 0
+ assert len(passed) == 1
+ assert len(failed) == 3
+
+ def end(x):
+ return x.nodeid.split("::")[-1]
+
+ assert end(failed[0]) == "test_one_one"
+ assert end(failed[1]) == "test_other"
+ itemstarted = reprec.getcalls("pytest_itemcollected")
+ assert len(itemstarted) == 4
+ # XXX check for failing funcarg setup
+ # colreports = reprec.getcalls("pytest_collectreport")
+ # assert len(colreports) == 4
+ # assert colreports[1].report.failed
+
+ def test_nested_import_error(self, pytester: Pytester) -> None:
+ tfile = pytester.makepyfile(
+ """
+ import import_fails
+ def test_this():
+ assert import_fails.a == 1
+ """,
+ import_fails="""
+ import does_not_work
+ a = 1
+ """,
+ )
+ reprec = pytester.inline_run(tfile)
+ values = reprec.getfailedcollections()
+ assert len(values) == 1
+ out = str(values[0].longrepr)
+ assert out.find("does_not_work") != -1
+
+ def test_raises_output(self, pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ import pytest
+ def test_raises_doesnt():
+ pytest.raises(ValueError, int, "3")
+ """
+ )
+ passed, skipped, failed = reprec.listoutcomes()
+ assert len(failed) == 1
+ out = failed[0].longrepr.reprcrash.message # type: ignore[union-attr]
+ assert "DID NOT RAISE" in out
+
+ def test_syntax_error_module(self, pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource("this is really not python")
+ values = reprec.getfailedcollections()
+ assert len(values) == 1
+ out = str(values[0].longrepr)
+ assert out.find("not python") != -1
+
+ def test_exit_first_problem(self, pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ def test_one(): assert 0
+ def test_two(): assert 0
+ """,
+ "--exitfirst",
+ )
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 1
+ assert passed == skipped == 0
+
+ def test_maxfail(self, pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ def test_one(): assert 0
+ def test_two(): assert 0
+ def test_three(): assert 0
+ """,
+ "--maxfail=2",
+ )
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 2
+ assert passed == skipped == 0
+
+ def test_broken_repr(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ class reprexc(BaseException):
+ def __str__(self):
+ return "Ha Ha fooled you, I'm a broken repr()."
+
+ class BrokenRepr1(object):
+ foo=0
+ def __repr__(self):
+ raise reprexc
+
+ class TestBrokenClass(object):
+ def test_explicit_bad_repr(self):
+ t = BrokenRepr1()
+ with pytest.raises(BaseException, match="broken repr"):
+ repr(t)
+
+ def test_implicit_bad_repr1(self):
+ t = BrokenRepr1()
+ assert t.foo == 1
+
+ """
+ )
+ reprec = pytester.inline_run(p)
+ passed, skipped, failed = reprec.listoutcomes()
+ assert (len(passed), len(skipped), len(failed)) == (1, 0, 1)
+ out = failed[0].longrepr.reprcrash.message # type: ignore[union-attr]
+ assert out.find("<[reprexc() raised in repr()] BrokenRepr1") != -1
+
+ def test_broken_repr_with_showlocals_verbose(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ class ObjWithErrorInRepr:
+ def __repr__(self):
+ raise NotImplementedError
+
+ def test_repr_error():
+ x = ObjWithErrorInRepr()
+ assert x == "value"
+ """
+ )
+ reprec = pytester.inline_run("--showlocals", "-vv", p)
+ passed, skipped, failed = reprec.listoutcomes()
+ assert (len(passed), len(skipped), len(failed)) == (0, 0, 1)
+ entries = failed[0].longrepr.reprtraceback.reprentries # type: ignore[union-attr]
+ assert len(entries) == 1
+ repr_locals = entries[0].reprlocals
+ assert repr_locals.lines
+ assert len(repr_locals.lines) == 1
+ assert repr_locals.lines[0].startswith(
+ "x = <[NotImplementedError() raised in repr()] ObjWithErrorInRepr"
+ )
+
+ def test_skip_file_by_conftest(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ conftest="""
+ import pytest
+ def pytest_collect_file():
+ pytest.skip("intentional")
+ """,
+ test_file="""
+ def test_one(): pass
+ """,
+ )
+ try:
+ reprec = pytester.inline_run(pytester.path)
+ except pytest.skip.Exception: # pragma: no cover
+ pytest.fail("wrong skipped caught")
+ reports = reprec.getreports("pytest_collectreport")
+ assert len(reports) == 1
+ assert reports[0].skipped
+
+
+class TestNewSession(SessionTests):
+ def test_order_of_execution(self, pytester: Pytester) -> None:
+ reprec = pytester.inline_runsource(
+ """
+ values = []
+ def test_1():
+ values.append(1)
+ def test_2():
+ values.append(2)
+ def test_3():
+ assert values == [1,2]
+ class Testmygroup(object):
+ reslist = values
+ def test_1(self):
+ self.reslist.append(1)
+ def test_2(self):
+ self.reslist.append(2)
+ def test_3(self):
+ self.reslist.append(3)
+ def test_4(self):
+ assert self.reslist == [1,2,1,2,3]
+ """
+ )
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == skipped == 0
+ assert passed == 7
+
+ def test_collect_only_with_various_situations(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ test_one="""
+ def test_one():
+ raise ValueError()
+
+ class TestX(object):
+ def test_method_one(self):
+ pass
+
+ class TestY(TestX):
+ pass
+ """,
+ test_three="xxxdsadsadsadsa",
+ __init__="",
+ )
+ reprec = pytester.inline_run("--collect-only", p.parent)
+
+ itemstarted = reprec.getcalls("pytest_itemcollected")
+ assert len(itemstarted) == 3
+ assert not reprec.getreports("pytest_runtest_logreport")
+ started = reprec.getcalls("pytest_collectstart")
+ finished = reprec.getreports("pytest_collectreport")
+ assert len(started) == len(finished)
+ assert len(started) == 6
+ colfail = [x for x in finished if x.failed]
+ assert len(colfail) == 1
+
+ def test_minus_x_import_error(self, pytester: Pytester) -> None:
+ pytester.makepyfile(__init__="")
+ pytester.makepyfile(test_one="xxxx", test_two="yyyy")
+ reprec = pytester.inline_run("-x", pytester.path)
+ finished = reprec.getreports("pytest_collectreport")
+ colfail = [x for x in finished if x.failed]
+ assert len(colfail) == 1
+
+ def test_minus_x_overridden_by_maxfail(self, pytester: Pytester) -> None:
+ pytester.makepyfile(__init__="")
+ pytester.makepyfile(test_one="xxxx", test_two="yyyy", test_third="zzz")
+ reprec = pytester.inline_run("-x", "--maxfail=2", pytester.path)
+ finished = reprec.getreports("pytest_collectreport")
+ colfail = [x for x in finished if x.failed]
+ assert len(colfail) == 2
+
+
+def test_plugin_specify(pytester: Pytester) -> None:
+ with pytest.raises(ImportError):
+ pytester.parseconfig("-p", "nqweotexistent")
+ # pytest.raises(ImportError,
+ # "config.do_configure(config)"
+ # )
+
+
+def test_plugin_already_exists(pytester: Pytester) -> None:
+ config = pytester.parseconfig("-p", "terminal")
+ assert config.option.plugins == ["terminal"]
+ config._do_configure()
+ config._ensure_unconfigure()
+
+
+def test_exclude(pytester: Pytester) -> None:
+ hellodir = pytester.mkdir("hello")
+ hellodir.joinpath("test_hello.py").write_text("x y syntaxerror")
+ hello2dir = pytester.mkdir("hello2")
+ hello2dir.joinpath("test_hello2.py").write_text("x y syntaxerror")
+ pytester.makepyfile(test_ok="def test_pass(): pass")
+ result = pytester.runpytest("--ignore=hello", "--ignore=hello2")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_exclude_glob(pytester: Pytester) -> None:
+ hellodir = pytester.mkdir("hello")
+ hellodir.joinpath("test_hello.py").write_text("x y syntaxerror")
+ hello2dir = pytester.mkdir("hello2")
+ hello2dir.joinpath("test_hello2.py").write_text("x y syntaxerror")
+ hello3dir = pytester.mkdir("hallo3")
+ hello3dir.joinpath("test_hello3.py").write_text("x y syntaxerror")
+ subdir = pytester.mkdir("sub")
+ subdir.joinpath("test_hello4.py").write_text("x y syntaxerror")
+ pytester.makepyfile(test_ok="def test_pass(): pass")
+ result = pytester.runpytest("--ignore-glob=*h[ea]llo*")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_deselect(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_a="""
+ import pytest
+
+ def test_a1(): pass
+
+ @pytest.mark.parametrize('b', range(3))
+ def test_a2(b): pass
+
+ class TestClass:
+ def test_c1(self): pass
+
+ def test_c2(self): pass
+ """
+ )
+ result = pytester.runpytest(
+ "-v",
+ "--deselect=test_a.py::test_a2[1]",
+ "--deselect=test_a.py::test_a2[2]",
+ "--deselect=test_a.py::TestClass::test_c1",
+ )
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*3 passed, 3 deselected*"])
+ for line in result.stdout.lines:
+ assert not line.startswith(("test_a.py::test_a2[1]", "test_a.py::test_a2[2]"))
+
+
+def test_sessionfinish_with_start(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import os
+ values = []
+ def pytest_sessionstart():
+ values.append(os.getcwd())
+ os.chdir("..")
+
+ def pytest_sessionfinish():
+ assert values[0] == os.getcwd()
+
+ """
+ )
+ res = pytester.runpytest("--collect-only")
+ assert res.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+@pytest.mark.parametrize("path", ["root", "{relative}/root", "{environment}/root"])
+def test_rootdir_option_arg(
+ pytester: Pytester, monkeypatch: MonkeyPatch, path: str
+) -> None:
+ monkeypatch.setenv("PY_ROOTDIR_PATH", str(pytester.path))
+ path = path.format(relative=str(pytester.path), environment="$PY_ROOTDIR_PATH")
+
+ rootdir = pytester.path / "root" / "tests"
+ rootdir.mkdir(parents=True)
+ pytester.makepyfile(
+ """
+ import os
+ def test_one():
+ assert 1
+ """
+ )
+
+ result = pytester.runpytest(f"--rootdir={path}")
+ result.stdout.fnmatch_lines(
+ [
+ f"*rootdir: {pytester.path}/root",
+ "root/test_rootdir_option_arg.py *",
+ "*1 passed*",
+ ]
+ )
+
+
+def test_rootdir_wrong_option_arg(pytester: Pytester) -> None:
+ result = pytester.runpytest("--rootdir=wrong_dir")
+ result.stderr.fnmatch_lines(
+ ["*Directory *wrong_dir* not found. Check your '--rootdir' option.*"]
+ )
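
The session tests above drive pytest through `Pytester`; the same selection and failure-limit options can be passed to `pytest.main`, sketched here with hypothetical paths and node IDs:

    import pytest

    # Returns an ExitCode (0 == all passed, 5 == no tests collected, ...).
    exit_code = pytest.main([
        "--maxfail=2",                               # stop after two failures; takes precedence over -x
        "--ignore=hello",                            # drop a directory at collection time
        "--ignore-glob=*hallo*",                     # glob variant of --ignore
        "--deselect=test_a.py::TestClass::test_c1",  # drop a single collected item
        "--rootdir=root",                            # pin the rootdir explicitly
    ])
    print(exit_code)
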
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_setuponly.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_setuponly.py
new file mode 100644
index 0000000000..fe4bdc514e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_setuponly.py
@@ -0,0 +1,318 @@
+import sys
+
+import pytest
+from _pytest.config import ExitCode
+from _pytest.pytester import Pytester
+
+
+@pytest.fixture(params=["--setup-only", "--setup-plan", "--setup-show"], scope="module")
+def mode(request):
+ return request.param
+
+
+def test_show_only_active_fixtures(
+ pytester: Pytester, mode, dummy_yaml_custom_test
+) -> None:
+ pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def _arg0():
+ """hidden arg0 fixture"""
+ @pytest.fixture
+ def arg1():
+ """arg1 docstring"""
+ def test_arg1(arg1):
+ pass
+ '''
+ )
+
+ result = pytester.runpytest(mode)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ ["*SETUP F arg1*", "*test_arg1 (fixtures used: arg1)*", "*TEARDOWN F arg1*"]
+ )
+ result.stdout.no_fnmatch_line("*_arg0*")
+
+
+def test_show_different_scopes(pytester: Pytester, mode) -> None:
+ p = pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg_function():
+ """function scoped fixture"""
+ @pytest.fixture(scope='session')
+ def arg_session():
+ """session scoped fixture"""
+ def test_arg1(arg_session, arg_function):
+ pass
+ '''
+ )
+
+ result = pytester.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "SETUP S arg_session*",
+ "*SETUP F arg_function*",
+ "*test_arg1 (fixtures used: arg_function, arg_session)*",
+ "*TEARDOWN F arg_function*",
+ "TEARDOWN S arg_session*",
+ ]
+ )
+
+
+def test_show_nested_fixtures(pytester: Pytester, mode) -> None:
+ pytester.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture(scope='session')
+ def arg_same():
+ """session scoped fixture"""
+ '''
+ )
+ p = pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture(scope='function')
+ def arg_same(arg_same):
+ """function scoped fixture"""
+ def test_arg1(arg_same):
+ pass
+ '''
+ )
+
+ result = pytester.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "SETUP S arg_same*",
+ "*SETUP F arg_same (fixtures used: arg_same)*",
+ "*test_arg1 (fixtures used: arg_same)*",
+ "*TEARDOWN F arg_same*",
+ "TEARDOWN S arg_same*",
+ ]
+ )
+
+
+def test_show_fixtures_with_autouse(pytester: Pytester, mode) -> None:
+ p = pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture
+ def arg_function():
+ """function scoped fixture"""
+ @pytest.fixture(scope='session', autouse=True)
+ def arg_session():
+ """session scoped fixture"""
+ def test_arg1(arg_function):
+ pass
+ '''
+ )
+
+ result = pytester.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "SETUP S arg_session*",
+ "*SETUP F arg_function*",
+ "*test_arg1 (fixtures used: arg_function, arg_session)*",
+ ]
+ )
+
+
+def test_show_fixtures_with_parameters(pytester: Pytester, mode) -> None:
+ pytester.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture(scope='session', params=['foo', 'bar'])
+ def arg_same():
+ """session scoped fixture"""
+ '''
+ )
+ p = pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture(scope='function')
+ def arg_other(arg_same):
+ """function scoped fixture"""
+ def test_arg1(arg_other):
+ pass
+ '''
+ )
+
+ result = pytester.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "SETUP S arg_same?'foo'?",
+ "TEARDOWN S arg_same?'foo'?",
+ "SETUP S arg_same?'bar'?",
+ "TEARDOWN S arg_same?'bar'?",
+ ]
+ )
+
+
+def test_show_fixtures_with_parameter_ids(pytester: Pytester, mode) -> None:
+ pytester.makeconftest(
+ '''
+ import pytest
+ @pytest.fixture(
+ scope='session', params=['foo', 'bar'], ids=['spam', 'ham'])
+ def arg_same():
+ """session scoped fixture"""
+ '''
+ )
+ p = pytester.makepyfile(
+ '''
+ import pytest
+ @pytest.fixture(scope='function')
+ def arg_other(arg_same):
+ """function scoped fixture"""
+ def test_arg1(arg_other):
+ pass
+ '''
+ )
+
+ result = pytester.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ ["SETUP S arg_same?'spam'?", "SETUP S arg_same?'ham'?"]
+ )
+
+
+def test_show_fixtures_with_parameter_ids_function(pytester: Pytester, mode) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(params=['foo', 'bar'], ids=lambda p: p.upper())
+ def foobar():
+ pass
+ def test_foobar(foobar):
+ pass
+ """
+ )
+
+ result = pytester.runpytest(mode, p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ ["*SETUP F foobar?'FOO'?", "*SETUP F foobar?'BAR'?"]
+ )
+
+
+def test_dynamic_fixture_request(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture()
+ def dynamically_requested_fixture():
+ pass
+ @pytest.fixture()
+ def dependent_fixture(request):
+ request.getfixturevalue('dynamically_requested_fixture')
+ def test_dyn(dependent_fixture):
+ pass
+ """
+ )
+
+ result = pytester.runpytest("--setup-only", p)
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ [
+ "*SETUP F dynamically_requested_fixture",
+ "*TEARDOWN F dynamically_requested_fixture",
+ ]
+ )
+
+
+def test_capturing(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest, sys
+ @pytest.fixture()
+ def one():
+ sys.stdout.write('this should be captured')
+ sys.stderr.write('this should also be captured')
+ @pytest.fixture()
+ def two(one):
+ assert 0
+ def test_capturing(two):
+ pass
+ """
+ )
+
+ result = pytester.runpytest("--setup-only", p)
+ result.stdout.fnmatch_lines(
+ ["this should be captured", "this should also be captured"]
+ )
+
+
+def test_show_fixtures_and_execute_test(pytester: Pytester) -> None:
+ """Verify that setups are shown and tests are executed."""
+ p = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def arg():
+ assert True
+ def test_arg(arg):
+ assert False
+ """
+ )
+
+ result = pytester.runpytest("--setup-show", p)
+ assert result.ret == 1
+
+ result.stdout.fnmatch_lines(
+ ["*SETUP F arg*", "*test_arg (fixtures used: arg)F*", "*TEARDOWN F arg*"]
+ )
+
+
+def test_setup_show_with_KeyboardInterrupt_in_test(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def arg():
+ pass
+ def test_arg(arg):
+ raise KeyboardInterrupt()
+ """
+ )
+ result = pytester.runpytest("--setup-show", p, no_reraise_ctrlc=True)
+ result.stdout.fnmatch_lines(
+ [
+ "*SETUP F arg*",
+ "*test_arg (fixtures used: arg)*",
+ "*TEARDOWN F arg*",
+ "*! KeyboardInterrupt !*",
+ "*= no tests ran in *",
+ ]
+ )
+ assert result.ret == ExitCode.INTERRUPTED
+
+
+def test_show_fixture_action_with_bytes(pytester: Pytester) -> None:
+ # Issue 7126, BytesWarning when using --setup-show with bytes parameter
+ test_file = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.parametrize('data', [b'Hello World'])
+ def test_data(data):
+ pass
+ """
+ )
+ result = pytester.run(
+ sys.executable, "-bb", "-m", "pytest", "--setup-show", str(test_file)
+ )
+ assert result.ret == 0
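
As a quick reference for the output format these tests match against, a sketch of a parametrized session fixture and the kind of lines `pytest --setup-show` prints for it (fixture name and ids are hypothetical; exact column spacing may differ):

    import pytest

    @pytest.fixture(scope="session", params=["foo", "bar"], ids=["spam", "ham"])
    def backend(request):
        return request.param

    def test_uses_backend(backend):
        assert backend in ("foo", "bar")

    # `pytest --setup-show` then reports, roughly:
    #   SETUP    S backend['spam']
    #       test_x.py::test_uses_backend[spam] (fixtures used: backend)
    #   TEARDOWN S backend['spam']
    #   SETUP    S backend['ham']
    #       ...
    # which is what fnmatch patterns such as "SETUP S arg_same?'spam'?" above
    # are written to match.
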
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_setupplan.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_setupplan.py
new file mode 100644
index 0000000000..d51a187395
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_setupplan.py
@@ -0,0 +1,120 @@
+from _pytest.pytester import Pytester
+
+
+def test_show_fixtures_and_test(
+ pytester: Pytester, dummy_yaml_custom_test: None
+) -> None:
+ """Verify that fixtures are not executed."""
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def arg():
+ assert False
+ def test_arg(arg):
+ assert False
+ """
+ )
+
+ result = pytester.runpytest("--setup-plan")
+ assert result.ret == 0
+
+ result.stdout.fnmatch_lines(
+ ["*SETUP F arg*", "*test_arg (fixtures used: arg)", "*TEARDOWN F arg*"]
+ )
+
+
+def test_show_multi_test_fixture_setup_and_teardown_correctly_simple(
+ pytester: Pytester,
+) -> None:
+ """Verify that when a fixture lives for longer than a single test, --setup-plan
+ correctly displays the SETUP/TEARDOWN indicators the right number of times.
+
+ As reported in https://github.com/pytest-dev/pytest/issues/2049
+ --setup-plan was showing SETUP/TEARDOWN on every test, even when the fixture
+ should persist through multiple tests.
+
+ (Note that this bug never affected actual test execution, which used the
+ correct fixture lifetimes. It was purely a display bug for --setup-plan, and
+ did not affect the related --setup-show or --setup-only.)
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope = 'class')
+ def fix():
+ return object()
+ class TestClass:
+ def test_one(self, fix):
+ assert False
+ def test_two(self, fix):
+ assert False
+ """
+ )
+
+ result = pytester.runpytest("--setup-plan")
+ assert result.ret == 0
+
+ setup_fragment = "SETUP C fix"
+ setup_count = 0
+
+ teardown_fragment = "TEARDOWN C fix"
+ teardown_count = 0
+
+ for line in result.stdout.lines:
+ if setup_fragment in line:
+ setup_count += 1
+ if teardown_fragment in line:
+ teardown_count += 1
+
+ # Before the fix that this test covers, a SETUP/TEARDOWN message was
+ # emitted for every test, so both counts would have been 2.
+ assert setup_count == 1
+ assert teardown_count == 1
+
+
+def test_show_multi_test_fixture_setup_and_teardown_same_as_setup_show(
+ pytester: Pytester,
+) -> None:
+ """Verify that SETUP/TEARDOWN messages match what comes out of --setup-show."""
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope = 'session')
+ def sess():
+ return True
+ @pytest.fixture(scope = 'module')
+ def mod():
+ return True
+ @pytest.fixture(scope = 'class')
+ def cls():
+ return True
+ @pytest.fixture(scope = 'function')
+ def func():
+ return True
+ def test_outside(sess, mod, cls, func):
+ assert True
+ class TestCls:
+ def test_one(self, sess, mod, cls, func):
+ assert True
+ def test_two(self, sess, mod, cls, func):
+ assert True
+ """
+ )
+
+ plan_result = pytester.runpytest("--setup-plan")
+ show_result = pytester.runpytest("--setup-show")
+
+ # the number and text of these lines should be identical
+ plan_lines = [
+ line
+ for line in plan_result.stdout.lines
+ if "SETUP" in line or "TEARDOWN" in line
+ ]
+ show_lines = [
+ line
+ for line in show_result.stdout.lines
+ if "SETUP" in line or "TEARDOWN" in line
+ ]
+
+ assert plan_lines == show_lines
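
The distinction these two tests pin down: `--setup-plan` only prints the fixture SETUP/TEARDOWN plan without executing fixtures or tests, while `--setup-show` runs everything and interleaves the same lines with results; both should show one SETUP/TEARDOWN pair per fixture instance, not per test. A minimal way to compare them by hand (file name hypothetical):

    import pytest

    # Dry run: report which fixtures would be set up and torn down, without executing them.
    pytest.main(["--setup-plan", "test_widgets.py"])

    # Real run: execute fixtures and tests, printing the same SETUP/TEARDOWN lines inline.
    pytest.main(["--setup-show", "test_widgets.py"])
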
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_skipping.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_skipping.py
new file mode 100644
index 0000000000..3010943607
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_skipping.py
@@ -0,0 +1,1533 @@
+import sys
+import textwrap
+
+import pytest
+from _pytest.pytester import Pytester
+from _pytest.runner import runtestprotocol
+from _pytest.skipping import evaluate_skip_marks
+from _pytest.skipping import evaluate_xfail_marks
+from _pytest.skipping import pytest_runtest_setup
+
+
+class TestEvaluation:
+ def test_no_marker(self, pytester: Pytester) -> None:
+ item = pytester.getitem("def test_func(): pass")
+ skipped = evaluate_skip_marks(item)
+ assert not skipped
+
+ def test_marked_xfail_no_args(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_func():
+ pass
+ """
+ )
+ xfailed = evaluate_xfail_marks(item)
+ assert xfailed
+ assert xfailed.reason == ""
+ assert xfailed.run
+
+ def test_marked_skipif_no_args(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.skipif
+ def test_func():
+ pass
+ """
+ )
+ skipped = evaluate_skip_marks(item)
+ assert skipped
+ assert skipped.reason == ""
+
+ def test_marked_one_arg(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.skipif("hasattr(os, 'sep')")
+ def test_func():
+ pass
+ """
+ )
+ skipped = evaluate_skip_marks(item)
+ assert skipped
+ assert skipped.reason == "condition: hasattr(os, 'sep')"
+
+ def test_marked_one_arg_with_reason(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.skipif("hasattr(os, 'sep')", attr=2, reason="hello world")
+ def test_func():
+ pass
+ """
+ )
+ skipped = evaluate_skip_marks(item)
+ assert skipped
+ assert skipped.reason == "hello world"
+
+ def test_marked_one_arg_twice(self, pytester: Pytester) -> None:
+ lines = [
+ """@pytest.mark.skipif("not hasattr(os, 'murks')")""",
+ """@pytest.mark.skipif(condition="hasattr(os, 'murks')")""",
+ ]
+ for i in range(0, 2):
+ item = pytester.getitem(
+ """
+ import pytest
+ %s
+ %s
+ def test_func():
+ pass
+ """
+ % (lines[i], lines[(i + 1) % 2])
+ )
+ skipped = evaluate_skip_marks(item)
+ assert skipped
+ assert skipped.reason == "condition: not hasattr(os, 'murks')"
+
+ def test_marked_one_arg_twice2(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.skipif("hasattr(os, 'murks')")
+ @pytest.mark.skipif("not hasattr(os, 'murks')")
+ def test_func():
+ pass
+ """
+ )
+ skipped = evaluate_skip_marks(item)
+ assert skipped
+ assert skipped.reason == "condition: not hasattr(os, 'murks')"
+
+ def test_marked_skipif_with_boolean_without_reason(
+ self, pytester: Pytester
+ ) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.skipif(False)
+ def test_func():
+ pass
+ """
+ )
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ evaluate_skip_marks(item)
+ assert excinfo.value.msg is not None
+ assert (
+ """Error evaluating 'skipif': you need to specify reason=STRING when using booleans as conditions."""
+ in excinfo.value.msg
+ )
+
+ def test_marked_skipif_with_invalid_boolean(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+
+ class InvalidBool:
+ def __bool__(self):
+ raise TypeError("INVALID")
+
+ @pytest.mark.skipif(InvalidBool(), reason="xxx")
+ def test_func():
+ pass
+ """
+ )
+ with pytest.raises(pytest.fail.Exception) as excinfo:
+ evaluate_skip_marks(item)
+ assert excinfo.value.msg is not None
+ assert "Error evaluating 'skipif' condition as a boolean" in excinfo.value.msg
+ assert "INVALID" in excinfo.value.msg
+
+ def test_skipif_class(self, pytester: Pytester) -> None:
+ (item,) = pytester.getitems(
+ """
+ import pytest
+ class TestClass(object):
+ pytestmark = pytest.mark.skipif("config._hackxyz")
+ def test_func(self):
+ pass
+ """
+ )
+ item.config._hackxyz = 3 # type: ignore[attr-defined]
+ skipped = evaluate_skip_marks(item)
+ assert skipped
+ assert skipped.reason == "condition: config._hackxyz"
+
+ def test_skipif_markeval_namespace(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+
+ def pytest_markeval_namespace():
+ return {"color": "green"}
+ """
+ )
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.skipif("color == 'green'")
+ def test_1():
+ assert True
+
+ @pytest.mark.skipif("color == 'red'")
+ def test_2():
+ assert True
+ """
+ )
+ res = pytester.runpytest(p)
+ assert res.ret == 0
+ res.stdout.fnmatch_lines(["*1 skipped*"])
+ res.stdout.fnmatch_lines(["*1 passed*"])
+
+ def test_skipif_markeval_namespace_multiple(self, pytester: Pytester) -> None:
+ """Keys defined by ``pytest_markeval_namespace()`` in nested plugins override top-level ones."""
+ root = pytester.mkdir("root")
+ root.joinpath("__init__.py").touch()
+ root.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ def pytest_markeval_namespace():
+ return {"arg": "root"}
+ """
+ )
+ )
+ root.joinpath("test_root.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ @pytest.mark.skipif("arg == 'root'")
+ def test_root():
+ assert False
+ """
+ )
+ )
+ foo = root.joinpath("foo")
+ foo.mkdir()
+ foo.joinpath("__init__.py").touch()
+ foo.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ def pytest_markeval_namespace():
+ return {"arg": "foo"}
+ """
+ )
+ )
+ foo.joinpath("test_foo.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ @pytest.mark.skipif("arg == 'foo'")
+ def test_foo():
+ assert False
+ """
+ )
+ )
+ bar = root.joinpath("bar")
+ bar.mkdir()
+ bar.joinpath("__init__.py").touch()
+ bar.joinpath("conftest.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ def pytest_markeval_namespace():
+ return {"arg": "bar"}
+ """
+ )
+ )
+ bar.joinpath("test_bar.py").write_text(
+ textwrap.dedent(
+ """\
+ import pytest
+
+ @pytest.mark.skipif("arg == 'bar'")
+ def test_bar():
+ assert False
+ """
+ )
+ )
+
+ reprec = pytester.inline_run("-vs", "--capture=no")
+ reprec.assertoutcome(skipped=3)
+
+ def test_skipif_markeval_namespace_ValueError(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+
+ def pytest_markeval_namespace():
+ return True
+ """
+ )
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.skipif("color == 'green'")
+ def test_1():
+ assert True
+ """
+ )
+ res = pytester.runpytest(p)
+ assert res.ret == 1
+ res.stdout.fnmatch_lines(
+ [
+ "*ValueError: pytest_markeval_namespace() needs to return a dict, got True*"
+ ]
+ )
+
+
+class TestXFail:
+ @pytest.mark.parametrize("strict", [True, False])
+ def test_xfail_simple(self, pytester: Pytester, strict: bool) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.xfail(strict=%s)
+ def test_func():
+ assert 0
+ """
+ % strict
+ )
+ reports = runtestprotocol(item, log=False)
+ assert len(reports) == 3
+ callreport = reports[1]
+ assert callreport.skipped
+ assert callreport.wasxfail == ""
+
+ def test_xfail_xpassed(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.xfail(reason="this is an xfail")
+ def test_func():
+ assert 1
+ """
+ )
+ reports = runtestprotocol(item, log=False)
+ assert len(reports) == 3
+ callreport = reports[1]
+ assert callreport.passed
+ assert callreport.wasxfail == "this is an xfail"
+
+ def test_xfail_using_platform(self, pytester: Pytester) -> None:
+ """Verify that platform can be used with xfail statements."""
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.xfail("platform.platform() == platform.platform()")
+ def test_func():
+ assert 0
+ """
+ )
+ reports = runtestprotocol(item, log=False)
+ assert len(reports) == 3
+ callreport = reports[1]
+ assert callreport.wasxfail
+
+ def test_xfail_xpassed_strict(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.xfail(strict=True, reason="nope")
+ def test_func():
+ assert 1
+ """
+ )
+ reports = runtestprotocol(item, log=False)
+ assert len(reports) == 3
+ callreport = reports[1]
+ assert callreport.failed
+ assert str(callreport.longrepr) == "[XPASS(strict)] nope"
+ assert not hasattr(callreport, "wasxfail")
+
+ def test_xfail_run_anyway(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_func():
+ assert 0
+ def test_func2():
+ pytest.xfail("hello")
+ """
+ )
+ result = pytester.runpytest("--runxfail")
+ result.stdout.fnmatch_lines(
+ ["*def test_func():*", "*assert 0*", "*1 failed*1 pass*"]
+ )
+
+ @pytest.mark.parametrize(
+ "test_input,expected",
+ [
+ (
+ ["-rs"],
+ ["SKIPPED [1] test_sample.py:2: unconditional skip", "*1 skipped*"],
+ ),
+ (
+ ["-rs", "--runxfail"],
+ ["SKIPPED [1] test_sample.py:2: unconditional skip", "*1 skipped*"],
+ ),
+ ],
+ )
+ def test_xfail_run_with_skip_mark(
+ self, pytester: Pytester, test_input, expected
+ ) -> None:
+ pytester.makepyfile(
+ test_sample="""
+ import pytest
+ @pytest.mark.skip
+ def test_skip_location() -> None:
+ assert 0
+ """
+ )
+ result = pytester.runpytest(*test_input)
+ result.stdout.fnmatch_lines(expected)
+
+ def test_xfail_evalfalse_but_fails(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.xfail('False')
+ def test_func():
+ assert 0
+ """
+ )
+ reports = runtestprotocol(item, log=False)
+ callreport = reports[1]
+ assert callreport.failed
+ assert not hasattr(callreport, "wasxfail")
+ assert "xfail" in callreport.keywords
+
+ def test_xfail_not_report_default(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ test_one="""
+ import pytest
+ @pytest.mark.xfail
+ def test_this():
+ assert 0
+ """
+ )
+ pytester.runpytest(p, "-v")
+ # result.stdout.fnmatch_lines([
+ # "*HINT*use*-r*"
+ # ])
+
+ def test_xfail_not_run_xfail_reporting(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ test_one="""
+ import pytest
+ @pytest.mark.xfail(run=False, reason="noway")
+ def test_this():
+ assert 0
+ @pytest.mark.xfail("True", run=False)
+ def test_this_true():
+ assert 0
+ @pytest.mark.xfail("False", run=False, reason="huh")
+ def test_this_false():
+ assert 1
+ """
+ )
+ result = pytester.runpytest(p, "-rx")
+ result.stdout.fnmatch_lines(
+ [
+ "*test_one*test_this*",
+ "*NOTRUN*noway",
+ "*test_one*test_this_true*",
+ "*NOTRUN*condition:*True*",
+ "*1 passed*",
+ ]
+ )
+
+ def test_xfail_not_run_no_setup_run(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ test_one="""
+ import pytest
+ @pytest.mark.xfail(run=False, reason="hello")
+ def test_this():
+ assert 0
+ def setup_module(mod):
+ raise ValueError(42)
+ """
+ )
+ result = pytester.runpytest(p, "-rx")
+ result.stdout.fnmatch_lines(
+ ["*test_one*test_this*", "*NOTRUN*hello", "*1 xfailed*"]
+ )
+
+ def test_xfail_xpass(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ test_one="""
+ import pytest
+ @pytest.mark.xfail
+ def test_that():
+ assert 1
+ """
+ )
+ result = pytester.runpytest(p, "-rX")
+ result.stdout.fnmatch_lines(["*XPASS*test_that*", "*1 xpassed*"])
+ assert result.ret == 0
+
+ def test_xfail_imperative(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ def test_this():
+ pytest.xfail("hello")
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(["*1 xfailed*"])
+ result = pytester.runpytest(p, "-rx")
+ result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*reason:*hello*"])
+ result = pytester.runpytest(p, "--runxfail")
+ result.stdout.fnmatch_lines(["*1 pass*"])
+
+ def test_xfail_imperative_in_setup_function(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ def setup_function(function):
+ pytest.xfail("hello")
+
+ def test_this():
+ assert 0
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(["*1 xfailed*"])
+ result = pytester.runpytest(p, "-rx")
+ result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*reason:*hello*"])
+ result = pytester.runpytest(p, "--runxfail")
+ result.stdout.fnmatch_lines(
+ """
+ *def test_this*
+ *1 fail*
+ """
+ )
+
+ def xtest_dynamic_xfail_set_during_setup(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ def setup_function(function):
+ pytest.mark.xfail(function)
+ def test_this():
+ assert 0
+ def test_that():
+ assert 1
+ """
+ )
+ result = pytester.runpytest(p, "-rxX")
+ result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*XPASS*test_that*"])
+
+ def test_dynamic_xfail_no_run(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def arg(request):
+ request.applymarker(pytest.mark.xfail(run=False))
+ def test_this(arg):
+ assert 0
+ """
+ )
+ result = pytester.runpytest(p, "-rxX")
+ result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*NOTRUN*"])
+
+ def test_dynamic_xfail_set_during_funcarg_setup(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def arg(request):
+ request.applymarker(pytest.mark.xfail)
+ def test_this2(arg):
+ assert 0
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(["*1 xfailed*"])
+
+ def test_dynamic_xfail_set_during_runtest_failed(self, pytester: Pytester) -> None:
+ # Issue #7486.
+ p = pytester.makepyfile(
+ """
+ import pytest
+ def test_this(request):
+ request.node.add_marker(pytest.mark.xfail(reason="xfail"))
+ assert 0
+ """
+ )
+ result = pytester.runpytest(p)
+ result.assert_outcomes(xfailed=1)
+
+ def test_dynamic_xfail_set_during_runtest_passed_strict(
+ self, pytester: Pytester
+ ) -> None:
+ # Issue #7486.
+ p = pytester.makepyfile(
+ """
+ import pytest
+ def test_this(request):
+ request.node.add_marker(pytest.mark.xfail(reason="xfail", strict=True))
+ """
+ )
+ result = pytester.runpytest(p)
+ result.assert_outcomes(failed=1)
+
+ @pytest.mark.parametrize(
+ "expected, actual, matchline",
+ [
+ ("TypeError", "TypeError", "*1 xfailed*"),
+ ("(AttributeError, TypeError)", "TypeError", "*1 xfailed*"),
+ ("TypeError", "IndexError", "*1 failed*"),
+ ("(AttributeError, TypeError)", "IndexError", "*1 failed*"),
+ ],
+ )
+ def test_xfail_raises(
+ self, expected, actual, matchline, pytester: Pytester
+ ) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(raises=%s)
+ def test_raises():
+ raise %s()
+ """
+ % (expected, actual)
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines([matchline])
+
+ def test_strict_sanity(self, pytester: Pytester) -> None:
+ """Sanity check for xfail(strict=True): a failing test should behave
+ exactly like a normal xfail."""
+ p = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(reason='unsupported feature', strict=True)
+ def test_foo():
+ assert 0
+ """
+ )
+ result = pytester.runpytest(p, "-rxX")
+ result.stdout.fnmatch_lines(["*XFAIL*", "*unsupported feature*"])
+ assert result.ret == 0
+
+ @pytest.mark.parametrize("strict", [True, False])
+ def test_strict_xfail(self, pytester: Pytester, strict: bool) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.xfail(reason='unsupported feature', strict=%s)
+ def test_foo():
+ with open('foo_executed', 'w'): pass # make sure test executes
+ """
+ % strict
+ )
+ result = pytester.runpytest(p, "-rxX")
+ if strict:
+ result.stdout.fnmatch_lines(
+ ["*test_foo*", "*XPASS(strict)*unsupported feature*"]
+ )
+ else:
+ result.stdout.fnmatch_lines(
+ [
+ "*test_strict_xfail*",
+ "XPASS test_strict_xfail.py::test_foo unsupported feature",
+ ]
+ )
+ assert result.ret == (1 if strict else 0)
+ assert pytester.path.joinpath("foo_executed").exists()
+
+ @pytest.mark.parametrize("strict", [True, False])
+ def test_strict_xfail_condition(self, pytester: Pytester, strict: bool) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.xfail(False, reason='unsupported feature', strict=%s)
+ def test_foo():
+ pass
+ """
+ % strict
+ )
+ result = pytester.runpytest(p, "-rxX")
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ assert result.ret == 0
+
+ @pytest.mark.parametrize("strict", [True, False])
+ def test_xfail_condition_keyword(self, pytester: Pytester, strict: bool) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.xfail(condition=False, reason='unsupported feature', strict=%s)
+ def test_foo():
+ pass
+ """
+ % strict
+ )
+ result = pytester.runpytest(p, "-rxX")
+ result.stdout.fnmatch_lines(["*1 passed*"])
+ assert result.ret == 0
+
+ @pytest.mark.parametrize("strict_val", ["true", "false"])
+ def test_strict_xfail_default_from_file(
+ self, pytester: Pytester, strict_val
+ ) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ xfail_strict = %s
+ """
+ % strict_val
+ )
+ p = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(reason='unsupported feature')
+ def test_foo():
+ pass
+ """
+ )
+ result = pytester.runpytest(p, "-rxX")
+ strict = strict_val == "true"
+ result.stdout.fnmatch_lines(["*1 failed*" if strict else "*1 xpassed*"])
+ assert result.ret == (1 if strict else 0)
+
+ def test_xfail_markeval_namespace(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+
+ def pytest_markeval_namespace():
+ return {"color": "green"}
+ """
+ )
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.xfail("color == 'green'")
+ def test_1():
+ assert False
+
+ @pytest.mark.xfail("color == 'red'")
+ def test_2():
+ assert False
+ """
+ )
+ res = pytester.runpytest(p)
+ assert res.ret == 1
+ res.stdout.fnmatch_lines(["*1 failed*"])
+ res.stdout.fnmatch_lines(["*1 xfailed*"])
+
+
+class TestXFailwithSetupTeardown:
+ def test_failing_setup_issue9(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def setup_function(func):
+ assert 0
+
+ @pytest.mark.xfail
+ def test_func():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 xfail*"])
+
+ def test_failing_teardown_issue9(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def teardown_function(func):
+ assert 0
+
+ @pytest.mark.xfail
+ def test_func():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 xfail*"])
+
+
+class TestSkip:
+ def test_skip_class(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip
+ class TestSomething(object):
+ def test_foo(self):
+ pass
+ def test_bar(self):
+ pass
+
+ def test_baz():
+ pass
+ """
+ )
+ rec = pytester.inline_run()
+ rec.assertoutcome(skipped=2, passed=1)
+
+ def test_skips_on_false_string(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip('False')
+ def test_foo():
+ pass
+ """
+ )
+ rec = pytester.inline_run()
+ rec.assertoutcome(skipped=1)
+
+ def test_arg_as_reason(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip('testing stuff')
+ def test_bar():
+ pass
+ """
+ )
+ result = pytester.runpytest("-rs")
+ result.stdout.fnmatch_lines(["*testing stuff*", "*1 skipped*"])
+
+ def test_skip_no_reason(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip
+ def test_foo():
+ pass
+ """
+ )
+ result = pytester.runpytest("-rs")
+ result.stdout.fnmatch_lines(["*unconditional skip*", "*1 skipped*"])
+
+ def test_skip_with_reason(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip(reason="for lolz")
+ def test_bar():
+ pass
+ """
+ )
+ result = pytester.runpytest("-rs")
+ result.stdout.fnmatch_lines(["*for lolz*", "*1 skipped*"])
+
+ def test_only_skips_marked_test(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip
+ def test_foo():
+ pass
+ @pytest.mark.skip(reason="nothing in particular")
+ def test_bar():
+ pass
+ def test_baz():
+ assert True
+ """
+ )
+ result = pytester.runpytest("-rs")
+ result.stdout.fnmatch_lines(["*nothing in particular*", "*1 passed*2 skipped*"])
+
+ def test_strict_and_skip(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip
+ def test_hello():
+ pass
+ """
+ )
+ result = pytester.runpytest("-rs", "--strict-markers")
+ result.stdout.fnmatch_lines(["*unconditional skip*", "*1 skipped*"])
+
+ def test_wrong_skip_usage(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skip(False, reason="I thought this was skipif")
+ def test_hello():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*TypeError: *__init__() got multiple values for argument 'reason'"
+ " - maybe you meant pytest.mark.skipif?"
+ ]
+ )
+
+
+class TestSkipif:
+ def test_skipif_conditional(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.skipif("hasattr(os, 'sep')")
+ def test_func():
+ pass
+ """
+ )
+ x = pytest.raises(pytest.skip.Exception, lambda: pytest_runtest_setup(item))
+ assert x.value.msg == "condition: hasattr(os, 'sep')"
+
+ @pytest.mark.parametrize(
+ "params", ["\"hasattr(sys, 'platform')\"", 'True, reason="invalid platform"']
+ )
+ def test_skipif_reporting(self, pytester: Pytester, params) -> None:
+ p = pytester.makepyfile(
+ test_foo="""
+ import pytest
+ @pytest.mark.skipif(%(params)s)
+ def test_that():
+ assert 0
+ """
+ % dict(params=params)
+ )
+ result = pytester.runpytest(p, "-s", "-rs")
+ result.stdout.fnmatch_lines(["*SKIP*1*test_foo.py*platform*", "*1 skipped*"])
+ assert result.ret == 0
+
+ def test_skipif_using_platform(self, pytester: Pytester) -> None:
+ item = pytester.getitem(
+ """
+ import pytest
+ @pytest.mark.skipif("platform.platform() == platform.platform()")
+ def test_func():
+ pass
+ """
+ )
+ pytest.raises(pytest.skip.Exception, lambda: pytest_runtest_setup(item))
+
+ @pytest.mark.parametrize(
+ "marker, msg1, msg2",
+ [("skipif", "SKIP", "skipped"), ("xfail", "XPASS", "xpassed")],
+ )
+ def test_skipif_reporting_multiple(
+ self, pytester: Pytester, marker, msg1, msg2
+ ) -> None:
+ pytester.makepyfile(
+ test_foo="""
+ import pytest
+ @pytest.mark.{marker}(False, reason='first_condition')
+ @pytest.mark.{marker}(True, reason='second_condition')
+ def test_foobar():
+ assert 1
+ """.format(
+ marker=marker
+ )
+ )
+ result = pytester.runpytest("-s", "-rsxX")
+ result.stdout.fnmatch_lines(
+ [f"*{msg1}*test_foo.py*second_condition*", f"*1 {msg2}*"]
+ )
+ assert result.ret == 0
+
+
+def test_skip_not_report_default(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ test_one="""
+ import pytest
+ def test_this():
+ pytest.skip("hello")
+ """
+ )
+ result = pytester.runpytest(p, "-v")
+ result.stdout.fnmatch_lines(
+ [
+ # "*HINT*use*-r*",
+ "*1 skipped*"
+ ]
+ )
+
+
+def test_skipif_class(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ class TestClass(object):
+ pytestmark = pytest.mark.skipif("True")
+ def test_that(self):
+ assert 0
+ def test_though(self):
+ assert 0
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(["*2 skipped*"])
+
+
+def test_skipped_reasons_functional(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_one="""
+ import pytest
+ from conftest import doskip
+
+ def setup_function(func):
+ doskip()
+
+ def test_func():
+ pass
+
+ class TestClass(object):
+ def test_method(self):
+ doskip()
+
+ @pytest.mark.skip("via_decorator")
+ def test_deco(self):
+ assert 0
+ """,
+ conftest="""
+ import pytest, sys
+ def doskip():
+ assert sys._getframe().f_lineno == 3
+ pytest.skip('test')
+ """,
+ )
+ result = pytester.runpytest("-rs")
+ result.stdout.fnmatch_lines_random(
+ [
+ "SKIPPED [[]2[]] conftest.py:4: test",
+ "SKIPPED [[]1[]] test_one.py:14: via_decorator",
+ ]
+ )
+ assert result.ret == 0
+
+
+def test_skipped_folding(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_one="""
+ import pytest
+ pytestmark = pytest.mark.skip("Folding")
+ def setup_function(func):
+ pass
+ def test_func():
+ pass
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+ )
+ result = pytester.runpytest("-rs")
+ result.stdout.fnmatch_lines(["*SKIP*2*test_one.py: Folding"])
+ assert result.ret == 0
+
+
+def test_reportchars(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_1():
+ assert 0
+ @pytest.mark.xfail
+ def test_2():
+ assert 0
+ @pytest.mark.xfail
+ def test_3():
+ pass
+ def test_4():
+ pytest.skip("four")
+ """
+ )
+ result = pytester.runpytest("-rfxXs")
+ result.stdout.fnmatch_lines(
+ ["FAIL*test_1*", "XFAIL*test_2*", "XPASS*test_3*", "SKIP*four*"]
+ )
+
+
+def test_reportchars_error(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ conftest="""
+ def pytest_runtest_teardown():
+ assert 0
+ """,
+ test_simple="""
+ def test_foo():
+ pass
+ """,
+ )
+ result = pytester.runpytest("-rE")
+ result.stdout.fnmatch_lines(["ERROR*test_foo*"])
+
+
+def test_reportchars_all(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_1():
+ assert 0
+ @pytest.mark.xfail
+ def test_2():
+ assert 0
+ @pytest.mark.xfail
+ def test_3():
+ pass
+ def test_4():
+ pytest.skip("four")
+ @pytest.fixture
+ def fail():
+ assert 0
+ def test_5(fail):
+ pass
+ """
+ )
+ result = pytester.runpytest("-ra")
+ result.stdout.fnmatch_lines(
+ [
+ "SKIP*four*",
+ "XFAIL*test_2*",
+ "XPASS*test_3*",
+ "ERROR*test_5*",
+ "FAIL*test_1*",
+ ]
+ )
+
+
+def test_reportchars_all_error(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ conftest="""
+ def pytest_runtest_teardown():
+ assert 0
+ """,
+ test_simple="""
+ def test_foo():
+ pass
+ """,
+ )
+ result = pytester.runpytest("-ra")
+ result.stdout.fnmatch_lines(["ERROR*test_foo*"])
+
+
+def test_errors_in_xfail_skip_expressions(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skipif("asd")
+ def test_nameerror():
+ pass
+ @pytest.mark.xfail("syntax error")
+ def test_syntax():
+ pass
+
+ def test_func():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ markline = " ^"
+ pypy_version_info = getattr(sys, "pypy_version_info", None)
+ if pypy_version_info is not None and pypy_version_info < (6,):
+ markline = markline[5:]
+ elif sys.version_info >= (3, 8) or hasattr(sys, "pypy_version_info"):
+ markline = markline[4:]
+
+ if sys.version_info[:2] >= (3, 10):
+ expected = [
+ "*ERROR*test_nameerror*",
+ "*asd*",
+ "",
+ "During handling of the above exception, another exception occurred:",
+ ]
+ else:
+ expected = [
+ "*ERROR*test_nameerror*",
+ ]
+
+ expected += [
+ "*evaluating*skipif*condition*",
+ "*asd*",
+ "*ERROR*test_syntax*",
+ "*evaluating*xfail*condition*",
+ " syntax error",
+ markline,
+ "SyntaxError: invalid syntax",
+ "*1 pass*2 errors*",
+ ]
+ result.stdout.fnmatch_lines(expected)
+
+
+def test_xfail_skipif_with_globals(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ x = 3
+ @pytest.mark.skipif("x == 3")
+ def test_skip1():
+ pass
+ @pytest.mark.xfail("x == 3")
+ def test_boolean():
+ assert 0
+ """
+ )
+ result = pytester.runpytest("-rsx")
+ result.stdout.fnmatch_lines(["*SKIP*x == 3*", "*XFAIL*test_boolean*", "*x == 3*"])
+
+
+def test_default_markers(pytester: Pytester) -> None:
+ result = pytester.runpytest("--markers")
+ result.stdout.fnmatch_lines(
+ [
+ "*skipif(condition, ..., [*], reason=...)*skip*",
+ "*xfail(condition, ..., [*], reason=..., run=True, raises=None, strict=xfail_strict)*expected failure*",
+ ]
+ )
+
+
+def test_xfail_test_setup_exception(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_runtest_setup():
+ 0 / 0
+ """
+ )
+ p = pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_func():
+ assert 0
+ """
+ )
+ result = pytester.runpytest(p)
+ assert result.ret == 0
+ assert "xfailed" in result.stdout.str()
+ result.stdout.no_fnmatch_line("*xpassed*")
+
+
+def test_imperativeskip_on_xfail_test(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail
+ def test_that_fails():
+ assert 0
+
+ @pytest.mark.skipif("True")
+ def test_hello():
+ pass
+ """
+ )
+ pytester.makeconftest(
+ """
+ import pytest
+ def pytest_runtest_setup(item):
+ pytest.skip("abc")
+ """
+ )
+ result = pytester.runpytest("-rsxX")
+ result.stdout.fnmatch_lines_random(
+ """
+ *SKIP*abc*
+ *SKIP*condition: True*
+ *2 skipped*
+ """
+ )
+
+
+class TestBooleanCondition:
+ def test_skipif(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skipif(True, reason="True123")
+ def test_func1():
+ pass
+ @pytest.mark.skipif(False, reason="True123")
+ def test_func2():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *1 passed*1 skipped*
+ """
+ )
+
+ def test_skipif_noreason(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.skipif(True)
+ def test_func():
+ pass
+ """
+ )
+ result = pytester.runpytest("-rs")
+ result.stdout.fnmatch_lines(
+ """
+ *1 error*
+ """
+ )
+
+ def test_xfail(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.xfail(True, reason="True123")
+ def test_func():
+ assert 0
+ """
+ )
+ result = pytester.runpytest("-rxs")
+ result.stdout.fnmatch_lines(
+ """
+ *XFAIL*
+ *True123*
+ *1 xfail*
+ """
+ )
+
+
+def test_xfail_item(pytester: Pytester) -> None:
+ # Ensure pytest.xfail works with non-Python Item
+ pytester.makeconftest(
+ """
+ import pytest
+
+ class MyItem(pytest.Item):
+ nodeid = 'foo'
+ def runtest(self):
+ pytest.xfail("Expected Failure")
+
+ def pytest_collect_file(file_path, parent):
+ return MyItem.from_parent(name="foo", parent=parent)
+ """
+ )
+ result = pytester.inline_run()
+ passed, skipped, failed = result.listoutcomes()
+ assert not failed
+ xfailed = [r for r in skipped if hasattr(r, "wasxfail")]
+ assert xfailed
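+    # Imperative ``pytest.xfail()`` raises an exception during runtest; the
+    # resulting report lands in the "skipped" bucket but carries a
+    # ``wasxfail`` attribute, which is what the check above relies on.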
+
+
+def test_module_level_skip_error(pytester: Pytester) -> None:
+ """Verify that using pytest.skip at module level causes a collection error."""
+ pytester.makepyfile(
+ """
+ import pytest
+ pytest.skip("skip_module_level")
+
+ def test_func():
+ assert True
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ ["*Using pytest.skip outside of a test will skip the entire module*"]
+ )
+
+
+def test_module_level_skip_with_allow_module_level(pytester: Pytester) -> None:
+ """Verify that using pytest.skip(allow_module_level=True) is allowed."""
+ pytester.makepyfile(
+ """
+ import pytest
+ pytest.skip("skip_module_level", allow_module_level=True)
+
+ def test_func():
+ assert 0
+ """
+ )
+ result = pytester.runpytest("-rxs")
+ result.stdout.fnmatch_lines(["*SKIP*skip_module_level"])
+
+
+def test_invalid_skip_keyword_parameter(pytester: Pytester) -> None:
+    """Verify that using pytest.skip() with an unknown parameter raises an error."""
+ pytester.makepyfile(
+ """
+ import pytest
+ pytest.skip("skip_module_level", unknown=1)
+
+ def test_func():
+ assert 0
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*TypeError:*['unknown']*"])
+
+
+def test_mark_xfail_item(pytester: Pytester) -> None:
+ # Ensure pytest.mark.xfail works with non-Python Item
+ pytester.makeconftest(
+ """
+ import pytest
+
+ class MyItem(pytest.Item):
+ nodeid = 'foo'
+ def setup(self):
+ marker = pytest.mark.xfail("1 == 2", reason="Expected failure - false")
+ self.add_marker(marker)
+ marker = pytest.mark.xfail(True, reason="Expected failure - true")
+ self.add_marker(marker)
+ def runtest(self):
+ assert False
+
+ def pytest_collect_file(file_path, parent):
+ return MyItem.from_parent(name="foo", parent=parent)
+ """
+ )
+ result = pytester.inline_run()
+ passed, skipped, failed = result.listoutcomes()
+ assert not failed
+ xfailed = [r for r in skipped if hasattr(r, "wasxfail")]
+ assert xfailed
+
+
+def test_summary_list_after_errors(pytester: Pytester) -> None:
+ """Ensure the list of errors/fails/xfails/skips appears after tracebacks in terminal reporting."""
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_fail():
+ assert 0
+ """
+ )
+ result = pytester.runpytest("-ra")
+ result.stdout.fnmatch_lines(
+ [
+ "=* FAILURES *=",
+ "*= short test summary info =*",
+ "FAILED test_summary_list_after_errors.py::test_fail - assert 0",
+ ]
+ )
+
+
+def test_importorskip() -> None:
+ with pytest.raises(
+ pytest.skip.Exception,
+ match="^could not import 'doesnotexist': No module named .*",
+ ):
+ pytest.importorskip("doesnotexist")
+
+
+def test_relpath_rootdir(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ **{
+ "tests/test_1.py": """
+ import pytest
+ @pytest.mark.skip()
+ def test_pass():
+ pass
+ """,
+ }
+ )
+ result = pytester.runpytest("-rs", "tests/test_1.py", "--rootdir=tests")
+ result.stdout.fnmatch_lines(
+ ["SKIPPED [[]1[]] tests/test_1.py:2: unconditional skip"]
+ )
+
+
+def test_skip_using_reason_works_ok(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ def test_skipping_reason():
+ pytest.skip(reason="skippedreason")
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.no_fnmatch_line("*PytestDeprecationWarning*")
+ result.assert_outcomes(skipped=1)
+
+
+def test_fail_using_reason_works_ok(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ def test_failing_reason():
+ pytest.fail(reason="failedreason")
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.no_fnmatch_line("*PytestDeprecationWarning*")
+ result.assert_outcomes(failed=1)
+
+
+def test_fail_fails_with_msg_and_reason(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ def test_fail_both_arguments():
+ pytest.fail(reason="foo", msg="bar")
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ "*UsageError: Passing both ``reason`` and ``msg`` to pytest.fail(...) is not permitted.*"
+ )
+ result.assert_outcomes(failed=1)
+
+
+def test_skip_fails_with_msg_and_reason(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ def test_skip_both_arguments():
+ pytest.skip(reason="foo", msg="bar")
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ "*UsageError: Passing both ``reason`` and ``msg`` to pytest.skip(...) is not permitted.*"
+ )
+ result.assert_outcomes(failed=1)
+
+
+def test_exit_with_msg_and_reason_fails(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ def test_exit_both_arguments():
+ pytest.exit(reason="foo", msg="bar")
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines(
+ "*UsageError: cannot pass reason and msg to exit(), `msg` is deprecated, use `reason`.*"
+ )
+ result.assert_outcomes(failed=1)
+
+
+def test_exit_with_reason_works_ok(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ def test_exit_reason_only():
+ pytest.exit(reason="foo")
+ """
+ )
+ result = pytester.runpytest(p)
+ result.stdout.fnmatch_lines("*_pytest.outcomes.Exit: foo*")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_stash.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_stash.py
new file mode 100644
index 0000000000..2c9df4832e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_stash.py
@@ -0,0 +1,67 @@
+import pytest
+from _pytest.stash import Stash
+from _pytest.stash import StashKey
+
+
+def test_stash() -> None:
+ stash = Stash()
+
+ assert len(stash) == 0
+ assert not stash
+
+ key1 = StashKey[str]()
+ key2 = StashKey[int]()
+
+ # Basic functionality - single key.
+ assert key1 not in stash
+ stash[key1] = "hello"
+ assert key1 in stash
+ assert stash[key1] == "hello"
+ assert stash.get(key1, None) == "hello"
+ stash[key1] = "world"
+ assert stash[key1] == "world"
+ # Has correct type (no mypy error).
+ stash[key1] + "string"
+ assert len(stash) == 1
+ assert stash
+
+ # No interaction with another key.
+ assert key2 not in stash
+ assert stash.get(key2, None) is None
+ with pytest.raises(KeyError):
+ stash[key2]
+ with pytest.raises(KeyError):
+ del stash[key2]
+ stash[key2] = 1
+ assert stash[key2] == 1
+ # Has correct type (no mypy error).
+ stash[key2] + 20
+ del stash[key1]
+ with pytest.raises(KeyError):
+ del stash[key1]
+ with pytest.raises(KeyError):
+ stash[key1]
+
+ # setdefault
+ stash[key1] = "existing"
+ assert stash.setdefault(key1, "default") == "existing"
+ assert stash[key1] == "existing"
+ key_setdefault = StashKey[bytes]()
+ assert stash.setdefault(key_setdefault, b"default") == b"default"
+ assert stash[key_setdefault] == b"default"
+ assert len(stash) == 3
+ assert stash
+
+ # Can't accidentally add attributes to stash object itself.
+ with pytest.raises(AttributeError):
+ stash.foo = "nope" # type: ignore[attr-defined]
+
+ # No interaction with another stash.
+ stash2 = Stash()
+ key3 = StashKey[int]()
+ assert key2 not in stash2
+ stash2[key2] = 100
+ stash2[key3] = 200
+ assert stash2[key2] + stash2[key3] == 300
+ assert stash[key2] == 1
+ assert key3 not in stash
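+    # ``StashKey`` is generic in the stored value's type, so a static type
+    # checker can verify the reads and writes above; within pytest a ``Stash``
+    # is exposed to plugins as ``config.stash`` and ``node.stash``.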
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_stepwise.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_stepwise.py
new file mode 100644
index 0000000000..63d29d6241
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_stepwise.py
@@ -0,0 +1,280 @@
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+
+
+@pytest.fixture
+def stepwise_pytester(pytester: Pytester) -> Pytester:
+ # Rather than having to modify our testfile between tests, we introduce
+ # a flag for whether or not the second test should fail.
+ pytester.makeconftest(
+ """
+def pytest_addoption(parser):
+ group = parser.getgroup('general')
+ group.addoption('--fail', action='store_true', dest='fail')
+ group.addoption('--fail-last', action='store_true', dest='fail_last')
+"""
+ )
+
+ # Create a simple test suite.
+ pytester.makepyfile(
+ test_a="""
+def test_success_before_fail():
+ assert 1
+
+def test_fail_on_flag(request):
+ assert not request.config.getvalue('fail')
+
+def test_success_after_fail():
+ assert 1
+
+def test_fail_last_on_flag(request):
+ assert not request.config.getvalue('fail_last')
+
+def test_success_after_last_fail():
+ assert 1
+"""
+ )
+
+ pytester.makepyfile(
+ test_b="""
+def test_success():
+ assert 1
+"""
+ )
+
+    # Customize the cache directory so we don't use tox's cache directory, which would make the tests in this module flaky.
+ pytester.makeini(
+ """
+ [pytest]
+ cache_dir = .cache
+ """
+ )
+
+ return pytester
+
+
+@pytest.fixture
+def error_pytester(pytester: Pytester) -> Pytester:
+ pytester.makepyfile(
+ test_a="""
+def test_error(nonexisting_fixture):
+ assert 1
+
+def test_success_after_fail():
+ assert 1
+"""
+ )
+
+ return pytester
+
+
+@pytest.fixture
+def broken_pytester(pytester: Pytester) -> Pytester:
+ pytester.makepyfile(
+ working_testfile="def test_proper(): assert 1", broken_testfile="foobar"
+ )
+ return pytester
+
+
+def _strip_resource_warnings(lines):
+ # Strip unreliable ResourceWarnings, so no-output assertions on stderr can work.
+ # (https://github.com/pytest-dev/pytest/issues/5088)
+ return [
+ x
+ for x in lines
+ if not x.startswith(("Exception ignored in:", "ResourceWarning"))
+ ]
+
+
+def test_run_without_stepwise(stepwise_pytester: Pytester) -> None:
+ result = stepwise_pytester.runpytest("-v", "--strict-markers", "--fail")
+ result.stdout.fnmatch_lines(["*test_success_before_fail PASSED*"])
+ result.stdout.fnmatch_lines(["*test_fail_on_flag FAILED*"])
+ result.stdout.fnmatch_lines(["*test_success_after_fail PASSED*"])
+
+
+def test_stepwise_output_summary(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize("expected", [True, True, True, True, False])
+ def test_data(expected):
+ assert expected
+ """
+ )
+ result = pytester.runpytest("-v", "--stepwise")
+ result.stdout.fnmatch_lines(["stepwise: no previously failed tests, not skipping."])
+ result = pytester.runpytest("-v", "--stepwise")
+ result.stdout.fnmatch_lines(
+ ["stepwise: skipping 4 already passed items.", "*1 failed, 4 deselected*"]
+ )
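+    # --stepwise records the id of the last failing test in pytest's cache
+    # (under the configured ``cache_dir``), which is how the second run above
+    # knows to deselect the four items that already passed.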
+
+
+def test_fail_and_continue_with_stepwise(stepwise_pytester: Pytester) -> None:
+ # Run the tests with a failing second test.
+ result = stepwise_pytester.runpytest(
+ "-v", "--strict-markers", "--stepwise", "--fail"
+ )
+ assert _strip_resource_warnings(result.stderr.lines) == []
+
+ stdout = result.stdout.str()
+ # Make sure we stop after first failing test.
+ assert "test_success_before_fail PASSED" in stdout
+ assert "test_fail_on_flag FAILED" in stdout
+ assert "test_success_after_fail" not in stdout
+
+ # "Fix" the test that failed in the last run and run it again.
+ result = stepwise_pytester.runpytest("-v", "--strict-markers", "--stepwise")
+ assert _strip_resource_warnings(result.stderr.lines) == []
+
+ stdout = result.stdout.str()
+ # Make sure the latest failing test runs and then continues.
+ assert "test_success_before_fail" not in stdout
+ assert "test_fail_on_flag PASSED" in stdout
+ assert "test_success_after_fail PASSED" in stdout
+
+
+@pytest.mark.parametrize("stepwise_skip", ["--stepwise-skip", "--sw-skip"])
+def test_run_with_skip_option(stepwise_pytester: Pytester, stepwise_skip: str) -> None:
+ result = stepwise_pytester.runpytest(
+ "-v",
+ "--strict-markers",
+ "--stepwise",
+ stepwise_skip,
+ "--fail",
+ "--fail-last",
+ )
+ assert _strip_resource_warnings(result.stderr.lines) == []
+
+ stdout = result.stdout.str()
+    # Make sure the first fail is ignored and the second fail stops the test run.
+ assert "test_fail_on_flag FAILED" in stdout
+ assert "test_success_after_fail PASSED" in stdout
+ assert "test_fail_last_on_flag FAILED" in stdout
+ assert "test_success_after_last_fail" not in stdout
+
+
+def test_fail_on_errors(error_pytester: Pytester) -> None:
+ result = error_pytester.runpytest("-v", "--strict-markers", "--stepwise")
+
+ assert _strip_resource_warnings(result.stderr.lines) == []
+ stdout = result.stdout.str()
+
+ assert "test_error ERROR" in stdout
+ assert "test_success_after_fail" not in stdout
+
+
+def test_change_testfile(stepwise_pytester: Pytester) -> None:
+ result = stepwise_pytester.runpytest(
+ "-v", "--strict-markers", "--stepwise", "--fail", "test_a.py"
+ )
+ assert _strip_resource_warnings(result.stderr.lines) == []
+
+ stdout = result.stdout.str()
+ assert "test_fail_on_flag FAILED" in stdout
+
+    # Make sure the second test run starts from the beginning, since the
+    # test to continue from does not exist in test_b.py.
+ result = stepwise_pytester.runpytest(
+ "-v", "--strict-markers", "--stepwise", "test_b.py"
+ )
+ assert _strip_resource_warnings(result.stderr.lines) == []
+
+ stdout = result.stdout.str()
+ assert "test_success PASSED" in stdout
+
+
+@pytest.mark.parametrize("broken_first", [True, False])
+def test_stop_on_collection_errors(
+ broken_pytester: Pytester, broken_first: bool
+) -> None:
+ """Stop during collection errors. Broken test first or broken test last
+ actually surfaced a bug (#5444), so we test both situations."""
+ files = ["working_testfile.py", "broken_testfile.py"]
+ if broken_first:
+ files.reverse()
+ result = broken_pytester.runpytest("-v", "--strict-markers", "--stepwise", *files)
+ result.stdout.fnmatch_lines("*error during collection*")
+
+
+def test_xfail_handling(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
+ """Ensure normal xfail is ignored, and strict xfail interrupts the session in sw mode
+
+ (#5547)
+ """
+ monkeypatch.setattr("sys.dont_write_bytecode", True)
+
+ contents = """
+ import pytest
+ def test_a(): pass
+
+ @pytest.mark.xfail(strict={strict})
+ def test_b(): assert {assert_value}
+
+ def test_c(): pass
+ def test_d(): pass
+ """
+ pytester.makepyfile(contents.format(assert_value="0", strict="False"))
+ result = pytester.runpytest("--sw", "-v")
+ result.stdout.fnmatch_lines(
+ [
+ "*::test_a PASSED *",
+ "*::test_b XFAIL *",
+ "*::test_c PASSED *",
+ "*::test_d PASSED *",
+ "* 3 passed, 1 xfailed in *",
+ ]
+ )
+
+ pytester.makepyfile(contents.format(assert_value="1", strict="True"))
+ result = pytester.runpytest("--sw", "-v")
+ result.stdout.fnmatch_lines(
+ [
+ "*::test_a PASSED *",
+ "*::test_b FAILED *",
+ "* Interrupted*",
+ "* 1 failed, 1 passed in *",
+ ]
+ )
+
+ pytester.makepyfile(contents.format(assert_value="0", strict="True"))
+ result = pytester.runpytest("--sw", "-v")
+ result.stdout.fnmatch_lines(
+ [
+ "*::test_b XFAIL *",
+ "*::test_c PASSED *",
+ "*::test_d PASSED *",
+ "* 2 passed, 1 deselected, 1 xfailed in *",
+ ]
+ )
+
+
+def test_stepwise_skip_is_independent(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_one():
+ assert False
+
+ def test_two():
+ assert False
+
+ def test_three():
+ assert False
+
+ """
+ )
+ result = pytester.runpytest("--tb", "no", "--stepwise-skip")
+ result.assert_outcomes(failed=2)
+ result.stdout.fnmatch_lines(
+ [
+ "FAILED test_stepwise_skip_is_independent.py::test_one - assert False",
+ "FAILED test_stepwise_skip_is_independent.py::test_two - assert False",
+ "*Interrupted: Test failed, continuing from this test next run.*",
+ ]
+ )
+
+
+def test_sw_skip_help(pytester: Pytester) -> None:
+ result = pytester.runpytest("-h")
+ result.stdout.fnmatch_lines("*implicitly enables --stepwise.")
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_terminal.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_terminal.py
new file mode 100644
index 0000000000..23f597e332
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_terminal.py
@@ -0,0 +1,2486 @@
+"""Terminal reporting of the full testing process."""
+import collections
+import os
+import sys
+import textwrap
+from io import StringIO
+from pathlib import Path
+from types import SimpleNamespace
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Tuple
+
+import pluggy
+
+import _pytest.config
+import _pytest.terminal
+import pytest
+from _pytest._io.wcwidth import wcswidth
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+from _pytest.reports import BaseReport
+from _pytest.reports import CollectReport
+from _pytest.reports import TestReport
+from _pytest.terminal import _folded_skips
+from _pytest.terminal import _format_trimmed
+from _pytest.terminal import _get_line_with_reprcrash_message
+from _pytest.terminal import _get_raw_skip_reason
+from _pytest.terminal import _plugin_nameversions
+from _pytest.terminal import getreportopt
+from _pytest.terminal import TerminalReporter
+
+DistInfo = collections.namedtuple("DistInfo", ["project_name", "version"])
+
+
+TRANS_FNMATCH = str.maketrans({"[": "[[]", "]": "[]]"})
+
+
+class Option:
+ def __init__(self, verbosity=0):
+ self.verbosity = verbosity
+
+ @property
+ def args(self):
+ values = []
+ values.append("--verbosity=%d" % self.verbosity)
+ return values
+
+
+@pytest.fixture(
+ params=[Option(verbosity=0), Option(verbosity=1), Option(verbosity=-1)],
+ ids=["default", "verbose", "quiet"],
+)
+def option(request):
+ return request.param
+
+
+@pytest.mark.parametrize(
+ "input,expected",
+ [
+ ([DistInfo(project_name="test", version=1)], ["test-1"]),
+ ([DistInfo(project_name="pytest-test", version=1)], ["test-1"]),
+ (
+ [
+ DistInfo(project_name="test", version=1),
+ DistInfo(project_name="test", version=1),
+ ],
+ ["test-1"],
+ ),
+ ],
+ ids=["normal", "prefix-strip", "deduplicate"],
+)
+def test_plugin_nameversion(input, expected):
+ pluginlist = [(None, x) for x in input]
+ result = _plugin_nameversions(pluginlist)
+ assert result == expected
+
+
+class TestTerminal:
+ def test_pass_skip_fail(self, pytester: Pytester, option) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_ok():
+ pass
+ def test_skip():
+ pytest.skip("xx")
+ def test_func():
+ assert 0
+ """
+ )
+ result = pytester.runpytest(*option.args)
+ if option.verbosity > 0:
+ result.stdout.fnmatch_lines(
+ [
+ "*test_pass_skip_fail.py::test_ok PASS*",
+ "*test_pass_skip_fail.py::test_skip SKIP*",
+ "*test_pass_skip_fail.py::test_func FAIL*",
+ ]
+ )
+ elif option.verbosity == 0:
+ result.stdout.fnmatch_lines(["*test_pass_skip_fail.py .sF*"])
+ else:
+ result.stdout.fnmatch_lines([".sF*"])
+ result.stdout.fnmatch_lines(
+ [" def test_func():", "> assert 0", "E assert 0"]
+ )
+
+ def test_internalerror(self, pytester: Pytester, linecomp) -> None:
+ modcol = pytester.getmodulecol("def test_one(): pass")
+ rep = TerminalReporter(modcol.config, file=linecomp.stringio)
+ with pytest.raises(ValueError) as excinfo:
+ raise ValueError("hello")
+ rep.pytest_internalerror(excinfo.getrepr())
+ linecomp.assert_contains_lines(["INTERNALERROR> *ValueError*hello*"])
+
+ def test_writeline(self, pytester: Pytester, linecomp) -> None:
+ modcol = pytester.getmodulecol("def test_one(): pass")
+ rep = TerminalReporter(modcol.config, file=linecomp.stringio)
+ rep.write_fspath_result(modcol.nodeid, ".")
+ rep.write_line("hello world")
+ lines = linecomp.stringio.getvalue().split("\n")
+ assert not lines[0]
+ assert lines[1].endswith(modcol.name + " .")
+ assert lines[2] == "hello world"
+
+ def test_show_runtest_logstart(self, pytester: Pytester, linecomp) -> None:
+ item = pytester.getitem("def test_func(): pass")
+ tr = TerminalReporter(item.config, file=linecomp.stringio)
+ item.config.pluginmanager.register(tr)
+ location = item.reportinfo()
+ tr.config.hook.pytest_runtest_logstart(
+ nodeid=item.nodeid, location=location, fspath=str(item.path)
+ )
+ linecomp.assert_contains_lines(["*test_show_runtest_logstart.py*"])
+
+ def test_runtest_location_shown_before_test_starts(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_1():
+ import time
+ time.sleep(20)
+ """
+ )
+ child = pytester.spawn_pytest("")
+ child.expect(".*test_runtest_location.*py")
+ child.sendeof()
+ child.kill(15)
+
+ def test_report_collect_after_half_a_second(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+ """Test for "collecting" being updated after 0.5s"""
+
+ pytester.makepyfile(
+ **{
+ "test1.py": """
+ import _pytest.terminal
+
+ _pytest.terminal.REPORT_COLLECTING_RESOLUTION = 0
+
+ def test_1():
+ pass
+ """,
+ "test2.py": "def test_2(): pass",
+ }
+ )
+ # Explicitly test colored output.
+ monkeypatch.setenv("PY_COLORS", "1")
+
+ child = pytester.spawn_pytest("-v test1.py test2.py")
+ child.expect(r"collecting \.\.\.")
+ child.expect(r"collecting 1 item")
+ child.expect(r"collecting 2 items")
+ child.expect(r"collected 2 items")
+ rest = child.read().decode("utf8")
+ assert "= \x1b[32m\x1b[1m2 passed\x1b[0m\x1b[32m in" in rest
+
+ def test_itemreport_subclasses_show_subclassed_file(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ **{
+ "tests/test_p1": """
+ class BaseTests(object):
+ fail = False
+
+ def test_p1(self):
+ if self.fail: assert 0
+ """,
+ "tests/test_p2": """
+ from test_p1 import BaseTests
+
+ class TestMore(BaseTests): pass
+ """,
+ "tests/test_p3.py": """
+ from test_p1 import BaseTests
+
+ BaseTests.fail = True
+
+ class TestMore(BaseTests): pass
+ """,
+ }
+ )
+ result = pytester.runpytest("tests/test_p2.py", "--rootdir=tests")
+ result.stdout.fnmatch_lines(["tests/test_p2.py .*", "=* 1 passed in *"])
+
+ result = pytester.runpytest("-vv", "-rA", "tests/test_p2.py", "--rootdir=tests")
+ result.stdout.fnmatch_lines(
+ [
+ "tests/test_p2.py::TestMore::test_p1 <- test_p1.py PASSED *",
+ "*= short test summary info =*",
+ "PASSED tests/test_p2.py::TestMore::test_p1",
+ ]
+ )
+ result = pytester.runpytest("-vv", "-rA", "tests/test_p3.py", "--rootdir=tests")
+ result.stdout.fnmatch_lines(
+ [
+ "tests/test_p3.py::TestMore::test_p1 <- test_p1.py FAILED *",
+ "*_ TestMore.test_p1 _*",
+ " def test_p1(self):",
+ "> if self.fail: assert 0",
+ "E assert 0",
+ "",
+ "tests/test_p1.py:5: AssertionError",
+ "*= short test summary info =*",
+ "FAILED tests/test_p3.py::TestMore::test_p1 - assert 0",
+ "*= 1 failed in *",
+ ]
+ )
+
+ def test_itemreport_directclasses_not_shown_as_subclasses(
+ self, pytester: Pytester
+ ) -> None:
+ a = pytester.mkpydir("a123")
+ a.joinpath("test_hello123.py").write_text(
+ textwrap.dedent(
+ """\
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+ )
+ )
+ result = pytester.runpytest("-vv")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*a123/test_hello123.py*PASS*"])
+ result.stdout.no_fnmatch_line("* <- *")
+
+ @pytest.mark.parametrize("fulltrace", ("", "--fulltrace"))
+ def test_keyboard_interrupt(self, pytester: Pytester, fulltrace) -> None:
+ pytester.makepyfile(
+ """
+ def test_foobar():
+ assert 0
+ def test_spamegg():
+ import py; pytest.skip('skip me please!')
+ def test_interrupt_me():
+ raise KeyboardInterrupt # simulating the user
+ """
+ )
+
+ result = pytester.runpytest(fulltrace, no_reraise_ctrlc=True)
+ result.stdout.fnmatch_lines(
+ [
+ " def test_foobar():",
+ "> assert 0",
+ "E assert 0",
+ "*_keyboard_interrupt.py:6: KeyboardInterrupt*",
+ ]
+ )
+ if fulltrace:
+ result.stdout.fnmatch_lines(
+ ["*raise KeyboardInterrupt # simulating the user*"]
+ )
+ else:
+ result.stdout.fnmatch_lines(
+ ["(to show a full traceback on KeyboardInterrupt use --full-trace)"]
+ )
+ result.stdout.fnmatch_lines(["*KeyboardInterrupt*"])
+
+ def test_keyboard_in_sessionstart(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_sessionstart():
+ raise KeyboardInterrupt
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_foobar():
+ pass
+ """
+ )
+
+ result = pytester.runpytest(no_reraise_ctrlc=True)
+ assert result.ret == 2
+ result.stdout.fnmatch_lines(["*KeyboardInterrupt*"])
+
+ def test_collect_single_item(self, pytester: Pytester) -> None:
+ """Use singular 'item' when reporting a single test item"""
+ pytester.makepyfile(
+ """
+ def test_foobar():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["collected 1 item"])
+
+ def test_rewrite(self, pytester: Pytester, monkeypatch) -> None:
+ config = pytester.parseconfig()
+ f = StringIO()
+ monkeypatch.setattr(f, "isatty", lambda *args: True)
+ tr = TerminalReporter(config, f)
+ tr._tw.fullwidth = 10
+ tr.write("hello")
+ tr.rewrite("hey", erase=True)
+ assert f.getvalue() == "hello" + "\r" + "hey" + (6 * " ")
+
+ def test_report_teststatus_explicit_markup(
+ self, monkeypatch: MonkeyPatch, pytester: Pytester, color_mapping
+ ) -> None:
+ """Test that TerminalReporter handles markup explicitly provided by
+ a pytest_report_teststatus hook."""
+ monkeypatch.setenv("PY_COLORS", "1")
+ pytester.makeconftest(
+ """
+ def pytest_report_teststatus(report):
+ return 'foo', 'F', ('FOO', {'red': True})
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_foobar():
+ pass
+ """
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ color_mapping.format_for_fnmatch(["*{red}FOO{reset}*"])
+ )
+
+ def test_verbose_skip_reason(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.skip(reason="123")
+ def test_1():
+ pass
+
+ @pytest.mark.xfail(reason="456")
+ def test_2():
+ pass
+
+ @pytest.mark.xfail(reason="789")
+ def test_3():
+ assert False
+
+ @pytest.mark.xfail(reason="")
+ def test_4():
+ assert False
+
+ @pytest.mark.skip
+ def test_5():
+ pass
+
+ @pytest.mark.xfail
+ def test_6():
+ pass
+
+ def test_7():
+ pytest.skip()
+
+ def test_8():
+ pytest.skip("888 is great")
+
+ def test_9():
+ pytest.xfail()
+
+ def test_10():
+ pytest.xfail("It's 🕙 o'clock")
+ """
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ [
+ "test_verbose_skip_reason.py::test_1 SKIPPED (123) *",
+ "test_verbose_skip_reason.py::test_2 XPASS (456) *",
+ "test_verbose_skip_reason.py::test_3 XFAIL (789) *",
+ "test_verbose_skip_reason.py::test_4 XFAIL *",
+ "test_verbose_skip_reason.py::test_5 SKIPPED (unconditional skip) *",
+ "test_verbose_skip_reason.py::test_6 XPASS *",
+ "test_verbose_skip_reason.py::test_7 SKIPPED *",
+ "test_verbose_skip_reason.py::test_8 SKIPPED (888 is great) *",
+ "test_verbose_skip_reason.py::test_9 XFAIL *",
+ "test_verbose_skip_reason.py::test_10 XFAIL (It's 🕙 o'clock) *",
+ ]
+ )
+
+
+class TestCollectonly:
+ def test_collectonly_basic(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_func():
+ pass
+ """
+ )
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(
+ ["<Module test_collectonly_basic.py>", " <Function test_func>"]
+ )
+
+ def test_collectonly_skipped_module(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ pytest.skip("hello")
+ """
+ )
+ result = pytester.runpytest("--collect-only", "-rs")
+ result.stdout.fnmatch_lines(["*ERROR collecting*"])
+
+ def test_collectonly_displays_test_description(
+ self, pytester: Pytester, dummy_yaml_custom_test
+ ) -> None:
+ """Used dummy_yaml_custom_test for an Item without ``obj``."""
+ pytester.makepyfile(
+ """
+ def test_with_description():
+ ''' This test has a description.
+
+ more1.
+ more2.'''
+ """
+ )
+ result = pytester.runpytest("--collect-only", "--verbose")
+ result.stdout.fnmatch_lines(
+ [
+ "<YamlFile test1.yaml>",
+ " <YamlItem test1.yaml>",
+ "<Module test_collectonly_displays_test_description.py>",
+ " <Function test_with_description>",
+ " This test has a description.",
+ " ",
+ " more1.",
+ " more2.",
+ ],
+ consecutive=True,
+ )
+
+ def test_collectonly_failed_module(self, pytester: Pytester) -> None:
+ pytester.makepyfile("""raise ValueError(0)""")
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*raise ValueError*", "*1 error*"])
+
+ def test_collectonly_fatal(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_collectstart(collector):
+ assert 0, "urgs"
+ """
+ )
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines(["*INTERNAL*args*"])
+ assert result.ret == 3
+
+ def test_collectonly_simple(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ def test_func1():
+ pass
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+ )
+ result = pytester.runpytest("--collect-only", p)
+ # assert stderr.startswith("inserting into sys.path")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "*<Module *.py>",
+ "* <Function test_func1>",
+ "* <Class TestClass>",
+ "* <Function test_method>",
+ ]
+ )
+
+ def test_collectonly_error(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile("import Errlkjqweqwe")
+ result = pytester.runpytest("--collect-only", p)
+ assert result.ret == 2
+ result.stdout.fnmatch_lines(
+ textwrap.dedent(
+ """\
+ *ERROR*
+ *ImportError*
+ *No module named *Errlk*
+ *1 error*
+ """
+ ).strip()
+ )
+
+ def test_collectonly_missing_path(self, pytester: Pytester) -> None:
+ """Issue 115: failure in parseargs will cause session not to
+ have the items attribute."""
+ result = pytester.runpytest("--collect-only", "uhm_missing_path")
+ assert result.ret == 4
+ result.stderr.fnmatch_lines(
+ ["*ERROR: file or directory not found: uhm_missing_path"]
+ )
+
+ def test_collectonly_quiet(self, pytester: Pytester) -> None:
+ pytester.makepyfile("def test_foo(): pass")
+ result = pytester.runpytest("--collect-only", "-q")
+ result.stdout.fnmatch_lines(["*test_foo*"])
+
+ def test_collectonly_more_quiet(self, pytester: Pytester) -> None:
+ pytester.makepyfile(test_fun="def test_foo(): pass")
+ result = pytester.runpytest("--collect-only", "-qq")
+ result.stdout.fnmatch_lines(["*test_fun.py: 1*"])
+
+ def test_collect_only_summary_status(self, pytester: Pytester) -> None:
+ """Custom status depending on test selection using -k or -m. #7701."""
+ pytester.makepyfile(
+ test_collect_foo="""
+ def test_foo(): pass
+ """,
+ test_collect_bar="""
+ def test_foobar(): pass
+ def test_bar(): pass
+ """,
+ )
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines("*== 3 tests collected in * ==*")
+
+ result = pytester.runpytest("--collect-only", "test_collect_foo.py")
+ result.stdout.fnmatch_lines("*== 1 test collected in * ==*")
+
+ result = pytester.runpytest("--collect-only", "-k", "foo")
+ result.stdout.fnmatch_lines("*== 2/3 tests collected (1 deselected) in * ==*")
+
+ result = pytester.runpytest("--collect-only", "-k", "test_bar")
+ result.stdout.fnmatch_lines("*== 1/3 tests collected (2 deselected) in * ==*")
+
+ result = pytester.runpytest("--collect-only", "-k", "invalid")
+ result.stdout.fnmatch_lines("*== no tests collected (3 deselected) in * ==*")
+
+ pytester.mkdir("no_tests_here")
+ result = pytester.runpytest("--collect-only", "no_tests_here")
+ result.stdout.fnmatch_lines("*== no tests collected in * ==*")
+
+ pytester.makepyfile(
+ test_contains_error="""
+ raise RuntimeError
+ """,
+ )
+ result = pytester.runpytest("--collect-only")
+ result.stdout.fnmatch_lines("*== 3 tests collected, 1 error in * ==*")
+ result = pytester.runpytest("--collect-only", "-k", "foo")
+ result.stdout.fnmatch_lines(
+ "*== 2/3 tests collected (1 deselected), 1 error in * ==*"
+ )
+
+
+class TestFixtureReporting:
+ def test_setup_fixture_error(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def setup_function(function):
+ print("setup func")
+ assert 0
+ def test_nada():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR at setup of test_nada*",
+ "*setup_function(function):*",
+ "*setup func*",
+ "*assert 0*",
+ "*1 error*",
+ ]
+ )
+ assert result.ret != 0
+
+ def test_teardown_fixture_error(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_nada():
+ pass
+ def teardown_function(function):
+ print("teardown func")
+ assert 0
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR at teardown*",
+ "*teardown_function(function):*",
+ "*assert 0*",
+ "*Captured stdout*",
+ "*teardown func*",
+ "*1 passed*1 error*",
+ ]
+ )
+
+ def test_teardown_fixture_error_and_test_failure(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_fail():
+ assert 0, "failingfunc"
+
+ def teardown_function(function):
+ print("teardown func")
+ assert False
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*ERROR at teardown of test_fail*",
+ "*teardown_function(function):*",
+ "*assert False*",
+ "*Captured stdout*",
+ "*teardown func*",
+ "*test_fail*",
+ "*def test_fail():",
+ "*failingfunc*",
+ "*1 failed*1 error*",
+ ]
+ )
+
+ def test_setup_teardown_output_and_test_failure(self, pytester: Pytester) -> None:
+ """Test for issue #442."""
+ pytester.makepyfile(
+ """
+ def setup_function(function):
+ print("setup func")
+
+ def test_fail():
+ assert 0, "failingfunc"
+
+ def teardown_function(function):
+ print("teardown func")
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*test_fail*",
+ "*def test_fail():",
+ "*failingfunc*",
+ "*Captured stdout setup*",
+ "*setup func*",
+ "*Captured stdout teardown*",
+ "*teardown func*",
+ "*1 failed*",
+ ]
+ )
+
+
+class TestTerminalFunctional:
+ def test_deselected(self, pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ def test_one():
+ pass
+ def test_two():
+ pass
+ def test_three():
+ pass
+ """
+ )
+ result = pytester.runpytest(
+ "-Wignore::pytest.PytestRemovedIn7Warning", "-k", "test_two:", testpath
+ )
+ result.stdout.fnmatch_lines(
+ ["collected 3 items / 1 deselected / 2 selected", "*test_deselected.py ..*"]
+ )
+ assert result.ret == 0
+
+ def test_deselected_with_hookwrapper(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_collection_modifyitems(config, items):
+ yield
+ deselected = items.pop()
+ config.hook.pytest_deselected(items=[deselected])
+ """
+ )
+ testpath = pytester.makepyfile(
+ """
+ def test_one():
+ pass
+ def test_two():
+ pass
+ def test_three():
+ pass
+ """
+ )
+ result = pytester.runpytest(testpath)
+ result.stdout.fnmatch_lines(
+ [
+ "collected 3 items / 1 deselected / 2 selected",
+ "*= 2 passed, 1 deselected in*",
+ ]
+ )
+ assert result.ret == 0
+
+ def test_show_deselected_items_using_markexpr_before_test_execution(
+ self, pytester: Pytester
+ ) -> None:
+ pytester.makepyfile(
+ test_show_deselected="""
+ import pytest
+
+ @pytest.mark.foo
+ def test_foobar():
+ pass
+
+ @pytest.mark.bar
+ def test_bar():
+ pass
+
+ def test_pass():
+ pass
+ """
+ )
+ result = pytester.runpytest("-m", "not foo")
+ result.stdout.fnmatch_lines(
+ [
+ "collected 3 items / 1 deselected / 2 selected",
+ "*test_show_deselected.py ..*",
+ "*= 2 passed, 1 deselected in * =*",
+ ]
+ )
+ result.stdout.no_fnmatch_line("*= 1 deselected =*")
+ assert result.ret == 0
+
+ def test_no_skip_summary_if_failure(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def test_ok():
+ pass
+ def test_fail():
+ assert 0
+ def test_skip():
+ pytest.skip("dontshow")
+ """
+ )
+ result = pytester.runpytest()
+ assert result.stdout.str().find("skip test summary") == -1
+ assert result.ret == 1
+
+ def test_passes(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test_passes():
+ pass
+ class TestClass(object):
+ def test_method(self):
+ pass
+ """
+ )
+ old = p1.parent
+ pytester.chdir()
+ try:
+ result = pytester.runpytest()
+ finally:
+ os.chdir(old)
+ result.stdout.fnmatch_lines(["test_passes.py ..*", "* 2 pass*"])
+ assert result.ret == 0
+
+ def test_header_trailer_info(
+ self, monkeypatch: MonkeyPatch, pytester: Pytester, request
+ ) -> None:
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD")
+ pytester.makepyfile(
+ """
+ def test_passes():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ verinfo = ".".join(map(str, sys.version_info[:3]))
+ result.stdout.fnmatch_lines(
+ [
+ "*===== test session starts ====*",
+ "platform %s -- Python %s*pytest-%s**pluggy-%s"
+ % (
+ sys.platform,
+ verinfo,
+ pytest.__version__,
+ pluggy.__version__,
+ ),
+ "*test_header_trailer_info.py .*",
+ "=* 1 passed*in *.[0-9][0-9]s *=",
+ ]
+ )
+ if request.config.pluginmanager.list_plugin_distinfo():
+ result.stdout.fnmatch_lines(["plugins: *"])
+
+ def test_no_header_trailer_info(
+ self, monkeypatch: MonkeyPatch, pytester: Pytester, request
+ ) -> None:
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD")
+ pytester.makepyfile(
+ """
+ def test_passes():
+ pass
+ """
+ )
+ result = pytester.runpytest("--no-header")
+ verinfo = ".".join(map(str, sys.version_info[:3]))
+ result.stdout.no_fnmatch_line(
+ "platform %s -- Python %s*pytest-%s**pluggy-%s"
+ % (
+ sys.platform,
+ verinfo,
+ pytest.__version__,
+ pluggy.__version__,
+ )
+ )
+ if request.config.pluginmanager.list_plugin_distinfo():
+ result.stdout.no_fnmatch_line("plugins: *")
+
+ def test_header(self, pytester: Pytester) -> None:
+ pytester.path.joinpath("tests").mkdir()
+ pytester.path.joinpath("gui").mkdir()
+
+ # no ini file
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["rootdir: *test_header0"])
+
+ # with configfile
+ pytester.makeini("""[pytest]""")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["rootdir: *test_header0, configfile: tox.ini"])
+
+ # with testpaths option, and not passing anything in the command-line
+ pytester.makeini(
+ """
+ [pytest]
+ testpaths = tests gui
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ ["rootdir: *test_header0, configfile: tox.ini, testpaths: tests, gui"]
+ )
+
+ # with testpaths option, passing directory in command-line: do not show testpaths then
+ result = pytester.runpytest("tests")
+ result.stdout.fnmatch_lines(["rootdir: *test_header0, configfile: tox.ini"])
+
+ def test_header_absolute_testpath(
+ self, pytester: Pytester, monkeypatch: MonkeyPatch
+ ) -> None:
+        """Regression test for #7814."""
+ tests = pytester.path.joinpath("tests")
+ tests.mkdir()
+ pytester.makepyprojecttoml(
+ """
+ [tool.pytest.ini_options]
+ testpaths = ['{}']
+ """.format(
+ tests
+ )
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "rootdir: *absolute_testpath0, configfile: pyproject.toml, testpaths: {}".format(
+ tests
+ )
+ ]
+ )
+
+ def test_no_header(self, pytester: Pytester) -> None:
+ pytester.path.joinpath("tests").mkdir()
+ pytester.path.joinpath("gui").mkdir()
+
+ # with testpaths option, and not passing anything in the command-line
+ pytester.makeini(
+ """
+ [pytest]
+ testpaths = tests gui
+ """
+ )
+ result = pytester.runpytest("--no-header")
+ result.stdout.no_fnmatch_line(
+ "rootdir: *test_header0, inifile: tox.ini, testpaths: tests, gui"
+ )
+
+ # with testpaths option, passing directory in command-line: do not show testpaths then
+ result = pytester.runpytest("tests", "--no-header")
+ result.stdout.no_fnmatch_line("rootdir: *test_header0, inifile: tox.ini")
+
+ def test_no_summary(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test_no_summary():
+ assert false
+ """
+ )
+ result = pytester.runpytest(p1, "--no-summary")
+ result.stdout.no_fnmatch_line("*= FAILURES =*")
+
+ def test_showlocals(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test_showlocals():
+ x = 3
+ y = "x" * 5000
+ assert 0
+ """
+ )
+ result = pytester.runpytest(p1, "-l")
+ result.stdout.fnmatch_lines(
+ [
+ # "_ _ * Locals *",
+ "x* = 3",
+ "y* = 'xxxxxx*",
+ ]
+ )
+
+ def test_showlocals_short(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def test_showlocals_short():
+ x = 3
+ y = "xxxx"
+ assert 0
+ """
+ )
+ result = pytester.runpytest(p1, "-l", "--tb=short")
+ result.stdout.fnmatch_lines(
+ [
+ "test_showlocals_short.py:*",
+ " assert 0",
+ "E assert 0",
+ " x = 3",
+ " y = 'xxxx'",
+ ]
+ )
+
+ @pytest.fixture
+ def verbose_testfile(self, pytester: Pytester) -> Path:
+ return pytester.makepyfile(
+ """
+ import pytest
+ def test_fail():
+ raise ValueError()
+ def test_pass():
+ pass
+ class TestClass(object):
+ def test_skip(self):
+ pytest.skip("hello")
+ def test_gen():
+ def check(x):
+ assert x == 1
+ yield check, 0
+ """
+ )
+
+ def test_verbose_reporting(self, verbose_testfile, pytester: Pytester) -> None:
+ result = pytester.runpytest(
+ verbose_testfile, "-v", "-Walways::pytest.PytestWarning"
+ )
+ result.stdout.fnmatch_lines(
+ [
+ "*test_verbose_reporting.py::test_fail *FAIL*",
+ "*test_verbose_reporting.py::test_pass *PASS*",
+ "*test_verbose_reporting.py::TestClass::test_skip *SKIP*",
+ "*test_verbose_reporting.py::test_gen *XFAIL*",
+ ]
+ )
+ assert result.ret == 1
+
+ def test_verbose_reporting_xdist(
+ self,
+ verbose_testfile,
+ monkeypatch: MonkeyPatch,
+ pytester: Pytester,
+ pytestconfig,
+ ) -> None:
+ if not pytestconfig.pluginmanager.get_plugin("xdist"):
+ pytest.skip("xdist plugin not installed")
+
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD")
+ result = pytester.runpytest(
+ verbose_testfile, "-v", "-n 1", "-Walways::pytest.PytestWarning"
+ )
+ result.stdout.fnmatch_lines(
+ ["*FAIL*test_verbose_reporting_xdist.py::test_fail*"]
+ )
+ assert result.ret == 1
+
+ def test_quiet_reporting(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile("def test_pass(): pass")
+ result = pytester.runpytest(p1, "-q")
+ s = result.stdout.str()
+ assert "test session starts" not in s
+ assert p1.name not in s
+ assert "===" not in s
+ assert "passed" in s
+
+ def test_more_quiet_reporting(self, pytester: Pytester) -> None:
+ p1 = pytester.makepyfile("def test_pass(): pass")
+ result = pytester.runpytest(p1, "-qq")
+ s = result.stdout.str()
+ assert "test session starts" not in s
+ assert p1.name not in s
+ assert "===" not in s
+ assert "passed" not in s
+
+ @pytest.mark.parametrize(
+ "params", [(), ("--collect-only",)], ids=["no-params", "collect-only"]
+ )
+ def test_report_collectionfinish_hook(self, pytester: Pytester, params) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_report_collectionfinish(config, start_path, items):
+ return [f'hello from hook: {len(items)} items']
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('i', range(3))
+ def test(i):
+ pass
+ """
+ )
+ result = pytester.runpytest(*params)
+ result.stdout.fnmatch_lines(["collected 3 items", "hello from hook: 3 items"])
+
+ def test_summary_f_alias(self, pytester: Pytester) -> None:
+ """Test that 'f' and 'F' report chars are aliases and don't show up twice in the summary (#6334)"""
+ pytester.makepyfile(
+ """
+ def test():
+ assert False
+ """
+ )
+ result = pytester.runpytest("-rfF")
+ expected = "FAILED test_summary_f_alias.py::test - assert False"
+ result.stdout.fnmatch_lines([expected])
+ assert result.stdout.lines.count(expected) == 1
+
+ def test_summary_s_alias(self, pytester: Pytester) -> None:
+ """Test that 's' and 'S' report chars are aliases and don't show up twice in the summary"""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.skip
+ def test():
+ pass
+ """
+ )
+ result = pytester.runpytest("-rsS")
+ expected = "SKIPPED [1] test_summary_s_alias.py:3: unconditional skip"
+ result.stdout.fnmatch_lines([expected])
+ assert result.stdout.lines.count(expected) == 1
+
+
+def test_fail_extra_reporting(pytester: Pytester, monkeypatch) -> None:
+ monkeypatch.setenv("COLUMNS", "80")
+ pytester.makepyfile("def test_this(): assert 0, 'this_failed' * 100")
+ result = pytester.runpytest("-rN")
+ result.stdout.no_fnmatch_line("*short test summary*")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*test summary*",
+ "FAILED test_fail_extra_reporting.py::test_this - AssertionError: this_failedt...",
+ ]
+ )
+
+
+def test_fail_reporting_on_pass(pytester: Pytester) -> None:
+ pytester.makepyfile("def test_this(): assert 1")
+ result = pytester.runpytest("-rf")
+ result.stdout.no_fnmatch_line("*short test summary*")
+
+
+def test_pass_extra_reporting(pytester: Pytester) -> None:
+ pytester.makepyfile("def test_this(): assert 1")
+ result = pytester.runpytest()
+ result.stdout.no_fnmatch_line("*short test summary*")
+ result = pytester.runpytest("-rp")
+ result.stdout.fnmatch_lines(["*test summary*", "PASS*test_pass_extra_reporting*"])
+
+
+def test_pass_reporting_on_fail(pytester: Pytester) -> None:
+ pytester.makepyfile("def test_this(): assert 0")
+ result = pytester.runpytest("-rp")
+ result.stdout.no_fnmatch_line("*short test summary*")
+
+
+def test_pass_output_reporting(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def setup_module():
+ print("setup_module")
+
+ def teardown_module():
+ print("teardown_module")
+
+ def test_pass_has_output():
+ print("Four score and seven years ago...")
+
+ def test_pass_no_output():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ s = result.stdout.str()
+ assert "test_pass_has_output" not in s
+ assert "Four score and seven years ago..." not in s
+ assert "test_pass_no_output" not in s
+ result = pytester.runpytest("-rPp")
+ result.stdout.fnmatch_lines(
+ [
+ "*= PASSES =*",
+ "*_ test_pass_has_output _*",
+ "*- Captured stdout setup -*",
+ "setup_module",
+ "*- Captured stdout call -*",
+ "Four score and seven years ago...",
+ "*- Captured stdout teardown -*",
+ "teardown_module",
+ "*= short test summary info =*",
+ "PASSED test_pass_output_reporting.py::test_pass_has_output",
+ "PASSED test_pass_output_reporting.py::test_pass_no_output",
+ "*= 2 passed in *",
+ ]
+ )
+
+
+def test_color_yes(pytester: Pytester, color_mapping) -> None:
+ p1 = pytester.makepyfile(
+ """
+ def fail():
+ assert 0
+
+ def test_this():
+ fail()
+ """
+ )
+ result = pytester.runpytest("--color=yes", str(p1))
+ result.stdout.fnmatch_lines(
+ color_mapping.format_for_fnmatch(
+ [
+ "{bold}=*= test session starts =*={reset}",
+ "collected 1 item",
+ "",
+ "test_color_yes.py {red}F{reset}{red} * [100%]{reset}",
+ "",
+ "=*= FAILURES =*=",
+ "{red}{bold}_*_ test_this _*_{reset}",
+ "",
+ " {kw}def{hl-reset} {function}test_this{hl-reset}():",
+ "> fail()",
+ "",
+ "{bold}{red}test_color_yes.py{reset}:5: ",
+ "_ _ * _ _*",
+ "",
+ " {kw}def{hl-reset} {function}fail{hl-reset}():",
+ "> {kw}assert{hl-reset} {number}0{hl-reset}",
+ "{bold}{red}E assert 0{reset}",
+ "",
+ "{bold}{red}test_color_yes.py{reset}:2: AssertionError",
+ "{red}=*= {red}{bold}1 failed{reset}{red} in *s{reset}{red} =*={reset}",
+ ]
+ )
+ )
+ result = pytester.runpytest("--color=yes", "--tb=short", str(p1))
+ result.stdout.fnmatch_lines(
+ color_mapping.format_for_fnmatch(
+ [
+ "{bold}=*= test session starts =*={reset}",
+ "collected 1 item",
+ "",
+ "test_color_yes.py {red}F{reset}{red} * [100%]{reset}",
+ "",
+ "=*= FAILURES =*=",
+ "{red}{bold}_*_ test_this _*_{reset}",
+ "{bold}{red}test_color_yes.py{reset}:5: in test_this",
+ " fail()",
+ "{bold}{red}test_color_yes.py{reset}:2: in fail",
+ " {kw}assert{hl-reset} {number}0{hl-reset}",
+ "{bold}{red}E assert 0{reset}",
+ "{red}=*= {red}{bold}1 failed{reset}{red} in *s{reset}{red} =*={reset}",
+ ]
+ )
+ )
+
+
+def test_color_no(pytester: Pytester) -> None:
+ pytester.makepyfile("def test_this(): assert 1")
+ result = pytester.runpytest("--color=no")
+ assert "test session starts" in result.stdout.str()
+ result.stdout.no_fnmatch_line("*\x1b[1m*")
+
+
+@pytest.mark.parametrize("verbose", [True, False])
+def test_color_yes_collection_on_non_atty(pytester: Pytester, verbose) -> None:
+ """#1397: Skip collect progress report when working on non-terminals."""
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize('i', range(10))
+ def test_this(i):
+ assert 1
+ """
+ )
+ args = ["--color=yes"]
+ if verbose:
+ args.append("-vv")
+ result = pytester.runpytest(*args)
+ assert "test session starts" in result.stdout.str()
+ assert "\x1b[1m" in result.stdout.str()
+ result.stdout.no_fnmatch_line("*collecting 10 items*")
+ if verbose:
+ assert "collecting ..." in result.stdout.str()
+ assert "collected 10 items" in result.stdout.str()
+
+
+def test_getreportopt() -> None:
+ from _pytest.terminal import _REPORTCHARS_DEFAULT
+
+ class FakeConfig:
+ class Option:
+ reportchars = _REPORTCHARS_DEFAULT
+ disable_warnings = False
+
+ option = Option()
+
+ config = cast(Config, FakeConfig())
+
+ assert _REPORTCHARS_DEFAULT == "fE"
+
+ # Default.
+ assert getreportopt(config) == "wfE"
+
+ config.option.reportchars = "sf"
+ assert getreportopt(config) == "wsf"
+
+ config.option.reportchars = "sfxw"
+ assert getreportopt(config) == "sfxw"
+
+ config.option.reportchars = "a"
+ assert getreportopt(config) == "wsxXEf"
+
+ config.option.reportchars = "N"
+ assert getreportopt(config) == "w"
+
+ config.option.reportchars = "NwfE"
+ assert getreportopt(config) == "wfE"
+
+ config.option.reportchars = "NfENx"
+ assert getreportopt(config) == "wx"
+
+ # Now with --disable-warnings.
+ config.option.disable_warnings = True
+ config.option.reportchars = "a"
+ assert getreportopt(config) == "sxXEf"
+
+ config.option.reportchars = "sfx"
+ assert getreportopt(config) == "sfx"
+
+ config.option.reportchars = "sfxw"
+ assert getreportopt(config) == "sfx"
+
+ config.option.reportchars = "a"
+ assert getreportopt(config) == "sxXEf"
+
+ config.option.reportchars = "A"
+ assert getreportopt(config) == "PpsxXEf"
+
+ config.option.reportchars = "AN"
+ assert getreportopt(config) == ""
+
+ config.option.reportchars = "NwfE"
+ assert getreportopt(config) == "fE"
+
+
+def test_terminalreporter_reportopt_addopts(pytester: Pytester) -> None:
+ pytester.makeini("[pytest]\naddopts=-rs")
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def tr(request):
+ tr = request.config.pluginmanager.getplugin("terminalreporter")
+ return tr
+ def test_opt(tr):
+ assert tr.hasopt('skipped')
+ assert not tr.hasopt('qwe')
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+
+def test_tbstyle_short(pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.fixture
+ def arg(request):
+ return 42
+ def test_opt(arg):
+ x = 0
+ assert x
+ """
+ )
+ result = pytester.runpytest("--tb=short")
+ s = result.stdout.str()
+ assert "arg = 42" not in s
+ assert "x = 0" not in s
+ result.stdout.fnmatch_lines(["*%s:8*" % p.name, " assert x", "E assert*"])
+ result = pytester.runpytest()
+ s = result.stdout.str()
+ assert "x = 0" in s
+ assert "assert x" in s
+
+
+def test_traceconfig(pytester: Pytester) -> None:
+ result = pytester.runpytest("--traceconfig")
+ result.stdout.fnmatch_lines(["*active plugins*"])
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+class TestGenericReporting:
+ """Test class which can be subclassed with a different option provider to
+ run e.g. distributed tests."""
+
+ def test_collect_fail(self, pytester: Pytester, option) -> None:
+ pytester.makepyfile("import xyz\n")
+ result = pytester.runpytest(*option.args)
+ result.stdout.fnmatch_lines(
+ ["ImportError while importing*", "*No module named *xyz*", "*1 error*"]
+ )
+
+ def test_maxfailures(self, pytester: Pytester, option) -> None:
+ pytester.makepyfile(
+ """
+ def test_1():
+ assert 0
+ def test_2():
+ assert 0
+ def test_3():
+ assert 0
+ """
+ )
+ result = pytester.runpytest("--maxfail=2", *option.args)
+ result.stdout.fnmatch_lines(
+ [
+ "*def test_1():*",
+ "*def test_2():*",
+ "*! stopping after 2 failures !*",
+ "*2 failed*",
+ ]
+ )
+
+ def test_maxfailures_with_interrupted(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test(request):
+ request.session.shouldstop = "session_interrupted"
+ assert 0
+ """
+ )
+ result = pytester.runpytest("--maxfail=1", "-ra")
+ result.stdout.fnmatch_lines(
+ [
+ "*= short test summary info =*",
+ "FAILED *",
+ "*! stopping after 1 failures !*",
+ "*! session_interrupted !*",
+ "*= 1 failed in*",
+ ]
+ )
+
+ def test_tb_option(self, pytester: Pytester, option) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ def g():
+ raise IndexError
+ def test_func():
+ print(6*7)
+ g() # --calling--
+ """
+ )
+ for tbopt in ["long", "short", "no"]:
+ print("testing --tb=%s..." % tbopt)
+ result = pytester.runpytest("-rN", "--tb=%s" % tbopt)
+ s = result.stdout.str()
+ if tbopt == "long":
+ assert "print(6*7)" in s
+ else:
+ assert "print(6*7)" not in s
+ if tbopt != "no":
+ assert "--calling--" in s
+ assert "IndexError" in s
+ else:
+ assert "FAILURES" not in s
+ assert "--calling--" not in s
+ assert "IndexError" not in s
+
+ def test_tb_crashline(self, pytester: Pytester, option) -> None:
+ p = pytester.makepyfile(
+ """
+ import pytest
+ def g():
+ raise IndexError
+ def test_func1():
+ print(6*7)
+ g() # --calling--
+ def test_func2():
+ assert 0, "hello"
+ """
+ )
+ result = pytester.runpytest("--tb=line")
+ bn = p.name
+ result.stdout.fnmatch_lines(
+ ["*%s:3: IndexError*" % bn, "*%s:8: AssertionError: hello*" % bn]
+ )
+ s = result.stdout.str()
+ assert "def test_func2" not in s
+
+ def test_pytest_report_header(self, pytester: Pytester, option) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_sessionstart(session):
+ session.config._somevalue = 42
+ def pytest_report_header(config):
+ return "hello: %s" % config._somevalue
+ """
+ )
+ pytester.mkdir("a").joinpath("conftest.py").write_text(
+ """
+def pytest_report_header(config, start_path):
+ return ["line1", str(start_path)]
+"""
+ )
+ result = pytester.runpytest("a")
+ result.stdout.fnmatch_lines(["*hello: 42*", "line1", str(pytester.path)])
+
+ def test_show_capture(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import sys
+ import logging
+ def test_one():
+ sys.stdout.write('!This is stdout!')
+ sys.stderr.write('!This is stderr!')
+ logging.warning('!This is a warning log msg!')
+ assert False, 'Something failed'
+ """
+ )
+
+ result = pytester.runpytest("--tb=short")
+ result.stdout.fnmatch_lines(
+ [
+ "!This is stdout!",
+ "!This is stderr!",
+ "*WARNING*!This is a warning log msg!",
+ ]
+ )
+
+ result = pytester.runpytest("--show-capture=all", "--tb=short")
+ result.stdout.fnmatch_lines(
+ [
+ "!This is stdout!",
+ "!This is stderr!",
+ "*WARNING*!This is a warning log msg!",
+ ]
+ )
+
+ stdout = pytester.runpytest("--show-capture=stdout", "--tb=short").stdout.str()
+ assert "!This is stderr!" not in stdout
+ assert "!This is stdout!" in stdout
+ assert "!This is a warning log msg!" not in stdout
+
+ stdout = pytester.runpytest("--show-capture=stderr", "--tb=short").stdout.str()
+ assert "!This is stdout!" not in stdout
+ assert "!This is stderr!" in stdout
+ assert "!This is a warning log msg!" not in stdout
+
+ stdout = pytester.runpytest("--show-capture=log", "--tb=short").stdout.str()
+ assert "!This is stdout!" not in stdout
+ assert "!This is stderr!" not in stdout
+ assert "!This is a warning log msg!" in stdout
+
+ stdout = pytester.runpytest("--show-capture=no", "--tb=short").stdout.str()
+ assert "!This is stdout!" not in stdout
+ assert "!This is stderr!" not in stdout
+ assert "!This is a warning log msg!" not in stdout
+
+ def test_show_capture_with_teardown_logs(self, pytester: Pytester) -> None:
+        """Ensure that the capturing of teardown logs honors the --show-capture setting."""
+ pytester.makepyfile(
+ """
+ import logging
+ import sys
+ import pytest
+
+ @pytest.fixture(scope="function", autouse="True")
+ def hook_each_test(request):
+ yield
+ sys.stdout.write("!stdout!")
+ sys.stderr.write("!stderr!")
+ logging.warning("!log!")
+
+ def test_func():
+ assert False
+ """
+ )
+
+ result = pytester.runpytest("--show-capture=stdout", "--tb=short").stdout.str()
+ assert "!stdout!" in result
+ assert "!stderr!" not in result
+ assert "!log!" not in result
+
+ result = pytester.runpytest("--show-capture=stderr", "--tb=short").stdout.str()
+ assert "!stdout!" not in result
+ assert "!stderr!" in result
+ assert "!log!" not in result
+
+ result = pytester.runpytest("--show-capture=log", "--tb=short").stdout.str()
+ assert "!stdout!" not in result
+ assert "!stderr!" not in result
+ assert "!log!" in result
+
+ result = pytester.runpytest("--show-capture=no", "--tb=short").stdout.str()
+ assert "!stdout!" not in result
+ assert "!stderr!" not in result
+ assert "!log!" not in result
+
+
+@pytest.mark.xfail("not hasattr(os, 'dup')")
+def test_fdopen_kept_alive_issue124(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import os, sys
+ k = []
+ def test_open_file_and_keep_alive(capfd):
+ stdout = os.fdopen(1, 'w', 1)
+ k.append(stdout)
+
+ def test_close_kept_alive_file():
+ stdout = k.pop()
+ stdout.close()
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_tbstyle_native_setup_error(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture
+ def setup_error_fixture():
+ raise Exception("error in exception")
+
+ def test_error_fixture(setup_error_fixture):
+ pass
+ """
+ )
+ result = pytester.runpytest("--tb=native")
+ result.stdout.fnmatch_lines(
+ ['*File *test_tbstyle_native_setup_error.py", line *, in setup_error_fixture*']
+ )
+
+
+def test_terminal_summary(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_terminal_summary(terminalreporter, exitstatus):
+ w = terminalreporter
+ w.section("hello")
+ w.line("world")
+ w.line("exitstatus: {0}".format(exitstatus))
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ """
+ *==== hello ====*
+ world
+ exitstatus: 5
+ """
+ )
+
+
+@pytest.mark.filterwarnings("default::UserWarning")
+def test_terminal_summary_warnings_are_displayed(pytester: Pytester) -> None:
+ """Test that warnings emitted during pytest_terminal_summary are displayed.
+ (#1305).
+ """
+ pytester.makeconftest(
+ """
+ import warnings
+ def pytest_terminal_summary(terminalreporter):
+ warnings.warn(UserWarning('internal warning'))
+ """
+ )
+ pytester.makepyfile(
+ """
+ def test_failure():
+ import warnings
+ warnings.warn("warning_from_" + "test")
+ assert 0
+ """
+ )
+ result = pytester.runpytest("-ra")
+ result.stdout.fnmatch_lines(
+ [
+ "*= warnings summary =*",
+ "*warning_from_test*",
+ "*= short test summary info =*",
+ "*= warnings summary (final) =*",
+ "*conftest.py:3:*internal warning",
+ "*== 1 failed, 2 warnings in *",
+ ]
+ )
+ result.stdout.no_fnmatch_line("*None*")
+ stdout = result.stdout.str()
+ assert stdout.count("warning_from_test") == 1
+ assert stdout.count("=== warnings summary ") == 2
+
+
+@pytest.mark.filterwarnings("default::UserWarning")
+def test_terminal_summary_warnings_header_once(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_failure():
+ import warnings
+ warnings.warn("warning_from_" + "test")
+ assert 0
+ """
+ )
+ result = pytester.runpytest("-ra")
+ result.stdout.fnmatch_lines(
+ [
+ "*= warnings summary =*",
+ "*warning_from_test*",
+ "*= short test summary info =*",
+ "*== 1 failed, 1 warning in *",
+ ]
+ )
+ result.stdout.no_fnmatch_line("*None*")
+ stdout = result.stdout.str()
+ assert stdout.count("warning_from_test") == 1
+ assert stdout.count("=== warnings summary ") == 1
+
+
+@pytest.mark.filterwarnings("default")
+def test_terminal_no_summary_warnings_header_once(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ def test_failure():
+ import warnings
+ warnings.warn("warning_from_" + "test")
+ assert 0
+ """
+ )
+ result = pytester.runpytest("--no-summary")
+ result.stdout.no_fnmatch_line("*= warnings summary =*")
+ result.stdout.no_fnmatch_line("*= short test summary info =*")
+
+
+@pytest.fixture(scope="session")
+def tr() -> TerminalReporter:
+ config = _pytest.config._prepareconfig()
+ return TerminalReporter(config)
+
+
+@pytest.mark.parametrize(
+ "exp_color, exp_line, stats_arg",
+ [
+ # The method under test only cares about the length of each
+ # dict value, not the actual contents, so tuples of anything
+ # suffice
+ # Important statuses -- the highest priority of these always wins
+ ("red", [("1 failed", {"bold": True, "red": True})], {"failed": [1]}),
+ (
+ "red",
+ [
+ ("1 failed", {"bold": True, "red": True}),
+ ("1 passed", {"bold": False, "green": True}),
+ ],
+ {"failed": [1], "passed": [1]},
+ ),
+ ("red", [("1 error", {"bold": True, "red": True})], {"error": [1]}),
+ ("red", [("2 errors", {"bold": True, "red": True})], {"error": [1, 2]}),
+ (
+ "red",
+ [
+ ("1 passed", {"bold": False, "green": True}),
+ ("1 error", {"bold": True, "red": True}),
+ ],
+ {"error": [1], "passed": [1]},
+ ),
+ # (a status that's not known to the code)
+ ("yellow", [("1 weird", {"bold": True, "yellow": True})], {"weird": [1]}),
+ (
+ "yellow",
+ [
+ ("1 passed", {"bold": False, "green": True}),
+ ("1 weird", {"bold": True, "yellow": True}),
+ ],
+ {"weird": [1], "passed": [1]},
+ ),
+ ("yellow", [("1 warning", {"bold": True, "yellow": True})], {"warnings": [1]}),
+ (
+ "yellow",
+ [
+ ("1 passed", {"bold": False, "green": True}),
+ ("1 warning", {"bold": True, "yellow": True}),
+ ],
+ {"warnings": [1], "passed": [1]},
+ ),
+ (
+ "green",
+ [("5 passed", {"bold": True, "green": True})],
+ {"passed": [1, 2, 3, 4, 5]},
+ ),
+ # "Boring" statuses. These have no effect on the color of the summary
+ # line. Thus, if *every* test has a boring status, the summary line stays
+ # at its default color, i.e. yellow, to warn the user that the test run
+ # produced no useful information
+ ("yellow", [("1 skipped", {"bold": True, "yellow": True})], {"skipped": [1]}),
+ (
+ "green",
+ [
+ ("1 passed", {"bold": True, "green": True}),
+ ("1 skipped", {"bold": False, "yellow": True}),
+ ],
+ {"skipped": [1], "passed": [1]},
+ ),
+ (
+ "yellow",
+ [("1 deselected", {"bold": True, "yellow": True})],
+ {"deselected": [1]},
+ ),
+ (
+ "green",
+ [
+ ("1 passed", {"bold": True, "green": True}),
+ ("1 deselected", {"bold": False, "yellow": True}),
+ ],
+ {"deselected": [1], "passed": [1]},
+ ),
+ ("yellow", [("1 xfailed", {"bold": True, "yellow": True})], {"xfailed": [1]}),
+ (
+ "green",
+ [
+ ("1 passed", {"bold": True, "green": True}),
+ ("1 xfailed", {"bold": False, "yellow": True}),
+ ],
+ {"xfailed": [1], "passed": [1]},
+ ),
+ ("yellow", [("1 xpassed", {"bold": True, "yellow": True})], {"xpassed": [1]}),
+ (
+ "yellow",
+ [
+ ("1 passed", {"bold": False, "green": True}),
+ ("1 xpassed", {"bold": True, "yellow": True}),
+ ],
+ {"xpassed": [1], "passed": [1]},
+ ),
+ # Likewise if no tests were found at all
+ ("yellow", [("no tests ran", {"yellow": True})], {}),
+ # Test the empty-key special case
+ ("yellow", [("no tests ran", {"yellow": True})], {"": [1]}),
+ (
+ "green",
+ [("1 passed", {"bold": True, "green": True})],
+ {"": [1], "passed": [1]},
+ ),
+ # A couple more complex combinations
+ (
+ "red",
+ [
+ ("1 failed", {"bold": True, "red": True}),
+ ("2 passed", {"bold": False, "green": True}),
+ ("3 xfailed", {"bold": False, "yellow": True}),
+ ],
+ {"passed": [1, 2], "failed": [1], "xfailed": [1, 2, 3]},
+ ),
+ (
+ "green",
+ [
+ ("1 passed", {"bold": True, "green": True}),
+ ("2 skipped", {"bold": False, "yellow": True}),
+ ("3 deselected", {"bold": False, "yellow": True}),
+ ("2 xfailed", {"bold": False, "yellow": True}),
+ ],
+ {
+ "passed": [1],
+ "skipped": [1, 2],
+ "deselected": [1, 2, 3],
+ "xfailed": [1, 2],
+ },
+ ),
+ ],
+)
+def test_summary_stats(
+ tr: TerminalReporter,
+ exp_line: List[Tuple[str, Dict[str, bool]]],
+ exp_color: str,
+ stats_arg: Dict[str, List[object]],
+) -> None:
+ tr.stats = stats_arg
+
+ # Fake "_is_last_item" to be True.
+ class fake_session:
+ testscollected = 0
+
+ tr._session = fake_session # type: ignore[assignment]
+ assert tr._is_last_item
+
+ # Reset cache.
+ tr._main_color = None
+
+ print("Based on stats: %s" % stats_arg)
+ print(f'Expect summary: "{exp_line}"; with color "{exp_color}"')
+ (line, color) = tr.build_summary_stats_line()
+ print(f'Actually got: "{line}"; with color "{color}"')
+ assert line == exp_line
+ assert color == exp_color
+
+
+def test_skip_counting_towards_summary(tr):
+ class DummyReport(BaseReport):
+ count_towards_summary = True
+
+ r1 = DummyReport()
+ r2 = DummyReport()
+ tr.stats = {"failed": (r1, r2)}
+ tr._main_color = None
+ res = tr.build_summary_stats_line()
+ assert res == ([("2 failed", {"bold": True, "red": True})], "red")
+
+ r1.count_towards_summary = False
+ tr.stats = {"failed": (r1, r2)}
+ tr._main_color = None
+ res = tr.build_summary_stats_line()
+ assert res == ([("1 failed", {"bold": True, "red": True})], "red")
+
+
+class TestClassicOutputStyle:
+ """Ensure classic output style works as expected (#3883)"""
+
+ @pytest.fixture
+ def test_files(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ **{
+ "test_one.py": "def test_one(): pass",
+ "test_two.py": "def test_two(): assert 0",
+ "sub/test_three.py": """
+ def test_three_1(): pass
+ def test_three_2(): assert 0
+ def test_three_3(): pass
+ """,
+ }
+ )
+
+ def test_normal_verbosity(self, pytester: Pytester, test_files) -> None:
+ result = pytester.runpytest("-o", "console_output_style=classic")
+ result.stdout.fnmatch_lines(
+ [
+ "test_one.py .",
+ "test_two.py F",
+ f"sub{os.sep}test_three.py .F.",
+ "*2 failed, 3 passed in*",
+ ]
+ )
+
+ def test_verbose(self, pytester: Pytester, test_files) -> None:
+ result = pytester.runpytest("-o", "console_output_style=classic", "-v")
+ result.stdout.fnmatch_lines(
+ [
+ "test_one.py::test_one PASSED",
+ "test_two.py::test_two FAILED",
+ f"sub{os.sep}test_three.py::test_three_1 PASSED",
+ f"sub{os.sep}test_three.py::test_three_2 FAILED",
+ f"sub{os.sep}test_three.py::test_three_3 PASSED",
+ "*2 failed, 3 passed in*",
+ ]
+ )
+
+ def test_quiet(self, pytester: Pytester, test_files) -> None:
+ result = pytester.runpytest("-o", "console_output_style=classic", "-q")
+ result.stdout.fnmatch_lines([".F.F.", "*2 failed, 3 passed in*"])
+
+
+class TestProgressOutputStyle:
+ @pytest.fixture
+ def many_tests_files(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_bar="""
+ import pytest
+ @pytest.mark.parametrize('i', range(10))
+ def test_bar(i): pass
+ """,
+ test_foo="""
+ import pytest
+ @pytest.mark.parametrize('i', range(5))
+ def test_foo(i): pass
+ """,
+ test_foobar="""
+ import pytest
+ @pytest.mark.parametrize('i', range(5))
+ def test_foobar(i): pass
+ """,
+ )
+
+ def test_zero_tests_collected(self, pytester: Pytester) -> None:
+        """Some plugins (testmon for example) might issue pytest_runtest_logreport
+        without any tests actually being collected (#2971)."""
+ pytester.makeconftest(
+ """
+ def pytest_collection_modifyitems(items, config):
+ from _pytest.runner import CollectReport
+ for node_id in ('nodeid1', 'nodeid2'):
+ rep = CollectReport(node_id, 'passed', None, None)
+ rep.when = 'passed'
+ rep.duration = 0.1
+ config.hook.pytest_runtest_logreport(report=rep)
+ """
+ )
+ output = pytester.runpytest()
+ output.stdout.no_fnmatch_line("*ZeroDivisionError*")
+ output.stdout.fnmatch_lines(["=* 2 passed in *="])
+
+ def test_normal(self, many_tests_files, pytester: Pytester) -> None:
+ output = pytester.runpytest()
+ output.stdout.re_match_lines(
+ [
+ r"test_bar.py \.{10} \s+ \[ 50%\]",
+ r"test_foo.py \.{5} \s+ \[ 75%\]",
+ r"test_foobar.py \.{5} \s+ \[100%\]",
+ ]
+ )
+
+ def test_colored_progress(
+ self, pytester: Pytester, monkeypatch, color_mapping
+ ) -> None:
+ monkeypatch.setenv("PY_COLORS", "1")
+ pytester.makepyfile(
+ test_axfail="""
+ import pytest
+ @pytest.mark.xfail
+ def test_axfail(): assert 0
+ """,
+ test_bar="""
+ import pytest
+ @pytest.mark.parametrize('i', range(10))
+ def test_bar(i): pass
+ """,
+ test_foo="""
+ import pytest
+ import warnings
+ @pytest.mark.parametrize('i', range(5))
+ def test_foo(i):
+ warnings.warn(DeprecationWarning("collection"))
+ pass
+ """,
+ test_foobar="""
+ import pytest
+ @pytest.mark.parametrize('i', range(5))
+ def test_foobar(i): raise ValueError()
+ """,
+ )
+ result = pytester.runpytest()
+ result.stdout.re_match_lines(
+ color_mapping.format_for_rematch(
+ [
+ r"test_axfail.py {yellow}x{reset}{green} \s+ \[ 4%\]{reset}",
+ r"test_bar.py ({green}\.{reset}){{10}}{green} \s+ \[ 52%\]{reset}",
+ r"test_foo.py ({green}\.{reset}){{5}}{yellow} \s+ \[ 76%\]{reset}",
+ r"test_foobar.py ({red}F{reset}){{5}}{red} \s+ \[100%\]{reset}",
+ ]
+ )
+ )
+
+ # Only xfail should have yellow progress indicator.
+ result = pytester.runpytest("test_axfail.py")
+ result.stdout.re_match_lines(
+ color_mapping.format_for_rematch(
+ [
+ r"test_axfail.py {yellow}x{reset}{yellow} \s+ \[100%\]{reset}",
+ r"^{yellow}=+ ({yellow}{bold}|{bold}{yellow})1 xfailed{reset}{yellow} in ",
+ ]
+ )
+ )
+
+ def test_count(self, many_tests_files, pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ console_output_style = count
+ """
+ )
+ output = pytester.runpytest()
+ output.stdout.re_match_lines(
+ [
+ r"test_bar.py \.{10} \s+ \[10/20\]",
+ r"test_foo.py \.{5} \s+ \[15/20\]",
+ r"test_foobar.py \.{5} \s+ \[20/20\]",
+ ]
+ )
+
+ def test_verbose(self, many_tests_files, pytester: Pytester) -> None:
+ output = pytester.runpytest("-v")
+ output.stdout.re_match_lines(
+ [
+ r"test_bar.py::test_bar\[0\] PASSED \s+ \[ 5%\]",
+ r"test_foo.py::test_foo\[4\] PASSED \s+ \[ 75%\]",
+ r"test_foobar.py::test_foobar\[4\] PASSED \s+ \[100%\]",
+ ]
+ )
+
+ def test_verbose_count(self, many_tests_files, pytester: Pytester) -> None:
+ pytester.makeini(
+ """
+ [pytest]
+ console_output_style = count
+ """
+ )
+ output = pytester.runpytest("-v")
+ output.stdout.re_match_lines(
+ [
+ r"test_bar.py::test_bar\[0\] PASSED \s+ \[ 1/20\]",
+ r"test_foo.py::test_foo\[4\] PASSED \s+ \[15/20\]",
+ r"test_foobar.py::test_foobar\[4\] PASSED \s+ \[20/20\]",
+ ]
+ )
+
+ def test_xdist_normal(
+ self, many_tests_files, pytester: Pytester, monkeypatch
+ ) -> None:
+ pytest.importorskip("xdist")
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
+ output = pytester.runpytest("-n2")
+ output.stdout.re_match_lines([r"\.{20} \s+ \[100%\]"])
+
+ def test_xdist_normal_count(
+ self, many_tests_files, pytester: Pytester, monkeypatch
+ ) -> None:
+ pytest.importorskip("xdist")
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
+ pytester.makeini(
+ """
+ [pytest]
+ console_output_style = count
+ """
+ )
+ output = pytester.runpytest("-n2")
+ output.stdout.re_match_lines([r"\.{20} \s+ \[20/20\]"])
+
+ def test_xdist_verbose(
+ self, many_tests_files, pytester: Pytester, monkeypatch
+ ) -> None:
+ pytest.importorskip("xdist")
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
+ output = pytester.runpytest("-n2", "-v")
+ output.stdout.re_match_lines_random(
+ [
+ r"\[gw\d\] \[\s*\d+%\] PASSED test_bar.py::test_bar\[1\]",
+ r"\[gw\d\] \[\s*\d+%\] PASSED test_foo.py::test_foo\[1\]",
+ r"\[gw\d\] \[\s*\d+%\] PASSED test_foobar.py::test_foobar\[1\]",
+ ]
+ )
+ output.stdout.fnmatch_lines_random(
+ [
+ line.translate(TRANS_FNMATCH)
+ for line in [
+ "test_bar.py::test_bar[0] ",
+ "test_foo.py::test_foo[0] ",
+ "test_foobar.py::test_foobar[0] ",
+ "[gw?] [ 5%] PASSED test_*[?] ",
+ "[gw?] [ 10%] PASSED test_*[?] ",
+ "[gw?] [ 55%] PASSED test_*[?] ",
+ "[gw?] [ 60%] PASSED test_*[?] ",
+ "[gw?] [ 95%] PASSED test_*[?] ",
+ "[gw?] [100%] PASSED test_*[?] ",
+ ]
+ ]
+ )
+
+ def test_capture_no(self, many_tests_files, pytester: Pytester) -> None:
+ output = pytester.runpytest("-s")
+ output.stdout.re_match_lines(
+ [r"test_bar.py \.{10}", r"test_foo.py \.{5}", r"test_foobar.py \.{5}"]
+ )
+
+ output = pytester.runpytest("--capture=no")
+ output.stdout.no_fnmatch_line("*%]*")
+
+
+class TestProgressWithTeardown:
+ """Ensure we show the correct percentages for tests that fail during teardown (#3088)"""
+
+ @pytest.fixture
+ def contest_with_teardown_fixture(self, pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+
+ @pytest.fixture
+ def fail_teardown():
+ yield
+ assert False
+ """
+ )
+
+ @pytest.fixture
+ def many_files(self, pytester: Pytester, contest_with_teardown_fixture) -> None:
+ pytester.makepyfile(
+ test_bar="""
+ import pytest
+ @pytest.mark.parametrize('i', range(5))
+ def test_bar(fail_teardown, i):
+ pass
+ """,
+ test_foo="""
+ import pytest
+ @pytest.mark.parametrize('i', range(15))
+ def test_foo(fail_teardown, i):
+ pass
+ """,
+ )
+
+ def test_teardown_simple(
+ self, pytester: Pytester, contest_with_teardown_fixture
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo(fail_teardown):
+ pass
+ """
+ )
+ output = pytester.runpytest()
+ output.stdout.re_match_lines([r"test_teardown_simple.py \.E\s+\[100%\]"])
+
+ def test_teardown_with_test_also_failing(
+ self, pytester: Pytester, contest_with_teardown_fixture
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo(fail_teardown):
+ assert 0
+ """
+ )
+ output = pytester.runpytest("-rfE")
+ output.stdout.re_match_lines(
+ [
+ r"test_teardown_with_test_also_failing.py FE\s+\[100%\]",
+ "FAILED test_teardown_with_test_also_failing.py::test_foo - assert 0",
+ "ERROR test_teardown_with_test_also_failing.py::test_foo - assert False",
+ ]
+ )
+
+ def test_teardown_many(self, pytester: Pytester, many_files) -> None:
+ output = pytester.runpytest()
+ output.stdout.re_match_lines(
+ [r"test_bar.py (\.E){5}\s+\[ 25%\]", r"test_foo.py (\.E){15}\s+\[100%\]"]
+ )
+
+ def test_teardown_many_verbose(
+ self, pytester: Pytester, many_files, color_mapping
+ ) -> None:
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ color_mapping.format_for_fnmatch(
+ [
+ "test_bar.py::test_bar[0] PASSED * [ 5%]",
+ "test_bar.py::test_bar[0] ERROR * [ 5%]",
+ "test_bar.py::test_bar[4] PASSED * [ 25%]",
+ "test_foo.py::test_foo[14] PASSED * [100%]",
+ "test_foo.py::test_foo[14] ERROR * [100%]",
+ "=* 20 passed, 20 errors in *",
+ ]
+ )
+ )
+
+ def test_xdist_normal(self, many_files, pytester: Pytester, monkeypatch) -> None:
+ pytest.importorskip("xdist")
+ monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
+ output = pytester.runpytest("-n2")
+ output.stdout.re_match_lines([r"[\.E]{40} \s+ \[100%\]"])
+
+
+def test_skip_reasons_folding() -> None:
+ path = "xyz"
+ lineno = 3
+ message = "justso"
+ longrepr = (path, lineno, message)
+
+ class X:
+ pass
+
+ ev1 = cast(CollectReport, X())
+ ev1.when = "execute"
+ ev1.skipped = True # type: ignore[misc]
+ ev1.longrepr = longrepr
+
+ ev2 = cast(CollectReport, X())
+ ev2.when = "execute"
+ ev2.longrepr = longrepr
+ ev2.skipped = True # type: ignore[misc]
+
+ # ev3 might be a collection report
+ ev3 = cast(CollectReport, X())
+ ev3.when = "collect"
+ ev3.longrepr = longrepr
+ ev3.skipped = True # type: ignore[misc]
+
+ values = _folded_skips(Path.cwd(), [ev1, ev2, ev3])
+ assert len(values) == 1
+ num, fspath, lineno_, reason = values[0]
+ assert num == 3
+ assert fspath == path
+ assert lineno_ == lineno
+ assert reason == message
+
+
+def test_line_with_reprcrash(monkeypatch: MonkeyPatch) -> None:
+ mocked_verbose_word = "FAILED"
+
+ mocked_pos = "some::nodeid"
+
+ def mock_get_pos(*args):
+ return mocked_pos
+
+ monkeypatch.setattr(_pytest.terminal, "_get_pos", mock_get_pos)
+
+ class config:
+ pass
+
+ class rep:
+ def _get_verbose_word(self, *args):
+ return mocked_verbose_word
+
+ class longrepr:
+ class reprcrash:
+ pass
+
+ def check(msg, width, expected):
+ __tracebackhide__ = True
+ if msg:
+ rep.longrepr.reprcrash.message = msg # type: ignore
+ actual = _get_line_with_reprcrash_message(config, rep(), width) # type: ignore
+
+ assert actual == expected
+ if actual != f"{mocked_verbose_word} {mocked_pos}":
+ assert len(actual) <= width
+ assert wcswidth(actual) <= width
+
+ # AttributeError with message
+ check(None, 80, "FAILED some::nodeid")
+
+ check("msg", 80, "FAILED some::nodeid - msg")
+ check("msg", 3, "FAILED some::nodeid")
+
+ check("msg", 24, "FAILED some::nodeid")
+ check("msg", 25, "FAILED some::nodeid - msg")
+
+ check("some longer msg", 24, "FAILED some::nodeid")
+ check("some longer msg", 25, "FAILED some::nodeid - ...")
+ check("some longer msg", 26, "FAILED some::nodeid - s...")
+
+ check("some\nmessage", 25, "FAILED some::nodeid - ...")
+ check("some\nmessage", 26, "FAILED some::nodeid - some")
+ check("some\nmessage", 80, "FAILED some::nodeid - some")
+
+ # Test unicode safety.
+    check("🉐🉐🉐🉐🉐\n2nd line", 25, "FAILED some::nodeid - ...")
+    check("🉐🉐🉐🉐🉐\n2nd line", 26, "FAILED some::nodeid - ...")
+    check("🉐🉐🉐🉐🉐\n2nd line", 27, "FAILED some::nodeid - 🉐...")
+    check("🉐🉐🉐🉐🉐\n2nd line", 28, "FAILED some::nodeid - 🉐...")
+    check("🉐🉐🉐🉐🉐\n2nd line", 29, "FAILED some::nodeid - 🉐🉐...")
+
+ # NOTE: constructed, not sure if this is supported.
+    mocked_pos = "nodeid::🉐::withunicode"
+    check("🉐🉐🉐🉐🉐\n2nd line", 29, "FAILED nodeid::🉐::withunicode")
+    check("🉐🉐🉐🉐🉐\n2nd line", 40, "FAILED nodeid::🉐::withunicode - 🉐🉐...")
+    check("🉐🉐🉐🉐🉐\n2nd line", 41, "FAILED nodeid::🉐::withunicode - 🉐🉐...")
+    check("🉐🉐🉐🉐🉐\n2nd line", 42, "FAILED nodeid::🉐::withunicode - 🉐🉐🉐...")
+    check("🉐🉐🉐🉐🉐\n2nd line", 80, "FAILED nodeid::🉐::withunicode - 🉐🉐🉐🉐🉐")
+
+
+@pytest.mark.parametrize(
+ "seconds, expected",
+ [
+ (10.0, "10.00s"),
+ (10.34, "10.34s"),
+ (59.99, "59.99s"),
+ (60.55, "60.55s (0:01:00)"),
+ (123.55, "123.55s (0:02:03)"),
+ (60 * 60 + 0.5, "3600.50s (1:00:00)"),
+ ],
+)
+def test_format_session_duration(seconds, expected):
+ from _pytest.terminal import format_session_duration
+
+ assert format_session_duration(seconds) == expected
+
+
+def test_collecterror(pytester: Pytester) -> None:
+ p1 = pytester.makepyfile("raise SyntaxError()")
+ result = pytester.runpytest("-ra", str(p1))
+ result.stdout.fnmatch_lines(
+ [
+ "collected 0 items / 1 error",
+ "*= ERRORS =*",
+ "*_ ERROR collecting test_collecterror.py _*",
+ "E SyntaxError: *",
+ "*= short test summary info =*",
+ "ERROR test_collecterror.py",
+ "*! Interrupted: 1 error during collection !*",
+ "*= 1 error in *",
+ ]
+ )
+
+
+def test_no_summary_collecterror(pytester: Pytester) -> None:
+ p1 = pytester.makepyfile("raise SyntaxError()")
+ result = pytester.runpytest("-ra", "--no-summary", str(p1))
+ result.stdout.no_fnmatch_line("*= ERRORS =*")
+
+
+def test_via_exec(pytester: Pytester) -> None:
+ p1 = pytester.makepyfile("exec('def test_via_exec(): pass')")
+ result = pytester.runpytest(str(p1), "-vv")
+ result.stdout.fnmatch_lines(
+ ["test_via_exec.py::test_via_exec <- <string> PASSED*", "*= 1 passed in *"]
+ )
+
+
+class TestCodeHighlight:
+ def test_code_highlight_simple(self, pytester: Pytester, color_mapping) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 10
+ """
+ )
+ result = pytester.runpytest("--color=yes")
+ result.stdout.fnmatch_lines(
+ color_mapping.format_for_fnmatch(
+ [
+ " {kw}def{hl-reset} {function}test_foo{hl-reset}():",
+ "> {kw}assert{hl-reset} {number}1{hl-reset} == {number}10{hl-reset}",
+ "{bold}{red}E assert 1 == 10{reset}",
+ ]
+ )
+ )
+
+ def test_code_highlight_continuation(
+ self, pytester: Pytester, color_mapping
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo():
+ print('''
+ '''); assert 0
+ """
+ )
+ result = pytester.runpytest("--color=yes")
+
+ result.stdout.fnmatch_lines(
+ color_mapping.format_for_fnmatch(
+ [
+ " {kw}def{hl-reset} {function}test_foo{hl-reset}():",
+ " {print}print{hl-reset}({str}'''{hl-reset}{str}{hl-reset}",
+ "> {str} {hl-reset}{str}'''{hl-reset}); {kw}assert{hl-reset} {number}0{hl-reset}",
+ "{bold}{red}E assert 0{reset}",
+ ]
+ )
+ )
+
+ def test_code_highlight_custom_theme(
+ self, pytester: Pytester, color_mapping, monkeypatch: MonkeyPatch
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 10
+ """
+ )
+ monkeypatch.setenv("PYTEST_THEME", "solarized-dark")
+ monkeypatch.setenv("PYTEST_THEME_MODE", "dark")
+ result = pytester.runpytest("--color=yes")
+ result.stdout.fnmatch_lines(
+ color_mapping.format_for_fnmatch(
+ [
+ " {kw}def{hl-reset} {function}test_foo{hl-reset}():",
+ "> {kw}assert{hl-reset} {number}1{hl-reset} == {number}10{hl-reset}",
+ "{bold}{red}E assert 1 == 10{reset}",
+ ]
+ )
+ )
+
+ def test_code_highlight_invalid_theme(
+ self, pytester: Pytester, color_mapping, monkeypatch: MonkeyPatch
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 10
+ """
+ )
+ monkeypatch.setenv("PYTEST_THEME", "invalid")
+ result = pytester.runpytest_subprocess("--color=yes")
+ result.stderr.fnmatch_lines(
+ "ERROR: PYTEST_THEME environment variable had an invalid value: 'invalid'. "
+ "Only valid pygment styles are allowed."
+ )
+
+ def test_code_highlight_invalid_theme_mode(
+ self, pytester: Pytester, color_mapping, monkeypatch: MonkeyPatch
+ ) -> None:
+ pytester.makepyfile(
+ """
+ def test_foo():
+ assert 1 == 10
+ """
+ )
+ monkeypatch.setenv("PYTEST_THEME_MODE", "invalid")
+ result = pytester.runpytest_subprocess("--color=yes")
+ result.stderr.fnmatch_lines(
+ "ERROR: PYTEST_THEME_MODE environment variable had an invalid value: 'invalid'. "
+ "The only allowed values are 'dark' and 'light'."
+ )
+
+
+def test_raw_skip_reason_skipped() -> None:
+ report = SimpleNamespace()
+ report.skipped = True
+ report.longrepr = ("xyz", 3, "Skipped: Just so")
+
+ reason = _get_raw_skip_reason(cast(TestReport, report))
+ assert reason == "Just so"
+
+
+def test_raw_skip_reason_xfail() -> None:
+ report = SimpleNamespace()
+ report.wasxfail = "reason: To everything there is a season"
+
+ reason = _get_raw_skip_reason(cast(TestReport, report))
+ assert reason == "To everything there is a season"
+
+
+def test_format_trimmed() -> None:
+ msg = "unconditional skip"
+
+ assert _format_trimmed(" ({}) ", msg, len(msg) + 4) == " (unconditional skip) "
+ assert _format_trimmed(" ({}) ", msg, len(msg) + 3) == " (unconditional ...) "
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_threadexception.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_threadexception.py
new file mode 100644
index 0000000000..5b7519f27d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_threadexception.py
@@ -0,0 +1,137 @@
+import sys
+
+import pytest
+from _pytest.pytester import Pytester
+
+
+if sys.version_info < (3, 8):
+ pytest.skip("threadexception plugin needs Python>=3.8", allow_module_level=True)
+
+
+@pytest.mark.filterwarnings("default::pytest.PytestUnhandledThreadExceptionWarning")
+def test_unhandled_thread_exception(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_it="""
+ import threading
+
+ def test_it():
+ def oops():
+ raise ValueError("Oops")
+
+ t = threading.Thread(target=oops, name="MyThread")
+ t.start()
+ t.join()
+
+ def test_2(): pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+ assert result.parseoutcomes() == {"passed": 2, "warnings": 1}
+ result.stdout.fnmatch_lines(
+ [
+ "*= warnings summary =*",
+ "test_it.py::test_it",
+ " * PytestUnhandledThreadExceptionWarning: Exception in thread MyThread",
+ " ",
+ " Traceback (most recent call last):",
+ " ValueError: Oops",
+ " ",
+ " warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))",
+ ]
+ )
+
+
+@pytest.mark.filterwarnings("default::pytest.PytestUnhandledThreadExceptionWarning")
+def test_unhandled_thread_exception_in_setup(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_it="""
+ import threading
+ import pytest
+
+ @pytest.fixture
+ def threadexc():
+ def oops():
+ raise ValueError("Oops")
+ t = threading.Thread(target=oops, name="MyThread")
+ t.start()
+ t.join()
+
+ def test_it(threadexc): pass
+ def test_2(): pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+ assert result.parseoutcomes() == {"passed": 2, "warnings": 1}
+ result.stdout.fnmatch_lines(
+ [
+ "*= warnings summary =*",
+ "test_it.py::test_it",
+ " * PytestUnhandledThreadExceptionWarning: Exception in thread MyThread",
+ " ",
+ " Traceback (most recent call last):",
+ " ValueError: Oops",
+ " ",
+ " warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))",
+ ]
+ )
+
+
+@pytest.mark.filterwarnings("default::pytest.PytestUnhandledThreadExceptionWarning")
+def test_unhandled_thread_exception_in_teardown(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_it="""
+ import threading
+ import pytest
+
+ @pytest.fixture
+ def threadexc():
+ def oops():
+ raise ValueError("Oops")
+ yield
+ t = threading.Thread(target=oops, name="MyThread")
+ t.start()
+ t.join()
+
+ def test_it(threadexc): pass
+ def test_2(): pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+ assert result.parseoutcomes() == {"passed": 2, "warnings": 1}
+ result.stdout.fnmatch_lines(
+ [
+ "*= warnings summary =*",
+ "test_it.py::test_it",
+ " * PytestUnhandledThreadExceptionWarning: Exception in thread MyThread",
+ " ",
+ " Traceback (most recent call last):",
+ " ValueError: Oops",
+ " ",
+ " warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))",
+ ]
+ )
+
+
+@pytest.mark.filterwarnings("error::pytest.PytestUnhandledThreadExceptionWarning")
+def test_unhandled_thread_exception_warning_error(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_it="""
+ import threading
+ import pytest
+
+ def test_it():
+ def oops():
+ raise ValueError("Oops")
+ t = threading.Thread(target=oops, name="MyThread")
+ t.start()
+ t.join()
+
+ def test_2(): pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == pytest.ExitCode.TESTS_FAILED
+ assert result.parseoutcomes() == {"passed": 1, "failed": 1}
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_tmpdir.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_tmpdir.py
new file mode 100644
index 0000000000..4f7c538470
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_tmpdir.py
@@ -0,0 +1,480 @@
+import os
+import stat
+import sys
+import warnings
+from pathlib import Path
+from typing import Callable
+from typing import cast
+from typing import List
+
+import attr
+
+import pytest
+from _pytest import pathlib
+from _pytest.config import Config
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pathlib import cleanup_numbered_dir
+from _pytest.pathlib import create_cleanup_lock
+from _pytest.pathlib import make_numbered_dir
+from _pytest.pathlib import maybe_delete_a_numbered_dir
+from _pytest.pathlib import on_rm_rf_error
+from _pytest.pathlib import register_cleanup_lock_removal
+from _pytest.pathlib import rm_rf
+from _pytest.pytester import Pytester
+from _pytest.tmpdir import get_user
+from _pytest.tmpdir import TempPathFactory
+
+
+def test_tmp_path_fixture(pytester: Pytester) -> None:
+ p = pytester.copy_example("tmpdir/tmp_path_fixture.py")
+ results = pytester.runpytest(p)
+ results.stdout.fnmatch_lines(["*1 passed*"])
+
+
+@attr.s
+class FakeConfig:
+ basetemp = attr.ib()
+
+ @property
+ def trace(self):
+ return self
+
+ def get(self, key):
+ return lambda *k: None
+
+ @property
+ def option(self):
+ return self
+
+
+class TestTmpPathHandler:
+ def test_mktemp(self, tmp_path):
+ config = cast(Config, FakeConfig(tmp_path))
+ t = TempPathFactory.from_config(config, _ispytest=True)
+ tmp = t.mktemp("world")
+ assert str(tmp.relative_to(t.getbasetemp())) == "world0"
+ tmp = t.mktemp("this")
+ assert str(tmp.relative_to(t.getbasetemp())).startswith("this")
+ tmp2 = t.mktemp("this")
+ assert str(tmp2.relative_to(t.getbasetemp())).startswith("this")
+ assert tmp2 != tmp
+
+ def test_tmppath_relative_basetemp_absolute(self, tmp_path, monkeypatch):
+ """#4425"""
+ monkeypatch.chdir(tmp_path)
+ config = cast(Config, FakeConfig("hello"))
+ t = TempPathFactory.from_config(config, _ispytest=True)
+ assert t.getbasetemp().resolve() == (tmp_path / "hello").resolve()
+
+
+class TestConfigTmpPath:
+ def test_getbasetemp_custom_removes_old(self, pytester: Pytester) -> None:
+ mytemp = pytester.path.joinpath("xyz")
+ p = pytester.makepyfile(
+ """
+ def test_1(tmp_path):
+ pass
+ """
+ )
+ pytester.runpytest(p, "--basetemp=%s" % mytemp)
+ assert mytemp.exists()
+ mytemp.joinpath("hello").touch()
+
+ pytester.runpytest(p, "--basetemp=%s" % mytemp)
+ assert mytemp.exists()
+ assert not mytemp.joinpath("hello").exists()
+
+
+testdata = [
+ ("mypath", True),
+ ("/mypath1", False),
+ ("./mypath1", True),
+ ("../mypath3", False),
+ ("../../mypath4", False),
+ ("mypath5/..", False),
+ ("mypath6/../mypath6", True),
+ ("mypath7/../mypath7/..", False),
+]
+
+
+@pytest.mark.parametrize("basename, is_ok", testdata)
+def test_mktemp(pytester: Pytester, basename: str, is_ok: bool) -> None:
+ mytemp = pytester.mkdir("mytemp")
+ p = pytester.makepyfile(
+ """
+ def test_abs_path(tmp_path_factory):
+ tmp_path_factory.mktemp('{}', numbered=False)
+ """.format(
+ basename
+ )
+ )
+
+ result = pytester.runpytest(p, "--basetemp=%s" % mytemp)
+ if is_ok:
+ assert result.ret == 0
+ assert mytemp.joinpath(basename).exists()
+ else:
+ assert result.ret == 1
+ result.stdout.fnmatch_lines("*ValueError*")
+
+
+def test_tmp_path_always_is_realpath(pytester: Pytester, monkeypatch) -> None:
+    # The reason why tmp_path should be a realpath is that when you cd to it
+    # and call os.getcwd() you will get the realpath anyway. Using the
+    # symlinked path can thus easily result in path inequality.
+ # XXX if that proves to be a problem, consider using
+ # os.environ["PWD"]
+ realtemp = pytester.mkdir("myrealtemp")
+ linktemp = pytester.path.joinpath("symlinktemp")
+ attempt_symlink_to(linktemp, str(realtemp))
+ monkeypatch.setenv("PYTEST_DEBUG_TEMPROOT", str(linktemp))
+ pytester.makepyfile(
+ """
+ def test_1(tmp_path):
+ assert tmp_path.resolve() == tmp_path
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+def test_tmp_path_too_long_on_parametrization(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.mark.parametrize("arg", ["1"*1000])
+ def test_some(arg, tmp_path):
+ tmp_path.joinpath("hello").touch()
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+def test_tmp_path_factory(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import pytest
+ @pytest.fixture(scope='session')
+ def session_dir(tmp_path_factory):
+ return tmp_path_factory.mktemp('data', numbered=False)
+ def test_some(session_dir):
+ assert session_dir.is_dir()
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+def test_tmp_path_fallback_tox_env(pytester: Pytester, monkeypatch) -> None:
+    """Test that tmp_path works even if the environment variables required by
+    the getpass module are missing (#1010).
+ """
+ monkeypatch.delenv("USER", raising=False)
+ monkeypatch.delenv("USERNAME", raising=False)
+ pytester.makepyfile(
+ """
+ def test_some(tmp_path):
+ assert tmp_path.is_dir()
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+@pytest.fixture
+def break_getuser(monkeypatch):
+ monkeypatch.setattr("os.getuid", lambda: -1)
+ # taken from python 2.7/3.4
+ for envvar in ("LOGNAME", "USER", "LNAME", "USERNAME"):
+ monkeypatch.delenv(envvar, raising=False)
+
+
+@pytest.mark.usefixtures("break_getuser")
+@pytest.mark.skipif(sys.platform.startswith("win"), reason="no os.getuid on windows")
+def test_tmp_path_fallback_uid_not_found(pytester: Pytester) -> None:
+ """Test that tmp_path works even if the current process's user id does not
+ correspond to a valid user.
+ """
+
+ pytester.makepyfile(
+ """
+ def test_some(tmp_path):
+ assert tmp_path.is_dir()
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+@pytest.mark.usefixtures("break_getuser")
+@pytest.mark.skipif(sys.platform.startswith("win"), reason="no os.getuid on windows")
+def test_get_user_uid_not_found():
+    """Test that the get_user() function works even if the current process's
+    user id does not correspond to a valid user (e.g. running pytest in a
+    Docker container with 'docker run -u').
+ """
+ assert get_user() is None
+
+
+@pytest.mark.skipif(not sys.platform.startswith("win"), reason="win only")
+def test_get_user(monkeypatch):
+    """Test that the get_user() function works even if the environment variables
+    required by the getpass module are missing on Windows
+ (#1010).
+ """
+ monkeypatch.delenv("USER", raising=False)
+ monkeypatch.delenv("USERNAME", raising=False)
+ assert get_user() is None
+
+
+class TestNumberedDir:
+ PREFIX = "fun-"
+
+ def test_make(self, tmp_path):
+ for i in range(10):
+ d = make_numbered_dir(root=tmp_path, prefix=self.PREFIX)
+ assert d.name.startswith(self.PREFIX)
+ assert d.name.endswith(str(i))
+
+ symlink = tmp_path.joinpath(self.PREFIX + "current")
+ if symlink.exists():
+ # unix
+ assert symlink.is_symlink()
+ assert symlink.resolve() == d.resolve()
+
+ def test_cleanup_lock_create(self, tmp_path):
+ d = tmp_path.joinpath("test")
+ d.mkdir()
+ lockfile = create_cleanup_lock(d)
+ with pytest.raises(OSError, match="cannot create lockfile in .*"):
+ create_cleanup_lock(d)
+
+ lockfile.unlink()
+
+ def test_lock_register_cleanup_removal(self, tmp_path: Path) -> None:
+ lock = create_cleanup_lock(tmp_path)
+
+ registry: List[Callable[..., None]] = []
+ register_cleanup_lock_removal(lock, register=registry.append)
+
+ (cleanup_func,) = registry
+
+ assert lock.is_file()
+
+ cleanup_func(original_pid="intentionally_different")
+
+ assert lock.is_file()
+
+ cleanup_func()
+
+ assert not lock.exists()
+
+ cleanup_func()
+
+ assert not lock.exists()
+
+ def _do_cleanup(self, tmp_path: Path) -> None:
+ self.test_make(tmp_path)
+ cleanup_numbered_dir(
+ root=tmp_path,
+ prefix=self.PREFIX,
+ keep=2,
+ consider_lock_dead_if_created_before=0,
+ )
+
+ def test_cleanup_keep(self, tmp_path):
+ self._do_cleanup(tmp_path)
+ a, b = (x for x in tmp_path.iterdir() if not x.is_symlink())
+ print(a, b)
+
+ def test_cleanup_locked(self, tmp_path):
+ p = make_numbered_dir(root=tmp_path, prefix=self.PREFIX)
+
+ create_cleanup_lock(p)
+
+ assert not pathlib.ensure_deletable(
+ p, consider_lock_dead_if_created_before=p.stat().st_mtime - 1
+ )
+ assert pathlib.ensure_deletable(
+ p, consider_lock_dead_if_created_before=p.stat().st_mtime + 1
+ )
+
+ def test_cleanup_ignores_symlink(self, tmp_path):
+ the_symlink = tmp_path / (self.PREFIX + "current")
+ attempt_symlink_to(the_symlink, tmp_path / (self.PREFIX + "5"))
+ self._do_cleanup(tmp_path)
+
+ def test_removal_accepts_lock(self, tmp_path):
+ folder = make_numbered_dir(root=tmp_path, prefix=self.PREFIX)
+ create_cleanup_lock(folder)
+ maybe_delete_a_numbered_dir(folder)
+ assert folder.is_dir()
+
+
+class TestRmRf:
+ def test_rm_rf(self, tmp_path):
+ adir = tmp_path / "adir"
+ adir.mkdir()
+ rm_rf(adir)
+
+ assert not adir.exists()
+
+ adir.mkdir()
+ afile = adir / "afile"
+ afile.write_bytes(b"aa")
+
+ rm_rf(adir)
+ assert not adir.exists()
+
+ def test_rm_rf_with_read_only_file(self, tmp_path):
+ """Ensure rm_rf can remove directories with read-only files in them (#5524)"""
+ fn = tmp_path / "dir/foo.txt"
+ fn.parent.mkdir()
+
+ fn.touch()
+
+ self.chmod_r(fn)
+
+ rm_rf(fn.parent)
+
+ assert not fn.parent.is_dir()
+
+ def chmod_r(self, path):
+ mode = os.stat(str(path)).st_mode
+ os.chmod(str(path), mode & ~stat.S_IWRITE)
+
+ def test_rm_rf_with_read_only_directory(self, tmp_path):
+ """Ensure rm_rf can remove read-only directories (#5524)"""
+ adir = tmp_path / "dir"
+ adir.mkdir()
+
+ (adir / "foo.txt").touch()
+ self.chmod_r(adir)
+
+ rm_rf(adir)
+
+ assert not adir.is_dir()
+
+ def test_on_rm_rf_error(self, tmp_path: Path) -> None:
+ adir = tmp_path / "dir"
+ adir.mkdir()
+
+ fn = adir / "foo.txt"
+ fn.touch()
+ self.chmod_r(fn)
+
+ # unknown exception
+ with pytest.warns(pytest.PytestWarning):
+ exc_info1 = (None, RuntimeError(), None)
+ on_rm_rf_error(os.unlink, str(fn), exc_info1, start_path=tmp_path)
+ assert fn.is_file()
+
+ # we ignore FileNotFoundError
+ exc_info2 = (None, FileNotFoundError(), None)
+ assert not on_rm_rf_error(None, str(fn), exc_info2, start_path=tmp_path)
+
+ # unknown function
+ with pytest.warns(
+ pytest.PytestWarning,
+ match=r"^\(rm_rf\) unknown function None when removing .*foo.txt:\nNone: ",
+ ):
+ exc_info3 = (None, PermissionError(), None)
+ on_rm_rf_error(None, str(fn), exc_info3, start_path=tmp_path)
+ assert fn.is_file()
+
+ # ignored function
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ with pytest.warns(None) as warninfo: # type: ignore[call-overload]
+ exc_info4 = (None, PermissionError(), None)
+ on_rm_rf_error(os.open, str(fn), exc_info4, start_path=tmp_path)
+ assert fn.is_file()
+ assert not [x.message for x in warninfo]
+
+ exc_info5 = (None, PermissionError(), None)
+ on_rm_rf_error(os.unlink, str(fn), exc_info5, start_path=tmp_path)
+ assert not fn.is_file()
+
+
+def attempt_symlink_to(path, to_path):
+ """Try to make a symlink from "path" to "to_path", skipping in case this platform
+ does not support it or we don't have sufficient privileges (common on Windows)."""
+ try:
+ Path(path).symlink_to(Path(to_path))
+ except OSError:
+ pytest.skip("could not create symbolic link")
+
+
+def test_basetemp_with_read_only_files(pytester: Pytester) -> None:
+ """Integration test for #5524"""
+ pytester.makepyfile(
+ """
+ import os
+ import stat
+
+ def test(tmp_path):
+ fn = tmp_path / 'foo.txt'
+ fn.write_text('hello')
+ mode = os.stat(str(fn)).st_mode
+ os.chmod(str(fn), mode & ~stat.S_IREAD)
+ """
+ )
+ result = pytester.runpytest("--basetemp=tmp")
+ assert result.ret == 0
+    # run a second time and ensure we don't crash
+ result = pytester.runpytest("--basetemp=tmp")
+ assert result.ret == 0
+
+
+def test_tmp_path_factory_handles_invalid_dir_characters(
+ tmp_path_factory: TempPathFactory, monkeypatch: MonkeyPatch
+) -> None:
+ monkeypatch.setattr("getpass.getuser", lambda: "os/<:*?;>agnostic")
+ # _basetemp / _given_basetemp are cached / set in parallel runs, patch them
+ monkeypatch.setattr(tmp_path_factory, "_basetemp", None)
+ monkeypatch.setattr(tmp_path_factory, "_given_basetemp", None)
+ p = tmp_path_factory.getbasetemp()
+ assert "pytest-of-unknown" in str(p)
+
+
+@pytest.mark.skipif(not hasattr(os, "getuid"), reason="checks unix permissions")
+def test_tmp_path_factory_create_directory_with_safe_permissions(
+ tmp_path: Path, monkeypatch: MonkeyPatch
+) -> None:
+ """Verify that pytest creates directories under /tmp with private permissions."""
+ # Use the test's tmp_path as the system temproot (/tmp).
+ monkeypatch.setenv("PYTEST_DEBUG_TEMPROOT", str(tmp_path))
+ tmp_factory = TempPathFactory(None, lambda *args: None, _ispytest=True)
+ basetemp = tmp_factory.getbasetemp()
+
+ # No world-readable permissions.
+ assert (basetemp.stat().st_mode & 0o077) == 0
+ # Parent too (pytest-of-foo).
+ assert (basetemp.parent.stat().st_mode & 0o077) == 0
+
+
+@pytest.mark.skipif(not hasattr(os, "getuid"), reason="checks unix permissions")
+def test_tmp_path_factory_fixes_up_world_readable_permissions(
+ tmp_path: Path, monkeypatch: MonkeyPatch
+) -> None:
+ """Verify that if a /tmp/pytest-of-foo directory already exists with
+ world-readable permissions, it is fixed.
+
+ pytest used to mkdir with such permissions, that's why we fix it up.
+ """
+ # Use the test's tmp_path as the system temproot (/tmp).
+ monkeypatch.setenv("PYTEST_DEBUG_TEMPROOT", str(tmp_path))
+ tmp_factory = TempPathFactory(None, lambda *args: None, _ispytest=True)
+ basetemp = tmp_factory.getbasetemp()
+
+ # Before - simulate bad perms.
+ os.chmod(basetemp.parent, 0o777)
+ assert (basetemp.parent.stat().st_mode & 0o077) != 0
+
+ tmp_factory = TempPathFactory(None, lambda *args: None, _ispytest=True)
+ basetemp = tmp_factory.getbasetemp()
+
+ # After - fixed.
+ assert (basetemp.parent.stat().st_mode & 0o077) == 0
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_unittest.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_unittest.py
new file mode 100644
index 0000000000..1601086d5b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_unittest.py
@@ -0,0 +1,1500 @@
+import gc
+import sys
+from typing import List
+
+import pytest
+from _pytest.config import ExitCode
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pytester import Pytester
+
+
+def test_simple_unittest(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ def testpassing(self):
+ self.assertEqual('foo', 'foo')
+ def test_failing(self):
+ self.assertEqual('foo', 'bar')
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ assert reprec.matchreport("testpassing").passed
+ assert reprec.matchreport("test_failing").failed
+
+
+def test_runTest_method(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCaseWithRunTest(unittest.TestCase):
+ def runTest(self):
+ self.assertEqual('foo', 'foo')
+ class MyTestCaseWithoutRunTest(unittest.TestCase):
+ def runTest(self):
+ self.assertEqual('foo', 'foo')
+ def test_something(self):
+ pass
+ """
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ """
+ *MyTestCaseWithRunTest::runTest*
+ *MyTestCaseWithoutRunTest::test_something*
+ *2 passed*
+ """
+ )
+
+
+def test_isclasscheck_issue53(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class _E(object):
+ def __getattr__(self, tag):
+ pass
+ E = _E()
+ """
+ )
+ result = pytester.runpytest(testpath)
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+def test_setup(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ def setUp(self):
+ self.foo = 1
+ def setup_method(self, method):
+ self.foo2 = 1
+ def test_both(self):
+ self.assertEqual(1, self.foo)
+ assert self.foo2 == 1
+ def teardown_method(self, method):
+ assert 0, "42"
+
+ """
+ )
+ reprec = pytester.inline_run("-s", testpath)
+ assert reprec.matchreport("test_both", when="call").passed
+ rep = reprec.matchreport("test_both", when="teardown")
+ assert rep.failed and "42" in str(rep.longrepr)
+
+
+def test_setUpModule(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ values = []
+
+ def setUpModule():
+ values.append(1)
+
+ def tearDownModule():
+ del values[0]
+
+ def test_hello():
+ assert values == [1]
+
+ def test_world():
+ assert values == [1]
+ """
+ )
+ result = pytester.runpytest(testpath)
+ result.stdout.fnmatch_lines(["*2 passed*"])
+
+
+def test_setUpModule_failing_no_teardown(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ values = []
+
+ def setUpModule():
+ 0/0
+
+ def tearDownModule():
+ values.append(1)
+
+ def test_hello():
+ pass
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ reprec.assertoutcome(passed=0, failed=1)
+ call = reprec.getcalls("pytest_runtest_setup")[0]
+ assert not call.item.module.values
+
+
+def test_new_instances(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ def test_func1(self):
+ self.x = 2
+ def test_func2(self):
+ assert not hasattr(self, 'x')
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ reprec.assertoutcome(passed=2)
+
+
+def test_function_item_obj_is_instance(pytester: Pytester) -> None:
+ """item.obj should be a bound method on unittest.TestCase function items (#5390)."""
+ pytester.makeconftest(
+ """
+ def pytest_runtest_makereport(item, call):
+ if call.when == 'call':
+ class_ = item.parent.obj
+ assert isinstance(item.obj.__self__, class_)
+ """
+ )
+ pytester.makepyfile(
+ """
+ import unittest
+
+ class Test(unittest.TestCase):
+ def test_foo(self):
+ pass
+ """
+ )
+ result = pytester.runpytest_inprocess()
+ result.stdout.fnmatch_lines(["* 1 passed in*"])
+
+
+def test_teardown(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ values = []
+ def test_one(self):
+ pass
+ def tearDown(self):
+ self.values.append(None)
+ class Second(unittest.TestCase):
+ def test_check(self):
+ self.assertEqual(MyTestCase.values, [None])
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 0, failed
+ assert passed == 2
+ assert passed + skipped + failed == 2
+
+
+def test_teardown_issue1649(pytester: Pytester) -> None:
+ """
+ Are TestCase objects cleaned up? Often unittest TestCase objects set
+ attributes that are large and expensive during setUp.
+
+    The TestCase will not be cleaned up if the test fails, because a
+    reference to it would then remain in the stack frame.
+ """
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class TestCaseObjectsShouldBeCleanedUp(unittest.TestCase):
+ def setUp(self):
+ self.an_expensive_object = 1
+ def test_demo(self):
+ pass
+
+ """
+ )
+ pytester.inline_run("-s", testpath)
+ gc.collect()
+ for obj in gc.get_objects():
+ assert type(obj).__name__ != "TestCaseObjectsShouldBeCleanedUp"
+
+
+def test_unittest_skip_issue148(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+
+ @unittest.skip("hello")
+ class MyTestCase(unittest.TestCase):
+ @classmethod
+ def setUpClass(self):
+ xxx
+ def test_one(self):
+ pass
+ @classmethod
+ def tearDownClass(self):
+ xxx
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ reprec.assertoutcome(skipped=1)
+
+
+def test_method_and_teardown_failing_reporting(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import unittest
+ class TC(unittest.TestCase):
+ def tearDown(self):
+ assert 0, "down1"
+ def test_method(self):
+ assert False, "down2"
+ """
+ )
+ result = pytester.runpytest("-s")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(
+ [
+ "*tearDown*",
+ "*assert 0*",
+ "*test_method*",
+ "*assert False*",
+ "*1 failed*1 error*",
+ ]
+ )
+
+
+def test_setup_failure_is_shown(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import unittest
+ import pytest
+ class TC(unittest.TestCase):
+ def setUp(self):
+ assert 0, "down1"
+ def test_method(self):
+ print("never42")
+ xyz
+ """
+ )
+ result = pytester.runpytest("-s")
+ assert result.ret == 1
+ result.stdout.fnmatch_lines(["*setUp*", "*assert 0*down1*", "*1 failed*"])
+ result.stdout.no_fnmatch_line("*never42*")
+
+
+def test_setup_setUpClass(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ import pytest
+ class MyTestCase(unittest.TestCase):
+ x = 0
+ @classmethod
+ def setUpClass(cls):
+ cls.x += 1
+ def test_func1(self):
+ assert self.x == 1
+ def test_func2(self):
+ assert self.x == 1
+ @classmethod
+ def tearDownClass(cls):
+ cls.x -= 1
+ def test_teareddown():
+ assert MyTestCase.x == 0
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ reprec.assertoutcome(passed=3)
+
+
+def test_fixtures_setup_setUpClass_issue8394(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ pass
+ def test_func1(self):
+ pass
+ @classmethod
+ def tearDownClass(cls):
+ pass
+ """
+ )
+ result = pytester.runpytest("--fixtures")
+ assert result.ret == 0
+ result.stdout.no_fnmatch_line("*no docstring available*")
+
+ result = pytester.runpytest("--fixtures", "-v")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(["*no docstring available*"])
+
+
+def test_setup_class(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ import pytest
+ class MyTestCase(unittest.TestCase):
+ x = 0
+ def setup_class(cls):
+ cls.x += 1
+ def test_func1(self):
+ assert self.x == 1
+ def test_func2(self):
+ assert self.x == 1
+ def teardown_class(cls):
+ cls.x -= 1
+ def test_teareddown():
+ assert MyTestCase.x == 0
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ reprec.assertoutcome(passed=3)
+
+
+@pytest.mark.parametrize("type", ["Error", "Failure"])
+def test_testcase_adderrorandfailure_defers(pytester: Pytester, type: str) -> None:
+ pytester.makepyfile(
+ """
+ from unittest import TestCase
+ import pytest
+ class MyTestCase(TestCase):
+ def run(self, result):
+ excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)
+ try:
+ result.add%s(self, excinfo._excinfo)
+ except KeyboardInterrupt:
+ raise
+ except:
+ pytest.fail("add%s should not raise")
+ def test_hello(self):
+ pass
+ """
+ % (type, type)
+ )
+ result = pytester.runpytest()
+ result.stdout.no_fnmatch_line("*should not raise*")
+
+
+@pytest.mark.parametrize("type", ["Error", "Failure"])
+def test_testcase_custom_exception_info(pytester: Pytester, type: str) -> None:
+ pytester.makepyfile(
+ """
+ from typing import Generic, TypeVar
+ from unittest import TestCase
+ import pytest, _pytest._code
+
+ class MyTestCase(TestCase):
+ def run(self, result):
+ excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)
+ # We fake an incompatible exception info.
+ class FakeExceptionInfo(Generic[TypeVar("E")]):
+ def __init__(self, *args, **kwargs):
+ mp.undo()
+ raise TypeError()
+ @classmethod
+ def from_current(cls):
+ return cls()
+ @classmethod
+ def from_exc_info(cls, *args, **kwargs):
+ return cls()
+ mp = pytest.MonkeyPatch()
+ mp.setattr(_pytest._code, 'ExceptionInfo', FakeExceptionInfo)
+ try:
+ excinfo = excinfo._excinfo
+ result.add%(type)s(self, excinfo)
+ finally:
+ mp.undo()
+
+ def test_hello(self):
+ pass
+ """
+ % locals()
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "NOTE: Incompatible Exception Representation*",
+ "*ZeroDivisionError*",
+ "*1 failed*",
+ ]
+ )
+
+
+def test_testcase_totally_incompatible_exception_info(pytester: Pytester) -> None:
+ import _pytest.unittest
+
+ (item,) = pytester.getitems(
+ """
+ from unittest import TestCase
+ class MyTestCase(TestCase):
+ def test_hello(self):
+ pass
+ """
+ )
+ assert isinstance(item, _pytest.unittest.TestCaseFunction)
+ item.addError(None, 42) # type: ignore[arg-type]
+ excinfo = item._excinfo
+ assert excinfo is not None
+ assert "ERROR: Unknown Incompatible" in str(excinfo.pop(0).getrepr())
+
+
+def test_module_level_pytestmark(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ import pytest
+ pytestmark = pytest.mark.xfail
+ class MyTestCase(unittest.TestCase):
+ def test_func1(self):
+ assert 0
+ """
+ )
+ reprec = pytester.inline_run(testpath, "-s")
+ reprec.assertoutcome(skipped=1)
+
+
+class TestTrialUnittest:
+ def setup_class(cls):
+ cls.ut = pytest.importorskip("twisted.trial.unittest")
+        # On Windows, trial uses a socket for a reactor and apparently doesn't close it properly:
+ # https://twistedmatrix.com/trac/ticket/9227
+ cls.ignore_unclosed_socket_warning = ("-W", "always")
+
+ def test_trial_testcase_runtest_not_collected(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ from twisted.trial.unittest import TestCase
+
+ class TC(TestCase):
+ def test_hello(self):
+ pass
+ """
+ )
+ reprec = pytester.inline_run(*self.ignore_unclosed_socket_warning)
+ reprec.assertoutcome(passed=1)
+ pytester.makepyfile(
+ """
+ from twisted.trial.unittest import TestCase
+
+ class TC(TestCase):
+ def runTest(self):
+ pass
+ """
+ )
+ reprec = pytester.inline_run(*self.ignore_unclosed_socket_warning)
+ reprec.assertoutcome(passed=1)
+
+ def test_trial_exceptions_with_skips(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ from twisted.trial import unittest
+ import pytest
+ class TC(unittest.TestCase):
+ def test_hello(self):
+ pytest.skip("skip_in_method")
+ @pytest.mark.skipif("sys.version_info != 1")
+ def test_hello2(self):
+ pass
+ @pytest.mark.xfail(reason="iwanto")
+ def test_hello3(self):
+ assert 0
+ def test_hello4(self):
+ pytest.xfail("i2wanto")
+ def test_trial_skip(self):
+ pass
+ test_trial_skip.skip = "trialselfskip"
+
+ def test_trial_todo(self):
+ assert 0
+ test_trial_todo.todo = "mytodo"
+
+ def test_trial_todo_success(self):
+ pass
+ test_trial_todo_success.todo = "mytodo"
+
+ class TC2(unittest.TestCase):
+ def setup_class(cls):
+ pytest.skip("skip_in_setup_class")
+ def test_method(self):
+ pass
+ """
+ )
+ result = pytester.runpytest("-rxs", *self.ignore_unclosed_socket_warning)
+ result.stdout.fnmatch_lines_random(
+ [
+ "*XFAIL*test_trial_todo*",
+ "*trialselfskip*",
+ "*skip_in_setup_class*",
+ "*iwanto*",
+ "*i2wanto*",
+ "*sys.version_info*",
+ "*skip_in_method*",
+ "*1 failed*4 skipped*3 xfailed*",
+ ]
+ )
+ assert result.ret == 1
+
+ def test_trial_error(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ from twisted.trial.unittest import TestCase
+ from twisted.internet.defer import Deferred
+ from twisted.internet import reactor
+
+ class TC(TestCase):
+ def test_one(self):
+ crash
+
+ def test_two(self):
+ def f(_):
+ crash
+
+ d = Deferred()
+ d.addCallback(f)
+ reactor.callLater(0.3, d.callback, None)
+ return d
+
+ def test_three(self):
+ def f():
+ pass # will never get called
+ reactor.callLater(0.3, f)
+ # will crash at teardown
+
+ def test_four(self):
+ def f(_):
+ reactor.callLater(0.3, f)
+ crash
+
+ d = Deferred()
+ d.addCallback(f)
+ reactor.callLater(0.3, d.callback, None)
+ return d
+ # will crash both at test time and at teardown
+ """
+ )
+ result = pytester.runpytest(
+ "-vv", "-oconsole_output_style=classic", "-W", "ignore::DeprecationWarning"
+ )
+ result.stdout.fnmatch_lines(
+ [
+ "test_trial_error.py::TC::test_four FAILED",
+ "test_trial_error.py::TC::test_four ERROR",
+ "test_trial_error.py::TC::test_one FAILED",
+ "test_trial_error.py::TC::test_three FAILED",
+ "test_trial_error.py::TC::test_two FAILED",
+ "*ERRORS*",
+ "*_ ERROR at teardown of TC.test_four _*",
+ "*DelayedCalls*",
+ "*= FAILURES =*",
+ "*_ TC.test_four _*",
+ "*NameError*crash*",
+ "*_ TC.test_one _*",
+ "*NameError*crash*",
+ "*_ TC.test_three _*",
+ "*DelayedCalls*",
+ "*_ TC.test_two _*",
+ "*NameError*crash*",
+ "*= 4 failed, 1 error in *",
+ ]
+ )
+
+ def test_trial_pdb(self, pytester: Pytester) -> None:
+ p = pytester.makepyfile(
+ """
+ from twisted.trial import unittest
+ import pytest
+ class TC(unittest.TestCase):
+ def test_hello(self):
+ assert 0, "hellopdb"
+ """
+ )
+ child = pytester.spawn_pytest(str(p))
+ child.expect("hellopdb")
+ child.sendeof()
+
+ def test_trial_testcase_skip_property(self, pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ from twisted.trial import unittest
+ class MyTestCase(unittest.TestCase):
+ skip = 'dont run'
+ def test_func(self):
+ pass
+ """
+ )
+ reprec = pytester.inline_run(testpath, "-s")
+ reprec.assertoutcome(skipped=1)
+
+ def test_trial_testfunction_skip_property(self, pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ from twisted.trial import unittest
+ class MyTestCase(unittest.TestCase):
+ def test_func(self):
+ pass
+ test_func.skip = 'dont run'
+ """
+ )
+ reprec = pytester.inline_run(testpath, "-s")
+ reprec.assertoutcome(skipped=1)
+
+ def test_trial_testcase_todo_property(self, pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ from twisted.trial import unittest
+ class MyTestCase(unittest.TestCase):
+ todo = 'dont run'
+ def test_func(self):
+ assert 0
+ """
+ )
+ reprec = pytester.inline_run(testpath, "-s")
+ reprec.assertoutcome(skipped=1)
+
+ def test_trial_testfunction_todo_property(self, pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ from twisted.trial import unittest
+ class MyTestCase(unittest.TestCase):
+ def test_func(self):
+ assert 0
+ test_func.todo = 'dont run'
+ """
+ )
+ reprec = pytester.inline_run(
+ testpath, "-s", *self.ignore_unclosed_socket_warning
+ )
+ reprec.assertoutcome(skipped=1)
+
+
+def test_djangolike_testcase(pytester: Pytester) -> None:
+    # contributed by Morten Breekevold
+ pytester.makepyfile(
+ """
+ from unittest import TestCase, main
+
+ class DjangoLikeTestCase(TestCase):
+
+ def setUp(self):
+ print("setUp()")
+
+ def test_presetup_has_been_run(self):
+ print("test_thing()")
+ self.assertTrue(hasattr(self, 'was_presetup'))
+
+ def tearDown(self):
+ print("tearDown()")
+
+ def __call__(self, result=None):
+ try:
+ self._pre_setup()
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except Exception:
+ import sys
+ result.addError(self, sys.exc_info())
+ return
+ super(DjangoLikeTestCase, self).__call__(result)
+ try:
+ self._post_teardown()
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except Exception:
+ import sys
+ result.addError(self, sys.exc_info())
+ return
+
+ def _pre_setup(self):
+ print("_pre_setup()")
+ self.was_presetup = True
+
+ def _post_teardown(self):
+ print("_post_teardown()")
+ """
+ )
+ result = pytester.runpytest("-s")
+ assert result.ret == 0
+ result.stdout.fnmatch_lines(
+ [
+ "*_pre_setup()*",
+ "*setUp()*",
+ "*test_thing()*",
+ "*tearDown()*",
+ "*_post_teardown()*",
+ ]
+ )
+
+
+def test_unittest_not_shown_in_traceback(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import unittest
+ class t(unittest.TestCase):
+ def test_hello(self):
+ x = 3
+ self.assertEqual(x, 4)
+ """
+ )
+ res = pytester.runpytest()
+ res.stdout.no_fnmatch_line("*failUnlessEqual*")
+
+
+def test_unorderable_types(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import unittest
+ class TestJoinEmpty(unittest.TestCase):
+ pass
+
+ def make_test():
+ class Test(unittest.TestCase):
+ pass
+ Test.__name__ = "TestFoo"
+ return Test
+ TestFoo = make_test()
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.no_fnmatch_line("*TypeError*")
+ assert result.ret == ExitCode.NO_TESTS_COLLECTED
+
+
+def test_unittest_typerror_traceback(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import unittest
+ class TestJoinEmpty(unittest.TestCase):
+ def test_hello(self, arg1):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ assert "TypeError" in result.stdout.str()
+ assert result.ret == 1
+
+
+@pytest.mark.parametrize("runner", ["pytest", "unittest"])
+def test_unittest_expected_failure_for_failing_test_is_xfail(
+ pytester: Pytester, runner
+) -> None:
+ script = pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ @unittest.expectedFailure
+ def test_failing_test_is_xfail(self):
+ assert False
+ if __name__ == '__main__':
+ unittest.main()
+ """
+ )
+ if runner == "pytest":
+ result = pytester.runpytest("-rxX")
+ result.stdout.fnmatch_lines(
+ ["*XFAIL*MyTestCase*test_failing_test_is_xfail*", "*1 xfailed*"]
+ )
+ else:
+ result = pytester.runpython(script)
+ result.stderr.fnmatch_lines(["*1 test in*", "*OK*(expected failures=1)*"])
+ assert result.ret == 0
+
+
+@pytest.mark.parametrize("runner", ["pytest", "unittest"])
+def test_unittest_expected_failure_for_passing_test_is_fail(
+ pytester: Pytester,
+ runner: str,
+) -> None:
+ script = pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ @unittest.expectedFailure
+ def test_passing_test_is_fail(self):
+ assert True
+ if __name__ == '__main__':
+ unittest.main()
+ """
+ )
+
+ if runner == "pytest":
+ result = pytester.runpytest("-rxX")
+ result.stdout.fnmatch_lines(
+ [
+ "*MyTestCase*test_passing_test_is_fail*",
+ "Unexpected success",
+ "*1 failed*",
+ ]
+ )
+ else:
+ result = pytester.runpython(script)
+ result.stderr.fnmatch_lines(["*1 test in*", "*(unexpected successes=1)*"])
+
+ assert result.ret == 1
+
+
+@pytest.mark.parametrize("stmt", ["return", "yield"])
+def test_unittest_setup_interaction(pytester: Pytester, stmt: str) -> None:
+ pytester.makepyfile(
+ """
+ import unittest
+ import pytest
+ class MyTestCase(unittest.TestCase):
+ @pytest.fixture(scope="class", autouse=True)
+ def perclass(self, request):
+ request.cls.hello = "world"
+ {stmt}
+ @pytest.fixture(scope="function", autouse=True)
+ def perfunction(self, request):
+ request.instance.funcname = request.function.__name__
+ {stmt}
+
+ def test_method1(self):
+ assert self.funcname == "test_method1"
+ assert self.hello == "world"
+
+ def test_method2(self):
+ assert self.funcname == "test_method2"
+
+ def test_classattr(self):
+ assert self.__class__.hello == "world"
+ """.format(
+ stmt=stmt
+ )
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*3 passed*"])
+
+
+def test_non_unittest_no_setupclass_support(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ class TestFoo(object):
+ x = 0
+
+ @classmethod
+ def setUpClass(cls):
+ cls.x = 1
+
+ def test_method1(self):
+ assert self.x == 0
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.x = 1
+
+ def test_not_teareddown():
+ assert TestFoo.x == 0
+
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ reprec.assertoutcome(passed=2)
+
+
+def test_no_teardown_if_setupclass_failed(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+
+ class MyTestCase(unittest.TestCase):
+ x = 0
+
+ @classmethod
+ def setUpClass(cls):
+ cls.x = 1
+ assert False
+
+ def test_func1(self):
+ cls.x = 10
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.x = 100
+
+ def test_notTornDown():
+ assert MyTestCase.x == 1
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ reprec.assertoutcome(passed=1, failed=1)
+
+
+def test_cleanup_functions(pytester: Pytester) -> None:
+    """Ensure functions added with addCleanup are always called after each test ends (#6947)."""
+ pytester.makepyfile(
+ """
+ import unittest
+
+ cleanups = []
+
+ class Test(unittest.TestCase):
+
+ def test_func_1(self):
+ self.addCleanup(cleanups.append, "test_func_1")
+
+ def test_func_2(self):
+ self.addCleanup(cleanups.append, "test_func_2")
+ assert 0
+
+ def test_func_3_check_cleanups(self):
+ assert cleanups == ["test_func_1", "test_func_2"]
+ """
+ )
+ result = pytester.runpytest("-v")
+ result.stdout.fnmatch_lines(
+ [
+ "*::test_func_1 PASSED *",
+ "*::test_func_2 FAILED *",
+ "*::test_func_3_check_cleanups PASSED *",
+ ]
+ )
+
+
+def test_issue333_result_clearing(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ import pytest
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_call(item):
+ yield
+ assert 0
+ """
+ )
+ pytester.makepyfile(
+ """
+ import unittest
+ class TestIt(unittest.TestCase):
+ def test_func(self):
+ 0/0
+ """
+ )
+
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(failed=1)
+
+
+def test_unittest_raise_skip_issue748(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_foo="""
+ import unittest
+
+ class MyTestCase(unittest.TestCase):
+ def test_one(self):
+ raise unittest.SkipTest('skipping due to reasons')
+ """
+ )
+ result = pytester.runpytest("-v", "-rs")
+ result.stdout.fnmatch_lines(
+ """
+ *SKIP*[1]*test_foo.py*skipping due to reasons*
+ *1 skipped*
+ """
+ )
+
+
+def test_unittest_skip_issue1169(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_foo="""
+ import unittest
+
+ class MyTestCase(unittest.TestCase):
+ @unittest.skip("skipping due to reasons")
+ def test_skip(self):
+ self.fail()
+ """
+ )
+ result = pytester.runpytest("-v", "-rs")
+ result.stdout.fnmatch_lines(
+ """
+ *SKIP*[1]*skipping due to reasons*
+ *1 skipped*
+ """
+ )
+
+
+def test_class_method_containing_test_issue1558(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_foo="""
+ import unittest
+
+ class MyTestCase(unittest.TestCase):
+ def test_should_run(self):
+ pass
+ def test_should_not_run(self):
+ pass
+ test_should_not_run.__test__ = False
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(passed=1)
+
+
+@pytest.mark.parametrize("base", ["builtins.object", "unittest.TestCase"])
+def test_usefixtures_marker_on_unittest(base, pytester: Pytester) -> None:
+ """#3498"""
+ module = base.rsplit(".", 1)[0]
+ pytest.importorskip(module)
+ pytester.makepyfile(
+ conftest="""
+ import pytest
+
+ @pytest.fixture(scope='function')
+ def fixture1(request, monkeypatch):
+ monkeypatch.setattr(request.instance, 'fixture1', True )
+
+
+ @pytest.fixture(scope='function')
+ def fixture2(request, monkeypatch):
+ monkeypatch.setattr(request.instance, 'fixture2', True )
+
+ def node_and_marks(item):
+ print(item.nodeid)
+ for mark in item.iter_markers():
+ print(" ", mark)
+
+ @pytest.fixture(autouse=True)
+ def my_marks(request):
+ node_and_marks(request.node)
+
+ def pytest_collection_modifyitems(items):
+ for item in items:
+ node_and_marks(item)
+
+ """
+ )
+
+ pytester.makepyfile(
+ """
+ import pytest
+ import {module}
+
+ class Tests({base}):
+ fixture1 = False
+ fixture2 = False
+
+ @pytest.mark.usefixtures("fixture1")
+ def test_one(self):
+ assert self.fixture1
+ assert not self.fixture2
+
+ @pytest.mark.usefixtures("fixture1", "fixture2")
+ def test_two(self):
+ assert self.fixture1
+ assert self.fixture2
+
+
+ """.format(
+ module=module, base=base
+ )
+ )
+
+ result = pytester.runpytest("-s")
+ result.assert_outcomes(passed=2)
+
+
+def test_testcase_handles_init_exceptions(pytester: Pytester) -> None:
+ """
+ Regression test to make sure exceptions in the __init__ method are bubbled up correctly.
+ See https://github.com/pytest-dev/pytest/issues/3788
+ """
+ pytester.makepyfile(
+ """
+ from unittest import TestCase
+ import pytest
+ class MyTestCase(TestCase):
+ def __init__(self, *args, **kwargs):
+ raise Exception("should raise this exception")
+ def test_hello(self):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ assert "should raise this exception" in result.stdout.str()
+ result.stdout.no_fnmatch_line("*ERROR at teardown of MyTestCase.test_hello*")
+
+
+def test_error_message_with_parametrized_fixtures(pytester: Pytester) -> None:
+ pytester.copy_example("unittest/test_parametrized_fixture_error_message.py")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*test_two does not support fixtures*",
+ "*TestSomethingElse::test_two",
+ "*Function type: TestCaseFunction",
+ ]
+ )
+
+
+@pytest.mark.parametrize(
+ "test_name, expected_outcome",
+ [
+ ("test_setup_skip.py", "1 skipped"),
+ ("test_setup_skip_class.py", "1 skipped"),
+ ("test_setup_skip_module.py", "1 error"),
+ ],
+)
+def test_setup_inheritance_skipping(
+ pytester: Pytester, test_name, expected_outcome
+) -> None:
+ """Issue #4700"""
+ pytester.copy_example(f"unittest/{test_name}")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines([f"* {expected_outcome} in *"])
+
+
+def test_BdbQuit(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_foo="""
+ import unittest
+
+ class MyTestCase(unittest.TestCase):
+ def test_bdbquit(self):
+ import bdb
+ raise bdb.BdbQuit()
+
+ def test_should_not_run(self):
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(failed=1, passed=1)
+
+
+def test_exit_outcome(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_foo="""
+ import pytest
+ import unittest
+
+ class MyTestCase(unittest.TestCase):
+ def test_exit_outcome(self):
+ pytest.exit("pytest_exit called")
+
+ def test_should_not_run(self):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*Exit: pytest_exit called*", "*= no tests ran in *"])
+
+
+def test_trace(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
+ calls = []
+
+ def check_call(*args, **kwargs):
+ calls.append((args, kwargs))
+ assert args == ("runcall",)
+
+ class _pdb:
+ def runcall(*args, **kwargs):
+ calls.append((args, kwargs))
+
+ return _pdb
+
+ monkeypatch.setattr("_pytest.debugging.pytestPDB._init_pdb", check_call)
+
+ p1 = pytester.makepyfile(
+ """
+ import unittest
+
+ class MyTestCase(unittest.TestCase):
+ def test(self):
+ self.assertEqual('foo', 'foo')
+ """
+ )
+ result = pytester.runpytest("--trace", str(p1))
+ assert len(calls) == 2
+ assert result.ret == 0
+
+
+def test_pdb_teardown_called(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
+    """Ensure tearDown() is always called when --pdb is given on the command line.
+
+    The normal tearDown() calls are delayed when --pdb is given, so this ensures
+    they are eventually made, to avoid memory leaks when using --pdb.
+ """
+ teardowns: List[str] = []
+ monkeypatch.setattr(
+ pytest, "test_pdb_teardown_called_teardowns", teardowns, raising=False
+ )
+
+ pytester.makepyfile(
+ """
+ import unittest
+ import pytest
+
+ class MyTestCase(unittest.TestCase):
+
+ def tearDown(self):
+ pytest.test_pdb_teardown_called_teardowns.append(self.id())
+
+ def test_1(self):
+ pass
+ def test_2(self):
+ pass
+ """
+ )
+ result = pytester.runpytest_inprocess("--pdb")
+ result.stdout.fnmatch_lines("* 2 passed in *")
+ assert teardowns == [
+ "test_pdb_teardown_called.MyTestCase.test_1",
+ "test_pdb_teardown_called.MyTestCase.test_2",
+ ]
+
+
+@pytest.mark.parametrize("mark", ["@unittest.skip", "@pytest.mark.skip"])
+def test_pdb_teardown_skipped(
+ pytester: Pytester, monkeypatch: MonkeyPatch, mark: str
+) -> None:
+ """With --pdb, setUp and tearDown should not be called for skipped tests."""
+ tracked: List[str] = []
+ monkeypatch.setattr(pytest, "test_pdb_teardown_skipped", tracked, raising=False)
+
+ pytester.makepyfile(
+ """
+ import unittest
+ import pytest
+
+ class MyTestCase(unittest.TestCase):
+
+ def setUp(self):
+ pytest.test_pdb_teardown_skipped.append("setUp:" + self.id())
+
+ def tearDown(self):
+ pytest.test_pdb_teardown_skipped.append("tearDown:" + self.id())
+
+ {mark}("skipped for reasons")
+ def test_1(self):
+ pass
+
+ """.format(
+ mark=mark
+ )
+ )
+ result = pytester.runpytest_inprocess("--pdb")
+ result.stdout.fnmatch_lines("* 1 skipped in *")
+ assert tracked == []
+
+
+def test_async_support(pytester: Pytester) -> None:
+ pytest.importorskip("unittest.async_case")
+
+ pytester.copy_example("unittest/test_unittest_asyncio.py")
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(failed=1, passed=2)
+
+
+def test_asynctest_support(pytester: Pytester) -> None:
+ """Check asynctest support (#7110)"""
+ pytest.importorskip("asynctest")
+
+ pytester.copy_example("unittest/test_unittest_asynctest.py")
+ reprec = pytester.inline_run()
+ reprec.assertoutcome(failed=1, passed=2)
+
+
+def test_plain_unittest_does_not_support_async(pytester: Pytester) -> None:
+ """Async functions in plain unittest.TestCase subclasses are not supported without plugins.
+
+ This test exists here to avoid introducing this support by accident, leading users
+ to expect that it works, rather than doing so intentionally as a feature.
+
+ See https://github.com/pytest-dev/pytest-asyncio/issues/180 for more context.
+ """
+ pytester.copy_example("unittest/test_unittest_plain_async.py")
+ result = pytester.runpytest_subprocess()
+ if hasattr(sys, "pypy_version_info"):
+        # In PyPy we can't reliably get the warning about the coroutine not being awaited,
+        # because it depends on the coroutine being garbage collected; given that
+        # we are running in a subprocess, that's difficult to enforce.
+ expected_lines = ["*1 passed*"]
+ else:
+ expected_lines = [
+ "*RuntimeWarning: coroutine * was never awaited",
+ "*1 passed*",
+ ]
+ result.stdout.fnmatch_lines(expected_lines)
+
+
+@pytest.mark.skipif(
+ sys.version_info < (3, 8), reason="Feature introduced in Python 3.8"
+)
+def test_do_class_cleanups_on_success(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ values = []
+ @classmethod
+ def setUpClass(cls):
+ def cleanup():
+ cls.values.append(1)
+ cls.addClassCleanup(cleanup)
+ def test_one(self):
+ pass
+ def test_two(self):
+ pass
+ def test_cleanup_called_exactly_once():
+ assert MyTestCase.values == [1]
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 0
+ assert passed == 3
+
+
+@pytest.mark.skipif(
+ sys.version_info < (3, 8), reason="Feature introduced in Python 3.8"
+)
+def test_do_class_cleanups_on_setupclass_failure(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ values = []
+ @classmethod
+ def setUpClass(cls):
+ def cleanup():
+ cls.values.append(1)
+ cls.addClassCleanup(cleanup)
+ assert False
+ def test_one(self):
+ pass
+ def test_cleanup_called_exactly_once():
+ assert MyTestCase.values == [1]
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 1
+ assert passed == 1
+
+
+@pytest.mark.skipif(
+ sys.version_info < (3, 8), reason="Feature introduced in Python 3.8"
+)
+def test_do_class_cleanups_on_teardownclass_failure(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ values = []
+ @classmethod
+ def setUpClass(cls):
+ def cleanup():
+ cls.values.append(1)
+ cls.addClassCleanup(cleanup)
+ @classmethod
+ def tearDownClass(cls):
+ assert False
+ def test_one(self):
+ pass
+ def test_two(self):
+ pass
+ def test_cleanup_called_exactly_once():
+ assert MyTestCase.values == [1]
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert passed == 3
+
+
+def test_do_cleanups_on_success(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ values = []
+ def setUp(self):
+ def cleanup():
+ self.values.append(1)
+ self.addCleanup(cleanup)
+ def test_one(self):
+ pass
+ def test_two(self):
+ pass
+ def test_cleanup_called_the_right_number_of_times():
+ assert MyTestCase.values == [1, 1]
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 0
+ assert passed == 3
+
+
+def test_do_cleanups_on_setup_failure(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ values = []
+ def setUp(self):
+ def cleanup():
+ self.values.append(1)
+ self.addCleanup(cleanup)
+ assert False
+ def test_one(self):
+ pass
+ def test_two(self):
+ pass
+ def test_cleanup_called_the_right_number_of_times():
+ assert MyTestCase.values == [1, 1]
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 2
+ assert passed == 1
+
+
+def test_do_cleanups_on_teardown_failure(pytester: Pytester) -> None:
+ testpath = pytester.makepyfile(
+ """
+ import unittest
+ class MyTestCase(unittest.TestCase):
+ values = []
+ def setUp(self):
+ def cleanup():
+ self.values.append(1)
+ self.addCleanup(cleanup)
+ def tearDown(self):
+ assert False
+ def test_one(self):
+ pass
+ def test_two(self):
+ pass
+ def test_cleanup_called_the_right_number_of_times():
+ assert MyTestCase.values == [1, 1]
+ """
+ )
+ reprec = pytester.inline_run(testpath)
+ passed, skipped, failed = reprec.countoutcomes()
+ assert failed == 2
+ assert passed == 1
+
+
+def test_traceback_pruning(pytester: Pytester) -> None:
+    """Regression test for #9610: traceback pruning must not crash."""
+ pytester.makepyfile(
+ """
+ import unittest
+
+ class MyTestCase(unittest.TestCase):
+ def __init__(self, test_method):
+ unittest.TestCase.__init__(self, test_method)
+
+ class TestIt(MyTestCase):
+ @classmethod
+ def tearDownClass(cls) -> None:
+ assert False
+
+ def test_it(self):
+ pass
+ """
+ )
+ reprec = pytester.inline_run()
+ passed, skipped, failed = reprec.countoutcomes()
+ assert passed == 1
+ assert failed == 1
+ assert reprec.ret == 1
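
The tests in test_unittest.py all follow the same pytester pattern: write a throwaway test file with makepyfile(), run it in-process with inline_run() or runpytest(), and assert on the recorded outcomes or the matched output. For readers reusing this pattern outside pytest's own repository, here is a minimal, hedged sketch; the file and function names are illustrative, and the pytest_plugins line assumes pytest >= 6.2, where the pytester fixture is public but must be enabled explicitly.

    # conftest.py -- enables the public `pytester` fixture (pytest >= 6.2)
    pytest_plugins = ["pytester"]

    # test_pytester_sketch.py -- illustrative only
    def test_unittest_testcase_is_collected(pytester):
        pytester.makepyfile(
            """
            import unittest

            class MyTestCase(unittest.TestCase):
                def test_passes(self):
                    self.assertEqual(1, 1)
            """
        )
        result = pytester.runpytest("-v")
        result.assert_outcomes(passed=1)
        result.stdout.fnmatch_lines(["*MyTestCase::test_passes PASSED*"])
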
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_unraisableexception.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_unraisableexception.py
new file mode 100644
index 0000000000..f625833dce
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_unraisableexception.py
@@ -0,0 +1,133 @@
+import sys
+
+import pytest
+from _pytest.pytester import Pytester
+
+
+if sys.version_info < (3, 8):
+ pytest.skip("unraisableexception plugin needs Python>=3.8", allow_module_level=True)
+
+
+@pytest.mark.filterwarnings("default::pytest.PytestUnraisableExceptionWarning")
+def test_unraisable(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_it="""
+ class BrokenDel:
+ def __del__(self):
+ raise ValueError("del is broken")
+
+ def test_it():
+ obj = BrokenDel()
+ del obj
+
+ def test_2(): pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+ assert result.parseoutcomes() == {"passed": 2, "warnings": 1}
+ result.stdout.fnmatch_lines(
+ [
+ "*= warnings summary =*",
+ "test_it.py::test_it",
+ " * PytestUnraisableExceptionWarning: Exception ignored in: <function BrokenDel.__del__ at *>",
+ " ",
+ " Traceback (most recent call last):",
+ " ValueError: del is broken",
+ " ",
+ " warnings.warn(pytest.PytestUnraisableExceptionWarning(msg))",
+ ]
+ )
+
+
+@pytest.mark.filterwarnings("default::pytest.PytestUnraisableExceptionWarning")
+def test_unraisable_in_setup(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_it="""
+ import pytest
+
+ class BrokenDel:
+ def __del__(self):
+ raise ValueError("del is broken")
+
+ @pytest.fixture
+ def broken_del():
+ obj = BrokenDel()
+ del obj
+
+ def test_it(broken_del): pass
+ def test_2(): pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+ assert result.parseoutcomes() == {"passed": 2, "warnings": 1}
+ result.stdout.fnmatch_lines(
+ [
+ "*= warnings summary =*",
+ "test_it.py::test_it",
+ " * PytestUnraisableExceptionWarning: Exception ignored in: <function BrokenDel.__del__ at *>",
+ " ",
+ " Traceback (most recent call last):",
+ " ValueError: del is broken",
+ " ",
+ " warnings.warn(pytest.PytestUnraisableExceptionWarning(msg))",
+ ]
+ )
+
+
+@pytest.mark.filterwarnings("default::pytest.PytestUnraisableExceptionWarning")
+def test_unraisable_in_teardown(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_it="""
+ import pytest
+
+ class BrokenDel:
+ def __del__(self):
+ raise ValueError("del is broken")
+
+ @pytest.fixture
+ def broken_del():
+ yield
+ obj = BrokenDel()
+ del obj
+
+ def test_it(broken_del): pass
+ def test_2(): pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == 0
+ assert result.parseoutcomes() == {"passed": 2, "warnings": 1}
+ result.stdout.fnmatch_lines(
+ [
+ "*= warnings summary =*",
+ "test_it.py::test_it",
+ " * PytestUnraisableExceptionWarning: Exception ignored in: <function BrokenDel.__del__ at *>",
+ " ",
+ " Traceback (most recent call last):",
+ " ValueError: del is broken",
+ " ",
+ " warnings.warn(pytest.PytestUnraisableExceptionWarning(msg))",
+ ]
+ )
+
+
+@pytest.mark.filterwarnings("error::pytest.PytestUnraisableExceptionWarning")
+def test_unraisable_warning_error(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ test_it="""
+ class BrokenDel:
+ def __del__(self) -> None:
+ raise ValueError("del is broken")
+
+ def test_it() -> None:
+ obj = BrokenDel()
+ del obj
+
+ def test_2(): pass
+ """
+ )
+ result = pytester.runpytest()
+ assert result.ret == pytest.ExitCode.TESTS_FAILED
+ assert result.parseoutcomes() == {"passed": 1, "failed": 1}
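
test_unraisableexception.py exercises pytest's unraisableexception plugin, which builds on sys.unraisablehook (new in Python 3.8): an exception raised from __del__ cannot propagate to the caller, so CPython hands it to this hook instead, and the plugin surfaces it as a PytestUnraisableExceptionWarning. Below is a rough standalone sketch of the underlying mechanism only, not pytest's actual implementation; the hook and variable names are invented, and the final assertion assumes CPython's immediate, refcount-driven finalization.

    import sys

    captured = []
    default_hook = sys.unraisablehook  # Python >= 3.8

    def capture_unraisable(unraisable):
        # `unraisable` carries exc_type, exc_value, exc_traceback, err_msg and object.
        captured.append((unraisable.exc_type, str(unraisable.exc_value)))
        default_hook(unraisable)  # keep the default stderr report as well

    class BrokenDel:
        def __del__(self):
            raise ValueError("del is broken")

    sys.unraisablehook = capture_unraisable
    obj = BrokenDel()
    del obj  # on CPython, __del__ runs now; its exception goes to the hook, not the caller
    assert captured and captured[0][0] is ValueError
    sys.unraisablehook = default_hook
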
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_warning_types.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_warning_types.py
new file mode 100644
index 0000000000..b49cc68f9c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_warning_types.py
@@ -0,0 +1,38 @@
+import inspect
+
+import pytest
+from _pytest import warning_types
+from _pytest.pytester import Pytester
+
+
+@pytest.mark.parametrize(
+ "warning_class",
+ [
+ w
+ for n, w in vars(warning_types).items()
+ if inspect.isclass(w) and issubclass(w, Warning)
+ ],
+)
+def test_warning_types(warning_class: UserWarning) -> None:
+ """Make sure all warnings declared in _pytest.warning_types are displayed as coming
+ from 'pytest' instead of the internal module (#5452).
+ """
+ assert warning_class.__module__ == "pytest"
+
+
+@pytest.mark.filterwarnings("error::pytest.PytestWarning")
+def test_pytest_warnings_repr_integration_test(pytester: Pytester) -> None:
+    """Small integration test to ensure that our hack of setting the __module__ attribute
+ of our warnings actually works (#5452).
+ """
+ pytester.makepyfile(
+ """
+ import pytest
+ import warnings
+
+ def test():
+ warnings.warn(pytest.PytestWarning("some warning"))
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["E pytest.PytestWarning: some warning"])
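
The two tests above rely on a small trick: pytest assigns __module__ = "pytest" to the warning classes defined in _pytest.warning_types, so users see pytest.PytestWarning rather than the internal module path. The same technique works for any library that re-exports internal classes under a public name; the following is a minimal sketch, with the mypkg name invented for illustration.

    import warnings

    class _InternalWarning(UserWarning):
        """Defined in an internal module, but reported under the public package name."""

    # Rebinding __module__ changes what repr() and fully-qualified-name reporting show
    # for the class; it does not affect the warning's behaviour or its filter matching.
    _InternalWarning.__module__ = "mypkg"

    with warnings.catch_warnings(record=True) as recorded:
        warnings.simplefilter("always")
        warnings.warn(_InternalWarning("something happened"))

    assert recorded[0].category.__module__ == "mypkg"
    assert issubclass(recorded[0].category, UserWarning)
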
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/test_warnings.py b/testing/web-platform/tests/tools/third_party/pytest/testing/test_warnings.py
new file mode 100644
index 0000000000..5663c46cea
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/test_warnings.py
@@ -0,0 +1,775 @@
+import os
+import warnings
+from typing import List
+from typing import Optional
+from typing import Tuple
+
+import pytest
+from _pytest.fixtures import FixtureRequest
+from _pytest.pytester import Pytester
+
+WARNINGS_SUMMARY_HEADER = "warnings summary"
+
+
+@pytest.fixture
+def pyfile_with_warnings(pytester: Pytester, request: FixtureRequest) -> str:
+    """Create a test file that calls a function in a module that generates warnings."""
+ pytester.syspathinsert()
+ test_name = request.function.__name__
+ module_name = test_name.lstrip("test_") + "_module"
+ test_file = pytester.makepyfile(
+ """
+ import {module_name}
+ def test_func():
+ assert {module_name}.foo() == 1
+ """.format(
+ module_name=module_name
+ ),
+ **{
+ module_name: """
+ import warnings
+ def foo():
+ warnings.warn(UserWarning("user warning"))
+ warnings.warn(RuntimeWarning("runtime warning"))
+ return 1
+ """,
+ },
+ )
+ return str(test_file)
+
+
+@pytest.mark.filterwarnings("default::UserWarning", "default::RuntimeWarning")
+def test_normal_flow(pytester: Pytester, pyfile_with_warnings) -> None:
+ """Check that the warnings section is displayed."""
+ result = pytester.runpytest(pyfile_with_warnings)
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ "test_normal_flow.py::test_func",
+ "*normal_flow_module.py:3: UserWarning: user warning",
+ '* warnings.warn(UserWarning("user warning"))',
+ "*normal_flow_module.py:4: RuntimeWarning: runtime warning",
+ '* warnings.warn(RuntimeWarning("runtime warning"))',
+ "* 1 passed, 2 warnings*",
+ ]
+ )
+
+
+@pytest.mark.filterwarnings("always::UserWarning")
+def test_setup_teardown_warnings(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import warnings
+ import pytest
+
+ @pytest.fixture
+ def fix():
+ warnings.warn(UserWarning("warning during setup"))
+ yield
+ warnings.warn(UserWarning("warning during teardown"))
+
+ def test_func(fix):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ "*test_setup_teardown_warnings.py:6: UserWarning: warning during setup",
+ '*warnings.warn(UserWarning("warning during setup"))',
+ "*test_setup_teardown_warnings.py:8: UserWarning: warning during teardown",
+ '*warnings.warn(UserWarning("warning during teardown"))',
+ "* 1 passed, 2 warnings*",
+ ]
+ )
+
+
+@pytest.mark.parametrize("method", ["cmdline", "ini"])
+def test_as_errors(pytester: Pytester, pyfile_with_warnings, method) -> None:
+ args = ("-W", "error") if method == "cmdline" else ()
+ if method == "ini":
+ pytester.makeini(
+ """
+ [pytest]
+ filterwarnings=error
+ """
+ )
+ # Use a subprocess, since changing logging level affects other threads
+ # (xdist).
+ result = pytester.runpytest_subprocess(*args, pyfile_with_warnings)
+ result.stdout.fnmatch_lines(
+ [
+ "E UserWarning: user warning",
+ "as_errors_module.py:3: UserWarning",
+ "* 1 failed in *",
+ ]
+ )
+
+
+@pytest.mark.parametrize("method", ["cmdline", "ini"])
+def test_ignore(pytester: Pytester, pyfile_with_warnings, method) -> None:
+ args = ("-W", "ignore") if method == "cmdline" else ()
+ if method == "ini":
+ pytester.makeini(
+ """
+ [pytest]
+ filterwarnings= ignore
+ """
+ )
+
+ result = pytester.runpytest(*args, pyfile_with_warnings)
+ result.stdout.fnmatch_lines(["* 1 passed in *"])
+ assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()
+
+
+@pytest.mark.filterwarnings("always::UserWarning")
+def test_unicode(pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """
+ import warnings
+ import pytest
+
+
+ @pytest.fixture
+ def fix():
+ warnings.warn("测试")
+ yield
+
+ def test_func(fix):
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ "*test_unicode.py:7: UserWarning: \u6d4b\u8bd5*",
+ "* 1 passed, 1 warning*",
+ ]
+ )
+
+
+def test_works_with_filterwarnings(pytester: Pytester) -> None:
+ """Ensure our warnings capture does not mess with pre-installed filters (#2430)."""
+ pytester.makepyfile(
+ """
+ import warnings
+
+ class MyWarning(Warning):
+ pass
+
+ warnings.filterwarnings("error", category=MyWarning)
+
+ class TestWarnings(object):
+ def test_my_warning(self):
+ try:
+ warnings.warn(MyWarning("warn!"))
+ assert False
+ except MyWarning:
+ assert True
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(["*== 1 passed in *"])
+
+
+@pytest.mark.parametrize("default_config", ["ini", "cmdline"])
+def test_filterwarnings_mark(pytester: Pytester, default_config) -> None:
+    """Test that the ``filterwarnings`` mark works and takes precedence over
+    command-line and ini options."""
+ if default_config == "ini":
+ pytester.makeini(
+ """
+ [pytest]
+ filterwarnings = always::RuntimeWarning
+ """
+ )
+ pytester.makepyfile(
+ """
+ import warnings
+ import pytest
+
+ @pytest.mark.filterwarnings('ignore::RuntimeWarning')
+ def test_ignore_runtime_warning():
+ warnings.warn(RuntimeWarning())
+
+ @pytest.mark.filterwarnings('error')
+ def test_warning_error():
+ warnings.warn(RuntimeWarning())
+
+ def test_show_warning():
+ warnings.warn(RuntimeWarning())
+ """
+ )
+ result = pytester.runpytest(
+ "-W always::RuntimeWarning" if default_config == "cmdline" else ""
+ )
+ result.stdout.fnmatch_lines(["*= 1 failed, 2 passed, 1 warning in *"])
+
+
+def test_non_string_warning_argument(pytester: Pytester) -> None:
+    """A non-str argument passed to a warning must not break pytest (#2956)."""
+ pytester.makepyfile(
+ """\
+ import warnings
+ import pytest
+
+ def test():
+ warnings.warn(UserWarning(1, 'foo'))
+ """
+ )
+ result = pytester.runpytest("-W", "always::UserWarning")
+ result.stdout.fnmatch_lines(["*= 1 passed, 1 warning in *"])
+
+
+def test_filterwarnings_mark_registration(pytester: Pytester) -> None:
+ """Ensure filterwarnings mark is registered"""
+ pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.filterwarnings('error')
+ def test_func():
+ pass
+ """
+ )
+ result = pytester.runpytest("--strict-markers")
+ assert result.ret == 0
+
+
+@pytest.mark.filterwarnings("always::UserWarning")
+def test_warning_captured_hook(pytester: Pytester) -> None:
+ pytester.makeconftest(
+ """
+ def pytest_configure(config):
+ config.issue_config_time_warning(UserWarning("config warning"), stacklevel=2)
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest, warnings
+
+ warnings.warn(UserWarning("collect warning"))
+
+ @pytest.fixture
+ def fix():
+ warnings.warn(UserWarning("setup warning"))
+ yield 1
+ warnings.warn(UserWarning("teardown warning"))
+
+ def test_func(fix):
+ warnings.warn(UserWarning("call warning"))
+ assert fix == 1
+ """
+ )
+
+ collected = []
+
+ class WarningCollector:
+ def pytest_warning_recorded(self, warning_message, when, nodeid, location):
+ collected.append((str(warning_message.message), when, nodeid, location))
+
+ result = pytester.runpytest(plugins=[WarningCollector()])
+ result.stdout.fnmatch_lines(["*1 passed*"])
+
+ expected = [
+ ("config warning", "config", ""),
+ ("collect warning", "collect", ""),
+ ("setup warning", "runtest", "test_warning_captured_hook.py::test_func"),
+ ("call warning", "runtest", "test_warning_captured_hook.py::test_func"),
+ ("teardown warning", "runtest", "test_warning_captured_hook.py::test_func"),
+ ]
+ for index in range(len(expected)):
+ collected_result = collected[index]
+ expected_result = expected[index]
+
+ assert collected_result[0] == expected_result[0], str(collected)
+ assert collected_result[1] == expected_result[1], str(collected)
+ assert collected_result[2] == expected_result[2], str(collected)
+
+        # NOTE: collected_result[3] is the location, which differs depending on the platform,
+        # so the best we can do here is assert that the types of the parameters match what
+        # we expect, rather than preloading the values into the expected list.
+ if collected_result[3] is not None:
+ assert type(collected_result[3][0]) is str, str(collected)
+ assert type(collected_result[3][1]) is int, str(collected)
+ assert type(collected_result[3][2]) is str, str(collected)
+ else:
+ assert collected_result[3] is None, str(collected)
+
+
+@pytest.mark.filterwarnings("always::UserWarning")
+def test_collection_warnings(pytester: Pytester) -> None:
+ """Check that we also capture warnings issued during test collection (#3251)."""
+ pytester.makepyfile(
+ """
+ import warnings
+
+ warnings.warn(UserWarning("collection warning"))
+
+ def test_foo():
+ pass
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ " *collection_warnings.py:3: UserWarning: collection warning",
+ ' warnings.warn(UserWarning("collection warning"))',
+ "* 1 passed, 1 warning*",
+ ]
+ )
+
+
+@pytest.mark.filterwarnings("always::UserWarning")
+def test_mark_regex_escape(pytester: Pytester) -> None:
+ """@pytest.mark.filterwarnings should not try to escape regex characters (#3936)"""
+ pytester.makepyfile(
+ r"""
+ import pytest, warnings
+
+ @pytest.mark.filterwarnings(r"ignore:some \(warning\)")
+ def test_foo():
+ warnings.warn(UserWarning("some (warning)"))
+ """
+ )
+ result = pytester.runpytest()
+ assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()
+
+
+@pytest.mark.filterwarnings("default::pytest.PytestWarning")
+@pytest.mark.parametrize("ignore_pytest_warnings", ["no", "ini", "cmdline"])
+def test_hide_pytest_internal_warnings(
+ pytester: Pytester, ignore_pytest_warnings
+) -> None:
+ """Make sure we can ignore internal pytest warnings using a warnings filter."""
+ pytester.makepyfile(
+ """
+ import pytest
+ import warnings
+
+ warnings.warn(pytest.PytestWarning("some internal warning"))
+
+ def test_bar():
+ pass
+ """
+ )
+ if ignore_pytest_warnings == "ini":
+ pytester.makeini(
+ """
+ [pytest]
+ filterwarnings = ignore::pytest.PytestWarning
+ """
+ )
+ args = (
+ ["-W", "ignore::pytest.PytestWarning"]
+ if ignore_pytest_warnings == "cmdline"
+ else []
+ )
+ result = pytester.runpytest(*args)
+ if ignore_pytest_warnings != "no":
+ assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()
+ else:
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ "*test_hide_pytest_internal_warnings.py:4: PytestWarning: some internal warning",
+ "* 1 passed, 1 warning *",
+ ]
+ )
+
+
+@pytest.mark.parametrize("ignore_on_cmdline", [True, False])
+def test_option_precedence_cmdline_over_ini(
+ pytester: Pytester, ignore_on_cmdline
+) -> None:
+    """Filters defined on the command line should take precedence over filters in ini files (#3946)."""
+ pytester.makeini(
+ """
+ [pytest]
+ filterwarnings = error::UserWarning
+ """
+ )
+ pytester.makepyfile(
+ """
+ import warnings
+ def test():
+ warnings.warn(UserWarning('hello'))
+ """
+ )
+ args = ["-W", "ignore"] if ignore_on_cmdline else []
+ result = pytester.runpytest(*args)
+ if ignore_on_cmdline:
+ result.stdout.fnmatch_lines(["* 1 passed in*"])
+ else:
+ result.stdout.fnmatch_lines(["* 1 failed in*"])
+
+
+def test_option_precedence_mark(pytester: Pytester) -> None:
+ """Filters defined by marks should always take precedence (#3946)."""
+ pytester.makeini(
+ """
+ [pytest]
+ filterwarnings = ignore
+ """
+ )
+ pytester.makepyfile(
+ """
+ import pytest, warnings
+ @pytest.mark.filterwarnings('error')
+ def test():
+ warnings.warn(UserWarning('hello'))
+ """
+ )
+ result = pytester.runpytest("-W", "ignore")
+ result.stdout.fnmatch_lines(["* 1 failed in*"])
+
+
+class TestDeprecationWarningsByDefault:
+ """
+    Note: all pytest runs are executed in a subprocess, so we don't inherit warning
+    filters from pytest's own test suite.
+ """
+
+ def create_file(self, pytester: Pytester, mark="") -> None:
+ pytester.makepyfile(
+ """
+ import pytest, warnings
+
+ warnings.warn(DeprecationWarning("collection"))
+
+ {mark}
+ def test_foo():
+ warnings.warn(PendingDeprecationWarning("test run"))
+ """.format(
+ mark=mark
+ )
+ )
+
+ @pytest.mark.parametrize("customize_filters", [True, False])
+ def test_shown_by_default(self, pytester: Pytester, customize_filters) -> None:
+        """Show deprecation warnings by default, even if the user has customized the warnings filters (#4013)."""
+ self.create_file(pytester)
+ if customize_filters:
+ pytester.makeini(
+ """
+ [pytest]
+ filterwarnings =
+ once::UserWarning
+ """
+ )
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ "*test_shown_by_default.py:3: DeprecationWarning: collection",
+ "*test_shown_by_default.py:7: PendingDeprecationWarning: test run",
+ "* 1 passed, 2 warnings*",
+ ]
+ )
+
+ def test_hidden_by_ini(self, pytester: Pytester) -> None:
+ self.create_file(pytester)
+ pytester.makeini(
+ """
+ [pytest]
+ filterwarnings =
+ ignore::DeprecationWarning
+ ignore::PendingDeprecationWarning
+ """
+ )
+ result = pytester.runpytest_subprocess()
+ assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()
+
+ def test_hidden_by_mark(self, pytester: Pytester) -> None:
+ """Should hide the deprecation warning from the function, but the warning during collection should
+ be displayed normally.
+ """
+ self.create_file(
+ pytester,
+ mark='@pytest.mark.filterwarnings("ignore::PendingDeprecationWarning")',
+ )
+ result = pytester.runpytest_subprocess()
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ "*test_hidden_by_mark.py:3: DeprecationWarning: collection",
+ "* 1 passed, 1 warning*",
+ ]
+ )
+
+ def test_hidden_by_cmdline(self, pytester: Pytester) -> None:
+ self.create_file(pytester)
+ result = pytester.runpytest_subprocess(
+ "-W",
+ "ignore::DeprecationWarning",
+ "-W",
+ "ignore::PendingDeprecationWarning",
+ )
+ assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()
+
+ def test_hidden_by_system(self, pytester: Pytester, monkeypatch) -> None:
+ self.create_file(pytester)
+ monkeypatch.setenv("PYTHONWARNINGS", "once::UserWarning")
+ result = pytester.runpytest_subprocess()
+ assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()
+
+
+@pytest.mark.parametrize("change_default", [None, "ini", "cmdline"])
+def test_removed_in_x_warning_as_error(pytester: Pytester, change_default) -> None:
+ """This ensures that PytestRemovedInXWarnings raised by pytest are turned into errors.
+
+ This test should be enabled as part of each major release, and skipped again afterwards
+ to ensure our deprecations are turning into warnings as expected.
+ """
+ pytester.makepyfile(
+ """
+ import warnings, pytest
+ def test():
+ warnings.warn(pytest.PytestRemovedIn7Warning("some warning"))
+ """
+ )
+ if change_default == "ini":
+ pytester.makeini(
+ """
+ [pytest]
+ filterwarnings =
+ ignore::pytest.PytestRemovedIn7Warning
+ """
+ )
+
+ args = (
+ ("-Wignore::pytest.PytestRemovedIn7Warning",)
+ if change_default == "cmdline"
+ else ()
+ )
+ result = pytester.runpytest(*args)
+ if change_default is None:
+ result.stdout.fnmatch_lines(["* 1 failed in *"])
+ else:
+ assert change_default in ("ini", "cmdline")
+ result.stdout.fnmatch_lines(["* 1 passed in *"])
+
+
+class TestAssertionWarnings:
+ @staticmethod
+ def assert_result_warns(result, msg) -> None:
+ result.stdout.fnmatch_lines(["*PytestAssertRewriteWarning: %s*" % msg])
+
+ def test_tuple_warning(self, pytester: Pytester) -> None:
+ pytester.makepyfile(
+ """\
+ def test_foo():
+ assert (1,2)
+ """
+ )
+ result = pytester.runpytest()
+ self.assert_result_warns(
+ result, "assertion is always true, perhaps remove parentheses?"
+ )
+
+
+def test_warnings_checker_twice() -> None:
+ """Issue #4617"""
+ expectation = pytest.warns(UserWarning)
+ with expectation:
+ warnings.warn("Message A", UserWarning)
+ with expectation:
+ warnings.warn("Message B", UserWarning)
+
+
+@pytest.mark.filterwarnings("always::UserWarning")
+def test_group_warnings_by_message(pytester: Pytester) -> None:
+ pytester.copy_example("warnings/test_group_warnings_by_message.py")
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ "test_group_warnings_by_message.py::test_foo[[]0[]]",
+ "test_group_warnings_by_message.py::test_foo[[]1[]]",
+ "test_group_warnings_by_message.py::test_foo[[]2[]]",
+ "test_group_warnings_by_message.py::test_foo[[]3[]]",
+ "test_group_warnings_by_message.py::test_foo[[]4[]]",
+ "test_group_warnings_by_message.py::test_foo_1",
+ " */test_group_warnings_by_message.py:*: UserWarning: foo",
+ " warnings.warn(UserWarning(msg))",
+ "",
+ "test_group_warnings_by_message.py::test_bar[[]0[]]",
+ "test_group_warnings_by_message.py::test_bar[[]1[]]",
+ "test_group_warnings_by_message.py::test_bar[[]2[]]",
+ "test_group_warnings_by_message.py::test_bar[[]3[]]",
+ "test_group_warnings_by_message.py::test_bar[[]4[]]",
+ " */test_group_warnings_by_message.py:*: UserWarning: bar",
+ " warnings.warn(UserWarning(msg))",
+ "",
+ "-- Docs: *",
+ "*= 11 passed, 11 warnings *",
+ ],
+ consecutive=True,
+ )
+
+
+@pytest.mark.filterwarnings("always::UserWarning")
+def test_group_warnings_by_message_summary(pytester: Pytester) -> None:
+ pytester.copy_example("warnings/test_group_warnings_by_message_summary")
+ pytester.syspathinsert()
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ [
+ "*== %s ==*" % WARNINGS_SUMMARY_HEADER,
+ "test_1.py: 21 warnings",
+ "test_2.py: 1 warning",
+ " */test_1.py:7: UserWarning: foo",
+ " warnings.warn(UserWarning(msg))",
+ "",
+ "test_1.py: 20 warnings",
+ " */test_1.py:7: UserWarning: bar",
+ " warnings.warn(UserWarning(msg))",
+ "",
+ "-- Docs: *",
+ "*= 42 passed, 42 warnings *",
+ ],
+ consecutive=True,
+ )
+
+
+def test_pytest_configure_warning(pytester: Pytester, recwarn) -> None:
+ """Issue 5115."""
+ pytester.makeconftest(
+ """
+ def pytest_configure():
+ import warnings
+
+ warnings.warn("from pytest_configure")
+ """
+ )
+
+ result = pytester.runpytest()
+ assert result.ret == 5
+ assert "INTERNALERROR" not in result.stderr.str()
+ warning = recwarn.pop()
+ assert str(warning.message) == "from pytest_configure"
+
+
+class TestStackLevel:
+ @pytest.fixture
+ def capwarn(self, pytester: Pytester):
+ class CapturedWarnings:
+ captured: List[
+ Tuple[warnings.WarningMessage, Optional[Tuple[str, int, str]]]
+ ] = []
+
+ @classmethod
+ def pytest_warning_recorded(cls, warning_message, when, nodeid, location):
+ cls.captured.append((warning_message, location))
+
+ pytester.plugins = [CapturedWarnings()]
+
+ return CapturedWarnings
+
+ def test_issue4445_rewrite(self, pytester: Pytester, capwarn) -> None:
+ """#4445: Make sure the warning points to a reasonable location
+ See origin of _issue_warning_captured at: _pytest.assertion.rewrite.py:241
+ """
+ pytester.makepyfile(some_mod="")
+ conftest = pytester.makeconftest(
+ """
+ import some_mod
+ import pytest
+
+ pytest.register_assert_rewrite("some_mod")
+ """
+ )
+ pytester.parseconfig()
+
+        # With stacklevel=5 the warning originates from the register_assert_rewrite()
+        # call in the created conftest.py.
+ assert len(capwarn.captured) == 1
+ warning, location = capwarn.captured.pop()
+ file, lineno, func = location
+
+ assert "Module already imported" in str(warning.message)
+ assert file == str(conftest)
+ assert func == "<module>" # the above conftest.py
+ assert lineno == 4
+
+ def test_issue4445_preparse(self, pytester: Pytester, capwarn) -> None:
+ """#4445: Make sure the warning points to a reasonable location
+ See origin of _issue_warning_captured at: _pytest.config.__init__.py:910
+ """
+ pytester.makeconftest(
+ """
+ import nothing
+ """
+ )
+ pytester.parseconfig("--help")
+
+        # With stacklevel=2 the warning should originate from config._preparse and be
+        # triggered by an erroneous conftest.py.
+ assert len(capwarn.captured) == 1
+ warning, location = capwarn.captured.pop()
+ file, _, func = location
+
+ assert "could not load initial conftests" in str(warning.message)
+ assert f"config{os.sep}__init__.py" in file
+ assert func == "_preparse"
+
+ @pytest.mark.filterwarnings("default")
+ def test_conftest_warning_captured(self, pytester: Pytester) -> None:
+ """Warnings raised during importing of conftest.py files is captured (#2891)."""
+ pytester.makeconftest(
+ """
+ import warnings
+ warnings.warn(UserWarning("my custom warning"))
+ """
+ )
+ result = pytester.runpytest()
+ result.stdout.fnmatch_lines(
+ ["conftest.py:2", "*UserWarning: my custom warning*"]
+ )
+
+ def test_issue4445_import_plugin(self, pytester: Pytester, capwarn) -> None:
+ """#4445: Make sure the warning points to a reasonable location"""
+ pytester.makepyfile(
+ some_plugin="""
+ import pytest
+ pytest.skip("thing", allow_module_level=True)
+ """
+ )
+ pytester.syspathinsert()
+ pytester.parseconfig("-p", "some_plugin")
+
+        # with stacklevel=2 the warning should originate from
+        # config.PytestPluginManager.import_plugin and is thrown by a skipped plugin
+
+ assert len(capwarn.captured) == 1
+ warning, location = capwarn.captured.pop()
+ file, _, func = location
+
+ assert "skipped plugin 'some_plugin': thing" in str(warning.message)
+ assert f"config{os.sep}__init__.py" in file
+ assert func == "_warn_about_skipped_plugins"
+
+ def test_issue4445_issue5928_mark_generator(self, pytester: Pytester) -> None:
+ """#4445 and #5928: Make sure the warning from an unknown mark points to
+ the test file where this mark is used.
+ """
+ testfile = pytester.makepyfile(
+ """
+ import pytest
+
+ @pytest.mark.unknown
+ def test_it():
+ pass
+ """
+ )
+ result = pytester.runpytest_subprocess()
+ # with stacklevel=2 the warning should originate from the above created test file
+ result.stdout.fnmatch_lines_random(
+ [
+ f"*{testfile}:3*",
+ "*Unknown pytest.mark.unknown*",
+ ]
+ )
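
The `capwarn` fixture above relies on pytest's `pytest_warning_recorded` hook to capture each warning together with the location it was issued from. As a minimal sketch (not part of this patch), a standalone conftest.py plugin using the same hook could look like this; the `captured` list name is illustrative:

```python
# Minimal sketch of a conftest.py plugin built on the same hook the tests
# above use; `captured` is an illustrative name, not part of this patch.
import warnings
from typing import List, Optional, Tuple

captured: List[Tuple[warnings.WarningMessage, Optional[Tuple[str, int, str]]]] = []


def pytest_warning_recorded(warning_message, when, nodeid, location):
    # Called once per warning; `location` is (filename, lineno, function)
    # when pytest can determine where the warning was issued.
    captured.append((warning_message, location))
```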
diff --git a/testing/web-platform/tests/tools/third_party/pytest/testing/typing_checks.py b/testing/web-platform/tests/tools/third_party/pytest/testing/typing_checks.py
new file mode 100644
index 0000000000..0a6b5ad284
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/testing/typing_checks.py
@@ -0,0 +1,24 @@
+"""File for checking typing issues.
+
+This file is not executed, it is only checked by mypy to ensure that
+none of the code triggers any mypy errors.
+"""
+import pytest
+
+
+# Issue #7488.
+@pytest.mark.xfail(raises=RuntimeError)
+def check_mark_xfail_raises() -> None:
+ pass
+
+
+# Issue #7494.
+@pytest.fixture(params=[(0, 0), (1, 1)], ids=lambda x: str(x[0]))
+def check_fixture_ids_callable() -> None:
+ pass
+
+
+# Issue #7494.
+@pytest.mark.parametrize("func", [str, int], ids=lambda x: str(x.__name__))
+def check_parametrize_ids_callable(func) -> None:
+ pass
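
The checks above only need to satisfy mypy, so they are never executed. For reference, a runnable counterpart of the callable-`ids` pattern (assumed test names, not part of this patch) would be:

```python
# Runnable illustration of parametrize with a callable `ids` argument: the
# callable receives each parameter value and returns its id string, so the
# generated test ids are test_is_callable[str] and test_is_callable[int].
import pytest


@pytest.mark.parametrize("func", [str, int], ids=lambda f: f.__name__)
def test_is_callable(func) -> None:
    assert callable(func)
```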
diff --git a/testing/web-platform/tests/tools/third_party/pytest/tox.ini b/testing/web-platform/tests/tools/third_party/pytest/tox.ini
new file mode 100644
index 0000000000..b2f90008ce
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pytest/tox.ini
@@ -0,0 +1,184 @@
+[tox]
+isolated_build = True
+minversion = 3.20.0
+distshare = {homedir}/.tox/distshare
+envlist =
+ linting
+ py36
+ py37
+ py38
+ py39
+ py310
+ py311
+ pypy3
+ py37-{pexpect,xdist,unittestextras,numpy,pluggymain}
+ doctesting
+ plugins
+ py37-freeze
+ docs
+ docs-checklinks
+
+
+
+[testenv]
+commands =
+ {env:_PYTEST_TOX_COVERAGE_RUN:} pytest {posargs:{env:_PYTEST_TOX_DEFAULT_POSARGS:}}
+ doctesting: {env:_PYTEST_TOX_COVERAGE_RUN:} pytest --doctest-modules --pyargs _pytest
+ coverage: coverage combine
+ coverage: coverage report -m
+passenv = USER USERNAME COVERAGE_* PYTEST_ADDOPTS TERM SETUPTOOLS_SCM_PRETEND_VERSION_FOR_PYTEST
+setenv =
+ _PYTEST_TOX_DEFAULT_POSARGS={env:_PYTEST_TOX_POSARGS_DOCTESTING:} {env:_PYTEST_TOX_POSARGS_LSOF:} {env:_PYTEST_TOX_POSARGS_XDIST:}
+
+ # Configuration to run with coverage similar to CI, e.g.
+ # "tox -e py37-coverage".
+ coverage: _PYTEST_TOX_COVERAGE_RUN=coverage run -m
+ coverage: _PYTEST_TOX_EXTRA_DEP=coverage-enable-subprocess
+ coverage: COVERAGE_FILE={toxinidir}/.coverage
+ coverage: COVERAGE_PROCESS_START={toxinidir}/.coveragerc
+
+ doctesting: _PYTEST_TOX_POSARGS_DOCTESTING=doc/en
+
+ nobyte: PYTHONDONTWRITEBYTECODE=1
+
+ lsof: _PYTEST_TOX_POSARGS_LSOF=--lsof
+
+ xdist: _PYTEST_TOX_POSARGS_XDIST=-n auto
+extras = testing
+deps =
+ doctesting: PyYAML
+ numpy: numpy>=1.19.4
+ pexpect: pexpect>=4.8.0
+ pluggymain: pluggy @ git+https://github.com/pytest-dev/pluggy.git
+ unittestextras: twisted
+ unittestextras: asynctest
+ xdist: pytest-xdist>=2.1.0
+ xdist: -e .
+ {env:_PYTEST_TOX_EXTRA_DEP:}
+
+[testenv:linting]
+skip_install = True
+basepython = python3
+deps = pre-commit>=2.9.3
+commands = pre-commit run --all-files --show-diff-on-failure {posargs:}
+
+[testenv:docs]
+basepython = python3
+usedevelop = True
+deps =
+ -r{toxinidir}/doc/en/requirements.txt
+ # https://github.com/twisted/towncrier/issues/340
+ towncrier<21.3.0
+commands =
+ python scripts/towncrier-draft-to-file.py
+    # the '-t changelog_towncrier_draft' tag makes sphinx include the draft
+    # changelog in the docs; this does not happen on ReadTheDocs because it uses
+    # the standard sphinx command, so 'changelog_towncrier_draft' is never set there
+ sphinx-build -W --keep-going -b html doc/en doc/en/_build/html -t changelog_towncrier_draft {posargs:}
+
+[testenv:docs-checklinks]
+basepython = python3
+usedevelop = True
+changedir = doc/en
+deps = -r{toxinidir}/doc/en/requirements.txt
+commands =
+ sphinx-build -W -q --keep-going -b linkcheck . _build
+
+[testenv:regen]
+changedir = doc/en
+basepython = python3
+passenv = SETUPTOOLS_SCM_PRETEND_VERSION_FOR_PYTEST
+deps =
+ dataclasses
+ PyYAML
+ regendoc>=0.8.1
+ sphinx
+whitelist_externals =
+ make
+commands =
+ make regen
+
+[testenv:plugins]
+# use latest versions of all plugins, including pre-releases
+pip_pre=true
+# use latest pip and new dependency resolver (#7783)
+download=true
+install_command=python -m pip --use-feature=2020-resolver install {opts} {packages}
+changedir = testing/plugins_integration
+deps = -rtesting/plugins_integration/requirements.txt
+setenv =
+ PYTHONPATH=.
+commands =
+ pip check
+ pytest bdd_wallet.py
+ pytest --cov=. simple_integration.py
+ pytest --ds=django_settings simple_integration.py
+ pytest --html=simple.html simple_integration.py
+ pytest --reruns 5 simple_integration.py
+ pytest pytest_anyio_integration.py
+ pytest pytest_asyncio_integration.py
+ pytest pytest_mock_integration.py
+ pytest pytest_trio_integration.py
+ pytest pytest_twisted_integration.py
+ pytest simple_integration.py --force-sugar --flakes
+
+[testenv:py37-freeze]
+changedir = testing/freeze
+deps =
+ pyinstaller
+commands =
+ {envpython} create_executable.py
+ {envpython} tox_run.py
+
+[testenv:release]
+description = do a release, required posarg of the version number
+basepython = python3
+usedevelop = True
+passenv = *
+deps =
+ colorama
+ github3.py
+ pre-commit>=2.9.3
+ wheel
+ # https://github.com/twisted/towncrier/issues/340
+ towncrier<21.3.0
+commands = python scripts/release.py {posargs}
+
+[testenv:prepare-release-pr]
+description = prepare a release PR from a manual trigger in GitHub actions
+usedevelop = {[testenv:release]usedevelop}
+passenv = {[testenv:release]passenv}
+deps = {[testenv:release]deps}
+commands = python scripts/prepare-release-pr.py {posargs}
+
+[testenv:publish-gh-release-notes]
+description = create GitHub release after deployment
+basepython = python3
+usedevelop = True
+passenv = GH_RELEASE_NOTES_TOKEN GITHUB_REF GITHUB_REPOSITORY
+deps =
+ github3.py
+ pypandoc
+commands = python scripts/publish-gh-release-notes.py {posargs}
+
+[flake8]
+max-line-length = 120
+extend-ignore =
+ ; whitespace before ':'
+ E203
+ ; Missing Docstrings
+ D100,D101,D102,D103,D104,D105,D106,D107
+ ; Whitespace Issues
+ D202,D203,D204,D205,D209,D213
+ ; Quotes Issues
+ D302
+ ; Docstring Content Issues
+    D400,D401,D402,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D415,D416,D417
+
+
+[isort]
+; This config mimics what reorder-python-imports does.
+force_single_line = 1
+known_localfolder = pytest,_pytest
+known_third_party = test_source,test_excinfo
+force_alphabetical_sort_within_sections = 1
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/.gitignore b/testing/web-platform/tests/tools/third_party/pywebsocket3/.gitignore
new file mode 100644
index 0000000000..70f2867054
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/.gitignore
@@ -0,0 +1,4 @@
+*.pyc
+build/
+*.egg-info/
+dist/
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/.travis.yml b/testing/web-platform/tests/tools/third_party/pywebsocket3/.travis.yml
new file mode 100644
index 0000000000..2065a644dd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/.travis.yml
@@ -0,0 +1,17 @@
+language: python
+python:
+ - 2.7
+ - 3.5
+ - 3.6
+ - 3.7
+ - 3.8
+ - nightly
+
+matrix:
+ allow_failures:
+ - python: 3.5, nightly
+install:
+ - pip install six yapf
+script:
+ - python test/run_all.py
+ - yapf --diff --recursive .
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/CONTRIBUTING b/testing/web-platform/tests/tools/third_party/pywebsocket3/CONTRIBUTING
new file mode 100644
index 0000000000..f975be126f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/CONTRIBUTING
@@ -0,0 +1,30 @@
+# How to Contribute
+
+We'd love to accept your patches and contributions to this project. There are
+just a few small guidelines you need to follow.
+
+## Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution;
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
+## Code reviews
+
+All submissions, including submissions by project members, require review. We
+use GitHub pull requests for this purpose. Consult
+[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
+information on using pull requests.
+For instructions for contributing code, please read:
+https://github.com/google/pywebsocket/wiki/CodeReviewInstruction
+
+## Community Guidelines
+
+This project follows
+[Google's Open Source Community Guidelines](https://opensource.google/conduct/).
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/LICENSE b/testing/web-platform/tests/tools/third_party/pywebsocket3/LICENSE
new file mode 100644
index 0000000000..c91bea9025
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/LICENSE
@@ -0,0 +1,28 @@
+Copyright 2020, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/MANIFEST.in b/testing/web-platform/tests/tools/third_party/pywebsocket3/MANIFEST.in
new file mode 100644
index 0000000000..19256882c5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/MANIFEST.in
@@ -0,0 +1,6 @@
+include COPYING
+include MANIFEST.in
+include README
+recursive-include example *.py
+recursive-include mod_pywebsocket *.py
+recursive-include test *.py
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/README.md b/testing/web-platform/tests/tools/third_party/pywebsocket3/README.md
new file mode 100644
index 0000000000..8684f2cc7e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/README.md
@@ -0,0 +1,36 @@
+
+# pywebsocket3 #
+
+The pywebsocket project aims to provide a standalone [WebSocket](https://tools.ietf.org/html/rfc6455) server.
+
+pywebsocket is intended for **testing** or **experimental** purposes.
+
+Run this to read the general documentation:
+```
+$ pydoc mod_pywebsocket
+```
+
+Please see [Wiki](https://github.com/GoogleChromeLabs/pywebsocket3/wiki) for more details.
+
+# INSTALL #
+
+To install this package to the system, run this:
+```
+$ python setup.py build
+$ sudo python setup.py install
+```
+
+To install this package as a normal user, run this instead:
+
+```
+$ python setup.py build
+$ python setup.py install --user
+```
+# LAUNCH #
+
+To use pywebsocket as a standalone server, run this to read the documentation:
+```
+$ pydoc mod_pywebsocket.standalone
+```
+# Disclaimer #
+This is not an officially supported Google product.
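
For context, the standalone server is normally started straight from the mod_pywebsocket package, as the echo_client.py docstring later in this patch shows. A hedged sketch of launching it from Python, where the port number and handler directory are illustrative values:

```python
# Launches the standalone server on port 8880, serving *_wsh.py handlers
# from the example/ directory; mirrors the command shown in echo_client.py's
# docstring. The port and directory here are illustrative, not prescribed.
import subprocess

subprocess.run(
    ["python", "mod_pywebsocket/standalone.py", "-p", "8880", "-d", "example"],
    check=True,
)
```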
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/abort_handshake_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/abort_handshake_wsh.py
new file mode 100644
index 0000000000..1b719ca897
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/abort_handshake_wsh.py
@@ -0,0 +1,43 @@
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import
+from mod_pywebsocket import handshake
+
+
+def web_socket_do_extra_handshake(request):
+ raise handshake.AbortedByUserException(
+ "Aborted in web_socket_do_extra_handshake")
+
+
+def web_socket_transfer_data(request):
+ pass
+
+
+# vi:sts=4 sw=4 et
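
abort_handshake_wsh.py illustrates the two-function contract every *_wsh.py handler follows: `web_socket_do_extra_handshake` runs during the opening handshake and `web_socket_transfer_data` runs once the connection is established. As a hedged sketch (not part of this patch), a minimal echo handler using that contract might look like:

```python
# Minimal echo handler sketch: receives text messages and sends them back
# until the client starts the closing handshake.
from __future__ import absolute_import


def web_socket_do_extra_handshake(request):
    pass  # Accept every connection.


def web_socket_transfer_data(request):
    while True:
        message = request.ws_stream.receive_message()
        if message is None:
            return  # The client started the closing handshake.
        request.ws_stream.send_message(message)
```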
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/abort_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/abort_wsh.py
new file mode 100644
index 0000000000..d4c240bf2c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/abort_wsh.py
@@ -0,0 +1,43 @@
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import
+from mod_pywebsocket import handshake
+
+
+def web_socket_do_extra_handshake(request):
+ pass
+
+
+def web_socket_transfer_data(request):
+ raise handshake.AbortedByUserException(
+ "Aborted in web_socket_transfer_data")
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/arraybuffer_benchmark.html b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/arraybuffer_benchmark.html
new file mode 100644
index 0000000000..869cd7e1ee
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/arraybuffer_benchmark.html
@@ -0,0 +1,134 @@
+<!--
+Copyright 2013, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+-->
+
+<html>
+<head>
+<title>ArrayBuffer benchmark</title>
+<script src="util.js"></script>
+<script>
+var PRINT_SIZE = true;
+
+// Initial size of arrays.
+var START_SIZE = 10 * 1024;
+// Stops benchmark when the size of an array exceeds this threshold.
+var STOP_THRESHOLD = 100000 * 1024;
+// If the size of each array is small, write/read the array multiple times
+// until the sum of sizes reaches this threshold.
+var MIN_TOTAL = 100000 * 1024;
+var MULTIPLIERS = [5, 2];
+
+// Repeat benchmark for several times to measure performance of optimized
+// (such as JIT) run.
+var REPEAT_FOR_WARMUP = 3;
+
+function writeBenchmark(size, minTotal) {
+ var totalSize = 0;
+ while (totalSize < minTotal) {
+ var arrayBuffer = new ArrayBuffer(size);
+
+ // Write 'a's.
+ fillArrayBuffer(arrayBuffer, 0x61);
+
+ totalSize += size;
+ }
+ return totalSize;
+}
+
+function readBenchmark(size, minTotal) {
+ var totalSize = 0;
+ while (totalSize < minTotal) {
+ var arrayBuffer = new ArrayBuffer(size);
+
+ if (!verifyArrayBuffer(arrayBuffer, 0x00)) {
+ queueLog('Verification failed');
+ return -1;
+ }
+
+ totalSize += size;
+ }
+ return totalSize;
+}
+
+function runBenchmark(benchmarkFunction,
+ size,
+ stopThreshold,
+ minTotal,
+ multipliers,
+ multiplierIndex) {
+ while (size <= stopThreshold) {
+ var maxSpeed = 0;
+
+ for (var i = 0; i < REPEAT_FOR_WARMUP; ++i) {
+ var startTimeInMs = getTimeStamp();
+
+ var totalSize = benchmarkFunction(size, minTotal);
+
+ maxSpeed = Math.max(maxSpeed,
+ calculateSpeedInKB(totalSize, startTimeInMs));
+ }
+ queueLog(formatResultInKiB(size, maxSpeed, PRINT_SIZE));
+
+ size *= multipliers[multiplierIndex];
+ multiplierIndex = (multiplierIndex + 1) % multipliers.length;
+ }
+}
+
+function runBenchmarks() {
+ queueLog('Message size in KiB, Speed in kB/s');
+
+ queueLog('Write benchmark');
+ runBenchmark(
+ writeBenchmark, START_SIZE, STOP_THRESHOLD, MIN_TOTAL, MULTIPLIERS, 0);
+ queueLog('Finished');
+
+ queueLog('Read benchmark');
+ runBenchmark(
+ readBenchmark, START_SIZE, STOP_THRESHOLD, MIN_TOTAL, MULTIPLIERS, 0);
+ addToLog('Finished');
+}
+
+function init() {
+ logBox = document.getElementById('log');
+
+ queueLog(window.navigator.userAgent.toLowerCase());
+
+ addToLog('Started...');
+
+ setTimeout(runBenchmarks, 0);
+}
+
+</script>
+</head>
+<body onload="init()">
+<textarea
+ id="log" rows="50" style="width: 100%" readonly></textarea>
+</body>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/bench_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/bench_wsh.py
new file mode 100644
index 0000000000..2df50e77db
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/bench_wsh.py
@@ -0,0 +1,59 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""A simple load tester for WebSocket clients.
+
+A client program sends a message formatted as "<time> <count> <message>" to
+this handler. This handler then sends a total of <count> WebSocket messages
+containing <message>, one every <time> seconds. <time> can be a floating point
+value. <count> must be an integer value.
+"""
+
+from __future__ import absolute_import
+import time
+from six.moves import range
+
+
+def web_socket_do_extra_handshake(request):
+ pass # Always accept.
+
+
+def web_socket_transfer_data(request):
+ line = request.ws_stream.receive_message()
+ parts = line.split(' ')
+ if len(parts) != 3:
+ raise ValueError('Bad parameter format')
+ wait = float(parts[0])
+ count = int(parts[1])
+ message = parts[2]
+ for i in range(count):
+ request.ws_stream.send_message(message)
+ time.sleep(wait)
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/benchmark.html b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/benchmark.html
new file mode 100644
index 0000000000..f1e5c97b3a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/benchmark.html
@@ -0,0 +1,175 @@
+<!--
+Copyright 2013, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+-->
+
+<html>
+<head>
+<title>WebSocket benchmark</title>
+<script src="util_main.js"></script>
+<script src="util.js"></script>
+<script src="benchmark.js"></script>
+<script>
+var addressBox = null;
+
+function getConfig() {
+ return {
+ prefixUrl: addressBox.value,
+ printSize: getBoolFromCheckBox('printsize'),
+ numSockets: getIntFromInput('numsockets'),
+ // Initial size of messages.
+ numIterations: getIntFromInput('numiterations'),
+ numWarmUpIterations: getIntFromInput('numwarmupiterations'),
+ startSize: getIntFromInput('startsize'),
+ // Stops benchmark when the size of message exceeds this threshold.
+ stopThreshold: getIntFromInput('stopthreshold'),
+ // If the size of each message is small, send/receive multiple messages
+ // until the sum of sizes reaches this threshold.
+ minTotal: getIntFromInput('mintotal'),
+ multipliers: getFloatArrayFromInput('multipliers'),
+ verifyData: getBoolFromCheckBox('verifydata'),
+ addToLog: addToLog,
+ addToSummary: addToSummary,
+ measureValue: measureValue,
+ notifyAbort: notifyAbort
+ };
+}
+
+function onSendBenchmark() {
+ var config = getConfig();
+ doAction(config, getBoolFromCheckBox('worker'), 'sendBenchmark');
+}
+
+function onReceiveBenchmark() {
+ var config = getConfig();
+ doAction(config, getBoolFromCheckBox('worker'), 'receiveBenchmark');
+}
+
+function onBatchBenchmark() {
+ var config = getConfig();
+ doAction(config, getBoolFromCheckBox('worker'), 'batchBenchmark');
+}
+
+function onStop() {
+ var config = getConfig();
+ doAction(config, getBoolFromCheckBox('worker'), 'stop');
+}
+
+function init() {
+ addressBox = document.getElementById('address');
+ logBox = document.getElementById('log');
+
+ summaryBox = document.getElementById('summary');
+
+ var scheme = window.location.protocol == 'https:' ? 'wss://' : 'ws://';
+ var defaultAddress = scheme + window.location.host + '/benchmark_helper';
+
+ addressBox.value = defaultAddress;
+
+ addToLog(window.navigator.userAgent.toLowerCase());
+ addToSummary(window.navigator.userAgent.toLowerCase());
+
+ if (!('WebSocket' in window)) {
+ addToLog('WebSocket is not available');
+ }
+
+ initWorker('');
+}
+</script>
+</head>
+<body onload="init()">
+
+<div id="benchmark_div">
+ url <input type="text" id="address" size="40">
+ <input type="button" value="send" onclick="onSendBenchmark()">
+ <input type="button" value="receive" onclick="onReceiveBenchmark()">
+ <input type="button" value="batch" onclick="onBatchBenchmark()">
+ <input type="button" value="stop" onclick="onStop()">
+
+ <br/>
+
+ <input type="checkbox" id="printsize" checked>
+ <label for="printsize">Print size and time per message</label>
+ <input type="checkbox" id="verifydata" checked>
+ <label for="verifydata">Verify data</label>
+ <input type="checkbox" id="worker">
+ <label for="worker">Run on worker</label>
+
+ <br/>
+
+ Parameters:
+
+ <br/>
+
+ <table>
+ <tr>
+ <td>Num sockets</td>
+ <td><input type="text" id="numsockets" value="1"></td>
+ </tr>
+ <tr>
+ <td>Number of iterations</td>
+ <td><input type="text" id="numiterations" value="1"></td>
+ </tr>
+ <tr>
+ <td>Number of warm-up iterations</td>
+ <td><input type="text" id="numwarmupiterations" value="0"></td>
+ </tr>
+ <tr>
+ <td>Start size</td>
+ <td><input type="text" id="startsize" value="10240"></td>
+ </tr>
+ <tr>
+ <td>Stop threshold</td>
+ <td><input type="text" id="stopthreshold" value="102400000"></td>
+ </tr>
+ <tr>
+ <td>Minimum total</td>
+ <td><input type="text" id="mintotal" value="102400000"></td>
+ </tr>
+ <tr>
+ <td>Multipliers</td>
+ <td><input type="text" id="multipliers" value="5, 2"></td>
+ </tr>
+ </table>
+</div>
+
+<div id="log_div">
+ <textarea
+ id="log" rows="20" style="width: 100%" readonly></textarea>
+</div>
+<div id="summary_div">
+ Summary
+ <textarea
+ id="summary" rows="20" style="width: 100%" readonly></textarea>
+</div>
+
+Note: Effect of RTT is not eliminated.
+
+</body>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/benchmark.js b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/benchmark.js
new file mode 100644
index 0000000000..2701472a4f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/benchmark.js
@@ -0,0 +1,238 @@
+// Copyright 2014, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+if (typeof importScripts !== "undefined") {
+ // Running on a worker
+ importScripts('util.js', 'util_worker.js');
+}
+
+// Namespace for holding globals.
+var benchmark = {startTimeInMs: 0};
+
+var sockets = [];
+var numEstablishedSockets = 0;
+
+var timerID = null;
+
+function destroySocket(socket) {
+ socket.onopen = null;
+ socket.onmessage = null;
+ socket.onerror = null;
+ socket.onclose = null;
+ socket.close();
+}
+
+function destroyAllSockets() {
+ for (var i = 0; i < sockets.length; ++i) {
+ destroySocket(sockets[i]);
+ }
+ sockets = [];
+}
+
+function sendBenchmarkStep(size, config, isWarmUp) {
+ timerID = null;
+
+ var totalSize = 0;
+ var totalReplied = 0;
+
+ var onMessageHandler = function(event) {
+ if (!verifyAcknowledgement(config, event.data, size)) {
+ destroyAllSockets();
+ config.notifyAbort();
+ return;
+ }
+
+ totalReplied += size;
+
+ if (totalReplied < totalSize) {
+ return;
+ }
+
+ calculateAndLogResult(config, size, benchmark.startTimeInMs, totalSize,
+ isWarmUp);
+
+ runNextTask(config);
+ };
+
+ for (var i = 0; i < sockets.length; ++i) {
+ var socket = sockets[i];
+ socket.onmessage = onMessageHandler;
+ }
+
+ var dataArray = [];
+
+ while (totalSize < config.minTotal) {
+ var buffer = new ArrayBuffer(size);
+
+ fillArrayBuffer(buffer, 0x61);
+
+ dataArray.push(buffer);
+ totalSize += size;
+ }
+
+ benchmark.startTimeInMs = getTimeStamp();
+
+ totalSize = 0;
+
+ var socketIndex = 0;
+ var dataIndex = 0;
+ while (totalSize < config.minTotal) {
+ var command = ['send'];
+ command.push(config.verifyData ? '1' : '0');
+ sockets[socketIndex].send(command.join(' '));
+ sockets[socketIndex].send(dataArray[dataIndex]);
+ socketIndex = (socketIndex + 1) % sockets.length;
+
+ totalSize += size;
+ ++dataIndex;
+ }
+}
+
+function receiveBenchmarkStep(size, config, isWarmUp) {
+ timerID = null;
+
+ var totalSize = 0;
+ var totalReplied = 0;
+
+ var onMessageHandler = function(event) {
+ var bytesReceived = event.data.byteLength;
+ if (bytesReceived != size) {
+ config.addToLog('Expected ' + size + 'B but received ' +
+ bytesReceived + 'B');
+ destroyAllSockets();
+ config.notifyAbort();
+ return;
+ }
+
+ if (config.verifyData && !verifyArrayBuffer(event.data, 0x61)) {
+ config.addToLog('Response verification failed');
+ destroyAllSockets();
+ config.notifyAbort();
+ return;
+ }
+
+ totalReplied += bytesReceived;
+
+ if (totalReplied < totalSize) {
+ return;
+ }
+
+ calculateAndLogResult(config, size, benchmark.startTimeInMs, totalSize,
+ isWarmUp);
+
+ runNextTask(config);
+ };
+
+ for (var i = 0; i < sockets.length; ++i) {
+ var socket = sockets[i];
+ socket.binaryType = 'arraybuffer';
+ socket.onmessage = onMessageHandler;
+ }
+
+ benchmark.startTimeInMs = getTimeStamp();
+
+ var socketIndex = 0;
+ while (totalSize < config.minTotal) {
+ sockets[socketIndex].send('receive ' + size);
+ socketIndex = (socketIndex + 1) % sockets.length;
+
+ totalSize += size;
+ }
+}
+
+function createSocket(config) {
+ // TODO(tyoshino): Add TCP warm up.
+ var url = config.prefixUrl;
+
+ config.addToLog('Connect ' + url);
+
+ var socket = new WebSocket(url);
+ socket.onmessage = function(event) {
+ config.addToLog('Unexpected message received. Aborting.');
+ };
+ socket.onerror = function() {
+ config.addToLog('Error');
+ };
+ socket.onclose = function(event) {
+ config.addToLog('Closed');
+ config.notifyAbort();
+ };
+ return socket;
+}
+
+function startBenchmark(config) {
+ clearTimeout(timerID);
+ destroyAllSockets();
+
+ numEstablishedSockets = 0;
+
+ for (var i = 0; i < config.numSockets; ++i) {
+ var socket = createSocket(config);
+ socket.onopen = function() {
+ config.addToLog('Opened');
+
+ ++numEstablishedSockets;
+
+ if (numEstablishedSockets == sockets.length) {
+ runNextTask(config);
+ }
+ };
+ sockets.push(socket);
+ }
+}
+
+function getConfigString(config) {
+ return '(WebSocket' +
+ ', ' + (typeof importScripts !== "undefined" ? 'Worker' : 'Main') +
+ ', numSockets=' + config.numSockets +
+ ', numIterations=' + config.numIterations +
+ ', verifyData=' + config.verifyData +
+ ', minTotal=' + config.minTotal +
+ ', numWarmUpIterations=' + config.numWarmUpIterations +
+ ')';
+}
+
+function batchBenchmark(config) {
+ config.addToLog('Batch benchmark');
+ config.addToLog(buildLegendString(config));
+
+ tasks = [];
+ clearAverageData();
+ addTasks(config, sendBenchmarkStep);
+ addResultReportingTask(config, 'Send Benchmark ' + getConfigString(config));
+ addTasks(config, receiveBenchmarkStep);
+ addResultReportingTask(config, 'Receive Benchmark ' +
+ getConfigString(config));
+ startBenchmark(config);
+}
+
+function cleanup() {
+ destroyAllSockets();
+}
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/benchmark_helper_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/benchmark_helper_wsh.py
new file mode 100644
index 0000000000..fc17533335
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/benchmark_helper_wsh.py
@@ -0,0 +1,84 @@
+# Copyright 2013, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Handler for benchmark.html."""
+from __future__ import absolute_import
+import six
+
+
+def web_socket_do_extra_handshake(request):
+ # Turn off compression.
+ request.ws_extension_processors = []
+
+
+def web_socket_transfer_data(request):
+ data = b''
+
+ while True:
+ command = request.ws_stream.receive_message()
+ if command is None:
+ return
+
+ if not isinstance(command, six.text_type):
+            raise ValueError('Invalid command data: ' + command)
+ commands = command.split(' ')
+ if len(commands) == 0:
+ raise ValueError('Invalid command data: ' + command)
+
+ if commands[0] == 'receive':
+ if len(commands) != 2:
+ raise ValueError(
+                    'Illegal number of arguments for receive command: ' + command)
+ size = int(commands[1])
+
+ # Reuse data if possible.
+ if len(data) != size:
+ data = b'a' * size
+ request.ws_stream.send_message(data, binary=True)
+ elif commands[0] == 'send':
+ if len(commands) != 2:
+ raise ValueError(
+                    'Illegal number of arguments for send command: ' +
+ command)
+ verify_data = commands[1] == '1'
+
+ data = request.ws_stream.receive_message()
+ if data is None:
+ raise ValueError('Payload not received')
+ size = len(data)
+
+ if verify_data:
+ if data != b'a' * size:
+ raise ValueError('Payload verification failed')
+
+ request.ws_stream.send_message(str(size))
+ else:
+ raise ValueError('Invalid command: ' + commands[0])
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/cgi-bin/hi.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/cgi-bin/hi.py
new file mode 100755
index 0000000000..f136f2c442
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/cgi-bin/hi.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python
+
+print('Content-Type: text/plain')
+print('')
+print('Hi from hi.py')
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/close_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/close_wsh.py
new file mode 100644
index 0000000000..8f0005ffea
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/close_wsh.py
@@ -0,0 +1,70 @@
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import
+import struct
+
+from mod_pywebsocket import common
+from mod_pywebsocket import stream
+
+
+def web_socket_do_extra_handshake(request):
+ pass
+
+
+def web_socket_transfer_data(request):
+ while True:
+ line = request.ws_stream.receive_message()
+ if line is None:
+ return
+ code, reason = line.split(' ', 1)
+ if code is None or reason is None:
+ return
+ request.ws_stream.close_connection(int(code), reason)
+        # close_connection() initiates the closing handshake. It validates the
+        # code and reason. If you want to send a broken close frame for a
+        # test, the following code will be useful.
+        # > data = struct.pack('!H', int(code)) + reason.encode('UTF-8')
+        # > request.connection.write(stream.create_close_frame(data))
+        # > # Suppress re-responding to the client's close frame.
+ # > raise Exception("customized server initiated closing handshake")
+
+
+def web_socket_passive_closing_handshake(request):
+ # Simply echo a close status code
+ code, reason = request.ws_close_code, request.ws_close_reason
+
+    # pywebsocket sets a pseudo status code when it receives a close frame
+    # with an empty body.
+ if code == common.STATUS_NO_STATUS_RECEIVED:
+ code = None
+ reason = ''
+ return code, reason
+
+
+# vi:sts=4 sw=4 et
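
The commented-out snippet in web_socket_transfer_data above shows how to bypass close_connection() and write a raw close frame directly. A hedged, self-contained sketch based on that comment, assuming the client sends a "<code> <reason>" text message (handler name and message format are illustrative):

```python
# Sketch of a handler that writes a raw close frame instead of calling
# close_connection(), following the commented-out example in close_wsh.py.
from __future__ import absolute_import
import struct

from mod_pywebsocket import stream


def web_socket_do_extra_handshake(request):
    pass


def web_socket_transfer_data(request):
    line = request.ws_stream.receive_message()
    if line is None:
        return
    code, reason = line.split(' ', 1)
    # Build the close frame body: a 2-byte status code followed by the reason.
    data = struct.pack('!H', int(code)) + reason.encode('UTF-8')
    request.connection.write(stream.create_close_frame(data))
    # Suppress re-responding to the client's close frame.
    raise Exception('customized server initiated closing handshake')
```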
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/console.html b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/console.html
new file mode 100644
index 0000000000..ccd6d8f806
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/console.html
@@ -0,0 +1,317 @@
+<!--
+Copyright 2011, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+-->
+
+<!--
+A simple console for testing a WebSocket server.
+
+Type an address into the top text input and click connect to establish a
+WebSocket connection. Then type a message into the bottom text input and click
+send to send it. Received/sent messages and the connection state are shown
+in the middle textarea.
+-->
+
+<html>
+<head>
+<title>WebSocket console</title>
+<script>
+var socket = null;
+
+var showTimeStamp = false;
+
+var addressBox = null;
+var protocolsBox = null;
+var logBox = null;
+var messageBox = null;
+var fileBox = null;
+var codeBox = null;
+var reasonBox = null;
+
+function getTimeStamp() {
+ return new Date().getTime();
+}
+
+function addToLog(log) {
+ if (showTimeStamp) {
+ logBox.value += '[' + getTimeStamp() + '] ';
+ }
+ logBox.value += log + '\n'
+ // Large enough to keep showing the latest message.
+ logBox.scrollTop = 1000000;
+}
+
+function setbinarytype(binaryType) {
+ if (!socket) {
+ addToLog('Not connected');
+ return;
+ }
+
+ socket.binaryType = binaryType;
+ addToLog('Set binaryType to ' + binaryType);
+}
+
+function send() {
+ if (!socket) {
+ addToLog('Not connected');
+ return;
+ }
+
+ socket.send(messageBox.value);
+ addToLog('> ' + messageBox.value);
+ messageBox.value = '';
+}
+
+function sendfile() {
+ if (!socket) {
+ addToLog('Not connected');
+ return;
+ }
+
+ var files = fileBox.files;
+
+ if (files.length == 0) {
+ addToLog('File not selected');
+ return;
+ }
+
+ socket.send(files[0]);
+ addToLog('> Send ' + files[0].name);
+}
+
+function parseProtocols(protocolsText) {
+ var protocols = protocolsText.split(',');
+ for (var i = 0; i < protocols.length; ++i) {
+ protocols[i] = protocols[i].trim();
+ }
+
+ if (protocols.length == 0) {
+ // Don't pass.
+ protocols = null;
+ } else if (protocols.length == 1) {
+ if (protocols[0].length == 0) {
+ // Don't pass.
+ protocols = null;
+ } else {
+ // Pass as a string.
+ protocols = protocols[0];
+ }
+ }
+
+ return protocols;
+}
+
+function connect() {
+ var url = addressBox.value;
+ var protocols = parseProtocols(protocolsBox.value);
+
+ if ('WebSocket' in window) {
+ if (protocols) {
+ socket = new WebSocket(url, protocols);
+ } else {
+ socket = new WebSocket(url);
+ }
+ } else {
+ return;
+ }
+
+ socket.onopen = function () {
+ var extraInfo = [];
+ if (('protocol' in socket) && socket.protocol) {
+ extraInfo.push('protocol = ' + socket.protocol);
+ }
+ if (('extensions' in socket) && socket.extensions) {
+ extraInfo.push('extensions = ' + socket.extensions);
+ }
+
+ var logMessage = 'Opened';
+ if (extraInfo.length > 0) {
+ logMessage += ' (' + extraInfo.join(', ') + ')';
+ }
+ addToLog(logMessage);
+ };
+ socket.onmessage = function (event) {
+ if (('ArrayBuffer' in window) && (event.data instanceof ArrayBuffer)) {
+ addToLog('< Received an ArrayBuffer of ' + event.data.byteLength +
+ ' bytes')
+ } else if (('Blob' in window) && (event.data instanceof Blob)) {
+ addToLog('< Received a Blob of ' + event.data.size + ' bytes')
+ } else {
+ addToLog('< ' + event.data);
+ }
+ };
+ socket.onerror = function () {
+ addToLog('Error');
+ };
+ socket.onclose = function (event) {
+ var logMessage = 'Closed (';
+ if ((arguments.length == 1) && ('CloseEvent' in window) &&
+ (event instanceof CloseEvent)) {
+ logMessage += 'wasClean = ' + event.wasClean;
+ // code and reason are present only for
+ // draft-ietf-hybi-thewebsocketprotocol-06 and later
+ if ('code' in event) {
+ logMessage += ', code = ' + event.code;
+ }
+ if ('reason' in event) {
+ logMessage += ', reason = ' + event.reason;
+ }
+ } else {
+ logMessage += 'CloseEvent is not available';
+ }
+ addToLog(logMessage + ')');
+ };
+
+ if (protocols) {
+ addToLog('Connect ' + url + ' (protocols = ' + protocols + ')');
+ } else {
+ addToLog('Connect ' + url);
+ }
+}
+
+function closeSocket() {
+ if (!socket) {
+ addToLog('Not connected');
+ return;
+ }
+
+ if (codeBox.value || reasonBox.value) {
+ socket.close(codeBox.value, reasonBox.value);
+ } else {
+ socket.close();
+ }
+}
+
+function printState() {
+ if (!socket) {
+ addToLog('Not connected');
+ return;
+ }
+
+ addToLog(
+ 'url = ' + socket.url +
+ ', readyState = ' + socket.readyState +
+ ', bufferedAmount = ' + socket.bufferedAmount);
+}
+
+function init() {
+ var scheme = window.location.protocol == 'https:' ? 'wss://' : 'ws://';
+ var defaultAddress = scheme + window.location.host + '/echo';
+
+ addressBox = document.getElementById('address');
+ protocolsBox = document.getElementById('protocols');
+ logBox = document.getElementById('log');
+ messageBox = document.getElementById('message');
+ fileBox = document.getElementById('file');
+ codeBox = document.getElementById('code');
+ reasonBox = document.getElementById('reason');
+
+ addressBox.value = defaultAddress;
+
+ if (!('WebSocket' in window)) {
+ addToLog('WebSocket is not available');
+ }
+}
+</script>
+<style type="text/css">
+form {
+ margin: 0px;
+}
+
+#connect_div, #log_div, #send_div, #sendfile_div, #close_div, #printstate_div {
+ padding: 5px;
+ margin: 5px;
+ border-width: 0px 0px 0px 10px;
+ border-style: solid;
+ border-color: silver;
+}
+</style>
+</head>
+<body onload="init()">
+
+<div>
+
+<div id="connect_div">
+ <form action="#" onsubmit="connect(); return false;">
+ url <input type="text" id="address" size="40">
+ <input type="submit" value="connect">
+ <br/>
+ protocols <input type="text" id="protocols" size="20">
+ </form>
+</div>
+
+<div id="log_div">
+ <textarea id="log" rows="10" cols="40" readonly></textarea>
+ <br/>
+ <input type="checkbox"
+ name="showtimestamp"
+ value="showtimestamp"
+ onclick="showTimeStamp = this.checked">Show time stamp
+</div>
+
+<div id="send_div">
+ <form action="#" onsubmit="send(); return false;">
+ data <input type="text" id="message" size="40">
+ <input type="submit" value="send">
+ </form>
+</div>
+
+<div id="sendfile_div">
+ <form action="#" onsubmit="sendfile(); return false;">
+ <input type="file" id="file" size="40">
+ <input type="submit" value="send file">
+ </form>
+
+ Set binaryType
+ <input type="radio"
+ name="binarytype"
+ value="blob"
+ onclick="setbinarytype('blob')" checked>blob
+ <input type="radio"
+ name="binarytype"
+ value="arraybuffer"
+ onclick="setbinarytype('arraybuffer')">arraybuffer
+</div>
+
+<div id="close_div">
+ <form action="#" onsubmit="closeSocket(); return false;">
+ code <input type="text" id="code" size="10">
+ reason <input type="text" id="reason" size="20">
+ <input type="submit" value="close">
+ </form>
+</div>
+
+<div id="printstate_div">
+ <input type="button" value="print state" onclick="printState();">
+</div>
+
+</div>
+
+</body>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/cookie_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/cookie_wsh.py
new file mode 100644
index 0000000000..815209694e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/cookie_wsh.py
@@ -0,0 +1,54 @@
+# Copyright 2020 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import
+from six.moves import urllib
+
+
+def _add_set_cookie(request, value):
+ request.extra_headers.append(('Set-Cookie', value))
+
+
+def web_socket_do_extra_handshake(request):
+ components = urllib.parse.urlparse(request.uri)
+ command = components[4]
+
+ ONE_DAY_LIFE = 'Max-Age=86400'
+
+ if command == 'set':
+ _add_set_cookie(request, '; '.join(['foo=bar', ONE_DAY_LIFE]))
+ elif command == 'set_httponly':
+ _add_set_cookie(
+ request, '; '.join(['httpOnlyFoo=bar', ONE_DAY_LIFE, 'httpOnly']))
+ elif command == 'clear':
+ _add_set_cookie(request, 'foo=0; Max-Age=0')
+ _add_set_cookie(request, 'httpOnlyFoo=0; Max-Age=0')
+
+
+def web_socket_transfer_data(request):
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/echo_client.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/echo_client.py
new file mode 100755
index 0000000000..2ed60b3b59
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/echo_client.py
@@ -0,0 +1,699 @@
+#!/usr/bin/env python
+#
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Simple WebSocket client named echo_client just because of historical reason.
+
+mod_pywebsocket directory must be in PYTHONPATH.
+
+Example Usage:
+
+# server setup
+ % cd $pywebsocket
+ % PYTHONPATH=$cwd/src python ./mod_pywebsocket/standalone.py -p 8880 \
+ -d $cwd/src/example
+
+# run client
+ % PYTHONPATH=$cwd/src python ./src/example/echo_client.py -p 8880 \
+ -s localhost \
+ -o http://localhost -r /echo -m test
+"""
+
+from __future__ import absolute_import
+from __future__ import print_function
+import base64
+import codecs
+from hashlib import sha1
+import logging
+import argparse
+import os
+import random
+import re
+import six
+import socket
+import ssl
+import struct
+import sys
+
+from mod_pywebsocket import common
+from mod_pywebsocket.extensions import PerMessageDeflateExtensionProcessor
+from mod_pywebsocket.extensions import _PerMessageDeflateFramer
+from mod_pywebsocket.extensions import _parse_window_bits
+from mod_pywebsocket.stream import Stream
+from mod_pywebsocket.stream import StreamOptions
+from mod_pywebsocket import util
+
+_TIMEOUT_SEC = 10
+_UNDEFINED_PORT = -1
+
+_UPGRADE_HEADER = 'Upgrade: websocket\r\n'
+_CONNECTION_HEADER = 'Connection: Upgrade\r\n'
+
+# Special message that tells the echo server to start closing handshake
+_GOODBYE_MESSAGE = 'Goodbye'
+
+_PROTOCOL_VERSION_HYBI13 = 'hybi13'
+
+
+class ClientHandshakeError(Exception):
+ pass
+
+
+def _build_method_line(resource):
+ return 'GET %s HTTP/1.1\r\n' % resource
+
+
+def _origin_header(header, origin):
+ # 4.1 13. concatenation of the string "Origin:", a U+0020 SPACE character,
+ # and the /origin/ value, converted to ASCII lowercase, to /fields/.
+ return '%s: %s\r\n' % (header, origin.lower())
+
+
+def _format_host_header(host, port, secure):
+ # 4.1 9. Let /hostport/ be an empty string.
+ # 4.1 10. Append the /host/ value, converted to ASCII lowercase, to
+ # /hostport/
+ hostport = host.lower()
+ # 4.1 11. If /secure/ is false, and /port/ is not 80, or if /secure/
+ # is true, and /port/ is not 443, then append a U+003A COLON character
+ # (:) followed by the value of /port/, expressed as a base-ten integer,
+ # to /hostport/
+ if ((not secure and port != common.DEFAULT_WEB_SOCKET_PORT)
+ or (secure and port != common.DEFAULT_WEB_SOCKET_SECURE_PORT)):
+ hostport += ':' + str(port)
+ # 4.1 12. concatenation of the string "Host:", a U+0020 SPACE
+ # character, and /hostport/, to /fields/.
+ return '%s: %s\r\n' % (common.HOST_HEADER, hostport)
+
+
+def _receive_bytes(socket, length):
+ recv_bytes = []
+ remaining = length
+ while remaining > 0:
+ received_bytes = socket.recv(remaining)
+ if not received_bytes:
+ raise IOError(
+ 'Connection closed before receiving requested length '
+ '(requested %d bytes but received only %d bytes)' %
+ (length, length - remaining))
+ recv_bytes.append(received_bytes)
+ remaining -= len(received_bytes)
+ return b''.join(recv_bytes)
+
+
+def _get_mandatory_header(fields, name):
+ """Gets the value of the header specified by name from fields.
+
+ This function expects that there's only one header with the specified name
+    in fields. Otherwise, it raises a ClientHandshakeError.
+ """
+
+ values = fields.get(name.lower())
+ if values is None or len(values) == 0:
+ raise ClientHandshakeError('%s header not found: %r' % (name, values))
+ if len(values) > 1:
+ raise ClientHandshakeError('Multiple %s headers found: %r' %
+ (name, values))
+ return values[0]
+
+
+def _validate_mandatory_header(fields,
+ name,
+ expected_value,
+ case_sensitive=False):
+ """Gets and validates the value of the header specified by name from
+ fields.
+
+    If expected_value is specified, compares the expected and actual values
+    and raises a ClientHandshakeError on failure. Case sensitivity of the
+    comparison is controlled by the case_sensitive parameter. This function
+    expects that there's only one header with the specified name in fields.
+    Otherwise, it raises a ClientHandshakeError.
+ """
+
+ value = _get_mandatory_header(fields, name)
+
+ if ((case_sensitive and value != expected_value) or
+ (not case_sensitive and value.lower() != expected_value.lower())):
+ raise ClientHandshakeError(
+ 'Illegal value for header %s: %r (expected) vs %r (actual)' %
+ (name, expected_value, value))
+
+
+class _TLSSocket(object):
+ """Wrapper for a TLS connection."""
+ def __init__(self, raw_socket):
+ self._logger = util.get_class_logger(self)
+
+ self._tls_socket = ssl.wrap_socket(raw_socket)
+
+ # Print cipher in use. Handshake is done on wrap_socket call.
+ self._logger.info("Cipher: %s", self._tls_socket.cipher())
+
+ def send(self, data):
+ return self._tls_socket.write(data)
+
+ def sendall(self, data):
+ return self._tls_socket.sendall(data)
+
+ def recv(self, size=-1):
+ return self._tls_socket.read(size)
+
+ def close(self):
+ return self._tls_socket.close()
+
+ def getpeername(self):
+ return self._tls_socket.getpeername()
+
+
+class ClientHandshakeBase(object):
+ """A base class for WebSocket opening handshake processors for each
+ protocol version.
+ """
+ def __init__(self):
+ self._logger = util.get_class_logger(self)
+
+ def _read_fields(self):
+ # 4.1 32. let /fields/ be a list of name-value pairs, initially empty.
+ fields = {}
+ while True: # "Field"
+ # 4.1 33. let /name/ and /value/ be empty byte arrays
+ name = b''
+ value = b''
+ # 4.1 34. read /name/
+ name = self._read_name()
+ if name is None:
+ break
+ # 4.1 35. read spaces
+ # TODO(tyoshino): Skip only one space as described in the spec.
+ ch = self._skip_spaces()
+ # 4.1 36. read /value/
+ value = self._read_value(ch)
+ # 4.1 37. read a byte from the server
+ ch = _receive_bytes(self._socket, 1)
+ if ch != b'\n': # 0x0A
+ raise ClientHandshakeError(
+ 'Expected LF but found %r while reading value %r for '
+ 'header %r' % (ch, value, name))
+ self._logger.debug('Received %r header', name)
+ # 4.1 38. append an entry to the /fields/ list that has the name
+ # given by the string obtained by interpreting the /name/ byte
+ # array as a UTF-8 stream and the value given by the string
+ # obtained by interpreting the /value/ byte array as a UTF-8 byte
+ # stream.
+ fields.setdefault(name.decode('UTF-8'),
+ []).append(value.decode('UTF-8'))
+ # 4.1 39. return to the "Field" step above
+ return fields
+
+ def _read_name(self):
+ # 4.1 33. let /name/ be empty byte arrays
+ name = b''
+ while True:
+ # 4.1 34. read a byte from the server
+ ch = _receive_bytes(self._socket, 1)
+ if ch == b'\r': # 0x0D
+ return None
+ elif ch == b'\n': # 0x0A
+ raise ClientHandshakeError(
+ 'Unexpected LF when reading header name %r' % name)
+ elif ch == b':': # 0x3A
+ return name.lower()
+ else:
+ name += ch
+
+ def _skip_spaces(self):
+ # 4.1 35. read a byte from the server
+ while True:
+ ch = _receive_bytes(self._socket, 1)
+ if ch == b' ': # 0x20
+ continue
+ return ch
+
+ def _read_value(self, ch):
+ # 4.1 33. let /value/ be empty byte arrays
+ value = b''
+ # 4.1 36. read a byte from server.
+ while True:
+ if ch == b'\r': # 0x0D
+ return value
+ elif ch == b'\n': # 0x0A
+ raise ClientHandshakeError(
+ 'Unexpected LF when reading header value %r' % value)
+ else:
+ value += ch
+ ch = _receive_bytes(self._socket, 1)
+
+
+def _get_permessage_deflate_framer(extension_response):
+ """Validate the response and return a framer object using the parameters in
+ the response. This method doesn't accept the server_.* parameters.
+ """
+
+ client_max_window_bits = None
+ client_no_context_takeover = None
+
+ client_max_window_bits_name = (
+ PerMessageDeflateExtensionProcessor._CLIENT_MAX_WINDOW_BITS_PARAM)
+ client_no_context_takeover_name = (
+ PerMessageDeflateExtensionProcessor._CLIENT_NO_CONTEXT_TAKEOVER_PARAM)
+
+ # We didn't send any server_.* parameter.
+ # Handle those parameters as invalid if found in the response.
+
+ for param_name, param_value in extension_response.get_parameters():
+ if param_name == client_max_window_bits_name:
+ if client_max_window_bits is not None:
+ raise ClientHandshakeError('Multiple %s found' %
+ client_max_window_bits_name)
+
+ parsed_value = _parse_window_bits(param_value)
+ if parsed_value is None:
+ raise ClientHandshakeError(
+ 'Bad %s: %r' % (client_max_window_bits_name, param_value))
+ client_max_window_bits = parsed_value
+ elif param_name == client_no_context_takeover_name:
+ if client_no_context_takeover is not None:
+ raise ClientHandshakeError('Multiple %s found' %
+ client_no_context_takeover_name)
+
+ if param_value is not None:
+ raise ClientHandshakeError(
+ 'Bad %s: Has value %r' %
+ (client_no_context_takeover_name, param_value))
+ client_no_context_takeover = True
+
+ if client_no_context_takeover is None:
+ client_no_context_takeover = False
+
+ return _PerMessageDeflateFramer(client_max_window_bits,
+ client_no_context_takeover)
+
+
+class ClientHandshakeProcessor(ClientHandshakeBase):
+ """WebSocket opening handshake processor
+ """
+ def __init__(self, socket, options):
+ super(ClientHandshakeProcessor, self).__init__()
+
+ self._socket = socket
+ self._options = options
+
+ self._logger = util.get_class_logger(self)
+
+ def handshake(self):
+ """Performs opening handshake on the specified socket.
+
+ Raises:
+ ClientHandshakeError: handshake failed.
+ """
+
+ request_line = _build_method_line(self._options.resource)
+ self._logger.debug('Client\'s opening handshake Request-Line: %r',
+ request_line)
+ self._socket.sendall(request_line.encode('UTF-8'))
+
+ fields = []
+ fields.append(
+ _format_host_header(self._options.server_host,
+ self._options.server_port,
+ self._options.use_tls))
+ fields.append(_UPGRADE_HEADER)
+ fields.append(_CONNECTION_HEADER)
+ if self._options.origin is not None:
+ fields.append(
+ _origin_header(common.ORIGIN_HEADER, self._options.origin))
+
+ original_key = os.urandom(16)
+ self._key = base64.b64encode(original_key)
+ self._logger.debug('%s: %r (%s)', common.SEC_WEBSOCKET_KEY_HEADER,
+ self._key, util.hexify(original_key))
+ fields.append(
+ '%s: %s\r\n' %
+ (common.SEC_WEBSOCKET_KEY_HEADER, self._key.decode('UTF-8')))
+
+ fields.append(
+ '%s: %d\r\n' %
+ (common.SEC_WEBSOCKET_VERSION_HEADER, common.VERSION_HYBI_LATEST))
+
+ extensions_to_request = []
+
+ if self._options.use_permessage_deflate:
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ # Accept the client_max_window_bits extension parameter by default.
+ extension.add_parameter(
+ PerMessageDeflateExtensionProcessor.
+ _CLIENT_MAX_WINDOW_BITS_PARAM, None)
+ extensions_to_request.append(extension)
+
+ if len(extensions_to_request) != 0:
+ fields.append('%s: %s\r\n' %
+ (common.SEC_WEBSOCKET_EXTENSIONS_HEADER,
+ common.format_extensions(extensions_to_request)))
+
+ for field in fields:
+ self._socket.sendall(field.encode('UTF-8'))
+
+ self._socket.sendall(b'\r\n')
+
+ self._logger.debug('Sent client\'s opening handshake headers: %r',
+ fields)
+ self._logger.debug('Start reading Status-Line')
+
+ status_line = b''
+ while True:
+ ch = _receive_bytes(self._socket, 1)
+ status_line += ch
+ if ch == b'\n':
+ break
+
+        m = re.match(b'HTTP/\\d+\\.\\d+ (\\d\\d\\d) .*\r\n', status_line)
+ if m is None:
+ raise ClientHandshakeError('Wrong status line format: %r' %
+ status_line)
+ status_code = m.group(1)
+ if status_code != b'101':
+ self._logger.debug(
+ 'Unexpected status code %s with following headers: %r',
+ status_code, self._read_fields())
+ raise ClientHandshakeError(
+ 'Expected HTTP status code 101 but found %r' % status_code)
+
+ self._logger.debug('Received valid Status-Line')
+ self._logger.debug('Start reading headers until we see an empty line')
+
+ fields = self._read_fields()
+
+ ch = _receive_bytes(self._socket, 1)
+ if ch != b'\n': # 0x0A
+            raise ClientHandshakeError(
+                'Expected LF terminating the header section but found %r' %
+                ch)
+
+ self._logger.debug('Received an empty line')
+ self._logger.debug('Server\'s opening handshake headers: %r', fields)
+
+ _validate_mandatory_header(fields, common.UPGRADE_HEADER,
+ common.WEBSOCKET_UPGRADE_TYPE, False)
+
+ _validate_mandatory_header(fields, common.CONNECTION_HEADER,
+ common.UPGRADE_CONNECTION_TYPE, False)
+
+ accept = _get_mandatory_header(fields,
+ common.SEC_WEBSOCKET_ACCEPT_HEADER)
+
+ # Validate
+ try:
+ binary_accept = base64.b64decode(accept)
+        except (TypeError, ValueError):
+            # On Python 3, base64.b64decode reports bad input with
+            # binascii.Error, a subclass of ValueError.
+            raise ClientHandshakeError(
+                'Illegal value for header %s: %r' %
+                (common.SEC_WEBSOCKET_ACCEPT_HEADER, accept))
+
+ if len(binary_accept) != 20:
+ raise ClientHandshakeError(
+ 'Decoded value of %s is not 20-byte long' %
+ common.SEC_WEBSOCKET_ACCEPT_HEADER)
+
+ self._logger.debug('Response for challenge : %r (%s)', accept,
+ util.hexify(binary_accept))
+
+ binary_expected_accept = sha1(self._key +
+ common.WEBSOCKET_ACCEPT_UUID).digest()
+ expected_accept = base64.b64encode(binary_expected_accept)
+
+ self._logger.debug('Expected response for challenge: %r (%s)',
+ expected_accept,
+ util.hexify(binary_expected_accept))
+
+ if accept != expected_accept.decode('UTF-8'):
+ raise ClientHandshakeError(
+ 'Invalid %s header: %r (expected: %s)' %
+ (common.SEC_WEBSOCKET_ACCEPT_HEADER, accept, expected_accept))
+
+ permessage_deflate_accepted = False
+
+ extensions_header = fields.get(
+ common.SEC_WEBSOCKET_EXTENSIONS_HEADER.lower())
+ accepted_extensions = []
+ if extensions_header is not None and len(extensions_header) != 0:
+ accepted_extensions = common.parse_extensions(extensions_header[0])
+
+ for extension in accepted_extensions:
+ extension_name = extension.name()
+ if (extension_name == common.PERMESSAGE_DEFLATE_EXTENSION
+ and self._options.use_permessage_deflate):
+ permessage_deflate_accepted = True
+
+ framer = _get_permessage_deflate_framer(extension)
+ framer.set_compress_outgoing_enabled(True)
+ self._options.use_permessage_deflate = framer
+ continue
+
+ raise ClientHandshakeError('Unexpected extension %r' %
+ extension_name)
+
+ if (self._options.use_permessage_deflate
+ and not permessage_deflate_accepted):
+ raise ClientHandshakeError(
+ 'Requested %s, but the server rejected it' %
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+
+ # TODO(tyoshino): Handle Sec-WebSocket-Protocol
+ # TODO(tyoshino): Handle Cookie, etc.
+
+
+class ClientConnection(object):
+ """A wrapper for socket object to provide the mp_conn interface.
+ """
+ def __init__(self, socket):
+ self._socket = socket
+
+ def write(self, data):
+ self._socket.sendall(data)
+
+ def read(self, n):
+ return self._socket.recv(n)
+
+ def get_remote_addr(self):
+ return self._socket.getpeername()
+
+ remote_addr = property(get_remote_addr)
+
+
+class ClientRequest(object):
+ """A wrapper class just to make it able to pass a socket object to
+ functions that expect a mp_request object.
+ """
+ def __init__(self, socket):
+ self._logger = util.get_class_logger(self)
+
+ self._socket = socket
+ self.connection = ClientConnection(socket)
+ self.ws_version = common.VERSION_HYBI_LATEST
+
+
+class EchoClient(object):
+ """WebSocket echo client."""
+ def __init__(self, options):
+ self._options = options
+ self._socket = None
+
+ self._logger = util.get_class_logger(self)
+
+ def run(self):
+ """Run the client.
+
+ Shake hands and then repeat sending message and receiving its echo.
+ """
+
+ self._socket = socket.socket()
+ self._socket.settimeout(self._options.socket_timeout)
+ try:
+ self._socket.connect(
+ (self._options.server_host, self._options.server_port))
+ if self._options.use_tls:
+ self._socket = _TLSSocket(self._socket)
+
+ self._handshake = ClientHandshakeProcessor(self._socket,
+ self._options)
+
+ self._handshake.handshake()
+
+ self._logger.info('Connection established')
+
+ request = ClientRequest(self._socket)
+
+ stream_option = StreamOptions()
+ stream_option.mask_send = True
+ stream_option.unmask_receive = False
+
+ if self._options.use_permessage_deflate is not False:
+ framer = self._options.use_permessage_deflate
+ framer.setup_stream_options(stream_option)
+
+ self._stream = Stream(request, stream_option)
+
+ for line in self._options.message.split(','):
+ self._stream.send_message(line)
+ if self._options.verbose:
+ print('Send: %s' % line)
+ try:
+ received = self._stream.receive_message()
+
+ if self._options.verbose:
+ print('Recv: %s' % received)
+ except Exception as e:
+ if self._options.verbose:
+ print('Error: %s' % e)
+ raise
+
+ self._do_closing_handshake()
+ finally:
+ self._socket.close()
+
+ def _do_closing_handshake(self):
+ """Perform closing handshake using the specified closing frame."""
+
+ if self._options.message.split(',')[-1] == _GOODBYE_MESSAGE:
+ # requested server initiated closing handshake, so
+ # expecting closing handshake message from server.
+ self._logger.info('Wait for server-initiated closing handshake')
+ message = self._stream.receive_message()
+ if message is None:
+ print('Recv close')
+ print('Send ack')
+ self._logger.info('Received closing handshake and sent ack')
+ return
+ print('Send close')
+ self._stream.close_connection()
+ self._logger.info('Sent closing handshake')
+ print('Recv ack')
+ self._logger.info('Received ack')
+
+
+def main():
+ # Force Python 2 to use the locale encoding, even when the output is not a
+ # tty. This makes the behaviour the same as Python 3. The encoding won't
+ # necessarily support all unicode characters. This problem is particularly
+ # prevalent on Windows.
+ if six.PY2:
+ import locale
+ encoding = locale.getpreferredencoding()
+ sys.stdout = codecs.getwriter(encoding)(sys.stdout)
+
+ parser = argparse.ArgumentParser()
+    # In addition to the common --command-line-flag style, we accept
+    # --command_line_flag style flags for compatibility with Google gflags.
+ parser.add_argument('-s',
+ '--server-host',
+ '--server_host',
+ dest='server_host',
+ type=six.text_type,
+ default='localhost',
+ help='server host')
+ parser.add_argument('-p',
+ '--server-port',
+ '--server_port',
+ dest='server_port',
+ type=int,
+ default=_UNDEFINED_PORT,
+ help='server port')
+ parser.add_argument('-o',
+ '--origin',
+ dest='origin',
+ type=six.text_type,
+ default=None,
+ help='origin')
+ parser.add_argument('-r',
+ '--resource',
+ dest='resource',
+ type=six.text_type,
+ default='/echo',
+ help='resource path')
+ parser.add_argument(
+ '-m',
+ '--message',
+ dest='message',
+ type=six.text_type,
+ default=u'Hello,<>',
+ help=('comma-separated messages to send. '
+ '%s will force close the connection from server.' %
+ _GOODBYE_MESSAGE))
+ parser.add_argument('-q',
+ '--quiet',
+ dest='verbose',
+ action='store_false',
+ default=True,
+ help='suppress messages')
+ parser.add_argument('-t',
+ '--tls',
+ dest='use_tls',
+ action='store_true',
+ default=False,
+ help='use TLS (wss://).')
+ parser.add_argument('-k',
+ '--socket-timeout',
+ '--socket_timeout',
+ dest='socket_timeout',
+ type=int,
+ default=_TIMEOUT_SEC,
+ help='Timeout(sec) for sockets')
+ parser.add_argument('--use-permessage-deflate',
+ '--use_permessage_deflate',
+ dest='use_permessage_deflate',
+ action='store_true',
+ default=False,
+ help='Use the permessage-deflate extension.')
+ parser.add_argument('--log-level',
+ '--log_level',
+ type=six.text_type,
+ dest='log_level',
+ default='warn',
+ choices=['debug', 'info', 'warn', 'error', 'critical'],
+ help='Log level.')
+
+ options = parser.parse_args()
+
+ logging.basicConfig(level=logging.getLevelName(options.log_level.upper()))
+
+ # Default port number depends on whether TLS is used.
+ if options.server_port == _UNDEFINED_PORT:
+ if options.use_tls:
+ options.server_port = common.DEFAULT_WEB_SOCKET_SECURE_PORT
+ else:
+ options.server_port = common.DEFAULT_WEB_SOCKET_PORT
+
+ EchoClient(options).run()
+
+
+if __name__ == '__main__':
+ main()
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/echo_noext_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/echo_noext_wsh.py
new file mode 100644
index 0000000000..eba5032218
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/echo_noext_wsh.py
@@ -0,0 +1,62 @@
+# Copyright 2013, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import six
+
+_GOODBYE_MESSAGE = u'Goodbye'
+
+
+def web_socket_do_extra_handshake(request):
+ """Received Sec-WebSocket-Extensions header value is parsed into
+ request.ws_requested_extensions. pywebsocket creates extension
+ processors using it before do_extra_handshake call and never looks at it
+ after the call.
+
+ To reject requested extensions, clear the processor list.
+ """
+
+ request.ws_extension_processors = []
+
+
+def web_socket_transfer_data(request):
+ """Echo. Same as echo_wsh.py."""
+
+ while True:
+ line = request.ws_stream.receive_message()
+ if line is None:
+ return
+ if isinstance(line, six.text_type):
+ request.ws_stream.send_message(line, binary=False)
+ if line == _GOODBYE_MESSAGE:
+ return
+ else:
+ request.ws_stream.send_message(line, binary=True)
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/echo_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/echo_wsh.py
new file mode 100644
index 0000000000..f7b3c6c531
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/echo_wsh.py
@@ -0,0 +1,55 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import six
+
+_GOODBYE_MESSAGE = u'Goodbye'
+
+
+def web_socket_do_extra_handshake(request):
+ # This example handler accepts any request. See origin_check_wsh.py for how
+ # to reject access from untrusted scripts based on origin value.
+
+ pass # Always accept.
+
+
+def web_socket_transfer_data(request):
+ while True:
+ line = request.ws_stream.receive_message()
+ if line is None:
+ return
+ if isinstance(line, six.text_type):
+ request.ws_stream.send_message(line, binary=False)
+ if line == _GOODBYE_MESSAGE:
+ return
+ else:
+ request.ws_stream.send_message(line, binary=True)
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/handler_map.txt b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/handler_map.txt
new file mode 100644
index 0000000000..21c4c09aa0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/handler_map.txt
@@ -0,0 +1,11 @@
+# websocket handler map file, used by standalone.py -m option.
+# A line starting with '#' is a comment line.
+# Each line consists of 'alias_resource_path' and 'existing_resource_path'
+# separated by spaces.
+# Aliasing is processed from the top to the bottom of the file, and
+# 'existing_resource_path' must exist before it is aliased.
+# For example,
+# / /echo
+# means that a request to '/' will be handled by handlers for '/echo'.
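+# To pass this map to the standalone server, use the -m option mentioned
+# above, for example (illustrative command line):
+#   python mod_pywebsocket/standalone.py -p 8880 -d example -m example/handler_map.txt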
+/ /echo
+
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/hsts_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/hsts_wsh.py
new file mode 100644
index 0000000000..e861946921
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/hsts_wsh.py
@@ -0,0 +1,40 @@
+# Copyright 2013, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+def web_socket_do_extra_handshake(request):
+ request.extra_headers.append(
+ ('Strict-Transport-Security', 'max-age=86400'))
+
+
+def web_socket_transfer_data(request):
+ request.ws_stream.send_message('Hello', binary=False)
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/internal_error_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/internal_error_wsh.py
new file mode 100644
index 0000000000..04aa684283
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/internal_error_wsh.py
@@ -0,0 +1,42 @@
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import
+from mod_pywebsocket import msgutil
+
+
+def web_socket_do_extra_handshake(request):
+ pass
+
+
+def web_socket_transfer_data(request):
+ raise msgutil.BadOperationException('Intentional')
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/origin_check_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/origin_check_wsh.py
new file mode 100644
index 0000000000..e05767ab93
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/origin_check_wsh.py
@@ -0,0 +1,44 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# This example is derived from test/testdata/handlers/origin_check_wsh.py.
+
+
+def web_socket_do_extra_handshake(request):
+ if request.ws_origin == 'http://example.com':
+ return
+ raise ValueError('Unacceptable origin: %r' % request.ws_origin)
+
+
+def web_socket_transfer_data(request):
+ request.connection.write('origin_check_wsh.py is called for %s, %s' %
+ (request.ws_resource, request.ws_protocol))
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/performance_test_iframe.html b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/performance_test_iframe.html
new file mode 100644
index 0000000000..c18b2c08f6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/performance_test_iframe.html
@@ -0,0 +1,37 @@
+<!--
+Copyright 2020, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+-->
+
+<!DOCTYPE html>
+<head>
+<script src="util.js"></script>
+<script src="performance_test_iframe.js"></script>
+<script src="benchmark.js"></script>
+</head>
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/performance_test_iframe.js b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/performance_test_iframe.js
new file mode 100644
index 0000000000..270409aa6e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/performance_test_iframe.js
@@ -0,0 +1,86 @@
+// Copyright 2020, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+function perfTestAddToLog(text) {
+ parent.postMessage({'command': 'log', 'value': text}, '*');
+}
+
+function perfTestAddToSummary(text) {
+}
+
+function perfTestMeasureValue(value) {
+ parent.postMessage({'command': 'measureValue', 'value': value}, '*');
+}
+
+function perfTestNotifyAbort() {
+ parent.postMessage({'command': 'notifyAbort'}, '*');
+}
+
+function getConfigForPerformanceTest(dataType, async,
+ verifyData, numIterations,
+ numWarmUpIterations) {
+
+ return {
+ prefixUrl: 'ws://' + location.host + '/benchmark_helper',
+ printSize: true,
+ numSockets: 1,
+ // + 1 is for a warmup iteration by the Telemetry framework.
+ numIterations: numIterations + numWarmUpIterations + 1,
+ numWarmUpIterations: numWarmUpIterations,
+ minTotal: 10240000,
+ startSize: 10240000,
+ stopThreshold: 10240000,
+ multipliers: [2],
+ verifyData: verifyData,
+ dataType: dataType,
+ async: async,
+ addToLog: perfTestAddToLog,
+ addToSummary: perfTestAddToSummary,
+ measureValue: perfTestMeasureValue,
+ notifyAbort: perfTestNotifyAbort
+ };
+}
+
+var data;
+onmessage = function(message) {
+ var action;
+ if (message.data.command === 'start') {
+ data = message.data;
+ initWorker('http://' + location.host);
+ action = data.benchmarkName;
+ } else {
+ action = 'stop';
+ }
+
+ var config = getConfigForPerformanceTest(data.dataType, data.async,
+ data.verifyData,
+ data.numIterations,
+ data.numWarmUpIterations);
+ doAction(config, data.isWorker, action);
+};
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/special_headers.cgi b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/special_headers.cgi
new file mode 100755
index 0000000000..703cb7401b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/special_headers.cgi
@@ -0,0 +1,26 @@
+#!/usr/bin/python
+
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Use of this source code is governed by a BSD-style
+# license that can be found in the COPYING file or at
+# https://developers.google.com/open-source/licenses/bsd
+"""CGI script sample for testing effect of HTTP headers on the origin page.
+
+Note that CGI scripts don't work on the standalone pywebsocket running in TLS
+mode.
+"""
+
+print """Content-type: text/html
+Content-Security-Policy: connect-src self
+
+<html>
+<head>
+<title></title>
+</head>
+<body>
+<script>
+var socket = new WebSocket("ws://example.com");
+</script>
+</body>
+</html>"""
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/util.js b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/util.js
new file mode 100644
index 0000000000..990160cb40
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/util.js
@@ -0,0 +1,323 @@
+// Copyright 2013, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+// Utilities for example applications (for both main and worker thread).
+
+var results = {};
+
+function getTimeStamp() {
+ return Date.now();
+}
+
+function formatResultInKiB(size, timePerMessageInMs, stddevTimePerMessageInMs,
+ speed, printSize) {
+ if (printSize) {
+ return (size / 1024) +
+ '\t' + timePerMessageInMs.toFixed(3) +
+ (stddevTimePerMessageInMs == -1 ?
+ '' :
+ '\t' + stddevTimePerMessageInMs.toFixed(3)) +
+ '\t' + speed.toFixed(3);
+ } else {
+ return speed.toString();
+ }
+}
+
+function clearAverageData() {
+ results = {};
+}
+
+function reportAverageData(config) {
+ config.addToSummary(
+ 'Size[KiB]\tAverage time[ms]\tStddev time[ms]\tSpeed[KB/s]');
+ for (var size in results) {
+ var averageTimePerMessageInMs = results[size].sum_t / results[size].n;
+ var speed = calculateSpeedInKB(size, averageTimePerMessageInMs);
+ // Calculate sample standard deviation
+ var stddevTimePerMessageInMs = Math.sqrt(
+ (results[size].sum_t2 / results[size].n -
+ averageTimePerMessageInMs * averageTimePerMessageInMs) *
+ results[size].n /
+ (results[size].n - 1));
+ config.addToSummary(formatResultInKiB(
+ size, averageTimePerMessageInMs, stddevTimePerMessageInMs, speed,
+ true));
+ }
+}
+
+function calculateSpeedInKB(size, timeSpentInMs) {
+ return Math.round(size / timeSpentInMs * 1000) / 1000;
+}
+
+function calculateAndLogResult(config, size, startTimeInMs, totalSize,
+ isWarmUp) {
+ var timeSpentInMs = getTimeStamp() - startTimeInMs;
+ var speed = calculateSpeedInKB(totalSize, timeSpentInMs);
+ var timePerMessageInMs = timeSpentInMs / (totalSize / size);
+ if (!isWarmUp) {
+ config.measureValue(timePerMessageInMs);
+ if (!results[size]) {
+ results[size] = {n: 0, sum_t: 0, sum_t2: 0};
+ }
+ results[size].n ++;
+ results[size].sum_t += timePerMessageInMs;
+ results[size].sum_t2 += timePerMessageInMs * timePerMessageInMs;
+ }
+ config.addToLog(formatResultInKiB(size, timePerMessageInMs, -1, speed,
+ config.printSize));
+}
+
+function repeatString(str, count) {
+ var data = '';
+ var expChunk = str;
+ var remain = count;
+ while (true) {
+ if (remain % 2) {
+ data += expChunk;
+ remain = (remain - 1) / 2;
+ } else {
+ remain /= 2;
+ }
+
+ if (remain == 0)
+ break;
+
+ expChunk = expChunk + expChunk;
+ }
+ return data;
+}
+
+function fillArrayBuffer(buffer, c) {
+ var i;
+
+ var u32Content = c * 0x01010101;
+
+ var u32Blocks = Math.floor(buffer.byteLength / 4);
+ var u32View = new Uint32Array(buffer, 0, u32Blocks);
+ // length attribute is slow on Chrome. Don't use it for loop condition.
+ for (i = 0; i < u32Blocks; ++i) {
+ u32View[i] = u32Content;
+ }
+
+ // Fraction
+ var u8Blocks = buffer.byteLength - u32Blocks * 4;
+ var u8View = new Uint8Array(buffer, u32Blocks * 4, u8Blocks);
+ for (i = 0; i < u8Blocks; ++i) {
+ u8View[i] = c;
+ }
+}
+
+function verifyArrayBuffer(buffer, expectedChar) {
+ var i;
+
+ var expectedU32Value = expectedChar * 0x01010101;
+
+ var u32Blocks = Math.floor(buffer.byteLength / 4);
+ var u32View = new Uint32Array(buffer, 0, u32Blocks);
+ for (i = 0; i < u32Blocks; ++i) {
+ if (u32View[i] != expectedU32Value) {
+ return false;
+ }
+ }
+
+ var u8Blocks = buffer.byteLength - u32Blocks * 4;
+ var u8View = new Uint8Array(buffer, u32Blocks * 4, u8Blocks);
+ for (i = 0; i < u8Blocks; ++i) {
+ if (u8View[i] != expectedChar) {
+ return false;
+ }
+ }
+
+ return true;
+}
+
+function verifyBlob(config, blob, expectedChar, doneCallback) {
+  var reader = new FileReader();
+ reader.onerror = function() {
+ config.addToLog('FileReader Error: ' + reader.error.message);
+ doneCallback(blob.size, false);
+ }
+ reader.onloadend = function() {
+ var result = verifyArrayBuffer(reader.result, expectedChar);
+ doneCallback(blob.size, result);
+ }
+ reader.readAsArrayBuffer(blob);
+}
+
+function verifyAcknowledgement(config, message, size) {
+ if (typeof message != 'string') {
+ config.addToLog('Invalid ack type: ' + typeof message);
+ return false;
+ }
+ var parsedAck = parseInt(message);
+ if (isNaN(parsedAck)) {
+ config.addToLog('Invalid ack value: ' + message);
+ return false;
+ }
+ if (parsedAck != size) {
+ config.addToLog(
+ 'Expected ack for ' + size + 'B but received one for ' + parsedAck +
+ 'B');
+ return false;
+ }
+
+ return true;
+}
+
+function cloneConfig(obj) {
+ var newObj = {};
+  for (var key in obj) {
+ newObj[key] = obj[key];
+ }
+ return newObj;
+}
+
+var tasks = [];
+
+function runNextTask(config) {
+ var task = tasks.shift();
+ if (task == undefined) {
+ config.addToLog('Finished');
+ cleanup();
+ return;
+ }
+ timerID = setTimeout(task, 0);
+}
+
+function buildLegendString(config) {
+ var legend = ''
+ if (config.printSize)
+ legend = 'Message size in KiB, Time/message in ms, ';
+ legend += 'Speed in kB/s';
+ return legend;
+}
+
+function addTasks(config, stepFunc) {
+ for (var i = 0;
+ i < config.numWarmUpIterations + config.numIterations; ++i) {
+ var multiplierIndex = 0;
+ for (var size = config.startSize;
+ size <= config.stopThreshold;
+ ++multiplierIndex) {
+ var task = stepFunc.bind(
+ null,
+ size,
+ config,
+ i < config.numWarmUpIterations);
+ tasks.push(task);
+ var multiplier = config.multipliers[
+ multiplierIndex % config.multipliers.length];
+ if (multiplier <= 1) {
+ config.addToLog('Invalid multiplier ' + multiplier);
+ config.notifyAbort();
+ throw new Error('Invalid multipler');
+ }
+ size = Math.ceil(size * multiplier);
+ }
+ }
+}
+
+function addResultReportingTask(config, title) {
+ tasks.push(function(){
+ timerID = null;
+ config.addToSummary(title);
+ reportAverageData(config);
+ clearAverageData();
+ runNextTask(config);
+ });
+}
+
+function sendBenchmark(config) {
+ config.addToLog('Send benchmark');
+ config.addToLog(buildLegendString(config));
+
+ tasks = [];
+ clearAverageData();
+ addTasks(config, sendBenchmarkStep);
+ addResultReportingTask(config, 'Send Benchmark ' + getConfigString(config));
+ startBenchmark(config);
+}
+
+function receiveBenchmark(config) {
+ config.addToLog('Receive benchmark');
+ config.addToLog(buildLegendString(config));
+
+ tasks = [];
+ clearAverageData();
+ addTasks(config, receiveBenchmarkStep);
+ addResultReportingTask(config,
+ 'Receive Benchmark ' + getConfigString(config));
+ startBenchmark(config);
+}
+
+function stop(config) {
+ clearTimeout(timerID);
+ timerID = null;
+ tasks = [];
+ config.addToLog('Stopped');
+ cleanup();
+}
+
+var worker;
+
+function initWorker(origin) {
+ worker = new Worker(origin + '/benchmark.js');
+}
+
+function doAction(config, isWindowToWorker, action) {
+ if (isWindowToWorker) {
+ worker.onmessage = function(addToLog, addToSummary,
+ measureValue, notifyAbort, message) {
+ if (message.data.type === 'addToLog')
+ addToLog(message.data.data);
+ else if (message.data.type === 'addToSummary')
+ addToSummary(message.data.data);
+ else if (message.data.type === 'measureValue')
+ measureValue(message.data.data);
+ else if (message.data.type === 'notifyAbort')
+ notifyAbort();
+ }.bind(undefined, config.addToLog, config.addToSummary,
+ config.measureValue, config.notifyAbort);
+ config.addToLog = undefined;
+ config.addToSummary = undefined;
+ config.measureValue = undefined;
+ config.notifyAbort = undefined;
+ worker.postMessage({type: action, config: config});
+ } else {
+ if (action === 'sendBenchmark')
+ sendBenchmark(config);
+ else if (action === 'receiveBenchmark')
+ receiveBenchmark(config);
+ else if (action === 'batchBenchmark')
+ batchBenchmark(config);
+ else if (action === 'stop')
+ stop(config);
+ }
+}
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/util_main.js b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/util_main.js
new file mode 100644
index 0000000000..78add48731
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/util_main.js
@@ -0,0 +1,89 @@
+// Copyright 2014, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+// Utilities for example applications (for the main thread only).
+
+var logBox = null;
+var queuedLog = '';
+
+var summaryBox = null;
+
+function queueLog(log) {
+ queuedLog += log + '\n';
+}
+
+function addToLog(log) {
+ logBox.value += queuedLog;
+ queuedLog = '';
+ logBox.value += log + '\n';
+ logBox.scrollTop = 1000000;
+}
+
+function addToSummary(log) {
+ summaryBox.value += log + '\n';
+ summaryBox.scrollTop = 1000000;
+}
+
+// value: execution time in milliseconds.
+// config.measureValue is intended to be used in Performance Tests.
+// Do nothing here in non-PerformanceTest.
+function measureValue(value) {
+}
+
+// config.notifyAbort is called when the benchmark failed and aborted, and
+// intended to be used in Performance Tests.
+// Do nothing here in non-PerformanceTest.
+function notifyAbort() {
+}
+
+function getIntFromInput(id) {
+  return parseInt(document.getElementById(id).value, 10);
+}
+
+function getStringFromRadioBox(name) {
+ var list = document.getElementById('benchmark_form')[name];
+ for (var i = 0; i < list.length; ++i)
+ if (list.item(i).checked)
+ return list.item(i).value;
+ return undefined;
+}
+function getBoolFromCheckBox(id) {
+ return document.getElementById(id).checked;
+}
+
+function getIntArrayFromInput(id) {
+ var strArray = document.getElementById(id).value.split(',');
+ return strArray.map(function(str) { return parseInt(str, 10); });
+}
+
+function getFloatArrayFromInput(id) {
+ var strArray = document.getElementById(id).value.split(',');
+ return strArray.map(parseFloat);
+}
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/example/util_worker.js b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/util_worker.js
new file mode 100644
index 0000000000..dd90449a90
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/example/util_worker.js
@@ -0,0 +1,44 @@
+// Copyright 2014, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+// Utilities for example applications (for the worker threads only).
+
+onmessage = function (message) {
+ var config = message.data.config;
+ config.addToLog = function(text) {
+ postMessage({type: 'addToLog', data: text}); };
+ config.addToSummary = function(text) {
+ postMessage({type: 'addToSummary', data: text}); };
+ config.measureValue = function(value) {
+ postMessage({type: 'measureValue', data: value}); };
+ config.notifyAbort = function() { postMessage({type: 'notifyAbort'}); };
+
+ doAction(config, false, message.data.type);
+};
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/__init__.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/__init__.py
new file mode 100644
index 0000000000..28d5f5950f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/__init__.py
@@ -0,0 +1,172 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+""" A Standalone WebSocket Server for testing purposes
+
+mod_pywebsocket is an API that provides WebSocket functionalities with
+a standalone WebSocket server. It is intended for testing or
+experimental purposes.
+
+Installation
+============
+1. Follow the standalone server documentation to start running the
+standalone server. The documentation can be read by running the
+following command:
+
+    $ pydoc mod_pywebsocket.standalone
+
+2. Once the standalone server is launched, verify it by accessing
+http://localhost[:port]/console.html (include the port number if one
+was specified on launch). If everything is working correctly, you
+will see a simple echo console.
+
+
+Writing WebSocket handlers
+==========================
+
+When a WebSocket request comes in, the resource name
+specified in the handshake is treated as a file path under
+<websock_handlers>, and the handler defined in
+<websock_handlers>/<resource_name>_wsh.py is invoked.
+
+For example, if the resource name is /example/chat, the handler defined in
+<websock_handlers>/example/chat_wsh.py is invoked.
+
+A WebSocket handler is composed of the following three functions:
+
+ web_socket_do_extra_handshake(request)
+ web_socket_transfer_data(request)
+ web_socket_passive_closing_handshake(request)
+
+where:
+ request: mod_python request.
+
+web_socket_do_extra_handshake is called during the handshake after the
+headers are successfully parsed and WebSocket properties (ws_origin
+and ws_resource) are added to request. A handler
+can reject the request by raising an exception.
+
+A request object has the following properties that you can use during the
+extra handshake (web_socket_do_extra_handshake):
+- ws_resource
+- ws_origin
+- ws_version
+- ws_extensions
+- ws_deflate
+- ws_protocol
+- ws_requested_protocols
+
+The last two are a bit tricky. See the next subsection.
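+
+As an illustrative sketch only (the handler body is an example, not part
+of the API), a minimal echo handler could look like this:
+
+    def web_socket_do_extra_handshake(request):
+        pass  # Accept the handshake as-is.
+
+    def web_socket_transfer_data(request):
+        while True:
+            message = request.ws_stream.receive_message()
+            if message is None:
+                return
+            # Echo the message back to the client.
+            request.ws_stream.send_message(message)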
+
+
+Subprotocol Negotiation
+-----------------------
+
+ws_protocol is always set to None when
+web_socket_do_extra_handshake is called. If ws_requested_protocols is not
+None, you must choose one subprotocol from this list and set it to
+ws_protocol.
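+
+For example (an illustrative sketch only; 'chat' is an arbitrary
+subprotocol name used just for this example):
+
+    def web_socket_do_extra_handshake(request):
+        if request.ws_requested_protocols is not None:
+            if 'chat' not in request.ws_requested_protocols:
+                raise ValueError('Unsupported subprotocol')
+            request.ws_protocol = 'chat'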
+
+Data Transfer
+-------------
+
+web_socket_transfer_data is called after the handshake completed
+successfully. A handler can receive/send messages from/to the client
+using request. mod_pywebsocket.msgutil module provides utilities
+for data transfer.
+
+You can receive a message with the following statement:
+
+ message = request.ws_stream.receive_message()
+
+This call blocks until a complete text frame arrives, and the payload data
+of the incoming frame is stored in message. When you're using the IETF
+HyBi 00 or later protocol, receive_message() returns None on receiving a
+client-initiated closing handshake. When any error occurs, receive_message()
+raises an exception.
+
+You can send a message with the following statement:
+
+ request.ws_stream.send_message(message)
+
+
+Closing Connection
+------------------
+
+Executing the following statement, or simply returning from
+web_socket_transfer_data, causes the connection to close.
+
+    request.ws_stream.close_connection()
+
+close_connection will wait for a closing handshake acknowledgement
+coming from the client. If it cannot receive a valid acknowledgement,
+it raises an exception.
+
+web_socket_passive_closing_handshake is called immediately after the server
+receives an incoming closing frame from the client peer. You can specify
+the code and reason via the return values; they are sent as an outgoing
+closing frame from the server. A request object has the following
+properties that you can use in web_socket_passive_closing_handshake:
+- ws_close_code
+- ws_close_reason
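+
+For example (an illustrative sketch only):
+
+    from mod_pywebsocket import common
+
+    def web_socket_passive_closing_handshake(request):
+        # Echo the client's close code and reason, falling back to a
+        # normal closure when no status was received.
+        code = request.ws_close_code or common.STATUS_NORMAL_CLOSURE
+        return code, request.ws_close_reason or ''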
+
+
+Threading
+---------
+
+A WebSocket handler must be thread-safe. The standalone
+server uses threads by default.
+
+
+Configuring WebSocket Extension Processors
+------------------------------------------
+
+See extensions.py for supported WebSocket extensions. Note that they are
+unstable and their APIs are subject to change substantially.
+
+A request object has the following extension-processing-related attributes:
+
+- ws_requested_extensions:
+
+ A list of common.ExtensionParameter instances representing extension
+ parameters received from the client in the client's opening handshake.
+ You shouldn't modify it manually.
+
+- ws_extensions:
+
+ A list of common.ExtensionParameter instances representing extension
+ parameters to send back to the client in the server's opening handshake.
+ You shouldn't touch it directly. Instead, call methods on extension
+ processors.
+
+- ws_extension_processors:
+
+ A list of loaded extension processors. Find the processor for the
+ extension you want to configure from it, and call its methods.
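+
+For example, to tweak the permessage-deflate processor during the extra
+handshake (an illustrative sketch only):
+
+    def web_socket_do_extra_handshake(request):
+        for processor in request.ws_extension_processors:
+            # The permessage-deflate processor reports its name as
+            # 'deflate' for compatibility reasons.
+            if processor.name() == 'deflate':
+                processor.set_client_no_context_takeover(True)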
+"""
+
+# vi:sts=4 sw=4 et tw=72
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/_stream_exceptions.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/_stream_exceptions.py
new file mode 100644
index 0000000000..b47878bc4a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/_stream_exceptions.py
@@ -0,0 +1,82 @@
+# Copyright 2020, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Stream Exceptions.
+"""
+
+# Note: request.connection.write/read are used in this module, even though
+# the mod_python documentation says that they should be used only in
+# connection handlers. Unfortunately, we have no other option. For example,
+# request.write/read are not suitable because they don't allow direct raw
+# bytes writing/reading.
+
+
+# Exceptions
+class ConnectionTerminatedException(Exception):
+ """This exception will be raised when a connection is terminated
+ unexpectedly.
+ """
+
+ pass
+
+
+class InvalidFrameException(ConnectionTerminatedException):
+ """This exception will be raised when we received an invalid frame we
+ cannot parse.
+ """
+
+ pass
+
+
+class BadOperationException(Exception):
+ """This exception will be raised when send_message() is called on
+ server-terminated connection or receive_message() is called on
+ client-terminated connection.
+ """
+
+ pass
+
+
+class UnsupportedFrameException(Exception):
+ """This exception will be raised when we receive a frame with flag, opcode
+ we cannot handle. Handlers can just catch and ignore this exception and
+ call receive_message() again to continue processing the next frame.
+ """
+
+ pass
+
+
+class InvalidUTF8Exception(Exception):
+ """This exception will be raised when we receive a text frame which
+ contains invalid UTF-8 strings.
+ """
+
+ pass
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/common.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/common.py
new file mode 100644
index 0000000000..9cb11f15cb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/common.py
@@ -0,0 +1,273 @@
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""This file must not depend on any module specific to the WebSocket protocol.
+"""
+
+from __future__ import absolute_import
+from mod_pywebsocket import http_header_util
+
+# Additional log level definitions.
+LOGLEVEL_FINE = 9
+
+# Constants indicating WebSocket protocol version.
+VERSION_HYBI13 = 13
+VERSION_HYBI14 = 13
+VERSION_HYBI15 = 13
+VERSION_HYBI16 = 13
+VERSION_HYBI17 = 13
+
+# Constants indicating WebSocket protocol latest version.
+VERSION_HYBI_LATEST = VERSION_HYBI13
+
+# Port numbers
+DEFAULT_WEB_SOCKET_PORT = 80
+DEFAULT_WEB_SOCKET_SECURE_PORT = 443
+
+# Schemes
+WEB_SOCKET_SCHEME = 'ws'
+WEB_SOCKET_SECURE_SCHEME = 'wss'
+
+# Frame opcodes defined in the spec.
+OPCODE_CONTINUATION = 0x0
+OPCODE_TEXT = 0x1
+OPCODE_BINARY = 0x2
+OPCODE_CLOSE = 0x8
+OPCODE_PING = 0x9
+OPCODE_PONG = 0xa
+
+# UUID for the opening handshake and frame masking.
+WEBSOCKET_ACCEPT_UUID = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
+
+# Opening handshake header names and expected values.
+UPGRADE_HEADER = 'Upgrade'
+WEBSOCKET_UPGRADE_TYPE = 'websocket'
+CONNECTION_HEADER = 'Connection'
+UPGRADE_CONNECTION_TYPE = 'Upgrade'
+HOST_HEADER = 'Host'
+ORIGIN_HEADER = 'Origin'
+SEC_WEBSOCKET_KEY_HEADER = 'Sec-WebSocket-Key'
+SEC_WEBSOCKET_ACCEPT_HEADER = 'Sec-WebSocket-Accept'
+SEC_WEBSOCKET_VERSION_HEADER = 'Sec-WebSocket-Version'
+SEC_WEBSOCKET_PROTOCOL_HEADER = 'Sec-WebSocket-Protocol'
+SEC_WEBSOCKET_EXTENSIONS_HEADER = 'Sec-WebSocket-Extensions'
+
+# Extensions
+PERMESSAGE_DEFLATE_EXTENSION = 'permessage-deflate'
+
+# Status codes
+# The codes STATUS_NO_STATUS_RECEIVED, STATUS_ABNORMAL_CLOSURE, and
+# STATUS_TLS_HANDSHAKE are pseudo codes that indicate specific error cases.
+# They must not be used as codes in actual closing frames.
+# Application level errors must use codes in the range
+# STATUS_USER_REGISTERED_BASE to STATUS_USER_PRIVATE_MAX. The codes in the
+# range STATUS_USER_REGISTERED_BASE to STATUS_USER_REGISTERED_MAX are managed
+# by IANA. Usually, applications must define user protocol level errors in
+# the range STATUS_USER_PRIVATE_BASE to STATUS_USER_PRIVATE_MAX.
+STATUS_NORMAL_CLOSURE = 1000
+STATUS_GOING_AWAY = 1001
+STATUS_PROTOCOL_ERROR = 1002
+STATUS_UNSUPPORTED_DATA = 1003
+STATUS_NO_STATUS_RECEIVED = 1005
+STATUS_ABNORMAL_CLOSURE = 1006
+STATUS_INVALID_FRAME_PAYLOAD_DATA = 1007
+STATUS_POLICY_VIOLATION = 1008
+STATUS_MESSAGE_TOO_BIG = 1009
+STATUS_MANDATORY_EXTENSION = 1010
+STATUS_INTERNAL_ENDPOINT_ERROR = 1011
+STATUS_TLS_HANDSHAKE = 1015
+STATUS_USER_REGISTERED_BASE = 3000
+STATUS_USER_REGISTERED_MAX = 3999
+STATUS_USER_PRIVATE_BASE = 4000
+STATUS_USER_PRIVATE_MAX = 4999
+# The following definitions are aliases kept for compatibility. Applications
+# must not use these obsolete definitions anymore.
+STATUS_NORMAL = STATUS_NORMAL_CLOSURE
+STATUS_UNSUPPORTED = STATUS_UNSUPPORTED_DATA
+STATUS_CODE_NOT_AVAILABLE = STATUS_NO_STATUS_RECEIVED
+STATUS_ABNORMAL_CLOSE = STATUS_ABNORMAL_CLOSURE
+STATUS_INVALID_FRAME_PAYLOAD = STATUS_INVALID_FRAME_PAYLOAD_DATA
+STATUS_MANDATORY_EXT = STATUS_MANDATORY_EXTENSION
+
+# HTTP status codes
+HTTP_STATUS_BAD_REQUEST = 400
+HTTP_STATUS_FORBIDDEN = 403
+HTTP_STATUS_NOT_FOUND = 404
+
+
+def is_control_opcode(opcode):
+ return (opcode >> 3) == 1
+
+
+class ExtensionParameter(object):
+ """This is exchanged on extension negotiation in opening handshake."""
+ def __init__(self, name):
+ self._name = name
+        # TODO(tyoshino): Change the data structure to a more efficient one,
+        # such as a dict, if the spec changes to say that
+        # - parameter names must be unique, and
+        # - the order of parameters is not significant.
+ self._parameters = []
+
+ def name(self):
+ """Return the extension name."""
+ return self._name
+
+ def add_parameter(self, name, value):
+ """Add a parameter."""
+ self._parameters.append((name, value))
+
+ def get_parameters(self):
+ """Return the parameters."""
+ return self._parameters
+
+ def get_parameter_names(self):
+ """Return the names of the parameters."""
+ return [name for name, unused_value in self._parameters]
+
+ def has_parameter(self, name):
+ """Test if a parameter exists."""
+ for param_name, param_value in self._parameters:
+ if param_name == name:
+ return True
+ return False
+
+ def get_parameter_value(self, name):
+ """Get the value of a specific parameter."""
+ for param_name, param_value in self._parameters:
+ if param_name == name:
+ return param_value
+
+
+class ExtensionParsingException(Exception):
+ """Exception to handle errors in extension parsing."""
+ def __init__(self, name):
+ super(ExtensionParsingException, self).__init__(name)
+
+
+def _parse_extension_param(state, definition):
+ param_name = http_header_util.consume_token(state)
+
+ if param_name is None:
+ raise ExtensionParsingException('No valid parameter name found')
+
+ http_header_util.consume_lwses(state)
+
+ if not http_header_util.consume_string(state, '='):
+ definition.add_parameter(param_name, None)
+ return
+
+ http_header_util.consume_lwses(state)
+
+ # TODO(tyoshino): Add code to validate that parsed param_value is token
+ param_value = http_header_util.consume_token_or_quoted_string(state)
+ if param_value is None:
+ raise ExtensionParsingException(
+ 'No valid parameter value found on the right-hand side of '
+ 'parameter %r' % param_name)
+
+ definition.add_parameter(param_name, param_value)
+
+
+def _parse_extension(state):
+ extension_token = http_header_util.consume_token(state)
+ if extension_token is None:
+ return None
+
+ extension = ExtensionParameter(extension_token)
+
+ while True:
+ http_header_util.consume_lwses(state)
+
+ if not http_header_util.consume_string(state, ';'):
+ break
+
+ http_header_util.consume_lwses(state)
+
+ try:
+ _parse_extension_param(state, extension)
+ except ExtensionParsingException as e:
+ raise ExtensionParsingException(
+ 'Failed to parse parameter for %r (%r)' % (extension_token, e))
+
+ return extension
+
+
+def parse_extensions(data):
+ """Parse Sec-WebSocket-Extensions header value.
+
+ Returns a list of ExtensionParameter objects.
+ Leading LWSes must be trimmed.
+ """
+ state = http_header_util.ParsingState(data)
+
+ extension_list = []
+ while True:
+ extension = _parse_extension(state)
+ if extension is not None:
+ extension_list.append(extension)
+
+ http_header_util.consume_lwses(state)
+
+ if http_header_util.peek(state) is None:
+ break
+
+ if not http_header_util.consume_string(state, ','):
+ raise ExtensionParsingException(
+ 'Failed to parse Sec-WebSocket-Extensions header: '
+ 'Expected a comma but found %r' % http_header_util.peek(state))
+
+ http_header_util.consume_lwses(state)
+
+ if len(extension_list) == 0:
+ raise ExtensionParsingException('No valid extension entry found')
+
+ return extension_list
+
+
+def format_extension(extension):
+ """Format an ExtensionParameter object."""
+ formatted_params = [extension.name()]
+ for param_name, param_value in extension.get_parameters():
+ if param_value is None:
+ formatted_params.append(param_name)
+ else:
+ quoted_value = http_header_util.quote_if_necessary(param_value)
+ formatted_params.append('%s=%s' % (param_name, quoted_value))
+ return '; '.join(formatted_params)
+
+
+def format_extensions(extension_list):
+ """Format a list of ExtensionParameter objects."""
+ formatted_extension_list = []
+ for extension in extension_list:
+ formatted_extension_list.append(format_extension(extension))
+ return ', '.join(formatted_extension_list)
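+
+
+# Example usage (an illustrative sketch only; the header value below is an
+# arbitrary example):
+#
+#   extensions = parse_extensions('permessage-deflate; client_max_window_bits')
+#   extensions[0].name()                 # 'permessage-deflate'
+#   extensions[0].get_parameter_names()  # ['client_max_window_bits']
+#   format_extensions(extensions)
+#   # 'permessage-deflate; client_max_window_bits'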
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/dispatch.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/dispatch.py
new file mode 100644
index 0000000000..4ee943a5b8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/dispatch.py
@@ -0,0 +1,385 @@
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Dispatch WebSocket request.
+"""
+
+from __future__ import absolute_import
+import logging
+import os
+import re
+import traceback
+
+from mod_pywebsocket import common
+from mod_pywebsocket import handshake
+from mod_pywebsocket import msgutil
+from mod_pywebsocket import stream
+from mod_pywebsocket import util
+
+_SOURCE_PATH_PATTERN = re.compile(r'(?i)_wsh\.py$')
+_SOURCE_SUFFIX = '_wsh.py'
+_DO_EXTRA_HANDSHAKE_HANDLER_NAME = 'web_socket_do_extra_handshake'
+_TRANSFER_DATA_HANDLER_NAME = 'web_socket_transfer_data'
+_PASSIVE_CLOSING_HANDSHAKE_HANDLER_NAME = (
+ 'web_socket_passive_closing_handshake')
+
+
+class DispatchException(Exception):
+ """Exception in dispatching WebSocket request."""
+ def __init__(self, name, status=common.HTTP_STATUS_NOT_FOUND):
+ super(DispatchException, self).__init__(name)
+ self.status = status
+
+
+def _default_passive_closing_handshake_handler(request):
+ """Default web_socket_passive_closing_handshake handler."""
+
+ return common.STATUS_NORMAL_CLOSURE, ''
+
+
+def _normalize_path(path):
+ """Normalize path.
+
+ Args:
+ path: the path to normalize.
+
+ Path is converted to the absolute path.
+ The input path can use either '\\' or '/' as the separator.
+ The normalized path always uses '/' regardless of the platform.
+ """
+
+ path = path.replace('\\', os.path.sep)
+ path = os.path.realpath(path)
+ path = path.replace('\\', '/')
+ return path
+
+
+def _create_path_to_resource_converter(base_dir):
+ """Returns a function that converts the path of a WebSocket handler source
+ file to a resource string by removing the path to the base directory from
+ its head, removing _SOURCE_SUFFIX from its tail, and replacing path
+ separators in it with '/'.
+
+ Args:
+ base_dir: the path to the base directory.
+ """
+
+ base_dir = _normalize_path(base_dir)
+
+ base_len = len(base_dir)
+ suffix_len = len(_SOURCE_SUFFIX)
+
+ def converter(path):
+ if not path.endswith(_SOURCE_SUFFIX):
+ return None
+        # _normalize_path must not be used because resolving symlinks breaks
+        # the following path check.
+ path = path.replace('\\', '/')
+ if not path.startswith(base_dir):
+ return None
+ return path[base_len:-suffix_len]
+
+ return converter
+
+
+def _enumerate_handler_file_paths(directory):
+ """Returns a generator that enumerates WebSocket Handler source file names
+ in the given directory.
+ """
+
+ for root, unused_dirs, files in os.walk(directory):
+ for base in files:
+ path = os.path.join(root, base)
+ if _SOURCE_PATH_PATTERN.search(path):
+ yield path
+
+
+class _HandlerSuite(object):
+ """A handler suite holder class."""
+ def __init__(self, do_extra_handshake, transfer_data,
+ passive_closing_handshake):
+ self.do_extra_handshake = do_extra_handshake
+ self.transfer_data = transfer_data
+ self.passive_closing_handshake = passive_closing_handshake
+
+
+def _source_handler_file(handler_definition):
+ """Source a handler definition string.
+
+ Args:
+ handler_definition: a string containing Python statements that define
+ handler functions.
+ """
+
+ global_dic = {}
+ try:
+        # This statement is grammatically different in Python 2 and 3.
+        # Hence, yapf will complain about it. To overcome this, we disable
+        # yapf for this line.
+ exec(handler_definition, global_dic) # yapf: disable
+ except Exception:
+ raise DispatchException('Error in sourcing handler:' +
+ traceback.format_exc())
+ passive_closing_handshake_handler = None
+ try:
+ passive_closing_handshake_handler = _extract_handler(
+ global_dic, _PASSIVE_CLOSING_HANDSHAKE_HANDLER_NAME)
+ except Exception:
+ passive_closing_handshake_handler = (
+ _default_passive_closing_handshake_handler)
+ return _HandlerSuite(
+ _extract_handler(global_dic, _DO_EXTRA_HANDSHAKE_HANDLER_NAME),
+ _extract_handler(global_dic, _TRANSFER_DATA_HANDLER_NAME),
+ passive_closing_handshake_handler)
+
+
+def _extract_handler(dic, name):
+ """Extracts a callable with the specified name from the given dictionary
+ dic.
+ """
+
+ if name not in dic:
+ raise DispatchException('%s is not defined.' % name)
+ handler = dic[name]
+ if not callable(handler):
+ raise DispatchException('%s is not callable.' % name)
+ return handler
+
+
+class Dispatcher(object):
+ """Dispatches WebSocket requests.
+
+ This class maintains a map from resource name to handlers.
+ """
+ def __init__(self,
+ root_dir,
+ scan_dir=None,
+ allow_handlers_outside_root_dir=True):
+ """Construct an instance.
+
+ Args:
+ root_dir: The directory where handler definition files are
+ placed.
+ scan_dir: The directory where handler definition files are
+ searched. scan_dir must be a directory under root_dir,
+ including root_dir itself. If scan_dir is None,
+ root_dir is used as scan_dir. scan_dir can be useful
+ in saving scan time when root_dir contains many
+ subdirectories.
+ allow_handlers_outside_root_dir: Scans handler files even if their
+ canonical path is not under root_dir.
+ """
+
+ self._logger = util.get_class_logger(self)
+
+ self._handler_suite_map = {}
+ self._source_warnings = []
+ if scan_dir is None:
+ scan_dir = root_dir
+ if not os.path.realpath(scan_dir).startswith(
+ os.path.realpath(root_dir)):
+ raise DispatchException('scan_dir:%s must be a directory under '
+ 'root_dir:%s.' % (scan_dir, root_dir))
+ self._source_handler_files_in_dir(root_dir, scan_dir,
+ allow_handlers_outside_root_dir)
+
+ def add_resource_path_alias(self, alias_resource_path,
+ existing_resource_path):
+ """Add resource path alias.
+
+        Once added, a request to alias_resource_path will be handled by the
+        handler registered for existing_resource_path.
+
+ Args:
+ alias_resource_path: alias resource path
+ existing_resource_path: existing resource path
+ """
+ try:
+ handler_suite = self._handler_suite_map[existing_resource_path]
+ self._handler_suite_map[alias_resource_path] = handler_suite
+ except KeyError:
+ raise DispatchException('No handler for: %r' %
+ existing_resource_path)
+
+ def source_warnings(self):
+ """Return warnings in sourcing handlers."""
+
+ return self._source_warnings
+
+ def do_extra_handshake(self, request):
+ """Do extra checking in WebSocket handshake.
+
+ Select a handler based on request.uri and call its
+ web_socket_do_extra_handshake function.
+
+ Args:
+ request: mod_python request.
+
+ Raises:
+            DispatchException: when no handler was found for the request
+            AbortedByUserException: when a user handler aborts the connection
+            HandshakeException: when the opening handshake failed
+ """
+
+ handler_suite = self.get_handler_suite(request.ws_resource)
+ if handler_suite is None:
+ raise DispatchException('No handler for: %r' % request.ws_resource)
+ do_extra_handshake_ = handler_suite.do_extra_handshake
+ try:
+ do_extra_handshake_(request)
+ except handshake.AbortedByUserException as e:
+ # Re-raise to tell the caller of this function to finish this
+ # connection without sending any error.
+ self._logger.debug('%s', traceback.format_exc())
+ raise
+ except Exception as e:
+ util.prepend_message_to_exception(
+ '%s raised exception for %s: ' %
+ (_DO_EXTRA_HANDSHAKE_HANDLER_NAME, request.ws_resource), e)
+ raise handshake.HandshakeException(e, common.HTTP_STATUS_FORBIDDEN)
+
+ def transfer_data(self, request):
+ """Let a handler transfer_data with a WebSocket client.
+
+ Select a handler based on request.ws_resource and call its
+ web_socket_transfer_data function.
+
+ Args:
+ request: mod_python request.
+
+ Raises:
+            DispatchException: when no handler was found for the request
+            AbortedByUserException: when a user handler aborts the connection
+ """
+
+ # TODO(tyoshino): Terminate underlying TCP connection if possible.
+ try:
+ handler_suite = self.get_handler_suite(request.ws_resource)
+ if handler_suite is None:
+ raise DispatchException('No handler for: %r' %
+ request.ws_resource)
+ transfer_data_ = handler_suite.transfer_data
+ transfer_data_(request)
+
+ if not request.server_terminated:
+ request.ws_stream.close_connection()
+ # Catch non-critical exceptions the handler didn't handle.
+ except handshake.AbortedByUserException as e:
+ self._logger.debug('%s', traceback.format_exc())
+ raise
+ except msgutil.BadOperationException as e:
+ self._logger.debug('%s', e)
+ request.ws_stream.close_connection(
+ common.STATUS_INTERNAL_ENDPOINT_ERROR)
+ except msgutil.InvalidFrameException as e:
+            # InvalidFrameException must be caught before
+            # ConnectionTerminatedException, because the latter would also
+            # catch InvalidFrameException (its subclass).
+ self._logger.debug('%s', e)
+ request.ws_stream.close_connection(common.STATUS_PROTOCOL_ERROR)
+ except msgutil.UnsupportedFrameException as e:
+ self._logger.debug('%s', e)
+ request.ws_stream.close_connection(common.STATUS_UNSUPPORTED_DATA)
+ except stream.InvalidUTF8Exception as e:
+ self._logger.debug('%s', e)
+ request.ws_stream.close_connection(
+ common.STATUS_INVALID_FRAME_PAYLOAD_DATA)
+ except msgutil.ConnectionTerminatedException as e:
+ self._logger.debug('%s', e)
+ except Exception as e:
+ # Any other exceptions are forwarded to the caller of this
+ # function.
+ util.prepend_message_to_exception(
+ '%s raised exception for %s: ' %
+ (_TRANSFER_DATA_HANDLER_NAME, request.ws_resource), e)
+ raise
+
+ def passive_closing_handshake(self, request):
+ """Prepare code and reason for responding client initiated closing
+ handshake.
+ """
+
+ handler_suite = self.get_handler_suite(request.ws_resource)
+ if handler_suite is None:
+ return _default_passive_closing_handshake_handler(request)
+ return handler_suite.passive_closing_handshake(request)
+
+ def get_handler_suite(self, resource):
+ """Retrieves two handlers (one for extra handshake processing, and one
+ for data transfer) for the given request as a HandlerSuite object.
+ """
+
+ fragment = None
+ if '#' in resource:
+ resource, fragment = resource.split('#', 1)
+ if '?' in resource:
+ resource = resource.split('?', 1)[0]
+ handler_suite = self._handler_suite_map.get(resource)
+ if handler_suite and fragment:
+ raise DispatchException(
+ 'Fragment identifiers MUST NOT be used on WebSocket URIs',
+ common.HTTP_STATUS_BAD_REQUEST)
+ return handler_suite
+
+ def _source_handler_files_in_dir(self, root_dir, scan_dir,
+ allow_handlers_outside_root_dir):
+ """Source all the handler source files in the scan_dir directory.
+
+ The resource path is determined relative to root_dir.
+ """
+
+        # We build a map from resource to handler code assuming that there's
+        # only one path from root_dir to scan_dir and that it can be obtained
+        # by comparing their realpaths.
+
+ # Here we cannot use abspath. See
+ # https://bugs.webkit.org/show_bug.cgi?id=31603
+
+ convert = _create_path_to_resource_converter(root_dir)
+ scan_realpath = os.path.realpath(scan_dir)
+ root_realpath = os.path.realpath(root_dir)
+ for path in _enumerate_handler_file_paths(scan_realpath):
+ if (not allow_handlers_outside_root_dir and
+ (not os.path.realpath(path).startswith(root_realpath))):
+ self._logger.debug(
+ 'Canonical path of %s is not under root directory' % path)
+ continue
+ try:
+ with open(path) as handler_file:
+ handler_suite = _source_handler_file(handler_file.read())
+ except DispatchException as e:
+ self._source_warnings.append('%s: %s' % (path, e))
+ continue
+ resource = convert(path)
+ if resource is None:
+ self._logger.debug('Path to resource conversion on %s failed' %
+ path)
+ else:
+ self._handler_suite_map[convert(path)] = handler_suite
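+
+
+# Example usage (an illustrative sketch only; the handler directory and the
+# resource paths are arbitrary examples):
+#
+#   dispatcher = Dispatcher('/path/to/websock_handlers')
+#   for warning in dispatcher.source_warnings():
+#       logging.warning('mod_pywebsocket: %s', warning)
+#   dispatcher.add_resource_path_alias('/chat', '/example/chat')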
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/extensions.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/extensions.py
new file mode 100644
index 0000000000..314a949d45
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/extensions.py
@@ -0,0 +1,474 @@
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import
+from mod_pywebsocket import common
+from mod_pywebsocket import util
+from mod_pywebsocket.http_header_util import quote_if_necessary
+
+# The list of available server side extension processor classes.
+_available_processors = {}
+
+
+class ExtensionProcessorInterface(object):
+ def __init__(self, request):
+ self._logger = util.get_class_logger(self)
+
+ self._request = request
+ self._active = True
+
+ def request(self):
+ return self._request
+
+ def name(self):
+ return None
+
+ def check_consistency_with_other_processors(self, processors):
+ pass
+
+ def set_active(self, active):
+ self._active = active
+
+ def is_active(self):
+ return self._active
+
+ def _get_extension_response_internal(self):
+ return None
+
+ def get_extension_response(self):
+ if not self._active:
+ self._logger.debug('Extension %s is deactivated', self.name())
+ return None
+
+ response = self._get_extension_response_internal()
+ if response is None:
+ self._active = False
+ return response
+
+ def _setup_stream_options_internal(self, stream_options):
+ pass
+
+ def setup_stream_options(self, stream_options):
+ if self._active:
+ self._setup_stream_options_internal(stream_options)
+
+
+def _log_outgoing_compression_ratio(logger, original_bytes, filtered_bytes,
+ average_ratio):
+ # Print inf when ratio is not available.
+ ratio = float('inf')
+ if original_bytes != 0:
+ ratio = float(filtered_bytes) / original_bytes
+
+ logger.debug('Outgoing compression ratio: %f (average: %f)' %
+ (ratio, average_ratio))
+
+
+def _log_incoming_compression_ratio(logger, received_bytes, filtered_bytes,
+ average_ratio):
+ # Print inf when ratio is not available.
+ ratio = float('inf')
+ if filtered_bytes != 0:
+ ratio = float(received_bytes) / filtered_bytes
+
+ logger.debug('Incoming compression ratio: %f (average: %f)' %
+ (ratio, average_ratio))
+
+
+def _parse_window_bits(bits):
+ """Return parsed integer value iff the given string conforms to the
+ grammar of the window bits extension parameters.
+ """
+
+ if bits is None:
+ raise ValueError('Value is required')
+
+ # For non integer values such as "10.0", ValueError will be raised.
+ int_bits = int(bits)
+
+    # The first condition rejects values with a leading zero, e.g. "08".
+ if bits != str(int_bits) or int_bits < 8 or int_bits > 15:
+ raise ValueError('Invalid value: %r' % bits)
+
+ return int_bits
+
+
+class _AverageRatioCalculator(object):
+ """Stores total bytes of original and result data, and calculates average
+ result / original ratio.
+ """
+ def __init__(self):
+ self._total_original_bytes = 0
+ self._total_result_bytes = 0
+
+ def add_original_bytes(self, value):
+ self._total_original_bytes += value
+
+ def add_result_bytes(self, value):
+ self._total_result_bytes += value
+
+ def get_average_ratio(self):
+ if self._total_original_bytes != 0:
+ return (float(self._total_result_bytes) /
+ self._total_original_bytes)
+ else:
+ return float('inf')
+
+
+class PerMessageDeflateExtensionProcessor(ExtensionProcessorInterface):
+ """permessage-deflate extension processor.
+
+ Specification:
+ http://tools.ietf.org/html/draft-ietf-hybi-permessage-compression-08
+ """
+
+ _SERVER_MAX_WINDOW_BITS_PARAM = 'server_max_window_bits'
+ _SERVER_NO_CONTEXT_TAKEOVER_PARAM = 'server_no_context_takeover'
+ _CLIENT_MAX_WINDOW_BITS_PARAM = 'client_max_window_bits'
+ _CLIENT_NO_CONTEXT_TAKEOVER_PARAM = 'client_no_context_takeover'
+
+ def __init__(self, request):
+ """Construct PerMessageDeflateExtensionProcessor."""
+
+ ExtensionProcessorInterface.__init__(self, request)
+ self._logger = util.get_class_logger(self)
+
+ self._preferred_client_max_window_bits = None
+ self._client_no_context_takeover = False
+
+ def name(self):
+ # This method returns "deflate" (not "permessage-deflate") for
+ # compatibility.
+ return 'deflate'
+
+ def _get_extension_response_internal(self):
+ for name in self._request.get_parameter_names():
+ if name not in [
+ self._SERVER_MAX_WINDOW_BITS_PARAM,
+ self._SERVER_NO_CONTEXT_TAKEOVER_PARAM,
+ self._CLIENT_MAX_WINDOW_BITS_PARAM
+ ]:
+ self._logger.debug('Unknown parameter: %r', name)
+ return None
+
+ server_max_window_bits = None
+ if self._request.has_parameter(self._SERVER_MAX_WINDOW_BITS_PARAM):
+ server_max_window_bits = self._request.get_parameter_value(
+ self._SERVER_MAX_WINDOW_BITS_PARAM)
+ try:
+ server_max_window_bits = _parse_window_bits(
+ server_max_window_bits)
+ except ValueError as e:
+ self._logger.debug('Bad %s parameter: %r',
+ self._SERVER_MAX_WINDOW_BITS_PARAM, e)
+ return None
+
+ server_no_context_takeover = self._request.has_parameter(
+ self._SERVER_NO_CONTEXT_TAKEOVER_PARAM)
+ if (server_no_context_takeover and self._request.get_parameter_value(
+ self._SERVER_NO_CONTEXT_TAKEOVER_PARAM) is not None):
+ self._logger.debug('%s parameter must not have a value: %r',
+ self._SERVER_NO_CONTEXT_TAKEOVER_PARAM,
+ server_no_context_takeover)
+ return None
+
+        # The presence of client_max_window_bits in a client's offer indicates
+        # whether or not the client can accept client_max_window_bits from a
+        # server.
+ client_client_max_window_bits = self._request.has_parameter(
+ self._CLIENT_MAX_WINDOW_BITS_PARAM)
+ if (client_client_max_window_bits
+ and self._request.get_parameter_value(
+ self._CLIENT_MAX_WINDOW_BITS_PARAM) is not None):
+ self._logger.debug(
+ '%s parameter must not have a value in a '
+ 'client\'s opening handshake: %r',
+ self._CLIENT_MAX_WINDOW_BITS_PARAM,
+ client_client_max_window_bits)
+ return None
+
+ self._rfc1979_deflater = util._RFC1979Deflater(
+ server_max_window_bits, server_no_context_takeover)
+
+        # Note that we prepare for incoming messages compressed with window
+        # bits up to 15 regardless of the client_max_window_bits value to be
+        # sent to the client.
+ self._rfc1979_inflater = util._RFC1979Inflater()
+
+ self._framer = _PerMessageDeflateFramer(server_max_window_bits,
+ server_no_context_takeover)
+ self._framer.set_bfinal(False)
+ self._framer.set_compress_outgoing_enabled(True)
+
+ response = common.ExtensionParameter(self._request.name())
+
+ if server_max_window_bits is not None:
+ response.add_parameter(self._SERVER_MAX_WINDOW_BITS_PARAM,
+ str(server_max_window_bits))
+
+ if server_no_context_takeover:
+ response.add_parameter(self._SERVER_NO_CONTEXT_TAKEOVER_PARAM,
+ None)
+
+ if self._preferred_client_max_window_bits is not None:
+ if not client_client_max_window_bits:
+ self._logger.debug(
+ 'Processor is configured to use %s but '
+ 'the client cannot accept it',
+ self._CLIENT_MAX_WINDOW_BITS_PARAM)
+ return None
+ response.add_parameter(self._CLIENT_MAX_WINDOW_BITS_PARAM,
+ str(self._preferred_client_max_window_bits))
+
+ if self._client_no_context_takeover:
+ response.add_parameter(self._CLIENT_NO_CONTEXT_TAKEOVER_PARAM,
+ None)
+
+ self._logger.debug('Enable %s extension ('
+ 'request: server_max_window_bits=%s; '
+ 'server_no_context_takeover=%r, '
+ 'response: client_max_window_bits=%s; '
+ 'client_no_context_takeover=%r)' %
+ (self._request.name(), server_max_window_bits,
+ server_no_context_takeover,
+ self._preferred_client_max_window_bits,
+ self._client_no_context_takeover))
+
+ return response
+
+ def _setup_stream_options_internal(self, stream_options):
+ self._framer.setup_stream_options(stream_options)
+
+ def set_client_max_window_bits(self, value):
+ """If this option is specified, this class adds the
+ client_max_window_bits extension parameter to the handshake response,
+ but doesn't reduce the LZ77 sliding window size of its inflater.
+ I.e., you can use this for testing client implementation but cannot
+ reduce memory usage of this class.
+
+ If this method has been called with True and an offer without the
+ client_max_window_bits extension parameter is received,
+
+ - (When processing the permessage-deflate extension) this processor
+ declines the request.
+ - (When processing the permessage-compress extension) this processor
+ accepts the request.
+ """
+
+ self._preferred_client_max_window_bits = value
+
+ def set_client_no_context_takeover(self, value):
+ """If this option is specified, this class adds the
+ client_no_context_takeover extension parameter to the handshake
+ response, but doesn't reset inflater for each message. I.e., you can
+ use this for testing client implementation but cannot reduce memory
+ usage of this class.
+ """
+
+ self._client_no_context_takeover = value
+
+ def set_bfinal(self, value):
+ self._framer.set_bfinal(value)
+
+ def enable_outgoing_compression(self):
+ self._framer.set_compress_outgoing_enabled(True)
+
+ def disable_outgoing_compression(self):
+ self._framer.set_compress_outgoing_enabled(False)
+
+
+class _PerMessageDeflateFramer(object):
+ """A framer for extensions with per-message DEFLATE feature."""
+ def __init__(self, deflate_max_window_bits, deflate_no_context_takeover):
+ self._logger = util.get_class_logger(self)
+
+ self._rfc1979_deflater = util._RFC1979Deflater(
+ deflate_max_window_bits, deflate_no_context_takeover)
+
+ self._rfc1979_inflater = util._RFC1979Inflater()
+
+ self._bfinal = False
+
+ self._compress_outgoing_enabled = False
+
+ # True if a message is fragmented and compression is ongoing.
+ self._compress_ongoing = False
+
+ # Calculates
+ # (Total outgoing bytes supplied to this filter) /
+ # (Total bytes sent to the network after applying this filter)
+ self._outgoing_average_ratio_calculator = _AverageRatioCalculator()
+
+ # Calculates
+ # (Total bytes received from the network) /
+ # (Total incoming bytes obtained after applying this filter)
+ self._incoming_average_ratio_calculator = _AverageRatioCalculator()
+
+ def set_bfinal(self, value):
+ self._bfinal = value
+
+ def set_compress_outgoing_enabled(self, value):
+ self._compress_outgoing_enabled = value
+
+ def _process_incoming_message(self, message, decompress):
+ if not decompress:
+ return message
+
+ received_payload_size = len(message)
+ self._incoming_average_ratio_calculator.add_result_bytes(
+ received_payload_size)
+
+ message = self._rfc1979_inflater.filter(message)
+
+ filtered_payload_size = len(message)
+ self._incoming_average_ratio_calculator.add_original_bytes(
+ filtered_payload_size)
+
+ _log_incoming_compression_ratio(
+ self._logger, received_payload_size, filtered_payload_size,
+ self._incoming_average_ratio_calculator.get_average_ratio())
+
+ return message
+
+ def _process_outgoing_message(self, message, end, binary):
+ if not binary:
+ message = message.encode('utf-8')
+
+ if not self._compress_outgoing_enabled:
+ return message
+
+ original_payload_size = len(message)
+ self._outgoing_average_ratio_calculator.add_original_bytes(
+ original_payload_size)
+
+ message = self._rfc1979_deflater.filter(message,
+ end=end,
+ bfinal=self._bfinal)
+
+ filtered_payload_size = len(message)
+ self._outgoing_average_ratio_calculator.add_result_bytes(
+ filtered_payload_size)
+
+ _log_outgoing_compression_ratio(
+ self._logger, original_payload_size, filtered_payload_size,
+ self._outgoing_average_ratio_calculator.get_average_ratio())
+
+ if not self._compress_ongoing:
+ self._outgoing_frame_filter.set_compression_bit()
+ self._compress_ongoing = not end
+ return message
+
+ def _process_incoming_frame(self, frame):
+ if frame.rsv1 == 1 and not common.is_control_opcode(frame.opcode):
+ self._incoming_message_filter.decompress_next_message()
+ frame.rsv1 = 0
+
+ def _process_outgoing_frame(self, frame, compression_bit):
+ if (not compression_bit or common.is_control_opcode(frame.opcode)):
+ return
+
+ frame.rsv1 = 1
+
+ def setup_stream_options(self, stream_options):
+ """Creates filters and sets them to the StreamOptions."""
+ class _OutgoingMessageFilter(object):
+ def __init__(self, parent):
+ self._parent = parent
+
+ def filter(self, message, end=True, binary=False):
+ return self._parent._process_outgoing_message(
+ message, end, binary)
+
+ class _IncomingMessageFilter(object):
+ def __init__(self, parent):
+ self._parent = parent
+ self._decompress_next_message = False
+
+ def decompress_next_message(self):
+ self._decompress_next_message = True
+
+ def filter(self, message):
+ message = self._parent._process_incoming_message(
+ message, self._decompress_next_message)
+ self._decompress_next_message = False
+ return message
+
+ self._outgoing_message_filter = _OutgoingMessageFilter(self)
+ self._incoming_message_filter = _IncomingMessageFilter(self)
+ stream_options.outgoing_message_filters.append(
+ self._outgoing_message_filter)
+ stream_options.incoming_message_filters.append(
+ self._incoming_message_filter)
+
+ class _OutgoingFrameFilter(object):
+ def __init__(self, parent):
+ self._parent = parent
+ self._set_compression_bit = False
+
+ def set_compression_bit(self):
+ self._set_compression_bit = True
+
+ def filter(self, frame):
+ self._parent._process_outgoing_frame(frame,
+ self._set_compression_bit)
+ self._set_compression_bit = False
+
+ class _IncomingFrameFilter(object):
+ def __init__(self, parent):
+ self._parent = parent
+
+ def filter(self, frame):
+ self._parent._process_incoming_frame(frame)
+
+ self._outgoing_frame_filter = _OutgoingFrameFilter(self)
+ self._incoming_frame_filter = _IncomingFrameFilter(self)
+ stream_options.outgoing_frame_filters.append(
+ self._outgoing_frame_filter)
+ stream_options.incoming_frame_filters.append(
+ self._incoming_frame_filter)
+
+ stream_options.encode_text_message_to_utf8 = False
+
+
+_available_processors[common.PERMESSAGE_DEFLATE_EXTENSION] = (
+ PerMessageDeflateExtensionProcessor)
+
+
+def get_extension_processor(extension_request):
+ """Given an ExtensionParameter representing an extension offer received
+ from a client, configures and returns an instance of the corresponding
+ extension processor class.
+ """
+
+ processor_class = _available_processors.get(extension_request.name())
+ if processor_class is None:
+ return None
+ return processor_class(extension_request)
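+
+
+# Example usage (an illustrative sketch only; 'offer' is assumed to be an
+# ExtensionParameter parsed from a client's Sec-WebSocket-Extensions header
+# value with common.parse_extensions()):
+#
+#   processor = get_extension_processor(offer)
+#   if processor is not None:
+#       response_parameter = processor.get_extension_response()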
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/fast_masking.i b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/fast_masking.i
new file mode 100644
index 0000000000..ddaad27f53
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/fast_masking.i
@@ -0,0 +1,98 @@
+// Copyright 2013, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+%module fast_masking
+
+%include "cstring.i"
+
+%{
+#include <cstring>
+
+#ifdef __SSE2__
+#include <emmintrin.h>
+#endif
+%}
+
+%apply (char *STRING, int LENGTH) {
+ (const char* payload, int payload_length),
+ (const char* masking_key, int masking_key_length) };
+%cstring_output_allocate_size(
+ char** result, int* result_length, delete [] *$1);
+
+%inline %{
+
+void mask(
+ const char* payload, int payload_length,
+ const char* masking_key, int masking_key_length,
+ int masking_key_index,
+ char** result, int* result_length) {
+ *result = new char[payload_length];
+ *result_length = payload_length;
+ memcpy(*result, payload, payload_length);
+
+ char* cursor = *result;
+ char* cursor_end = *result + *result_length;
+
+#ifdef __SSE2__
+ while ((cursor < cursor_end) &&
+ (reinterpret_cast<size_t>(cursor) & 0xf)) {
+ *cursor ^= masking_key[masking_key_index];
+ ++cursor;
+ masking_key_index = (masking_key_index + 1) % masking_key_length;
+ }
+ if (cursor == cursor_end) {
+ return;
+ }
+
+ const int kBlockSize = 16;
+ __m128i masking_key_block;
+ for (int i = 0; i < kBlockSize; ++i) {
+ *(reinterpret_cast<char*>(&masking_key_block) + i) =
+ masking_key[masking_key_index];
+ masking_key_index = (masking_key_index + 1) % masking_key_length;
+ }
+
+ while (cursor + kBlockSize <= cursor_end) {
+ __m128i payload_block =
+ _mm_load_si128(reinterpret_cast<__m128i*>(cursor));
+ _mm_stream_si128(reinterpret_cast<__m128i*>(cursor),
+ _mm_xor_si128(payload_block, masking_key_block));
+ cursor += kBlockSize;
+ }
+#endif
+
+ while (cursor < cursor_end) {
+ *cursor ^= masking_key[masking_key_index];
+ ++cursor;
+ masking_key_index = (masking_key_index + 1) % masking_key_length;
+ }
+}
+
+%}
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/handshake/__init__.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/handshake/__init__.py
new file mode 100644
index 0000000000..4bc1c67c57
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/handshake/__init__.py
@@ -0,0 +1,101 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""WebSocket opening handshake processor. This class try to apply available
+opening handshake processors for each protocol version until a connection is
+successfully established.
+"""
+
+from __future__ import absolute_import
+import logging
+
+from mod_pywebsocket import common
+from mod_pywebsocket.handshake import hybi
+# Export AbortedByUserException, HandshakeException, and VersionException
+# symbol from this module.
+from mod_pywebsocket.handshake.base import AbortedByUserException
+from mod_pywebsocket.handshake.base import HandshakeException
+from mod_pywebsocket.handshake.base import VersionException
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def do_handshake(request, dispatcher):
+ """Performs WebSocket handshake.
+
+ Args:
+ request: mod_python request.
+ dispatcher: Dispatcher (dispatch.Dispatcher).
+
+    The handshaker adds attributes such as ws_resource to the request while
+    performing the handshake.
+    """
+
+ _LOGGER.debug('Client\'s opening handshake resource: %r', request.uri)
+    # To print mimetools.Message as an escaped one-line string, we convert
+    # headers_in to a dict object. Without the conversion, %r just prints the
+    # type and address, and %s prints the original header string as multiple
+    # lines.
+    #
+    # Both mimetools.Message and mod_python's MpTable_Type can be converted
+    # to dict.
+    #
+    # mimetools.Message.__str__ returns the original header string, while
+    # dict(mimetools.Message) returns a map from header names to header
+    # values. MpTable_Type has no such __str__, only a __repr__ that formats
+    # it like a dictionary object.
+ _LOGGER.debug('Client\'s opening handshake headers: %r',
+ dict(request.headers_in))
+
+ handshakers = []
+ handshakers.append(('RFC 6455', hybi.Handshaker(request, dispatcher)))
+
+ for name, handshaker in handshakers:
+ _LOGGER.debug('Trying protocol version %s', name)
+ try:
+ handshaker.do_handshake()
+ _LOGGER.info('Established (%s protocol)', name)
+ return
+ except HandshakeException as e:
+ _LOGGER.debug(
+ 'Failed to complete opening handshake as %s protocol: %r',
+ name, e)
+ if e.status:
+ raise e
+ except AbortedByUserException as e:
+ raise
+ except VersionException as e:
+ raise
+
+ # TODO(toyoshim): Add a test to cover the case all handshakers fail.
+ raise HandshakeException(
+ 'Failed to complete opening handshake for all available protocols',
+ status=common.HTTP_STATUS_BAD_REQUEST)
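+
+
+# Example usage (an illustrative sketch only; 'request' and 'dispatcher' are
+# assumed to be a mod_python-style request object and a dispatch.Dispatcher
+# instance, respectively):
+#
+#   try:
+#       do_handshake(request, dispatcher)
+#   except HandshakeException as e:
+#       pass  # Reject the connection; e.status may carry an HTTP status code.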
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/handshake/base.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/handshake/base.py
new file mode 100644
index 0000000000..ffad0614d6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/handshake/base.py
@@ -0,0 +1,396 @@
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Common functions and exceptions used by WebSocket opening handshake
+processors.
+"""
+
+from __future__ import absolute_import
+
+from mod_pywebsocket import common
+from mod_pywebsocket import http_header_util
+from mod_pywebsocket.extensions import get_extension_processor
+from mod_pywebsocket.stream import StreamOptions
+from mod_pywebsocket.stream import Stream
+from mod_pywebsocket import util
+
+from six.moves import map
+from six.moves import range
+
+# Defining aliases for values used frequently.
+_VERSION_LATEST = common.VERSION_HYBI_LATEST
+_VERSION_LATEST_STRING = str(_VERSION_LATEST)
+_SUPPORTED_VERSIONS = [
+ _VERSION_LATEST,
+]
+
+
+class AbortedByUserException(Exception):
+ """Exception for aborting a connection intentionally.
+
+ If this exception is raised in do_extra_handshake handler, the connection
+ will be abandoned. No other WebSocket or HTTP(S) handler will be invoked.
+
+ If this exception is raised in transfer_data_handler, the connection will
+ be closed without closing handshake. No other WebSocket or HTTP(S) handler
+ will be invoked.
+ """
+
+ pass
+
+
+class HandshakeException(Exception):
+ """This exception will be raised when an error occurred while processing
+ WebSocket initial handshake.
+ """
+ def __init__(self, name, status=None):
+ super(HandshakeException, self).__init__(name)
+ self.status = status
+
+
+class VersionException(Exception):
+ """This exception will be raised when a version of client request does not
+ match with version the server supports.
+ """
+ def __init__(self, name, supported_versions=''):
+ """Construct an instance.
+
+ Args:
+            supported_versions: a str object listing the supported hybi versions.
+ (e.g. '13')
+ """
+ super(VersionException, self).__init__(name)
+ self.supported_versions = supported_versions
+
+
+def get_default_port(is_secure):
+ if is_secure:
+ return common.DEFAULT_WEB_SOCKET_SECURE_PORT
+ else:
+ return common.DEFAULT_WEB_SOCKET_PORT
+
+
+def validate_subprotocol(subprotocol):
+ """Validate a value in the Sec-WebSocket-Protocol field.
+
+    See Sections 4.1, 4.2.2, and 4.3 of RFC 6455.
+ """
+
+ if not subprotocol:
+ raise HandshakeException('Invalid subprotocol name: empty')
+
+ # Parameter should be encoded HTTP token.
+ state = http_header_util.ParsingState(subprotocol)
+ token = http_header_util.consume_token(state)
+ rest = http_header_util.peek(state)
+ # If |rest| is not None, |subprotocol| is not one token or invalid. If
+ # |rest| is None, |token| must not be None because |subprotocol| is
+ # concatenation of |token| and |rest| and is not None.
+ if rest is not None:
+ raise HandshakeException('Invalid non-token string in subprotocol '
+ 'name: %r' % rest)
+
+
+def parse_host_header(request):
+ fields = request.headers_in[common.HOST_HEADER].split(':', 1)
+ if len(fields) == 1:
+ return fields[0], get_default_port(request.is_https())
+ try:
+ return fields[0], int(fields[1])
+ except ValueError as e:
+ raise HandshakeException('Invalid port number format: %r' % e)
+
+
+def get_mandatory_header(request, key):
+ value = request.headers_in.get(key)
+ if value is None:
+ raise HandshakeException('Header %s is not defined' % key)
+ return value
+
+
+def validate_mandatory_header(request, key, expected_value, fail_status=None):
+ value = get_mandatory_header(request, key)
+
+ if value.lower() != expected_value.lower():
+ raise HandshakeException(
+ 'Expected %r for header %s but found %r (case-insensitive)' %
+ (expected_value, key, value),
+ status=fail_status)
+
+
+def parse_token_list(data):
+ """Parses a header value which follows 1#token and returns parsed elements
+ as a list of strings.
+
+ Leading LWSes must be trimmed.
+ """
+
+ state = http_header_util.ParsingState(data)
+
+ token_list = []
+
+ while True:
+ token = http_header_util.consume_token(state)
+ if token is not None:
+ token_list.append(token)
+
+ http_header_util.consume_lwses(state)
+
+ if http_header_util.peek(state) is None:
+ break
+
+ if not http_header_util.consume_string(state, ','):
+ raise HandshakeException('Expected a comma but found %r' %
+ http_header_util.peek(state))
+
+ http_header_util.consume_lwses(state)
+
+ if len(token_list) == 0:
+ raise HandshakeException('No valid token found')
+
+ return token_list
+
+
+class HandshakerBase(object):
+ def __init__(self, request, dispatcher):
+ self._logger = util.get_class_logger(self)
+ self._request = request
+ self._dispatcher = dispatcher
+
+ """ subclasses must implement the five following methods """
+
+ def _protocol_rfc(self):
+ """ Return the name of the RFC that the handshake class is implementing.
+ """
+
+ raise AssertionError("subclasses should implement this method")
+
+ def _transform_header(self, header):
+ """
+ :param header: header name
+
+ transform the header name if needed. For example, HTTP/2 subclass will
+ return the name of the header in lower case.
+ """
+
+ raise AssertionError("subclasses should implement this method")
+
+ def _validate_request(self):
+ """ validate that all the mandatory fields are set """
+
+ raise AssertionError("subclasses should implement this method")
+
+ def _set_accept(self):
+ """ Computes accept value based on Sec-WebSocket-Accept if needed. """
+
+ raise AssertionError("subclasses should implement this method")
+
+ def _send_handshake(self):
+ """ Prepare and send the response after it has been parsed and processed.
+ """
+
+ raise AssertionError("subclasses should implement this method")
+
+ def do_handshake(self):
+ self._request.ws_close_code = None
+ self._request.ws_close_reason = None
+
+ # Parsing.
+ self._validate_request()
+ self._request.ws_resource = self._request.uri
+ self._request.ws_version = self._check_version()
+
+ try:
+ self._get_origin()
+ self._set_protocol()
+ self._parse_extensions()
+
+ self._set_accept()
+
+ self._logger.debug('Protocol version is ' + self._protocol_rfc())
+
+ # Setup extension processors.
+ self._request.ws_extension_processors = self._get_extension_processors_requested(
+ )
+
+ # List of extra headers. The extra handshake handler may add header
+ # data as name/value pairs to this list and pywebsocket appends
+ # them to the WebSocket handshake.
+ self._request.extra_headers = []
+
+ # Extra handshake handler may modify/remove processors.
+ self._dispatcher.do_extra_handshake(self._request)
+
+ stream_options = StreamOptions()
+ self._process_extensions(stream_options)
+
+ self._request.ws_stream = Stream(self._request, stream_options)
+
+ if self._request.ws_requested_protocols is not None:
+ if self._request.ws_protocol is None:
+ raise HandshakeException(
+ 'do_extra_handshake must choose one subprotocol from '
+ 'ws_requested_protocols and set it to ws_protocol')
+ validate_subprotocol(self._request.ws_protocol)
+
+ self._logger.debug('Subprotocol accepted: %r',
+ self._request.ws_protocol)
+ else:
+ if self._request.ws_protocol is not None:
+ raise HandshakeException(
+ 'ws_protocol must be None when the client didn\'t '
+ 'request any subprotocol')
+
+ self._send_handshake()
+ except HandshakeException as e:
+ if not e.status:
+ # Fallback to 400 bad request by default.
+ e.status = common.HTTP_STATUS_BAD_REQUEST
+ raise e
+
+ def _check_version(self):
+ sec_websocket_version_header = self._transform_header(
+ common.SEC_WEBSOCKET_VERSION_HEADER)
+ version = get_mandatory_header(self._request,
+ sec_websocket_version_header)
+ if version == _VERSION_LATEST_STRING:
+ return _VERSION_LATEST
+
+ if version.find(',') >= 0:
+ raise HandshakeException(
+ 'Multiple versions (%r) are not allowed for header %s' %
+ (version, sec_websocket_version_header),
+ status=common.HTTP_STATUS_BAD_REQUEST)
+ raise VersionException('Unsupported version %r for header %s' %
+ (version, sec_websocket_version_header),
+ supported_versions=', '.join(
+ map(str, _SUPPORTED_VERSIONS)))
+
+ def _get_origin(self):
+ origin_header = self._transform_header(common.ORIGIN_HEADER)
+ origin = self._request.headers_in.get(origin_header)
+ if origin is None:
+ self._logger.debug('Client request does not have origin header')
+ self._request.ws_origin = origin
+
+ def _set_protocol(self):
+ self._request.ws_protocol = None
+
+ sec_websocket_protocol_header = self._transform_header(
+ common.SEC_WEBSOCKET_PROTOCOL_HEADER)
+ protocol_header = self._request.headers_in.get(
+ sec_websocket_protocol_header)
+
+ if protocol_header is None:
+ self._request.ws_requested_protocols = None
+ return
+
+ self._request.ws_requested_protocols = parse_token_list(
+ protocol_header)
+ self._logger.debug('Subprotocols requested: %r',
+ self._request.ws_requested_protocols)
+
+ def _parse_extensions(self):
+ sec_websocket_extensions_header = self._transform_header(
+ common.SEC_WEBSOCKET_EXTENSIONS_HEADER)
+ extensions_header = self._request.headers_in.get(
+ sec_websocket_extensions_header)
+ if not extensions_header:
+ self._request.ws_requested_extensions = None
+ return
+
+ try:
+ self._request.ws_requested_extensions = common.parse_extensions(
+ extensions_header)
+ except common.ExtensionParsingException as e:
+ raise HandshakeException(
+ 'Failed to parse sec-websocket-extensions header: %r' % e)
+
+ self._logger.debug(
+ 'Extensions requested: %r',
+ list(
+ map(common.ExtensionParameter.name,
+ self._request.ws_requested_extensions)))
+
+ def _get_extension_processors_requested(self):
+ processors = []
+ if self._request.ws_requested_extensions is not None:
+ for extension_request in self._request.ws_requested_extensions:
+ processor = get_extension_processor(extension_request)
+ # Unknown extension requests are just ignored.
+ if processor is not None:
+ processors.append(processor)
+ return processors
+
+ def _process_extensions(self, stream_options):
+ processors = [
+ processor for processor in self._request.ws_extension_processors
+ if processor is not None
+ ]
+
+        # Ask each processor whether there are extensions on the request that
+        # cannot co-exist with it. When a processor decides that other
+        # processors cannot co-exist with it, it marks them (or itself) as
+        # "inactive". The first extension processor has the right to make the
+        # final call.
+ for processor in reversed(processors):
+ if processor.is_active():
+ processor.check_consistency_with_other_processors(processors)
+ processors = [
+ processor for processor in processors if processor.is_active()
+ ]
+
+ accepted_extensions = []
+
+ for index, processor in enumerate(processors):
+ if not processor.is_active():
+ continue
+
+ extension_response = processor.get_extension_response()
+ if extension_response is None:
+ # Rejected.
+ continue
+
+ accepted_extensions.append(extension_response)
+
+ processor.setup_stream_options(stream_options)
+
+ # Inactivate all of the following compression extensions.
+ for j in range(index + 1, len(processors)):
+ processors[j].set_active(False)
+
+ if len(accepted_extensions) > 0:
+ self._request.ws_extensions = accepted_extensions
+ self._logger.debug(
+ 'Extensions accepted: %r',
+ list(map(common.ExtensionParameter.name, accepted_extensions)))
+ else:
+ self._request.ws_extensions = None
+
+
+# vi:sts=4 sw=4 et
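
The header parsing helpers in base.py are small enough to exercise directly.
A quick sketch of their behaviour, with interpreter output shown as comments:

    from mod_pywebsocket.handshake.base import (HandshakeException,
                                                parse_token_list,
                                                validate_subprotocol)

    # parse_token_list splits a 1#token header value into its tokens.
    print(parse_token_list('permessage-deflate, chat'))
    # -> ['permessage-deflate', 'chat']

    # validate_subprotocol accepts a single HTTP token and raises otherwise.
    validate_subprotocol('chat')             # a valid token, no exception
    try:
        validate_subprotocol('not a token')  # spaces are separators
    except HandshakeException as e:
        print('rejected: %s' % e)
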
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/handshake/hybi.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/handshake/hybi.py
new file mode 100644
index 0000000000..cf931db5a5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/handshake/hybi.py
@@ -0,0 +1,223 @@
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""This file provides the opening handshake processor for the WebSocket
+protocol (RFC 6455).
+
+Specification:
+http://tools.ietf.org/html/rfc6455
+"""
+
+from __future__ import absolute_import
+import base64
+import re
+from hashlib import sha1
+
+from mod_pywebsocket import common
+from mod_pywebsocket.handshake.base import get_mandatory_header
+from mod_pywebsocket.handshake.base import HandshakeException
+from mod_pywebsocket.handshake.base import parse_token_list
+from mod_pywebsocket.handshake.base import validate_mandatory_header
+from mod_pywebsocket.handshake.base import HandshakerBase
+from mod_pywebsocket import util
+
+# Used to validate the value in the Sec-WebSocket-Key header strictly. RFC 4648
+# disallows non-zero padding, so the character right before == must be any of
+# A, Q, g and w.
+_SEC_WEBSOCKET_KEY_REGEX = re.compile('^[+/0-9A-Za-z]{21}[AQgw]==$')
+
+
+def check_request_line(request):
+ # 5.1 1. The three character UTF-8 string "GET".
+ # 5.1 2. A UTF-8-encoded U+0020 SPACE character (0x20 byte).
+ if request.method != u'GET':
+ raise HandshakeException('Method is not GET: %r' % request.method)
+
+ if request.protocol != u'HTTP/1.1':
+ raise HandshakeException('Version is not HTTP/1.1: %r' %
+ request.protocol)
+
+
+def compute_accept(key):
+ """Computes value for the Sec-WebSocket-Accept header from value of the
+ Sec-WebSocket-Key header.
+ """
+
+ accept_binary = sha1(key + common.WEBSOCKET_ACCEPT_UUID).digest()
+ accept = base64.b64encode(accept_binary)
+
+ return accept
+
+
+def compute_accept_from_unicode(unicode_key):
+ """A wrapper function for compute_accept which takes a unicode string as an
+ argument, and encodes it to byte string. It then passes it on to
+ compute_accept.
+ """
+
+ key = unicode_key.encode('UTF-8')
+ return compute_accept(key)
+
+
+def format_header(name, value):
+ return u'%s: %s\r\n' % (name, value)
+
+
+class Handshaker(HandshakerBase):
+ """Opening handshake processor for the WebSocket protocol (RFC 6455)."""
+ def __init__(self, request, dispatcher):
+ """Construct an instance.
+
+ Args:
+ request: mod_python request.
+ dispatcher: Dispatcher (dispatch.Dispatcher).
+
+ Handshaker will add attributes such as ws_resource during handshake.
+ """
+ super(Handshaker, self).__init__(request, dispatcher)
+
+ def _transform_header(self, header):
+ return header
+
+ def _protocol_rfc(self):
+ return 'RFC 6455'
+
+ def _validate_connection_header(self):
+ connection = get_mandatory_header(self._request,
+ common.CONNECTION_HEADER)
+
+ try:
+ connection_tokens = parse_token_list(connection)
+ except HandshakeException as e:
+ raise HandshakeException('Failed to parse %s: %s' %
+ (common.CONNECTION_HEADER, e))
+
+ connection_is_valid = False
+ for token in connection_tokens:
+ if token.lower() == common.UPGRADE_CONNECTION_TYPE.lower():
+ connection_is_valid = True
+ break
+ if not connection_is_valid:
+ raise HandshakeException(
+ '%s header doesn\'t contain "%s"' %
+ (common.CONNECTION_HEADER, common.UPGRADE_CONNECTION_TYPE))
+
+ def _validate_request(self):
+ check_request_line(self._request)
+ validate_mandatory_header(self._request, common.UPGRADE_HEADER,
+ common.WEBSOCKET_UPGRADE_TYPE)
+ self._validate_connection_header()
+ unused_host = get_mandatory_header(self._request, common.HOST_HEADER)
+
+ def _set_accept(self):
+ # Key validation, response generation.
+ key = self._get_key()
+ accept = compute_accept(key)
+ self._logger.debug('%s: %r (%s)', common.SEC_WEBSOCKET_ACCEPT_HEADER,
+ accept, util.hexify(base64.b64decode(accept)))
+ self._request._accept = accept
+
+ def _validate_key(self, key):
+ if key.find(',') >= 0:
+ raise HandshakeException('Request has multiple %s header lines or '
+ 'contains illegal character \',\': %r' %
+ (common.SEC_WEBSOCKET_KEY_HEADER, key))
+
+ # Validate
+ key_is_valid = False
+ try:
+ # Validate key by quick regex match before parsing by base64
+ # module. Because base64 module skips invalid characters, we have
+ # to do this in advance to make this server strictly reject illegal
+ # keys.
+ if _SEC_WEBSOCKET_KEY_REGEX.match(key):
+ decoded_key = base64.b64decode(key)
+ if len(decoded_key) == 16:
+ key_is_valid = True
+ except TypeError as e:
+ pass
+
+ if not key_is_valid:
+ raise HandshakeException('Illegal value for header %s: %r' %
+ (common.SEC_WEBSOCKET_KEY_HEADER, key))
+
+ return decoded_key
+
+ def _get_key(self):
+ key = get_mandatory_header(self._request,
+ common.SEC_WEBSOCKET_KEY_HEADER)
+
+ decoded_key = self._validate_key(key)
+
+ self._logger.debug('%s: %r (%s)', common.SEC_WEBSOCKET_KEY_HEADER, key,
+ util.hexify(decoded_key))
+
+ return key.encode('UTF-8')
+
+ def _create_handshake_response(self, accept):
+ response = []
+
+ response.append(u'HTTP/1.1 101 Switching Protocols\r\n')
+
+ # WebSocket headers
+ response.append(
+ format_header(common.UPGRADE_HEADER,
+ common.WEBSOCKET_UPGRADE_TYPE))
+ response.append(
+ format_header(common.CONNECTION_HEADER,
+ common.UPGRADE_CONNECTION_TYPE))
+ response.append(
+ format_header(common.SEC_WEBSOCKET_ACCEPT_HEADER,
+ accept.decode('UTF-8')))
+ if self._request.ws_protocol is not None:
+ response.append(
+ format_header(common.SEC_WEBSOCKET_PROTOCOL_HEADER,
+ self._request.ws_protocol))
+ if (self._request.ws_extensions is not None
+ and len(self._request.ws_extensions) != 0):
+ response.append(
+ format_header(
+ common.SEC_WEBSOCKET_EXTENSIONS_HEADER,
+ common.format_extensions(self._request.ws_extensions)))
+
+ # Headers not specific for WebSocket
+ for name, value in self._request.extra_headers:
+ response.append(format_header(name, value))
+
+ response.append(u'\r\n')
+
+ return u''.join(response)
+
+ def _send_handshake(self):
+ raw_response = self._create_handshake_response(self._request._accept)
+ self._request.connection.write(raw_response.encode('UTF-8'))
+ self._logger.debug('Sent server\'s opening handshake: %r',
+ raw_response)
+
+
+# vi:sts=4 sw=4 et
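
compute_accept above implements the RFC 6455 key/accept derivation: SHA-1
over the client key concatenated with a fixed GUID, then base64. A
standard-library-only sketch reproduces the sample pair given in RFC 6455,
section 1.3:

    import base64
    from hashlib import sha1

    # The fixed GUID from RFC 6455 (common.WEBSOCKET_ACCEPT_UUID above).
    GUID = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'

    def accept_for(key):
        # Mirrors compute_accept: SHA-1 over key + GUID, then base64.
        return base64.b64encode(sha1(key + GUID).digest())

    # The sample key/accept pair from RFC 6455, section 1.3.
    print(accept_for(b'dGhlIHNhbXBsZSBub25jZQ=='))
    # -> b's3pPLMBiTxaQ9kYGzzhZRbK+xOo='
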
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/http_header_util.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/http_header_util.py
new file mode 100644
index 0000000000..21fde59af1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/http_header_util.py
@@ -0,0 +1,254 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Utilities for parsing and formatting headers that follow the grammar defined
+in HTTP RFC http://www.ietf.org/rfc/rfc2616.txt.
+"""
+
+from __future__ import absolute_import
+import six.moves.urllib.parse
+
+_SEPARATORS = '()<>@,;:\\"/[]?={} \t'
+
+
+def _is_char(c):
+ """Returns true iff c is in CHAR as specified in HTTP RFC."""
+
+ return ord(c) <= 127
+
+
+def _is_ctl(c):
+ """Returns true iff c is in CTL as specified in HTTP RFC."""
+
+ return ord(c) <= 31 or ord(c) == 127
+
+
+class ParsingState(object):
+ def __init__(self, data):
+ self.data = data
+ self.head = 0
+
+
+def peek(state, pos=0):
+ """Peeks the character at pos from the head of data."""
+
+ if state.head + pos >= len(state.data):
+ return None
+
+ return state.data[state.head + pos]
+
+
+def consume(state, amount=1):
+ """Consumes specified amount of bytes from the head and returns the
+ consumed bytes. If there's not enough bytes to consume, returns None.
+ """
+
+ if state.head + amount > len(state.data):
+ return None
+
+ result = state.data[state.head:state.head + amount]
+ state.head = state.head + amount
+ return result
+
+
+def consume_string(state, expected):
+ """Given a parsing state and a expected string, consumes the string from
+ the head. Returns True if consumed successfully. Otherwise, returns
+ False.
+ """
+
+ pos = 0
+
+ for c in expected:
+ if c != peek(state, pos):
+ return False
+ pos += 1
+
+ consume(state, pos)
+ return True
+
+
+def consume_lws(state):
+ """Consumes a LWS from the head. Returns True if any LWS is consumed.
+ Otherwise, returns False.
+
+ LWS = [CRLF] 1*( SP | HT )
+ """
+
+ original_head = state.head
+
+ consume_string(state, '\r\n')
+
+ pos = 0
+
+ while True:
+ c = peek(state, pos)
+ if c == ' ' or c == '\t':
+ pos += 1
+ else:
+ if pos == 0:
+ state.head = original_head
+ return False
+ else:
+ consume(state, pos)
+ return True
+
+
+def consume_lwses(state):
+ r"""Consumes \*LWS from the head."""
+
+ while consume_lws(state):
+ pass
+
+
+def consume_token(state):
+ """Consumes a token from the head. Returns the token or None if no token
+ was found.
+ """
+
+ pos = 0
+
+ while True:
+ c = peek(state, pos)
+ if c is None or c in _SEPARATORS or _is_ctl(c) or not _is_char(c):
+ if pos == 0:
+ return None
+
+ return consume(state, pos)
+ else:
+ pos += 1
+
+
+def consume_token_or_quoted_string(state):
+ """Consumes a token or a quoted-string, and returns the token or unquoted
+ string. If no token or quoted-string was found, returns None.
+ """
+
+ original_head = state.head
+
+ if not consume_string(state, '"'):
+ return consume_token(state)
+
+ result = []
+
+ expect_quoted_pair = False
+
+ while True:
+ if not expect_quoted_pair and consume_lws(state):
+ result.append(' ')
+ continue
+
+ c = consume(state)
+ if c is None:
+ # quoted-string is not enclosed with double quotation
+ state.head = original_head
+ return None
+ elif expect_quoted_pair:
+ expect_quoted_pair = False
+ if _is_char(c):
+ result.append(c)
+ else:
+ # Non CHAR character found in quoted-pair
+ state.head = original_head
+ return None
+ elif c == '\\':
+ expect_quoted_pair = True
+ elif c == '"':
+ return ''.join(result)
+ elif _is_ctl(c):
+ # Invalid character %r found in qdtext
+ state.head = original_head
+ return None
+ else:
+ result.append(c)
+
+
+def quote_if_necessary(s):
+ """Quotes arbitrary string into quoted-string."""
+
+ quote = False
+ if s == '':
+ return '""'
+
+ result = []
+ for c in s:
+ if c == '"' or c in _SEPARATORS or _is_ctl(c) or not _is_char(c):
+ quote = True
+
+ if c == '"' or _is_ctl(c):
+ result.append('\\' + c)
+ else:
+ result.append(c)
+
+ if quote:
+ return '"' + ''.join(result) + '"'
+ else:
+ return ''.join(result)
+
+
+def parse_uri(uri):
+ """Parse absolute URI then return host, port and resource."""
+
+ parsed = six.moves.urllib.parse.urlsplit(uri)
+ if parsed.scheme != 'wss' and parsed.scheme != 'ws':
+ # |uri| must be a relative URI.
+ # TODO(toyoshim): Should validate |uri|.
+ return None, None, uri
+
+ if parsed.hostname is None:
+ return None, None, None
+
+ port = None
+ try:
+ port = parsed.port
+ except ValueError:
+        # The port property raises ValueError on invalid port descriptions
+        # like 'ws://host:INVALID_PORT/path', where the assigned port is not
+        # *DIGIT. For Python 3.6 and later, ValueError is also raised for
+        # invalid port numbers such as 'ws://host:-1/path'. Earlier versions
+        # simply return None and ignore invalid port attributes.
+ return None, None, None
+
+ if port is None:
+ if parsed.scheme == 'ws':
+ port = 80
+ else:
+ port = 443
+
+ path = parsed.path
+ if not path:
+ path += '/'
+ if parsed.query:
+ path += '?' + parsed.query
+ if parsed.fragment:
+ path += '#' + parsed.fragment
+
+ return parsed.hostname, port, path
+
+
+# vi:sts=4 sw=4 et
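
parse_uri distinguishes absolute ws/wss URIs from relative ones and fills in
default ports, while quote_if_necessary quotes values containing separators.
A short sketch of the return values, with outputs shown as comments:

    from mod_pywebsocket import http_header_util

    # An absolute ws URI: the default port is filled in and the query string
    # is kept as part of the returned resource.
    print(http_header_util.parse_uri('ws://example.com/chat?room=1'))
    # -> ('example.com', 80, '/chat?room=1')

    # A relative URI: there is no host/port to validate, so the resource is
    # passed through unchanged.
    print(http_header_util.parse_uri('/chat'))
    # -> (None, None, '/chat')

    # quote_if_necessary wraps values containing separators in a quoted-string.
    print(http_header_util.quote_if_necessary('a b'))
    # -> "a b"
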
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/memorizingfile.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/memorizingfile.py
new file mode 100644
index 0000000000..d353967618
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/memorizingfile.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+#
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Memorizing file.
+
+A memorizing file wraps a file and memorizes lines read by readline.
+"""
+
+from __future__ import absolute_import
+import sys
+
+
+class MemorizingFile(object):
+ """MemorizingFile wraps a file and memorizes lines read by readline.
+
+ Note that data read by other methods are not memorized. This behavior
+ is good enough for memorizing lines SimpleHTTPServer reads before
+ the control reaches WebSocketRequestHandler.
+ """
+ def __init__(self, file_, max_memorized_lines=sys.maxsize):
+ """Construct an instance.
+
+        Args:
+            file_: the file object to wrap.
+            max_memorized_lines: the maximum number of lines to memorize.
+                                 Only the first max_memorized_lines are
+                                 memorized. Default: sys.maxsize.
+ """
+ self._file = file_
+ self._memorized_lines = []
+ self._max_memorized_lines = max_memorized_lines
+ self._buffered = False
+ self._buffered_line = None
+
+ def __getattribute__(self, name):
+ """Return a file attribute.
+
+ Returns the value overridden by this class for some attributes,
+ and forwards the call to _file for the other attributes.
+ """
+ if name in ('_file', '_memorized_lines', '_max_memorized_lines',
+ '_buffered', '_buffered_line', 'readline',
+ 'get_memorized_lines'):
+ return object.__getattribute__(self, name)
+ return self._file.__getattribute__(name)
+
+ def readline(self, size=-1):
+ """Override file.readline and memorize the line read.
+
+        Note that even if size is specified and smaller than the actual line
+        length, the whole line will be read from the underlying file object;
+        the remainder is returned by subsequent readline calls.
+ """
+ if self._buffered:
+ line = self._buffered_line
+ self._buffered = False
+ else:
+ line = self._file.readline()
+ if line and len(self._memorized_lines) < self._max_memorized_lines:
+ self._memorized_lines.append(line)
+ if size >= 0 and size < len(line):
+ self._buffered = True
+ self._buffered_line = line[size:]
+ return line[:size]
+ return line
+
+ def get_memorized_lines(self):
+ """Get lines memorized so far."""
+ return self._memorized_lines
+
+
+# vi:sts=4 sw=4 et
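
MemorizingFile records only the lines fetched through readline, which is what
later lets the standalone request handler get back at the request line and
headers the HTTP server machinery has already consumed. A minimal sketch, with
an in-memory file standing in for the socket:

    import io

    from mod_pywebsocket import memorizingfile

    raw = io.BytesIO(b'GET /echo HTTP/1.1\r\nHost: example.com\r\n\r\n')
    wrapped = memorizingfile.MemorizingFile(raw, max_memorized_lines=16)

    # Lines fetched through readline are recorded...
    print(wrapped.readline())   # b'GET /echo HTTP/1.1\r\n'
    print(wrapped.readline())   # b'Host: example.com\r\n'

    # ...and can be replayed later for logging or re-parsing.
    print(wrapped.get_memorized_lines())
    # -> [b'GET /echo HTTP/1.1\r\n', b'Host: example.com\r\n']
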
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/msgutil.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/msgutil.py
new file mode 100644
index 0000000000..f58ca78e14
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/msgutil.py
@@ -0,0 +1,214 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Message related utilities.
+
+Note: request.connection.write/read are used in this module, even though
+the mod_python documentation says they should be used only in connection
+handlers. Unfortunately, we have no other option. For example,
+request.write/read are not suitable because they don't allow writing/reading
+raw bytes directly.
+"""
+
+from __future__ import absolute_import
+import six.moves.queue
+import threading
+
+# Export Exception symbols from msgutil for backward compatibility
+from mod_pywebsocket._stream_exceptions import ConnectionTerminatedException
+from mod_pywebsocket._stream_exceptions import InvalidFrameException
+from mod_pywebsocket._stream_exceptions import BadOperationException
+from mod_pywebsocket._stream_exceptions import UnsupportedFrameException
+
+
+# An API for handler to send/receive WebSocket messages.
+def close_connection(request):
+ """Close connection.
+
+ Args:
+ request: mod_python request.
+ """
+ request.ws_stream.close_connection()
+
+
+def send_message(request, payload_data, end=True, binary=False):
+ """Send a message (or part of a message).
+
+ Args:
+ request: mod_python request.
+ payload_data: unicode text or str binary to send.
+ end: True to terminate a message.
+ False to send payload_data as part of a message that is to be
+ terminated by next or later send_message call with end=True.
+ binary: send payload_data as binary frame(s).
+ Raises:
+        BadOperationException: when the server has already terminated.
+ """
+ request.ws_stream.send_message(payload_data, end, binary)
+
+
+def receive_message(request):
+ """Receive a WebSocket frame and return its payload as a text in
+ unicode or a binary in str.
+
+ Args:
+ request: mod_python request.
+ Raises:
+        InvalidFrameException: when the client sends an invalid frame.
+        UnsupportedFrameException: when the client sends an unsupported frame,
+                                   e.g. a reserved bit is set but no extension
+                                   can recognize it.
+        InvalidUTF8Exception: when the client sends a text frame containing an
+                              invalid UTF-8 string.
+        ConnectionTerminatedException: when the connection is closed
+                                       unexpectedly.
+        BadOperationException: when the client has already terminated.
+ """
+ return request.ws_stream.receive_message()
+
+
+def send_ping(request, body):
+ request.ws_stream.send_ping(body)
+
+
+class MessageReceiver(threading.Thread):
+ """This class receives messages from the client.
+
+ This class provides three ways to receive messages: blocking,
+ non-blocking, and via callback. Callback has the highest precedence.
+
+ Note: This class should not be used with the standalone server for wss
+ because pyOpenSSL used by the server raises a fatal error if the socket
+ is accessed from multiple threads.
+ """
+ def __init__(self, request, onmessage=None):
+ """Construct an instance.
+
+ Args:
+ request: mod_python request.
+ onmessage: a function to be called when a message is received.
+ May be None. If not None, the function is called on
+ another thread. In that case, MessageReceiver.receive
+ and MessageReceiver.receive_nowait are useless
+ because they will never return any messages.
+ """
+
+ threading.Thread.__init__(self)
+ self._request = request
+ self._queue = six.moves.queue.Queue()
+ self._onmessage = onmessage
+ self._stop_requested = False
+ self.setDaemon(True)
+ self.start()
+
+ def run(self):
+ try:
+ while not self._stop_requested:
+ message = receive_message(self._request)
+ if self._onmessage:
+ self._onmessage(message)
+ else:
+ self._queue.put(message)
+ finally:
+ close_connection(self._request)
+
+ def receive(self):
+ """ Receive a message from the channel, blocking.
+
+ Returns:
+ message as a unicode string.
+ """
+ return self._queue.get()
+
+ def receive_nowait(self):
+ """ Receive a message from the channel, non-blocking.
+
+ Returns:
+ message as a unicode string if available. None otherwise.
+ """
+ try:
+ message = self._queue.get_nowait()
+ except six.moves.queue.Empty:
+ message = None
+ return message
+
+ def stop(self):
+ """Request to stop this instance.
+
+ The instance will be stopped after receiving the next message.
+ This method may not be very useful, but there is no clean way
+ in Python to forcefully stop a running thread.
+ """
+ self._stop_requested = True
+
+
+class MessageSender(threading.Thread):
+ """This class sends messages to the client.
+
+ This class provides both synchronous and asynchronous ways to send
+ messages.
+
+ Note: This class should not be used with the standalone server for wss
+ because pyOpenSSL used by the server raises a fatal error if the socket
+ is accessed from multiple threads.
+ """
+ def __init__(self, request):
+ """Construct an instance.
+
+ Args:
+ request: mod_python request.
+ """
+ threading.Thread.__init__(self)
+ self._request = request
+ self._queue = six.moves.queue.Queue()
+ self.setDaemon(True)
+ self.start()
+
+ def run(self):
+ while True:
+ message, condition = self._queue.get()
+ condition.acquire()
+ send_message(self._request, message)
+ condition.notify()
+ condition.release()
+
+ def send(self, message):
+ """Send a message, blocking."""
+
+ condition = threading.Condition()
+ condition.acquire()
+ self._queue.put((message, condition))
+ condition.wait()
+
+ def send_nowait(self, message):
+ """Send a message, non-blocking."""
+
+ self._queue.put((message, threading.Condition()))
+
+
+# vi:sts=4 sw=4 et
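
send_message and receive_message above are the API a handler module uses. The
sketch below is a minimal echo handler in the conventional pywebsocket
handler-file form (web_socket_do_extra_handshake and web_socket_transfer_data
are the entry points the dispatcher looks for); it is an illustration, not a
handler shipped with this patch:

    from mod_pywebsocket import msgutil

    def web_socket_do_extra_handshake(request):
        # Accept the handshake as-is; raising AbortedByUserException here
        # would reject the connection instead.
        pass

    def web_socket_transfer_data(request):
        # Echo every message back until the client closes the connection,
        # at which point receive_message raises and the server cleans up.
        while True:
            message = msgutil.receive_message(request)
            msgutil.send_message(request, message)
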
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/request_handler.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/request_handler.py
new file mode 100644
index 0000000000..5e9c875dc7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/request_handler.py
@@ -0,0 +1,319 @@
+# Copyright 2020, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Request Handler and Request/Connection classes for standalone server.
+"""
+
+import os
+
+from six.moves import CGIHTTPServer
+from six.moves import http_client
+
+from mod_pywebsocket import common
+from mod_pywebsocket import dispatch
+from mod_pywebsocket import handshake
+from mod_pywebsocket import http_header_util
+from mod_pywebsocket import memorizingfile
+from mod_pywebsocket import util
+
+# 1024 is practically large enough to contain WebSocket handshake lines.
+_MAX_MEMORIZED_LINES = 1024
+
+
+class _StandaloneConnection(object):
+ """Mimic mod_python mp_conn."""
+ def __init__(self, request_handler):
+ """Construct an instance.
+
+ Args:
+ request_handler: A WebSocketRequestHandler instance.
+ """
+
+ self._request_handler = request_handler
+
+ def get_local_addr(self):
+ """Getter to mimic mp_conn.local_addr."""
+
+ return (self._request_handler.server.server_name,
+ self._request_handler.server.server_port)
+
+ local_addr = property(get_local_addr)
+
+ def get_remote_addr(self):
+ """Getter to mimic mp_conn.remote_addr.
+
+ Setting the property in __init__ won't work because the request
+ handler is not initialized yet there."""
+
+ return self._request_handler.client_address
+
+ remote_addr = property(get_remote_addr)
+
+ def write(self, data):
+ """Mimic mp_conn.write()."""
+
+ return self._request_handler.wfile.write(data)
+
+ def read(self, length):
+ """Mimic mp_conn.read()."""
+
+ return self._request_handler.rfile.read(length)
+
+ def get_memorized_lines(self):
+ """Get memorized lines."""
+
+ return self._request_handler.rfile.get_memorized_lines()
+
+
+class _StandaloneRequest(object):
+ """Mimic mod_python request."""
+ def __init__(self, request_handler, use_tls):
+ """Construct an instance.
+
+ Args:
+ request_handler: A WebSocketRequestHandler instance.
+ """
+
+ self._logger = util.get_class_logger(self)
+
+ self._request_handler = request_handler
+ self.connection = _StandaloneConnection(request_handler)
+ self._use_tls = use_tls
+ self.headers_in = request_handler.headers
+
+ def get_uri(self):
+ """Getter to mimic request.uri.
+
+ This method returns the raw data at the Request-URI part of the
+ Request-Line, while the uri method on the request object of mod_python
+ returns the path portion after parsing the raw data. This behavior is
+ kept for compatibility.
+ """
+
+ return self._request_handler.path
+
+ uri = property(get_uri)
+
+ def get_unparsed_uri(self):
+ """Getter to mimic request.unparsed_uri."""
+
+ return self._request_handler.path
+
+ unparsed_uri = property(get_unparsed_uri)
+
+ def get_method(self):
+ """Getter to mimic request.method."""
+
+ return self._request_handler.command
+
+ method = property(get_method)
+
+ def get_protocol(self):
+ """Getter to mimic request.protocol."""
+
+ return self._request_handler.request_version
+
+ protocol = property(get_protocol)
+
+ def is_https(self):
+ """Mimic request.is_https()."""
+
+ return self._use_tls
+
+
+class WebSocketRequestHandler(CGIHTTPServer.CGIHTTPRequestHandler):
+ """CGIHTTPRequestHandler specialized for WebSocket."""
+
+ # Use httplib.HTTPMessage instead of mimetools.Message.
+ MessageClass = http_client.HTTPMessage
+
+ def setup(self):
+ """Override SocketServer.StreamRequestHandler.setup to wrap rfile
+ with MemorizingFile.
+
+ This method will be called by BaseRequestHandler's constructor
+ before calling BaseHTTPRequestHandler.handle.
+ BaseHTTPRequestHandler.handle will call
+ BaseHTTPRequestHandler.handle_one_request and it will call
+ WebSocketRequestHandler.parse_request.
+ """
+
+ # Call superclass's setup to prepare rfile, wfile, etc. See setup
+ # definition on the root class SocketServer.StreamRequestHandler to
+ # understand what this does.
+ CGIHTTPServer.CGIHTTPRequestHandler.setup(self)
+
+ self.rfile = memorizingfile.MemorizingFile(
+ self.rfile, max_memorized_lines=_MAX_MEMORIZED_LINES)
+
+ def __init__(self, request, client_address, server):
+ self._logger = util.get_class_logger(self)
+
+ self._options = server.websocket_server_options
+
+ # Overrides CGIHTTPServerRequestHandler.cgi_directories.
+ self.cgi_directories = self._options.cgi_directories
+ # Replace CGIHTTPRequestHandler.is_executable method.
+ if self._options.is_executable_method is not None:
+ self.is_executable = self._options.is_executable_method
+
+ # This actually calls BaseRequestHandler.__init__.
+ CGIHTTPServer.CGIHTTPRequestHandler.__init__(self, request,
+ client_address, server)
+
+ def parse_request(self):
+ """Override BaseHTTPServer.BaseHTTPRequestHandler.parse_request.
+
+ Return True to continue processing for HTTP(S), False otherwise.
+
+ See BaseHTTPRequestHandler.handle_one_request method which calls
+ this method to understand how the return value will be handled.
+ """
+
+ # We hook parse_request method, but also call the original
+ # CGIHTTPRequestHandler.parse_request since when we return False,
+ # CGIHTTPRequestHandler.handle_one_request continues processing and
+ # it needs variables set by CGIHTTPRequestHandler.parse_request.
+ #
+ # Variables set by this method will be also used by WebSocket request
+ # handling (self.path, self.command, self.requestline, etc. See also
+ # how _StandaloneRequest's members are implemented using these
+ # attributes).
+ if not CGIHTTPServer.CGIHTTPRequestHandler.parse_request(self):
+ return False
+
+ if self._options.use_basic_auth:
+ auth = self.headers.get('Authorization')
+ if auth != self._options.basic_auth_credential:
+ self.send_response(401)
+ self.send_header('WWW-Authenticate',
+ 'Basic realm="Pywebsocket"')
+ self.end_headers()
+ self._logger.info('Request basic authentication')
+ return False
+
+ host, port, resource = http_header_util.parse_uri(self.path)
+ if resource is None:
+ self._logger.info('Invalid URI: %r', self.path)
+ self._logger.info('Fallback to CGIHTTPRequestHandler')
+ return True
+ server_options = self.server.websocket_server_options
+ if host is not None:
+ validation_host = server_options.validation_host
+ if validation_host is not None and host != validation_host:
+ self._logger.info('Invalid host: %r (expected: %r)', host,
+ validation_host)
+ self._logger.info('Fallback to CGIHTTPRequestHandler')
+ return True
+ if port is not None:
+ validation_port = server_options.validation_port
+ if validation_port is not None and port != validation_port:
+ self._logger.info('Invalid port: %r (expected: %r)', port,
+ validation_port)
+ self._logger.info('Fallback to CGIHTTPRequestHandler')
+ return True
+ self.path = resource
+
+ request = _StandaloneRequest(self, self._options.use_tls)
+
+ try:
+ # Fallback to default http handler for request paths for which
+ # we don't have request handlers.
+ if not self._options.dispatcher.get_handler_suite(self.path):
+ self._logger.info('No handler for resource: %r', self.path)
+ self._logger.info('Fallback to CGIHTTPRequestHandler')
+ return True
+ except dispatch.DispatchException as e:
+ self._logger.info('Dispatch failed for error: %s', e)
+ self.send_error(e.status)
+ return False
+
+ # If any Exceptions without except clause setup (including
+ # DispatchException) is raised below this point, it will be caught
+ # and logged by WebSocketServer.
+
+ try:
+ try:
+ handshake.do_handshake(request, self._options.dispatcher)
+ except handshake.VersionException as e:
+ self._logger.info('Handshake failed for version error: %s', e)
+ self.send_response(common.HTTP_STATUS_BAD_REQUEST)
+ self.send_header(common.SEC_WEBSOCKET_VERSION_HEADER,
+ e.supported_versions)
+ self.end_headers()
+ return False
+ except handshake.HandshakeException as e:
+ # Handshake for ws(s) failed.
+ self._logger.info('Handshake failed for error: %s', e)
+ self.send_error(e.status)
+ return False
+
+ request._dispatcher = self._options.dispatcher
+ self._options.dispatcher.transfer_data(request)
+ except handshake.AbortedByUserException as e:
+ self._logger.info('Aborted: %s', e)
+ return False
+
+ def log_request(self, code='-', size='-'):
+ """Override BaseHTTPServer.log_request."""
+
+ self._logger.info('"%s" %s %s', self.requestline, str(code), str(size))
+
+ def log_error(self, *args):
+ """Override BaseHTTPServer.log_error."""
+
+        # Despite the name, this method is for warnings rather than errors.
+ # For example, HTTP status code is logged by this method.
+ self._logger.warning('%s - %s', self.address_string(),
+ args[0] % args[1:])
+
+ def is_cgi(self):
+ """Test whether self.path corresponds to a CGI script.
+
+        Adds an extra check that self.path doesn't contain '..'.
+        Also checks whether the file is executable. If the file is not
+        executable, it is handled as a static file or directory rather than
+        a CGI script.
+ """
+
+ if CGIHTTPServer.CGIHTTPRequestHandler.is_cgi(self):
+ if '..' in self.path:
+ return False
+ # strip query parameter from request path
+ resource_name = self.path.split('?', 2)[0]
+ # convert resource_name into real path name in filesystem.
+ scriptfile = self.translate_path(resource_name)
+ if not os.path.isfile(scriptfile):
+ return False
+ if not self.is_executable(scriptfile):
+ return False
+ return True
+ return False
+
+
+# vi:sts=4 sw=4 et
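
The basic-auth branch of parse_request above compares the raw Authorization
header against a preconfigured credential string. As a sketch of what a
matching value looks like, assuming the configured credential is the complete
header value that the verbatim comparison implies (the user:password pair
below is made up for illustration):

    import base64

    # HTTP Basic auth sends 'Basic ' followed by base64(user:password), so a
    # matching credential has to be that full string.
    user_and_password = 'test:test'  # placeholder credentials
    header_value = 'Basic ' + base64.b64encode(
        user_and_password.encode('utf-8')).decode('utf-8')
    print(header_value)  # -> Basic dGVzdDp0ZXN0
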
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/server_util.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/server_util.py
new file mode 100644
index 0000000000..8f9e273e97
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/server_util.py
@@ -0,0 +1,87 @@
+# Copyright 2020, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Server related utilities."""
+
+import logging
+import logging.handlers
+import threading
+import time
+
+from mod_pywebsocket import common
+from mod_pywebsocket import util
+
+
+def _get_logger_from_class(c):
+ return logging.getLogger('%s.%s' % (c.__module__, c.__name__))
+
+
+def configure_logging(options):
+ logging.addLevelName(common.LOGLEVEL_FINE, 'FINE')
+
+ logger = logging.getLogger()
+ logger.setLevel(logging.getLevelName(options.log_level.upper()))
+ if options.log_file:
+ handler = logging.handlers.RotatingFileHandler(options.log_file, 'a',
+ options.log_max,
+ options.log_count)
+ else:
+ handler = logging.StreamHandler()
+ formatter = logging.Formatter(
+ '[%(asctime)s] [%(levelname)s] %(name)s: %(message)s')
+ handler.setFormatter(formatter)
+ logger.addHandler(handler)
+
+ deflate_log_level_name = logging.getLevelName(
+ options.deflate_log_level.upper())
+ _get_logger_from_class(util._Deflater).setLevel(deflate_log_level_name)
+ _get_logger_from_class(util._Inflater).setLevel(deflate_log_level_name)
+
+
+class ThreadMonitor(threading.Thread):
+ daemon = True
+
+ def __init__(self, interval_in_sec):
+ threading.Thread.__init__(self, name='ThreadMonitor')
+
+ self._logger = util.get_class_logger(self)
+
+ self._interval_in_sec = interval_in_sec
+
+ def run(self):
+ while True:
+ thread_name_list = []
+ for thread in threading.enumerate():
+ thread_name_list.append(thread.name)
+ self._logger.info("%d active threads: %s",
+ threading.active_count(),
+ ', '.join(thread_name_list))
+ time.sleep(self._interval_in_sec)
+
+
+# vi:sts=4 sw=4 et
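
configure_logging only needs a handful of attributes on the options object, so
it can be exercised without the full standalone argument parser. A sketch with
a hand-built namespace; the attribute values are illustrative, and the real
values come from the parser in standalone.py:

    import argparse

    from mod_pywebsocket import server_util

    options = argparse.Namespace(
        log_level='debug',            # root logger level, upper-cased inside
        log_file='',                  # empty: log to stderr via StreamHandler
        log_max=1024 * 256,           # rotation size, used only with log_file
        log_count=5,                  # number of rotated files to keep
        deflate_log_level='warning')  # level for the deflate/inflate helpers

    server_util.configure_logging(options)
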
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/standalone.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/standalone.py
new file mode 100755
index 0000000000..0a3bcdbacd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/standalone.py
@@ -0,0 +1,481 @@
+#!/usr/bin/env python
+#
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Standalone WebSocket server.
+
+Use this file to launch pywebsocket as a standalone server.
+
+
+BASIC USAGE
+===========
+
+Go to the src directory and run
+
+ $ python mod_pywebsocket/standalone.py [-p <ws_port>]
+ [-w <websock_handlers>]
+ [-d <document_root>]
+
+<ws_port> is the port number to use for ws:// connection.
+
+<document_root> is the path to the root directory of HTML files.
+
+<websock_handlers> is the path to the root directory of WebSocket handlers.
+If not specified, <document_root> will be used. See __init__.py (or
+run $ pydoc mod_pywebsocket) for how to write WebSocket handlers.
+
+For more detail and other options, run
+
+ $ python mod_pywebsocket/standalone.py --help
+
+or see _build_option_parser method below.
+
+For troubleshooting, adding "--log_level debug" might help you.
+
+
+TRY DEMO
+========
+
+Go to the src directory and run standalone.py with -d option to set the
+document root to the directory containing example HTMLs and handlers like this:
+
+ $ cd src
+ $ PYTHONPATH=. python mod_pywebsocket/standalone.py -d example
+
+to launch pywebsocket with the sample handler and html on port 80. Open
+http://localhost/console.html, click the connect button, type something into
+the text box next to the send button and click the send button. If everything
+is working, you'll see the message you typed echoed by the server.
+
+
+USING TLS
+=========
+
+To run the standalone server with TLS support, run it with the -t, -k, and -c
+options. When TLS is enabled, the standalone server accepts only TLS connections.
+
+Note that when the ssl module is used and the key/cert location is incorrect,
+the TLS connection fails silently, while pyOpenSSL fails on startup.
+
+Example:
+
+ $ PYTHONPATH=. python mod_pywebsocket/standalone.py \
+ -d example \
+ -p 10443 \
+ -t \
+ -c ../test/cert/cert.pem \
+ -k ../test/cert/key.pem \
+
+Note that when passing a relative path to the -c and -k options, it will be
+resolved using the document root directory as the base.
+
+
+USING CLIENT AUTHENTICATION
+===========================
+
+To run the standalone server with TLS client authentication support, run it with
+--tls-client-auth and --tls-client-ca options in addition to ones required for
+TLS support.
+
+Example:
+
+ $ PYTHONPATH=. python mod_pywebsocket/standalone.py -d example -p 10443 -t \
+ -c ../test/cert/cert.pem -k ../test/cert/key.pem \
+ --tls-client-auth \
+ --tls-client-ca=../test/cert/cacert.pem
+
+Note that when passing a relative path to the --tls-client-ca option, it will
+be resolved using the document root directory as the base.
+
+
+CONFIGURATION FILE
+==================
+
+You can also write a configuration file and use it by specifying its path with
+the --config option. Write the configuration file following the documentation
+of the Python ConfigParser library. The name of each entry must be the
+long-form argument name. E.g. to set the log level to debug, add the following
+line:
+
+log_level=debug
+
+For options which don't take a value, add some placeholder value. E.g. for the
+--tls option, add the following line:
+
+tls=True
+
+Note that tls will be enabled even if you write tls=False, because the value
+part is ignored.
+
+When both a command line argument and a configuration file entry are set for
+the same configuration item, the command line value will override the one in
+the configuration file.
+
+
+THREADING
+=========
+
+This server is derived from SocketServer.ThreadingMixIn. Hence a thread is
+used for each request.
+
+
+SECURITY WARNING
+================
+
+This uses CGIHTTPServer and CGIHTTPServer is not secure.
+It may execute arbitrary Python code or external programs. It should not be
+used outside a firewall.
+"""
+
+from __future__ import absolute_import
+from six.moves import configparser
+import base64
+import logging
+import argparse
+import os
+import six
+import sys
+import traceback
+
+from mod_pywebsocket import common
+from mod_pywebsocket import util
+from mod_pywebsocket import server_util
+from mod_pywebsocket.websocket_server import WebSocketServer
+
+_DEFAULT_LOG_MAX_BYTES = 1024 * 256
+_DEFAULT_LOG_BACKUP_COUNT = 5
+
+_DEFAULT_REQUEST_QUEUE_SIZE = 128
+
+
+def _build_option_parser():
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument(
+ '--config',
+ dest='config_file',
+ type=six.text_type,
+ default=None,
+ help=('Path to configuration file. See the file comment '
+ 'at the top of this file for the configuration '
+ 'file format'))
+ parser.add_argument('-H',
+ '--server-host',
+ '--server_host',
+ dest='server_host',
+ default='',
+ help='server hostname to listen to')
+ parser.add_argument('-V',
+ '--validation-host',
+ '--validation_host',
+ dest='validation_host',
+ default=None,
+ help='server hostname to validate in absolute path.')
+ parser.add_argument('-p',
+ '--port',
+ dest='port',
+ type=int,
+ default=common.DEFAULT_WEB_SOCKET_PORT,
+ help='port to listen to')
+ parser.add_argument('-P',
+ '--validation-port',
+ '--validation_port',
+ dest='validation_port',
+ type=int,
+ default=None,
+ help='server port to validate in absolute path.')
+ parser.add_argument(
+ '-w',
+ '--websock-handlers',
+ '--websock_handlers',
+ dest='websock_handlers',
+ default='.',
+ help=('The root directory of WebSocket handler files. '
+ 'If the path is relative, --document-root is used '
+ 'as the base.'))
+ parser.add_argument('-m',
+ '--websock-handlers-map-file',
+ '--websock_handlers_map_file',
+ dest='websock_handlers_map_file',
+ default=None,
+ help=('WebSocket handlers map file. '
+ 'Each line consists of alias_resource_path and '
+ 'existing_resource_path, separated by spaces.'))
+ parser.add_argument('-s',
+ '--scan-dir',
+ '--scan_dir',
+ dest='scan_dir',
+ default=None,
+ help=('Must be a directory under --websock-handlers. '
+ 'Only handlers under this directory are scanned '
+ 'and registered to the server. '
+ 'Useful for saving scan time when the handler '
+ 'root directory contains lots of files that are '
+                              'not handler files or are handler files that '
+                              'you don\'t want to be registered.'))
+ parser.add_argument(
+ '--allow-handlers-outside-root-dir',
+ '--allow_handlers_outside_root_dir',
+ dest='allow_handlers_outside_root_dir',
+ action='store_true',
+ default=False,
+ help=('Scans WebSocket handlers even if their canonical '
+ 'path is not under --websock-handlers.'))
+ parser.add_argument('-d',
+ '--document-root',
+ '--document_root',
+ dest='document_root',
+ default='.',
+ help='Document root directory.')
+ parser.add_argument('-x',
+ '--cgi-paths',
+ '--cgi_paths',
+ dest='cgi_paths',
+ default=None,
+                        help=('CGI paths relative to document_root. '
+                              'Comma-separated. (e.g. -x /cgi,/htbin) '
+ 'Files under document_root/cgi_path are handled '
+ 'as CGI programs. Must be executable.'))
+ parser.add_argument('-t',
+ '--tls',
+ dest='use_tls',
+ action='store_true',
+ default=False,
+ help='use TLS (wss://)')
+ parser.add_argument('-k',
+ '--private-key',
+ '--private_key',
+ dest='private_key',
+ default='',
+ help='TLS private key file.')
+ parser.add_argument('-c',
+ '--certificate',
+ dest='certificate',
+ default='',
+ help='TLS certificate file.')
+ parser.add_argument('--tls-client-auth',
+ dest='tls_client_auth',
+ action='store_true',
+ default=False,
+ help='Requests TLS client auth on every connection.')
+ parser.add_argument('--tls-client-cert-optional',
+ dest='tls_client_cert_optional',
+ action='store_true',
+ default=False,
+ help=('Makes client certificate optional even though '
+ 'TLS client auth is enabled.'))
+ parser.add_argument('--tls-client-ca',
+ dest='tls_client_ca',
+ default='',
+ help=('Specifies a pem file which contains a set of '
+ 'concatenated CA certificates which are used to '
+ 'validate certificates passed from clients'))
+ parser.add_argument('--basic-auth',
+ dest='use_basic_auth',
+ action='store_true',
+ default=False,
+ help='Requires Basic authentication.')
+ parser.add_argument(
+ '--basic-auth-credential',
+ dest='basic_auth_credential',
+ default='test:test',
+ help='Specifies the credential of basic authentication '
+ 'by username:password pair (e.g. test:test).')
+ parser.add_argument('-l',
+ '--log-file',
+ '--log_file',
+ dest='log_file',
+ default='',
+ help='Log file.')
+ # Custom log level:
+ # - FINE: Prints status of each frame processing step
+ parser.add_argument('--log-level',
+ '--log_level',
+ type=six.text_type,
+ dest='log_level',
+ default='warn',
+ choices=[
+ 'fine', 'debug', 'info', 'warning', 'warn',
+ 'error', 'critical'
+ ],
+ help='Log level.')
+ parser.add_argument(
+ '--deflate-log-level',
+ '--deflate_log_level',
+ type=six.text_type,
+ dest='deflate_log_level',
+ default='warn',
+ choices=['debug', 'info', 'warning', 'warn', 'error', 'critical'],
+ help='Log level for _Deflater and _Inflater.')
+ parser.add_argument('--thread-monitor-interval-in-sec',
+ '--thread_monitor_interval_in_sec',
+ dest='thread_monitor_interval_in_sec',
+ type=int,
+ default=-1,
+                        help=('If a positive integer is specified, run a thread '
+                             'monitor to show the status of server threads '
+                             'periodically at the specified interval in '
+                             'seconds. If a non-positive integer is specified, '
+                             'disable the thread monitor.'))
+ parser.add_argument('--log-max',
+ '--log_max',
+ dest='log_max',
+ type=int,
+ default=_DEFAULT_LOG_MAX_BYTES,
+ help='Log maximum bytes')
+ parser.add_argument('--log-count',
+ '--log_count',
+ dest='log_count',
+ type=int,
+ default=_DEFAULT_LOG_BACKUP_COUNT,
+ help='Log backup count')
+ parser.add_argument('-q',
+ '--queue',
+ dest='request_queue_size',
+ type=int,
+ default=_DEFAULT_REQUEST_QUEUE_SIZE,
+ help='request queue size')
+
+ return parser
+
+
+def _parse_args_and_config(args):
+ parser = _build_option_parser()
+
+ # First, parse options without configuration file.
+ temporary_options, temporary_args = parser.parse_known_args(args=args)
+ if temporary_args:
+ logging.critical('Unrecognized positional arguments: %r',
+ temporary_args)
+ sys.exit(1)
+
+ if temporary_options.config_file:
+ try:
+ config_fp = open(temporary_options.config_file, 'r')
+ except IOError as e:
+ logging.critical('Failed to open configuration file %r: %r',
+ temporary_options.config_file, e)
+ sys.exit(1)
+
+ config_parser = configparser.SafeConfigParser()
+ config_parser.readfp(config_fp)
+ config_fp.close()
+
+ args_from_config = []
+ for name, value in config_parser.items('pywebsocket'):
+ args_from_config.append('--' + name)
+ args_from_config.append(value)
+ if args is None:
+ args = args_from_config
+ else:
+ args = args_from_config + args
+ return parser.parse_known_args(args=args)
+ else:
+ return temporary_options, temporary_args
+
+
+def _main(args=None):
+ """You can call this function from your own program, but please note that
+ this function has some side-effects that might affect your program. For
+ example, it changes the current directory.
+ """
+
+ options, args = _parse_args_and_config(args=args)
+
+ os.chdir(options.document_root)
+
+ server_util.configure_logging(options)
+
+ # TODO(tyoshino): Clean up initialization of CGI related values. Move some
+ # of code here to WebSocketRequestHandler class if it's better.
+ options.cgi_directories = []
+ options.is_executable_method = None
+ if options.cgi_paths:
+ options.cgi_directories = options.cgi_paths.split(',')
+ if sys.platform in ('cygwin', 'win32'):
+ cygwin_path = None
+ # For Win32 Python, it is expected that CYGWIN_PATH
+ # is set to a directory of cygwin binaries.
+ # For example, websocket_server.py in Chromium sets CYGWIN_PATH to
+ # full path of third_party/cygwin/bin.
+ if 'CYGWIN_PATH' in os.environ:
+ cygwin_path = os.environ['CYGWIN_PATH']
+
+ def __check_script(scriptpath):
+ return util.get_script_interp(scriptpath, cygwin_path)
+
+ options.is_executable_method = __check_script
+
+ if options.use_tls:
+ logging.debug('Using ssl module')
+
+ if not options.private_key or not options.certificate:
+ logging.critical(
+ 'To use TLS, specify private_key and certificate.')
+ sys.exit(1)
+
+ if (options.tls_client_cert_optional and not options.tls_client_auth):
+ logging.critical('Client authentication must be enabled to '
+ 'specify tls_client_cert_optional')
+ sys.exit(1)
+ else:
+ if options.tls_client_auth:
+ logging.critical('TLS must be enabled for client authentication.')
+ sys.exit(1)
+
+ if options.tls_client_cert_optional:
+ logging.critical('TLS must be enabled for client authentication.')
+ sys.exit(1)
+
+ if not options.scan_dir:
+ options.scan_dir = options.websock_handlers
+
+ if options.use_basic_auth:
+ options.basic_auth_credential = 'Basic ' + base64.b64encode(
+ options.basic_auth_credential.encode('UTF-8')).decode()
+
+ try:
+ if options.thread_monitor_interval_in_sec > 0:
+ # Run a thread monitor to show the status of server threads for
+ # debugging.
+ server_util.ThreadMonitor(
+ options.thread_monitor_interval_in_sec).start()
+
+ server = WebSocketServer(options)
+ server.serve_forever()
+ except Exception as e:
+ logging.critical('mod_pywebsocket: %s' % e)
+ logging.critical('mod_pywebsocket: %s' % traceback.format_exc())
+ sys.exit(1)
+
+
+if __name__ == '__main__':
+ _main(sys.argv[1:])
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/stream.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/stream.py
new file mode 100644
index 0000000000..82d1ea619c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/stream.py
@@ -0,0 +1,950 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""This file provides classes and helper functions for parsing/building frames
+of the WebSocket protocol (RFC 6455).
+
+Specification:
+http://tools.ietf.org/html/rfc6455
+"""
+
+from collections import deque
+import logging
+import os
+import struct
+import time
+import socket
+import six
+
+from mod_pywebsocket import common
+from mod_pywebsocket import util
+from mod_pywebsocket._stream_exceptions import BadOperationException
+from mod_pywebsocket._stream_exceptions import ConnectionTerminatedException
+from mod_pywebsocket._stream_exceptions import InvalidFrameException
+from mod_pywebsocket._stream_exceptions import InvalidUTF8Exception
+from mod_pywebsocket._stream_exceptions import UnsupportedFrameException
+
+_NOOP_MASKER = util.NoopMasker()
+
+
+class Frame(object):
+ def __init__(self,
+ fin=1,
+ rsv1=0,
+ rsv2=0,
+ rsv3=0,
+ opcode=None,
+ payload=b''):
+ self.fin = fin
+ self.rsv1 = rsv1
+ self.rsv2 = rsv2
+ self.rsv3 = rsv3
+ self.opcode = opcode
+ self.payload = payload
+
+
+# Helper functions made public to be used for writing unittests for WebSocket
+# clients.
+
+
+def create_length_header(length, mask):
+ """Creates a length header.
+
+ Args:
+ length: Frame length. Must be less than 2^63.
+ mask: Mask bit. Must be boolean.
+
+ Raises:
+ ValueError: when bad data is given.
+ """
+
+ if mask:
+ mask_bit = 1 << 7
+ else:
+ mask_bit = 0
+
+ if length < 0:
+        raise ValueError('length must be a non-negative integer')
+ elif length <= 125:
+ return util.pack_byte(mask_bit | length)
+ elif length < (1 << 16):
+ return util.pack_byte(mask_bit | 126) + struct.pack('!H', length)
+ elif length < (1 << 63):
+ return util.pack_byte(mask_bit | 127) + struct.pack('!Q', length)
+ else:
+ raise ValueError('Payload is too big for one frame')
+
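+# For illustration, with the encoding rules above:
+#   create_length_header(5, mask=False)  == b'\x05'
+#   create_length_header(300, mask=True) == b'\xfe\x01\x2c'
+# i.e. a 7-bit length for small payloads, or the escape value 126 followed by
+# a 16-bit big-endian length, with the mask bit in the most significant bit
+# of the first length octet.
+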
+
+def create_header(opcode, payload_length, fin, rsv1, rsv2, rsv3, mask):
+ """Creates a frame header.
+
+ Raises:
+ Exception: when bad data is given.
+ """
+
+ if opcode < 0 or 0xf < opcode:
+ raise ValueError('Opcode out of range')
+
+ if payload_length < 0 or (1 << 63) <= payload_length:
+ raise ValueError('payload_length out of range')
+
+ if (fin | rsv1 | rsv2 | rsv3) & ~1:
+ raise ValueError('FIN bit and Reserved bit parameter must be 0 or 1')
+
+ header = b''
+
+ first_byte = ((fin << 7)
+ | (rsv1 << 6) | (rsv2 << 5) | (rsv3 << 4)
+ | opcode)
+ header += util.pack_byte(first_byte)
+ header += create_length_header(payload_length, mask)
+
+ return header
+
+
+def _build_frame(header, body, mask):
+ if not mask:
+ return header + body
+
+ masking_nonce = os.urandom(4)
+ masker = util.RepeatedXorMasker(masking_nonce)
+
+ return header + masking_nonce + masker.mask(body)
+
+
+def _filter_and_format_frame_object(frame, mask, frame_filters):
+ for frame_filter in frame_filters:
+ frame_filter.filter(frame)
+
+ header = create_header(frame.opcode, len(frame.payload), frame.fin,
+ frame.rsv1, frame.rsv2, frame.rsv3, mask)
+ return _build_frame(header, frame.payload, mask)
+
+
+def create_binary_frame(message,
+ opcode=common.OPCODE_BINARY,
+ fin=1,
+ mask=False,
+ frame_filters=[]):
+    """Creates a simple binary frame with no extension or reserved bits."""
+
+ frame = Frame(fin=fin, opcode=opcode, payload=message)
+ return _filter_and_format_frame_object(frame, mask, frame_filters)
+
+
+def create_text_frame(message,
+ opcode=common.OPCODE_TEXT,
+ fin=1,
+ mask=False,
+ frame_filters=[]):
+    """Creates a simple text frame with no extension or reserved bits."""
+
+ encoded_message = message.encode('utf-8')
+ return create_binary_frame(encoded_message, opcode, fin, mask,
+ frame_filters)
+
+
+def parse_frame(receive_bytes,
+ logger=None,
+ ws_version=common.VERSION_HYBI_LATEST,
+ unmask_receive=True):
+ """Parses a frame. Returns a tuple containing each header field and
+ payload.
+
+ Args:
+ receive_bytes: a function that reads frame data from a stream or
+ something similar. The function takes length of the bytes to be
+ read. The function must raise ConnectionTerminatedException if
+ there is not enough data to be read.
+ logger: a logging object.
+ ws_version: the version of WebSocket protocol.
+        unmask_receive: unmask received frames. When an unmasked frame is
+            received, raises InvalidFrameException.
+
+ Raises:
+ ConnectionTerminatedException: when receive_bytes raises it.
+ InvalidFrameException: when the frame contains invalid data.
+ """
+
+ if not logger:
+ logger = logging.getLogger()
+
+ logger.log(common.LOGLEVEL_FINE, 'Receive the first 2 octets of a frame')
+
+ first_byte = ord(receive_bytes(1))
+ fin = (first_byte >> 7) & 1
+ rsv1 = (first_byte >> 6) & 1
+ rsv2 = (first_byte >> 5) & 1
+ rsv3 = (first_byte >> 4) & 1
+ opcode = first_byte & 0xf
+
+ second_byte = ord(receive_bytes(1))
+ mask = (second_byte >> 7) & 1
+ payload_length = second_byte & 0x7f
+
+ logger.log(
+ common.LOGLEVEL_FINE, 'FIN=%s, RSV1=%s, RSV2=%s, RSV3=%s, opcode=%s, '
+ 'Mask=%s, Payload_length=%s', fin, rsv1, rsv2, rsv3, opcode, mask,
+ payload_length)
+
+ if (mask == 1) != unmask_receive:
+ raise InvalidFrameException(
+            'Mask bit on the received frame didn\'t match masking '
+            'configuration for received frames')
+
+    # The HyBi and later specs disallow putting a value in 0x0-0xFFFF
+    # into the 8-octet extended payload length field (or 0x0-0x7D in the
+    # 2-octet field).
+ valid_length_encoding = True
+ length_encoding_bytes = 1
+ if payload_length == 127:
+ logger.log(common.LOGLEVEL_FINE,
+ 'Receive 8-octet extended payload length')
+
+ extended_payload_length = receive_bytes(8)
+ payload_length = struct.unpack('!Q', extended_payload_length)[0]
+ if payload_length > 0x7FFFFFFFFFFFFFFF:
+ raise InvalidFrameException('Extended payload length >= 2^63')
+ if ws_version >= 13 and payload_length < 0x10000:
+ valid_length_encoding = False
+ length_encoding_bytes = 8
+
+ logger.log(common.LOGLEVEL_FINE, 'Decoded_payload_length=%s',
+ payload_length)
+ elif payload_length == 126:
+ logger.log(common.LOGLEVEL_FINE,
+ 'Receive 2-octet extended payload length')
+
+ extended_payload_length = receive_bytes(2)
+ payload_length = struct.unpack('!H', extended_payload_length)[0]
+ if ws_version >= 13 and payload_length < 126:
+ valid_length_encoding = False
+ length_encoding_bytes = 2
+
+ logger.log(common.LOGLEVEL_FINE, 'Decoded_payload_length=%s',
+ payload_length)
+
+ if not valid_length_encoding:
+ logger.warning(
+ 'Payload length is not encoded using the minimal number of '
+ 'bytes (%d is encoded using %d bytes)', payload_length,
+ length_encoding_bytes)
+
+ if mask == 1:
+ logger.log(common.LOGLEVEL_FINE, 'Receive mask')
+
+ masking_nonce = receive_bytes(4)
+ masker = util.RepeatedXorMasker(masking_nonce)
+
+ logger.log(common.LOGLEVEL_FINE, 'Mask=%r', masking_nonce)
+ else:
+ masker = _NOOP_MASKER
+
+ logger.log(common.LOGLEVEL_FINE, 'Receive payload data')
+ if logger.isEnabledFor(common.LOGLEVEL_FINE):
+ receive_start = time.time()
+
+ raw_payload_bytes = receive_bytes(payload_length)
+
+ if logger.isEnabledFor(common.LOGLEVEL_FINE):
+ logger.log(
+ common.LOGLEVEL_FINE, 'Done receiving payload data at %s MB/s',
+ payload_length / (time.time() - receive_start) / 1000 / 1000)
+ logger.log(common.LOGLEVEL_FINE, 'Unmask payload data')
+
+ if logger.isEnabledFor(common.LOGLEVEL_FINE):
+ unmask_start = time.time()
+
+ unmasked_bytes = masker.mask(raw_payload_bytes)
+
+ if logger.isEnabledFor(common.LOGLEVEL_FINE):
+ logger.log(common.LOGLEVEL_FINE,
+ 'Done unmasking payload data at %s MB/s',
+ payload_length / (time.time() - unmask_start) / 1000 / 1000)
+
+ return opcode, unmasked_bytes, fin, rsv1, rsv2, rsv3
+
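+# For illustration: the unmasked text frame b'\x81\x05Hello' has FIN=1,
+# opcode=0x1 (text) and a 5-byte payload, so feeding it through a suitable
+# receive_bytes function with unmask_receive=False yields
+# (0x1, b'Hello', 1, 0, 0, 0).
+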
+
+class FragmentedFrameBuilder(object):
+ """A stateful class to send a message as fragments."""
+ def __init__(self, mask, frame_filters=[], encode_utf8=True):
+ """Constructs an instance."""
+
+ self._mask = mask
+ self._frame_filters = frame_filters
+ # This is for skipping UTF-8 encoding when building text type frames
+ # from compressed data.
+ self._encode_utf8 = encode_utf8
+
+ self._started = False
+
+ # Hold opcode of the first frame in messages to verify types of other
+ # frames in the message are all the same.
+ self._opcode = common.OPCODE_TEXT
+
+ def build(self, payload_data, end, binary):
+ if binary:
+ frame_type = common.OPCODE_BINARY
+ else:
+ frame_type = common.OPCODE_TEXT
+ if self._started:
+ if self._opcode != frame_type:
+ raise ValueError('Message types are different in frames for '
+ 'the same message')
+ opcode = common.OPCODE_CONTINUATION
+ else:
+ opcode = frame_type
+ self._opcode = frame_type
+
+ if end:
+ self._started = False
+ fin = 1
+ else:
+ self._started = True
+ fin = 0
+
+ if binary or not self._encode_utf8:
+ return create_binary_frame(payload_data, opcode, fin, self._mask,
+ self._frame_filters)
+ else:
+ return create_text_frame(payload_data, opcode, fin, self._mask,
+ self._frame_filters)
+
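+# For illustration: sending u'Hello' as two fragments with
+# builder = FragmentedFrameBuilder(mask=False) would call
+#   builder.build(u'Hel', end=False, binary=False)  # TEXT frame, FIN=0
+#   builder.build(u'lo', end=True, binary=False)    # CONTINUATION frame, FIN=1
+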
+
+def _create_control_frame(opcode, body, mask, frame_filters):
+ frame = Frame(opcode=opcode, payload=body)
+
+ for frame_filter in frame_filters:
+ frame_filter.filter(frame)
+
+ if len(frame.payload) > 125:
+ raise BadOperationException(
+ 'Payload data size of control frames must be 125 bytes or less')
+
+ header = create_header(frame.opcode, len(frame.payload), frame.fin,
+ frame.rsv1, frame.rsv2, frame.rsv3, mask)
+ return _build_frame(header, frame.payload, mask)
+
+
+def create_ping_frame(body, mask=False, frame_filters=[]):
+ return _create_control_frame(common.OPCODE_PING, body, mask, frame_filters)
+
+
+def create_pong_frame(body, mask=False, frame_filters=[]):
+ return _create_control_frame(common.OPCODE_PONG, body, mask, frame_filters)
+
+
+def create_close_frame(body, mask=False, frame_filters=[]):
+ return _create_control_frame(common.OPCODE_CLOSE, body, mask,
+ frame_filters)
+
+
+def create_closing_handshake_body(code, reason):
+ body = b''
+ if code is not None:
+ if (code > common.STATUS_USER_PRIVATE_MAX
+ or code < common.STATUS_NORMAL_CLOSURE):
+ raise BadOperationException('Status code is out of range')
+ if (code == common.STATUS_NO_STATUS_RECEIVED
+ or code == common.STATUS_ABNORMAL_CLOSURE
+ or code == common.STATUS_TLS_HANDSHAKE):
+ raise BadOperationException('Status code is reserved pseudo '
+ 'code')
+ encoded_reason = reason.encode('utf-8')
+ body = struct.pack('!H', code) + encoded_reason
+ return body
+
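+# For illustration: create_closing_handshake_body(1000, u'done') returns
+# b'\x03\xe8done', i.e. the 16-bit status code 1000 (normal closure) in
+# network byte order followed by the UTF-8 encoded reason.
+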
+
+class StreamOptions(object):
+ """Holds option values to configure Stream objects."""
+ def __init__(self):
+ """Constructs StreamOptions."""
+
+ # Filters applied to frames.
+ self.outgoing_frame_filters = []
+ self.incoming_frame_filters = []
+
+ # Filters applied to messages. Control frames are not affected by them.
+ self.outgoing_message_filters = []
+ self.incoming_message_filters = []
+
+ self.encode_text_message_to_utf8 = True
+ self.mask_send = False
+ self.unmask_receive = True
+
+
+class Stream(object):
+ """A class for parsing/building frames of the WebSocket protocol
+ (RFC 6455).
+ """
+ def __init__(self, request, options):
+ """Constructs an instance.
+
+ Args:
+ request: mod_python request.
+ """
+
+ self._logger = util.get_class_logger(self)
+
+ self._options = options
+ self._request = request
+
+ self._request.client_terminated = False
+ self._request.server_terminated = False
+
+ # Holds body of received fragments.
+ self._received_fragments = []
+ # Holds the opcode of the first fragment.
+ self._original_opcode = None
+
+ self._writer = FragmentedFrameBuilder(
+ self._options.mask_send, self._options.outgoing_frame_filters,
+ self._options.encode_text_message_to_utf8)
+
+ self._ping_queue = deque()
+
+ def _read(self, length):
+        """Reads length bytes from the connection. In case we catch any
+        exception, prepends the remote address to the exception message and
+        raises it again.
+
+ Raises:
+ ConnectionTerminatedException: when read returns empty string.
+ """
+
+ try:
+ read_bytes = self._request.connection.read(length)
+ if not read_bytes:
+ raise ConnectionTerminatedException(
+                    'Receiving %d bytes failed. Peer (%r) closed connection' %
+ (length, (self._request.connection.remote_addr, )))
+ return read_bytes
+ except IOError as e:
+ # Also catch an IOError because mod_python throws it.
+ raise ConnectionTerminatedException(
+                'Receiving %d bytes failed. IOError (%s) occurred' %
+ (length, e))
+
+ def _write(self, bytes_to_write):
+        """Writes the given bytes to the connection. In case we catch any
+        exception, prepends the remote address to the exception message and
+        raises it again.
+ """
+
+ try:
+ self._request.connection.write(bytes_to_write)
+ except Exception as e:
+ util.prepend_message_to_exception(
+ 'Failed to send message to %r: ' %
+ (self._request.connection.remote_addr, ), e)
+ raise
+
+ def receive_bytes(self, length):
+ """Receives multiple bytes. Retries read when we couldn't receive the
+ specified amount. This method returns byte strings.
+
+ Raises:
+ ConnectionTerminatedException: when read returns empty string.
+ """
+
+ read_bytes = []
+ while length > 0:
+ new_read_bytes = self._read(length)
+ read_bytes.append(new_read_bytes)
+ length -= len(new_read_bytes)
+ return b''.join(read_bytes)
+
+ def _read_until(self, delim_char):
+ """Reads bytes until we encounter delim_char. The result will not
+ contain delim_char.
+
+ Raises:
+ ConnectionTerminatedException: when read returns empty string.
+ """
+
+ read_bytes = []
+ while True:
+ ch = self._read(1)
+ if ch == delim_char:
+ break
+ read_bytes.append(ch)
+ return b''.join(read_bytes)
+
+ def _receive_frame(self):
+        """Receives a frame and returns the data in the frame as a tuple
+        containing each header field and the payload separately.
+
+ Raises:
+ ConnectionTerminatedException: when read returns empty
+ string.
+ InvalidFrameException: when the frame contains invalid data.
+ """
+ def _receive_bytes(length):
+ return self.receive_bytes(length)
+
+ return parse_frame(receive_bytes=_receive_bytes,
+ logger=self._logger,
+ ws_version=self._request.ws_version,
+ unmask_receive=self._options.unmask_receive)
+
+ def _receive_frame_as_frame_object(self):
+ opcode, unmasked_bytes, fin, rsv1, rsv2, rsv3 = self._receive_frame()
+
+ return Frame(fin=fin,
+ rsv1=rsv1,
+ rsv2=rsv2,
+ rsv3=rsv3,
+ opcode=opcode,
+ payload=unmasked_bytes)
+
+ def receive_filtered_frame(self):
+ """Receives a frame and applies frame filters and message filters.
+        The frame to be received must satisfy the following conditions:
+ - The frame is not fragmented.
+ - The opcode of the frame is TEXT or BINARY.
+
+        DO NOT USE this method except for testing purposes.
+ """
+
+ frame = self._receive_frame_as_frame_object()
+ if not frame.fin:
+ raise InvalidFrameException(
+ 'Segmented frames must not be received via '
+ 'receive_filtered_frame()')
+ if (frame.opcode != common.OPCODE_TEXT
+ and frame.opcode != common.OPCODE_BINARY):
+ raise InvalidFrameException(
+ 'Control frames must not be received via '
+ 'receive_filtered_frame()')
+
+ for frame_filter in self._options.incoming_frame_filters:
+ frame_filter.filter(frame)
+ for message_filter in self._options.incoming_message_filters:
+ frame.payload = message_filter.filter(frame.payload)
+ return frame
+
+ def send_message(self, message, end=True, binary=False):
+ """Send message.
+
+ Args:
+ message: text in unicode or binary in str to send.
+ binary: send message as binary frame.
+
+ Raises:
+ BadOperationException: when called on a server-terminated
+ connection or called with inconsistent message type or
+ binary parameter.
+ """
+
+ if self._request.server_terminated:
+ raise BadOperationException(
+ 'Requested send_message after sending out a closing handshake')
+
+ if binary and isinstance(message, six.text_type):
+ raise BadOperationException(
+ 'Message for binary frame must not be instance of Unicode')
+
+ for message_filter in self._options.outgoing_message_filters:
+ message = message_filter.filter(message, end, binary)
+
+ try:
+ # Set this to any positive integer to limit maximum size of data in
+ # payload data of each frame.
+ MAX_PAYLOAD_DATA_SIZE = -1
+
+ if MAX_PAYLOAD_DATA_SIZE <= 0:
+ self._write(self._writer.build(message, end, binary))
+ return
+
+ bytes_written = 0
+ while True:
+ end_for_this_frame = end
+ bytes_to_write = len(message) - bytes_written
+ if (MAX_PAYLOAD_DATA_SIZE > 0
+ and bytes_to_write > MAX_PAYLOAD_DATA_SIZE):
+ end_for_this_frame = False
+ bytes_to_write = MAX_PAYLOAD_DATA_SIZE
+
+ frame = self._writer.build(
+ message[bytes_written:bytes_written + bytes_to_write],
+ end_for_this_frame, binary)
+ self._write(frame)
+
+ bytes_written += bytes_to_write
+
+ # This if must be placed here (the end of while block) so that
+ # at least one frame is sent.
+ if len(message) <= bytes_written:
+ break
+ except ValueError as e:
+ raise BadOperationException(e)
+
+ def _get_message_from_frame(self, frame):
+        """Gets a message from a frame. If the message is composed of fragmented
+ frames and the frame is not the last fragmented frame, this method
+ returns None. The whole message will be returned when the last
+ fragmented frame is passed to this method.
+
+ Raises:
+ InvalidFrameException: when the frame doesn't match defragmentation
+ context, or the frame contains invalid data.
+ """
+
+ if frame.opcode == common.OPCODE_CONTINUATION:
+ if not self._received_fragments:
+ if frame.fin:
+ raise InvalidFrameException(
+ 'Received a termination frame but fragmentation '
+ 'not started')
+ else:
+ raise InvalidFrameException(
+ 'Received an intermediate frame but '
+ 'fragmentation not started')
+
+ if frame.fin:
+ # End of fragmentation frame
+ self._received_fragments.append(frame.payload)
+ message = b''.join(self._received_fragments)
+ self._received_fragments = []
+ return message
+ else:
+ # Intermediate frame
+ self._received_fragments.append(frame.payload)
+ return None
+ else:
+ if self._received_fragments:
+ if frame.fin:
+ raise InvalidFrameException(
+ 'Received an unfragmented frame without '
+ 'terminating existing fragmentation')
+ else:
+ raise InvalidFrameException(
+ 'New fragmentation started without terminating '
+ 'existing fragmentation')
+
+ if frame.fin:
+ # Unfragmented frame
+
+ self._original_opcode = frame.opcode
+ return frame.payload
+ else:
+ # Start of fragmentation frame
+
+ if common.is_control_opcode(frame.opcode):
+ raise InvalidFrameException(
+ 'Control frames must not be fragmented')
+
+ self._original_opcode = frame.opcode
+ self._received_fragments.append(frame.payload)
+ return None
+
+ def _process_close_message(self, message):
+ """Processes close message.
+
+ Args:
+ message: close message.
+
+ Raises:
+ InvalidFrameException: when the message is invalid.
+ """
+
+ self._request.client_terminated = True
+
+        # The status code is optional. We can have a status reason only if we
+        # have a status code. The status reason can be an empty string. So the
+        # allowed cases are:
+        # - no application data: no code, no reason
+        # - 2 octets of application data: code but no reason
+        # - 3 or more octets of application data: both code and reason
+ if len(message) == 0:
+ self._logger.debug('Received close frame (empty body)')
+ self._request.ws_close_code = common.STATUS_NO_STATUS_RECEIVED
+ elif len(message) == 1:
+ raise InvalidFrameException(
+                'If a close frame has a status code, the length of the '
+                'status code must be 2 octets')
+ elif len(message) >= 2:
+ self._request.ws_close_code = struct.unpack('!H', message[0:2])[0]
+ self._request.ws_close_reason = message[2:].decode(
+ 'utf-8', 'replace')
+ self._logger.debug('Received close frame (code=%d, reason=%r)',
+ self._request.ws_close_code,
+ self._request.ws_close_reason)
+
+ # As we've received a close frame, no more data is coming over the
+ # socket. We can now safely close the socket without worrying about
+ # RST sending.
+
+ if self._request.server_terminated:
+ self._logger.debug(
+ 'Received ack for server-initiated closing handshake')
+ return
+
+ self._logger.debug('Received client-initiated closing handshake')
+
+ code = common.STATUS_NORMAL_CLOSURE
+ reason = ''
+ if hasattr(self._request, '_dispatcher'):
+ dispatcher = self._request._dispatcher
+ code, reason = dispatcher.passive_closing_handshake(self._request)
+ if code is None and reason is not None and len(reason) > 0:
+ self._logger.warning(
+ 'Handler specified reason despite code being None')
+ reason = ''
+ if reason is None:
+ reason = ''
+ self._send_closing_handshake(code, reason)
+ self._logger.debug(
+ 'Acknowledged closing handshake initiated by the peer '
+ '(code=%r, reason=%r)', code, reason)
+
+ def _process_ping_message(self, message):
+ """Processes ping message.
+
+ Args:
+ message: ping message.
+ """
+
+ try:
+ handler = self._request.on_ping_handler
+ if handler:
+ handler(self._request, message)
+ return
+ except AttributeError:
+ pass
+ self._send_pong(message)
+
+ def _process_pong_message(self, message):
+ """Processes pong message.
+
+ Args:
+ message: pong message.
+ """
+
+ # TODO(tyoshino): Add ping timeout handling.
+
+ inflight_pings = deque()
+
+ while True:
+ try:
+ expected_body = self._ping_queue.popleft()
+ if expected_body == message:
+ # inflight_pings contains pings ignored by the
+ # other peer. Just forget them.
+ self._logger.debug(
+ 'Ping %r is acked (%d pings were ignored)',
+ expected_body, len(inflight_pings))
+ break
+ else:
+ inflight_pings.append(expected_body)
+ except IndexError:
+                # The received pong was an unsolicited pong. Keep the
+                # ping queue as is.
+                self._ping_queue = inflight_pings
+                self._logger.debug('Received an unsolicited pong')
+ break
+
+ try:
+ handler = self._request.on_pong_handler
+ if handler:
+ handler(self._request, message)
+ except AttributeError:
+ pass
+
+ def receive_message(self):
+ """Receive a WebSocket frame and return its payload as a text in
+ unicode or a binary in str.
+
+ Returns:
+            payload data of the frame
+            - as a unicode instance if a text frame was received
+            - as a str instance if a binary frame was received
+            or None iff a closing handshake was received.
+ Raises:
+ BadOperationException: when called on a client-terminated
+ connection.
+ ConnectionTerminatedException: when read returns empty
+ string.
+ InvalidFrameException: when the frame contains invalid
+ data.
+            UnsupportedFrameException: when the received frame has flags or
+                an opcode we cannot handle. You can ignore this exception
+                and continue receiving the next frame.
+ """
+
+ if self._request.client_terminated:
+ raise BadOperationException(
+ 'Requested receive_message after receiving a closing '
+ 'handshake')
+
+ while True:
+ # mp_conn.read will block if no bytes are available.
+
+ frame = self._receive_frame_as_frame_object()
+
+ # Check the constraint on the payload size for control frames
+ # before extension processes the frame.
+ # See also http://tools.ietf.org/html/rfc6455#section-5.5
+ if (common.is_control_opcode(frame.opcode)
+ and len(frame.payload) > 125):
+ raise InvalidFrameException(
+ 'Payload data size of control frames must be 125 bytes or '
+ 'less')
+
+ for frame_filter in self._options.incoming_frame_filters:
+ frame_filter.filter(frame)
+
+ if frame.rsv1 or frame.rsv2 or frame.rsv3:
+ raise UnsupportedFrameException(
+ 'Unsupported flag is set (rsv = %d%d%d)' %
+ (frame.rsv1, frame.rsv2, frame.rsv3))
+
+ message = self._get_message_from_frame(frame)
+ if message is None:
+ continue
+
+ for message_filter in self._options.incoming_message_filters:
+ message = message_filter.filter(message)
+
+ if self._original_opcode == common.OPCODE_TEXT:
+ # The WebSocket protocol section 4.4 specifies that invalid
+ # characters must be replaced with U+fffd REPLACEMENT
+ # CHARACTER.
+ try:
+ return message.decode('utf-8')
+ except UnicodeDecodeError as e:
+ raise InvalidUTF8Exception(e)
+ elif self._original_opcode == common.OPCODE_BINARY:
+ return message
+ elif self._original_opcode == common.OPCODE_CLOSE:
+ self._process_close_message(message)
+ return None
+ elif self._original_opcode == common.OPCODE_PING:
+ self._process_ping_message(message)
+ elif self._original_opcode == common.OPCODE_PONG:
+ self._process_pong_message(message)
+ else:
+ raise UnsupportedFrameException('Opcode %d is not supported' %
+ self._original_opcode)
+
+ def _send_closing_handshake(self, code, reason):
+ body = create_closing_handshake_body(code, reason)
+ frame = create_close_frame(
+ body,
+ mask=self._options.mask_send,
+ frame_filters=self._options.outgoing_frame_filters)
+
+ self._request.server_terminated = True
+
+ self._write(frame)
+
+ def close_connection(self,
+ code=common.STATUS_NORMAL_CLOSURE,
+ reason='',
+ wait_response=True):
+ """Closes a WebSocket connection. Note that this method blocks until
+ it receives acknowledgement to the closing handshake.
+
+ Args:
+ code: Status code for close frame. If code is None, a close
+ frame with empty body will be sent.
+ reason: string representing close reason.
+            wait_response: True when the caller wants to wait for the response.
+ Raises:
+ BadOperationException: when reason is specified with code None
+                or reason is not an instance of bytes or unicode.
+ """
+
+ if self._request.server_terminated:
+ self._logger.debug(
+ 'Requested close_connection but server is already terminated')
+ return
+
+ # When we receive a close frame, we call _process_close_message().
+ # _process_close_message() immediately acknowledges to the
+ # server-initiated closing handshake and sets server_terminated to
+ # True. So, here we can assume that we haven't received any close
+ # frame. We're initiating a closing handshake.
+
+ if code is None:
+ if reason is not None and len(reason) > 0:
+ raise BadOperationException(
+ 'close reason must not be specified if code is None')
+ reason = ''
+ else:
+ if not isinstance(reason, bytes) and not isinstance(
+ reason, six.text_type):
+ raise BadOperationException(
+ 'close reason must be an instance of bytes or unicode')
+
+ self._send_closing_handshake(code, reason)
+ self._logger.debug('Initiated closing handshake (code=%r, reason=%r)',
+ code, reason)
+
+ if (code == common.STATUS_GOING_AWAY
+ or code == common.STATUS_PROTOCOL_ERROR) or not wait_response:
+            # It doesn't make sense to wait for a close frame if the reason is
+            # a protocol error or the server is going away. For some other
+            # reasons, it might also not make sense to wait for a close frame,
+            # but that's not clear yet.
+ return
+
+ # TODO(ukai): 2. wait until the /client terminated/ flag has been set,
+ # or until a server-defined timeout expires.
+ #
+ # For now, we expect receiving closing handshake right after sending
+ # out closing handshake.
+ message = self.receive_message()
+ if message is not None:
+ raise ConnectionTerminatedException(
+ 'Didn\'t receive valid ack for closing handshake')
+ # TODO: 3. close the WebSocket connection.
+ # note: mod_python Connection (mp_conn) doesn't have close method.
+
+ def send_ping(self, body, binary=False):
+ if not binary and isinstance(body, six.text_type):
+ body = body.encode('UTF-8')
+ frame = create_ping_frame(body, self._options.mask_send,
+ self._options.outgoing_frame_filters)
+ self._write(frame)
+
+ self._ping_queue.append(body)
+
+ def _send_pong(self, body):
+ frame = create_pong_frame(body, self._options.mask_send,
+ self._options.outgoing_frame_filters)
+ self._write(frame)
+
+ def get_last_received_opcode(self):
+        """Returns the opcode of the WebSocket message which the last received
+        frame belongs to. The return value is only valid immediately after a
+        receive_message call.
+ """
+
+ return self._original_opcode
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/util.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/util.py
new file mode 100644
index 0000000000..04006ecacd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/util.py
@@ -0,0 +1,386 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""WebSocket utilities."""
+
+from __future__ import absolute_import
+import array
+import errno
+import logging
+import os
+import re
+import six
+from six.moves import map
+from six.moves import range
+import socket
+import struct
+import zlib
+
+try:
+ from mod_pywebsocket import fast_masking
+except ImportError:
+ pass
+
+
+def prepend_message_to_exception(message, exc):
+ """Prepend message to the exception."""
+ exc.args = (message + str(exc), )
+ return
+
+
+def __translate_interp(interp, cygwin_path):
+    """Translate the interp program path for Win32 Python to run a cygwin
+    program (e.g. perl). Note that it doesn't support a path that contains
+    spaces, which is typically fine on Unix, where #!-scripts are written.
+    For Win32 Python, cygwin_path is a directory of cygwin binaries.
+
+ Args:
+ interp: interp command line
+ cygwin_path: directory name of cygwin binary, or None
+ Returns:
+ translated interp command line.
+ """
+ if not cygwin_path:
+ return interp
+ m = re.match('^[^ ]*/([^ ]+)( .*)?', interp)
+ if m:
+ cmd = os.path.join(cygwin_path, m.group(1))
+ return cmd + m.group(2)
+ return interp
+
+
+def get_script_interp(script_path, cygwin_path=None):
+ r"""Get #!-interpreter command line from the script.
+
+ It also fixes command path. When Cygwin Python is used, e.g. in WebKit,
+ it could run "/usr/bin/perl -wT hello.pl".
+ When Win32 Python is used, e.g. in Chromium, it couldn't. So, fix
+ "/usr/bin/perl" to "<cygwin_path>\perl.exe".
+
+ Args:
+ script_path: pathname of the script
+ cygwin_path: directory name of cygwin binary, or None
+ Returns:
+ #!-interpreter command line, or None if it is not #!-script.
+ """
+ fp = open(script_path)
+ line = fp.readline()
+ fp.close()
+ m = re.match('^#!(.*)', line)
+ if m:
+ return __translate_interp(m.group(1), cygwin_path)
+ return None
+
+
+def hexify(s):
+ return ' '.join(['%02x' % x for x in six.iterbytes(s)])
+
+
+def get_class_logger(o):
+    """Return a logger named after the class of the given object."""
+ return logging.getLogger('%s.%s' %
+ (o.__class__.__module__, o.__class__.__name__))
+
+
+def pack_byte(b):
+ """Pack an integer to network-ordered byte"""
+ return struct.pack('!B', b)
+
+
+class NoopMasker(object):
+ """A NoOp masking object.
+
+ This has the same interface as RepeatedXorMasker but just returns
+ the string passed in without making any change.
+ """
+ def __init__(self):
+ """NoOp."""
+ pass
+
+ def mask(self, s):
+ """NoOp."""
+ return s
+
+
+class RepeatedXorMasker(object):
+ """A masking object that applies XOR on the string.
+
+ Applies XOR on the byte string given to mask method with the masking bytes
+ given to the constructor repeatedly. This object remembers the position
+ in the masking bytes the last mask method call ended and resumes from
+    in the masking bytes where the last mask method call ended and resumes from
+ """
+ def __init__(self, masking_key):
+ self._masking_key = masking_key
+ self._masking_key_index = 0
+
+ def _mask_using_swig(self, s):
+ """Perform the mask via SWIG."""
+ masked_data = fast_masking.mask(s, self._masking_key,
+ self._masking_key_index)
+ self._masking_key_index = ((self._masking_key_index + len(s)) %
+ len(self._masking_key))
+ return masked_data
+
+ def _mask_using_array(self, s):
+ """Perform the mask via python."""
+ if isinstance(s, six.text_type):
+ raise Exception(
+ 'Masking Operation should not process unicode strings')
+
+ result = bytearray(s)
+
+ # Use temporary local variables to eliminate the cost to access
+ # attributes
+ masking_key = [c for c in six.iterbytes(self._masking_key)]
+ masking_key_size = len(masking_key)
+ masking_key_index = self._masking_key_index
+
+ for i in range(len(result)):
+ result[i] ^= masking_key[masking_key_index]
+ masking_key_index = (masking_key_index + 1) % masking_key_size
+
+ self._masking_key_index = masking_key_index
+
+ return bytes(result)
+
+ if 'fast_masking' in globals():
+ mask = _mask_using_swig
+ else:
+ mask = _mask_using_array
+
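+# For illustration: with masker = RepeatedXorMasker(b'\x01\x02\x03\x04'),
+# masker.mask(b'\x00\x00') returns b'\x01\x02', and a subsequent
+# masker.mask(b'\x00\x00') returns b'\x03\x04', because the masker resumes
+# from where the previous call stopped in the masking key.
+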
+
+# By making the wbits option negative, we can suppress the CMF/FLG (2 octets)
+# and ADLER32 (4 octets) fields of zlib so that we can use the zlib module as
+# a raw deflate library. DICTID won't be added as long as we don't set a
+# dictionary. An LZ77 window of 32K will be used for both compression and
+# decompression. For decompression, 32K covers any window size. For
+# compression, we use 32K, so receivers must use 32K.
+#
+# The compression level is Z_DEFAULT_COMPRESSION. We don't have to match the
+# level to decode.
+#
+# See zconf.h, deflate.cc, inflate.cc of the zlib library, and zlibmodule.c of
+# Python. See also RFC 1950 (ZLIB 3.3).
+
+
+class _Deflater(object):
+ def __init__(self, window_bits):
+ self._logger = get_class_logger(self)
+
+        # Use the smallest window bits of 9 for generating input frames.
+        # In the WebSocket spec, the smallest window bits value is 8. However,
+        # zlib does not accept window_bits = 8.
+        #
+        # Because of a zlib deflate quirk, back-references will not use the
+        # entire range of 1 << window_bits, but will instead use a restricted
+        # range of (1 << window_bits) - 262. With an increased window_bits = 9,
+        # back-references will be within a range of 250. These can still be
+        # decompressed with window_bits = 8 and the 256-byte window used there.
+        #
+        # Similar discussions can be found in https://crbug.com/691074
+ window_bits = max(window_bits, 9)
+
+ self._compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
+ zlib.DEFLATED, -window_bits)
+
+ def compress(self, bytes):
+ compressed_bytes = self._compress.compress(bytes)
+ self._logger.debug('Compress input %r', bytes)
+ self._logger.debug('Compress result %r', compressed_bytes)
+ return compressed_bytes
+
+ def compress_and_flush(self, bytes):
+ compressed_bytes = self._compress.compress(bytes)
+ compressed_bytes += self._compress.flush(zlib.Z_SYNC_FLUSH)
+ self._logger.debug('Compress input %r', bytes)
+ self._logger.debug('Compress result %r', compressed_bytes)
+ return compressed_bytes
+
+ def compress_and_finish(self, bytes):
+ compressed_bytes = self._compress.compress(bytes)
+ compressed_bytes += self._compress.flush(zlib.Z_FINISH)
+ self._logger.debug('Compress input %r', bytes)
+ self._logger.debug('Compress result %r', compressed_bytes)
+ return compressed_bytes
+
+
+class _Inflater(object):
+ def __init__(self, window_bits):
+ self._logger = get_class_logger(self)
+ self._window_bits = window_bits
+
+ self._unconsumed = b''
+
+ self.reset()
+
+ def decompress(self, size):
+ if not (size == -1 or size > 0):
+ raise Exception('size must be -1 or positive')
+
+ data = b''
+
+ while True:
+ data += self._decompress.decompress(self._unconsumed,
+ max(0, size - len(data)))
+ self._unconsumed = self._decompress.unconsumed_tail
+ if self._decompress.unused_data:
+ # Encountered a last block (i.e. a block with BFINAL = 1) and
+ # found a new stream (unused_data). We cannot use the same
+ # zlib.Decompress object for the new stream. Create a new
+ # Decompress object to decompress the new one.
+ #
+ # It's fine to ignore unconsumed_tail if unused_data is not
+ # empty.
+ self._unconsumed = self._decompress.unused_data
+ self.reset()
+ if size >= 0 and len(data) == size:
+ # data is filled. Don't call decompress again.
+ break
+ else:
+ # Re-invoke Decompress.decompress to try to decompress all
+ # available bytes before invoking read which blocks until
+ # any new byte is available.
+ continue
+ else:
+ # Here, since unused_data is empty, even if unconsumed_tail is
+ # not empty, bytes of requested length are already in data. We
+ # don't have to "continue" here.
+ break
+
+ if data:
+ self._logger.debug('Decompressed %r', data)
+ return data
+
+ def append(self, data):
+ self._logger.debug('Appended %r', data)
+ self._unconsumed += data
+
+ def reset(self):
+ self._logger.debug('Reset')
+ self._decompress = zlib.decompressobj(-self._window_bits)
+
+
+# Compresses/decompresses given octets using the method introduced in RFC1979.
+
+
+class _RFC1979Deflater(object):
+    """A compressor class that applies DEFLATE to a given byte sequence and
+    flushes using the algorithm described in RFC 1979 section 2.1.
+ """
+ def __init__(self, window_bits, no_context_takeover):
+ self._deflater = None
+ if window_bits is None:
+ window_bits = zlib.MAX_WBITS
+ self._window_bits = window_bits
+ self._no_context_takeover = no_context_takeover
+
+ def filter(self, bytes, end=True, bfinal=False):
+ if self._deflater is None:
+ self._deflater = _Deflater(self._window_bits)
+
+ if bfinal:
+ result = self._deflater.compress_and_finish(bytes)
+ # Add a padding block with BFINAL = 0 and BTYPE = 0.
+ result = result + pack_byte(0)
+ self._deflater = None
+ return result
+
+ result = self._deflater.compress_and_flush(bytes)
+ if end:
+ # Strip last 4 octets which is LEN and NLEN field of a
+ # non-compressed block added for Z_SYNC_FLUSH.
+ result = result[:-4]
+
+ if self._no_context_takeover and end:
+ self._deflater = None
+
+ return result
+
+
+class _RFC1979Inflater(object):
+ """A decompressor class a la RFC1979.
+
+    A decompressor class for byte sequences compressed and flushed following
+    the algorithm described in RFC 1979 section 2.1.
+ """
+ def __init__(self, window_bits=zlib.MAX_WBITS):
+ self._inflater = _Inflater(window_bits)
+
+ def filter(self, bytes):
+ # Restore stripped LEN and NLEN field of a non-compressed block added
+ # for Z_SYNC_FLUSH.
+ self._inflater.append(bytes + b'\x00\x00\xff\xff')
+ return self._inflater.decompress(-1)
+
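+# For illustration, the two classes above are intended to round-trip:
+#   _RFC1979Inflater().filter(_RFC1979Deflater(None, False).filter(b'Hello'))
+# returns b'Hello'. The deflater strips the trailing b'\x00\x00\xff\xff' that
+# Z_SYNC_FLUSH appends, and the inflater adds it back before decompressing.
+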
+
+class DeflateSocket(object):
+ """A wrapper class for socket object to intercept send and recv to perform
+ deflate compression and decompression transparently.
+ """
+
+ # Size of the buffer passed to recv to receive compressed data.
+ _RECV_SIZE = 4096
+
+ def __init__(self, socket):
+ self._socket = socket
+
+ self._logger = get_class_logger(self)
+
+ self._deflater = _Deflater(zlib.MAX_WBITS)
+ self._inflater = _Inflater(zlib.MAX_WBITS)
+
+ def recv(self, size):
+ """Receives data from the socket specified on the construction up
+ to the specified size. Once any data is available, returns it even
+ if it's smaller than the specified size.
+ """
+
+ # TODO(tyoshino): Allow call with size=0. It should block until any
+ # decompressed data is available.
+ if size <= 0:
+ raise Exception('Non-positive size passed')
+ while True:
+ data = self._inflater.decompress(size)
+ if len(data) != 0:
+ return data
+
+ read_data = self._socket.recv(DeflateSocket._RECV_SIZE)
+ if not read_data:
+ return b''
+ self._inflater.append(read_data)
+
+ def sendall(self, bytes):
+ self.send(bytes)
+
+ def send(self, bytes):
+ self._socket.sendall(self._deflater.compress_and_flush(bytes))
+ return len(bytes)
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/websocket_server.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/websocket_server.py
new file mode 100644
index 0000000000..fa24bb9651
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/mod_pywebsocket/websocket_server.py
@@ -0,0 +1,285 @@
+# Copyright 2020, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Standalone WebsocketServer
+
+This file deals with the main module of standalone server. Although it is fine
+to import this file directly to use WebSocketServer, it is strongly recommended
+to use standalone.py, since it is intended to act as a skeleton of this module.
+"""
+
+from __future__ import absolute_import
+from six.moves import BaseHTTPServer
+from six.moves import socketserver
+import logging
+import re
+import select
+import socket
+import ssl
+import threading
+import traceback
+
+from mod_pywebsocket import dispatch
+from mod_pywebsocket import util
+from mod_pywebsocket.request_handler import WebSocketRequestHandler
+
+
+def _alias_handlers(dispatcher, websock_handlers_map_file):
+    """Set aliases specified in websock_handlers_map_file in the dispatcher.
+
+    Args:
+        dispatcher: dispatch.Dispatcher instance
+        websock_handlers_map_file: alias map file
+ """
+
+ with open(websock_handlers_map_file) as f:
+ for line in f:
+ if line[0] == '#' or line.isspace():
+ continue
+ m = re.match(r'(\S+)\s+(\S+)$', line)
+ if not m:
+                logging.warning('Wrong format in map file: ' + line)
+ continue
+ try:
+ dispatcher.add_resource_path_alias(m.group(1), m.group(2))
+ except dispatch.DispatchException as e:
+ logging.error(str(e))
+
+
+class WebSocketServer(socketserver.ThreadingMixIn, BaseHTTPServer.HTTPServer):
+ """HTTPServer specialized for WebSocket."""
+
+ # Overrides SocketServer.ThreadingMixIn.daemon_threads
+ daemon_threads = True
+ # Overrides BaseHTTPServer.HTTPServer.allow_reuse_address
+ allow_reuse_address = True
+
+ def __init__(self, options):
+        """Override SocketServer.TCPServer.__init__ to set an SSL-enabled
+        socket object on self.socket before server_bind and server_activate,
+ if necessary.
+ """
+
+ # Share a Dispatcher among request handlers to save time for
+ # instantiation. Dispatcher can be shared because it is thread-safe.
+ options.dispatcher = dispatch.Dispatcher(
+ options.websock_handlers, options.scan_dir,
+ options.allow_handlers_outside_root_dir)
+ if options.websock_handlers_map_file:
+ _alias_handlers(options.dispatcher,
+ options.websock_handlers_map_file)
+ warnings = options.dispatcher.source_warnings()
+ if warnings:
+ for warning in warnings:
+ logging.warning('Warning in source loading: %s' % warning)
+
+ self._logger = util.get_class_logger(self)
+
+ self.request_queue_size = options.request_queue_size
+ self.__ws_is_shut_down = threading.Event()
+ self.__ws_serving = False
+
+ socketserver.BaseServer.__init__(self,
+ (options.server_host, options.port),
+ WebSocketRequestHandler)
+
+        # Expose the options object to allow handler objects to access it. We
+        # name it with a websocket_ prefix to avoid conflicts.
+ self.websocket_server_options = options
+
+ self._create_sockets()
+ self.server_bind()
+ self.server_activate()
+
+ def _create_sockets(self):
+ self.server_name, self.server_port = self.server_address
+ self._sockets = []
+ if not self.server_name:
+            # On platforms that don't support IPv6, the first bind fails.
+            # On platforms that support IPv6:
+            # - If a call with AF_INET6 binds both IPv4 and IPv6, the first
+            #   bind succeeds and the second fails (we'll see an 'Address
+            #   already in use' error).
+            # - If a call with AF_INET6 binds only IPv6, both calls are
+            #   expected to succeed, listening on both protocols.
+ addrinfo_array = [(socket.AF_INET6, socket.SOCK_STREAM, '', '',
+ ''),
+ (socket.AF_INET, socket.SOCK_STREAM, '', '', '')]
+ else:
+ addrinfo_array = socket.getaddrinfo(self.server_name,
+ self.server_port,
+ socket.AF_UNSPEC,
+ socket.SOCK_STREAM,
+ socket.IPPROTO_TCP)
+ for addrinfo in addrinfo_array:
+ self._logger.info('Create socket on: %r', addrinfo)
+ family, socktype, proto, canonname, sockaddr = addrinfo
+ try:
+ socket_ = socket.socket(family, socktype)
+ except Exception as e:
+ self._logger.info('Skip by failure: %r', e)
+ continue
+ server_options = self.websocket_server_options
+ if server_options.use_tls:
+ if server_options.tls_client_auth:
+ if server_options.tls_client_cert_optional:
+ client_cert_ = ssl.CERT_OPTIONAL
+ else:
+ client_cert_ = ssl.CERT_REQUIRED
+ else:
+ client_cert_ = ssl.CERT_NONE
+ socket_ = ssl.wrap_socket(
+ socket_,
+ keyfile=server_options.private_key,
+ certfile=server_options.certificate,
+ ca_certs=server_options.tls_client_ca,
+ cert_reqs=client_cert_)
+ self._sockets.append((socket_, addrinfo))
+
+ def server_bind(self):
+ """Override SocketServer.TCPServer.server_bind to enable multiple
+ sockets bind.
+ """
+
+ failed_sockets = []
+
+ for socketinfo in self._sockets:
+ socket_, addrinfo = socketinfo
+ self._logger.info('Bind on: %r', addrinfo)
+ if self.allow_reuse_address:
+ socket_.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ try:
+ socket_.bind(self.server_address)
+ except Exception as e:
+ self._logger.info('Skip by failure: %r', e)
+ socket_.close()
+ failed_sockets.append(socketinfo)
+ if self.server_address[1] == 0:
+ # The operating system assigns the actual port number for port
+ # number 0. In this case, the second and later sockets should use
+ # the same port number. self.server_port is also rewritten
+ # because it is exported and will be used by external code.
+ self.server_address = (self.server_name,
+ socket_.getsockname()[1])
+ self.server_port = self.server_address[1]
+ self._logger.info('Port %r is assigned', self.server_port)
+
+ for socketinfo in failed_sockets:
+ self._sockets.remove(socketinfo)
+
+ def server_activate(self):
+ """Override SocketServer.TCPServer.server_activate to enable multiple
+ sockets listen.
+ """
+
+ failed_sockets = []
+
+ for socketinfo in self._sockets:
+ socket_, addrinfo = socketinfo
+ self._logger.info('Listen on: %r', addrinfo)
+ try:
+ socket_.listen(self.request_queue_size)
+ except Exception as e:
+ self._logger.info('Skip by failure: %r', e)
+ socket_.close()
+ failed_sockets.append(socketinfo)
+
+ for socketinfo in failed_sockets:
+ self._sockets.remove(socketinfo)
+
+ if len(self._sockets) == 0:
+ self._logger.critical(
+ 'No sockets activated. Use info log level to see the reason.')
+
+ def server_close(self):
+ """Override SocketServer.TCPServer.server_close to enable multiple
+ sockets close.
+ """
+
+ for socketinfo in self._sockets:
+ socket_, addrinfo = socketinfo
+ self._logger.info('Close on: %r', addrinfo)
+ socket_.close()
+
+ def fileno(self):
+ """Override SocketServer.TCPServer.fileno."""
+
+ self._logger.critical('Not supported: fileno')
+ return self._sockets[0][0].fileno()
+
+ def handle_error(self, request, client_address):
+ """Override SocketServer.handle_error."""
+
+ self._logger.error('Exception in processing request from: %r\n%s',
+ client_address, traceback.format_exc())
+ # Note: client_address is a tuple.
+
+ def get_request(self):
+ """Override TCPServer.get_request."""
+
+ accepted_socket, client_address = self.socket.accept()
+
+ server_options = self.websocket_server_options
+ if server_options.use_tls:
+ # Print cipher in use. Handshake is done on accept.
+ self._logger.debug('Cipher: %s', accepted_socket.cipher())
+ self._logger.debug('Client cert: %r',
+ accepted_socket.getpeercert())
+
+ return accepted_socket, client_address
+
+ def serve_forever(self, poll_interval=0.5):
+ """Override SocketServer.BaseServer.serve_forever."""
+
+ self.__ws_serving = True
+ self.__ws_is_shut_down.clear()
+ handle_request = self.handle_request
+ if hasattr(self, '_handle_request_noblock'):
+ handle_request = self._handle_request_noblock
+ else:
+ self._logger.warning('Fallback to blocking request handler')
+ try:
+ while self.__ws_serving:
+ r, w, e = select.select(
+ [socket_[0] for socket_ in self._sockets], [], [],
+ poll_interval)
+ for socket_ in r:
+ self.socket = socket_
+ handle_request()
+ self.socket = None
+ finally:
+ self.__ws_is_shut_down.set()
+
+ def shutdown(self):
+ """Override SocketServer.BaseServer.shutdown."""
+
+ self.__ws_serving = False
+ self.__ws_is_shut_down.wait()
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/setup.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/setup.py
new file mode 100755
index 0000000000..b65904c94f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/setup.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+#
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Set up script for mod_pywebsocket.
+"""
+
+from __future__ import absolute_import
+from __future__ import print_function
+from setuptools import setup, Extension
+import sys
+
+_PACKAGE_NAME = 'mod_pywebsocket'
+
+# Build and use a C++ extension for faster masking. SWIG is required.
+_USE_FAST_MASKING = False
+
+# This check is used because the python_requires field is not recognized by
+# pip version 9.0.0 and earlier.
+if sys.hexversion < 0x020700f0:
+ print('%s requires Python 2.7 or later.' % _PACKAGE_NAME, file=sys.stderr)
+ sys.exit(1)
+
+if _USE_FAST_MASKING:
+ setup(ext_modules=[
+ Extension('mod_pywebsocket/_fast_masking',
+ ['mod_pywebsocket/fast_masking.i'],
+ swig_opts=['-c++'])
+ ])
+
+setup(
+ author='Yuzo Fujishima',
+ author_email='yuzo@chromium.org',
+ description='Standalone WebSocket Server for testing purposes.',
+ long_description=('mod_pywebsocket is a standalone server for '
+ 'the WebSocket Protocol (RFC 6455). '
+ 'See mod_pywebsocket/__init__.py for more detail.'),
+ license='See LICENSE',
+ name=_PACKAGE_NAME,
+ packages=[_PACKAGE_NAME, _PACKAGE_NAME + '.handshake'],
+ python_requires='>=2.7',
+ install_requires=['six'],
+ url='https://github.com/GoogleChromeLabs/pywebsocket3',
+ version='3.0.1',
+)
+
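+# A minimal usage sketch: the package can be built and installed with the
+# standard setuptools/pip workflow, e.g.
+#   pip install .
+# Set _USE_FAST_MASKING to True before building to also compile the optional
+# SWIG-based masking extension.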
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/__init__.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/cacert.pem b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/cacert.pem
new file mode 100644
index 0000000000..4dadae121b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/cacert.pem
@@ -0,0 +1,17 @@
+-----BEGIN CERTIFICATE-----
+MIICvDCCAiWgAwIBAgIJAKqVghkGF1rSMA0GCSqGSIb3DQEBBQUAMEkxCzAJBgNV
+BAYTAkpQMQ4wDAYDVQQIEwVUb2t5bzEUMBIGA1UEChMLcHl3ZWJzb2NrZXQxFDAS
+BgNVBAMTC3B5d2Vic29ja2V0MB4XDTEyMDYwNjA3MjQzM1oXDTM5MTAyMzA3MjQz
+M1owSTELMAkGA1UEBhMCSlAxDjAMBgNVBAgTBVRva3lvMRQwEgYDVQQKEwtweXdl
+YnNvY2tldDEUMBIGA1UEAxMLcHl3ZWJzb2NrZXQwgZ8wDQYJKoZIhvcNAQEBBQAD
+gY0AMIGJAoGBAKoSEW2biQxVrMMKdn/8PJzDYiSXDPR9WQbLRRQ1Gm5jkCYiahXW
+u2CbTThfPPfi2NHA3I+HlT7gO9yR7RVUvN6ISUzGwXDEq4f4UNqtQOhQaqqK+CZ9
+LO/BhO/YYfNrbSPlYzHUKaT9ese7xO9VzVKLW+qUf2Mjh4/+SzxBDNP7AgMBAAGj
+gaswgagwHQYDVR0OBBYEFOsWdxCSuyhwaZeab6BoTho3++bzMHkGA1UdIwRyMHCA
+FOsWdxCSuyhwaZeab6BoTho3++bzoU2kSzBJMQswCQYDVQQGEwJKUDEOMAwGA1UE
+CBMFVG9reW8xFDASBgNVBAoTC3B5d2Vic29ja2V0MRQwEgYDVQQDEwtweXdlYnNv
+Y2tldIIJAKqVghkGF1rSMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEA
+gsMI1WEYqNw/jhUIdrTBcCxJ0X6hJvA9ziKANVm1Rs+4P3YDArkQ8bCr6xY+Kw7s
+Zp0yE7dM8GMdi+DU6hL3t3E5eMkTS1yZr9WCK4f2RLo+et98selZydpHemF3DJJ3
+gAj8Sx4LBaG8Cb/WnEMPv3MxG3fBE5favF6V4jU07hQ=
+-----END CERTIFICATE-----
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/cert.pem b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/cert.pem
new file mode 100644
index 0000000000..25379a72b0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/cert.pem
@@ -0,0 +1,61 @@
+Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 1 (0x1)
+ Signature Algorithm: sha1WithRSAEncryption
+ Issuer: C=JP, ST=Tokyo, O=pywebsocket, CN=pywebsocket
+ Validity
+ Not Before: Jun 6 07:25:08 2012 GMT
+ Not After : Oct 23 07:25:08 2039 GMT
+ Subject: C=JP, ST=Tokyo, O=pywebsocket, CN=pywebsocket
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ RSA Public Key: (1024 bit)
+ Modulus (1024 bit):
+ 00:de:10:ce:3a:5a:04:a4:1c:29:93:5c:23:82:1a:
+ f2:06:01:e6:2b:a4:0f:dd:77:49:76:89:03:a2:21:
+ de:04:75:c6:e2:dd:fb:35:27:3a:a2:92:8e:12:62:
+ 2b:3e:1f:f4:78:df:b6:94:cb:27:d6:cb:d6:37:d7:
+ 5c:08:f0:09:3e:c9:ce:24:2d:00:c9:df:4a:e0:99:
+ e5:fb:23:a9:e2:d6:c9:3d:96:fa:01:88:de:5a:89:
+ b0:cf:03:67:6f:04:86:1d:ef:62:1c:55:a9:07:9a:
+ 2e:66:2a:73:5b:4c:62:03:f9:82:83:db:68:bf:b8:
+ 4b:0b:8b:93:11:b8:54:73:7b
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Basic Constraints:
+ CA:FALSE
+ Netscape Cert Type:
+ SSL Server
+ Netscape Comment:
+ OpenSSL Generated Certificate
+ X509v3 Subject Key Identifier:
+ 82:A1:73:8B:16:0C:7C:E4:D3:46:95:13:95:1A:32:C1:84:E9:06:00
+ X509v3 Authority Key Identifier:
+ keyid:EB:16:77:10:92:BB:28:70:69:97:9A:6F:A0:68:4E:1A:37:FB:E6:F3
+
+ Signature Algorithm: sha1WithRSAEncryption
+ 6b:b3:46:29:02:df:b0:c8:8e:c4:d7:7f:a0:1e:0d:1a:eb:2f:
+ df:d1:48:57:36:5f:95:8c:1b:f0:51:d6:52:e7:8d:84:3b:9f:
+ d8:ed:22:9c:aa:bd:ee:9b:90:1d:84:a3:4c:0b:cb:eb:64:73:
+ ba:f7:15:ce:da:5f:db:8b:15:07:a6:28:7f:b9:8c:11:9b:64:
+ d3:f1:be:52:4f:c3:d8:58:fe:de:56:63:63:3b:51:ed:a7:81:
+ f9:05:51:70:63:32:09:0e:94:7e:05:fe:a1:56:18:34:98:d5:
+ 99:1e:4e:27:38:89:90:6a:e5:ce:60:35:01:f5:de:34:60:b1:
+ cb:ae
+-----BEGIN CERTIFICATE-----
+MIICmDCCAgGgAwIBAgIBATANBgkqhkiG9w0BAQUFADBJMQswCQYDVQQGEwJKUDEO
+MAwGA1UECBMFVG9reW8xFDASBgNVBAoTC3B5d2Vic29ja2V0MRQwEgYDVQQDEwtw
+eXdlYnNvY2tldDAeFw0xMjA2MDYwNzI1MDhaFw0zOTEwMjMwNzI1MDhaMEkxCzAJ
+BgNVBAYTAkpQMQ4wDAYDVQQIEwVUb2t5bzEUMBIGA1UEChMLcHl3ZWJzb2NrZXQx
+FDASBgNVBAMTC3B5d2Vic29ja2V0MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB
+gQDeEM46WgSkHCmTXCOCGvIGAeYrpA/dd0l2iQOiId4Edcbi3fs1Jzqiko4SYis+
+H/R437aUyyfWy9Y311wI8Ak+yc4kLQDJ30rgmeX7I6ni1sk9lvoBiN5aibDPA2dv
+BIYd72IcVakHmi5mKnNbTGID+YKD22i/uEsLi5MRuFRzewIDAQABo4GPMIGMMAkG
+A1UdEwQCMAAwEQYJYIZIAYb4QgEBBAQDAgZAMCwGCWCGSAGG+EIBDQQfFh1PcGVu
+U1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUgqFzixYMfOTTRpUT
+lRoywYTpBgAwHwYDVR0jBBgwFoAU6xZ3EJK7KHBpl5pvoGhOGjf75vMwDQYJKoZI
+hvcNAQEFBQADgYEAa7NGKQLfsMiOxNd/oB4NGusv39FIVzZflYwb8FHWUueNhDuf
+2O0inKq97puQHYSjTAvL62RzuvcVztpf24sVB6Yof7mMEZtk0/G+Uk/D2Fj+3lZj
+YztR7aeB+QVRcGMyCQ6UfgX+oVYYNJjVmR5OJziJkGrlzmA1AfXeNGCxy64=
+-----END CERTIFICATE-----
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/client_cert.p12 b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/client_cert.p12
new file mode 100644
index 0000000000..14e1399279
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/client_cert.p12
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/key.pem b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/key.pem
new file mode 100644
index 0000000000..fae858318f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/cert/key.pem
@@ -0,0 +1,15 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIICXgIBAAKBgQDeEM46WgSkHCmTXCOCGvIGAeYrpA/dd0l2iQOiId4Edcbi3fs1
+Jzqiko4SYis+H/R437aUyyfWy9Y311wI8Ak+yc4kLQDJ30rgmeX7I6ni1sk9lvoB
+iN5aibDPA2dvBIYd72IcVakHmi5mKnNbTGID+YKD22i/uEsLi5MRuFRzewIDAQAB
+AoGBAIuCuV1Vcnb7rm8CwtgZP5XgmY8vSjxTldafa6XvawEYUTP0S77v/1llg1Yv
+UIV+I+PQgG9oVoYOl22LoimHS/Z3e1fsot5tDYszGe8/Gkst4oaReSoxvBUa6WXp
+QSo7YFCajuHtE+W/gzF+UHbdzzXIDjQZ314LNF5t+4UnsEPBAkEA+girImqWoM2t
+3UR8f8oekERwsmEMf9DH5YpH4cvUnvI+kwesC/r2U8Sho++fyEMUNm7aIXGqNLga
+ogAM+4NX4QJBAONdSxSay22egTGNoIhLndljWkuOt/9FWj2klf/4QxD4blMJQ5Oq
+QdOGAh7nVQjpPLQ5D7CBVAKpGM2CD+QJBtsCQEP2kz35pxPylG3urcC2mfQxBkkW
+ZCViBNP58GwJ0bOauTOSBEwFXWuLqTw8aDwxL49UNmqc0N0fpe2fAehj3UECQQCm
+FH/DjU8Lw7ybddjNtm6XXPuYNagxz3cbkB4B3FchDleIUDwMoVF0MW9bI5/54mV1
+QDk1tUKortxvQZJaAD4BAkEAhGOHQqPd6bBBoFBvpaLzPJMxwLKrB+Wtkq/QlC72
+ClRiMn2g8SALiIL3BDgGXKcKE/Wy7jo/af/JCzQ/cPqt/A==
+-----END RSA PRIVATE KEY-----
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/client_for_testing.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/client_for_testing.py
new file mode 100644
index 0000000000..a45e8f5cf2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/client_for_testing.py
@@ -0,0 +1,726 @@
+#!/usr/bin/env python
+#
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""WebSocket client utility for testing.
+
+This module contains helper methods for performing handshake, frame
+sending/receiving as a WebSocket client.
+
+This is code for testing mod_pywebsocket. Keep this code independent from
+mod_pywebsocket. Don't import, e.g., the Stream class to generate frames for
+testing. Using util.hexify, etc., which are not related to protocol
+processing, is allowed.
+
+Note:
+This code is far from robust; e.g., we cut corners in the handshake.
+"""
+
+from __future__ import absolute_import
+import base64
+import errno
+import logging
+import os
+import random
+import re
+import socket
+import struct
+import time
+from hashlib import sha1
+from six import iterbytes
+from six import indexbytes
+
+from mod_pywebsocket import common
+from mod_pywebsocket import util
+from mod_pywebsocket.handshake import HandshakeException
+
+DEFAULT_PORT = 80
+DEFAULT_SECURE_PORT = 443
+
+# Opcodes introduced in IETF HyBi 01 for the new framing format
+OPCODE_CONTINUATION = 0x0
+OPCODE_CLOSE = 0x8
+OPCODE_PING = 0x9
+OPCODE_PONG = 0xa
+OPCODE_TEXT = 0x1
+OPCODE_BINARY = 0x2
+
+# Strings used for handshake
+_UPGRADE_HEADER = 'Upgrade: websocket\r\n'
+_CONNECTION_HEADER = 'Connection: Upgrade\r\n'
+
+WEBSOCKET_ACCEPT_UUID = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
+
+# Status codes
+STATUS_NORMAL_CLOSURE = 1000
+STATUS_GOING_AWAY = 1001
+STATUS_PROTOCOL_ERROR = 1002
+STATUS_UNSUPPORTED_DATA = 1003
+STATUS_NO_STATUS_RECEIVED = 1005
+STATUS_ABNORMAL_CLOSURE = 1006
+STATUS_INVALID_FRAME_PAYLOAD_DATA = 1007
+STATUS_POLICY_VIOLATION = 1008
+STATUS_MESSAGE_TOO_BIG = 1009
+STATUS_MANDATORY_EXT = 1010
+STATUS_INTERNAL_ENDPOINT_ERROR = 1011
+STATUS_TLS_HANDSHAKE = 1015
+
+# Extension tokens
+_PERMESSAGE_DEFLATE_EXTENSION = 'permessage-deflate'
+
+
+def _method_line(resource):
+ return 'GET %s HTTP/1.1\r\n' % resource
+
+
+def _sec_origin_header(origin):
+ return 'Sec-WebSocket-Origin: %s\r\n' % origin.lower()
+
+
+def _origin_header(origin):
+ # 4.1 13. concatenation of the string "Origin:", a U+0020 SPACE character,
+ # and the /origin/ value, converted to ASCII lowercase, to /fields/.
+ return 'Origin: %s\r\n' % origin.lower()
+
+
+def _format_host_header(host, port, secure):
+ # 4.1 9. Let /hostport/ be an empty string.
+ # 4.1 10. Append the /host/ value, converted to ASCII lowercase, to
+ # /hostport/
+ hostport = host.lower()
+ # 4.1 11. If /secure/ is false, and /port/ is not 80, or if /secure/
+ # is true, and /port/ is not 443, then append a U+003A COLON character
+ # (:) followed by the value of /port/, expressed as a base-ten integer,
+ # to /hostport/
+ if ((not secure and port != DEFAULT_PORT)
+ or (secure and port != DEFAULT_SECURE_PORT)):
+ hostport += ':' + str(port)
+ # 4.1 12. concatenation of the string "Host:", a U+0020 SPACE
+ # character, and /hostport/, to /fields/.
+ return 'Host: %s\r\n' % hostport
+
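+# A few illustrative calls (hypothetical hosts) and the headers they produce:
+#   _format_host_header('Example.COM', 80, False)   -> 'Host: example.com\r\n'
+#   _format_host_header('example.com', 8880, False) -> 'Host: example.com:8880\r\n'
+#   _format_host_header('example.com', 443, True)   -> 'Host: example.com\r\n'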
+
+# TODO(tyoshino): Define a base class and move these shared methods to that.
+
+
+def receive_bytes(socket, length):
+ received_bytes = []
+ remaining = length
+ while remaining > 0:
+ new_received_bytes = socket.recv(remaining)
+ if not new_received_bytes:
+ raise Exception(
+ 'Connection closed before receiving requested length '
+ '(requested %d bytes but received only %d bytes)' %
+ (length, length - remaining))
+ received_bytes.append(new_received_bytes)
+ remaining -= len(new_received_bytes)
+ return b''.join(received_bytes)
+
+
+# TODO(tyoshino): Now the WebSocketHandshake class diverts these methods. We
+# should move to HTTP parser as specified in RFC 6455.
+
+
+def _read_fields(socket):
+ # 4.1 32. let /fields/ be a list of name-value pairs, initially empty.
+ fields = {}
+ while True:
+ # 4.1 33. let /name/ and /value/ be empty byte arrays
+ name = b''
+ value = b''
+ # 4.1 34. read /name/
+ name = _read_name(socket)
+ if name is None:
+ break
+ # 4.1 35. read spaces
+ # TODO(tyoshino): Skip only one space as described in the spec.
+ ch = _skip_spaces(socket)
+ # 4.1 36. read /value/
+ value = _read_value(socket, ch)
+ # 4.1 37. read a byte from the server
+ ch = receive_bytes(socket, 1)
+ if ch != b'\n': # 0x0A
+ raise Exception(
+ 'Expected LF but found %r while reading value %r for header '
+ '%r' % (ch, name, value))
+ # 4.1 38. append an entry to the /fields/ list that has the name
+ # given by the string obtained by interpreting the /name/ byte
+ # array as a UTF-8 stream and the value given by the string
+ # obtained by interpreting the /value/ byte array as a UTF-8 byte
+ # stream.
+ fields.setdefault(name.decode('UTF-8'),
+ []).append(value.decode('UTF-8'))
+ # 4.1 39. return to the "Field" step above
+ return fields
+
+
+def _read_name(socket):
+ # 4.1 33. let /name/ be empty byte arrays
+ name = b''
+ while True:
+ # 4.1 34. read a byte from the server
+ ch = receive_bytes(socket, 1)
+ if ch == b'\r': # 0x0D
+ return None
+ elif ch == b'\n': # 0x0A
+ raise Exception('Unexpected LF when reading header name %r' % name)
+ elif ch == b':': # 0x3A
+ return name.lower()
+ else:
+ name += ch
+
+
+def _skip_spaces(socket):
+ # 4.1 35. read a byte from the server
+ while True:
+ ch = receive_bytes(socket, 1)
+ if ch == b' ': # 0x20
+ continue
+ return ch
+
+
+def _read_value(socket, ch):
+ # 4.1 33. let /value/ be empty byte arrays
+ value = b''
+ # 4.1 36. read a byte from server.
+ while True:
+ if ch == b'\r': # 0x0D
+ return value
+ elif ch == b'\n': # 0x0A
+ raise Exception('Unexpected LF when reading header value %r' %
+ value)
+ else:
+ value += ch
+ ch = receive_bytes(socket, 1)
+
+
+def read_frame_header(socket):
+
+ first_byte = ord(receive_bytes(socket, 1))
+ fin = (first_byte >> 7) & 1
+ rsv1 = (first_byte >> 6) & 1
+ rsv2 = (first_byte >> 5) & 1
+ rsv3 = (first_byte >> 4) & 1
+ opcode = first_byte & 0xf
+
+ second_byte = ord(receive_bytes(socket, 1))
+ mask = (second_byte >> 7) & 1
+ payload_length = second_byte & 0x7f
+
+ if mask != 0:
+ raise Exception('Mask bit must be 0 for frames coming from server')
+
+ if payload_length == 127:
+ extended_payload_length = receive_bytes(socket, 8)
+ payload_length = struct.unpack('!Q', extended_payload_length)[0]
+ if payload_length > 0x7FFFFFFFFFFFFFFF:
+ raise Exception('Extended payload length >= 2^63')
+ elif payload_length == 126:
+ extended_payload_length = receive_bytes(socket, 2)
+ payload_length = struct.unpack('!H', extended_payload_length)[0]
+
+ return fin, rsv1, rsv2, rsv3, opcode, payload_length
+
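+# For illustration, an unmasked server-to-client text frame carrying 'Hi' is
+# the two header bytes b'\x81\x02' followed by b'Hi'; after consuming the
+# header bytes, read_frame_header would return
+# (fin, rsv1, rsv2, rsv3, opcode, payload_length) == (1, 0, 0, 0, 0x1, 2).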
+
+class _TLSSocket(object):
+ """Wrapper for a TLS connection."""
+ def __init__(self, raw_socket):
+ self._ssl = socket.ssl(raw_socket)
+
+ def send(self, bytes):
+ return self._ssl.write(bytes)
+
+ def recv(self, size=-1):
+ return self._ssl.read(size)
+
+ def close(self):
+ # Nothing to do.
+ pass
+
+
+class HttpStatusException(Exception):
+ """This exception will be raised when unexpected http status code was
+ received as a result of handshake.
+ """
+ def __init__(self, name, status):
+ super(HttpStatusException, self).__init__(name)
+ self.status = status
+
+
+class WebSocketHandshake(object):
+ """Opening handshake processor for the WebSocket protocol (RFC 6455)."""
+ def __init__(self, options):
+ self._logger = util.get_class_logger(self)
+
+ self._options = options
+
+ def handshake(self, socket):
+ """Handshake WebSocket.
+
+ Raises:
+ Exception: handshake failed.
+ """
+
+ self._socket = socket
+
+ request_line = _method_line(self._options.resource)
+ self._logger.debug('Opening handshake Request-Line: %r', request_line)
+ self._socket.sendall(request_line.encode('UTF-8'))
+
+ fields = []
+ fields.append(_UPGRADE_HEADER)
+ fields.append(_CONNECTION_HEADER)
+
+ fields.append(
+ _format_host_header(self._options.server_host,
+ self._options.server_port,
+ self._options.use_tls))
+
+ if self._options.version == 8:
+ fields.append(_sec_origin_header(self._options.origin))
+ else:
+ fields.append(_origin_header(self._options.origin))
+
+ original_key = os.urandom(16)
+ key = base64.b64encode(original_key)
+ self._logger.debug('Sec-WebSocket-Key: %s (%s)', key,
+ util.hexify(original_key))
+ fields.append(u'Sec-WebSocket-Key: %s\r\n' % key.decode('UTF-8'))
+
+ fields.append(u'Sec-WebSocket-Version: %d\r\n' % self._options.version)
+
+ if self._options.use_basic_auth:
+ credential = 'Basic ' + base64.b64encode(
+ self._options.basic_auth_credential.encode('UTF-8')).decode()
+ fields.append(u'Authorization: %s\r\n' % credential)
+
+ # Setting up extensions.
+ if len(self._options.extensions) > 0:
+ fields.append(u'Sec-WebSocket-Extensions: %s\r\n' %
+ ', '.join(self._options.extensions))
+
+ self._logger.debug('Opening handshake request headers: %r', fields)
+
+ for field in fields:
+ self._socket.sendall(field.encode('UTF-8'))
+ self._socket.sendall(b'\r\n')
+
+ self._logger.info('Sent opening handshake request')
+
+ field = b''
+ while True:
+ ch = receive_bytes(self._socket, 1)
+ field += ch
+ if ch == b'\n':
+ break
+
+ self._logger.debug('Opening handshake Response-Line: %r', field)
+
+ # Will raise a UnicodeDecodeError when the decode fails
+ if len(field) < 7 or not field.endswith(b'\r\n'):
+ raise Exception('Wrong status line: %s' % field.decode('Latin-1'))
+ m = re.match(b'[^ ]* ([^ ]*) .*', field)
+ if m is None:
+ raise Exception('No HTTP status code found in status line: %s' %
+ field.decode('Latin-1'))
+ code = m.group(1)
+ if not re.match(b'[0-9][0-9][0-9]$', code):
+ raise Exception(
+ 'HTTP status code %s is not three digit in status line: %s' %
+ (code.decode('Latin-1'), field.decode('Latin-1')))
+ if code != b'101':
+ raise HttpStatusException(
+ 'Expected HTTP status code 101 but found %s in status line: '
+ '%r' % (code.decode('Latin-1'), field.decode('Latin-1')),
+ int(code))
+ fields = _read_fields(self._socket)
+ ch = receive_bytes(self._socket, 1)
+ if ch != b'\n': # 0x0A
+ raise Exception('Expected LF but found: %r' % ch)
+
+ self._logger.debug('Opening handshake response headers: %r', fields)
+
+ # Check /fields/
+ if len(fields['upgrade']) != 1:
+ raise Exception('Multiple Upgrade headers found: %s' %
+ fields['upgrade'])
+ if len(fields['connection']) != 1:
+ raise Exception('Multiple Connection headers found: %s' %
+ fields['connection'])
+ if fields['upgrade'][0] != 'websocket':
+ raise Exception('Unexpected Upgrade header value: %s' %
+ fields['upgrade'][0])
+ if fields['connection'][0].lower() != 'upgrade':
+ raise Exception('Unexpected Connection header value: %s' %
+ fields['connection'][0])
+
+ if len(fields['sec-websocket-accept']) != 1:
+ raise Exception('Multiple Sec-WebSocket-Accept headers found: %s' %
+ fields['sec-websocket-accept'])
+
+ accept = fields['sec-websocket-accept'][0]
+
+ # Validate
+ try:
+ decoded_accept = base64.b64decode(accept)
+ except TypeError as e:
+ raise HandshakeException(
+ 'Illegal value for header Sec-WebSocket-Accept: ' + accept)
+
+ if len(decoded_accept) != 20:
+ raise HandshakeException(
+ 'Decoded value of Sec-WebSocket-Accept is not 20-byte long')
+
+ self._logger.debug('Actual Sec-WebSocket-Accept: %r (%s)', accept,
+ util.hexify(decoded_accept))
+
+ original_expected_accept = sha1(key + WEBSOCKET_ACCEPT_UUID).digest()
+ expected_accept = base64.b64encode(original_expected_accept)
+
+ self._logger.debug('Expected Sec-WebSocket-Accept: %r (%s)',
+ expected_accept,
+ util.hexify(original_expected_accept))
+
+ if accept != expected_accept.decode('UTF-8'):
+ raise Exception(
+ 'Invalid Sec-WebSocket-Accept header: %r (expected) != %r '
+ '(actual)' % (accept, expected_accept))
+
+ server_extensions_header = fields.get('sec-websocket-extensions')
+ accepted_extensions = []
+ if server_extensions_header is not None:
+ accepted_extensions = common.parse_extensions(
+ ', '.join(server_extensions_header))
+
+ # Scan the accepted extension list to check whether it contains any
+ # unrecognized extensions or extensions we didn't request. Then, for
+ # the extensions we requested, parse them and store their parameters.
+ # They will be used later by each extension.
+ for extension in accepted_extensions:
+ if extension.name() == _PERMESSAGE_DEFLATE_EXTENSION:
+ checker = self._options.check_permessage_deflate
+ if checker:
+ checker(extension)
+ continue
+
+ raise Exception('Received unrecognized extension: %s' %
+ extension.name())
+
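+# For illustration, the Sec-WebSocket-Accept value checked above is computed
+# exactly as in RFC 6455's own example:
+#   >>> import base64
+#   >>> from hashlib import sha1
+#   >>> key = b'dGhlIHNhbXBsZSBub25jZQ=='
+#   >>> base64.b64encode(sha1(key + WEBSOCKET_ACCEPT_UUID).digest())
+#   b's3pPLMBiTxaQ9kYGzzhZRbK+xOo='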
+
+class WebSocketStream(object):
+ """Frame processor for the WebSocket protocol (RFC 6455)."""
+ def __init__(self, socket, handshake):
+ self._handshake = handshake
+ self._socket = socket
+
+ # Filters applied to application data part of data frames.
+ self._outgoing_frame_filter = None
+ self._incoming_frame_filter = None
+
+ self._fragmented = False
+
+ def _mask_hybi(self, s):
+ # TODO(tyoshino): os.urandom does open/read/close for every call. If
+ # performance matters, change this to a library call that generates a
+ # cryptographically secure pseudo-random number sequence.
+ masking_nonce = os.urandom(4)
+ result = [masking_nonce]
+ count = 0
+ for c in iterbytes(s):
+ result.append(util.pack_byte(c ^ indexbytes(masking_nonce, count)))
+ count = (count + 1) % len(masking_nonce)
+ return b''.join(result)
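+ # Masking is a plain XOR with the 4-byte nonce prepended to the output,
+ # so applying the same nonce again restores the payload. Illustrative
+ # sketch (hypothetical values; the real nonce is random):
+ #   masked = stream._mask_hybi(b'abcd')
+ #   nonce, body = masked[:4], masked[4:]
+ #   bytes(b ^ nonce[i % 4] for i, b in enumerate(body)) == b'abcd'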
+
+ def send_frame_of_arbitrary_bytes(self, header, body):
+ self._socket.sendall(header + self._mask_hybi(body))
+
+ def send_data(self,
+ payload,
+ frame_type,
+ end=True,
+ mask=True,
+ rsv1=0,
+ rsv2=0,
+ rsv3=0):
+ if self._outgoing_frame_filter is not None:
+ payload = self._outgoing_frame_filter.filter(payload)
+
+ if self._fragmented:
+ opcode = OPCODE_CONTINUATION
+ else:
+ opcode = frame_type
+
+ if end:
+ self._fragmented = False
+ fin = 1
+ else:
+ self._fragmented = True
+ fin = 0
+
+ if mask:
+ mask_bit = 1 << 7
+ else:
+ mask_bit = 0
+
+ header = util.pack_byte(fin << 7 | rsv1 << 6 | rsv2 << 5 | rsv3 << 4
+ | opcode)
+ payload_length = len(payload)
+ if payload_length <= 125:
+ header += util.pack_byte(mask_bit | payload_length)
+ elif payload_length < 1 << 16:
+ header += util.pack_byte(mask_bit | 126) + struct.pack(
+ '!H', payload_length)
+ elif payload_length < 1 << 63:
+ header += util.pack_byte(mask_bit | 127) + struct.pack(
+ '!Q', payload_length)
+ else:
+ raise Exception('Payload is too long (%d bytes)' % payload_length)
+ if mask:
+ payload = self._mask_hybi(payload)
+ self._socket.sendall(header + payload)
+
+ def send_binary(self, payload, end=True, mask=True):
+ self.send_data(payload, OPCODE_BINARY, end, mask)
+
+ def send_text(self, payload, end=True, mask=True):
+ self.send_data(payload.encode('utf-8'), OPCODE_TEXT, end, mask)
+
+ def _assert_receive_data(self, payload, opcode, fin, rsv1, rsv2, rsv3):
+ (actual_fin, actual_rsv1, actual_rsv2, actual_rsv3, actual_opcode,
+ payload_length) = read_frame_header(self._socket)
+
+ if actual_opcode != opcode:
+ raise Exception('Unexpected opcode: %d (expected) vs %d (actual)' %
+ (opcode, actual_opcode))
+
+ if actual_fin != fin:
+ raise Exception('Unexpected fin: %d (expected) vs %d (actual)' %
+ (fin, actual_fin))
+
+ if rsv1 is None:
+ rsv1 = 0
+
+ if rsv2 is None:
+ rsv2 = 0
+
+ if rsv3 is None:
+ rsv3 = 0
+
+ if actual_rsv1 != rsv1:
+ raise Exception('Unexpected rsv1: %r (expected) vs %r (actual)' %
+ (rsv1, actual_rsv1))
+
+ if actual_rsv2 != rsv2:
+ raise Exception('Unexpected rsv2: %r (expected) vs %r (actual)' %
+ (rsv2, actual_rsv2))
+
+ if actual_rsv3 != rsv3:
+ raise Exception('Unexpected rsv3: %r (expected) vs %r (actual)' %
+ (rsv3, actual_rsv3))
+
+ received = receive_bytes(self._socket, payload_length)
+
+ if self._incoming_frame_filter is not None:
+ received = self._incoming_frame_filter.filter(received)
+
+ if len(received) != len(payload):
+ raise Exception(
+ 'Unexpected payload length: %d (expected) vs %d (actual)' %
+ (len(payload), len(received)))
+
+ if payload != received:
+ raise Exception(
+ 'Unexpected payload: %r (expected) vs %r (actual)' %
+ (payload, received))
+
+ def assert_receive_binary(self,
+ payload,
+ opcode=OPCODE_BINARY,
+ fin=1,
+ rsv1=None,
+ rsv2=None,
+ rsv3=None):
+ self._assert_receive_data(payload, opcode, fin, rsv1, rsv2, rsv3)
+
+ def assert_receive_text(self,
+ payload,
+ opcode=OPCODE_TEXT,
+ fin=1,
+ rsv1=None,
+ rsv2=None,
+ rsv3=None):
+ self._assert_receive_data(payload.encode('utf-8'), opcode, fin, rsv1,
+ rsv2, rsv3)
+
+ def _build_close_frame(self, code, reason, mask):
+ frame = util.pack_byte(1 << 7 | OPCODE_CLOSE)
+
+ if code is not None:
+ body = struct.pack('!H', code) + reason.encode('utf-8')
+ else:
+ body = b''
+ if mask:
+ frame += util.pack_byte(1 << 7 | len(body)) + self._mask_hybi(body)
+ else:
+ frame += util.pack_byte(len(body)) + body
+ return frame
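+ # For illustration, an unmasked close frame for code 1000 with reason
+ # 'bye' is b'\x88\x05\x03\xe8bye': FIN plus opcode 0x8, a 5-byte payload
+ # length, the 2-byte status code, then the UTF-8 reason.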
+
+ def send_close(self, code, reason):
+ self._socket.sendall(self._build_close_frame(code, reason, True))
+
+ def assert_receive_close(self, code, reason):
+ expected_frame = self._build_close_frame(code, reason, False)
+ actual_frame = receive_bytes(self._socket, len(expected_frame))
+ if actual_frame != expected_frame:
+ raise Exception(
+ 'Unexpected close frame: %r (expected) vs %r (actual)' %
+ (expected_frame, actual_frame))
+
+
+class ClientOptions(object):
+ """Holds option values to configure the Client object."""
+ def __init__(self):
+ self.version = 13
+ self.server_host = ''
+ self.origin = ''
+ self.resource = ''
+ self.server_port = -1
+ self.socket_timeout = 1000
+ self.use_tls = False
+ self.use_basic_auth = False
+ self.basic_auth_credential = 'test:test'
+ self.extensions = []
+
+
+def connect_socket_with_retry(host,
+ port,
+ timeout,
+ use_tls,
+ retry=10,
+ sleep_sec=0.1):
+ retry_count = 0
+ while retry_count < retry:
+ try:
+ s = socket.socket()
+ s.settimeout(timeout)
+ s.connect((host, port))
+ if use_tls:
+ return _TLSSocket(s)
+ return s
+ except socket.error as e:
+ if e.errno != errno.ECONNREFUSED:
+ raise
+ else:
+ retry_count = retry_count + 1
+ time.sleep(sleep_sec)
+
+ return None
+
+
+class Client(object):
+ """WebSocket client."""
+ def __init__(self, options, handshake, stream_class):
+ self._logger = util.get_class_logger(self)
+
+ self._options = options
+ self._socket = None
+
+ self._handshake = handshake
+ self._stream_class = stream_class
+
+ def connect(self):
+ self._socket = connect_socket_with_retry(self._options.server_host,
+ self._options.server_port,
+ self._options.socket_timeout,
+ self._options.use_tls)
+
+ self._handshake.handshake(self._socket)
+
+ self._stream = self._stream_class(self._socket, self._handshake)
+
+ self._logger.info('Connection established')
+
+ def send_frame_of_arbitrary_bytes(self, header, body):
+ self._stream.send_frame_of_arbitrary_bytes(header, body)
+
+ def send_message(self,
+ message,
+ end=True,
+ binary=False,
+ raw=False,
+ mask=True):
+ if binary:
+ self._stream.send_binary(message, end, mask)
+ elif raw:
+ self._stream.send_data(message, OPCODE_TEXT, end, mask)
+ else:
+ self._stream.send_text(message, end, mask)
+
+ def assert_receive(self, payload, binary=False):
+ if binary:
+ self._stream.assert_receive_binary(payload)
+ else:
+ self._stream.assert_receive_text(payload)
+
+ def send_close(self, code=STATUS_NORMAL_CLOSURE, reason=''):
+ self._stream.send_close(code, reason)
+
+ def assert_receive_close(self, code=STATUS_NORMAL_CLOSURE, reason=''):
+ self._stream.assert_receive_close(code, reason)
+
+ def close_socket(self):
+ self._socket.close()
+
+ def assert_connection_closed(self):
+ try:
+ read_data = receive_bytes(self._socket, 1)
+ except Exception as e:
+ if str(e).find(
+ 'Connection closed before receiving requested length '
+ ) == 0:
+ return
+ try:
+ error_number, message = e
+ for error_name in ['ECONNRESET', 'WSAECONNRESET']:
+ if (error_name in dir(errno)
+ and error_number == getattr(errno, error_name)):
+ return
+ except:
+ raise e
+ raise e
+
+ raise Exception('Connection is not closed (Read: %r)' % read_data)
+
+
+def create_client(options):
+ return Client(options, WebSocketHandshake(options), WebSocketStream)
+
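+# A minimal usage sketch (hypothetical host, port and resource; the server
+# must expose a matching echo-style handler):
+#   options = ClientOptions()
+#   options.server_host = 'localhost'
+#   options.server_port = 8880
+#   options.origin = 'http://localhost:8880'
+#   options.resource = '/echo'
+#   client = create_client(options)
+#   client.connect()
+#   client.send_message('Hello')
+#   client.assert_receive('Hello')
+#   client.send_close()
+#   client.assert_receive_close()
+#   client.close_socket()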
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/mock.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/mock.py
new file mode 100644
index 0000000000..eeaef52ecf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/mock.py
@@ -0,0 +1,227 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Mocks for testing.
+"""
+
+from __future__ import absolute_import
+import six.moves.queue
+import threading
+import struct
+import six
+
+from mod_pywebsocket import common
+from mod_pywebsocket import util
+from mod_pywebsocket.stream import Stream
+from mod_pywebsocket.stream import StreamOptions
+from six.moves import range
+
+
+class _MockConnBase(object):
+ """Base class of mocks for mod_python.apache.mp_conn.
+
+ This enables tests to check what is written to a (mock) mp_conn.
+ """
+ def __init__(self):
+ self._write_data = []
+ self.remote_addr = b'fake_address'
+
+ def write(self, data):
+ """Override mod_python.apache.mp_conn.write.
+
+ data should be bytes when calling this method directly.
+ """
+
+ self._write_data.append(data)
+
+ def written_data(self):
+ """Get bytes written to this mock."""
+
+ return b''.join(self._write_data)
+
+
+class MockConn(_MockConnBase):
+ """Mock for mod_python.apache.mp_conn.
+
+ This enables tests to specify what should be read from a (mock) mp_conn as
+ well as to check what is written to it.
+ """
+ def __init__(self, read_data):
+ """Constructs an instance.
+
+ Args:
+ read_data: bytes that should be returned when read* methods are
+ called.
+ """
+
+ _MockConnBase.__init__(self)
+ self._read_data = read_data
+ self._read_pos = 0
+
+ def readline(self):
+ """Override mod_python.apache.mp_conn.readline."""
+
+ if self._read_pos >= len(self._read_data):
+ return b''
+ end_index = self._read_data.find(b'\n', self._read_pos) + 1
+ if not end_index:
+ end_index = len(self._read_data)
+ return self._read_up_to(end_index)
+
+ def read(self, length):
+ """Override mod_python.apache.mp_conn.read."""
+
+ if self._read_pos >= len(self._read_data):
+ return b''
+ end_index = min(len(self._read_data), self._read_pos + length)
+ return self._read_up_to(end_index)
+
+ def _read_up_to(self, end_index):
+ line = self._read_data[self._read_pos:end_index]
+ self._read_pos = end_index
+ return line
+
+
+class MockBlockingConn(_MockConnBase):
+ """Blocking mock for mod_python.apache.mp_conn.
+
+ This enables tests to specify what should be read from a (mock) mp_conn as
+ well as to check what is written to it.
+ Callers of read* methods will block if there are no bytes available.
+ """
+ def __init__(self):
+ _MockConnBase.__init__(self)
+ self._queue = six.moves.queue.Queue()
+
+ def readline(self):
+ """Override mod_python.apache.mp_conn.readline."""
+ line = bytearray()
+ while True:
+ c = self._queue.get()
+ line.append(c)
+ if c == ord(b'\n'):
+ return bytes(line)
+
+ def read(self, length):
+ """Override mod_python.apache.mp_conn.read."""
+
+ data = bytearray()
+ for unused in range(length):
+ data.append(self._queue.get())
+
+ return bytes(data)
+
+ def put_bytes(self, bytes):
+ """Put bytes to be read from this mock.
+
+ Args:
+ bytes: bytes to be read.
+ """
+
+ for byte in six.iterbytes(bytes):
+ self._queue.put(byte)
+
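+# For illustration, a test can feed bytes into the mock and read them back:
+#   conn = MockBlockingConn()
+#   conn.put_bytes(b'GET / HTTP/1.1\r\n')
+#   conn.readline()  # -> b'GET / HTTP/1.1\r\n'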
+
+class MockTable(dict):
+ """Mock table.
+
+ This mimics mod_python mp_table. Note that only the methods used by
+ tests are overridden.
+ """
+ def __init__(self, copy_from={}):
+ if isinstance(copy_from, dict):
+ copy_from = list(copy_from.items())
+ for key, value in copy_from:
+ self.__setitem__(key, value)
+
+ def __getitem__(self, key):
+ return super(MockTable, self).__getitem__(key.lower())
+
+ def __setitem__(self, key, value):
+ super(MockTable, self).__setitem__(key.lower(), value)
+
+ def get(self, key, def_value=None):
+ return super(MockTable, self).get(key.lower(), def_value)
+
+
+class MockRequest(object):
+ """Mock request.
+
+ This mimics mod_python request.
+ """
+ def __init__(self,
+ uri=None,
+ headers_in={},
+ connection=None,
+ method='GET',
+ protocol='HTTP/1.1',
+ is_https=False):
+ """Construct an instance.
+
+ Arguments:
+ uri: URI of the request.
+ headers_in: Request headers.
+ connection: Connection used for the request.
+ method: request method.
+ is_https: Whether this request is over SSL.
+
+ See the document of mod_python Request for details.
+ """
+ self.uri = uri
+ self.unparsed_uri = uri
+ self.connection = connection
+ self.method = method
+ self.protocol = protocol
+ self.headers_in = MockTable(headers_in)
+ # self.is_https_ needs to be accessible from tests. To avoid name
+ # conflict with self.is_https(), it is named as such.
+ self.is_https_ = is_https
+ self.ws_stream = Stream(self, StreamOptions())
+ self.ws_close_code = None
+ self.ws_close_reason = None
+ self.ws_version = common.VERSION_HYBI_LATEST
+ self.ws_deflate = False
+
+ def is_https(self):
+ """Return whether this request is over SSL."""
+ return self.is_https_
+
+
+class MockDispatcher(object):
+ """Mock for dispatch.Dispatcher."""
+ def __init__(self):
+ self.do_extra_handshake_called = False
+
+ def do_extra_handshake(self, conn_context):
+ self.do_extra_handshake_called = True
+
+ def transfer_data(self, conn_context):
+ pass
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/run_all.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/run_all.py
new file mode 100755
index 0000000000..ea52223cea
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/run_all.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+#
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Run all tests in the same directory.
+
+This suite is expected to be run under pywebsocket's src directory, i.e. the
+directory containing mod_pywebsocket, test, etc.
+
+To change the logging level, please specify the --log-level option.
+ python test/run_all.py --log-level debug
+
+To pass any option to the unittest module, please specify options after '--'.
+For example, run this to make the test runner verbose.
+ python test/run_all.py --log-level debug -- -v
+"""
+
+from __future__ import absolute_import
+import logging
+import argparse
+import os
+import re
+import six
+import sys
+import unittest
+
+_TEST_MODULE_PATTERN = re.compile(r'^(test_.+)\.py$')
+
+
+def _list_test_modules(directory):
+ module_names = []
+ for filename in os.listdir(directory):
+ match = _TEST_MODULE_PATTERN.search(filename)
+ if match:
+ module_names.append(match.group(1))
+ return module_names
+
+
+def _suite():
+ loader = unittest.TestLoader()
+ return loader.loadTestsFromNames(
+ _list_test_modules(os.path.dirname(__file__)))
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--log-level',
+ '--log_level',
+ type=six.text_type,
+ dest='log_level',
+ default='warning',
+ choices=['debug', 'info', 'warning', 'warn', 'error', 'critical'])
+ options, args = parser.parse_known_args()
+ logging.basicConfig(level=logging.getLevelName(options.log_level.upper()),
+ format='%(levelname)s %(asctime)s '
+ '%(filename)s:%(lineno)d] '
+ '%(message)s',
+ datefmt='%H:%M:%S')
+ unittest.main(defaultTest='_suite', argv=[sys.argv[0]] + args)
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/set_sys_path.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/set_sys_path.py
new file mode 100644
index 0000000000..48d0e116a5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/set_sys_path.py
@@ -0,0 +1,41 @@
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Configuration for testing.
+
+Test files should import this module before mod_pywebsocket.
+"""
+
+from __future__ import absolute_import
+import os
+import sys
+
+# Add the parent directory to sys.path to enable importing mod_pywebsocket.
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_dispatch.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_dispatch.py
new file mode 100755
index 0000000000..132dd92d76
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_dispatch.py
@@ -0,0 +1,298 @@
+#!/usr/bin/env python
+#
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Tests for dispatch module."""
+
+from __future__ import absolute_import
+import os
+import unittest
+
+import set_sys_path # Update sys.path to locate mod_pywebsocket module.
+
+from mod_pywebsocket import dispatch
+from mod_pywebsocket import handshake
+from test import mock
+from six.moves import zip
+
+_TEST_HANDLERS_DIR = os.path.join(os.path.dirname(__file__), 'testdata',
+ 'handlers')
+
+_TEST_HANDLERS_SUB_DIR = os.path.join(_TEST_HANDLERS_DIR, 'sub')
+
+
+class DispatcherTest(unittest.TestCase):
+ """A unittest for dispatch module."""
+ def test_normalize_path(self):
+ self.assertEqual(
+ os.path.abspath('/a/b').replace('\\', '/'),
+ dispatch._normalize_path('/a/b'))
+ self.assertEqual(
+ os.path.abspath('/a/b').replace('\\', '/'),
+ dispatch._normalize_path('\\a\\b'))
+ self.assertEqual(
+ os.path.abspath('/a/b').replace('\\', '/'),
+ dispatch._normalize_path('/a/c/../b'))
+ self.assertEqual(
+ os.path.abspath('abc').replace('\\', '/'),
+ dispatch._normalize_path('abc'))
+
+ def test_converter(self):
+ converter = dispatch._create_path_to_resource_converter('/a/b')
+ # Python built by MSC inserts a drive name like 'C:\' via realpath().
+ # The converter generator expands the provided path using realpath() and
+ # uses the path, including the drive name, to verify the prefix.
+ os_root = os.path.realpath('/')
+ self.assertEqual('/h', converter(os_root + 'a/b/h_wsh.py'))
+ self.assertEqual('/c/h', converter(os_root + 'a/b/c/h_wsh.py'))
+ self.assertEqual(None, converter(os_root + 'a/b/h.py'))
+ self.assertEqual(None, converter('a/b/h_wsh.py'))
+
+ converter = dispatch._create_path_to_resource_converter('a/b')
+ self.assertEqual('/h',
+ converter(dispatch._normalize_path('a/b/h_wsh.py')))
+
+ converter = dispatch._create_path_to_resource_converter('/a/b///')
+ self.assertEqual('/h', converter(os_root + 'a/b/h_wsh.py'))
+ self.assertEqual(
+ '/h', converter(dispatch._normalize_path('/a/b/../b/h_wsh.py')))
+
+ converter = dispatch._create_path_to_resource_converter(
+ '/a/../a/b/../b/')
+ self.assertEqual('/h', converter(os_root + 'a/b/h_wsh.py'))
+
+ converter = dispatch._create_path_to_resource_converter(r'\a\b')
+ self.assertEqual('/h', converter(os_root + r'a\b\h_wsh.py'))
+ self.assertEqual('/h', converter(os_root + r'a/b/h_wsh.py'))
+
+ def test_enumerate_handler_file_paths(self):
+ paths = list(
+ dispatch._enumerate_handler_file_paths(_TEST_HANDLERS_DIR))
+ paths.sort()
+ self.assertEqual(8, len(paths))
+ expected_paths = [
+ os.path.join(_TEST_HANDLERS_DIR, 'abort_by_user_wsh.py'),
+ os.path.join(_TEST_HANDLERS_DIR, 'blank_wsh.py'),
+ os.path.join(_TEST_HANDLERS_DIR, 'origin_check_wsh.py'),
+ os.path.join(_TEST_HANDLERS_DIR, 'sub',
+ 'exception_in_transfer_wsh.py'),
+ os.path.join(_TEST_HANDLERS_DIR, 'sub', 'non_callable_wsh.py'),
+ os.path.join(_TEST_HANDLERS_DIR, 'sub', 'plain_wsh.py'),
+ os.path.join(_TEST_HANDLERS_DIR, 'sub',
+ 'wrong_handshake_sig_wsh.py'),
+ os.path.join(_TEST_HANDLERS_DIR, 'sub',
+ 'wrong_transfer_sig_wsh.py'),
+ ]
+ for expected, actual in zip(expected_paths, paths):
+ self.assertEqual(expected, actual)
+
+ def test_source_handler_file(self):
+ self.assertRaises(dispatch.DispatchException,
+ dispatch._source_handler_file, '')
+ self.assertRaises(dispatch.DispatchException,
+ dispatch._source_handler_file, 'def')
+ self.assertRaises(dispatch.DispatchException,
+ dispatch._source_handler_file, '1/0')
+ self.assertTrue(
+ dispatch._source_handler_file(
+ 'def web_socket_do_extra_handshake(request):pass\n'
+ 'def web_socket_transfer_data(request):pass\n'))
+
+ def test_source_warnings(self):
+ dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
+ warnings = dispatcher.source_warnings()
+ warnings.sort()
+ expected_warnings = [
+ (os.path.realpath(os.path.join(_TEST_HANDLERS_DIR, 'blank_wsh.py'))
+ + ': web_socket_do_extra_handshake is not defined.'),
+ (os.path.realpath(
+ os.path.join(_TEST_HANDLERS_DIR, 'sub', 'non_callable_wsh.py'))
+ + ': web_socket_do_extra_handshake is not callable.'),
+ (os.path.realpath(
+ os.path.join(_TEST_HANDLERS_DIR, 'sub',
+ 'wrong_handshake_sig_wsh.py')) +
+ ': web_socket_do_extra_handshake is not defined.'),
+ (os.path.realpath(
+ os.path.join(_TEST_HANDLERS_DIR, 'sub',
+ 'wrong_transfer_sig_wsh.py')) +
+ ': web_socket_transfer_data is not defined.'),
+ ]
+ self.assertEqual(4, len(warnings))
+ for expected, actual in zip(expected_warnings, warnings):
+ self.assertEqual(expected, actual)
+
+ def test_do_extra_handshake(self):
+ dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
+ request = mock.MockRequest()
+ request.ws_resource = '/origin_check'
+ request.ws_origin = 'http://example.com'
+ dispatcher.do_extra_handshake(request) # Must not raise exception.
+
+ request.ws_origin = 'http://bad.example.com'
+ try:
+ dispatcher.do_extra_handshake(request)
+ self.fail('Could not catch HandshakeException with 403 status')
+ except handshake.HandshakeException as e:
+ self.assertEqual(403, e.status)
+ except Exception as e:
+ self.fail('Unexpected exception: %r' % e)
+
+ def test_abort_extra_handshake(self):
+ dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
+ request = mock.MockRequest()
+ request.ws_resource = '/abort_by_user'
+ self.assertRaises(handshake.AbortedByUserException,
+ dispatcher.do_extra_handshake, request)
+
+ def test_transfer_data(self):
+ dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
+
+ request = mock.MockRequest(
+ connection=mock.MockConn(b'\x88\x02\x03\xe8'))
+ request.ws_resource = '/origin_check'
+ request.ws_protocol = 'p1'
+ dispatcher.transfer_data(request)
+ self.assertEqual(
+ b'origin_check_wsh.py is called for /origin_check, p1'
+ b'\x88\x02\x03\xe8', request.connection.written_data())
+
+ request = mock.MockRequest(
+ connection=mock.MockConn(b'\x88\x02\x03\xe8'))
+ request.ws_resource = '/sub/plain'
+ request.ws_protocol = None
+ dispatcher.transfer_data(request)
+ self.assertEqual(
+ b'sub/plain_wsh.py is called for /sub/plain, None'
+ b'\x88\x02\x03\xe8', request.connection.written_data())
+
+ request = mock.MockRequest(
+ connection=mock.MockConn(b'\x88\x02\x03\xe8'))
+ request.ws_resource = '/sub/plain?'
+ request.ws_protocol = None
+ dispatcher.transfer_data(request)
+ self.assertEqual(
+ b'sub/plain_wsh.py is called for /sub/plain?, None'
+ b'\x88\x02\x03\xe8', request.connection.written_data())
+
+ request = mock.MockRequest(
+ connection=mock.MockConn(b'\x88\x02\x03\xe8'))
+ request.ws_resource = '/sub/plain?q=v'
+ request.ws_protocol = None
+ dispatcher.transfer_data(request)
+ self.assertEqual(
+ b'sub/plain_wsh.py is called for /sub/plain?q=v, None'
+ b'\x88\x02\x03\xe8', request.connection.written_data())
+
+ def test_transfer_data_no_handler(self):
+ dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
+ for resource in [
+ '/blank', '/sub/non_callable', '/sub/no_wsh_at_the_end',
+ '/does/not/exist'
+ ]:
+ request = mock.MockRequest(connection=mock.MockConn(b''))
+ request.ws_resource = resource
+ request.ws_protocol = 'p2'
+ try:
+ dispatcher.transfer_data(request)
+ self.fail()
+ except dispatch.DispatchException as e:
+ self.assertTrue(str(e).find('No handler') != -1)
+ except Exception:
+ self.fail()
+
+ def test_transfer_data_handler_exception(self):
+ dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
+ request = mock.MockRequest(connection=mock.MockConn(b''))
+ request.ws_resource = '/sub/exception_in_transfer'
+ request.ws_protocol = 'p3'
+ try:
+ dispatcher.transfer_data(request)
+ self.fail()
+ except Exception as e:
+ self.assertTrue(
+ str(e).find('Intentional') != -1,
+ 'Unexpected exception: %s' % e)
+
+ def test_abort_transfer_data(self):
+ dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
+ request = mock.MockRequest()
+ request.ws_resource = '/abort_by_user'
+ self.assertRaises(handshake.AbortedByUserException,
+ dispatcher.transfer_data, request)
+
+ def test_scan_dir(self):
+ disp = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
+ self.assertEqual(4, len(disp._handler_suite_map))
+ self.assertTrue('/origin_check' in disp._handler_suite_map)
+ self.assertTrue(
+ '/sub/exception_in_transfer' in disp._handler_suite_map)
+ self.assertTrue('/sub/plain' in disp._handler_suite_map)
+
+ def test_scan_sub_dir(self):
+ disp = dispatch.Dispatcher(_TEST_HANDLERS_DIR, _TEST_HANDLERS_SUB_DIR)
+ self.assertEqual(2, len(disp._handler_suite_map))
+ self.assertFalse('/origin_check' in disp._handler_suite_map)
+ self.assertTrue(
+ '/sub/exception_in_transfer' in disp._handler_suite_map)
+ self.assertTrue('/sub/plain' in disp._handler_suite_map)
+
+ def test_scan_sub_dir_as_root(self):
+ disp = dispatch.Dispatcher(_TEST_HANDLERS_SUB_DIR,
+ _TEST_HANDLERS_SUB_DIR)
+ self.assertEqual(2, len(disp._handler_suite_map))
+ self.assertFalse('/origin_check' in disp._handler_suite_map)
+ self.assertFalse(
+ '/sub/exception_in_transfer' in disp._handler_suite_map)
+ self.assertFalse('/sub/plain' in disp._handler_suite_map)
+ self.assertTrue('/exception_in_transfer' in disp._handler_suite_map)
+ self.assertTrue('/plain' in disp._handler_suite_map)
+
+ def test_scan_dir_must_under_root(self):
+ dispatch.Dispatcher('a/b', 'a/b/c') # OK
+ dispatch.Dispatcher('a/b///', 'a/b') # OK
+ self.assertRaises(dispatch.DispatchException, dispatch.Dispatcher,
+ 'a/b/c', 'a/b')
+
+ def test_resource_path_alias(self):
+ disp = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
+ disp.add_resource_path_alias('/', '/origin_check')
+ self.assertEqual(5, len(disp._handler_suite_map))
+ self.assertTrue('/origin_check' in disp._handler_suite_map)
+ self.assertTrue(
+ '/sub/exception_in_transfer' in disp._handler_suite_map)
+ self.assertTrue('/sub/plain' in disp._handler_suite_map)
+ self.assertTrue('/' in disp._handler_suite_map)
+ self.assertRaises(dispatch.DispatchException,
+ disp.add_resource_path_alias, '/alias', '/not-exist')
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+# vi:sts=4 sw=4 et
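The dispatcher tests above rely on the handler convention used throughout pywebsocket: each *_wsh.py file under the handler root exports web_socket_do_extra_handshake and web_socket_transfer_data (the warnings checked at the top of this file name exactly those two functions). A minimal handler sketch could look like the following; the msgutil helpers are an assumption taken from the wider mod_pywebsocket package (only handshake.HandshakeException appears in the tests above), and the 403 origin check mirrors what test_do_extra_handshake expects of the /origin_check handler:

    from mod_pywebsocket import handshake, msgutil


    def web_socket_do_extra_handshake(request):
        # Reject handshakes from unexpected origins with a 403, as the
        # /origin_check tests above expect of their handler.
        if request.ws_origin != 'http://example.com':
            raise handshake.HandshakeException('Bad origin', status=403)


    def web_socket_transfer_data(request):
        # Echo a single message back to the client.
        message = msgutil.receive_message(request)
        msgutil.send_message(request, message)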
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_endtoend.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_endtoend.py
new file mode 100755
index 0000000000..2789e4a57e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_endtoend.py
@@ -0,0 +1,738 @@
+#!/usr/bin/env python
+#
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""End-to-end tests for pywebsocket. Tests standalone.py.
+"""
+
+from __future__ import absolute_import
+from six.moves import urllib
+import locale
+import logging
+import os
+import signal
+import socket
+import subprocess
+import sys
+import time
+import unittest
+
+import set_sys_path # Update sys.path to locate mod_pywebsocket module.
+
+from test import client_for_testing
+
+# Special message that tells the echo server to start closing handshake
+_GOODBYE_MESSAGE = 'Goodbye'
+
+_SERVER_WARMUP_IN_SEC = 0.2
+
+
+# Test body functions
+def _echo_check_procedure(client):
+ client.connect()
+
+ client.send_message('test')
+ client.assert_receive('test')
+ client.send_message('helloworld')
+ client.assert_receive('helloworld')
+
+ client.send_close()
+ client.assert_receive_close()
+
+ client.assert_connection_closed()
+
+
+def _echo_check_procedure_with_binary(client):
+ client.connect()
+
+ client.send_message(b'binary', binary=True)
+ client.assert_receive(b'binary', binary=True)
+ client.send_message(b'\x00\x80\xfe\xff\x00\x80', binary=True)
+ client.assert_receive(b'\x00\x80\xfe\xff\x00\x80', binary=True)
+
+ client.send_close()
+ client.assert_receive_close()
+
+ client.assert_connection_closed()
+
+
+def _echo_check_procedure_with_goodbye(client):
+ client.connect()
+
+ client.send_message('test')
+ client.assert_receive('test')
+
+ client.send_message(_GOODBYE_MESSAGE)
+ client.assert_receive(_GOODBYE_MESSAGE)
+
+ client.assert_receive_close()
+ client.send_close()
+
+ client.assert_connection_closed()
+
+
+def _echo_check_procedure_with_code_and_reason(client, code, reason):
+ client.connect()
+
+ client.send_close(code, reason)
+ client.assert_receive_close(code, reason)
+
+ client.assert_connection_closed()
+
+
+def _unmasked_frame_check_procedure(client):
+ client.connect()
+
+ client.send_message('test', mask=False)
+ client.assert_receive_close(client_for_testing.STATUS_PROTOCOL_ERROR, '')
+
+ client.assert_connection_closed()
+
+
+def _check_handshake_with_basic_auth(client):
+ client.connect()
+
+ client.send_message(_GOODBYE_MESSAGE)
+ client.assert_receive(_GOODBYE_MESSAGE)
+
+ client.assert_receive_close()
+ client.send_close()
+
+ client.assert_connection_closed()
+
+
+class EndToEndTestBase(unittest.TestCase):
+    """Base class for end-to-end tests that launch the pywebsocket standalone
+    server as a separate process, connect to it with the client_for_testing
+    module, and check that the server behaves correctly by exchanging an
+    opening handshake and frames over a TCP connection.
+    """
+ def setUp(self):
+ self.server_stderr = None
+ self.top_dir = os.path.join(os.path.dirname(__file__), '..')
+ os.putenv('PYTHONPATH', os.path.pathsep.join(sys.path))
+ self.standalone_command = os.path.join(self.top_dir, 'mod_pywebsocket',
+ 'standalone.py')
+ self.document_root = os.path.join(self.top_dir, 'example')
+ s = socket.socket()
+ s.bind(('localhost', 0))
+ (_, self.test_port) = s.getsockname()
+ s.close()
+
+ self._options = client_for_testing.ClientOptions()
+ self._options.server_host = 'localhost'
+ self._options.origin = 'http://localhost'
+ self._options.resource = '/echo'
+
+ self._options.server_port = self.test_port
+
+ # TODO(tyoshino): Use tearDown to kill the server.
+
+ def _run_python_command(self, commandline, stdout=None, stderr=None):
+ close_fds = True if sys.platform != 'win32' else None
+ return subprocess.Popen([sys.executable] + commandline,
+ close_fds=close_fds,
+ stdout=stdout,
+ stderr=stderr)
+
+ def _run_server(self, extra_args=[]):
+ args = [
+ self.standalone_command, '-H', 'localhost', '-V', 'localhost',
+ '-p',
+ str(self.test_port), '-P',
+ str(self.test_port), '-d', self.document_root
+ ]
+
+        # Inherit the level set on the root logger by the test runner.
+ root_logger = logging.getLogger()
+ log_level = root_logger.getEffectiveLevel()
+ if log_level != logging.NOTSET:
+ args.append('--log-level')
+ args.append(logging.getLevelName(log_level).lower())
+
+ args += extra_args
+
+ return self._run_python_command(args, stderr=self.server_stderr)
+
+ def _close_server(self, server):
+        """Mimics Popen.__exit__ to gracefully kill the server process.
+
+        Its main purpose is to maintain compatibility between Python 2 and 3,
+        since Popen in Python 2 does not have an __exit__ attribute.
+        """
+ server.kill()
+
+ if server.stdout:
+ server.stdout.close()
+ if server.stderr:
+ server.stderr.close()
+ if server.stdin:
+ server.stdin.close()
+
+ server.wait()
+
+
+class EndToEndHyBiTest(EndToEndTestBase):
+ def setUp(self):
+ EndToEndTestBase.setUp(self)
+
+ def _run_test_with_options(self,
+ test_function,
+ options,
+ server_options=[]):
+ server = self._run_server(server_options)
+ try:
+ # TODO(tyoshino): add some logic to poll the server until it
+ # becomes ready
+ time.sleep(_SERVER_WARMUP_IN_SEC)
+
+ client = client_for_testing.create_client(options)
+ try:
+ test_function(client)
+ finally:
+ client.close_socket()
+ finally:
+ self._close_server(server)
+
+ def _run_test(self, test_function):
+ self._run_test_with_options(test_function, self._options)
+
+ def _run_permessage_deflate_test(self, offer, response_checker,
+ test_function):
+ server = self._run_server()
+ try:
+ time.sleep(_SERVER_WARMUP_IN_SEC)
+
+ self._options.extensions += offer
+ self._options.check_permessage_deflate = response_checker
+ client = client_for_testing.create_client(self._options)
+
+ try:
+ client.connect()
+
+ if test_function is not None:
+ test_function(client)
+
+ client.assert_connection_closed()
+ finally:
+ client.close_socket()
+ finally:
+ self._close_server(server)
+
+ def _run_close_with_code_and_reason_test(self,
+ test_function,
+ code,
+ reason,
+ server_options=[]):
+ server = self._run_server()
+ try:
+ time.sleep(_SERVER_WARMUP_IN_SEC)
+
+ client = client_for_testing.create_client(self._options)
+ try:
+ test_function(client, code, reason)
+ finally:
+ client.close_socket()
+ finally:
+ self._close_server(server)
+
+ def _run_http_fallback_test(self, options, status):
+ server = self._run_server()
+ try:
+ time.sleep(_SERVER_WARMUP_IN_SEC)
+
+ client = client_for_testing.create_client(options)
+ try:
+ client.connect()
+ self.fail('Could not catch HttpStatusException')
+ except client_for_testing.HttpStatusException as e:
+ self.assertEqual(status, e.status)
+ except Exception as e:
+                self.fail('Caught unexpected exception: %r' % e)
+ finally:
+ client.close_socket()
+ finally:
+ self._close_server(server)
+
+ def test_echo(self):
+ self._run_test(_echo_check_procedure)
+
+ def test_echo_binary(self):
+ self._run_test(_echo_check_procedure_with_binary)
+
+ def test_echo_server_close(self):
+ self._run_test(_echo_check_procedure_with_goodbye)
+
+ def test_unmasked_frame(self):
+ self._run_test(_unmasked_frame_check_procedure)
+
+ def test_echo_permessage_deflate(self):
+ def test_function(client):
+ # From the examples in the spec.
+ compressed_hello = b'\xf2\x48\xcd\xc9\xc9\x07\x00'
+ client._stream.send_data(compressed_hello,
+ client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+ client._stream.assert_receive_binary(
+ compressed_hello,
+ opcode=client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+
+ client.send_close()
+ client.assert_receive_close()
+
+ def response_checker(parameter):
+ self.assertEqual('permessage-deflate', parameter.name())
+ self.assertEqual([], parameter.get_parameters())
+
+ self._run_permessage_deflate_test(['permessage-deflate'],
+ response_checker, test_function)
+
+ def test_echo_permessage_deflate_two_frames(self):
+ def test_function(client):
+ # From the examples in the spec.
+ client._stream.send_data(b'\xf2\x48\xcd',
+ client_for_testing.OPCODE_TEXT,
+ end=False,
+ rsv1=1)
+ client._stream.send_data(b'\xc9\xc9\x07\x00',
+ client_for_testing.OPCODE_TEXT)
+ client._stream.assert_receive_binary(
+ b'\xf2\x48\xcd\xc9\xc9\x07\x00',
+ opcode=client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+
+ client.send_close()
+ client.assert_receive_close()
+
+ def response_checker(parameter):
+ self.assertEqual('permessage-deflate', parameter.name())
+ self.assertEqual([], parameter.get_parameters())
+
+ self._run_permessage_deflate_test(['permessage-deflate'],
+ response_checker, test_function)
+
+ def test_echo_permessage_deflate_two_messages(self):
+ def test_function(client):
+ # From the examples in the spec.
+ client._stream.send_data(b'\xf2\x48\xcd\xc9\xc9\x07\x00',
+ client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+ client._stream.send_data(b'\xf2\x00\x11\x00\x00',
+ client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+ client._stream.assert_receive_binary(
+ b'\xf2\x48\xcd\xc9\xc9\x07\x00',
+ opcode=client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+ client._stream.assert_receive_binary(
+ b'\xf2\x00\x11\x00\x00',
+ opcode=client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+
+ client.send_close()
+ client.assert_receive_close()
+
+ def response_checker(parameter):
+ self.assertEqual('permessage-deflate', parameter.name())
+ self.assertEqual([], parameter.get_parameters())
+
+ self._run_permessage_deflate_test(['permessage-deflate'],
+ response_checker, test_function)
+
+ def test_echo_permessage_deflate_two_msgs_server_no_context_takeover(self):
+ def test_function(client):
+ # From the examples in the spec.
+ client._stream.send_data(b'\xf2\x48\xcd\xc9\xc9\x07\x00',
+ client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+ client._stream.send_data(b'\xf2\x00\x11\x00\x00',
+ client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+ client._stream.assert_receive_binary(
+ b'\xf2\x48\xcd\xc9\xc9\x07\x00',
+ opcode=client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+ client._stream.assert_receive_binary(
+ b'\xf2\x48\xcd\xc9\xc9\x07\x00',
+ opcode=client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+
+ client.send_close()
+ client.assert_receive_close()
+
+ def response_checker(parameter):
+ self.assertEqual('permessage-deflate', parameter.name())
+ self.assertEqual([('server_no_context_takeover', None)],
+ parameter.get_parameters())
+
+ self._run_permessage_deflate_test(
+ ['permessage-deflate; server_no_context_takeover'],
+ response_checker, test_function)
+
+ def test_echo_permessage_deflate_preference(self):
+ def test_function(client):
+ # From the examples in the spec.
+ compressed_hello = b'\xf2\x48\xcd\xc9\xc9\x07\x00'
+ client._stream.send_data(compressed_hello,
+ client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+ client._stream.assert_receive_binary(
+ compressed_hello,
+ opcode=client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+
+ client.send_close()
+ client.assert_receive_close()
+
+ def response_checker(parameter):
+ self.assertEqual('permessage-deflate', parameter.name())
+ self.assertEqual([], parameter.get_parameters())
+
+ self._run_permessage_deflate_test(
+ ['permessage-deflate', 'deflate-frame'], response_checker,
+ test_function)
+
+ def test_echo_permessage_deflate_with_parameters(self):
+ def test_function(client):
+ # From the examples in the spec.
+ compressed_hello = b'\xf2\x48\xcd\xc9\xc9\x07\x00'
+ client._stream.send_data(compressed_hello,
+ client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+ client._stream.assert_receive_binary(
+ compressed_hello,
+ opcode=client_for_testing.OPCODE_TEXT,
+ rsv1=1)
+
+ client.send_close()
+ client.assert_receive_close()
+
+ def response_checker(parameter):
+ self.assertEqual('permessage-deflate', parameter.name())
+ self.assertEqual([('server_max_window_bits', '10'),
+ ('server_no_context_takeover', None)],
+ parameter.get_parameters())
+
+ self._run_permessage_deflate_test([
+ 'permessage-deflate; server_max_window_bits=10; '
+ 'server_no_context_takeover'
+ ], response_checker, test_function)
+
+ def test_echo_permessage_deflate_with_bad_server_max_window_bits(self):
+ def test_function(client):
+ client.send_close()
+ client.assert_receive_close()
+
+ def response_checker(parameter):
+ raise Exception('Unexpected acceptance of permessage-deflate')
+
+ self._run_permessage_deflate_test(
+ ['permessage-deflate; server_max_window_bits=3000000'],
+ response_checker, test_function)
+
+ def test_echo_permessage_deflate_with_undefined_parameter(self):
+ def test_function(client):
+ client.send_close()
+ client.assert_receive_close()
+
+ def response_checker(parameter):
+ raise Exception('Unexpected acceptance of permessage-deflate')
+
+ self._run_permessage_deflate_test(['permessage-deflate; foo=bar'],
+ response_checker, test_function)
+
+ def test_echo_close_with_code_and_reason(self):
+ self._options.resource = '/close'
+ self._run_close_with_code_and_reason_test(
+ _echo_check_procedure_with_code_and_reason, 3333, 'sunsunsunsun')
+
+ def test_echo_close_with_empty_body(self):
+ self._options.resource = '/close'
+ self._run_close_with_code_and_reason_test(
+ _echo_check_procedure_with_code_and_reason, None, '')
+
+ def test_close_on_protocol_error(self):
+        """Tests that the server sends a close frame with the protocol error
+        status code when the client sends data containing a protocol error.
+ """
+ def test_function(client):
+ client.connect()
+
+ # Intermediate frame without any preceding start of fragmentation
+ # frame.
+ client.send_frame_of_arbitrary_bytes(b'\x80\x80', '')
+ client.assert_receive_close(
+ client_for_testing.STATUS_PROTOCOL_ERROR)
+
+ self._run_test(test_function)
+
+ def test_close_on_unsupported_frame(self):
+        """Tests that the server sends a close frame with the unsupported
+        operation status code when the client requests an operation that the
+        server does not support.
+ """
+ def test_function(client):
+ client.connect()
+
+ # Text frame with RSV3 bit raised.
+ client.send_frame_of_arbitrary_bytes(b'\x91\x80', '')
+ client.assert_receive_close(
+ client_for_testing.STATUS_UNSUPPORTED_DATA)
+
+ self._run_test(test_function)
+
+ def test_close_on_invalid_frame(self):
+        """Tests that the server sends a close frame with the invalid frame
+        payload data status code when the client sends an invalid frame, such
+        as a text frame containing an invalid UTF-8 sequence.
+ """
+ def test_function(client):
+ client.connect()
+
+ # Text frame with invalid UTF-8 string.
+ client.send_message(b'\x80', raw=True)
+ client.assert_receive_close(
+ client_for_testing.STATUS_INVALID_FRAME_PAYLOAD_DATA)
+
+ self._run_test(test_function)
+
+ def test_close_on_internal_endpoint_error(self):
+        """Tests that the server sends a close frame with the internal
+        endpoint error status code when the handler performs a bad operation.
+ """
+
+ self._options.resource = '/internal_error'
+
+ def test_function(client):
+ client.connect()
+ client.assert_receive_close(
+ client_for_testing.STATUS_INTERNAL_ENDPOINT_ERROR)
+
+ self._run_test(test_function)
+
+ def test_absolute_uri(self):
+ """Tests absolute uri request."""
+
+ options = self._options
+ options.resource = 'ws://localhost:%d/echo' % options.server_port
+ self._run_test_with_options(_echo_check_procedure, options)
+
+ def test_invalid_absolute_uri(self):
+ """Tests invalid absolute uri request."""
+
+ options = self._options
+ options.resource = 'ws://invalidlocalhost:%d/echo' % options.server_port
+ options.server_stderr = subprocess.PIPE
+
+ self._run_http_fallback_test(options, 404)
+
+ def test_origin_check(self):
+ """Tests http fallback on origin check fail."""
+
+ options = self._options
+ options.resource = '/origin_check'
+        # The server logs a warning for the HTTP 403 fallback. That warning is
+        # only confusing here, so pipe stderr to discard it.
+ self.server_stderr = subprocess.PIPE
+ self._run_http_fallback_test(options, 403)
+
+ def test_invalid_resource(self):
+ """Tests invalid resource path."""
+
+ options = self._options
+ options.resource = '/no_resource'
+
+ self.server_stderr = subprocess.PIPE
+ self._run_http_fallback_test(options, 404)
+
+ def test_fragmentized_resource(self):
+ """Tests resource name with fragment"""
+
+ options = self._options
+ options.resource = '/echo#fragment'
+
+ self.server_stderr = subprocess.PIPE
+ self._run_http_fallback_test(options, 400)
+
+ def test_version_check(self):
+ """Tests http fallback on version check fail."""
+
+ options = self._options
+ options.version = 99
+ self._run_http_fallback_test(options, 400)
+
+ def test_basic_auth_connection(self):
+ """Test successful basic auth"""
+
+ options = self._options
+ options.use_basic_auth = True
+
+ self.server_stderr = subprocess.PIPE
+ self._run_test_with_options(_check_handshake_with_basic_auth,
+ options,
+ server_options=['--basic-auth'])
+
+ def test_invalid_basic_auth_connection(self):
+ """Tests basic auth with invalid credentials"""
+
+ options = self._options
+ options.use_basic_auth = True
+ options.basic_auth_credential = 'invalid:test'
+
+ self.server_stderr = subprocess.PIPE
+
+ with self.assertRaises(client_for_testing.HttpStatusException) as e:
+ self._run_test_with_options(_check_handshake_with_basic_auth,
+ options,
+ server_options=['--basic-auth'])
+ self.assertEqual(101, e.exception.status)
+
+
+class EndToEndTestWithEchoClient(EndToEndTestBase):
+ def setUp(self):
+ EndToEndTestBase.setUp(self)
+
+ def _check_example_echo_client_result(self, expected, stdoutdata,
+ stderrdata):
+ actual = stdoutdata.decode(locale.getpreferredencoding())
+
+ # In Python 3 on Windows we get "\r\n" terminators back from
+ # the subprocess and we need to replace them with "\n" to get
+ # a match. This is a bit of a hack, but avoids platform- and
+        # version-specific code.
+ actual = actual.replace('\r\n', '\n')
+
+ if actual != expected:
+ raise Exception('Unexpected result on example echo client: '
+ '%r (expected) vs %r (actual)' %
+ (expected, actual))
+ if stderrdata is not None:
+ raise Exception('Unexpected error message on example echo '
+ 'client: %r' % stderrdata)
+
+ def test_example_echo_client(self):
+ """Tests that the echo_client.py example can talk with the server."""
+
+ server = self._run_server()
+ try:
+ time.sleep(_SERVER_WARMUP_IN_SEC)
+
+ client_command = os.path.join(self.top_dir, 'example',
+ 'echo_client.py')
+
+ # Expected output for the default messages.
+ default_expectation = (u'Send: Hello\n'
+ u'Recv: Hello\n'
+ u'Send: <>\n'
+ u'Recv: <>\n'
+ u'Send close\n'
+ u'Recv ack\n')
+
+ args = [client_command, '-p', str(self._options.server_port)]
+ client = self._run_python_command(args, stdout=subprocess.PIPE)
+ stdoutdata, stderrdata = client.communicate()
+ self._check_example_echo_client_result(default_expectation,
+ stdoutdata, stderrdata)
+
+ # Process a big message for which extended payload length is used.
+ # To handle extended payload length, ws_version attribute will be
+ # accessed. This test checks that ws_version is correctly set.
+ big_message = 'a' * 1024
+ args = [
+ client_command, '-p',
+ str(self._options.server_port), '-m', big_message
+ ]
+ client = self._run_python_command(args, stdout=subprocess.PIPE)
+ stdoutdata, stderrdata = client.communicate()
+ expected = ('Send: %s\nRecv: %s\nSend close\nRecv ack\n' %
+ (big_message, big_message))
+ self._check_example_echo_client_result(expected, stdoutdata,
+ stderrdata)
+
+ # Test the permessage-deflate extension.
+ args = [
+ client_command, '-p',
+ str(self._options.server_port), '--use_permessage_deflate'
+ ]
+ client = self._run_python_command(args, stdout=subprocess.PIPE)
+ stdoutdata, stderrdata = client.communicate()
+ self._check_example_echo_client_result(default_expectation,
+ stdoutdata, stderrdata)
+ finally:
+ self._close_server(server)
+
+
+class EndToEndTestWithCgi(EndToEndTestBase):
+ def setUp(self):
+ EndToEndTestBase.setUp(self)
+
+ def test_cgi(self):
+ """Verifies that CGI scripts work."""
+
+ server = self._run_server(extra_args=['--cgi-paths', '/cgi-bin'])
+ time.sleep(_SERVER_WARMUP_IN_SEC)
+
+ url = 'http://localhost:%d/cgi-bin/hi.py' % self._options.server_port
+
+ # urlopen() in Python 2.7 doesn't support "with".
+ try:
+ f = urllib.request.urlopen(url)
+ except:
+ self._close_server(server)
+ raise
+
+ try:
+ self.assertEqual(f.getcode(), 200)
+ self.assertEqual(f.info().get('Content-Type'), 'text/plain')
+ body = f.read()
+ self.assertEqual(body.rstrip(b'\r\n'), b'Hi from hi.py')
+ finally:
+ f.close()
+ self._close_server(server)
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+# vi:sts=4 sw=4 et
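The compressed_hello payload b'\xf2\x48\xcd\xc9\xc9\x07\x00' used by the permessage-deflate tests above is the sample from RFC 7692: a raw DEFLATE stream of the text 'Hello' with the 0x00 0x00 0xff 0xff tail left by a sync flush removed. A small sketch of how that payload should be reproducible with the standard zlib module (the exact bytes depend on zlib's defaults, so the expected value is noted in a comment rather than asserted):

    import zlib

    # Raw DEFLATE (negative wbits means no zlib header/trailer), as
    # permessage-deflate uses on the wire.
    compressor = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
                                  -zlib.MAX_WBITS)
    body = compressor.compress(b'Hello') + compressor.flush(zlib.Z_SYNC_FLUSH)

    # A sync flush always ends with the empty stored block 00 00 ff ff;
    # RFC 7692 strips it before framing the data.
    assert body.endswith(b'\x00\x00\xff\xff')
    print(body[:-4])  # expected: b'\xf2H\xcd\xc9\xc9\x07\x00'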
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_extensions.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_extensions.py
new file mode 100755
index 0000000000..39a111888b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_extensions.py
@@ -0,0 +1,192 @@
+#!/usr/bin/env python
+#
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Tests for extensions module."""
+
+from __future__ import absolute_import
+import unittest
+import zlib
+
+import set_sys_path # Update sys.path to locate mod_pywebsocket module.
+
+from mod_pywebsocket import common
+from mod_pywebsocket import extensions
+
+
+class ExtensionsTest(unittest.TestCase):
+ """A unittest for non-class methods in extensions.py"""
+ def test_parse_window_bits(self):
+ self.assertRaises(ValueError, extensions._parse_window_bits, None)
+ self.assertRaises(ValueError, extensions._parse_window_bits, 'foobar')
+ self.assertRaises(ValueError, extensions._parse_window_bits, ' 8 ')
+ self.assertRaises(ValueError, extensions._parse_window_bits, 'a8a')
+ self.assertRaises(ValueError, extensions._parse_window_bits, '00000')
+ self.assertRaises(ValueError, extensions._parse_window_bits, '00008')
+ self.assertRaises(ValueError, extensions._parse_window_bits, '0x8')
+
+ self.assertRaises(ValueError, extensions._parse_window_bits, '9.5')
+ self.assertRaises(ValueError, extensions._parse_window_bits, '8.0')
+
+        self.assertTrue(extensions._parse_window_bits('8'))
+        self.assertTrue(extensions._parse_window_bits('15'))
+
+ self.assertRaises(ValueError, extensions._parse_window_bits, '-8')
+ self.assertRaises(ValueError, extensions._parse_window_bits, '0')
+ self.assertRaises(ValueError, extensions._parse_window_bits, '7')
+
+ self.assertRaises(ValueError, extensions._parse_window_bits, '16')
+ self.assertRaises(ValueError, extensions._parse_window_bits,
+ '10000000')
+
+
+class PerMessageDeflateExtensionProcessorParsingTest(unittest.TestCase):
+    """A unittest for checking that PerMessageDeflateExtensionProcessor parses
+    a given extension parameter correctly.
+ """
+ def test_registry(self):
+ processor = extensions.get_extension_processor(
+ common.ExtensionParameter('permessage-deflate'))
+ self.assertIsInstance(processor,
+ extensions.PerMessageDeflateExtensionProcessor)
+
+ def test_minimal_offer(self):
+ processor = extensions.PerMessageDeflateExtensionProcessor(
+ common.ExtensionParameter('permessage-deflate'))
+
+ response = processor.get_extension_response()
+ self.assertEqual('permessage-deflate', response.name())
+ self.assertEqual(0, len(response.get_parameters()))
+
+ self.assertEqual(zlib.MAX_WBITS,
+ processor._rfc1979_deflater._window_bits)
+ self.assertFalse(processor._rfc1979_deflater._no_context_takeover)
+
+ def test_offer_with_max_window_bits(self):
+ parameter = common.ExtensionParameter('permessage-deflate')
+ parameter.add_parameter('server_max_window_bits', '10')
+ processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
+
+ response = processor.get_extension_response()
+ self.assertEqual('permessage-deflate', response.name())
+ self.assertEqual([('server_max_window_bits', '10')],
+ response.get_parameters())
+
+ self.assertEqual(10, processor._rfc1979_deflater._window_bits)
+
+ def test_offer_with_out_of_range_max_window_bits(self):
+ parameter = common.ExtensionParameter('permessage-deflate')
+ parameter.add_parameter('server_max_window_bits', '0')
+ processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
+
+ self.assertIsNone(processor.get_extension_response())
+
+ def test_offer_with_max_window_bits_without_value(self):
+ parameter = common.ExtensionParameter('permessage-deflate')
+ parameter.add_parameter('server_max_window_bits', None)
+ processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
+
+ self.assertIsNone(processor.get_extension_response())
+
+ def test_offer_with_no_context_takeover(self):
+ parameter = common.ExtensionParameter('permessage-deflate')
+ parameter.add_parameter('server_no_context_takeover', None)
+ processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
+
+ response = processor.get_extension_response()
+ self.assertEqual('permessage-deflate', response.name())
+ self.assertEqual([('server_no_context_takeover', None)],
+ response.get_parameters())
+
+ self.assertTrue(processor._rfc1979_deflater._no_context_takeover)
+
+ def test_offer_with_no_context_takeover_with_value(self):
+ parameter = common.ExtensionParameter('permessage-deflate')
+ parameter.add_parameter('server_no_context_takeover', 'foobar')
+ processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
+
+ self.assertIsNone(processor.get_extension_response())
+
+ def test_offer_with_unknown_parameter(self):
+ parameter = common.ExtensionParameter('permessage-deflate')
+ parameter.add_parameter('foo', 'bar')
+ processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
+
+ self.assertIsNone(processor.get_extension_response())
+
+
+class PerMessageDeflateExtensionProcessorBuildingTest(unittest.TestCase):
+ """A unittest for checking that PerMessageDeflateExtensionProcessor builds
+ a response based on specified options correctly.
+ """
+ def test_response_with_max_window_bits(self):
+ parameter = common.ExtensionParameter('permessage-deflate')
+ parameter.add_parameter('client_max_window_bits', None)
+ processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
+ processor.set_client_max_window_bits(10)
+
+ response = processor.get_extension_response()
+ self.assertEqual('permessage-deflate', response.name())
+ self.assertEqual([('client_max_window_bits', '10')],
+ response.get_parameters())
+
+ def test_response_with_max_window_bits_without_client_permission(self):
+ processor = extensions.PerMessageDeflateExtensionProcessor(
+ common.ExtensionParameter('permessage-deflate'))
+ processor.set_client_max_window_bits(10)
+
+ response = processor.get_extension_response()
+ self.assertIsNone(response)
+
+ def test_response_with_true_for_no_context_takeover(self):
+ processor = extensions.PerMessageDeflateExtensionProcessor(
+ common.ExtensionParameter('permessage-deflate'))
+
+ processor.set_client_no_context_takeover(True)
+
+ response = processor.get_extension_response()
+ self.assertEqual('permessage-deflate', response.name())
+ self.assertEqual([('client_no_context_takeover', None)],
+ response.get_parameters())
+
+ def test_response_with_false_for_no_context_takeover(self):
+ processor = extensions.PerMessageDeflateExtensionProcessor(
+ common.ExtensionParameter('permessage-deflate'))
+
+ processor.set_client_no_context_takeover(False)
+
+ response = processor.get_extension_response()
+ self.assertEqual('permessage-deflate', response.name())
+ self.assertEqual(0, len(response.get_parameters()))
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+# vi:sts=4 sw=4 et
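test_parse_window_bits above pins down the contract of extensions._parse_window_bits purely through accepted and rejected inputs. A standalone sketch of that contract, reconstructed only from those test cases and not from the module's actual implementation, might be:

    import re


    def parse_window_bits(value):
        # Only a plain decimal integer in [8, 15] is accepted: no None, no
        # sign, no whitespace, no leading zeros, no hex or float notation.
        if value is None or not re.match(r'^[1-9][0-9]*$', value):
            raise ValueError('illegal window bits: %r' % value)
        bits = int(value)
        if bits < 8 or bits > 15:
            raise ValueError('window bits out of range: %d' % bits)
        return bits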
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_handshake.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_handshake.py
new file mode 100755
index 0000000000..7f4acf56ff
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_handshake.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python
+#
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Tests for handshake.base module."""
+
+from __future__ import absolute_import
+import unittest
+
+import set_sys_path # Update sys.path to locate mod_pywebsocket module.
+
+from mod_pywebsocket.common import ExtensionParameter
+from mod_pywebsocket.common import ExtensionParsingException
+from mod_pywebsocket.common import format_extensions
+from mod_pywebsocket.common import parse_extensions
+from mod_pywebsocket.handshake.base import HandshakeException
+from mod_pywebsocket.handshake.base import validate_subprotocol
+
+
+class ValidateSubprotocolTest(unittest.TestCase):
+ """A unittest for validate_subprotocol method."""
+ def test_validate_subprotocol(self):
+ # Should succeed.
+ validate_subprotocol('sample')
+ validate_subprotocol('Sample')
+ validate_subprotocol('sample\x7eprotocol')
+
+ # Should fail.
+ self.assertRaises(HandshakeException, validate_subprotocol, '')
+ self.assertRaises(HandshakeException, validate_subprotocol,
+ 'sample\x09protocol')
+ self.assertRaises(HandshakeException, validate_subprotocol,
+ 'sample\x19protocol')
+ self.assertRaises(HandshakeException, validate_subprotocol,
+ 'sample\x20protocol')
+ self.assertRaises(HandshakeException, validate_subprotocol,
+ 'sample\x7fprotocol')
+ self.assertRaises(
+ HandshakeException,
+ validate_subprotocol,
+ # "Japan" in Japanese
+ u'\u65e5\u672c')
+
+
+_TEST_TOKEN_EXTENSION_DATA = [
+ ('foo', [('foo', [])]),
+ ('foo; bar', [('foo', [('bar', None)])]),
+ ('foo; bar=baz', [('foo', [('bar', 'baz')])]),
+ ('foo; bar=baz; car=cdr', [('foo', [('bar', 'baz'), ('car', 'cdr')])]),
+ ('foo; bar=baz, car; cdr', [('foo', [('bar', 'baz')]),
+ ('car', [('cdr', None)])]),
+ ('a, b, c, d', [('a', []), ('b', []), ('c', []), ('d', [])]),
+]
+
+_TEST_QUOTED_EXTENSION_DATA = [
+ ('foo; bar=""', [('foo', [('bar', '')])]),
+ ('foo; bar=" baz "', [('foo', [('bar', ' baz ')])]),
+ ('foo; bar=",baz;"', [('foo', [('bar', ',baz;')])]),
+ ('foo; bar="\\\r\\\nbaz"', [('foo', [('bar', '\r\nbaz')])]),
+ ('foo; bar="\\"baz"', [('foo', [('bar', '"baz')])]),
+ ('foo; bar="\xbbbaz"', [('foo', [('bar', '\xbbbaz')])]),
+]
+
+_TEST_REDUNDANT_TOKEN_EXTENSION_DATA = [
+ ('foo \t ', [('foo', [])]),
+ ('foo; \r\n bar', [('foo', [('bar', None)])]),
+ ('foo; bar=\r\n \r\n baz', [('foo', [('bar', 'baz')])]),
+ ('foo ;bar = baz ', [('foo', [('bar', 'baz')])]),
+ ('foo,bar,,baz', [('foo', []), ('bar', []), ('baz', [])]),
+]
+
+_TEST_REDUNDANT_QUOTED_EXTENSION_DATA = [
+ ('foo; bar="\r\n \r\n baz"', [('foo', [('bar', ' baz')])]),
+]
+
+
+class ExtensionsParserTest(unittest.TestCase):
+ def _verify_extension_list(self, expected_list, actual_list):
+        """Verifies that the ExtensionParameter objects in actual_list have
+        the same members as the extension definitions in expected_list. An
+        extension definition used in this test is a pair of an extension name
+        and a list of (name, value) parameter pairs.
+ """
+
+ self.assertEqual(len(expected_list), len(actual_list))
+ for expected, actual in zip(expected_list, actual_list):
+ (name, parameters) = expected
+ self.assertEqual(name, actual._name)
+ self.assertEqual(parameters, actual._parameters)
+
+ def test_parse(self):
+ for formatted_string, definition in _TEST_TOKEN_EXTENSION_DATA:
+ self._verify_extension_list(definition,
+ parse_extensions(formatted_string))
+
+ def test_parse_quoted_data(self):
+ for formatted_string, definition in _TEST_QUOTED_EXTENSION_DATA:
+ self._verify_extension_list(definition,
+ parse_extensions(formatted_string))
+
+ def test_parse_redundant_data(self):
+ for (formatted_string,
+ definition) in _TEST_REDUNDANT_TOKEN_EXTENSION_DATA:
+ self._verify_extension_list(definition,
+ parse_extensions(formatted_string))
+
+ def test_parse_redundant_quoted_data(self):
+ for (formatted_string,
+ definition) in _TEST_REDUNDANT_QUOTED_EXTENSION_DATA:
+ self._verify_extension_list(definition,
+ parse_extensions(formatted_string))
+
+ def test_parse_bad_data(self):
+ _TEST_BAD_EXTENSION_DATA = [
+ ('foo; ; '),
+ ('foo; a a'),
+ ('foo foo'),
+ (',,,'),
+ ('foo; bar='),
+ ('foo; bar="hoge'),
+ ('foo; bar="a\r"'),
+ ('foo; bar="\\\xff"'),
+ ('foo; bar=\ra'),
+ ]
+
+ for formatted_string in _TEST_BAD_EXTENSION_DATA:
+ self.assertRaises(ExtensionParsingException, parse_extensions,
+ formatted_string)
+
+
+class FormatExtensionsTest(unittest.TestCase):
+ def test_format_extensions(self):
+ for formatted_string, definitions in _TEST_TOKEN_EXTENSION_DATA:
+ extensions = []
+ for definition in definitions:
+ (name, parameters) = definition
+ extension = ExtensionParameter(name)
+ extension._parameters = parameters
+ extensions.append(extension)
+ self.assertEqual(formatted_string, format_extensions(extensions))
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+# vi:sts=4 sw=4 et
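The parser tests above exercise the Sec-WebSocket-Extensions grammar in both directions. For orientation, here is a short usage sketch of the same parse_extensions and format_extensions helpers imported by this test; the printed values follow the expectations encoded in _TEST_TOKEN_EXTENSION_DATA:

    from mod_pywebsocket.common import format_extensions, parse_extensions

    parsed = parse_extensions('foo; bar=baz, car; cdr')
    print([(e.name(), e.get_parameter_names()) for e in parsed])
    # [('foo', ['bar']), ('car', ['cdr'])]
    print(parsed[0].get_parameter_value('bar'))  # 'baz'
    print(format_extensions(parsed))             # 'foo; bar=baz, car; cdr'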
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_handshake_hybi.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_handshake_hybi.py
new file mode 100755
index 0000000000..8c65822170
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_handshake_hybi.py
@@ -0,0 +1,422 @@
+#!/usr/bin/env python
+#
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Tests for handshake module."""
+
+from __future__ import absolute_import
+import unittest
+
+import set_sys_path # Update sys.path to locate mod_pywebsocket module.
+from mod_pywebsocket import common
+from mod_pywebsocket.handshake.base import AbortedByUserException
+from mod_pywebsocket.handshake.base import HandshakeException
+from mod_pywebsocket.handshake.base import VersionException
+from mod_pywebsocket.handshake.hybi import Handshaker
+
+from test import mock
+
+
+class RequestDefinition(object):
+ """A class for holding data for constructing opening handshake strings for
+ testing the opening handshake processor.
+ """
+ def __init__(self, method, uri, headers):
+ self.method = method
+ self.uri = uri
+ self.headers = headers
+
+
+def _create_good_request_def():
+ return RequestDefinition(
+ 'GET', '/demo', {
+ 'Host': 'server.example.com',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': 'dGhlIHNhbXBsZSBub25jZQ==',
+ 'Sec-WebSocket-Version': '13',
+ 'Origin': 'http://example.com'
+ })
+
+
+def _create_request(request_def):
+ conn = mock.MockConn(b'')
+ return mock.MockRequest(method=request_def.method,
+ uri=request_def.uri,
+ headers_in=request_def.headers,
+ connection=conn)
+
+
+def _create_handshaker(request):
+ handshaker = Handshaker(request, mock.MockDispatcher())
+ return handshaker
+
+
+class SubprotocolChoosingDispatcher(object):
+    """A dispatcher for testing. This dispatcher sets the i-th of the
+    requested subprotocols as ws_protocol, where i is the index argument
+    given on construction. If index is negative, default_value is used as
+    ws_protocol instead.
+ """
+ def __init__(self, index, default_value=None):
+ self.index = index
+ self.default_value = default_value
+
+ def do_extra_handshake(self, conn_context):
+ if self.index >= 0:
+ conn_context.ws_protocol = conn_context.ws_requested_protocols[
+ self.index]
+ else:
+ conn_context.ws_protocol = self.default_value
+
+ def transfer_data(self, conn_context):
+ pass
+
+
+class HandshakeAbortedException(Exception):
+ pass
+
+
+class AbortingDispatcher(object):
+ """A dispatcher for testing. This dispatcher raises an exception in
+ do_extra_handshake to reject the request.
+ """
+ def do_extra_handshake(self, conn_context):
+ raise HandshakeAbortedException('An exception to reject the request')
+
+ def transfer_data(self, conn_context):
+ pass
+
+
+class AbortedByUserDispatcher(object):
+ """A dispatcher for testing. This dispatcher raises an
+ AbortedByUserException in do_extra_handshake to reject the request.
+ """
+ def do_extra_handshake(self, conn_context):
+ raise AbortedByUserException('An AbortedByUserException to reject the '
+ 'request')
+
+ def transfer_data(self, conn_context):
+ pass
+
+
+_EXPECTED_RESPONSE = (
+ b'HTTP/1.1 101 Switching Protocols\r\n'
+ b'Upgrade: websocket\r\n'
+ b'Connection: Upgrade\r\n'
+ b'Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n\r\n')
+
+
+class HandshakerTest(unittest.TestCase):
+ """A unittest for draft-ietf-hybi-thewebsocketprotocol-06 and later
+ handshake processor.
+ """
+ def test_do_handshake(self):
+ request = _create_request(_create_good_request_def())
+ dispatcher = mock.MockDispatcher()
+ handshaker = Handshaker(request, dispatcher)
+ handshaker.do_handshake()
+
+ self.assertTrue(dispatcher.do_extra_handshake_called)
+
+ self.assertEqual(_EXPECTED_RESPONSE, request.connection.written_data())
+ self.assertEqual('/demo', request.ws_resource)
+ self.assertEqual('http://example.com', request.ws_origin)
+ self.assertEqual(None, request.ws_protocol)
+ self.assertEqual(None, request.ws_extensions)
+ self.assertEqual(common.VERSION_HYBI_LATEST, request.ws_version)
+
+ def test_do_handshake_with_extra_headers(self):
+ request_def = _create_good_request_def()
+ # Add headers not related to WebSocket opening handshake.
+ request_def.headers['FooKey'] = 'BarValue'
+ request_def.headers['EmptyKey'] = ''
+
+ request = _create_request(request_def)
+ handshaker = _create_handshaker(request)
+ handshaker.do_handshake()
+ self.assertEqual(_EXPECTED_RESPONSE, request.connection.written_data())
+
+ def test_do_handshake_with_capitalized_value(self):
+ request_def = _create_good_request_def()
+ request_def.headers['upgrade'] = 'WEBSOCKET'
+
+ request = _create_request(request_def)
+ handshaker = _create_handshaker(request)
+ handshaker.do_handshake()
+ self.assertEqual(_EXPECTED_RESPONSE, request.connection.written_data())
+
+ request_def = _create_good_request_def()
+ request_def.headers['Connection'] = 'UPGRADE'
+
+ request = _create_request(request_def)
+ handshaker = _create_handshaker(request)
+ handshaker.do_handshake()
+ self.assertEqual(_EXPECTED_RESPONSE, request.connection.written_data())
+
+ def test_do_handshake_with_multiple_connection_values(self):
+ request_def = _create_good_request_def()
+ request_def.headers['Connection'] = 'Upgrade, keep-alive, , '
+
+ request = _create_request(request_def)
+ handshaker = _create_handshaker(request)
+ handshaker.do_handshake()
+ self.assertEqual(_EXPECTED_RESPONSE, request.connection.written_data())
+
+ def test_aborting_handshake(self):
+ handshaker = Handshaker(_create_request(_create_good_request_def()),
+ AbortingDispatcher())
+ # do_extra_handshake raises an exception. Check that it's not caught by
+ # do_handshake.
+ self.assertRaises(HandshakeAbortedException, handshaker.do_handshake)
+
+ def test_do_handshake_with_protocol(self):
+ request_def = _create_good_request_def()
+ request_def.headers['Sec-WebSocket-Protocol'] = 'chat, superchat'
+
+ request = _create_request(request_def)
+ handshaker = Handshaker(request, SubprotocolChoosingDispatcher(0))
+ handshaker.do_handshake()
+
+ EXPECTED_RESPONSE = (
+ b'HTTP/1.1 101 Switching Protocols\r\n'
+ b'Upgrade: websocket\r\n'
+ b'Connection: Upgrade\r\n'
+ b'Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n'
+ b'Sec-WebSocket-Protocol: chat\r\n\r\n')
+
+ self.assertEqual(EXPECTED_RESPONSE, request.connection.written_data())
+ self.assertEqual('chat', request.ws_protocol)
+
+ def test_do_handshake_protocol_not_in_request_but_in_response(self):
+ request_def = _create_good_request_def()
+ request = _create_request(request_def)
+ handshaker = Handshaker(request,
+ SubprotocolChoosingDispatcher(-1, 'foobar'))
+        # No subprotocol was requested, but ws_protocol is set.
+        # HandshakeException must be raised.
+ self.assertRaises(HandshakeException, handshaker.do_handshake)
+
+ def test_do_handshake_with_protocol_no_protocol_selection(self):
+ request_def = _create_good_request_def()
+ request_def.headers['Sec-WebSocket-Protocol'] = 'chat, superchat'
+
+ request = _create_request(request_def)
+ handshaker = _create_handshaker(request)
+ # ws_protocol is not set. HandshakeException must be raised.
+ self.assertRaises(HandshakeException, handshaker.do_handshake)
+
+ def test_do_handshake_with_extensions(self):
+ request_def = _create_good_request_def()
+ request_def.headers['Sec-WebSocket-Extensions'] = (
+ 'permessage-deflate; server_no_context_takeover')
+
+ EXPECTED_RESPONSE = (
+ b'HTTP/1.1 101 Switching Protocols\r\n'
+ b'Upgrade: websocket\r\n'
+ b'Connection: Upgrade\r\n'
+ b'Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n'
+ b'Sec-WebSocket-Extensions: '
+ b'permessage-deflate; server_no_context_takeover\r\n'
+ b'\r\n')
+
+ request = _create_request(request_def)
+ handshaker = _create_handshaker(request)
+ handshaker.do_handshake()
+ self.assertEqual(EXPECTED_RESPONSE, request.connection.written_data())
+ self.assertEqual(1, len(request.ws_extensions))
+ extension = request.ws_extensions[0]
+ self.assertEqual(common.PERMESSAGE_DEFLATE_EXTENSION, extension.name())
+ self.assertEqual(['server_no_context_takeover'],
+ extension.get_parameter_names())
+ self.assertEqual(
+ None, extension.get_parameter_value('server_no_context_takeover'))
+ self.assertEqual(1, len(request.ws_extension_processors))
+ self.assertEqual('deflate', request.ws_extension_processors[0].name())
+
+ def test_do_handshake_with_quoted_extensions(self):
+ request_def = _create_good_request_def()
+ request_def.headers['Sec-WebSocket-Extensions'] = (
+ 'permessage-deflate, , '
+ 'unknown; e = "mc^2"; ma="\r\n \\\rf "; pv=nrt')
+
+ request = _create_request(request_def)
+ handshaker = _create_handshaker(request)
+ handshaker.do_handshake()
+ self.assertEqual(2, len(request.ws_requested_extensions))
+ first_extension = request.ws_requested_extensions[0]
+ self.assertEqual('permessage-deflate', first_extension.name())
+ second_extension = request.ws_requested_extensions[1]
+ self.assertEqual('unknown', second_extension.name())
+ self.assertEqual(['e', 'ma', 'pv'],
+ second_extension.get_parameter_names())
+ self.assertEqual('mc^2', second_extension.get_parameter_value('e'))
+ self.assertEqual(' \rf ', second_extension.get_parameter_value('ma'))
+ self.assertEqual('nrt', second_extension.get_parameter_value('pv'))
+
+ def test_do_handshake_with_optional_headers(self):
+ request_def = _create_good_request_def()
+ request_def.headers['EmptyValue'] = ''
+ request_def.headers['AKey'] = 'AValue'
+
+ request = _create_request(request_def)
+ handshaker = _create_handshaker(request)
+ handshaker.do_handshake()
+ self.assertEqual('AValue', request.headers_in['AKey'])
+ self.assertEqual('', request.headers_in['EmptyValue'])
+
+ def test_abort_extra_handshake(self):
+ handshaker = Handshaker(_create_request(_create_good_request_def()),
+ AbortedByUserDispatcher())
+ # do_extra_handshake raises an AbortedByUserException. Check that it's
+ # not caught by do_handshake.
+ self.assertRaises(AbortedByUserException, handshaker.do_handshake)
+
+ def test_bad_requests(self):
+ bad_cases = [
+ ('HTTP request',
+ RequestDefinition(
+ 'GET', '/demo', {
+ 'Host':
+ 'www.google.com',
+ 'User-Agent':
+ 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5;'
+ ' en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3'
+ ' GTB6 GTBA',
+ 'Accept':
+ 'text/html,application/xhtml+xml,application/xml;q=0.9,'
+ '*/*;q=0.8',
+ 'Accept-Language':
+ 'en-us,en;q=0.5',
+ 'Accept-Encoding':
+ 'gzip,deflate',
+ 'Accept-Charset':
+ 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
+ 'Keep-Alive':
+ '300',
+ 'Connection':
+ 'keep-alive'
+ }), None, True)
+ ]
+
+ request_def = _create_good_request_def()
+ request_def.method = 'POST'
+ bad_cases.append(('Wrong method', request_def, None, True))
+
+ request_def = _create_good_request_def()
+ del request_def.headers['Host']
+ bad_cases.append(('Missing Host', request_def, None, True))
+
+ request_def = _create_good_request_def()
+ del request_def.headers['Upgrade']
+ bad_cases.append(('Missing Upgrade', request_def, None, True))
+
+ request_def = _create_good_request_def()
+ request_def.headers['Upgrade'] = 'nonwebsocket'
+ bad_cases.append(('Wrong Upgrade', request_def, None, True))
+
+ request_def = _create_good_request_def()
+ del request_def.headers['Connection']
+ bad_cases.append(('Missing Connection', request_def, None, True))
+
+ request_def = _create_good_request_def()
+ request_def.headers['Connection'] = 'Downgrade'
+ bad_cases.append(('Wrong Connection', request_def, None, True))
+
+ request_def = _create_good_request_def()
+ del request_def.headers['Sec-WebSocket-Key']
+ bad_cases.append(('Missing Sec-WebSocket-Key', request_def, 400, True))
+
+ request_def = _create_good_request_def()
+ request_def.headers['Sec-WebSocket-Key'] = (
+ 'dGhlIHNhbXBsZSBub25jZQ==garbage')
+ bad_cases.append(('Wrong Sec-WebSocket-Key (with garbage on the tail)',
+ request_def, 400, True))
+
+ request_def = _create_good_request_def()
+ request_def.headers['Sec-WebSocket-Key'] = 'YQ==' # BASE64 of 'a'
+ bad_cases.append(
+ ('Wrong Sec-WebSocket-Key (decoded value is not 16 octets long)',
+ request_def, 400, True))
+
+ request_def = _create_good_request_def()
+ # The last character right before == must be any of A, Q, w and g.
+ request_def.headers['Sec-WebSocket-Key'] = 'AQIDBAUGBwgJCgsMDQ4PEC=='
+ bad_cases.append(
+ ('Wrong Sec-WebSocket-Key (padding bits are not zero)',
+ request_def, 400, True))
+
+ request_def = _create_good_request_def()
+ request_def.headers['Sec-WebSocket-Key'] = (
+ 'dGhlIHNhbXBsZSBub25jZQ==,dGhlIHNhbXBsZSBub25jZQ==')
+ bad_cases.append(('Wrong Sec-WebSocket-Key (multiple values)',
+ request_def, 400, True))
+
+ request_def = _create_good_request_def()
+ del request_def.headers['Sec-WebSocket-Version']
+ bad_cases.append(
+ ('Missing Sec-WebSocket-Version', request_def, None, True))
+
+ request_def = _create_good_request_def()
+ request_def.headers['Sec-WebSocket-Version'] = '3'
+ bad_cases.append(
+ ('Wrong Sec-WebSocket-Version', request_def, None, False))
+
+ request_def = _create_good_request_def()
+ request_def.headers['Sec-WebSocket-Version'] = '13, 13'
+ bad_cases.append(('Wrong Sec-WebSocket-Version (multiple values)',
+ request_def, 400, True))
+
+ request_def = _create_good_request_def()
+ request_def.headers['Sec-WebSocket-Protocol'] = 'illegal\x09protocol'
+ bad_cases.append(
+ ('Illegal Sec-WebSocket-Protocol', request_def, 400, True))
+
+ request_def = _create_good_request_def()
+ request_def.headers['Sec-WebSocket-Protocol'] = ''
+ bad_cases.append(
+ ('Empty Sec-WebSocket-Protocol', request_def, 400, True))
+
+ for (case_name, request_def, expected_status,
+ expect_handshake_exception) in bad_cases:
+ request = _create_request(request_def)
+ handshaker = Handshaker(request, mock.MockDispatcher())
+ try:
+ handshaker.do_handshake()
+ self.fail('No exception thrown for \'%s\' case' % case_name)
+ except HandshakeException as e:
+ self.assertTrue(expect_handshake_exception)
+ self.assertEqual(expected_status, e.status)
+ except VersionException as e:
+ self.assertFalse(expect_handshake_exception)
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+# vi:sts=4 sw=4 et
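The Sec-WebSocket-Accept value s3pPLMBiTxaQ9kYGzzhZRbK+xOo= in _EXPECTED_RESPONSE is not arbitrary: RFC 6455 derives it from the request's Sec-WebSocket-Key as base64(SHA-1(key + GUID)), with a fixed GUID. A short worked example using only the standard library:

    import base64
    import hashlib

    WEBSOCKET_GUID = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'  # fixed by RFC 6455


    def accept_value(sec_websocket_key):
        digest = hashlib.sha1(
            (sec_websocket_key + WEBSOCKET_GUID).encode('ascii')).digest()
        return base64.b64encode(digest).decode('ascii')


    print(accept_value('dGhlIHNhbXBsZSBub25jZQ=='))
    # 's3pPLMBiTxaQ9kYGzzhZRbK+xOo=' -- the value asserted above

The "padding bits are not zero" case in test_bad_requests relies on a base64 property: a 16-byte key encodes to 24 characters ending in '==', and the character just before the padding carries only two significant bits, so it can only be 'A', 'Q', 'g' or 'w':

    import base64
    import os

    key = base64.b64encode(os.urandom(16)).decode('ascii')
    assert len(key) == 24 and key.endswith('==')
    assert key[21] in 'AQgw'  # 6-bit values 0, 16, 32, 48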
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_http_header_util.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_http_header_util.py
new file mode 100755
index 0000000000..f8c8e7a981
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_http_header_util.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+#
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Tests for http_header_util module."""
+
+from __future__ import absolute_import
+import unittest
+import sys
+
+from mod_pywebsocket import http_header_util
+
+
+class UnitTest(unittest.TestCase):
+ """A unittest for http_header_util module."""
+ def test_parse_relative_uri(self):
+ host, port, resource = http_header_util.parse_uri('/ws/test')
+ self.assertEqual(None, host)
+ self.assertEqual(None, port)
+ self.assertEqual('/ws/test', resource)
+
+ def test_parse_absolute_uri(self):
+ host, port, resource = http_header_util.parse_uri(
+ 'ws://localhost:10080/ws/test')
+ self.assertEqual('localhost', host)
+ self.assertEqual(10080, port)
+ self.assertEqual('/ws/test', resource)
+
+ host, port, resource = http_header_util.parse_uri(
+ 'ws://example.com/ws/test')
+ self.assertEqual('example.com', host)
+ self.assertEqual(80, port)
+ self.assertEqual('/ws/test', resource)
+
+ host, port, resource = http_header_util.parse_uri('wss://example.com/')
+ self.assertEqual('example.com', host)
+ self.assertEqual(443, port)
+ self.assertEqual('/', resource)
+
+ host, port, resource = http_header_util.parse_uri(
+ 'ws://example.com:8080')
+ self.assertEqual('example.com', host)
+ self.assertEqual(8080, port)
+ self.assertEqual('/', resource)
+
+ def test_parse_invalid_uri(self):
+ host, port, resource = http_header_util.parse_uri('ws:///')
+ self.assertEqual(None, resource)
+
+ host, port, resource = http_header_util.parse_uri(
+ 'ws://localhost:INVALID_PORT')
+ self.assertEqual(None, resource)
+
+ host, port, resource = http_header_util.parse_uri(
+ 'ws://localhost:-1/ws')
+ if sys.hexversion >= 0x030600f0:
+ self.assertEqual(None, resource)
+ else:
+ self.assertEqual('localhost', host)
+ self.assertEqual(80, port)
+ self.assertEqual('/ws', resource)
+
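For reference, the 0x030600f0 gate above is sys.hexversion's packed encoding of Python 3.6.0 final (major, minor, micro, release level and serial packed into one integer). A minimal sketch of that encoding, not part of the patch:

import sys

# 0xMMmmppLS: major, minor, micro, release level (0xf means 'final'), serial.
assert 0x030600f0 == (3 << 24) | (6 << 16) | (0 << 8) | (0xf << 4) | 0
print(hex(sys.hexversion))   # e.g. 0x30a06f0 on CPython 3.10.6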
+
+if __name__ == '__main__':
+ unittest.main()
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_memorizingfile.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_memorizingfile.py
new file mode 100755
index 0000000000..f7288c510b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_memorizingfile.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+#
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Tests for memorizingfile module."""
+
+from __future__ import absolute_import
+import unittest
+import six
+
+import set_sys_path # Update sys.path to locate mod_pywebsocket module.
+
+from mod_pywebsocket import memorizingfile
+
+
+class UtilTest(unittest.TestCase):
+ """A unittest for memorizingfile module."""
+ def check(self, memorizing_file, num_read, expected_list):
+ for unused in range(num_read):
+ memorizing_file.readline()
+ actual_list = memorizing_file.get_memorized_lines()
+ self.assertEqual(len(expected_list), len(actual_list))
+ for expected, actual in zip(expected_list, actual_list):
+ self.assertEqual(expected, actual)
+
+ def check_with_size(self, memorizing_file, read_size, expected_list):
+ read_list = []
+ read_line = ''
+ while True:
+ line = memorizing_file.readline(read_size)
+ line_length = len(line)
+ self.assertTrue(line_length <= read_size)
+ if line_length == 0:
+ if read_line != '':
+ read_list.append(read_line)
+ break
+ read_line += line
+ if line[line_length - 1] == '\n':
+ read_list.append(read_line)
+ read_line = ''
+ actual_list = memorizing_file.get_memorized_lines()
+ self.assertEqual(len(expected_list), len(actual_list))
+ self.assertEqual(len(expected_list), len(read_list))
+ for expected, actual, read in zip(expected_list, actual_list,
+ read_list):
+ self.assertEqual(expected, actual)
+ self.assertEqual(expected, read)
+
+ def test_get_memorized_lines(self):
+ memorizing_file = memorizingfile.MemorizingFile(
+ six.StringIO('Hello\nWorld\nWelcome'))
+ self.check(memorizing_file, 3, ['Hello\n', 'World\n', 'Welcome'])
+
+ def test_get_memorized_lines_limit_memorized_lines(self):
+ memorizing_file = memorizingfile.MemorizingFile(
+ six.StringIO('Hello\nWorld\nWelcome'), 2)
+ self.check(memorizing_file, 3, ['Hello\n', 'World\n'])
+
+ def test_get_memorized_lines_empty_file(self):
+ memorizing_file = memorizingfile.MemorizingFile(six.StringIO(''))
+ self.check(memorizing_file, 10, [])
+
+ def test_get_memorized_lines_with_size(self):
+ for size in range(1, 10):
+ memorizing_file = memorizingfile.MemorizingFile(
+ six.StringIO('Hello\nWorld\nWelcome'))
+ self.check_with_size(memorizing_file, size,
+ ['Hello\n', 'World\n', 'Welcome'])
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_mock.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_mock.py
new file mode 100755
index 0000000000..073873dde9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_mock.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+#
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Tests for mock module."""
+
+from __future__ import absolute_import
+import six.moves.queue
+import threading
+import unittest
+
+import set_sys_path # Update sys.path to locate mod_pywebsocket module.
+
+from test import mock
+
+
+class MockConnTest(unittest.TestCase):
+ """A unittest for MockConn class."""
+ def setUp(self):
+ self._conn = mock.MockConn(b'ABC\r\nDEFG\r\n\r\nHIJK')
+
+ def test_readline(self):
+ self.assertEqual(b'ABC\r\n', self._conn.readline())
+ self.assertEqual(b'DEFG\r\n', self._conn.readline())
+ self.assertEqual(b'\r\n', self._conn.readline())
+ self.assertEqual(b'HIJK', self._conn.readline())
+ self.assertEqual(b'', self._conn.readline())
+
+ def test_read(self):
+ self.assertEqual(b'ABC\r\nD', self._conn.read(6))
+ self.assertEqual(b'EFG\r\n\r\nHI', self._conn.read(9))
+ self.assertEqual(b'JK', self._conn.read(10))
+ self.assertEqual(b'', self._conn.read(10))
+
+ def test_read_and_readline(self):
+ self.assertEqual(b'ABC\r\nD', self._conn.read(6))
+ self.assertEqual(b'EFG\r\n', self._conn.readline())
+ self.assertEqual(b'\r\nHIJK', self._conn.read(9))
+ self.assertEqual(b'', self._conn.readline())
+
+ def test_write(self):
+ self._conn.write(b'Hello\r\n')
+ self._conn.write(b'World\r\n')
+ self.assertEqual(b'Hello\r\nWorld\r\n', self._conn.written_data())
+
+
+class MockBlockingConnTest(unittest.TestCase):
+ """A unittest for MockBlockingConn class."""
+ def test_read(self):
+        """Tests that data put into MockBlockingConn via the put_bytes method
+        can be read back from it.
+        """
+ class LineReader(threading.Thread):
+ """A test class that launches a thread, calls readline on the
+ specified conn repeatedly and puts the read data to the specified
+ queue.
+ """
+ def __init__(self, conn, queue):
+ threading.Thread.__init__(self)
+ self._queue = queue
+ self._conn = conn
+                self.daemon = True
+ self.start()
+
+ def run(self):
+ while True:
+ data = self._conn.readline()
+ self._queue.put(data)
+
+ conn = mock.MockBlockingConn()
+ queue = six.moves.queue.Queue()
+ reader = LineReader(conn, queue)
+ self.assertTrue(queue.empty())
+ conn.put_bytes(b'Foo bar\r\n')
+ read = queue.get()
+ self.assertEqual(b'Foo bar\r\n', read)
+
+
+class MockTableTest(unittest.TestCase):
+ """A unittest for MockTable class."""
+ def test_create_from_dict(self):
+ table = mock.MockTable({'Key': 'Value'})
+ self.assertEqual('Value', table.get('KEY'))
+ self.assertEqual('Value', table['key'])
+
+ def test_create_from_list(self):
+ table = mock.MockTable([('Key', 'Value')])
+ self.assertEqual('Value', table.get('KEY'))
+ self.assertEqual('Value', table['key'])
+
+ def test_create_from_tuple(self):
+ table = mock.MockTable((('Key', 'Value'), ))
+ self.assertEqual('Value', table.get('KEY'))
+ self.assertEqual('Value', table['key'])
+
+ def test_set_and_get(self):
+ table = mock.MockTable()
+ self.assertEqual(None, table.get('Key'))
+ table['Key'] = 'Value'
+ self.assertEqual('Value', table.get('Key'))
+ self.assertEqual('Value', table.get('key'))
+ self.assertEqual('Value', table.get('KEY'))
+ self.assertEqual('Value', table['Key'])
+ self.assertEqual('Value', table['key'])
+ self.assertEqual('Value', table['KEY'])
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_msgutil.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_msgutil.py
new file mode 100755
index 0000000000..1122c281b7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_msgutil.py
@@ -0,0 +1,912 @@
+#!/usr/bin/env python
+#
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Tests for msgutil module."""
+
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import division
+import array
+import six.moves.queue
+import random
+import struct
+import unittest
+import zlib
+
+import set_sys_path # Update sys.path to locate mod_pywebsocket module.
+
+from mod_pywebsocket import common
+from mod_pywebsocket.extensions import PerMessageDeflateExtensionProcessor
+from mod_pywebsocket import msgutil
+from mod_pywebsocket.stream import InvalidUTF8Exception
+from mod_pywebsocket.stream import Stream
+from mod_pywebsocket.stream import StreamOptions
+from mod_pywebsocket import util
+from test import mock
+from six.moves import map
+from six.moves import range
+from six import iterbytes
+
+# We use one fixed nonce for testing instead of a cryptographically secure PRNG.
+_MASKING_NONCE = b'ABCD'
+
+
+def _mask_hybi(frame):
+    if isinstance(frame, six.text_type):
+        raise Exception('masking does not accept Texts')
+
+ frame_key = list(iterbytes(_MASKING_NONCE))
+ frame_key_len = len(frame_key)
+ result = bytearray(frame)
+ count = 0
+
+ for i in range(len(result)):
+ result[i] ^= frame_key[count]
+ count = (count + 1) % frame_key_len
+
+ return _MASKING_NONCE + bytes(result)
+
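As a side note, the masking scheme _mask_hybi implements is a plain bytewise XOR with the repeating 4-byte key, so applying the same mask twice restores the payload. A small standalone sketch of that property (not part of the patch):

NONCE = b'ABCD'

def xor_mask(payload, key=NONCE):
    # Bytewise XOR with the repeating key; _mask_hybi above does the same
    # and additionally prepends the nonce to the result.
    return bytes(b ^ key[i % len(key)] for i, b in enumerate(payload))

masked = xor_mask(b'Hello')
assert masked != b'Hello'
assert xor_mask(masked) == b'Hello'   # XOR masking is its own inverse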
+
+def _install_extension_processor(processor, request, stream_options):
+ response = processor.get_extension_response()
+ if response is not None:
+ processor.setup_stream_options(stream_options)
+ request.ws_extension_processors.append(processor)
+
+
+def _create_request_from_rawdata(read_data, permessage_deflate_request=None):
+ req = mock.MockRequest(connection=mock.MockConn(read_data))
+ req.ws_version = common.VERSION_HYBI_LATEST
+ req.ws_extension_processors = []
+
+ processor = None
+ if permessage_deflate_request is not None:
+ processor = PerMessageDeflateExtensionProcessor(
+ permessage_deflate_request)
+
+ stream_options = StreamOptions()
+ if processor is not None:
+ _install_extension_processor(processor, req, stream_options)
+ req.ws_stream = Stream(req, stream_options)
+
+ return req
+
+
+def _create_request(*frames):
+ """Creates MockRequest using data given as frames.
+
+    The frames will be returned by calling request.connection.read(), where
+    request is the MockRequest returned by this function.
+ """
+
+ read_data = []
+ for (header, body) in frames:
+ read_data.append(header + _mask_hybi(body))
+
+ return _create_request_from_rawdata(b''.join(read_data))
+
+
+def _create_blocking_request():
+ """Creates MockRequest.
+
+ Data written to a MockRequest can be read out by calling
+ request.connection.written_data().
+ """
+
+ req = mock.MockRequest(connection=mock.MockBlockingConn())
+ req.ws_version = common.VERSION_HYBI_LATEST
+ stream_options = StreamOptions()
+ req.ws_stream = Stream(req, stream_options)
+ return req
+
+
+class BasicMessageTest(unittest.TestCase):
+ """Basic tests for Stream."""
+ def test_send_message(self):
+ request = _create_request()
+ msgutil.send_message(request, 'Hello')
+ self.assertEqual(b'\x81\x05Hello', request.connection.written_data())
+
+ payload = 'a' * 125
+ request = _create_request()
+ msgutil.send_message(request, payload)
+ self.assertEqual(b'\x81\x7d' + payload.encode('UTF-8'),
+ request.connection.written_data())
+
+ def test_send_medium_message(self):
+ payload = 'a' * 126
+ request = _create_request()
+ msgutil.send_message(request, payload)
+ self.assertEqual(b'\x81\x7e\x00\x7e' + payload.encode('UTF-8'),
+ request.connection.written_data())
+
+ payload = 'a' * ((1 << 16) - 1)
+ request = _create_request()
+ msgutil.send_message(request, payload)
+ self.assertEqual(b'\x81\x7e\xff\xff' + payload.encode('UTF-8'),
+ request.connection.written_data())
+
+ def test_send_large_message(self):
+ payload = 'a' * (1 << 16)
+ request = _create_request()
+ msgutil.send_message(request, payload)
+ self.assertEqual(
+ b'\x81\x7f\x00\x00\x00\x00\x00\x01\x00\x00' +
+ payload.encode('UTF-8'), request.connection.written_data())
+
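The 10-byte header asserted in test_send_large_message above reflects the extended-length encoding: payloads of 2**16 bytes or more use the length marker 127 (0x7f) followed by the length as a 64-bit big-endian integer. A quick check of those exact bytes, for illustration only:

import struct

assert struct.pack('!Q', 1 << 16) == b'\x00\x00\x00\x00\x00\x01\x00\x00'
# The medium range (126..65535) uses marker 126 plus a 16-bit length,
# matching the b'\x81\x7e\x00\x7e' header asserted earlier:
assert struct.pack('!H', 126) == b'\x00\x7e'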
+ def test_send_message_unicode(self):
+ request = _create_request()
+ msgutil.send_message(request, u'\u65e5')
+ # U+65e5 is encoded as e6,97,a5 in UTF-8
+ self.assertEqual(b'\x81\x03\xe6\x97\xa5',
+ request.connection.written_data())
+
+ def test_send_message_fragments(self):
+ request = _create_request()
+ msgutil.send_message(request, 'Hello', False)
+ msgutil.send_message(request, ' ', False)
+ msgutil.send_message(request, 'World', False)
+ msgutil.send_message(request, '!', True)
+ self.assertEqual(b'\x01\x05Hello\x00\x01 \x00\x05World\x80\x01!',
+ request.connection.written_data())
+
+ def test_send_fragments_immediate_zero_termination(self):
+ request = _create_request()
+ msgutil.send_message(request, 'Hello World!', False)
+ msgutil.send_message(request, '', True)
+ self.assertEqual(b'\x01\x0cHello World!\x80\x00',
+ request.connection.written_data())
+
+ def test_receive_message(self):
+ request = _create_request((b'\x81\x85', b'Hello'),
+ (b'\x81\x86', b'World!'))
+ self.assertEqual('Hello', msgutil.receive_message(request))
+ self.assertEqual('World!', msgutil.receive_message(request))
+
+ payload = b'a' * 125
+ request = _create_request((b'\x81\xfd', payload))
+ self.assertEqual(payload.decode('UTF-8'),
+ msgutil.receive_message(request))
+
+ def test_receive_medium_message(self):
+ payload = b'a' * 126
+ request = _create_request((b'\x81\xfe\x00\x7e', payload))
+ self.assertEqual(payload.decode('UTF-8'),
+ msgutil.receive_message(request))
+
+ payload = b'a' * ((1 << 16) - 1)
+ request = _create_request((b'\x81\xfe\xff\xff', payload))
+ self.assertEqual(payload.decode('UTF-8'),
+ msgutil.receive_message(request))
+
+ def test_receive_large_message(self):
+ payload = b'a' * (1 << 16)
+ request = _create_request(
+ (b'\x81\xff\x00\x00\x00\x00\x00\x01\x00\x00', payload))
+ self.assertEqual(payload.decode('UTF-8'),
+ msgutil.receive_message(request))
+
+ def test_receive_length_not_encoded_using_minimal_number_of_bytes(self):
+        # A warning is logged when the payload length field doesn't use the
+        # minimal number of bytes, but processing continues.
+
+ payload = b'a'
+ # 1 byte can be represented without extended payload length field.
+ request = _create_request(
+ (b'\x81\xff\x00\x00\x00\x00\x00\x00\x00\x01', payload))
+ self.assertEqual(payload.decode('UTF-8'),
+ msgutil.receive_message(request))
+
+ def test_receive_message_unicode(self):
+ request = _create_request((b'\x81\x83', b'\xe6\x9c\xac'))
+ # U+672c is encoded as e6,9c,ac in UTF-8
+ self.assertEqual(u'\u672c', msgutil.receive_message(request))
+
+ def test_receive_message_erroneous_unicode(self):
+ # \x80 and \x81 are invalid as UTF-8.
+ request = _create_request((b'\x81\x82', b'\x80\x81'))
+ # Invalid characters should raise InvalidUTF8Exception
+ self.assertRaises(InvalidUTF8Exception, msgutil.receive_message,
+ request)
+
+ def test_receive_fragments(self):
+ request = _create_request((b'\x01\x85', b'Hello'), (b'\x00\x81', b' '),
+ (b'\x00\x85', b'World'), (b'\x80\x81', b'!'))
+ self.assertEqual('Hello World!', msgutil.receive_message(request))
+
+ def test_receive_fragments_unicode(self):
+ # UTF-8 encodes U+6f22 into e6bca2 and U+5b57 into e5ad97.
+ request = _create_request((b'\x01\x82', b'\xe6\xbc'),
+ (b'\x00\x82', b'\xa2\xe5'),
+ (b'\x80\x82', b'\xad\x97'))
+ self.assertEqual(u'\u6f22\u5b57', msgutil.receive_message(request))
+
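A one-line check (outside the test) of the encodings the comment in test_receive_fragments_unicode relies on: U+6F22 and U+5B57 encode to e6 bc a2 and e5 ad 97, which the three frames above split across fragment boundaries.

assert u'\u6f22\u5b57'.encode('utf-8') == b'\xe6\xbc\xa2' + b'\xe5\xad\x97'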
+ def test_receive_fragments_immediate_zero_termination(self):
+ request = _create_request((b'\x01\x8c', b'Hello World!'),
+ (b'\x80\x80', b''))
+ self.assertEqual('Hello World!', msgutil.receive_message(request))
+
+ def test_receive_fragments_duplicate_start(self):
+ request = _create_request((b'\x01\x85', b'Hello'),
+ (b'\x01\x85', b'World'))
+ self.assertRaises(msgutil.InvalidFrameException,
+ msgutil.receive_message, request)
+
+ def test_receive_fragments_intermediate_but_not_started(self):
+ request = _create_request((b'\x00\x85', b'Hello'))
+ self.assertRaises(msgutil.InvalidFrameException,
+ msgutil.receive_message, request)
+
+ def test_receive_fragments_end_but_not_started(self):
+ request = _create_request((b'\x80\x85', b'Hello'))
+ self.assertRaises(msgutil.InvalidFrameException,
+ msgutil.receive_message, request)
+
+ def test_receive_message_discard(self):
+ request = _create_request(
+ (b'\x8f\x86', b'IGNORE'), (b'\x81\x85', b'Hello'),
+ (b'\x8f\x89', b'DISREGARD'), (b'\x81\x86', b'World!'))
+ self.assertRaises(msgutil.UnsupportedFrameException,
+ msgutil.receive_message, request)
+ self.assertEqual('Hello', msgutil.receive_message(request))
+ self.assertRaises(msgutil.UnsupportedFrameException,
+ msgutil.receive_message, request)
+ self.assertEqual('World!', msgutil.receive_message(request))
+
+ def test_receive_close(self):
+ request = _create_request(
+ (b'\x88\x8a', struct.pack('!H', 1000) + b'Good bye'))
+ self.assertEqual(None, msgutil.receive_message(request))
+ self.assertEqual(1000, request.ws_close_code)
+ self.assertEqual('Good bye', request.ws_close_reason)
+
+ def test_send_longest_close(self):
+ reason = 'a' * 123
+ request = _create_request(
+ (b'\x88\xfd', struct.pack('!H', common.STATUS_NORMAL_CLOSURE) +
+ reason.encode('UTF-8')))
+ request.ws_stream.close_connection(common.STATUS_NORMAL_CLOSURE,
+ reason)
+ self.assertEqual(request.ws_close_code, common.STATUS_NORMAL_CLOSURE)
+ self.assertEqual(request.ws_close_reason, reason)
+
+ def test_send_close_too_long(self):
+ request = _create_request()
+ self.assertRaises(msgutil.BadOperationException,
+ Stream.close_connection, request.ws_stream,
+ common.STATUS_NORMAL_CLOSURE, 'a' * 124)
+
+ def test_send_close_inconsistent_code_and_reason(self):
+ request = _create_request()
+ # reason parameter must not be specified when code is None.
+ self.assertRaises(msgutil.BadOperationException,
+ Stream.close_connection, request.ws_stream, None,
+ 'a')
+
+ def test_send_ping(self):
+ request = _create_request()
+ msgutil.send_ping(request, 'Hello World!')
+ self.assertEqual(b'\x89\x0cHello World!',
+ request.connection.written_data())
+
+ def test_send_longest_ping(self):
+ request = _create_request()
+ msgutil.send_ping(request, 'a' * 125)
+ self.assertEqual(b'\x89\x7d' + b'a' * 125,
+ request.connection.written_data())
+
+ def test_send_ping_too_long(self):
+ request = _create_request()
+ self.assertRaises(msgutil.BadOperationException, msgutil.send_ping,
+ request, 'a' * 126)
+
+ def test_receive_ping(self):
+ """Tests receiving a ping control frame."""
+ def handler(request, message):
+ request.called = True
+
+        # Stream automatically responds to a ping with a pong without any
+        # action by the application layer.
+ request = _create_request((b'\x89\x85', b'Hello'),
+ (b'\x81\x85', b'World'))
+ self.assertEqual('World', msgutil.receive_message(request))
+ self.assertEqual(b'\x8a\x05Hello', request.connection.written_data())
+
+ request = _create_request((b'\x89\x85', b'Hello'),
+ (b'\x81\x85', b'World'))
+ request.on_ping_handler = handler
+ self.assertEqual('World', msgutil.receive_message(request))
+ self.assertTrue(request.called)
+
+ def test_receive_longest_ping(self):
+ request = _create_request((b'\x89\xfd', b'a' * 125),
+ (b'\x81\x85', b'World'))
+ self.assertEqual('World', msgutil.receive_message(request))
+ self.assertEqual(b'\x8a\x7d' + b'a' * 125,
+ request.connection.written_data())
+
+ def test_receive_ping_too_long(self):
+ request = _create_request((b'\x89\xfe\x00\x7e', b'a' * 126))
+ self.assertRaises(msgutil.InvalidFrameException,
+ msgutil.receive_message, request)
+
+ def test_receive_pong(self):
+ """Tests receiving a pong control frame."""
+ def handler(request, message):
+ request.called = True
+
+ request = _create_request((b'\x8a\x85', b'Hello'),
+ (b'\x81\x85', b'World'))
+ request.on_pong_handler = handler
+ msgutil.send_ping(request, 'Hello')
+ self.assertEqual(b'\x89\x05Hello', request.connection.written_data())
+ # Valid pong is received, but receive_message won't return for it.
+ self.assertEqual('World', msgutil.receive_message(request))
+ # Check that nothing was written after receive_message call.
+ self.assertEqual(b'\x89\x05Hello', request.connection.written_data())
+
+ self.assertTrue(request.called)
+
+ def test_receive_unsolicited_pong(self):
+ # Unsolicited pong is allowed from HyBi 07.
+ request = _create_request((b'\x8a\x85', b'Hello'),
+ (b'\x81\x85', b'World'))
+ msgutil.receive_message(request)
+
+ request = _create_request((b'\x8a\x85', b'Hello'),
+ (b'\x81\x85', b'World'))
+ msgutil.send_ping(request, 'Jumbo')
+        # The pong body doesn't match the ping body; it is still accepted.
+ msgutil.receive_message(request)
+
+ def test_ping_cannot_be_fragmented(self):
+ request = _create_request((b'\x09\x85', b'Hello'))
+ self.assertRaises(msgutil.InvalidFrameException,
+ msgutil.receive_message, request)
+
+ def test_ping_with_too_long_payload(self):
+ request = _create_request((b'\x89\xfe\x01\x00', b'a' * 256))
+ self.assertRaises(msgutil.InvalidFrameException,
+ msgutil.receive_message, request)
+
+
+class PerMessageDeflateTest(unittest.TestCase):
+ """Tests for permessage-deflate extension."""
+ def test_response_parameters(self):
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ extension.add_parameter('server_no_context_takeover', None)
+ processor = PerMessageDeflateExtensionProcessor(extension)
+ response = processor.get_extension_response()
+ self.assertTrue(response.has_parameter('server_no_context_takeover'))
+ self.assertEqual(
+ None, response.get_parameter_value('server_no_context_takeover'))
+
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ extension.add_parameter('client_max_window_bits', None)
+ processor = PerMessageDeflateExtensionProcessor(extension)
+
+ processor.set_client_max_window_bits(8)
+ processor.set_client_no_context_takeover(True)
+ response = processor.get_extension_response()
+ self.assertEqual(
+ '8', response.get_parameter_value('client_max_window_bits'))
+ self.assertTrue(response.has_parameter('client_no_context_takeover'))
+ self.assertEqual(
+ None, response.get_parameter_value('client_no_context_takeover'))
+
+ def test_send_message(self):
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ request = _create_request_from_rawdata(
+ b'', permessage_deflate_request=extension)
+ msgutil.send_message(request, 'Hello')
+
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
+ -zlib.MAX_WBITS)
+ compressed_hello = compress.compress(b'Hello')
+ compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH)
+ compressed_hello = compressed_hello[:-4]
+ expected = b'\xc1%c' % len(compressed_hello)
+ expected += compressed_hello
+ self.assertEqual(expected, request.connection.written_data())
+
+ def test_send_empty_message(self):
+ """Test that an empty message is compressed correctly."""
+
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ request = _create_request_from_rawdata(
+ b'', permessage_deflate_request=extension)
+
+ msgutil.send_message(request, '')
+
+ # Payload in binary: 0b00000000
+ # From LSB,
+ # - 1 bit of BFINAL (0)
+ # - 2 bits of BTYPE (no compression)
+ # - 5 bits of padding
+ self.assertEqual(b'\xc1\x01\x00', request.connection.written_data())
+
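The asserted frame can also be verified independently of the library: permessage-deflate strips the trailing 00 00 ff ff that Z_SYNC_FLUSH appends, so re-appending it and inflating the one-byte payload must yield the original (empty) message. A minimal sketch using only zlib, not part of the patch:

import zlib

payload = b'\x00'   # payload of the b'\xc1\x01\x00' frame asserted above
inflater = zlib.decompressobj(-zlib.MAX_WBITS)
assert inflater.decompress(payload + b'\x00\x00\xff\xff') == b''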
+ def test_send_message_with_null_character(self):
+ """Test that a simple payload (one null) is framed correctly."""
+
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ request = _create_request_from_rawdata(
+ b'', permessage_deflate_request=extension)
+
+ msgutil.send_message(request, '\x00')
+
+ # Payload in binary: 0b01100010 0b00000000 0b00000000
+ # From LSB,
+ # - 1 bit of BFINAL (0)
+ # - 2 bits of BTYPE (01 that means fixed Huffman)
+ # - 8 bits of the first code (00110000 that is the code for the literal
+ # alphabet 0x00)
+ # - 7 bits of the second code (0000000 that is the code for the
+ # end-of-block)
+ # - 1 bit of BFINAL (0)
+ # - 2 bits of BTYPE (no compression)
+ # - 2 bits of padding
+ self.assertEqual(b'\xc1\x03\x62\x00\x00',
+ request.connection.written_data())
+
+ def test_send_two_messages(self):
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ request = _create_request_from_rawdata(
+ b'', permessage_deflate_request=extension)
+ msgutil.send_message(request, 'Hello')
+ msgutil.send_message(request, 'World')
+
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
+ -zlib.MAX_WBITS)
+
+ expected = b''
+
+ compressed_hello = compress.compress(b'Hello')
+ compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH)
+ compressed_hello = compressed_hello[:-4]
+ expected += b'\xc1%c' % len(compressed_hello)
+ expected += compressed_hello
+
+ compressed_world = compress.compress(b'World')
+ compressed_world += compress.flush(zlib.Z_SYNC_FLUSH)
+ compressed_world = compressed_world[:-4]
+ expected += b'\xc1%c' % len(compressed_world)
+ expected += compressed_world
+
+ self.assertEqual(expected, request.connection.written_data())
+
+ def test_send_message_fragmented(self):
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ request = _create_request_from_rawdata(
+ b'', permessage_deflate_request=extension)
+ msgutil.send_message(request, 'Hello', end=False)
+ msgutil.send_message(request, 'Goodbye', end=False)
+ msgutil.send_message(request, 'World')
+
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
+ -zlib.MAX_WBITS)
+ compressed_hello = compress.compress(b'Hello')
+ compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH)
+ expected = b'\x41%c' % len(compressed_hello)
+ expected += compressed_hello
+ compressed_goodbye = compress.compress(b'Goodbye')
+ compressed_goodbye += compress.flush(zlib.Z_SYNC_FLUSH)
+ expected += b'\x00%c' % len(compressed_goodbye)
+ expected += compressed_goodbye
+ compressed_world = compress.compress(b'World')
+ compressed_world += compress.flush(zlib.Z_SYNC_FLUSH)
+ compressed_world = compressed_world[:-4]
+ expected += b'\x80%c' % len(compressed_world)
+ expected += compressed_world
+ self.assertEqual(expected, request.connection.written_data())
+
+ def test_send_message_fragmented_empty_first_frame(self):
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ request = _create_request_from_rawdata(
+ b'', permessage_deflate_request=extension)
+ msgutil.send_message(request, '', end=False)
+ msgutil.send_message(request, 'Hello')
+
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
+ -zlib.MAX_WBITS)
+        compressed_empty = compress.compress(b'')
+        compressed_empty += compress.flush(zlib.Z_SYNC_FLUSH)
+        expected = b'\x41%c' % len(compressed_empty)
+        expected += compressed_empty
+        compressed_hello = compress.compress(b'Hello')
+        compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH)
+        compressed_hello = compressed_hello[:-4]
+        expected += b'\x80%c' % len(compressed_hello)
+        expected += compressed_hello
+ self.assertEqual(expected, request.connection.written_data())
+
+ def test_send_message_fragmented_empty_last_frame(self):
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ request = _create_request_from_rawdata(
+ b'', permessage_deflate_request=extension)
+ msgutil.send_message(request, 'Hello', end=False)
+ msgutil.send_message(request, '')
+
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
+ -zlib.MAX_WBITS)
+ compressed_hello = compress.compress(b'Hello')
+ compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH)
+ expected = b'\x41%c' % len(compressed_hello)
+ expected += compressed_hello
+ compressed_empty = compress.compress(b'')
+ compressed_empty += compress.flush(zlib.Z_SYNC_FLUSH)
+ compressed_empty = compressed_empty[:-4]
+ expected += b'\x80%c' % len(compressed_empty)
+ expected += compressed_empty
+ self.assertEqual(expected, request.connection.written_data())
+
+ def test_send_message_using_small_window(self):
+ common_part = 'abcdefghijklmnopqrstuvwxyz'
+ test_message = common_part + '-' * 30000 + common_part
+
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ extension.add_parameter('server_max_window_bits', '8')
+ request = _create_request_from_rawdata(
+ b'', permessage_deflate_request=extension)
+ msgutil.send_message(request, test_message)
+
+ expected_websocket_header_size = 2
+ expected_websocket_payload_size = 91
+
+ actual_frame = request.connection.written_data()
+ self.assertEqual(
+ expected_websocket_header_size + expected_websocket_payload_size,
+ len(actual_frame))
+ actual_header = actual_frame[0:expected_websocket_header_size]
+ actual_payload = actual_frame[expected_websocket_header_size:]
+
+ self.assertEqual(b'\xc1%c' % expected_websocket_payload_size,
+ actual_header)
+ decompress = zlib.decompressobj(-8)
+ decompressed_message = decompress.decompress(actual_payload +
+ b'\x00\x00\xff\xff')
+ decompressed_message += decompress.flush()
+ self.assertEqual(test_message, decompressed_message.decode('UTF-8'))
+ self.assertEqual(0, len(decompress.unused_data))
+ self.assertEqual(0, len(decompress.unconsumed_tail))
+
+ def test_send_message_no_context_takeover_parameter(self):
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ extension.add_parameter('server_no_context_takeover', None)
+ request = _create_request_from_rawdata(
+ b'', permessage_deflate_request=extension)
+ for i in range(3):
+ msgutil.send_message(request, 'Hello', end=False)
+ msgutil.send_message(request, 'Hello', end=True)
+
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
+ -zlib.MAX_WBITS)
+
+ first_hello = compress.compress(b'Hello')
+ first_hello += compress.flush(zlib.Z_SYNC_FLUSH)
+ expected = b'\x41%c' % len(first_hello)
+ expected += first_hello
+ second_hello = compress.compress(b'Hello')
+ second_hello += compress.flush(zlib.Z_SYNC_FLUSH)
+ second_hello = second_hello[:-4]
+ expected += b'\x80%c' % len(second_hello)
+ expected += second_hello
+
+ self.assertEqual(expected + expected + expected,
+ request.connection.written_data())
+
+ def test_send_message_fragmented_bfinal(self):
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ request = _create_request_from_rawdata(
+ b'', permessage_deflate_request=extension)
+ self.assertEqual(1, len(request.ws_extension_processors))
+ request.ws_extension_processors[0].set_bfinal(True)
+ msgutil.send_message(request, 'Hello', end=False)
+ msgutil.send_message(request, 'World', end=True)
+
+ expected = b''
+
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
+ -zlib.MAX_WBITS)
+ compressed_hello = compress.compress(b'Hello')
+ compressed_hello += compress.flush(zlib.Z_FINISH)
+ compressed_hello = compressed_hello + struct.pack('!B', 0)
+ expected += b'\x41%c' % len(compressed_hello)
+ expected += compressed_hello
+
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
+ -zlib.MAX_WBITS)
+ compressed_world = compress.compress(b'World')
+ compressed_world += compress.flush(zlib.Z_FINISH)
+ compressed_world = compressed_world + struct.pack('!B', 0)
+ expected += b'\x80%c' % len(compressed_world)
+ expected += compressed_world
+
+ self.assertEqual(expected, request.connection.written_data())
+
+ def test_receive_message_deflate(self):
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
+ -zlib.MAX_WBITS)
+
+ compressed_hello = compress.compress(b'Hello')
+ compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH)
+ compressed_hello = compressed_hello[:-4]
+ data = b'\xc1%c' % (len(compressed_hello) | 0x80)
+ data += _mask_hybi(compressed_hello)
+
+ # Close frame
+ data += b'\x88\x8a' + _mask_hybi(struct.pack('!H', 1000) + b'Good bye')
+
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ request = _create_request_from_rawdata(
+ data, permessage_deflate_request=extension)
+ self.assertEqual('Hello', msgutil.receive_message(request))
+
+ self.assertEqual(None, msgutil.receive_message(request))
+
+ def test_receive_message_random_section(self):
+ """Test that a compressed message fragmented into lots of chunks is
+ correctly received.
+ """
+
+ random.seed(a=0)
+ payload = b''.join(
+ [struct.pack('!B', random.randint(0, 255)) for i in range(1000)])
+
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
+ -zlib.MAX_WBITS)
+ compressed_payload = compress.compress(payload)
+ compressed_payload += compress.flush(zlib.Z_SYNC_FLUSH)
+ compressed_payload = compressed_payload[:-4]
+
+ # Fragment the compressed payload into lots of frames.
+ bytes_chunked = 0
+ data = b''
+ frame_count = 0
+
+ chunk_sizes = []
+
+ while bytes_chunked < len(compressed_payload):
+            # Make sure that
+            # - each chunk is at most 125 bytes long so that the 1-octet
+            #   length header format can be used for all frames, and
+            # - at least 10 chunks are created.
+ chunk_size = random.randint(
+ 1,
+ min(125,
+ len(compressed_payload) // 10,
+ len(compressed_payload) - bytes_chunked))
+ chunk_sizes.append(chunk_size)
+ chunk = compressed_payload[bytes_chunked:bytes_chunked +
+ chunk_size]
+ bytes_chunked += chunk_size
+
+ first_octet = 0x00
+ if len(data) == 0:
+ first_octet = first_octet | 0x42
+ if bytes_chunked == len(compressed_payload):
+ first_octet = first_octet | 0x80
+
+ data += b'%c%c' % (first_octet, chunk_size | 0x80)
+ data += _mask_hybi(chunk)
+
+ frame_count += 1
+
+ self.assertTrue(len(chunk_sizes) > 10)
+
+ # Close frame
+ data += b'\x88\x8a' + _mask_hybi(struct.pack('!H', 1000) + b'Good bye')
+
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ request = _create_request_from_rawdata(
+ data, permessage_deflate_request=extension)
+ self.assertEqual(payload, msgutil.receive_message(request))
+
+ self.assertEqual(None, msgutil.receive_message(request))
+
+ def test_receive_two_messages(self):
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
+ -zlib.MAX_WBITS)
+
+ data = b''
+
+ compressed_hello = compress.compress(b'HelloWebSocket')
+ compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH)
+ compressed_hello = compressed_hello[:-4]
+ split_position = len(compressed_hello) // 2
+ data += b'\x41%c' % (split_position | 0x80)
+ data += _mask_hybi(compressed_hello[:split_position])
+
+ data += b'\x80%c' % ((len(compressed_hello) - split_position) | 0x80)
+ data += _mask_hybi(compressed_hello[split_position:])
+
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED,
+ -zlib.MAX_WBITS)
+
+ compressed_world = compress.compress(b'World')
+ compressed_world += compress.flush(zlib.Z_SYNC_FLUSH)
+ compressed_world = compressed_world[:-4]
+ data += b'\xc1%c' % (len(compressed_world) | 0x80)
+ data += _mask_hybi(compressed_world)
+
+ # Close frame
+ data += b'\x88\x8a' + _mask_hybi(struct.pack('!H', 1000) + b'Good bye')
+
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ request = _create_request_from_rawdata(
+ data, permessage_deflate_request=extension)
+ self.assertEqual('HelloWebSocket', msgutil.receive_message(request))
+ self.assertEqual('World', msgutil.receive_message(request))
+
+ self.assertEqual(None, msgutil.receive_message(request))
+
+ def test_receive_message_mixed_btype(self):
+        """Test that a message compressed using lots of DEFLATE blocks with
+        various flush modes is correctly received.
+        """
+
+ random.seed(a=0)
+ payload = b''.join(
+ [struct.pack('!B', random.randint(0, 255)) for i in range(1000)])
+
+ compress = None
+
+ # Fragment the compressed payload into lots of frames.
+ bytes_chunked = 0
+ compressed_payload = b''
+
+ chunk_sizes = []
+ methods = []
+ sync_used = False
+ finish_used = False
+
+ while bytes_chunked < len(payload):
+ # Make sure at least 10 chunks are created.
+ chunk_size = random.randint(1,
+ min(100,
+ len(payload) - bytes_chunked))
+ chunk_sizes.append(chunk_size)
+ chunk = payload[bytes_chunked:bytes_chunked + chunk_size]
+
+ bytes_chunked += chunk_size
+
+ if compress is None:
+ compress = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
+ zlib.DEFLATED, -zlib.MAX_WBITS)
+
+ if bytes_chunked == len(payload):
+ compressed_payload += compress.compress(chunk)
+ compressed_payload += compress.flush(zlib.Z_SYNC_FLUSH)
+ compressed_payload = compressed_payload[:-4]
+ else:
+ method = random.randint(0, 1)
+ methods.append(method)
+ if method == 0:
+ compressed_payload += compress.compress(chunk)
+ compressed_payload += compress.flush(zlib.Z_SYNC_FLUSH)
+ sync_used = True
+ else:
+ compressed_payload += compress.compress(chunk)
+ compressed_payload += compress.flush(zlib.Z_FINISH)
+ compress = None
+ finish_used = True
+
+ self.assertTrue(len(chunk_sizes) > 10)
+ self.assertTrue(sync_used)
+ self.assertTrue(finish_used)
+
+ self.assertTrue(125 < len(compressed_payload))
+ self.assertTrue(len(compressed_payload) < 65536)
+ data = b'\xc2\xfe' + struct.pack('!H', len(compressed_payload))
+ data += _mask_hybi(compressed_payload)
+
+ # Close frame
+ data += b'\x88\x8a' + _mask_hybi(struct.pack('!H', 1000) + b'Good bye')
+
+ extension = common.ExtensionParameter(
+ common.PERMESSAGE_DEFLATE_EXTENSION)
+ request = _create_request_from_rawdata(
+ data, permessage_deflate_request=extension)
+ self.assertEqual(payload, msgutil.receive_message(request))
+
+ self.assertEqual(None, msgutil.receive_message(request))
+
+
+class MessageReceiverTest(unittest.TestCase):
+ """Tests the Stream class using MessageReceiver."""
+ def test_queue(self):
+ request = _create_blocking_request()
+ receiver = msgutil.MessageReceiver(request)
+
+ self.assertEqual(None, receiver.receive_nowait())
+
+ request.connection.put_bytes(b'\x81\x86' + _mask_hybi(b'Hello!'))
+ self.assertEqual('Hello!', receiver.receive())
+
+ def test_onmessage(self):
+ onmessage_queue = six.moves.queue.Queue()
+
+ def onmessage_handler(message):
+ onmessage_queue.put(message)
+
+ request = _create_blocking_request()
+ receiver = msgutil.MessageReceiver(request, onmessage_handler)
+
+ request.connection.put_bytes(b'\x81\x86' + _mask_hybi(b'Hello!'))
+ self.assertEqual('Hello!', onmessage_queue.get())
+
+
+class MessageSenderTest(unittest.TestCase):
+ """Tests the Stream class using MessageSender."""
+ def test_send(self):
+ request = _create_blocking_request()
+ sender = msgutil.MessageSender(request)
+
+ sender.send('World')
+ self.assertEqual(b'\x81\x05World', request.connection.written_data())
+
+ def test_send_nowait(self):
+ # Use a queue to check the bytes written by MessageSender.
+ # request.connection.written_data() cannot be used here because
+ # MessageSender runs in a separate thread.
+ send_queue = six.moves.queue.Queue()
+
+ def write(bytes):
+ send_queue.put(bytes)
+
+ request = _create_blocking_request()
+ request.connection.write = write
+
+ sender = msgutil.MessageSender(request)
+
+ sender.send_nowait('Hello')
+ sender.send_nowait('World')
+ self.assertEqual(b'\x81\x05Hello', send_queue.get())
+ self.assertEqual(b'\x81\x05World', send_queue.get())
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_stream.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_stream.py
new file mode 100755
index 0000000000..153899d205
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_stream.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+#
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Tests for stream module."""
+
+from __future__ import absolute_import
+import unittest
+
+import set_sys_path # Update sys.path to locate mod_pywebsocket module.
+
+from mod_pywebsocket import common
+from mod_pywebsocket import stream
+
+
+class StreamTest(unittest.TestCase):
+ """A unittest for stream module."""
+ def test_create_header(self):
+ # more, rsv1, ..., rsv4 are all true
+        # fin, rsv1, rsv2, rsv3 and the mask bit are all set
+ self.assertEqual(b'\xf1\x81', header)
+
+ # Maximum payload size
+ header = stream.create_header(common.OPCODE_TEXT, (1 << 63) - 1, 0, 0,
+ 0, 0, 0)
+ self.assertEqual(b'\x01\x7f\x7f\xff\xff\xff\xff\xff\xff\xff', header)
+
+ # Invalid opcode 0x10
+ self.assertRaises(ValueError, stream.create_header, 0x10, 0, 0, 0, 0,
+ 0, 0)
+
+        # Invalid value 0xf passed to the fin (more) parameter
+ self.assertRaises(ValueError, stream.create_header, common.OPCODE_TEXT,
+ 0, 0xf, 0, 0, 0, 0)
+
+ # Too long payload_length
+ self.assertRaises(ValueError, stream.create_header, common.OPCODE_TEXT,
+ 1 << 63, 0, 0, 0, 0, 0)
+
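For readers decoding the asserted bytes by hand, the first header b'\xf1\x81' breaks down per the RFC 6455 base framing layout (FIN and RSV bits plus opcode in the first octet, MASK bit plus 7-bit length in the second). A small illustrative decode, not part of the patch:

first, second = 0xf1, 0x81
fin = (first >> 7) & 1                            # 1
rsv = [(first >> bit) & 1 for bit in (6, 5, 4)]   # [1, 1, 1]
opcode = first & 0x0f                             # 0x1, the text opcode
mask_bit = (second >> 7) & 1                      # 1
payload_length = second & 0x7f                    # 1
assert (fin, rsv, opcode, mask_bit, payload_length) == (1, [1, 1, 1], 1, 1, 1)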
+
+if __name__ == '__main__':
+ unittest.main()
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_util.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_util.py
new file mode 100755
index 0000000000..bf4bd32bba
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/test_util.py
@@ -0,0 +1,191 @@
+#!/usr/bin/env python
+#
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Tests for util module."""
+
+from __future__ import absolute_import
+from __future__ import print_function
+import os
+import random
+import sys
+import unittest
+import struct
+
+import set_sys_path # Update sys.path to locate mod_pywebsocket module.
+
+from mod_pywebsocket import util
+from six.moves import range
+from six import PY3
+from six import int2byte
+
+_TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'testdata')
+
+
+class UtilTest(unittest.TestCase):
+ """A unittest for util module."""
+ def test_prepend_message_to_exception(self):
+ exc = Exception('World')
+ self.assertEqual('World', str(exc))
+ util.prepend_message_to_exception('Hello ', exc)
+ self.assertEqual('Hello World', str(exc))
+
+ def test_get_script_interp(self):
+ cygwin_path = 'c:\\cygwin\\bin'
+ cygwin_perl = os.path.join(cygwin_path, 'perl')
+ self.assertEqual(
+ None, util.get_script_interp(os.path.join(_TEST_DATA_DIR,
+ 'README')))
+ self.assertEqual(
+ None,
+ util.get_script_interp(os.path.join(_TEST_DATA_DIR, 'README'),
+ cygwin_path))
+ self.assertEqual(
+ '/usr/bin/perl -wT',
+ util.get_script_interp(os.path.join(_TEST_DATA_DIR, 'hello.pl')))
+ self.assertEqual(
+ cygwin_perl + ' -wT',
+ util.get_script_interp(os.path.join(_TEST_DATA_DIR, 'hello.pl'),
+ cygwin_path))
+
+ def test_hexify(self):
+ self.assertEqual('61 7a 41 5a 30 39 20 09 0d 0a 00 ff',
+ util.hexify(b'azAZ09 \t\r\n\x00\xff'))
+
+
+class RepeatedXorMaskerTest(unittest.TestCase):
+ """A unittest for RepeatedXorMasker class."""
+ def test_mask(self):
+ # Sample input e6,97,a5 is U+65e5 in UTF-8
+ masker = util.RepeatedXorMasker(b'\xff\xff\xff\xff')
+ result = masker.mask(b'\xe6\x97\xa5')
+ self.assertEqual(b'\x19\x68\x5a', result)
+
+ masker = util.RepeatedXorMasker(b'\x00\x00\x00\x00')
+ result = masker.mask(b'\xe6\x97\xa5')
+ self.assertEqual(b'\xe6\x97\xa5', result)
+
+ masker = util.RepeatedXorMasker(b'\xe6\x97\xa5\x20')
+ result = masker.mask(b'\xe6\x97\xa5')
+ self.assertEqual(b'\x00\x00\x00', result)
+
+ def test_mask_twice(self):
+ masker = util.RepeatedXorMasker(b'\x00\x7f\xff\x20')
+ # mask[0], mask[1], ... will be used.
+ result = masker.mask(b'\x00\x00\x00\x00\x00')
+ self.assertEqual(b'\x00\x7f\xff\x20\x00', result)
+        # mask[1], mask[2], ... will be used for the next call.
+ result = masker.mask(b'\x00\x00\x00\x00\x00')
+ self.assertEqual(b'\x7f\xff\x20\x00\x7f', result)
+
+ def test_mask_large_data(self):
+ masker = util.RepeatedXorMasker(b'mASk')
+ original = b''.join([util.pack_byte(i % 256) for i in range(1000)])
+ result = masker.mask(original)
+ expected = b''.join([
+ util.pack_byte((i % 256) ^ ord('mASk'[i % 4])) for i in range(1000)
+ ])
+ self.assertEqual(expected, result)
+
+ masker = util.RepeatedXorMasker(b'MaSk')
+ first_part = b'The WebSocket Protocol enables two-way communication.'
+ result = masker.mask(first_part)
+ self.assertEqual(
+ b'\x19\t6K\x1a\x0418"\x028\x0e9A\x03\x19"\x15<\x08"\rs\x0e#'
+ b'\x001\x07(\x12s\x1f:\x0e~\x1c,\x18s\x08"\x0c>\x1e#\x080\n9'
+ b'\x08<\x05c', result)
+ second_part = b'It has two parts: a handshake and the data transfer.'
+ result = masker.mask(second_part)
+ self.assertEqual(
+ b"('K%\x00 K9\x16<K=\x00!\x1f>[s\nm\t2\x05)\x12;\n&\x04s\n#"
+ b"\x05s\x1f%\x04s\x0f,\x152K9\x132\x05>\x076\x19c", result)
+
+
+def get_random_section(source, min_num_chunks):
+ chunks = []
+ bytes_chunked = 0
+
+ while bytes_chunked < len(source):
+ chunk_size = random.randint(
+            1, min(len(source) // min_num_chunks,
+ len(source) - bytes_chunked))
+ chunk = source[bytes_chunked:bytes_chunked + chunk_size]
+ chunks.append(chunk)
+ bytes_chunked += chunk_size
+
+ return chunks
+
+
+class InflaterDeflaterTest(unittest.TestCase):
+ """A unittest for _Inflater and _Deflater class."""
+ def test_inflate_deflate_default(self):
+ input = b'hello' + b'-' * 30000 + b'hello'
+ inflater15 = util._Inflater(15)
+ deflater15 = util._Deflater(15)
+ inflater8 = util._Inflater(8)
+ deflater8 = util._Deflater(8)
+
+ compressed15 = deflater15.compress_and_finish(input)
+ compressed8 = deflater8.compress_and_finish(input)
+
+ inflater15.append(compressed15)
+ inflater8.append(compressed8)
+
+ self.assertNotEqual(compressed15, compressed8)
+ self.assertEqual(input, inflater15.decompress(-1))
+ self.assertEqual(input, inflater8.decompress(-1))
+
+ def test_random_section(self):
+ random.seed(a=0)
+ source = b''.join(
+ [int2byte(random.randint(0, 255)) for i in range(100 * 1024)])
+
+ chunked_input = get_random_section(source, 10)
+
+ deflater = util._Deflater(15)
+ compressed = []
+ for chunk in chunked_input:
+ compressed.append(deflater.compress(chunk))
+ compressed.append(deflater.compress_and_finish(b''))
+
+ chunked_expectation = get_random_section(source, 10)
+
+ inflater = util._Inflater(15)
+ inflater.append(b''.join(compressed))
+ for chunk in chunked_expectation:
+ decompressed = inflater.decompress(len(chunk))
+ self.assertEqual(chunk, decompressed)
+
+ self.assertEqual(b'', inflater.decompress(-1))
+
+
+if __name__ == '__main__':
+ unittest.main()
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/README b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/README
new file mode 100644
index 0000000000..c001aa5595
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/README
@@ -0,0 +1 @@
+Test data directory
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/abort_by_user_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/abort_by_user_wsh.py
new file mode 100644
index 0000000000..63cb541bb7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/abort_by_user_wsh.py
@@ -0,0 +1,41 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from mod_pywebsocket import handshake
+
+
+def web_socket_do_extra_handshake(request):
+ raise handshake.AbortedByUserException("abort for test")
+
+
+def web_socket_transfer_data(request):
+ raise handshake.AbortedByUserException("abort for test")
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/blank_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/blank_wsh.py
new file mode 100644
index 0000000000..b398e96778
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/blank_wsh.py
@@ -0,0 +1,30 @@
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# intentionally left blank
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/origin_check_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/origin_check_wsh.py
new file mode 100644
index 0000000000..bf6442e65b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/origin_check_wsh.py
@@ -0,0 +1,43 @@
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+def web_socket_do_extra_handshake(request):
+ if request.ws_origin == 'http://example.com':
+ return
+ raise ValueError('Unacceptable origin: %r' % request.ws_origin)
+
+
+def web_socket_transfer_data(request):
+ message = 'origin_check_wsh.py is called for %s, %s' % (
+ request.ws_resource, request.ws_protocol)
+ request.connection.write(message.encode('UTF-8'))
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/exception_in_transfer_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/exception_in_transfer_wsh.py
new file mode 100644
index 0000000000..54390994d7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/exception_in_transfer_wsh.py
@@ -0,0 +1,42 @@
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Exception in web_socket_transfer_data().
+"""
+
+
+def web_socket_do_extra_handshake(request):
+ pass
+
+
+def web_socket_transfer_data(request):
+ raise Exception('Intentional Exception for %s, %s' %
+ (request.ws_resource, request.ws_protocol))
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/no_wsh_at_the_end.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/no_wsh_at_the_end.py
new file mode 100644
index 0000000000..ebfddb7449
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/no_wsh_at_the_end.py
@@ -0,0 +1,43 @@
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Correct signatures, wrong file name.
+"""
+
+
+def web_socket_do_extra_handshake(request):
+ pass
+
+
+def web_socket_transfer_data(request):
+ message = 'sub/no_wsh_at_the_end.py is called for %s, %s' % (
+ request.ws_resource, request.ws_protocol)
+ request.connection.write(message.encode('UTF-8'))
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/non_callable_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/non_callable_wsh.py
new file mode 100644
index 0000000000..8afcfa97a9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/non_callable_wsh.py
@@ -0,0 +1,35 @@
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Non-callable handlers.
+"""
+
+web_socket_do_extra_handshake = True
+web_socket_transfer_data = 1
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/plain_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/plain_wsh.py
new file mode 100644
index 0000000000..8a7db1e5ac
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/plain_wsh.py
@@ -0,0 +1,41 @@
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+def web_socket_do_extra_handshake(request):
+ pass
+
+
+def web_socket_transfer_data(request):
+ message = 'sub/plain_wsh.py is called for %s, %s' % (request.ws_resource,
+ request.ws_protocol)
+ request.connection.write(message.encode('UTF-8'))
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/wrong_handshake_sig_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/wrong_handshake_sig_wsh.py
new file mode 100644
index 0000000000..cebb0da1ab
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/wrong_handshake_sig_wsh.py
@@ -0,0 +1,43 @@
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Wrong web_socket_do_extra_handshake signature.
+"""
+
+
+def no_web_socket_do_extra_handshake(request):
+ pass
+
+
+def web_socket_transfer_data(request):
+ message = 'sub/wrong_handshake_sig_wsh.py is called for %s, %s' % (
+ request.ws_resource, request.ws_protocol)
+ request.connection.write(message.encode('UTF-8'))
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/wrong_transfer_sig_wsh.py b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/wrong_transfer_sig_wsh.py
new file mode 100644
index 0000000000..ad27d6bcfe
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/wrong_transfer_sig_wsh.py
@@ -0,0 +1,43 @@
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Wrong web_socket_transfer_data() signature.
+"""
+
+
+def web_socket_do_extra_handshake(request):
+ pass
+
+
+def no_web_socket_transfer_data(request):
+ message = 'sub/wrong_transfer_sig_wsh.py is called for %s, %s' % (
+ request.ws_resource, request.ws_protocol)
+ request.connection.write(message.encode('UTF-8'))
+
+
+# vi:sts=4 sw=4 et
diff --git a/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/hello.pl b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/hello.pl
new file mode 100644
index 0000000000..882ef5a100
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/pywebsocket3/test/testdata/hello.pl
@@ -0,0 +1,32 @@
+#!/usr/bin/perl -wT
+#
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+print "Hello\n";
diff --git a/testing/web-platform/tests/tools/third_party/six/CHANGES b/testing/web-platform/tests/tools/third_party/six/CHANGES
new file mode 100644
index 0000000000..ffa702601b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/CHANGES
@@ -0,0 +1,315 @@
+Changelog for six
+=================
+
+This file lists the changes in each six version.
+
+1.13.0
+------
+
+- Issue #298, pull request #299: Add `six.moves.dbm_ndbm`.
+
+- Issue #155: Add `six.moves.collections_abc`, which aliases the `collections`
+ module on Python 2-3.2 and the `collections.abc` module on Python 3.3 and greater.
+
+- Pull request #304: Re-add distutils fallback in `setup.py`.
+
+- Pull request #305: On Python 3.7, `with_metaclass` supports classes using PEP
+ 560 features.
+
+1.12.0
+------
+
+- Issue #259, pull request #260: `six.add_metaclass` now preserves
+ `__qualname__` from the original class.
+
+- Pull request #204: Add `six.ensure_binary`, `six.ensure_text`, and
+ `six.ensure_str`.
+
+1.11.0
+------
+
+- Pull request #178: `with_metaclass` now properly proxies `__prepare__` to the
+ underlying metaclass.
+
+- Pull request #191: Allow `with_metaclass` to work with metaclasses implemented
+ in C.
+
+- Pull request #203: Add parse_http_list and parse_keqv_list to moved
+ urllib.request.
+
+- Pull request #172 and issue #171: Add unquote_to_bytes to moved urllib.parse.
+
+- Pull request #167: Add `six.moves.getoutput`.
+
+- Pull request #80: Add `six.moves.urllib_parse.splitvalue`.
+
+- Pull request #75: Add `six.moves.email_mime_image`.
+
+- Pull request #72: Avoid creating reference cycles through tracebacks in
+ `reraise`.
+
+1.10.0
+------
+
+- Issue #122: Improve the performance of `six.int2byte` on Python 3.
+
+- Pull request #55 and issue #99: Don't add the `winreg` module to `six.moves`
+ on non-Windows platforms.
+
+- Pull request #60 and issue #108: Add `six.moves.getcwd` and
+ `six.moves.getcwdu`.
+
+- Pull request #64: Add `create_unbound_method` to create unbound methods.
+
+1.9.0
+-----
+
+- Issue #106: Support the `flush` parameter to `six.print_`.
+
+- Pull request #48 and issue #15: Add the `python_2_unicode_compatible`
+ decorator.
+
+- Pull request #57 and issue #50: Add several compatibility methods for unittest
+ assertions that were renamed between Python 2 and 3.
+
+- Issue #105 and pull request #58: Ensure `six.wraps` respects the *updated* and
+ *assigned* arguments.
+
+- Issue #102: Add `raise_from` to abstract out Python 3's raise from syntax.
+
+- Issue #97: Optimize `six.iterbytes` on Python 2.
+
+- Issue #98: Fix `six.moves` race condition in multi-threaded code.
+
+- Pull request #51: Add `six.view(keys|values|items)`, which provide dictionary
+ views on Python 2.7+.
+
+- Issue #112: `six.moves.reload_module` now uses the importlib module on
+ Python 3.4+.
+
+1.8.0
+-----
+
+- Issue #90: Add `six.moves.shlex_quote`.
+
+- Issue #59: Add `six.moves.intern`.
+
+- Add `six.urllib.parse.uses_(fragment|netloc|params|query|relative)`.
+
+- Issue #88: Fix add_metaclass when the class has `__slots__` containing
+ `__weakref__` or `__dict__`.
+
+- Issue #89: Make six use absolute imports.
+
+- Issue #85: Always accept *updated* and *assigned* arguments for `wraps()`.
+
+- Issue #86: In `reraise()`, instantiate the exception if the second argument is
+ `None`.
+
+- Pull request #45: Add `six.moves.email_mime_nonmultipart`.
+
+- Issue #81: Add `six.urllib.request.splittag` mapping.
+
+- Issue #80: Add `six.urllib.request.splituser` mapping.
+
+1.7.3
+-----
+
+- Issue #77: Fix import six on Python 3.4 with a custom loader.
+
+- Issue #74: `six.moves.xmlrpc_server` should map to `SimpleXMLRPCServer` on Python
+ 2 as documented, not `xmlrpclib`.
+
+1.7.2
+-----
+
+- Issue #72: Fix installing on Python 2.
+
+1.7.1
+-----
+
+- Issue #71: Make the six.moves meta path importer handle reloading of the six
+ module gracefully.
+
+1.7.0
+-----
+
+- Pull request #30: Implement six.moves with a PEP 302 meta path hook.
+
+- Pull request #32: Add six.wraps, which is like functools.wraps but always sets
+ the __wrapped__ attribute.
+
+- Pull request #35: Improve add_metaclass, so that it doesn't end up inserting
+ another class into the hierarchy.
+
+- Pull request #34: Add import mappings for dummy_thread.
+
+- Pull request #33: Add import mappings for UserDict and UserList.
+
+- Pull request #31: Select the implementations of dictionary iterator routines
+ at import time for a 20% speed boost.
+
+1.6.1
+-----
+
+- Raise an AttributeError for six.moves.X when X is a module not available in
+ the current interpreter.
+
+1.6.0
+-----
+
+- Raise an AttributeError for every attribute of unimportable modules.
+
+- Issue #56: Make the fake modules six.moves puts into sys.modules appear not to
+ have a __path__ unless they are loaded.
+
+- Pull request #28: Add support for SplitResult.
+
+- Issue #55: Add move mapping for xmlrpc.server.
+
+- Pull request #29: Add move for urllib.parse.splitquery.
+
+1.5.2
+-----
+
+- Issue #53: Make the fake modules six.moves puts into sys.modules appear not to
+ have a __name__ unless they are loaded.
+
+1.5.1
+-----
+
+- Issue #51: Hack around the Django autoreloader after recent six.moves changes.
+
+1.5.0
+-----
+
+- Removed support for Python 2.4. This is because py.test no longer supports
+ 2.4.
+
+- Fix various import problems including issues #19 and #41. six.moves modules
+ are now lazy wrappers over the underlying modules instead of the actual
+ modules themselves.
+
+- Issue #49: Add six.moves mapping for tkinter.ttk.
+
+- Pull request #24: Add __dir__ special method to six.moves modules.
+
+- Issue #47: Fix add_metaclass on classes with a string for the __slots__
+ variable.
+
+- Issue #44: Fix interpretation of backslashes on Python 2 in the u() function.
+
+- Pull request #21: Add import mapping for urllib's proxy_bypass function.
+
+- Issue #43: Add import mapping for the Python 2 xmlrpclib module.
+
+- Issue #39: Add import mapping for the Python 2 thread module.
+
+- Issue #40: Add import mapping for the Python 2 gdbm module.
+
+- Issue #35: On Python versions less than 2.7, print_ now encodes unicode
+ strings when outputting to standard streams. (Python 2.7 handles this
+ automatically.)
+
+1.4.1
+-----
+
+- Issue #32: urllib module wrappings don't work when six is not a toplevel file.
+
+1.4.0
+-----
+
+- Issue #31: Add six.moves mapping for UserString.
+
+- Pull request #12: Add six.add_metaclass, a decorator for adding a metaclass to
+ a class.
+
+- Add six.moves.zip_longest and six.moves.filterfalse, which correspond
+ respectively to itertools.izip_longest and itertools.ifilterfalse on Python 2
+ and itertools.zip_longest and itertools.filterfalse on Python 3.
+
+- Issue #25: Add the unichr function, which returns a string for a Unicode
+ codepoint.
+
+- Issue #26: Add byte2int function, which complements int2byte.
+
+- Add a PY2 constant with obvious semantics.
+
+- Add helpers for indexing and iterating over bytes: iterbytes and indexbytes.
+
+- Add create_bound_method() wrapper.
+
+- Issue #23: Allow multiple base classes to be passed to with_metaclass.
+
+- Issue #24: Add six.moves.range alias. This is exactly the same as the current
+ xrange alias.
+
+- Pull request #5: Create six.moves.urllib, which contains abstractions for a
+ bunch of things which are in urllib in Python 3 and spread out across urllib,
+ urllib2, and urlparse in Python 2.
+
+1.3.0
+-----
+
+- Issue #21: Add methods to access the closure and globals of a function.
+
+- In six.iter(items/keys/values/lists), pass keyword arguments through to the
+ underlying method.
+
+- Add six.iterlists().
+
+- Issue #20: Fix tests if tkinter is not available.
+
+- Issue #17: Define callable to be builtin callable when it is available again
+ in Python 3.2+.
+
+- Issue #16: Rename Python 2 exec_'s arguments, so casually calling exec_ with
+ keyword arguments will raise.
+
+- Issue #14: Put the six.moves package in sys.modules based on the name six is
+ imported under.
+
+- Fix Jython detection.
+
+- Pull request #4: Add email_mime_multipart, email_mime_text, and
+ email_mime_base to six.moves.
+
+1.2.0
+-----
+
+- Issue #13: Make iterkeys/itervalues/iteritems return iterators on Python 3
+ instead of iterables.
+
+- Issue #11: Fix maxsize support on Jython.
+
+- Add six.next() as an alias for six.advance_iterator().
+
+- Use the builtin next() function for advance_iterator() where it is available
+ (2.6+), not just Python 3.
+
+- Add the Iterator class for writing portable iterators.
+
+1.1.0
+-----
+
+- Add the int2byte function.
+
+- Add compatibility mappings for iterators over the keys, values, and items of a
+ dictionary.
+
+- Fix six.MAXSIZE on platforms where sizeof(long) != sizeof(Py_ssize_t).
+
+- Issue #3: Add six.moves mappings for filter, map, and zip.
+
+1.0.0
+-----
+
+- Issue #2: u() on Python 2.x now resolves unicode escapes.
+
+- Expose an API for adding mappings to six.moves.
+
+1.0 beta 1
+----------
+
+- Reworked six into one .py file. This breaks imports. Please tell me if you
+ are interested in an import compatibility layer.
diff --git a/testing/web-platform/tests/tools/third_party/six/LICENSE b/testing/web-platform/tests/tools/third_party/six/LICENSE
new file mode 100644
index 0000000000..4b05a54526
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/LICENSE
@@ -0,0 +1,18 @@
+Copyright (c) 2010-2019 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/six/MANIFEST.in b/testing/web-platform/tests/tools/third_party/six/MANIFEST.in
new file mode 100644
index 0000000000..b924e068ee
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/MANIFEST.in
@@ -0,0 +1,6 @@
+include CHANGES
+include LICENSE
+include test_six.py
+
+recursive-include documentation *
+prune documentation/_build
diff --git a/testing/web-platform/tests/tools/third_party/six/README.rst b/testing/web-platform/tests/tools/third_party/six/README.rst
new file mode 100644
index 0000000000..a99e6f5585
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/README.rst
@@ -0,0 +1,32 @@
+.. image:: https://img.shields.io/pypi/v/six.svg
+ :target: https://pypi.org/project/six/
+ :alt: six on PyPI
+
+.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master
+ :target: https://travis-ci.org/benjaminp/six
+ :alt: six on TravisCI
+
+.. image:: https://readthedocs.org/projects/six/badge/?version=latest
+ :target: https://six.readthedocs.io/
+ :alt: six's documentation on Read the Docs
+
+.. image:: https://img.shields.io/badge/license-MIT-green.svg
+ :target: https://github.com/benjaminp/six/blob/master/LICENSE
+ :alt: MIT License badge
+
+Six is a Python 2 and 3 compatibility library. It provides utility functions
+for smoothing over the differences between the Python versions with the goal of
+writing Python code that is compatible on both Python versions. See the
+documentation for more information on what is provided.
+
+Six supports every Python version since 2.6. It is contained in only one Python
+file, so it can be easily copied into your project. (The copyright and license
+notice must be retained.)
+
+Online documentation is at https://six.readthedocs.io/.
+
+Bugs can be reported to https://github.com/benjaminp/six. The code can also
+be found there.
+
+For questions about six or porting in general, email the python-porting mailing
+list: https://mail.python.org/mailman/listinfo/python-porting
diff --git a/testing/web-platform/tests/tools/third_party/six/documentation/Makefile b/testing/web-platform/tests/tools/third_party/six/documentation/Makefile
new file mode 100644
index 0000000000..eebafcd6d6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/documentation/Makefile
@@ -0,0 +1,130 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/six.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/six.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/six"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/six"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ make -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/testing/web-platform/tests/tools/third_party/six/documentation/conf.py b/testing/web-platform/tests/tools/third_party/six/documentation/conf.py
new file mode 100644
index 0000000000..b3d1328adc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/documentation/conf.py
@@ -0,0 +1,217 @@
+# -*- coding: utf-8 -*-
+#
+# six documentation build configuration file
+
+import os
+import sys
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.append(os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "1.0"
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ["sphinx.ext.intersphinx"]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# The suffix of source filenames.
+source_suffix = ".rst"
+
+# The encoding of source files.
+#source_encoding = "utf-8-sig"
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+project = u"six"
+copyright = u"2010-2019, Benjamin Peterson"
+
+sys.path.append(os.path.abspath(os.path.join(".", "..")))
+from six import __version__ as six_version
+sys.path.pop()
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = six_version[:-2]
+# The full version, including alpha/beta/rc tags.
+release = six_version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ["_build"]
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = "default"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'sixdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ("index", "six.tex", u"six Documentation",
+ u"Benjamin Peterson", "manual"),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ("index", "six", u"six Documentation",
+ [u"Benjamin Peterson"], 1)
+]
+
+# -- Intersphinx ---------------------------------------------------------------
+
+intersphinx_mapping = {"py2" : ("https://docs.python.org/2/", None),
+ "py3" : ("https://docs.python.org/3/", None)}
diff --git a/testing/web-platform/tests/tools/third_party/six/documentation/index.rst b/testing/web-platform/tests/tools/third_party/six/documentation/index.rst
new file mode 100644
index 0000000000..b7ec2754ec
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/documentation/index.rst
@@ -0,0 +1,875 @@
+Six: Python 2 and 3 Compatibility Library
+=========================================
+
+.. module:: six
+ :synopsis: Python 2 and 3 compatibility
+
+.. moduleauthor:: Benjamin Peterson <benjamin@python.org>
+.. sectionauthor:: Benjamin Peterson <benjamin@python.org>
+
+
+Six provides simple utilities for wrapping over differences between Python 2 and
+Python 3. It is intended to support codebases that work on both Python 2 and 3
+without modification. six consists of only one Python file, so it is painless
+to copy into a project.
+
+Six can be downloaded on `PyPI <https://pypi.org/project/six/>`_. Its bug
+tracker and code hosting is on `GitHub <https://github.com/benjaminp/six>`_.
+
+The name, "six", comes from the fact that 2*3 equals 6. Why not addition?
+Multiplication is more powerful, and, anyway, "five" has already been snatched
+away by the (admittedly now moribund) Zope Five project.
+
+
+Indices and tables
+------------------
+
+* :ref:`genindex`
+* :ref:`search`
+
+
+Package contents
+----------------
+
+.. data:: PY2
+
+ A boolean indicating if the code is running on Python 2.
+
+.. data:: PY3
+
+ A boolean indicating if the code is running on Python 3.
+
+
+Constants
+>>>>>>>>>
+
+Six provides constants that may differ between Python versions. Ones ending in
+``_types`` are mostly useful as the second argument to ``isinstance`` or
+``issubclass``.
+
+
+.. data:: class_types
+
+ Possible class types. In Python 2, this encompasses old-style
+ :data:`py2:types.ClassType` and new-style ``type`` classes. In Python 3,
+ this is just ``type``.
+
+
+.. data:: integer_types
+
+ Possible integer types. In Python 2, this is :func:`py2:long` and
+ :func:`py2:int`, and in Python 3, just :func:`py3:int`.
+
+
+.. data:: string_types
+
+ Possible types for text data. This is :func:`py2:basestring` in Python 2 and
+ :func:`py3:str` in Python 3.
+
+
+.. data:: text_type
+
+ Type for representing (Unicode) textual data. This is :func:`py2:unicode` in
+ Python 2 and :func:`py3:str` in Python 3.
+
+
+.. data:: binary_type
+
+ Type for representing binary data. This is :func:`py2:str` in Python 2 and
+ :func:`py3:bytes` in Python 3. Python 2.6 and 2.7 include ``bytes`` as a
+ builtin alias of ``str``, so six’s version is only necessary for Python 2.5
+ compatibility.
+
+
+.. data:: MAXSIZE
+
+ The maximum size of a container like :func:`py3:list` or :func:`py3:dict`.
+ This is equivalent to :data:`py3:sys.maxsize` in Python 2.6 and later
+ (including 3.x). Note, this is temptingly similar to, but not the same as
+ :data:`py2:sys.maxint` in Python 2. There is no direct equivalent to
+ :data:`py2:sys.maxint` in Python 3 because its integer type has no limits
+ aside from memory.
+
+
+Here's example usage of the module::
+
+ import six
+
+ def dispatch_types(value):
+ if isinstance(value, six.integer_types):
+ handle_integer(value)
+ elif isinstance(value, six.class_types):
+ handle_class(value)
+ elif isinstance(value, six.string_types):
+ handle_string(value)
+
+
+Object model compatibility
+>>>>>>>>>>>>>>>>>>>>>>>>>>
+
+Python 3 renamed the attributes of several interpreter data structures. The
+following accessors are available. Note that the recommended way to inspect
+functions and methods is the stdlib :mod:`py3:inspect` module.
+
+
+.. function:: get_unbound_function(meth)
+
+ Get the function out of unbound method *meth*. In Python 3, unbound methods
+ don't exist, so this function just returns *meth* unchanged. Example
+ usage::
+
+ from six import get_unbound_function
+
+ class X(object):
+ def method(self):
+ pass
+ method_function = get_unbound_function(X.method)
+
+
+.. function:: get_method_function(meth)
+
+ Get the function out of method object *meth*.
+
+
+.. function:: get_method_self(meth)
+
+ Get the ``self`` of bound method *meth*.
+
+
+.. function:: get_function_closure(func)
+
+ Get the closure (list of cells) associated with *func*. This is equivalent
+ to ``func.__closure__`` on Python 2.6+ and ``func.func_closure`` on Python
+ 2.5.
+
+
+.. function:: get_function_code(func)
+
+ Get the code object associated with *func*. This is equivalent to
+ ``func.__code__`` on Python 2.6+ and ``func.func_code`` on Python 2.5.
+
+
+.. function:: get_function_defaults(func)
+
+ Get the defaults tuple associated with *func*. This is equivalent to
+ ``func.__defaults__`` on Python 2.6+ and ``func.func_defaults`` on Python
+ 2.5.
+
+
+.. function:: get_function_globals(func)
+
+ Get the globals of *func*. This is equivalent to ``func.__globals__`` on
+ Python 2.6+ and ``func.func_globals`` on Python 2.5.
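+
+For illustration, a minimal, hedged sketch of the function accessors above
+(the sample function is only an example)::
+
+   import six
+
+   def add(x, y=1):
+       return x + y
+
+   assert six.get_function_defaults(add) == (1,)
+   code = six.get_function_code(add)            # the function's code object
+   assert "add" in six.get_function_globals(add)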
+
+
+.. function:: next(it)
+ advance_iterator(it)
+
+ Get the next item of iterator *it*. :exc:`py3:StopIteration` is raised if
+ the iterator is exhausted. This is a replacement for calling ``it.next()``
+ in Python 2 and ``next(it)`` in Python 3. Python 2.6 and above have a
+ builtin ``next`` function, so six's version is only necessary for Python 2.5
+ compatibility.
+
+
+.. function:: callable(obj)
+
+ Check if *obj* can be called. Note ``callable`` has returned in Python 3.2,
+ so using six's version is only necessary when supporting Python 3.0 or 3.1.
+
+
+.. function:: iterkeys(dictionary, **kwargs)
+
+ Returns an iterator over *dictionary*\'s keys. This replaces
+ ``dictionary.iterkeys()`` on Python 2 and ``dictionary.keys()`` on
+ Python 3. *kwargs* are passed through to the underlying method.
+
+
+.. function:: itervalues(dictionary, **kwargs)
+
+ Returns an iterator over *dictionary*\'s values. This replaces
+ ``dictionary.itervalues()`` on Python 2 and ``dictionary.values()`` on
+ Python 3. *kwargs* are passed through to the underlying method.
+
+
+.. function:: iteritems(dictionary, **kwargs)
+
+ Returns an iterator over *dictionary*\'s items. This replaces
+ ``dictionary.iteritems()`` on Python 2 and ``dictionary.items()`` on
+ Python 3. *kwargs* are passed through to the underlying method.
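+
+   A minimal, illustrative sketch (assuming a plain ``dict``)::
+
+      import six
+
+      counts = {"a": 1, "b": 2}
+      assert sorted(six.iteritems(counts)) == [("a", 1), ("b", 2)]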
+
+
+.. function:: iterlists(dictionary, **kwargs)
+
+ Calls ``dictionary.iterlists()`` on Python 2 and ``dictionary.lists()`` on
+ Python 3. No builtin Python mapping type has such a method; this method is
+ intended for use with multi-valued dictionaries like `Werkzeug's
+ <http://werkzeug.pocoo.org/docs/datastructures/#werkzeug.datastructures.MultiDict>`_.
+ *kwargs* are passed through to the underlying method.
+
+
+.. function:: viewkeys(dictionary)
+
+ Return a view over *dictionary*\'s keys. This replaces
+ :meth:`py2:dict.viewkeys` on Python 2.7 and :meth:`py3:dict.keys` on
+ Python 3.
+
+
+.. function:: viewvalues(dictionary)
+
+ Return a view over *dictionary*\'s values. This replaces
+ :meth:`py2:dict.viewvalues` on Python 2.7 and :meth:`py3:dict.values` on
+ Python 3.
+
+
+.. function:: viewitems(dictionary)
+
+ Return a view over *dictionary*\'s items. This replaces
+ :meth:`py2:dict.viewitems` on Python 2.7 and :meth:`py3:dict.items` on
+ Python 3.
+
+
+.. function:: create_bound_method(func, obj)
+
+ Return a method object wrapping *func* and bound to *obj*. On both Python 2
+ and 3, this will return a :func:`py3:types.MethodType` object. The reason
+ this wrapper exists is that on Python 2, the ``MethodType`` constructor
+ requires the *obj*'s class to be passed.
+
+
+.. function:: create_unbound_method(func, cls)
+
+ Return an unbound method object wrapping *func*. In Python 2, this will
+ return a :func:`py2:types.MethodType` object. In Python 3, unbound methods
+ do not exist and this wrapper will simply return *func*.
+
+
+.. class:: Iterator
+
+ A class for making portable iterators. The intention is that it be subclassed
+ and subclasses provide a ``__next__`` method. In Python 2, :class:`Iterator`
+ has one method: ``next``. It simply delegates to ``__next__``. An alternate
+ way to do this would be to simply alias ``next`` to ``__next__``. However,
+ this interacts badly with subclasses that override
+ ``__next__``. :class:`Iterator` is empty on Python 3. (In fact, it is just
+ aliased to :class:`py3:object`.)
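+
+   A brief, hedged sketch of a portable iterator (the class is illustrative)::
+
+      import six
+
+      class Countdown(six.Iterator):
+          def __init__(self, n):
+              self.n = n
+
+          def __iter__(self):
+              return self
+
+          def __next__(self):
+              if self.n <= 0:
+                  raise StopIteration
+              self.n -= 1
+              return self.n
+
+      assert list(Countdown(3)) == [2, 1, 0]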
+
+
+.. decorator:: wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES)
+
+ This is exactly the :func:`py3:functools.wraps` decorator, but it sets the
+ ``__wrapped__`` attribute on what it decorates as :func:`py3:functools.wraps`
+ does on Python versions after 3.2.
+
+
+Syntax compatibility
+>>>>>>>>>>>>>>>>>>>>
+
+These functions smooth over operations which have different syntaxes between
+Python 2 and 3.
+
+
+.. function:: exec_(code, globals=None, locals=None)
+
+ Execute *code* in the scope of *globals* and *locals*. *code* can be a
+ string or a code object. If *globals* or *locals* are not given, they will
+ default to the scope of the caller. If just *globals* is given, it will also
+ be used as *locals*.
+
+ .. note::
+
+ Python 3's :func:`py3:exec` doesn't take keyword arguments, so calling
+ :func:`exec` with them should be avoided.
+
+
+.. function:: print_(*args, *, file=sys.stdout, end="\\n", sep=" ", flush=False)
+
+ Print *args* into *file*. Each argument will be separated with *sep* and
+ *end* will be written to the file after the last argument is printed. If
+ *flush* is true, ``file.flush()`` will be called after all data is written.
+
+ .. note::
+
+ In Python 2, this function imitates Python 3's :func:`py3:print` by not
+ having softspace support. If you don't know what that is, you're probably
+ ok. :)
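+
+   For example (a small sketch; the values are arbitrary)::
+
+      from six import print_
+
+      print_("answer:", 42, sep=" ", end="\n", flush=True)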
+
+
+.. function:: raise_from(exc_value, exc_value_from)
+
+ Raise an exception from a context. On Python 3, this is equivalent to
+ ``raise exc_value from exc_value_from``. On Python 2, which does not support
+ exception chaining, it is equivalent to ``raise exc_value``.
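+
+   A hedged sketch of typical use (the exception types are arbitrary)::
+
+      import six
+
+      def parse_int(value):
+          try:
+              return int(value)
+          except ValueError as exc:
+              six.raise_from(RuntimeError("could not parse %r" % value), exc)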
+
+
+.. function:: reraise(exc_type, exc_value, exc_traceback=None)
+
+ Reraise an exception, possibly with a different traceback. In the simple
+ case, ``reraise(*sys.exc_info())`` with an active exception (in an except
+ block) reraises the current exception with the last traceback. A different
+ traceback can be specified with the *exc_traceback* parameter. Note that
+ since the exception reraising is done within the :func:`reraise` function,
+ Python will attach the call frame of :func:`reraise` to whatever traceback is
+ raised.
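+
+   For example, a minimal sketch of the simple case (the wrapper name is
+   illustrative)::
+
+      import sys
+
+      import six
+
+      def call_and_reraise(func):
+          try:
+              return func()
+          except Exception:
+              # Re-raise the active exception with its original traceback.
+              six.reraise(*sys.exc_info())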
+
+
+.. function:: with_metaclass(metaclass, *bases)
+
+ Create a new class with base classes *bases* and metaclass *metaclass*. This
+ is designed to be used in class declarations like this: ::
+
+ from six import with_metaclass
+
+ class Meta(type):
+ pass
+
+ class Base(object):
+ pass
+
+ class MyClass(with_metaclass(Meta, Base)):
+ pass
+
+ Another way to set a metaclass on a class is with the :func:`add_metaclass`
+ decorator.
+
+
+.. decorator:: add_metaclass(metaclass)
+
+ Class decorator that replaces a normally-constructed class with a
+ metaclass-constructed one. Example usage: ::
+
+ @add_metaclass(Meta)
+ class MyClass(object):
+ pass
+
+ That code produces a class equivalent to ::
+
+ class MyClass(object, metaclass=Meta):
+ pass
+
+ on Python 3 or ::
+
+ class MyClass(object):
+ __metaclass__ = Meta
+
+ on Python 2.
+
+ Note that class decorators require Python 2.6. However, the effect of the
+ decorator can be emulated on Python 2.5 like so::
+
+ class MyClass(object):
+ pass
+ MyClass = add_metaclass(Meta)(MyClass)
+
+
+Binary and text data
+>>>>>>>>>>>>>>>>>>>>
+
+Python 3 enforces the distinction between byte strings and text strings far more
+rigorously than Python 2 does; binary data cannot be automatically coerced to
+or from text data. six provides several functions to assist in classifying
+string data in all Python versions.
+
+
+.. function:: b(data)
+
+ A "fake" bytes literal. *data* should always be a normal string literal. In
+ Python 2, :func:`b` returns an 8-bit string. In Python 3, *data* is encoded
+ with the latin-1 encoding to bytes.
+
+
+ .. note::
+
+ Since all Python versions 2.6 and after support the ``b`` prefix,
+ code without 2.5 support doesn't need :func:`b`.
+
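+   For example::
+
+      import six
+
+      data = six.b("abc")  # b"abc" on both Python 2 and 3
+      assert isinstance(data, six.binary_type)
+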
+
+.. function:: u(text)
+
+ A "fake" unicode literal. *text* should always be a normal string literal.
+ In Python 2, :func:`u` returns unicode, and in Python 3, a string. Also, in
+ Python 2, the string is decoded with the ``unicode-escape`` codec, which
+ allows unicode escapes to be used in it.
+
+
+ .. note::
+
+      In Python 3.3, the ``u`` prefix has been reintroduced. Code that only
+      supports Python 3.3 and higher thus does not need :func:`u`.
+
+ .. note::
+
+ On Python 2, :func:`u` doesn't know what the encoding of the literal
+ is. Each byte is converted directly to the unicode codepoint of the same
+ value. Because of this, it's only safe to use :func:`u` with strings of
+ ASCII data.
+
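+   For example (writing the non-ASCII character as an escape keeps the
+   literal itself ASCII-only, per the note above)::
+
+      import six
+
+      text = six.u("caf\u00e9")  # u"café" on Python 2, "café" on Python 3
+      assert isinstance(text, six.text_type)
+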
+
+.. function:: unichr(c)
+
+ Return the (Unicode) string representing the codepoint *c*. This is
+ equivalent to :func:`py2:unichr` on Python 2 and :func:`py3:chr` on Python 3.
+
+
+.. function:: int2byte(i)
+
+ Converts *i* to a byte. *i* must be in ``range(0, 256)``. This is
+ equivalent to :func:`py2:chr` in Python 2 and ``bytes((i,))`` in Python 3.
+
+
+.. function:: byte2int(bs)
+
+ Converts the first byte of *bs* to an integer. This is equivalent to
+ ``ord(bs[0])`` on Python 2 and ``bs[0]`` on Python 3.
+
+
+.. function:: indexbytes(buf, i)
+
+ Return the byte at index *i* of *buf* as an integer. This is equivalent to
+ indexing a bytes object in Python 3.
+
+
+.. function:: iterbytes(buf)
+
+ Return an iterator over bytes in *buf* as integers. This is equivalent to
+ a bytes object iterator in Python 3.
+
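+   A short sketch combining the byte helpers above::
+
+      import six
+
+      buf = six.int2byte(65) + six.b("BC")  # b"ABC"
+      assert six.byte2int(buf) == 65
+      assert six.indexbytes(buf, 1) == 66
+      assert list(six.iterbytes(buf)) == [65, 66, 67]
+      assert six.unichr(67) == six.u("C")
+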
+
+.. function:: ensure_binary(s, encoding='utf-8', errors='strict')
+
+   Coerce *s* to :data:`binary_type`. *encoding* and *errors* are the same as
+   for :meth:`py3:str.encode`.
+
+
+.. function:: ensure_str(s, encoding='utf-8', errors='strict')
+
+   Coerce *s* to ``str``. *encoding* and *errors* are the same as for
+   :meth:`py3:str.encode`.
+
+
+.. function:: ensure_text(s, encoding='utf-8', errors='strict')
+
+   Coerce *s* to :data:`text_type`. *encoding* and *errors* are the same as
+   for :meth:`py3:str.encode`.
+
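+   A quick sketch covering all three ``ensure_*`` helpers::
+
+      import six
+
+      assert six.ensure_binary(six.u("abc")) == six.b("abc")
+      assert six.ensure_text(six.b("abc")) == six.u("abc")
+      assert isinstance(six.ensure_str("abc"), str)
+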
+
+.. data:: StringIO
+
+ This is a fake file object for textual data. It's an alias for
+ :class:`py2:StringIO.StringIO` in Python 2 and :class:`py3:io.StringIO` in
+ Python 3.
+
+
+.. data:: BytesIO
+
+ This is a fake file object for binary data. In Python 2, it's an alias for
+ :class:`py2:StringIO.StringIO`, but in Python 3, it's an alias for
+ :class:`py3:io.BytesIO`.
+
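+   For example::
+
+      import six
+
+      binary_buffer = six.BytesIO(six.b("spam"))
+      assert binary_buffer.read() == six.b("spam")
+
+      text_buffer = six.StringIO(six.u("eggs"))
+      assert text_buffer.read() == six.u("eggs")
+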
+
+.. decorator:: python_2_unicode_compatible
+
+ A class decorator that takes a class defining a ``__str__`` method. On
+ Python 3, the decorator does nothing. On Python 2, it aliases the
+ ``__str__`` method to ``__unicode__`` and creates a new ``__str__`` method
+ that returns the result of ``__unicode__()`` encoded with UTF-8.
+
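+   A minimal sketch (the ``Greeting`` class is purely illustrative)::
+
+      import six
+
+      @six.python_2_unicode_compatible
+      class Greeting(object):
+          def __str__(self):
+              return six.u("hello")
+
+      # str(Greeting()) is UTF-8 bytes on Python 2 and text on Python 3.
+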
+
+unittest assertions
+>>>>>>>>>>>>>>>>>>>
+
+Six contains compatibility shims for unittest assertions that have been renamed.
+The parameters are the same as their aliases, but you must pass the test case
+instance (``self``) as the first argument. For example::
+
+ import six
+ import unittest
+
+ class TestAssertCountEqual(unittest.TestCase):
+ def test(self):
+ six.assertCountEqual(self, (1, 2), [2, 1])
+
+Note these functions are only available on Python 2.7 or later.
+
+.. function:: assertCountEqual()
+
+ Alias for :meth:`~py3:unittest.TestCase.assertCountEqual` on Python 3 and
+ :meth:`~py2:unittest.TestCase.assertItemsEqual` on Python 2.
+
+
+.. function:: assertRaisesRegex()
+
+ Alias for :meth:`~py3:unittest.TestCase.assertRaisesRegex` on Python 3 and
+ :meth:`~py2:unittest.TestCase.assertRaisesRegexp` on Python 2.
+
+
+.. function:: assertRegex()
+
+ Alias for :meth:`~py3:unittest.TestCase.assertRegex` on Python 3 and
+ :meth:`~py2:unittest.TestCase.assertRegexpMatches` on Python 2.
+
+
+Renamed modules and attributes compatibility
+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+
+.. module:: six.moves
+ :synopsis: Renamed modules and attributes compatibility
+
+Python 3 reorganized the standard library and moved several functions to
+different modules. Six provides a consistent interface to them through the fake
+:mod:`six.moves` module. For example, to load the module for parsing HTML on
+Python 2 or 3, write::
+
+ from six.moves import html_parser
+
+Similarly, to get the function to reload modules, which was moved from the
+builtin module to the ``importlib`` module, use::
+
+ from six.moves import reload_module
+
+For the most part, :mod:`six.moves` aliases are the names of the modules in
+Python 3. When the new Python 3 name is a package, the components of the name
+are separated by underscores. For example, ``html.parser`` becomes
+``html_parser``. In some cases where several modules have been combined, the
+Python 2 name is retained. This is so the appropriate modules can be found when
+running on Python 2. For example, ``BaseHTTPServer``, which is in
+``http.server`` in Python 3, is aliased as ``BaseHTTPServer``.
+
+Some modules which had two implementations have been merged in Python 3. For
+example, ``cPickle`` no longer exists in Python 3; it was merged with
+``pickle``. In these cases, fetching the fast version will load the fast one on
+Python 2 and the merged module in Python 3.
+
+The :mod:`py2:urllib`, :mod:`py2:urllib2`, and :mod:`py2:urlparse` modules have
+been combined in the :mod:`py3:urllib` package in Python 3. The
+:mod:`six.moves.urllib` package is a version-independent location for this
+functionality; its structure mimics the structure of the Python 3
+:mod:`py3:urllib` package.
+
+.. note::
+
+ In order to make imports of the form::
+
+ from six.moves.cPickle import loads
+
+ work, six places special proxy objects in :data:`py3:sys.modules`. These
+ proxies lazily load the underlying module when an attribute is fetched. This
+ will fail if the underlying module is not available in the Python
+ interpreter. For example, ``sys.modules["six.moves.winreg"].LoadKey`` would
+ fail on any non-Windows platform. Unfortunately, some applications try to
+ load attributes on every module in :data:`py3:sys.modules`. six mitigates
+ this problem for some applications by pretending attributes on unimportable
+ modules do not exist. This hack does not work in every case, though. If you are
+   encountering problems with the lazy modules and don't use any ``from`` imports
+   directly from ``six.moves`` modules, you can work around the issue by removing
+ the six proxy modules::
+
+ d = [name for name in sys.modules if name.startswith("six.moves.")]
+ for name in d:
+ del sys.modules[name]
+
+Supported renames:
+
++------------------------------+-------------------------------------+---------------------------------------+
+| Name | Python 2 name | Python 3 name |
++==============================+=====================================+=======================================+
+| ``builtins`` | :mod:`py2:__builtin__` | :mod:`py3:builtins` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``configparser`` | :mod:`py2:ConfigParser` | :mod:`py3:configparser` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``copyreg`` | :mod:`py2:copy_reg` | :mod:`py3:copyreg` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``cPickle`` | :mod:`py2:cPickle` | :mod:`py3:pickle` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``cStringIO`` | :func:`py2:cStringIO.StringIO` | :class:`py3:io.StringIO` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``collections_abc`` | :mod:`py2:collections` | :mod:`py3:collections.abc` (3.3+) |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``dbm_gnu`` | :mod:`py2:gdbm` | :mod:`py3:dbm.gnu` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``dbm_ndbm`` | :mod:`py2:dbm` | :mod:`py3:dbm.ndbm` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``_dummy_thread`` | :mod:`py2:dummy_thread` | :mod:`py3:_dummy_thread` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``email_mime_base`` | :mod:`py2:email.MIMEBase` | :mod:`py3:email.mime.base` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``email_mime_image`` | :mod:`py2:email.MIMEImage` | :mod:`py3:email.mime.image` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``email_mime_multipart`` | :mod:`py2:email.MIMEMultipart` | :mod:`py3:email.mime.multipart` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``email_mime_nonmultipart`` | :mod:`py2:email.MIMENonMultipart` | :mod:`py3:email.mime.nonmultipart` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``email_mime_text`` | :mod:`py2:email.MIMEText` | :mod:`py3:email.mime.text` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``filter`` | :func:`py2:itertools.ifilter` | :func:`py3:filter` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``filterfalse`` | :func:`py2:itertools.ifilterfalse` | :func:`py3:itertools.filterfalse` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``getcwd`` | :func:`py2:os.getcwdu` | :func:`py3:os.getcwd` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``getcwdb`` | :func:`py2:os.getcwd` | :func:`py3:os.getcwdb` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``getoutput`` | :func:`py2:commands.getoutput` | :func:`py3:subprocess.getoutput` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``http_cookiejar`` | :mod:`py2:cookielib` | :mod:`py3:http.cookiejar` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``http_cookies`` | :mod:`py2:Cookie` | :mod:`py3:http.cookies` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``html_entities`` | :mod:`py2:htmlentitydefs` | :mod:`py3:html.entities` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``html_parser`` | :mod:`py2:HTMLParser` | :mod:`py3:html.parser` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``http_client`` | :mod:`py2:httplib` | :mod:`py3:http.client` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``BaseHTTPServer`` | :mod:`py2:BaseHTTPServer` | :mod:`py3:http.server` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``CGIHTTPServer`` | :mod:`py2:CGIHTTPServer` | :mod:`py3:http.server` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``SimpleHTTPServer`` | :mod:`py2:SimpleHTTPServer` | :mod:`py3:http.server` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``input`` | :func:`py2:raw_input` | :func:`py3:input` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``intern`` | :func:`py2:intern` | :func:`py3:sys.intern` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``map`` | :func:`py2:itertools.imap` | :func:`py3:map` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``queue`` | :mod:`py2:Queue` | :mod:`py3:queue` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``range`` | :func:`py2:xrange` | :func:`py3:range` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``reduce`` | :func:`py2:reduce` | :func:`py3:functools.reduce` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``reload_module`` | :func:`py2:reload` | :func:`py3:imp.reload`, |
+| | | :func:`py3:importlib.reload` |
+| | | on Python 3.4+ |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``reprlib`` | :mod:`py2:repr` | :mod:`py3:reprlib` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``shlex_quote`` | :mod:`py2:pipes.quote` | :mod:`py3:shlex.quote` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``socketserver`` | :mod:`py2:SocketServer` | :mod:`py3:socketserver` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``_thread`` | :mod:`py2:thread` | :mod:`py3:_thread` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter`` | :mod:`py2:Tkinter` | :mod:`py3:tkinter` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_dialog`` | :mod:`py2:Dialog` | :mod:`py3:tkinter.dialog` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_filedialog`` | :mod:`py2:FileDialog` | :mod:`py3:tkinter.FileDialog` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_scrolledtext`` | :mod:`py2:ScrolledText` | :mod:`py3:tkinter.scrolledtext` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_simpledialog`` | :mod:`py2:SimpleDialog` | :mod:`py3:tkinter.simpledialog` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_ttk`` | :mod:`py2:ttk` | :mod:`py3:tkinter.ttk` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_tix`` | :mod:`py2:Tix` | :mod:`py3:tkinter.tix` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_constants`` | :mod:`py2:Tkconstants` | :mod:`py3:tkinter.constants` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_dnd`` | :mod:`py2:Tkdnd` | :mod:`py3:tkinter.dnd` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_colorchooser`` | :mod:`py2:tkColorChooser` | :mod:`py3:tkinter.colorchooser` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_commondialog`` | :mod:`py2:tkCommonDialog` | :mod:`py3:tkinter.commondialog` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_tkfiledialog`` | :mod:`py2:tkFileDialog` | :mod:`py3:tkinter.filedialog` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_font`` | :mod:`py2:tkFont` | :mod:`py3:tkinter.font` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_messagebox`` | :mod:`py2:tkMessageBox` | :mod:`py3:tkinter.messagebox` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``tkinter_tksimpledialog`` | :mod:`py2:tkSimpleDialog` | :mod:`py3:tkinter.simpledialog` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``urllib.parse`` | See :mod:`six.moves.urllib.parse` | :mod:`py3:urllib.parse` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``urllib.error`` | See :mod:`six.moves.urllib.error` | :mod:`py3:urllib.error` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``urllib.request`` | See :mod:`six.moves.urllib.request` | :mod:`py3:urllib.request` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``urllib.response`` | See :mod:`six.moves.urllib.response`| :mod:`py3:urllib.response` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``urllib.robotparser`` | :mod:`py2:robotparser` | :mod:`py3:urllib.robotparser` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``urllib_robotparser`` | :mod:`py2:robotparser` | :mod:`py3:urllib.robotparser` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``UserDict`` | :class:`py2:UserDict.UserDict` | :class:`py3:collections.UserDict` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``UserList`` | :class:`py2:UserList.UserList` | :class:`py3:collections.UserList` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``UserString`` | :class:`py2:UserString.UserString` | :class:`py3:collections.UserString` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``winreg`` | :mod:`py2:_winreg` | :mod:`py3:winreg` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``xmlrpc_client`` | :mod:`py2:xmlrpclib` | :mod:`py3:xmlrpc.client` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``xmlrpc_server`` | :mod:`py2:SimpleXMLRPCServer` | :mod:`py3:xmlrpc.server` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``xrange`` | :func:`py2:xrange` | :func:`py3:range` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``zip`` | :func:`py2:itertools.izip` | :func:`py3:zip` |
++------------------------------+-------------------------------------+---------------------------------------+
+| ``zip_longest`` | :func:`py2:itertools.izip_longest` | :func:`py3:itertools.zip_longest` |
++------------------------------+-------------------------------------+---------------------------------------+
+
+urllib parse
+<<<<<<<<<<<<
+
+.. module:: six.moves.urllib.parse
+ :synopsis: Stuff from :mod:`py2:urlparse` and :mod:`py2:urllib` in Python 2 and :mod:`py3:urllib.parse` in Python 3
+
+Contains functions from Python 3's :mod:`py3:urllib.parse` and Python 2's:
+
+:mod:`py2:urlparse`:
+
+* :func:`py2:urlparse.ParseResult`
+* :func:`py2:urlparse.SplitResult`
+* :func:`py2:urlparse.urlparse`
+* :func:`py2:urlparse.urlunparse`
+* :func:`py2:urlparse.parse_qs`
+* :func:`py2:urlparse.parse_qsl`
+* :func:`py2:urlparse.urljoin`
+* :func:`py2:urlparse.urldefrag`
+* :func:`py2:urlparse.urlsplit`
+* :func:`py2:urlparse.urlunsplit`
+* :func:`py2:urlparse.splitquery`
+* :func:`py2:urlparse.uses_fragment`
+* :func:`py2:urlparse.uses_netloc`
+* :func:`py2:urlparse.uses_params`
+* :func:`py2:urlparse.uses_query`
+* :func:`py2:urlparse.uses_relative`
+
+and :mod:`py2:urllib`:
+
+* :func:`py2:urllib.quote`
+* :func:`py2:urllib.quote_plus`
+* :func:`py2:urllib.splittag`
+* :func:`py2:urllib.splituser`
+* :func:`py2:urllib.splitvalue`
+* :func:`py2:urllib.unquote` (also exposed as :func:`py3:urllib.parse.unquote_to_bytes`)
+* :func:`py2:urllib.unquote_plus`
+* :func:`py2:urllib.urlencode`
+
+
+urllib error
+<<<<<<<<<<<<
+
+.. module:: six.moves.urllib.error
+ :synopsis: Stuff from :mod:`py2:urllib` and :mod:`py2:urllib2` in Python 2 and :mod:`py3:urllib.error` in Python 3
+
+Contains exceptions from Python 3's :mod:`py3:urllib.error` and Python 2's:
+
+:mod:`py2:urllib`:
+
+* :exc:`py2:urllib.ContentTooShortError`
+
+and :mod:`py2:urllib2`:
+
+* :exc:`py2:urllib2.URLError`
+* :exc:`py2:urllib2.HTTPError`
+
+
+urllib request
+<<<<<<<<<<<<<<
+
+.. module:: six.moves.urllib.request
+ :synopsis: Stuff from :mod:`py2:urllib` and :mod:`py2:urllib2` in Python 2 and :mod:`py3:urllib.request` in Python 3
+
+Contains items from Python 3's :mod:`py3:urllib.request` and Python 2's:
+
+:mod:`py2:urllib`:
+
+* :func:`py2:urllib.pathname2url`
+* :func:`py2:urllib.url2pathname`
+* :func:`py2:urllib.getproxies`
+* :func:`py2:urllib.urlretrieve`
+* :func:`py2:urllib.urlcleanup`
+* :class:`py2:urllib.URLopener`
+* :class:`py2:urllib.FancyURLopener`
+* :func:`py2:urllib.proxy_bypass`
+
+and :mod:`py2:urllib2`:
+
+* :func:`py2:urllib2.urlopen`
+* :func:`py2:urllib2.install_opener`
+* :func:`py2:urllib2.build_opener`
+* :func:`py2:urllib2.parse_http_list`
+* :func:`py2:urllib2.parse_keqv_list`
+* :class:`py2:urllib2.Request`
+* :class:`py2:urllib2.OpenerDirector`
+* :class:`py2:urllib2.HTTPDefaultErrorHandler`
+* :class:`py2:urllib2.HTTPRedirectHandler`
+* :class:`py2:urllib2.HTTPCookieProcessor`
+* :class:`py2:urllib2.ProxyHandler`
+* :class:`py2:urllib2.BaseHandler`
+* :class:`py2:urllib2.HTTPPasswordMgr`
+* :class:`py2:urllib2.HTTPPasswordMgrWithDefaultRealm`
+* :class:`py2:urllib2.AbstractBasicAuthHandler`
+* :class:`py2:urllib2.HTTPBasicAuthHandler`
+* :class:`py2:urllib2.ProxyBasicAuthHandler`
+* :class:`py2:urllib2.AbstractDigestAuthHandler`
+* :class:`py2:urllib2.HTTPDigestAuthHandler`
+* :class:`py2:urllib2.ProxyDigestAuthHandler`
+* :class:`py2:urllib2.HTTPHandler`
+* :class:`py2:urllib2.HTTPSHandler`
+* :class:`py2:urllib2.FileHandler`
+* :class:`py2:urllib2.FTPHandler`
+* :class:`py2:urllib2.CacheFTPHandler`
+* :class:`py2:urllib2.UnknownHandler`
+* :class:`py2:urllib2.HTTPErrorProcessor`
+
+
+urllib response
+<<<<<<<<<<<<<<<
+
+.. module:: six.moves.urllib.response
+ :synopsis: Stuff from :mod:`py2:urllib` in Python 2 and :mod:`py3:urllib.response` in Python 3
+
+Contains classes from Python 3's :mod:`py3:urllib.response` and Python 2's:
+
+:mod:`py2:urllib`:
+
+* :class:`py2:urllib.addbase`
+* :class:`py2:urllib.addclosehook`
+* :class:`py2:urllib.addinfo`
+* :class:`py2:urllib.addinfourl`
+
+
+Advanced - Customizing renames
+<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
+
+.. currentmodule:: six
+
+It is possible to add additional names to the :mod:`six.moves` namespace.
+
+
+.. function:: add_move(item)
+
+ Add *item* to the :mod:`six.moves` mapping. *item* should be a
+ :class:`MovedAttribute` or :class:`MovedModule` instance.
+
+
+.. function:: remove_move(name)
+
+ Remove the :mod:`six.moves` mapping called *name*. *name* should be a
+ string.
+
+
+Instances of the following classes can be passed to :func:`add_move`. Neither
+has any public members.
+
+
+.. class:: MovedModule(name, old_mod, new_mod)
+
+ Create a mapping for :mod:`six.moves` called *name* that references different
+ modules in Python 2 and 3. *old_mod* is the name of the Python 2 module.
+ *new_mod* is the name of the Python 3 module.
+
+
+.. class:: MovedAttribute(name, old_mod, new_mod, old_attr=None, new_attr=None)
+
+ Create a mapping for :mod:`six.moves` called *name* that references different
+ attributes in Python 2 and 3. *old_mod* is the name of the Python 2 module.
+ *new_mod* is the name of the Python 3 module. If *new_attr* is not given, it
+ defaults to *old_attr*. If neither is given, they both default to *name*.
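+
+For example, a custom move can be registered and then imported from
+:mod:`six.moves` like a built-in one. This is only an illustrative sketch; the
+``mock``/``unittest.mock`` mapping assumes the third-party ``mock`` backport is
+installed when running on Python 2::
+
+    import six
+
+    six.add_move(six.MovedModule("mock", "mock", "unittest.mock"))
+    from six.moves import mock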
diff --git a/testing/web-platform/tests/tools/third_party/six/setup.cfg b/testing/web-platform/tests/tools/third_party/six/setup.cfg
new file mode 100644
index 0000000000..317e016c8c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/setup.cfg
@@ -0,0 +1,20 @@
+[bdist_wheel]
+universal = 1
+
+[flake8]
+max-line-length = 100
+ignore = F821
+
+[metadata]
+license_file = LICENSE
+
+[tool:pytest]
+minversion=2.2.0
+pep8ignore =
+ documentation/*.py ALL
+ test_six.py ALL
+
+flakes-ignore =
+ documentation/*.py ALL
+ test_six.py ALL
+ six.py UndefinedName
diff --git a/testing/web-platform/tests/tools/third_party/six/setup.py b/testing/web-platform/tests/tools/third_party/six/setup.py
new file mode 100644
index 0000000000..97c685b5a5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/setup.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2010-2019 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+from __future__ import with_statement
+
+# Six is a dependency of setuptools, so using setuptools creates a
+# circular dependency when building a Python stack from source. We
+# therefore allow falling back to distutils to install six.
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+import six
+
+six_classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 3",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: MIT License",
+ "Topic :: Software Development :: Libraries",
+ "Topic :: Utilities",
+]
+
+with open("README.rst", "r") as fp:
+ six_long_description = fp.read()
+
+setup(name="six",
+ version=six.__version__,
+ author="Benjamin Peterson",
+ author_email="benjamin@python.org",
+ url="https://github.com/benjaminp/six",
+ tests_require=["pytest"],
+ py_modules=["six"],
+ description="Python 2 and 3 compatibility utilities",
+ long_description=six_long_description,
+ license="MIT",
+ classifiers=six_classifiers,
+ python_requires=">=2.6, !=3.0.*, !=3.1.*",
+ )
diff --git a/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/INSTALLER b/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/LICENSE b/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/LICENSE
new file mode 100644
index 0000000000..de6633112c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/LICENSE
@@ -0,0 +1,18 @@
+Copyright (c) 2010-2020 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/METADATA b/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/METADATA
new file mode 100644
index 0000000000..869bf25a88
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/METADATA
@@ -0,0 +1,49 @@
+Metadata-Version: 2.1
+Name: six
+Version: 1.15.0
+Summary: Python 2 and 3 compatibility utilities
+Home-page: https://github.com/benjaminp/six
+Author: Benjamin Peterson
+Author-email: benjamin@python.org
+License: MIT
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*
+
+.. image:: https://img.shields.io/pypi/v/six.svg
+ :target: https://pypi.org/project/six/
+ :alt: six on PyPI
+
+.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master
+ :target: https://travis-ci.org/benjaminp/six
+ :alt: six on TravisCI
+
+.. image:: https://readthedocs.org/projects/six/badge/?version=latest
+ :target: https://six.readthedocs.io/
+ :alt: six's documentation on Read the Docs
+
+.. image:: https://img.shields.io/badge/license-MIT-green.svg
+ :target: https://github.com/benjaminp/six/blob/master/LICENSE
+ :alt: MIT License badge
+
+Six is a Python 2 and 3 compatibility library. It provides utility functions
+for smoothing over the differences between the Python versions with the goal of
+writing Python code that is compatible on both Python versions. See the
+documentation for more information on what is provided.
+
+Six supports Python 2.7 and 3.3+. It is contained in only one Python
+file, so it can be easily copied into your project. (The copyright and license
+notice must be retained.)
+
+Online documentation is at https://six.readthedocs.io/.
+
+Bugs can be reported to https://github.com/benjaminp/six. The code can also
+be found there.
+
+
diff --git a/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/RECORD b/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/RECORD
new file mode 100644
index 0000000000..d9754c61c4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/RECORD
@@ -0,0 +1,8 @@
+six-1.15.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+six-1.15.0.dist-info/LICENSE,sha256=i7hQxWWqOJ_cFvOkaWWtI9gq3_YPI5P8J2K2MYXo5sk,1066
+six-1.15.0.dist-info/METADATA,sha256=W6rlyoeMZHXh6srP9NXNsm0rjAf_660re8WdH5TBT8E,1795
+six-1.15.0.dist-info/RECORD,,
+six-1.15.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+six-1.15.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4
+six.py,sha256=U4Z_yv534W5CNyjY9i8V1OXY2SjAny8y2L5vDLhhThM,34159
+six.pyc,,
diff --git a/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/WHEEL b/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/WHEEL
new file mode 100644
index 0000000000..ef99c6cf32
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/top_level.txt b/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..ffe2fce498
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/six-1.15.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+six
diff --git a/testing/web-platform/tests/tools/third_party/six/six.py b/testing/web-platform/tests/tools/third_party/six/six.py
new file mode 100644
index 0000000000..83f69783d1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/six.py
@@ -0,0 +1,982 @@
+# Copyright (c) 2010-2020 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.15.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP302 finder and loader. It should be compatible
+ with Python 2.5 and all existing versions of Python3
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true, if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+ MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+ unichr = chr
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ del io
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+ _assertNotRegex = "assertNotRegex"
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+def assertNotRegex(self, *args, **kwargs):
+ return getattr(self, _assertNotRegex)(*args, **kwargs)
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_("""def reraise(tp, value, tb=None):
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
+""")
+
+
+if sys.version_info[:2] > (3,):
+ exec_("""def raise_from(value, from_value):
+ try:
+ raise value from from_value
+ finally:
+ value = None
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+    # This does exactly what the :func:`py3:functools.update_wrapper`
+    # function does on Python versions after 3.2.  It sets the ``__wrapped__``
+    # attribute on the ``wrapper`` object and it doesn't raise an error if any
+    # of the attributes mentioned in ``assigned`` and ``updated`` are missing
+    # on the ``wrapped`` object.
+ def _update_wrapper(wrapper, wrapped,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ for attr in assigned:
+ try:
+ value = getattr(wrapped, attr)
+ except AttributeError:
+ continue
+ else:
+ setattr(wrapper, attr, value)
+ for attr in updated:
+ getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+ wrapper.__wrapped__ = wrapped
+ return wrapper
+ _update_wrapper.__doc__ = functools.update_wrapper.__doc__
+
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ return functools.partial(_update_wrapper, wrapped=wrapped,
+ assigned=assigned, updated=updated)
+ wraps.__doc__ = functools.wraps.__doc__
+
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(type):
+
+ def __new__(cls, name, this_bases, d):
+ if sys.version_info[:2] >= (3, 7):
+ # This version introduced PEP 560 that requires a bit
+ # of extra care (we mimic what is done by __build_class__).
+ resolved_bases = types.resolve_bases(bases)
+ if resolved_bases is not bases:
+ d['__orig_bases__'] = bases
+ else:
+ resolved_bases = bases
+ return meta(name, resolved_bases, d)
+
+ @classmethod
+ def __prepare__(cls, name, this_bases):
+ return meta.__prepare__(name, bases)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
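+# A minimal usage sketch for with_metaclass() (illustrative only; ``Meta`` and
+# ``Base`` below are hypothetical user-defined classes, not part of this
+# module):
+#
+#     class Meta(type):
+#         pass
+#
+#     class Base(object):
+#         pass
+#
+#     class MyClass(with_metaclass(Meta, Base)):
+#         pass
+#
+# On both Python 2 and Python 3, ``type(MyClass) is Meta`` and
+# ``MyClass.__mro__ == (MyClass, Base, object)``.
+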
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ if hasattr(cls, '__qualname__'):
+ orig_vars['__qualname__'] = cls.__qualname__
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
+
+
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, binary_type):
+ return s
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ # Optimization: Fast return for the common case.
+ if type(s) is str:
+ return s
+ if PY2 and isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
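+# A minimal sketch of how the ensure_* helpers behave (the values are
+# illustrative):
+#
+#     ensure_binary(u"abc")  # -> b"abc" on Python 3, "abc" (str) on Python 2
+#     ensure_str(b"abc")     # -> "abc" (str) on both Python 2 and Python 3
+#     ensure_text(b"abc")    # -> u"abc" (text_type) on both major versions
+#
+# All three raise TypeError for values that are neither text nor bytes.
+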
+
+def python_2_unicode_compatible(klass):
+ """
+ A class decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+        # Here's some real nastiness: Another "instance" of the six module might
+        # be floating around. Therefore, we can't use isinstance() to check for
+        # the six meta path importer, since the other six instance will have
+        # inserted an importer with a different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/testing/web-platform/tests/tools/third_party/six/test_six.py b/testing/web-platform/tests/tools/third_party/six/test_six.py
new file mode 100644
index 0000000000..3eefce37c3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/six/test_six.py
@@ -0,0 +1,1052 @@
+# Copyright (c) 2010-2019 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+import operator
+import sys
+import types
+import unittest
+import abc
+
+import pytest
+
+import six
+
+
+def test_add_doc():
+ def f():
+ """Icky doc"""
+ pass
+ six._add_doc(f, """New doc""")
+ assert f.__doc__ == "New doc"
+
+
+def test_import_module():
+ from logging import handlers
+ m = six._import_module("logging.handlers")
+ assert m is handlers
+
+
+def test_integer_types():
+ assert isinstance(1, six.integer_types)
+ assert isinstance(-1, six.integer_types)
+ assert isinstance(six.MAXSIZE + 23, six.integer_types)
+ assert not isinstance(.1, six.integer_types)
+
+
+def test_string_types():
+ assert isinstance("hi", six.string_types)
+ assert isinstance(six.u("hi"), six.string_types)
+ assert issubclass(six.text_type, six.string_types)
+
+
+def test_class_types():
+ class X:
+ pass
+ class Y(object):
+ pass
+ assert isinstance(X, six.class_types)
+ assert isinstance(Y, six.class_types)
+ assert not isinstance(X(), six.class_types)
+
+
+def test_text_type():
+ assert type(six.u("hi")) is six.text_type
+
+
+def test_binary_type():
+ assert type(six.b("hi")) is six.binary_type
+
+
+def test_MAXSIZE():
+ try:
+ # This shouldn't raise an overflow error.
+ six.MAXSIZE.__index__()
+ except AttributeError:
+ # Before Python 2.6.
+ pass
+ pytest.raises(
+ (ValueError, OverflowError),
+ operator.mul, [None], six.MAXSIZE + 1)
+
+
+def test_lazy():
+ if six.PY3:
+ html_name = "html.parser"
+ else:
+ html_name = "HTMLParser"
+ assert html_name not in sys.modules
+ mod = six.moves.html_parser
+ assert sys.modules[html_name] is mod
+ assert "htmlparser" not in six._MovedItems.__dict__
+
+
+try:
+ import _tkinter
+except ImportError:
+ have_tkinter = False
+else:
+ have_tkinter = True
+
+have_gdbm = True
+try:
+ import gdbm
+except ImportError:
+ try:
+ import dbm.gnu
+ except ImportError:
+ have_gdbm = False
+
+@pytest.mark.parametrize("item_name",
+ [item.name for item in six._moved_attributes])
+def test_move_items(item_name):
+ """Ensure that everything loads correctly."""
+ try:
+ item = getattr(six.moves, item_name)
+ if isinstance(item, types.ModuleType):
+ __import__("six.moves." + item_name)
+ except AttributeError:
+ if item_name == "zip_longest" and sys.version_info < (2, 6):
+ pytest.skip("zip_longest only available on 2.6+")
+ except ImportError:
+ if item_name == "winreg" and not sys.platform.startswith("win"):
+ pytest.skip("Windows only module")
+ if item_name.startswith("tkinter"):
+ if not have_tkinter:
+ pytest.skip("requires tkinter")
+ if item_name == "tkinter_ttk" and sys.version_info[:2] <= (2, 6):
+ pytest.skip("ttk only available on 2.7+")
+ if item_name.startswith("dbm_gnu") and not have_gdbm:
+ pytest.skip("requires gdbm")
+ raise
+ if sys.version_info[:2] >= (2, 6):
+ assert item_name in dir(six.moves)
+
+
+@pytest.mark.parametrize("item_name",
+ [item.name for item in six._urllib_parse_moved_attributes])
+def test_move_items_urllib_parse(item_name):
+ """Ensure that everything loads correctly."""
+ if item_name == "ParseResult" and sys.version_info < (2, 5):
+ pytest.skip("ParseResult is only found on 2.5+")
+ if item_name in ("parse_qs", "parse_qsl") and sys.version_info < (2, 6):
+ pytest.skip("parse_qs[l] is new in 2.6")
+ if sys.version_info[:2] >= (2, 6):
+ assert item_name in dir(six.moves.urllib.parse)
+ getattr(six.moves.urllib.parse, item_name)
+
+
+@pytest.mark.parametrize("item_name",
+ [item.name for item in six._urllib_error_moved_attributes])
+def test_move_items_urllib_error(item_name):
+ """Ensure that everything loads correctly."""
+ if sys.version_info[:2] >= (2, 6):
+ assert item_name in dir(six.moves.urllib.error)
+ getattr(six.moves.urllib.error, item_name)
+
+
+@pytest.mark.parametrize("item_name",
+ [item.name for item in six._urllib_request_moved_attributes])
+def test_move_items_urllib_request(item_name):
+ """Ensure that everything loads correctly."""
+ if sys.version_info[:2] >= (2, 6):
+ assert item_name in dir(six.moves.urllib.request)
+ getattr(six.moves.urllib.request, item_name)
+
+
+@pytest.mark.parametrize("item_name",
+ [item.name for item in six._urllib_response_moved_attributes])
+def test_move_items_urllib_response(item_name):
+ """Ensure that everything loads correctly."""
+ if sys.version_info[:2] >= (2, 6):
+ assert item_name in dir(six.moves.urllib.response)
+ getattr(six.moves.urllib.response, item_name)
+
+
+@pytest.mark.parametrize("item_name",
+ [item.name for item in six._urllib_robotparser_moved_attributes])
+def test_move_items_urllib_robotparser(item_name):
+ """Ensure that everything loads correctly."""
+ if sys.version_info[:2] >= (2, 6):
+ assert item_name in dir(six.moves.urllib.robotparser)
+ getattr(six.moves.urllib.robotparser, item_name)
+
+
+def test_import_moves_error_1():
+ from six.moves.urllib.parse import urljoin
+ from six import moves
+ # In 1.4.1: AttributeError: 'Module_six_moves_urllib_parse' object has no attribute 'urljoin'
+ assert moves.urllib.parse.urljoin
+
+
+def test_import_moves_error_2():
+ from six import moves
+ assert moves.urllib.parse.urljoin
+ # In 1.4.1: ImportError: cannot import name urljoin
+ from six.moves.urllib.parse import urljoin
+
+
+def test_import_moves_error_3():
+ from six.moves.urllib.parse import urljoin
+ # In 1.4.1: ImportError: cannot import name urljoin
+ from six.moves.urllib_parse import urljoin
+
+
+def test_from_imports():
+ from six.moves.queue import Queue
+ assert isinstance(Queue, six.class_types)
+ from six.moves.configparser import ConfigParser
+ assert isinstance(ConfigParser, six.class_types)
+
+
+def test_filter():
+ from six.moves import filter
+ f = filter(lambda x: x % 2, range(10))
+ assert six.advance_iterator(f) == 1
+
+
+def test_filter_false():
+ from six.moves import filterfalse
+ f = filterfalse(lambda x: x % 3, range(10))
+ assert six.advance_iterator(f) == 0
+ assert six.advance_iterator(f) == 3
+ assert six.advance_iterator(f) == 6
+
+def test_map():
+ from six.moves import map
+ assert six.advance_iterator(map(lambda x: x + 1, range(2))) == 1
+
+
+def test_getoutput():
+ from six.moves import getoutput
+ output = getoutput('echo "foo"')
+ assert output == 'foo'
+
+
+def test_zip():
+ from six.moves import zip
+ assert six.advance_iterator(zip(range(2), range(2))) == (0, 0)
+
+
+@pytest.mark.skipif("sys.version_info < (2, 6)")
+def test_zip_longest():
+ from six.moves import zip_longest
+ it = zip_longest(range(2), range(1))
+
+ assert six.advance_iterator(it) == (0, 0)
+ assert six.advance_iterator(it) == (1, None)
+
+
+class TestCustomizedMoves:
+
+ def teardown_method(self, meth):
+ try:
+ del six._MovedItems.spam
+ except AttributeError:
+ pass
+ try:
+ del six.moves.__dict__["spam"]
+ except KeyError:
+ pass
+
+
+ def test_moved_attribute(self):
+ attr = six.MovedAttribute("spam", "foo", "bar")
+ if six.PY3:
+ assert attr.mod == "bar"
+ else:
+ assert attr.mod == "foo"
+ assert attr.attr == "spam"
+ attr = six.MovedAttribute("spam", "foo", "bar", "lemma")
+ assert attr.attr == "lemma"
+ attr = six.MovedAttribute("spam", "foo", "bar", "lemma", "theorm")
+ if six.PY3:
+ assert attr.attr == "theorm"
+ else:
+ assert attr.attr == "lemma"
+
+
+ def test_moved_module(self):
+ attr = six.MovedModule("spam", "foo")
+ if six.PY3:
+ assert attr.mod == "spam"
+ else:
+ assert attr.mod == "foo"
+ attr = six.MovedModule("spam", "foo", "bar")
+ if six.PY3:
+ assert attr.mod == "bar"
+ else:
+ assert attr.mod == "foo"
+
+
+ def test_custom_move_module(self):
+ attr = six.MovedModule("spam", "six", "six")
+ six.add_move(attr)
+ six.remove_move("spam")
+ assert not hasattr(six.moves, "spam")
+ attr = six.MovedModule("spam", "six", "six")
+ six.add_move(attr)
+ from six.moves import spam
+ assert spam is six
+ six.remove_move("spam")
+ assert not hasattr(six.moves, "spam")
+
+
+ def test_custom_move_attribute(self):
+ attr = six.MovedAttribute("spam", "six", "six", "u", "u")
+ six.add_move(attr)
+ six.remove_move("spam")
+ assert not hasattr(six.moves, "spam")
+ attr = six.MovedAttribute("spam", "six", "six", "u", "u")
+ six.add_move(attr)
+ from six.moves import spam
+ assert spam is six.u
+ six.remove_move("spam")
+ assert not hasattr(six.moves, "spam")
+
+
+ def test_empty_remove(self):
+ pytest.raises(AttributeError, six.remove_move, "eggs")
+
+
+def test_get_unbound_function():
+ class X(object):
+ def m(self):
+ pass
+ assert six.get_unbound_function(X.m) is X.__dict__["m"]
+
+
+def test_get_method_self():
+ class X(object):
+ def m(self):
+ pass
+ x = X()
+ assert six.get_method_self(x.m) is x
+ pytest.raises(AttributeError, six.get_method_self, 42)
+
+
+def test_get_method_function():
+ class X(object):
+ def m(self):
+ pass
+ x = X()
+ assert six.get_method_function(x.m) is X.__dict__["m"]
+ pytest.raises(AttributeError, six.get_method_function, hasattr)
+
+
+def test_get_function_closure():
+ def f():
+ x = 42
+ def g():
+ return x
+ return g
+ cell = six.get_function_closure(f())[0]
+ assert type(cell).__name__ == "cell"
+
+
+def test_get_function_code():
+ def f():
+ pass
+ assert isinstance(six.get_function_code(f), types.CodeType)
+ if not hasattr(sys, "pypy_version_info"):
+ pytest.raises(AttributeError, six.get_function_code, hasattr)
+
+
+def test_get_function_defaults():
+ def f(x, y=3, b=4):
+ pass
+ assert six.get_function_defaults(f) == (3, 4)
+
+
+def test_get_function_globals():
+ def f():
+ pass
+ assert six.get_function_globals(f) is globals()
+
+
+def test_dictionary_iterators(monkeypatch):
+ def stock_method_name(iterwhat):
+ """Given a method suffix like "lists" or "values", return the name
+ of the dict method that delivers those on the version of Python
+ we're running in."""
+ if six.PY3:
+ return iterwhat
+ return 'iter' + iterwhat
+
+ class MyDict(dict):
+ if not six.PY3:
+ def lists(self, **kw):
+ return [1, 2, 3]
+ def iterlists(self, **kw):
+ return iter([1, 2, 3])
+ f = MyDict.iterlists
+ del MyDict.iterlists
+ setattr(MyDict, stock_method_name('lists'), f)
+
+ d = MyDict(zip(range(10), reversed(range(10))))
+ for name in "keys", "values", "items", "lists":
+ meth = getattr(six, "iter" + name)
+ it = meth(d)
+ assert not isinstance(it, list)
+ assert list(it) == list(getattr(d, name)())
+ pytest.raises(StopIteration, six.advance_iterator, it)
+ record = []
+ def with_kw(*args, **kw):
+ record.append(kw["kw"])
+ return old(*args)
+ old = getattr(MyDict, stock_method_name(name))
+ monkeypatch.setattr(MyDict, stock_method_name(name), with_kw)
+ meth(d, kw=42)
+ assert record == [42]
+ monkeypatch.undo()
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (2, 7)",
+ reason="view methods on dictionaries only available on 2.7+")
+def test_dictionary_views():
+ d = dict(zip(range(10), (range(11, 20))))
+ for name in "keys", "values", "items":
+ meth = getattr(six, "view" + name)
+ view = meth(d)
+ assert set(view) == set(getattr(d, name)())
+
+
+def test_advance_iterator():
+ assert six.next is six.advance_iterator
+ l = [1, 2]
+ it = iter(l)
+ assert six.next(it) == 1
+ assert six.next(it) == 2
+ pytest.raises(StopIteration, six.next, it)
+ pytest.raises(StopIteration, six.next, it)
+
+
+def test_iterator():
+ class myiter(six.Iterator):
+ def __next__(self):
+ return 13
+ assert six.advance_iterator(myiter()) == 13
+ class myitersub(myiter):
+ def __next__(self):
+ return 14
+ assert six.advance_iterator(myitersub()) == 14
+
+
+def test_callable():
+ class X:
+ def __call__(self):
+ pass
+ def method(self):
+ pass
+ assert six.callable(X)
+ assert six.callable(X())
+ assert six.callable(test_callable)
+ assert six.callable(hasattr)
+ assert six.callable(X.method)
+ assert six.callable(X().method)
+ assert not six.callable(4)
+ assert not six.callable("string")
+
+
+def test_create_bound_method():
+ class X(object):
+ pass
+ def f(self):
+ return self
+ x = X()
+ b = six.create_bound_method(f, x)
+ assert isinstance(b, types.MethodType)
+ assert b() is x
+
+
+def test_create_unbound_method():
+ class X(object):
+ pass
+
+ def f(self):
+ return self
+ u = six.create_unbound_method(f, X)
+ pytest.raises(TypeError, u)
+ if six.PY2:
+ assert isinstance(u, types.MethodType)
+ x = X()
+ assert f(x) is x
+
+
+if six.PY3:
+
+ def test_b():
+ data = six.b("\xff")
+ assert isinstance(data, bytes)
+ assert len(data) == 1
+ assert data == bytes([255])
+
+
+ def test_u():
+ s = six.u("hi \u0439 \U00000439 \\ \\\\ \n")
+ assert isinstance(s, str)
+ assert s == "hi \u0439 \U00000439 \\ \\\\ \n"
+
+else:
+
+ def test_b():
+ data = six.b("\xff")
+ assert isinstance(data, str)
+ assert len(data) == 1
+ assert data == "\xff"
+
+
+ def test_u():
+ s = six.u("hi \u0439 \U00000439 \\ \\\\ \n")
+ assert isinstance(s, unicode)
+ assert s == "hi \xd0\xb9 \xd0\xb9 \\ \\\\ \n".decode("utf8")
+
+
+def test_u_escapes():
+ s = six.u("\u1234")
+ assert len(s) == 1
+
+
+def test_unichr():
+ assert six.u("\u1234") == six.unichr(0x1234)
+ assert type(six.u("\u1234")) is type(six.unichr(0x1234))
+
+
+def test_int2byte():
+ assert six.int2byte(3) == six.b("\x03")
+ pytest.raises(Exception, six.int2byte, 256)
+
+
+def test_byte2int():
+ assert six.byte2int(six.b("\x03")) == 3
+ assert six.byte2int(six.b("\x03\x04")) == 3
+ pytest.raises(IndexError, six.byte2int, six.b(""))
+
+
+def test_bytesindex():
+ assert six.indexbytes(six.b("hello"), 3) == ord("l")
+
+
+def test_bytesiter():
+ it = six.iterbytes(six.b("hi"))
+ assert six.next(it) == ord("h")
+ assert six.next(it) == ord("i")
+ pytest.raises(StopIteration, six.next, it)
+
+
+def test_StringIO():
+ fp = six.StringIO()
+ fp.write(six.u("hello"))
+ assert fp.getvalue() == six.u("hello")
+
+
+def test_BytesIO():
+ fp = six.BytesIO()
+ fp.write(six.b("hello"))
+ assert fp.getvalue() == six.b("hello")
+
+
+def test_exec_():
+ def f():
+ l = []
+ six.exec_("l.append(1)")
+ assert l == [1]
+ f()
+ ns = {}
+ six.exec_("x = 42", ns)
+ assert ns["x"] == 42
+ glob = {}
+ loc = {}
+ six.exec_("global y; y = 42; x = 12", glob, loc)
+ assert glob["y"] == 42
+ assert "x" not in glob
+ assert loc["x"] == 12
+ assert "y" not in loc
+
+
+def test_reraise():
+ def get_next(tb):
+ if six.PY3:
+ return tb.tb_next.tb_next
+ else:
+ return tb.tb_next
+ e = Exception("blah")
+ try:
+ raise e
+ except Exception:
+ tp, val, tb = sys.exc_info()
+ try:
+ six.reraise(tp, val, tb)
+ except Exception:
+ tp2, value2, tb2 = sys.exc_info()
+ assert tp2 is Exception
+ assert value2 is e
+ assert tb is get_next(tb2)
+ try:
+ six.reraise(tp, val)
+ except Exception:
+ tp2, value2, tb2 = sys.exc_info()
+ assert tp2 is Exception
+ assert value2 is e
+ assert tb2 is not tb
+ try:
+ six.reraise(tp, val, tb2)
+ except Exception:
+ tp2, value2, tb3 = sys.exc_info()
+ assert tp2 is Exception
+ assert value2 is e
+ assert get_next(tb3) is tb2
+ try:
+ six.reraise(tp, None, tb)
+ except Exception:
+ tp2, value2, tb2 = sys.exc_info()
+ assert tp2 is Exception
+ assert value2 is not val
+ assert isinstance(value2, Exception)
+ assert tb is get_next(tb2)
+
+
+def test_raise_from():
+ try:
+ try:
+ raise Exception("blah")
+ except Exception:
+ ctx = sys.exc_info()[1]
+ f = Exception("foo")
+ six.raise_from(f, None)
+ except Exception:
+ tp, val, tb = sys.exc_info()
+ if sys.version_info[:2] > (3, 0):
+ # We should have done a raise f from None equivalent.
+ assert val.__cause__ is None
+ assert val.__context__ is ctx
+ if sys.version_info[:2] >= (3, 3):
+ # And that should suppress the context on the exception.
+ assert val.__suppress_context__
+ # For all versions the outer exception should have raised successfully.
+ assert str(val) == "foo"
+
+
+def test_print_():
+ save = sys.stdout
+ out = sys.stdout = six.moves.StringIO()
+ try:
+ six.print_("Hello,", "person!")
+ finally:
+ sys.stdout = save
+ assert out.getvalue() == "Hello, person!\n"
+ out = six.StringIO()
+ six.print_("Hello,", "person!", file=out)
+ assert out.getvalue() == "Hello, person!\n"
+ out = six.StringIO()
+ six.print_("Hello,", "person!", file=out, end="")
+ assert out.getvalue() == "Hello, person!"
+ out = six.StringIO()
+ six.print_("Hello,", "person!", file=out, sep="X")
+ assert out.getvalue() == "Hello,Xperson!\n"
+ out = six.StringIO()
+ six.print_(six.u("Hello,"), six.u("person!"), file=out)
+ result = out.getvalue()
+ assert isinstance(result, six.text_type)
+ assert result == six.u("Hello, person!\n")
+ six.print_("Hello", file=None) # This works.
+ out = six.StringIO()
+ six.print_(None, file=out)
+ assert out.getvalue() == "None\n"
+ class FlushableStringIO(six.StringIO):
+ def __init__(self):
+ six.StringIO.__init__(self)
+ self.flushed = False
+ def flush(self):
+ self.flushed = True
+ out = FlushableStringIO()
+ six.print_("Hello", file=out)
+ assert not out.flushed
+ six.print_("Hello", file=out, flush=True)
+ assert out.flushed
+
+
+@pytest.mark.skipif("sys.version_info[:2] >= (2, 6)")
+def test_print_encoding(monkeypatch):
+ # Fool the type checking in print_.
+ monkeypatch.setattr(six, "file", six.BytesIO, raising=False)
+ out = six.BytesIO()
+ out.encoding = "utf-8"
+ out.errors = None
+ six.print_(six.u("\u053c"), end="", file=out)
+ assert out.getvalue() == six.b("\xd4\xbc")
+ out = six.BytesIO()
+ out.encoding = "ascii"
+ out.errors = "strict"
+ pytest.raises(UnicodeEncodeError, six.print_, six.u("\u053c"), file=out)
+ out.errors = "backslashreplace"
+ six.print_(six.u("\u053c"), end="", file=out)
+ assert out.getvalue() == six.b("\\u053c")
+
+
+def test_print_exceptions():
+ pytest.raises(TypeError, six.print_, x=3)
+ pytest.raises(TypeError, six.print_, end=3)
+ pytest.raises(TypeError, six.print_, sep=42)
+
+
+def test_with_metaclass():
+ class Meta(type):
+ pass
+ class X(six.with_metaclass(Meta)):
+ pass
+ assert type(X) is Meta
+ assert issubclass(X, object)
+ class Base(object):
+ pass
+ class X(six.with_metaclass(Meta, Base)):
+ pass
+ assert type(X) is Meta
+ assert issubclass(X, Base)
+ class Base2(object):
+ pass
+ class X(six.with_metaclass(Meta, Base, Base2)):
+ pass
+ assert type(X) is Meta
+ assert issubclass(X, Base)
+ assert issubclass(X, Base2)
+ assert X.__mro__ == (X, Base, Base2, object)
+ class X(six.with_metaclass(Meta)):
+ pass
+ class MetaSub(Meta):
+ pass
+ class Y(six.with_metaclass(MetaSub, X)):
+ pass
+ assert type(Y) is MetaSub
+ assert Y.__mro__ == (Y, X, object)
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (2, 7)")
+def test_with_metaclass_typing():
+ try:
+ import typing
+ except ImportError:
+ pytest.skip("typing module required")
+ class Meta(type):
+ pass
+ if sys.version_info[:2] < (3, 7):
+ # Generics with custom metaclasses were broken on older versions.
+ class Meta(Meta, typing.GenericMeta):
+ pass
+ T = typing.TypeVar('T')
+ class G(six.with_metaclass(Meta, typing.Generic[T])):
+ pass
+ class GA(six.with_metaclass(abc.ABCMeta, typing.Generic[T])):
+ pass
+ assert isinstance(G, Meta)
+ assert isinstance(GA, abc.ABCMeta)
+ assert G[int] is not G[G[int]]
+ assert GA[int] is not GA[GA[int]]
+ assert G.__bases__ == (typing.Generic,)
+ assert G.__orig_bases__ == (typing.Generic[T],)
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (3, 7)")
+def test_with_metaclass_pep_560():
+ class Meta(type):
+ pass
+ class A:
+ pass
+ class B:
+ pass
+ class Fake:
+ def __mro_entries__(self, bases):
+ return (A, B)
+ fake = Fake()
+ class G(six.with_metaclass(Meta, fake)):
+ pass
+ class GA(six.with_metaclass(abc.ABCMeta, fake)):
+ pass
+ assert isinstance(G, Meta)
+ assert isinstance(GA, abc.ABCMeta)
+ assert G.__bases__ == (A, B)
+ assert G.__orig_bases__ == (fake,)
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (3, 0)")
+def test_with_metaclass_prepare():
+ """Test that with_metaclass causes Meta.__prepare__ to be called with the correct arguments."""
+
+ class MyDict(dict):
+ pass
+
+ class Meta(type):
+
+ @classmethod
+ def __prepare__(cls, name, bases):
+ namespace = MyDict(super().__prepare__(name, bases), cls=cls, bases=bases)
+ namespace['namespace'] = namespace
+ return namespace
+
+ class Base(object):
+ pass
+
+ bases = (Base,)
+
+ class X(six.with_metaclass(Meta, *bases)):
+ pass
+
+ assert getattr(X, 'cls', type) is Meta
+ assert getattr(X, 'bases', ()) == bases
+ assert isinstance(getattr(X, 'namespace', {}), MyDict)
+
+
+def test_wraps():
+ def f(g):
+ @six.wraps(g)
+ def w():
+ return 42
+ return w
+ def k():
+ pass
+ original_k = k
+ k = f(f(k))
+ assert hasattr(k, '__wrapped__')
+ k = k.__wrapped__
+ assert hasattr(k, '__wrapped__')
+ k = k.__wrapped__
+ assert k is original_k
+ assert not hasattr(k, '__wrapped__')
+
+ def f(g, assign, update):
+ def w():
+ return 42
+ w.glue = {"foo" : "bar"}
+ return six.wraps(g, assign, update)(w)
+ k.glue = {"melon" : "egg"}
+ k.turnip = 43
+ k = f(k, ["turnip"], ["glue"])
+ assert k.__name__ == "w"
+ assert k.turnip == 43
+ assert k.glue == {"melon" : "egg", "foo" : "bar"}
+
+
+def test_add_metaclass():
+ class Meta(type):
+ pass
+ class X:
+ "success"
+ X = six.add_metaclass(Meta)(X)
+ assert type(X) is Meta
+ assert issubclass(X, object)
+ assert X.__module__ == __name__
+ assert X.__doc__ == "success"
+ class Base(object):
+ pass
+ class X(Base):
+ pass
+ X = six.add_metaclass(Meta)(X)
+ assert type(X) is Meta
+ assert issubclass(X, Base)
+ class Base2(object):
+ pass
+ class X(Base, Base2):
+ pass
+ X = six.add_metaclass(Meta)(X)
+ assert type(X) is Meta
+ assert issubclass(X, Base)
+ assert issubclass(X, Base2)
+
+ # Test a second-generation subclass of a type.
+ class Meta1(type):
+ m1 = "m1"
+ class Meta2(Meta1):
+ m2 = "m2"
+ class Base:
+ b = "b"
+ Base = six.add_metaclass(Meta1)(Base)
+ class X(Base):
+ x = "x"
+ X = six.add_metaclass(Meta2)(X)
+ assert type(X) is Meta2
+ assert issubclass(X, Base)
+ assert type(Base) is Meta1
+ assert "__dict__" not in vars(X)
+ instance = X()
+ instance.attr = "test"
+ assert vars(instance) == {"attr": "test"}
+ assert instance.b == Base.b
+ assert instance.x == X.x
+
+ # Test a class with slots.
+ class MySlots(object):
+ __slots__ = ["a", "b"]
+ MySlots = six.add_metaclass(Meta1)(MySlots)
+
+ assert MySlots.__slots__ == ["a", "b"]
+ instance = MySlots()
+ instance.a = "foo"
+ pytest.raises(AttributeError, setattr, instance, "c", "baz")
+
+ # Test a class with string for slots.
+ class MyStringSlots(object):
+ __slots__ = "ab"
+ MyStringSlots = six.add_metaclass(Meta1)(MyStringSlots)
+ assert MyStringSlots.__slots__ == "ab"
+ instance = MyStringSlots()
+ instance.ab = "foo"
+ pytest.raises(AttributeError, setattr, instance, "a", "baz")
+ pytest.raises(AttributeError, setattr, instance, "b", "baz")
+
+ class MySlotsWeakref(object):
+ __slots__ = "__weakref__",
+ MySlotsWeakref = six.add_metaclass(Meta)(MySlotsWeakref)
+ assert type(MySlotsWeakref) is Meta
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (3, 3)")
+def test_add_metaclass_nested():
+ # Regression test for https://github.com/benjaminp/six/issues/259
+ class Meta(type):
+ pass
+
+ class A:
+ class B: pass
+
+ expected = 'test_add_metaclass_nested.<locals>.A.B'
+
+ assert A.B.__qualname__ == expected
+
+ class A:
+ @six.add_metaclass(Meta)
+ class B: pass
+
+ assert A.B.__qualname__ == expected
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (2, 7) or sys.version_info[:2] in ((3, 0), (3, 1))")
+def test_assertCountEqual():
+ class TestAssertCountEqual(unittest.TestCase):
+ def test(self):
+ with self.assertRaises(AssertionError):
+ six.assertCountEqual(self, (1, 2), [3, 4, 5])
+
+ six.assertCountEqual(self, (1, 2), [2, 1])
+
+ TestAssertCountEqual('test').test()
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (2, 7)")
+def test_assertRegex():
+ class TestAssertRegex(unittest.TestCase):
+ def test(self):
+ with self.assertRaises(AssertionError):
+ six.assertRegex(self, 'test', r'^a')
+
+ six.assertRegex(self, 'test', r'^t')
+
+ TestAssertRegex('test').test()
+
+
+@pytest.mark.skipif("sys.version_info[:2] < (2, 7)")
+def test_assertRaisesRegex():
+ class TestAssertRaisesRegex(unittest.TestCase):
+ def test(self):
+ with six.assertRaisesRegex(self, AssertionError, '^Foo'):
+ raise AssertionError('Foo')
+
+ with self.assertRaises(AssertionError):
+ with six.assertRaisesRegex(self, AssertionError, r'^Foo'):
+ raise AssertionError('Bar')
+
+ TestAssertRaisesRegex('test').test()
+
+
+def test_python_2_unicode_compatible():
+ @six.python_2_unicode_compatible
+ class MyTest(object):
+ def __str__(self):
+ return six.u('hello')
+
+ def __bytes__(self):
+ return six.b('hello')
+
+ my_test = MyTest()
+
+ if six.PY2:
+ assert str(my_test) == six.b("hello")
+ assert unicode(my_test) == six.u("hello")
+ elif six.PY3:
+ assert bytes(my_test) == six.b("hello")
+ assert str(my_test) == six.u("hello")
+
+ assert getattr(six.moves.builtins, 'bytes', str)(my_test) == six.b("hello")
+
+
+class EnsureTests:
+
+ # grinning face emoji
+ UNICODE_EMOJI = six.u("\U0001F600")
+ BINARY_EMOJI = b"\xf0\x9f\x98\x80"
+
+ def test_ensure_binary_raise_type_error(self):
+ with pytest.raises(TypeError):
+ six.ensure_str(8)
+
+ def test_errors_and_encoding(self):
+ six.ensure_binary(self.UNICODE_EMOJI, encoding='latin-1', errors='ignore')
+ with pytest.raises(UnicodeEncodeError):
+ six.ensure_binary(self.UNICODE_EMOJI, encoding='latin-1', errors='strict')
+
+ def test_ensure_binary_raise(self):
+ converted_unicode = six.ensure_binary(self.UNICODE_EMOJI, encoding='utf-8', errors='strict')
+ converted_binary = six.ensure_binary(self.BINARY_EMOJI, encoding="utf-8", errors='strict')
+ if six.PY2:
+ # PY2: unicode -> str
+ assert converted_unicode == self.BINARY_EMOJI and isinstance(converted_unicode, str)
+ # PY2: str -> str
+ assert converted_binary == self.BINARY_EMOJI and isinstance(converted_binary, str)
+ else:
+ # PY3: str -> bytes
+ assert converted_unicode == self.BINARY_EMOJI and isinstance(converted_unicode, bytes)
+ # PY3: bytes -> bytes
+ assert converted_binary == self.BINARY_EMOJI and isinstance(converted_binary, bytes)
+
+ def test_ensure_str(self):
+ converted_unicode = six.ensure_str(self.UNICODE_EMOJI, encoding='utf-8', errors='strict')
+ converted_binary = six.ensure_str(self.BINARY_EMOJI, encoding="utf-8", errors='strict')
+ if six.PY2:
+ # PY2: unicode -> str
+ assert converted_unicode == self.BINARY_EMOJI and isinstance(converted_unicode, str)
+ # PY2: str -> str
+ assert converted_binary == self.BINARY_EMOJI and isinstance(converted_binary, str)
+ else:
+ # PY3: str -> str
+ assert converted_unicode == self.UNICODE_EMOJI and isinstance(converted_unicode, str)
+ # PY3: bytes -> str
+            assert converted_binary == self.UNICODE_EMOJI and isinstance(converted_binary, str)
+
+ def test_ensure_text(self):
+ converted_unicode = six.ensure_text(self.UNICODE_EMOJI, encoding='utf-8', errors='strict')
+ converted_binary = six.ensure_text(self.BINARY_EMOJI, encoding="utf-8", errors='strict')
+ if six.PY2:
+ # PY2: unicode -> unicode
+ assert converted_unicode == self.UNICODE_EMOJI and isinstance(converted_unicode, unicode)
+ # PY2: str -> unicode
+            assert converted_binary == self.UNICODE_EMOJI and isinstance(converted_binary, unicode)
+ else:
+ # PY3: str -> str
+ assert converted_unicode == self.UNICODE_EMOJI and isinstance(converted_unicode, str)
+ # PY3: bytes -> str
+            assert converted_binary == self.UNICODE_EMOJI and isinstance(converted_binary, str)
diff --git a/testing/web-platform/tests/tools/third_party/tooltool/tooltool.py b/testing/web-platform/tests/tools/third_party/tooltool/tooltool.py
new file mode 100755
index 0000000000..53929fce88
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/tooltool/tooltool.py
@@ -0,0 +1,1316 @@
+#!/usr/bin/env python
+
+# tooltool is a lookaside cache implemented in Python
+# Copyright (C) 2011 John H. Ford <john@johnford.info>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation version 2
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+# 02110-1301, USA.
+
+# A manifest file specifies files in that directory that are stored
+# elsewhere. This file should only list files in the same directory
+# in which the manifest file resides and it should be called
+# 'manifest.tt'
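+#
+# As a rough sketch (the field values below are made up for illustration), a
+# manifest.tt file is a JSON list of records such as:
+#
+#     [
+#       {
+#         "filename": "example.tar.xz",
+#         "size": 12345,
+#         "algorithm": "sha512",
+#         "digest": "0a1b2c...",
+#         "unpack": true
+#       }
+#     ]
+#
+# See FileRecord and FileRecordJSONDecoder below for the fields that are
+# actually required and understood.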
+
+from __future__ import print_function
+
+import base64
+import calendar
+import hashlib
+import hmac
+import json
+import logging
+import math
+import optparse
+import os
+import pprint
+import re
+import shutil
+import sys
+import tarfile
+import tempfile
+import threading
+import time
+import zipfile
+
+from io import open
+from io import BytesIO
+from subprocess import PIPE
+from subprocess import Popen
+
+__version__ = '1'
+
+# Allowed request header characters:
+# !#$%&'()*+,-./:;<=>?@[]^_`{|}~ and space, a-z, A-Z, 0-9, \, "
+REQUEST_HEADER_ATTRIBUTE_CHARS = re.compile(
+ r"^[ a-zA-Z0-9_\!#\$%&'\(\)\*\+,\-\./\:;<\=>\?@\[\]\^`\{\|\}~]*$")
+DEFAULT_MANIFEST_NAME = 'manifest.tt'
+TOOLTOOL_PACKAGE_SUFFIX = '.TOOLTOOL-PACKAGE'
+HAWK_VER = 1
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ open_attrs = dict(mode='w', encoding='utf-8')
+ six_binary_type = bytes
+ six_text_type = str
+ unicode = str # Silence `pyflakes` from reporting `undefined name 'unicode'` in Python 3.
+ import urllib.request as urllib2
+ from http.client import HTTPSConnection, HTTPConnection
+ from urllib.parse import urlparse, urljoin
+ from urllib.request import Request
+ from urllib.error import HTTPError, URLError
+else:
+ open_attrs = dict(mode='wb')
+ six_binary_type = str
+ six_text_type = unicode
+ import urllib2
+ from httplib import HTTPSConnection, HTTPConnection
+ from urllib2 import Request, HTTPError, URLError
+ from urlparse import urlparse, urljoin
+
+
+log = logging.getLogger(__name__)
+
+
+def request_has_data(req):
+ if PY3:
+ return req.data is not None
+ return req.has_data()
+
+
+def to_binary(val):
+ if isinstance(val, six_text_type):
+ return val.encode('utf-8')
+ return val
+
+
+def to_text(val):
+ if isinstance(val, six_binary_type):
+ return val.decode('utf-8')
+ return val
+
+
+def get_hexdigest(val):
+ return hashlib.sha512(to_binary(val)).hexdigest()
+
+
+class FileRecordJSONEncoderException(Exception):
+ pass
+
+
+class InvalidManifest(Exception):
+ pass
+
+
+class ExceptionWithFilename(Exception):
+
+ def __init__(self, filename):
+ Exception.__init__(self)
+ self.filename = filename
+
+
+class BadFilenameException(ExceptionWithFilename):
+ pass
+
+
+class DigestMismatchException(ExceptionWithFilename):
+ pass
+
+
+class MissingFileException(ExceptionWithFilename):
+ pass
+
+
+class InvalidCredentials(Exception):
+ pass
+
+
+class BadHeaderValue(Exception):
+ pass
+
+
+def parse_url(url):
+ url_parts = urlparse(url)
+ url_dict = {
+ 'scheme': url_parts.scheme,
+ 'hostname': url_parts.hostname,
+ 'port': url_parts.port,
+ 'path': url_parts.path,
+ 'resource': url_parts.path,
+ 'query': url_parts.query,
+ }
+ if len(url_dict['query']) > 0:
+ url_dict['resource'] = '%s?%s' % (url_dict['resource'], # pragma: no cover
+ url_dict['query'])
+
+ if url_parts.port is None:
+ if url_parts.scheme == 'http':
+ url_dict['port'] = 80
+ elif url_parts.scheme == 'https': # pragma: no cover
+ url_dict['port'] = 443
+ return url_dict
+
+
+def utc_now(offset_in_seconds=0.0):
+ return int(math.floor(calendar.timegm(time.gmtime()) + float(offset_in_seconds)))
+
+
+def random_string(length):
+ return base64.urlsafe_b64encode(os.urandom(length))[:length]
+
+
+def prepare_header_val(val):
+ if isinstance(val, six_binary_type):
+ val = val.decode('utf-8')
+
+ if not REQUEST_HEADER_ATTRIBUTE_CHARS.match(val):
+ raise BadHeaderValue( # pragma: no cover
+ 'header value value={val} contained an illegal character'.format(val=repr(val)))
+
+ return val
+
+
+def parse_content_type(content_type): # pragma: no cover
+ if content_type:
+ return content_type.split(';')[0].strip().lower()
+ else:
+ return ''
+
+
+def calculate_payload_hash(algorithm, payload, content_type): # pragma: no cover
+ parts = [
+ part if isinstance(part, six_binary_type) else part.encode('utf8')
+ for part in ['hawk.' + str(HAWK_VER) + '.payload\n',
+ parse_content_type(content_type) + '\n',
+ payload or '',
+ '\n',
+ ]
+ ]
+
+ p_hash = hashlib.new(algorithm)
+ p_hash.update(''.join(parts))
+
+ log.debug('calculating payload hash from:\n{parts}'.format(parts=pprint.pformat(parts)))
+
+ return base64.b64encode(p_hash.digest())
+
+
+def validate_taskcluster_credentials(credentials):
+ if not hasattr(credentials, '__getitem__'):
+ raise InvalidCredentials('credentials must be a dict-like object') # pragma: no cover
+ try:
+ credentials['clientId']
+ credentials['accessToken']
+ except KeyError: # pragma: no cover
+ etype, val, tb = sys.exc_info()
+ raise InvalidCredentials('{etype}: {val}'.format(etype=etype, val=val))
+
+
+def normalize_header_attr(val):
+ if isinstance(val, six_binary_type):
+ return val.decode('utf-8')
+ return val # pragma: no cover
+
+
+def normalize_string(mac_type,
+ timestamp,
+ nonce,
+ method,
+ name,
+ host,
+ port,
+ content_hash,
+ ):
+ return '\n'.join([
+ normalize_header_attr(header)
+ # The blank lines are important. They follow what the Node Hawk lib does.
+ for header in ['hawk.' + str(HAWK_VER) + '.' + mac_type,
+ timestamp,
+ nonce,
+ method or '',
+ name or '',
+ host,
+ port,
+ content_hash or '',
+ '', # for ext which is empty in this case
+ '', # Add trailing new line.
+ ]
+ ])
+
+
+def calculate_mac(mac_type,
+ access_token,
+ algorithm,
+ timestamp,
+ nonce,
+ method,
+ name,
+ host,
+ port,
+ content_hash,
+ ):
+ normalized = normalize_string(mac_type,
+ timestamp,
+ nonce,
+ method,
+ name,
+ host,
+ port,
+ content_hash)
+ log.debug(u'normalized resource for mac calc: {norm}'.format(norm=normalized))
+ digestmod = getattr(hashlib, algorithm)
+
+ if not isinstance(normalized, six_binary_type):
+ normalized = normalized.encode('utf8')
+
+ if not isinstance(access_token, six_binary_type):
+ access_token = access_token.encode('ascii')
+
+ result = hmac.new(access_token, normalized, digestmod)
+ return base64.b64encode(result.digest())
+
+
+def make_taskcluster_header(credentials, req):
+ validate_taskcluster_credentials(credentials)
+
+ url = req.get_full_url()
+ method = req.get_method()
+ algorithm = 'sha256'
+ timestamp = str(utc_now())
+ nonce = random_string(6)
+ url_parts = parse_url(url)
+
+ content_hash = None
+ if request_has_data(req):
+ content_hash = calculate_payload_hash( # pragma: no cover
+ algorithm,
+ req.get_data(),
+            # maybe we should detect this from req.headers, but we expect JSON anyway
+ content_type='application/json',
+ )
+
+ mac = calculate_mac('header',
+ credentials['accessToken'],
+ algorithm,
+ timestamp,
+ nonce,
+ method,
+ url_parts['resource'],
+ url_parts['hostname'],
+ str(url_parts['port']),
+ content_hash,
+ )
+
+ header = u'Hawk mac="{}"'.format(prepare_header_val(mac))
+
+ if content_hash: # pragma: no cover
+ header = u'{}, hash="{}"'.format(header, prepare_header_val(content_hash))
+
+ header = u'{header}, id="{id}", ts="{ts}", nonce="{nonce}"'.format(
+ header=header,
+ id=prepare_header_val(credentials['clientId']),
+ ts=prepare_header_val(timestamp),
+ nonce=prepare_header_val(nonce),
+ )
+
+ log.debug('Hawk header for URL={} method={}: {}'.format(url, method, header))
+
+ return header
+
+
+class FileRecord(object):
+
+ def __init__(self, filename, size, digest, algorithm, unpack=False,
+ version=None, visibility=None):
+ object.__init__(self)
+ if '/' in filename or '\\' in filename:
+ log.error(
+ "The filename provided contains path information and is, therefore, invalid.")
+ raise BadFilenameException(filename=filename)
+ self.filename = filename
+ self.size = size
+ self.digest = digest
+ self.algorithm = algorithm
+ self.unpack = unpack
+ self.version = version
+ self.visibility = visibility
+
+ def __eq__(self, other):
+ if self is other:
+ return True
+ if self.filename == other.filename and \
+ self.size == other.size and \
+ self.digest == other.digest and \
+ self.algorithm == other.algorithm and \
+ self.version == other.version and \
+ self.visibility == other.visibility:
+ return True
+ else:
+ return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __str__(self):
+ return repr(self)
+
+ def __repr__(self):
+ return "%s.%s(filename='%s', size=%s, digest='%s', algorithm='%s', visibility=%r)" % (
+ __name__, self.__class__.__name__, self.filename, self.size,
+ self.digest, self.algorithm, self.visibility)
+
+ def present(self):
+ # Doesn't check validity
+ return os.path.exists(self.filename)
+
+ def validate_size(self):
+ if self.present():
+ return self.size == os.path.getsize(self.filename)
+ else:
+ log.debug(
+ "trying to validate size on a missing file, %s", self.filename)
+ raise MissingFileException(filename=self.filename)
+
+ def validate_digest(self):
+ if self.present():
+ with open(self.filename, 'rb') as f:
+ return self.digest == digest_file(f, self.algorithm)
+ else:
+ log.debug(
+                "trying to validate digest on a missing file, %s", self.filename)
+ raise MissingFileException(filename=self.filename)
+
+ def validate(self):
+ if self.size is None or self.validate_size():
+ if self.validate_digest():
+ return True
+ return False
+
+ def describe(self):
+ if self.present() and self.validate():
+ return "'%s' is present and valid" % self.filename
+ elif self.present():
+ return "'%s' is present and invalid" % self.filename
+ else:
+ return "'%s' is absent" % self.filename
+
+
+def create_file_record(filename, algorithm):
+ fo = open(filename, 'rb')
+ stored_filename = os.path.split(filename)[1]
+ fr = FileRecord(stored_filename, os.path.getsize(
+ filename), digest_file(fo, algorithm), algorithm)
+ fo.close()
+ return fr
+
+
+class FileRecordJSONEncoder(json.JSONEncoder):
+
+ def encode_file_record(self, obj):
+ if not issubclass(type(obj), FileRecord):
+ err = "FileRecordJSONEncoder is only for FileRecord and lists of FileRecords, " \
+ "not %s" % obj.__class__.__name__
+ log.warn(err)
+ raise FileRecordJSONEncoderException(err)
+ else:
+ rv = {
+ 'filename': obj.filename,
+ 'size': obj.size,
+ 'algorithm': obj.algorithm,
+ 'digest': obj.digest,
+ }
+ if obj.unpack:
+ rv['unpack'] = True
+ if obj.version:
+ rv['version'] = obj.version
+ if obj.visibility is not None:
+ rv['visibility'] = obj.visibility
+ return rv
+
+ def default(self, f):
+ if issubclass(type(f), list):
+ record_list = []
+ for i in f:
+ record_list.append(self.encode_file_record(i))
+ return record_list
+ else:
+ return self.encode_file_record(f)
+
+
+class FileRecordJSONDecoder(json.JSONDecoder):
+
+ """I help the json module materialize a FileRecord from
+ a JSON file. I understand FileRecords and lists of
+ FileRecords. I ignore things that I don't expect for now"""
+ # TODO: make this more explicit in what it's looking for
+ # and error out on unexpected things
+
+ def process_file_records(self, obj):
+ if isinstance(obj, list):
+ record_list = []
+ for i in obj:
+ record = self.process_file_records(i)
+ if issubclass(type(record), FileRecord):
+ record_list.append(record)
+ return record_list
+ required_fields = [
+ 'filename',
+ 'size',
+ 'algorithm',
+ 'digest',
+ ]
+ if isinstance(obj, dict):
+ missing = False
+ for req in required_fields:
+ if req not in obj:
+ missing = True
+ break
+
+ if not missing:
+ unpack = obj.get('unpack', False)
+ version = obj.get('version', None)
+ visibility = obj.get('visibility', None)
+ rv = FileRecord(
+ obj['filename'], obj['size'], obj['digest'], obj['algorithm'],
+ unpack, version, visibility)
+ log.debug("materialized %s" % rv)
+ return rv
+ return obj
+
+ def decode(self, s):
+ decoded = json.JSONDecoder.decode(self, s)
+ rv = self.process_file_records(decoded)
+ return rv
+
+
+class Manifest(object):
+
+ valid_formats = ('json',)
+
+ def __init__(self, file_records=None):
+ self.file_records = file_records or []
+
+ def __eq__(self, other):
+ if self is other:
+ return True
+ if len(self.file_records) != len(other.file_records):
+ log.debug('Manifests differ in number of files')
+ return False
+ # sort the file records by filename before comparing
+ mine = sorted((fr.filename, fr) for fr in self.file_records)
+ theirs = sorted((fr.filename, fr) for fr in other.file_records)
+ return mine == theirs
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __deepcopy__(self, memo):
+ # This is required for a deep copy
+ return Manifest(self.file_records[:])
+
+ def __copy__(self):
+ return Manifest(self.file_records)
+
+ def copy(self):
+ return Manifest(self.file_records[:])
+
+ def present(self):
+ return all(i.present() for i in self.file_records)
+
+ def validate_sizes(self):
+ return all(i.validate_size() for i in self.file_records)
+
+ def validate_digests(self):
+ return all(i.validate_digest() for i in self.file_records)
+
+ def validate(self):
+ return all(i.validate() for i in self.file_records)
+
+ def load(self, data_file, fmt='json'):
+ assert fmt in self.valid_formats
+ if fmt == 'json':
+ try:
+ self.file_records.extend(
+ json.load(data_file, cls=FileRecordJSONDecoder))
+ except ValueError:
+ raise InvalidManifest("trying to read invalid manifest file")
+
+ def loads(self, data_string, fmt='json'):
+ assert fmt in self.valid_formats
+ if fmt == 'json':
+ try:
+ self.file_records.extend(
+ json.loads(data_string, cls=FileRecordJSONDecoder))
+ except ValueError:
+ raise InvalidManifest("trying to read invalid manifest file")
+
+ def dump(self, output_file, fmt='json'):
+ assert fmt in self.valid_formats
+ if fmt == 'json':
+ return json.dump(
+ self.file_records, output_file,
+ indent=2, separators=(',', ': '),
+ cls=FileRecordJSONEncoder,
+ )
+
+ def dumps(self, fmt='json'):
+ assert fmt in self.valid_formats
+ if fmt == 'json':
+ return json.dumps(
+ self.file_records,
+ indent=2, separators=(',', ': '),
+ cls=FileRecordJSONEncoder,
+ )
+
+
+def digest_file(f, a):
+    """I take a file-like object 'f' and return a hex string containing
+    the result of the algorithm 'a' applied to 'f'."""
+ h = hashlib.new(a)
+ chunk_size = 1024 * 10
+ data = f.read(chunk_size)
+ while data:
+ h.update(data)
+ data = f.read(chunk_size)
+ name = repr(f.name) if hasattr(f, 'name') else 'a file'
+ log.debug('hashed %s with %s to be %s', name, a, h.hexdigest())
+ return h.hexdigest()
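+
+# A minimal usage sketch for digest_file() (illustrative; not called this way
+# anywhere in this module):
+#
+#     with open('manifest.tt', 'rb') as f:
+#         print(digest_file(f, 'sha512'))  # sha512 is also what get_hexdigest() uses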
+
+
+def execute(cmd):
+ """Execute CMD, logging its stdout at the info level"""
+ process = Popen(cmd, shell=True, stdout=PIPE)
+ while True:
+ line = process.stdout.readline()
+ if not line:
+ break
+ log.info(line.replace('\n', ' '))
+ return process.wait() == 0
+
+
+def open_manifest(manifest_file):
+ """I know how to take a filename and load it into a Manifest object"""
+ if os.path.exists(manifest_file):
+ manifest = Manifest()
+ with open(manifest_file, "rb") as f:
+ manifest.load(f)
+ log.debug("loaded manifest from file '%s'" % manifest_file)
+ return manifest
+ else:
+ log.debug("tried to load absent file '%s' as manifest" % manifest_file)
+ raise InvalidManifest(
+ "manifest file '%s' does not exist" % manifest_file)
+
+
+def list_manifest(manifest_file):
+    """I know how to print all the files listed in a manifest"""
+ try:
+ manifest = open_manifest(manifest_file)
+ except InvalidManifest as e:
+ log.error("failed to load manifest file at '%s': %s" % (
+ manifest_file,
+ str(e),
+ ))
+ return False
+ for f in manifest.file_records:
+ print("{}\t{}\t{}".format("P" if f.present() else "-",
+ "V" if f.present() and f.validate() else "-",
+ f.filename))
+ return True
+
+
+def validate_manifest(manifest_file):
+ """I validate that all files in a manifest are present and valid but
+ don't fetch or delete them if they aren't"""
+ try:
+ manifest = open_manifest(manifest_file)
+ except InvalidManifest as e:
+ log.error("failed to load manifest file at '%s': %s" % (
+ manifest_file,
+ str(e),
+ ))
+ return False
+ invalid_files = []
+ absent_files = []
+ for f in manifest.file_records:
+ if not f.present():
+ absent_files.append(f)
+ else:
+ if not f.validate():
+ invalid_files.append(f)
+ if len(invalid_files + absent_files) == 0:
+ return True
+ else:
+ return False
+
+
+def add_files(manifest_file, algorithm, filenames, version, visibility, unpack):
+    # Returns True if all files were successfully added, False if not,
+    # and doesn't catch library Exceptions.  If any files are already
+    # tracked in the manifest, the return value will be False because they
+    # weren't added.
+ all_files_added = True
+    # Create an old_manifest object to add to
+ if os.path.exists(manifest_file):
+ old_manifest = open_manifest(manifest_file)
+ else:
+ old_manifest = Manifest()
+ log.debug("creating a new manifest file")
+ new_manifest = Manifest() # use a different manifest for the output
+ for filename in filenames:
+ log.debug("adding %s" % filename)
+ path, name = os.path.split(filename)
+ new_fr = create_file_record(filename, algorithm)
+ new_fr.version = version
+ new_fr.visibility = visibility
+ new_fr.unpack = unpack
+ log.debug("appending a new file record to manifest file")
+ add = True
+ for fr in old_manifest.file_records:
+            log.debug("manifest file has '%s'" % "', '".join(
+ [x.filename for x in old_manifest.file_records]))
+ if new_fr == fr:
+ log.info("file already in old_manifest")
+ add = False
+ elif filename == fr.filename:
+ log.error("manifest already contains a different file named %s" % filename)
+ add = False
+ if add:
+ new_manifest.file_records.append(new_fr)
+ log.debug("added '%s' to manifest" % filename)
+ else:
+ all_files_added = False
+ # copy any files in the old manifest that aren't in the new one
+ new_filenames = set(fr.filename for fr in new_manifest.file_records)
+ for old_fr in old_manifest.file_records:
+ if old_fr.filename not in new_filenames:
+ new_manifest.file_records.append(old_fr)
+ with open(manifest_file, **open_attrs) as output:
+ new_manifest.dump(output, fmt='json')
+ return all_files_added
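+
+# A hypothetical invocation of add_files() (the argument values are purely
+# illustrative):
+#
+#     add_files('manifest.tt', 'sha512', ['example.tar.xz'],
+#               version=None, visibility='public', unpack=True)
+#
+# This records example.tar.xz in manifest.tt, returning False if the file
+# was already tracked there.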
+
+
+def touch(f):
+ """Used to modify mtime in cached files;
+ mtime is used by the purge command"""
+ try:
+ os.utime(f, None)
+ except OSError:
+ log.warn('impossible to update utime of file %s' % f)
+
+
+def fetch_file(base_urls, file_record, grabchunk=1024 * 4, auth_file=None, region=None):
+    # A file that is requested to be fetched and already exists locally will
+    # be overwritten by this function
+ fd, temp_path = tempfile.mkstemp(dir=os.getcwd())
+ os.close(fd)
+ fetched_path = None
+ for base_url in base_urls:
+ # Generate the URL for the file on the server side
+ url = urljoin(base_url,
+ '%s/%s' % (file_record.algorithm, file_record.digest))
+ if region is not None:
+ url += '?region=' + region
+
+ log.info("Attempting to fetch from '%s'..." % base_url)
+
+ # Well, the file doesn't exist locally. Let's fetch it.
+ try:
+ req = Request(url)
+ _authorize(req, auth_file)
+ f = urllib2.urlopen(req)
+ log.debug("opened %s for reading" % url)
+ with open(temp_path, **open_attrs) as out:
+ k = True
+ size = 0
+ while k:
+ # TODO: print statistics as file transfers happen both for info and to stop
+ # buildbot timeouts
+ indata = f.read(grabchunk)
+ if PY3:
+ indata = to_text(indata)
+ out.write(indata)
+ size += len(indata)
+ if len(indata) == 0:
+ k = False
+ log.info("File %s fetched from %s as %s" %
+ (file_record.filename, base_url, temp_path))
+ fetched_path = temp_path
+ break
+ except (URLError, HTTPError, ValueError):
+ log.info("...failed to fetch '%s' from %s" %
+ (file_record.filename, base_url), exc_info=True)
+ except IOError: # pragma: no cover
+ log.info("failed to write to temporary file for '%s'" %
+ file_record.filename, exc_info=True)
+
+ # cleanup temp file in case of issues
+ if fetched_path:
+ return os.path.split(fetched_path)[1]
+ else:
+ try:
+ os.remove(temp_path)
+ except OSError: # pragma: no cover
+ pass
+ return None
+
+
+def clean_path(dirname):
+    """Remove a subtree if it exists. Helper for unpack_file()."""
+ if os.path.exists(dirname):
+ log.info('rm tree: %s' % dirname)
+ shutil.rmtree(dirname)
+
+
+CHECKSUM_SUFFIX = ".checksum"
+
+
+def unpack_file(filename):
+    """Untar `filename`, assuming it is uncompressed or compressed with bzip2,
+    xz, or gzip, or unzip it if it is a zip file. The file is assumed to
+    contain a single directory with a name matching the base of the given
+    filename. Xz support is handled by shelling out to 'xz'."""
+ if os.path.isfile(filename) and tarfile.is_tarfile(filename):
+ tar_file, zip_ext = os.path.splitext(filename)
+ base_file, tar_ext = os.path.splitext(tar_file)
+ clean_path(base_file)
+ log.info('untarring "%s"' % filename)
+ tar = tarfile.open(filename)
+ tar.extractall()
+ tar.close()
+ elif os.path.isfile(filename) and filename.endswith('.tar.xz'):
+ base_file = filename.replace('.tar.xz', '')
+ clean_path(base_file)
+ log.info('untarring "%s"' % filename)
+ # Not using tar -Jxf because it fails on Windows for some reason.
+ process = Popen(['xz', '-d', '-c', filename], stdout=PIPE)
+ stdout, stderr = process.communicate()
+ if process.returncode != 0:
+ return False
+ fileobj = BytesIO()
+ fileobj.write(stdout)
+ fileobj.seek(0)
+ tar = tarfile.open(fileobj=fileobj, mode='r|')
+ tar.extractall()
+ tar.close()
+ elif os.path.isfile(filename) and zipfile.is_zipfile(filename):
+ base_file = filename.replace('.zip', '')
+ clean_path(base_file)
+ log.info('unzipping "%s"' % filename)
+ z = zipfile.ZipFile(filename)
+ z.extractall()
+ z.close()
+ else:
+ log.error("Unknown archive extension for filename '%s'" % filename)
+ return False
+ return True
+
+
+def fetch_files(manifest_file, base_urls, filenames=[], cache_folder=None,
+ auth_file=None, region=None):
+    # Let's load the manifest file
+ try:
+ manifest = open_manifest(manifest_file)
+ except InvalidManifest as e:
+ log.error("failed to load manifest file at '%s': %s" % (
+ manifest_file,
+ str(e),
+ ))
+ return False
+
+    # We want to track files that are already present in the current working
+    # directory AND valid; we will not need to fetch these.
+ present_files = []
+
+ # We want to track files that fail to be fetched as well as
+ # files that are fetched
+ failed_files = []
+ fetched_files = []
+
+ # Files that we want to unpack.
+ unpack_files = []
+
+    # Let's go through the manifest and fetch the files that we want
+ for f in manifest.file_records:
+ # case 1: files are already present
+ if f.present():
+ if f.validate():
+ present_files.append(f.filename)
+ if f.unpack:
+ unpack_files.append(f.filename)
+ else:
+                # We have an invalid file here; better to clean it up.
+                # This invalid file needs to be replaced with a good one, either
+                # from the local cache or fetched from a tooltool server.
+ log.info("File %s is present locally but it is invalid, so I will remove it "
+ "and try to fetch it" % f.filename)
+ os.remove(os.path.join(os.getcwd(), f.filename))
+
+ # check if file is already in cache
+ if cache_folder and f.filename not in present_files:
+ try:
+ shutil.copy(os.path.join(cache_folder, f.digest),
+ os.path.join(os.getcwd(), f.filename))
+ log.info("File %s retrieved from local cache %s" %
+ (f.filename, cache_folder))
+ touch(os.path.join(cache_folder, f.digest))
+
+ filerecord_for_validation = FileRecord(
+ f.filename, f.size, f.digest, f.algorithm)
+ if filerecord_for_validation.validate():
+ present_files.append(f.filename)
+ if f.unpack:
+ unpack_files.append(f.filename)
+ else:
+ # the file copied from the cache is invalid, better to
+ # clean up the cache version itself as well
+ log.warn("File %s retrieved from cache is invalid! I am deleting it from the "
+ "cache as well" % f.filename)
+ os.remove(os.path.join(os.getcwd(), f.filename))
+ os.remove(os.path.join(cache_folder, f.digest))
+ except IOError:
+ log.info("File %s not present in local cache folder %s" %
+ (f.filename, cache_folder))
+
+        # Now try to fetch every file that is not already present and valid,
+        # downloading to a temporary name to avoid race conditions.
+        temp_file_name = None
+        # 'filenames' is the list of filenames to manage; if it is a non-empty
+        # list it acts as a filter. If a filename is already in present_files,
+        # we already have it, either from the working directory or from the
+        # cache.
+ if (f.filename in filenames or len(filenames) == 0) and f.filename not in present_files:
+ log.debug("fetching %s" % f.filename)
+ temp_file_name = fetch_file(base_urls, f, auth_file=auth_file, region=region)
+ if temp_file_name:
+ fetched_files.append((f, temp_file_name))
+ else:
+ failed_files.append(f.filename)
+ else:
+ log.debug("skipping %s" % f.filename)
+
+    # Let's ensure that the fetched files match what the manifest specified
+ for localfile, temp_file_name in fetched_files:
+ # since I downloaded to a temp file, I need to perform all validations on the temp file
+ # this is why filerecord_for_validation is created
+
+ filerecord_for_validation = FileRecord(
+ temp_file_name, localfile.size, localfile.digest, localfile.algorithm)
+
+ if filerecord_for_validation.validate():
+ # great!
+ # I can rename the temp file
+ log.info("File integrity verified, renaming %s to %s" %
+ (temp_file_name, localfile.filename))
+ os.rename(os.path.join(os.getcwd(), temp_file_name),
+ os.path.join(os.getcwd(), localfile.filename))
+
+ if localfile.unpack:
+ unpack_files.append(localfile.filename)
+
+ # if I am using a cache and a new file has just been retrieved from a
+ # remote location, I need to update the cache as well
+ if cache_folder:
+ log.info("Updating local cache %s..." % cache_folder)
+ try:
+ if not os.path.exists(cache_folder):
+ log.info("Creating cache in %s..." % cache_folder)
+ os.makedirs(cache_folder, 0o0700)
+ shutil.copy(os.path.join(os.getcwd(), localfile.filename),
+ os.path.join(cache_folder, localfile.digest))
+ log.info("Local cache %s updated with %s" % (cache_folder,
+ localfile.filename))
+ touch(os.path.join(cache_folder, localfile.digest))
+ except (OSError, IOError):
+                    log.warning('Unable to add file %s to cache folder %s' %
+                                (localfile.filename, cache_folder), exc_info=True)
+ else:
+ failed_files.append(localfile.filename)
+ log.error("'%s'" % filerecord_for_validation.describe())
+ os.remove(temp_file_name)
+
+ # Unpack files that need to be unpacked.
+ for filename in unpack_files:
+ if not unpack_file(filename):
+ failed_files.append(filename)
+
+ # If we failed to fetch or validate a file, we need to fail
+ if len(failed_files) > 0:
+        log.error("The following files failed: '%s'" %
+                  "', '".join(failed_files))
+ return False
+ return True
+
+
+def freespace(p):
+ "Returns the number of bytes free under directory `p`"
+ if sys.platform == 'win32': # pragma: no cover
+ # os.statvfs doesn't work on Windows
+ import win32file
+
+ secsPerClus, bytesPerSec, nFreeClus, totClus = win32file.GetDiskFreeSpace(
+ p)
+ return secsPerClus * bytesPerSec * nFreeClus
+ else:
+ r = os.statvfs(p)
+ return r.f_frsize * r.f_bavail
+
+
+def purge(folder, gigs):
+    """If gigs is non-zero, delete files in `folder`, oldest first, until
+    `gigs` GB are free. If gigs is 0, a full purge is performed.
+    Files in subfolders are not deleted."""
+
+ full_purge = bool(gigs == 0)
+ gigs *= 1024 * 1024 * 1024
+
+ if not full_purge and freespace(folder) >= gigs:
+ log.info("No need to cleanup")
+ return
+
+ files = []
+ for f in os.listdir(folder):
+ p = os.path.join(folder, f)
+ # it deletes files in folder without going into subfolders,
+ # assuming the cache has a flat structure
+ if not os.path.isfile(p):
+ continue
+ mtime = os.path.getmtime(p)
+ files.append((mtime, p))
+
+ # iterate files sorted by mtime
+ for _, f in sorted(files):
+ log.info("removing %s to free up space" % f)
+ try:
+ os.remove(f)
+ except OSError:
+            log.info("Unable to remove %s" % f, exc_info=True)
+ if not full_purge and freespace(folder) >= gigs:
+ break
+
+
+def _log_api_error(e):
+ if hasattr(e, 'hdrs') and e.hdrs['content-type'] == 'application/json':
+ json_resp = json.load(e.fp)
+ log.error("%s: %s" % (json_resp['error']['name'],
+ json_resp['error']['description']))
+ else:
+ log.exception("Error making RelengAPI request:")
+
+
+def _authorize(req, auth_file):
+ if not auth_file:
+ return
+
+ is_taskcluster_auth = False
+ with open(auth_file) as f:
+ auth_file_content = f.read().strip()
+ try:
+ auth_file_content = json.loads(auth_file_content)
+ is_taskcluster_auth = True
+ except Exception:
+ pass
+
+ if is_taskcluster_auth:
+ taskcluster_header = make_taskcluster_header(auth_file_content, req)
+ log.debug("Using taskcluster credentials in %s" % auth_file)
+ req.add_unredirected_header('Authorization', taskcluster_header)
+ else:
+ log.debug("Using Bearer token in %s" % auth_file)
+ req.add_unredirected_header('Authorization', 'Bearer %s' % auth_file_content)
+
+
+def _send_batch(base_url, auth_file, batch, region):
+ url = urljoin(base_url, 'upload')
+ if region is not None:
+ url += "?region=" + region
+ if PY3:
+ data = to_binary(json.dumps(batch))
+ else:
+ data = json.dumps(batch)
+ req = Request(url, data, {'Content-Type': 'application/json'})
+ _authorize(req, auth_file)
+ try:
+ resp = urllib2.urlopen(req)
+ except (URLError, HTTPError) as e:
+ _log_api_error(e)
+ return None
+ return json.load(resp)['result']
+
+
+def _s3_upload(filename, file):
+ # urllib2 does not support streaming, so we fall back to good old httplib
+ url = urlparse(file['put_url'])
+ cls = HTTPSConnection if url.scheme == 'https' else HTTPConnection
+ host, port = url.netloc.split(':') if ':' in url.netloc else (url.netloc, 443)
+ port = int(port)
+ conn = cls(host, port)
+ try:
+ req_path = "%s?%s" % (url.path, url.query) if url.query else url.path
+ with open(filename, 'rb') as f:
+ content = f.read()
+ content_length = len(content)
+ f.seek(0)
+ conn.request(
+ 'PUT',
+ req_path,
+ f,
+ {
+ 'Content-Type': 'application/octet-stream',
+ 'Content-Length': str(content_length),
+ },
+ )
+ resp = conn.getresponse()
+ resp_body = resp.read()
+ conn.close()
+ if resp.status != 200:
+ raise RuntimeError("Non-200 return from AWS: %s %s\n%s" %
+ (resp.status, resp.reason, resp_body))
+ except Exception:
+ file['upload_exception'] = sys.exc_info()
+ file['upload_ok'] = False
+ else:
+ file['upload_ok'] = True
+
+
+def _notify_upload_complete(base_url, auth_file, file):
+ req = Request(
+ urljoin(
+ base_url,
+ 'upload/complete/%(algorithm)s/%(digest)s' % file))
+ _authorize(req, auth_file)
+ try:
+ urllib2.urlopen(req)
+ except HTTPError as e:
+ if e.code != 409:
+ _log_api_error(e)
+ return
+ # 409 indicates that the upload URL hasn't expired yet and we
+ # should retry after a delay
+ to_wait = int(e.headers.get('X-Retry-After', 60))
+ log.warning("Waiting %d seconds for upload URLs to expire" % to_wait)
+ time.sleep(to_wait)
+ _notify_upload_complete(base_url, auth_file, file)
+ except Exception:
+ log.exception("While notifying server of upload completion:")
+
+
+def upload(manifest, message, base_urls, auth_file, region):
+ try:
+ manifest = open_manifest(manifest)
+ except InvalidManifest:
+        log.exception("failed to load manifest file at '%s'" % manifest)
+ return False
+
+ # verify the manifest, since we'll need the files present to upload
+ if not manifest.validate():
+ log.error('manifest is invalid')
+ return False
+
+ if any(fr.visibility is None for fr in manifest.file_records):
+ log.error('All files in a manifest for upload must have a visibility set')
+
+ # convert the manifest to an upload batch
+ batch = {
+ 'message': message,
+ 'files': {},
+ }
+ for fr in manifest.file_records:
+ batch['files'][fr.filename] = {
+ 'size': fr.size,
+ 'digest': fr.digest,
+ 'algorithm': fr.algorithm,
+ 'visibility': fr.visibility,
+ }
+
+ # make the upload request
+ resp = _send_batch(base_urls[0], auth_file, batch, region)
+ if not resp:
+ return None
+ files = resp['files']
+
+ # Upload the files, each in a thread. This allows us to start all of the
+ # uploads before any of the URLs expire.
+ threads = {}
+ for filename, file in files.items():
+ if 'put_url' in file:
+ log.info("%s: starting upload" % (filename,))
+ thd = threading.Thread(target=_s3_upload,
+ args=(filename, file))
+            thd.daemon = True
+ thd.start()
+ threads[filename] = thd
+ else:
+ log.info("%s: already exists on server" % (filename,))
+
+ # re-join all of those threads as they exit
+ success = True
+ while threads:
+ for filename, thread in list(threads.items()):
+ if not thread.is_alive():
+ # _s3_upload has annotated file with result information
+ file = files[filename]
+ thread.join()
+ if file['upload_ok']:
+ log.info("%s: uploaded" % filename)
+ else:
+ log.error("%s: failed" % filename,
+ exc_info=file['upload_exception'])
+ success = False
+ del threads[filename]
+
+ # notify the server that the uploads are completed. If the notification
+ # fails, we don't consider that an error (the server will notice
+ # eventually)
+ for filename, file in files.items():
+ if 'put_url' in file and file['upload_ok']:
+ log.info("notifying server of upload completion for %s" % (filename,))
+ _notify_upload_complete(base_urls[0], auth_file, file)
+
+ return success
+
+
+def process_command(options, args):
+ """ I know how to take a list of program arguments and
+ start doing the right thing with them"""
+ cmd = args[0]
+ cmd_args = args[1:]
+ log.debug("processing '%s' command with args '%s'" %
+ (cmd, '", "'.join(cmd_args)))
+ log.debug("using options: %s" % options)
+
+ if cmd == 'list':
+ return list_manifest(options['manifest'])
+ if cmd == 'validate':
+ return validate_manifest(options['manifest'])
+ elif cmd == 'add':
+ return add_files(options['manifest'], options['algorithm'], cmd_args,
+ options['version'], options['visibility'],
+ options['unpack'])
+ elif cmd == 'purge':
+ if options['cache_folder']:
+ purge(folder=options['cache_folder'], gigs=options['size'])
+ else:
+ log.critical('please specify the cache folder to be purged')
+ return False
+ elif cmd == 'fetch':
+ return fetch_files(
+ options['manifest'],
+ options['base_url'],
+ cmd_args,
+ cache_folder=options['cache_folder'],
+ auth_file=options.get("auth_file"),
+ region=options.get('region'))
+ elif cmd == 'upload':
+ if not options.get('message'):
+ log.critical('upload command requires a message')
+ return False
+ return upload(
+ options.get('manifest'),
+ options.get('message'),
+ options.get('base_url'),
+ options.get('auth_file'),
+ options.get('region'))
+ else:
+ log.critical('command "%s" is not implemented' % cmd)
+ return False
+
+
+def main(argv, _skip_logging=False):
+ # Set up option parsing
+ parser = optparse.OptionParser()
+ parser.add_option('-q', '--quiet', default=logging.INFO,
+ dest='loglevel', action='store_const', const=logging.ERROR)
+ parser.add_option('-v', '--verbose',
+ dest='loglevel', action='store_const', const=logging.DEBUG)
+ parser.add_option('-m', '--manifest', default=DEFAULT_MANIFEST_NAME,
+ dest='manifest', action='store',
+ help='specify the manifest file to be operated on')
+ parser.add_option('-d', '--algorithm', default='sha512',
+ dest='algorithm', action='store',
+ help='hashing algorithm to use (only sha512 is allowed)')
+ parser.add_option('--visibility', default=None,
+ dest='visibility', choices=['internal', 'public'],
+ help='Visibility level of this file; "internal" is for '
+ 'files that cannot be distributed out of the company '
+ 'but not for secrets; "public" files are available to '
+ 'anyone without restriction')
+ parser.add_option('--unpack', default=False,
+ dest='unpack', action='store_true',
+ help='Request unpacking this file after fetch.'
+ ' This is helpful with tarballs.')
+ parser.add_option('--version', default=None,
+ dest='version', action='store',
+ help='Version string for this file. This annotates the '
+ 'manifest entry with a version string to help '
+ 'identify the contents.')
+ parser.add_option('-o', '--overwrite', default=False,
+ dest='overwrite', action='store_true',
+ help='UNUSED; present for backward compatibility')
+ parser.add_option('--url', dest='base_url', action='append',
+ help='RelengAPI URL ending with /tooltool/; default '
+ 'is appropriate for Mozilla')
+ parser.add_option('-c', '--cache-folder', dest='cache_folder',
+ help='Local cache folder')
+ parser.add_option('-s', '--size',
+ help='free space required (in GB)', dest='size',
+ type='float', default=0.)
+ parser.add_option('-r', '--region', help='Preferred AWS region for upload or fetch; '
+ 'example: --region=us-west-2')
+ parser.add_option('--message',
+ help='The "commit message" for an upload; format with a bug number '
+ 'and brief comment',
+ dest='message')
+ parser.add_option('--authentication-file',
+ help='Use the RelengAPI token found in the given file to '
+ 'authenticate to the RelengAPI server.',
+ dest='auth_file')
+
+ (options_obj, args) = parser.parse_args(argv[1:])
+
+ # default the options list if not provided
+ if not options_obj.base_url:
+ options_obj.base_url = ['https://tooltool.mozilla-releng.net/']
+
+ # ensure all URLs have a trailing slash
+ def add_slash(url):
+ return url if url.endswith('/') else (url + '/')
+ options_obj.base_url = [add_slash(u) for u in options_obj.base_url]
+
+ # expand ~ in --authentication-file
+ if options_obj.auth_file:
+ options_obj.auth_file = os.path.expanduser(options_obj.auth_file)
+
+ # Dictionaries are easier to work with
+ options = vars(options_obj)
+
+ log.setLevel(options['loglevel'])
+
+ # Set up logging, for now just to the console
+ if not _skip_logging: # pragma: no cover
+ ch = logging.StreamHandler()
+ cf = logging.Formatter("%(levelname)s - %(message)s")
+ ch.setFormatter(cf)
+ log.addHandler(ch)
+
+ if options['algorithm'] != 'sha512':
+ parser.error('only --algorithm sha512 is supported')
+
+ if len(args) < 1:
+ parser.error('You must specify a command')
+
+ return 0 if process_command(options, args) else 1
+
+
+if __name__ == "__main__": # pragma: no cover
+ sys.exit(main(sys.argv))
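+
+# A minimal usage sketch (not part of the original script). The manifest and
+# file names below are hypothetical, and the module is assumed to be importable
+# as `tooltool`; only options defined by the parser above are used:
+#
+#     import tooltool
+#     # Record a local file in a manifest with a sha512 digest and public visibility:
+#     tooltool.main(['tooltool', 'add', '--visibility', 'public',
+#                    '-m', 'releng.manifest', 'clang.tar.xz'])
+#     # Fetch (and unpack, where requested) everything listed in the manifest,
+#     # keeping a local cache of downloaded artifacts:
+#     tooltool.main(['tooltool', 'fetch', '-m', 'releng.manifest',
+#                    '-c', '/tmp/tooltool-cache'])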
diff --git a/testing/web-platform/tests/tools/third_party/webencodings/PKG-INFO b/testing/web-platform/tests/tools/third_party/webencodings/PKG-INFO
new file mode 100644
index 0000000000..2a827bbad9
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/webencodings/PKG-INFO
@@ -0,0 +1,50 @@
+Metadata-Version: 1.1
+Name: webencodings
+Version: 0.5.1
+Summary: Character encoding aliases for legacy web content
+Home-page: https://github.com/SimonSapin/python-webencodings
+Author: Geoffrey Sneddon
+Author-email: me@gsnedders.com
+License: BSD
+Description: python-webencodings
+ ===================
+
+ This is a Python implementation of the `WHATWG Encoding standard
+ <http://encoding.spec.whatwg.org/>`_.
+
+ * Latest documentation: http://packages.python.org/webencodings/
+ * Source code and issue tracker:
+ https://github.com/gsnedders/python-webencodings
+ * PyPI releases: http://pypi.python.org/pypi/webencodings
+ * License: BSD
+ * Python 2.6+ and 3.3+
+
+ In order to be compatible with legacy web content
+ when interpreting something like ``Content-Type: text/html; charset=latin1``,
+ tools need to use a particular set of aliases for encoding labels
+ as well as some overriding rules.
+ For example, ``US-ASCII`` and ``iso-8859-1`` on the web are actually
+        aliases for ``windows-1252``, and a UTF-8 or UTF-16 BOM takes precedence
+ over any other encoding declaration.
+ The Encoding standard defines all such details so that implementations do
+ not have to reverse-engineer each other.
+
+ This module has encoding labels and BOM detection,
+ but the actual implementation for encoders and decoders is Python’s.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP
diff --git a/testing/web-platform/tests/tools/third_party/webencodings/README.rst b/testing/web-platform/tests/tools/third_party/webencodings/README.rst
new file mode 100644
index 0000000000..c7e0f0cc3e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/webencodings/README.rst
@@ -0,0 +1,25 @@
+python-webencodings
+===================
+
+This is a Python implementation of the `WHATWG Encoding standard
+<http://encoding.spec.whatwg.org/>`_.
+
+* Latest documentation: http://packages.python.org/webencodings/
+* Source code and issue tracker:
+ https://github.com/gsnedders/python-webencodings
+* PyPI releases: http://pypi.python.org/pypi/webencodings
+* License: BSD
+* Python 2.6+ and 3.3+
+
+In order to be compatible with legacy web content
+when interpreting something like ``Content-Type: text/html; charset=latin1``,
+tools need to use a particular set of aliases for encoding labels
+as well as some overriding rules.
+For example, ``US-ASCII`` and ``iso-8859-1`` on the web are actually
+aliases for ``windows-1252``, and a UTF-8 or UTF-16 BOM takes precedence
+over any other encoding declaration.
+The Encoding standard defines all such details so that implementations do
+not have to reverse-engineer each other.
+
+This module has encoding labels and BOM detection,
+but the actual implementation for encoders and decoders is Python’s.
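+
+A short usage sketch, based on the API described above (the labels and byte
+strings below are only illustrative)::
+
+    >>> from webencodings import lookup, decode, encode
+    >>> lookup('latin1').name                        # legacy labels are remapped
+    'windows-1252'
+    >>> decode(b'\xef\xbb\xbfcaf\xc3\xa9', 'latin1') # a UTF-8 BOM wins over the fallback label
+    ('café', <Encoding utf-8>)
+    >>> encode('café', 'utf-8')
+    b'caf\xc3\xa9'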
diff --git a/testing/web-platform/tests/tools/third_party/webencodings/setup.cfg b/testing/web-platform/tests/tools/third_party/webencodings/setup.cfg
new file mode 100644
index 0000000000..460b0b4057
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/webencodings/setup.cfg
@@ -0,0 +1,14 @@
+[bdist_wheel]
+universal = 1
+
+[build_sphinx]
+source-dir = docs
+build-dir = docs/_build
+
+[upload_sphinx]
+upload-dir = docs/_build/html
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/testing/web-platform/tests/tools/third_party/webencodings/setup.py b/testing/web-platform/tests/tools/third_party/webencodings/setup.py
new file mode 100644
index 0000000000..cf341cfd47
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/webencodings/setup.py
@@ -0,0 +1,47 @@
+from setuptools import setup, find_packages
+import io
+from os import path
+import re
+
+
+VERSION = re.search("VERSION = '([^']+)'", io.open(
+ path.join(path.dirname(__file__), 'webencodings', '__init__.py'),
+ encoding='utf-8'
+).read().strip()).group(1)
+
+LONG_DESCRIPTION = io.open(
+ path.join(path.dirname(__file__), 'README.rst'),
+ encoding='utf-8'
+).read()
+
+
+setup(
+ name='webencodings',
+ version=VERSION,
+ url='https://github.com/SimonSapin/python-webencodings',
+ license='BSD',
+ author='Simon Sapin',
+ author_email='simon.sapin@exyr.org',
+ maintainer='Geoffrey Sneddon',
+ maintainer_email='me@gsnedders.com',
+ description='Character encoding aliases for legacy web content',
+ long_description=LONG_DESCRIPTION,
+ classifiers=[
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: BSD License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+ 'Topic :: Internet :: WWW/HTTP',
+ ],
+ packages=find_packages(),
+)
diff --git a/testing/web-platform/tests/tools/third_party/webencodings/webencodings/__init__.py b/testing/web-platform/tests/tools/third_party/webencodings/webencodings/__init__.py
new file mode 100644
index 0000000000..d21d697c88
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/webencodings/webencodings/__init__.py
@@ -0,0 +1,342 @@
+# coding: utf-8
+"""
+
+ webencodings
+ ~~~~~~~~~~~~
+
+ This is a Python implementation of the `WHATWG Encoding standard
+ <http://encoding.spec.whatwg.org/>`. See README for details.
+
+ :copyright: Copyright 2012 by Simon Sapin
+ :license: BSD, see LICENSE for details.
+
+"""
+
+from __future__ import unicode_literals
+
+import codecs
+
+from .labels import LABELS
+
+
+VERSION = '0.5.1'
+
+
+# Some names in Encoding are not valid Python aliases. Remap these.
+PYTHON_NAMES = {
+ 'iso-8859-8-i': 'iso-8859-8',
+ 'x-mac-cyrillic': 'mac-cyrillic',
+ 'macintosh': 'mac-roman',
+ 'windows-874': 'cp874'}
+
+CACHE = {}
+
+
+def ascii_lower(string):
+ r"""Transform (only) ASCII letters to lower case: A-Z is mapped to a-z.
+
+    :param string: A Unicode string.
+ :returns: A new Unicode string.
+
+ This is used for `ASCII case-insensitive
+ <http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_
+ matching of encoding labels.
+ The same matching is also used, among other things,
+ for `CSS keywords <http://dev.w3.org/csswg/css-values/#keywords>`_.
+
+ This is different from the :meth:`~py:str.lower` method of Unicode strings
+    which also affects non-ASCII characters,
+ sometimes mapping them into the ASCII range:
+
+ >>> keyword = u'Bac\N{KELVIN SIGN}ground'
+ >>> assert keyword.lower() == u'background'
+ >>> assert ascii_lower(keyword) != keyword.lower()
+ >>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground'
+
+ """
+ # This turns out to be faster than unicode.translate()
+ return string.encode('utf8').lower().decode('utf8')
+
+
+def lookup(label):
+ """
+ Look for an encoding by its label.
+ This is the spec’s `get an encoding
+ <http://encoding.spec.whatwg.org/#concept-encoding-get>`_ algorithm.
+ Supported labels are listed there.
+
+ :param label: A string.
+ :returns:
+ An :class:`Encoding` object, or :obj:`None` for an unknown label.
+
+ """
+ # Only strip ASCII whitespace: U+0009, U+000A, U+000C, U+000D, and U+0020.
+ label = ascii_lower(label.strip('\t\n\f\r '))
+ name = LABELS.get(label)
+ if name is None:
+ return None
+ encoding = CACHE.get(name)
+ if encoding is None:
+ if name == 'x-user-defined':
+ from .x_user_defined import codec_info
+ else:
+ python_name = PYTHON_NAMES.get(name, name)
+ # Any python_name value that gets to here should be valid.
+ codec_info = codecs.lookup(python_name)
+ encoding = Encoding(name, codec_info)
+ CACHE[name] = encoding
+ return encoding
+
+
+def _get_encoding(encoding_or_label):
+ """
+ Accept either an encoding object or label.
+
+    :param encoding_or_label: An :class:`Encoding` object or a label string.
+ :returns: An :class:`Encoding` object.
+ :raises: :exc:`~exceptions.LookupError` for an unknown label.
+
+ """
+ if hasattr(encoding_or_label, 'codec_info'):
+ return encoding_or_label
+
+ encoding = lookup(encoding_or_label)
+ if encoding is None:
+ raise LookupError('Unknown encoding label: %r' % encoding_or_label)
+ return encoding
+
+
+class Encoding(object):
+    """Represents a character encoding such as UTF-8,
+ that can be used for decoding or encoding.
+
+ .. attribute:: name
+
+ Canonical name of the encoding
+
+ .. attribute:: codec_info
+
+ The actual implementation of the encoding,
+ a stdlib :class:`~codecs.CodecInfo` object.
+ See :func:`codecs.register`.
+
+ """
+ def __init__(self, name, codec_info):
+ self.name = name
+ self.codec_info = codec_info
+
+ def __repr__(self):
+ return '<Encoding %s>' % self.name
+
+
+#: The UTF-8 encoding. Should be used for new content and formats.
+UTF8 = lookup('utf-8')
+
+_UTF16LE = lookup('utf-16le')
+_UTF16BE = lookup('utf-16be')
+
+
+def decode(input, fallback_encoding, errors='replace'):
+ """
+ Decode a single string.
+
+ :param input: A byte string
+ :param fallback_encoding:
+ An :class:`Encoding` object or a label string.
+        The encoding to use if :obj:`input` does not have a BOM.
+ :param errors: Type of error handling. See :func:`codecs.register`.
+ :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
+ :return:
+        A ``(output, encoding)`` tuple of a Unicode string
+ and an :obj:`Encoding`.
+
+ """
+ # Fail early if `encoding` is an invalid label.
+ fallback_encoding = _get_encoding(fallback_encoding)
+ bom_encoding, input = _detect_bom(input)
+ encoding = bom_encoding or fallback_encoding
+ return encoding.codec_info.decode(input, errors)[0], encoding
+
+
+def _detect_bom(input):
+ """Return (bom_encoding, input), with any BOM removed from the input."""
+ if input.startswith(b'\xFF\xFE'):
+ return _UTF16LE, input[2:]
+ if input.startswith(b'\xFE\xFF'):
+ return _UTF16BE, input[2:]
+ if input.startswith(b'\xEF\xBB\xBF'):
+ return UTF8, input[3:]
+ return None, input
+
+
+def encode(input, encoding=UTF8, errors='strict'):
+ """
+ Encode a single string.
+
+    :param input: A Unicode string.
+ :param encoding: An :class:`Encoding` object or a label string.
+ :param errors: Type of error handling. See :func:`codecs.register`.
+ :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
+ :return: A byte string.
+
+ """
+ return _get_encoding(encoding).codec_info.encode(input, errors)[0]
+
+
+def iter_decode(input, fallback_encoding, errors='replace'):
+ """
+ "Pull"-based decoder.
+
+ :param input:
+ An iterable of byte strings.
+
+ The input is first consumed just enough to determine the encoding
+        based on the presence of a BOM,
+        then consumed on demand as the returned iterable is consumed.
+ :param fallback_encoding:
+ An :class:`Encoding` object or a label string.
+        The encoding to use if :obj:`input` does not have a BOM.
+ :param errors: Type of error handling. See :func:`codecs.register`.
+ :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
+ :returns:
+ An ``(output, encoding)`` tuple.
+ :obj:`output` is an iterable of Unicode strings,
+ :obj:`encoding` is the :obj:`Encoding` that is being used.
+
+ """
+
+ decoder = IncrementalDecoder(fallback_encoding, errors)
+ generator = _iter_decode_generator(input, decoder)
+ encoding = next(generator)
+ return generator, encoding
+
+
+def _iter_decode_generator(input, decoder):
+ """Return a generator that first yields the :obj:`Encoding`,
+    then yields output chunks as Unicode strings.
+
+ """
+ decode = decoder.decode
+ input = iter(input)
+    for chunk in input:
+        output = decode(chunk)
+ if output:
+ assert decoder.encoding is not None
+ yield decoder.encoding
+ yield output
+ break
+ else:
+ # Input exhausted without determining the encoding
+ output = decode(b'', final=True)
+ assert decoder.encoding is not None
+ yield decoder.encoding
+ if output:
+ yield output
+ return
+
+    for chunk in input:
+        output = decode(chunk)
+ if output:
+ yield output
+ output = decode(b'', final=True)
+ if output:
+ yield output
+
+
+def iter_encode(input, encoding=UTF8, errors='strict'):
+ """
+    “Pull”-based encoder.
+
+ :param input: An iterable of Unicode strings.
+ :param encoding: An :class:`Encoding` object or a label string.
+ :param errors: Type of error handling. See :func:`codecs.register`.
+ :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
+ :returns: An iterable of byte strings.
+
+ """
+ # Fail early if `encoding` is an invalid label.
+ encode = IncrementalEncoder(encoding, errors).encode
+ return _iter_encode_generator(input, encode)
+
+
+def _iter_encode_generator(input, encode):
+    for chunk in input:
+        output = encode(chunk)
+ if output:
+ yield output
+ output = encode('', final=True)
+ if output:
+ yield output
+
+
+class IncrementalDecoder(object):
+ """
+    “Push”-based decoder.
+
+ :param fallback_encoding:
+ An :class:`Encoding` object or a label string.
+        The encoding to use if :obj:`input` does not have a BOM.
+ :param errors: Type of error handling. See :func:`codecs.register`.
+ :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
+
+ """
+ def __init__(self, fallback_encoding, errors='replace'):
+ # Fail early if `encoding` is an invalid label.
+ self._fallback_encoding = _get_encoding(fallback_encoding)
+ self._errors = errors
+ self._buffer = b''
+ self._decoder = None
+ #: The actual :class:`Encoding` that is being used,
+ #: or :obj:`None` if that is not determined yet.
+ #: (Ie. if there is not enough input yet to determine
+ #: if there is a BOM.)
+ self.encoding = None # Not known yet.
+
+ def decode(self, input, final=False):
+ """Decode one chunk of the input.
+
+ :param input: A byte string.
+ :param final:
+ Indicate that no more input is available.
+ Must be :obj:`True` if this is the last call.
+        :returns: A Unicode string.
+
+ """
+ decoder = self._decoder
+ if decoder is not None:
+ return decoder(input, final)
+
+ input = self._buffer + input
+ encoding, input = _detect_bom(input)
+ if encoding is None:
+ if len(input) < 3 and not final: # Not enough data yet.
+ self._buffer = input
+ return ''
+ else: # No BOM
+ encoding = self._fallback_encoding
+ decoder = encoding.codec_info.incrementaldecoder(self._errors).decode
+ self._decoder = decoder
+ self.encoding = encoding
+ return decoder(input, final)
+
+
+class IncrementalEncoder(object):
+ """
+    “Push”-based encoder.
+
+ :param encoding: An :class:`Encoding` object or a label string.
+ :param errors: Type of error handling. See :func:`codecs.register`.
+ :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
+
+ .. method:: encode(input, final=False)
+
+        :param input: A Unicode string.
+ :param final:
+ Indicate that no more input is available.
+ Must be :obj:`True` if this is the last call.
+ :returns: A byte string.
+
+ """
+ def __init__(self, encoding=UTF8, errors='strict'):
+ encoding = _get_encoding(encoding)
+ self.encode = encoding.codec_info.incrementalencoder(errors).encode
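+
+# A small doctest-style sketch of the incremental ("push"-based) API above,
+# kept in comments so nothing runs at import time; the byte strings are
+# illustrative only:
+#
+#     >>> d = IncrementalDecoder('latin1')
+#     >>> d.decode(b'\xef\xbb\xbf\xc3')   # BOM detected; incomplete sequence buffered
+#     ''
+#     >>> d.encoding.name
+#     'utf-8'
+#     >>> d.decode(b'\xa9', final=True)
+#     'é'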
diff --git a/testing/web-platform/tests/tools/third_party/webencodings/webencodings/labels.py b/testing/web-platform/tests/tools/third_party/webencodings/webencodings/labels.py
new file mode 100644
index 0000000000..29cbf91ef7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/webencodings/webencodings/labels.py
@@ -0,0 +1,231 @@
+"""
+
+ webencodings.labels
+ ~~~~~~~~~~~~~~~~~~~
+
+ Map encoding labels to their name.
+
+ :copyright: Copyright 2012 by Simon Sapin
+ :license: BSD, see LICENSE for details.
+
+"""
+
+# XXX Do not edit!
+# This file is automatically generated by mklabels.py
+
+LABELS = {
+ 'unicode-1-1-utf-8': 'utf-8',
+ 'utf-8': 'utf-8',
+ 'utf8': 'utf-8',
+ '866': 'ibm866',
+ 'cp866': 'ibm866',
+ 'csibm866': 'ibm866',
+ 'ibm866': 'ibm866',
+ 'csisolatin2': 'iso-8859-2',
+ 'iso-8859-2': 'iso-8859-2',
+ 'iso-ir-101': 'iso-8859-2',
+ 'iso8859-2': 'iso-8859-2',
+ 'iso88592': 'iso-8859-2',
+ 'iso_8859-2': 'iso-8859-2',
+ 'iso_8859-2:1987': 'iso-8859-2',
+ 'l2': 'iso-8859-2',
+ 'latin2': 'iso-8859-2',
+ 'csisolatin3': 'iso-8859-3',
+ 'iso-8859-3': 'iso-8859-3',
+ 'iso-ir-109': 'iso-8859-3',
+ 'iso8859-3': 'iso-8859-3',
+ 'iso88593': 'iso-8859-3',
+ 'iso_8859-3': 'iso-8859-3',
+ 'iso_8859-3:1988': 'iso-8859-3',
+ 'l3': 'iso-8859-3',
+ 'latin3': 'iso-8859-3',
+ 'csisolatin4': 'iso-8859-4',
+ 'iso-8859-4': 'iso-8859-4',
+ 'iso-ir-110': 'iso-8859-4',
+ 'iso8859-4': 'iso-8859-4',
+ 'iso88594': 'iso-8859-4',
+ 'iso_8859-4': 'iso-8859-4',
+ 'iso_8859-4:1988': 'iso-8859-4',
+ 'l4': 'iso-8859-4',
+ 'latin4': 'iso-8859-4',
+ 'csisolatincyrillic': 'iso-8859-5',
+ 'cyrillic': 'iso-8859-5',
+ 'iso-8859-5': 'iso-8859-5',
+ 'iso-ir-144': 'iso-8859-5',
+ 'iso8859-5': 'iso-8859-5',
+ 'iso88595': 'iso-8859-5',
+ 'iso_8859-5': 'iso-8859-5',
+ 'iso_8859-5:1988': 'iso-8859-5',
+ 'arabic': 'iso-8859-6',
+ 'asmo-708': 'iso-8859-6',
+ 'csiso88596e': 'iso-8859-6',
+ 'csiso88596i': 'iso-8859-6',
+ 'csisolatinarabic': 'iso-8859-6',
+ 'ecma-114': 'iso-8859-6',
+ 'iso-8859-6': 'iso-8859-6',
+ 'iso-8859-6-e': 'iso-8859-6',
+ 'iso-8859-6-i': 'iso-8859-6',
+ 'iso-ir-127': 'iso-8859-6',
+ 'iso8859-6': 'iso-8859-6',
+ 'iso88596': 'iso-8859-6',
+ 'iso_8859-6': 'iso-8859-6',
+ 'iso_8859-6:1987': 'iso-8859-6',
+ 'csisolatingreek': 'iso-8859-7',
+ 'ecma-118': 'iso-8859-7',
+ 'elot_928': 'iso-8859-7',
+ 'greek': 'iso-8859-7',
+ 'greek8': 'iso-8859-7',
+ 'iso-8859-7': 'iso-8859-7',
+ 'iso-ir-126': 'iso-8859-7',
+ 'iso8859-7': 'iso-8859-7',
+ 'iso88597': 'iso-8859-7',
+ 'iso_8859-7': 'iso-8859-7',
+ 'iso_8859-7:1987': 'iso-8859-7',
+ 'sun_eu_greek': 'iso-8859-7',
+ 'csiso88598e': 'iso-8859-8',
+ 'csisolatinhebrew': 'iso-8859-8',
+ 'hebrew': 'iso-8859-8',
+ 'iso-8859-8': 'iso-8859-8',
+ 'iso-8859-8-e': 'iso-8859-8',
+ 'iso-ir-138': 'iso-8859-8',
+ 'iso8859-8': 'iso-8859-8',
+ 'iso88598': 'iso-8859-8',
+ 'iso_8859-8': 'iso-8859-8',
+ 'iso_8859-8:1988': 'iso-8859-8',
+ 'visual': 'iso-8859-8',
+ 'csiso88598i': 'iso-8859-8-i',
+ 'iso-8859-8-i': 'iso-8859-8-i',
+ 'logical': 'iso-8859-8-i',
+ 'csisolatin6': 'iso-8859-10',
+ 'iso-8859-10': 'iso-8859-10',
+ 'iso-ir-157': 'iso-8859-10',
+ 'iso8859-10': 'iso-8859-10',
+ 'iso885910': 'iso-8859-10',
+ 'l6': 'iso-8859-10',
+ 'latin6': 'iso-8859-10',
+ 'iso-8859-13': 'iso-8859-13',
+ 'iso8859-13': 'iso-8859-13',
+ 'iso885913': 'iso-8859-13',
+ 'iso-8859-14': 'iso-8859-14',
+ 'iso8859-14': 'iso-8859-14',
+ 'iso885914': 'iso-8859-14',
+ 'csisolatin9': 'iso-8859-15',
+ 'iso-8859-15': 'iso-8859-15',
+ 'iso8859-15': 'iso-8859-15',
+ 'iso885915': 'iso-8859-15',
+ 'iso_8859-15': 'iso-8859-15',
+ 'l9': 'iso-8859-15',
+ 'iso-8859-16': 'iso-8859-16',
+ 'cskoi8r': 'koi8-r',
+ 'koi': 'koi8-r',
+ 'koi8': 'koi8-r',
+ 'koi8-r': 'koi8-r',
+ 'koi8_r': 'koi8-r',
+ 'koi8-u': 'koi8-u',
+ 'csmacintosh': 'macintosh',
+ 'mac': 'macintosh',
+ 'macintosh': 'macintosh',
+ 'x-mac-roman': 'macintosh',
+ 'dos-874': 'windows-874',
+ 'iso-8859-11': 'windows-874',
+ 'iso8859-11': 'windows-874',
+ 'iso885911': 'windows-874',
+ 'tis-620': 'windows-874',
+ 'windows-874': 'windows-874',
+ 'cp1250': 'windows-1250',
+ 'windows-1250': 'windows-1250',
+ 'x-cp1250': 'windows-1250',
+ 'cp1251': 'windows-1251',
+ 'windows-1251': 'windows-1251',
+ 'x-cp1251': 'windows-1251',
+ 'ansi_x3.4-1968': 'windows-1252',
+ 'ascii': 'windows-1252',
+ 'cp1252': 'windows-1252',
+ 'cp819': 'windows-1252',
+ 'csisolatin1': 'windows-1252',
+ 'ibm819': 'windows-1252',
+ 'iso-8859-1': 'windows-1252',
+ 'iso-ir-100': 'windows-1252',
+ 'iso8859-1': 'windows-1252',
+ 'iso88591': 'windows-1252',
+ 'iso_8859-1': 'windows-1252',
+ 'iso_8859-1:1987': 'windows-1252',
+ 'l1': 'windows-1252',
+ 'latin1': 'windows-1252',
+ 'us-ascii': 'windows-1252',
+ 'windows-1252': 'windows-1252',
+ 'x-cp1252': 'windows-1252',
+ 'cp1253': 'windows-1253',
+ 'windows-1253': 'windows-1253',
+ 'x-cp1253': 'windows-1253',
+ 'cp1254': 'windows-1254',
+ 'csisolatin5': 'windows-1254',
+ 'iso-8859-9': 'windows-1254',
+ 'iso-ir-148': 'windows-1254',
+ 'iso8859-9': 'windows-1254',
+ 'iso88599': 'windows-1254',
+ 'iso_8859-9': 'windows-1254',
+ 'iso_8859-9:1989': 'windows-1254',
+ 'l5': 'windows-1254',
+ 'latin5': 'windows-1254',
+ 'windows-1254': 'windows-1254',
+ 'x-cp1254': 'windows-1254',
+ 'cp1255': 'windows-1255',
+ 'windows-1255': 'windows-1255',
+ 'x-cp1255': 'windows-1255',
+ 'cp1256': 'windows-1256',
+ 'windows-1256': 'windows-1256',
+ 'x-cp1256': 'windows-1256',
+ 'cp1257': 'windows-1257',
+ 'windows-1257': 'windows-1257',
+ 'x-cp1257': 'windows-1257',
+ 'cp1258': 'windows-1258',
+ 'windows-1258': 'windows-1258',
+ 'x-cp1258': 'windows-1258',
+ 'x-mac-cyrillic': 'x-mac-cyrillic',
+ 'x-mac-ukrainian': 'x-mac-cyrillic',
+ 'chinese': 'gbk',
+ 'csgb2312': 'gbk',
+ 'csiso58gb231280': 'gbk',
+ 'gb2312': 'gbk',
+ 'gb_2312': 'gbk',
+ 'gb_2312-80': 'gbk',
+ 'gbk': 'gbk',
+ 'iso-ir-58': 'gbk',
+ 'x-gbk': 'gbk',
+ 'gb18030': 'gb18030',
+ 'hz-gb-2312': 'hz-gb-2312',
+ 'big5': 'big5',
+ 'big5-hkscs': 'big5',
+ 'cn-big5': 'big5',
+ 'csbig5': 'big5',
+ 'x-x-big5': 'big5',
+ 'cseucpkdfmtjapanese': 'euc-jp',
+ 'euc-jp': 'euc-jp',
+ 'x-euc-jp': 'euc-jp',
+ 'csiso2022jp': 'iso-2022-jp',
+ 'iso-2022-jp': 'iso-2022-jp',
+ 'csshiftjis': 'shift_jis',
+ 'ms_kanji': 'shift_jis',
+ 'shift-jis': 'shift_jis',
+ 'shift_jis': 'shift_jis',
+ 'sjis': 'shift_jis',
+ 'windows-31j': 'shift_jis',
+ 'x-sjis': 'shift_jis',
+ 'cseuckr': 'euc-kr',
+ 'csksc56011987': 'euc-kr',
+ 'euc-kr': 'euc-kr',
+ 'iso-ir-149': 'euc-kr',
+ 'korean': 'euc-kr',
+ 'ks_c_5601-1987': 'euc-kr',
+ 'ks_c_5601-1989': 'euc-kr',
+ 'ksc5601': 'euc-kr',
+ 'ksc_5601': 'euc-kr',
+ 'windows-949': 'euc-kr',
+ 'csiso2022kr': 'iso-2022-kr',
+ 'iso-2022-kr': 'iso-2022-kr',
+ 'utf-16be': 'utf-16be',
+ 'utf-16': 'utf-16le',
+ 'utf-16le': 'utf-16le',
+ 'x-user-defined': 'x-user-defined',
+}
diff --git a/testing/web-platform/tests/tools/third_party/webencodings/webencodings/mklabels.py b/testing/web-platform/tests/tools/third_party/webencodings/webencodings/mklabels.py
new file mode 100644
index 0000000000..295dc928ba
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/webencodings/webencodings/mklabels.py
@@ -0,0 +1,59 @@
+"""
+
+ webencodings.mklabels
+ ~~~~~~~~~~~~~~~~~~~~~
+
+    Regenerate the webencodings.labels module.
+
+ :copyright: Copyright 2012 by Simon Sapin
+ :license: BSD, see LICENSE for details.
+
+"""
+
+import json
+try:
+ from urllib import urlopen
+except ImportError:
+ from urllib.request import urlopen
+
+
+def assert_lower(string):
+ assert string == string.lower()
+ return string
+
+
+def generate(url):
+ parts = ['''\
+"""
+
+ webencodings.labels
+ ~~~~~~~~~~~~~~~~~~~
+
+ Map encoding labels to their name.
+
+ :copyright: Copyright 2012 by Simon Sapin
+ :license: BSD, see LICENSE for details.
+
+"""
+
+# XXX Do not edit!
+# This file is automatically generated by mklabels.py
+
+LABELS = {
+''']
+ labels = [
+ (repr(assert_lower(label)).lstrip('u'),
+ repr(encoding['name']).lstrip('u'))
+ for category in json.loads(urlopen(url).read().decode('ascii'))
+ for encoding in category['encodings']
+ for label in encoding['labels']]
+ max_len = max(len(label) for label, name in labels)
+ parts.extend(
+ ' %s:%s %s,\n' % (label, ' ' * (max_len - len(label)), name)
+ for label, name in labels)
+ parts.append('}')
+ return ''.join(parts)
+
+
+if __name__ == '__main__':
+ print(generate('http://encoding.spec.whatwg.org/encodings.json'))
diff --git a/testing/web-platform/tests/tools/third_party/webencodings/webencodings/tests.py b/testing/web-platform/tests/tools/third_party/webencodings/webencodings/tests.py
new file mode 100644
index 0000000000..e12c10d033
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/webencodings/webencodings/tests.py
@@ -0,0 +1,153 @@
+# coding: utf-8
+"""
+
+ webencodings.tests
+ ~~~~~~~~~~~~~~~~~~
+
+ A basic test suite for Encoding.
+
+ :copyright: Copyright 2012 by Simon Sapin
+ :license: BSD, see LICENSE for details.
+
+"""
+
+from __future__ import unicode_literals
+
+from . import (lookup, LABELS, decode, encode, iter_decode, iter_encode,
+ IncrementalDecoder, IncrementalEncoder, UTF8)
+
+
+def assert_raises(exception, function, *args, **kwargs):
+ try:
+ function(*args, **kwargs)
+ except exception:
+ return
+ else: # pragma: no cover
+ raise AssertionError('Did not raise %s.' % exception)
+
+
+def test_labels():
+ assert lookup('utf-8').name == 'utf-8'
+ assert lookup('Utf-8').name == 'utf-8'
+ assert lookup('UTF-8').name == 'utf-8'
+ assert lookup('utf8').name == 'utf-8'
+ assert lookup('utf8').name == 'utf-8'
+ assert lookup('utf8 ').name == 'utf-8'
+ assert lookup(' \r\nutf8\t').name == 'utf-8'
+ assert lookup('u8') is None # Python label.
+ assert lookup('utf-8 ') is None # Non-ASCII white space.
+
+ assert lookup('US-ASCII').name == 'windows-1252'
+ assert lookup('iso-8859-1').name == 'windows-1252'
+ assert lookup('latin1').name == 'windows-1252'
+ assert lookup('LATIN1').name == 'windows-1252'
+ assert lookup('latin-1') is None
+    assert lookup('LATİN1') is None  # ASCII-only case insensitivity.
+
+
+def test_all_labels():
+ for label in LABELS:
+ assert decode(b'', label) == ('', lookup(label))
+ assert encode('', label) == b''
+ for repeat in [0, 1, 12]:
+ output, _ = iter_decode([b''] * repeat, label)
+ assert list(output) == []
+ assert list(iter_encode([''] * repeat, label)) == []
+ decoder = IncrementalDecoder(label)
+ assert decoder.decode(b'') == ''
+ assert decoder.decode(b'', final=True) == ''
+ encoder = IncrementalEncoder(label)
+ assert encoder.encode('') == b''
+ assert encoder.encode('', final=True) == b''
+ # All encoding names are valid labels too:
+ for name in set(LABELS.values()):
+ assert lookup(name).name == name
+
+
+def test_invalid_label():
+ assert_raises(LookupError, decode, b'\xEF\xBB\xBF\xc3\xa9', 'invalid')
+ assert_raises(LookupError, encode, 'é', 'invalid')
+ assert_raises(LookupError, iter_decode, [], 'invalid')
+ assert_raises(LookupError, iter_encode, [], 'invalid')
+ assert_raises(LookupError, IncrementalDecoder, 'invalid')
+ assert_raises(LookupError, IncrementalEncoder, 'invalid')
+
+
+def test_decode():
+ assert decode(b'\x80', 'latin1') == ('€', lookup('latin1'))
+ assert decode(b'\x80', lookup('latin1')) == ('€', lookup('latin1'))
+ assert decode(b'\xc3\xa9', 'utf8') == ('é', lookup('utf8'))
+ assert decode(b'\xc3\xa9', UTF8) == ('é', lookup('utf8'))
+    assert decode(b'\xc3\xa9', 'ascii') == ('Ã©', lookup('ascii'))
+ assert decode(b'\xEF\xBB\xBF\xc3\xa9', 'ascii') == ('é', lookup('utf8')) # UTF-8 with BOM
+
+ assert decode(b'\xFE\xFF\x00\xe9', 'ascii') == ('é', lookup('utf-16be')) # UTF-16-BE with BOM
+ assert decode(b'\xFF\xFE\xe9\x00', 'ascii') == ('é', lookup('utf-16le')) # UTF-16-LE with BOM
+ assert decode(b'\xFE\xFF\xe9\x00', 'ascii') == ('\ue900', lookup('utf-16be'))
+ assert decode(b'\xFF\xFE\x00\xe9', 'ascii') == ('\ue900', lookup('utf-16le'))
+
+ assert decode(b'\x00\xe9', 'UTF-16BE') == ('é', lookup('utf-16be'))
+ assert decode(b'\xe9\x00', 'UTF-16LE') == ('é', lookup('utf-16le'))
+ assert decode(b'\xe9\x00', 'UTF-16') == ('é', lookup('utf-16le'))
+
+ assert decode(b'\xe9\x00', 'UTF-16BE') == ('\ue900', lookup('utf-16be'))
+ assert decode(b'\x00\xe9', 'UTF-16LE') == ('\ue900', lookup('utf-16le'))
+ assert decode(b'\x00\xe9', 'UTF-16') == ('\ue900', lookup('utf-16le'))
+
+
+def test_encode():
+ assert encode('é', 'latin1') == b'\xe9'
+ assert encode('é', 'utf8') == b'\xc3\xa9'
+ assert encode('é', 'utf8') == b'\xc3\xa9'
+ assert encode('é', 'utf-16') == b'\xe9\x00'
+ assert encode('é', 'utf-16le') == b'\xe9\x00'
+ assert encode('é', 'utf-16be') == b'\x00\xe9'
+
+
+def test_iter_decode():
+ def iter_decode_to_string(input, fallback_encoding):
+ output, _encoding = iter_decode(input, fallback_encoding)
+ return ''.join(output)
+ assert iter_decode_to_string([], 'latin1') == ''
+ assert iter_decode_to_string([b''], 'latin1') == ''
+ assert iter_decode_to_string([b'\xe9'], 'latin1') == 'é'
+ assert iter_decode_to_string([b'hello'], 'latin1') == 'hello'
+ assert iter_decode_to_string([b'he', b'llo'], 'latin1') == 'hello'
+ assert iter_decode_to_string([b'hell', b'o'], 'latin1') == 'hello'
+    assert iter_decode_to_string([b'\xc3\xa9'], 'latin1') == 'Ã©'
+ assert iter_decode_to_string([b'\xEF\xBB\xBF\xc3\xa9'], 'latin1') == 'é'
+ assert iter_decode_to_string([
+ b'\xEF\xBB\xBF', b'\xc3', b'\xa9'], 'latin1') == 'é'
+ assert iter_decode_to_string([
+ b'\xEF\xBB\xBF', b'a', b'\xc3'], 'latin1') == 'a\uFFFD'
+ assert iter_decode_to_string([
+ b'', b'\xEF', b'', b'', b'\xBB\xBF\xc3', b'\xa9'], 'latin1') == 'é'
+ assert iter_decode_to_string([b'\xEF\xBB\xBF'], 'latin1') == ''
+ assert iter_decode_to_string([b'\xEF\xBB'], 'latin1') == 'ï»'
+ assert iter_decode_to_string([b'\xFE\xFF\x00\xe9'], 'latin1') == 'é'
+ assert iter_decode_to_string([b'\xFF\xFE\xe9\x00'], 'latin1') == 'é'
+ assert iter_decode_to_string([
+ b'', b'\xFF', b'', b'', b'\xFE\xe9', b'\x00'], 'latin1') == 'é'
+ assert iter_decode_to_string([
+ b'', b'h\xe9', b'llo'], 'x-user-defined') == 'h\uF7E9llo'
+
+
+def test_iter_encode():
+ assert b''.join(iter_encode([], 'latin1')) == b''
+ assert b''.join(iter_encode([''], 'latin1')) == b''
+ assert b''.join(iter_encode(['é'], 'latin1')) == b'\xe9'
+ assert b''.join(iter_encode(['', 'é', '', ''], 'latin1')) == b'\xe9'
+ assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16')) == b'\xe9\x00'
+ assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16le')) == b'\xe9\x00'
+ assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16be')) == b'\x00\xe9'
+ assert b''.join(iter_encode([
+ '', 'h\uF7E9', '', 'llo'], 'x-user-defined')) == b'h\xe9llo'
+
+
+def test_x_user_defined():
+ encoded = b'2,\x0c\x0b\x1aO\xd9#\xcb\x0f\xc9\xbbt\xcf\xa8\xca'
+ decoded = '2,\x0c\x0b\x1aO\uf7d9#\uf7cb\x0f\uf7c9\uf7bbt\uf7cf\uf7a8\uf7ca'
+ encoded = b'aa'
+ decoded = 'aa'
+ assert decode(encoded, 'x-user-defined') == (decoded, lookup('x-user-defined'))
+ assert encode(decoded, 'x-user-defined') == encoded
diff --git a/testing/web-platform/tests/tools/third_party/webencodings/webencodings/x_user_defined.py b/testing/web-platform/tests/tools/third_party/webencodings/webencodings/x_user_defined.py
new file mode 100644
index 0000000000..d16e326024
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/webencodings/webencodings/x_user_defined.py
@@ -0,0 +1,325 @@
+# coding: utf-8
+"""
+
+ webencodings.x_user_defined
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ An implementation of the x-user-defined encoding.
+
+ :copyright: Copyright 2012 by Simon Sapin
+ :license: BSD, see LICENSE for details.
+
+"""
+
+from __future__ import unicode_literals
+
+import codecs
+
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+ def encode(self, input, errors='strict'):
+ return codecs.charmap_encode(input, errors, encoding_table)
+
+ def decode(self, input, errors='strict'):
+ return codecs.charmap_decode(input, errors, decoding_table)
+
+
+class IncrementalEncoder(codecs.IncrementalEncoder):
+ def encode(self, input, final=False):
+ return codecs.charmap_encode(input, self.errors, encoding_table)[0]
+
+
+class IncrementalDecoder(codecs.IncrementalDecoder):
+ def decode(self, input, final=False):
+ return codecs.charmap_decode(input, self.errors, decoding_table)[0]
+
+
+class StreamWriter(Codec, codecs.StreamWriter):
+ pass
+
+
+class StreamReader(Codec, codecs.StreamReader):
+ pass
+
+
+### encodings module API
+
+codec_info = codecs.CodecInfo(
+ name='x-user-defined',
+ encode=Codec().encode,
+ decode=Codec().decode,
+ incrementalencoder=IncrementalEncoder,
+ incrementaldecoder=IncrementalDecoder,
+ streamreader=StreamReader,
+ streamwriter=StreamWriter,
+)
+
+
+### Decoding Table
+
+# Python 3:
+# for c in range(256): print(' %r' % chr(c if c < 128 else c + 0xF700))
+decoding_table = (
+ '\x00'
+ '\x01'
+ '\x02'
+ '\x03'
+ '\x04'
+ '\x05'
+ '\x06'
+ '\x07'
+ '\x08'
+ '\t'
+ '\n'
+ '\x0b'
+ '\x0c'
+ '\r'
+ '\x0e'
+ '\x0f'
+ '\x10'
+ '\x11'
+ '\x12'
+ '\x13'
+ '\x14'
+ '\x15'
+ '\x16'
+ '\x17'
+ '\x18'
+ '\x19'
+ '\x1a'
+ '\x1b'
+ '\x1c'
+ '\x1d'
+ '\x1e'
+ '\x1f'
+ ' '
+ '!'
+ '"'
+ '#'
+ '$'
+ '%'
+ '&'
+ "'"
+ '('
+ ')'
+ '*'
+ '+'
+ ','
+ '-'
+ '.'
+ '/'
+ '0'
+ '1'
+ '2'
+ '3'
+ '4'
+ '5'
+ '6'
+ '7'
+ '8'
+ '9'
+ ':'
+ ';'
+ '<'
+ '='
+ '>'
+ '?'
+ '@'
+ 'A'
+ 'B'
+ 'C'
+ 'D'
+ 'E'
+ 'F'
+ 'G'
+ 'H'
+ 'I'
+ 'J'
+ 'K'
+ 'L'
+ 'M'
+ 'N'
+ 'O'
+ 'P'
+ 'Q'
+ 'R'
+ 'S'
+ 'T'
+ 'U'
+ 'V'
+ 'W'
+ 'X'
+ 'Y'
+ 'Z'
+ '['
+ '\\'
+ ']'
+ '^'
+ '_'
+ '`'
+ 'a'
+ 'b'
+ 'c'
+ 'd'
+ 'e'
+ 'f'
+ 'g'
+ 'h'
+ 'i'
+ 'j'
+ 'k'
+ 'l'
+ 'm'
+ 'n'
+ 'o'
+ 'p'
+ 'q'
+ 'r'
+ 's'
+ 't'
+ 'u'
+ 'v'
+ 'w'
+ 'x'
+ 'y'
+ 'z'
+ '{'
+ '|'
+ '}'
+ '~'
+ '\x7f'
+ '\uf780'
+ '\uf781'
+ '\uf782'
+ '\uf783'
+ '\uf784'
+ '\uf785'
+ '\uf786'
+ '\uf787'
+ '\uf788'
+ '\uf789'
+ '\uf78a'
+ '\uf78b'
+ '\uf78c'
+ '\uf78d'
+ '\uf78e'
+ '\uf78f'
+ '\uf790'
+ '\uf791'
+ '\uf792'
+ '\uf793'
+ '\uf794'
+ '\uf795'
+ '\uf796'
+ '\uf797'
+ '\uf798'
+ '\uf799'
+ '\uf79a'
+ '\uf79b'
+ '\uf79c'
+ '\uf79d'
+ '\uf79e'
+ '\uf79f'
+ '\uf7a0'
+ '\uf7a1'
+ '\uf7a2'
+ '\uf7a3'
+ '\uf7a4'
+ '\uf7a5'
+ '\uf7a6'
+ '\uf7a7'
+ '\uf7a8'
+ '\uf7a9'
+ '\uf7aa'
+ '\uf7ab'
+ '\uf7ac'
+ '\uf7ad'
+ '\uf7ae'
+ '\uf7af'
+ '\uf7b0'
+ '\uf7b1'
+ '\uf7b2'
+ '\uf7b3'
+ '\uf7b4'
+ '\uf7b5'
+ '\uf7b6'
+ '\uf7b7'
+ '\uf7b8'
+ '\uf7b9'
+ '\uf7ba'
+ '\uf7bb'
+ '\uf7bc'
+ '\uf7bd'
+ '\uf7be'
+ '\uf7bf'
+ '\uf7c0'
+ '\uf7c1'
+ '\uf7c2'
+ '\uf7c3'
+ '\uf7c4'
+ '\uf7c5'
+ '\uf7c6'
+ '\uf7c7'
+ '\uf7c8'
+ '\uf7c9'
+ '\uf7ca'
+ '\uf7cb'
+ '\uf7cc'
+ '\uf7cd'
+ '\uf7ce'
+ '\uf7cf'
+ '\uf7d0'
+ '\uf7d1'
+ '\uf7d2'
+ '\uf7d3'
+ '\uf7d4'
+ '\uf7d5'
+ '\uf7d6'
+ '\uf7d7'
+ '\uf7d8'
+ '\uf7d9'
+ '\uf7da'
+ '\uf7db'
+ '\uf7dc'
+ '\uf7dd'
+ '\uf7de'
+ '\uf7df'
+ '\uf7e0'
+ '\uf7e1'
+ '\uf7e2'
+ '\uf7e3'
+ '\uf7e4'
+ '\uf7e5'
+ '\uf7e6'
+ '\uf7e7'
+ '\uf7e8'
+ '\uf7e9'
+ '\uf7ea'
+ '\uf7eb'
+ '\uf7ec'
+ '\uf7ed'
+ '\uf7ee'
+ '\uf7ef'
+ '\uf7f0'
+ '\uf7f1'
+ '\uf7f2'
+ '\uf7f3'
+ '\uf7f4'
+ '\uf7f5'
+ '\uf7f6'
+ '\uf7f7'
+ '\uf7f8'
+ '\uf7f9'
+ '\uf7fa'
+ '\uf7fb'
+ '\uf7fc'
+ '\uf7fd'
+ '\uf7fe'
+ '\uf7ff'
+)
+
+### Encoding table
+encoding_table = codecs.charmap_build(decoding_table)
diff --git a/testing/web-platform/tests/tools/third_party/websockets/.appveyor.yml b/testing/web-platform/tests/tools/third_party/websockets/.appveyor.yml
new file mode 100644
index 0000000000..7954ee4be7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/.appveyor.yml
@@ -0,0 +1,27 @@
+branches:
+ only:
+ - master
+
+skip_branch_with_pr: true
+
+environment:
+# websockets only works on Python >= 3.6.
+ CIBW_SKIP: cp27-* cp33-* cp34-* cp35-*
+ CIBW_TEST_COMMAND: python -W default -m unittest
+ WEBSOCKETS_TESTS_TIMEOUT_FACTOR: 100
+
+install:
+# Ensure python is Python 3.
+ - set PATH=C:\Python37;%PATH%
+ - cmd: python -m pip install --upgrade cibuildwheel
+# Create file '.cibuildwheel' so that extension build is not optional (c.f. setup.py).
+ - cmd: touch .cibuildwheel
+
+build_script:
+ - cmd: python -m cibuildwheel --output-dir wheelhouse
+# Upload to PyPI on tags
+ - ps: >-
+ if ($env:APPVEYOR_REPO_TAG -eq "true") {
+ Invoke-Expression "python -m pip install twine"
+ Invoke-Expression "python -m twine upload --skip-existing wheelhouse/*.whl"
+ }
diff --git a/testing/web-platform/tests/tools/third_party/websockets/.circleci/config.yml b/testing/web-platform/tests/tools/third_party/websockets/.circleci/config.yml
new file mode 100644
index 0000000000..0877c161ad
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/.circleci/config.yml
@@ -0,0 +1,55 @@
+version: 2
+
+jobs:
+ main:
+ docker:
+ - image: circleci/python:3.7
+ steps:
+ # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway.
+ - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc
+ - checkout
+ - run: sudo pip install tox codecov
+ - run: tox -e coverage,black,flake8,isort,mypy
+ - run: codecov
+ py36:
+ docker:
+ - image: circleci/python:3.6
+ steps:
+ # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway.
+ - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc
+ - checkout
+ - run: sudo pip install tox
+ - run: tox -e py36
+ py37:
+ docker:
+ - image: circleci/python:3.7
+ steps:
+ # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway.
+ - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc
+ - checkout
+ - run: sudo pip install tox
+ - run: tox -e py37
+ py38:
+ docker:
+ - image: circleci/python:3.8.0rc1
+ steps:
+ # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway.
+ - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc
+ - checkout
+ - run: sudo pip install tox
+ - run: tox -e py38
+
+workflows:
+ version: 2
+ build:
+ jobs:
+ - main
+ - py36:
+ requires:
+ - main
+ - py37:
+ requires:
+ - main
+ - py38:
+ requires:
+ - main
diff --git a/testing/web-platform/tests/tools/third_party/websockets/.github/FUNDING.yml b/testing/web-platform/tests/tools/third_party/websockets/.github/FUNDING.yml
new file mode 100644
index 0000000000..7ae223b3d8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/.github/FUNDING.yml
@@ -0,0 +1 @@
+tidelift: "pypi/websockets"
diff --git a/testing/web-platform/tests/tools/third_party/websockets/.gitignore b/testing/web-platform/tests/tools/third_party/websockets/.gitignore
new file mode 100644
index 0000000000..ef0d16520c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/.gitignore
@@ -0,0 +1,12 @@
+*.pyc
+*.so
+.coverage
+.mypy_cache
+.tox
+build/
+compliance/reports/
+dist/
+docs/_build/
+htmlcov/
+MANIFEST
+websockets.egg-info/
diff --git a/testing/web-platform/tests/tools/third_party/websockets/.readthedocs.yml b/testing/web-platform/tests/tools/third_party/websockets/.readthedocs.yml
new file mode 100644
index 0000000000..109affab45
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/.readthedocs.yml
@@ -0,0 +1,7 @@
+build:
+ image: latest
+
+python:
+ version: 3.7
+
+requirements_file: docs/requirements.txt
diff --git a/testing/web-platform/tests/tools/third_party/websockets/.travis.yml b/testing/web-platform/tests/tools/third_party/websockets/.travis.yml
new file mode 100644
index 0000000000..0306937597
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/.travis.yml
@@ -0,0 +1,36 @@
+env:
+ global:
+ # websockets only works on Python >= 3.6.
+ - CIBW_SKIP="cp27-* cp33-* cp34-* cp35-*"
+ - CIBW_TEST_COMMAND="python3 -W default -m unittest"
+ - WEBSOCKETS_TESTS_TIMEOUT_FACTOR=100
+
+matrix:
+ include:
+ - language: python
+ dist: xenial # required for Python 3.7 (travis-ci/travis-ci#9069)
+ sudo: required
+ python: "3.7"
+ services:
+ - docker
+ - os: osx
+ osx_image: xcode8.3
+
+install:
+# Python 3 is needed to run cibuildwheel for websockets.
+ - if [ "${TRAVIS_OS_NAME:-}" == "osx" ]; then
+ brew update;
+ brew upgrade python;
+ fi
+# Install cibuildwheel using pip3 to make sure Python 3 is used.
+ - pip3 install --upgrade cibuildwheel
+# Create file '.cibuildwheel' so that extension build is not optional (c.f. setup.py).
+ - touch .cibuildwheel
+
+script:
+ - cibuildwheel --output-dir wheelhouse
+# Upload to PyPI on tags
+ - if [ "${TRAVIS_TAG:-}" != "" ]; then
+ pip3 install twine;
+ python3 -m twine upload --skip-existing wheelhouse/*;
+ fi
diff --git a/testing/web-platform/tests/tools/third_party/websockets/CODE_OF_CONDUCT.md b/testing/web-platform/tests/tools/third_party/websockets/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000..80f80d51b1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/CODE_OF_CONDUCT.md
@@ -0,0 +1,46 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at aymeric DOT augustin AT fractalideas DOT com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/testing/web-platform/tests/tools/third_party/websockets/LICENSE b/testing/web-platform/tests/tools/third_party/websockets/LICENSE
new file mode 100644
index 0000000000..b2962adba2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/LICENSE
@@ -0,0 +1,25 @@
+Copyright (c) 2013-2019 Aymeric Augustin and contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of websockets nor the names of its contributors may
+ be used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/testing/web-platform/tests/tools/third_party/websockets/MANIFEST.in b/testing/web-platform/tests/tools/third_party/websockets/MANIFEST.in
new file mode 100644
index 0000000000..1c660b95b1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/MANIFEST.in
@@ -0,0 +1,2 @@
+include LICENSE
+include src/websockets/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/websockets/Makefile b/testing/web-platform/tests/tools/third_party/websockets/Makefile
new file mode 100644
index 0000000000..d9e16fefe3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/Makefile
@@ -0,0 +1,29 @@
+.PHONY: default style test coverage build clean
+
+export PYTHONASYNCIODEBUG=1
+export PYTHONPATH=src
+
+default: coverage style
+
+style:
+ isort --recursive src tests
+ black src tests
+ flake8 src tests
+ mypy --strict src
+
+test:
+ python -W default -m unittest
+
+coverage:
+ python -m coverage erase
+ python -W default -m coverage run -m unittest
+ python -m coverage html
+ python -m coverage report --show-missing --fail-under=100
+
+build:
+ python setup.py build_ext --inplace
+
+clean:
+ find . -name '*.pyc' -o -name '*.so' -delete
+ find . -name __pycache__ -delete
+ rm -rf .coverage build compliance/reports dist docs/_build htmlcov MANIFEST src/websockets.egg-info
diff --git a/testing/web-platform/tests/tools/third_party/websockets/README.rst b/testing/web-platform/tests/tools/third_party/websockets/README.rst
new file mode 100644
index 0000000000..1e15ba1981
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/README.rst
@@ -0,0 +1,154 @@
+.. image:: logo/horizontal.svg
+ :width: 480px
+ :alt: websockets
+
+|rtd| |pypi-v| |pypi-pyversions| |pypi-l| |pypi-wheel| |circleci| |codecov|
+
+.. |rtd| image:: https://readthedocs.org/projects/websockets/badge/?version=latest
+ :target: https://websockets.readthedocs.io/
+
+.. |pypi-v| image:: https://img.shields.io/pypi/v/websockets.svg
+ :target: https://pypi.python.org/pypi/websockets
+
+.. |pypi-pyversions| image:: https://img.shields.io/pypi/pyversions/websockets.svg
+ :target: https://pypi.python.org/pypi/websockets
+
+.. |pypi-l| image:: https://img.shields.io/pypi/l/websockets.svg
+ :target: https://pypi.python.org/pypi/websockets
+
+.. |pypi-wheel| image:: https://img.shields.io/pypi/wheel/websockets.svg
+ :target: https://pypi.python.org/pypi/websockets
+
+.. |circleci| image:: https://img.shields.io/circleci/project/github/aaugustin/websockets.svg
+ :target: https://circleci.com/gh/aaugustin/websockets
+
+.. |codecov| image:: https://codecov.io/gh/aaugustin/websockets/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aaugustin/websockets
+
+What is ``websockets``?
+-----------------------
+
+``websockets`` is a library for building WebSocket servers_ and clients_ in
+Python with a focus on correctness and simplicity.
+
+.. _servers: https://github.com/aaugustin/websockets/blob/master/example/server.py
+.. _clients: https://github.com/aaugustin/websockets/blob/master/example/client.py
+
+Built on top of ``asyncio``, Python's standard asynchronous I/O framework, it
+provides an elegant coroutine-based API.
+
+`Documentation is available on Read the Docs. <https://websockets.readthedocs.io/>`_
+
+Here's how a client sends and receives messages:
+
+.. copy-pasted because GitHub doesn't support the include directive
+
+.. code:: python
+
+ #!/usr/bin/env python
+
+ import asyncio
+ import websockets
+
+ async def hello(uri):
+ async with websockets.connect(uri) as websocket:
+ await websocket.send("Hello world!")
+ await websocket.recv()
+
+ asyncio.get_event_loop().run_until_complete(
+ hello('ws://localhost:8765'))
+
+And here's an echo server:
+
+.. code:: python
+
+ #!/usr/bin/env python
+
+ import asyncio
+ import websockets
+
+ async def echo(websocket, path):
+ async for message in websocket:
+ await websocket.send(message)
+
+ asyncio.get_event_loop().run_until_complete(
+ websockets.serve(echo, 'localhost', 8765))
+ asyncio.get_event_loop().run_forever()
+
+Does that look good?
+
+`Get started with the tutorial! <https://websockets.readthedocs.io/en/stable/intro.html>`_
+
+.. raw:: html
+
+ <hr>
+ <img align="left" height="150" width="150" src="https://raw.githubusercontent.com/aaugustin/websockets/master/logo/tidelift.png">
+ <h3 align="center"><i>websockets for enterprise</i></h3>
+ <p align="center"><i>Available as part of the Tidelift Subscription</i></p>
+ <p align="center"><i>The maintainers of websockets and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. <a href="https://tidelift.com/subscription/pkg/pypi-websockets?utm_source=pypi-websockets&utm_medium=referral&utm_campaign=readme">Learn more.</a></i></p>
+ <hr>
+ <p>(If you contribute to <code>websockets</code> and would like to become an official support provider, <a href="https://fractalideas.com/">let me know</a>.)</p>
+
+Why should I use ``websockets``?
+--------------------------------
+
+The development of ``websockets`` is shaped by four principles:
+
+1. **Simplicity**: all you need to understand is ``msg = await ws.recv()`` and
+ ``await ws.send(msg)``; ``websockets`` takes care of managing connections
+ so you can focus on your application.
+
+2. **Robustness**: ``websockets`` is built for production; for example it was
+ the only library to `handle backpressure correctly`_ before the issue
+ became widely known in the Python community.
+
+3. **Quality**: ``websockets`` is heavily tested. Continuous integration fails
+ under 100% branch coverage. Also it passes the industry-standard `Autobahn
+ Testsuite`_.
+
+4. **Performance**: memory use is configurable. An extension written in C
+ accelerates expensive operations. It's pre-compiled for Linux, macOS and
+ Windows and packaged in the wheel format for each system and Python version.
+
+Documentation is a first class concern in the project. Head over to `Read the
+Docs`_ and see for yourself.
+
+.. _Read the Docs: https://websockets.readthedocs.io/
+.. _handle backpressure correctly: https://vorpus.org/blog/some-thoughts-on-asynchronous-api-design-in-a-post-asyncawait-world/#websocket-servers
+.. _Autobahn Testsuite: https://github.com/aaugustin/websockets/blob/master/compliance/README.rst
+
+Why shouldn't I use ``websockets``?
+-----------------------------------
+
+* If you prefer callbacks over coroutines: ``websockets`` was created to
+ provide the best coroutine-based API to manage WebSocket connections in
+ Python. Pick another library for a callback-based API.
+* If you're looking for a mixed HTTP / WebSocket library: ``websockets`` aims
+ at being an excellent implementation of :rfc:`6455`: The WebSocket Protocol
+ and :rfc:`7692`: Compression Extensions for WebSocket. Its support for HTTP
+ is minimal — just enough for an HTTP health check (a sketch follows this list).
+* If you want to use Python 2: ``websockets`` builds upon ``asyncio`` which
+ only works on Python 3. ``websockets`` requires Python ≥ 3.6.1.
+
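A minimal sketch of that "just enough HTTP" case, assuming the documented
``process_request`` hook; the ``/healthz`` path and the ``echo`` handler are
illustrative placeholders::

    import asyncio
    import http

    import websockets

    async def healthz(path, request_headers):
        # Returning a (status, headers, body) tuple answers with plain HTTP
        # instead of performing the WebSocket handshake.
        if path == "/healthz":
            return http.HTTPStatus.OK, [], b"OK\n"

    async def echo(websocket, path):
        async for message in websocket:
            await websocket.send(message)

    start_server = websockets.serve(echo, "localhost", 8765, process_request=healthz)
    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()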
+What else?
+----------
+
+Bug reports, patches and suggestions are welcome!
+
+To report a security vulnerability, please use the `Tidelift security
+contact`_. Tidelift will coordinate the fix and disclosure.
+
+.. _Tidelift security contact: https://tidelift.com/security
+
+For anything else, please open an issue_ or send a `pull request`_.
+
+.. _issue: https://github.com/aaugustin/websockets/issues/new
+.. _pull request: https://github.com/aaugustin/websockets/compare/
+
+Participants must uphold the `Contributor Covenant code of conduct`_.
+
+.. _Contributor Covenant code of conduct: https://github.com/aaugustin/websockets/blob/master/CODE_OF_CONDUCT.md
+
+``websockets`` is released under the `BSD license`_.
+
+.. _BSD license: https://github.com/aaugustin/websockets/blob/master/LICENSE
diff --git a/testing/web-platform/tests/tools/third_party/websockets/compliance/README.rst b/testing/web-platform/tests/tools/third_party/websockets/compliance/README.rst
new file mode 100644
index 0000000000..8570f9176d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/compliance/README.rst
@@ -0,0 +1,50 @@
+Autobahn Testsuite
+==================
+
+General information and installation instructions are available at
+https://github.com/crossbario/autobahn-testsuite.
+
+To improve performance, you should compile the C extension first::
+
+ $ python setup.py build_ext --inplace
+
+Running the test suite
+----------------------
+
+All commands below must be run from the directory containing this file.
+
+To test the server::
+
+ $ PYTHONPATH=.. python test_server.py
+ $ wstest -m fuzzingclient
+
+To test the client::
+
+ $ wstest -m fuzzingserver
+ $ PYTHONPATH=.. python test_client.py
+
+Run the first command in a shell. Run the second command in another shell.
+It should take about ten minutes to complete — wstest is the bottleneck.
+Then kill the first one with Ctrl-C.
+
+The test client or server shouldn't display any exceptions. The results are
+stored in reports/clients/index.html.
+
+Note that the Autobahn software only supports Python 2, while ``websockets``
+only supports Python 3; you need two different environments.
+
+Conformance notes
+-----------------
+
+Some test cases are more strict than the RFC. Given the implementation of the
+library and the test echo client or server, ``websockets`` gets a "Non-Strict"
+in these cases.
+
+In 3.2, 3.3, 4.1.3, 4.1.4, 4.2.3, 4.2.4, and 5.15 ``websockets`` notices the
+protocol error and closes the connection before it has had a chance to echo
+the previous frame.
+
+In 6.4.3 and 6.4.4, even though it uses an incremental decoder, ``websockets``
+doesn't notice the invalid utf-8 fast enough to get a "Strict" pass. These
+tests are more strict than the RFC.
+
diff --git a/testing/web-platform/tests/tools/third_party/websockets/compliance/fuzzingclient.json b/testing/web-platform/tests/tools/third_party/websockets/compliance/fuzzingclient.json
new file mode 100644
index 0000000000..202ff49a03
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/compliance/fuzzingclient.json
@@ -0,0 +1,11 @@
+
+{
+ "options": {"failByDrop": false},
+ "outdir": "./reports/servers",
+
+ "servers": [{"agent": "websockets", "url": "ws://localhost:8642", "options": {"version": 18}}],
+
+ "cases": ["*"],
+ "exclude-cases": [],
+ "exclude-agent-cases": {}
+}
diff --git a/testing/web-platform/tests/tools/third_party/websockets/compliance/fuzzingserver.json b/testing/web-platform/tests/tools/third_party/websockets/compliance/fuzzingserver.json
new file mode 100644
index 0000000000..1bdb42723e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/compliance/fuzzingserver.json
@@ -0,0 +1,12 @@
+
+{
+ "url": "ws://localhost:8642",
+
+ "options": {"failByDrop": false},
+ "outdir": "./reports/clients",
+ "webport": 8080,
+
+ "cases": ["*"],
+ "exclude-cases": [],
+ "exclude-agent-cases": {}
+}
diff --git a/testing/web-platform/tests/tools/third_party/websockets/compliance/test_client.py b/testing/web-platform/tests/tools/third_party/websockets/compliance/test_client.py
new file mode 100644
index 0000000000..5fd0f4b4fb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/compliance/test_client.py
@@ -0,0 +1,49 @@
+import json
+import logging
+import urllib.parse
+
+import asyncio
+import websockets
+
+
+logging.basicConfig(level=logging.WARNING)
+
+# Uncomment this line to make only websockets more verbose.
+# logging.getLogger('websockets').setLevel(logging.DEBUG)
+
+
+SERVER = "ws://127.0.0.1:8642"
+AGENT = "websockets"
+
+
+async def get_case_count(server):
+ uri = f"{server}/getCaseCount"
+ async with websockets.connect(uri) as ws:
+ msg = await ws.recv()
+ return json.loads(msg)
+
+
+async def run_case(server, case, agent):
+ uri = f"{server}/runCase?case={case}&agent={agent}"
+ async with websockets.connect(uri, max_size=2 ** 25, max_queue=1) as ws:
+ async for msg in ws:
+ await ws.send(msg)
+
+
+async def update_reports(server, agent):
+ uri = f"{server}/updateReports?agent={agent}"
+ async with websockets.connect(uri):
+ pass
+
+
+async def run_tests(server, agent):
+ cases = await get_case_count(server)
+ for case in range(1, cases + 1):
+ print(f"Running test case {case} out of {cases}", end="\r")
+ await run_case(server, case, agent)
+ print(f"Ran {cases} test cases ")
+ await update_reports(server, agent)
+
+
+main = run_tests(SERVER, urllib.parse.quote(AGENT))
+asyncio.get_event_loop().run_until_complete(main)
diff --git a/testing/web-platform/tests/tools/third_party/websockets/compliance/test_server.py b/testing/web-platform/tests/tools/third_party/websockets/compliance/test_server.py
new file mode 100644
index 0000000000..8020f68d35
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/compliance/test_server.py
@@ -0,0 +1,27 @@
+import logging
+
+import asyncio
+import websockets
+
+
+logging.basicConfig(level=logging.WARNING)
+
+# Uncomment this line to make only websockets more verbose.
+# logging.getLogger('websockets').setLevel(logging.DEBUG)
+
+
+HOST, PORT = "127.0.0.1", 8642
+
+
+async def echo(ws, path):
+ async for msg in ws:
+ await ws.send(msg)
+
+
+start_server = websockets.serve(echo, HOST, PORT, max_size=2 ** 25, max_queue=1)
+
+try:
+ asyncio.get_event_loop().run_until_complete(start_server)
+ asyncio.get_event_loop().run_forever()
+except KeyboardInterrupt:
+ pass
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/Makefile b/testing/web-platform/tests/tools/third_party/websockets/docs/Makefile
new file mode 100644
index 0000000000..bb25aa49d0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/Makefile
@@ -0,0 +1,160 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+ @echo " spelling to check for typos in documentation"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/websockets.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/websockets.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/websockets"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/websockets"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+spelling:
+ $(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling
+ @echo
+ @echo "Check finished. Wrong words can be found in " \
+ "$(BUILDDIR)/spelling/output.txt."
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/_static/tidelift.png b/testing/web-platform/tests/tools/third_party/websockets/docs/_static/tidelift.png
new file mode 100644
index 0000000000..317dc4d985
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/_static/tidelift.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/_static/websockets.svg b/testing/web-platform/tests/tools/third_party/websockets/docs/_static/websockets.svg
new file mode 100644
index 0000000000..b07fb22387
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/_static/websockets.svg
@@ -0,0 +1,31 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="480" height="320" viewBox="0 0 480 320">
+ <linearGradient id="w" x1="0.2333" y1="0" x2="0.5889" y2="0.5333">
+ <stop offset="0%" stop-color="#ffe873" />
+ <stop offset="100%" stop-color="#ffd43b" />
+ </linearGradient>
+ <linearGradient id="s" x1="0.2333" y1="0" x2="0.5889" y2="0.5333">
+ <stop offset="0%" stop-color="#5a9fd4" />
+ <stop offset="100%" stop-color="#306998" />
+ </linearGradient>
+ <g>
+ <path fill="url(#w)" d="m 263.40708,146.81618 c -0.43704,0.0747 -0.88656,0.12978 -1.35572,0.14933 -2.45813,0.0764 -4.25357,-0.58665 -5.82335,-2.15107 l -8.89246,-8.85942 -11.23464,-11.19805 -36.04076,-35.919454 c -3.43568,-3.42217 -7.33248,-5.347474 -11.58962,-5.723468 -2.22981,-0.198219 -4.47388,0.03111 -6.64036,0.675545 -3.24213,0.944875 -6.13552,2.664848 -8.59366,5.116366 -3.83437,3.819499 -5.86349,8.414979 -5.87598,13.287801 -0.0607,4.95281 1.95153,9.60074 5.8082,13.44424 l 55.62289,55.43648 c 1.82219,1.84175 2.65971,3.79549 2.63384,6.14568 l 0.004,0.208 c 0.0527,2.43196 -0.75991,4.34571 -2.6267,6.20612 -1.78028,1.77598 -3.8094,2.65241 -6.30945,2.75552 -2.45814,0.0764 -4.25446,-0.58844 -5.82514,-2.15286 L 160.50255,128.2618 c -5.21417,-5.19459 -11.7029,-6.98745 -18.22998,-5.04881 -3.2457,0.9431 -6.13553,2.66307 -8.59545,5.11459 -3.83437,3.82127 -5.86527,8.41676 -5.87597,13.28957 -0.0562,4.95281 1.95152,9.60252 5.80641,13.4478 l 58.10689,57.90577 c 8.31984,8.29143 19.34042,11.9376 32.74331,10.83806 12.57967,-1.02043 23.02317,-5.5848 31.03441,-13.57313 7.51265,-7.4861 11.96423,-16.35175 13.28695,-26.42537 10.47206,-1.68264 19.29494,-6.04524 26.27512,-13.00158 4.01364,-3.99994 7.14963,-8.3972 9.40531,-13.16157 -14.15569,-0.39911 -28.23645,-4.00972 -41.05247,-10.83095 z" />
+ <path fill="url(#s)" d="m 308.76038,138.11854 c 0.10259,-12.84514 -4.43017,-23.98541 -13.50635,-33.1346 L 259.37292,69.225372 c -0.24349,-0.240885 -0.46469,-0.487992 -0.68678,-0.744877 -1.48416,-1.739529 -2.18788,-3.583056 -2.21018,-5.807022 -0.0259,-2.470184 0.84911,-4.508375 2.7605,-6.407902 1.91406,-1.909304 3.8531,-2.737735 6.36564,-2.684403 2.53662,0.024 4.62728,0.943097 6.57257,2.881734 l 60.59178,60.384848 12.11408,-12.06914 c 1.12203,-0.90755 1.95777,-1.76887 2.87823,-2.93418 5.91879,-7.515442 5.26947,-18.272611 -1.51003,-25.028952 L 299.00456,29.727312 c -9.19393,-9.157192 -20.36703,-13.776677 -33.16789,-13.7269 -12.94266,-0.05067 -24.14163,4.548375 -33.28739,13.662901 -9.02892,8.996307 -13.64015,19.93925 -13.7008,32.487501 l -0.004,0.14222 c -0.002,0.167998 -0.005,0.336884 -0.005,0.506659 -0.091,12.232701 4.10729,22.95787 12.48154,31.881285 0.40226,0.43022 0.80274,0.85777 1.22283,1.27821 l 35.75088,35.626122 c 1.88909,1.88174 2.71769,3.79638 2.69361,6.20968 l 0.003,0.20977 c 0.0527,2.43197 -0.76081,4.34571 -2.6276,6.20791 -1.44759,1.43909 -3.06286,2.27818 -4.9564,2.60262 12.81601,6.82123 26.89677,10.43184 41.05246,10.83362 2.80598,-5.92525 4.2509,-12.41848 4.29906,-19.43526 z" />
+ <path fill="#ffffff" d="m 327.48093,85.181572 c 2.84701,-2.838179 7.46359,-2.836401 10.30883,0 2.84433,2.834623 2.84612,7.435446 -0.002,10.270956 -2.84345,2.83818 -7.46271,2.83818 -10.30704,0 -2.84524,-2.83551 -2.84791,-7.435444 0,-10.270956 z" />
+ </g>
+ <g>
+ <g fill="#ffd43b">
+ <path d="m 25.719398,284.91839 c 0,2.59075 0.912299,4.79875 2.736898,6.62269 1.824599,1.82657 4.033255,2.73821 6.625313,2.73821 2.591402,0 4.800058,-0.91164 6.624002,-2.73821 1.825254,-1.82394 2.738209,-4.03194 2.738209,-6.62269 v -21.77984 c 0,-1.32126 0.475811,-2.45901 1.42809,-3.40998 0.952278,-0.95359 2.089375,-1.43006 3.411947,-1.43006 h 0.0793 c 1.348132,0 2.471467,0.47647 3.371969,1.43006 0.952278,0.95097 1.428745,2.08938 1.428745,3.40998 v 21.77984 c 0,2.59075 0.912299,4.79875 2.738209,6.62269 1.823944,1.82657 4.031289,2.73821 6.624002,2.73821 2.618274,0 4.839382,-0.91164 6.663981,-2.73821 1.825254,-1.82394 2.738209,-4.03194 2.738209,-6.62269 v -21.77984 c 0,-1.32126 0.475156,-2.45901 1.42809,-3.40998 0.897881,-0.95359 2.022526,-1.43006 3.371969,-1.43006 h 0.07865 c 1.323228,0 2.460325,0.47647 3.411948,1.43006 0.926062,0.95097 1.388766,2.08938 1.388766,3.40998 v 21.77984 c 0,5.26211 -1.865233,9.75807 -5.593077,13.48657 -3.729156,3.7285 -8.22577,5.59373 -13.487876,5.59373 -6.294998,0 -11.028207,-2.08807 -14.202904,-6.26747 -3.199602,4.1794 -7.94723,6.26747 -14.240916,6.26747 -5.262763,0 -9.759377,-1.86523 -13.487876,-5.59373 C 17.866544,294.67646 16,290.18115 16,284.91839 v -21.77984 c 0,-1.32126 0.476467,-2.45901 1.428745,-3.40998 0.951623,-0.95359 2.075612,-1.43006 3.371969,-1.43006 h 0.11928 c 1.295702,0 2.419036,0.47647 3.372625,1.43006 0.950967,0.95097 1.427434,2.08938 1.427434,3.40998 v 21.77984 z" />
+ <path d="m 132.94801,271.6291 c 0.31786,0.66063 0.47712,1.33371 0.47712,2.02252 0,0.55577 -0.10551,1.11089 -0.3172,1.66665 -0.45026,1.24262 -1.29636,2.14181 -2.53898,2.69692 -3.70293,1.66665 -8.56853,3.8622 -14.59875,6.58599 -7.48453,3.38442 -11.87497,5.38139 -13.17067,5.9909 2.00942,2.53832 5.14414,3.80715 9.40219,3.80715 2.82931,0 5.39515,-0.83234 7.69556,-2.499 2.24798,-1.63977 3.82222,-3.75537 4.72141,-6.34808 0.76746,-2.16868 2.30107,-3.25269 4.60148,-3.25269 1.63912,0 2.94859,0.68881 3.92708,2.06185 0.6082,0.84742 0.9123,1.7335 0.9123,2.65891 0,0.55577 -0.10552,1.12399 -0.31655,1.70532 -1.56048,4.52348 -4.29869,8.17334 -8.21135,10.95087 -3.96706,2.88108 -8.41059,4.32293 -13.32993,4.32293 -6.29434,0 -11.67639,-2.23356 -16.145474,-6.70395 -4.469743,-4.46975 -6.704615,-9.85114 -6.704615,-16.14679 0,-6.29434 2.234872,-11.67507 6.704615,-16.14678 4.468434,-4.46843 9.851134,-6.70396 16.145474,-6.70396 4.54773,0 8.70027,1.24392 12.45629,3.7285 3.72785,2.43607 6.49162,5.63437 8.29,9.60274 z m -20.74695,-3.5332 c -3.64985,0 -6.7577,1.28391 -9.32289,3.84909 -2.53897,2.5665 -3.808452,5.67435 -3.808452,9.32289 v 0.27789 l 22.175692,-9.95731 c -1.95633,-2.32597 -4.97177,-3.49256 -9.04435,-3.49256 z" />
+ <path d="m 146.11999,242.03442 c 1.2957,0 2.41904,0.46336 3.37197,1.38876 0.95228,0.95097 1.42874,2.08938 1.42874,3.4113 v 15.4311 c 2.98792,-2.64318 7.36525,-3.96707 13.13004,-3.96707 6.29434,0 11.67638,2.23488 16.14613,6.70396 4.46908,4.47106 6.70461,9.85245 6.70461,16.14679 0,6.29499 -2.23553,11.67638 -6.70461,16.14678 -4.46909,4.4704 -9.85113,6.70396 -16.14613,6.70396 -6.295,0 -11.66262,-2.22111 -16.10549,-6.66529 -4.4704,-4.41469 -6.71838,-9.77052 -6.7446,-16.06617 v -34.43341 c 0,-1.32257 0.47647,-2.46032 1.42875,-3.41129 0.95162,-0.92541 2.07561,-1.38877 3.37197,-1.38877 h 0.11862 z m 17.93009,26.06148 c -3.64919,0 -6.75704,1.28391 -9.32288,3.84909 -2.53767,2.5665 -3.80781,5.67435 -3.80781,9.32289 0,3.62364 1.27014,6.71772 3.80781,9.28291 2.56584,2.56519 5.67303,3.84778 9.32288,3.84778 3.62364,0 6.71773,-1.28259 9.28357,-3.84778 2.56387,-2.56519 3.84712,-5.65927 3.84712,-9.28291 0,-3.64788 -1.28325,-6.75639 -3.84712,-9.32289 -2.56584,-2.56518 -5.65927,-3.84909 -9.28357,-3.84909 z" />
+ </g>
+ <g fill="#306998">
+ <path d="m 205.94246,268.01922 c -1.16397,0 -2.14247,0.39586 -2.93548,1.18888 -0.79368,0.82054 -1.19019,1.79838 -1.19019,2.93548 0,1.58735 0.76681,2.77753 2.30172,3.56989 0.52825,0.29165 2.7369,0.95228 6.62466,1.98386 3.14717,0.89985 5.48691,2.07627 7.02051,3.53057 2.19621,2.09003 3.29267,5.06549 3.29267,8.92704 0,3.80714 -1.34879,7.0736 -4.04571,9.79739 -2.72444,2.69823 -5.9909,4.04636 -9.7987,4.04636 h -10.35381 c -1.29701,0 -2.41969,-0.47516 -3.37262,-1.42875 -0.95228,-0.89853 -1.42875,-2.02252 -1.42875,-3.37065 v -0.0806 c 0,-1.32126 0.47647,-2.45901 1.42875,-3.41129 0.95227,-0.95228 2.07561,-1.42874 3.37262,-1.42874 h 10.75032 c 1.16331,0 2.14246,-0.39586 2.93548,-1.18888 0.79368,-0.79367 1.19019,-1.77151 1.19019,-2.93548 0,-1.45561 -0.7537,-2.55339 -2.26044,-3.29201 -0.3965,-0.18678 -2.61892,-0.84742 -6.66529,-1.98386 -3.14782,-0.9254 -5.48887,-2.14377 -7.02247,-3.65051 -2.19555,-2.1418 -3.29202,-5.17035 -3.29202,-9.08432 0,-3.80846 1.34945,-7.06049 4.04702,-9.75807 2.72314,-2.72379 5.99024,-4.087 9.79805,-4.087 h 7.2997 c 1.32192,0 2.45967,0.47647 3.41195,1.43006 0.95162,0.95097 1.42809,2.08938 1.42809,3.40998 v 0.0793 c 0,1.34945 -0.47647,2.47409 -1.42809,3.37263 -0.95228,0.95097 -2.09003,1.42874 -3.41195,1.42874 z" />
+ <path d="m 249.06434,258.29851 c 6.29434,0 11.67573,2.23488 16.14612,6.70396 4.46909,4.47106 6.70396,9.85245 6.70396,16.14679 0,6.29499 -2.23487,11.67638 -6.70396,16.14678 -4.46974,4.46974 -9.85178,6.70396 -16.14612,6.70396 -6.29435,0 -11.67639,-2.23356 -16.14548,-6.70396 -4.46974,-4.46974 -6.70461,-9.85113 -6.70461,-16.14678 0,-6.29434 2.23487,-11.67508 6.70461,-16.14679 4.46909,-4.46908 9.85113,-6.70396 16.14548,-6.70396 z m 0,9.79739 c -3.64986,0 -6.7577,1.28391 -9.32289,3.84909 -2.53963,2.5665 -3.80911,5.67435 -3.80911,9.32289 0,3.62364 1.26948,6.71772 3.80911,9.28291 2.56519,2.56519 5.67238,3.84778 9.32289,3.84778 3.62298,0 6.71706,-1.28259 9.28291,-3.84778 2.56518,-2.56519 3.84778,-5.65927 3.84778,-9.28291 0,-3.64788 -1.2826,-6.75639 -3.84778,-9.32289 -2.56585,-2.56518 -5.65928,-3.84909 -9.28291,-3.84909 z" />
+ <path d="m 307.22146,259.37007 c 2.24864,0.71438 3.37263,2.24798 3.37263,4.60148 v 0.19989 c 0,1.6116 -0.64884,2.89419 -1.94454,3.84778 -0.89919,0.63376 -1.82525,0.95097 -2.77622,0.95097 -0.50334,0 -1.01913,-0.0793 -1.54737,-0.23791 -1.29636,-0.42272 -2.63204,-0.63638 -4.00638,-0.63638 -3.64986,0 -6.75836,1.28391 -9.32289,3.84909 -2.53963,2.5665 -3.80846,5.67435 -3.80846,9.32289 0,3.62364 1.26883,6.71772 3.80846,9.28291 2.56453,2.56519 5.67238,3.84778 9.32289,3.84778 1.375,0 2.71068,-0.21103 4.00638,-0.63507 0.50203,-0.1586 1.00471,-0.2379 1.50739,-0.2379 0.97718,0 1.91767,0.31851 2.81686,0.95358 1.2957,0.95097 1.94453,2.24798 1.94453,3.88776 0,2.32728 -1.12464,3.86089 -3.37262,4.60148 -2.22111,0.6875 -4.52152,1.03027 -6.90189,1.03027 -6.29434,0 -11.67638,-2.23356 -16.14678,-6.70396 -4.46843,-4.46974 -6.70396,-9.85113 -6.70396,-16.14678 0,-6.29435 2.23487,-11.67508 6.70396,-16.14679 4.46974,-4.46843 9.85178,-6.70396 16.14678,-6.70396 2.37906,0.001 4.68012,0.35981 6.90123,1.07287 z" />
+ <path d="m 322.25671,242.03442 c 1.29504,0 2.41903,0.46336 3.37262,1.38876 0.95163,0.95097 1.42809,2.08938 1.42809,3.4113 v 27.49154 h 1.50739 c 3.38508,0 6.33301,-1.12399 8.84708,-3.37263 2.45901,-2.24798 3.86023,-5.0242 4.20431,-8.33063 0.15861,-1.24261 0.68816,-2.26174 1.58735,-3.0541 0.89854,-0.84611 1.96944,-1.27015 3.21271,-1.27015 h 0.11863 c 1.40252,0 2.5796,0.53021 3.53122,1.58735 0.84676,0.92541 1.26949,1.99697 1.26949,3.21271 0,0.15861 -0.0138,0.33163 -0.0393,0.51579 -0.63507,6.63842 -3.17405,11.61019 -7.61692,14.91531 2.32663,1.43006 4.46909,3.84909 6.42739,7.26039 2.03563,3.51746 3.05476,7.31412 3.05476,11.38473 v 2.02515 c 0,1.34813 -0.47712,2.47147 -1.42809,3.37066 -0.95359,0.95359 -2.07692,1.42874 -3.37263,1.42874 h -0.11928 c -1.29635,0 -2.41969,-0.47515 -3.37196,-1.42874 -0.95228,-0.89854 -1.42809,-2.02253 -1.42809,-3.37066 v -2.02515 c -0.0275,-3.59414 -1.31012,-6.67708 -3.84844,-9.24358 -2.56584,-2.53832 -5.66058,-3.80715 -9.28291,-3.80715 h -3.25269 v 15.07523 c 0,1.34813 -0.47646,2.47146 -1.42809,3.37065 -0.95293,0.95359 -2.07758,1.42875 -3.37262,1.42875 h -0.12059 c -1.2957,0 -2.41838,-0.47516 -3.37132,-1.42875 -0.95162,-0.89853 -1.42809,-2.02252 -1.42809,-3.37065 v -52.36547 c 0,-1.32257 0.47647,-2.46032 1.42809,-3.41129 0.95228,-0.92541 2.07562,-1.38877 3.37132,-1.38877 h 0.12059 z" />
+ <path d="m 402.31164,271.6291 c 0.31721,0.66063 0.47581,1.33371 0.47581,2.02252 0,0.55577 -0.10617,1.11089 -0.31655,1.66665 -0.45025,1.24262 -1.29635,2.14181 -2.53897,2.69692 -3.70294,1.66665 -8.56919,3.8622 -14.59876,6.58599 -7.48452,3.38442 -11.87496,5.38139 -13.17067,5.9909 2.00877,2.53832 5.14349,3.80715 9.40219,3.80715 2.82866,0 5.3945,-0.83234 7.69622,-2.499 2.24732,-1.63977 3.82091,-3.75537 4.7201,-6.34808 0.76681,-2.16868 2.30172,-3.25269 4.60148,-3.25269 1.63978,0 2.94924,0.68881 3.92839,2.06185 0.60689,0.84742 0.91165,1.7335 0.91165,2.65891 0,0.55577 -0.10552,1.12399 -0.31721,1.70532 -1.56048,4.52348 -4.29738,8.17334 -8.21135,10.95087 -3.96706,2.88108 -8.40994,4.32293 -13.32928,4.32293 -6.29434,0 -11.67638,-2.23356 -16.14547,-6.70395 -4.46974,-4.46975 -6.70461,-9.85114 -6.70461,-16.14679 0,-6.29434 2.23487,-11.67507 6.70461,-16.14678 4.46843,-4.46843 9.85113,-6.70396 16.14547,-6.70396 4.54774,0 8.70093,1.24392 12.4563,3.7285 3.7285,2.43607 6.49161,5.63437 8.29065,9.60274 z m -20.7476,-3.5332 c -3.6492,0 -6.7577,1.28391 -9.32289,3.84909 -2.53897,2.5665 -3.80846,5.67435 -3.80846,9.32289 v 0.27789 l 22.1757,-9.95731 c -1.95699,-2.32597 -4.97177,-3.49256 -9.04435,-3.49256 z" />
+ <path d="m 415.48166,242.03442 c 1.2957,0 2.41969,0.46336 3.37262,1.38876 0.95162,0.95097 1.42809,2.08938 1.42809,3.4113 v 11.46403 h 5.95092 c 1.2957,0 2.41903,0.47647 3.37262,1.43006 0.95163,0.95097 1.42678,2.08938 1.42678,3.40998 v 0.0793 c 0,1.34945 -0.47515,2.47409 -1.42678,3.37263 -0.95293,0.95097 -2.07692,1.42874 -3.37262,1.42874 h -5.95092 v 23.52252 c 0,0.76811 0.26347,1.41695 0.79367,1.94453 0.5289,0.53021 1.19019,0.79368 1.98321,0.79368 h 3.17404 c 1.2957,0 2.41903,0.47646 3.37262,1.42874 0.95163,0.95228 1.42678,2.09003 1.42678,3.41129 v 0.0806 c 0,1.34813 -0.47515,2.47146 -1.42678,3.37065 C 428.65298,303.52484 427.52899,304 426.23329,304 h -3.17404 c -3.43817,0 -6.38675,-1.21574 -8.84642,-3.6492 -2.43411,-2.45901 -3.6492,-5.39515 -3.6492,-8.80775 v -44.70726 c 0,-1.32258 0.47581,-2.46033 1.42809,-3.4113 0.95228,-0.9254 2.07627,-1.38876 3.37197,-1.38876 h 0.11797 z" />
+ <path d="m 448.88545,268.01922 c -1.16397,0 -2.14246,0.39586 -2.93548,1.18888 -0.79368,0.82054 -1.19019,1.79838 -1.19019,2.93548 0,1.58735 0.76681,2.77753 2.30042,3.56989 0.5302,0.29165 2.7382,0.95228 6.62596,1.98386 3.14652,0.89985 5.48691,2.07627 7.02117,3.53057 2.19489,2.09003 3.29267,5.06549 3.29267,8.92704 0,3.80714 -1.34945,7.0736 -4.04637,9.79739 -2.72379,2.69823 -5.99089,4.04636 -9.79869,4.04636 h -10.35382 c -1.29635,0 -2.41969,-0.47516 -3.37262,-1.42875 -0.95228,-0.89853 -1.42744,-2.02252 -1.42744,-3.37065 v -0.0806 c 0,-1.32126 0.47516,-2.45901 1.42744,-3.41129 0.95228,-0.95228 2.07627,-1.42874 3.37262,-1.42874 h 10.75032 c 1.16332,0 2.14312,-0.39586 2.93549,-1.18888 0.79367,-0.79367 1.19018,-1.77151 1.19018,-2.93548 0,-1.45561 -0.7537,-2.55339 -2.26043,-3.29201 -0.39782,-0.18678 -2.61893,-0.84742 -6.66529,-1.98386 -3.14783,-0.9254 -5.48887,-2.14377 -7.02248,-3.65051 -2.19555,-2.1418 -3.29201,-5.17035 -3.29201,-9.08432 0,-3.80846 1.34944,-7.06049 4.04701,-9.75807 2.72314,-2.72379 5.99025,-4.087 9.7987,-4.087 h 7.29906 c 1.32322,0 2.45967,0.47647 3.41129,1.43006 0.95228,0.95097 1.42809,2.08938 1.42809,3.40998 v 0.0793 c 0,1.34945 -0.47581,2.47409 -1.42809,3.37263 -0.95162,0.95097 -2.08872,1.42874 -3.41129,1.42874 z" />
+ </g>
+ </g>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/api.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/api.rst
new file mode 100644
index 0000000000..d265a91c2c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/api.rst
@@ -0,0 +1,152 @@
+API
+===
+
+Design
+------
+
+``websockets`` provides complete client and server implementations, as shown
+in the :doc:`getting started guide <intro>`. These functions are built on top
+of low-level APIs reflecting the two phases of the WebSocket protocol:
+
+1. An opening handshake, in the form of an HTTP Upgrade request;
+
+2. Data transfer, as framed messages, ending with a closing handshake.
+
+The first phase is designed to integrate with existing HTTP software.
+``websockets`` provides a minimal implementation to build, parse and validate
+HTTP requests and responses.
+
+The second phase is the core of the WebSocket protocol. ``websockets``
+provides a complete implementation on top of ``asyncio`` with a simple API.
+
+For convenience, public APIs can be imported directly from the
+:mod:`websockets` package, unless noted otherwise. Anything that isn't listed
+in this document is a private API.
+
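A minimal sketch of the convenience import mentioned above; the URI is a placeholder
and a server is assumed to be listening there::

    import asyncio

    import websockets  # connect() and serve() are available at the package root

    async def hello():
        async with websockets.connect("ws://localhost:8765") as ws:
            await ws.send("hello")
            print(await ws.recv())

    asyncio.get_event_loop().run_until_complete(hello())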
+High-level
+----------
+
+Server
+......
+
+.. automodule:: websockets.server
+
+ .. autofunction:: serve(ws_handler, host=None, port=None, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None, **kwds)
+ :async:
+
+ .. autofunction:: unix_serve(ws_handler, path, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None, **kwds)
+ :async:
+
+
+ .. autoclass:: WebSocketServer
+
+ .. automethod:: close
+ .. automethod:: wait_closed
+ .. autoattribute:: sockets
+
+ .. autoclass:: WebSocketServerProtocol(ws_handler, ws_server, *, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None)
+
+ .. automethod:: handshake
+ .. automethod:: process_request
+ .. automethod:: select_subprotocol
+
+Client
+......
+
+.. automodule:: websockets.client
+
+ .. autofunction:: connect(uri, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origin=None, extensions=None, subprotocols=None, extra_headers=None, **kwds)
+ :async:
+
+ .. autofunction:: unix_connect(path, uri="ws://localhost/", *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origin=None, extensions=None, subprotocols=None, extra_headers=None, **kwds)
+ :async:
+
+ .. autoclass:: WebSocketClientProtocol(*, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origin=None, extensions=None, subprotocols=None, extra_headers=None)
+
+ .. automethod:: handshake
+
+Shared
+......
+
+.. automodule:: websockets.protocol
+
+ .. autoclass:: WebSocketCommonProtocol(*, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None)
+
+ .. automethod:: close
+ .. automethod:: wait_closed
+
+ .. automethod:: recv
+ .. automethod:: send
+
+ .. automethod:: ping
+ .. automethod:: pong
+
+ .. autoattribute:: local_address
+ .. autoattribute:: remote_address
+
+ .. autoattribute:: open
+ .. autoattribute:: closed
+
+Types
+.....
+
+.. automodule:: websockets.typing
+
+ .. autodata:: Data
+
+
+Per-Message Deflate Extension
+.............................
+
+.. automodule:: websockets.extensions.permessage_deflate
+
+ .. autoclass:: ServerPerMessageDeflateFactory
+
+ .. autoclass:: ClientPerMessageDeflateFactory
+
+HTTP Basic Auth
+...............
+
+.. automodule:: websockets.auth
+
+ .. autofunction:: basic_auth_protocol_factory
+
+ .. autoclass:: BasicAuthWebSocketServerProtocol
+
+ .. automethod:: process_request
+
+Exceptions
+..........
+
+.. automodule:: websockets.exceptions
+ :members:
+
+Low-level
+---------
+
+Opening handshake
+.................
+
+.. automodule:: websockets.handshake
+ :members:
+
+Data transfer
+.............
+
+.. automodule:: websockets.framing
+ :members:
+
+URI parser
+..........
+
+.. automodule:: websockets.uri
+ :members:
+
+Utilities
+.........
+
+.. automodule:: websockets.headers
+ :members:
+
+.. automodule:: websockets.http
+ :members:
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/changelog.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/changelog.rst
new file mode 100644
index 0000000000..04f18a7657
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/changelog.rst
@@ -0,0 +1,563 @@
+Changelog
+---------
+
+.. currentmodule:: websockets
+
+8.2
+...
+
+*In development*
+
+8.1
+...
+
+* Added compatibility with Python 3.8.
+
+8.0.2
+.....
+
+* Restored the ability to pass a socket with the ``sock`` parameter of
+ :func:`~server.serve`.
+
+* Removed an incorrect assertion when a connection drops.
+
+8.0.1
+.....
+
+* Restored the ability to import ``WebSocketProtocolError`` from
+ ``websockets``.
+
+8.0
+...
+
+.. warning::
+
+ **Version 8.0 drops compatibility with Python 3.4 and 3.5.**
+
+.. note::
+
+ **Version 8.0 expects** ``process_request`` **to be a coroutine.**
+
+ Previously, it could be a function or a coroutine.
+
+ If you're passing a ``process_request`` argument to :func:`~server.serve`
+ or :class:`~server.WebSocketServerProtocol`, or if you're overriding
+ :meth:`~protocol.WebSocketServerProtocol.process_request` in a subclass,
+ define it with ``async def`` instead of ``def``.
+
+ For backwards compatibility, functions are still mostly supported, but
+ mixing functions and coroutines won't work in some inheritance scenarios.
+
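A minimal sketch of the coroutine form expected from 8.0 onwards; the ``/status``
path and the response body are assumptions::

    import http

    async def process_request(path, request_headers):
        # Defined with ``async def``, as 8.0 expects.
        if path == "/status":
            return http.HTTPStatus.OK, [], b"OK\n"
        return None  # proceed with the normal WebSocket handshake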
+.. note::
+
+ **Version 8.0 changes the behavior of the** ``max_queue`` **parameter.**
+
+ If you were setting ``max_queue=0`` to make the queue of incoming messages
+ unbounded, change it to ``max_queue=None``.
+
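A minimal sketch of that adjustment; the handler, host, and port are placeholders::

    import websockets

    async def handler(websocket, path):
        async for message in websocket:
            await websocket.send(message)

    # Before 8.0 this would have been max_queue=0; from 8.0 on, None means unbounded.
    start_server = websockets.serve(handler, "localhost", 8765, max_queue=None)
    # Start it as usual, e.g. asyncio.get_event_loop().run_until_complete(start_server)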
+.. note::
+
+ **Version 8.0 deprecates the** ``host`` **,** ``port`` **, and** ``secure``
+ **attributes of** :class:`~protocol.WebSocketCommonProtocol`.
+
+ Use :attr:`~protocol.WebSocketCommonProtocol.local_address` in servers and
+ :attr:`~protocol.WebSocketCommonProtocol.remote_address` in clients
+ instead of ``host`` and ``port``.
+
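A minimal sketch of the replacement attributes inside a server-side handler; the
logging is illustrative only::

    async def handler(websocket, path):
        client_addr = websocket.remote_address  # peer address, typically (host, port)
        server_addr = websocket.local_address   # address this side is bound to
        print(f"connection from {client_addr} on {server_addr}")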
+.. note::
+
+ **Version 8.0 renames the** ``WebSocketProtocolError`` **exception**
+ to :exc:`ProtocolError` **.**
+
+ A ``WebSocketProtocolError`` alias provides backwards compatibility.
+
+.. note::
+
+ **Version 8.0 adds the reason phrase to the return type of the low-level
+ API** :func:`~http.read_response` **.**
+
+Also:
+
+* :meth:`~protocol.WebSocketCommonProtocol.send`,
+ :meth:`~protocol.WebSocketCommonProtocol.ping`, and
+ :meth:`~protocol.WebSocketCommonProtocol.pong` support bytes-like types
+ :class:`bytearray` and :class:`memoryview` in addition to :class:`bytes`.
+
+* Added :exc:`~exceptions.ConnectionClosedOK` and
+ :exc:`~exceptions.ConnectionClosedError` subclasses of
+ :exc:`~exceptions.ConnectionClosed` to tell apart normal connection
+ termination from errors.
+
+* Added :func:`~auth.basic_auth_protocol_factory` to enforce HTTP Basic Auth
+ on the server side.
+
+* :func:`~client.connect` handles redirects from the server during the
+ handshake.
+
+* :func:`~client.connect` supports overriding ``host`` and ``port``.
+
+* Added :func:`~client.unix_connect` for connecting to Unix sockets.
+
+* Improved support for sending fragmented messages by accepting asynchronous
+ iterators in :meth:`~protocol.WebSocketCommonProtocol.send`.
+
+* Prevented spurious log messages about :exc:`~exceptions.ConnectionClosed`
+ exceptions in keepalive ping task. If you were using ``ping_timeout=None``
+ as a workaround, you can remove it.
+
+* Changed :meth:`WebSocketServer.close() <server.WebSocketServer.close>` to
+ perform a proper closing handshake instead of failing the connection.
+
+* Avoided a crash when an ``extra_headers`` callable returns ``None``.
+
+* Improved error messages when HTTP parsing fails.
+
+* Enabled readline in the interactive client.
+
+* Added type hints (:pep:`484`).
+
+* Added a FAQ to the documentation.
+
+* Added documentation for extensions.
+
+* Documented how to optimize memory usage.
+
+* Improved API documentation.
+
+7.0
+...
+
+.. warning::
+
+ **Version 7.0 renames the** ``timeout`` **argument of**
+ :func:`~server.serve()` **and** :func:`~client.connect` **to**
+ ``close_timeout`` **.**
+
+ This prevents confusion with ``ping_timeout``.
+
+ For backwards compatibility, ``timeout`` is still supported.
+
+.. warning::
+
+ **Version 7.0 changes how a server terminates connections when it's
+ closed with** :meth:`~server.WebSocketServer.close` **.**
+
+ Previously, connection handlers were canceled. Now, connections are
+ closed with close code 1001 (going away). From the perspective of the
+ connection handler, this is the same as if the remote endpoint was
+ disconnecting. This removes the need to prepare for
+ :exc:`~asyncio.CancelledError` in connection handlers.
+
+ You can restore the previous behavior by adding the following lines at the
+ beginning of connection handlers::
+
+ async def handler(websocket, path):
+ closed = asyncio.ensure_future(websocket.wait_closed())
+ closed.add_done_callback(lambda task: task.cancel())
+
+.. note::
+
+ **Version 7.0 changes how a** :meth:`~protocol.WebSocketCommonProtocol.ping`
+ **that hasn't received a pong yet behaves when the connection is closed.**
+
+ The ping — as in ``ping = await websocket.ping()`` — used to be canceled
+ when the connection is closed, so that ``await ping`` raised
+ :exc:`~asyncio.CancelledError`. Now ``await ping`` raises
+ :exc:`~exceptions.ConnectionClosed` like other public APIs.
+
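A minimal sketch of the 7.0 behavior described above; the URI is a placeholder::

    import asyncio

    import websockets

    async def wait_for_pong(uri):
        async with websockets.connect(uri) as ws:
            pong_waiter = await ws.ping()
            try:
                await pong_waiter  # resolves when the matching Pong arrives
            except websockets.ConnectionClosed:
                print("closed before the pong arrived")  # 7.0+: no CancelledError here

    asyncio.get_event_loop().run_until_complete(wait_for_pong("ws://localhost:8765"))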
+.. note::
+
+ **Version 7.0 raises a** :exc:`RuntimeError` **exception if two coroutines
+ call** :meth:`~protocol.WebSocketCommonProtocol.recv` **concurrently.**
+
+ Concurrent calls lead to non-deterministic behavior because there are no
+ guarantees about which coroutine will receive which message.
+
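A minimal sketch of a single-reader pattern that avoids concurrent ``recv()`` calls;
the queue-based fan-out is an assumption, not a library feature::

    import asyncio

    async def reader(websocket, queue):
        # Only this task reads from the connection; other tasks consume the queue.
        async for message in websocket:
            await queue.put(message)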
+Also:
+
+* ``websockets`` sends Ping frames at regular intervals and closes the
+ connection if it doesn't receive a matching Pong frame. See
+ :class:`~protocol.WebSocketCommonProtocol` for details.
+
+* Added ``process_request`` and ``select_subprotocol`` arguments to
+ :func:`~server.serve` and :class:`~server.WebSocketServerProtocol` to
+ customize :meth:`~server.WebSocketServerProtocol.process_request` and
+ :meth:`~server.WebSocketServerProtocol.select_subprotocol` without
+ subclassing :class:`~server.WebSocketServerProtocol`.
+
+* Added support for sending fragmented messages.
+
+* Added the :meth:`~protocol.WebSocketCommonProtocol.wait_closed` method to
+ protocols.
+
+* Added an interactive client: ``python -m websockets <uri>``.
+
+* Changed the ``origins`` argument to represent the lack of an origin with
+ ``None`` rather than ``''``.
+
+* Fixed a data loss bug in :meth:`~protocol.WebSocketCommonProtocol.recv`:
+ canceling it at the wrong time could result in messages being dropped.
+
+* Improved handling of multiple HTTP headers with the same name.
+
+* Improved error messages when a required HTTP header is missing.
+
+6.0
+...
+
+.. warning::
+
+ **Version 6.0 introduces the** :class:`~http.Headers` **class for managing
+ HTTP headers and changes several public APIs:**
+
+ * :meth:`~server.WebSocketServerProtocol.process_request` now receives a
+ :class:`~http.Headers` instead of a :class:`~http.client.HTTPMessage` in
+ the ``request_headers`` argument.
+
+ * The :attr:`~protocol.WebSocketCommonProtocol.request_headers` and
+ :attr:`~protocol.WebSocketCommonProtocol.response_headers` attributes of
+ :class:`~protocol.WebSocketCommonProtocol` are :class:`~http.Headers`
+ instead of :class:`~http.client.HTTPMessage`.
+
+ * The :attr:`~protocol.WebSocketCommonProtocol.raw_request_headers` and
+ :attr:`~protocol.WebSocketCommonProtocol.raw_response_headers`
+ attributes of :class:`~protocol.WebSocketCommonProtocol` are removed.
+ Use :meth:`~http.Headers.raw_items` instead.
+
+ * Functions defined in the :mod:`~handshake` module now receive
+ :class:`~http.Headers` in argument instead of ``get_header`` or
+ ``set_header`` functions. This affects libraries that rely on
+ low-level APIs.
+
+ * Functions defined in the :mod:`~http` module now return HTTP headers as
+ :class:`~http.Headers` instead of lists of ``(name, value)`` pairs.
+
+ Since :class:`~http.Headers` and :class:`~http.client.HTTPMessage` provide
+ similar APIs, this change won't affect most of the code dealing with HTTP
+ headers.
+
+
+Also:
+
+* Added compatibility with Python 3.7.
+
+5.0.1
+.....
+
+* Fixed a regression in the 5.0 release that broke some invocations of
+ :func:`~server.serve()` and :func:`~client.connect`.
+
+5.0
+...
+
+.. note::
+
+ **Version 5.0 fixes a security issue introduced in version 4.0.**
+
+ Version 4.0 was vulnerable to denial of service by memory exhaustion
+ because it didn't enforce ``max_size`` when decompressing compressed
+ messages (`CVE-2018-1000518`_).
+
+ .. _CVE-2018-1000518: https://nvd.nist.gov/vuln/detail/CVE-2018-1000518
+
+.. note::
+
+ **Version 5.0 adds a** ``user_info`` **field to the return value of**
+ :func:`~uri.parse_uri` **and** :class:`~uri.WebSocketURI` **.**
+
+ If you're unpacking :class:`~uri.WebSocketURI` into four variables,
+ adjust your code to account for that fifth field.
+
+Also:
+
+* :func:`~client.connect` performs HTTP Basic Auth when the URI contains
+ credentials.
+
+* Iterating on incoming messages no longer raises an exception when the
+ connection terminates with close code 1001 (going away).
+
+* A plain HTTP request now receives a 426 Upgrade Required response and
+ doesn't log a stack trace.
+
+* :func:`~server.unix_serve` can be used as an asynchronous context manager on
+ Python ≥ 3.5.1.
+
+* Added the :attr:`~protocol.WebSocketCommonProtocol.closed` property to
+ protocols.
+
+* If a :meth:`~protocol.WebSocketCommonProtocol.ping` doesn't receive a pong,
+ it's canceled when the connection is closed.
+
+* Reported the cause of :exc:`~exceptions.ConnectionClosed` exceptions.
+
+* Added new examples in the documentation.
+
+* Updated documentation with new features from Python 3.6.
+
+* Improved several other sections of the documentation.
+
+* Fixed missing close code, which caused :exc:`TypeError` on connection close.
+
+* Fixed a race condition in the closing handshake that raised
+ :exc:`~exceptions.InvalidState`.
+
+* Stopped logging stack traces when the TCP connection dies prematurely.
+
+* Prevented writing to a closing TCP connection during unclean shutdowns.
+
+* Made connection termination more robust to network congestion.
+
+* Prevented processing of incoming frames after failing the connection.
+
+4.0.1
+.....
+
+* Fixed issues with the packaging of the 4.0 release.
+
+4.0
+...
+
+.. warning::
+
+ **Version 4.0 enables compression with the permessage-deflate extension.**
+
+    As of August 2017, Firefox and Chrome support it, but Safari and IE don't.
+
+ Compression should improve performance but it increases RAM and CPU use.
+
+ If you want to disable compression, add ``compression=None`` when calling
+ :func:`~server.serve()` or :func:`~client.connect`.
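+
+    For example (a minimal sketch, where ``handler`` stands for your
+    connection handler)::
+
+        start_server = websockets.serve(
+            handler, 'localhost', 8765, compression=None,
+        )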
+
+.. warning::
+
+ **Version 4.0 drops compatibility with Python 3.3.**
+
+.. note::
+
+ **Version 4.0 removes the** ``state_name`` **attribute of protocols.**
+
+ Use ``protocol.state.name`` instead of ``protocol.state_name``.
+
+Also:
+
+* :class:`~protocol.WebSocketCommonProtocol` instances can be used as
+ asynchronous iterators on Python ≥ 3.6. They yield incoming messages.
+
+* Added :func:`~server.unix_serve` for listening on Unix sockets.
+
+* Added the :attr:`~server.WebSocketServer.sockets` attribute to the return
+ value of :func:`~server.serve`.
+
+* Reorganized and extended documentation.
+
+* Aborted connections if they don't close within the configured ``timeout``.
+
+* Rewrote connection termination to increase robustness in edge cases.
+
+* Stopped leaking pending tasks when :meth:`~asyncio.Task.cancel` is called on
+ a connection while it's being closed.
+
+* Reduced verbosity of "Failing the WebSocket connection" logs.
+
+* Allowed ``extra_headers`` to override ``Server`` and ``User-Agent`` headers.
+
+3.4
+...
+
+* Renamed :func:`~server.serve()` and :func:`~client.connect`'s ``klass``
+ argument to ``create_protocol`` to reflect that it can also be a callable.
+ For backwards compatibility, ``klass`` is still supported.
+
+* :func:`~server.serve` can be used as an asynchronous context manager on
+ Python ≥ 3.5.1.
+
+* Added support for customizing handling of incoming connections with
+ :meth:`~server.WebSocketServerProtocol.process_request`.
+
+* Made read and write buffer sizes configurable.
+
+* Rewrote HTTP handling for simplicity and performance.
+
+* Added an optional C extension to speed up low-level operations.
+
+* An invalid response status code during :func:`~client.connect` now raises
+ :class:`~exceptions.InvalidStatusCode` with a ``code`` attribute.
+
+* Providing a ``sock`` argument to :func:`~client.connect` no longer
+ crashes.
+
+3.3
+...
+
+* Ensured compatibility with Python 3.6.
+
+* Reduced noise in logs caused by connection resets.
+
+* Avoided crashing on concurrent writes on slow connections.
+
+3.2
+...
+
+* Added ``timeout``, ``max_size``, and ``max_queue`` arguments to
+ :func:`~client.connect()` and :func:`~server.serve`.
+
+* Made server shutdown more robust.
+
+3.1
+...
+
+* Avoided a warning when closing a connection before the opening handshake.
+
+* Added flow control for incoming data.
+
+3.0
+...
+
+.. warning::
+
+ **Version 3.0 introduces a backwards-incompatible change in the**
+ :meth:`~protocol.WebSocketCommonProtocol.recv` **API.**
+
+ **If you're upgrading from 2.x or earlier, please read this carefully.**
+
+ :meth:`~protocol.WebSocketCommonProtocol.recv` used to return ``None``
+ when the connection was closed. This required checking the return value of
+ every call::
+
+ message = await websocket.recv()
+ if message is None:
+ return
+
+ Now it raises a :exc:`~exceptions.ConnectionClosed` exception instead.
+ This is more Pythonic. The previous code can be simplified to::
+
+ message = await websocket.recv()
+
+ When implementing a server, which is the more popular use case, there's no
+ strong reason to handle such exceptions. Let them bubble up, terminate the
+ handler coroutine, and the server will simply ignore them.
+
+ In order to avoid stranding projects built upon an earlier version, the
+ previous behavior can be restored by passing ``legacy_recv=True`` to
+ :func:`~server.serve`, :func:`~client.connect`,
+ :class:`~server.WebSocketServerProtocol`, or
+ :class:`~client.WebSocketClientProtocol`. ``legacy_recv`` isn't documented
+ in their signatures but isn't scheduled for deprecation either.
+
+Also:
+
+* :func:`~client.connect` can be used as an asynchronous context manager on
+ Python ≥ 3.5.1.
+
+* Updated documentation with ``await`` and ``async`` syntax from Python 3.5.
+
+* :meth:`~protocol.WebSocketCommonProtocol.ping` and
+ :meth:`~protocol.WebSocketCommonProtocol.pong` support data passed as
+ :class:`str` in addition to :class:`bytes`.
+
+* Worked around an asyncio bug affecting connection termination under load.
+
+* Made ``state_name`` attribute on protocols a public API.
+
+* Improved documentation.
+
+2.7
+...
+
+* Added compatibility with Python 3.5.
+
+* Refreshed documentation.
+
+2.6
+...
+
+* Added ``local_address`` and ``remote_address`` attributes on protocols.
+
+* Closed open connections with code 1001 when a server shuts down.
+
+* Avoided TCP fragmentation of small frames.
+
+2.5
+...
+
+* Improved documentation.
+
+* Provided access to handshake request and response HTTP headers.
+
+* Allowed customizing handshake request and response HTTP headers.
+
+* Supported running on a non-default event loop.
+
+* Returned a 403 status code instead of 400 when the request Origin isn't
+ allowed.
+
+* Canceling :meth:`~protocol.WebSocketCommonProtocol.recv` no longer drops
+ the next message.
+
+* Clarified that the closing handshake can be initiated by the client.
+
+* Set the close code and reason more consistently.
+
+* Strengthened connection termination by simplifying the implementation.
+
+* Improved tests, added tox configuration, and enforced 100% branch coverage.
+
+2.4
+...
+
+* Added support for subprotocols.
+
+* Supported non-default event loop.
+
+* Added ``loop`` argument to :func:`~client.connect` and
+ :func:`~server.serve`.
+
+2.3
+...
+
+* Improved compliance of close codes.
+
+2.2
+...
+
+* Added support for limiting message size.
+
+2.1
+...
+
+* Added ``host``, ``port`` and ``secure`` attributes on protocols.
+
+* Added support for providing and checking Origin_.
+
+.. _Origin: https://tools.ietf.org/html/rfc6455#section-10.2
+
+2.0
+...
+
+.. warning::
+
+ **Version 2.0 introduces a backwards-incompatible change in the**
+ :meth:`~protocol.WebSocketCommonProtocol.send`,
+ :meth:`~protocol.WebSocketCommonProtocol.ping`, and
+ :meth:`~protocol.WebSocketCommonProtocol.pong` **APIs.**
+
+ **If you're upgrading from 1.x or earlier, please read this carefully.**
+
+ These APIs used to be functions. Now they're coroutines.
+
+ Instead of::
+
+ websocket.send(message)
+
+ you must now write::
+
+ await websocket.send(message)
+
+Also:
+
+* Added flow control for outgoing data.
+
+1.0
+...
+
+* Initial public release.
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/cheatsheet.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/cheatsheet.rst
new file mode 100644
index 0000000000..f897326a6b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/cheatsheet.rst
@@ -0,0 +1,109 @@
+Cheat sheet
+===========
+
+.. currentmodule:: websockets
+
+Server
+------
+
+* Write a coroutine that handles a single connection. It receives a WebSocket
+  protocol instance and the URI path as arguments.
+
+ * Call :meth:`~protocol.WebSocketCommonProtocol.recv` and
+ :meth:`~protocol.WebSocketCommonProtocol.send` to receive and send
+ messages at any time.
+
+ * When :meth:`~protocol.WebSocketCommonProtocol.recv` or
+ :meth:`~protocol.WebSocketCommonProtocol.send` raises
+ :exc:`~exceptions.ConnectionClosed`, clean up and exit. If you started
+ other :class:`asyncio.Task`, terminate them before exiting.
+
+ * If you aren't awaiting :meth:`~protocol.WebSocketCommonProtocol.recv`,
+ consider awaiting :meth:`~protocol.WebSocketCommonProtocol.wait_closed`
+ to detect quickly when the connection is closed.
+
+ * You may :meth:`~protocol.WebSocketCommonProtocol.ping` or
+ :meth:`~protocol.WebSocketCommonProtocol.pong` if you wish but it isn't
+ needed in general.
+
+* Create a server with :func:`~server.serve` which is similar to asyncio's
+ :meth:`~asyncio.AbstractEventLoop.create_server`. You can also use it as an
+ asynchronous context manager.
+
+ * The server takes care of establishing connections, then lets the handler
+ execute the application logic, and finally closes the connection after the
+ handler exits normally or with an exception.
+
+ * For advanced customization, you may subclass
+ :class:`~server.WebSocketServerProtocol` and pass either this subclass or
+ a factory function as the ``create_protocol`` argument.
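+
+Putting these points together, here's a minimal echo server (a sketch rather
+than a complete application)::
+
+    import asyncio
+    import websockets
+
+    # The handler receives a protocol instance and the URI path.
+    async def echo(websocket, path):
+        async for message in websocket:
+            await websocket.send(message)
+
+    start_server = websockets.serve(echo, 'localhost', 8765)
+
+    asyncio.get_event_loop().run_until_complete(start_server)
+    asyncio.get_event_loop().run_forever()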
+
+Client
+------
+
+* Create a client with :func:`~client.connect` which is similar to asyncio's
+ :meth:`~asyncio.BaseEventLoop.create_connection`. You can also use it as an
+ asynchronous context manager.
+
+ * For advanced customization, you may subclass
+    :class:`~client.WebSocketClientProtocol` and pass either this subclass or
+ a factory function as the ``create_protocol`` argument.
+
+* Call :meth:`~protocol.WebSocketCommonProtocol.recv` and
+ :meth:`~protocol.WebSocketCommonProtocol.send` to receive and send messages
+ at any time.
+
+* You may :meth:`~protocol.WebSocketCommonProtocol.ping` or
+ :meth:`~protocol.WebSocketCommonProtocol.pong` if you wish but it isn't
+ needed in general.
+
+* If you aren't using :func:`~client.connect` as a context manager, call
+ :meth:`~protocol.WebSocketCommonProtocol.close` to terminate the connection.
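+
+For example, here's a minimal client (a sketch)::
+
+    import asyncio
+    import websockets
+
+    async def hello():
+        # Using connect() as a context manager closes the connection
+        # automatically when the block exits.
+        async with websockets.connect('ws://localhost:8765') as websocket:
+            await websocket.send('Hello!')
+            print(await websocket.recv())
+
+    asyncio.get_event_loop().run_until_complete(hello())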
+
+.. _debugging:
+
+Debugging
+---------
+
+If you don't understand what ``websockets`` is doing, enable logging::
+
+ import logging
+ logger = logging.getLogger('websockets')
+ logger.setLevel(logging.INFO)
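+    # switch to logging.DEBUG to also log every frame (very verbose)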
+ logger.addHandler(logging.StreamHandler())
+
+The logs contain:
+
+* Exceptions in the connection handler at the ``ERROR`` level
+* Exceptions in the opening or closing handshake at the ``INFO`` level
+* All frames at the ``DEBUG`` level — this can be very verbose
+
+If you're new to ``asyncio``, you will certainly encounter issues that are
+related to asynchronous programming in general rather than to ``websockets``
+in particular. Fortunately Python's official documentation provides advice to
+`develop with asyncio`_. Check it out: it's invaluable!
+
+.. _develop with asyncio: https://docs.python.org/3/library/asyncio-dev.html
+
+Passing additional arguments to the connection handler
+------------------------------------------------------
+
+When writing a server, if you need to pass additional arguments to the
+connection handler, you can bind them with :func:`functools.partial`::
+
+ import asyncio
+ import functools
+ import websockets
+
+ async def handler(websocket, path, extra_argument):
+ ...
+
+ bound_handler = functools.partial(handler, extra_argument='spam')
+ start_server = websockets.serve(bound_handler, '127.0.0.1', 8765)
+
+ asyncio.get_event_loop().run_until_complete(start_server)
+ asyncio.get_event_loop().run_forever()
+
+Another way to achieve this result is to define the ``handler`` coroutine in
+a scope where the ``extra_argument`` variable exists instead of injecting it
+through an argument.
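+
+For instance, here's a minimal sketch of that approach::
+
+    import asyncio
+    import websockets
+
+    extra_argument = 'spam'
+
+    # handler closes over extra_argument instead of receiving it as a parameter
+    async def handler(websocket, path):
+        await websocket.send(extra_argument)
+
+    start_server = websockets.serve(handler, '127.0.0.1', 8765)
+
+    asyncio.get_event_loop().run_until_complete(start_server)
+    asyncio.get_event_loop().run_forever()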
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/conf.py b/testing/web-platform/tests/tools/third_party/websockets/docs/conf.py
new file mode 100644
index 0000000000..064c657bf1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/conf.py
@@ -0,0 +1,272 @@
+# -*- coding: utf-8 -*-
+#
+# websockets documentation build configuration file, created by
+# sphinx-quickstart on Sun Mar 31 20:48:44 2013.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os, datetime
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.join(os.path.abspath('..'), 'src'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.intersphinx',
+ 'sphinx.ext.viewcode',
+ 'sphinx_autodoc_typehints',
+ 'sphinxcontrib_trio',
+ ]
+
+# Spelling check needs an additional module that is not installed by default.
+# Add it only if spelling check is requested so docs can be generated without it.
+if 'spelling' in sys.argv:
+ extensions.append('sphinxcontrib.spelling')
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'websockets'
+copyright = f'2013-{datetime.date.today().year}, Aymeric Augustin and contributors'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '8.1'
+# The full version, including alpha/beta/rc tags.
+release = '8.1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'alabaster'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+ 'logo': 'websockets.svg',
+ 'description': 'A library for building WebSocket servers and clients in Python with a focus on correctness and simplicity.',
+ 'github_button': True,
+ 'github_user': 'aaugustin',
+ 'github_repo': 'websockets',
+ 'tidelift_url': 'https://tidelift.com/subscription/pkg/pypi-websockets?utm_source=pypi-websockets&utm_medium=referral&utm_campaign=docs',
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+html_sidebars = {
+ '**': [
+ 'about.html',
+ 'searchbox.html',
+ 'navigation.html',
+ 'relations.html',
+ 'donate.html',
+ ]
+}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'websocketsdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'websockets.tex', 'websockets Documentation',
+ 'Aymeric Augustin', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'websockets', 'websockets Documentation',
+ ['Aymeric Augustin'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'websockets', 'websockets Documentation',
+ 'Aymeric Augustin', 'websockets', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'https://docs.python.org/3/': None}
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/contributing.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/contributing.rst
new file mode 100644
index 0000000000..40f1dbb54a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/contributing.rst
@@ -0,0 +1,61 @@
+Contributing
+============
+
+Thanks for taking the time to contribute to websockets!
+
+Code of Conduct
+---------------
+
+This project and everyone participating in it are governed by the `Code of
+Conduct`_. By participating, you are expected to uphold this code. Please
+report inappropriate behavior to aymeric DOT augustin AT fractalideas DOT com.
+
+.. _Code of Conduct: https://github.com/aaugustin/websockets/blob/master/CODE_OF_CONDUCT.md
+
+*(If I'm the person with the inappropriate behavior, please accept my
+apologies. I know I can mess up. I can't expect you to tell me, but if you
+choose to do so, I'll do my best to handle criticism constructively.
+-- Aymeric)*
+
+Contributions
+-------------
+
+Bug reports, patches and suggestions are welcome!
+
+Please open an issue_ or send a `pull request`_.
+
+Feedback about the documentation is especially valuable — the authors of
+``websockets`` feel more confident about writing code than writing docs :-)
+
+If you're wondering why things are done in a certain way, the :doc:`design
+document <design>` provides lots of details about the internals of websockets.
+
+.. _issue: https://github.com/aaugustin/websockets/issues/new
+.. _pull request: https://github.com/aaugustin/websockets/compare/
+
+Questions
+---------
+
+GitHub issues aren't a good medium for handling questions. There are better
+places to ask questions, for example Stack Overflow.
+
+If you want to ask a question anyway, please make sure that:
+
+- it's a question about ``websockets`` and not about :mod:`asyncio`;
+- it isn't answered by the documentation;
+- it wasn't asked already.
+
+A good question can be written as a suggestion to improve the documentation.
+
+Bitcoin users
+-------------
+
+websockets appears to be quite popular for interfacing with Bitcoin or other
+cryptocurrency trackers. I'm strongly opposed to Bitcoin's carbon footprint.
+
+Please stop heating the planet where my children are supposed to live, thanks.
+
+Since ``websockets`` is released under an open-source license, you can use it
+for any purpose you like. However, I won't spend any of my time to help.
+
+I will summarily close issues related to Bitcoin or cryptocurrency in any way.
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/deployment.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/deployment.rst
new file mode 100644
index 0000000000..5b05afff14
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/deployment.rst
@@ -0,0 +1,162 @@
+Deployment
+==========
+
+.. currentmodule:: websockets
+
+Application server
+------------------
+
+The author of ``websockets`` isn't aware of best practices for deploying
+network services based on :mod:`asyncio`, let alone application servers.
+
+You can run a script similar to the :ref:`server example <server-example>`,
+inside a supervisor if you deem that useful.
+
+You can also add a wrapper to daemonize the process. Third-party libraries
+provide solutions for that.
+
+If you can share knowledge on this topic, please file an issue_. Thanks!
+
+.. _issue: https://github.com/aaugustin/websockets/issues/new
+
+Graceful shutdown
+-----------------
+
+You may want to close connections gracefully when shutting down the server,
+perhaps after executing some cleanup logic. There are two ways to achieve this
+with the object returned by :func:`~server.serve`:
+
+- using it as an asynchronous context manager, or
+- calling its ``close()`` method, then waiting for its ``wait_closed()``
+ method to complete.
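+
+For instance, the second approach boils down to the following sketch, assuming
+``server`` is the object returned by :func:`~server.serve`::
+
+    server.close()
+    await server.wait_closed()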
+
+On Unix systems, shutdown is usually triggered by sending a signal.
+
+Here's a full example for handling SIGTERM on Unix:
+
+.. literalinclude:: ../example/shutdown.py
+ :emphasize-lines: 13,17-19
+
+This example is easily adapted to handle other signals. If you override the
+default handler for SIGINT, which raises :exc:`KeyboardInterrupt`, be aware
+that you won't be able to interrupt a program with Ctrl-C anymore when it's
+stuck in a loop.
+
+It's more difficult to achieve the same effect on Windows. Some third-party
+projects try to help with this problem.
+
+If your server doesn't run in the main thread, look at
+:func:`~asyncio.AbstractEventLoop.call_soon_threadsafe`.
+
+Memory usage
+------------
+
+.. _memory-usage:
+
+In most cases, memory usage of a WebSocket server is proportional to the
+number of open connections. When a server handles thousands of connections,
+memory usage can become a bottleneck.
+
+Memory usage of a single connection is the sum of:
+
+1. the baseline amount of memory ``websockets`` requires for each connection,
+2. the amount of data held in buffers before the application processes it,
+3. any additional memory allocated by the application itself.
+
+Baseline
+........
+
+Compression settings are the main factor affecting the baseline amount of
+memory used by each connection.
+
+By default ``websockets`` maximizes compression rate at the expense of memory
+usage. If memory usage is an issue, lowering compression settings can help:
+
+- Context Takeover is necessary to get good performance for almost all
+ applications. It should remain enabled.
+- Window Bits is a trade-off between memory usage and compression rate.
+ It defaults to 15 and can be lowered. The default value isn't optimal
+ for small, repetitive messages which are typical of WebSocket servers.
+- Memory Level is a trade-off between memory usage and compression speed.
+ It defaults to 8 and can be lowered. A lower memory level can actually
+ increase speed thanks to memory locality, even if the CPU does more work!
+
+See this :ref:`example <per-message-deflate-configuration-example>` for how to
+configure compression settings.
+
+Here's how various compression settings affect memory usage of a single
+connection on a 64-bit system, as well as a benchmark_ of compressed size and
+compression time for a corpus of small JSON documents.
+
++-------------+-------------+--------------+--------------+------------------+------------------+
+| Compression | Window Bits | Memory Level | Memory usage | Size vs. default | Time vs. default |
++=============+=============+==============+==============+==================+==================+
+| *default*   | 15          | 8            | 325 KiB      | +0%              | +0%              |
++-------------+-------------+--------------+--------------+------------------+------------------+
+|             | 14          | 7            | 181 KiB      | +1.5%            | -5.3%            |
++-------------+-------------+--------------+--------------+------------------+------------------+
+|             | 13          | 6            | 110 KiB      | +2.8%            | -7.5%            |
++-------------+-------------+--------------+--------------+------------------+------------------+
+|             | 12          | 5            | 73 KiB       | +4.4%            | -18.9%           |
++-------------+-------------+--------------+--------------+------------------+------------------+
+|             | 11          | 4            | 55 KiB       | +8.5%            | -18.8%           |
++-------------+-------------+--------------+--------------+------------------+------------------+
+| *disabled*  | N/A         | N/A          | 22 KiB       | N/A              | N/A              |
++-------------+-------------+--------------+--------------+------------------+------------------+
+
+*Don't assume this example is representative! Compressed size and compression
+time depend heavily on the kind of messages exchanged by the application!*
+
+You can run the same benchmark for your application by creating a list of
+typical messages and passing it to the ``_benchmark`` function_.
+
+.. _benchmark: https://gist.github.com/aaugustin/fbea09ce8b5b30c4e56458eb081fe599
+.. _function: https://gist.github.com/aaugustin/fbea09ce8b5b30c4e56458eb081fe599#file-compression-py-L48-L144
+
+This `blog post by Ilya Grigorik`_ provides more details about how compression
+settings affect memory usage and how to optimize them.
+
+.. _blog post by Ilya Grigorik: https://www.igvita.com/2013/11/27/configuring-and-optimizing-websocket-compression/
+
+This `experiment by Peter Thorson`_ suggests Window Bits = 11, Memory Level =
+4 as a sweet spot for optimizing memory usage.
+
+.. _experiment by Peter Thorson: https://www.ietf.org/mail-archive/web/hybi/current/msg10222.html
+
+Buffers
+.......
+
+Under normal circumstances, buffers are almost always empty.
+
+Under high load, if a server receives more messages than it can process,
+bufferbloat can result in excessive memory use.
+
+By default ``websockets`` has generous limits. It is strongly recommended to
+adapt them to your application. When you call :func:`~server.serve`:
+
+- Set ``max_size`` (default: 1 MiB, UTF-8 encoded) to the maximum size of
+ messages your application generates.
+- Set ``max_queue`` (default: 32) to the maximum number of messages your
+ application expects to receive faster than it can process them. The queue
+ provides burst tolerance without slowing down the TCP connection.
+
+Furthermore, you can lower ``read_limit`` and ``write_limit`` (default:
+64 KiB) to reduce the size of buffers for incoming and outgoing data.
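+
+For example, a server tuned for small, frequent messages might be created like
+this (the values below are placeholders, not recommendations)::
+
+    start_server = websockets.serve(
+        handler, 'localhost', 8765,
+        max_size=2 ** 16,     # 64 KiB per message
+        max_queue=8,          # at most 8 queued incoming messages
+        read_limit=2 ** 14,   # 16 KiB read buffer
+        write_limit=2 ** 14,  # 16 KiB write buffer
+    )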
+
+The design document provides :ref:`more details about buffers<buffers>`.
+
+Port sharing
+------------
+
+The WebSocket protocol is an extension of HTTP/1.1. It can be tempting to
+serve both HTTP and WebSocket on the same port.
+
+The author of ``websockets`` doesn't think that's a good idea, due to the
+widely different operational characteristics of HTTP and WebSocket.
+
+``websockets`` provides minimal support for responding to HTTP requests with
+the :meth:`~server.WebSocketServerProtocol.process_request` hook. Typical
+use cases include health checks. Here's an example:
+
+.. literalinclude:: ../example/health_check_server.py
+ :emphasize-lines: 9-11,17-19
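+
+In outline, such a hook looks like the following sketch: returning a
+``(status, headers, body)`` tuple sends that HTTP response instead of
+performing the WebSocket handshake, while returning ``None`` continues with
+the handshake (``handler`` stands for your connection handler)::
+
+    import http
+
+    async def health_check(path, request_headers):
+        if path == '/healthz':
+            return http.HTTPStatus.OK, [], b'OK\n'
+
+    start_server = websockets.serve(
+        handler, 'localhost', 8765, process_request=health_check,
+    )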
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/design.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/design.rst
new file mode 100644
index 0000000000..74279b87f6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/design.rst
@@ -0,0 +1,571 @@
+Design
+======
+
+.. currentmodule:: websockets
+
+This document describes the design of ``websockets``. It assumes familiarity
+with the specification of the WebSocket protocol in :rfc:`6455`.
+
+It's primarily intended for maintainers. It may also be useful for users who
+wish to understand what happens under the hood.
+
+.. warning::
+
+ Internals described in this document may change at any time.
+
+ Backwards compatibility is only guaranteed for `public APIs <api>`_.
+
+
+Lifecycle
+---------
+
+State
+.....
+
+WebSocket connections go through a trivial state machine:
+
+- ``CONNECTING``: initial state,
+- ``OPEN``: when the opening handshake is complete,
+- ``CLOSING``: when the closing handshake is started,
+- ``CLOSED``: when the TCP connection is closed.
+
+Transitions happen in the following places:
+
+- ``CONNECTING -> OPEN``: in
+ :meth:`~protocol.WebSocketCommonProtocol.connection_open` which runs when
+ the :ref:`opening handshake <opening-handshake>` completes and the WebSocket
+ connection is established — not to be confused with
+ :meth:`~asyncio.Protocol.connection_made` which runs when the TCP connection
+ is established;
+- ``OPEN -> CLOSING``: in
+ :meth:`~protocol.WebSocketCommonProtocol.write_frame` immediately before
+ sending a close frame; since receiving a close frame triggers sending a
+ close frame, this does the right thing regardless of which side started the
+ :ref:`closing handshake <closing-handshake>`; also in
+ :meth:`~protocol.WebSocketCommonProtocol.fail_connection` which duplicates
+ a few lines of code from ``write_close_frame()`` and ``write_frame()``;
+- ``* -> CLOSED``: in
+ :meth:`~protocol.WebSocketCommonProtocol.connection_lost` which is always
+ called exactly once when the TCP connection is closed.
+
+Coroutines
+..........
+
+The following diagram shows which coroutines are running at each stage of the
+connection lifecycle on the client side.
+
+.. image:: lifecycle.svg
+ :target: _images/lifecycle.svg
+
+The lifecycle is identical on the server side, except inversion of control
+makes the equivalent of :meth:`~client.connect` implicit.
+
+Coroutines shown in green are called by the application. Multiple coroutines
+may interact with the WebSocket connection concurrently.
+
+Coroutines shown in gray manage the connection. When the opening handshake
+succeeds, :meth:`~protocol.WebSocketCommonProtocol.connection_open` starts
+two tasks:
+
+- :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` runs
+ :meth:`~protocol.WebSocketCommonProtocol.transfer_data` which handles
+ incoming data and lets :meth:`~protocol.WebSocketCommonProtocol.recv`
+ consume it. It may be canceled to terminate the connection. It never exits
+ with an exception other than :exc:`~asyncio.CancelledError`. See :ref:`data
+ transfer <data-transfer>` below.
+
+- :attr:`~protocol.WebSocketCommonProtocol.keepalive_ping_task` runs
+ :meth:`~protocol.WebSocketCommonProtocol.keepalive_ping` which sends Ping
+ frames at regular intervals and ensures that corresponding Pong frames are
+ received. It is canceled when the connection terminates. It never exits
+ with an exception other than :exc:`~asyncio.CancelledError`.
+
+- :attr:`~protocol.WebSocketCommonProtocol.close_connection_task` runs
+ :meth:`~protocol.WebSocketCommonProtocol.close_connection` which waits for
+ the data transfer to terminate, then takes care of closing the TCP
+ connection. It must not be canceled. It never exits with an exception. See
+ :ref:`connection termination <connection-termination>` below.
+
+Besides, :meth:`~protocol.WebSocketCommonProtocol.fail_connection` starts
+the same :attr:`~protocol.WebSocketCommonProtocol.close_connection_task` when
+the opening handshake fails, in order to close the TCP connection.
+
+Splitting the responsibilities between two tasks makes it easier to guarantee
+that ``websockets`` can terminate connections:
+
+- within a fixed timeout,
+- without leaking pending tasks,
+- without leaking open TCP connections,
+
+regardless of whether the connection terminates normally or abnormally.
+
+:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` completes when no
+more data will be received on the connection. Under normal circumstances, it
+exits after exchanging close frames.
+
+:attr:`~protocol.WebSocketCommonProtocol.close_connection_task` completes when
+the TCP connection is closed.
+
+
+.. _opening-handshake:
+
+Opening handshake
+-----------------
+
+``websockets`` performs the opening handshake when establishing a WebSocket
+connection. On the client side, :meth:`~client.connect` executes it before
+returning the protocol to the caller. On the server side, it's executed before
+passing the protocol to the ``ws_handler`` coroutine handling the connection.
+
+While the opening handshake is asymmetrical — the client sends an HTTP Upgrade
+request and the server replies with an HTTP Switching Protocols response —
+``websockets`` aims at keeping the implementation of both sides consistent
+with one another.
+
+On the client side, :meth:`~client.WebSocketClientProtocol.handshake`:
+
+- builds an HTTP request based on the ``uri`` and parameters passed to
+  :meth:`~client.connect`;
+- writes the HTTP request to the network;
+- reads an HTTP response from the network;
+- checks the HTTP response, validates ``extensions`` and ``subprotocol``, and
+ configures the protocol accordingly;
+- moves to the ``OPEN`` state.
+
+On the server side, :meth:`~server.WebSocketServerProtocol.handshake`:
+
+- reads an HTTP request from the network;
+- calls :meth:`~server.WebSocketServerProtocol.process_request` which may
+  abort the WebSocket handshake and return an HTTP response instead; this
+  hook only makes sense on the server side;
+- checks the HTTP request, negotiates ``extensions`` and ``subprotocol``, and
+  configures the protocol accordingly;
+- builds an HTTP response based on the above and parameters passed to
+ :meth:`~server.serve`;
+- writes the HTTP response to the network;
+- moves to the ``OPEN`` state;
+- returns the ``path`` part of the ``uri``.
+
+The most significant asymmetry between the two sides of the opening handshake
+lies in the negotiation of extensions and, to a lesser extent, of the
+subprotocol. The server knows everything about both sides and decides what the
+parameters should be for the connection. The client merely applies them.
+
+If anything goes wrong during the opening handshake, ``websockets``
+:ref:`fails the connection <connection-failure>`.
+
+
+.. _data-transfer:
+
+Data transfer
+-------------
+
+Symmetry
+........
+
+Once the opening handshake has completed, the WebSocket protocol enters the
+data transfer phase. This part is almost symmetrical. There are only two
+differences between a server and a client:
+
+- `client-to-server masking`_: the client masks outgoing frames; the server
+ unmasks incoming frames;
+- `closing the TCP connection`_: the server closes the connection immediately;
+ the client waits for the server to do it.
+
+.. _client-to-server masking: https://tools.ietf.org/html/rfc6455#section-5.3
+.. _closing the TCP connection: https://tools.ietf.org/html/rfc6455#section-5.5.1
+
+These differences are so minor that all the logic for `data framing`_, for
+`sending and receiving data`_ and for `closing the connection`_ is implemented
+in the same class, :class:`~protocol.WebSocketCommonProtocol`.
+
+.. _data framing: https://tools.ietf.org/html/rfc6455#section-5
+.. _sending and receiving data: https://tools.ietf.org/html/rfc6455#section-6
+.. _closing the connection: https://tools.ietf.org/html/rfc6455#section-7
+
+The :attr:`~protocol.WebSocketCommonProtocol.is_client` attribute tells which
+side a protocol instance is managing. This attribute is defined on the
+:class:`~server.WebSocketServerProtocol` and
+:class:`~client.WebSocketClientProtocol` classes.
+
+Data flow
+.........
+
+The following diagram shows how data flows between an application built on top
+of ``websockets`` and a remote endpoint. It applies regardless of which side
+is the server or the client.
+
+.. image:: protocol.svg
+ :target: _images/protocol.svg
+
+Public methods are shown in green, private methods in yellow, and buffers in
+orange. Methods related to connection termination are omitted; connection
+termination is discussed in another section below.
+
+Receiving data
+..............
+
+The left side of the diagram shows how ``websockets`` receives data.
+
+Incoming data is written to a :class:`~asyncio.StreamReader` in order to
+implement flow control and provide backpressure on the TCP connection.
+
+:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`, which is started
+when the WebSocket connection is established, processes this data.
+
+When it receives data frames, it reassembles fragments and puts the resulting
+messages in the :attr:`~protocol.WebSocketCommonProtocol.messages` queue.
+
+When it encounters a control frame:
+
+- if it's a close frame, it starts the closing handshake;
+- if it's a ping frame, it answers with a pong frame;
+- if it's a pong frame, it acknowledges the corresponding ping (unless it's an
+ unsolicited pong).
+
+Running this process in a task guarantees that control frames are processed
+promptly. Without such a task, ``websockets`` would depend on the application
+to drive the connection by having exactly one coroutine awaiting
+:meth:`~protocol.WebSocketCommonProtocol.recv` at any time. While this
+happens naturally in many use cases, it cannot be relied upon.
+
+Then :meth:`~protocol.WebSocketCommonProtocol.recv` fetches the next message
+from the :attr:`~protocol.WebSocketCommonProtocol.messages` queue, with some
+complexity added for handling backpressure and termination correctly.
+
+Sending data
+............
+
+The right side of the diagram shows how ``websockets`` sends data.
+
+:meth:`~protocol.WebSocketCommonProtocol.send` writes one or several data
+frames containing the message. While sending a fragmented message, concurrent
+calls to :meth:`~protocol.WebSocketCommonProtocol.send` are put on hold until
+all fragments are sent. This makes concurrent calls safe.
+
+:meth:`~protocol.WebSocketCommonProtocol.ping` writes a ping frame and
+yields a :class:`~asyncio.Future` which will be completed when a matching pong
+frame is received.
+
+:meth:`~protocol.WebSocketCommonProtocol.pong` writes a pong frame.
+
+:meth:`~protocol.WebSocketCommonProtocol.close` writes a close frame and
+waits for the TCP connection to terminate.
+
+Outgoing data is written to a :class:`~asyncio.StreamWriter` in order to
+implement flow control and provide backpressure from the TCP connection.
+
+.. _closing-handshake:
+
+Closing handshake
+.................
+
+When the other side of the connection initiates the closing handshake,
+:meth:`~protocol.WebSocketCommonProtocol.read_message` receives a close
+frame while in the ``OPEN`` state. It moves to the ``CLOSING`` state, sends a
+close frame, and returns ``None``, causing
+:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` to terminate.
+
+When this side of the connection initiates the closing handshake with
+:meth:`~protocol.WebSocketCommonProtocol.close`, it moves to the ``CLOSING``
+state and sends a close frame. When the other side sends a close frame,
+:meth:`~protocol.WebSocketCommonProtocol.read_message` receives it in the
+``CLOSING`` state and returns ``None``, also causing
+:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` to terminate.
+
+If the other side doesn't send a close frame within the connection's close
+timeout, ``websockets`` :ref:`fails the connection <connection-failure>`.
+
+The closing handshake can take up to ``2 * close_timeout``: one
+``close_timeout`` to write a close frame and one ``close_timeout`` to receive
+a close frame.
+
+Then ``websockets`` terminates the TCP connection.
+
+
+.. _connection-termination:
+
+Connection termination
+----------------------
+
+:attr:`~protocol.WebSocketCommonProtocol.close_connection_task`, which is
+started when the WebSocket connection is established, is responsible for
+eventually closing the TCP connection.
+
+First :attr:`~protocol.WebSocketCommonProtocol.close_connection_task` waits
+for :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` to terminate,
+which may happen as a result of:
+
+- a successful closing handshake: as explained above, this exits the infinite
+ loop in :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`;
+- a timeout while waiting for the closing handshake to complete: this cancels
+ :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`;
+- a protocol error, including connection errors: depending on the exception,
+ :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` :ref:`fails the
+ connection <connection-failure>` with a suitable code and exits.
+
+:attr:`~protocol.WebSocketCommonProtocol.close_connection_task` is separate
+from :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` to make it
+easier to implement the timeout on the closing handshake. Canceling
+:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` creates no risk
+of canceling :attr:`~protocol.WebSocketCommonProtocol.close_connection_task`
+and failing to close the TCP connection, thus leaking resources.
+
+Then :attr:`~protocol.WebSocketCommonProtocol.close_connection_task` cancels
+:attr:`~protocol.WebSocketCommonProtocol.keepalive_ping_task`. This task has no
+protocol compliance responsibilities. Terminating it to avoid leaking it is
+the only concern.
+
+Terminating the TCP connection can take up to ``2 * close_timeout`` on the
+server side and ``3 * close_timeout`` on the client side. Clients start by
+waiting for the server to close the connection, hence the extra
+``close_timeout``. Then both sides go through the following steps until the
+TCP connection is lost: half-closing the connection (only for non-TLS
+connections), closing the connection, aborting the connection. At this point
+the connection drops regardless of what happens on the network.
+
+
+.. _connection-failure:
+
+Connection failure
+------------------
+
+If the opening handshake doesn't complete successfully, ``websockets`` fails
+the connection by closing the TCP connection.
+
+Once the opening handshake has completed, ``websockets`` fails the connection
+by canceling :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` and
+sending a close frame if appropriate.
+
+:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` exits, unblocking
+:attr:`~protocol.WebSocketCommonProtocol.close_connection_task`, which closes
+the TCP connection.
+
+
+.. _server-shutdown:
+
+Server shutdown
+---------------
+
+:class:`~websockets.server.WebSocketServer` closes asynchronously like
+:class:`asyncio.Server`. The shutdown happens in two steps:
+
+1. Stop listening and accepting new connections;
+2. Close established connections with close code 1001 (going away) or, if
+ the opening handshake is still in progress, with HTTP status code 503
+ (Service Unavailable).
+
+The first call to :meth:`~websockets.server.WebSocketServer.close` starts a
+task that performs this sequence. Further calls are ignored. This is the
+easiest way to make :meth:`~websockets.server.WebSocketServer.close` and
+:meth:`~websockets.server.WebSocketServer.wait_closed` idempotent.
+
+
+.. _cancellation:
+
+Cancellation
+------------
+
+User code
+.........
+
+``websockets`` provides a WebSocket application server. It manages connections
+and passes them to user-provided connection handlers. This is an *inversion of
+control* scenario: library code calls user code.
+
+If a connection drops, the corresponding handler should terminate. If the
+server shuts down, all connection handlers must terminate. Canceling
+connection handlers would terminate them.
+
+However, using cancellation for this purpose would require all connection
+handlers to handle it properly. For example, if a connection handler starts
+some tasks, it should catch :exc:`~asyncio.CancelledError`, terminate or
+cancel these tasks, and then re-raise the exception.
+
+Cancellation is tricky in :mod:`asyncio` applications, especially when it
+interacts with finalization logic. In the example above, what if a handler
+gets interrupted with :exc:`~asyncio.CancelledError` while it's finalizing
+the tasks it started, after detecting that the connection dropped?
+
+``websockets`` considers that cancellation may only be triggered by the caller
+of a coroutine when it doesn't care about the results of that coroutine
+anymore. (Source: `Guido van Rossum <https://groups.google.com/forum/#!msg
+/python-tulip/LZQe38CR3bg/7qZ1p_q5yycJ>`_). Since connection handlers run
+arbitrary user code, ``websockets`` has no way of deciding whether that code
+is still doing something worth caring about.
+
+For these reasons, ``websockets`` never cancels connection handlers. Instead
+it expects them to detect when the connection is closed, execute finalization
+logic if needed, and exit.
+
+Conversely, cancellation isn't a concern for WebSocket clients because they
+don't involve inversion of control.
+
+Library
+.......
+
+Most :doc:`public APIs <api>` of ``websockets`` are coroutines. They may be
+canceled, for example if the user starts a task that calls these coroutines
+and cancels the task later. ``websockets`` must handle this situation.
+
+Cancellation during the opening handshake is handled like any other exception:
+the TCP connection is closed and the exception is re-raised. This can only
+happen on the client side. On the server side, the opening handshake is
+managed by ``websockets`` and nothing results in a cancellation.
+
+Once the WebSocket connection is established, internal tasks
+:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` and
+:attr:`~protocol.WebSocketCommonProtocol.close_connection_task` mustn't get
+accidentally canceled if a coroutine that awaits them is canceled. In other
+words, they must be shielded from cancellation.
+
+:meth:`~protocol.WebSocketCommonProtocol.recv` waits for the next message in
+the queue or for :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`
+to terminate, whichever comes first. It relies on :func:`~asyncio.wait` for
+waiting on two futures in parallel. As a consequence, even though it's waiting
+on a :class:`~asyncio.Future` signaling the next message and on
+:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`, it doesn't
+propagate cancellation to them.
+
+:meth:`~protocol.WebSocketCommonProtocol.ensure_open` is called by
+:meth:`~protocol.WebSocketCommonProtocol.send`,
+:meth:`~protocol.WebSocketCommonProtocol.ping`, and
+:meth:`~protocol.WebSocketCommonProtocol.pong`. When the connection state is
+``CLOSING``, it waits for
+:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` but shields it to
+prevent cancellation.
+
+:meth:`~protocol.WebSocketCommonProtocol.close` waits for the data transfer
+task to terminate with :func:`~asyncio.wait_for`. If it's canceled or if the
+timeout elapses, :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`
+is canceled, which is correct at this point.
+:meth:`~protocol.WebSocketCommonProtocol.close` then waits for
+:attr:`~protocol.WebSocketCommonProtocol.close_connection_task` but shields it
+to prevent cancellation.
+
+:meth:`~protocol.WebSocketCommonProtocol.close` and
+:func:`~protocol.WebSocketCommonProtocol.fail_connection` are the only
+places where :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` may
+be canceled.
+
+:attr:`~protocol.WebSocketCommonProtocol.close_connection_task` starts by
+waiting for :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`. It
+catches :exc:`~asyncio.CancelledError` to prevent a cancellation of
+:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` from propagating
+to :attr:`~protocol.WebSocketCommonProtocol.close_connection_task`.
+
+.. _backpressure:
+
+Backpressure
+------------
+
+.. note::
+
+ This section discusses backpressure from the perspective of a server but
+ the concept applies to clients symmetrically.
+
+With a naive implementation, if a server receives inputs faster than it can
+process them, or if it generates outputs faster than it can send them, data
+accumulates in buffers, eventually causing the server to run out of memory and
+crash.
+
+The solution to this problem is backpressure. Any part of the server that
+receives inputs faster than it can process them and send the outputs
+must propagate that information back to the previous part in the chain.
+
+``websockets`` is designed to make it easy to get backpressure right.
+
+For incoming data, ``websockets`` builds upon :class:`~asyncio.StreamReader`
+which propagates backpressure to its own buffer and to the TCP stream. Frames
+are parsed from the input stream and added to a bounded queue. If the queue
+fills up, parsing halts until the application reads a frame.
+
+For outgoing data, ``websockets`` builds upon :class:`~asyncio.StreamWriter`
+which implements flow control. If the output buffers grow too large, it waits
+until they're drained. That's why all APIs that write frames are asynchronous.
+
+Of course, it's still possible for an application to create its own unbounded
+buffers and break the backpressure. Be careful with queues.
+
+
+.. _buffers:
+
+Buffers
+-------
+
+.. note::
+
+ This section discusses buffers from the perspective of a server but it
+ applies to clients as well.
+
+An asynchronous system works best when its buffers are almost always empty.
+
+For example, if a client sends data too fast for a server, the queue of
+incoming messages will be constantly full. The server will always be 32
+messages (by default) behind the client. This consumes memory and increases
+latency for no good reason. The problem is called bufferbloat.
+
+If buffers are almost always full and that problem cannot be solved by adding
+capacity — typically because the system is bottlenecked by the output and
+constantly regulated by backpressure — reducing the size of buffers minimizes
+negative consequences.
+
+By default ``websockets`` has rather high limits. You can decrease them
+according to your application's characteristics.
+
+Bufferbloat can happen at every level in the stack where there is a buffer.
+For each connection, the receiving side contains these buffers:
+
+- OS buffers: tuning them is an advanced optimization.
+- :class:`~asyncio.StreamReader` bytes buffer: the default limit is 64 KiB.
+ You can set another limit by passing a ``read_limit`` keyword argument to
+ :func:`~client.connect()` or :func:`~server.serve`.
+- Incoming messages :class:`~collections.deque`: its size depends both on
+ the size and the number of messages it contains. By default the maximum
+ UTF-8 encoded size is 1 MiB and the maximum number is 32. In the worst case,
+ after UTF-8 decoding, a single message could take up to 4 MiB of memory and
+ the overall memory consumption could reach 128 MiB. You should adjust these
+ limits by setting the ``max_size`` and ``max_queue`` keyword arguments of
+ :func:`~client.connect()` or :func:`~server.serve` according to your
+ application's requirements.
+
+For each connection, the sending side contains these buffers:
+
+- :class:`~asyncio.StreamWriter` bytes buffer: the default size is 64 KiB.
+ You can set another limit by passing a ``write_limit`` keyword argument to
+ :func:`~client.connect()` or :func:`~server.serve`.
+- OS buffers: tuning them is an advanced optimization.
+
+Concurrency
+-----------
+
+Awaiting any combination of :meth:`~protocol.WebSocketCommonProtocol.recv`,
+:meth:`~protocol.WebSocketCommonProtocol.send`,
+:meth:`~protocol.WebSocketCommonProtocol.close`,
+:meth:`~protocol.WebSocketCommonProtocol.ping`, or
+:meth:`~protocol.WebSocketCommonProtocol.pong` concurrently is safe, including
+multiple calls to the same method, with one exception and one limitation.
+
+* **Only one coroutine can receive messages at a time.** This constraint
+ avoids non-deterministic behavior (and simplifies the implementation). If a
+ coroutine is awaiting :meth:`~protocol.WebSocketCommonProtocol.recv`,
+ awaiting it again in another coroutine raises :exc:`RuntimeError`.
+
+* **Sending a fragmented message forces serialization.** Indeed, the WebSocket
+ protocol doesn't support multiplexing messages. If a coroutine is awaiting
+ :meth:`~protocol.WebSocketCommonProtocol.send` to send a fragmented message,
+ awaiting it again in another coroutine waits until the first call completes.
+ This will be transparent in many cases. It may be a concern if the
+ fragmented message is generated slowly by an asynchronous iterator.
+
+Receiving frames is independent from sending frames. This isolates
+:meth:`~protocol.WebSocketCommonProtocol.recv`, which receives frames, from
+the other methods, which send frames.
+
+While the connection is open, each frame is sent with a single write. Combined
+with the concurrency model of :mod:`asyncio`, this enforces serialization. The
+only other requirement is to prevent interleaving other data frames in the
+middle of a fragmented message.
+
+After the connection is closed, sending a frame raises
+:exc:`~websockets.exceptions.ConnectionClosed`, which is safe.
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/extensions.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/extensions.rst
new file mode 100644
index 0000000000..4000340906
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/extensions.rst
@@ -0,0 +1,87 @@
+Extensions
+==========
+
+.. currentmodule:: websockets
+
+The WebSocket protocol supports extensions_.
+
+At the time of writing, there's only one `registered extension`_, WebSocket
+Per-Message Deflate, specified in :rfc:`7692`.
+
+.. _extensions: https://tools.ietf.org/html/rfc6455#section-9
+.. _registered extension: https://www.iana.org/assignments/websocket/websocket.xhtml#extension-name
+
+Per-Message Deflate
+-------------------
+
+:func:`~server.serve` and :func:`~client.connect` enable the Per-Message
+Deflate extension by default. You can disable this with ``compression=None``.
+
+You can also configure the Per-Message Deflate extension explicitly if you
+want to customize its parameters.
+
+.. _per-message-deflate-configuration-example:
+
+Here's an example on the server side::
+
+ import websockets
+ from websockets.extensions import permessage_deflate
+
+ websockets.serve(
+ ...,
+ extensions=[
+ permessage_deflate.ServerPerMessageDeflateFactory(
+ server_max_window_bits=11,
+ client_max_window_bits=11,
+ compress_settings={'memLevel': 4},
+ ),
+ ],
+ )
+
+Here's an example on the client side::
+
+ import websockets
+ from websockets.extensions import permessage_deflate
+
+ websockets.connect(
+ ...,
+ extensions=[
+ permessage_deflate.ClientPerMessageDeflateFactory(
+ server_max_window_bits=11,
+ client_max_window_bits=11,
+ compress_settings={'memLevel': 4},
+ ),
+ ],
+ )
+
+Refer to the API documentation of
+:class:`~extensions.permessage_deflate.ServerPerMessageDeflateFactory` and
+:class:`~extensions.permessage_deflate.ClientPerMessageDeflateFactory` for
+details.
+
+Writing an extension
+--------------------
+
+During the opening handshake, WebSocket clients and servers negotiate which
+extensions will be used with which parameters. Then each frame is processed by
+extensions before it's sent and after it's received.
+
+As a consequence, writing an extension requires implementing several classes:
+
+1. Extension Factory: it negotiates parameters and instantiates the extension.
+ Clients and servers require separate extension factories with distinct APIs.
+
+2. Extension: it decodes incoming frames and encodes outgoing frames. If the
+ extension is symmetrical, clients and servers can use the same class.
+
+``websockets`` provides abstract base classes for extension factories and
+extensions.
+
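+As an illustration, here's a minimal sketch of a pass-through extension that
+leaves frames untouched. It doesn't negotiate anything, and the exact method
+signatures (including the ``max_size`` keyword) are assumptions based on the
+base classes documented below::
+
+    class NoOpExtension:
+        """Extension that forwards every frame unchanged."""
+
+        name = "x-no-op"  # hypothetical extension name, for illustration only
+
+        def decode(self, frame, *, max_size=None):
+            # Incoming frames pass through without modification.
+            return frame
+
+        def encode(self, frame):
+            # Outgoing frames pass through without modification.
+            return frame
+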
+.. autoclass:: websockets.extensions.base.ServerExtensionFactory
+ :members:
+
+.. autoclass:: websockets.extensions.base.ClientExtensionFactory
+ :members:
+
+.. autoclass:: websockets.extensions.base.Extension
+ :members:
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/faq.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/faq.rst
new file mode 100644
index 0000000000..cea3f53583
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/faq.rst
@@ -0,0 +1,261 @@
+FAQ
+===
+
+.. currentmodule:: websockets
+
+.. note::
+
+ Many questions asked in :mod:`websockets`' issue tracker are actually
+ about :mod:`asyncio`. Python's documentation about `developing with
+ asyncio`_ is a good complement.
+
+ .. _developing with asyncio: https://docs.python.org/3/library/asyncio-dev.html
+
+Server side
+-----------
+
+Why does the server close the connection after processing one message?
+......................................................................
+
+Your connection handler exits after processing one message. Write a loop to
+process multiple messages.
+
+For example, if your handler looks like this::
+
+ async def handler(websocket, path):
+        print(await websocket.recv())
+
+change it like this::
+
+ async def handler(websocket, path):
+ async for message in websocket:
+ print(message)
+
+*Don't feel bad if this happens to you — it's the most common question in
+websockets' issue tracker :-)*
+
+Why can only one client connect at a time?
+..........................................
+
+Your connection handler blocks the event loop. Look for blocking calls.
+Any call that may take some time must be asynchronous.
+
+For example, if you have::
+
+ async def handler(websocket, path):
+ time.sleep(1)
+
+change it to::
+
+ async def handler(websocket, path):
+ await asyncio.sleep(1)
+
+This is part of learning asyncio. It isn't specific to websockets.
+
+See also Python's documentation about `running blocking code`_.
+
+.. _running blocking code: https://docs.python.org/3/library/asyncio-dev.html#running-blocking-code
+
+How do I access HTTP headers, for example cookies?
+......................................................
+
+To access HTTP headers during the WebSocket handshake, you can override
+:attr:`~server.WebSocketServerProtocol.process_request`::
+
+ async def process_request(self, path, request_headers):
+        cookies = request_headers["Cookie"]
+
+See the documentation of :attr:`~server.WebSocketServerProtocol.process_request`
+for details.
+
+Once the connection is established, they're available in
+:attr:`~server.WebSocketServerProtocol.request_headers`::
+
+ async def handler(websocket, path):
+ cookies = websocket.request_headers["Cookie"]
+
+How do I get the IP address of the client connecting to my server?
+..................................................................
+
+It's available in :attr:`~protocol.WebSocketCommonProtocol.remote_address`::
+
+ async def handler(websocket, path):
+ remote_ip = websocket.remote_address[0]
+
+How do I set which IP addresses my server listens to?
+.....................................................
+
+Look at the ``host`` argument of :meth:`~asyncio.loop.create_server`.
+
+:func:`serve` accepts the same arguments as
+:meth:`~asyncio.loop.create_server`.
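+
+For example, to listen on all interfaces (the handler name, host, and port are
+illustrative)::
+
+    websockets.serve(handler, "0.0.0.0", 8765)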
+
+How do I close a connection properly?
+.....................................
+
+websockets takes care of closing the connection when the handler exits.
+
+How do I run an HTTP and a WebSocket server on the same port?
+.................................................................
+
+This isn't supported.
+
+Providing an HTTP server is out of scope for websockets. It only aims at
+providing a WebSocket server.
+
+There's limited support for returning HTTP responses with the
+:attr:`~server.WebSocketServerProtocol.process_request` hook.
+If you need more, pick an HTTP server and run it separately.
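+
+For instance, here's a sketch of a health check endpoint. It assumes
+``process_request`` may return an ``(HTTP status, headers, body)`` tuple to
+short-circuit the WebSocket handshake, as described in its API documentation;
+the path and handler names are illustrative::
+
+    import http
+
+    import websockets
+
+    async def health_check(path, request_headers):
+        # Answer plain HTTP probes without performing the WebSocket handshake.
+        if path == "/healthz":
+            return http.HTTPStatus.OK, [], b"OK\n"
+
+    async def handler(websocket, path):
+        ...
+
+    start_server = websockets.serve(
+        handler, "localhost", 8765, process_request=health_check
+    )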
+
+Client side
+-----------
+
+How do I close a connection properly?
+.....................................
+
+The easiest way is to use :func:`connect` as an asynchronous context manager::
+
+ async with connect(...) as websocket:
+ ...
+
+How do I reconnect automatically when the connection drops?
+...........................................................
+
+See `issue 414`_.
+
+.. _issue 414: https://github.com/aaugustin/websockets/issues/414
+
+How do I disable TLS/SSL certificate verification?
+..................................................
+
+Look at the ``ssl`` argument of :meth:`~asyncio.loop.create_connection`.
+
+:func:`connect` accepts the same arguments as
+:meth:`~asyncio.loop.create_connection`.
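+
+If you control the server and accept the risks, here's a sketch for testing
+against a self-signed or otherwise invalid certificate; the URI is
+illustrative. Never do this in production::
+
+    import ssl
+
+    import websockets
+
+    ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+    ssl_context.check_hostname = False
+    ssl_context.verify_mode = ssl.CERT_NONE
+
+    async with websockets.connect("wss://localhost:8765", ssl=ssl_context) as websocket:
+        ...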
+
+Both sides
+----------
+
+How do I do two things in parallel? How do I integrate with another coroutine?
+..............................................................................
+
+You must start two tasks, which the event loop will run concurrently. You can
+achieve this with :func:`asyncio.gather` or :func:`asyncio.wait`.
+
+This is also part of learning asyncio and not specific to websockets.
+
+Keep track of the tasks and make sure they either terminate or are cancelled
+when the connection terminates.
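+
+For example, assuming ``consume()`` and ``produce()`` are hypothetical
+coroutines holding your logic for incoming and outgoing messages,
+:func:`asyncio.gather` wraps each one in a task and runs them concurrently::
+
+    async def handler(websocket, path):
+        await asyncio.gather(
+            consume(websocket),
+            produce(websocket),
+        )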
+
+How do I create channels or topics?
+...................................
+
+websockets doesn't have built-in publish / subscribe for these use cases.
+
+Depending on the scale of your service, a simple in-memory implementation may
+do the job or you may need an external publish / subscribe component.
+
+What does ``ConnectionClosedError: code = 1006`` mean?
+......................................................
+
+If you're seeing this traceback in the logs of a server:
+
+.. code-block:: pytb
+
+ Error in connection handler
+ Traceback (most recent call last):
+ ...
+ asyncio.streams.IncompleteReadError: 0 bytes read on a total of 2 expected bytes
+
+ The above exception was the direct cause of the following exception:
+
+ Traceback (most recent call last):
+ ...
+ websockets.exceptions.ConnectionClosedError: code = 1006 (connection closed abnormally [internal]), no reason
+
+or if a client crashes with this traceback:
+
+.. code-block:: pytb
+
+ Traceback (most recent call last):
+ ...
+ ConnectionResetError: [Errno 54] Connection reset by peer
+
+ The above exception was the direct cause of the following exception:
+
+ Traceback (most recent call last):
+ ...
+ websockets.exceptions.ConnectionClosedError: code = 1006 (connection closed abnormally [internal]), no reason
+
+it means that the TCP connection was lost. As a consequence, the WebSocket
+connection was closed without receiving a close frame, which is abnormal.
+
+You can catch and handle :exc:`~exceptions.ConnectionClosed` to prevent it
+from being logged.
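+
+For example, in a server handler (``process()`` is a hypothetical coroutine
+holding your logic)::
+
+    async def handler(websocket, path):
+        try:
+            async for message in websocket:
+                await process(message)
+        except websockets.exceptions.ConnectionClosed:
+            pass  # the client went away; nothing more to do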
+
+There are several reasons why long-lived connections may be lost:
+
+* End-user devices tend to lose network connectivity often and unpredictably
+ because they can move out of wireless network coverage, get unplugged from
+ a wired network, enter airplane mode, be put to sleep, etc.
+* HTTP load balancers or proxies that aren't configured for long-lived
+ connections may terminate connections after a short amount of time, usually
+ 30 seconds.
+
+If you're facing a reproducible issue, :ref:`enable debug logs <debugging>` to
+see when and how connections are closed.
+
+Are there ``onopen``, ``onmessage``, ``onerror``, and ``onclose`` callbacks?
+............................................................................
+
+No, there aren't.
+
+websockets provides high-level, coroutine-based APIs. Compared to callbacks,
+coroutines make it easier to manage control flow in concurrent code.
+
+If you prefer callback-based APIs, you should use another library.
+
+Can I use ``websockets`` synchronously, without ``async`` / ``await``?
+......................................................................
+
+You can convert every asynchronous call to a synchronous call by wrapping it
+in ``asyncio.get_event_loop().run_until_complete(...)``.
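+
+For example, here's a sketch of a synchronous round trip; the URI is
+illustrative::
+
+    import asyncio
+    import websockets
+
+    loop = asyncio.get_event_loop()
+
+    websocket = loop.run_until_complete(websockets.connect("ws://localhost:8765"))
+    loop.run_until_complete(websocket.send("Hello!"))
+    reply = loop.run_until_complete(websocket.recv())
+    loop.run_until_complete(websocket.close())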
+
+If this turns out to be impractical, you should use another library.
+
+Miscellaneous
+-------------
+
+How do I set a timeout on ``recv()``?
+.....................................
+
+Use :func:`~asyncio.wait_for`::
+
+ await asyncio.wait_for(websocket.recv(), timeout=10)
+
+This technique works for most APIs, except for asynchronous context managers.
+See `issue 574`_.
+
+.. _issue 574: https://github.com/aaugustin/websockets/issues/574
+
+How do I keep idle connections open?
+....................................
+
+websockets sends pings at 20-second intervals to keep the connection open.
+
+It closes the connection if it doesn't get a pong within 20 seconds.
+
+You can adjust this behavior with ``ping_interval`` and ``ping_timeout``.
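+
+For example, to send pings every 60 seconds and wait up to 60 seconds for the
+corresponding pong (the values are illustrative)::
+
+    websockets.serve(
+        ...,
+        ping_interval=60,
+        ping_timeout=60,
+    )
+
+Setting ``ping_interval`` to ``None`` disables keepalive pings.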
+
+How do I respond to pings?
+..........................
+
+websockets takes care of responding to pings with pongs.
+
+Is there a Python 2 version?
+............................
+
+No, there isn't.
+
+websockets builds upon asyncio which requires Python 3.
+
+
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/index.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/index.rst
new file mode 100644
index 0000000000..1b2f85f0a4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/index.rst
@@ -0,0 +1,99 @@
+websockets
+==========
+
+|pypi-v| |pypi-pyversions| |pypi-l| |pypi-wheel| |circleci| |codecov|
+
+.. |pypi-v| image:: https://img.shields.io/pypi/v/websockets.svg
+ :target: https://pypi.python.org/pypi/websockets
+
+.. |pypi-pyversions| image:: https://img.shields.io/pypi/pyversions/websockets.svg
+ :target: https://pypi.python.org/pypi/websockets
+
+.. |pypi-l| image:: https://img.shields.io/pypi/l/websockets.svg
+ :target: https://pypi.python.org/pypi/websockets
+
+.. |pypi-wheel| image:: https://img.shields.io/pypi/wheel/websockets.svg
+ :target: https://pypi.python.org/pypi/websockets
+
+.. |circleci| image:: https://img.shields.io/circleci/project/github/aaugustin/websockets.svg
+ :target: https://circleci.com/gh/aaugustin/websockets
+
+.. |codecov| image:: https://codecov.io/gh/aaugustin/websockets/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aaugustin/websockets
+
+``websockets`` is a library for building WebSocket servers_ and clients_ in
+Python with a focus on correctness and simplicity.
+
+.. _servers: https://github.com/aaugustin/websockets/blob/master/example/server.py
+.. _clients: https://github.com/aaugustin/websockets/blob/master/example/client.py
+
+Built on top of :mod:`asyncio`, Python's standard asynchronous I/O framework,
+it provides an elegant coroutine-based API.
+
+Here's how a client sends and receives messages:
+
+.. literalinclude:: ../example/hello.py
+
+And here's an echo server:
+
+.. literalinclude:: ../example/echo.py
+
+Do you like it? Let's dive in!
+
+Tutorials
+---------
+
+If you're new to ``websockets``, this is the place to start.
+
+.. toctree::
+ :maxdepth: 2
+
+ intro
+ faq
+
+How-to guides
+-------------
+
+These guides will help you build and deploy a ``websockets`` application.
+
+.. toctree::
+ :maxdepth: 2
+
+ cheatsheet
+ deployment
+ extensions
+
+Reference
+---------
+
+Find all the details you could ask for, and then some.
+
+.. toctree::
+ :maxdepth: 2
+
+ api
+
+Discussions
+-----------
+
+Get a deeper understanding of how ``websockets`` is built and why.
+
+.. toctree::
+ :maxdepth: 2
+
+ design
+ limitations
+ security
+
+Project
+-------
+
+This is about websockets-the-project rather than websockets-the-software.
+
+.. toctree::
+ :maxdepth: 2
+
+ changelog
+ contributing
+ license
+ For enterprise <tidelift>
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/intro.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/intro.rst
new file mode 100644
index 0000000000..8be700239f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/intro.rst
@@ -0,0 +1,209 @@
+Getting started
+===============
+
+.. currentmodule:: websockets
+
+Requirements
+------------
+
+``websockets`` requires Python ≥ 3.6.1.
+
+You should use the latest version of Python if possible. If you're using an
+older version, be aware that for each minor version (3.x), only the latest
+bugfix release (3.x.y) is officially supported.
+
+Installation
+------------
+
+Install ``websockets`` with::
+
+ pip install websockets
+
+Basic example
+-------------
+
+.. _server-example:
+
+Here's a WebSocket server example.
+
+It reads a name from the client, sends a greeting, and closes the connection.
+
+.. literalinclude:: ../example/server.py
+ :emphasize-lines: 8,17
+
+.. _client-example:
+
+On the server side, ``websockets`` executes the handler coroutine ``hello``
+once for each WebSocket connection. It closes the connection when the handler
+coroutine returns.
+
+Here's a corresponding WebSocket client example.
+
+.. literalinclude:: ../example/client.py
+ :emphasize-lines: 8,10
+
+Using :func:`connect` as an asynchronous context manager ensures the
+connection is closed before exiting the ``hello`` coroutine.
+
+.. _secure-server-example:
+
+Secure example
+--------------
+
+Secure WebSocket connections improve confidentiality and also reliability
+because they reduce the risk of interference by bad proxies.
+
+The WSS protocol is to WS what HTTPS is to HTTP: the connection is encrypted
+with Transport Layer Security (TLS) — which is often referred to as Secure
+Sockets Layer (SSL). WSS requires TLS certificates like HTTPS.
+
+Here's how to adapt the server example to provide secure connections. See the
+documentation of the :mod:`ssl` module for configuring the context securely.
+
+.. literalinclude:: ../example/secure_server.py
+ :emphasize-lines: 19,23-25
+
+Here's how to adapt the client.
+
+.. literalinclude:: ../example/secure_client.py
+ :emphasize-lines: 10,15-18
+
+This client needs a context because the server uses a self-signed certificate.
+
+A client connecting to a secure WebSocket server with a valid certificate
+(i.e. signed by a CA that your Python installation trusts) can simply pass
+``ssl=True`` to :func:`connect` instead of building a context.
+
+Browser-based example
+---------------------
+
+Here's an example of how to run a WebSocket server and connect from a browser.
+
+Run this script in a console:
+
+.. literalinclude:: ../example/show_time.py
+
+Then open this HTML file in a browser.
+
+.. literalinclude:: ../example/show_time.html
+ :language: html
+
+Synchronization example
+-----------------------
+
+A WebSocket server can receive events from clients, process them to update the
+application state, and synchronize the resulting state across clients.
+
+Here's an example where any client can increment or decrement a counter.
+Updates are propagated to all connected clients.
+
+The concurrency model of :mod:`asyncio` guarantees that updates are
+serialized.
+
+Run this script in a console:
+
+.. literalinclude:: ../example/counter.py
+
+Then open this HTML file in several browsers.
+
+.. literalinclude:: ../example/counter.html
+ :language: html
+
+Common patterns
+---------------
+
+You will usually want to process several messages during the lifetime of a
+connection. Therefore you must write a loop. Here are the basic patterns for
+building a WebSocket server.
+
+Consumer
+........
+
+For receiving messages and passing them to a ``consumer`` coroutine::
+
+ async def consumer_handler(websocket, path):
+ async for message in websocket:
+ await consumer(message)
+
+In this example, ``consumer`` represents your business logic for processing
+messages received on the WebSocket connection.
+
+Iteration terminates when the client disconnects.
+
+Producer
+........
+
+For getting messages from a ``producer`` coroutine and sending them::
+
+ async def producer_handler(websocket, path):
+ while True:
+ message = await producer()
+ await websocket.send(message)
+
+In this example, ``producer`` represents your business logic for generating
+messages to send on the WebSocket connection.
+
+:meth:`~protocol.WebSocketCommonProtocol.send` raises a
+:exc:`~exceptions.ConnectionClosed` exception when the client disconnects,
+which breaks out of the ``while True`` loop.
+
+Both
+....
+
+You can read and write messages on the same connection by combining the two
+patterns shown above and running the two tasks in parallel::
+
+ async def handler(websocket, path):
+ consumer_task = asyncio.ensure_future(
+ consumer_handler(websocket, path))
+ producer_task = asyncio.ensure_future(
+ producer_handler(websocket, path))
+ done, pending = await asyncio.wait(
+ [consumer_task, producer_task],
+ return_when=asyncio.FIRST_COMPLETED,
+ )
+ for task in pending:
+ task.cancel()
+
+Registration
+............
+
+As shown in the synchronization example above, if you need to maintain a list
+of currently connected clients, you must register them when they connect and
+unregister them when they disconnect.
+
+::
+
+ connected = set()
+
+ async def handler(websocket, path):
+ # Register.
+ connected.add(websocket)
+ try:
+ # Implement logic here.
+ await asyncio.wait([ws.send("Hello!") for ws in connected])
+ await asyncio.sleep(10)
+ finally:
+ # Unregister.
+ connected.remove(websocket)
+
+This simplistic example keeps track of connected clients in memory. This only
+works as long as you run a single process. In a practical application, the
+handler may subscribe to some channels on a message broker, for example.
+
+That's all!
+-----------
+
+The design of the ``websockets`` API was driven by simplicity.
+
+You don't have to worry about performing the opening or the closing handshake,
+answering pings, or any other behavior required by the specification.
+
+``websockets`` handles all this under the hood so you don't have to.
+
+One more thing...
+-----------------
+
+``websockets`` provides an interactive client::
+
+ $ python -m websockets wss://echo.websocket.org/
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/license.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/license.rst
new file mode 100644
index 0000000000..842d3b07fc
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/license.rst
@@ -0,0 +1,4 @@
+License
+-------
+
+.. literalinclude:: ../LICENSE
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/lifecycle.graffle b/testing/web-platform/tests/tools/third_party/websockets/docs/lifecycle.graffle
new file mode 100644
index 0000000000..a8ab7ff09f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/lifecycle.graffle
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/lifecycle.svg b/testing/web-platform/tests/tools/third_party/websockets/docs/lifecycle.svg
new file mode 100644
index 0000000000..0a9818d293
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/lifecycle.svg
@@ -0,0 +1,3 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xl="http://www.w3.org/1999/xlink" version="1.1" viewBox="-14.3464565 112.653543 624.6929 372.69291" width="624.6929pt" height="372.69291pt" xmlns:dc="http://purl.org/dc/elements/1.1/"><metadata> Produced by OmniGraffle 6.6.2 <dc:date>2018-07-29 15:25:34 +0000</dc:date></metadata><defs><font-face font-family="Courier New" font-size="12" panose-1="2 7 6 9 2 2 5 2 4 4" units-per-em="1000" underline-position="-232.91016" underline-thickness="100.097656" slope="0" x-height="443.35938" cap-height="591.79688" ascent="832.51953" descent="-300.29297" font-weight="bold"><font-face-src><font-face-name name="CourierNewPS-BoldMT"/></font-face-src></font-face><font-face font-family="Courier New" font-size="12" panose-1="2 7 3 9 2 2 5 2 4 4" units-per-em="1000" underline-position="-232.91016" underline-thickness="41.015625" slope="0" x-height="422.85156" cap-height="571.28906" ascent="832.51953" descent="-300.29297" font-weight="500"><font-face-src><font-face-name name="CourierNewPSMT"/></font-face-src></font-face><marker orient="auto" overflow="visible" markerUnits="strokeWidth" id="StickArrow_Marker" viewBox="-1 -4 8 8" markerWidth="8" markerHeight="8" color="black"><g><path d="M 5.8666667 0 L 0 0 M 0 -2.2 L 5.8666667 0 L 0 2.2" fill="none" stroke="currentColor" stroke-width="1"/></g></marker><font-face font-family="Verdana" font-size="12" panose-1="2 11 6 4 3 5 4 4 2 4" units-per-em="1000" underline-position="-87.890625" underline-thickness="58.59375" slope="0" x-height="545.41016" cap-height="727.0508" ascent="1005.3711" descent="-209.96094" font-weight="500"><font-face-src><font-face-name name="Verdana"/></font-face-src></font-face></defs><g stroke="none" stroke-opacity="1" stroke-dasharray="none" fill="none" fill-opacity="1"><title>Canvas 1</title><g><title>Layer 1</title><text transform="translate(19.173228 148.90551)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="1.5138254" y="10" textLength="72.01172">CONNECTING</tspan></text><text transform="translate(160.90551 148.90551)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="23.117341" y="10" textLength="28.804688">OPEN</tspan></text><text transform="translate(359.3307 148.90551)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="12.315583" y="10" textLength="50.408203">CLOSING</tspan></text><text transform="translate(501.063 148.90551)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="15.916169" y="10" textLength="43.20703">CLOSED</tspan></text><line x1="198.4252" y1="170.07874" x2="198.4252" y2="453.5433" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5" stroke-dasharray="1,3"/><line x1="396.8504" y1="170.07874" x2="396.8504" y2="453.5433" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5" stroke-dasharray="1,3"/><line x1="538.58267" y1="170.07874" x2="538.58267" y2="453.5433" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5" stroke-dasharray="1,3"/><line x1="56.692913" y1="170.07874" x2="56.692913" y2="453.5433" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5" stroke-dasharray="1,3"/><path d="M 240.94488 240.94488 L 411.02362 240.94488 C 418.85128 240.94488 425.19685 247.29045 425.19685 255.11811 L 425.19685 255.11811 C 425.19685 262.94577 418.85128 269.29134 411.02362 269.29134 L 240.94488 269.29134 C 233.11722 269.29134 226.77165 262.94577 
226.77165 255.11811 L 226.77165 255.11811 C 226.77165 247.29045 233.11722 240.94488 240.94488 240.94488 Z" fill="#dadada"/><path d="M 240.94488 240.94488 L 411.02362 240.94488 C 418.85128 240.94488 425.19685 247.29045 425.19685 255.11811 L 425.19685 255.11811 C 425.19685 262.94577 418.85128 269.29134 411.02362 269.29134 L 240.94488 269.29134 C 233.11722 269.29134 226.77165 262.94577 226.77165 255.11811 L 226.77165 255.11811 C 226.77165 247.29045 233.11722 240.94488 240.94488 240.94488 Z" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(226.77165 248.11811)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="52.40498" y="10" textLength="93.615234">transfer_data</tspan></text><path d="M 240.94488 354.3307 L 552.7559 354.3307 C 560.58356 354.3307 566.92913 360.67628 566.92913 368.50393 L 566.92913 368.50393 C 566.92913 376.3316 560.58356 382.67716 552.7559 382.67716 L 240.94488 382.67716 C 233.11722 382.67716 226.77165 376.3316 226.77165 368.50393 L 226.77165 368.50393 C 226.77165 360.67628 233.11722 354.3307 240.94488 354.3307 Z" fill="#dadada"/><path d="M 240.94488 354.3307 L 552.7559 354.3307 C 560.58356 354.3307 566.92913 360.67628 566.92913 368.50393 L 566.92913 368.50393 C 566.92913 376.3316 560.58356 382.67716 552.7559 382.67716 L 240.94488 382.67716 C 233.11722 382.67716 226.77165 376.3316 226.77165 368.50393 L 226.77165 368.50393 C 226.77165 360.67628 233.11722 354.3307 240.94488 354.3307 Z" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(231.77165 361.50393)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="107.469364" y="10" textLength="115.21875">close_connection</tspan></text><path d="M 99.2126 184.25197 L 155.90551 184.25197 C 163.73317 184.25197 170.07874 190.59754 170.07874 198.4252 L 170.07874 198.4252 C 170.07874 206.25285 163.73317 212.59842 155.90551 212.59842 L 99.2126 212.59842 C 91.38494 212.59842 85.03937 206.25285 85.03937 198.4252 L 85.03937 198.4252 C 85.03937 190.59754 91.38494 184.25197 99.2126 184.25197 Z" fill="#6f6"/><path d="M 99.2126 184.25197 L 155.90551 184.25197 C 163.73317 184.25197 170.07874 190.59754 170.07874 198.4252 L 170.07874 198.4252 C 170.07874 206.25285 163.73317 212.59842 155.90551 212.59842 L 99.2126 212.59842 C 91.38494 212.59842 85.03937 206.25285 85.03937 198.4252 L 85.03937 198.4252 C 85.03937 190.59754 91.38494 184.25197 99.2126 184.25197 Z" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(90.03937 191.4252)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="12.315583" y="10" textLength="50.408203">connect</tspan></text><path d="M 240.94488 184.25197 L 496.063 184.25197 C 503.89065 184.25197 510.23622 190.59754 510.23622 198.4252 L 510.23622 198.4252 C 510.23622 206.25285 503.89065 212.59842 496.063 212.59842 L 240.94488 212.59842 C 233.11722 212.59842 226.77165 206.25285 226.77165 198.4252 L 226.77165 198.4252 C 226.77165 190.59754 233.11722 184.25197 240.94488 184.25197 Z" fill="#6f6"/><path d="M 240.94488 184.25197 L 496.063 184.25197 C 503.89065 184.25197 510.23622 190.59754 510.23622 198.4252 L 510.23622 198.4252 C 510.23622 206.25285 503.89065 212.59842 496.063 212.59842 L 240.94488 212.59842 C 233.11722 212.59842 226.77165 206.25285 226.77165 198.4252 L 226.77165 198.4252 C 226.77165 190.59754 233.11722 184.25197 240.94488 184.25197 Z" stroke="black" 
stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(231.77165 191.4252)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="17.912947" y="10" textLength="100.816406">recv / send / </tspan><tspan font-family="Courier New" font-size="12" font-weight="500" x="118.72935" y="10" textLength="93.615234">ping / pong /</tspan><tspan font-family="Courier New" font-size="12" font-weight="bold" x="212.34459" y="10" textLength="50.408203"> close </tspan></text><path d="M 170.07874 198.4252 L 183.97874 198.4252 L 198.4252 198.4252 L 198.4252 283.46457 L 198.4252 368.50393 L 212.87165 368.50393 L 215.37165 368.50393" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(75.86614 410.19685)" fill="black"><tspan font-family="Verdana" font-size="12" font-weight="500" x="27.760296" y="12" textLength="52.083984">opening </tspan><tspan font-family="Verdana" font-size="12" font-weight="500" x="19.164593" y="27" textLength="58.02539">handshak</tspan><tspan font-family="Verdana" font-size="12" font-weight="500" x="77.072796" y="27" textLength="7.1484375">e</tspan></text><text transform="translate(416.02362 410.19685)" fill="black"><tspan font-family="Verdana" font-size="12" font-weight="500" x="19.182171" y="12" textLength="65.021484">connection</tspan><tspan font-family="Verdana" font-size="12" font-weight="500" x="16.861858" y="27" textLength="69.66211">termination</tspan></text><text transform="translate(217.59842 410.19685)" fill="black"><tspan font-family="Verdana" font-size="12" font-weight="500" x="41.03058" y="12" textLength="40.6875">data tr</tspan><tspan font-family="Verdana" font-size="12" font-weight="500" x="81.507143" y="12" textLength="37.541016">ansfer</tspan><tspan font-family="Verdana" font-size="12" font-weight="500" x="18.211245" y="27" textLength="116.625">&amp; closing handshak</tspan><tspan font-family="Verdana" font-size="12" font-weight="500" x="134.71906" y="27" textLength="7.1484375">e</tspan></text><path d="M 425.19685 255.11811 L 439.09685 255.11811 L 453.5433 255.11811 L 453.5433 342.9307" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><path d="M 240.94488 297.6378 L 411.02362 297.6378 C 418.85128 297.6378 425.19685 303.98336 425.19685 311.81102 L 425.19685 311.81102 C 425.19685 319.63868 418.85128 325.98425 411.02362 325.98425 L 240.94488 325.98425 C 233.11722 325.98425 226.77165 319.63868 226.77165 311.81102 L 226.77165 311.81102 C 226.77165 303.98336 233.11722 297.6378 240.94488 297.6378 Z" fill="#dadada"/><path d="M 240.94488 297.6378 L 411.02362 297.6378 C 418.85128 297.6378 425.19685 303.98336 425.19685 311.81102 L 425.19685 311.81102 C 425.19685 319.63868 418.85128 325.98425 411.02362 325.98425 L 240.94488 325.98425 C 233.11722 325.98425 226.77165 319.63868 226.77165 311.81102 L 226.77165 311.81102 C 226.77165 303.98336 233.11722 297.6378 240.94488 297.6378 Z" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(226.77165 304.81102)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="48.804395" y="10" textLength="100.816406">keepalive_ping</tspan></text><line x1="198.4252" y1="255.11811" x2="214.62165" y2="255.11811" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><line x1="198.4252" 
y1="311.81102" x2="215.37165" y2="311.81102" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/></g></g></svg>
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/limitations.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/limitations.rst
new file mode 100644
index 0000000000..bd6d32b2f6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/limitations.rst
@@ -0,0 +1,10 @@
+Limitations
+-----------
+
+The client doesn't attempt to guarantee that there is no more than one
+connection to a given IP address in a CONNECTING state.
+
+The client doesn't support connecting through a proxy.
+
+There is no way to fragment outgoing messages. A message is always sent in a
+single frame.
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/protocol.graffle b/testing/web-platform/tests/tools/third_party/websockets/docs/protocol.graffle
new file mode 100644
index 0000000000..df76f49607
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/protocol.graffle
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/protocol.svg b/testing/web-platform/tests/tools/third_party/websockets/docs/protocol.svg
new file mode 100644
index 0000000000..51bfd982be
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/protocol.svg
@@ -0,0 +1,3 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xl="http://www.w3.org/1999/xlink" version="1.1" viewBox="0 0 624.34646 822.34646" width="624.34646pt" height="822.34646pt" xmlns:dc="http://purl.org/dc/elements/1.1/"><metadata> Produced by OmniGraffle 6.6.2 <dc:date>2019-07-07 08:38:24 +0000</dc:date></metadata><defs><font-face font-family="Verdana" font-size="12" panose-1="2 11 6 4 3 5 4 4 2 4" units-per-em="1000" underline-position="-87.890625" underline-thickness="58.59375" slope="0" x-height="545.41016" cap-height="727.0508" ascent="1005.3711" descent="-209.96094" font-weight="500"><font-face-src><font-face-name name="Verdana"/></font-face-src></font-face><font-face font-family="Courier New" font-size="12" panose-1="2 7 3 9 2 2 5 2 4 4" units-per-em="1000" underline-position="-232.91016" underline-thickness="41.015625" slope="0" x-height="422.85156" cap-height="571.28906" ascent="832.51953" descent="-300.29297" font-weight="500"><font-face-src><font-face-name name="CourierNewPSMT"/></font-face-src></font-face><font-face font-family="Courier New" font-size="12" panose-1="2 7 6 9 2 2 5 2 4 4" units-per-em="1000" underline-position="-232.91016" underline-thickness="100.097656" slope="0" x-height="443.35938" cap-height="591.79688" ascent="832.51953" descent="-300.29297" font-weight="bold"><font-face-src><font-face-name name="CourierNewPS-BoldMT"/></font-face-src></font-face><font-face font-family="Courier New" font-size="10" panose-1="2 7 3 9 2 2 5 2 4 4" units-per-em="1000" underline-position="-232.91016" underline-thickness="41.015625" slope="0" x-height="422.85156" cap-height="571.28906" ascent="832.51953" descent="-300.29297" font-weight="500"><font-face-src><font-face-name name="CourierNewPSMT"/></font-face-src></font-face><marker orient="auto" overflow="visible" markerUnits="strokeWidth" id="StickArrow_Marker" viewBox="-1 -4 8 8" markerWidth="8" markerHeight="8" color="black"><g><path d="M 5.8666667 0 L 0 0 M 0 -2.2 L 5.8666667 0 L 0 2.2" fill="none" stroke="currentColor" stroke-width="1"/></g></marker><radialGradient cx="0" cy="0" r="1" id="Gradient" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="white"/><stop offset="1" stop-color="#a5a5a5"/></radialGradient><radialGradient id="Obj_Gradient" xl:href="#Gradient" gradientTransform="translate(311.81102 708.6614) scale(145.75703)"/><marker orient="auto" overflow="visible" markerUnits="strokeWidth" id="StickArrow_Marker_2" viewBox="-1 -6 14 12" markerWidth="14" markerHeight="12" color="black"><g><path d="M 12 0 L 0 0 M 0 -4.5 L 12 0 L 0 4.5" fill="none" stroke="currentColor" stroke-width="1"/></g></marker><marker orient="auto" overflow="visible" markerUnits="strokeWidth" id="StickArrow_Marker_3" viewBox="-1 -4 8 8" markerWidth="8" markerHeight="8" color="black"><g><path d="M 5.9253333 0 L 0 0 M 0 -2.222 L 5.9253333 0 L 0 2.222" fill="none" stroke="currentColor" stroke-width="1"/></g></marker></defs><g stroke="none" stroke-opacity="1" stroke-dasharray="none" fill="none" fill-opacity="1"><title>Canvas 1</title><rect fill="white" width="1314" height="1698"/><g><title>Layer 1</title><rect x="28.346457" y="765.35433" width="566.92913" height="28.346457" fill="#6cf"/><rect x="28.346457" y="765.35433" width="566.92913" height="28.346457" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(33.346457 772.02755)" fill="black"><tspan font-family="Verdana" font-size="12" font-weight="500" x="228.50753" y="12" textLength="99.91406">remote endpoint</tspan></text><rect x="28.346457" 
y="85.03937" width="566.92913" height="566.92913" fill="white"/><rect x="28.346457" y="85.03937" width="566.92913" height="566.92913" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(33.346457 90.03937)" fill="black"><tspan font-family="Verdana" font-size="12" font-weight="500" x="243.79171" y="12" textLength="51.333984">websock</tspan><tspan font-family="Verdana" font-size="12" font-weight="500" x="295.00851" y="12" textLength="18.128906">ets</tspan><tspan font-family="Courier New" font-size="12" font-weight="500" x="195.65109" y="25" textLength="165.62695">WebSocketCommonProtocol</tspan></text><rect x="28.346457" y="28.346457" width="566.92913" height="28.346457" fill="#6f6"/><rect x="28.346457" y="28.346457" width="566.92913" height="28.346457" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(33.346457 35.019685)" fill="black"><tspan font-family="Verdana" font-size="12" font-weight="500" x="230.0046" y="12" textLength="96.91992">application logic</tspan></text><path d="M 102.047243 586.77165 L 238.11023 586.77165 C 247.49858 586.77165 255.11811 596.93102 255.11811 609.4488 C 255.11811 621.9666 247.49858 632.12598 238.11023 632.12598 L 102.047243 632.12598 C 92.658897 632.12598 85.03937 621.9666 85.03937 609.4488 C 85.03937 596.93102 92.658897 586.77165 102.047243 586.77165" fill="#fc6"/><path d="M 102.047243 586.77165 L 238.11023 586.77165 C 247.49858 586.77165 255.11811 596.93102 255.11811 609.4488 C 255.11811 621.9666 247.49858 632.12598 238.11023 632.12598 L 102.047243 632.12598 C 92.658897 632.12598 85.03937 621.9666 85.03937 609.4488 C 85.03937 596.93102 92.658897 586.77165 102.047243 586.77165 M 238.11023 586.77165 C 228.72189 586.77165 221.10236 596.93102 221.10236 609.4488 C 221.10236 621.9666 228.72189 632.12598 238.11023 632.12598" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(125.33071 596.9488)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="14.896484" y="10" textLength="43.20703">reader</tspan><tspan font-family="Courier New" font-size="10" font-weight="500" x=".49414062" y="22" textLength="72.01172">StreamReader</tspan></text><path d="M 385.5118 586.77165 L 521.5748 586.77165 C 530.96315 586.77165 538.58267 596.93102 538.58267 609.4488 C 538.58267 621.9666 530.96315 632.12598 521.5748 632.12598 L 385.5118 632.12598 C 376.12346 632.12598 368.50393 621.9666 368.50393 609.4488 C 368.50393 596.93102 376.12346 586.77165 385.5118 586.77165" fill="#fc6"/><path d="M 385.5118 586.77165 L 521.5748 586.77165 C 530.96315 586.77165 538.58267 596.93102 538.58267 609.4488 C 538.58267 621.9666 530.96315 632.12598 521.5748 632.12598 L 385.5118 632.12598 C 376.12346 632.12598 368.50393 621.9666 368.50393 609.4488 C 368.50393 596.93102 376.12346 586.77165 385.5118 586.77165 M 521.5748 586.77165 C 512.18645 586.77165 504.56693 596.93102 504.56693 609.4488 C 504.56693 621.9666 512.18645 632.12598 521.5748 632.12598" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(408.79527 596.9488)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="14.896484" y="10" textLength="43.20703">writer</tspan><tspan font-family="Courier New" font-size="10" font-weight="500" x=".49414062" y="22" textLength="72.01172">StreamWriter</tspan></text><path d="M 481.88976 419.52756 L 481.88976 374.17323 C 481.88976 
371.04378 469.19055 368.50393 453.5433 368.50393 C 437.89606 368.50393 425.19685 371.04378 425.19685 374.17323 L 425.19685 419.52756 C 425.19685 422.657 437.89606 425.19685 453.5433 425.19685 C 469.19055 425.19685 481.88976 422.657 481.88976 419.52756" fill="#fecc66"/><path d="M 481.88976 419.52756 L 481.88976 374.17323 C 481.88976 371.04378 469.19055 368.50393 453.5433 368.50393 C 437.89606 368.50393 425.19685 371.04378 425.19685 374.17323 L 425.19685 419.52756 C 425.19685 422.657 437.89606 425.19685 453.5433 425.19685 C 469.19055 425.19685 481.88976 422.657 481.88976 419.52756 M 481.88976 374.17323 C 481.88976 377.30267 469.19055 379.84252 453.5433 379.84252 C 437.89606 379.84252 425.19685 377.30267 425.19685 374.17323" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width=".75"/><text transform="translate(429.19685 387.18504)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="500" x="6.343527" y="10" textLength="36.00586">pings</tspan><tspan font-family="Courier New" font-size="10" font-weight="500" x="12.3445034" y="22" textLength="24.003906">dict</tspan></text><path d="M 85.039413 283.46457 L 255.11806 283.46457 C 270.7734 283.46457 283.46457 296.15573 283.46457 311.81107 L 283.46457 481.88972 C 283.46457 497.54506 270.7734 510.23622 255.11806 510.23622 L 85.039413 510.23622 C 69.384074 510.23622 56.692913 497.54506 56.692913 481.88972 L 56.692913 311.81107 C 56.692913 296.15573 69.384074 283.46457 85.039413 283.46457 Z" fill="#dadada"/><path d="M 85.039413 283.46457 L 255.11806 283.46457 C 270.7734 283.46457 283.46457 296.15573 283.46457 311.81107 L 283.46457 481.88972 C 283.46457 497.54506 270.7734 510.23622 255.11806 510.23622 L 85.039413 510.23622 C 69.384074 510.23622 56.692913 497.54506 56.692913 481.88972 L 56.692913 311.81107 C 56.692913 296.15573 69.384074 283.46457 85.039413 283.46457 Z" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(61.692913 288.46457)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="43.57528" y="10" textLength="129.62109">transfer_data_task</tspan><tspan font-family="Courier New" font-size="10" font-weight="500" x="96.383873" y="22" textLength="24.003906">Task</tspan></text><path d="M 297.6378 765.35433 L 297.6378 609.4488 L 255.11811 609.4488 L 269.01811 609.4488 L 266.51811 609.4488" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><path d="M 368.50393 609.4488 L 354.60393 609.4488 L 325.98425 609.4488 L 325.98425 753.95433" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><path d="M 207.03401 712.3154 C 161.22047 708.6614 179.48976 677.90097 252.5726 683.1496 C 259.35307 672.91835 344.33858 674.579 343.783 683.1496 C 397.0715 672.1877 465.17102 694.04553 419.49354 705.00744 C 474.30425 710.32206 418.80189 738.9565 373.8189 734.17322 C 370.2189 742.14584 289.80283 744.9358 282.74457 734.17322 C 237.20882 745.66715 142.25953 727.9946 207.03401 712.3154 Z" fill="url(#Obj_Gradient)"/><path d="M 207.03401 712.3154 C 161.22047 708.6614 179.48976 677.90097 252.5726 683.1496 C 259.35307 672.91835 344.33858 674.579 343.783 683.1496 C 397.0715 672.1877 465.17102 694.04553 419.49354 705.00744 C 474.30425 710.32206 418.80189 738.9565 373.8189 734.17322 C 370.2189 742.14584 289.80283 744.9358 282.74457 734.17322 C 237.20882 745.66715 142.25953 727.9946 207.03401 712.3154 Z" stroke="black" 
stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(217.59842 701.1614)" fill="black"><tspan font-family="Verdana" font-size="12" font-weight="500" x="69.81416" y="12" textLength="48.796875">network</tspan></text><rect x="85.03937" y="453.5433" width="170.07874" height="28.346457" fill="#ff6"/><rect x="85.03937" y="453.5433" width="170.07874" height="28.346457" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(90.03937 460.71653)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="44.03351" y="10" textLength="72.01172">read_frame</tspan></text><rect x="85.03937" y="396.8504" width="170.07874" height="28.346457" fill="#ff6"/><rect x="85.03937" y="396.8504" width="170.07874" height="28.346457" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(90.03937 404.02362)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="26.03058" y="10" textLength="108.01758">read_data_frame</tspan></text><rect x="85.03937" y="340.15748" width="170.07874" height="28.346457" fill="#ff6"/><rect x="85.03937" y="340.15748" width="170.07874" height="28.346457" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(90.03937 347.3307)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="36.832338" y="10" textLength="86.41406">read_message</tspan></text><text transform="translate(178.07874 490.563)" fill="black"><tspan font-family="Courier New" font-size="10" font-weight="500" x="0" y="8" textLength="30.004883">bytes</tspan></text><text transform="translate(178.07874 433.87008)" fill="black"><tspan font-family="Courier New" font-size="10" font-weight="500" x="0" y="8" textLength="36.00586">frames</tspan></text><text transform="translate(178.07874 371.67716)" fill="black"><tspan font-family="Courier New" font-size="10" font-weight="500" x="0" y="8" textLength="24.003906">data</tspan><tspan font-family="Courier New" font-size="10" font-weight="500" x="0" y="19" textLength="36.00586">frames</tspan></text><rect x="368.50393" y="510.23622" width="170.07874" height="28.346457" fill="#ff6"/><rect x="368.50393" y="510.23622" width="170.07874" height="28.346457" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(373.50393 517.40945)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="40.432924" y="10" textLength="79.21289">write_frame</tspan></text><path d="M 85.03937 609.4488 L 71.13937 609.4488 L 56.692913 609.4488 L 56.692913 595.2756 L 56.692913 566.92913 L 113.385826 566.92913 L 170.07874 566.92913 L 170.07874 495.78976 L 170.07874 494.03976" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><path d="M 453.5433 539.33267 L 453.5433 552.48267 L 453.5433 566.92913 L 510.23622 566.92913 L 569.76378 566.92913 L 569.76378 595.2756 L 569.76378 609.4488 L 552.48267 609.4488 L 549.98267 609.4488" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><line x1="170.07874" y1="453.5433" x2="170.07874" y2="437.34685" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><line x1="170.07874" y1="396.8504" x2="170.07874" y2="380.65393" 
marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><path d="M 102.047243 204.09449 L 238.11023 204.09449 C 247.49858 204.09449 255.11811 214.25386 255.11811 226.77165 C 255.11811 239.28945 247.49858 249.44882 238.11023 249.44882 L 102.047243 249.44882 C 92.658897 249.44882 85.03937 239.28945 85.03937 226.77165 C 85.03937 214.25386 92.658897 204.09449 102.047243 204.09449" fill="#fc6"/><path d="M 102.047243 204.09449 L 238.11023 204.09449 C 247.49858 204.09449 255.11811 214.25386 255.11811 226.77165 C 255.11811 239.28945 247.49858 249.44882 238.11023 249.44882 L 102.047243 249.44882 C 92.658897 249.44882 85.03937 239.28945 85.03937 226.77165 C 85.03937 214.25386 92.658897 204.09449 102.047243 204.09449 M 238.11023 204.09449 C 228.72189 204.09449 221.10236 214.25386 221.10236 226.77165 C 221.10236 239.28945 228.72189 249.44882 238.11023 249.44882" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(132.33071 214.27165)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x=".1953125" y="10" textLength="57.609375">messages</tspan><tspan font-family="Courier New" font-size="10" font-weight="500" x="13.997559" y="22" textLength="30.004883">deque</tspan></text><path d="M 255.11811 354.3307 L 269.01811 354.3307 L 297.6378 354.3307 L 297.6378 328.8189 L 297.6378 226.77165 L 269.01811 226.77165 L 266.51811 226.77165" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><rect x="85.03937" y="141.73228" width="170.07874" height="28.346457" fill="#cf6"/><rect x="85.03937" y="141.73228" width="170.07874" height="28.346457" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(90.03937 148.90551)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="65.637026" y="10" textLength="28.804688">recv</tspan></text><path d="M 85.03937 226.77165 L 71.13937 226.77165 L 42.519685 226.77165 L 42.519685 209.76378 L 42.519685 155.90551 L 71.13937 155.90551 L 73.63937 155.90551" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><line x1="170.07874" y1="141.73228" x2="170.07874" y2="68.092913" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><line x1="453.5433" y1="56.692913" x2="453.5433" y2="130.33228" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><line x1="467.71653" y1="56.692913" x2="467.71653" y2="187.8752" marker-end="url(#StickArrow_Marker_2)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width=".75"/><line x1="481.88976" y1="56.692913" x2="481.88976" y2="244.56811" marker-end="url(#StickArrow_Marker_2)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width=".75"/><line x1="496.063" y1="56.692913" x2="496.063" y2="300.32302" marker-end="url(#StickArrow_Marker_3)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><rect x="368.50393" y="141.73228" width="170.07874" height="28.346457" fill="#cf6"/><rect x="368.50393" y="141.73228" width="170.07874" height="28.346457" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(373.50393 148.90551)" fill="black"><tspan 
font-family="Courier New" font-size="12" font-weight="bold" x="65.637026" y="10" textLength="28.804688">send</tspan></text><rect x="368.50393" y="198.4252" width="170.07874" height="28.346457" fill="#cf6"/><rect x="368.50393" y="198.4252" width="170.07874" height="28.346457" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width=".75"/><text transform="translate(373.50393 205.59842)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="500" x="65.637026" y="10" textLength="28.804688">ping</tspan></text><rect x="368.50393" y="255.11811" width="170.07874" height="28.346457" fill="#cf6"/><rect x="368.50393" y="255.11811" width="170.07874" height="28.346457" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width=".75"/><text transform="translate(373.50393 262.29134)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="500" x="65.637026" y="10" textLength="28.804688">pong</tspan></text><rect x="368.50393" y="311.81102" width="170.07874" height="28.346457" fill="#cf6"/><rect x="368.50393" y="311.81102" width="170.07874" height="28.346457" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(373.50393 318.98425)" fill="black"><tspan font-family="Courier New" font-size="12" font-weight="bold" x="62.03644" y="10" textLength="36.00586">close</tspan></text><path d="M 538.58267 155.90551 L 552.48267 155.90551 L 566.92913 155.90551 L 566.92913 481.88976 L 453.5433 481.88976 L 453.5433 496.33622 L 453.5433 498.08622" marker-end="url(#StickArrow_Marker)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><line x1="538.58267" y1="212.59842" x2="566.92913" y2="212.59842" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width=".75"/><line x1="538.58267" y1="269.29134" x2="566.92913" y2="269.29134" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width=".75"/><line x1="538.58267" y1="325.98425" x2="566.92913" y2="325.98425" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><path d="M 255.86811 411.02362 L 262.61811 411.02362 L 340.15748 411.02362 L 340.15748 481.88976 L 453.5433 481.88976" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5"/><text transform="translate(291.94527 399.02362)" fill="black"><tspan font-family="Courier New" font-size="10" font-weight="500" x="0" y="8" textLength="42.006836">control</tspan><tspan font-family="Courier New" font-size="10" font-weight="500" x="0" y="21" textLength="36.00586">frames</tspan></text><line x1="340.15748" y1="411.02362" x2="414.64685" y2="411.02362" marker-end="url(#StickArrow_Marker_2)" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width=".75"/><path d="M 368.50393 212.59842 L 361.75393 212.59842 L 340.15748 212.59842 L 340.15748 340.15748 L 340.15748 382.67716" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width=".75"/><text transform="translate(461.5433 547.2559)" fill="black"><tspan font-family="Courier New" font-size="10" font-weight="500" x="0" y="8" textLength="30.004883">bytes</tspan></text><text transform="translate(461.5433 490.563)" fill="black"><tspan font-family="Courier New" font-size="10" font-weight="500" x="0" y="8" textLength="36.00586">frames</tspan></text><line x1="340.15748" y1="382.67716" x2="414.64685" y2="382.67716" marker-end="url(#StickArrow_Marker_2)" stroke="black" stroke-linecap="round" stroke-linejoin="round" 
stroke-width=".75"/></g></g></svg>
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/requirements.txt b/testing/web-platform/tests/tools/third_party/websockets/docs/requirements.txt
new file mode 100644
index 0000000000..0eaf94fbe8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/requirements.txt
@@ -0,0 +1,4 @@
+sphinx
+sphinx-autodoc-typehints
+sphinxcontrib-spelling
+sphinxcontrib-trio
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/security.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/security.rst
new file mode 100644
index 0000000000..e9acf0629c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/security.rst
@@ -0,0 +1,39 @@
+Security
+========
+
+Encryption
+----------
+
+For production use, a server should require encrypted connections.
+
+See this example of :ref:`encrypting connections with TLS
+<secure-server-example>`.
+
+Memory use
+----------
+
+.. warning::
+
+ An attacker who can open an arbitrary number of connections will be able
+ to perform a denial of service by memory exhaustion. If you're concerned
+ by denial of service attacks, you must reject suspicious connections
+ before they reach ``websockets``, typically in a reverse proxy.
+
+With the default settings, opening a connection uses 325 KiB of memory.
+
+Sending some highly compressed messages could use up to 128 MiB of memory
+with an amplification factor of 1000 between network traffic and memory use.
+
+Configuring a server to :ref:`optimize memory usage <memory-usage>` will
+improve security in addition to improving performance.
+
+Other limits
+------------
+
+``websockets`` implements additional limits on the amount of data it accepts
+in order to minimize exposure to security vulnerabilities.
+
+In the opening handshake, ``websockets`` limits the number of HTTP headers to
+256 and the size of an individual header to 4096 bytes. These limits are 10 to
+20 times larger than what's expected in standard use cases. They're hard-coded.
+If you need to change them, monkey-patch the constants in ``websockets.http``.
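The paragraph above points at monkey-patching without showing it. A minimal sketch, assuming the websockets 8.x constant names ``MAX_HEADERS`` and ``MAX_LINE`` (an assumption; other releases may name them differently) and the documented ``max_size``/``max_queue`` keyword arguments of ``serve()`` for bounding per-connection memory:

    import asyncio
    import websockets
    import websockets.http

    # Raise the hard-coded handshake limits before starting the server.
    # Constant names are assumed from websockets 8.x.
    websockets.http.MAX_HEADERS = 512   # default: 256 headers per handshake
    websockets.http.MAX_LINE = 8192     # default: roughly 4 KiB per header line

    async def echo(websocket, path):
        async for message in websocket:
            await websocket.send(message)

    # max_size caps the size of an incoming message in bytes; max_queue caps
    # how many incoming messages may be buffered per connection.
    start_server = websockets.serve(
        echo, "localhost", 8765, max_size=2 ** 20, max_queue=16
    )

    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()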
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/spelling_wordlist.txt b/testing/web-platform/tests/tools/third_party/websockets/docs/spelling_wordlist.txt
new file mode 100644
index 0000000000..1eacc491df
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/spelling_wordlist.txt
@@ -0,0 +1,39 @@
+attr
+augustin
+Auth
+awaitable
+aymeric
+backpressure
+Backpressure
+Bitcoin
+bufferbloat
+Bufferbloat
+bugfix
+bytestring
+bytestrings
+changelog
+cryptocurrency
+daemonize
+fractalideas
+iterable
+keepalive
+KiB
+lifecycle
+Lifecycle
+MiB
+nginx
+permessage
+pong
+pongs
+Pythonic
+serializers
+subclassing
+subprotocol
+subprotocols
+TLS
+Unparse
+uple
+username
+websocket
+WebSocket
+websockets
diff --git a/testing/web-platform/tests/tools/third_party/websockets/docs/tidelift.rst b/testing/web-platform/tests/tools/third_party/websockets/docs/tidelift.rst
new file mode 100644
index 0000000000..43b457aafa
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/docs/tidelift.rst
@@ -0,0 +1,112 @@
+websockets for enterprise
+=========================
+
+Available as part of the Tidelift Subscription
+----------------------------------------------
+
+.. image:: _static/tidelift.png
+ :height: 150px
+ :width: 150px
+ :align: left
+
+Tidelift is working with the maintainers of websockets and thousands of other
+open source projects to deliver commercial support and maintenance for the
+open source dependencies you use to build your applications. Save time, reduce
+risk, and improve code health, while paying the maintainers of the exact
+dependencies you use.
+
+.. raw:: html
+
+ <style type="text/css">
+ .tidelift-links {
+ display: flex;
+ justify-content: center;
+ }
+ @media only screen and (max-width: 600px) {
+ .tidelift-links {
+ flex-direction: column;
+ }
+ }
+ .tidelift-links a {
+ border: thin solid #f6914d;
+ border-radius: 0.25em;
+ font-family: Verdana, sans-serif;
+ font-size: 15px;
+ margin: 0.5em 2em;
+ padding: 0.5em 2em;
+ text-align: center;
+ text-decoration: none;
+ text-transform: uppercase;
+ }
+ .tidelift-links a.tidelift-links__learn-more {
+ background-color: white;
+ color: #f6914d;
+ }
+ .tidelift-links a.tidelift-links__request-a-demo {
+ background-color: #f6914d;
+ color: white;
+ }
+ </style>
+
+ <div class="tidelift-links">
+ <a class="tidelift-links__learn-more" href="https://tidelift.com/subscription/pkg/pypi-websockets?utm_source=pypi-websockets&utm_medium=referral&utm_campaign=enterprise">Learn more</a>
+ <a class="tidelift-links__request-a-demo" href="https://tidelift.com/subscription/request-a-demo?utm_source=pypi-websockets&utm_medium=referral&utm_campaign=enterprise">Request a demo</a>
+ </div>
+
+Enterprise-ready open source software—managed for you
+-----------------------------------------------------
+
+The Tidelift Subscription is a managed open source subscription for
+application dependencies covering millions of open source projects across
+JavaScript, Python, Java, PHP, Ruby, .NET, and more.
+
+Your subscription includes:
+
+* **Security updates**
+
+ * Tidelift’s security response team coordinates patches for new breaking
+ security vulnerabilities and alerts you immediately through a private channel,
+ so your software supply chain is always secure.
+
+* **Licensing verification and indemnification**
+
+ * Tidelift verifies license information to enable easy policy enforcement
+ and adds intellectual property indemnification to cover creators and users
+ in case something goes wrong. You always have a 100% up-to-date bill of
+ materials for your dependencies to share with your legal team, customers,
+ or partners.
+
+* **Maintenance and code improvement**
+
+ * Tidelift ensures the software you rely on keeps working as long as you
+ need it to work. Your managed dependencies are actively maintained and we
+ recruit additional maintainers where required.
+
+* **Package selection and version guidance**
+
+ * We help you choose the best open source packages from the start—and then
+ guide you through updates to stay on the best releases as new issues
+ arise.
+
+* **Roadmap input**
+
+ * Take a seat at the table with the creators behind the software you use.
+ Tidelift’s participating maintainers earn more income as their software is
+ used by more subscribers, so they’re interested in knowing what you need.
+
+* **Tooling and cloud integration**
+
+ * Tidelift works with GitHub, GitLab, Bitbucket, and more. We support every
+ cloud platform (and other deployment targets, too).
+
+The end result? All of the capabilities you expect from commercial-grade
+software, for the full breadth of open source you use. That means less time
+grappling with esoteric open source trivia, and more time building your own
+applications—and your business.
+
+.. raw:: html
+
+ <div class="tidelift-links">
+ <a class="tidelift-links__learn-more" href="https://tidelift.com/subscription/pkg/pypi-websockets?utm_source=pypi-websockets&utm_medium=referral&utm_campaign=enterprise">Learn more</a>
+ <a class="tidelift-links__request-a-demo" href="https://tidelift.com/subscription/request-a-demo?utm_source=pypi-websockets&utm_medium=referral&utm_campaign=enterprise">Request a demo</a>
+ </div>
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/basic_auth_client.py b/testing/web-platform/tests/tools/third_party/websockets/example/basic_auth_client.py
new file mode 100755
index 0000000000..cc94dbe4b4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/basic_auth_client.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+
+# WS client example with HTTP Basic Authentication
+
+import asyncio
+import websockets
+
+async def hello():
+ uri = "ws://mary:p@ssw0rd@localhost:8765"
+ async with websockets.connect(uri) as websocket:
+ greeting = await websocket.recv()
+ print(greeting)
+
+asyncio.get_event_loop().run_until_complete(hello())
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/basic_auth_server.py b/testing/web-platform/tests/tools/third_party/websockets/example/basic_auth_server.py
new file mode 100755
index 0000000000..6740d57989
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/basic_auth_server.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Server example with HTTP Basic Authentication over TLS
+
+import asyncio
+import websockets
+
+async def hello(websocket, path):
+ greeting = f"Hello {websocket.username}!"
+ await websocket.send(greeting)
+
+start_server = websockets.serve(
+ hello, "localhost", 8765,
+ create_protocol=websockets.basic_auth_protocol_factory(
+ realm="example", credentials=("mary", "p@ssw0rd")
+ ),
+)
+
+asyncio.get_event_loop().run_until_complete(start_server)
+asyncio.get_event_loop().run_forever()
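For comparison with the static-credentials example above, a hedged sketch using the ``check_credentials`` callable that ``basic_auth_protocol_factory`` accepts in websockets 8.x (assumed here to be a plain function returning a bool, as in that release; newer releases may differ), validating against a small user table instead of one hard-coded pair:

    #!/usr/bin/env python

    import asyncio
    import hmac
    import websockets

    USERS = {"mary": "p@ssw0rd"}

    async def hello(websocket, path):
        await websocket.send(f"Hello {websocket.username}!")

    def check_credentials(username, password):
        # Return True to accept the handshake, False to answer 401.
        expected = USERS.get(username)
        if expected is None:
            return False
        # Constant-time comparison to avoid timing leaks.
        return hmac.compare_digest(password, expected)

    start_server = websockets.serve(
        hello, "localhost", 8765,
        create_protocol=websockets.basic_auth_protocol_factory(
            realm="example", check_credentials=check_credentials
        ),
    )

    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()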
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/client.py b/testing/web-platform/tests/tools/third_party/websockets/example/client.py
new file mode 100755
index 0000000000..4f969c478a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/client.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# WS client example
+
+import asyncio
+import websockets
+
+async def hello():
+ uri = "ws://localhost:8765"
+ async with websockets.connect(uri) as websocket:
+ name = input("What's your name? ")
+
+ await websocket.send(name)
+ print(f"> {name}")
+
+ greeting = await websocket.recv()
+ print(f"< {greeting}")
+
+asyncio.get_event_loop().run_until_complete(hello())
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/counter.html b/testing/web-platform/tests/tools/third_party/websockets/example/counter.html
new file mode 100644
index 0000000000..6310c4a16d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/counter.html
@@ -0,0 +1,80 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <title>WebSocket demo</title>
+ <style type="text/css">
+ body {
+ font-family: "Courier New", sans-serif;
+ text-align: center;
+ }
+ .buttons {
+ font-size: 4em;
+ display: flex;
+ justify-content: center;
+ }
+ .button, .value {
+ line-height: 1;
+ padding: 2rem;
+ margin: 2rem;
+ border: medium solid;
+ min-height: 1em;
+ min-width: 1em;
+ }
+ .button {
+ cursor: pointer;
+ user-select: none;
+ }
+ .minus {
+ color: red;
+ }
+ .plus {
+ color: green;
+ }
+ .value {
+ min-width: 2em;
+ }
+ .state {
+ font-size: 2em;
+ }
+ </style>
+ </head>
+ <body>
+ <div class="buttons">
+ <div class="minus button">-</div>
+ <div class="value">?</div>
+ <div class="plus button">+</div>
+ </div>
+ <div class="state">
+ <span class="users">?</span> online
+ </div>
+ <script>
+ var minus = document.querySelector('.minus'),
+ plus = document.querySelector('.plus'),
+ value = document.querySelector('.value'),
+ users = document.querySelector('.users'),
+ websocket = new WebSocket("ws://127.0.0.1:6789/");
+ minus.onclick = function (event) {
+ websocket.send(JSON.stringify({action: 'minus'}));
+ }
+ plus.onclick = function (event) {
+ websocket.send(JSON.stringify({action: 'plus'}));
+ }
+ websocket.onmessage = function (event) {
+ var data = JSON.parse(event.data);
+ switch (data.type) {
+ case 'state':
+ value.textContent = data.value;
+ break;
+ case 'users':
+ users.textContent = (
+ data.count.toString() + " user" +
+ (data.count == 1 ? "" : "s"));
+ break;
+ default:
+ console.error(
+ "unsupported event", data);
+ }
+ };
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/counter.py b/testing/web-platform/tests/tools/third_party/websockets/example/counter.py
new file mode 100755
index 0000000000..dbbbe59358
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/counter.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+
+# WS server example that synchronizes state across clients
+
+import asyncio
+import json
+import logging
+import websockets
+
+logging.basicConfig()
+
+STATE = {"value": 0}
+
+USERS = set()
+
+
+def state_event():
+ return json.dumps({"type": "state", **STATE})
+
+
+def users_event():
+ return json.dumps({"type": "users", "count": len(USERS)})
+
+
+async def notify_state():
+ if USERS: # asyncio.wait doesn't accept an empty list
+ message = state_event()
+ await asyncio.wait([user.send(message) for user in USERS])
+
+
+async def notify_users():
+ if USERS: # asyncio.wait doesn't accept an empty list
+ message = users_event()
+ await asyncio.wait([user.send(message) for user in USERS])
+
+
+async def register(websocket):
+ USERS.add(websocket)
+ await notify_users()
+
+
+async def unregister(websocket):
+ USERS.remove(websocket)
+ await notify_users()
+
+
+async def counter(websocket, path):
+ # register(websocket) sends user_event() to websocket
+ await register(websocket)
+ try:
+ await websocket.send(state_event())
+ async for message in websocket:
+ data = json.loads(message)
+ if data["action"] == "minus":
+ STATE["value"] -= 1
+ await notify_state()
+ elif data["action"] == "plus":
+ STATE["value"] += 1
+ await notify_state()
+ else:
+ logging.error("unsupported event: %s", data)
+ finally:
+ await unregister(websocket)
+
+
+start_server = websockets.serve(counter, "localhost", 6789)
+
+asyncio.get_event_loop().run_until_complete(start_server)
+asyncio.get_event_loop().run_forever()
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/echo.py b/testing/web-platform/tests/tools/third_party/websockets/example/echo.py
new file mode 100755
index 0000000000..b7ca38d321
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/echo.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+
+import asyncio
+import websockets
+
+async def echo(websocket, path):
+ async for message in websocket:
+ await websocket.send(message)
+
+start_server = websockets.serve(echo, "localhost", 8765)
+
+asyncio.get_event_loop().run_until_complete(start_server)
+asyncio.get_event_loop().run_forever()
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/health_check_server.py b/testing/web-platform/tests/tools/third_party/websockets/example/health_check_server.py
new file mode 100755
index 0000000000..417063fce7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/health_check_server.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# WS echo server with HTTP endpoint at /health/
+
+import asyncio
+import http
+import websockets
+
+async def health_check(path, request_headers):
+ if path == "/health/":
+ return http.HTTPStatus.OK, [], b"OK\n"
+
+async def echo(websocket, path):
+ async for message in websocket:
+ await websocket.send(message)
+
+start_server = websockets.serve(
+ echo, "localhost", 8765, process_request=health_check
+)
+
+asyncio.get_event_loop().run_until_complete(start_server)
+asyncio.get_event_loop().run_forever()
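A quick, illustrative check of the ``/health/`` endpoint above using only the standard library (run it while the server is listening; the 200/``OK`` response follows from the handler's return value):

    import http.client

    # process_request short-circuits the WebSocket handshake for /health/
    # and answers with plain HTTP, so a regular HTTP client can query it.
    conn = http.client.HTTPConnection("localhost", 8765)
    conn.request("GET", "/health/")
    response = conn.getresponse()
    print(response.status, response.read())   # expected: 200 b'OK\n'
    conn.close()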
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/hello.py b/testing/web-platform/tests/tools/third_party/websockets/example/hello.py
new file mode 100755
index 0000000000..6c9c839d82
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/hello.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+
+import asyncio
+import websockets
+
+async def hello():
+ uri = "ws://localhost:8765"
+ async with websockets.connect(uri) as websocket:
+ await websocket.send("Hello world!")
+ await websocket.recv()
+
+asyncio.get_event_loop().run_until_complete(hello())
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/localhost.pem b/testing/web-platform/tests/tools/third_party/websockets/example/localhost.pem
new file mode 100644
index 0000000000..f9a30ba8f6
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/localhost.pem
@@ -0,0 +1,48 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDG8iDak4UBpurI
+TWjSfqJ0YVG/S56nhswehupCaIzu0xQ8wqPSs36h5t1jMexJPZfvwyvFjcV+hYpj
+LMM0wMJPx9oBQEe0bsmlC66e8aF0UpSQw1aVfYoxA9BejgEyrFNE7cRbQNYFEb/5
+3HfqZKdEQA2fgQSlZ0RTRmLrD+l72iO5o2xl5bttXpqYZB2XOkyO79j/xWdu9zFE
+sgZJ5ysWbqoRAGgnxjdYYr9DARd8bIE/hN3SW7mDt5v4LqCIhGn1VmrwtT3d5AuG
+QPz4YEbm0t6GOlmFjIMYH5Y7pALRVfoJKRj6DGNIR1JicL+wqLV66kcVnj8WKbla
+20i7fR7NAgMBAAECggEAG5yvgqbG5xvLqlFUIyMAWTbIqcxNEONcoUAIc38fUGZr
+gKNjKXNQOBha0dG0AdZSqCxmftzWdGEEfA9SaJf4YCpUz6ekTB60Tfv5GIZg6kwr
+4ou6ELWD4Jmu6fC7qdTRGdgGUMQG8F0uT/eRjS67KHXbbi/x/SMAEK7MO+PRfCbj
++JGzS9Ym9mUweINPotgjHdDGwwd039VWYS+9A+QuNK27p3zq4hrWRb4wshSC8fKy
+oLoe4OQt81aowpX9k6mAU6N8vOmP8/EcQHYC+yFIIDZB2EmDP07R1LUEH3KJnzo7
+plCK1/kYPhX0a05cEdTpXdKa74AlvSRkS11sGqfUAQKBgQDj1SRv0AUGsHSA0LWx
+a0NT1ZLEXCG0uqgdgh0sTqIeirQsPROw3ky4lH5MbjkfReArFkhHu3M6KoywEPxE
+wanSRh/t1qcNjNNZUvFoUzAKVpb33RLkJppOTVEWPt+wtyDlfz1ZAXzMV66tACrx
+H2a3v0ZWUz6J+x/dESH5TTNL4QKBgQDfirmknp408pwBE+bulngKy0QvU09En8H0
+uvqr8q4jCXqJ1tXon4wsHg2yF4Fa37SCpSmvONIDwJvVWkkYLyBHKOns/fWCkW3n
+hIcYx0q2jgcoOLU0uoaM9ArRXhIxoWqV/KGkQzN+3xXC1/MxZ5OhyxBxfPCPIYIN
+YN3M1t/QbQKBgDImhsC+D30rdlmsl3IYZFed2ZKznQ/FTqBANd+8517FtWdPgnga
+VtUCitKUKKrDnNafLwXrMzAIkbNn6b/QyWrp2Lln2JnY9+TfpxgJx7de3BhvZ2sl
+PC4kQsccy+yAQxOBcKWY+Dmay251bP5qpRepWPhDlq6UwqzMyqev4KzBAoGAWDMi
+IEO9ZGK9DufNXCHeZ1PgKVQTmJ34JxmHQkTUVFqvEKfFaq1Y3ydUfAouLa7KSCnm
+ko42vuhGFB41bOdbMvh/o9RoBAZheNGfhDVN002ioUoOpSlbYU4A3q7hOtfXeCpf
+lLI3JT3cFi6ic8HMTDAU4tJLEA5GhATOPr4hPNkCgYB8jTYGcLvoeFaLEveg0kS2
+cz6ZXGLJx5m1AOQy5g9FwGaW+10lr8TF2k3AldwoiwX0R6sHAf/945aGU83ms5v9
+PB9/x66AYtSRUos9MwB4y1ur4g6FiXZUBgTJUqzz2nehPCyGjYhh49WucjszqcjX
+chS1bKZOY+1knWq8xj5Qyg==
+-----END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDTTCCAjWgAwIBAgIJAOjte6l+03jvMA0GCSqGSIb3DQEBCwUAMEwxCzAJBgNV
+BAYTAkZSMQ4wDAYDVQQHDAVQYXJpczEZMBcGA1UECgwQQXltZXJpYyBBdWd1c3Rp
+bjESMBAGA1UEAwwJbG9jYWxob3N0MCAXDTE4MDUwNTE2NTkyOVoYDzIwNjAwNTA0
+MTY1OTI5WjBMMQswCQYDVQQGEwJGUjEOMAwGA1UEBwwFUGFyaXMxGTAXBgNVBAoM
+EEF5bWVyaWMgQXVndXN0aW4xEjAQBgNVBAMMCWxvY2FsaG9zdDCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMbyINqThQGm6shNaNJ+onRhUb9LnqeGzB6G
+6kJojO7TFDzCo9KzfqHm3WMx7Ek9l+/DK8WNxX6FimMswzTAwk/H2gFAR7RuyaUL
+rp7xoXRSlJDDVpV9ijED0F6OATKsU0TtxFtA1gURv/ncd+pkp0RADZ+BBKVnRFNG
+YusP6XvaI7mjbGXlu21emphkHZc6TI7v2P/FZ273MUSyBknnKxZuqhEAaCfGN1hi
+v0MBF3xsgT+E3dJbuYO3m/guoIiEafVWavC1Pd3kC4ZA/PhgRubS3oY6WYWMgxgf
+ljukAtFV+gkpGPoMY0hHUmJwv7CotXrqRxWePxYpuVrbSLt9Hs0CAwEAAaMwMC4w
+LAYDVR0RBCUwI4IJbG9jYWxob3N0hwR/AAABhxAAAAAAAAAAAAAAAAAAAAABMA0G
+CSqGSIb3DQEBCwUAA4IBAQC9TsTxTEvqHPUS6sfvF77eG0D6HLOONVN91J+L7LiX
+v3bFeS1xbUS6/wIxZi5EnAt/te5vaHk/5Q1UvznQP4j2gNoM6lH/DRkSARvRitVc
+H0qN4Xp2Yk1R9VEx4ZgArcyMpI+GhE4vJRx1LE/hsuAzw7BAdsTt9zicscNg2fxO
+3ao/eBcdaC6n9aFYdE6CADMpB1lCX2oWNVdj6IavQLu7VMc+WJ3RKncwC9th+5OP
+ISPvkVZWf25rR2STmvvb0qEm3CZjk4Xd7N+gxbKKUvzEgPjrLSWzKKJAWHjCLugI
+/kQqhpjWVlTbtKzWz5bViqCjSbrIPpU2MgG9AUV9y3iV
+-----END CERTIFICATE-----
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/secure_client.py b/testing/web-platform/tests/tools/third_party/websockets/example/secure_client.py
new file mode 100755
index 0000000000..54971b9847
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/secure_client.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+# WSS (WS over TLS) client example, with a self-signed certificate
+
+import asyncio
+import pathlib
+import ssl
+import websockets
+
+ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+localhost_pem = pathlib.Path(__file__).with_name("localhost.pem")
+ssl_context.load_verify_locations(localhost_pem)
+
+async def hello():
+ uri = "wss://localhost:8765"
+ async with websockets.connect(
+ uri, ssl=ssl_context
+ ) as websocket:
+ name = input("What's your name? ")
+
+ await websocket.send(name)
+ print(f"> {name}")
+
+ greeting = await websocket.recv()
+ print(f"< {greeting}")
+
+asyncio.get_event_loop().run_until_complete(hello())
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/secure_server.py b/testing/web-platform/tests/tools/third_party/websockets/example/secure_server.py
new file mode 100755
index 0000000000..2a00bdb504
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/secure_server.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# WSS (WS over TLS) server example, with a self-signed certificate
+
+import asyncio
+import pathlib
+import ssl
+import websockets
+
+async def hello(websocket, path):
+ name = await websocket.recv()
+ print(f"< {name}")
+
+ greeting = f"Hello {name}!"
+
+ await websocket.send(greeting)
+ print(f"> {greeting}")
+
+ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+localhost_pem = pathlib.Path(__file__).with_name("localhost.pem")
+ssl_context.load_cert_chain(localhost_pem)
+
+start_server = websockets.serve(
+ hello, "localhost", 8765, ssl=ssl_context
+)
+
+asyncio.get_event_loop().run_until_complete(start_server)
+asyncio.get_event_loop().run_forever()
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/server.py b/testing/web-platform/tests/tools/third_party/websockets/example/server.py
new file mode 100755
index 0000000000..c8ab69971b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/server.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# WS server example
+
+import asyncio
+import websockets
+
+async def hello(websocket, path):
+ name = await websocket.recv()
+ print(f"< {name}")
+
+ greeting = f"Hello {name}!"
+
+ await websocket.send(greeting)
+ print(f"> {greeting}")
+
+start_server = websockets.serve(hello, "localhost", 8765)
+
+asyncio.get_event_loop().run_until_complete(start_server)
+asyncio.get_event_loop().run_forever()
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/show_time.html b/testing/web-platform/tests/tools/third_party/websockets/example/show_time.html
new file mode 100644
index 0000000000..721f44264e
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/show_time.html
@@ -0,0 +1,20 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <title>WebSocket demo</title>
+ </head>
+ <body>
+ <script>
+ var ws = new WebSocket("ws://127.0.0.1:5678/"),
+ messages = document.createElement('ul');
+ ws.onmessage = function (event) {
+ var messages = document.getElementsByTagName('ul')[0],
+ message = document.createElement('li'),
+ content = document.createTextNode(event.data);
+ message.appendChild(content);
+ messages.appendChild(message);
+ };
+ document.body.appendChild(messages);
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/show_time.py b/testing/web-platform/tests/tools/third_party/websockets/example/show_time.py
new file mode 100755
index 0000000000..e5d6ac9aa3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/show_time.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# WS server that sends messages at random intervals
+
+import asyncio
+import datetime
+import random
+import websockets
+
+async def time(websocket, path):
+ while True:
+ now = datetime.datetime.utcnow().isoformat() + "Z"
+ await websocket.send(now)
+ await asyncio.sleep(random.random() * 3)
+
+start_server = websockets.serve(time, "127.0.0.1", 5678)
+
+asyncio.get_event_loop().run_until_complete(start_server)
+asyncio.get_event_loop().run_forever()
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/shutdown.py b/testing/web-platform/tests/tools/third_party/websockets/example/shutdown.py
new file mode 100755
index 0000000000..86846abe73
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/shutdown.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+import asyncio
+import signal
+import websockets
+
+async def echo(websocket, path):
+ async for message in websocket:
+ await websocket.send(message)
+
+async def echo_server(stop):
+ async with websockets.serve(echo, "localhost", 8765):
+ await stop
+
+loop = asyncio.get_event_loop()
+
+# The stop condition is set when receiving SIGTERM.
+stop = loop.create_future()
+loop.add_signal_handler(signal.SIGTERM, stop.set_result, None)
+
+# Run the server until the stop condition is met.
+loop.run_until_complete(echo_server(stop))
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/unix_client.py b/testing/web-platform/tests/tools/third_party/websockets/example/unix_client.py
new file mode 100755
index 0000000000..577135b3db
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/unix_client.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# WS client example connecting to a Unix socket
+
+import asyncio
+import os.path
+import websockets
+
+async def hello():
+ socket_path = os.path.join(os.path.dirname(__file__), "socket")
+ async with websockets.unix_connect(socket_path) as websocket:
+ name = input("What's your name? ")
+ await websocket.send(name)
+ print(f"> {name}")
+
+ greeting = await websocket.recv()
+ print(f"< {greeting}")
+
+asyncio.get_event_loop().run_until_complete(hello())
diff --git a/testing/web-platform/tests/tools/third_party/websockets/example/unix_server.py b/testing/web-platform/tests/tools/third_party/websockets/example/unix_server.py
new file mode 100755
index 0000000000..a6ec0168a2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/example/unix_server.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+# WS server example listening on a Unix socket
+
+import asyncio
+import os.path
+import websockets
+
+async def hello(websocket, path):
+ name = await websocket.recv()
+ print(f"< {name}")
+
+ greeting = f"Hello {name}!"
+
+ await websocket.send(greeting)
+ print(f"> {greeting}")
+
+socket_path = os.path.join(os.path.dirname(__file__), "socket")
+start_server = websockets.unix_serve(hello, socket_path)
+
+asyncio.get_event_loop().run_until_complete(start_server)
+asyncio.get_event_loop().run_forever()
diff --git a/testing/web-platform/tests/tools/third_party/websockets/logo/horizontal.svg b/testing/web-platform/tests/tools/third_party/websockets/logo/horizontal.svg
new file mode 100644
index 0000000000..ee872dc478
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/logo/horizontal.svg
@@ -0,0 +1,31 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="1024" height="256" viewBox="0 0 1024 256">
+ <linearGradient id="w" x1="0" y1="0" x2="0.1667" y2="0.6667">
+ <stop offset="0%" stop-color="#ffe873" />
+ <stop offset="100%" stop-color="#ffd43b" />
+ </linearGradient>
+ <linearGradient id="s" x1="0" y1="0" x2="0.1667" y2="0.6667">
+ <stop offset="0%" stop-color="#5a9fd4" />
+ <stop offset="100%" stop-color="#306998" />
+ </linearGradient>
+<g>
+ <path fill="url(#w)" d="m 151.60708,154.81618 c -0.43704,0.0747 -0.88656,0.12978 -1.35572,0.14933 -2.45813,0.0764 -4.25357,-0.58665 -5.82335,-2.15107 l -8.89246,-8.85942 -11.23464,-11.19805 -36.040757,-35.919452 c -3.43568,-3.42217 -7.332485,-5.347474 -11.589626,-5.723468 -2.229803,-0.198219 -4.473877,0.03111 -6.640354,0.675545 -3.242133,0.944875 -6.135526,2.664848 -8.593662,5.116366 -3.834369,3.819499 -5.86349,8.414979 -5.875977,13.287799 -0.06065,4.95281 1.951523,9.60074 5.808192,13.44424 l 55.622894,55.43648 c 1.82219,1.84175 2.65971,3.79549 2.63384,6.14568 l 0.004,0.208 c 0.0527,2.43196 -0.75991,4.34571 -2.6267,6.20612 -1.78028,1.77598 -3.8094,2.65241 -6.30945,2.75552 -2.45814,0.0764 -4.25446,-0.58844 -5.82514,-2.15286 L 48.702551,136.2618 c -5.214172,-5.19459 -11.702899,-6.98745 -18.22998,-5.04881 -3.245701,0.9431 -6.135527,2.66307 -8.595446,5.11459 -3.83437,3.82127 -5.865275,8.41676 -5.875978,13.28957 -0.05619,4.95281 1.951524,9.60252 5.806409,13.4478 l 58.10689,57.90577 c 8.319842,8.29143 19.340421,11.9376 32.743314,10.83806 12.57967,-1.02043 23.02317,-5.5848 31.03441,-13.57313 7.51265,-7.4861 11.96423,-16.35175 13.28695,-26.42537 10.47206,-1.68264 19.29494,-6.04524 26.27512,-13.00158 4.01364,-3.99994 7.14963,-8.3972 9.40531,-13.16157 -14.15569,-0.39911 -28.23645,-4.00972 -41.05247,-10.83095 z" />
+ <path fill="url(#s)" d="m 196.96038,146.11854 c 0.10259,-12.84514 -4.43017,-23.98541 -13.50635,-33.1346 L 147.57292,77.225374 c -0.24349,-0.240885 -0.46469,-0.487992 -0.68678,-0.744877 -1.48416,-1.739529 -2.18788,-3.583056 -2.21018,-5.807022 -0.0259,-2.470184 0.84911,-4.508375 2.7605,-6.407902 1.91406,-1.909304 3.8531,-2.737735 6.36564,-2.684403 2.53662,0.024 4.62728,0.943097 6.57257,2.881734 l 60.59178,60.384846 12.11408,-12.06914 c 1.12203,-0.90755 1.95777,-1.76887 2.87823,-2.93418 5.91879,-7.51544 5.26947,-18.272609 -1.51003,-25.02895 L 187.20456,37.727314 c -9.19393,-9.157192 -20.36703,-13.776677 -33.16789,-13.7269 -12.94266,-0.05067 -24.14163,4.548375 -33.28739,13.662901 -9.02892,8.996307 -13.64015,19.93925 -13.7008,32.487501 l -0.004,0.14222 c -0.002,0.167998 -0.005,0.336884 -0.005,0.506659 -0.091,12.232701 4.10729,22.95787 12.48154,31.881285 0.40226,0.43022 0.80274,0.85777 1.22283,1.27821 l 35.75088,35.62612 c 1.88909,1.88174 2.71769,3.79638 2.69361,6.20968 l 0.003,0.20977 c 0.0527,2.43197 -0.76081,4.34571 -2.6276,6.20791 -1.44759,1.43909 -3.06286,2.27818 -4.9564,2.60262 12.81601,6.82123 26.89677,10.43184 41.05246,10.83362 2.80598,-5.92525 4.2509,-12.41848 4.29906,-19.43526 z" />
+ <path fill="#ffffff" d="m 215.68093,93.181574 c 2.84701,-2.838179 7.46359,-2.836401 10.30883,0 2.84433,2.834623 2.84612,7.435446 -0.002,10.270956 -2.84345,2.83818 -7.46271,2.83818 -10.30704,0 -2.84524,-2.83551 -2.84791,-7.435444 0,-10.270956 z" />
+ </g>
+ <g>
+ <g fill="#ffd43b">
+ <path d="m 271.62046,177.33313 c 0,4.1637 1.46619,7.71227 4.39858,10.64361 2.9324,2.93556 6.48202,4.40069 10.64783,4.40069 4.16475,0 7.71438,-1.46513 10.64572,-4.40069 2.93344,-2.93134 4.40069,-6.47991 4.40069,-10.64361 v -35.00332 c 0,-2.12345 0.7647,-3.95198 2.29514,-5.48032 1.53045,-1.53256 3.35793,-2.29831 5.48349,-2.29831 h 0.12745 c 2.16664,0 3.972,0.76575 5.41923,2.29831 1.53045,1.52834 2.2962,3.35793 2.2962,5.48032 v 35.00332 c 0,4.1637 1.4662,7.71227 4.40069,10.64361 2.93134,2.93556 6.47886,4.40069 10.64572,4.40069 4.20794,0 7.77758,-1.46513 10.70997,-4.40069 2.93345,-2.93134 4.40069,-6.47991 4.40069,-10.64361 v -35.00332 c 0,-2.12345 0.76365,-3.95198 2.29515,-5.48032 1.44302,-1.53256 3.25049,-2.29831 5.41924,-2.29831 h 0.1264 c 2.12661,0 3.95409,0.76575 5.48349,2.29831 1.48831,1.52834 2.23194,3.35793 2.23194,5.48032 v 35.00332 c 0,8.45696 -2.9977,15.68261 -8.98887,21.67484 -5.99329,5.99224 -13.21999,8.98993 -21.67695,8.98993 -10.11696,0 -17.7239,-3.35583 -22.82609,-10.07272 -5.14222,6.71689 -12.77234,10.07272 -22.88719,10.07272 -8.45801,0 -15.68471,-2.99769 -21.67695,-8.98993 C 258.9998,193.01574 256,185.79113 256,177.33313 v -35.00332 c 0,-2.12345 0.76575,-3.95198 2.29619,-5.48032 1.5294,-1.53256 3.33581,-2.29831 5.41924,-2.29831 h 0.1917 c 2.08238,0 3.88774,0.76575 5.42029,2.29831 1.52834,1.52834 2.29409,3.35793 2.29409,5.48032 v 35.00332 z" />
+ <path d="m 443.95216,155.97534 c 0.51085,1.06173 0.7668,2.14346 0.7668,3.25048 0,0.8932 -0.16957,1.78536 -0.50979,2.67854 -0.72363,1.99707 -2.08343,3.4422 -4.0805,4.33434 -5.95114,2.67854 -13.77085,6.20711 -23.46228,10.58463 -12.02871,5.43924 -19.08477,8.64866 -21.16715,9.62823 3.22943,4.07944 8.26737,6.11863 15.11067,6.11863 4.5471,0 8.67077,-1.33769 12.36786,-4.01625 3.61283,-2.63534 6.14286,-6.03541 7.58798,-10.20227 1.23342,-3.48538 3.69815,-5.22754 7.39524,-5.22754 2.6343,0 4.7388,1.10702 6.31138,3.31369 0.97746,1.36193 1.46619,2.78598 1.46619,4.27325 0,0.8932 -0.16958,1.80641 -0.50874,2.74069 -2.50791,7.26988 -6.90861,13.13573 -13.19681,17.59961 -6.37563,4.63031 -13.51702,6.94757 -21.4231,6.94757 -10.11591,0 -18.76563,-3.58965 -25.94809,-10.7742 -7.18351,-7.18353 -10.77527,-15.83219 -10.77527,-25.9502 0,-10.11591 3.59176,-18.76351 10.77527,-25.95019 7.18142,-7.1814 15.83218,-10.77422 25.94809,-10.77422 7.30885,0 13.98257,1.99916 20.01904,5.99223 5.99118,3.91512 10.43296,9.05524 13.32321,15.43298 z m -33.34331,-5.67836 c -5.86583,0 -10.86059,2.06343 -14.98322,6.18604 -4.08049,4.12473 -6.12073,9.11949 -6.12073,14.98322 v 0.44661 l 35.63951,-16.00282 c -3.1441,-3.73817 -7.99035,-5.61305 -14.53556,-5.61305 z" />
+ <path d="m 465.12141,108.41246 c 2.08238,0 3.88775,0.74469 5.41924,2.23194 1.53045,1.52834 2.29619,3.35793 2.29619,5.48244 v 24.79998 c 4.80202,-4.24796 11.83701,-6.37564 21.10185,-6.37564 10.11591,0 18.76561,3.59177 25.94914,10.77422 7.18245,7.18563 10.77527,15.83429 10.77527,25.9502 0,10.11695 -3.59282,18.76561 -10.77527,25.95018 C 512.70536,204.41035 504.05566,208 493.93869,208 c -10.11696,0 -18.74349,-3.56964 -25.88382,-10.71207 -7.18457,-7.09504 -10.7974,-15.70262 -10.83954,-25.82063 v -55.33941 c 0,-2.12556 0.76576,-3.95409 2.29621,-5.48243 1.52939,-1.48727 3.3358,-2.23196 5.41924,-2.23196 h 0.19063 z m 28.81622,41.88452 c -5.86477,0 -10.85953,2.06343 -14.9832,6.18604 -4.0784,4.12473 -6.11969,9.11949 -6.11969,14.98322 0,5.8237 2.04129,10.79633 6.11969,14.91896 4.12367,4.12263 9.11737,6.18393 14.9832,6.18393 5.82371,0 10.79635,-2.0613 14.92002,-6.18393 4.12051,-4.12263 6.18288,-9.09526 6.18288,-14.91896 0,-5.86267 -2.06237,-10.85849 -6.18288,-14.98322 -4.12367,-4.12261 -9.09525,-6.18604 -14.92002,-6.18604 z" />
+ </g>
+ <g fill="#306998">
+ <path d="m 561.26467,150.17375 c -1.87066,0 -3.44325,0.6362 -4.71773,1.9107 -1.27556,1.31872 -1.91281,2.89025 -1.91281,4.71773 0,2.5511 1.23237,4.46389 3.69919,5.73733 0.84898,0.46872 4.39859,1.53045 10.64678,3.18834 5.05795,1.44619 8.81825,3.33686 11.28296,5.67413 3.52963,3.35898 5.29179,8.14097 5.29179,14.34703 0,6.11862 -2.16769,11.36829 -6.50203,15.74581 -4.37857,4.33644 -9.62823,6.50308 -15.74791,6.50308 h -16.64005 c -2.08448,0 -3.88879,-0.76365 -5.42029,-2.29621 -1.53045,-1.44407 -2.2962,-3.25048 -2.2962,-5.41712 v -0.12953 c 0,-2.12345 0.76575,-3.95198 2.2962,-5.48243 1.53044,-1.53045 3.33581,-2.29619 5.42029,-2.29619 h 17.2773 c 1.8696,0 3.44324,-0.6362 4.71773,-1.9107 1.27556,-1.27554 1.91281,-2.84707 1.91281,-4.71774 0,-2.33937 -1.21131,-4.10366 -3.63285,-5.29073 -0.63723,-0.30018 -4.20898,-1.36192 -10.71208,-3.18834 -5.05899,-1.48725 -8.82139,-3.44535 -11.28611,-5.8669 -3.52856,-3.44217 -5.29075,-8.30949 -5.29075,-14.5998 0,-6.12073 2.16876,-11.34721 6.50414,-15.68261 4.37648,-4.37752 9.62718,-6.56839 15.74687,-6.56839 h 11.73166 c 2.12452,0 3.95304,0.76575 5.48349,2.29831 1.52939,1.52834 2.29515,3.35793 2.29515,5.48032 v 0.12745 c 0,2.16876 -0.76576,3.97622 -2.29515,5.4203 -1.53045,1.52834 -3.35897,2.29619 -5.48349,2.29619 z" />
+ <path d="m 630.5677,134.55118 c 10.1159,0 18.76456,3.59177 25.94912,10.77422 7.18246,7.18563 10.77422,15.83429 10.77422,25.9502 0,10.11695 -3.59176,18.76561 -10.77422,25.95018 C 649.33331,204.40929 640.6836,208 630.5677,208 c -10.11592,0 -18.76563,-3.58965 -25.9481,-10.77422 -7.18351,-7.18351 -10.77526,-15.83217 -10.77526,-25.95018 0,-10.11591 3.59175,-18.76352 10.77526,-25.9502 7.18247,-7.18245 15.83218,-10.77422 25.9481,-10.77422 z m 0,15.7458 c -5.86585,0 -10.86059,2.06343 -14.98322,6.18604 -4.08155,4.12473 -6.12178,9.11949 -6.12178,14.98322 0,5.8237 2.04023,10.79633 6.12178,14.91896 4.12263,4.12263 9.11632,6.18393 14.98322,6.18393 5.82264,0 10.79527,-2.0613 14.91896,-6.18393 4.12261,-4.12263 6.18393,-9.09526 6.18393,-14.91896 0,-5.86267 -2.06132,-10.85849 -6.18393,-14.98322 -4.12369,-4.12261 -9.09527,-6.18604 -14.91896,-6.18604 z" />
+ <path d="m 724.0345,136.27333 c 3.61388,1.14811 5.4203,3.61282 5.4203,7.39523 v 0.32125 c 0,2.59008 -1.04278,4.65138 -3.12516,6.18394 -1.44512,1.01854 -2.93343,1.52834 -4.46178,1.52834 -0.80894,0 -1.63789,-0.12745 -2.48684,-0.38235 -2.08344,-0.67938 -4.23007,-1.02276 -6.43883,-1.02276 -5.86585,0 -10.86165,2.06343 -14.98322,6.18604 -4.08154,4.12473 -6.12074,9.11949 -6.12074,14.98322 0,5.8237 2.0392,10.79633 6.12074,14.91896 4.12157,4.12263 9.11633,6.18393 14.98322,6.18393 2.20982,0 4.35645,-0.33915 6.43883,-1.02065 0.80683,-0.25489 1.61471,-0.38234 2.42259,-0.38234 1.57046,0 3.08197,0.5119 4.52709,1.53254 2.08238,1.52835 3.12514,3.61283 3.12514,6.24819 0,3.74027 -1.80746,6.205 -5.42028,7.39524 -3.56964,1.10491 -7.26673,1.65579 -11.09232,1.65579 -10.11591,0 -18.76562,-3.58965 -25.95019,-10.77423 -7.1814,-7.18351 -10.77422,-15.83217 -10.77422,-25.95018 0,-10.11592 3.59176,-18.76352 10.77422,-25.9502 7.18351,-7.1814 15.83322,-10.77422 25.95019,-10.77422 3.82348,0.002 7.52162,0.57827 11.09126,1.72426 z" />
+ <path d="m 748.19829,108.41246 c 2.08132,0 3.88773,0.74469 5.42029,2.23194 1.5294,1.52834 2.29514,3.35793 2.29514,5.48244 v 44.18284 h 2.42259 c 5.44031,0 10.17805,-1.80642 14.21852,-5.4203 3.95198,-3.61283 6.20394,-8.07461 6.75693,-13.38852 0.25491,-1.99705 1.10597,-3.63494 2.5511,-4.90837 1.44408,-1.35982 3.16517,-2.04131 5.16328,-2.04131 h 0.19066 c 2.25405,0 4.14578,0.85212 5.67517,2.55109 1.36087,1.48727 2.04026,3.20942 2.04026,5.16329 0,0.25491 -0.0222,0.53298 -0.0632,0.82895 -1.02064,10.66889 -5.10115,18.65923 -12.24147,23.97103 3.73922,2.29831 7.18246,6.18604 10.32973,11.66849 3.27155,5.65306 4.90944,11.75483 4.90944,18.29688 v 3.25471 c 0,2.16664 -0.7668,3.972 -2.29515,5.41713 -1.53255,1.53256 -3.33791,2.29619 -5.4203,2.29619 h -0.1917 c -2.08342,0 -3.88879,-0.76363 -5.41922,-2.29619 -1.53045,-1.44408 -2.29514,-3.25049 -2.29514,-5.41713 v -3.25471 c -0.0442,-5.77629 -2.10555,-10.73102 -6.185,-14.85575 -4.12367,-4.07944 -9.09736,-6.11863 -14.91896,-6.11863 h -5.22754 v 24.22804 c 0,2.16664 -0.76574,3.97199 -2.29514,5.41712 -1.5315,1.53256 -3.33897,2.29621 -5.42028,2.29621 h -0.19381 c -2.08237,0 -3.88668,-0.76365 -5.41819,-2.29621 -1.52939,-1.44407 -2.29515,-3.25048 -2.29515,-5.41712 v -84.15879 c 0,-2.12556 0.76576,-3.95408 2.29515,-5.48243 1.53045,-1.48727 3.33582,-2.23195 5.41819,-2.23195 h 0.19381 z" />
+ <path d="m 876.85801,155.97534 c 0.5098,1.06173 0.76469,2.14346 0.76469,3.25048 0,0.8932 -0.17063,1.78536 -0.50874,2.67854 -0.72362,1.99707 -2.08342,3.4422 -4.08049,4.33434 -5.95115,2.67854 -13.77191,6.20711 -23.46229,10.58463 -12.02869,5.43924 -19.08476,8.64866 -21.16715,9.62823 3.22838,4.07944 8.26632,6.11863 15.11066,6.11863 4.54606,0 8.66973,-1.33769 12.36893,-4.01625 3.61176,-2.63534 6.14075,-6.03541 7.58587,-10.20227 1.23238,-3.48538 3.6992,-5.22754 7.39524,-5.22754 2.63536,0 4.73985,1.10702 6.31348,3.31369 0.97536,1.36193 1.46515,2.78598 1.46515,4.27325 0,0.8932 -0.16958,1.80641 -0.5098,2.74069 -2.50791,7.26988 -6.9065,13.13573 -13.19681,17.59961 -6.37563,4.63031 -13.51598,6.94757 -21.42206,6.94757 -10.1159,0 -18.76561,-3.58965 -25.94808,-10.7742 -7.18351,-7.18353 -10.77526,-15.83219 -10.77526,-25.9502 0,-10.11591 3.59175,-18.76351 10.77526,-25.95019 7.18141,-7.1814 15.83218,-10.77422 25.94808,-10.77422 7.30887,0 13.98364,1.99916 20.01906,5.99223 5.99223,3.91512 10.43294,9.05524 13.32426,15.43298 z m -33.34436,-5.67836 c -5.86479,0 -10.86059,2.06343 -14.98322,6.18604 -4.08049,4.12473 -6.12074,9.11949 -6.12074,14.98322 v 0.44661 l 35.63952,-16.00282 c -3.14516,-3.73817 -7.99034,-5.61305 -14.53556,-5.61305 z" />
+ <path d="m 898.02411,108.41246 c 2.08238,0 3.88879,0.74469 5.42028,2.23194 1.52939,1.52834 2.29515,3.35793 2.29515,5.48244 v 18.42434 h 9.56398 c 2.08237,0 3.88772,0.76575 5.42028,2.29831 1.5294,1.52834 2.29304,3.35793 2.29304,5.48032 v 0.12745 c 0,2.16876 -0.76364,3.97621 -2.29304,5.4203 -1.5315,1.52834 -3.33791,2.29619 -5.42028,2.29619 h -9.56398 v 37.80405 c 0,1.23446 0.42343,2.27724 1.27554,3.12514 0.85002,0.85212 1.9128,1.27555 3.1873,1.27555 h 5.10114 c 2.08237,0 3.88772,0.76574 5.42028,2.29619 1.5294,1.53045 2.29304,3.35898 2.29304,5.48243 v 0.12954 c 0,2.16664 -0.76364,3.97199 -2.29304,5.41711 C 919.1923,207.23635 917.38589,208 915.30352,208 h -5.10114 c -5.52563,0 -10.26442,-1.95387 -14.21746,-5.86478 -3.91196,-3.95198 -5.86479,-8.67078 -5.86479,-14.15532 v -71.85095 c 0,-2.12558 0.7647,-3.9541 2.29515,-5.48245 1.53045,-1.48725 3.33686,-2.23193 5.41924,-2.23193 h 0.18959 z" />
+ <path d="m 951.70877,150.17375 c -1.87066,0 -3.44324,0.6362 -4.71773,1.9107 -1.27556,1.31872 -1.91281,2.89025 -1.91281,4.71773 0,2.5511 1.23238,4.46389 3.69711,5.73733 0.8521,0.46872 4.40067,1.53045 10.64886,3.18834 5.05691,1.44619 8.81825,3.33686 11.28402,5.67413 3.52751,3.35898 5.2918,8.14097 5.2918,14.34703 0,6.11862 -2.16876,11.36829 -6.5031,15.74581 -4.37752,4.33644 -9.62822,6.50308 -15.74789,6.50308 h -16.64007 c -2.08342,0 -3.88879,-0.76365 -5.42028,-2.29621 -1.53045,-1.44407 -2.2941,-3.25048 -2.2941,-5.41712 v -0.12953 c 0,-2.12345 0.76365,-3.95198 2.2941,-5.48243 1.53045,-1.53045 3.33686,-2.29619 5.42028,-2.29619 h 17.2773 c 1.86962,0 3.4443,-0.6362 4.71775,-1.9107 1.27554,-1.27554 1.91279,-2.84707 1.91279,-4.71774 0,-2.33937 -1.2113,-4.10366 -3.63283,-5.29073 -0.63936,-0.30018 -4.209,-1.36192 -10.71208,-3.18834 -5.05901,-1.48725 -8.8214,-3.44535 -11.28613,-5.8669 -3.52856,-3.44217 -5.29073,-8.30949 -5.29073,-14.5998 0,-6.12073 2.16875,-11.34721 6.50413,-15.68261 4.37647,-4.37752 9.62718,-6.56839 15.74791,-6.56839 h 11.73063 c 2.1266,0 3.95304,0.76575 5.48243,2.29831 1.53045,1.52834 2.29514,3.35793 2.29514,5.48032 v 0.12745 c 0,2.16876 -0.76469,3.97622 -2.29514,5.4203 -1.52939,1.52834 -3.35687,2.29619 -5.48243,2.29619 z" />
+ </g>
+ </g>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/websockets/logo/icon.svg b/testing/web-platform/tests/tools/third_party/websockets/logo/icon.svg
new file mode 100644
index 0000000000..cb760940aa
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/logo/icon.svg
@@ -0,0 +1,15 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="256" height="256" viewBox="0 0 256 256">
+ <linearGradient id="w" x1="0" y1="0" x2="0.6667" y2="0.6667">
+ <stop offset="0%" stop-color="#ffe873" />
+ <stop offset="100%" stop-color="#ffd43b" />
+ </linearGradient>
+ <linearGradient id="s" x1="0" y1="0" x2="0.6667" y2="0.6667">
+ <stop offset="0%" stop-color="#5a9fd4" />
+ <stop offset="100%" stop-color="#306998" />
+ </linearGradient>
+ <g>
+ <path fill="url(#w)" d="m 151.60708,154.81618 c -0.43704,0.0747 -0.88656,0.12978 -1.35572,0.14933 -2.45813,0.0764 -4.25357,-0.58665 -5.82335,-2.15107 l -8.89246,-8.85942 -11.23464,-11.19805 -36.040757,-35.919452 c -3.43568,-3.42217 -7.332485,-5.347474 -11.589626,-5.723468 -2.229803,-0.198219 -4.473877,0.03111 -6.640354,0.675545 -3.242133,0.944875 -6.135526,2.664848 -8.593662,5.116366 -3.834369,3.819499 -5.86349,8.414979 -5.875977,13.287799 -0.06065,4.95281 1.951523,9.60074 5.808192,13.44424 l 55.622894,55.43648 c 1.82219,1.84175 2.65971,3.79549 2.63384,6.14568 l 0.004,0.208 c 0.0527,2.43196 -0.75991,4.34571 -2.6267,6.20612 -1.78028,1.77598 -3.8094,2.65241 -6.30945,2.75552 -2.45814,0.0764 -4.25446,-0.58844 -5.82514,-2.15286 L 48.702551,136.2618 c -5.214172,-5.19459 -11.702899,-6.98745 -18.22998,-5.04881 -3.245701,0.9431 -6.135527,2.66307 -8.595446,5.11459 -3.83437,3.82127 -5.865275,8.41676 -5.875978,13.28957 -0.05619,4.95281 1.951524,9.60252 5.806409,13.4478 l 58.10689,57.90577 c 8.319842,8.29143 19.340421,11.9376 32.743314,10.83806 12.57967,-1.02043 23.02317,-5.5848 31.03441,-13.57313 7.51265,-7.4861 11.96423,-16.35175 13.28695,-26.42537 10.47206,-1.68264 19.29494,-6.04524 26.27512,-13.00158 4.01364,-3.99994 7.14963,-8.3972 9.40531,-13.16157 -14.15569,-0.39911 -28.23645,-4.00972 -41.05247,-10.83095 z" />
+ <path fill="url(#s)" d="m 196.96038,146.11854 c 0.10259,-12.84514 -4.43017,-23.98541 -13.50635,-33.1346 L 147.57292,77.225374 c -0.24349,-0.240885 -0.46469,-0.487992 -0.68678,-0.744877 -1.48416,-1.739529 -2.18788,-3.583056 -2.21018,-5.807022 -0.0259,-2.470184 0.84911,-4.508375 2.7605,-6.407902 1.91406,-1.909304 3.8531,-2.737735 6.36564,-2.684403 2.53662,0.024 4.62728,0.943097 6.57257,2.881734 l 60.59178,60.384846 12.11408,-12.06914 c 1.12203,-0.90755 1.95777,-1.76887 2.87823,-2.93418 5.91879,-7.51544 5.26947,-18.272609 -1.51003,-25.02895 L 187.20456,37.727314 c -9.19393,-9.157192 -20.36703,-13.776677 -33.16789,-13.7269 -12.94266,-0.05067 -24.14163,4.548375 -33.28739,13.662901 -9.02892,8.996307 -13.64015,19.93925 -13.7008,32.487501 l -0.004,0.14222 c -0.002,0.167998 -0.005,0.336884 -0.005,0.506659 -0.091,12.232701 4.10729,22.95787 12.48154,31.881285 0.40226,0.43022 0.80274,0.85777 1.22283,1.27821 l 35.75088,35.62612 c 1.88909,1.88174 2.71769,3.79638 2.69361,6.20968 l 0.003,0.20977 c 0.0527,2.43197 -0.76081,4.34571 -2.6276,6.20791 -1.44759,1.43909 -3.06286,2.27818 -4.9564,2.60262 12.81601,6.82123 26.89677,10.43184 41.05246,10.83362 2.80598,-5.92525 4.2509,-12.41848 4.29906,-19.43526 z" />
+ <path fill="#ffffff" d="m 215.68093,93.181574 c 2.84701,-2.838179 7.46359,-2.836401 10.30883,0 2.84433,2.834623 2.84612,7.435446 -0.002,10.270956 -2.84345,2.83818 -7.46271,2.83818 -10.30704,0 -2.84524,-2.83551 -2.84791,-7.435444 0,-10.270956 z" />
+ </g>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/websockets/logo/old.svg b/testing/web-platform/tests/tools/third_party/websockets/logo/old.svg
new file mode 100644
index 0000000000..a073139e33
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/logo/old.svg
@@ -0,0 +1,14 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="360" height="120" viewBox="0 0 21 7">
+ <linearGradient id="w" x1="0" y1="0" x2="1" y2="1">
+ <stop offset="0%" stop-color="#5a9fd4" />
+ <stop offset="100%" stop-color="#306998" />
+ </linearGradient>
+ <linearGradient id="s" x1="0" y1="0" x2="1" y2="1">
+ <stop offset="0%" stop-color="#ffe873" />
+ <stop offset="100%" stop-color="#ffd43b" />
+ </linearGradient>
+ <polyline fill="none" stroke="url(#w)" stroke-linecap="round" stroke-linejoin="round"
+ points="1,1 1,5 5,5 5,1 5,5 9,5 9,1"/>
+ <polyline fill="none" stroke="url(#s)" stroke-linecap="round" stroke-linejoin="round"
+ points="19,1 11,1 11,3 19,3 19,5 11,5"/>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/websockets/logo/tidelift.png b/testing/web-platform/tests/tools/third_party/websockets/logo/tidelift.png
new file mode 100644
index 0000000000..317dc4d985
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/logo/tidelift.png
Binary files differ
diff --git a/testing/web-platform/tests/tools/third_party/websockets/logo/vertical.svg b/testing/web-platform/tests/tools/third_party/websockets/logo/vertical.svg
new file mode 100644
index 0000000000..b07fb22387
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/logo/vertical.svg
@@ -0,0 +1,31 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="480" height="320" viewBox="0 0 480 320">
+ <linearGradient id="w" x1="0.2333" y1="0" x2="0.5889" y2="0.5333">
+ <stop offset="0%" stop-color="#ffe873" />
+ <stop offset="100%" stop-color="#ffd43b" />
+ </linearGradient>
+ <linearGradient id="s" x1="0.2333" y1="0" x2="0.5889" y2="0.5333">
+ <stop offset="0%" stop-color="#5a9fd4" />
+ <stop offset="100%" stop-color="#306998" />
+ </linearGradient>
+ <g>
+ <path fill="url(#w)" d="m 263.40708,146.81618 c -0.43704,0.0747 -0.88656,0.12978 -1.35572,0.14933 -2.45813,0.0764 -4.25357,-0.58665 -5.82335,-2.15107 l -8.89246,-8.85942 -11.23464,-11.19805 -36.04076,-35.919454 c -3.43568,-3.42217 -7.33248,-5.347474 -11.58962,-5.723468 -2.22981,-0.198219 -4.47388,0.03111 -6.64036,0.675545 -3.24213,0.944875 -6.13552,2.664848 -8.59366,5.116366 -3.83437,3.819499 -5.86349,8.414979 -5.87598,13.287801 -0.0607,4.95281 1.95153,9.60074 5.8082,13.44424 l 55.62289,55.43648 c 1.82219,1.84175 2.65971,3.79549 2.63384,6.14568 l 0.004,0.208 c 0.0527,2.43196 -0.75991,4.34571 -2.6267,6.20612 -1.78028,1.77598 -3.8094,2.65241 -6.30945,2.75552 -2.45814,0.0764 -4.25446,-0.58844 -5.82514,-2.15286 L 160.50255,128.2618 c -5.21417,-5.19459 -11.7029,-6.98745 -18.22998,-5.04881 -3.2457,0.9431 -6.13553,2.66307 -8.59545,5.11459 -3.83437,3.82127 -5.86527,8.41676 -5.87597,13.28957 -0.0562,4.95281 1.95152,9.60252 5.80641,13.4478 l 58.10689,57.90577 c 8.31984,8.29143 19.34042,11.9376 32.74331,10.83806 12.57967,-1.02043 23.02317,-5.5848 31.03441,-13.57313 7.51265,-7.4861 11.96423,-16.35175 13.28695,-26.42537 10.47206,-1.68264 19.29494,-6.04524 26.27512,-13.00158 4.01364,-3.99994 7.14963,-8.3972 9.40531,-13.16157 -14.15569,-0.39911 -28.23645,-4.00972 -41.05247,-10.83095 z" />
+ <path fill="url(#s)" d="m 308.76038,138.11854 c 0.10259,-12.84514 -4.43017,-23.98541 -13.50635,-33.1346 L 259.37292,69.225372 c -0.24349,-0.240885 -0.46469,-0.487992 -0.68678,-0.744877 -1.48416,-1.739529 -2.18788,-3.583056 -2.21018,-5.807022 -0.0259,-2.470184 0.84911,-4.508375 2.7605,-6.407902 1.91406,-1.909304 3.8531,-2.737735 6.36564,-2.684403 2.53662,0.024 4.62728,0.943097 6.57257,2.881734 l 60.59178,60.384848 12.11408,-12.06914 c 1.12203,-0.90755 1.95777,-1.76887 2.87823,-2.93418 5.91879,-7.515442 5.26947,-18.272611 -1.51003,-25.028952 L 299.00456,29.727312 c -9.19393,-9.157192 -20.36703,-13.776677 -33.16789,-13.7269 -12.94266,-0.05067 -24.14163,4.548375 -33.28739,13.662901 -9.02892,8.996307 -13.64015,19.93925 -13.7008,32.487501 l -0.004,0.14222 c -0.002,0.167998 -0.005,0.336884 -0.005,0.506659 -0.091,12.232701 4.10729,22.95787 12.48154,31.881285 0.40226,0.43022 0.80274,0.85777 1.22283,1.27821 l 35.75088,35.626122 c 1.88909,1.88174 2.71769,3.79638 2.69361,6.20968 l 0.003,0.20977 c 0.0527,2.43197 -0.76081,4.34571 -2.6276,6.20791 -1.44759,1.43909 -3.06286,2.27818 -4.9564,2.60262 12.81601,6.82123 26.89677,10.43184 41.05246,10.83362 2.80598,-5.92525 4.2509,-12.41848 4.29906,-19.43526 z" />
+ <path fill="#ffffff" d="m 327.48093,85.181572 c 2.84701,-2.838179 7.46359,-2.836401 10.30883,0 2.84433,2.834623 2.84612,7.435446 -0.002,10.270956 -2.84345,2.83818 -7.46271,2.83818 -10.30704,0 -2.84524,-2.83551 -2.84791,-7.435444 0,-10.270956 z" />
+ </g>
+ <g>
+ <g fill="#ffd43b">
+ <path d="m 25.719398,284.91839 c 0,2.59075 0.912299,4.79875 2.736898,6.62269 1.824599,1.82657 4.033255,2.73821 6.625313,2.73821 2.591402,0 4.800058,-0.91164 6.624002,-2.73821 1.825254,-1.82394 2.738209,-4.03194 2.738209,-6.62269 v -21.77984 c 0,-1.32126 0.475811,-2.45901 1.42809,-3.40998 0.952278,-0.95359 2.089375,-1.43006 3.411947,-1.43006 h 0.0793 c 1.348132,0 2.471467,0.47647 3.371969,1.43006 0.952278,0.95097 1.428745,2.08938 1.428745,3.40998 v 21.77984 c 0,2.59075 0.912299,4.79875 2.738209,6.62269 1.823944,1.82657 4.031289,2.73821 6.624002,2.73821 2.618274,0 4.839382,-0.91164 6.663981,-2.73821 1.825254,-1.82394 2.738209,-4.03194 2.738209,-6.62269 v -21.77984 c 0,-1.32126 0.475156,-2.45901 1.42809,-3.40998 0.897881,-0.95359 2.022526,-1.43006 3.371969,-1.43006 h 0.07865 c 1.323228,0 2.460325,0.47647 3.411948,1.43006 0.926062,0.95097 1.388766,2.08938 1.388766,3.40998 v 21.77984 c 0,5.26211 -1.865233,9.75807 -5.593077,13.48657 -3.729156,3.7285 -8.22577,5.59373 -13.487876,5.59373 -6.294998,0 -11.028207,-2.08807 -14.202904,-6.26747 -3.199602,4.1794 -7.94723,6.26747 -14.240916,6.26747 -5.262763,0 -9.759377,-1.86523 -13.487876,-5.59373 C 17.866544,294.67646 16,290.18115 16,284.91839 v -21.77984 c 0,-1.32126 0.476467,-2.45901 1.428745,-3.40998 0.951623,-0.95359 2.075612,-1.43006 3.371969,-1.43006 h 0.11928 c 1.295702,0 2.419036,0.47647 3.372625,1.43006 0.950967,0.95097 1.427434,2.08938 1.427434,3.40998 v 21.77984 z" />
+ <path d="m 132.94801,271.6291 c 0.31786,0.66063 0.47712,1.33371 0.47712,2.02252 0,0.55577 -0.10551,1.11089 -0.3172,1.66665 -0.45026,1.24262 -1.29636,2.14181 -2.53898,2.69692 -3.70293,1.66665 -8.56853,3.8622 -14.59875,6.58599 -7.48453,3.38442 -11.87497,5.38139 -13.17067,5.9909 2.00942,2.53832 5.14414,3.80715 9.40219,3.80715 2.82931,0 5.39515,-0.83234 7.69556,-2.499 2.24798,-1.63977 3.82222,-3.75537 4.72141,-6.34808 0.76746,-2.16868 2.30107,-3.25269 4.60148,-3.25269 1.63912,0 2.94859,0.68881 3.92708,2.06185 0.6082,0.84742 0.9123,1.7335 0.9123,2.65891 0,0.55577 -0.10552,1.12399 -0.31655,1.70532 -1.56048,4.52348 -4.29869,8.17334 -8.21135,10.95087 -3.96706,2.88108 -8.41059,4.32293 -13.32993,4.32293 -6.29434,0 -11.67639,-2.23356 -16.145474,-6.70395 -4.469743,-4.46975 -6.704615,-9.85114 -6.704615,-16.14679 0,-6.29434 2.234872,-11.67507 6.704615,-16.14678 4.468434,-4.46843 9.851134,-6.70396 16.145474,-6.70396 4.54773,0 8.70027,1.24392 12.45629,3.7285 3.72785,2.43607 6.49162,5.63437 8.29,9.60274 z m -20.74695,-3.5332 c -3.64985,0 -6.7577,1.28391 -9.32289,3.84909 -2.53897,2.5665 -3.808452,5.67435 -3.808452,9.32289 v 0.27789 l 22.175692,-9.95731 c -1.95633,-2.32597 -4.97177,-3.49256 -9.04435,-3.49256 z" />
+ <path d="m 146.11999,242.03442 c 1.2957,0 2.41904,0.46336 3.37197,1.38876 0.95228,0.95097 1.42874,2.08938 1.42874,3.4113 v 15.4311 c 2.98792,-2.64318 7.36525,-3.96707 13.13004,-3.96707 6.29434,0 11.67638,2.23488 16.14613,6.70396 4.46908,4.47106 6.70461,9.85245 6.70461,16.14679 0,6.29499 -2.23553,11.67638 -6.70461,16.14678 -4.46909,4.4704 -9.85113,6.70396 -16.14613,6.70396 -6.295,0 -11.66262,-2.22111 -16.10549,-6.66529 -4.4704,-4.41469 -6.71838,-9.77052 -6.7446,-16.06617 v -34.43341 c 0,-1.32257 0.47647,-2.46032 1.42875,-3.41129 0.95162,-0.92541 2.07561,-1.38877 3.37197,-1.38877 h 0.11862 z m 17.93009,26.06148 c -3.64919,0 -6.75704,1.28391 -9.32288,3.84909 -2.53767,2.5665 -3.80781,5.67435 -3.80781,9.32289 0,3.62364 1.27014,6.71772 3.80781,9.28291 2.56584,2.56519 5.67303,3.84778 9.32288,3.84778 3.62364,0 6.71773,-1.28259 9.28357,-3.84778 2.56387,-2.56519 3.84712,-5.65927 3.84712,-9.28291 0,-3.64788 -1.28325,-6.75639 -3.84712,-9.32289 -2.56584,-2.56518 -5.65927,-3.84909 -9.28357,-3.84909 z" />
+ </g>
+ <g fill="#306998">
+ <path d="m 205.94246,268.01922 c -1.16397,0 -2.14247,0.39586 -2.93548,1.18888 -0.79368,0.82054 -1.19019,1.79838 -1.19019,2.93548 0,1.58735 0.76681,2.77753 2.30172,3.56989 0.52825,0.29165 2.7369,0.95228 6.62466,1.98386 3.14717,0.89985 5.48691,2.07627 7.02051,3.53057 2.19621,2.09003 3.29267,5.06549 3.29267,8.92704 0,3.80714 -1.34879,7.0736 -4.04571,9.79739 -2.72444,2.69823 -5.9909,4.04636 -9.7987,4.04636 h -10.35381 c -1.29701,0 -2.41969,-0.47516 -3.37262,-1.42875 -0.95228,-0.89853 -1.42875,-2.02252 -1.42875,-3.37065 v -0.0806 c 0,-1.32126 0.47647,-2.45901 1.42875,-3.41129 0.95227,-0.95228 2.07561,-1.42874 3.37262,-1.42874 h 10.75032 c 1.16331,0 2.14246,-0.39586 2.93548,-1.18888 0.79368,-0.79367 1.19019,-1.77151 1.19019,-2.93548 0,-1.45561 -0.7537,-2.55339 -2.26044,-3.29201 -0.3965,-0.18678 -2.61892,-0.84742 -6.66529,-1.98386 -3.14782,-0.9254 -5.48887,-2.14377 -7.02247,-3.65051 -2.19555,-2.1418 -3.29202,-5.17035 -3.29202,-9.08432 0,-3.80846 1.34945,-7.06049 4.04702,-9.75807 2.72314,-2.72379 5.99024,-4.087 9.79805,-4.087 h 7.2997 c 1.32192,0 2.45967,0.47647 3.41195,1.43006 0.95162,0.95097 1.42809,2.08938 1.42809,3.40998 v 0.0793 c 0,1.34945 -0.47647,2.47409 -1.42809,3.37263 -0.95228,0.95097 -2.09003,1.42874 -3.41195,1.42874 z" />
+ <path d="m 249.06434,258.29851 c 6.29434,0 11.67573,2.23488 16.14612,6.70396 4.46909,4.47106 6.70396,9.85245 6.70396,16.14679 0,6.29499 -2.23487,11.67638 -6.70396,16.14678 -4.46974,4.46974 -9.85178,6.70396 -16.14612,6.70396 -6.29435,0 -11.67639,-2.23356 -16.14548,-6.70396 -4.46974,-4.46974 -6.70461,-9.85113 -6.70461,-16.14678 0,-6.29434 2.23487,-11.67508 6.70461,-16.14679 4.46909,-4.46908 9.85113,-6.70396 16.14548,-6.70396 z m 0,9.79739 c -3.64986,0 -6.7577,1.28391 -9.32289,3.84909 -2.53963,2.5665 -3.80911,5.67435 -3.80911,9.32289 0,3.62364 1.26948,6.71772 3.80911,9.28291 2.56519,2.56519 5.67238,3.84778 9.32289,3.84778 3.62298,0 6.71706,-1.28259 9.28291,-3.84778 2.56518,-2.56519 3.84778,-5.65927 3.84778,-9.28291 0,-3.64788 -1.2826,-6.75639 -3.84778,-9.32289 -2.56585,-2.56518 -5.65928,-3.84909 -9.28291,-3.84909 z" />
+ <path d="m 307.22146,259.37007 c 2.24864,0.71438 3.37263,2.24798 3.37263,4.60148 v 0.19989 c 0,1.6116 -0.64884,2.89419 -1.94454,3.84778 -0.89919,0.63376 -1.82525,0.95097 -2.77622,0.95097 -0.50334,0 -1.01913,-0.0793 -1.54737,-0.23791 -1.29636,-0.42272 -2.63204,-0.63638 -4.00638,-0.63638 -3.64986,0 -6.75836,1.28391 -9.32289,3.84909 -2.53963,2.5665 -3.80846,5.67435 -3.80846,9.32289 0,3.62364 1.26883,6.71772 3.80846,9.28291 2.56453,2.56519 5.67238,3.84778 9.32289,3.84778 1.375,0 2.71068,-0.21103 4.00638,-0.63507 0.50203,-0.1586 1.00471,-0.2379 1.50739,-0.2379 0.97718,0 1.91767,0.31851 2.81686,0.95358 1.2957,0.95097 1.94453,2.24798 1.94453,3.88776 0,2.32728 -1.12464,3.86089 -3.37262,4.60148 -2.22111,0.6875 -4.52152,1.03027 -6.90189,1.03027 -6.29434,0 -11.67638,-2.23356 -16.14678,-6.70396 -4.46843,-4.46974 -6.70396,-9.85113 -6.70396,-16.14678 0,-6.29435 2.23487,-11.67508 6.70396,-16.14679 4.46974,-4.46843 9.85178,-6.70396 16.14678,-6.70396 2.37906,0.001 4.68012,0.35981 6.90123,1.07287 z" />
+ <path d="m 322.25671,242.03442 c 1.29504,0 2.41903,0.46336 3.37262,1.38876 0.95163,0.95097 1.42809,2.08938 1.42809,3.4113 v 27.49154 h 1.50739 c 3.38508,0 6.33301,-1.12399 8.84708,-3.37263 2.45901,-2.24798 3.86023,-5.0242 4.20431,-8.33063 0.15861,-1.24261 0.68816,-2.26174 1.58735,-3.0541 0.89854,-0.84611 1.96944,-1.27015 3.21271,-1.27015 h 0.11863 c 1.40252,0 2.5796,0.53021 3.53122,1.58735 0.84676,0.92541 1.26949,1.99697 1.26949,3.21271 0,0.15861 -0.0138,0.33163 -0.0393,0.51579 -0.63507,6.63842 -3.17405,11.61019 -7.61692,14.91531 2.32663,1.43006 4.46909,3.84909 6.42739,7.26039 2.03563,3.51746 3.05476,7.31412 3.05476,11.38473 v 2.02515 c 0,1.34813 -0.47712,2.47147 -1.42809,3.37066 -0.95359,0.95359 -2.07692,1.42874 -3.37263,1.42874 h -0.11928 c -1.29635,0 -2.41969,-0.47515 -3.37196,-1.42874 -0.95228,-0.89854 -1.42809,-2.02253 -1.42809,-3.37066 v -2.02515 c -0.0275,-3.59414 -1.31012,-6.67708 -3.84844,-9.24358 -2.56584,-2.53832 -5.66058,-3.80715 -9.28291,-3.80715 h -3.25269 v 15.07523 c 0,1.34813 -0.47646,2.47146 -1.42809,3.37065 -0.95293,0.95359 -2.07758,1.42875 -3.37262,1.42875 h -0.12059 c -1.2957,0 -2.41838,-0.47516 -3.37132,-1.42875 -0.95162,-0.89853 -1.42809,-2.02252 -1.42809,-3.37065 v -52.36547 c 0,-1.32257 0.47647,-2.46032 1.42809,-3.41129 0.95228,-0.92541 2.07562,-1.38877 3.37132,-1.38877 h 0.12059 z" />
+ <path d="m 402.31164,271.6291 c 0.31721,0.66063 0.47581,1.33371 0.47581,2.02252 0,0.55577 -0.10617,1.11089 -0.31655,1.66665 -0.45025,1.24262 -1.29635,2.14181 -2.53897,2.69692 -3.70294,1.66665 -8.56919,3.8622 -14.59876,6.58599 -7.48452,3.38442 -11.87496,5.38139 -13.17067,5.9909 2.00877,2.53832 5.14349,3.80715 9.40219,3.80715 2.82866,0 5.3945,-0.83234 7.69622,-2.499 2.24732,-1.63977 3.82091,-3.75537 4.7201,-6.34808 0.76681,-2.16868 2.30172,-3.25269 4.60148,-3.25269 1.63978,0 2.94924,0.68881 3.92839,2.06185 0.60689,0.84742 0.91165,1.7335 0.91165,2.65891 0,0.55577 -0.10552,1.12399 -0.31721,1.70532 -1.56048,4.52348 -4.29738,8.17334 -8.21135,10.95087 -3.96706,2.88108 -8.40994,4.32293 -13.32928,4.32293 -6.29434,0 -11.67638,-2.23356 -16.14547,-6.70395 -4.46974,-4.46975 -6.70461,-9.85114 -6.70461,-16.14679 0,-6.29434 2.23487,-11.67507 6.70461,-16.14678 4.46843,-4.46843 9.85113,-6.70396 16.14547,-6.70396 4.54774,0 8.70093,1.24392 12.4563,3.7285 3.7285,2.43607 6.49161,5.63437 8.29065,9.60274 z m -20.7476,-3.5332 c -3.6492,0 -6.7577,1.28391 -9.32289,3.84909 -2.53897,2.5665 -3.80846,5.67435 -3.80846,9.32289 v 0.27789 l 22.1757,-9.95731 c -1.95699,-2.32597 -4.97177,-3.49256 -9.04435,-3.49256 z" />
+ <path d="m 415.48166,242.03442 c 1.2957,0 2.41969,0.46336 3.37262,1.38876 0.95162,0.95097 1.42809,2.08938 1.42809,3.4113 v 11.46403 h 5.95092 c 1.2957,0 2.41903,0.47647 3.37262,1.43006 0.95163,0.95097 1.42678,2.08938 1.42678,3.40998 v 0.0793 c 0,1.34945 -0.47515,2.47409 -1.42678,3.37263 -0.95293,0.95097 -2.07692,1.42874 -3.37262,1.42874 h -5.95092 v 23.52252 c 0,0.76811 0.26347,1.41695 0.79367,1.94453 0.5289,0.53021 1.19019,0.79368 1.98321,0.79368 h 3.17404 c 1.2957,0 2.41903,0.47646 3.37262,1.42874 0.95163,0.95228 1.42678,2.09003 1.42678,3.41129 v 0.0806 c 0,1.34813 -0.47515,2.47146 -1.42678,3.37065 C 428.65298,303.52484 427.52899,304 426.23329,304 h -3.17404 c -3.43817,0 -6.38675,-1.21574 -8.84642,-3.6492 -2.43411,-2.45901 -3.6492,-5.39515 -3.6492,-8.80775 v -44.70726 c 0,-1.32258 0.47581,-2.46033 1.42809,-3.4113 0.95228,-0.9254 2.07627,-1.38876 3.37197,-1.38876 h 0.11797 z" />
+ <path d="m 448.88545,268.01922 c -1.16397,0 -2.14246,0.39586 -2.93548,1.18888 -0.79368,0.82054 -1.19019,1.79838 -1.19019,2.93548 0,1.58735 0.76681,2.77753 2.30042,3.56989 0.5302,0.29165 2.7382,0.95228 6.62596,1.98386 3.14652,0.89985 5.48691,2.07627 7.02117,3.53057 2.19489,2.09003 3.29267,5.06549 3.29267,8.92704 0,3.80714 -1.34945,7.0736 -4.04637,9.79739 -2.72379,2.69823 -5.99089,4.04636 -9.79869,4.04636 h -10.35382 c -1.29635,0 -2.41969,-0.47516 -3.37262,-1.42875 -0.95228,-0.89853 -1.42744,-2.02252 -1.42744,-3.37065 v -0.0806 c 0,-1.32126 0.47516,-2.45901 1.42744,-3.41129 0.95228,-0.95228 2.07627,-1.42874 3.37262,-1.42874 h 10.75032 c 1.16332,0 2.14312,-0.39586 2.93549,-1.18888 0.79367,-0.79367 1.19018,-1.77151 1.19018,-2.93548 0,-1.45561 -0.7537,-2.55339 -2.26043,-3.29201 -0.39782,-0.18678 -2.61893,-0.84742 -6.66529,-1.98386 -3.14783,-0.9254 -5.48887,-2.14377 -7.02248,-3.65051 -2.19555,-2.1418 -3.29201,-5.17035 -3.29201,-9.08432 0,-3.80846 1.34944,-7.06049 4.04701,-9.75807 2.72314,-2.72379 5.99025,-4.087 9.7987,-4.087 h 7.29906 c 1.32322,0 2.45967,0.47647 3.41129,1.43006 0.95228,0.95097 1.42809,2.08938 1.42809,3.40998 v 0.0793 c 0,1.34945 -0.47581,2.47409 -1.42809,3.37263 -0.95162,0.95097 -2.08872,1.42874 -3.41129,1.42874 z" />
+ </g>
+ </g>
+</svg>
diff --git a/testing/web-platform/tests/tools/third_party/websockets/performance/mem_client.py b/testing/web-platform/tests/tools/third_party/websockets/performance/mem_client.py
new file mode 100644
index 0000000000..890216edf8
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/performance/mem_client.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+
+import asyncio
+import statistics
+import tracemalloc
+
+import websockets
+from websockets.extensions import permessage_deflate
+
+
+CLIENTS = 10
+INTERVAL = 1 / 10 # seconds
+
+MEM_SIZE = []
+
+
+async def mem_client(client):
+ # Space out connections to make them sequential.
+ await asyncio.sleep(client * INTERVAL)
+
+ tracemalloc.start()
+
+ async with websockets.connect(
+ "ws://localhost:8765",
+ extensions=[
+ permessage_deflate.ClientPerMessageDeflateFactory(
+ server_max_window_bits=10,
+ client_max_window_bits=10,
+ compress_settings={"memLevel": 3},
+ )
+ ],
+ ) as ws:
+ await ws.send("hello")
+ await ws.recv()
+
+ await ws.send(b"hello")
+ await ws.recv()
+
+ MEM_SIZE.append(tracemalloc.get_traced_memory()[0])
+ tracemalloc.stop()
+
+ # Hold connection open until the end of the test.
+ await asyncio.sleep(CLIENTS * INTERVAL)
+
+
+asyncio.get_event_loop().run_until_complete(
+ asyncio.gather(*[mem_client(client) for client in range(CLIENTS + 1)])
+)
+
+# First connection incurs non-representative setup costs.
+del MEM_SIZE[0]
+
+print(f"µ = {statistics.mean(MEM_SIZE) / 1024:.1f} KiB")
+print(f"σ = {statistics.stdev(MEM_SIZE) / 1024:.1f} KiB")
diff --git a/testing/web-platform/tests/tools/third_party/websockets/performance/mem_server.py b/testing/web-platform/tests/tools/third_party/websockets/performance/mem_server.py
new file mode 100644
index 0000000000..0a4a29f76c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/performance/mem_server.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+
+import asyncio
+import signal
+import statistics
+import tracemalloc
+
+import websockets
+from websockets.extensions import permessage_deflate
+
+
+CLIENTS = 10
+INTERVAL = 1 / 10 # seconds
+
+MEM_SIZE = []
+
+
+async def handler(ws, path):
+ msg = await ws.recv()
+ await ws.send(msg)
+
+ msg = await ws.recv()
+ await ws.send(msg)
+
+ MEM_SIZE.append(tracemalloc.get_traced_memory()[0])
+ tracemalloc.stop()
+
+ tracemalloc.start()
+
+ # Hold connection open until the end of the test.
+ await asyncio.sleep(CLIENTS * INTERVAL)
+
+
+async def mem_server(stop):
+ async with websockets.serve(
+ handler,
+ "localhost",
+ 8765,
+ extensions=[
+ permessage_deflate.ServerPerMessageDeflateFactory(
+ server_max_window_bits=10,
+ client_max_window_bits=10,
+ compress_settings={"memLevel": 3},
+ )
+ ],
+ ):
+ await stop
+
+
+loop = asyncio.get_event_loop()
+
+stop = loop.create_future()
+loop.add_signal_handler(signal.SIGINT, stop.set_result, None)
+
+tracemalloc.start()
+
+loop.run_until_complete(mem_server(stop))
+
+# First connection incurs non-representative setup costs.
+del MEM_SIZE[0]
+
+print(f"µ = {statistics.mean(MEM_SIZE) / 1024:.1f} KiB")
+print(f"σ = {statistics.stdev(MEM_SIZE) / 1024:.1f} KiB")
diff --git a/testing/web-platform/tests/tools/third_party/websockets/setup.cfg b/testing/web-platform/tests/tools/third_party/websockets/setup.cfg
new file mode 100644
index 0000000000..c306b2d4fb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/setup.cfg
@@ -0,0 +1,30 @@
+[bdist_wheel]
+python-tag = py36.py37
+
+[metadata]
+license_file = LICENSE
+
+[flake8]
+ignore = E731,F403,F405,W503
+max-line-length = 88
+
+[isort]
+combine_as_imports = True
+force_grid_wrap = 0
+include_trailing_comma = True
+known_standard_library = asyncio
+line_length = 88
+lines_after_imports = 2
+multi_line_output = 3
+
+[coverage:run]
+branch = True
+omit = */__main__.py
+source =
+ websockets
+ tests
+
+[coverage:paths]
+source =
+ src/websockets
+ .tox/*/lib/python*/site-packages/websockets
diff --git a/testing/web-platform/tests/tools/third_party/websockets/setup.py b/testing/web-platform/tests/tools/third_party/websockets/setup.py
new file mode 100644
index 0000000000..f358192477
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/setup.py
@@ -0,0 +1,66 @@
+import pathlib
+import re
+import sys
+
+import setuptools
+
+
+root_dir = pathlib.Path(__file__).parent
+
+description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
+
+long_description = (root_dir / 'README.rst').read_text(encoding='utf-8')
+
+# PyPI disables the "raw" directive.
+long_description = re.sub(
+ r"^\.\. raw:: html.*?^(?=\w)",
+ "",
+ long_description,
+ flags=re.DOTALL | re.MULTILINE,
+)
+
+exec((root_dir / 'src' / 'websockets' / 'version.py').read_text(encoding='utf-8'))
+
+if sys.version_info[:3] < (3, 6, 1):
+ raise Exception("websockets requires Python >= 3.6.1.")
+
+packages = ['websockets', 'websockets/extensions']
+
+ext_modules = [
+ setuptools.Extension(
+ 'websockets.speedups',
+ sources=['src/websockets/speedups.c'],
+ optional=not (root_dir / '.cibuildwheel').exists(),
+ )
+]
+
+setuptools.setup(
+ name='websockets',
+ version=version,
+ description=description,
+ long_description=long_description,
+ url='https://github.com/aaugustin/websockets',
+ author='Aymeric Augustin',
+ author_email='aymeric.augustin@m4x.org',
+ license='BSD',
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Environment :: Web Environment',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: BSD License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ ],
+ package_dir = {'': 'src'},
+ package_data = {'websockets': ['py.typed']},
+ packages=packages,
+ ext_modules=ext_modules,
+ include_package_data=True,
+ zip_safe=False,
+ python_requires='>=3.6.1',
+ test_loader='unittest:TestLoader',
+)
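setup.py declares the C accelerator websockets.speedups as an optional extension, so a failed compilation does not abort installation (except when the .cibuildwheel marker file is present, which makes the build mandatory). A sketch of checking at runtime whether the accelerator was built; the pure-Python fallback path is an assumption here, not shown in this diff:

    try:
        # Compiled from src/websockets/speedups.c when the optional extension built.
        import websockets.speedups  # noqa: F401
        print("C speedups extension available")
    except ImportError:
        print("C speedups extension missing; assuming pure-Python fallback")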
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/__init__.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/__init__.py
new file mode 100644
index 0000000000..ea1d829a33
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/__init__.py
@@ -0,0 +1,55 @@
+# This relies on each of the submodules having an __all__ variable.
+
+from .auth import * # noqa
+from .client import * # noqa
+from .exceptions import * # noqa
+from .protocol import * # noqa
+from .server import * # noqa
+from .typing import * # noqa
+from .uri import * # noqa
+from .version import version as __version__ # noqa
+
+
+__all__ = [
+ "AbortHandshake",
+ "basic_auth_protocol_factory",
+ "BasicAuthWebSocketServerProtocol",
+ "connect",
+ "ConnectionClosed",
+ "ConnectionClosedError",
+ "ConnectionClosedOK",
+ "Data",
+ "DuplicateParameter",
+ "ExtensionHeader",
+ "ExtensionParameter",
+ "InvalidHandshake",
+ "InvalidHeader",
+ "InvalidHeaderFormat",
+ "InvalidHeaderValue",
+ "InvalidMessage",
+ "InvalidOrigin",
+ "InvalidParameterName",
+ "InvalidParameterValue",
+ "InvalidState",
+ "InvalidStatusCode",
+ "InvalidUpgrade",
+ "InvalidURI",
+ "NegotiationError",
+ "Origin",
+ "parse_uri",
+ "PayloadTooBig",
+ "ProtocolError",
+ "RedirectHandshake",
+ "SecurityError",
+ "serve",
+ "Subprotocol",
+ "unix_connect",
+ "unix_serve",
+ "WebSocketClientProtocol",
+ "WebSocketCommonProtocol",
+ "WebSocketException",
+ "WebSocketProtocolError",
+ "WebSocketServer",
+ "WebSocketServerProtocol",
+ "WebSocketURI",
+]
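With the public API re-exported at the package level, application code only needs "import websockets". A minimal round trip using the re-exported connect(), as a sketch (the URI and payload are illustrative):

    import asyncio

    import websockets  # connect, serve and the exception classes are re-exported here


    async def hello(uri="ws://localhost:8765"):  # illustrative URI
        async with websockets.connect(uri) as ws:
            await ws.send("hello")
            print(await ws.recv())


    asyncio.get_event_loop().run_until_complete(hello())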
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/__main__.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/__main__.py
new file mode 100644
index 0000000000..394f7ac799
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/__main__.py
@@ -0,0 +1,206 @@
+import argparse
+import asyncio
+import os
+import signal
+import sys
+import threading
+from typing import Any, Set
+
+from .client import connect
+from .exceptions import ConnectionClosed, format_close
+
+
+if sys.platform == "win32":
+
+ def win_enable_vt100() -> None:
+ """
+ Enable VT-100 for console output on Windows.
+
+ See also https://bugs.python.org/issue29059.
+
+ """
+ import ctypes
+
+ STD_OUTPUT_HANDLE = ctypes.c_uint(-11)
+ INVALID_HANDLE_VALUE = ctypes.c_uint(-1)
+ ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x004
+
+ handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
+ if handle == INVALID_HANDLE_VALUE:
+ raise RuntimeError("unable to obtain stdout handle")
+
+ cur_mode = ctypes.c_uint()
+ if ctypes.windll.kernel32.GetConsoleMode(handle, ctypes.byref(cur_mode)) == 0:
+ raise RuntimeError("unable to query current console mode")
+
+ # ctypes ints lack support for the required bit-OR operation.
+ # Temporarily convert to Py int, do the OR and convert back.
+ py_int_mode = int.from_bytes(cur_mode, sys.byteorder)
+ new_mode = ctypes.c_uint(py_int_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
+
+ if ctypes.windll.kernel32.SetConsoleMode(handle, new_mode) == 0:
+ raise RuntimeError("unable to set console mode")
+
+
+def exit_from_event_loop_thread(
+ loop: asyncio.AbstractEventLoop, stop: "asyncio.Future[None]"
+) -> None:
+ loop.stop()
+ if not stop.done():
+ # When exiting the thread that runs the event loop, raise
+ # KeyboardInterrupt in the main thread to exit the program.
+ try:
+ ctrl_c = signal.CTRL_C_EVENT # Windows
+ except AttributeError:
+ ctrl_c = signal.SIGINT # POSIX
+ os.kill(os.getpid(), ctrl_c)
+
+
+def print_during_input(string: str) -> None:
+ sys.stdout.write(
+ # Save cursor position
+ "\N{ESC}7"
+ # Add a new line
+ "\N{LINE FEED}"
+ # Move cursor up
+ "\N{ESC}[A"
+ # Insert blank line, scroll last line down
+ "\N{ESC}[L"
+ # Print string in the inserted blank line
+ f"{string}\N{LINE FEED}"
+ # Restore cursor position
+ "\N{ESC}8"
+ # Move cursor down
+ "\N{ESC}[B"
+ )
+ sys.stdout.flush()
+
+
+def print_over_input(string: str) -> None:
+ sys.stdout.write(
+ # Move cursor to beginning of line
+ "\N{CARRIAGE RETURN}"
+ # Delete current line
+ "\N{ESC}[K"
+ # Print string
+ f"{string}\N{LINE FEED}"
+ )
+ sys.stdout.flush()
+
+
+async def run_client(
+ uri: str,
+ loop: asyncio.AbstractEventLoop,
+ inputs: "asyncio.Queue[str]",
+ stop: "asyncio.Future[None]",
+) -> None:
+ try:
+ websocket = await connect(uri)
+ except Exception as exc:
+ print_over_input(f"Failed to connect to {uri}: {exc}.")
+ exit_from_event_loop_thread(loop, stop)
+ return
+ else:
+ print_during_input(f"Connected to {uri}.")
+
+ try:
+ while True:
+ incoming: asyncio.Future[Any] = asyncio.ensure_future(websocket.recv())
+ outgoing: asyncio.Future[Any] = asyncio.ensure_future(inputs.get())
+ done: Set[asyncio.Future[Any]]
+ pending: Set[asyncio.Future[Any]]
+ done, pending = await asyncio.wait(
+ [incoming, outgoing, stop], return_when=asyncio.FIRST_COMPLETED
+ )
+
+ # Cancel pending tasks to avoid leaking them.
+ if incoming in pending:
+ incoming.cancel()
+ if outgoing in pending:
+ outgoing.cancel()
+
+ if incoming in done:
+ try:
+ message = incoming.result()
+ except ConnectionClosed:
+ break
+ else:
+ if isinstance(message, str):
+ print_during_input("< " + message)
+ else:
+ print_during_input("< (binary) " + message.hex())
+
+ if outgoing in done:
+ message = outgoing.result()
+ await websocket.send(message)
+
+ if stop in done:
+ break
+
+ finally:
+ await websocket.close()
+ close_status = format_close(websocket.close_code, websocket.close_reason)
+
+ print_over_input(f"Connection closed: {close_status}.")
+
+ exit_from_event_loop_thread(loop, stop)
+
+
+def main() -> None:
+ # If we're on Windows, enable VT100 terminal support.
+ if sys.platform == "win32":
+ try:
+ win_enable_vt100()
+ except RuntimeError as exc:
+ sys.stderr.write(
+ f"Unable to set terminal to VT100 mode. This is only "
+ f"supported since the Windows 10 Anniversary Update. Expect "
+ f"weird symbols on the terminal.\nError: {exc}\n"
+ )
+ sys.stderr.flush()
+
+ try:
+ import readline # noqa
+ except ImportError: # Windows has no `readline` normally
+ pass
+
+ # Parse command line arguments.
+ parser = argparse.ArgumentParser(
+ prog="python -m websockets",
+ description="Interactive WebSocket client.",
+ add_help=False,
+ )
+ parser.add_argument("uri", metavar="<uri>")
+ args = parser.parse_args()
+
+ # Create an event loop that will run in a background thread.
+ loop = asyncio.new_event_loop()
+
+ # Create a queue of user inputs. There's no need to limit its size.
+ inputs: asyncio.Queue[str] = asyncio.Queue(loop=loop)
+
+ # Create a stop condition when receiving SIGINT or SIGTERM.
+ stop: asyncio.Future[None] = loop.create_future()
+
+ # Schedule the task that will manage the connection.
+ asyncio.ensure_future(run_client(args.uri, loop, inputs, stop), loop=loop)
+
+ # Start the event loop in a background thread.
+ thread = threading.Thread(target=loop.run_forever)
+ thread.start()
+
+ # Read from stdin in the main thread in order to receive signals.
+ try:
+ while True:
+ # Since there's no size limit, put_nowait is identical to put.
+ message = input("> ")
+ loop.call_soon_threadsafe(inputs.put_nowait, message)
+ except (KeyboardInterrupt, EOFError): # ^C, ^D
+ loop.call_soon_threadsafe(stop.set_result, None)
+
+ # Wait for the event loop to terminate.
+ thread.join()
+
+
+if __name__ == "__main__":
+ main()
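This module backs the interactive client invoked as "python -m websockets <uri>", for example (URI illustrative):

    python -m websockets ws://localhost:8765

Lines typed at the "> " prompt are sent to the server, incoming messages are printed above the prompt, and Ctrl-C or Ctrl-D closes the connection.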
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/auth.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/auth.py
new file mode 100644
index 0000000000..ae204b8d9c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/auth.py
@@ -0,0 +1,160 @@
+"""
+:mod:`websockets.auth` provides HTTP Basic Authentication according to
+:rfc:`7235` and :rfc:`7617`.
+
+"""
+
+
+import functools
+import http
+from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Type, Union
+
+from .exceptions import InvalidHeader
+from .headers import build_www_authenticate_basic, parse_authorization_basic
+from .http import Headers
+from .server import HTTPResponse, WebSocketServerProtocol
+
+
+__all__ = ["BasicAuthWebSocketServerProtocol", "basic_auth_protocol_factory"]
+
+Credentials = Tuple[str, str]
+
+
+def is_credentials(value: Any) -> bool:
+ try:
+ username, password = value
+ except (TypeError, ValueError):
+ return False
+ else:
+ return isinstance(username, str) and isinstance(password, str)
+
+
+class BasicAuthWebSocketServerProtocol(WebSocketServerProtocol):
+ """
+ WebSocket server protocol that enforces HTTP Basic Auth.
+
+ """
+
+ def __init__(
+ self,
+ *args: Any,
+ realm: str,
+ check_credentials: Callable[[str, str], Awaitable[bool]],
+ **kwargs: Any,
+ ) -> None:
+ self.realm = realm
+ self.check_credentials = check_credentials
+ super().__init__(*args, **kwargs)
+
+ async def process_request(
+ self, path: str, request_headers: Headers
+ ) -> Optional[HTTPResponse]:
+ """
+ Check HTTP Basic Auth and return an HTTP 401 or 403 response if needed.
+
+ If authentication succeeds, the username of the authenticated user is
+ stored in the ``username`` attribute.
+
+ """
+ try:
+ authorization = request_headers["Authorization"]
+ except KeyError:
+ return (
+ http.HTTPStatus.UNAUTHORIZED,
+ [("WWW-Authenticate", build_www_authenticate_basic(self.realm))],
+ b"Missing credentials\n",
+ )
+
+ try:
+ username, password = parse_authorization_basic(authorization)
+ except InvalidHeader:
+ return (
+ http.HTTPStatus.UNAUTHORIZED,
+ [("WWW-Authenticate", build_www_authenticate_basic(self.realm))],
+ b"Unsupported credentials\n",
+ )
+
+ if not await self.check_credentials(username, password):
+ return (
+ http.HTTPStatus.UNAUTHORIZED,
+ [("WWW-Authenticate", build_www_authenticate_basic(self.realm))],
+ b"Invalid credentials\n",
+ )
+
+ self.username = username
+
+ return await super().process_request(path, request_headers)
+
+
+def basic_auth_protocol_factory(
+ realm: str,
+ credentials: Optional[Union[Credentials, Iterable[Credentials]]] = None,
+ check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None,
+ create_protocol: Type[
+ BasicAuthWebSocketServerProtocol
+ ] = BasicAuthWebSocketServerProtocol,
+) -> Callable[[Any], BasicAuthWebSocketServerProtocol]:
+ """
+ Protocol factory that enforces HTTP Basic Auth.
+
+ ``basic_auth_protocol_factory`` is designed to integrate with
+ :func:`~websockets.server.serve` like this::
+
+ websockets.serve(
+ ...,
+ create_protocol=websockets.basic_auth_protocol_factory(
+ realm="my dev server",
+ credentials=("hello", "iloveyou"),
+ )
+ )
+
+ ``realm`` indicates the scope of protection. It should contain only ASCII
+ characters because the encoding of non-ASCII characters is undefined.
+ Refer to section 2.2 of :rfc:`7235` for details.
+
+ ``credentials`` defines hard coded authorized credentials. It can be a
+ ``(username, password)`` pair or a list of such pairs.
+
+ ``check_credentials`` defines a coroutine that checks whether credentials
+ are authorized. This coroutine receives ``username`` and ``password``
+ arguments and returns a :class:`bool`.
+
+ One of ``credentials`` or ``check_credentials`` must be provided but not
+ both.
+
+ By default, ``basic_auth_protocol_factory`` creates a factory for building
+ :class:`BasicAuthWebSocketServerProtocol` instances. You can override this
+ with the ``create_protocol`` parameter.
+
+ :param realm: scope of protection
+ :param credentials: hard coded credentials
+ :param check_credentials: coroutine that verifies credentials
+ :raises TypeError: if the credentials argument has the wrong type
+
+ """
+ if (credentials is None) == (check_credentials is None):
+ raise TypeError("provide either credentials or check_credentials")
+
+ if credentials is not None:
+ if is_credentials(credentials):
+
+ async def check_credentials(username: str, password: str) -> bool:
+ return (username, password) == credentials
+
+ elif isinstance(credentials, Iterable):
+ credentials_list = list(credentials)
+ if all(is_credentials(item) for item in credentials_list):
+ credentials_dict = dict(credentials_list)
+
+ async def check_credentials(username: str, password: str) -> bool:
+ return credentials_dict.get(username) == password
+
+ else:
+ raise TypeError(f"invalid credentials argument: {credentials}")
+
+ else:
+ raise TypeError(f"invalid credentials argument: {credentials}")
+
+ return functools.partial(
+ create_protocol, realm=realm, check_credentials=check_credentials
+ )
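A sketch of wiring basic_auth_protocol_factory into serve() with a credential-checking coroutine; echo_handler and verify_user are hypothetical application code, not part of this module:

    import asyncio

    import websockets


    async def echo_handler(ws, path):  # hypothetical application handler
        message = await ws.recv()
        await ws.send(message)


    async def verify_user(username: str, password: str) -> bool:  # hypothetical check
        return (username, password) == ("hello", "iloveyou")


    start_server = websockets.serve(
        echo_handler,
        "localhost",
        8765,
        create_protocol=websockets.basic_auth_protocol_factory(
            realm="my dev server",
            check_credentials=verify_user,
        ),
    )

    loop = asyncio.get_event_loop()
    loop.run_until_complete(start_server)
    loop.run_forever()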
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/client.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/client.py
new file mode 100644
index 0000000000..eb58f9f484
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/client.py
@@ -0,0 +1,584 @@
+"""
+:mod:`websockets.client` defines the WebSocket client APIs.
+
+"""
+
+import asyncio
+import collections.abc
+import functools
+import logging
+import warnings
+from types import TracebackType
+from typing import Any, Generator, List, Optional, Sequence, Tuple, Type, cast
+
+from .exceptions import (
+ InvalidHandshake,
+ InvalidHeader,
+ InvalidMessage,
+ InvalidStatusCode,
+ NegotiationError,
+ RedirectHandshake,
+ SecurityError,
+)
+from .extensions.base import ClientExtensionFactory, Extension
+from .extensions.permessage_deflate import ClientPerMessageDeflateFactory
+from .handshake import build_request, check_response
+from .headers import (
+ build_authorization_basic,
+ build_extension,
+ build_subprotocol,
+ parse_extension,
+ parse_subprotocol,
+)
+from .http import USER_AGENT, Headers, HeadersLike, read_response
+from .protocol import WebSocketCommonProtocol
+from .typing import ExtensionHeader, Origin, Subprotocol
+from .uri import WebSocketURI, parse_uri
+
+
+__all__ = ["connect", "unix_connect", "WebSocketClientProtocol"]
+
+logger = logging.getLogger(__name__)
+
+
+class WebSocketClientProtocol(WebSocketCommonProtocol):
+ """
+ :class:`~asyncio.Protocol` subclass implementing a WebSocket client.
+
+ This class inherits most of its methods from
+ :class:`~websockets.protocol.WebSocketCommonProtocol`.
+
+ """
+
+ is_client = True
+ side = "client"
+
+ def __init__(
+ self,
+ *,
+ origin: Optional[Origin] = None,
+ extensions: Optional[Sequence[ClientExtensionFactory]] = None,
+ subprotocols: Optional[Sequence[Subprotocol]] = None,
+ extra_headers: Optional[HeadersLike] = None,
+ **kwargs: Any,
+ ) -> None:
+ self.origin = origin
+ self.available_extensions = extensions
+ self.available_subprotocols = subprotocols
+ self.extra_headers = extra_headers
+ super().__init__(**kwargs)
+
+ def write_http_request(self, path: str, headers: Headers) -> None:
+ """
+ Write request line and headers to the HTTP request.
+
+ """
+ self.path = path
+ self.request_headers = headers
+
+ logger.debug("%s > GET %s HTTP/1.1", self.side, path)
+ logger.debug("%s > %r", self.side, headers)
+
+ # Since the path and headers only contain ASCII characters,
+ # we can keep this simple.
+ request = f"GET {path} HTTP/1.1\r\n"
+ request += str(headers)
+
+ self.transport.write(request.encode())
+
+ async def read_http_response(self) -> Tuple[int, Headers]:
+ """
+ Read status line and headers from the HTTP response.
+
+ If the response contains a body, it may be read from ``self.reader``
+ after this coroutine returns.
+
+ :raises ~websockets.exceptions.InvalidMessage: if the HTTP message is
+ malformed or isn't an HTTP/1.1 GET response
+
+ """
+ try:
+ status_code, reason, headers = await read_response(self.reader)
+ except Exception as exc:
+ raise InvalidMessage("did not receive a valid HTTP response") from exc
+
+ logger.debug("%s < HTTP/1.1 %d %s", self.side, status_code, reason)
+ logger.debug("%s < %r", self.side, headers)
+
+ self.response_headers = headers
+
+ return status_code, self.response_headers
+
+ @staticmethod
+ def process_extensions(
+ headers: Headers,
+ available_extensions: Optional[Sequence[ClientExtensionFactory]],
+ ) -> List[Extension]:
+ """
+ Handle the Sec-WebSocket-Extensions HTTP response header.
+
+ Check that each extension is supported, as well as its parameters.
+
+ Return the list of accepted extensions.
+
+ Raise :exc:`~websockets.exceptions.InvalidHandshake` to abort the
+ connection.
+
+ :rfc:`6455` leaves the rules up to the specification of each
+ extension.
+
+ To provide this level of flexibility, for each extension accepted by
+ the server, we check for a match with each extension available in the
+ client configuration. If no match is found, an exception is raised.
+
+ If several variants of the same extension are accepted by the server,
+ it may be configured several times, which won't make sense in general.
+ Extensions must implement their own requirements. For this purpose,
+ the list of previously accepted extensions is provided.
+
+ Other requirements, for example related to mandatory extensions or the
+ order of extensions, may be implemented by overriding this method.
+
+ """
+ accepted_extensions: List[Extension] = []
+
+ header_values = headers.get_all("Sec-WebSocket-Extensions")
+
+ if header_values:
+
+ if available_extensions is None:
+ raise InvalidHandshake("no extensions supported")
+
+ parsed_header_values: List[ExtensionHeader] = sum(
+ [parse_extension(header_value) for header_value in header_values], []
+ )
+
+ for name, response_params in parsed_header_values:
+
+ for extension_factory in available_extensions:
+
+ # Skip non-matching extensions based on their name.
+ if extension_factory.name != name:
+ continue
+
+ # Skip non-matching extensions based on their params.
+ try:
+ extension = extension_factory.process_response_params(
+ response_params, accepted_extensions
+ )
+ except NegotiationError:
+ continue
+
+ # Add matching extension to the final list.
+ accepted_extensions.append(extension)
+
+ # Break out of the loop once we have a match.
+ break
+
+ # If we didn't break from the loop, no extension in our list
+ # matched what the server sent. Fail the connection.
+ else:
+ raise NegotiationError(
+ f"Unsupported extension: "
+ f"name = {name}, params = {response_params}"
+ )
+
+ return accepted_extensions
+
+ @staticmethod
+ def process_subprotocol(
+ headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]]
+ ) -> Optional[Subprotocol]:
+ """
+ Handle the Sec-WebSocket-Protocol HTTP response header.
+
+ Check that it contains exactly one supported subprotocol.
+
+ Return the selected subprotocol.
+
+ """
+ subprotocol: Optional[Subprotocol] = None
+
+ header_values = headers.get_all("Sec-WebSocket-Protocol")
+
+ if header_values:
+
+ if available_subprotocols is None:
+ raise InvalidHandshake("no subprotocols supported")
+
+ parsed_header_values: Sequence[Subprotocol] = sum(
+ [parse_subprotocol(header_value) for header_value in header_values], []
+ )
+
+ if len(parsed_header_values) > 1:
+ subprotocols = ", ".join(parsed_header_values)
+ raise InvalidHandshake(f"multiple subprotocols: {subprotocols}")
+
+ subprotocol = parsed_header_values[0]
+
+ if subprotocol not in available_subprotocols:
+ raise NegotiationError(f"unsupported subprotocol: {subprotocol}")
+
+ return subprotocol
+
+ async def handshake(
+ self,
+ wsuri: WebSocketURI,
+ origin: Optional[Origin] = None,
+ available_extensions: Optional[Sequence[ClientExtensionFactory]] = None,
+ available_subprotocols: Optional[Sequence[Subprotocol]] = None,
+ extra_headers: Optional[HeadersLike] = None,
+ ) -> None:
+ """
+ Perform the client side of the opening handshake.
+
+ :param origin: sets the Origin HTTP header
+ :param available_extensions: list of supported extensions in the order
+ in which they should be used
+ :param available_subprotocols: list of supported subprotocols in order
+ of decreasing preference
+ :param extra_headers: sets additional HTTP request headers; it must be
+ a :class:`~websockets.http.Headers` instance, a
+ :class:`~collections.abc.Mapping`, or an iterable of ``(name,
+ value)`` pairs
+ :raises ~websockets.exceptions.InvalidHandshake: if the handshake
+ fails
+
+ """
+ request_headers = Headers()
+
+ if wsuri.port == (443 if wsuri.secure else 80): # pragma: no cover
+ request_headers["Host"] = wsuri.host
+ else:
+ request_headers["Host"] = f"{wsuri.host}:{wsuri.port}"
+
+ if wsuri.user_info:
+ request_headers["Authorization"] = build_authorization_basic(
+ *wsuri.user_info
+ )
+
+ if origin is not None:
+ request_headers["Origin"] = origin
+
+ key = build_request(request_headers)
+
+ if available_extensions is not None:
+ extensions_header = build_extension(
+ [
+ (extension_factory.name, extension_factory.get_request_params())
+ for extension_factory in available_extensions
+ ]
+ )
+ request_headers["Sec-WebSocket-Extensions"] = extensions_header
+
+ if available_subprotocols is not None:
+ protocol_header = build_subprotocol(available_subprotocols)
+ request_headers["Sec-WebSocket-Protocol"] = protocol_header
+
+ if extra_headers is not None:
+ if isinstance(extra_headers, Headers):
+ extra_headers = extra_headers.raw_items()
+ elif isinstance(extra_headers, collections.abc.Mapping):
+ extra_headers = extra_headers.items()
+ for name, value in extra_headers:
+ request_headers[name] = value
+
+ request_headers.setdefault("User-Agent", USER_AGENT)
+
+ self.write_http_request(wsuri.resource_name, request_headers)
+
+ status_code, response_headers = await self.read_http_response()
+ if status_code in (301, 302, 303, 307, 308):
+ if "Location" not in response_headers:
+ raise InvalidHeader("Location")
+ raise RedirectHandshake(response_headers["Location"])
+ elif status_code != 101:
+ raise InvalidStatusCode(status_code)
+
+ check_response(response_headers, key)
+
+ self.extensions = self.process_extensions(
+ response_headers, available_extensions
+ )
+
+ self.subprotocol = self.process_subprotocol(
+ response_headers, available_subprotocols
+ )
+
+ self.connection_open()
+
+
+class Connect:
+ """
+ Connect to the WebSocket server at the given ``uri``.
+
+ Awaiting :func:`connect` yields a :class:`WebSocketClientProtocol` which
+ can then be used to send and receive messages.
+
+ :func:`connect` can also be used as an asynchronous context manager. In
+ that case, the connection is closed when exiting the context.
+
+ :func:`connect` is a wrapper around the event loop's
+ :meth:`~asyncio.loop.create_connection` method. Unknown keyword arguments
+ are passed to :meth:`~asyncio.loop.create_connection`.
+
+ For example, you can set the ``ssl`` keyword argument to a
+ :class:`~ssl.SSLContext` to enforce some TLS settings. When connecting to
+ a ``wss://`` URI, if this argument isn't provided explicitly,
+ :func:`ssl.create_default_context` is called to create a context.
+
+ You can connect to a different host and port from those found in ``uri``
+ by setting ``host`` and ``port`` keyword arguments. This only changes the
+ destination of the TCP connection. The host name from ``uri`` is still
+ used in the TLS handshake for secure connections and in the ``Host`` HTTP
+ header.
+
+ The ``create_protocol`` parameter allows customizing the
+ :class:`~asyncio.Protocol` that manages the connection. It should be a
+ callable or class accepting the same arguments as
+ :class:`WebSocketClientProtocol` and returning an instance of
+ :class:`WebSocketClientProtocol` or a subclass. It defaults to
+ :class:`WebSocketClientProtocol`.
+
+ The behavior of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
+ ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` is
+ described in :class:`~websockets.protocol.WebSocketCommonProtocol`.
+
+ :func:`connect` also accepts the following optional arguments:
+
+ * ``compression`` is a shortcut to configure compression extensions;
+ by default it enables the "permessage-deflate" extension; set it to
+ ``None`` to disable compression
+ * ``origin`` sets the Origin HTTP header
+ * ``extensions`` is a list of supported extensions in order of
+ decreasing preference
+ * ``subprotocols`` is a list of supported subprotocols in order of
+ decreasing preference
+ * ``extra_headers`` sets additional HTTP request headers; it can be a
+ :class:`~websockets.http.Headers` instance, a
+ :class:`~collections.abc.Mapping`, or an iterable of ``(name, value)``
+ pairs
+
+ :raises ~websockets.uri.InvalidURI: if ``uri`` is invalid
+ :raises ~websockets.exceptions.InvalidHandshake: if the opening handshake
+ fails
+
+ """
+
+ MAX_REDIRECTS_ALLOWED = 10
+
+ def __init__(
+ self,
+ uri: str,
+ *,
+ path: Optional[str] = None,
+ create_protocol: Optional[Type[WebSocketClientProtocol]] = None,
+ ping_interval: float = 20,
+ ping_timeout: float = 20,
+ close_timeout: Optional[float] = None,
+ max_size: int = 2 ** 20,
+ max_queue: int = 2 ** 5,
+ read_limit: int = 2 ** 16,
+ write_limit: int = 2 ** 16,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ legacy_recv: bool = False,
+ klass: Optional[Type[WebSocketClientProtocol]] = None,
+ timeout: Optional[float] = None,
+ compression: Optional[str] = "deflate",
+ origin: Optional[Origin] = None,
+ extensions: Optional[Sequence[ClientExtensionFactory]] = None,
+ subprotocols: Optional[Sequence[Subprotocol]] = None,
+ extra_headers: Optional[HeadersLike] = None,
+ **kwargs: Any,
+ ) -> None:
+ # Backwards compatibility: close_timeout used to be called timeout.
+ if timeout is None:
+ timeout = 10
+ else:
+ warnings.warn("rename timeout to close_timeout", DeprecationWarning)
+ # If both are specified, timeout is ignored.
+ if close_timeout is None:
+ close_timeout = timeout
+
+ # Backwards compatibility: create_protocol used to be called klass.
+ if klass is None:
+ klass = WebSocketClientProtocol
+ else:
+ warnings.warn("rename klass to create_protocol", DeprecationWarning)
+ # If both are specified, klass is ignored.
+ if create_protocol is None:
+ create_protocol = klass
+
+ if loop is None:
+ loop = asyncio.get_event_loop()
+
+ wsuri = parse_uri(uri)
+ if wsuri.secure:
+ kwargs.setdefault("ssl", True)
+ elif kwargs.get("ssl") is not None:
+ raise ValueError(
+ "connect() received a ssl argument for a ws:// URI, "
+ "use a wss:// URI to enable TLS"
+ )
+
+ if compression == "deflate":
+ if extensions is None:
+ extensions = []
+ if not any(
+ extension_factory.name == ClientPerMessageDeflateFactory.name
+ for extension_factory in extensions
+ ):
+ extensions = list(extensions) + [
+ ClientPerMessageDeflateFactory(client_max_window_bits=True)
+ ]
+ elif compression is not None:
+ raise ValueError(f"unsupported compression: {compression}")
+
+ factory = functools.partial(
+ create_protocol,
+ ping_interval=ping_interval,
+ ping_timeout=ping_timeout,
+ close_timeout=close_timeout,
+ max_size=max_size,
+ max_queue=max_queue,
+ read_limit=read_limit,
+ write_limit=write_limit,
+ loop=loop,
+ host=wsuri.host,
+ port=wsuri.port,
+ secure=wsuri.secure,
+ legacy_recv=legacy_recv,
+ origin=origin,
+ extensions=extensions,
+ subprotocols=subprotocols,
+ extra_headers=extra_headers,
+ )
+
+ if path is None:
+ host: Optional[str]
+ port: Optional[int]
+ if kwargs.get("sock") is None:
+ host, port = wsuri.host, wsuri.port
+ else:
+ # If sock is given, host and port shouldn't be specified.
+ host, port = None, None
+ # If host and port are given, override values from the URI.
+ host = kwargs.pop("host", host)
+ port = kwargs.pop("port", port)
+ create_connection = functools.partial(
+ loop.create_connection, factory, host, port, **kwargs
+ )
+ else:
+ create_connection = functools.partial(
+ loop.create_unix_connection, factory, path, **kwargs
+ )
+
+ # This is a coroutine function.
+ self._create_connection = create_connection
+ self._wsuri = wsuri
+
+ def handle_redirect(self, uri: str) -> None:
+ # Update the state of this instance to connect to a new URI.
+ old_wsuri = self._wsuri
+ new_wsuri = parse_uri(uri)
+
+ # Forbid TLS downgrade.
+ if old_wsuri.secure and not new_wsuri.secure:
+ raise SecurityError("redirect from WSS to WS")
+
+ same_origin = (
+ old_wsuri.host == new_wsuri.host and old_wsuri.port == new_wsuri.port
+ )
+
+ # Rewrite the host and port arguments for cross-origin redirects.
+ # This preserves connection overrides with the host and port
+ # arguments if the redirect points to the same host and port.
+ if not same_origin:
+ # Replace the host and port argument passed to the protocol factory.
+ factory = self._create_connection.args[0]
+ factory = functools.partial(
+ factory.func,
+ *factory.args,
+ **dict(factory.keywords, host=new_wsuri.host, port=new_wsuri.port),
+ )
+ # Replace the host and port argument passed to create_connection.
+ self._create_connection = functools.partial(
+ self._create_connection.func,
+ *(factory, new_wsuri.host, new_wsuri.port),
+ **self._create_connection.keywords,
+ )
+
+ # Set the new WebSocket URI. This suffices for same-origin redirects.
+ self._wsuri = new_wsuri
+
+ # async with connect(...)
+
+ async def __aenter__(self) -> WebSocketClientProtocol:
+ return await self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_value: Optional[BaseException],
+ traceback: Optional[TracebackType],
+ ) -> None:
+ await self.ws_client.close()
+
+ # await connect(...)
+
+ def __await__(self) -> Generator[Any, None, WebSocketClientProtocol]:
+ # Create a suitable iterator by calling __await__ on a coroutine.
+ return self.__await_impl__().__await__()
+
+ async def __await_impl__(self) -> WebSocketClientProtocol:
+ for redirects in range(self.MAX_REDIRECTS_ALLOWED):
+ transport, protocol = await self._create_connection()
+ # https://github.com/python/typeshed/pull/2756
+ transport = cast(asyncio.Transport, transport)
+ protocol = cast(WebSocketClientProtocol, protocol)
+
+ try:
+ try:
+ await protocol.handshake(
+ self._wsuri,
+ origin=protocol.origin,
+ available_extensions=protocol.available_extensions,
+ available_subprotocols=protocol.available_subprotocols,
+ extra_headers=protocol.extra_headers,
+ )
+ except Exception:
+ protocol.fail_connection()
+ await protocol.wait_closed()
+ raise
+ else:
+ self.ws_client = protocol
+ return protocol
+ except RedirectHandshake as exc:
+ self.handle_redirect(exc.uri)
+ else:
+ raise SecurityError("too many redirects")
+
+ # yield from connect(...)
+
+ __iter__ = __await__
+
+
+connect = Connect
+
+
+def unix_connect(path: str, uri: str = "ws://localhost/", **kwargs: Any) -> Connect:
+ """
+ Similar to :func:`connect`, but for connecting to a Unix socket.
+
+ This function calls the event loop's
+ :meth:`~asyncio.loop.create_unix_connection` method.
+
+ It is only available on Unix.
+
+ It's mainly useful for debugging servers listening on Unix sockets.
+
+ :param path: file system path to the Unix socket
+ :param uri: WebSocket URI
+
+ """
+ return connect(uri=uri, path=path, **kwargs)
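A sketch of connect() with a few of the optional arguments described above; the URI, origin, subprotocol, and header values are illustrative only:

    import asyncio

    import websockets


    async def main():
        async with websockets.connect(
            "wss://example.test/socket",         # illustrative wss:// URI (TLS via the default SSL context)
            origin="https://example.test",       # sets the Origin header
            subprotocols=["chat"],               # offered in decreasing order of preference
            extra_headers={"X-Client": "demo"},  # Headers, mapping, or (name, value) pairs
        ) as ws:
            await ws.send("ping")
            print(await ws.recv())


    asyncio.get_event_loop().run_until_complete(main())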
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/exceptions.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/exceptions.py
new file mode 100644
index 0000000000..9873a17170
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/exceptions.py
@@ -0,0 +1,366 @@
+"""
+:mod:`websockets.exceptions` defines the following exception hierarchy:
+
+* :exc:`WebSocketException`
+ * :exc:`ConnectionClosed`
+ * :exc:`ConnectionClosedError`
+ * :exc:`ConnectionClosedOK`
+ * :exc:`InvalidHandshake`
+ * :exc:`SecurityError`
+ * :exc:`InvalidMessage`
+ * :exc:`InvalidHeader`
+ * :exc:`InvalidHeaderFormat`
+ * :exc:`InvalidHeaderValue`
+ * :exc:`InvalidOrigin`
+ * :exc:`InvalidUpgrade`
+ * :exc:`InvalidStatusCode`
+ * :exc:`NegotiationError`
+ * :exc:`DuplicateParameter`
+ * :exc:`InvalidParameterName`
+ * :exc:`InvalidParameterValue`
+ * :exc:`AbortHandshake`
+ * :exc:`RedirectHandshake`
+ * :exc:`InvalidState`
+ * :exc:`InvalidURI`
+ * :exc:`PayloadTooBig`
+ * :exc:`ProtocolError`
+
+"""
+
+import http
+from typing import Optional
+
+from .http import Headers, HeadersLike
+
+
+__all__ = [
+ "WebSocketException",
+ "ConnectionClosed",
+ "ConnectionClosedError",
+ "ConnectionClosedOK",
+ "InvalidHandshake",
+ "SecurityError",
+ "InvalidMessage",
+ "InvalidHeader",
+ "InvalidHeaderFormat",
+ "InvalidHeaderValue",
+ "InvalidOrigin",
+ "InvalidUpgrade",
+ "InvalidStatusCode",
+ "NegotiationError",
+ "DuplicateParameter",
+ "InvalidParameterName",
+ "InvalidParameterValue",
+ "AbortHandshake",
+ "RedirectHandshake",
+ "InvalidState",
+ "InvalidURI",
+ "PayloadTooBig",
+ "ProtocolError",
+ "WebSocketProtocolError",
+]
+
+
+class WebSocketException(Exception):
+ """
+ Base class for all exceptions defined by :mod:`websockets`.
+
+ """
+
+
+CLOSE_CODES = {
+ 1000: "OK",
+ 1001: "going away",
+ 1002: "protocol error",
+ 1003: "unsupported type",
+ # 1004 is reserved
+ 1005: "no status code [internal]",
+ 1006: "connection closed abnormally [internal]",
+ 1007: "invalid data",
+ 1008: "policy violation",
+ 1009: "message too big",
+ 1010: "extension required",
+ 1011: "unexpected error",
+ 1015: "TLS failure [internal]",
+}
+
+
+def format_close(code: int, reason: str) -> str:
+ """
+ Display a human-readable version of the close code and reason.
+
+ """
+ if 3000 <= code < 4000:
+ explanation = "registered"
+ elif 4000 <= code < 5000:
+ explanation = "private use"
+ else:
+ explanation = CLOSE_CODES.get(code, "unknown")
+ result = f"code = {code} ({explanation}), "
+
+ if reason:
+ result += f"reason = {reason}"
+ else:
+ result += "no reason"
+
+ return result
+
+
+class ConnectionClosed(WebSocketException):
+ """
+ Raised when trying to interact with a closed connection.
+
+ Provides the connection close code and reason in its ``code`` and
+ ``reason`` attributes respectively.
+
+ """
+
+ def __init__(self, code: int, reason: str) -> None:
+ self.code = code
+ self.reason = reason
+ super().__init__(format_close(code, reason))
+
+
+class ConnectionClosedError(ConnectionClosed):
+ """
+ Like :exc:`ConnectionClosed`, when the connection terminated with an error.
+
+ This means the close code is different from 1000 (OK) and 1001 (going away).
+
+ """
+
+ def __init__(self, code: int, reason: str) -> None:
+ assert code != 1000 and code != 1001
+ super().__init__(code, reason)
+
+
+class ConnectionClosedOK(ConnectionClosed):
+ """
+ Like :exc:`ConnectionClosed`, when the connection terminated properly.
+
+ This means the close code is 1000 (OK) or 1001 (going away).
+
+ """
+
+ def __init__(self, code: int, reason: str) -> None:
+ assert code == 1000 or code == 1001
+ super().__init__(code, reason)
+
+
+class InvalidHandshake(WebSocketException):
+ """
+ Raised during the handshake when the WebSocket connection fails.
+
+ """
+
+
+class SecurityError(InvalidHandshake):
+ """
+ Raised when a handshake request or response breaks a security rule.
+
+ Security limits are hard coded.
+
+ """
+
+
+class InvalidMessage(InvalidHandshake):
+ """
+ Raised when a handshake request or response is malformed.
+
+ """
+
+
+class InvalidHeader(InvalidHandshake):
+ """
+ Raised when an HTTP header doesn't have a valid format or value.
+
+ """
+
+ def __init__(self, name: str, value: Optional[str] = None) -> None:
+ self.name = name
+ self.value = value
+ if value is None:
+ message = f"missing {name} header"
+ elif value == "":
+ message = f"empty {name} header"
+ else:
+ message = f"invalid {name} header: {value}"
+ super().__init__(message)
+
+
+class InvalidHeaderFormat(InvalidHeader):
+ """
+ Raised when an HTTP header cannot be parsed.
+
+ The format of the header doesn't match the grammar for that header.
+
+ """
+
+ def __init__(self, name: str, error: str, header: str, pos: int) -> None:
+ self.name = name
+ error = f"{error} at {pos} in {header}"
+ super().__init__(name, error)
+
+
+class InvalidHeaderValue(InvalidHeader):
+ """
+ Raised when an HTTP header has a wrong value.
+
+ The format of the header is correct but a value isn't acceptable.
+
+ """
+
+
+class InvalidOrigin(InvalidHeader):
+ """
+ Raised when the Origin header in a request isn't allowed.
+
+ """
+
+ def __init__(self, origin: Optional[str]) -> None:
+ super().__init__("Origin", origin)
+
+
+class InvalidUpgrade(InvalidHeader):
+ """
+ Raised when the Upgrade or Connection header isn't correct.
+
+ """
+
+
+class InvalidStatusCode(InvalidHandshake):
+ """
+ Raised when a handshake response status code is invalid.
+
+ The integer status code is available in the ``status_code`` attribute.
+
+ """
+
+ def __init__(self, status_code: int) -> None:
+ self.status_code = status_code
+ message = f"server rejected WebSocket connection: HTTP {status_code}"
+ super().__init__(message)
+
+
+class NegotiationError(InvalidHandshake):
+ """
+ Raised when negotiating an extension fails.
+
+ """
+
+
+class DuplicateParameter(NegotiationError):
+ """
+ Raised when a parameter name is repeated in an extension header.
+
+ """
+
+ def __init__(self, name: str) -> None:
+ self.name = name
+ message = f"duplicate parameter: {name}"
+ super().__init__(message)
+
+
+class InvalidParameterName(NegotiationError):
+ """
+ Raised when a parameter name in an extension header is invalid.
+
+ """
+
+ def __init__(self, name: str) -> None:
+ self.name = name
+ message = f"invalid parameter name: {name}"
+ super().__init__(message)
+
+
+class InvalidParameterValue(NegotiationError):
+ """
+ Raised when a parameter value in an extension header is invalid.
+
+ """
+
+ def __init__(self, name: str, value: Optional[str]) -> None:
+ self.name = name
+ self.value = value
+ if value is None:
+ message = f"missing value for parameter {name}"
+ elif value == "":
+ message = f"empty value for parameter {name}"
+ else:
+ message = f"invalid value for parameter {name}: {value}"
+ super().__init__(message)
+
+
+class AbortHandshake(InvalidHandshake):
+ """
+ Raised to abort the handshake on purpose and return an HTTP response.
+
+ This exception is an implementation detail.
+
+ The public API is :meth:`~server.WebSocketServerProtocol.process_request`.
+
+ """
+
+ def __init__(
+ self, status: http.HTTPStatus, headers: HeadersLike, body: bytes = b""
+ ) -> None:
+ self.status = status
+ self.headers = Headers(headers)
+ self.body = body
+ message = f"HTTP {status}, {len(self.headers)} headers, {len(body)} bytes"
+ super().__init__(message)
+
+
+class RedirectHandshake(InvalidHandshake):
+ """
+ Raised when a handshake gets redirected.
+
+ This exception is an implementation detail.
+
+ """
+
+ def __init__(self, uri: str) -> None:
+ self.uri = uri
+
+ def __str__(self) -> str:
+ return f"redirect to {self.uri}"
+
+
+class InvalidState(WebSocketException, AssertionError):
+ """
+ Raised when an operation is forbidden in the current state.
+
+ This exception is an implementation detail.
+
+ It should never be raised in normal circumstances.
+
+ """
+
+
+class InvalidURI(WebSocketException):
+ """
+ Raised when connecting to a URI that isn't a valid WebSocket URI.
+
+ """
+
+ def __init__(self, uri: str) -> None:
+ self.uri = uri
+ message = f"{uri} isn't a valid URI"
+ super().__init__(message)
+
+
+class PayloadTooBig(WebSocketException):
+ """
+ Raised when receiving a frame with a payload exceeding the maximum size.
+
+ """
+
+
+class ProtocolError(WebSocketException):
+ """
+ Raised when the other side breaks the protocol.
+
+ """
+
+
+WebSocketProtocolError = ProtocolError # for backwards compatibility
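A sketch of handling the clean/unclean split defined above; ws stands for any connected protocol instance (an assumption of the example):

    import websockets


    async def drain(ws):  # ws: a connected WebSocket protocol instance (assumed)
        try:
            while True:
                print(await ws.recv())
        except websockets.ConnectionClosedOK:
            pass  # close code 1000 or 1001: normal shutdown
        except websockets.ConnectionClosedError as exc:
            print(f"abnormal close: code = {exc.code}, reason = {exc.reason!r}")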
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/extensions/__init__.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/extensions/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/extensions/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/extensions/base.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/extensions/base.py
new file mode 100644
index 0000000000..aa52a7adbf
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/extensions/base.py
@@ -0,0 +1,119 @@
+"""
+:mod:`websockets.extensions.base` defines abstract classes for implementing
+extensions.
+
+See `section 9 of RFC 6455`_.
+
+.. _section 9 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-9
+
+"""
+
+from typing import List, Optional, Sequence, Tuple
+
+from ..framing import Frame
+from ..typing import ExtensionName, ExtensionParameter
+
+
+__all__ = ["Extension", "ClientExtensionFactory", "ServerExtensionFactory"]
+
+
+class Extension:
+ """
+ Abstract class for extensions.
+
+ """
+
+ @property
+ def name(self) -> ExtensionName:
+ """
+ Extension identifier.
+
+ """
+
+ def decode(self, frame: Frame, *, max_size: Optional[int] = None) -> Frame:
+ """
+ Decode an incoming frame.
+
+ :param frame: incoming frame
+ :param max_size: maximum payload size in bytes
+
+ """
+
+ def encode(self, frame: Frame) -> Frame:
+ """
+ Encode an outgoing frame.
+
+ :param frame: outgoing frame
+
+ """
+
+
+class ClientExtensionFactory:
+ """
+ Abstract class for client-side extension factories.
+
+ """
+
+ @property
+ def name(self) -> ExtensionName:
+ """
+ Extension identifier.
+
+ """
+
+ def get_request_params(self) -> List[ExtensionParameter]:
+ """
+ Build request parameters.
+
+ Return a list of ``(name, value)`` pairs.
+
+ """
+
+ def process_response_params(
+ self,
+ params: Sequence[ExtensionParameter],
+ accepted_extensions: Sequence[Extension],
+ ) -> Extension:
+ """
+ Process response parameters received from the server.
+
+ :param params: list of ``(name, value)`` pairs.
+ :param accepted_extensions: list of previously accepted extensions.
+ :raises ~websockets.exceptions.NegotiationError: if parameters aren't
+ acceptable
+
+ """
+
+
+class ServerExtensionFactory:
+ """
+ Abstract class for server-side extension factories.
+
+ """
+
+ @property
+ def name(self) -> ExtensionName:
+ """
+ Extension identifier.
+
+ """
+
+ def process_request_params(
+ self,
+ params: Sequence[ExtensionParameter],
+ accepted_extensions: Sequence[Extension],
+ ) -> Tuple[List[ExtensionParameter], Extension]:
+ """
+ Process request parameters received from the client.
+
+ To accept the offer, return a 2-tuple containing:
+
+ - response parameters: a list of ``(name, value)`` pairs
+ - an extension: an instance of a subclass of :class:`Extension`
+
+ :param params: list of ``(name, value)`` pairs.
+ :param accepted_extensions: list of previously accepted extensions.
+ :raises ~websockets.exceptions.NegotiationError: to reject the offer,
+ if parameters aren't acceptable
+
+ """
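As an illustration of this interface, a no-op extension that satisfies the Extension contract above; the extension token is hypothetical, and a real extension would also need client- and server-side factories for negotiation:

    from websockets.extensions.base import Extension


    class IdentityExtension(Extension):
        name = "x-identity"  # hypothetical extension token

        def decode(self, frame, *, max_size=None):
            return frame  # pass incoming frames through unchanged

        def encode(self, frame):
            return frame  # pass outgoing frames through unchanged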
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/extensions/permessage_deflate.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/extensions/permessage_deflate.py
new file mode 100644
index 0000000000..e38d9edaba
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/extensions/permessage_deflate.py
@@ -0,0 +1,588 @@
+"""
+:mod:`websockets.extensions.permessage_deflate` implements the Compression
+Extensions for WebSocket as specified in :rfc:`7692`.
+
+"""
+
+import zlib
+from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
+
+from ..exceptions import (
+ DuplicateParameter,
+ InvalidParameterName,
+ InvalidParameterValue,
+ NegotiationError,
+ PayloadTooBig,
+)
+from ..framing import CTRL_OPCODES, OP_CONT, Frame
+from ..typing import ExtensionName, ExtensionParameter
+from .base import ClientExtensionFactory, Extension, ServerExtensionFactory
+
+
+__all__ = [
+ "PerMessageDeflate",
+ "ClientPerMessageDeflateFactory",
+ "ServerPerMessageDeflateFactory",
+]
+
+_EMPTY_UNCOMPRESSED_BLOCK = b"\x00\x00\xff\xff"
+
+_MAX_WINDOW_BITS_VALUES = [str(bits) for bits in range(8, 16)]
+
+
+class PerMessageDeflate(Extension):
+ """
+ Per-Message Deflate extension.
+
+ """
+
+ name = ExtensionName("permessage-deflate")
+
+ def __init__(
+ self,
+ remote_no_context_takeover: bool,
+ local_no_context_takeover: bool,
+ remote_max_window_bits: int,
+ local_max_window_bits: int,
+ compress_settings: Optional[Dict[Any, Any]] = None,
+ ) -> None:
+ """
+ Configure the Per-Message Deflate extension.
+
+ """
+ if compress_settings is None:
+ compress_settings = {}
+
+ assert remote_no_context_takeover in [False, True]
+ assert local_no_context_takeover in [False, True]
+ assert 8 <= remote_max_window_bits <= 15
+ assert 8 <= local_max_window_bits <= 15
+ assert "wbits" not in compress_settings
+
+ self.remote_no_context_takeover = remote_no_context_takeover
+ self.local_no_context_takeover = local_no_context_takeover
+ self.remote_max_window_bits = remote_max_window_bits
+ self.local_max_window_bits = local_max_window_bits
+ self.compress_settings = compress_settings
+
+ if not self.remote_no_context_takeover:
+ self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits)
+
+ if not self.local_no_context_takeover:
+ self.encoder = zlib.compressobj(
+ wbits=-self.local_max_window_bits, **self.compress_settings
+ )
+
+ # To handle continuation frames properly, we must keep track of
+ # whether that initial frame was encoded.
+ self.decode_cont_data = False
+ # There's no need for self.encode_cont_data because we always encode
+ # outgoing frames, so it would always be True.
+
+ def __repr__(self) -> str:
+ return (
+ f"PerMessageDeflate("
+ f"remote_no_context_takeover={self.remote_no_context_takeover}, "
+ f"local_no_context_takeover={self.local_no_context_takeover}, "
+ f"remote_max_window_bits={self.remote_max_window_bits}, "
+ f"local_max_window_bits={self.local_max_window_bits})"
+ )
+
+ def decode(self, frame: Frame, *, max_size: Optional[int] = None) -> Frame:
+ """
+ Decode an incoming frame.
+
+ """
+ # Skip control frames.
+ if frame.opcode in CTRL_OPCODES:
+ return frame
+
+ # Handle continuation data frames:
+ # - skip if the initial data frame wasn't encoded
+ # - reset "decode continuation data" flag if it's a final frame
+ if frame.opcode == OP_CONT:
+ if not self.decode_cont_data:
+ return frame
+ if frame.fin:
+ self.decode_cont_data = False
+
+ # Handle text and binary data frames:
+ # - skip if the frame isn't encoded
+ # - set "decode continuation data" flag if it's a non-final frame
+ else:
+ if not frame.rsv1:
+ return frame
+ if not frame.fin: # frame.rsv1 is True at this point
+ self.decode_cont_data = True
+
+ # Re-initialize per-message decoder.
+ if self.remote_no_context_takeover:
+ self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits)
+
+ # Uncompress compressed frames. Protect against zip bombs by
+ # preventing zlib from decompressing more than max_length bytes
+ # (except when the limit is disabled with max_size = None).
+ data = frame.data
+ if frame.fin:
+ data += _EMPTY_UNCOMPRESSED_BLOCK
+ max_length = 0 if max_size is None else max_size
+ data = self.decoder.decompress(data, max_length)
+ if self.decoder.unconsumed_tail:
+ raise PayloadTooBig(
+ f"Uncompressed payload length exceeds size limit (? > {max_size} bytes)"
+ )
+
+ # Allow garbage collection of the decoder if it won't be reused.
+ if frame.fin and self.remote_no_context_takeover:
+ del self.decoder
+
+ return frame._replace(data=data, rsv1=False)
+
+ def encode(self, frame: Frame) -> Frame:
+ """
+ Encode an outgoing frame.
+
+ """
+ # Skip control frames.
+ if frame.opcode in CTRL_OPCODES:
+ return frame
+
+ # Since we always encode and never fragment messages, there's no logic
+ # similar to decode() here at this time.
+
+ if frame.opcode != OP_CONT:
+ # Re-initialize per-message encoder.
+ if self.local_no_context_takeover:
+ self.encoder = zlib.compressobj(
+ wbits=-self.local_max_window_bits, **self.compress_settings
+ )
+
+ # Compress data frames.
+ data = self.encoder.compress(frame.data) + self.encoder.flush(zlib.Z_SYNC_FLUSH)
+ if frame.fin and data.endswith(_EMPTY_UNCOMPRESSED_BLOCK):
+ data = data[:-4]
+
+ # Allow garbage collection of the encoder if it won't be reused.
+ if frame.fin and self.local_no_context_takeover:
+ del self.encoder
+
+ return frame._replace(data=data, rsv1=True)
+
+
+def _build_parameters(
+ server_no_context_takeover: bool,
+ client_no_context_takeover: bool,
+ server_max_window_bits: Optional[int],
+ client_max_window_bits: Optional[Union[int, bool]],
+) -> List[ExtensionParameter]:
+ """
+ Build a list of ``(name, value)`` pairs for some compression parameters.
+
+ """
+ params: List[ExtensionParameter] = []
+ if server_no_context_takeover:
+ params.append(("server_no_context_takeover", None))
+ if client_no_context_takeover:
+ params.append(("client_no_context_takeover", None))
+ if server_max_window_bits:
+ params.append(("server_max_window_bits", str(server_max_window_bits)))
+ if client_max_window_bits is True: # only in handshake requests
+ params.append(("client_max_window_bits", None))
+ elif client_max_window_bits:
+ params.append(("client_max_window_bits", str(client_max_window_bits)))
+ return params
+
+
+def _extract_parameters(
+ params: Sequence[ExtensionParameter], *, is_server: bool
+) -> Tuple[bool, bool, Optional[int], Optional[Union[int, bool]]]:
+ """
+ Extract compression parameters from a list of ``(name, value)`` pairs.
+
+ If ``is_server`` is ``True``, ``client_max_window_bits`` may be provided
+    without a value. This is only allowed in handshake requests.
+
+ """
+ server_no_context_takeover: bool = False
+ client_no_context_takeover: bool = False
+ server_max_window_bits: Optional[int] = None
+ client_max_window_bits: Optional[Union[int, bool]] = None
+
+ for name, value in params:
+
+ if name == "server_no_context_takeover":
+ if server_no_context_takeover:
+ raise DuplicateParameter(name)
+ if value is None:
+ server_no_context_takeover = True
+ else:
+ raise InvalidParameterValue(name, value)
+
+ elif name == "client_no_context_takeover":
+ if client_no_context_takeover:
+ raise DuplicateParameter(name)
+ if value is None:
+ client_no_context_takeover = True
+ else:
+ raise InvalidParameterValue(name, value)
+
+ elif name == "server_max_window_bits":
+ if server_max_window_bits is not None:
+ raise DuplicateParameter(name)
+ if value in _MAX_WINDOW_BITS_VALUES:
+ server_max_window_bits = int(value)
+ else:
+ raise InvalidParameterValue(name, value)
+
+ elif name == "client_max_window_bits":
+ if client_max_window_bits is not None:
+ raise DuplicateParameter(name)
+ if is_server and value is None: # only in handshake requests
+ client_max_window_bits = True
+ elif value in _MAX_WINDOW_BITS_VALUES:
+ client_max_window_bits = int(value)
+ else:
+ raise InvalidParameterValue(name, value)
+
+ else:
+ raise InvalidParameterName(name)
+
+ return (
+ server_no_context_takeover,
+ client_no_context_takeover,
+ server_max_window_bits,
+ client_max_window_bits,
+ )
+
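For illustration, a quick look at what these two helpers produce and accept (they are private, underscore-prefixed functions; calling them directly here is only meant to make the wire parameters concrete)::

    from websockets.extensions.permessage_deflate import (
        _build_parameters,
        _extract_parameters,
    )

    params = _build_parameters(
        server_no_context_takeover=True,
        client_no_context_takeover=False,
        server_max_window_bits=12,
        client_max_window_bits=True,  # bare parameter; handshake requests only
    )
    # params == [("server_no_context_takeover", None),
    #            ("server_max_window_bits", "12"),
    #            ("client_max_window_bits", None)]
    assert _extract_parameters(params, is_server=True) == (True, False, 12, True)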
+
+class ClientPerMessageDeflateFactory(ClientExtensionFactory):
+ """
+ Client-side extension factory for the Per-Message Deflate extension.
+
+ Parameters behave as described in `section 7.1 of RFC 7692`_. Set them to
+    ``True`` to include them in the negotiation offer without a value, or to an
+    integer value to include them with that value.
+
+ .. _section 7.1 of RFC 7692: https://tools.ietf.org/html/rfc7692#section-7.1
+
+ :param server_no_context_takeover: defaults to ``False``
+ :param client_no_context_takeover: defaults to ``False``
+ :param server_max_window_bits: optional, defaults to ``None``
+ :param client_max_window_bits: optional, defaults to ``None``
+ :param compress_settings: optional, keyword arguments for
+ :func:`zlib.compressobj`, excluding ``wbits``
+
+ """
+
+ name = ExtensionName("permessage-deflate")
+
+ def __init__(
+ self,
+ server_no_context_takeover: bool = False,
+ client_no_context_takeover: bool = False,
+ server_max_window_bits: Optional[int] = None,
+ client_max_window_bits: Optional[Union[int, bool]] = None,
+ compress_settings: Optional[Dict[str, Any]] = None,
+ ) -> None:
+ """
+ Configure the Per-Message Deflate extension factory.
+
+ """
+ if not (server_max_window_bits is None or 8 <= server_max_window_bits <= 15):
+ raise ValueError("server_max_window_bits must be between 8 and 15")
+ if not (
+ client_max_window_bits is None
+ or client_max_window_bits is True
+ or 8 <= client_max_window_bits <= 15
+ ):
+ raise ValueError("client_max_window_bits must be between 8 and 15")
+ if compress_settings is not None and "wbits" in compress_settings:
+ raise ValueError(
+ "compress_settings must not include wbits, "
+ "set client_max_window_bits instead"
+ )
+
+ self.server_no_context_takeover = server_no_context_takeover
+ self.client_no_context_takeover = client_no_context_takeover
+ self.server_max_window_bits = server_max_window_bits
+ self.client_max_window_bits = client_max_window_bits
+ self.compress_settings = compress_settings
+
+ def get_request_params(self) -> List[ExtensionParameter]:
+ """
+ Build request parameters.
+
+ """
+ return _build_parameters(
+ self.server_no_context_takeover,
+ self.client_no_context_takeover,
+ self.server_max_window_bits,
+ self.client_max_window_bits,
+ )
+
+ def process_response_params(
+ self,
+ params: Sequence[ExtensionParameter],
+ accepted_extensions: Sequence["Extension"],
+ ) -> PerMessageDeflate:
+ """
+ Process response parameters.
+
+ Return an extension instance.
+
+ """
+ if any(other.name == self.name for other in accepted_extensions):
+ raise NegotiationError(f"received duplicate {self.name}")
+
+ # Request parameters are available in instance variables.
+
+ # Load response parameters in local variables.
+ (
+ server_no_context_takeover,
+ client_no_context_takeover,
+ server_max_window_bits,
+ client_max_window_bits,
+ ) = _extract_parameters(params, is_server=False)
+
+ # After comparing the request and the response, the final
+ # configuration must be available in the local variables.
+
+ # server_no_context_takeover
+ #
+ # Req. Resp. Result
+ # ------ ------ --------------------------------------------------
+ # False False False
+ # False True True
+ # True False Error!
+ # True True True
+
+ if self.server_no_context_takeover:
+ if not server_no_context_takeover:
+ raise NegotiationError("expected server_no_context_takeover")
+
+ # client_no_context_takeover
+ #
+ # Req. Resp. Result
+ # ------ ------ --------------------------------------------------
+ # False False False
+ # False True True
+ # True False True - must change value
+ # True True True
+
+ if self.client_no_context_takeover:
+ if not client_no_context_takeover:
+ client_no_context_takeover = True
+
+ # server_max_window_bits
+
+ # Req. Resp. Result
+ # ------ ------ --------------------------------------------------
+ # None None None
+ # None 8≤M≤15 M
+ # 8≤N≤15 None Error!
+ # 8≤N≤15 8≤M≤N M
+ # 8≤N≤15 N<M≤15 Error!
+
+ if self.server_max_window_bits is None:
+ pass
+
+ else:
+ if server_max_window_bits is None:
+ raise NegotiationError("expected server_max_window_bits")
+ elif server_max_window_bits > self.server_max_window_bits:
+ raise NegotiationError("unsupported server_max_window_bits")
+
+ # client_max_window_bits
+
+ # Req. Resp. Result
+ # ------ ------ --------------------------------------------------
+ # None None None
+ # None 8≤M≤15 Error!
+ # True None None
+ # True 8≤M≤15 M
+ # 8≤N≤15 None N - must change value
+ # 8≤N≤15 8≤M≤N M
+ # 8≤N≤15 N<M≤15 Error!
+
+ if self.client_max_window_bits is None:
+ if client_max_window_bits is not None:
+ raise NegotiationError("unexpected client_max_window_bits")
+
+ elif self.client_max_window_bits is True:
+ pass
+
+ else:
+ if client_max_window_bits is None:
+ client_max_window_bits = self.client_max_window_bits
+ elif client_max_window_bits > self.client_max_window_bits:
+ raise NegotiationError("unsupported client_max_window_bits")
+
+ return PerMessageDeflate(
+ server_no_context_takeover, # remote_no_context_takeover
+ client_no_context_takeover, # local_no_context_takeover
+ server_max_window_bits or 15, # remote_max_window_bits
+ client_max_window_bits or 15, # local_max_window_bits
+ self.compress_settings,
+ )
+
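A sketch of the client-side negotiation flow; the server's answer below is made up for illustration::

    from websockets.extensions.permessage_deflate import ClientPerMessageDeflateFactory

    factory = ClientPerMessageDeflateFactory(client_max_window_bits=True)
    offer = factory.get_request_params()
    # offer == [("client_max_window_bits", None)]

    # Suppose the server answered with these parameters:
    extension = factory.process_response_params(
        [("client_no_context_takeover", None), ("client_max_window_bits", "12")],
        accepted_extensions=[],
    )
    # extension is a PerMessageDeflate with local_no_context_takeover=True
    # and local_max_window_bits=12, per the tables above.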
+
+class ServerPerMessageDeflateFactory(ServerExtensionFactory):
+ """
+ Server-side extension factory for the Per-Message Deflate extension.
+
+ Parameters behave as described in `section 7.1 of RFC 7692`_. Set them to
+    ``True`` to include them in the negotiation offer without a value, or to an
+    integer value to include them with that value.
+
+ .. _section 7.1 of RFC 7692: https://tools.ietf.org/html/rfc7692#section-7.1
+
+ :param server_no_context_takeover: defaults to ``False``
+ :param client_no_context_takeover: defaults to ``False``
+ :param server_max_window_bits: optional, defaults to ``None``
+ :param client_max_window_bits: optional, defaults to ``None``
+ :param compress_settings: optional, keyword arguments for
+ :func:`zlib.compressobj`, excluding ``wbits``
+
+ """
+
+ name = ExtensionName("permessage-deflate")
+
+ def __init__(
+ self,
+ server_no_context_takeover: bool = False,
+ client_no_context_takeover: bool = False,
+ server_max_window_bits: Optional[int] = None,
+ client_max_window_bits: Optional[int] = None,
+ compress_settings: Optional[Dict[str, Any]] = None,
+ ) -> None:
+ """
+ Configure the Per-Message Deflate extension factory.
+
+ """
+ if not (server_max_window_bits is None or 8 <= server_max_window_bits <= 15):
+ raise ValueError("server_max_window_bits must be between 8 and 15")
+ if not (client_max_window_bits is None or 8 <= client_max_window_bits <= 15):
+ raise ValueError("client_max_window_bits must be between 8 and 15")
+ if compress_settings is not None and "wbits" in compress_settings:
+ raise ValueError(
+ "compress_settings must not include wbits, "
+ "set server_max_window_bits instead"
+ )
+
+ self.server_no_context_takeover = server_no_context_takeover
+ self.client_no_context_takeover = client_no_context_takeover
+ self.server_max_window_bits = server_max_window_bits
+ self.client_max_window_bits = client_max_window_bits
+ self.compress_settings = compress_settings
+
+ def process_request_params(
+ self,
+ params: Sequence[ExtensionParameter],
+ accepted_extensions: Sequence["Extension"],
+ ) -> Tuple[List[ExtensionParameter], PerMessageDeflate]:
+ """
+ Process request parameters.
+
+ Return response params and an extension instance.
+
+ """
+ if any(other.name == self.name for other in accepted_extensions):
+ raise NegotiationError(f"skipped duplicate {self.name}")
+
+ # Load request parameters in local variables.
+ (
+ server_no_context_takeover,
+ client_no_context_takeover,
+ server_max_window_bits,
+ client_max_window_bits,
+ ) = _extract_parameters(params, is_server=True)
+
+ # Configuration parameters are available in instance variables.
+
+ # After comparing the request and the configuration, the response must
+ # be available in the local variables.
+
+ # server_no_context_takeover
+ #
+ # Config Req. Resp.
+ # ------ ------ --------------------------------------------------
+ # False False False
+ # False True True
+ # True False True - must change value to True
+ # True True True
+
+ if self.server_no_context_takeover:
+ if not server_no_context_takeover:
+ server_no_context_takeover = True
+
+ # client_no_context_takeover
+ #
+ # Config Req. Resp.
+ # ------ ------ --------------------------------------------------
+ # False False False
+ # False True True (or False)
+ # True False True - must change value to True
+ # True True True (or False)
+
+ if self.client_no_context_takeover:
+ if not client_no_context_takeover:
+ client_no_context_takeover = True
+
+ # server_max_window_bits
+
+ # Config Req. Resp.
+ # ------ ------ --------------------------------------------------
+ # None None None
+ # None 8≤M≤15 M
+ # 8≤N≤15 None N - must change value
+ # 8≤N≤15 8≤M≤N M
+ # 8≤N≤15 N<M≤15 N - must change value
+
+ if self.server_max_window_bits is None:
+ pass
+
+ else:
+ if server_max_window_bits is None:
+ server_max_window_bits = self.server_max_window_bits
+ elif server_max_window_bits > self.server_max_window_bits:
+ server_max_window_bits = self.server_max_window_bits
+
+ # client_max_window_bits
+
+ # Config Req. Resp.
+ # ------ ------ --------------------------------------------------
+ # None None None
+ # None True None - must change value
+ # None 8≤M≤15 M (or None)
+ # 8≤N≤15 None Error!
+ # 8≤N≤15 True N - must change value
+ # 8≤N≤15 8≤M≤N M (or None)
+ # 8≤N≤15 N<M≤15 N
+
+ if self.client_max_window_bits is None:
+ if client_max_window_bits is True:
+ client_max_window_bits = self.client_max_window_bits
+
+ else:
+ if client_max_window_bits is None:
+ raise NegotiationError("required client_max_window_bits")
+ elif client_max_window_bits is True:
+ client_max_window_bits = self.client_max_window_bits
+ elif self.client_max_window_bits < client_max_window_bits:
+ client_max_window_bits = self.client_max_window_bits
+
+ return (
+ _build_parameters(
+ server_no_context_takeover,
+ client_no_context_takeover,
+ server_max_window_bits,
+ client_max_window_bits,
+ ),
+ PerMessageDeflate(
+ client_no_context_takeover, # remote_no_context_takeover
+ server_no_context_takeover, # local_no_context_takeover
+ client_max_window_bits or 15, # remote_max_window_bits
+ server_max_window_bits or 15, # local_max_window_bits
+ self.compress_settings,
+ ),
+ )
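And the server-side counterpart, again with an assumed client offer::

    from websockets.extensions.permessage_deflate import ServerPerMessageDeflateFactory

    factory = ServerPerMessageDeflateFactory(server_max_window_bits=12)
    response_params, extension = factory.process_request_params(
        [("client_max_window_bits", None)],  # a typical client offer
        accepted_extensions=[],
    )
    # response_params == [("server_max_window_bits", "12")]
    # extension.local_max_window_bits == 12, extension.remote_max_window_bits == 15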
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/framing.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/framing.py
new file mode 100644
index 0000000000..26e58cdbfb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/framing.py
@@ -0,0 +1,342 @@
+"""
+:mod:`websockets.framing` reads and writes WebSocket frames.
+
+It deals with a single frame at a time. Anything that depends on the sequence
+of frames is implemented in :mod:`websockets.protocol`.
+
+See `section 5 of RFC 6455`_.
+
+.. _section 5 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-5
+
+"""
+
+import io
+import random
+import struct
+from typing import Any, Awaitable, Callable, NamedTuple, Optional, Sequence, Tuple
+
+from .exceptions import PayloadTooBig, ProtocolError
+from .typing import Data
+
+
+try:
+ from .speedups import apply_mask
+except ImportError: # pragma: no cover
+ from .utils import apply_mask
+
+
+__all__ = [
+ "DATA_OPCODES",
+ "CTRL_OPCODES",
+ "OP_CONT",
+ "OP_TEXT",
+ "OP_BINARY",
+ "OP_CLOSE",
+ "OP_PING",
+ "OP_PONG",
+ "Frame",
+ "prepare_data",
+ "encode_data",
+ "parse_close",
+ "serialize_close",
+]
+
+DATA_OPCODES = OP_CONT, OP_TEXT, OP_BINARY = 0x00, 0x01, 0x02
+CTRL_OPCODES = OP_CLOSE, OP_PING, OP_PONG = 0x08, 0x09, 0x0A
+
+# Close codes that are allowed in a close frame.
+# Using a list optimizes `code in EXTERNAL_CLOSE_CODES`.
+EXTERNAL_CLOSE_CODES = [1000, 1001, 1002, 1003, 1007, 1008, 1009, 1010, 1011]
+
+
+# Consider converting to a dataclass when dropping support for Python < 3.7.
+
+
+class Frame(NamedTuple):
+ """
+ WebSocket frame.
+
+ :param bool fin: FIN bit
+ :param bool rsv1: RSV1 bit
+ :param bool rsv2: RSV2 bit
+ :param bool rsv3: RSV3 bit
+ :param int opcode: opcode
+ :param bytes data: payload data
+
+ Only these fields are needed. The MASK bit, payload length and masking-key
+ are handled on the fly by :meth:`read` and :meth:`write`.
+
+ """
+
+ fin: bool
+ opcode: int
+ data: bytes
+ rsv1: bool = False
+ rsv2: bool = False
+ rsv3: bool = False
+
+ @classmethod
+ async def read(
+ cls,
+ reader: Callable[[int], Awaitable[bytes]],
+ *,
+ mask: bool,
+ max_size: Optional[int] = None,
+ extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None,
+ ) -> "Frame":
+ """
+ Read a WebSocket frame.
+
+ :param reader: coroutine that reads exactly the requested number of
+ bytes, unless the end of file is reached
+ :param mask: whether the frame should be masked i.e. whether the read
+ happens on the server side
+ :param max_size: maximum payload size in bytes
+ :param extensions: list of classes with a ``decode()`` method that
+            transforms the frame and returns a new frame; extensions are applied
+ in reverse order
+ :raises ~websockets.exceptions.PayloadTooBig: if the frame exceeds
+ ``max_size``
+ :raises ~websockets.exceptions.ProtocolError: if the frame
+ contains incorrect values
+
+ """
+ # Read the header.
+ data = await reader(2)
+ head1, head2 = struct.unpack("!BB", data)
+
+ # While not Pythonic, this is marginally faster than calling bool().
+ fin = True if head1 & 0b10000000 else False
+ rsv1 = True if head1 & 0b01000000 else False
+ rsv2 = True if head1 & 0b00100000 else False
+ rsv3 = True if head1 & 0b00010000 else False
+ opcode = head1 & 0b00001111
+
+ if (True if head2 & 0b10000000 else False) != mask:
+ raise ProtocolError("incorrect masking")
+
+ length = head2 & 0b01111111
+ if length == 126:
+ data = await reader(2)
+ (length,) = struct.unpack("!H", data)
+ elif length == 127:
+ data = await reader(8)
+ (length,) = struct.unpack("!Q", data)
+ if max_size is not None and length > max_size:
+ raise PayloadTooBig(
+ f"payload length exceeds size limit ({length} > {max_size} bytes)"
+ )
+ if mask:
+ mask_bits = await reader(4)
+
+ # Read the data.
+ data = await reader(length)
+ if mask:
+ data = apply_mask(data, mask_bits)
+
+ frame = cls(fin, opcode, data, rsv1, rsv2, rsv3)
+
+ if extensions is None:
+ extensions = []
+ for extension in reversed(extensions):
+ frame = extension.decode(frame, max_size=max_size)
+
+ frame.check()
+
+ return frame
+
+ def write(
+ frame,
+ write: Callable[[bytes], Any],
+ *,
+ mask: bool,
+ extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None,
+ ) -> None:
+ """
+ Write a WebSocket frame.
+
+ :param frame: frame to write
+ :param write: function that writes bytes
+ :param mask: whether the frame should be masked i.e. whether the write
+ happens on the client side
+ :param extensions: list of classes with an ``encode()`` method that
+            transforms the frame and returns a new frame; extensions are applied
+ in order
+ :raises ~websockets.exceptions.ProtocolError: if the frame
+ contains incorrect values
+
+ """
+ # The first parameter is called `frame` rather than `self`,
+        # but it's the instance of the class to which this method is bound.
+
+ frame.check()
+
+ if extensions is None:
+ extensions = []
+ for extension in extensions:
+ frame = extension.encode(frame)
+
+ output = io.BytesIO()
+
+ # Prepare the header.
+ head1 = (
+ (0b10000000 if frame.fin else 0)
+ | (0b01000000 if frame.rsv1 else 0)
+ | (0b00100000 if frame.rsv2 else 0)
+ | (0b00010000 if frame.rsv3 else 0)
+ | frame.opcode
+ )
+
+ head2 = 0b10000000 if mask else 0
+
+ length = len(frame.data)
+ if length < 126:
+ output.write(struct.pack("!BB", head1, head2 | length))
+ elif length < 65536:
+ output.write(struct.pack("!BBH", head1, head2 | 126, length))
+ else:
+ output.write(struct.pack("!BBQ", head1, head2 | 127, length))
+
+ if mask:
+ mask_bits = struct.pack("!I", random.getrandbits(32))
+ output.write(mask_bits)
+
+ # Prepare the data.
+ if mask:
+ data = apply_mask(frame.data, mask_bits)
+ else:
+ data = frame.data
+ output.write(data)
+
+ # Send the frame.
+
+ # The frame is written in a single call to write in order to prevent
+ # TCP fragmentation. See #68 for details. This also makes it safe to
+ # send frames concurrently from multiple coroutines.
+ write(output.getvalue())
+
+ def check(frame) -> None:
+ """
+ Check that reserved bits and opcode have acceptable values.
+
+ :raises ~websockets.exceptions.ProtocolError: if a reserved
+ bit or the opcode is invalid
+
+ """
+ # The first parameter is called `frame` rather than `self`,
+        # but it's the instance of the class to which this method is bound.
+
+ if frame.rsv1 or frame.rsv2 or frame.rsv3:
+ raise ProtocolError("reserved bits must be 0")
+
+ if frame.opcode in DATA_OPCODES:
+ return
+ elif frame.opcode in CTRL_OPCODES:
+ if len(frame.data) > 125:
+ raise ProtocolError("control frame too long")
+ if not frame.fin:
+ raise ProtocolError("fragmented control frame")
+ else:
+ raise ProtocolError(f"invalid opcode: {frame.opcode}")
+
+
+def prepare_data(data: Data) -> Tuple[int, bytes]:
+ """
+ Convert a string or byte-like object to an opcode and a bytes-like object.
+
+ This function is designed for data frames.
+
+ If ``data`` is a :class:`str`, return ``OP_TEXT`` and a :class:`bytes`
+ object encoding ``data`` in UTF-8.
+
+ If ``data`` is a bytes-like object, return ``OP_BINARY`` and a bytes-like
+ object.
+
+ :raises TypeError: if ``data`` doesn't have a supported type
+
+ """
+ if isinstance(data, str):
+ return OP_TEXT, data.encode("utf-8")
+ elif isinstance(data, (bytes, bytearray)):
+ return OP_BINARY, data
+ elif isinstance(data, memoryview):
+ if data.c_contiguous:
+ return OP_BINARY, data
+ else:
+ return OP_BINARY, data.tobytes()
+ else:
+ raise TypeError("data must be bytes-like or str")
+
+
+def encode_data(data: Data) -> bytes:
+ """
+ Convert a string or byte-like object to bytes.
+
+ This function is designed for ping and pong frames.
+
+ If ``data`` is a :class:`str`, return a :class:`bytes` object encoding
+ ``data`` in UTF-8.
+
+ If ``data`` is a bytes-like object, return a :class:`bytes` object.
+
+ :raises TypeError: if ``data`` doesn't have a supported type
+
+ """
+ if isinstance(data, str):
+ return data.encode("utf-8")
+ elif isinstance(data, (bytes, bytearray)):
+ return bytes(data)
+ elif isinstance(data, memoryview):
+ return data.tobytes()
+ else:
+ raise TypeError("data must be bytes-like or str")
+
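Both helpers are pure conversions; a couple of illustrative calls (import path assumed for the vendored copy)::

    from websockets.framing import OP_BINARY, OP_TEXT, encode_data, prepare_data

    assert prepare_data("hello") == (OP_TEXT, b"hello")
    assert prepare_data(b"\x00\x01") == (OP_BINARY, b"\x00\x01")
    assert encode_data("ping") == b"ping"
    assert encode_data(bytearray(b"pong")) == b"pong"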
+
+def parse_close(data: bytes) -> Tuple[int, str]:
+ """
+ Parse the payload from a close frame.
+
+ Return ``(code, reason)``.
+
+ :raises ~websockets.exceptions.ProtocolError: if data is ill-formed
+ :raises UnicodeDecodeError: if the reason isn't valid UTF-8
+
+ """
+ length = len(data)
+ if length >= 2:
+ (code,) = struct.unpack("!H", data[:2])
+ check_close(code)
+ reason = data[2:].decode("utf-8")
+ return code, reason
+ elif length == 0:
+ return 1005, ""
+ else:
+ assert length == 1
+ raise ProtocolError("close frame too short")
+
+
+def serialize_close(code: int, reason: str) -> bytes:
+ """
+ Serialize the payload for a close frame.
+
+ This is the reverse of :func:`parse_close`.
+
+ """
+ check_close(code)
+ return struct.pack("!H", code) + reason.encode("utf-8")
+
+
+def check_close(code: int) -> None:
+ """
+ Check that the close code has an acceptable value for a close frame.
+
+ :raises ~websockets.exceptions.ProtocolError: if the close code
+ is invalid
+
+ """
+ if not (code in EXTERNAL_CLOSE_CODES or 3000 <= code < 5000):
+ raise ProtocolError("invalid status code")
+
+
+# at the bottom to allow circular import, because Extension depends on Frame
+import websockets.extensions.base # isort:skip # noqa
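A short round trip through the close-frame helpers defined above (import path assumed for the vendored copy)::

    from websockets.framing import parse_close, serialize_close

    payload = serialize_close(1000, "bye")       # b"\x03\xe8bye"
    assert parse_close(payload) == (1000, "bye")
    assert parse_close(b"") == (1005, "")        # empty payload means "no code received"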
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/handshake.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/handshake.py
new file mode 100644
index 0000000000..9bfe27754f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/handshake.py
@@ -0,0 +1,185 @@
+"""
+:mod:`websockets.handshake` provides helpers for the WebSocket handshake.
+
+See `section 4 of RFC 6455`_.
+
+.. _section 4 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-4
+
+Some checks cannot be performed because they depend too much on the
+context; instead, they're documented below.
+
+To accept a connection, a server must:
+
+- Read the request, check that the method is GET, and check the headers with
+ :func:`check_request`,
+- Send a 101 response to the client with the headers created by
+ :func:`build_response` if the request is valid; otherwise, send an
+ appropriate HTTP error code.
+
+To open a connection, a client must:
+
+- Send a GET request to the server with the headers created by
+ :func:`build_request`,
+- Read the response, check that the status code is 101, and check the headers
+ with :func:`check_response`.
+
+"""
+
+import base64
+import binascii
+import hashlib
+import random
+from typing import List
+
+from .exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade
+from .headers import ConnectionOption, UpgradeProtocol, parse_connection, parse_upgrade
+from .http import Headers, MultipleValuesError
+
+
+__all__ = ["build_request", "check_request", "build_response", "check_response"]
+
+GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
+
+
+def build_request(headers: Headers) -> str:
+ """
+ Build a handshake request to send to the server.
+
+ Update request headers passed in argument.
+
+ :param headers: request headers
+ :returns: ``key`` which must be passed to :func:`check_response`
+
+ """
+ raw_key = bytes(random.getrandbits(8) for _ in range(16))
+ key = base64.b64encode(raw_key).decode()
+ headers["Upgrade"] = "websocket"
+ headers["Connection"] = "Upgrade"
+ headers["Sec-WebSocket-Key"] = key
+ headers["Sec-WebSocket-Version"] = "13"
+ return key
+
+
+def check_request(headers: Headers) -> str:
+ """
+ Check a handshake request received from the client.
+
+ This function doesn't verify that the request is an HTTP/1.1 or higher GET
+ request and doesn't perform ``Host`` and ``Origin`` checks. These controls
+ are usually performed earlier in the HTTP request handling code. They're
+ the responsibility of the caller.
+
+ :param headers: request headers
+ :returns: ``key`` which must be passed to :func:`build_response`
+ :raises ~websockets.exceptions.InvalidHandshake: if the handshake request
+        is invalid; then the server must return a 400 Bad Request error
+
+ """
+ connection: List[ConnectionOption] = sum(
+ [parse_connection(value) for value in headers.get_all("Connection")], []
+ )
+
+ if not any(value.lower() == "upgrade" for value in connection):
+ raise InvalidUpgrade("Connection", ", ".join(connection))
+
+ upgrade: List[UpgradeProtocol] = sum(
+ [parse_upgrade(value) for value in headers.get_all("Upgrade")], []
+ )
+
+ # For compatibility with non-strict implementations, ignore case when
+ # checking the Upgrade header. It's supposed to be 'WebSocket'.
+ if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
+ raise InvalidUpgrade("Upgrade", ", ".join(upgrade))
+
+ try:
+ s_w_key = headers["Sec-WebSocket-Key"]
+ except KeyError:
+ raise InvalidHeader("Sec-WebSocket-Key")
+ except MultipleValuesError:
+ raise InvalidHeader(
+ "Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found"
+ )
+
+ try:
+ raw_key = base64.b64decode(s_w_key.encode(), validate=True)
+ except binascii.Error:
+ raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key)
+ if len(raw_key) != 16:
+ raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key)
+
+ try:
+ s_w_version = headers["Sec-WebSocket-Version"]
+ except KeyError:
+ raise InvalidHeader("Sec-WebSocket-Version")
+ except MultipleValuesError:
+ raise InvalidHeader(
+ "Sec-WebSocket-Version", "more than one Sec-WebSocket-Version header found"
+ )
+
+ if s_w_version != "13":
+ raise InvalidHeaderValue("Sec-WebSocket-Version", s_w_version)
+
+ return s_w_key
+
+
+def build_response(headers: Headers, key: str) -> None:
+ """
+ Build a handshake response to send to the client.
+
+ Update response headers passed in argument.
+
+ :param headers: response headers
+ :param key: comes from :func:`check_request`
+
+ """
+ headers["Upgrade"] = "websocket"
+ headers["Connection"] = "Upgrade"
+ headers["Sec-WebSocket-Accept"] = accept(key)
+
+
+def check_response(headers: Headers, key: str) -> None:
+ """
+ Check a handshake response received from the server.
+
+ This function doesn't verify that the response is an HTTP/1.1 or higher
+ response with a 101 status code. These controls are the responsibility of
+ the caller.
+
+ :param headers: response headers
+ :param key: comes from :func:`build_request`
+ :raises ~websockets.exceptions.InvalidHandshake: if the handshake response
+ is invalid
+
+ """
+ connection: List[ConnectionOption] = sum(
+ [parse_connection(value) for value in headers.get_all("Connection")], []
+ )
+
+ if not any(value.lower() == "upgrade" for value in connection):
+ raise InvalidUpgrade("Connection", " ".join(connection))
+
+ upgrade: List[UpgradeProtocol] = sum(
+ [parse_upgrade(value) for value in headers.get_all("Upgrade")], []
+ )
+
+ # For compatibility with non-strict implementations, ignore case when
+ # checking the Upgrade header. It's supposed to be 'WebSocket'.
+ if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
+ raise InvalidUpgrade("Upgrade", ", ".join(upgrade))
+
+ try:
+ s_w_accept = headers["Sec-WebSocket-Accept"]
+ except KeyError:
+ raise InvalidHeader("Sec-WebSocket-Accept")
+ except MultipleValuesError:
+ raise InvalidHeader(
+ "Sec-WebSocket-Accept", "more than one Sec-WebSocket-Accept header found"
+ )
+
+ if s_w_accept != accept(key):
+ raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept)
+
+
+def accept(key: str) -> str:
+ sha1 = hashlib.sha1((key + GUID).encode()).digest()
+ return base64.b64encode(sha1).decode()
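The four helpers compose into a full handshake round trip; a sketch using the ``Headers`` class from :mod:`websockets.http` (module paths assumed for this vendored tree)::

    from websockets.handshake import (
        accept,
        build_request,
        build_response,
        check_request,
        check_response,
    )
    from websockets.http import Headers

    request_headers = Headers()
    key = build_request(request_headers)           # client side
    assert check_request(request_headers) == key   # server side echoes the key

    response_headers = Headers()
    build_response(response_headers, key)          # server side
    check_response(response_headers, key)          # client side; raises on mismatch

    # The accept value is deterministic; for the sample key from RFC 6455:
    assert accept("dGhlIHNhbXBsZSBub25jZQ==") == "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="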
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/headers.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/headers.py
new file mode 100644
index 0000000000..f33c94c046
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/headers.py
@@ -0,0 +1,515 @@
+"""
+:mod:`websockets.headers` provides parsers and serializers for HTTP headers
+used in WebSocket handshake messages.
+
+These APIs cannot be imported from :mod:`websockets`. They must be imported
+from :mod:`websockets.headers`.
+
+"""
+
+import base64
+import binascii
+import re
+from typing import Callable, List, NewType, Optional, Sequence, Tuple, TypeVar, cast
+
+from .exceptions import InvalidHeaderFormat, InvalidHeaderValue
+from .typing import ExtensionHeader, ExtensionName, ExtensionParameter, Subprotocol
+
+
+__all__ = [
+ "parse_connection",
+ "parse_upgrade",
+ "parse_extension",
+ "build_extension",
+ "parse_subprotocol",
+ "build_subprotocol",
+ "build_www_authenticate_basic",
+ "parse_authorization_basic",
+ "build_authorization_basic",
+]
+
+
+T = TypeVar("T")
+
+ConnectionOption = NewType("ConnectionOption", str)
+UpgradeProtocol = NewType("UpgradeProtocol", str)
+
+
+# To avoid a dependency on a parsing library, we implement manually the ABNF
+# described in https://tools.ietf.org/html/rfc6455#section-9.1 with the
+# definitions from https://tools.ietf.org/html/rfc7230#appendix-B.
+
+
+def peek_ahead(header: str, pos: int) -> Optional[str]:
+ """
+ Return the next character from ``header`` at the given position.
+
+ Return ``None`` at the end of ``header``.
+
+ We never need to peek more than one character ahead.
+
+ """
+ return None if pos == len(header) else header[pos]
+
+
+_OWS_re = re.compile(r"[\t ]*")
+
+
+def parse_OWS(header: str, pos: int) -> int:
+ """
+ Parse optional whitespace from ``header`` at the given position.
+
+ Return the new position.
+
+ The whitespace itself isn't returned because it isn't significant.
+
+ """
+ # There's always a match, possibly empty, whose content doesn't matter.
+ match = _OWS_re.match(header, pos)
+ assert match is not None
+ return match.end()
+
+
+_token_re = re.compile(r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+")
+
+
+def parse_token(header: str, pos: int, header_name: str) -> Tuple[str, int]:
+ """
+ Parse a token from ``header`` at the given position.
+
+ Return the token value and the new position.
+
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ match = _token_re.match(header, pos)
+ if match is None:
+ raise InvalidHeaderFormat(header_name, "expected token", header, pos)
+ return match.group(), match.end()
+
+
+_quoted_string_re = re.compile(
+ r'"(?:[\x09\x20-\x21\x23-\x5b\x5d-\x7e]|\\[\x09\x20-\x7e\x80-\xff])*"'
+)
+
+
+_unquote_re = re.compile(r"\\([\x09\x20-\x7e\x80-\xff])")
+
+
+def parse_quoted_string(header: str, pos: int, header_name: str) -> Tuple[str, int]:
+ """
+ Parse a quoted string from ``header`` at the given position.
+
+ Return the unquoted value and the new position.
+
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ match = _quoted_string_re.match(header, pos)
+ if match is None:
+ raise InvalidHeaderFormat(header_name, "expected quoted string", header, pos)
+ return _unquote_re.sub(r"\1", match.group()[1:-1]), match.end()
+
+
+_quotable_re = re.compile(r"[\x09\x20-\x7e\x80-\xff]*")
+
+
+_quote_re = re.compile(r"([\x22\x5c])")
+
+
+def build_quoted_string(value: str) -> str:
+ """
+ Format ``value`` as a quoted string.
+
+ This is the reverse of :func:`parse_quoted_string`.
+
+ """
+ match = _quotable_re.fullmatch(value)
+ if match is None:
+ raise ValueError("invalid characters for quoted-string encoding")
+ return '"' + _quote_re.sub(r"\\\1", value) + '"'
+
+
+def parse_list(
+ parse_item: Callable[[str, int, str], Tuple[T, int]],
+ header: str,
+ pos: int,
+ header_name: str,
+) -> List[T]:
+ """
+ Parse a comma-separated list from ``header`` at the given position.
+
+ This is appropriate for parsing values with the following grammar:
+
+ 1#item
+
+ ``parse_item`` parses one item.
+
+ ``header`` is assumed not to start or end with whitespace.
+
+ (This function is designed for parsing an entire header value and
+ :func:`~websockets.http.read_headers` strips whitespace from values.)
+
+ Return a list of items.
+
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ # Per https://tools.ietf.org/html/rfc7230#section-7, "a recipient MUST
+ # parse and ignore a reasonable number of empty list elements"; hence
+ # while loops that remove extra delimiters.
+
+ # Remove extra delimiters before the first item.
+ while peek_ahead(header, pos) == ",":
+ pos = parse_OWS(header, pos + 1)
+
+ items = []
+ while True:
+        # Loop invariant: an item starts at pos in header.
+ item, pos = parse_item(header, pos, header_name)
+ items.append(item)
+ pos = parse_OWS(header, pos)
+
+ # We may have reached the end of the header.
+ if pos == len(header):
+ break
+
+ # There must be a delimiter after each element except the last one.
+ if peek_ahead(header, pos) == ",":
+ pos = parse_OWS(header, pos + 1)
+ else:
+ raise InvalidHeaderFormat(header_name, "expected comma", header, pos)
+
+ # Remove extra delimiters before the next item.
+ while peek_ahead(header, pos) == ",":
+ pos = parse_OWS(header, pos + 1)
+
+ # We may have reached the end of the header.
+ if pos == len(header):
+ break
+
+ # Since we only advance in the header by one character with peek_ahead()
+ # or with the end position of a regex match, we can't overshoot the end.
+ assert pos == len(header)
+
+ return items
+
+
+def parse_connection_option(
+ header: str, pos: int, header_name: str
+) -> Tuple[ConnectionOption, int]:
+ """
+ Parse a Connection option from ``header`` at the given position.
+
+ Return the protocol value and the new position.
+
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ item, pos = parse_token(header, pos, header_name)
+ return cast(ConnectionOption, item), pos
+
+
+def parse_connection(header: str) -> List[ConnectionOption]:
+ """
+ Parse a ``Connection`` header.
+
+ Return a list of HTTP connection options.
+
+ :param header: value of the ``Connection`` header
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ return parse_list(parse_connection_option, header, 0, "Connection")
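Per the RFC 7230 note in :func:`parse_list`, empty list elements are tolerated; for example::

    from websockets.headers import parse_connection

    assert parse_connection("keep-alive, , Upgrade") == ["keep-alive", "Upgrade"]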
+
+
+_protocol_re = re.compile(
+ r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+(?:/[-!#$%&\'*+.^_`|~0-9a-zA-Z]+)?"
+)
+
+
+def parse_upgrade_protocol(
+ header: str, pos: int, header_name: str
+) -> Tuple[UpgradeProtocol, int]:
+ """
+ Parse an Upgrade protocol from ``header`` at the given position.
+
+ Return the protocol value and the new position.
+
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ match = _protocol_re.match(header, pos)
+ if match is None:
+ raise InvalidHeaderFormat(header_name, "expected protocol", header, pos)
+ return cast(UpgradeProtocol, match.group()), match.end()
+
+
+def parse_upgrade(header: str) -> List[UpgradeProtocol]:
+ """
+ Parse an ``Upgrade`` header.
+
+ Return a list of HTTP protocols.
+
+ :param header: value of the ``Upgrade`` header
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ return parse_list(parse_upgrade_protocol, header, 0, "Upgrade")
+
+
+def parse_extension_item_param(
+ header: str, pos: int, header_name: str
+) -> Tuple[ExtensionParameter, int]:
+ """
+ Parse a single extension parameter from ``header`` at the given position.
+
+ Return a ``(name, value)`` pair and the new position.
+
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ # Extract parameter name.
+ name, pos = parse_token(header, pos, header_name)
+ pos = parse_OWS(header, pos)
+ # Extract parameter value, if there is one.
+ value: Optional[str] = None
+ if peek_ahead(header, pos) == "=":
+ pos = parse_OWS(header, pos + 1)
+ if peek_ahead(header, pos) == '"':
+ pos_before = pos # for proper error reporting below
+ value, pos = parse_quoted_string(header, pos, header_name)
+ # https://tools.ietf.org/html/rfc6455#section-9.1 says: the value
+ # after quoted-string unescaping MUST conform to the 'token' ABNF.
+ if _token_re.fullmatch(value) is None:
+ raise InvalidHeaderFormat(
+ header_name, "invalid quoted header content", header, pos_before
+ )
+ else:
+ value, pos = parse_token(header, pos, header_name)
+ pos = parse_OWS(header, pos)
+
+ return (name, value), pos
+
+
+def parse_extension_item(
+ header: str, pos: int, header_name: str
+) -> Tuple[ExtensionHeader, int]:
+ """
+ Parse an extension definition from ``header`` at the given position.
+
+ Return an ``(extension name, parameters)`` pair, where ``parameters`` is a
+ list of ``(name, value)`` pairs, and the new position.
+
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ # Extract extension name.
+ name, pos = parse_token(header, pos, header_name)
+ pos = parse_OWS(header, pos)
+ # Extract all parameters.
+ parameters = []
+ while peek_ahead(header, pos) == ";":
+ pos = parse_OWS(header, pos + 1)
+ parameter, pos = parse_extension_item_param(header, pos, header_name)
+ parameters.append(parameter)
+ return (cast(ExtensionName, name), parameters), pos
+
+
+def parse_extension(header: str) -> List[ExtensionHeader]:
+ """
+ Parse a ``Sec-WebSocket-Extensions`` header.
+
+ Return a list of WebSocket extensions and their parameters in this format::
+
+ [
+ (
+ 'extension name',
+ [
+ ('parameter name', 'parameter value'),
+ ....
+ ]
+ ),
+ ...
+ ]
+
+ Parameter values are ``None`` when no value is provided.
+
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ return parse_list(parse_extension_item, header, 0, "Sec-WebSocket-Extensions")
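A concrete example of the structure documented above (the extension names and parameters are made up)::

    from websockets.headers import parse_extension

    header = 'permessage-deflate; client_max_window_bits, x-custom; a=1; b="2"'
    assert parse_extension(header) == [
        ("permessage-deflate", [("client_max_window_bits", None)]),
        ("x-custom", [("a", "1"), ("b", "2")]),
    ]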
+
+
+parse_extension_list = parse_extension # alias for backwards compatibility
+
+
+def build_extension_item(
+ name: ExtensionName, parameters: List[ExtensionParameter]
+) -> str:
+ """
+ Build an extension definition.
+
+ This is the reverse of :func:`parse_extension_item`.
+
+ """
+ return "; ".join(
+ [cast(str, name)]
+ + [
+ # Quoted strings aren't necessary because values are always tokens.
+ name if value is None else f"{name}={value}"
+ for name, value in parameters
+ ]
+ )
+
+
+def build_extension(extensions: Sequence[ExtensionHeader]) -> str:
+ """
+ Build a ``Sec-WebSocket-Extensions`` header.
+
+ This is the reverse of :func:`parse_extension`.
+
+ """
+ return ", ".join(
+ build_extension_item(name, parameters) for name, parameters in extensions
+ )
+
+
+build_extension_list = build_extension # alias for backwards compatibility
+
+
+def parse_subprotocol_item(
+ header: str, pos: int, header_name: str
+) -> Tuple[Subprotocol, int]:
+ """
+ Parse a subprotocol from ``header`` at the given position.
+
+ Return the subprotocol value and the new position.
+
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ item, pos = parse_token(header, pos, header_name)
+ return cast(Subprotocol, item), pos
+
+
+def parse_subprotocol(header: str) -> List[Subprotocol]:
+ """
+ Parse a ``Sec-WebSocket-Protocol`` header.
+
+ Return a list of WebSocket subprotocols.
+
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ return parse_list(parse_subprotocol_item, header, 0, "Sec-WebSocket-Protocol")
+
+
+parse_subprotocol_list = parse_subprotocol # alias for backwards compatibility
+
+
+def build_subprotocol(protocols: Sequence[Subprotocol]) -> str:
+ """
+ Build a ``Sec-WebSocket-Protocol`` header.
+
+ This is the reverse of :func:`parse_subprotocol`.
+
+ """
+ return ", ".join(protocols)
+
+
+build_subprotocol_list = build_subprotocol # alias for backwards compatibility
+
+
+def build_www_authenticate_basic(realm: str) -> str:
+ """
+ Build a ``WWW-Authenticate`` header for HTTP Basic Auth.
+
+ :param realm: authentication realm
+
+ """
+ # https://tools.ietf.org/html/rfc7617#section-2
+ realm = build_quoted_string(realm)
+ charset = build_quoted_string("UTF-8")
+ return f"Basic realm={realm}, charset={charset}"
+
+
+_token68_re = re.compile(r"[A-Za-z0-9-._~+/]+=*")
+
+
+def parse_token68(header: str, pos: int, header_name: str) -> Tuple[str, int]:
+ """
+ Parse a token68 from ``header`` at the given position.
+
+ Return the token value and the new position.
+
+ :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs.
+
+ """
+ match = _token68_re.match(header, pos)
+ if match is None:
+ raise InvalidHeaderFormat(header_name, "expected token68", header, pos)
+ return match.group(), match.end()
+
+
+def parse_end(header: str, pos: int, header_name: str) -> None:
+ """
+ Check that parsing reached the end of header.
+
+ """
+ if pos < len(header):
+ raise InvalidHeaderFormat(header_name, "trailing data", header, pos)
+
+
+def parse_authorization_basic(header: str) -> Tuple[str, str]:
+ """
+ Parse an ``Authorization`` header for HTTP Basic Auth.
+
+ Return a ``(username, password)`` tuple.
+
+ :param header: value of the ``Authorization`` header
+ :raises InvalidHeaderFormat: on invalid inputs
+ :raises InvalidHeaderValue: on unsupported inputs
+
+ """
+ # https://tools.ietf.org/html/rfc7235#section-2.1
+ # https://tools.ietf.org/html/rfc7617#section-2
+ scheme, pos = parse_token(header, 0, "Authorization")
+ if scheme.lower() != "basic":
+ raise InvalidHeaderValue("Authorization", f"unsupported scheme: {scheme}")
+ if peek_ahead(header, pos) != " ":
+ raise InvalidHeaderFormat(
+ "Authorization", "expected space after scheme", header, pos
+ )
+ pos += 1
+ basic_credentials, pos = parse_token68(header, pos, "Authorization")
+ parse_end(header, pos, "Authorization")
+
+ try:
+ user_pass = base64.b64decode(basic_credentials.encode()).decode()
+ except binascii.Error:
+ raise InvalidHeaderValue(
+ "Authorization", "expected base64-encoded credentials"
+ ) from None
+ try:
+ username, password = user_pass.split(":", 1)
+ except ValueError:
+ raise InvalidHeaderValue(
+ "Authorization", "expected username:password credentials"
+ ) from None
+
+ return username, password
+
+
+def build_authorization_basic(username: str, password: str) -> str:
+ """
+ Build an ``Authorization`` header for HTTP Basic Auth.
+
+ This is the reverse of :func:`parse_authorization_basic`.
+
+ """
+ # https://tools.ietf.org/html/rfc7617#section-2
+ assert ":" not in username
+ user_pass = f"{username}:{password}"
+ basic_credentials = base64.b64encode(user_pass.encode()).decode()
+ return "Basic " + basic_credentials
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/http.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/http.py
new file mode 100644
index 0000000000..ba6d274bf1
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/http.py
@@ -0,0 +1,360 @@
+"""
+:mod:`websockets.http` provides basic HTTP/1.1 support. It is merely
+adequate for WebSocket handshake messages.
+
+These APIs cannot be imported from :mod:`websockets`. They must be imported
+from :mod:`websockets.http`.
+
+"""
+
+import asyncio
+import re
+import sys
+from typing import (
+ Any,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ MutableMapping,
+ Tuple,
+ Union,
+)
+
+from .version import version as websockets_version
+
+
+__all__ = [
+ "read_request",
+ "read_response",
+ "Headers",
+ "MultipleValuesError",
+ "USER_AGENT",
+]
+
+MAX_HEADERS = 256
+MAX_LINE = 4096
+
+USER_AGENT = f"Python/{sys.version[:3]} websockets/{websockets_version}"
+
+
+def d(value: bytes) -> str:
+ """
+ Decode a bytestring for interpolating into an error message.
+
+ """
+ return value.decode(errors="backslashreplace")
+
+
+# See https://tools.ietf.org/html/rfc7230#appendix-B.
+
+# Regex for validating header names.
+
+_token_re = re.compile(rb"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+")
+
+# Regex for validating header values.
+
+# We don't attempt to support obsolete line folding.
+
+# Include HTAB (\x09), SP (\x20), VCHAR (\x21-\x7e), obs-text (\x80-\xff).
+
+# The ABNF is complicated because it attempts to express that optional
+# whitespace is ignored. We strip whitespace and don't revalidate that.
+
+# See also https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
+
+_value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*")
+
+
+async def read_request(stream: asyncio.StreamReader) -> Tuple[str, "Headers"]:
+ """
+ Read an HTTP/1.1 GET request and return ``(path, headers)``.
+
+ ``path`` isn't URL-decoded or validated in any way.
+
+ ``path`` and ``headers`` are expected to contain only ASCII characters.
+ Other characters are represented with surrogate escapes.
+
+ :func:`read_request` doesn't attempt to read the request body because
+ WebSocket handshake requests don't have one. If the request contains a
+ body, it may be read from ``stream`` after this coroutine returns.
+
+ :param stream: input to read the request from
+ :raises EOFError: if the connection is closed without a full HTTP request
+ :raises SecurityError: if the request exceeds a security limit
+ :raises ValueError: if the request isn't well formatted
+
+ """
+ # https://tools.ietf.org/html/rfc7230#section-3.1.1
+
+ # Parsing is simple because fixed values are expected for method and
+ # version and because path isn't checked. Since WebSocket software tends
+ # to implement HTTP/1.1 strictly, there's little need for lenient parsing.
+
+ try:
+ request_line = await read_line(stream)
+ except EOFError as exc:
+ raise EOFError("connection closed while reading HTTP request line") from exc
+
+ try:
+ method, raw_path, version = request_line.split(b" ", 2)
+ except ValueError: # not enough values to unpack (expected 3, got 1-2)
+ raise ValueError(f"invalid HTTP request line: {d(request_line)}") from None
+
+ if method != b"GET":
+ raise ValueError(f"unsupported HTTP method: {d(method)}")
+ if version != b"HTTP/1.1":
+ raise ValueError(f"unsupported HTTP version: {d(version)}")
+ path = raw_path.decode("ascii", "surrogateescape")
+
+ headers = await read_headers(stream)
+
+ return path, headers
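A sketch of feeding a canned request through :func:`read_request` with a standalone :class:`~asyncio.StreamReader` (the request bytes and host name are illustrative)::

    import asyncio

    from websockets.http import read_request

    async def demo() -> None:
        stream = asyncio.StreamReader()
        stream.feed_data(
            b"GET /chat HTTP/1.1\r\n"
            b"Host: example.test\r\n"
            b"Upgrade: websocket\r\n"
            b"\r\n"
        )
        stream.feed_eof()
        path, headers = await read_request(stream)
        assert path == "/chat"
        assert headers["Upgrade"] == "websocket"

    asyncio.run(demo())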
+
+
+async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, "Headers"]:
+ """
+ Read an HTTP/1.1 response and return ``(status_code, reason, headers)``.
+
+ ``reason`` and ``headers`` are expected to contain only ASCII characters.
+ Other characters are represented with surrogate escapes.
+
+    :func:`read_response` doesn't attempt to read the response body because
+ WebSocket handshake responses don't have one. If the response contains a
+ body, it may be read from ``stream`` after this coroutine returns.
+
+ :param stream: input to read the response from
+ :raises EOFError: if the connection is closed without a full HTTP response
+ :raises SecurityError: if the response exceeds a security limit
+ :raises ValueError: if the response isn't well formatted
+
+ """
+ # https://tools.ietf.org/html/rfc7230#section-3.1.2
+
+ # As in read_request, parsing is simple because a fixed value is expected
+ # for version, status_code is a 3-digit number, and reason can be ignored.
+
+ try:
+ status_line = await read_line(stream)
+ except EOFError as exc:
+ raise EOFError("connection closed while reading HTTP status line") from exc
+
+ try:
+ version, raw_status_code, raw_reason = status_line.split(b" ", 2)
+ except ValueError: # not enough values to unpack (expected 3, got 1-2)
+ raise ValueError(f"invalid HTTP status line: {d(status_line)}") from None
+
+ if version != b"HTTP/1.1":
+ raise ValueError(f"unsupported HTTP version: {d(version)}")
+ try:
+ status_code = int(raw_status_code)
+ except ValueError: # invalid literal for int() with base 10
+ raise ValueError(f"invalid HTTP status code: {d(raw_status_code)}") from None
+ if not 100 <= status_code < 1000:
+ raise ValueError(f"unsupported HTTP status code: {d(raw_status_code)}")
+ if not _value_re.fullmatch(raw_reason):
+ raise ValueError(f"invalid HTTP reason phrase: {d(raw_reason)}")
+ reason = raw_reason.decode()
+
+ headers = await read_headers(stream)
+
+ return status_code, reason, headers
+
+
+async def read_headers(stream: asyncio.StreamReader) -> "Headers":
+ """
+ Read HTTP headers from ``stream``.
+
+ Non-ASCII characters are represented with surrogate escapes.
+
+ """
+ # https://tools.ietf.org/html/rfc7230#section-3.2
+
+ # We don't attempt to support obsolete line folding.
+
+ headers = Headers()
+ for _ in range(MAX_HEADERS + 1):
+ try:
+ line = await read_line(stream)
+ except EOFError as exc:
+ raise EOFError("connection closed while reading HTTP headers") from exc
+ if line == b"":
+ break
+
+ try:
+ raw_name, raw_value = line.split(b":", 1)
+ except ValueError: # not enough values to unpack (expected 2, got 1)
+ raise ValueError(f"invalid HTTP header line: {d(line)}") from None
+ if not _token_re.fullmatch(raw_name):
+ raise ValueError(f"invalid HTTP header name: {d(raw_name)}")
+ raw_value = raw_value.strip(b" \t")
+ if not _value_re.fullmatch(raw_value):
+ raise ValueError(f"invalid HTTP header value: {d(raw_value)}")
+
+ name = raw_name.decode("ascii") # guaranteed to be ASCII at this point
+ value = raw_value.decode("ascii", "surrogateescape")
+ headers[name] = value
+
+ else:
+ raise websockets.exceptions.SecurityError("too many HTTP headers")
+
+ return headers
+
+
+async def read_line(stream: asyncio.StreamReader) -> bytes:
+ """
+ Read a single line from ``stream``.
+
+ CRLF is stripped from the return value.
+
+ """
+ # Security: this is bounded by the StreamReader's limit (default = 32 KiB).
+ line = await stream.readline()
+ # Security: this guarantees header values are small (hard-coded = 4 KiB)
+ if len(line) > MAX_LINE:
+ raise websockets.exceptions.SecurityError("line too long")
+ # Not mandatory but safe - https://tools.ietf.org/html/rfc7230#section-3.5
+ if not line.endswith(b"\r\n"):
+ raise EOFError("line without CRLF")
+ return line[:-2]
+
+
+class MultipleValuesError(LookupError):
+ """
+ Exception raised when :class:`Headers` has more than one value for a key.
+
+ """
+
+ def __str__(self) -> str:
+ # Implement the same logic as KeyError_str in Objects/exceptions.c.
+ if len(self.args) == 1:
+ return repr(self.args[0])
+ return super().__str__()
+
+
+class Headers(MutableMapping[str, str]):
+ """
+ Efficient data structure for manipulating HTTP headers.
+
+ A :class:`list` of ``(name, values)`` is inefficient for lookups.
+
+ A :class:`dict` doesn't suffice because header names are case-insensitive
+ and multiple occurrences of headers with the same name are possible.
+
+ :class:`Headers` stores HTTP headers in a hybrid data structure to provide
+ efficient insertions and lookups while preserving the original data.
+
+ In order to account for multiple values with minimal hassle,
+ :class:`Headers` follows this logic:
+
+ - When getting a header with ``headers[name]``:
+ - if there's no value, :exc:`KeyError` is raised;
+ - if there's exactly one value, it's returned;
+ - if there's more than one value, :exc:`MultipleValuesError` is raised.
+
+ - When setting a header with ``headers[name] = value``, the value is
+ appended to the list of values for that header.
+
+ - When deleting a header with ``del headers[name]``, all values for that
+ header are removed (this is slow).
+
+ Other methods for manipulating headers are consistent with this logic.
+
+ As long as no header occurs multiple times, :class:`Headers` behaves like
+ :class:`dict`, except keys are lower-cased to provide case-insensitivity.
+
+    Two methods support manipulating multiple values explicitly:
+
+ - :meth:`get_all` returns a list of all values for a header;
+ - :meth:`raw_items` returns an iterator of ``(name, values)`` pairs.
+
+ """
+
+ __slots__ = ["_dict", "_list"]
+
+ def __init__(self, *args: Any, **kwargs: str) -> None:
+ self._dict: Dict[str, List[str]] = {}
+ self._list: List[Tuple[str, str]] = []
+ # MutableMapping.update calls __setitem__ for each (name, value) pair.
+ self.update(*args, **kwargs)
+
+ def __str__(self) -> str:
+ return "".join(f"{key}: {value}\r\n" for key, value in self._list) + "\r\n"
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}({self._list!r})"
+
+ def copy(self) -> "Headers":
+ copy = self.__class__()
+ copy._dict = self._dict.copy()
+ copy._list = self._list.copy()
+ return copy
+
+ # Collection methods
+
+ def __contains__(self, key: object) -> bool:
+ return isinstance(key, str) and key.lower() in self._dict
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self._dict)
+
+ def __len__(self) -> int:
+ return len(self._dict)
+
+ # MutableMapping methods
+
+ def __getitem__(self, key: str) -> str:
+ value = self._dict[key.lower()]
+ if len(value) == 1:
+ return value[0]
+ else:
+ raise MultipleValuesError(key)
+
+ def __setitem__(self, key: str, value: str) -> None:
+ self._dict.setdefault(key.lower(), []).append(value)
+ self._list.append((key, value))
+
+ def __delitem__(self, key: str) -> None:
+ key_lower = key.lower()
+ self._dict.__delitem__(key_lower)
+        # This is inefficient. Fortunately deleting HTTP headers is uncommon.
+ self._list = [(k, v) for k, v in self._list if k.lower() != key_lower]
+
+ def __eq__(self, other: Any) -> bool:
+ if not isinstance(other, Headers):
+ return NotImplemented
+ return self._list == other._list
+
+ def clear(self) -> None:
+ """
+ Remove all headers.
+
+ """
+ self._dict = {}
+ self._list = []
+
+ # Methods for handling multiple values
+
+ def get_all(self, key: str) -> List[str]:
+ """
+ Return the (possibly empty) list of all values for a header.
+
+ :param key: header name
+
+ """
+ return self._dict.get(key.lower(), [])
+
+ def raw_items(self) -> Iterator[Tuple[str, str]]:
+ """
+ Return an iterator of all values as ``(name, value)`` pairs.
+
+ """
+ return iter(self._list)
+
+
+HeadersLike = Union[Headers, Mapping[str, str], Iterable[Tuple[str, str]]]
+
+
+# at the bottom to allow circular import, because AbortHandshake depends on HeadersLike
+import websockets.exceptions # isort:skip # noqa
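The multi-value behaviour described in the :class:`Headers` docstring, in a nutshell::

    from websockets.http import Headers, MultipleValuesError

    h = Headers()
    h["Set-Cookie"] = "a=1"
    h["set-cookie"] = "b=2"              # case-insensitive: same logical header
    assert h.get_all("Set-Cookie") == ["a=1", "b=2"]
    try:
        h["Set-Cookie"]                  # exactly-one lookup fails with two values
    except MultipleValuesError:
        pass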
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/protocol.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/protocol.py
new file mode 100644
index 0000000000..ede636d0db
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/protocol.py
@@ -0,0 +1,1429 @@
+"""
+:mod:`websockets.protocol` handles WebSocket control and data frames.
+
+See `sections 4 to 8 of RFC 6455`_.
+
+.. _sections 4 to 8 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-4
+
+"""
+
+import asyncio
+import codecs
+import collections
+import enum
+import logging
+import random
+import struct
+import sys
+import warnings
+from typing import (
+ Any,
+ AsyncIterable,
+ AsyncIterator,
+ Awaitable,
+ Deque,
+ Dict,
+ Iterable,
+ List,
+ Optional,
+ Union,
+ cast,
+)
+
+from .exceptions import (
+ ConnectionClosed,
+ ConnectionClosedError,
+ ConnectionClosedOK,
+ InvalidState,
+ PayloadTooBig,
+ ProtocolError,
+)
+from .extensions.base import Extension
+from .framing import *
+from .handshake import *
+from .http import Headers
+from .typing import Data
+
+
+__all__ = ["WebSocketCommonProtocol"]
+
+logger = logging.getLogger(__name__)
+
+
+# A WebSocket connection goes through the following four states, in order:
+
+
+class State(enum.IntEnum):
+ CONNECTING, OPEN, CLOSING, CLOSED = range(4)
+
+
+# In order to ensure consistency, the code always checks the current value of
+# WebSocketCommonProtocol.state before assigning a new value and never yields
+# between the check and the assignment.
+
+
+class WebSocketCommonProtocol(asyncio.Protocol):
+ """
+ :class:`~asyncio.Protocol` subclass implementing the data transfer phase.
+
+ Once the WebSocket connection is established, during the data transfer
+ phase, the protocol is almost symmetrical between the server side and the
+ client side. :class:`WebSocketCommonProtocol` implements logic that's
+    shared between servers and clients.
+
+ Subclasses such as :class:`~websockets.server.WebSocketServerProtocol` and
+ :class:`~websockets.client.WebSocketClientProtocol` implement the opening
+ handshake, which is different between servers and clients.
+
+ :class:`WebSocketCommonProtocol` performs four functions:
+
+ * It runs a task that stores incoming data frames in a queue and makes
+ them available with the :meth:`recv` coroutine.
+ * It sends outgoing data frames with the :meth:`send` coroutine.
+ * It deals with control frames automatically.
+ * It performs the closing handshake.
+
+ :class:`WebSocketCommonProtocol` supports asynchronous iteration::
+
+ async for message in websocket:
+ await process(message)
+
+ The iterator yields incoming messages. It exits normally when the
+ connection is closed with the close code 1000 (OK) or 1001 (going away).
+ It raises a :exc:`~websockets.exceptions.ConnectionClosedError` exception
+ when the connection is closed with any other code.
+
+ Once the connection is open, a `Ping frame`_ is sent every
+    ``ping_interval`` seconds. This serves as a keepalive. It helps keep
+ the connection open, especially in the presence of proxies with short
+ timeouts on inactive connections. Set ``ping_interval`` to ``None`` to
+ disable this behavior.
+
+ .. _Ping frame: https://tools.ietf.org/html/rfc6455#section-5.5.2
+
+ If the corresponding `Pong frame`_ isn't received within ``ping_timeout``
+ seconds, the connection is considered unusable and is closed with
+ code 1011. This ensures that the remote endpoint remains responsive. Set
+ ``ping_timeout`` to ``None`` to disable this behavior.
+
+ .. _Pong frame: https://tools.ietf.org/html/rfc6455#section-5.5.3
+
+ The ``close_timeout`` parameter defines a maximum wait time in seconds for
+ completing the closing handshake and terminating the TCP connection.
+ :meth:`close` completes in at most ``4 * close_timeout`` on the server
+ side and ``5 * close_timeout`` on the client side.
+
+ ``close_timeout`` needs to be a parameter of the protocol because
+ ``websockets`` usually calls :meth:`close` implicitly:
+
+ - on the server side, when the connection handler terminates,
+ - on the client side, when exiting the context manager for the connection.
+
+ To apply a timeout to any other API, wrap it in :func:`~asyncio.wait_for`.
+
+ The ``max_size`` parameter enforces the maximum size for incoming messages
+ in bytes. The default value is 1 MiB. ``None`` disables the limit. If a
+ message larger than the maximum size is received, :meth:`recv` will
+ raise :exc:`~websockets.exceptions.ConnectionClosedError` and the
+ connection will be closed with code 1009.
+
+ The ``max_queue`` parameter sets the maximum length of the queue that
+ holds incoming messages. The default value is ``32``. ``None`` disables
+ the limit. Messages are added to an in-memory queue when they're received;
+ then :meth:`recv` pops from that queue. In order to prevent excessive
+ memory consumption when messages are received faster than they can be
+ processed, the queue must be bounded. If the queue fills up, the protocol
+ stops processing incoming data until :meth:`recv` is called. In this
+ situation, various receive buffers (at least in ``asyncio`` and in the OS)
+ will fill up, then the TCP receive window will shrink, slowing down
+ transmission to avoid packet loss.
+
+ Since Python can use up to 4 bytes of memory to represent a single
+ character, each connection may use up to ``4 * max_size * max_queue``
+ bytes of memory to store incoming messages. By default, this is 128 MiB.
+ You may want to lower the limits, depending on your application's
+ requirements.
+
+ The ``read_limit`` argument sets the high-water limit of the buffer for
+ incoming bytes. The low-water limit is half the high-water limit. The
+ default value is 64 KiB, half of asyncio's default (based on the current
+ implementation of :class:`~asyncio.StreamReader`).
+
+ The ``write_limit`` argument sets the high-water limit of the buffer for
+ outgoing bytes. The low-water limit is a quarter of the high-water limit.
+ The default value is 64 KiB, equal to asyncio's default (based on the
+ current implementation of ``FlowControlMixin``).
+
+ As soon as the HTTP request and response in the opening handshake are
+ processed:
+
+ * the request path is available in the :attr:`path` attribute;
+ * the request and response HTTP headers are available in the
+ :attr:`request_headers` and :attr:`response_headers` attributes,
+ which are :class:`~websockets.http.Headers` instances.
+
+ If a subprotocol was negotiated, it's available in the :attr:`subprotocol`
+ attribute.
+
+ Once the connection is closed, the code is available in the
+ :attr:`close_code` attribute and the reason in :attr:`close_reason`.
+
+ All these attributes must be treated as read-only.
+
+ """
+
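+    # A minimal consumer sketch for the iteration behavior documented above
+    # (illustrative only; it assumes the high-level ``websockets.connect``
+    # client and a server listening at ws://localhost:8765):
+    #
+    #     import asyncio
+    #     import websockets
+    #
+    #     async def consume():
+    #         async with websockets.connect("ws://localhost:8765") as ws:
+    #             async for message in ws:
+    #                 print(message)
+    #
+    #     asyncio.get_event_loop().run_until_complete(consume())
+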
+ # There are only two differences between the client-side and server-side
+ # behavior: masking the payload and closing the underlying TCP connection.
+ # Set is_client = True/False and side = "client"/"server" to pick a side.
+ is_client: bool
+ side: str = "undefined"
+
+ def __init__(
+ self,
+ *,
+ ping_interval: Optional[float] = 20,
+ ping_timeout: Optional[float] = 20,
+ close_timeout: Optional[float] = None,
+ max_size: Optional[int] = 2 ** 20,
+ max_queue: Optional[int] = 2 ** 5,
+ read_limit: int = 2 ** 16,
+ write_limit: int = 2 ** 16,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ # The following arguments are kept only for backwards compatibility.
+ host: Optional[str] = None,
+ port: Optional[int] = None,
+ secure: Optional[bool] = None,
+ legacy_recv: bool = False,
+ timeout: Optional[float] = None,
+ ) -> None:
+ # Backwards compatibility: close_timeout used to be called timeout.
+ if timeout is None:
+ timeout = 10
+ else:
+ warnings.warn("rename timeout to close_timeout", DeprecationWarning)
+ # If both are specified, timeout is ignored.
+ if close_timeout is None:
+ close_timeout = timeout
+
+ self.ping_interval = ping_interval
+ self.ping_timeout = ping_timeout
+ self.close_timeout = close_timeout
+ self.max_size = max_size
+ self.max_queue = max_queue
+ self.read_limit = read_limit
+ self.write_limit = write_limit
+
+ if loop is None:
+ loop = asyncio.get_event_loop()
+ self.loop = loop
+
+ self._host = host
+ self._port = port
+ self._secure = secure
+ self.legacy_recv = legacy_recv
+
+ # Configure read buffer limits. The high-water limit is defined by
+ # ``self.read_limit``. The ``limit`` argument controls the line length
+ # limit and half the buffer limit of :class:`~asyncio.StreamReader`.
+ # That's why it must be set to half of ``self.read_limit``.
+ self.reader = asyncio.StreamReader(limit=read_limit // 2, loop=loop)
+
+ # Copied from asyncio.FlowControlMixin
+ self._paused = False
+ self._drain_waiter: Optional[asyncio.Future[None]] = None
+
+ self._drain_lock = asyncio.Lock(
+ **({"loop": loop} if sys.version_info[:2] < (3, 8) else {})
+ )
+
+ # This class implements the data transfer and closing handshake, which
+ # are shared between the client-side and the server-side.
+ # Subclasses implement the opening handshake and, on success, execute
+ # :meth:`connection_open` to change the state to OPEN.
+ self.state = State.CONNECTING
+ logger.debug("%s - state = CONNECTING", self.side)
+
+ # HTTP protocol parameters.
+ self.path: str
+ self.request_headers: Headers
+ self.response_headers: Headers
+
+ # WebSocket protocol parameters.
+ self.extensions: List[Extension] = []
+ self.subprotocol: Optional[str] = None
+
+ # The close code and reason are set when receiving a close frame or
+ # losing the TCP connection.
+ self.close_code: int
+ self.close_reason: str
+
+ # Completed when the connection state becomes CLOSED. Translates the
+ # :meth:`connection_lost` callback to a :class:`~asyncio.Future`
+ # that can be awaited. (Other :class:`~asyncio.Protocol` callbacks are
+ # translated by ``self.stream_reader``).
+ self.connection_lost_waiter: asyncio.Future[None] = loop.create_future()
+
+ # Queue of received messages.
+ self.messages: Deque[Data] = collections.deque()
+ self._pop_message_waiter: Optional[asyncio.Future[None]] = None
+ self._put_message_waiter: Optional[asyncio.Future[None]] = None
+
+ # Protect sending fragmented messages.
+ self._fragmented_message_waiter: Optional[asyncio.Future[None]] = None
+
+ # Mapping of ping IDs to waiters, in chronological order.
+ self.pings: Dict[bytes, asyncio.Future[None]] = {}
+
+ # Task running the data transfer.
+ self.transfer_data_task: asyncio.Task[None]
+
+ # Exception that occurred during data transfer, if any.
+ self.transfer_data_exc: Optional[BaseException] = None
+
+ # Task sending keepalive pings.
+ self.keepalive_ping_task: asyncio.Task[None]
+
+ # Task closing the TCP connection.
+ self.close_connection_task: asyncio.Task[None]
+
+ # Copied from asyncio.FlowControlMixin
+ async def _drain_helper(self) -> None: # pragma: no cover
+ if self.connection_lost_waiter.done():
+ raise ConnectionResetError("Connection lost")
+ if not self._paused:
+ return
+ waiter = self._drain_waiter
+ assert waiter is None or waiter.cancelled()
+ waiter = self.loop.create_future()
+ self._drain_waiter = waiter
+ await waiter
+
+ # Copied from asyncio.StreamWriter
+ async def _drain(self) -> None: # pragma: no cover
+ if self.reader is not None:
+ exc = self.reader.exception()
+ if exc is not None:
+ raise exc
+ if self.transport is not None:
+ if self.transport.is_closing():
+ # Yield to the event loop so connection_lost() may be
+ # called. Without this, _drain_helper() would return
+ # immediately, and code that calls
+ # write(...); yield from drain()
+ # in a loop would never call connection_lost(), so it
+ # would not see an error when the socket is closed.
+ await asyncio.sleep(
+ 0, **({"loop": self.loop} if sys.version_info[:2] < (3, 8) else {})
+ )
+ await self._drain_helper()
+
+ def connection_open(self) -> None:
+ """
+ Callback when the WebSocket opening handshake completes.
+
+ Enter the OPEN state and start the data transfer phase.
+
+ """
+ # 4.1. The WebSocket Connection is Established.
+ assert self.state is State.CONNECTING
+ self.state = State.OPEN
+ logger.debug("%s - state = OPEN", self.side)
+ # Start the task that receives incoming WebSocket messages.
+ self.transfer_data_task = self.loop.create_task(self.transfer_data())
+ # Start the task that sends pings at regular intervals.
+ self.keepalive_ping_task = self.loop.create_task(self.keepalive_ping())
+ # Start the task that eventually closes the TCP connection.
+ self.close_connection_task = self.loop.create_task(self.close_connection())
+
+ @property
+ def host(self) -> Optional[str]:
+ alternative = "remote_address" if self.is_client else "local_address"
+ warnings.warn(f"use {alternative}[0] instead of host", DeprecationWarning)
+ return self._host
+
+ @property
+ def port(self) -> Optional[int]:
+ alternative = "remote_address" if self.is_client else "local_address"
+ warnings.warn(f"use {alternative}[1] instead of port", DeprecationWarning)
+ return self._port
+
+ @property
+ def secure(self) -> Optional[bool]:
+        warnings.warn("don't use secure", DeprecationWarning)
+ return self._secure
+
+ # Public API
+
+ @property
+ def local_address(self) -> Any:
+ """
+ Local address of the connection.
+
+ This is a ``(host, port)`` tuple or ``None`` if the connection hasn't
+ been established yet.
+
+ """
+ try:
+ transport = self.transport
+ except AttributeError:
+ return None
+ else:
+ return transport.get_extra_info("sockname")
+
+ @property
+ def remote_address(self) -> Any:
+ """
+ Remote address of the connection.
+
+ This is a ``(host, port)`` tuple or ``None`` if the connection hasn't
+ been established yet.
+
+ """
+ try:
+ transport = self.transport
+ except AttributeError:
+ return None
+ else:
+ return transport.get_extra_info("peername")
+
+ @property
+ def open(self) -> bool:
+ """
+ ``True`` when the connection is usable.
+
+ It may be used to detect disconnections. However, this approach is
+ discouraged per the EAFP_ principle.
+
+ When ``open`` is ``False``, using the connection raises a
+ :exc:`~websockets.exceptions.ConnectionClosed` exception.
+
+ .. _EAFP: https://docs.python.org/3/glossary.html#term-eafp
+
+ """
+ return self.state is State.OPEN and not self.transfer_data_task.done()
+
+ @property
+ def closed(self) -> bool:
+ """
+ ``True`` once the connection is closed.
+
+ Be aware that both :attr:`open` and :attr:`closed` are ``False`` during
+ the opening and closing sequences.
+
+ """
+ return self.state is State.CLOSED
+
+ async def wait_closed(self) -> None:
+ """
+ Wait until the connection is closed.
+
+ This is identical to :attr:`closed`, except it can be awaited.
+
+ This can make it easier to handle connection termination, regardless
+ of its cause, in tasks that interact with the WebSocket connection.
+
+ """
+ await asyncio.shield(self.connection_lost_waiter)
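+
+    # A sketch of the pattern described above (illustrative only; ``ws`` is an
+    # open connection and ``cleanup`` is a hypothetical application hook):
+    #
+    #     async def watch_connection(ws):
+    #         await ws.wait_closed()
+    #         cleanup()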
+
+ async def __aiter__(self) -> AsyncIterator[Data]:
+ """
+ Iterate on received messages.
+
+ Exit normally when the connection is closed with code 1000 or 1001.
+
+ Raise an exception in other cases.
+
+ """
+ try:
+ while True:
+ yield await self.recv()
+ except ConnectionClosedOK:
+ return
+
+ async def recv(self) -> Data:
+ """
+ Receive the next message.
+
+ Return a :class:`str` for a text frame and :class:`bytes` for a binary
+ frame.
+
+ When the end of the message stream is reached, :meth:`recv` raises
+ :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it
+ raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal
+ connection closure and
+ :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol
+ error or a network failure.
+
+ .. versionchanged:: 3.0
+
+ :meth:`recv` used to return ``None`` instead. Refer to the
+ changelog for details.
+
+ Canceling :meth:`recv` is safe. There's no risk of losing the next
+ message. The next invocation of :meth:`recv` will return it. This
+ makes it possible to enforce a timeout by wrapping :meth:`recv` in
+ :func:`~asyncio.wait_for`.
+
+ :raises ~websockets.exceptions.ConnectionClosed: when the
+ connection is closed
+ :raises RuntimeError: if two coroutines call :meth:`recv` concurrently
+
+ """
+ if self._pop_message_waiter is not None:
+ raise RuntimeError(
+ "cannot call recv while another coroutine "
+ "is already waiting for the next message"
+ )
+
+ # Don't await self.ensure_open() here:
+ # - messages could be available in the queue even if the connection
+ # is closed;
+ # - messages could be received before the closing frame even if the
+ # connection is closing.
+
+ # Wait until there's a message in the queue (if necessary) or the
+ # connection is closed.
+ while len(self.messages) <= 0:
+ pop_message_waiter: asyncio.Future[None] = self.loop.create_future()
+ self._pop_message_waiter = pop_message_waiter
+ try:
+ # If asyncio.wait() is canceled, it doesn't cancel
+ # pop_message_waiter and self.transfer_data_task.
+ await asyncio.wait(
+ [pop_message_waiter, self.transfer_data_task],
+ **({"loop": self.loop} if sys.version_info[:2] < (3, 8) else {}),
+ return_when=asyncio.FIRST_COMPLETED,
+ )
+ finally:
+ self._pop_message_waiter = None
+
+ # If asyncio.wait(...) exited because self.transfer_data_task
+ # completed before receiving a new message, raise a suitable
+ # exception (or return None if legacy_recv is enabled).
+ if not pop_message_waiter.done():
+ if self.legacy_recv:
+ return None # type: ignore
+ else:
+ # Wait until the connection is closed to raise
+ # ConnectionClosed with the correct code and reason.
+ await self.ensure_open()
+
+ # Pop a message from the queue.
+ message = self.messages.popleft()
+
+ # Notify transfer_data().
+ if self._put_message_waiter is not None:
+ self._put_message_waiter.set_result(None)
+ self._put_message_waiter = None
+
+ return message
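+
+    # A sketch of the timeout pattern suggested in the docstring above
+    # (illustrative only; ``ws`` stands for an open connection):
+    #
+    #     try:
+    #         message = await asyncio.wait_for(ws.recv(), timeout=10)
+    #     except asyncio.TimeoutError:
+    #         pass  # no message within 10 seconds; recv() may safely be retried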
+
+ async def send(
+ self, message: Union[Data, Iterable[Data], AsyncIterable[Data]]
+ ) -> None:
+ """
+ Send a message.
+
+ A string (:class:`str`) is sent as a `Text frame`_. A bytestring or
+ bytes-like object (:class:`bytes`, :class:`bytearray`, or
+ :class:`memoryview`) is sent as a `Binary frame`_.
+
+ .. _Text frame: https://tools.ietf.org/html/rfc6455#section-5.6
+ .. _Binary frame: https://tools.ietf.org/html/rfc6455#section-5.6
+
+ :meth:`send` also accepts an iterable or an asynchronous iterable of
+ strings, bytestrings, or bytes-like objects. In that case the message
+ is fragmented. Each item is treated as a message fragment and sent in
+ its own frame. All items must be of the same type, or else
+ :meth:`send` will raise a :exc:`TypeError` and the connection will be
+ closed.
+
+ Canceling :meth:`send` is discouraged. Instead, you should close the
+        connection with :meth:`close`. Indeed, there are only two situations where
+ :meth:`send` yields control to the event loop:
+
+ 1. The write buffer is full. If you don't want to wait until enough
+ data is sent, your only alternative is to close the connection.
+ :meth:`close` will likely time out then abort the TCP connection.
+ 2. ``message`` is an asynchronous iterator. Stopping in the middle of
+ a fragmented message will cause a protocol error. Closing the
+ connection has the same effect.
+
+ :raises TypeError: for unsupported inputs
+
+ """
+ await self.ensure_open()
+
+ # While sending a fragmented message, prevent sending other messages
+ # until all fragments are sent.
+ while self._fragmented_message_waiter is not None:
+ await asyncio.shield(self._fragmented_message_waiter)
+
+ # Unfragmented message -- this case must be handled first because
+ # strings and bytes-like objects are iterable.
+
+ if isinstance(message, (str, bytes, bytearray, memoryview)):
+ opcode, data = prepare_data(message)
+ await self.write_frame(True, opcode, data)
+
+ # Fragmented message -- regular iterator.
+
+ elif isinstance(message, Iterable):
+
+ # Work around https://github.com/python/mypy/issues/6227
+ message = cast(Iterable[Data], message)
+
+ iter_message = iter(message)
+ try:
+ message_chunk = next(iter_message)
+ except StopIteration:
+ return
+ opcode, data = prepare_data(message_chunk)
+
+ self._fragmented_message_waiter = asyncio.Future()
+ try:
+ # First fragment.
+ await self.write_frame(False, opcode, data)
+
+ # Other fragments.
+ for message_chunk in iter_message:
+ confirm_opcode, data = prepare_data(message_chunk)
+ if confirm_opcode != opcode:
+ raise TypeError("data contains inconsistent types")
+ await self.write_frame(False, OP_CONT, data)
+
+ # Final fragment.
+ await self.write_frame(True, OP_CONT, b"")
+
+ except Exception:
+ # We're half-way through a fragmented message and we can't
+ # complete it. This makes the connection unusable.
+ self.fail_connection(1011)
+ raise
+
+ finally:
+ self._fragmented_message_waiter.set_result(None)
+ self._fragmented_message_waiter = None
+
+ # Fragmented message -- asynchronous iterator
+
+ elif isinstance(message, AsyncIterable):
+ # aiter_message = aiter(message) without aiter
+ # https://github.com/python/mypy/issues/5738
+ aiter_message = type(message).__aiter__(message) # type: ignore
+ try:
+ # message_chunk = anext(aiter_message) without anext
+ # https://github.com/python/mypy/issues/5738
+ message_chunk = await type(aiter_message).__anext__( # type: ignore
+ aiter_message
+ )
+ except StopAsyncIteration:
+ return
+ opcode, data = prepare_data(message_chunk)
+
+ self._fragmented_message_waiter = asyncio.Future()
+ try:
+ # First fragment.
+ await self.write_frame(False, opcode, data)
+
+ # Other fragments.
+ # https://github.com/python/mypy/issues/5738
+ async for message_chunk in aiter_message: # type: ignore
+ confirm_opcode, data = prepare_data(message_chunk)
+ if confirm_opcode != opcode:
+ raise TypeError("data contains inconsistent types")
+ await self.write_frame(False, OP_CONT, data)
+
+ # Final fragment.
+ await self.write_frame(True, OP_CONT, b"")
+
+ except Exception:
+ # We're half-way through a fragmented message and we can't
+ # complete it. This makes the connection unusable.
+ self.fail_connection(1011)
+ raise
+
+ finally:
+ self._fragmented_message_waiter.set_result(None)
+ self._fragmented_message_waiter = None
+
+ else:
+ raise TypeError("data must be bytes, str, or iterable")
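+
+    # Fragmentation sketches for the iterable forms documented above
+    # (illustrative only; ``ws`` stands for an open connection):
+    #
+    #     # each item becomes one fragment of a single message
+    #     await ws.send(["chunk 1", "chunk 2", "chunk 3"])
+    #
+    #     async def chunks():
+    #         for part in ("chunk 1", "chunk 2", "chunk 3"):
+    #             yield part
+    #
+    #     await ws.send(chunks())  # asynchronous iterable, fragmented the same way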
+
+ async def close(self, code: int = 1000, reason: str = "") -> None:
+ """
+ Perform the closing handshake.
+
+ :meth:`close` waits for the other end to complete the handshake and
+ for the TCP connection to terminate. As a consequence, there's no need
+ to await :meth:`wait_closed`; :meth:`close` already does it.
+
+ :meth:`close` is idempotent: it doesn't do anything once the
+ connection is closed.
+
+ Wrapping :func:`close` in :func:`~asyncio.create_task` is safe, given
+ that errors during connection termination aren't particularly useful.
+
+ Canceling :meth:`close` is discouraged. If it takes too long, you can
+ set a shorter ``close_timeout``. If you don't want to wait, let the
+ Python process exit, then the OS will close the TCP connection.
+
+ :param code: WebSocket close code
+ :param reason: WebSocket close reason
+
+ """
+ try:
+ await asyncio.wait_for(
+ self.write_close_frame(serialize_close(code, reason)),
+ self.close_timeout,
+ **({"loop": self.loop} if sys.version_info[:2] < (3, 8) else {}),
+ )
+ except asyncio.TimeoutError:
+ # If the close frame cannot be sent because the send buffers
+ # are full, the closing handshake won't complete anyway.
+ # Fail the connection to shut down faster.
+ self.fail_connection()
+
+ # If no close frame is received within the timeout, wait_for() cancels
+ # the data transfer task and raises TimeoutError.
+
+ # If close() is called multiple times concurrently and one of these
+ # calls hits the timeout, the data transfer task will be cancelled.
+ # Other calls will receive a CancelledError here.
+
+ try:
+ # If close() is canceled during the wait, self.transfer_data_task
+ # is canceled before the timeout elapses.
+ await asyncio.wait_for(
+ self.transfer_data_task,
+ self.close_timeout,
+ **({"loop": self.loop} if sys.version_info[:2] < (3, 8) else {}),
+ )
+ except (asyncio.TimeoutError, asyncio.CancelledError):
+ pass
+
+ # Wait for the close connection task to close the TCP connection.
+ await asyncio.shield(self.close_connection_task)
+
+ async def ping(self, data: Optional[Data] = None) -> Awaitable[None]:
+ """
+ Send a ping.
+
+ Return a :class:`~asyncio.Future` which will be completed when the
+ corresponding pong is received and which you may ignore if you don't
+ want to wait.
+
+ A ping may serve as a keepalive or as a check that the remote endpoint
+ received all messages up to this point::
+
+ pong_waiter = await ws.ping()
+ await pong_waiter # only if you want to wait for the pong
+
+ By default, the ping contains four random bytes. This payload may be
+ overridden with the optional ``data`` argument which must be a string
+ (which will be encoded to UTF-8) or a bytes-like object.
+
+ Canceling :meth:`ping` is discouraged. If :meth:`ping` doesn't return
+ immediately, it means the write buffer is full. If you don't want to
+ wait, you should close the connection.
+
+ Canceling the :class:`~asyncio.Future` returned by :meth:`ping` has no
+ effect.
+
+ """
+ await self.ensure_open()
+
+ if data is not None:
+ data = encode_data(data)
+
+ # Protect against duplicates if a payload is explicitly set.
+ if data in self.pings:
+ raise ValueError("already waiting for a pong with the same data")
+
+ # Generate a unique random payload otherwise.
+ while data is None or data in self.pings:
+ data = struct.pack("!I", random.getrandbits(32))
+
+ self.pings[data] = self.loop.create_future()
+
+ await self.write_frame(True, OP_PING, data)
+
+ return asyncio.shield(self.pings[data])
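+
+    # A round-trip check building on the docstring above (illustrative only;
+    # ``ws`` stands for an open connection):
+    #
+    #     pong_waiter = await ws.ping()
+    #     await asyncio.wait_for(pong_waiter, timeout=5)  # TimeoutError if no pong arrives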
+
+ async def pong(self, data: Data = b"") -> None:
+ """
+ Send a pong.
+
+ An unsolicited pong may serve as a unidirectional heartbeat.
+
+ The payload may be set with the optional ``data`` argument which must
+ be a string (which will be encoded to UTF-8) or a bytes-like object.
+
+ Canceling :meth:`pong` is discouraged for the same reason as
+ :meth:`ping`.
+
+ """
+ await self.ensure_open()
+
+ data = encode_data(data)
+
+ await self.write_frame(True, OP_PONG, data)
+
+ # Private methods - no guarantees.
+
+ def connection_closed_exc(self) -> ConnectionClosed:
+ exception: ConnectionClosed
+ if self.close_code == 1000 or self.close_code == 1001:
+ exception = ConnectionClosedOK(self.close_code, self.close_reason)
+ else:
+ exception = ConnectionClosedError(self.close_code, self.close_reason)
+ # Chain to the exception that terminated data transfer, if any.
+ exception.__cause__ = self.transfer_data_exc
+ return exception
+
+ async def ensure_open(self) -> None:
+ """
+ Check that the WebSocket connection is open.
+
+ Raise :exc:`~websockets.exceptions.ConnectionClosed` if it isn't.
+
+ """
+ # Handle cases from most common to least common for performance.
+ if self.state is State.OPEN:
+ # If self.transfer_data_task exited without a closing handshake,
+ # self.close_connection_task may be closing the connection, going
+ # straight from OPEN to CLOSED.
+ if self.transfer_data_task.done():
+ await asyncio.shield(self.close_connection_task)
+ raise self.connection_closed_exc()
+ else:
+ return
+
+ if self.state is State.CLOSED:
+ raise self.connection_closed_exc()
+
+ if self.state is State.CLOSING:
+ # If we started the closing handshake, wait for its completion to
+ # get the proper close code and reason. self.close_connection_task
+ # will complete within 4 or 5 * close_timeout after close(). The
+ # CLOSING state also occurs when failing the connection. In that
+ # case self.close_connection_task will complete even faster.
+ await asyncio.shield(self.close_connection_task)
+ raise self.connection_closed_exc()
+
+ # Control may only reach this point in buggy third-party subclasses.
+ assert self.state is State.CONNECTING
+ raise InvalidState("WebSocket connection isn't established yet")
+
+ async def transfer_data(self) -> None:
+ """
+ Read incoming messages and put them in a queue.
+
+ This coroutine runs in a task until the closing handshake is started.
+
+ """
+ try:
+ while True:
+ message = await self.read_message()
+
+ # Exit the loop when receiving a close frame.
+ if message is None:
+ break
+
+ # Wait until there's room in the queue (if necessary).
+ if self.max_queue is not None:
+ while len(self.messages) >= self.max_queue:
+ self._put_message_waiter = self.loop.create_future()
+ try:
+ await asyncio.shield(self._put_message_waiter)
+ finally:
+ self._put_message_waiter = None
+
+ # Put the message in the queue.
+ self.messages.append(message)
+
+ # Notify recv().
+ if self._pop_message_waiter is not None:
+ self._pop_message_waiter.set_result(None)
+ self._pop_message_waiter = None
+
+ except asyncio.CancelledError as exc:
+ self.transfer_data_exc = exc
+ # If fail_connection() cancels this task, avoid logging the error
+ # twice and failing the connection again.
+ raise
+
+ except ProtocolError as exc:
+ self.transfer_data_exc = exc
+ self.fail_connection(1002)
+
+ except (ConnectionError, EOFError) as exc:
+ # Reading data with self.reader.readexactly may raise:
+ # - most subclasses of ConnectionError if the TCP connection
+ # breaks, is reset, or is aborted;
+ # - IncompleteReadError, a subclass of EOFError, if fewer
+ # bytes are available than requested.
+ self.transfer_data_exc = exc
+ self.fail_connection(1006)
+
+ except UnicodeDecodeError as exc:
+ self.transfer_data_exc = exc
+ self.fail_connection(1007)
+
+ except PayloadTooBig as exc:
+ self.transfer_data_exc = exc
+ self.fail_connection(1009)
+
+ except Exception as exc:
+ # This shouldn't happen often because exceptions expected under
+ # regular circumstances are handled above. If it does, consider
+ # catching and handling more exceptions.
+ logger.error("Error in data transfer", exc_info=True)
+
+ self.transfer_data_exc = exc
+ self.fail_connection(1011)
+
+ async def read_message(self) -> Optional[Data]:
+ """
+ Read a single message from the connection.
+
+ Re-assemble data frames if the message is fragmented.
+
+ Return ``None`` when the closing handshake is started.
+
+ """
+ frame = await self.read_data_frame(max_size=self.max_size)
+
+ # A close frame was received.
+ if frame is None:
+ return None
+
+ if frame.opcode == OP_TEXT:
+ text = True
+ elif frame.opcode == OP_BINARY:
+ text = False
+ else: # frame.opcode == OP_CONT
+ raise ProtocolError("unexpected opcode")
+
+ # Shortcut for the common case - no fragmentation
+ if frame.fin:
+ return frame.data.decode("utf-8") if text else frame.data
+
+ # 5.4. Fragmentation
+ chunks: List[Data] = []
+ max_size = self.max_size
+ if text:
+ decoder_factory = codecs.getincrementaldecoder("utf-8")
+ decoder = decoder_factory(errors="strict")
+ if max_size is None:
+
+ def append(frame: Frame) -> None:
+ nonlocal chunks
+ chunks.append(decoder.decode(frame.data, frame.fin))
+
+ else:
+
+ def append(frame: Frame) -> None:
+ nonlocal chunks, max_size
+ chunks.append(decoder.decode(frame.data, frame.fin))
+ assert isinstance(max_size, int)
+ max_size -= len(frame.data)
+
+ else:
+ if max_size is None:
+
+ def append(frame: Frame) -> None:
+ nonlocal chunks
+ chunks.append(frame.data)
+
+ else:
+
+ def append(frame: Frame) -> None:
+ nonlocal chunks, max_size
+ chunks.append(frame.data)
+ assert isinstance(max_size, int)
+ max_size -= len(frame.data)
+
+ append(frame)
+
+ while not frame.fin:
+ frame = await self.read_data_frame(max_size=max_size)
+ if frame is None:
+ raise ProtocolError("incomplete fragmented message")
+ if frame.opcode != OP_CONT:
+ raise ProtocolError("unexpected opcode")
+ append(frame)
+
+ # mypy cannot figure out that chunks have the proper type.
+ return ("" if text else b"").join(chunks) # type: ignore
+
+ async def read_data_frame(self, max_size: Optional[int]) -> Optional[Frame]:
+ """
+ Read a single data frame from the connection.
+
+ Process control frames received before the next data frame.
+
+ Return ``None`` if a close frame is encountered before any data frame.
+
+ """
+ # 6.2. Receiving Data
+ while True:
+ frame = await self.read_frame(max_size)
+
+ # 5.5. Control Frames
+ if frame.opcode == OP_CLOSE:
+ # 7.1.5. The WebSocket Connection Close Code
+ # 7.1.6. The WebSocket Connection Close Reason
+ self.close_code, self.close_reason = parse_close(frame.data)
+ try:
+ # Echo the original data instead of re-serializing it with
+ # serialize_close() because that fails when the close frame
+                    # is empty and parse_close() synthesizes a 1005 close code.
+ await self.write_close_frame(frame.data)
+ except ConnectionClosed:
+ # It doesn't really matter if the connection was closed
+ # before we could send back a close frame.
+ pass
+ return None
+
+ elif frame.opcode == OP_PING:
+ # Answer pings.
+ ping_hex = frame.data.hex() or "[empty]"
+ logger.debug(
+ "%s - received ping, sending pong: %s", self.side, ping_hex
+ )
+ await self.pong(frame.data)
+
+ elif frame.opcode == OP_PONG:
+ # Acknowledge pings on solicited pongs.
+ if frame.data in self.pings:
+ logger.debug(
+ "%s - received solicited pong: %s",
+ self.side,
+ frame.data.hex() or "[empty]",
+ )
+ # Acknowledge all pings up to the one matching this pong.
+ ping_id = None
+ ping_ids = []
+ for ping_id, ping in self.pings.items():
+ ping_ids.append(ping_id)
+ if not ping.done():
+ ping.set_result(None)
+ if ping_id == frame.data:
+ break
+ else: # pragma: no cover
+ assert False, "ping_id is in self.pings"
+ # Remove acknowledged pings from self.pings.
+ for ping_id in ping_ids:
+ del self.pings[ping_id]
+ ping_ids = ping_ids[:-1]
+ if ping_ids:
+ pings_hex = ", ".join(
+ ping_id.hex() or "[empty]" for ping_id in ping_ids
+ )
+ plural = "s" if len(ping_ids) > 1 else ""
+ logger.debug(
+ "%s - acknowledged previous ping%s: %s",
+ self.side,
+ plural,
+ pings_hex,
+ )
+ else:
+ logger.debug(
+ "%s - received unsolicited pong: %s",
+ self.side,
+ frame.data.hex() or "[empty]",
+ )
+
+ # 5.6. Data Frames
+ else:
+ return frame
+
+ async def read_frame(self, max_size: Optional[int]) -> Frame:
+ """
+ Read a single frame from the connection.
+
+ """
+ frame = await Frame.read(
+ self.reader.readexactly,
+ mask=not self.is_client,
+ max_size=max_size,
+ extensions=self.extensions,
+ )
+ logger.debug("%s < %r", self.side, frame)
+ return frame
+
+ async def write_frame(
+ self, fin: bool, opcode: int, data: bytes, *, _expected_state: int = State.OPEN
+ ) -> None:
+ # Defensive assertion for protocol compliance.
+ if self.state is not _expected_state: # pragma: no cover
+ raise InvalidState(
+ f"Cannot write to a WebSocket in the {self.state.name} state"
+ )
+
+ frame = Frame(fin, opcode, data)
+ logger.debug("%s > %r", self.side, frame)
+ frame.write(
+ self.transport.write, mask=self.is_client, extensions=self.extensions
+ )
+
+ try:
+ # drain() cannot be called concurrently by multiple coroutines:
+ # http://bugs.python.org/issue29930. Remove this lock when no
+            # version of Python where this bug exists is supported anymore.
+ async with self._drain_lock:
+ # Handle flow control automatically.
+ await self._drain()
+ except ConnectionError:
+ # Terminate the connection if the socket died.
+ self.fail_connection()
+ # Wait until the connection is closed to raise ConnectionClosed
+ # with the correct code and reason.
+ await self.ensure_open()
+
+ async def write_close_frame(self, data: bytes = b"") -> None:
+ """
+ Write a close frame if and only if the connection state is OPEN.
+
+ This dedicated coroutine must be used for writing close frames to
+ ensure that at most one close frame is sent on a given connection.
+
+ """
+ # Test and set the connection state before sending the close frame to
+ # avoid sending two frames in case of concurrent calls.
+ if self.state is State.OPEN:
+ # 7.1.3. The WebSocket Closing Handshake is Started
+ self.state = State.CLOSING
+ logger.debug("%s - state = CLOSING", self.side)
+
+ # 7.1.2. Start the WebSocket Closing Handshake
+ await self.write_frame(True, OP_CLOSE, data, _expected_state=State.CLOSING)
+
+ async def keepalive_ping(self) -> None:
+ """
+ Send a Ping frame and wait for a Pong frame at regular intervals.
+
+ This coroutine exits when the connection terminates and one of the
+ following happens:
+
+ - :meth:`ping` raises :exc:`ConnectionClosed`, or
+ - :meth:`close_connection` cancels :attr:`keepalive_ping_task`.
+
+ """
+ if self.ping_interval is None:
+ return
+
+ try:
+ while True:
+ await asyncio.sleep(
+ self.ping_interval,
+ **({"loop": self.loop} if sys.version_info[:2] < (3, 8) else {}),
+ )
+
+ # ping() raises CancelledError if the connection is closed,
+ # when close_connection() cancels self.keepalive_ping_task.
+
+ # ping() raises ConnectionClosed if the connection is lost,
+ # when connection_lost() calls abort_pings().
+
+ ping_waiter = await self.ping()
+
+ if self.ping_timeout is not None:
+ try:
+ await asyncio.wait_for(
+ ping_waiter,
+ self.ping_timeout,
+ **({"loop": self.loop} if sys.version_info[:2] < (3, 8) else {}),
+ )
+ except asyncio.TimeoutError:
+ logger.debug("%s ! timed out waiting for pong", self.side)
+ self.fail_connection(1011)
+ break
+
+ except asyncio.CancelledError:
+ raise
+
+ except ConnectionClosed:
+ pass
+
+ except Exception:
+ logger.warning("Unexpected exception in keepalive ping task", exc_info=True)
+
+ async def close_connection(self) -> None:
+ """
+ 7.1.1. Close the WebSocket Connection
+
+ When the opening handshake succeeds, :meth:`connection_open` starts
+ this coroutine in a task. It waits for the data transfer phase to
+ complete then it closes the TCP connection cleanly.
+
+ When the opening handshake fails, :meth:`fail_connection` does the
+ same. There's no data transfer phase in that case.
+
+ """
+ try:
+ # Wait for the data transfer phase to complete.
+ if hasattr(self, "transfer_data_task"):
+ try:
+ await self.transfer_data_task
+ except asyncio.CancelledError:
+ pass
+
+ # Cancel the keepalive ping task.
+ if hasattr(self, "keepalive_ping_task"):
+ self.keepalive_ping_task.cancel()
+
+ # A client should wait for a TCP close from the server.
+ if self.is_client and hasattr(self, "transfer_data_task"):
+ if await self.wait_for_connection_lost():
+ return
+ logger.debug("%s ! timed out waiting for TCP close", self.side)
+
+ # Half-close the TCP connection if possible (when there's no TLS).
+ if self.transport.can_write_eof():
+ logger.debug("%s x half-closing TCP connection", self.side)
+ self.transport.write_eof()
+
+ if await self.wait_for_connection_lost():
+ return
+ logger.debug("%s ! timed out waiting for TCP close", self.side)
+
+ finally:
+ # The try/finally ensures that the transport never remains open,
+ # even if this coroutine is canceled (for example).
+
+ # If connection_lost() was called, the TCP connection is closed.
+ # However, if TLS is enabled, the transport still needs closing.
+ # Else asyncio complains: ResourceWarning: unclosed transport.
+ if self.connection_lost_waiter.done() and self.transport.is_closing():
+ return
+
+ # Close the TCP connection. Buffers are flushed asynchronously.
+ logger.debug("%s x closing TCP connection", self.side)
+ self.transport.close()
+
+ if await self.wait_for_connection_lost():
+ return
+ logger.debug("%s ! timed out waiting for TCP close", self.side)
+
+ # Abort the TCP connection. Buffers are discarded.
+ logger.debug("%s x aborting TCP connection", self.side)
+ self.transport.abort()
+
+ # connection_lost() is called quickly after aborting.
+ await self.wait_for_connection_lost()
+
+ async def wait_for_connection_lost(self) -> bool:
+ """
+ Wait until the TCP connection is closed or ``self.close_timeout`` elapses.
+
+ Return ``True`` if the connection is closed and ``False`` otherwise.
+
+ """
+ if not self.connection_lost_waiter.done():
+ try:
+ await asyncio.wait_for(
+ asyncio.shield(self.connection_lost_waiter),
+ self.close_timeout,
+ **({"loop": self.loop} if sys.version_info[:2] < (3, 8) else {}),
+ )
+ except asyncio.TimeoutError:
+ pass
+ # Re-check self.connection_lost_waiter.done() synchronously because
+ # connection_lost() could run between the moment the timeout occurs
+ # and the moment this coroutine resumes running.
+ return self.connection_lost_waiter.done()
+
+ def fail_connection(self, code: int = 1006, reason: str = "") -> None:
+ """
+ 7.1.7. Fail the WebSocket Connection
+
+ This requires:
+
+ 1. Stopping all processing of incoming data, which means cancelling
+ :attr:`transfer_data_task`. The close code will be 1006 unless a
+ close frame was received earlier.
+
+ 2. Sending a close frame with an appropriate code if the opening
+ handshake succeeded and the other side is likely to process it.
+
+ 3. Closing the connection. :meth:`close_connection` takes care of
+ this once :attr:`transfer_data_task` exits after being canceled.
+
+ (The specification describes these steps in the opposite order.)
+
+ """
+ logger.debug(
+ "%s ! failing %s WebSocket connection with code %d",
+ self.side,
+ self.state.name,
+ code,
+ )
+
+ # Cancel transfer_data_task if the opening handshake succeeded.
+ # cancel() is idempotent and ignored if the task is done already.
+ if hasattr(self, "transfer_data_task"):
+ self.transfer_data_task.cancel()
+
+ # Send a close frame when the state is OPEN (a close frame was already
+ # sent if it's CLOSING), except when failing the connection because of
+ # an error reading from or writing to the network.
+ # Don't send a close frame if the connection is broken.
+ if code != 1006 and self.state is State.OPEN:
+
+ frame_data = serialize_close(code, reason)
+
+ # Write the close frame without draining the write buffer.
+
+ # Keeping fail_connection() synchronous guarantees it can't
+ # get stuck and simplifies the implementation of the callers.
+            # Not draining the write buffer is acceptable in this context.
+
+ # This duplicates a few lines of code from write_close_frame()
+ # and write_frame().
+
+ self.state = State.CLOSING
+ logger.debug("%s - state = CLOSING", self.side)
+
+ frame = Frame(True, OP_CLOSE, frame_data)
+ logger.debug("%s > %r", self.side, frame)
+ frame.write(
+ self.transport.write, mask=self.is_client, extensions=self.extensions
+ )
+
+ # Start close_connection_task if the opening handshake didn't succeed.
+ if not hasattr(self, "close_connection_task"):
+ self.close_connection_task = self.loop.create_task(self.close_connection())
+
+ def abort_pings(self) -> None:
+ """
+ Raise ConnectionClosed in pending keepalive pings.
+
+ They'll never receive a pong once the connection is closed.
+
+ """
+ assert self.state is State.CLOSED
+ exc = self.connection_closed_exc()
+
+ for ping in self.pings.values():
+ ping.set_exception(exc)
+ # If the exception is never retrieved, it will be logged when ping
+ # is garbage-collected. This is confusing for users.
+ # Given that ping is done (with an exception), canceling it does
+ # nothing, but it prevents logging the exception.
+ ping.cancel()
+
+ if self.pings:
+ pings_hex = ", ".join(ping_id.hex() or "[empty]" for ping_id in self.pings)
+ plural = "s" if len(self.pings) > 1 else ""
+ logger.debug(
+ "%s - aborted pending ping%s: %s", self.side, plural, pings_hex
+ )
+
+ # asyncio.Protocol methods
+
+ def connection_made(self, transport: asyncio.BaseTransport) -> None:
+ """
+ Configure write buffer limits.
+
+ The high-water limit is defined by ``self.write_limit``.
+
+ The low-water limit currently defaults to ``self.write_limit // 4`` in
+ :meth:`~asyncio.WriteTransport.set_write_buffer_limits`, which should
+ be all right for reasonable use cases of this library.
+
+ This is the earliest point where we can get hold of the transport,
+ which means it's the best point for configuring it.
+
+ """
+ logger.debug("%s - event = connection_made(%s)", self.side, transport)
+
+ transport = cast(asyncio.Transport, transport)
+ transport.set_write_buffer_limits(self.write_limit)
+ self.transport = transport
+
+ # Copied from asyncio.StreamReaderProtocol
+ self.reader.set_transport(transport)
+
+ def connection_lost(self, exc: Optional[Exception]) -> None:
+ """
+ 7.1.4. The WebSocket Connection is Closed.
+
+ """
+ logger.debug("%s - event = connection_lost(%s)", self.side, exc)
+ self.state = State.CLOSED
+ logger.debug("%s - state = CLOSED", self.side)
+ if not hasattr(self, "close_code"):
+ self.close_code = 1006
+ if not hasattr(self, "close_reason"):
+ self.close_reason = ""
+ logger.debug(
+ "%s x code = %d, reason = %s",
+ self.side,
+ self.close_code,
+ self.close_reason or "[no reason]",
+ )
+ self.abort_pings()
+ # If self.connection_lost_waiter isn't pending, that's a bug, because:
+ # - it's set only here in connection_lost() which is called only once;
+ # - it must never be canceled.
+ self.connection_lost_waiter.set_result(None)
+
+ if True: # pragma: no cover
+
+ # Copied from asyncio.StreamReaderProtocol
+ if self.reader is not None:
+ if exc is None:
+ self.reader.feed_eof()
+ else:
+ self.reader.set_exception(exc)
+
+ # Copied from asyncio.FlowControlMixin
+ # Wake up the writer if currently paused.
+ if not self._paused:
+ return
+ waiter = self._drain_waiter
+ if waiter is None:
+ return
+ self._drain_waiter = None
+ if waiter.done():
+ return
+ if exc is None:
+ waiter.set_result(None)
+ else:
+ waiter.set_exception(exc)
+
+ def pause_writing(self) -> None: # pragma: no cover
+ assert not self._paused
+ self._paused = True
+
+ def resume_writing(self) -> None: # pragma: no cover
+ assert self._paused
+ self._paused = False
+
+ waiter = self._drain_waiter
+ if waiter is not None:
+ self._drain_waiter = None
+ if not waiter.done():
+ waiter.set_result(None)
+
+ def data_received(self, data: bytes) -> None:
+ logger.debug("%s - event = data_received(<%d bytes>)", self.side, len(data))
+ self.reader.feed_data(data)
+
+ def eof_received(self) -> None:
+ """
+ Close the transport after receiving EOF.
+
+ The WebSocket protocol has its own closing handshake: endpoints close
+ the TCP or TLS connection after sending and receiving a close frame.
+
+ As a consequence, they never need to write after receiving EOF, so
+ there's no reason to keep the transport open by returning ``True``.
+
+ Besides, that doesn't work on TLS connections.
+
+ """
+ logger.debug("%s - event = eof_received()", self.side)
+ self.reader.feed_eof()
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/py.typed b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/py.typed
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/server.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/server.py
new file mode 100644
index 0000000000..0592083ef7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/server.py
@@ -0,0 +1,996 @@
+"""
+:mod:`websockets.server` defines the WebSocket server APIs.
+
+"""
+
+import asyncio
+import collections.abc
+import email.utils
+import functools
+import http
+import logging
+import socket
+import sys
+import warnings
+from types import TracebackType
+from typing import (
+ Any,
+ Awaitable,
+ Callable,
+ Generator,
+ List,
+ Optional,
+ Sequence,
+ Set,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+from .exceptions import (
+ AbortHandshake,
+ InvalidHandshake,
+ InvalidHeader,
+ InvalidMessage,
+ InvalidOrigin,
+ InvalidUpgrade,
+ NegotiationError,
+)
+from .extensions.base import Extension, ServerExtensionFactory
+from .extensions.permessage_deflate import ServerPerMessageDeflateFactory
+from .handshake import build_response, check_request
+from .headers import build_extension, parse_extension, parse_subprotocol
+from .http import USER_AGENT, Headers, HeadersLike, MultipleValuesError, read_request
+from .protocol import WebSocketCommonProtocol
+from .typing import ExtensionHeader, Origin, Subprotocol
+
+
+__all__ = ["serve", "unix_serve", "WebSocketServerProtocol", "WebSocketServer"]
+
+logger = logging.getLogger(__name__)
+
+
+HeadersLikeOrCallable = Union[HeadersLike, Callable[[str, Headers], HeadersLike]]
+
+HTTPResponse = Tuple[http.HTTPStatus, HeadersLike, bytes]
+
+
+class WebSocketServerProtocol(WebSocketCommonProtocol):
+ """
+ :class:`~asyncio.Protocol` subclass implementing a WebSocket server.
+
+ This class inherits most of its methods from
+ :class:`~websockets.protocol.WebSocketCommonProtocol`.
+
+ For the sake of simplicity, it doesn't rely on a full HTTP implementation.
+ Its support for HTTP responses is very limited.
+
+ """
+
+ is_client = False
+ side = "server"
+
+ def __init__(
+ self,
+ ws_handler: Callable[["WebSocketServerProtocol", str], Awaitable[Any]],
+ ws_server: "WebSocketServer",
+ *,
+ origins: Optional[Sequence[Optional[Origin]]] = None,
+ extensions: Optional[Sequence[ServerExtensionFactory]] = None,
+ subprotocols: Optional[Sequence[Subprotocol]] = None,
+ extra_headers: Optional[HeadersLikeOrCallable] = None,
+ process_request: Optional[
+ Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]]
+ ] = None,
+ select_subprotocol: Optional[
+ Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol]
+ ] = None,
+ **kwargs: Any,
+ ) -> None:
+ # For backwards compatibility with 6.0 or earlier.
+ if origins is not None and "" in origins:
+ warnings.warn("use None instead of '' in origins", DeprecationWarning)
+ origins = [None if origin == "" else origin for origin in origins]
+ self.ws_handler = ws_handler
+ self.ws_server = ws_server
+ self.origins = origins
+ self.available_extensions = extensions
+ self.available_subprotocols = subprotocols
+ self.extra_headers = extra_headers
+ self._process_request = process_request
+ self._select_subprotocol = select_subprotocol
+ super().__init__(**kwargs)
+
+ def connection_made(self, transport: asyncio.BaseTransport) -> None:
+ """
+ Register connection and initialize a task to handle it.
+
+ """
+ super().connection_made(transport)
+ # Register the connection with the server before creating the handler
+ # task. Registering at the beginning of the handler coroutine would
+ # create a race condition between the creation of the task, which
+ # schedules its execution, and the moment the handler starts running.
+ self.ws_server.register(self)
+ self.handler_task = self.loop.create_task(self.handler())
+
+ async def handler(self) -> None:
+ """
+ Handle the lifecycle of a WebSocket connection.
+
+ Since this method doesn't have a caller able to handle exceptions, it
+        attempts to log relevant ones and guarantees that the TCP connection is
+ closed before exiting.
+
+ """
+ try:
+
+ try:
+ path = await self.handshake(
+ origins=self.origins,
+ available_extensions=self.available_extensions,
+ available_subprotocols=self.available_subprotocols,
+ extra_headers=self.extra_headers,
+ )
+ except ConnectionError:
+ logger.debug("Connection error in opening handshake", exc_info=True)
+ raise
+ except Exception as exc:
+ if isinstance(exc, AbortHandshake):
+ status, headers, body = exc.status, exc.headers, exc.body
+ elif isinstance(exc, InvalidOrigin):
+ logger.debug("Invalid origin", exc_info=True)
+ status, headers, body = (
+ http.HTTPStatus.FORBIDDEN,
+ Headers(),
+ f"Failed to open a WebSocket connection: {exc}.\n".encode(),
+ )
+ elif isinstance(exc, InvalidUpgrade):
+ logger.debug("Invalid upgrade", exc_info=True)
+ status, headers, body = (
+ http.HTTPStatus.UPGRADE_REQUIRED,
+ Headers([("Upgrade", "websocket")]),
+ (
+ f"Failed to open a WebSocket connection: {exc}.\n"
+ f"\n"
+ f"You cannot access a WebSocket server directly "
+ f"with a browser. You need a WebSocket client.\n"
+ ).encode(),
+ )
+ elif isinstance(exc, InvalidHandshake):
+ logger.debug("Invalid handshake", exc_info=True)
+ status, headers, body = (
+ http.HTTPStatus.BAD_REQUEST,
+ Headers(),
+ f"Failed to open a WebSocket connection: {exc}.\n".encode(),
+ )
+ else:
+ logger.warning("Error in opening handshake", exc_info=True)
+ status, headers, body = (
+ http.HTTPStatus.INTERNAL_SERVER_ERROR,
+ Headers(),
+ (
+ b"Failed to open a WebSocket connection.\n"
+ b"See server log for more information.\n"
+ ),
+ )
+
+ headers.setdefault("Date", email.utils.formatdate(usegmt=True))
+ headers.setdefault("Server", USER_AGENT)
+ headers.setdefault("Content-Length", str(len(body)))
+ headers.setdefault("Content-Type", "text/plain")
+ headers.setdefault("Connection", "close")
+
+ self.write_http_response(status, headers, body)
+ self.fail_connection()
+ await self.wait_closed()
+ return
+
+ try:
+ await self.ws_handler(self, path)
+ except Exception:
+ logger.error("Error in connection handler", exc_info=True)
+ if not self.closed:
+ self.fail_connection(1011)
+ raise
+
+ try:
+ await self.close()
+ except ConnectionError:
+ logger.debug("Connection error in closing handshake", exc_info=True)
+ raise
+ except Exception:
+ logger.warning("Error in closing handshake", exc_info=True)
+ raise
+
+ except Exception:
+ # Last-ditch attempt to avoid leaking connections on errors.
+ try:
+ self.transport.close()
+ except Exception: # pragma: no cover
+ pass
+
+ finally:
+ # Unregister the connection with the server when the handler task
+ # terminates. Registration is tied to the lifecycle of the handler
+ # task because the server waits for tasks attached to registered
+ # connections before terminating.
+ self.ws_server.unregister(self)
+
+ async def read_http_request(self) -> Tuple[str, Headers]:
+ """
+ Read request line and headers from the HTTP request.
+
+ If the request contains a body, it may be read from ``self.reader``
+ after this coroutine returns.
+
+ :raises ~websockets.exceptions.InvalidMessage: if the HTTP message is
+ malformed or isn't an HTTP/1.1 GET request
+
+ """
+ try:
+ path, headers = await read_request(self.reader)
+ except Exception as exc:
+ raise InvalidMessage("did not receive a valid HTTP request") from exc
+
+ logger.debug("%s < GET %s HTTP/1.1", self.side, path)
+ logger.debug("%s < %r", self.side, headers)
+
+ self.path = path
+ self.request_headers = headers
+
+ return path, headers
+
+ def write_http_response(
+ self, status: http.HTTPStatus, headers: Headers, body: Optional[bytes] = None
+ ) -> None:
+ """
+ Write status line and headers to the HTTP response.
+
+        This method is also able to write a response body.
+
+ """
+ self.response_headers = headers
+
+ logger.debug("%s > HTTP/1.1 %d %s", self.side, status.value, status.phrase)
+ logger.debug("%s > %r", self.side, headers)
+
+ # Since the status line and headers only contain ASCII characters,
+ # we can keep this simple.
+ response = f"HTTP/1.1 {status.value} {status.phrase}\r\n"
+ response += str(headers)
+
+ self.transport.write(response.encode())
+
+ if body is not None:
+ logger.debug("%s > body (%d bytes)", self.side, len(body))
+ self.transport.write(body)
+
+ async def process_request(
+ self, path: str, request_headers: Headers
+ ) -> Optional[HTTPResponse]:
+ """
+ Intercept the HTTP request and return an HTTP response if appropriate.
+
+ If ``process_request`` returns ``None``, the WebSocket handshake
+        continues. If it returns a 3-tuple containing a status code, response
+ headers and a response body, that HTTP response is sent and the
+ connection is closed. In that case:
+
+ * The HTTP status must be a :class:`~http.HTTPStatus`.
+ * HTTP headers must be a :class:`~websockets.http.Headers` instance, a
+ :class:`~collections.abc.Mapping`, or an iterable of ``(name,
+ value)`` pairs.
+ * The HTTP response body must be :class:`bytes`. It may be empty.
+
+ This coroutine may be overridden in a :class:`WebSocketServerProtocol`
+ subclass, for example:
+
+        * to return an HTTP 200 OK response on a given path; then a load
+ balancer can use this path for a health check;
+        * to authenticate the request and return an HTTP 401 Unauthorized or an
+ HTTP 403 Forbidden when authentication fails.
+
+ Instead of subclassing, it is possible to override this method by
+ passing a ``process_request`` argument to the :func:`serve` function
+ or the :class:`WebSocketServerProtocol` constructor. This is
+ equivalent, except ``process_request`` won't have access to the
+ protocol instance, so it can't store information for later use.
+
+ ``process_request`` is expected to complete quickly. If it may run for
+ a long time, then it should await :meth:`wait_closed` and exit if
+ :meth:`wait_closed` completes, or else it could prevent the server
+ from shutting down.
+
+ :param path: request path, including optional query string
+ :param request_headers: request headers
+
+ """
+ if self._process_request is not None:
+ response = self._process_request(path, request_headers)
+ if isinstance(response, Awaitable):
+ return await response
+ else:
+ # For backwards compatibility with 7.0.
+ warnings.warn(
+ "declare process_request as a coroutine", DeprecationWarning
+ )
+ return response # type: ignore
+ return None
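+
+    # A health-check hook along the lines described above (illustrative only;
+    # the "/healthz" path and handler names are arbitrary):
+    #
+    #     async def health_check(path, request_headers):
+    #         if path == "/healthz":
+    #             return http.HTTPStatus.OK, [], b"OK\n"
+    #         return None  # fall through to the WebSocket handshake
+    #
+    #     serve(ws_handler, "localhost", 8765, process_request=health_check)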
+
+ @staticmethod
+ def process_origin(
+ headers: Headers, origins: Optional[Sequence[Optional[Origin]]] = None
+ ) -> Optional[Origin]:
+ """
+ Handle the Origin HTTP request header.
+
+ :param headers: request headers
+ :param origins: optional list of acceptable origins
+ :raises ~websockets.exceptions.InvalidOrigin: if the origin isn't
+ acceptable
+
+ """
+ # "The user agent MUST NOT include more than one Origin header field"
+ # per https://tools.ietf.org/html/rfc6454#section-7.3.
+ try:
+ origin = cast(Origin, headers.get("Origin"))
+ except MultipleValuesError:
+ raise InvalidHeader("Origin", "more than one Origin header found")
+ if origins is not None:
+ if origin not in origins:
+ raise InvalidOrigin(origin)
+ return origin
+
+ @staticmethod
+ def process_extensions(
+ headers: Headers,
+ available_extensions: Optional[Sequence[ServerExtensionFactory]],
+ ) -> Tuple[Optional[str], List[Extension]]:
+ """
+ Handle the Sec-WebSocket-Extensions HTTP request header.
+
+ Accept or reject each extension proposed in the client request.
+ Negotiate parameters for accepted extensions.
+
+ Return the Sec-WebSocket-Extensions HTTP response header and the list
+ of accepted extensions.
+
+ :rfc:`6455` leaves the rules up to the specification of each
+        extension.
+
+ To provide this level of flexibility, for each extension proposed by
+ the client, we check for a match with each extension available in the
+ server configuration. If no match is found, the extension is ignored.
+
+ If several variants of the same extension are proposed by the client,
+        it may be accepted several times, which won't make sense in general.
+ Extensions must implement their own requirements. For this purpose,
+ the list of previously accepted extensions is provided.
+
+ This process doesn't allow the server to reorder extensions. It can
+ only select a subset of the extensions proposed by the client.
+
+ Other requirements, for example related to mandatory extensions or the
+ order of extensions, may be implemented by overriding this method.
+
+ :param headers: request headers
+        :param available_extensions: optional list of supported extensions
+ :raises ~websockets.exceptions.InvalidHandshake: to abort the
+ handshake with an HTTP 400 error code
+
+ """
+ response_header_value: Optional[str] = None
+
+ extension_headers: List[ExtensionHeader] = []
+ accepted_extensions: List[Extension] = []
+
+ header_values = headers.get_all("Sec-WebSocket-Extensions")
+
+ if header_values and available_extensions:
+
+ parsed_header_values: List[ExtensionHeader] = sum(
+ [parse_extension(header_value) for header_value in header_values], []
+ )
+
+ for name, request_params in parsed_header_values:
+
+ for ext_factory in available_extensions:
+
+ # Skip non-matching extensions based on their name.
+ if ext_factory.name != name:
+ continue
+
+ # Skip non-matching extensions based on their params.
+ try:
+ response_params, extension = ext_factory.process_request_params(
+ request_params, accepted_extensions
+ )
+ except NegotiationError:
+ continue
+
+ # Add matching extension to the final list.
+ extension_headers.append((name, response_params))
+ accepted_extensions.append(extension)
+
+ # Break out of the loop once we have a match.
+ break
+
+ # If we didn't break from the loop, no extension in our list
+ # matched what the client sent. The extension is declined.
+
+ # Serialize extension header.
+ if extension_headers:
+ response_header_value = build_extension(extension_headers)
+
+ return response_header_value, accepted_extensions
+
+ # Not @staticmethod because it calls self.select_subprotocol()
+ def process_subprotocol(
+ self, headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]]
+ ) -> Optional[Subprotocol]:
+ """
+ Handle the Sec-WebSocket-Protocol HTTP request header.
+
+ Return the Sec-WebSocket-Protocol HTTP response header, which is the
+ same as the selected subprotocol.
+
+ :param headers: request headers
+ :param available_subprotocols: optional list of supported subprotocols
+ :raises ~websockets.exceptions.InvalidHandshake: to abort the
+ handshake with an HTTP 400 error code
+
+ """
+ subprotocol: Optional[Subprotocol] = None
+
+ header_values = headers.get_all("Sec-WebSocket-Protocol")
+
+ if header_values and available_subprotocols:
+
+ parsed_header_values: List[Subprotocol] = sum(
+ [parse_subprotocol(header_value) for header_value in header_values], []
+ )
+
+ subprotocol = self.select_subprotocol(
+ parsed_header_values, available_subprotocols
+ )
+
+ return subprotocol
+
+ def select_subprotocol(
+ self,
+ client_subprotocols: Sequence[Subprotocol],
+ server_subprotocols: Sequence[Subprotocol],
+ ) -> Optional[Subprotocol]:
+ """
+ Pick a subprotocol among those offered by the client.
+
+ If several subprotocols are supported by both the client and the
+ server, the default implementation selects the preferred subprotocol
+ by giving equal weight to the preferences of the client and the
+ server.
+
+ If no subprotocol is supported by both the client and the server, it
+ proceeds without a subprotocol.
+
+ This is unlikely to be the most useful implementation in practice, as
+ many servers providing a subprotocol will require that the client uses
+ that subprotocol. Such rules can be implemented in a subclass.
+
+ Instead of subclassing, it is possible to override this method by
+ passing a ``select_subprotocol`` argument to the :func:`serve`
+ function or the :class:`WebSocketServerProtocol` constructor.
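+
+ For example, a minimal sketch of such a callable that always follows
+ the server's order of preference (the name is illustrative); pass it
+ as ``select_subprotocol=prefer_server_order`` to :func:`serve`::
+
+     def prefer_server_order(client_subprotocols, server_subprotocols):
+         for subprotocol in server_subprotocols:
+             if subprotocol in client_subprotocols:
+                 return subprotocol
+         return None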
+
+ :param client_subprotocols: list of subprotocols offered by the client
+ :param server_subprotocols: list of subprotocols available on the server
+
+ """
+ if self._select_subprotocol is not None:
+ return self._select_subprotocol(client_subprotocols, server_subprotocols)
+
+ subprotocols = set(client_subprotocols) & set(server_subprotocols)
+ if not subprotocols:
+ return None
+ priority = lambda p: (
+ client_subprotocols.index(p) + server_subprotocols.index(p)
+ )
+ return sorted(subprotocols, key=priority)[0]
+
+ async def handshake(
+ self,
+ origins: Optional[Sequence[Optional[Origin]]] = None,
+ available_extensions: Optional[Sequence[ServerExtensionFactory]] = None,
+ available_subprotocols: Optional[Sequence[Subprotocol]] = None,
+ extra_headers: Optional[HeadersLikeOrCallable] = None,
+ ) -> str:
+ """
+ Perform the server side of the opening handshake.
+
+ Return the path of the request URI.
+
+ :param origins: list of acceptable values of the Origin HTTP header;
+ include ``None`` if the lack of an origin is acceptable
+ :param available_extensions: list of supported extensions in the order
+ in which they should be used
+ :param available_subprotocols: list of supported subprotocols in order
+ of decreasing preference
+ :param extra_headers: sets additional HTTP response headers when the
+ handshake succeeds; it can be a :class:`~websockets.http.Headers`
+ instance, a :class:`~collections.abc.Mapping`, an iterable of
+ ``(name, value)`` pairs, or a callable taking the request path and
+ headers as arguments and returning one of the above.
+ :raises ~websockets.exceptions.InvalidHandshake: if the handshake
+ fails
+
+ """
+ path, request_headers = await self.read_http_request()
+
+ # Hook for customizing request handling, for example checking
+ # authentication or treating some paths as plain HTTP endpoints.
+ early_response_awaitable = self.process_request(path, request_headers)
+ if isinstance(early_response_awaitable, Awaitable):
+ early_response = await early_response_awaitable
+ else:
+ # For backwards compatibility with 7.0.
+ warnings.warn("declare process_request as a coroutine", DeprecationWarning)
+ early_response = early_response_awaitable # type: ignore
+
+ # Change the response to a 503 error if the server is shutting down.
+ if not self.ws_server.is_serving():
+ early_response = (
+ http.HTTPStatus.SERVICE_UNAVAILABLE,
+ [],
+ b"Server is shutting down.\n",
+ )
+
+ if early_response is not None:
+ raise AbortHandshake(*early_response)
+
+ key = check_request(request_headers)
+
+ self.origin = self.process_origin(request_headers, origins)
+
+ extensions_header, self.extensions = self.process_extensions(
+ request_headers, available_extensions
+ )
+
+ protocol_header = self.subprotocol = self.process_subprotocol(
+ request_headers, available_subprotocols
+ )
+
+ response_headers = Headers()
+
+ build_response(response_headers, key)
+
+ if extensions_header is not None:
+ response_headers["Sec-WebSocket-Extensions"] = extensions_header
+
+ if protocol_header is not None:
+ response_headers["Sec-WebSocket-Protocol"] = protocol_header
+
+ if callable(extra_headers):
+ extra_headers = extra_headers(path, self.request_headers)
+ if extra_headers is not None:
+ if isinstance(extra_headers, Headers):
+ extra_headers = extra_headers.raw_items()
+ elif isinstance(extra_headers, collections.abc.Mapping):
+ extra_headers = extra_headers.items()
+ for name, value in extra_headers:
+ response_headers[name] = value
+
+ response_headers.setdefault("Date", email.utils.formatdate(usegmt=True))
+ response_headers.setdefault("Server", USER_AGENT)
+
+ self.write_http_response(http.HTTPStatus.SWITCHING_PROTOCOLS, response_headers)
+
+ self.connection_open()
+
+ return path
+
+
+class WebSocketServer:
+ """
+ WebSocket server returned by :func:`~websockets.server.serve`.
+
+ This class provides the same interface as
+ :class:`~asyncio.AbstractServer`, namely the
+ :meth:`~asyncio.AbstractServer.close` and
+ :meth:`~asyncio.AbstractServer.wait_closed` methods.
+
+ It keeps track of WebSocket connections in order to close them properly
+ when shutting down.
+
+ Instances of this class store a reference to the :class:`~asyncio.Server`
+ object returned by :meth:`~asyncio.loop.create_server` rather than inherit
+ from :class:`~asyncio.Server` in part because
+ :meth:`~asyncio.loop.create_server` doesn't support passing a custom
+ :class:`~asyncio.Server` class.
+
+ """
+
+ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
+ # Store a reference to loop to avoid relying on self.server._loop.
+ self.loop = loop
+
+ # Keep track of active connections.
+ self.websockets: Set[WebSocketServerProtocol] = set()
+
+ # Task responsible for closing the server and terminating connections.
+ self.close_task: Optional[asyncio.Task[None]] = None
+
+ # Completed when the server is closed and connections are terminated.
+ self.closed_waiter: asyncio.Future[None] = loop.create_future()
+
+ def wrap(self, server: asyncio.AbstractServer) -> None:
+ """
+ Attach to a given :class:`~asyncio.Server`.
+
+ Since :meth:`~asyncio.loop.create_server` doesn't support injecting a
+ custom ``Server`` class, the easiest solution that doesn't rely on
+ private :mod:`asyncio` APIs is to:
+
+ - instantiate a :class:`WebSocketServer`
+ - give the protocol factory a reference to that instance
+ - call :meth:`~asyncio.loop.create_server` with the factory
+ - attach the resulting :class:`~asyncio.Server` with this method
+
+ """
+ self.server = server
+
+ def register(self, protocol: WebSocketServerProtocol) -> None:
+ """
+ Register a connection with this server.
+
+ """
+ self.websockets.add(protocol)
+
+ def unregister(self, protocol: WebSocketServerProtocol) -> None:
+ """
+ Unregister a connection from this server.
+
+ """
+ self.websockets.remove(protocol)
+
+ def is_serving(self) -> bool:
+ """
+ Tell whether the server is accepting new connections or shutting down.
+
+ """
+ try:
+ # Python ≥ 3.7
+ return self.server.is_serving()
+ except AttributeError: # pragma: no cover
+ # Python < 3.7
+ return self.server.sockets is not None
+
+ def close(self) -> None:
+ """
+ Close the server.
+
+ This method:
+
+ * closes the underlying :class:`~asyncio.Server`;
+ * rejects new WebSocket connections with an HTTP 503 (service
+ unavailable) error; this happens when the server accepted the TCP
+ connection but didn't complete the WebSocket opening handshake prior
+ to closing;
+ * closes open WebSocket connections with close code 1001 (going away).
+
+ :meth:`close` is idempotent.
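+
+ A typical shutdown sequence, sketched with a placeholder ``handler``::
+
+     server = await serve(handler, "localhost", 8765)
+     # later, when shutting down gracefully:
+     server.close()
+     await server.wait_closed()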
+
+ """
+ if self.close_task is None:
+ self.close_task = self.loop.create_task(self._close())
+
+ async def _close(self) -> None:
+ """
+ Implementation of :meth:`close`.
+
+ This calls :meth:`~asyncio.Server.close` on the underlying
+ :class:`~asyncio.Server` object to stop accepting new connections and
+ then closes open connections with close code 1001.
+
+ """
+ # Stop accepting new connections.
+ self.server.close()
+
+ # Wait until self.server.close() completes.
+ await self.server.wait_closed()
+
+ # Wait until all accepted connections reach connection_made() and call
+ # register(). See https://bugs.python.org/issue34852 for details.
+ await asyncio.sleep(
+ 0,
+ **({"loop": self.loop} if sys.version_info[:2] < (3, 8) else {}),
+ )
+
+ # Close OPEN connections with status code 1001. Since the server was
+ # closed, handshake() closes OPENING connections with an HTTP 503 error.
+ # Wait until all connections are closed.
+
+ # asyncio.wait doesn't accept an empty first argument
+ if self.websockets:
+ await asyncio.wait(
+ [websocket.close(1001) for websocket in self.websockets],
+ **({"loop": self.loop} if sys.version_info[:2] < (3, 8) else {}),
+ )
+
+ # Wait until all connection handlers are complete.
+
+ # asyncio.wait doesn't accept an empty first argument.
+ if self.websockets:
+ await asyncio.wait(
+ [websocket.handler_task for websocket in self.websockets],
+ **({"loop": self.loop} if sys.version_info[:2] < (3, 8) else {}),
+ )
+
+ # Tell wait_closed() to return.
+ self.closed_waiter.set_result(None)
+
+ async def wait_closed(self) -> None:
+ """
+ Wait until the server is closed.
+
+ When :meth:`wait_closed` returns, all TCP connections are closed and
+ all connection handlers have returned.
+
+ """
+ await asyncio.shield(self.closed_waiter)
+
+ @property
+ def sockets(self) -> Optional[List[socket.socket]]:
+ """
+ List of :class:`~socket.socket` objects the server is listening to.
+
+ ``None`` if the server is closed.
+
+ """
+ return self.server.sockets
+
+
+class Serve:
+ """
+ Create, start, and return a WebSocket server on ``host`` and ``port``.
+
+ Whenever a client connects, the server accepts the connection, creates a
+ :class:`WebSocketServerProtocol`, performs the opening handshake, and
+ delegates to the connection handler defined by ``ws_handler``. Once the
+ handler completes, either normally or with an exception, the server
+ performs the closing handshake and closes the connection.
+
+ Awaiting :func:`serve` yields a :class:`WebSocketServer`. This instance
+ provides :meth:`~websockets.server.WebSocketServer.close` and
+ :meth:`~websockets.server.WebSocketServer.wait_closed` methods for
+ terminating the server and cleaning up its resources.
+
+ When a server is closed with :meth:`~WebSocketServer.close`, it closes all
+ connections with close code 1001 (going away). Connection handlers, which
+ are running the ``ws_handler`` coroutine, will receive a
+ :exc:`~websockets.exceptions.ConnectionClosedOK` exception on their
+ current or next interaction with the WebSocket connection.
+
+ :func:`serve` can also be used as an asynchronous context manager. In
+ this case, the server is shut down when exiting the context.
+
+ :func:`serve` is a wrapper around the event loop's
+ :meth:`~asyncio.loop.create_server` method. It creates and starts a
+ :class:`~asyncio.Server` with :meth:`~asyncio.loop.create_server`. Then it
+ wraps the :class:`~asyncio.Server` in a :class:`WebSocketServer` and
+ returns the :class:`WebSocketServer`.
+
+ The ``ws_handler`` argument is the WebSocket handler. It must be a
+ coroutine accepting two arguments: a :class:`WebSocketServerProtocol` and
+ the request URI.
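+
+ For example, a minimal sketch of an echo handler (names are
+ illustrative), to be run from a coroutine::
+
+     async def echo(websocket, path):
+         async for message in websocket:
+             await websocket.send(message)
+
+     server = await serve(echo, "localhost", 8765)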
+
+ The ``host`` and ``port`` arguments, as well as unrecognized keyword
+ arguments, are passed along to :meth:`~asyncio.loop.create_server`.
+
+ For example, you can set the ``ssl`` keyword argument to a
+ :class:`~ssl.SSLContext` to enable TLS.
+
+ The ``create_protocol`` parameter allows customizing the
+ :class:`~asyncio.Protocol` that manages the connection. It should be a
+ callable or class accepting the same arguments as
+ :class:`WebSocketServerProtocol` and returning an instance of
+ :class:`WebSocketServerProtocol` or a subclass. It defaults to
+ :class:`WebSocketServerProtocol`.
+
+ The behavior of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
+ ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` is
+ described in :class:`~websockets.protocol.WebSocketCommonProtocol`.
+
+ :func:`serve` also accepts the following optional arguments:
+
+ * ``compression`` is a shortcut to configure compression extensions;
+ by default it enables the "permessage-deflate" extension; set it to
+ ``None`` to disable compression
+ * ``origins`` defines acceptable Origin HTTP headers; include ``None`` if
+ the lack of an origin is acceptable
+ * ``extensions`` is a list of supported extensions in order of
+ decreasing preference
+ * ``subprotocols`` is a list of supported subprotocols in order of
+ decreasing preference
+ * ``extra_headers`` sets additional HTTP response headers when the
+ handshake succeeds; it can be a :class:`~websockets.http.Headers`
+ instance, a :class:`~collections.abc.Mapping`, an iterable of ``(name,
+ value)`` pairs, or a callable taking the request path and headers as
+ arguments and returning one of the above (see the sketch after this list)
+ * ``process_request`` allows intercepting the HTTP request; it must be a
+ coroutine taking the request path and headers as arguments; see
+ :meth:`~WebSocketServerProtocol.process_request` for details
+ * ``select_subprotocol`` allows customizing the logic for selecting a
+ subprotocol; it must be a callable taking the subprotocols offered by
+ the client and available on the server as arguments; see
+ :meth:`~WebSocketServerProtocol.select_subprotocol` for details
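+
+ For example, a sketch combining some of these arguments (the header
+ name and subprotocol are illustrative)::
+
+     await serve(
+         handler, "localhost", 8765,
+         subprotocols=["chat"],
+         extra_headers={"X-Served-By": "example"},
+     )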
+
+ Since there's no useful way to propagate exceptions triggered in handlers,
+ they're sent to the ``'websockets.server'`` logger instead. Debugging is
+ much easier if you configure logging to print them::
+
+ import logging
+ logger = logging.getLogger('websockets.server')
+ logger.setLevel(logging.ERROR)
+ logger.addHandler(logging.StreamHandler())
+
+ """
+
+ def __init__(
+ self,
+ ws_handler: Callable[[WebSocketServerProtocol, str], Awaitable[Any]],
+ host: Optional[Union[str, Sequence[str]]] = None,
+ port: Optional[int] = None,
+ *,
+ path: Optional[str] = None,
+ create_protocol: Optional[Type[WebSocketServerProtocol]] = None,
+ ping_interval: float = 20,
+ ping_timeout: float = 20,
+ close_timeout: Optional[float] = None,
+ max_size: int = 2 ** 20,
+ max_queue: int = 2 ** 5,
+ read_limit: int = 2 ** 16,
+ write_limit: int = 2 ** 16,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ legacy_recv: bool = False,
+ klass: Optional[Type[WebSocketServerProtocol]] = None,
+ timeout: Optional[float] = None,
+ compression: Optional[str] = "deflate",
+ origins: Optional[Sequence[Optional[Origin]]] = None,
+ extensions: Optional[Sequence[ServerExtensionFactory]] = None,
+ subprotocols: Optional[Sequence[Subprotocol]] = None,
+ extra_headers: Optional[HeadersLikeOrCallable] = None,
+ process_request: Optional[
+ Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]]
+ ] = None,
+ select_subprotocol: Optional[
+ Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol]
+ ] = None,
+ **kwargs: Any,
+ ) -> None:
+ # Backwards compatibility: close_timeout used to be called timeout.
+ if timeout is None:
+ timeout = 10
+ else:
+ warnings.warn("rename timeout to close_timeout", DeprecationWarning)
+ # If both are specified, timeout is ignored.
+ if close_timeout is None:
+ close_timeout = timeout
+
+ # Backwards compatibility: create_protocol used to be called klass.
+ if klass is None:
+ klass = WebSocketServerProtocol
+ else:
+ warnings.warn("rename klass to create_protocol", DeprecationWarning)
+ # If both are specified, klass is ignored.
+ if create_protocol is None:
+ create_protocol = klass
+
+ if loop is None:
+ loop = asyncio.get_event_loop()
+
+ ws_server = WebSocketServer(loop)
+
+ secure = kwargs.get("ssl") is not None
+
+ if compression == "deflate":
+ if extensions is None:
+ extensions = []
+ if not any(
+ ext_factory.name == ServerPerMessageDeflateFactory.name
+ for ext_factory in extensions
+ ):
+ extensions = list(extensions) + [ServerPerMessageDeflateFactory()]
+ elif compression is not None:
+ raise ValueError(f"unsupported compression: {compression}")
+
+ factory = functools.partial(
+ create_protocol,
+ ws_handler,
+ ws_server,
+ host=host,
+ port=port,
+ secure=secure,
+ ping_interval=ping_interval,
+ ping_timeout=ping_timeout,
+ close_timeout=close_timeout,
+ max_size=max_size,
+ max_queue=max_queue,
+ read_limit=read_limit,
+ write_limit=write_limit,
+ loop=loop,
+ legacy_recv=legacy_recv,
+ origins=origins,
+ extensions=extensions,
+ subprotocols=subprotocols,
+ extra_headers=extra_headers,
+ process_request=process_request,
+ select_subprotocol=select_subprotocol,
+ )
+
+ if path is None:
+ create_server = functools.partial(
+ loop.create_server, factory, host, port, **kwargs
+ )
+ else:
+ # unix_serve(path) must not specify host and port parameters.
+ assert host is None and port is None
+ create_server = functools.partial(
+ loop.create_unix_server, factory, path, **kwargs
+ )
+
+ # This is a coroutine function.
+ self._create_server = create_server
+ self.ws_server = ws_server
+
+ # async with serve(...)
+
+ async def __aenter__(self) -> WebSocketServer:
+ return await self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_value: Optional[BaseException],
+ traceback: Optional[TracebackType],
+ ) -> None:
+ self.ws_server.close()
+ await self.ws_server.wait_closed()
+
+ # await serve(...)
+
+ def __await__(self) -> Generator[Any, None, WebSocketServer]:
+ # Create a suitable iterator by calling __await__ on a coroutine.
+ return self.__await_impl__().__await__()
+
+ async def __await_impl__(self) -> WebSocketServer:
+ server = await self._create_server()
+ self.ws_server.wrap(server)
+ return self.ws_server
+
+ # yield from serve(...)
+
+ __iter__ = __await__
+
+
+serve = Serve
+
+
+def unix_serve(
+ ws_handler: Callable[[WebSocketServerProtocol, str], Awaitable[Any]],
+ path: str,
+ **kwargs: Any,
+) -> Serve:
+ """
+ Similar to :func:`serve`, but for listening on Unix sockets.
+
+ This function calls the event loop's
+ :meth:`~asyncio.loop.create_unix_server` method.
+
+ It is only available on Unix.
+
+ It's useful for deploying a server behind a reverse proxy such as nginx.
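+
+ For example, a sketch with an illustrative socket path::
+
+     server = await unix_serve(handler, "/tmp/websockets.sock")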
+
+ :param path: file system path to the Unix socket
+
+ """
+ return serve(ws_handler, path=path, **kwargs)
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/speedups.c b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/speedups.c
new file mode 100644
index 0000000000..d1c2b37e60
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/speedups.c
@@ -0,0 +1,206 @@
+/* C implementation of performance sensitive functions. */
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <stdint.h> /* uint32_t, uint64_t */
+
+#if __SSE2__
+#include <emmintrin.h>
+#endif
+
+static const Py_ssize_t MASK_LEN = 4;
+
+/* Similar to PyBytes_AsStringAndSize, but accepts more types */
+
+static int
+_PyBytesLike_AsStringAndSize(PyObject *obj, char **buffer, Py_ssize_t *length)
+{
+ // This supports bytes, bytearrays, and C-contiguous memoryview objects,
+ // which are the most useful data structures for handling byte streams.
+ // websockets.framing.prepare_data() returns only values of these types.
+ // Any object implementing the buffer protocol could be supported, however
+ // that would require allocation or copying memory, which is expensive.
+ if (PyBytes_Check(obj))
+ {
+ *buffer = PyBytes_AS_STRING(obj);
+ *length = PyBytes_GET_SIZE(obj);
+ }
+ else if (PyByteArray_Check(obj))
+ {
+ *buffer = PyByteArray_AS_STRING(obj);
+ *length = PyByteArray_GET_SIZE(obj);
+ }
+ else if (PyMemoryView_Check(obj))
+ {
+ Py_buffer *mv_buf;
+ mv_buf = PyMemoryView_GET_BUFFER(obj);
+ if (PyBuffer_IsContiguous(mv_buf, 'C'))
+ {
+ *buffer = mv_buf->buf;
+ *length = mv_buf->len;
+ }
+ else
+ {
+ PyErr_Format(
+ PyExc_TypeError,
+ "expected a contiguous memoryview");
+ return -1;
+ }
+ }
+ else
+ {
+ PyErr_Format(
+ PyExc_TypeError,
+ "expected a bytes-like object, %.200s found",
+ Py_TYPE(obj)->tp_name);
+ return -1;
+ }
+
+ return 0;
+}
+
+/* C implementation of websockets.utils.apply_mask */
+
+static PyObject *
+apply_mask(PyObject *self, PyObject *args, PyObject *kwds)
+{
+
+ // In order to support various bytes-like types, accept any Python object.
+
+ static char *kwlist[] = {"data", "mask", NULL};
+ PyObject *input_obj;
+ PyObject *mask_obj;
+
+ // A pointer to a char * + length will be extracted from the data and mask
+ // arguments, possibly via a Py_buffer.
+
+ char *input;
+ Py_ssize_t input_len;
+ char *mask;
+ Py_ssize_t mask_len;
+
+ // Initialize a PyBytesObject then get a pointer to the underlying char *
+ // in order to avoid an extra memory copy in PyBytes_FromStringAndSize.
+
+ PyObject *result;
+ char *output;
+
+ // Other variables.
+
+ Py_ssize_t i = 0;
+
+ // Parse inputs.
+
+ if (!PyArg_ParseTupleAndKeywords(
+ args, kwds, "OO", kwlist, &input_obj, &mask_obj))
+ {
+ return NULL;
+ }
+
+ if (_PyBytesLike_AsStringAndSize(input_obj, &input, &input_len) == -1)
+ {
+ return NULL;
+ }
+
+ if (_PyBytesLike_AsStringAndSize(mask_obj, &mask, &mask_len) == -1)
+ {
+ return NULL;
+ }
+
+ if (mask_len != MASK_LEN)
+ {
+ PyErr_SetString(PyExc_ValueError, "mask must contain 4 bytes");
+ return NULL;
+ }
+
+ // Create output.
+
+ result = PyBytes_FromStringAndSize(NULL, input_len);
+ if (result == NULL)
+ {
+ return NULL;
+ }
+
+ // Since we just created result, we don't need error checks.
+ output = PyBytes_AS_STRING(result);
+
+ // Perform the masking operation.
+
+ // Apparently GCC cannot figure out the following optimizations by itself.
+
+ // We need a new scope for MSVC 2010 (not C99-friendly)
+ {
+#if __SSE2__
+
+ // With SSE2 support, XOR by blocks of 16 bytes = 128 bits.
+
+ // Since we cannot control the 16-byte alignment of input and output
+ // buffers, we rely on loadu/storeu rather than load/store.
+
+ Py_ssize_t input_len_128 = input_len & ~15;
+ __m128i mask_128 = _mm_set1_epi32(*(uint32_t *)mask);
+
+ for (; i < input_len_128; i += 16)
+ {
+ __m128i in_128 = _mm_loadu_si128((__m128i *)(input + i));
+ __m128i out_128 = _mm_xor_si128(in_128, mask_128);
+ _mm_storeu_si128((__m128i *)(output + i), out_128);
+ }
+
+#else
+
+ // Without SSE2 support, XOR by blocks of 8 bytes = 64 bits.
+
+ // We assume the memory allocator aligns everything on 8-byte boundaries.
+
+ Py_ssize_t input_len_64 = input_len & ~7;
+ uint32_t mask_32 = *(uint32_t *)mask;
+ uint64_t mask_64 = ((uint64_t)mask_32 << 32) | (uint64_t)mask_32;
+
+ for (; i < input_len_64; i += 8)
+ {
+ *(uint64_t *)(output + i) = *(uint64_t *)(input + i) ^ mask_64;
+ }
+
+#endif
+ }
+
+ // XOR the remainder of the input byte by byte.
+
+ for (; i < input_len; i++)
+ {
+ output[i] = input[i] ^ mask[i & (MASK_LEN - 1)];
+ }
+
+ return result;
+
+}
+
+static PyMethodDef speedups_methods[] = {
+ {
+ "apply_mask",
+ (PyCFunction)apply_mask,
+ METH_VARARGS | METH_KEYWORDS,
+ "Apply masking to websocket message.",
+ },
+ {NULL, NULL, 0, NULL}, /* Sentinel */
+};
+
+static struct PyModuleDef speedups_module = {
+ PyModuleDef_HEAD_INIT,
+ "websocket.speedups", /* m_name */
+ "C implementation of performance sensitive functions.",
+ /* m_doc */
+ -1, /* m_size */
+ speedups_methods, /* m_methods */
+ NULL,
+ NULL,
+ NULL,
+ NULL
+};
+
+PyMODINIT_FUNC
+PyInit_speedups(void)
+{
+ return PyModule_Create(&speedups_module);
+}
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/speedups.pyi b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/speedups.pyi
new file mode 100644
index 0000000000..821438a064
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/speedups.pyi
@@ -0,0 +1 @@
+def apply_mask(data: bytes, mask: bytes) -> bytes: ...
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/typing.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/typing.py
new file mode 100644
index 0000000000..4a60f93f64
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/typing.py
@@ -0,0 +1,49 @@
+from typing import List, NewType, Optional, Tuple, Union
+
+
+__all__ = ["Data", "Origin", "ExtensionHeader", "ExtensionParameter", "Subprotocol"]
+
+Data = Union[str, bytes]
+
+Data__doc__ = """
+Types supported in a WebSocket message:
+
+- :class:`str` for text messages
+- :class:`bytes` for binary messages
+
+"""
+# Remove try / except when dropping support for Python < 3.7
+try:
+ Data.__doc__ = Data__doc__ # type: ignore
+except AttributeError: # pragma: no cover
+ pass
+
+
+Origin = NewType("Origin", str)
+ Origin.__doc__ = """Value of an Origin header"""
+
+
+ExtensionName = NewType("ExtensionName", str)
+ExtensionName.__doc__ = """Name of a WebSocket extension"""
+
+
+ExtensionParameter = Tuple[str, Optional[str]]
+
+ExtensionParameter__doc__ = """Parameter of a WebSocket extension"""
+try:
+ ExtensionParameter.__doc__ = ExtensionParameter__doc__ # type: ignore
+except AttributeError: # pragma: no cover
+ pass
+
+
+ExtensionHeader = Tuple[ExtensionName, List[ExtensionParameter]]
+
+ExtensionHeader__doc__ = """Item parsed in a Sec-WebSocket-Extensions header"""
+try:
+ ExtensionHeader.__doc__ = ExtensionHeader__doc__ # type: ignore
+except AttributeError: # pragma: no cover
+ pass
+
+
+Subprotocol = NewType("Subprotocol", str)
+ Subprotocol.__doc__ = """Item parsed in a Sec-WebSocket-Protocol header"""
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/uri.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/uri.py
new file mode 100644
index 0000000000..6669e56686
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/uri.py
@@ -0,0 +1,81 @@
+"""
+:mod:`websockets.uri` parses WebSocket URIs.
+
+See `section 3 of RFC 6455`_.
+
+.. _section 3 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-3
+
+"""
+
+import urllib.parse
+from typing import NamedTuple, Optional, Tuple
+
+from .exceptions import InvalidURI
+
+
+__all__ = ["parse_uri", "WebSocketURI"]
+
+
+# Consider converting to a dataclass when dropping support for Python < 3.7.
+
+
+class WebSocketURI(NamedTuple):
+ """
+ WebSocket URI.
+
+ :param bool secure: secure flag
+ :param str host: lower-case host
+ :param int port: port, always set even if it's the default
+ :param str resource_name: path and optional query
+ :param str user_info: ``(username, password)`` tuple when the URI contains
+ `User Information`_, else ``None``.
+
+ .. _User Information: https://tools.ietf.org/html/rfc3986#section-3.2.1
+ """
+
+ secure: bool
+ host: str
+ port: int
+ resource_name: str
+ user_info: Optional[Tuple[str, str]]
+
+
+# Work around https://bugs.python.org/issue19931
+
+WebSocketURI.secure.__doc__ = ""
+WebSocketURI.host.__doc__ = ""
+WebSocketURI.port.__doc__ = ""
+WebSocketURI.resource_name.__doc__ = ""
+WebSocketURI.user_info.__doc__ = ""
+
+
+def parse_uri(uri: str) -> WebSocketURI:
+ """
+ Parse and validate a WebSocket URI.
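+
+ For example, with an illustrative URI::
+
+     uri = parse_uri("wss://example.com/chat?token=abc")
+     # uri.secure is True, uri.host == "example.com", uri.port == 443,
+     # uri.resource_name == "/chat?token=abc", uri.user_info is None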
+
+ :raises ~websockets.exceptions.InvalidURI: if ``uri`` isn't a valid
+ WebSocket URI.
+
+ """
+ parsed = urllib.parse.urlparse(uri)
+ try:
+ assert parsed.scheme in ["ws", "wss"]
+ assert parsed.params == ""
+ assert parsed.fragment == ""
+ assert parsed.hostname is not None
+ except AssertionError as exc:
+ raise InvalidURI(uri) from exc
+
+ secure = parsed.scheme == "wss"
+ host = parsed.hostname
+ port = parsed.port or (443 if secure else 80)
+ resource_name = parsed.path or "/"
+ if parsed.query:
+ resource_name += "?" + parsed.query
+ user_info = None
+ if parsed.username is not None:
+ # urllib.parse.urlparse accepts URLs with a username but without a
+ # password. This doesn't make sense for HTTP Basic Auth credentials.
+ if parsed.password is None:
+ raise InvalidURI(uri)
+ user_info = (parsed.username, parsed.password)
+ return WebSocketURI(secure, host, port, resource_name, user_info)
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/utils.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/utils.py
new file mode 100644
index 0000000000..40ac8559ff
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/utils.py
@@ -0,0 +1,18 @@
+import itertools
+
+
+__all__ = ["apply_mask"]
+
+
+def apply_mask(data: bytes, mask: bytes) -> bytes:
+ """
+ Apply masking to the data of a WebSocket message.
+
+ :param data: data to mask
+ :param mask: 4-byte mask
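+
+ Masking is an involution: applying the same mask twice returns the
+ original data, as in this sketch with illustrative values::
+
+     masked = apply_mask(b"Hello", b"\x01\x02\x03\x04")
+     assert apply_mask(masked, b"\x01\x02\x03\x04") == b"Hello"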
+
+ """
+ if len(mask) != 4:
+ raise ValueError("mask must contain 4 bytes")
+
+ return bytes(b ^ m for b, m in zip(data, itertools.cycle(mask)))
diff --git a/testing/web-platform/tests/tools/third_party/websockets/src/websockets/version.py b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/version.py
new file mode 100644
index 0000000000..7377332e12
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/src/websockets/version.py
@@ -0,0 +1 @@
+version = "8.1"
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/__init__.py b/testing/web-platform/tests/tools/third_party/websockets/tests/__init__.py
new file mode 100644
index 0000000000..dd78609f5b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/__init__.py
@@ -0,0 +1,5 @@
+import logging
+
+
+# Avoid displaying stack traces at the ERROR logging level.
+logging.basicConfig(level=logging.CRITICAL)
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/extensions/__init__.py b/testing/web-platform/tests/tools/third_party/websockets/tests/extensions/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/extensions/__init__.py
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/extensions/test_base.py b/testing/web-platform/tests/tools/third_party/websockets/tests/extensions/test_base.py
new file mode 100644
index 0000000000..ba8657b654
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/extensions/test_base.py
@@ -0,0 +1,4 @@
+from websockets.extensions.base import * # noqa
+
+
+# Abstract classes don't provide any behavior to test.
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/extensions/test_permessage_deflate.py b/testing/web-platform/tests/tools/third_party/websockets/tests/extensions/test_permessage_deflate.py
new file mode 100644
index 0000000000..0ec49c6c02
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/extensions/test_permessage_deflate.py
@@ -0,0 +1,792 @@
+import unittest
+import zlib
+
+from websockets.exceptions import (
+ DuplicateParameter,
+ InvalidParameterName,
+ InvalidParameterValue,
+ NegotiationError,
+ PayloadTooBig,
+)
+from websockets.extensions.permessage_deflate import *
+from websockets.framing import (
+ OP_BINARY,
+ OP_CLOSE,
+ OP_CONT,
+ OP_PING,
+ OP_PONG,
+ OP_TEXT,
+ Frame,
+ serialize_close,
+)
+
+
+class ExtensionTestsMixin:
+ def assertExtensionEqual(self, extension1, extension2):
+ self.assertEqual(
+ extension1.remote_no_context_takeover, extension2.remote_no_context_takeover
+ )
+ self.assertEqual(
+ extension1.local_no_context_takeover, extension2.local_no_context_takeover
+ )
+ self.assertEqual(
+ extension1.remote_max_window_bits, extension2.remote_max_window_bits
+ )
+ self.assertEqual(
+ extension1.local_max_window_bits, extension2.local_max_window_bits
+ )
+
+
+class PerMessageDeflateTests(unittest.TestCase, ExtensionTestsMixin):
+ def setUp(self):
+ # Set up an instance of the permessage-deflate extension with the most
+ # common settings. Since the extension is symmetrical, this instance
+ # may be used for testing both encoding and decoding.
+ self.extension = PerMessageDeflate(False, False, 15, 15)
+
+ def test_name(self):
+ assert self.extension.name == "permessage-deflate"
+
+ def test_repr(self):
+ self.assertExtensionEqual(eval(repr(self.extension)), self.extension)
+
+ # Control frames aren't encoded or decoded.
+
+ def test_no_encode_decode_ping_frame(self):
+ frame = Frame(True, OP_PING, b"")
+
+ self.assertEqual(self.extension.encode(frame), frame)
+
+ self.assertEqual(self.extension.decode(frame), frame)
+
+ def test_no_encode_decode_pong_frame(self):
+ frame = Frame(True, OP_PONG, b"")
+
+ self.assertEqual(self.extension.encode(frame), frame)
+
+ self.assertEqual(self.extension.decode(frame), frame)
+
+ def test_no_encode_decode_close_frame(self):
+ frame = Frame(True, OP_CLOSE, serialize_close(1000, ""))
+
+ self.assertEqual(self.extension.encode(frame), frame)
+
+ self.assertEqual(self.extension.decode(frame), frame)
+
+ # Data frames are encoded and decoded.
+
+ def test_encode_decode_text_frame(self):
+ frame = Frame(True, OP_TEXT, "café".encode("utf-8"))
+
+ enc_frame = self.extension.encode(frame)
+
+ self.assertEqual(enc_frame, frame._replace(rsv1=True, data=b"JNL;\xbc\x12\x00"))
+
+ dec_frame = self.extension.decode(enc_frame)
+
+ self.assertEqual(dec_frame, frame)
+
+ def test_encode_decode_binary_frame(self):
+ frame = Frame(True, OP_BINARY, b"tea")
+
+ enc_frame = self.extension.encode(frame)
+
+ self.assertEqual(enc_frame, frame._replace(rsv1=True, data=b"*IM\x04\x00"))
+
+ dec_frame = self.extension.decode(enc_frame)
+
+ self.assertEqual(dec_frame, frame)
+
+ def test_encode_decode_fragmented_text_frame(self):
+ frame1 = Frame(False, OP_TEXT, "café".encode("utf-8"))
+ frame2 = Frame(False, OP_CONT, " & ".encode("utf-8"))
+ frame3 = Frame(True, OP_CONT, "croissants".encode("utf-8"))
+
+ enc_frame1 = self.extension.encode(frame1)
+ enc_frame2 = self.extension.encode(frame2)
+ enc_frame3 = self.extension.encode(frame3)
+
+ self.assertEqual(
+ enc_frame1,
+ frame1._replace(rsv1=True, data=b"JNL;\xbc\x12\x00\x00\x00\xff\xff"),
+ )
+ self.assertEqual(
+ enc_frame2, frame2._replace(rsv1=True, data=b"RPS\x00\x00\x00\x00\xff\xff")
+ )
+ self.assertEqual(
+ enc_frame3, frame3._replace(rsv1=True, data=b"J.\xca\xcf,.N\xcc+)\x06\x00")
+ )
+
+ dec_frame1 = self.extension.decode(enc_frame1)
+ dec_frame2 = self.extension.decode(enc_frame2)
+ dec_frame3 = self.extension.decode(enc_frame3)
+
+ self.assertEqual(dec_frame1, frame1)
+ self.assertEqual(dec_frame2, frame2)
+ self.assertEqual(dec_frame3, frame3)
+
+ def test_encode_decode_fragmented_binary_frame(self):
+ frame1 = Frame(False, OP_TEXT, b"tea ")
+ frame2 = Frame(True, OP_CONT, b"time")
+
+ enc_frame1 = self.extension.encode(frame1)
+ enc_frame2 = self.extension.encode(frame2)
+
+ self.assertEqual(
+ enc_frame1, frame1._replace(rsv1=True, data=b"*IMT\x00\x00\x00\x00\xff\xff")
+ )
+ self.assertEqual(
+ enc_frame2, frame2._replace(rsv1=True, data=b"*\xc9\xccM\x05\x00")
+ )
+
+ dec_frame1 = self.extension.decode(enc_frame1)
+ dec_frame2 = self.extension.decode(enc_frame2)
+
+ self.assertEqual(dec_frame1, frame1)
+ self.assertEqual(dec_frame2, frame2)
+
+ def test_no_decode_text_frame(self):
+ frame = Frame(True, OP_TEXT, "café".encode("utf-8"))
+
+ # Try decoding a frame that wasn't encoded.
+ self.assertEqual(self.extension.decode(frame), frame)
+
+ def test_no_decode_binary_frame(self):
+ frame = Frame(True, OP_TEXT, b"tea")
+
+ # Try decoding a frame that wasn't encoded.
+ self.assertEqual(self.extension.decode(frame), frame)
+
+ def test_no_decode_fragmented_text_frame(self):
+ frame1 = Frame(False, OP_TEXT, "café".encode("utf-8"))
+ frame2 = Frame(False, OP_CONT, " & ".encode("utf-8"))
+ frame3 = Frame(True, OP_CONT, "croissants".encode("utf-8"))
+
+ dec_frame1 = self.extension.decode(frame1)
+ dec_frame2 = self.extension.decode(frame2)
+ dec_frame3 = self.extension.decode(frame3)
+
+ self.assertEqual(dec_frame1, frame1)
+ self.assertEqual(dec_frame2, frame2)
+ self.assertEqual(dec_frame3, frame3)
+
+ def test_no_decode_fragmented_binary_frame(self):
+ frame1 = Frame(False, OP_TEXT, b"tea ")
+ frame2 = Frame(True, OP_CONT, b"time")
+
+ dec_frame1 = self.extension.decode(frame1)
+ dec_frame2 = self.extension.decode(frame2)
+
+ self.assertEqual(dec_frame1, frame1)
+ self.assertEqual(dec_frame2, frame2)
+
+ def test_context_takeover(self):
+ frame = Frame(True, OP_TEXT, "café".encode("utf-8"))
+
+ enc_frame1 = self.extension.encode(frame)
+ enc_frame2 = self.extension.encode(frame)
+
+ self.assertEqual(enc_frame1.data, b"JNL;\xbc\x12\x00")
+ self.assertEqual(enc_frame2.data, b"J\x06\x11\x00\x00")
+
+ def test_remote_no_context_takeover(self):
+ # No context takeover when decoding messages.
+ self.extension = PerMessageDeflate(True, False, 15, 15)
+
+ frame = Frame(True, OP_TEXT, "café".encode("utf-8"))
+
+ enc_frame1 = self.extension.encode(frame)
+ enc_frame2 = self.extension.encode(frame)
+
+ self.assertEqual(enc_frame1.data, b"JNL;\xbc\x12\x00")
+ self.assertEqual(enc_frame2.data, b"J\x06\x11\x00\x00")
+
+ dec_frame1 = self.extension.decode(enc_frame1)
+ self.assertEqual(dec_frame1, frame)
+
+ with self.assertRaises(zlib.error) as exc:
+ self.extension.decode(enc_frame2)
+ self.assertIn("invalid distance too far back", str(exc.exception))
+
+ def test_local_no_context_takeover(self):
+ # No context takeover when encoding and decoding messages.
+ self.extension = PerMessageDeflate(True, True, 15, 15)
+
+ frame = Frame(True, OP_TEXT, "café".encode("utf-8"))
+
+ enc_frame1 = self.extension.encode(frame)
+ enc_frame2 = self.extension.encode(frame)
+
+ self.assertEqual(enc_frame1.data, b"JNL;\xbc\x12\x00")
+ self.assertEqual(enc_frame2.data, b"JNL;\xbc\x12\x00")
+
+ dec_frame1 = self.extension.decode(enc_frame1)
+ dec_frame2 = self.extension.decode(enc_frame2)
+
+ self.assertEqual(dec_frame1, frame)
+ self.assertEqual(dec_frame2, frame)
+
+ # Compression settings can be customized.
+
+ def test_compress_settings(self):
+ # Configure an extension so that no compression actually occurs.
+ extension = PerMessageDeflate(False, False, 15, 15, {"level": 0})
+
+ frame = Frame(True, OP_TEXT, "café".encode("utf-8"))
+
+ enc_frame = extension.encode(frame)
+
+ self.assertEqual(
+ enc_frame,
+ frame._replace(
+ rsv1=True, data=b"\x00\x05\x00\xfa\xffcaf\xc3\xa9\x00" # not compressed
+ ),
+ )
+
+ # Frames aren't decoded beyond max_length.
+
+ def test_decompress_max_size(self):
+ frame = Frame(True, OP_TEXT, ("a" * 20).encode("utf-8"))
+
+ enc_frame = self.extension.encode(frame)
+
+ self.assertEqual(enc_frame.data, b"JL\xc4\x04\x00\x00")
+
+ with self.assertRaises(PayloadTooBig):
+ self.extension.decode(enc_frame, max_size=10)
+
+
+class ClientPerMessageDeflateFactoryTests(unittest.TestCase, ExtensionTestsMixin):
+ def test_name(self):
+ assert ClientPerMessageDeflateFactory.name == "permessage-deflate"
+
+ def test_init(self):
+ for config in [
+ (False, False, 8, None), # server_max_window_bits ≥ 8
+ (False, True, 15, None), # server_max_window_bits ≤ 15
+ (True, False, None, 8), # client_max_window_bits ≥ 8
+ (True, True, None, 15), # client_max_window_bits ≤ 15
+ (False, False, None, True), # client_max_window_bits
+ (False, False, None, None, {"memLevel": 4}),
+ ]:
+ with self.subTest(config=config):
+ # This does not raise an exception.
+ ClientPerMessageDeflateFactory(*config)
+
+ def test_init_error(self):
+ for config in [
+ (False, False, 7, 8), # server_max_window_bits < 8
+ (False, True, 8, 7), # client_max_window_bits < 8
+ (True, False, 16, 15), # server_max_window_bits > 15
+ (True, True, 15, 16), # client_max_window_bits > 15
+ (False, False, True, None), # server_max_window_bits
+ (False, False, None, None, {"wbits": 11}),
+ ]:
+ with self.subTest(config=config):
+ with self.assertRaises(ValueError):
+ ClientPerMessageDeflateFactory(*config)
+
+ def test_get_request_params(self):
+ for config, result in [
+ # Test without any parameter
+ ((False, False, None, None), []),
+ # Test server_no_context_takeover
+ ((True, False, None, None), [("server_no_context_takeover", None)]),
+ # Test client_no_context_takeover
+ ((False, True, None, None), [("client_no_context_takeover", None)]),
+ # Test server_max_window_bits
+ ((False, False, 10, None), [("server_max_window_bits", "10")]),
+ # Test client_max_window_bits
+ ((False, False, None, 10), [("client_max_window_bits", "10")]),
+ ((False, False, None, True), [("client_max_window_bits", None)]),
+ # Test all parameters together
+ (
+ (True, True, 12, 12),
+ [
+ ("server_no_context_takeover", None),
+ ("client_no_context_takeover", None),
+ ("server_max_window_bits", "12"),
+ ("client_max_window_bits", "12"),
+ ],
+ ),
+ ]:
+ with self.subTest(config=config):
+ factory = ClientPerMessageDeflateFactory(*config)
+ self.assertEqual(factory.get_request_params(), result)
+
+ def test_process_response_params(self):
+ for config, response_params, result in [
+ # Test without any parameter
+ ((False, False, None, None), [], (False, False, 15, 15)),
+ ((False, False, None, None), [("unknown", None)], InvalidParameterName),
+ # Test server_no_context_takeover
+ (
+ (False, False, None, None),
+ [("server_no_context_takeover", None)],
+ (True, False, 15, 15),
+ ),
+ ((True, False, None, None), [], NegotiationError),
+ (
+ (True, False, None, None),
+ [("server_no_context_takeover", None)],
+ (True, False, 15, 15),
+ ),
+ (
+ (True, False, None, None),
+ [("server_no_context_takeover", None)] * 2,
+ DuplicateParameter,
+ ),
+ (
+ (True, False, None, None),
+ [("server_no_context_takeover", "42")],
+ InvalidParameterValue,
+ ),
+ # Test client_no_context_takeover
+ (
+ (False, False, None, None),
+ [("client_no_context_takeover", None)],
+ (False, True, 15, 15),
+ ),
+ ((False, True, None, None), [], (False, True, 15, 15)),
+ (
+ (False, True, None, None),
+ [("client_no_context_takeover", None)],
+ (False, True, 15, 15),
+ ),
+ (
+ (False, True, None, None),
+ [("client_no_context_takeover", None)] * 2,
+ DuplicateParameter,
+ ),
+ (
+ (False, True, None, None),
+ [("client_no_context_takeover", "42")],
+ InvalidParameterValue,
+ ),
+ # Test server_max_window_bits
+ (
+ (False, False, None, None),
+ [("server_max_window_bits", "7")],
+ NegotiationError,
+ ),
+ (
+ (False, False, None, None),
+ [("server_max_window_bits", "10")],
+ (False, False, 10, 15),
+ ),
+ (
+ (False, False, None, None),
+ [("server_max_window_bits", "16")],
+ NegotiationError,
+ ),
+ ((False, False, 12, None), [], NegotiationError),
+ (
+ (False, False, 12, None),
+ [("server_max_window_bits", "10")],
+ (False, False, 10, 15),
+ ),
+ (
+ (False, False, 12, None),
+ [("server_max_window_bits", "12")],
+ (False, False, 12, 15),
+ ),
+ (
+ (False, False, 12, None),
+ [("server_max_window_bits", "13")],
+ NegotiationError,
+ ),
+ (
+ (False, False, 12, None),
+ [("server_max_window_bits", "12")] * 2,
+ DuplicateParameter,
+ ),
+ (
+ (False, False, 12, None),
+ [("server_max_window_bits", "42")],
+ InvalidParameterValue,
+ ),
+ # Test client_max_window_bits
+ (
+ (False, False, None, None),
+ [("client_max_window_bits", "10")],
+ NegotiationError,
+ ),
+ ((False, False, None, True), [], (False, False, 15, 15)),
+ (
+ (False, False, None, True),
+ [("client_max_window_bits", "7")],
+ NegotiationError,
+ ),
+ (
+ (False, False, None, True),
+ [("client_max_window_bits", "10")],
+ (False, False, 15, 10),
+ ),
+ (
+ (False, False, None, True),
+ [("client_max_window_bits", "16")],
+ NegotiationError,
+ ),
+ ((False, False, None, 12), [], (False, False, 15, 12)),
+ (
+ (False, False, None, 12),
+ [("client_max_window_bits", "10")],
+ (False, False, 15, 10),
+ ),
+ (
+ (False, False, None, 12),
+ [("client_max_window_bits", "12")],
+ (False, False, 15, 12),
+ ),
+ (
+ (False, False, None, 12),
+ [("client_max_window_bits", "13")],
+ NegotiationError,
+ ),
+ (
+ (False, False, None, 12),
+ [("client_max_window_bits", "12")] * 2,
+ DuplicateParameter,
+ ),
+ (
+ (False, False, None, 12),
+ [("client_max_window_bits", "42")],
+ InvalidParameterValue,
+ ),
+ # Test all parameters together
+ (
+ (True, True, 12, 12),
+ [
+ ("server_no_context_takeover", None),
+ ("client_no_context_takeover", None),
+ ("server_max_window_bits", "10"),
+ ("client_max_window_bits", "10"),
+ ],
+ (True, True, 10, 10),
+ ),
+ (
+ (False, False, None, True),
+ [
+ ("server_no_context_takeover", None),
+ ("client_no_context_takeover", None),
+ ("server_max_window_bits", "10"),
+ ("client_max_window_bits", "10"),
+ ],
+ (True, True, 10, 10),
+ ),
+ (
+ (True, True, 12, 12),
+ [
+ ("server_no_context_takeover", None),
+ ("server_max_window_bits", "12"),
+ ],
+ (True, True, 12, 12),
+ ),
+ ]:
+ with self.subTest(config=config, response_params=response_params):
+ factory = ClientPerMessageDeflateFactory(*config)
+ if isinstance(result, type) and issubclass(result, Exception):
+ with self.assertRaises(result):
+ factory.process_response_params(response_params, [])
+ else:
+ extension = factory.process_response_params(response_params, [])
+ expected = PerMessageDeflate(*result)
+ self.assertExtensionEqual(extension, expected)
+
+ def test_process_response_params_deduplication(self):
+ factory = ClientPerMessageDeflateFactory(False, False, None, None)
+ with self.assertRaises(NegotiationError):
+ factory.process_response_params(
+ [], [PerMessageDeflate(False, False, 15, 15)]
+ )
+
+
+class ServerPerMessageDeflateFactoryTests(unittest.TestCase, ExtensionTestsMixin):
+ def test_name(self):
+ assert ServerPerMessageDeflateFactory.name == "permessage-deflate"
+
+ def test_init(self):
+ for config in [
+ (False, False, 8, None), # server_max_window_bits ≥ 8
+ (False, True, 15, None), # server_max_window_bits ≤ 15
+ (True, False, None, 8), # client_max_window_bits ≥ 8
+ (True, True, None, 15), # client_max_window_bits ≤ 15
+ (False, False, None, None, {"memLevel": 4}),
+ ]:
+ with self.subTest(config=config):
+ # This does not raise an exception.
+ ServerPerMessageDeflateFactory(*config)
+
+ def test_init_error(self):
+ for config in [
+ (False, False, 7, 8), # server_max_window_bits < 8
+ (False, True, 8, 7), # client_max_window_bits < 8
+ (True, False, 16, 15), # server_max_window_bits > 15
+ (True, True, 15, 16), # client_max_window_bits > 15
+ (False, False, None, True), # client_max_window_bits
+ (False, False, True, None), # server_max_window_bits
+ (False, False, None, None, {"wbits": 11}),
+ ]:
+ with self.subTest(config=config):
+ with self.assertRaises(ValueError):
+ ServerPerMessageDeflateFactory(*config)
+
+ def test_process_request_params(self):
+ # Parameters in result appear swapped vs. config because the order is
+ # (remote, local) vs. (server, client).
+ for config, request_params, response_params, result in [
+ # Test without any parameter
+ ((False, False, None, None), [], [], (False, False, 15, 15)),
+ (
+ (False, False, None, None),
+ [("unknown", None)],
+ None,
+ InvalidParameterName,
+ ),
+ # Test server_no_context_takeover
+ (
+ (False, False, None, None),
+ [("server_no_context_takeover", None)],
+ [("server_no_context_takeover", None)],
+ (False, True, 15, 15),
+ ),
+ (
+ (True, False, None, None),
+ [],
+ [("server_no_context_takeover", None)],
+ (False, True, 15, 15),
+ ),
+ (
+ (True, False, None, None),
+ [("server_no_context_takeover", None)],
+ [("server_no_context_takeover", None)],
+ (False, True, 15, 15),
+ ),
+ (
+ (True, False, None, None),
+ [("server_no_context_takeover", None)] * 2,
+ None,
+ DuplicateParameter,
+ ),
+ (
+ (True, False, None, None),
+ [("server_no_context_takeover", "42")],
+ None,
+ InvalidParameterValue,
+ ),
+ # Test client_no_context_takeover
+ (
+ (False, False, None, None),
+ [("client_no_context_takeover", None)],
+ [("client_no_context_takeover", None)], # doesn't matter
+ (True, False, 15, 15),
+ ),
+ (
+ (False, True, None, None),
+ [],
+ [("client_no_context_takeover", None)],
+ (True, False, 15, 15),
+ ),
+ (
+ (False, True, None, None),
+ [("client_no_context_takeover", None)],
+ [("client_no_context_takeover", None)], # doesn't matter
+ (True, False, 15, 15),
+ ),
+ (
+ (False, True, None, None),
+ [("client_no_context_takeover", None)] * 2,
+ None,
+ DuplicateParameter,
+ ),
+ (
+ (False, True, None, None),
+ [("client_no_context_takeover", "42")],
+ None,
+ InvalidParameterValue,
+ ),
+ # Test server_max_window_bits
+ (
+ (False, False, None, None),
+ [("server_max_window_bits", "7")],
+ None,
+ NegotiationError,
+ ),
+ (
+ (False, False, None, None),
+ [("server_max_window_bits", "10")],
+ [("server_max_window_bits", "10")],
+ (False, False, 15, 10),
+ ),
+ (
+ (False, False, None, None),
+ [("server_max_window_bits", "16")],
+ None,
+ NegotiationError,
+ ),
+ (
+ (False, False, 12, None),
+ [],
+ [("server_max_window_bits", "12")],
+ (False, False, 15, 12),
+ ),
+ (
+ (False, False, 12, None),
+ [("server_max_window_bits", "10")],
+ [("server_max_window_bits", "10")],
+ (False, False, 15, 10),
+ ),
+ (
+ (False, False, 12, None),
+ [("server_max_window_bits", "12")],
+ [("server_max_window_bits", "12")],
+ (False, False, 15, 12),
+ ),
+ (
+ (False, False, 12, None),
+ [("server_max_window_bits", "13")],
+ [("server_max_window_bits", "12")],
+ (False, False, 15, 12),
+ ),
+ (
+ (False, False, 12, None),
+ [("server_max_window_bits", "12")] * 2,
+ None,
+ DuplicateParameter,
+ ),
+ (
+ (False, False, 12, None),
+ [("server_max_window_bits", "42")],
+ None,
+ InvalidParameterValue,
+ ),
+ # Test client_max_window_bits
+ (
+ (False, False, None, None),
+ [("client_max_window_bits", None)],
+ [],
+ (False, False, 15, 15),
+ ),
+ (
+ (False, False, None, None),
+ [("client_max_window_bits", "7")],
+ None,
+ InvalidParameterValue,
+ ),
+ (
+ (False, False, None, None),
+ [("client_max_window_bits", "10")],
+ [("client_max_window_bits", "10")], # doesn't matter
+ (False, False, 10, 15),
+ ),
+ (
+ (False, False, None, None),
+ [("client_max_window_bits", "16")],
+ None,
+ InvalidParameterValue,
+ ),
+ ((False, False, None, 12), [], None, NegotiationError),
+ (
+ (False, False, None, 12),
+ [("client_max_window_bits", None)],
+ [("client_max_window_bits", "12")],
+ (False, False, 12, 15),
+ ),
+ (
+ (False, False, None, 12),
+ [("client_max_window_bits", "10")],
+ [("client_max_window_bits", "10")],
+ (False, False, 10, 15),
+ ),
+ (
+ (False, False, None, 12),
+ [("client_max_window_bits", "12")],
+ [("client_max_window_bits", "12")], # doesn't matter
+ (False, False, 12, 15),
+ ),
+ (
+ (False, False, None, 12),
+ [("client_max_window_bits", "13")],
+ [("client_max_window_bits", "12")], # doesn't matter
+ (False, False, 12, 15),
+ ),
+ (
+ (False, False, None, 12),
+ [("client_max_window_bits", "12")] * 2,
+ None,
+ DuplicateParameter,
+ ),
+ (
+ (False, False, None, 12),
+ [("client_max_window_bits", "42")],
+ None,
+ InvalidParameterValue,
+ ),
+ # Test all parameters together
+ (
+ (True, True, 12, 12),
+ [
+ ("server_no_context_takeover", None),
+ ("client_no_context_takeover", None),
+ ("server_max_window_bits", "10"),
+ ("client_max_window_bits", "10"),
+ ],
+ [
+ ("server_no_context_takeover", None),
+ ("client_no_context_takeover", None),
+ ("server_max_window_bits", "10"),
+ ("client_max_window_bits", "10"),
+ ],
+ (True, True, 10, 10),
+ ),
+ (
+ (False, False, None, None),
+ [
+ ("server_no_context_takeover", None),
+ ("client_no_context_takeover", None),
+ ("server_max_window_bits", "10"),
+ ("client_max_window_bits", "10"),
+ ],
+ [
+ ("server_no_context_takeover", None),
+ ("client_no_context_takeover", None),
+ ("server_max_window_bits", "10"),
+ ("client_max_window_bits", "10"),
+ ],
+ (True, True, 10, 10),
+ ),
+ (
+ (True, True, 12, 12),
+ [("client_max_window_bits", None)],
+ [
+ ("server_no_context_takeover", None),
+ ("client_no_context_takeover", None),
+ ("server_max_window_bits", "12"),
+ ("client_max_window_bits", "12"),
+ ],
+ (True, True, 12, 12),
+ ),
+ ]:
+ with self.subTest(
+ config=config,
+ request_params=request_params,
+ response_params=response_params,
+ ):
+ factory = ServerPerMessageDeflateFactory(*config)
+ if isinstance(result, type) and issubclass(result, Exception):
+ with self.assertRaises(result):
+ factory.process_request_params(request_params, [])
+ else:
+ params, extension = factory.process_request_params(
+ request_params, []
+ )
+ self.assertEqual(params, response_params)
+ expected = PerMessageDeflate(*result)
+ self.assertExtensionEqual(extension, expected)
+
+ def test_process_response_params_deduplication(self):
+ factory = ServerPerMessageDeflateFactory(False, False, None, None)
+ with self.assertRaises(NegotiationError):
+ factory.process_request_params(
+ [], [PerMessageDeflate(False, False, 15, 15)]
+ )
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_auth.py b/testing/web-platform/tests/tools/third_party/websockets/tests/test_auth.py
new file mode 100644
index 0000000000..97a4485a0f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_auth.py
@@ -0,0 +1,139 @@
+import unittest
+import urllib.error
+
+from websockets.auth import *
+from websockets.auth import is_credentials
+from websockets.exceptions import InvalidStatusCode
+from websockets.headers import build_authorization_basic
+
+from .test_client_server import ClientServerTestsMixin, with_client, with_server
+from .utils import AsyncioTestCase
+
+
+class AuthTests(unittest.TestCase):
+ def test_is_credentials(self):
+ self.assertTrue(is_credentials(("username", "password")))
+
+ def test_is_not_credentials(self):
+ self.assertFalse(is_credentials(None))
+ self.assertFalse(is_credentials("username"))
+
+
+class AuthClientServerTests(ClientServerTestsMixin, AsyncioTestCase):
+
+ create_protocol = basic_auth_protocol_factory(
+ realm="auth-tests", credentials=("hello", "iloveyou")
+ )
+
+ @with_server(create_protocol=create_protocol)
+ @with_client(user_info=("hello", "iloveyou"))
+ def test_basic_auth(self):
+ req_headers = self.client.request_headers
+ resp_headers = self.client.response_headers
+ self.assertEqual(req_headers["Authorization"], "Basic aGVsbG86aWxvdmV5b3U=")
+ self.assertNotIn("WWW-Authenticate", resp_headers)
+
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ self.loop.run_until_complete(self.client.recv())
+
+ def test_basic_auth_server_no_credentials(self):
+ with self.assertRaises(TypeError) as raised:
+ basic_auth_protocol_factory(realm="auth-tests", credentials=None)
+ self.assertEqual(
+ str(raised.exception), "provide either credentials or check_credentials"
+ )
+
+ def test_basic_auth_server_bad_credentials(self):
+ with self.assertRaises(TypeError) as raised:
+ basic_auth_protocol_factory(realm="auth-tests", credentials=42)
+ self.assertEqual(str(raised.exception), "invalid credentials argument: 42")
+
+ create_protocol_multiple_credentials = basic_auth_protocol_factory(
+ realm="auth-tests",
+ credentials=[("hello", "iloveyou"), ("goodbye", "stillloveu")],
+ )
+
+ @with_server(create_protocol=create_protocol_multiple_credentials)
+ @with_client(user_info=("hello", "iloveyou"))
+ def test_basic_auth_server_multiple_credentials(self):
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ self.loop.run_until_complete(self.client.recv())
+
+ def test_basic_auth_bad_multiple_credentials(self):
+ with self.assertRaises(TypeError) as raised:
+ basic_auth_protocol_factory(
+ realm="auth-tests", credentials=[("hello", "iloveyou"), 42]
+ )
+ self.assertEqual(
+ str(raised.exception),
+ "invalid credentials argument: [('hello', 'iloveyou'), 42]",
+ )
+
+ async def check_credentials(username, password):
+ return password == "iloveyou"
+
+ create_protocol_check_credentials = basic_auth_protocol_factory(
+ realm="auth-tests", check_credentials=check_credentials
+ )
+
+ @with_server(create_protocol=create_protocol_check_credentials)
+ @with_client(user_info=("hello", "iloveyou"))
+ def test_basic_auth_check_credentials(self):
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ self.loop.run_until_complete(self.client.recv())
+
+ @with_server(create_protocol=create_protocol)
+ def test_basic_auth_missing_credentials(self):
+ with self.assertRaises(InvalidStatusCode) as raised:
+ self.start_client()
+ self.assertEqual(raised.exception.status_code, 401)
+
+ @with_server(create_protocol=create_protocol)
+ def test_basic_auth_missing_credentials_details(self):
+ with self.assertRaises(urllib.error.HTTPError) as raised:
+ self.loop.run_until_complete(self.make_http_request())
+ self.assertEqual(raised.exception.code, 401)
+ self.assertEqual(
+ raised.exception.headers["WWW-Authenticate"],
+ 'Basic realm="auth-tests", charset="UTF-8"',
+ )
+ self.assertEqual(raised.exception.read().decode(), "Missing credentials\n")
+
+ @with_server(create_protocol=create_protocol)
+ def test_basic_auth_unsupported_credentials(self):
+ with self.assertRaises(InvalidStatusCode) as raised:
+ self.start_client(extra_headers={"Authorization": "Digest ..."})
+ self.assertEqual(raised.exception.status_code, 401)
+
+ @with_server(create_protocol=create_protocol)
+ def test_basic_auth_unsupported_credentials_details(self):
+ with self.assertRaises(urllib.error.HTTPError) as raised:
+ self.loop.run_until_complete(
+ self.make_http_request(headers={"Authorization": "Digest ..."})
+ )
+ self.assertEqual(raised.exception.code, 401)
+ self.assertEqual(
+ raised.exception.headers["WWW-Authenticate"],
+ 'Basic realm="auth-tests", charset="UTF-8"',
+ )
+ self.assertEqual(raised.exception.read().decode(), "Unsupported credentials\n")
+
+ @with_server(create_protocol=create_protocol)
+ def test_basic_auth_invalid_credentials(self):
+ with self.assertRaises(InvalidStatusCode) as raised:
+ self.start_client(user_info=("hello", "ihateyou"))
+ self.assertEqual(raised.exception.status_code, 401)
+
+ @with_server(create_protocol=create_protocol)
+ def test_basic_auth_invalid_credentials_details(self):
+ with self.assertRaises(urllib.error.HTTPError) as raised:
+ authorization = build_authorization_basic("hello", "ihateyou")
+ self.loop.run_until_complete(
+ self.make_http_request(headers={"Authorization": authorization})
+ )
+ self.assertEqual(raised.exception.code, 401)
+ self.assertEqual(
+ raised.exception.headers["WWW-Authenticate"],
+ 'Basic realm="auth-tests", charset="UTF-8"',
+ )
+ self.assertEqual(raised.exception.read().decode(), "Invalid credentials\n")
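
The tests above attach basic_auth_protocol_factory to the test server through the create_protocol argument. A minimal standalone sketch of that wiring, assuming the same websockets API the test file imports; the echo handler, host, and port below are illustrative placeholders:

    import asyncio

    import websockets
    from websockets.auth import basic_auth_protocol_factory

    async def echo(ws, path):
        # Echo a single message back to the authenticated client.
        await ws.send(await ws.recv())

    # Require HTTP Basic Auth with a fixed username/password pair.
    create_protocol = basic_auth_protocol_factory(
        realm="auth-tests", credentials=("hello", "iloveyou")
    )

    start_server = websockets.serve(
        echo, "localhost", 8765, create_protocol=create_protocol
    )

    loop = asyncio.get_event_loop()
    loop.run_until_complete(start_server)
    # Clients without a valid Authorization header are rejected with HTTP 401.
    # loop.run_forever()
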
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_client_server.py b/testing/web-platform/tests/tools/third_party/websockets/tests/test_client_server.py
new file mode 100644
index 0000000000..35913666c5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_client_server.py
@@ -0,0 +1,1546 @@
+import asyncio
+import contextlib
+import functools
+import http
+import pathlib
+import random
+import socket
+import ssl
+import tempfile
+import unittest
+import unittest.mock
+import urllib.error
+import urllib.request
+import warnings
+
+from websockets.client import *
+from websockets.exceptions import (
+ ConnectionClosed,
+ InvalidHandshake,
+ InvalidHeader,
+ InvalidStatusCode,
+ NegotiationError,
+)
+from websockets.extensions.permessage_deflate import (
+ ClientPerMessageDeflateFactory,
+ PerMessageDeflate,
+ ServerPerMessageDeflateFactory,
+)
+from websockets.handshake import build_response
+from websockets.http import USER_AGENT, Headers, read_response
+from websockets.protocol import State
+from websockets.server import *
+from websockets.uri import parse_uri
+
+from .test_protocol import MS
+from .utils import AsyncioTestCase
+
+
+# Generate TLS certificate with:
+# $ openssl req -x509 -config test_localhost.cnf -days 15340 -newkey rsa:2048 \
+# -out test_localhost.crt -keyout test_localhost.key
+# $ cat test_localhost.key test_localhost.crt > test_localhost.pem
+# $ rm test_localhost.key test_localhost.crt
+
+testcert = bytes(pathlib.Path(__file__).with_name("test_localhost.pem"))
+
+
+async def handler(ws, path):
+ if path == "/deprecated_attributes":
+ await ws.recv() # delay that allows catching warnings
+ await ws.send(repr((ws.host, ws.port, ws.secure)))
+ elif path == "/close_timeout":
+ await ws.send(repr(ws.close_timeout))
+ elif path == "/path":
+ await ws.send(str(ws.path))
+ elif path == "/headers":
+ await ws.send(repr(ws.request_headers))
+ await ws.send(repr(ws.response_headers))
+ elif path == "/extensions":
+ await ws.send(repr(ws.extensions))
+ elif path == "/subprotocol":
+ await ws.send(repr(ws.subprotocol))
+ elif path == "/slow_stop":
+ await ws.wait_closed()
+ await asyncio.sleep(2 * MS)
+ else:
+ await ws.send((await ws.recv()))
+
+
+@contextlib.contextmanager
+def temp_test_server(test, **kwargs):
+ test.start_server(**kwargs)
+ try:
+ yield
+ finally:
+ test.stop_server()
+
+
+@contextlib.contextmanager
+def temp_test_redirecting_server(
+ test, status, include_location=True, force_insecure=False
+):
+ test.start_redirecting_server(status, include_location, force_insecure)
+ try:
+ yield
+ finally:
+ test.stop_redirecting_server()
+
+
+@contextlib.contextmanager
+def temp_test_client(test, *args, **kwargs):
+ test.start_client(*args, **kwargs)
+ try:
+ yield
+ finally:
+ test.stop_client()
+
+
+def with_manager(manager, *args, **kwargs):
+ """
+ Return a decorator that wraps a function with a context manager.
+
+ """
+
+ def decorate(func):
+ @functools.wraps(func)
+ def _decorate(self, *_args, **_kwargs):
+ with manager(self, *args, **kwargs):
+ return func(self, *_args, **_kwargs)
+
+ return _decorate
+
+ return decorate
+
+
+def with_server(**kwargs):
+ """
+ Return a decorator for TestCase methods that starts and stops a server.
+
+ """
+ return with_manager(temp_test_server, **kwargs)
+
+
+def with_client(*args, **kwargs):
+ """
+ Return a decorator for TestCase methods that starts and stops a client.
+
+ """
+ return with_manager(temp_test_client, *args, **kwargs)
+
+
+def get_server_uri(server, secure=False, resource_name="/", user_info=None):
+ """
+ Return a WebSocket URI for connecting to the given server.
+
+ """
+ proto = "wss" if secure else "ws"
+
+ user_info = ":".join(user_info) + "@" if user_info else ""
+
+ # Pick a random socket in order to test both IPv4 and IPv6 on systems
+ # where both are available. Randomizing tests is usually a bad idea. If
+ # needed, either use the first socket, or test separately IPv4 and IPv6.
+ server_socket = random.choice(server.sockets)
+
+ if server_socket.family == socket.AF_INET6: # pragma: no cover
+ host, port = server_socket.getsockname()[:2] # (no IPv6 on CI)
+ host = f"[{host}]"
+ elif server_socket.family == socket.AF_INET:
+ host, port = server_socket.getsockname()
+ else: # pragma: no cover
+ raise ValueError("expected an IPv6, IPv4, or Unix socket")
+
+ return f"{proto}://{user_info}{host}:{port}{resource_name}"
+
+
+class UnauthorizedServerProtocol(WebSocketServerProtocol):
+ async def process_request(self, path, request_headers):
+ # Test returning headers as a Headers instance (1/3)
+ return http.HTTPStatus.UNAUTHORIZED, Headers([("X-Access", "denied")]), b""
+
+
+class ForbiddenServerProtocol(WebSocketServerProtocol):
+ async def process_request(self, path, request_headers):
+ # Test returning headers as a dict (2/3)
+ return http.HTTPStatus.FORBIDDEN, {"X-Access": "denied"}, b""
+
+
+class HealthCheckServerProtocol(WebSocketServerProtocol):
+ async def process_request(self, path, request_headers):
+ # Test returning headers as a list of pairs (3/3)
+ if path == "/__health__/":
+ return http.HTTPStatus.OK, [("X-Access", "OK")], b"status = green\n"
+
+
+class SlowOpeningHandshakeProtocol(WebSocketServerProtocol):
+ async def process_request(self, path, request_headers):
+ await asyncio.sleep(10 * MS)
+
+
+class FooClientProtocol(WebSocketClientProtocol):
+ pass
+
+
+class BarClientProtocol(WebSocketClientProtocol):
+ pass
+
+
+class ClientNoOpExtensionFactory:
+ name = "x-no-op"
+
+ def get_request_params(self):
+ return []
+
+ def process_response_params(self, params, accepted_extensions):
+ if params:
+ raise NegotiationError()
+ return NoOpExtension()
+
+
+class ServerNoOpExtensionFactory:
+ name = "x-no-op"
+
+ def __init__(self, params=None):
+ self.params = params or []
+
+ def process_request_params(self, params, accepted_extensions):
+ return self.params, NoOpExtension()
+
+
+class NoOpExtension:
+ name = "x-no-op"
+
+ def __repr__(self):
+ return "NoOpExtension()"
+
+ def decode(self, frame, *, max_size=None):
+ return frame
+
+ def encode(self, frame):
+ return frame
+
+
+class ClientServerTestsMixin:
+
+ secure = False
+
+ def setUp(self):
+ super().setUp()
+ self.server = None
+ self.redirecting_server = None
+
+ @property
+ def server_context(self):
+ return None
+
+ def start_server(self, deprecation_warnings=None, **kwargs):
+ # Disable compression by default in tests.
+ kwargs.setdefault("compression", None)
+ # Disable pings by default in tests.
+ kwargs.setdefault("ping_interval", None)
+
+ with warnings.catch_warnings(record=True) as recorded_warnings:
+ start_server = serve(handler, "localhost", 0, **kwargs)
+ self.server = self.loop.run_until_complete(start_server)
+
+ expected_warnings = [] if deprecation_warnings is None else deprecation_warnings
+ self.assertDeprecationWarnings(recorded_warnings, expected_warnings)
+
+ def start_redirecting_server(
+ self, status, include_location=True, force_insecure=False
+ ):
+ async def process_request(path, headers):
+ server_uri = get_server_uri(self.server, self.secure, path)
+ if force_insecure:
+ server_uri = server_uri.replace("wss:", "ws:")
+ headers = {"Location": server_uri} if include_location else []
+ return status, headers, b""
+
+ start_server = serve(
+ handler,
+ "localhost",
+ 0,
+ compression=None,
+ ping_interval=None,
+ process_request=process_request,
+ ssl=self.server_context,
+ )
+ self.redirecting_server = self.loop.run_until_complete(start_server)
+
+ def start_client(
+ self, resource_name="/", user_info=None, deprecation_warnings=None, **kwargs
+ ):
+ # Disable compression by default in tests.
+ kwargs.setdefault("compression", None)
+ # Disable pings by default in tests.
+ kwargs.setdefault("ping_interval", None)
+ secure = kwargs.get("ssl") is not None
+ try:
+ server_uri = kwargs.pop("uri")
+ except KeyError:
+ server = self.redirecting_server if self.redirecting_server else self.server
+ server_uri = get_server_uri(server, secure, resource_name, user_info)
+
+ with warnings.catch_warnings(record=True) as recorded_warnings:
+ start_client = connect(server_uri, **kwargs)
+ self.client = self.loop.run_until_complete(start_client)
+
+ expected_warnings = [] if deprecation_warnings is None else deprecation_warnings
+ self.assertDeprecationWarnings(recorded_warnings, expected_warnings)
+
+ def stop_client(self):
+ try:
+ self.loop.run_until_complete(
+ asyncio.wait_for(self.client.close_connection_task, timeout=1)
+ )
+ except asyncio.TimeoutError: # pragma: no cover
+ self.fail("Client failed to stop")
+
+ def stop_server(self):
+ self.server.close()
+ try:
+ self.loop.run_until_complete(
+ asyncio.wait_for(self.server.wait_closed(), timeout=1)
+ )
+ except asyncio.TimeoutError: # pragma: no cover
+ self.fail("Server failed to stop")
+
+ def stop_redirecting_server(self):
+ self.redirecting_server.close()
+ try:
+ self.loop.run_until_complete(
+ asyncio.wait_for(self.redirecting_server.wait_closed(), timeout=1)
+ )
+ except asyncio.TimeoutError: # pragma: no cover
+ self.fail("Redirecting server failed to stop")
+ finally:
+ self.redirecting_server = None
+
+ @contextlib.contextmanager
+ def temp_server(self, **kwargs):
+ with temp_test_server(self, **kwargs):
+ yield
+
+ @contextlib.contextmanager
+ def temp_client(self, *args, **kwargs):
+ with temp_test_client(self, *args, **kwargs):
+ yield
+
+ def make_http_request(self, path="/", headers=None):
+ if headers is None:
+ headers = {}
+
+ # Set url to 'https?://<host>:<port><path>'.
+ url = get_server_uri(
+ self.server, resource_name=path, secure=self.secure
+ ).replace("ws", "http")
+
+ request = urllib.request.Request(url=url, headers=headers)
+
+ if self.secure:
+ open_health_check = functools.partial(
+ urllib.request.urlopen, request, context=self.client_context
+ )
+ else:
+ open_health_check = functools.partial(urllib.request.urlopen, request)
+
+ return self.loop.run_in_executor(None, open_health_check)
+
+
+class SecureClientServerTestsMixin(ClientServerTestsMixin):
+
+ secure = True
+
+ @property
+ def server_context(self):
+ ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+ ssl_context.load_cert_chain(testcert)
+ return ssl_context
+
+ @property
+ def client_context(self):
+ ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ ssl_context.load_verify_locations(testcert)
+ return ssl_context
+
+ def start_server(self, **kwargs):
+ kwargs.setdefault("ssl", self.server_context)
+ super().start_server(**kwargs)
+
+ def start_client(self, path="/", **kwargs):
+ kwargs.setdefault("ssl", self.client_context)
+ super().start_client(path, **kwargs)
+
+
+class CommonClientServerTests:
+ """
+ Mixin that defines most tests but doesn't inherit unittest.TestCase.
+
+ Tests are run by the ClientServerTests and SecureClientServerTests subclasses.
+
+ """
+
+ @with_server()
+ @with_client()
+ def test_basic(self):
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ reply = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(reply, "Hello!")
+
+ @with_server()
+ def test_redirect(self):
+ redirect_statuses = [
+ http.HTTPStatus.MOVED_PERMANENTLY,
+ http.HTTPStatus.FOUND,
+ http.HTTPStatus.SEE_OTHER,
+ http.HTTPStatus.TEMPORARY_REDIRECT,
+ http.HTTPStatus.PERMANENT_REDIRECT,
+ ]
+ for status in redirect_statuses:
+ with temp_test_redirecting_server(self, status):
+ with temp_test_client(self):
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ reply = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(reply, "Hello!")
+
+ def test_infinite_redirect(self):
+ with temp_test_redirecting_server(self, http.HTTPStatus.FOUND):
+ self.server = self.redirecting_server
+ with self.assertRaises(InvalidHandshake):
+ with temp_test_client(self):
+ self.fail("Did not raise") # pragma: no cover
+
+ @with_server()
+ def test_redirect_missing_location(self):
+ with temp_test_redirecting_server(
+ self, http.HTTPStatus.FOUND, include_location=False
+ ):
+ with self.assertRaises(InvalidHeader):
+ with temp_test_client(self):
+ self.fail("Did not raise") # pragma: no cover
+
+ def test_explicit_event_loop(self):
+ with self.temp_server(loop=self.loop):
+ with self.temp_client(loop=self.loop):
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ reply = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(reply, "Hello!")
+
+ @with_server()
+ def test_explicit_host_port(self):
+ uri = get_server_uri(self.server, self.secure)
+ wsuri = parse_uri(uri)
+
+ # Change host and port to invalid values.
+ changed_uri = uri.replace(wsuri.host, "example.com").replace(
+ str(wsuri.port), str(65535 - wsuri.port)
+ )
+
+ with self.temp_client(uri=changed_uri, host=wsuri.host, port=wsuri.port):
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ reply = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(reply, "Hello!")
+
+ @with_server()
+ def test_explicit_socket(self):
+ class TrackedSocket(socket.socket):
+ def __init__(self, *args, **kwargs):
+ self.used_for_read = False
+ self.used_for_write = False
+ super().__init__(*args, **kwargs)
+
+ def recv(self, *args, **kwargs):
+ self.used_for_read = True
+ return super().recv(*args, **kwargs)
+
+ def send(self, *args, **kwargs):
+ self.used_for_write = True
+ return super().send(*args, **kwargs)
+
+ server_socket = [
+ sock for sock in self.server.sockets if sock.family == socket.AF_INET
+ ][0]
+ client_socket = TrackedSocket(socket.AF_INET, socket.SOCK_STREAM)
+ client_socket.connect(server_socket.getsockname())
+
+ try:
+ self.assertFalse(client_socket.used_for_read)
+ self.assertFalse(client_socket.used_for_write)
+
+ with self.temp_client(
+ sock=client_socket,
+ # "You must set server_hostname when using ssl without a host"
+ server_hostname="localhost" if self.secure else None,
+ ):
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ reply = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(reply, "Hello!")
+
+ self.assertTrue(client_socket.used_for_read)
+ self.assertTrue(client_socket.used_for_write)
+
+ finally:
+ client_socket.close()
+
+ @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "this test requires Unix sockets")
+ def test_unix_socket(self):
+ with tempfile.TemporaryDirectory() as temp_dir:
+ path = bytes(pathlib.Path(temp_dir) / "websockets")
+
+ # Like self.start_server() but with unix_serve().
+ unix_server = unix_serve(handler, path)
+ self.server = self.loop.run_until_complete(unix_server)
+ try:
+ # Like self.start_client() but with unix_connect()
+ unix_client = unix_connect(path)
+ self.client = self.loop.run_until_complete(unix_client)
+ try:
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ reply = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(reply, "Hello!")
+ finally:
+ self.stop_client()
+ finally:
+ self.stop_server()
+
+ async def process_request_OK(path, request_headers):
+ return http.HTTPStatus.OK, [], b"OK\n"
+
+ @with_server(process_request=process_request_OK)
+ def test_process_request_argument(self):
+ response = self.loop.run_until_complete(self.make_http_request("/"))
+
+ with contextlib.closing(response):
+ self.assertEqual(response.code, 200)
+
+ def legacy_process_request_OK(path, request_headers):
+ return http.HTTPStatus.OK, [], b"OK\n"
+
+ @with_server(process_request=legacy_process_request_OK)
+ def test_process_request_argument_backwards_compatibility(self):
+ with warnings.catch_warnings(record=True) as recorded_warnings:
+ response = self.loop.run_until_complete(self.make_http_request("/"))
+
+ with contextlib.closing(response):
+ self.assertEqual(response.code, 200)
+
+ self.assertDeprecationWarnings(
+ recorded_warnings, ["declare process_request as a coroutine"]
+ )
+
+ class ProcessRequestOKServerProtocol(WebSocketServerProtocol):
+ async def process_request(self, path, request_headers):
+ return http.HTTPStatus.OK, [], b"OK\n"
+
+ @with_server(create_protocol=ProcessRequestOKServerProtocol)
+ def test_process_request_override(self):
+ response = self.loop.run_until_complete(self.make_http_request("/"))
+
+ with contextlib.closing(response):
+ self.assertEqual(response.code, 200)
+
+ class LegacyProcessRequestOKServerProtocol(WebSocketServerProtocol):
+ def process_request(self, path, request_headers):
+ return http.HTTPStatus.OK, [], b"OK\n"
+
+ @with_server(create_protocol=LegacyProcessRequestOKServerProtocol)
+ def test_process_request_override_backwards_compatibility(self):
+ with warnings.catch_warnings(record=True) as recorded_warnings:
+ response = self.loop.run_until_complete(self.make_http_request("/"))
+
+ with contextlib.closing(response):
+ self.assertEqual(response.code, 200)
+
+ self.assertDeprecationWarnings(
+ recorded_warnings, ["declare process_request as a coroutine"]
+ )
+
+ def select_subprotocol_chat(client_subprotocols, server_subprotocols):
+ return "chat"
+
+ @with_server(
+ subprotocols=["superchat", "chat"], select_subprotocol=select_subprotocol_chat
+ )
+ @with_client("/subprotocol", subprotocols=["superchat", "chat"])
+ def test_select_subprotocol_argument(self):
+ server_subprotocol = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_subprotocol, repr("chat"))
+ self.assertEqual(self.client.subprotocol, "chat")
+
+ class SelectSubprotocolChatServerProtocol(WebSocketServerProtocol):
+ def select_subprotocol(self, client_subprotocols, server_subprotocols):
+ return "chat"
+
+ @with_server(
+ subprotocols=["superchat", "chat"],
+ create_protocol=SelectSubprotocolChatServerProtocol,
+ )
+ @with_client("/subprotocol", subprotocols=["superchat", "chat"])
+ def test_select_subprotocol_override(self):
+ server_subprotocol = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_subprotocol, repr("chat"))
+ self.assertEqual(self.client.subprotocol, "chat")
+
+ @with_server()
+ @with_client("/deprecated_attributes")
+ def test_protocol_deprecated_attributes(self):
+ # The test could be connecting with IPv6 or IPv4.
+ expected_client_attrs = [
+ server_socket.getsockname()[:2] + (self.secure,)
+ for server_socket in self.server.sockets
+ ]
+ with warnings.catch_warnings(record=True) as recorded_warnings:
+ client_attrs = (self.client.host, self.client.port, self.client.secure)
+ self.assertDeprecationWarnings(
+ recorded_warnings,
+ [
+ "use remote_address[0] instead of host",
+ "use remote_address[1] instead of port",
+ "don't use secure",
+ ],
+ )
+ self.assertIn(client_attrs, expected_client_attrs)
+
+ expected_server_attrs = ("localhost", 0, self.secure)
+ with warnings.catch_warnings(record=True) as recorded_warnings:
+ self.loop.run_until_complete(self.client.send(""))
+ server_attrs = self.loop.run_until_complete(self.client.recv())
+ self.assertDeprecationWarnings(
+ recorded_warnings,
+ [
+ "use local_address[0] instead of host",
+ "use local_address[1] instead of port",
+ "don't use secure",
+ ],
+ )
+ self.assertEqual(server_attrs, repr(expected_server_attrs))
+
+ @with_server()
+ @with_client("/path")
+ def test_protocol_path(self):
+ client_path = self.client.path
+ self.assertEqual(client_path, "/path")
+ server_path = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_path, "/path")
+
+ @with_server()
+ @with_client("/headers")
+ def test_protocol_headers(self):
+ client_req = self.client.request_headers
+ client_resp = self.client.response_headers
+ self.assertEqual(client_req["User-Agent"], USER_AGENT)
+ self.assertEqual(client_resp["Server"], USER_AGENT)
+ server_req = self.loop.run_until_complete(self.client.recv())
+ server_resp = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_req, repr(client_req))
+ self.assertEqual(server_resp, repr(client_resp))
+
+ @with_server()
+ @with_client("/headers", extra_headers=Headers({"X-Spam": "Eggs"}))
+ def test_protocol_custom_request_headers(self):
+ req_headers = self.loop.run_until_complete(self.client.recv())
+ self.loop.run_until_complete(self.client.recv())
+ self.assertIn("('X-Spam', 'Eggs')", req_headers)
+
+ @with_server()
+ @with_client("/headers", extra_headers={"X-Spam": "Eggs"})
+ def test_protocol_custom_request_headers_dict(self):
+ req_headers = self.loop.run_until_complete(self.client.recv())
+ self.loop.run_until_complete(self.client.recv())
+ self.assertIn("('X-Spam', 'Eggs')", req_headers)
+
+ @with_server()
+ @with_client("/headers", extra_headers=[("X-Spam", "Eggs")])
+ def test_protocol_custom_request_headers_list(self):
+ req_headers = self.loop.run_until_complete(self.client.recv())
+ self.loop.run_until_complete(self.client.recv())
+ self.assertIn("('X-Spam', 'Eggs')", req_headers)
+
+ @with_server()
+ @with_client("/headers", extra_headers=[("User-Agent", "Eggs")])
+ def test_protocol_custom_request_user_agent(self):
+ req_headers = self.loop.run_until_complete(self.client.recv())
+ self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(req_headers.count("User-Agent"), 1)
+ self.assertIn("('User-Agent', 'Eggs')", req_headers)
+
+ @with_server(extra_headers=lambda p, r: Headers({"X-Spam": "Eggs"}))
+ @with_client("/headers")
+ def test_protocol_custom_response_headers_callable(self):
+ self.loop.run_until_complete(self.client.recv())
+ resp_headers = self.loop.run_until_complete(self.client.recv())
+ self.assertIn("('X-Spam', 'Eggs')", resp_headers)
+
+ @with_server(extra_headers=lambda p, r: {"X-Spam": "Eggs"})
+ @with_client("/headers")
+ def test_protocol_custom_response_headers_callable_dict(self):
+ self.loop.run_until_complete(self.client.recv())
+ resp_headers = self.loop.run_until_complete(self.client.recv())
+ self.assertIn("('X-Spam', 'Eggs')", resp_headers)
+
+ @with_server(extra_headers=lambda p, r: [("X-Spam", "Eggs")])
+ @with_client("/headers")
+ def test_protocol_custom_response_headers_callable_list(self):
+ self.loop.run_until_complete(self.client.recv())
+ resp_headers = self.loop.run_until_complete(self.client.recv())
+ self.assertIn("('X-Spam', 'Eggs')", resp_headers)
+
+ @with_server(extra_headers=lambda p, r: None)
+ @with_client("/headers")
+ def test_protocol_custom_response_headers_callable_none(self):
+ self.loop.run_until_complete(self.client.recv()) # doesn't crash
+ self.loop.run_until_complete(self.client.recv()) # nothing to check
+
+ @with_server(extra_headers=Headers({"X-Spam": "Eggs"}))
+ @with_client("/headers")
+ def test_protocol_custom_response_headers(self):
+ self.loop.run_until_complete(self.client.recv())
+ resp_headers = self.loop.run_until_complete(self.client.recv())
+ self.assertIn("('X-Spam', 'Eggs')", resp_headers)
+
+ @with_server(extra_headers={"X-Spam": "Eggs"})
+ @with_client("/headers")
+ def test_protocol_custom_response_headers_dict(self):
+ self.loop.run_until_complete(self.client.recv())
+ resp_headers = self.loop.run_until_complete(self.client.recv())
+ self.assertIn("('X-Spam', 'Eggs')", resp_headers)
+
+ @with_server(extra_headers=[("X-Spam", "Eggs")])
+ @with_client("/headers")
+ def test_protocol_custom_response_headers_list(self):
+ self.loop.run_until_complete(self.client.recv())
+ resp_headers = self.loop.run_until_complete(self.client.recv())
+ self.assertIn("('X-Spam', 'Eggs')", resp_headers)
+
+ @with_server(extra_headers=[("Server", "Eggs")])
+ @with_client("/headers")
+ def test_protocol_custom_response_user_agent(self):
+ self.loop.run_until_complete(self.client.recv())
+ resp_headers = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(resp_headers.count("Server"), 1)
+ self.assertIn("('Server', 'Eggs')", resp_headers)
+
+ @with_server(create_protocol=HealthCheckServerProtocol)
+ def test_http_request_http_endpoint(self):
+ # Making an HTTP request to an HTTP endpoint succeeds.
+ response = self.loop.run_until_complete(self.make_http_request("/__health__/"))
+
+ with contextlib.closing(response):
+ self.assertEqual(response.code, 200)
+ self.assertEqual(response.read(), b"status = green\n")
+
+ @with_server(create_protocol=HealthCheckServerProtocol)
+ def test_http_request_ws_endpoint(self):
+ # Making an HTTP request to a WS endpoint fails.
+ with self.assertRaises(urllib.error.HTTPError) as raised:
+ self.loop.run_until_complete(self.make_http_request())
+
+ self.assertEqual(raised.exception.code, 426)
+ self.assertEqual(raised.exception.headers["Upgrade"], "websocket")
+
+ @with_server(create_protocol=HealthCheckServerProtocol)
+ def test_ws_connection_http_endpoint(self):
+ # Making a WS connection to an HTTP endpoint fails.
+ with self.assertRaises(InvalidStatusCode) as raised:
+ self.start_client("/__health__/")
+
+ self.assertEqual(raised.exception.status_code, 200)
+
+ @with_server(create_protocol=HealthCheckServerProtocol)
+ def test_ws_connection_ws_endpoint(self):
+ # Making a WS connection to a WS endpoint succeeds.
+ self.start_client()
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ self.loop.run_until_complete(self.client.recv())
+ self.stop_client()
+
+ def assert_client_raises_code(self, status_code):
+ with self.assertRaises(InvalidStatusCode) as raised:
+ self.start_client()
+ self.assertEqual(raised.exception.status_code, status_code)
+
+ @with_server(create_protocol=UnauthorizedServerProtocol)
+ def test_server_create_protocol(self):
+ self.assert_client_raises_code(401)
+
+ def create_unauthorized_server_protocol(*args, **kwargs):
+ return UnauthorizedServerProtocol(*args, **kwargs)
+
+ @with_server(create_protocol=create_unauthorized_server_protocol)
+ def test_server_create_protocol_function(self):
+ self.assert_client_raises_code(401)
+
+ @with_server(
+ klass=UnauthorizedServerProtocol,
+ deprecation_warnings=["rename klass to create_protocol"],
+ )
+ def test_server_klass_backwards_compatibility(self):
+ self.assert_client_raises_code(401)
+
+ @with_server(
+ create_protocol=ForbiddenServerProtocol,
+ klass=UnauthorizedServerProtocol,
+ deprecation_warnings=["rename klass to create_protocol"],
+ )
+ def test_server_create_protocol_over_klass(self):
+ self.assert_client_raises_code(403)
+
+ @with_server()
+ @with_client("/path", create_protocol=FooClientProtocol)
+ def test_client_create_protocol(self):
+ self.assertIsInstance(self.client, FooClientProtocol)
+
+ @with_server()
+ @with_client(
+ "/path",
+ create_protocol=(lambda *args, **kwargs: FooClientProtocol(*args, **kwargs)),
+ )
+ def test_client_create_protocol_function(self):
+ self.assertIsInstance(self.client, FooClientProtocol)
+
+ @with_server()
+ @with_client(
+ "/path",
+ klass=FooClientProtocol,
+ deprecation_warnings=["rename klass to create_protocol"],
+ )
+ def test_client_klass(self):
+ self.assertIsInstance(self.client, FooClientProtocol)
+
+ @with_server()
+ @with_client(
+ "/path",
+ create_protocol=BarClientProtocol,
+ klass=FooClientProtocol,
+ deprecation_warnings=["rename klass to create_protocol"],
+ )
+ def test_client_create_protocol_over_klass(self):
+ self.assertIsInstance(self.client, BarClientProtocol)
+
+ @with_server(close_timeout=7)
+ @with_client("/close_timeout")
+ def test_server_close_timeout(self):
+ close_timeout = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(eval(close_timeout), 7)
+
+ @with_server(timeout=6, deprecation_warnings=["rename timeout to close_timeout"])
+ @with_client("/close_timeout")
+ def test_server_timeout_backwards_compatibility(self):
+ close_timeout = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(eval(close_timeout), 6)
+
+ @with_server(
+ close_timeout=7,
+ timeout=6,
+ deprecation_warnings=["rename timeout to close_timeout"],
+ )
+ @with_client("/close_timeout")
+ def test_server_close_timeout_over_timeout(self):
+ close_timeout = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(eval(close_timeout), 7)
+
+ @with_server()
+ @with_client("/close_timeout", close_timeout=7)
+ def test_client_close_timeout(self):
+ self.assertEqual(self.client.close_timeout, 7)
+
+ @with_server()
+ @with_client(
+ "/close_timeout",
+ timeout=6,
+ deprecation_warnings=["rename timeout to close_timeout"],
+ )
+ def test_client_timeout_backwards_compatibility(self):
+ self.assertEqual(self.client.close_timeout, 6)
+
+ @with_server()
+ @with_client(
+ "/close_timeout",
+ close_timeout=7,
+ timeout=6,
+ deprecation_warnings=["rename timeout to close_timeout"],
+ )
+ def test_client_close_timeout_over_timeout(self):
+ self.assertEqual(self.client.close_timeout, 7)
+
+ @with_server()
+ @with_client("/extensions")
+ def test_no_extension(self):
+ server_extensions = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_extensions, repr([]))
+ self.assertEqual(repr(self.client.extensions), repr([]))
+
+ @with_server(extensions=[ServerNoOpExtensionFactory()])
+ @with_client("/extensions", extensions=[ClientNoOpExtensionFactory()])
+ def test_extension(self):
+ server_extensions = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_extensions, repr([NoOpExtension()]))
+ self.assertEqual(repr(self.client.extensions), repr([NoOpExtension()]))
+
+ @with_server()
+ @with_client("/extensions", extensions=[ClientNoOpExtensionFactory()])
+ def test_extension_not_accepted(self):
+ server_extensions = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_extensions, repr([]))
+ self.assertEqual(repr(self.client.extensions), repr([]))
+
+ @with_server(extensions=[ServerNoOpExtensionFactory()])
+ @with_client("/extensions")
+ def test_extension_not_requested(self):
+ server_extensions = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_extensions, repr([]))
+ self.assertEqual(repr(self.client.extensions), repr([]))
+
+ @with_server(extensions=[ServerNoOpExtensionFactory([("foo", None)])])
+ def test_extension_client_rejection(self):
+ with self.assertRaises(NegotiationError):
+ self.start_client("/extensions", extensions=[ClientNoOpExtensionFactory()])
+
+ @with_server(
+ extensions=[
+ # No match because the client doesn't send client_max_window_bits.
+ ServerPerMessageDeflateFactory(client_max_window_bits=10),
+ ServerPerMessageDeflateFactory(),
+ ]
+ )
+ @with_client("/extensions", extensions=[ClientPerMessageDeflateFactory()])
+ def test_extension_no_match_then_match(self):
+ # The order requested by the client has priority.
+ server_extensions = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(
+ server_extensions, repr([PerMessageDeflate(False, False, 15, 15)])
+ )
+ self.assertEqual(
+ repr(self.client.extensions),
+ repr([PerMessageDeflate(False, False, 15, 15)]),
+ )
+
+ @with_server(extensions=[ServerPerMessageDeflateFactory()])
+ @with_client("/extensions", extensions=[ClientNoOpExtensionFactory()])
+ def test_extension_mismatch(self):
+ server_extensions = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_extensions, repr([]))
+ self.assertEqual(repr(self.client.extensions), repr([]))
+
+ @with_server(
+ extensions=[ServerNoOpExtensionFactory(), ServerPerMessageDeflateFactory()]
+ )
+ @with_client(
+ "/extensions",
+ extensions=[ClientPerMessageDeflateFactory(), ClientNoOpExtensionFactory()],
+ )
+ def test_extension_order(self):
+ # The order requested by the client has priority.
+ server_extensions = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(
+ server_extensions,
+ repr([PerMessageDeflate(False, False, 15, 15), NoOpExtension()]),
+ )
+ self.assertEqual(
+ repr(self.client.extensions),
+ repr([PerMessageDeflate(False, False, 15, 15), NoOpExtension()]),
+ )
+
+ @with_server(extensions=[ServerNoOpExtensionFactory()])
+ @unittest.mock.patch.object(WebSocketServerProtocol, "process_extensions")
+ def test_extensions_error(self, _process_extensions):
+ _process_extensions.return_value = "x-no-op", [NoOpExtension()]
+
+ with self.assertRaises(NegotiationError):
+ self.start_client(
+ "/extensions", extensions=[ClientPerMessageDeflateFactory()]
+ )
+
+ @with_server(extensions=[ServerNoOpExtensionFactory()])
+ @unittest.mock.patch.object(WebSocketServerProtocol, "process_extensions")
+ def test_extensions_error_no_extensions(self, _process_extensions):
+ _process_extensions.return_value = "x-no-op", [NoOpExtension()]
+
+ with self.assertRaises(InvalidHandshake):
+ self.start_client("/extensions")
+
+ @with_server(compression="deflate")
+ @with_client("/extensions", compression="deflate")
+ def test_compression_deflate(self):
+ server_extensions = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(
+ server_extensions, repr([PerMessageDeflate(False, False, 15, 15)])
+ )
+ self.assertEqual(
+ repr(self.client.extensions),
+ repr([PerMessageDeflate(False, False, 15, 15)]),
+ )
+
+ @with_server(
+ extensions=[
+ ServerPerMessageDeflateFactory(
+ client_no_context_takeover=True, server_max_window_bits=10
+ )
+ ],
+ compression="deflate", # overridden by explicit config
+ )
+ @with_client(
+ "/extensions",
+ extensions=[
+ ClientPerMessageDeflateFactory(
+ server_no_context_takeover=True, client_max_window_bits=12
+ )
+ ],
+ compression="deflate", # overridden by explicit config
+ )
+ def test_compression_deflate_and_explicit_config(self):
+ server_extensions = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(
+ server_extensions, repr([PerMessageDeflate(True, True, 12, 10)])
+ )
+ self.assertEqual(
+ repr(self.client.extensions), repr([PerMessageDeflate(True, True, 10, 12)])
+ )
+
+ def test_compression_unsupported_server(self):
+ with self.assertRaises(ValueError):
+ self.start_server(compression="xz")
+
+ @with_server()
+ def test_compression_unsupported_client(self):
+ with self.assertRaises(ValueError):
+ self.start_client(compression="xz")
+
+ @with_server()
+ @with_client("/subprotocol")
+ def test_no_subprotocol(self):
+ server_subprotocol = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_subprotocol, repr(None))
+ self.assertEqual(self.client.subprotocol, None)
+
+ @with_server(subprotocols=["superchat", "chat"])
+ @with_client("/subprotocol", subprotocols=["otherchat", "chat"])
+ def test_subprotocol(self):
+ server_subprotocol = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_subprotocol, repr("chat"))
+ self.assertEqual(self.client.subprotocol, "chat")
+
+ @with_server(subprotocols=["superchat"])
+ @with_client("/subprotocol", subprotocols=["otherchat"])
+ def test_subprotocol_not_accepted(self):
+ server_subprotocol = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_subprotocol, repr(None))
+ self.assertEqual(self.client.subprotocol, None)
+
+ @with_server()
+ @with_client("/subprotocol", subprotocols=["otherchat", "chat"])
+ def test_subprotocol_not_offered(self):
+ server_subprotocol = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_subprotocol, repr(None))
+ self.assertEqual(self.client.subprotocol, None)
+
+ @with_server(subprotocols=["superchat", "chat"])
+ @with_client("/subprotocol")
+ def test_subprotocol_not_requested(self):
+ server_subprotocol = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(server_subprotocol, repr(None))
+ self.assertEqual(self.client.subprotocol, None)
+
+ @with_server(subprotocols=["superchat"])
+ @unittest.mock.patch.object(WebSocketServerProtocol, "process_subprotocol")
+ def test_subprotocol_error(self, _process_subprotocol):
+ _process_subprotocol.return_value = "superchat"
+
+ with self.assertRaises(NegotiationError):
+ self.start_client("/subprotocol", subprotocols=["otherchat"])
+ self.run_loop_once()
+
+ @with_server(subprotocols=["superchat"])
+ @unittest.mock.patch.object(WebSocketServerProtocol, "process_subprotocol")
+ def test_subprotocol_error_no_subprotocols(self, _process_subprotocol):
+ _process_subprotocol.return_value = "superchat"
+
+ with self.assertRaises(InvalidHandshake):
+ self.start_client("/subprotocol")
+ self.run_loop_once()
+
+ @with_server(subprotocols=["superchat", "chat"])
+ @unittest.mock.patch.object(WebSocketServerProtocol, "process_subprotocol")
+ def test_subprotocol_error_two_subprotocols(self, _process_subprotocol):
+ _process_subprotocol.return_value = "superchat, chat"
+
+ with self.assertRaises(InvalidHandshake):
+ self.start_client("/subprotocol", subprotocols=["superchat", "chat"])
+ self.run_loop_once()
+
+ @with_server()
+ @unittest.mock.patch("websockets.server.read_request")
+ def test_server_receives_malformed_request(self, _read_request):
+ _read_request.side_effect = ValueError("read_request failed")
+
+ with self.assertRaises(InvalidHandshake):
+ self.start_client()
+
+ @with_server()
+ @unittest.mock.patch("websockets.client.read_response")
+ def test_client_receives_malformed_response(self, _read_response):
+ _read_response.side_effect = ValueError("read_response failed")
+
+ with self.assertRaises(InvalidHandshake):
+ self.start_client()
+ self.run_loop_once()
+
+ @with_server()
+ @unittest.mock.patch("websockets.client.build_request")
+ def test_client_sends_invalid_handshake_request(self, _build_request):
+ def wrong_build_request(headers):
+ return "42"
+
+ _build_request.side_effect = wrong_build_request
+
+ with self.assertRaises(InvalidHandshake):
+ self.start_client()
+
+ @with_server()
+ @unittest.mock.patch("websockets.server.build_response")
+ def test_server_sends_invalid_handshake_response(self, _build_response):
+ def wrong_build_response(headers, key):
+ return build_response(headers, "42")
+
+ _build_response.side_effect = wrong_build_response
+
+ with self.assertRaises(InvalidHandshake):
+ self.start_client()
+
+ @with_server()
+ @unittest.mock.patch("websockets.client.read_response")
+ def test_server_does_not_switch_protocols(self, _read_response):
+ async def wrong_read_response(stream):
+ status_code, reason, headers = await read_response(stream)
+ return 400, "Bad Request", headers
+
+ _read_response.side_effect = wrong_read_response
+
+ with self.assertRaises(InvalidStatusCode):
+ self.start_client()
+ self.run_loop_once()
+
+ @with_server()
+ @unittest.mock.patch("websockets.server.WebSocketServerProtocol.process_request")
+ def test_server_error_in_handshake(self, _process_request):
+ _process_request.side_effect = Exception("process_request crashed")
+
+ with self.assertRaises(InvalidHandshake):
+ self.start_client()
+
+ @with_server()
+ @unittest.mock.patch("websockets.server.WebSocketServerProtocol.send")
+ def test_server_handler_crashes(self, send):
+ send.side_effect = ValueError("send failed")
+
+ with self.temp_client():
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.client.recv())
+
+ # Connection ends with an unexpected error.
+ self.assertEqual(self.client.close_code, 1011)
+
+ @with_server()
+ @unittest.mock.patch("websockets.server.WebSocketServerProtocol.close")
+ def test_server_close_crashes(self, close):
+ close.side_effect = ValueError("close failed")
+
+ with self.temp_client():
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ reply = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(reply, "Hello!")
+
+ # Connection ends with an abnormal closure.
+ self.assertEqual(self.client.close_code, 1006)
+
+ @with_server()
+ @with_client()
+ @unittest.mock.patch.object(WebSocketClientProtocol, "handshake")
+ def test_client_closes_connection_before_handshake(self, handshake):
+ # We have mocked the handshake() method to prevent the client from
+ # performing the opening handshake. Force it to close the connection.
+ self.client.transport.close()
+ # The server should stop properly anyway. It used to hang because the
+ # task handling the connection was waiting for the opening handshake.
+
+ @with_server(create_protocol=SlowOpeningHandshakeProtocol)
+ def test_server_shuts_down_during_opening_handshake(self):
+ self.loop.call_later(5 * MS, self.server.close)
+ with self.assertRaises(InvalidStatusCode) as raised:
+ self.start_client()
+ exception = raised.exception
+ self.assertEqual(
+ str(exception), "server rejected WebSocket connection: HTTP 503"
+ )
+ self.assertEqual(exception.status_code, 503)
+
+ @with_server()
+ def test_server_shuts_down_during_connection_handling(self):
+ with self.temp_client():
+ server_ws = next(iter(self.server.websockets))
+ self.server.close()
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.client.recv())
+
+ # Websocket connection closes properly with 1001 Going Away.
+ self.assertEqual(self.client.close_code, 1001)
+ self.assertEqual(server_ws.close_code, 1001)
+
+ @with_server()
+ def test_server_shuts_down_waits_until_handlers_terminate(self):
+ # This handler waits a bit after the connection is closed in order
+ # to test that wait_closed() really waits for handlers to complete.
+ self.start_client("/slow_stop")
+ server_ws = next(iter(self.server.websockets))
+
+ # Test that the handler task keeps running after close().
+ self.server.close()
+ self.loop.run_until_complete(asyncio.sleep(MS))
+ self.assertFalse(server_ws.handler_task.done())
+
+ # Test that the handler task terminates before wait_closed() returns.
+ self.loop.run_until_complete(self.server.wait_closed())
+ self.assertTrue(server_ws.handler_task.done())
+
+ @with_server(create_protocol=ForbiddenServerProtocol)
+ def test_invalid_status_error_during_client_connect(self):
+ with self.assertRaises(InvalidStatusCode) as raised:
+ self.start_client()
+ exception = raised.exception
+ self.assertEqual(
+ str(exception), "server rejected WebSocket connection: HTTP 403"
+ )
+ self.assertEqual(exception.status_code, 403)
+
+ @with_server()
+ @unittest.mock.patch(
+ "websockets.server.WebSocketServerProtocol.write_http_response"
+ )
+ @unittest.mock.patch("websockets.server.WebSocketServerProtocol.read_http_request")
+ def test_connection_error_during_opening_handshake(
+ self, _read_http_request, _write_http_response
+ ):
+ _read_http_request.side_effect = ConnectionError
+
+ # This exception is currently platform-dependent. It was observed to
+ # be ConnectionResetError on Linux in the non-TLS case, and
+ # InvalidMessage otherwise (including both Linux and macOS). This
+ # doesn't matter though since this test is primarily for testing a
+ # code path on the server side.
+ with self.assertRaises(Exception):
+ self.start_client()
+
+ # No response must be written if the network connection is broken.
+ _write_http_response.assert_not_called()
+
+ @with_server()
+ @unittest.mock.patch("websockets.server.WebSocketServerProtocol.close")
+ def test_connection_error_during_closing_handshake(self, close):
+ close.side_effect = ConnectionError
+
+ with self.temp_client():
+ self.loop.run_until_complete(self.client.send("Hello!"))
+ reply = self.loop.run_until_complete(self.client.recv())
+ self.assertEqual(reply, "Hello!")
+
+ # Connection ends with an abnormal closure.
+ self.assertEqual(self.client.close_code, 1006)
+
+
+class ClientServerTests(
+ CommonClientServerTests, ClientServerTestsMixin, AsyncioTestCase
+):
+ pass
+
+
+class SecureClientServerTests(
+ CommonClientServerTests, SecureClientServerTestsMixin, AsyncioTestCase
+):
+
+ # TLS over Unix sockets doesn't make sense.
+ test_unix_socket = None
+
+ @with_server()
+ def test_ws_uri_is_rejected(self):
+ with self.assertRaises(ValueError):
+ connect(get_server_uri(self.server, secure=False), ssl=self.client_context)
+
+ @with_server()
+ def test_redirect_insecure(self):
+ with temp_test_redirecting_server(
+ self, http.HTTPStatus.FOUND, force_insecure=True
+ ):
+ with self.assertRaises(InvalidHandshake):
+ with temp_test_client(self):
+ self.fail("Did not raise") # pragma: no cover
+
+
+class ClientServerOriginTests(AsyncioTestCase):
+ def test_checking_origin_succeeds(self):
+ server = self.loop.run_until_complete(
+ serve(handler, "localhost", 0, origins=["http://localhost"])
+ )
+ client = self.loop.run_until_complete(
+ connect(get_server_uri(server), origin="http://localhost")
+ )
+
+ self.loop.run_until_complete(client.send("Hello!"))
+ self.assertEqual(self.loop.run_until_complete(client.recv()), "Hello!")
+
+ self.loop.run_until_complete(client.close())
+ server.close()
+ self.loop.run_until_complete(server.wait_closed())
+
+ def test_checking_origin_fails(self):
+ server = self.loop.run_until_complete(
+ serve(handler, "localhost", 0, origins=["http://localhost"])
+ )
+ with self.assertRaisesRegex(
+ InvalidHandshake, "server rejected WebSocket connection: HTTP 403"
+ ):
+ self.loop.run_until_complete(
+ connect(get_server_uri(server), origin="http://otherhost")
+ )
+
+ server.close()
+ self.loop.run_until_complete(server.wait_closed())
+
+ def test_checking_origins_fails_with_multiple_headers(self):
+ server = self.loop.run_until_complete(
+ serve(handler, "localhost", 0, origins=["http://localhost"])
+ )
+ with self.assertRaisesRegex(
+ InvalidHandshake, "server rejected WebSocket connection: HTTP 400"
+ ):
+ self.loop.run_until_complete(
+ connect(
+ get_server_uri(server),
+ origin="http://localhost",
+ extra_headers=[("Origin", "http://otherhost")],
+ )
+ )
+
+ server.close()
+ self.loop.run_until_complete(server.wait_closed())
+
+ def test_checking_lack_of_origin_succeeds(self):
+ server = self.loop.run_until_complete(
+ serve(handler, "localhost", 0, origins=[None])
+ )
+ client = self.loop.run_until_complete(connect(get_server_uri(server)))
+
+ self.loop.run_until_complete(client.send("Hello!"))
+ self.assertEqual(self.loop.run_until_complete(client.recv()), "Hello!")
+
+ self.loop.run_until_complete(client.close())
+ server.close()
+ self.loop.run_until_complete(server.wait_closed())
+
+ def test_checking_lack_of_origin_succeeds_backwards_compatibility(self):
+ with warnings.catch_warnings(record=True) as recorded_warnings:
+ server = self.loop.run_until_complete(
+ serve(handler, "localhost", 0, origins=[""])
+ )
+ client = self.loop.run_until_complete(connect(get_server_uri(server)))
+
+ self.assertDeprecationWarnings(
+ recorded_warnings, ["use None instead of '' in origins"]
+ )
+
+ self.loop.run_until_complete(client.send("Hello!"))
+ self.assertEqual(self.loop.run_until_complete(client.recv()), "Hello!")
+
+ self.loop.run_until_complete(client.close())
+ server.close()
+ self.loop.run_until_complete(server.wait_closed())
+
+
+class YieldFromTests(AsyncioTestCase):
+ def test_client(self):
+ start_server = serve(handler, "localhost", 0)
+ server = self.loop.run_until_complete(start_server)
+
+ # @asyncio.coroutine is deprecated on Python ≥ 3.8
+ with warnings.catch_warnings(record=True):
+
+ @asyncio.coroutine
+ def run_client():
+ # Yield from connect.
+ client = yield from connect(get_server_uri(server))
+ self.assertEqual(client.state, State.OPEN)
+ yield from client.close()
+ self.assertEqual(client.state, State.CLOSED)
+
+ self.loop.run_until_complete(run_client())
+
+ server.close()
+ self.loop.run_until_complete(server.wait_closed())
+
+ def test_server(self):
+ # @asyncio.coroutine is deprecated on Python ≥ 3.8
+ with warnings.catch_warnings(record=True):
+
+ @asyncio.coroutine
+ def run_server():
+ # Yield from serve.
+ server = yield from serve(handler, "localhost", 0)
+ self.assertTrue(server.sockets)
+ server.close()
+ yield from server.wait_closed()
+ self.assertFalse(server.sockets)
+
+ self.loop.run_until_complete(run_server())
+
+
+class AsyncAwaitTests(AsyncioTestCase):
+ def test_client(self):
+ start_server = serve(handler, "localhost", 0)
+ server = self.loop.run_until_complete(start_server)
+
+ async def run_client():
+ # Await connect.
+ client = await connect(get_server_uri(server))
+ self.assertEqual(client.state, State.OPEN)
+ await client.close()
+ self.assertEqual(client.state, State.CLOSED)
+
+ self.loop.run_until_complete(run_client())
+
+ server.close()
+ self.loop.run_until_complete(server.wait_closed())
+
+ def test_server(self):
+ async def run_server():
+ # Await serve.
+ server = await serve(handler, "localhost", 0)
+ self.assertTrue(server.sockets)
+ server.close()
+ await server.wait_closed()
+ self.assertFalse(server.sockets)
+
+ self.loop.run_until_complete(run_server())
+
+
+class ContextManagerTests(AsyncioTestCase):
+ def test_client(self):
+ start_server = serve(handler, "localhost", 0)
+ server = self.loop.run_until_complete(start_server)
+
+ async def run_client():
+ # Use connect as an asynchronous context manager.
+ async with connect(get_server_uri(server)) as client:
+ self.assertEqual(client.state, State.OPEN)
+
+ # Check that exiting the context manager closed the connection.
+ self.assertEqual(client.state, State.CLOSED)
+
+ self.loop.run_until_complete(run_client())
+
+ server.close()
+ self.loop.run_until_complete(server.wait_closed())
+
+ def test_server(self):
+ async def run_server():
+ # Use serve as an asynchronous context manager.
+ async with serve(handler, "localhost", 0) as server:
+ self.assertTrue(server.sockets)
+
+ # Check that exiting the context manager closed the server.
+ self.assertFalse(server.sockets)
+
+ self.loop.run_until_complete(run_server())
+
+ @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "this test requires Unix sockets")
+ def test_unix_server(self):
+ async def run_server(path):
+ async with unix_serve(handler, path) as server:
+ self.assertTrue(server.sockets)
+
+ # Check that exiting the context manager closed the server.
+ self.assertFalse(server.sockets)
+
+ with tempfile.TemporaryDirectory() as temp_dir:
+ path = bytes(pathlib.Path(temp_dir) / "websockets")
+ self.loop.run_until_complete(run_server(path))
+
+
+class AsyncIteratorTests(AsyncioTestCase):
+
+ # This is a protocol-level feature, but since it's a high-level API, it is
+ # much easier to exercise at the client or server level.
+
+ MESSAGES = ["3", "2", "1", "Fire!"]
+
+ def test_iterate_on_messages(self):
+ async def handler(ws, path):
+ for message in self.MESSAGES:
+ await ws.send(message)
+
+ start_server = serve(handler, "localhost", 0)
+ server = self.loop.run_until_complete(start_server)
+
+ messages = []
+
+ async def run_client():
+ nonlocal messages
+ async with connect(get_server_uri(server)) as ws:
+ async for message in ws:
+ messages.append(message)
+
+ self.loop.run_until_complete(run_client())
+
+ self.assertEqual(messages, self.MESSAGES)
+
+ server.close()
+ self.loop.run_until_complete(server.wait_closed())
+
+ def test_iterate_on_messages_going_away_exit_ok(self):
+ async def handler(ws, path):
+ for message in self.MESSAGES:
+ await ws.send(message)
+ await ws.close(1001)
+
+ start_server = serve(handler, "localhost", 0)
+ server = self.loop.run_until_complete(start_server)
+
+ messages = []
+
+ async def run_client():
+ nonlocal messages
+ async with connect(get_server_uri(server)) as ws:
+ async for message in ws:
+ messages.append(message)
+
+ self.loop.run_until_complete(run_client())
+
+ self.assertEqual(messages, self.MESSAGES)
+
+ server.close()
+ self.loop.run_until_complete(server.wait_closed())
+
+ def test_iterate_on_messages_internal_error_exit_not_ok(self):
+ async def handler(ws, path):
+ for message in self.MESSAGES:
+ await ws.send(message)
+ await ws.close(1011)
+
+ start_server = serve(handler, "localhost", 0)
+ server = self.loop.run_until_complete(start_server)
+
+ messages = []
+
+ async def run_client():
+ nonlocal messages
+ async with connect(get_server_uri(server)) as ws:
+ async for message in ws:
+ messages.append(message)
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(run_client())
+
+ self.assertEqual(messages, self.MESSAGES)
+
+ server.close()
+ self.loop.run_until_complete(server.wait_closed())
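
The context-manager and async-iterator tests above cover the high-level serve()/connect() API end to end. A minimal self-contained echo round trip under the same API, offered as a sketch only; the port number is an arbitrary placeholder:

    import asyncio

    import websockets

    async def echo(ws, path):
        # Send every received message back to the client.
        async for message in ws:
            await ws.send(message)

    async def round_trip():
        # serve() and connect() both work as asynchronous context managers,
        # closing the server and the connection on exit.
        async with websockets.serve(echo, "localhost", 8765):
            async with websockets.connect("ws://localhost:8765") as client:
                await client.send("Hello!")
                assert await client.recv() == "Hello!"

    asyncio.get_event_loop().run_until_complete(round_trip())
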
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_exceptions.py b/testing/web-platform/tests/tools/third_party/websockets/tests/test_exceptions.py
new file mode 100644
index 0000000000..7ad5ad8335
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_exceptions.py
@@ -0,0 +1,145 @@
+import unittest
+
+from websockets.exceptions import *
+from websockets.http import Headers
+
+
+class ExceptionsTests(unittest.TestCase):
+ def test_str(self):
+ for exception, exception_str in [
+ # fmt: off
+ (
+ WebSocketException("something went wrong"),
+ "something went wrong",
+ ),
+ (
+ ConnectionClosed(1000, ""),
+ "code = 1000 (OK), no reason",
+ ),
+ (
+ ConnectionClosed(1006, None),
+ "code = 1006 (connection closed abnormally [internal]), no reason"
+ ),
+ (
+ ConnectionClosed(3000, None),
+ "code = 3000 (registered), no reason"
+ ),
+ (
+ ConnectionClosed(4000, None),
+ "code = 4000 (private use), no reason"
+ ),
+ (
+ ConnectionClosedError(1016, None),
+ "code = 1016 (unknown), no reason"
+ ),
+ (
+ ConnectionClosedOK(1001, "bye"),
+ "code = 1001 (going away), reason = bye",
+ ),
+ (
+ InvalidHandshake("invalid request"),
+ "invalid request",
+ ),
+ (
+ SecurityError("redirect from WSS to WS"),
+ "redirect from WSS to WS",
+ ),
+ (
+ InvalidMessage("malformed HTTP message"),
+ "malformed HTTP message",
+ ),
+ (
+ InvalidHeader("Name"),
+ "missing Name header",
+ ),
+ (
+ InvalidHeader("Name", None),
+ "missing Name header",
+ ),
+ (
+ InvalidHeader("Name", ""),
+ "empty Name header",
+ ),
+ (
+ InvalidHeader("Name", "Value"),
+ "invalid Name header: Value",
+ ),
+ (
+ InvalidHeaderFormat(
+ "Sec-WebSocket-Protocol", "expected token", "a=|", 3
+ ),
+ "invalid Sec-WebSocket-Protocol header: "
+ "expected token at 3 in a=|",
+ ),
+ (
+ InvalidHeaderValue("Sec-WebSocket-Version", "42"),
+ "invalid Sec-WebSocket-Version header: 42",
+ ),
+ (
+ InvalidOrigin("http://bad.origin"),
+ "invalid Origin header: http://bad.origin",
+ ),
+ (
+ InvalidUpgrade("Upgrade"),
+ "missing Upgrade header",
+ ),
+ (
+ InvalidUpgrade("Connection", "websocket"),
+ "invalid Connection header: websocket",
+ ),
+ (
+ InvalidStatusCode(403),
+ "server rejected WebSocket connection: HTTP 403",
+ ),
+ (
+ NegotiationError("unsupported subprotocol: spam"),
+ "unsupported subprotocol: spam",
+ ),
+ (
+ DuplicateParameter("a"),
+ "duplicate parameter: a",
+ ),
+ (
+ InvalidParameterName("|"),
+ "invalid parameter name: |",
+ ),
+ (
+ InvalidParameterValue("a", None),
+ "missing value for parameter a",
+ ),
+ (
+ InvalidParameterValue("a", ""),
+ "empty value for parameter a",
+ ),
+ (
+ InvalidParameterValue("a", "|"),
+ "invalid value for parameter a: |",
+ ),
+ (
+ AbortHandshake(200, Headers(), b"OK\n"),
+ "HTTP 200, 0 headers, 3 bytes",
+ ),
+ (
+ RedirectHandshake("wss://example.com"),
+ "redirect to wss://example.com",
+ ),
+ (
+ InvalidState("WebSocket connection isn't established yet"),
+ "WebSocket connection isn't established yet",
+ ),
+ (
+ InvalidURI("|"),
+ "| isn't a valid URI",
+ ),
+ (
+ PayloadTooBig("payload length exceeds limit: 2 > 1 bytes"),
+ "payload length exceeds limit: 2 > 1 bytes",
+ ),
+ (
+ ProtocolError("invalid opcode: 7"),
+ "invalid opcode: 7",
+ ),
+ # fmt: on
+ ]:
+ with self.subTest(exception=exception):
+ self.assertEqual(str(exception), exception_str)
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_exports.py b/testing/web-platform/tests/tools/third_party/websockets/tests/test_exports.py
new file mode 100644
index 0000000000..7fcbc80e38
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_exports.py
@@ -0,0 +1,22 @@
+import unittest
+
+import websockets
+
+
+combined_exports = (
+ websockets.auth.__all__
+ + websockets.client.__all__
+ + websockets.exceptions.__all__
+ + websockets.protocol.__all__
+ + websockets.server.__all__
+ + websockets.typing.__all__
+ + websockets.uri.__all__
+)
+
+
+class TestExportsAllSubmodules(unittest.TestCase):
+ def test_top_level_module_reexports_all_submodule_exports(self):
+ self.assertEqual(set(combined_exports), set(websockets.__all__))
+
+ def test_submodule_exports_are_globally_unique(self):
+ self.assertEqual(len(set(combined_exports)), len(combined_exports))
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_framing.py b/testing/web-platform/tests/tools/third_party/websockets/tests/test_framing.py
new file mode 100644
index 0000000000..5def415d28
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_framing.py
@@ -0,0 +1,242 @@
+import asyncio
+import codecs
+import unittest
+import unittest.mock
+
+from websockets.exceptions import PayloadTooBig, ProtocolError
+from websockets.framing import *
+
+from .utils import AsyncioTestCase
+
+
+class FramingTests(AsyncioTestCase):
+ def decode(self, message, mask=False, max_size=None, extensions=None):
+ self.stream = asyncio.StreamReader(loop=self.loop)
+ self.stream.feed_data(message)
+ self.stream.feed_eof()
+ frame = self.loop.run_until_complete(
+ Frame.read(
+ self.stream.readexactly,
+ mask=mask,
+ max_size=max_size,
+ extensions=extensions,
+ )
+ )
+ # Make sure all the data was consumed.
+ self.assertTrue(self.stream.at_eof())
+ return frame
+
+ def encode(self, frame, mask=False, extensions=None):
+ write = unittest.mock.Mock()
+ frame.write(write, mask=mask, extensions=extensions)
+ # Ensure the entire frame is sent with a single call to write().
+ # Multiple calls cause TCP fragmentation and degrade performance.
+ self.assertEqual(write.call_count, 1)
+ # The frame data is the single positional argument of that call.
+ self.assertEqual(len(write.call_args[0]), 1)
+ self.assertEqual(len(write.call_args[1]), 0)
+ return write.call_args[0][0]
+
+ def round_trip(self, message, expected, mask=False, extensions=None):
+ decoded = self.decode(message, mask, extensions=extensions)
+ self.assertEqual(decoded, expected)
+ encoded = self.encode(decoded, mask, extensions=extensions)
+ if mask: # non-deterministic encoding
+ decoded = self.decode(encoded, mask, extensions=extensions)
+ self.assertEqual(decoded, expected)
+ else: # deterministic encoding
+ self.assertEqual(encoded, message)
+
+ def round_trip_close(self, data, code, reason):
+ parsed = parse_close(data)
+ self.assertEqual(parsed, (code, reason))
+ serialized = serialize_close(code, reason)
+ self.assertEqual(serialized, data)
+
+ def test_text(self):
+ self.round_trip(b"\x81\x04Spam", Frame(True, OP_TEXT, b"Spam"))
+
+ def test_text_masked(self):
+ self.round_trip(
+ b"\x81\x84\x5b\xfb\xe1\xa8\x08\x8b\x80\xc5",
+ Frame(True, OP_TEXT, b"Spam"),
+ mask=True,
+ )
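+ # Informational sketch (not asserted by this test): in the masked frame
+ # above, 0x84 is the mask bit (0x80) combined with the payload length (4),
+ # the next four bytes (5b fb e1 a8) are the masking key, and the payload is
+ # b"Spam" (53 70 61 6d) XORed byte-by-byte with the key, giving 08 8b 80 c5.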
+
+ def test_binary(self):
+ self.round_trip(b"\x82\x04Eggs", Frame(True, OP_BINARY, b"Eggs"))
+
+ def test_binary_masked(self):
+ self.round_trip(
+ b"\x82\x84\x53\xcd\xe2\x89\x16\xaa\x85\xfa",
+ Frame(True, OP_BINARY, b"Eggs"),
+ mask=True,
+ )
+
+ def test_non_ascii_text(self):
+ self.round_trip(
+ b"\x81\x05caf\xc3\xa9", Frame(True, OP_TEXT, "café".encode("utf-8"))
+ )
+
+ def test_non_ascii_text_masked(self):
+ self.round_trip(
+ b"\x81\x85\x64\xbe\xee\x7e\x07\xdf\x88\xbd\xcd",
+ Frame(True, OP_TEXT, "café".encode("utf-8")),
+ mask=True,
+ )
+
+ def test_close(self):
+ self.round_trip(b"\x88\x00", Frame(True, OP_CLOSE, b""))
+
+ def test_ping(self):
+ self.round_trip(b"\x89\x04ping", Frame(True, OP_PING, b"ping"))
+
+ def test_pong(self):
+ self.round_trip(b"\x8a\x04pong", Frame(True, OP_PONG, b"pong"))
+
+ def test_long(self):
+ self.round_trip(
+ b"\x82\x7e\x00\x7e" + 126 * b"a", Frame(True, OP_BINARY, 126 * b"a")
+ )
+
+ def test_very_long(self):
+ self.round_trip(
+ b"\x82\x7f\x00\x00\x00\x00\x00\x01\x00\x00" + 65536 * b"a",
+ Frame(True, OP_BINARY, 65536 * b"a"),
+ )
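+ # Informational sketch of the length encodings used above: payloads of
+ # 126-65535 bytes use 0x7e followed by a 2-byte big-endian length
+ # (b"\x00\x7e" == 126), and larger payloads use 0x7f followed by an
+ # 8-byte big-endian length (here 0x0000000000010000 == 65536).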
+
+ def test_payload_too_big(self):
+ with self.assertRaises(PayloadTooBig):
+ self.decode(b"\x82\x7e\x04\x01" + 1025 * b"a", max_size=1024)
+
+ def test_bad_reserved_bits(self):
+ for encoded in [b"\xc0\x00", b"\xa0\x00", b"\x90\x00"]:
+ with self.subTest(encoded=encoded):
+ with self.assertRaises(ProtocolError):
+ self.decode(encoded)
+
+ def test_good_opcode(self):
+ for opcode in list(range(0x00, 0x03)) + list(range(0x08, 0x0B)):
+ encoded = bytes([0x80 | opcode, 0])
+ with self.subTest(encoded=encoded):
+ self.decode(encoded) # does not raise an exception
+
+ def test_bad_opcode(self):
+ for opcode in list(range(0x03, 0x08)) + list(range(0x0B, 0x10)):
+ encoded = bytes([0x80 | opcode, 0])
+ with self.subTest(encoded=encoded):
+ with self.assertRaises(ProtocolError):
+ self.decode(encoded)
+
+ def test_mask_flag(self):
+ # Mask flag correctly set.
+ self.decode(b"\x80\x80\x00\x00\x00\x00", mask=True)
+ # Mask flag incorrectly unset.
+ with self.assertRaises(ProtocolError):
+ self.decode(b"\x80\x80\x00\x00\x00\x00")
+ # Mask flag correctly unset.
+ self.decode(b"\x80\x00")
+ # Mask flag incorrectly set.
+ with self.assertRaises(ProtocolError):
+ self.decode(b"\x80\x00", mask=True)
+
+ def test_control_frame_max_length(self):
+ # At maximum allowed length.
+ self.decode(b"\x88\x7e\x00\x7d" + 125 * b"a")
+ # Above maximum allowed length.
+ with self.assertRaises(ProtocolError):
+ self.decode(b"\x88\x7e\x00\x7e" + 126 * b"a")
+
+ def test_prepare_data_str(self):
+ self.assertEqual(prepare_data("café"), (OP_TEXT, b"caf\xc3\xa9"))
+
+ def test_prepare_data_bytes(self):
+ self.assertEqual(prepare_data(b"tea"), (OP_BINARY, b"tea"))
+
+ def test_prepare_data_bytearray(self):
+ self.assertEqual(
+ prepare_data(bytearray(b"tea")), (OP_BINARY, bytearray(b"tea"))
+ )
+
+ def test_prepare_data_memoryview(self):
+ self.assertEqual(
+ prepare_data(memoryview(b"tea")), (OP_BINARY, memoryview(b"tea"))
+ )
+
+ def test_prepare_data_non_contiguous_memoryview(self):
+ self.assertEqual(prepare_data(memoryview(b"tteeaa")[::2]), (OP_BINARY, b"tea"))
+
+ def test_prepare_data_list(self):
+ with self.assertRaises(TypeError):
+ prepare_data([])
+
+ def test_prepare_data_none(self):
+ with self.assertRaises(TypeError):
+ prepare_data(None)
+
+ def test_encode_data_str(self):
+ self.assertEqual(encode_data("café"), b"caf\xc3\xa9")
+
+ def test_encode_data_bytes(self):
+ self.assertEqual(encode_data(b"tea"), b"tea")
+
+ def test_encode_data_bytearray(self):
+ self.assertEqual(encode_data(bytearray(b"tea")), b"tea")
+
+ def test_encode_data_memoryview(self):
+ self.assertEqual(encode_data(memoryview(b"tea")), b"tea")
+
+ def test_encode_data_non_contiguous_memoryview(self):
+ self.assertEqual(encode_data(memoryview(b"tteeaa")[::2]), b"tea")
+
+ def test_encode_data_list(self):
+ with self.assertRaises(TypeError):
+ encode_data([])
+
+ def test_encode_data_none(self):
+ with self.assertRaises(TypeError):
+ encode_data(None)
+
+ def test_fragmented_control_frame(self):
+ # Fin bit correctly set.
+ self.decode(b"\x88\x00")
+ # Fin bit incorrectly unset.
+ with self.assertRaises(ProtocolError):
+ self.decode(b"\x08\x00")
+
+ def test_parse_close_and_serialize_close(self):
+ self.round_trip_close(b"\x03\xe8", 1000, "")
+ self.round_trip_close(b"\x03\xe8OK", 1000, "OK")
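+ # Informational sketch: the close payload is the status code as a 2-byte
+ # big-endian integer followed by a UTF-8 reason, so b"\x03\xe8" encodes
+ # 0x03e8 == 1000 and b"\x03\xe8OK" encodes code 1000 with reason "OK".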
+
+ def test_parse_close_empty(self):
+ self.assertEqual(parse_close(b""), (1005, ""))
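+ # 1005 is the reserved code meaning no status code was present (RFC 6455).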
+
+ def test_parse_close_errors(self):
+ with self.assertRaises(ProtocolError):
+ parse_close(b"\x03")
+ with self.assertRaises(ProtocolError):
+ parse_close(b"\x03\xe7")
+ with self.assertRaises(UnicodeDecodeError):
+ parse_close(b"\x03\xe8\xff\xff")
+
+ def test_serialize_close_errors(self):
+ with self.assertRaises(ProtocolError):
+ serialize_close(999, "")
+
+ def test_extensions(self):
+ class Rot13:
+ @staticmethod
+ def encode(frame):
+ assert frame.opcode == OP_TEXT
+ text = frame.data.decode()
+ data = codecs.encode(text, "rot13").encode()
+ return frame._replace(data=data)
+
+ # This extension is symmetrical.
+ @staticmethod
+ def decode(frame, *, max_size=None):
+ return Rot13.encode(frame)
+
+ self.round_trip(
+ b"\x81\x05uryyb", Frame(True, OP_TEXT, b"hello"), extensions=[Rot13()]
+ )
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_handshake.py b/testing/web-platform/tests/tools/third_party/websockets/tests/test_handshake.py
new file mode 100644
index 0000000000..7d04777152
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_handshake.py
@@ -0,0 +1,190 @@
+import contextlib
+import unittest
+
+from websockets.exceptions import (
+ InvalidHandshake,
+ InvalidHeader,
+ InvalidHeaderValue,
+ InvalidUpgrade,
+)
+from websockets.handshake import *
+from websockets.handshake import accept # private API
+from websockets.http import Headers
+
+
+class HandshakeTests(unittest.TestCase):
+ def test_accept(self):
+ # Test vector from RFC 6455
+ key = "dGhlIHNhbXBsZSBub25jZQ=="
+ acc = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="
+ self.assertEqual(accept(key), acc)
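+ # Informational sketch of the derivation, assuming only the standard
+ # library: the accept value is the base64-encoded SHA-1 digest of the key
+ # concatenated with the GUID fixed by RFC 6455.
+ #
+ # import base64, hashlib
+ # guid = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
+ # digest = hashlib.sha1((key + guid).encode()).digest()
+ # assert base64.b64encode(digest).decode() == acc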
+
+ def test_round_trip(self):
+ request_headers = Headers()
+ request_key = build_request(request_headers)
+ response_key = check_request(request_headers)
+ self.assertEqual(request_key, response_key)
+ response_headers = Headers()
+ build_response(response_headers, response_key)
+ check_response(response_headers, request_key)
+
+ @contextlib.contextmanager
+ def assertValidRequestHeaders(self):
+ """
+ Provide request headers for modification.
+
+ Assert that the transformation kept them valid.
+
+ """
+ headers = Headers()
+ build_request(headers)
+ yield headers
+ check_request(headers)
+
+ @contextlib.contextmanager
+ def assertInvalidRequestHeaders(self, exc_type):
+ """
+ Provide request headers for modification.
+
+ Assert that the transformation made them invalid.
+
+ """
+ headers = Headers()
+ build_request(headers)
+ yield headers
+ assert issubclass(exc_type, InvalidHandshake)
+ with self.assertRaises(exc_type):
+ check_request(headers)
+
+ def test_request_invalid_connection(self):
+ with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers:
+ del headers["Connection"]
+ headers["Connection"] = "Downgrade"
+
+ def test_request_missing_connection(self):
+ with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers:
+ del headers["Connection"]
+
+ def test_request_additional_connection(self):
+ with self.assertValidRequestHeaders() as headers:
+ headers["Connection"] = "close"
+
+ def test_request_invalid_upgrade(self):
+ with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers:
+ del headers["Upgrade"]
+ headers["Upgrade"] = "socketweb"
+
+ def test_request_missing_upgrade(self):
+ with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers:
+ del headers["Upgrade"]
+
+ def test_request_additional_upgrade(self):
+ with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers:
+ headers["Upgrade"] = "socketweb"
+
+ def test_request_invalid_key_not_base64(self):
+ with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers:
+ del headers["Sec-WebSocket-Key"]
+ headers["Sec-WebSocket-Key"] = "!@#$%^&*()"
+
+ def test_request_invalid_key_not_well_padded(self):
+ with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers:
+ del headers["Sec-WebSocket-Key"]
+ headers["Sec-WebSocket-Key"] = "CSIRmL8dWYxeAdr/XpEHRw"
+
+ def test_request_invalid_key_not_16_bytes_long(self):
+ with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers:
+ del headers["Sec-WebSocket-Key"]
+ headers["Sec-WebSocket-Key"] = "ZLpprpvK4PE="
+
+ def test_request_missing_key(self):
+ with self.assertInvalidRequestHeaders(InvalidHeader) as headers:
+ del headers["Sec-WebSocket-Key"]
+
+ def test_request_additional_key(self):
+ with self.assertInvalidRequestHeaders(InvalidHeader) as headers:
+ # This duplicates the Sec-WebSocket-Key header.
+ headers["Sec-WebSocket-Key"] = headers["Sec-WebSocket-Key"]
+
+ def test_request_invalid_version(self):
+ with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers:
+ del headers["Sec-WebSocket-Version"]
+ headers["Sec-WebSocket-Version"] = "42"
+
+ def test_request_missing_version(self):
+ with self.assertInvalidRequestHeaders(InvalidHeader) as headers:
+ del headers["Sec-WebSocket-Version"]
+
+ def test_request_additional_version(self):
+ with self.assertInvalidRequestHeaders(InvalidHeader) as headers:
+ # This duplicates the Sec-WebSocket-Version header.
+ headers["Sec-WebSocket-Version"] = headers["Sec-WebSocket-Version"]
+
+ @contextlib.contextmanager
+ def assertValidResponseHeaders(self, key="CSIRmL8dWYxeAdr/XpEHRw=="):
+ """
+ Provide response headers for modification.
+
+ Assert that the transformation kept them valid.
+
+ """
+ headers = Headers()
+ build_response(headers, key)
+ yield headers
+ check_response(headers, key)
+
+ @contextlib.contextmanager
+ def assertInvalidResponseHeaders(self, exc_type, key="CSIRmL8dWYxeAdr/XpEHRw=="):
+ """
+ Provide response headers for modification.
+
+ Assert that the transformation made them invalid.
+
+ """
+ headers = Headers()
+ build_response(headers, key)
+ yield headers
+ assert issubclass(exc_type, InvalidHandshake)
+ with self.assertRaises(exc_type):
+ check_response(headers, key)
+
+ def test_response_invalid_connection(self):
+ with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers:
+ del headers["Connection"]
+ headers["Connection"] = "Downgrade"
+
+ def test_response_missing_connection(self):
+ with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers:
+ del headers["Connection"]
+
+ def test_response_additional_connection(self):
+ with self.assertValidResponseHeaders() as headers:
+ headers["Connection"] = "close"
+
+ def test_response_invalid_upgrade(self):
+ with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers:
+ del headers["Upgrade"]
+ headers["Upgrade"] = "socketweb"
+
+ def test_response_missing_upgrade(self):
+ with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers:
+ del headers["Upgrade"]
+
+ def test_response_additional_upgrade(self):
+ with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers:
+ headers["Upgrade"] = "socketweb"
+
+ def test_response_invalid_accept(self):
+ with self.assertInvalidResponseHeaders(InvalidHeaderValue) as headers:
+ del headers["Sec-WebSocket-Accept"]
+ other_key = "1Eq4UDEFQYg3YspNgqxv5g=="
+ headers["Sec-WebSocket-Accept"] = accept(other_key)
+
+ def test_response_missing_accept(self):
+ with self.assertInvalidResponseHeaders(InvalidHeader) as headers:
+ del headers["Sec-WebSocket-Accept"]
+
+ def test_response_additional_accept(self):
+ with self.assertInvalidResponseHeaders(InvalidHeader) as headers:
+ # This duplicates the Sec-WebSocket-Accept header.
+ headers["Sec-WebSocket-Accept"] = headers["Sec-WebSocket-Accept"]
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_headers.py b/testing/web-platform/tests/tools/third_party/websockets/tests/test_headers.py
new file mode 100644
index 0000000000..26d85fa5ea
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_headers.py
@@ -0,0 +1,185 @@
+import unittest
+
+from websockets.exceptions import InvalidHeaderFormat, InvalidHeaderValue
+from websockets.headers import *
+
+
+class HeadersTests(unittest.TestCase):
+ def test_parse_connection(self):
+ for header, parsed in [
+ # Realistic use cases
+ ("Upgrade", ["Upgrade"]), # Safari, Chrome
+ ("keep-alive, Upgrade", ["keep-alive", "Upgrade"]), # Firefox
+ # Pathological example
+ (",,\t, , ,Upgrade ,,", ["Upgrade"]),
+ ]:
+ with self.subTest(header=header):
+ self.assertEqual(parse_connection(header), parsed)
+
+ def test_parse_connection_invalid_header_format(self):
+ for header in ["???", "keep-alive; Upgrade"]:
+ with self.subTest(header=header):
+ with self.assertRaises(InvalidHeaderFormat):
+ parse_connection(header)
+
+ def test_parse_upgrade(self):
+ for header, parsed in [
+ # Realistic use case
+ ("websocket", ["websocket"]),
+ # Synthetic example
+ ("http/3.0, websocket", ["http/3.0", "websocket"]),
+ # Pathological example
+ (",, WebSocket, \t,,", ["WebSocket"]),
+ ]:
+ with self.subTest(header=header):
+ self.assertEqual(parse_upgrade(header), parsed)
+
+ def test_parse_upgrade_invalid_header_format(self):
+ for header in ["???", "websocket 2", "http/3.0; websocket"]:
+ with self.subTest(header=header):
+ with self.assertRaises(InvalidHeaderFormat):
+ parse_upgrade(header)
+
+ def test_parse_extension(self):
+ for header, parsed in [
+ # Synthetic examples
+ ("foo", [("foo", [])]),
+ ("foo, bar", [("foo", []), ("bar", [])]),
+ (
+ 'foo; name; token=token; quoted-string="quoted-string", '
+ "bar; quux; quuux",
+ [
+ (
+ "foo",
+ [
+ ("name", None),
+ ("token", "token"),
+ ("quoted-string", "quoted-string"),
+ ],
+ ),
+ ("bar", [("quux", None), ("quuux", None)]),
+ ],
+ ),
+ # Pathological example
+ (
+ ",\t, , ,foo ;bar = 42,, baz,,",
+ [("foo", [("bar", "42")]), ("baz", [])],
+ ),
+ # Realistic use cases for permessage-deflate
+ ("permessage-deflate", [("permessage-deflate", [])]),
+ (
+ "permessage-deflate; client_max_window_bits",
+ [("permessage-deflate", [("client_max_window_bits", None)])],
+ ),
+ (
+ "permessage-deflate; server_max_window_bits=10",
+ [("permessage-deflate", [("server_max_window_bits", "10")])],
+ ),
+ ]:
+ with self.subTest(header=header):
+ self.assertEqual(parse_extension(header), parsed)
+ # Also ensure that build_extension round-trips cleanly.
+ unparsed = build_extension(parsed)
+ self.assertEqual(parse_extension(unparsed), parsed)
+
+ def test_parse_extension_invalid_header_format(self):
+ for header in [
+ # Truncated examples
+ "",
+ ",\t,",
+ "foo;",
+ "foo; bar;",
+ "foo; bar=",
+ 'foo; bar="baz',
+ # Wrong delimiter
+ "foo, bar, baz=quux; quuux",
+ # Value in quoted string parameter that isn't a token
+ 'foo; bar=" "',
+ ]:
+ with self.subTest(header=header):
+ with self.assertRaises(InvalidHeaderFormat):
+ parse_extension(header)
+
+ def test_parse_subprotocol(self):
+ for header, parsed in [
+ # Synthetic examples
+ ("foo", ["foo"]),
+ ("foo, bar", ["foo", "bar"]),
+ # Pathological example
+ (",\t, , ,foo ,, bar,baz,,", ["foo", "bar", "baz"]),
+ ]:
+ with self.subTest(header=header):
+ self.assertEqual(parse_subprotocol(header), parsed)
+ # Also ensure that build_subprotocol round-trips cleanly.
+ unparsed = build_subprotocol(parsed)
+ self.assertEqual(parse_subprotocol(unparsed), parsed)
+
+ def test_parse_subprotocol_invalid_header(self):
+ for header in [
+ # Truncated examples
+ "",
+ ",\t,"
+ # Wrong delimiter
+ "foo; bar",
+ ]:
+ with self.subTest(header=header):
+ with self.assertRaises(InvalidHeaderFormat):
+ parse_subprotocol(header)
+
+ def test_build_www_authenticate_basic(self):
+ # Test vector from RFC 7617
+ self.assertEqual(
+ build_www_authenticate_basic("foo"), 'Basic realm="foo", charset="UTF-8"'
+ )
+
+ def test_build_www_authenticate_basic_invalid_realm(self):
+ # Realm contains a control character forbidden in quoted-string encoding
+ with self.assertRaises(ValueError):
+ build_www_authenticate_basic("\u0007")
+
+ def test_build_authorization_basic(self):
+ # Test vector from RFC 7617
+ self.assertEqual(
+ build_authorization_basic("Aladdin", "open sesame"),
+ "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==",
+ )
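+ # Informational sketch: the value is "Basic " followed by
+ # base64("user:password"); base64.b64encode(b"Aladdin:open sesame")
+ # yields b"QWxhZGRpbjpvcGVuIHNlc2FtZQ==".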
+
+ def test_build_authorization_basic_utf8(self):
+ # Test vector from RFC 7617
+ self.assertEqual(
+ build_authorization_basic("test", "123£"), "Basic dGVzdDoxMjPCow=="
+ )
+
+ def test_parse_authorization_basic(self):
+ for header, parsed in [
+ ("Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==", ("Aladdin", "open sesame")),
+ # Password contains non-ASCII character
+ ("Basic dGVzdDoxMjPCow==", ("test", "123£")),
+ # Password contains a colon
+ ("Basic YWxhZGRpbjpvcGVuOnNlc2FtZQ==", ("aladdin", "open:sesame")),
+ # Scheme name must be case insensitive
+ ("basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==", ("Aladdin", "open sesame")),
+ ]:
+ with self.subTest(header=header):
+ self.assertEqual(parse_authorization_basic(header), parsed)
+
+ def test_parse_authorization_basic_invalid_header_format(self):
+ for header in [
+ "// Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==",
+ "Basic\tQWxhZGRpbjpvcGVuIHNlc2FtZQ==",
+ "Basic ****************************",
+ "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ== //",
+ ]:
+ with self.subTest(header=header):
+ with self.assertRaises(InvalidHeaderFormat):
+ parse_authorization_basic(header)
+
+ def test_parse_authorization_basic_invalid_header_value(self):
+ for header in [
+ "Digest ...",
+ "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ",
+ "Basic QWxhZGNlc2FtZQ==",
+ ]:
+ with self.subTest(header=header):
+ with self.assertRaises(InvalidHeaderValue):
+ parse_authorization_basic(header)
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_http.py b/testing/web-platform/tests/tools/third_party/websockets/tests/test_http.py
new file mode 100644
index 0000000000..41b522c3d5
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_http.py
@@ -0,0 +1,249 @@
+import asyncio
+import unittest
+
+from websockets.exceptions import SecurityError
+from websockets.http import *
+from websockets.http import read_headers
+
+from .utils import AsyncioTestCase
+
+
+class HTTPAsyncTests(AsyncioTestCase):
+ def setUp(self):
+ super().setUp()
+ self.stream = asyncio.StreamReader(loop=self.loop)
+
+ async def test_read_request(self):
+ # Example from the protocol overview in RFC 6455
+ self.stream.feed_data(
+ b"GET /chat HTTP/1.1\r\n"
+ b"Host: server.example.com\r\n"
+ b"Upgrade: websocket\r\n"
+ b"Connection: Upgrade\r\n"
+ b"Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n"
+ b"Origin: http://example.com\r\n"
+ b"Sec-WebSocket-Protocol: chat, superchat\r\n"
+ b"Sec-WebSocket-Version: 13\r\n"
+ b"\r\n"
+ )
+ path, headers = await read_request(self.stream)
+ self.assertEqual(path, "/chat")
+ self.assertEqual(headers["Upgrade"], "websocket")
+
+ async def test_read_request_empty(self):
+ self.stream.feed_eof()
+ with self.assertRaisesRegex(
+ EOFError, "connection closed while reading HTTP request line"
+ ):
+ await read_request(self.stream)
+
+ async def test_read_request_invalid_request_line(self):
+ self.stream.feed_data(b"GET /\r\n\r\n")
+ with self.assertRaisesRegex(ValueError, "invalid HTTP request line: GET /"):
+ await read_request(self.stream)
+
+ async def test_read_request_unsupported_method(self):
+ self.stream.feed_data(b"OPTIONS * HTTP/1.1\r\n\r\n")
+ with self.assertRaisesRegex(ValueError, "unsupported HTTP method: OPTIONS"):
+ await read_request(self.stream)
+
+ async def test_read_request_unsupported_version(self):
+ self.stream.feed_data(b"GET /chat HTTP/1.0\r\n\r\n")
+ with self.assertRaisesRegex(ValueError, "unsupported HTTP version: HTTP/1.0"):
+ await read_request(self.stream)
+
+ async def test_read_request_invalid_header(self):
+ self.stream.feed_data(b"GET /chat HTTP/1.1\r\nOops\r\n")
+ with self.assertRaisesRegex(ValueError, "invalid HTTP header line: Oops"):
+ await read_request(self.stream)
+
+ async def test_read_response(self):
+ # Example from the protocol overview in RFC 6455
+ self.stream.feed_data(
+ b"HTTP/1.1 101 Switching Protocols\r\n"
+ b"Upgrade: websocket\r\n"
+ b"Connection: Upgrade\r\n"
+ b"Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n"
+ b"Sec-WebSocket-Protocol: chat\r\n"
+ b"\r\n"
+ )
+ status_code, reason, headers = await read_response(self.stream)
+ self.assertEqual(status_code, 101)
+ self.assertEqual(reason, "Switching Protocols")
+ self.assertEqual(headers["Upgrade"], "websocket")
+
+ async def test_read_response_empty(self):
+ self.stream.feed_eof()
+ with self.assertRaisesRegex(
+ EOFError, "connection closed while reading HTTP status line"
+ ):
+ await read_response(self.stream)
+
+ async def test_read_request_invalid_status_line(self):
+ self.stream.feed_data(b"Hello!\r\n")
+ with self.assertRaisesRegex(ValueError, "invalid HTTP status line: Hello!"):
+ await read_response(self.stream)
+
+ async def test_read_response_unsupported_version(self):
+ self.stream.feed_data(b"HTTP/1.0 400 Bad Request\r\n\r\n")
+ with self.assertRaisesRegex(ValueError, "unsupported HTTP version: HTTP/1.0"):
+ await read_response(self.stream)
+
+ async def test_read_response_invalid_status(self):
+ self.stream.feed_data(b"HTTP/1.1 OMG WTF\r\n\r\n")
+ with self.assertRaisesRegex(ValueError, "invalid HTTP status code: OMG"):
+ await read_response(self.stream)
+
+ async def test_read_response_unsupported_status(self):
+ self.stream.feed_data(b"HTTP/1.1 007 My name is Bond\r\n\r\n")
+ with self.assertRaisesRegex(ValueError, "unsupported HTTP status code: 007"):
+ await read_response(self.stream)
+
+ async def test_read_response_invalid_reason(self):
+ self.stream.feed_data(b"HTTP/1.1 200 \x7f\r\n\r\n")
+ with self.assertRaisesRegex(ValueError, "invalid HTTP reason phrase: \\x7f"):
+ await read_response(self.stream)
+
+ async def test_read_response_invalid_header(self):
+ self.stream.feed_data(b"HTTP/1.1 500 Internal Server Error\r\nOops\r\n")
+ with self.assertRaisesRegex(ValueError, "invalid HTTP header line: Oops"):
+ await read_response(self.stream)
+
+ async def test_header_name(self):
+ self.stream.feed_data(b"foo bar: baz qux\r\n\r\n")
+ with self.assertRaises(ValueError):
+ await read_headers(self.stream)
+
+ async def test_header_value(self):
+ self.stream.feed_data(b"foo: \x00\x00\x0f\r\n\r\n")
+ with self.assertRaises(ValueError):
+ await read_headers(self.stream)
+
+ async def test_headers_limit(self):
+ self.stream.feed_data(b"foo: bar\r\n" * 257 + b"\r\n")
+ with self.assertRaises(SecurityError):
+ await read_headers(self.stream)
+
+ async def test_line_limit(self):
+ # Header line contains 5 + 4090 + 2 = 4097 bytes.
+ self.stream.feed_data(b"foo: " + b"a" * 4090 + b"\r\n\r\n")
+ with self.assertRaises(SecurityError):
+ await read_headers(self.stream)
+
+ async def test_line_ending(self):
+ self.stream.feed_data(b"foo: bar\n\n")
+ with self.assertRaises(EOFError):
+ await read_headers(self.stream)
+
+
+class HeadersTests(unittest.TestCase):
+ def setUp(self):
+ self.headers = Headers([("Connection", "Upgrade"), ("Server", USER_AGENT)])
+
+ def test_str(self):
+ self.assertEqual(
+ str(self.headers), f"Connection: Upgrade\r\nServer: {USER_AGENT}\r\n\r\n"
+ )
+
+ def test_repr(self):
+ self.assertEqual(
+ repr(self.headers),
+ f"Headers([('Connection', 'Upgrade'), " f"('Server', '{USER_AGENT}')])",
+ )
+
+ def test_multiple_values_error_str(self):
+ self.assertEqual(str(MultipleValuesError("Connection")), "'Connection'")
+ self.assertEqual(str(MultipleValuesError()), "")
+
+ def test_contains(self):
+ self.assertIn("Server", self.headers)
+
+ def test_contains_case_insensitive(self):
+ self.assertIn("server", self.headers)
+
+ def test_contains_not_found(self):
+ self.assertNotIn("Date", self.headers)
+
+ def test_contains_non_string_key(self):
+ self.assertNotIn(42, self.headers)
+
+ def test_iter(self):
+ self.assertEqual(set(iter(self.headers)), {"connection", "server"})
+
+ def test_len(self):
+ self.assertEqual(len(self.headers), 2)
+
+ def test_getitem(self):
+ self.assertEqual(self.headers["Server"], USER_AGENT)
+
+ def test_getitem_case_insensitive(self):
+ self.assertEqual(self.headers["server"], USER_AGENT)
+
+ def test_getitem_key_error(self):
+ with self.assertRaises(KeyError):
+ self.headers["Upgrade"]
+
+ def test_getitem_multiple_values_error(self):
+ self.headers["Server"] = "2"
+ with self.assertRaises(MultipleValuesError):
+ self.headers["Server"]
+
+ def test_setitem(self):
+ self.headers["Upgrade"] = "websocket"
+ self.assertEqual(self.headers["Upgrade"], "websocket")
+
+ def test_setitem_case_insensitive(self):
+ self.headers["upgrade"] = "websocket"
+ self.assertEqual(self.headers["Upgrade"], "websocket")
+
+ def test_setitem_multiple_values(self):
+ self.headers["Connection"] = "close"
+ with self.assertRaises(MultipleValuesError):
+ self.headers["Connection"]
+
+ def test_delitem(self):
+ del self.headers["Connection"]
+ with self.assertRaises(KeyError):
+ self.headers["Connection"]
+
+ def test_delitem_case_insensitive(self):
+ del self.headers["connection"]
+ with self.assertRaises(KeyError):
+ self.headers["Connection"]
+
+ def test_delitem_multiple_values(self):
+ self.headers["Connection"] = "close"
+ del self.headers["Connection"]
+ with self.assertRaises(KeyError):
+ self.headers["Connection"]
+
+ def test_eq(self):
+ other_headers = self.headers.copy()
+ self.assertEqual(self.headers, other_headers)
+
+ def test_eq_not_equal(self):
+ self.assertNotEqual(self.headers, [])
+
+ def test_clear(self):
+ self.headers.clear()
+ self.assertFalse(self.headers)
+ self.assertEqual(self.headers, Headers())
+
+ def test_get_all(self):
+ self.assertEqual(self.headers.get_all("Connection"), ["Upgrade"])
+
+ def test_get_all_case_insensitive(self):
+ self.assertEqual(self.headers.get_all("connection"), ["Upgrade"])
+
+ def test_get_all_no_values(self):
+ self.assertEqual(self.headers.get_all("Upgrade"), [])
+
+ def test_get_all_multiple_values(self):
+ self.headers["Connection"] = "close"
+ self.assertEqual(self.headers.get_all("Connection"), ["Upgrade", "close"])
+
+ def test_raw_items(self):
+ self.assertEqual(
+ list(self.headers.raw_items()),
+ [("Connection", "Upgrade"), ("Server", USER_AGENT)],
+ )
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_localhost.cnf b/testing/web-platform/tests/tools/third_party/websockets/tests/test_localhost.cnf
new file mode 100644
index 0000000000..6dc331ac69
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_localhost.cnf
@@ -0,0 +1,26 @@
+[ req ]
+
+default_md = sha256
+encrypt_key = no
+
+prompt = no
+
+distinguished_name = dn
+x509_extensions = ext
+
+[ dn ]
+
+C = "FR"
+L = "Paris"
+O = "Aymeric Augustin"
+CN = "localhost"
+
+[ ext ]
+
+subjectAltName = @san
+
+[ san ]
+
+DNS.1 = localhost
+IP.2 = 127.0.0.1
+IP.3 = ::1
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_localhost.pem b/testing/web-platform/tests/tools/third_party/websockets/tests/test_localhost.pem
new file mode 100644
index 0000000000..b8a9ea9ab3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_localhost.pem
@@ -0,0 +1,48 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCUgrQVkNbAWRlo
+zZUj14Ufz7YEp2MXmvmhdlfOGLwjy+xPO98aJRv5/nYF2eWM3llcmLe8FbBSK+QF
+To4su7ZVnc6qITOHqcSDUw06WarQUMs94bhHUvQp1u8+b2hNiMeGw6+QiBI6OJRO
+iGpLRbkN6Uj3AKwi8SYVoLyMiztuwbNyGf8fF3DDpHZtBitGtMSBCMsQsfB465pl
+2UoyBrWa2lsbLt3VvBZZvHqfEuPjpjjKN5USIXnaf0NizaR6ps3EyfftWy4i7zIQ
+N5uTExvaPDyPn9nH3q/dkT99mSMSU1AvTTpX8PN7DlqE6wZMbQsBPRGW7GElQ+Ox
+IKdKOLk5AgMBAAECggEAd3kqzQqnaTiEs4ZoC9yPUUc1pErQ8iWP27Ar9TZ67MVa
+B2ggFJV0C0sFwbFI9WnPNCn77gj4vzJmD0riH+SnS/tXThDFtscBu7BtvNp0C4Bj
+8RWMvXxjxuENuQnBPFbkRWtZ6wk8uK/Zx9AAyyt9M07Qjz1wPfAIdm/IH7zHBFMA
+gsqjnkLh1r0FvjNEbLiuGqYU/GVxaZYd+xy+JU52IxjHUUL9yD0BPWb+Szar6AM2
+gUpmTX6+BcCZwwZ//DzCoWYZ9JbP8akn6edBeZyuMPqYgLzZkPyQ+hRW46VPPw89
+yg4LR9nzgQiBHlac0laB4NrWa+d9QRRLitl1O3gVAQKBgQDDkptxXu7w9Lpc+HeE
+N/pJfpCzUuF7ZC4vatdoDzvfB5Ky6W88Poq+I7bB9m7StXdFAbDyUBxvisjTBMVA
+OtYqpAk/rhX8MjSAtjoFe2nH+eEiQriuZmtA5CdKEXS4hNbc/HhEPWhk7Zh8OV5v
+y7l4r6l4UHqaN9QyE0vlFdmcmQKBgQDCZZR/trJ2/g2OquaS+Zd2h/3NXw0NBq4z
+4OBEWqNa/R35jdK6WlWJH7+tKOacr+xtswLpPeZHGwMdk64/erbYWBuJWAjpH72J
+DM9+1H5fFHANWpWTNn94enQxwfzZRvdkxq4IWzGhesptYnHIzoAmaqC3lbn/e3u0
+Flng32hFoQKBgQCF3D4K3hib0lYQtnxPgmUMktWF+A+fflViXTWs4uhu4mcVkFNz
+n7clJ5q6reryzAQjtmGfqRedfRex340HRn46V2aBMK2Znd9zzcZu5CbmGnFvGs3/
+iNiWZNNDjike9sV+IkxLIODoW/vH4xhxWrbLFSjg0ezoy5ew4qZK2abF2QKBgQC5
+M5efeQpbjTyTUERtf/aKCZOGZmkDoPq0GCjxVjzNQdqd1z0NJ2TYR/QP36idXIlu
+FZ7PYZaS5aw5MGpQtfOe94n8dm++0et7t0WzunRO1yTNxCA+aSxWNquegAcJZa/q
+RdKlyWPmSRqzzZdDzWCPuQQ3AyF5wkYfUy/7qjwoIQKBgB2v96BV7+lICviIKzzb
+1o3A3VzAX5MGd98uLGjlK4qsBC+s7mk2eQztiNZgbA0W6fhQ5Dz3HcXJ5ppy8Okc
+jeAktrNRzz15hvi/XkWdO+VMqiHW4l+sWYukjhCyod1oO1KGHq0LYYvv076syxGw
+vRKLq7IJ4WIp1VtfaBlrIogq
+-----END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDTTCCAjWgAwIBAgIJAJ6VG2cQlsepMA0GCSqGSIb3DQEBCwUAMEwxCzAJBgNV
+BAYTAkZSMQ4wDAYDVQQHDAVQYXJpczEZMBcGA1UECgwQQXltZXJpYyBBdWd1c3Rp
+bjESMBAGA1UEAwwJbG9jYWxob3N0MCAXDTE4MDUwNTE2NTc1NloYDzIwNjAwNTA0
+MTY1NzU2WjBMMQswCQYDVQQGEwJGUjEOMAwGA1UEBwwFUGFyaXMxGTAXBgNVBAoM
+EEF5bWVyaWMgQXVndXN0aW4xEjAQBgNVBAMMCWxvY2FsaG9zdDCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJSCtBWQ1sBZGWjNlSPXhR/PtgSnYxea+aF2
+V84YvCPL7E873xolG/n+dgXZ5YzeWVyYt7wVsFIr5AVOjiy7tlWdzqohM4epxINT
+DTpZqtBQyz3huEdS9CnW7z5vaE2Ix4bDr5CIEjo4lE6IaktFuQ3pSPcArCLxJhWg
+vIyLO27Bs3IZ/x8XcMOkdm0GK0a0xIEIyxCx8HjrmmXZSjIGtZraWxsu3dW8Flm8
+ep8S4+OmOMo3lRIhedp/Q2LNpHqmzcTJ9+1bLiLvMhA3m5MTG9o8PI+f2cfer92R
+P32ZIxJTUC9NOlfw83sOWoTrBkxtCwE9EZbsYSVD47Egp0o4uTkCAwEAAaMwMC4w
+LAYDVR0RBCUwI4IJbG9jYWxob3N0hwR/AAABhxAAAAAAAAAAAAAAAAAAAAABMA0G
+CSqGSIb3DQEBCwUAA4IBAQA0imKp/rflfbDCCx78NdsR5rt0jKem2t3YPGT6tbeU
++FQz62SEdeD2OHWxpvfPf+6h3iTXJbkakr2R4lP3z7GHUe61lt3So9VHAvgbtPTH
+aB1gOdThA83o0fzQtnIv67jCvE9gwPQInViZLEcm2iQEZLj6AuSvBKmluTR7vNRj
+8/f2R4LsDfCWGrzk2W+deGRvSow7irS88NQ8BW8S8otgMiBx4D2UlOmQwqr6X+/r
+jYIDuMb6GDKRXtBUGDokfE94hjj9u2mrNRwt8y4tqu8ZNa//yLEQ0Ow2kP3QJPLY
+941VZpwRi2v/+JvI7OBYlvbOTFwM8nAk79k+Dgviygd9
+-----END CERTIFICATE-----
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_protocol.py b/testing/web-platform/tests/tools/third_party/websockets/tests/test_protocol.py
new file mode 100644
index 0000000000..d32c1f72e7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_protocol.py
@@ -0,0 +1,1475 @@
+import asyncio
+import contextlib
+import sys
+import unittest
+import unittest.mock
+import warnings
+
+from websockets.exceptions import ConnectionClosed, InvalidState
+from websockets.framing import *
+from websockets.protocol import State, WebSocketCommonProtocol
+
+from .utils import MS, AsyncioTestCase
+
+
+async def async_iterable(iterable):
+ for item in iterable:
+ yield item
+
+
+class TransportMock(unittest.mock.Mock):
+ """
+ Transport mock to control the protocol's inputs and outputs in tests.
+
+ It calls the protocol's connection_made and connection_lost methods like
+ actual transports.
+
+ It also calls the protocol's connection_open method to bypass the
+ WebSocket handshake.
+
+ To simulate incoming data, tests call the protocol's data_received and
+ eof_received methods directly.
+
+ They could also call pause_writing and resume_writing to test flow control.
+
+ """
+
+ # This should happen in __init__ but overriding Mock.__init__ is hard.
+ def setup_mock(self, loop, protocol):
+ self.loop = loop
+ self.protocol = protocol
+ self._eof = False
+ self._closing = False
+ # Simulate a successful TCP handshake.
+ self.protocol.connection_made(self)
+ # Simulate a successful WebSocket handshake.
+ self.protocol.connection_open()
+
+ def can_write_eof(self):
+ return True
+
+ def write_eof(self):
+ # When the protocol half-closes the TCP connection, it expects the
+ # other end to close it. Simulate that.
+ if not self._eof:
+ self.loop.call_soon(self.close)
+ self._eof = True
+
+ def close(self):
+ # Simulate how actual transports drop the connection.
+ if not self._closing:
+ self.loop.call_soon(self.protocol.connection_lost, None)
+ self._closing = True
+
+ def abort(self):
+ # Change this to an `if` if tests call abort() multiple times.
+ assert self.protocol.state is not State.CLOSED
+ self.loop.call_soon(self.protocol.connection_lost, None)
+
+
+class CommonTests:
+ """
+ Mixin that defines most tests but doesn't inherit from unittest.TestCase.
+
+ Tests are run by the ServerTests and ClientTests subclasses.
+
+ """
+
+ def setUp(self):
+ super().setUp()
+ # Disable pings to make it easier to test what frames are sent exactly.
+ self.protocol = WebSocketCommonProtocol(ping_interval=None)
+ self.transport = TransportMock()
+ self.transport.setup_mock(self.loop, self.protocol)
+
+ def tearDown(self):
+ self.transport.close()
+ self.loop.run_until_complete(self.protocol.close())
+ super().tearDown()
+
+ # Utilities for writing tests.
+
+ def make_drain_slow(self, delay=MS):
+ # Process connection_made in order to initialize self.protocol.transport.
+ self.run_loop_once()
+
+ original_drain = self.protocol._drain
+
+ async def delayed_drain():
+ await asyncio.sleep(
+ delay, loop=self.loop if sys.version_info[:2] < (3, 8) else None
+ )
+ await original_drain()
+
+ self.protocol._drain = delayed_drain
+
+ close_frame = Frame(True, OP_CLOSE, serialize_close(1000, "close"))
+ local_close = Frame(True, OP_CLOSE, serialize_close(1000, "local"))
+ remote_close = Frame(True, OP_CLOSE, serialize_close(1000, "remote"))
+
+ def receive_frame(self, frame):
+ """
+ Make the protocol receive a frame.
+
+ """
+ write = self.protocol.data_received
+ mask = not self.protocol.is_client
+ frame.write(write, mask=mask)
+
+ def receive_eof(self):
+ """
+ Make the protocol receive the end of the data stream.
+
+ Since ``WebSocketCommonProtocol.eof_received`` returns ``None``, an
+ actual transport would close itself after calling it. This function
+ emulates that behavior.
+
+ """
+ self.protocol.eof_received()
+ self.loop.call_soon(self.transport.close)
+
+ def receive_eof_if_client(self):
+ """
+ Like receive_eof, but only if this is the client side.
+
+ Since the server is supposed to initiate the termination of the TCP
+ connection, this method helps make tests work for both sides.
+
+ """
+ if self.protocol.is_client:
+ self.receive_eof()
+
+ def close_connection(self, code=1000, reason="close"):
+ """
+ Execute a closing handshake.
+
+ This puts the connection in the CLOSED state.
+
+ """
+ close_frame_data = serialize_close(code, reason)
+ # Prepare the response to the closing handshake from the remote side.
+ self.receive_frame(Frame(True, OP_CLOSE, close_frame_data))
+ self.receive_eof_if_client()
+ # Trigger the closing handshake from the local side and complete it.
+ self.loop.run_until_complete(self.protocol.close(code, reason))
+ # Empty the outgoing data stream so we can make assertions later on.
+ self.assertOneFrameSent(True, OP_CLOSE, close_frame_data)
+
+ assert self.protocol.state is State.CLOSED
+
+ def half_close_connection_local(self, code=1000, reason="close"):
+ """
+ Start a closing handshake but do not complete it.
+
+ The main difference from `close_connection` is that the connection is
+ left in the CLOSING state until the event loop runs again.
+
+ The current implementation returns a task that must be awaited or
+ canceled, else asyncio complains about destroying a pending task.
+
+ """
+ close_frame_data = serialize_close(code, reason)
+ # Trigger the closing handshake from the local endpoint.
+ close_task = self.loop.create_task(self.protocol.close(code, reason))
+ self.run_loop_once() # wait_for executes
+ self.run_loop_once() # write_frame executes
+ # Empty the outgoing data stream so we can make assertions later on.
+ self.assertOneFrameSent(True, OP_CLOSE, close_frame_data)
+
+ assert self.protocol.state is State.CLOSING
+
+ # Complete the closing sequence at 1ms intervals so the test can run
+ # at each point even if it goes back to the event loop several times.
+ self.loop.call_later(
+ MS, self.receive_frame, Frame(True, OP_CLOSE, close_frame_data)
+ )
+ self.loop.call_later(2 * MS, self.receive_eof_if_client)
+
+ # This task must be awaited or canceled by the caller.
+ return close_task
+
+ def half_close_connection_remote(self, code=1000, reason="close"):
+ """
+ Receive a closing handshake but do not complete it.
+
+ The main difference from `close_connection` is that the connection is
+ left in the CLOSING state until the event loop runs again.
+
+ """
+ # On the server side, websockets completes the closing handshake and
+ # closes the TCP connection immediately. Yield to the event loop after
+ # sending the close frame to run the test while the connection is in
+ # the CLOSING state.
+ if not self.protocol.is_client:
+ self.make_drain_slow()
+
+ close_frame_data = serialize_close(code, reason)
+ # Trigger the closing handshake from the remote endpoint.
+ self.receive_frame(Frame(True, OP_CLOSE, close_frame_data))
+ self.run_loop_once() # read_frame executes
+ # Empty the outgoing data stream so we can make assertions later on.
+ self.assertOneFrameSent(True, OP_CLOSE, close_frame_data)
+
+ assert self.protocol.state is State.CLOSING
+
+ # Complete the closing sequence at 1ms intervals so the test can run
+ # at each point even if it goes back to the event loop several times.
+ self.loop.call_later(2 * MS, self.receive_eof_if_client)
+
+ def process_invalid_frames(self):
+ """
+ Make the protocol fail quickly after simulating invalid data.
+
+ To achieve this, this function triggers the protocol's eof_received,
+ which interrupts pending reads waiting for more data.
+
+ """
+ self.run_loop_once()
+ self.receive_eof()
+ self.loop.run_until_complete(self.protocol.close_connection_task)
+
+ def sent_frames(self):
+ """
+ Read all frames sent to the transport.
+
+ """
+ stream = asyncio.StreamReader(loop=self.loop)
+
+ for (data,), kw in self.transport.write.call_args_list:
+ stream.feed_data(data)
+ self.transport.write.call_args_list = []
+ stream.feed_eof()
+
+ frames = []
+ while not stream.at_eof():
+ frames.append(
+ self.loop.run_until_complete(
+ Frame.read(stream.readexactly, mask=self.protocol.is_client)
+ )
+ )
+ return frames
+
+ def last_sent_frame(self):
+ """
+ Read the last frame sent to the transport.
+
+ This method assumes that at most one frame was sent. It raises an
+ AssertionError otherwise.
+
+ """
+ frames = self.sent_frames()
+ if frames:
+ assert len(frames) == 1
+ return frames[0]
+
+ def assertFramesSent(self, *frames):
+ self.assertEqual(self.sent_frames(), [Frame(*args) for args in frames])
+
+ def assertOneFrameSent(self, *args):
+ self.assertEqual(self.last_sent_frame(), Frame(*args))
+
+ def assertNoFrameSent(self):
+ self.assertIsNone(self.last_sent_frame())
+
+ def assertConnectionClosed(self, code, message):
+ # The following line guarantees that connection_lost was called.
+ self.assertEqual(self.protocol.state, State.CLOSED)
+ # A close frame was received.
+ self.assertEqual(self.protocol.close_code, code)
+ self.assertEqual(self.protocol.close_reason, message)
+
+ def assertConnectionFailed(self, code, message):
+ # The following line guarantees that connection_lost was called.
+ self.assertEqual(self.protocol.state, State.CLOSED)
+ # No close frame was received.
+ self.assertEqual(self.protocol.close_code, 1006)
+ self.assertEqual(self.protocol.close_reason, "")
+ # A close frame was sent -- unless the connection was already lost.
+ if code == 1006:
+ self.assertNoFrameSent()
+ else:
+ self.assertOneFrameSent(True, OP_CLOSE, serialize_close(code, message))
+
+ @contextlib.contextmanager
+ def assertCompletesWithin(self, min_time, max_time):
+ t0 = self.loop.time()
+ yield
+ t1 = self.loop.time()
+ dt = t1 - t0
+ self.assertGreaterEqual(dt, min_time, f"Too fast: {dt} < {min_time}")
+ self.assertLess(dt, max_time, f"Too slow: {dt} >= {max_time}")
+
+ # Test constructor.
+
+ def test_timeout_backwards_compatibility(self):
+ with warnings.catch_warnings(record=True) as recorded_warnings:
+ protocol = WebSocketCommonProtocol(timeout=5)
+
+ self.assertEqual(protocol.close_timeout, 5)
+
+ self.assertEqual(len(recorded_warnings), 1)
+ warning = recorded_warnings[0].message
+ self.assertEqual(str(warning), "rename timeout to close_timeout")
+ self.assertEqual(type(warning), DeprecationWarning)
+
+ # Test public attributes.
+
+ def test_local_address(self):
+ get_extra_info = unittest.mock.Mock(return_value=("host", 4312))
+ self.transport.get_extra_info = get_extra_info
+
+ self.assertEqual(self.protocol.local_address, ("host", 4312))
+ get_extra_info.assert_called_with("sockname")
+
+ def test_local_address_before_connection(self):
+ # Emulate the situation before connection_open() runs.
+ _transport = self.protocol.transport
+ del self.protocol.transport
+ try:
+ self.assertEqual(self.protocol.local_address, None)
+ finally:
+ self.protocol.transport = _transport
+
+ def test_remote_address(self):
+ get_extra_info = unittest.mock.Mock(return_value=("host", 4312))
+ self.transport.get_extra_info = get_extra_info
+
+ self.assertEqual(self.protocol.remote_address, ("host", 4312))
+ get_extra_info.assert_called_with("peername")
+
+ def test_remote_address_before_connection(self):
+ # Emulate the situation before connection_open() runs.
+ _transport = self.protocol.transport
+ del self.protocol.transport
+ try:
+ self.assertEqual(self.protocol.remote_address, None)
+ finally:
+ self.protocol.transport = _transport
+
+ def test_open(self):
+ self.assertTrue(self.protocol.open)
+ self.close_connection()
+ self.assertFalse(self.protocol.open)
+
+ def test_closed(self):
+ self.assertFalse(self.protocol.closed)
+ self.close_connection()
+ self.assertTrue(self.protocol.closed)
+
+ def test_wait_closed(self):
+ wait_closed = self.loop.create_task(self.protocol.wait_closed())
+ self.assertFalse(wait_closed.done())
+ self.close_connection()
+ self.assertTrue(wait_closed.done())
+
+ # Test the recv coroutine.
+
+ def test_recv_text(self):
+ self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8")))
+ data = self.loop.run_until_complete(self.protocol.recv())
+ self.assertEqual(data, "café")
+
+ def test_recv_binary(self):
+ self.receive_frame(Frame(True, OP_BINARY, b"tea"))
+ data = self.loop.run_until_complete(self.protocol.recv())
+ self.assertEqual(data, b"tea")
+
+ def test_recv_on_closing_connection_local(self):
+ close_task = self.half_close_connection_local()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.recv())
+
+ self.loop.run_until_complete(close_task) # cleanup
+
+ def test_recv_on_closing_connection_remote(self):
+ self.half_close_connection_remote()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.recv())
+
+ def test_recv_on_closed_connection(self):
+ self.close_connection()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.recv())
+
+ def test_recv_protocol_error(self):
+ self.receive_frame(Frame(True, OP_CONT, "café".encode("utf-8")))
+ self.process_invalid_frames()
+ self.assertConnectionFailed(1002, "")
+
+ def test_recv_unicode_error(self):
+ self.receive_frame(Frame(True, OP_TEXT, "café".encode("latin-1")))
+ self.process_invalid_frames()
+ self.assertConnectionFailed(1007, "")
+
+ def test_recv_text_payload_too_big(self):
+ self.protocol.max_size = 1024
+ self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8") * 205))
+ self.process_invalid_frames()
+ self.assertConnectionFailed(1009, "")
+
+ def test_recv_binary_payload_too_big(self):
+ self.protocol.max_size = 1024
+ self.receive_frame(Frame(True, OP_BINARY, b"tea" * 342))
+ self.process_invalid_frames()
+ self.assertConnectionFailed(1009, "")
+
+ def test_recv_text_no_max_size(self):
+ self.protocol.max_size = None # for test coverage
+ self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8") * 205))
+ data = self.loop.run_until_complete(self.protocol.recv())
+ self.assertEqual(data, "café" * 205)
+
+ def test_recv_binary_no_max_size(self):
+ self.protocol.max_size = None # for test coverage
+ self.receive_frame(Frame(True, OP_BINARY, b"tea" * 342))
+ data = self.loop.run_until_complete(self.protocol.recv())
+ self.assertEqual(data, b"tea" * 342)
+
+ def test_recv_queue_empty(self):
+ recv = self.loop.create_task(self.protocol.recv())
+ with self.assertRaises(asyncio.TimeoutError):
+ self.loop.run_until_complete(
+ asyncio.wait_for(asyncio.shield(recv), timeout=MS)
+ )
+
+ self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8")))
+ data = self.loop.run_until_complete(recv)
+ self.assertEqual(data, "café")
+
+ def test_recv_queue_full(self):
+ self.protocol.max_queue = 2
+ # Test internals because it's hard to verify buffers from the outside.
+ self.assertEqual(list(self.protocol.messages), [])
+
+ self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8")))
+ self.run_loop_once()
+ self.assertEqual(list(self.protocol.messages), ["café"])
+
+ self.receive_frame(Frame(True, OP_BINARY, b"tea"))
+ self.run_loop_once()
+ self.assertEqual(list(self.protocol.messages), ["café", b"tea"])
+
+ self.receive_frame(Frame(True, OP_BINARY, b"milk"))
+ self.run_loop_once()
+ self.assertEqual(list(self.protocol.messages), ["café", b"tea"])
+
+ self.loop.run_until_complete(self.protocol.recv())
+ self.run_loop_once()
+ self.assertEqual(list(self.protocol.messages), [b"tea", b"milk"])
+
+ self.loop.run_until_complete(self.protocol.recv())
+ self.run_loop_once()
+ self.assertEqual(list(self.protocol.messages), [b"milk"])
+
+ self.loop.run_until_complete(self.protocol.recv())
+ self.run_loop_once()
+ self.assertEqual(list(self.protocol.messages), [])
+
+ def test_recv_queue_no_limit(self):
+ self.protocol.max_queue = None
+
+ for _ in range(100):
+ self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8")))
+ self.run_loop_once()
+
+ # Incoming message queue can contain at least 100 messages.
+ self.assertEqual(list(self.protocol.messages), ["café"] * 100)
+
+ for _ in range(100):
+ self.loop.run_until_complete(self.protocol.recv())
+
+ self.assertEqual(list(self.protocol.messages), [])
+
+ def test_recv_other_error(self):
+ async def read_message():
+ raise Exception("BOOM")
+
+ self.protocol.read_message = read_message
+ self.process_invalid_frames()
+ self.assertConnectionFailed(1011, "")
+
+ def test_recv_canceled(self):
+ recv = self.loop.create_task(self.protocol.recv())
+ self.loop.call_soon(recv.cancel)
+
+ with self.assertRaises(asyncio.CancelledError):
+ self.loop.run_until_complete(recv)
+
+ # The next frame doesn't disappear in a vacuum (it used to).
+ self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8")))
+ data = self.loop.run_until_complete(self.protocol.recv())
+ self.assertEqual(data, "café")
+
+ def test_recv_canceled_race_condition(self):
+ recv = self.loop.create_task(
+ asyncio.wait_for(self.protocol.recv(), timeout=0.000_001)
+ )
+ self.loop.call_soon(
+ self.receive_frame, Frame(True, OP_TEXT, "café".encode("utf-8"))
+ )
+
+ with self.assertRaises(asyncio.TimeoutError):
+ self.loop.run_until_complete(recv)
+
+ # The previous frame doesn't disappear in a vacuum (it used to).
+ self.receive_frame(Frame(True, OP_TEXT, "tea".encode("utf-8")))
+ data = self.loop.run_until_complete(self.protocol.recv())
+ # If we're getting "tea" there, it means "café" was swallowed (ha, ha).
+ self.assertEqual(data, "café")
+
+ def test_recv_when_transfer_data_cancelled(self):
+ # Clog incoming queue.
+ self.protocol.max_queue = 1
+ self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8")))
+ self.receive_frame(Frame(True, OP_BINARY, b"tea"))
+ self.run_loop_once()
+
+ # Flow control kicks in (check with an implementation detail).
+ self.assertFalse(self.protocol._put_message_waiter.done())
+
+ # Schedule recv().
+ recv = self.loop.create_task(self.protocol.recv())
+
+ # Cancel transfer_data_task (again, implementation detail).
+ self.protocol.fail_connection()
+ self.run_loop_once()
+ self.assertTrue(self.protocol.transfer_data_task.cancelled())
+
+ # recv() completes properly.
+ self.assertEqual(self.loop.run_until_complete(recv), "café")
+
+ def test_recv_prevents_concurrent_calls(self):
+ recv = self.loop.create_task(self.protocol.recv())
+
+ with self.assertRaisesRegex(
+ RuntimeError,
+ "cannot call recv while another coroutine "
+ "is already waiting for the next message",
+ ):
+ self.loop.run_until_complete(self.protocol.recv())
+
+ recv.cancel()
+
+ # Test the send coroutine.
+
+ def test_send_text(self):
+ self.loop.run_until_complete(self.protocol.send("café"))
+ self.assertOneFrameSent(True, OP_TEXT, "café".encode("utf-8"))
+
+ def test_send_binary(self):
+ self.loop.run_until_complete(self.protocol.send(b"tea"))
+ self.assertOneFrameSent(True, OP_BINARY, b"tea")
+
+ def test_send_binary_from_bytearray(self):
+ self.loop.run_until_complete(self.protocol.send(bytearray(b"tea")))
+ self.assertOneFrameSent(True, OP_BINARY, b"tea")
+
+ def test_send_binary_from_memoryview(self):
+ self.loop.run_until_complete(self.protocol.send(memoryview(b"tea")))
+ self.assertOneFrameSent(True, OP_BINARY, b"tea")
+
+ def test_send_binary_from_non_contiguous_memoryview(self):
+ self.loop.run_until_complete(self.protocol.send(memoryview(b"tteeaa")[::2]))
+ self.assertOneFrameSent(True, OP_BINARY, b"tea")
+
+ def test_send_type_error(self):
+ with self.assertRaises(TypeError):
+ self.loop.run_until_complete(self.protocol.send(42))
+ self.assertNoFrameSent()
+
+ def test_send_iterable_text(self):
+ self.loop.run_until_complete(self.protocol.send(["ca", "fé"]))
+ self.assertFramesSent(
+ (False, OP_TEXT, "ca".encode("utf-8")),
+ (False, OP_CONT, "fé".encode("utf-8")),
+ (True, OP_CONT, "".encode("utf-8")),
+ )
+
+ def test_send_iterable_binary(self):
+ self.loop.run_until_complete(self.protocol.send([b"te", b"a"]))
+ self.assertFramesSent(
+ (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"")
+ )
+
+ def test_send_iterable_binary_from_bytearray(self):
+ self.loop.run_until_complete(
+ self.protocol.send([bytearray(b"te"), bytearray(b"a")])
+ )
+ self.assertFramesSent(
+ (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"")
+ )
+
+ def test_send_iterable_binary_from_memoryview(self):
+ self.loop.run_until_complete(
+ self.protocol.send([memoryview(b"te"), memoryview(b"a")])
+ )
+ self.assertFramesSent(
+ (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"")
+ )
+
+ def test_send_iterable_binary_from_non_contiguous_memoryview(self):
+ self.loop.run_until_complete(
+ self.protocol.send([memoryview(b"ttee")[::2], memoryview(b"aa")[::2]])
+ )
+ self.assertFramesSent(
+ (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"")
+ )
+
+ def test_send_empty_iterable(self):
+ self.loop.run_until_complete(self.protocol.send([]))
+ self.assertNoFrameSent()
+
+ def test_send_iterable_type_error(self):
+ with self.assertRaises(TypeError):
+ self.loop.run_until_complete(self.protocol.send([42]))
+ self.assertNoFrameSent()
+
+ def test_send_iterable_mixed_type_error(self):
+ with self.assertRaises(TypeError):
+ self.loop.run_until_complete(self.protocol.send(["café", b"tea"]))
+ self.assertFramesSent(
+ (False, OP_TEXT, "café".encode("utf-8")),
+ (True, OP_CLOSE, serialize_close(1011, "")),
+ )
+
+ def test_send_iterable_prevents_concurrent_send(self):
+ self.make_drain_slow(2 * MS)
+
+ async def send_iterable():
+ await self.protocol.send(["ca", "fé"])
+
+ async def send_concurrent():
+ await asyncio.sleep(MS)
+ await self.protocol.send(b"tea")
+
+ self.loop.run_until_complete(asyncio.gather(send_iterable(), send_concurrent()))
+ self.assertFramesSent(
+ (False, OP_TEXT, "ca".encode("utf-8")),
+ (False, OP_CONT, "fé".encode("utf-8")),
+ (True, OP_CONT, "".encode("utf-8")),
+ (True, OP_BINARY, b"tea"),
+ )
+
+ def test_send_async_iterable_text(self):
+ self.loop.run_until_complete(self.protocol.send(async_iterable(["ca", "fé"])))
+ self.assertFramesSent(
+ (False, OP_TEXT, "ca".encode("utf-8")),
+ (False, OP_CONT, "fé".encode("utf-8")),
+ (True, OP_CONT, "".encode("utf-8")),
+ )
+
+ def test_send_async_iterable_binary(self):
+ self.loop.run_until_complete(self.protocol.send(async_iterable([b"te", b"a"])))
+ self.assertFramesSent(
+ (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"")
+ )
+
+ def test_send_async_iterable_binary_from_bytearray(self):
+ self.loop.run_until_complete(
+ self.protocol.send(async_iterable([bytearray(b"te"), bytearray(b"a")]))
+ )
+ self.assertFramesSent(
+ (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"")
+ )
+
+ def test_send_async_iterable_binary_from_memoryview(self):
+ self.loop.run_until_complete(
+ self.protocol.send(async_iterable([memoryview(b"te"), memoryview(b"a")]))
+ )
+ self.assertFramesSent(
+ (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"")
+ )
+
+ def test_send_async_iterable_binary_from_non_contiguous_memoryview(self):
+ self.loop.run_until_complete(
+ self.protocol.send(
+ async_iterable([memoryview(b"ttee")[::2], memoryview(b"aa")[::2]])
+ )
+ )
+ self.assertFramesSent(
+ (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"")
+ )
+
+ def test_send_empty_async_iterable(self):
+ self.loop.run_until_complete(self.protocol.send(async_iterable([])))
+ self.assertNoFrameSent()
+
+ def test_send_async_iterable_type_error(self):
+ with self.assertRaises(TypeError):
+ self.loop.run_until_complete(self.protocol.send(async_iterable([42])))
+ self.assertNoFrameSent()
+
+ def test_send_async_iterable_mixed_type_error(self):
+ with self.assertRaises(TypeError):
+ self.loop.run_until_complete(
+ self.protocol.send(async_iterable(["café", b"tea"]))
+ )
+ self.assertFramesSent(
+ (False, OP_TEXT, "café".encode("utf-8")),
+ (True, OP_CLOSE, serialize_close(1011, "")),
+ )
+
+ def test_send_async_iterable_prevents_concurrent_send(self):
+ self.make_drain_slow(2 * MS)
+
+ async def send_async_iterable():
+ await self.protocol.send(async_iterable(["ca", "fé"]))
+
+ async def send_concurrent():
+ await asyncio.sleep(MS)
+ await self.protocol.send(b"tea")
+
+ self.loop.run_until_complete(
+ asyncio.gather(send_async_iterable(), send_concurrent())
+ )
+ self.assertFramesSent(
+ (False, OP_TEXT, "ca".encode("utf-8")),
+ (False, OP_CONT, "fé".encode("utf-8")),
+ (True, OP_CONT, "".encode("utf-8")),
+ (True, OP_BINARY, b"tea"),
+ )
+
+ def test_send_on_closing_connection_local(self):
+ close_task = self.half_close_connection_local()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.send("foobar"))
+
+ self.assertNoFrameSent()
+
+ self.loop.run_until_complete(close_task) # cleanup
+
+ def test_send_on_closing_connection_remote(self):
+ self.half_close_connection_remote()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.send("foobar"))
+
+ self.assertNoFrameSent()
+
+ def test_send_on_closed_connection(self):
+ self.close_connection()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.send("foobar"))
+
+ self.assertNoFrameSent()
+
+ # Test the ping coroutine.
+
+ def test_ping_default(self):
+ self.loop.run_until_complete(self.protocol.ping())
+ # With our testing tools, it's more convenient to extract the expected
+ # ping data from the library's internals than from the frame sent.
+ ping_data = next(iter(self.protocol.pings))
+ self.assertIsInstance(ping_data, bytes)
+ self.assertEqual(len(ping_data), 4)
+ self.assertOneFrameSent(True, OP_PING, ping_data)
+
+ def test_ping_text(self):
+ self.loop.run_until_complete(self.protocol.ping("café"))
+ self.assertOneFrameSent(True, OP_PING, "café".encode("utf-8"))
+
+ def test_ping_binary(self):
+ self.loop.run_until_complete(self.protocol.ping(b"tea"))
+ self.assertOneFrameSent(True, OP_PING, b"tea")
+
+ def test_ping_binary_from_bytearray(self):
+ self.loop.run_until_complete(self.protocol.ping(bytearray(b"tea")))
+ self.assertOneFrameSent(True, OP_PING, b"tea")
+
+ def test_ping_binary_from_memoryview(self):
+ self.loop.run_until_complete(self.protocol.ping(memoryview(b"tea")))
+ self.assertOneFrameSent(True, OP_PING, b"tea")
+
+ def test_ping_binary_from_non_contiguous_memoryview(self):
+ self.loop.run_until_complete(self.protocol.ping(memoryview(b"tteeaa")[::2]))
+ self.assertOneFrameSent(True, OP_PING, b"tea")
+
+ def test_ping_type_error(self):
+ with self.assertRaises(TypeError):
+ self.loop.run_until_complete(self.protocol.ping(42))
+ self.assertNoFrameSent()
+
+ def test_ping_on_closing_connection_local(self):
+ close_task = self.half_close_connection_local()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.ping())
+
+ self.assertNoFrameSent()
+
+ self.loop.run_until_complete(close_task) # cleanup
+
+ def test_ping_on_closing_connection_remote(self):
+ self.half_close_connection_remote()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.ping())
+
+ self.assertNoFrameSent()
+
+ def test_ping_on_closed_connection(self):
+ self.close_connection()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.ping())
+
+ self.assertNoFrameSent()
+
+ # Test the pong coroutine.
+
+ def test_pong_default(self):
+ self.loop.run_until_complete(self.protocol.pong())
+ self.assertOneFrameSent(True, OP_PONG, b"")
+
+ def test_pong_text(self):
+ self.loop.run_until_complete(self.protocol.pong("café"))
+ self.assertOneFrameSent(True, OP_PONG, "café".encode("utf-8"))
+
+ def test_pong_binary(self):
+ self.loop.run_until_complete(self.protocol.pong(b"tea"))
+ self.assertOneFrameSent(True, OP_PONG, b"tea")
+
+ def test_pong_binary_from_bytearray(self):
+ self.loop.run_until_complete(self.protocol.pong(bytearray(b"tea")))
+ self.assertOneFrameSent(True, OP_PONG, b"tea")
+
+ def test_pong_binary_from_memoryview(self):
+ self.loop.run_until_complete(self.protocol.pong(memoryview(b"tea")))
+ self.assertOneFrameSent(True, OP_PONG, b"tea")
+
+ def test_pong_binary_from_non_contiguous_memoryview(self):
+ self.loop.run_until_complete(self.protocol.pong(memoryview(b"tteeaa")[::2]))
+ self.assertOneFrameSent(True, OP_PONG, b"tea")
+
+ def test_pong_type_error(self):
+ with self.assertRaises(TypeError):
+ self.loop.run_until_complete(self.protocol.pong(42))
+ self.assertNoFrameSent()
+
+ def test_pong_on_closing_connection_local(self):
+ close_task = self.half_close_connection_local()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.pong())
+
+ self.assertNoFrameSent()
+
+ self.loop.run_until_complete(close_task) # cleanup
+
+ def test_pong_on_closing_connection_remote(self):
+ self.half_close_connection_remote()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.pong())
+
+ self.assertNoFrameSent()
+
+ def test_pong_on_closed_connection(self):
+ self.close_connection()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.pong())
+
+ self.assertNoFrameSent()
+
+ # Test the protocol's logic for acknowledging pings with pongs.
+
+ def test_answer_ping(self):
+ self.receive_frame(Frame(True, OP_PING, b"test"))
+ self.run_loop_once()
+ self.assertOneFrameSent(True, OP_PONG, b"test")
+
+ def test_ignore_pong(self):
+ self.receive_frame(Frame(True, OP_PONG, b"test"))
+ self.run_loop_once()
+ self.assertNoFrameSent()
+
+ def test_acknowledge_ping(self):
+ ping = self.loop.run_until_complete(self.protocol.ping())
+ self.assertFalse(ping.done())
+ ping_frame = self.last_sent_frame()
+ pong_frame = Frame(True, OP_PONG, ping_frame.data)
+ self.receive_frame(pong_frame)
+ self.run_loop_once()
+ self.run_loop_once()
+ self.assertTrue(ping.done())
+
+ def test_abort_ping(self):
+ ping = self.loop.run_until_complete(self.protocol.ping())
+ # Remove the frame from the buffer, else close_connection() complains.
+ self.last_sent_frame()
+ self.assertFalse(ping.done())
+ self.close_connection()
+ self.assertTrue(ping.done())
+ self.assertIsInstance(ping.exception(), ConnectionClosed)
+
+    def test_abort_ping_does_not_log_exception_if_not_retrieved(self):
+ self.loop.run_until_complete(self.protocol.ping())
+ # Get the internal Future, which isn't directly returned by ping().
+ (ping,) = self.protocol.pings.values()
+ # Remove the frame from the buffer, else close_connection() complains.
+ self.last_sent_frame()
+ self.close_connection()
+ # Check a private attribute, for lack of a better solution.
+ self.assertFalse(ping._log_traceback)
+
+ def test_acknowledge_previous_pings(self):
+ pings = [
+ (self.loop.run_until_complete(self.protocol.ping()), self.last_sent_frame())
+ for i in range(3)
+ ]
+ # Unsolicited pong doesn't acknowledge pings
+ self.receive_frame(Frame(True, OP_PONG, b""))
+ self.run_loop_once()
+ self.run_loop_once()
+ self.assertFalse(pings[0][0].done())
+ self.assertFalse(pings[1][0].done())
+ self.assertFalse(pings[2][0].done())
+ # Pong acknowledges all previous pings
+ self.receive_frame(Frame(True, OP_PONG, pings[1][1].data))
+ self.run_loop_once()
+ self.run_loop_once()
+ self.assertTrue(pings[0][0].done())
+ self.assertTrue(pings[1][0].done())
+ self.assertFalse(pings[2][0].done())
+
+ def test_acknowledge_aborted_ping(self):
+ ping = self.loop.run_until_complete(self.protocol.ping())
+ ping_frame = self.last_sent_frame()
+ # Clog incoming queue. This lets connection_lost() abort pending pings
+ # with a ConnectionClosed exception before transfer_data_task
+ # terminates and close_connection cancels keepalive_ping_task.
+ self.protocol.max_queue = 1
+ self.receive_frame(Frame(True, OP_TEXT, b"1"))
+ self.receive_frame(Frame(True, OP_TEXT, b"2"))
+ # Add pong frame to the queue.
+ pong_frame = Frame(True, OP_PONG, ping_frame.data)
+ self.receive_frame(pong_frame)
+ # Connection drops.
+ self.receive_eof()
+ self.loop.run_until_complete(self.protocol.wait_closed())
+ # Ping receives a ConnectionClosed exception.
+ with self.assertRaises(ConnectionClosed):
+ ping.result()
+
+ # transfer_data doesn't crash, which would be logged.
+ with self.assertNoLogs():
+ # Unclog incoming queue.
+ self.loop.run_until_complete(self.protocol.recv())
+ self.loop.run_until_complete(self.protocol.recv())
+
+ def test_canceled_ping(self):
+ ping = self.loop.run_until_complete(self.protocol.ping())
+ ping_frame = self.last_sent_frame()
+ ping.cancel()
+ pong_frame = Frame(True, OP_PONG, ping_frame.data)
+ self.receive_frame(pong_frame)
+ self.run_loop_once()
+ self.run_loop_once()
+ self.assertTrue(ping.cancelled())
+
+ def test_duplicate_ping(self):
+ self.loop.run_until_complete(self.protocol.ping(b"foobar"))
+ self.assertOneFrameSent(True, OP_PING, b"foobar")
+ with self.assertRaises(ValueError):
+ self.loop.run_until_complete(self.protocol.ping(b"foobar"))
+ self.assertNoFrameSent()
+
+ # Test the protocol's logic for rebuilding fragmented messages.
+
+ def test_fragmented_text(self):
+ self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8")))
+ self.receive_frame(Frame(True, OP_CONT, "fé".encode("utf-8")))
+ data = self.loop.run_until_complete(self.protocol.recv())
+ self.assertEqual(data, "café")
+
+ def test_fragmented_binary(self):
+ self.receive_frame(Frame(False, OP_BINARY, b"t"))
+ self.receive_frame(Frame(False, OP_CONT, b"e"))
+ self.receive_frame(Frame(True, OP_CONT, b"a"))
+ data = self.loop.run_until_complete(self.protocol.recv())
+ self.assertEqual(data, b"tea")
+
+ def test_fragmented_text_payload_too_big(self):
+ self.protocol.max_size = 1024
+ self.receive_frame(Frame(False, OP_TEXT, "café".encode("utf-8") * 100))
+ self.receive_frame(Frame(True, OP_CONT, "café".encode("utf-8") * 105))
+ self.process_invalid_frames()
+ self.assertConnectionFailed(1009, "")
+
+ def test_fragmented_binary_payload_too_big(self):
+ self.protocol.max_size = 1024
+ self.receive_frame(Frame(False, OP_BINARY, b"tea" * 171))
+ self.receive_frame(Frame(True, OP_CONT, b"tea" * 171))
+ self.process_invalid_frames()
+ self.assertConnectionFailed(1009, "")
+
+ def test_fragmented_text_no_max_size(self):
+ self.protocol.max_size = None # for test coverage
+ self.receive_frame(Frame(False, OP_TEXT, "café".encode("utf-8") * 100))
+ self.receive_frame(Frame(True, OP_CONT, "café".encode("utf-8") * 105))
+ data = self.loop.run_until_complete(self.protocol.recv())
+ self.assertEqual(data, "café" * 205)
+
+ def test_fragmented_binary_no_max_size(self):
+ self.protocol.max_size = None # for test coverage
+ self.receive_frame(Frame(False, OP_BINARY, b"tea" * 171))
+ self.receive_frame(Frame(True, OP_CONT, b"tea" * 171))
+ data = self.loop.run_until_complete(self.protocol.recv())
+ self.assertEqual(data, b"tea" * 342)
+
+ def test_control_frame_within_fragmented_text(self):
+ self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8")))
+ self.receive_frame(Frame(True, OP_PING, b""))
+ self.receive_frame(Frame(True, OP_CONT, "fé".encode("utf-8")))
+ data = self.loop.run_until_complete(self.protocol.recv())
+ self.assertEqual(data, "café")
+ self.assertOneFrameSent(True, OP_PONG, b"")
+
+ def test_unterminated_fragmented_text(self):
+ self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8")))
+ # Missing the second part of the fragmented frame.
+ self.receive_frame(Frame(True, OP_BINARY, b"tea"))
+ self.process_invalid_frames()
+ self.assertConnectionFailed(1002, "")
+
+ def test_close_handshake_in_fragmented_text(self):
+ self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8")))
+ self.receive_frame(Frame(True, OP_CLOSE, b""))
+ self.process_invalid_frames()
+ # The RFC may have overlooked this case: it says that control frames
+ # can be interjected in the middle of a fragmented message and that a
+ # close frame must be echoed. Even though there's an unterminated
+ # message, technically, the closing handshake was successful.
+ self.assertConnectionClosed(1005, "")
+
+ def test_connection_close_in_fragmented_text(self):
+ self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8")))
+ self.process_invalid_frames()
+ self.assertConnectionFailed(1006, "")
+
+ # Test miscellaneous code paths to ensure full coverage.
+
+ def test_connection_lost(self):
+ # Test calling connection_lost without going through close_connection.
+ self.protocol.connection_lost(None)
+
+ self.assertConnectionFailed(1006, "")
+
+ def test_ensure_open_before_opening_handshake(self):
+ # Simulate a bug by forcibly reverting the protocol state.
+ self.protocol.state = State.CONNECTING
+
+ with self.assertRaises(InvalidState):
+ self.loop.run_until_complete(self.protocol.ensure_open())
+
+ def test_ensure_open_during_unclean_close(self):
+ # Process connection_made in order to start transfer_data_task.
+ self.run_loop_once()
+
+ # Ensure the test terminates quickly.
+ self.loop.call_later(MS, self.receive_eof_if_client)
+
+ # Simulate the case when close() times out sending a close frame.
+ self.protocol.fail_connection()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.ensure_open())
+
+ def test_legacy_recv(self):
+        # By default legacy_recv is disabled.
+ self.assertEqual(self.protocol.legacy_recv, False)
+
+ self.close_connection()
+
+ # Enable legacy_recv.
+ self.protocol.legacy_recv = True
+
+ # Now recv() returns None instead of raising ConnectionClosed.
+ self.assertIsNone(self.loop.run_until_complete(self.protocol.recv()))
+
+ def test_connection_closed_attributes(self):
+ self.close_connection()
+
+ with self.assertRaises(ConnectionClosed) as context:
+ self.loop.run_until_complete(self.protocol.recv())
+
+ connection_closed_exc = context.exception
+ self.assertEqual(connection_closed_exc.code, 1000)
+ self.assertEqual(connection_closed_exc.reason, "close")
+
+ # Test the protocol logic for sending keepalive pings.
+
+ def restart_protocol_with_keepalive_ping(
+ self, ping_interval=3 * MS, ping_timeout=3 * MS
+ ):
+ initial_protocol = self.protocol
+ # copied from tearDown
+ self.transport.close()
+ self.loop.run_until_complete(self.protocol.close())
+ # copied from setUp, but enables keepalive pings
+ self.protocol = WebSocketCommonProtocol(
+ ping_interval=ping_interval, ping_timeout=ping_timeout
+ )
+ self.transport = TransportMock()
+ self.transport.setup_mock(self.loop, self.protocol)
+ self.protocol.is_client = initial_protocol.is_client
+ self.protocol.side = initial_protocol.side
+
+ def test_keepalive_ping(self):
+ self.restart_protocol_with_keepalive_ping()
+
+ # Ping is sent at 3ms and acknowledged at 4ms.
+ self.loop.run_until_complete(asyncio.sleep(4 * MS))
+ (ping_1,) = tuple(self.protocol.pings)
+ self.assertOneFrameSent(True, OP_PING, ping_1)
+ self.receive_frame(Frame(True, OP_PONG, ping_1))
+
+ # Next ping is sent at 7ms.
+ self.loop.run_until_complete(asyncio.sleep(4 * MS))
+ (ping_2,) = tuple(self.protocol.pings)
+ self.assertOneFrameSent(True, OP_PING, ping_2)
+
+ # The keepalive ping task goes on.
+ self.assertFalse(self.protocol.keepalive_ping_task.done())
+
+ def test_keepalive_ping_not_acknowledged_closes_connection(self):
+ self.restart_protocol_with_keepalive_ping()
+
+        # Ping is sent at 3ms and not acknowledged.
+ self.loop.run_until_complete(asyncio.sleep(4 * MS))
+ (ping_1,) = tuple(self.protocol.pings)
+ self.assertOneFrameSent(True, OP_PING, ping_1)
+
+ # Connection is closed at 6ms.
+ self.loop.run_until_complete(asyncio.sleep(4 * MS))
+ self.assertOneFrameSent(True, OP_CLOSE, serialize_close(1011, ""))
+
+ # The keepalive ping task is complete.
+ self.assertEqual(self.protocol.keepalive_ping_task.result(), None)
+
+ def test_keepalive_ping_stops_when_connection_closing(self):
+ self.restart_protocol_with_keepalive_ping()
+ close_task = self.half_close_connection_local()
+
+ # No ping sent at 3ms because the closing handshake is in progress.
+ self.loop.run_until_complete(asyncio.sleep(4 * MS))
+ self.assertNoFrameSent()
+
+ # The keepalive ping task terminated.
+ self.assertTrue(self.protocol.keepalive_ping_task.cancelled())
+
+ self.loop.run_until_complete(close_task) # cleanup
+
+ def test_keepalive_ping_stops_when_connection_closed(self):
+ self.restart_protocol_with_keepalive_ping()
+ self.close_connection()
+
+ # The keepalive ping task terminated.
+ self.assertTrue(self.protocol.keepalive_ping_task.cancelled())
+
+ def test_keepalive_ping_does_not_crash_when_connection_lost(self):
+ self.restart_protocol_with_keepalive_ping()
+ # Clog incoming queue. This lets connection_lost() abort pending pings
+ # with a ConnectionClosed exception before transfer_data_task
+ # terminates and close_connection cancels keepalive_ping_task.
+ self.protocol.max_queue = 1
+ self.receive_frame(Frame(True, OP_TEXT, b"1"))
+ self.receive_frame(Frame(True, OP_TEXT, b"2"))
+ # Ping is sent at 3ms.
+ self.loop.run_until_complete(asyncio.sleep(4 * MS))
+ (ping_waiter,) = tuple(self.protocol.pings.values())
+ # Connection drops.
+ self.receive_eof()
+ self.loop.run_until_complete(self.protocol.wait_closed())
+
+ # The ping waiter receives a ConnectionClosed exception.
+ with self.assertRaises(ConnectionClosed):
+ ping_waiter.result()
+ # The keepalive ping task terminated properly.
+ self.assertIsNone(self.protocol.keepalive_ping_task.result())
+
+ # Unclog incoming queue to terminate the test quickly.
+ self.loop.run_until_complete(self.protocol.recv())
+ self.loop.run_until_complete(self.protocol.recv())
+
+ def test_keepalive_ping_with_no_ping_interval(self):
+ self.restart_protocol_with_keepalive_ping(ping_interval=None)
+
+ # No ping is sent at 3ms.
+ self.loop.run_until_complete(asyncio.sleep(4 * MS))
+ self.assertNoFrameSent()
+
+ def test_keepalive_ping_with_no_ping_timeout(self):
+ self.restart_protocol_with_keepalive_ping(ping_timeout=None)
+
+        # Ping is sent at 3ms and not acknowledged.
+ self.loop.run_until_complete(asyncio.sleep(4 * MS))
+ (ping_1,) = tuple(self.protocol.pings)
+ self.assertOneFrameSent(True, OP_PING, ping_1)
+
+ # Next ping is sent at 7ms anyway.
+ self.loop.run_until_complete(asyncio.sleep(4 * MS))
+ ping_1_again, ping_2 = tuple(self.protocol.pings)
+ self.assertEqual(ping_1, ping_1_again)
+ self.assertOneFrameSent(True, OP_PING, ping_2)
+
+ # The keepalive ping task goes on.
+ self.assertFalse(self.protocol.keepalive_ping_task.done())
+
+ def test_keepalive_ping_unexpected_error(self):
+ self.restart_protocol_with_keepalive_ping()
+
+ async def ping():
+ raise Exception("BOOM")
+
+ self.protocol.ping = ping
+
+ # The keepalive ping task fails when sending a ping at 3ms.
+ self.loop.run_until_complete(asyncio.sleep(4 * MS))
+
+ # The keepalive ping task is complete.
+ # It logs and swallows the exception.
+ self.assertEqual(self.protocol.keepalive_ping_task.result(), None)
+
+ # Test the protocol logic for closing the connection.
+
+ def test_local_close(self):
+ # Emulate how the remote endpoint answers the closing handshake.
+ self.loop.call_later(MS, self.receive_frame, self.close_frame)
+ self.loop.call_later(MS, self.receive_eof_if_client)
+
+ # Run the closing handshake.
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+
+ self.assertConnectionClosed(1000, "close")
+ self.assertOneFrameSent(*self.close_frame)
+
+ # Closing the connection again is a no-op.
+ self.loop.run_until_complete(self.protocol.close(reason="oh noes!"))
+
+ self.assertConnectionClosed(1000, "close")
+ self.assertNoFrameSent()
+
+ def test_remote_close(self):
+ # Emulate how the remote endpoint initiates the closing handshake.
+ self.loop.call_later(MS, self.receive_frame, self.close_frame)
+ self.loop.call_later(MS, self.receive_eof_if_client)
+
+ # Wait for some data in order to process the handshake.
+ # After recv() raises ConnectionClosed, the connection is closed.
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(self.protocol.recv())
+
+ self.assertConnectionClosed(1000, "close")
+ self.assertOneFrameSent(*self.close_frame)
+
+ # Closing the connection again is a no-op.
+ self.loop.run_until_complete(self.protocol.close(reason="oh noes!"))
+
+ self.assertConnectionClosed(1000, "close")
+ self.assertNoFrameSent()
+
+ def test_remote_close_and_connection_lost(self):
+ self.make_drain_slow()
+ # Drop the connection right after receiving a close frame,
+ # which prevents echoing the close frame properly.
+ self.receive_frame(self.close_frame)
+ self.receive_eof()
+
+ with self.assertNoLogs():
+ self.loop.run_until_complete(self.protocol.close(reason="oh noes!"))
+
+ self.assertConnectionClosed(1000, "close")
+ self.assertOneFrameSent(*self.close_frame)
+
+ def test_simultaneous_close(self):
+ # Receive the incoming close frame right after self.protocol.close()
+ # starts executing. This reproduces the error described in:
+ # https://github.com/aaugustin/websockets/issues/339
+ self.loop.call_soon(self.receive_frame, self.remote_close)
+ self.loop.call_soon(self.receive_eof_if_client)
+
+ self.loop.run_until_complete(self.protocol.close(reason="local"))
+
+ self.assertConnectionClosed(1000, "remote")
+ # The current implementation sends a close frame in response to the
+ # close frame received from the remote end. It skips the close frame
+ # that should be sent as a result of calling close().
+ self.assertOneFrameSent(*self.remote_close)
+
+ def test_close_preserves_incoming_frames(self):
+ self.receive_frame(Frame(True, OP_TEXT, b"hello"))
+
+ self.loop.call_later(MS, self.receive_frame, self.close_frame)
+ self.loop.call_later(MS, self.receive_eof_if_client)
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+
+ self.assertConnectionClosed(1000, "close")
+ self.assertOneFrameSent(*self.close_frame)
+
+ next_message = self.loop.run_until_complete(self.protocol.recv())
+ self.assertEqual(next_message, "hello")
+
+ def test_close_protocol_error(self):
+ invalid_close_frame = Frame(True, OP_CLOSE, b"\x00")
+ self.receive_frame(invalid_close_frame)
+ self.receive_eof_if_client()
+ self.run_loop_once()
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+
+ self.assertConnectionFailed(1002, "")
+
+ def test_close_connection_lost(self):
+ self.receive_eof()
+ self.run_loop_once()
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+
+ self.assertConnectionFailed(1006, "")
+
+ def test_local_close_during_recv(self):
+ recv = self.loop.create_task(self.protocol.recv())
+
+ self.loop.call_later(MS, self.receive_frame, self.close_frame)
+ self.loop.call_later(MS, self.receive_eof_if_client)
+
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(recv)
+
+ self.assertConnectionClosed(1000, "close")
+
+ # There is no test_remote_close_during_recv because it would be identical
+ # to test_remote_close.
+
+ def test_remote_close_during_send(self):
+ self.make_drain_slow()
+ send = self.loop.create_task(self.protocol.send("hello"))
+
+ self.receive_frame(self.close_frame)
+ self.receive_eof()
+
+ with self.assertRaises(ConnectionClosed):
+ self.loop.run_until_complete(send)
+
+ self.assertConnectionClosed(1000, "close")
+
+ # There is no test_local_close_during_send because this cannot really
+ # happen, considering that writes are serialized.
+
+
+class ServerTests(CommonTests, AsyncioTestCase):
+ def setUp(self):
+ super().setUp()
+ self.protocol.is_client = False
+ self.protocol.side = "server"
+
+ def test_local_close_send_close_frame_timeout(self):
+ self.protocol.close_timeout = 10 * MS
+ self.make_drain_slow(50 * MS)
+ # If we can't send a close frame, time out in 10ms.
+ # Check the timing within -1/+9ms for robustness.
+ with self.assertCompletesWithin(9 * MS, 19 * MS):
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+ self.assertConnectionClosed(1006, "")
+
+ def test_local_close_receive_close_frame_timeout(self):
+ self.protocol.close_timeout = 10 * MS
+ # If the client doesn't send a close frame, time out in 10ms.
+ # Check the timing within -1/+9ms for robustness.
+ with self.assertCompletesWithin(9 * MS, 19 * MS):
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+ self.assertConnectionClosed(1006, "")
+
+ def test_local_close_connection_lost_timeout_after_write_eof(self):
+ self.protocol.close_timeout = 10 * MS
+ # If the client doesn't close its side of the TCP connection after we
+ # half-close our side with write_eof(), time out in 10ms.
+ # Check the timing within -1/+9ms for robustness.
+ with self.assertCompletesWithin(9 * MS, 19 * MS):
+ # HACK: disable write_eof => other end drops connection emulation.
+ self.transport._eof = True
+ self.receive_frame(self.close_frame)
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+ self.assertConnectionClosed(1000, "close")
+
+ def test_local_close_connection_lost_timeout_after_close(self):
+ self.protocol.close_timeout = 10 * MS
+ # If the client doesn't close its side of the TCP connection after we
+ # half-close our side with write_eof() and close it with close(), time
+ # out in 20ms.
+ # Check the timing within -1/+9ms for robustness.
+ with self.assertCompletesWithin(19 * MS, 29 * MS):
+ # HACK: disable write_eof => other end drops connection emulation.
+ self.transport._eof = True
+ # HACK: disable close => other end drops connection emulation.
+ self.transport._closing = True
+ self.receive_frame(self.close_frame)
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+ self.assertConnectionClosed(1000, "close")
+
+
+class ClientTests(CommonTests, AsyncioTestCase):
+ def setUp(self):
+ super().setUp()
+ self.protocol.is_client = True
+ self.protocol.side = "client"
+
+ def test_local_close_send_close_frame_timeout(self):
+ self.protocol.close_timeout = 10 * MS
+ self.make_drain_slow(50 * MS)
+ # If we can't send a close frame, time out in 20ms.
+ # - 10ms waiting for sending a close frame
+ # - 10ms waiting for receiving a half-close
+ # Check the timing within -1/+9ms for robustness.
+ with self.assertCompletesWithin(19 * MS, 29 * MS):
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+ self.assertConnectionClosed(1006, "")
+
+ def test_local_close_receive_close_frame_timeout(self):
+ self.protocol.close_timeout = 10 * MS
+ # If the server doesn't send a close frame, time out in 20ms:
+ # - 10ms waiting for receiving a close frame
+ # - 10ms waiting for receiving a half-close
+ # Check the timing within -1/+9ms for robustness.
+ with self.assertCompletesWithin(19 * MS, 29 * MS):
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+ self.assertConnectionClosed(1006, "")
+
+ def test_local_close_connection_lost_timeout_after_write_eof(self):
+ self.protocol.close_timeout = 10 * MS
+ # If the server doesn't half-close its side of the TCP connection
+ # after we send a close frame, time out in 20ms:
+ # - 10ms waiting for receiving a half-close
+ # - 10ms waiting for receiving a close after write_eof
+ # Check the timing within -1/+9ms for robustness.
+ with self.assertCompletesWithin(19 * MS, 29 * MS):
+ # HACK: disable write_eof => other end drops connection emulation.
+ self.transport._eof = True
+ self.receive_frame(self.close_frame)
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+ self.assertConnectionClosed(1000, "close")
+
+ def test_local_close_connection_lost_timeout_after_close(self):
+ self.protocol.close_timeout = 10 * MS
+        # If the server doesn't close its side of the TCP connection after we
+        # half-close our side with write_eof() and close it with close(), time
+        # out in 30ms:
+ # - 10ms waiting for receiving a half-close
+ # - 10ms waiting for receiving a close after write_eof
+ # - 10ms waiting for receiving a close after close
+ # Check the timing within -1/+9ms for robustness.
+ with self.assertCompletesWithin(29 * MS, 39 * MS):
+ # HACK: disable write_eof => other end drops connection emulation.
+ self.transport._eof = True
+ # HACK: disable close => other end drops connection emulation.
+ self.transport._closing = True
+ self.receive_frame(self.close_frame)
+ self.loop.run_until_complete(self.protocol.close(reason="close"))
+ self.assertConnectionClosed(1000, "close")
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_uri.py b/testing/web-platform/tests/tools/third_party/websockets/tests/test_uri.py
new file mode 100644
index 0000000000..e41860b8e4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_uri.py
@@ -0,0 +1,33 @@
+import unittest
+
+from websockets.exceptions import InvalidURI
+from websockets.uri import *
+
+
+VALID_URIS = [
+ ("ws://localhost/", (False, "localhost", 80, "/", None)),
+ ("wss://localhost/", (True, "localhost", 443, "/", None)),
+ ("ws://localhost/path?query", (False, "localhost", 80, "/path?query", None)),
+ ("WS://LOCALHOST/PATH?QUERY", (False, "localhost", 80, "/PATH?QUERY", None)),
+ ("ws://user:pass@localhost/", (False, "localhost", 80, "/", ("user", "pass"))),
+]
+
+INVALID_URIS = [
+ "http://localhost/",
+ "https://localhost/",
+ "ws://localhost/path#fragment",
+ "ws://user@localhost/",
+]
+
+
+class URITests(unittest.TestCase):
+ def test_success(self):
+ for uri, parsed in VALID_URIS:
+ with self.subTest(uri=uri):
+ self.assertEqual(parse_uri(uri), parsed)
+
+ def test_error(self):
+ for uri in INVALID_URIS:
+ with self.subTest(uri=uri):
+ with self.assertRaises(InvalidURI):
+ parse_uri(uri)
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/test_utils.py b/testing/web-platform/tests/tools/third_party/websockets/tests/test_utils.py
new file mode 100644
index 0000000000..e5570f098b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/test_utils.py
@@ -0,0 +1,92 @@
+import itertools
+import unittest
+
+from websockets.utils import apply_mask as py_apply_mask
+
+
+class UtilsTests(unittest.TestCase):
+ @staticmethod
+ def apply_mask(*args, **kwargs):
+ return py_apply_mask(*args, **kwargs)
+
+ apply_mask_type_combos = list(itertools.product([bytes, bytearray], repeat=2))
+
+ apply_mask_test_values = [
+ (b"", b"1234", b""),
+ (b"aBcDe", b"\x00\x00\x00\x00", b"aBcDe"),
+ (b"abcdABCD", b"1234", b"PPPPpppp"),
+ (b"abcdABCD" * 10, b"1234", b"PPPPpppp" * 10),
+ ]
+
+ def test_apply_mask(self):
+ for data_type, mask_type in self.apply_mask_type_combos:
+ for data_in, mask, data_out in self.apply_mask_test_values:
+ data_in, mask = data_type(data_in), mask_type(mask)
+
+ with self.subTest(data_in=data_in, mask=mask):
+ result = self.apply_mask(data_in, mask)
+ self.assertEqual(result, data_out)
+
+ def test_apply_mask_memoryview(self):
+ for data_type, mask_type in self.apply_mask_type_combos:
+ for data_in, mask, data_out in self.apply_mask_test_values:
+ data_in, mask = data_type(data_in), mask_type(mask)
+ data_in, mask = memoryview(data_in), memoryview(mask)
+
+ with self.subTest(data_in=data_in, mask=mask):
+ result = self.apply_mask(data_in, mask)
+ self.assertEqual(result, data_out)
+
+ def test_apply_mask_non_contiguous_memoryview(self):
+ for data_type, mask_type in self.apply_mask_type_combos:
+ for data_in, mask, data_out in self.apply_mask_test_values:
+ data_in, mask = data_type(data_in), mask_type(mask)
+ data_in, mask = memoryview(data_in), memoryview(mask)
+ data_in, mask = data_in[::-1], mask[::-1]
+ data_out = data_out[::-1]
+
+ with self.subTest(data_in=data_in, mask=mask):
+ result = self.apply_mask(data_in, mask)
+ self.assertEqual(result, data_out)
+
+ def test_apply_mask_check_input_types(self):
+ for data_in, mask in [(None, None), (b"abcd", None), (None, b"abcd")]:
+ with self.subTest(data_in=data_in, mask=mask):
+ with self.assertRaises(TypeError):
+ self.apply_mask(data_in, mask)
+
+ def test_apply_mask_check_mask_length(self):
+ for data_in, mask in [
+ (b"", b""),
+ (b"abcd", b"123"),
+ (b"", b"aBcDe"),
+ (b"12345678", b"12345678"),
+ ]:
+ with self.subTest(data_in=data_in, mask=mask):
+ with self.assertRaises(ValueError):
+ self.apply_mask(data_in, mask)
+
+
+try:
+ from websockets.speedups import apply_mask as c_apply_mask
+except ImportError: # pragma: no cover
+ pass
+else:
+
+ class SpeedupsTests(UtilsTests):
+ @staticmethod
+ def apply_mask(*args, **kwargs):
+ return c_apply_mask(*args, **kwargs)
+
+ def test_apply_mask_non_contiguous_memoryview(self):
+ for data_type, mask_type in self.apply_mask_type_combos:
+ for data_in, mask, data_out in self.apply_mask_test_values:
+ data_in, mask = data_type(data_in), mask_type(mask)
+ data_in, mask = memoryview(data_in), memoryview(mask)
+ data_in, mask = data_in[::-1], mask[::-1]
+ data_out = data_out[::-1]
+
+ with self.subTest(data_in=data_in, mask=mask):
+ # The C extension only supports contiguous memoryviews.
+ with self.assertRaises(TypeError):
+ self.apply_mask(data_in, mask)
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tests/utils.py b/testing/web-platform/tests/tools/third_party/websockets/tests/utils.py
new file mode 100644
index 0000000000..983a91edf0
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tests/utils.py
@@ -0,0 +1,93 @@
+import asyncio
+import contextlib
+import functools
+import logging
+import os
+import time
+import unittest
+
+
+class AsyncioTestCase(unittest.TestCase):
+ """
+ Base class for tests that sets up an isolated event loop for each test.
+
+ """
+
+ def __init_subclass__(cls, **kwargs):
+ """
+ Convert test coroutines to test functions.
+
+        This supports asynchronous tests transparently.
+
+ """
+ super().__init_subclass__(**kwargs)
+ for name in unittest.defaultTestLoader.getTestCaseNames(cls):
+ test = getattr(cls, name)
+ if asyncio.iscoroutinefunction(test):
+ setattr(cls, name, cls.convert_async_to_sync(test))
+
+ @staticmethod
+ def convert_async_to_sync(test):
+ """
+ Convert a test coroutine to a test function.
+
+ """
+
+ @functools.wraps(test)
+ def test_func(self, *args, **kwargs):
+ return self.loop.run_until_complete(test(self, *args, **kwargs))
+
+ return test_func
+
+ def setUp(self):
+ super().setUp()
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self.loop)
+
+ def tearDown(self):
+ self.loop.close()
+ super().tearDown()
+
+ def run_loop_once(self):
+ # Process callbacks scheduled with call_soon by appending a callback
+ # to stop the event loop then running it until it hits that callback.
+ self.loop.call_soon(self.loop.stop)
+ self.loop.run_forever()
+
+ @contextlib.contextmanager
+ def assertNoLogs(self, logger="websockets", level=logging.ERROR):
+ """
+ No message is logged on the given logger with at least the given level.
+
+ """
+ with self.assertLogs(logger, level) as logs:
+ # We want to test that no log message is emitted
+ # but assertLogs expects at least one log message.
+ logging.getLogger(logger).log(level, "dummy")
+ yield
+
+ level_name = logging.getLevelName(level)
+ self.assertEqual(logs.output, [f"{level_name}:{logger}:dummy"])
+
+ def assertDeprecationWarnings(self, recorded_warnings, expected_warnings):
+ """
+ Check recorded deprecation warnings match a list of expected messages.
+
+ """
+ self.assertEqual(len(recorded_warnings), len(expected_warnings))
+ for recorded, expected in zip(recorded_warnings, expected_warnings):
+ actual = recorded.message
+ self.assertEqual(str(actual), expected)
+ self.assertEqual(type(actual), DeprecationWarning)
+
+
+# Unit for timeouts. May be increased on slow machines by setting the
+# WEBSOCKETS_TESTS_TIMEOUT_FACTOR environment variable.
+MS = 0.001 * int(os.environ.get("WEBSOCKETS_TESTS_TIMEOUT_FACTOR", 1))
+
+# asyncio's debug mode has a 10x performance penalty for this test suite.
+if os.environ.get("PYTHONASYNCIODEBUG"): # pragma: no cover
+ MS *= 10
+
+# Ensure that timeouts are larger than the clock's resolution (for Windows).
+MS = max(MS, 2.5 * time.get_clock_info("monotonic").resolution)
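The `AsyncioTestCase` helper above converts coroutine test methods into regular test functions at class-creation time and gives each test its own event loop. A minimal sketch of how a test module might build on it; the import path and the test itself are illustrative assumptions, not part of the vendored suite:

```python
import asyncio

from utils import MS, AsyncioTestCase  # assumed import path for the helper above


class SleepTests(AsyncioTestCase):
    # Written as a coroutine; __init_subclass__ wraps it so unittest can run it
    # synchronously on the per-test event loop created in setUp().
    async def test_sleep(self):
        await asyncio.sleep(MS)
        self.assertTrue(True)
```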
diff --git a/testing/web-platform/tests/tools/third_party/websockets/tox.ini b/testing/web-platform/tests/tools/third_party/websockets/tox.ini
new file mode 100644
index 0000000000..825e34061f
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/websockets/tox.ini
@@ -0,0 +1,28 @@
+[tox]
+envlist = py36,py37,py38,coverage,black,flake8,isort,mypy
+
+[testenv]
+commands = python -W default -m unittest {posargs}
+
+[testenv:coverage]
+commands =
+ python -m coverage erase
+ python -W default -m coverage run -m unittest {posargs}
+ python -m coverage report --show-missing --fail-under=100
+deps = coverage
+
+[testenv:black]
+commands = black --check src tests
+deps = black
+
+[testenv:flake8]
+commands = flake8 src tests
+deps = flake8
+
+[testenv:isort]
+commands = isort --check-only --recursive src tests
+deps = isort
+
+[testenv:mypy]
+commands = mypy --strict src
+deps = mypy
diff --git a/testing/web-platform/tests/tools/third_party/zipp/.flake8 b/testing/web-platform/tests/tools/third_party/zipp/.flake8
new file mode 100644
index 0000000000..790c109fdb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/.flake8
@@ -0,0 +1,9 @@
+[flake8]
+max-line-length = 88
+ignore =
+ # W503 violates spec https://github.com/PyCQA/pycodestyle/issues/513
+ W503
+ # W504 has issues https://github.com/OCA/maintainer-quality-tools/issues/545
+ W504
+ # Black creates whitespace before colon
+ E203
diff --git a/testing/web-platform/tests/tools/third_party/zipp/.github/workflows/main.yml b/testing/web-platform/tests/tools/third_party/zipp/.github/workflows/main.yml
new file mode 100644
index 0000000000..8c5c232c36
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/.github/workflows/main.yml
@@ -0,0 +1,42 @@
+name: Automated Tests
+
+on: [push, pull_request]
+
+jobs:
+ test:
+ strategy:
+ matrix:
+ python: [3.6, 3.8, 3.9]
+ platform: [ubuntu-latest, macos-latest, windows-latest]
+ runs-on: ${{ matrix.platform }}
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python }}
+ - name: Install tox
+ run: |
+ python -m pip install tox
+ - name: Run tests
+ run: tox
+
+ release:
+ needs: test
+ if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: Install tox
+ run: |
+ python -m pip install tox
+ - name: Release
+ run: tox -e release
+ env:
+ TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/testing/web-platform/tests/tools/third_party/zipp/.pre-commit-config.yaml b/testing/web-platform/tests/tools/third_party/zipp/.pre-commit-config.yaml
new file mode 100644
index 0000000000..922d94247a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/.pre-commit-config.yaml
@@ -0,0 +1,5 @@
+repos:
+- repo: https://github.com/ambv/black
+ rev: 18.9b0
+ hooks:
+ - id: black
diff --git a/testing/web-platform/tests/tools/third_party/zipp/.readthedocs.yml b/testing/web-platform/tests/tools/third_party/zipp/.readthedocs.yml
new file mode 100644
index 0000000000..8ae4468428
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/.readthedocs.yml
@@ -0,0 +1,5 @@
+python:
+ version: 3
+ extra_requirements:
+ - docs
+ pip_install: true
diff --git a/testing/web-platform/tests/tools/third_party/zipp/.travis.yml b/testing/web-platform/tests/tools/third_party/zipp/.travis.yml
new file mode 100644
index 0000000000..b7d8f3ac9d
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/.travis.yml
@@ -0,0 +1,28 @@
+dist: xenial
+language: python
+
+python:
+- 2.7
+- 3.6
+- &latest_py3 3.8
+
+jobs:
+ fast_finish: true
+ include:
+ - stage: deploy
+ if: tag IS present
+ python: *latest_py3
+ before_script: skip
+ script: tox -e release
+
+cache: pip
+
+install:
+- pip install tox tox-venv
+
+before_script:
+ # Disable IPv6. Ref travis-ci/travis-ci#8361
+ - if [ "${TRAVIS_OS_NAME}" == "linux" ]; then
+ sudo sh -c 'echo 0 > /proc/sys/net/ipv6/conf/all/disable_ipv6';
+ fi
+script: tox
diff --git a/testing/web-platform/tests/tools/third_party/zipp/CHANGES.rst b/testing/web-platform/tests/tools/third_party/zipp/CHANGES.rst
new file mode 100644
index 0000000000..a464a6324b
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/CHANGES.rst
@@ -0,0 +1,100 @@
+v1.2.0
+======
+
+#44: ``zipp.Path.open()`` now supports a signature compatible
+with ``pathlib.Path.open()``, accepting text (default) or binary
+modes and soliciting keyword parameters passed through to
+``io.TextIOWrapper`` (encoding, newline, etc). The stream is
+opened in text-mode by default now. ``open`` no
+longer accepts ``pwd`` as a positional argument and does not
+accept the ``force_zip64`` parameter at all. This change is
+a backward-incompatible change for that single function.
+
+v1.1.1
+======
+
+#43: Restored performance of implicit dir computation.
+
+v1.1.0
+======
+
+#32: For read-only zip files, complexity of ``.exists`` and
+``joinpath`` is now constant time instead of ``O(n)``, preventing
+quadratic time in common use-cases and rendering large
+zip files unusable for Path. Big thanks to Benjy Weinberger
+for the bug report and contributed fix (#33).
+
+v1.0.0
+======
+
+Re-release of 0.6 to correspond with release as found in
+Python 3.8.
+
+v0.6.0
+======
+
+#12: When adding implicit dirs, ensure that ancestral directories
+are added and that duplicates are excluded.
+
+The library now relies on
+`more_itertools <https://pypi.org/project/more_itertools>`_.
+
+v0.5.2
+======
+
+#7: Parent of a directory now actually returns the parent.
+
+v0.5.1
+======
+
+Declared package as backport.
+
+v0.5.0
+======
+
+Add ``.joinpath()`` method and ``.parent`` property.
+
+Now a backport release of the ``zipfile.Path`` class.
+
+v0.4.0
+======
+
+#4: Add support for zip files with implied directories.
+
+v0.3.3
+======
+
+#3: Fix issue where ``.name`` on a directory was empty.
+
+v0.3.2
+======
+
+#2: Fix TypeError on Python 2.7 when classic division is used.
+
+v0.3.1
+======
+
+#1: Fix TypeError on Python 3.5 when joining to a path-like object.
+
+v0.3.0
+======
+
+Add support for constructing a ``zipp.Path`` from any path-like
+object.
+
+``zipp.Path`` is now a new-style class on Python 2.7.
+
+v0.2.1
+======
+
+Fix issue with ``__str__``.
+
+v0.2.0
+======
+
+Drop reliance on future-fstrings.
+
+v0.1.0
+======
+
+Initial release with basic functionality.
diff --git a/testing/web-platform/tests/tools/third_party/zipp/LICENSE b/testing/web-platform/tests/tools/third_party/zipp/LICENSE
new file mode 100644
index 0000000000..5e795a61f3
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/LICENSE
@@ -0,0 +1,7 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/testing/web-platform/tests/tools/third_party/zipp/PKG-INFO b/testing/web-platform/tests/tools/third_party/zipp/PKG-INFO
new file mode 100644
index 0000000000..33ef1cf01c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/PKG-INFO
@@ -0,0 +1,39 @@
+Metadata-Version: 2.1
+Name: zipp
+Version: 1.2.0
+Summary: Backport of pathlib-compatible object wrapper for zip files
+Home-page: https://github.com/jaraco/zipp
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Description: .. image:: https://img.shields.io/pypi/v/zipp.svg
+ :target: https://pypi.org/project/zipp
+
+ .. image:: https://img.shields.io/pypi/pyversions/zipp.svg
+
+ .. image:: https://img.shields.io/travis/jaraco/zipp/master.svg
+ :target: https://travis-ci.org/jaraco/zipp
+
+ .. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+ :alt: Code style: Black
+
+ .. image:: https://img.shields.io/appveyor/ci/jaraco/zipp/master.svg
+ :target: https://ci.appveyor.com/project/jaraco/zipp/branch/master
+
+ .. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest
+ .. :target: https://zipp.readthedocs.io/en/latest/?badge=latest
+
+
+ A pathlib-compatible Zipfile object wrapper. A backport of the
+ `Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Requires-Python: >=2.7
+Provides-Extra: testing
+Provides-Extra: docs
diff --git a/testing/web-platform/tests/tools/third_party/zipp/README.rst b/testing/web-platform/tests/tools/third_party/zipp/README.rst
new file mode 100644
index 0000000000..ce128a32ba
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/README.rst
@@ -0,0 +1,21 @@
+.. image:: https://img.shields.io/pypi/v/zipp.svg
+ :target: https://pypi.org/project/zipp
+
+.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
+
+.. image:: https://img.shields.io/travis/jaraco/zipp/master.svg
+ :target: https://travis-ci.org/jaraco/zipp
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+ :alt: Code style: Black
+
+.. image:: https://img.shields.io/appveyor/ci/jaraco/zipp/master.svg
+ :target: https://ci.appveyor.com/project/jaraco/zipp/branch/master
+
+.. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest
+.. :target: https://zipp.readthedocs.io/en/latest/?badge=latest
+
+
+A pathlib-compatible Zipfile object wrapper. A backport of the
+`Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_.
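The wrapper described above mirrors the ``pathlib.Path`` API over a zip archive. A minimal usage sketch (illustrative only, not part of the vendored files; assumes zipp >= 1.2, where ``open()`` defaults to text mode):

```python
import io
import zipfile

import zipp  # the vendored module shown in this diff

# Build a small in-memory archive to traverse.
buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("dir/file.txt", "hello")

root = zipp.Path(zipfile.ZipFile(buf))
(subdir,) = root.iterdir()          # the implied "dir/" directory
print(subdir.name)                  # dir
entry = subdir / "file.txt"
print(entry.read_text())            # hello
with entry.open() as f:             # text mode by default as of v1.2.0
    print(f.read())                 # hello
```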
diff --git a/testing/web-platform/tests/tools/third_party/zipp/appveyor.yml b/testing/web-platform/tests/tools/third_party/zipp/appveyor.yml
new file mode 100644
index 0000000000..f35aa27d68
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/appveyor.yml
@@ -0,0 +1,24 @@
+environment:
+
+ APPVEYOR: true
+
+ matrix:
+ - PYTHON: "C:\\Python36-x64"
+ - PYTHON: "C:\\Python27-x64"
+
+install:
+ # symlink python from a directory with a space
+ - "mklink /d \"C:\\Program Files\\Python\" %PYTHON%"
+ - "SET PYTHON=\"C:\\Program Files\\Python\""
+ - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
+
+build: off
+
+cache:
+ - '%LOCALAPPDATA%\pip\Cache'
+
+test_script:
+ - "python -m pip install -U tox tox-venv virtualenv"
+ - "tox"
+
+version: '{build}'
diff --git a/testing/web-platform/tests/tools/third_party/zipp/conftest.py b/testing/web-platform/tests/tools/third_party/zipp/conftest.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/conftest.py
diff --git a/testing/web-platform/tests/tools/third_party/zipp/docs/conf.py b/testing/web-platform/tests/tools/third_party/zipp/docs/conf.py
new file mode 100644
index 0000000000..41b53557fb
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/docs/conf.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker']
+
+master_doc = "index"
+
+link_files = {
+ '../CHANGES.rst': dict(
+ using=dict(GH='https://github.com'),
+ replace=[
+ dict(
+ pattern=r'(Issue #|\B#)(?P<issue>\d+)',
+ url='{package_url}/issues/{issue}',
+ ),
+ dict(
+ pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
+ with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
+ ),
+ dict(
+ pattern=r'PEP[- ](?P<pep_number>\d+)',
+ url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
+ ),
+ ],
+ )
+}
diff --git a/testing/web-platform/tests/tools/third_party/zipp/docs/history.rst b/testing/web-platform/tests/tools/third_party/zipp/docs/history.rst
new file mode 100644
index 0000000000..8e217503ba
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/docs/history.rst
@@ -0,0 +1,8 @@
+:tocdepth: 2
+
+.. _changes:
+
+History
+*******
+
+.. include:: ../CHANGES (links).rst
diff --git a/testing/web-platform/tests/tools/third_party/zipp/docs/index.rst b/testing/web-platform/tests/tools/third_party/zipp/docs/index.rst
new file mode 100644
index 0000000000..ff49bf9dc7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/docs/index.rst
@@ -0,0 +1,22 @@
+Welcome to zipp documentation!
+========================================
+
+.. toctree::
+ :maxdepth: 1
+
+ history
+
+
+.. automodule:: zipp
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/testing/web-platform/tests/tools/third_party/zipp/mypy.ini b/testing/web-platform/tests/tools/third_party/zipp/mypy.ini
new file mode 100644
index 0000000000..976ba02946
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/mypy.ini
@@ -0,0 +1,2 @@
+[mypy]
+ignore_missing_imports = True
diff --git a/testing/web-platform/tests/tools/third_party/zipp/pyproject.toml b/testing/web-platform/tests/tools/third_party/zipp/pyproject.toml
new file mode 100644
index 0000000000..3afc8c33b7
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/pyproject.toml
@@ -0,0 +1,6 @@
+[build-system]
+requires = ["setuptools>=34.4", "wheel", "setuptools_scm>=1.15"]
+build-backend = "setuptools.build_meta"
+
+[tool.black]
+skip-string-normalization = true
diff --git a/testing/web-platform/tests/tools/third_party/zipp/pytest.ini b/testing/web-platform/tests/tools/third_party/zipp/pytest.ini
new file mode 100644
index 0000000000..d7f0b11559
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/pytest.ini
@@ -0,0 +1,9 @@
+[pytest]
+norecursedirs=dist build .tox .eggs
+addopts=--doctest-modules
+doctest_optionflags=ALLOW_UNICODE ELLIPSIS
+# workaround for warning pytest-dev/pytest#6178
+junit_family=xunit2
+filterwarnings=
+ # https://github.com/pytest-dev/pytest/issues/6928
+ ignore:direct construction of .*Item has been deprecated:DeprecationWarning
diff --git a/testing/web-platform/tests/tools/third_party/zipp/setup.cfg b/testing/web-platform/tests/tools/third_party/zipp/setup.cfg
new file mode 100644
index 0000000000..ef4abd248a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/setup.cfg
@@ -0,0 +1,45 @@
+[bdist_wheel]
+universal = 1
+
+[metadata]
+license_file = LICENSE
+name = zipp
+author = Jason R. Coombs
+author_email = jaraco@jaraco.com
+description = Backport of pathlib-compatible object wrapper for zip files
+long_description = file:README.rst
+url = https://github.com/jaraco/zipp
+classifiers =
+ Development Status :: 5 - Production/Stable
+ Intended Audience :: Developers
+ License :: OSI Approved :: MIT License
+ Programming Language :: Python :: 2.7
+ Programming Language :: Python :: 3
+
+[options]
+py_modules = zipp
+packages = find:
+include_package_data = true
+python_requires = >=2.7
+install_requires =
+ contextlib2; python_version < "3.4"
+setup_requires = setuptools_scm >= 1.15.0
+
+[options.extras_require]
+testing =
+
+ pathlib2
+ unittest2
+ jaraco.itertools
+ func-timeout
+docs =
+ sphinx
+ jaraco.packaging >= 3.2
+ rst.linker >= 1.9
+
+[options.entry_points]
+
+[egg_info]
+tag_build =
+tag_date = 0
+
diff --git a/testing/web-platform/tests/tools/third_party/zipp/setup.py b/testing/web-platform/tests/tools/third_party/zipp/setup.py
new file mode 100644
index 0000000000..827e955fcd
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/setup.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+
+import setuptools
+
+if __name__ == "__main__":
+ setuptools.setup(use_scm_version=True)
diff --git a/testing/web-platform/tests/tools/third_party/zipp/skeleton.md b/testing/web-platform/tests/tools/third_party/zipp/skeleton.md
new file mode 100644
index 0000000000..52b97f09b4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/skeleton.md
@@ -0,0 +1,137 @@
+# Overview
+
+This project is merged with [skeleton](https://github.com/jaraco/skeleton). What is skeleton? It's the scaffolding of a Python project jaraco [introduced in his blog](https://blog.jaraco.com/a-project-skeleton-for-python-projects/). It seeks to provide a means to re-use techniques and inherit advances when managing projects for distribution.
+
+## An SCM Managed Approach
+
+While maintaining dozens of projects in PyPI, jaraco derives best practices for project distribution and publishes them in the [skeleton repo](https://github.com/jaraco/skeleton), a git repo capturing the evolution and culmination of these best practices.
+
+It's intended to be used by a new or existing project to adopt these practices and honed and proven techniques. Adopters are encouraged to use the project directly and maintain a small deviation from the technique, make their own fork for more substantial changes unique to their environment or preferences, or simply adopt the skeleton once and abandon it thereafter.
+
+The primary advantage to using an SCM for maintaining these techniques is that those tools help facilitate the merge between the template and its adopting projects.
+
+Another advantage to using an SCM-managed approach is that tools like GitHub recognize that a change in the skeleton is the _same change_ across all projects that merge with that skeleton. Without the ancestry, with a traditional copy/paste approach, a [commit like this](https://github.com/jaraco/skeleton/commit/12eed1326e1bc26ce256e7b3f8cd8d3a5beab2d5) would produce notifications in the upstream project issue for each and every application, but because it's centralized, GitHub provides just the one notification when the change is added to the skeleton.
+
+# Usage
+
+## new projects
+
+To use skeleton for a new project, simply pull the skeleton into a new project:
+
+```
+$ git init my-new-project
+$ cd my-new-project
+$ git pull gh://jaraco/skeleton
+```
+
+Now customize the project to suit your individual project needs.
+
+## existing projects
+
+If you have an existing project, you can still incorporate the skeleton by merging it into the codebase.
+
+```
+$ git merge skeleton --allow-unrelated-histories
+```
+
+The `--allow-unrelated-histories` option is necessary because the history of the skeleton was previously unrelated to the existing codebase. Resolve any merge conflicts and commit to the master branch, and the project is now based on the shared skeleton.
+
+## Updating
+
+Whenever a change is needed or desired for the general packaging technique, it can be made in the skeleton project and then merged into each of the derived projects as needed, ideally before each release. As a result, features and best practices for packaging are centrally maintained and readily trickle into a whole suite of packages. This technique lowers the amount of tedious work necessary to create or maintain a project and, coupled with other techniques like continuous integration and deployment, lowers the cost of creating and maintaining refined Python projects to just a few familiar git operations.
+
+Thereafter, the target project can make whatever customizations it deems relevant to the scaffolding. The project may even at some point decide that the divergence is too great to merit renewed merging with the original skeleton. This approach applies maximal guidance while creating minimal constraints.
+
+# Features
+
+The features/techniques employed by the skeleton include:
+
+- PEP 517/518 based build relying on setuptools as the build tool
+- setuptools declarative configuration using setup.cfg
+- tox for running tests
+- A README.rst as reStructuredText with some popular badges, but with readthedocs and appveyor badges commented out
+- A CHANGES.rst file intended for publishing release notes about the project
+- Use of [black](https://black.readthedocs.io/en/stable/) for code formatting (disabled on unsupported Python 3.5 and earlier)
+
+## Packaging Conventions
+
+A pyproject.toml is included to enable PEP 517 and PEP 518 compatibility and declares the requirements necessary to build the project on setuptools (a minimum version compatible with setup.cfg declarative config).
+
+The setup.cfg file implements the following features:
+
+- Assumes universal wheel for release
+- Advertises the project's LICENSE file (MIT by default)
+- Reads the README.rst file into the long description
+- Some common Trove classifiers
+- Includes all packages discovered in the repo
+- Data files in the package are also included (not just Python files)
+- Declares the required Python versions
+- Declares install requirements (empty by default)
+- Declares setup requirements for legacy environments
+- Supplies two 'extras':
+ - testing: requirements for running tests
+ - docs: requirements for building docs
+ - these extras split the declaration into "upstream" (requirements as declared by the skeleton) and "local" (those specific to the local project); these markers help avoid merge conflicts
+- Placeholder for defining entry points
+
+Additionally, the setup.py file declares `use_scm_version` which relies on [setuptools_scm](https://pypi.org/project/setuptools_scm) to do two things:
+
+- derive the project version from SCM tags
+- ensure that all files committed to the repo are automatically included in releases
+
+## Running Tests
+
+The skeleton assumes the developer has [tox](https://pypi.org/project/tox) installed. The developer is expected to run `tox` to run tests on the current Python version using [pytest](https://pypi.org/project/pytest).
+
+Other environments (invoked with `tox -e {name}`) supplied include:
+
+ - a `build-docs` environment to build the documentation
+ - a `release` environment to publish the package to PyPI
+
+A pytest.ini is included to define common options around running tests. In particular:
+
+- rely on default test discovery in the current directory
+- avoid recursing into common directories not containing tests
+- run doctests on modules and invoke flake8 tests
+- in doctests, allow unicode literals and regular literals to match, allowing for doctests to run on Python 2 and 3. Also enable ELLIPSIS, a default that would be undone by supplying the prior option.
+- filters out known warnings caused by libraries/functionality included by the skeleton
+
+Relies on a .flake8 file to correct some default behaviors:
+
+- disable mutually incompatible rules W503 and W504
+- support for black format
+
+## Continuous Integration
+
+The project is pre-configured to run tests in [Travis-CI](https://travis-ci.org) (.travis.yml). Any new project must be enabled either through the Travis web site or with the `travis enable` command.
+
+Features include:
+- test against Python 2 and 3
+- run on Ubuntu Xenial
+- correct for broken IPv6
+
+Also provided is a minimal template for running under Appveyor (Windows).
+
+### Continuous Deployments
+
+In addition to running tests, an additional deploy stage is configured to automatically release tagged commits to PyPI using [API tokens](https://pypi.org/help/#apitoken). The release process expects an authorized token to be configured with Travis as the TWINE_PASSWORD environment variable. After the Travis project is created, configure the token through the web UI or with a command like the following (bash syntax):
+
+```
+TWINE_PASSWORD={token} travis env copy TWINE_PASSWORD
+```
+
+## Building Documentation
+
+Documentation is automatically built by [Read the Docs](https://readthedocs.org) when the project is registered with it, by way of the .readthedocs.yml file. To test the docs build manually, a tox env may be invoked as `tox -e build-docs`. Both techniques rely on the dependencies declared in `setup.cfg/options.extras_require.docs`.
+
+In addition to building the sphinx docs scaffolded in `docs/`, the docs build a `history.html` file that first injects release dates and hyperlinks into the CHANGES.rst before incorporating it as history in the docs.
+
+## Cutting releases
+
+By default, tagged commits are released through the continuous integration deploy stage.
+
+Releases may also be cut manually by invoking the tox environment `release` with the PyPI token set as the TWINE_PASSWORD:
+
+```
+TWINE_PASSWORD={token} tox -e release
+```
diff --git a/testing/web-platform/tests/tools/third_party/zipp/test_zipp.py b/testing/web-platform/tests/tools/third_party/zipp/test_zipp.py
new file mode 100644
index 0000000000..810d10bd68
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/test_zipp.py
@@ -0,0 +1,245 @@
+# coding: utf-8
+
+from __future__ import division, unicode_literals
+
+import io
+import zipfile
+import contextlib
+import tempfile
+import shutil
+import string
+
+try:
+ import pathlib
+except ImportError:
+ import pathlib2 as pathlib
+
+if not hasattr(contextlib, 'ExitStack'):
+ import contextlib2
+ contextlib.ExitStack = contextlib2.ExitStack
+
+try:
+ import unittest
+
+ unittest.TestCase.subTest
+except AttributeError:
+ import unittest2 as unittest
+
+import jaraco.itertools
+import func_timeout
+
+import zipp
+
+__metaclass__ = type
+consume = tuple
+
+
+def add_dirs(zf):
+ """
+ Given a writable zip file zf, inject directory entries for
+ any directories implied by the presence of children.
+ """
+ for name in zipp.CompleteDirs._implied_dirs(zf.namelist()):
+ zf.writestr(name, b"")
+ return zf
+
+
+def build_alpharep_fixture():
+ """
+ Create a zip file with this structure:
+
+ .
+ ├── a.txt
+ ├── b
+ │ ├── c.txt
+ │ ├── d
+ │ │ └── e.txt
+ │ └── f.txt
+ └── g
+ └── h
+ └── i.txt
+
+ This fixture has the following key characteristics:
+
+ - a file at the root (a)
+ - a file two levels deep (b/d/e)
+ - multiple files in a directory (b/c, b/f)
+ - a directory containing only a directory (g/h)
+
+ "alpha" because it uses alphabet
+ "rep" because it's a representative example
+ """
+ data = io.BytesIO()
+ zf = zipfile.ZipFile(data, "w")
+ zf.writestr("a.txt", b"content of a")
+ zf.writestr("b/c.txt", b"content of c")
+ zf.writestr("b/d/e.txt", b"content of e")
+ zf.writestr("b/f.txt", b"content of f")
+ zf.writestr("g/h/i.txt", b"content of i")
+ zf.filename = "alpharep.zip"
+ return zf
+
+
+@contextlib.contextmanager
+def temp_dir():
+ tmpdir = tempfile.mkdtemp()
+ try:
+ yield pathlib.Path(tmpdir)
+ finally:
+ shutil.rmtree(tmpdir)
+
+
+class TestPath(unittest.TestCase):
+ def setUp(self):
+ self.fixtures = contextlib.ExitStack()
+ self.addCleanup(self.fixtures.close)
+
+ def zipfile_alpharep(self):
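+ # Yield the fixture twice: once as written and once with explicit
+ # directory entries added, each under its own subTest.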
+ with self.subTest():
+ yield build_alpharep_fixture()
+ with self.subTest():
+ yield add_dirs(build_alpharep_fixture())
+
+ def zipfile_ondisk(self):
+ tmpdir = pathlib.Path(self.fixtures.enter_context(temp_dir()))
+ for alpharep in self.zipfile_alpharep():
+ buffer = alpharep.fp
+ alpharep.close()
+ path = tmpdir / alpharep.filename
+ with path.open("wb") as strm:
+ strm.write(buffer.getvalue())
+ yield path
+
+ def test_iterdir_and_types(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ assert root.is_dir()
+ a, b, g = root.iterdir()
+ assert a.is_file()
+ assert b.is_dir()
+ assert g.is_dir()
+ c, f, d = b.iterdir()
+ assert c.is_file() and f.is_file()
+ e, = d.iterdir()
+ assert e.is_file()
+ h, = g.iterdir()
+ i, = h.iterdir()
+ assert i.is_file()
+
+ def test_open(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ a, b, g = root.iterdir()
+ with a.open() as strm:
+ data = strm.read()
+ assert data == "content of a"
+
+ def test_read(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ a, b, g = root.iterdir()
+ assert a.read_text() == "content of a"
+ assert a.read_bytes() == b"content of a"
+
+ def test_joinpath(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ a = root.joinpath("a")
+ assert a.is_file()
+ e = root.joinpath("b").joinpath("d").joinpath("e.txt")
+ assert e.read_text() == "content of e"
+
+ def test_traverse_truediv(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ a = root / "a"
+ assert a.is_file()
+ e = root / "b" / "d" / "e.txt"
+ assert e.read_text() == "content of e"
+
+ def test_traverse_simplediv(self):
+ """
+ Disable the __future__.division when testing traversal.
+ """
+ for alpharep in self.zipfile_alpharep():
+ code = compile(
+ source="zipp.Path(alpharep) / 'a'",
+ filename="(test)",
+ mode="eval",
+ dont_inherit=True,
+ )
+ eval(code)
+
+ def test_pathlike_construction(self):
+ """
+ zipp.Path should be constructable from a path-like object
+ """
+ for zipfile_ondisk in self.zipfile_ondisk():
+ pathlike = pathlib.Path(str(zipfile_ondisk))
+ zipp.Path(pathlike)
+
+ def test_traverse_pathlike(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ root / pathlib.Path("a")
+
+ def test_parent(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ assert (root / 'a').parent.at == ''
+ assert (root / 'a' / 'b').parent.at == 'a/'
+
+ def test_dir_parent(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ assert (root / 'b').parent.at == ''
+ assert (root / 'b/').parent.at == ''
+
+ def test_missing_dir_parent(self):
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ assert (root / 'missing dir/').parent.at == ''
+
+ def test_mutability(self):
+ """
+ If the underlying zipfile is changed, the Path object should
+ reflect that change.
+ """
+ for alpharep in self.zipfile_alpharep():
+ root = zipp.Path(alpharep)
+ a, b, g = root.iterdir()
+ alpharep.writestr('foo.txt', b'foo')
+ alpharep.writestr('bar/baz.txt', b'baz')
+ assert any(
+ child.name == 'foo.txt'
+ for child in root.iterdir())
+ assert (root / 'foo.txt').read_text() == 'foo'
+ baz, = (root / 'bar').iterdir()
+ assert baz.read_text() == 'baz'
+
+ HUGE_ZIPFILE_NUM_ENTRIES = 2 ** 13
+
+ def huge_zipfile(self):
+ """Create a read-only zipfile with a huge number of entries."""
+ strm = io.BytesIO()
+ zf = zipfile.ZipFile(strm, "w")
+ for entry in map(str, range(self.HUGE_ZIPFILE_NUM_ENTRIES)):
+ zf.writestr(entry, entry)
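+ # Mark the archive as read-only so CompleteDirs.make() keeps the
+ # FastLookup subclass instead of downgrading to plain CompleteDirs.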
+ zf.mode = 'r'
+ return zf
+
+ def test_joinpath_constant_time(self):
+ """
+ Ensure joinpath on items in the zipfile is linear time overall (constant time per entry).
+ """
+ root = zipp.Path(self.huge_zipfile())
+ entries = jaraco.itertools.Counter(root.iterdir())
+ for entry in entries:
+ entry.joinpath('suffix')
+ # Check that all entries were iterated
+ assert entries.count == self.HUGE_ZIPFILE_NUM_ENTRIES
+
+ @func_timeout.func_set_timeout(3)
+ def test_implied_dirs_performance(self):
+ data = ['/'.join(string.ascii_lowercase + str(n)) for n in range(10000)]
+ zipp.CompleteDirs._implied_dirs(data)
diff --git a/testing/web-platform/tests/tools/third_party/zipp/tox.ini b/testing/web-platform/tests/tools/third_party/zipp/tox.ini
new file mode 100644
index 0000000000..cb542c136c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/tox.ini
@@ -0,0 +1,36 @@
+[tox]
+envlist = python
+minversion = 3.2
+# https://github.com/jaraco/skeleton/issues/6
+tox_pip_extensions_ext_venv_update = true
+
+[testenv]
+deps =
+ setuptools>=31.0.1
+commands =
+ python -m unittest discover
+usedevelop = True
+extras = testing
+
+[testenv:build-docs]
+extras =
+ docs
+ testing
+changedir = docs
+commands =
+ python -m sphinx . {toxinidir}/build/html
+
+[testenv:release]
+skip_install = True
+deps =
+ pep517>=0.5
+ twine>=1.13
+ path.py
+passenv =
+ TWINE_PASSWORD
+setenv =
+ TWINE_USERNAME = {env:TWINE_USERNAME:__token__}
+commands =
+ python -c "import path; path.Path('dist').rmtree_p()"
+ python -m pep517.build .
+ python -m twine upload dist/*
diff --git a/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/PKG-INFO b/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/PKG-INFO
new file mode 100644
index 0000000000..33ef1cf01c
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/PKG-INFO
@@ -0,0 +1,39 @@
+Metadata-Version: 2.1
+Name: zipp
+Version: 1.2.0
+Summary: Backport of pathlib-compatible object wrapper for zip files
+Home-page: https://github.com/jaraco/zipp
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Description: .. image:: https://img.shields.io/pypi/v/zipp.svg
+ :target: https://pypi.org/project/zipp
+
+ .. image:: https://img.shields.io/pypi/pyversions/zipp.svg
+
+ .. image:: https://img.shields.io/travis/jaraco/zipp/master.svg
+ :target: https://travis-ci.org/jaraco/zipp
+
+ .. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+ :alt: Code style: Black
+
+ .. image:: https://img.shields.io/appveyor/ci/jaraco/zipp/master.svg
+ :target: https://ci.appveyor.com/project/jaraco/zipp/branch/master
+
+ .. .. image:: https://readthedocs.org/projects/zipp/badge/?version=latest
+ .. :target: https://zipp.readthedocs.io/en/latest/?badge=latest
+
+
+ A pathlib-compatible Zipfile object wrapper. A backport of the
+ `Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_.
+
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Requires-Python: >=2.7
+Provides-Extra: testing
+Provides-Extra: docs
diff --git a/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/SOURCES.txt b/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/SOURCES.txt
new file mode 100644
index 0000000000..845b342cef
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/SOURCES.txt
@@ -0,0 +1,24 @@
+.flake8
+.pre-commit-config.yaml
+.readthedocs.yml
+.travis.yml
+CHANGES.rst
+LICENSE
+README.rst
+appveyor.yml
+conftest.py
+pyproject.toml
+setup.cfg
+setup.py
+skeleton.md
+test_zipp.py
+tox.ini
+zipp.py
+docs/conf.py
+docs/history.rst
+docs/index.rst
+zipp.egg-info/PKG-INFO
+zipp.egg-info/SOURCES.txt
+zipp.egg-info/dependency_links.txt
+zipp.egg-info/requires.txt
+zipp.egg-info/top_level.txt \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/dependency_links.txt b/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/dependency_links.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/requires.txt b/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/requires.txt
new file mode 100644
index 0000000000..90bab46ac4
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/requires.txt
@@ -0,0 +1,14 @@
+
+[:python_version < "3.4"]
+contextlib2
+
+[docs]
+sphinx
+jaraco.packaging>=3.2
+rst.linker>=1.9
+
+[testing]
+pathlib2
+unittest2
+jaraco.itertools
+func-timeout
diff --git a/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/top_level.txt b/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/top_level.txt
new file mode 100644
index 0000000000..e82f676f82
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/zipp.egg-info/top_level.txt
@@ -0,0 +1 @@
+zipp
diff --git a/testing/web-platform/tests/tools/third_party/zipp/zipp.py b/testing/web-platform/tests/tools/third_party/zipp/zipp.py
new file mode 100644
index 0000000000..892205834a
--- /dev/null
+++ b/testing/web-platform/tests/tools/third_party/zipp/zipp.py
@@ -0,0 +1,286 @@
+# coding: utf-8
+
+from __future__ import division
+
+import io
+import sys
+import posixpath
+import zipfile
+import functools
+import itertools
+from collections import OrderedDict
+
+try:
+ from contextlib import suppress
+except ImportError:
+ from contextlib2 import suppress
+
+__metaclass__ = type
+
+
+def _parents(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all parents of that path.
+
+ >>> list(_parents('b/d'))
+ ['b']
+ >>> list(_parents('/b/d/'))
+ ['/b']
+ >>> list(_parents('b/d/f/'))
+ ['b/d', 'b']
+ >>> list(_parents('b'))
+ []
+ >>> list(_parents(''))
+ []
+ """
+ return itertools.islice(_ancestry(path), 1, None)
+
+
+def _ancestry(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all elements of that path
+
+ >>> list(_ancestry('b/d'))
+ ['b/d', 'b']
+ >>> list(_ancestry('/b/d/'))
+ ['/b/d', '/b']
+ >>> list(_ancestry('b/d/f/'))
+ ['b/d/f', 'b/d', 'b']
+ >>> list(_ancestry('b'))
+ ['b']
+ >>> list(_ancestry(''))
+ []
+ """
+ path = path.rstrip(posixpath.sep)
+ while path and path != posixpath.sep:
+ yield path
+ path, tail = posixpath.split(path)
+
+
+class CompleteDirs(zipfile.ZipFile):
+ """
+ A ZipFile subclass that ensures that implied directories
+ are always included in the namelist.
+ """
+
+ @staticmethod
+ def _implied_dirs(names):
+ parents = itertools.chain.from_iterable(map(_parents, names))
+ # Cast names to a set for O(1) lookups
+ existing = set(names)
+ # Deduplicate entries in original order
+ implied_dirs = OrderedDict.fromkeys(
+ p + posixpath.sep for p in parents
+ if p + posixpath.sep not in existing
+ )
+ return implied_dirs
+
+ def namelist(self):
+ names = super(CompleteDirs, self).namelist()
+ return names + list(self._implied_dirs(names))
+
+ def _name_set(self):
+ return set(self.namelist())
+
+ def resolve_dir(self, name):
+ """
+ If the name represents a directory, return that name
+ as a directory (with the trailing slash).
+ """
+ names = self._name_set()
+ dirname = name + '/'
+ dir_match = name not in names and dirname in names
+ return dirname if dir_match else name
+
+ @classmethod
+ def make(cls, source):
+ """
+ Given a source (filename or zipfile), return an
+ appropriate CompleteDirs subclass.
+ """
+ if isinstance(source, CompleteDirs):
+ return source
+
+ if not isinstance(source, zipfile.ZipFile):
+ return cls(_pathlib_compat(source))
+
+ # Only allow for FastLookup when supplied zipfile is read-only
+ if 'r' not in source.mode:
+ cls = CompleteDirs
+
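+ # Clone the source ZipFile's state onto a fresh instance of the chosen class.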
+ res = cls.__new__(cls)
+ vars(res).update(vars(source))
+ return res
+
+
+class FastLookup(CompleteDirs):
+ """
+ ZipFile subclass to ensure implicit
+ dirs exist and are resolved rapidly.
+ """
+ def namelist(self):
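+ # Return the cached name list if already computed; otherwise compute and cache it.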
+ with suppress(AttributeError):
+ return self.__names
+ self.__names = super(FastLookup, self).namelist()
+ return self.__names
+
+ def _name_set(self):
+ with suppress(AttributeError):
+ return self.__lookup
+ self.__lookup = super(FastLookup, self)._name_set()
+ return self.__lookup
+
+
+def _pathlib_compat(path):
+ """
+ For path-like objects, convert to a filename for compatibility
+ on Python 3.6.1 and earlier.
+ """
+ try:
+ return path.__fspath__()
+ except AttributeError:
+ return str(path)
+
+
+class Path:
+ """
+ A pathlib-compatible interface for zip files.
+
+ Consider a zip file with this structure::
+
+ .
+ ├── a.txt
+ └── b
+ ├── c.txt
+ └── d
+ └── e.txt
+
+ >>> data = io.BytesIO()
+ >>> zf = zipfile.ZipFile(data, 'w')
+ >>> zf.writestr('a.txt', 'content of a')
+ >>> zf.writestr('b/c.txt', 'content of c')
+ >>> zf.writestr('b/d/e.txt', 'content of e')
+ >>> zf.filename = 'abcde.zip'
+
+ Path accepts the zipfile object itself or a filename
+
+ >>> root = Path(zf)
+
+ From there, several path operations are available.
+
+ Directory iteration (including the zip file itself):
+
+ >>> a, b = root.iterdir()
+ >>> a
+ Path('abcde.zip', 'a.txt')
+ >>> b
+ Path('abcde.zip', 'b/')
+
+ name property:
+
+ >>> b.name
+ 'b'
+
+ join with divide operator:
+
+ >>> c = b / 'c.txt'
+ >>> c
+ Path('abcde.zip', 'b/c.txt')
+ >>> c.name
+ 'c.txt'
+
+ Read text:
+
+ >>> c.read_text()
+ 'content of c'
+
+ existence:
+
+ >>> c.exists()
+ True
+ >>> (b / 'missing.txt').exists()
+ False
+
+ Coercion to string:
+
+ >>> str(c)
+ 'abcde.zip/b/c.txt'
+ """
+
+ __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
+
+ def __init__(self, root, at=""):
+ self.root = FastLookup.make(root)
+ self.at = at
+
+ def open(self, mode='r', *args, **kwargs):
+ """
+ Open this entry as text or binary following the semantics
+ of ``pathlib.Path.open()`` by passing arguments through
+ to io.TextIOWrapper().
+ """
+ pwd = kwargs.pop('pwd', None)
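+ # ZipFile.open() only understands 'r' or 'w', so keep just the first
+ # character of the requested mode and wrap the stream in TextIOWrapper
+ # unless binary mode was requested.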
+ zip_mode = mode[0]
+ stream = self.root.open(self.at, zip_mode, pwd=pwd)
+ if 'b' in mode:
+ if args or kwargs:
+ raise ValueError("encoding args invalid for binary operation")
+ return stream
+ return io.TextIOWrapper(stream, *args, **kwargs)
+
+ @property
+ def name(self):
+ return posixpath.basename(self.at.rstrip("/"))
+
+ def read_text(self, *args, **kwargs):
+ with self.open('r', *args, **kwargs) as strm:
+ return strm.read()
+
+ def read_bytes(self):
+ with self.open('rb') as strm:
+ return strm.read()
+
+ def _is_child(self, path):
+ return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
+
+ def _next(self, at):
+ return Path(self.root, at)
+
+ def is_dir(self):
+ return not self.at or self.at.endswith("/")
+
+ def is_file(self):
+ return not self.is_dir()
+
+ def exists(self):
+ return self.at in self.root._name_set()
+
+ def iterdir(self):
+ if not self.is_dir():
+ raise ValueError("Can't listdir a file")
+ subs = map(self._next, self.root.namelist())
+ return filter(self._is_child, subs)
+
+ def __str__(self):
+ return posixpath.join(self.root.filename, self.at)
+
+ def __repr__(self):
+ return self.__repr.format(self=self)
+
+ def joinpath(self, add):
+ next = posixpath.join(self.at, _pathlib_compat(add))
+ return self._next(self.root.resolve_dir(next))
+
+ __truediv__ = joinpath
+
+ @property
+ def parent(self):
+ parent_at = posixpath.dirname(self.at.rstrip('/'))
+ if parent_at:
+ parent_at += '/'
+ return self._next(parent_at)
+
+ if sys.version_info < (3,):
+ __div__ = __truediv__
diff --git a/testing/web-platform/tests/tools/tox.ini b/testing/web-platform/tests/tools/tox.ini
new file mode 100644
index 0000000000..b4c250bee4
--- /dev/null
+++ b/testing/web-platform/tests/tools/tox.ini
@@ -0,0 +1,24 @@
+[tox]
+envlist = py36,py37,py38,py39,py310,{py36,py37,py38,py39,py310}-{flake8,mypy}
+skipsdist=True
+skip_missing_interpreters=False
+
+[testenv]
+deps =
+ !flake8-!mypy: -r{toxinidir}/requirements_pytest.txt
+ !flake8-!mypy: -r{toxinidir}/requirements_tests.txt
+ flake8: -r{toxinidir}/requirements_flake8.txt
+ mypy: -r{toxinidir}/requirements_mypy.txt
+
+changedir =
+ mypy: {toxinidir}/..
+
+commands =
+ !flake8-!mypy: pytest --cov=tools --cov-report=term {posargs}
+ flake8: flake8 --append-config={toxinidir}/flake8.ini {posargs}
+ mypy: mypy --config-file={toxinidir}/mypy.ini tools/
+
+passenv =
+ HYPOTHESIS_PROFILE
+ PY_COLORS
+ TASKCLUSTER_ROOT_URL
diff --git a/testing/web-platform/tests/tools/wave/.gitignore b/testing/web-platform/tests/tools/wave/.gitignore
new file mode 100644
index 0000000000..bf0aae9e13
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/.gitignore
@@ -0,0 +1,3 @@
+!www/lib
+!export/lib
+!export/css
diff --git a/testing/web-platform/tests/tools/wave/__init__.py b/testing/web-platform/tests/tools/wave/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/__init__.py
diff --git a/testing/web-platform/tests/tools/wave/config.default.json b/testing/web-platform/tests/tools/wave/config.default.json
new file mode 100644
index 0000000000..5b696929bb
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/config.default.json
@@ -0,0 +1,49 @@
+{
+ "browser_host": "web-platform.test",
+ "alternate_hosts": {
+ "alt": "not-web-platform.test"
+ },
+ "doc_root": ".",
+ "ws_doc_root": "./websockets/handlers",
+ "server_host": null,
+ "ports": {
+ "http": [8000, "auto"],
+ "https": [8443],
+ "ws": ["auto"],
+ "wss": ["auto"]
+ },
+ "check_subdomains": true,
+ "log_level": "debug",
+ "bind_address": true,
+ "ssl": {
+ "type": "pregenerated",
+ "encrypt_after_connect": false,
+ "openssl": {
+ "openssl_binary": "openssl",
+ "base_path": "_certs",
+ "force_regenerate": false,
+ "base_conf_path": null
+ },
+ "pregenerated": {
+ "host_key_path": "./tools/certs/web-platform.test.key",
+ "host_cert_path": "./tools/certs/web-platform.test.pem"
+ },
+ "none": {}
+ },
+ "aliases": [],
+ "wave": {
+ "results": "./results",
+ "timeouts": {
+ "automatic": 60000,
+ "manual": 300000
+ },
+ "enable_import_results": false,
+ "web_root": "/_wave",
+ "persisting_interval": 20,
+ "api_titles": [],
+ "enable_read_sessions": false,
+ "event_cache_duration": 60000,
+ "enable_test_type_selection": false,
+ "enable_test_file_selection": false
+ }
+}
diff --git a/testing/web-platform/tests/tools/wave/configuration_loader.py b/testing/web-platform/tests/tools/wave/configuration_loader.py
new file mode 100644
index 0000000000..2e2aa33151
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/configuration_loader.py
@@ -0,0 +1,97 @@
+# mypy: allow-untyped-defs
+
+import json
+import os
+
+from tools.wpt import wpt
+
+DEFAULT_CONFIGURATION_FILE_PATH = os.path.join(wpt.localpaths.repo_root, "./tools/wave/config.default.json")
+
+
+def load(configuration_file_path):
+ configuration = {}
+ if configuration_file_path:
+ configuration = load_configuration_file(configuration_file_path)
+ default_configuration = load_configuration_file(
+ DEFAULT_CONFIGURATION_FILE_PATH)
+
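+ # Each derived value falls back to config.default.json when it is absent
+ # from the user-provided configuration file.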
+ configuration["wpt_port"] = configuration.get(
+ "ports", default_configuration["ports"]).get(
+ "http", default_configuration["ports"]["http"])[0]
+ configuration["wpt_ssl_port"] = configuration.get(
+ "ports", default_configuration["ports"]).get(
+ "https", default_configuration["ports"]["https"])[0]
+
+ web_root = configuration.get(
+ "wave", default_configuration["wave"]).get(
+ "web_root", default_configuration["wave"]["web_root"])
+ if not web_root.startswith("/"):
+ web_root = "/" + web_root
+ if not web_root.endswith("/"):
+ web_root += "/"
+ configuration["web_root"] = web_root
+
+ configuration["results_directory_path"] = configuration.get(
+ "wave", default_configuration["wave"]).get(
+ "results", default_configuration["wave"]["results"])
+
+ configuration["timeouts"] = {}
+ configuration["timeouts"]["automatic"] = configuration.get(
+ "wave", default_configuration["wave"]).get(
+ "timeouts", default_configuration["wave"]["timeouts"]).get(
+ "automatic", default_configuration["wave"]["timeouts"]["automatic"])
+ configuration["timeouts"]["manual"] = configuration.get(
+ "wave", default_configuration["wave"]).get(
+ "timeouts", default_configuration["wave"]["timeouts"]).get(
+ "manual", default_configuration["wave"]["timeouts"]["manual"])
+
+ configuration["hostname"] = configuration.get(
+ "browser_host", default_configuration["browser_host"])
+
+ configuration["import_results_enabled"] = configuration.get(
+ "wave", default_configuration["wave"]).get(
+ "enable_import_results",
+ default_configuration["wave"]["enable_import_results"])
+
+ configuration["read_sessions_enabled"] = configuration.get(
+ "wave", default_configuration["wave"]).get(
+ "enable_read_sessions",
+ default_configuration["wave"]["enable_read_sessions"])
+
+ configuration["persisting_interval"] = configuration.get(
+ "wave", default_configuration["wave"]).get(
+ "persisting_interval", default_configuration["wave"]["persisting_interval"])
+
+ configuration["event_cache_duration"] = configuration.get(
+ "wave", default_configuration["wave"]).get(
+ "event_cache_duration", default_configuration["wave"]["event_cache_duration"])
+
+ configuration["tests_directory_path"] = os.getcwd()
+
+ configuration["manifest_file_path"] = os.path.join(
+ os.getcwd(), "MANIFEST.json")
+
+ configuration["api_titles"] = configuration.get(
+ "wave", default_configuration["wave"]).get(
+ "api_titles", default_configuration["wave"]["api_titles"])
+
+ configuration["enable_test_type_selection"] = configuration.get(
+ "wave", default_configuration["wave"]).get(
+ "enable_test_type_selection", default_configuration["wave"]["enable_test_type_selection"])
+
+ configuration["enable_test_file_selection"] = configuration.get(
+ "wave", default_configuration["wave"]).get(
+ "enable_test_file_selection", default_configuration["wave"]["enable_test_file_selection"])
+
+ return configuration
+
+
+def load_configuration_file(path):
+ if not os.path.isfile(path):
+ return {}
+
+ configuration = None
+ with open(path) as configuration_file:
+ configuration_file_content = configuration_file.read()
+ configuration = json.loads(configuration_file_content)
+ return configuration
diff --git a/testing/web-platform/tests/tools/wave/data/__init__.py b/testing/web-platform/tests/tools/wave/data/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/data/__init__.py
diff --git a/testing/web-platform/tests/tools/wave/data/client.py b/testing/web-platform/tests/tools/wave/data/client.py
new file mode 100644
index 0000000000..d5643a5660
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/data/client.py
@@ -0,0 +1,8 @@
+# mypy: allow-untyped-defs
+
+class Client:
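+ # Base class for session clients; subclasses are expected to override send_message().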
+ def __init__(self, session_token):
+ self.session_token = session_token
+
+ def send_message(self, message):
+ raise Exception("Client.send_message(message) not implemented!")
diff --git a/testing/web-platform/tests/tools/wave/data/device.py b/testing/web-platform/tests/tools/wave/data/device.py
new file mode 100644
index 0000000000..b1d06cbf30
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/data/device.py
@@ -0,0 +1,8 @@
+# mypy: allow-untyped-defs
+
+class Device:
+ def __init__(self, token, user_agent, name, last_active):
+ self.token = token
+ self.user_agent = user_agent
+ self.name = name
+ self.last_active = last_active
diff --git a/testing/web-platform/tests/tools/wave/data/event_listener.py b/testing/web-platform/tests/tools/wave/data/event_listener.py
new file mode 100644
index 0000000000..c4b98653e1
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/data/event_listener.py
@@ -0,0 +1,10 @@
+# mypy: allow-untyped-defs
+
+class EventListener:
+ def __init__(self, dispatcher_token):
+ super().__init__()
+ self.dispatcher_token = dispatcher_token
+ self.token = None
+
+ def send_message(self, message):
+ raise Exception("EventListener.send_message(message) not implemented!")
diff --git a/testing/web-platform/tests/tools/wave/data/exceptions/__init__.py b/testing/web-platform/tests/tools/wave/data/exceptions/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/data/exceptions/__init__.py
diff --git a/testing/web-platform/tests/tools/wave/data/exceptions/duplicate_exception.py b/testing/web-platform/tests/tools/wave/data/exceptions/duplicate_exception.py
new file mode 100644
index 0000000000..2d64ea51bd
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/data/exceptions/duplicate_exception.py
@@ -0,0 +1,2 @@
+class DuplicateException(Exception):
+ pass
diff --git a/testing/web-platform/tests/tools/wave/data/exceptions/invalid_data_exception.py b/testing/web-platform/tests/tools/wave/data/exceptions/invalid_data_exception.py
new file mode 100644
index 0000000000..50c7e8f372
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/data/exceptions/invalid_data_exception.py
@@ -0,0 +1,2 @@
+class InvalidDataException(Exception):
+ pass
diff --git a/testing/web-platform/tests/tools/wave/data/exceptions/not_found_exception.py b/testing/web-platform/tests/tools/wave/data/exceptions/not_found_exception.py
new file mode 100644
index 0000000000..0e573506ad
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/data/exceptions/not_found_exception.py
@@ -0,0 +1,2 @@
+class NotFoundException(Exception):
+ pass
diff --git a/testing/web-platform/tests/tools/wave/data/exceptions/permission_denied_exception.py b/testing/web-platform/tests/tools/wave/data/exceptions/permission_denied_exception.py
new file mode 100644
index 0000000000..e51660f678
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/data/exceptions/permission_denied_exception.py
@@ -0,0 +1,2 @@
+class PermissionDeniedException(Exception):
+ pass
diff --git a/testing/web-platform/tests/tools/wave/data/http_polling_client.py b/testing/web-platform/tests/tools/wave/data/http_polling_client.py
new file mode 100644
index 0000000000..3235569a98
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/data/http_polling_client.py
@@ -0,0 +1,13 @@
+# mypy: allow-untyped-defs
+
+from .client import Client
+
+
+class HttpPollingClient(Client):
+ def __init__(self, session_token, event):
+ super().__init__(session_token)
+ self.event = event
+
+ def send_message(self, message):
+ self.message = message
+ self.event.set()
diff --git a/testing/web-platform/tests/tools/wave/data/http_polling_event_listener.py b/testing/web-platform/tests/tools/wave/data/http_polling_event_listener.py
new file mode 100644
index 0000000000..b1e46edd36
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/data/http_polling_event_listener.py
@@ -0,0 +1,13 @@
+# mypy: allow-untyped-defs
+
+from .event_listener import EventListener
+
+class HttpPollingEventListener(EventListener):
+ def __init__(self, dispatcher_token, event):
+ super().__init__(dispatcher_token)
+ self.event = event
+ self.message = None
+
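+ # Store the payload and set the threading event so that a caller blocked
+ # on event.wait() can pick up the message.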
+ def send_message(self, message):
+ self.message = message
+ self.event.set()
diff --git a/testing/web-platform/tests/tools/wave/data/session.py b/testing/web-platform/tests/tools/wave/data/session.py
new file mode 100644
index 0000000000..bb1b932dae
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/data/session.py
@@ -0,0 +1,78 @@
+# mypy: allow-untyped-defs
+
+from ..testing.test_loader import MANUAL, AUTOMATIC
+
+PAUSED = "paused"
+RUNNING = "running"
+COMPLETED = "completed"
+ABORTED = "aborted"
+PENDING = "pending"
+UNKNOWN = "unknown"
+
+
+class Session:
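+ # Mutable defaults (lists and dicts) are created inside __init__ rather
+ # than in the signature to avoid sharing state between Session instances.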
+ def __init__(
+ self,
+ token=None,
+ test_types=None,
+ user_agent=None,
+ labels=None,
+ tests=None,
+ pending_tests=None,
+ running_tests=None,
+ timeouts=None,
+ status=None,
+ test_state=None,
+ last_completed_test=None,
+ recent_completed_count=None,
+ date_created=None,
+ date_started=None,
+ date_finished=None,
+ is_public=None,
+ reference_tokens=None,
+ browser=None,
+ expiration_date=None,
+ type=None,
+ malfunctioning_tests=None
+ ):
+ if token is None:
+ token = ""
+ self.token = token
+ if test_types is None:
+ test_types = [AUTOMATIC, MANUAL]
+ self.test_types = test_types
+ if user_agent is None:
+ user_agent = ""
+ self.user_agent = user_agent
+ if labels is None:
+ labels = []
+ self.labels = labels
+ self.tests = tests
+ self.pending_tests = pending_tests
+ self.running_tests = running_tests
+ if timeouts is None:
+ timeouts = {}
+ self.timeouts = timeouts
+ if status is None:
+ status = UNKNOWN
+ self.status = status
+ self.test_state = test_state
+ self.last_completed_test = last_completed_test
+ if recent_completed_count is None:
+ recent_completed_count = 0
+ self.recent_completed_count = recent_completed_count
+ self.date_created = date_created
+ self.date_started = date_started
+ self.date_finished = date_finished
+ if is_public is None:
+ is_public = False
+ self.is_public = is_public
+ if reference_tokens is None:
+ reference_tokens = []
+ self.reference_tokens = reference_tokens
+ self.browser = browser
+ self.expiration_date = expiration_date
+ self.type = type
+ if malfunctioning_tests is None:
+ malfunctioning_tests = []
+ self.malfunctioning_tests = malfunctioning_tests
diff --git a/testing/web-platform/tests/tools/wave/docs/README.md b/testing/web-platform/tests/tools/wave/docs/README.md
new file mode 100644
index 0000000000..88092b63e7
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/README.md
@@ -0,0 +1,14 @@
+# WAVE Test Runner Documentation
+
+As part of the [WAVE project](https://cta.tech/Resources/Standards/WAVE-Project)
+the WAVE Test Runner was implemented to run tests that confirm proper implementation
+of specified features. The code base is used in different subprojects, each of which
+may have different requirements and scopes, so some features and screenshots in
+this documentation may be specific to one of those contexts.
+
+## Contents
+
+- [Configuration](./config.md): How to configure the test runner
+- [REST API](./rest-api/README.md): Documentation of endpoints, parameters and payloads
+ - [Guides](./rest-api/guides/README.md): How to use certain API mechanisms
+- [Usage Guide](./usage/usage.md): General usage guide
diff --git a/testing/web-platform/tests/tools/wave/docs/config.md b/testing/web-platform/tests/tools/wave/docs/config.md
new file mode 100644
index 0000000000..afc7c0b05f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/config.md
@@ -0,0 +1,326 @@
+# Configuration - [WAVE Test Runner](./README.md)
+
+Using a configuration file, the WAVE Test Runner can be adapted to different
+use cases. This document lists all configuration
+parameters and what they are used for.
+
+## Contents
+
+1. [Location and structure](#1-location-and-structure)
+2. [Parameters](#2-parameters)
+ 1. [Results directory](#21-results-directory)
+ 2. [Test Timeouts](#22-test-timeouts)
+ 3. [Enable import of results](#23-enable-import-of-results)
+ 4. [Web namespace](#24-web-namespace)
+ 5. [Persisting interval](#25-persisting-interval)
+ 6. [API titles](#26-api-titles)
+ 7. [Enable listing all sessions](#27-enable-listing-all-sessions)
+ 8. [Event caching duration](#28-event-caching-duration)
+ 9. [Enable test type selection](#29-enable-test-type-selection)
+
+## 1. Location and structure
+
+Configuration parameters are defined in a JSON file called `config.json` in
+the project root of the WPT runner. This configuration file is also used by
+the WPT runner, so any WAVE Test Runner related configuration parameters are
+wrapped inside a `wave` object.
+
+```
+<PRJ_ROOT>/config.json
+```
+
+```json
+{
+ "wave": {
+ "results": "./results"
+ }
+}
+```
+
+All the default values are stored in a configuration file inside the wave
+directory:
+
+```
+<PRJ_ROOT>/tools/wave/config.default.json
+```
+
+```json
+{
+ "wave": {
+ "results": "./results",
+ "timeouts": {
+ "automatic": 60000,
+ "manual": 300000
+ },
+ "enable_import_results": false,
+ "web_root": "/_wave",
+ "persisting_interval": 20,
+ "api_titles": [],
+ "enable_read_sessions": false,
+ "event_cache_duration": 60000
+ }
+}
+```
+[🠑 top](#configuration---wave-test-runner)
+
+## 2. Parameters
+
+### 2.1 Results directory
+
+The results parameter sets where results and session information are stored.
+
+**Parameters**:
+
+```json
+{
+ "results": "<String>"
+}
+```
+
+- **results**: Path to the results directory. Can be absolute, or relative to
+ the project root.
+
+**Default**:
+
+```json
+{
+ "results": "./results"
+}
+```
+
+[🠑 top](#configuration---wave-test-runner)
+
+### 2.2 Test Timeouts
+
+The test timeouts set the default test timeout for different test types.
+
+**Parameters**:
+
+```json
+{
+ "timeouts": {
+ "automatic": "<Number>",
+ "manual": "<Number>"
+ }
+}
+```
+
+- **timeouts**: Holds the key value pairs for different types of tests
+ - **automatic**: Default time to wait for automatic tests in milliseconds.
+ - **manual**: Default time to wait for manual tests in milliseconds.
+
+**Default**:
+
+```json
+{
+ "timeouts": {
+ "automatic": 60000,
+ "manual": 300000
+ }
+}
+```
+
+[🠑 top](#configuration---wave-test-runner)
+
+### 2.3 Enable import of results
+
+This parameter enables the capability to import session results from other
+WAVE Test Runner instances into the current one.
+
+**Parameters**:
+
+```json
+{
+ "enable_import_results": "<Boolean>"
+}
+```
+
+- **enable_import_results**: Sets whether or not to enable the [REST API endpoint to import results](./rest-api/results-api/import.md)
+
+**Default**:
+
+```json
+{
+ "enable_import_results": false
+}
+```
+
+[🠑 top](#configuration---wave-test-runner)
+
+### 2.4 Web namespace
+
+All static resources and REST API endpoints are accessible under a
+configurable namespace. This namespace can be set using the `web_root`
+parameter.
+
+**Parameters**:
+
+```json
+{
+ "web_root": "<String>"
+}
+```
+
+- **web_root**: The namespace to use
+
+**Default**:
+
+```json
+{
+ "web_root": "/_wave"
+}
+```
+
+[🠑 top](#configuration---wave-test-runner)
+
+### 2.5 Persisting interval
+
+The persisting interval specifies how many tests have to be completed until
+all session information is updated in the results directory.
+
+For example, if set to 5, the `info.json` file in the results directory is
+updated with the current state of the session after every 5 completed tests.
+When the server restarts, this state is used to reconstruct each session's
+testing state.
+
+**Parameters**:
+
+```json
+{
+ "persisting_interval": "<Number>"
+}
+```
+
+- **persisting_interval**: The number of tests to execute until the persisted
+ session information gets updated
+
+**Default**:
+
+```json
+{
+ "persisting_interval": 20
+}
+```
+
+[🠑 top](#configuration---wave-test-runner)
+
+### 2.6 API titles
+
+The API titles are used to display a more human-readable representation of an
+API that tests are available for. Using this parameter, it is possible to assign
+a name to an API subdirectory.
+
+**Parameters**:
+
+```json
+{
+ "api_titles": [
+ {
+ "title": "<String>",
+ "path": "<String>"
+ },
+ ...
+ ]
+}
+```
+
+- **api_titles**: An array of titles assigned to paths
+ - **title**: The displayed title of the API in the UI
+ - **path**: The path relative to the project root of the tested API
+
+**Default**:
+
+```json
+{
+ "api_titles": []
+}
+```
+
+**Example**:
+
+```json
+{
+ "api_titles": [
+ {
+ "title": "WebGL",
+ "path": "/webgl"
+ },
+ {
+ "title": "WebRTC Extensions",
+ "path": "/webrtc-extensions"
+ }
+ ]
+}
+```
+
+[🠑 top](#configuration---wave-test-runner)
+
+### 2.7 Enable listing all sessions
+
+This parameter enables the [REST API endpoint to list all available sessions](./rest-api/sessions-api/read_sessions.md).
+
+**Parameters**:
+
+```json
+{
+ "enable_read_sessions": "<Boolean>"
+}
+```
+
+- **enable_read_sessions**: Sets whether or not to enable the REST API endpoint to read all sessions
+
+**Default**:
+
+```json
+{
+ "enable_read_sessions": false
+}
+```
+
+[🠑 top](#configuration---wave-test-runner)
+
+### 2.8 Event caching duration
+
+This parameter specifies how long events are held in the cache. Depending on
+how fast clients are able to consume events, this value may need to be adjusted
+accordingly.
+
+**Parameters**:
+
+```json
+{
+ "event_cache_duration": "<Number>"
+}
+```
+
+- **event_cache_duration**: The duration events are held in the cache, in milliseconds
+
+**Default**:
+
+```json
+{
+ "event_cache_duration": 60000
+}
+```
+
+[🠑 top](#configuration---wave-test-runner)
+
+### 2.9 Enable test type selection
+
+Sets whether the test type selection UI elements are displayed.
+
+**Parameters**:
+
+```json
+{
+ "enable_test_type_selection": "<Boolean>"
+}
+```
+
+- **enable_test_type_selection**: Whether or not test type UI controls are displayed
+
+**Default**:
+
+False
+
+[🠑 top](#configuration---wave-test-runner)
diff --git a/testing/web-platform/tests/tools/wave/docs/res/configuration_page_bottom.jpg b/testing/web-platform/tests/tools/wave/docs/res/configuration_page_bottom.jpg
new file mode 100644
index 0000000000..85d4bdc3fc
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/configuration_page_bottom.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/configuration_page_exclude_add_malfunctioning.jpg b/testing/web-platform/tests/tools/wave/docs/res/configuration_page_exclude_add_malfunctioning.jpg
new file mode 100644
index 0000000000..28f42230d5
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/configuration_page_exclude_add_malfunctioning.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/configuration_page_exclude_add_prev_excluded.jpg b/testing/web-platform/tests/tools/wave/docs/res/configuration_page_exclude_add_prev_excluded.jpg
new file mode 100644
index 0000000000..c71b34f668
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/configuration_page_exclude_add_prev_excluded.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/configuration_page_exclude_add_raw.jpg b/testing/web-platform/tests/tools/wave/docs/res/configuration_page_exclude_add_raw.jpg
new file mode 100644
index 0000000000..f1428f313d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/configuration_page_exclude_add_raw.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/configuration_page_top.jpg b/testing/web-platform/tests/tools/wave/docs/res/configuration_page_top.jpg
new file mode 100644
index 0000000000..93b6522bc7
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/configuration_page_top.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/landing_page.jpg b/testing/web-platform/tests/tools/wave/docs/res/landing_page.jpg
new file mode 100644
index 0000000000..c032a7b291
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/landing_page.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/overview_page_sessions.jpg b/testing/web-platform/tests/tools/wave/docs/res/overview_page_sessions.jpg
new file mode 100644
index 0000000000..642a009d4a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/overview_page_sessions.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/overview_page_sessions_filtered.jpg b/testing/web-platform/tests/tools/wave/docs/res/overview_page_sessions_filtered.jpg
new file mode 100644
index 0000000000..266a5c1159
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/overview_page_sessions_filtered.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/overview_page_sessions_pinned_recent.jpg b/testing/web-platform/tests/tools/wave/docs/res/overview_page_sessions_pinned_recent.jpg
new file mode 100644
index 0000000000..fb92a47bf4
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/overview_page_sessions_pinned_recent.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/overview_page_top.jpg b/testing/web-platform/tests/tools/wave/docs/res/overview_page_top.jpg
new file mode 100644
index 0000000000..860272796a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/overview_page_top.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/results_page_api_results.jpg b/testing/web-platform/tests/tools/wave/docs/res/results_page_api_results.jpg
new file mode 100644
index 0000000000..e0e2314a78
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/results_page_api_results.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/results_page_api_results_export.jpg b/testing/web-platform/tests/tools/wave/docs/res/results_page_api_results_export.jpg
new file mode 100644
index 0000000000..c85ce980fe
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/results_page_api_results_export.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/results_page_bottom.jpg b/testing/web-platform/tests/tools/wave/docs/res/results_page_bottom.jpg
new file mode 100644
index 0000000000..60244a8e46
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/results_page_bottom.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/results_page_last_timed_out.jpg b/testing/web-platform/tests/tools/wave/docs/res/results_page_last_timed_out.jpg
new file mode 100644
index 0000000000..bb63bd4d51
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/results_page_last_timed_out.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/results_page_malfunctioning_list.jpg b/testing/web-platform/tests/tools/wave/docs/res/results_page_malfunctioning_list.jpg
new file mode 100644
index 0000000000..fcd6b370fd
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/results_page_malfunctioning_list.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/res/results_page_top.jpg b/testing/web-platform/tests/tools/wave/docs/res/results_page_top.jpg
new file mode 100644
index 0000000000..3d0f876c7b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/res/results_page_top.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/README.md b/testing/web-platform/tests/tools/wave/docs/rest-api/README.md
new file mode 100644
index 0000000000..c6d21823a7
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/README.md
@@ -0,0 +1,76 @@
+# REST API - [WAVE Test Runner](../README.md)
+
+The REST API allows the WAVE server to be integrated into other systems. Every
+call must be preceded with a namespace or web root, which is omitted in this
+documentation. The default web root is `/_wave`, which can be changed in the
+config.json using the keyword `web_root`.
+
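+For illustration, a minimal sketch of how the web root prefixes every endpoint,
+assuming the Python `requests` package and a runner reachable at
+`http://web-platform.test:8000` (the default host and HTTP port):
+
+```python
+import requests
+
+# Default web root; adjust if `web_root` is overridden in config.json.
+WEB_ROOT = "http://web-platform.test:8000/_wave"
+
+# Devices API: register a device, then read its information back.
+token = requests.post(WEB_ROOT + "/api/devices").json()["token"]
+device = requests.get(WEB_ROOT + "/api/devices/" + token).json()
+print(device["name"], device["last_active"])
+```
+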
+Additional [REST API Guides](./guides/README.md) can help to understand how to
+use these endpoints in context.
+
+## Sessions API <a name="sessions-api"></a>
+
+| Name | Description |
+| -------------------------------------------------- | -------------------------------------------------------------- |
+| [`create`](./sessions-api/create.md) | Creates a new test session. |
+| [`read session`](./sessions-api/read.md)          | Reads a session's configuration.                                |
+| [`read sessions`](./sessions-api/read_sessions.md) | Reads all session tokens, expandable with configs and statuses |
+| [`read public`](./sessions-api/read-public.md)     | Reads all public session tokens.                                |
+| [`update`](./sessions-api/update.md) | Updates a session configuration. |
+| [`delete`](./sessions-api/delete.md) | Deletes a test session. |
+| [`status`](./sessions-api/status.md) | Reads the status and progress of a session. |
+| [`start`](./sessions-api/control.md#start) | Starts a test session. |
+| [`stop`](./sessions-api/control.md#stop) | Stops a test session. |
+| [`pause`](./sessions-api/control.md#pause) | Pauses a test session. |
+| [`find`](./sessions-api/find.md) | Finds a session token by providing a token fragment. |
+| [`labels`](./sessions-api/labels.md)               | Attaches labels to sessions for organizational purposes.        |
+| [`listen events`](./sessions-api/events.md)        | Registers for session-specific events.                          |
+| [`push events`](./sessions-api/events.md)          | Pushes session-specific events.                                 |
+
+## Tests API <a name="tests-api"></a>
+
+| Name | Description |
+| --------------------------------------------------------------- | ------------------------------------------------------ |
+| [`read all`](./tests-api/read-all.md) | Reads all tests available. |
+| [`read session`](./tests-api/read-session.md) | Reads all tests that are part of a session. |
+| [`read next`](./tests-api/read-next.md) | Reads the next test to run in a session. |
+| [`read last completed`](./tests-api/read-last-completed.md) | Reads the last completed tests of a session. |
+| [`read malfunctioning`](./tests-api/read-malfunctioning.md) | Reads the list of malfunctioning tests of a session. |
+| [`update malfunctioning`](./tests-api/update-malfunctioning.md) | Updates the list of malfunctioning tests of a session. |
+| [`read available apis`](./tests-api/read-available-apis.md)     | Reads the names and paths of all available APIs.       |
+
+## Results API <a name="results-api"></a>
+
+| Name | Description |
+| ------------------------------------------------------------------------ | ------------------------------------------------------------------------------- |
+| [`create`](./results-api/create.md) | Create a new test result for a test in a session. |
+| [`read`](./results-api/read.md) | Read all test results of a session. |
+| [`read compact`](./results-api/read-compact.md) | Read the number of passed, failed, timed out and not run tests of a session. |
+| [`import session`](./results-api/import.md#1-import-session) | Import session results. |
+| [`import api results`](./results-api/import.md#2-import-api-results) | Import results of a specific API into existing session. |
+| [`download`](./results-api/download.md#1-download)                       | Download all session results to import into another WMATS instance.             |
+| [`download api`](./results-api/download.md#2-download-api) | Download all results of an API. |
+| [`download all apis`](./results-api/download.md#3-download-all-apis) | Download all results of all APIs. |
+| [`view report`](./results-api/download.md#4-download-report) | View the WPT report of an API of a session. |
+| [`view multi report`](./results-api/download.md#5-download-multi-report) | View the WPT report of an API of multiple sessions. |
+| [`download overview`](./results-api/download.md#6-download-overview) | Download an overview of results of all APIs of a session. |
+| [`view report`](./results-api/view.md#1-view-report)                     | Read a URL to a hosted version of a WPT report for an API of a session.         |
+| [`view multi report`](./results-api/view.md#2-view-multi-report)         | Read a URL to a hosted version of a WPT report for an API of multiple sessions. |
+
+## Devices API <a name="devices-api"></a>
+
+| Name | Description |
+| -------------------------------------------------------------------- | -------------------------------------- |
+| [`create`](./devices-api/create.md) | Registers a new device. |
+| [`read device`](./devices-api/read-device.md)                        | Reads information about a specific device. |
+| [`read devices`](./devices-api/read-devices.md)                      | Reads a list of all available devices.  |
+| [`register event listener`](./devices-api/register.md)               | Registers for a device-specific event.  |
+| [`send event`](./devices-api/send-event.md)                          | Sends a device-specific event.          |
+| [`register global event listener`](./devices-api/register-global.md) | Registers for a global device event.    |
+| [`send global event`](./devices-api/send-global-event.md) | Sends a global device event. |
+
+## General API <a name="general-api"></a>
+
+| Name | Description |
+| ----------------------------------- | ---------------------------------------------------- |
+| [`status`](./general-api/status.md) | Returns information on how the server is configured. |
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/create.md b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/create.md
new file mode 100644
index 0000000000..5ed1355ad2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/create.md
@@ -0,0 +1,33 @@
+# `create` - [Devices API](../README.md#devices-api)
+
+The `create` method of the devices API registers a new device to remotely
+start test sessions on. The device is automatically unregistered if it does
+not register for an [event](./register.md) for more than a minute.
+
+## HTTP Request
+
+`POST /api/devices`
+
+## Response Payload
+
+```json
+{
+ "token": "<String>"
+}
+```
+
+- **token** specifies the handle to reference the registered device by.
+
+## Example
+
+**Request:**
+
+`POST /api/devices`
+
+**Response:**
+
+```json
+{
+ "token": "e5f0b92e-8309-11ea-a1b1-0021ccd76152"
+}
+```
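+
+For illustration, the following is a minimal Python sketch of how a DUT could
+register itself via this endpoint. It assumes the `requests` library and a
+server reachable under `http://web-platform.test:8000/_wave`; the base URL is
+deployment-dependent and not part of this specification.
+
+```python
+import requests
+
+# Assumed base URL; adjust to how the WAVE server is deployed.
+WAVE_BASE = "http://web-platform.test:8000/_wave"
+
+# Register a new device; the response body contains the device token.
+response = requests.post(f"{WAVE_BASE}/api/devices")
+response.raise_for_status()
+device_token = response.json()["token"]
+print("Registered device:", device_token)
+```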
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/event-types.md b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/event-types.md
new file mode 100644
index 0000000000..ead1f0695b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/event-types.md
@@ -0,0 +1,37 @@
+# Device Event Types - [Devices API](../README.md#devices-api)
+
+Device events are events that are triggered by actions related to devices.
+This document specifies what possible events can occur and what they mean.
+
+## Device specific <a name="device-specific"></a>
+
+Device specific events are always related to a specific device, referenced by
+its token.
+
+### Start session
+
+**Type identifier**: `start_session`
+**Payload**:
+```json
+{
+ "session_token": "<String>"
+}
+```
+**Description**: Triggered by a companion device, this event starts a
+pre-configured session on the registered device.
+
+## Global <a name="global"></a>
+
+Global device events have no special relation to any device.
+
+### Device added
+
+**Type identifier**: `device_added`
+**Payload**: Same as response of [`read device`](./read-device.md) method.
+**Description**: This event is triggered once a new device registers.
+
+### Device removed
+
+**Type identifier**: `device_removed`
+**Payload**: Same as response of [`read device`](./read-device.md) method.
+**Description**: This event is triggered once a device unregisters.
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/read-device.md b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/read-device.md
new file mode 100644
index 0000000000..9762b17e71
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/read-device.md
@@ -0,0 +1,41 @@
+# `read device` - [Devices API](../README.md#devices-api)
+
+The `read device` method of the devices API fetches available information regarding a
+specific device.
+
+## HTTP Request
+
+`GET /api/devices/<device_token>`
+
+## Response Payload
+
+```json
+{
+ "token": "<String>",
+ "user_agent": "<String>",
+ "last_active": "<String>",
+ "name": "<String>"
+}
+```
+
+- **token** is the unique identifier of the device.
+- **user_agent** is the user agent of the request the device was registered with.
+- **last_active** defines the point in time the device was last active. Expressed as ISO 8601 date and time format.
+- **name** is the name assigned to the device based on its user agent.
+
+## Example
+
+**Request:**
+
+`GET /api/devices/1d9f5d30-830f-11ea-8dcb-0021ccd76152`
+
+**Response:**
+
+```json
+{
+ "token": "1d9f5d30-830f-11ea-8dcb-0021ccd76152",
+ "user_agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36",
+ "last_active": 1587391153295,
+ "name": "Chrome 81.0.4044"
+}
+```
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/read-devices.md b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/read-devices.md
new file mode 100644
index 0000000000..519b06c610
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/read-devices.md
@@ -0,0 +1,47 @@
+# `read devices` - [Devices API](../README.md#devices-api)
+
+The `read devices` method of the devices API fetches a list of all registered
+devices.
+
+## HTTP Request
+
+`GET /api/devices`
+
+## Response Payload
+
+```json
+[
+ {
+ "token": "<String>",
+ "user_agent": "<String>",
+ "last_active": "<String>",
+ "name": "<String>"
+ },
+ ...
+]
+```
+
+- **token** is the unique identifier of the device.
+- **user_agent** is the user agent of the request the device was registered with.
+- **last_active** defines the point in time the device was last active. Expressed as ISO 8601 date and time format.
+- **name** is the name assigned to the device based on its user agent.
+
+## Example
+
+**Request:**
+
+`GET /api/devices`
+
+**Response:**
+
+```json
+[
+ {
+ "token": "1d9f5d30-830f-11ea-8dcb-0021ccd76152",
+ "user_agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36",
+ "last_active": 1587391153295,
+ "name": "Chrome 81.0.4044"
+ },
+ ...
+]
+```
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/register-global.md b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/register-global.md
new file mode 100644
index 0000000000..fd3a2ad998
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/register-global.md
@@ -0,0 +1,54 @@
+# `register global event listener` - [Devices API](../README.md#devices-api)
+
+The `register global event listener` method of the devices API notifies a
+registered listener of global device events. It uses HTTP long polling to
+send events to the listener in real time, so after receiving an event the
+client has to re-establish the connection to receive further events.
+
+## HTTP Request
+
+`GET /api/devices/events`
+
+## Query Parameters
+
+| Parameter | Description | Example |
+| ---------------- | ---------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------- |
+| `device_token` | The token of the device which performed the request. (Optional) Lets the server know the registered device is still active. | `device_token=7dafeec0-c351-11e9-84c5-3d1ede2e7d2e` |
+
+## Response Payload
+
+```json
+{
+ "type": "<String>",
+ "data": "<Any>"
+}
+```
+
+- **type** defines what type of event has been triggered.
+- **data** contains the event specific payload.
+
+## Event Types
+
+See [global events](./event-types.md#global)
+
+## Example
+
+**Request:**
+
+`GET /api/devices/events`
+
+**Response:**
+
+```json
+{
+ "type": "device_added",
+ "data": {
+ "token": "1d9f5d30-830f-11ea-8dcb-0021ccd76152",
+ "user_agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36",
+ "last_active": 1587391153295,
+ "name": "Chrome 81.0.4044"
+ }
+}
+```
+
+
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/register.md b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/register.md
new file mode 100644
index 0000000000..adee3b4742
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/register.md
@@ -0,0 +1,52 @@
+# `register event listener` - [Devices API](../README.md#devices-api)
+
+The `register event listener` method of the devices API notifies a registered
+listener of device-specific events. It uses HTTP long polling to send events
+to the listener in real time, so after receiving an event the client has to
+re-establish the connection to receive further events.
+
+## HTTP Request
+
+`GET /api/devices/<device_token>/events`
+
+## Query Parameters
+
+| Parameter | Description | Example |
+| ---------------- | ---------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------- |
+| `device_token` | The token of the device which performed the request. (Optional) Lets the server know the registered device is still active. | `device_token=7dafeec0-c351-11e9-84c5-3d1ede2e7d2e` |
+
+## Response Payload
+
+```json
+{
+ "type": "<String>",
+ "data": "<Any>"
+}
+```
+
+- **type** defines what type of event has been triggered.
+- **data** contains the event specific payload.
+
+## Event Types
+
+### Start session
+
+See [device specific events](./event-types.md#device-specific)
+
+## Example
+
+**Request:**
+
+`GET /api/devices/1d9f5d30-830f-11ea-8dcb-0021ccd76152/events`
+
+**Response:**
+
+```json
+{
+ "type": "start_session",
+ "data": {
+ "session_token": "974c84e0-c35d-11e9-8f8d-47bb5bb0037d"
+ }
+}
+```
+
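+As a usage illustration, here is a minimal Python long-polling loop for
+device-specific events. It assumes the `requests` library, a deployment base
+URL of `http://web-platform.test:8000/_wave`, a previously registered device
+token, and that an empty response body means no event occurred within the
+timeout; these are assumptions, not guarantees of this specification.
+
+```python
+import requests
+
+WAVE_BASE = "http://web-platform.test:8000/_wave"       # assumed deployment URL
+device_token = "1d9f5d30-830f-11ea-8dcb-0021ccd76152"   # token from `create`
+
+while True:
+    try:
+        # The connection stays open until an event occurs; passing the device
+        # token as a query parameter keeps the registration alive.
+        response = requests.get(
+            f"{WAVE_BASE}/api/devices/{device_token}/events",
+            params={"device_token": device_token},
+            timeout=120,
+        )
+    except requests.exceptions.Timeout:
+        continue  # no event arrived in time; immediately poll again
+    response.raise_for_status()
+    if not response.text:
+        continue  # assumed: empty body means no event; poll again
+    event = response.json()
+    if event["type"] == "start_session":
+        print("Start session:", event["data"]["session_token"])
+        break
+```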
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/send-event.md b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/send-event.md
new file mode 100644
index 0000000000..0ec74aec5c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/send-event.md
@@ -0,0 +1,43 @@
+# `send event` - [Devices API](../README.md#devices-api)
+
+The `send event` method of the devices API enables sending an event to
+listeners of a specific device's events.
+
+## HTTP Request
+
+`POST /api/devices/<device_token>/events`
+
+## Request Payload
+
+```json
+{
+ "type": "<String>",
+ "data": "<Any>"
+}
+```
+
+- **type** defines what type of event has been triggered.
+- **data** contains the event specific payload.
+
+## Event Types
+
+See [device specific events](./event-types.md#device-specific)
+
+## Example
+
+**Request:**
+
+`POST /api/devices/1d9f5d30-830f-11ea-8dcb-0021ccd76152/events`
+
+```json
+{
+ "type": "start_session",
+ "data": {
+ "session_token": "974c84e0-c35d-11e9-8f8d-47bb5bb0037d"
+ }
+}
+```
+
+**Response:**
+
+`200 OK`
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/send-global-event.md b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/send-global-event.md
new file mode 100644
index 0000000000..aa5238dc7f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/devices-api/send-global-event.md
@@ -0,0 +1,46 @@
+# `send global event` - [Devices API](../README.md#devices-api)
+
+The `send global event` method of the devices API enables sending an event to
+listeners of global device events.
+
+## HTTP Request
+
+`POST /api/devices/events`
+
+## Request Payload
+
+```json
+{
+ "type": "<String>",
+ "data": "<Any>"
+}
+```
+
+- **type** defines what type of event has been triggered.
+- **data** contains the event specific payload.
+
+## Event Types
+
+See [global events](./event-types.md#global)
+
+## Example
+
+**Request:**
+
+`POST /api/devices/1d9f5d30-830f-11ea-8dcb-0021ccd76152/events`
+
+```json
+{
+ "type": "device_added",
+ "data": {
+ "token": "1d9f5d30-830f-11ea-8dcb-0021ccd76152",
+ "user_agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36",
+ "last_active": 1587391153295,
+ "name": "Chrome 81.0.4044"
+ }
+}
+```
+
+**Response:**
+
+`200 OK`
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/general-api/status.md b/testing/web-platform/tests/tools/wave/docs/rest-api/general-api/status.md
new file mode 100644
index 0000000000..8af510066a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/general-api/status.md
@@ -0,0 +1,41 @@
+# `status` - [General API](../README.md#general-api)
+
+The `status` method is used to ensure the server is reachable and to determine
+what features of different server APIs are enabled.
+
+## HTTP Request
+
+```
+GET /api/status
+```
+
+### Response
+
+```json
+{
+ "version_string": "String",
+ "import_results_enabled": "Boolean",
+ "reports_enabled": "Boolean",
+ "read_sessions_enabled": "Boolean"
+}
+```
+
+- **version_string**: The version of the server.
+- **import_results_enabled**: If true, the [`import result`](../results-api/import.md) endpoint is available.
+- **reports_enabled**: If true, the server will generate reports for completed APIs in a given test session.
+- **read_sessions_enabled**: If true, it is possible to list all sessions using the [`read sessions`](../sessions-api/read_sessions.md) endpoint of the sessions API.
+
+## Example
+
+```
+GET /api/status
+```
+
+```json
+{
+ "version_string": "v2.0.0",
+ "import_results_enabled": false,
+ "reports_enabled": true,
+ "read_sessions_enabled": false
+}
+```
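+
+For illustration, a minimal Python sketch that checks the server's feature
+flags before using optional endpoints. It assumes the `requests` library and a
+deployment base URL of `http://web-platform.test:8000/_wave`.
+
+```python
+import requests
+
+WAVE_BASE = "http://web-platform.test:8000/_wave"  # assumed deployment URL
+
+status = requests.get(f"{WAVE_BASE}/api/status").json()
+print("Server version:", status["version_string"])
+if not status["import_results_enabled"]:
+    print("Result import is disabled on this instance.")
+if status["reports_enabled"]:
+    print("Reports are generated for completed APIs.")
+```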
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/guides/README.md b/testing/web-platform/tests/tools/wave/docs/rest-api/guides/README.md
new file mode 100644
index 0000000000..c331bb8e66
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/guides/README.md
@@ -0,0 +1,10 @@
+# REST API Guides - [WAVE Test Runner](../../README.md)
+
+In addition to the [REST API documentation](../README.md), these guides
+provide a better understanding of how to properly use the different endpoints.
+
+[Starting sessions on a DUT using the devices API](./session-start-devices-api.md):
+How to register a DUT and start a pre-configured session on it.
+
+[Sending and receiving session events](./session-events.md):
+How to register for session events and push events to other listeners.
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/guides/session-events.md b/testing/web-platform/tests/tools/wave/docs/rest-api/guides/session-events.md
new file mode 100644
index 0000000000..462737c652
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/guides/session-events.md
@@ -0,0 +1,52 @@
+# Sending and receiving session events
+
+The session event endpoints allow clients to listen for events related to a
+specific session and to send new events to all registered listeners.
+
+See all [REST API Guides](./README.md).
+
+## Register for session specific events
+
+To receive the events of a session, simply perform a GET request to the
+desired session's events endpoint. For example, to receive any events related
+to the session with token `6fdbd1a0-c339-11e9-b775-6d49dd567772`:
+
+```
+GET /_wave/api/sessions/6fdbd1a0-c339-11e9-b775-6d49dd567772/events
+```
+
+```json
+{
+ "type": "status",
+ "data": "paused"
+}
+```
+
+As this endpoint makes use of HTTP long polling, you will not immediately
+receive a response. The connection stays open until either an event is
+triggered, in which case the server responds with that event's data, or the
+request times out, in which case an empty response is returned.
+
+Only one event can be received per request. To get any further events,
+additional requests are necessary. To not miss any events, it is important to
+perform the next request immediately after receiving a response.
+
+## Sending events
+
+To create a new event, simply send a POST request containing the event data to
+the desired session's events endpoint. For example, to trigger a new event for
+the session with token `6fdbd1a0-c339-11e9-b775-6d49dd567772`:
+
+```
+POST /_wave/api/sessions/6fdbd1a0-c339-11e9-b775-6d49dd567772/events
+```
+
+```json
+{
+ "type": "status",
+ "data": "paused"
+}
+```
+
+This will cause any client that currently has a connection open, as described
+in the preceding section, to receive the specified event.
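+
+To tie both halves together, here is a minimal Python sketch that pushes a
+status event and then waits for the next one. It assumes the `requests`
+library and the `/_wave` prefix shown in the examples above; the token and
+timeout values are placeholders.
+
+```python
+import requests
+
+WAVE_BASE = "http://web-platform.test:8000/_wave"  # assumed deployment URL
+token = "6fdbd1a0-c339-11e9-b775-6d49dd567772"     # example session token
+
+# Push a status event to all registered listeners of this session.
+requests.post(f"{WAVE_BASE}/api/sessions/{token}/events",
+              json={"type": "status", "data": "paused"})
+
+# Listen for the next event; re-issue the request after every response.
+try:
+    response = requests.get(f"{WAVE_BASE}/api/sessions/{token}/events",
+                            timeout=120)
+    if response.text:
+        print("Received event:", response.json())
+except requests.exceptions.Timeout:
+    pass  # no event arrived in time; poll again
+```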
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/guides/session-start-devices-api.md b/testing/web-platform/tests/tools/wave/docs/rest-api/guides/session-start-devices-api.md
new file mode 100644
index 0000000000..a376d31617
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/guides/session-start-devices-api.md
@@ -0,0 +1,60 @@
+# Starting sessions on a DUT using the devices API
+
+See all [REST API Guides](./README.md).
+
+## Connecting the DUT
+
+To start a session on a DUT using the devices API, first register the DUT
+with the test runner.
+
+```
+POST /api/devices
+```
+
+```json
+{
+ "token": "fa3fb226-98ef-11ea-a21d-0021ccd76152"
+}
+```
+
+Using the device token, you can listen for any events related to the device.
+
+```
+GET /api/devices/fa3fb226-98ef-11ea-a21d-0021ccd76152/events
+```
+
+Once an event occurs, the response to this call will contain the event data.
+If no event occurs before the request times out, you have to perform another call.
+
+```json
+{
+ "type": "start_session",
+ "data": {
+ "session_token": "98ed4b8e-98ed-11ea-9de7-0021ccd76152"
+ }
+}
+```
+
+Using this data you can start the session and get the URL to the next test to
+open.
+
+## Triggering the session start
+
+Once a device is registered and waiting for events, you can use the device's
+event channel to push an event that starts a session on it.
+
+```
+POST /api/devices/fa3fb226-98ef-11ea-a21d-0021ccd76152/events
+```
+
+```json
+{
+ "type": "start_session",
+ "data": {
+ "session_token": "98ed4b8e-98ed-11ea-9de7-0021ccd76152"
+ }
+}
+```
+
+The session related to the provided token can be a newly created one or may
+already be running.
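+
+For illustration, a minimal Python sketch of the companion side of this flow:
+pushing a `start_session` event into a registered device's event channel. It
+assumes the `requests` library and a base URL of
+`http://web-platform.test:8000/_wave`; the tokens are placeholders.
+
+```python
+import requests
+
+WAVE_BASE = "http://web-platform.test:8000/_wave"        # assumed deployment URL
+device_token = "fa3fb226-98ef-11ea-a21d-0021ccd76152"    # token from POST /api/devices
+session_token = "98ed4b8e-98ed-11ea-9de7-0021ccd76152"   # pending or running session
+
+# Trigger the session start on the DUT that is long-polling this channel.
+response = requests.post(
+    f"{WAVE_BASE}/api/devices/{device_token}/events",
+    json={"type": "start_session", "data": {"session_token": session_token}},
+)
+response.raise_for_status()
+```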
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/config.md b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/config.md
new file mode 100644
index 0000000000..a60485dadc
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/config.md
@@ -0,0 +1,34 @@
+# `config` - [Results API](../README.md#results-api)
+
+The `config` method is used to determine what features of the results API are
+enabled. Features that can be enabled or disabled are the
+[`import`](./import.md) method and the generation of reports, which the
+[`download` and `view`](./download.md) methods depend on.
+
+## HTTP Request
+
+`GET /api/results/config`
+
+## Response
+
+```json
+{
+ "import_enabled": "Boolean",
+ "reports_enabled": "Boolean"
+}
+```
+
+## Example
+
+**Request:**
+
+`GET /api/results/config`
+
+**Response:**
+
+```json
+{
+ "import_enabled": false,
+ "reports_enabled": true
+}
+```
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/create.md b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/create.md
new file mode 100644
index 0000000000..5839702eda
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/create.md
@@ -0,0 +1,65 @@
+# `create` - [Results API](../README.md#results-api)
+
+The `create` method of the results API creates a test result for a given test of a test session.
+
+## HTTP Request
+
+`POST /api/results/<session_token>`
+
+## Request Payload
+
+```json
+{
+ "test": "String",
+ "status": "Enum['OK', 'ERROR', 'TIMEOUT', 'NOT_RUN']",
+ "message": "String",
+ "subtests": [
+ {
+ "name": "String",
+ "status": "Enum['PASS', 'FAIL', 'TIMEOUT', 'NOT_RUN']",
+ "message": "String"
+ }
+ ]
+}
+```
+
+- **test** specifies the test to create the result for.
+- **status** specifies the overall status of the test. It does not represent a result, but rather whether the contained tests were executed as intended or whether something went wrong while running them.
+ - **OK**: All tests were executed without problems.
+ - **ERROR**: There was an error running one or multiple tests.
+ - **TIMEOUT**: It took too long for the tests to execute.
+ - **NOT_RUN**: This test was skipped.
+- **message** contains the reason for the overall status. If the status is `OK` the message should be `null`.
+- **subtests** contains the actual results of the tests executed in this file.
+ - **name**: The name of the test.
+ - **status**: The status of the result:
+ - **PASS**: The test was executed successfully.
+ - **FAIL**: The test did not meet at least one assertion.
+ - **TIMEOUT**: It took too long for this test to execute.
+ - **NOT_RUN**: This test was skipped.
+ - **message** contains the reason for the tests failure.
+
+## Example
+
+**Request:**
+
+`POST /api/results/d89bcc00-c35b-11e9-8bb7-9e3d7595d40c`
+
+```json
+{
+ "test": "/apiOne/test/one.html",
+ "status": "OK",
+ "message": null,
+ "subtests": [
+ {
+ "name": "Value should be X",
+ "status": "FAIL",
+ "message": "Expected value to be X but got Y"
+ }
+ ]
+}
+```
+
+**Response:**
+
+`200 OK` \ No newline at end of file
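+
+For illustration, a minimal Python sketch that posts the result shown above.
+It assumes the `requests` library and a deployment base URL of
+`http://web-platform.test:8000/_wave`; the session token is a placeholder.
+
+```python
+import requests
+
+WAVE_BASE = "http://web-platform.test:8000/_wave"        # assumed deployment URL
+session_token = "d89bcc00-c35b-11e9-8bb7-9e3d7595d40c"   # example session token
+
+result = {
+    "test": "/apiOne/test/one.html",
+    "status": "OK",
+    "message": None,  # serialized to JSON null
+    "subtests": [
+        {"name": "Value should be X", "status": "FAIL",
+         "message": "Expected value to be X but got Y"},
+    ],
+}
+response = requests.post(f"{WAVE_BASE}/api/results/{session_token}", json=result)
+response.raise_for_status()
+```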
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/download.md b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/download.md
new file mode 100644
index 0000000000..8cfa7e7531
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/download.md
@@ -0,0 +1,127 @@
+# Downloading and viewing results and reports - [Results API](../README.md#results-api)
+
+There are multiple methods to download or view reports generated by the WPT
+Report tool, or to download the plain JSON results structured as described in
+the [`create`](./create.md) method of the results API.
+
+## 1. `download`
+
+Downloads all results of a session as a ZIP file, which other instances of the
+WMAS Test Runner can import.
+
+### HTTP Request
+
+`GET /api/results/<session_token>/export`
+
+### Example
+
+`GET /api/results/f63700a0-c35f-11e9-af33-9e0d4c1f1370/export`
+
+## 2. `download api`
+
+Downloads all results of a single API in one json file.
+
+### HTTP Request
+
+`GET /api/results/<session_token>/<api_name>/json`
+
+### File Structure
+
+```json
+{
+ "results": [
+ {
+ "test": "String",
+ "status": "Enum['OK', 'ERROR', 'TIMEOUT', 'NOT_RUN']",
+ "message": "String",
+ "subtests": [
+ {
+ "name": "String",
+ "status": "Enum['PASS', 'FAIL', 'TIMEOUT', 'NOT_RUN']",
+ "message": "String"
+ }
+ ]
+ }
+ ]
+}
+```
+
+Results are structured as explained in the [`create`](./create.md) method of the results API.
+
+### Example
+
+`GET /api/results/f63700a0-c35f-11e9-af33-9e0d4c1f1370/apiOne/json`
+
+## 3. `download all apis`
+
+Downloads all results of all APIs of a session as a ZIP file containing one JSON file per API.
+
+### HTTP Request
+
+`GET /api/results/<session_token>/json`
+
+### File Structure
+
+There is one json file per API, each structured as described in the [`download api`](#download-api) method.
+
+### Example
+
+`GET /api/results/f63700a0-c35f-11e9-af33-9e0d4c1f1370/json`
+
+## 4. `view report`
+
+Returns a URL to a report of an API of a session, generated by the WPT Report tool, which is a static HTML page.
+
+### HTTP Request
+
+`GET /api/results/<session_token>/<api_name>/reporturl`
+
+### Example
+
+`GET /api/results/f63700a0-c35f-11e9-af33-9e0d4c1f1370/apiOne/reporturl`
+
+**Response**
+
+```json
+{
+ "uri": "/results/8f7f2fdc-62eb-11ea-8615-b8ca3a7b18ad/2dcontext/all.html"
+}
+```
+
+## 5. `view multi report`
+
+Returns a URL to a report of an API of multiple sessions, generated by the WPT Report tool, which is a static HTML page.
+
+### HTTP Request
+
+`GET /api/results/<api_name>/reporturl`
+
+### Query Parameters
+
+| Parameter | Description | Default | Example |
+| --------- | ------------------------------------------------------------ | ------- | -------------------------------- |
+| `tokens` | Comma separated list of tokens to create a multi report for. | none | `tokens=token_a,token_b,token_c` |
+
+### Example
+
+`GET /api/results/apiOne/reporturl?tokens=8f7f2fdc-62eb-11ea-8615-b8ca3a7b18ad,990b4734-62eb-11ea-a9a5-b8ca3a7b18ad`
+
+**Response**
+
+```json
+{
+ "uri": "/results/comparison-8f7f2fdc-990b473401488e04/reporturl/all.html"
+}
+```
+
+## 6. `download overview`
+
+Downloads a ZIP file containing an overview of the results of all APIs of a session as a static HTML page.
+
+### HTTP Request
+
+`GET /api/results/<session_token>/overview`
+
+### Example
+
+`GET /api/results/f63700a0-c35f-11e9-af33-9e0d4c1f1370/overview`
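+
+For illustration, a minimal Python sketch that downloads the full session
+export and stores it for import into another instance. It assumes the
+`requests` library and a base URL of `http://web-platform.test:8000/_wave`;
+the session token is a placeholder.
+
+```python
+import requests
+
+WAVE_BASE = "http://web-platform.test:8000/_wave"        # assumed deployment URL
+session_token = "f63700a0-c35f-11e9-af33-9e0d4c1f1370"   # example session token
+
+# Download the session export ZIP and write it to disk.
+response = requests.get(f"{WAVE_BASE}/api/results/{session_token}/export")
+response.raise_for_status()
+with open(f"{session_token}.zip", "wb") as export_file:
+    export_file.write(response.content)
+```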
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/import.md b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/import.md
new file mode 100644
index 0000000000..08c7de6ff7
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/import.md
@@ -0,0 +1,66 @@
+# Import results - [Results API](../README.md#results-api)
+
+If enabled, the WMAS Test Runner can import results exported by any other instance.
+
+## 1. Import session
+
+Upload results and create a new, finished session.
+
+### HTTP Request
+
+```
+POST /api/results/import
+```
+
+#### HTTP Response
+
+If successful, the server responds with the token of the imported session:
+
+```json
+{
+ "token": "String"
+}
+```
+
+However, if an error occurred, the server responds with the error message:
+
+```json
+{
+ "error": "String"
+}
+```
+
+## 2. Import API results
+
+Upload a results json file and overwrite results of a specific API.
+
+### HTTP Request
+
+```
+POST /api/results/<session_token>/<api_name>/json
+```
+
+### File structure
+
+```json
+{
+ "results": [
+ {
+ "test": "String",
+ "status": "Enum['OK', 'ERROR', 'TIMEOUT', 'NOT_RUN']",
+ "message": "String",
+ "subtests": [
+ {
+ "name": "String",
+ "status": "Enum['PASS', 'FAIL', 'TIMEOUT', 'NOT_RUN']",
+ "message": "String"
+ }
+ ]
+ }
+ ]
+}
+```
+
+### HTTP Response
+
+If successful, the server responds with status code 200.
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/read-compact.md b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/read-compact.md
new file mode 100644
index 0000000000..55b6f41d4b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/read-compact.md
@@ -0,0 +1,59 @@
+# `read compact` - [Results API](../README.md#results-api)
+
+The `read compact` method of the results API returns the number of passed, failed, timed out and not run tests per API of a session.
+
+## HTTP Request
+
+`GET /api/results/<session_token>/compact`
+
+## Response Payload
+
+```json
+{
+ "<api_name>": {
+ "pass": "Integer",
+ "fail": "Integer",
+ "timeout": "Integer",
+ "not_run": "Integer",
+ "total": "Integer",
+ "complete": "Integer"
+ }
+}
+```
+
+## Example
+
+**Request:**
+
+`GET /api/results/620bbf70-c35e-11e9-bf9c-742c02629054/compact`
+
+**Response:**
+
+```json
+{
+ "apiOne": {
+ "pass": 311,
+ "fail": 59,
+ "timeout": 23,
+ "not_run": 20,
+ "total": 481,
+ "complete": 413
+ },
+ "apiTwo": {
+ "pass": 548,
+ "fail": 129,
+ "timeout": 53,
+ "not_run": 36,
+ "total": 766,
+ "complete": 766
+ },
+ "apiThree": {
+ "pass": 349,
+ "fail": 45,
+ "timeout": 14,
+ "not_run": 9,
+ "total": 523,
+ "complete": 417
+ }
+}
+```
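+
+For illustration, a minimal Python sketch that fetches the compact results and
+prints a pass rate per API. It assumes the `requests` library and a base URL
+of `http://web-platform.test:8000/_wave`; the session token is a placeholder.
+
+```python
+import requests
+
+WAVE_BASE = "http://web-platform.test:8000/_wave"        # assumed deployment URL
+session_token = "620bbf70-c35e-11e9-bf9c-742c02629054"   # example session token
+
+compact = requests.get(f"{WAVE_BASE}/api/results/{session_token}/compact").json()
+for api, counts in compact.items():
+    done = counts["complete"]
+    rate = counts["pass"] / done * 100 if done else 0.0
+    print(f"{api}: {counts['pass']}/{done} passed ({rate:.1f}%)")
+```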
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/read.md b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/read.md
new file mode 100644
index 0000000000..66894e69ee
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/read.md
@@ -0,0 +1,63 @@
+# `read` - [Results API](../README.md#results-api)
+
+The `read` method of the results API returns all available results of a session, grouped by API. It is possible to filter the results to return by test directory or file.
+
+## HTTP Request
+
+`GET /api/results/<session_token>`
+
+## Query Parameters
+
+| Parameter | Description | Default | Example |
+| --------- | ------------------------------ | ------- | --------------------------- |
+| `path` | Path of test directory or file | `/` | `path=/apiOne/test/sub/dir` |
+
+## Response Payload
+
+```json
+{
+ "<api_name>": [
+ {
+ "test": "String",
+ "status": "Enum['OK', 'ERROR', 'TIMEOUT', 'NOT_RUN']",
+ "message": "String",
+ "subtests": [
+ {
+ "name": "String",
+ "status": "Enum['PASS', 'FAIL', 'TIMEOUT', 'NOT_RUN']",
+ "message": "String"
+ }
+ ]
+ }
+ ]
+}
+```
+
+The results are returned as arrays grouped by their respective APIs. The structure of each result is the same as described in the [`create`](./create.md) method of the results API.
+
+## Example
+
+**Request:**
+
+`GET /api/results/974c84e0-c35d-11e9-8f8d-47bb5bb0037d?path=/apiOne/test/one.html`
+
+**Response:**
+
+```json
+{
+ "apiOne": [
+ {
+ "test": "/apiOne/test/one.html",
+ "status": "OK",
+ "message": null,
+ "subtests": [
+ {
+ "name": "Value should be X",
+ "status": "FAIL",
+ "message": "Expected value to be X but got Y"
+ }
+ ]
+ }
+ ]
+}
+```
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/view.md b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/view.md
new file mode 100644
index 0000000000..5b60d2ccf2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/results-api/view.md
@@ -0,0 +1,61 @@
+# Viewing Reports - [Results API](../README.md#results-api)
+
+It is possible to view the reports generated by the WPT Report tool directly in the browser using a version of the report that is hosted by the WAVE server. The methods listed here return URLs to those hosted reports.
+
+## 1. `view report`
+
+Returns a URL to a report for an API of a single session, generated by the WPT Report tool.
+
+### HTTP Request
+
+`GET /api/results/<session_token>/<api_name>/reporturl`
+
+### Response Payload
+
+```json
+{
+ "uri": "String"
+}
+```
+
+### Example
+
+**Request:**
+
+`GET /api/results/d9caaae0-c362-11e9-943f-eedb305f22f6/apiOne/reporturl`
+
+**Response:**
+
+```json
+{
+ "uri": "/results/d9caaae0-c362-11e9-943f-eedb305f22f6/apiOne/all.html"
+}
+```
+
+## 2. `view multi report`
+
+Returns a URL to a report for an API of multiple sessions, generated by the WPT Report tool.
+
+### HTTP Request
+
+`GET /api/results/<api_name>/reporturl`
+
+### Query Parameters
+
+| Parameter | Description | Default | Example |
+| --------- | ------------------------------------------------------------ | ------- | -------------------------------- |
+| `tokens` | Comma separated list of tokens to create a multi report for. | none | `tokens=token_a,token_b,token_c` |
+
+### Example
+
+**Request:**
+
+`GET /api/results/apiOne/reporturl?tokens=ce2dc080-c283-11e9-b4d6-e046513784c2,cd922410-c344-11e9-858f-9063f6dd878f`
+
+**Response:**
+
+```json
+{
+ "uri": "/results/comparison-cd922410-ce2dc080-1709d631/apiOne/all.html"
+}
+``` \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/control.md b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/control.md
new file mode 100644
index 0000000000..c439c118f8
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/control.md
@@ -0,0 +1,25 @@
+# Controlling Sessions - [Sessions API](../README.md#sessions-api)
+
+It is possible to control the execution of tests on the device under test using the session API's control methods. They change the status of a session and trigger the device under test to fetch a new URL to navigate to. Depending on the current status of the session, this can be a test or a static page showing information about the current status.
+
+## `start`
+The `start` method changes the status of a session from either `PENDING` or `PAUSED` to `RUNNING` and triggers the device under test to execute tests when resuming a paused session.
+
+### HTTP Request
+
+`POST /api/sessions/<session_token>/start`
+
+## `pause`
+The `pause` method changes the status of a session from `RUNNING` to `PAUSED` and pauses the execution of tests on the device under test.
+
+### HTTP Request
+
+`POST /api/sessions/<session_token>/pause`
+
+## `stop`
+The `stop` method finishes a session early by skipping all pending tests, causing the status to change to `ABORTED`. This action cannot be undone and can only be performed on sessions that are not already `ABORTED` or `COMPLETED`.
+
+### HTTP Request
+
+`POST /api/sessions/<session_token>/stop`
+
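+For illustration, a minimal Python helper wrapping the three control actions.
+It assumes the `requests` library and a base URL of
+`http://web-platform.test:8000/_wave`; the session token is a placeholder.
+
+```python
+import requests
+
+WAVE_BASE = "http://web-platform.test:8000/_wave"  # assumed deployment URL
+
+
+def control_session(session_token, action):
+    """Send one of the control actions: 'start', 'pause' or 'stop'."""
+    response = requests.post(f"{WAVE_BASE}/api/sessions/{session_token}/{action}")
+    response.raise_for_status()
+
+
+control_session("d9caaae0-c362-11e9-943f-eedb305f22f6", "pause")
+control_session("d9caaae0-c362-11e9-943f-eedb305f22f6", "start")
+```
+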
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/create.md b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/create.md
new file mode 100644
index 0000000000..86c0674afd
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/create.md
@@ -0,0 +1,103 @@
+# `create` - [Sessions API](../README.md#sessions-api)
+
+The `create` method of the sessions API creates a new session. If provided with a configuration, it creates the session accordingly; if no configuration is provided, it uses default values. It returns the session token of the newly created session, which is the unique identifier of a session. While a session has the status `PENDING`, it is possible to modify the configuration using the [`update`](./update.md) method of the sessions API. Since the session has to be created from the device under test, this is very helpful, as it allows the session to be configured from a second device.
+
+## HTTP Request
+
+`POST /api/sessions`
+
+## Request Payload
+
+```json
+{
+ "tests": {
+ "include": "Array<String>",
+ "exclude": "Array<String>"
+ },
+ "types": "Enum['automatic', 'manual']",
+ "timeouts": {
+ "automatic": "Integer",
+ "manual": "Integer",
+ "<test_path>": "Integer"
+ },
+ "reference_tokens": "Array<String>",
+ "labels": "Array<String>",
+ "type": "String"
+}
+```
+
+- **tests** specifies the tests of the session:
+ - **include** specifies what tests should be selected from all available tests. Can be a path to a test file or directory. Provided query parameters will be added to all matching test urls.
+ - **exclude** specifies what tests should be removed from the included tests. Can be a path to a test file or directory.
+- **types** what types of tests should be included. Possible values:
+ - **automatic** tests are tests that execute without user interaction.
+ - **manual** tests are tests that require user interaction.
+- **timeouts** specifies the time to wait for a test to finish in milliseconds.
+ - **automatic**: Sets the default timeout for all automatic tests.
+ - **manual**: Sets the default timeout for all manual tests.
+ - **custom test paths**: Set the timeout for a test file or directory by putting the path with all dots removed as the key.
+- **reference_tokens** specifies a set of completed sessions that is used to filter out all tests that have not passed in all those sessions from the session that is going to be created.
+- **labels** specifies the initial set of labels for the session.
+- **type** specifies the session type to trigger type specific behaviour like different control pages.
+
+### Default
+
+```json
+{
+ "tests": {
+ "include": ["/"],
+ "exclude": []
+ },
+ "types": ["automatic", "manual"],
+ "timeouts": {
+ "automatic": 60000,
+ "manual": 300000
+ },
+ "reference_tokens": [],
+ "labels": []
+}
+```
+
+## Response Payload
+
+If successful, the token of the new session is returned.
+
+```json
+{
+ "token": "String"
+}
+```
+
+## Example
+
+**Request:**
+
+`POST /api/sessions`
+
+```json
+{
+ "tests": {
+ "include": ["/apiOne", "/apiTwo/sub"],
+ "exclude": ["/apiOne/specials"]
+ },
+ "types": ["automatic"],
+ "timeouts": {
+ "automatic": 70000,
+ "/apiOne/example/dir": 30000,
+ "/apiOne/example/filehtml": 45000
+ },
+ "reference_tokens": [
+ "ce2dc080-c283-11e9-b4d6-e046513784c2",
+ "430f47d0-c283-11e9-8776-fcbc36b81035"
+ ],
+ "labels": ["label1", "label2", "label3"]
+}
+```
+
+**Response:**
+
+```json
+{
+ "token": "6fdbd1a0-c339-11e9-b775-6d49dd567772"
+}
+```
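+
+For illustration, a minimal Python sketch that creates a session with a custom
+configuration. It assumes the `requests` library and a base URL of
+`http://web-platform.test:8000/_wave`; note that, as described above, the
+request is normally performed by the device under test, so this sketch mainly
+illustrates the payload shape.
+
+```python
+import requests
+
+WAVE_BASE = "http://web-platform.test:8000/_wave"  # assumed deployment URL
+
+configuration = {
+    "tests": {"include": ["/apiOne"], "exclude": ["/apiOne/specials"]},
+    "types": ["automatic"],
+    "timeouts": {"automatic": 70000},
+    "reference_tokens": [],
+    "labels": ["nightly"],
+}
+response = requests.post(f"{WAVE_BASE}/api/sessions", json=configuration)
+response.raise_for_status()
+print("Created session:", response.json()["token"])
+```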
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/delete.md b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/delete.md
new file mode 100644
index 0000000000..f11d86035b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/delete.md
@@ -0,0 +1,25 @@
+# `delete` - [Sessions API](../README.md#sessions-api)
+
+The `delete` method of the sessions API is used to delete a session and the individual results associated with it. However, artifacts like generated reports or JSON files containing the results of a whole API remain, so URLs to those resources keep working.
+
+## HTTP Request
+
+`DELETE /api/sessions/<session_token>`
+
+## Example
+
+**Request:**
+
+`DELETE /api/sessions/1592b880-c339-11e9-b414-61af09c491b1`
+
+**Response:**
+
+`200 OK`
+
+**Request:**
+
+`GET /api/sessions/1592b880-c339-11e9-b414-61af09c491b1`
+
+**Response:**
+
+`404 NOT FOUND`
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/event-types.md b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/event-types.md
new file mode 100644
index 0000000000..455e0a122b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/event-types.md
@@ -0,0 +1,27 @@
+# Session Event Types - [Sessions API](../README.md#sessions-api)
+
+Session events are events that are triggered by actions related to sessions.
+The [`event`](./events.md) functions of the sessions API make use of these events.
+
+## Status change
+
+**Type identifier**: `status`
+**Payload**: `"<String>"`
+Possible Values: `paused`, `running`, `completed`, `aborted`
+**Description**: Triggered once the status of the session changes.
+
+## Resume
+
+**Type identifier**: `resume`
+**Payload**: `"<String>"`
+Contains the token of the session to resume.
+**Description**: Triggered when a specific session is supposed to be resumed.
+This will discard the current session and continue executing the session with
+the provided token.
+
+## Test Completed
+
+**Type identifier**: `test_completed`
+**Payload**: `"<String>"`
+Contains the test case that completed.
+**Description**: Triggered when the test runner received a result for a test.
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/events.md b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/events.md
new file mode 100644
index 0000000000..c1b693c88c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/events.md
@@ -0,0 +1,104 @@
+# `events` - [Sessions API](../README.md#sessions-api)
+
+Session events can be used to send messages related to a specific session for
+others to receive. This can include status updates or actions that running
+sessions react to.
+
+For possible events see [Session Event Types](./event-types.md)
+
+## 1. `listen events`
+
+Listen for session specific events by registering on the `events` endpoint using HTTP long polling.
+
+### HTTP Request
+
+```
+GET /api/sessions/<token>/events
+```
+
+### Query Parameters
+
+| Parameter | Description | Default | Example |
+| ------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------- | -------------- |
+| `last_event` | The number of the last received event. All events that are newer than `last_event` are returned immediately. If there are no newer events, connection stays open until a new event is triggered. | None | `last_event=5` |
+
+#### Response Payload
+
+```json
+[
+ {
+ "type": "String",
+ "data": "String",
+ "number": "Number"
+ },
+ ...
+]
+```
+
+- **type**: the type of event that occurred.
+- **data**: the actual payload of the event
+- **number**: the number of the event
+
+#### Example
+
+```
+GET /api/sessions/6fdbd1a0-c339-11e9-b775-6d49dd567772/events?last_event=8
+```
+
+```json
+[
+ {
+ "type": "status",
+ "data": "paused",
+ "number": 9
+ },
+ {
+ "type": "status",
+ "data": "running",
+ "number": 10
+ },
+ {
+ "type": "status",
+ "data": "paused",
+ "number": 11
+ },
+ {
+ "type": "status",
+ "data": "running",
+ "number": 12
+ }
+]
+```
+
+## 2. `push events`
+
+Push session specific events for any registered listeners to receive.
+
+### HTTP Request
+
+```
+POST /api/sessions/<token>/events
+```
+
+```json
+{
+ "type": "String",
+ "data": "String"
+}
+```
+
+- **type**: the type of event that occurred.
+- **data**: the actual payload of the event
+
+#### Example
+
+```
+POST /api/sessions/6fdbd1a0-c339-11e9-b775-6d49dd567772/events
+```
+
+```json
+{
+ "type": "status",
+ "data": "paused"
+}
+```
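+
+For illustration, a minimal Python listener that tracks `last_event` so no
+events are missed between polls. It assumes the `requests` library, a base URL
+of `http://web-platform.test:8000/_wave`, and that an empty response body
+means the request timed out without new events.
+
+```python
+import requests
+
+WAVE_BASE = "http://web-platform.test:8000/_wave"  # assumed deployment URL
+token = "6fdbd1a0-c339-11e9-b775-6d49dd567772"     # example session token
+last_event = 0
+
+while True:
+    try:
+        response = requests.get(f"{WAVE_BASE}/api/sessions/{token}/events",
+                                params={"last_event": last_event}, timeout=120)
+    except requests.exceptions.Timeout:
+        continue  # no new events in time; poll again
+    if not response.text:
+        continue  # assumed: empty body means no new events
+    for event in response.json():
+        print(event["number"], event["type"], event["data"])
+        last_event = max(last_event, event["number"])
+```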
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/find.md b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/find.md
new file mode 100644
index 0000000000..3ffc6f58c4
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/find.md
@@ -0,0 +1,29 @@
+# `find` - [Sessions API](../README.md#sessions-api)
+
+The `find` method of the sessions API searches for a session token using a provided token fragment, which is the beginning of a session token with at least 8 characters. Due to data protection, it is not possible to find multiple tokens using one fragment. If the server finds more than one session token, it returns none. In this case more characters need to be added to the fragment, until it matches only one session token.
+
+## HTTP Request
+
+`GET /api/sessions/<token_fragment>`
+
+## Response Payload
+
+```json
+{
+ "token": "String"
+}
+```
+
+### Example
+
+**Request:**
+
+`GET /api/sessions/afd4ecb0`
+
+**Response:**
+
+```json
+{
+ "token": "afd4ecb0-c339-11e9-b66c-eca76c2bea9c"
+}
+```
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/labels.md b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/labels.md
new file mode 100644
index 0000000000..00a8defd98
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/labels.md
@@ -0,0 +1,75 @@
+# `labels` - [Sessions API](../README.md#sessions-api)
+
+The `labels` methods of the sessions API allow for better organization of sessions.
+
+## Read labels
+
+Reads all labels of a session.
+
+### HTTP Request
+
+`GET /api/sessions/<token>/labels`
+
+### Response Payload
+
+```json
+"Array<String>"
+```
+
+#### Example
+
+**Request:**
+
+`GET /api/sessions/afd4ecb0-c339-11e9-b66c-eca76c2bea9c/labels`
+
+**Response:**
+
+```json
+["label1", "label2", "label3"]
+```
+
+## Update labels
+
+Update all labels of a session.
+
+### HTTP Request
+
+`PUT /api/sessions/<token>/labels`
+
+### Request Payload
+
+```json
+"Array<String>"
+```
+
+The array of labels provided in the request payload will replace all existing labels of the session.
+
+#### Example
+
+**Request:**
+
+`GET /api/sessions/afd4ecb0-c339-11e9-b66c-eca76c2bea9c/labels`
+
+**Response:**
+
+```json
+["label1", "label2", "label3"]
+```
+
+**Request:**
+
+`PUT /api/sessions/afd4ecb0-c339-11e9-b66c-eca76c2bea9c/labels`
+
+```json
+["label4", "label5"]
+```
+
+**Request:**
+
+`GET /api/sessions/afd4ecb0-c339-11e9-b66c-eca76c2bea9c/labels`
+
+**Response:**
+
+```json
+["label4", "label5"]
+```
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/read-public.md b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/read-public.md
new file mode 100644
index 0000000000..3e0e9089c4
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/read-public.md
@@ -0,0 +1,30 @@
+# `read public` - [Sessions API](../README.md#sessions-api)
+
+The `read public` method of the sessions API fetches a list of all sessions that are publicly available. It is not possible to delete those sessions using the user interface or the REST API. Currently there is no way to change the public state of a session using the API.
+
+## HTTP Request
+
+`GET /api/sessions/public`
+
+## Response Payload
+
+```json
+"Array<String>"
+```
+
+## Example
+
+**Request:**
+
+`GET /api/sessions/public`
+
+**Response:**
+
+```json
+[
+ "bb7aafa0-6a92-11e9-8ec2-04f58dad2e4f",
+ "caf823e0-6a92-11e9-b732-3188d0065ebc",
+ "a50c6db0-6a94-11e9-8d1b-e23fc4555885",
+ "b2924d20-6a93-11e9-98b4-a11fb92a6d1c"
+]
+```
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/read.md b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/read.md
new file mode 100644
index 0000000000..4ec9c3f63f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/read.md
@@ -0,0 +1,90 @@
+# `read session` - [Sessions API](../README.md#sessions-api)
+
+The `read` method of the sessions API fetches the configuration of a session, including values that cannot be set by the user but are generated by the server when the session is created.
+
+## HTTP Request
+
+`GET /api/sessions/<session_token>`
+
+## Response Payload
+
+```json
+{
+ "token": "String",
+ "tests": {
+ "include": "Array<String>",
+ "exclude": "Array<String>"
+ },
+ "types": "Enum['automatic', 'manual']",
+ "timeouts": {
+ "automatic": "Integer",
+ "manual": "Integer",
+ "<test_path>": "Integer"
+ },
+ "reference_tokens": "Array<String>",
+ "user_agent": "String",
+ "browser": {
+ "name": "String",
+ "version": "String"
+ },
+ "is_public": "Boolean",
+ "date_created": "String",
+ "labels": "Array<String>"
+}
+```
+
+- **token** is the unique identifier of the session.
+- **tests** specifies the tests of the session:
+ - **include** specifies what tests should be selected from all available tests. Can be a path to a test file or directory.
+ - **exclude** specifies what tests should be removed from the included tests. Can be a path to a test file or directory.
+- **types** what types of tests should be included. Possible values:
+ - **automatic** tests are tests that execute without user interaction.
+ - **manual** tests are tests that require user interaction.
+- **timeouts** specifies the time to wait for a test to finish in milliseconds.
+ - **automatic**: Sets the default timeout for all automatic tests.
+ - **manual**: Sets the default timeout for all manual tests.
+ - **custom test paths**: Set the timeout for a test file or directory by putting the path with all dots removed as the key.
+- **reference_tokens** specifies a set of completed sessions that is used to filter out all tests that have not passed in all those sessions from the session that is going to be created.
+- **user_agent** is the user agent string of the request that created the session. The request to create the session should be performed by the device under test.
+- **browser** holds information about the browser, parsed from the user agent.
+ - **name**: The name of the browser.
+ - **version**: The version numbers of the browser.
+- **is_public** defines whether or not the session is listed when fetching the list of public sessions using [`read public`](./read-public.md).
+- **date_created**: The date the session was created in ISO 8601 format.
+- **labels**: A list of the sessions labels.
+
+## Example
+
+**Request:**
+
+`GET /api/sessions/47a6fa50-c331-11e9-8709-a8eaa0ecfd0e`
+
+**Response:**
+
+```json
+{
+ "token": "47a6fa50-c331-11e9-8709-a8eaa0ecfd0e",
+ "tests": {
+ "include": ["/apiOne", "/apiTwo/sub"],
+ "exclude": ["/apiOne/specials"]
+ },
+ "types": ["automatic"],
+ "timeouts": {
+ "automatic": 70000,
+ "/apiOne/example/dir": 30000,
+ "/apiOne/example/filehtml": 45000
+ },
+ "reference_tokens": [
+ "ce2dc080-c283-11e9-b4d6-e046513784c2",
+ "430f47d0-c283-11e9-8776-fcbc36b81035"
+ ],
+ "user_agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/76.0.3809.100 Chrome/76.0.3809.100 Safari/537.36",
+ "browser": {
+ "name": "Chromium",
+ "version": "76"
+ },
+ "is_public": "false",
+ "date_created": "2020-05-25T11:37:07",
+ "labels": ["labelA", "labelB"]
+}
+```
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/read_sessions.md b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/read_sessions.md
new file mode 100644
index 0000000000..d29371f006
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/read_sessions.md
@@ -0,0 +1,123 @@
+# `read sessions` - [Sessions API](../README.md#sessions-api)
+
+The `read sessions` method of the sessions API fetches a list of the tokens of
+all available sessions, with the option to expand the returned data with the
+corresponding session configurations or statuses.
+
+## HTTP Request
+
+```
+GET /api/sessions
+```
+
+### Query Parameters
+
+| Parameter | Description | Default |
+| --------- | ---------------------------------------------------------------------------------------------------- | ------- |
+| `index` | At what index of all sessions to start the returned list. | `0` |
+| `count` | How many entries to return starting from `index`. | `10` |
+| `expand` | Comma separated list of relations from `_links`. Includes additional data in the `_embedded` object. | none |
+
+### Response Payload
+
+```
+200 OK
+Content-Type: application/json+hal
+```
+
+```json
+{
+ "_links": {
+ "<relation>": {
+ "href": "String"
+ }
+ ...
+ },
+ "_embedded": {
+ "<relation>": "Array<Any>"
+ ...
+ },
+ "items": "Array<String>"
+}
+```
+
+- **\_links** contains URLs to related data. For more, see the [HAL Specification](https://tools.ietf.org/html/draft-kelly-json-hal).
+- **\_embedded** contains additional content specified by the `expand` query parameter. For more, see the [HAL Specification](https://tools.ietf.org/html/draft-kelly-json-hal).
+- **items** contains the returned list of session tokens.
+
+## Example
+
+```
+GET /api/sessions?index=0&count=3&expand=status
+```
+
+```
+200 OK
+Content-Type: application/json+hal
+```
+
+```json
+{
+ "_links": {
+ "first": {
+ "href": "/_wave/api/sessions?index=0&count=3"
+ },
+ "last": {
+ "href": "/_wave/api/sessions?index=39&count=3"
+ },
+ "self": {
+ "href": "/_wave/api/sessions?index=0&count=3"
+ },
+ "next": {
+ "href": "/_wave/api/sessions?index=3&count=3"
+ },
+ "configuration": {
+ "href": "/_wave/api/sessions/{token}",
+ "templated": true
+ },
+ "status": {
+ "href": "/_wave/api/sessions/{token}/status",
+ "templated": true
+ }
+ },
+ "items": [
+ "13f80c84-9046-11ea-9c80-0021ccd76152",
+ "34db08e4-903b-11ea-89ce-0021ccd76152",
+ "a355f846-9465-11ea-ae9e-0021ccd76152"
+ ],
+ "_embedded": {
+ "status": [
+ {
+ "status": "completed",
+ "expiration_date": null,
+ "labels": [],
+ "date_finished": 1588844145897.157,
+ "token": "13f80c84-9046-11ea-9c80-0021ccd76152",
+ "date_created": null,
+ "date_started": 1588844127000,
+ "type": "wmas"
+ },
+ {
+ "status": "completed",
+ "expiration_date": null,
+ "labels": [],
+ "date_finished": 1588839822768.9568,
+ "token": "34db08e4-903b-11ea-89ce-0021ccd76152",
+ "date_created": null,
+ "date_started": 1588839522000,
+ "type": "wmas"
+ },
+ {
+ "status": "completed",
+ "expiration_date": null,
+ "labels": [],
+ "date_finished": 1589297485065.1802,
+ "token": "a355f846-9465-11ea-ae9e-0021ccd76152",
+ "date_created": null,
+ "date_started": 1589297484000,
+ "type": "wmas"
+ }
+ ]
+ }
+}
+```
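+
+For illustration, a minimal Python sketch that fetches one page of sessions
+with expanded statuses. It assumes the `requests` library, a base URL of
+`http://web-platform.test:8000/_wave`, and that the embedded `status` entries
+follow the order of `items`; treat both as assumptions.
+
+```python
+import requests
+
+WAVE_BASE = "http://web-platform.test:8000/_wave"  # assumed deployment URL
+
+page = requests.get(f"{WAVE_BASE}/api/sessions",
+                    params={"index": 0, "count": 10, "expand": "status"}).json()
+for token, status in zip(page["items"], page["_embedded"]["status"]):
+    print(token, status["status"])
+# The HAL "next" link (if present) points to the following page.
+print("Next page:", page["_links"].get("next", {}).get("href"))
+```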
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/status.md b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/status.md
new file mode 100644
index 0000000000..b2604a41df
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/status.md
@@ -0,0 +1,48 @@
+# `status` - [Sessions API](../README.md#sessions-api)
+
+The `status` method of the sessions API returns information about a session's current status and progress.
+
+## HTTP Request
+
+`GET /api/sessions/<session_token>/status`
+
+## Response Payload
+
+```json
+{
+ "token": "String",
+ "status": "Enum['pending', 'running', 'paused', 'completed', 'aborted']",
+ "date_started": "String",
+ "date_finished": "String",
+ "expiration_date": "String"
+}
+```
+
+- **token** contains the token of the session corresponding to this status.
+- **status** specifies the current status of the session:
+  - **pending**: The session was created and can receive updates, but cannot execute tests yet.
+  - **running**: The session currently executes tests.
+  - **paused**: The execution of tests in this session is currently paused.
+  - **completed**: All test files included in this session were executed and have a result.
+  - **aborted**: The session was finished before all tests were executed.
+- **date_started** contains the time the status changed from `PENDING` to `RUNNING` in ISO 8601.
+- **date_finished** contains the time the status changed to either `COMPLETED` or `ABORTED` in ISO 8601.
+- **expiration_date** contains the time at which the sessions will be deleted in ISO 8601.
+
+## Example
+
+**Request:**
+
+`GET /api/sessions/d9caaae0-c362-11e9-943f-eedb305f22f6/status`
+
+**Response:**
+
+```json
+{
+ "token": "d9caaae0-c362-11e9-943f-eedb305f22f6",
+ "status": "running",
+ "date_started": "2019-09-04T14:21:19",
+ "date_finished": null,
+ "expiration_date": "2019-09-04T14:26:19"
+}
+```
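+
+For illustration, a minimal Python sketch that polls the status until the
+session is finished. It assumes the `requests` library and a base URL of
+`http://web-platform.test:8000/_wave`; using session events instead of polling
+would avoid the fixed sleep interval.
+
+```python
+import time
+import requests
+
+WAVE_BASE = "http://web-platform.test:8000/_wave"        # assumed deployment URL
+session_token = "d9caaae0-c362-11e9-943f-eedb305f22f6"   # example session token
+
+while True:
+    status = requests.get(f"{WAVE_BASE}/api/sessions/{session_token}/status").json()
+    print("Session is", status["status"])
+    if status["status"] in ("completed", "aborted"):
+        break
+    time.sleep(10)  # poll again later
+```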
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/update.md b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/update.md
new file mode 100644
index 0000000000..3817c11593
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/sessions-api/update.md
@@ -0,0 +1,102 @@
+# `update` - [Sessions API](../README.md#sessions-api)
+
+The `update` method of the sessions API makes it possible to modify a session's configuration while its status is `PENDING`. This can be used to configure the session on a second device, rather than on the device under test.
+
+## HTTP Request
+
+`PUT /api/sessions/<session_token>`
+
+## Request Payload
+
+The request payload is the same as in the [`create`](./create.md) method of the sessions API. Keys that are an inherent part of the configuration stay the same if they are not specified in the `update` payload; all others are deleted if not included.
+
+## Example
+
+**Request:**
+
+`GET /api/sessions/47a6fa50-c331-11e9-8709-a8eaa0ecfd0e`
+
+**Response:**
+
+```json
+{
+ "token": "47a6fa50-c331-11e9-8709-a8eaa0ecfd0e",
+ "tests": {
+ "include": ["/apiOne", "/apiTwo/sub"],
+ "exclude": ["/apiOne/specials"]
+ },
+ "types": ["automatic"],
+ "timeouts": {
+ "automatic": 70000,
+ "/apiOne/example/dir": 30000,
+ "/apiOne/example/filehtml": 45000
+ },
+ "reference_tokens": [
+ "ce2dc080-c283-11e9-b4d6-e046513784c2",
+ "430f47d0-c283-11e9-8776-fcbc36b81035"
+ ],
+ "user_agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/76.0.3809.100 Chrome/76.0.3809.100 Safari/537.36",
+ "browser": {
+ "name": "Chromium",
+ "version": "76"
+ },
+ "is_public": "false",
+ "labels": []
+}
+```
+
+**Request:**
+
+`PUT /api/sessions/47a6fa50-c331-11e9-8709-a8eaa0ecfd0e`
+
+```json
+{
+ "tests": {
+ "include": ["/apiOne", "/apiThree"]
+ },
+ "timeouts": {
+ "automatic": 60000
+ },
+ "reference_tokens": [
+ "bb7aafa0-6a92-11e9-8ec2-04f58dad2e4f",
+ "a50c6db0-6a94-11e9-8d1b-e23fc4555885"
+ ],
+ "labels": ["label1", "label2"]
+}
+```
+
+**Response:**
+
+`200 OK`
+
+**Request:**
+
+`GET /api/sessions/47a6fa50-c331-11e9-8709-a8eaa0ecfd0e`
+
+**Response:**
+
+```json
+{
+ "token": "47a6fa50-c331-11e9-8709-a8eaa0ecfd0e",
+ "tests": {
+ "include": ["/apiOne", "/apiThree"],
+ "exclude": ["/apiOne/specials"]
+ },
+ "types": ["automatic"],
+ "timeouts": {
+ "automatic": 60000,
+ "manual": 360000
+ },
+ "reference_tokens": [
+ "bb7aafa0-6a92-11e9-8ec2-04f58dad2e4f",
+ "a50c6db0-6a94-11e9-8d1b-e23fc4555885"
+ ],
+ "user_agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/76.0.3809.100 Chrome/76.0.3809.100 Safari/537.36",
+ "browser": {
+ "name": "Chromium",
+ "version": "76"
+ },
+ "is_public": "false",
+ "labels": ["label1", "label2"]
+}
+```
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-all.md b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-all.md
new file mode 100644
index 0000000000..8480981b66
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-all.md
@@ -0,0 +1,43 @@
+# `read all` - [Tests API](../README.md#tests-api)
+
+The `read all` method of the tests API fetches all tests available for inclusion in a test session.
+
+## HTTP Request
+
+`GET /api/tests`
+
+## Response Payload
+
+```json
+{
+ "<api_name>": "Array<String>"
+}
+```
+
+## Example
+
+**Request:**
+
+`GET /api/tests`
+
+**Response:**
+
+```json
+{
+ "apiOne": [
+ "/apiOne/test/one.html",
+ "/apiOne/test/two.html",
+ "/apiOne/test/three.html"
+ ],
+ "apiTwo": [
+ "/apiTwo/test/one.html",
+ "/apiTwo/test/two.html",
+    "/apiTwo/test/three.html"
+ ],
+ "apiThree": [
+ "/apiThree/test/one.html",
+ "/apiThree/test/two.html",
+ "/apiThree/test/three.html"
+ ]
+}
+```
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-available-apis.md b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-available-apis.md
new file mode 100644
index 0000000000..d197c2c21a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-available-apis.md
@@ -0,0 +1,43 @@
+# `read available apis` - [Tests API](../README.md#tests-api)
+
+The `read available apis` method returns a list of all web APIs that the DUT
+can be tested for. It returns the human-readable API name, as well as the
+directory name under which all corresponding tests reside.
+
+## HTTP Request
+
+`GET /api/tests/apis`
+
+## Response Payload
+
+```json
+[
+ {
+ "path": "String",
+ "name": "String"
+ },
+ ...
+]
+```
+
+## Example
+
+**Request:**
+
+`GET /api/tests/apis`
+
+**Response:**
+
+```json
+[
+ {
+ "path": "/2dcontext",
+ "name": "2D Context"
+ },
+ {
+ "path": "/media-source",
+ "name": "Media Source"
+ },
+ ...
+]
+```
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-last-completed.md b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-last-completed.md
new file mode 100644
index 0000000000..1c1f87a726
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-last-completed.md
@@ -0,0 +1,47 @@
+# `read last completed` - [Tests API](../README.md#tests-api)
+
+The `read last completed` method of the tests API returns a list of test files that have most recently finished and have a result, grouped by the status of their respective results.
+
+## HTTP Request
+
+`GET /api/tests/<session_token>/last_completed`
+
+## Query Parameters
+
+| Parameter | Description                                                                                                         | Default | Example               |
+| --------- | ------------------------------------------------------------------------------------------------------------------- | ------- | --------------------- |
+| `count`   | Number of files per status to return                                                                                  | 5       | `count=5`             |
+| `status`  | The status the files' results must have. Comma-separated list. Possible values: `all`, `pass`, `fail` and `timeout`   | `all`   | `status=timeout,pass` |
+
+## Response Payload
+
+```json
+{
+ "pass": "Array<String>",
+ "fail": "Array<String>",
+ "timeout": "Array<String>"
+}
+```
+
+## Example
+
+**Request:**
+
+`GET /api/tests/7dafeec0-c351-11e9-84c5-3d1ede2e7d2e/last_completed?count=3&status=fail,timeout`
+
+**Response:**
+
+```json
+{
+ "fail": [
+ "/apiTwo/test/four.html",
+ "/apiOne/test/twentyfour.html",
+ "/apiOne/test/nineteen.html"
+ ],
+ "timeout": [
+ "/apiFive/test/eight.html",
+ "/apiThree/test/five.html",
+ "/apiThree/test/two.html"
+ ]
+}
+```
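+
+A minimal Python sketch of the request above, assuming the `requests` package and a placeholder base URL:
+
+```python
+import requests
+
+BASE = "http://web-platform.test:8000/_wave"  # placeholder; adjust to your deployment
+token = "7dafeec0-c351-11e9-84c5-3d1ede2e7d2e"
+
+# Fetch the three most recently completed files per requested status.
+params = {"count": 3, "status": "fail,timeout"}
+last_completed = requests.get(
+    f"{BASE}/api/tests/{token}/last_completed", params=params
+).json()
+for status, files in last_completed.items():
+    print(status, files)
+```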
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-malfunctioning.md b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-malfunctioning.md
new file mode 100644
index 0000000000..755b2f7897
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-malfunctioning.md
@@ -0,0 +1,30 @@
+# `read malfunctioning` - [Tests API](../README.md#tests-api)
+
+The `read malfunctioning` method of the tests API returns a list of test files that were flagged as not working properly in a specific session. This is useful to [add them to the exclude list](../../usage/excluding-tests.md) of further test sessions.
+
+## HTTP Request
+
+`GET /api/tests/<session_token>/malfunctioning`
+
+## Response Payload
+
+```json
+"Array<String>"
+```
+
+## Example
+
+**Request:**
+
+`GET /api/tests/7dafeec0-c351-11e9-84c5-3d1ede2e7d2e/malfunctioning`
+
+**Response:**
+
+```json
+[
+ "/apiOne/test/one.html",
+ "/apiOne/test/five.html",
+ "/apiThree/test/two.html",
+ "/apiThree/test/twenty.html"
+]
+```
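+
+A minimal Python sketch of reading this list, assuming the `requests` package and a placeholder base URL:
+
+```python
+import requests
+
+BASE = "http://web-platform.test:8000/_wave"  # placeholder; adjust to your deployment
+token = "7dafeec0-c351-11e9-84c5-3d1ede2e7d2e"
+
+malfunctioning = requests.get(f"{BASE}/api/tests/{token}/malfunctioning").json()
+print("\n".join(malfunctioning))
+```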
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-next.md b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-next.md
new file mode 100644
index 0000000000..c10ba81a12
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-next.md
@@ -0,0 +1,29 @@
+# `read next` - [Tests API](../README.md#tests-api)
+
+The `read next` method of the tests API returns the next test of a test session that is due to be executed. If the session's status is not `RUNNING`, it returns a static page containing information about the session and its current status.
+
+## HTTP Request
+
+`GET /api/tests/<session_token>/next`
+
+## Response Payload
+
+```json
+{
+ "next_test": "String"
+}
+```
+
+## Example
+
+**Request:**
+
+`GET /api/tests/d6667670-c350-11e9-b504-4ac471cdd99d/next`
+
+**Response:**
+
+```json
+{
+ "next_test": "http://web-platform.test:8000/apiOne/test/one.html?&token=d6667670-c350-11e9-b504-4ac471cdd99d&timeout=60000"
+}
+```
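+
+A minimal Python sketch of fetching the next test URL, assuming the `requests` package and a placeholder base URL:
+
+```python
+import requests
+
+BASE = "http://web-platform.test:8000/_wave"  # placeholder; adjust to your deployment
+token = "d6667670-c350-11e9-b504-4ac471cdd99d"
+
+next_test = requests.get(f"{BASE}/api/tests/{token}/next").json()["next_test"]
+print(next_test)  # URL to load in the device under test
+```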
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-session.md b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-session.md
new file mode 100644
index 0000000000..59344d5e68
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/read-session.md
@@ -0,0 +1,61 @@
+# `read session` - [Tests API](../README.md#tests-api)
+
+The `read session` method of the tests API fetches all tests contained in a test session grouped by their status.
+
+## HTTP Request
+
+`GET /api/tests/<session_token>`
+
+## Response Payload
+
+```json
+{
+ "token": "String",
+ "pending_tests": {
+ "<api_name>": "Array<String>"
+ },
+ "running_tests": {
+ "<api_name>": "Array<String>"
+ },
+ "completed_tests": {
+ "<api_name>": "Array<String>"
+ }
+}
+```
+
+- **pending_tests** are tests that have yet to be executed.
+- **running_tests** are tests that are currently being executed by the device under test. Although only one test is executed at a time, tests that time out or fail to send a result may still be waiting for their timeout to occur; in that case this list contains multiple tests.
+- **completed_tests** are tests that are finished and have a result.
+
+## Example
+
+**Request:**
+
+`GET /api/tests/cd922410-c344-11e9-858f-9063f6dd878f`
+
+**Response:**
+
+```json
+{
+ "token": "cd922410-c344-11e9-858f-9063f6dd878f",
+ "pending_tests": {
+ "apiTwo": ["/apiTwo/test/three.html"],
+ "apiThree": [
+ "/apiThree/test/one.html",
+ "/apiThree/test/two.html",
+ "/apiThree/test/three.html"
+ ]
+ },
+ "running_tests": {
+ "apiTwo": ["/apiTwo/test/two.html"]
+ },
+ "completed_tests": {
+ "apiOne": [
+ "/apiOne/test/one.html",
+ "/apiOne/test/two.html",
+ "/apiOne/test/three.html"
+ ],
+ "apiTwo": ["/apiTwo/test/one.html"]
+ }
+}
+```
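+
+A minimal Python sketch that summarizes a session's progress from this endpoint, assuming the `requests` package and a placeholder base URL:
+
+```python
+import requests
+
+BASE = "http://web-platform.test:8000/_wave"  # placeholder; adjust to your deployment
+token = "cd922410-c344-11e9-858f-9063f6dd878f"
+
+tests = requests.get(f"{BASE}/api/tests/{token}").json()
+for group in ("pending_tests", "running_tests", "completed_tests"):
+    count = sum(len(files) for files in tests[group].values())
+    print(f"{group}: {count} test files")
+```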
diff --git a/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/update-malfunctioning.md b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/update-malfunctioning.md
new file mode 100644
index 0000000000..327d1c14d1
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/rest-api/tests-api/update-malfunctioning.md
@@ -0,0 +1,56 @@
+# `update malfunctioning` - [Tests API](../README.md#tests-api)
+
+The `update malfunctioning` method of the tests API sets the list of test files that are flagged as not working properly in a specific session. It replaces the existing list with the newly provided list.
+
+## HTTP Request
+
+`PUT /api/tests/<session_token>/malfunctioning`
+
+## Request Payload
+
+```json
+"Array<String>"
+```
+
+## Example
+
+**Request:**
+
+`GET /api/tests/7dafeec0-c351-11e9-84c5-3d1ede2e7d2e/malfunctioning`
+
+**Response:**
+
+```json
+[
+ "/apiOne/test/one.html",
+ "/apiOne/test/five.html",
+ "/apiThree/test/two.html",
+ "/apiThree/test/twenty.html"
+]
+```
+
+**Request:**
+
+`PUT /api/tests/7dafeec0-c351-11e9-84c5-3d1ede2e7d2e/malfunctioning`
+
+```json
+[
+ "/apiOne/test/three.html",
+ "/apiOne/test/eight.html",
+ "/apiThree/test/one.html"
+]
+```
+
+**Request:**
+
+`GET /api/tests/7dafeec0-c351-11e9-84c5-3d1ede2e7d2e/malfunctioning`
+
+**Response:**
+
+```json
+[
+ "/apiOne/test/three.html",
+ "/apiOne/test/eight.html",
+ "/apiThree/test/one.html"
+]
+```
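+
+A minimal Python sketch of replacing the malfunctioning list, assuming the `requests` package and a placeholder base URL:
+
+```python
+import requests
+
+BASE = "http://web-platform.test:8000/_wave"  # placeholder; adjust to your deployment
+token = "7dafeec0-c351-11e9-84c5-3d1ede2e7d2e"
+
+# Replace the session's malfunctioning list with a new one.
+new_list = [
+    "/apiOne/test/three.html",
+    "/apiOne/test/eight.html",
+    "/apiThree/test/one.html",
+]
+response = requests.put(f"{BASE}/api/tests/{token}/malfunctioning", json=new_list)
+response.raise_for_status()
+```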
diff --git a/testing/web-platform/tests/tools/wave/docs/usage/usage.md b/testing/web-platform/tests/tools/wave/docs/usage/usage.md
new file mode 100644
index 0000000000..50a99e0d5a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/docs/usage/usage.md
@@ -0,0 +1,231 @@
+# Usage Guide - [WAVE Test Runner](../README.md)
+
+As of WAVE Test Runner v1.0.0, all files and REST API endpoints are served under
+a configurable namespace; the default, `/_wave/`, is used throughout this usage
+guide.
+
+This document explains the usage with screenshots taken in the context of the
+WMAS project; however, the practices apply to other contexts as well.
+
+## Contents
+
+1. [Creating test sessions](#1-creating-test-sessions)
+ 1. [The landing page](#11-the-landing-page)
+ 2. [Configuring a new session](#12-configuring-a-new-session)
+ 3. [Exclude tests](#13-exclude-tests)
+ 1. [Manually specify tests to exclude](#131-manually-specify-tests-to-exclude)
+ 2. [Use a session's malfunctioning list to add tests to exclude](#132-use-a-sessions-malfunctioning-list-to-add-tests-to-exclude)
+ 3. [Use a previous session's exclude list to add tests to exclude](#133-use-a-previous-sessions-exclude-list-to-add-tests-to-exclude)
+2. [Resuming test sessions](#2-resuming-test-sessions)
+ 1. [Using the webinterface](#21-using-the-webinterface)
+ 2. [Using a URL](#22-using-a-url)
+3. [Monitoring test sessions](#3-monitoring-test-sessions)
+4. [Managing test sessions](#4-managing-test-sessions)
+
+# 1. Creating test sessions
+
+Test sessions hold information about one test run on a particular device, such as its current status.
+Each session is identified by a UUIDv1 token string, which is used to retrieve this information or to perform actions on the session.
+Each new session is configured using several parameters before the run starts.
+
+## 1.1 The landing page
+
+Every new session is created from the landing page.
+It is recommended to create the new session from the device that is tested, as the user agent is part of the displayed information, along with the browser name and version, which are parsed from it.
+However, this does not influence the execution of tests or the creation of test results.
+To create a new session, open the landing page on the URI path `/_wave/index.html`.
+
+![landing_page]
+
+The landing page is divided into two sections, one to create a new session and one to resume a session.
+As soon as the landing page is opened, a new test session is created.
+Its token is displayed next to the QR-Code on the right, along with the expiration date.
+As the session was created automatically, it gets removed automatically once it expires.
+However, if you start the session, the expiration date is removed and the session remains available until you delete it.
+
+## 1.2 Configuring a new session
+
+To configure and start the session, either click on "Configure Session" or scan the QR-Code.
+In most cases it is recommended to scan the QR-Code, as it does not require any interaction with the landing page on the DUT.
+
+![configuration_page]
+
+In the configuration screen you can set parameters for the new session and start it.
+At the top, the session's token and expiration date are displayed. Next, there is the "Labels" option, which allows adding any number of labels to the session, helping to better organize sessions and to apply filters while searching.
+Labels can be added and modified at any point in the future.
+Next, there is the API selection, which defines the set of APIs to test in the new session. To exclude specific tests or subdirectories of the selected APIs, there is the "Excluded Tests" option right below it. Here you can specify which tests to exclude in three distinct ways. (More details in [1.3 Exclude tests](#13-exclude-tests))
+
+![configuration_page_bottom]
+
+With the "Test Types" option you specify what types of test should be included into the session: in contrast to automatic tests, manual tests require user interaction to execute properly.
+The "Reference Browsers" option lets you select browsers that are used to further filter the set of tests included in the session.
+Only tests that have passed the reference test session in all selected browsers are included.
+The reference browsers represent the status of implementation of all WAVE APIs in modern desktop browsers, at about the time the WAVE specification was published.
+To start the session, press "Start Session". Note that the landing page has to stay open, as the tests are executed in the same window.
+
+[To the top](#usage-guide---wave-test-runner)
+
+## 1.3 Exclude tests
+
+For fine-grained control over which test cases are executed, you can provide a list of test cases to omit from the run when configuring a session.
+
+### 1.3.1 Manually specify tests to exclude
+
+To add tests to exclude by providing a plain text list, click on "Add Raw" in the "Excluded Tests" setting.
+This opens an input field where you can enter multiple full paths to test files or directories.
+
+![Exclude List Add Raw][configuration_page_add_raw]
+
+Each line is interpreted as a path that excludes either a single test or a group of tests.
+All tests whose paths start with one of the provided paths are excluded from the session.
+Lines starting with a `#` symbol are ignored, in case you want to organize test paths in a text file using comments.
+Click "Add" and you will see the paths listed in the table below.
+
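+As an illustration of these matching rules (not the runner's actual implementation), a raw exclude list could be applied to a set of test paths roughly like this:
+
+```python
+# Illustrative sketch of the matching rules described above; not the runner's code.
+raw_exclude = """
+# flaky on my device
+/apiOne/test/one.html
+/apiThree
+""".splitlines()
+
+# Drop blank lines and comments.
+exclude_paths = [line.strip() for line in raw_exclude
+                 if line.strip() and not line.strip().startswith("#")]
+
+tests = ["/apiOne/test/one.html", "/apiOne/test/two.html", "/apiThree/test/five.html"]
+included = [t for t in tests
+            if not any(t.startswith(prefix) for prefix in exclude_paths)]
+print(included)  # ['/apiOne/test/two.html']
+```
+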
+### 1.3.2 Use a session's malfunctioning list to add tests to exclude
+
+When tests in a running session are flagged as malfunctioning, e.g. because they crash the device, it is possible to add these tests to the exclude list of the new session.
+To do this, click on "Add Malfunctioning" in the "Excluded Tests" section.
+
+![Exclude List Add Malfunctioning][configuration_page_add_malfunctioning]
+
+Enter at least the first eight characters of the session's token into the text field labelled "Session Token" to import all tests from that session's malfunctioning list into the new session's exclude list.
+Click "Add" to confirm.
+The tests should now appear in the list below.
+
+### 1.3.3 Use a previous session's exclude list to add tests to exclude
+
+If you have already specified a suitable exclude list or want to expand an existing one, you can apply the exclude list of a previous session.
+Click on "Add Previous Excluded" in the "Excluded Tests" section to open the corresponding controls.
+
+![Exclude List Add Previously Excluded][configuration_page_add_prev_excluded]
+
+Enter at least the first eight characters of the previous session's token into the text field labelled "Session Token" to import all tests from that session's exclude list into the new session's exclude list.
+Click "Add" to confirm.
+The tests should now appear in the list below.
+
+[To the top](#usage-guide---wave-test-runner)
+
+# 2. Resuming test sessions
+
+Certain test cases may cause some devices to crash, which makes the test runner unable to automatically run the next test.
+In this case, external interaction is necessary.
+To simplify resuming a test session, there are two mechanisms integrated into the web interface that reduce interaction with the device to a minimum.
+There is also a mechanism that can be useful if a test framework with access to the tested browser is used.
+
+## 2.1 Using the webinterface
+
+In any case, it is necessary to open the landing page on the device, in order to resume the session.
+
+![Landing Page][landing_page]
+
+On the landing page, in the section "Resume running session", you can see the token of the last session this device has run.
+To resume this particular session, click on the "Resume" button next to it, or simply press enter or space.
+If the presented token is not the one of the session you want to resume, you can change it from the configuration screen.
+To get there, press the "Configure Session" button or scan the QR-Code.
+
+![Configuration Page][configuration_page]
+
+At the very bottom of the configuration page, there is a section called "Resume session", which contains a text box showing the token that was previously displayed on the landing page.
+Here you can change the token of the session to resume; just enter at least the first eight characters of the token.
+When you're done, press the "Resume" button.
+Note that it is necessary to keep the landing page open in order to automatically run the next test, as it is loaded in the same window.
+
+## 2.2 Using a URL
+
+If you have programmatic access to the DUT's browser, you may want to resume a crashed test session automatically.
+To load the next test of a specific session, simply open the following URL:
+
+`/next.html?token=<session_token>`
+
+For example:
+
+`/_wave/next.html?token=24fcd360-ef4d-11e9-a95f-d6e1ad4c5fdb`
+
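+A test harness that controls the DUT's browser could navigate to this URL after a crash. Below is a minimal sketch, assuming Selenium with a locally available chromedriver (neither is part of the WAVE Test Runner) and hypothetical host and token values:
+
+```python
+from selenium import webdriver
+
+# Hypothetical values; adjust host, namespace and token to your setup.
+base = "http://web-platform.test:8000/_wave"
+token = "24fcd360-ef4d-11e9-a95f-d6e1ad4c5fdb"
+
+driver = webdriver.Chrome()
+driver.get(f"{base}/next.html?token={token}")  # loads the session's next test
+```
+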
+[To the top](#usage-guide---wave-test-runner)
+
+# 3. Monitoring test sessions
+
+While a test session is running, the results page for second-screen devices provides a convenient summary of the session's current state, as well as controls to manipulate the test execution.
+Additionally, you can flag tests that interrupt the test execution, e.g. by crashing the device, in order to exclude them from future sessions, and you can download test results and reports.
+
+![results_page_top]
+
+On the top right-hand side, there are controls to stop, pause or delete the session.
+Stopping the session, as well as deleting it, is irreversible.
+Below, you find the session's details, including the token, user agent, test paths, excluded test paths, total test file count, status, the different test timeouts, the date and time the session started, the date and time it finished, the duration and the labels.
+
+![results_page_last_completed]
+
+Right below, tests that have recently completed with the result status TIMEOUT are listed; you can add them to the list of malfunctioning tests by clicking the button with the + symbol.
+The test then appears in the list of malfunctioning tests at the very bottom of the results page.
+This list can be used to exclude tests when creating a new session. (more details in [1.3.2 Use a session's malfunctioning list to add tests to exclude](#132-use-a-sessions-malfunctioning-list-to-add-tests-to-exclude))
+
+![results_page_api_results]
+
+In the section "API Results" you can see the progress of each individual API selected for the session.
+As each test file can contain multiple subtests, the count of passed, failed, timed-out and not-run tests does not correspond to the count of test files run, which indicates the overall progress.
+Keep in mind that only test files that have received a result count as run, so even if all tests have finished executing on the device, some may have failed to send their result, in which case the internal timeout has to expire before a result is created.
+
+![results_page_api_results_export]
+
+Once all test files of an API have received a result, it is possible to download the result data or view a report for that API by clicking the corresponding button in the far-right column of the table.
+
+![results_page_bottom]
+
+Below the table of API results, there are more options to download the results of the session.
+The first option downloads the results the same way they are persisted on the server side, along with some metadata.
+This form is especially useful if you want to import the session details together with the results into other instances of the WAVE Test Runner.
+Furthermore, there is the option to download the raw results of all finished APIs in JSON format.
+This is the same JSON you get by clicking the "JSON" button in the API results column, but for all finished APIs in a ZIP file.
+Lastly, you can download a static HTML page similar to the results view.
+Finally, at the bottom of the page you can find the list of malfunctioning tests that have been added from the list of last timed-out test files.
+Remove tests by clicking their corresponding button with the trashcan icon.
+
+[To the top](#usage-guide---wave-test-runner)
+
+# 4. Managing test sessions
+
+The overview page provides features that help to manage and organize multiple sessions. You can access it from the URL `/_wave/overview.html`.
+
+![overview_page]
+
+In the "Manage Sessions" section you can add more sessions to the list below by entering the first eight or more characters of the token.
+Clicking on "Add Session" will add the session to the list if it was the only one that could be associated with the provided token.
+If there are multiple sessions that match the provided input, none will be added.
+Additionally, you can compare multiple sessions, provided that they are completed, used the same reference sessions and share tested APIs.
+Simply select the desired sessions from the list below and click "Compare Selected".
+You can also import sessions in the "Import Sessions" section; however, this feature has to be enabled in the server configuration.
+Below the "Manage Sessions" section, there is the list of reference and recent sessions.
+
+![overview_page_sessions]
+
+In the sessions list, sessions are organized in three lists:
+
+- Reference Browsers: test results everyone can see, containing the results of the reference browsers for the corresponding WAVE specification.
+- Recent sessions: sessions that have recently been viewed or executed on the device.
+- Pinned sessions: sessions pinned by the user from the list of recent sessions.
+
+Add label filters to show only matching sessions.
+
+![overview_page_sessions_pinned_recent]
+
+You can pin a session by clicking the button with the tag on a session in the recent sessions list, and unpin it the same way from the pinned sessions list.
+Click the trashcan icon to remove a session from its list; this does not delete the session's results.
+Sort the list of sessions by clicking on the column to sort by.
+
+![overview_page_sessions_filtered]
+
+Add one or more labels to the filter to conveniently find the sessions you are looking for. Add labels to sessions when creating them or on their corresponding results page.
+
+[To the top](#usage-guide---wave-test-runner)
+
+[landing_page]: ../res/landing_page.jpg "Landing Page"
+[configuration_page]: ../res/configuration_page_top.jpg "Configuration Page"
+[configuration_page_bottom]: ../res/configuration_page_bottom.jpg "Configuration Page"
+[configuration_page_add_raw]: ../res/configuration_page_exclude_add_raw.jpg "Exclude Tests - Add Raw"
+[configuration_page_add_malfunctioning]: ../res/configuration_page_exclude_add_malfunctioning.jpg "Exclude Tests - Add Malfunctioning"
+[configuration_page_add_prev_excluded]: ../res/configuration_page_exclude_add_prev_excluded.jpg "Exclude Tests - Add Previously Excluded"
+[results_page_top]: ../res/results_page_top.jpg "Results Page"
+[results_page_last_completed]: ../res/results_page_last_timed_out.jpg "Results Page"
+[results_page_api_results]: ../res/results_page_api_results.jpg "Results Page"
+[results_page_api_results_export]: ../res/results_page_api_results_export.jpg "Results Page"
+[results_page_bottom]: ../res/results_page_bottom.jpg "Results Page"
+[overview_page]: ../res/overview_page_top.jpg "Overview Page"
+[overview_page_sessions]: ../res/overview_page_sessions.jpg "Overview Page Sessions"
+[overview_page_sessions_pinned_recent]: ../res/overview_page_sessions_pinned_recent.jpg "Overview Page Sessions"
+[overview_page_sessions_filtered]: ../res/overview_page_sessions_filtered.jpg "Overview Page Filter"
diff --git a/testing/web-platform/tests/tools/wave/export/css/bulma.min.css b/testing/web-platform/tests/tools/wave/export/css/bulma.min.css
new file mode 100644
index 0000000000..70f6e4450e
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/export/css/bulma.min.css
@@ -0,0 +1 @@
+/*! bulma.io v0.7.4 | MIT License | github.com/jgthms/bulma */@-webkit-keyframes spinAround{from{-webkit-transform:rotate(0);transform:rotate(0)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes spinAround{from{-webkit-transform:rotate(0);transform:rotate(0)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.breadcrumb,.button,.delete,.file,.is-unselectable,.modal-close,.pagination-ellipsis,.pagination-link,.pagination-next,.pagination-previous,.tabs{-webkit-touch-callout:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.navbar-link:not(.is-arrowless)::after,.select:not(.is-multiple):not(.is-loading)::after{border:3px solid transparent;border-radius:2px;border-right:0;border-top:0;content:" ";display:block;height:.625em;margin-top:-.4375em;pointer-events:none;position:absolute;top:50%;-webkit-transform:rotate(-45deg);transform:rotate(-45deg);-webkit-transform-origin:center;transform-origin:center;width:.625em}.block:not(:last-child),.box:not(:last-child),.breadcrumb:not(:last-child),.content:not(:last-child),.highlight:not(:last-child),.level:not(:last-child),.list:not(:last-child),.message:not(:last-child),.notification:not(:last-child),.progress:not(:last-child),.subtitle:not(:last-child),.table-container:not(:last-child),.table:not(:last-child),.tabs:not(:last-child),.title:not(:last-child){margin-bottom:1.5rem}.delete,.modal-close{-moz-appearance:none;-webkit-appearance:none;background-color:rgba(10,10,10,.2);border:none;border-radius:290486px;cursor:pointer;pointer-events:auto;display:inline-block;flex-grow:0;flex-shrink:0;font-size:0;height:20px;max-height:20px;max-width:20px;min-height:20px;min-width:20px;outline:0;position:relative;vertical-align:top;width:20px}.delete::after,.delete::before,.modal-close::after,.modal-close::before{background-color:#fff;content:"";display:block;left:50%;position:absolute;top:50%;-webkit-transform:translateX(-50%) translateY(-50%) rotate(45deg);transform:translateX(-50%) translateY(-50%) rotate(45deg);-webkit-transform-origin:center center;transform-origin:center center}.delete::before,.modal-close::before{height:2px;width:50%}.delete::after,.modal-close::after{height:50%;width:2px}.delete:focus,.delete:hover,.modal-close:focus,.modal-close:hover{background-color:rgba(10,10,10,.3)}.delete:active,.modal-close:active{background-color:rgba(10,10,10,.4)}.is-small.delete,.is-small.modal-close{height:16px;max-height:16px;max-width:16px;min-height:16px;min-width:16px;width:16px}.is-medium.delete,.is-medium.modal-close{height:24px;max-height:24px;max-width:24px;min-height:24px;min-width:24px;width:24px}.is-large.delete,.is-large.modal-close{height:32px;max-height:32px;max-width:32px;min-height:32px;min-width:32px;width:32px}.button.is-loading::after,.control.is-loading::after,.loader,.select.is-loading::after{-webkit-animation:spinAround .5s infinite linear;animation:spinAround .5s infinite linear;border:2px solid #dbdbdb;border-radius:290486px;border-right-color:transparent;border-top-color:transparent;content:"";display:block;height:1em;position:relative;width:1em}.hero-video,.image.is-16by9 .has-ratio,.image.is-16by9 img,.image.is-1by1 .has-ratio,.image.is-1by1 img,.image.is-1by2 .has-ratio,.image.is-1by2 img,.image.is-1by3 .has-ratio,.image.is-1by3 img,.image.is-2by1 .has-ratio,.image.is-2by1 img,.image.is-2by3 .has-ratio,.image.is-2by3 img,.image.is-3by1 .has-ratio,.image.is-3by1 img,.image.is-3by2 .has-ratio,.image.is-3by2 img,.image.is-3by4 .has-ratio,.image.is-3by4 
img,.image.is-3by5 .has-ratio,.image.is-3by5 img,.image.is-4by3 .has-ratio,.image.is-4by3 img,.image.is-4by5 .has-ratio,.image.is-4by5 img,.image.is-5by3 .has-ratio,.image.is-5by3 img,.image.is-5by4 .has-ratio,.image.is-5by4 img,.image.is-9by16 .has-ratio,.image.is-9by16 img,.image.is-square .has-ratio,.image.is-square img,.is-overlay,.modal,.modal-background{bottom:0;left:0;position:absolute;right:0;top:0}.button,.file-cta,.file-name,.input,.pagination-ellipsis,.pagination-link,.pagination-next,.pagination-previous,.select select,.textarea{-moz-appearance:none;-webkit-appearance:none;align-items:center;border:1px solid transparent;border-radius:4px;box-shadow:none;display:inline-flex;font-size:1rem;height:2.25em;justify-content:flex-start;line-height:1.5;padding-bottom:calc(.375em - 1px);padding-left:calc(.625em - 1px);padding-right:calc(.625em - 1px);padding-top:calc(.375em - 1px);position:relative;vertical-align:top}.button:active,.button:focus,.file-cta:active,.file-cta:focus,.file-name:active,.file-name:focus,.input:active,.input:focus,.is-active.button,.is-active.file-cta,.is-active.file-name,.is-active.input,.is-active.pagination-ellipsis,.is-active.pagination-link,.is-active.pagination-next,.is-active.pagination-previous,.is-active.textarea,.is-focused.button,.is-focused.file-cta,.is-focused.file-name,.is-focused.input,.is-focused.pagination-ellipsis,.is-focused.pagination-link,.is-focused.pagination-next,.is-focused.pagination-previous,.is-focused.textarea,.pagination-ellipsis:active,.pagination-ellipsis:focus,.pagination-link:active,.pagination-link:focus,.pagination-next:active,.pagination-next:focus,.pagination-previous:active,.pagination-previous:focus,.select select.is-active,.select select.is-focused,.select select:active,.select select:focus,.textarea:active,.textarea:focus{outline:0}.button[disabled],.file-cta[disabled],.file-name[disabled],.input[disabled],.pagination-ellipsis[disabled],.pagination-link[disabled],.pagination-next[disabled],.pagination-previous[disabled],.select fieldset[disabled] select,.select select[disabled],.textarea[disabled],fieldset[disabled] .button,fieldset[disabled] .file-cta,fieldset[disabled] .file-name,fieldset[disabled] .input,fieldset[disabled] .pagination-ellipsis,fieldset[disabled] .pagination-link,fieldset[disabled] .pagination-next,fieldset[disabled] .pagination-previous,fieldset[disabled] .select select,fieldset[disabled] .textarea{cursor:not-allowed}/*! 
minireset.css v0.0.4 | MIT License | github.com/jgthms/minireset.css */blockquote,body,dd,dl,dt,fieldset,figure,h1,h2,h3,h4,h5,h6,hr,html,iframe,legend,li,ol,p,pre,textarea,ul{margin:0;padding:0}h1,h2,h3,h4,h5,h6{font-size:100%;font-weight:400}ul{list-style:none}button,input,select,textarea{margin:0}html{box-sizing:border-box}*,::after,::before{box-sizing:inherit}embed,iframe,img,object,video{height:auto;max-width:100%}audio{max-width:100%}iframe{border:0}table{border-collapse:collapse;border-spacing:0}td,th{padding:0;text-align:left}html{background-color:#fff;font-size:16px;-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;min-width:300px;overflow-x:hidden;overflow-y:scroll;text-rendering:optimizeLegibility;-webkit-text-size-adjust:100%;-moz-text-size-adjust:100%;-ms-text-size-adjust:100%;text-size-adjust:100%}article,aside,figure,footer,header,hgroup,section{display:block}body,button,input,select,textarea{font-family:BlinkMacSystemFont,-apple-system,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Fira Sans","Droid Sans","Helvetica Neue",Helvetica,Arial,sans-serif}code,pre{-moz-osx-font-smoothing:auto;-webkit-font-smoothing:auto;font-family:monospace}body{color:#4a4a4a;font-size:1rem;font-weight:400;line-height:1.5}a{color:#3273dc;cursor:pointer;text-decoration:none}a strong{color:currentColor}a:hover{color:#363636}code{background-color:#f5f5f5;color:#ff3860;font-size:.875em;font-weight:400;padding:.25em .5em .25em}hr{background-color:#f5f5f5;border:none;display:block;height:2px;margin:1.5rem 0}img{height:auto;max-width:100%}input[type=checkbox],input[type=radio]{vertical-align:baseline}small{font-size:.875em}span{font-style:inherit;font-weight:inherit}strong{color:#363636;font-weight:700}fieldset{border:none}pre{-webkit-overflow-scrolling:touch;background-color:#f5f5f5;color:#4a4a4a;font-size:.875em;overflow-x:auto;padding:1.25rem 1.5rem;white-space:pre;word-wrap:normal}pre code{background-color:transparent;color:currentColor;font-size:1em;padding:0}table td,table th{text-align:left;vertical-align:top}table th{color:#363636}.is-clearfix::after{clear:both;content:" ";display:table}.is-pulled-left{float:left!important}.is-pulled-right{float:right!important}.is-clipped{overflow:hidden!important}.is-size-1{font-size:3rem!important}.is-size-2{font-size:2.5rem!important}.is-size-3{font-size:2rem!important}.is-size-4{font-size:1.5rem!important}.is-size-5{font-size:1.25rem!important}.is-size-6{font-size:1rem!important}.is-size-7{font-size:.75rem!important}@media screen and (max-width:768px){.is-size-1-mobile{font-size:3rem!important}.is-size-2-mobile{font-size:2.5rem!important}.is-size-3-mobile{font-size:2rem!important}.is-size-4-mobile{font-size:1.5rem!important}.is-size-5-mobile{font-size:1.25rem!important}.is-size-6-mobile{font-size:1rem!important}.is-size-7-mobile{font-size:.75rem!important}}@media screen and (min-width:769px),print{.is-size-1-tablet{font-size:3rem!important}.is-size-2-tablet{font-size:2.5rem!important}.is-size-3-tablet{font-size:2rem!important}.is-size-4-tablet{font-size:1.5rem!important}.is-size-5-tablet{font-size:1.25rem!important}.is-size-6-tablet{font-size:1rem!important}.is-size-7-tablet{font-size:.75rem!important}}@media screen and 
(max-width:1087px){.is-size-1-touch{font-size:3rem!important}.is-size-2-touch{font-size:2.5rem!important}.is-size-3-touch{font-size:2rem!important}.is-size-4-touch{font-size:1.5rem!important}.is-size-5-touch{font-size:1.25rem!important}.is-size-6-touch{font-size:1rem!important}.is-size-7-touch{font-size:.75rem!important}}@media screen and (min-width:1088px){.is-size-1-desktop{font-size:3rem!important}.is-size-2-desktop{font-size:2.5rem!important}.is-size-3-desktop{font-size:2rem!important}.is-size-4-desktop{font-size:1.5rem!important}.is-size-5-desktop{font-size:1.25rem!important}.is-size-6-desktop{font-size:1rem!important}.is-size-7-desktop{font-size:.75rem!important}}@media screen and (min-width:1280px){.is-size-1-widescreen{font-size:3rem!important}.is-size-2-widescreen{font-size:2.5rem!important}.is-size-3-widescreen{font-size:2rem!important}.is-size-4-widescreen{font-size:1.5rem!important}.is-size-5-widescreen{font-size:1.25rem!important}.is-size-6-widescreen{font-size:1rem!important}.is-size-7-widescreen{font-size:.75rem!important}}@media screen and (min-width:1472px){.is-size-1-fullhd{font-size:3rem!important}.is-size-2-fullhd{font-size:2.5rem!important}.is-size-3-fullhd{font-size:2rem!important}.is-size-4-fullhd{font-size:1.5rem!important}.is-size-5-fullhd{font-size:1.25rem!important}.is-size-6-fullhd{font-size:1rem!important}.is-size-7-fullhd{font-size:.75rem!important}}.has-text-centered{text-align:center!important}.has-text-justified{text-align:justify!important}.has-text-left{text-align:left!important}.has-text-right{text-align:right!important}@media screen and (max-width:768px){.has-text-centered-mobile{text-align:center!important}}@media screen and (min-width:769px),print{.has-text-centered-tablet{text-align:center!important}}@media screen and (min-width:769px) and (max-width:1087px){.has-text-centered-tablet-only{text-align:center!important}}@media screen and (max-width:1087px){.has-text-centered-touch{text-align:center!important}}@media screen and (min-width:1088px){.has-text-centered-desktop{text-align:center!important}}@media screen and (min-width:1088px) and (max-width:1279px){.has-text-centered-desktop-only{text-align:center!important}}@media screen and (min-width:1280px){.has-text-centered-widescreen{text-align:center!important}}@media screen and (min-width:1280px) and (max-width:1471px){.has-text-centered-widescreen-only{text-align:center!important}}@media screen and (min-width:1472px){.has-text-centered-fullhd{text-align:center!important}}@media screen and (max-width:768px){.has-text-justified-mobile{text-align:justify!important}}@media screen and (min-width:769px),print{.has-text-justified-tablet{text-align:justify!important}}@media screen and (min-width:769px) and (max-width:1087px){.has-text-justified-tablet-only{text-align:justify!important}}@media screen and (max-width:1087px){.has-text-justified-touch{text-align:justify!important}}@media screen and (min-width:1088px){.has-text-justified-desktop{text-align:justify!important}}@media screen and (min-width:1088px) and (max-width:1279px){.has-text-justified-desktop-only{text-align:justify!important}}@media screen and (min-width:1280px){.has-text-justified-widescreen{text-align:justify!important}}@media screen and (min-width:1280px) and (max-width:1471px){.has-text-justified-widescreen-only{text-align:justify!important}}@media screen and (min-width:1472px){.has-text-justified-fullhd{text-align:justify!important}}@media screen and (max-width:768px){.has-text-left-mobile{text-align:left!important}}@media screen and 
(min-width:769px),print{.has-text-left-tablet{text-align:left!important}}@media screen and (min-width:769px) and (max-width:1087px){.has-text-left-tablet-only{text-align:left!important}}@media screen and (max-width:1087px){.has-text-left-touch{text-align:left!important}}@media screen and (min-width:1088px){.has-text-left-desktop{text-align:left!important}}@media screen and (min-width:1088px) and (max-width:1279px){.has-text-left-desktop-only{text-align:left!important}}@media screen and (min-width:1280px){.has-text-left-widescreen{text-align:left!important}}@media screen and (min-width:1280px) and (max-width:1471px){.has-text-left-widescreen-only{text-align:left!important}}@media screen and (min-width:1472px){.has-text-left-fullhd{text-align:left!important}}@media screen and (max-width:768px){.has-text-right-mobile{text-align:right!important}}@media screen and (min-width:769px),print{.has-text-right-tablet{text-align:right!important}}@media screen and (min-width:769px) and (max-width:1087px){.has-text-right-tablet-only{text-align:right!important}}@media screen and (max-width:1087px){.has-text-right-touch{text-align:right!important}}@media screen and (min-width:1088px){.has-text-right-desktop{text-align:right!important}}@media screen and (min-width:1088px) and (max-width:1279px){.has-text-right-desktop-only{text-align:right!important}}@media screen and (min-width:1280px){.has-text-right-widescreen{text-align:right!important}}@media screen and (min-width:1280px) and (max-width:1471px){.has-text-right-widescreen-only{text-align:right!important}}@media screen and (min-width:1472px){.has-text-right-fullhd{text-align:right!important}}.is-capitalized{text-transform:capitalize!important}.is-lowercase{text-transform:lowercase!important}.is-uppercase{text-transform:uppercase!important}.is-italic{font-style:italic!important}.has-text-white{color:#fff!important}a.has-text-white:focus,a.has-text-white:hover{color:#e6e6e6!important}.has-background-white{background-color:#fff!important}.has-text-black{color:#0a0a0a!important}a.has-text-black:focus,a.has-text-black:hover{color:#000!important}.has-background-black{background-color:#0a0a0a!important}.has-text-light{color:#f5f5f5!important}a.has-text-light:focus,a.has-text-light:hover{color:#dbdbdb!important}.has-background-light{background-color:#f5f5f5!important}.has-text-dark{color:#363636!important}a.has-text-dark:focus,a.has-text-dark:hover{color:#1c1c1c!important}.has-background-dark{background-color:#363636!important}.has-text-primary{color:#00d1b2!important}a.has-text-primary:focus,a.has-text-primary:hover{color:#009e86!important}.has-background-primary{background-color:#00d1b2!important}.has-text-link{color:#3273dc!important}a.has-text-link:focus,a.has-text-link:hover{color:#205bbc!important}.has-background-link{background-color:#3273dc!important}.has-text-info{color:#209cee!important}a.has-text-info:focus,a.has-text-info:hover{color:#0f81cc!important}.has-background-info{background-color:#209cee!important}.has-text-success{color:#23d160!important}a.has-text-success:focus,a.has-text-success:hover{color:#1ca64c!important}.has-background-success{background-color:#23d160!important}.has-text-warning{color:#ffdd57!important}a.has-text-warning:focus,a.has-text-warning:hover{color:#ffd324!important}.has-background-warning{background-color:#ffdd57!important}.has-text-danger{color:#ff3860!important}a.has-text-danger:focus,a.has-text-danger:hover{color:#ff0537!important}.has-background-danger{background-color:#ff3860!important}.has-text-black-bis{color:#121212!
important}.has-background-black-bis{background-color:#121212!important}.has-text-black-ter{color:#242424!important}.has-background-black-ter{background-color:#242424!important}.has-text-grey-darker{color:#363636!important}.has-background-grey-darker{background-color:#363636!important}.has-text-grey-dark{color:#4a4a4a!important}.has-background-grey-dark{background-color:#4a4a4a!important}.has-text-grey{color:#7a7a7a!important}.has-background-grey{background-color:#7a7a7a!important}.has-text-grey-light{color:#b5b5b5!important}.has-background-grey-light{background-color:#b5b5b5!important}.has-text-grey-lighter{color:#dbdbdb!important}.has-background-grey-lighter{background-color:#dbdbdb!important}.has-text-white-ter{color:#f5f5f5!important}.has-background-white-ter{background-color:#f5f5f5!important}.has-text-white-bis{color:#fafafa!important}.has-background-white-bis{background-color:#fafafa!important}.has-text-weight-light{font-weight:300!important}.has-text-weight-normal{font-weight:400!important}.has-text-weight-semibold{font-weight:600!important}.has-text-weight-bold{font-weight:700!important}.is-family-primary{font-family:BlinkMacSystemFont,-apple-system,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Fira Sans","Droid Sans","Helvetica Neue",Helvetica,Arial,sans-serif!important}.is-family-secondary{font-family:BlinkMacSystemFont,-apple-system,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Fira Sans","Droid Sans","Helvetica Neue",Helvetica,Arial,sans-serif!important}.is-family-sans-serif{font-family:BlinkMacSystemFont,-apple-system,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Fira Sans","Droid Sans","Helvetica Neue",Helvetica,Arial,sans-serif!important}.is-family-monospace{font-family:monospace!important}.is-family-code{font-family:monospace!important}.is-block{display:block!important}@media screen and (max-width:768px){.is-block-mobile{display:block!important}}@media screen and (min-width:769px),print{.is-block-tablet{display:block!important}}@media screen and (min-width:769px) and (max-width:1087px){.is-block-tablet-only{display:block!important}}@media screen and (max-width:1087px){.is-block-touch{display:block!important}}@media screen and (min-width:1088px){.is-block-desktop{display:block!important}}@media screen and (min-width:1088px) and (max-width:1279px){.is-block-desktop-only{display:block!important}}@media screen and (min-width:1280px){.is-block-widescreen{display:block!important}}@media screen and (min-width:1280px) and (max-width:1471px){.is-block-widescreen-only{display:block!important}}@media screen and (min-width:1472px){.is-block-fullhd{display:block!important}}.is-flex{display:flex!important}@media screen and (max-width:768px){.is-flex-mobile{display:flex!important}}@media screen and (min-width:769px),print{.is-flex-tablet{display:flex!important}}@media screen and (min-width:769px) and (max-width:1087px){.is-flex-tablet-only{display:flex!important}}@media screen and (max-width:1087px){.is-flex-touch{display:flex!important}}@media screen and (min-width:1088px){.is-flex-desktop{display:flex!important}}@media screen and (min-width:1088px) and (max-width:1279px){.is-flex-desktop-only{display:flex!important}}@media screen and (min-width:1280px){.is-flex-widescreen{display:flex!important}}@media screen and (min-width:1280px) and (max-width:1471px){.is-flex-widescreen-only{display:flex!important}}@media screen and (min-width:1472px){.is-flex-fullhd{display:flex!important}}.is-inline{display:inline!important}@media screen and 
(max-width:768px){.is-inline-mobile{display:inline!important}}@media screen and (min-width:769px),print{.is-inline-tablet{display:inline!important}}@media screen and (min-width:769px) and (max-width:1087px){.is-inline-tablet-only{display:inline!important}}@media screen and (max-width:1087px){.is-inline-touch{display:inline!important}}@media screen and (min-width:1088px){.is-inline-desktop{display:inline!important}}@media screen and (min-width:1088px) and (max-width:1279px){.is-inline-desktop-only{display:inline!important}}@media screen and (min-width:1280px){.is-inline-widescreen{display:inline!important}}@media screen and (min-width:1280px) and (max-width:1471px){.is-inline-widescreen-only{display:inline!important}}@media screen and (min-width:1472px){.is-inline-fullhd{display:inline!important}}.is-inline-block{display:inline-block!important}@media screen and (max-width:768px){.is-inline-block-mobile{display:inline-block!important}}@media screen and (min-width:769px),print{.is-inline-block-tablet{display:inline-block!important}}@media screen and (min-width:769px) and (max-width:1087px){.is-inline-block-tablet-only{display:inline-block!important}}@media screen and (max-width:1087px){.is-inline-block-touch{display:inline-block!important}}@media screen and (min-width:1088px){.is-inline-block-desktop{display:inline-block!important}}@media screen and (min-width:1088px) and (max-width:1279px){.is-inline-block-desktop-only{display:inline-block!important}}@media screen and (min-width:1280px){.is-inline-block-widescreen{display:inline-block!important}}@media screen and (min-width:1280px) and (max-width:1471px){.is-inline-block-widescreen-only{display:inline-block!important}}@media screen and (min-width:1472px){.is-inline-block-fullhd{display:inline-block!important}}.is-inline-flex{display:inline-flex!important}@media screen and (max-width:768px){.is-inline-flex-mobile{display:inline-flex!important}}@media screen and (min-width:769px),print{.is-inline-flex-tablet{display:inline-flex!important}}@media screen and (min-width:769px) and (max-width:1087px){.is-inline-flex-tablet-only{display:inline-flex!important}}@media screen and (max-width:1087px){.is-inline-flex-touch{display:inline-flex!important}}@media screen and (min-width:1088px){.is-inline-flex-desktop{display:inline-flex!important}}@media screen and (min-width:1088px) and (max-width:1279px){.is-inline-flex-desktop-only{display:inline-flex!important}}@media screen and (min-width:1280px){.is-inline-flex-widescreen{display:inline-flex!important}}@media screen and (min-width:1280px) and (max-width:1471px){.is-inline-flex-widescreen-only{display:inline-flex!important}}@media screen and (min-width:1472px){.is-inline-flex-fullhd{display:inline-flex!important}}.is-hidden{display:none!important}.is-sr-only{border:none!important;clip:rect(0,0,0,0)!important;height:.01em!important;overflow:hidden!important;padding:0!important;position:absolute!important;white-space:nowrap!important;width:.01em!important}@media screen and (max-width:768px){.is-hidden-mobile{display:none!important}}@media screen and (min-width:769px),print{.is-hidden-tablet{display:none!important}}@media screen and (min-width:769px) and (max-width:1087px){.is-hidden-tablet-only{display:none!important}}@media screen and (max-width:1087px){.is-hidden-touch{display:none!important}}@media screen and (min-width:1088px){.is-hidden-desktop{display:none!important}}@media screen and (min-width:1088px) and (max-width:1279px){.is-hidden-desktop-only{display:none!important}}@media screen and 
(min-width:1280px){.is-hidden-widescreen{display:none!important}}@media screen and (min-width:1280px) and (max-width:1471px){.is-hidden-widescreen-only{display:none!important}}@media screen and (min-width:1472px){.is-hidden-fullhd{display:none!important}}.is-invisible{visibility:hidden!important}@media screen and (max-width:768px){.is-invisible-mobile{visibility:hidden!important}}@media screen and (min-width:769px),print{.is-invisible-tablet{visibility:hidden!important}}@media screen and (min-width:769px) and (max-width:1087px){.is-invisible-tablet-only{visibility:hidden!important}}@media screen and (max-width:1087px){.is-invisible-touch{visibility:hidden!important}}@media screen and (min-width:1088px){.is-invisible-desktop{visibility:hidden!important}}@media screen and (min-width:1088px) and (max-width:1279px){.is-invisible-desktop-only{visibility:hidden!important}}@media screen and (min-width:1280px){.is-invisible-widescreen{visibility:hidden!important}}@media screen and (min-width:1280px) and (max-width:1471px){.is-invisible-widescreen-only{visibility:hidden!important}}@media screen and (min-width:1472px){.is-invisible-fullhd{visibility:hidden!important}}.is-marginless{margin:0!important}.is-paddingless{padding:0!important}.is-radiusless{border-radius:0!important}.is-shadowless{box-shadow:none!important}.box{background-color:#fff;border-radius:6px;box-shadow:0 2px 3px rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.1);color:#4a4a4a;display:block;padding:1.25rem}a.box:focus,a.box:hover{box-shadow:0 2px 3px rgba(10,10,10,.1),0 0 0 1px #3273dc}a.box:active{box-shadow:inset 0 1px 2px rgba(10,10,10,.2),0 0 0 1px #3273dc}.button{background-color:#fff;border-color:#dbdbdb;border-width:1px;color:#363636;cursor:pointer;justify-content:center;padding-bottom:calc(.375em - 1px);padding-left:.75em;padding-right:.75em;padding-top:calc(.375em - 1px);text-align:center;white-space:nowrap}.button strong{color:inherit}.button .icon,.button .icon.is-large,.button .icon.is-medium,.button .icon.is-small{height:1.5em;width:1.5em}.button .icon:first-child:not(:last-child){margin-left:calc(-.375em - 1px);margin-right:.1875em}.button .icon:last-child:not(:first-child){margin-left:.1875em;margin-right:calc(-.375em - 1px)}.button .icon:first-child:last-child{margin-left:calc(-.375em - 1px);margin-right:calc(-.375em - 1px)}.button.is-hovered,.button:hover{border-color:#b5b5b5;color:#363636}.button.is-focused,.button:focus{border-color:#3273dc;color:#363636}.button.is-focused:not(:active),.button:focus:not(:active){box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.button.is-active,.button:active{border-color:#4a4a4a;color:#363636}.button.is-text{background-color:transparent;border-color:transparent;color:#4a4a4a;text-decoration:underline}.button.is-text.is-focused,.button.is-text.is-hovered,.button.is-text:focus,.button.is-text:hover{background-color:#f5f5f5;color:#363636}.button.is-text.is-active,.button.is-text:active{background-color:#e8e8e8;color:#363636}.button.is-text[disabled],fieldset[disabled] .button.is-text{background-color:transparent;border-color:transparent;box-shadow:none}.button.is-white{background-color:#fff;border-color:transparent;color:#0a0a0a}.button.is-white.is-hovered,.button.is-white:hover{background-color:#f9f9f9;border-color:transparent;color:#0a0a0a}.button.is-white.is-focused,.button.is-white:focus{border-color:transparent;color:#0a0a0a}.button.is-white.is-focused:not(:active),.button.is-white:focus:not(:active){box-shadow:0 0 0 .125em 
rgba(255,255,255,.25)}.button.is-white.is-active,.button.is-white:active{background-color:#f2f2f2;border-color:transparent;color:#0a0a0a}.button.is-white[disabled],fieldset[disabled] .button.is-white{background-color:#fff;border-color:transparent;box-shadow:none}.button.is-white.is-inverted{background-color:#0a0a0a;color:#fff}.button.is-white.is-inverted:hover{background-color:#000}.button.is-white.is-inverted[disabled],fieldset[disabled] .button.is-white.is-inverted{background-color:#0a0a0a;border-color:transparent;box-shadow:none;color:#fff}.button.is-white.is-loading::after{border-color:transparent transparent #0a0a0a #0a0a0a!important}.button.is-white.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-white.is-outlined:focus,.button.is-white.is-outlined:hover{background-color:#fff;border-color:#fff;color:#0a0a0a}.button.is-white.is-outlined.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-white.is-outlined[disabled],fieldset[disabled] .button.is-white.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-white.is-inverted.is-outlined{background-color:transparent;border-color:#0a0a0a;color:#0a0a0a}.button.is-white.is-inverted.is-outlined:focus,.button.is-white.is-inverted.is-outlined:hover{background-color:#0a0a0a;color:#fff}.button.is-white.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-white.is-inverted.is-outlined{background-color:transparent;border-color:#0a0a0a;box-shadow:none;color:#0a0a0a}.button.is-black{background-color:#0a0a0a;border-color:transparent;color:#fff}.button.is-black.is-hovered,.button.is-black:hover{background-color:#040404;border-color:transparent;color:#fff}.button.is-black.is-focused,.button.is-black:focus{border-color:transparent;color:#fff}.button.is-black.is-focused:not(:active),.button.is-black:focus:not(:active){box-shadow:0 0 0 .125em rgba(10,10,10,.25)}.button.is-black.is-active,.button.is-black:active{background-color:#000;border-color:transparent;color:#fff}.button.is-black[disabled],fieldset[disabled] .button.is-black{background-color:#0a0a0a;border-color:transparent;box-shadow:none}.button.is-black.is-inverted{background-color:#fff;color:#0a0a0a}.button.is-black.is-inverted:hover{background-color:#f2f2f2}.button.is-black.is-inverted[disabled],fieldset[disabled] .button.is-black.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#0a0a0a}.button.is-black.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-black.is-outlined{background-color:transparent;border-color:#0a0a0a;color:#0a0a0a}.button.is-black.is-outlined:focus,.button.is-black.is-outlined:hover{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}.button.is-black.is-outlined.is-loading::after{border-color:transparent transparent #0a0a0a #0a0a0a!important}.button.is-black.is-outlined[disabled],fieldset[disabled] .button.is-black.is-outlined{background-color:transparent;border-color:#0a0a0a;box-shadow:none;color:#0a0a0a}.button.is-black.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-black.is-inverted.is-outlined:focus,.button.is-black.is-inverted.is-outlined:hover{background-color:#fff;color:#0a0a0a}.button.is-black.is-inverted.is-outlined[disabled],fieldset[disabled] 
.button.is-black.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-light{background-color:#f5f5f5;border-color:transparent;color:#363636}.button.is-light.is-hovered,.button.is-light:hover{background-color:#eee;border-color:transparent;color:#363636}.button.is-light.is-focused,.button.is-light:focus{border-color:transparent;color:#363636}.button.is-light.is-focused:not(:active),.button.is-light:focus:not(:active){box-shadow:0 0 0 .125em rgba(245,245,245,.25)}.button.is-light.is-active,.button.is-light:active{background-color:#e8e8e8;border-color:transparent;color:#363636}.button.is-light[disabled],fieldset[disabled] .button.is-light{background-color:#f5f5f5;border-color:transparent;box-shadow:none}.button.is-light.is-inverted{background-color:#363636;color:#f5f5f5}.button.is-light.is-inverted:hover{background-color:#292929}.button.is-light.is-inverted[disabled],fieldset[disabled] .button.is-light.is-inverted{background-color:#363636;border-color:transparent;box-shadow:none;color:#f5f5f5}.button.is-light.is-loading::after{border-color:transparent transparent #363636 #363636!important}.button.is-light.is-outlined{background-color:transparent;border-color:#f5f5f5;color:#f5f5f5}.button.is-light.is-outlined:focus,.button.is-light.is-outlined:hover{background-color:#f5f5f5;border-color:#f5f5f5;color:#363636}.button.is-light.is-outlined.is-loading::after{border-color:transparent transparent #f5f5f5 #f5f5f5!important}.button.is-light.is-outlined[disabled],fieldset[disabled] .button.is-light.is-outlined{background-color:transparent;border-color:#f5f5f5;box-shadow:none;color:#f5f5f5}.button.is-light.is-inverted.is-outlined{background-color:transparent;border-color:#363636;color:#363636}.button.is-light.is-inverted.is-outlined:focus,.button.is-light.is-inverted.is-outlined:hover{background-color:#363636;color:#f5f5f5}.button.is-light.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-light.is-inverted.is-outlined{background-color:transparent;border-color:#363636;box-shadow:none;color:#363636}.button.is-dark{background-color:#363636;border-color:transparent;color:#f5f5f5}.button.is-dark.is-hovered,.button.is-dark:hover{background-color:#2f2f2f;border-color:transparent;color:#f5f5f5}.button.is-dark.is-focused,.button.is-dark:focus{border-color:transparent;color:#f5f5f5}.button.is-dark.is-focused:not(:active),.button.is-dark:focus:not(:active){box-shadow:0 0 0 .125em rgba(54,54,54,.25)}.button.is-dark.is-active,.button.is-dark:active{background-color:#292929;border-color:transparent;color:#f5f5f5}.button.is-dark[disabled],fieldset[disabled] .button.is-dark{background-color:#363636;border-color:transparent;box-shadow:none}.button.is-dark.is-inverted{background-color:#f5f5f5;color:#363636}.button.is-dark.is-inverted:hover{background-color:#e8e8e8}.button.is-dark.is-inverted[disabled],fieldset[disabled] .button.is-dark.is-inverted{background-color:#f5f5f5;border-color:transparent;box-shadow:none;color:#363636}.button.is-dark.is-loading::after{border-color:transparent transparent #f5f5f5 #f5f5f5!important}.button.is-dark.is-outlined{background-color:transparent;border-color:#363636;color:#363636}.button.is-dark.is-outlined:focus,.button.is-dark.is-outlined:hover{background-color:#363636;border-color:#363636;color:#f5f5f5}.button.is-dark.is-outlined.is-loading::after{border-color:transparent transparent #363636 #363636!important}.button.is-dark.is-outlined[disabled],fieldset[disabled] 
.button.is-dark.is-outlined{background-color:transparent;border-color:#363636;box-shadow:none;color:#363636}.button.is-dark.is-inverted.is-outlined{background-color:transparent;border-color:#f5f5f5;color:#f5f5f5}.button.is-dark.is-inverted.is-outlined:focus,.button.is-dark.is-inverted.is-outlined:hover{background-color:#f5f5f5;color:#363636}.button.is-dark.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-dark.is-inverted.is-outlined{background-color:transparent;border-color:#f5f5f5;box-shadow:none;color:#f5f5f5}.button.is-primary{background-color:#00d1b2;border-color:transparent;color:#fff}.button.is-primary.is-hovered,.button.is-primary:hover{background-color:#00c4a7;border-color:transparent;color:#fff}.button.is-primary.is-focused,.button.is-primary:focus{border-color:transparent;color:#fff}.button.is-primary.is-focused:not(:active),.button.is-primary:focus:not(:active){box-shadow:0 0 0 .125em rgba(0,209,178,.25)}.button.is-primary.is-active,.button.is-primary:active{background-color:#00b89c;border-color:transparent;color:#fff}.button.is-primary[disabled],fieldset[disabled] .button.is-primary{background-color:#00d1b2;border-color:transparent;box-shadow:none}.button.is-primary.is-inverted{background-color:#fff;color:#00d1b2}.button.is-primary.is-inverted:hover{background-color:#f2f2f2}.button.is-primary.is-inverted[disabled],fieldset[disabled] .button.is-primary.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#00d1b2}.button.is-primary.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-primary.is-outlined{background-color:transparent;border-color:#00d1b2;color:#00d1b2}.button.is-primary.is-outlined:focus,.button.is-primary.is-outlined:hover{background-color:#00d1b2;border-color:#00d1b2;color:#fff}.button.is-primary.is-outlined.is-loading::after{border-color:transparent transparent #00d1b2 #00d1b2!important}.button.is-primary.is-outlined[disabled],fieldset[disabled] .button.is-primary.is-outlined{background-color:transparent;border-color:#00d1b2;box-shadow:none;color:#00d1b2}.button.is-primary.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-primary.is-inverted.is-outlined:focus,.button.is-primary.is-inverted.is-outlined:hover{background-color:#fff;color:#00d1b2}.button.is-primary.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-primary.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-link{background-color:#3273dc;border-color:transparent;color:#fff}.button.is-link.is-hovered,.button.is-link:hover{background-color:#276cda;border-color:transparent;color:#fff}.button.is-link.is-focused,.button.is-link:focus{border-color:transparent;color:#fff}.button.is-link.is-focused:not(:active),.button.is-link:focus:not(:active){box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.button.is-link.is-active,.button.is-link:active{background-color:#2366d1;border-color:transparent;color:#fff}.button.is-link[disabled],fieldset[disabled] .button.is-link{background-color:#3273dc;border-color:transparent;box-shadow:none}.button.is-link.is-inverted{background-color:#fff;color:#3273dc}.button.is-link.is-inverted:hover{background-color:#f2f2f2}.button.is-link.is-inverted[disabled],fieldset[disabled] .button.is-link.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#3273dc}.button.is-link.is-loading::after{border-color:transparent transparent #fff 
#fff!important}.button.is-link.is-outlined{background-color:transparent;border-color:#3273dc;color:#3273dc}.button.is-link.is-outlined:focus,.button.is-link.is-outlined:hover{background-color:#3273dc;border-color:#3273dc;color:#fff}.button.is-link.is-outlined.is-loading::after{border-color:transparent transparent #3273dc #3273dc!important}.button.is-link.is-outlined[disabled],fieldset[disabled] .button.is-link.is-outlined{background-color:transparent;border-color:#3273dc;box-shadow:none;color:#3273dc}.button.is-link.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-link.is-inverted.is-outlined:focus,.button.is-link.is-inverted.is-outlined:hover{background-color:#fff;color:#3273dc}.button.is-link.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-link.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-info{background-color:#209cee;border-color:transparent;color:#fff}.button.is-info.is-hovered,.button.is-info:hover{background-color:#1496ed;border-color:transparent;color:#fff}.button.is-info.is-focused,.button.is-info:focus{border-color:transparent;color:#fff}.button.is-info.is-focused:not(:active),.button.is-info:focus:not(:active){box-shadow:0 0 0 .125em rgba(32,156,238,.25)}.button.is-info.is-active,.button.is-info:active{background-color:#118fe4;border-color:transparent;color:#fff}.button.is-info[disabled],fieldset[disabled] .button.is-info{background-color:#209cee;border-color:transparent;box-shadow:none}.button.is-info.is-inverted{background-color:#fff;color:#209cee}.button.is-info.is-inverted:hover{background-color:#f2f2f2}.button.is-info.is-inverted[disabled],fieldset[disabled] .button.is-info.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#209cee}.button.is-info.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-info.is-outlined{background-color:transparent;border-color:#209cee;color:#209cee}.button.is-info.is-outlined:focus,.button.is-info.is-outlined:hover{background-color:#209cee;border-color:#209cee;color:#fff}.button.is-info.is-outlined.is-loading::after{border-color:transparent transparent #209cee #209cee!important}.button.is-info.is-outlined[disabled],fieldset[disabled] .button.is-info.is-outlined{background-color:transparent;border-color:#209cee;box-shadow:none;color:#209cee}.button.is-info.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-info.is-inverted.is-outlined:focus,.button.is-info.is-inverted.is-outlined:hover{background-color:#fff;color:#209cee}.button.is-info.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-info.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-success{background-color:#23d160;border-color:transparent;color:#fff}.button.is-success.is-hovered,.button.is-success:hover{background-color:#22c65b;border-color:transparent;color:#fff}.button.is-success.is-focused,.button.is-success:focus{border-color:transparent;color:#fff}.button.is-success.is-focused:not(:active),.button.is-success:focus:not(:active){box-shadow:0 0 0 .125em rgba(35,209,96,.25)}.button.is-success.is-active,.button.is-success:active{background-color:#20bc56;border-color:transparent;color:#fff}.button.is-success[disabled],fieldset[disabled] 
.button.is-success{background-color:#23d160;border-color:transparent;box-shadow:none}.button.is-success.is-inverted{background-color:#fff;color:#23d160}.button.is-success.is-inverted:hover{background-color:#f2f2f2}.button.is-success.is-inverted[disabled],fieldset[disabled] .button.is-success.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#23d160}.button.is-success.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-success.is-outlined{background-color:transparent;border-color:#23d160;color:#23d160}.button.is-success.is-outlined:focus,.button.is-success.is-outlined:hover{background-color:#23d160;border-color:#23d160;color:#fff}.button.is-success.is-outlined.is-loading::after{border-color:transparent transparent #23d160 #23d160!important}.button.is-success.is-outlined[disabled],fieldset[disabled] .button.is-success.is-outlined{background-color:transparent;border-color:#23d160;box-shadow:none;color:#23d160}.button.is-success.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-success.is-inverted.is-outlined:focus,.button.is-success.is-inverted.is-outlined:hover{background-color:#fff;color:#23d160}.button.is-success.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-success.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-warning{background-color:#ffdd57;border-color:transparent;color:rgba(0,0,0,.7)}.button.is-warning.is-hovered,.button.is-warning:hover{background-color:#ffdb4a;border-color:transparent;color:rgba(0,0,0,.7)}.button.is-warning.is-focused,.button.is-warning:focus{border-color:transparent;color:rgba(0,0,0,.7)}.button.is-warning.is-focused:not(:active),.button.is-warning:focus:not(:active){box-shadow:0 0 0 .125em rgba(255,221,87,.25)}.button.is-warning.is-active,.button.is-warning:active{background-color:#ffd83d;border-color:transparent;color:rgba(0,0,0,.7)}.button.is-warning[disabled],fieldset[disabled] .button.is-warning{background-color:#ffdd57;border-color:transparent;box-shadow:none}.button.is-warning.is-inverted{background-color:rgba(0,0,0,.7);color:#ffdd57}.button.is-warning.is-inverted:hover{background-color:rgba(0,0,0,.7)}.button.is-warning.is-inverted[disabled],fieldset[disabled] .button.is-warning.is-inverted{background-color:rgba(0,0,0,.7);border-color:transparent;box-shadow:none;color:#ffdd57}.button.is-warning.is-loading::after{border-color:transparent transparent rgba(0,0,0,.7) rgba(0,0,0,.7)!important}.button.is-warning.is-outlined{background-color:transparent;border-color:#ffdd57;color:#ffdd57}.button.is-warning.is-outlined:focus,.button.is-warning.is-outlined:hover{background-color:#ffdd57;border-color:#ffdd57;color:rgba(0,0,0,.7)}.button.is-warning.is-outlined.is-loading::after{border-color:transparent transparent #ffdd57 #ffdd57!important}.button.is-warning.is-outlined[disabled],fieldset[disabled] .button.is-warning.is-outlined{background-color:transparent;border-color:#ffdd57;box-shadow:none;color:#ffdd57}.button.is-warning.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,.7);color:rgba(0,0,0,.7)}.button.is-warning.is-inverted.is-outlined:focus,.button.is-warning.is-inverted.is-outlined:hover{background-color:rgba(0,0,0,.7);color:#ffdd57}.button.is-warning.is-inverted.is-outlined[disabled],fieldset[disabled] 
.button.is-warning.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,.7);box-shadow:none;color:rgba(0,0,0,.7)}.button.is-danger{background-color:#ff3860;border-color:transparent;color:#fff}.button.is-danger.is-hovered,.button.is-danger:hover{background-color:#ff2b56;border-color:transparent;color:#fff}.button.is-danger.is-focused,.button.is-danger:focus{border-color:transparent;color:#fff}.button.is-danger.is-focused:not(:active),.button.is-danger:focus:not(:active){box-shadow:0 0 0 .125em rgba(255,56,96,.25)}.button.is-danger.is-active,.button.is-danger:active{background-color:#ff1f4b;border-color:transparent;color:#fff}.button.is-danger[disabled],fieldset[disabled] .button.is-danger{background-color:#ff3860;border-color:transparent;box-shadow:none}.button.is-danger.is-inverted{background-color:#fff;color:#ff3860}.button.is-danger.is-inverted:hover{background-color:#f2f2f2}.button.is-danger.is-inverted[disabled],fieldset[disabled] .button.is-danger.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#ff3860}.button.is-danger.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-danger.is-outlined{background-color:transparent;border-color:#ff3860;color:#ff3860}.button.is-danger.is-outlined:focus,.button.is-danger.is-outlined:hover{background-color:#ff3860;border-color:#ff3860;color:#fff}.button.is-danger.is-outlined.is-loading::after{border-color:transparent transparent #ff3860 #ff3860!important}.button.is-danger.is-outlined[disabled],fieldset[disabled] .button.is-danger.is-outlined{background-color:transparent;border-color:#ff3860;box-shadow:none;color:#ff3860}.button.is-danger.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-danger.is-inverted.is-outlined:focus,.button.is-danger.is-inverted.is-outlined:hover{background-color:#fff;color:#ff3860}.button.is-danger.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-danger.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-small{border-radius:2px;font-size:.75rem}.button.is-normal{font-size:1rem}.button.is-medium{font-size:1.25rem}.button.is-large{font-size:1.5rem}.button[disabled],fieldset[disabled] .button{background-color:#fff;border-color:#dbdbdb;box-shadow:none;opacity:.5}.button.is-fullwidth{display:flex;width:100%}.button.is-loading{color:transparent!important;pointer-events:none}.button.is-loading::after{position:absolute;left:calc(50% - (1em / 2));top:calc(50% - (1em / 2));position:absolute!important}.button.is-static{background-color:#f5f5f5;border-color:#dbdbdb;color:#7a7a7a;box-shadow:none;pointer-events:none}.button.is-rounded{border-radius:290486px;padding-left:1em;padding-right:1em}.buttons{align-items:center;display:flex;flex-wrap:wrap;justify-content:flex-start}.buttons .button{margin-bottom:.5rem}.buttons .button:not(:last-child):not(.is-fullwidth){margin-right:.5rem}.buttons:last-child{margin-bottom:-.5rem}.buttons:not(:last-child){margin-bottom:1rem}.buttons.are-small .button:not(.is-normal):not(.is-medium):not(.is-large){border-radius:2px;font-size:.75rem}.buttons.are-medium .button:not(.is-small):not(.is-normal):not(.is-large){font-size:1.25rem}.buttons.are-large .button:not(.is-small):not(.is-normal):not(.is-medium){font-size:1.5rem}.buttons.has-addons .button:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.buttons.has-addons 
.button:not(:last-child){border-bottom-right-radius:0;border-top-right-radius:0;margin-right:-1px}.buttons.has-addons .button:last-child{margin-right:0}.buttons.has-addons .button.is-hovered,.buttons.has-addons .button:hover{z-index:2}.buttons.has-addons .button.is-active,.buttons.has-addons .button.is-focused,.buttons.has-addons .button.is-selected,.buttons.has-addons .button:active,.buttons.has-addons .button:focus{z-index:3}.buttons.has-addons .button.is-active:hover,.buttons.has-addons .button.is-focused:hover,.buttons.has-addons .button.is-selected:hover,.buttons.has-addons .button:active:hover,.buttons.has-addons .button:focus:hover{z-index:4}.buttons.has-addons .button.is-expanded{flex-grow:1}.buttons.is-centered{justify-content:center}.buttons.is-right{justify-content:flex-end}.container{margin:0 auto;position:relative}@media screen and (min-width:1088px){.container{max-width:960px;width:960px}.container.is-fluid{margin-left:64px;margin-right:64px;max-width:none;width:auto}}@media screen and (max-width:1279px){.container.is-widescreen{max-width:1152px;width:auto}}@media screen and (max-width:1471px){.container.is-fullhd{max-width:1344px;width:auto}}@media screen and (min-width:1280px){.container{max-width:1152px;width:1152px}}@media screen and (min-width:1472px){.container{max-width:1344px;width:1344px}}.content li+li{margin-top:.25em}.content blockquote:not(:last-child),.content dl:not(:last-child),.content ol:not(:last-child),.content p:not(:last-child),.content pre:not(:last-child),.content table:not(:last-child),.content ul:not(:last-child){margin-bottom:1em}.content h1,.content h2,.content h3,.content h4,.content h5,.content h6{color:#363636;font-weight:600;line-height:1.125}.content h1{font-size:2em;margin-bottom:.5em}.content h1:not(:first-child){margin-top:1em}.content h2{font-size:1.75em;margin-bottom:.5714em}.content h2:not(:first-child){margin-top:1.1428em}.content h3{font-size:1.5em;margin-bottom:.6666em}.content h3:not(:first-child){margin-top:1.3333em}.content h4{font-size:1.25em;margin-bottom:.8em}.content h5{font-size:1.125em;margin-bottom:.8888em}.content h6{font-size:1em;margin-bottom:1em}.content blockquote{background-color:#f5f5f5;border-left:5px solid #dbdbdb;padding:1.25em 1.5em}.content ol{list-style-position:outside;margin-left:2em;margin-top:1em}.content ol:not([type]){list-style-type:decimal}.content ol:not([type]).is-lower-alpha{list-style-type:lower-alpha}.content ol:not([type]).is-lower-roman{list-style-type:lower-roman}.content ol:not([type]).is-upper-alpha{list-style-type:upper-alpha}.content ol:not([type]).is-upper-roman{list-style-type:upper-roman}.content ul{list-style:disc outside;margin-left:2em;margin-top:1em}.content ul ul{list-style-type:circle;margin-top:.5em}.content ul ul ul{list-style-type:square}.content dd{margin-left:2em}.content figure{margin-left:2em;margin-right:2em;text-align:center}.content figure:not(:first-child){margin-top:2em}.content figure:not(:last-child){margin-bottom:2em}.content figure img{display:inline-block}.content figure figcaption{font-style:italic}.content pre{-webkit-overflow-scrolling:touch;overflow-x:auto;padding:1.25em 1.5em;white-space:pre;word-wrap:normal}.content sub,.content sup{font-size:75%}.content table{width:100%}.content table td,.content table th{border:1px solid #dbdbdb;border-width:0 0 1px;padding:.5em .75em;vertical-align:top}.content table th{color:#363636;text-align:left}.content table thead td,.content table thead th{border-width:0 0 2px;color:#363636}.content table tfoot td,.content table 
tfoot th{border-width:2px 0 0;color:#363636}.content table tbody tr:last-child td,.content table tbody tr:last-child th{border-bottom-width:0}.content.is-small{font-size:.75rem}.content.is-medium{font-size:1.25rem}.content.is-large{font-size:1.5rem}.input,.textarea{background-color:#fff;border-color:#dbdbdb;color:#363636;box-shadow:inset 0 1px 2px rgba(10,10,10,.1);max-width:100%;width:100%}.input::-moz-placeholder,.textarea::-moz-placeholder{color:rgba(54,54,54,.3)}.input::-webkit-input-placeholder,.textarea::-webkit-input-placeholder{color:rgba(54,54,54,.3)}.input:-moz-placeholder,.textarea:-moz-placeholder{color:rgba(54,54,54,.3)}.input:-ms-input-placeholder,.textarea:-ms-input-placeholder{color:rgba(54,54,54,.3)}.input.is-hovered,.input:hover,.textarea.is-hovered,.textarea:hover{border-color:#b5b5b5}.input.is-active,.input.is-focused,.input:active,.input:focus,.textarea.is-active,.textarea.is-focused,.textarea:active,.textarea:focus{border-color:#3273dc;box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.input[disabled],.textarea[disabled],fieldset[disabled] .input,fieldset[disabled] .textarea{background-color:#f5f5f5;border-color:#f5f5f5;box-shadow:none;color:#7a7a7a}.input[disabled]::-moz-placeholder,.textarea[disabled]::-moz-placeholder,fieldset[disabled] .input::-moz-placeholder,fieldset[disabled] .textarea::-moz-placeholder{color:rgba(122,122,122,.3)}.input[disabled]::-webkit-input-placeholder,.textarea[disabled]::-webkit-input-placeholder,fieldset[disabled] .input::-webkit-input-placeholder,fieldset[disabled] .textarea::-webkit-input-placeholder{color:rgba(122,122,122,.3)}.input[disabled]:-moz-placeholder,.textarea[disabled]:-moz-placeholder,fieldset[disabled] .input:-moz-placeholder,fieldset[disabled] .textarea:-moz-placeholder{color:rgba(122,122,122,.3)}.input[disabled]:-ms-input-placeholder,.textarea[disabled]:-ms-input-placeholder,fieldset[disabled] .input:-ms-input-placeholder,fieldset[disabled] .textarea:-ms-input-placeholder{color:rgba(122,122,122,.3)}.input[readonly],.textarea[readonly]{box-shadow:none}.input.is-white,.textarea.is-white{border-color:#fff}.input.is-white.is-active,.input.is-white.is-focused,.input.is-white:active,.input.is-white:focus,.textarea.is-white.is-active,.textarea.is-white.is-focused,.textarea.is-white:active,.textarea.is-white:focus{box-shadow:0 0 0 .125em rgba(255,255,255,.25)}.input.is-black,.textarea.is-black{border-color:#0a0a0a}.input.is-black.is-active,.input.is-black.is-focused,.input.is-black:active,.input.is-black:focus,.textarea.is-black.is-active,.textarea.is-black.is-focused,.textarea.is-black:active,.textarea.is-black:focus{box-shadow:0 0 0 .125em rgba(10,10,10,.25)}.input.is-light,.textarea.is-light{border-color:#f5f5f5}.input.is-light.is-active,.input.is-light.is-focused,.input.is-light:active,.input.is-light:focus,.textarea.is-light.is-active,.textarea.is-light.is-focused,.textarea.is-light:active,.textarea.is-light:focus{box-shadow:0 0 0 .125em rgba(245,245,245,.25)}.input.is-dark,.textarea.is-dark{border-color:#363636}.input.is-dark.is-active,.input.is-dark.is-focused,.input.is-dark:active,.input.is-dark:focus,.textarea.is-dark.is-active,.textarea.is-dark.is-focused,.textarea.is-dark:active,.textarea.is-dark:focus{box-shadow:0 0 0 .125em 
rgba(54,54,54,.25)}.input.is-primary,.textarea.is-primary{border-color:#00d1b2}.input.is-primary.is-active,.input.is-primary.is-focused,.input.is-primary:active,.input.is-primary:focus,.textarea.is-primary.is-active,.textarea.is-primary.is-focused,.textarea.is-primary:active,.textarea.is-primary:focus{box-shadow:0 0 0 .125em rgba(0,209,178,.25)}.input.is-link,.textarea.is-link{border-color:#3273dc}.input.is-link.is-active,.input.is-link.is-focused,.input.is-link:active,.input.is-link:focus,.textarea.is-link.is-active,.textarea.is-link.is-focused,.textarea.is-link:active,.textarea.is-link:focus{box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.input.is-info,.textarea.is-info{border-color:#209cee}.input.is-info.is-active,.input.is-info.is-focused,.input.is-info:active,.input.is-info:focus,.textarea.is-info.is-active,.textarea.is-info.is-focused,.textarea.is-info:active,.textarea.is-info:focus{box-shadow:0 0 0 .125em rgba(32,156,238,.25)}.input.is-success,.textarea.is-success{border-color:#23d160}.input.is-success.is-active,.input.is-success.is-focused,.input.is-success:active,.input.is-success:focus,.textarea.is-success.is-active,.textarea.is-success.is-focused,.textarea.is-success:active,.textarea.is-success:focus{box-shadow:0 0 0 .125em rgba(35,209,96,.25)}.input.is-warning,.textarea.is-warning{border-color:#ffdd57}.input.is-warning.is-active,.input.is-warning.is-focused,.input.is-warning:active,.input.is-warning:focus,.textarea.is-warning.is-active,.textarea.is-warning.is-focused,.textarea.is-warning:active,.textarea.is-warning:focus{box-shadow:0 0 0 .125em rgba(255,221,87,.25)}.input.is-danger,.textarea.is-danger{border-color:#ff3860}.input.is-danger.is-active,.input.is-danger.is-focused,.input.is-danger:active,.input.is-danger:focus,.textarea.is-danger.is-active,.textarea.is-danger.is-focused,.textarea.is-danger:active,.textarea.is-danger:focus{box-shadow:0 0 0 .125em rgba(255,56,96,.25)}.input.is-small,.textarea.is-small{border-radius:2px;font-size:.75rem}.input.is-medium,.textarea.is-medium{font-size:1.25rem}.input.is-large,.textarea.is-large{font-size:1.5rem}.input.is-fullwidth,.textarea.is-fullwidth{display:block;width:100%}.input.is-inline,.textarea.is-inline{display:inline;width:auto}.input.is-rounded{border-radius:290486px;padding-left:1em;padding-right:1em}.input.is-static{background-color:transparent;border-color:transparent;box-shadow:none;padding-left:0;padding-right:0}.textarea{display:block;max-width:100%;min-width:100%;padding:.625em;resize:vertical}.textarea:not([rows]){max-height:600px;min-height:120px}.textarea[rows]{height:initial}.textarea.has-fixed-size{resize:none}.checkbox,.radio{cursor:pointer;display:inline-block;line-height:1.25;position:relative}.checkbox input,.radio input{cursor:pointer}.checkbox:hover,.radio:hover{color:#363636}.checkbox[disabled],.radio[disabled],fieldset[disabled] .checkbox,fieldset[disabled] .radio{color:#7a7a7a;cursor:not-allowed}.radio+.radio{margin-left:.5em}.select{display:inline-block;max-width:100%;position:relative;vertical-align:top}.select:not(.is-multiple){height:2.25em}.select:not(.is-multiple):not(.is-loading)::after{border-color:#3273dc;right:1.125em;z-index:4}.select.is-rounded select{border-radius:290486px;padding-left:1em}.select select{background-color:#fff;border-color:#dbdbdb;color:#363636;cursor:pointer;display:block;font-size:1em;max-width:100%;outline:0}.select select::-moz-placeholder{color:rgba(54,54,54,.3)}.select select::-webkit-input-placeholder{color:rgba(54,54,54,.3)}.select 
select:-moz-placeholder{color:rgba(54,54,54,.3)}.select select:-ms-input-placeholder{color:rgba(54,54,54,.3)}.select select.is-hovered,.select select:hover{border-color:#b5b5b5}.select select.is-active,.select select.is-focused,.select select:active,.select select:focus{border-color:#3273dc;box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.select select[disabled],fieldset[disabled] .select select{background-color:#f5f5f5;border-color:#f5f5f5;box-shadow:none;color:#7a7a7a}.select select[disabled]::-moz-placeholder,fieldset[disabled] .select select::-moz-placeholder{color:rgba(122,122,122,.3)}.select select[disabled]::-webkit-input-placeholder,fieldset[disabled] .select select::-webkit-input-placeholder{color:rgba(122,122,122,.3)}.select select[disabled]:-moz-placeholder,fieldset[disabled] .select select:-moz-placeholder{color:rgba(122,122,122,.3)}.select select[disabled]:-ms-input-placeholder,fieldset[disabled] .select select:-ms-input-placeholder{color:rgba(122,122,122,.3)}.select select::-ms-expand{display:none}.select select[disabled]:hover,fieldset[disabled] .select select:hover{border-color:#f5f5f5}.select select:not([multiple]){padding-right:2.5em}.select select[multiple]{height:auto;padding:0}.select select[multiple] option{padding:.5em 1em}.select:not(.is-multiple):not(.is-loading):hover::after{border-color:#363636}.select.is-white:not(:hover)::after{border-color:#fff}.select.is-white select{border-color:#fff}.select.is-white select.is-hovered,.select.is-white select:hover{border-color:#f2f2f2}.select.is-white select.is-active,.select.is-white select.is-focused,.select.is-white select:active,.select.is-white select:focus{box-shadow:0 0 0 .125em rgba(255,255,255,.25)}.select.is-black:not(:hover)::after{border-color:#0a0a0a}.select.is-black select{border-color:#0a0a0a}.select.is-black select.is-hovered,.select.is-black select:hover{border-color:#000}.select.is-black select.is-active,.select.is-black select.is-focused,.select.is-black select:active,.select.is-black select:focus{box-shadow:0 0 0 .125em rgba(10,10,10,.25)}.select.is-light:not(:hover)::after{border-color:#f5f5f5}.select.is-light select{border-color:#f5f5f5}.select.is-light select.is-hovered,.select.is-light select:hover{border-color:#e8e8e8}.select.is-light select.is-active,.select.is-light select.is-focused,.select.is-light select:active,.select.is-light select:focus{box-shadow:0 0 0 .125em rgba(245,245,245,.25)}.select.is-dark:not(:hover)::after{border-color:#363636}.select.is-dark select{border-color:#363636}.select.is-dark select.is-hovered,.select.is-dark select:hover{border-color:#292929}.select.is-dark select.is-active,.select.is-dark select.is-focused,.select.is-dark select:active,.select.is-dark select:focus{box-shadow:0 0 0 .125em rgba(54,54,54,.25)}.select.is-primary:not(:hover)::after{border-color:#00d1b2}.select.is-primary select{border-color:#00d1b2}.select.is-primary select.is-hovered,.select.is-primary select:hover{border-color:#00b89c}.select.is-primary select.is-active,.select.is-primary select.is-focused,.select.is-primary select:active,.select.is-primary select:focus{box-shadow:0 0 0 .125em rgba(0,209,178,.25)}.select.is-link:not(:hover)::after{border-color:#3273dc}.select.is-link select{border-color:#3273dc}.select.is-link select.is-hovered,.select.is-link select:hover{border-color:#2366d1}.select.is-link select.is-active,.select.is-link select.is-focused,.select.is-link select:active,.select.is-link select:focus{box-shadow:0 0 0 .125em 
rgba(50,115,220,.25)}.select.is-info:not(:hover)::after{border-color:#209cee}.select.is-info select{border-color:#209cee}.select.is-info select.is-hovered,.select.is-info select:hover{border-color:#118fe4}.select.is-info select.is-active,.select.is-info select.is-focused,.select.is-info select:active,.select.is-info select:focus{box-shadow:0 0 0 .125em rgba(32,156,238,.25)}.select.is-success:not(:hover)::after{border-color:#23d160}.select.is-success select{border-color:#23d160}.select.is-success select.is-hovered,.select.is-success select:hover{border-color:#20bc56}.select.is-success select.is-active,.select.is-success select.is-focused,.select.is-success select:active,.select.is-success select:focus{box-shadow:0 0 0 .125em rgba(35,209,96,.25)}.select.is-warning:not(:hover)::after{border-color:#ffdd57}.select.is-warning select{border-color:#ffdd57}.select.is-warning select.is-hovered,.select.is-warning select:hover{border-color:#ffd83d}.select.is-warning select.is-active,.select.is-warning select.is-focused,.select.is-warning select:active,.select.is-warning select:focus{box-shadow:0 0 0 .125em rgba(255,221,87,.25)}.select.is-danger:not(:hover)::after{border-color:#ff3860}.select.is-danger select{border-color:#ff3860}.select.is-danger select.is-hovered,.select.is-danger select:hover{border-color:#ff1f4b}.select.is-danger select.is-active,.select.is-danger select.is-focused,.select.is-danger select:active,.select.is-danger select:focus{box-shadow:0 0 0 .125em rgba(255,56,96,.25)}.select.is-small{border-radius:2px;font-size:.75rem}.select.is-medium{font-size:1.25rem}.select.is-large{font-size:1.5rem}.select.is-disabled::after{border-color:#7a7a7a}.select.is-fullwidth{width:100%}.select.is-fullwidth select{width:100%}.select.is-loading::after{margin-top:0;position:absolute;right:.625em;top:.625em;-webkit-transform:none;transform:none}.select.is-loading.is-small:after{font-size:.75rem}.select.is-loading.is-medium:after{font-size:1.25rem}.select.is-loading.is-large:after{font-size:1.5rem}.file{align-items:stretch;display:flex;justify-content:flex-start;position:relative}.file.is-white .file-cta{background-color:#fff;border-color:transparent;color:#0a0a0a}.file.is-white.is-hovered .file-cta,.file.is-white:hover .file-cta{background-color:#f9f9f9;border-color:transparent;color:#0a0a0a}.file.is-white.is-focused .file-cta,.file.is-white:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(255,255,255,.25);color:#0a0a0a}.file.is-white.is-active .file-cta,.file.is-white:active .file-cta{background-color:#f2f2f2;border-color:transparent;color:#0a0a0a}.file.is-black .file-cta{background-color:#0a0a0a;border-color:transparent;color:#fff}.file.is-black.is-hovered .file-cta,.file.is-black:hover .file-cta{background-color:#040404;border-color:transparent;color:#fff}.file.is-black.is-focused .file-cta,.file.is-black:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(10,10,10,.25);color:#fff}.file.is-black.is-active .file-cta,.file.is-black:active .file-cta{background-color:#000;border-color:transparent;color:#fff}.file.is-light .file-cta{background-color:#f5f5f5;border-color:transparent;color:#363636}.file.is-light.is-hovered .file-cta,.file.is-light:hover .file-cta{background-color:#eee;border-color:transparent;color:#363636}.file.is-light.is-focused .file-cta,.file.is-light:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(245,245,245,.25);color:#363636}.file.is-light.is-active .file-cta,.file.is-light:active 
.file-cta{background-color:#e8e8e8;border-color:transparent;color:#363636}.file.is-dark .file-cta{background-color:#363636;border-color:transparent;color:#f5f5f5}.file.is-dark.is-hovered .file-cta,.file.is-dark:hover .file-cta{background-color:#2f2f2f;border-color:transparent;color:#f5f5f5}.file.is-dark.is-focused .file-cta,.file.is-dark:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(54,54,54,.25);color:#f5f5f5}.file.is-dark.is-active .file-cta,.file.is-dark:active .file-cta{background-color:#292929;border-color:transparent;color:#f5f5f5}.file.is-primary .file-cta{background-color:#00d1b2;border-color:transparent;color:#fff}.file.is-primary.is-hovered .file-cta,.file.is-primary:hover .file-cta{background-color:#00c4a7;border-color:transparent;color:#fff}.file.is-primary.is-focused .file-cta,.file.is-primary:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(0,209,178,.25);color:#fff}.file.is-primary.is-active .file-cta,.file.is-primary:active .file-cta{background-color:#00b89c;border-color:transparent;color:#fff}.file.is-link .file-cta{background-color:#3273dc;border-color:transparent;color:#fff}.file.is-link.is-hovered .file-cta,.file.is-link:hover .file-cta{background-color:#276cda;border-color:transparent;color:#fff}.file.is-link.is-focused .file-cta,.file.is-link:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(50,115,220,.25);color:#fff}.file.is-link.is-active .file-cta,.file.is-link:active .file-cta{background-color:#2366d1;border-color:transparent;color:#fff}.file.is-info .file-cta{background-color:#209cee;border-color:transparent;color:#fff}.file.is-info.is-hovered .file-cta,.file.is-info:hover .file-cta{background-color:#1496ed;border-color:transparent;color:#fff}.file.is-info.is-focused .file-cta,.file.is-info:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(32,156,238,.25);color:#fff}.file.is-info.is-active .file-cta,.file.is-info:active .file-cta{background-color:#118fe4;border-color:transparent;color:#fff}.file.is-success .file-cta{background-color:#23d160;border-color:transparent;color:#fff}.file.is-success.is-hovered .file-cta,.file.is-success:hover .file-cta{background-color:#22c65b;border-color:transparent;color:#fff}.file.is-success.is-focused .file-cta,.file.is-success:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(35,209,96,.25);color:#fff}.file.is-success.is-active .file-cta,.file.is-success:active .file-cta{background-color:#20bc56;border-color:transparent;color:#fff}.file.is-warning .file-cta{background-color:#ffdd57;border-color:transparent;color:rgba(0,0,0,.7)}.file.is-warning.is-hovered .file-cta,.file.is-warning:hover .file-cta{background-color:#ffdb4a;border-color:transparent;color:rgba(0,0,0,.7)}.file.is-warning.is-focused .file-cta,.file.is-warning:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(255,221,87,.25);color:rgba(0,0,0,.7)}.file.is-warning.is-active .file-cta,.file.is-warning:active .file-cta{background-color:#ffd83d;border-color:transparent;color:rgba(0,0,0,.7)}.file.is-danger .file-cta{background-color:#ff3860;border-color:transparent;color:#fff}.file.is-danger.is-hovered .file-cta,.file.is-danger:hover .file-cta{background-color:#ff2b56;border-color:transparent;color:#fff}.file.is-danger.is-focused .file-cta,.file.is-danger:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(255,56,96,.25);color:#fff}.file.is-danger.is-active .file-cta,.file.is-danger:active 
.file-cta{background-color:#ff1f4b;border-color:transparent;color:#fff}.file.is-small{font-size:.75rem}.file.is-medium{font-size:1.25rem}.file.is-medium .file-icon .fa{font-size:21px}.file.is-large{font-size:1.5rem}.file.is-large .file-icon .fa{font-size:28px}.file.has-name .file-cta{border-bottom-right-radius:0;border-top-right-radius:0}.file.has-name .file-name{border-bottom-left-radius:0;border-top-left-radius:0}.file.has-name.is-empty .file-cta{border-radius:4px}.file.has-name.is-empty .file-name{display:none}.file.is-boxed .file-label{flex-direction:column}.file.is-boxed .file-cta{flex-direction:column;height:auto;padding:1em 3em}.file.is-boxed .file-name{border-width:0 1px 1px}.file.is-boxed .file-icon{height:1.5em;width:1.5em}.file.is-boxed .file-icon .fa{font-size:21px}.file.is-boxed.is-small .file-icon .fa{font-size:14px}.file.is-boxed.is-medium .file-icon .fa{font-size:28px}.file.is-boxed.is-large .file-icon .fa{font-size:35px}.file.is-boxed.has-name .file-cta{border-radius:4px 4px 0 0}.file.is-boxed.has-name .file-name{border-radius:0 0 4px 4px;border-width:0 1px 1px}.file.is-centered{justify-content:center}.file.is-fullwidth .file-label{width:100%}.file.is-fullwidth .file-name{flex-grow:1;max-width:none}.file.is-right{justify-content:flex-end}.file.is-right .file-cta{border-radius:0 4px 4px 0}.file.is-right .file-name{border-radius:4px 0 0 4px;border-width:1px 0 1px 1px;order:-1}.file-label{align-items:stretch;display:flex;cursor:pointer;justify-content:flex-start;overflow:hidden;position:relative}.file-label:hover .file-cta{background-color:#eee;color:#363636}.file-label:hover .file-name{border-color:#d5d5d5}.file-label:active .file-cta{background-color:#e8e8e8;color:#363636}.file-label:active .file-name{border-color:#cfcfcf}.file-input{height:100%;left:0;opacity:0;outline:0;position:absolute;top:0;width:100%}.file-cta,.file-name{border-color:#dbdbdb;border-radius:4px;font-size:1em;padding-left:1em;padding-right:1em;white-space:nowrap}.file-cta{background-color:#f5f5f5;color:#4a4a4a}.file-name{border-color:#dbdbdb;border-style:solid;border-width:1px 1px 1px 0;display:block;max-width:16em;overflow:hidden;text-align:left;text-overflow:ellipsis}.file-icon{align-items:center;display:flex;height:1em;justify-content:center;margin-right:.5em;width:1em}.file-icon .fa{font-size:14px}.label{color:#363636;display:block;font-size:1rem;font-weight:700}.label:not(:last-child){margin-bottom:.5em}.label.is-small{font-size:.75rem}.label.is-medium{font-size:1.25rem}.label.is-large{font-size:1.5rem}.help{display:block;font-size:.75rem;margin-top:.25rem}.help.is-white{color:#fff}.help.is-black{color:#0a0a0a}.help.is-light{color:#f5f5f5}.help.is-dark{color:#363636}.help.is-primary{color:#00d1b2}.help.is-link{color:#3273dc}.help.is-info{color:#209cee}.help.is-success{color:#23d160}.help.is-warning{color:#ffdd57}.help.is-danger{color:#ff3860}.field:not(:last-child){margin-bottom:.75rem}.field.has-addons{display:flex;justify-content:flex-start}.field.has-addons .control:not(:last-child){margin-right:-1px}.field.has-addons .control:not(:first-child):not(:last-child) .button,.field.has-addons .control:not(:first-child):not(:last-child) .input,.field.has-addons .control:not(:first-child):not(:last-child) .select select{border-radius:0}.field.has-addons .control:first-child:not(:only-child) .button,.field.has-addons .control:first-child:not(:only-child) .input,.field.has-addons .control:first-child:not(:only-child) .select select{border-bottom-right-radius:0;border-top-right-radius:0}.field.has-addons 
.control:last-child:not(:only-child) .button,.field.has-addons .control:last-child:not(:only-child) .input,.field.has-addons .control:last-child:not(:only-child) .select select{border-bottom-left-radius:0;border-top-left-radius:0}.field.has-addons .control .button:not([disabled]).is-hovered,.field.has-addons .control .button:not([disabled]):hover,.field.has-addons .control .input:not([disabled]).is-hovered,.field.has-addons .control .input:not([disabled]):hover,.field.has-addons .control .select select:not([disabled]).is-hovered,.field.has-addons .control .select select:not([disabled]):hover{z-index:2}.field.has-addons .control .button:not([disabled]).is-active,.field.has-addons .control .button:not([disabled]).is-focused,.field.has-addons .control .button:not([disabled]):active,.field.has-addons .control .button:not([disabled]):focus,.field.has-addons .control .input:not([disabled]).is-active,.field.has-addons .control .input:not([disabled]).is-focused,.field.has-addons .control .input:not([disabled]):active,.field.has-addons .control .input:not([disabled]):focus,.field.has-addons .control .select select:not([disabled]).is-active,.field.has-addons .control .select select:not([disabled]).is-focused,.field.has-addons .control .select select:not([disabled]):active,.field.has-addons .control .select select:not([disabled]):focus{z-index:3}.field.has-addons .control .button:not([disabled]).is-active:hover,.field.has-addons .control .button:not([disabled]).is-focused:hover,.field.has-addons .control .button:not([disabled]):active:hover,.field.has-addons .control .button:not([disabled]):focus:hover,.field.has-addons .control .input:not([disabled]).is-active:hover,.field.has-addons .control .input:not([disabled]).is-focused:hover,.field.has-addons .control .input:not([disabled]):active:hover,.field.has-addons .control .input:not([disabled]):focus:hover,.field.has-addons .control .select select:not([disabled]).is-active:hover,.field.has-addons .control .select select:not([disabled]).is-focused:hover,.field.has-addons .control .select select:not([disabled]):active:hover,.field.has-addons .control .select select:not([disabled]):focus:hover{z-index:4}.field.has-addons .control.is-expanded{flex-grow:1}.field.has-addons.has-addons-centered{justify-content:center}.field.has-addons.has-addons-right{justify-content:flex-end}.field.has-addons.has-addons-fullwidth .control{flex-grow:1;flex-shrink:0}.field.is-grouped{display:flex;justify-content:flex-start}.field.is-grouped>.control{flex-shrink:0}.field.is-grouped>.control:not(:last-child){margin-bottom:0;margin-right:.75rem}.field.is-grouped>.control.is-expanded{flex-grow:1;flex-shrink:1}.field.is-grouped.is-grouped-centered{justify-content:center}.field.is-grouped.is-grouped-right{justify-content:flex-end}.field.is-grouped.is-grouped-multiline{flex-wrap:wrap}.field.is-grouped.is-grouped-multiline>.control:last-child,.field.is-grouped.is-grouped-multiline>.control:not(:last-child){margin-bottom:.75rem}.field.is-grouped.is-grouped-multiline:last-child{margin-bottom:-.75rem}.field.is-grouped.is-grouped-multiline:not(:last-child){margin-bottom:0}@media screen and (min-width:769px),print{.field.is-horizontal{display:flex}}.field-label .label{font-size:inherit}@media screen and (max-width:768px){.field-label{margin-bottom:.5rem}}@media screen and 
(min-width:769px),print{.field-label{flex-basis:0;flex-grow:1;flex-shrink:0;margin-right:1.5rem;text-align:right}.field-label.is-small{font-size:.75rem;padding-top:.375em}.field-label.is-normal{padding-top:.375em}.field-label.is-medium{font-size:1.25rem;padding-top:.375em}.field-label.is-large{font-size:1.5rem;padding-top:.375em}}.field-body .field .field{margin-bottom:0}@media screen and (min-width:769px),print{.field-body{display:flex;flex-basis:0;flex-grow:5;flex-shrink:1}.field-body .field{margin-bottom:0}.field-body>.field{flex-shrink:1}.field-body>.field:not(.is-narrow){flex-grow:1}.field-body>.field:not(:last-child){margin-right:.75rem}}.control{box-sizing:border-box;clear:both;font-size:1rem;position:relative;text-align:left}.control.has-icons-left .input:focus~.icon,.control.has-icons-left .select:focus~.icon,.control.has-icons-right .input:focus~.icon,.control.has-icons-right .select:focus~.icon{color:#7a7a7a}.control.has-icons-left .input.is-small~.icon,.control.has-icons-left .select.is-small~.icon,.control.has-icons-right .input.is-small~.icon,.control.has-icons-right .select.is-small~.icon{font-size:.75rem}.control.has-icons-left .input.is-medium~.icon,.control.has-icons-left .select.is-medium~.icon,.control.has-icons-right .input.is-medium~.icon,.control.has-icons-right .select.is-medium~.icon{font-size:1.25rem}.control.has-icons-left .input.is-large~.icon,.control.has-icons-left .select.is-large~.icon,.control.has-icons-right .input.is-large~.icon,.control.has-icons-right .select.is-large~.icon{font-size:1.5rem}.control.has-icons-left .icon,.control.has-icons-right .icon{color:#dbdbdb;height:2.25em;pointer-events:none;position:absolute;top:0;width:2.25em;z-index:4}.control.has-icons-left .input,.control.has-icons-left .select select{padding-left:2.25em}.control.has-icons-left .icon.is-left{left:0}.control.has-icons-right .input,.control.has-icons-right .select select{padding-right:2.25em}.control.has-icons-right .icon.is-right{right:0}.control.is-loading::after{position:absolute!important;right:.625em;top:.625em;z-index:4}.control.is-loading.is-small:after{font-size:.75rem}.control.is-loading.is-medium:after{font-size:1.25rem}.control.is-loading.is-large:after{font-size:1.5rem}.icon{align-items:center;display:inline-flex;justify-content:center;height:1.5rem;width:1.5rem}.icon.is-small{height:1rem;width:1rem}.icon.is-medium{height:2rem;width:2rem}.icon.is-large{height:3rem;width:3rem}.image{display:block;position:relative}.image img{display:block;height:auto;width:100%}.image img.is-rounded{border-radius:290486px}.image.is-16by9 .has-ratio,.image.is-16by9 img,.image.is-1by1 .has-ratio,.image.is-1by1 img,.image.is-1by2 .has-ratio,.image.is-1by2 img,.image.is-1by3 .has-ratio,.image.is-1by3 img,.image.is-2by1 .has-ratio,.image.is-2by1 img,.image.is-2by3 .has-ratio,.image.is-2by3 img,.image.is-3by1 .has-ratio,.image.is-3by1 img,.image.is-3by2 .has-ratio,.image.is-3by2 img,.image.is-3by4 .has-ratio,.image.is-3by4 img,.image.is-3by5 .has-ratio,.image.is-3by5 img,.image.is-4by3 .has-ratio,.image.is-4by3 img,.image.is-4by5 .has-ratio,.image.is-4by5 img,.image.is-5by3 .has-ratio,.image.is-5by3 img,.image.is-5by4 .has-ratio,.image.is-5by4 img,.image.is-9by16 .has-ratio,.image.is-9by16 img,.image.is-square .has-ratio,.image.is-square 
img{height:100%;width:100%}.image.is-1by1,.image.is-square{padding-top:100%}.image.is-5by4{padding-top:80%}.image.is-4by3{padding-top:75%}.image.is-3by2{padding-top:66.6666%}.image.is-5by3{padding-top:60%}.image.is-16by9{padding-top:56.25%}.image.is-2by1{padding-top:50%}.image.is-3by1{padding-top:33.3333%}.image.is-4by5{padding-top:125%}.image.is-3by4{padding-top:133.3333%}.image.is-2by3{padding-top:150%}.image.is-3by5{padding-top:166.6666%}.image.is-9by16{padding-top:177.7777%}.image.is-1by2{padding-top:200%}.image.is-1by3{padding-top:300%}.image.is-16x16{height:16px;width:16px}.image.is-24x24{height:24px;width:24px}.image.is-32x32{height:32px;width:32px}.image.is-48x48{height:48px;width:48px}.image.is-64x64{height:64px;width:64px}.image.is-96x96{height:96px;width:96px}.image.is-128x128{height:128px;width:128px}.notification{background-color:#f5f5f5;border-radius:4px;padding:1.25rem 2.5rem 1.25rem 1.5rem;position:relative}.notification a:not(.button):not(.dropdown-item){color:currentColor;text-decoration:underline}.notification strong{color:currentColor}.notification code,.notification pre{background:#fff}.notification pre code{background:0 0}.notification>.delete{position:absolute;right:.5rem;top:.5rem}.notification .content,.notification .subtitle,.notification .title{color:currentColor}.notification.is-white{background-color:#fff;color:#0a0a0a}.notification.is-black{background-color:#0a0a0a;color:#fff}.notification.is-light{background-color:#f5f5f5;color:#363636}.notification.is-dark{background-color:#363636;color:#f5f5f5}.notification.is-primary{background-color:#00d1b2;color:#fff}.notification.is-link{background-color:#3273dc;color:#fff}.notification.is-info{background-color:#209cee;color:#fff}.notification.is-success{background-color:#23d160;color:#fff}.notification.is-warning{background-color:#ffdd57;color:rgba(0,0,0,.7)}.notification.is-danger{background-color:#ff3860;color:#fff}.progress{-moz-appearance:none;-webkit-appearance:none;border:none;border-radius:290486px;display:block;height:1rem;overflow:hidden;padding:0;width:100%}.progress::-webkit-progress-bar{background-color:#dbdbdb}.progress::-webkit-progress-value{background-color:#4a4a4a}.progress::-moz-progress-bar{background-color:#4a4a4a}.progress::-ms-fill{background-color:#4a4a4a;border:none}.progress:indeterminate{-webkit-animation-duration:1.5s;animation-duration:1.5s;-webkit-animation-iteration-count:infinite;animation-iteration-count:infinite;-webkit-animation-name:moveIndeterminate;animation-name:moveIndeterminate;-webkit-animation-timing-function:linear;animation-timing-function:linear;background-color:#dbdbdb;background-image:linear-gradient(to right,#4a4a4a 30%,#dbdbdb 30%);background-position:top left;background-repeat:no-repeat;background-size:150% 150%}.progress:indeterminate::-webkit-progress-bar{background-color:transparent}.progress:indeterminate::-moz-progress-bar{background-color:transparent}.progress.is-white::-webkit-progress-value{background-color:#fff}.progress.is-white::-moz-progress-bar{background-color:#fff}.progress.is-white::-ms-fill{background-color:#fff}.progress.is-white:indeterminate{background-image:linear-gradient(to right,#fff 30%,#dbdbdb 30%)}.progress.is-black::-webkit-progress-value{background-color:#0a0a0a}.progress.is-black::-moz-progress-bar{background-color:#0a0a0a}.progress.is-black::-ms-fill{background-color:#0a0a0a}.progress.is-black:indeterminate{background-image:linear-gradient(to right,#0a0a0a 30%,#dbdbdb 
30%)}.progress.is-light::-webkit-progress-value{background-color:#f5f5f5}.progress.is-light::-moz-progress-bar{background-color:#f5f5f5}.progress.is-light::-ms-fill{background-color:#f5f5f5}.progress.is-light:indeterminate{background-image:linear-gradient(to right,#f5f5f5 30%,#dbdbdb 30%)}.progress.is-dark::-webkit-progress-value{background-color:#363636}.progress.is-dark::-moz-progress-bar{background-color:#363636}.progress.is-dark::-ms-fill{background-color:#363636}.progress.is-dark:indeterminate{background-image:linear-gradient(to right,#363636 30%,#dbdbdb 30%)}.progress.is-primary::-webkit-progress-value{background-color:#00d1b2}.progress.is-primary::-moz-progress-bar{background-color:#00d1b2}.progress.is-primary::-ms-fill{background-color:#00d1b2}.progress.is-primary:indeterminate{background-image:linear-gradient(to right,#00d1b2 30%,#dbdbdb 30%)}.progress.is-link::-webkit-progress-value{background-color:#3273dc}.progress.is-link::-moz-progress-bar{background-color:#3273dc}.progress.is-link::-ms-fill{background-color:#3273dc}.progress.is-link:indeterminate{background-image:linear-gradient(to right,#3273dc 30%,#dbdbdb 30%)}.progress.is-info::-webkit-progress-value{background-color:#209cee}.progress.is-info::-moz-progress-bar{background-color:#209cee}.progress.is-info::-ms-fill{background-color:#209cee}.progress.is-info:indeterminate{background-image:linear-gradient(to right,#209cee 30%,#dbdbdb 30%)}.progress.is-success::-webkit-progress-value{background-color:#23d160}.progress.is-success::-moz-progress-bar{background-color:#23d160}.progress.is-success::-ms-fill{background-color:#23d160}.progress.is-success:indeterminate{background-image:linear-gradient(to right,#23d160 30%,#dbdbdb 30%)}.progress.is-warning::-webkit-progress-value{background-color:#ffdd57}.progress.is-warning::-moz-progress-bar{background-color:#ffdd57}.progress.is-warning::-ms-fill{background-color:#ffdd57}.progress.is-warning:indeterminate{background-image:linear-gradient(to right,#ffdd57 30%,#dbdbdb 30%)}.progress.is-danger::-webkit-progress-value{background-color:#ff3860}.progress.is-danger::-moz-progress-bar{background-color:#ff3860}.progress.is-danger::-ms-fill{background-color:#ff3860}.progress.is-danger:indeterminate{background-image:linear-gradient(to right,#ff3860 30%,#dbdbdb 30%)}.progress.is-small{height:.75rem}.progress.is-medium{height:1.25rem}.progress.is-large{height:1.5rem}@-webkit-keyframes moveIndeterminate{from{background-position:200% 0}to{background-position:-200% 0}}@keyframes moveIndeterminate{from{background-position:200% 0}to{background-position:-200% 0}}.table{background-color:#fff;color:#363636}.table td,.table th{border:1px solid #dbdbdb;border-width:0 0 1px;padding:.5em .75em;vertical-align:top}.table td.is-white,.table th.is-white{background-color:#fff;border-color:#fff;color:#0a0a0a}.table td.is-black,.table th.is-black{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}.table td.is-light,.table th.is-light{background-color:#f5f5f5;border-color:#f5f5f5;color:#363636}.table td.is-dark,.table th.is-dark{background-color:#363636;border-color:#363636;color:#f5f5f5}.table td.is-primary,.table th.is-primary{background-color:#00d1b2;border-color:#00d1b2;color:#fff}.table td.is-link,.table th.is-link{background-color:#3273dc;border-color:#3273dc;color:#fff}.table td.is-info,.table th.is-info{background-color:#209cee;border-color:#209cee;color:#fff}.table td.is-success,.table th.is-success{background-color:#23d160;border-color:#23d160;color:#fff}.table td.is-warning,.table 
th.is-warning{background-color:#ffdd57;border-color:#ffdd57;color:rgba(0,0,0,.7)}.table td.is-danger,.table th.is-danger{background-color:#ff3860;border-color:#ff3860;color:#fff}.table td.is-narrow,.table th.is-narrow{white-space:nowrap;width:1%}.table td.is-selected,.table th.is-selected{background-color:#00d1b2;color:#fff}.table td.is-selected a,.table td.is-selected strong,.table th.is-selected a,.table th.is-selected strong{color:currentColor}.table th{color:#363636;text-align:left}.table tr.is-selected{background-color:#00d1b2;color:#fff}.table tr.is-selected a,.table tr.is-selected strong{color:currentColor}.table tr.is-selected td,.table tr.is-selected th{border-color:#fff;color:currentColor}.table thead{background-color:transparent}.table thead td,.table thead th{border-width:0 0 2px;color:#363636}.table tfoot{background-color:transparent}.table tfoot td,.table tfoot th{border-width:2px 0 0;color:#363636}.table tbody{background-color:transparent}.table tbody tr:last-child td,.table tbody tr:last-child th{border-bottom-width:0}.table.is-bordered td,.table.is-bordered th{border-width:1px}.table.is-bordered tr:last-child td,.table.is-bordered tr:last-child th{border-bottom-width:1px}.table.is-fullwidth{width:100%}.table.is-hoverable tbody tr:not(.is-selected):hover{background-color:#fafafa}.table.is-hoverable.is-striped tbody tr:not(.is-selected):hover{background-color:#fafafa}.table.is-hoverable.is-striped tbody tr:not(.is-selected):hover:nth-child(even){background-color:#f5f5f5}.table.is-narrow td,.table.is-narrow th{padding:.25em .5em}.table.is-striped tbody tr:not(.is-selected):nth-child(even){background-color:#fafafa}.table-container{-webkit-overflow-scrolling:touch;overflow:auto;overflow-y:hidden;max-width:100%}.tags{align-items:center;display:flex;flex-wrap:wrap;justify-content:flex-start}.tags .tag{margin-bottom:.5rem}.tags .tag:not(:last-child){margin-right:.5rem}.tags:last-child{margin-bottom:-.5rem}.tags:not(:last-child){margin-bottom:1rem}.tags.are-medium .tag:not(.is-normal):not(.is-large){font-size:1rem}.tags.are-large .tag:not(.is-normal):not(.is-medium){font-size:1.25rem}.tags.has-addons .tag{margin-right:0}.tags.has-addons .tag:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.tags.has-addons .tag:not(:last-child){border-bottom-right-radius:0;border-top-right-radius:0}.tags.is-centered{justify-content:center}.tags.is-centered .tag{margin-right:.25rem;margin-left:.25rem}.tags.is-right{justify-content:flex-end}.tags.is-right .tag:not(:first-child){margin-left:.5rem}.tags.is-right .tag:not(:last-child){margin-right:0}.tags.has-addons .tag{margin-right:0}.tags.has-addons .tag:not(:first-child){margin-left:0;border-bottom-left-radius:0;border-top-left-radius:0}.tags.has-addons .tag:not(:last-child){border-bottom-right-radius:0;border-top-right-radius:0}.tag:not(body){align-items:center;background-color:#f5f5f5;border-radius:4px;color:#4a4a4a;display:inline-flex;font-size:.75rem;height:2em;justify-content:center;line-height:1.5;padding-left:.75em;padding-right:.75em;white-space:nowrap}.tag:not(body) 
.delete{margin-left:.25rem;margin-right:-.375rem}.tag:not(body).is-white{background-color:#fff;color:#0a0a0a}.tag:not(body).is-black{background-color:#0a0a0a;color:#fff}.tag:not(body).is-light{background-color:#f5f5f5;color:#363636}.tag:not(body).is-dark{background-color:#363636;color:#f5f5f5}.tag:not(body).is-primary{background-color:#00d1b2;color:#fff}.tag:not(body).is-link{background-color:#3273dc;color:#fff}.tag:not(body).is-info{background-color:#209cee;color:#fff}.tag:not(body).is-success{background-color:#23d160;color:#fff}.tag:not(body).is-warning{background-color:#ffdd57;color:rgba(0,0,0,.7)}.tag:not(body).is-danger{background-color:#ff3860;color:#fff}.tag:not(body).is-normal{font-size:.75rem}.tag:not(body).is-medium{font-size:1rem}.tag:not(body).is-large{font-size:1.25rem}.tag:not(body) .icon:first-child:not(:last-child){margin-left:-.375em;margin-right:.1875em}.tag:not(body) .icon:last-child:not(:first-child){margin-left:.1875em;margin-right:-.375em}.tag:not(body) .icon:first-child:last-child{margin-left:-.375em;margin-right:-.375em}.tag:not(body).is-delete{margin-left:1px;padding:0;position:relative;width:2em}.tag:not(body).is-delete::after,.tag:not(body).is-delete::before{background-color:currentColor;content:"";display:block;left:50%;position:absolute;top:50%;-webkit-transform:translateX(-50%) translateY(-50%) rotate(45deg);transform:translateX(-50%) translateY(-50%) rotate(45deg);-webkit-transform-origin:center center;transform-origin:center center}.tag:not(body).is-delete::before{height:1px;width:50%}.tag:not(body).is-delete::after{height:50%;width:1px}.tag:not(body).is-delete:focus,.tag:not(body).is-delete:hover{background-color:#e8e8e8}.tag:not(body).is-delete:active{background-color:#dbdbdb}.tag:not(body).is-rounded{border-radius:290486px}a.tag:hover{text-decoration:underline}.subtitle,.title{word-break:break-word}.subtitle em,.subtitle span,.title em,.title span{font-weight:inherit}.subtitle sub,.title sub{font-size:.75em}.subtitle sup,.title sup{font-size:.75em}.subtitle .tag,.title .tag{vertical-align:middle}.title{color:#363636;font-size:2rem;font-weight:600;line-height:1.125}.title strong{color:inherit;font-weight:inherit}.title+.highlight{margin-top:-.75rem}.title:not(.is-spaced)+.subtitle{margin-top:-1.25rem}.title.is-1{font-size:3rem}.title.is-2{font-size:2.5rem}.title.is-3{font-size:2rem}.title.is-4{font-size:1.5rem}.title.is-5{font-size:1.25rem}.title.is-6{font-size:1rem}.title.is-7{font-size:.75rem}.subtitle{color:#4a4a4a;font-size:1.25rem;font-weight:400;line-height:1.25}.subtitle strong{color:#363636;font-weight:600}.subtitle:not(.is-spaced)+.title{margin-top:-1.25rem}.subtitle.is-1{font-size:3rem}.subtitle.is-2{font-size:2.5rem}.subtitle.is-3{font-size:2rem}.subtitle.is-4{font-size:1.5rem}.subtitle.is-5{font-size:1.25rem}.subtitle.is-6{font-size:1rem}.subtitle.is-7{font-size:.75rem}.heading{display:block;font-size:11px;letter-spacing:1px;margin-bottom:5px;text-transform:uppercase}.highlight{font-weight:400;max-width:100%;overflow:hidden;padding:0}.highlight pre{overflow:auto;max-width:100%}.number{align-items:center;background-color:#f5f5f5;border-radius:290486px;display:inline-flex;font-size:1.25rem;height:2em;justify-content:center;margin-right:1.5rem;min-width:2.5em;padding:.25rem .5rem;text-align:center;vertical-align:top}.breadcrumb{font-size:1rem;white-space:nowrap}.breadcrumb a{align-items:center;color:#3273dc;display:flex;justify-content:center;padding:0 .75em}.breadcrumb a:hover{color:#363636}.breadcrumb 
li{align-items:center;display:flex}.breadcrumb li:first-child a{padding-left:0}.breadcrumb li.is-active a{color:#363636;cursor:default;pointer-events:none}.breadcrumb li+li::before{color:#b5b5b5;content:"\0002f"}.breadcrumb ol,.breadcrumb ul{align-items:flex-start;display:flex;flex-wrap:wrap;justify-content:flex-start}.breadcrumb .icon:first-child{margin-right:.5em}.breadcrumb .icon:last-child{margin-left:.5em}.breadcrumb.is-centered ol,.breadcrumb.is-centered ul{justify-content:center}.breadcrumb.is-right ol,.breadcrumb.is-right ul{justify-content:flex-end}.breadcrumb.is-small{font-size:.75rem}.breadcrumb.is-medium{font-size:1.25rem}.breadcrumb.is-large{font-size:1.5rem}.breadcrumb.has-arrow-separator li+li::before{content:"\02192"}.breadcrumb.has-bullet-separator li+li::before{content:"\02022"}.breadcrumb.has-dot-separator li+li::before{content:"\000b7"}.breadcrumb.has-succeeds-separator li+li::before{content:"\0227B"}.card{background-color:#fff;box-shadow:0 2px 3px rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.1);color:#4a4a4a;max-width:100%;position:relative}.card-header{background-color:transparent;align-items:stretch;box-shadow:0 1px 2px rgba(10,10,10,.1);display:flex}.card-header-title{align-items:center;color:#363636;display:flex;flex-grow:1;font-weight:700;padding:.75rem}.card-header-title.is-centered{justify-content:center}.card-header-icon{align-items:center;cursor:pointer;display:flex;justify-content:center;padding:.75rem}.card-image{display:block;position:relative}.card-content{background-color:transparent;padding:1.5rem}.card-footer{background-color:transparent;border-top:1px solid #dbdbdb;align-items:stretch;display:flex}.card-footer-item{align-items:center;display:flex;flex-basis:0;flex-grow:1;flex-shrink:0;justify-content:center;padding:.75rem}.card-footer-item:not(:last-child){border-right:1px solid #dbdbdb}.card .media:not(:last-child){margin-bottom:.75rem}.dropdown{display:inline-flex;position:relative;vertical-align:top}.dropdown.is-active .dropdown-menu,.dropdown.is-hoverable:hover .dropdown-menu{display:block}.dropdown.is-right .dropdown-menu{left:auto;right:0}.dropdown.is-up .dropdown-menu{bottom:100%;padding-bottom:4px;padding-top:initial;top:auto}.dropdown-menu{display:none;left:0;min-width:12rem;padding-top:4px;position:absolute;top:100%;z-index:20}.dropdown-content{background-color:#fff;border-radius:4px;box-shadow:0 2px 3px rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.1);padding-bottom:.5rem;padding-top:.5rem}.dropdown-item{color:#4a4a4a;display:block;font-size:.875rem;line-height:1.5;padding:.375rem 1rem;position:relative}a.dropdown-item,button.dropdown-item{padding-right:3rem;text-align:left;white-space:nowrap;width:100%}a.dropdown-item:hover,button.dropdown-item:hover{background-color:#f5f5f5;color:#0a0a0a}a.dropdown-item.is-active,button.dropdown-item.is-active{background-color:#3273dc;color:#fff}.dropdown-divider{background-color:#dbdbdb;border:none;display:block;height:1px;margin:.5rem 0}.level{align-items:center;justify-content:space-between}.level code{border-radius:4px}.level img{display:inline-block;vertical-align:top}.level.is-mobile{display:flex}.level.is-mobile .level-left,.level.is-mobile .level-right{display:flex}.level.is-mobile .level-left+.level-right{margin-top:0}.level.is-mobile .level-item:not(:last-child){margin-bottom:0;margin-right:.75rem}.level.is-mobile .level-item:not(.is-narrow){flex-grow:1}@media screen and 
(min-width:769px),print{.level{display:flex}.level>.level-item:not(.is-narrow){flex-grow:1}}.level-item{align-items:center;display:flex;flex-basis:auto;flex-grow:0;flex-shrink:0;justify-content:center}.level-item .subtitle,.level-item .title{margin-bottom:0}@media screen and (max-width:768px){.level-item:not(:last-child){margin-bottom:.75rem}}.level-left,.level-right{flex-basis:auto;flex-grow:0;flex-shrink:0}.level-left .level-item.is-flexible,.level-right .level-item.is-flexible{flex-grow:1}@media screen and (min-width:769px),print{.level-left .level-item:not(:last-child),.level-right .level-item:not(:last-child){margin-right:.75rem}}.level-left{align-items:center;justify-content:flex-start}@media screen and (max-width:768px){.level-left+.level-right{margin-top:1.5rem}}@media screen and (min-width:769px),print{.level-left{display:flex}}.level-right{align-items:center;justify-content:flex-end}@media screen and (min-width:769px),print{.level-right{display:flex}}.list{background-color:#fff;border-radius:4px;box-shadow:0 2px 3px rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.1)}.list-item{display:block;padding:.5em 1em}.list-item:not(a){color:#4a4a4a}.list-item:first-child{border-top-left-radius:4px;border-top-right-radius:4px}.list-item:last-child{border-bottom-left-radius:4px;border-bottom-right-radius:4px}.list-item:not(:last-child){border-bottom:1px solid #dbdbdb}.list-item.is-active{background-color:#3273dc;color:#fff}a.list-item{background-color:#f5f5f5;cursor:pointer}.media{align-items:flex-start;display:flex;text-align:left}.media .content:not(:last-child){margin-bottom:.75rem}.media .media{border-top:1px solid rgba(219,219,219,.5);display:flex;padding-top:.75rem}.media .media .content:not(:last-child),.media .media .control:not(:last-child){margin-bottom:.5rem}.media .media .media{padding-top:.5rem}.media .media .media+.media{margin-top:.5rem}.media+.media{border-top:1px solid rgba(219,219,219,.5);margin-top:1rem;padding-top:1rem}.media.is-large+.media{margin-top:1.5rem;padding-top:1.5rem}.media-left,.media-right{flex-basis:auto;flex-grow:0;flex-shrink:0}.media-left{margin-right:1rem}.media-right{margin-left:1rem}.media-content{flex-basis:auto;flex-grow:1;flex-shrink:1;text-align:left}@media screen and (max-width:768px){.media-content{overflow-x:auto}}.menu{font-size:1rem}.menu.is-small{font-size:.75rem}.menu.is-medium{font-size:1.25rem}.menu.is-large{font-size:1.5rem}.menu-list{line-height:1.25}.menu-list a{border-radius:2px;color:#4a4a4a;display:block;padding:.5em .75em}.menu-list a:hover{background-color:#f5f5f5;color:#363636}.menu-list a.is-active{background-color:#3273dc;color:#fff}.menu-list li ul{border-left:1px solid #dbdbdb;margin:.75em;padding-left:.75em}.menu-label{color:#7a7a7a;font-size:.75em;letter-spacing:.1em;text-transform:uppercase}.menu-label:not(:first-child){margin-top:1em}.menu-label:not(:last-child){margin-bottom:1em}.message{background-color:#f5f5f5;border-radius:4px;font-size:1rem}.message strong{color:currentColor}.message a:not(.button):not(.tag):not(.dropdown-item){color:currentColor;text-decoration:underline}.message.is-small{font-size:.75rem}.message.is-medium{font-size:1.25rem}.message.is-large{font-size:1.5rem}.message.is-white{background-color:#fff}.message.is-white .message-header{background-color:#fff;color:#0a0a0a}.message.is-white .message-body{border-color:#fff;color:#4d4d4d}.message.is-black{background-color:#fafafa}.message.is-black .message-header{background-color:#0a0a0a;color:#fff}.message.is-black 
.message-body{border-color:#0a0a0a;color:#090909}.message.is-light{background-color:#fafafa}.message.is-light .message-header{background-color:#f5f5f5;color:#363636}.message.is-light .message-body{border-color:#f5f5f5;color:#505050}.message.is-dark{background-color:#fafafa}.message.is-dark .message-header{background-color:#363636;color:#f5f5f5}.message.is-dark .message-body{border-color:#363636;color:#2a2a2a}.message.is-primary{background-color:#f5fffd}.message.is-primary .message-header{background-color:#00d1b2;color:#fff}.message.is-primary .message-body{border-color:#00d1b2;color:#021310}.message.is-link{background-color:#f6f9fe}.message.is-link .message-header{background-color:#3273dc;color:#fff}.message.is-link .message-body{border-color:#3273dc;color:#22509a}.message.is-info{background-color:#f6fbfe}.message.is-info .message-header{background-color:#209cee;color:#fff}.message.is-info .message-body{border-color:#209cee;color:#12537e}.message.is-success{background-color:#f6fef9}.message.is-success .message-header{background-color:#23d160;color:#fff}.message.is-success .message-body{border-color:#23d160;color:#0e301a}.message.is-warning{background-color:#fffdf5}.message.is-warning .message-header{background-color:#ffdd57;color:rgba(0,0,0,.7)}.message.is-warning .message-body{border-color:#ffdd57;color:#3b3108}.message.is-danger{background-color:#fff5f7}.message.is-danger .message-header{background-color:#ff3860;color:#fff}.message.is-danger .message-body{border-color:#ff3860;color:#cd0930}.message-header{align-items:center;background-color:#4a4a4a;border-radius:4px 4px 0 0;color:#fff;display:flex;font-weight:700;justify-content:space-between;line-height:1.25;padding:.75em 1em;position:relative}.message-header .delete{flex-grow:0;flex-shrink:0;margin-left:.75em}.message-header+.message-body{border-width:0;border-top-left-radius:0;border-top-right-radius:0}.message-body{border-color:#dbdbdb;border-radius:4px;border-style:solid;border-width:0 0 0 4px;color:#4a4a4a;padding:1.25em 1.5em}.message-body code,.message-body pre{background-color:#fff}.message-body pre code{background-color:transparent}.modal{align-items:center;display:none;flex-direction:column;justify-content:center;overflow:hidden;position:fixed;z-index:40}.modal.is-active{display:flex}.modal-background{background-color:rgba(10,10,10,.86)}.modal-card,.modal-content{margin:0 20px;max-height:calc(100vh - 160px);overflow:auto;position:relative;width:100%}@media screen and (min-width:769px),print{.modal-card,.modal-content{margin:0 auto;max-height:calc(100vh - 40px);width:640px}}.modal-close{background:0 0;height:40px;position:fixed;right:20px;top:20px;width:40px}.modal-card{display:flex;flex-direction:column;max-height:calc(100vh - 40px);overflow:hidden;-ms-overflow-y:visible}.modal-card-foot,.modal-card-head{align-items:center;background-color:#f5f5f5;display:flex;flex-shrink:0;justify-content:flex-start;padding:20px;position:relative}.modal-card-head{border-bottom:1px solid #dbdbdb;border-top-left-radius:6px;border-top-right-radius:6px}.modal-card-title{color:#363636;flex-grow:1;flex-shrink:0;font-size:1.5rem;line-height:1}.modal-card-foot{border-bottom-left-radius:6px;border-bottom-right-radius:6px;border-top:1px solid #dbdbdb}.modal-card-foot 
.button:not(:last-child){margin-right:10px}.modal-card-body{-webkit-overflow-scrolling:touch;background-color:#fff;flex-grow:1;flex-shrink:1;overflow:auto;padding:20px}.navbar{background-color:#fff;min-height:3.25rem;position:relative;z-index:30}.navbar.is-white{background-color:#fff;color:#0a0a0a}.navbar.is-white .navbar-brand .navbar-link,.navbar.is-white .navbar-brand>.navbar-item{color:#0a0a0a}.navbar.is-white .navbar-brand .navbar-link.is-active,.navbar.is-white .navbar-brand .navbar-link:hover,.navbar.is-white .navbar-brand>a.navbar-item.is-active,.navbar.is-white .navbar-brand>a.navbar-item:hover{background-color:#f2f2f2;color:#0a0a0a}.navbar.is-white .navbar-brand .navbar-link::after{border-color:#0a0a0a}.navbar.is-white .navbar-burger{color:#0a0a0a}@media screen and (min-width:1088px){.navbar.is-white .navbar-end .navbar-link,.navbar.is-white .navbar-end>.navbar-item,.navbar.is-white .navbar-start .navbar-link,.navbar.is-white .navbar-start>.navbar-item{color:#0a0a0a}.navbar.is-white .navbar-end .navbar-link.is-active,.navbar.is-white .navbar-end .navbar-link:hover,.navbar.is-white .navbar-end>a.navbar-item.is-active,.navbar.is-white .navbar-end>a.navbar-item:hover,.navbar.is-white .navbar-start .navbar-link.is-active,.navbar.is-white .navbar-start .navbar-link:hover,.navbar.is-white .navbar-start>a.navbar-item.is-active,.navbar.is-white .navbar-start>a.navbar-item:hover{background-color:#f2f2f2;color:#0a0a0a}.navbar.is-white .navbar-end .navbar-link::after,.navbar.is-white .navbar-start .navbar-link::after{border-color:#0a0a0a}.navbar.is-white .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-white .navbar-item.has-dropdown:hover .navbar-link{background-color:#f2f2f2;color:#0a0a0a}.navbar.is-white .navbar-dropdown a.navbar-item.is-active{background-color:#fff;color:#0a0a0a}}.navbar.is-black{background-color:#0a0a0a;color:#fff}.navbar.is-black .navbar-brand .navbar-link,.navbar.is-black .navbar-brand>.navbar-item{color:#fff}.navbar.is-black .navbar-brand .navbar-link.is-active,.navbar.is-black .navbar-brand .navbar-link:hover,.navbar.is-black .navbar-brand>a.navbar-item.is-active,.navbar.is-black .navbar-brand>a.navbar-item:hover{background-color:#000;color:#fff}.navbar.is-black .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-black .navbar-burger{color:#fff}@media screen and (min-width:1088px){.navbar.is-black .navbar-end .navbar-link,.navbar.is-black .navbar-end>.navbar-item,.navbar.is-black .navbar-start .navbar-link,.navbar.is-black .navbar-start>.navbar-item{color:#fff}.navbar.is-black .navbar-end .navbar-link.is-active,.navbar.is-black .navbar-end .navbar-link:hover,.navbar.is-black .navbar-end>a.navbar-item.is-active,.navbar.is-black .navbar-end>a.navbar-item:hover,.navbar.is-black .navbar-start .navbar-link.is-active,.navbar.is-black .navbar-start .navbar-link:hover,.navbar.is-black .navbar-start>a.navbar-item.is-active,.navbar.is-black .navbar-start>a.navbar-item:hover{background-color:#000;color:#fff}.navbar.is-black .navbar-end .navbar-link::after,.navbar.is-black .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-black .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-black .navbar-item.has-dropdown:hover .navbar-link{background-color:#000;color:#fff}.navbar.is-black .navbar-dropdown a.navbar-item.is-active{background-color:#0a0a0a;color:#fff}}.navbar.is-light{background-color:#f5f5f5;color:#363636}.navbar.is-light .navbar-brand .navbar-link,.navbar.is-light .navbar-brand>.navbar-item{color:#363636}.navbar.is-light 
.navbar-brand .navbar-link.is-active,.navbar.is-light .navbar-brand .navbar-link:hover,.navbar.is-light .navbar-brand>a.navbar-item.is-active,.navbar.is-light .navbar-brand>a.navbar-item:hover{background-color:#e8e8e8;color:#363636}.navbar.is-light .navbar-brand .navbar-link::after{border-color:#363636}.navbar.is-light .navbar-burger{color:#363636}@media screen and (min-width:1088px){.navbar.is-light .navbar-end .navbar-link,.navbar.is-light .navbar-end>.navbar-item,.navbar.is-light .navbar-start .navbar-link,.navbar.is-light .navbar-start>.navbar-item{color:#363636}.navbar.is-light .navbar-end .navbar-link.is-active,.navbar.is-light .navbar-end .navbar-link:hover,.navbar.is-light .navbar-end>a.navbar-item.is-active,.navbar.is-light .navbar-end>a.navbar-item:hover,.navbar.is-light .navbar-start .navbar-link.is-active,.navbar.is-light .navbar-start .navbar-link:hover,.navbar.is-light .navbar-start>a.navbar-item.is-active,.navbar.is-light .navbar-start>a.navbar-item:hover{background-color:#e8e8e8;color:#363636}.navbar.is-light .navbar-end .navbar-link::after,.navbar.is-light .navbar-start .navbar-link::after{border-color:#363636}.navbar.is-light .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-light .navbar-item.has-dropdown:hover .navbar-link{background-color:#e8e8e8;color:#363636}.navbar.is-light .navbar-dropdown a.navbar-item.is-active{background-color:#f5f5f5;color:#363636}}.navbar.is-dark{background-color:#363636;color:#f5f5f5}.navbar.is-dark .navbar-brand .navbar-link,.navbar.is-dark .navbar-brand>.navbar-item{color:#f5f5f5}.navbar.is-dark .navbar-brand .navbar-link.is-active,.navbar.is-dark .navbar-brand .navbar-link:hover,.navbar.is-dark .navbar-brand>a.navbar-item.is-active,.navbar.is-dark .navbar-brand>a.navbar-item:hover{background-color:#292929;color:#f5f5f5}.navbar.is-dark .navbar-brand .navbar-link::after{border-color:#f5f5f5}.navbar.is-dark .navbar-burger{color:#f5f5f5}@media screen and (min-width:1088px){.navbar.is-dark .navbar-end .navbar-link,.navbar.is-dark .navbar-end>.navbar-item,.navbar.is-dark .navbar-start .navbar-link,.navbar.is-dark .navbar-start>.navbar-item{color:#f5f5f5}.navbar.is-dark .navbar-end .navbar-link.is-active,.navbar.is-dark .navbar-end .navbar-link:hover,.navbar.is-dark .navbar-end>a.navbar-item.is-active,.navbar.is-dark .navbar-end>a.navbar-item:hover,.navbar.is-dark .navbar-start .navbar-link.is-active,.navbar.is-dark .navbar-start .navbar-link:hover,.navbar.is-dark .navbar-start>a.navbar-item.is-active,.navbar.is-dark .navbar-start>a.navbar-item:hover{background-color:#292929;color:#f5f5f5}.navbar.is-dark .navbar-end .navbar-link::after,.navbar.is-dark .navbar-start .navbar-link::after{border-color:#f5f5f5}.navbar.is-dark .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-dark .navbar-item.has-dropdown:hover .navbar-link{background-color:#292929;color:#f5f5f5}.navbar.is-dark .navbar-dropdown a.navbar-item.is-active{background-color:#363636;color:#f5f5f5}}.navbar.is-primary{background-color:#00d1b2;color:#fff}.navbar.is-primary .navbar-brand .navbar-link,.navbar.is-primary .navbar-brand>.navbar-item{color:#fff}.navbar.is-primary .navbar-brand .navbar-link.is-active,.navbar.is-primary .navbar-brand .navbar-link:hover,.navbar.is-primary .navbar-brand>a.navbar-item.is-active,.navbar.is-primary .navbar-brand>a.navbar-item:hover{background-color:#00b89c;color:#fff}.navbar.is-primary .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-primary .navbar-burger{color:#fff}@media screen and (min-width:1088px){.navbar.is-primary 
.navbar-end .navbar-link,.navbar.is-primary .navbar-end>.navbar-item,.navbar.is-primary .navbar-start .navbar-link,.navbar.is-primary .navbar-start>.navbar-item{color:#fff}.navbar.is-primary .navbar-end .navbar-link.is-active,.navbar.is-primary .navbar-end .navbar-link:hover,.navbar.is-primary .navbar-end>a.navbar-item.is-active,.navbar.is-primary .navbar-end>a.navbar-item:hover,.navbar.is-primary .navbar-start .navbar-link.is-active,.navbar.is-primary .navbar-start .navbar-link:hover,.navbar.is-primary .navbar-start>a.navbar-item.is-active,.navbar.is-primary .navbar-start>a.navbar-item:hover{background-color:#00b89c;color:#fff}.navbar.is-primary .navbar-end .navbar-link::after,.navbar.is-primary .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-primary .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-primary .navbar-item.has-dropdown:hover .navbar-link{background-color:#00b89c;color:#fff}.navbar.is-primary .navbar-dropdown a.navbar-item.is-active{background-color:#00d1b2;color:#fff}}.navbar.is-link{background-color:#3273dc;color:#fff}.navbar.is-link .navbar-brand .navbar-link,.navbar.is-link .navbar-brand>.navbar-item{color:#fff}.navbar.is-link .navbar-brand .navbar-link.is-active,.navbar.is-link .navbar-brand .navbar-link:hover,.navbar.is-link .navbar-brand>a.navbar-item.is-active,.navbar.is-link .navbar-brand>a.navbar-item:hover{background-color:#2366d1;color:#fff}.navbar.is-link .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-link .navbar-burger{color:#fff}@media screen and (min-width:1088px){.navbar.is-link .navbar-end .navbar-link,.navbar.is-link .navbar-end>.navbar-item,.navbar.is-link .navbar-start .navbar-link,.navbar.is-link .navbar-start>.navbar-item{color:#fff}.navbar.is-link .navbar-end .navbar-link.is-active,.navbar.is-link .navbar-end .navbar-link:hover,.navbar.is-link .navbar-end>a.navbar-item.is-active,.navbar.is-link .navbar-end>a.navbar-item:hover,.navbar.is-link .navbar-start .navbar-link.is-active,.navbar.is-link .navbar-start .navbar-link:hover,.navbar.is-link .navbar-start>a.navbar-item.is-active,.navbar.is-link .navbar-start>a.navbar-item:hover{background-color:#2366d1;color:#fff}.navbar.is-link .navbar-end .navbar-link::after,.navbar.is-link .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-link .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-link .navbar-item.has-dropdown:hover .navbar-link{background-color:#2366d1;color:#fff}.navbar.is-link .navbar-dropdown a.navbar-item.is-active{background-color:#3273dc;color:#fff}}.navbar.is-info{background-color:#209cee;color:#fff}.navbar.is-info .navbar-brand .navbar-link,.navbar.is-info .navbar-brand>.navbar-item{color:#fff}.navbar.is-info .navbar-brand .navbar-link.is-active,.navbar.is-info .navbar-brand .navbar-link:hover,.navbar.is-info .navbar-brand>a.navbar-item.is-active,.navbar.is-info .navbar-brand>a.navbar-item:hover{background-color:#118fe4;color:#fff}.navbar.is-info .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-info .navbar-burger{color:#fff}@media screen and (min-width:1088px){.navbar.is-info .navbar-end .navbar-link,.navbar.is-info .navbar-end>.navbar-item,.navbar.is-info .navbar-start .navbar-link,.navbar.is-info .navbar-start>.navbar-item{color:#fff}.navbar.is-info .navbar-end .navbar-link.is-active,.navbar.is-info .navbar-end .navbar-link:hover,.navbar.is-info .navbar-end>a.navbar-item.is-active,.navbar.is-info .navbar-end>a.navbar-item:hover,.navbar.is-info .navbar-start .navbar-link.is-active,.navbar.is-info .navbar-start 
.navbar-link:hover,.navbar.is-info .navbar-start>a.navbar-item.is-active,.navbar.is-info .navbar-start>a.navbar-item:hover{background-color:#118fe4;color:#fff}.navbar.is-info .navbar-end .navbar-link::after,.navbar.is-info .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-info .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-info .navbar-item.has-dropdown:hover .navbar-link{background-color:#118fe4;color:#fff}.navbar.is-info .navbar-dropdown a.navbar-item.is-active{background-color:#209cee;color:#fff}}.navbar.is-success{background-color:#23d160;color:#fff}.navbar.is-success .navbar-brand .navbar-link,.navbar.is-success .navbar-brand>.navbar-item{color:#fff}.navbar.is-success .navbar-brand .navbar-link.is-active,.navbar.is-success .navbar-brand .navbar-link:hover,.navbar.is-success .navbar-brand>a.navbar-item.is-active,.navbar.is-success .navbar-brand>a.navbar-item:hover{background-color:#20bc56;color:#fff}.navbar.is-success .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-success .navbar-burger{color:#fff}@media screen and (min-width:1088px){.navbar.is-success .navbar-end .navbar-link,.navbar.is-success .navbar-end>.navbar-item,.navbar.is-success .navbar-start .navbar-link,.navbar.is-success .navbar-start>.navbar-item{color:#fff}.navbar.is-success .navbar-end .navbar-link.is-active,.navbar.is-success .navbar-end .navbar-link:hover,.navbar.is-success .navbar-end>a.navbar-item.is-active,.navbar.is-success .navbar-end>a.navbar-item:hover,.navbar.is-success .navbar-start .navbar-link.is-active,.navbar.is-success .navbar-start .navbar-link:hover,.navbar.is-success .navbar-start>a.navbar-item.is-active,.navbar.is-success .navbar-start>a.navbar-item:hover{background-color:#20bc56;color:#fff}.navbar.is-success .navbar-end .navbar-link::after,.navbar.is-success .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-success .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-success .navbar-item.has-dropdown:hover .navbar-link{background-color:#20bc56;color:#fff}.navbar.is-success .navbar-dropdown a.navbar-item.is-active{background-color:#23d160;color:#fff}}.navbar.is-warning{background-color:#ffdd57;color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-brand .navbar-link,.navbar.is-warning .navbar-brand>.navbar-item{color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-brand .navbar-link.is-active,.navbar.is-warning .navbar-brand .navbar-link:hover,.navbar.is-warning .navbar-brand>a.navbar-item.is-active,.navbar.is-warning .navbar-brand>a.navbar-item:hover{background-color:#ffd83d;color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-brand .navbar-link::after{border-color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-burger{color:rgba(0,0,0,.7)}@media screen and (min-width:1088px){.navbar.is-warning .navbar-end .navbar-link,.navbar.is-warning .navbar-end>.navbar-item,.navbar.is-warning .navbar-start .navbar-link,.navbar.is-warning .navbar-start>.navbar-item{color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-end .navbar-link.is-active,.navbar.is-warning .navbar-end .navbar-link:hover,.navbar.is-warning .navbar-end>a.navbar-item.is-active,.navbar.is-warning .navbar-end>a.navbar-item:hover,.navbar.is-warning .navbar-start .navbar-link.is-active,.navbar.is-warning .navbar-start .navbar-link:hover,.navbar.is-warning .navbar-start>a.navbar-item.is-active,.navbar.is-warning .navbar-start>a.navbar-item:hover{background-color:#ffd83d;color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-end .navbar-link::after,.navbar.is-warning .navbar-start 
.navbar-link::after{border-color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-warning .navbar-item.has-dropdown:hover .navbar-link{background-color:#ffd83d;color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-dropdown a.navbar-item.is-active{background-color:#ffdd57;color:rgba(0,0,0,.7)}}.navbar.is-danger{background-color:#ff3860;color:#fff}.navbar.is-danger .navbar-brand .navbar-link,.navbar.is-danger .navbar-brand>.navbar-item{color:#fff}.navbar.is-danger .navbar-brand .navbar-link.is-active,.navbar.is-danger .navbar-brand .navbar-link:hover,.navbar.is-danger .navbar-brand>a.navbar-item.is-active,.navbar.is-danger .navbar-brand>a.navbar-item:hover{background-color:#ff1f4b;color:#fff}.navbar.is-danger .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-danger .navbar-burger{color:#fff}@media screen and (min-width:1088px){.navbar.is-danger .navbar-end .navbar-link,.navbar.is-danger .navbar-end>.navbar-item,.navbar.is-danger .navbar-start .navbar-link,.navbar.is-danger .navbar-start>.navbar-item{color:#fff}.navbar.is-danger .navbar-end .navbar-link.is-active,.navbar.is-danger .navbar-end .navbar-link:hover,.navbar.is-danger .navbar-end>a.navbar-item.is-active,.navbar.is-danger .navbar-end>a.navbar-item:hover,.navbar.is-danger .navbar-start .navbar-link.is-active,.navbar.is-danger .navbar-start .navbar-link:hover,.navbar.is-danger .navbar-start>a.navbar-item.is-active,.navbar.is-danger .navbar-start>a.navbar-item:hover{background-color:#ff1f4b;color:#fff}.navbar.is-danger .navbar-end .navbar-link::after,.navbar.is-danger .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-danger .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-danger .navbar-item.has-dropdown:hover .navbar-link{background-color:#ff1f4b;color:#fff}.navbar.is-danger .navbar-dropdown a.navbar-item.is-active{background-color:#ff3860;color:#fff}}.navbar>.container{align-items:stretch;display:flex;min-height:3.25rem;width:100%}.navbar.has-shadow{box-shadow:0 2px 0 0 #f5f5f5}.navbar.is-fixed-bottom,.navbar.is-fixed-top{left:0;position:fixed;right:0;z-index:30}.navbar.is-fixed-bottom{bottom:0}.navbar.is-fixed-bottom.has-shadow{box-shadow:0 -2px 0 0 #f5f5f5}.navbar.is-fixed-top{top:0}body.has-navbar-fixed-top,html.has-navbar-fixed-top{padding-top:3.25rem}body.has-navbar-fixed-bottom,html.has-navbar-fixed-bottom{padding-bottom:3.25rem}.navbar-brand,.navbar-tabs{align-items:stretch;display:flex;flex-shrink:0;min-height:3.25rem}.navbar-brand a.navbar-item:hover{background-color:transparent}.navbar-tabs{-webkit-overflow-scrolling:touch;max-width:100vw;overflow-x:auto;overflow-y:hidden}.navbar-burger{color:#4a4a4a;cursor:pointer;display:block;height:3.25rem;position:relative;width:3.25rem;margin-left:auto}.navbar-burger span{background-color:currentColor;display:block;height:1px;left:calc(50% - 8px);position:absolute;-webkit-transform-origin:center;transform-origin:center;transition-duration:86ms;transition-property:background-color,opacity,-webkit-transform;transition-property:background-color,opacity,transform;transition-property:background-color,opacity,transform,-webkit-transform;transition-timing-function:ease-out;width:16px}.navbar-burger span:nth-child(1){top:calc(50% - 6px)}.navbar-burger span:nth-child(2){top:calc(50% - 1px)}.navbar-burger span:nth-child(3){top:calc(50% + 4px)}.navbar-burger:hover{background-color:rgba(0,0,0,.05)}.navbar-burger.is-active span:nth-child(1){-webkit-transform:translateY(5px) rotate(45deg);transform:translateY(5px) 
rotate(45deg)}.navbar-burger.is-active span:nth-child(2){opacity:0}.navbar-burger.is-active span:nth-child(3){-webkit-transform:translateY(-5px) rotate(-45deg);transform:translateY(-5px) rotate(-45deg)}.navbar-menu{display:none}.navbar-item,.navbar-link{color:#4a4a4a;display:block;line-height:1.5;padding:.5rem .75rem;position:relative}.navbar-item .icon:only-child,.navbar-link .icon:only-child{margin-left:-.25rem;margin-right:-.25rem}.navbar-link,a.navbar-item{cursor:pointer}.navbar-link.is-active,.navbar-link:hover,a.navbar-item.is-active,a.navbar-item:hover{background-color:#fafafa;color:#3273dc}.navbar-item{display:block;flex-grow:0;flex-shrink:0}.navbar-item img{max-height:1.75rem}.navbar-item.has-dropdown{padding:0}.navbar-item.is-expanded{flex-grow:1;flex-shrink:1}.navbar-item.is-tab{border-bottom:1px solid transparent;min-height:3.25rem;padding-bottom:calc(.5rem - 1px)}.navbar-item.is-tab:hover{background-color:transparent;border-bottom-color:#3273dc}.navbar-item.is-tab.is-active{background-color:transparent;border-bottom-color:#3273dc;border-bottom-style:solid;border-bottom-width:3px;color:#3273dc;padding-bottom:calc(.5rem - 3px)}.navbar-content{flex-grow:1;flex-shrink:1}.navbar-link:not(.is-arrowless){padding-right:2.5em}.navbar-link:not(.is-arrowless)::after{border-color:#3273dc;margin-top:-.375em;right:1.125em}.navbar-dropdown{font-size:.875rem;padding-bottom:.5rem;padding-top:.5rem}.navbar-dropdown .navbar-item{padding-left:1.5rem;padding-right:1.5rem}.navbar-divider{background-color:#f5f5f5;border:none;display:none;height:2px;margin:.5rem 0}@media screen and (max-width:1087px){.navbar>.container{display:block}.navbar-brand .navbar-item,.navbar-tabs .navbar-item{align-items:center;display:flex}.navbar-link::after{display:none}.navbar-menu{background-color:#fff;box-shadow:0 8px 16px rgba(10,10,10,.1);padding:.5rem 0}.navbar-menu.is-active{display:block}.navbar.is-fixed-bottom-touch,.navbar.is-fixed-top-touch{left:0;position:fixed;right:0;z-index:30}.navbar.is-fixed-bottom-touch{bottom:0}.navbar.is-fixed-bottom-touch.has-shadow{box-shadow:0 -2px 3px rgba(10,10,10,.1)}.navbar.is-fixed-top-touch{top:0}.navbar.is-fixed-top .navbar-menu,.navbar.is-fixed-top-touch .navbar-menu{-webkit-overflow-scrolling:touch;max-height:calc(100vh - 3.25rem);overflow:auto}body.has-navbar-fixed-top-touch,html.has-navbar-fixed-top-touch{padding-top:3.25rem}body.has-navbar-fixed-bottom-touch,html.has-navbar-fixed-bottom-touch{padding-bottom:3.25rem}}@media screen and (min-width:1088px){.navbar,.navbar-end,.navbar-menu,.navbar-start{align-items:stretch;display:flex}.navbar{min-height:3.25rem}.navbar.is-spaced{padding:1rem 2rem}.navbar.is-spaced .navbar-end,.navbar.is-spaced .navbar-start{align-items:center}.navbar.is-spaced .navbar-link,.navbar.is-spaced a.navbar-item{border-radius:4px}.navbar.is-transparent .navbar-link.is-active,.navbar.is-transparent .navbar-link:hover,.navbar.is-transparent a.navbar-item.is-active,.navbar.is-transparent a.navbar-item:hover{background-color:transparent!important}.navbar.is-transparent .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:hover .navbar-link{background-color:transparent!important}.navbar.is-transparent .navbar-dropdown a.navbar-item:hover{background-color:#f5f5f5;color:#0a0a0a}.navbar.is-transparent .navbar-dropdown 
a.navbar-item.is-active{background-color:#f5f5f5;color:#3273dc}.navbar-burger{display:none}.navbar-item,.navbar-link{align-items:center;display:flex}.navbar-item{display:flex}.navbar-item.has-dropdown{align-items:stretch}.navbar-item.has-dropdown-up .navbar-link::after{-webkit-transform:rotate(135deg) translate(.25em,-.25em);transform:rotate(135deg) translate(.25em,-.25em)}.navbar-item.has-dropdown-up .navbar-dropdown{border-bottom:2px solid #dbdbdb;border-radius:6px 6px 0 0;border-top:none;bottom:100%;box-shadow:0 -8px 8px rgba(10,10,10,.1);top:auto}.navbar-item.is-active .navbar-dropdown,.navbar-item.is-hoverable:hover .navbar-dropdown{display:block}.navbar-item.is-active .navbar-dropdown.is-boxed,.navbar-item.is-hoverable:hover .navbar-dropdown.is-boxed,.navbar.is-spaced .navbar-item.is-active .navbar-dropdown,.navbar.is-spaced .navbar-item.is-hoverable:hover .navbar-dropdown{opacity:1;pointer-events:auto;-webkit-transform:translateY(0);transform:translateY(0)}.navbar-menu{flex-grow:1;flex-shrink:0}.navbar-start{justify-content:flex-start;margin-right:auto}.navbar-end{justify-content:flex-end;margin-left:auto}.navbar-dropdown{background-color:#fff;border-bottom-left-radius:6px;border-bottom-right-radius:6px;border-top:2px solid #dbdbdb;box-shadow:0 8px 8px rgba(10,10,10,.1);display:none;font-size:.875rem;left:0;min-width:100%;position:absolute;top:100%;z-index:20}.navbar-dropdown .navbar-item{padding:.375rem 1rem;white-space:nowrap}.navbar-dropdown a.navbar-item{padding-right:3rem}.navbar-dropdown a.navbar-item:hover{background-color:#f5f5f5;color:#0a0a0a}.navbar-dropdown a.navbar-item.is-active{background-color:#f5f5f5;color:#3273dc}.navbar-dropdown.is-boxed,.navbar.is-spaced .navbar-dropdown{border-radius:6px;border-top:none;box-shadow:0 8px 8px rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.1);display:block;opacity:0;pointer-events:none;top:calc(100% + (-4px));-webkit-transform:translateY(-5px);transform:translateY(-5px);transition-duration:86ms;transition-property:opacity,-webkit-transform;transition-property:opacity,transform;transition-property:opacity,transform,-webkit-transform}.navbar-dropdown.is-right{left:auto;right:0}.navbar-divider{display:block}.container>.navbar .navbar-brand,.navbar>.container .navbar-brand{margin-left:-.75rem}.container>.navbar .navbar-menu,.navbar>.container .navbar-menu{margin-right:-.75rem}.navbar.is-fixed-bottom-desktop,.navbar.is-fixed-top-desktop{left:0;position:fixed;right:0;z-index:30}.navbar.is-fixed-bottom-desktop{bottom:0}.navbar.is-fixed-bottom-desktop.has-shadow{box-shadow:0 -2px 3px rgba(10,10,10,.1)}.navbar.is-fixed-top-desktop{top:0}body.has-navbar-fixed-top-desktop,html.has-navbar-fixed-top-desktop{padding-top:3.25rem}body.has-navbar-fixed-bottom-desktop,html.has-navbar-fixed-bottom-desktop{padding-bottom:3.25rem}body.has-spaced-navbar-fixed-top,html.has-spaced-navbar-fixed-top{padding-top:5.25rem}body.has-spaced-navbar-fixed-bottom,html.has-spaced-navbar-fixed-bottom{padding-bottom:5.25rem}.navbar-link.is-active,a.navbar-item.is-active{color:#0a0a0a}.navbar-link.is-active:not(:hover),a.navbar-item.is-active:not(:hover){background-color:transparent}.navbar-item.has-dropdown.is-active .navbar-link,.navbar-item.has-dropdown:hover .navbar-link{background-color:#fafafa}}.hero.is-fullheight-with-navbar{min-height:calc(100vh - 3.25rem)}.pagination{font-size:1rem;margin:-.25rem}.pagination.is-small{font-size:.75rem}.pagination.is-medium{font-size:1.25rem}.pagination.is-large{font-size:1.5rem}.pagination.is-rounded 
.pagination-next,.pagination.is-rounded .pagination-previous{padding-left:1em;padding-right:1em;border-radius:290486px}.pagination.is-rounded .pagination-link{border-radius:290486px}.pagination,.pagination-list{align-items:center;display:flex;justify-content:center;text-align:center}.pagination-ellipsis,.pagination-link,.pagination-next,.pagination-previous{font-size:1em;padding-left:.5em;padding-right:.5em;justify-content:center;margin:.25rem;text-align:center}.pagination-link,.pagination-next,.pagination-previous{border-color:#dbdbdb;color:#363636;min-width:2.25em}.pagination-link:hover,.pagination-next:hover,.pagination-previous:hover{border-color:#b5b5b5;color:#363636}.pagination-link:focus,.pagination-next:focus,.pagination-previous:focus{border-color:#3273dc}.pagination-link:active,.pagination-next:active,.pagination-previous:active{box-shadow:inset 0 1px 2px rgba(10,10,10,.2)}.pagination-link[disabled],.pagination-next[disabled],.pagination-previous[disabled]{background-color:#dbdbdb;border-color:#dbdbdb;box-shadow:none;color:#7a7a7a;opacity:.5}.pagination-next,.pagination-previous{padding-left:.75em;padding-right:.75em;white-space:nowrap}.pagination-link.is-current{background-color:#3273dc;border-color:#3273dc;color:#fff}.pagination-ellipsis{color:#b5b5b5;pointer-events:none}.pagination-list{flex-wrap:wrap}@media screen and (max-width:768px){.pagination{flex-wrap:wrap}.pagination-next,.pagination-previous{flex-grow:1;flex-shrink:1}.pagination-list li{flex-grow:1;flex-shrink:1}}@media screen and (min-width:769px),print{.pagination-list{flex-grow:1;flex-shrink:1;justify-content:flex-start;order:1}.pagination-previous{order:2}.pagination-next{order:3}.pagination{justify-content:space-between}.pagination.is-centered .pagination-previous{order:1}.pagination.is-centered .pagination-list{justify-content:center;order:2}.pagination.is-centered .pagination-next{order:3}.pagination.is-right .pagination-previous{order:1}.pagination.is-right .pagination-next{order:2}.pagination.is-right .pagination-list{justify-content:flex-end;order:3}}.panel{font-size:1rem}.panel:not(:last-child){margin-bottom:1.5rem}.panel-block,.panel-heading,.panel-tabs{border-bottom:1px solid #dbdbdb;border-left:1px solid #dbdbdb;border-right:1px solid #dbdbdb}.panel-block:first-child,.panel-heading:first-child,.panel-tabs:first-child{border-top:1px solid #dbdbdb}.panel-heading{background-color:#f5f5f5;border-radius:4px 4px 0 0;color:#363636;font-size:1.25em;font-weight:300;line-height:1.25;padding:.5em .75em}.panel-tabs{align-items:flex-end;display:flex;font-size:.875em;justify-content:center}.panel-tabs a{border-bottom:1px solid #dbdbdb;margin-bottom:-1px;padding:.5em}.panel-tabs a.is-active{border-bottom-color:#4a4a4a;color:#363636}.panel-list a{color:#4a4a4a}.panel-list a:hover{color:#3273dc}.panel-block{align-items:center;color:#363636;display:flex;justify-content:flex-start;padding:.5em .75em}.panel-block input[type=checkbox]{margin-right:.75em}.panel-block>.control{flex-grow:1;flex-shrink:1;width:100%}.panel-block.is-wrapped{flex-wrap:wrap}.panel-block.is-active{border-left-color:#3273dc;color:#363636}.panel-block.is-active .panel-icon{color:#3273dc}a.panel-block,label.panel-block{cursor:pointer}a.panel-block:hover,label.panel-block:hover{background-color:#f5f5f5}.panel-icon{display:inline-block;font-size:14px;height:1em;line-height:1em;text-align:center;vertical-align:top;width:1em;color:#7a7a7a;margin-right:.75em}.panel-icon 
.fa{font-size:inherit;line-height:inherit}.tabs{-webkit-overflow-scrolling:touch;align-items:stretch;display:flex;font-size:1rem;justify-content:space-between;overflow:hidden;overflow-x:auto;white-space:nowrap}.tabs a{align-items:center;border-bottom-color:#dbdbdb;border-bottom-style:solid;border-bottom-width:1px;color:#4a4a4a;display:flex;justify-content:center;margin-bottom:-1px;padding:.5em 1em;vertical-align:top}.tabs a:hover{border-bottom-color:#363636;color:#363636}.tabs li{display:block}.tabs li.is-active a{border-bottom-color:#3273dc;color:#3273dc}.tabs ul{align-items:center;border-bottom-color:#dbdbdb;border-bottom-style:solid;border-bottom-width:1px;display:flex;flex-grow:1;flex-shrink:0;justify-content:flex-start}.tabs ul.is-left{padding-right:.75em}.tabs ul.is-center{flex:none;justify-content:center;padding-left:.75em;padding-right:.75em}.tabs ul.is-right{justify-content:flex-end;padding-left:.75em}.tabs .icon:first-child{margin-right:.5em}.tabs .icon:last-child{margin-left:.5em}.tabs.is-centered ul{justify-content:center}.tabs.is-right ul{justify-content:flex-end}.tabs.is-boxed a{border:1px solid transparent;border-radius:4px 4px 0 0}.tabs.is-boxed a:hover{background-color:#f5f5f5;border-bottom-color:#dbdbdb}.tabs.is-boxed li.is-active a{background-color:#fff;border-color:#dbdbdb;border-bottom-color:transparent!important}.tabs.is-fullwidth li{flex-grow:1;flex-shrink:0}.tabs.is-toggle a{border-color:#dbdbdb;border-style:solid;border-width:1px;margin-bottom:0;position:relative}.tabs.is-toggle a:hover{background-color:#f5f5f5;border-color:#b5b5b5;z-index:2}.tabs.is-toggle li+li{margin-left:-1px}.tabs.is-toggle li:first-child a{border-radius:4px 0 0 4px}.tabs.is-toggle li:last-child a{border-radius:0 4px 4px 0}.tabs.is-toggle li.is-active a{background-color:#3273dc;border-color:#3273dc;color:#fff;z-index:1}.tabs.is-toggle ul{border-bottom:none}.tabs.is-toggle.is-toggle-rounded li:first-child a{border-bottom-left-radius:290486px;border-top-left-radius:290486px;padding-left:1.25em}.tabs.is-toggle.is-toggle-rounded li:last-child 
a{border-bottom-right-radius:290486px;border-top-right-radius:290486px;padding-right:1.25em}.tabs.is-small{font-size:.75rem}.tabs.is-medium{font-size:1.25rem}.tabs.is-large{font-size:1.5rem}.column{display:block;flex-basis:0;flex-grow:1;flex-shrink:1;padding:.75rem}.columns.is-mobile>.column.is-narrow{flex:none}.columns.is-mobile>.column.is-full{flex:none;width:100%}.columns.is-mobile>.column.is-three-quarters{flex:none;width:75%}.columns.is-mobile>.column.is-two-thirds{flex:none;width:66.6666%}.columns.is-mobile>.column.is-half{flex:none;width:50%}.columns.is-mobile>.column.is-one-third{flex:none;width:33.3333%}.columns.is-mobile>.column.is-one-quarter{flex:none;width:25%}.columns.is-mobile>.column.is-one-fifth{flex:none;width:20%}.columns.is-mobile>.column.is-two-fifths{flex:none;width:40%}.columns.is-mobile>.column.is-three-fifths{flex:none;width:60%}.columns.is-mobile>.column.is-four-fifths{flex:none;width:80%}.columns.is-mobile>.column.is-offset-three-quarters{margin-left:75%}.columns.is-mobile>.column.is-offset-two-thirds{margin-left:66.6666%}.columns.is-mobile>.column.is-offset-half{margin-left:50%}.columns.is-mobile>.column.is-offset-one-third{margin-left:33.3333%}.columns.is-mobile>.column.is-offset-one-quarter{margin-left:25%}.columns.is-mobile>.column.is-offset-one-fifth{margin-left:20%}.columns.is-mobile>.column.is-offset-two-fifths{margin-left:40%}.columns.is-mobile>.column.is-offset-three-fifths{margin-left:60%}.columns.is-mobile>.column.is-offset-four-fifths{margin-left:80%}.columns.is-mobile>.column.is-1{flex:none;width:8.33333%}.columns.is-mobile>.column.is-offset-1{margin-left:8.33333%}.columns.is-mobile>.column.is-2{flex:none;width:16.66667%}.columns.is-mobile>.column.is-offset-2{margin-left:16.66667%}.columns.is-mobile>.column.is-3{flex:none;width:25%}.columns.is-mobile>.column.is-offset-3{margin-left:25%}.columns.is-mobile>.column.is-4{flex:none;width:33.33333%}.columns.is-mobile>.column.is-offset-4{margin-left:33.33333%}.columns.is-mobile>.column.is-5{flex:none;width:41.66667%}.columns.is-mobile>.column.is-offset-5{margin-left:41.66667%}.columns.is-mobile>.column.is-6{flex:none;width:50%}.columns.is-mobile>.column.is-offset-6{margin-left:50%}.columns.is-mobile>.column.is-7{flex:none;width:58.33333%}.columns.is-mobile>.column.is-offset-7{margin-left:58.33333%}.columns.is-mobile>.column.is-8{flex:none;width:66.66667%}.columns.is-mobile>.column.is-offset-8{margin-left:66.66667%}.columns.is-mobile>.column.is-9{flex:none;width:75%}.columns.is-mobile>.column.is-offset-9{margin-left:75%}.columns.is-mobile>.column.is-10{flex:none;width:83.33333%}.columns.is-mobile>.column.is-offset-10{margin-left:83.33333%}.columns.is-mobile>.column.is-11{flex:none;width:91.66667%}.columns.is-mobile>.column.is-offset-11{margin-left:91.66667%}.columns.is-mobile>.column.is-12{flex:none;width:100%}.columns.is-mobile>.column.is-offset-12{margin-left:100%}@media screen and 
(max-width:768px){.column.is-narrow-mobile{flex:none}.column.is-full-mobile{flex:none;width:100%}.column.is-three-quarters-mobile{flex:none;width:75%}.column.is-two-thirds-mobile{flex:none;width:66.6666%}.column.is-half-mobile{flex:none;width:50%}.column.is-one-third-mobile{flex:none;width:33.3333%}.column.is-one-quarter-mobile{flex:none;width:25%}.column.is-one-fifth-mobile{flex:none;width:20%}.column.is-two-fifths-mobile{flex:none;width:40%}.column.is-three-fifths-mobile{flex:none;width:60%}.column.is-four-fifths-mobile{flex:none;width:80%}.column.is-offset-three-quarters-mobile{margin-left:75%}.column.is-offset-two-thirds-mobile{margin-left:66.6666%}.column.is-offset-half-mobile{margin-left:50%}.column.is-offset-one-third-mobile{margin-left:33.3333%}.column.is-offset-one-quarter-mobile{margin-left:25%}.column.is-offset-one-fifth-mobile{margin-left:20%}.column.is-offset-two-fifths-mobile{margin-left:40%}.column.is-offset-three-fifths-mobile{margin-left:60%}.column.is-offset-four-fifths-mobile{margin-left:80%}.column.is-1-mobile{flex:none;width:8.33333%}.column.is-offset-1-mobile{margin-left:8.33333%}.column.is-2-mobile{flex:none;width:16.66667%}.column.is-offset-2-mobile{margin-left:16.66667%}.column.is-3-mobile{flex:none;width:25%}.column.is-offset-3-mobile{margin-left:25%}.column.is-4-mobile{flex:none;width:33.33333%}.column.is-offset-4-mobile{margin-left:33.33333%}.column.is-5-mobile{flex:none;width:41.66667%}.column.is-offset-5-mobile{margin-left:41.66667%}.column.is-6-mobile{flex:none;width:50%}.column.is-offset-6-mobile{margin-left:50%}.column.is-7-mobile{flex:none;width:58.33333%}.column.is-offset-7-mobile{margin-left:58.33333%}.column.is-8-mobile{flex:none;width:66.66667%}.column.is-offset-8-mobile{margin-left:66.66667%}.column.is-9-mobile{flex:none;width:75%}.column.is-offset-9-mobile{margin-left:75%}.column.is-10-mobile{flex:none;width:83.33333%}.column.is-offset-10-mobile{margin-left:83.33333%}.column.is-11-mobile{flex:none;width:91.66667%}.column.is-offset-11-mobile{margin-left:91.66667%}.column.is-12-mobile{flex:none;width:100%}.column.is-offset-12-mobile{margin-left:100%}}@media screen and 
(min-width:769px),print{.column.is-narrow,.column.is-narrow-tablet{flex:none}.column.is-full,.column.is-full-tablet{flex:none;width:100%}.column.is-three-quarters,.column.is-three-quarters-tablet{flex:none;width:75%}.column.is-two-thirds,.column.is-two-thirds-tablet{flex:none;width:66.6666%}.column.is-half,.column.is-half-tablet{flex:none;width:50%}.column.is-one-third,.column.is-one-third-tablet{flex:none;width:33.3333%}.column.is-one-quarter,.column.is-one-quarter-tablet{flex:none;width:25%}.column.is-one-fifth,.column.is-one-fifth-tablet{flex:none;width:20%}.column.is-two-fifths,.column.is-two-fifths-tablet{flex:none;width:40%}.column.is-three-fifths,.column.is-three-fifths-tablet{flex:none;width:60%}.column.is-four-fifths,.column.is-four-fifths-tablet{flex:none;width:80%}.column.is-offset-three-quarters,.column.is-offset-three-quarters-tablet{margin-left:75%}.column.is-offset-two-thirds,.column.is-offset-two-thirds-tablet{margin-left:66.6666%}.column.is-offset-half,.column.is-offset-half-tablet{margin-left:50%}.column.is-offset-one-third,.column.is-offset-one-third-tablet{margin-left:33.3333%}.column.is-offset-one-quarter,.column.is-offset-one-quarter-tablet{margin-left:25%}.column.is-offset-one-fifth,.column.is-offset-one-fifth-tablet{margin-left:20%}.column.is-offset-two-fifths,.column.is-offset-two-fifths-tablet{margin-left:40%}.column.is-offset-three-fifths,.column.is-offset-three-fifths-tablet{margin-left:60%}.column.is-offset-four-fifths,.column.is-offset-four-fifths-tablet{margin-left:80%}.column.is-1,.column.is-1-tablet{flex:none;width:8.33333%}.column.is-offset-1,.column.is-offset-1-tablet{margin-left:8.33333%}.column.is-2,.column.is-2-tablet{flex:none;width:16.66667%}.column.is-offset-2,.column.is-offset-2-tablet{margin-left:16.66667%}.column.is-3,.column.is-3-tablet{flex:none;width:25%}.column.is-offset-3,.column.is-offset-3-tablet{margin-left:25%}.column.is-4,.column.is-4-tablet{flex:none;width:33.33333%}.column.is-offset-4,.column.is-offset-4-tablet{margin-left:33.33333%}.column.is-5,.column.is-5-tablet{flex:none;width:41.66667%}.column.is-offset-5,.column.is-offset-5-tablet{margin-left:41.66667%}.column.is-6,.column.is-6-tablet{flex:none;width:50%}.column.is-offset-6,.column.is-offset-6-tablet{margin-left:50%}.column.is-7,.column.is-7-tablet{flex:none;width:58.33333%}.column.is-offset-7,.column.is-offset-7-tablet{margin-left:58.33333%}.column.is-8,.column.is-8-tablet{flex:none;width:66.66667%}.column.is-offset-8,.column.is-offset-8-tablet{margin-left:66.66667%}.column.is-9,.column.is-9-tablet{flex:none;width:75%}.column.is-offset-9,.column.is-offset-9-tablet{margin-left:75%}.column.is-10,.column.is-10-tablet{flex:none;width:83.33333%}.column.is-offset-10,.column.is-offset-10-tablet{margin-left:83.33333%}.column.is-11,.column.is-11-tablet{flex:none;width:91.66667%}.column.is-offset-11,.column.is-offset-11-tablet{margin-left:91.66667%}.column.is-12,.column.is-12-tablet{flex:none;width:100%}.column.is-offset-12,.column.is-offset-12-tablet{margin-left:100%}}@media screen and 
(max-width:1087px){.column.is-narrow-touch{flex:none}.column.is-full-touch{flex:none;width:100%}.column.is-three-quarters-touch{flex:none;width:75%}.column.is-two-thirds-touch{flex:none;width:66.6666%}.column.is-half-touch{flex:none;width:50%}.column.is-one-third-touch{flex:none;width:33.3333%}.column.is-one-quarter-touch{flex:none;width:25%}.column.is-one-fifth-touch{flex:none;width:20%}.column.is-two-fifths-touch{flex:none;width:40%}.column.is-three-fifths-touch{flex:none;width:60%}.column.is-four-fifths-touch{flex:none;width:80%}.column.is-offset-three-quarters-touch{margin-left:75%}.column.is-offset-two-thirds-touch{margin-left:66.6666%}.column.is-offset-half-touch{margin-left:50%}.column.is-offset-one-third-touch{margin-left:33.3333%}.column.is-offset-one-quarter-touch{margin-left:25%}.column.is-offset-one-fifth-touch{margin-left:20%}.column.is-offset-two-fifths-touch{margin-left:40%}.column.is-offset-three-fifths-touch{margin-left:60%}.column.is-offset-four-fifths-touch{margin-left:80%}.column.is-1-touch{flex:none;width:8.33333%}.column.is-offset-1-touch{margin-left:8.33333%}.column.is-2-touch{flex:none;width:16.66667%}.column.is-offset-2-touch{margin-left:16.66667%}.column.is-3-touch{flex:none;width:25%}.column.is-offset-3-touch{margin-left:25%}.column.is-4-touch{flex:none;width:33.33333%}.column.is-offset-4-touch{margin-left:33.33333%}.column.is-5-touch{flex:none;width:41.66667%}.column.is-offset-5-touch{margin-left:41.66667%}.column.is-6-touch{flex:none;width:50%}.column.is-offset-6-touch{margin-left:50%}.column.is-7-touch{flex:none;width:58.33333%}.column.is-offset-7-touch{margin-left:58.33333%}.column.is-8-touch{flex:none;width:66.66667%}.column.is-offset-8-touch{margin-left:66.66667%}.column.is-9-touch{flex:none;width:75%}.column.is-offset-9-touch{margin-left:75%}.column.is-10-touch{flex:none;width:83.33333%}.column.is-offset-10-touch{margin-left:83.33333%}.column.is-11-touch{flex:none;width:91.66667%}.column.is-offset-11-touch{margin-left:91.66667%}.column.is-12-touch{flex:none;width:100%}.column.is-offset-12-touch{margin-left:100%}}@media screen and 
(min-width:1088px){.column.is-narrow-desktop{flex:none}.column.is-full-desktop{flex:none;width:100%}.column.is-three-quarters-desktop{flex:none;width:75%}.column.is-two-thirds-desktop{flex:none;width:66.6666%}.column.is-half-desktop{flex:none;width:50%}.column.is-one-third-desktop{flex:none;width:33.3333%}.column.is-one-quarter-desktop{flex:none;width:25%}.column.is-one-fifth-desktop{flex:none;width:20%}.column.is-two-fifths-desktop{flex:none;width:40%}.column.is-three-fifths-desktop{flex:none;width:60%}.column.is-four-fifths-desktop{flex:none;width:80%}.column.is-offset-three-quarters-desktop{margin-left:75%}.column.is-offset-two-thirds-desktop{margin-left:66.6666%}.column.is-offset-half-desktop{margin-left:50%}.column.is-offset-one-third-desktop{margin-left:33.3333%}.column.is-offset-one-quarter-desktop{margin-left:25%}.column.is-offset-one-fifth-desktop{margin-left:20%}.column.is-offset-two-fifths-desktop{margin-left:40%}.column.is-offset-three-fifths-desktop{margin-left:60%}.column.is-offset-four-fifths-desktop{margin-left:80%}.column.is-1-desktop{flex:none;width:8.33333%}.column.is-offset-1-desktop{margin-left:8.33333%}.column.is-2-desktop{flex:none;width:16.66667%}.column.is-offset-2-desktop{margin-left:16.66667%}.column.is-3-desktop{flex:none;width:25%}.column.is-offset-3-desktop{margin-left:25%}.column.is-4-desktop{flex:none;width:33.33333%}.column.is-offset-4-desktop{margin-left:33.33333%}.column.is-5-desktop{flex:none;width:41.66667%}.column.is-offset-5-desktop{margin-left:41.66667%}.column.is-6-desktop{flex:none;width:50%}.column.is-offset-6-desktop{margin-left:50%}.column.is-7-desktop{flex:none;width:58.33333%}.column.is-offset-7-desktop{margin-left:58.33333%}.column.is-8-desktop{flex:none;width:66.66667%}.column.is-offset-8-desktop{margin-left:66.66667%}.column.is-9-desktop{flex:none;width:75%}.column.is-offset-9-desktop{margin-left:75%}.column.is-10-desktop{flex:none;width:83.33333%}.column.is-offset-10-desktop{margin-left:83.33333%}.column.is-11-desktop{flex:none;width:91.66667%}.column.is-offset-11-desktop{margin-left:91.66667%}.column.is-12-desktop{flex:none;width:100%}.column.is-offset-12-desktop{margin-left:100%}}@media screen and 
(min-width:1280px){.column.is-narrow-widescreen{flex:none}.column.is-full-widescreen{flex:none;width:100%}.column.is-three-quarters-widescreen{flex:none;width:75%}.column.is-two-thirds-widescreen{flex:none;width:66.6666%}.column.is-half-widescreen{flex:none;width:50%}.column.is-one-third-widescreen{flex:none;width:33.3333%}.column.is-one-quarter-widescreen{flex:none;width:25%}.column.is-one-fifth-widescreen{flex:none;width:20%}.column.is-two-fifths-widescreen{flex:none;width:40%}.column.is-three-fifths-widescreen{flex:none;width:60%}.column.is-four-fifths-widescreen{flex:none;width:80%}.column.is-offset-three-quarters-widescreen{margin-left:75%}.column.is-offset-two-thirds-widescreen{margin-left:66.6666%}.column.is-offset-half-widescreen{margin-left:50%}.column.is-offset-one-third-widescreen{margin-left:33.3333%}.column.is-offset-one-quarter-widescreen{margin-left:25%}.column.is-offset-one-fifth-widescreen{margin-left:20%}.column.is-offset-two-fifths-widescreen{margin-left:40%}.column.is-offset-three-fifths-widescreen{margin-left:60%}.column.is-offset-four-fifths-widescreen{margin-left:80%}.column.is-1-widescreen{flex:none;width:8.33333%}.column.is-offset-1-widescreen{margin-left:8.33333%}.column.is-2-widescreen{flex:none;width:16.66667%}.column.is-offset-2-widescreen{margin-left:16.66667%}.column.is-3-widescreen{flex:none;width:25%}.column.is-offset-3-widescreen{margin-left:25%}.column.is-4-widescreen{flex:none;width:33.33333%}.column.is-offset-4-widescreen{margin-left:33.33333%}.column.is-5-widescreen{flex:none;width:41.66667%}.column.is-offset-5-widescreen{margin-left:41.66667%}.column.is-6-widescreen{flex:none;width:50%}.column.is-offset-6-widescreen{margin-left:50%}.column.is-7-widescreen{flex:none;width:58.33333%}.column.is-offset-7-widescreen{margin-left:58.33333%}.column.is-8-widescreen{flex:none;width:66.66667%}.column.is-offset-8-widescreen{margin-left:66.66667%}.column.is-9-widescreen{flex:none;width:75%}.column.is-offset-9-widescreen{margin-left:75%}.column.is-10-widescreen{flex:none;width:83.33333%}.column.is-offset-10-widescreen{margin-left:83.33333%}.column.is-11-widescreen{flex:none;width:91.66667%}.column.is-offset-11-widescreen{margin-left:91.66667%}.column.is-12-widescreen{flex:none;width:100%}.column.is-offset-12-widescreen{margin-left:100%}}@media screen and 
(min-width:1472px){.column.is-narrow-fullhd{flex:none}.column.is-full-fullhd{flex:none;width:100%}.column.is-three-quarters-fullhd{flex:none;width:75%}.column.is-two-thirds-fullhd{flex:none;width:66.6666%}.column.is-half-fullhd{flex:none;width:50%}.column.is-one-third-fullhd{flex:none;width:33.3333%}.column.is-one-quarter-fullhd{flex:none;width:25%}.column.is-one-fifth-fullhd{flex:none;width:20%}.column.is-two-fifths-fullhd{flex:none;width:40%}.column.is-three-fifths-fullhd{flex:none;width:60%}.column.is-four-fifths-fullhd{flex:none;width:80%}.column.is-offset-three-quarters-fullhd{margin-left:75%}.column.is-offset-two-thirds-fullhd{margin-left:66.6666%}.column.is-offset-half-fullhd{margin-left:50%}.column.is-offset-one-third-fullhd{margin-left:33.3333%}.column.is-offset-one-quarter-fullhd{margin-left:25%}.column.is-offset-one-fifth-fullhd{margin-left:20%}.column.is-offset-two-fifths-fullhd{margin-left:40%}.column.is-offset-three-fifths-fullhd{margin-left:60%}.column.is-offset-four-fifths-fullhd{margin-left:80%}.column.is-1-fullhd{flex:none;width:8.33333%}.column.is-offset-1-fullhd{margin-left:8.33333%}.column.is-2-fullhd{flex:none;width:16.66667%}.column.is-offset-2-fullhd{margin-left:16.66667%}.column.is-3-fullhd{flex:none;width:25%}.column.is-offset-3-fullhd{margin-left:25%}.column.is-4-fullhd{flex:none;width:33.33333%}.column.is-offset-4-fullhd{margin-left:33.33333%}.column.is-5-fullhd{flex:none;width:41.66667%}.column.is-offset-5-fullhd{margin-left:41.66667%}.column.is-6-fullhd{flex:none;width:50%}.column.is-offset-6-fullhd{margin-left:50%}.column.is-7-fullhd{flex:none;width:58.33333%}.column.is-offset-7-fullhd{margin-left:58.33333%}.column.is-8-fullhd{flex:none;width:66.66667%}.column.is-offset-8-fullhd{margin-left:66.66667%}.column.is-9-fullhd{flex:none;width:75%}.column.is-offset-9-fullhd{margin-left:75%}.column.is-10-fullhd{flex:none;width:83.33333%}.column.is-offset-10-fullhd{margin-left:83.33333%}.column.is-11-fullhd{flex:none;width:91.66667%}.column.is-offset-11-fullhd{margin-left:91.66667%}.column.is-12-fullhd{flex:none;width:100%}.column.is-offset-12-fullhd{margin-left:100%}}.columns{margin-left:-.75rem;margin-right:-.75rem;margin-top:-.75rem}.columns:last-child{margin-bottom:-.75rem}.columns:not(:last-child){margin-bottom:calc(1.5rem - .75rem)}.columns.is-centered{justify-content:center}.columns.is-gapless{margin-left:0;margin-right:0;margin-top:0}.columns.is-gapless>.column{margin:0;padding:0!important}.columns.is-gapless:not(:last-child){margin-bottom:1.5rem}.columns.is-gapless:last-child{margin-bottom:0}.columns.is-mobile{display:flex}.columns.is-multiline{flex-wrap:wrap}.columns.is-vcentered{align-items:center}@media screen and (min-width:769px),print{.columns:not(.is-desktop){display:flex}}@media screen and (min-width:1088px){.columns.is-desktop{display:flex}}.columns.is-variable{--columnGap:0.75rem;margin-left:calc(-1 * var(--columnGap));margin-right:calc(-1 * var(--columnGap))}.columns.is-variable .column{padding-left:var(--columnGap);padding-right:var(--columnGap)}.columns.is-variable.is-0{--columnGap:0rem}@media screen and (max-width:768px){.columns.is-variable.is-0-mobile{--columnGap:0rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-0-tablet{--columnGap:0rem}}@media screen and (min-width:769px) and (max-width:1087px){.columns.is-variable.is-0-tablet-only{--columnGap:0rem}}@media screen and (max-width:1087px){.columns.is-variable.is-0-touch{--columnGap:0rem}}@media screen and 
(min-width:1088px){.columns.is-variable.is-0-desktop{--columnGap:0rem}}@media screen and (min-width:1088px) and (max-width:1279px){.columns.is-variable.is-0-desktop-only{--columnGap:0rem}}@media screen and (min-width:1280px){.columns.is-variable.is-0-widescreen{--columnGap:0rem}}@media screen and (min-width:1280px) and (max-width:1471px){.columns.is-variable.is-0-widescreen-only{--columnGap:0rem}}@media screen and (min-width:1472px){.columns.is-variable.is-0-fullhd{--columnGap:0rem}}.columns.is-variable.is-1{--columnGap:0.25rem}@media screen and (max-width:768px){.columns.is-variable.is-1-mobile{--columnGap:0.25rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-1-tablet{--columnGap:0.25rem}}@media screen and (min-width:769px) and (max-width:1087px){.columns.is-variable.is-1-tablet-only{--columnGap:0.25rem}}@media screen and (max-width:1087px){.columns.is-variable.is-1-touch{--columnGap:0.25rem}}@media screen and (min-width:1088px){.columns.is-variable.is-1-desktop{--columnGap:0.25rem}}@media screen and (min-width:1088px) and (max-width:1279px){.columns.is-variable.is-1-desktop-only{--columnGap:0.25rem}}@media screen and (min-width:1280px){.columns.is-variable.is-1-widescreen{--columnGap:0.25rem}}@media screen and (min-width:1280px) and (max-width:1471px){.columns.is-variable.is-1-widescreen-only{--columnGap:0.25rem}}@media screen and (min-width:1472px){.columns.is-variable.is-1-fullhd{--columnGap:0.25rem}}.columns.is-variable.is-2{--columnGap:0.5rem}@media screen and (max-width:768px){.columns.is-variable.is-2-mobile{--columnGap:0.5rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-2-tablet{--columnGap:0.5rem}}@media screen and (min-width:769px) and (max-width:1087px){.columns.is-variable.is-2-tablet-only{--columnGap:0.5rem}}@media screen and (max-width:1087px){.columns.is-variable.is-2-touch{--columnGap:0.5rem}}@media screen and (min-width:1088px){.columns.is-variable.is-2-desktop{--columnGap:0.5rem}}@media screen and (min-width:1088px) and (max-width:1279px){.columns.is-variable.is-2-desktop-only{--columnGap:0.5rem}}@media screen and (min-width:1280px){.columns.is-variable.is-2-widescreen{--columnGap:0.5rem}}@media screen and (min-width:1280px) and (max-width:1471px){.columns.is-variable.is-2-widescreen-only{--columnGap:0.5rem}}@media screen and (min-width:1472px){.columns.is-variable.is-2-fullhd{--columnGap:0.5rem}}.columns.is-variable.is-3{--columnGap:0.75rem}@media screen and (max-width:768px){.columns.is-variable.is-3-mobile{--columnGap:0.75rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-3-tablet{--columnGap:0.75rem}}@media screen and (min-width:769px) and (max-width:1087px){.columns.is-variable.is-3-tablet-only{--columnGap:0.75rem}}@media screen and (max-width:1087px){.columns.is-variable.is-3-touch{--columnGap:0.75rem}}@media screen and (min-width:1088px){.columns.is-variable.is-3-desktop{--columnGap:0.75rem}}@media screen and (min-width:1088px) and (max-width:1279px){.columns.is-variable.is-3-desktop-only{--columnGap:0.75rem}}@media screen and (min-width:1280px){.columns.is-variable.is-3-widescreen{--columnGap:0.75rem}}@media screen and (min-width:1280px) and (max-width:1471px){.columns.is-variable.is-3-widescreen-only{--columnGap:0.75rem}}@media screen and (min-width:1472px){.columns.is-variable.is-3-fullhd{--columnGap:0.75rem}}.columns.is-variable.is-4{--columnGap:1rem}@media screen and (max-width:768px){.columns.is-variable.is-4-mobile{--columnGap:1rem}}@media screen and 
(min-width:769px),print{.columns.is-variable.is-4-tablet{--columnGap:1rem}}@media screen and (min-width:769px) and (max-width:1087px){.columns.is-variable.is-4-tablet-only{--columnGap:1rem}}@media screen and (max-width:1087px){.columns.is-variable.is-4-touch{--columnGap:1rem}}@media screen and (min-width:1088px){.columns.is-variable.is-4-desktop{--columnGap:1rem}}@media screen and (min-width:1088px) and (max-width:1279px){.columns.is-variable.is-4-desktop-only{--columnGap:1rem}}@media screen and (min-width:1280px){.columns.is-variable.is-4-widescreen{--columnGap:1rem}}@media screen and (min-width:1280px) and (max-width:1471px){.columns.is-variable.is-4-widescreen-only{--columnGap:1rem}}@media screen and (min-width:1472px){.columns.is-variable.is-4-fullhd{--columnGap:1rem}}.columns.is-variable.is-5{--columnGap:1.25rem}@media screen and (max-width:768px){.columns.is-variable.is-5-mobile{--columnGap:1.25rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-5-tablet{--columnGap:1.25rem}}@media screen and (min-width:769px) and (max-width:1087px){.columns.is-variable.is-5-tablet-only{--columnGap:1.25rem}}@media screen and (max-width:1087px){.columns.is-variable.is-5-touch{--columnGap:1.25rem}}@media screen and (min-width:1088px){.columns.is-variable.is-5-desktop{--columnGap:1.25rem}}@media screen and (min-width:1088px) and (max-width:1279px){.columns.is-variable.is-5-desktop-only{--columnGap:1.25rem}}@media screen and (min-width:1280px){.columns.is-variable.is-5-widescreen{--columnGap:1.25rem}}@media screen and (min-width:1280px) and (max-width:1471px){.columns.is-variable.is-5-widescreen-only{--columnGap:1.25rem}}@media screen and (min-width:1472px){.columns.is-variable.is-5-fullhd{--columnGap:1.25rem}}.columns.is-variable.is-6{--columnGap:1.5rem}@media screen and (max-width:768px){.columns.is-variable.is-6-mobile{--columnGap:1.5rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-6-tablet{--columnGap:1.5rem}}@media screen and (min-width:769px) and (max-width:1087px){.columns.is-variable.is-6-tablet-only{--columnGap:1.5rem}}@media screen and (max-width:1087px){.columns.is-variable.is-6-touch{--columnGap:1.5rem}}@media screen and (min-width:1088px){.columns.is-variable.is-6-desktop{--columnGap:1.5rem}}@media screen and (min-width:1088px) and (max-width:1279px){.columns.is-variable.is-6-desktop-only{--columnGap:1.5rem}}@media screen and (min-width:1280px){.columns.is-variable.is-6-widescreen{--columnGap:1.5rem}}@media screen and (min-width:1280px) and (max-width:1471px){.columns.is-variable.is-6-widescreen-only{--columnGap:1.5rem}}@media screen and (min-width:1472px){.columns.is-variable.is-6-fullhd{--columnGap:1.5rem}}.columns.is-variable.is-7{--columnGap:1.75rem}@media screen and (max-width:768px){.columns.is-variable.is-7-mobile{--columnGap:1.75rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-7-tablet{--columnGap:1.75rem}}@media screen and (min-width:769px) and (max-width:1087px){.columns.is-variable.is-7-tablet-only{--columnGap:1.75rem}}@media screen and (max-width:1087px){.columns.is-variable.is-7-touch{--columnGap:1.75rem}}@media screen and (min-width:1088px){.columns.is-variable.is-7-desktop{--columnGap:1.75rem}}@media screen and (min-width:1088px) and (max-width:1279px){.columns.is-variable.is-7-desktop-only{--columnGap:1.75rem}}@media screen and (min-width:1280px){.columns.is-variable.is-7-widescreen{--columnGap:1.75rem}}@media screen and (min-width:1280px) and 
(max-width:1471px){.columns.is-variable.is-7-widescreen-only{--columnGap:1.75rem}}@media screen and (min-width:1472px){.columns.is-variable.is-7-fullhd{--columnGap:1.75rem}}.columns.is-variable.is-8{--columnGap:2rem}@media screen and (max-width:768px){.columns.is-variable.is-8-mobile{--columnGap:2rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-8-tablet{--columnGap:2rem}}@media screen and (min-width:769px) and (max-width:1087px){.columns.is-variable.is-8-tablet-only{--columnGap:2rem}}@media screen and (max-width:1087px){.columns.is-variable.is-8-touch{--columnGap:2rem}}@media screen and (min-width:1088px){.columns.is-variable.is-8-desktop{--columnGap:2rem}}@media screen and (min-width:1088px) and (max-width:1279px){.columns.is-variable.is-8-desktop-only{--columnGap:2rem}}@media screen and (min-width:1280px){.columns.is-variable.is-8-widescreen{--columnGap:2rem}}@media screen and (min-width:1280px) and (max-width:1471px){.columns.is-variable.is-8-widescreen-only{--columnGap:2rem}}@media screen and (min-width:1472px){.columns.is-variable.is-8-fullhd{--columnGap:2rem}}.tile{align-items:stretch;display:block;flex-basis:0;flex-grow:1;flex-shrink:1;min-height:-webkit-min-content;min-height:-moz-min-content;min-height:min-content}.tile.is-ancestor{margin-left:-.75rem;margin-right:-.75rem;margin-top:-.75rem}.tile.is-ancestor:last-child{margin-bottom:-.75rem}.tile.is-ancestor:not(:last-child){margin-bottom:.75rem}.tile.is-child{margin:0!important}.tile.is-parent{padding:.75rem}.tile.is-vertical{flex-direction:column}.tile.is-vertical>.tile.is-child:not(:last-child){margin-bottom:1.5rem!important}@media screen and (min-width:769px),print{.tile:not(.is-child){display:flex}.tile.is-1{flex:none;width:8.33333%}.tile.is-2{flex:none;width:16.66667%}.tile.is-3{flex:none;width:25%}.tile.is-4{flex:none;width:33.33333%}.tile.is-5{flex:none;width:41.66667%}.tile.is-6{flex:none;width:50%}.tile.is-7{flex:none;width:58.33333%}.tile.is-8{flex:none;width:66.66667%}.tile.is-9{flex:none;width:75%}.tile.is-10{flex:none;width:83.33333%}.tile.is-11{flex:none;width:91.66667%}.tile.is-12{flex:none;width:100%}}.hero{align-items:stretch;display:flex;flex-direction:column;justify-content:space-between}.hero .navbar{background:0 0}.hero .tabs ul{border-bottom:none}.hero.is-white{background-color:#fff;color:#0a0a0a}.hero.is-white a:not(.button):not(.dropdown-item):not(.tag),.hero.is-white strong{color:inherit}.hero.is-white .title{color:#0a0a0a}.hero.is-white .subtitle{color:rgba(10,10,10,.9)}.hero.is-white .subtitle a:not(.button),.hero.is-white .subtitle strong{color:#0a0a0a}@media screen and (max-width:1087px){.hero.is-white .navbar-menu{background-color:#fff}}.hero.is-white .navbar-item,.hero.is-white .navbar-link{color:rgba(10,10,10,.7)}.hero.is-white .navbar-link.is-active,.hero.is-white .navbar-link:hover,.hero.is-white a.navbar-item.is-active,.hero.is-white a.navbar-item:hover{background-color:#f2f2f2;color:#0a0a0a}.hero.is-white .tabs a{color:#0a0a0a;opacity:.9}.hero.is-white .tabs a:hover{opacity:1}.hero.is-white .tabs li.is-active a{opacity:1}.hero.is-white .tabs.is-boxed a,.hero.is-white .tabs.is-toggle a{color:#0a0a0a}.hero.is-white .tabs.is-boxed a:hover,.hero.is-white .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-white .tabs.is-boxed li.is-active a,.hero.is-white .tabs.is-boxed li.is-active a:hover,.hero.is-white .tabs.is-toggle li.is-active a,.hero.is-white .tabs.is-toggle li.is-active 
a:hover{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}.hero.is-white.is-bold{background-image:linear-gradient(141deg,#e6e6e6 0,#fff 71%,#fff 100%)}@media screen and (max-width:768px){.hero.is-white.is-bold .navbar-menu{background-image:linear-gradient(141deg,#e6e6e6 0,#fff 71%,#fff 100%)}}.hero.is-black{background-color:#0a0a0a;color:#fff}.hero.is-black a:not(.button):not(.dropdown-item):not(.tag),.hero.is-black strong{color:inherit}.hero.is-black .title{color:#fff}.hero.is-black .subtitle{color:rgba(255,255,255,.9)}.hero.is-black .subtitle a:not(.button),.hero.is-black .subtitle strong{color:#fff}@media screen and (max-width:1087px){.hero.is-black .navbar-menu{background-color:#0a0a0a}}.hero.is-black .navbar-item,.hero.is-black .navbar-link{color:rgba(255,255,255,.7)}.hero.is-black .navbar-link.is-active,.hero.is-black .navbar-link:hover,.hero.is-black a.navbar-item.is-active,.hero.is-black a.navbar-item:hover{background-color:#000;color:#fff}.hero.is-black .tabs a{color:#fff;opacity:.9}.hero.is-black .tabs a:hover{opacity:1}.hero.is-black .tabs li.is-active a{opacity:1}.hero.is-black .tabs.is-boxed a,.hero.is-black .tabs.is-toggle a{color:#fff}.hero.is-black .tabs.is-boxed a:hover,.hero.is-black .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-black .tabs.is-boxed li.is-active a,.hero.is-black .tabs.is-boxed li.is-active a:hover,.hero.is-black .tabs.is-toggle li.is-active a,.hero.is-black .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#0a0a0a}.hero.is-black.is-bold{background-image:linear-gradient(141deg,#000 0,#0a0a0a 71%,#181616 100%)}@media screen and (max-width:768px){.hero.is-black.is-bold .navbar-menu{background-image:linear-gradient(141deg,#000 0,#0a0a0a 71%,#181616 100%)}}.hero.is-light{background-color:#f5f5f5;color:#363636}.hero.is-light a:not(.button):not(.dropdown-item):not(.tag),.hero.is-light strong{color:inherit}.hero.is-light .title{color:#363636}.hero.is-light .subtitle{color:rgba(54,54,54,.9)}.hero.is-light .subtitle a:not(.button),.hero.is-light .subtitle strong{color:#363636}@media screen and (max-width:1087px){.hero.is-light .navbar-menu{background-color:#f5f5f5}}.hero.is-light .navbar-item,.hero.is-light .navbar-link{color:rgba(54,54,54,.7)}.hero.is-light .navbar-link.is-active,.hero.is-light .navbar-link:hover,.hero.is-light a.navbar-item.is-active,.hero.is-light a.navbar-item:hover{background-color:#e8e8e8;color:#363636}.hero.is-light .tabs a{color:#363636;opacity:.9}.hero.is-light .tabs a:hover{opacity:1}.hero.is-light .tabs li.is-active a{opacity:1}.hero.is-light .tabs.is-boxed a,.hero.is-light .tabs.is-toggle a{color:#363636}.hero.is-light .tabs.is-boxed a:hover,.hero.is-light .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-light .tabs.is-boxed li.is-active a,.hero.is-light .tabs.is-boxed li.is-active a:hover,.hero.is-light .tabs.is-toggle li.is-active a,.hero.is-light .tabs.is-toggle li.is-active a:hover{background-color:#363636;border-color:#363636;color:#f5f5f5}.hero.is-light.is-bold{background-image:linear-gradient(141deg,#dfd8d9 0,#f5f5f5 71%,#fff 100%)}@media screen and (max-width:768px){.hero.is-light.is-bold .navbar-menu{background-image:linear-gradient(141deg,#dfd8d9 0,#f5f5f5 71%,#fff 100%)}}.hero.is-dark{background-color:#363636;color:#f5f5f5}.hero.is-dark a:not(.button):not(.dropdown-item):not(.tag),.hero.is-dark strong{color:inherit}.hero.is-dark .title{color:#f5f5f5}.hero.is-dark .subtitle{color:rgba(245,245,245,.9)}.hero.is-dark .subtitle 
a:not(.button),.hero.is-dark .subtitle strong{color:#f5f5f5}@media screen and (max-width:1087px){.hero.is-dark .navbar-menu{background-color:#363636}}.hero.is-dark .navbar-item,.hero.is-dark .navbar-link{color:rgba(245,245,245,.7)}.hero.is-dark .navbar-link.is-active,.hero.is-dark .navbar-link:hover,.hero.is-dark a.navbar-item.is-active,.hero.is-dark a.navbar-item:hover{background-color:#292929;color:#f5f5f5}.hero.is-dark .tabs a{color:#f5f5f5;opacity:.9}.hero.is-dark .tabs a:hover{opacity:1}.hero.is-dark .tabs li.is-active a{opacity:1}.hero.is-dark .tabs.is-boxed a,.hero.is-dark .tabs.is-toggle a{color:#f5f5f5}.hero.is-dark .tabs.is-boxed a:hover,.hero.is-dark .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-dark .tabs.is-boxed li.is-active a,.hero.is-dark .tabs.is-boxed li.is-active a:hover,.hero.is-dark .tabs.is-toggle li.is-active a,.hero.is-dark .tabs.is-toggle li.is-active a:hover{background-color:#f5f5f5;border-color:#f5f5f5;color:#363636}.hero.is-dark.is-bold{background-image:linear-gradient(141deg,#1f191a 0,#363636 71%,#46403f 100%)}@media screen and (max-width:768px){.hero.is-dark.is-bold .navbar-menu{background-image:linear-gradient(141deg,#1f191a 0,#363636 71%,#46403f 100%)}}.hero.is-primary{background-color:#00d1b2;color:#fff}.hero.is-primary a:not(.button):not(.dropdown-item):not(.tag),.hero.is-primary strong{color:inherit}.hero.is-primary .title{color:#fff}.hero.is-primary .subtitle{color:rgba(255,255,255,.9)}.hero.is-primary .subtitle a:not(.button),.hero.is-primary .subtitle strong{color:#fff}@media screen and (max-width:1087px){.hero.is-primary .navbar-menu{background-color:#00d1b2}}.hero.is-primary .navbar-item,.hero.is-primary .navbar-link{color:rgba(255,255,255,.7)}.hero.is-primary .navbar-link.is-active,.hero.is-primary .navbar-link:hover,.hero.is-primary a.navbar-item.is-active,.hero.is-primary a.navbar-item:hover{background-color:#00b89c;color:#fff}.hero.is-primary .tabs a{color:#fff;opacity:.9}.hero.is-primary .tabs a:hover{opacity:1}.hero.is-primary .tabs li.is-active a{opacity:1}.hero.is-primary .tabs.is-boxed a,.hero.is-primary .tabs.is-toggle a{color:#fff}.hero.is-primary .tabs.is-boxed a:hover,.hero.is-primary .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-primary .tabs.is-boxed li.is-active a,.hero.is-primary .tabs.is-boxed li.is-active a:hover,.hero.is-primary .tabs.is-toggle li.is-active a,.hero.is-primary .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#00d1b2}.hero.is-primary.is-bold{background-image:linear-gradient(141deg,#009e6c 0,#00d1b2 71%,#00e7eb 100%)}@media screen and (max-width:768px){.hero.is-primary.is-bold .navbar-menu{background-image:linear-gradient(141deg,#009e6c 0,#00d1b2 71%,#00e7eb 100%)}}.hero.is-link{background-color:#3273dc;color:#fff}.hero.is-link a:not(.button):not(.dropdown-item):not(.tag),.hero.is-link strong{color:inherit}.hero.is-link .title{color:#fff}.hero.is-link .subtitle{color:rgba(255,255,255,.9)}.hero.is-link .subtitle a:not(.button),.hero.is-link .subtitle strong{color:#fff}@media screen and (max-width:1087px){.hero.is-link .navbar-menu{background-color:#3273dc}}.hero.is-link .navbar-item,.hero.is-link .navbar-link{color:rgba(255,255,255,.7)}.hero.is-link .navbar-link.is-active,.hero.is-link .navbar-link:hover,.hero.is-link a.navbar-item.is-active,.hero.is-link a.navbar-item:hover{background-color:#2366d1;color:#fff}.hero.is-link .tabs a{color:#fff;opacity:.9}.hero.is-link .tabs a:hover{opacity:1}.hero.is-link .tabs li.is-active 
a{opacity:1}.hero.is-link .tabs.is-boxed a,.hero.is-link .tabs.is-toggle a{color:#fff}.hero.is-link .tabs.is-boxed a:hover,.hero.is-link .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-link .tabs.is-boxed li.is-active a,.hero.is-link .tabs.is-boxed li.is-active a:hover,.hero.is-link .tabs.is-toggle li.is-active a,.hero.is-link .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#3273dc}.hero.is-link.is-bold{background-image:linear-gradient(141deg,#1577c6 0,#3273dc 71%,#4366e5 100%)}@media screen and (max-width:768px){.hero.is-link.is-bold .navbar-menu{background-image:linear-gradient(141deg,#1577c6 0,#3273dc 71%,#4366e5 100%)}}.hero.is-info{background-color:#209cee;color:#fff}.hero.is-info a:not(.button):not(.dropdown-item):not(.tag),.hero.is-info strong{color:inherit}.hero.is-info .title{color:#fff}.hero.is-info .subtitle{color:rgba(255,255,255,.9)}.hero.is-info .subtitle a:not(.button),.hero.is-info .subtitle strong{color:#fff}@media screen and (max-width:1087px){.hero.is-info .navbar-menu{background-color:#209cee}}.hero.is-info .navbar-item,.hero.is-info .navbar-link{color:rgba(255,255,255,.7)}.hero.is-info .navbar-link.is-active,.hero.is-info .navbar-link:hover,.hero.is-info a.navbar-item.is-active,.hero.is-info a.navbar-item:hover{background-color:#118fe4;color:#fff}.hero.is-info .tabs a{color:#fff;opacity:.9}.hero.is-info .tabs a:hover{opacity:1}.hero.is-info .tabs li.is-active a{opacity:1}.hero.is-info .tabs.is-boxed a,.hero.is-info .tabs.is-toggle a{color:#fff}.hero.is-info .tabs.is-boxed a:hover,.hero.is-info .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-info .tabs.is-boxed li.is-active a,.hero.is-info .tabs.is-boxed li.is-active a:hover,.hero.is-info .tabs.is-toggle li.is-active a,.hero.is-info .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#209cee}.hero.is-info.is-bold{background-image:linear-gradient(141deg,#04a6d7 0,#209cee 71%,#3287f5 100%)}@media screen and (max-width:768px){.hero.is-info.is-bold .navbar-menu{background-image:linear-gradient(141deg,#04a6d7 0,#209cee 71%,#3287f5 100%)}}.hero.is-success{background-color:#23d160;color:#fff}.hero.is-success a:not(.button):not(.dropdown-item):not(.tag),.hero.is-success strong{color:inherit}.hero.is-success .title{color:#fff}.hero.is-success .subtitle{color:rgba(255,255,255,.9)}.hero.is-success .subtitle a:not(.button),.hero.is-success .subtitle strong{color:#fff}@media screen and (max-width:1087px){.hero.is-success .navbar-menu{background-color:#23d160}}.hero.is-success .navbar-item,.hero.is-success .navbar-link{color:rgba(255,255,255,.7)}.hero.is-success .navbar-link.is-active,.hero.is-success .navbar-link:hover,.hero.is-success a.navbar-item.is-active,.hero.is-success a.navbar-item:hover{background-color:#20bc56;color:#fff}.hero.is-success .tabs a{color:#fff;opacity:.9}.hero.is-success .tabs a:hover{opacity:1}.hero.is-success .tabs li.is-active a{opacity:1}.hero.is-success .tabs.is-boxed a,.hero.is-success .tabs.is-toggle a{color:#fff}.hero.is-success .tabs.is-boxed a:hover,.hero.is-success .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-success .tabs.is-boxed li.is-active a,.hero.is-success .tabs.is-boxed li.is-active a:hover,.hero.is-success .tabs.is-toggle li.is-active a,.hero.is-success .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#23d160}.hero.is-success.is-bold{background-image:linear-gradient(141deg,#12af2f 0,#23d160 71%,#2ce28a 100%)}@media screen and 
(max-width:768px){.hero.is-success.is-bold .navbar-menu{background-image:linear-gradient(141deg,#12af2f 0,#23d160 71%,#2ce28a 100%)}}.hero.is-warning{background-color:#ffdd57;color:rgba(0,0,0,.7)}.hero.is-warning a:not(.button):not(.dropdown-item):not(.tag),.hero.is-warning strong{color:inherit}.hero.is-warning .title{color:rgba(0,0,0,.7)}.hero.is-warning .subtitle{color:rgba(0,0,0,.9)}.hero.is-warning .subtitle a:not(.button),.hero.is-warning .subtitle strong{color:rgba(0,0,0,.7)}@media screen and (max-width:1087px){.hero.is-warning .navbar-menu{background-color:#ffdd57}}.hero.is-warning .navbar-item,.hero.is-warning .navbar-link{color:rgba(0,0,0,.7)}.hero.is-warning .navbar-link.is-active,.hero.is-warning .navbar-link:hover,.hero.is-warning a.navbar-item.is-active,.hero.is-warning a.navbar-item:hover{background-color:#ffd83d;color:rgba(0,0,0,.7)}.hero.is-warning .tabs a{color:rgba(0,0,0,.7);opacity:.9}.hero.is-warning .tabs a:hover{opacity:1}.hero.is-warning .tabs li.is-active a{opacity:1}.hero.is-warning .tabs.is-boxed a,.hero.is-warning .tabs.is-toggle a{color:rgba(0,0,0,.7)}.hero.is-warning .tabs.is-boxed a:hover,.hero.is-warning .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-warning .tabs.is-boxed li.is-active a,.hero.is-warning .tabs.is-boxed li.is-active a:hover,.hero.is-warning .tabs.is-toggle li.is-active a,.hero.is-warning .tabs.is-toggle li.is-active a:hover{background-color:rgba(0,0,0,.7);border-color:rgba(0,0,0,.7);color:#ffdd57}.hero.is-warning.is-bold{background-image:linear-gradient(141deg,#ffaf24 0,#ffdd57 71%,#fffa70 100%)}@media screen and (max-width:768px){.hero.is-warning.is-bold .navbar-menu{background-image:linear-gradient(141deg,#ffaf24 0,#ffdd57 71%,#fffa70 100%)}}.hero.is-danger{background-color:#ff3860;color:#fff}.hero.is-danger a:not(.button):not(.dropdown-item):not(.tag),.hero.is-danger strong{color:inherit}.hero.is-danger .title{color:#fff}.hero.is-danger .subtitle{color:rgba(255,255,255,.9)}.hero.is-danger .subtitle a:not(.button),.hero.is-danger .subtitle strong{color:#fff}@media screen and (max-width:1087px){.hero.is-danger .navbar-menu{background-color:#ff3860}}.hero.is-danger .navbar-item,.hero.is-danger .navbar-link{color:rgba(255,255,255,.7)}.hero.is-danger .navbar-link.is-active,.hero.is-danger .navbar-link:hover,.hero.is-danger a.navbar-item.is-active,.hero.is-danger a.navbar-item:hover{background-color:#ff1f4b;color:#fff}.hero.is-danger .tabs a{color:#fff;opacity:.9}.hero.is-danger .tabs a:hover{opacity:1}.hero.is-danger .tabs li.is-active a{opacity:1}.hero.is-danger .tabs.is-boxed a,.hero.is-danger .tabs.is-toggle a{color:#fff}.hero.is-danger .tabs.is-boxed a:hover,.hero.is-danger .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-danger .tabs.is-boxed li.is-active a,.hero.is-danger .tabs.is-boxed li.is-active a:hover,.hero.is-danger .tabs.is-toggle li.is-active a,.hero.is-danger .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#ff3860}.hero.is-danger.is-bold{background-image:linear-gradient(141deg,#ff0561 0,#ff3860 71%,#ff5257 100%)}@media screen and (max-width:768px){.hero.is-danger.is-bold .navbar-menu{background-image:linear-gradient(141deg,#ff0561 0,#ff3860 71%,#ff5257 100%)}}.hero.is-small .hero-body{padding-bottom:1.5rem;padding-top:1.5rem}@media screen and (min-width:769px),print{.hero.is-medium .hero-body{padding-bottom:9rem;padding-top:9rem}}@media screen and (min-width:769px),print{.hero.is-large .hero-body{padding-bottom:18rem;padding-top:18rem}}.hero.is-fullheight 
.hero-body,.hero.is-fullheight-with-navbar .hero-body,.hero.is-halfheight .hero-body{align-items:center;display:flex}.hero.is-fullheight .hero-body>.container,.hero.is-fullheight-with-navbar .hero-body>.container,.hero.is-halfheight .hero-body>.container{flex-grow:1;flex-shrink:1}.hero.is-halfheight{min-height:50vh}.hero.is-fullheight{min-height:100vh}.hero-video{overflow:hidden}.hero-video video{left:50%;min-height:100%;min-width:100%;position:absolute;top:50%;-webkit-transform:translate3d(-50%,-50%,0);transform:translate3d(-50%,-50%,0)}.hero-video.is-transparent{opacity:.3}@media screen and (max-width:768px){.hero-video{display:none}}.hero-buttons{margin-top:1.5rem}@media screen and (max-width:768px){.hero-buttons .button{display:flex}.hero-buttons .button:not(:last-child){margin-bottom:.75rem}}@media screen and (min-width:769px),print{.hero-buttons{display:flex;justify-content:center}.hero-buttons .button:not(:last-child){margin-right:1.5rem}}.hero-foot,.hero-head{flex-grow:0;flex-shrink:0}.hero-body{flex-grow:1;flex-shrink:0;padding:3rem 1.5rem}.section{padding:3rem 1.5rem}@media screen and (min-width:1088px){.section.is-medium{padding:9rem 1.5rem}.section.is-large{padding:18rem 1.5rem}}.footer{background-color:#fafafa;padding:3rem 1.5rem 6rem} \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/export/css/result.css b/testing/web-platform/tests/tools/wave/export/css/result.css
new file mode 100644
index 0000000000..9e7c4dc0ba
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/export/css/result.css
@@ -0,0 +1,75 @@
+body {
+ margin: 0;
+ padding: 0;
+ display: flex;
+ justify-content: center;
+ font-family: "Noto Sans", sans-serif;
+ background-color: white;
+ color: #000;
+}
+
+.header {
+ display: flex;
+ margin: 50px 0 30px 0;
+}
+
+.header :first-child {
+ flex: 1;
+}
+
+.site-logo {
+ max-width: 300px;
+ margin-left: -15px;
+}
+
+.content {
+ width: 1000px;
+}
+
+#test-path,
+#token {
+ font-family: monospace;
+ font-size: 12pt;
+}
+
+.pass {
+ color: green;
+}
+
+.fail {
+ color: red;
+}
+
+.timeout {
+ color: rgb(224, 127, 0);
+}
+
+.not-run {
+ color: blue;
+}
+
+.api-result-timeoutfiles {
+ display: none; /* don't display for now */
+ flex-basis: 100%;
+}
+
+#header {
+ display: flex;
+ align-items: center;
+}
+
+#header > :first-child {
+ flex: 1;
+}
+
+#controls-wrapper {
+ display: flex;
+}
+
+.no-border-radius {
+ border-radius: 0;
+}
+
+#results-table .button {
+ margin: 0 2px;
+}
diff --git a/testing/web-platform/tests/tools/wave/export/index.html b/testing/web-platform/tests/tools/wave/export/index.html
new file mode 100644
index 0000000000..73de099eef
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/export/index.html
@@ -0,0 +1,375 @@
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8" />
+ <title>Results - Web Platform Test</title>
+ <link rel="stylesheet" href="css/bulma.min.css" />
+ <link rel="stylesheet" href="css/result.css" />
+ <script src="lib/utils.js"></script>
+ <script src="lib/ui.js"></script>
+ <script src="results.json.js"></script>
+ <script src="details.json.js"></script>
+ </head>
+ <body>
+ <script>
+ window.onload = () => {
+ resultUi.render();
+ resultUi.loadData();
+ };
+
+ const resultUi = {
+ state: { details: null, results: null },
+ loadData: () => {
+ resultUi.loadSessionDetails();
+ resultUi.loadSessionResults();
+ },
+ loadSessionDetails(callback = () => {}) {
+ resultUi.state.details = details;
+ resultUi.renderSessionDetails();
+ callback(details);
+ },
+ loadSessionResults(callback = () => {}) {
+ const { details } = resultUi.state;
+ Object.keys(details.test_files_count).forEach(api =>
+ !results[api] ? (results[api] = {}) : null
+ );
+ for (let api in results) {
+ let { pass, fail, timeout, not_run } = results[api];
+ let complete = 0;
+ if (pass) complete += pass;
+ if (fail) complete += fail;
+ if (timeout) complete += timeout;
+ if (not_run) complete += not_run;
+ results[api].complete = complete;
+ const { test_files_count, test_files_completed } = details;
+ results[api].isDone =
+ test_files_count[api] === test_files_completed[api];
+ results[api].testFilesCount = test_files_count[api];
+ results[api].testFilesCompleted = test_files_completed[api];
+ }
+ resultUi.state.results = results;
+ resultUi.renderApiResults();
+ callback(results);
+ },
+ render() {
+ const resultView = UI.createElement({
+ className: "content",
+ style: "margin-bottom: 40px;",
+ children: [
+ {
+ className: "header",
+ children: [
+ {
+ children: [
+ {
+ element: "img",
+ src: "res/wavelogo_2016.jpg",
+ className: "site-logo"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ id: "header",
+ children: [
+ { className: "title", text: "Result" },
+ { id: "controls" }
+ ]
+ },
+ { id: "session-details" },
+ { id: "api-results" },
+ { id: "timeout-files" },
+ { id: "export" }
+ ]
+ });
+ const root = UI.getRoot();
+ root.innerHTML = "";
+ root.appendChild(resultView);
+ resultUi.renderSessionDetails();
+ resultUi.renderApiResults();
+ },
+ renderSessionDetails() {
+ const { state } = resultUi;
+ const { details } = state;
+ if (!details) return;
+ const sessionDetailsView = UI.createElement({
+ style: "margin-bottom: 20px"
+ });
+
+ const heading = UI.createElement({
+ text: "Session details",
+ className: "title is-4"
+ });
+ sessionDetailsView.appendChild(heading);
+
+ const getTagStyle = status => {
+ switch (status) {
+ case "completed":
+ return "is-success";
+ case "running":
+ return "is-info";
+ case "aborted":
+ return "is-danger";
+ case "paused":
+ return "is-warning";
+ }
+ };
+
+ const { test_files_count, token } = details;
+ const detailsTable = UI.createElement({
+ element: "table",
+ children: {
+ element: "tbody",
+ children: [
+ {
+ element: "tr",
+ children: [
+ { element: "td", text: "Token:", style: "width: 140px;" },
+ {
+ element: "td",
+ text: token,
+ className: "is-family-monospace"
+ }
+ ]
+ },
+ {
+ element: "tr",
+ children: [
+ { element: "td", text: "User Agent:" },
+ { element: "td", text: details.user_agent || "" }
+ ]
+ },
+ {
+ element: "tr",
+ children: [
+ { element: "td", text: "Test Path:" },
+ { element: "td", text: details.path || "" }
+ ]
+ },
+ {
+ element: "tr",
+ children: [
+ { element: "td", text: "Total Test Files:" },
+ {
+ element: "td",
+ text: Object.keys(test_files_count).reduce(
+ (sum, api) => (sum += test_files_count[api]),
+ 0
+ )
+ }
+ ]
+ },
+ {
+ element: "tr",
+ children: [
+ { element: "td", text: "Test Timeout:" },
+ { element: "td", text: details.test_timeout || "" }
+ ]
+ },
+ {
+ element: "tr",
+ children: [
+ { element: "td", text: "Started:" },
+ {
+ element: "td",
+ text: new Date(details.date_started).toLocaleString()
+ }
+ ]
+ },
+ details.date_finished
+ ? {
+ element: "tr",
+ children: [
+ { element: "td", text: "Finished:" },
+ {
+ element: "td",
+ text: new Date(details.date_finished).toLocaleString()
+ }
+ ]
+ }
+ : null,
+
+ details.date_finished
+ ? {
+ element: "tr",
+ children: [
+ { element: "td", text: "Duration:" },
+ {
+ element: "td",
+ id: "duration",
+ text: utils.millisToTimeString(
+ details.date_finished
+ ? parseInt(details.date_finished) -
+ parseInt(details.date_started)
+ : Date.now() - parseInt(details.date_started)
+ )
+ }
+ ]
+ }
+ : null
+ ]
+ }
+ });
+ sessionDetailsView.appendChild(detailsTable);
+
+ const sessionDetails = UI.getElement("session-details");
+ sessionDetails.innerHTML = "";
+ sessionDetails.appendChild(sessionDetailsView);
+ },
+ renderApiResults() {
+ const { results } = resultUi.state;
+ if (!results) return;
+
+ const apiResultsView = UI.createElement({
+ style: "margin-bottom: 20px"
+ });
+
+ const heading = UI.createElement({
+ text: "API Results",
+ className: "title is-4"
+ });
+ apiResultsView.appendChild(heading);
+
+ const header = UI.createElement({
+ element: "thead",
+ children: [
+ {
+ element: "tr",
+ children: [
+ { element: "th", text: "API" },
+ { element: "th", text: "Pass" },
+ { element: "th", text: "Fail" },
+ { element: "th", text: "Timeout" },
+ { element: "th", text: "Not Run" },
+ { element: "th", text: "Test Files Run" }
+ ]
+ }
+ ]
+ });
+
+ const apis = Object.keys(results).sort((apiA, apiB) =>
+ apiA.toLowerCase() > apiB.toLowerCase() ? 1 : -1
+ );
+
+ const rows = apis.map(api => {
+ const {
+ complete = 0,
+ pass = 0,
+ fail = 0,
+ timeout = 0,
+ timeoutfiles = [],
+ not_run: notRun = 0,
+ isDone = false,
+ testFilesCount,
+ testFilesCompleted = 0
+ } = results[api];
+ return UI.createElement({
+ element: "tr",
+ children: [
+ { element: "td", text: api },
+ {
+ element: "td",
+ style: "color: hsl(141, 71%, 38%)",
+ text: `${pass} (${utils.percent(pass, complete)}%)`
+ },
+ {
+ element: "td",
+ className: "has-text-danger",
+ text: `${fail} (${utils.percent(fail, complete)}%)`
+ },
+ {
+ element: "td",
+ style: "color: hsl(48, 100%, 40%)",
+ text: `${timeout} (${utils.percent(timeout, complete)}%)`
+ },
+ {
+ element: "td",
+ className: "has-text-info",
+ text: `${notRun} (${utils.percent(notRun, complete)}%)`
+ },
+ {
+ element: "td",
+ text: `${testFilesCompleted}/${testFilesCount} (${utils.percent(
+ testFilesCompleted,
+ testFilesCount
+ )}%)`
+ }
+ ]
+ });
+ });
+
+ const { pass, fail, timeout, not_run, complete } = apis.reduce(
+ (sum, api) => {
+ Object.keys(sum).forEach(
+ key => (sum[key] += results[api][key] ? results[api][key] : 0)
+ );
+ return sum;
+ },
+ { complete: 0, pass: 0, fail: 0, timeout: 0, not_run: 0 }
+ );
+ const testFilesCount = Object.keys(results).reduce(
+ (sum, api) => (sum += results[api].testFilesCount),
+ 0
+ );
+ const testFilesCompleted = Object.keys(results).reduce(
+ (sum, api) => (sum += results[api].testFilesCompleted || 0),
+ 0
+ );
+
+ const footer = UI.createElement({
+ element: "tfoot",
+ children: [
+ {
+ element: "tr",
+ children: [
+ { element: "th", text: "Total" },
+ {
+ element: "th",
+ style: "color: hsl(141, 71%, 38%)",
+ text: `${pass} (${utils.percent(pass, complete)}%)`
+ },
+ {
+ element: "th",
+ className: "has-text-danger",
+ text: `${fail} (${utils.percent(fail, complete)}%)`
+ },
+ {
+ element: "th",
+ style: "color: hsl(48, 100%, 40%)",
+ text: `${timeout} (${utils.percent(timeout, complete)}%)`
+ },
+ {
+ element: "th",
+ className: "has-text-info",
+ text: `${not_run} (${utils.percent(not_run, complete)}%)`
+ },
+ {
+ element: "th",
+ text: `${testFilesCompleted}/${testFilesCount} (${utils.percent(
+ testFilesCompleted,
+ testFilesCount
+ )}%)`
+ }
+ ]
+ }
+ ]
+ });
+
+ const resultsTable = UI.createElement({
+ element: "table",
+ className: "table",
+ id: "results-table",
+ style: "border-radius: 3px; border: 2px solid hsl(0, 0%, 86%);",
+ children: [header, { element: "tbody", children: rows }, footer]
+ });
+ apiResultsView.appendChild(resultsTable);
+
+ const apiResults = UI.getElement("api-results");
+ apiResults.innerHTML = "";
+ apiResults.appendChild(apiResultsView);
+ }
+ };
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/wave/export/lib/ui.js b/testing/web-platform/tests/tools/wave/export/lib/ui.js
new file mode 100644
index 0000000000..59e2b786e5
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/export/lib/ui.js
@@ -0,0 +1,64 @@
+const UI = {
+ createElement: config => {
+ if (!config) return document.createElement("div");
+ const elementType = config.element || "div";
+ const element = document.createElement(elementType);
+
+ Object.keys(config).forEach(property => {
+ const value = config[property];
+ switch (property.toLowerCase()) {
+ case "id":
+ case "src":
+ case "style":
+ case "placeholder":
+ case "title":
+ element.setAttribute(property, value);
+ return;
+ case "classname":
+ element.setAttribute("class", value);
+ return;
+ case "text":
+ element.innerText = value;
+ return;
+ case "html":
+ element.innerHTML = value;
+ return;
+ case "onclick":
+ element.onclick = value.bind(element);
+ return;
+ case "onchange":
+ element.onchange = value.bind(element);
+ return;
+ case "onkeydown":
+ element.onkeydown = value.bind(element);
+ return;
+ case "type":
+ if (elementType === "input") element.setAttribute("type", value);
+ return;
+ case "children":
+ if (value instanceof Array) {
+ value.forEach(child =>
+ element.appendChild(
+ child instanceof Element ? child : UI.createElement(child)
+ )
+ );
+ } else {
+ element.appendChild(
+ value instanceof Element ? value : UI.createElement(value)
+ );
+ }
+ return;
+ case "disabled":
+ if (value) element.setAttribute("disabled", true);
+ return;
+ }
+ });
+ return element;
+ },
+ getElement: id => {
+ return document.getElementById(id);
+ },
+ getRoot: () => {
+ return document.getElementsByTagName("body")[0];
+ }
+};
diff --git a/testing/web-platform/tests/tools/wave/export/lib/utils.js b/testing/web-platform/tests/tools/wave/export/lib/utils.js
new file mode 100644
index 0000000000..cbd75b5216
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/export/lib/utils.js
@@ -0,0 +1,40 @@
+const utils = {
+ parseQuery: queryString => {
+ if (queryString.indexOf("?") === -1) return {};
+ queryString = queryString.split("?")[1];
+ const query = {};
+ for (let part of queryString.split("&")) {
+ const keyValue = part.split("=");
+ query[keyValue[0]] = keyValue[1] ? keyValue[1] : null;
+ }
+ return query;
+ },
+ percent: (count, total) => {
+ const percent = Math.floor((count / total) * 10000) / 100;
+ if (!percent) {
+ return 0;
+ }
+ return percent;
+ },
+ saveBlobAsFile: (blob, filename) => {
+ const url = URL.createObjectURL(blob);
+ const a = document.createElement("a");
+ a.style.display = "none";
+ document.body.appendChild(a);
+ a.href = url;
+ a.download = filename;
+ a.click();
+ document.body.removeChild(a);
+ },
+ millisToTimeString(totalMilliseconds) {
+ let milliseconds = (totalMilliseconds % 1000) + "";
+ milliseconds = milliseconds.padStart(3, "0");
+ let seconds = (Math.floor(totalMilliseconds / 1000) % 60) + "";
+ seconds = seconds.padStart(2, "0");
+ let minutes = (Math.floor(totalMilliseconds / 60000) % 60) + "";
+ minutes = minutes.padStart(2, "0");
+ let hours = Math.floor(totalMilliseconds / 3600000) + "";
+ hours = hours.padStart(2, "0");
+ return `${hours}:${minutes}:${seconds}`;
+ }
+};
diff --git a/testing/web-platform/tests/tools/wave/export/res/wavelogo_2016.jpg b/testing/web-platform/tests/tools/wave/export/res/wavelogo_2016.jpg
new file mode 100644
index 0000000000..3881409597
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/export/res/wavelogo_2016.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/network/__init__.py b/testing/web-platform/tests/tools/wave/network/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/network/__init__.py
diff --git a/testing/web-platform/tests/tools/wave/network/api/__init__.py b/testing/web-platform/tests/tools/wave/network/api/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/network/api/__init__.py
diff --git a/testing/web-platform/tests/tools/wave/network/api/api_handler.py b/testing/web-platform/tests/tools/wave/network/api/api_handler.py
new file mode 100644
index 0000000000..9c67e6c0cd
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/network/api/api_handler.py
@@ -0,0 +1,99 @@
+# mypy: allow-untyped-defs
+
+import json
+import sys
+import traceback
+import logging
+
+from urllib.parse import parse_qsl
+
+global logger
+logger = logging.getLogger("wave-api-handler")
+
+
+class ApiHandler:
+ def __init__(self, web_root):
+ self._web_root = web_root
+
+ def set_headers(self, response, headers):
+ if not isinstance(response.headers, list):
+ response.headers = []
+ for header in headers:
+ response.headers.append(header)
+
+ def send_json(self, data, response, status=None):
+ if status is None:
+ status = 200
+ json_string = json.dumps(data, indent=4)
+ response.content = json_string
+ self.set_headers(response, [("Content-Type", "application/json")])
+ response.status = status
+
+ def send_file(self, blob, file_name, response):
+ self.set_headers(response,
+ [("Content-Disposition",
+ "attachment;filename=" + file_name)])
+ response.content = blob
+
+ def send_zip(self, data, file_name, response):
+ response.headers = [("Content-Type", "application/x-compressed")]
+ self.send_file(data, file_name, response)
+
+ def parse_uri(self, request):
+ path = request.url_parts.path
+ if self._web_root is not None:
+ path = path[len(self._web_root):]
+
+ uri_parts = list(filter(None, path.split("/")))
+ return uri_parts
+
+ def parse_query_parameters(self, request):
+ return dict(parse_qsl(request.url_parts.query))
+
+ def handle_exception(self, message):
+ info = sys.exc_info()
+ traceback.print_tb(info[2])
+ logger.error(f"{message}: {info[0].__name__}: {info[1].args[0]}")
+
+ def create_hal_list(self, items, uris, index, count, total):
+ hal_list = {}
+ links = {}
+ if uris is not None:
+ for relation in uris:
+ if relation == "self":
+ continue
+ uri = uris[relation]
+ templated = "{" in uri
+ links[relation] = {"href": uri, "templated": templated}
+
+ if "self" in uris:
+ self_uri = uris["self"]
+ self_uri += f"?index={index}&count={count}"
+ links["self"] = {"href": self_uri}
+
+ first_uri = uris["self"]
+ first_uri += f"?index={0}&count={count}"
+ links["first"] = {"href": first_uri}
+
+ last_uri = uris["self"]
+ last_uri += f"?index={total - (total % count)}&count={count}"
+ links["last"] = {"href": last_uri}
+
+ if index + count <= total:
+ next_index = index + count
+ next_uri = uris["self"]
+ next_uri += f"?index={next_index}&count={count}"
+ links["next"] = {"href": next_uri}
+
+ if index != 0:
+ previous_index = index - count
+ if previous_index < 0:
+ previous_index = 0
+ previous_uri = uris["self"]
+ previous_uri += f"?index={previous_index}&count={count}"
+ links["previous"] = {"href": previous_uri}
+
+ hal_list["_links"] = links
+ hal_list["items"] = items
+
+ return hal_list
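A minimal, self-contained sketch of the pagination links that create_hal_list derives from the "self" URI; the URI path, index, count and total below are illustrative, and the real method additionally carries the other (possibly templated) relations plus the "items" list:

# Standalone reproduction of the link layout built above (hypothetical values).
def hal_links(self_uri, index, count, total):
    links = {
        "self": {"href": f"{self_uri}?index={index}&count={count}"},
        "first": {"href": f"{self_uri}?index=0&count={count}"},
        "last": {"href": f"{self_uri}?index={total - (total % count)}&count={count}"},
    }
    if index + count <= total:
        links["next"] = {"href": f"{self_uri}?index={index + count}&count={count}"}
    if index != 0:
        links["previous"] = {"href": f"{self_uri}?index={max(index - count, 0)}&count={count}"}
    return links

print(hal_links("/_wave/api/sessions", index=10, count=10, total=25))
# "next" points at ?index=20&count=10, "previous" at ?index=0&count=10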
diff --git a/testing/web-platform/tests/tools/wave/network/api/devices_api_handler.py b/testing/web-platform/tests/tools/wave/network/api/devices_api_handler.py
new file mode 100644
index 0000000000..ecd9a96770
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/network/api/devices_api_handler.py
@@ -0,0 +1,202 @@
+# mypy: allow-untyped-defs
+
+import json
+import threading
+
+from .api_handler import ApiHandler
+from ...data.http_polling_event_listener import HttpPollingEventListener
+from ...testing.event_dispatcher import DEVICES
+from ...utils.serializer import serialize_device
+from ...testing.devices_manager import DEVICE_TIMEOUT, RECONNECT_TIME
+from ...data.exceptions.not_found_exception import NotFoundException
+
+
+class DevicesApiHandler(ApiHandler):
+ def __init__(self, devices_manager, event_dispatcher, web_root):
+ super().__init__(web_root)
+ self._devices_manager = devices_manager
+ self._event_dispatcher = event_dispatcher
+
+ def create_device(self, request, response):
+ try:
+ user_agent = request.headers[b"user-agent"].decode("utf-8")
+
+ device = self._devices_manager.create_device(user_agent)
+
+ self.send_json({"token": device.token}, response)
+ except Exception:
+ self.handle_exception("Failed to create device")
+ response.status = 500
+
+ def read_device(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ device = self._devices_manager.read_device(token)
+
+ device_object = serialize_device(device)
+
+ self.send_json(device_object, response)
+ except NotFoundException:
+ self.handle_exception("Failed to read device")
+ response.status = 404
+ except Exception:
+ self.handle_exception("Failed to read device")
+ response.status = 500
+
+ def read_devices(self, request, response):
+ try:
+ devices = self._devices_manager.read_devices()
+
+ device_objects = []
+ for device in devices:
+ device_object = serialize_device(device)
+ device_objects.append(device_object)
+
+ self.send_json(device_objects, response)
+ except Exception:
+ self.handle_exception("Failed to read devices")
+ response.status = 500
+
+ def register_event_listener(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+ query = self.parse_query_parameters(request)
+
+ if "device_token" in query:
+ self._devices_manager.refresh_device(query["device_token"])
+
+ event = threading.Event()
+ timer = threading.Timer(
+ (DEVICE_TIMEOUT - RECONNECT_TIME) / 1000,
+ event.set,
+ [])
+ timer.start()
+ http_polling_event_listener = HttpPollingEventListener(token, event)
+ event_listener_token = self._event_dispatcher.add_event_listener(http_polling_event_listener)
+
+ event.wait()
+
+ message = http_polling_event_listener.message
+ if message is not None:
+ self.send_json(data=message, response=response)
+ self._event_dispatcher.remove_event_listener(event_listener_token)
+ except Exception:
+ self.handle_exception("Failed to register event listener")
+ response.status = 500
+
+ def register_global_event_listener(self, request, response):
+ try:
+ query = self.parse_query_parameters(request)
+
+ if "device_token" in query:
+ self._devices_manager.refresh_device(query["device_token"])
+
+ event = threading.Event()
+ timer = threading.Timer(
+ (DEVICE_TIMEOUT - RECONNECT_TIME) / 1000,
+ event.set,
+ [])
+ timer.start()
+ http_polling_event_listener = HttpPollingEventListener(DEVICES, event)
+ event_listener_token = self._event_dispatcher.add_event_listener(http_polling_event_listener)
+
+ event.wait()
+
+ message = http_polling_event_listener.message
+ if message is not None:
+ self.send_json(data=message, response=response)
+ self._event_dispatcher.remove_event_listener(event_listener_token)
+ except Exception:
+ self.handle_exception("Failed to register global event listener")
+ response.status = 500
+
+ def post_global_event(self, request, response):
+ try:
+ event = {}
+ body = request.body.decode("utf-8")
+ if body != "":
+ event = json.loads(body)
+
+ query = self.parse_query_parameters(request)
+ if "device_token" in query:
+ self._devices_manager.refresh_device(query["device_token"])
+
+ event_type = None
+ if "type" in event:
+ event_type = event["type"]
+ data = None
+ if "data" in event:
+ data = event["data"]
+ self._devices_manager.post_global_event(event_type, data)
+
+ except Exception:
+ self.handle_exception("Failed to post global event")
+ response.status = 500
+
+ def post_event(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ event = {}
+ body = request.body.decode("utf-8")
+ if body != "":
+ event = json.loads(body)
+
+ query = self.parse_query_parameters(request)
+ if "device_token" in query:
+ self._devices_manager.refresh_device(query["device_token"])
+
+ event_type = None
+ if "type" in event:
+ event_type = event["type"]
+ data = None
+ if "data" in event:
+ data = event["data"]
+ self._devices_manager.post_event(token, event_type, data)
+
+ except Exception:
+ self.handle_exception("Failed to post event")
+ response.status = 500
+
+ def handle_request(self, request, response):
+ method = request.method
+ uri_parts = self.parse_uri(request)
+
+ # /api/devices
+ if len(uri_parts) == 2:
+ if method == "POST":
+ self.create_device(request, response)
+ return
+ if method == "GET":
+ self.read_devices(request, response)
+ return
+
+ # /api/devices/<function>
+ if len(uri_parts) == 3:
+ function = uri_parts[2]
+ if method == "GET":
+ if function == "events":
+ self.register_global_event_listener(request, response)
+ return
+ self.read_device(request, response)
+ return
+ if method == "POST":
+ if function == "events":
+ self.post_global_event(request, response)
+ return
+
+ # /api/devices/<token>/<function>
+ if len(uri_parts) == 4:
+ function = uri_parts[3]
+ if method == "GET":
+ if function == "events":
+ self.register_event_listener(request, response)
+ return
+ if method == "POST":
+ if function == "events":
+ self.post_event(request, response)
+ return
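A hypothetical client-side walk-through of the routes wired up in DevicesApiHandler.handle_request; the host, port and /_wave/ web root are assumptions, and the event type and payload are purely illustrative:

import requests

BASE = "http://web-platform.test:8000/_wave/api/devices"  # assumed deployment

# POST /api/devices registers a device from its User-Agent and returns a token.
token = requests.post(BASE, headers={"User-Agent": "sketch-client/1.0"}).json()["token"]

# GET /api/devices/<token> returns the serialized device.
device = requests.get(f"{BASE}/{token}").json()

# POST /api/devices/<token>/events delivers an event to that device's listeners.
requests.post(f"{BASE}/{token}/events",
              json={"type": "example-event", "data": {"note": "illustrative"}})

# GET /api/devices/<token>/events long-polls until an event arrives or the server
# releases the request shortly before DEVICE_TIMEOUT; the body may be empty.
resp = requests.get(f"{BASE}/{token}/events", timeout=70)
message = resp.json() if resp.content else None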
diff --git a/testing/web-platform/tests/tools/wave/network/api/general_api_handler.py b/testing/web-platform/tests/tools/wave/network/api/general_api_handler.py
new file mode 100644
index 0000000000..65883a9b75
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/network/api/general_api_handler.py
@@ -0,0 +1,76 @@
+# mypy: allow-untyped-defs
+
+from .api_handler import ApiHandler
+
+TOKEN_LENGTH = 36
+
+
+class GeneralApiHandler(ApiHandler):
+ def __init__(
+ self,
+ web_root,
+ read_sessions_enabled,
+ import_results_enabled,
+ reports_enabled,
+ version_string,
+ test_type_selection_enabled,
+ test_file_selection_enabled
+ ):
+ super().__init__(web_root)
+ self.read_sessions_enabled = read_sessions_enabled
+ self.import_results_enabled = import_results_enabled
+ self.reports_enabled = reports_enabled
+ self.version_string = version_string
+ self.test_type_selection_enabled = test_type_selection_enabled
+ self.test_file_selection_enabled = test_file_selection_enabled
+
+ def read_status(self):
+ try:
+ return {
+ "format": "application/json",
+ "data": {
+ "version_string": self.version_string,
+ "read_sessions_enabled": self.read_sessions_enabled,
+ "import_results_enabled": self.import_results_enabled,
+ "reports_enabled": self.reports_enabled,
+ "test_type_selection_enabled": self.test_type_selection_enabled,
+ "test_file_selection_enabled": self.test_file_selection_enabled
+ }
+ }
+ except Exception:
+ self.handle_exception("Failed to read server configuration")
+ return {"status": 500}
+
+ def handle_request(self, request, response):
+ method = request.method
+ uri_parts = self.parse_uri(request)
+
+ result = None
+ # /api/<function>
+ if len(uri_parts) == 2:
+ function = uri_parts[1]
+ if method == "GET":
+ if function == "status":
+ result = self.read_status()
+
+ if result is None:
+ response.status = 404
+ return
+
+ format = None
+ if "format" in result:
+ format = result["format"]
+ if format == "application/json":
+ data = None
+ if "data" in result:
+ data = result["data"]
+ status = 200
+ if "status" in result:
+ status = result["status"]
+ self.send_json(data, response, status)
+ return
+
+ status = 404
+ if "status" in result:
+ status = result["status"]
+ response.status = status
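For orientation, the JSON served by GET /api/status is simply the dict assembled in read_status above, serialized by ApiHandler.send_json; a sketch with placeholder values (not defaults):

import json

status = {
    "version_string": "v0.0",              # placeholder value
    "read_sessions_enabled": False,
    "import_results_enabled": False,
    "reports_enabled": True,
    "test_type_selection_enabled": True,
    "test_file_selection_enabled": True,
}
print(json.dumps(status, indent=4))  # matches send_json's formatting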
diff --git a/testing/web-platform/tests/tools/wave/network/api/results_api_handler.py b/testing/web-platform/tests/tools/wave/network/api/results_api_handler.py
new file mode 100644
index 0000000000..a9da0df10f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/network/api/results_api_handler.py
@@ -0,0 +1,232 @@
+# mypy: allow-untyped-defs
+
+import json
+
+from .api_handler import ApiHandler
+from ...data.exceptions.duplicate_exception import DuplicateException
+from ...data.exceptions.invalid_data_exception import InvalidDataException
+
+
+class ResultsApiHandler(ApiHandler):
+ def __init__(self, results_manager, session_manager, web_root):
+ super().__init__(web_root)
+ self._results_manager = results_manager
+ self._sessions_manager = session_manager
+
+ def create_result(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ data = None
+ body = request.body.decode("utf-8")
+ if body != "":
+ data = json.loads(body)
+
+ self._results_manager.create_result(token, data)
+
+ except Exception:
+ self.handle_exception("Failed to create result")
+ response.status = 500
+
+ def read_results(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ session = self._sessions_manager.read_session(token)
+ if session is None:
+ response.status = 404
+ return
+
+ results = self._results_manager.read_results(token)
+
+ self.send_json(response=response, data=results)
+
+ except Exception:
+ self.handle_exception("Failed to read results")
+ response.status = 500
+
+ def read_results_compact(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ results = self._results_manager.read_flattened_results(token)
+
+ self.send_json(response=response, data=results)
+
+ except Exception:
+ self.handle_exception("Failed to read compact results")
+ response.status = 500
+
+ def read_results_api_wpt_report_url(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+ api = uri_parts[3]
+
+ uri = self._results_manager.read_results_wpt_report_uri(token, api)
+ self.send_json({"uri": uri}, response)
+ except Exception:
+ self.handle_exception("Failed to read results report url")
+ response.status = 500
+
+ def read_results_api_wpt_multi_report_uri(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ api = uri_parts[2]
+ query = self.parse_query_parameters(request)
+ tokens = query["tokens"].split(",")
+ uri = self._results_manager.read_results_wpt_multi_report_uri(
+ tokens,
+ api
+ )
+ self.send_json({"uri": uri}, response)
+ except Exception:
+ self.handle_exception("Failed to read results multi report url")
+ response.status = 500
+
+ def download_results_api_json(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+ api = uri_parts[3]
+ blob = self._results_manager.export_results_api_json(token, api)
+ if blob is None:
+ response.status = 404
+ return
+ file_path = self._results_manager.get_json_path(token, api)
+ file_name = "{}-{}-{}".format(
+ token.split("-")[0],
+ api,
+ file_path.split("/")[-1]
+ )
+ self.send_zip(blob, file_name, response)
+ except Exception:
+ self.handle_exception("Failed to download api json")
+ response.status = 500
+
+ def import_results_api_json(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+ api = uri_parts[3]
+ blob = request.body
+
+ self._results_manager.import_results_api_json(token, api, blob)
+
+ response.status = 200
+ except Exception:
+ self.handle_exception("Failed to upload api json")
+ response.status = 500
+
+ def download_results_all_api_jsons(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+ blob = self._results_manager.export_results_all_api_jsons(token)
+ file_name = token.split("-")[0] + "_results_json.zip"
+ self.send_zip(blob, file_name, response)
+ except Exception:
+ self.handle_exception("Failed to download all api jsons")
+ response.status = 500
+
+ def download_results(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+ blob = self._results_manager.export_results(token)
+ if blob is None:
+ response.status = 404
+ return
+ file_name = token + ".zip"
+ self.send_zip(blob, file_name, response)
+ except Exception:
+ self.handle_exception("Failed to download results")
+ response.status = 500
+
+ def download_results_overview(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+ blob = self._results_manager.export_results_overview(token)
+ if blob is None:
+ response.status = 404
+ return
+ file_name = token.split("-")[0] + "_results_html.zip"
+ self.send_zip(blob, file_name, response)
+ except Exception:
+ self.handle_exception("Failed to download results overview")
+ response.status = 500
+
+ def import_results(self, request, response):
+ try:
+ blob = request.body
+ token = self._results_manager.import_results(blob)
+ self.send_json({"token": token}, response)
+ except DuplicateException:
+ self.handle_exception("Failed to import results")
+ self.send_json({"error": "Session already exists!"}, response, 400)
+ return
+ except InvalidDataException:
+ self.handle_exception("Failed to import results")
+ self.send_json({"error": "Invalid input data!"}, response, 400)
+ return
+ except Exception:
+ self.handle_exception("Failed to import results")
+ response.status = 500
+
+ def handle_request(self, request, response):
+ method = request.method
+ uri_parts = self.parse_uri(request)
+
+ # /api/results/<token>
+ if len(uri_parts) == 3:
+ if method == "POST":
+ if uri_parts[2] == "import":
+ self.import_results(request, response)
+ return
+ self.create_result(request, response)
+ return
+
+ if method == "GET":
+ self.read_results(request, response)
+ return
+
+ # /api/results/<token>/<function>
+ if len(uri_parts) == 4:
+ function = uri_parts[3]
+ if method == "GET":
+ if function == "compact":
+ self.read_results_compact(request, response)
+ return
+ if function == "reporturl":
+ return self.read_results_api_wpt_multi_report_uri(request,
+ response)
+ if function == "json":
+ self.download_results_all_api_jsons(request, response)
+ return
+ if function == "export":
+ self.download_results(request, response)
+ return
+ if function == "overview":
+ self.download_results_overview(request, response)
+ return
+
+ # /api/results/<token>/<api>/<function>
+ if len(uri_parts) == 5:
+ function = uri_parts[4]
+ if method == "GET":
+ if function == "reporturl":
+ self.read_results_api_wpt_report_url(request, response)
+ return
+ if function == "json":
+ self.download_results_api_json(request, response)
+ return
+ if method == "POST":
+ if function == "json":
+ self.import_results_api_json(request, response)
+ return
+
+ response.status = 404
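A hypothetical client sketch against a few of the routes dispatched in ResultsApiHandler.handle_request; the base URL and the session token are illustrative:

import requests

BASE = "http://web-platform.test:8000/_wave/api/results"  # assumed deployment
token = "00000000-0000-0000-0000-000000000000"            # illustrative token

# GET /api/results/<token>/compact returns the flattened per-API counts.
compact = requests.get(f"{BASE}/{token}/compact").json()

# GET /api/results/<token>/export streams <token>.zip with the full session export.
export = requests.get(f"{BASE}/{token}/export")
with open(f"{token}.zip", "wb") as f:
    f.write(export.content)

# POST /api/results/import re-creates a session from such an export and answers
# with its token; importing an already existing session yields a 400 response.
with open(f"{token}.zip", "rb") as f:
    imported = requests.post(f"{BASE}/import", data=f.read()).json()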
diff --git a/testing/web-platform/tests/tools/wave/network/api/sessions_api_handler.py b/testing/web-platform/tests/tools/wave/network/api/sessions_api_handler.py
new file mode 100644
index 0000000000..9eb896b807
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/network/api/sessions_api_handler.py
@@ -0,0 +1,458 @@
+# mypy: allow-untyped-defs
+
+import json
+import threading
+
+from .api_handler import ApiHandler
+
+from ...utils.serializer import serialize_session
+from ...data.exceptions.not_found_exception import NotFoundException
+from ...data.exceptions.invalid_data_exception import InvalidDataException
+from ...data.http_polling_event_listener import HttpPollingEventListener
+
+TOKEN_LENGTH = 36
+
+
+class SessionsApiHandler(ApiHandler):
+ def __init__(
+ self,
+ sessions_manager,
+ results_manager,
+ event_dispatcher,
+ web_root,
+ read_sessions_enabled
+ ):
+ super().__init__(web_root)
+ self._sessions_manager = sessions_manager
+ self._results_manager = results_manager
+ self._event_dispatcher = event_dispatcher
+ self._read_sessions_enabled = read_sessions_enabled
+
+ def create_session(self, body, headers):
+ try:
+ config = {}
+ body = body.decode("utf-8")
+ if body != "":
+ config = json.loads(body)
+ tests = {}
+ if "tests" in config:
+ tests = config["tests"]
+ test_types = None
+ if "types" in config:
+ test_types = config["types"]
+ timeouts = {}
+ if "timeouts" in config:
+ timeouts = config["timeouts"]
+ reference_tokens = []
+ if "reference_tokens" in config:
+ reference_tokens = config["reference_tokens"]
+ user_agent = headers[b"user-agent"].decode("utf-8")
+ labels = []
+ if "labels" in config:
+ labels = config["labels"]
+ expiration_date = None
+ if "expiration_date" in config:
+ expiration_date = config["expiration_date"]
+ type = None
+ if "type" in config:
+ type = config["type"]
+
+ session = self._sessions_manager.create_session(
+ tests,
+ test_types,
+ timeouts,
+ reference_tokens,
+ user_agent,
+ labels,
+ expiration_date,
+ type
+ )
+
+ return {
+ "format": "application/json",
+ "data": {"token": session.token}
+ }
+
+ except InvalidDataException:
+ self.handle_exception("Failed to create session")
+ return {
+ "format": "application/json",
+ "data": {"error": "Invalid input data!"},
+ "status": 400
+ }
+
+ except Exception:
+ self.handle_exception("Failed to create session")
+ return {"status": 500}
+
+ def read_session(self, token):
+ try:
+
+ session = self._sessions_manager.read_session(token)
+ if session is None:
+ return {"status": 404}
+
+ data = serialize_session(session)
+
+ return {
+ "format": "application/json",
+ "data": {
+ "token": data["token"],
+ "tests": data["tests"],
+ "types": data["types"],
+ "timeouts": data["timeouts"],
+ "reference_tokens": data["reference_tokens"],
+ "user_agent": data["user_agent"],
+ "browser": data["browser"],
+ "is_public": data["is_public"],
+ "date_created": data["date_created"],
+ "labels": data["labels"]
+ }
+ }
+ except Exception:
+ self.handle_exception("Failed to read session")
+ return {"status": 500}
+
+ def read_sessions(self, query_parameters, uri_path):
+ try:
+ index = 0
+ if "index" in query_parameters:
+ index = int(query_parameters["index"])
+ count = 10
+ if "count" in query_parameters:
+ count = int(query_parameters["count"])
+ expand = []
+ if "expand" in query_parameters:
+ expand = query_parameters["expand"].split(",")
+
+ session_tokens = self._sessions_manager.read_sessions(index=index, count=count)
+ total_sessions = self._sessions_manager.get_total_sessions()
+
+ embedded = {}
+
+ for relation in expand:
+ if relation == "configuration":
+ configurations = []
+ for token in session_tokens:
+ result = self.read_session(token)
+ if "status" in result and result["status"] != 200:
+ continue
+ configurations.append(result["data"])
+ embedded["configuration"] = configurations
+
+ if relation == "status":
+ statuses = []
+ for token in session_tokens:
+ result = self.read_session_status(token)
+ if "status" in result and result["status"] != 200:
+ continue
+ statuses.append(result["data"])
+ embedded["status"] = statuses
+
+ uris = {
+ "self": uri_path,
+ "configuration": self._web_root + "api/sessions/{token}",
+ "status": self._web_root + "api/sessions/{token}/status"
+ }
+
+ data = self.create_hal_list(session_tokens, uris, index, count, total=total_sessions)
+
+ if len(embedded) > 0:
+ data["_embedded"] = embedded
+
+ return {
+ "format": "application/json",
+ "data": data
+ }
+ except Exception:
+            self.handle_exception("Failed to read sessions")
+ return {"status": 500}
+
+ def read_session_status(self, token):
+ try:
+ session = self._sessions_manager.read_session_status(token)
+ if session is None:
+ return {"status": 404}
+
+ data = serialize_session(session)
+
+ return {
+ "format": "application/json",
+ "data": {
+ "token": data["token"],
+ "status": data["status"],
+ "date_started": data["date_started"],
+ "date_finished": data["date_finished"],
+ "expiration_date": data["expiration_date"]
+ }
+ }
+ except Exception:
+ self.handle_exception("Failed to read session status")
+ return {"status": 500}
+
+ def read_public_sessions(self, request, response):
+ try:
+ session_tokens = self._sessions_manager.read_public_sessions()
+
+ self.send_json(session_tokens, response)
+ except Exception:
+ self.handle_exception("Failed to read public sessions")
+ response.status = 500
+
+ def update_session_configuration(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ config = {}
+ body = request.body.decode("utf-8")
+ if body != "":
+ config = json.loads(body)
+
+ tests = {}
+ if "tests" in config:
+ tests = config["tests"]
+ test_types = None
+ if "types" in config:
+ test_types = config["types"]
+ timeouts = {}
+ if "timeouts" in config:
+ timeouts = config["timeouts"]
+ reference_tokens = []
+ if "reference_tokens" in config:
+ reference_tokens = config["reference_tokens"]
+ type = None
+ if "type" in config:
+ type = config["type"]
+
+ self._sessions_manager.update_session_configuration(
+ token,
+ tests,
+ test_types,
+ timeouts,
+ reference_tokens,
+ type
+ )
+ except NotFoundException:
+ self.handle_exception("Failed to update session configuration")
+ response.status = 404
+ except Exception:
+ self.handle_exception("Failed to update session configuration")
+ response.status = 500
+
+ def update_labels(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+            # the token is the third segment of /api/sessions/<token>/labels
+ token = uri_parts[2]
+ body = request.body.decode("utf-8")
+ labels = None
+ if body != "":
+ labels = json.loads(body)
+ if "labels" in labels:
+ labels = labels["labels"]
+
+ self._sessions_manager.update_labels(token=token, labels=labels)
+ except Exception:
+ self.handle_exception("Failed to update labels")
+ response.status = 500
+
+ def delete_session(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ session = self._sessions_manager.read_session(token)
+ if session is None:
+ response.status = 404
+ return
+
+ self._sessions_manager.delete_session(token)
+ self._results_manager.delete_results(token)
+ except Exception:
+ self.handle_exception("Failed to delete session")
+ response.status = 500
+
+ def start_session(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ self._sessions_manager.start_session(token)
+ except Exception:
+ self.handle_exception("Failed to start session")
+ response.status = 500
+
+ def pause_session(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ self._sessions_manager.pause_session(token)
+ except Exception:
+ self.handle_exception("Failed to pause session")
+ response.status = 500
+
+ def stop_session(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ self._sessions_manager.stop_session(token)
+ except Exception:
+ self.handle_exception("Failed to stop session")
+ response.status = 500
+
+ def resume_session(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ resume_token = None
+ body = request.body.decode("utf-8")
+ if body != "":
+ resume_token = json.loads(body)["resume_token"]
+
+ self._sessions_manager.resume_session(token, resume_token)
+ except Exception:
+ self.handle_exception("Failed to resume session")
+ response.status = 500
+
+ def find_session(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ fragment = uri_parts[2]
+ token = self._sessions_manager.find_token(fragment)
+ if token is None:
+ response.status = 404
+ return
+ self.send_json({"token": token}, response)
+ except Exception:
+ self.handle_exception("Failed to find session")
+ response.status = 500
+
+ def register_event_listener(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ query_parameters = self.parse_query_parameters(request)
+ last_event_number = None
+ if ("last_event" in query_parameters):
+ last_event_number = int(query_parameters["last_event"])
+
+ event = threading.Event()
+ http_polling_event_listener = HttpPollingEventListener(token, event)
+ event_listener_token = self._event_dispatcher.add_event_listener(http_polling_event_listener, last_event_number)
+
+ event.wait()
+
+ message = http_polling_event_listener.message
+ self.send_json(data=message, response=response)
+ self._event_dispatcher.remove_event_listener(event_listener_token)
+ except Exception:
+ self.handle_exception("Failed to register event listener")
+ response.status = 500
+
+ def push_event(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+ message = None
+ body = request.body.decode("utf-8")
+ if body != "":
+ message = json.loads(body)
+
+ self._event_dispatcher.dispatch_event(
+ token,
+ message["type"],
+ message["data"])
+ except Exception:
+ self.handle_exception("Failed to push session event")
+
+ def handle_request(self, request, response):
+ method = request.method
+ uri_parts = self.parse_uri(request)
+ body = request.body
+ headers = request.headers
+ query_parameters = self.parse_query_parameters(request)
+ uri_path = request.url_parts.path
+
+ result = None
+ # /api/sessions
+ if len(uri_parts) == 2:
+ if method == "POST":
+ result = self.create_session(body, headers)
+ if method == "GET":
+ if self._read_sessions_enabled:
+ result = self.read_sessions(query_parameters, uri_path)
+
+ # /api/sessions/<token>
+ if len(uri_parts) == 3:
+ function = uri_parts[2]
+ if method == "GET":
+ if function == "public":
+ self.read_public_sessions(request, response)
+ return
+ if len(function) != TOKEN_LENGTH:
+ self.find_session(request, response)
+ return
+ result = self.read_session(token=uri_parts[2])
+ if method == "PUT":
+ self.update_session_configuration(request, response)
+ return
+ if method == "DELETE":
+ self.delete_session(request, response)
+ return
+
+ # /api/sessions/<token>/<function>
+ if len(uri_parts) == 4:
+ function = uri_parts[3]
+ if method == "GET":
+ if function == "status":
+ result = self.read_session_status(token=uri_parts[2])
+ if function == "events":
+ self.register_event_listener(request, response)
+ return
+ if method == "POST":
+ if function == "start":
+ self.start_session(request, response)
+ return
+ if function == "pause":
+ self.pause_session(request, response)
+ return
+ if function == "stop":
+ self.stop_session(request, response)
+ return
+ if function == "resume":
+ self.resume_session(request, response)
+ return
+ if function == "events":
+ self.push_event(request, response)
+ return
+ if method == "PUT":
+ if function == "labels":
+ self.update_labels(request, response)
+ return
+
+ if result is None:
+ response.status = 404
+ return
+
+ format = None
+ if "format" in result:
+ format = result["format"]
+ if format == "application/json":
+ data = None
+ if "data" in result:
+ data = result["data"]
+ status = 200
+ if "status" in result:
+ status = result["status"]
+ self.send_json(data, response, status)
+ return
+
+ status = 404
+ if "status" in result:
+ status = result["status"]
+ response.status = status
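
handle_request above wires up the whole session lifecycle: POST /api/sessions creates a session, POST /api/sessions/<token>/start, .../pause, .../stop and .../resume drive its state, GET .../status reads it back, and DELETE /api/sessions/<token> removes it together with its results. A minimal lifecycle sketch against a local server (the base URL is a placeholder assumption matching the Postman environment later in this patch):

    # Minimal sketch of the session lifecycle exposed above; the base URL is
    # a placeholder assumption, not a value taken from this patch.
    import json
    import urllib.request

    BASE = "http://web-platform.test:8000/_wave/api"

    def call(method, path, body=None):
        data = json.dumps(body).encode("utf-8") if body is not None else None
        req = urllib.request.Request(BASE + path, data=data, method=method,
                                     headers={"Content-Type": "application/json"})
        with urllib.request.urlopen(req) as resp:
            raw = resp.read()
            return json.loads(raw) if raw else None

    token = call("POST", "/sessions", {"types": ["automatic"]})["token"]
    call("POST", f"/sessions/{token}/start")
    print(call("GET", f"/sessions/{token}/status")["status"])
    call("DELETE", f"/sessions/{token}")
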
diff --git a/testing/web-platform/tests/tools/wave/network/api/tests_api_handler.py b/testing/web-platform/tests/tools/wave/network/api/tests_api_handler.py
new file mode 100644
index 0000000000..3803583771
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/network/api/tests_api_handler.py
@@ -0,0 +1,298 @@
+# mypy: allow-untyped-defs
+
+import json
+
+from urllib.parse import urlunsplit
+
+from .api_handler import ApiHandler
+from ...utils.serializer import serialize_session
+from ...data.session import PAUSED, COMPLETED, ABORTED, PENDING, RUNNING
+
+DEFAULT_LAST_COMPLETED_TESTS_COUNT = 5
+DEFAULT_LAST_COMPLETED_TESTS_STATUS = ["ALL"]
+
+EXECUTION_MODE_AUTO = "auto"
+EXECUTION_MODE_MANUAL = "manual"
+EXECUTION_MODE_PROGRAMMATIC = "programmatic"
+
+
+class TestsApiHandler(ApiHandler):
+ def __init__(
+ self,
+ wpt_port,
+ wpt_ssl_port,
+ tests_manager,
+ sessions_manager,
+ hostname,
+ web_root,
+ test_loader
+ ):
+ super().__init__(web_root)
+ self._tests_manager = tests_manager
+ self._sessions_manager = sessions_manager
+ self._wpt_port = wpt_port
+ self._wpt_ssl_port = wpt_ssl_port
+ self._hostname = hostname
+ self._web_root = web_root
+ self._test_loader = test_loader
+
+ def read_tests(self, response):
+ tests = self._tests_manager.read_tests()
+ self.send_json(tests, response)
+
+ def read_session_tests(self, request, response):
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+ session = self._sessions_manager.read_session(token)
+
+ if session is None:
+ response.status = 404
+ return
+
+ data = serialize_session(session)
+ tests = {
+ "token": token,
+ "pending_tests": data["pending_tests"],
+ "running_tests": data["running_tests"]
+ }
+ self.send_json(tests, response)
+
+ def read_next_test(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ hostname = self._hostname
+
+ session = self._sessions_manager.read_session(token)
+ if session is None:
+ response.status = 404
+ return
+
+ if session.status == PAUSED:
+ url = self._generate_wave_url(
+ hostname=hostname,
+ uri="pause.html",
+ token=token
+ )
+ self.send_json({"next_test": url}, response)
+ return
+ if session.status == COMPLETED or session.status == ABORTED:
+ url = self._generate_wave_url(
+ hostname=hostname,
+ uri="finish.html",
+ token=token
+ )
+ self.send_json({"next_test": url}, response)
+ return
+ if session.status == PENDING:
+ url = self._generate_wave_url(
+ hostname=hostname,
+ uri="newsession.html",
+ token=token
+ )
+ self.send_json({"next_test": url}, response)
+ return
+
+ test = self._tests_manager.next_test(session)
+
+ if test is None:
+ if session.status != RUNNING:
+ return
+ url = self._generate_wave_url(
+ hostname=hostname,
+ uri="finish.html",
+ token=token
+ )
+ self.send_json({"next_test": url}, response)
+ self._sessions_manager.complete_session(token)
+ return
+
+ test_timeout = self._tests_manager.get_test_timeout(
+ test=test, session=session)
+
+ test = self._sessions_manager.get_test_path_with_query(test, session)
+ url = self._generate_test_url(
+ test=test,
+ token=token,
+ test_timeout=test_timeout,
+ hostname=hostname)
+
+ self.send_json({
+ "next_test": url
+ }, response)
+ except Exception:
+ self.handle_exception("Failed to read next test")
+ response.status = 500
+
+ def read_last_completed(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+ query = self.parse_query_parameters(request)
+ count = None
+ if "count" in query:
+ count = query["count"]
+ else:
+ count = DEFAULT_LAST_COMPLETED_TESTS_COUNT
+
+ status = None
+ if "status" in query:
+ status = query["status"].split(",")
+ else:
+ status = DEFAULT_LAST_COMPLETED_TESTS_STATUS
+
+ completed_tests = self._tests_manager.read_last_completed_tests(
+ token, count)
+ tests = {}
+ for one_status in status:
+ one_status = one_status.lower()
+ if one_status == "pass":
+ tests["pass"] = completed_tests["pass"]
+ continue
+ if one_status == "fail":
+ tests["fail"] = completed_tests["fail"]
+ continue
+ if one_status == "timeout":
+ tests["timeout"] = completed_tests["timeout"]
+ continue
+ if one_status == "all":
+ tests["pass"] = completed_tests["pass"]
+ tests["fail"] = completed_tests["fail"]
+ tests["timeout"] = completed_tests["timeout"]
+ break
+ self.send_json(data=tests, response=response)
+ except Exception:
+ self.handle_exception("Failed to read last completed tests")
+ response.status = 500
+
+ def read_malfunctioning(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+ tm = self._tests_manager
+ malfunctioning_tests = tm.read_malfunctioning_tests(token)
+
+ self.send_json(data=malfunctioning_tests, response=response)
+ except Exception:
+ self.handle_exception("Failed to read malfunctioning tests")
+ response.status = 500
+
+ def update_malfunctioning(self, request, response):
+ try:
+ uri_parts = self.parse_uri(request)
+ token = uri_parts[2]
+
+ data = None
+ body = request.body.decode("utf-8")
+ if body != "":
+ data = json.loads(body)
+
+ self._tests_manager.update_malfunctioning_tests(token, data)
+ except Exception:
+ self.handle_exception("Failed to update malfunctioning tests")
+ response.status = 500
+
+ def read_available_apis(self, request, response):
+ try:
+ apis = self._test_loader.get_apis()
+ self.send_json(apis, response)
+ except Exception:
+ self.handle_exception("Failed to read available APIs")
+ response.status = 500
+
+ def handle_request(self, request, response):
+ method = request.method
+ uri_parts = self.parse_uri(request)
+
+ # /api/tests
+ if len(uri_parts) == 2:
+ if method == "GET":
+ self.read_tests(response)
+ return
+
+ # /api/tests/<token>
+ if len(uri_parts) == 3:
+ if method == "GET":
+ if uri_parts[2] == "apis":
+ self.read_available_apis(request, response)
+ return
+ self.read_session_tests(request, response)
+ return
+
+ # /api/tests/<token>/<function>
+ if len(uri_parts) == 4:
+ function = uri_parts[3]
+ if method == "GET":
+ if function == "next":
+ self.read_next_test(request, response)
+ return
+ if function == "last_completed":
+ self.read_last_completed(request, response)
+ return
+ if function == "malfunctioning":
+ self.read_malfunctioning(request, response)
+ return
+ if method == "PUT":
+ if function == "malfunctioning":
+ self.update_malfunctioning(request, response)
+ return
+
+ response.status = 404
+
+ def _generate_wave_url(self, hostname, uri, token):
+ if self._web_root is not None:
+ uri = self._web_root + uri
+
+ return self._generate_url(
+ hostname=hostname,
+ uri=uri,
+ port=self._wpt_port,
+ query="token=" + token
+ )
+
+ def _generate_test_url(self, hostname, test, token, test_timeout):
+ protocol = "http"
+ port = self._wpt_port
+
+ if "https" in test:
+ protocol = "https"
+ port = self._wpt_ssl_port
+
+ test_query = ""
+ split = test.split("?")
+ if len(split) > 1:
+ test = split[0]
+ test_query = split[1]
+
+ query = "token={}&timeout={}&https_port={}&web_root={}&{}".format(
+ token,
+ test_timeout,
+ self._wpt_ssl_port,
+ self._web_root,
+ test_query
+ )
+
+ return self._generate_url(
+ protocol=protocol,
+ hostname=hostname,
+ port=port,
+ uri=test,
+ query=query
+ )
+
+ def _generate_url(self,
+ hostname,
+ port=None,
+ uri=None,
+ query=None,
+ protocol=None):
+ if port is None:
+ port = 80
+ if uri is None:
+ uri = "/"
+ if query is None:
+ query = ""
+ if protocol is None:
+ protocol = "http"
+ return urlunsplit([protocol, f"{hostname}:{port}", uri, query, ''])
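
_generate_url assembles every redirect target with urllib.parse.urlunsplit, so the protocol, host and port, test path and the token/timeout query computed above end up as one absolute URL. In isolation (all values below are placeholders):

    # Standalone illustration of the urlunsplit call used by _generate_url;
    # hostname, port, path and query values are placeholders.
    from urllib.parse import urlunsplit

    url = urlunsplit([
        "https",                       # protocol
        "web-platform.test:8443",      # hostname:port
        "/dom/historical.html",        # test path
        "token=abc123&timeout=60000",  # query string
        "",                            # no fragment
    ])
    # -> "https://web-platform.test:8443/dom/historical.html?token=abc123&timeout=60000"
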
diff --git a/testing/web-platform/tests/tools/wave/network/http_handler.py b/testing/web-platform/tests/tools/wave/network/http_handler.py
new file mode 100644
index 0000000000..b76f711cf1
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/network/http_handler.py
@@ -0,0 +1,122 @@
+# mypy: allow-untyped-defs
+
+import http.client as httplib
+import sys
+import logging
+import traceback
+
+
+logger = logging.getLogger("wave-api-handler")
+
+
+class HttpHandler:
+ def __init__(
+ self,
+ static_handler,
+ sessions_api_handler,
+ tests_api_handler,
+ results_api_handler,
+ devices_api_handler,
+ general_api_handler,
+ http_port,
+ web_root
+ ):
+ self.static_handler = static_handler
+ self.sessions_api_handler = sessions_api_handler
+ self.tests_api_handler = tests_api_handler
+ self.results_api_handler = results_api_handler
+ self.general_api_handler = general_api_handler
+ self.devices_api_handler = devices_api_handler
+ self._http_port = http_port
+ self._web_root = web_root
+
+ def handle_request(self, request, response):
+ response.headers = [
+ ("Access-Control-Allow-Origin", "*"),
+ ("Access-Control-Allow-Headers", "*"),
+ ("Access-Control-Allow-Methods", "*")
+ ]
+ if request.method == "OPTIONS":
+ return
+
+ path = self._remove_web_root(request.request_path)
+
+ is_api_call = False
+ for index, part in enumerate(path.split("/")):
+ if index > 2:
+ break
+ if part != "api":
+ continue
+
+ is_api_call = True
+
+        if is_api_call:
+ if request.url_parts.scheme == "https":
+ self._proxy(request, response)
+ return
+ self.handle_api(request, response)
+ else:
+ self.handle_static_file(request, response)
+
+ def handle_api(self, request, response):
+ path = self._remove_web_root(request.request_path)
+ path = path.split("?")[0]
+ api_name = path.split("/")[1]
+
+ if api_name is None:
+ return
+
+ if api_name == "sessions":
+ self.sessions_api_handler.handle_request(request, response)
+ return
+ if api_name == "tests":
+ self.tests_api_handler.handle_request(request, response)
+ return
+ if api_name == "results":
+ self.results_api_handler.handle_request(request, response)
+ return
+ if api_name == "devices":
+ self.devices_api_handler.handle_request(request, response)
+ return
+
+ self.general_api_handler.handle_request(request, response)
+
+ def handle_static_file(self, request, response):
+ self.static_handler.handle_request(request, response)
+
+ def _remove_web_root(self, path):
+ if self._web_root is not None:
+ path = path[len(self._web_root):]
+ return path
+
+ def _proxy(self, request, response):
+ host = 'localhost'
+ port = int(self._http_port)
+ uri = request.url_parts.path
+ uri = uri + "?" + request.url_parts.query
+ content_length = request.headers.get('Content-Length')
+ data = ""
+ if content_length is not None:
+ data = request.raw_input.read(int(content_length))
+ method = request.method
+
+ headers = {}
+ for key in request.headers:
+ value = request.headers[key]
+ headers[key.decode("utf-8")] = value.decode("utf-8")
+
+ try:
+ proxy_connection = httplib.HTTPConnection(host, port)
+ proxy_connection.request(method, uri, data, headers)
+ proxy_response = proxy_connection.getresponse()
+ response.content = proxy_response.read()
+ response.headers = proxy_response.getheaders()
+ response.status = proxy_response.status
+
+ except OSError:
+ message = "Failed to perform proxy request"
+ info = sys.exc_info()
+ traceback.print_tb(info[2])
+ logger.error(f"{message}: {info[0].__name__}: {info[1].args[0]}")
+ response.status = 500
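
_proxy above replays API calls that arrived over HTTPS against the plain-HTTP port on localhost: it copies method, body and headers into an http.client request and mirrors the upstream status, headers and body back into the wptserve response. Stripped of the wptserve plumbing, the forwarding is the standard-library pattern below (host, port, path and headers are placeholders):

    # Standalone sketch of the http.client forwarding used by _proxy above;
    # host, port, path and headers are placeholder values.
    import http.client

    conn = http.client.HTTPConnection("localhost", 8000)
    conn.request("GET", "/_wave/api/sessions/public", body=None,
                 headers={"Accept": "application/json"})
    resp = conn.getresponse()
    print(resp.status)        # upstream status code
    print(resp.getheaders())  # list of (name, value) tuples
    print(resp.read())        # raw response body
    conn.close()
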
diff --git a/testing/web-platform/tests/tools/wave/network/static_handler.py b/testing/web-platform/tests/tools/wave/network/static_handler.py
new file mode 100644
index 0000000000..230af8da1a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/network/static_handler.py
@@ -0,0 +1,60 @@
+# mypy: allow-untyped-defs
+
+import os
+
+
+class StaticHandler:
+ def __init__(self, web_root, http_port, https_port):
+ self.static_dir = os.path.join(
+ os.getcwd(), "tools/wave/www")
+ self._web_root = web_root
+ self._http_port = http_port
+ self._https_port = https_port
+
+ def handle_request(self, request, response):
+ file_path = request.request_path
+
+ if self._web_root is not None:
+ if not file_path.startswith(self._web_root):
+ response.status = 404
+ return
+ file_path = file_path[len(self._web_root):]
+
+ if file_path == "":
+ file_path = "index.html"
+
+ file_path = file_path.split("?")[0]
+ file_path = os.path.join(self.static_dir, file_path)
+
+ if not os.path.exists(file_path):
+ response.status = 404
+ return
+
+ headers = []
+
+ content_types = {
+ "html": "text/html",
+ "js": "text/javascript",
+ "css": "text/css",
+ "jpg": "image/jpeg",
+ "jpeg": "image/jpeg",
+ "ttf": "font/ttf",
+ "woff": "font/woff",
+ "woff2": "font/woff2"
+ }
+
+ headers.append(
+ ("Content-Type", content_types[file_path.split(".")[-1]]))
+
+ data = None
+ with open(file_path, "rb") as file:
+ data = file.read()
+
+ if file_path.split("/")[-1] == "wave-service.js":
+ data = data.decode("UTF-8")
+ data = data.replace("{{WEB_ROOT}}", str(self._web_root))
+ data = data.replace("{{HTTP_PORT}}", str(self._http_port))
+ data = data.replace("{{HTTPS_PORT}}", str(self._https_port))
+
+ response.content = data
+ response.headers = headers
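
Besides mapping extensions to content types and serving files from tools/wave/www, the handler rewrites wave-service.js on the fly, substituting the {{WEB_ROOT}}, {{HTTP_PORT}} and {{HTTPS_PORT}} placeholders with the configured values. The substitution on its own (example values are placeholders):

    # Standalone sketch of the placeholder substitution applied to
    # wave-service.js above; the web root and port values are placeholders.
    source = 'var WEB_ROOT = "{{WEB_ROOT}}"; var PORT = {{HTTP_PORT}};'
    for placeholder, value in (("{{WEB_ROOT}}", "/_wave/"),
                               ("{{HTTP_PORT}}", 8000),
                               ("{{HTTPS_PORT}}", 8443)):
        source = source.replace(placeholder, str(value))
    # -> 'var WEB_ROOT = "/_wave/"; var PORT = 8000;'
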
diff --git a/testing/web-platform/tests/tools/wave/package-lock.json b/testing/web-platform/tests/tools/wave/package-lock.json
new file mode 100644
index 0000000000..cb481388c2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/package-lock.json
@@ -0,0 +1,35 @@
+{
+ "version": "1.0.0",
+ "lockfileVersion": 1,
+ "requires": true,
+ "dependencies": {
+ "fs-extra": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz",
+ "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==",
+ "requires": {
+ "graceful-fs": "^4.1.2",
+ "jsonfile": "^4.0.0",
+ "universalify": "^0.1.0"
+ }
+ },
+ "graceful-fs": {
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz",
+ "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ=="
+ },
+ "jsonfile": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
+ "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
+ "requires": {
+ "graceful-fs": "^4.1.6"
+ }
+ },
+ "universalify": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
+ "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
+ }
+ }
+}
diff --git a/testing/web-platform/tests/tools/wave/package.json b/testing/web-platform/tests/tools/wave/package.json
new file mode 100644
index 0000000000..f365f7c238
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/package.json
@@ -0,0 +1,7 @@
+{
+ "version": "1.0.0",
+ "license": "MIT",
+ "dependencies": {
+ "fs-extra": "^7.0.1"
+ }
+}
diff --git a/testing/web-platform/tests/tools/wave/requirements.txt b/testing/web-platform/tests/tools/wave/requirements.txt
new file mode 100644
index 0000000000..5c0369b9f5
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/requirements.txt
@@ -0,0 +1,2 @@
+ua-parser==0.10.0
+python-dateutil==2.8.2
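
ua-parser and python-dateutil are the WAVE server's pinned runtime dependencies; the serialized sessions above carry user_agent, browser and date fields, which is the kind of data these libraries handle. A hedged usage sketch (the User-Agent string and timestamp are arbitrary examples, and the exact call sites are outside this hunk):

    # Hedged usage sketch for the two pinned dependencies; inputs are
    # arbitrary examples, not values taken from this patch.
    from ua_parser import user_agent_parser
    from dateutil import parser as date_parser

    ua = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 Chrome/80.0.3987.132 Safari/537.36"
    parsed = user_agent_parser.Parse(ua)
    print(parsed["user_agent"]["family"])  # e.g. "Chrome"

    print(date_parser.parse("2020-05-25T12:12:37.098Z"))  # timezone-aware datetime
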
diff --git a/testing/web-platform/tests/tools/wave/resources/testharnessreport.js b/testing/web-platform/tests/tools/wave/resources/testharnessreport.js
new file mode 100644
index 0000000000..394b4bf3a8
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/resources/testharnessreport.js
@@ -0,0 +1,284 @@
+/* global add_completion_callback */
+/* global setup */
+
+/*
+ * This file is intended for vendors to implement code needed to integrate
+ * testharness.js tests with their own test systems.
+ *
+ * Typically test system integration will attach callbacks when each test has
+ * run, using add_result_callback(callback(test)), or when the whole test file
+ * has completed, using
+ * add_completion_callback(callback(tests, harness_status)).
+ *
+ * For more documentation about the callback functions and the
+ * parameters they are called with see testharness.js
+ */
+
+/*
+ * If the query parameter token is present, the test was loaded by the WAVE
+ * test runner and its results need to be reported to the server, using the
+ * provided token to identify the associated session.
+ */
+if (location.search && location.search.indexOf("token=") != -1) {
+ var __WAVE__HOSTNAME = location.hostname;
+ var __WAVE__PORT = location.port;
+ var __WAVE__PROTOCOL = location.protocol.replace(/:/, "");
+ var __WAVE__QUERY = location.search;
+ var queryParameters = {};
+ var keysAndValues = location.search.replace("?", "").split("&");
+ for (var i = 0; i < keysAndValues.length; i++) {
+ var key = keysAndValues[i].split("=")[0];
+ var value = keysAndValues[i].split("=")[1];
+ queryParameters[key] = value;
+ }
+ var __HTTPS_PORT = parseInt(queryParameters["https_port"] || 443);
+ var __WAVE__TIMEOUT = parseInt(queryParameters["timeout"] || 65000);
+ var __WAVE__WEB_ROOT = queryParameters["web_root"] || "/_wave/";
+ var __WAVE__TOKEN = queryParameters["token"] || null;
+ var __WAVE__TEST = location.pathname;
+ var nextUrl = null;
+ var resultSent = false;
+ var screenConsole;
+
+ try {
+ var documentRoot = document.body ? document.body : document.documentElement;
+ documentRoot.style["background-color"] = "#FFF";
+ window.open = function () {
+ logToConsole(
+        "window.open() is overridden in testharnessreport.js and has no effect"
+ );
+ var dummyWin = {
+ close: function () {
+ logToConsole(
+            "dummyWindow.close() is overridden in testharnessreport.js and has no effect"
+ );
+ },
+ };
+ return dummyWin;
+ };
+ window.close = function () {
+ logToConsole(
+      "window.close() is overridden in testharnessreport.js and has no effect"
+ );
+ };
+ } catch (err) {}
+
+ setTimeout(function () {
+ loadNext();
+ }, __WAVE__TIMEOUT);
+
+ function logToConsole() {
+ var text = "";
+ for (var i = 0; i < arguments.length; i++) {
+ text += arguments[i] + " ";
+ }
+ if (console && console.log) {
+ console.log(text);
+ }
+ if (screenConsole) {
+ try {
+ text = text.replace(/ /gm, "&nbsp;");
+ text = text.replace(/\n/gm, "<br/>");
+ screenConsole.innerHTML += "<br/>" + text;
+ } catch (error) {
+ screenConsole.innerText += "\n" + text;
+ }
+ }
+ }
+
+ function dump_and_report_test_results(tests, status) {
+ var results_element = document.createElement("script");
+ results_element.type = "text/json";
+ results_element.id = "__testharness__results__";
+ var test_results = tests.map(function (x) {
+ return {
+ name: x.name,
+ status: x.status,
+ message: x.message,
+ stack: x.stack,
+ };
+ });
+ var data = {
+ test: window.location.href,
+ tests: test_results,
+ status: status.status,
+ message: status.message,
+ stack: status.stack,
+ };
+ results_element.textContent = JSON.stringify(data);
+
+ // To avoid a HierarchyRequestError with XML documents, ensure that 'results_element'
+ // is inserted at a location that results in a valid document.
+ var parent = document.body
+ ? document.body // <body> is required in XHTML documents
+ : document.documentElement; // fallback for optional <body> in HTML5, SVG, etc.
+
+ parent.appendChild(results_element);
+
+ screenConsole = document.getElementById("console");
+ if (!screenConsole) {
+ screenConsole = document.createElement("div");
+ screenConsole.setAttribute("id", "console");
+ screenConsole.setAttribute("style", "font-family: monospace; padding: 5px");
+ parent.appendChild(screenConsole);
+ }
+ window.onerror = logToConsole;
+
+ finishWptTest(data);
+ }
+
+ function finishWptTest(data) {
+ logToConsole("Creating result ...");
+ data.test = __WAVE__TEST;
+ createResult(
+ __WAVE__TOKEN,
+ data,
+ function () {
+ logToConsole("Result created.");
+ loadNext();
+ },
+ function () {
+ logToConsole("Failed to create result.");
+ logToConsole("Trying alternative method ...");
+ createResultAlt(__WAVE__TOKEN, data);
+ }
+ );
+ }
+
+ function loadNext() {
+ logToConsole("Loading next test ...");
+ readNextTest(
+ __WAVE__TOKEN,
+ function (url) {
+ logToConsole("Redirecting to " + url);
+ location.href = url;
+ },
+ function () {
+ logToConsole("Could not load next test.");
+ logToConsole("Trying alternative method ...");
+ readNextAlt(__WAVE__TOKEN);
+ }
+ );
+ }
+
+ function readNextTest(token, onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/tests/" + token + "/next",
+ null,
+ null,
+ function (response) {
+ var jsonObject = JSON.parse(response);
+ onSuccess(jsonObject.next_test);
+ },
+ onError
+ );
+ }
+
+ function readNextAlt(token) {
+ location.href =
+ location.protocol +
+ "//" +
+ location.host +
+ getWaveUrl("next.html?token=" + token);
+ }
+
+ function createResult(token, result, onSuccess, onError) {
+ sendRequest(
+ "POST",
+ "api/results/" + token,
+ {
+ "Content-Type": "application/json",
+ },
+ JSON.stringify(result),
+ function () {
+ onSuccess();
+ },
+ onError
+ );
+ }
+
+ function createResultAlt(token, result) {
+ location.href =
+ __WAVE__WEB_ROOT +
+ "submitresult.html" +
+ "?token=" +
+ token +
+ "&result=" +
+ encodeURIComponent(JSON.stringify(result));
+ }
+
+ function sendRequest(method, uri, headers, data, onSuccess, onError) {
+ var url = getWaveUrl(uri);
+ url = location.protocol + "//" + location.host + url;
+ var xhr = new XMLHttpRequest();
+ xhr.addEventListener("load", function () {
+ onSuccess(xhr.response);
+ });
+ xhr.addEventListener("error", function () {
+ if (onError) onError();
+ });
+ logToConsole("Sending", method, 'request to "' + url + '"');
+ xhr.open(method, url, true);
+ if (headers) {
+ for (var header in headers) {
+ xhr.setRequestHeader(header, headers[header]);
+ }
+ }
+ xhr.send(data);
+ }
+
+ function getWaveUrl(uri) {
+ var url = __WAVE__WEB_ROOT + uri;
+ return url;
+ }
+
+ add_completion_callback(dump_and_report_test_results);
+} else {
+ function dump_test_results(tests, status) {
+ var results_element = document.createElement("script");
+ results_element.type = "text/json";
+ results_element.id = "__testharness__results__";
+ var test_results = tests.map(function (x) {
+ return {
+ name: x.name,
+ status: x.status,
+ message: x.message,
+ stack: x.stack,
+ };
+ });
+ var data = {
+ test: window.location.href,
+ tests: test_results,
+ status: status.status,
+ message: status.message,
+ stack: status.stack,
+ };
+ results_element.textContent = JSON.stringify(data);
+
+ // To avoid a HierarchyRequestError with XML documents, ensure that 'results_element'
+ // is inserted at a location that results in a valid document.
+ var parent = document.body
+ ? document.body // <body> is required in XHTML documents
+ : document.documentElement; // fallback for optional <body> in HTML5, SVG, etc.
+
+ parent.appendChild(results_element);
+ }
+
+ add_completion_callback(dump_test_results);
+
+ /* If the parent window has a testharness_properties object,
+ * we use this to provide the test settings. This is used by the
+ * default in-browser runner to configure the timeout and the
+ * rendering of results
+ */
+ try {
+ if (window.opener && "testharness_properties" in window.opener) {
+ /* If we pass the testharness_properties object as-is here without
+ * JSON stringifying and reparsing it, IE fails & emits the message
+ * "Could not complete the operation due to error 80700019".
+ */
+ setup(JSON.parse(JSON.stringify(window.opener.testharness_properties)));
+ }
+ } catch (e) {}
+}
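
When a token is present, createResult above uploads the harness outcome by POSTing the collected data to api/results/<token>. An equivalent request issued outside the browser would look roughly like this (host, port, web root, token and test path are placeholders):

    # Sketch of the result upload performed by createResult above; the URL
    # components and the test path are placeholder assumptions.
    import json
    import urllib.request

    TOKEN = "00000000-0000-0000-0000-000000000000"  # placeholder session token
    url = f"http://web-platform.test:8000/_wave/api/results/{TOKEN}"

    result = {
        "test": "/dom/historical.html",  # placeholder test path
        "status": "OK",
        "message": None,
        "stack": None,
        "tests": [
            {"name": "example subtest", "status": "PASS",
             "message": None, "stack": None},
        ],
    }
    req = urllib.request.Request(url, data=json.dumps(result).encode("utf-8"),
                                 headers={"Content-Type": "application/json"},
                                 method="POST")
    with urllib.request.urlopen(req) as resp:
        print(resp.status)
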
diff --git a/testing/web-platform/tests/tools/wave/test/WAVE Local.postman_environment.json b/testing/web-platform/tests/tools/wave/test/WAVE Local.postman_environment.json
new file mode 100644
index 0000000000..b1a6a089ab
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/test/WAVE Local.postman_environment.json
@@ -0,0 +1,34 @@
+{
+ "id": "37be8ec4-7855-4554-867e-7a5d2a4f99e6",
+ "name": "WAVE Local",
+ "values": [
+ {
+ "key": "host",
+ "value": "web-platform.test",
+ "enabled": true
+ },
+ {
+ "key": "port",
+ "value": "8000",
+ "enabled": true
+ },
+ {
+ "key": "protocol",
+ "value": "http",
+ "enabled": true
+ },
+ {
+ "key": "web_root",
+ "value": "_wave",
+ "enabled": true
+ },
+ {
+ "key": "device_timeout",
+ "value": "60000",
+ "enabled": true
+ }
+ ],
+ "_postman_variable_scope": "environment",
+ "_postman_exported_at": "2020-05-25T12:12:37.098Z",
+ "_postman_exported_using": "Postman/7.25.0"
+} \ No newline at end of file
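
These environment values are the building blocks for every request URL in the collection that follows, all of which take the form {{protocol}}://{{host}}:{{port}}/{{web_root}}/api/... Resolved against this environment they produce:

    # How the collection's URL template resolves with the environment above.
    env = {"protocol": "http", "host": "web-platform.test",
           "port": "8000", "web_root": "_wave"}
    base = "{protocol}://{host}:{port}/{web_root}/api".format(**env)
    # -> "http://web-platform.test:8000/_wave/api"
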
diff --git a/testing/web-platform/tests/tools/wave/test/WAVE Server REST API Tests.postman_collection.json b/testing/web-platform/tests/tools/wave/test/WAVE Server REST API Tests.postman_collection.json
new file mode 100644
index 0000000000..93cbedb504
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/test/WAVE Server REST API Tests.postman_collection.json
@@ -0,0 +1,9833 @@
+{
+ "info": {
+ "_postman_id": "ccd6117a-6d61-4617-a6a1-7115db4d4d92",
+ "name": "WAVE Server REST API Tests",
+ "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
+ },
+ "item": [
+ {
+ "name": "Read Available Tests",
+ "item": [
+ {
+ "name": "Read Available Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "051bef94-5544-4ddb-9d85-167677ebecb2",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var availableTests = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(typeof availableTests).to.equal(\"object\");",
+ " for (var api of Object.keys(availableTests)) {",
+ " pm.expect(availableTests[api]).to.be.an.instanceof(Array);",
+ " var apiRegExp = new RegExp(\"^/\" + api, \"i\");",
+ " for (var test of availableTests[api]) {",
+ " pm.expect(test).to.match(apiRegExp);",
+ " }",
+ " }",
+ "});",
+ "",
+ "var includedTests = [];",
+ "var excludedTests = [];",
+ "var specialTimeoutTest = \"\";",
+ "",
+ "var apis = Object.keys(availableTests);",
+ "for(var api of apis) {",
+ " if (availableTests[api].length > 50) {",
+ " var subDirs = availableTests[api].map(test => test.split(\"/\").filter(part => !!part).join(\"/\").split(\"/\")[1]).reduce((acc, curr) => acc.indexOf(curr) === -1 ? acc.concat([curr]) : acc, []);",
+ " if (subDirs.length > 2) {",
+ " includedTests.push(\"/\" + api);",
+ " excludedTests.push(\"/\" + api + \"/\" + subDirs[0]);",
+ " specialTimeoutTest = availableTests[api][availableTests[api].length - 1];",
+ " break;",
+ " }",
+ " ",
+ " }",
+ "}",
+ "",
+ "pm.globals.set(\"available_tests\", JSON.stringify(availableTests));",
+ "pm.globals.set(\"included_tests\", JSON.stringify(includedTests));",
+ "pm.globals.set(\"excluded_tests\", JSON.stringify(excludedTests));",
+ "pm.globals.set(\"special_timeout_test\", specialTimeoutTest.replace(\".\", \"\"));"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Create and Read Sessions",
+ "item": [
+ {
+ "name": "Start expiring session remove expiration date",
+ "item": [
+ {
+ "name": "Create Session With Expiration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a8bf3e41-a7df-4c6b-8a20-3a1e6d8a51d9",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "4370546f-b08c-4f77-9bd9-1cd14400665e",
+ "exec": [
+ "var expirationDate = Date.now() + 10000;",
+ "pm.globals.set(\"expiration_date\", expirationDate);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n\t\"expiration_date\": {{expiration_date}}\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "3dcb5b6c-9151-49f7-a1a6-74475927b304",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure contains expiration date\", function () {",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ "});",
+ "",
+ "var expirationDate = pm.globals.get(\"expiration_date\");",
+ "",
+ "pm.test(\"Expiration date is as specified\", function () {",
+ " pm.expect(Date.parse(jsonData.expiration_date)).to.equal(expirationDate);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "8d05578a-e2d6-41e9-a2a4-aa7d92bfbbce",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "4c5e3ba4-e27b-4341-8cf8-74ed047a8747",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure contains expiration date\", function () {",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ "});",
+ "",
+ "pm.test(\"Expiration date is null\", function () {",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Read Public Sessions",
+ "item": [
+ {
+ "name": "Read Public Sessions",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "1081afd8-a772-4565-b03d-b58f773cbb65",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Response is JSON Array\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.be.an.instanceof(Array);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/public",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "public"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Find Session",
+ "item": [
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "268e8a31-87bb-4ec5-81d5-87dc74096828",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Find Session Token",
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "625fcc2a-f7b1-403c-b5c5-56db7c5bcee5",
+ "exec": [
+ "const token = pm.globals.get(\"session_token\");",
+ "pm.globals.set(\"session_token_fragment\", token.split(\"-\").shift());"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "8e04212f-e259-413f-98ee-e366cd3adfdd",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const sessionToken = pm.globals.get(\"session_token\");",
+ "",
+ "pm.test(\"Found token is original token\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.token).to.equal(sessionToken);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token_fragment}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token_fragment}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Find Session Too Short Fragment",
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "6f8429e3-9c12-423c-baaf-8f8de0e4ea49",
+ "exec": [
+ "const token = pm.globals.get(\"session_token\");",
+ "pm.globals.set(\"session_token_fragment\", token.split(\"-\").shift());"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "7f6ef567-274e-4f9d-b7fc-50f8240547e9",
+ "exec": [
+ "pm.test(\"Status code is 404\", function () {",
+ " pm.response.to.have.status(404);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/1234567",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "1234567"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Read Next Test",
+ "item": [
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Invalid Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "11f5c620-dc3b-4a8c-8d2f-f9663025c79f",
+ "exec": [
+ "pm.test(\"Status code is 404\", function () {",
+ " pm.response.to.have.status(404);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session \\w Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "c67cde7e-7237-479c-a0c6-b84a533c3b1e",
+ "exec": [
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "24b38d64-5c24-415f-b6f6-f8d252d69ac8",
+ "exec": [
+ "var automaticTimeout = 120000;",
+ "var manualTimeout = 1000000;",
+ "var specialTimeout = 2000;",
+ "",
+ "pm.globals.set(\"automatic_timeout\", automaticTimeout);",
+ "pm.globals.set(\"manual_timeout\", manualTimeout);",
+ "pm.globals.set(\"special_timeout\", specialTimeout);",
+ "",
+ "const availableTests = JSON.parse(pm.globals.get(\"available_tests\"));",
+ "const test1 = availableTests[Object.keys(availableTests)[0]][0];",
+ "",
+ "pm.globals.set(\"single_test_1\", test1);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": [\"{{single_test_1}}\"]\n },\n \"types\": [\n \"automatic\",\n \"manual\"\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Pending Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "0806e4e0-bd1b-40ba-a7d5-74d41473a141",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const nextTest = jsonData.next_test;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "const web_root = pm.environment.get(\"web_root\");",
+ "",
+ "pm.test(\"Returned test is new session page\", function () {",
+ " pm.expect(test).to.equal(\"/\" + web_root + \"/newsession.html\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9b1f2dec-9949-49ff-b466-f602b11dde5d",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Running Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "7fea2977-5ccb-49ee-8f07-79a7f2245f9c",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const nextTest = jsonData.next_test;",
+ "const parameters = nextTest.split(\"?\")[1].split(\"&\");",
+ "let test = parameters.find(parameter => parameter.split(\"=\")[0] === \"test_url\").split(\"=\")[1];",
+ "test = decodeURIComponent(test);",
+ "test = \"/\" + test.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "const api = test.split(\"/\").filter(part => !!part)[0]",
+ "const availableTests = JSON.parse(pm.globals.get(\"available_tests\"));",
+ "",
+ "pm.test(\"Returned test is valid test\", function () {",
+ " pm.expect(availableTests).to.have.property(api);",
+ " pm.expect(availableTests[api]).to.contain(test)",
+ "});",
+ "",
+ "",
+ "setTimeout(function () {}, 1000);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "7cc27ee8-2f06-4e25-98fb-9872c6cf0d93",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"FAIL\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Completed Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "0a73a5eb-3edb-4d15-924d-260dd22bd831",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const nextTest = jsonData.next_test;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "const web_root = pm.environment.get(\"web_root\");",
+ "",
+ "pm.test(\"Returned test is new session page\", function () {",
+ " pm.expect(test).to.equal(\"/\" + web_root + \"/finish.html\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session \\w Configuration Copy",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "94877ec6-70ea-4c78-acb5-20060a535653",
+ "exec": [
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "dcc29092-b7a1-40fc-8359-ecc655dba482",
+ "exec": [
+ "var automaticTimeout = 120000;",
+ "var manualTimeout = 1000000;",
+ "var specialTimeout = 2000;",
+ "",
+ "pm.globals.set(\"automatic_timeout\", automaticTimeout);",
+ "pm.globals.set(\"manual_timeout\", manualTimeout);",
+ "pm.globals.set(\"special_timeout\", specialTimeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": {{included_tests}},\n \"exclude\": {{excluded_tests}}\n },\n \"types\": [\n \"automatic\",\n \"manual\"\n ],\n \"timeouts\": {\n \"automatic\": 1000\n },\n \"labels\": [\"label1\", \"label2\"]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "8f6e2bf2-6c1e-434e-83c5-9bcc57880692",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a0a80c05-a2ca-430a-8bed-46ad7978c684",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Aborted Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "00d823e0-2059-4cab-81d6-8d1de9e5b62a",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const nextTest = jsonData.next_test;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "const web_root = pm.environment.get(\"web_root\");",
+ "",
+ "pm.test(\"Returned test is new session page\", function () {",
+ " pm.expect(test).to.equal(\"/\" + web_root + \"/pause.html\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Stop Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "b8211c13-667d-4970-aedc-6398c25cc40c",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/stop",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "stop"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Aborted Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "8aad813d-53fe-49fd-bdfc-f16ce2233ae2",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const nextTest = jsonData.next_test;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "const web_root = pm.environment.get(\"web_root\");",
+ "",
+ "pm.test(\"Returned test is new session page\", function () {",
+ " pm.expect(test).to.equal(\"/\" + web_root + \"/finish.html\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Control Session",
+ "item": [
+ {
+ "name": "Setup",
+ "item": [
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "305cb915-8496-4577-b17a-e8189d66c3d1",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Pause Pending Session",
+ "item": [
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9d903165-9667-43b0-b210-a950d0fa1fa2",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "0307919b-6630-48ff-ac18-10c0a47ea254",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is pending\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"pending\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Start Pending Session",
+ "item": [
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f66408d5-5438-4d4f-9bfc-6d24b15e5a90",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "bae5542d-446b-4064-88ff-cc355a8f4f62",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "pm.test(\"Status is running\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"running\");",
+ "});",
+ "",
+ "pm.test(\"Start date is set\", function () {",
+ " pm.expect(Date.parse(jsonData.date_started)).to.be.below(Date.now());",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Start Running Session",
+ "item": [
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "3b386952-8a37-471a-9192-f28974d975a3",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "8e56d842-325b-4356-ae1a-2465e9efe188",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is running\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"running\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Pause Running Session",
+ "item": [
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a183cb0b-31f0-48f6-9c20-605a16488d7d",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f2610594-813a-4079-afae-1510486efab4",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is paused\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"paused\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Pause Paused Session",
+ "item": [
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "6b8ec98a-ad91-4d5c-aa07-6b3cc1255902",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "91a38eb8-a501-457f-a9f4-b63221c957f6",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is paused\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"paused\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Start Paused Session",
+ "item": [
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "6e3a8c64-04cb-4c4f-b23d-64655f6e4d22",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "dcf7f6f4-6eef-4a90-a63d-df70446eb209",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is running\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"running\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Stop Running Session",
+ "item": [
+ {
+ "name": "Stop Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "bee7a70d-af1f-4ad1-9c40-7abdbcc983ae",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/stop",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "stop"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "217ed267-1ddb-496a-a7c5-6e29bad97c60",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "pm.test(\"Status is aborted\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"aborted\");",
+ "});",
+ "",
+ "pm.test(\"Finish date is set\", function () {",
+ " pm.expect(Date.parse(jsonData.date_finished)).to.be.below(Date.now());",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Stop Aborted Session",
+ "item": [
+ {
+ "name": "Stop Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a255ea61-5c5a-4f0d-8a67-c244510a608c",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/stop",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "stop"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9cc7599b-d378-4b04-bada-9d76f6ca610f",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is aborted\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"aborted\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "81e44e3f-5d91-42a7-b0ab-4704eb121868",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "a44180cb-3b13-421b-b146-82901f2b45bd",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Start Aborted Session",
+ "item": [
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "47fb1fa9-4849-4ea9-b36e-fcec8decf62e",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "1923ced4-4361-4a4a-bf7c-4fcdf3df50f6",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is aborted\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"aborted\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "226043e5-1a81-4d36-aa7f-67b10bf407af",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "840bf970-0984-452e-9076-b6f66d0b4511",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Pause Aborted Session",
+ "item": [
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "88c4072f-538b-4d68-9d61-f0fb017e544c",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "2ae00eb3-49e8-4487-b27f-e7c7955fe4f4",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is aborted\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"aborted\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "70e0c783-c405-481c-b9c8-5c8bee392f97",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "85c45e3b-fae6-4a90-afd0-97678769e32c",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Setup",
+ "item": [
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "df4a00d1-d4f8-4b0b-892d-21528165a2cf",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "4fa59617-c922-4826-abc6-6d910e918ea4",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "d83f449d-4a2e-41c8-b2cd-5cf0cd1c404e",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Stop Pending Session",
+ "item": [
+ {
+ "name": "Stop Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "98da2b86-4f9f-47dd-afc9-22ce96e73e84",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/stop",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "stop"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "6d8a8202-c85d-4ada-b443-45108a372aea",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is aborted\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"aborted\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Setup",
+ "item": [
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "0f9df486-91cd-46fc-a711-5cf9bebb2706",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "80ee91fd-98ca-4b8e-83b6-32bf3e55a295",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "47f4d26a-84c2-4980-984d-dac0e95503e1",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "6bd9e85f-eb73-47be-83b0-01be77868899",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "43ebfadb-dc68-4c41-b34e-462e427dc2ef",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Stop Paused Session Copy",
+ "item": [
+ {
+ "name": "Stop Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "029ec84b-e41b-4744-8745-58c50e52bb11",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/stop",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "stop"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "071d757c-41aa-4b52-9ab4-6c2afd6ed2af",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is aborted\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"aborted\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "305720a0-2c84-43d3-8fe5-55f8fdf511d8",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "84e4ab04-5acc-4428-a81a-c0b0c53b5fd9",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Setup",
+ "item": [
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session One Test",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "b7a9d459-9b39-40b2-abdc-fd8fe8b21d61",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "fcbfb0e3-a10d-4ff8-afed-f5f3c9d52090",
+ "exec": [
+ "const availableTests = JSON.parse(pm.globals.get(\"available_tests\"));",
+ "const test = availableTests[Object.keys(availableTests)[0]][0]",
+ "",
+ "pm.globals.set(\"single_test\", test);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": [\"{{single_test}}\"]\n }\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "937ef61d-d6d6-4e24-bc3a-696a23cc95d6",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "29f0554b-66f5-4e26-80ff-39b3faba920a",
+ "exec": [
+ "const response = pm.response.json();",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
+ "const parameters = nextTest.split(\"?\")[1].split(\"&\");",
+ "let test = parameters.find(parameter => parameter.split(\"=\")[0] === \"test_url\").split(\"=\")[1];",
+ "test = decodeURIComponent(test);",
+ "test = \"/\" + test.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "pm.globals.set(\"current_test\", test);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"FAIL\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "9bf66427-6f39-4f0a-b065-1849d679eee6",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "8d1f3837-1f05-47e4-897c-b7c74da9bd43",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Start Completed Session",
+ "item": [
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9bf0fb70-24a0-4136-99fe-8e0e488afd1e",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "7e3184bf-1741-44c8-8cd3-cff30c2deca1",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is completed\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"completed\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Pause Completed Session",
+ "item": [
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e323c6af-d4e5-4a05-b203-09ba67f77d28",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "40cd04ce-850f-4903-bc24-2e62a4df98fe",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is completed\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"completed\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Stop Completed Session",
+ "item": [
+ {
+ "name": "Stop Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "db7d5209-37b2-46bd-88ab-ce4e241401b2",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/stop",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "stop"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "477dc63d-0b43-4226-b86e-163e2de92b67",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is completed\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"completed\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Clean Up",
+ "item": [
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Update and Read Sessions",
+ "item": [
+ {
+ "name": "Create Default",
+ "item": [
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "b923d95e-a7b4-49d4-ab2a-1f435c454387",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "81f36759-7ae2-42b9-81d2-79f57046b46e",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"tests\");",
+ " pm.expect(typeof jsonData.tests).to.equal(\"object\");",
+ " pm.expect(jsonData.tests).to.have.property(\"include\");",
+ " pm.expect(jsonData.tests.include).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData.tests).to.have.property(\"exclude\");",
+ " pm.expect(jsonData.tests.exclude).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"types\");",
+ " pm.expect(jsonData.types).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"user_agent\");",
+ " pm.expect(typeof jsonData.user_agent).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"timeouts\");",
+ " pm.expect(typeof jsonData.timeouts).to.equal(\"object\")",
+ " pm.expect(jsonData.timeouts).to.have.property(\"automatic\");",
+ " pm.expect(typeof jsonData.timeouts.automatic).to.equal(\"number\");",
+ " pm.expect(jsonData.timeouts).to.have.property(\"manual\");",
+ " pm.expect(typeof jsonData.timeouts.manual).to.equal(\"number\");",
+ " pm.expect(jsonData).to.have.property(\"browser\");",
+ " pm.expect(typeof jsonData.browser).to.equal(\"object\");",
+ " pm.expect(jsonData.browser).to.have.property(\"name\");",
+ " pm.expect(typeof jsonData.browser.name).to.equal(\"string\");",
+ " pm.expect(jsonData.browser).to.have.property(\"version\");",
+ " pm.expect(typeof jsonData.browser.version).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"reference_tokens\");",
+ " pm.expect(jsonData.reference_tokens).to.be.an.instanceof(Array);",
+ "});",
+ "",
+ "pm.test(\"Configuration is default\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " pm.expect(jsonData.tests.include).to.include(\"/\");",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(60000);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(300000);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "2873d554-9816-41f0-9051-fb2cb1272a76",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"status\");",
+ " pm.expect(typeof jsonData.status).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ " pm.expect(jsonData).to.have.property(\"date_started\");",
+ " pm.expect(jsonData.date_started).to.satisfy(value => !value || typeof value === \"number\");",
+ " pm.expect(jsonData).to.have.property(\"date_finished\");",
+ " pm.expect(jsonData.date_finished).to.satisfy(value => !value || typeof value === \"number\");",
+ "});",
+ "",
+ "pm.test(\"Session status is pending\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"pending\");",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "53605764-d4f0-4b32-9a30-67e84dd104d9",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "pm.test(\"All tests are pending tests\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.not.have.lengthOf(0);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(0);",
+ "})",
+ "",
+ "console.log(pm.globals.get(\"available_tests\"))",
+ "const availableTests = JSON.parse(pm.globals.get(\"available_tests\"));",
+ "",
+ "pm.test(\"All available tests are part of the session\", function () {",
+ " for (var api of Object.keys(jsonData.pending_tests)) {",
+ " for (var test of jsonData.pending_tests[api]) {",
+ " pm.expect(availableTests[api]).to.include(test);",
+ " }",
+ " }",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "cf165ae9-1c44-483c-8fe4-bd7cdaa20710",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "6938d456-afd5-431b-a95f-a2c45a3ac479",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Update With Configuration",
+ "item": [
+ {
+ "name": "Update Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "657be5b3-4a99-4415-a1ef-9dfdcf541e46",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "093a0019-a84c-44fc-b890-36369d51b7d5",
+ "exec": [
+ "var automaticTimeout = 120000;",
+ "var manualTimeout = 1000000;",
+ "var specialTimeout = 2000;",
+ "",
+ "pm.globals.set(\"automatic_timeout\", automaticTimeout);",
+ "pm.globals.set(\"manual_timeout\", manualTimeout);",
+ "pm.globals.set(\"special_timeout\", specialTimeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "PUT",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": {{included_tests}},\n \"exclude\": {{excluded_tests}}\n },\n \"types\": [\n \"automatic\",\n \"manual\"\n ],\n \"timeouts\": {\n \"automatic\": {{automatic_timeout}},\n \"manual\": {{manual_timeout}},\n \"{{special_timeout_test}}\": {{special_timeout}}\n }\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "4ac55601-9bdf-41f8-9d59-ff1ee20fa471",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"tests\");",
+ " pm.expect(typeof jsonData.tests).to.equal(\"object\");",
+ " pm.expect(jsonData.tests).to.have.property(\"include\");",
+ " pm.expect(jsonData.tests.include).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData.tests).to.have.property(\"exclude\");",
+ " pm.expect(jsonData.tests.exclude).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"types\");",
+ " pm.expect(jsonData.types).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"user_agent\");",
+ " pm.expect(typeof jsonData.user_agent).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"timeouts\");",
+ " pm.expect(typeof jsonData.timeouts).to.equal(\"object\")",
+ " pm.expect(jsonData.timeouts).to.have.property(\"automatic\");",
+ " pm.expect(typeof jsonData.timeouts.automatic).to.equal(\"number\");",
+ " pm.expect(jsonData.timeouts).to.have.property(\"manual\");",
+ " pm.expect(typeof jsonData.timeouts.manual).to.equal(\"number\");",
+ " pm.expect(jsonData).to.have.property(\"browser\");",
+ " pm.expect(typeof jsonData.browser).to.equal(\"object\");",
+ " pm.expect(jsonData.browser).to.have.property(\"name\");",
+ " pm.expect(typeof jsonData.browser.name).to.equal(\"string\");",
+ " pm.expect(jsonData.browser).to.have.property(\"version\");",
+ " pm.expect(typeof jsonData.browser.version).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"reference_tokens\");",
+ " pm.expect(jsonData.reference_tokens).to.be.an.instanceof(Array);",
+ "});",
+ "",
+ "var includedTests = JSON.parse(pm.globals.get(\"included_tests\"));",
+ "var excludedTests = JSON.parse(pm.globals.get(\"excluded_tests\"));",
+ "var automaticTimeout = pm.globals.get(\"automatic_timeout\");",
+ "var manualTimeout = pm.globals.get(\"manual_timeout\");",
+ "var specialTimeout = pm.globals.get(\"special_timeout\");",
+ "var specialTimeoutTest = pm.globals.get(\"special_timeout_test\");",
+ "",
+ "pm.test(\"Configuration is as specified\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " for (var test of includedTests) {",
+ " pm.expect(jsonData.tests.include).to.include(test);",
+ " }",
+ " for (var test of excludedTests) {",
+ " pm.expect(jsonData.tests.exclude).to.include(test);",
+ " }",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(automaticTimeout);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(manualTimeout);",
+ " pm.expect(jsonData.timeouts[specialTimeoutTest]).to.equal(specialTimeout);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "0b49c01b-e943-4879-b441-4093836d05e9",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "pm.test(\"All tests are pending tests\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.not.have.lengthOf(0);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(0);",
+ "})",
+ "",
+ "const availableTests = pm.globals.get(\"available_tests\");",
+ "const includedTests = pm.globals.get(\"included_tests\");",
+ "const excludedTests = pm.globals.get(\"excluded_tests\");",
+ "",
+ "pm.test(\"Selected subset of tests are part of the session\", function () {",
+ " for (var api of Object.keys(jsonData.pending_tests)) {",
+ " for (var includedTest of includedTests) {",
+ " if (includedTest.split(\"/\").find(part => !!part) === api) {",
+ " var includeRegExp = new RegExp(\"^\" + includedTest, \"i\");",
+ " for (var test of jsonData.pending_tests[api]) {",
+ " pm.expect(test).to.match(regex);",
+ " }",
+ " break;",
+ " }",
+ " }",
+ " for (var excludedTest of excludedTests) {",
+ " if (excludedTest.split(\"/\").find(part => !!part) === api) {",
+ " var excludeRegExp = new RegExp(\"^\" + excludedTest, \"i\");",
+ " for (var test of jsonData.pending_tests[api]) {",
+ " pm.expect(test).to.not.match(regex);",
+ " }",
+ " break;",
+ " }",
+ " }",
+ " }",
+ "});",
+ "",
+ "const sessionTests = jsonData.pending_tests;",
+ "",
+ "pm.globals.set(\"session_tests\", JSON.stringify(sessionTests));"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "b9cba19f-88d2-41f6-940a-a0979da05500",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"status\");",
+ " pm.expect(typeof jsonData.status).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ " pm.expect(jsonData).to.have.property(\"date_started\");",
+ " pm.expect(jsonData.date_started).to.satisfy(value => !value || typeof value === \"number\");",
+ " pm.expect(jsonData).to.have.property(\"date_finished\");",
+ " pm.expect(jsonData.date_finished).to.satisfy(value => !value || typeof value === \"number\");",
+ "});",
+ "",
+ "pm.test(\"Session status is pending\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"pending\");",
+ "})",
+ "",
+ "pm.test(\"Start and Finish date not set\", function () {",
+ " pm.expect(jsonData.date_started).to.be.null;",
+ " pm.expect(jsonData.date_finished).to.be.null;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "3669c70a-88bc-4854-abac-5cb80a21c392",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "1a81f5c5-28d8-4935-b8ae-822e33c23da9",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Update Session Labels",
+ "item": [
+ {
+                    "name": "Create Session w/ Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9de9321c-ae78-4abd-a740-6634e2cbb9b9",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "739284e7-17ac-4c34-baa1-933353eb7ecc",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"labels\": [\"label1\", \"label2\"]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "5b15f338-c773-4f48-8f78-1d7c926beb81",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"tests\");",
+ " pm.expect(typeof jsonData.tests).to.equal(\"object\");",
+ " pm.expect(jsonData.tests).to.have.property(\"include\");",
+ " pm.expect(jsonData.tests.include).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData.tests).to.have.property(\"exclude\");",
+ " pm.expect(jsonData.tests.exclude).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"types\");",
+ " pm.expect(jsonData.types).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"user_agent\");",
+ " pm.expect(typeof jsonData.user_agent).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"timeouts\");",
+ " pm.expect(typeof jsonData.timeouts).to.equal(\"object\")",
+ " pm.expect(jsonData.timeouts).to.have.property(\"automatic\");",
+ " pm.expect(typeof jsonData.timeouts.automatic).to.equal(\"number\");",
+ " pm.expect(jsonData.timeouts).to.have.property(\"manual\");",
+ " pm.expect(typeof jsonData.timeouts.manual).to.equal(\"number\");",
+ " pm.expect(jsonData).to.have.property(\"browser\");",
+ " pm.expect(typeof jsonData.browser).to.equal(\"object\");",
+ " pm.expect(jsonData.browser).to.have.property(\"name\");",
+ " pm.expect(typeof jsonData.browser.name).to.equal(\"string\");",
+ " pm.expect(jsonData.browser).to.have.property(\"version\");",
+ " pm.expect(typeof jsonData.browser.version).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"reference_tokens\");",
+ " pm.expect(jsonData.reference_tokens).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"labels\");",
+ " pm.expect(jsonData.labels).to.be.an.instanceof(Array);",
+ "});",
+ "",
+                                    "pm.test(\"Configuration is default and contains the provided labels\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " pm.expect(jsonData.tests.include).to.include(\"/\");",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(60000);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(300000);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ " pm.expect(jsonData.labels).to.include(\"label1\");",
+ " pm.expect(jsonData.labels).to.include(\"label2\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+                    "name": "Update Labels",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e69b7188-b3c9-4f66-8c6c-3f3348f84f6a",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "PUT",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n\t\"labels\": [\"new\", \"labels\"]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/labels",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "labels"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "86e20c4d-5797-4b6e-a8cc-14e284e7c491",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"tests\");",
+ " pm.expect(typeof jsonData.tests).to.equal(\"object\");",
+ " pm.expect(jsonData.tests).to.have.property(\"include\");",
+ " pm.expect(jsonData.tests.include).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData.tests).to.have.property(\"exclude\");",
+ " pm.expect(jsonData.tests.exclude).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"types\");",
+ " pm.expect(jsonData.types).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"user_agent\");",
+ " pm.expect(typeof jsonData.user_agent).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"timeouts\");",
+ " pm.expect(typeof jsonData.timeouts).to.equal(\"object\")",
+ " pm.expect(jsonData.timeouts).to.have.property(\"automatic\");",
+ " pm.expect(typeof jsonData.timeouts.automatic).to.equal(\"number\");",
+ " pm.expect(jsonData.timeouts).to.have.property(\"manual\");",
+ " pm.expect(typeof jsonData.timeouts.manual).to.equal(\"number\");",
+ " pm.expect(jsonData).to.have.property(\"browser\");",
+ " pm.expect(typeof jsonData.browser).to.equal(\"object\");",
+ " pm.expect(jsonData.browser).to.have.property(\"name\");",
+ " pm.expect(typeof jsonData.browser.name).to.equal(\"string\");",
+ " pm.expect(jsonData.browser).to.have.property(\"version\");",
+ " pm.expect(typeof jsonData.browser.version).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"reference_tokens\");",
+ " pm.expect(jsonData.reference_tokens).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"labels\");",
+ " pm.expect(jsonData.labels).to.be.an.instanceof(Array);",
+ "});",
+ "",
+                                    "pm.test(\"Configuration is default and labels are updated\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " pm.expect(jsonData.tests.include).to.include(\"/\");",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(60000);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(300000);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ " pm.expect(jsonData.labels).to.include(\"new\");",
+ " pm.expect(jsonData.labels).to.include(\"labels\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Delete Session",
+ "item": [
+ {
+ "name": "Setup",
+ "item": [
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e85fd539-4598-47a9-8768-656656f29fec",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Delete Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "8e9fe4af-0d50-49eb-8a4a-00e13021b4a6",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9418112a-03f6-4641-893e-076c8922c0af",
+ "exec": [
+ "pm.test(\"Status code is 404\", function () {",
+ " pm.response.to.have.status(404);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Create and Read Results",
+ "item": [
+ {
+ "name": "Create Session",
+ "item": [
+ {
+ "name": "Create Session Two Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a6995ff1-d626-4f4f-a1e0-567f3429bf52",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "85c7ad8d-5fc9-403d-8715-c65d5ff1323e",
+ "exec": [
+ "const availableTests = JSON.parse(pm.globals.get(\"available_tests\"));",
+ "const keys = Object.keys(availableTests).sort();",
+ "const test1 = availableTests[keys[0]][0];",
+ "const test2 = availableTests[keys[1]][0];",
+ "",
+ "console.log(test1, test2)",
+ "",
+ "pm.globals.set(\"single_test_1\", test1);",
+ "pm.globals.set(\"single_test_2\", test2);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": [\"{{single_test_1}}\", \"{{single_test_2}}\"]\n }\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "35cf4745-0301-4c9c-b6b3-40945299533f",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Results",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "6dc082ef-80ff-407a-a562-e25e37476eee",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Responds with no results\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(typeof jsonData).to.equal(\"object\");",
+ " pm.expect(Object.keys(jsonData)).to.be.empty;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ],
+ "query": [
+ {
+ "key": "path",
+ "value": "/2dcontext/drawing-images-to-the-canvas",
+ "disabled": true
+ },
+ {
+ "key": "path",
+ "value": "/2dcontext/conformance-requirements",
+ "disabled": true
+ },
+ {
+ "key": "path",
+ "value": "/2dcontext/conformance-requirements/2d.missingargs.html",
+ "disabled": true
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Results Compact",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "fcade663-dbd8-42c9-8344-53ae1feb6fcb",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var singleTest1 = pm.globals.get(\"single_test_1\");",
+ "var singleTest2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "var api1 = singleTest1.split(\"/\").find(part => !!part);",
+ "var api2 = singleTest2.split(\"/\").find(part => !!part);",
+ "",
+                                            "pm.test(\"Responds with no completed tests\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(typeof jsonData).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(api1);",
+ " pm.expect(jsonData).to.have.property(api2);",
+ " pm.expect(jsonData[api1].complete).to.equal(0);",
+ " pm.expect(jsonData[api2].complete).to.equal(0);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/compact",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "compact"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Last Completed Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "36d70d08-3caa-4312-9bbd-aed15fd238dd",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ " ",
+ "pm.test(\"JSON format is as expected\", function () {",
+ " pm.expect(Object.keys(jsonData)).to.have.lengthOf(3);",
+ " pm.expect(jsonData).to.have.property(\"pass\");",
+ " pm.expect(jsonData).to.have.property(\"fail\");",
+ " pm.expect(jsonData).to.have.property(\"timeout\");",
+ " for (var key of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[key]).to.be.an.instanceof(Array);",
+ " }",
+ "});",
+ "",
+ "pm.test(\"Responds with no last completed tests\", function () {",
+ " for (var key of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[key]).to.be.empty;",
+ " }",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/last_completed",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "last_completed"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "211adcf7-d71a-4b0f-94ab-d285fc8367df",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "const sessionTests = jsonData.pending_tests;",
+ "",
+ "pm.globals.set(\"session_tests\", JSON.stringify(sessionTests));"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Create First Result",
+ "item": [
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "5410c9c2-ebbe-4bc1-ac12-e2602d8ade57",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(Object.keys(response)).to.have.lengthOf(1);",
+ " pm.expect(response).to.have.property(\"next_test\");",
+ " pm.expect(typeof response.next_test).to.equal(\"string\");",
+ "});",
+ "",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
+ "",
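+                                            "// next_test is a runner URL; extract the wrapped test path from its test_url query parameter and normalize it to a root-relative path.",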
+ "const parameters = nextTest.split(\"?\")[1].split(\"&\");",
+ "let test = parameters.find(parameter => parameter.split(\"=\")[0] === \"test_url\").split(\"=\")[1];",
+ "test = decodeURIComponent(test);",
+ "test = \"/\" + test.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "pm.globals.set(\"current_test\", test);",
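+                                            "// current_test is consumed by the body of the subsequent Create Result request.",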
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "",
+ "pm.test(\"Returned test is first of two specified tests\", function () {",
+ " console.log(test);",
+ " console.log(test1);",
+ " console.log(pm.globals.get(\"single_test_2\"))",
+ " pm.expect(test).to.equal(test1);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e4f9a03f-c731-4192-a9a9-aa9e315c3c44",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "pm.test(\"One test is pending, one test is running\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.have.lengthOf(1);",
+ " var api = Object.keys(jsonData.pending_tests)[0];",
+ " pm.expect(jsonData.pending_tests[api]).to.have.lengthOf(1);",
+ " pm.expect(jsonData.pending_tests[api]).to.include(test2);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(1);",
+ " api = Object.keys(jsonData.running_tests)[0];",
+ " pm.expect(jsonData.running_tests[api]).to.have.lengthOf(1);",
+ " pm.expect(jsonData.running_tests[api]).to.include(test1);",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9f684f77-4ed0-4cd2-99a4-b1c134432e9f",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"FAIL\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "33601dd0-6786-4c8a-a6b5-7f1386129de7",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "pm.test(\"One test is pending, one test is completed\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.have.lengthOf(1);",
+ " var api = Object.keys(jsonData.pending_tests)[0];",
+ " pm.expect(jsonData.pending_tests[api]).to.have.lengthOf(1);",
+ " pm.expect(jsonData.pending_tests[api]).to.include(test2);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(0);",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "97be92f1-a8d1-41ea-b41d-0ca08113e6e1",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"status\");",
+ " pm.expect(typeof jsonData.status).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ " pm.expect(jsonData).to.have.property(\"date_started\");",
+ " pm.expect(jsonData).to.have.property(\"date_finished\");",
+ "});",
+ "",
+ "pm.test(\"Session status is running\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"running\");",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Last Completed Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "24ec2b79-c266-473f-8579-7b694079d640",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ " ",
+ "pm.test(\"JSON format is as expected\", function () {",
+ " pm.expect(Object.keys(jsonData)).to.have.lengthOf(3);",
+ " pm.expect(jsonData).to.have.property(\"pass\");",
+ " pm.expect(jsonData).to.have.property(\"fail\");",
+ " pm.expect(jsonData).to.have.property(\"timeout\");",
+ " for (var key of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[key]).to.be.an.instanceof(Array);",
+ " }",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "",
+                                            "pm.test(\"Responds with one last completed test as failed\", function () {",
+ " pm.expect(jsonData[\"pass\"]).to.be.empty;",
+ " pm.expect(jsonData[\"fail\"]).to.have.lengthOf(1);",
+ " pm.expect(jsonData[\"fail\"][0]).to.equal(test1);",
+ " pm.expect(jsonData[\"timeout\"]).to.be.empty;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/last_completed",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "last_completed"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Results",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "1f47b39f-727f-43b4-934d-c7f62b268baa",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON format is as expected\", function () {",
+ " for (var api of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[api]).to.be.an.instanceof(Array);",
+ " for (var result of jsonData[api]) {",
+ " pm.expect(typeof result).to.equal(\"object\");",
+ " pm.expect(Object.keys(result)).to.have.lengthOf(4);",
+ " pm.expect(result).to.have.property(\"test\");",
+ " pm.expect(typeof result.test).to.equal(\"string\");",
+ " pm.expect(result).to.have.property(\"status\");",
+ " pm.expect(typeof result.status).to.equal(\"string\");",
+ " pm.expect(result).to.have.property(\"message\");",
+ " pm.expect(result.message).to.satisfy(message => !message || typeof message === \"string\");",
+ " pm.expect(result).to.have.property(\"subtests\");",
+ " pm.expect(result.subtests).to.be.an.instanceof(Array);",
+ " for (var subtest of result.subtests) {",
+ " pm.expect(typeof subtest).to.equal(\"object\");",
+ " pm.expect(Object.keys(subtest)).to.have.lengthOf(3);",
+ " pm.expect(subtest).to.have.property(\"name\");",
+ " pm.expect(typeof subtest.name).to.equal(\"string\");",
+ " pm.expect(subtest).to.have.property(\"status\");",
+ " pm.expect(typeof subtest.status).to.equal(\"string\");",
+ " pm.expect(subtest).to.have.property(\"message\");",
+ " pm.expect(subtest.message).to.satisfy(message => !message || typeof message === \"string\");",
+ " }",
+ " }",
+ " }",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "",
+                                            "pm.test(\"Result is for the first test: run OK with a failing subtest\", function () {",
+ " var api = Object.keys(jsonData)[0];",
+ " pm.expect(api).to.equal(test1.split(\"/\").find(part => !!part))",
+ " var result = jsonData[api][0];",
+ " pm.expect(result.test).to.equal(test1);",
+ " pm.expect(result.status).to.equal(\"OK\");",
+ " pm.expect(result.message).to.be.null;",
+ " var subtest = result.subtests[0];",
+ " pm.expect(subtest.status).to.equal(\"FAIL\");",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ],
+ "query": [
+ {
+ "key": "path",
+ "value": "/2dcontext/drawing-images-to-the-canvas",
+ "disabled": true
+ },
+ {
+ "key": "path",
+ "value": "/2dcontext/conformance-requirements",
+ "disabled": true
+ },
+ {
+ "key": "path",
+ "value": "/2dcontext/conformance-requirements/2d.missingargs.html",
+ "disabled": true
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Results Compact",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e734f074-a0c2-4e54-b427-2827fa732843",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "var test = pm.globals.get(\"single_test_1\");",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(typeof jsonData).to.equal(\"object\");",
+ " for (var api of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[api]).to.have.property(\"pass\");",
+ " pm.expect(typeof jsonData[api].pass).to.equal(\"number\");",
+ " pm.expect(jsonData[api]).to.have.property(\"fail\");",
+ " pm.expect(typeof jsonData[api].fail).to.equal(\"number\");",
+ " pm.expect(jsonData[api]).to.have.property(\"timeout\");",
+ " pm.expect(typeof jsonData[api].timeout).to.equal(\"number\");",
+ " pm.expect(jsonData[api]).to.have.property(\"not_run\");",
+ " pm.expect(typeof jsonData[api].not_run).to.equal(\"number\");",
+ " }",
+ "})",
+ "",
+ "pm.test(\"Responds with one test failed\", function () {",
+ " var api = test.split(\"/\").find(part => !!part);",
+ " pm.expect(jsonData[api].pass).to.equal(0);",
+ " pm.expect(jsonData[api].fail).to.equal(1);",
+ " pm.expect(jsonData[api].timeout).to.equal(0);",
+ " pm.expect(jsonData[api].not_run).to.equal(0);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/compact",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "compact"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Create Last Result",
+ "item": [
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "fd2c11ee-5eca-43f3-89a7-133602c8034a",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(Object.keys(response)).to.have.lengthOf(1);",
+ " pm.expect(response).to.have.property(\"next_test\");",
+ " pm.expect(typeof response.next_test).to.equal(\"string\");",
+ "});",
+ "",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
+ "",
+ "const parameters = nextTest.split(\"?\")[1].split(\"&\");",
+ "let test = parameters.find(parameter => parameter.split(\"=\")[0] === \"test_url\").split(\"=\")[1];",
+ "test = decodeURIComponent(test);",
+ "test = \"/\" + test.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "pm.globals.set(\"current_test\", test);",
+ "",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "pm.test(\"Returned test is second of two specified tests\", function () {",
+ " pm.expect(test).to.equal(test2);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "112ee3d5-d4e0-4838-879e-f74c38d0218c",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "pm.test(\"One test is running\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.have.lengthOf(0);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(1);",
+ " var api = Object.keys(jsonData.running_tests)[0];",
+ " pm.expect(jsonData.running_tests[api]).to.have.lengthOf(1);",
+ " pm.expect(jsonData.running_tests[api]).to.include(test2);",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "79a84ed9-bb07-493d-ab34-8e7693b3ebbd",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"PASS\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "c1829043-18dd-4cc1-8091-41981349f4e3",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"status\");",
+ " pm.expect(typeof jsonData.status).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ " pm.expect(jsonData).to.have.property(\"date_started\");",
+ " pm.expect(jsonData).to.have.property(\"date_finished\");",
+ "});",
+ "",
+ "pm.test(\"Session status is completed\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"completed\");",
+ "})",
+ "",
+ "pm.test(\"Finish date is set\", function () {",
+ " pm.expect(Date.parse(jsonData.date_finished)).to.be.below(Date.now());",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "59fee9df-9945-4fdf-a102-9c1b92d915c1",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "var test1Api = test1.split(\"/\").find(part => !!part);",
+                                            "var test2Api = test2.split(\"/\").find(part => !!part);",
+ "",
+                                            "pm.test(\"No tests are pending or running\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.have.lengthOf(0);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(0);",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Last Completed Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "91fa38ac-3b06-488a-892e-59458e88a4da",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ " ",
+ "pm.test(\"JSON format is as expected\", function () {",
+ " pm.expect(Object.keys(jsonData)).to.have.lengthOf(3);",
+ " pm.expect(jsonData).to.have.property(\"pass\");",
+ " pm.expect(jsonData).to.have.property(\"fail\");",
+ " pm.expect(jsonData).to.have.property(\"timeout\");",
+ " for (var key of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[key]).to.be.an.instanceof(Array);",
+ " }",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+                                            "pm.test(\"Responds with one last completed test as failed and one as passed\", function () {",
+ " pm.expect(jsonData[\"pass\"]).to.have.lengthOf(1);",
+ " pm.expect(jsonData[\"pass\"][0]).to.equal(test2);",
+ " pm.expect(jsonData[\"fail\"]).to.have.lengthOf(1);",
+ " pm.expect(jsonData[\"fail\"][0]).to.equal(test1);",
+ " pm.expect(jsonData[\"timeout\"]).to.be.empty;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/last_completed",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "last_completed"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Results",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "dca0b42e-9054-4354-87e5-e8d236216050",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON format is as expected\", function () {",
+ " for (var api of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[api]).to.be.an.instanceof(Array);",
+ " for (var result of jsonData[api]) {",
+ " pm.expect(typeof result).to.equal(\"object\");",
+ " pm.expect(Object.keys(result)).to.have.lengthOf(4);",
+ " pm.expect(result).to.have.property(\"test\");",
+ " pm.expect(typeof result.test).to.equal(\"string\");",
+ " pm.expect(result).to.have.property(\"status\");",
+ " pm.expect(typeof result.status).to.equal(\"string\");",
+ " pm.expect(result).to.have.property(\"message\");",
+ " pm.expect(result.message).to.satisfy(message => !message || typeof message === \"string\");",
+ " pm.expect(result).to.have.property(\"subtests\");",
+ " pm.expect(result.subtests).to.be.an.instanceof(Array);",
+ " for (var subtest of result.subtests) {",
+ " pm.expect(typeof subtest).to.equal(\"object\");",
+ " pm.expect(Object.keys(subtest)).to.have.lengthOf(3);",
+ " pm.expect(subtest).to.have.property(\"name\");",
+ " pm.expect(typeof subtest.name).to.equal(\"string\");",
+ " pm.expect(subtest).to.have.property(\"status\");",
+ " pm.expect(typeof subtest.status).to.equal(\"string\");",
+ " pm.expect(subtest).to.have.property(\"message\");",
+ " pm.expect(subtest.message).to.satisfy(message => !message || typeof message === \"string\");",
+ " }",
+ " }",
+ " }",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+                                            "pm.test(\"Results contain both tests: runs OK, first subtest failed, second subtest passed\", function () {",
+ " var api = Object.keys(jsonData)[0];",
+ " for (var result of jsonData[api]) {",
+ " if (result.test === test1) {",
+ " pm.expect(result.test).to.equal(test1); ",
+ " pm.expect(result.status).to.equal(\"OK\");",
+ " pm.expect(result.message).to.be.null;",
+ " var subtest = result.subtests[0];",
+ " pm.expect(subtest.status).to.equal(\"FAIL\");",
+ " } else {",
+ " pm.expect(result.test).to.equal(test2); ",
+ " pm.expect(result.status).to.equal(\"OK\");",
+ " pm.expect(result.message).to.be.null;",
+ " subtest = result.subtests[0];",
+ " pm.expect(subtest.status).to.equal(\"PASS\");",
+ " }",
+ " }",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ],
+ "query": [
+ {
+ "key": "path",
+ "value": "/2dcontext/drawing-images-to-the-canvas",
+ "disabled": true
+ },
+ {
+ "key": "path",
+ "value": "/2dcontext/conformance-requirements",
+ "disabled": true
+ },
+ {
+ "key": "path",
+ "value": "/2dcontext/conformance-requirements/2d.missingargs.html",
+ "disabled": true
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Results Compact",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "14ec9254-5a40-4c86-9a7f-72f43a35a79b",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(typeof jsonData).to.equal(\"object\");",
+ " for (var api of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[api]).to.have.property(\"pass\");",
+ " pm.expect(typeof jsonData[api].pass).to.equal(\"number\");",
+ " pm.expect(jsonData[api]).to.have.property(\"fail\");",
+ " pm.expect(typeof jsonData[api].fail).to.equal(\"number\");",
+ " pm.expect(jsonData[api]).to.have.property(\"timeout\");",
+ " pm.expect(typeof jsonData[api].timeout).to.equal(\"number\");",
+ " pm.expect(jsonData[api]).to.have.property(\"not_run\");",
+ " pm.expect(typeof jsonData[api].not_run).to.equal(\"number\");",
+ " }",
+ "})",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "var test1Api = test1.split(\"/\").find(part => !!part);",
+                                            "var test2Api = test2.split(\"/\").find(part => !!part);",
+ "",
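+                                            "// The two included tests belong to different APIs, so the compact summary should contain one entry per API.",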
+                                            "pm.test(\"Responds with one test failed and one test passed\", function () {",
+ " pm.expect(Object.keys(jsonData)).to.have.lengthOf(2);",
+ " var api = Object.keys(jsonData)[0];",
+ " if (api === test1Api) {",
+ " pm.expect(jsonData[api].pass).to.equal(0);",
+ " pm.expect(jsonData[api].fail).to.equal(1);",
+ " } else {",
+ " pm.expect(jsonData[api].pass).to.equal(1);",
+ " pm.expect(jsonData[api].fail).to.equal(0);",
+ " }",
+ " pm.expect(jsonData[api].timeout).to.equal(0);",
+ " pm.expect(jsonData[api].not_run).to.equal(0);",
+ " api = Object.keys(jsonData)[1];",
+ " if (api === test1Api) {",
+ " pm.expect(jsonData[api].pass).to.equal(0);",
+ " pm.expect(jsonData[api].fail).to.equal(1);",
+ " } else {",
+ " pm.expect(jsonData[api].pass).to.equal(1);",
+ " pm.expect(jsonData[api].fail).to.equal(0);",
+ " }",
+ " pm.expect(jsonData[api].timeout).to.equal(0);",
+ " pm.expect(jsonData[api].not_run).to.equal(0);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/compact",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "compact"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "132574dc-3688-47e3-8f50-3235f18806f7",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(Object.keys(response)).to.have.lengthOf(1);",
+ " pm.expect(response).to.have.property(\"next_test\");",
+ " pm.expect(typeof response.next_test).to.equal(\"string\");",
+ "});",
+ "",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
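+                                            "// Once all results are reported, next_test points directly to the finish page instead of wrapping a test_url.",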
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "pm.globals.set(\"current_test\", test);",
+ "",
+ "const web_root = pm.environment.get(\"web_root\");",
+ "",
+ "pm.test(\"Returned test finish page\", function () {",
+ " pm.expect(test).to.equal(\"/\" + web_root + \"/finish.html\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Clean Up",
+ "item": [
+ {
+ "name": "Delete Session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Viewing and Downloading Reports",
+ "item": [
+ {
+ "name": "Create Sessions",
+ "item": [
+ {
+ "name": "First Session",
+ "item": [
+ {
+                                    "name": "Create Session One Test",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "2f233dcf-d934-4479-8908-b1349e6ea54d",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "2930a821-2c6e-47b9-89d9-90ebe1cff52a",
+ "exec": [
+ "const availableTests = JSON.parse(pm.globals.get(\"available_tests\"));",
+ "const keys = Object.keys(availableTests).sort();",
+ "const test1 = availableTests[keys[0]][0];",
+ "const test2 = availableTests[keys[1]][0];",
+ "const apiName = test1.split(\"/\").find(part => !!part);",
+ "",
+ "pm.globals.set(\"single_test_1\", test1);",
+ "pm.globals.set(\"single_test_2\", test2);",
+ "pm.globals.set(\"api_name\", apiName);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": [\"{{single_test_1}}\"]\n }\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "12f3616b-707e-4a71-8db8-89fc195c1fcd",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f35f1509-383f-49af-a30e-41e1df4efa04",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
+ "",
+ "const parameters = nextTest.split(\"?\")[1].split(\"&\");",
+ "let test = parameters.find(parameter => parameter.split(\"=\")[0] === \"test_url\").split(\"=\")[1];",
+ "test = decodeURIComponent(test);",
+ "test = \"/\" + test.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "pm.globals.set(\"current_test\", test);",
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "b015f161-e390-4224-9f26-92395f44c772",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"FAIL\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Second Session",
+ "item": [
+ {
+ "name": "Create Session One Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "da3d0c17-6b2b-4d61-ad01-dba736425388",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token_comp\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "7da81e81-d344-4459-b700-19b93d49d3c9",
+ "exec": [
+ "const availableTests = JSON.parse(pm.globals.get(\"available_tests\"));",
+ "const keys = Object.keys(availableTests).sort();",
+ "const test1 = availableTests[keys[0]][0];",
+ "",
+ "pm.globals.set(\"single_test_1\", test1);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": [\"{{single_test_1}}\"]\n }\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "3296c7ba-67a6-405d-a78a-51dd64432301",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token_comp}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token_comp}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "72deb55c-4583-4741-9cf8-97713762b894",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
+ "",
+ "const parameters = nextTest.split(\"?\")[1].split(\"&\");",
+ "let test = parameters.find(parameter => parameter.split(\"=\")[0] === \"test_url\").split(\"=\")[1];",
+ "test = decodeURIComponent(test);",
+ "test = \"/\" + test.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "pm.globals.set(\"current_test\", test);",
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token_comp}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token_comp}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "d695f300-a773-4654-b346-95c536cad19e",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"FAIL\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token_comp}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token_comp}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Download reports",
+ "item": [
+ {
+ "name": "Download Results Overview",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "6838965f-7a27-48d5-8c27-f55d4756588f",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/overview",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "overview"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Download All Apis Json",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "bcc2c8c4-94c0-4227-a627-3e0d5be2e7b8",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/json",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "json"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Download WPT Multi Report Url",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "7b370723-0506-4b5e-941a-f31a628a29f6",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Uri returned\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(typeof jsonData.uri).to.equal(\"string\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{api_name}}/reporturl?tokens={{session_token}},{{session_token_comp}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{api_name}}",
+ "reporturl"
+ ],
+ "query": [
+ {
+ "key": "tokens",
+ "value": "{{session_token}},{{session_token_comp}}"
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Download Results Api Json",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "b5243979-b63a-4bbf-bd51-1529da1e1f6d",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "81f3da29-8a0c-4403-929d-86590b01ef2f",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/{{api_name}}/json",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "{{api_name}}",
+ "json"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Download WPT Report Copy",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "0168ea3f-9bf7-4ea7-a3ad-745c2704d97e",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Uri returned\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(typeof jsonData.uri).to.equal(\"string\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "e4efdac9-2d00-4853-b145-18a6d44ff139",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/{{api_name}}/reporturl",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "{{api_name}}",
+ "reporturl"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Malfunctioning List",
+ "item": [
+ {
+ "name": "Create Session \\w Configuration Copy",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e685a989-8ef7-4e8f-a3c7-b579c66430f9",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "8517360b-2e66-4ca9-9339-017622c39888",
+ "exec": [
+ "var automaticTimeout = 120000;",
+ "var manualTimeout = 1000000;",
+ "var specialTimeout = 2000;",
+ "",
+ "pm.globals.set(\"automatic_timeout\", automaticTimeout);",
+ "pm.globals.set(\"manual_timeout\", manualTimeout);",
+ "pm.globals.set(\"special_timeout\", specialTimeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": {{included_tests}},\n \"exclude\": {{excluded_tests}}\n },\n \"types\": [\n \"automatic\"\n ],\n \"timeouts\": {\n \"automatic\": {{automatic_timeout}},\n \"manual\": {{manual_timeout}},\n \"{{special_timeout_test}}\": {{special_timeout}}\n },\n \"labels\": [\"label1\", \"label2\"]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Malfunctioning Empty",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "4e85ecaf-4364-4681-ab8a-221d40681c44",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"Return empty array\", function() {",
+ " pm.expect(jsonData).to.be.an.instanceof(Array)",
+ " pm.expect(jsonData).to.have.length(0)",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/malfunctioning",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "malfunctioning"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Update Session Malfunctioning Insert Two",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a6221560-9eba-4fb4-a7cb-a1570a37425d",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "PUT",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "[\n\t\"/test/file/one.html\",\n\t\"/test/file/two.html\"\n]",
+ "options": {
+ "raw": {
+ "language": "json"
+ }
+ }
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/malfunctioning",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "malfunctioning"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Malfunctioning Two Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "77d14567-4600-44fd-98e4-99e113da6781",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"Return array with two tests\", function() {",
+ " pm.expect(jsonData).to.be.an.instanceof(Array)",
+ " pm.expect(jsonData).to.have.length(2)",
+ " pm.expect(jsonData).to.include(\"/test/file/one.html\")",
+ " pm.expect(jsonData).to.include(\"/test/file/two.html\")",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/malfunctioning",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "malfunctioning"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Update Session Malfunctioning Empty Array",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "55b486da-c5ae-49d4-b11a-247387267c9a",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "PUT",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "[]",
+ "options": {
+ "raw": {
+ "language": "json"
+ }
+ }
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/malfunctioning",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "malfunctioning"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Malfunctioning Empty",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "953002ec-d8df-477a-a0dc-f4e4a2efd031",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"Return empty array\", function() {",
+ " pm.expect(jsonData).to.be.an.instanceof(Array)",
+ " pm.expect(jsonData).to.have.length(0)",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/malfunctioning",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "malfunctioning"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Sessions API",
+ "item": [
+ {
+ "name": "create session",
+ "item": [
+ {
+ "name": "With Defaults",
+ "item": [
+ {
+ "name": "Prep: Read Available Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "fc15d329-d132-4abf-90e4-f549eee99b60",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var availableTests = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(typeof availableTests).to.equal(\"object\");",
+ " for (var api of Object.keys(availableTests)) {",
+ " pm.expect(availableTests[api]).to.be.an.instanceof(Array);",
+ " var apiRegExp = new RegExp(\"^/\" + api, \"i\");",
+ " for (var test of availableTests[api]) {",
+ " pm.expect(test).to.match(apiRegExp);",
+ " }",
+ " }",
+ "});",
+ "",
+ "var includedTests = [];",
+ "var excludedTests = [];",
+ "var specialTimeoutTest = \"\";",
+ "",
+ "var apis = Object.keys(availableTests);",
+ "for(var api of apis) {",
+ " if (availableTests[api].length > 50) {",
+ " var subDirs = availableTests[api].map(test => test.split(\"/\").filter(part => !!part).join(\"/\").split(\"/\")[1]).reduce((acc, curr) => acc.indexOf(curr) === -1 ? acc.concat([curr]) : acc, []);",
+ " if (subDirs.length > 2) {",
+ " includedTests.push(\"/\" + api);",
+ " excludedTests.push(\"/\" + api + \"/\" + subDirs[0]);",
+ " specialTimeoutTest = availableTests[api][availableTests[api].length - 1];",
+ " break;",
+ " }",
+ " ",
+ " }",
+ "}",
+ "",
+ "pm.globals.set(\"available_tests\", availableTests);",
+ "pm.globals.set(\"included_tests\", JSON.stringify(includedTests));",
+ "pm.globals.set(\"excluded_tests\", JSON.stringify(excludedTests));",
+ "pm.globals.set(\"special_timeout_test\", specialTimeoutTest.replace(\".\", \"\"));"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "ebb192ce-48e5-4aac-b99d-68894378eac8",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "159976cd-de00-4f88-8f9f-47db13ca4a30",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Configuration is default\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " pm.expect(jsonData.tests.include).to.include(\"/\");",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(60000);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(300000);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ " pm.expect(jsonData.labels).to.be.empty;",
+ " pm.expect(new Date(jsonData.date_created).getTime()).to.be.below(Date.now());",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "5058b07b-5e55-44e0-ad88-5c7b5884fa3c",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"Session status is pending\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"pending\");",
+ "})",
+ "",
+ "pm.test(\"Start, Finish and Expiration date not set\", function () {",
+ " pm.expect(jsonData.date_started).to.be.null;",
+ " pm.expect(jsonData.date_finished).to.be.null;",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "62a8c853-bf0b-47b0-9cee-7611d2b48cde",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "pm.test(\"All tests are pending tests\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.not.have.lengthOf(0);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(0);",
+ "})",
+ "",
+ "const availableTests = pm.globals.get(\"available_tests\");",
+ "",
+ "pm.test(\"All available tests are part of the session\", function () {",
+ " for (var api of Object.keys(jsonData.pending_tests)) {",
+ " for (var test of jsonData.pending_tests[api]) {",
+ " pm.expect(availableTests[api]).to.include(test);",
+ " }",
+ " }",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "878420c3-575a-4194-9a72-cebe07823674",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "6672030f-5a8a-4bea-9792-c24b980392e9",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "With Configuration",
+ "item": [
+ {
+ "name": "Prep: Read Available Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "504b76ca-6870-443c-8c33-0cccc52cddae",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var availableTests = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(typeof availableTests).to.equal(\"object\");",
+ " for (var api of Object.keys(availableTests)) {",
+ " pm.expect(availableTests[api]).to.be.an.instanceof(Array);",
+ " var apiRegExp = new RegExp(\"^/\" + api, \"i\");",
+ " for (var test of availableTests[api]) {",
+ " pm.expect(test).to.match(apiRegExp);",
+ " }",
+ " }",
+ "});",
+ "",
+ "var includedTests = [];",
+ "var excludedTests = [];",
+ "var specialTimeoutTest = \"\";",
+ "",
+ "var apis = Object.keys(availableTests).sort();",
+ "for(var api of apis) {",
+ " if (availableTests[api].length > 50) {",
+ " var subDirs = availableTests[api].map(test => test.split(\"/\").filter(part => !!part).join(\"/\").split(\"/\")[1]).reduce((acc, curr) => acc.indexOf(curr) === -1 ? acc.concat([curr]) : acc, []);",
+ " if (subDirs.length > 2) {",
+ " includedTests.push(\"/\" + api);",
+ " excludedTests.push(\"/\" + api + \"/\" + subDirs[0]);",
+ " specialTimeoutTest = availableTests[api][availableTests[api].length - 1];",
+ " break;",
+ " }",
+ " ",
+ " }",
+ "}",
+ "",
+ "pm.globals.set(\"available_tests\", availableTests);",
+ "pm.globals.set(\"included_tests\", JSON.stringify(includedTests));",
+ "pm.globals.set(\"excluded_tests\", JSON.stringify(excludedTests));",
+ "pm.globals.set(\"special_timeout_test\", specialTimeoutTest.replace(\".\", \"\"));"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session \\w Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "fa956871-503a-421c-9822-4e2a11e1cf1c",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "213bdf59-9f98-4b5e-bacc-ae3cb31ae26a",
+ "exec": [
+ "var automaticTimeout = 120000;",
+ "var manualTimeout = 1000000;",
+ "var specialTimeout = 2000;",
+ "",
+ "pm.globals.set(\"automatic_timeout\", automaticTimeout);",
+ "pm.globals.set(\"manual_timeout\", manualTimeout);",
+ "pm.globals.set(\"special_timeout\", specialTimeout);",
+ "",
+ "const availableTests = pm.globals.get(\"available_tests\");",
+ "const apiNames = Object.keys(availableTests).sort();",
+ "const apiName = apiNames[0];",
+ "",
+ "pm.globals.set(\"api_name\", apiName);",
+ "pm.globals.set(\"special_timeout_test\", \"/\" + apiName);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": [\"/{{api_name}}\"]\n },\n \"timeouts\": {\n \"automatic\": {{automatic_timeout}},\n \"manual\": {{manual_timeout}},\n \"{{special_timeout_test}}\": {{special_timeout}}\n },\n \"labels\": [\"label1\", \"label2\"]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a0434a56-04b8-4987-8f44-2d71b440ef03",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "var includedTests = JSON.parse(pm.globals.get(\"included_tests\"));",
+ "var excludedTests = JSON.parse(pm.globals.get(\"excluded_tests\"));",
+ "var automaticTimeout = pm.globals.get(\"automatic_timeout\");",
+ "var manualTimeout = pm.globals.get(\"manual_timeout\");",
+ "var specialTimeout = pm.globals.get(\"special_timeout\");",
+ "var specialTimeoutTest = pm.globals.get(\"special_timeout_test\");",
+ "",
+ "pm.test(\"Configuration is as specified\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " for (var test of includedTests) {",
+ " pm.expect(jsonData.tests.include).to.include(test);",
+ " }",
+ " for (var test of excludedTests) {",
+ " pm.expect(jsonData.tests.exclude).to.include(test);",
+ " }",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(automaticTimeout);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(manualTimeout);",
+ " pm.expect(jsonData.timeouts[specialTimeoutTest]).to.equal(specialTimeout);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ " pm.expect(jsonData.labels).to.include(\"label1\");",
+ " pm.expect(jsonData.labels).to.include(\"label2\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "2a7524e5-7394-4177-8267-42d5b32bc474",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"Session status is pending\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"pending\");",
+ "})",
+ "",
+ "pm.test(\"Start and Finish date not set\", function () {",
+ " pm.expect(jsonData.date_started).to.be.null;",
+ " pm.expect(jsonData.date_finished).to.be.null;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "448b6a0c-135d-42ef-8bc0-b4b1ce4ab8a0",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"All tests are pending tests\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.not.have.lengthOf(0);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(0);",
+ "})",
+ "",
+ "const availableTests = pm.globals.get(\"available_tests\");",
+ "const includedTests = pm.globals.get(\"included_tests\");",
+ "const excludedTests = pm.globals.get(\"excluded_tests\");",
+ "",
+ "pm.test(\"Selected subset of tests are part of the session\", function () {",
+ " for (var api of Object.keys(jsonData.pending_tests)) {",
+ " for (var includedTest of includedTests) {",
+ " if (includedTest.split(\"/\").find(part => !!part) === api) {",
+ " var includeRegExp = new RegExp(\"^\" + includedTest, \"i\");",
+ " for (var test of jsonData.pending_tests[api]) {",
+ " pm.expect(test).to.match(regex);",
+ " }",
+ " break;",
+ " }",
+ " }",
+ " for (var excludedTest of excludedTests) {",
+ " if (excludedTest.split(\"/\").find(part => !!part) === api) {",
+ " var excludeRegExp = new RegExp(\"^\" + excludedTest, \"i\");",
+ " for (var test of jsonData.pending_tests[api]) {",
+ " pm.expect(test).to.not.match(regex);",
+ " }",
+ " break;",
+ " }",
+ " }",
+ " }",
+ "});",
+ "",
+ "const sessionTests = jsonData.pending_tests;",
+ "",
+ "pm.globals.set(\"session_tests\", JSON.stringify(sessionTests));"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "a16eac8e-9609-4314-abd8-a5c470537b52",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "5df55957-0bbb-4db8-9ab9-d8a70ce37bc5",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "With Expiration",
+ "item": [
+ {
+ "name": "Create Session With Expiration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "6c4aa702-ae3a-44a4-b886-b3e883de4d32",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "893672a9-c666-4790-b6a7-e0ce7afda482",
+ "exec": [
+ "var expirationDate = Date.now() + 3000;",
+ "pm.globals.set(\"expiration_date\", expirationDate);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n\t\"expiration_date\": {{expiration_date}}\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Expired Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f6b31d37-a98b-4110-80a2-f78db6f2e26f",
+ "exec": [
+ "pm.test(\"Status code is 404\", function () {",
+ " pm.response.to.have.status(404);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "d41cf31b-c8e8-455f-aa69-527af5fbd66b",
+ "exec": [
+ "var expirationDate = pm.globals.get(\"expiration_date\");",
+ "",
+ "var timeout = expirationDate - Date.now() + 1000",
+ "",
+ "console.log(timeout)",
+ "",
+ "setTimeout(function () {}, timeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "read session",
+ "item": [
+ {
+ "name": "Prep: Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "090ca1bc-cbee-4d7b-b19f-fc1f4bb879a0",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "232aff11-3cdc-4994-af42-5e3574583814",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"tests\");",
+ " pm.expect(typeof jsonData.tests).to.equal(\"object\");",
+ " pm.expect(jsonData.tests).to.have.property(\"include\");",
+ " pm.expect(jsonData.tests.include).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData.tests).to.have.property(\"exclude\");",
+ " pm.expect(jsonData.tests.exclude).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"types\");",
+ " pm.expect(jsonData.types).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"user_agent\");",
+ " pm.expect(typeof jsonData.user_agent).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"timeouts\");",
+ " pm.expect(typeof jsonData.timeouts).to.equal(\"object\")",
+ " pm.expect(jsonData.timeouts).to.have.property(\"automatic\");",
+ " pm.expect(typeof jsonData.timeouts.automatic).to.equal(\"number\");",
+ " pm.expect(jsonData.timeouts).to.have.property(\"manual\");",
+ " pm.expect(typeof jsonData.timeouts.manual).to.equal(\"number\");",
+ " pm.expect(jsonData).to.have.property(\"browser\");",
+ " pm.expect(typeof jsonData.browser).to.equal(\"object\");",
+ " pm.expect(jsonData.browser).to.have.property(\"name\");",
+ " pm.expect(typeof jsonData.browser.name).to.equal(\"string\");",
+ " pm.expect(jsonData.browser).to.have.property(\"version\");",
+ " pm.expect(typeof jsonData.browser.version).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"reference_tokens\");",
+ " pm.expect(jsonData.reference_tokens).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"labels\");",
+ " pm.expect(jsonData.labels).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"date_created\");",
+ "});",
+ "",
+ "pm.test(\"Configuration is default\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " pm.expect(jsonData.tests.include).to.include(\"/\");",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(60000);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(300000);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ " pm.expect(jsonData.labels).to.be.empty;",
+ " pm.expect(new Date(jsonData.date_created).getTime()).to.be.below(Date.now());",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "read session status",
+ "item": [
+ {
+ "name": "Prep: Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "73cbeeb0-019b-4331-b0ec-ef9d3f1e0494",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "ba142b55-2454-4472-bcf9-669b0bb7bab9",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"status\");",
+ " pm.expect(typeof jsonData.status).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ " pm.expect(jsonData).to.have.property(\"date_started\");",
+ " pm.expect(jsonData).to.have.property(\"date_finished\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "read sessions",
+ "item": [
+ {
+ "name": "Without query parameters",
+ "item": [
+ {
+ "name": "Read sessions",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f443955f-b57e-4801-8eac-547597381615",
+ "exec": [
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"JSON structure as expected\", function() {",
+ " pm.expect(response).to.have.property(\"items\");",
+ " pm.expect(response[\"items\"]).to.be.instanceof(Array);",
+ " pm.expect(response).to.have.property(\"_links\");",
+ " pm.expect(response[\"_links\"]).to.be.instanceof(Object);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/_wave/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "_wave",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Containing created session",
+ "item": [
+ {
+ "name": "Prep: Create session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "2ca80dee-f9a5-4e0b-bead-6ab871d9edf8",
+ "exec": [
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "body": {
+ "mode": "raw",
+ "raw": "",
+ "options": {
+ "raw": {
+ "language": "json"
+ }
+ }
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read sessions",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "fbf5eafb-0ae7-4105-8df5-51bb53b0c595",
+ "exec": [
+ "const token = pm.globals.get(\"session_token\");",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"JSON structure as expected\", function() {",
+ " pm.expect(response).to.have.property(\"items\");",
+ " pm.expect(response[\"items\"]).to.be.instanceof(Array);",
+ " pm.expect(response).to.have.property(\"_links\");",
+ " pm.expect(response[\"_links\"]).to.be.instanceof(Object);",
+ "});",
+ "",
+ "pm.test(\"Created session's token in response\", function() {",
+ " pm.expect(response.items).to.contain(token);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/_wave/api/sessions?index=0&count=1000",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "_wave",
+ "api",
+ "sessions"
+ ],
+ "query": [
+ {
+ "key": "index",
+ "value": "0"
+ },
+ {
+ "key": "count",
+ "value": "1000"
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete Session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "With configuration expansion",
+ "item": [
+ {
+ "name": "Prep: Create session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "bdd64594-c874-4eb2-8b36-4c37cea0f0cb",
+ "exec": [
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "body": {
+ "mode": "raw",
+ "raw": "",
+ "options": {
+ "raw": {
+ "language": "json"
+ }
+ }
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read sessions",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "6732aedf-d99d-416f-b93d-98bc0404ef61",
+ "exec": [
+ "const token = pm.globals.get(\"session_token\");",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"JSON structure as expected\", function() {",
+ " pm.expect(response).to.have.property(\"items\");",
+ " pm.expect(response[\"items\"]).to.be.instanceof(Array);",
+ " pm.expect(response).to.have.property(\"_links\");",
+ " pm.expect(response[\"_links\"]).to.be.instanceof(Object);",
+ " pm.expect(response).to.have.property(\"_embedded\");",
+ " pm.expect(response[\"_embedded\"]).to.be.instanceof(Object);",
+ " pm.expect(response[\"_embedded\"]).to.have.property(\"configuration\");",
+ " pm.expect(response[\"_embedded\"][\"configuration\"]).to.be.instanceof(Array);",
+ "});",
+ "",
+ "pm.test(\"Created session's token in response\", function() {",
+ " pm.expect(response.items).to.contain(token);",
+ "});",
+ "",
+ "pm.test(\"Created session's token in embedded configuration\", function() {",
+ " let tokenInConfigurationList = false;",
+ " let configurations = response._embedded.configuration;",
+ " for (let configuration of configurations) {",
+ " if (configuration.token !== token) continue;",
+ " tokenInConfigurationList = true;",
+ " }",
+ " pm.expect(tokenInConfigurationList).to.equal(true);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/_wave/api/sessions?index=0&count=1000&expand=configuration",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "_wave",
+ "api",
+ "sessions"
+ ],
+ "query": [
+ {
+ "key": "index",
+ "value": "0"
+ },
+ {
+ "key": "count",
+ "value": "1000"
+ },
+ {
+ "key": "expand",
+ "value": "configuration"
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete Session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "With status expansion",
+ "item": [
+ {
+ "name": "Prep: Create session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "3319e417-dd54-495f-a452-5519d2a61082",
+ "exec": [
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "body": {
+ "mode": "raw",
+ "raw": "",
+ "options": {
+ "raw": {
+ "language": "json"
+ }
+ }
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read sessions",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "6ca3480a-3224-4bf8-a466-fdcb06338795",
+ "exec": [
+ "const token = pm.globals.get(\"session_token\");",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"JSON structure as expected\", function() {",
+ " pm.expect(response).to.have.property(\"items\");",
+ " pm.expect(response[\"items\"]).to.be.instanceof(Array);",
+ " pm.expect(response).to.have.property(\"_links\");",
+ " pm.expect(response[\"_links\"]).to.be.instanceof(Object);",
+ " pm.expect(response).to.have.property(\"_embedded\");",
+ " pm.expect(response[\"_embedded\"]).to.be.instanceof(Object);",
+ " pm.expect(response[\"_embedded\"]).to.have.property(\"status\");",
+ " pm.expect(response[\"_embedded\"][\"status\"]).to.be.instanceof(Array);",
+ "});",
+ "",
+ "pm.test(\"Created session's token in response\", function() {",
+ " pm.expect(response.items).to.contain(token);",
+ "});",
+ "",
+ "pm.test(\"Created session's token in embedded status\", function() {",
+ " let tokenInStatusList = false;",
+ " let statuses = response._embedded.status;",
+ " for (let status of statuses) {",
+ " if (status.token !== token) continue;",
+ " tokenInStatusList = true;",
+ " }",
+ " pm.expect(tokenInStatusList).to.equal(true);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/_wave/api/sessions?index=0&count=1000&expand=status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "_wave",
+ "api",
+ "sessions"
+ ],
+ "query": [
+ {
+ "key": "index",
+ "value": "0"
+ },
+ {
+ "key": "count",
+ "value": "1000"
+ },
+ {
+ "key": "expand",
+ "value": "status"
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete Session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Results API",
+ "item": [
+ {
+ "name": "upload api",
+ "item": [
+ {
+ "name": "create session",
+ "item": [
+ {
+ "name": "Prep: Read Available Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "1f3d8b67-aab8-44a0-abf4-e320a6b27755",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var availableTests = pm.response.json();",
+ "",
+ "var includedTests = [];",
+ "var excludedTests = [];",
+ "var specialTimeoutTest = \"\";",
+ "",
+ "var apis = Object.keys(availableTests);",
+ "for(var api of apis) {",
+ " if (availableTests[api].length > 50) {",
+ " var subDirs = availableTests[api].map(test => test.split(\"/\").filter(part => !!part)[1]).reduce((acc, curr) => acc.indexOf(curr) === -1 ? acc.concat([curr]) : acc, []);",
+ " if (subDirs.length > 2) {",
+ " includedTests.push(\"/\" + api);",
+ " excludedTests.push(\"/\" + api + \"/\" + subDirs[0]);",
+ " specialTimeoutTest = availableTests[api][availableTests[api].length - 1];",
+ " break;",
+ " }",
+ " ",
+ " }",
+ "}",
+ "",
+ "var usedApi = apis[0];",
+ "",
+ "pm.globals.set(\"api_name\", usedApi);",
+ "pm.globals.set(\"test\", availableTests[usedApi][0]);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session With One test",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f42479d4-f723-4843-ba91-f30019ebd975",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": [\"{{test}}\"]\n }\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "9f8fc606-25ee-45de-bcd8-57735900701b",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "be2341a8-1598-41b0-96a6-cfd049badd07",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "create results",
+ "item": [
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "897e04ae-d0a2-4a67-9488-2bdeb55db06b",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "95b382c6-9d08-41b9-80d1-40dc0217cc09",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(Object.keys(response)).to.have.lengthOf(1);",
+ " pm.expect(response).to.have.property(\"next_test\");",
+ " pm.expect(typeof response.next_test).to.equal(\"string\");",
+ "});",
+ "",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
+ "",
+ "const parameters = nextTest.split(\"?\")[1].split(\"&\");",
+ "let test = parameters.find(parameter => parameter.split(\"=\")[0] === \"test_url\").split(\"=\")[1];",
+ "test = decodeURIComponent(test);",
+ "test = \"/\" + test.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "pm.globals.set(\"current_test\", test);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "67b23ac9-5254-4b7b-90bf-930a9661a614",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"FAIL\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "upload new results",
+ "item": [
+ {
+ "name": "Download current results",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "984c79db-5ae1-4c22-8b62-6eccabfaeb98",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "var results = jsonData.results;",
+ "",
+ "pm.globals.set(\"results\", results);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "a9a46b5f-2dd3-4fa7-a482-7186ac01a4d6",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/{{api_name}}/json",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "{{api_name}}",
+ "json"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Current Compact Results",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f4974495-2e84-4962-9259-1d5501fb7a3f",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.globals.set(\"compact_results\", jsonData);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/compact",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "compact"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Upload Results Api Json",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "4a7df46d-546c-4cf4-8c6a-9b13eaa0be32",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "e2196348-a4a0-48d3-bd72-f591f0b65cf0",
+ "exec": [
+ "var results = pm.globals.get(\"results\");",
+ "var newSubResult = ",
+ " {",
+ " \"name\": \"placeholder sub test\",",
+ " \"status\": \"PASS\",",
+ " \"message\": \"this is a placeholder sub test\"",
+ " };",
+ "",
+ "results[0].subtests.push(newSubResult);",
+ "resultsString = JSON.stringify(results);",
+ "pm.globals.set(\"resultsString\", resultsString);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"results\": {{resultsString}}\n}",
+ "options": {
+ "raw": {
+ "language": "json"
+ }
+ }
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/{{api_name}}/json",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "{{api_name}}",
+ "json"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Check if changes took effect",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "8c45fd6d-ca98-4393-ace6-c01563d7c1ec",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Returned results contain new result\", function () {",
+ " var jsonData = pm.response.json();",
+ " var results = jsonData.results;",
+ "",
+ " var oldResults = pm.globals.get(\"results\");",
+ "",
+ " pm.expect(oldResults[0].subtests.length + 1).to.equal(results[0].subtests.length);",
+ "});",
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "0c9e31fc-1931-4dcd-978f-39e2a38f9a3f",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/{{api_name}}/json",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "{{api_name}}",
+ "json"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Check changes in Compact Results",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "6e30b533-18f9-41bc-af65-bdde323c215a",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "var test = pm.globals.get(\"test\");",
+ "var oldCompactResults = pm.globals.get(\"compact_results\");",
+ "",
+ "pm.test(\"passed test increased\", function () {",
+ " var api = test.split(\"/\").find(part => !!part);",
+ " pm.expect(jsonData[api].pass).to.equal(oldCompactResults[api].pass + 1);",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/compact",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "compact"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "clean up",
+ "item": [
+ {
+ "name": "Clean up: Delete session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "General API",
+ "item": [
+ {
+ "name": "server status",
+ "item": [
+ {
+ "name": "Read server status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "8b5b4c23-db0a-4b81-9349-ce8ead64bb7a",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"import_results_enabled\");",
+ " pm.expect(typeof jsonData.import_results_enabled).to.equal(\"boolean\");",
+ " pm.expect(jsonData).to.have.property(\"reports_enabled\");",
+ " pm.expect(typeof jsonData.reports_enabled).to.equal(\"boolean\");",
+ " pm.expect(jsonData).to.have.property(\"read_sessions_enabled\");",
+ " pm.expect(typeof jsonData.read_sessions_enabled).to.equal(\"boolean\");",
+ " pm.expect(jsonData).to.have.property(\"version_string\");",
+ " pm.expect(typeof jsonData.version_string).to.equal(\"string\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Devices API",
+ "item": [
+ {
+ "name": "create",
+ "item": [
+ {
+ "name": "Create device",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "286426ec-c394-48a4-a90f-c6dd80c7bef1",
+ "exec": [
+ "pm.test(\"Successful POST request\", function () {",
+ " pm.expect(pm.response.code).to.be.oneOf([200,201,202]);",
+ "});",
+ "",
+ "var response = pm.response.json();",
+ "",
+ "pm.test('Schema is valid', function() {",
+ " pm.expect(response).to.have.property(\"token\");",
+ "});",
+ "",
+ "pm.test('Data is valid', function() {",
+ " pm.expect(typeof response.token).to.equal(\"string\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ ""
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Wait device timeout",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "3bdb0f7c-1261-465e-9b02-0070005f0c43",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "a1cf5880-7b8e-4bc6-9d92-59632a719c54",
+ "exec": [
+ "var timeout = parseInt(pm.environment.get(\"device_timeout\")) + 500",
+ "",
+ "setTimeout(function () {}, timeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ ""
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "read device",
+ "item": [
+ {
+ "name": "Device not found",
+ "item": [
+ {
+ "name": "Read device",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a4401cbc-0a3a-4c2d-b1ed-e7d6867614ff",
+ "exec": [
+ "pm.test(\"Successful GET request\", function () {",
+ " pm.expect(pm.response.code).to.equal(404);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "1644fd70-b4c4-4207-8e12-67752a984417",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/invalid_token",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ "invalid_token"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Device found",
+ "item": [
+ {
+ "name": "Prep: Create device",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "c4f14137-5ced-449b-a872-b7d541e2f6af",
+ "exec": [
+ "var response = pm.response.json();",
+ "var token = response.token;",
+ "",
+ "pm.globals.set(\"device_token\", token)"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ ""
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read device",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "61a840f1-d3cd-4d7a-a54f-30da1266e48f",
+ "exec": [
+ "pm.test(\"Successful GET request\", function () {",
+ " pm.expect(pm.response.code).to.be.oneOf([200]);",
+ "});",
+ "",
+ "var response = pm.response.json();",
+ "",
+ "pm.test('Schema is valid', function() {",
+ " pm.expect(response).to.have.property(\"token\");",
+ " pm.expect(response).to.have.property(\"user_agent\");",
+ " pm.expect(response).to.have.property(\"last_active\");",
+ " pm.expect(response).to.have.property(\"name\");",
+ "});",
+ "",
+ "pm.test('Data is valid', function() {",
+ " pm.expect(typeof response.token).to.equal(\"string\");",
+ " pm.expect(typeof response.user_agent).to.equal(\"string\");",
+ " pm.expect(typeof response.last_active).to.equal(\"string\");",
+ " pm.expect(typeof response.name).to.equal(\"string\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/{{device_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ "{{device_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Wait device timeout",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "17112336-b962-472d-86a8-7872ff9876d1",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "912939cc-1685-4e1f-b0a6-696b264870ca",
+ "exec": [
+ "var timeout = parseInt(pm.environment.get(\"device_timeout\")) + 500",
+ "",
+ "setTimeout(function () {}, timeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ ""
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Timed out device",
+ "item": [
+ {
+ "name": "Prep: Create device",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "c99b9318-1e50-4702-9d81-53d87faa1080",
+ "exec": [
+ "var response = pm.response.json();",
+ "var token = response.token;",
+ "",
+ "pm.globals.set(\"device_token\", token)"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ ""
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Wait device timeout",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "89517dc9-31b4-40e7-89e4-99dd6e792e6a",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "ed20bbf6-4f31-4d91-a07b-7a0b4afcf161",
+ "exec": [
+ "var timeout = parseInt(pm.environment.get(\"device_timeout\")) + 500",
+ "",
+ "setTimeout(function () {}, timeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ ""
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read device",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "7b7496f5-2fdc-470c-b394-2a5ae7d29da2",
+ "exec": [
+ "pm.test(\"Successful GET request\", function () {",
+ " pm.expect(pm.response.code).to.equal(404);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "1807f152-7ef4-4277-9db6-1bff53d6cc8b",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/{{device_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ "{{device_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "read devices",
+ "item": [
+ {
+ "name": "Prep: Create device A",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a7803c5c-f371-4ae0-8b08-1c06995c9b60",
+ "exec": [
+ "var response = pm.response.json();",
+ "var token = response.token;",
+ "",
+ "pm.globals.set(\"device_token_a\", token)"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ ""
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Prep: Create device B",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "02450903-aef9-4ee6-9d48-94ba9b5fd54c",
+ "exec": [
+ "var response = pm.response.json();",
+ "var token = response.token;",
+ "",
+ "pm.globals.set(\"device_token_b\", token)"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ ""
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read devices",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "1612417b-bed8-48d5-a6a3-39d0d0fe6932",
+ "exec": [
+ "pm.test(\"Successful GET request\", function () {",
+ " pm.expect(pm.response.code).to.be.oneOf([200]);",
+ "});",
+ "",
+ "var response = pm.response.json();",
+ "",
+ "pm.test('Schema is valid', function() {",
+ " pm.expect(response).to.be.instanceof(Array);",
+ " response.forEach(element => {",
+ " pm.expect(element).to.have.property(\"token\");",
+ " pm.expect(element).to.have.property(\"user_agent\");",
+ " pm.expect(element).to.have.property(\"last_active\");",
+ " pm.expect(element).to.have.property(\"name\");",
+ " })",
+ "});",
+ "",
+ "pm.test('Data is valid', function() {",
+ " pm.expect(response).to.have.lengthOf(2);",
+ " var devices_left = [",
+ " pm.globals.get(\"device_token_a\"), ",
+ " pm.globals.get(\"device_token_b\")",
+ " ]",
+ " response.forEach(element => {",
+ " pm.expect(typeof element.token).to.equal(\"string\");",
+ " pm.expect(typeof element.user_agent).to.equal(\"string\");",
+ " pm.expect(typeof element.last_active).to.equal(\"string\");",
+ " pm.expect(typeof element.name).to.equal(\"string\");",
+ " pm.expect(devices_left).to.include(element.token);",
+ " devices_left.splice(devices_left.indexOf(element.token), 1);",
+ " })",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ ""
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Wait device timeout",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9b7ff623-8aac-46d1-9c56-1ceb8acc2de6",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "c0fbf742-f75e-4f3b-a576-75c9d5e0f72d",
+ "exec": [
+ "var timeout = parseInt(pm.environment.get(\"device_timeout\")) + 500",
+ "",
+ "setTimeout(function () {}, timeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/devices/",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "devices",
+ ""
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ }
+ ],
+ "protocolProfileBehavior": {}
+} \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/testing/__init__.py b/testing/web-platform/tests/tools/wave/testing/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/testing/__init__.py
diff --git a/testing/web-platform/tests/tools/wave/testing/devices_manager.py b/testing/web-platform/tests/tools/wave/testing/devices_manager.py
new file mode 100644
index 0000000000..935782d137
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/testing/devices_manager.py
@@ -0,0 +1,119 @@
+# mypy: allow-untyped-defs
+
+import time
+import uuid
+
+from threading import Timer
+
+from ..data.device import Device
+from ..testing.event_dispatcher import DEVICES, DEVICE_ADDED_EVENT, DEVICE_REMOVED_EVENT
+from ..utils.user_agent_parser import parse_user_agent
+from ..utils.serializer import serialize_device
+from ..data.exceptions.not_found_exception import NotFoundException
+
+
+DEVICE_TIMEOUT = 60000 # 60sec
+RECONNECT_TIME = 5000 # 5sec
+
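+# DevicesManager keeps an in-memory registry of registered devices keyed by
+# token, announces additions and removals through the event dispatcher, and
+# evicts devices that have been inactive for DEVICE_TIMEOUT or longer.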
+class DevicesManager:
+ def initialize(self, event_dispatcher):
+ self.devices = {}
+ self._event_dispatcher = event_dispatcher
+ self._timer = None
+
+ def create_device(self, user_agent):
+ browser = parse_user_agent(user_agent)
+ name = "{} {}".format(browser["name"], browser["version"])
+ token = str(uuid.uuid1())
+ last_active = int(time.time() * 1000)
+
+ device = Device(token, user_agent, name, last_active)
+
+ self._event_dispatcher.dispatch_event(
+ DEVICES,
+ DEVICE_ADDED_EVENT,
+ serialize_device(device))
+ self.add_to_cache(device)
+
+ self._set_timer(DEVICE_TIMEOUT)
+
+ return device
+
+ def read_device(self, token):
+ if token not in self.devices:
+ raise NotFoundException(f"Could not find device '{token}'")
+ return self.devices[token]
+
+ def read_devices(self):
+ devices = []
+ for key in self.devices:
+ devices.append(self.devices[key])
+
+ return devices
+
+ def update_device(self, device):
+ if device.token not in self.devices:
+ return
+ self.devices[device.token] = device
+
+ def delete_device(self, token):
+ if token not in self.devices:
+ return
+ device = self.devices[token]
+ del self.devices[token]
+ self._event_dispatcher.dispatch_event(
+ DEVICES,
+ DEVICE_REMOVED_EVENT,
+ serialize_device(device))
+
+ def refresh_device(self, token):
+ if token not in self.devices:
+ return
+ device = self.devices[token]
+ device.last_active = int(time.time() * 1000)
+ self.update_device(device)
+
+ def post_event(self, handle, event_type, data):
+ if event_type is None:
+ return
+ self._event_dispatcher.dispatch_event(handle, event_type, data)
+
+ def post_global_event(self, event_type, data):
+ self.post_event(DEVICES, event_type, data)
+
+ def _set_timer(self, timeout):
+ if self._timer is not None:
+ return
+
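+        # handle_timeout receives the manager instance explicitly via the
+        # Timer args below; it evicts devices that have been inactive for
+        # DEVICE_TIMEOUT or longer and re-arms the timer while any remain.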
+ def handle_timeout(self):
+ self._timer = None
+ now = int(time.time() * 1000)
+ timed_out_devices = []
+ for token in self.devices:
+ device = self.devices[token]
+ if now - device.last_active < DEVICE_TIMEOUT:
+ continue
+ timed_out_devices.append(token)
+
+ for token in timed_out_devices:
+ self.delete_device(token)
+
+ oldest_active_time = None
+ for token in self.devices:
+ device = self.devices[token]
+ if oldest_active_time is None:
+ oldest_active_time = device.last_active
+ else:
+ if oldest_active_time > device.last_active:
+ oldest_active_time = device.last_active
+ if oldest_active_time is not None:
+ self._set_timer(now - oldest_active_time)
+
+ self._timer = Timer(timeout / 1000.0, handle_timeout, [self])
+ self._timer.start()
+
+ def add_to_cache(self, device):
+ if device.token in self.devices:
+ return
+
+ self.devices[device.token] = device
diff --git a/testing/web-platform/tests/tools/wave/testing/event_dispatcher.py b/testing/web-platform/tests/tools/wave/testing/event_dispatcher.py
new file mode 100644
index 0000000000..9bfb6ed712
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/testing/event_dispatcher.py
@@ -0,0 +1,148 @@
+# mypy: allow-untyped-defs
+
+import uuid
+import time
+from threading import Timer
+
+
+STATUS_EVENT = "status"
+RESUME_EVENT = "resume"
+TEST_COMPLETED_EVENT = "test_completed"
+
+DEVICES = "devices"
+DEVICE_ADDED_EVENT = "device_added"
+DEVICE_REMOVED_EVENT = "device_removed"
+
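+# EventDispatcher fans events out to the listeners registered for a dispatcher
+# token and keeps a numbered, time-limited cache of past events so listeners
+# that reconnect with a last_event_number can catch up on missed events.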
+class EventDispatcher:
+ def __init__(self, event_cache_duration):
+ self._listeners = {}
+ self._events = {}
+ self._current_events = {}
+ self._cache_duration = event_cache_duration
+ self._cache_timeout = None
+
+ def add_event_listener(self, listener, last_event_number=None):
+ token = listener.dispatcher_token
+
+ if last_event_number is not None \
+ and token in self._current_events \
+ and self._current_events[token] > last_event_number:
+ diff_events = self._get_diff_events(token, last_event_number)
+ if len(diff_events) > 0:
+ listener.send_message(diff_events)
+ return
+
+ if token not in self._listeners:
+ self._listeners[token] = []
+ self._listeners[token].append(listener)
+ listener.token = str(uuid.uuid1())
+ return listener.token
+
+ def remove_event_listener(self, listener_token):
+ if listener_token is None:
+ return
+
+ for dispatcher_token in self._listeners:
+ for listener in self._listeners[dispatcher_token]:
+ if listener.token == listener_token:
+ self._listeners[dispatcher_token].remove(listener)
+ if len(self._listeners[dispatcher_token]) == 0:
+ del self._listeners[dispatcher_token]
+ return
+
+ def dispatch_event(self, dispatcher_token, event_type, data=None):
+ if dispatcher_token not in self._current_events:
+ self._current_events[dispatcher_token] = -1
+
+ if dispatcher_token not in self._events:
+ self._events[dispatcher_token] = []
+
+ self._add_to_cache(dispatcher_token, event_type, data)
+ self._set_cache_timer()
+
+ if dispatcher_token not in self._listeners:
+ return
+
+ event = {
+ "type": event_type,
+ "data": data,
+ "number": self._current_events[dispatcher_token]
+ }
+
+ for listener in self._listeners[dispatcher_token]:
+ listener.send_message([event])
+
+ def _get_diff_events(self, dispatcher_token, last_event_number):
+ token = dispatcher_token
+ diff_events = []
+ if token not in self._events:
+ return diff_events
+ for event in self._events[token]:
+ if event["number"] <= last_event_number:
+ continue
+ diff_events.append({
+ "type": event["type"],
+ "data": event["data"],
+ "number": event["number"]
+ })
+ return diff_events
+
+ def _set_cache_timer(self):
+ if self._cache_timeout is not None:
+ return
+
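+        # Arm a single timer for the cached event that expires soonest; its
+        # handler purges expired events and re-arms the timer for the next one.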
+ events = self._read_cached_events()
+ if len(events) == 0:
+ return
+
+ next_event = events[0]
+ for event in events:
+ if next_event["expiration_date"] > event["expiration_date"]:
+ next_event = event
+
+ timeout = next_event["expiration_date"] / 1000.0 - time.time()
+ if timeout < 0:
+ timeout = 0
+
+ def handle_timeout(self):
+ self._delete_expired_events()
+ self._cache_timeout = None
+ self._set_cache_timer()
+
+ self._cache_timeout = Timer(timeout, handle_timeout, [self])
+ self._cache_timeout.start()
+
+ def _delete_expired_events(self):
+ events = self._read_cached_events()
+ now = int(time.time() * 1000)
+
+ for event in events:
+ if event["expiration_date"] < now:
+ self._remove_from_cache(event)
+
+ def _add_to_cache(self, dispatcher_token, event_type, data):
+ self._current_events[dispatcher_token] += 1
+ current_event_number = self._current_events[dispatcher_token]
+ event = {
+ "type": event_type,
+ "data": data,
+ "number": current_event_number,
+ "expiration_date": int(time.time() * 1000) + self._cache_duration
+ }
+ self._events[dispatcher_token].append(event)
+
+ def _remove_from_cache(self, event):
+ for dispatcher_token in self._events:
+ for cached_event in self._events[dispatcher_token]:
+ if cached_event is not event:
+ continue
+ self._events[dispatcher_token].remove(cached_event)
+ if len(self._events[dispatcher_token]) == 0:
+ del self._events[dispatcher_token]
+ return
+
+ def _read_cached_events(self):
+ events = []
+ for dispatcher_token in self._events:
+ events = events + self._events[dispatcher_token]
+ return events
diff --git a/testing/web-platform/tests/tools/wave/testing/results_manager.py b/testing/web-platform/tests/tools/wave/testing/results_manager.py
new file mode 100644
index 0000000000..13dfcbad7d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/testing/results_manager.py
@@ -0,0 +1,674 @@
+# mypy: allow-untyped-defs
+
+import os
+import shutil
+import re
+import json
+import hashlib
+import zipfile
+import time
+from threading import Timer
+
+from ..utils.user_agent_parser import parse_user_agent, abbreviate_browser_name
+from ..utils.serializer import serialize_session
+from ..utils.deserializer import deserialize_session
+from ..data.exceptions.invalid_data_exception import InvalidDataException
+from ..data.exceptions.duplicate_exception import DuplicateException
+from ..data.exceptions.not_found_exception import NotFoundException
+from ..data.exceptions.permission_denied_exception import PermissionDeniedException
+from .wpt_report import generate_report, generate_multi_report
+from ..data.session import COMPLETED
+
+WAVE_SRC_DIR = "./tools/wave"
+RESULTS_FILE_REGEX = r"^\w\w\d\d\d?\.json$"
+RESULTS_FILE_PATTERN = re.compile(RESULTS_FILE_REGEX)
+SESSION_RESULTS_TIMEOUT = 60*30 # 30min
+
+
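+# ResultsManager caches incoming test results per session token, periodically
+# persists them as per-API JSON files under the results directory, and handles
+# report generation as well as import and export of result archives.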
+class ResultsManager:
+ def initialize(
+ self,
+ results_directory_path,
+ sessions_manager,
+ tests_manager,
+ import_results_enabled,
+ reports_enabled,
+ persisting_interval
+ ):
+ self._results_directory_path = results_directory_path
+ self._sessions_manager = sessions_manager
+ self._tests_manager = tests_manager
+ self._import_results_enabled = import_results_enabled
+ self._reports_enabled = reports_enabled
+ self._results = {}
+ self._persisting_interval = persisting_interval
+ self._timeouts = {}
+
+ def create_result(self, token, data):
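+        # Record one incoming result: mark the test completed, cache the
+        # result and update the per-API counters; results are persisted at
+        # the configured interval, a report is generated once an API is
+        # complete, and the session is completed once every API is done.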
+ result = self.prepare_result(data)
+ test = result["test"]
+
+ session = self._sessions_manager.read_session(token)
+
+ if session is None:
+ return
+ if not self._sessions_manager.test_in_session(test, session):
+ return
+ if not self._sessions_manager.is_test_running(test, session):
+ return
+ self._tests_manager.complete_test(test, session)
+ self._push_to_cache(token, result)
+ self._update_test_state(result, session)
+
+ session.last_completed_test = test
+ session.recent_completed_count += 1
+ self._sessions_manager.update_session(session)
+
+ api = next((p for p in test.split("/") if p != ""), None)
+ if session.recent_completed_count >= self._persisting_interval \
+ or self._sessions_manager.is_api_complete(api, session):
+ self.persist_session(session)
+
+ if not self._sessions_manager.is_api_complete(api, session):
+ return
+ self.generate_report(token, api)
+
+ test_state = session.test_state
+ apis = list(test_state.keys())
+ all_apis_complete = True
+ for api in apis:
+ if not self._sessions_manager.is_api_complete(api, session):
+ all_apis_complete = False
+ if not all_apis_complete:
+ return
+ self._sessions_manager.complete_session(token)
+ self.create_info_file(session)
+
+ def read_results(self, token, filter_path=None):
+ filter_api = None
+ if filter_path is not None:
+ filter_api = next((p for p in filter_path.split("/")
+                               if p != ""), None)
+ results = self._read_from_cache(token)
+ if results == []:
+ results = self.load_results(token)
+ self._set_session_cache(token, results)
+
+ filtered_results = {}
+
+ for api in results:
+ if filter_api is not None and api.lower() != filter_api.lower():
+ continue
+ for result in results[api]:
+ if filter_path is not None:
+ pattern = re.compile("^" + filter_path.replace(".", ""))
+ if pattern.match(result["test"].replace(".", "")) \
+ is None:
+ continue
+ if api not in filtered_results:
+ filtered_results[api] = []
+ filtered_results[api].append(result)
+
+ return filtered_results
+
+ def read_flattened_results(self, token):
+ session = self._sessions_manager.read_session(token)
+ return session.test_state
+
+ def _update_test_state(self, result, session):
+ api = next((p for p in result["test"].split("/") if p != ""), None)
+ if "subtests" not in result:
+ if result["status"] == "OK":
+ session.test_state[api]["pass"] += 1
+ elif result["status"] == "ERROR":
+ session.test_state[api]["fail"] += 1
+ elif result["status"] == "TIMEOUT":
+ session.test_state[api]["timeout"] += 1
+ elif result["status"] == "NOTRUN":
+ session.test_state[api]["not_run"] += 1
+ else:
+ for test in result["subtests"]:
+ if test["status"] == "PASS":
+ session.test_state[api]["pass"] += 1
+ elif test["status"] == "FAIL":
+ session.test_state[api]["fail"] += 1
+ elif test["status"] == "TIMEOUT":
+ session.test_state[api]["timeout"] += 1
+ elif test["status"] == "NOTRUN":
+ session.test_state[api]["not_run"] += 1
+
+ session.test_state[api]["complete"] += 1
+ self._sessions_manager.update_session(session)
+
+ def parse_test_state(self, results):
+ test_state = {}
+ for api in list(results.keys()):
+ test_state[api] = {
+ "pass": 0,
+ "fail": 0,
+ "timeout": 0,
+ "not_run": 0,
+ "total": len(results[api]),
+ "complete": 0,
+ }
+ for result in results[api]:
+ if "subtests" not in result:
+ if result["status"] == "OK":
+ test_state[api]["pass"] += 1
+ elif result["status"] == "ERROR":
+ test_state[api]["fail"] += 1
+ elif result["status"] == "TIMEOUT":
+ test_state[api]["timeout"] += 1
+ elif result["status"] == "NOTRUN":
+ test_state[api]["not_run"] += 1
+ else:
+ for test in result["subtests"]:
+ if test["status"] == "PASS":
+ test_state[api]["pass"] += 1
+ elif test["status"] == "FAIL":
+ test_state[api]["fail"] += 1
+ elif test["status"] == "TIMEOUT":
+ test_state[api]["timeout"] += 1
+ elif test["status"] == "NOTRUN":
+ test_state[api]["not_run"] += 1
+ test_state[api]["complete"] += 1
+ return test_state
+
+ def read_common_passed_tests(self, tokens=None):
+ if tokens is None or len(tokens) == 0:
+ return None
+
+ session_results = []
+
+ for token in tokens:
+ session_result = self.read_results(token)
+ session_results.append(session_result)
+
+ passed_tests = {}
+ failed_tests = {}
+
+ for result in session_results:
+ for api in result:
+ if api not in passed_tests:
+ passed_tests[api] = []
+ if api not in failed_tests:
+ failed_tests[api] = []
+
+ for api_result in result[api]:
+ passed = True
+ for subtest in api_result["subtests"]:
+ if subtest["status"] == "PASS":
+ continue
+ passed = False
+ break
+
+ test = api_result["test"]
+
+ if passed:
+ if test in failed_tests[api]:
+ continue
+ if test in passed_tests[api]:
+ continue
+ passed_tests[api].append(test)
+ else:
+ if test in passed_tests[api]:
+ passed_tests[api].remove(test)
+ if test in failed_tests[api]:
+ continue
+ failed_tests[api].append(test)
+ return passed_tests
+
+ def read_results_wpt_report_uri(self, token, api):
+ api_directory = os.path.join(self._results_directory_path, token, api)
+ if not os.path.isdir(api_directory):
+ return None
+ return f"/results/{token}/{api}/all.html"
+
+ def read_results_wpt_multi_report_uri(self, tokens, api):
+ comparison_directory_name = self.get_comparison_identifier(tokens)
+
+ relative_api_directory_path = os.path.join(comparison_directory_name,
+ api)
+
+ api_directory_path = os.path.join(
+ self._results_directory_path,
+ relative_api_directory_path
+ )
+
+ if not os.path.isdir(api_directory_path):
+ self.generate_multi_report(tokens, api)
+
+ return f"/results/{relative_api_directory_path}/all.html"
+
+ def delete_results(self, token):
+ results_directory = os.path.join(self._results_directory_path, token)
+ if not os.path.isdir(results_directory):
+ return
+ shutil.rmtree(results_directory)
+
+ def persist_session(self, session):
+ token = session.token
+ if token not in self._results:
+ return
+ for api in list(self._results[token].keys())[:]:
+ self.save_api_results(token, api)
+ self.create_info_file(session)
+ session.recent_completed_count = 0
+ self._sessions_manager.update_session(session)
+
+ def load_results(self, token):
+ results_directory = os.path.join(self._results_directory_path, token)
+ if not os.path.isdir(results_directory):
+ return {}
+ results = {}
+ apis = os.listdir(results_directory)
+ for api in apis:
+ api_directory = os.path.join(results_directory, api)
+ if not os.path.isdir(api_directory):
+ continue
+ files = os.listdir(api_directory)
+ for file_name in files:
+ if re.match(r"\w\w\d{1,3}\.json", file_name) is None:
+ continue
+ file_path = os.path.join(api_directory, file_name)
+ data = None
+ with open(file_path) as file:
+ data = file.read()
+ result = json.loads(data)
+ results[api] = result["results"]
+ break
+ return results
+
+ def _push_to_cache(self, token, result):
+ if token is None:
+ return
+ if token not in self._results:
+ self._results[token] = {}
+ test = result["test"]
+ api = next((p for p in test.split("/") if p != ""), None)
+ if api not in self._results[token]:
+ self._results[token][api] = []
+ self._results[token][api].append(result)
+ self._set_timeout(token)
+
+ def _set_session_cache(self, token, results):
+ if token is None:
+ return
+ self._results[token] = results
+ self._set_timeout(token)
+
+ def _read_from_cache(self, token):
+ if token is None:
+ return []
+ if token not in self._results:
+ return []
+ self._set_timeout(token)
+ return self._results[token]
+
+ def _clear_session_cache(self, token):
+ if token is None:
+ return
+ if token not in self._results:
+ return
+ del self._results[token]
+
+ def _combine_results_by_api(self, result_a, result_b):
+ combined_result = {}
+ for api in result_a:
+ if api in result_b:
+ combined_result[api] = result_a[api] + result_b[api]
+ else:
+ combined_result[api] = result_a[api]
+
+ for api in result_b:
+ if api in combined_result:
+ continue
+ combined_result[api] = result_b[api]
+
+ return combined_result
+
+ def prepare_result(self, result):
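+        # Normalize a raw result: numeric and string status values are mapped
+        # to their canonical string names, "tests" is renamed to "subtests",
+        # and stack traces are stripped before the result is stored.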
+ harness_status_map = {
+ 0: "OK",
+ 1: "ERROR",
+ 2: "TIMEOUT",
+ 3: "NOTRUN",
+ "OK": "OK",
+ "ERROR": "ERROR",
+ "TIMEOUT": "TIMEOUT",
+ "NOTRUN": "NOTRUN"
+ }
+
+ subtest_status_map = {
+ 0: "PASS",
+ 1: "FAIL",
+ 2: "TIMEOUT",
+ 3: "NOTRUN",
+ "PASS": "PASS",
+ "FAIL": "FAIL",
+ "TIMEOUT": "TIMEOUT",
+ "NOTRUN": "NOTRUN"
+ }
+
+ if "tests" in result:
+ for test in result["tests"]:
+ test["status"] = subtest_status_map[test["status"]]
+ if "stack" in test:
+ del test["stack"]
+ result["subtests"] = result["tests"]
+ del result["tests"]
+
+ if "stack" in result:
+ del result["stack"]
+ result["status"] = harness_status_map[result["status"]]
+
+ return result
+
+ def get_json_path(self, token, api):
+ session = self._sessions_manager.read_session(token)
+ api_directory = os.path.join(self._results_directory_path, token, api)
+
+ browser = parse_user_agent(session.user_agent)
+ abbreviation = abbreviate_browser_name(browser["name"])
+ version = browser["version"]
+ if "." in version:
+ version = version.split(".")[0]
+ version = version.zfill(2)
+ file_name = abbreviation + version + ".json"
+
+ return os.path.join(api_directory, file_name)
+
+ def save_api_results(self, token, api):
+ results = self._read_from_cache(token)
+ if api not in results:
+ return
+ results = results[api]
+ session = self._sessions_manager.read_session(token)
+ self._ensure_results_directory_existence(api, token, session)
+
+ file_path = self.get_json_path(token, api)
+ file_exists = os.path.isfile(file_path)
+
+ with open(file_path, "r+" if file_exists else "w") as file:
+ api_results = None
+ if file_exists:
+ data = file.read()
+ api_results = json.loads(data)
+ else:
+ api_results = {"results": []}
+
+ api_results["results"] = api_results["results"] + results
+
+ file.seek(0)
+ file.truncate()
+ file.write(json.dumps(api_results, indent=4, separators=(',', ': ')))
+
+ def _ensure_results_directory_existence(self, api, token, session):
+ directory = os.path.join(self._results_directory_path, token, api)
+ if not os.path.exists(directory):
+ os.makedirs(directory)
+
+ def generate_report(self, token, api):
+ file_path = self.get_json_path(token, api)
+ dir_path = os.path.dirname(file_path)
+ generate_report(
+ input_json_directory_path=dir_path,
+ output_html_directory_path=dir_path,
+ spec_name=api
+ )
+
+ def generate_multi_report(self, tokens, api):
+ comparison_directory_name = self.get_comparison_identifier(tokens)
+
+ api_directory_path = os.path.join(
+ self._results_directory_path,
+ comparison_directory_name,
+ api
+ )
+
+ if os.path.isdir(api_directory_path):
+ return None
+
+ os.makedirs(api_directory_path)
+
+ result_json_files = []
+ for token in tokens:
+ result_json_files.append({
+ "token": token,
+ "path": self.get_json_path(token, api)
+ })
+ for file in result_json_files:
+ if not os.path.isfile(file["path"]):
+ return None
+ generate_multi_report(
+ output_html_directory_path=api_directory_path,
+ spec_name=api,
+ result_json_files=result_json_files
+ )
+
+ def get_comparison_identifier(self, tokens, ref_tokens=None):
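+        # Derive a stable directory name for a comparison: the sorted short
+        # forms of the session tokens plus the first eight hex digits of a
+        # SHA-1 over all reference and session tokens.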
+ if ref_tokens is None:
+ ref_tokens = []
+ comparison_directory = "comparison"
+ tokens.sort()
+ for token in tokens:
+ short_token = token.split("-")[0]
+ comparison_directory += "-" + short_token
+ hash = hashlib.sha1()
+ ref_tokens.sort()
+ for token in ref_tokens:
+ hash.update(token.encode("utf-8"))
+ for token in tokens:
+ hash.update(token.encode("utf-8"))
+ hash = hash.hexdigest()
+ comparison_directory += hash[0:8]
+ return comparison_directory
+
+ def create_info_file(self, session):
+ token = session.token
+ info_file_path = os.path.join(
+ self._results_directory_path,
+ token,
+ "info.json"
+ )
+ info = serialize_session(session)
+ del info["running_tests"]
+ del info["pending_tests"]
+
+ file_content = json.dumps(info, indent=2)
+ with open(info_file_path, "w+") as file:
+ file.write(file_content)
+
+ def export_results_api_json(self, token, api):
+ results = self.read_results(token)
+ if api in results:
+ return json.dumps({"results": results[api]}, indent=4)
+
+ file_path = self.get_json_path(token, api)
+ if not os.path.isfile(file_path):
+ return None
+
+ with open(file_path) as file:
+ blob = file.read()
+ return blob
+
+ def export_results_all_api_jsons(self, token):
+ self._sessions_manager.read_session(token)
+ results_directory = os.path.join(self._results_directory_path, token)
+ results = self.read_results(token)
+
+ zip_file_name = str(time.time()) + ".zip"
+ zip = zipfile.ZipFile(zip_file_name, "w")
+ for api, result in results.items():
+ zip.writestr(
+ api + ".json",
+ json.dumps({"results": result}, indent=4),
+ zipfile.ZIP_DEFLATED
+ )
+
+ results_directory = os.path.join(self._results_directory_path, token)
+ if os.path.isdir(results_directory):
+ persisted_apis = os.listdir(results_directory)
+
+ for api in persisted_apis:
+ if api in results:
+ continue
+ blob = self.export_results_api_json(token, api)
+ if blob is None:
+ continue
+ zip.writestr(api + ".json", blob, zipfile.ZIP_DEFLATED)
+
+ zip.close()
+
+ with open(zip_file_name, "rb") as file:
+ blob = file.read()
+ os.remove(zip_file_name)
+
+ return blob
+
+ def export_results(self, token):
+ if token is None:
+ return
+ session = self._sessions_manager.read_session(token)
+ if session.status != COMPLETED:
+ return None
+
+ session_results_directory = os.path.join(self._results_directory_path,
+ token)
+ if not os.path.isdir(session_results_directory):
+ return None
+
+ zip_file_name = str(time.time()) + ".zip"
+ zip = zipfile.ZipFile(zip_file_name, "w")
+ for root, dirs, files in os.walk(session_results_directory):
+ for file in files:
+ file_name = os.path.join(root.split(token)[1], file)
+ file_path = os.path.join(root, file)
+ zip.write(file_path, file_name, zipfile.ZIP_DEFLATED)
+ zip.close()
+
+        with open(zip_file_name, "rb") as file:
+ blob = file.read()
+ os.remove(zip_file_name)
+
+ return blob
+
+ def export_results_overview(self, token):
+ session = self._sessions_manager.read_session(token)
+ if session is None:
+ raise NotFoundException(f"Could not find session {token}")
+
+ tmp_file_name = str(time.time()) + ".zip"
+ zip = zipfile.ZipFile(tmp_file_name, "w")
+
+ flattened_results = self.read_flattened_results(token)
+ results_script = "const results = " + json.dumps(flattened_results,
+ indent=4)
+ zip.writestr("results.json.js", results_script)
+
+ session_dict = serialize_session(session)
+ del session_dict["running_tests"]
+ del session_dict["pending_tests"]
+ details_script = "const details = " + json.dumps(session_dict,
+ indent=4)
+ zip.writestr("details.json.js", details_script)
+
+ for root, dirs, files in os.walk(os.path.join(WAVE_SRC_DIR, "export")):
+ for file in files:
+ file_name = os.path.join(root.split("export")[1], file)
+ file_path = os.path.join(root, file)
+ zip.write(file_path, file_name, zipfile.ZIP_DEFLATED)
+
+ zip.close()
+
+ with open(tmp_file_name, "rb") as file:
+ blob = file.read()
+
+ self.remove_tmp_files()
+
+ return blob
+
+ def is_import_results_enabled(self):
+ return self._import_results_enabled
+
+ def are_reports_enabled(self):
+ return self._reports_enabled
+
+ def load_session_from_info_file(self, info_file_path):
+ if not os.path.isfile(info_file_path):
+ return None
+
+ with open(info_file_path) as info_file:
+ data = info_file.read()
+ info_file.close()
+ info = json.loads(str(data))
+ return deserialize_session(info)
+
+ def import_results(self, blob):
+        if not self.is_import_results_enabled():
+ raise PermissionDeniedException()
+ tmp_file_name = f"{str(time.time())}.zip"
+
+        with open(tmp_file_name, "wb") as file:
+ file.write(blob)
+
+ zip = zipfile.ZipFile(tmp_file_name)
+ if "info.json" not in zip.namelist():
+ raise InvalidDataException("Invalid session ZIP!")
+ zipped_info = zip.open("info.json")
+ info = zipped_info.read()
+ zipped_info.close()
+ parsed_info = json.loads(info)
+ token = parsed_info["token"]
+ session = self._sessions_manager.read_session(token)
+ if session is not None:
+ raise DuplicateException("Session already exists!")
+ destination_path = os.path.join(self._results_directory_path, token)
+ os.makedirs(destination_path)
+ zip.extractall(destination_path)
+ self.remove_tmp_files()
+ self.load_results(token)
+ return token
+
+ def import_results_api_json(self, token, api, blob):
+        if not self.is_import_results_enabled():
+ raise PermissionDeniedException()
+ destination_path = os.path.join(self._results_directory_path, token, api)
+ files = os.listdir(destination_path)
+ file_name = ""
+ for file in files:
+ if RESULTS_FILE_PATTERN.match(file):
+ file_name = file
+ break
+ destination_file_path = os.path.join(destination_path, file_name)
+ with open(destination_file_path, "wb") as file:
+ file.write(blob)
+
+ self.generate_report(token, api)
+
+ session = self._sessions_manager.read_session(token)
+ if session is None:
+ raise NotFoundException()
+
+ results = self.load_results(token)
+ test_state = self.parse_test_state(results)
+ session.test_state = test_state
+
+ self._sessions_manager.update_session(session)
+
+ def remove_tmp_files(self):
+ files = os.listdir(".")
+
+ for file in files:
+            if re.match(r"\d{10}\.\d+\.zip", file) is None:
+ continue
+ os.remove(file)
+
+ def _set_timeout(self, token):
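+        # (Re)arm the cache timer for this session: cached results are
+        # dropped after SESSION_RESULTS_TIMEOUT seconds without being read
+        # or written.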
+ if token in self._timeouts:
+ self._timeouts[token].cancel()
+
+ def handler(self, token):
+ self._clear_session_cache(token)
+
+        self._timeouts[token] = Timer(SESSION_RESULTS_TIMEOUT, handler, [self, token])
+        self._timeouts[token].start()
diff --git a/testing/web-platform/tests/tools/wave/testing/sessions_manager.py b/testing/web-platform/tests/tools/wave/testing/sessions_manager.py
new file mode 100644
index 0000000000..093c3cffe8
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/testing/sessions_manager.py
@@ -0,0 +1,478 @@
+# mypy: allow-untyped-defs
+
+import uuid
+import time
+import os
+import json
+import re
+
+from threading import Timer
+
+from .test_loader import AUTOMATIC, MANUAL
+from ..data.session import Session, PENDING, PAUSED, RUNNING, ABORTED, COMPLETED
+from ..utils.user_agent_parser import parse_user_agent
+from .event_dispatcher import STATUS_EVENT, RESUME_EVENT
+from ..data.exceptions.not_found_exception import NotFoundException
+from ..data.exceptions.invalid_data_exception import InvalidDataException
+from ..utils.deserializer import deserialize_session
+
+DEFAULT_TEST_TYPES = [AUTOMATIC, MANUAL]
+DEFAULT_TEST_PATHS = ["/"]
+DEFAULT_TEST_AUTOMATIC_TIMEOUT = 60000
+DEFAULT_TEST_MANUAL_TIMEOUT = 300000
+
+
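+# SessionsManager creates and caches test sessions, loads previously persisted
+# sessions from the results directory on demand, and tracks their
+# configuration and lifecycle state (PENDING, RUNNING, PAUSED, COMPLETED,
+# ABORTED).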
+class SessionsManager:
+ def initialize(self,
+ test_loader,
+ event_dispatcher,
+ tests_manager,
+ results_directory,
+ results_manager,
+ configuration):
+ self._test_loader = test_loader
+ self._sessions = {}
+ self._expiration_timeout = None
+ self._event_dispatcher = event_dispatcher
+ self._tests_manager = tests_manager
+ self._results_directory = results_directory
+ self._results_manager = results_manager
+ self._configuration = configuration
+
+ def create_session(
+ self,
+ tests=None,
+ test_types=None,
+ timeouts=None,
+ reference_tokens=None,
+ user_agent=None,
+ labels=None,
+ expiration_date=None,
+ type=None
+ ):
+ if tests is None:
+ tests = {}
+ if timeouts is None:
+ timeouts = {}
+ if reference_tokens is None:
+ reference_tokens = []
+ if user_agent is None:
+ user_agent = ""
+ if labels is None:
+ labels = []
+
+ if "include" not in tests:
+ tests["include"] = DEFAULT_TEST_PATHS
+ if "exclude" not in tests:
+ tests["exclude"] = []
+ if "automatic" not in timeouts:
+ timeouts["automatic"] = self._configuration["timeouts"]["automatic"]
+ if "manual" not in timeouts:
+ timeouts["manual"] = self._configuration["timeouts"]["manual"]
+ if test_types is None:
+ test_types = DEFAULT_TEST_TYPES
+
+ for test_type in test_types:
+ if test_type != "automatic" and test_type != "manual":
+ raise InvalidDataException(f"Unknown type '{test_type}'")
+
+ token = str(uuid.uuid1())
+ pending_tests = self._test_loader.get_tests(
+ test_types,
+ include_list=tests["include"],
+ exclude_list=tests["exclude"],
+ reference_tokens=reference_tokens)
+
+ browser = parse_user_agent(user_agent)
+
+ test_files_count = self._tests_manager.calculate_test_files_count(
+ pending_tests
+ )
+
+ test_state = {}
+ for api in test_files_count:
+ test_state[api] = {
+ "pass": 0,
+ "fail": 0,
+ "timeout": 0,
+ "not_run": 0,
+ "total": test_files_count[api],
+ "complete": 0}
+
+ date_created = int(time.time() * 1000)
+
+ session = Session(
+ token=token,
+ tests=tests,
+ user_agent=user_agent,
+ browser=browser,
+ test_types=test_types,
+ timeouts=timeouts,
+ pending_tests=pending_tests,
+ running_tests={},
+ test_state=test_state,
+ status=PENDING,
+ reference_tokens=reference_tokens,
+ labels=labels,
+ type=type,
+ expiration_date=expiration_date,
+ date_created=date_created
+ )
+
+ self._push_to_cache(session)
+ if expiration_date is not None:
+ self._set_expiration_timer()
+
+ return session
+
+ def read_session(self, token):
+ if token is None:
+ return None
+ session = self._read_from_cache(token)
+ if session is None or session.test_state is None:
+ print("loading session from file system")
+ session = self.load_session(token)
+ if session is not None:
+ self._push_to_cache(session)
+ return session
+
+ def read_sessions(self, index=None, count=None):
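+        # Paginated listing: refresh the session info cache from disk, skip
+        # the first `index` sessions and return at most `count` tokens.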
+ if index is None:
+ index = 0
+ if count is None:
+ count = 10
+ self.load_all_sessions_info()
+ sessions = []
+ for it_index, token in enumerate(self._sessions):
+ if it_index < index:
+ continue
+ if len(sessions) == count:
+ break
+ sessions.append(token)
+ return sessions
+
+ def read_session_status(self, token):
+ if token is None:
+ return None
+ session = self._read_from_cache(token)
+ if session is None:
+ session = self.load_session_info(token)
+ if session is None:
+ return None
+ if session.test_state is None:
+ session = self.load_session(token)
+ if session is not None:
+ self._push_to_cache(session)
+ return session
+
+ def read_public_sessions(self):
+ self.load_all_sessions_info()
+ session_tokens = []
+ for token in self._sessions:
+ session = self._sessions[token]
+ if not session.is_public:
+ continue
+ session_tokens.append(token)
+
+ return session_tokens
+
+ def update_session(self, session):
+ self._push_to_cache(session)
+
+ def update_session_configuration(
+ self, token, tests, test_types, timeouts, reference_tokens, type
+ ):
+ session = self.read_session(token)
+ if session is None:
+ raise NotFoundException("Could not find session")
+ if session.status != PENDING:
+ return
+
+ if tests is not None:
+ if "include" not in tests:
+ tests["include"] = session.tests["include"]
+ if "exclude" not in tests:
+ tests["exclude"] = session.tests["exclude"]
+ if reference_tokens is None:
+ reference_tokens = session.reference_tokens
+ if test_types is None:
+ test_types = session.test_types
+
+ pending_tests = self._test_loader.get_tests(
+ include_list=tests["include"],
+ exclude_list=tests["exclude"],
+ reference_tokens=reference_tokens,
+ test_types=test_types
+ )
+ session.pending_tests = pending_tests
+ session.tests = tests
+ test_files_count = self._tests_manager.calculate_test_files_count(
+ pending_tests)
+ test_state = {}
+ for api in test_files_count:
+ test_state[api] = {
+ "pass": 0,
+ "fail": 0,
+ "timeout": 0,
+ "not_run": 0,
+ "total": test_files_count[api],
+ "complete": 0,
+ }
+ session.test_state = test_state
+
+ if test_types is not None:
+ session.test_types = test_types
+ if timeouts is not None:
+ if AUTOMATIC not in timeouts:
+ timeouts[AUTOMATIC] = session.timeouts[AUTOMATIC]
+ if MANUAL not in timeouts:
+ timeouts[MANUAL] = session.timeouts[MANUAL]
+ session.timeouts = timeouts
+ if reference_tokens is not None:
+ session.reference_tokens = reference_tokens
+ if type is not None:
+ session.type = type
+
+ self._push_to_cache(session)
+ return session
+
+ def update_labels(self, token, labels):
+ if token is None or labels is None:
+ return
+ session = self.read_session(token)
+ if session is None:
+ return
+ if session.is_public:
+ return
+ session.labels = labels
+ self._push_to_cache(session)
+
+ def delete_session(self, token):
+ session = self.read_session(token)
+ if session is None:
+ return
+ if session.is_public is True:
+ return
+ del self._sessions[token]
+
+ def add_session(self, session):
+ if session is None:
+ return
+ self._push_to_cache(session)
+
+ def load_all_sessions(self):
+ if not os.path.isdir(self._results_directory):
+ return
+ tokens = os.listdir(self._results_directory)
+ for token in tokens:
+ self.load_session(token)
+
+ def load_all_sessions_info(self):
+ if not os.path.isdir(self._results_directory):
+ return
+ tokens = os.listdir(self._results_directory)
+ for token in tokens:
+ if token in self._sessions:
+ continue
+ self.load_session_info(token)
+
+ def load_session(self, token):
+ session = self.load_session_info(token)
+ if session is None:
+ return None
+
+ if session.test_state is None:
+ results = self._results_manager.load_results(token)
+ test_state = self._results_manager.parse_test_state(results)
+ session.test_state = test_state
+ self._results_manager.create_info_file(session)
+
+ self._push_to_cache(session)
+ return session
+
+ def load_session_info(self, token):
+ result_directory = os.path.join(self._results_directory, token)
+ if not os.path.isdir(result_directory):
+ return None
+ info_file = os.path.join(result_directory, "info.json")
+ if not os.path.isfile(info_file):
+ return None
+
+ info_data = None
+ with open(info_file) as file:
+ info_data = file.read()
+ parsed_info_data = json.loads(info_data)
+
+ session = deserialize_session(parsed_info_data)
+ self._push_to_cache(session)
+ return session
+
+ def _push_to_cache(self, session):
+ self._sessions[session.token] = session
+
+ def _read_from_cache(self, token):
+ if token not in self._sessions:
+ return None
+ return self._sessions[token]
+
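+    # Arms a timer for the session that expires next; when it fires, expired
+    # sessions are deleted and the timer is re-armed for the remaining ones.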
+ def _set_expiration_timer(self):
+ expiring_sessions = self._read_expiring_sessions()
+ if len(expiring_sessions) == 0:
+ return
+
+ next_session = expiring_sessions[0]
+ for session in expiring_sessions:
+ if next_session.expiration_date > session.expiration_date:
+ next_session = session
+
+ if self._expiration_timeout is not None:
+ self._expiration_timeout.cancel()
+
+ timeout = next_session.expiration_date / 1000 - time.time()
+ if timeout < 0:
+ timeout = 0
+
+ def handle_timeout(self):
+ self._delete_expired_sessions()
+ self._set_expiration_timer()
+
+ self._expiration_timeout = Timer(timeout, handle_timeout, [self])
+ self._expiration_timeout.start()
+
+ def _delete_expired_sessions(self):
+ expiring_sessions = self._read_expiring_sessions()
+ now = int(time.time() * 1000)
+
+ for session in expiring_sessions:
+ if session.expiration_date < now:
+ self.delete_session(session.token)
+
+ def _read_expiring_sessions(self):
+ expiring_sessions = []
+ for token in self._sessions:
+ session = self._sessions[token]
+ if session.expiration_date is None:
+ continue
+ expiring_sessions.append(session)
+ return expiring_sessions
+
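+    # Moves a PENDING or PAUSED session into the RUNNING state, clears any
+    # expiration date on the first start and notifies listeners via a status
+    # event.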
+ def start_session(self, token):
+ session = self.read_session(token)
+
+ if session is None:
+ return
+
+ if session.status != PENDING and session.status != PAUSED:
+ return
+
+ if session.status == PENDING:
+ session.date_started = int(time.time() * 1000)
+ session.expiration_date = None
+
+ session.status = RUNNING
+ self.update_session(session)
+
+ self._event_dispatcher.dispatch_event(
+ token,
+ event_type=STATUS_EVENT,
+ data=session.status
+ )
+
+ def pause_session(self, token):
+ session = self.read_session(token)
+        if session is None or session.status != RUNNING:
+ return
+ session.status = PAUSED
+ self.update_session(session)
+ self._event_dispatcher.dispatch_event(
+ token,
+ event_type=STATUS_EVENT,
+ data=session.status
+ )
+ self._results_manager.persist_session(session)
+
+ def stop_session(self, token):
+ session = self.read_session(token)
+        if session is None:
+            return
+        if session.status == ABORTED or session.status == COMPLETED:
+ return
+ session.status = ABORTED
+ session.date_finished = int(time.time() * 1000)
+ self.update_session(session)
+ self._event_dispatcher.dispatch_event(
+ token,
+ event_type=STATUS_EVENT,
+ data=session.status
+ )
+
+ def resume_session(self, token, resume_token):
+ session = self.read_session(token)
+        if session is None or session.status != PENDING:
+ return
+ self._event_dispatcher.dispatch_event(
+ token,
+ event_type=RESUME_EVENT,
+ data=resume_token
+ )
+ self.delete_session(token)
+
+ def complete_session(self, token):
+ session = self.read_session(token)
+        if session is None:
+            return
+        if session.status == COMPLETED or session.status == ABORTED:
+ return
+ session.status = COMPLETED
+ session.date_finished = int(time.time() * 1000)
+ self.update_session(session)
+ self._event_dispatcher.dispatch_event(
+ token,
+ event_type=STATUS_EVENT,
+ data=session.status
+ )
+
+ def test_in_session(self, test, session):
+ return self._test_list_contains_test(test, session.pending_tests) \
+ or self._test_list_contains_test(test, session.running_tests)
+
+ def is_test_complete(self, test, session):
+ return not self._test_list_contains_test(test, session.pending_tests) \
+ and not self._test_list_contains_test(test, session.running_tests)
+
+ def is_test_running(self, test, session):
+ return self._test_list_contains_test(test, session.running_tests)
+
+ def _test_list_contains_test(self, test, test_list):
+ for api in list(test_list.keys()):
+ if test in test_list[api]:
+ return True
+ return False
+
+ def is_api_complete(self, api, session):
+ return api not in session.pending_tests \
+ and api not in session.running_tests
+
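+    # Re-attaches the query strings of matching include-list entries to a
+    # test path.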
+ def get_test_path_with_query(self, test, session):
+ query_string = ""
+ include_list = session.tests["include"]
+ for include_test in include_list:
+ split = include_test.split("?")
+ query = ""
+ if len(split) > 1:
+ include_test = split[0]
+ query = split[1]
+ pattern = re.compile("^" + include_test)
+ if pattern.match(test) is not None:
+ query_string += query + "&"
+ return f"{test}?{query_string}"
+
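+    # Resolves a token fragment (at least 8 characters) to a full token,
+    # but only if the fragment matches exactly one known session.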
+ def find_token(self, fragment):
+ if len(fragment) < 8:
+ return None
+ tokens = []
+ for token in self._sessions:
+ if token.startswith(fragment):
+ tokens.append(token)
+ if len(tokens) != 1:
+ return None
+ return tokens[0]
+
+ def get_total_sessions(self):
+ return len(self._sessions)
diff --git a/testing/web-platform/tests/tools/wave/testing/test_loader.py b/testing/web-platform/tests/tools/wave/testing/test_loader.py
new file mode 100644
index 0000000000..8d751260d9
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/testing/test_loader.py
@@ -0,0 +1,200 @@
+# mypy: allow-untyped-defs
+
+import os
+import re
+
+AUTOMATIC = "automatic"
+MANUAL = "manual"
+
+TEST_TYPES = [AUTOMATIC, MANUAL]
+
+
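+# Builds the set of runnable tests from the manifest data passed to
+# load_tests, split into automatic (testharness) and manual tests and
+# filtered by the include and exclude list files.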
+class TestLoader:
+ def initialize(
+ self,
+ exclude_list_file_path,
+ include_list_file_path,
+ results_manager,
+ api_titles
+ ):
+ self._exclude_list_file_path = exclude_list_file_path
+ self._include_list_file_path = include_list_file_path
+ self._results_manager = results_manager
+ self._tests = {}
+ self._tests[AUTOMATIC] = {}
+ self._tests[MANUAL] = {}
+ self._api_titles = api_titles
+
+ def load_tests(self, tests):
+ include_list = self._load_test_list(self._include_list_file_path)
+ exclude_list = self._load_test_list(self._exclude_list_file_path)
+
+ if "testharness" in tests:
+ self._tests[AUTOMATIC] = self._load_tests(
+ tests=tests["testharness"],
+ exclude_list=exclude_list
+ )
+
+ if "manual" in tests:
+ self._tests[MANUAL] = self._load_tests(
+ tests=tests["manual"],
+ include_list=include_list
+ )
+
+ for api in self._tests[AUTOMATIC]:
+ for test_path in self._tests[AUTOMATIC][api][:]:
+ if "manual" not in test_path:
+ continue
+ self._tests[AUTOMATIC][api].remove(test_path)
+
+ if not self._is_valid_test(test_path,
+ include_list=include_list):
+ continue
+
+ if api not in self._tests[MANUAL]:
+ self._tests[MANUAL][api] = []
+ self._tests[MANUAL][api].append(test_path)
+
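+    # Flattens the nested manifest dictionary into test paths grouped by API
+    # (the first path segment), applying the include/exclude filters.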
+ def _load_tests(self, tests, exclude_list=None, include_list=None):
+ loaded_tests = {}
+
+ def get_next_part(tests):
+ paths = []
+ for test in tests:
+ if isinstance(tests[test], dict):
+ subs = get_next_part(tests[test])
+ for sub in subs:
+ if sub is None:
+ continue
+ paths.append(test + "/" + sub)
+ continue
+ if test.endswith(".html"):
+ paths.append(test)
+ continue
+ if test.endswith(".js"):
+ for element in tests[test][1:]:
+ paths.append(element[0])
+ continue
+ return paths
+
+ test_paths = get_next_part(tests)
+ for test_path in test_paths:
+ if not test_path.startswith("/"):
+ test_path = "/" + test_path
+ if self._is_valid_test(test_path, exclude_list, include_list):
+ api_name = self._parse_api_name(test_path)
+ if api_name not in loaded_tests:
+ loaded_tests[api_name] = []
+ loaded_tests[api_name].append(test_path)
+ return loaded_tests
+
+ def _parse_api_name(self, test_path):
+ for part in test_path.split("/"):
+ if part == "":
+ continue
+ return part
+
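+    # A test is valid if it matches at least one include prefix (when an
+    # include list is given) and matches no exclude prefix.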
+ def _is_valid_test(self, test_path, exclude_list=None, include_list=None):
+ is_valid = True
+
+ if include_list is not None and len(include_list) > 0:
+ is_valid = False
+ for include_test in include_list:
+ include_test = include_test.split("?")[0]
+ pattern = re.compile("^" + include_test)
+ if pattern.match(test_path) is not None:
+ is_valid = True
+ break
+
+ if not is_valid:
+ return is_valid
+
+ if exclude_list is not None and len(exclude_list) > 0:
+ is_valid = True
+ for exclude_test in exclude_list:
+ exclude_test = exclude_test.split("?")[0]
+ pattern = re.compile("^" + exclude_test)
+ if pattern.match(test_path) is not None:
+ is_valid = False
+ break
+
+ return is_valid
+
+ def _load_test_list(self, file_path):
+ tests = []
+ if not os.path.isfile(file_path):
+ return tests
+
+ file_content = None
+ with open(file_path) as file_handle:
+ file_content = file_handle.read()
+
+ for line in file_content.split():
+ line = line.replace(" ", "")
+ line = re.sub(r"^#", "", line)
+ if line == "":
+ continue
+ tests.append(line)
+
+ return tests
+
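+    # Returns tests grouped by API for the requested test types, optionally
+    # limited to tests that passed in all of the given reference sessions.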
+ def get_tests(
+ self,
+ test_types=None,
+ include_list=None,
+ exclude_list=None,
+ reference_tokens=None
+ ):
+ if test_types is None:
+ test_types = [AUTOMATIC, MANUAL]
+ if include_list is None:
+ include_list = []
+ if exclude_list is None:
+ exclude_list = []
+ if reference_tokens is None:
+ reference_tokens = []
+
+ loaded_tests = {}
+
+ reference_results = self._results_manager.read_common_passed_tests(
+ reference_tokens)
+
+ for test_type in test_types:
+ if test_type not in TEST_TYPES:
+ continue
+ for api in self._tests[test_type]:
+ for test_path in self._tests[test_type][api]:
+ if not self._is_valid_test(test_path, exclude_list,
+ include_list):
+ continue
+ if reference_results is not None and \
+ (api not in reference_results or
+ (api in reference_results and test_path not in reference_results[api])):
+ continue
+ if api not in loaded_tests:
+ loaded_tests[api] = []
+ loaded_tests[api].append(test_path)
+ return loaded_tests
+
+ def get_apis(self):
+ apis = []
+ for test_type in TEST_TYPES:
+ for api in self._tests[test_type]:
+ in_list = False
+ for item in apis:
+ if item["path"] == "/" + api:
+ in_list = True
+ break
+ if in_list:
+ continue
+ title = None
+ for item in self._api_titles:
+ if item["path"] == "/" + api:
+ title = item["title"]
+ break
+
+ if title is None:
+ apis.append({"title": api, "path": "/" + api})
+ else:
+ apis.append({"title": title, "path": "/" + api})
+ return apis
diff --git a/testing/web-platform/tests/tools/wave/testing/tests_manager.py b/testing/web-platform/tests/tools/wave/testing/tests_manager.py
new file mode 100644
index 0000000000..8187eb4a7d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/testing/tests_manager.py
@@ -0,0 +1,374 @@
+# mypy: allow-untyped-defs
+
+import re
+from threading import Timer
+
+from .event_dispatcher import TEST_COMPLETED_EVENT
+
+from ..data.exceptions.not_found_exception import NotFoundException
+from ..data.session import COMPLETED, ABORTED
+
+
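+# Hands out the next test of a session, keeps its pending and running test
+# lists up to date and reports a TIMEOUT result when a test exceeds its
+# timeout.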
+class TestsManager:
+ def initialize(
+ self,
+ test_loader,
+ sessions_manager,
+ results_manager,
+ event_dispatcher
+ ):
+ self._test_loader = test_loader
+ self._sessions_manager = sessions_manager
+ self._results_manager = results_manager
+ self._event_dispatcher = event_dispatcher
+
+ self._timeouts = []
+
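+    # Moves the next pending test into the running list, starts its timeout
+    # timer and returns its path, or None if there is no test left to run.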
+ def next_test(self, session):
+ if session.status == COMPLETED or session.status == ABORTED:
+ return None
+
+ pending_tests = session.pending_tests
+ running_tests = session.running_tests
+ token = session.token
+
+ if pending_tests is None:
+ pending_tests = self.load_tests(session)
+ session.pending_tests = pending_tests
+ self._sessions_manager.update_session(session)
+
+ if running_tests is None:
+ running_tests = {}
+
+ test = self._get_next_test_from_list(pending_tests)
+ if test is None:
+ return None
+
+ pending_tests = self.remove_test_from_list(pending_tests, test)
+ running_tests = self.add_test_to_list(running_tests, test)
+
+ test_timeout = self.get_test_timeout(test, session) / 1000.0
+
+ def handler(self, token, test):
+ self._on_test_timeout(token, test)
+
+ timer = Timer(test_timeout, handler, [self, token, test])
+ self._timeouts.append({
+ "test": test,
+ "timeout": timer
+ })
+
+ session.pending_tests = pending_tests
+ session.running_tests = running_tests
+ self._sessions_manager.update_session(session)
+
+ timer.start()
+ return test
+
+ def read_last_completed_tests(self, token, count):
+ results = self._results_manager.read_results(token)
+
+ results_tests = {}
+ for api in list(results.keys()):
+ results_tests[api] = []
+ for result in results[api]:
+ results_tests[api].append(result["test"])
+
+ sorted_results_tests = self._sort_tests_by_execution(results_tests)
+ sorted_results_tests.reverse()
+
+ tests = {"pass": [], "fail": [], "timeout": []}
+
+ for test in sorted_results_tests:
+ api = None
+ for part in test.split("/"):
+ if part != "":
+ api = part
+ break
+
+ result = None
+ for potential_result in results[api]:
+ if potential_result["test"] == test:
+ result = potential_result
+ break
+ if result is None:
+ break
+
+ if result["status"] == "ERROR":
+ if len(tests["fail"]) < count:
+ tests["fail"].append(result["test"])
+ elif result["status"] == "TIMEOUT":
+ if len(tests["timeout"]) < count:
+ tests["timeout"].append(result["test"])
+ passes = True
+ for test in result["subtests"]:
+ if test["status"] != "PASS":
+ passes = False
+ break
+
+ if passes and len(tests["pass"]) < count:
+ tests["pass"].append(result["test"])
+ if not passes and len(tests["fail"]) < count:
+ tests["fail"].append(result["test"])
+ if len(tests["pass"]) == count and len(tests["fail"]) == count \
+ and len(tests["timeout"]) == count:
+ return tests
+ return tests
+
+ def _sort_tests_by_execution(self, tests):
+ sorted_tests = []
+
+ for api in list(tests.keys()):
+ for test in tests[api]:
+ sorted_tests.append(test)
+
+ class compare:
+ def __init__(self, tests_manager, test):
+ self.test = test
+ self.tests_manager = tests_manager
+
+ def __lt__(self, test_b):
+ test_a = self.test
+ test_b = test_b.test
+ micro_test_list = {}
+ api_a = ""
+ for part in test_a.split("/"):
+ if part != "":
+ api_a = part
+ break
+ api_b = ""
+ for part in test_b.split("/"):
+ if part != "":
+ api_b = part
+ break
+ if api_a == api_b:
+ micro_test_list[api_a] = [test_a, test_b]
+ else:
+ micro_test_list[api_a] = [test_a]
+ micro_test_list[api_b] = [test_b]
+ next_test = self.tests_manager._get_next_test_from_list(micro_test_list)
+ return next_test == test_b
+
+ sorted_tests.sort(key=lambda test: compare(self, test))
+ return sorted_tests
+
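+    # Iterates APIs in alphabetical order and serves manual HTTP tests first,
+    # then manual HTTPS, then automatic HTTP and finally automatic HTTPS tests.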
+ def _get_next_test_from_list(self, tests):
+ test = None
+ api = None
+ has_http = True
+ has_manual = True
+ current_api = 0
+ current_test = 0
+
+ apis = list(tests.keys())
+ apis.sort(key=lambda api: api.lower())
+
+ for api in apis:
+ tests[api].sort(key=lambda test: test.replace("/", "").lower())
+
+ while test is None:
+ if len(apis) <= current_api:
+ return None
+ api = apis[current_api]
+
+ if len(tests[api]) <= current_test:
+ current_api = current_api + 1
+ current_test = 0
+
+ if current_api == len(apis):
+ if has_http:
+ has_http = False
+ current_api = 0
+ test = None
+ continue
+
+ if has_manual:
+ has_manual = False
+ current_api = 0
+ test = None
+ has_http = True
+ continue
+
+ return None
+
+ test = None
+ continue
+ test = tests[api][current_test]
+
+ if "manual" in test and "https" not in test:
+ return test
+
+ if "manual" in test and "https" in test:
+ if not has_http:
+ return test
+
+ if "manual" not in test and "https" not in test:
+ if not has_manual:
+ return test
+
+ if "manual" not in test and "https" in test:
+ if not has_manual and not has_http:
+ return test
+
+ current_test = current_test + 1
+ test = None
+
+ return test
+
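+    # Returns the tests that follow the given test in execution order,
+    # regrouped by API; used to resume a session after its last completed test.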
+ def skip_to(self, test_list, test):
+ sorted_tests = self._sort_tests_by_execution(test_list)
+ if test not in sorted_tests:
+ return test_list
+ index = sorted_tests.index(test)
+ remaining_tests = sorted_tests[index + 1:]
+ remaining_tests_by_api = {}
+ current_api = "___"
+ for test in remaining_tests:
+ if not test.startswith("/" + current_api) and \
+ not test.startswith(current_api):
+ current_api = next((p for p in test.split("/") if p != ""),
+ None)
+ if current_api not in remaining_tests_by_api:
+ remaining_tests_by_api[current_api] = []
+ remaining_tests_by_api[current_api].append(test)
+ return remaining_tests_by_api
+
+ def remove_test_from_list(self, test_list, test):
+ api = None
+ for part in test.split("/"):
+ if part is None or part == "":
+ continue
+ api = part
+ break
+ if api not in test_list:
+ return test_list
+ if test not in test_list[api]:
+ return test_list
+ test_list[api].remove(test)
+ if len(test_list[api]) == 0:
+ del test_list[api]
+
+ return test_list
+
+ def add_test_to_list(self, test_list, test):
+ api = None
+ for part in test.split("/"):
+ if part is None or part == "":
+ continue
+ api = part
+ break
+ if api in test_list and test in test_list[api]:
+ return test_list
+ if api not in test_list:
+ test_list[api] = []
+ test_list[api].append(test)
+ return test_list
+
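+    # Prefers a timeout configured for a matching path prefix and falls back
+    # to the session's manual or automatic default timeout.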
+ def get_test_timeout(self, test, session):
+ timeouts = session.timeouts
+ test_timeout = None
+
+ for path in list(timeouts.keys()):
+ pattern = re.compile("^" + path.replace(".", ""))
+ if pattern.match(test.replace(".", "")) is not None:
+ test_timeout = timeouts[path]
+ break
+
+ if test_timeout is None:
+ if "manual" in test:
+ test_timeout = timeouts["manual"]
+ else:
+ test_timeout = timeouts["automatic"]
+
+ return test_timeout
+
+ def _on_test_timeout(self, token, test):
+ data = {
+ "test": test,
+ "status": "TIMEOUT",
+ "message": None,
+ "subtests": [
+ {
+ "status": "TIMEOUT",
+ "xstatus": "SERVERTIMEOUT"
+ }
+ ]
+ }
+
+ self._results_manager.create_result(token, data)
+
+ def read_tests(self):
+ return self._test_loader.get_tests()
+
+ def complete_test(self, test, session):
+ running_tests = session.running_tests
+
+ running_tests = self.remove_test_from_list(running_tests, test)
+ session.running_tests = running_tests
+
+        timeout = next((t for t in self._timeouts if t["test"] == test), None)
+        if timeout is not None:
+            timeout["timeout"].cancel()
+            self._timeouts.remove(timeout)
+
+ self.update_tests(
+ running_tests=running_tests,
+ session=session
+ )
+
+ self._event_dispatcher.dispatch_event(
+ dispatcher_token=session.token,
+ event_type=TEST_COMPLETED_EVENT,
+ data=test
+ )
+
+ def update_tests(
+ self,
+ pending_tests=None,
+ running_tests=None,
+ session=None
+ ):
+ if pending_tests is not None:
+ session.pending_tests = pending_tests
+
+ if running_tests is not None:
+ session.running_tests = running_tests
+
+ self._sessions_manager.update_session(session)
+
+ def calculate_test_files_count(self, tests):
+ count = {}
+ for api in tests:
+ count[api] = len(tests[api])
+ return count
+
+ def read_malfunctioning_tests(self, token):
+ session = self._sessions_manager.read_session(token)
+ return session.malfunctioning_tests
+
+ def update_malfunctioning_tests(self, token, tests):
+ if token is None:
+ return
+ if tests is None:
+ return
+
+ session = self._sessions_manager.read_session(token)
+ if session is None:
+ raise NotFoundException("Could not find session using token: " + token)
+ if session.is_public:
+ return
+ session.malfunctioning_tests = tests
+ self._sessions_manager.update_session(session)
+
+ def load_tests(self, session):
+ pending_tests = self._test_loader.get_tests(
+ session.test_types,
+ include_list=session.tests["include"],
+ exclude_list=session.tests["exclude"],
+ reference_tokens=session.reference_tokens
+ )
+
+ last_completed_test = session.last_completed_test
+ if last_completed_test is not None:
+ pending_tests = self.skip_to(pending_tests, last_completed_test)
+
+ return pending_tests
diff --git a/testing/web-platform/tests/tools/wave/testing/wpt_report.py b/testing/web-platform/tests/tools/wave/testing/wpt_report.py
new file mode 100644
index 0000000000..b84119ed85
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/testing/wpt_report.py
@@ -0,0 +1,57 @@
+# mypy: allow-untyped-defs
+
+import subprocess
+import os
+import ntpath
+import sys
+from shutil import copyfile
+
+
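+# Invokes the external "wptreport" command line tool to render an HTML report
+# from the JSON result files in the given input directory.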
+def generate_report(
+ input_json_directory_path=None,
+ output_html_directory_path=None,
+ spec_name=None,
+ is_multi=None,
+ reference_dir=None):
+ if is_multi is None:
+ is_multi = False
+ try:
+ command = [
+ "wptreport",
+ "--input", input_json_directory_path,
+ "--output", output_html_directory_path,
+ "--spec", spec_name,
+ "--sort", "true",
+ "--failures", "true",
+ "--tokenFileName", "true" if is_multi else "false",
+ "--pass", "100",
+ "--ref", reference_dir if reference_dir is not None else ""]
+        # subprocess.check_call raises CalledProcessError on a non-zero exit
+        # status (subprocess.call never does), so the except clause is reachable.
+        subprocess.check_call(command, shell=False)
+    except subprocess.CalledProcessError as e:
+        info = sys.exc_info()
+        raise Exception("Failed to execute wptreport: " +
+                        str(info[0].__name__) + ": " + str(e))
+
+
+def generate_multi_report(
+ output_html_directory_path=None,
+ spec_name=None,
+ result_json_files=None,
+ reference_dir=None):
+ for file in result_json_files:
+ if not os.path.isfile(file["path"]):
+ continue
+ file_name = ntpath.basename(file["path"])
+ copyfile(file["path"], os.path.join(
+ output_html_directory_path,
+ file["token"] + "-" + file_name
+ ))
+
+ generate_report(
+ input_json_directory_path=output_html_directory_path,
+ output_html_directory_path=output_html_directory_path,
+ spec_name=spec_name,
+ is_multi=True,
+ reference_dir=reference_dir)
diff --git a/testing/web-platform/tests/tools/wave/tests/WAVE Local.postman_environment.json b/testing/web-platform/tests/tools/wave/tests/WAVE Local.postman_environment.json
new file mode 100644
index 0000000000..b1a6a089ab
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/tests/WAVE Local.postman_environment.json
@@ -0,0 +1,34 @@
+{
+ "id": "37be8ec4-7855-4554-867e-7a5d2a4f99e6",
+ "name": "WAVE Local",
+ "values": [
+ {
+ "key": "host",
+ "value": "web-platform.test",
+ "enabled": true
+ },
+ {
+ "key": "port",
+ "value": "8000",
+ "enabled": true
+ },
+ {
+ "key": "protocol",
+ "value": "http",
+ "enabled": true
+ },
+ {
+ "key": "web_root",
+ "value": "_wave",
+ "enabled": true
+ },
+ {
+ "key": "device_timeout",
+ "value": "60000",
+ "enabled": true
+ }
+ ],
+ "_postman_variable_scope": "environment",
+ "_postman_exported_at": "2020-05-25T12:12:37.098Z",
+ "_postman_exported_using": "Postman/7.25.0"
+}
\ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/tests/WAVE Server REST API Tests.postman_collection.json b/testing/web-platform/tests/tools/wave/tests/WAVE Server REST API Tests.postman_collection.json
new file mode 100644
index 0000000000..c5a07a3352
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/tests/WAVE Server REST API Tests.postman_collection.json
@@ -0,0 +1,8549 @@
+{
+ "info": {
+ "_postman_id": "69fb733b-51ec-49b2-aa04-e2330c1d26b6",
+ "name": "WAVE Server REST API Tests copy",
+ "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
+ },
+ "item": [
+ {
+ "name": "Read Available Tests",
+ "item": [
+ {
+ "name": "Read Available Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "7c432e7c-d0f9-4583-977a-55ca5d1e42a2",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var availableTests = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(typeof availableTests).to.equal(\"object\");",
+ " for (var api of Object.keys(availableTests)) {",
+ " pm.expect(availableTests[api]).to.be.an.instanceof(Array);",
+ " var apiRegExp = new RegExp(\"^/\" + api, \"i\");",
+ " for (var test of availableTests[api]) {",
+ " pm.expect(test).to.match(apiRegExp);",
+ " }",
+ " }",
+ "});",
+ "",
+ "var includedTests = [];",
+ "var excludedTests = [];",
+ "var specialTimeoutTest = \"\";",
+ "",
+ "var apis = Object.keys(availableTests);",
+ "for(var api of apis) {",
+ " if (availableTests[api].length > 50) {",
+ " var subDirs = availableTests[api].map(test => test.split(\"/\").filter(part => !!part).join(\"/\").split(\"/\")[1]).reduce((acc, curr) => acc.indexOf(curr) === -1 ? acc.concat([curr]) : acc, []);",
+ " if (subDirs.length > 2) {",
+ " includedTests.push(\"/\" + api);",
+ " excludedTests.push(\"/\" + api + \"/\" + subDirs[0]);",
+ " specialTimeoutTest = availableTests[api][availableTests[api].length - 1];",
+ " break;",
+ " }",
+ " ",
+ " }",
+ "}",
+ "",
+ "pm.globals.set(\"available_tests\", availableTests);",
+ "pm.globals.set(\"included_tests\", JSON.stringify(includedTests));",
+ "pm.globals.set(\"excluded_tests\", JSON.stringify(excludedTests));",
+ "pm.globals.set(\"special_timeout_test\", specialTimeoutTest.replace(\".\", \"\"));"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Create and Read Sessions",
+ "item": [
+ {
+ "name": "Start expiring session remove expiration date",
+ "item": [
+ {
+ "name": "Create Session With Expiration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "71470bf7-293b-4863-a74e-193d86f2e6ac",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "7d59b7a7-78f8-46fc-a3f1-bd06f7ec31b7",
+ "exec": [
+ "var expirationDate = Date.now() + 10000;",
+ "pm.globals.set(\"expiration_date\", expirationDate);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n\t\"expiration_date\": {{expiration_date}}\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "daaa5a98-f330-4ca5-afc5-c4051e7a94c1",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure contains expiration date\", function () {",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ "});",
+ "",
+ "var expirationDate = pm.globals.get(\"expiration_date\");",
+ "",
+ "pm.test(\"Expiration date is as specified\", function () {",
+ " pm.expect(Date.parse(jsonData.expiration_date)).to.equal(expirationDate);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "d6e7a096-5bed-44b0-8772-ab80c1f53447",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "66075d71-55ed-4835-b6b5-ada854653d04",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure contains expiration date\", function () {",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ "});",
+ "",
+ "pm.test(\"Expiration date is null\", function () {",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Read Public Sessions",
+ "item": [
+ {
+ "name": "Read Public Sessions",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a365eb0b-ba55-490a-8cd9-7286a1d39cd2",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Response is JSON Array\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.be.an.instanceof(Array);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/public",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "public"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Find Session",
+ "item": [
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "52719d31-ef36-4b74-8c5c-05d554e916c0",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Find Session Token",
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "753a7451-2a92-4de0-a2e2-d0d9c66de134",
+ "exec": [
+ "const token = pm.globals.get(\"session_token\");",
+ "pm.globals.set(\"session_token_fragment\", token.split(\"-\").shift());"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "2a465de6-f66e-4161-ad7e-7c1c3738befd",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const sessionToken = pm.globals.get(\"session_token\");",
+ "",
+ "pm.test(\"Found token is original token\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.token).to.equal(sessionToken);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token_fragment}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token_fragment}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Find Session Too Short Fragment",
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "0e7ff96f-8c5a-46f3-a1a0-258ad59b96e0",
+ "exec": [
+ "const token = pm.globals.get(\"session_token\");",
+ "pm.globals.set(\"session_token_fragment\", token.split(\"-\").shift());"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "eb36e017-4cdc-458a-b399-8998df800fa1",
+ "exec": [
+ "pm.test(\"Status code is 404\", function () {",
+ " pm.response.to.have.status(404);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/1234567",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "1234567"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Read Next Test",
+ "item": [
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Invalid Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9c8af59c-8d9b-4264-acb5-a0bdf3af434a",
+ "exec": [
+ "pm.test(\"Status code is 404\", function () {",
+ " pm.response.to.have.status(404);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session \\w Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "c16bf66e-283e-40b9-b98d-8b058ec1575a",
+ "exec": [
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "66780bc8-88e6-44d2-a6c8-5c649141accd",
+ "exec": [
+ "var automaticTimeout = 120000;",
+ "var manualTimeout = 1000000;",
+ "var specialTimeout = 2000;",
+ "",
+ "pm.globals.set(\"automatic_timeout\", automaticTimeout);",
+ "pm.globals.set(\"manual_timeout\", manualTimeout);",
+ "pm.globals.set(\"special_timeout\", specialTimeout);",
+ "",
+ "const availableTests = pm.globals.get(\"available_tests\");",
+ "const test1 = availableTests[Object.keys(availableTests)[0]][0];",
+ "",
+ "pm.globals.set(\"single_test_1\", test1);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": [\"{{single_test_1}}\"]\n },\n \"types\": [\n \"automatic\"\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Pending Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "82ca12ad-3be5-419a-9a45-14164b0eaf60",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const nextTest = jsonData.next_test;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "const web_root = pm.environment.get(\"web_root\");",
+ "",
+ "pm.test(\"Returned test is new session page\", function () {",
+ " pm.expect(test).to.equal(\"/\" + web_root + \"/newsession.html\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "135d7d9e-1869-4c02-9c6a-7c0ea522e140",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Running Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "1bfe8c41-2144-46de-824d-2d1e3c88205b",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const nextTest = jsonData.next_test;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "const api = test.split(\"/\").filter(part => !!part)[0]",
+ "const availableTests = pm.globals.get(\"available_tests\")",
+ "",
+ "pm.test(\"Returned test is valid test\", function () {",
+ " pm.expect(availableTests).to.have.property(api);",
+ " pm.expect(availableTests[api]).to.contain(test)",
+ "});",
+ "",
+ "",
+ "setTimeout(function () {}, 1000);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "02cfcc68-43ed-4533-a80c-699954900390",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"FAIL\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Completed Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "c8f3d89b-34ca-4d39-977e-638d3630ee5b",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const nextTest = jsonData.next_test;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "const web_root = pm.environment.get(\"web_root\");",
+ "",
+ "pm.test(\"Returned test is new session page\", function () {",
+ " pm.expect(test).to.equal(\"/\" + web_root + \"/finish.html\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session \\w Configuration Copy",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "bcde140e-51cf-4d7c-b745-5e56f9b59034",
+ "exec": [
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "c4354118-88ff-4b5a-9252-dac19264c262",
+ "exec": [
+ "var automaticTimeout = 120000;",
+ "var manualTimeout = 1000000;",
+ "var specialTimeout = 2000;",
+ "",
+ "pm.globals.set(\"automatic_timeout\", automaticTimeout);",
+ "pm.globals.set(\"manual_timeout\", manualTimeout);",
+ "pm.globals.set(\"special_timeout\", specialTimeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": {{included_tests}},\n \"exclude\": {{excluded_tests}}\n },\n \"types\": [\n \"automatic\"\n ],\n \"timeouts\": {\n \"automatic\": 1000\n },\n \"labels\": [\"label1\", \"label2\"]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "737fc854-7901-4819-96eb-4883f5b15bc9",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a28a515f-0e48-4c23-a418-3b30c1e6dc71",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Aborted Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "1b842c47-c67c-4bfe-a77d-7d5b553e001b",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const nextTest = jsonData.next_test;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "const web_root = pm.environment.get(\"web_root\");",
+ "",
+ "pm.test(\"Returned test is new session page\", function () {",
+ " pm.expect(test).to.equal(\"/\" + web_root + \"/pause.html\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Stop Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e2318e93-7446-43be-ad52-99201a85fcbf",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/stop",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "stop"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Aborted Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "c3bdd37f-c048-4a2a-979b-deca61758292",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const nextTest = jsonData.next_test;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "",
+ "const web_root = pm.environment.get(\"web_root\");",
+ "",
+ "pm.test(\"Returned test is new session page\", function () {",
+ " pm.expect(test).to.equal(\"/\" + web_root + \"/finish.html\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Control Session",
+ "item": [
+ {
+ "name": "Setup",
+ "item": [
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "506a2f52-ab98-4e61-943c-ca0f7cab7f28",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Pause Pending Session",
+ "item": [
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "85ee0aec-9fbd-4544-a938-fb821e576fac",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f42cdf2c-5304-420d-b79d-f3345ac4455d",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is pending\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"pending\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Start Pending Session",
+ "item": [
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "01d6bfab-8ec5-4317-8595-667778be42d8",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "dec9ab62-5014-4b6e-a747-be83dce71a3d",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "pm.test(\"Status is running\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"running\");",
+ "});",
+ "",
+ "pm.test(\"Start date is set\", function () {",
+ " pm.expect(Date.parse(jsonData.date_started)).to.be.below(Date.now());",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Start Running Session",
+ "item": [
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "1db3770b-6fa9-40b9-875c-56d113d0ff5b",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "84ccae85-36d6-4226-850c-269cbfddff8e",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is running\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"running\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Pause Running Session",
+ "item": [
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "29e3a3ed-7b75-43a2-99b3-7300bd39044a",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "614689fb-9d26-4ac1-b89c-eca305a957f2",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is paused\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"paused\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Pause Paused Session",
+ "item": [
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "221d9b75-af5c-4aa9-b13b-6d3af10578aa",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "34147223-f4f2-4c11-a60b-09396d46c152",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is paused\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"paused\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Start Paused Session",
+ "item": [
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "41b686b9-80e9-4c16-906d-ca948c0873a6",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "7a053b4c-87c6-426c-95d6-750c5d478aba",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is running\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"running\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Stop Running Session",
+ "item": [
+ {
+ "name": "Stop Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "52600705-47f5-4ec7-bf3c-ff2243022625",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/stop",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "stop"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "39c0a314-3efe-4977-8c20-33666ea8e0e6",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "pm.test(\"Status is aborted\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"aborted\");",
+ "});",
+ "",
+ "pm.test(\"Finish date is set\", function () {",
+ " pm.expect(Date.parse(jsonData.date_finished)).to.be.below(Date.now());",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Stop Aborted Session",
+ "item": [
+ {
+ "name": "Stop Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "0d53b83e-5b8b-4d69-980f-ddc2736d2a5f",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/stop",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "stop"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "4217abb8-3e6e-4125-8aaf-92b378136f87",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is aborted\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"aborted\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "f66a7794-ade7-41e7-abc6-560af7acfd53",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "04ca0247-e30e-470a-8282-d7a5eafc5b92",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Start Aborted Session",
+ "item": [
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "0a17b42b-dc5e-4588-88c5-b8bc29065dd0",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "5b908a57-2fe5-4309-86ab-933c196badfd",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is aborted\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"aborted\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "fe61b75f-f106-43f6-bcaf-ad4400ae1ef4",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "f94a7f25-72eb-4bcf-9ddb-573ca2cfbb1f",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Pause Aborted Session",
+ "item": [
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "c3915b66-bcd4-4a23-8b74-2fd9c16210d8",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "c94d14bf-e48f-4a85-8939-e45bc66be6d0",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is aborted\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"aborted\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "eb6a4051-3cd7-4d75-93ec-8f0c4463cc98",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "64295661-300d-4773-8856-6c3ecbe158b0",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Setup",
+ "item": [
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "4d33ee50-4368-42f3-b8a4-ab0ff27f00f6",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "6a217097-fdf7-4c99-b005-723acf9b8389",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "37c80a2a-9508-4e8d-93d5-dfbfac837bed",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Stop Pending Session",
+ "item": [
+ {
+ "name": "Stop Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "6d0635aa-2b63-4e0f-89d2-da4d47741160",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/stop",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "stop"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "90895f8b-5398-4eeb-9f35-5869736dc46c",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is aborted\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"aborted\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Setup",
+ "item": [
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e4929021-a5b2-4839-8537-bfb7ea1935c2",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "2e2703a2-d74f-46ba-b168-9f3b64888086",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "df98f07d-4bb3-467d-92aa-e9122db42e31",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "c3e166ef-de4b-4c02-89c3-547aaa7a6555",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "a86891f3-8f1d-48c6-9e08-0a792bb1627a",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Stop Paused Session",
+ "item": [
+ {
+ "name": "Stop Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "3ea151c0-8286-419f-bed6-0c06da912850",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/stop",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "stop"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f10cf08b-9a1a-4f3e-8b3e-8be52f82734e",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is aborted\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"aborted\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "a1b3bf2e-4d09-4ee1-9063-6c4c842fcd94",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "a6745e90-5382-445d-9d65-db74aff7af1b",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Setup",
+ "item": [
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session One Test",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "da2ec442-44fd-432e-b335-abe4dd73c182",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "47665454-d468-4964-8167-bd80051a3937",
+ "exec": [
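+ "// Select the first test of the first API group from the available tests as the single test for this session",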
+ "const availableTests = pm.globals.get(\"available_tests\");",
+ "const test = availableTests[Object.keys(availableTests)[0]][0]",
+ "",
+ "pm.globals.set(\"single_test\", test);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": [\"{{single_test}}\"]\n }\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "8a575b02-40ff-404c-a344-d768c218caba",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e8ab1a02-2be0-43ee-be8b-04967b5aaff3",
+ "exec": [
+ "const response = pm.response.json();",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
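+ "// Strip the protocol, host, port and query string from the next test URL to recover the bare test path",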
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "pm.globals.set(\"current_test\", test);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"FAIL\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "be410b75-d674-4ef3-8dda-5731f4dbcbd5",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "706efcea-90f2-4206-8246-b82240e719fb",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Start Completed Session",
+ "item": [
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a68edf67-0a80-450d-8f4e-921626491163",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "d3495b7e-e4b7-47a0-9589-f2d145915846",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is completed\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"completed\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Pause Completed Session",
+ "item": [
+ {
+ "name": "Pause Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f1ca8fab-80d3-4d92-887e-018cb6536a9f",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/pause",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "pause"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "c220fdc3-b483-40fa-ae88-bf7f57415d90",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is completed\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"completed\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Stop Completed Session",
+ "item": [
+ {
+ "name": "Stop Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a9130ab9-562e-48d9-a7c1-f8cb3bb8d85e",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/stop",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "stop"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "013fa69e-f98b-4124-ac3f-cbc1bb4c5078",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Status is completed\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData.status).to.equal(\"completed\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Clean Up",
+ "item": [
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Update and Read Sessions",
+ "item": [
+ {
+ "name": "Create Default",
+ "item": [
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "6ed4bed9-b4d6-4463-beca-4c4c09a23e44",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e2eba0d4-9e38-4a5c-9aa0-9874c70f0788",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"tests\");",
+ " pm.expect(typeof jsonData.tests).to.equal(\"object\");",
+ " pm.expect(jsonData.tests).to.have.property(\"include\");",
+ " pm.expect(jsonData.tests.include).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData.tests).to.have.property(\"exclude\");",
+ " pm.expect(jsonData.tests.exclude).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"types\");",
+ " pm.expect(jsonData.types).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"user_agent\");",
+ " pm.expect(typeof jsonData.user_agent).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"timeouts\");",
+ " pm.expect(typeof jsonData.timeouts).to.equal(\"object\")",
+ " pm.expect(jsonData.timeouts).to.have.property(\"automatic\");",
+ " pm.expect(typeof jsonData.timeouts.automatic).to.equal(\"number\");",
+ " pm.expect(jsonData.timeouts).to.have.property(\"manual\");",
+ " pm.expect(typeof jsonData.timeouts.manual).to.equal(\"number\");",
+ " pm.expect(jsonData).to.have.property(\"browser\");",
+ " pm.expect(typeof jsonData.browser).to.equal(\"object\");",
+ " pm.expect(jsonData.browser).to.have.property(\"name\");",
+ " pm.expect(typeof jsonData.browser.name).to.equal(\"string\");",
+ " pm.expect(jsonData.browser).to.have.property(\"version\");",
+ " pm.expect(typeof jsonData.browser.version).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"reference_tokens\");",
+ " pm.expect(jsonData.reference_tokens).to.be.an.instanceof(Array);",
+ "});",
+ "",
+ "pm.test(\"Configuration is default\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " pm.expect(jsonData.tests.include).to.include(\"/\");",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(60000);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(300000);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "22d812a9-5d6f-4cbb-924e-cbe4392072ad",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"status\");",
+ " pm.expect(typeof jsonData.status).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ " pm.expect(jsonData).to.have.property(\"date_started\");",
+ " pm.expect(jsonData.date_started).to.satisfy(value => !value || typeof value === \"number\");",
+ " pm.expect(jsonData).to.have.property(\"date_finished\");",
+ " pm.expect(jsonData.date_finished).to.satisfy(value => !value || typeof value === \"number\");",
+ "});",
+ "",
+ "pm.test(\"Session status is pending\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"pending\");",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "d06b4cb9-f5b9-4a88-8a24-a195e59c298e",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "pm.test(\"All tests are pending tests\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.not.have.lengthOf(0);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(0);",
+ "})",
+ "",
+ "const availableTests = pm.globals.get(\"available_tests\"); ",
+ "",
+ "pm.test(\"All available tests are part of the session\", function () {",
+ " for (var api of Object.keys(jsonData.pending_tests)) {",
+ " for (var test of jsonData.pending_tests[api]) {",
+ " pm.expect(availableTests[api]).to.include(test);",
+ " }",
+ " }",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "bcf419eb-495a-4558-95de-45dea07bd6b0",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "86795630-9b19-49f3-988e-ac089ef76187",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Update With Configuration",
+ "item": [
+ {
+ "name": "Update Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "181e1f95-34d7-4c2c-bc3c-b6d5f411807d",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "46e33bfe-0783-49f8-a9bc-98e26a34b55e",
+ "exec": [
+ "var automaticTimeout = 120000;",
+ "var manualTimeout = 1000000;",
+ "var specialTimeout = 2000;",
+ "",
+ "pm.globals.set(\"automatic_timeout\", automaticTimeout);",
+ "pm.globals.set(\"manual_timeout\", manualTimeout);",
+ "pm.globals.set(\"special_timeout\", specialTimeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "PUT",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": {{included_tests}},\n \"exclude\": {{excluded_tests}}\n },\n \"types\": [\n \"automatic\"\n ],\n \"timeouts\": {\n \"automatic\": {{automatic_timeout}},\n \"manual\": {{manual_timeout}},\n \"{{special_timeout_test}}\": {{special_timeout}}\n }\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "dd86be15-3e08-491e-b56b-274531c348e6",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"tests\");",
+ " pm.expect(typeof jsonData.tests).to.equal(\"object\");",
+ " pm.expect(jsonData.tests).to.have.property(\"include\");",
+ " pm.expect(jsonData.tests.include).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData.tests).to.have.property(\"exclude\");",
+ " pm.expect(jsonData.tests.exclude).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"types\");",
+ " pm.expect(jsonData.types).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"user_agent\");",
+ " pm.expect(typeof jsonData.user_agent).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"timeouts\");",
+ " pm.expect(typeof jsonData.timeouts).to.equal(\"object\")",
+ " pm.expect(jsonData.timeouts).to.have.property(\"automatic\");",
+ " pm.expect(typeof jsonData.timeouts.automatic).to.equal(\"number\");",
+ " pm.expect(jsonData.timeouts).to.have.property(\"manual\");",
+ " pm.expect(typeof jsonData.timeouts.manual).to.equal(\"number\");",
+ " pm.expect(jsonData).to.have.property(\"browser\");",
+ " pm.expect(typeof jsonData.browser).to.equal(\"object\");",
+ " pm.expect(jsonData.browser).to.have.property(\"name\");",
+ " pm.expect(typeof jsonData.browser.name).to.equal(\"string\");",
+ " pm.expect(jsonData.browser).to.have.property(\"version\");",
+ " pm.expect(typeof jsonData.browser.version).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"reference_tokens\");",
+ " pm.expect(jsonData.reference_tokens).to.be.an.instanceof(Array);",
+ "});",
+ "",
+ "var includedTests = JSON.parse(pm.globals.get(\"included_tests\"));",
+ "var excludedTests = JSON.parse(pm.globals.get(\"excluded_tests\"));",
+ "var automaticTimeout = pm.globals.get(\"automatic_timeout\");",
+ "var manualTimeout = pm.globals.get(\"manual_timeout\");",
+ "var specialTimeout = pm.globals.get(\"special_timeout\");",
+ "var specialTimeoutTest = pm.globals.get(\"special_timeout_test\");",
+ "",
+ "pm.test(\"Configuration is as specified\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " for (var test of includedTests) {",
+ " pm.expect(jsonData.tests.include).to.include(test);",
+ " }",
+ " for (var test of excludedTests) {",
+ " pm.expect(jsonData.tests.exclude).to.include(test);",
+ " }",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.not.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(automaticTimeout);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(manualTimeout);",
+ " pm.expect(jsonData.timeouts[specialTimeoutTest]).to.equal(specialTimeout);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "569dc8f6-39d4-4ec6-9466-64154671e4bc",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "pm.test(\"All tests are pending tests\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.not.have.lengthOf(0);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(0);",
+ "})",
+ "",
+ "const availableTests = pm.globals.get(\"available_tests\");",
+ "const includedTests = JSON.parse(pm.globals.get(\"included_tests\"));",
+ "const excludedTests = JSON.parse(pm.globals.get(\"excluded_tests\"));",
+ "",
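+ "// For each API group, pending tests must match the corresponding include prefix and must not match any exclude prefix",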
+ "pm.test(\"Selected subset of tests are part of the session\", function () {",
+ " for (var api of Object.keys(jsonData.pending_tests)) {",
+ " for (var includedTest of includedTests) {",
+ " if (includedTest.split(\"/\").find(part => !!part) === api) {",
+ " var includeRegExp = new RegExp(\"^\" + includedTest, \"i\");",
+ " for (var test of jsonData.pending_tests[api]) {",
+ " pm.expect(test).to.match(includeRegExp);",
+ " }",
+ " break;",
+ " }",
+ " }",
+ " for (var excludedTest of excludedTests) {",
+ " if (excludedTest.split(\"/\").find(part => !!part) === api) {",
+ " var excludeRegExp = new RegExp(\"^\" + excludedTest, \"i\");",
+ " for (var test of jsonData.pending_tests[api]) {",
+ " pm.expect(test).to.not.match(excludeRegExp);",
+ " }",
+ " break;",
+ " }",
+ " }",
+ " }",
+ "});",
+ "",
+ "const sessionTests = jsonData.pending_tests;",
+ "",
+ "pm.globals.set(\"session_tests\", JSON.stringify(sessionTests));"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "db83ae93-7a70-4267-8a2f-3d2e7cb40b88",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"status\");",
+ " pm.expect(typeof jsonData.status).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ " pm.expect(jsonData).to.have.property(\"date_started\");",
+ " pm.expect(jsonData.date_started).to.satisfy(value => !value || typeof value === \"number\");",
+ " pm.expect(jsonData).to.have.property(\"date_finished\");",
+ " pm.expect(jsonData.date_finished).to.satisfy(value => !value || typeof value === \"number\");",
+ "});",
+ "",
+ "pm.test(\"Session status is pending\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"pending\");",
+ "})",
+ "",
+ "pm.test(\"Start and Finish date not set\", function () {",
+ " pm.expect(jsonData.date_started).to.be.null;",
+ " pm.expect(jsonData.date_finished).to.be.null;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "52cc597e-30b7-426e-a15e-3a628c66619a",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "60d6def7-cb5b-433f-8a4e-33bd8e20be53",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Update Session Labels",
+ "item": [
+ {
+ "name": "Create Session With Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a1c2e070-9309-4dee-ae71-6edf4cbe3630",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "1f03ad23-73c7-4e64-9de9-abe902e2b90c",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"labels\": [\"label1\", \"label2\"]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "bf76f47c-960c-4007-a59b-92793e470082",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"tests\");",
+ " pm.expect(typeof jsonData.tests).to.equal(\"object\");",
+ " pm.expect(jsonData.tests).to.have.property(\"include\");",
+ " pm.expect(jsonData.tests.include).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData.tests).to.have.property(\"exclude\");",
+ " pm.expect(jsonData.tests.exclude).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"types\");",
+ " pm.expect(jsonData.types).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"user_agent\");",
+ " pm.expect(typeof jsonData.user_agent).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"timeouts\");",
+ " pm.expect(typeof jsonData.timeouts).to.equal(\"object\")",
+ " pm.expect(jsonData.timeouts).to.have.property(\"automatic\");",
+ " pm.expect(typeof jsonData.timeouts.automatic).to.equal(\"number\");",
+ " pm.expect(jsonData.timeouts).to.have.property(\"manual\");",
+ " pm.expect(typeof jsonData.timeouts.manual).to.equal(\"number\");",
+ " pm.expect(jsonData).to.have.property(\"browser\");",
+ " pm.expect(typeof jsonData.browser).to.equal(\"object\");",
+ " pm.expect(jsonData.browser).to.have.property(\"name\");",
+ " pm.expect(typeof jsonData.browser.name).to.equal(\"string\");",
+ " pm.expect(jsonData.browser).to.have.property(\"version\");",
+ " pm.expect(typeof jsonData.browser.version).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"reference_tokens\");",
+ " pm.expect(jsonData.reference_tokens).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"labels\");",
+ " pm.expect(jsonData.labels).to.be.an.instanceof(Array);",
+ "});",
+ "",
+ "pm.test(\"Configuration is default\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " pm.expect(jsonData.tests.include).to.include(\"/\");",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(60000);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(300000);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ " pm.expect(jsonData.labels).to.include(\"label1\");",
+ " pm.expect(jsonData.labels).to.include(\"label2\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Update Labels",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "20cc9f7d-b4bb-4bb2-a628-4cfddd86112a",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "PUT",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n\t\"labels\": [\"new\", \"labels\"]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/labels",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "labels"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "11fcc51b-effd-4522-8126-6b6a20435927",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"tests\");",
+ " pm.expect(typeof jsonData.tests).to.equal(\"object\");",
+ " pm.expect(jsonData.tests).to.have.property(\"include\");",
+ " pm.expect(jsonData.tests.include).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData.tests).to.have.property(\"exclude\");",
+ " pm.expect(jsonData.tests.exclude).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"types\");",
+ " pm.expect(jsonData.types).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"user_agent\");",
+ " pm.expect(typeof jsonData.user_agent).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"timeouts\");",
+ " pm.expect(typeof jsonData.timeouts).to.equal(\"object\")",
+ " pm.expect(jsonData.timeouts).to.have.property(\"automatic\");",
+ " pm.expect(typeof jsonData.timeouts.automatic).to.equal(\"number\");",
+ " pm.expect(jsonData.timeouts).to.have.property(\"manual\");",
+ " pm.expect(typeof jsonData.timeouts.manual).to.equal(\"number\");",
+ " pm.expect(jsonData).to.have.property(\"browser\");",
+ " pm.expect(typeof jsonData.browser).to.equal(\"object\");",
+ " pm.expect(jsonData.browser).to.have.property(\"name\");",
+ " pm.expect(typeof jsonData.browser.name).to.equal(\"string\");",
+ " pm.expect(jsonData.browser).to.have.property(\"version\");",
+ " pm.expect(typeof jsonData.browser.version).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"reference_tokens\");",
+ " pm.expect(jsonData.reference_tokens).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"labels\");",
+ " pm.expect(jsonData.labels).to.be.an.instanceof(Array);",
+ "});",
+ "",
+ "pm.test(\"Configuration is default\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " pm.expect(jsonData.tests.include).to.include(\"/\");",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(60000);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(300000);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ " pm.expect(jsonData.labels).to.include(\"new\");",
+ " pm.expect(jsonData.labels).to.include(\"labels\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean Up",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Delete Session",
+ "item": [
+ {
+ "name": "Setup",
+ "item": [
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "2b7198ef-6051-40ee-af4f-466bf41e3768",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Delete Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "cabccfe1-882e-48bf-9946-4b8f67b09f04",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "cad95ed0-822a-433b-b411-adf3b2e5ea77",
+ "exec": [
+ "pm.test(\"Status code is 404\", function () {",
+ " pm.response.to.have.status(404);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Create and Read Results",
+ "item": [
+ {
+ "name": "Create Session",
+ "item": [
+ {
+ "name": "Create Session Two Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "3c82b199-e27f-4b86-b98d-9f10ad2b363f",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "70ad16d3-5df7-4c94-9498-f5abd0b6f76c",
+ "exec": [
+ "const availableTests = pm.globals.get(\"available_tests\");",
+ "const test1 = availableTests[Object.keys(availableTests)[0]][0];",
+ "const test2 = availableTests[Object.keys(availableTests)[1]][0];",
+ "",
+ "pm.globals.set(\"single_test_1\", test1);",
+ "pm.globals.set(\"single_test_2\", test2);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": [\"{{single_test_1}}\", \"{{single_test_2}}\"]\n }\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "074aff68-931d-4d31-b138-d9068bc57422",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Results",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9440583c-b8a5-4488-93aa-e37daf96485a",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Responds with no results\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(typeof jsonData).to.equal(\"object\");",
+ " pm.expect(Object.keys(jsonData)).to.be.empty;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ],
+ "query": [
+ {
+ "key": "path",
+ "value": "/2dcontext/drawing-images-to-the-canvas",
+ "disabled": true
+ },
+ {
+ "key": "path",
+ "value": "/2dcontext/conformance-requirements",
+ "disabled": true
+ },
+ {
+ "key": "path",
+ "value": "/2dcontext/conformance-requirements/2d.missingargs.html",
+ "disabled": true
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Results Compact",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "50b9afe7-63ef-40ae-a6ea-5abbd40f254d",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var singleTest1 = pm.globals.get(\"single_test_1\");",
+ "var singleTest2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "var api1 = singleTest1.split(\"/\").find(part => !!part);",
+ "var api2 = singleTest2.split(\"/\").find(part => !!part);",
+ "",
+ "pm.test(\"Responds with no results\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(typeof jsonData).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(api1);",
+ " pm.expect(jsonData).to.have.property(api2);",
+ " pm.expect(jsonData[api1].complete).to.equal(0);",
+ " pm.expect(jsonData[api2].complete).to.equal(0);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/compact",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "compact"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Last Completed Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f0c77dd0-d424-4b6e-8bfa-3e0ea5908514",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ " ",
+ "pm.test(\"JSON format is as expected\", function () {",
+ " pm.expect(Object.keys(jsonData)).to.have.lengthOf(3);",
+ " pm.expect(jsonData).to.have.property(\"pass\");",
+ " pm.expect(jsonData).to.have.property(\"fail\");",
+ " pm.expect(jsonData).to.have.property(\"timeout\");",
+ " for (var key of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[key]).to.be.an.instanceof(Array);",
+ " }",
+ "});",
+ "",
+ "pm.test(\"Responds with no last completed tests\", function () {",
+ " for (var key of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[key]).to.be.empty;",
+ " }",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/last_completed",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "last_completed"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "bfdb82c2-1411-4e22-810b-a1f7edb8d2f4",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "const sessionTests = jsonData.pending_tests;",
+ "",
+ "pm.globals.set(\"session_tests\", JSON.stringify(sessionTests));"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Create First Result",
+ "item": [
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "2a60296d-953b-430d-a21d-df99265b90c1",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(Object.keys(response)).to.have.lengthOf(1);",
+ " pm.expect(response).to.have.property(\"next_test\");",
+ " pm.expect(typeof response.next_test).to.equal(\"string\");",
+ "});",
+ "",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "pm.globals.set(\"current_test\", test);",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "",
+ "pm.test(\"Returned test is first of two specified tests\", function () {",
+ " pm.expect(test).to.equal(test1);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "42168b0e-8be9-4211-8c4a-5a0776a3d131",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "pm.test(\"One test is pending, one test is running\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.have.lengthOf(1);",
+ " var api = Object.keys(jsonData.pending_tests)[0];",
+ " pm.expect(jsonData.pending_tests[api]).to.have.lengthOf(1);",
+ " pm.expect(jsonData.pending_tests[api]).to.include(test2);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(1);",
+ " api = Object.keys(jsonData.running_tests)[0];",
+ " pm.expect(jsonData.running_tests[api]).to.have.lengthOf(1);",
+ " pm.expect(jsonData.running_tests[api]).to.include(test1);",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "95a23ee1-f9e9-4b6a-abf5-4afa48ba5254",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"FAIL\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "707c403b-7135-4e0b-aae8-4fa4f07daac9",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "pm.test(\"One test is pending, one test is completed\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.have.lengthOf(1);",
+ " var api = Object.keys(jsonData.pending_tests)[0];",
+ " pm.expect(jsonData.pending_tests[api]).to.have.lengthOf(1);",
+ " pm.expect(jsonData.pending_tests[api]).to.include(test2);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(0);",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "7b77cb94-4159-462c-877b-23b48446a1bb",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"status\");",
+ " pm.expect(typeof jsonData.status).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ " pm.expect(jsonData).to.have.property(\"date_started\");",
+ " pm.expect(jsonData).to.have.property(\"date_finished\");",
+ "});",
+ "",
+ "pm.test(\"Session status is running\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"running\");",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Last Completed Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "8ba5726b-ed37-48f5-9389-4f6041b1eeaa",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ " ",
+ "pm.test(\"JSON format is as expected\", function () {",
+ " pm.expect(Object.keys(jsonData)).to.have.lengthOf(3);",
+ " pm.expect(jsonData).to.have.property(\"pass\");",
+ " pm.expect(jsonData).to.have.property(\"fail\");",
+ " pm.expect(jsonData).to.have.property(\"timeout\");",
+ " for (var key of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[key]).to.be.an.instanceof(Array);",
+ " }",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "",
+ "pm.test(\"Responds with one last completed tests as failed\", function () {",
+ " pm.expect(jsonData[\"pass\"]).to.be.empty;",
+ " pm.expect(jsonData[\"fail\"]).to.have.lengthOf(1);",
+ " pm.expect(jsonData[\"fail\"][0]).to.equal(test1);",
+ " pm.expect(jsonData[\"timeout\"]).to.be.empty;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/last_completed",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "last_completed"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Results",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "5ac4de17-caf3-43b2-8eb8-fee26eed5c96",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON format is as expected\", function () {",
+ " for (var api of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[api]).to.be.an.instanceof(Array);",
+ " for (var result of jsonData[api]) {",
+ " pm.expect(typeof result).to.equal(\"object\");",
+ " pm.expect(Object.keys(result)).to.have.lengthOf(4);",
+ " pm.expect(result).to.have.property(\"test\");",
+ " pm.expect(typeof result.test).to.equal(\"string\");",
+ " pm.expect(result).to.have.property(\"status\");",
+ " pm.expect(typeof result.status).to.equal(\"string\");",
+ " pm.expect(result).to.have.property(\"message\");",
+ " pm.expect(result.message).to.satisfy(message => !message || typeof message === \"string\");",
+ " pm.expect(result).to.have.property(\"subtests\");",
+ " pm.expect(result.subtests).to.be.an.instanceof(Array);",
+ " for (var subtest of result.subtests) {",
+ " pm.expect(typeof subtest).to.equal(\"object\");",
+ " pm.expect(Object.keys(subtest)).to.have.lengthOf(3);",
+ " pm.expect(subtest).to.have.property(\"name\");",
+ " pm.expect(typeof subtest.name).to.equal(\"string\");",
+ " pm.expect(subtest).to.have.property(\"status\");",
+ " pm.expect(typeof subtest.status).to.equal(\"string\");",
+ " pm.expect(subtest).to.have.property(\"message\");",
+ " pm.expect(subtest.message).to.satisfy(message => !message || typeof message === \"string\");",
+ " }",
+ " }",
+ " }",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "",
+ "pm.test(\"Test is first test, successful run and failed\", function () {",
+ " var api = Object.keys(jsonData)[0];",
+ " pm.expect(api).to.equal(test1.split(\"/\").find(part => !!part))",
+ " var result = jsonData[api][0];",
+ " pm.expect(result.test).to.equal(test1);",
+ " pm.expect(result.status).to.equal(\"OK\");",
+ " pm.expect(result.message).to.be.null;",
+ " var subtest = result.subtests[0];",
+ " pm.expect(subtest.status).to.equal(\"FAIL\");",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ],
+ "query": [
+ {
+ "key": "path",
+ "value": "/2dcontext/drawing-images-to-the-canvas",
+ "disabled": true
+ },
+ {
+ "key": "path",
+ "value": "/2dcontext/conformance-requirements",
+ "disabled": true
+ },
+ {
+ "key": "path",
+ "value": "/2dcontext/conformance-requirements/2d.missingargs.html",
+ "disabled": true
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Results Compact",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "38afbc94-502e-4aa7-85c2-70dbbdae7e13",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(typeof jsonData).to.equal(\"object\");",
+ " for (var api of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[api]).to.have.property(\"pass\");",
+ " pm.expect(typeof jsonData[api].pass).to.equal(\"number\");",
+ " pm.expect(jsonData[api]).to.have.property(\"fail\");",
+ " pm.expect(typeof jsonData[api].fail).to.equal(\"number\");",
+ " pm.expect(jsonData[api]).to.have.property(\"timeout\");",
+ " pm.expect(typeof jsonData[api].timeout).to.equal(\"number\");",
+ " pm.expect(jsonData[api]).to.have.property(\"not_run\");",
+ " pm.expect(typeof jsonData[api].not_run).to.equal(\"number\");",
+ " }",
+ "})",
+ "",
+ "pm.test(\"Responds with one test failed\", function () {",
+ " var api = Object.keys(jsonData)[0];",
+ " pm.expect(jsonData[api].pass).to.equal(0);",
+ " pm.expect(jsonData[api].fail).to.equal(1);",
+ " pm.expect(jsonData[api].timeout).to.equal(0);",
+ " pm.expect(jsonData[api].not_run).to.equal(0);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/compact",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "compact"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Create Last Result",
+ "item": [
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "85cf7940-73d0-4419-81a1-390c9c4c3a1c",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(Object.keys(response)).to.have.lengthOf(1);",
+ " pm.expect(response).to.have.property(\"next_test\");",
+ " pm.expect(typeof response.next_test).to.equal(\"string\");",
+ "});",
+ "",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "pm.globals.set(\"current_test\", test);",
+ "",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "pm.test(\"Returned test is second of two specified tests\", function () {",
+ " pm.expect(test).to.equal(test2);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "3e288d2c-865a-48ab-ac53-cddd160a9840",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "pm.test(\"One test is running\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.have.lengthOf(0);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(1);",
+ " var api = Object.keys(jsonData.running_tests)[0];",
+ " pm.expect(jsonData.running_tests[api]).to.have.lengthOf(1);",
+ " pm.expect(jsonData.running_tests[api]).to.include(test2);",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "786dbb67-47e8-4009-991c-1670d0ab04e1",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"PASS\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "db8fcfe2-89f3-4e1c-be82-059412218073",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"status\");",
+ " pm.expect(typeof jsonData.status).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ " pm.expect(jsonData).to.have.property(\"date_started\");",
+ " pm.expect(jsonData).to.have.property(\"date_finished\");",
+ "});",
+ "",
+ "pm.test(\"Session status is completed\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"completed\");",
+ "})",
+ "",
+ "pm.test(\"Finish date is set\", function () {",
+ " pm.expect(Date.parse(jsonData.date_finished)).to.be.below(Date.now());",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f8938178-c924-4038-a1cb-5d6fc88c08d2",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "var test1Api = test1.split(\"/\").find(part => !!part);",
+ "var test2Api = test1.split(\"/\").find(part => !!part);",
+ "",
+ "pm.test(\"One test is pending, one test is completed\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.have.lengthOf(0);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(0);",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Last Completed Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "5ff3fe93-2649-4874-9ae6-ce0bc2887685",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ " ",
+ "pm.test(\"JSON format is as expected\", function () {",
+ " pm.expect(Object.keys(jsonData)).to.have.lengthOf(3);",
+ " pm.expect(jsonData).to.have.property(\"pass\");",
+ " pm.expect(jsonData).to.have.property(\"fail\");",
+ " pm.expect(jsonData).to.have.property(\"timeout\");",
+ " for (var key of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[key]).to.be.an.instanceof(Array);",
+ " }",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "pm.test(\"Responds with one last completed tests as failed and one last completed test as passed\", function () {",
+ " pm.expect(jsonData[\"pass\"]).to.have.lengthOf(1);",
+ " pm.expect(jsonData[\"pass\"][0]).to.equal(test2);",
+ " pm.expect(jsonData[\"fail\"]).to.have.lengthOf(1);",
+ " pm.expect(jsonData[\"fail\"][0]).to.equal(test1);",
+ " pm.expect(jsonData[\"timeout\"]).to.be.empty;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/last_completed",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "last_completed"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Results",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "fee15d71-abfc-46f6-ad63-bdf67c42905d",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON format is as expected\", function () {",
+ " for (var api of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[api]).to.be.an.instanceof(Array);",
+ " for (var result of jsonData[api]) {",
+ " pm.expect(typeof result).to.equal(\"object\");",
+ " pm.expect(Object.keys(result)).to.have.lengthOf(4);",
+ " pm.expect(result).to.have.property(\"test\");",
+ " pm.expect(typeof result.test).to.equal(\"string\");",
+ " pm.expect(result).to.have.property(\"status\");",
+ " pm.expect(typeof result.status).to.equal(\"string\");",
+ " pm.expect(result).to.have.property(\"message\");",
+ " pm.expect(result.message).to.satisfy(message => !message || typeof message === \"string\");",
+ " pm.expect(result).to.have.property(\"subtests\");",
+ " pm.expect(result.subtests).to.be.an.instanceof(Array);",
+ " for (var subtest of result.subtests) {",
+ " pm.expect(typeof subtest).to.equal(\"object\");",
+ " pm.expect(Object.keys(subtest)).to.have.lengthOf(3);",
+ " pm.expect(subtest).to.have.property(\"name\");",
+ " pm.expect(typeof subtest.name).to.equal(\"string\");",
+ " pm.expect(subtest).to.have.property(\"status\");",
+ " pm.expect(typeof subtest.status).to.equal(\"string\");",
+ " pm.expect(subtest).to.have.property(\"message\");",
+ " pm.expect(subtest.message).to.satisfy(message => !message || typeof message === \"string\");",
+ " }",
+ " }",
+ " }",
+ "});",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "pm.test(\"Test is first and second test, successful run and failed, and successful run and passed\", function () {",
+ " var api = Object.keys(jsonData)[0];",
+ " for (var result of jsonData[api]) {",
+ " if (result.test === test1) {",
+ " pm.expect(result.test).to.equal(test1); ",
+ " pm.expect(result.status).to.equal(\"OK\");",
+ " pm.expect(result.message).to.be.null;",
+ " var subtest = result.subtests[0];",
+ " pm.expect(subtest.status).to.equal(\"FAIL\");",
+ " } else {",
+ " pm.expect(result.test).to.equal(test2); ",
+ " pm.expect(result.status).to.equal(\"OK\");",
+ " pm.expect(result.message).to.be.null;",
+ " subtest = result.subtests[0];",
+ " pm.expect(subtest.status).to.equal(\"PASS\");",
+ " }",
+ " }",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ],
+ "query": [
+ {
+ "key": "path",
+ "value": "/2dcontext/drawing-images-to-the-canvas",
+ "disabled": true
+ },
+ {
+ "key": "path",
+ "value": "/2dcontext/conformance-requirements",
+ "disabled": true
+ },
+ {
+ "key": "path",
+ "value": "/2dcontext/conformance-requirements/2d.missingargs.html",
+ "disabled": true
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Results Compact",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9f739832-3e90-4efc-96cf-2889f08af718",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(typeof jsonData).to.equal(\"object\");",
+ " for (var api of Object.keys(jsonData)) {",
+ " pm.expect(jsonData[api]).to.have.property(\"pass\");",
+ " pm.expect(typeof jsonData[api].pass).to.equal(\"number\");",
+ " pm.expect(jsonData[api]).to.have.property(\"fail\");",
+ " pm.expect(typeof jsonData[api].fail).to.equal(\"number\");",
+ " pm.expect(jsonData[api]).to.have.property(\"timeout\");",
+ " pm.expect(typeof jsonData[api].timeout).to.equal(\"number\");",
+ " pm.expect(jsonData[api]).to.have.property(\"not_run\");",
+ " pm.expect(typeof jsonData[api].not_run).to.equal(\"number\");",
+ " }",
+ "})",
+ "",
+ "const test1 = pm.globals.get(\"single_test_1\");",
+ "const test2 = pm.globals.get(\"single_test_2\");",
+ "",
+ "var test1Api = test1.split(\"/\").find(part => !!part);",
+ "var test2Api = test1.split(\"/\").find(part => !!part);",
+ "",
+ "pm.test(\"Responds with one test failed\", function () {",
+ " pm.expect(Object.keys(jsonData)).to.have.lengthOf(2);",
+ " var api = Object.keys(jsonData)[0];",
+ " if (api === test1Api) {",
+ " pm.expect(jsonData[api].pass).to.equal(0);",
+ " pm.expect(jsonData[api].fail).to.equal(1);",
+ " } else {",
+ " pm.expect(jsonData[api].pass).to.equal(1);",
+ " pm.expect(jsonData[api].fail).to.equal(0);",
+ " }",
+ " pm.expect(jsonData[api].timeout).to.equal(0);",
+ " pm.expect(jsonData[api].not_run).to.equal(0);",
+ " api = Object.keys(jsonData)[1];",
+ " if (api === test1Api) {",
+ " pm.expect(jsonData[api].pass).to.equal(0);",
+ " pm.expect(jsonData[api].fail).to.equal(1);",
+ " } else {",
+ " pm.expect(jsonData[api].pass).to.equal(1);",
+ " pm.expect(jsonData[api].fail).to.equal(0);",
+ " }",
+ " pm.expect(jsonData[api].timeout).to.equal(0);",
+ " pm.expect(jsonData[api].not_run).to.equal(0);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/compact",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "compact"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "353cde4e-5c57-4df7-b69e-7dac5cf408e3",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(Object.keys(response)).to.have.lengthOf(1);",
+ " pm.expect(response).to.have.property(\"next_test\");",
+ " pm.expect(typeof response.next_test).to.equal(\"string\");",
+ "});",
+ "",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "pm.globals.set(\"current_test\", test);",
+ "",
+ "const web_root = pm.environment.get(\"web_root\");",
+ "",
+ "pm.test(\"Returned test finish page\", function () {",
+ " pm.expect(test).to.equal(\"/\" + web_root + \"/finish.html\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Clean Up",
+ "item": [
+ {
+ "name": "Delete Session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Viewing and Downloading Reports",
+ "item": [
+ {
+ "name": "Create Sessions",
+ "item": [
+ {
+ "name": "First Session",
+ "item": [
+ {
+ "name": "Create Session One Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "84a0a44c-fdac-42e5-a3f8-7bbe7fc01fd8",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "6748c537-4286-48bb-9a10-7c1828d7f6e8",
+ "exec": [
+ "const availableTests = pm.globals.get(\"available_tests\");",
+ "const test1 = availableTests[Object.keys(availableTests)[0]][0];",
+ "const test2 = availableTests[Object.keys(availableTests)[0]][1];",
+ "const apiName = test1.split(\"/\").filter(part => !!part)[0];",
+ "",
+ "pm.globals.set(\"single_test_1\", test1);",
+ "pm.globals.set(\"single_test_2\", test2);",
+ "pm.globals.set(\"api_name\", apiName);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": [\"{{single_test_1}}\"]\n }\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "579feb39-25c5-482a-95e1-be6861144110",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "2399a9f6-0689-4035-8f01-22ad4726232b",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "pm.globals.set(\"current_test\", test);",
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "75177890-a772-4bcc-9d94-6a7f485b647b",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"FAIL\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Second Session",
+ "item": [
+ {
+ "name": "Create Session One Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e35de151-f1d5-4cb4-b403-953bb7dca87f",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token_comp\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "30b29828-cd1c-469a-873c-77a31ce7b7f4",
+ "exec": [
+ "const availableTests = pm.globals.get(\"available_tests\");",
+ "const test1 = availableTests[Object.keys(availableTests)[0]][0];",
+ "const test2 = availableTests[Object.keys(availableTests)[0]][1];",
+ "",
+ "pm.globals.set(\"single_test_1\", test1);",
+ "pm.globals.set(\"single_test_2\", test2);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": [\"{{single_test_1}}\"]\n }\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Start Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "29cdd92a-28c4-426e-9f53-8563bc3a36e7",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token_comp}}/start",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token_comp}}",
+ "start"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Next Test of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "8cc61693-a77c-467c-a7df-5c239a3bf569",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const response = pm.response.json();",
+ "",
+ "const nextTest = response.next_test;",
+ "pm.globals.set(\"current_test_url\", nextTest);",
+ "if (!nextTest) return;",
+ "const test = \"/\" + nextTest.split(\"/\").slice(3).join(\"/\").split(\"?\")[0];",
+ "pm.globals.set(\"current_test\", test);",
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token_comp}}/next",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token_comp}}",
+ "next"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Result",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "ae30e47e-bb3d-4479-9455-3823bc2adf88",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"test\": \"{{current_test}}\",\n \"status\": \"OK\",\n \"message\": null,\n \"subtests\": [\n {\n \"name\": \"Subtest testing feature xy\",\n \"status\": \"FAIL\",\n \"message\": \"Error message\"\n }\n ]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token_comp}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token_comp}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Download reports",
+ "item": [
+ {
+ "name": "Download Results Overview",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e614e97d-15e4-4c9b-987f-bd9f272bd295",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/overview",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "overview"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Download All Apis Json",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e72a7387-bd42-4221-be2a-21bca702d1fe",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/json",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "json"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Download WPT Multi Report Url",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "dbcd722b-480a-4544-807a-6f7d6edcb441",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Uri returned\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(typeof jsonData.uri).to.equal(\"string\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{api_name}}/reporturl?tokens={{session_token}},{{session_token_comp}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{api_name}}",
+ "reporturl"
+ ],
+ "query": [
+ {
+ "key": "tokens",
+ "value": "{{session_token}},{{session_token_comp}}"
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Download Results Api Json",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9e302610-c7a7-4de0-989a-9903e77499eb",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "9d79c233-e3a3-4972-af76-aa3dfa2ade4b",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/{{api_name}}/json",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "{{api_name}}",
+ "json"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Download WPT Report Copy",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "0a43dacd-8106-414c-b9b4-750c2a0375a4",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"Uri returned\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(typeof jsonData.uri).to.equal(\"string\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "4b29e651-47e5-48ae-aa5d-b04b62e47e54",
+ "exec": [
+ ""
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/results/{{session_token}}/{{api_name}}/reporturl",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "results",
+ "{{session_token}}",
+ "{{api_name}}",
+ "reporturl"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Malfunctioning List",
+ "item": [
+ {
+ "name": "Create Session \\w Configuration Copy",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "d0ededde-44a0-48f3-bc21-eab3302b7b4d",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "ef1d181c-10b8-4bdd-9b5c-f14d6d9ca046",
+ "exec": [
+ "var automaticTimeout = 120000;",
+ "var manualTimeout = 1000000;",
+ "var specialTimeout = 2000;",
+ "",
+ "pm.globals.set(\"automatic_timeout\", automaticTimeout);",
+ "pm.globals.set(\"manual_timeout\", manualTimeout);",
+ "pm.globals.set(\"special_timeout\", specialTimeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": {{included_tests}},\n \"exclude\": {{excluded_tests}}\n },\n \"types\": [\n \"automatic\"\n ],\n \"timeouts\": {\n \"automatic\": {{automatic_timeout}},\n \"manual\": {{manual_timeout}},\n \"{{special_timeout_test}}\": {{special_timeout}}\n },\n \"labels\": [\"label1\", \"label2\"]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Malfunctioning Empty",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "e4ed838e-02e3-4fe7-9f27-b9aed6d72d02",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"Return empty array\", function() {",
+ " pm.expect(jsonData).to.be.an.instanceof(Array)",
+ " pm.expect(jsonData).to.have.length(0)",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/malfunctioning",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "malfunctioning"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Update Session Malfunctioning Insert Two",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "40b5a135-a839-4ffc-98e5-6d8923d53743",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "PUT",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "value": "application/json",
+ "type": "text"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "[\n\t\"/test/file/one.html\",\n\t\"/test/file/two.html\"\n]",
+ "options": {
+ "raw": {
+ "language": "json"
+ }
+ }
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/malfunctioning",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "malfunctioning"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Malfunctioning Two Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "32953640-3b13-4df0-8af2-36544ebfe59e",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"Return array with two tests\", function() {",
+ " pm.expect(jsonData).to.be.an.instanceof(Array)",
+ " pm.expect(jsonData).to.have.length(2)",
+ " pm.expect(jsonData).to.include(\"/test/file/one.html\")",
+ " pm.expect(jsonData).to.include(\"/test/file/two.html\")",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/malfunctioning",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "malfunctioning"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Update Session Malfunctioning Empty Array",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "784734d2-61db-4ff3-b063-ba9fc07176d3",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "PUT",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "[]",
+ "options": {
+ "raw": {
+ "language": "json"
+ }
+ }
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/malfunctioning",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "malfunctioning"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Malfunctioning Empty",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "75a2bbc5-47aa-4708-b783-2c5e36f59e1a",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"Return empty array\", function() {",
+ " pm.expect(jsonData).to.be.an.instanceof(Array)",
+ " pm.expect(jsonData).to.have.length(0)",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}/malfunctioning",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}",
+ "malfunctioning"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "Sessions API",
+ "item": [
+ {
+ "name": "create session",
+ "item": [
+ {
+ "name": "With Defaults",
+ "item": [
+ {
+ "name": "Prep: Read Available Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "2f9076bb-59aa-43f9-992d-49723c0487e7",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var availableTests = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(typeof availableTests).to.equal(\"object\");",
+ " for (var api of Object.keys(availableTests)) {",
+ " pm.expect(availableTests[api]).to.be.an.instanceof(Array);",
+ " var apiRegExp = new RegExp(\"^/\" + api, \"i\");",
+ " for (var test of availableTests[api]) {",
+ " pm.expect(test).to.match(apiRegExp);",
+ " }",
+ " }",
+ "});",
+ "",
+ "var includedTests = [];",
+ "var excludedTests = [];",
+ "var specialTimeoutTest = \"\";",
+ "",
+ "var apis = Object.keys(availableTests);",
+ "for(var api of apis) {",
+ " if (availableTests[api].length > 50) {",
+ " var subDirs = availableTests[api].map(test => test.split(\"/\").filter(part => !!part).join(\"/\").split(\"/\")[1]).reduce((acc, curr) => acc.indexOf(curr) === -1 ? acc.concat([curr]) : acc, []);",
+ " if (subDirs.length > 2) {",
+ " includedTests.push(\"/\" + api);",
+ " excludedTests.push(\"/\" + api + \"/\" + subDirs[0]);",
+ " specialTimeoutTest = availableTests[api][availableTests[api].length - 1];",
+ " break;",
+ " }",
+ " ",
+ " }",
+ "}",
+ "",
+ "pm.globals.set(\"available_tests\", availableTests);",
+ "pm.globals.set(\"included_tests\", JSON.stringify(includedTests));",
+ "pm.globals.set(\"excluded_tests\", JSON.stringify(excludedTests));",
+ "pm.globals.set(\"special_timeout_test\", specialTimeoutTest.replace(\".\", \"\"));"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "3279e145-d372-4feb-a496-8e3cce73e839",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "2f52b046-49d6-4ae3-bd84-111c5f9e534a",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Configuration is default\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " pm.expect(jsonData.tests.include).to.include(\"/\");",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(60000);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(300000);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ " pm.expect(jsonData.labels).to.be.empty;",
+ " pm.expect(new Date(jsonData.date_created).getTime()).to.be.below(Date.now());",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "86966d77-4519-415d-b893-5af6e18a0acf",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"Session status is pending\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"pending\");",
+ "})",
+ "",
+ "pm.test(\"Start, Finish and Expiration date not set\", function () {",
+ " pm.expect(jsonData.date_started).to.be.null;",
+ " pm.expect(jsonData.date_finished).to.be.null;",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "b67d346a-b2c9-4d67-af7b-27d2f5b8854f",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"pending_tests\");",
+ " pm.expect(typeof jsonData.pending_tests).to.equal(\"object\");",
+ " pm.expect(jsonData).to.have.property(\"running_tests\");",
+ " pm.expect(typeof jsonData.running_tests).to.equal(\"object\");",
+ "});",
+ "",
+ "pm.test(\"All tests are pending tests\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.not.have.lengthOf(0);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(0);",
+ "})",
+ "",
+ "const availableTests = pm.globals.get(\"available_tests\"); ",
+ "",
+ "pm.test(\"All available tests are part of the session\", function () {",
+ " for (var api of Object.keys(jsonData.pending_tests)) {",
+ " for (var test of jsonData.pending_tests[api]) {",
+ " pm.expect(availableTests[api]).to.include(test);",
+ " }",
+ " }",
+ "})"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "a4ad3a47-e64a-491b-91cf-57b36a8eb88f",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "ee4ad964-e66e-45fd-9c7a-7895fcbc9e10",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "With Configuration",
+ "item": [
+ {
+ "name": "Prep: Read Available Tests",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "1a955154-d0c2-4d20-84a7-445da8ecc722",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var availableTests = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(typeof availableTests).to.equal(\"object\");",
+ " for (var api of Object.keys(availableTests)) {",
+ " pm.expect(availableTests[api]).to.be.an.instanceof(Array);",
+ " var apiRegExp = new RegExp(\"^/\" + api, \"i\");",
+ " for (var test of availableTests[api]) {",
+ " pm.expect(test).to.match(apiRegExp);",
+ " }",
+ " }",
+ "});",
+ "",
+ "var includedTests = [];",
+ "var excludedTests = [];",
+ "var specialTimeoutTest = \"\";",
+ "",
+ "var apis = Object.keys(availableTests);",
+ "for(var api of apis) {",
+ " if (availableTests[api].length > 50) {",
+ " var subDirs = availableTests[api].map(test => test.split(\"/\").filter(part => !!part).join(\"/\").split(\"/\")[1]).reduce((acc, curr) => acc.indexOf(curr) === -1 ? acc.concat([curr]) : acc, []);",
+ " if (subDirs.length > 2) {",
+ " includedTests.push(\"/\" + api);",
+ " excludedTests.push(\"/\" + api + \"/\" + subDirs[0]);",
+ " specialTimeoutTest = availableTests[api][availableTests[api].length - 1];",
+ " break;",
+ " }",
+ " ",
+ " }",
+ "}",
+ "",
+ "pm.globals.set(\"available_tests\", availableTests);",
+ "pm.globals.set(\"included_tests\", JSON.stringify(includedTests));",
+ "pm.globals.set(\"excluded_tests\", JSON.stringify(excludedTests));",
+ "pm.globals.set(\"special_timeout_test\", specialTimeoutTest.replace(\".\", \"\"));"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Create Session \\w Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "38973bf0-0737-469d-b2ed-819c1acc6cf3",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "cc54eb7d-209f-4659-8119-ade100bb37ef",
+ "exec": [
+ "var automaticTimeout = 120000;",
+ "var manualTimeout = 1000000;",
+ "var specialTimeout = 2000;",
+ "",
+ "pm.globals.set(\"automatic_timeout\", automaticTimeout);",
+ "pm.globals.set(\"manual_timeout\", manualTimeout);",
+ "pm.globals.set(\"special_timeout\", specialTimeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n \"tests\": {\n \"include\": {{included_tests}},\n \"exclude\": {{excluded_tests}}\n },\n \"types\": [\n \"automatic\"\n ],\n \"timeouts\": {\n \"automatic\": {{automatic_timeout}},\n \"manual\": {{manual_timeout}},\n \"{{special_timeout_test}}\": {{special_timeout}}\n },\n \"labels\": [\"label1\", \"label2\"]\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "dbe16a67-70ba-4989-ab50-bafc8acab84c",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "var includedTests = JSON.parse(pm.globals.get(\"included_tests\"));",
+ "var excludedTests = JSON.parse(pm.globals.get(\"excluded_tests\"));",
+ "var automaticTimeout = pm.globals.get(\"automatic_timeout\");",
+ "var manualTimeout = pm.globals.get(\"manual_timeout\");",
+ "var specialTimeout = pm.globals.get(\"special_timeout\");",
+ "var specialTimeoutTest = pm.globals.get(\"special_timeout_test\");",
+ "",
+ "pm.test(\"Configuration is as specified\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " for (var test of includedTests) {",
+ " pm.expect(jsonData.tests.include).to.include(test);",
+ " }",
+ " for (var test of excludedTests) {",
+ " pm.expect(jsonData.tests.exclude).to.include(test);",
+ " }",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.not.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(automaticTimeout);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(manualTimeout);",
+ " pm.expect(jsonData.timeouts[specialTimeoutTest]).to.equal(specialTimeout);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ " pm.expect(jsonData.labels).to.include(\"label1\");",
+ " pm.expect(jsonData.labels).to.include(\"label2\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "c1514c3e-fd96-4e9c-b799-6fd04185bb63",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"Session status is pending\", function () {",
+ " pm.expect(jsonData.status).to.equal(\"pending\");",
+ "})",
+ "",
+ "pm.test(\"Start and Finish date not set\", function () {",
+ " pm.expect(jsonData.date_started).to.be.null;",
+ " pm.expect(jsonData.date_finished).to.be.null;",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Tests of Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "d31b10eb-4a59-4f92-98b2-73707e37a03b",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"All tests are pending tests\", function () {",
+ " pm.expect(Object.keys(jsonData.pending_tests)).to.not.have.lengthOf(0);",
+ " pm.expect(Object.keys(jsonData.running_tests)).to.have.lengthOf(0);",
+ "})",
+ "",
+ "const availableTests = pm.globals.get(\"available_tests\");",
+ "const includedTests = JSON.parse(pm.globals.get(\"included_tests\"));",
+ "const excludedTests = JSON.parse(pm.globals.get(\"excluded_tests\"));",
+ "",
+ "pm.test(\"Selected subset of tests are part of the session\", function () {",
+ " for (var api of Object.keys(jsonData.pending_tests)) {",
+ " for (var includedTest of includedTests) {",
+ " if (includedTest.split(\"/\").find(part => !!part) === api) {",
+ " var includeRegExp = new RegExp(\"^\" + includedTest, \"i\");",
+ " for (var test of jsonData.pending_tests[api]) {",
+ "                    pm.expect(test).to.match(includeRegExp);",
+ " }",
+ " break;",
+ " }",
+ " }",
+ " for (var excludedTest of excludedTests) {",
+ " if (excludedTest.split(\"/\").find(part => !!part) === api) {",
+ " var excludeRegExp = new RegExp(\"^\" + excludedTest, \"i\");",
+ " for (var test of jsonData.pending_tests[api]) {",
+ "                    pm.expect(test).to.not.match(excludeRegExp);",
+ " }",
+ " break;",
+ " }",
+ " }",
+ " }",
+ "});",
+ "",
+ "const sessionTests = jsonData.pending_tests;",
+ "",
+ "pm.globals.set(\"session_tests\", JSON.stringify(sessionTests));"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/tests/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "tests",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "event": [
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "daf759dc-ea36-4d8f-a524-4dae0cc841dc",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ },
+ {
+ "listen": "test",
+ "script": {
+ "id": "b50f4e6b-23b6-41e1-9bab-cd2ca90b7f43",
+ "type": "text/javascript",
+ "exec": [
+ ""
+ ]
+ }
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "With Expiration",
+ "item": [
+ {
+ "name": "Create Session With Expiration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "53183c44-31d1-48de-89ae-17950ef47363",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "6dadba7e-5352-4b9c-9601-f0eb429341ce",
+ "exec": [
+ "var expirationDate = Date.now() + 3000;",
+ "pm.globals.set(\"expiration_date\", expirationDate);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": "{\n\t\"expiration_date\": {{expiration_date}}\n}"
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Expired Session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "f2bc16d6-099b-487c-a7a6-d48d198ff826",
+ "exec": [
+ "pm.test(\"Status code is 404\", function () {",
+ " pm.response.to.have.status(404);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ },
+ {
+ "listen": "prerequest",
+ "script": {
+ "id": "57f858cd-b882-4fc0-9170-60078cfe5af8",
+ "exec": [
+ "var expirationDate = pm.globals.get(\"expiration_date\");",
+ "",
+ "var timeout = expirationDate - Date.now() + 1000",
+ "",
+ "console.log(timeout)",
+ "",
+ "setTimeout(function () {}, timeout);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "read session",
+ "item": [
+ {
+ "name": "Prep: Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "b50313e7-7fd2-434b-a86e-7ff5fb0af1d8",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "ef9e2d68-6b36-4ce7-8dba-27ebc7153015",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"tests\");",
+ " pm.expect(typeof jsonData.tests).to.equal(\"object\");",
+ " pm.expect(jsonData.tests).to.have.property(\"include\");",
+ " pm.expect(jsonData.tests.include).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData.tests).to.have.property(\"exclude\");",
+ " pm.expect(jsonData.tests.exclude).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"types\");",
+ " pm.expect(jsonData.types).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"user_agent\");",
+ " pm.expect(typeof jsonData.user_agent).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"timeouts\");",
+ " pm.expect(typeof jsonData.timeouts).to.equal(\"object\")",
+ " pm.expect(jsonData.timeouts).to.have.property(\"automatic\");",
+ " pm.expect(typeof jsonData.timeouts.automatic).to.equal(\"number\");",
+ " pm.expect(jsonData.timeouts).to.have.property(\"manual\");",
+ " pm.expect(typeof jsonData.timeouts.manual).to.equal(\"number\");",
+ " pm.expect(jsonData).to.have.property(\"browser\");",
+ " pm.expect(typeof jsonData.browser).to.equal(\"object\");",
+ " pm.expect(jsonData.browser).to.have.property(\"name\");",
+ " pm.expect(typeof jsonData.browser.name).to.equal(\"string\");",
+ " pm.expect(jsonData.browser).to.have.property(\"version\");",
+ " pm.expect(typeof jsonData.browser.version).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"reference_tokens\");",
+ " pm.expect(jsonData.reference_tokens).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"labels\");",
+ " pm.expect(jsonData.labels).to.be.an.instanceof(Array);",
+ " pm.expect(jsonData).to.have.property(\"date_created\");",
+ "});",
+ "",
+ "pm.test(\"Configuration is default\", function () {",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ " pm.expect(jsonData.tests.include).to.include(\"/\");",
+ " pm.expect(jsonData.types).to.include(\"automatic\");",
+ " pm.expect(jsonData.types).to.include(\"manual\");",
+ " pm.expect(jsonData.user_agent).to.include(\"PostmanRuntime\");",
+ " pm.expect(jsonData.timeouts.automatic).to.equal(60000);",
+ " pm.expect(jsonData.timeouts.manual).to.equal(300000);",
+ " pm.expect(jsonData.browser.name).to.equal(\"Other\");",
+ " pm.expect(jsonData.browser.version).to.equal(\"0\");",
+ " pm.expect(jsonData.is_public).to.equal(false);",
+ " pm.expect(jsonData.reference_tokens).to.be.empty;",
+ " pm.expect(jsonData.labels).to.be.empty;",
+ " pm.expect(new Date(jsonData.date_created).getTime()).to.be.below(Date.now());",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "read session status",
+ "item": [
+ {
+ "name": "Prep: Create Session No Configuration",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "8c7bcb56-cd2a-4d99-ad6e-ac4bc4767dce",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "const tokenRegex = new RegExp(\"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\");",
+ "",
+ "pm.test(\"Responds with token in JSON format\", function () {",
+ " var jsonData = pm.response.json();",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(jsonData.token).to.match(tokenRegex);",
+ "});",
+ "",
+ "",
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [
+ {
+ "key": "Content-Type",
+ "name": "Content-Type",
+ "type": "text",
+ "value": "application/json"
+ }
+ ],
+ "body": {
+ "mode": "raw",
+ "raw": ""
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read Session Status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "0639b5a0-d419-4642-baf1-5aa4aedf33c9",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"token\");",
+ " pm.expect(typeof jsonData.token).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"status\");",
+ " pm.expect(typeof jsonData.status).to.equal(\"string\");",
+ " pm.expect(jsonData).to.have.property(\"expiration_date\");",
+ " pm.expect(jsonData.expiration_date).to.be.null;",
+ " pm.expect(jsonData).to.have.property(\"date_started\");",
+ " pm.expect(jsonData).to.have.property(\"date_finished\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "read sessions",
+ "item": [
+ {
+ "name": "Without query parameters",
+ "item": [
+ {
+ "name": "Read sessions",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "a1fe1580-1807-45d1-93bb-e54596895c00",
+ "exec": [
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"JSON structure as expected\", function() {",
+ " pm.expect(response).to.have.property(\"items\");",
+ " pm.expect(response[\"items\"]).to.be.instanceof(Array);",
+ " pm.expect(response).to.have.property(\"_links\");",
+ " pm.expect(response[\"_links\"]).to.be.instanceof(Object);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/_wave/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "_wave",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "Containing created session",
+ "item": [
+ {
+ "name": "Prep: Create session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "8c6722cf-7d5c-4b03-8404-3bdf17e543d9",
+ "exec": [
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "body": {
+ "mode": "raw",
+ "raw": "",
+ "options": {
+ "raw": {
+ "language": "json"
+ }
+ }
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read sessions",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "7478f8f8-c838-4ae0-87ef-9889afc1041d",
+ "exec": [
+ "const token = pm.globals.get(\"session_token\");",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"JSON structure as expected\", function() {",
+ " pm.expect(response).to.have.property(\"items\");",
+ " pm.expect(response[\"items\"]).to.be.instanceof(Array);",
+ " pm.expect(response).to.have.property(\"_links\");",
+ " pm.expect(response[\"_links\"]).to.be.instanceof(Object);",
+ "});",
+ "",
+ "pm.test(\"Created session's token in response\", function() {",
+ " pm.expect(response.items).to.contain(token);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/_wave/api/sessions?index=0&count=1000",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "_wave",
+ "api",
+ "sessions"
+ ],
+ "query": [
+ {
+ "key": "index",
+ "value": "0"
+ },
+ {
+ "key": "count",
+ "value": "1000"
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete Session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "With configuration expansion",
+ "item": [
+ {
+ "name": "Prep: Create session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "9f59683a-df99-4e3c-b461-85c606196218",
+ "exec": [
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "body": {
+ "mode": "raw",
+ "raw": "",
+ "options": {
+ "raw": {
+ "language": "json"
+ }
+ }
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read sessions",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "1cb72090-e989-4f6d-b19a-cd57388b18dc",
+ "exec": [
+ "const token = pm.globals.get(\"session_token\");",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"JSON structure as expected\", function() {",
+ " pm.expect(response).to.have.property(\"items\");",
+ " pm.expect(response[\"items\"]).to.be.instanceof(Array);",
+ " pm.expect(response).to.have.property(\"_links\");",
+ " pm.expect(response[\"_links\"]).to.be.instanceof(Object);",
+ " pm.expect(response).to.have.property(\"_embedded\");",
+ " pm.expect(response[\"_embedded\"]).to.be.instanceof(Object);",
+ " pm.expect(response[\"_embedded\"]).to.have.property(\"configuration\");",
+ " pm.expect(response[\"_embedded\"][\"configuration\"]).to.be.instanceof(Array);",
+ "});",
+ "",
+ "pm.test(\"Created session's token in response\", function() {",
+ " pm.expect(response.items).to.contain(token);",
+ "});",
+ "",
+ "pm.test(\"Created session's token in embedded configuration\", function() {",
+ " let tokenInConfigurationList = false;",
+ " let configurations = response._embedded.configuration;",
+ " for (let configuration of configurations) {",
+ " if (configuration.token !== token) continue;",
+ " tokenInConfigurationList = true;",
+ " }",
+ " pm.expect(tokenInConfigurationList).to.equal(true);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/_wave/api/sessions?index=0&count=1000&expand=configuration",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "_wave",
+ "api",
+ "sessions"
+ ],
+ "query": [
+ {
+ "key": "index",
+ "value": "0"
+ },
+ {
+ "key": "count",
+ "value": "1000"
+ },
+ {
+ "key": "expand",
+ "value": "configuration"
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete Session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ },
+ {
+ "name": "With status expansion",
+ "item": [
+ {
+ "name": "Prep: Create session",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "268fd1fa-4dba-401b-8072-8177a0f250c6",
+ "exec": [
+ "const response = pm.response.json();",
+ "const token = response.token;",
+ "pm.globals.set(\"session_token\", token);"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "POST",
+ "header": [],
+ "body": {
+ "mode": "raw",
+ "raw": "",
+ "options": {
+ "raw": {
+ "language": "json"
+ }
+ }
+ },
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions"
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Read sessions",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "93db4741-b574-4433-8dbd-008611311442",
+ "exec": [
+ "const token = pm.globals.get(\"session_token\");",
+ "const response = pm.response.json();",
+ "",
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "pm.test(\"JSON structure as expected\", function() {",
+ " pm.expect(response).to.have.property(\"items\");",
+ " pm.expect(response[\"items\"]).to.be.instanceof(Array);",
+ " pm.expect(response).to.have.property(\"_links\");",
+ " pm.expect(response[\"_links\"]).to.be.instanceof(Object);",
+ " pm.expect(response).to.have.property(\"_embedded\");",
+ " pm.expect(response[\"_embedded\"]).to.be.instanceof(Object);",
+ " pm.expect(response[\"_embedded\"]).to.have.property(\"status\");",
+ " pm.expect(response[\"_embedded\"][\"status\"]).to.be.instanceof(Array);",
+ "});",
+ "",
+ "pm.test(\"Created session's token in response\", function() {",
+ " pm.expect(response.items).to.contain(token);",
+ "});",
+ "",
+ "pm.test(\"Created session's token in embedded status\", function() {",
+ " let tokenInStatusList = false;",
+ " let statuses = response._embedded.status;",
+ " for (let status of statuses) {",
+ " if (status.token !== token) continue;",
+ " tokenInStatusList = true;",
+ " }",
+ " pm.expect(tokenInStatusList).to.equal(true);",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/_wave/api/sessions?index=0&count=1000&expand=status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "_wave",
+ "api",
+ "sessions"
+ ],
+ "query": [
+ {
+ "key": "index",
+ "value": "0"
+ },
+ {
+ "key": "count",
+ "value": "1000"
+ },
+ {
+ "key": "expand",
+ "value": "status"
+ }
+ ]
+ }
+ },
+ "response": []
+ },
+ {
+ "name": "Clean up: Delete Session",
+ "request": {
+ "method": "DELETE",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/sessions/{{session_token}}",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "sessions",
+ "{{session_token}}"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ },
+ {
+ "name": "General API",
+ "item": [
+ {
+ "name": "server status",
+ "item": [
+ {
+ "name": "Read server status",
+ "event": [
+ {
+ "listen": "test",
+ "script": {
+ "id": "d695b371-c742-4eaf-9179-852b04460ecf",
+ "exec": [
+ "pm.test(\"Status code is 200\", function () {",
+ " pm.response.to.have.status(200);",
+ "});",
+ "",
+ "var jsonData = pm.response.json();",
+ "",
+ "pm.test(\"JSON structure is as expected\", function () {",
+ " pm.expect(jsonData).to.have.property(\"import_results_enabled\");",
+ " pm.expect(typeof jsonData.import_results_enabled).to.equal(\"boolean\");",
+ " pm.expect(jsonData).to.have.property(\"reports_enabled\");",
+ " pm.expect(typeof jsonData.reports_enabled).to.equal(\"boolean\");",
+ " pm.expect(jsonData).to.have.property(\"read_sessions_enabled\");",
+ " pm.expect(typeof jsonData.read_sessions_enabled).to.equal(\"boolean\");",
+ " pm.expect(jsonData).to.have.property(\"version_string\");",
+ " pm.expect(typeof jsonData.version_string).to.equal(\"string\");",
+ "});"
+ ],
+ "type": "text/javascript"
+ }
+ }
+ ],
+ "request": {
+ "method": "GET",
+ "header": [],
+ "url": {
+ "raw": "{{protocol}}://{{host}}:{{port}}/{{web_root}}/api/status",
+ "protocol": "{{protocol}}",
+ "host": [
+ "{{host}}"
+ ],
+ "port": "{{port}}",
+ "path": [
+ "{{web_root}}",
+ "api",
+ "status"
+ ]
+ }
+ },
+ "response": []
+ }
+ ],
+ "protocolProfileBehavior": {},
+ "_postman_isSubFolder": true
+ }
+ ],
+ "protocolProfileBehavior": {}
+ }
+ ],
+ "protocolProfileBehavior": {}
+}
\ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/tests/__init__.py b/testing/web-platform/tests/tools/wave/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/wave/tests/config.json b/testing/web-platform/tests/tools/wave/tests/config.json
new file mode 100644
index 0000000000..672ed1aae9
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/tests/config.json
@@ -0,0 +1,6 @@
+{
+ "ports": {
+ "http": [8080, "auto"],
+ "https": [8483]
+ }
+}
diff --git a/testing/web-platform/tests/tools/wave/tests/test_wave.py b/testing/web-platform/tests/tools/wave/tests/test_wave.py
new file mode 100644
index 0000000000..01b3e93f13
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/tests/test_wave.py
@@ -0,0 +1,54 @@
+# mypy: allow-untyped-defs
+
+import errno
+import os
+import socket
+import subprocess
+import time
+
+from urllib.request import urlopen
+from urllib.error import URLError
+
+from tools.wpt import wpt
+
+def is_port_8080_in_use():
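+    # Probe the port by binding to it: EADDRINUSE means another process is already listening.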
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ try:
+ s.bind(("127.0.0.1", 8080))
+ except OSError as e:
+ if e.errno == errno.EADDRINUSE:
+ return True
+ else:
+ raise e
+ finally:
+ s.close()
+ return False
+
+def test_serve():
+ if is_port_8080_in_use():
+ assert False, "WAVE Test Runner failed: Port 8080 already in use."
+
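+    # Start "wpt serve-wave" in its own session (os.setsid) so the whole process group can be killed in the finally block.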
+ p = subprocess.Popen([os.path.join(wpt.localpaths.repo_root, "wpt"),
+ "serve-wave",
+ "--config",
+ os.path.join(wpt.localpaths.repo_root, "tools/wave/tests/config.json")],
+ preexec_fn=os.setsid)
+
+ start = time.time()
+ try:
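+        # Poll the sessions endpoint until it responds, failing if the server process exits or 60 seconds pass.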
+ while True:
+ if p.poll() is not None:
+ assert False, "WAVE Test Runner failed: Server not running."
+ if time.time() - start > 60:
+ assert False, "WAVE Test Runner failed: Server did not start responding within 60s."
+ try:
+ resp = urlopen("http://web-platform.test:8080/_wave/api/sessions/public")
+ print(resp)
+ except URLError:
+ print("URLError")
+ time.sleep(1)
+ else:
+ assert resp.code == 200
+ break
+ finally:
+ os.killpg(p.pid, 15)
diff --git a/testing/web-platform/tests/tools/wave/tox.ini b/testing/web-platform/tests/tools/wave/tox.ini
new file mode 100644
index 0000000000..5a447b21bf
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/tox.ini
@@ -0,0 +1,18 @@
+[tox]
+envlist = py36,py37,py38,py39,py310
+skipsdist=True
+skip_missing_interpreters = False
+
+[testenv]
+deps =
+ -r{toxinidir}/../requirements_pytest.txt
+ -r{toxinidir}/requirements.txt
+ -r{toxinidir}/../wptrunner/requirements.txt
+ -r{toxinidir}/../wptrunner/requirements_chromium.txt
+ -r{toxinidir}/../wptrunner/requirements_firefox.txt
+
+commands =
+ pytest {posargs}
+
+passenv =
+ TASKCLUSTER_ROOT_URL
diff --git a/testing/web-platform/tests/tools/wave/utils/__init__.py b/testing/web-platform/tests/tools/wave/utils/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/utils/__init__.py
diff --git a/testing/web-platform/tests/tools/wave/utils/deserializer.py b/testing/web-platform/tests/tools/wave/utils/deserializer.py
new file mode 100644
index 0000000000..28d1054f6b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/utils/deserializer.py
@@ -0,0 +1,118 @@
+# mypy: allow-untyped-defs
+
+from ..data.session import Session, UNKNOWN
+from datetime import datetime
+import dateutil.parser
+from dateutil.tz import tzutc
+
+
+def deserialize_sessions(session_dicts):
+ sessions = []
+ for session_dict in session_dicts:
+ session = deserialize_session(session_dict)
+ sessions.append(session)
+ return sessions
+
+
+def deserialize_session(session_dict):
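+    # Every field is optional in the incoming dict; fall back to a sensible default whenever a key is missing.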
+ token = ""
+ if "token" in session_dict:
+ token = session_dict["token"]
+ tests = {"include": [], "exclude": []}
+ if "tests" in session_dict:
+ tests = session_dict["tests"]
+ if "path" in session_dict:
+ test_paths = session_dict["path"].split(", ")
+ tests["include"] = tests["include"] + test_paths
+ test_types = []
+ if "types" in session_dict:
+ test_types = session_dict["types"]
+ user_agent = ""
+ if "user_agent" in session_dict:
+ user_agent = session_dict["user_agent"]
+ labels = []
+ if "labels" in session_dict:
+ labels = session_dict["labels"]
+ timeouts = {}
+ if "timeouts" in session_dict:
+ timeouts = session_dict["timeouts"]
+ pending_tests = None
+ if "pending_tests" in session_dict:
+ pending_tests = session_dict["pending_tests"]
+ running_tests = None
+ if "running_tests" in session_dict:
+ running_tests = session_dict["running_tests"]
+ status = UNKNOWN
+ if "status" in session_dict:
+ status = session_dict["status"]
+ test_state = None
+ if "test_state" in session_dict:
+ test_state = session_dict["test_state"]
+ last_completed_test = None
+ if "last_completed_test" in session_dict:
+ last_completed_test = session_dict["last_completed_test"]
+ date_created = None
+ if "date_created" in session_dict:
+ date_created = session_dict["date_created"]
+ date_created = iso_to_millis(date_created)
+ date_started = None
+ if "date_started" in session_dict:
+ date_started = session_dict["date_started"]
+ date_started = iso_to_millis(date_started)
+ date_finished = None
+ if "date_finished" in session_dict:
+ date_finished = session_dict["date_finished"]
+ date_finished = iso_to_millis(date_finished)
+ is_public = False
+ if "is_public" in session_dict:
+ is_public = session_dict["is_public"]
+ reference_tokens = []
+ if "reference_tokens" in session_dict:
+ reference_tokens = session_dict["reference_tokens"]
+ browser = None
+ if "browser" in session_dict:
+ browser = session_dict["browser"]
+ expiration_date = None
+ if "expiration_date" in session_dict:
+ expiration_date = session_dict["expiration_date"]
+ expiration_date = iso_to_millis(expiration_date)
+ type = None
+ if "type" in session_dict:
+ type = session_dict["type"]
+ malfunctioning_tests = []
+ if "malfunctioning_tests" in session_dict:
+ malfunctioning_tests = session_dict["malfunctioning_tests"]
+
+ return Session(
+ token=token,
+ tests=tests,
+ test_types=test_types,
+ user_agent=user_agent,
+ labels=labels,
+ timeouts=timeouts,
+ pending_tests=pending_tests,
+ running_tests=running_tests,
+ status=status,
+ test_state=test_state,
+ last_completed_test=last_completed_test,
+ date_created=date_created,
+ date_started=date_started,
+ date_finished=date_finished,
+ is_public=is_public,
+ reference_tokens=reference_tokens,
+ browser=browser,
+ expiration_date=expiration_date,
+ type=type,
+ malfunctioning_tests=malfunctioning_tests
+ )
+
+def iso_to_millis(iso_string):
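+    # Convert an ISO 8601 timestamp (treated as UTC) to milliseconds since the Unix epoch; unparsable values are returned unchanged.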
+ if iso_string is None:
+ return None
+ try:
+ date = dateutil.parser.isoparse(iso_string)
+ date = date.replace(tzinfo=tzutc())
+ epoch = datetime.utcfromtimestamp(0).replace(tzinfo=tzutc())
+ return int((date - epoch).total_seconds() * 1000)
+ except Exception:
+ return iso_string
diff --git a/testing/web-platform/tests/tools/wave/utils/serializer.py b/testing/web-platform/tests/tools/wave/utils/serializer.py
new file mode 100644
index 0000000000..995365081d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/utils/serializer.py
@@ -0,0 +1,47 @@
+# mypy: allow-untyped-defs
+
+from datetime import datetime
+
+
+def serialize_session(session):
+ return {
+ "token": session.token,
+ "types": session.test_types,
+ "user_agent": session.user_agent,
+ "labels": session.labels,
+ "timeouts": session.timeouts,
+ "test_state": session.test_state,
+ "last_completed_test": session.last_completed_test,
+ "tests": session.tests,
+ "pending_tests": session.pending_tests,
+ "running_tests": session.running_tests,
+ "status": session.status,
+ "browser": session.browser,
+ "date_created": millis_to_iso(session.date_created),
+ "date_started": millis_to_iso(session.date_started),
+ "date_finished": millis_to_iso(session.date_finished),
+ "is_public": session.is_public,
+ "reference_tokens": session.reference_tokens,
+ "expiration_date": millis_to_iso(session.expiration_date),
+ "type": session.type,
+ "malfunctioning_tests": session.malfunctioning_tests
+ }
+
+def serialize_sessions(sessions):
+ serialized_sessions = []
+ for session in sessions:
+ serialized_sessions.append(serialize_session(session))
+ return serialized_sessions
+
+def serialize_device(device):
+ return {
+ "token": device.token,
+ "user_agent": device.user_agent,
+ "name": device.name,
+ "last_active": millis_to_iso(device.last_active)
+ }
+
+def millis_to_iso(millis):
+ if millis is None:
+ return None
+ return datetime.utcfromtimestamp(millis/1000.0).isoformat() + "+00:00"
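
Going the other way, millis_to_iso() renders those integers back into ISO 8601 with an explicit "+00:00" offset, so the two helpers round-trip at millisecond precision (anything finer is truncated by the int() cast on the way in). An illustrative check, with the same hand-picked value as before:

from datetime import datetime


def millis_to_iso(millis):
    # Same rendering as the serializer: epoch milliseconds -> ISO 8601 UTC.
    if millis is None:
        return None
    return datetime.utcfromtimestamp(millis / 1000.0).isoformat() + "+00:00"


print(millis_to_iso(1556712000000))  # 2019-05-01T12:00:00+00:00
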
diff --git a/testing/web-platform/tests/tools/wave/utils/user_agent_parser.py b/testing/web-platform/tests/tools/wave/utils/user_agent_parser.py
new file mode 100644
index 0000000000..d1da820b53
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/utils/user_agent_parser.py
@@ -0,0 +1,43 @@
+# mypy: allow-untyped-defs
+
+from ua_parser import user_agent_parser
+
+
+def parse_user_agent(user_agent_string):
+ user_agent = user_agent_parser.ParseUserAgent(user_agent_string)
+
+ name = user_agent["family"]
+ version = "0"
+
+ if user_agent["major"] is not None:
+ version = user_agent["major"]
+
+ if user_agent["minor"] is not None:
+ version = version + "." + user_agent["minor"]
+
+ if user_agent["patch"] is not None:
+ version = version + "." + user_agent["patch"]
+
+ return {
+ "name": name,
+ "version": version
+ }
+
+
+def abbreviate_browser_name(name):
+ short_names = {
+ "Chrome": "Ch",
+ "Chrome Mobile WebView": "Ch",
+ "Chromium": "Cm",
+ "WebKit": "Wk",
+ "Safari": "Sf",
+ "Firefox": "FF",
+ "IE": "IE",
+ "Edge": "Ed",
+ "Opera": "Op"
+ }
+
+ if name in short_names:
+ return short_names[name]
+ else:
+ return "Xx"
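
The helpers above build a short name/version pair out of ua-parser's family/major/minor/patch fields and map well-known browsers to two-letter labels. Roughly what that looks like for a desktop Chrome user-agent string (the string is only an example, and the exact version fields depend on the installed ua_parser regex database):

from ua_parser import user_agent_parser

UA = ("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 "
      "(KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36")

parsed = user_agent_parser.ParseUserAgent(UA)
print(parsed["family"])                                   # Chrome
print(parsed["major"], parsed["minor"], parsed["patch"])  # e.g. 73 0 3683

# parse_user_agent() above would combine these fields into
# {"name": "Chrome", "version": "73.0.3683"}, and
# abbreviate_browser_name("Chrome") shortens that to "Ch".
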
diff --git a/testing/web-platform/tests/tools/wave/wave_server.py b/testing/web-platform/tests/tools/wave/wave_server.py
new file mode 100644
index 0000000000..1439c0b7ca
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/wave_server.py
@@ -0,0 +1,145 @@
+# mypy: allow-untyped-defs
+
+import os
+import logging
+
+from . import configuration_loader
+
+from .network.http_handler import HttpHandler
+from .network.api.sessions_api_handler import SessionsApiHandler
+from .network.api.tests_api_handler import TestsApiHandler
+from .network.api.results_api_handler import ResultsApiHandler
+from .network.api.devices_api_handler import DevicesApiHandler
+from .network.api.general_api_handler import GeneralApiHandler
+from .network.static_handler import StaticHandler
+
+from .testing.sessions_manager import SessionsManager
+from .testing.results_manager import ResultsManager
+from .testing.tests_manager import TestsManager
+from .testing.devices_manager import DevicesManager
+from .testing.test_loader import TestLoader
+from .testing.event_dispatcher import EventDispatcher
+
+VERSION_STRING = "v3.3.0"
+
+
+class WaveServer:
+ def initialize(self,
+ tests,
+ configuration_file_path=None,
+ application_directory_path=None,
+ reports_enabled=None):
+ if configuration_file_path is None:
+ configuration_file_path = ""
+ if application_directory_path is None:
+ application_directory_path = ""
+ if reports_enabled is None:
+ reports_enabled = False
+
+ logger = logging.getLogger("wave-server")
+
+ logger.debug("Loading configuration ...")
+ configuration = configuration_loader.load(configuration_file_path)
+
+ # Initialize Managers
+ event_dispatcher = EventDispatcher(
+ event_cache_duration=configuration["event_cache_duration"]
+ )
+ sessions_manager = SessionsManager()
+ results_manager = ResultsManager()
+ tests_manager = TestsManager()
+ devices_manager = DevicesManager()
+ test_loader = TestLoader()
+
+ sessions_manager.initialize(
+ test_loader=test_loader,
+ event_dispatcher=event_dispatcher,
+ tests_manager=tests_manager,
+ results_directory=configuration["results_directory_path"],
+ results_manager=results_manager,
+ configuration=configuration
+ )
+
+ results_manager.initialize(
+ results_directory_path=configuration["results_directory_path"],
+ sessions_manager=sessions_manager,
+ tests_manager=tests_manager,
+ import_results_enabled=configuration["import_results_enabled"],
+ reports_enabled=reports_enabled,
+ persisting_interval=configuration["persisting_interval"]
+ )
+
+ tests_manager.initialize(
+ test_loader,
+ results_manager=results_manager,
+ sessions_manager=sessions_manager,
+ event_dispatcher=event_dispatcher
+ )
+
+ devices_manager.initialize(event_dispatcher)
+
+ exclude_list_file_path = os.path.abspath("./excluded.json")
+ include_list_file_path = os.path.abspath("./included.json")
+ test_loader.initialize(
+ exclude_list_file_path,
+ include_list_file_path,
+ results_manager=results_manager,
+ api_titles=configuration["api_titles"]
+ )
+
+ test_loader.load_tests(tests)
+
+ # Initialize HTTP handlers
+ static_handler = StaticHandler(
+ web_root=configuration["web_root"],
+ http_port=configuration["wpt_port"],
+ https_port=configuration["wpt_ssl_port"]
+ )
+ sessions_api_handler = SessionsApiHandler(
+ sessions_manager=sessions_manager,
+ results_manager=results_manager,
+ event_dispatcher=event_dispatcher,
+ web_root=configuration["web_root"],
+ read_sessions_enabled=configuration["read_sessions_enabled"]
+ )
+ tests_api_handler = TestsApiHandler(
+ tests_manager=tests_manager,
+ sessions_manager=sessions_manager,
+ wpt_port=configuration["wpt_port"],
+ wpt_ssl_port=configuration["wpt_ssl_port"],
+ hostname=configuration["hostname"],
+ web_root=configuration["web_root"],
+ test_loader=test_loader
+ )
+ devices_api_handler = DevicesApiHandler(
+ devices_manager=devices_manager,
+ event_dispatcher=event_dispatcher,
+ web_root=configuration["web_root"]
+ )
+ results_api_handler = ResultsApiHandler(
+ results_manager,
+ sessions_manager,
+ web_root=configuration["web_root"]
+ )
+ general_api_handler = GeneralApiHandler(
+ web_root=configuration["web_root"],
+ read_sessions_enabled=configuration["read_sessions_enabled"],
+ import_results_enabled=configuration["import_results_enabled"],
+ reports_enabled=reports_enabled,
+ version_string=VERSION_STRING,
+ test_type_selection_enabled=configuration["enable_test_type_selection"],
+ test_file_selection_enabled=configuration["enable_test_file_selection"]
+ )
+
+ # Initialize HTTP server
+ http_handler = HttpHandler(
+ static_handler=static_handler,
+ sessions_api_handler=sessions_api_handler,
+ tests_api_handler=tests_api_handler,
+ results_api_handler=results_api_handler,
+ devices_api_handler=devices_api_handler,
+ general_api_handler=general_api_handler,
+ http_port=configuration["wpt_port"],
+ web_root=configuration["web_root"]
+ )
+ self.handle_request = http_handler.handle_request
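
WaveServer.initialize() pulls everything it needs from the dict returned by configuration_loader.load(). Only the key names below are taken from the code above; the values are placeholders for illustration, not the defaults shipped with WAVE:

# Placeholder values; key names mirror what WaveServer.initialize() reads.
example_configuration = {
    "wpt_port": 8000,
    "wpt_ssl_port": 8443,
    "hostname": "web-platform.test",
    "web_root": "/_wave/",
    "results_directory_path": "./results",
    "persisting_interval": 20,
    "import_results_enabled": False,
    "read_sessions_enabled": False,
    "event_cache_duration": 60000,
    "enable_test_type_selection": False,
    "enable_test_file_selection": False,
    "api_titles": [],
}
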
diff --git a/testing/web-platform/tests/tools/wave/www/comparison.html b/testing/web-platform/tests/tools/wave/www/comparison.html
new file mode 100644
index 0000000000..debb971bf4
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/comparison.html
@@ -0,0 +1,444 @@
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8" />
+ <title>Compare Results - Web Platform Test</title>
+ <link rel="stylesheet" href="css/bulma-0.7.5/bulma.min.css" />
+ <link rel="stylesheet" href="css/fontawesome-5.7.2.min.css" />
+ <script src="lib/utils.js"></script>
+ <script src="lib/wave-service.js"></script>
+ <script src="lib/ui.js"></script>
+ <style>
+ body {
+ margin: 0;
+ padding: 0;
+ width: 100vw;
+ height: 100vh;
+ display: flex;
+ justify-content: center;
+ font-family: "Noto Sans", sans-serif;
+ overflow-y: auto;
+ overflow-x: hidden;
+ background-color: white;
+ color: #000;
+ }
+
+ .site-logo {
+ max-width: 300px;
+ margin-left: -15px;
+ }
+
+ .content {
+ width: 1000px;
+ }
+
+ .header {
+ display: flex;
+ margin: 50px 0 30px 0;
+ }
+
+ .header :first-child {
+ flex: 1;
+ }
+
+ .session-header {
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ cursor: pointer;
+ }
+
+ .session-header div {
+ padding: 5px;
+ font-weight: bold;
+ }
+
+ .session-header:hover div {
+ text-decoration: underline;
+ }
+ </style>
+ </head>
+ <body>
+ <script>
+ window.onload = () => {
+ let { tokens, reftokens } = utils.parseQuery(location.search);
+ tokens = tokens ? tokens.split(",") : [];
+ const refTokens = reftokens ? reftokens.split(",") : [];
+ if (tokens) {
+ WaveService.readStatus(function(config) {
+ comparisonUi.state.reportsEnabled = config.reportsEnabled;
+ comparisonUi.render();
+ });
+ WaveService.addRecentSessions(tokens);
+ WaveService.addRecentSessions(refTokens);
+ comparisonUi.state.tokens = tokens;
+ comparisonUi.state.refTokens = refTokens;
+ comparisonUi.render();
+ comparisonUi.refreshData();
+ }
+ };
+
+ const comparisonUi = {
+ state: {
+ tokens: [],
+ refTokens: [],
+ sessions: {}
+ },
+ refreshData: () => {
+ const { tokens, refTokens } = comparisonUi.state;
+ const allTokens = tokens.slice();
+ refTokens
+ .filter(token => allTokens.indexOf(token) === -1)
+ .forEach(token => allTokens.push(token));
+
+ WaveService.readMultipleSessions(allTokens, configurations => {
+ const sessions = {};
+ configurations.forEach(
+ details => (sessions[details.token] = details)
+ );
+ comparisonUi.state.sessions = sessions;
+
+ WaveService.readResultComparison(tokens, results => {
+ comparisonUi.state.results = results;
+ comparisonUi.renderApiResults();
+ });
+
+ const sessionsReferenceTokens = [];
+ configurations.forEach(({ referenceTokens }) =>
+ referenceTokens
+ .filter(token => refTokens.indexOf(token) === -1)
+ .filter(token => sessionsReferenceTokens.indexOf(token) === -1)
+ .forEach(token => sessionsReferenceTokens.push(token))
+ );
+
+ sessionsReferenceTokens.forEach(token =>
+ comparisonUi.state.refTokens.push(token)
+ );
+
+ WaveService.readMultipleSessions(
+ sessionsReferenceTokens,
+ configurations => {
+ const { sessions } = comparisonUi.state;
+ configurations.forEach(
+ details => (sessions[details.token] = details)
+ );
+ comparisonUi.renderDetails();
+ }
+ );
+ });
+ },
+ openResultsOverview() {
+ location.href = WEB_ROOT + "overview.html";
+ },
+ render() {
+ const comparisonView = UI.createElement({
+ className: "content",
+ style: "margin-bottom: 40px;",
+ children: [
+ {
+ className: "header",
+ children: [
+ {
+ children: [
+ {
+ element: "img",
+ src: "res/wavelogo_2016.jpg",
+ className: "site-logo"
+ }
+ ]
+ },
+ {
+ className: "button is-dark is-outlined",
+ onClick: comparisonUi.openResultsOverview,
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-arrow-left"
+ }
+ ]
+ },
+ {
+ text: "Results Overview",
+ element: "span"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ id: "header",
+ children: [
+ { className: "title", text: "Comparison" },
+ { id: "controls" }
+ ]
+ },
+ { id: "details" },
+ { id: "api-results" }
+ ]
+ });
+
+ const root = UI.getRoot();
+ root.innerHTML = "";
+ root.appendChild(comparisonView);
+ comparisonUi.renderDetails();
+ comparisonUi.renderApiResults();
+ },
+ renderDetails() {
+ const detailsView = UI.createElement({
+ style: "margin-bottom: 20px"
+ });
+
+ const { refTokens } = comparisonUi.state;
+ const detailsTable = UI.createElement({
+ element: "table",
+ children: {
+ element: "tbody",
+ children: [
+ {
+ element: "tr",
+ id: "reference-sessions"
+ }
+ ]
+ }
+ });
+ detailsView.appendChild(detailsTable);
+
+ const details = UI.getElement("details");
+ details.innerHTML = "";
+ details.appendChild(detailsView);
+ comparisonUi.renderReferenceSessions();
+ },
+ renderReferenceSessions() {
+ const { sessions, refTokens } = comparisonUi.state;
+ if (!refTokens || refTokens.length === 0) return;
+ if (!Object.keys(sessions) || Object.keys(sessions).length === 0)
+ return;
+ const referenceSessions = refTokens.map(token => sessions[token]);
+ const referenceSessionsTarget = UI.getElement("reference-sessions");
+ referenceSessionsTarget.innerHTML = "";
+
+ const referenceSessionsLabel = UI.createElement({
+ element: "td",
+ text: "Reference Sessions:",
+ style: "width: 175px"
+ });
+ referenceSessionsTarget.appendChild(referenceSessionsLabel);
+
+ const referenceSessionsList = UI.createElement({ element: "td" });
+ referenceSessions.forEach(session => {
+ const { token, browser } = session;
+ const referenceSessionItem = UI.createElement({
+ style: "margin-right: 10px",
+ className: "button is-dark is-small is-rounded is-outlined",
+ onClick: () => WaveService.openSession(token),
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: {
+ element: "i",
+ className: utils.getBrowserIcon(browser.name)
+ }
+ },
+ {
+ element: "span",
+ className: "is-family-monospace",
+ text: token.split("-").shift()
+ }
+ ]
+ });
+ referenceSessionsList.appendChild(referenceSessionItem);
+ });
+ referenceSessionsTarget.appendChild(referenceSessionsList);
+ },
+ renderApiResults() {
+ const apiResultsView = UI.createElement({
+ style: "margin-bottom: 20px"
+ });
+
+ const heading = UI.createElement({
+ className: "title is-4",
+ text: "Results"
+ });
+ apiResultsView.appendChild(heading);
+
+ const { results, tokens, sessions } = comparisonUi.state;
+
+ if (!results) {
+ const loadingIndicator = UI.createElement({
+ className: "level",
+ children: {
+ element: "span",
+ className: "level-item icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-spinner fa-pulse"
+ },
+ {
+ style: "margin-left: 0.4em;",
+ text: "Loading comparison ..."
+ }
+ ]
+ }
+ });
+ apiResultsView.appendChild(loadingIndicator);
+
+ const apiResults = UI.getElement("api-results");
+ apiResults.innerHTML = "";
+ apiResults.appendChild(apiResultsView);
+ return;
+ }
+
+ const resultsTable = UI.createElement({
+ element: "table"
+ });
+ apiResultsView.appendChild(resultsTable);
+
+ const getColor = percent => {
+ const tRed = 28;
+ const tGreen = 166;
+ const tBlue = 76;
+ const mRed = 204;
+ const mGreen = 163;
+ const mBlue = 0;
+ const bRed = 255;
+ const bGreen = 56;
+ const bBlue = 96;
+ if (percent > 50) {
+ const red = mRed + ((percent - 50) / 50) * (tRed - mRed);
+ const green = mGreen + ((percent - 50) / 50) * (tGreen - mGreen);
+ const blue = mBlue + ((percent - 50) / 50) * (tBlue - mBlue);
+ return `rgb(${red}, ${green}, ${blue})`;
+ } else {
+ const red = bRed + (percent / 50) * (mRed - bRed);
+ const green = bGreen + (percent / 50) * (mGreen - bGreen);
+ const blue = bBlue + (percent / 50) * (mBlue - bBlue);
+ return `rgb(${red}, ${green}, ${blue})`;
+ }
+ };
+
+ const resultsTableHeader = UI.createElement({
+ element: "thead",
+ children: {
+ element: "tr",
+ children: [
+ {
+ element: "td",
+ text: "API",
+ style: "vertical-align: bottom; width: 200px"
+ }
+ ]
+ .concat(
+ tokens.map(token => ({
+ element: "td",
+ children: {
+ onClick: () => WaveService.openSession(token),
+ className: "session-header",
+ children: [
+ {
+ element: "i",
+ style: "font-size: 1.5em; margin-right: 0.1em",
+ className: utils.getBrowserIcon(
+ sessions[token].browser.name
+ )
+ },
+ {
+ children: [
+ {
+ style: "margin: 0; padding: 0;",
+ className: "is-family-monospace",
+ text: `${token.split("-").shift()}`
+ },
+ {
+ style: "margin: 0; padding: 0;",
+ text: `${sessions[token].browser.name} ${
+ sessions[token].browser.version
+ }`
+ }
+ ]
+ }
+ ]
+ }
+ }))
+ )
+ .concat([{ element: "td", style: "width: 80px" }])
+ }
+ });
+ resultsTable.appendChild(resultsTableHeader);
+
+ let apis = [];
+ tokens.forEach(token =>
+ Object.keys(results[token])
+ .filter(api => apis.indexOf(api) === -1)
+ .forEach(api => apis.push(api))
+ );
+ apis = apis.sort((apiA, apiB) =>
+ apiA.toLowerCase() > apiB.toLowerCase() ? 1 : -1
+ );
+
+ const resultsTableBody = UI.createElement({
+ element: "tbody",
+ children: apis.map(api => ({
+ element: "tr",
+ children: [{ element: "td", text: api }]
+ .concat(
+ tokens.map(token =>
+ results[token][api]
+ ? {
+ element: "td",
+ style:
+ "text-align: center; font-weight: bold;" +
+ `color: ${getColor(
+ utils.percent(
+ results[token][api],
+ results["total"][api]
+ )
+ )}`,
+ text: `${utils.percent(
+ results[token][api],
+ results["total"][api]
+ )}%`
+ }
+ : {
+ element: "td",
+ text: "Not Tested",
+ style: "text-align: center;"
+ }
+ )
+ )
+ .concat([
+ comparisonUi.state.reportsEnabled ?
+ {
+ element: "td",
+ children: {
+ className:
+ "html-report button is-dark is-outlined is-small",
+ onclick: () =>
+ WaveService.readMultiReportUri(
+ comparisonUi.state.tokens,
+ api,
+ function(uri) {
+ window.open(uri, "_blank");
+ }
+ ),
+ text: "report"
+ }
+ } : null
+ ])
+ }))
+ });
+ resultsTable.appendChild(resultsTableBody);
+
+ const apiResults = UI.getElement("api-results");
+ apiResults.innerHTML = "";
+ apiResults.appendChild(apiResultsView);
+ }
+ };
+ </script>
+ </body>
+</html>
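
The per-API cells in the comparison table are coloured by getColor(), a two-segment linear ramp over the pass percentage: 0% sits at a red, 50% at an amber, 100% at a green, with straight interpolation in between. The same ramp, restated in Python purely for clarity:

def pass_rate_color(percent):
    # 0% -> (255, 56, 96), 50% -> (204, 163, 0), 100% -> (28, 166, 76)
    red, amber, green = (255, 56, 96), (204, 163, 0), (28, 166, 76)
    if percent > 50:
        t, lo, hi = (percent - 50) / 50, amber, green
    else:
        t, lo, hi = percent / 50, red, amber
    return tuple(a + t * (b - a) for a, b in zip(lo, hi))


print(pass_rate_color(0))    # red end of the ramp
print(pass_rate_color(75))   # halfway between amber and green
print(pass_rate_color(100))  # green end of the ramp
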
diff --git a/testing/web-platform/tests/tools/wave/www/configuration.html b/testing/web-platform/tests/tools/wave/www/configuration.html
new file mode 100644
index 0000000000..327a155154
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/configuration.html
@@ -0,0 +1,1586 @@
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8" />
+ <meta name="viewport" content="width=device-width, initial-scale=1" />
+ <title>Session Configuration - Web Platform Test</title>
+ <link rel="stylesheet" href="css/bulma-0.7.5/bulma.min.css" />
+ <link rel="stylesheet" href="css/fontawesome-5.7.2.min.css" />
+ <script src="lib/utils.js"></script>
+ <script src="lib/wave-service.js"></script>
+ <script src="lib/ui.js"></script>
+ <style>
+ .site-logo {
+ max-width: 300px;
+ margin: 0 0 30px -15px;
+ }
+ </style>
+ </head>
+ <body>
+ <script>
+ // var apis = [
+ // { title: "2D Context", path: "/2dcontext" },
+ // { title: "Content Security Policy", path: "/content-security-policy" },
+ // { title: "CSS", path: "/css" },
+ // { title: "DOM", path: "/dom" },
+ // { title: "ECMAScript", path: "/ecmascript" },
+ // { title: "Encrypted media", path: "/encrypted-media" },
+ // { title: "Fetch", path: "/fetch" },
+ // { title: "FileAPI", path: "/FileAPI" },
+ // { title: "Fullscreen", path: "/fullscreen" },
+ // { title: "WebGL", path: "/webgl" },
+ // { title: "HTML", path: "/html" },
+ // { title: "IndexedDB", path: "/IndexedDB" },
+ // { title: "Media Source", path: "/media-source" },
+ // { title: "Notifications", path: "/notifications" },
+ // { title: "Page Visibility", path: "/page-visibility" },
+ // { title: "Service Workers", path: "/service-workers" },
+ // { title: "UI Events", path: "/uievents" },
+ // { title: "WAVE Extra", path: "/wave-extra" },
+ // { title: "Webaudio", path: "/webaudio" },
+ // { title: "WebCryptoAPI", path: "/WebCryptoAPI" },
+ // { title: "Webmessaging", path: "/webmessaging" },
+ // { title: "Websockets", path: "/websockets" },
+ // { title: "Webstorage", path: "/webstorage" },
+ // { title: "Workers", path: "/workers" },
+ // { title: "XHR", path: "/xhr" }
+ // ];
+
+ // var referenceSessions = [
+ // {
+ // title: "Edge 44.17763",
+ // engine: "",
+ // token: "b2924d20-6a93-11e9-98b4-a11fb92a6d1c",
+ // icon: "fab fa-edge"
+ // },
+ // {
+ // title: "Firefox 64.0",
+ // engine: "Gecko 64.0",
+ // token: "bb7aafa0-6a92-11e9-8ec2-04f58dad2e4f",
+ // icon: "fab fa-firefox"
+ // },
+ // {
+ // title: "WebKit 605.1.15",
+ // engine: "Revision 239158",
+ // token: "caf823e0-6a92-11e9-b732-3188d0065ebc",
+ // icon: "fab fa-safari"
+ // },
+ // {
+ // title: "Chromium 73.0.3640.0",
+ // engine: "Blink 537.36",
+ // token: "a50c6db0-6a94-11e9-8d1b-e23fc4555885",
+ // icon: "fab fa-chrome"
+ // }
+ // ];
+
+ var testFileSelectionEnabled = true;
+
+ window.onload = function () {
+ new ConfigurationView();
+ };
+
+ function ConfigurationView() {
+ const query = utils.parseQuery(location.search);
+ var token = query.token;
+ if (token) WaveService.addRecentSession(token);
+ var referenceSessions = [];
+ var apis = [];
+ var testTypeSelectionEnabled = true;
+ var testFileSelectionEnabled = false;
+
+ var types = [
+ { title: "Automatic", value: "automatic" },
+ { title: "Manual", value: "manual" },
+ ];
+ var state = {};
+ loadServerStatus();
+ loadSessionData(token, function () {
+ loadPublicSessionData(function () {
+ renderReferencesField();
+ });
+ });
+ render();
+
+ function loadServerStatus() {
+ WaveService.readStatus(function (status) {
+ testTypeSelectionEnabled = status.testTypeSelectionEnabled;
+ testFileSelectionEnabled = status.testFileSelectionEnabled;
+ renderSessionConfiguration();
+ });
+ }
+
+ function loadSessionData(token, callback) {
+ if (!token) {
+ state.expired = true;
+ return;
+ }
+ WaveService.readSessionStatus(
+ token,
+ function (status) {
+ if (status.status !== "pending") {
+ openResultsPage(token);
+ return;
+ }
+ state.status = status;
+
+ WaveService.readSession(token, function (configuration) {
+ if (
+ configuration.tests.include.findIndex(
+ (test) => test === "/"
+ ) !== -1
+ ) {
+ configuration.tests.include = apis.map((api) => api.path);
+ }
+ state.configurationBackup = utils.copyObject(configuration);
+ state.configuration = configuration;
+ setTimeout(
+ handleExpiration,
+ status.expirationDate.getTime() - Date.now()
+ );
+ renderSessionConfiguration();
+ callback();
+ WaveService.readAvailableApis(function (available_apis) {
+ available_apis = available_apis.sort((apiA, apiB) =>
+ apiA.title.toLowerCase() > apiB.title.toLowerCase() ? 1 : -1
+ );
+ apis = available_apis;
+ selectAllTests();
+ renderSessionConfiguration();
+ });
+ });
+ },
+ function () {
+ state.expired = true;
+ renderSessionConfiguration();
+ renderExcludedTests();
+ renderResumeView();
+ }
+ );
+ }
+
+ function loadPublicSessionData(callback) {
+ WaveService.readPublicSessions(function (tokens) {
+ WaveService.readMultipleSessions(tokens, function (configurations) {
+ console.log(configurations);
+ referenceSessions = configurations
+ .sort((confA, confB) =>
+ confA.browser.name.toLowerCase() >
+ confB.browser.name.toLowerCase()
+ ? 1
+ : -1
+ )
+ .map((configuration) => {
+ var icon = "";
+ switch (configuration.browser.name.toLowerCase()) {
+ case "firefox":
+ icon = "fab fa-firefox";
+ break;
+ case "webkit":
+ case "safari":
+ icon = "fab fa-safari";
+ break;
+ case "edge":
+ icon = "fab fa-edge";
+ break;
+ case "chrome":
+ case "chromium":
+ icon = "fab fa-chrome";
+ break;
+ }
+ return {
+ title:
+ configuration.browser.name +
+ " " +
+ configuration.browser.version,
+ token: configuration.token,
+ icon,
+ engine: configuration.browser.engine || "",
+ };
+ });
+ callback(referenceSessions);
+ });
+ });
+ }
+
+ function handleConfigureSession() {
+ const tokenFragmentInput = UI.getElement("token-fragment");
+ const fragment = tokenFragmentInput.value;
+ findSession(fragment, function (session) {
+ if (!session) {
+ const errorBox = UI.getElement("find-error");
+ errorBox.setAttribute("style", "display: block");
+ return;
+ }
+ tokenFragmentInput.value = "";
+ const errorBox = UI.getElement("find-error");
+ errorBox.setAttribute("style", "display: none");
+ const path = location.pathname + "?token=" + session.token;
+ location.href = path;
+ });
+ }
+
+ function findSession(fragment, callback) {
+ if (!fragment || fragment.length < 8) {
+ callback(null);
+ return;
+ }
+ WaveService.findToken(
+ fragment,
+ function (token) {
+ WaveService.readSession(token, function (session) {
+ WaveService.readSessionStatus(token, function (status) {
+ session.status = status.status;
+ session.dateStarted = status.dateStarted;
+ session.dateFinished = status.dateFinished;
+ callback(session);
+ });
+ });
+ },
+ function () {
+ callback(null);
+ }
+ );
+ }
+
+ function hasIncludedTest(path) {
+ var tests = state.configuration.tests;
+ //var index = tests.include.findIndex(function (test) {
+ // return test.match(new RegExp("^" + path));
+ //});
+ var index = tests.include.indexOf(path);
+ return index !== -1;
+ }
+
+ function handleIncludedTestToggle(path) {
+ var configuration = state.configuration;
+ if (hasIncludedTest(path)) {
+ handleRemoveIncludedTest(path);
+ } else {
+ handleAddIncludedTest(path);
+ }
+ }
+
+ function handleAddIncludedTest(path) {
+ var tests = state.configuration.tests;
+ if (state.tests && state.tests[path.substr(1)]) {
+ tests.include = tests.include.filter(function (test) {
+ return !test.match(new RegExp("^" + path + "/"));
+ });
+ tests.include = tests.include.concat(state.tests[path.substr(1)]);
+ } else {
+ tests.include.push(path);
+ }
+ }
+
+ function handleRemoveIncludedTest(path) {
+ var tests = state.configuration.tests;
+
+ if (state.tests && state.tests[path.substr(1)]) {
+ tests.include = tests.include.filter(function (test) {
+ return !test.match(new RegExp("^" + path + "/"));
+ });
+ } else {
+ var index = tests.include.findIndex((test) => test === path);
+ tests.include.splice(index, 1);
+ }
+ }
+
+ function getIncludedRatio(path) {
+ var includedTests = state.configuration.tests.include;
+ if (!state.tests) {
+ return includedTests.indexOf(path) !== -1 ? 1 : 0;
+ }
+ var count = 0;
+ for (var test of includedTests) {
+ if (!test.match(new RegExp("^" + path))) continue;
+ count++;
+ }
+ return count / state.tests[path.substr(1)].length;
+ }
+
+ function selectAllTests() {
+ var tests = state.configuration.tests;
+ if (state.tests) {
+ tests.include = [];
+ for (var api in state.tests) {
+ tests.include = tests.include.concat(state.tests[api]);
+ }
+ } else {
+ tests.include = apis.map((api) => api.path);
+ }
+ }
+
+ function deselectAllTests() {
+ var configuration = state.configuration;
+ configuration.tests.include = [];
+ }
+
+ function hasTestType(value) {
+ var configuration = state.configuration;
+ var index = configuration.types.findIndex((type) => type === value);
+ return index !== -1;
+ }
+
+ function handleTestTypeToggle(value) {
+ var configuration = state.configuration;
+ if (hasTestType(value)) {
+ var index = configuration.types.findIndex((type) => type === value);
+ configuration.types.splice(index, 1);
+ } else {
+ configuration.types.push(value);
+ }
+ }
+
+ function selectAllTestTypes() {
+ var configuration = state.configuration;
+ configuration.types = types.map((type) => type.value);
+ }
+
+ function deselectAllTestTypes() {
+ var configuration = state.configuration;
+ configuration.types = [];
+ }
+
+ function hasRefSession(session) {
+ var configuration = state.configuration;
+ var index = configuration.referenceTokens.findIndex(
+ (token) => token === session.token
+ );
+ return index !== -1;
+ }
+
+ function handleRefSessionToggle(session) {
+ var configuration = state.configuration;
+ if (hasRefSession(session)) {
+ var index = configuration.referenceTokens.findIndex(
+ (token) => token === session.token
+ );
+ configuration.referenceTokens.splice(index, 1);
+ } else {
+ configuration.referenceTokens.push(session.token);
+ }
+ }
+
+ function selectAllRefSessions() {
+ var configuration = state.configuration;
+ configuration.referenceTokens = referenceSessions.map(
+ (session) => session.token
+ );
+ }
+
+ function deselectAllRefSessions() {
+ var configuration = state.configuration;
+ configuration.referenceTokens = [];
+ }
+
+ function isTestListValid() {
+ var configuration = state.configuration;
+ return configuration.tests.include.length > 0;
+ }
+
+ function isTestTypeListValid() {
+ var configuration = state.configuration;
+ return configuration.types.length > 0;
+ }
+
+ function isConfigurationValid() {
+ if (!isTestListValid()) return false;
+ if (!isTestTypeListValid()) return false;
+ return true;
+ }
+
+ function isSessionStarting() {
+ return state.isStarting;
+ }
+
+ function checkApiList() {
+ var apiErrorElement = UI.getElement("api-error");
+ apiErrorElement.innerHTML = "";
+ if (!isTestListValid()) {
+ apiErrorElement.appendChild(
+ UI.createElement(
+ createErrorMessage(
+ "Select at least one API or at least one test within an API"
+ )
+ )
+ );
+ }
+ renderButtonsField();
+ }
+
+ function handleStart() {
+ if (isSessionStarting()) return;
+ var configuration = state.configuration;
+ var token = configuration.token;
+ WaveService.updateSession(token, configuration, function () {
+ WaveService.updateLabels(token, configuration.labels, function () {
+ WaveService.startSession(token, function () {
+ openResultsPage(token);
+ });
+ });
+ });
+ state.isStarting = true;
+ }
+
+ function handleDiscardChanges() {
+ state.configuration = utils.copyObject(state.configurationBackup);
+ }
+
+ function handleExpiration() {
+ state.expired = true;
+ renderSessionConfiguration();
+ renderResumeView();
+ }
+
+ function openResultsPage(token) {
+ location.href = WEB_ROOT + "results.html?token=" + token;
+ }
+
+ function handleAddExludedTestsRaw() {
+ var excludedTestsTextArea = UI.getElement("excluded-tests-text");
+ var tests = excludedTestsTextArea.value.split("\n");
+ var configuration = state.configuration;
+ var excludedTests = configuration.tests.exclude;
+ for (var test of tests) {
+ if (!test) continue;
+ if (test.startsWith("#")) continue;
+ if (excludedTests.indexOf(test) !== -1) continue;
+ excludedTests.push(test);
+ }
+
+ excludedTestsTextArea.value = "";
+ renderExcludedTests();
+ }
+
+ function handleAddExludedTestsMalfunctioning() {
+ var excludedTestsTextArea = UI.getElement("excluded-tests-text");
+ var token = excludedTestsTextArea.value;
+ var tokenRegExp = new RegExp(
+ "^[a-f0-9]{8}(-[a-f0-9]{0,4}|$)(-[a-f0-9]{0,4}|$)(-[a-f0-9]{0,4}|$)(-[a-f0-9]{0,12}|$)"
+ );
+ var configuration = state.configuration;
+ var excludedTests = configuration.tests.exclude;
+ if (tokenRegExp.test(token)) {
+ WaveService.findToken(
+ token,
+ function (token) {
+ WaveService.readMalfunctioningTests(token, function (
+ malfunctioningTests
+ ) {
+ for (var test of malfunctioningTests) {
+ if (!test) continue;
+ if (excludedTests.indexOf(test) !== -1) continue;
+ excludedTests.push(test);
+ }
+ renderExcludedTests();
+ });
+ },
+ function () {
+ state.excludedTestError = "Session not found";
+ renderExcludedTests();
+ }
+ );
+ } else {
+ state.excludedTestError = "Invalid session token";
+ renderExcludedTests();
+ }
+ }
+
+ function handleAddExludedTestsExcluded() {
+ var excludedTestsTextArea = UI.getElement("excluded-tests-text");
+ var token = excludedTestsTextArea.value;
+ var tokenRegExp = new RegExp(
+ "^[a-f0-9]{8}(-[a-f0-9]{0,4}|$)(-[a-f0-9]{0,4}|$)(-[a-f0-9]{0,4}|$)(-[a-f0-9]{0,12}|$)"
+ );
+ var configuration = state.configuration;
+ var excludedTests = configuration.tests.exclude;
+ if (tokenRegExp.test(token)) {
+ WaveService.findToken(
+ token,
+ function (token) {
+ WaveService.readSession(token, function (sessionConfig) {
+ var prevExcludedTests = sessionConfig.tests.exclude;
+ for (var test of prevExcludedTests) {
+ if (!test) continue;
+ if (excludedTests.indexOf(test) !== -1) continue;
+ excludedTests.push(test);
+ }
+ renderExcludedTests();
+ });
+ },
+ function () {
+ state.excludedTestError = "Session not found";
+ renderExcludedTests();
+ }
+ );
+ } else {
+ state.excludedTestError = "Invalid session token";
+ renderExcludedTests();
+ }
+ }
+
+ function handleRemoveExcludedTest(test) {
+ var configuration = state.configuration;
+ var excludedTests = configuration.tests.exclude;
+ var index = excludedTests.indexOf(test);
+ excludedTests.splice(index, 1);
+ renderExcludedTests();
+ }
+
+ function handleExcludeInputChange(type) {
+ if (state.activeExcludeInput === type) {
+ state.activeExcludeInput = null;
+ } else {
+ state.activeExcludeInput = type;
+ }
+ renderExcludedTests();
+ }
+
+ function showAddLabel() {
+ state.addLabelVisible = true;
+ renderLabelsField();
+ UI.getElement("session-label-input").focus();
+ }
+
+ function hideAddLabel() {
+ state.addLabelVisible = false;
+ renderLabelsField();
+ }
+
+ function addLabel() {
+ var label = UI.getElement("session-label-input").value;
+ if (!label) return;
+ state.configuration.labels.push(label);
+ renderLabelsField();
+ UI.getElement("session-label-input").focus();
+ }
+
+ function removeLabel(index) {
+ const { configuration } = state;
+ configuration.labels.splice(index, 1);
+ renderLabelsField();
+ }
+
+ function resumeSession() {
+ var resumeToken = UI.getElement("resume-token").value;
+ if (!resumeToken) return;
+
+ WaveService.resumeSession(
+ state.configuration.token,
+ resumeToken,
+ function () {
+ openResultsPage(resumeToken);
+ }
+ );
+ }
+
+ function render() {
+ const configurationView = UI.createElement({
+ element: "section",
+ className: "section",
+ children: [
+ {
+ className: "container",
+ style: "margin-bottom: 2em",
+ children: [
+ {
+ element: "img",
+ src: "res/wavelogo_2016.jpg",
+ className: "site-logo",
+ },
+ { className: "title", text: "Session Configuration" },
+ ],
+ },
+ {
+ id: "session-configuration",
+ },
+ {
+ id: "resume-view",
+ className: "container",
+ style: "margin-bottom: 2em",
+ },
+ ],
+ });
+ const root = UI.getRoot();
+ root.innerHTML = "";
+ root.appendChild(configurationView);
+ renderSessionConfiguration();
+ renderResumeView();
+ }
+
+ function renderSessionConfiguration() {
+ var configuration = state.configuration;
+ var status = state.status;
+ var sessionConfigurationView = UI.createElement({});
+ var sessionConfiguration = UI.getElement("session-configuration");
+ sessionConfiguration.innerHTML = "";
+
+ if (state.expired) {
+ var expiredIndicator = UI.createElement({
+ className: "level container",
+ style: "max-width: 500px",
+ children: {
+ element: "span",
+ className: "level-item field column",
+ children: [
+ {
+ element: "article",
+ className: "message is-danger",
+ id: "find-error",
+ children: [
+ {
+ text:
+ "Could not find any sessions! Try adding more characters of the token.",
+ className: "message-body",
+ },
+ ],
+ style: "display: none",
+ },
+ {
+ className: "label has-text-weight-normal",
+ text: "Session token:",
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ children: {
+ style: "display: flex; margin-bottom: 10px;",
+ children: [
+ {
+ element: "input",
+ inputType: "text",
+ className: "input is-family-monospace",
+ id: "token-fragment",
+ placeholder:
+ "First 8 characters or more of session token",
+ onKeyDown: function (event) {
+ if (event.key === "Enter") {
+ handleConfigureSession();
+ }
+ },
+ },
+ ],
+ },
+ },
+ },
+ },
+ {
+ className: "field is-grouped is-grouped-right",
+ children: {
+ className: "control",
+ children: {
+ className: "button is-dark is-outlined",
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-cog",
+ },
+ ],
+ },
+ { text: "Configure Session", element: "span" },
+ ],
+ onclick: function () {
+ handleConfigureSession();
+ },
+ },
+ },
+ },
+ ],
+ },
+ });
+ sessionConfigurationView.appendChild(expiredIndicator);
+ sessionConfiguration.appendChild(sessionConfigurationView);
+ return;
+ }
+
+ if (!configuration) {
+ var loadingIndicator = createLoadingIndicator(
+ "Loading configuration ..."
+ );
+ sessionConfigurationView.appendChild(loadingIndicator);
+ sessionConfiguration.appendChild(sessionConfigurationView);
+ return;
+ }
+
+ sessionConfiguration.parentNode.replaceChild(
+ UI.createElement({
+ id: "session-configuration",
+ className: "container",
+ style: "margin-bottom: 2em",
+ children: [
+ {
+ id: "token-field",
+ },
+ {
+ id: "expiration-field",
+ },
+ {
+ id: "labels-field",
+ },
+ {
+ id: "apis-field",
+ },
+ {
+ id: "exclude-field",
+ },
+ {
+ id: "types-field",
+ },
+ {
+ id: "references-field",
+ },
+ {
+ id: "buttons-field",
+ },
+ ],
+ }),
+ sessionConfiguration
+ );
+
+ renderTokenField();
+ renderExpirationField();
+ renderLabelsField();
+ renderApisField();
+ renderExcludeField();
+ renderTypesField();
+ renderReferencesField();
+ renderButtonsField();
+ }
+
+ function renderTokenField() {
+ var configuration = state.configuration;
+ var tokenField = UI.getElement("token-field");
+ tokenField.parentNode.replaceChild(
+ UI.createElement({
+ id: "token-field",
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Token" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ text: configuration.token,
+ },
+ },
+ },
+ ],
+ }),
+ tokenField
+ );
+ }
+
+ function renderExpirationField() {
+ var status = state.status;
+ var expirationField = UI.getElement("expiration-field");
+ expirationField.parentNode.replaceChild(
+ UI.createElement({
+ id: "expiration-field",
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Expires" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ text: status.expirationDate.toLocaleString(),
+ },
+ },
+ },
+ ],
+ }),
+ expirationField
+ );
+ }
+
+ function renderLabelsField() {
+ var addLabelVisible = state.addLabelVisible;
+ var configuration = state.configuration;
+ var labelsField = UI.getElement("labels-field");
+
+ labelsField.parentNode.replaceChild(
+ UI.createElement({
+ id: "labels-field",
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Labels" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field is-grouped is-grouped-multiline",
+ children: configuration.labels
+ .map((label, index) => ({
+ className: "control",
+ children: {
+ className: "tags has-addons",
+ children: [
+ {
+ element: "span",
+ className: "tag is-info",
+ text: label,
+ },
+ {
+ element: "a",
+ className: "tag is-delete",
+ onClick: () => removeLabel(index),
+ },
+ ],
+ },
+ }))
+ .concat(
+ addLabelVisible
+ ? [
+ {
+ className: "control field is-grouped",
+ children: [
+ {
+ element: "input",
+ className: "input is-small control",
+ style: "width: 10rem",
+ id: "session-label-input",
+ type: "text",
+ onKeyUp: (event) =>
+ event.keyCode === 13 ? addLabel() : null,
+ },
+ {
+ className:
+ "button is-dark is-outlined is-small is-rounded control",
+ text: "save",
+ onClick: addLabel,
+ },
+ {
+ className:
+ "button is-dark is-outlined is-small is-rounded control",
+ text: "cancel",
+ onClick: hideAddLabel,
+ },
+ ],
+ },
+ ]
+ : [
+ {
+ className: "button is-rounded is-small",
+ text: "Add",
+ onClick: showAddLabel,
+ },
+ ]
+ ),
+ },
+ },
+ ],
+ }),
+ labelsField
+ );
+ }
+
+ function renderApisField() {
+ var apisField = UI.getElement("apis-field");
+ apisField.parentNode.replaceChild(
+ UI.createElement({
+ id: "apis-field",
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: [
+ { className: "label", text: "APIs" },
+ createSelectDeselectButtons(
+ function () {
+ selectAllTests();
+ renderApisField();
+ },
+ function () {
+ deselectAllTests();
+ renderApisField();
+ }
+ ),
+ ],
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ children: [
+ {
+ id: "api-error",
+ },
+ {
+ element: "ul",
+ className: "menu-list",
+ children: apis.map(function (api) {
+ return UI.createElement({
+ element: "li",
+ id: api.title,
+ });
+ }),
+ },
+ ],
+ },
+ },
+ },
+ ],
+ }),
+ apisField
+ );
+ renderApisList(apis);
+ checkApiList();
+ }
+
+ function renderApisList(apis) {
+ for (var api of apis) {
+ renderApiList(api);
+ }
+ }
+
+ function renderApiList(api) {
+ var listItem = UI.getElement(api.title);
+ var includedRatio = getIncludedRatio(api.path);
+ var apiListItem = {
+ element: "a",
+ onClick: function (event) {
+ if (!testFileSelectionEnabled) return;
+ if (!state.expandedApis) state.expandedApis = {};
+ state.expandedApis[api.path] = !state.expandedApis[api.path];
+ renderApiList(api);
+ },
+ children: [
+ {
+ element: "input",
+ type: "checkbox",
+ style: "width: 1.3em; height: 1.3em;vertical-align: middle;",
+ checked: includedRatio > 0,
+ indeterminate: includedRatio > 0 && includedRatio < 1,
+ onclick: function (event) {
+ event.stopPropagation();
+ if (includedRatio > 0) {
+ handleRemoveIncludedTest(api.path);
+ } else {
+ handleAddIncludedTest(api.path);
+ }
+ renderApiList(api);
+ },
+ },
+ testFileSelectionEnabled
+ ? {
+ element: "span",
+ style:
+ "display: inline-block;vertical-align: middle;margin-left:0.3em;width: 0.7em",
+ children: {
+ element: "i",
+ className:
+ state.expandedApis && state.expandedApis[api.path]
+ ? "fas fa-angle-down"
+ : "fas fa-angle-right",
+ },
+ }
+ : null,
+ {
+ style:
+ "display: inline-block;vertical-align: middle;margin-left:0.3em;width: 90%",
+ text: api.title,
+ },
+ ],
+ };
+ listItem.innerHTML = "";
+ listItem.appendChild(UI.createElement(apiListItem));
+ if (state.expandedApis && state.expandedApis[api.path]) {
+ listItem.appendChild(createApiTestsList(api));
+ }
+ checkApiList();
+ }
+
+ function createApiTestsList(api) {
+ if (!state.tests) {
+ WaveService.readTestList(
+ function (readTests) {
+ state.tests = readTests;
+ for (var api in state.tests) {
+ if (hasIncludedTest("/" + api)) {
+ handleRemoveIncludedTest("/" + api);
+ handleAddIncludedTest("/" + api);
+ }
+ }
+ renderApiList(this.api);
+ }.bind({ api: api })
+ );
+ return createLoadingIndicator("Loading tests ...");
+ } else {
+ var tests = state.tests[api.path.substr(1)];
+ var testListView = {
+ element: "ul",
+ children: [],
+ };
+
+ testListView.children = testListView.children.concat(
+ tests
+ .sort()
+ .map(function (test) {
+ return {
+ element: "li",
+ onclick: function (event) {
+ handleIncludedTestToggle(test);
+ renderApiList(api);
+ },
+ children: [
+ {
+ element: "a",
+ children: [
+ {
+ element: "input",
+ type: "checkbox",
+ style:
+ "width: 1.3em; height: 1.3em;vertical-align: middle;",
+ checked: hasIncludedTest(test),
+ },
+ {
+ style:
+ "display: inline-block;vertical-align: middle;margin-left:0.3em;max-width: 90%",
+ text: test,
+ },
+ ],
+ },
+ ],
+ };
+ })
+ );
+ return UI.createElement(testListView);
+ }
+ }
+
+ function renderExcludeField() {
+ var excludeField = UI.getElement("exclude-field");
+ excludeField.parentNode.replaceChild(
+ UI.createElement({
+ id: "exclude-field",
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Excluded Tests" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ children: { id: "excluded-tests-view" },
+ },
+ },
+ },
+ ],
+ }),
+ excludeField
+ );
+ renderExcludedTests();
+ }
+
+ function renderTypesField() {
+ if (!testTypeSelectionEnabled) return;
+ var typesField = UI.getElement("types-field");
+ typesField.parentNode.replaceChild(
+ UI.createElement({
+ id: "types-field",
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Test Types" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ children: [
+ isTestTypeListValid()
+ ? null
+ : createErrorMessage("Select at least one test type"),
+ ].concat(createTestTypeList(types)),
+ },
+ },
+ },
+ ],
+ }),
+ typesField
+ );
+ }
+
+ function renderReferencesField() {
+ if (referenceSessions.length === 0) {
+ return;
+ }
+ var referencesField = UI.getElement("references-field");
+ referencesField.parentNode.replaceChild(
+ UI.createElement({
+ id: "references-field",
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: [
+ { className: "label", text: "Reference Browsers" },
+ createSelectDeselectButtons(
+ function () {
+ selectAllRefSessions();
+ renderReferencesField();
+ },
+ function () {
+ deselectAllRefSessions();
+ renderReferencesField();
+ }
+ ),
+ ],
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ children: createRefSessionsList(referenceSessions),
+ },
+ },
+ },
+ ],
+ }),
+ referencesField
+ );
+ }
+
+ function renderButtonsField() {
+ var buttonsField = UI.getElement("buttons-field");
+ buttonsField.parentNode.replaceChild(
+ UI.createElement({
+ id: "buttons-field",
+ className: "level level-right",
+ children: [
+ {
+ element: "button",
+ className: "button is-success",
+ style: "margin-right: 0.3em",
+ disabled: !isConfigurationValid(),
+ onClick: function () {
+ handleStart();
+ renderButtonsField();
+ },
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: isSessionStarting()
+ ? "fas fa-spinner fa-pulse"
+ : "fas fa-play",
+ },
+ ],
+ },
+ {
+ element: "span",
+ text: isSessionStarting()
+ ? "Starting Session ..."
+ : "Start Session",
+ },
+ ],
+ },
+ {
+ element: "button",
+ className: "button",
+ onClick: function () {
+ handleDiscardChanges();
+ renderSessionConfiguration();
+ },
+ disabled: isSessionStarting(),
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-times",
+ },
+ ],
+ },
+ {
+ element: "span",
+ text: "Discard Changes",
+ },
+ ],
+ },
+ ],
+ }),
+ buttonsField
+ );
+ }
+
+ function renderExcludedTests() {
+ var excludedTestsView = UI.getElement("excluded-tests-view");
+ if (!excludedTestsView) return;
+ excludedTestsView.innerHTML = "";
+
+ var errorMessage = state.excludedTestError;
+ if (errorMessage) {
+ var error = createErrorMessage(errorMessage);
+ excludedTestsView.appendChild(UI.createElement(error));
+ }
+
+ var excludeInputs = [
+ { title: "Add Raw", type: "raw" },
+ { title: "Add Malfunctioning", type: "malfunc" },
+ { title: "Add Previous Excluded", type: "excluded" },
+ ];
+
+ var activeExcludeInput = state.activeExcludeInput;
+
+ var excludedTestInputSwitch = UI.createElement({
+ className: "tabs is-centered is-toggle is-small",
+ children: {
+ element: "ul",
+ children: excludeInputs.map(function (input) {
+ return {
+ element: "li",
+ onClick: function () {
+ handleExcludeInputChange(input.type);
+ },
+ className: (function () {
+ if (activeExcludeInput === input.type) return "is-active";
+ return "";
+ })(),
+ children: { element: "a", text: input.title },
+ };
+ }),
+ },
+ });
+ excludedTestsView.appendChild(excludedTestInputSwitch);
+
+ if (activeExcludeInput === "raw") {
+ var rawInput = UI.createElement({
+ children: [
+ {
+ className: "is-size-7",
+ style: "margin-bottom: 20px",
+ text:
+ "Provide paths to test files or directories to exclude them from the session. One path per line, lines starting with # are omitted.",
+ },
+ {
+ element: "textarea",
+ className: "textarea",
+ id: "excluded-tests-text",
+ },
+ {
+ style: "margin-top: 10px",
+ onClick: function () {
+ handleAddExludedTestsRaw();
+ },
+ children: [
+ {
+ element: "button",
+ className: "button",
+ style: "margin-bottom: 20px",
+ text: "Add",
+ },
+ ],
+ },
+ ],
+ });
+ excludedTestsView.appendChild(rawInput);
+ } else if (
+ activeExcludeInput === "malfunc" ||
+ activeExcludeInput === "excluded"
+ ) {
+ var malfuncInput = UI.createElement({
+ style: "margin-bottom: 1em",
+ children: [
+ {
+ className: "is-size-7",
+ style: "margin-bottom: 1em",
+ text:
+ activeExcludeInput === "malfunc"
+ ? "Add malfunctioning tests from past sessions by providing at least the first eight characters of the session's token."
+ : "Add excluded tests from past sessions by providing at least the first eight characters of the session's token.",
+ },
+ {
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Session Token" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field is-grouped is-multiline",
+ children: [
+ {
+ id: "excluded-tests-text",
+ className: "input",
+ element: "input",
+ type: "text",
+ },
+ {
+ className: "button",
+ style: "margin-left: 1em",
+ text: "Add",
+ onClick: function () {
+ if (activeExcludeInput === "malfunc") {
+ handleAddExludedTestsMalfunctioning();
+ } else {
+ handleAddExludedTestsExcluded();
+ }
+ },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ ],
+ });
+ excludedTestsView.appendChild(malfuncInput);
+ }
+
+ var excludedTestsTable = createExcludedTestsTable();
+ var tableWrapper = UI.createElement({
+ style: "max-height: 250px; overflow: auto; margin-bottom: 10px",
+ });
+ tableWrapper.appendChild(excludedTestsTable);
+ excludedTestsView.appendChild(tableWrapper);
+ }
+
+ function renderResumeView() {
+ var query = utils.parseQuery(location.search);
+ var resumeToken = query.resume;
+ if (!resumeToken) resumeToken = "";
+
+ var renderResumeElement = UI.getElement("resume-view");
+ renderResumeElement.innerHTML = "";
+ if (state.expired) return;
+
+ var heading = UI.createElement({
+ element: "h2",
+ className: "title is-5",
+ text: "Resume session",
+ });
+ renderResumeElement.appendChild(heading);
+
+ var resumeControls = UI.createElement({
+ className: "columns",
+ children: [
+ {
+ className: "column",
+ children: {
+ className: "field",
+ children: [
+ {
+ element: "label",
+ className: "label",
+ text: "Token (first 8 characters or more)",
+ },
+ {
+ className: "control",
+ children: {
+ element: "input",
+ id: "resume-token",
+ className: "input is-family-monospace tabbable",
+ type: "text",
+ style: "max-width: 30em",
+ value: resumeToken,
+ },
+ },
+ ],
+ },
+ },
+ {
+ className: "column",
+ style:
+ "display: flex; align-items: flex-end; justify-content: flex-end",
+ children: {
+ className: "button",
+ onClick: function () {
+ resumeSession();
+ },
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: { element: "i", className: "fas fa-redo-alt" },
+ },
+ {
+ element: "span",
+ text: "Resume",
+ },
+ ],
+ },
+ },
+ ],
+ });
+ renderResumeElement.appendChild(resumeControls);
+ }
+
+ function createExcludedTestsTable() {
+ var excludedTests = state.configuration.tests.exclude;
+ if (excludedTests.length === 0) {
+ return UI.createElement({
+ style: "text-align: center",
+ text: "- No Excluded Tests -",
+ });
+ }
+ var table = UI.createElement({
+ element: "table",
+ className: "table",
+ style: "width: 100%",
+ children: excludedTests.map(function (test) {
+ return {
+ element: "tr",
+ children: [
+ { element: "td", style: "width: 100%;", text: test },
+ {
+ element: "td",
+ children: {
+ element: "button",
+ className: "button is-small",
+ onClick: function () {
+ handleRemoveExcludedTest(test);
+ },
+ children: {
+ element: "span",
+ className: "icon",
+ children: {
+ element: "i",
+ className: "fas fa-trash-alt",
+ },
+ },
+ },
+ },
+ ],
+ };
+ }),
+ });
+ return table;
+ }
+
+ function createTestTypeList(types) {
+ return types.map((type) => ({
+ element: "button",
+ style: "margin-right: 0.3em; margin-bottom: 0.3em",
+ className: "button" + (hasTestType(type.value) ? " is-info" : ""),
+ text: type.title,
+ onClick: function (event) {
+ handleTestTypeToggle(type.value);
+ renderTypesField();
+ },
+ }));
+ }
+
+ function createRefSessionsList(referenceSessions) {
+ return referenceSessions.map((session) => ({
+ element: "button",
+ className: "button" + (hasRefSession(session) ? " is-info" : ""),
+ style: "margin-right: 0.3em; margin-bottom: 0.3em; height: auto",
+ onClick: function () {
+ handleRefSessionToggle(session);
+ renderReferencesField();
+ },
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [{ element: "i", className: session.icon }],
+ },
+ {
+ element: "span",
+ children: [
+ { text: session.title },
+ {
+ text: session.engine,
+ style: "font-size: 0.8em",
+ },
+ ],
+ },
+ ],
+ }));
+ }
+
+ function createSelectDeselectButtons(onSelect, onDeselect) {
+ return {
+ style: "margin-top: 0.3em",
+ children: [
+ {
+ element: "button",
+ style: "margin-right: 0.3em",
+ className: "button is-rounded is-small",
+ text: "All",
+ onClick: onSelect,
+ },
+ {
+ element: "button",
+ className: "button is-rounded is-small",
+ text: "None",
+ onClick: onDeselect,
+ },
+ ],
+ };
+ }
+
+ function createErrorMessage(message) {
+ return {
+ element: "article",
+ className: "message is-danger",
+ children: [
+ {
+ className: "message-body",
+ text: message,
+ },
+ ],
+ };
+ }
+
+ function createLoadingIndicator(text) {
+ return UI.createElement({
+ className: "level",
+ children: {
+ element: "span",
+ className: "level-item",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-spinner fa-pulse",
+ },
+ {
+ style: "margin-left: 0.4em;",
+ text: text,
+ },
+ ],
+ },
+ });
+ }
+ }
+ </script>
+ </body>
+</html>
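
Both the "Add Malfunctioning" and "Add Previous Excluded" inputs accept a session token fragment: at least the first eight hex characters of the UUID, optionally followed by further dash-separated groups. The same validation expressed in Python, with a couple of illustrative inputs:

import re

# Same pattern as the tokenRegExp used in configuration.html above.
TOKEN_FRAGMENT = re.compile(
    r"^[a-f0-9]{8}(-[a-f0-9]{0,4}|$)(-[a-f0-9]{0,4}|$)"
    r"(-[a-f0-9]{0,4}|$)(-[a-f0-9]{0,12}|$)"
)

print(bool(TOKEN_FRAGMENT.match("b2924d20")))            # True
print(bool(TOKEN_FRAGMENT.match("b2924d20-6a93-11e9")))  # True
print(bool(TOKEN_FRAGMENT.match("not-a-token")))         # False
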
diff --git a/testing/web-platform/tests/tools/wave/www/css/bulma-0.7.5/bulma.css b/testing/web-platform/tests/tools/wave/www/css/bulma-0.7.5/bulma.css
new file mode 100644
index 0000000000..e793432ae3
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/css/bulma-0.7.5/bulma.css
@@ -0,0 +1,10599 @@
+/*! bulma.io v0.7.5 | MIT License | github.com/jgthms/bulma */
+@-webkit-keyframes spinAround {
+ from {
+ -webkit-transform: rotate(0deg);
+ transform: rotate(0deg);
+ }
+ to {
+ -webkit-transform: rotate(359deg);
+ transform: rotate(359deg);
+ }
+}
+@keyframes spinAround {
+ from {
+ -webkit-transform: rotate(0deg);
+ transform: rotate(0deg);
+ }
+ to {
+ -webkit-transform: rotate(359deg);
+ transform: rotate(359deg);
+ }
+}
+
+.delete, .modal-close, .is-unselectable, .button, .file, .breadcrumb, .pagination-previous,
+.pagination-next,
+.pagination-link,
+.pagination-ellipsis, .tabs {
+ -webkit-touch-callout: none;
+ -webkit-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
+}
+
+.select:not(.is-multiple):not(.is-loading)::after, .navbar-link:not(.is-arrowless)::after {
+ border: 3px solid transparent;
+ border-radius: 2px;
+ border-right: 0;
+ border-top: 0;
+ content: " ";
+ display: block;
+ height: 0.625em;
+ margin-top: -0.4375em;
+ pointer-events: none;
+ position: absolute;
+ top: 50%;
+ -webkit-transform: rotate(-45deg);
+ transform: rotate(-45deg);
+ -webkit-transform-origin: center;
+ transform-origin: center;
+ width: 0.625em;
+}
+
+.box:not(:last-child), .content:not(:last-child), .notification:not(:last-child), .progress:not(:last-child), .table:not(:last-child), .table-container:not(:last-child), .title:not(:last-child),
+.subtitle:not(:last-child), .block:not(:last-child), .highlight:not(:last-child), .breadcrumb:not(:last-child), .level:not(:last-child), .list:not(:last-child), .message:not(:last-child), .tabs:not(:last-child) {
+ margin-bottom: 1.5rem;
+}
+
+.delete, .modal-close {
+ -moz-appearance: none;
+ -webkit-appearance: none;
+ background-color: rgba(10, 10, 10, 0.2);
+ border: none;
+ border-radius: 290486px;
+ cursor: pointer;
+ pointer-events: auto;
+ display: inline-block;
+ flex-grow: 0;
+ flex-shrink: 0;
+ font-size: 0;
+ height: 20px;
+ max-height: 20px;
+ max-width: 20px;
+ min-height: 20px;
+ min-width: 20px;
+ outline: none;
+ position: relative;
+ vertical-align: top;
+ width: 20px;
+}
+
+.delete::before, .modal-close::before, .delete::after, .modal-close::after {
+ background-color: white;
+ content: "";
+ display: block;
+ left: 50%;
+ position: absolute;
+ top: 50%;
+ -webkit-transform: translateX(-50%) translateY(-50%) rotate(45deg);
+ transform: translateX(-50%) translateY(-50%) rotate(45deg);
+ -webkit-transform-origin: center center;
+ transform-origin: center center;
+}
+
+.delete::before, .modal-close::before {
+ height: 2px;
+ width: 50%;
+}
+
+.delete::after, .modal-close::after {
+ height: 50%;
+ width: 2px;
+}
+
+.delete:hover, .modal-close:hover, .delete:focus, .modal-close:focus {
+ background-color: rgba(10, 10, 10, 0.3);
+}
+
+.delete:active, .modal-close:active {
+ background-color: rgba(10, 10, 10, 0.4);
+}
+
+.is-small.delete, .is-small.modal-close {
+ height: 16px;
+ max-height: 16px;
+ max-width: 16px;
+ min-height: 16px;
+ min-width: 16px;
+ width: 16px;
+}
+
+.is-medium.delete, .is-medium.modal-close {
+ height: 24px;
+ max-height: 24px;
+ max-width: 24px;
+ min-height: 24px;
+ min-width: 24px;
+ width: 24px;
+}
+
+.is-large.delete, .is-large.modal-close {
+ height: 32px;
+ max-height: 32px;
+ max-width: 32px;
+ min-height: 32px;
+ min-width: 32px;
+ width: 32px;
+}
+
+.button.is-loading::after, .loader, .select.is-loading::after, .control.is-loading::after {
+ -webkit-animation: spinAround 500ms infinite linear;
+ animation: spinAround 500ms infinite linear;
+ border: 2px solid #dbdbdb;
+ border-radius: 290486px;
+ border-right-color: transparent;
+ border-top-color: transparent;
+ content: "";
+ display: block;
+ height: 1em;
+ position: relative;
+ width: 1em;
+}
+
+.is-overlay, .image.is-square img,
+.image.is-square .has-ratio, .image.is-1by1 img,
+.image.is-1by1 .has-ratio, .image.is-5by4 img,
+.image.is-5by4 .has-ratio, .image.is-4by3 img,
+.image.is-4by3 .has-ratio, .image.is-3by2 img,
+.image.is-3by2 .has-ratio, .image.is-5by3 img,
+.image.is-5by3 .has-ratio, .image.is-16by9 img,
+.image.is-16by9 .has-ratio, .image.is-2by1 img,
+.image.is-2by1 .has-ratio, .image.is-3by1 img,
+.image.is-3by1 .has-ratio, .image.is-4by5 img,
+.image.is-4by5 .has-ratio, .image.is-3by4 img,
+.image.is-3by4 .has-ratio, .image.is-2by3 img,
+.image.is-2by3 .has-ratio, .image.is-3by5 img,
+.image.is-3by5 .has-ratio, .image.is-9by16 img,
+.image.is-9by16 .has-ratio, .image.is-1by2 img,
+.image.is-1by2 .has-ratio, .image.is-1by3 img,
+.image.is-1by3 .has-ratio, .modal, .modal-background, .hero-video {
+ bottom: 0;
+ left: 0;
+ position: absolute;
+ right: 0;
+ top: 0;
+}
+
+.button, .input, .textarea, .select select, .file-cta,
+.file-name, .pagination-previous,
+.pagination-next,
+.pagination-link,
+.pagination-ellipsis {
+ -moz-appearance: none;
+ -webkit-appearance: none;
+ align-items: center;
+ border: 1px solid transparent;
+ border-radius: 4px;
+ box-shadow: none;
+ display: inline-flex;
+ font-size: 1rem;
+ height: 2.25em;
+ justify-content: flex-start;
+ line-height: 1.5;
+ padding-bottom: calc(0.375em - 1px);
+ padding-left: calc(0.625em - 1px);
+ padding-right: calc(0.625em - 1px);
+ padding-top: calc(0.375em - 1px);
+ position: relative;
+ vertical-align: top;
+}
+
+.button:focus, .input:focus, .textarea:focus, .select select:focus, .file-cta:focus,
+.file-name:focus, .pagination-previous:focus,
+.pagination-next:focus,
+.pagination-link:focus,
+.pagination-ellipsis:focus, .is-focused.button, .is-focused.input, .is-focused.textarea, .select select.is-focused, .is-focused.file-cta,
+.is-focused.file-name, .is-focused.pagination-previous,
+.is-focused.pagination-next,
+.is-focused.pagination-link,
+.is-focused.pagination-ellipsis, .button:active, .input:active, .textarea:active, .select select:active, .file-cta:active,
+.file-name:active, .pagination-previous:active,
+.pagination-next:active,
+.pagination-link:active,
+.pagination-ellipsis:active, .is-active.button, .is-active.input, .is-active.textarea, .select select.is-active, .is-active.file-cta,
+.is-active.file-name, .is-active.pagination-previous,
+.is-active.pagination-next,
+.is-active.pagination-link,
+.is-active.pagination-ellipsis {
+ outline: none;
+}
+
+.button[disabled], .input[disabled], .textarea[disabled], .select select[disabled], .file-cta[disabled],
+.file-name[disabled], .pagination-previous[disabled],
+.pagination-next[disabled],
+.pagination-link[disabled],
+.pagination-ellipsis[disabled],
+fieldset[disabled] .button,
+fieldset[disabled] .input,
+fieldset[disabled] .textarea,
+fieldset[disabled] .select select,
+.select fieldset[disabled] select,
+fieldset[disabled] .file-cta,
+fieldset[disabled] .file-name,
+fieldset[disabled] .pagination-previous,
+fieldset[disabled] .pagination-next,
+fieldset[disabled] .pagination-link,
+fieldset[disabled] .pagination-ellipsis {
+ cursor: not-allowed;
+}
+
+/*! minireset.css v0.0.4 | MIT License | github.com/jgthms/minireset.css */
+html,
+body,
+p,
+ol,
+ul,
+li,
+dl,
+dt,
+dd,
+blockquote,
+figure,
+fieldset,
+legend,
+textarea,
+pre,
+iframe,
+hr,
+h1,
+h2,
+h3,
+h4,
+h5,
+h6 {
+ margin: 0;
+ padding: 0;
+}
+
+h1,
+h2,
+h3,
+h4,
+h5,
+h6 {
+ font-size: 100%;
+ font-weight: normal;
+}
+
+ul {
+ list-style: none;
+}
+
+button,
+input,
+select,
+textarea {
+ margin: 0;
+}
+
+html {
+ box-sizing: border-box;
+}
+
+*, *::before, *::after {
+ box-sizing: inherit;
+}
+
+img,
+embed,
+iframe,
+object,
+video {
+ height: auto;
+ max-width: 100%;
+}
+
+audio {
+ max-width: 100%;
+}
+
+iframe {
+ border: 0;
+}
+
+table {
+ border-collapse: collapse;
+ border-spacing: 0;
+}
+
+td,
+th {
+ padding: 0;
+}
+
+td:not([align]),
+th:not([align]) {
+ text-align: left;
+}
+
+html {
+ background-color: white;
+ font-size: 16px;
+ -moz-osx-font-smoothing: grayscale;
+ -webkit-font-smoothing: antialiased;
+ min-width: 300px;
+ overflow-x: hidden;
+ overflow-y: scroll;
+ text-rendering: optimizeLegibility;
+ -webkit-text-size-adjust: 100%;
+ -moz-text-size-adjust: 100%;
+ -ms-text-size-adjust: 100%;
+ text-size-adjust: 100%;
+}
+
+article,
+aside,
+figure,
+footer,
+header,
+hgroup,
+section {
+ display: block;
+}
+
+body,
+button,
+input,
+select,
+textarea {
+ font-family: BlinkMacSystemFont, -apple-system, "Segoe UI", "Roboto", "Oxygen", "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", "Helvetica", "Arial", sans-serif;
+}
+
+code,
+pre {
+ -moz-osx-font-smoothing: auto;
+ -webkit-font-smoothing: auto;
+ font-family: monospace;
+}
+
+body {
+ color: #4a4a4a;
+ font-size: 1em;
+ font-weight: 400;
+ line-height: 1.5;
+}
+
+a {
+ color: #3273dc;
+ cursor: pointer;
+ text-decoration: none;
+}
+
+a strong {
+ color: currentColor;
+}
+
+a:hover {
+ color: #363636;
+}
+
+code {
+ background-color: whitesmoke;
+ color: #ff3860;
+ font-size: 0.875em;
+ font-weight: normal;
+ padding: 0.25em 0.5em 0.25em;
+}
+
+hr {
+ background-color: whitesmoke;
+ border: none;
+ display: block;
+ height: 2px;
+ margin: 1.5rem 0;
+}
+
+img {
+ height: auto;
+ max-width: 100%;
+}
+
+input[type="checkbox"],
+input[type="radio"] {
+ vertical-align: baseline;
+}
+
+small {
+ font-size: 0.875em;
+}
+
+span {
+ font-style: inherit;
+ font-weight: inherit;
+}
+
+strong {
+ color: #363636;
+ font-weight: 700;
+}
+
+fieldset {
+ border: none;
+}
+
+pre {
+ -webkit-overflow-scrolling: touch;
+ background-color: whitesmoke;
+ color: #4a4a4a;
+ font-size: 0.875em;
+ overflow-x: auto;
+ padding: 1.25rem 1.5rem;
+ white-space: pre;
+ word-wrap: normal;
+}
+
+pre code {
+ background-color: transparent;
+ color: currentColor;
+ font-size: 1em;
+ padding: 0;
+}
+
+table td,
+table th {
+ vertical-align: top;
+}
+
+table td:not([align]),
+table th:not([align]) {
+ text-align: left;
+}
+
+table th {
+ color: #363636;
+}
+
+.is-clearfix::after {
+ clear: both;
+ content: " ";
+ display: table;
+}
+
+.is-pulled-left {
+ float: left !important;
+}
+
+.is-pulled-right {
+ float: right !important;
+}
+
+.is-clipped {
+ overflow: hidden !important;
+}
+
+.is-size-1 {
+ font-size: 3rem !important;
+}
+
+.is-size-2 {
+ font-size: 2.5rem !important;
+}
+
+.is-size-3 {
+ font-size: 2rem !important;
+}
+
+.is-size-4 {
+ font-size: 1.5rem !important;
+}
+
+.is-size-5 {
+ font-size: 1.25rem !important;
+}
+
+.is-size-6 {
+ font-size: 1rem !important;
+}
+
+.is-size-7 {
+ font-size: 0.75rem !important;
+}
+
+@media screen and (max-width: 768px) {
+ .is-size-1-mobile {
+ font-size: 3rem !important;
+ }
+ .is-size-2-mobile {
+ font-size: 2.5rem !important;
+ }
+ .is-size-3-mobile {
+ font-size: 2rem !important;
+ }
+ .is-size-4-mobile {
+ font-size: 1.5rem !important;
+ }
+ .is-size-5-mobile {
+ font-size: 1.25rem !important;
+ }
+ .is-size-6-mobile {
+ font-size: 1rem !important;
+ }
+ .is-size-7-mobile {
+ font-size: 0.75rem !important;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .is-size-1-tablet {
+ font-size: 3rem !important;
+ }
+ .is-size-2-tablet {
+ font-size: 2.5rem !important;
+ }
+ .is-size-3-tablet {
+ font-size: 2rem !important;
+ }
+ .is-size-4-tablet {
+ font-size: 1.5rem !important;
+ }
+ .is-size-5-tablet {
+ font-size: 1.25rem !important;
+ }
+ .is-size-6-tablet {
+ font-size: 1rem !important;
+ }
+ .is-size-7-tablet {
+ font-size: 0.75rem !important;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .is-size-1-touch {
+ font-size: 3rem !important;
+ }
+ .is-size-2-touch {
+ font-size: 2.5rem !important;
+ }
+ .is-size-3-touch {
+ font-size: 2rem !important;
+ }
+ .is-size-4-touch {
+ font-size: 1.5rem !important;
+ }
+ .is-size-5-touch {
+ font-size: 1.25rem !important;
+ }
+ .is-size-6-touch {
+ font-size: 1rem !important;
+ }
+ .is-size-7-touch {
+ font-size: 0.75rem !important;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .is-size-1-desktop {
+ font-size: 3rem !important;
+ }
+ .is-size-2-desktop {
+ font-size: 2.5rem !important;
+ }
+ .is-size-3-desktop {
+ font-size: 2rem !important;
+ }
+ .is-size-4-desktop {
+ font-size: 1.5rem !important;
+ }
+ .is-size-5-desktop {
+ font-size: 1.25rem !important;
+ }
+ .is-size-6-desktop {
+ font-size: 1rem !important;
+ }
+ .is-size-7-desktop {
+ font-size: 0.75rem !important;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .is-size-1-widescreen {
+ font-size: 3rem !important;
+ }
+ .is-size-2-widescreen {
+ font-size: 2.5rem !important;
+ }
+ .is-size-3-widescreen {
+ font-size: 2rem !important;
+ }
+ .is-size-4-widescreen {
+ font-size: 1.5rem !important;
+ }
+ .is-size-5-widescreen {
+ font-size: 1.25rem !important;
+ }
+ .is-size-6-widescreen {
+ font-size: 1rem !important;
+ }
+ .is-size-7-widescreen {
+ font-size: 0.75rem !important;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .is-size-1-fullhd {
+ font-size: 3rem !important;
+ }
+ .is-size-2-fullhd {
+ font-size: 2.5rem !important;
+ }
+ .is-size-3-fullhd {
+ font-size: 2rem !important;
+ }
+ .is-size-4-fullhd {
+ font-size: 1.5rem !important;
+ }
+ .is-size-5-fullhd {
+ font-size: 1.25rem !important;
+ }
+ .is-size-6-fullhd {
+ font-size: 1rem !important;
+ }
+ .is-size-7-fullhd {
+ font-size: 0.75rem !important;
+ }
+}
+
+.has-text-centered {
+ text-align: center !important;
+}
+
+.has-text-justified {
+ text-align: justify !important;
+}
+
+.has-text-left {
+ text-align: left !important;
+}
+
+.has-text-right {
+ text-align: right !important;
+}
+
+@media screen and (max-width: 768px) {
+ .has-text-centered-mobile {
+ text-align: center !important;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .has-text-centered-tablet {
+ text-align: center !important;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .has-text-centered-tablet-only {
+ text-align: center !important;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .has-text-centered-touch {
+ text-align: center !important;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .has-text-centered-desktop {
+ text-align: center !important;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .has-text-centered-desktop-only {
+ text-align: center !important;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .has-text-centered-widescreen {
+ text-align: center !important;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .has-text-centered-widescreen-only {
+ text-align: center !important;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .has-text-centered-fullhd {
+ text-align: center !important;
+ }
+}
+
+@media screen and (max-width: 768px) {
+ .has-text-justified-mobile {
+ text-align: justify !important;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .has-text-justified-tablet {
+ text-align: justify !important;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .has-text-justified-tablet-only {
+ text-align: justify !important;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .has-text-justified-touch {
+ text-align: justify !important;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .has-text-justified-desktop {
+ text-align: justify !important;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .has-text-justified-desktop-only {
+ text-align: justify !important;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .has-text-justified-widescreen {
+ text-align: justify !important;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .has-text-justified-widescreen-only {
+ text-align: justify !important;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .has-text-justified-fullhd {
+ text-align: justify !important;
+ }
+}
+
+@media screen and (max-width: 768px) {
+ .has-text-left-mobile {
+ text-align: left !important;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .has-text-left-tablet {
+ text-align: left !important;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .has-text-left-tablet-only {
+ text-align: left !important;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .has-text-left-touch {
+ text-align: left !important;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .has-text-left-desktop {
+ text-align: left !important;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .has-text-left-desktop-only {
+ text-align: left !important;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .has-text-left-widescreen {
+ text-align: left !important;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .has-text-left-widescreen-only {
+ text-align: left !important;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .has-text-left-fullhd {
+ text-align: left !important;
+ }
+}
+
+@media screen and (max-width: 768px) {
+ .has-text-right-mobile {
+ text-align: right !important;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .has-text-right-tablet {
+ text-align: right !important;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .has-text-right-tablet-only {
+ text-align: right !important;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .has-text-right-touch {
+ text-align: right !important;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .has-text-right-desktop {
+ text-align: right !important;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .has-text-right-desktop-only {
+ text-align: right !important;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .has-text-right-widescreen {
+ text-align: right !important;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .has-text-right-widescreen-only {
+ text-align: right !important;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .has-text-right-fullhd {
+ text-align: right !important;
+ }
+}
+
+.is-capitalized {
+ text-transform: capitalize !important;
+}
+
+.is-lowercase {
+ text-transform: lowercase !important;
+}
+
+.is-uppercase {
+ text-transform: uppercase !important;
+}
+
+.is-italic {
+ font-style: italic !important;
+}
+
+.has-text-white {
+ color: white !important;
+}
+
+a.has-text-white:hover, a.has-text-white:focus {
+ color: #e6e6e6 !important;
+}
+
+.has-background-white {
+ background-color: white !important;
+}
+
+.has-text-black {
+ color: #0a0a0a !important;
+}
+
+a.has-text-black:hover, a.has-text-black:focus {
+ color: black !important;
+}
+
+.has-background-black {
+ background-color: #0a0a0a !important;
+}
+
+.has-text-light {
+ color: whitesmoke !important;
+}
+
+a.has-text-light:hover, a.has-text-light:focus {
+ color: #dbdbdb !important;
+}
+
+.has-background-light {
+ background-color: whitesmoke !important;
+}
+
+.has-text-dark {
+ color: #363636 !important;
+}
+
+a.has-text-dark:hover, a.has-text-dark:focus {
+ color: #1c1c1c !important;
+}
+
+.has-background-dark {
+ background-color: #363636 !important;
+}
+
+.has-text-primary {
+ color: #00d1b2 !important;
+}
+
+a.has-text-primary:hover, a.has-text-primary:focus {
+ color: #009e86 !important;
+}
+
+.has-background-primary {
+ background-color: #00d1b2 !important;
+}
+
+.has-text-link {
+ color: #3273dc !important;
+}
+
+a.has-text-link:hover, a.has-text-link:focus {
+ color: #205bbc !important;
+}
+
+.has-background-link {
+ background-color: #3273dc !important;
+}
+
+.has-text-info {
+ color: #209cee !important;
+}
+
+a.has-text-info:hover, a.has-text-info:focus {
+ color: #0f81cc !important;
+}
+
+.has-background-info {
+ background-color: #209cee !important;
+}
+
+.has-text-success {
+ color: #23d160 !important;
+}
+
+a.has-text-success:hover, a.has-text-success:focus {
+ color: #1ca64c !important;
+}
+
+.has-background-success {
+ background-color: #23d160 !important;
+}
+
+.has-text-warning {
+ color: #ffdd57 !important;
+}
+
+a.has-text-warning:hover, a.has-text-warning:focus {
+ color: #ffd324 !important;
+}
+
+.has-background-warning {
+ background-color: #ffdd57 !important;
+}
+
+.has-text-danger {
+ color: #ff3860 !important;
+}
+
+a.has-text-danger:hover, a.has-text-danger:focus {
+ color: #ff0537 !important;
+}
+
+.has-background-danger {
+ background-color: #ff3860 !important;
+}
+
+.has-text-black-bis {
+ color: #121212 !important;
+}
+
+.has-background-black-bis {
+ background-color: #121212 !important;
+}
+
+.has-text-black-ter {
+ color: #242424 !important;
+}
+
+.has-background-black-ter {
+ background-color: #242424 !important;
+}
+
+.has-text-grey-darker {
+ color: #363636 !important;
+}
+
+.has-background-grey-darker {
+ background-color: #363636 !important;
+}
+
+.has-text-grey-dark {
+ color: #4a4a4a !important;
+}
+
+.has-background-grey-dark {
+ background-color: #4a4a4a !important;
+}
+
+.has-text-grey {
+ color: #7a7a7a !important;
+}
+
+.has-background-grey {
+ background-color: #7a7a7a !important;
+}
+
+.has-text-grey-light {
+ color: #b5b5b5 !important;
+}
+
+.has-background-grey-light {
+ background-color: #b5b5b5 !important;
+}
+
+.has-text-grey-lighter {
+ color: #dbdbdb !important;
+}
+
+.has-background-grey-lighter {
+ background-color: #dbdbdb !important;
+}
+
+.has-text-white-ter {
+ color: whitesmoke !important;
+}
+
+.has-background-white-ter {
+ background-color: whitesmoke !important;
+}
+
+.has-text-white-bis {
+ color: #fafafa !important;
+}
+
+.has-background-white-bis {
+ background-color: #fafafa !important;
+}
+
+.has-text-weight-light {
+ font-weight: 300 !important;
+}
+
+.has-text-weight-normal {
+ font-weight: 400 !important;
+}
+
+.has-text-weight-medium {
+ font-weight: 500 !important;
+}
+
+.has-text-weight-semibold {
+ font-weight: 600 !important;
+}
+
+.has-text-weight-bold {
+ font-weight: 700 !important;
+}
+
+.is-family-primary {
+ font-family: BlinkMacSystemFont, -apple-system, "Segoe UI", "Roboto", "Oxygen", "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", "Helvetica", "Arial", sans-serif !important;
+}
+
+.is-family-secondary {
+ font-family: BlinkMacSystemFont, -apple-system, "Segoe UI", "Roboto", "Oxygen", "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", "Helvetica", "Arial", sans-serif !important;
+}
+
+.is-family-sans-serif {
+ font-family: BlinkMacSystemFont, -apple-system, "Segoe UI", "Roboto", "Oxygen", "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", "Helvetica", "Arial", sans-serif !important;
+}
+
+.is-family-monospace {
+ font-family: monospace !important;
+}
+
+.is-family-code {
+ font-family: monospace !important;
+}
+
+.is-block {
+ display: block !important;
+}
+
+@media screen and (max-width: 768px) {
+ .is-block-mobile {
+ display: block !important;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .is-block-tablet {
+ display: block !important;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .is-block-tablet-only {
+ display: block !important;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .is-block-touch {
+ display: block !important;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .is-block-desktop {
+ display: block !important;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .is-block-desktop-only {
+ display: block !important;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .is-block-widescreen {
+ display: block !important;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .is-block-widescreen-only {
+ display: block !important;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .is-block-fullhd {
+ display: block !important;
+ }
+}
+
+.is-flex {
+ display: flex !important;
+}
+
+@media screen and (max-width: 768px) {
+ .is-flex-mobile {
+ display: flex !important;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .is-flex-tablet {
+ display: flex !important;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .is-flex-tablet-only {
+ display: flex !important;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .is-flex-touch {
+ display: flex !important;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .is-flex-desktop {
+ display: flex !important;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .is-flex-desktop-only {
+ display: flex !important;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .is-flex-widescreen {
+ display: flex !important;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .is-flex-widescreen-only {
+ display: flex !important;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .is-flex-fullhd {
+ display: flex !important;
+ }
+}
+
+.is-inline {
+ display: inline !important;
+}
+
+@media screen and (max-width: 768px) {
+ .is-inline-mobile {
+ display: inline !important;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .is-inline-tablet {
+ display: inline !important;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .is-inline-tablet-only {
+ display: inline !important;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .is-inline-touch {
+ display: inline !important;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .is-inline-desktop {
+ display: inline !important;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .is-inline-desktop-only {
+ display: inline !important;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .is-inline-widescreen {
+ display: inline !important;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .is-inline-widescreen-only {
+ display: inline !important;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .is-inline-fullhd {
+ display: inline !important;
+ }
+}
+
+.is-inline-block {
+ display: inline-block !important;
+}
+
+@media screen and (max-width: 768px) {
+ .is-inline-block-mobile {
+ display: inline-block !important;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .is-inline-block-tablet {
+ display: inline-block !important;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .is-inline-block-tablet-only {
+ display: inline-block !important;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .is-inline-block-touch {
+ display: inline-block !important;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .is-inline-block-desktop {
+ display: inline-block !important;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .is-inline-block-desktop-only {
+ display: inline-block !important;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .is-inline-block-widescreen {
+ display: inline-block !important;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .is-inline-block-widescreen-only {
+ display: inline-block !important;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .is-inline-block-fullhd {
+ display: inline-block !important;
+ }
+}
+
+.is-inline-flex {
+ display: inline-flex !important;
+}
+
+@media screen and (max-width: 768px) {
+ .is-inline-flex-mobile {
+ display: inline-flex !important;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .is-inline-flex-tablet {
+ display: inline-flex !important;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .is-inline-flex-tablet-only {
+ display: inline-flex !important;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .is-inline-flex-touch {
+ display: inline-flex !important;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .is-inline-flex-desktop {
+ display: inline-flex !important;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .is-inline-flex-desktop-only {
+ display: inline-flex !important;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .is-inline-flex-widescreen {
+ display: inline-flex !important;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .is-inline-flex-widescreen-only {
+ display: inline-flex !important;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .is-inline-flex-fullhd {
+ display: inline-flex !important;
+ }
+}
+
+.is-hidden {
+ display: none !important;
+}
+
+.is-sr-only {
+ border: none !important;
+ clip: rect(0, 0, 0, 0) !important;
+ height: 0.01em !important;
+ overflow: hidden !important;
+ padding: 0 !important;
+ position: absolute !important;
+ white-space: nowrap !important;
+ width: 0.01em !important;
+}
+
+@media screen and (max-width: 768px) {
+ .is-hidden-mobile {
+ display: none !important;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .is-hidden-tablet {
+ display: none !important;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .is-hidden-tablet-only {
+ display: none !important;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .is-hidden-touch {
+ display: none !important;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .is-hidden-desktop {
+ display: none !important;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .is-hidden-desktop-only {
+ display: none !important;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .is-hidden-widescreen {
+ display: none !important;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .is-hidden-widescreen-only {
+ display: none !important;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .is-hidden-fullhd {
+ display: none !important;
+ }
+}
+
+.is-invisible {
+ visibility: hidden !important;
+}
+
+@media screen and (max-width: 768px) {
+ .is-invisible-mobile {
+ visibility: hidden !important;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .is-invisible-tablet {
+ visibility: hidden !important;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .is-invisible-tablet-only {
+ visibility: hidden !important;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .is-invisible-touch {
+ visibility: hidden !important;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .is-invisible-desktop {
+ visibility: hidden !important;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .is-invisible-desktop-only {
+ visibility: hidden !important;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .is-invisible-widescreen {
+ visibility: hidden !important;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .is-invisible-widescreen-only {
+ visibility: hidden !important;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .is-invisible-fullhd {
+ visibility: hidden !important;
+ }
+}
+
+.is-marginless {
+ margin: 0 !important;
+}
+
+.is-paddingless {
+ padding: 0 !important;
+}
+
+.is-radiusless {
+ border-radius: 0 !important;
+}
+
+.is-shadowless {
+ box-shadow: none !important;
+}
+
+.is-relative {
+ position: relative !important;
+}
+
+.box {
+ background-color: white;
+ border-radius: 6px;
+ box-shadow: 0 2px 3px rgba(10, 10, 10, 0.1), 0 0 0 1px rgba(10, 10, 10, 0.1);
+ color: #4a4a4a;
+ display: block;
+ padding: 1.25rem;
+}
+
+a.box:hover, a.box:focus {
+ box-shadow: 0 2px 3px rgba(10, 10, 10, 0.1), 0 0 0 1px #3273dc;
+}
+
+a.box:active {
+ box-shadow: inset 0 1px 2px rgba(10, 10, 10, 0.2), 0 0 0 1px #3273dc;
+}
+
+.button {
+ background-color: white;
+ border-color: #dbdbdb;
+ border-width: 1px;
+ color: #363636;
+ cursor: pointer;
+ justify-content: center;
+ padding-bottom: calc(0.375em - 1px);
+ padding-left: 0.75em;
+ padding-right: 0.75em;
+ padding-top: calc(0.375em - 1px);
+ text-align: center;
+ white-space: nowrap;
+}
+
+.button strong {
+ color: inherit;
+}
+
+.button .icon, .button .icon.is-small, .button .icon.is-medium, .button .icon.is-large {
+ height: 1.5em;
+ width: 1.5em;
+}
+
+.button .icon:first-child:not(:last-child) {
+ margin-left: calc(-0.375em - 1px);
+ margin-right: 0.1875em;
+}
+
+.button .icon:last-child:not(:first-child) {
+ margin-left: 0.1875em;
+ margin-right: calc(-0.375em - 1px);
+}
+
+.button .icon:first-child:last-child {
+ margin-left: calc(-0.375em - 1px);
+ margin-right: calc(-0.375em - 1px);
+}
+
+.button:hover, .button.is-hovered {
+ border-color: #b5b5b5;
+ color: #363636;
+}
+
+.button:focus, .button.is-focused {
+ border-color: #3273dc;
+ color: #363636;
+}
+
+.button:focus:not(:active), .button.is-focused:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(50, 115, 220, 0.25);
+}
+
+.button:active, .button.is-active {
+ border-color: #4a4a4a;
+ color: #363636;
+}
+
+.button.is-text {
+ background-color: transparent;
+ border-color: transparent;
+ color: #4a4a4a;
+ text-decoration: underline;
+}
+
+.button.is-text:hover, .button.is-text.is-hovered, .button.is-text:focus, .button.is-text.is-focused {
+ background-color: whitesmoke;
+ color: #363636;
+}
+
+.button.is-text:active, .button.is-text.is-active {
+ background-color: #e8e8e8;
+ color: #363636;
+}
+
+.button.is-text[disabled],
+fieldset[disabled] .button.is-text {
+ background-color: transparent;
+ border-color: transparent;
+ box-shadow: none;
+}
+
+.button.is-white {
+ background-color: white;
+ border-color: transparent;
+ color: #0a0a0a;
+}
+
+.button.is-white:hover, .button.is-white.is-hovered {
+ background-color: #f9f9f9;
+ border-color: transparent;
+ color: #0a0a0a;
+}
+
+.button.is-white:focus, .button.is-white.is-focused {
+ border-color: transparent;
+ color: #0a0a0a;
+}
+
+.button.is-white:focus:not(:active), .button.is-white.is-focused:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(255, 255, 255, 0.25);
+}
+
+.button.is-white:active, .button.is-white.is-active {
+ background-color: #f2f2f2;
+ border-color: transparent;
+ color: #0a0a0a;
+}
+
+.button.is-white[disabled],
+fieldset[disabled] .button.is-white {
+ background-color: white;
+ border-color: transparent;
+ box-shadow: none;
+}
+
+.button.is-white.is-inverted {
+ background-color: #0a0a0a;
+ color: white;
+}
+
+.button.is-white.is-inverted:hover, .button.is-white.is-inverted.is-hovered {
+ background-color: black;
+}
+
+.button.is-white.is-inverted[disabled],
+fieldset[disabled] .button.is-white.is-inverted {
+ background-color: #0a0a0a;
+ border-color: transparent;
+ box-shadow: none;
+ color: white;
+}
+
+.button.is-white.is-loading::after {
+ border-color: transparent transparent #0a0a0a #0a0a0a !important;
+}
+
+.button.is-white.is-outlined {
+ background-color: transparent;
+ border-color: white;
+ color: white;
+}
+
+.button.is-white.is-outlined:hover, .button.is-white.is-outlined.is-hovered, .button.is-white.is-outlined:focus, .button.is-white.is-outlined.is-focused {
+ background-color: white;
+ border-color: white;
+ color: #0a0a0a;
+}
+
+.button.is-white.is-outlined.is-loading::after {
+ border-color: transparent transparent white white !important;
+}
+
+.button.is-white.is-outlined.is-loading:hover::after, .button.is-white.is-outlined.is-loading.is-hovered::after, .button.is-white.is-outlined.is-loading:focus::after, .button.is-white.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #0a0a0a #0a0a0a !important;
+}
+
+.button.is-white.is-outlined[disabled],
+fieldset[disabled] .button.is-white.is-outlined {
+ background-color: transparent;
+ border-color: white;
+ box-shadow: none;
+ color: white;
+}
+
+.button.is-white.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #0a0a0a;
+ color: #0a0a0a;
+}
+
+.button.is-white.is-inverted.is-outlined:hover, .button.is-white.is-inverted.is-outlined.is-hovered, .button.is-white.is-inverted.is-outlined:focus, .button.is-white.is-inverted.is-outlined.is-focused {
+ background-color: #0a0a0a;
+ color: white;
+}
+
+.button.is-white.is-inverted.is-outlined.is-loading:hover::after, .button.is-white.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-white.is-inverted.is-outlined.is-loading:focus::after, .button.is-white.is-inverted.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent white white !important;
+}
+
+.button.is-white.is-inverted.is-outlined[disabled],
+fieldset[disabled] .button.is-white.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #0a0a0a;
+ box-shadow: none;
+ color: #0a0a0a;
+}
+
+.button.is-black {
+ background-color: #0a0a0a;
+ border-color: transparent;
+ color: white;
+}
+
+.button.is-black:hover, .button.is-black.is-hovered {
+ background-color: #040404;
+ border-color: transparent;
+ color: white;
+}
+
+.button.is-black:focus, .button.is-black.is-focused {
+ border-color: transparent;
+ color: white;
+}
+
+.button.is-black:focus:not(:active), .button.is-black.is-focused:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(10, 10, 10, 0.25);
+}
+
+.button.is-black:active, .button.is-black.is-active {
+ background-color: black;
+ border-color: transparent;
+ color: white;
+}
+
+.button.is-black[disabled],
+fieldset[disabled] .button.is-black {
+ background-color: #0a0a0a;
+ border-color: transparent;
+ box-shadow: none;
+}
+
+.button.is-black.is-inverted {
+ background-color: white;
+ color: #0a0a0a;
+}
+
+.button.is-black.is-inverted:hover, .button.is-black.is-inverted.is-hovered {
+ background-color: #f2f2f2;
+}
+
+.button.is-black.is-inverted[disabled],
+fieldset[disabled] .button.is-black.is-inverted {
+ background-color: white;
+ border-color: transparent;
+ box-shadow: none;
+ color: #0a0a0a;
+}
+
+.button.is-black.is-loading::after {
+ border-color: transparent transparent white white !important;
+}
+
+.button.is-black.is-outlined {
+ background-color: transparent;
+ border-color: #0a0a0a;
+ color: #0a0a0a;
+}
+
+.button.is-black.is-outlined:hover, .button.is-black.is-outlined.is-hovered, .button.is-black.is-outlined:focus, .button.is-black.is-outlined.is-focused {
+ background-color: #0a0a0a;
+ border-color: #0a0a0a;
+ color: white;
+}
+
+.button.is-black.is-outlined.is-loading::after {
+ border-color: transparent transparent #0a0a0a #0a0a0a !important;
+}
+
+.button.is-black.is-outlined.is-loading:hover::after, .button.is-black.is-outlined.is-loading.is-hovered::after, .button.is-black.is-outlined.is-loading:focus::after, .button.is-black.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent white white !important;
+}
+
+.button.is-black.is-outlined[disabled],
+fieldset[disabled] .button.is-black.is-outlined {
+ background-color: transparent;
+ border-color: #0a0a0a;
+ box-shadow: none;
+ color: #0a0a0a;
+}
+
+.button.is-black.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: white;
+ color: white;
+}
+
+.button.is-black.is-inverted.is-outlined:hover, .button.is-black.is-inverted.is-outlined.is-hovered, .button.is-black.is-inverted.is-outlined:focus, .button.is-black.is-inverted.is-outlined.is-focused {
+ background-color: white;
+ color: #0a0a0a;
+}
+
+.button.is-black.is-inverted.is-outlined.is-loading:hover::after, .button.is-black.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-black.is-inverted.is-outlined.is-loading:focus::after, .button.is-black.is-inverted.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #0a0a0a #0a0a0a !important;
+}
+
+.button.is-black.is-inverted.is-outlined[disabled],
+fieldset[disabled] .button.is-black.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: white;
+ box-shadow: none;
+ color: white;
+}
+
+.button.is-light {
+ background-color: whitesmoke;
+ border-color: transparent;
+ color: #363636;
+}
+
+.button.is-light:hover, .button.is-light.is-hovered {
+ background-color: #eeeeee;
+ border-color: transparent;
+ color: #363636;
+}
+
+.button.is-light:focus, .button.is-light.is-focused {
+ border-color: transparent;
+ color: #363636;
+}
+
+.button.is-light:focus:not(:active), .button.is-light.is-focused:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(245, 245, 245, 0.25);
+}
+
+.button.is-light:active, .button.is-light.is-active {
+ background-color: #e8e8e8;
+ border-color: transparent;
+ color: #363636;
+}
+
+.button.is-light[disabled],
+fieldset[disabled] .button.is-light {
+ background-color: whitesmoke;
+ border-color: transparent;
+ box-shadow: none;
+}
+
+.button.is-light.is-inverted {
+ background-color: #363636;
+ color: whitesmoke;
+}
+
+.button.is-light.is-inverted:hover, .button.is-light.is-inverted.is-hovered {
+ background-color: #292929;
+}
+
+.button.is-light.is-inverted[disabled],
+fieldset[disabled] .button.is-light.is-inverted {
+ background-color: #363636;
+ border-color: transparent;
+ box-shadow: none;
+ color: whitesmoke;
+}
+
+.button.is-light.is-loading::after {
+ border-color: transparent transparent #363636 #363636 !important;
+}
+
+.button.is-light.is-outlined {
+ background-color: transparent;
+ border-color: whitesmoke;
+ color: whitesmoke;
+}
+
+.button.is-light.is-outlined:hover, .button.is-light.is-outlined.is-hovered, .button.is-light.is-outlined:focus, .button.is-light.is-outlined.is-focused {
+ background-color: whitesmoke;
+ border-color: whitesmoke;
+ color: #363636;
+}
+
+.button.is-light.is-outlined.is-loading::after {
+ border-color: transparent transparent whitesmoke whitesmoke !important;
+}
+
+.button.is-light.is-outlined.is-loading:hover::after, .button.is-light.is-outlined.is-loading.is-hovered::after, .button.is-light.is-outlined.is-loading:focus::after, .button.is-light.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #363636 #363636 !important;
+}
+
+.button.is-light.is-outlined[disabled],
+fieldset[disabled] .button.is-light.is-outlined {
+ background-color: transparent;
+ border-color: whitesmoke;
+ box-shadow: none;
+ color: whitesmoke;
+}
+
+.button.is-light.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #363636;
+ color: #363636;
+}
+
+.button.is-light.is-inverted.is-outlined:hover, .button.is-light.is-inverted.is-outlined.is-hovered, .button.is-light.is-inverted.is-outlined:focus, .button.is-light.is-inverted.is-outlined.is-focused {
+ background-color: #363636;
+ color: whitesmoke;
+}
+
+.button.is-light.is-inverted.is-outlined.is-loading:hover::after, .button.is-light.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-light.is-inverted.is-outlined.is-loading:focus::after, .button.is-light.is-inverted.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent whitesmoke whitesmoke !important;
+}
+
+.button.is-light.is-inverted.is-outlined[disabled],
+fieldset[disabled] .button.is-light.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #363636;
+ box-shadow: none;
+ color: #363636;
+}
+
+.button.is-dark {
+ background-color: #363636;
+ border-color: transparent;
+ color: whitesmoke;
+}
+
+.button.is-dark:hover, .button.is-dark.is-hovered {
+ background-color: #2f2f2f;
+ border-color: transparent;
+ color: whitesmoke;
+}
+
+.button.is-dark:focus, .button.is-dark.is-focused {
+ border-color: transparent;
+ color: whitesmoke;
+}
+
+.button.is-dark:focus:not(:active), .button.is-dark.is-focused:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(54, 54, 54, 0.25);
+}
+
+.button.is-dark:active, .button.is-dark.is-active {
+ background-color: #292929;
+ border-color: transparent;
+ color: whitesmoke;
+}
+
+.button.is-dark[disabled],
+fieldset[disabled] .button.is-dark {
+ background-color: #363636;
+ border-color: transparent;
+ box-shadow: none;
+}
+
+.button.is-dark.is-inverted {
+ background-color: whitesmoke;
+ color: #363636;
+}
+
+.button.is-dark.is-inverted:hover, .button.is-dark.is-inverted.is-hovered {
+ background-color: #e8e8e8;
+}
+
+.button.is-dark.is-inverted[disabled],
+fieldset[disabled] .button.is-dark.is-inverted {
+ background-color: whitesmoke;
+ border-color: transparent;
+ box-shadow: none;
+ color: #363636;
+}
+
+.button.is-dark.is-loading::after {
+ border-color: transparent transparent whitesmoke whitesmoke !important;
+}
+
+.button.is-dark.is-outlined {
+ background-color: transparent;
+ border-color: #363636;
+ color: #363636;
+}
+
+.button.is-dark.is-outlined:hover, .button.is-dark.is-outlined.is-hovered, .button.is-dark.is-outlined:focus, .button.is-dark.is-outlined.is-focused {
+ background-color: #363636;
+ border-color: #363636;
+ color: whitesmoke;
+}
+
+.button.is-dark.is-outlined.is-loading::after {
+ border-color: transparent transparent #363636 #363636 !important;
+}
+
+.button.is-dark.is-outlined.is-loading:hover::after, .button.is-dark.is-outlined.is-loading.is-hovered::after, .button.is-dark.is-outlined.is-loading:focus::after, .button.is-dark.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent whitesmoke whitesmoke !important;
+}
+
+.button.is-dark.is-outlined[disabled],
+fieldset[disabled] .button.is-dark.is-outlined {
+ background-color: transparent;
+ border-color: #363636;
+ box-shadow: none;
+ color: #363636;
+}
+
+.button.is-dark.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: whitesmoke;
+ color: whitesmoke;
+}
+
+.button.is-dark.is-inverted.is-outlined:hover, .button.is-dark.is-inverted.is-outlined.is-hovered, .button.is-dark.is-inverted.is-outlined:focus, .button.is-dark.is-inverted.is-outlined.is-focused {
+ background-color: whitesmoke;
+ color: #363636;
+}
+
+.button.is-dark.is-inverted.is-outlined.is-loading:hover::after, .button.is-dark.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-dark.is-inverted.is-outlined.is-loading:focus::after, .button.is-dark.is-inverted.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #363636 #363636 !important;
+}
+
+.button.is-dark.is-inverted.is-outlined[disabled],
+fieldset[disabled] .button.is-dark.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: whitesmoke;
+ box-shadow: none;
+ color: whitesmoke;
+}
+
+.button.is-primary {
+ background-color: #00d1b2;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-primary:hover, .button.is-primary.is-hovered {
+ background-color: #00c4a7;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-primary:focus, .button.is-primary.is-focused {
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-primary:focus:not(:active), .button.is-primary.is-focused:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(0, 209, 178, 0.25);
+}
+
+.button.is-primary:active, .button.is-primary.is-active {
+ background-color: #00b89c;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-primary[disabled],
+fieldset[disabled] .button.is-primary {
+ background-color: #00d1b2;
+ border-color: transparent;
+ box-shadow: none;
+}
+
+.button.is-primary.is-inverted {
+ background-color: #fff;
+ color: #00d1b2;
+}
+
+.button.is-primary.is-inverted:hover, .button.is-primary.is-inverted.is-hovered {
+ background-color: #f2f2f2;
+}
+
+.button.is-primary.is-inverted[disabled],
+fieldset[disabled] .button.is-primary.is-inverted {
+ background-color: #fff;
+ border-color: transparent;
+ box-shadow: none;
+ color: #00d1b2;
+}
+
+.button.is-primary.is-loading::after {
+ border-color: transparent transparent #fff #fff !important;
+}
+
+.button.is-primary.is-outlined {
+ background-color: transparent;
+ border-color: #00d1b2;
+ color: #00d1b2;
+}
+
+.button.is-primary.is-outlined:hover, .button.is-primary.is-outlined.is-hovered, .button.is-primary.is-outlined:focus, .button.is-primary.is-outlined.is-focused {
+ background-color: #00d1b2;
+ border-color: #00d1b2;
+ color: #fff;
+}
+
+.button.is-primary.is-outlined.is-loading::after {
+ border-color: transparent transparent #00d1b2 #00d1b2 !important;
+}
+
+.button.is-primary.is-outlined.is-loading:hover::after, .button.is-primary.is-outlined.is-loading.is-hovered::after, .button.is-primary.is-outlined.is-loading:focus::after, .button.is-primary.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #fff #fff !important;
+}
+
+.button.is-primary.is-outlined[disabled],
+fieldset[disabled] .button.is-primary.is-outlined {
+ background-color: transparent;
+ border-color: #00d1b2;
+ box-shadow: none;
+ color: #00d1b2;
+}
+
+.button.is-primary.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #fff;
+ color: #fff;
+}
+
+.button.is-primary.is-inverted.is-outlined:hover, .button.is-primary.is-inverted.is-outlined.is-hovered, .button.is-primary.is-inverted.is-outlined:focus, .button.is-primary.is-inverted.is-outlined.is-focused {
+ background-color: #fff;
+ color: #00d1b2;
+}
+
+.button.is-primary.is-inverted.is-outlined.is-loading:hover::after, .button.is-primary.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-primary.is-inverted.is-outlined.is-loading:focus::after, .button.is-primary.is-inverted.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #00d1b2 #00d1b2 !important;
+}
+
+.button.is-primary.is-inverted.is-outlined[disabled],
+fieldset[disabled] .button.is-primary.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #fff;
+ box-shadow: none;
+ color: #fff;
+}
+
+.button.is-link {
+ background-color: #3273dc;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-link:hover, .button.is-link.is-hovered {
+ background-color: #276cda;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-link:focus, .button.is-link.is-focused {
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-link:focus:not(:active), .button.is-link.is-focused:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(50, 115, 220, 0.25);
+}
+
+.button.is-link:active, .button.is-link.is-active {
+ background-color: #2366d1;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-link[disabled],
+fieldset[disabled] .button.is-link {
+ background-color: #3273dc;
+ border-color: transparent;
+ box-shadow: none;
+}
+
+.button.is-link.is-inverted {
+ background-color: #fff;
+ color: #3273dc;
+}
+
+.button.is-link.is-inverted:hover, .button.is-link.is-inverted.is-hovered {
+ background-color: #f2f2f2;
+}
+
+.button.is-link.is-inverted[disabled],
+fieldset[disabled] .button.is-link.is-inverted {
+ background-color: #fff;
+ border-color: transparent;
+ box-shadow: none;
+ color: #3273dc;
+}
+
+.button.is-link.is-loading::after {
+ border-color: transparent transparent #fff #fff !important;
+}
+
+.button.is-link.is-outlined {
+ background-color: transparent;
+ border-color: #3273dc;
+ color: #3273dc;
+}
+
+.button.is-link.is-outlined:hover, .button.is-link.is-outlined.is-hovered, .button.is-link.is-outlined:focus, .button.is-link.is-outlined.is-focused {
+ background-color: #3273dc;
+ border-color: #3273dc;
+ color: #fff;
+}
+
+.button.is-link.is-outlined.is-loading::after {
+ border-color: transparent transparent #3273dc #3273dc !important;
+}
+
+.button.is-link.is-outlined.is-loading:hover::after, .button.is-link.is-outlined.is-loading.is-hovered::after, .button.is-link.is-outlined.is-loading:focus::after, .button.is-link.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #fff #fff !important;
+}
+
+.button.is-link.is-outlined[disabled],
+fieldset[disabled] .button.is-link.is-outlined {
+ background-color: transparent;
+ border-color: #3273dc;
+ box-shadow: none;
+ color: #3273dc;
+}
+
+.button.is-link.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #fff;
+ color: #fff;
+}
+
+.button.is-link.is-inverted.is-outlined:hover, .button.is-link.is-inverted.is-outlined.is-hovered, .button.is-link.is-inverted.is-outlined:focus, .button.is-link.is-inverted.is-outlined.is-focused {
+ background-color: #fff;
+ color: #3273dc;
+}
+
+.button.is-link.is-inverted.is-outlined.is-loading:hover::after, .button.is-link.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-link.is-inverted.is-outlined.is-loading:focus::after, .button.is-link.is-inverted.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #3273dc #3273dc !important;
+}
+
+.button.is-link.is-inverted.is-outlined[disabled],
+fieldset[disabled] .button.is-link.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #fff;
+ box-shadow: none;
+ color: #fff;
+}
+
+.button.is-info {
+ background-color: #209cee;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-info:hover, .button.is-info.is-hovered {
+ background-color: #1496ed;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-info:focus, .button.is-info.is-focused {
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-info:focus:not(:active), .button.is-info.is-focused:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(32, 156, 238, 0.25);
+}
+
+.button.is-info:active, .button.is-info.is-active {
+ background-color: #118fe4;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-info[disabled],
+fieldset[disabled] .button.is-info {
+ background-color: #209cee;
+ border-color: transparent;
+ box-shadow: none;
+}
+
+.button.is-info.is-inverted {
+ background-color: #fff;
+ color: #209cee;
+}
+
+.button.is-info.is-inverted:hover, .button.is-info.is-inverted.is-hovered {
+ background-color: #f2f2f2;
+}
+
+.button.is-info.is-inverted[disabled],
+fieldset[disabled] .button.is-info.is-inverted {
+ background-color: #fff;
+ border-color: transparent;
+ box-shadow: none;
+ color: #209cee;
+}
+
+.button.is-info.is-loading::after {
+ border-color: transparent transparent #fff #fff !important;
+}
+
+.button.is-info.is-outlined {
+ background-color: transparent;
+ border-color: #209cee;
+ color: #209cee;
+}
+
+.button.is-info.is-outlined:hover, .button.is-info.is-outlined.is-hovered, .button.is-info.is-outlined:focus, .button.is-info.is-outlined.is-focused {
+ background-color: #209cee;
+ border-color: #209cee;
+ color: #fff;
+}
+
+.button.is-info.is-outlined.is-loading::after {
+ border-color: transparent transparent #209cee #209cee !important;
+}
+
+.button.is-info.is-outlined.is-loading:hover::after, .button.is-info.is-outlined.is-loading.is-hovered::after, .button.is-info.is-outlined.is-loading:focus::after, .button.is-info.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #fff #fff !important;
+}
+
+.button.is-info.is-outlined[disabled],
+fieldset[disabled] .button.is-info.is-outlined {
+ background-color: transparent;
+ border-color: #209cee;
+ box-shadow: none;
+ color: #209cee;
+}
+
+.button.is-info.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #fff;
+ color: #fff;
+}
+
+.button.is-info.is-inverted.is-outlined:hover, .button.is-info.is-inverted.is-outlined.is-hovered, .button.is-info.is-inverted.is-outlined:focus, .button.is-info.is-inverted.is-outlined.is-focused {
+ background-color: #fff;
+ color: #209cee;
+}
+
+.button.is-info.is-inverted.is-outlined.is-loading:hover::after, .button.is-info.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-info.is-inverted.is-outlined.is-loading:focus::after, .button.is-info.is-inverted.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #209cee #209cee !important;
+}
+
+.button.is-info.is-inverted.is-outlined[disabled],
+fieldset[disabled] .button.is-info.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #fff;
+ box-shadow: none;
+ color: #fff;
+}
+
+.button.is-success {
+ background-color: #23d160;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-success:hover, .button.is-success.is-hovered {
+ background-color: #22c65b;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-success:focus, .button.is-success.is-focused {
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-success:focus:not(:active), .button.is-success.is-focused:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(35, 209, 96, 0.25);
+}
+
+.button.is-success:active, .button.is-success.is-active {
+ background-color: #20bc56;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-success[disabled],
+fieldset[disabled] .button.is-success {
+ background-color: #23d160;
+ border-color: transparent;
+ box-shadow: none;
+}
+
+.button.is-success.is-inverted {
+ background-color: #fff;
+ color: #23d160;
+}
+
+.button.is-success.is-inverted:hover, .button.is-success.is-inverted.is-hovered {
+ background-color: #f2f2f2;
+}
+
+.button.is-success.is-inverted[disabled],
+fieldset[disabled] .button.is-success.is-inverted {
+ background-color: #fff;
+ border-color: transparent;
+ box-shadow: none;
+ color: #23d160;
+}
+
+.button.is-success.is-loading::after {
+ border-color: transparent transparent #fff #fff !important;
+}
+
+.button.is-success.is-outlined {
+ background-color: transparent;
+ border-color: #23d160;
+ color: #23d160;
+}
+
+.button.is-success.is-outlined:hover, .button.is-success.is-outlined.is-hovered, .button.is-success.is-outlined:focus, .button.is-success.is-outlined.is-focused {
+ background-color: #23d160;
+ border-color: #23d160;
+ color: #fff;
+}
+
+.button.is-success.is-outlined.is-loading::after {
+ border-color: transparent transparent #23d160 #23d160 !important;
+}
+
+.button.is-success.is-outlined.is-loading:hover::after, .button.is-success.is-outlined.is-loading.is-hovered::after, .button.is-success.is-outlined.is-loading:focus::after, .button.is-success.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #fff #fff !important;
+}
+
+.button.is-success.is-outlined[disabled],
+fieldset[disabled] .button.is-success.is-outlined {
+ background-color: transparent;
+ border-color: #23d160;
+ box-shadow: none;
+ color: #23d160;
+}
+
+.button.is-success.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #fff;
+ color: #fff;
+}
+
+.button.is-success.is-inverted.is-outlined:hover, .button.is-success.is-inverted.is-outlined.is-hovered, .button.is-success.is-inverted.is-outlined:focus, .button.is-success.is-inverted.is-outlined.is-focused {
+ background-color: #fff;
+ color: #23d160;
+}
+
+.button.is-success.is-inverted.is-outlined.is-loading:hover::after, .button.is-success.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-success.is-inverted.is-outlined.is-loading:focus::after, .button.is-success.is-inverted.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #23d160 #23d160 !important;
+}
+
+.button.is-success.is-inverted.is-outlined[disabled],
+fieldset[disabled] .button.is-success.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #fff;
+ box-shadow: none;
+ color: #fff;
+}
+
+.button.is-warning {
+ background-color: #ffdd57;
+ border-color: transparent;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.button.is-warning:hover, .button.is-warning.is-hovered {
+ background-color: #ffdb4a;
+ border-color: transparent;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.button.is-warning:focus, .button.is-warning.is-focused {
+ border-color: transparent;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.button.is-warning:focus:not(:active), .button.is-warning.is-focused:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(255, 221, 87, 0.25);
+}
+
+.button.is-warning:active, .button.is-warning.is-active {
+ background-color: #ffd83d;
+ border-color: transparent;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.button.is-warning[disabled],
+fieldset[disabled] .button.is-warning {
+ background-color: #ffdd57;
+ border-color: transparent;
+ box-shadow: none;
+}
+
+.button.is-warning.is-inverted {
+ background-color: rgba(0, 0, 0, 0.7);
+ color: #ffdd57;
+}
+
+.button.is-warning.is-inverted:hover, .button.is-warning.is-inverted.is-hovered {
+ background-color: rgba(0, 0, 0, 0.7);
+}
+
+.button.is-warning.is-inverted[disabled],
+fieldset[disabled] .button.is-warning.is-inverted {
+ background-color: rgba(0, 0, 0, 0.7);
+ border-color: transparent;
+ box-shadow: none;
+ color: #ffdd57;
+}
+
+.button.is-warning.is-loading::after {
+ border-color: transparent transparent rgba(0, 0, 0, 0.7) rgba(0, 0, 0, 0.7) !important;
+}
+
+.button.is-warning.is-outlined {
+ background-color: transparent;
+ border-color: #ffdd57;
+ color: #ffdd57;
+}
+
+.button.is-warning.is-outlined:hover, .button.is-warning.is-outlined.is-hovered, .button.is-warning.is-outlined:focus, .button.is-warning.is-outlined.is-focused {
+ background-color: #ffdd57;
+ border-color: #ffdd57;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.button.is-warning.is-outlined.is-loading::after {
+ border-color: transparent transparent #ffdd57 #ffdd57 !important;
+}
+
+.button.is-warning.is-outlined.is-loading:hover::after, .button.is-warning.is-outlined.is-loading.is-hovered::after, .button.is-warning.is-outlined.is-loading:focus::after, .button.is-warning.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent rgba(0, 0, 0, 0.7) rgba(0, 0, 0, 0.7) !important;
+}
+
+.button.is-warning.is-outlined[disabled],
+fieldset[disabled] .button.is-warning.is-outlined {
+ background-color: transparent;
+ border-color: #ffdd57;
+ box-shadow: none;
+ color: #ffdd57;
+}
+
+.button.is-warning.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: rgba(0, 0, 0, 0.7);
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.button.is-warning.is-inverted.is-outlined:hover, .button.is-warning.is-inverted.is-outlined.is-hovered, .button.is-warning.is-inverted.is-outlined:focus, .button.is-warning.is-inverted.is-outlined.is-focused {
+ background-color: rgba(0, 0, 0, 0.7);
+ color: #ffdd57;
+}
+
+.button.is-warning.is-inverted.is-outlined.is-loading:hover::after, .button.is-warning.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-warning.is-inverted.is-outlined.is-loading:focus::after, .button.is-warning.is-inverted.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #ffdd57 #ffdd57 !important;
+}
+
+.button.is-warning.is-inverted.is-outlined[disabled],
+fieldset[disabled] .button.is-warning.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: rgba(0, 0, 0, 0.7);
+ box-shadow: none;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.button.is-danger {
+ background-color: #ff3860;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-danger:hover, .button.is-danger.is-hovered {
+ background-color: #ff2b56;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-danger:focus, .button.is-danger.is-focused {
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-danger:focus:not(:active), .button.is-danger.is-focused:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(255, 56, 96, 0.25);
+}
+
+.button.is-danger:active, .button.is-danger.is-active {
+ background-color: #ff1f4b;
+ border-color: transparent;
+ color: #fff;
+}
+
+.button.is-danger[disabled],
+fieldset[disabled] .button.is-danger {
+ background-color: #ff3860;
+ border-color: transparent;
+ box-shadow: none;
+}
+
+.button.is-danger.is-inverted {
+ background-color: #fff;
+ color: #ff3860;
+}
+
+.button.is-danger.is-inverted:hover, .button.is-danger.is-inverted.is-hovered {
+ background-color: #f2f2f2;
+}
+
+.button.is-danger.is-inverted[disabled],
+fieldset[disabled] .button.is-danger.is-inverted {
+ background-color: #fff;
+ border-color: transparent;
+ box-shadow: none;
+ color: #ff3860;
+}
+
+.button.is-danger.is-loading::after {
+ border-color: transparent transparent #fff #fff !important;
+}
+
+.button.is-danger.is-outlined {
+ background-color: transparent;
+ border-color: #ff3860;
+ color: #ff3860;
+}
+
+.button.is-danger.is-outlined:hover, .button.is-danger.is-outlined.is-hovered, .button.is-danger.is-outlined:focus, .button.is-danger.is-outlined.is-focused {
+ background-color: #ff3860;
+ border-color: #ff3860;
+ color: #fff;
+}
+
+.button.is-danger.is-outlined.is-loading::after {
+ border-color: transparent transparent #ff3860 #ff3860 !important;
+}
+
+.button.is-danger.is-outlined.is-loading:hover::after, .button.is-danger.is-outlined.is-loading.is-hovered::after, .button.is-danger.is-outlined.is-loading:focus::after, .button.is-danger.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #fff #fff !important;
+}
+
+.button.is-danger.is-outlined[disabled],
+fieldset[disabled] .button.is-danger.is-outlined {
+ background-color: transparent;
+ border-color: #ff3860;
+ box-shadow: none;
+ color: #ff3860;
+}
+
+.button.is-danger.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #fff;
+ color: #fff;
+}
+
+.button.is-danger.is-inverted.is-outlined:hover, .button.is-danger.is-inverted.is-outlined.is-hovered, .button.is-danger.is-inverted.is-outlined:focus, .button.is-danger.is-inverted.is-outlined.is-focused {
+ background-color: #fff;
+ color: #ff3860;
+}
+
+.button.is-danger.is-inverted.is-outlined.is-loading:hover::after, .button.is-danger.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-danger.is-inverted.is-outlined.is-loading:focus::after, .button.is-danger.is-inverted.is-outlined.is-loading.is-focused::after {
+ border-color: transparent transparent #ff3860 #ff3860 !important;
+}
+
+.button.is-danger.is-inverted.is-outlined[disabled],
+fieldset[disabled] .button.is-danger.is-inverted.is-outlined {
+ background-color: transparent;
+ border-color: #fff;
+ box-shadow: none;
+ color: #fff;
+}
+
+.button.is-small {
+ border-radius: 2px;
+ font-size: 0.75rem;
+}
+
+.button.is-normal {
+ font-size: 1rem;
+}
+
+.button.is-medium {
+ font-size: 1.25rem;
+}
+
+.button.is-large {
+ font-size: 1.5rem;
+}
+
+.button[disabled],
+fieldset[disabled] .button {
+ background-color: white;
+ border-color: #dbdbdb;
+ box-shadow: none;
+ opacity: 0.5;
+}
+
+.button.is-fullwidth {
+ display: flex;
+ width: 100%;
+}
+
+.button.is-loading {
+ color: transparent !important;
+ pointer-events: none;
+}
+
+.button.is-loading::after {
+ left: calc(50% - (1em / 2));
+ top: calc(50% - (1em / 2));
+ position: absolute !important;
+}
+
+.button.is-static {
+ background-color: whitesmoke;
+ border-color: #dbdbdb;
+ color: #7a7a7a;
+ box-shadow: none;
+ pointer-events: none;
+}
+
+.button.is-rounded {
+ border-radius: 290486px;
+ padding-left: 1em;
+ padding-right: 1em;
+}
+
+.buttons {
+ align-items: center;
+ display: flex;
+ flex-wrap: wrap;
+ justify-content: flex-start;
+}
+
+.buttons .button {
+ margin-bottom: 0.5rem;
+}
+
+.buttons .button:not(:last-child):not(.is-fullwidth) {
+ margin-right: 0.5rem;
+}
+
+.buttons:last-child {
+ margin-bottom: -0.5rem;
+}
+
+.buttons:not(:last-child) {
+ margin-bottom: 1rem;
+}
+
+.buttons.are-small .button:not(.is-normal):not(.is-medium):not(.is-large) {
+ border-radius: 2px;
+ font-size: 0.75rem;
+}
+
+.buttons.are-medium .button:not(.is-small):not(.is-normal):not(.is-large) {
+ font-size: 1.25rem;
+}
+
+.buttons.are-large .button:not(.is-small):not(.is-normal):not(.is-medium) {
+ font-size: 1.5rem;
+}
+
+.buttons.has-addons .button:not(:first-child) {
+ border-bottom-left-radius: 0;
+ border-top-left-radius: 0;
+}
+
+.buttons.has-addons .button:not(:last-child) {
+ border-bottom-right-radius: 0;
+ border-top-right-radius: 0;
+ margin-right: -1px;
+}
+
+.buttons.has-addons .button:last-child {
+ margin-right: 0;
+}
+
+.buttons.has-addons .button:hover, .buttons.has-addons .button.is-hovered {
+ z-index: 2;
+}
+
+.buttons.has-addons .button:focus, .buttons.has-addons .button.is-focused, .buttons.has-addons .button:active, .buttons.has-addons .button.is-active, .buttons.has-addons .button.is-selected {
+ z-index: 3;
+}
+
+.buttons.has-addons .button:focus:hover, .buttons.has-addons .button.is-focused:hover, .buttons.has-addons .button:active:hover, .buttons.has-addons .button.is-active:hover, .buttons.has-addons .button.is-selected:hover {
+ z-index: 4;
+}
+
+.buttons.has-addons .button.is-expanded {
+ flex-grow: 1;
+ flex-shrink: 1;
+}
+
+.buttons.is-centered {
+ justify-content: center;
+}
+
+.buttons.is-centered:not(.has-addons) .button:not(.is-fullwidth) {
+ margin-left: 0.25rem;
+ margin-right: 0.25rem;
+}
+
+.buttons.is-right {
+ justify-content: flex-end;
+}
+
+.buttons.is-right:not(.has-addons) .button:not(.is-fullwidth) {
+ margin-left: 0.25rem;
+ margin-right: 0.25rem;
+}
+
+.container {
+ flex-grow: 1;
+ margin: 0 auto;
+ position: relative;
+ width: auto;
+}
+
+@media screen and (min-width: 1024px) {
+ .container {
+ max-width: 960px;
+ }
+ .container.is-fluid {
+ margin-left: 32px;
+ margin-right: 32px;
+ max-width: none;
+ }
+}
+
+@media screen and (max-width: 1215px) {
+ .container.is-widescreen {
+ max-width: 1152px;
+ }
+}
+
+@media screen and (max-width: 1407px) {
+ .container.is-fullhd {
+ max-width: 1344px;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .container {
+ max-width: 1152px;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .container {
+ max-width: 1344px;
+ }
+}
+
+.content li + li {
+ margin-top: 0.25em;
+}
+
+.content p:not(:last-child),
+.content dl:not(:last-child),
+.content ol:not(:last-child),
+.content ul:not(:last-child),
+.content blockquote:not(:last-child),
+.content pre:not(:last-child),
+.content table:not(:last-child) {
+ margin-bottom: 1em;
+}
+
+.content h1,
+.content h2,
+.content h3,
+.content h4,
+.content h5,
+.content h6 {
+ color: #363636;
+ font-weight: 600;
+ line-height: 1.125;
+}
+
+.content h1 {
+ font-size: 2em;
+ margin-bottom: 0.5em;
+}
+
+.content h1:not(:first-child) {
+ margin-top: 1em;
+}
+
+.content h2 {
+ font-size: 1.75em;
+ margin-bottom: 0.5714em;
+}
+
+.content h2:not(:first-child) {
+ margin-top: 1.1428em;
+}
+
+.content h3 {
+ font-size: 1.5em;
+ margin-bottom: 0.6666em;
+}
+
+.content h3:not(:first-child) {
+ margin-top: 1.3333em;
+}
+
+.content h4 {
+ font-size: 1.25em;
+ margin-bottom: 0.8em;
+}
+
+.content h5 {
+ font-size: 1.125em;
+ margin-bottom: 0.8888em;
+}
+
+.content h6 {
+ font-size: 1em;
+ margin-bottom: 1em;
+}
+
+.content blockquote {
+ background-color: whitesmoke;
+ border-left: 5px solid #dbdbdb;
+ padding: 1.25em 1.5em;
+}
+
+.content ol {
+ list-style-position: outside;
+ margin-left: 2em;
+ margin-top: 1em;
+}
+
+.content ol:not([type]) {
+ list-style-type: decimal;
+}
+
+.content ol:not([type]).is-lower-alpha {
+ list-style-type: lower-alpha;
+}
+
+.content ol:not([type]).is-lower-roman {
+ list-style-type: lower-roman;
+}
+
+.content ol:not([type]).is-upper-alpha {
+ list-style-type: upper-alpha;
+}
+
+.content ol:not([type]).is-upper-roman {
+ list-style-type: upper-roman;
+}
+
+.content ul {
+ list-style: disc outside;
+ margin-left: 2em;
+ margin-top: 1em;
+}
+
+.content ul ul {
+ list-style-type: circle;
+ margin-top: 0.5em;
+}
+
+.content ul ul ul {
+ list-style-type: square;
+}
+
+.content dd {
+ margin-left: 2em;
+}
+
+.content figure {
+ margin-left: 2em;
+ margin-right: 2em;
+ text-align: center;
+}
+
+.content figure:not(:first-child) {
+ margin-top: 2em;
+}
+
+.content figure:not(:last-child) {
+ margin-bottom: 2em;
+}
+
+.content figure img {
+ display: inline-block;
+}
+
+.content figure figcaption {
+ font-style: italic;
+}
+
+.content pre {
+ -webkit-overflow-scrolling: touch;
+ overflow-x: auto;
+ padding: 1.25em 1.5em;
+ white-space: pre;
+ word-wrap: normal;
+}
+
+.content sup,
+.content sub {
+ font-size: 75%;
+}
+
+.content table {
+ width: 100%;
+}
+
+.content table td,
+.content table th {
+ border: 1px solid #dbdbdb;
+ border-width: 0 0 1px;
+ padding: 0.5em 0.75em;
+ vertical-align: top;
+}
+
+.content table th {
+ color: #363636;
+}
+
+.content table th:not([align]) {
+ text-align: left;
+}
+
+.content table thead td,
+.content table thead th {
+ border-width: 0 0 2px;
+ color: #363636;
+}
+
+.content table tfoot td,
+.content table tfoot th {
+ border-width: 2px 0 0;
+ color: #363636;
+}
+
+.content table tbody tr:last-child td,
+.content table tbody tr:last-child th {
+ border-bottom-width: 0;
+}
+
+.content .tabs li + li {
+ margin-top: 0;
+}
+
+.content.is-small {
+ font-size: 0.75rem;
+}
+
+.content.is-medium {
+ font-size: 1.25rem;
+}
+
+.content.is-large {
+ font-size: 1.5rem;
+}
+
+.icon {
+ align-items: center;
+ display: inline-flex;
+ justify-content: center;
+ height: 1.5rem;
+ width: 1.5rem;
+}
+
+.icon.is-small {
+ height: 1rem;
+ width: 1rem;
+}
+
+.icon.is-medium {
+ height: 2rem;
+ width: 2rem;
+}
+
+.icon.is-large {
+ height: 3rem;
+ width: 3rem;
+}
+
+.image {
+ display: block;
+ position: relative;
+}
+
+.image img {
+ display: block;
+ height: auto;
+ width: 100%;
+}
+
+.image img.is-rounded {
+ border-radius: 290486px;
+}
+
+.image.is-square img,
+.image.is-square .has-ratio, .image.is-1by1 img,
+.image.is-1by1 .has-ratio, .image.is-5by4 img,
+.image.is-5by4 .has-ratio, .image.is-4by3 img,
+.image.is-4by3 .has-ratio, .image.is-3by2 img,
+.image.is-3by2 .has-ratio, .image.is-5by3 img,
+.image.is-5by3 .has-ratio, .image.is-16by9 img,
+.image.is-16by9 .has-ratio, .image.is-2by1 img,
+.image.is-2by1 .has-ratio, .image.is-3by1 img,
+.image.is-3by1 .has-ratio, .image.is-4by5 img,
+.image.is-4by5 .has-ratio, .image.is-3by4 img,
+.image.is-3by4 .has-ratio, .image.is-2by3 img,
+.image.is-2by3 .has-ratio, .image.is-3by5 img,
+.image.is-3by5 .has-ratio, .image.is-9by16 img,
+.image.is-9by16 .has-ratio, .image.is-1by2 img,
+.image.is-1by2 .has-ratio, .image.is-1by3 img,
+.image.is-1by3 .has-ratio {
+ height: 100%;
+ width: 100%;
+}
+
+.image.is-square, .image.is-1by1 {
+ padding-top: 100%;
+}
+
+.image.is-5by4 {
+ padding-top: 80%;
+}
+
+.image.is-4by3 {
+ padding-top: 75%;
+}
+
+.image.is-3by2 {
+ padding-top: 66.6666%;
+}
+
+.image.is-5by3 {
+ padding-top: 60%;
+}
+
+.image.is-16by9 {
+ padding-top: 56.25%;
+}
+
+.image.is-2by1 {
+ padding-top: 50%;
+}
+
+.image.is-3by1 {
+ padding-top: 33.3333%;
+}
+
+.image.is-4by5 {
+ padding-top: 125%;
+}
+
+.image.is-3by4 {
+ padding-top: 133.3333%;
+}
+
+.image.is-2by3 {
+ padding-top: 150%;
+}
+
+.image.is-3by5 {
+ padding-top: 166.6666%;
+}
+
+.image.is-9by16 {
+ padding-top: 177.7777%;
+}
+
+.image.is-1by2 {
+ padding-top: 200%;
+}
+
+.image.is-1by3 {
+ padding-top: 300%;
+}
+
+.image.is-16x16 {
+ height: 16px;
+ width: 16px;
+}
+
+.image.is-24x24 {
+ height: 24px;
+ width: 24px;
+}
+
+.image.is-32x32 {
+ height: 32px;
+ width: 32px;
+}
+
+.image.is-48x48 {
+ height: 48px;
+ width: 48px;
+}
+
+.image.is-64x64 {
+ height: 64px;
+ width: 64px;
+}
+
+.image.is-96x96 {
+ height: 96px;
+ width: 96px;
+}
+
+.image.is-128x128 {
+ height: 128px;
+ width: 128px;
+}
+
+.notification {
+ background-color: whitesmoke;
+ border-radius: 4px;
+ padding: 1.25rem 2.5rem 1.25rem 1.5rem;
+ position: relative;
+}
+
+.notification a:not(.button):not(.dropdown-item) {
+ color: currentColor;
+ text-decoration: underline;
+}
+
+.notification strong {
+ color: currentColor;
+}
+
+.notification code,
+.notification pre {
+ background: white;
+}
+
+.notification pre code {
+ background: transparent;
+}
+
+.notification > .delete {
+ position: absolute;
+ right: 0.5rem;
+ top: 0.5rem;
+}
+
+.notification .title,
+.notification .subtitle,
+.notification .content {
+ color: currentColor;
+}
+
+.notification.is-white {
+ background-color: white;
+ color: #0a0a0a;
+}
+
+.notification.is-black {
+ background-color: #0a0a0a;
+ color: white;
+}
+
+.notification.is-light {
+ background-color: whitesmoke;
+ color: #363636;
+}
+
+.notification.is-dark {
+ background-color: #363636;
+ color: whitesmoke;
+}
+
+.notification.is-primary {
+ background-color: #00d1b2;
+ color: #fff;
+}
+
+.notification.is-link {
+ background-color: #3273dc;
+ color: #fff;
+}
+
+.notification.is-info {
+ background-color: #209cee;
+ color: #fff;
+}
+
+.notification.is-success {
+ background-color: #23d160;
+ color: #fff;
+}
+
+.notification.is-warning {
+ background-color: #ffdd57;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.notification.is-danger {
+ background-color: #ff3860;
+ color: #fff;
+}
+
+.progress {
+ -moz-appearance: none;
+ -webkit-appearance: none;
+ border: none;
+ border-radius: 290486px;
+ display: block;
+ height: 1rem;
+ overflow: hidden;
+ padding: 0;
+ width: 100%;
+}
+
+.progress::-webkit-progress-bar {
+ background-color: #dbdbdb;
+}
+
+.progress::-webkit-progress-value {
+ background-color: #4a4a4a;
+}
+
+.progress::-moz-progress-bar {
+ background-color: #4a4a4a;
+}
+
+.progress::-ms-fill {
+ background-color: #4a4a4a;
+ border: none;
+}
+
+.progress.is-white::-webkit-progress-value {
+ background-color: white;
+}
+
+.progress.is-white::-moz-progress-bar {
+ background-color: white;
+}
+
+.progress.is-white::-ms-fill {
+ background-color: white;
+}
+
+.progress.is-white:indeterminate {
+ background-image: linear-gradient(to right, white 30%, #dbdbdb 30%);
+}
+
+.progress.is-black::-webkit-progress-value {
+ background-color: #0a0a0a;
+}
+
+.progress.is-black::-moz-progress-bar {
+ background-color: #0a0a0a;
+}
+
+.progress.is-black::-ms-fill {
+ background-color: #0a0a0a;
+}
+
+.progress.is-black:indeterminate {
+ background-image: linear-gradient(to right, #0a0a0a 30%, #dbdbdb 30%);
+}
+
+.progress.is-light::-webkit-progress-value {
+ background-color: whitesmoke;
+}
+
+.progress.is-light::-moz-progress-bar {
+ background-color: whitesmoke;
+}
+
+.progress.is-light::-ms-fill {
+ background-color: whitesmoke;
+}
+
+.progress.is-light:indeterminate {
+ background-image: linear-gradient(to right, whitesmoke 30%, #dbdbdb 30%);
+}
+
+.progress.is-dark::-webkit-progress-value {
+ background-color: #363636;
+}
+
+.progress.is-dark::-moz-progress-bar {
+ background-color: #363636;
+}
+
+.progress.is-dark::-ms-fill {
+ background-color: #363636;
+}
+
+.progress.is-dark:indeterminate {
+ background-image: linear-gradient(to right, #363636 30%, #dbdbdb 30%);
+}
+
+.progress.is-primary::-webkit-progress-value {
+ background-color: #00d1b2;
+}
+
+.progress.is-primary::-moz-progress-bar {
+ background-color: #00d1b2;
+}
+
+.progress.is-primary::-ms-fill {
+ background-color: #00d1b2;
+}
+
+.progress.is-primary:indeterminate {
+ background-image: linear-gradient(to right, #00d1b2 30%, #dbdbdb 30%);
+}
+
+.progress.is-link::-webkit-progress-value {
+ background-color: #3273dc;
+}
+
+.progress.is-link::-moz-progress-bar {
+ background-color: #3273dc;
+}
+
+.progress.is-link::-ms-fill {
+ background-color: #3273dc;
+}
+
+.progress.is-link:indeterminate {
+ background-image: linear-gradient(to right, #3273dc 30%, #dbdbdb 30%);
+}
+
+.progress.is-info::-webkit-progress-value {
+ background-color: #209cee;
+}
+
+.progress.is-info::-moz-progress-bar {
+ background-color: #209cee;
+}
+
+.progress.is-info::-ms-fill {
+ background-color: #209cee;
+}
+
+.progress.is-info:indeterminate {
+ background-image: linear-gradient(to right, #209cee 30%, #dbdbdb 30%);
+}
+
+.progress.is-success::-webkit-progress-value {
+ background-color: #23d160;
+}
+
+.progress.is-success::-moz-progress-bar {
+ background-color: #23d160;
+}
+
+.progress.is-success::-ms-fill {
+ background-color: #23d160;
+}
+
+.progress.is-success:indeterminate {
+ background-image: linear-gradient(to right, #23d160 30%, #dbdbdb 30%);
+}
+
+.progress.is-warning::-webkit-progress-value {
+ background-color: #ffdd57;
+}
+
+.progress.is-warning::-moz-progress-bar {
+ background-color: #ffdd57;
+}
+
+.progress.is-warning::-ms-fill {
+ background-color: #ffdd57;
+}
+
+.progress.is-warning:indeterminate {
+ background-image: linear-gradient(to right, #ffdd57 30%, #dbdbdb 30%);
+}
+
+.progress.is-danger::-webkit-progress-value {
+ background-color: #ff3860;
+}
+
+.progress.is-danger::-moz-progress-bar {
+ background-color: #ff3860;
+}
+
+.progress.is-danger::-ms-fill {
+ background-color: #ff3860;
+}
+
+.progress.is-danger:indeterminate {
+ background-image: linear-gradient(to right, #ff3860 30%, #dbdbdb 30%);
+}
+
+.progress:indeterminate {
+ -webkit-animation-duration: 1.5s;
+ animation-duration: 1.5s;
+ -webkit-animation-iteration-count: infinite;
+ animation-iteration-count: infinite;
+ -webkit-animation-name: moveIndeterminate;
+ animation-name: moveIndeterminate;
+ -webkit-animation-timing-function: linear;
+ animation-timing-function: linear;
+ background-color: #dbdbdb;
+ background-image: linear-gradient(to right, #4a4a4a 30%, #dbdbdb 30%);
+ background-position: top left;
+ background-repeat: no-repeat;
+ background-size: 150% 150%;
+}
+
+.progress:indeterminate::-webkit-progress-bar {
+ background-color: transparent;
+}
+
+.progress:indeterminate::-moz-progress-bar {
+ background-color: transparent;
+}
+
+.progress.is-small {
+ height: 0.75rem;
+}
+
+.progress.is-medium {
+ height: 1.25rem;
+}
+
+.progress.is-large {
+ height: 1.5rem;
+}
+
+@-webkit-keyframes moveIndeterminate {
+ from {
+ background-position: 200% 0;
+ }
+ to {
+ background-position: -200% 0;
+ }
+}
+
+@keyframes moveIndeterminate {
+ from {
+ background-position: 200% 0;
+ }
+ to {
+ background-position: -200% 0;
+ }
+}
+
+.table {
+ background-color: white;
+ color: #363636;
+}
+
+.table td,
+.table th {
+ border: 1px solid #dbdbdb;
+ border-width: 0 0 1px;
+ padding: 0.5em 0.75em;
+ vertical-align: top;
+}
+
+.table td.is-white,
+.table th.is-white {
+ background-color: white;
+ border-color: white;
+ color: #0a0a0a;
+}
+
+.table td.is-black,
+.table th.is-black {
+ background-color: #0a0a0a;
+ border-color: #0a0a0a;
+ color: white;
+}
+
+.table td.is-light,
+.table th.is-light {
+ background-color: whitesmoke;
+ border-color: whitesmoke;
+ color: #363636;
+}
+
+.table td.is-dark,
+.table th.is-dark {
+ background-color: #363636;
+ border-color: #363636;
+ color: whitesmoke;
+}
+
+.table td.is-primary,
+.table th.is-primary {
+ background-color: #00d1b2;
+ border-color: #00d1b2;
+ color: #fff;
+}
+
+.table td.is-link,
+.table th.is-link {
+ background-color: #3273dc;
+ border-color: #3273dc;
+ color: #fff;
+}
+
+.table td.is-info,
+.table th.is-info {
+ background-color: #209cee;
+ border-color: #209cee;
+ color: #fff;
+}
+
+.table td.is-success,
+.table th.is-success {
+ background-color: #23d160;
+ border-color: #23d160;
+ color: #fff;
+}
+
+.table td.is-warning,
+.table th.is-warning {
+ background-color: #ffdd57;
+ border-color: #ffdd57;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.table td.is-danger,
+.table th.is-danger {
+ background-color: #ff3860;
+ border-color: #ff3860;
+ color: #fff;
+}
+
+.table td.is-narrow,
+.table th.is-narrow {
+ white-space: nowrap;
+ width: 1%;
+}
+
+.table td.is-selected,
+.table th.is-selected {
+ background-color: #00d1b2;
+ color: #fff;
+}
+
+.table td.is-selected a,
+.table td.is-selected strong,
+.table th.is-selected a,
+.table th.is-selected strong {
+ color: currentColor;
+}
+
+.table th {
+ color: #363636;
+}
+
+.table th:not([align]) {
+ text-align: left;
+}
+
+.table tr.is-selected {
+ background-color: #00d1b2;
+ color: #fff;
+}
+
+.table tr.is-selected a,
+.table tr.is-selected strong {
+ color: currentColor;
+}
+
+.table tr.is-selected td,
+.table tr.is-selected th {
+ border-color: #fff;
+ color: currentColor;
+}
+
+.table thead {
+ background-color: transparent;
+}
+
+.table thead td,
+.table thead th {
+ border-width: 0 0 2px;
+ color: #363636;
+}
+
+.table tfoot {
+ background-color: transparent;
+}
+
+.table tfoot td,
+.table tfoot th {
+ border-width: 2px 0 0;
+ color: #363636;
+}
+
+.table tbody {
+ background-color: transparent;
+}
+
+.table tbody tr:last-child td,
+.table tbody tr:last-child th {
+ border-bottom-width: 0;
+}
+
+.table.is-bordered td,
+.table.is-bordered th {
+ border-width: 1px;
+}
+
+.table.is-bordered tr:last-child td,
+.table.is-bordered tr:last-child th {
+ border-bottom-width: 1px;
+}
+
+.table.is-fullwidth {
+ width: 100%;
+}
+
+.table.is-hoverable tbody tr:not(.is-selected):hover {
+ background-color: #fafafa;
+}
+
+.table.is-hoverable.is-striped tbody tr:not(.is-selected):hover {
+ background-color: #fafafa;
+}
+
+.table.is-hoverable.is-striped tbody tr:not(.is-selected):hover:nth-child(even) {
+ background-color: whitesmoke;
+}
+
+.table.is-narrow td,
+.table.is-narrow th {
+ padding: 0.25em 0.5em;
+}
+
+.table.is-striped tbody tr:not(.is-selected):nth-child(even) {
+ background-color: #fafafa;
+}
+
+.table-container {
+ -webkit-overflow-scrolling: touch;
+ overflow: auto;
+ overflow-y: hidden;
+ max-width: 100%;
+}
+
+.tags {
+ align-items: center;
+ display: flex;
+ flex-wrap: wrap;
+ justify-content: flex-start;
+}
+
+.tags .tag {
+ margin-bottom: 0.5rem;
+}
+
+.tags .tag:not(:last-child) {
+ margin-right: 0.5rem;
+}
+
+.tags:last-child {
+ margin-bottom: -0.5rem;
+}
+
+.tags:not(:last-child) {
+ margin-bottom: 1rem;
+}
+
+.tags.are-medium .tag:not(.is-normal):not(.is-large) {
+ font-size: 1rem;
+}
+
+.tags.are-large .tag:not(.is-normal):not(.is-medium) {
+ font-size: 1.25rem;
+}
+
+.tags.is-centered {
+ justify-content: center;
+}
+
+.tags.is-centered .tag {
+ margin-right: 0.25rem;
+ margin-left: 0.25rem;
+}
+
+.tags.is-right {
+ justify-content: flex-end;
+}
+
+.tags.is-right .tag:not(:first-child) {
+ margin-left: 0.5rem;
+}
+
+.tags.is-right .tag:not(:last-child) {
+ margin-right: 0;
+}
+
+.tags.has-addons .tag {
+ margin-right: 0;
+}
+
+.tags.has-addons .tag:not(:first-child) {
+ margin-left: 0;
+ border-bottom-left-radius: 0;
+ border-top-left-radius: 0;
+}
+
+.tags.has-addons .tag:not(:last-child) {
+ border-bottom-right-radius: 0;
+ border-top-right-radius: 0;
+}
+
+.tag:not(body) {
+ align-items: center;
+ background-color: whitesmoke;
+ border-radius: 4px;
+ color: #4a4a4a;
+ display: inline-flex;
+ font-size: 0.75rem;
+ height: 2em;
+ justify-content: center;
+ line-height: 1.5;
+ padding-left: 0.75em;
+ padding-right: 0.75em;
+ white-space: nowrap;
+}
+
+.tag:not(body) .delete {
+ margin-left: 0.25rem;
+ margin-right: -0.375rem;
+}
+
+.tag:not(body).is-white {
+ background-color: white;
+ color: #0a0a0a;
+}
+
+.tag:not(body).is-black {
+ background-color: #0a0a0a;
+ color: white;
+}
+
+.tag:not(body).is-light {
+ background-color: whitesmoke;
+ color: #363636;
+}
+
+.tag:not(body).is-dark {
+ background-color: #363636;
+ color: whitesmoke;
+}
+
+.tag:not(body).is-primary {
+ background-color: #00d1b2;
+ color: #fff;
+}
+
+.tag:not(body).is-link {
+ background-color: #3273dc;
+ color: #fff;
+}
+
+.tag:not(body).is-info {
+ background-color: #209cee;
+ color: #fff;
+}
+
+.tag:not(body).is-success {
+ background-color: #23d160;
+ color: #fff;
+}
+
+.tag:not(body).is-warning {
+ background-color: #ffdd57;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.tag:not(body).is-danger {
+ background-color: #ff3860;
+ color: #fff;
+}
+
+.tag:not(body).is-normal {
+ font-size: 0.75rem;
+}
+
+.tag:not(body).is-medium {
+ font-size: 1rem;
+}
+
+.tag:not(body).is-large {
+ font-size: 1.25rem;
+}
+
+.tag:not(body) .icon:first-child:not(:last-child) {
+ margin-left: -0.375em;
+ margin-right: 0.1875em;
+}
+
+.tag:not(body) .icon:last-child:not(:first-child) {
+ margin-left: 0.1875em;
+ margin-right: -0.375em;
+}
+
+.tag:not(body) .icon:first-child:last-child {
+ margin-left: -0.375em;
+ margin-right: -0.375em;
+}
+
+.tag:not(body).is-delete {
+ margin-left: 1px;
+ padding: 0;
+ position: relative;
+ width: 2em;
+}
+
+.tag:not(body).is-delete::before, .tag:not(body).is-delete::after {
+ background-color: currentColor;
+ content: "";
+ display: block;
+ left: 50%;
+ position: absolute;
+ top: 50%;
+ -webkit-transform: translateX(-50%) translateY(-50%) rotate(45deg);
+ transform: translateX(-50%) translateY(-50%) rotate(45deg);
+ -webkit-transform-origin: center center;
+ transform-origin: center center;
+}
+
+.tag:not(body).is-delete::before {
+ height: 1px;
+ width: 50%;
+}
+
+.tag:not(body).is-delete::after {
+ height: 50%;
+ width: 1px;
+}
+
+.tag:not(body).is-delete:hover, .tag:not(body).is-delete:focus {
+ background-color: #e8e8e8;
+}
+
+.tag:not(body).is-delete:active {
+ background-color: #dbdbdb;
+}
+
+.tag:not(body).is-rounded {
+ border-radius: 290486px;
+}
+
+a.tag:hover {
+ text-decoration: underline;
+}
+
+.title,
+.subtitle {
+ word-break: break-word;
+}
+
+.title em,
+.title span,
+.subtitle em,
+.subtitle span {
+ font-weight: inherit;
+}
+
+.title sub,
+.subtitle sub {
+ font-size: 0.75em;
+}
+
+.title sup,
+.subtitle sup {
+ font-size: 0.75em;
+}
+
+.title .tag,
+.subtitle .tag {
+ vertical-align: middle;
+}
+
+.title {
+ color: #363636;
+ font-size: 2rem;
+ font-weight: 600;
+ line-height: 1.125;
+}
+
+.title strong {
+ color: inherit;
+ font-weight: inherit;
+}
+
+.title + .highlight {
+ margin-top: -0.75rem;
+}
+
+.title:not(.is-spaced) + .subtitle {
+ margin-top: -1.25rem;
+}
+
+.title.is-1 {
+ font-size: 3rem;
+}
+
+.title.is-2 {
+ font-size: 2.5rem;
+}
+
+.title.is-3 {
+ font-size: 2rem;
+}
+
+.title.is-4 {
+ font-size: 1.5rem;
+}
+
+.title.is-5 {
+ font-size: 1.25rem;
+}
+
+.title.is-6 {
+ font-size: 1rem;
+}
+
+.title.is-7 {
+ font-size: 0.75rem;
+}
+
+.subtitle {
+ color: #4a4a4a;
+ font-size: 1.25rem;
+ font-weight: 400;
+ line-height: 1.25;
+}
+
+.subtitle strong {
+ color: #363636;
+ font-weight: 600;
+}
+
+.subtitle:not(.is-spaced) + .title {
+ margin-top: -1.25rem;
+}
+
+.subtitle.is-1 {
+ font-size: 3rem;
+}
+
+.subtitle.is-2 {
+ font-size: 2.5rem;
+}
+
+.subtitle.is-3 {
+ font-size: 2rem;
+}
+
+.subtitle.is-4 {
+ font-size: 1.5rem;
+}
+
+.subtitle.is-5 {
+ font-size: 1.25rem;
+}
+
+.subtitle.is-6 {
+ font-size: 1rem;
+}
+
+.subtitle.is-7 {
+ font-size: 0.75rem;
+}
+
+.heading {
+ display: block;
+ font-size: 11px;
+ letter-spacing: 1px;
+ margin-bottom: 5px;
+ text-transform: uppercase;
+}
+
+.highlight {
+ font-weight: 400;
+ max-width: 100%;
+ overflow: hidden;
+ padding: 0;
+}
+
+.highlight pre {
+ overflow: auto;
+ max-width: 100%;
+}
+
+.number {
+ align-items: center;
+ background-color: whitesmoke;
+ border-radius: 290486px;
+ display: inline-flex;
+ font-size: 1.25rem;
+ height: 2em;
+ justify-content: center;
+ margin-right: 1.5rem;
+ min-width: 2.5em;
+ padding: 0.25rem 0.5rem;
+ text-align: center;
+ vertical-align: top;
+}
+
+.input, .textarea, .select select {
+ background-color: white;
+ border-color: #dbdbdb;
+ border-radius: 4px;
+ color: #363636;
+}
+
+.input::-moz-placeholder, .textarea::-moz-placeholder, .select select::-moz-placeholder {
+ color: rgba(54, 54, 54, 0.3);
+}
+
+.input::-webkit-input-placeholder, .textarea::-webkit-input-placeholder, .select select::-webkit-input-placeholder {
+ color: rgba(54, 54, 54, 0.3);
+}
+
+.input:-moz-placeholder, .textarea:-moz-placeholder, .select select:-moz-placeholder {
+ color: rgba(54, 54, 54, 0.3);
+}
+
+.input:-ms-input-placeholder, .textarea:-ms-input-placeholder, .select select:-ms-input-placeholder {
+ color: rgba(54, 54, 54, 0.3);
+}
+
+.input:hover, .textarea:hover, .select select:hover, .is-hovered.input, .is-hovered.textarea, .select select.is-hovered {
+ border-color: #b5b5b5;
+}
+
+.input:focus, .textarea:focus, .select select:focus, .is-focused.input, .is-focused.textarea, .select select.is-focused, .input:active, .textarea:active, .select select:active, .is-active.input, .is-active.textarea, .select select.is-active {
+ border-color: #3273dc;
+ box-shadow: 0 0 0 0.125em rgba(50, 115, 220, 0.25);
+}
+
+.input[disabled], .textarea[disabled], .select select[disabled],
+fieldset[disabled] .input,
+fieldset[disabled] .textarea,
+fieldset[disabled] .select select,
+.select fieldset[disabled] select {
+ background-color: whitesmoke;
+ border-color: whitesmoke;
+ box-shadow: none;
+ color: #7a7a7a;
+}
+
+.input[disabled]::-moz-placeholder, .textarea[disabled]::-moz-placeholder, .select select[disabled]::-moz-placeholder,
+fieldset[disabled] .input::-moz-placeholder,
+fieldset[disabled] .textarea::-moz-placeholder,
+fieldset[disabled] .select select::-moz-placeholder,
+.select fieldset[disabled] select::-moz-placeholder {
+ color: rgba(122, 122, 122, 0.3);
+}
+
+.input[disabled]::-webkit-input-placeholder, .textarea[disabled]::-webkit-input-placeholder, .select select[disabled]::-webkit-input-placeholder,
+fieldset[disabled] .input::-webkit-input-placeholder,
+fieldset[disabled] .textarea::-webkit-input-placeholder,
+fieldset[disabled] .select select::-webkit-input-placeholder,
+.select fieldset[disabled] select::-webkit-input-placeholder {
+ color: rgba(122, 122, 122, 0.3);
+}
+
+.input[disabled]:-moz-placeholder, .textarea[disabled]:-moz-placeholder, .select select[disabled]:-moz-placeholder,
+fieldset[disabled] .input:-moz-placeholder,
+fieldset[disabled] .textarea:-moz-placeholder,
+fieldset[disabled] .select select:-moz-placeholder,
+.select fieldset[disabled] select:-moz-placeholder {
+ color: rgba(122, 122, 122, 0.3);
+}
+
+.input[disabled]:-ms-input-placeholder, .textarea[disabled]:-ms-input-placeholder, .select select[disabled]:-ms-input-placeholder,
+fieldset[disabled] .input:-ms-input-placeholder,
+fieldset[disabled] .textarea:-ms-input-placeholder,
+fieldset[disabled] .select select:-ms-input-placeholder,
+.select fieldset[disabled] select:-ms-input-placeholder {
+ color: rgba(122, 122, 122, 0.3);
+}
+
+.input, .textarea {
+ box-shadow: inset 0 1px 2px rgba(10, 10, 10, 0.1);
+ max-width: 100%;
+ width: 100%;
+}
+
+.input[readonly], .textarea[readonly] {
+ box-shadow: none;
+}
+
+.is-white.input, .is-white.textarea {
+ border-color: white;
+}
+
+.is-white.input:focus, .is-white.textarea:focus, .is-white.is-focused.input, .is-white.is-focused.textarea, .is-white.input:active, .is-white.textarea:active, .is-white.is-active.input, .is-white.is-active.textarea {
+ box-shadow: 0 0 0 0.125em rgba(255, 255, 255, 0.25);
+}
+
+.is-black.input, .is-black.textarea {
+ border-color: #0a0a0a;
+}
+
+.is-black.input:focus, .is-black.textarea:focus, .is-black.is-focused.input, .is-black.is-focused.textarea, .is-black.input:active, .is-black.textarea:active, .is-black.is-active.input, .is-black.is-active.textarea {
+ box-shadow: 0 0 0 0.125em rgba(10, 10, 10, 0.25);
+}
+
+.is-light.input, .is-light.textarea {
+ border-color: whitesmoke;
+}
+
+.is-light.input:focus, .is-light.textarea:focus, .is-light.is-focused.input, .is-light.is-focused.textarea, .is-light.input:active, .is-light.textarea:active, .is-light.is-active.input, .is-light.is-active.textarea {
+ box-shadow: 0 0 0 0.125em rgba(245, 245, 245, 0.25);
+}
+
+.is-dark.input, .is-dark.textarea {
+ border-color: #363636;
+}
+
+.is-dark.input:focus, .is-dark.textarea:focus, .is-dark.is-focused.input, .is-dark.is-focused.textarea, .is-dark.input:active, .is-dark.textarea:active, .is-dark.is-active.input, .is-dark.is-active.textarea {
+ box-shadow: 0 0 0 0.125em rgba(54, 54, 54, 0.25);
+}
+
+.is-primary.input, .is-primary.textarea {
+ border-color: #00d1b2;
+}
+
+.is-primary.input:focus, .is-primary.textarea:focus, .is-primary.is-focused.input, .is-primary.is-focused.textarea, .is-primary.input:active, .is-primary.textarea:active, .is-primary.is-active.input, .is-primary.is-active.textarea {
+ box-shadow: 0 0 0 0.125em rgba(0, 209, 178, 0.25);
+}
+
+.is-link.input, .is-link.textarea {
+ border-color: #3273dc;
+}
+
+.is-link.input:focus, .is-link.textarea:focus, .is-link.is-focused.input, .is-link.is-focused.textarea, .is-link.input:active, .is-link.textarea:active, .is-link.is-active.input, .is-link.is-active.textarea {
+ box-shadow: 0 0 0 0.125em rgba(50, 115, 220, 0.25);
+}
+
+.is-info.input, .is-info.textarea {
+ border-color: #209cee;
+}
+
+.is-info.input:focus, .is-info.textarea:focus, .is-info.is-focused.input, .is-info.is-focused.textarea, .is-info.input:active, .is-info.textarea:active, .is-info.is-active.input, .is-info.is-active.textarea {
+ box-shadow: 0 0 0 0.125em rgba(32, 156, 238, 0.25);
+}
+
+.is-success.input, .is-success.textarea {
+ border-color: #23d160;
+}
+
+.is-success.input:focus, .is-success.textarea:focus, .is-success.is-focused.input, .is-success.is-focused.textarea, .is-success.input:active, .is-success.textarea:active, .is-success.is-active.input, .is-success.is-active.textarea {
+ box-shadow: 0 0 0 0.125em rgba(35, 209, 96, 0.25);
+}
+
+.is-warning.input, .is-warning.textarea {
+ border-color: #ffdd57;
+}
+
+.is-warning.input:focus, .is-warning.textarea:focus, .is-warning.is-focused.input, .is-warning.is-focused.textarea, .is-warning.input:active, .is-warning.textarea:active, .is-warning.is-active.input, .is-warning.is-active.textarea {
+ box-shadow: 0 0 0 0.125em rgba(255, 221, 87, 0.25);
+}
+
+.is-danger.input, .is-danger.textarea {
+ border-color: #ff3860;
+}
+
+.is-danger.input:focus, .is-danger.textarea:focus, .is-danger.is-focused.input, .is-danger.is-focused.textarea, .is-danger.input:active, .is-danger.textarea:active, .is-danger.is-active.input, .is-danger.is-active.textarea {
+ box-shadow: 0 0 0 0.125em rgba(255, 56, 96, 0.25);
+}
+
+.is-small.input, .is-small.textarea {
+ border-radius: 2px;
+ font-size: 0.75rem;
+}
+
+.is-medium.input, .is-medium.textarea {
+ font-size: 1.25rem;
+}
+
+.is-large.input, .is-large.textarea {
+ font-size: 1.5rem;
+}
+
+.is-fullwidth.input, .is-fullwidth.textarea {
+ display: block;
+ width: 100%;
+}
+
+.is-inline.input, .is-inline.textarea {
+ display: inline;
+ width: auto;
+}
+
+.input.is-rounded {
+ border-radius: 290486px;
+ padding-left: 1em;
+ padding-right: 1em;
+}
+
+.input.is-static {
+ background-color: transparent;
+ border-color: transparent;
+ box-shadow: none;
+ padding-left: 0;
+ padding-right: 0;
+}
+
+.textarea {
+ display: block;
+ max-width: 100%;
+ min-width: 100%;
+ padding: 0.625em;
+ resize: vertical;
+}
+
+.textarea:not([rows]) {
+ max-height: 600px;
+ min-height: 120px;
+}
+
+.textarea[rows] {
+ height: initial;
+}
+
+.textarea.has-fixed-size {
+ resize: none;
+}
+
+.checkbox, .radio {
+ cursor: pointer;
+ display: inline-block;
+ line-height: 1.25;
+ position: relative;
+}
+
+.checkbox input, .radio input {
+ cursor: pointer;
+}
+
+.checkbox:hover, .radio:hover {
+ color: #363636;
+}
+
+.checkbox[disabled], .radio[disabled],
+fieldset[disabled] .checkbox,
+fieldset[disabled] .radio {
+ color: #7a7a7a;
+ cursor: not-allowed;
+}
+
+.radio + .radio {
+ margin-left: 0.5em;
+}
+
+.select {
+ display: inline-block;
+ max-width: 100%;
+ position: relative;
+ vertical-align: top;
+}
+
+.select:not(.is-multiple) {
+ height: 2.25em;
+}
+
+.select:not(.is-multiple):not(.is-loading)::after {
+ border-color: #3273dc;
+ right: 1.125em;
+ z-index: 4;
+}
+
+.select.is-rounded select {
+ border-radius: 290486px;
+ padding-left: 1em;
+}
+
+.select select {
+ cursor: pointer;
+ display: block;
+ font-size: 1em;
+ max-width: 100%;
+ outline: none;
+}
+
+.select select::-ms-expand {
+ display: none;
+}
+
+.select select[disabled]:hover,
+fieldset[disabled] .select select:hover {
+ border-color: whitesmoke;
+}
+
+.select select:not([multiple]) {
+ padding-right: 2.5em;
+}
+
+.select select[multiple] {
+ height: auto;
+ padding: 0;
+}
+
+.select select[multiple] option {
+ padding: 0.5em 1em;
+}
+
+.select:not(.is-multiple):not(.is-loading):hover::after {
+ border-color: #363636;
+}
+
+.select.is-white:not(:hover)::after {
+ border-color: white;
+}
+
+.select.is-white select {
+ border-color: white;
+}
+
+.select.is-white select:hover, .select.is-white select.is-hovered {
+ border-color: #f2f2f2;
+}
+
+.select.is-white select:focus, .select.is-white select.is-focused, .select.is-white select:active, .select.is-white select.is-active {
+ box-shadow: 0 0 0 0.125em rgba(255, 255, 255, 0.25);
+}
+
+.select.is-black:not(:hover)::after {
+ border-color: #0a0a0a;
+}
+
+.select.is-black select {
+ border-color: #0a0a0a;
+}
+
+.select.is-black select:hover, .select.is-black select.is-hovered {
+ border-color: black;
+}
+
+.select.is-black select:focus, .select.is-black select.is-focused, .select.is-black select:active, .select.is-black select.is-active {
+ box-shadow: 0 0 0 0.125em rgba(10, 10, 10, 0.25);
+}
+
+.select.is-light:not(:hover)::after {
+ border-color: whitesmoke;
+}
+
+.select.is-light select {
+ border-color: whitesmoke;
+}
+
+.select.is-light select:hover, .select.is-light select.is-hovered {
+ border-color: #e8e8e8;
+}
+
+.select.is-light select:focus, .select.is-light select.is-focused, .select.is-light select:active, .select.is-light select.is-active {
+ box-shadow: 0 0 0 0.125em rgba(245, 245, 245, 0.25);
+}
+
+.select.is-dark:not(:hover)::after {
+ border-color: #363636;
+}
+
+.select.is-dark select {
+ border-color: #363636;
+}
+
+.select.is-dark select:hover, .select.is-dark select.is-hovered {
+ border-color: #292929;
+}
+
+.select.is-dark select:focus, .select.is-dark select.is-focused, .select.is-dark select:active, .select.is-dark select.is-active {
+ box-shadow: 0 0 0 0.125em rgba(54, 54, 54, 0.25);
+}
+
+.select.is-primary:not(:hover)::after {
+ border-color: #00d1b2;
+}
+
+.select.is-primary select {
+ border-color: #00d1b2;
+}
+
+.select.is-primary select:hover, .select.is-primary select.is-hovered {
+ border-color: #00b89c;
+}
+
+.select.is-primary select:focus, .select.is-primary select.is-focused, .select.is-primary select:active, .select.is-primary select.is-active {
+ box-shadow: 0 0 0 0.125em rgba(0, 209, 178, 0.25);
+}
+
+.select.is-link:not(:hover)::after {
+ border-color: #3273dc;
+}
+
+.select.is-link select {
+ border-color: #3273dc;
+}
+
+.select.is-link select:hover, .select.is-link select.is-hovered {
+ border-color: #2366d1;
+}
+
+.select.is-link select:focus, .select.is-link select.is-focused, .select.is-link select:active, .select.is-link select.is-active {
+ box-shadow: 0 0 0 0.125em rgba(50, 115, 220, 0.25);
+}
+
+.select.is-info:not(:hover)::after {
+ border-color: #209cee;
+}
+
+.select.is-info select {
+ border-color: #209cee;
+}
+
+.select.is-info select:hover, .select.is-info select.is-hovered {
+ border-color: #118fe4;
+}
+
+.select.is-info select:focus, .select.is-info select.is-focused, .select.is-info select:active, .select.is-info select.is-active {
+ box-shadow: 0 0 0 0.125em rgba(32, 156, 238, 0.25);
+}
+
+.select.is-success:not(:hover)::after {
+ border-color: #23d160;
+}
+
+.select.is-success select {
+ border-color: #23d160;
+}
+
+.select.is-success select:hover, .select.is-success select.is-hovered {
+ border-color: #20bc56;
+}
+
+.select.is-success select:focus, .select.is-success select.is-focused, .select.is-success select:active, .select.is-success select.is-active {
+ box-shadow: 0 0 0 0.125em rgba(35, 209, 96, 0.25);
+}
+
+.select.is-warning:not(:hover)::after {
+ border-color: #ffdd57;
+}
+
+.select.is-warning select {
+ border-color: #ffdd57;
+}
+
+.select.is-warning select:hover, .select.is-warning select.is-hovered {
+ border-color: #ffd83d;
+}
+
+.select.is-warning select:focus, .select.is-warning select.is-focused, .select.is-warning select:active, .select.is-warning select.is-active {
+ box-shadow: 0 0 0 0.125em rgba(255, 221, 87, 0.25);
+}
+
+.select.is-danger:not(:hover)::after {
+ border-color: #ff3860;
+}
+
+.select.is-danger select {
+ border-color: #ff3860;
+}
+
+.select.is-danger select:hover, .select.is-danger select.is-hovered {
+ border-color: #ff1f4b;
+}
+
+.select.is-danger select:focus, .select.is-danger select.is-focused, .select.is-danger select:active, .select.is-danger select.is-active {
+ box-shadow: 0 0 0 0.125em rgba(255, 56, 96, 0.25);
+}
+
+.select.is-small {
+ border-radius: 2px;
+ font-size: 0.75rem;
+}
+
+.select.is-medium {
+ font-size: 1.25rem;
+}
+
+.select.is-large {
+ font-size: 1.5rem;
+}
+
+.select.is-disabled::after {
+ border-color: #7a7a7a;
+}
+
+.select.is-fullwidth {
+ width: 100%;
+}
+
+.select.is-fullwidth select {
+ width: 100%;
+}
+
+.select.is-loading::after {
+ margin-top: 0;
+ position: absolute;
+ right: 0.625em;
+ top: 0.625em;
+ -webkit-transform: none;
+ transform: none;
+}
+
+.select.is-loading.is-small:after {
+ font-size: 0.75rem;
+}
+
+.select.is-loading.is-medium:after {
+ font-size: 1.25rem;
+}
+
+.select.is-loading.is-large:after {
+ font-size: 1.5rem;
+}
+
+.file {
+ align-items: stretch;
+ display: flex;
+ justify-content: flex-start;
+ position: relative;
+}
+
+.file.is-white .file-cta {
+ background-color: white;
+ border-color: transparent;
+ color: #0a0a0a;
+}
+
+.file.is-white:hover .file-cta, .file.is-white.is-hovered .file-cta {
+ background-color: #f9f9f9;
+ border-color: transparent;
+ color: #0a0a0a;
+}
+
+.file.is-white:focus .file-cta, .file.is-white.is-focused .file-cta {
+ border-color: transparent;
+ box-shadow: 0 0 0.5em rgba(255, 255, 255, 0.25);
+ color: #0a0a0a;
+}
+
+.file.is-white:active .file-cta, .file.is-white.is-active .file-cta {
+ background-color: #f2f2f2;
+ border-color: transparent;
+ color: #0a0a0a;
+}
+
+.file.is-black .file-cta {
+ background-color: #0a0a0a;
+ border-color: transparent;
+ color: white;
+}
+
+.file.is-black:hover .file-cta, .file.is-black.is-hovered .file-cta {
+ background-color: #040404;
+ border-color: transparent;
+ color: white;
+}
+
+.file.is-black:focus .file-cta, .file.is-black.is-focused .file-cta {
+ border-color: transparent;
+ box-shadow: 0 0 0.5em rgba(10, 10, 10, 0.25);
+ color: white;
+}
+
+.file.is-black:active .file-cta, .file.is-black.is-active .file-cta {
+ background-color: black;
+ border-color: transparent;
+ color: white;
+}
+
+.file.is-light .file-cta {
+ background-color: whitesmoke;
+ border-color: transparent;
+ color: #363636;
+}
+
+.file.is-light:hover .file-cta, .file.is-light.is-hovered .file-cta {
+ background-color: #eeeeee;
+ border-color: transparent;
+ color: #363636;
+}
+
+.file.is-light:focus .file-cta, .file.is-light.is-focused .file-cta {
+ border-color: transparent;
+ box-shadow: 0 0 0.5em rgba(245, 245, 245, 0.25);
+ color: #363636;
+}
+
+.file.is-light:active .file-cta, .file.is-light.is-active .file-cta {
+ background-color: #e8e8e8;
+ border-color: transparent;
+ color: #363636;
+}
+
+.file.is-dark .file-cta {
+ background-color: #363636;
+ border-color: transparent;
+ color: whitesmoke;
+}
+
+.file.is-dark:hover .file-cta, .file.is-dark.is-hovered .file-cta {
+ background-color: #2f2f2f;
+ border-color: transparent;
+ color: whitesmoke;
+}
+
+.file.is-dark:focus .file-cta, .file.is-dark.is-focused .file-cta {
+ border-color: transparent;
+ box-shadow: 0 0 0.5em rgba(54, 54, 54, 0.25);
+ color: whitesmoke;
+}
+
+.file.is-dark:active .file-cta, .file.is-dark.is-active .file-cta {
+ background-color: #292929;
+ border-color: transparent;
+ color: whitesmoke;
+}
+
+.file.is-primary .file-cta {
+ background-color: #00d1b2;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-primary:hover .file-cta, .file.is-primary.is-hovered .file-cta {
+ background-color: #00c4a7;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-primary:focus .file-cta, .file.is-primary.is-focused .file-cta {
+ border-color: transparent;
+ box-shadow: 0 0 0.5em rgba(0, 209, 178, 0.25);
+ color: #fff;
+}
+
+.file.is-primary:active .file-cta, .file.is-primary.is-active .file-cta {
+ background-color: #00b89c;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-link .file-cta {
+ background-color: #3273dc;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-link:hover .file-cta, .file.is-link.is-hovered .file-cta {
+ background-color: #276cda;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-link:focus .file-cta, .file.is-link.is-focused .file-cta {
+ border-color: transparent;
+ box-shadow: 0 0 0.5em rgba(50, 115, 220, 0.25);
+ color: #fff;
+}
+
+.file.is-link:active .file-cta, .file.is-link.is-active .file-cta {
+ background-color: #2366d1;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-info .file-cta {
+ background-color: #209cee;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-info:hover .file-cta, .file.is-info.is-hovered .file-cta {
+ background-color: #1496ed;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-info:focus .file-cta, .file.is-info.is-focused .file-cta {
+ border-color: transparent;
+ box-shadow: 0 0 0.5em rgba(32, 156, 238, 0.25);
+ color: #fff;
+}
+
+.file.is-info:active .file-cta, .file.is-info.is-active .file-cta {
+ background-color: #118fe4;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-success .file-cta {
+ background-color: #23d160;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-success:hover .file-cta, .file.is-success.is-hovered .file-cta {
+ background-color: #22c65b;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-success:focus .file-cta, .file.is-success.is-focused .file-cta {
+ border-color: transparent;
+ box-shadow: 0 0 0.5em rgba(35, 209, 96, 0.25);
+ color: #fff;
+}
+
+.file.is-success:active .file-cta, .file.is-success.is-active .file-cta {
+ background-color: #20bc56;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-warning .file-cta {
+ background-color: #ffdd57;
+ border-color: transparent;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.file.is-warning:hover .file-cta, .file.is-warning.is-hovered .file-cta {
+ background-color: #ffdb4a;
+ border-color: transparent;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.file.is-warning:focus .file-cta, .file.is-warning.is-focused .file-cta {
+ border-color: transparent;
+ box-shadow: 0 0 0.5em rgba(255, 221, 87, 0.25);
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.file.is-warning:active .file-cta, .file.is-warning.is-active .file-cta {
+ background-color: #ffd83d;
+ border-color: transparent;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.file.is-danger .file-cta {
+ background-color: #ff3860;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-danger:hover .file-cta, .file.is-danger.is-hovered .file-cta {
+ background-color: #ff2b56;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-danger:focus .file-cta, .file.is-danger.is-focused .file-cta {
+ border-color: transparent;
+ box-shadow: 0 0 0.5em rgba(255, 56, 96, 0.25);
+ color: #fff;
+}
+
+.file.is-danger:active .file-cta, .file.is-danger.is-active .file-cta {
+ background-color: #ff1f4b;
+ border-color: transparent;
+ color: #fff;
+}
+
+.file.is-small {
+ font-size: 0.75rem;
+}
+
+.file.is-medium {
+ font-size: 1.25rem;
+}
+
+.file.is-medium .file-icon .fa {
+ font-size: 21px;
+}
+
+.file.is-large {
+ font-size: 1.5rem;
+}
+
+.file.is-large .file-icon .fa {
+ font-size: 28px;
+}
+
+.file.has-name .file-cta {
+ border-bottom-right-radius: 0;
+ border-top-right-radius: 0;
+}
+
+.file.has-name .file-name {
+ border-bottom-left-radius: 0;
+ border-top-left-radius: 0;
+}
+
+.file.has-name.is-empty .file-cta {
+ border-radius: 4px;
+}
+
+.file.has-name.is-empty .file-name {
+ display: none;
+}
+
+.file.is-boxed .file-label {
+ flex-direction: column;
+}
+
+.file.is-boxed .file-cta {
+ flex-direction: column;
+ height: auto;
+ padding: 1em 3em;
+}
+
+.file.is-boxed .file-name {
+ border-width: 0 1px 1px;
+}
+
+.file.is-boxed .file-icon {
+ height: 1.5em;
+ width: 1.5em;
+}
+
+.file.is-boxed .file-icon .fa {
+ font-size: 21px;
+}
+
+.file.is-boxed.is-small .file-icon .fa {
+ font-size: 14px;
+}
+
+.file.is-boxed.is-medium .file-icon .fa {
+ font-size: 28px;
+}
+
+.file.is-boxed.is-large .file-icon .fa {
+ font-size: 35px;
+}
+
+.file.is-boxed.has-name .file-cta {
+ border-radius: 4px 4px 0 0;
+}
+
+.file.is-boxed.has-name .file-name {
+ border-radius: 0 0 4px 4px;
+ border-width: 0 1px 1px;
+}
+
+.file.is-centered {
+ justify-content: center;
+}
+
+.file.is-fullwidth .file-label {
+ width: 100%;
+}
+
+.file.is-fullwidth .file-name {
+ flex-grow: 1;
+ max-width: none;
+}
+
+.file.is-right {
+ justify-content: flex-end;
+}
+
+.file.is-right .file-cta {
+ border-radius: 0 4px 4px 0;
+}
+
+.file.is-right .file-name {
+ border-radius: 4px 0 0 4px;
+ border-width: 1px 0 1px 1px;
+ order: -1;
+}
+
+.file-label {
+ align-items: stretch;
+ display: flex;
+ cursor: pointer;
+ justify-content: flex-start;
+ overflow: hidden;
+ position: relative;
+}
+
+.file-label:hover .file-cta {
+ background-color: #eeeeee;
+ color: #363636;
+}
+
+.file-label:hover .file-name {
+ border-color: #d5d5d5;
+}
+
+.file-label:active .file-cta {
+ background-color: #e8e8e8;
+ color: #363636;
+}
+
+.file-label:active .file-name {
+ border-color: #cfcfcf;
+}
+
+.file-input {
+ height: 100%;
+ left: 0;
+ opacity: 0;
+ outline: none;
+ position: absolute;
+ top: 0;
+ width: 100%;
+}
+
+.file-cta,
+.file-name {
+ border-color: #dbdbdb;
+ border-radius: 4px;
+ font-size: 1em;
+ padding-left: 1em;
+ padding-right: 1em;
+ white-space: nowrap;
+}
+
+.file-cta {
+ background-color: whitesmoke;
+ color: #4a4a4a;
+}
+
+.file-name {
+ border-color: #dbdbdb;
+ border-style: solid;
+ border-width: 1px 1px 1px 0;
+ display: block;
+ max-width: 16em;
+ overflow: hidden;
+ text-align: left;
+ text-overflow: ellipsis;
+}
+
+.file-icon {
+ align-items: center;
+ display: flex;
+ height: 1em;
+ justify-content: center;
+ margin-right: 0.5em;
+ width: 1em;
+}
+
+.file-icon .fa {
+ font-size: 14px;
+}
+
+.label {
+ color: #363636;
+ display: block;
+ font-size: 1rem;
+ font-weight: 700;
+}
+
+.label:not(:last-child) {
+ margin-bottom: 0.5em;
+}
+
+.label.is-small {
+ font-size: 0.75rem;
+}
+
+.label.is-medium {
+ font-size: 1.25rem;
+}
+
+.label.is-large {
+ font-size: 1.5rem;
+}
+
+.help {
+ display: block;
+ font-size: 0.75rem;
+ margin-top: 0.25rem;
+}
+
+.help.is-white {
+ color: white;
+}
+
+.help.is-black {
+ color: #0a0a0a;
+}
+
+.help.is-light {
+ color: whitesmoke;
+}
+
+.help.is-dark {
+ color: #363636;
+}
+
+.help.is-primary {
+ color: #00d1b2;
+}
+
+.help.is-link {
+ color: #3273dc;
+}
+
+.help.is-info {
+ color: #209cee;
+}
+
+.help.is-success {
+ color: #23d160;
+}
+
+.help.is-warning {
+ color: #ffdd57;
+}
+
+.help.is-danger {
+ color: #ff3860;
+}
+
+.field:not(:last-child) {
+ margin-bottom: 0.75rem;
+}
+
+.field.has-addons {
+ display: flex;
+ justify-content: flex-start;
+}
+
+.field.has-addons .control:not(:last-child) {
+ margin-right: -1px;
+}
+
+.field.has-addons .control:not(:first-child):not(:last-child) .button,
+.field.has-addons .control:not(:first-child):not(:last-child) .input,
+.field.has-addons .control:not(:first-child):not(:last-child) .select select {
+ border-radius: 0;
+}
+
+.field.has-addons .control:first-child:not(:only-child) .button,
+.field.has-addons .control:first-child:not(:only-child) .input,
+.field.has-addons .control:first-child:not(:only-child) .select select {
+ border-bottom-right-radius: 0;
+ border-top-right-radius: 0;
+}
+
+.field.has-addons .control:last-child:not(:only-child) .button,
+.field.has-addons .control:last-child:not(:only-child) .input,
+.field.has-addons .control:last-child:not(:only-child) .select select {
+ border-bottom-left-radius: 0;
+ border-top-left-radius: 0;
+}
+
+.field.has-addons .control .button:not([disabled]):hover, .field.has-addons .control .button:not([disabled]).is-hovered,
+.field.has-addons .control .input:not([disabled]):hover,
+.field.has-addons .control .input:not([disabled]).is-hovered,
+.field.has-addons .control .select select:not([disabled]):hover,
+.field.has-addons .control .select select:not([disabled]).is-hovered {
+ z-index: 2;
+}
+
+.field.has-addons .control .button:not([disabled]):focus, .field.has-addons .control .button:not([disabled]).is-focused, .field.has-addons .control .button:not([disabled]):active, .field.has-addons .control .button:not([disabled]).is-active,
+.field.has-addons .control .input:not([disabled]):focus,
+.field.has-addons .control .input:not([disabled]).is-focused,
+.field.has-addons .control .input:not([disabled]):active,
+.field.has-addons .control .input:not([disabled]).is-active,
+.field.has-addons .control .select select:not([disabled]):focus,
+.field.has-addons .control .select select:not([disabled]).is-focused,
+.field.has-addons .control .select select:not([disabled]):active,
+.field.has-addons .control .select select:not([disabled]).is-active {
+ z-index: 3;
+}
+
+.field.has-addons .control .button:not([disabled]):focus:hover, .field.has-addons .control .button:not([disabled]).is-focused:hover, .field.has-addons .control .button:not([disabled]):active:hover, .field.has-addons .control .button:not([disabled]).is-active:hover,
+.field.has-addons .control .input:not([disabled]):focus:hover,
+.field.has-addons .control .input:not([disabled]).is-focused:hover,
+.field.has-addons .control .input:not([disabled]):active:hover,
+.field.has-addons .control .input:not([disabled]).is-active:hover,
+.field.has-addons .control .select select:not([disabled]):focus:hover,
+.field.has-addons .control .select select:not([disabled]).is-focused:hover,
+.field.has-addons .control .select select:not([disabled]):active:hover,
+.field.has-addons .control .select select:not([disabled]).is-active:hover {
+ z-index: 4;
+}
+
+.field.has-addons .control.is-expanded {
+ flex-grow: 1;
+ flex-shrink: 1;
+}
+
+.field.has-addons.has-addons-centered {
+ justify-content: center;
+}
+
+.field.has-addons.has-addons-right {
+ justify-content: flex-end;
+}
+
+.field.has-addons.has-addons-fullwidth .control {
+ flex-grow: 1;
+ flex-shrink: 0;
+}
+
+.field.is-grouped {
+ display: flex;
+ justify-content: flex-start;
+}
+
+.field.is-grouped > .control {
+ flex-shrink: 0;
+}
+
+.field.is-grouped > .control:not(:last-child) {
+ margin-bottom: 0;
+ margin-right: 0.75rem;
+}
+
+.field.is-grouped > .control.is-expanded {
+ flex-grow: 1;
+ flex-shrink: 1;
+}
+
+.field.is-grouped.is-grouped-centered {
+ justify-content: center;
+}
+
+.field.is-grouped.is-grouped-right {
+ justify-content: flex-end;
+}
+
+.field.is-grouped.is-grouped-multiline {
+ flex-wrap: wrap;
+}
+
+.field.is-grouped.is-grouped-multiline > .control:last-child, .field.is-grouped.is-grouped-multiline > .control:not(:last-child) {
+ margin-bottom: 0.75rem;
+}
+
+.field.is-grouped.is-grouped-multiline:last-child {
+ margin-bottom: -0.75rem;
+}
+
+.field.is-grouped.is-grouped-multiline:not(:last-child) {
+ margin-bottom: 0;
+}
+
+@media screen and (min-width: 769px), print {
+ .field.is-horizontal {
+ display: flex;
+ }
+}
+
+.field-label .label {
+ font-size: inherit;
+}
+
+@media screen and (max-width: 768px) {
+ .field-label {
+ margin-bottom: 0.5rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .field-label {
+ flex-basis: 0;
+ flex-grow: 1;
+ flex-shrink: 0;
+ margin-right: 1.5rem;
+ text-align: right;
+ }
+ .field-label.is-small {
+ font-size: 0.75rem;
+ padding-top: 0.375em;
+ }
+ .field-label.is-normal {
+ padding-top: 0.375em;
+ }
+ .field-label.is-medium {
+ font-size: 1.25rem;
+ padding-top: 0.375em;
+ }
+ .field-label.is-large {
+ font-size: 1.5rem;
+ padding-top: 0.375em;
+ }
+}
+
+.field-body .field .field {
+ margin-bottom: 0;
+}
+
+@media screen and (min-width: 769px), print {
+ .field-body {
+ display: flex;
+ flex-basis: 0;
+ flex-grow: 5;
+ flex-shrink: 1;
+ }
+ .field-body .field {
+ margin-bottom: 0;
+ }
+ .field-body > .field {
+ flex-shrink: 1;
+ }
+ .field-body > .field:not(.is-narrow) {
+ flex-grow: 1;
+ }
+ .field-body > .field:not(:last-child) {
+ margin-right: 0.75rem;
+ }
+}
+
+.control {
+ box-sizing: border-box;
+ clear: both;
+ font-size: 1rem;
+ position: relative;
+ text-align: left;
+}
+
+.control.has-icons-left .input:focus ~ .icon,
+.control.has-icons-left .select:focus ~ .icon, .control.has-icons-right .input:focus ~ .icon,
+.control.has-icons-right .select:focus ~ .icon {
+ color: #7a7a7a;
+}
+
+.control.has-icons-left .input.is-small ~ .icon,
+.control.has-icons-left .select.is-small ~ .icon, .control.has-icons-right .input.is-small ~ .icon,
+.control.has-icons-right .select.is-small ~ .icon {
+ font-size: 0.75rem;
+}
+
+.control.has-icons-left .input.is-medium ~ .icon,
+.control.has-icons-left .select.is-medium ~ .icon, .control.has-icons-right .input.is-medium ~ .icon,
+.control.has-icons-right .select.is-medium ~ .icon {
+ font-size: 1.25rem;
+}
+
+.control.has-icons-left .input.is-large ~ .icon,
+.control.has-icons-left .select.is-large ~ .icon, .control.has-icons-right .input.is-large ~ .icon,
+.control.has-icons-right .select.is-large ~ .icon {
+ font-size: 1.5rem;
+}
+
+.control.has-icons-left .icon, .control.has-icons-right .icon {
+ color: #dbdbdb;
+ height: 2.25em;
+ pointer-events: none;
+ position: absolute;
+ top: 0;
+ width: 2.25em;
+ z-index: 4;
+}
+
+.control.has-icons-left .input,
+.control.has-icons-left .select select {
+ padding-left: 2.25em;
+}
+
+.control.has-icons-left .icon.is-left {
+ left: 0;
+}
+
+.control.has-icons-right .input,
+.control.has-icons-right .select select {
+ padding-right: 2.25em;
+}
+
+.control.has-icons-right .icon.is-right {
+ right: 0;
+}
+
+.control.is-loading::after {
+ position: absolute !important;
+ right: 0.625em;
+ top: 0.625em;
+ z-index: 4;
+}
+
+.control.is-loading.is-small:after {
+ font-size: 0.75rem;
+}
+
+.control.is-loading.is-medium:after {
+ font-size: 1.25rem;
+}
+
+.control.is-loading.is-large:after {
+ font-size: 1.5rem;
+}
+
+.breadcrumb {
+ font-size: 1rem;
+ white-space: nowrap;
+}
+
+.breadcrumb a {
+ align-items: center;
+ color: #3273dc;
+ display: flex;
+ justify-content: center;
+ padding: 0 0.75em;
+}
+
+.breadcrumb a:hover {
+ color: #363636;
+}
+
+.breadcrumb li {
+ align-items: center;
+ display: flex;
+}
+
+.breadcrumb li:first-child a {
+ padding-left: 0;
+}
+
+.breadcrumb li.is-active a {
+ color: #363636;
+ cursor: default;
+ pointer-events: none;
+}
+
+.breadcrumb li + li::before {
+ color: #b5b5b5;
+ content: "\0002f";
+}
+
+.breadcrumb ul,
+.breadcrumb ol {
+ align-items: flex-start;
+ display: flex;
+ flex-wrap: wrap;
+ justify-content: flex-start;
+}
+
+.breadcrumb .icon:first-child {
+ margin-right: 0.5em;
+}
+
+.breadcrumb .icon:last-child {
+ margin-left: 0.5em;
+}
+
+.breadcrumb.is-centered ol,
+.breadcrumb.is-centered ul {
+ justify-content: center;
+}
+
+.breadcrumb.is-right ol,
+.breadcrumb.is-right ul {
+ justify-content: flex-end;
+}
+
+.breadcrumb.is-small {
+ font-size: 0.75rem;
+}
+
+.breadcrumb.is-medium {
+ font-size: 1.25rem;
+}
+
+.breadcrumb.is-large {
+ font-size: 1.5rem;
+}
+
+.breadcrumb.has-arrow-separator li + li::before {
+ content: "\02192";
+}
+
+.breadcrumb.has-bullet-separator li + li::before {
+ content: "\02022";
+}
+
+.breadcrumb.has-dot-separator li + li::before {
+ content: "\000b7";
+}
+
+.breadcrumb.has-succeeds-separator li + li::before {
+ content: "\0227B";
+}
+
+.card {
+ background-color: white;
+ box-shadow: 0 2px 3px rgba(10, 10, 10, 0.1), 0 0 0 1px rgba(10, 10, 10, 0.1);
+ color: #4a4a4a;
+ max-width: 100%;
+ position: relative;
+}
+
+.card-header {
+ background-color: transparent;
+ align-items: stretch;
+ box-shadow: 0 1px 2px rgba(10, 10, 10, 0.1);
+ display: flex;
+}
+
+.card-header-title {
+ align-items: center;
+ color: #363636;
+ display: flex;
+ flex-grow: 1;
+ font-weight: 700;
+ padding: 0.75rem;
+}
+
+.card-header-title.is-centered {
+ justify-content: center;
+}
+
+.card-header-icon {
+ align-items: center;
+ cursor: pointer;
+ display: flex;
+ justify-content: center;
+ padding: 0.75rem;
+}
+
+.card-image {
+ display: block;
+ position: relative;
+}
+
+.card-content {
+ background-color: transparent;
+ padding: 1.5rem;
+}
+
+.card-footer {
+ background-color: transparent;
+ border-top: 1px solid #dbdbdb;
+ align-items: stretch;
+ display: flex;
+}
+
+.card-footer-item {
+ align-items: center;
+ display: flex;
+ flex-basis: 0;
+ flex-grow: 1;
+ flex-shrink: 0;
+ justify-content: center;
+ padding: 0.75rem;
+}
+
+.card-footer-item:not(:last-child) {
+ border-right: 1px solid #dbdbdb;
+}
+
+.card .media:not(:last-child) {
+ margin-bottom: 1.5rem;
+}
+
+.dropdown {
+ display: inline-flex;
+ position: relative;
+ vertical-align: top;
+}
+
+.dropdown.is-active .dropdown-menu, .dropdown.is-hoverable:hover .dropdown-menu {
+ display: block;
+}
+
+.dropdown.is-right .dropdown-menu {
+ left: auto;
+ right: 0;
+}
+
+.dropdown.is-up .dropdown-menu {
+ bottom: 100%;
+ padding-bottom: 4px;
+ padding-top: initial;
+ top: auto;
+}
+
+.dropdown-menu {
+ display: none;
+ left: 0;
+ min-width: 12rem;
+ padding-top: 4px;
+ position: absolute;
+ top: 100%;
+ z-index: 20;
+}
+
+.dropdown-content {
+ background-color: white;
+ border-radius: 4px;
+ box-shadow: 0 2px 3px rgba(10, 10, 10, 0.1), 0 0 0 1px rgba(10, 10, 10, 0.1);
+ padding-bottom: 0.5rem;
+ padding-top: 0.5rem;
+}
+
+.dropdown-item {
+ color: #4a4a4a;
+ display: block;
+ font-size: 0.875rem;
+ line-height: 1.5;
+ padding: 0.375rem 1rem;
+ position: relative;
+}
+
+a.dropdown-item,
+button.dropdown-item {
+ padding-right: 3rem;
+ text-align: left;
+ white-space: nowrap;
+ width: 100%;
+}
+
+a.dropdown-item:hover,
+button.dropdown-item:hover {
+ background-color: whitesmoke;
+ color: #0a0a0a;
+}
+
+a.dropdown-item.is-active,
+button.dropdown-item.is-active {
+ background-color: #3273dc;
+ color: #fff;
+}
+
+.dropdown-divider {
+ background-color: #dbdbdb;
+ border: none;
+ display: block;
+ height: 1px;
+ margin: 0.5rem 0;
+}
+
+.level {
+ align-items: center;
+ justify-content: space-between;
+}
+
+.level code {
+ border-radius: 4px;
+}
+
+.level img {
+ display: inline-block;
+ vertical-align: top;
+}
+
+.level.is-mobile {
+ display: flex;
+}
+
+.level.is-mobile .level-left,
+.level.is-mobile .level-right {
+ display: flex;
+}
+
+.level.is-mobile .level-left + .level-right {
+ margin-top: 0;
+}
+
+.level.is-mobile .level-item:not(:last-child) {
+ margin-bottom: 0;
+ margin-right: 0.75rem;
+}
+
+.level.is-mobile .level-item:not(.is-narrow) {
+ flex-grow: 1;
+}
+
+@media screen and (min-width: 769px), print {
+ .level {
+ display: flex;
+ }
+ .level > .level-item:not(.is-narrow) {
+ flex-grow: 1;
+ }
+}
+
+.level-item {
+ align-items: center;
+ display: flex;
+ flex-basis: auto;
+ flex-grow: 0;
+ flex-shrink: 0;
+ justify-content: center;
+}
+
+.level-item .title,
+.level-item .subtitle {
+ margin-bottom: 0;
+}
+
+@media screen and (max-width: 768px) {
+ .level-item:not(:last-child) {
+ margin-bottom: 0.75rem;
+ }
+}
+
+.level-left,
+.level-right {
+ flex-basis: auto;
+ flex-grow: 0;
+ flex-shrink: 0;
+}
+
+.level-left .level-item.is-flexible,
+.level-right .level-item.is-flexible {
+ flex-grow: 1;
+}
+
+@media screen and (min-width: 769px), print {
+ .level-left .level-item:not(:last-child),
+ .level-right .level-item:not(:last-child) {
+ margin-right: 0.75rem;
+ }
+}
+
+.level-left {
+ align-items: center;
+ justify-content: flex-start;
+}
+
+@media screen and (max-width: 768px) {
+ .level-left + .level-right {
+ margin-top: 1.5rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .level-left {
+ display: flex;
+ }
+}
+
+.level-right {
+ align-items: center;
+ justify-content: flex-end;
+}
+
+@media screen and (min-width: 769px), print {
+ .level-right {
+ display: flex;
+ }
+}
+
+.list {
+ background-color: white;
+ border-radius: 4px;
+ box-shadow: 0 2px 3px rgba(10, 10, 10, 0.1), 0 0 0 1px rgba(10, 10, 10, 0.1);
+}
+
+.list-item {
+ display: block;
+ padding: 0.5em 1em;
+}
+
+.list-item:not(a) {
+ color: #4a4a4a;
+}
+
+.list-item:first-child {
+ border-top-left-radius: 4px;
+ border-top-right-radius: 4px;
+}
+
+.list-item:last-child {
+ border-bottom-left-radius: 4px;
+ border-bottom-right-radius: 4px;
+}
+
+.list-item:not(:last-child) {
+ border-bottom: 1px solid #dbdbdb;
+}
+
+.list-item.is-active {
+ background-color: #3273dc;
+ color: #fff;
+}
+
+a.list-item {
+ background-color: whitesmoke;
+ cursor: pointer;
+}
+
+.media {
+ align-items: flex-start;
+ display: flex;
+ text-align: left;
+}
+
+.media .content:not(:last-child) {
+ margin-bottom: 0.75rem;
+}
+
+.media .media {
+ border-top: 1px solid rgba(219, 219, 219, 0.5);
+ display: flex;
+ padding-top: 0.75rem;
+}
+
+.media .media .content:not(:last-child),
+.media .media .control:not(:last-child) {
+ margin-bottom: 0.5rem;
+}
+
+.media .media .media {
+ padding-top: 0.5rem;
+}
+
+.media .media .media + .media {
+ margin-top: 0.5rem;
+}
+
+.media + .media {
+ border-top: 1px solid rgba(219, 219, 219, 0.5);
+ margin-top: 1rem;
+ padding-top: 1rem;
+}
+
+.media.is-large + .media {
+ margin-top: 1.5rem;
+ padding-top: 1.5rem;
+}
+
+.media-left,
+.media-right {
+ flex-basis: auto;
+ flex-grow: 0;
+ flex-shrink: 0;
+}
+
+.media-left {
+ margin-right: 1rem;
+}
+
+.media-right {
+ margin-left: 1rem;
+}
+
+.media-content {
+ flex-basis: auto;
+ flex-grow: 1;
+ flex-shrink: 1;
+ text-align: left;
+}
+
+@media screen and (max-width: 768px) {
+ .media-content {
+ overflow-x: auto;
+ }
+}
+
+.menu {
+ font-size: 1rem;
+}
+
+.menu.is-small {
+ font-size: 0.75rem;
+}
+
+.menu.is-medium {
+ font-size: 1.25rem;
+}
+
+.menu.is-large {
+ font-size: 1.5rem;
+}
+
+.menu-list {
+ line-height: 1.25;
+}
+
+.menu-list a {
+ border-radius: 2px;
+ color: #4a4a4a;
+ display: block;
+ padding: 0.5em 0.75em;
+}
+
+.menu-list a:hover {
+ background-color: whitesmoke;
+ color: #363636;
+}
+
+.menu-list a.is-active {
+ background-color: #3273dc;
+ color: #fff;
+}
+
+.menu-list li ul {
+ border-left: 1px solid #dbdbdb;
+ margin: 0.75em;
+ padding-left: 0.75em;
+}
+
+.menu-label {
+ color: #7a7a7a;
+ font-size: 0.75em;
+ letter-spacing: 0.1em;
+ text-transform: uppercase;
+}
+
+.menu-label:not(:first-child) {
+ margin-top: 1em;
+}
+
+.menu-label:not(:last-child) {
+ margin-bottom: 1em;
+}
+
+.message {
+ background-color: whitesmoke;
+ border-radius: 4px;
+ font-size: 1rem;
+}
+
+.message strong {
+ color: currentColor;
+}
+
+.message a:not(.button):not(.tag):not(.dropdown-item) {
+ color: currentColor;
+ text-decoration: underline;
+}
+
+.message.is-small {
+ font-size: 0.75rem;
+}
+
+.message.is-medium {
+ font-size: 1.25rem;
+}
+
+.message.is-large {
+ font-size: 1.5rem;
+}
+
+.message.is-white {
+ background-color: white;
+}
+
+.message.is-white .message-header {
+ background-color: white;
+ color: #0a0a0a;
+}
+
+.message.is-white .message-body {
+ border-color: white;
+ color: #4d4d4d;
+}
+
+.message.is-black {
+ background-color: #fafafa;
+}
+
+.message.is-black .message-header {
+ background-color: #0a0a0a;
+ color: white;
+}
+
+.message.is-black .message-body {
+ border-color: #0a0a0a;
+ color: #090909;
+}
+
+.message.is-light {
+ background-color: #fafafa;
+}
+
+.message.is-light .message-header {
+ background-color: whitesmoke;
+ color: #363636;
+}
+
+.message.is-light .message-body {
+ border-color: whitesmoke;
+ color: #505050;
+}
+
+.message.is-dark {
+ background-color: #fafafa;
+}
+
+.message.is-dark .message-header {
+ background-color: #363636;
+ color: whitesmoke;
+}
+
+.message.is-dark .message-body {
+ border-color: #363636;
+ color: #2a2a2a;
+}
+
+.message.is-primary {
+ background-color: #f5fffd;
+}
+
+.message.is-primary .message-header {
+ background-color: #00d1b2;
+ color: #fff;
+}
+
+.message.is-primary .message-body {
+ border-color: #00d1b2;
+ color: #021310;
+}
+
+.message.is-link {
+ background-color: #f6f9fe;
+}
+
+.message.is-link .message-header {
+ background-color: #3273dc;
+ color: #fff;
+}
+
+.message.is-link .message-body {
+ border-color: #3273dc;
+ color: #22509a;
+}
+
+.message.is-info {
+ background-color: #f6fbfe;
+}
+
+.message.is-info .message-header {
+ background-color: #209cee;
+ color: #fff;
+}
+
+.message.is-info .message-body {
+ border-color: #209cee;
+ color: #12537e;
+}
+
+.message.is-success {
+ background-color: #f6fef9;
+}
+
+.message.is-success .message-header {
+ background-color: #23d160;
+ color: #fff;
+}
+
+.message.is-success .message-body {
+ border-color: #23d160;
+ color: #0e301a;
+}
+
+.message.is-warning {
+ background-color: #fffdf5;
+}
+
+.message.is-warning .message-header {
+ background-color: #ffdd57;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.message.is-warning .message-body {
+ border-color: #ffdd57;
+ color: #3b3108;
+}
+
+.message.is-danger {
+ background-color: #fff5f7;
+}
+
+.message.is-danger .message-header {
+ background-color: #ff3860;
+ color: #fff;
+}
+
+.message.is-danger .message-body {
+ border-color: #ff3860;
+ color: #cd0930;
+}
+
+.message-header {
+ align-items: center;
+ background-color: #4a4a4a;
+ border-radius: 4px 4px 0 0;
+ color: #fff;
+ display: flex;
+ font-weight: 700;
+ justify-content: space-between;
+ line-height: 1.25;
+ padding: 0.75em 1em;
+ position: relative;
+}
+
+.message-header .delete {
+ flex-grow: 0;
+ flex-shrink: 0;
+ margin-left: 0.75em;
+}
+
+.message-header + .message-body {
+ border-width: 0;
+ border-top-left-radius: 0;
+ border-top-right-radius: 0;
+}
+
+.message-body {
+ border-color: #dbdbdb;
+ border-radius: 4px;
+ border-style: solid;
+ border-width: 0 0 0 4px;
+ color: #4a4a4a;
+ padding: 1.25em 1.5em;
+}
+
+.message-body code,
+.message-body pre {
+ background-color: white;
+}
+
+.message-body pre code {
+ background-color: transparent;
+}
+
+.modal {
+ align-items: center;
+ display: none;
+ flex-direction: column;
+ justify-content: center;
+ overflow: hidden;
+ position: fixed;
+ z-index: 40;
+}
+
+.modal.is-active {
+ display: flex;
+}
+
+.modal-background {
+ background-color: rgba(10, 10, 10, 0.86);
+}
+
+.modal-content,
+.modal-card {
+ margin: 0 20px;
+ max-height: calc(100vh - 160px);
+ overflow: auto;
+ position: relative;
+ width: 100%;
+}
+
+@media screen and (min-width: 769px), print {
+ .modal-content,
+ .modal-card {
+ margin: 0 auto;
+ max-height: calc(100vh - 40px);
+ width: 640px;
+ }
+}
+
+.modal-close {
+ background: none;
+ height: 40px;
+ position: fixed;
+ right: 20px;
+ top: 20px;
+ width: 40px;
+}
+
+.modal-card {
+ display: flex;
+ flex-direction: column;
+ max-height: calc(100vh - 40px);
+ overflow: hidden;
+ -ms-overflow-y: visible;
+}
+
+.modal-card-head,
+.modal-card-foot {
+ align-items: center;
+ background-color: whitesmoke;
+ display: flex;
+ flex-shrink: 0;
+ justify-content: flex-start;
+ padding: 20px;
+ position: relative;
+}
+
+.modal-card-head {
+ border-bottom: 1px solid #dbdbdb;
+ border-top-left-radius: 6px;
+ border-top-right-radius: 6px;
+}
+
+.modal-card-title {
+ color: #363636;
+ flex-grow: 1;
+ flex-shrink: 0;
+ font-size: 1.5rem;
+ line-height: 1;
+}
+
+.modal-card-foot {
+ border-bottom-left-radius: 6px;
+ border-bottom-right-radius: 6px;
+ border-top: 1px solid #dbdbdb;
+}
+
+.modal-card-foot .button:not(:last-child) {
+ margin-right: 0.5em;
+}
+
+.modal-card-body {
+ -webkit-overflow-scrolling: touch;
+ background-color: white;
+ flex-grow: 1;
+ flex-shrink: 1;
+ overflow: auto;
+ padding: 20px;
+}
+
+.navbar {
+ background-color: white;
+ min-height: 3.25rem;
+ position: relative;
+ z-index: 30;
+}
+
+.navbar.is-white {
+ background-color: white;
+ color: #0a0a0a;
+}
+
+.navbar.is-white .navbar-brand > .navbar-item,
+.navbar.is-white .navbar-brand .navbar-link {
+ color: #0a0a0a;
+}
+
+.navbar.is-white .navbar-brand > a.navbar-item:focus, .navbar.is-white .navbar-brand > a.navbar-item:hover, .navbar.is-white .navbar-brand > a.navbar-item.is-active,
+.navbar.is-white .navbar-brand .navbar-link:focus,
+.navbar.is-white .navbar-brand .navbar-link:hover,
+.navbar.is-white .navbar-brand .navbar-link.is-active {
+ background-color: #f2f2f2;
+ color: #0a0a0a;
+}
+
+.navbar.is-white .navbar-brand .navbar-link::after {
+ border-color: #0a0a0a;
+}
+
+.navbar.is-white .navbar-burger {
+ color: #0a0a0a;
+}
+
+@media screen and (min-width: 1024px) {
+ .navbar.is-white .navbar-start > .navbar-item,
+ .navbar.is-white .navbar-start .navbar-link,
+ .navbar.is-white .navbar-end > .navbar-item,
+ .navbar.is-white .navbar-end .navbar-link {
+ color: #0a0a0a;
+ }
+ .navbar.is-white .navbar-start > a.navbar-item:focus, .navbar.is-white .navbar-start > a.navbar-item:hover, .navbar.is-white .navbar-start > a.navbar-item.is-active,
+ .navbar.is-white .navbar-start .navbar-link:focus,
+ .navbar.is-white .navbar-start .navbar-link:hover,
+ .navbar.is-white .navbar-start .navbar-link.is-active,
+ .navbar.is-white .navbar-end > a.navbar-item:focus,
+ .navbar.is-white .navbar-end > a.navbar-item:hover,
+ .navbar.is-white .navbar-end > a.navbar-item.is-active,
+ .navbar.is-white .navbar-end .navbar-link:focus,
+ .navbar.is-white .navbar-end .navbar-link:hover,
+ .navbar.is-white .navbar-end .navbar-link.is-active {
+ background-color: #f2f2f2;
+ color: #0a0a0a;
+ }
+ .navbar.is-white .navbar-start .navbar-link::after,
+ .navbar.is-white .navbar-end .navbar-link::after {
+ border-color: #0a0a0a;
+ }
+ .navbar.is-white .navbar-item.has-dropdown:focus .navbar-link,
+ .navbar.is-white .navbar-item.has-dropdown:hover .navbar-link,
+ .navbar.is-white .navbar-item.has-dropdown.is-active .navbar-link {
+ background-color: #f2f2f2;
+ color: #0a0a0a;
+ }
+ .navbar.is-white .navbar-dropdown a.navbar-item.is-active {
+ background-color: white;
+ color: #0a0a0a;
+ }
+}
+
+.navbar.is-black {
+ background-color: #0a0a0a;
+ color: white;
+}
+
+.navbar.is-black .navbar-brand > .navbar-item,
+.navbar.is-black .navbar-brand .navbar-link {
+ color: white;
+}
+
+.navbar.is-black .navbar-brand > a.navbar-item:focus, .navbar.is-black .navbar-brand > a.navbar-item:hover, .navbar.is-black .navbar-brand > a.navbar-item.is-active,
+.navbar.is-black .navbar-brand .navbar-link:focus,
+.navbar.is-black .navbar-brand .navbar-link:hover,
+.navbar.is-black .navbar-brand .navbar-link.is-active {
+ background-color: black;
+ color: white;
+}
+
+.navbar.is-black .navbar-brand .navbar-link::after {
+ border-color: white;
+}
+
+.navbar.is-black .navbar-burger {
+ color: white;
+}
+
+@media screen and (min-width: 1024px) {
+ .navbar.is-black .navbar-start > .navbar-item,
+ .navbar.is-black .navbar-start .navbar-link,
+ .navbar.is-black .navbar-end > .navbar-item,
+ .navbar.is-black .navbar-end .navbar-link {
+ color: white;
+ }
+ .navbar.is-black .navbar-start > a.navbar-item:focus, .navbar.is-black .navbar-start > a.navbar-item:hover, .navbar.is-black .navbar-start > a.navbar-item.is-active,
+ .navbar.is-black .navbar-start .navbar-link:focus,
+ .navbar.is-black .navbar-start .navbar-link:hover,
+ .navbar.is-black .navbar-start .navbar-link.is-active,
+ .navbar.is-black .navbar-end > a.navbar-item:focus,
+ .navbar.is-black .navbar-end > a.navbar-item:hover,
+ .navbar.is-black .navbar-end > a.navbar-item.is-active,
+ .navbar.is-black .navbar-end .navbar-link:focus,
+ .navbar.is-black .navbar-end .navbar-link:hover,
+ .navbar.is-black .navbar-end .navbar-link.is-active {
+ background-color: black;
+ color: white;
+ }
+ .navbar.is-black .navbar-start .navbar-link::after,
+ .navbar.is-black .navbar-end .navbar-link::after {
+ border-color: white;
+ }
+ .navbar.is-black .navbar-item.has-dropdown:focus .navbar-link,
+ .navbar.is-black .navbar-item.has-dropdown:hover .navbar-link,
+ .navbar.is-black .navbar-item.has-dropdown.is-active .navbar-link {
+ background-color: black;
+ color: white;
+ }
+ .navbar.is-black .navbar-dropdown a.navbar-item.is-active {
+ background-color: #0a0a0a;
+ color: white;
+ }
+}
+
+.navbar.is-light {
+ background-color: whitesmoke;
+ color: #363636;
+}
+
+.navbar.is-light .navbar-brand > .navbar-item,
+.navbar.is-light .navbar-brand .navbar-link {
+ color: #363636;
+}
+
+.navbar.is-light .navbar-brand > a.navbar-item:focus, .navbar.is-light .navbar-brand > a.navbar-item:hover, .navbar.is-light .navbar-brand > a.navbar-item.is-active,
+.navbar.is-light .navbar-brand .navbar-link:focus,
+.navbar.is-light .navbar-brand .navbar-link:hover,
+.navbar.is-light .navbar-brand .navbar-link.is-active {
+ background-color: #e8e8e8;
+ color: #363636;
+}
+
+.navbar.is-light .navbar-brand .navbar-link::after {
+ border-color: #363636;
+}
+
+.navbar.is-light .navbar-burger {
+ color: #363636;
+}
+
+@media screen and (min-width: 1024px) {
+ .navbar.is-light .navbar-start > .navbar-item,
+ .navbar.is-light .navbar-start .navbar-link,
+ .navbar.is-light .navbar-end > .navbar-item,
+ .navbar.is-light .navbar-end .navbar-link {
+ color: #363636;
+ }
+ .navbar.is-light .navbar-start > a.navbar-item:focus, .navbar.is-light .navbar-start > a.navbar-item:hover, .navbar.is-light .navbar-start > a.navbar-item.is-active,
+ .navbar.is-light .navbar-start .navbar-link:focus,
+ .navbar.is-light .navbar-start .navbar-link:hover,
+ .navbar.is-light .navbar-start .navbar-link.is-active,
+ .navbar.is-light .navbar-end > a.navbar-item:focus,
+ .navbar.is-light .navbar-end > a.navbar-item:hover,
+ .navbar.is-light .navbar-end > a.navbar-item.is-active,
+ .navbar.is-light .navbar-end .navbar-link:focus,
+ .navbar.is-light .navbar-end .navbar-link:hover,
+ .navbar.is-light .navbar-end .navbar-link.is-active {
+ background-color: #e8e8e8;
+ color: #363636;
+ }
+ .navbar.is-light .navbar-start .navbar-link::after,
+ .navbar.is-light .navbar-end .navbar-link::after {
+ border-color: #363636;
+ }
+ .navbar.is-light .navbar-item.has-dropdown:focus .navbar-link,
+ .navbar.is-light .navbar-item.has-dropdown:hover .navbar-link,
+ .navbar.is-light .navbar-item.has-dropdown.is-active .navbar-link {
+ background-color: #e8e8e8;
+ color: #363636;
+ }
+ .navbar.is-light .navbar-dropdown a.navbar-item.is-active {
+ background-color: whitesmoke;
+ color: #363636;
+ }
+}
+
+.navbar.is-dark {
+ background-color: #363636;
+ color: whitesmoke;
+}
+
+.navbar.is-dark .navbar-brand > .navbar-item,
+.navbar.is-dark .navbar-brand .navbar-link {
+ color: whitesmoke;
+}
+
+.navbar.is-dark .navbar-brand > a.navbar-item:focus, .navbar.is-dark .navbar-brand > a.navbar-item:hover, .navbar.is-dark .navbar-brand > a.navbar-item.is-active,
+.navbar.is-dark .navbar-brand .navbar-link:focus,
+.navbar.is-dark .navbar-brand .navbar-link:hover,
+.navbar.is-dark .navbar-brand .navbar-link.is-active {
+ background-color: #292929;
+ color: whitesmoke;
+}
+
+.navbar.is-dark .navbar-brand .navbar-link::after {
+ border-color: whitesmoke;
+}
+
+.navbar.is-dark .navbar-burger {
+ color: whitesmoke;
+}
+
+@media screen and (min-width: 1024px) {
+ .navbar.is-dark .navbar-start > .navbar-item,
+ .navbar.is-dark .navbar-start .navbar-link,
+ .navbar.is-dark .navbar-end > .navbar-item,
+ .navbar.is-dark .navbar-end .navbar-link {
+ color: whitesmoke;
+ }
+ .navbar.is-dark .navbar-start > a.navbar-item:focus, .navbar.is-dark .navbar-start > a.navbar-item:hover, .navbar.is-dark .navbar-start > a.navbar-item.is-active,
+ .navbar.is-dark .navbar-start .navbar-link:focus,
+ .navbar.is-dark .navbar-start .navbar-link:hover,
+ .navbar.is-dark .navbar-start .navbar-link.is-active,
+ .navbar.is-dark .navbar-end > a.navbar-item:focus,
+ .navbar.is-dark .navbar-end > a.navbar-item:hover,
+ .navbar.is-dark .navbar-end > a.navbar-item.is-active,
+ .navbar.is-dark .navbar-end .navbar-link:focus,
+ .navbar.is-dark .navbar-end .navbar-link:hover,
+ .navbar.is-dark .navbar-end .navbar-link.is-active {
+ background-color: #292929;
+ color: whitesmoke;
+ }
+ .navbar.is-dark .navbar-start .navbar-link::after,
+ .navbar.is-dark .navbar-end .navbar-link::after {
+ border-color: whitesmoke;
+ }
+ .navbar.is-dark .navbar-item.has-dropdown:focus .navbar-link,
+ .navbar.is-dark .navbar-item.has-dropdown:hover .navbar-link,
+ .navbar.is-dark .navbar-item.has-dropdown.is-active .navbar-link {
+ background-color: #292929;
+ color: whitesmoke;
+ }
+ .navbar.is-dark .navbar-dropdown a.navbar-item.is-active {
+ background-color: #363636;
+ color: whitesmoke;
+ }
+}
+
+.navbar.is-primary {
+ background-color: #00d1b2;
+ color: #fff;
+}
+
+.navbar.is-primary .navbar-brand > .navbar-item,
+.navbar.is-primary .navbar-brand .navbar-link {
+ color: #fff;
+}
+
+.navbar.is-primary .navbar-brand > a.navbar-item:focus, .navbar.is-primary .navbar-brand > a.navbar-item:hover, .navbar.is-primary .navbar-brand > a.navbar-item.is-active,
+.navbar.is-primary .navbar-brand .navbar-link:focus,
+.navbar.is-primary .navbar-brand .navbar-link:hover,
+.navbar.is-primary .navbar-brand .navbar-link.is-active {
+ background-color: #00b89c;
+ color: #fff;
+}
+
+.navbar.is-primary .navbar-brand .navbar-link::after {
+ border-color: #fff;
+}
+
+.navbar.is-primary .navbar-burger {
+ color: #fff;
+}
+
+@media screen and (min-width: 1024px) {
+ .navbar.is-primary .navbar-start > .navbar-item,
+ .navbar.is-primary .navbar-start .navbar-link,
+ .navbar.is-primary .navbar-end > .navbar-item,
+ .navbar.is-primary .navbar-end .navbar-link {
+ color: #fff;
+ }
+ .navbar.is-primary .navbar-start > a.navbar-item:focus, .navbar.is-primary .navbar-start > a.navbar-item:hover, .navbar.is-primary .navbar-start > a.navbar-item.is-active,
+ .navbar.is-primary .navbar-start .navbar-link:focus,
+ .navbar.is-primary .navbar-start .navbar-link:hover,
+ .navbar.is-primary .navbar-start .navbar-link.is-active,
+ .navbar.is-primary .navbar-end > a.navbar-item:focus,
+ .navbar.is-primary .navbar-end > a.navbar-item:hover,
+ .navbar.is-primary .navbar-end > a.navbar-item.is-active,
+ .navbar.is-primary .navbar-end .navbar-link:focus,
+ .navbar.is-primary .navbar-end .navbar-link:hover,
+ .navbar.is-primary .navbar-end .navbar-link.is-active {
+ background-color: #00b89c;
+ color: #fff;
+ }
+ .navbar.is-primary .navbar-start .navbar-link::after,
+ .navbar.is-primary .navbar-end .navbar-link::after {
+ border-color: #fff;
+ }
+ .navbar.is-primary .navbar-item.has-dropdown:focus .navbar-link,
+ .navbar.is-primary .navbar-item.has-dropdown:hover .navbar-link,
+ .navbar.is-primary .navbar-item.has-dropdown.is-active .navbar-link {
+ background-color: #00b89c;
+ color: #fff;
+ }
+ .navbar.is-primary .navbar-dropdown a.navbar-item.is-active {
+ background-color: #00d1b2;
+ color: #fff;
+ }
+}
+
+.navbar.is-link {
+ background-color: #3273dc;
+ color: #fff;
+}
+
+.navbar.is-link .navbar-brand > .navbar-item,
+.navbar.is-link .navbar-brand .navbar-link {
+ color: #fff;
+}
+
+.navbar.is-link .navbar-brand > a.navbar-item:focus, .navbar.is-link .navbar-brand > a.navbar-item:hover, .navbar.is-link .navbar-brand > a.navbar-item.is-active,
+.navbar.is-link .navbar-brand .navbar-link:focus,
+.navbar.is-link .navbar-brand .navbar-link:hover,
+.navbar.is-link .navbar-brand .navbar-link.is-active {
+ background-color: #2366d1;
+ color: #fff;
+}
+
+.navbar.is-link .navbar-brand .navbar-link::after {
+ border-color: #fff;
+}
+
+.navbar.is-link .navbar-burger {
+ color: #fff;
+}
+
+@media screen and (min-width: 1024px) {
+ .navbar.is-link .navbar-start > .navbar-item,
+ .navbar.is-link .navbar-start .navbar-link,
+ .navbar.is-link .navbar-end > .navbar-item,
+ .navbar.is-link .navbar-end .navbar-link {
+ color: #fff;
+ }
+ .navbar.is-link .navbar-start > a.navbar-item:focus, .navbar.is-link .navbar-start > a.navbar-item:hover, .navbar.is-link .navbar-start > a.navbar-item.is-active,
+ .navbar.is-link .navbar-start .navbar-link:focus,
+ .navbar.is-link .navbar-start .navbar-link:hover,
+ .navbar.is-link .navbar-start .navbar-link.is-active,
+ .navbar.is-link .navbar-end > a.navbar-item:focus,
+ .navbar.is-link .navbar-end > a.navbar-item:hover,
+ .navbar.is-link .navbar-end > a.navbar-item.is-active,
+ .navbar.is-link .navbar-end .navbar-link:focus,
+ .navbar.is-link .navbar-end .navbar-link:hover,
+ .navbar.is-link .navbar-end .navbar-link.is-active {
+ background-color: #2366d1;
+ color: #fff;
+ }
+ .navbar.is-link .navbar-start .navbar-link::after,
+ .navbar.is-link .navbar-end .navbar-link::after {
+ border-color: #fff;
+ }
+ .navbar.is-link .navbar-item.has-dropdown:focus .navbar-link,
+ .navbar.is-link .navbar-item.has-dropdown:hover .navbar-link,
+ .navbar.is-link .navbar-item.has-dropdown.is-active .navbar-link {
+ background-color: #2366d1;
+ color: #fff;
+ }
+ .navbar.is-link .navbar-dropdown a.navbar-item.is-active {
+ background-color: #3273dc;
+ color: #fff;
+ }
+}
+
+.navbar.is-info {
+ background-color: #209cee;
+ color: #fff;
+}
+
+.navbar.is-info .navbar-brand > .navbar-item,
+.navbar.is-info .navbar-brand .navbar-link {
+ color: #fff;
+}
+
+.navbar.is-info .navbar-brand > a.navbar-item:focus, .navbar.is-info .navbar-brand > a.navbar-item:hover, .navbar.is-info .navbar-brand > a.navbar-item.is-active,
+.navbar.is-info .navbar-brand .navbar-link:focus,
+.navbar.is-info .navbar-brand .navbar-link:hover,
+.navbar.is-info .navbar-brand .navbar-link.is-active {
+ background-color: #118fe4;
+ color: #fff;
+}
+
+.navbar.is-info .navbar-brand .navbar-link::after {
+ border-color: #fff;
+}
+
+.navbar.is-info .navbar-burger {
+ color: #fff;
+}
+
+@media screen and (min-width: 1024px) {
+ .navbar.is-info .navbar-start > .navbar-item,
+ .navbar.is-info .navbar-start .navbar-link,
+ .navbar.is-info .navbar-end > .navbar-item,
+ .navbar.is-info .navbar-end .navbar-link {
+ color: #fff;
+ }
+ .navbar.is-info .navbar-start > a.navbar-item:focus, .navbar.is-info .navbar-start > a.navbar-item:hover, .navbar.is-info .navbar-start > a.navbar-item.is-active,
+ .navbar.is-info .navbar-start .navbar-link:focus,
+ .navbar.is-info .navbar-start .navbar-link:hover,
+ .navbar.is-info .navbar-start .navbar-link.is-active,
+ .navbar.is-info .navbar-end > a.navbar-item:focus,
+ .navbar.is-info .navbar-end > a.navbar-item:hover,
+ .navbar.is-info .navbar-end > a.navbar-item.is-active,
+ .navbar.is-info .navbar-end .navbar-link:focus,
+ .navbar.is-info .navbar-end .navbar-link:hover,
+ .navbar.is-info .navbar-end .navbar-link.is-active {
+ background-color: #118fe4;
+ color: #fff;
+ }
+ .navbar.is-info .navbar-start .navbar-link::after,
+ .navbar.is-info .navbar-end .navbar-link::after {
+ border-color: #fff;
+ }
+ .navbar.is-info .navbar-item.has-dropdown:focus .navbar-link,
+ .navbar.is-info .navbar-item.has-dropdown:hover .navbar-link,
+ .navbar.is-info .navbar-item.has-dropdown.is-active .navbar-link {
+ background-color: #118fe4;
+ color: #fff;
+ }
+ .navbar.is-info .navbar-dropdown a.navbar-item.is-active {
+ background-color: #209cee;
+ color: #fff;
+ }
+}
+
+.navbar.is-success {
+ background-color: #23d160;
+ color: #fff;
+}
+
+.navbar.is-success .navbar-brand > .navbar-item,
+.navbar.is-success .navbar-brand .navbar-link {
+ color: #fff;
+}
+
+.navbar.is-success .navbar-brand > a.navbar-item:focus, .navbar.is-success .navbar-brand > a.navbar-item:hover, .navbar.is-success .navbar-brand > a.navbar-item.is-active,
+.navbar.is-success .navbar-brand .navbar-link:focus,
+.navbar.is-success .navbar-brand .navbar-link:hover,
+.navbar.is-success .navbar-brand .navbar-link.is-active {
+ background-color: #20bc56;
+ color: #fff;
+}
+
+.navbar.is-success .navbar-brand .navbar-link::after {
+ border-color: #fff;
+}
+
+.navbar.is-success .navbar-burger {
+ color: #fff;
+}
+
+@media screen and (min-width: 1024px) {
+ .navbar.is-success .navbar-start > .navbar-item,
+ .navbar.is-success .navbar-start .navbar-link,
+ .navbar.is-success .navbar-end > .navbar-item,
+ .navbar.is-success .navbar-end .navbar-link {
+ color: #fff;
+ }
+ .navbar.is-success .navbar-start > a.navbar-item:focus, .navbar.is-success .navbar-start > a.navbar-item:hover, .navbar.is-success .navbar-start > a.navbar-item.is-active,
+ .navbar.is-success .navbar-start .navbar-link:focus,
+ .navbar.is-success .navbar-start .navbar-link:hover,
+ .navbar.is-success .navbar-start .navbar-link.is-active,
+ .navbar.is-success .navbar-end > a.navbar-item:focus,
+ .navbar.is-success .navbar-end > a.navbar-item:hover,
+ .navbar.is-success .navbar-end > a.navbar-item.is-active,
+ .navbar.is-success .navbar-end .navbar-link:focus,
+ .navbar.is-success .navbar-end .navbar-link:hover,
+ .navbar.is-success .navbar-end .navbar-link.is-active {
+ background-color: #20bc56;
+ color: #fff;
+ }
+ .navbar.is-success .navbar-start .navbar-link::after,
+ .navbar.is-success .navbar-end .navbar-link::after {
+ border-color: #fff;
+ }
+ .navbar.is-success .navbar-item.has-dropdown:focus .navbar-link,
+ .navbar.is-success .navbar-item.has-dropdown:hover .navbar-link,
+ .navbar.is-success .navbar-item.has-dropdown.is-active .navbar-link {
+ background-color: #20bc56;
+ color: #fff;
+ }
+ .navbar.is-success .navbar-dropdown a.navbar-item.is-active {
+ background-color: #23d160;
+ color: #fff;
+ }
+}
+
+.navbar.is-warning {
+ background-color: #ffdd57;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.navbar.is-warning .navbar-brand > .navbar-item,
+.navbar.is-warning .navbar-brand .navbar-link {
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.navbar.is-warning .navbar-brand > a.navbar-item:focus, .navbar.is-warning .navbar-brand > a.navbar-item:hover, .navbar.is-warning .navbar-brand > a.navbar-item.is-active,
+.navbar.is-warning .navbar-brand .navbar-link:focus,
+.navbar.is-warning .navbar-brand .navbar-link:hover,
+.navbar.is-warning .navbar-brand .navbar-link.is-active {
+ background-color: #ffd83d;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.navbar.is-warning .navbar-brand .navbar-link::after {
+ border-color: rgba(0, 0, 0, 0.7);
+}
+
+.navbar.is-warning .navbar-burger {
+ color: rgba(0, 0, 0, 0.7);
+}
+
+@media screen and (min-width: 1024px) {
+ .navbar.is-warning .navbar-start > .navbar-item,
+ .navbar.is-warning .navbar-start .navbar-link,
+ .navbar.is-warning .navbar-end > .navbar-item,
+ .navbar.is-warning .navbar-end .navbar-link {
+ color: rgba(0, 0, 0, 0.7);
+ }
+ .navbar.is-warning .navbar-start > a.navbar-item:focus, .navbar.is-warning .navbar-start > a.navbar-item:hover, .navbar.is-warning .navbar-start > a.navbar-item.is-active,
+ .navbar.is-warning .navbar-start .navbar-link:focus,
+ .navbar.is-warning .navbar-start .navbar-link:hover,
+ .navbar.is-warning .navbar-start .navbar-link.is-active,
+ .navbar.is-warning .navbar-end > a.navbar-item:focus,
+ .navbar.is-warning .navbar-end > a.navbar-item:hover,
+ .navbar.is-warning .navbar-end > a.navbar-item.is-active,
+ .navbar.is-warning .navbar-end .navbar-link:focus,
+ .navbar.is-warning .navbar-end .navbar-link:hover,
+ .navbar.is-warning .navbar-end .navbar-link.is-active {
+ background-color: #ffd83d;
+ color: rgba(0, 0, 0, 0.7);
+ }
+ .navbar.is-warning .navbar-start .navbar-link::after,
+ .navbar.is-warning .navbar-end .navbar-link::after {
+ border-color: rgba(0, 0, 0, 0.7);
+ }
+ .navbar.is-warning .navbar-item.has-dropdown:focus .navbar-link,
+ .navbar.is-warning .navbar-item.has-dropdown:hover .navbar-link,
+ .navbar.is-warning .navbar-item.has-dropdown.is-active .navbar-link {
+ background-color: #ffd83d;
+ color: rgba(0, 0, 0, 0.7);
+ }
+ .navbar.is-warning .navbar-dropdown a.navbar-item.is-active {
+ background-color: #ffdd57;
+ color: rgba(0, 0, 0, 0.7);
+ }
+}
+
+.navbar.is-danger {
+ background-color: #ff3860;
+ color: #fff;
+}
+
+.navbar.is-danger .navbar-brand > .navbar-item,
+.navbar.is-danger .navbar-brand .navbar-link {
+ color: #fff;
+}
+
+.navbar.is-danger .navbar-brand > a.navbar-item:focus, .navbar.is-danger .navbar-brand > a.navbar-item:hover, .navbar.is-danger .navbar-brand > a.navbar-item.is-active,
+.navbar.is-danger .navbar-brand .navbar-link:focus,
+.navbar.is-danger .navbar-brand .navbar-link:hover,
+.navbar.is-danger .navbar-brand .navbar-link.is-active {
+ background-color: #ff1f4b;
+ color: #fff;
+}
+
+.navbar.is-danger .navbar-brand .navbar-link::after {
+ border-color: #fff;
+}
+
+.navbar.is-danger .navbar-burger {
+ color: #fff;
+}
+
+@media screen and (min-width: 1024px) {
+ .navbar.is-danger .navbar-start > .navbar-item,
+ .navbar.is-danger .navbar-start .navbar-link,
+ .navbar.is-danger .navbar-end > .navbar-item,
+ .navbar.is-danger .navbar-end .navbar-link {
+ color: #fff;
+ }
+ .navbar.is-danger .navbar-start > a.navbar-item:focus, .navbar.is-danger .navbar-start > a.navbar-item:hover, .navbar.is-danger .navbar-start > a.navbar-item.is-active,
+ .navbar.is-danger .navbar-start .navbar-link:focus,
+ .navbar.is-danger .navbar-start .navbar-link:hover,
+ .navbar.is-danger .navbar-start .navbar-link.is-active,
+ .navbar.is-danger .navbar-end > a.navbar-item:focus,
+ .navbar.is-danger .navbar-end > a.navbar-item:hover,
+ .navbar.is-danger .navbar-end > a.navbar-item.is-active,
+ .navbar.is-danger .navbar-end .navbar-link:focus,
+ .navbar.is-danger .navbar-end .navbar-link:hover,
+ .navbar.is-danger .navbar-end .navbar-link.is-active {
+ background-color: #ff1f4b;
+ color: #fff;
+ }
+ .navbar.is-danger .navbar-start .navbar-link::after,
+ .navbar.is-danger .navbar-end .navbar-link::after {
+ border-color: #fff;
+ }
+ .navbar.is-danger .navbar-item.has-dropdown:focus .navbar-link,
+ .navbar.is-danger .navbar-item.has-dropdown:hover .navbar-link,
+ .navbar.is-danger .navbar-item.has-dropdown.is-active .navbar-link {
+ background-color: #ff1f4b;
+ color: #fff;
+ }
+ .navbar.is-danger .navbar-dropdown a.navbar-item.is-active {
+ background-color: #ff3860;
+ color: #fff;
+ }
+}
+
+.navbar > .container {
+ align-items: stretch;
+ display: flex;
+ min-height: 3.25rem;
+ width: 100%;
+}
+
+.navbar.has-shadow {
+ box-shadow: 0 2px 0 0 whitesmoke;
+}
+
+.navbar.is-fixed-bottom, .navbar.is-fixed-top {
+ left: 0;
+ position: fixed;
+ right: 0;
+ z-index: 30;
+}
+
+.navbar.is-fixed-bottom {
+ bottom: 0;
+}
+
+.navbar.is-fixed-bottom.has-shadow {
+ box-shadow: 0 -2px 0 0 whitesmoke;
+}
+
+.navbar.is-fixed-top {
+ top: 0;
+}
+
+html.has-navbar-fixed-top,
+body.has-navbar-fixed-top {
+ padding-top: 3.25rem;
+}
+
+html.has-navbar-fixed-bottom,
+body.has-navbar-fixed-bottom {
+ padding-bottom: 3.25rem;
+}
+
+.navbar-brand,
+.navbar-tabs {
+ align-items: stretch;
+ display: flex;
+ flex-shrink: 0;
+ min-height: 3.25rem;
+}
+
+.navbar-brand a.navbar-item:focus, .navbar-brand a.navbar-item:hover {
+ background-color: transparent;
+}
+
+.navbar-tabs {
+ -webkit-overflow-scrolling: touch;
+ max-width: 100vw;
+ overflow-x: auto;
+ overflow-y: hidden;
+}
+
+.navbar-burger {
+ color: #4a4a4a;
+ cursor: pointer;
+ display: block;
+ height: 3.25rem;
+ position: relative;
+ width: 3.25rem;
+ margin-left: auto;
+}
+
+.navbar-burger span {
+ background-color: currentColor;
+ display: block;
+ height: 1px;
+ left: calc(50% - 8px);
+ position: absolute;
+ -webkit-transform-origin: center;
+ transform-origin: center;
+ transition-duration: 86ms;
+ transition-property: background-color, opacity, -webkit-transform;
+ transition-property: background-color, opacity, transform;
+ transition-property: background-color, opacity, transform, -webkit-transform;
+ transition-timing-function: ease-out;
+ width: 16px;
+}
+
+.navbar-burger span:nth-child(1) {
+ top: calc(50% - 6px);
+}
+
+.navbar-burger span:nth-child(2) {
+ top: calc(50% - 1px);
+}
+
+.navbar-burger span:nth-child(3) {
+ top: calc(50% + 4px);
+}
+
+.navbar-burger:hover {
+ background-color: rgba(0, 0, 0, 0.05);
+}
+
+.navbar-burger.is-active span:nth-child(1) {
+ -webkit-transform: translateY(5px) rotate(45deg);
+ transform: translateY(5px) rotate(45deg);
+}
+
+.navbar-burger.is-active span:nth-child(2) {
+ opacity: 0;
+}
+
+.navbar-burger.is-active span:nth-child(3) {
+ -webkit-transform: translateY(-5px) rotate(-45deg);
+ transform: translateY(-5px) rotate(-45deg);
+}
+
+.navbar-menu {
+ display: none;
+}
+
+.navbar-item,
+.navbar-link {
+ color: #4a4a4a;
+ display: block;
+ line-height: 1.5;
+ padding: 0.5rem 0.75rem;
+ position: relative;
+}
+
+.navbar-item .icon:only-child,
+.navbar-link .icon:only-child {
+ margin-left: -0.25rem;
+ margin-right: -0.25rem;
+}
+
+a.navbar-item,
+.navbar-link {
+ cursor: pointer;
+}
+
+a.navbar-item:focus, a.navbar-item:focus-within, a.navbar-item:hover, a.navbar-item.is-active,
+.navbar-link:focus,
+.navbar-link:focus-within,
+.navbar-link:hover,
+.navbar-link.is-active {
+ background-color: #fafafa;
+ color: #3273dc;
+}
+
+.navbar-item {
+ display: block;
+ flex-grow: 0;
+ flex-shrink: 0;
+}
+
+.navbar-item img {
+ max-height: 1.75rem;
+}
+
+.navbar-item.has-dropdown {
+ padding: 0;
+}
+
+.navbar-item.is-expanded {
+ flex-grow: 1;
+ flex-shrink: 1;
+}
+
+.navbar-item.is-tab {
+ border-bottom: 1px solid transparent;
+ min-height: 3.25rem;
+ padding-bottom: calc(0.5rem - 1px);
+}
+
+.navbar-item.is-tab:focus, .navbar-item.is-tab:hover {
+ background-color: transparent;
+ border-bottom-color: #3273dc;
+}
+
+.navbar-item.is-tab.is-active {
+ background-color: transparent;
+ border-bottom-color: #3273dc;
+ border-bottom-style: solid;
+ border-bottom-width: 3px;
+ color: #3273dc;
+ padding-bottom: calc(0.5rem - 3px);
+}
+
+.navbar-content {
+ flex-grow: 1;
+ flex-shrink: 1;
+}
+
+.navbar-link:not(.is-arrowless) {
+ padding-right: 2.5em;
+}
+
+.navbar-link:not(.is-arrowless)::after {
+ border-color: #3273dc;
+ margin-top: -0.375em;
+ right: 1.125em;
+}
+
+.navbar-dropdown {
+ font-size: 0.875rem;
+ padding-bottom: 0.5rem;
+ padding-top: 0.5rem;
+}
+
+.navbar-dropdown .navbar-item {
+ padding-left: 1.5rem;
+ padding-right: 1.5rem;
+}
+
+.navbar-divider {
+ background-color: whitesmoke;
+ border: none;
+ display: none;
+ height: 2px;
+ margin: 0.5rem 0;
+}
+
+@media screen and (max-width: 1023px) {
+ .navbar > .container {
+ display: block;
+ }
+ .navbar-brand .navbar-item,
+ .navbar-tabs .navbar-item {
+ align-items: center;
+ display: flex;
+ }
+ .navbar-link::after {
+ display: none;
+ }
+ .navbar-menu {
+ background-color: white;
+ box-shadow: 0 8px 16px rgba(10, 10, 10, 0.1);
+ padding: 0.5rem 0;
+ }
+ .navbar-menu.is-active {
+ display: block;
+ }
+ .navbar.is-fixed-bottom-touch, .navbar.is-fixed-top-touch {
+ left: 0;
+ position: fixed;
+ right: 0;
+ z-index: 30;
+ }
+ .navbar.is-fixed-bottom-touch {
+ bottom: 0;
+ }
+ .navbar.is-fixed-bottom-touch.has-shadow {
+ box-shadow: 0 -2px 3px rgba(10, 10, 10, 0.1);
+ }
+ .navbar.is-fixed-top-touch {
+ top: 0;
+ }
+ .navbar.is-fixed-top .navbar-menu, .navbar.is-fixed-top-touch .navbar-menu {
+ -webkit-overflow-scrolling: touch;
+ max-height: calc(100vh - 3.25rem);
+ overflow: auto;
+ }
+ html.has-navbar-fixed-top-touch,
+ body.has-navbar-fixed-top-touch {
+ padding-top: 3.25rem;
+ }
+ html.has-navbar-fixed-bottom-touch,
+ body.has-navbar-fixed-bottom-touch {
+ padding-bottom: 3.25rem;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .navbar,
+ .navbar-menu,
+ .navbar-start,
+ .navbar-end {
+ align-items: stretch;
+ display: flex;
+ }
+ .navbar {
+ min-height: 3.25rem;
+ }
+ .navbar.is-spaced {
+ padding: 1rem 2rem;
+ }
+ .navbar.is-spaced .navbar-start,
+ .navbar.is-spaced .navbar-end {
+ align-items: center;
+ }
+ .navbar.is-spaced a.navbar-item,
+ .navbar.is-spaced .navbar-link {
+ border-radius: 4px;
+ }
+ .navbar.is-transparent a.navbar-item:focus, .navbar.is-transparent a.navbar-item:hover, .navbar.is-transparent a.navbar-item.is-active,
+ .navbar.is-transparent .navbar-link:focus,
+ .navbar.is-transparent .navbar-link:hover,
+ .navbar.is-transparent .navbar-link.is-active {
+ background-color: transparent !important;
+ }
+ .navbar.is-transparent .navbar-item.has-dropdown.is-active .navbar-link, .navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:focus .navbar-link, .navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:focus-within .navbar-link, .navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:hover .navbar-link {
+ background-color: transparent !important;
+ }
+ .navbar.is-transparent .navbar-dropdown a.navbar-item:focus, .navbar.is-transparent .navbar-dropdown a.navbar-item:hover {
+ background-color: whitesmoke;
+ color: #0a0a0a;
+ }
+ .navbar.is-transparent .navbar-dropdown a.navbar-item.is-active {
+ background-color: whitesmoke;
+ color: #3273dc;
+ }
+ .navbar-burger {
+ display: none;
+ }
+ .navbar-item,
+ .navbar-link {
+ align-items: center;
+ display: flex;
+ }
+ .navbar-item {
+ display: flex;
+ }
+ .navbar-item.has-dropdown {
+ align-items: stretch;
+ }
+ .navbar-item.has-dropdown-up .navbar-link::after {
+ -webkit-transform: rotate(135deg) translate(0.25em, -0.25em);
+ transform: rotate(135deg) translate(0.25em, -0.25em);
+ }
+ .navbar-item.has-dropdown-up .navbar-dropdown {
+ border-bottom: 2px solid #dbdbdb;
+ border-radius: 6px 6px 0 0;
+ border-top: none;
+ bottom: 100%;
+ box-shadow: 0 -8px 8px rgba(10, 10, 10, 0.1);
+ top: auto;
+ }
+ .navbar-item.is-active .navbar-dropdown, .navbar-item.is-hoverable:focus .navbar-dropdown, .navbar-item.is-hoverable:focus-within .navbar-dropdown, .navbar-item.is-hoverable:hover .navbar-dropdown {
+ display: block;
+ }
+ .navbar.is-spaced .navbar-item.is-active .navbar-dropdown, .navbar-item.is-active .navbar-dropdown.is-boxed, .navbar.is-spaced .navbar-item.is-hoverable:focus .navbar-dropdown, .navbar-item.is-hoverable:focus .navbar-dropdown.is-boxed, .navbar.is-spaced .navbar-item.is-hoverable:focus-within .navbar-dropdown, .navbar-item.is-hoverable:focus-within .navbar-dropdown.is-boxed, .navbar.is-spaced .navbar-item.is-hoverable:hover .navbar-dropdown, .navbar-item.is-hoverable:hover .navbar-dropdown.is-boxed {
+ opacity: 1;
+ pointer-events: auto;
+ -webkit-transform: translateY(0);
+ transform: translateY(0);
+ }
+ .navbar-menu {
+ flex-grow: 1;
+ flex-shrink: 0;
+ }
+ .navbar-start {
+ justify-content: flex-start;
+ margin-right: auto;
+ }
+ .navbar-end {
+ justify-content: flex-end;
+ margin-left: auto;
+ }
+ .navbar-dropdown {
+ background-color: white;
+ border-bottom-left-radius: 6px;
+ border-bottom-right-radius: 6px;
+ border-top: 2px solid #dbdbdb;
+ box-shadow: 0 8px 8px rgba(10, 10, 10, 0.1);
+ display: none;
+ font-size: 0.875rem;
+ left: 0;
+ min-width: 100%;
+ position: absolute;
+ top: 100%;
+ z-index: 20;
+ }
+ .navbar-dropdown .navbar-item {
+ padding: 0.375rem 1rem;
+ white-space: nowrap;
+ }
+ .navbar-dropdown a.navbar-item {
+ padding-right: 3rem;
+ }
+ .navbar-dropdown a.navbar-item:focus, .navbar-dropdown a.navbar-item:hover {
+ background-color: whitesmoke;
+ color: #0a0a0a;
+ }
+ .navbar-dropdown a.navbar-item.is-active {
+ background-color: whitesmoke;
+ color: #3273dc;
+ }
+ .navbar.is-spaced .navbar-dropdown, .navbar-dropdown.is-boxed {
+ border-radius: 6px;
+ border-top: none;
+ box-shadow: 0 8px 8px rgba(10, 10, 10, 0.1), 0 0 0 1px rgba(10, 10, 10, 0.1);
+ display: block;
+ opacity: 0;
+ pointer-events: none;
+ top: calc(100% + (-4px));
+ -webkit-transform: translateY(-5px);
+ transform: translateY(-5px);
+ transition-duration: 86ms;
+ transition-property: opacity, -webkit-transform;
+ transition-property: opacity, transform;
+ transition-property: opacity, transform, -webkit-transform;
+ }
+ .navbar-dropdown.is-right {
+ left: auto;
+ right: 0;
+ }
+ .navbar-divider {
+ display: block;
+ }
+ .navbar > .container .navbar-brand,
+ .container > .navbar .navbar-brand {
+ margin-left: -.75rem;
+ }
+ .navbar > .container .navbar-menu,
+ .container > .navbar .navbar-menu {
+ margin-right: -.75rem;
+ }
+ .navbar.is-fixed-bottom-desktop, .navbar.is-fixed-top-desktop {
+ left: 0;
+ position: fixed;
+ right: 0;
+ z-index: 30;
+ }
+ .navbar.is-fixed-bottom-desktop {
+ bottom: 0;
+ }
+ .navbar.is-fixed-bottom-desktop.has-shadow {
+ box-shadow: 0 -2px 3px rgba(10, 10, 10, 0.1);
+ }
+ .navbar.is-fixed-top-desktop {
+ top: 0;
+ }
+ html.has-navbar-fixed-top-desktop,
+ body.has-navbar-fixed-top-desktop {
+ padding-top: 3.25rem;
+ }
+ html.has-navbar-fixed-bottom-desktop,
+ body.has-navbar-fixed-bottom-desktop {
+ padding-bottom: 3.25rem;
+ }
+ html.has-spaced-navbar-fixed-top,
+ body.has-spaced-navbar-fixed-top {
+ padding-top: 5.25rem;
+ }
+ html.has-spaced-navbar-fixed-bottom,
+ body.has-spaced-navbar-fixed-bottom {
+ padding-bottom: 5.25rem;
+ }
+ a.navbar-item.is-active,
+ .navbar-link.is-active {
+ color: #0a0a0a;
+ }
+ a.navbar-item.is-active:not(:focus):not(:hover),
+ .navbar-link.is-active:not(:focus):not(:hover) {
+ background-color: transparent;
+ }
+ .navbar-item.has-dropdown:focus .navbar-link, .navbar-item.has-dropdown:hover .navbar-link, .navbar-item.has-dropdown.is-active .navbar-link {
+ background-color: #fafafa;
+ }
+}
+
+.hero.is-fullheight-with-navbar {
+ min-height: calc(100vh - 3.25rem);
+}
+
+.pagination {
+ font-size: 1rem;
+ margin: -0.25rem;
+}
+
+.pagination.is-small {
+ font-size: 0.75rem;
+}
+
+.pagination.is-medium {
+ font-size: 1.25rem;
+}
+
+.pagination.is-large {
+ font-size: 1.5rem;
+}
+
+.pagination.is-rounded .pagination-previous,
+.pagination.is-rounded .pagination-next {
+ padding-left: 1em;
+ padding-right: 1em;
+ border-radius: 290486px;
+}
+
+.pagination.is-rounded .pagination-link {
+ border-radius: 290486px;
+}
+
+.pagination,
+.pagination-list {
+ align-items: center;
+ display: flex;
+ justify-content: center;
+ text-align: center;
+}
+
+.pagination-previous,
+.pagination-next,
+.pagination-link,
+.pagination-ellipsis {
+ font-size: 1em;
+ justify-content: center;
+ margin: 0.25rem;
+ padding-left: 0.5em;
+ padding-right: 0.5em;
+ text-align: center;
+}
+
+.pagination-previous,
+.pagination-next,
+.pagination-link {
+ border-color: #dbdbdb;
+ color: #363636;
+ min-width: 2.25em;
+}
+
+.pagination-previous:hover,
+.pagination-next:hover,
+.pagination-link:hover {
+ border-color: #b5b5b5;
+ color: #363636;
+}
+
+.pagination-previous:focus,
+.pagination-next:focus,
+.pagination-link:focus {
+ border-color: #3273dc;
+}
+
+.pagination-previous:active,
+.pagination-next:active,
+.pagination-link:active {
+ box-shadow: inset 0 1px 2px rgba(10, 10, 10, 0.2);
+}
+
+.pagination-previous[disabled],
+.pagination-next[disabled],
+.pagination-link[disabled] {
+ background-color: #dbdbdb;
+ border-color: #dbdbdb;
+ box-shadow: none;
+ color: #7a7a7a;
+ opacity: 0.5;
+}
+
+.pagination-previous,
+.pagination-next {
+ padding-left: 0.75em;
+ padding-right: 0.75em;
+ white-space: nowrap;
+}
+
+.pagination-link.is-current {
+ background-color: #3273dc;
+ border-color: #3273dc;
+ color: #fff;
+}
+
+.pagination-ellipsis {
+ color: #b5b5b5;
+ pointer-events: none;
+}
+
+.pagination-list {
+ flex-wrap: wrap;
+}
+
+@media screen and (max-width: 768px) {
+ .pagination {
+ flex-wrap: wrap;
+ }
+ .pagination-previous,
+ .pagination-next {
+ flex-grow: 1;
+ flex-shrink: 1;
+ }
+ .pagination-list li {
+ flex-grow: 1;
+ flex-shrink: 1;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .pagination-list {
+ flex-grow: 1;
+ flex-shrink: 1;
+ justify-content: flex-start;
+ order: 1;
+ }
+ .pagination-previous {
+ order: 2;
+ }
+ .pagination-next {
+ order: 3;
+ }
+ .pagination {
+ justify-content: space-between;
+ }
+ .pagination.is-centered .pagination-previous {
+ order: 1;
+ }
+ .pagination.is-centered .pagination-list {
+ justify-content: center;
+ order: 2;
+ }
+ .pagination.is-centered .pagination-next {
+ order: 3;
+ }
+ .pagination.is-right .pagination-previous {
+ order: 1;
+ }
+ .pagination.is-right .pagination-next {
+ order: 2;
+ }
+ .pagination.is-right .pagination-list {
+ justify-content: flex-end;
+ order: 3;
+ }
+}
+
+.panel {
+ font-size: 1rem;
+}
+
+.panel:not(:last-child) {
+ margin-bottom: 1.5rem;
+}
+
+.panel-heading,
+.panel-tabs,
+.panel-block {
+ border-bottom: 1px solid #dbdbdb;
+ border-left: 1px solid #dbdbdb;
+ border-right: 1px solid #dbdbdb;
+}
+
+.panel-heading:first-child,
+.panel-tabs:first-child,
+.panel-block:first-child {
+ border-top: 1px solid #dbdbdb;
+}
+
+.panel-heading {
+ background-color: whitesmoke;
+ border-radius: 4px 4px 0 0;
+ color: #363636;
+ font-size: 1.25em;
+ font-weight: 300;
+ line-height: 1.25;
+ padding: 0.5em 0.75em;
+}
+
+.panel-tabs {
+ align-items: flex-end;
+ display: flex;
+ font-size: 0.875em;
+ justify-content: center;
+}
+
+.panel-tabs a {
+ border-bottom: 1px solid #dbdbdb;
+ margin-bottom: -1px;
+ padding: 0.5em;
+}
+
+.panel-tabs a.is-active {
+ border-bottom-color: #4a4a4a;
+ color: #363636;
+}
+
+.panel-list a {
+ color: #4a4a4a;
+}
+
+.panel-list a:hover {
+ color: #3273dc;
+}
+
+.panel-block {
+ align-items: center;
+ color: #363636;
+ display: flex;
+ justify-content: flex-start;
+ padding: 0.5em 0.75em;
+}
+
+.panel-block input[type="checkbox"] {
+ margin-right: 0.75em;
+}
+
+.panel-block > .control {
+ flex-grow: 1;
+ flex-shrink: 1;
+ width: 100%;
+}
+
+.panel-block.is-wrapped {
+ flex-wrap: wrap;
+}
+
+.panel-block.is-active {
+ border-left-color: #3273dc;
+ color: #363636;
+}
+
+.panel-block.is-active .panel-icon {
+ color: #3273dc;
+}
+
+a.panel-block,
+label.panel-block {
+ cursor: pointer;
+}
+
+a.panel-block:hover,
+label.panel-block:hover {
+ background-color: whitesmoke;
+}
+
+.panel-icon {
+ display: inline-block;
+ font-size: 14px;
+ height: 1em;
+ line-height: 1em;
+ text-align: center;
+ vertical-align: top;
+ width: 1em;
+ color: #7a7a7a;
+ margin-right: 0.75em;
+}
+
+.panel-icon .fa {
+ font-size: inherit;
+ line-height: inherit;
+}
+
+.tabs {
+ -webkit-overflow-scrolling: touch;
+ align-items: stretch;
+ display: flex;
+ font-size: 1rem;
+ justify-content: space-between;
+ overflow: hidden;
+ overflow-x: auto;
+ white-space: nowrap;
+}
+
+.tabs a {
+ align-items: center;
+ border-bottom-color: #dbdbdb;
+ border-bottom-style: solid;
+ border-bottom-width: 1px;
+ color: #4a4a4a;
+ display: flex;
+ justify-content: center;
+ margin-bottom: -1px;
+ padding: 0.5em 1em;
+ vertical-align: top;
+}
+
+.tabs a:hover {
+ border-bottom-color: #363636;
+ color: #363636;
+}
+
+.tabs li {
+ display: block;
+}
+
+.tabs li.is-active a {
+ border-bottom-color: #3273dc;
+ color: #3273dc;
+}
+
+.tabs ul {
+ align-items: center;
+ border-bottom-color: #dbdbdb;
+ border-bottom-style: solid;
+ border-bottom-width: 1px;
+ display: flex;
+ flex-grow: 1;
+ flex-shrink: 0;
+ justify-content: flex-start;
+}
+
+.tabs ul.is-left {
+ padding-right: 0.75em;
+}
+
+.tabs ul.is-center {
+ flex: none;
+ justify-content: center;
+ padding-left: 0.75em;
+ padding-right: 0.75em;
+}
+
+.tabs ul.is-right {
+ justify-content: flex-end;
+ padding-left: 0.75em;
+}
+
+.tabs .icon:first-child {
+ margin-right: 0.5em;
+}
+
+.tabs .icon:last-child {
+ margin-left: 0.5em;
+}
+
+.tabs.is-centered ul {
+ justify-content: center;
+}
+
+.tabs.is-right ul {
+ justify-content: flex-end;
+}
+
+.tabs.is-boxed a {
+ border: 1px solid transparent;
+ border-radius: 4px 4px 0 0;
+}
+
+.tabs.is-boxed a:hover {
+ background-color: whitesmoke;
+ border-bottom-color: #dbdbdb;
+}
+
+.tabs.is-boxed li.is-active a {
+ background-color: white;
+ border-color: #dbdbdb;
+ border-bottom-color: transparent !important;
+}
+
+.tabs.is-fullwidth li {
+ flex-grow: 1;
+ flex-shrink: 0;
+}
+
+.tabs.is-toggle a {
+ border-color: #dbdbdb;
+ border-style: solid;
+ border-width: 1px;
+ margin-bottom: 0;
+ position: relative;
+}
+
+.tabs.is-toggle a:hover {
+ background-color: whitesmoke;
+ border-color: #b5b5b5;
+ z-index: 2;
+}
+
+.tabs.is-toggle li + li {
+ margin-left: -1px;
+}
+
+.tabs.is-toggle li:first-child a {
+ border-radius: 4px 0 0 4px;
+}
+
+.tabs.is-toggle li:last-child a {
+ border-radius: 0 4px 4px 0;
+}
+
+.tabs.is-toggle li.is-active a {
+ background-color: #3273dc;
+ border-color: #3273dc;
+ color: #fff;
+ z-index: 1;
+}
+
+.tabs.is-toggle ul {
+ border-bottom: none;
+}
+
+.tabs.is-toggle.is-toggle-rounded li:first-child a {
+ border-bottom-left-radius: 290486px;
+ border-top-left-radius: 290486px;
+ padding-left: 1.25em;
+}
+
+.tabs.is-toggle.is-toggle-rounded li:last-child a {
+ border-bottom-right-radius: 290486px;
+ border-top-right-radius: 290486px;
+ padding-right: 1.25em;
+}
+
+.tabs.is-small {
+ font-size: 0.75rem;
+}
+
+.tabs.is-medium {
+ font-size: 1.25rem;
+}
+
+.tabs.is-large {
+ font-size: 1.5rem;
+}
+
+.column {
+ display: block;
+ flex-basis: 0;
+ flex-grow: 1;
+ flex-shrink: 1;
+ padding: 0.75rem;
+}
+
+.columns.is-mobile > .column.is-narrow {
+ flex: none;
+}
+
+.columns.is-mobile > .column.is-full {
+ flex: none;
+ width: 100%;
+}
+
+.columns.is-mobile > .column.is-three-quarters {
+ flex: none;
+ width: 75%;
+}
+
+.columns.is-mobile > .column.is-two-thirds {
+ flex: none;
+ width: 66.6666%;
+}
+
+.columns.is-mobile > .column.is-half {
+ flex: none;
+ width: 50%;
+}
+
+.columns.is-mobile > .column.is-one-third {
+ flex: none;
+ width: 33.3333%;
+}
+
+.columns.is-mobile > .column.is-one-quarter {
+ flex: none;
+ width: 25%;
+}
+
+.columns.is-mobile > .column.is-one-fifth {
+ flex: none;
+ width: 20%;
+}
+
+.columns.is-mobile > .column.is-two-fifths {
+ flex: none;
+ width: 40%;
+}
+
+.columns.is-mobile > .column.is-three-fifths {
+ flex: none;
+ width: 60%;
+}
+
+.columns.is-mobile > .column.is-four-fifths {
+ flex: none;
+ width: 80%;
+}
+
+.columns.is-mobile > .column.is-offset-three-quarters {
+ margin-left: 75%;
+}
+
+.columns.is-mobile > .column.is-offset-two-thirds {
+ margin-left: 66.6666%;
+}
+
+.columns.is-mobile > .column.is-offset-half {
+ margin-left: 50%;
+}
+
+.columns.is-mobile > .column.is-offset-one-third {
+ margin-left: 33.3333%;
+}
+
+.columns.is-mobile > .column.is-offset-one-quarter {
+ margin-left: 25%;
+}
+
+.columns.is-mobile > .column.is-offset-one-fifth {
+ margin-left: 20%;
+}
+
+.columns.is-mobile > .column.is-offset-two-fifths {
+ margin-left: 40%;
+}
+
+.columns.is-mobile > .column.is-offset-three-fifths {
+ margin-left: 60%;
+}
+
+.columns.is-mobile > .column.is-offset-four-fifths {
+ margin-left: 80%;
+}
+
+.columns.is-mobile > .column.is-0 {
+ flex: none;
+ width: 0%;
+}
+
+.columns.is-mobile > .column.is-offset-0 {
+ margin-left: 0%;
+}
+
+.columns.is-mobile > .column.is-1 {
+ flex: none;
+ width: 8.33333%;
+}
+
+.columns.is-mobile > .column.is-offset-1 {
+ margin-left: 8.33333%;
+}
+
+.columns.is-mobile > .column.is-2 {
+ flex: none;
+ width: 16.66667%;
+}
+
+.columns.is-mobile > .column.is-offset-2 {
+ margin-left: 16.66667%;
+}
+
+.columns.is-mobile > .column.is-3 {
+ flex: none;
+ width: 25%;
+}
+
+.columns.is-mobile > .column.is-offset-3 {
+ margin-left: 25%;
+}
+
+.columns.is-mobile > .column.is-4 {
+ flex: none;
+ width: 33.33333%;
+}
+
+.columns.is-mobile > .column.is-offset-4 {
+ margin-left: 33.33333%;
+}
+
+.columns.is-mobile > .column.is-5 {
+ flex: none;
+ width: 41.66667%;
+}
+
+.columns.is-mobile > .column.is-offset-5 {
+ margin-left: 41.66667%;
+}
+
+.columns.is-mobile > .column.is-6 {
+ flex: none;
+ width: 50%;
+}
+
+.columns.is-mobile > .column.is-offset-6 {
+ margin-left: 50%;
+}
+
+.columns.is-mobile > .column.is-7 {
+ flex: none;
+ width: 58.33333%;
+}
+
+.columns.is-mobile > .column.is-offset-7 {
+ margin-left: 58.33333%;
+}
+
+.columns.is-mobile > .column.is-8 {
+ flex: none;
+ width: 66.66667%;
+}
+
+.columns.is-mobile > .column.is-offset-8 {
+ margin-left: 66.66667%;
+}
+
+.columns.is-mobile > .column.is-9 {
+ flex: none;
+ width: 75%;
+}
+
+.columns.is-mobile > .column.is-offset-9 {
+ margin-left: 75%;
+}
+
+.columns.is-mobile > .column.is-10 {
+ flex: none;
+ width: 83.33333%;
+}
+
+.columns.is-mobile > .column.is-offset-10 {
+ margin-left: 83.33333%;
+}
+
+.columns.is-mobile > .column.is-11 {
+ flex: none;
+ width: 91.66667%;
+}
+
+.columns.is-mobile > .column.is-offset-11 {
+ margin-left: 91.66667%;
+}
+
+.columns.is-mobile > .column.is-12 {
+ flex: none;
+ width: 100%;
+}
+
+.columns.is-mobile > .column.is-offset-12 {
+ margin-left: 100%;
+}
+
+@media screen and (max-width: 768px) {
+ .column.is-narrow-mobile {
+ flex: none;
+ }
+ .column.is-full-mobile {
+ flex: none;
+ width: 100%;
+ }
+ .column.is-three-quarters-mobile {
+ flex: none;
+ width: 75%;
+ }
+ .column.is-two-thirds-mobile {
+ flex: none;
+ width: 66.6666%;
+ }
+ .column.is-half-mobile {
+ flex: none;
+ width: 50%;
+ }
+ .column.is-one-third-mobile {
+ flex: none;
+ width: 33.3333%;
+ }
+ .column.is-one-quarter-mobile {
+ flex: none;
+ width: 25%;
+ }
+ .column.is-one-fifth-mobile {
+ flex: none;
+ width: 20%;
+ }
+ .column.is-two-fifths-mobile {
+ flex: none;
+ width: 40%;
+ }
+ .column.is-three-fifths-mobile {
+ flex: none;
+ width: 60%;
+ }
+ .column.is-four-fifths-mobile {
+ flex: none;
+ width: 80%;
+ }
+ .column.is-offset-three-quarters-mobile {
+ margin-left: 75%;
+ }
+ .column.is-offset-two-thirds-mobile {
+ margin-left: 66.6666%;
+ }
+ .column.is-offset-half-mobile {
+ margin-left: 50%;
+ }
+ .column.is-offset-one-third-mobile {
+ margin-left: 33.3333%;
+ }
+ .column.is-offset-one-quarter-mobile {
+ margin-left: 25%;
+ }
+ .column.is-offset-one-fifth-mobile {
+ margin-left: 20%;
+ }
+ .column.is-offset-two-fifths-mobile {
+ margin-left: 40%;
+ }
+ .column.is-offset-three-fifths-mobile {
+ margin-left: 60%;
+ }
+ .column.is-offset-four-fifths-mobile {
+ margin-left: 80%;
+ }
+ .column.is-0-mobile {
+ flex: none;
+ width: 0%;
+ }
+ .column.is-offset-0-mobile {
+ margin-left: 0%;
+ }
+ .column.is-1-mobile {
+ flex: none;
+ width: 8.33333%;
+ }
+ .column.is-offset-1-mobile {
+ margin-left: 8.33333%;
+ }
+ .column.is-2-mobile {
+ flex: none;
+ width: 16.66667%;
+ }
+ .column.is-offset-2-mobile {
+ margin-left: 16.66667%;
+ }
+ .column.is-3-mobile {
+ flex: none;
+ width: 25%;
+ }
+ .column.is-offset-3-mobile {
+ margin-left: 25%;
+ }
+ .column.is-4-mobile {
+ flex: none;
+ width: 33.33333%;
+ }
+ .column.is-offset-4-mobile {
+ margin-left: 33.33333%;
+ }
+ .column.is-5-mobile {
+ flex: none;
+ width: 41.66667%;
+ }
+ .column.is-offset-5-mobile {
+ margin-left: 41.66667%;
+ }
+ .column.is-6-mobile {
+ flex: none;
+ width: 50%;
+ }
+ .column.is-offset-6-mobile {
+ margin-left: 50%;
+ }
+ .column.is-7-mobile {
+ flex: none;
+ width: 58.33333%;
+ }
+ .column.is-offset-7-mobile {
+ margin-left: 58.33333%;
+ }
+ .column.is-8-mobile {
+ flex: none;
+ width: 66.66667%;
+ }
+ .column.is-offset-8-mobile {
+ margin-left: 66.66667%;
+ }
+ .column.is-9-mobile {
+ flex: none;
+ width: 75%;
+ }
+ .column.is-offset-9-mobile {
+ margin-left: 75%;
+ }
+ .column.is-10-mobile {
+ flex: none;
+ width: 83.33333%;
+ }
+ .column.is-offset-10-mobile {
+ margin-left: 83.33333%;
+ }
+ .column.is-11-mobile {
+ flex: none;
+ width: 91.66667%;
+ }
+ .column.is-offset-11-mobile {
+ margin-left: 91.66667%;
+ }
+ .column.is-12-mobile {
+ flex: none;
+ width: 100%;
+ }
+ .column.is-offset-12-mobile {
+ margin-left: 100%;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .column.is-narrow, .column.is-narrow-tablet {
+ flex: none;
+ }
+ .column.is-full, .column.is-full-tablet {
+ flex: none;
+ width: 100%;
+ }
+ .column.is-three-quarters, .column.is-three-quarters-tablet {
+ flex: none;
+ width: 75%;
+ }
+ .column.is-two-thirds, .column.is-two-thirds-tablet {
+ flex: none;
+ width: 66.6666%;
+ }
+ .column.is-half, .column.is-half-tablet {
+ flex: none;
+ width: 50%;
+ }
+ .column.is-one-third, .column.is-one-third-tablet {
+ flex: none;
+ width: 33.3333%;
+ }
+ .column.is-one-quarter, .column.is-one-quarter-tablet {
+ flex: none;
+ width: 25%;
+ }
+ .column.is-one-fifth, .column.is-one-fifth-tablet {
+ flex: none;
+ width: 20%;
+ }
+ .column.is-two-fifths, .column.is-two-fifths-tablet {
+ flex: none;
+ width: 40%;
+ }
+ .column.is-three-fifths, .column.is-three-fifths-tablet {
+ flex: none;
+ width: 60%;
+ }
+ .column.is-four-fifths, .column.is-four-fifths-tablet {
+ flex: none;
+ width: 80%;
+ }
+ .column.is-offset-three-quarters, .column.is-offset-three-quarters-tablet {
+ margin-left: 75%;
+ }
+ .column.is-offset-two-thirds, .column.is-offset-two-thirds-tablet {
+ margin-left: 66.6666%;
+ }
+ .column.is-offset-half, .column.is-offset-half-tablet {
+ margin-left: 50%;
+ }
+ .column.is-offset-one-third, .column.is-offset-one-third-tablet {
+ margin-left: 33.3333%;
+ }
+ .column.is-offset-one-quarter, .column.is-offset-one-quarter-tablet {
+ margin-left: 25%;
+ }
+ .column.is-offset-one-fifth, .column.is-offset-one-fifth-tablet {
+ margin-left: 20%;
+ }
+ .column.is-offset-two-fifths, .column.is-offset-two-fifths-tablet {
+ margin-left: 40%;
+ }
+ .column.is-offset-three-fifths, .column.is-offset-three-fifths-tablet {
+ margin-left: 60%;
+ }
+ .column.is-offset-four-fifths, .column.is-offset-four-fifths-tablet {
+ margin-left: 80%;
+ }
+ .column.is-0, .column.is-0-tablet {
+ flex: none;
+ width: 0%;
+ }
+ .column.is-offset-0, .column.is-offset-0-tablet {
+ margin-left: 0%;
+ }
+ .column.is-1, .column.is-1-tablet {
+ flex: none;
+ width: 8.33333%;
+ }
+ .column.is-offset-1, .column.is-offset-1-tablet {
+ margin-left: 8.33333%;
+ }
+ .column.is-2, .column.is-2-tablet {
+ flex: none;
+ width: 16.66667%;
+ }
+ .column.is-offset-2, .column.is-offset-2-tablet {
+ margin-left: 16.66667%;
+ }
+ .column.is-3, .column.is-3-tablet {
+ flex: none;
+ width: 25%;
+ }
+ .column.is-offset-3, .column.is-offset-3-tablet {
+ margin-left: 25%;
+ }
+ .column.is-4, .column.is-4-tablet {
+ flex: none;
+ width: 33.33333%;
+ }
+ .column.is-offset-4, .column.is-offset-4-tablet {
+ margin-left: 33.33333%;
+ }
+ .column.is-5, .column.is-5-tablet {
+ flex: none;
+ width: 41.66667%;
+ }
+ .column.is-offset-5, .column.is-offset-5-tablet {
+ margin-left: 41.66667%;
+ }
+ .column.is-6, .column.is-6-tablet {
+ flex: none;
+ width: 50%;
+ }
+ .column.is-offset-6, .column.is-offset-6-tablet {
+ margin-left: 50%;
+ }
+ .column.is-7, .column.is-7-tablet {
+ flex: none;
+ width: 58.33333%;
+ }
+ .column.is-offset-7, .column.is-offset-7-tablet {
+ margin-left: 58.33333%;
+ }
+ .column.is-8, .column.is-8-tablet {
+ flex: none;
+ width: 66.66667%;
+ }
+ .column.is-offset-8, .column.is-offset-8-tablet {
+ margin-left: 66.66667%;
+ }
+ .column.is-9, .column.is-9-tablet {
+ flex: none;
+ width: 75%;
+ }
+ .column.is-offset-9, .column.is-offset-9-tablet {
+ margin-left: 75%;
+ }
+ .column.is-10, .column.is-10-tablet {
+ flex: none;
+ width: 83.33333%;
+ }
+ .column.is-offset-10, .column.is-offset-10-tablet {
+ margin-left: 83.33333%;
+ }
+ .column.is-11, .column.is-11-tablet {
+ flex: none;
+ width: 91.66667%;
+ }
+ .column.is-offset-11, .column.is-offset-11-tablet {
+ margin-left: 91.66667%;
+ }
+ .column.is-12, .column.is-12-tablet {
+ flex: none;
+ width: 100%;
+ }
+ .column.is-offset-12, .column.is-offset-12-tablet {
+ margin-left: 100%;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .column.is-narrow-touch {
+ flex: none;
+ }
+ .column.is-full-touch {
+ flex: none;
+ width: 100%;
+ }
+ .column.is-three-quarters-touch {
+ flex: none;
+ width: 75%;
+ }
+ .column.is-two-thirds-touch {
+ flex: none;
+ width: 66.6666%;
+ }
+ .column.is-half-touch {
+ flex: none;
+ width: 50%;
+ }
+ .column.is-one-third-touch {
+ flex: none;
+ width: 33.3333%;
+ }
+ .column.is-one-quarter-touch {
+ flex: none;
+ width: 25%;
+ }
+ .column.is-one-fifth-touch {
+ flex: none;
+ width: 20%;
+ }
+ .column.is-two-fifths-touch {
+ flex: none;
+ width: 40%;
+ }
+ .column.is-three-fifths-touch {
+ flex: none;
+ width: 60%;
+ }
+ .column.is-four-fifths-touch {
+ flex: none;
+ width: 80%;
+ }
+ .column.is-offset-three-quarters-touch {
+ margin-left: 75%;
+ }
+ .column.is-offset-two-thirds-touch {
+ margin-left: 66.6666%;
+ }
+ .column.is-offset-half-touch {
+ margin-left: 50%;
+ }
+ .column.is-offset-one-third-touch {
+ margin-left: 33.3333%;
+ }
+ .column.is-offset-one-quarter-touch {
+ margin-left: 25%;
+ }
+ .column.is-offset-one-fifth-touch {
+ margin-left: 20%;
+ }
+ .column.is-offset-two-fifths-touch {
+ margin-left: 40%;
+ }
+ .column.is-offset-three-fifths-touch {
+ margin-left: 60%;
+ }
+ .column.is-offset-four-fifths-touch {
+ margin-left: 80%;
+ }
+ .column.is-0-touch {
+ flex: none;
+ width: 0%;
+ }
+ .column.is-offset-0-touch {
+ margin-left: 0%;
+ }
+ .column.is-1-touch {
+ flex: none;
+ width: 8.33333%;
+ }
+ .column.is-offset-1-touch {
+ margin-left: 8.33333%;
+ }
+ .column.is-2-touch {
+ flex: none;
+ width: 16.66667%;
+ }
+ .column.is-offset-2-touch {
+ margin-left: 16.66667%;
+ }
+ .column.is-3-touch {
+ flex: none;
+ width: 25%;
+ }
+ .column.is-offset-3-touch {
+ margin-left: 25%;
+ }
+ .column.is-4-touch {
+ flex: none;
+ width: 33.33333%;
+ }
+ .column.is-offset-4-touch {
+ margin-left: 33.33333%;
+ }
+ .column.is-5-touch {
+ flex: none;
+ width: 41.66667%;
+ }
+ .column.is-offset-5-touch {
+ margin-left: 41.66667%;
+ }
+ .column.is-6-touch {
+ flex: none;
+ width: 50%;
+ }
+ .column.is-offset-6-touch {
+ margin-left: 50%;
+ }
+ .column.is-7-touch {
+ flex: none;
+ width: 58.33333%;
+ }
+ .column.is-offset-7-touch {
+ margin-left: 58.33333%;
+ }
+ .column.is-8-touch {
+ flex: none;
+ width: 66.66667%;
+ }
+ .column.is-offset-8-touch {
+ margin-left: 66.66667%;
+ }
+ .column.is-9-touch {
+ flex: none;
+ width: 75%;
+ }
+ .column.is-offset-9-touch {
+ margin-left: 75%;
+ }
+ .column.is-10-touch {
+ flex: none;
+ width: 83.33333%;
+ }
+ .column.is-offset-10-touch {
+ margin-left: 83.33333%;
+ }
+ .column.is-11-touch {
+ flex: none;
+ width: 91.66667%;
+ }
+ .column.is-offset-11-touch {
+ margin-left: 91.66667%;
+ }
+ .column.is-12-touch {
+ flex: none;
+ width: 100%;
+ }
+ .column.is-offset-12-touch {
+ margin-left: 100%;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .column.is-narrow-desktop {
+ flex: none;
+ }
+ .column.is-full-desktop {
+ flex: none;
+ width: 100%;
+ }
+ .column.is-three-quarters-desktop {
+ flex: none;
+ width: 75%;
+ }
+ .column.is-two-thirds-desktop {
+ flex: none;
+ width: 66.6666%;
+ }
+ .column.is-half-desktop {
+ flex: none;
+ width: 50%;
+ }
+ .column.is-one-third-desktop {
+ flex: none;
+ width: 33.3333%;
+ }
+ .column.is-one-quarter-desktop {
+ flex: none;
+ width: 25%;
+ }
+ .column.is-one-fifth-desktop {
+ flex: none;
+ width: 20%;
+ }
+ .column.is-two-fifths-desktop {
+ flex: none;
+ width: 40%;
+ }
+ .column.is-three-fifths-desktop {
+ flex: none;
+ width: 60%;
+ }
+ .column.is-four-fifths-desktop {
+ flex: none;
+ width: 80%;
+ }
+ .column.is-offset-three-quarters-desktop {
+ margin-left: 75%;
+ }
+ .column.is-offset-two-thirds-desktop {
+ margin-left: 66.6666%;
+ }
+ .column.is-offset-half-desktop {
+ margin-left: 50%;
+ }
+ .column.is-offset-one-third-desktop {
+ margin-left: 33.3333%;
+ }
+ .column.is-offset-one-quarter-desktop {
+ margin-left: 25%;
+ }
+ .column.is-offset-one-fifth-desktop {
+ margin-left: 20%;
+ }
+ .column.is-offset-two-fifths-desktop {
+ margin-left: 40%;
+ }
+ .column.is-offset-three-fifths-desktop {
+ margin-left: 60%;
+ }
+ .column.is-offset-four-fifths-desktop {
+ margin-left: 80%;
+ }
+ .column.is-0-desktop {
+ flex: none;
+ width: 0%;
+ }
+ .column.is-offset-0-desktop {
+ margin-left: 0%;
+ }
+ .column.is-1-desktop {
+ flex: none;
+ width: 8.33333%;
+ }
+ .column.is-offset-1-desktop {
+ margin-left: 8.33333%;
+ }
+ .column.is-2-desktop {
+ flex: none;
+ width: 16.66667%;
+ }
+ .column.is-offset-2-desktop {
+ margin-left: 16.66667%;
+ }
+ .column.is-3-desktop {
+ flex: none;
+ width: 25%;
+ }
+ .column.is-offset-3-desktop {
+ margin-left: 25%;
+ }
+ .column.is-4-desktop {
+ flex: none;
+ width: 33.33333%;
+ }
+ .column.is-offset-4-desktop {
+ margin-left: 33.33333%;
+ }
+ .column.is-5-desktop {
+ flex: none;
+ width: 41.66667%;
+ }
+ .column.is-offset-5-desktop {
+ margin-left: 41.66667%;
+ }
+ .column.is-6-desktop {
+ flex: none;
+ width: 50%;
+ }
+ .column.is-offset-6-desktop {
+ margin-left: 50%;
+ }
+ .column.is-7-desktop {
+ flex: none;
+ width: 58.33333%;
+ }
+ .column.is-offset-7-desktop {
+ margin-left: 58.33333%;
+ }
+ .column.is-8-desktop {
+ flex: none;
+ width: 66.66667%;
+ }
+ .column.is-offset-8-desktop {
+ margin-left: 66.66667%;
+ }
+ .column.is-9-desktop {
+ flex: none;
+ width: 75%;
+ }
+ .column.is-offset-9-desktop {
+ margin-left: 75%;
+ }
+ .column.is-10-desktop {
+ flex: none;
+ width: 83.33333%;
+ }
+ .column.is-offset-10-desktop {
+ margin-left: 83.33333%;
+ }
+ .column.is-11-desktop {
+ flex: none;
+ width: 91.66667%;
+ }
+ .column.is-offset-11-desktop {
+ margin-left: 91.66667%;
+ }
+ .column.is-12-desktop {
+ flex: none;
+ width: 100%;
+ }
+ .column.is-offset-12-desktop {
+ margin-left: 100%;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .column.is-narrow-widescreen {
+ flex: none;
+ }
+ .column.is-full-widescreen {
+ flex: none;
+ width: 100%;
+ }
+ .column.is-three-quarters-widescreen {
+ flex: none;
+ width: 75%;
+ }
+ .column.is-two-thirds-widescreen {
+ flex: none;
+ width: 66.6666%;
+ }
+ .column.is-half-widescreen {
+ flex: none;
+ width: 50%;
+ }
+ .column.is-one-third-widescreen {
+ flex: none;
+ width: 33.3333%;
+ }
+ .column.is-one-quarter-widescreen {
+ flex: none;
+ width: 25%;
+ }
+ .column.is-one-fifth-widescreen {
+ flex: none;
+ width: 20%;
+ }
+ .column.is-two-fifths-widescreen {
+ flex: none;
+ width: 40%;
+ }
+ .column.is-three-fifths-widescreen {
+ flex: none;
+ width: 60%;
+ }
+ .column.is-four-fifths-widescreen {
+ flex: none;
+ width: 80%;
+ }
+ .column.is-offset-three-quarters-widescreen {
+ margin-left: 75%;
+ }
+ .column.is-offset-two-thirds-widescreen {
+ margin-left: 66.6666%;
+ }
+ .column.is-offset-half-widescreen {
+ margin-left: 50%;
+ }
+ .column.is-offset-one-third-widescreen {
+ margin-left: 33.3333%;
+ }
+ .column.is-offset-one-quarter-widescreen {
+ margin-left: 25%;
+ }
+ .column.is-offset-one-fifth-widescreen {
+ margin-left: 20%;
+ }
+ .column.is-offset-two-fifths-widescreen {
+ margin-left: 40%;
+ }
+ .column.is-offset-three-fifths-widescreen {
+ margin-left: 60%;
+ }
+ .column.is-offset-four-fifths-widescreen {
+ margin-left: 80%;
+ }
+ .column.is-0-widescreen {
+ flex: none;
+ width: 0%;
+ }
+ .column.is-offset-0-widescreen {
+ margin-left: 0%;
+ }
+ .column.is-1-widescreen {
+ flex: none;
+ width: 8.33333%;
+ }
+ .column.is-offset-1-widescreen {
+ margin-left: 8.33333%;
+ }
+ .column.is-2-widescreen {
+ flex: none;
+ width: 16.66667%;
+ }
+ .column.is-offset-2-widescreen {
+ margin-left: 16.66667%;
+ }
+ .column.is-3-widescreen {
+ flex: none;
+ width: 25%;
+ }
+ .column.is-offset-3-widescreen {
+ margin-left: 25%;
+ }
+ .column.is-4-widescreen {
+ flex: none;
+ width: 33.33333%;
+ }
+ .column.is-offset-4-widescreen {
+ margin-left: 33.33333%;
+ }
+ .column.is-5-widescreen {
+ flex: none;
+ width: 41.66667%;
+ }
+ .column.is-offset-5-widescreen {
+ margin-left: 41.66667%;
+ }
+ .column.is-6-widescreen {
+ flex: none;
+ width: 50%;
+ }
+ .column.is-offset-6-widescreen {
+ margin-left: 50%;
+ }
+ .column.is-7-widescreen {
+ flex: none;
+ width: 58.33333%;
+ }
+ .column.is-offset-7-widescreen {
+ margin-left: 58.33333%;
+ }
+ .column.is-8-widescreen {
+ flex: none;
+ width: 66.66667%;
+ }
+ .column.is-offset-8-widescreen {
+ margin-left: 66.66667%;
+ }
+ .column.is-9-widescreen {
+ flex: none;
+ width: 75%;
+ }
+ .column.is-offset-9-widescreen {
+ margin-left: 75%;
+ }
+ .column.is-10-widescreen {
+ flex: none;
+ width: 83.33333%;
+ }
+ .column.is-offset-10-widescreen {
+ margin-left: 83.33333%;
+ }
+ .column.is-11-widescreen {
+ flex: none;
+ width: 91.66667%;
+ }
+ .column.is-offset-11-widescreen {
+ margin-left: 91.66667%;
+ }
+ .column.is-12-widescreen {
+ flex: none;
+ width: 100%;
+ }
+ .column.is-offset-12-widescreen {
+ margin-left: 100%;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .column.is-narrow-fullhd {
+ flex: none;
+ }
+ .column.is-full-fullhd {
+ flex: none;
+ width: 100%;
+ }
+ .column.is-three-quarters-fullhd {
+ flex: none;
+ width: 75%;
+ }
+ .column.is-two-thirds-fullhd {
+ flex: none;
+ width: 66.6666%;
+ }
+ .column.is-half-fullhd {
+ flex: none;
+ width: 50%;
+ }
+ .column.is-one-third-fullhd {
+ flex: none;
+ width: 33.3333%;
+ }
+ .column.is-one-quarter-fullhd {
+ flex: none;
+ width: 25%;
+ }
+ .column.is-one-fifth-fullhd {
+ flex: none;
+ width: 20%;
+ }
+ .column.is-two-fifths-fullhd {
+ flex: none;
+ width: 40%;
+ }
+ .column.is-three-fifths-fullhd {
+ flex: none;
+ width: 60%;
+ }
+ .column.is-four-fifths-fullhd {
+ flex: none;
+ width: 80%;
+ }
+ .column.is-offset-three-quarters-fullhd {
+ margin-left: 75%;
+ }
+ .column.is-offset-two-thirds-fullhd {
+ margin-left: 66.6666%;
+ }
+ .column.is-offset-half-fullhd {
+ margin-left: 50%;
+ }
+ .column.is-offset-one-third-fullhd {
+ margin-left: 33.3333%;
+ }
+ .column.is-offset-one-quarter-fullhd {
+ margin-left: 25%;
+ }
+ .column.is-offset-one-fifth-fullhd {
+ margin-left: 20%;
+ }
+ .column.is-offset-two-fifths-fullhd {
+ margin-left: 40%;
+ }
+ .column.is-offset-three-fifths-fullhd {
+ margin-left: 60%;
+ }
+ .column.is-offset-four-fifths-fullhd {
+ margin-left: 80%;
+ }
+ .column.is-0-fullhd {
+ flex: none;
+ width: 0%;
+ }
+ .column.is-offset-0-fullhd {
+ margin-left: 0%;
+ }
+ .column.is-1-fullhd {
+ flex: none;
+ width: 8.33333%;
+ }
+ .column.is-offset-1-fullhd {
+ margin-left: 8.33333%;
+ }
+ .column.is-2-fullhd {
+ flex: none;
+ width: 16.66667%;
+ }
+ .column.is-offset-2-fullhd {
+ margin-left: 16.66667%;
+ }
+ .column.is-3-fullhd {
+ flex: none;
+ width: 25%;
+ }
+ .column.is-offset-3-fullhd {
+ margin-left: 25%;
+ }
+ .column.is-4-fullhd {
+ flex: none;
+ width: 33.33333%;
+ }
+ .column.is-offset-4-fullhd {
+ margin-left: 33.33333%;
+ }
+ .column.is-5-fullhd {
+ flex: none;
+ width: 41.66667%;
+ }
+ .column.is-offset-5-fullhd {
+ margin-left: 41.66667%;
+ }
+ .column.is-6-fullhd {
+ flex: none;
+ width: 50%;
+ }
+ .column.is-offset-6-fullhd {
+ margin-left: 50%;
+ }
+ .column.is-7-fullhd {
+ flex: none;
+ width: 58.33333%;
+ }
+ .column.is-offset-7-fullhd {
+ margin-left: 58.33333%;
+ }
+ .column.is-8-fullhd {
+ flex: none;
+ width: 66.66667%;
+ }
+ .column.is-offset-8-fullhd {
+ margin-left: 66.66667%;
+ }
+ .column.is-9-fullhd {
+ flex: none;
+ width: 75%;
+ }
+ .column.is-offset-9-fullhd {
+ margin-left: 75%;
+ }
+ .column.is-10-fullhd {
+ flex: none;
+ width: 83.33333%;
+ }
+ .column.is-offset-10-fullhd {
+ margin-left: 83.33333%;
+ }
+ .column.is-11-fullhd {
+ flex: none;
+ width: 91.66667%;
+ }
+ .column.is-offset-11-fullhd {
+ margin-left: 91.66667%;
+ }
+ .column.is-12-fullhd {
+ flex: none;
+ width: 100%;
+ }
+ .column.is-offset-12-fullhd {
+ margin-left: 100%;
+ }
+}
+
+.columns {
+ margin-left: -0.75rem;
+ margin-right: -0.75rem;
+ margin-top: -0.75rem;
+}
+
+.columns:last-child {
+ margin-bottom: -0.75rem;
+}
+
+.columns:not(:last-child) {
+ margin-bottom: calc(1.5rem - 0.75rem);
+}
+
+.columns.is-centered {
+ justify-content: center;
+}
+
+.columns.is-gapless {
+ margin-left: 0;
+ margin-right: 0;
+ margin-top: 0;
+}
+
+.columns.is-gapless > .column {
+ margin: 0;
+ padding: 0 !important;
+}
+
+.columns.is-gapless:not(:last-child) {
+ margin-bottom: 1.5rem;
+}
+
+.columns.is-gapless:last-child {
+ margin-bottom: 0;
+}
+
+.columns.is-mobile {
+ display: flex;
+}
+
+.columns.is-multiline {
+ flex-wrap: wrap;
+}
+
+.columns.is-vcentered {
+ align-items: center;
+}
+
+@media screen and (min-width: 769px), print {
+ .columns:not(.is-desktop) {
+ display: flex;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .columns.is-desktop {
+ display: flex;
+ }
+}
+
+.columns.is-variable {
+ --columnGap: 0.75rem;
+ margin-left: calc(-1 * var(--columnGap));
+ margin-right: calc(-1 * var(--columnGap));
+}
+
+.columns.is-variable .column {
+ padding-left: var(--columnGap);
+ padding-right: var(--columnGap);
+}
+
+.columns.is-variable.is-0 {
+ --columnGap: 0rem;
+}
+
+@media screen and (max-width: 768px) {
+ .columns.is-variable.is-0-mobile {
+ --columnGap: 0rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .columns.is-variable.is-0-tablet {
+ --columnGap: 0rem;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .columns.is-variable.is-0-tablet-only {
+ --columnGap: 0rem;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .columns.is-variable.is-0-touch {
+ --columnGap: 0rem;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .columns.is-variable.is-0-desktop {
+ --columnGap: 0rem;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .columns.is-variable.is-0-desktop-only {
+ --columnGap: 0rem;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .columns.is-variable.is-0-widescreen {
+ --columnGap: 0rem;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .columns.is-variable.is-0-widescreen-only {
+ --columnGap: 0rem;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .columns.is-variable.is-0-fullhd {
+ --columnGap: 0rem;
+ }
+}
+
+.columns.is-variable.is-1 {
+ --columnGap: 0.25rem;
+}
+
+@media screen and (max-width: 768px) {
+ .columns.is-variable.is-1-mobile {
+ --columnGap: 0.25rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .columns.is-variable.is-1-tablet {
+ --columnGap: 0.25rem;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .columns.is-variable.is-1-tablet-only {
+ --columnGap: 0.25rem;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .columns.is-variable.is-1-touch {
+ --columnGap: 0.25rem;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .columns.is-variable.is-1-desktop {
+ --columnGap: 0.25rem;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .columns.is-variable.is-1-desktop-only {
+ --columnGap: 0.25rem;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .columns.is-variable.is-1-widescreen {
+ --columnGap: 0.25rem;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .columns.is-variable.is-1-widescreen-only {
+ --columnGap: 0.25rem;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .columns.is-variable.is-1-fullhd {
+ --columnGap: 0.25rem;
+ }
+}
+
+.columns.is-variable.is-2 {
+ --columnGap: 0.5rem;
+}
+
+@media screen and (max-width: 768px) {
+ .columns.is-variable.is-2-mobile {
+ --columnGap: 0.5rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .columns.is-variable.is-2-tablet {
+ --columnGap: 0.5rem;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .columns.is-variable.is-2-tablet-only {
+ --columnGap: 0.5rem;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .columns.is-variable.is-2-touch {
+ --columnGap: 0.5rem;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .columns.is-variable.is-2-desktop {
+ --columnGap: 0.5rem;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .columns.is-variable.is-2-desktop-only {
+ --columnGap: 0.5rem;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .columns.is-variable.is-2-widescreen {
+ --columnGap: 0.5rem;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .columns.is-variable.is-2-widescreen-only {
+ --columnGap: 0.5rem;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .columns.is-variable.is-2-fullhd {
+ --columnGap: 0.5rem;
+ }
+}
+
+.columns.is-variable.is-3 {
+ --columnGap: 0.75rem;
+}
+
+@media screen and (max-width: 768px) {
+ .columns.is-variable.is-3-mobile {
+ --columnGap: 0.75rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .columns.is-variable.is-3-tablet {
+ --columnGap: 0.75rem;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .columns.is-variable.is-3-tablet-only {
+ --columnGap: 0.75rem;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .columns.is-variable.is-3-touch {
+ --columnGap: 0.75rem;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .columns.is-variable.is-3-desktop {
+ --columnGap: 0.75rem;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .columns.is-variable.is-3-desktop-only {
+ --columnGap: 0.75rem;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .columns.is-variable.is-3-widescreen {
+ --columnGap: 0.75rem;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .columns.is-variable.is-3-widescreen-only {
+ --columnGap: 0.75rem;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .columns.is-variable.is-3-fullhd {
+ --columnGap: 0.75rem;
+ }
+}
+
+.columns.is-variable.is-4 {
+ --columnGap: 1rem;
+}
+
+@media screen and (max-width: 768px) {
+ .columns.is-variable.is-4-mobile {
+ --columnGap: 1rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .columns.is-variable.is-4-tablet {
+ --columnGap: 1rem;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .columns.is-variable.is-4-tablet-only {
+ --columnGap: 1rem;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .columns.is-variable.is-4-touch {
+ --columnGap: 1rem;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .columns.is-variable.is-4-desktop {
+ --columnGap: 1rem;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .columns.is-variable.is-4-desktop-only {
+ --columnGap: 1rem;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .columns.is-variable.is-4-widescreen {
+ --columnGap: 1rem;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .columns.is-variable.is-4-widescreen-only {
+ --columnGap: 1rem;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .columns.is-variable.is-4-fullhd {
+ --columnGap: 1rem;
+ }
+}
+
+.columns.is-variable.is-5 {
+ --columnGap: 1.25rem;
+}
+
+@media screen and (max-width: 768px) {
+ .columns.is-variable.is-5-mobile {
+ --columnGap: 1.25rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .columns.is-variable.is-5-tablet {
+ --columnGap: 1.25rem;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .columns.is-variable.is-5-tablet-only {
+ --columnGap: 1.25rem;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .columns.is-variable.is-5-touch {
+ --columnGap: 1.25rem;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .columns.is-variable.is-5-desktop {
+ --columnGap: 1.25rem;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .columns.is-variable.is-5-desktop-only {
+ --columnGap: 1.25rem;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .columns.is-variable.is-5-widescreen {
+ --columnGap: 1.25rem;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .columns.is-variable.is-5-widescreen-only {
+ --columnGap: 1.25rem;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .columns.is-variable.is-5-fullhd {
+ --columnGap: 1.25rem;
+ }
+}
+
+.columns.is-variable.is-6 {
+ --columnGap: 1.5rem;
+}
+
+@media screen and (max-width: 768px) {
+ .columns.is-variable.is-6-mobile {
+ --columnGap: 1.5rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .columns.is-variable.is-6-tablet {
+ --columnGap: 1.5rem;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .columns.is-variable.is-6-tablet-only {
+ --columnGap: 1.5rem;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .columns.is-variable.is-6-touch {
+ --columnGap: 1.5rem;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .columns.is-variable.is-6-desktop {
+ --columnGap: 1.5rem;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .columns.is-variable.is-6-desktop-only {
+ --columnGap: 1.5rem;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .columns.is-variable.is-6-widescreen {
+ --columnGap: 1.5rem;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .columns.is-variable.is-6-widescreen-only {
+ --columnGap: 1.5rem;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .columns.is-variable.is-6-fullhd {
+ --columnGap: 1.5rem;
+ }
+}
+
+.columns.is-variable.is-7 {
+ --columnGap: 1.75rem;
+}
+
+@media screen and (max-width: 768px) {
+ .columns.is-variable.is-7-mobile {
+ --columnGap: 1.75rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .columns.is-variable.is-7-tablet {
+ --columnGap: 1.75rem;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .columns.is-variable.is-7-tablet-only {
+ --columnGap: 1.75rem;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .columns.is-variable.is-7-touch {
+ --columnGap: 1.75rem;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .columns.is-variable.is-7-desktop {
+ --columnGap: 1.75rem;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .columns.is-variable.is-7-desktop-only {
+ --columnGap: 1.75rem;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .columns.is-variable.is-7-widescreen {
+ --columnGap: 1.75rem;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .columns.is-variable.is-7-widescreen-only {
+ --columnGap: 1.75rem;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .columns.is-variable.is-7-fullhd {
+ --columnGap: 1.75rem;
+ }
+}
+
+.columns.is-variable.is-8 {
+ --columnGap: 2rem;
+}
+
+@media screen and (max-width: 768px) {
+ .columns.is-variable.is-8-mobile {
+ --columnGap: 2rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .columns.is-variable.is-8-tablet {
+ --columnGap: 2rem;
+ }
+}
+
+@media screen and (min-width: 769px) and (max-width: 1023px) {
+ .columns.is-variable.is-8-tablet-only {
+ --columnGap: 2rem;
+ }
+}
+
+@media screen and (max-width: 1023px) {
+ .columns.is-variable.is-8-touch {
+ --columnGap: 2rem;
+ }
+}
+
+@media screen and (min-width: 1024px) {
+ .columns.is-variable.is-8-desktop {
+ --columnGap: 2rem;
+ }
+}
+
+@media screen and (min-width: 1024px) and (max-width: 1215px) {
+ .columns.is-variable.is-8-desktop-only {
+ --columnGap: 2rem;
+ }
+}
+
+@media screen and (min-width: 1216px) {
+ .columns.is-variable.is-8-widescreen {
+ --columnGap: 2rem;
+ }
+}
+
+@media screen and (min-width: 1216px) and (max-width: 1407px) {
+ .columns.is-variable.is-8-widescreen-only {
+ --columnGap: 2rem;
+ }
+}
+
+@media screen and (min-width: 1408px) {
+ .columns.is-variable.is-8-fullhd {
+ --columnGap: 2rem;
+ }
+}
+
+.tile {
+ align-items: stretch;
+ display: block;
+ flex-basis: 0;
+ flex-grow: 1;
+ flex-shrink: 1;
+ min-height: -webkit-min-content;
+ min-height: -moz-min-content;
+ min-height: min-content;
+}
+
+.tile.is-ancestor {
+ margin-left: -0.75rem;
+ margin-right: -0.75rem;
+ margin-top: -0.75rem;
+}
+
+.tile.is-ancestor:last-child {
+ margin-bottom: -0.75rem;
+}
+
+.tile.is-ancestor:not(:last-child) {
+ margin-bottom: 0.75rem;
+}
+
+.tile.is-child {
+ margin: 0 !important;
+}
+
+.tile.is-parent {
+ padding: 0.75rem;
+}
+
+.tile.is-vertical {
+ flex-direction: column;
+}
+
+.tile.is-vertical > .tile.is-child:not(:last-child) {
+ margin-bottom: 1.5rem !important;
+}
+
+@media screen and (min-width: 769px), print {
+ .tile:not(.is-child) {
+ display: flex;
+ }
+ .tile.is-1 {
+ flex: none;
+ width: 8.33333%;
+ }
+ .tile.is-2 {
+ flex: none;
+ width: 16.66667%;
+ }
+ .tile.is-3 {
+ flex: none;
+ width: 25%;
+ }
+ .tile.is-4 {
+ flex: none;
+ width: 33.33333%;
+ }
+ .tile.is-5 {
+ flex: none;
+ width: 41.66667%;
+ }
+ .tile.is-6 {
+ flex: none;
+ width: 50%;
+ }
+ .tile.is-7 {
+ flex: none;
+ width: 58.33333%;
+ }
+ .tile.is-8 {
+ flex: none;
+ width: 66.66667%;
+ }
+ .tile.is-9 {
+ flex: none;
+ width: 75%;
+ }
+ .tile.is-10 {
+ flex: none;
+ width: 83.33333%;
+ }
+ .tile.is-11 {
+ flex: none;
+ width: 91.66667%;
+ }
+ .tile.is-12 {
+ flex: none;
+ width: 100%;
+ }
+}
+
+.hero {
+ align-items: stretch;
+ display: flex;
+ flex-direction: column;
+ justify-content: space-between;
+}
+
+.hero .navbar {
+ background: none;
+}
+
+.hero .tabs ul {
+ border-bottom: none;
+}
+
+.hero.is-white {
+ background-color: white;
+ color: #0a0a0a;
+}
+
+.hero.is-white a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),
+.hero.is-white strong {
+ color: inherit;
+}
+
+.hero.is-white .title {
+ color: #0a0a0a;
+}
+
+.hero.is-white .subtitle {
+ color: rgba(10, 10, 10, 0.9);
+}
+
+.hero.is-white .subtitle a:not(.button),
+.hero.is-white .subtitle strong {
+ color: #0a0a0a;
+}
+
+@media screen and (max-width: 1023px) {
+ .hero.is-white .navbar-menu {
+ background-color: white;
+ }
+}
+
+.hero.is-white .navbar-item,
+.hero.is-white .navbar-link {
+ color: rgba(10, 10, 10, 0.7);
+}
+
+.hero.is-white a.navbar-item:hover, .hero.is-white a.navbar-item.is-active,
+.hero.is-white .navbar-link:hover,
+.hero.is-white .navbar-link.is-active {
+ background-color: #f2f2f2;
+ color: #0a0a0a;
+}
+
+.hero.is-white .tabs a {
+ color: #0a0a0a;
+ opacity: 0.9;
+}
+
+.hero.is-white .tabs a:hover {
+ opacity: 1;
+}
+
+.hero.is-white .tabs li.is-active a {
+ opacity: 1;
+}
+
+.hero.is-white .tabs.is-boxed a, .hero.is-white .tabs.is-toggle a {
+ color: #0a0a0a;
+}
+
+.hero.is-white .tabs.is-boxed a:hover, .hero.is-white .tabs.is-toggle a:hover {
+ background-color: rgba(10, 10, 10, 0.1);
+}
+
+.hero.is-white .tabs.is-boxed li.is-active a, .hero.is-white .tabs.is-boxed li.is-active a:hover, .hero.is-white .tabs.is-toggle li.is-active a, .hero.is-white .tabs.is-toggle li.is-active a:hover {
+ background-color: #0a0a0a;
+ border-color: #0a0a0a;
+ color: white;
+}
+
+.hero.is-white.is-bold {
+ background-image: linear-gradient(141deg, #e6e6e6 0%, white 71%, white 100%);
+}
+
+@media screen and (max-width: 768px) {
+ .hero.is-white.is-bold .navbar-menu {
+ background-image: linear-gradient(141deg, #e6e6e6 0%, white 71%, white 100%);
+ }
+}
+
+.hero.is-black {
+ background-color: #0a0a0a;
+ color: white;
+}
+
+.hero.is-black a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),
+.hero.is-black strong {
+ color: inherit;
+}
+
+.hero.is-black .title {
+ color: white;
+}
+
+.hero.is-black .subtitle {
+ color: rgba(255, 255, 255, 0.9);
+}
+
+.hero.is-black .subtitle a:not(.button),
+.hero.is-black .subtitle strong {
+ color: white;
+}
+
+@media screen and (max-width: 1023px) {
+ .hero.is-black .navbar-menu {
+ background-color: #0a0a0a;
+ }
+}
+
+.hero.is-black .navbar-item,
+.hero.is-black .navbar-link {
+ color: rgba(255, 255, 255, 0.7);
+}
+
+.hero.is-black a.navbar-item:hover, .hero.is-black a.navbar-item.is-active,
+.hero.is-black .navbar-link:hover,
+.hero.is-black .navbar-link.is-active {
+ background-color: black;
+ color: white;
+}
+
+.hero.is-black .tabs a {
+ color: white;
+ opacity: 0.9;
+}
+
+.hero.is-black .tabs a:hover {
+ opacity: 1;
+}
+
+.hero.is-black .tabs li.is-active a {
+ opacity: 1;
+}
+
+.hero.is-black .tabs.is-boxed a, .hero.is-black .tabs.is-toggle a {
+ color: white;
+}
+
+.hero.is-black .tabs.is-boxed a:hover, .hero.is-black .tabs.is-toggle a:hover {
+ background-color: rgba(10, 10, 10, 0.1);
+}
+
+.hero.is-black .tabs.is-boxed li.is-active a, .hero.is-black .tabs.is-boxed li.is-active a:hover, .hero.is-black .tabs.is-toggle li.is-active a, .hero.is-black .tabs.is-toggle li.is-active a:hover {
+ background-color: white;
+ border-color: white;
+ color: #0a0a0a;
+}
+
+.hero.is-black.is-bold {
+ background-image: linear-gradient(141deg, black 0%, #0a0a0a 71%, #181616 100%);
+}
+
+@media screen and (max-width: 768px) {
+ .hero.is-black.is-bold .navbar-menu {
+ background-image: linear-gradient(141deg, black 0%, #0a0a0a 71%, #181616 100%);
+ }
+}
+
+.hero.is-light {
+ background-color: whitesmoke;
+ color: #363636;
+}
+
+.hero.is-light a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),
+.hero.is-light strong {
+ color: inherit;
+}
+
+.hero.is-light .title {
+ color: #363636;
+}
+
+.hero.is-light .subtitle {
+ color: rgba(54, 54, 54, 0.9);
+}
+
+.hero.is-light .subtitle a:not(.button),
+.hero.is-light .subtitle strong {
+ color: #363636;
+}
+
+@media screen and (max-width: 1023px) {
+ .hero.is-light .navbar-menu {
+ background-color: whitesmoke;
+ }
+}
+
+.hero.is-light .navbar-item,
+.hero.is-light .navbar-link {
+ color: rgba(54, 54, 54, 0.7);
+}
+
+.hero.is-light a.navbar-item:hover, .hero.is-light a.navbar-item.is-active,
+.hero.is-light .navbar-link:hover,
+.hero.is-light .navbar-link.is-active {
+ background-color: #e8e8e8;
+ color: #363636;
+}
+
+.hero.is-light .tabs a {
+ color: #363636;
+ opacity: 0.9;
+}
+
+.hero.is-light .tabs a:hover {
+ opacity: 1;
+}
+
+.hero.is-light .tabs li.is-active a {
+ opacity: 1;
+}
+
+.hero.is-light .tabs.is-boxed a, .hero.is-light .tabs.is-toggle a {
+ color: #363636;
+}
+
+.hero.is-light .tabs.is-boxed a:hover, .hero.is-light .tabs.is-toggle a:hover {
+ background-color: rgba(10, 10, 10, 0.1);
+}
+
+.hero.is-light .tabs.is-boxed li.is-active a, .hero.is-light .tabs.is-boxed li.is-active a:hover, .hero.is-light .tabs.is-toggle li.is-active a, .hero.is-light .tabs.is-toggle li.is-active a:hover {
+ background-color: #363636;
+ border-color: #363636;
+ color: whitesmoke;
+}
+
+.hero.is-light.is-bold {
+ background-image: linear-gradient(141deg, #dfd8d9 0%, whitesmoke 71%, white 100%);
+}
+
+@media screen and (max-width: 768px) {
+ .hero.is-light.is-bold .navbar-menu {
+ background-image: linear-gradient(141deg, #dfd8d9 0%, whitesmoke 71%, white 100%);
+ }
+}
+
+.hero.is-dark {
+ background-color: #363636;
+ color: whitesmoke;
+}
+
+.hero.is-dark a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),
+.hero.is-dark strong {
+ color: inherit;
+}
+
+.hero.is-dark .title {
+ color: whitesmoke;
+}
+
+.hero.is-dark .subtitle {
+ color: rgba(245, 245, 245, 0.9);
+}
+
+.hero.is-dark .subtitle a:not(.button),
+.hero.is-dark .subtitle strong {
+ color: whitesmoke;
+}
+
+@media screen and (max-width: 1023px) {
+ .hero.is-dark .navbar-menu {
+ background-color: #363636;
+ }
+}
+
+.hero.is-dark .navbar-item,
+.hero.is-dark .navbar-link {
+ color: rgba(245, 245, 245, 0.7);
+}
+
+.hero.is-dark a.navbar-item:hover, .hero.is-dark a.navbar-item.is-active,
+.hero.is-dark .navbar-link:hover,
+.hero.is-dark .navbar-link.is-active {
+ background-color: #292929;
+ color: whitesmoke;
+}
+
+.hero.is-dark .tabs a {
+ color: whitesmoke;
+ opacity: 0.9;
+}
+
+.hero.is-dark .tabs a:hover {
+ opacity: 1;
+}
+
+.hero.is-dark .tabs li.is-active a {
+ opacity: 1;
+}
+
+.hero.is-dark .tabs.is-boxed a, .hero.is-dark .tabs.is-toggle a {
+ color: whitesmoke;
+}
+
+.hero.is-dark .tabs.is-boxed a:hover, .hero.is-dark .tabs.is-toggle a:hover {
+ background-color: rgba(10, 10, 10, 0.1);
+}
+
+.hero.is-dark .tabs.is-boxed li.is-active a, .hero.is-dark .tabs.is-boxed li.is-active a:hover, .hero.is-dark .tabs.is-toggle li.is-active a, .hero.is-dark .tabs.is-toggle li.is-active a:hover {
+ background-color: whitesmoke;
+ border-color: whitesmoke;
+ color: #363636;
+}
+
+.hero.is-dark.is-bold {
+ background-image: linear-gradient(141deg, #1f191a 0%, #363636 71%, #46403f 100%);
+}
+
+@media screen and (max-width: 768px) {
+ .hero.is-dark.is-bold .navbar-menu {
+ background-image: linear-gradient(141deg, #1f191a 0%, #363636 71%, #46403f 100%);
+ }
+}
+
+.hero.is-primary {
+ background-color: #00d1b2;
+ color: #fff;
+}
+
+.hero.is-primary a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),
+.hero.is-primary strong {
+ color: inherit;
+}
+
+.hero.is-primary .title {
+ color: #fff;
+}
+
+.hero.is-primary .subtitle {
+ color: rgba(255, 255, 255, 0.9);
+}
+
+.hero.is-primary .subtitle a:not(.button),
+.hero.is-primary .subtitle strong {
+ color: #fff;
+}
+
+@media screen and (max-width: 1023px) {
+ .hero.is-primary .navbar-menu {
+ background-color: #00d1b2;
+ }
+}
+
+.hero.is-primary .navbar-item,
+.hero.is-primary .navbar-link {
+ color: rgba(255, 255, 255, 0.7);
+}
+
+.hero.is-primary a.navbar-item:hover, .hero.is-primary a.navbar-item.is-active,
+.hero.is-primary .navbar-link:hover,
+.hero.is-primary .navbar-link.is-active {
+ background-color: #00b89c;
+ color: #fff;
+}
+
+.hero.is-primary .tabs a {
+ color: #fff;
+ opacity: 0.9;
+}
+
+.hero.is-primary .tabs a:hover {
+ opacity: 1;
+}
+
+.hero.is-primary .tabs li.is-active a {
+ opacity: 1;
+}
+
+.hero.is-primary .tabs.is-boxed a, .hero.is-primary .tabs.is-toggle a {
+ color: #fff;
+}
+
+.hero.is-primary .tabs.is-boxed a:hover, .hero.is-primary .tabs.is-toggle a:hover {
+ background-color: rgba(10, 10, 10, 0.1);
+}
+
+.hero.is-primary .tabs.is-boxed li.is-active a, .hero.is-primary .tabs.is-boxed li.is-active a:hover, .hero.is-primary .tabs.is-toggle li.is-active a, .hero.is-primary .tabs.is-toggle li.is-active a:hover {
+ background-color: #fff;
+ border-color: #fff;
+ color: #00d1b2;
+}
+
+.hero.is-primary.is-bold {
+ background-image: linear-gradient(141deg, #009e6c 0%, #00d1b2 71%, #00e7eb 100%);
+}
+
+@media screen and (max-width: 768px) {
+ .hero.is-primary.is-bold .navbar-menu {
+ background-image: linear-gradient(141deg, #009e6c 0%, #00d1b2 71%, #00e7eb 100%);
+ }
+}
+
+.hero.is-link {
+ background-color: #3273dc;
+ color: #fff;
+}
+
+.hero.is-link a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),
+.hero.is-link strong {
+ color: inherit;
+}
+
+.hero.is-link .title {
+ color: #fff;
+}
+
+.hero.is-link .subtitle {
+ color: rgba(255, 255, 255, 0.9);
+}
+
+.hero.is-link .subtitle a:not(.button),
+.hero.is-link .subtitle strong {
+ color: #fff;
+}
+
+@media screen and (max-width: 1023px) {
+ .hero.is-link .navbar-menu {
+ background-color: #3273dc;
+ }
+}
+
+.hero.is-link .navbar-item,
+.hero.is-link .navbar-link {
+ color: rgba(255, 255, 255, 0.7);
+}
+
+.hero.is-link a.navbar-item:hover, .hero.is-link a.navbar-item.is-active,
+.hero.is-link .navbar-link:hover,
+.hero.is-link .navbar-link.is-active {
+ background-color: #2366d1;
+ color: #fff;
+}
+
+.hero.is-link .tabs a {
+ color: #fff;
+ opacity: 0.9;
+}
+
+.hero.is-link .tabs a:hover {
+ opacity: 1;
+}
+
+.hero.is-link .tabs li.is-active a {
+ opacity: 1;
+}
+
+.hero.is-link .tabs.is-boxed a, .hero.is-link .tabs.is-toggle a {
+ color: #fff;
+}
+
+.hero.is-link .tabs.is-boxed a:hover, .hero.is-link .tabs.is-toggle a:hover {
+ background-color: rgba(10, 10, 10, 0.1);
+}
+
+.hero.is-link .tabs.is-boxed li.is-active a, .hero.is-link .tabs.is-boxed li.is-active a:hover, .hero.is-link .tabs.is-toggle li.is-active a, .hero.is-link .tabs.is-toggle li.is-active a:hover {
+ background-color: #fff;
+ border-color: #fff;
+ color: #3273dc;
+}
+
+.hero.is-link.is-bold {
+ background-image: linear-gradient(141deg, #1577c6 0%, #3273dc 71%, #4366e5 100%);
+}
+
+@media screen and (max-width: 768px) {
+ .hero.is-link.is-bold .navbar-menu {
+ background-image: linear-gradient(141deg, #1577c6 0%, #3273dc 71%, #4366e5 100%);
+ }
+}
+
+.hero.is-info {
+ background-color: #209cee;
+ color: #fff;
+}
+
+.hero.is-info a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),
+.hero.is-info strong {
+ color: inherit;
+}
+
+.hero.is-info .title {
+ color: #fff;
+}
+
+.hero.is-info .subtitle {
+ color: rgba(255, 255, 255, 0.9);
+}
+
+.hero.is-info .subtitle a:not(.button),
+.hero.is-info .subtitle strong {
+ color: #fff;
+}
+
+@media screen and (max-width: 1023px) {
+ .hero.is-info .navbar-menu {
+ background-color: #209cee;
+ }
+}
+
+.hero.is-info .navbar-item,
+.hero.is-info .navbar-link {
+ color: rgba(255, 255, 255, 0.7);
+}
+
+.hero.is-info a.navbar-item:hover, .hero.is-info a.navbar-item.is-active,
+.hero.is-info .navbar-link:hover,
+.hero.is-info .navbar-link.is-active {
+ background-color: #118fe4;
+ color: #fff;
+}
+
+.hero.is-info .tabs a {
+ color: #fff;
+ opacity: 0.9;
+}
+
+.hero.is-info .tabs a:hover {
+ opacity: 1;
+}
+
+.hero.is-info .tabs li.is-active a {
+ opacity: 1;
+}
+
+.hero.is-info .tabs.is-boxed a, .hero.is-info .tabs.is-toggle a {
+ color: #fff;
+}
+
+.hero.is-info .tabs.is-boxed a:hover, .hero.is-info .tabs.is-toggle a:hover {
+ background-color: rgba(10, 10, 10, 0.1);
+}
+
+.hero.is-info .tabs.is-boxed li.is-active a, .hero.is-info .tabs.is-boxed li.is-active a:hover, .hero.is-info .tabs.is-toggle li.is-active a, .hero.is-info .tabs.is-toggle li.is-active a:hover {
+ background-color: #fff;
+ border-color: #fff;
+ color: #209cee;
+}
+
+.hero.is-info.is-bold {
+ background-image: linear-gradient(141deg, #04a6d7 0%, #209cee 71%, #3287f5 100%);
+}
+
+@media screen and (max-width: 768px) {
+ .hero.is-info.is-bold .navbar-menu {
+ background-image: linear-gradient(141deg, #04a6d7 0%, #209cee 71%, #3287f5 100%);
+ }
+}
+
+.hero.is-success {
+ background-color: #23d160;
+ color: #fff;
+}
+
+.hero.is-success a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),
+.hero.is-success strong {
+ color: inherit;
+}
+
+.hero.is-success .title {
+ color: #fff;
+}
+
+.hero.is-success .subtitle {
+ color: rgba(255, 255, 255, 0.9);
+}
+
+.hero.is-success .subtitle a:not(.button),
+.hero.is-success .subtitle strong {
+ color: #fff;
+}
+
+@media screen and (max-width: 1023px) {
+ .hero.is-success .navbar-menu {
+ background-color: #23d160;
+ }
+}
+
+.hero.is-success .navbar-item,
+.hero.is-success .navbar-link {
+ color: rgba(255, 255, 255, 0.7);
+}
+
+.hero.is-success a.navbar-item:hover, .hero.is-success a.navbar-item.is-active,
+.hero.is-success .navbar-link:hover,
+.hero.is-success .navbar-link.is-active {
+ background-color: #20bc56;
+ color: #fff;
+}
+
+.hero.is-success .tabs a {
+ color: #fff;
+ opacity: 0.9;
+}
+
+.hero.is-success .tabs a:hover {
+ opacity: 1;
+}
+
+.hero.is-success .tabs li.is-active a {
+ opacity: 1;
+}
+
+.hero.is-success .tabs.is-boxed a, .hero.is-success .tabs.is-toggle a {
+ color: #fff;
+}
+
+.hero.is-success .tabs.is-boxed a:hover, .hero.is-success .tabs.is-toggle a:hover {
+ background-color: rgba(10, 10, 10, 0.1);
+}
+
+.hero.is-success .tabs.is-boxed li.is-active a, .hero.is-success .tabs.is-boxed li.is-active a:hover, .hero.is-success .tabs.is-toggle li.is-active a, .hero.is-success .tabs.is-toggle li.is-active a:hover {
+ background-color: #fff;
+ border-color: #fff;
+ color: #23d160;
+}
+
+.hero.is-success.is-bold {
+ background-image: linear-gradient(141deg, #12af2f 0%, #23d160 71%, #2ce28a 100%);
+}
+
+@media screen and (max-width: 768px) {
+ .hero.is-success.is-bold .navbar-menu {
+ background-image: linear-gradient(141deg, #12af2f 0%, #23d160 71%, #2ce28a 100%);
+ }
+}
+
+.hero.is-warning {
+ background-color: #ffdd57;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.hero.is-warning a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),
+.hero.is-warning strong {
+ color: inherit;
+}
+
+.hero.is-warning .title {
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.hero.is-warning .subtitle {
+ color: rgba(0, 0, 0, 0.9);
+}
+
+.hero.is-warning .subtitle a:not(.button),
+.hero.is-warning .subtitle strong {
+ color: rgba(0, 0, 0, 0.7);
+}
+
+@media screen and (max-width: 1023px) {
+ .hero.is-warning .navbar-menu {
+ background-color: #ffdd57;
+ }
+}
+
+.hero.is-warning .navbar-item,
+.hero.is-warning .navbar-link {
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.hero.is-warning a.navbar-item:hover, .hero.is-warning a.navbar-item.is-active,
+.hero.is-warning .navbar-link:hover,
+.hero.is-warning .navbar-link.is-active {
+ background-color: #ffd83d;
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.hero.is-warning .tabs a {
+ color: rgba(0, 0, 0, 0.7);
+ opacity: 0.9;
+}
+
+.hero.is-warning .tabs a:hover {
+ opacity: 1;
+}
+
+.hero.is-warning .tabs li.is-active a {
+ opacity: 1;
+}
+
+.hero.is-warning .tabs.is-boxed a, .hero.is-warning .tabs.is-toggle a {
+ color: rgba(0, 0, 0, 0.7);
+}
+
+.hero.is-warning .tabs.is-boxed a:hover, .hero.is-warning .tabs.is-toggle a:hover {
+ background-color: rgba(10, 10, 10, 0.1);
+}
+
+.hero.is-warning .tabs.is-boxed li.is-active a, .hero.is-warning .tabs.is-boxed li.is-active a:hover, .hero.is-warning .tabs.is-toggle li.is-active a, .hero.is-warning .tabs.is-toggle li.is-active a:hover {
+ background-color: rgba(0, 0, 0, 0.7);
+ border-color: rgba(0, 0, 0, 0.7);
+ color: #ffdd57;
+}
+
+.hero.is-warning.is-bold {
+ background-image: linear-gradient(141deg, #ffaf24 0%, #ffdd57 71%, #fffa70 100%);
+}
+
+@media screen and (max-width: 768px) {
+ .hero.is-warning.is-bold .navbar-menu {
+ background-image: linear-gradient(141deg, #ffaf24 0%, #ffdd57 71%, #fffa70 100%);
+ }
+}
+
+.hero.is-danger {
+ background-color: #ff3860;
+ color: #fff;
+}
+
+.hero.is-danger a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),
+.hero.is-danger strong {
+ color: inherit;
+}
+
+.hero.is-danger .title {
+ color: #fff;
+}
+
+.hero.is-danger .subtitle {
+ color: rgba(255, 255, 255, 0.9);
+}
+
+.hero.is-danger .subtitle a:not(.button),
+.hero.is-danger .subtitle strong {
+ color: #fff;
+}
+
+@media screen and (max-width: 1023px) {
+ .hero.is-danger .navbar-menu {
+ background-color: #ff3860;
+ }
+}
+
+.hero.is-danger .navbar-item,
+.hero.is-danger .navbar-link {
+ color: rgba(255, 255, 255, 0.7);
+}
+
+.hero.is-danger a.navbar-item:hover, .hero.is-danger a.navbar-item.is-active,
+.hero.is-danger .navbar-link:hover,
+.hero.is-danger .navbar-link.is-active {
+ background-color: #ff1f4b;
+ color: #fff;
+}
+
+.hero.is-danger .tabs a {
+ color: #fff;
+ opacity: 0.9;
+}
+
+.hero.is-danger .tabs a:hover {
+ opacity: 1;
+}
+
+.hero.is-danger .tabs li.is-active a {
+ opacity: 1;
+}
+
+.hero.is-danger .tabs.is-boxed a, .hero.is-danger .tabs.is-toggle a {
+ color: #fff;
+}
+
+.hero.is-danger .tabs.is-boxed a:hover, .hero.is-danger .tabs.is-toggle a:hover {
+ background-color: rgba(10, 10, 10, 0.1);
+}
+
+.hero.is-danger .tabs.is-boxed li.is-active a, .hero.is-danger .tabs.is-boxed li.is-active a:hover, .hero.is-danger .tabs.is-toggle li.is-active a, .hero.is-danger .tabs.is-toggle li.is-active a:hover {
+ background-color: #fff;
+ border-color: #fff;
+ color: #ff3860;
+}
+
+.hero.is-danger.is-bold {
+ background-image: linear-gradient(141deg, #ff0561 0%, #ff3860 71%, #ff5257 100%);
+}
+
+@media screen and (max-width: 768px) {
+ .hero.is-danger.is-bold .navbar-menu {
+ background-image: linear-gradient(141deg, #ff0561 0%, #ff3860 71%, #ff5257 100%);
+ }
+}
+
+.hero.is-small .hero-body {
+ padding-bottom: 1.5rem;
+ padding-top: 1.5rem;
+}
+
+@media screen and (min-width: 769px), print {
+ .hero.is-medium .hero-body {
+ padding-bottom: 9rem;
+ padding-top: 9rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .hero.is-large .hero-body {
+ padding-bottom: 18rem;
+ padding-top: 18rem;
+ }
+}
+
+.hero.is-halfheight .hero-body, .hero.is-fullheight .hero-body, .hero.is-fullheight-with-navbar .hero-body {
+ align-items: center;
+ display: flex;
+}
+
+.hero.is-halfheight .hero-body > .container, .hero.is-fullheight .hero-body > .container, .hero.is-fullheight-with-navbar .hero-body > .container {
+ flex-grow: 1;
+ flex-shrink: 1;
+}
+
+.hero.is-halfheight {
+ min-height: 50vh;
+}
+
+.hero.is-fullheight {
+ min-height: 100vh;
+}
+
+.hero-video {
+ overflow: hidden;
+}
+
+.hero-video video {
+ left: 50%;
+ min-height: 100%;
+ min-width: 100%;
+ position: absolute;
+ top: 50%;
+ -webkit-transform: translate3d(-50%, -50%, 0);
+ transform: translate3d(-50%, -50%, 0);
+}
+
+.hero-video.is-transparent {
+ opacity: 0.3;
+}
+
+@media screen and (max-width: 768px) {
+ .hero-video {
+ display: none;
+ }
+}
+
+.hero-buttons {
+ margin-top: 1.5rem;
+}
+
+@media screen and (max-width: 768px) {
+ .hero-buttons .button {
+ display: flex;
+ }
+ .hero-buttons .button:not(:last-child) {
+ margin-bottom: 0.75rem;
+ }
+}
+
+@media screen and (min-width: 769px), print {
+ .hero-buttons {
+ display: flex;
+ justify-content: center;
+ }
+ .hero-buttons .button:not(:last-child) {
+ margin-right: 1.5rem;
+ }
+}
+
+.hero-head,
+.hero-foot {
+ flex-grow: 0;
+ flex-shrink: 0;
+}
+
+.hero-body {
+ flex-grow: 1;
+ flex-shrink: 0;
+ padding: 3rem 1.5rem;
+}
+
+.section {
+ padding: 3rem 1.5rem;
+}
+
+@media screen and (min-width: 1024px) {
+ .section.is-medium {
+ padding: 9rem 1.5rem;
+ }
+ .section.is-large {
+ padding: 18rem 1.5rem;
+ }
+}
+
+.footer {
+ background-color: #fafafa;
+ padding: 3rem 1.5rem 6rem;
+}
+/*# sourceMappingURL=bulma.css.map */
\ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/www/css/bulma-0.7.5/bulma.css.map b/testing/web-platform/tests/tools/wave/www/css/bulma-0.7.5/bulma.css.map
new file mode 100644
index 0000000000..33bb0c7776
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/css/bulma-0.7.5/bulma.css.map
@@ -0,0 +1 @@
+{"version":3,"sources":["../bulma.sass","../sass/utilities/animations.sass","bulma.css","../sass/utilities/mixins.sass","../sass/utilities/initial-variables.sass","../sass/utilities/controls.sass","../sass/base/minireset.sass","../sass/base/generic.sass","../sass/base/helpers.sass","../sass/elements/box.sass","../sass/elements/button.sass","../sass/utilities/functions.sass","../sass/elements/container.sass","../sass/elements/content.sass","../sass/elements/icon.sass","../sass/elements/image.sass","../sass/elements/notification.sass","../sass/elements/progress.sass","../sass/elements/table.sass","../sass/elements/tag.sass","../sass/elements/title.sass","../sass/elements/other.sass","../sass/form/shared.sass","../sass/form/input-textarea.sass","../sass/form/checkbox-radio.sass","../sass/form/select.sass","../sass/form/file.sass","../sass/form/tools.sass","../sass/components/breadcrumb.sass","../sass/components/card.sass","../sass/components/dropdown.sass","../sass/components/level.sass","../sass/components/list.sass","../sass/components/media.sass","../sass/components/menu.sass","../sass/components/message.sass","../sass/components/modal.sass","../sass/components/navbar.sass","../sass/components/pagination.sass","../sass/components/panel.sass","../sass/components/tabs.sass","../sass/grid/columns.sass","../sass/grid/tiles.sass","../sass/layout/hero.sass","../sass/layout/section.sass","../sass/layout/footer.sass"],"names":[],"mappings":"AACA,6DAAA;ACDA;EACE;IACE,+BAAuB;YAAvB,uBAAuB;ECEzB;EDDA;IACE,iCAAyB;YAAzB,yBAAyB;ECG3B;AACF;ADRA;EACE;IACE,+BAAuB;YAAvB,uBAAuB;ECEzB;EDDA;IACE,iCAAyB;YAAzB,yBAAyB;ECG3B;AACF;;ACmIA;;;;EANE,2BAA2B;EAC3B,yBAAyB;EACzB,sBAAsB;EACtB,qBAAqB;EACrB,iBAAiB;ADtHnB;;AC2IA;EAfE,6BAD8B;EAE9B,kBAAkB;EAClB,eAAe;EACf,aAAa;EACb,YAAY;EACZ,cAAc;EACd,eAAe;EACf,qBAAqB;EACrB,oBAAoB;EACpB,kBAAkB;EAClB,QAAQ;EACR,iCAAyB;UAAzB,yBAAyB;EACzB,gCAAwB;UAAxB,wBAAwB;EACxB,cAAc;ADxHhB;;AC8HE;;EACE,qBCpHkB;AFNtB;;ACiMA;EAhEE,qBAAqB;EACrB,wBAAwB;EACxB,uCC1K2B;ED2K3B,YAAY;EACZ,uBCxGuB;EDyGvB,eAAe;EACf,oBAAoB;EACpB,qBAAqB;EACrB,YAAY;EACZ,cAAc;EACd,YAAY;EACZ,YAAY;EACZ,gBAAgB;EAChB,eAAe;EACf,gBAAgB;EAChB,eAAe;EACf,aAAa;EACb,kBAAkB;EAClB,mBAAmB;EACnB,WAAW;AD7Hb;;AC8HE;EAEE,uBClL2B;EDmL3B,WAAW;EACX,cAAc;EACd,SAAS;EACT,kBAAkB;EAClB,QAAQ;EACR,kEAA0D;UAA1D,0DAA0D;EAC1D,uCAA+B;UAA/B,+BAA+B;AD5HnC;;AC6HE;EACE,WAAW;EACX,UAAU;AD1Hd;;AC2HE;EACE,WAAW;EACX,UAAU;ADxHd;;ACyHE;EAEE,uCC9MyB;AFuF7B;;ACwHE;EACE,uCChNyB;AF2F7B;;ACuHE;EACE,YAAY;EACZ,gBAAgB;EAChB,eAAe;EACf,gBAAgB;EAChB,eAAe;EACf,WAAW;ADpHf;;ACqHE;EACE,YAAY;EACZ,gBAAgB;EAChB,eAAe;EACf,gBAAgB;EAChB,eAAe;EACf,WAAW;ADlHf;;ACmHE;EACE,YAAY;EACZ,gBAAgB;EAChB,eAAe;EACf,gBAAgB;EAChB,eAAe;EACf,WAAW;ADhHf;;ACiIA;EAXE,mDAA2C;UAA3C,2CAA2C;EAC3C,yBCrO4B;EDsO5B,uBC1KuB;ED2KvB,+BAA+B;EAC/B,6BAA6B;EAC7B,WAAW;EACX,cAAc;EACd,WAAW;EACX,kBAAkB;EAClB,UAAU;ADlHZ;;AC8HA;;;;;;;;;;;;;;;;;EANE,SADuB;EAEvB,OAFuB;EAGvB,kBAAkB;EAClB,QAJuB;EAKvB,MALuB;AD/FzB;;AGtHA;;;;;EA3BE,qBAAqB;EACrB,wBAAwB;EACxB,mBAAmB;EACnB,6BAA+C;EAC/C,kBDoDU;ECnDV,gBAAgB;EAChB,oBAAoB;EACpB,eDiBW;EChBX,cAfqB;EAgBrB,2BAA2B;EAC3B,gBAhBuB;EAiBvB,mCAfiE;EAgBjE,iCAfmE;EAgBnE,kCAhBmE;EAiBnE,gCAlBiE;EAmBjE,kBAAkB;EAClB,mBAAmB;AHyJrB;;AGvJE;;;;;;;;;;;;;;;;;EAIE,aAAa;AHuKjB;;AGtKE;;;;;;;;;;;;;;;;EAEE,mBAAmB;AHuLvB;;AI5NA,0EAAA;AAEA;;;;;;;;;;;;;;;;;;;;;;;EAuBE,SAAS;EACT,UAAU;AJ8NZ;;AI3NA;;;;;;EAME,eAAe;EACf,mBAAmB;AJ8NrB;;AI3NA;EACE,gBAAgB;AJ8NlB;;AI3NA;;;;EAIE,SAAS;AJ8NX;;AI3NA;EACE,sBAAsB;AJ8NxB;;AI5NA;EAII,mBAAmB;AJ4NvB;;AIzNA;;;;;EAKE,YAAY;EACZ,eAAe;AJ4NjB;;AI1NA;EACE,eAAe;AJ6NjB;;AI1NA;EACE,SAAS;AJ6NX;;AI1NA;EACE,y
BAAyB;EACzB,iBAAiB;AJ6NnB;;AI3NA;;EAEE,UAAU;AJ8NZ;;AIhOA;;EAII,gBAAgB;AJiOpB;;AInQA;EClBE,uBHlB6B;EGmB7B,eAhCc;EAiCd,kCAAkC;EAClC,mCAAmC;EACnC,gBAlCoB;EAmCpB,kBAhCsB;EAiCtB,kBAhCsB;EAiCtB,kCApCiC;EAqCjC,8BAAsB;KAAtB,2BAAsB;MAAtB,0BAAsB;UAAtB,sBAAsB;ALyRxB;;AKvRA;;;;;;;EAOE,cAAc;AL0RhB;;AKxRA;;;;;EAKE,oLH7ByL;AFwT3L;;AKzRA;;EAEE,6BAA6B;EAC7B,4BAA4B;EAC5B,sBHlC0B;AF8T5B;;AK1RA;EACE,cH1D4B;EG2D5B,cAzDkB;EA0DlB,gBH3BiB;EG4BjB,gBAzDoB;ALsVtB;;AKzRA;EACE,cHpDgC;EGqDhC,eAAe;EACf,qBAAqB;AL4RvB;;AK/RA;EAKI,mBAAmB;AL8RvB;;AKnSA;EAOI,cHzE0B;AFyW9B;;AK9RA;EACE,4BHtE4B;EGuE5B,cH5DgC;EG6DhC,kBApEiB;EAqEjB,mBAtEkB;EAuElB,4BAxEgC;ALyWlC;;AK/RA;EACE,4BH7E4B;EG8E5B,YAAY;EACZ,cAAc;EACd,WAvEa;EAwEb,gBAvEkB;ALyWpB;;AKhSA;EACE,YAAY;EACZ,eAAe;ALmSjB;;AKjSA;;EAEE,wBAAwB;ALoS1B;;AKlSA;EACE,kBAtFuB;AL2XzB;;AKnSA;EACE,mBAAmB;EACnB,oBAAoB;ALsStB;;AKpSA;EACE,cHzG4B;EG0G5B,gBHrEe;AF4WjB;;AKnSA;EACE,YAAY;ALsSd;;AKpSA;EJzDE,iCAAiC;EI2DjC,4BH7G4B;EG8G5B,cHnH4B;EGoH5B,kBAhGqB;EAiGrB,gBAAgB;EAChB,uBAjG0B;EAkG1B,gBAAgB;EAChB,iBAAiB;ALuSnB;;AK/SA;EAUI,6BAA6B;EAC7B,mBAAmB;EACnB,cAtGoB;EAuGpB,UAAU;ALySd;;AKvSA;;EAGI,mBAAmB;ALySvB;;AK5SA;;EAKM,gBAAgB;AL4StB;;AKjTA;EAOI,cHvI0B;AFqb9B;;ACxbE;EACE,WAAW;EACX,YAAY;EACZ,cAAc;AD2blB;;AM5bA;EACE,sBAAsB;AN+bxB;;AM7bA;EACE,uBAAuB;ANgczB;;AM5bA;EACE,2BAA2B;AN+b7B;;AMnbI;EACE,0BAA2B;ANsbjC;;AMvbI;EACE,4BAA2B;AN0bjC;;AM3bI;EACE,0BAA2B;AN8bjC;;AM/bI;EACE,4BAA2B;ANkcjC;;AMncI;EACE,6BAA2B;ANscjC;;AMvcI;EACE,0BAA2B;AN0cjC;;AM3cI;EACE,6BAA2B;AN8cjC;;ACxZE;EKvDE;IACE,0BAA2B;ENmd/B;EMpdE;IACE,4BAA2B;ENsd/B;EMvdE;IACE,0BAA2B;ENyd/B;EM1dE;IACE,4BAA2B;EN4d/B;EM7dE;IACE,6BAA2B;EN+d/B;EMheE;IACE,0BAA2B;ENke/B;EMneE;IACE,6BAA2B;ENqe/B;AACF;;AC5aE;EK3DE;IACE,0BAA2B;EN2e/B;EM5eE;IACE,4BAA2B;EN8e/B;EM/eE;IACE,0BAA2B;ENif/B;EMlfE;IACE,4BAA2B;ENof/B;EMrfE;IACE,6BAA2B;ENuf/B;EMxfE;IACE,0BAA2B;EN0f/B;EM3fE;IACE,6BAA2B;EN6f/B;AACF;;AC5bE;EKnEE;IACE,0BAA2B;ENmgB/B;EMpgBE;IACE,4BAA2B;ENsgB/B;EMvgBE;IACE,0BAA2B;ENygB/B;EM1gBE;IACE,4BAA2B;EN4gB/B;EM7gBE;IACE,6BAA2B;EN+gB/B;EMhhBE;IACE,0BAA2B;ENkhB/B;EMnhBE;IACE,6BAA2B;ENqhB/B;AACF;;AChdE;EKvEE;IACE,0BAA2B;EN2hB/B;EM5hBE;IACE,4BAA2B;EN8hB/B;EM/hBE;IACE,0BAA2B;ENiiB/B;EMliBE;IACE,4BAA2B;ENoiB/B;EMriBE;IACE,6BAA2B;ENuiB/B;EMxiBE;IACE,0BAA2B;EN0iB/B;EM3iBE;IACE,6BAA2B;EN6iB/B;AACF;;ACzdI;EKtFA;IACE,0BAA2B;ENmjB/B;EMpjBE;IACE,4BAA2B;ENsjB/B;EMvjBE;IACE,0BAA2B;ENyjB/B;EM1jBE;IACE,4BAA2B;EN4jB/B;EM7jBE;IACE,6BAA2B;EN+jB/B;EMhkBE;IACE,0BAA2B;ENkkB/B;EMnkBE;IACE,6BAA2B;ENqkB/B;AACF;;ACleI;EKrGA;IACE,0BAA2B;EN2kB/B;EM5kBE;IACE,4BAA2B;EN8kB/B;EM/kBE;IACE,0BAA2B;ENilB/B;EMllBE;IACE,4BAA2B;ENolB/B;EMrlBE;IACE,6BAA2B;ENulB/B;EMxlBE;IACE,0BAA2B;EN0lB/B;EM3lBE;IACE,6BAA2B;EN6lB/B;AACF;;AMrkBE;EACE,6BAAqC;ANwkBzC;;AMzkBE;EACE,8BAAqC;AN4kBzC;;AM7kBE;EACE,2BAAqC;ANglBzC;;AMjlBE;EACE,4BAAqC;ANolBzC;;ACxjBE;EKxBE;IACE,6BAAqC;ENolBzC;AACF;;AC1jBE;EKzBE;IACE,6BAAqC;ENulBzC;AACF;;AC5jBE;EK1BE;IACE,6BAAqC;EN0lBzC;AACF;;AC9jBE;EK3BE;IACE,6BAAqC;EN6lBzC;AACF;;AChkBE;EK5BE;IACE,6BAAqC;ENgmBzC;AACF;;ACjkBI;EK9BA;IACE,6BAAqC;ENmmBzC;AACF;;AC7jBI;EKrCA;IACE,6BAAqC;ENsmBzC;AACF;;AC9jBI;EKvCA;IACE,6BAAqC;ENymBzC;AACF;;AC1jBI;EK9CA;IACE,6BAAqC;EN4mBzC;AACF;;AC9mBE;EKxBE;IACE,8BAAqC;EN0oBzC;AACF;;AChnBE;EKzBE;IACE,8BAAqC;EN6oBzC;AACF;;AClnBE;EK1BE;IACE,8BAAqC;ENgpBzC;AACF;;ACpnBE;EK3BE;IACE,8BAAqC;ENmpBzC;AACF;;ACtnBE;EK5BE;IACE,8BAAqC;ENspBzC;AACF;;ACvnBI;EK9BA;IACE,8BAAqC;ENypBzC;AACF;;ACnnBI;EKrCA;IACE,8BAAqC;EN4pBzC;AACF;;ACpnBI;EKvCA;IACE,8BAAqC;EN+pBzC;AACF;;AChnBI;EK9CA;IACE,8BAAqC;ENkqBzC;AACF;;ACpqBE;EKxBE;IACE,2BAAqC;ENgsBzC;AACF;;ACtqBE;EKzBE;IACE,2BAAqC;ENmsBzC;AACF;;ACxqBE;EK1BE;IACE,2BAA
qC;ENssBzC;AACF;;AC1qBE;EK3BE;IACE,2BAAqC;ENysBzC;AACF;;AC5qBE;EK5BE;IACE,2BAAqC;EN4sBzC;AACF;;AC7qBI;EK9BA;IACE,2BAAqC;EN+sBzC;AACF;;ACzqBI;EKrCA;IACE,2BAAqC;ENktBzC;AACF;;AC1qBI;EKvCA;IACE,2BAAqC;ENqtBzC;AACF;;ACtqBI;EK9CA;IACE,2BAAqC;ENwtBzC;AACF;;AC1tBE;EKxBE;IACE,4BAAqC;ENsvBzC;AACF;;AC5tBE;EKzBE;IACE,4BAAqC;ENyvBzC;AACF;;AC9tBE;EK1BE;IACE,4BAAqC;EN4vBzC;AACF;;AChuBE;EK3BE;IACE,4BAAqC;EN+vBzC;AACF;;ACluBE;EK5BE;IACE,4BAAqC;ENkwBzC;AACF;;ACnuBI;EK9BA;IACE,4BAAqC;ENqwBzC;AACF;;AC/tBI;EKrCA;IACE,4BAAqC;ENwwBzC;AACF;;AChuBI;EKvCA;IACE,4BAAqC;EN2wBzC;AACF;;AC5tBI;EK9CA;IACE,4BAAqC;EN8wBzC;AACF;;AM7wBA;EACE,qCAAqC;ANgxBvC;;AM9wBA;EACE,oCAAoC;ANixBtC;;AM/wBA;EACE,oCAAoC;ANkxBtC;;AMhxBA;EACE,6BAA6B;ANmxB/B;;AM/wBE;EACE,uBAAwB;ANkxB5B;;AMjxBE;EAGI,yBAAqC;ANkxB3C;;AMjxBE;EACE,kCAAmC;ANoxBvC;;AM3xBE;EACE,yBAAwB;AN8xB5B;;AM7xBE;EAGI,uBAAqC;AN8xB3C;;AM7xBE;EACE,oCAAmC;ANgyBvC;;AMvyBE;EACE,4BAAwB;AN0yB5B;;AMzyBE;EAGI,yBAAqC;AN0yB3C;;AMzyBE;EACE,uCAAmC;AN4yBvC;;AMnzBE;EACE,yBAAwB;ANszB5B;;AMrzBE;EAGI,yBAAqC;ANszB3C;;AMrzBE;EACE,oCAAmC;ANwzBvC;;AM/zBE;EACE,yBAAwB;ANk0B5B;;AMj0BE;EAGI,yBAAqC;ANk0B3C;;AMj0BE;EACE,oCAAmC;ANo0BvC;;AM30BE;EACE,yBAAwB;AN80B5B;;AM70BE;EAGI,yBAAqC;AN80B3C;;AM70BE;EACE,oCAAmC;ANg1BvC;;AMv1BE;EACE,yBAAwB;AN01B5B;;AMz1BE;EAGI,yBAAqC;AN01B3C;;AMz1BE;EACE,oCAAmC;AN41BvC;;AMn2BE;EACE,yBAAwB;ANs2B5B;;AMr2BE;EAGI,yBAAqC;ANs2B3C;;AMr2BE;EACE,oCAAmC;ANw2BvC;;AM/2BE;EACE,yBAAwB;ANk3B5B;;AMj3BE;EAGI,yBAAqC;ANk3B3C;;AMj3BE;EACE,oCAAmC;ANo3BvC;;AM33BE;EACE,yBAAwB;AN83B5B;;AM73BE;EAGI,yBAAqC;AN83B3C;;AM73BE;EACE,oCAAmC;ANg4BvC;;AM73BE;EACE,yBAAwB;ANg4B5B;;AM/3BE;EACE,oCAAmC;ANk4BvC;;AMr4BE;EACE,yBAAwB;ANw4B5B;;AMv4BE;EACE,oCAAmC;AN04BvC;;AM74BE;EACE,yBAAwB;ANg5B5B;;AM/4BE;EACE,oCAAmC;ANk5BvC;;AMr5BE;EACE,yBAAwB;ANw5B5B;;AMv5BE;EACE,oCAAmC;AN05BvC;;AM75BE;EACE,yBAAwB;ANg6B5B;;AM/5BE;EACE,oCAAmC;ANk6BvC;;AMr6BE;EACE,yBAAwB;ANw6B5B;;AMv6BE;EACE,oCAAmC;AN06BvC;;AM76BE;EACE,yBAAwB;ANg7B5B;;AM/6BE;EACE,oCAAmC;ANk7BvC;;AMr7BE;EACE,4BAAwB;ANw7B5B;;AMv7BE;EACE,uCAAmC;AN07BvC;;AM77BE;EACE,yBAAwB;ANg8B5B;;AM/7BE;EACE,oCAAmC;ANk8BvC;;AMh8BA;EACE,2BAAqC;ANm8BvC;;AMl8BA;EACE,2BAAsC;ANq8BxC;;AMp8BA;EACE,2BAAsC;ANu8BxC;;AMt8BA;EACE,2BAAwC;ANy8B1C;;AMx8BA;EACE,2BAAoC;AN28BtC;;AMz8BA;EACE,+LAAuC;AN48BzC;;AM18BA;EACE,+LAAyC;AN68B3C;;AM38BA;EACE,+LAA0C;AN88B5C;;AM58BA;EACE,iCAAyC;AN+8B3C;;AM78BA;EACE,iCAAoC;ANg9BtC;;AMz8BE;EACE,yBAA+B;AN48BnC;;AC5gCE;EKkEE;IACE,yBAA+B;EN88BnC;AACF;;AC9gCE;EKiEE;IACE,yBAA+B;ENi9BnC;AACF;;AChhCE;EKgEE;IACE,yBAA+B;ENo9BnC;AACF;;AClhCE;EK+DE;IACE,yBAA+B;ENu9BnC;AACF;;ACphCE;EK8DE;IACE,yBAA+B;EN09BnC;AACF;;ACrhCI;EK4DA;IACE,yBAA+B;EN69BnC;AACF;;ACjhCI;EKqDA;IACE,yBAA+B;ENg+BnC;AACF;;AClhCI;EKmDA;IACE,yBAA+B;ENm+BnC;AACF;;AC9gCI;EK4CA;IACE,yBAA+B;ENs+BnC;AACF;;AMngCE;EACE,wBAA+B;ANsgCnC;;ACtkCE;EKkEE;IACE,wBAA+B;ENwgCnC;AACF;;ACxkCE;EKiEE;IACE,wBAA+B;EN2gCnC;AACF;;AC1kCE;EKgEE;IACE,wBAA+B;EN8gCnC;AACF;;AC5kCE;EK+DE;IACE,wBAA+B;ENihCnC;AACF;;AC9kCE;EK8DE;IACE,wBAA+B;ENohCnC;AACF;;AC/kCI;EK4DA;IACE,wBAA+B;ENuhCnC;AACF;;AC3kCI;EKqDA;IACE,wBAA+B;EN0hCnC;AACF;;AC5kCI;EKmDA;IACE,wBAA+B;EN6hCnC;AACF;;ACxkCI;EK4CA;IACE,wBAA+B;ENgiCnC;AACF;;AM7jCE;EACE,0BAA+B;ANgkCnC;;AChoCE;EKkEE;IACE,0BAA+B;ENkkCnC;AACF;;ACloCE;EKiEE;IACE,0BAA+B;ENqkCnC;AACF;;ACpoCE;EKgEE;IACE,0BAA+B;ENwkCnC;AACF;;ACtoCE;EK+DE;IACE,0BAA+B;EN2kCnC;AACF;;ACxoCE;EK8DE;IACE,0BAA+B;EN8kCnC;AACF;;ACzoCI;EK4DA;IACE,0BAA+B;ENilCnC;AACF;;ACroCI;EKqDA;IACE,0BAA+B;ENolCnC;AACF;;ACtoCI;EKmDA;IACE,0BAA+B;ENulCnC;AACF;;ACloCI;EK4CA;IACE,0BAA+B;EN0lCnC;AACF;;AMvnCE;EACE,gCAA+B;AN0nCnC;;AC1rCE;EKkEE;IACE,gCAA+B;EN4nCnC;AACF;;AC5rCE;EKiEE;
IACE,gCAA+B;EN+nCnC;AACF;;AC9rCE;EKgEE;IACE,gCAA+B;ENkoCnC;AACF;;AChsCE;EK+DE;IACE,gCAA+B;ENqoCnC;AACF;;AClsCE;EK8DE;IACE,gCAA+B;ENwoCnC;AACF;;ACnsCI;EK4DA;IACE,gCAA+B;EN2oCnC;AACF;;AC/rCI;EKqDA;IACE,gCAA+B;EN8oCnC;AACF;;AChsCI;EKmDA;IACE,gCAA+B;ENipCnC;AACF;;AC5rCI;EK4CA;IACE,gCAA+B;ENopCnC;AACF;;AMjrCE;EACE,+BAA+B;ANorCnC;;ACpvCE;EKkEE;IACE,+BAA+B;ENsrCnC;AACF;;ACtvCE;EKiEE;IACE,+BAA+B;ENyrCnC;AACF;;ACxvCE;EKgEE;IACE,+BAA+B;EN4rCnC;AACF;;AC1vCE;EK+DE;IACE,+BAA+B;EN+rCnC;AACF;;AC5vCE;EK8DE;IACE,+BAA+B;ENksCnC;AACF;;AC7vCI;EK4DA;IACE,+BAA+B;ENqsCnC;AACF;;ACzvCI;EKqDA;IACE,+BAA+B;ENwsCnC;AACF;;AC1vCI;EKmDA;IACE,+BAA+B;EN2sCnC;AACF;;ACtvCI;EK4CA;IACE,+BAA+B;EN8sCnC;AACF;;AM7sCA;EACE,wBAAwB;ANgtC1B;;AM9sCA;EACE,uBAAuB;EACvB,iCAAiC;EACjC,yBAAyB;EACzB,2BAA2B;EAC3B,qBAAqB;EACrB,6BAA6B;EAC7B,8BAA8B;EAC9B,wBAAwB;ANitC1B;;ACzzCE;EK2GA;IACE,wBAAwB;ENktC1B;AACF;;AC3zCE;EK2GA;IACE,wBAAwB;ENotC1B;AACF;;AC7zCE;EK2GA;IACE,wBAAwB;ENstC1B;AACF;;AC/zCE;EK2GA;IACE,wBAAwB;ENwtC1B;AACF;;ACj0CE;EK2GA;IACE,wBAAwB;EN0tC1B;AACF;;ACl0CI;EK0GF;IACE,wBAAwB;EN4tC1B;AACF;;AC9zCI;EKoGF;IACE,wBAAwB;EN8tC1B;AACF;;AC/zCI;EKmGF;IACE,wBAAwB;ENguC1B;AACF;;AC3zCI;EK6FF;IACE,wBAAwB;ENkuC1B;AACF;;AMjuCA;EACE,6BAA6B;ANouC/B;;ACn3CE;EKkJA;IACE,6BAA6B;ENquC/B;AACF;;ACr3CE;EKkJA;IACE,6BAA6B;ENuuC/B;AACF;;ACv3CE;EKkJA;IACE,6BAA6B;ENyuC/B;AACF;;ACz3CE;EKkJA;IACE,6BAA6B;EN2uC/B;AACF;;AC33CE;EKkJA;IACE,6BAA6B;EN6uC/B;AACF;;AC53CI;EKiJF;IACE,6BAA6B;EN+uC/B;AACF;;ACx3CI;EK2IF;IACE,6BAA6B;ENivC/B;AACF;;ACz3CI;EK0IF;IACE,6BAA6B;ENmvC/B;AACF;;ACr3CI;EKoIF;IACE,6BAA6B;ENqvC/B;AACF;;AMlvCA;EACE,oBAAoB;ANqvCtB;;AMnvCA;EACE,qBAAqB;ANsvCvB;;AMpvCA;EACE,2BAA2B;ANuvC7B;;AMrvCA;EACE,2BAA2B;ANwvC7B;;AMnvCA;EACE,6BAA6B;ANsvC/B;;AOrgDA;EAEE,uBLG6B;EKF7B,kBLyDgB;EKxDhB,4ELX2B;EKY3B,cLP4B;EKQ5B,cAAc;EACd,gBAZmB;APmhDrB;;AOrgDA;EAGI,8DLA8B;AFsgDlC;;AOzgDA;EAKI,oELF8B;AF0gDlC;;AQl/CA;EAGE,uBNlC6B;EMmC7B,qBNvC4B;EMwC5B,iBL/CwB;EKgDxB,cN7C4B;EM8C5B,eAAe;EAGf,uBAAuB;EACvB,mCAjD+D;EAkD/D,oBAjDgC;EAkDhC,qBAlDgC;EAmDhC,gCApD+D;EAqD/D,kBAAkB;EAClB,mBAAmB;ARi/CrB;;AQjgDA;EAkBI,cAAc;ARm/ClB;;AQrgDA;EAwBM,aAAa;EACb,YAAY;ARi/ClB;;AQ1gDA;EA2BM,iCAAqD;EACrD,sBAAsB;ARm/C5B;;AQ/gDA;EA8BM,qBAAqB;EACrB,kCAAsD;ARq/C5D;;AQphDA;EAiCM,iCAAqD;EACrD,kCAAsD;ARu/C5D;;AQzhDA;EAsCI,qBN1E0B;EM2E1B,cN9E0B;AFqkD9B;;AQ9hDA;EA0CI,qBNlE8B;EMmE9B,cNlF0B;AF0kD9B;;AQniDA;EA6CM,kDNrE4B;AF+jDlC;;AQviDA;EAgDI,qBNtF0B;EMuF1B,cNxF0B;AFmlD9B;;AQ5iDA;EAoDI,6BAA6B;EAC7B,yBAAyB;EACzB,cN5F0B;EM6F1B,0BAA0B;AR4/C9B;;AQnjDA;EA4DM,4BN7FwB;EM8FxB,cNpGwB;AF+lD9B;;AQxjDA;EAgEM,yBAAiE;EACjE,cNxGwB;AFomD9B;;AQ7jDA;;EAoEM,6BAA6B;EAC7B,yBAAyB;EACzB,gBAAgB;AR8/CtB;;AQpkDA;EA2EM,uBN1GyB;EM2GzB,yBAAyB;EACzB,cNxHuB;AFqnD7B;;AQ1kDA;EAgFQ,yBAAsC;EACtC,yBAAyB;EACzB,cN7HqB;AF2nD7B;;AQhlDA;EAqFQ,yBAAyB;EACzB,cNjIqB;AFgoD7B;;AQrlDA;EAwFU,mDNvHqB;AFwnD/B;;AQzlDA;EA2FQ,yBAAoC;EACpC,yBAAyB;EACzB,cNxIqB;AF0oD7B;;AQ/lDA;;EAgGQ,uBN/HuB;EMgIvB,yBAAyB;EACzB,gBAAgB;ARogDxB;;AQtmDA;EAoGQ,yBN/IqB;EMgJrB,YNpIuB;AF0oD/B;;AQ3mDA;EAwGU,uBAA2C;ARugDrD;;AQ/mDA;;EA2GU,yBNtJmB;EMuJnB,yBAAyB;EACzB,gBAAgB;EAChB,YN7IqB;AFspD/B;;AQvnDA;EAiHU,gEAA4E;AR0gDtF;;AQ3nDA;EAmHQ,6BAA6B;EAC7B,mBNnJuB;EMoJvB,YNpJuB;AFgqD/B;;AQjoDA;EA0HU,uBNzJqB;EM0JrB,mBN1JqB;EM2JrB,cNvKmB;AFkrD7B;;AQvoDA;EA+HY,4DAA8D;AR4gD1E;;AQ3oDA;EAqIc,gEAA4E;AR0gD1F;;AQ/oDA;;EAwIU,6BAA6B;EAC7B,mBNxKqB;EMyKrB,gBAAgB;EAChB,YN1KqB;AFsrD/B;;AQvpDA;EA6IQ,6BAA6B;EAC7B,qBNzLqB;EM0LrB,cN1LqB;AFwsD7B;;AQ7pDA;EAoJU,yBN/LmB;EMgMnB,YNpLqB;AFisD/B;;AQlqDA;EA4Jc,4DAA8D;AR0gD5E;;AQtqDA;;EA+JU,6BAA6B;EAC7B,qBN3MmB;EM4MnB,gBAAgB;EAChB,cN7MmB;AFytD7B;;AQ9qDA;EA2EM,yBNtHuB;EMuHvB,yBAAyB;EACzB,YN5GyB;AFm
tD/B;;AQprDA;EAgFQ,yBAAsC;EACtC,yBAAyB;EACzB,YNjHuB;AFytD/B;;AQ1rDA;EAqFQ,yBAAyB;EACzB,YNrHuB;AF8tD/B;;AQ/rDA;EAwFU,gDNnImB;AF8uD7B;;AQnsDA;EA2FQ,uBAAoC;EACpC,yBAAyB;EACzB,YN5HuB;AFwuD/B;;AQzsDA;;EAgGQ,yBN3IqB;EM4IrB,yBAAyB;EACzB,gBAAgB;AR8mDxB;;AQhtDA;EAoGQ,uBNnIuB;EMoIvB,cNhJqB;AFgwD7B;;AQrtDA;EAwGU,yBAA2C;ARinDrD;;AQztDA;;EA2GU,uBN1IqB;EM2IrB,yBAAyB;EACzB,gBAAgB;EAChB,cNzJmB;AF4wD7B;;AQjuDA;EAiHU,4DAA4E;ARonDtF;;AQruDA;EAmHQ,6BAA6B;EAC7B,qBN/JqB;EMgKrB,cNhKqB;AFsxD7B;;AQ3uDA;EA0HU,yBNrKmB;EMsKnB,qBNtKmB;EMuKnB,YN3JqB;AFgxD/B;;AQjvDA;EA+HY,gEAA8D;ARsnD1E;;AQrvDA;EAqIc,4DAA4E;ARonD1F;;AQzvDA;;EAwIU,6BAA6B;EAC7B,qBNpLmB;EMqLnB,gBAAgB;EAChB,cNtLmB;AF4yD7B;;AQjwDA;EA6IQ,6BAA6B;EAC7B,mBN7KuB;EM8KvB,YN9KuB;AFsyD/B;;AQvwDA;EAoJU,uBNnLqB;EMoLrB,cNhMmB;AFuzD7B;;AQ5wDA;EA4Jc,gEAA8D;ARonD5E;;AQhxDA;;EA+JU,6BAA6B;EAC7B,mBN/LqB;EMgMrB,gBAAgB;EAChB,YNjMqB;AFuzD/B;;AQxxDA;EA2EM,4BN5GwB;EM6GxB,yBAAyB;EACzB,cNpHwB;AFq0D9B;;AQ9xDA;EAgFQ,yBAAsC;EACtC,yBAAyB;EACzB,cNzHsB;AF20D9B;;AQpyDA;EAqFQ,yBAAyB;EACzB,cN7HsB;AFg1D9B;;AQzyDA;EAwFU,mDNzHoB;AF80D9B;;AQ7yDA;EA2FQ,yBAAoC;EACpC,yBAAyB;EACzB,cNpIsB;AF01D9B;;AQnzDA;;EAgGQ,4BNjIsB;EMkItB,yBAAyB;EACzB,gBAAgB;ARwtDxB;;AQ1zDA;EAoGQ,yBN3IsB;EM4ItB,iBNtIsB;AFg2D9B;;AQ/zDA;EAwGU,yBAA2C;AR2tDrD;;AQn0DA;;EA2GU,yBNlJoB;EMmJpB,yBAAyB;EACzB,gBAAgB;EAChB,iBN/IoB;AF42D9B;;AQ30DA;EAiHU,gEAA4E;AR8tDtF;;AQ/0DA;EAmHQ,6BAA6B;EAC7B,wBNrJsB;EMsJtB,iBNtJsB;AFs3D9B;;AQr1DA;EA0HU,4BN3JoB;EM4JpB,wBN5JoB;EM6JpB,cNnKoB;AFk4D9B;;AQ31DA;EA+HY,sEAA8D;ARguD1E;;AQ/1DA;EAqIc,gEAA4E;AR8tD1F;;AQn2DA;;EAwIU,6BAA6B;EAC7B,wBN1KoB;EM2KpB,gBAAgB;EAChB,iBN5KoB;AF44D9B;;AQ32DA;EA6IQ,6BAA6B;EAC7B,qBNrLsB;EMsLtB,cNtLsB;AFw5D9B;;AQj3DA;EAoJU,yBN3LoB;EM4LpB,iBNtLoB;AFu5D9B;;AQt3DA;EA4Jc,sEAA8D;AR8tD5E;;AQ13DA;;EA+JU,6BAA6B;EAC7B,qBNvMoB;EMwMpB,gBAAgB;EAChB,cNzMoB;AFy6D9B;;AQl4DA;EA2EM,yBNlHwB;EMmHxB,yBAAyB;EACzB,iBN9GwB;AFy6D9B;;AQx4DA;EAgFQ,yBAAsC;EACtC,yBAAyB;EACzB,iBNnHsB;AF+6D9B;;AQ94DA;EAqFQ,yBAAyB;EACzB,iBNvHsB;AFo7D9B;;AQn5DA;EAwFU,gDN/HoB;AF87D9B;;AQv5DA;EA2FQ,yBAAoC;EACpC,yBAAyB;EACzB,iBN9HsB;AF87D9B;;AQ75DA;;EAgGQ,yBNvIsB;EMwItB,yBAAyB;EACzB,gBAAgB;ARk0DxB;;AQp6DA;EAoGQ,4BNrIsB;EMsItB,cN5IsB;AFg9D9B;;AQz6DA;EAwGU,yBAA2C;ARq0DrD;;AQ76DA;;EA2GU,4BN5IoB;EM6IpB,yBAAyB;EACzB,gBAAgB;EAChB,cNrJoB;AF49D9B;;AQr7DA;EAiHU,sEAA4E;ARw0DtF;;AQz7DA;EAmHQ,6BAA6B;EAC7B,qBN3JsB;EM4JtB,cN5JsB;AFs+D9B;;AQ/7DA;EA0HU,yBNjKoB;EMkKpB,qBNlKoB;EMmKpB,iBN7JoB;AFs+D9B;;AQr8DA;EA+HY,gEAA8D;AR00D1E;;AQz8DA;EAqIc,sEAA4E;ARw0D1F;;AQ78DA;;EAwIU,6BAA6B;EAC7B,qBNhLoB;EMiLpB,gBAAgB;EAChB,cNlLoB;AF4/D9B;;AQr9DA;EA6IQ,6BAA6B;EAC7B,wBN/KsB;EMgLtB,iBNhLsB;AF4/D9B;;AQ39DA;EAoJU,4BNrLoB;EMsLpB,cN5LoB;AFugE9B;;AQh+DA;EA4Jc,gEAA8D;ARw0D5E;;AQp+DA;;EA+JU,6BAA6B;EAC7B,wBNjMoB;EMkMpB,gBAAgB;EAChB,iBNnMoB;AF6gE9B;;AQ5+DA;EA2EM,yBNrG4B;EMsG5B,yBAAyB;EACzB,WC7DU;ATk+DhB;;AQl/DA;EAgFQ,yBAAsC;EACtC,yBAAyB;EACzB,WClEQ;ATw+DhB;;AQx/DA;EAqFQ,yBAAyB;EACzB,WCtEQ;AT6+DhB;;AQ7/DA;EAwFU,iDNlHwB;AF2hElC;;AQjgEA;EA2FQ,yBAAoC;EACpC,yBAAyB;EACzB,WC7EQ;ATu/DhB;;AQvgEA;;EAgGQ,yBN1H0B;EM2H1B,yBAAyB;EACzB,gBAAgB;AR46DxB;;AQ9gEA;EAoGQ,sBCpFQ;EDqFR,cN/H0B;AF6iElC;;AQnhEA;EAwGU,yBAA2C;AR+6DrD;;AQvhEA;;EA2GU,sBC3FM;ED4FN,yBAAyB;EACzB,gBAAgB;EAChB,cNxIwB;AFyjElC;;AQ/hEA;EAiHU,0DAA4E;ARk7DtF;;AQniEA;EAmHQ,6BAA6B;EAC7B,qBN9I0B;EM+I1B,cN/I0B;AFmkElC;;AQziEA;EA0HU,yBNpJwB;EMqJxB,qBNrJwB;EMsJxB,WC5GM;AT+hEhB;;AQ/iEA;EA+HY,gEAA8D;ARo7D1E;;AQnjEA;EAqIc,0DAA4E;ARk7D1F;;AQvjEA;;EAwIU,6BAA6B;EAC7B,qBNnKwB;EMoKxB,gBAAgB;EAChB,cNrKwB;AFylElC;;AQ/jEA;EA6IQ,6BAA6B;EAC7B,kBC9HQ;ED+HR,WC/HQ;ATqjEhB;;AQrkEA;EAoJU,sBCpIM;EDqIN,cN/KwB;AFomElC;;AQ1kEA;EA4Jc,gEAA8D;ARk7D5E;;AQ9kEA;;EA+JU,6
BAA6B;EAC7B,kBChJM;EDiJN,gBAAgB;EAChB,WClJM;ATskEhB;;AQtlEA;EA2EM,yBNnG4B;EMoG5B,yBAAyB;EACzB,WC7DU;AT4kEhB;;AQ5lEA;EAgFQ,yBAAsC;EACtC,yBAAyB;EACzB,WClEQ;ATklEhB;;AQlmEA;EAqFQ,yBAAyB;EACzB,WCtEQ;ATulEhB;;AQvmEA;EAwFU,kDNhHwB;AFmoElC;;AQ3mEA;EA2FQ,yBAAoC;EACpC,yBAAyB;EACzB,WC7EQ;ATimEhB;;AQjnEA;;EAgGQ,yBNxH0B;EMyH1B,yBAAyB;EACzB,gBAAgB;ARshExB;;AQxnEA;EAoGQ,sBCpFQ;EDqFR,cN7H0B;AFqpElC;;AQ7nEA;EAwGU,yBAA2C;ARyhErD;;AQjoEA;;EA2GU,sBC3FM;ED4FN,yBAAyB;EACzB,gBAAgB;EAChB,cNtIwB;AFiqElC;;AQzoEA;EAiHU,0DAA4E;AR4hEtF;;AQ7oEA;EAmHQ,6BAA6B;EAC7B,qBN5I0B;EM6I1B,cN7I0B;AF2qElC;;AQnpEA;EA0HU,yBNlJwB;EMmJxB,qBNnJwB;EMoJxB,WC5GM;ATyoEhB;;AQzpEA;EA+HY,gEAA8D;AR8hE1E;;AQ7pEA;EAqIc,0DAA4E;AR4hE1F;;AQjqEA;;EAwIU,6BAA6B;EAC7B,qBNjKwB;EMkKxB,gBAAgB;EAChB,cNnKwB;AFisElC;;AQzqEA;EA6IQ,6BAA6B;EAC7B,kBC9HQ;ED+HR,WC/HQ;AT+pEhB;;AQ/qEA;EAoJU,sBCpIM;EDqIN,cN7KwB;AF4sElC;;AQprEA;EA4Jc,gEAA8D;AR4hE5E;;AQxrEA;;EA+JU,6BAA6B;EAC7B,kBChJM;EDiJN,gBAAgB;EAChB,WClJM;ATgrEhB;;AQhsEA;EA2EM,yBNpG4B;EMqG5B,yBAAyB;EACzB,WC7DU;ATsrEhB;;AQtsEA;EAgFQ,yBAAsC;EACtC,yBAAyB;EACzB,WClEQ;AT4rEhB;;AQ5sEA;EAqFQ,yBAAyB;EACzB,WCtEQ;ATisEhB;;AQjtEA;EAwFU,kDNjHwB;AF8uElC;;AQrtEA;EA2FQ,yBAAoC;EACpC,yBAAyB;EACzB,WC7EQ;AT2sEhB;;AQ3tEA;;EAgGQ,yBNzH0B;EM0H1B,yBAAyB;EACzB,gBAAgB;ARgoExB;;AQluEA;EAoGQ,sBCpFQ;EDqFR,cN9H0B;AFgwElC;;AQvuEA;EAwGU,yBAA2C;ARmoErD;;AQ3uEA;;EA2GU,sBC3FM;ED4FN,yBAAyB;EACzB,gBAAgB;EAChB,cNvIwB;AF4wElC;;AQnvEA;EAiHU,0DAA4E;ARsoEtF;;AQvvEA;EAmHQ,6BAA6B;EAC7B,qBN7I0B;EM8I1B,cN9I0B;AFsxElC;;AQ7vEA;EA0HU,yBNnJwB;EMoJxB,qBNpJwB;EMqJxB,WC5GM;ATmvEhB;;AQnwEA;EA+HY,gEAA8D;ARwoE1E;;AQvwEA;EAqIc,0DAA4E;ARsoE1F;;AQ3wEA;;EAwIU,6BAA6B;EAC7B,qBNlKwB;EMmKxB,gBAAgB;EAChB,cNpKwB;AF4yElC;;AQnxEA;EA6IQ,6BAA6B;EAC7B,kBC9HQ;ED+HR,WC/HQ;ATywEhB;;AQzxEA;EAoJU,sBCpIM;EDqIN,cN9KwB;AFuzElC;;AQ9xEA;EA4Jc,gEAA8D;ARsoE5E;;AQlyEA;;EA+JU,6BAA6B;EAC7B,kBChJM;EDiJN,gBAAgB;EAChB,WClJM;AT0xEhB;;AQ1yEA;EA2EM,yBNtG4B;EMuG5B,yBAAyB;EACzB,WC7DU;ATgyEhB;;AQhzEA;EAgFQ,yBAAsC;EACtC,yBAAyB;EACzB,WClEQ;ATsyEhB;;AQtzEA;EAqFQ,yBAAyB;EACzB,WCtEQ;AT2yEhB;;AQ3zEA;EAwFU,iDNnHwB;AF01ElC;;AQ/zEA;EA2FQ,yBAAoC;EACpC,yBAAyB;EACzB,WC7EQ;ATqzEhB;;AQr0EA;;EAgGQ,yBN3H0B;EM4H1B,yBAAyB;EACzB,gBAAgB;AR0uExB;;AQ50EA;EAoGQ,sBCpFQ;EDqFR,cNhI0B;AF42ElC;;AQj1EA;EAwGU,yBAA2C;AR6uErD;;AQr1EA;;EA2GU,sBC3FM;ED4FN,yBAAyB;EACzB,gBAAgB;EAChB,cNzIwB;AFw3ElC;;AQ71EA;EAiHU,0DAA4E;ARgvEtF;;AQj2EA;EAmHQ,6BAA6B;EAC7B,qBN/I0B;EMgJ1B,cNhJ0B;AFk4ElC;;AQv2EA;EA0HU,yBNrJwB;EMsJxB,qBNtJwB;EMuJxB,WC5GM;AT61EhB;;AQ72EA;EA+HY,gEAA8D;ARkvE1E;;AQj3EA;EAqIc,0DAA4E;ARgvE1F;;AQr3EA;;EAwIU,6BAA6B;EAC7B,qBNpKwB;EMqKxB,gBAAgB;EAChB,cNtKwB;AFw5ElC;;AQ73EA;EA6IQ,6BAA6B;EAC7B,kBC9HQ;ED+HR,WC/HQ;ATm3EhB;;AQn4EA;EAoJU,sBCpIM;EDqIN,cNhLwB;AFm6ElC;;AQx4EA;EA4Jc,gEAA8D;ARgvE5E;;AQ54EA;;EA+JU,6BAA6B;EAC7B,kBChJM;EDiJN,gBAAgB;EAChB,WClJM;ATo4EhB;;AQp5EA;EA2EM,yBNvG4B;EMwG5B,yBAAyB;EACzB,yBC/De;AT44ErB;;AQ15EA;EAgFQ,yBAAsC;EACtC,yBAAyB;EACzB,yBCpEa;ATk5ErB;;AQh6EA;EAqFQ,yBAAyB;EACzB,yBCxEa;ATu5ErB;;AQr6EA;EAwFU,kDNpHwB;AFq8ElC;;AQz6EA;EA2FQ,yBAAoC;EACpC,yBAAyB;EACzB,yBC/Ea;ATi6ErB;;AQ/6EA;;EAgGQ,yBN5H0B;EM6H1B,yBAAyB;EACzB,gBAAgB;ARo1ExB;;AQt7EA;EAoGQ,oCCtFa;EDuFb,cNjI0B;AFu9ElC;;AQ37EA;EAwGU,oCAA2C;ARu1ErD;;AQ/7EA;;EA2GU,oCC7FW;ED8FX,yBAAyB;EACzB,gBAAgB;EAChB,cN1IwB;AFm+ElC;;AQv8EA;EAiHU,sFAA4E;AR01EtF;;AQ38EA;EAmHQ,6BAA6B;EAC7B,qBNhJ0B;EMiJ1B,cNjJ0B;AF6+ElC;;AQj9EA;EA0HU,yBNtJwB;EMuJxB,qBNvJwB;EMwJxB,yBC9GW;ATy8ErB;;AQv9EA;EA+HY,gEAA8D;AR41E1E;;AQ39EA;EAqIc,sFAA4E;AR01E1F;;AQ/9EA;;EAwIU,6BAA6B;EAC7B,qBNrKwB;EMsKxB,gBAAgB;EAChB,cNvKwB;AFmgFlC;;AQv+EA;EA6IQ,6BAA6B;EAC7B,gCChIa;EDiIb,yBCjIa;AT+9ErB;;AQ7+EA;EAoJU,oCCtIW;EDuIX,cNjLwB;AF8gFl
C;;AQl/EA;EA4Jc,gEAA8D;AR01E5E;;AQt/EA;;EA+JU,6BAA6B;EAC7B,gCClJW;EDmJX,gBAAgB;EAChB,yBCpJW;ATg/ErB;;AQ9/EA;EA2EM,yBNjG4B;EMkG5B,yBAAyB;EACzB,WC7DU;ATo/EhB;;AQpgFA;EAgFQ,yBAAsC;EACtC,yBAAyB;EACzB,WClEQ;AT0/EhB;;AQ1gFA;EAqFQ,yBAAyB;EACzB,WCtEQ;AT+/EhB;;AQ/gFA;EAwFU,iDN9GwB;AFyiFlC;;AQnhFA;EA2FQ,yBAAoC;EACpC,yBAAyB;EACzB,WC7EQ;ATygFhB;;AQzhFA;;EAgGQ,yBNtH0B;EMuH1B,yBAAyB;EACzB,gBAAgB;AR87ExB;;AQhiFA;EAoGQ,sBCpFQ;EDqFR,cN3H0B;AF2jFlC;;AQriFA;EAwGU,yBAA2C;ARi8ErD;;AQziFA;;EA2GU,sBC3FM;ED4FN,yBAAyB;EACzB,gBAAgB;EAChB,cNpIwB;AFukFlC;;AQjjFA;EAiHU,0DAA4E;ARo8EtF;;AQrjFA;EAmHQ,6BAA6B;EAC7B,qBN1I0B;EM2I1B,cN3I0B;AFilFlC;;AQ3jFA;EA0HU,yBNhJwB;EMiJxB,qBNjJwB;EMkJxB,WC5GM;ATijFhB;;AQjkFA;EA+HY,gEAA8D;ARs8E1E;;AQrkFA;EAqIc,0DAA4E;ARo8E1F;;AQzkFA;;EAwIU,6BAA6B;EAC7B,qBN/JwB;EMgKxB,gBAAgB;EAChB,cNjKwB;AFumFlC;;AQjlFA;EA6IQ,6BAA6B;EAC7B,kBC9HQ;ED+HR,WC/HQ;ATukFhB;;AQvlFA;EAoJU,sBCpIM;EDqIN,cN3KwB;AFknFlC;;AQ5lFA;EA4Jc,gEAA8D;ARo8E5E;;AQhmFA;;EA+JU,6BAA6B;EAC7B,kBChJM;EDiJN,gBAAgB;EAChB,WClJM;ATwlFhB;;AQxmFA;EATE,kBN+BgB;EM9BhB,kBNAc;AFqnFhB;;AQ7mFA;EANE,eNHW;AF0nFb;;AQjnFA;EAJE,kBNNc;AF+nFhB;;AQrnFA;EAFE,iBNTa;AFooFf;;AQznFA;;EA+KI,uBN9M2B;EM+M3B,qBNnN0B;EMoN1B,gBAnMyB;EAoMzB,YAnMyB;ARkpF7B;;AQjoFA;EAoLI,aAAa;EACb,WAAW;ARi9Ef;;AQtoFA;EAuLI,6BAA6B;EAC7B,oBAAoB;ARm9ExB;;AQ3oFA;EPpCE,kBAAkB;EAKhB,2BAAiC;EACjC,0BAAgC;EO0N9B,6BAA6B;ARs9EnC;;AQlpFA;EA8LI,4BN/N0B;EMgO1B,qBNlO0B;EMmO1B,cNrO0B;EMsO1B,gBAAgB;EAChB,oBAAoB;ARw9ExB;;AQ1pFA;EAoMI,uBN3KqB;EM4KrB,iBAAiB;EACjB,kBAAkB;AR09EtB;;AQx9EA;EACE,mBAAmB;EACnB,aAAa;EACb,eAAe;EACf,2BAA2B;AR29E7B;;AQ/9EA;EAMI,qBAAqB;AR69EzB;;AQn+EA;EAQM,oBAAoB;AR+9E1B;;AQv+EA;EAUI,sBAAsB;ARi+E1B;;AQ3+EA;EAYI,mBAAmB;ARm+EvB;;AQ/+EA;EAjNE,kBN+BgB;EM9BhB,kBNAc;AFosFhB;;AQp/EA;EA5ME,kBNNc;AF0sFhB;;AQx/EA;EA1ME,iBNTa;AF+sFf;;AQ5/EA;EA0BQ,4BAA4B;EAC5B,yBAAyB;ARs+EjC;;AQjgFA;EA6BQ,6BAA6B;EAC7B,0BAA0B;EAC1B,kBAAkB;ARw+E1B;;AQvgFA;EAiCQ,eAAe;AR0+EvB;;AQ3gFA;EAoCQ,UAAU;AR2+ElB;;AQ/gFA;EA0CQ,UAAU;ARy+ElB;;AQnhFA;EA4CU,UAAU;AR2+EpB;;AQvhFA;EA8CQ,YAAY;EACZ,cAAc;AR6+EtB;;AQ5hFA;EAiDI,uBAAuB;AR++E3B;;AQhiFA;EAoDQ,oBAAoB;EACpB,qBAAqB;ARg/E7B;;AQriFA;EAuDI,yBAAyB;ARk/E7B;;AQziFA;EA0DQ,oBAAoB;EACpB,qBAAqB;ARm/E7B;;AUjyFA;EACE,YAAY;EACZ,cAAc;EACd,kBAAkB;EAClB,WAAW;AVoyFb;;ACzsFE;ES/FF;IAMI,gBAAuC;EVuyFzC;EU7yFF;IAQM,iBR0CI;IQzCJ,kBRyCI;IQxCJ,eAAe;EVwyFnB;AACF;;AC1sFI;ESzGJ;IAaM,iBAA0C;EV2yF9C;AACF;;ACjsFI;ESxHJ;IAgBM,iBAAsC;EV8yF1C;AACF;;ACjtFI;ES9GJ;IAkBI,iBAA0C;EVkzF5C;AACF;;ACxsFI;ES7HJ;IAoBI,iBAAsC;EVszFxC;AACF;;AW1zFA;EAII,kBAAkB;AX0zFtB;;AW9zFA;;;;;;;EAcM,kBAAkB;AX0zFxB;;AWx0FA;;;;;;EAqBI,cTlC0B;ESmC1B,gBTCiB;ESAjB,kBAxC+B;AXo2FnC;;AWn1FA;EAyBI,cAAc;EACd,oBAAoB;AX8zFxB;;AWx1FA;EA4BM,eAAe;AXg0FrB;;AW51FA;EA8BI,iBAAiB;EACjB,uBAAuB;AXk0F3B;;AWj2FA;EAiCM,oBAAoB;AXo0F1B;;AWr2FA;EAmCI,gBAAgB;EAChB,uBAAuB;AXs0F3B;;AW12FA;EAsCM,oBAAoB;AXw0F1B;;AW92FA;EAwCI,iBAAiB;EACjB,oBAAoB;AX00FxB;;AWn3FA;EA2CI,kBAAkB;EAClB,uBAAuB;AX40F3B;;AWx3FA;EA8CI,cAAc;EACd,kBAAkB;AX80FtB;;AW73FA;EAiDI,4BTxD0B;ESyD1B,8BT3D0B;ES4D1B,qBAhEqC;AXg5FzC;;AWn4FA;EAqDI,4BAA4B;EAC5B,gBAAgB;EAChB,eAAe;AXk1FnB;;AWz4FA;EAyDM,wBAAwB;AXo1F9B;;AW74FA;EA2DQ,4BAA4B;AXs1FpC;;AWj5FA;EA6DQ,4BAA4B;AXw1FpC;;AWr5FA;EA+DQ,4BAA4B;AX01FpC;;AWz5FA;EAiEQ,4BAA4B;AX41FpC;;AW75FA;EAmEI,wBAAwB;EACxB,gBAAgB;EAChB,eAAe;AX81FnB;;AWn6FA;EAuEM,uBAAuB;EACvB,iBAAiB;AXg2FvB;;AWx6FA;EA0EQ,uBAAuB;AXk2F/B;;AW56FA;EA4EI,gBAAgB;AXo2FpB;;AWh7FA;EA8EI,gBAAgB;EAChB,iBAAiB;EACjB,kBAAkB;AXs2FtB;;AWt7FA;EAkFM,eAAe;AXw2FrB;;AW17FA;EAoFM,kBAAkB;AX02FxB;;AW97FA;EAsFM,qBAAqB;AX42F3B;;AWl8FA;EAwFM,kBAAkB;AX82FxB;;AWt8FA;EV2CE,iCAAiC;EUgD/B,gBAAgB;EAChB,qBAvG8B;EAwG9B,gBAAgB;EAChB,iBAAi
B;AXg3FrB;;AW98FA;;EAiGI,cAAc;AXk3FlB;;AWn9FA;EAmGI,WAAW;AXo3Ff;;AWv9FA;;EAsGM,yBT/GwB;ESgHxB,qBA/GmC;EAgHnC,qBA/GmC;EAgHnC,mBAAmB;AXs3FzB;;AW/9FA;EA2GM,cTxHwB;AFg/F9B;;AWn+FA;EA6GQ,gBAAgB;AX03FxB;;AWv+FA;;EAiHQ,qBAtHsC;EAuHtC,cT/HsB;AF0/F9B;;AW7+FA;;EAsHQ,qBAzHsC;EA0HtC,cTpIsB;AFggG9B;;AWn/FA;;EA6HY,sBAAsB;AX23FlC;;AWx/FA;EAgIM,aAAa;AX43FnB;;AW5/FA;EAmII,kBTjHY;AF8+FhB;;AWhgGA;EAqII,kBTrHY;AFo/FhB;;AWpgGA;EAuII,iBTxHW;AFy/Ff;;AYthGA;EACE,mBAAmB;EACnB,oBAAoB;EACpB,uBAAuB;EACvB,cATsB;EAUtB,aAVsB;AZmiGxB;;AY9hGA;EAQI,YAZwB;EAaxB,WAbwB;AZuiG5B;;AYniGA;EAWI,YAdyB;EAezB,WAfyB;AZ2iG7B;;AYxiGA;EAcI,YAhBwB;EAiBxB,WAjBwB;AZ+iG5B;;AahjGA;EACE,cAAc;EACd,kBAAkB;AbmjGpB;;AarjGA;EAII,cAAc;EACd,YAAY;EACZ,WAAW;AbqjGf;;Aa3jGA;EAQM,uBX4DmB;AF2/FzB;;Aa/jGA;;;;;;;;;;;;;;;;;EA6BM,YAAY;EACZ,WAAW;AbsjGjB;;AaplGA;EAiCI,iBAAiB;AbujGrB;;AaxlGA;EAmCI,gBAAgB;AbyjGpB;;Aa5lGA;EAqCI,gBAAgB;Ab2jGpB;;AahmGA;EAuCI,qBAAqB;Ab6jGzB;;AapmGA;EAyCI,gBAAgB;Ab+jGpB;;AaxmGA;EA2CI,mBAAmB;AbikGvB;;Aa5mGA;EA6CI,gBAAgB;AbmkGpB;;AahnGA;EA+CI,qBAAqB;AbqkGzB;;AapnGA;EAiDI,iBAAiB;AbukGrB;;AaxnGA;EAmDI,sBAAsB;AbykG1B;;Aa5nGA;EAqDI,iBAAiB;Ab2kGrB;;AahoGA;EAuDI,sBAAsB;Ab6kG1B;;AapoGA;EAyDI,sBAAsB;Ab+kG1B;;AaxoGA;EA2DI,iBAAiB;AbilGrB;;Aa5oGA;EA6DI,iBAAiB;AbmlGrB;;AahpGA;EAiEM,YAAwB;EACxB,WAAuB;AbmlG7B;;AarpGA;EAiEM,YAAwB;EACxB,WAAuB;AbwlG7B;;Aa1pGA;EAiEM,YAAwB;EACxB,WAAuB;Ab6lG7B;;Aa/pGA;EAiEM,YAAwB;EACxB,WAAuB;AbkmG7B;;AapqGA;EAiEM,YAAwB;EACxB,WAAuB;AbumG7B;;AazqGA;EAiEM,YAAwB;EACxB,WAAuB;Ab4mG7B;;Aa9qGA;EAiEM,aAAwB;EACxB,YAAuB;AbinG7B;;AcjrGA;EAEE,4BZM4B;EYL5B,kBZ6DU;EY5DV,sCANkD;EAOlD,kBAAkB;AdmrGpB;;AcxrGA;EAOI,mBAAmB;EACnB,0BAA0B;AdqrG9B;;Ac7rGA;EAUI,mBAAmB;AdurGvB;;AcjsGA;;EAaI,iBZH2B;AF4rG/B;;ActsGA;EAeI,uBAAuB;Ad2rG3B;;Ac1sGA;EAiBI,kBAAkB;EAClB,aAAa;EACb,WAAW;Ad6rGf;;AchtGA;;;EAuBI,mBAAmB;Ad+rGvB;;ActtGA;EA6BM,uBZnByB;EYoBzB,cZhCuB;AF6tG7B;;Ac3tGA;EA6BM,yBZ/BuB;EYgCvB,YZpByB;AFstG/B;;AchuGA;EA6BM,4BZrBwB;EYsBxB,cZ5BwB;AFmuG9B;;AcruGA;EA6BM,yBZ3BwB;EY4BxB,iBZtBwB;AFkuG9B;;Ac1uGA;EA6BM,yBZd4B;EYe5B,WL2BU;ATsrGhB;;Ac/uGA;EA6BM,yBZZ4B;EYa5B,WL2BU;AT2rGhB;;AcpvGA;EA6BM,yBZb4B;EYc5B,WL2BU;ATgsGhB;;AczvGA;EA6BM,yBZf4B;EYgB5B,WL2BU;ATqsGhB;;Ac9vGA;EA6BM,yBZhB4B;EYiB5B,yBLyBe;AT4sGrB;;AcnwGA;EA6BM,yBZV4B;EYW5B,WL2BU;AT+sGhB;;AetwGA;EAEE,qBAAqB;EACrB,wBAAwB;EACxB,YAAY;EACZ,uBb2DuB;Ea1DvB,cAAc;EACd,YbuBW;EatBX,gBAAgB;EAChB,UAAU;EACV,WAAW;AfwwGb;;AelxGA;EAYI,yBbR0B;AFkxG9B;;AetxGA;EAcI,yBbb0B;AFyxG9B;;Ae1xGA;EAgBI,yBbf0B;AF6xG9B;;Ae9xGA;EAkBI,yBbjB0B;EakB1B,YAAY;AfgxGhB;;AenyGA;EAyBQ,uBbjBuB;AF+xG/B;;AevyGA;EA2BQ,uBbnBuB;AFmyG/B;;Ae3yGA;EA6BQ,uBbrBuB;AFuyG/B;;Ae/yGA;EA+BQ,mEAA2F;AfoxGnG;;AenzGA;EAyBQ,yBb7BqB;AF2zG7B;;AevzGA;EA2BQ,yBb/BqB;AF+zG7B;;Ae3zGA;EA6BQ,yBbjCqB;AFm0G7B;;Ae/zGA;EA+BQ,qEAA2F;AfoyGnG;;Aen0GA;EAyBQ,4BbnBsB;AFi0G9B;;Aev0GA;EA2BQ,4BbrBsB;AFq0G9B;;Ae30GA;EA6BQ,4BbvBsB;AFy0G9B;;Ae/0GA;EA+BQ,wEAA2F;AfozGnG;;Aen1GA;EAyBQ,yBbzBsB;AFu1G9B;;Aev1GA;EA2BQ,yBb3BsB;AF21G9B;;Ae31GA;EA6BQ,yBb7BsB;AF+1G9B;;Ae/1GA;EA+BQ,qEAA2F;Afo0GnG;;Aen2GA;EAyBQ,yBbZ0B;AF01GlC;;Aev2GA;EA2BQ,yBbd0B;AF81GlC;;Ae32GA;EA6BQ,yBbhB0B;AFk2GlC;;Ae/2GA;EA+BQ,qEAA2F;Afo1GnG;;Aen3GA;EAyBQ,yBbV0B;AFw2GlC;;Aev3GA;EA2BQ,yBbZ0B;AF42GlC;;Ae33GA;EA6BQ,yBbd0B;AFg3GlC;;Ae/3GA;EA+BQ,qEAA2F;Afo2GnG;;Aen4GA;EAyBQ,yBbX0B;AFy3GlC;;Aev4GA;EA2BQ,yBbb0B;AF63GlC;;Ae34GA;EA6BQ,yBbf0B;AFi4GlC;;Ae/4GA;EA+BQ,qEAA2F;Afo3GnG;;Aen5GA;EAyBQ,yBbb0B;AF24GlC;;Aev5GA;EA2BQ,yBbf0B;AF+4GlC;;Ae35GA;EA6BQ,yBbjB0B;AFm5GlC;;Ae/5GA;EA+BQ,qEAA2F;Afo4GnG;;Aen6GA;EAyBQ,yBbd0B;AF45GlC;;Aev6GA;EA2BQ,yBbhB0B;AFg6GlC;;Ae36GA;EA6BQ,yBblB0B;AFo6GlC;;Ae/6GA;EA+BQ,qEAA2F;Afo5GnG;;Aen7GA;EAyBQ,yBbR0B;AFs6GlC;;Aev7GA;EA
2BQ,yBbV0B;AF06GlC;;Ae37GA;EA6BQ,yBbZ0B;AF86GlC;;Ae/7GA;EA+BQ,qEAA2F;Afo6GnG;;Aen8GA;EAkCI,gCApCkC;UAoClC,wBApCkC;EAqClC,2CAAmC;UAAnC,mCAAmC;EACnC,yCAAiC;UAAjC,iCAAiC;EACjC,yCAAiC;UAAjC,iCAAiC;EACjC,yBblC0B;EamC1B,qEAA0F;EAC1F,6BAA6B;EAC7B,4BAA4B;EAC5B,0BAA0B;Afq6G9B;;Ae/8GA;EA4CM,6BAA6B;Afu6GnC;;Aen9GA;EA8CM,6BAA6B;Afy6GnC;;Aev9GA;EAkDI,ebnBY;AF47GhB;;Ae39GA;EAoDI,ebvBY;AFk8GhB;;Ae/9GA;EAsDI,cb1BW;AFu8Gf;;Ae36GA;EACE;IACE,2BAA2B;Ef86G7B;Ee76GA;IACE,4BAA4B;Ef+6G9B;AACF;;Aep7GA;EACE;IACE,2BAA2B;Ef86G7B;Ee76GA;IACE,4BAA4B;Ef+6G9B;AACF;;AgBz9GA;EAEE,uBdb6B;Ecc7B,cdtB4B;AFi/G9B;;AgB99GA;;EAMI,yBdrB0B;EcsB1B,qBA5B6B;EA6B7B,qBA5B6B;EA6B7B,mBAAmB;AhB69GvB;;AgBt+GA;;EAeQ,uBd1BuB;Ec2BvB,mBd3BuB;Ec4BvB,cdxCqB;AFogH7B;;AgB7+GA;;EAeQ,yBdtCqB;EcuCrB,qBdvCqB;EcwCrB,Yd5BuB;AF+/G/B;;AgBp/GA;;EAeQ,4Bd5BsB;Ec6BtB,wBd7BsB;Ec8BtB,cdpCsB;AF8gH9B;;AgB3/GA;;EAeQ,yBdlCsB;EcmCtB,qBdnCsB;EcoCtB,iBd9BsB;AF+gH9B;;AgBlgHA;;EAeQ,yBdrB0B;EcsB1B,qBdtB0B;EcuB1B,WPmBQ;ATq+GhB;;AgBzgHA;;EAeQ,yBdnB0B;EcoB1B,qBdpB0B;EcqB1B,WPmBQ;AT4+GhB;;AgBhhHA;;EAeQ,yBdpB0B;EcqB1B,qBdrB0B;EcsB1B,WPmBQ;ATm/GhB;;AgBvhHA;;EAeQ,yBdtB0B;EcuB1B,qBdvB0B;EcwB1B,WPmBQ;AT0/GhB;;AgB9hHA;;EAeQ,yBdvB0B;EcwB1B,qBdxB0B;EcyB1B,yBPiBa;ATmgHrB;;AgBriHA;;EAeQ,yBdjB0B;EckB1B,qBdlB0B;EcmB1B,WPmBQ;ATwgHhB;;AgB5iHA;;EAoBM,mBAAmB;EACnB,SAAS;AhB6hHf;;AgBljHA;;EAuBM,yBd7B4B;Ec8B5B,WPYU;ATohHhB;;AgBxjHA;;;;EA2BQ,mBAAmB;AhBoiH3B;;AgB/jHA;EA6BI,cdhD0B;AFslH9B;;AgBnkHA;EA+BM,gBAAgB;AhBwiHtB;;AgBvkHA;EAkCM,yBdxC4B;EcyC5B,WPCU;ATwiHhB;;AgB5kHA;;EAsCQ,mBAAmB;AhB2iH3B;;AgBjlHA;;EAyCQ,kBPLQ;EOMR,mBAAmB;AhB6iH3B;;AgBvlHA;EA4CI,6BAxDqC;AhBumHzC;;AgB3lHA;;EA+CM,qBAhEgC;EAiEhC,cdnEwB;AFonH9B;;AgBjmHA;EAkDI,6BA5DqC;AhB+mHzC;;AgBrmHA;;EAqDM,qBApEgC;EAqEhC,cdzEwB;AF8nH9B;;AgB3mHA;EAwDI,6BAnEqC;AhB0nHzC;;AgB/mHA;;EA6DU,sBAAsB;AhBujHhC;;AgBpnHA;;EAkEM,iBAAiB;AhBujHvB;;AgBznHA;;EAuEU,wBAAwB;AhBujHlC;;AgB9nHA;EAyEI,WAAW;AhByjHf;;AgBloHA;EA8EU,yBd1FoB;AFkpH9B;;AgBtoHA;EAmFY,yBd/FkB;AFspH9B;;AgB1oHA;EAqFc,4BdlGgB;AF2pH9B;;AgB9oHA;;EAyFM,qBAAqB;AhB0jH3B;;AgBnpHA;EA8FU,yBd1GoB;AFmqH9B;;AgBvjHA;Ef3DE,iCAAiC;Ee8DjC,cAAc;EACd,kBAAkB;EAClB,eAAe;AhByjHjB;;AiBlrHA;EACE,mBAAmB;EACnB,aAAa;EACb,eAAe;EACf,2BAA2B;AjBqrH7B;;AiBzrHA;EAMI,qBAAqB;AjBurHzB;;AiB7rHA;EAQM,oBAAoB;AjByrH1B;;AiBjsHA;EAUI,sBAAsB;AjB2rH1B;;AiBrsHA;EAYI,mBAAmB;AjB6rHvB;;AiBzsHA;EAgBM,efeO;AF8qHb;;AiB7sHA;EAmBM,kBfWU;AFmrHhB;;AiBjtHA;EAqBI,uBAAuB;AjBgsH3B;;AiBrtHA;EAuBM,qBAAqB;EACrB,oBAAoB;AjBksH1B;;AiB1tHA;EA0BI,yBAAyB;AjBosH7B;;AiB9tHA;EA6BQ,mBAAmB;AjBqsH3B;;AiBluHA;EA+BQ,eAAe;AjBusHvB;;AiBtuHA;EAkCM,eAAe;AjBwsHrB;;AiB1uHA;EAoCQ,cAAc;EACd,4BAA4B;EAC5B,yBAAyB;AjB0sHjC;;AiBhvHA;EAwCQ,6BAA6B;EAC7B,0BAA0B;AjB4sHlC;;AiB1sHA;EACE,mBAAmB;EACnB,4BftC4B;EeuC5B,kBfiBU;EehBV,cf7C4B;Ee8C5B,oBAAoB;EACpB,kBfjBc;EekBd,WAAW;EACX,uBAAuB;EACvB,gBAAgB;EAChB,oBAAoB;EACpB,qBAAqB;EACrB,mBAAmB;AjB6sHrB;;AiBztHA;EAcI,oBAAoB;EACpB,uBAAuB;AjB+sH3B;;AiB9tHA;EAqBM,uBfvDyB;EewDzB,cfpEuB;AFixH7B;;AiBnuHA;EAqBM,yBfnEuB;EeoEvB,YfxDyB;AF0wH/B;;AiBxuHA;EAqBM,4BfzDwB;Ee0DxB,cfhEwB;AFuxH9B;;AiB7uHA;EAqBM,yBf/DwB;EegExB,iBf1DwB;AFsxH9B;;AiBlvHA;EAqBM,yBflD4B;EemD5B,WRTU;AT0uHhB;;AiBvvHA;EAqBM,yBfhD4B;EeiD5B,WRTU;AT+uHhB;;AiB5vHA;EAqBM,yBfjD4B;EekD5B,WRTU;ATovHhB;;AiBjwHA;EAqBM,yBfnD4B;EeoD5B,WRTU;ATyvHhB;;AiBtwHA;EAqBM,yBfpD4B;EeqD5B,yBRXe;ATgwHrB;;AiB3wHA;EAqBM,yBf9C4B;Ee+C5B,WRTU;ATmwHhB;;AiBhxHA;EAyBI,kBfpCY;AF+xHhB;;AiBpxHA;EA2BI,efvCS;AFoyHb;;AiBxxHA;EA6BI,kBf1CY;AFyyHhB;;AiB5xHA;EAgCM,qBAAqB;EACrB,sBAAsB;AjBgwH5B;;AiBjyHA;EAmCM,qBAAqB;EACrB,sBAAsB;AjBkwH5B;;AiBtyHA;EAsCM,qBAAqB;EACrB,sBAAsB;AjBowH5B;;AiB3yHA;EA0CI,gBAvFmB;EAwFnB,UAAU;EACV,kBAAkB;EAClB,UAAU;AjB
qwHd;;AiBlzHA;EAgDM,8BAA8B;EAC9B,WAAW;EACX,cAAc;EACd,SAAS;EACT,kBAAkB;EAClB,QAAQ;EACR,kEAA0D;UAA1D,0DAA0D;EAC1D,uCAA+B;UAA/B,+BAA+B;AjBswHrC;;AiB7zHA;EAyDM,WAAW;EACX,UAAU;AjBwwHhB;;AiBl0HA;EA4DM,WAAW;EACX,UAAU;AjB0wHhB;;AiBv0HA;EAgEM,yBAAmD;AjB2wHzD;;AiB30HA;EAkEM,yBAAoD;AjB6wH1D;;AiB/0HA;EAoEI,uBf9CqB;AF6zHzB;;AiB7wHA;EAEI,0BAA0B;AjB+wH9B;;AkBp3HA;;EAGE,sBAAsB;AlBs3HxB;;AkBz3HA;;;;EAMI,oBAAoB;AlB03HxB;;AkBh4HA;;EAQI,iBApBmB;AlBi5HvB;;AkBr4HA;;EAUI,iBArBmB;AlBq5HvB;;AkB14HA;;EAYI,sBAAsB;AlBm4H1B;;AkBj4HA;EACE,chB5B4B;EgB+B5B,ehBJW;EgBKX,gBhBImB;EgBHnB,kBAnCuB;AlBq6HzB;;AkBx4HA;EAQI,cApCwB;EAqCxB,oBApCyB;AlBw6H7B;;AkB74HA;EAWI,oBAAoB;AlBs4HxB;;AkBj5HA;EAaI,oBA7B+B;AlBq6HnC;;AkBr5HA;EAkBM,ehBpBO;AF25Hb;;AkBz5HA;EAkBM,iBhBnBS;AF85Hf;;AkB75HA;EAkBM,ehBlBO;AFi6Hb;;AkBj6HA;EAkBM,iBhBjBS;AFo6Hf;;AkBr6HA;EAkBM,kBhBhBU;AFu6HhB;;AkBz6HA;EAkBM,ehBfO;AF06Hb;;AkB76HA;EAkBM,kBhBdU;AF66HhB;;AkB75HA;EACE,chB/C4B;EgBkD5B,kBhBtBc;EgBuBd,gBhBlBiB;EgBmBjB,iBA7CyB;AlB28H3B;;AkBp6HA;EAQI,chBvD0B;EgBwD1B,gBhBpBiB;AFo7HrB;;AkBz6HA;EAWI,oBA/C+B;AlBi9HnC;;AkB76HA;EAgBM,ehBtCO;AFu8Hb;;AkBj7HA;EAgBM,iBhBrCS;AF08Hf;;AkBr7HA;EAgBM,ehBpCO;AF68Hb;;AkBz7HA;EAgBM,iBhBnCS;AFg9Hf;;AkB77HA;EAgBM,kBhBlCU;AFm9HhB;;AkBj8HA;EAgBM,ehBjCO;AFs9Hb;;AkBr8HA;EAgBM,kBhBhCU;AFy9HhB;;AmBx/HA;EACE,cAAc;EACd,eAAe;EACf,mBAAmB;EACnB,kBAAkB;EAClB,yBAAyB;AnB2/H3B;;AmBz/HA;EAEE,gBjByBiB;EiBxBjB,eAAe;EACf,gBAAgB;EAChB,UAAU;AnB2/HZ;;AmBhgIA;EAOI,cAAc;EACd,eAAe;AnB6/HnB;;AmBx/HA;EACE,mBAAmB;EACnB,4BjBhB4B;EiBiB5B,uBjByCuB;EiBxCvB,oBAAoB;EACpB,kBjBIc;EiBHd,WAAW;EACX,uBAAuB;EACvB,oBAAoB;EACpB,gBAAgB;EAChB,uBAAuB;EACvB,kBAAkB;EAClB,mBAAmB;AnB2/HrB;;AoB5+HA;EAxBE,uBlBf6B;EkBgB7B,qBlBpB4B;EkBqB5B,kBlBqCU;EkBpCV,clB1B4B;AFkiI9B;;ACr+HI;EmBjCA,4BlB5B0B;AFsiI9B;;ACz+HI;EmBjCA,4BlB5B0B;AF0iI9B;;AC7+HI;EmBjCA,4BlB5B0B;AF8iI9B;;ACj/HI;EmBjCA,4BlB5B0B;AFkjI9B;;AoBrhIE;EAEE,qBlB5B0B;AFmjI9B;;AoBthIE;EAIE,qBlBrB8B;EkBsB9B,kDlBtB8B;AF4iIlC;;AoBrhIE;;;;;EAEE,4BlBlC0B;EkBmC1B,wBlBnC0B;EkBoC1B,gBAAgB;EAChB,clBzC0B;AFokI9B;;ACzgII;;;;;EmBhBE,+BlB3CwB;AF4kI9B;;ACjhII;;;;;EmBhBE,+BlB3CwB;AFolI9B;;ACzhII;;;;;EmBhBE,+BlB3CwB;AF4lI9B;;ACjiII;;;;;EmBhBE,+BlB3CwB;AFomI9B;;AqB5mIA;EAEE,iDnBA2B;EmBC3B,eAAe;EACf,WAAW;ArB8mIb;;AqB7mIE;EACE,gBAAgB;ArBgnIpB;;AqB5mII;EACE,mBnBGyB;AF4mI/B;;AqBhnIK;EAMG,mDnBFuB;AFgnI/B;;AqBpnII;EACE,qBnBTuB;AFgoI7B;;AqBxnIK;EAMG,gDnBdqB;AFooI7B;;AqB5nII;EACE,wBnBCwB;AF8nI9B;;AqBhoIK;EAMG,mDnBJsB;AFkoI9B;;AqBpoII;EACE,qBnBLwB;AF4oI9B;;AqBxoIK;EAMG,gDnBVsB;AFgpI9B;;AqB5oII;EACE,qBnBQ4B;AFuoIlC;;AqBhpIK;EAMG,iDnBG0B;AF2oIlC;;AqBppII;EACE,qBnBU4B;AF6oIlC;;AqBxpIK;EAMG,kDnBK0B;AFipIlC;;AqB5pII;EACE,qBnBS4B;AFspIlC;;AqBhqIK;EAMG,kDnBI0B;AF0pIlC;;AqBpqII;EACE,qBnBO4B;AFgqIlC;;AqBxqIK;EAMG,iDnBE0B;AFoqIlC;;AqB5qII;EACE,qBnBM4B;AFyqIlC;;AqBhrIK;EAMG,kDnBC0B;AF6qIlC;;AqBprII;EACE,qBnBY4B;AF2qIlC;;AqBxrIK;EAMG,iDnBO0B;AF+qIlC;;AqBprIE;ElB0BA,kBDuBgB;ECtBhB,kBDRc;AFsqIhB;;AqBvrIE;ElB2BA,kBDZc;AF4qIhB;;AqBzrIE;ElB2BA,iBDfa;AFirIf;;AqB1rIE;EACE,cAAc;EACd,WAAW;ArB6rIf;;AqB5rIE;EACE,eAAe;EACf,WAAW;ArB+rIf;;AqB7rIA;EAGI,uBnBmCqB;EmBlCrB,iBAAiB;EACjB,kBAAkB;ArB8rItB;;AqBnsIA;EAOI,6BAA6B;EAC7B,yBAAyB;EACzB,gBAAgB;EAChB,eAAe;EACf,gBAAgB;ArBgsIpB;;AqB9rIA;EAEE,cAAc;EACd,eAAe;EACf,eAAe;EACf,gBAAgB;EAChB,gBAAgB;ArBgsIlB;;AqBtsIA;EAQI,iBAAiB;EACjB,iBAAiB;ArBksIrB;;AqB3sIA;EAWI,eAAe;ArBosInB;;AqB/sIA;EAcI,YAAY;ArBqsIhB;;AsBhwIA;EACE,eAAe;EACf,qBAAqB;EACrB,iBAAiB;EACjB,kBAAkB;AtBmwIpB;;AsBlwIE;EACE,eAAe;AtBqwInB;;AsBpwIE;EACE,cpBF0B;AFywI9B;;AsBtwIE;;;EAEE,cpBH0B;EoBI1B,mBAAmB;AtB0wIvB;;AsBrwIA;EAGI,kBAAkB;AtBswItB;;AuB1xIA;EACE,qBAAqB;EACrB,eAAe;EACf,kBAAkB;EAClB,mBAAmB;AvB6
xIrB;;AuBjyIA;EAMI,cpBDmB;AHgyIvB;;AuBryIA;EAUM,qBrBW4B;EqBV5B,cAAc;EACd,UAAU;AvB+xIhB;;AuB3yIA;EAeM,uBrBuDmB;EqBtDnB,iBAAiB;AvBgyIvB;;AuBhzIA;EAmBI,eAAe;EACf,cAAc;EACd,cAAc;EACd,eAAe;EACf,aAAa;AvBiyIjB;;AuBxzIA;EAyBM,aAAa;AvBmyInB;;AuB5zIA;;EA4BM,wBrBhBwB;AFqzI9B;;AuBj0IA;EA8BM,oBAAoB;AvBuyI1B;;AuBr0IA;EAgCM,YAAY;EACZ,UAAU;AvByyIhB;;AuB10IA;EAmCQ,kBAAkB;AvB2yI1B;;AuB90IA;EAuCM,qBrBjCwB;AF40I9B;;AuBl1IA;EA6CQ,mBrB/BuB;AFw0I/B;;AuBt1IA;EA+CQ,mBrBjCuB;AF40I/B;;AuB11IA;EAkDU,qBAAgC;AvB4yI1C;;AuB91IA;EAuDU,mDrBzCqB;AFo1I/B;;AuBl2IA;EA6CQ,qBrB3CqB;AFo2I7B;;AuBt2IA;EA+CQ,qBrB7CqB;AFw2I7B;;AuB12IA;EAkDU,mBAAgC;AvB4zI1C;;AuB92IA;EAuDU,gDrBrDmB;AFg3I7B;;AuBl3IA;EA6CQ,wBrBjCsB;AF02I9B;;AuBt3IA;EA+CQ,wBrBnCsB;AF82I9B;;AuB13IA;EAkDU,qBAAgC;AvB40I1C;;AuB93IA;EAuDU,mDrB3CoB;AFs3I9B;;AuBl4IA;EA6CQ,qBrBvCsB;AFg4I9B;;AuBt4IA;EA+CQ,qBrBzCsB;AFo4I9B;;AuB14IA;EAkDU,qBAAgC;AvB41I1C;;AuB94IA;EAuDU,gDrBjDoB;AF44I9B;;AuBl5IA;EA6CQ,qBrB1B0B;AFm4IlC;;AuBt5IA;EA+CQ,qBrB5B0B;AFu4IlC;;AuB15IA;EAkDU,qBAAgC;AvB42I1C;;AuB95IA;EAuDU,iDrBpCwB;AF+4IlC;;AuBl6IA;EA6CQ,qBrBxB0B;AFi5IlC;;AuBt6IA;EA+CQ,qBrB1B0B;AFq5IlC;;AuB16IA;EAkDU,qBAAgC;AvB43I1C;;AuB96IA;EAuDU,kDrBlCwB;AF65IlC;;AuBl7IA;EA6CQ,qBrBzB0B;AFk6IlC;;AuBt7IA;EA+CQ,qBrB3B0B;AFs6IlC;;AuB17IA;EAkDU,qBAAgC;AvB44I1C;;AuB97IA;EAuDU,kDrBnCwB;AF86IlC;;AuBl8IA;EA6CQ,qBrB3B0B;AFo7IlC;;AuBt8IA;EA+CQ,qBrB7B0B;AFw7IlC;;AuB18IA;EAkDU,qBAAgC;AvB45I1C;;AuB98IA;EAuDU,iDrBrCwB;AFg8IlC;;AuBl9IA;EA6CQ,qBrB5B0B;AFq8IlC;;AuBt9IA;EA+CQ,qBrB9B0B;AFy8IlC;;AuB19IA;EAkDU,qBAAgC;AvB46I1C;;AuB99IA;EAuDU,kDrBtCwB;AFi9IlC;;AuBl+IA;EA6CQ,qBrBtB0B;AF+8IlC;;AuBt+IA;EA+CQ,qBrBxB0B;AFm9IlC;;AuB1+IA;EAkDU,qBAAgC;AvB47I1C;;AuB9+IA;EAuDU,iDrBhCwB;AF29IlC;;AuBl/IA;EpB4CE,kBDuBgB;ECtBhB,kBDRc;AFk9IhB;;AuBv/IA;EpB+CE,kBDZc;AFw9IhB;;AuB3/IA;EpBiDE,iBDfa;AF69If;;AuB//IA;EAkEM,qBrB1DwB;AF2/I9B;;AuBngJA;EAoEI,WAAW;AvBm8If;;AuBvgJA;EAsEM,WAAW;AvBq8IjB;;AuB3gJA;EA0EM,aAAa;EACb,kBAAkB;EAClB,cAAc;EACd,YAAY;EACZ,uBAAe;UAAf,eAAe;AvBq8IrB;;AuBnhJA;EAgFM,kBrB3CU;AFk/IhB;;AuBvhJA;EAkFM,kBrB/CU;AFw/IhB;;AuB3hJA;EAoFM,iBrBlDS;AF6/If;;AwBlhJA;EAEE,oBAAoB;EACpB,aAAa;EACb,2BAA2B;EAC3B,kBAAkB;AxBohJpB;;AwBzhJA;EAYQ,uBtBXuB;EsBYvB,yBAAyB;EACzB,ctBzBqB;AF0iJ7B;;AwB/hJA;EAkBU,yBAAsC;EACtC,yBAAyB;EACzB,ctB/BmB;AFgjJ7B;;AwBriJA;EAwBU,yBAAyB;EACzB,+CtBxBqB;EsByBrB,ctBrCmB;AFsjJ7B;;AwB3iJA;EA8BU,yBAAoC;EACpC,yBAAyB;EACzB,ctB3CmB;AF4jJ7B;;AwBjjJA;EAYQ,yBtBvBqB;EsBwBrB,yBAAyB;EACzB,YtBbuB;AFsjJ/B;;AwBvjJA;EAkBU,yBAAsC;EACtC,yBAAyB;EACzB,YtBnBqB;AF4jJ/B;;AwB7jJA;EAwBU,yBAAyB;EACzB,4CtBpCmB;EsBqCnB,YtBzBqB;AFkkJ/B;;AwBnkJA;EA8BU,uBAAoC;EACpC,yBAAyB;EACzB,YtB/BqB;AFwkJ/B;;AwBzkJA;EAYQ,4BtBbsB;EsBctB,yBAAyB;EACzB,ctBrBsB;AFslJ9B;;AwB/kJA;EAkBU,yBAAsC;EACtC,yBAAyB;EACzB,ctB3BoB;AF4lJ9B;;AwBrlJA;EAwBU,yBAAyB;EACzB,+CtB1BoB;EsB2BpB,ctBjCoB;AFkmJ9B;;AwB3lJA;EA8BU,yBAAoC;EACpC,yBAAyB;EACzB,ctBvCoB;AFwmJ9B;;AwBjmJA;EAYQ,yBtBnBsB;EsBoBtB,yBAAyB;EACzB,iBtBfsB;AFwmJ9B;;AwBvmJA;EAkBU,yBAAsC;EACtC,yBAAyB;EACzB,iBtBrBoB;AF8mJ9B;;AwB7mJA;EAwBU,yBAAyB;EACzB,4CtBhCoB;EsBiCpB,iBtB3BoB;AFonJ9B;;AwBnnJA;EA8BU,yBAAoC;EACpC,yBAAyB;EACzB,iBtBjCoB;AF0nJ9B;;AwBznJA;EAYQ,yBtBN0B;EsBO1B,yBAAyB;EACzB,WfkCQ;AT+kJhB;;AwB/nJA;EAkBU,yBAAsC;EACtC,yBAAyB;EACzB,Wf4BM;ATqlJhB;;AwBroJA;EAwBU,yBAAyB;EACzB,6CtBnBwB;EsBoBxB,WfsBM;AT2lJhB;;AwB3oJA;EA8BU,yBAAoC;EACpC,yBAAyB;EACzB,WfgBM;ATimJhB;;AwBjpJA;EAYQ,yBtBJ0B;EsBK1B,yBAAyB;EACzB,WfkCQ;ATumJhB;;AwBvpJA;EAkBU,yBAAsC;EACtC,yBAAyB;EACzB,Wf4BM;AT6mJhB;;AwB7pJA;EAwBU,yBAAyB;EACzB,8CtBjBwB;EsBkBxB,WfsBM;ATmnJhB;;AwBnqJA;EA8BU,yBAAoC;EACpC,yBAAyB;EACzB,WfgBM;ATynJhB;;AwBzqJA;EAYQ,yBtBL0B;EsBM1B,yBAAyB;EACzB,WfkCQ;AT+nJhB;;AwB/qJA;
EAkBU,yBAAsC;EACtC,yBAAyB;EACzB,Wf4BM;ATqoJhB;;AwBrrJA;EAwBU,yBAAyB;EACzB,8CtBlBwB;EsBmBxB,WfsBM;AT2oJhB;;AwB3rJA;EA8BU,yBAAoC;EACpC,yBAAyB;EACzB,WfgBM;ATipJhB;;AwBjsJA;EAYQ,yBtBP0B;EsBQ1B,yBAAyB;EACzB,WfkCQ;ATupJhB;;AwBvsJA;EAkBU,yBAAsC;EACtC,yBAAyB;EACzB,Wf4BM;AT6pJhB;;AwB7sJA;EAwBU,yBAAyB;EACzB,6CtBpBwB;EsBqBxB,WfsBM;ATmqJhB;;AwBntJA;EA8BU,yBAAoC;EACpC,yBAAyB;EACzB,WfgBM;ATyqJhB;;AwBztJA;EAYQ,yBtBR0B;EsBS1B,yBAAyB;EACzB,yBfgCa;ATirJrB;;AwB/tJA;EAkBU,yBAAsC;EACtC,yBAAyB;EACzB,yBf0BW;ATurJrB;;AwBruJA;EAwBU,yBAAyB;EACzB,8CtBrBwB;EsBsBxB,yBfoBW;AT6rJrB;;AwB3uJA;EA8BU,yBAAoC;EACpC,yBAAyB;EACzB,yBfcW;ATmsJrB;;AwBjvJA;EAYQ,yBtBF0B;EsBG1B,yBAAyB;EACzB,WfkCQ;ATusJhB;;AwBvvJA;EAkBU,yBAAsC;EACtC,yBAAyB;EACzB,Wf4BM;AT6sJhB;;AwB7vJA;EAwBU,yBAAyB;EACzB,6CtBfwB;EsBgBxB,WfsBM;ATmtJhB;;AwBnwJA;EA8BU,yBAAoC;EACpC,yBAAyB;EACzB,WfgBM;ATytJhB;;AwBzwJA;EAmCI,kBtBXY;AFqvJhB;;AwB7wJA;EAqCI,kBtBfY;AF2vJhB;;AwBjxJA;EAwCQ,eAAe;AxB6uJvB;;AwBrxJA;EA0CI,iBtBrBW;AFowJf;;AwBzxJA;EA6CQ,eAAe;AxBgvJvB;;AwB7xJA;EAiDM,6BAA6B;EAC7B,0BAA0B;AxBgvJhC;;AwBlyJA;EAoDM,4BAA4B;EAC5B,yBAAyB;AxBkvJ/B;;AwBvyJA;EAwDQ,kBtBDI;AFovJZ;;AwB3yJA;EA0DQ,aAAa;AxBqvJrB;;AwB/yJA;EA6DM,sBAAsB;AxBsvJ5B;;AwBnzJA;EA+DM,sBAAsB;EACtB,YAAY;EACZ,gBAAgB;AxBwvJtB;;AwBzzJA;EAmEM,uBAAuB;AxB0vJ7B;;AwB7zJA;EAqEM,aAAa;EACb,YAAY;AxB4vJlB;;AwBl0JA;EAwEQ,eAAe;AxB8vJvB;;AwBt0JA;EA2EQ,eAAe;AxB+vJvB;;AwB10JA;EA8EQ,eAAe;AxBgwJvB;;AwB90JA;EAiFQ,eAAe;AxBiwJvB;;AwBl1JA;EAoFQ,0BAA4C;AxBkwJpD;;AwBt1JA;EAsFQ,0BtB/BI;EsBgCJ,uBAAuB;AxBowJ/B;;AwB31JA;EAyFI,uBAAuB;AxBswJ3B;;AwB/1JA;EA4FM,WAAW;AxBuwJjB;;AwBn2JA;EA8FM,YAAY;EACZ,eAAe;AxBywJrB;;AwBx2JA;EAiGI,yBAAyB;AxB2wJ7B;;AwB52JA;EAmGM,0BAA4C;AxB6wJlD;;AwBh3JA;EAqGM,0BtB9CM;EsB+CN,2BAA2B;EAC3B,SAAS;AxB+wJf;;AwB7wJA;EACE,oBAAoB;EACpB,aAAa;EACb,eAAe;EACf,2BAA2B;EAC3B,gBAAgB;EAChB,kBAAkB;AxBgxJpB;;AwBtxJA;EASM,yBAA0D;EAC1D,ctB1HwB;AF24J9B;;AwB3xJA;EAYM,qBAAmD;AxBmxJzD;;AwB/xJA;EAeM,yBAAwD;EACxD,ctBhIwB;AFo5J9B;;AwBpyJA;EAkBM,qBAAiD;AxBsxJvD;;AwBpxJA;EACE,YAAY;EACZ,OAAO;EACP,UAAU;EACV,aAAa;EACb,kBAAkB;EAClB,MAAM;EACN,WAAW;AxBuxJb;;AwBrxJA;;EAGE,qBtB5I4B;EsB6I5B,kBtBnFU;EsBoFV,cAAc;EACd,iBAAiB;EACjB,kBAAkB;EAClB,mBAAmB;AxBuxJrB;;AwBrxJA;EACE,4BtBlJ4B;EsBmJ5B,ctBxJ4B;AFg7J9B;;AwBtxJA;EACE,qBtBxJ4B;EsByJ5B,mBA1J4B;EA2J5B,2BA1JoC;EA2JpC,cAAc;EACd,eA3JwB;EA4JxB,gBAAgB;EAChB,gBAAgB;EAChB,uBAAuB;AxByxJzB;;AwBvxJA;EACE,mBAAmB;EACnB,aAAa;EACb,WAAW;EACX,uBAAuB;EACvB,mBAAmB;EACnB,UAAU;AxB0xJZ;;AwBhyJA;EAQI,eAAe;AxB4xJnB;;AyB18JA;EACE,cvBA4B;EuBC5B,cAAc;EACd,evB4BW;EuB3BX,gBvBkCe;AF26JjB;;AyBj9JA;EAMI,oBAAoB;AzB+8JxB;;AyBr9JA;EASI,kBvBuBY;AFy7JhB;;AyBz9JA;EAWI,kBvBmBY;AF+7JhB;;AyB79JA;EAaI,iBvBgBW;AFo8Jf;;AyBl9JA;EACE,cAAc;EACd,kBvBec;EuBdd,mBAAmB;AzBq9JrB;;AyBx9JA;EAOM,YvBbyB;AFk+J/B;;AyB59JA;EAOM,cvBzBuB;AFk/J7B;;AyBh+JA;EAOM,iBvBfwB;AF4+J9B;;AyBp+JA;EAOM,cvBrBwB;AFs/J9B;;AyBx+JA;EAOM,cvBR4B;AF6+JlC;;AyB5+JA;EAOM,cvBN4B;AF++JlC;;AyBh/JA;EAOM,cvBP4B;AFo/JlC;;AyBp/JA;EAOM,cvBT4B;AF0/JlC;;AyBx/JA;EAOM,cvBV4B;AF+/JlC;;AyB5/JA;EAOM,cvBJ4B;AF6/JlC;;AyBr/JA;EAEI,sBAAsB;AzBu/J1B;;AyBz/JA;EAKI,aAAa;EACb,2BAA2B;AzBw/J/B;;AyB9/JA;EASQ,kBAAkB;AzBy/J1B;;AyBlgKA;;;EAcU,gBAAgB;AzB0/J1B;;AyBxgKA;;;EAmBU,6BAA6B;EAC7B,0BAA0B;AzB2/JpC;;AyB/gKA;;;EAyBU,4BAA4B;EAC5B,yBAAyB;AzB4/JnC;;AyBthKA;;;;;EAiCY,UAAU;AzB6/JtB;;AyB9hKA;;;;;;;;;EAsCY,UAAU;AzBogKtB;;AyB1iKA;;;;;;;;;EAwCc,UAAU;AzB8gKxB;;AyBtjKA;EA0CQ,YAAY;EACZ,cAAc;AzBghKtB;;AyB3jKA;EA6CM,uBAAuB;AzBkhK7B;;AyB/jKA;EA+CM,yBAAyB;AzBohK/B;;AyBnkKA;EAkDQ,YAAY;EACZ,cAAc;AzBqhKtB;;AyBxkKA;EAqDI,aAAa;EACb,2BAA2B;AzBuhK/B;;AyB7kKA;EAwDM,cAAc;AzByhKpB;;AyBjlKA;EA0DQ,gBAAgB;EAChB,qBAAqB;AzB2hK7B;;AyBtlKA;EA6DQ,YAAY;EACZ,cAA
c;AzB6hKtB;;AyB3lKA;EAgEM,uBAAuB;AzB+hK7B;;AyB/lKA;EAkEM,yBAAyB;AzBiiK/B;;AyBnmKA;EAoEM,eAAe;AzBmiKrB;;AyBvmKA;EAwEU,sBAAsB;AzBmiKhC;;AyB3mKA;EA0EQ,uBAAuB;AzBqiK/B;;AyB/mKA;EA4EQ,gBAAgB;AzBuiKxB;;AC7jKE;EwBtDF;IA+EM,aAAa;EzByiKjB;AACF;;AyBxiKA;EAEI,kBAAkB;AzB0iKtB;;AC3kKE;EwB+BF;IAII,qBAAqB;EzB6iKvB;AACF;;AC7kKE;EwB2BF;IAMI,aAAa;IACb,YAAY;IACZ,cAAc;IACd,oBAAoB;IACpB,iBAAiB;EzBijKnB;EyB3jKF;IAYM,kBvBvFU;IuBwFV,oBAAoB;EzBkjKxB;EyB/jKF;IAeM,oBAAoB;EzBmjKxB;EyBlkKF;IAiBM,kBvB9FU;IuB+FV,oBAAoB;EzBojKxB;EyBtkKF;IAoBM,iBvBlGS;IuBmGT,oBAAoB;EzBqjKxB;AACF;;AyBpjKA;EAEI,gBAAgB;AzBsjKpB;;AC1mKE;EwBkDF;IAII,aAAa;IACb,aAAa;IACb,YAAY;IACZ,cAAc;EzByjKhB;EyBhkKF;IASM,gBAAgB;EzB0jKpB;EyBnkKF;IAWM,cAAc;EzB2jKlB;EyBtkKF;IAaQ,YAAY;EzB4jKlB;EyBzkKF;IAeQ,qBAAqB;EzB6jK3B;AACF;;AyB5jKA;EACE,sBAAsB;EACtB,WAAW;EACX,evBvHW;EuBwHX,kBAAkB;EAClB,gBAAgB;AzB+jKlB;;AyBpkKA;;;EAaU,cvB7JoB;AF0tK9B;;AyB1kKA;;;EAeQ,kBvBlIQ;AFmsKhB;;AyBhlKA;;;EAiBQ,kBvBtIQ;AF2sKhB;;AyBtlKA;;;EAmBQ,iBvBzIO;AFktKf;;AyB5lKA;EAqBM,cvBnKwB;EuBoKxB,ctBzKiB;EsB0KjB,oBAAoB;EACpB,kBAAkB;EAClB,MAAM;EACN,atB7KiB;EsB8KjB,UAAU;AzB2kKhB;;AyBtmKA;;EA+BM,oBtBlLiB;AH8vKvB;;AyB3mKA;EAiCM,OAAO;AzB8kKb;;AyB/mKA;;EAqCM,qBtBxLiB;AHuwKvB;;AyBpnKA;EAuCM,QAAQ;AzBilKd;;AyBxnKA;EA2CM,6BAA6B;EAC7B,cAAc;EACd,YAAY;EACZ,UAAU;AzBilKhB;;AyB/nKA;EAgDM,kBvBnKU;AFsvKhB;;AyBnoKA;EAkDM,kBvBvKU;AF4vKhB;;AyBvoKA;EAoDM,iBvB1KS;AFiwKf;;A0B1xKA;EAGE,exBwBW;EwBvBX,mBAAmB;A1B2xKrB;;A0B/xKA;EAMI,mBAAmB;EACnB,cxBK8B;EwBJ9B,aAAa;EACb,uBAAuB;EACvB,iBAduC;A1B2yK3C;;A0BvyKA;EAYM,cxBfwB;AF8yK9B;;A0B3yKA;EAcI,mBAAmB;EACnB,aAAa;A1BiyKjB;;A0BhzKA;EAiBM,eAAe;A1BmyKrB;;A0BpzKA;EAoBQ,cxBvBsB;EwBwBtB,eAAe;EACf,oBAAoB;A1BoyK5B;;A0B1zKA;EAwBM,cxBxBwB;EwByBxB,iBAAiB;A1BsyKvB;;A0B/zKA;;EA4BI,uBAAuB;EACvB,aAAa;EACb,eAAe;EACf,2BAA2B;A1BwyK/B;;A0Bv0KA;EAkCM,mBAAmB;A1ByyKzB;;A0B30KA;EAoCM,kBAAkB;A1B2yKxB;;A0B/0KA;;EAyCM,uBAAuB;A1B2yK7B;;A0Bp1KA;;EA6CM,yBAAyB;A1B4yK/B;;A0Bz1KA;EAgDI,kBxBpBY;AFi0KhB;;A0B71KA;EAkDI,kBxBxBY;AFu0KhB;;A0Bj2KA;EAoDI,iBxB3BW;AF40Kf;;A0Br2KA;EAwDM,iBAAiB;A1BizKvB;;A0Bz2KA;EA2DM,iBAAiB;A1BkzKvB;;A0B72KA;EA8DM,iBAAiB;A1BmzKvB;;A0Bj3KA;EAiEM,iBAAiB;A1BozKvB;;A2B32KA;EACE,uBzBN6B;EyBO7B,4EzBnB2B;EyBoB3B,czBf4B;EyBgB5B,eAAe;EACf,kBAAkB;A3B82KpB;;A2B52KA;EACE,6BAvBwC;EAwBxC,oBAAoB;EACpB,2CzB3B2B;EyB4B3B,aAAa;A3B+2Kf;;A2B72KA;EACE,mBAAmB;EACnB,czB5B4B;EyB6B5B,aAAa;EACb,YAAY;EACZ,gBzBMe;EyBLf,gBAhC2B;A3Bg5K7B;;A2Bt3KA;EAQI,uBAAuB;A3Bk3K3B;;A2Bh3KA;EACE,mBAAmB;EACnB,eAAe;EACf,aAAa;EACb,uBAAuB;EACvB,gBAzC2B;A3B45K7B;;A2Bj3KA;EACE,cAAc;EACd,kBAAkB;A3Bo3KpB;;A2Bl3KA;EACE,6BA5CyC;EA6CzC,eA5C2B;A3Bi6K7B;;A2Bn3KA;EACE,6BA7CwC;EA8CxC,6BzBjD4B;EyBkD5B,oBAAoB;EACpB,aAAa;A3Bs3Kf;;A2Bp3KA;EACE,mBAAmB;EACnB,aAAa;EACb,aAAa;EACb,YAAY;EACZ,cAAc;EACd,uBAAuB;EACvB,gBAvD2B;A3B86K7B;;A2B93KA;EASI,+BzB9D0B;AFu7K9B;;A2Br3KA;EAEI,qBzB/BkB;AFs5KtB;;A4Bl7KA;EACE,oBAAoB;EACpB,kBAAkB;EAClB,mBAAmB;A5Bq7KrB;;A4Bx7KA;EAOM,cAAc;A5Bq7KpB;;A4B57KA;EAUM,UAAU;EACV,QAAQ;A5Bs7Kd;;A4Bj8KA;EAcM,YAAY;EACZ,mBA9BuB;EA+BvB,oBAAoB;EACpB,SAAS;A5Bu7Kf;;A4Br7KA;EACE,aAAa;EACb,OAAO;EACP,gBAzC6B;EA0C7B,gBAtC2B;EAuC3B,kBAAkB;EAClB,SAAS;EACT,WApCqB;A5B49KvB;;A4Bt7KA;EACE,uB1BlC6B;E0BmC7B,kB1BmBU;E0BlBV,4E1BhD2B;E0BiD3B,sBA9CsC;EA+CtC,mBA9CmC;A5Bu+KrC;;Acn+KgB;Ec6Cd,c1BhD4B;E0BiD5B,cAAc;EACd,mBAAmB;EACnB,gBAAgB;EAChB,sBAAsB;EACtB,kBAAkB;A5B07KpB;;A4Bx7KA;;EAEE,mBAAmB;EACnB,gBAAgB;EAChB,mBAAmB;EACnB,WAAW;A5B27Kb;;A4Bh8KA;;EAOI,4B1BzD0B;E0B0D1B,c1BpEyB;AFkgL7B;;A4Bt8KA;;EAUI,yB1BnD8B;E0BoD9B,WnBZY;AT68KhB;;A4B/7KA;EACE,yB1BlE4B;E0BmE5B,YAAY;EACZ,cAAc;EACd,WAAW;EACX,gBAAgB;A5Bk8KlB;;A6BhhLA;EAEE,mBAAmB;EACnB,8BAA8B;A7BkhLhC;;A6BrhLA;EAKI,kB3B6DQ;A
Fu9KZ;;A6BzhLA;EAOI,qBAAqB;EACrB,mBAAmB;A7BshLvB;;A6B9hLA;EAWI,aAAa;A7BuhLjB;;A6BliLA;;EAcM,aAAa;A7ByhLnB;;A6BviLA;EAgBM,aAAa;A7B2hLnB;;A6B3iLA;EAmBQ,gBAAgB;EAChB,qBAtBiC;A7BkjLzC;;A6BhjLA;EAsBQ,YAAY;A7B8hLpB;;ACj+KE;E4BnFF;IAyBI,aAAa;E7BgiLf;E6BzjLF;IA4BQ,YAAY;E7BgiLlB;AACF;;A6B/hLA;EACE,mBAAmB;EACnB,aAAa;EACb,gBAAgB;EAChB,YAAY;EACZ,cAAc;EACd,uBAAuB;A7BkiLzB;;A6BxiLA;;EASI,gBAAgB;A7BoiLpB;;AC5/KE;E4BjDF;IAaM,sBA7CmC;E7BklLvC;AACF;;A6BpiLA;;EAEE,gBAAgB;EAChB,YAAY;EACZ,cAAc;A7BuiLhB;;A6B3iLA;;EAQM,YAAY;A7BwiLlB;;AC1gLE;E4BtCF;;IAYQ,qBA3DiC;E7BqmLvC;AACF;;A6BziLA;EACE,mBAAmB;EACnB,2BAA2B;A7B4iL7B;;AC1hLE;E4BpBF;IAMM,kBAAkB;E7B6iLtB;AACF;;AC5hLE;E4BxBF;IAQI,aAAa;E7BijLf;AACF;;A6BhjLA;EACE,mBAAmB;EACnB,yBAAyB;A7BmjL3B;;ACviLE;E4BdF;IAKI,aAAa;E7BqjLf;AACF;;A8BxnLA;EAEE,uB5BE6B;E4BD7B,kB5BuDU;E4BtDV,4E5BZ2B;AFsoL7B;;A8BrnLA;EACE,cAAc;EACd,kBAAkB;A9BwnLpB;;A8B1nLA;EAII,c5BhB0B;AF0oL9B;;A8B9nLA;EAMI,2B5B2CQ;E4B1CR,4B5B0CQ;AFklLZ;;A8BnoLA;EASI,8B5BwCQ;E4BvCR,+B5BuCQ;AFulLZ;;A8BxoLA;EAYI,gC5BrB0B;AFqpL9B;;A8B5oLA;EAcI,yB5BZ8B;E4Ba9B,WrB2BY;ATumLhB;;A8BhoLA;EACE,4B5BzB4B;E4B0B5B,eAAe;A9BmoLjB;;A+BzqLA;EACE,uBAAuB;EACvB,aAAa;EACb,gBAAgB;A/B4qLlB;;A+B/qLA;EAKI,sBAAsB;A/B8qL1B;;A+BnrLA;EAOI,8C7BG0B;E6BF1B,aAAa;EACb,oBAAoB;A/BgrLxB;;A+BzrLA;;EAYM,qBAAqB;A/BkrL3B;;A+B9rLA;EAcM,mBAAmB;A/BorLzB;;A+BlsLA;EAgBQ,kBAAkB;A/BsrL1B;;A+BtsLA;EAkBI,8C7BR0B;E6BS1B,gBAAgB;EAChB,iBAAiB;A/BwrLrB;;A+B5sLA;EAwBM,kBAAkB;EAClB,mBAAmB;A/BwrLzB;;A+BtrLA;;EAEE,gBAAgB;EAChB,YAAY;EACZ,cAAc;A/ByrLhB;;A+BvrLA;EACE,kBAAkB;A/B0rLpB;;A+BxrLA;EACE,iBAAiB;A/B2rLnB;;A+BzrLA;EACE,gBAAgB;EAChB,YAAY;EACZ,cAAc;EACd,gBAAgB;A/B4rLlB;;ACtpLE;E8B1CF;IAQI,gBAAgB;E/B6rLlB;AACF;;AgC3tLA;EACE,e9BiBW;AF6sLb;;AgC/tLA;EAII,kB9BeY;AFgtLhB;;AgCnuLA;EAMI,kB9BWY;AFstLhB;;AgCvuLA;EAQI,iB9BQW;AF2tLf;;AgCjuLA;EACE,iBArB0B;AhCyvL5B;;AgCruLA;EAGI,kB9BoCc;E8BnCd,c9BzB0B;E8B0B1B,cAAc;EACd,qBAzBiC;AhC+vLrC;;AgC5uLA;EAQM,4B9BxBwB;E8ByBxB,c9B/BwB;AFuwL9B;;AgCjvLA;EAYM,yB9BnB4B;E8BoB5B,WvBoBU;ATqtLhB;;AgCtvLA;EAgBM,8B9BlCwB;E8BmCxB,cAnC0B;EAoC1B,oBAnCgC;AhC6wLtC;;AgCxuLA;EACE,c9BzC4B;E8B0C5B,iBApC2B;EAqC3B,qBApC+B;EAqC/B,yBAAyB;AhC2uL3B;;AgC/uLA;EAMI,eAtCoB;AhCmxLxB;;AgCnvLA;EAQI,kBAxCoB;AhCuxLxB;;AiClxLA;EAEE,4B/BX4B;E+BY5B,kB/B4CU;E+B3CV,e/BWW;AFywLb;;AiCxxLA;EAMI,mBAAmB;AjCsxLvB;;AiC5xLA;EAQI,mBAAmB;EACnB,0BAA0B;AjCwxL9B;;AiCjyLA;EAYI,kB/BIY;AFqxLhB;;AiCryLA;EAcI,kB/BAY;AF2xLhB;;AiCzyLA;EAgBI,iB/BHW;AFgyLf;;AiC7yLA;EA0BM,uBAAmD;AjCuxLzD;;AiCjzLA;EA4BQ,uB/BnCuB;E+BoCvB,c/BhDqB;AFy0L7B;;AiCtzLA;EA+BQ,mB/BtCuB;E+BuCvB,cAA6E;AjC2xLrF;;AiC3zLA;EA0BM,yBAAmD;AjCqyLzD;;AiC/zLA;EA4BQ,yB/B/CqB;E+BgDrB,Y/BpCuB;AF20L/B;;AiCp0LA;EA+BQ,qB/BlDqB;E+BmDrB,cAA6E;AjCyyLrF;;AiCz0LA;EA0BM,yBAAmD;AjCmzLzD;;AiC70LA;EA4BQ,4B/BrCsB;E+BsCtB,c/B5CsB;AFi2L9B;;AiCl1LA;EA+BQ,wB/BxCsB;E+ByCtB,cAA6E;AjCuzLrF;;AiCv1LA;EA0BM,yBAAmD;AjCi0LzD;;AiC31LA;EA4BQ,yB/B3CsB;E+B4CtB,iB/BtCsB;AFy2L9B;;AiCh2LA;EA+BQ,qB/B9CsB;E+B+CtB,cAA6E;AjCq0LrF;;AiCr2LA;EA0BM,yBAAmD;AjC+0LzD;;AiCz2LA;EA4BQ,yB/B9B0B;E+B+B1B,WxBWQ;ATs0LhB;;AiC92LA;EA+BQ,qB/BjC0B;E+BkC1B,cAA6E;AjCm1LrF;;AiCn3LA;EA0BM,yBAAmD;AjC61LzD;;AiCv3LA;EA4BQ,yB/B5B0B;E+B6B1B,WxBWQ;ATo1LhB;;AiC53LA;EA+BQ,qB/B/B0B;E+BgC1B,cAA6E;AjCi2LrF;;AiCj4LA;EA0BM,yBAAmD;AjC22LzD;;AiCr4LA;EA4BQ,yB/B7B0B;E+B8B1B,WxBWQ;ATk2LhB;;AiC14LA;EA+BQ,qB/BhC0B;E+BiC1B,cAA6E;AjC+2LrF;;AiC/4LA;EA0BM,yBAAmD;AjCy3LzD;;AiCn5LA;EA4BQ,yB/B/B0B;E+BgC1B,WxBWQ;ATg3LhB;;AiCx5LA;EA+BQ,qB/BlC0B;E+BmC1B,cAA6E;AjC63LrF;;AiC75LA;EA0BM,yBAAmD;AjCu4LzD;;AiCj6LA;EA4BQ,yB/BhC0B;E+BiC1B,yBxBSa;ATg4LrB;;AiCt6LA;EA+BQ,qB/BnC0B;E+BoC1B,cAA6E;AjC24LrF;;AiC36LA;EA0BM,yBAAmD;AjCq5LzD;;AiC/6LA;EA4BQ,yB/B1B0B
;E+B2B1B,WxBWQ;AT44LhB;;AiCp7LA;EA+BQ,qB/B7B0B;E+B8B1B,cAA6E;AjCy5LrF;;AiCv5LA;EACE,mBAAmB;EACnB,yB/BlD4B;E+BmD5B,0BAAgE;EAChE,WxBEc;EwBDd,aAAa;EACb,gB/BlBe;E+BmBf,8BAA8B;EAC9B,iBAAiB;EACjB,mBA1DiC;EA2DjC,kBAAkB;AjC05LpB;;AiCp6LA;EAYI,YAAY;EACZ,cAAc;EACd,mBAAmB;AjC45LvB;;AiC16LA;EAgBI,eArDgC;EAsDhC,yBAAyB;EACzB,0BAA0B;AjC85L9B;;AiC55LA;EACE,qB/BlE4B;E+BmE5B,kB/BTU;E+BUV,mBAAmB;EACnB,uBArEmC;EAsEnC,c/BzE4B;E+B0E5B,qBArEiC;AjCo+LnC;;AiCr6LA;;EASI,uB/BtE2B;AFu+L/B;;AiC16LA;EAWI,6BAtEgD;AjCy+LpD;;AkC39LA;EAEE,mBAAmB;EACnB,aAAa;EACb,sBAAsB;EACtB,uBAAuB;EACvB,gBAAgB;EAChB,eAAe;EACf,WAtCU;AlCmgMZ;;AkCr+LA;EAWI,aAAa;AlC89LjB;;AkC59LA;EAEE,wChC3C2B;AFygM7B;;AkC59LA;;EAEE,cA5CgC;EA6ChC,+BAA0D;EAC1D,cAAc;EACd,kBAAkB;EAClB,WAAW;AlC+9Lb;;AC/7LE;EiCtCF;;IASI,cAAc;IACd,8BAA0D;IAC1D,YAtDuB;ElCwhMzB;AACF;;AkCj+LA;EAEE,gBAAgB;EAChB,YAtD2B;EAuD3B,eAAe;EACf,WAvDsB;EAwDtB,SAvDoB;EAwDpB,WA1D2B;AlC6hM7B;;AkCj+LA;EACE,aAAa;EACb,sBAAsB;EACtB,8BAAgD;EAChD,gBAAgB;EAChB,uBAAuB;AlCo+LzB;;AkCl+LA;;EAEE,mBAAmB;EACnB,4BhCnE4B;EgCoE5B,aAAa;EACb,cAAc;EACd,2BAA2B;EAC3B,aAlE4B;EAmE5B,kBAAkB;AlCq+LpB;;AkCn+LA;EACE,gChC7E4B;EgC8E5B,2BhCnBgB;EgCoBhB,4BhCpBgB;AF0/LlB;;AkCp+LA;EACE,chCtF4B;EgCuF5B,YAAY;EACZ,cAAc;EACd,iBhC7Da;EgC8Db,cA3E8B;AlCkjMhC;;AkCr+LA;EACE,8BhC9BgB;EgC+BhB,+BhC/BgB;EgCgChB,6BhC3F4B;AFmkM9B;;AkC3+LA;EAMM,mBAAmB;AlCy+LzB;;AkCv+LA;EjC5CE,iCAAiC;EiC8CjC,uBhC9F6B;EgC+F7B,YAAY;EACZ,cAAc;EACd,cAAc;EACd,aApF4B;AlC8jM9B;;AmCpiMA;EACE,uBjCzC6B;EiC0C7B,mBArDqB;EAsDrB,kBAAkB;EAClB,WApDW;AnC2lMb;;AmC3iMA;EASM,uBjCjDyB;EiCkDzB,cjC9DuB;AFomM7B;;AmChjMA;;EAcU,cjClEmB;AFymM7B;;AmCrjMA;;;;EAoBY,yBAAoC;EACpC,cjCzEiB;AFinM7B;;AmC7jMA;EAwBY,qBjC5EiB;AFqnM7B;;AmCjkMA;EA0BQ,cjC9EqB;AFynM7B;;ACljME;EkCnBF;;;;IAgCY,cjCpFiB;EFioM3B;EmC7kMF;;;;;;;;;;IAsCc,yBAAoC;IACpC,cjC3Fe;EF8oM3B;EmC1lMF;;IA0Cc,qBjC9Fe;EFkpM3B;EmC9lMF;;;IA8CU,yBAAoC;IACpC,cjCnGmB;EFwpM3B;EmCpmMF;IAmDc,uBjC3FiB;IiC4FjB,cjCxGe;EF4pM3B;AACF;;AmCzmMA;EASM,yBjC7DuB;EiC8DvB,YjClDyB;AFspM/B;;AmC9mMA;;EAcU,YjCtDqB;AF2pM/B;;AmCnnMA;;;;EAoBY,uBAAoC;EACpC,YjC7DmB;AFmqM/B;;AmC3nMA;EAwBY,mBjChEmB;AFuqM/B;;AmC/nMA;EA0BQ,YjClEuB;AF2qM/B;;AChnME;EkCnBF;;;;IAgCY,YjCxEmB;EFmrM7B;EmC3oMF;;;;;;;;;;IAsCc,uBAAoC;IACpC,YjC/EiB;EFgsM7B;EmCxpMF;;IA0Cc,mBjClFiB;EFosM7B;EmC5pMF;;;IA8CU,uBAAoC;IACpC,YjCvFqB;EF0sM7B;EmClqMF;IAmDc,yBjCvGe;IiCwGf,YjC5FiB;EF8sM7B;AACF;;AmCvqMA;EASM,4BjCnDwB;EiCoDxB,cjC1DwB;AF4tM9B;;AmC5qMA;;EAcU,cjC9DoB;AFiuM9B;;AmCjrMA;;;;EAoBY,yBAAoC;EACpC,cjCrEkB;AFyuM9B;;AmCzrMA;EAwBY,qBjCxEkB;AF6uM9B;;AmC7rMA;EA0BQ,cjC1EsB;AFivM9B;;AC9qME;EkCnBF;;;;IAgCY,cjChFkB;EFyvM5B;EmCzsMF;;;;;;;;;;IAsCc,yBAAoC;IACpC,cjCvFgB;EFswM5B;EmCttMF;;IA0Cc,qBjC1FgB;EF0wM5B;EmC1tMF;;;IA8CU,yBAAoC;IACpC,cjC/FoB;EFgxM5B;EmChuMF;IAmDc,4BjC7FgB;IiC8FhB,cjCpGgB;EFoxM5B;AACF;;AmCruMA;EASM,yBjCzDwB;EiC0DxB,iBjCpDwB;AFoxM9B;;AmC1uMA;;EAcU,iBjCxDoB;AFyxM9B;;AmC/uMA;;;;EAoBY,yBAAoC;EACpC,iBjC/DkB;AFiyM9B;;AmCvvMA;EAwBY,wBjClEkB;AFqyM9B;;AmC3vMA;EA0BQ,iBjCpEsB;AFyyM9B;;AC5uME;EkCnBF;;;;IAgCY,iBjC1EkB;EFizM5B;EmCvwMF;;;;;;;;;;IAsCc,yBAAoC;IACpC,iBjCjFgB;EF8zM5B;EmCpxMF;;IA0Cc,wBjCpFgB;EFk0M5B;EmCxxMF;;;IA8CU,yBAAoC;IACpC,iBjCzFoB;EFw0M5B;EmC9xMF;IAmDc,yBjCnGgB;IiCoGhB,iBjC9FgB;EF40M5B;AACF;;AmCnyMA;EASM,yBjC5C4B;EiC6C5B,W1BHU;ATiyMhB;;AmCxyMA;;EAcU,W1BPM;ATsyMhB;;AmC7yMA;;;;EAoBY,yBAAoC;EACpC,W1BdI;AT8yMhB;;AmCrzMA;EAwBY,kB1BjBI;ATkzMhB;;AmCzzMA;EA0BQ,W1BnBQ;ATszMhB;;AC1yME;EkCnBF;;;;IAgCY,W1BzBI;ET8zMd;EmCr0MF;;;;;;;;;;IAsCc,yBAAoC;IACpC,W1BhCE;ET20Md;EmCl1MF;;IA0Cc,kB1BnCE;ET+0Md;EmCt1MF;;;IA8CU,yBAAoC;IACpC,W1BxCM;ETq1Md;EmC51MF;IAmDc,yBjCtFoB;IiCuFpB,W1B7CE;ETy1Md;AACF;;AmCj2MA;EASM,yBjC1C4B;EiC2C5B,W1BHU
;AT+1MhB;;AmCt2MA;;EAcU,W1BPM;ATo2MhB;;AmC32MA;;;;EAoBY,yBAAoC;EACpC,W1BdI;AT42MhB;;AmCn3MA;EAwBY,kB1BjBI;ATg3MhB;;AmCv3MA;EA0BQ,W1BnBQ;ATo3MhB;;ACx2ME;EkCnBF;;;;IAgCY,W1BzBI;ET43Md;EmCn4MF;;;;;;;;;;IAsCc,yBAAoC;IACpC,W1BhCE;ETy4Md;EmCh5MF;;IA0Cc,kB1BnCE;ET64Md;EmCp5MF;;;IA8CU,yBAAoC;IACpC,W1BxCM;ETm5Md;EmC15MF;IAmDc,yBjCpFoB;IiCqFpB,W1B7CE;ETu5Md;AACF;;AmC/5MA;EASM,yBjC3C4B;EiC4C5B,W1BHU;AT65MhB;;AmCp6MA;;EAcU,W1BPM;ATk6MhB;;AmCz6MA;;;;EAoBY,yBAAoC;EACpC,W1BdI;AT06MhB;;AmCj7MA;EAwBY,kB1BjBI;AT86MhB;;AmCr7MA;EA0BQ,W1BnBQ;ATk7MhB;;ACt6ME;EkCnBF;;;;IAgCY,W1BzBI;ET07Md;EmCj8MF;;;;;;;;;;IAsCc,yBAAoC;IACpC,W1BhCE;ETu8Md;EmC98MF;;IA0Cc,kB1BnCE;ET28Md;EmCl9MF;;;IA8CU,yBAAoC;IACpC,W1BxCM;ETi9Md;EmCx9MF;IAmDc,yBjCrFoB;IiCsFpB,W1B7CE;ETq9Md;AACF;;AmC79MA;EASM,yBjC7C4B;EiC8C5B,W1BHU;AT29MhB;;AmCl+MA;;EAcU,W1BPM;ATg+MhB;;AmCv+MA;;;;EAoBY,yBAAoC;EACpC,W1BdI;ATw+MhB;;AmC/+MA;EAwBY,kB1BjBI;AT4+MhB;;AmCn/MA;EA0BQ,W1BnBQ;ATg/MhB;;ACp+ME;EkCnBF;;;;IAgCY,W1BzBI;ETw/Md;EmC//MF;;;;;;;;;;IAsCc,yBAAoC;IACpC,W1BhCE;ETqgNd;EmC5gNF;;IA0Cc,kB1BnCE;ETygNd;EmChhNF;;;IA8CU,yBAAoC;IACpC,W1BxCM;ET+gNd;EmCthNF;IAmDc,yBjCvFoB;IiCwFpB,W1B7CE;ETmhNd;AACF;;AmC3hNA;EASM,yBjC9C4B;EiC+C5B,yB1BLe;AT2hNrB;;AmChiNA;;EAcU,yB1BTW;ATgiNrB;;AmCriNA;;;;EAoBY,yBAAoC;EACpC,yB1BhBS;ATwiNrB;;AmC7iNA;EAwBY,gC1BnBS;AT4iNrB;;AmCjjNA;EA0BQ,yB1BrBa;ATgjNrB;;ACliNE;EkCnBF;;;;IAgCY,yB1B3BS;ETwjNnB;EmC7jNF;;;;;;;;;;IAsCc,yBAAoC;IACpC,yB1BlCO;ETqkNnB;EmC1kNF;;IA0Cc,gC1BrCO;ETykNnB;EmC9kNF;;;IA8CU,yBAAoC;IACpC,yB1B1CW;ET+kNnB;EmCplNF;IAmDc,yBjCxFoB;IiCyFpB,yB1B/CO;ETmlNnB;AACF;;AmCzlNA;EASM,yBjCxC4B;EiCyC5B,W1BHU;ATulNhB;;AmC9lNA;;EAcU,W1BPM;AT4lNhB;;AmCnmNA;;;;EAoBY,yBAAoC;EACpC,W1BdI;ATomNhB;;AmC3mNA;EAwBY,kB1BjBI;ATwmNhB;;AmC/mNA;EA0BQ,W1BnBQ;AT4mNhB;;AChmNE;EkCnBF;;;;IAgCY,W1BzBI;ETonNd;EmC3nNF;;;;;;;;;;IAsCc,yBAAoC;IACpC,W1BhCE;ETioNd;EmCxoNF;;IA0Cc,kB1BnCE;ETqoNd;EmC5oNF;;;IA8CU,yBAAoC;IACpC,W1BxCM;ET2oNd;EmClpNF;IAmDc,yBjClFoB;IiCmFpB,W1B7CE;ET+oNd;AACF;;AmCvpNA;EAsDI,oBAAoB;EACpB,aAAa;EACb,mBA3GmB;EA4GnB,WAAW;AnCqmNf;;AmC9pNA;EA2DI,gCjCrG0B;AF4sN9B;;AmClqNA;EALE,OAAO;EACP,eAAe;EACf,QAAQ;EACR,WA7CiB;AnCwtNnB;;AmCzqNA;EAgEI,SAAS;AnC6mNb;;AmC7qNA;EAkEM,iCjC5GwB;AF2tN9B;;AmCjrNA;EAoEI,MAAM;AnCinNV;;AmC/mNA;;EAGI,oBA5HmB;AnC6uNvB;;AmCpnNA;;EAKI,uBA9HmB;AnCkvNvB;;AmClnNA;;EAEE,oBAAoB;EACpB,aAAa;EACb,cAAc;EACd,mBArIqB;AnC0vNvB;;AmCnnNA;EAIM,6BAA6B;AnCmnNnC;;AmCjnNA;ElClFE,iCAAiC;EkCoFjC,gBAAgB;EAChB,gBAAgB;EAChB,kBAAkB;AnConNpB;;AmClnNA;EACE,cjChJ4B;EDoB5B,eAAe;EACf,cAAc;EACd,ekC1BqB;ElC2BrB,kBAAkB;EAClB,ckC5BqB;EAsJrB,iBAAiB;AnCynNnB;;AClvNE;EACE,8BAA8B;EAC9B,cAAc;EACd,WAAW;EACX,qBAAqB;EACrB,kBAAkB;EAClB,gCAAwB;UAAxB,wBAAwB;EACxB,yBCgCQ;ED/BR,iEAAyD;EAAzD,yDAAyD;EAAzD,4EAAyD;EACzD,oCCyBa;EDxBb,WAAW;ADqvNf;;ACpvNI;EACE,oBAAoB;ADuvN1B;;ACtvNI;EACE,oBAAoB;ADyvN1B;;ACxvNI;EACE,oBAAoB;AD2vN1B;;AC1vNE;EACE,qCAA4B;AD6vNhC;;ACzvNM;EACE,gDAAwC;UAAxC,wCAAwC;AD4vNhD;;AC3vNM;EACE,UAAU;AD8vNlB;;AC7vNM;EACE,kDAA0C;UAA1C,0CAA0C;ADgwNlD;;AmChqNA;EACE,aAAa;AnCmqNf;;AmCjqNA;;EAEE,cjCzJ4B;EiC0J5B,cAAc;EACd,gBAAgB;EAChB,uBAAuB;EACvB,kBAAkB;AnCoqNpB;;AmC1qNA;;EASM,qBAAqB;EACrB,sBAAsB;AnCsqN5B;;AmCpqNA;;EAEE,eAAe;AnCuqNjB;;AmCzqNA;;;;;EAOI,yBjCpK0B;EiCqK1B,cjC7J8B;AFu0NlC;;AmCxqNA;EACE,cAAc;EACd,YAAY;EACZ,cAAc;AnC2qNhB;;AmC9qNA;EAKI,mBA3KgC;AnCw1NpC;;AmClrNA;EAOI,UAAU;AnC+qNd;;AmCtrNA;EASI,YAAY;EACZ,cAAc;AnCirNlB;;AmC3rNA;EAYI,oCAAoC;EACpC,mBA9LmB;EA+LnB,kCAAkC;AnCmrNtC;;AmCjsNA;EAiBM,6BAnLyC;EAoLzC,4BjCjL4B;AFq2NlC;;AmCtsNA;EAoBM,6BAnL0C;EAoL1C,4BjCpL4B;EiCqL5B,0BAnLuC;EAoLvC,wBAnLqC;EAoLrC,cjCvL4B;EiCwL5B,kCAAwE;AnCsrN9E;;AmCprNA;EACE,YAAY;EACZ,cAAc;AnCurNhB;;AmCrrNA;E
ACE,oBAAoB;AnCwrNtB;;AmCzrNA;EAII,qBjClM8B;EiCmM9B,oBAAoB;EACpB,cAAc;AnCyrNlB;;AmCvrNA;EACE,mBAAmB;EACnB,sBAAsB;EACtB,mBAAmB;AnC0rNrB;;AmC7rNA;EAKI,oBAAoB;EACpB,qBAAqB;AnC4rNzB;;AmC1rNA;EACE,4BjCxN4B;EiCyN5B,YAAY;EACZ,aAAa;EACb,WA7LyB;EA8LzB,gBAAgB;AnC6rNlB;;ACx1NE;EkCvBF;IAsLI,cAAc;EnC8rNhB;EmC7rNA;;IAGI,mBAAmB;IACnB,aAAa;EnC8rNjB;EmC7rNA;IAEI,aAAa;EnC8rNjB;EmCvxNF;IA2FI,uBjCxO2B;IiCyO3B,4CjCrPyB;IiCsPzB,iBAAiB;EnC+rNnB;EmClsNA;IAKI,cAAc;EnCgsNlB;EmC9rNA;IA3MA,OAAO;IACP,eAAe;IACf,QAAQ;IACR,WA7CiB;EnCy7NjB;EmCpsNA;IAKI,SAAS;EnCksNb;EmCvsNA;IAOM,4CjCjQqB;EFo8N3B;EmC1sNA;IASI,MAAM;EnCosNV;EmC7sNA;IlC9LA,iCAAiC;IkC4M3B,iCAA2C;IAC3C,cAAc;EnCmsNpB;EmClsNA;;IAGI,oBA5QiB;EnC+8NrB;EmCtsNA;;IAKI,uBA9QiB;EnCm9NrB;AACF;;AC94NE;EkC2MA;;;;IAIE,oBAAoB;IACpB,aAAa;EnCusNf;EmC16NF;IAqOI,mBAxRmB;EnCg+NrB;EmCzsNA;IAGI,kBAxR0B;EnCi+N9B;EmC5sNA;;IAMM,mBAAmB;EnC0sNzB;EmChtNA;;IASM,kBjC/NI;EF06NV;EmCptNA;;;;IAgBQ,wCAAwC;EnC0sNhD;EmC1tNA;IAuBU,wCAAwC;EnCssNlD;EmC7tNA;IA4BU,4BjC1SkB;IiC2SlB,cjCrTiB;EFy/N3B;EmCjuNA;IA+BU,4BjC7SkB;IiC8SlB,cjCrSsB;EF0+NhC;EmCz2NF;IAsKI,aAAa;EnCssNf;EmCp2NF;;IAiKI,mBAAmB;IACnB,aAAa;EnCusNf;EmCn1NF;IA8II,aAAa;EnCwsNf;EmCt1NF;IAgJM,oBAAoB;EnCysNxB;EmC5sNA;IAMM,4DAAoD;YAApD,oDAAoD;EnCysN1D;EmC/sNA;IAQM,gCjC/TsB;IiCgUtB,0BAAkE;IAClE,gBAAgB;IAChB,YAAY;IACZ,4CjC3UqB;IiC4UrB,SAAS;EnC0sNf;EmCvtNA;IAmBM,cAAc;EnCusNpB;EmCtsNM;IAEE,UAAU;IACV,oBAAoB;IACpB,gCAAwB;YAAxB,wBAAwB;EnCusNhC;EmCr4NF;IAgMI,YAAY;IACZ,cAAc;EnCwsNhB;EmCvsNA;IACE,2BAA2B;IAC3B,kBAAkB;EnCysNpB;EmCxsNA;IACE,yBAAyB;IACzB,iBAAiB;EnC0sNnB;EmCj1NF;IAyII,uBjCtV2B;IiCuV3B,8BjChSc;IiCiSd,+BjCjSc;IiCkSd,6BjC7V0B;IiC8V1B,2CjCtWyB;IiCuWzB,aAAa;IACb,mBAAmB;IACnB,OAAO;IACP,eAAe;IACf,kBAAkB;IAClB,SAAS;IACT,WAhVkB;EnC2hOpB;EmC/1NF;IAsJM,sBAAsB;IACtB,mBAAmB;EnC4sNvB;EmC3tNA;IAiBI,mBAAmB;EnC6sNvB;EmC9tNA;IAoBM,4BjC3WsB;IiC4WtB,cjCtXqB;EFmkO3B;EmCluNA;IAuBM,4BjC9WsB;IiC+WtB,cjCtW0B;EFojOhC;EmC7sNE;IAEE,kBjCzTY;IiC0TZ,gBAAgB;IAChB,4EjC9XuB;IiC+XvB,cAAc;IACd,UAAU;IACV,oBAAoB;IACpB,wBAA8C;IAC9C,mCAA2B;YAA3B,2BAA2B;IAC3B,yBjC/TM;IiCgUN,+CAAuC;IAAvC,uCAAuC;IAAvC,0DAAuC;EnC8sN3C;EmClvNA;IAsCI,UAAU;IACV,QAAQ;EnC+sNZ;EmCt3NF;IAyKI,cAAc;EnCgtNhB;EmC/sNA;;IAGI,oBAAoB;EnCgtNxB;EmCntNA;;IAKI,qBAAqB;EnCktNzB;EmChtNA;IAnWA,OAAO;IACP,eAAe;IACf,QAAQ;IACR,WA7CiB;EnCmmOjB;EmCttNA;IAKI,SAAS;EnCotNb;EmCztNA;IAOM,4CjCzZqB;EF8mO3B;EmC5tNA;IASI,MAAM;EnCstNV;EmCrtNA;;IAGI,oBA9ZiB;EnConOrB;EmCztNA;;IAKI,uBAhaiB;EnCwnOrB;EmC7tNA;;IAOI,oBAA4D;EnC0tNhE;EmCjuNA;;IASI,uBAA+D;EnC4tNnE;EmC1tNA;;IAGI,cjC1auB;EFqoO3B;EmC9tNA;;IAKI,6BAja2C;EnC8nO/C;EmC5tNA;IAKM,yBjCvasB;EFioO5B;AACF;;AmCvtNA;EAEI,iCAA2C;AnCytN/C;;AoCpnOA;EACE,elCIW;EkCHX,gBA/B0B;ApCspO5B;;AoCznOA;EAKI,kBlCCY;AFunOhB;;AoC7nOA;EAOI,kBlCHY;AF6nOhB;;AoCjoOA;EASI,iBlCNW;AFkoOf;;AoCroOA;;EAaM,iBAAiB;EACjB,kBAAkB;EAClB,uBlCwBmB;AFqmOzB;;AoC5oOA;EAiBM,uBlCsBmB;AFymOzB;;AoC7nOA;;EAEE,mBAAmB;EACnB,aAAa;EACb,uBAAuB;EACvB,kBAAkB;ApCgoOpB;;AoC9nOA;;;;EAME,cA1D6B;EA2D7B,uBAAuB;EACvB,eA3D8B;EA4D9B,mBA3DkC;EA4DlC,oBA3DmC;EA4DnC,kBAAkB;ApC+nOpB;;AoC7nOA;;;EAGE,qBlC/D4B;EkCgE5B,clCpE4B;EkCqE5B,iBjCtEqB;AHssOvB;;AoCroOA;;;EAOI,qBlCpE0B;EkCqE1B,clCxE0B;AF4sO9B;;AoC5oOA;;;EAUI,qBlC3D8B;AFmsOlC;;AoClpOA;;;EAYI,iDlChFyB;AF4tO7B;;AoCxpOA;;;EAcI,yBlC1E0B;EkC2E1B,qBlC3E0B;EkC4E1B,gBAAgB;EAChB,clC/E0B;EkCgF1B,YAAY;ApCgpOhB;;AoC9oOA;;EAEE,oBAAoB;EACpB,qBAAqB;EACrB,mBAAmB;ApCipOrB;;AoC/oOA;EAEI,yBlC7E8B;EkC8E9B,qBlC9E8B;EkC+E9B,W3BvCY;ATwrOhB;;AoC/oOA;EACE,clC9F4B;EkC+F5B,oBAAoB;ApCkpOtB;;AoChpOA;EACE,eAAe;ApCmpOjB;;AC7qOE;EmClDF;IAgFI,eAAe;EpCopOjB;EoCzqOF;;IAwBI,YAAY;IACZ,cAAc;EpCqpOhB;EoCppOA;IAEI,YAAY;IACZ,cAAc;EpCqpOlB;AACF;;ACxrOE;EmCqB
F;IAiBI,YAAY;IACZ,cAAc;IACd,2BAA2B;IAC3B,QAAQ;EpCupOV;EoCtpOA;IACE,QAAQ;EpCwpOV;EoCvpOA;IACE,QAAQ;EpCypOV;EoC5vOF;IAqGI,8BAA8B;EpC0pOhC;EoC3pOA;IAIM,QAAQ;EpC0pOd;EoC9pOA;IAMM,uBAAuB;IACvB,QAAQ;EpC2pOd;EoClqOA;IASM,QAAQ;EpC4pOd;EoCrqOA;IAYM,QAAQ;EpC4pOd;EoCxqOA;IAcM,QAAQ;EpC6pOd;EoC3qOA;IAgBM,yBAAyB;IACzB,QAAQ;EpC8pOd;AACF;;AqCxxOA;EACE,enCQW;AFmxOb;;AqC5xOA;EAGI,qBnCiBkB;AF4wOtB;;AqC3xOA;;;EAGE,gCnCzB4B;EmC0B5B,8BnC1B4B;EmC2B5B,+BnC3B4B;AFyzO9B;;AqCnyOA;;;EAOI,6BnC7B0B;AF+zO9B;;AqChyOA;EACE,4BnC9B4B;EmC+B5B,0BAA8D;EAC9D,cnCtC4B;EmCuC5B,iBArCyB;EAsCzB,gBnCPgB;EmCQhB,iBA1C8B;EA2C9B,qBA1CkC;ArC60OpC;;AqCjyOA;EACE,qBAAqB;EACrB,aAAa;EACb,kBA1C4B;EA2C5B,uBAAuB;ArCoyOzB;;AqCxyOA;EAMI,gCnC9C0B;EmC+C1B,mBAAmB;EACnB,cAAc;ArCsyOlB;;AqC9yOA;EAWM,4BnCtDwB;EmCuDxB,cnCxDwB;AF+1O9B;;AqCryOA;EAEI,cnC3D0B;AFk2O9B;;AqCzyOA;EAIM,cnC/C4B;AFw1OlC;;AqCvyOA;EACE,mBAAmB;EACnB,cnClE4B;EmCmE5B,aAAa;EACb,2BAA2B;EAC3B,qBAAqB;ArC0yOvB;;AqC/yOA;EAOI,oBAAoB;ArC4yOxB;;AqCnzOA;EASI,YAAY;EACZ,cAAc;EACd,WAAW;ArC8yOf;;AqCzzOA;EAaI,eAAe;ArCgzOnB;;AqC7zOA;EAeI,0BnChE8B;EmCiE9B,cnChF0B;AFk4O9B;;AqCl0OA;EAkBM,cnCnE4B;AFu3OlC;;AqClzOA;;EAEE,eAAe;ArCqzOjB;;AqCvzOA;;EAII,4BnClF0B;AF04O9B;;AqCtzOA;EpC9EE,qBAAqB;EACrB,eoC8EgB;EpC7EhB,WoC6EqB;EpC5ErB,gBoC4EqB;EpC3ErB,kBAAkB;EAClB,mBAAmB;EACnB,UoCyEqB;EACrB,cnC1F4B;EmC2F5B,oBAAoB;ArC+zOtB;;AqCl0OA;EAKI,kBAAkB;EAClB,oBAAoB;ArCi0OxB;;AsC34OA;ErCkCE,iCAAiC;EqC9BjC,oBAAoB;EACpB,aAAa;EACb,epCEW;EoCDX,8BAA8B;EAC9B,gBAAgB;EAChB,gBAAgB;EAChB,mBAAmB;AtC44OrB;;AsCt5OA;EAYI,mBAAmB;EACnB,4BpC/B0B;EoCgC1B,0BAzC4B;EA0C5B,wBAzC0B;EA0C1B,cpCrC0B;EoCsC1B,aAAa;EACb,uBAAuB;EACvB,mBAA6C;EAC7C,kBAxCyB;EAyCzB,mBAAmB;AtC84OvB;;AsCn6OA;EAuBM,4BpC7CwB;EoC8CxB,cpC9CwB;AF87O9B;;AsCx6OA;EA0BI,cAAc;AtCk5OlB;;AsC56OA;EA6BQ,4BpCpC0B;EoCqC1B,cpCrC0B;AFw7OlC;;AsCj7OA;EAgCI,mBAAmB;EACnB,4BpCnD0B;EoCoD1B,0BA7D4B;EA8D5B,wBA7D0B;EA8D1B,aAAa;EACb,YAAY;EACZ,cAAc;EACd,2BAA2B;AtCq5O/B;;AsC57OA;EAyCM,qBAAqB;AtCu5O3B;;AsCh8OA;EA2CM,UAAU;EACV,uBAAuB;EACvB,oBAAoB;EACpB,qBAAqB;AtCy5O3B;;AsCv8OA;EAgDM,yBAAyB;EACzB,oBAAoB;AtC25O1B;;AsC58OA;EAoDM,mBAAmB;AtC45OzB;;AsCh9OA;EAsDM,kBAAkB;AtC85OxB;;AsCp9OA;EA0DM,uBAAuB;AtC85O7B;;AsCx9OA;EA6DM,yBAAyB;AtC+5O/B;;AsC59OA;EAiEM,6BAA6B;EAC7B,0BAAkE;AtC+5OxE;;AsCj+OA;EAoEQ,4BpCpFsB;EoCqFtB,4BpCvFsB;AFw/O9B;;AsCt+OA;EAyEU,uBpCvFqB;EoCwFrB,qBpC5FoB;EoC6FpB,2CAA2E;AtCi6OrF;;AsC5+OA;EA8EM,YAAY;EACZ,cAAc;AtCk6OpB;;AsCj/OA;EAkFM,qBpCpGwB;EoCqGxB,mBA5F+B;EA6F/B,iBA5F6B;EA6F7B,gBAAgB;EAChB,kBAAkB;AtCm6OxB;;AsCz/OA;EAwFQ,4BpCxGsB;EoCyGtB,qBpC5GsB;EoC6GtB,UAAU;AtCq6OlB;;AsC//OA;EA6FQ,iBAAgD;AtCs6OxD;;AsCngPA;EA+FQ,0BpCvDI;AF+9OZ;;AsCvgPA;EAiGQ,0BAAoE;AtC06O5E;;AsC3gPA;EAoGU,yBpC3GwB;EoC4GxB,qBpC5GwB;EoC6GxB,W7BrEM;E6BsEN,UAAU;AtC26OpB;;AsClhPA;EAyGM,mBAAmB;AtC66OzB;;AsCthPA;EA6GU,mCpCnEe;EoCoEf,gCpCpEe;EoCqEf,oBAAoB;AtC66O9B;;AsC5hPA;EAiHU,oCpCvEe;EoCwEf,iCpCxEe;EoCyEf,qBAAqB;AtC+6O/B;;AsCliPA;EAsHI,kBpC7GY;AF6hPhB;;AsCtiPA;EAwHI,kBpCjHY;AFmiPhB;;AsC1iPA;EA0HI,iBpCpHW;AFwiPf;;AuCxkPA;EACE,cAAc;EACd,aAAa;EACb,YAAY;EACZ,cAAc;EACd,gBAPkB;AvCklPpB;;AuC1kPE;EACE,UAAU;AvC6kPd;;AuC5kPE;EACE,UAAU;EACV,WAAW;AvC+kPf;;AuC9kPE;EACE,UAAU;EACV,UAAU;AvCilPd;;AuChlPE;EACE,UAAU;EACV,eAAe;AvCmlPnB;;AuCllPE;EACE,UAAU;EACV,UAAU;AvCqlPd;;AuCplPE;EACE,UAAU;EACV,eAAe;AvCulPnB;;AuCtlPE;EACE,UAAU;EACV,UAAU;AvCylPd;;AuCxlPE;EACE,UAAU;EACV,UAAU;AvC2lPd;;AuC1lPE;EACE,UAAU;EACV,UAAU;AvC6lPd;;AuC5lPE;EACE,UAAU;EACV,UAAU;AvC+lPd;;AuC9lPE;EACE,UAAU;EACV,UAAU;AvCimPd;;AuChmPE;EACE,gBAAgB;AvCmmPpB;;AuClmPE;EACE,qBAAqB;AvCqmPzB;;AuCpmPE;EACE,gBAAgB;AvCumPpB;;AuCtmPE;EACE,qBAAqB;AvCymPzB;;AuCxmPE;EACE,gBAAgB;AvC2mPpB;;AuC1mPE;EACE,gBAAgB;AvC6
mPpB;;AuC5mPE;EACE,gBAAgB;AvC+mPpB;;AuC9mPE;EACE,gBAAgB;AvCinPpB;;AuChnPE;EACE,gBAAgB;AvCmnPpB;;AuCjnPI;EACE,UAAU;EACV,SAA0B;AvConPhC;;AuCnnPI;EACE,eAAgC;AvCsnPtC;;AuC1nPI;EACE,UAAU;EACV,eAA0B;AvC6nPhC;;AuC5nPI;EACE,qBAAgC;AvC+nPtC;;AuCnoPI;EACE,UAAU;EACV,gBAA0B;AvCsoPhC;;AuCroPI;EACE,sBAAgC;AvCwoPtC;;AuC5oPI;EACE,UAAU;EACV,UAA0B;AvC+oPhC;;AuC9oPI;EACE,gBAAgC;AvCipPtC;;AuCrpPI;EACE,UAAU;EACV,gBAA0B;AvCwpPhC;;AuCvpPI;EACE,sBAAgC;AvC0pPtC;;AuC9pPI;EACE,UAAU;EACV,gBAA0B;AvCiqPhC;;AuChqPI;EACE,sBAAgC;AvCmqPtC;;AuCvqPI;EACE,UAAU;EACV,UAA0B;AvC0qPhC;;AuCzqPI;EACE,gBAAgC;AvC4qPtC;;AuChrPI;EACE,UAAU;EACV,gBAA0B;AvCmrPhC;;AuClrPI;EACE,sBAAgC;AvCqrPtC;;AuCzrPI;EACE,UAAU;EACV,gBAA0B;AvC4rPhC;;AuC3rPI;EACE,sBAAgC;AvC8rPtC;;AuClsPI;EACE,UAAU;EACV,UAA0B;AvCqsPhC;;AuCpsPI;EACE,gBAAgC;AvCusPtC;;AuC3sPI;EACE,UAAU;EACV,gBAA0B;AvC8sPhC;;AuC7sPI;EACE,sBAAgC;AvCgtPtC;;AuCptPI;EACE,UAAU;EACV,gBAA0B;AvCutPhC;;AuCttPI;EACE,sBAAgC;AvCytPtC;;AuC7tPI;EACE,UAAU;EACV,WAA0B;AvCguPhC;;AuC/tPI;EACE,iBAAgC;AvCkuPtC;;AChtPE;EsC/EF;IAgEM,UAAU;EvCouPd;EuCpyPF;IAkEM,UAAU;IACV,WAAW;EvCquPf;EuCxyPF;IAqEM,UAAU;IACV,UAAU;EvCsuPd;EuC5yPF;IAwEM,UAAU;IACV,eAAe;EvCuuPnB;EuChzPF;IA2EM,UAAU;IACV,UAAU;EvCwuPd;EuCpzPF;IA8EM,UAAU;IACV,eAAe;EvCyuPnB;EuCxzPF;IAiFM,UAAU;IACV,UAAU;EvC0uPd;EuC5zPF;IAoFM,UAAU;IACV,UAAU;EvC2uPd;EuCh0PF;IAuFM,UAAU;IACV,UAAU;EvC4uPd;EuCp0PF;IA0FM,UAAU;IACV,UAAU;EvC6uPd;EuCx0PF;IA6FM,UAAU;IACV,UAAU;EvC8uPd;EuC50PF;IAgGM,gBAAgB;EvC+uPpB;EuC/0PF;IAkGM,qBAAqB;EvCgvPzB;EuCl1PF;IAoGM,gBAAgB;EvCivPpB;EuCr1PF;IAsGM,qBAAqB;EvCkvPzB;EuCx1PF;IAwGM,gBAAgB;EvCmvPpB;EuC31PF;IA0GM,gBAAgB;EvCovPpB;EuC91PF;IA4GM,gBAAgB;EvCqvPpB;EuCj2PF;IA8GM,gBAAgB;EvCsvPpB;EuCp2PF;IAgHM,gBAAgB;EvCuvPpB;EuCv2PF;IAmHQ,UAAU;IACV,SAA0B;EvCuvPhC;EuC32PF;IAsHQ,eAAgC;EvCwvPtC;EuC92PF;IAmHQ,UAAU;IACV,eAA0B;EvC8vPhC;EuCl3PF;IAsHQ,qBAAgC;EvC+vPtC;EuCr3PF;IAmHQ,UAAU;IACV,gBAA0B;EvCqwPhC;EuCz3PF;IAsHQ,sBAAgC;EvCswPtC;EuC53PF;IAmHQ,UAAU;IACV,UAA0B;EvC4wPhC;EuCh4PF;IAsHQ,gBAAgC;EvC6wPtC;EuCn4PF;IAmHQ,UAAU;IACV,gBAA0B;EvCmxPhC;EuCv4PF;IAsHQ,sBAAgC;EvCoxPtC;EuC14PF;IAmHQ,UAAU;IACV,gBAA0B;EvC0xPhC;EuC94PF;IAsHQ,sBAAgC;EvC2xPtC;EuCj5PF;IAmHQ,UAAU;IACV,UAA0B;EvCiyPhC;EuCr5PF;IAsHQ,gBAAgC;EvCkyPtC;EuCx5PF;IAmHQ,UAAU;IACV,gBAA0B;EvCwyPhC;EuC55PF;IAsHQ,sBAAgC;EvCyyPtC;EuC/5PF;IAmHQ,UAAU;IACV,gBAA0B;EvC+yPhC;EuCn6PF;IAsHQ,sBAAgC;EvCgzPtC;EuCt6PF;IAmHQ,UAAU;IACV,UAA0B;EvCszPhC;EuC16PF;IAsHQ,gBAAgC;EvCuzPtC;EuC76PF;IAmHQ,UAAU;IACV,gBAA0B;EvC6zPhC;EuCj7PF;IAsHQ,sBAAgC;EvC8zPtC;EuCp7PF;IAmHQ,UAAU;IACV,gBAA0B;EvCo0PhC;EuCx7PF;IAsHQ,sBAAgC;EvCq0PtC;EuC37PF;IAmHQ,UAAU;IACV,WAA0B;EvC20PhC;EuC/7PF;IAsHQ,iBAAgC;EvC40PtC;AACF;;ACh3PE;EsCnFF;IA0HM,UAAU;EvC80Pd;EuCx8PF;IA6HM,UAAU;IACV,WAAW;EvC80Pf;EuC58PF;IAiIM,UAAU;IACV,UAAU;EvC80Pd;EuCh9PF;IAqIM,UAAU;IACV,eAAe;EvC80PnB;EuCp9PF;IAyIM,UAAU;IACV,UAAU;EvC80Pd;EuCx9PF;IA6IM,UAAU;IACV,eAAe;EvC80PnB;EuC59PF;IAiJM,UAAU;IACV,UAAU;EvC80Pd;EuCh+PF;IAqJM,UAAU;IACV,UAAU;EvC80Pd;EuCp+PF;IAyJM,UAAU;IACV,UAAU;EvC80Pd;EuCx+PF;IA6JM,UAAU;IACV,UAAU;EvC80Pd;EuC5+PF;IAiKM,UAAU;IACV,UAAU;EvC80Pd;EuCh/PF;IAqKM,gBAAgB;EvC80PpB;EuCn/PF;IAwKM,qBAAqB;EvC80PzB;EuCt/PF;IA2KM,gBAAgB;EvC80PpB;EuCz/PF;IA8KM,qBAAqB;EvC80PzB;EuC5/PF;IAiLM,gBAAgB;EvC80PpB;EuC//PF;IAoLM,gBAAgB;EvC80PpB;EuClgQF;IAuLM,gBAAgB;EvC80PpB;EuCrgQF;IA0LM,gBAAgB;EvC80PpB;EuCxgQF;IA6LM,gBAAgB;EvC80PpB;EuC3gQF;IAiMQ,UAAU;IACV,SAA0B;EvC60PhC;EuC/gQF;IAqMQ,eAAgC;EvC60PtC;EuClhQF;IAiMQ,UAAU;IACV,eAA0B;EvCo1PhC;EuCthQF;IAqMQ,qBAAgC;EvCo1PtC;EuCzhQF;IAiMQ,UAAU;IACV,gBAA0B;EvC21PhC;EuC7hQF;IAqMQ,sBAAgC;EvC21PtC;EuChiQF;IAiMQ,UAAU;IACV,UAA0B;EvCk2PhC;EuCpiQF;IAqMQ,gBAAgC;EvCk2PtC;EuCviQF;IAiMQ,UAAU;IA
CV,gBAA0B;EvCy2PhC;EuC3iQF;IAqMQ,sBAAgC;EvCy2PtC;EuC9iQF;IAiMQ,UAAU;IACV,gBAA0B;EvCg3PhC;EuCljQF;IAqMQ,sBAAgC;EvCg3PtC;EuCrjQF;IAiMQ,UAAU;IACV,UAA0B;EvCu3PhC;EuCzjQF;IAqMQ,gBAAgC;EvCu3PtC;EuC5jQF;IAiMQ,UAAU;IACV,gBAA0B;EvC83PhC;EuChkQF;IAqMQ,sBAAgC;EvC83PtC;EuCnkQF;IAiMQ,UAAU;IACV,gBAA0B;EvCq4PhC;EuCvkQF;IAqMQ,sBAAgC;EvCq4PtC;EuC1kQF;IAiMQ,UAAU;IACV,UAA0B;EvC44PhC;EuC9kQF;IAqMQ,gBAAgC;EvC44PtC;EuCjlQF;IAiMQ,UAAU;IACV,gBAA0B;EvCm5PhC;EuCrlQF;IAqMQ,sBAAgC;EvCm5PtC;EuCxlQF;IAiMQ,UAAU;IACV,gBAA0B;EvC05PhC;EuC5lQF;IAqMQ,sBAAgC;EvC05PtC;EuC/lQF;IAiMQ,UAAU;IACV,WAA0B;EvCi6PhC;EuCnmQF;IAqMQ,iBAAgC;EvCi6PtC;AACF;;AC5gQE;EsC3FF;IAwMM,UAAU;EvCo6Pd;EuC5mQF;IA0MM,UAAU;IACV,WAAW;EvCq6Pf;EuChnQF;IA6MM,UAAU;IACV,UAAU;EvCs6Pd;EuCpnQF;IAgNM,UAAU;IACV,eAAe;EvCu6PnB;EuCxnQF;IAmNM,UAAU;IACV,UAAU;EvCw6Pd;EuC5nQF;IAsNM,UAAU;IACV,eAAe;EvCy6PnB;EuChoQF;IAyNM,UAAU;IACV,UAAU;EvC06Pd;EuCpoQF;IA4NM,UAAU;IACV,UAAU;EvC26Pd;EuCxoQF;IA+NM,UAAU;IACV,UAAU;EvC46Pd;EuC5oQF;IAkOM,UAAU;IACV,UAAU;EvC66Pd;EuChpQF;IAqOM,UAAU;IACV,UAAU;EvC86Pd;EuCppQF;IAwOM,gBAAgB;EvC+6PpB;EuCvpQF;IA0OM,qBAAqB;EvCg7PzB;EuC1pQF;IA4OM,gBAAgB;EvCi7PpB;EuC7pQF;IA8OM,qBAAqB;EvCk7PzB;EuChqQF;IAgPM,gBAAgB;EvCm7PpB;EuCnqQF;IAkPM,gBAAgB;EvCo7PpB;EuCtqQF;IAoPM,gBAAgB;EvCq7PpB;EuCzqQF;IAsPM,gBAAgB;EvCs7PpB;EuC5qQF;IAwPM,gBAAgB;EvCu7PpB;EuC/qQF;IA2PQ,UAAU;IACV,SAA0B;EvCu7PhC;EuCnrQF;IA8PQ,eAAgC;EvCw7PtC;EuCtrQF;IA2PQ,UAAU;IACV,eAA0B;EvC87PhC;EuC1rQF;IA8PQ,qBAAgC;EvC+7PtC;EuC7rQF;IA2PQ,UAAU;IACV,gBAA0B;EvCq8PhC;EuCjsQF;IA8PQ,sBAAgC;EvCs8PtC;EuCpsQF;IA2PQ,UAAU;IACV,UAA0B;EvC48PhC;EuCxsQF;IA8PQ,gBAAgC;EvC68PtC;EuC3sQF;IA2PQ,UAAU;IACV,gBAA0B;EvCm9PhC;EuC/sQF;IA8PQ,sBAAgC;EvCo9PtC;EuCltQF;IA2PQ,UAAU;IACV,gBAA0B;EvC09PhC;EuCttQF;IA8PQ,sBAAgC;EvC29PtC;EuCztQF;IA2PQ,UAAU;IACV,UAA0B;EvCi+PhC;EuC7tQF;IA8PQ,gBAAgC;EvCk+PtC;EuChuQF;IA2PQ,UAAU;IACV,gBAA0B;EvCw+PhC;EuCpuQF;IA8PQ,sBAAgC;EvCy+PtC;EuCvuQF;IA2PQ,UAAU;IACV,gBAA0B;EvC++PhC;EuC3uQF;IA8PQ,sBAAgC;EvCg/PtC;EuC9uQF;IA2PQ,UAAU;IACV,UAA0B;EvCs/PhC;EuClvQF;IA8PQ,gBAAgC;EvCu/PtC;EuCrvQF;IA2PQ,UAAU;IACV,gBAA0B;EvC6/PhC;EuCzvQF;IA8PQ,sBAAgC;EvC8/PtC;EuC5vQF;IA2PQ,UAAU;IACV,gBAA0B;EvCogQhC;EuChwQF;IA8PQ,sBAAgC;EvCqgQtC;EuCnwQF;IA2PQ,UAAU;IACV,WAA0B;EvC2gQhC;EuCvwQF;IA8PQ,iBAAgC;EvC4gQtC;AACF;;AC5qQE;EsC/FF;IAiQM,UAAU;EvC+gQd;EuChxQF;IAmQM,UAAU;IACV,WAAW;EvCghQf;EuCpxQF;IAsQM,UAAU;IACV,UAAU;EvCihQd;EuCxxQF;IAyQM,UAAU;IACV,eAAe;EvCkhQnB;EuC5xQF;IA4QM,UAAU;IACV,UAAU;EvCmhQd;EuChyQF;IA+QM,UAAU;IACV,eAAe;EvCohQnB;EuCpyQF;IAkRM,UAAU;IACV,UAAU;EvCqhQd;EuCxyQF;IAqRM,UAAU;IACV,UAAU;EvCshQd;EuC5yQF;IAwRM,UAAU;IACV,UAAU;EvCuhQd;EuChzQF;IA2RM,UAAU;IACV,UAAU;EvCwhQd;EuCpzQF;IA8RM,UAAU;IACV,UAAU;EvCyhQd;EuCxzQF;IAiSM,gBAAgB;EvC0hQpB;EuC3zQF;IAmSM,qBAAqB;EvC2hQzB;EuC9zQF;IAqSM,gBAAgB;EvC4hQpB;EuCj0QF;IAuSM,qBAAqB;EvC6hQzB;EuCp0QF;IAySM,gBAAgB;EvC8hQpB;EuCv0QF;IA2SM,gBAAgB;EvC+hQpB;EuC10QF;IA6SM,gBAAgB;EvCgiQpB;EuC70QF;IA+SM,gBAAgB;EvCiiQpB;EuCh1QF;IAiTM,gBAAgB;EvCkiQpB;EuCn1QF;IAoTQ,UAAU;IACV,SAA0B;EvCkiQhC;EuCv1QF;IAuTQ,eAAgC;EvCmiQtC;EuC11QF;IAoTQ,UAAU;IACV,eAA0B;EvCyiQhC;EuC91QF;IAuTQ,qBAAgC;EvC0iQtC;EuCj2QF;IAoTQ,UAAU;IACV,gBAA0B;EvCgjQhC;EuCr2QF;IAuTQ,sBAAgC;EvCijQtC;EuCx2QF;IAoTQ,UAAU;IACV,UAA0B;EvCujQhC;EuC52QF;IAuTQ,gBAAgC;EvCwjQtC;EuC/2QF;IAoTQ,UAAU;IACV,gBAA0B;EvC8jQhC;EuCn3QF;IAuTQ,sBAAgC;EvC+jQtC;EuCt3QF;IAoTQ,UAAU;IACV,gBAA0B;EvCqkQhC;EuC13QF;IAuTQ,sBAAgC;EvCskQtC;EuC73QF;IAoTQ,UAAU;IACV,UAA0B;EvC4kQhC;EuCj4QF;IAuTQ,gBAAgC;EvC6kQtC;EuCp4QF;IAoTQ,UAAU;IACV,gBAA0B;EvCmlQhC;EuCx4QF;IAuTQ,sBAAgC;EvColQtC;EuC34QF;IAoTQ,UAAU;IACV,gBAA0B;EvC0lQhC;EuC/4QF;IAuTQ,sBAAgC;EvC2lQtC;EuCl5QF;IAoTQ,UAAU;IACV,UAA0B;EvCimQhC;EuCt5QF;IAuTQ,gBAAgC
;EvCkmQtC;EuCz5QF;IAoTQ,UAAU;IACV,gBAA0B;EvCwmQhC;EuC75QF;IAuTQ,sBAAgC;EvCymQtC;EuCh6QF;IAoTQ,UAAU;IACV,gBAA0B;EvC+mQhC;EuCp6QF;IAuTQ,sBAAgC;EvCgnQtC;EuCv6QF;IAoTQ,UAAU;IACV,WAA0B;EvCsnQhC;EuC36QF;IAuTQ,iBAAgC;EvCunQtC;AACF;;ACj0QI;EsC9GJ;IA0TM,UAAU;EvC0nQd;EuCp7QF;IA4TM,UAAU;IACV,WAAW;EvC2nQf;EuCx7QF;IA+TM,UAAU;IACV,UAAU;EvC4nQd;EuC57QF;IAkUM,UAAU;IACV,eAAe;EvC6nQnB;EuCh8QF;IAqUM,UAAU;IACV,UAAU;EvC8nQd;EuCp8QF;IAwUM,UAAU;IACV,eAAe;EvC+nQnB;EuCx8QF;IA2UM,UAAU;IACV,UAAU;EvCgoQd;EuC58QF;IA8UM,UAAU;IACV,UAAU;EvCioQd;EuCh9QF;IAiVM,UAAU;IACV,UAAU;EvCkoQd;EuCp9QF;IAoVM,UAAU;IACV,UAAU;EvCmoQd;EuCx9QF;IAuVM,UAAU;IACV,UAAU;EvCooQd;EuC59QF;IA0VM,gBAAgB;EvCqoQpB;EuC/9QF;IA4VM,qBAAqB;EvCsoQzB;EuCl+QF;IA8VM,gBAAgB;EvCuoQpB;EuCr+QF;IAgWM,qBAAqB;EvCwoQzB;EuCx+QF;IAkWM,gBAAgB;EvCyoQpB;EuC3+QF;IAoWM,gBAAgB;EvC0oQpB;EuC9+QF;IAsWM,gBAAgB;EvC2oQpB;EuCj/QF;IAwWM,gBAAgB;EvC4oQpB;EuCp/QF;IA0WM,gBAAgB;EvC6oQpB;EuCv/QF;IA6WQ,UAAU;IACV,SAA0B;EvC6oQhC;EuC3/QF;IAgXQ,eAAgC;EvC8oQtC;EuC9/QF;IA6WQ,UAAU;IACV,eAA0B;EvCopQhC;EuClgRF;IAgXQ,qBAAgC;EvCqpQtC;EuCrgRF;IA6WQ,UAAU;IACV,gBAA0B;EvC2pQhC;EuCzgRF;IAgXQ,sBAAgC;EvC4pQtC;EuC5gRF;IA6WQ,UAAU;IACV,UAA0B;EvCkqQhC;EuChhRF;IAgXQ,gBAAgC;EvCmqQtC;EuCnhRF;IA6WQ,UAAU;IACV,gBAA0B;EvCyqQhC;EuCvhRF;IAgXQ,sBAAgC;EvC0qQtC;EuC1hRF;IA6WQ,UAAU;IACV,gBAA0B;EvCgrQhC;EuC9hRF;IAgXQ,sBAAgC;EvCirQtC;EuCjiRF;IA6WQ,UAAU;IACV,UAA0B;EvCurQhC;EuCriRF;IAgXQ,gBAAgC;EvCwrQtC;EuCxiRF;IA6WQ,UAAU;IACV,gBAA0B;EvC8rQhC;EuC5iRF;IAgXQ,sBAAgC;EvC+rQtC;EuC/iRF;IA6WQ,UAAU;IACV,gBAA0B;EvCqsQhC;EuCnjRF;IAgXQ,sBAAgC;EvCssQtC;EuCtjRF;IA6WQ,UAAU;IACV,UAA0B;EvC4sQhC;EuC1jRF;IAgXQ,gBAAgC;EvC6sQtC;EuC7jRF;IA6WQ,UAAU;IACV,gBAA0B;EvCmtQhC;EuCjkRF;IAgXQ,sBAAgC;EvCotQtC;EuCpkRF;IA6WQ,UAAU;IACV,gBAA0B;EvC0tQhC;EuCxkRF;IAgXQ,sBAAgC;EvC2tQtC;EuC3kRF;IA6WQ,UAAU;IACV,WAA0B;EvCiuQhC;EuC/kRF;IAgXQ,iBAAgC;EvCkuQtC;AACF;;ACt9QI;EsC7HJ;IAmXM,UAAU;EvCquQd;EuCxlRF;IAqXM,UAAU;IACV,WAAW;EvCsuQf;EuC5lRF;IAwXM,UAAU;IACV,UAAU;EvCuuQd;EuChmRF;IA2XM,UAAU;IACV,eAAe;EvCwuQnB;EuCpmRF;IA8XM,UAAU;IACV,UAAU;EvCyuQd;EuCxmRF;IAiYM,UAAU;IACV,eAAe;EvC0uQnB;EuC5mRF;IAoYM,UAAU;IACV,UAAU;EvC2uQd;EuChnRF;IAuYM,UAAU;IACV,UAAU;EvC4uQd;EuCpnRF;IA0YM,UAAU;IACV,UAAU;EvC6uQd;EuCxnRF;IA6YM,UAAU;IACV,UAAU;EvC8uQd;EuC5nRF;IAgZM,UAAU;IACV,UAAU;EvC+uQd;EuChoRF;IAmZM,gBAAgB;EvCgvQpB;EuCnoRF;IAqZM,qBAAqB;EvCivQzB;EuCtoRF;IAuZM,gBAAgB;EvCkvQpB;EuCzoRF;IAyZM,qBAAqB;EvCmvQzB;EuC5oRF;IA2ZM,gBAAgB;EvCovQpB;EuC/oRF;IA6ZM,gBAAgB;EvCqvQpB;EuClpRF;IA+ZM,gBAAgB;EvCsvQpB;EuCrpRF;IAiaM,gBAAgB;EvCuvQpB;EuCxpRF;IAmaM,gBAAgB;EvCwvQpB;EuC3pRF;IAsaQ,UAAU;IACV,SAA0B;EvCwvQhC;EuC/pRF;IAyaQ,eAAgC;EvCyvQtC;EuClqRF;IAsaQ,UAAU;IACV,eAA0B;EvC+vQhC;EuCtqRF;IAyaQ,qBAAgC;EvCgwQtC;EuCzqRF;IAsaQ,UAAU;IACV,gBAA0B;EvCswQhC;EuC7qRF;IAyaQ,sBAAgC;EvCuwQtC;EuChrRF;IAsaQ,UAAU;IACV,UAA0B;EvC6wQhC;EuCprRF;IAyaQ,gBAAgC;EvC8wQtC;EuCvrRF;IAsaQ,UAAU;IACV,gBAA0B;EvCoxQhC;EuC3rRF;IAyaQ,sBAAgC;EvCqxQtC;EuC9rRF;IAsaQ,UAAU;IACV,gBAA0B;EvC2xQhC;EuClsRF;IAyaQ,sBAAgC;EvC4xQtC;EuCrsRF;IAsaQ,UAAU;IACV,UAA0B;EvCkyQhC;EuCzsRF;IAyaQ,gBAAgC;EvCmyQtC;EuC5sRF;IAsaQ,UAAU;IACV,gBAA0B;EvCyyQhC;EuChtRF;IAyaQ,sBAAgC;EvC0yQtC;EuCntRF;IAsaQ,UAAU;IACV,gBAA0B;EvCgzQhC;EuCvtRF;IAyaQ,sBAAgC;EvCizQtC;EuC1tRF;IAsaQ,UAAU;IACV,UAA0B;EvCuzQhC;EuC9tRF;IAyaQ,gBAAgC;EvCwzQtC;EuCjuRF;IAsaQ,UAAU;IACV,gBAA0B;EvC8zQhC;EuCruRF;IAyaQ,sBAAgC;EvC+zQtC;EuCxuRF;IAsaQ,UAAU;IACV,gBAA0B;EvCq0QhC;EuC5uRF;IAyaQ,sBAAgC;EvCs0QtC;EuC/uRF;IAsaQ,UAAU;IACV,WAA0B;EvC40QhC;EuCnvRF;IAyaQ,iBAAgC;EvC60QtC;AACF;;AuC50QA;EACE,qBA9akB;EA+alB,sBA/akB;EAgblB,oBAhbkB;AvC+vRpB;;AuCl1QA;EAKI,uBAlbgB;AvCmwRpB;;AuCt1QA;EAOI,qCAA4C;AvCm1QhD;;AuC11QA;EAUI,uBAAuB;AvCo1Q3B;;AuC91QA;EA
YI,cAAc;EACd,eAAe;EACf,aAAa;AvCs1QjB;;AuCp2QA;EAgBM,SAAS;EACT,qBAAqB;AvCw1Q3B;;AuCz2QA;EAmBM,qBAAqB;AvC01Q3B;;AuC72QA;EAqBM,gBAAgB;AvC41QtB;;AuCj3QA;EAuBI,aAAa;AvC81QjB;;AuCr3QA;EAyBI,eAAe;AvCg2QnB;;AuCz3QA;EA2BI,mBAAmB;AvCk2QvB;;ACrtRE;EsCwVF;IA+BM,aAAa;EvCm2QjB;AACF;;AC/sRE;EsC4UF;IAmCM,aAAa;EvCq2QjB;AACF;;AuCn2QE;EACE,oBAAY;EACZ,wCAAwC;EACxC,yCAAyC;AvCs2Q7C;;AuCz2QE;EAKI,8BAA8B;EAC9B,+BAA+B;AvCw2QrC;;AuC92QE;EASM,iBAAY;AvCy2QpB;;ACpvRE;EsCkYA;IAYQ,iBAAY;EvC22QpB;AACF;;ACtvRE;EsC8XA;IAeQ,iBAAY;EvC82QpB;AACF;;ACxvRE;EsC0XA;IAkBQ,iBAAY;EvCi3QpB;AACF;;AC1vRE;EsCsXA;IAqBQ,iBAAY;EvCo3QpB;AACF;;AC5vRE;EsCkXA;IAwBQ,iBAAY;EvCu3QpB;AACF;;AC7vRI;EsC6WF;IA2BQ,iBAAY;EvC03QpB;AACF;;ACzvRI;EsCmWF;IA8BQ,iBAAY;EvC63QpB;AACF;;AC1vRI;EsC8VF;IAiCQ,iBAAY;EvCg4QpB;AACF;;ACtvRI;EsCoVF;IAoCQ,iBAAY;EvCm4QpB;AACF;;AuCx6QE;EASM,oBAAY;AvCm6QpB;;AC9yRE;EsCkYA;IAYQ,oBAAY;EvCq6QpB;AACF;;AChzRE;EsC8XA;IAeQ,oBAAY;EvCw6QpB;AACF;;AClzRE;EsC0XA;IAkBQ,oBAAY;EvC26QpB;AACF;;ACpzRE;EsCsXA;IAqBQ,oBAAY;EvC86QpB;AACF;;ACtzRE;EsCkXA;IAwBQ,oBAAY;EvCi7QpB;AACF;;ACvzRI;EsC6WF;IA2BQ,oBAAY;EvCo7QpB;AACF;;ACnzRI;EsCmWF;IA8BQ,oBAAY;EvCu7QpB;AACF;;ACpzRI;EsC8VF;IAiCQ,oBAAY;EvC07QpB;AACF;;AChzRI;EsCoVF;IAoCQ,oBAAY;EvC67QpB;AACF;;AuCl+QE;EASM,mBAAY;AvC69QpB;;ACx2RE;EsCkYA;IAYQ,mBAAY;EvC+9QpB;AACF;;AC12RE;EsC8XA;IAeQ,mBAAY;EvCk+QpB;AACF;;AC52RE;EsC0XA;IAkBQ,mBAAY;EvCq+QpB;AACF;;AC92RE;EsCsXA;IAqBQ,mBAAY;EvCw+QpB;AACF;;ACh3RE;EsCkXA;IAwBQ,mBAAY;EvC2+QpB;AACF;;ACj3RI;EsC6WF;IA2BQ,mBAAY;EvC8+QpB;AACF;;AC72RI;EsCmWF;IA8BQ,mBAAY;EvCi/QpB;AACF;;AC92RI;EsC8VF;IAiCQ,mBAAY;EvCo/QpB;AACF;;AC12RI;EsCoVF;IAoCQ,mBAAY;EvCu/QpB;AACF;;AuC5hRE;EASM,oBAAY;AvCuhRpB;;ACl6RE;EsCkYA;IAYQ,oBAAY;EvCyhRpB;AACF;;ACp6RE;EsC8XA;IAeQ,oBAAY;EvC4hRpB;AACF;;ACt6RE;EsC0XA;IAkBQ,oBAAY;EvC+hRpB;AACF;;ACx6RE;EsCsXA;IAqBQ,oBAAY;EvCkiRpB;AACF;;AC16RE;EsCkXA;IAwBQ,oBAAY;EvCqiRpB;AACF;;AC36RI;EsC6WF;IA2BQ,oBAAY;EvCwiRpB;AACF;;ACv6RI;EsCmWF;IA8BQ,oBAAY;EvC2iRpB;AACF;;ACx6RI;EsC8VF;IAiCQ,oBAAY;EvC8iRpB;AACF;;ACp6RI;EsCoVF;IAoCQ,oBAAY;EvCijRpB;AACF;;AuCtlRE;EASM,iBAAY;AvCilRpB;;AC59RE;EsCkYA;IAYQ,iBAAY;EvCmlRpB;AACF;;AC99RE;EsC8XA;IAeQ,iBAAY;EvCslRpB;AACF;;ACh+RE;EsC0XA;IAkBQ,iBAAY;EvCylRpB;AACF;;ACl+RE;EsCsXA;IAqBQ,iBAAY;EvC4lRpB;AACF;;ACp+RE;EsCkXA;IAwBQ,iBAAY;EvC+lRpB;AACF;;ACr+RI;EsC6WF;IA2BQ,iBAAY;EvCkmRpB;AACF;;ACj+RI;EsCmWF;IA8BQ,iBAAY;EvCqmRpB;AACF;;ACl+RI;EsC8VF;IAiCQ,iBAAY;EvCwmRpB;AACF;;AC99RI;EsCoVF;IAoCQ,iBAAY;EvC2mRpB;AACF;;AuChpRE;EASM,oBAAY;AvC2oRpB;;ACthSE;EsCkYA;IAYQ,oBAAY;EvC6oRpB;AACF;;ACxhSE;EsC8XA;IAeQ,oBAAY;EvCgpRpB;AACF;;AC1hSE;EsC0XA;IAkBQ,oBAAY;EvCmpRpB;AACF;;AC5hSE;EsCsXA;IAqBQ,oBAAY;EvCspRpB;AACF;;AC9hSE;EsCkXA;IAwBQ,oBAAY;EvCypRpB;AACF;;AC/hSI;EsC6WF;IA2BQ,oBAAY;EvC4pRpB;AACF;;AC3hSI;EsCmWF;IA8BQ,oBAAY;EvC+pRpB;AACF;;AC5hSI;EsC8VF;IAiCQ,oBAAY;EvCkqRpB;AACF;;ACxhSI;EsCoVF;IAoCQ,oBAAY;EvCqqRpB;AACF;;AuC1sRE;EASM,mBAAY;AvCqsRpB;;AChlSE;EsCkYA;IAYQ,mBAAY;EvCusRpB;AACF;;ACllSE;EsC8XA;IAeQ,mBAAY;EvC0sRpB;AACF;;ACplSE;EsC0XA;IAkBQ,mBAAY;EvC6sRpB;AACF;;ACtlSE;EsCsXA;IAqBQ,mBAAY;EvCgtRpB;AACF;;ACxlSE;EsCkXA;IAwBQ,mBAAY;EvCmtRpB;AACF;;ACzlSI;EsC6WF;IA2BQ,mBAAY;EvCstRpB;AACF;;ACrlSI;EsCmWF;IA8BQ,mBAAY;EvCytRpB;AACF;;ACtlSI;EsC8VF;IAiCQ,mBAAY;EvC4tRpB;AACF;;ACllSI;EsCoVF;IAoCQ,mBAAY;EvC+tRpB;AACF;;AuCpwRE;EASM,oBAAY;AvC+vRpB;;AC1oSE;EsCkYA;IAYQ,oBAAY;EvCiwRpB;AACF;;AC5oSE;EsC8XA;IAeQ,oBAAY;EvCowRpB;AACF;;AC9oSE;EsC0XA;IAkBQ,oBAAY;EvCuwRpB;AACF;;AChpSE;EsCsXA;IAqBQ,oBAAY;EvC0wRpB;AACF;;AClpSE;EsCkXA;IAwBQ,oBAAY;EvC6wRpB;AACF;;ACnpSI;EsC6WF;IA2BQ,oBAAY;EvCgxRpB;AACF;;AC/oSI;EsCmWF;IA8BQ,oBAAY;EvCmxRpB;AACF;;AChpSI;EsC8VF;IAiCQ,oBAAY;EvCsxRpB;AACF;;AC5oSI;EsCoVF;IAoCQ,o
BAAY;EvCyxRpB;AACF;;AuC9zRE;EASM,iBAAY;AvCyzRpB;;ACpsSE;EsCkYA;IAYQ,iBAAY;EvC2zRpB;AACF;;ACtsSE;EsC8XA;IAeQ,iBAAY;EvC8zRpB;AACF;;ACxsSE;EsC0XA;IAkBQ,iBAAY;EvCi0RpB;AACF;;AC1sSE;EsCsXA;IAqBQ,iBAAY;EvCo0RpB;AACF;;AC5sSE;EsCkXA;IAwBQ,iBAAY;EvCu0RpB;AACF;;AC7sSI;EsC6WF;IA2BQ,iBAAY;EvC00RpB;AACF;;ACzsSI;EsCmWF;IA8BQ,iBAAY;EvC60RpB;AACF;;AC1sSI;EsC8VF;IAiCQ,iBAAY;EvCg1RpB;AACF;;ACtsSI;EsCoVF;IAoCQ,iBAAY;EvCm1RpB;AACF;;AwCz0SA;EACE,oBAAoB;EACpB,cAAc;EACd,aAAa;EACb,YAAY;EACZ,cAAc;EACd,+BAAuB;EAAvB,4BAAuB;EAAvB,uBAAuB;AxC40SzB;;AwCl1SA;EASI,qBAA+B;EAC/B,sBAAgC;EAChC,oBAA8B;AxC60SlC;;AwCx1SA;EAaM,uBAAiC;AxC+0SvC;;AwC51SA;EAeM,sBAjBgB;AxCk2StB;;AwCh2SA;EAiBI,oBAAoB;AxCm1SxB;;AwCp2SA;EAmBI,gBArBkB;AxC02StB;;AwCx2SA;EAqBI,sBAAsB;AxCu1S1B;;AwC52SA;EAuBM,gCAAgC;AxCy1StC;;AC7xSE;EuCnFF;IA2BM,aAAa;ExC01SjB;EwCr3SF;IA8BQ,UAAU;IACV,eAAuB;ExC01S7B;EwCz3SF;IA8BQ,UAAU;IACV,gBAAuB;ExC81S7B;EwC73SF;IA8BQ,UAAU;IACV,UAAuB;ExCk2S7B;EwCj4SF;IA8BQ,UAAU;IACV,gBAAuB;ExCs2S7B;EwCr4SF;IA8BQ,UAAU;IACV,gBAAuB;ExC02S7B;EwCz4SF;IA8BQ,UAAU;IACV,UAAuB;ExC82S7B;EwC74SF;IA8BQ,UAAU;IACV,gBAAuB;ExCk3S7B;EwCj5SF;IA8BQ,UAAU;IACV,gBAAuB;ExCs3S7B;EwCr5SF;IA8BQ,UAAU;IACV,UAAuB;ExC03S7B;EwCz5SF;IA8BQ,UAAU;IACV,gBAAuB;ExC83S7B;EwC75SF;IA8BQ,UAAU;IACV,gBAAuB;ExCk4S7B;EwCj6SF;IA8BQ,UAAU;IACV,WAAuB;ExCs4S7B;AACF;;AyCt6SA;EACE,oBAAoB;EACpB,aAAa;EACb,sBAAsB;EACtB,8BAA8B;AzCy6ShC;;AyC76SA;EAMI,gBAAgB;AzC26SpB;;AyCj7SA;EASM,mBAAmB;AzC46SzB;;AyCr7SA;EAeM,uBvCHyB;EuCIzB,cvChBuB;AF07S7B;;AyC17SA;;EAmBQ,cAAc;AzC46StB;;AyC/7SA;EAqBQ,cvCrBqB;AFm8S7B;;AyCn8SA;EAuBQ,4BvCvBqB;AFu8S7B;;AyCv8SA;;EA0BU,cvC1BmB;AF48S7B;;ACj3SE;EwC3FF;IA6BU,uBvCjBqB;EFq8S7B;AACF;;AyCl9SA;;EAgCQ,4BvChCqB;AFu9S7B;;AyCv9SA;;;EAqCU,yBAAoC;EACpC,cvCtCmB;AF89S7B;;AyC99SA;EAyCU,cvCzCmB;EuC0CnB,YAAY;AzCy7StB;;AyCn+SA;EA4CY,UAAU;AzC27StB;;AyCv+SA;EA+CY,UAAU;AzC47StB;;AyC3+SA;EAmDY,cvCnDiB;AF++S7B;;AyC/+SA;EAqDc,uCvCrDe;AFm/S7B;;AyCn/SA;EAyDc,yBvCzDe;EuC0Df,qBvC1De;EuC2Df,YvC/CiB;AF6+S/B;;AyCz/SA;EAgEQ,4EAAyG;AzC67SjH;;AC96SE;EwC/EF;IAmEY,4EAAyG;EzC+7SnH;AACF;;AyCngTA;EAeM,yBvCfuB;EuCgBvB,YvCJyB;AF4/S/B;;AyCxgTA;;EAmBQ,cAAc;AzC0/StB;;AyC7gTA;EAqBQ,YvCTuB;AFqgT/B;;AyCjhTA;EAuBQ,+BvCXuB;AFygT/B;;AyCrhTA;;EA0BU,YvCdqB;AF8gT/B;;AC/7SE;EwC3FF;IA6BU,yBvC7BmB;EF+hT3B;AACF;;AyChiTA;;EAgCQ,+BvCpBuB;AFyhT/B;;AyCriTA;;;EAqCU,uBAAoC;EACpC,YvC1BqB;AFgiT/B;;AyC5iTA;EAyCU,YvC7BqB;EuC8BrB,YAAY;AzCugTtB;;AyCjjTA;EA4CY,UAAU;AzCygTtB;;AyCrjTA;EA+CY,UAAU;AzC0gTtB;;AyCzjTA;EAmDY,YvCvCmB;AFijT/B;;AyC7jTA;EAqDc,uCvCrDe;AFikT7B;;AyCjkTA;EAyDc,uBvC7CiB;EuC8CjB,mBvC9CiB;EuC+CjB,cvC3De;AFukT7B;;AyCvkTA;EAgEQ,8EAAyG;AzC2gTjH;;AC5/SE;EwC/EF;IAmEY,8EAAyG;EzC6gTnH;AACF;;AyCjlTA;EAeM,4BvCLwB;EuCMxB,cvCZwB;AFklT9B;;AyCtlTA;;EAmBQ,cAAc;AzCwkTtB;;AyC3lTA;EAqBQ,cvCjBsB;AF2lT9B;;AyC/lTA;EAuBQ,4BvCnBsB;AF+lT9B;;AyCnmTA;;EA0BU,cvCtBoB;AFomT9B;;AC7gTE;EwC3FF;IA6BU,4BvCnBoB;EFmmT5B;AACF;;AyC9mTA;;EAgCQ,4BvC5BsB;AF+mT9B;;AyCnnTA;;;EAqCU,yBAAoC;EACpC,cvClCoB;AFsnT9B;;AyC1nTA;EAyCU,cvCrCoB;EuCsCpB,YAAY;AzCqlTtB;;AyC/nTA;EA4CY,UAAU;AzCulTtB;;AyCnoTA;EA+CY,UAAU;AzCwlTtB;;AyCvoTA;EAmDY,cvC/CkB;AFuoT9B;;AyC3oTA;EAqDc,uCvCrDe;AF+oT7B;;AyC/oTA;EAyDc,yBvCrDgB;EuCsDhB,qBvCtDgB;EuCuDhB,iBvCjDgB;AF2oT9B;;AyCrpTA;EAgEQ,iFAAyG;AzCylTjH;;AC1kTE;EwC/EF;IAmEY,iFAAyG;EzC2lTnH;AACF;;AyC/pTA;EAeM,yBvCXwB;EuCYxB,iBvCNwB;AF0pT9B;;AyCpqTA;;EAmBQ,cAAc;AzCspTtB;;AyCzqTA;EAqBQ,iBvCXsB;AFmqT9B;;AyC7qTA;EAuBQ,+BvCbsB;AFuqT9B;;AyCjrTA;;EA0BU,iBvChBoB;AF4qT9B;;AC3lTE;EwC3FF;IA6BU,yBvCzBoB;EFurT5B;AACF;;AyC5rTA;;EAgCQ,+BvCtBsB;AFurT9B;;AyCjsTA;;;EAqCU,yBAAoC;EACpC,iBvC5BoB;AF8rT9B;;AyCxsTA;EAyCU,iBvC/BoB;EuCgCpB,YAAY;AzCmqTtB;;AyC7sTA;EA4CY,UAAU;AzCqqTtB;;Ay
CjtTA;EA+CY,UAAU;AzCsqTtB;;AyCrtTA;EAmDY,iBvCzCkB;AF+sT9B;;AyCztTA;EAqDc,uCvCrDe;AF6tT7B;;AyC7tTA;EAyDc,4BvC/CgB;EuCgDhB,wBvChDgB;EuCiDhB,cvCvDgB;AF+tT9B;;AyCnuTA;EAgEQ,gFAAyG;AzCuqTjH;;ACxpTE;EwC/EF;IAmEY,gFAAyG;EzCyqTnH;AACF;;AyC7uTA;EAeM,yBvCE4B;EuCD5B,WhC2CU;ATurThB;;AyClvTA;;EAmBQ,cAAc;AzCouTtB;;AyCvvTA;EAqBQ,WhCsCQ;ATgsThB;;AyC3vTA;EAuBQ,+BhCoCQ;ATosThB;;AyC/vTA;;EA0BU,WhCiCM;ATysThB;;ACzqTE;EwC3FF;IA6BU,yBvCZwB;EFwvThC;AACF;;AyC1wTA;;EAgCQ,+BhC2BQ;ATotThB;;AyC/wTA;;;EAqCU,yBAAoC;EACpC,WhCqBM;AT2tThB;;AyCtxTA;EAyCU,WhCkBM;EgCjBN,YAAY;AzCivTtB;;AyC3xTA;EA4CY,UAAU;AzCmvTtB;;AyC/xTA;EA+CY,UAAU;AzCovTtB;;AyCnyTA;EAmDY,WhCQI;AT4uThB;;AyCvyTA;EAqDc,uCvCrDe;AF2yT7B;;AyC3yTA;EAyDc,sBhCEE;EgCDF,kBhCCE;EgCAF,cvC1CoB;AFgyTlC;;AyCjzTA;EAgEQ,gFAAyG;AzCqvTjH;;ACtuTE;EwC/EF;IAmEY,gFAAyG;EzCuvTnH;AACF;;AyC3zTA;EAeM,yBvCI4B;EuCH5B,WhC2CU;ATqwThB;;AyCh0TA;;EAmBQ,cAAc;AzCkzTtB;;AyCr0TA;EAqBQ,WhCsCQ;AT8wThB;;AyCz0TA;EAuBQ,+BhCoCQ;ATkxThB;;AyC70TA;;EA0BU,WhCiCM;ATuxThB;;ACvvTE;EwC3FF;IA6BU,yBvCVwB;EFo0ThC;AACF;;AyCx1TA;;EAgCQ,+BhC2BQ;ATkyThB;;AyC71TA;;;EAqCU,yBAAoC;EACpC,WhCqBM;ATyyThB;;AyCp2TA;EAyCU,WhCkBM;EgCjBN,YAAY;AzC+zTtB;;AyCz2TA;EA4CY,UAAU;AzCi0TtB;;AyC72TA;EA+CY,UAAU;AzCk0TtB;;AyCj3TA;EAmDY,WhCQI;AT0zThB;;AyCr3TA;EAqDc,uCvCrDe;AFy3T7B;;AyCz3TA;EAyDc,sBhCEE;EgCDF,kBhCCE;EgCAF,cvCxCoB;AF42TlC;;AyC/3TA;EAgEQ,gFAAyG;AzCm0TjH;;ACpzTE;EwC/EF;IAmEY,gFAAyG;EzCq0TnH;AACF;;AyCz4TA;EAeM,yBvCG4B;EuCF5B,WhC2CU;ATm1ThB;;AyC94TA;;EAmBQ,cAAc;AzCg4TtB;;AyCn5TA;EAqBQ,WhCsCQ;AT41ThB;;AyCv5TA;EAuBQ,+BhCoCQ;ATg2ThB;;AyC35TA;;EA0BU,WhCiCM;ATq2ThB;;ACr0TE;EwC3FF;IA6BU,yBvCXwB;EFm5ThC;AACF;;AyCt6TA;;EAgCQ,+BhC2BQ;ATg3ThB;;AyC36TA;;;EAqCU,yBAAoC;EACpC,WhCqBM;ATu3ThB;;AyCl7TA;EAyCU,WhCkBM;EgCjBN,YAAY;AzC64TtB;;AyCv7TA;EA4CY,UAAU;AzC+4TtB;;AyC37TA;EA+CY,UAAU;AzCg5TtB;;AyC/7TA;EAmDY,WhCQI;ATw4ThB;;AyCn8TA;EAqDc,uCvCrDe;AFu8T7B;;AyCv8TA;EAyDc,sBhCEE;EgCDF,kBhCCE;EgCAF,cvCzCoB;AF27TlC;;AyC78TA;EAgEQ,gFAAyG;AzCi5TjH;;ACl4TE;EwC/EF;IAmEY,gFAAyG;EzCm5TnH;AACF;;AyCv9TA;EAeM,yBvCC4B;EuCA5B,WhC2CU;ATi6ThB;;AyC59TA;;EAmBQ,cAAc;AzC88TtB;;AyCj+TA;EAqBQ,WhCsCQ;AT06ThB;;AyCr+TA;EAuBQ,+BhCoCQ;AT86ThB;;AyCz+TA;;EA0BU,WhCiCM;ATm7ThB;;ACn5TE;EwC3FF;IA6BU,yBvCbwB;EFm+ThC;AACF;;AyCp/TA;;EAgCQ,+BhC2BQ;AT87ThB;;AyCz/TA;;;EAqCU,yBAAoC;EACpC,WhCqBM;ATq8ThB;;AyChgUA;EAyCU,WhCkBM;EgCjBN,YAAY;AzC29TtB;;AyCrgUA;EA4CY,UAAU;AzC69TtB;;AyCzgUA;EA+CY,UAAU;AzC89TtB;;AyC7gUA;EAmDY,WhCQI;ATs9ThB;;AyCjhUA;EAqDc,uCvCrDe;AFqhU7B;;AyCrhUA;EAyDc,sBhCEE;EgCDF,kBhCCE;EgCAF,cvC3CoB;AF2gUlC;;AyC3hUA;EAgEQ,gFAAyG;AzC+9TjH;;ACh9TE;EwC/EF;IAmEY,gFAAyG;EzCi+TnH;AACF;;AyCriUA;EAeM,yBvCA4B;EuCC5B,yBhCyCe;ATi/TrB;;AyC1iUA;;EAmBQ,cAAc;AzC4hUtB;;AyC/iUA;EAqBQ,yBhCoCa;AT0/TrB;;AyCnjUA;EAuBQ,yBhCkCa;AT8/TrB;;AyCvjUA;;EA0BU,yBhC+BW;ATmgUrB;;ACj+TE;EwC3FF;IA6BU,yBvCdwB;EFkjUhC;AACF;;AyClkUA;;EAgCQ,yBhCyBa;AT8gUrB;;AyCvkUA;;;EAqCU,yBAAoC;EACpC,yBhCmBW;ATqhUrB;;AyC9kUA;EAyCU,yBhCgBW;EgCfX,YAAY;AzCyiUtB;;AyCnlUA;EA4CY,UAAU;AzC2iUtB;;AyCvlUA;EA+CY,UAAU;AzC4iUtB;;AyC3lUA;EAmDY,yBhCMS;ATsiUrB;;AyC/lUA;EAqDc,uCvCrDe;AFmmU7B;;AyCnmUA;EAyDc,oChCAO;EgCCP,gChCDO;EgCEP,cvC5CoB;AF0lUlC;;AyCzmUA;EAgEQ,gFAAyG;AzC6iUjH;;AC9hUE;EwC/EF;IAmEY,gFAAyG;EzC+iUnH;AACF;;AyCnnUA;EAeM,yBvCM4B;EuCL5B,WhC2CU;AT6jUhB;;AyCxnUA;;EAmBQ,cAAc;AzC0mUtB;;AyC7nUA;EAqBQ,WhCsCQ;ATskUhB;;AyCjoUA;EAuBQ,+BhCoCQ;AT0kUhB;;AyCroUA;;EA0BU,WhCiCM;AT+kUhB;;AC/iUE;EwC3FF;IA6BU,yBvCRwB;EF0nUhC;AACF;;AyChpUA;;EAgCQ,+BhC2BQ;AT0lUhB;;AyCrpUA;;;EAqCU,yBAAoC;EACpC,WhCqBM;ATimUhB;;AyC5pUA;EAyCU,WhCkBM;EgCjBN,YAAY;AzCunUtB;;AyCjqUA;EA4CY,UAAU;AzCynUtB;;AyCrqUA;EA+CY,UAAU;AzC0nUtB;;AyCzqUA;EAmDY,WhCQI;ATknUhB;;AyC7qUA;EAqDc,uCvCrDe;AFirU
7B;;AyCjrUA;EAyDc,sBhCEE;EgCDF,kBhCCE;EgCAF,cvCtCoB;AFkqUlC;;AyCvrUA;EAgEQ,gFAAyG;AzC2nUjH;;AC5mUE;EwC/EF;IAmEY,gFAAyG;EzC6nUnH;AACF;;AyCjsUA;EAuEM,sBAAsB;EACtB,mBAAmB;AzC8nUzB;;ACnnUE;EwCnFF;IA4EQ,oBAAoB;IACpB,iBAAiB;EzC+nUvB;AACF;;AC1nUE;EwCnFF;IAiFQ,qBAAqB;IACrB,kBAAkB;EzCioUxB;AACF;;AyCptUA;EAuFM,mBAAmB;EACnB,aAAa;AzCioUnB;;AyCztUA;EA0FQ,YAAY;EACZ,cAAc;AzCmoUtB;;AyC9tUA;EA6FI,gBAAgB;AzCqoUpB;;AyCluUA;EA+FI,iBAAiB;AzCuoUrB;;AyCnoUA;EAEE,gBAAgB;AzCqoUlB;;AyCvoUA;EAII,SAAS;EACT,gBAAgB;EAChB,eAAe;EACf,kBAAkB;EAClB,QAAQ;EACR,6CAAqC;UAArC,qCAAqC;AzCuoUzC;;AyChpUA;EAYI,YAAY;AzCwoUhB;;ACxqUE;EwCoBF;IAeI,aAAa;EzC0oUf;AACF;;AyCzoUA;EACE,kBAAkB;AzC4oUpB;;AClrUE;EwCqCF;IAKM,aAAa;EzC6oUjB;EyClpUF;IAOQ,sBAAsB;EzC8oU5B;AACF;;ACvrUE;EwCiCF;IASI,aAAa;IACb,uBAAuB;EzCkpUzB;EyC5pUF;IAYM,oBAAoB;EzCmpUxB;AACF;;AyChpUA;;EAEE,YAAY;EACZ,cAAc;AzCmpUhB;;AyCjpUA;EACE,YAAY;EACZ,cAAc;EACd,oBAAoB;AzCopUtB;;A0C9xUA;EACE,oBAL2B;A1CsyU7B;;ACrsUE;EyC7FF;IAMM,oBAT8B;E1C0yUlC;E0CvyUF;IAQM,qBAV8B;E1C4yUlC;AACF;;A2C3yUA;EACE,yBzCQ4B;EyCP5B,yBAJ+B;A3CkzUjC","file":"bulma.css"} \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/www/css/bulma-0.7.5/bulma.min.css b/testing/web-platform/tests/tools/wave/www/css/bulma-0.7.5/bulma.min.css
new file mode 100644
index 0000000000..434a97896a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/css/bulma-0.7.5/bulma.min.css
@@ -0,0 +1 @@
+/*! bulma.io v0.7.5 | MIT License | github.com/jgthms/bulma */@-webkit-keyframes spinAround{from{-webkit-transform:rotate(0);transform:rotate(0)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes spinAround{from{-webkit-transform:rotate(0);transform:rotate(0)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.breadcrumb,.button,.delete,.file,.is-unselectable,.modal-close,.pagination-ellipsis,.pagination-link,.pagination-next,.pagination-previous,.tabs{-webkit-touch-callout:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.navbar-link:not(.is-arrowless)::after,.select:not(.is-multiple):not(.is-loading)::after{border:3px solid transparent;border-radius:2px;border-right:0;border-top:0;content:" ";display:block;height:.625em;margin-top:-.4375em;pointer-events:none;position:absolute;top:50%;-webkit-transform:rotate(-45deg);transform:rotate(-45deg);-webkit-transform-origin:center;transform-origin:center;width:.625em}.block:not(:last-child),.box:not(:last-child),.breadcrumb:not(:last-child),.content:not(:last-child),.highlight:not(:last-child),.level:not(:last-child),.list:not(:last-child),.message:not(:last-child),.notification:not(:last-child),.progress:not(:last-child),.subtitle:not(:last-child),.table-container:not(:last-child),.table:not(:last-child),.tabs:not(:last-child),.title:not(:last-child){margin-bottom:1.5rem}.delete,.modal-close{-moz-appearance:none;-webkit-appearance:none;background-color:rgba(10,10,10,.2);border:none;border-radius:290486px;cursor:pointer;pointer-events:auto;display:inline-block;flex-grow:0;flex-shrink:0;font-size:0;height:20px;max-height:20px;max-width:20px;min-height:20px;min-width:20px;outline:0;position:relative;vertical-align:top;width:20px}.delete::after,.delete::before,.modal-close::after,.modal-close::before{background-color:#fff;content:"";display:block;left:50%;position:absolute;top:50%;-webkit-transform:translateX(-50%) translateY(-50%) rotate(45deg);transform:translateX(-50%) translateY(-50%) rotate(45deg);-webkit-transform-origin:center center;transform-origin:center center}.delete::before,.modal-close::before{height:2px;width:50%}.delete::after,.modal-close::after{height:50%;width:2px}.delete:focus,.delete:hover,.modal-close:focus,.modal-close:hover{background-color:rgba(10,10,10,.3)}.delete:active,.modal-close:active{background-color:rgba(10,10,10,.4)}.is-small.delete,.is-small.modal-close{height:16px;max-height:16px;max-width:16px;min-height:16px;min-width:16px;width:16px}.is-medium.delete,.is-medium.modal-close{height:24px;max-height:24px;max-width:24px;min-height:24px;min-width:24px;width:24px}.is-large.delete,.is-large.modal-close{height:32px;max-height:32px;max-width:32px;min-height:32px;min-width:32px;width:32px}.button.is-loading::after,.control.is-loading::after,.loader,.select.is-loading::after{-webkit-animation:spinAround .5s infinite linear;animation:spinAround .5s infinite linear;border:2px solid #dbdbdb;border-radius:290486px;border-right-color:transparent;border-top-color:transparent;content:"";display:block;height:1em;position:relative;width:1em}.hero-video,.image.is-16by9 .has-ratio,.image.is-16by9 img,.image.is-1by1 .has-ratio,.image.is-1by1 img,.image.is-1by2 .has-ratio,.image.is-1by2 img,.image.is-1by3 .has-ratio,.image.is-1by3 img,.image.is-2by1 .has-ratio,.image.is-2by1 img,.image.is-2by3 .has-ratio,.image.is-2by3 img,.image.is-3by1 .has-ratio,.image.is-3by1 img,.image.is-3by2 .has-ratio,.image.is-3by2 img,.image.is-3by4 .has-ratio,.image.is-3by4 
img,.image.is-3by5 .has-ratio,.image.is-3by5 img,.image.is-4by3 .has-ratio,.image.is-4by3 img,.image.is-4by5 .has-ratio,.image.is-4by5 img,.image.is-5by3 .has-ratio,.image.is-5by3 img,.image.is-5by4 .has-ratio,.image.is-5by4 img,.image.is-9by16 .has-ratio,.image.is-9by16 img,.image.is-square .has-ratio,.image.is-square img,.is-overlay,.modal,.modal-background{bottom:0;left:0;position:absolute;right:0;top:0}.button,.file-cta,.file-name,.input,.pagination-ellipsis,.pagination-link,.pagination-next,.pagination-previous,.select select,.textarea{-moz-appearance:none;-webkit-appearance:none;align-items:center;border:1px solid transparent;border-radius:4px;box-shadow:none;display:inline-flex;font-size:1rem;height:2.25em;justify-content:flex-start;line-height:1.5;padding-bottom:calc(.375em - 1px);padding-left:calc(.625em - 1px);padding-right:calc(.625em - 1px);padding-top:calc(.375em - 1px);position:relative;vertical-align:top}.button:active,.button:focus,.file-cta:active,.file-cta:focus,.file-name:active,.file-name:focus,.input:active,.input:focus,.is-active.button,.is-active.file-cta,.is-active.file-name,.is-active.input,.is-active.pagination-ellipsis,.is-active.pagination-link,.is-active.pagination-next,.is-active.pagination-previous,.is-active.textarea,.is-focused.button,.is-focused.file-cta,.is-focused.file-name,.is-focused.input,.is-focused.pagination-ellipsis,.is-focused.pagination-link,.is-focused.pagination-next,.is-focused.pagination-previous,.is-focused.textarea,.pagination-ellipsis:active,.pagination-ellipsis:focus,.pagination-link:active,.pagination-link:focus,.pagination-next:active,.pagination-next:focus,.pagination-previous:active,.pagination-previous:focus,.select select.is-active,.select select.is-focused,.select select:active,.select select:focus,.textarea:active,.textarea:focus{outline:0}.button[disabled],.file-cta[disabled],.file-name[disabled],.input[disabled],.pagination-ellipsis[disabled],.pagination-link[disabled],.pagination-next[disabled],.pagination-previous[disabled],.select fieldset[disabled] select,.select select[disabled],.textarea[disabled],fieldset[disabled] .button,fieldset[disabled] .file-cta,fieldset[disabled] .file-name,fieldset[disabled] .input,fieldset[disabled] .pagination-ellipsis,fieldset[disabled] .pagination-link,fieldset[disabled] .pagination-next,fieldset[disabled] .pagination-previous,fieldset[disabled] .select select,fieldset[disabled] .textarea{cursor:not-allowed}/*! 
minireset.css v0.0.4 | MIT License | github.com/jgthms/minireset.css */blockquote,body,dd,dl,dt,fieldset,figure,h1,h2,h3,h4,h5,h6,hr,html,iframe,legend,li,ol,p,pre,textarea,ul{margin:0;padding:0}h1,h2,h3,h4,h5,h6{font-size:100%;font-weight:400}ul{list-style:none}button,input,select,textarea{margin:0}html{box-sizing:border-box}*,::after,::before{box-sizing:inherit}embed,iframe,img,object,video{height:auto;max-width:100%}audio{max-width:100%}iframe{border:0}table{border-collapse:collapse;border-spacing:0}td,th{padding:0}td:not([align]),th:not([align]){text-align:left}html{background-color:#fff;font-size:16px;-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;min-width:300px;overflow-x:hidden;overflow-y:scroll;text-rendering:optimizeLegibility;-webkit-text-size-adjust:100%;-moz-text-size-adjust:100%;-ms-text-size-adjust:100%;text-size-adjust:100%}article,aside,figure,footer,header,hgroup,section{display:block}body,button,input,select,textarea{font-family:BlinkMacSystemFont,-apple-system,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Fira Sans","Droid Sans","Helvetica Neue",Helvetica,Arial,sans-serif}code,pre{-moz-osx-font-smoothing:auto;-webkit-font-smoothing:auto;font-family:monospace}body{color:#4a4a4a;font-size:1em;font-weight:400;line-height:1.5}a{color:#3273dc;cursor:pointer;text-decoration:none}a strong{color:currentColor}a:hover{color:#363636}code{background-color:#f5f5f5;color:#ff3860;font-size:.875em;font-weight:400;padding:.25em .5em .25em}hr{background-color:#f5f5f5;border:none;display:block;height:2px;margin:1.5rem 0}img{height:auto;max-width:100%}input[type=checkbox],input[type=radio]{vertical-align:baseline}small{font-size:.875em}span{font-style:inherit;font-weight:inherit}strong{color:#363636;font-weight:700}fieldset{border:none}pre{-webkit-overflow-scrolling:touch;background-color:#f5f5f5;color:#4a4a4a;font-size:.875em;overflow-x:auto;padding:1.25rem 1.5rem;white-space:pre;word-wrap:normal}pre code{background-color:transparent;color:currentColor;font-size:1em;padding:0}table td,table th{vertical-align:top}table td:not([align]),table th:not([align]){text-align:left}table th{color:#363636}.is-clearfix::after{clear:both;content:" ";display:table}.is-pulled-left{float:left!important}.is-pulled-right{float:right!important}.is-clipped{overflow:hidden!important}.is-size-1{font-size:3rem!important}.is-size-2{font-size:2.5rem!important}.is-size-3{font-size:2rem!important}.is-size-4{font-size:1.5rem!important}.is-size-5{font-size:1.25rem!important}.is-size-6{font-size:1rem!important}.is-size-7{font-size:.75rem!important}@media screen and (max-width:768px){.is-size-1-mobile{font-size:3rem!important}.is-size-2-mobile{font-size:2.5rem!important}.is-size-3-mobile{font-size:2rem!important}.is-size-4-mobile{font-size:1.5rem!important}.is-size-5-mobile{font-size:1.25rem!important}.is-size-6-mobile{font-size:1rem!important}.is-size-7-mobile{font-size:.75rem!important}}@media screen and (min-width:769px),print{.is-size-1-tablet{font-size:3rem!important}.is-size-2-tablet{font-size:2.5rem!important}.is-size-3-tablet{font-size:2rem!important}.is-size-4-tablet{font-size:1.5rem!important}.is-size-5-tablet{font-size:1.25rem!important}.is-size-6-tablet{font-size:1rem!important}.is-size-7-tablet{font-size:.75rem!important}}@media screen and 
(max-width:1023px){.is-size-1-touch{font-size:3rem!important}.is-size-2-touch{font-size:2.5rem!important}.is-size-3-touch{font-size:2rem!important}.is-size-4-touch{font-size:1.5rem!important}.is-size-5-touch{font-size:1.25rem!important}.is-size-6-touch{font-size:1rem!important}.is-size-7-touch{font-size:.75rem!important}}@media screen and (min-width:1024px){.is-size-1-desktop{font-size:3rem!important}.is-size-2-desktop{font-size:2.5rem!important}.is-size-3-desktop{font-size:2rem!important}.is-size-4-desktop{font-size:1.5rem!important}.is-size-5-desktop{font-size:1.25rem!important}.is-size-6-desktop{font-size:1rem!important}.is-size-7-desktop{font-size:.75rem!important}}@media screen and (min-width:1216px){.is-size-1-widescreen{font-size:3rem!important}.is-size-2-widescreen{font-size:2.5rem!important}.is-size-3-widescreen{font-size:2rem!important}.is-size-4-widescreen{font-size:1.5rem!important}.is-size-5-widescreen{font-size:1.25rem!important}.is-size-6-widescreen{font-size:1rem!important}.is-size-7-widescreen{font-size:.75rem!important}}@media screen and (min-width:1408px){.is-size-1-fullhd{font-size:3rem!important}.is-size-2-fullhd{font-size:2.5rem!important}.is-size-3-fullhd{font-size:2rem!important}.is-size-4-fullhd{font-size:1.5rem!important}.is-size-5-fullhd{font-size:1.25rem!important}.is-size-6-fullhd{font-size:1rem!important}.is-size-7-fullhd{font-size:.75rem!important}}.has-text-centered{text-align:center!important}.has-text-justified{text-align:justify!important}.has-text-left{text-align:left!important}.has-text-right{text-align:right!important}@media screen and (max-width:768px){.has-text-centered-mobile{text-align:center!important}}@media screen and (min-width:769px),print{.has-text-centered-tablet{text-align:center!important}}@media screen and (min-width:769px) and (max-width:1023px){.has-text-centered-tablet-only{text-align:center!important}}@media screen and (max-width:1023px){.has-text-centered-touch{text-align:center!important}}@media screen and (min-width:1024px){.has-text-centered-desktop{text-align:center!important}}@media screen and (min-width:1024px) and (max-width:1215px){.has-text-centered-desktop-only{text-align:center!important}}@media screen and (min-width:1216px){.has-text-centered-widescreen{text-align:center!important}}@media screen and (min-width:1216px) and (max-width:1407px){.has-text-centered-widescreen-only{text-align:center!important}}@media screen and (min-width:1408px){.has-text-centered-fullhd{text-align:center!important}}@media screen and (max-width:768px){.has-text-justified-mobile{text-align:justify!important}}@media screen and (min-width:769px),print{.has-text-justified-tablet{text-align:justify!important}}@media screen and (min-width:769px) and (max-width:1023px){.has-text-justified-tablet-only{text-align:justify!important}}@media screen and (max-width:1023px){.has-text-justified-touch{text-align:justify!important}}@media screen and (min-width:1024px){.has-text-justified-desktop{text-align:justify!important}}@media screen and (min-width:1024px) and (max-width:1215px){.has-text-justified-desktop-only{text-align:justify!important}}@media screen and (min-width:1216px){.has-text-justified-widescreen{text-align:justify!important}}@media screen and (min-width:1216px) and (max-width:1407px){.has-text-justified-widescreen-only{text-align:justify!important}}@media screen and (min-width:1408px){.has-text-justified-fullhd{text-align:justify!important}}@media screen and (max-width:768px){.has-text-left-mobile{text-align:left!important}}@media screen and 
(min-width:769px),print{.has-text-left-tablet{text-align:left!important}}@media screen and (min-width:769px) and (max-width:1023px){.has-text-left-tablet-only{text-align:left!important}}@media screen and (max-width:1023px){.has-text-left-touch{text-align:left!important}}@media screen and (min-width:1024px){.has-text-left-desktop{text-align:left!important}}@media screen and (min-width:1024px) and (max-width:1215px){.has-text-left-desktop-only{text-align:left!important}}@media screen and (min-width:1216px){.has-text-left-widescreen{text-align:left!important}}@media screen and (min-width:1216px) and (max-width:1407px){.has-text-left-widescreen-only{text-align:left!important}}@media screen and (min-width:1408px){.has-text-left-fullhd{text-align:left!important}}@media screen and (max-width:768px){.has-text-right-mobile{text-align:right!important}}@media screen and (min-width:769px),print{.has-text-right-tablet{text-align:right!important}}@media screen and (min-width:769px) and (max-width:1023px){.has-text-right-tablet-only{text-align:right!important}}@media screen and (max-width:1023px){.has-text-right-touch{text-align:right!important}}@media screen and (min-width:1024px){.has-text-right-desktop{text-align:right!important}}@media screen and (min-width:1024px) and (max-width:1215px){.has-text-right-desktop-only{text-align:right!important}}@media screen and (min-width:1216px){.has-text-right-widescreen{text-align:right!important}}@media screen and (min-width:1216px) and (max-width:1407px){.has-text-right-widescreen-only{text-align:right!important}}@media screen and (min-width:1408px){.has-text-right-fullhd{text-align:right!important}}.is-capitalized{text-transform:capitalize!important}.is-lowercase{text-transform:lowercase!important}.is-uppercase{text-transform:uppercase!important}.is-italic{font-style:italic!important}.has-text-white{color:#fff!important}a.has-text-white:focus,a.has-text-white:hover{color:#e6e6e6!important}.has-background-white{background-color:#fff!important}.has-text-black{color:#0a0a0a!important}a.has-text-black:focus,a.has-text-black:hover{color:#000!important}.has-background-black{background-color:#0a0a0a!important}.has-text-light{color:#f5f5f5!important}a.has-text-light:focus,a.has-text-light:hover{color:#dbdbdb!important}.has-background-light{background-color:#f5f5f5!important}.has-text-dark{color:#363636!important}a.has-text-dark:focus,a.has-text-dark:hover{color:#1c1c1c!important}.has-background-dark{background-color:#363636!important}.has-text-primary{color:#00d1b2!important}a.has-text-primary:focus,a.has-text-primary:hover{color:#009e86!important}.has-background-primary{background-color:#00d1b2!important}.has-text-link{color:#3273dc!important}a.has-text-link:focus,a.has-text-link:hover{color:#205bbc!important}.has-background-link{background-color:#3273dc!important}.has-text-info{color:#209cee!important}a.has-text-info:focus,a.has-text-info:hover{color:#0f81cc!important}.has-background-info{background-color:#209cee!important}.has-text-success{color:#23d160!important}a.has-text-success:focus,a.has-text-success:hover{color:#1ca64c!important}.has-background-success{background-color:#23d160!important}.has-text-warning{color:#ffdd57!important}a.has-text-warning:focus,a.has-text-warning:hover{color:#ffd324!important}.has-background-warning{background-color:#ffdd57!important}.has-text-danger{color:#ff3860!important}a.has-text-danger:focus,a.has-text-danger:hover{color:#ff0537!important}.has-background-danger{background-color:#ff3860!important}.has-text-black-bis{color:#121212!
important}.has-background-black-bis{background-color:#121212!important}.has-text-black-ter{color:#242424!important}.has-background-black-ter{background-color:#242424!important}.has-text-grey-darker{color:#363636!important}.has-background-grey-darker{background-color:#363636!important}.has-text-grey-dark{color:#4a4a4a!important}.has-background-grey-dark{background-color:#4a4a4a!important}.has-text-grey{color:#7a7a7a!important}.has-background-grey{background-color:#7a7a7a!important}.has-text-grey-light{color:#b5b5b5!important}.has-background-grey-light{background-color:#b5b5b5!important}.has-text-grey-lighter{color:#dbdbdb!important}.has-background-grey-lighter{background-color:#dbdbdb!important}.has-text-white-ter{color:#f5f5f5!important}.has-background-white-ter{background-color:#f5f5f5!important}.has-text-white-bis{color:#fafafa!important}.has-background-white-bis{background-color:#fafafa!important}.has-text-weight-light{font-weight:300!important}.has-text-weight-normal{font-weight:400!important}.has-text-weight-medium{font-weight:500!important}.has-text-weight-semibold{font-weight:600!important}.has-text-weight-bold{font-weight:700!important}.is-family-primary{font-family:BlinkMacSystemFont,-apple-system,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Fira Sans","Droid Sans","Helvetica Neue",Helvetica,Arial,sans-serif!important}.is-family-secondary{font-family:BlinkMacSystemFont,-apple-system,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Fira Sans","Droid Sans","Helvetica Neue",Helvetica,Arial,sans-serif!important}.is-family-sans-serif{font-family:BlinkMacSystemFont,-apple-system,"Segoe UI",Roboto,Oxygen,Ubuntu,Cantarell,"Fira Sans","Droid Sans","Helvetica Neue",Helvetica,Arial,sans-serif!important}.is-family-monospace{font-family:monospace!important}.is-family-code{font-family:monospace!important}.is-block{display:block!important}@media screen and (max-width:768px){.is-block-mobile{display:block!important}}@media screen and (min-width:769px),print{.is-block-tablet{display:block!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-block-tablet-only{display:block!important}}@media screen and (max-width:1023px){.is-block-touch{display:block!important}}@media screen and (min-width:1024px){.is-block-desktop{display:block!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-block-desktop-only{display:block!important}}@media screen and (min-width:1216px){.is-block-widescreen{display:block!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-block-widescreen-only{display:block!important}}@media screen and (min-width:1408px){.is-block-fullhd{display:block!important}}.is-flex{display:flex!important}@media screen and (max-width:768px){.is-flex-mobile{display:flex!important}}@media screen and (min-width:769px),print{.is-flex-tablet{display:flex!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-flex-tablet-only{display:flex!important}}@media screen and (max-width:1023px){.is-flex-touch{display:flex!important}}@media screen and (min-width:1024px){.is-flex-desktop{display:flex!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-flex-desktop-only{display:flex!important}}@media screen and (min-width:1216px){.is-flex-widescreen{display:flex!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-flex-widescreen-only{display:flex!important}}@media screen and (min-width:1408px){.is-flex-fullhd{display:flex!important}}.is-inline{display:inline!important}@media screen and 
(max-width:768px){.is-inline-mobile{display:inline!important}}@media screen and (min-width:769px),print{.is-inline-tablet{display:inline!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-inline-tablet-only{display:inline!important}}@media screen and (max-width:1023px){.is-inline-touch{display:inline!important}}@media screen and (min-width:1024px){.is-inline-desktop{display:inline!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-inline-desktop-only{display:inline!important}}@media screen and (min-width:1216px){.is-inline-widescreen{display:inline!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-inline-widescreen-only{display:inline!important}}@media screen and (min-width:1408px){.is-inline-fullhd{display:inline!important}}.is-inline-block{display:inline-block!important}@media screen and (max-width:768px){.is-inline-block-mobile{display:inline-block!important}}@media screen and (min-width:769px),print{.is-inline-block-tablet{display:inline-block!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-inline-block-tablet-only{display:inline-block!important}}@media screen and (max-width:1023px){.is-inline-block-touch{display:inline-block!important}}@media screen and (min-width:1024px){.is-inline-block-desktop{display:inline-block!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-inline-block-desktop-only{display:inline-block!important}}@media screen and (min-width:1216px){.is-inline-block-widescreen{display:inline-block!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-inline-block-widescreen-only{display:inline-block!important}}@media screen and (min-width:1408px){.is-inline-block-fullhd{display:inline-block!important}}.is-inline-flex{display:inline-flex!important}@media screen and (max-width:768px){.is-inline-flex-mobile{display:inline-flex!important}}@media screen and (min-width:769px),print{.is-inline-flex-tablet{display:inline-flex!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-inline-flex-tablet-only{display:inline-flex!important}}@media screen and (max-width:1023px){.is-inline-flex-touch{display:inline-flex!important}}@media screen and (min-width:1024px){.is-inline-flex-desktop{display:inline-flex!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-inline-flex-desktop-only{display:inline-flex!important}}@media screen and (min-width:1216px){.is-inline-flex-widescreen{display:inline-flex!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-inline-flex-widescreen-only{display:inline-flex!important}}@media screen and (min-width:1408px){.is-inline-flex-fullhd{display:inline-flex!important}}.is-hidden{display:none!important}.is-sr-only{border:none!important;clip:rect(0,0,0,0)!important;height:.01em!important;overflow:hidden!important;padding:0!important;position:absolute!important;white-space:nowrap!important;width:.01em!important}@media screen and (max-width:768px){.is-hidden-mobile{display:none!important}}@media screen and (min-width:769px),print{.is-hidden-tablet{display:none!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-hidden-tablet-only{display:none!important}}@media screen and (max-width:1023px){.is-hidden-touch{display:none!important}}@media screen and (min-width:1024px){.is-hidden-desktop{display:none!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-hidden-desktop-only{display:none!important}}@media screen and 
(min-width:1216px){.is-hidden-widescreen{display:none!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-hidden-widescreen-only{display:none!important}}@media screen and (min-width:1408px){.is-hidden-fullhd{display:none!important}}.is-invisible{visibility:hidden!important}@media screen and (max-width:768px){.is-invisible-mobile{visibility:hidden!important}}@media screen and (min-width:769px),print{.is-invisible-tablet{visibility:hidden!important}}@media screen and (min-width:769px) and (max-width:1023px){.is-invisible-tablet-only{visibility:hidden!important}}@media screen and (max-width:1023px){.is-invisible-touch{visibility:hidden!important}}@media screen and (min-width:1024px){.is-invisible-desktop{visibility:hidden!important}}@media screen and (min-width:1024px) and (max-width:1215px){.is-invisible-desktop-only{visibility:hidden!important}}@media screen and (min-width:1216px){.is-invisible-widescreen{visibility:hidden!important}}@media screen and (min-width:1216px) and (max-width:1407px){.is-invisible-widescreen-only{visibility:hidden!important}}@media screen and (min-width:1408px){.is-invisible-fullhd{visibility:hidden!important}}.is-marginless{margin:0!important}.is-paddingless{padding:0!important}.is-radiusless{border-radius:0!important}.is-shadowless{box-shadow:none!important}.is-relative{position:relative!important}.box{background-color:#fff;border-radius:6px;box-shadow:0 2px 3px rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.1);color:#4a4a4a;display:block;padding:1.25rem}a.box:focus,a.box:hover{box-shadow:0 2px 3px rgba(10,10,10,.1),0 0 0 1px #3273dc}a.box:active{box-shadow:inset 0 1px 2px rgba(10,10,10,.2),0 0 0 1px #3273dc}.button{background-color:#fff;border-color:#dbdbdb;border-width:1px;color:#363636;cursor:pointer;justify-content:center;padding-bottom:calc(.375em - 1px);padding-left:.75em;padding-right:.75em;padding-top:calc(.375em - 1px);text-align:center;white-space:nowrap}.button strong{color:inherit}.button .icon,.button .icon.is-large,.button .icon.is-medium,.button .icon.is-small{height:1.5em;width:1.5em}.button .icon:first-child:not(:last-child){margin-left:calc(-.375em - 1px);margin-right:.1875em}.button .icon:last-child:not(:first-child){margin-left:.1875em;margin-right:calc(-.375em - 1px)}.button .icon:first-child:last-child{margin-left:calc(-.375em - 1px);margin-right:calc(-.375em - 1px)}.button.is-hovered,.button:hover{border-color:#b5b5b5;color:#363636}.button.is-focused,.button:focus{border-color:#3273dc;color:#363636}.button.is-focused:not(:active),.button:focus:not(:active){box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.button.is-active,.button:active{border-color:#4a4a4a;color:#363636}.button.is-text{background-color:transparent;border-color:transparent;color:#4a4a4a;text-decoration:underline}.button.is-text.is-focused,.button.is-text.is-hovered,.button.is-text:focus,.button.is-text:hover{background-color:#f5f5f5;color:#363636}.button.is-text.is-active,.button.is-text:active{background-color:#e8e8e8;color:#363636}.button.is-text[disabled],fieldset[disabled] .button.is-text{background-color:transparent;border-color:transparent;box-shadow:none}.button.is-white{background-color:#fff;border-color:transparent;color:#0a0a0a}.button.is-white.is-hovered,.button.is-white:hover{background-color:#f9f9f9;border-color:transparent;color:#0a0a0a}.button.is-white.is-focused,.button.is-white:focus{border-color:transparent;color:#0a0a0a}.button.is-white.is-focused:not(:active),.button.is-white:focus:not(:active){box-shadow:0 0 0 .125em 
rgba(255,255,255,.25)}.button.is-white.is-active,.button.is-white:active{background-color:#f2f2f2;border-color:transparent;color:#0a0a0a}.button.is-white[disabled],fieldset[disabled] .button.is-white{background-color:#fff;border-color:transparent;box-shadow:none}.button.is-white.is-inverted{background-color:#0a0a0a;color:#fff}.button.is-white.is-inverted.is-hovered,.button.is-white.is-inverted:hover{background-color:#000}.button.is-white.is-inverted[disabled],fieldset[disabled] .button.is-white.is-inverted{background-color:#0a0a0a;border-color:transparent;box-shadow:none;color:#fff}.button.is-white.is-loading::after{border-color:transparent transparent #0a0a0a #0a0a0a!important}.button.is-white.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-white.is-outlined.is-focused,.button.is-white.is-outlined.is-hovered,.button.is-white.is-outlined:focus,.button.is-white.is-outlined:hover{background-color:#fff;border-color:#fff;color:#0a0a0a}.button.is-white.is-outlined.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-white.is-outlined.is-loading.is-focused::after,.button.is-white.is-outlined.is-loading.is-hovered::after,.button.is-white.is-outlined.is-loading:focus::after,.button.is-white.is-outlined.is-loading:hover::after{border-color:transparent transparent #0a0a0a #0a0a0a!important}.button.is-white.is-outlined[disabled],fieldset[disabled] .button.is-white.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-white.is-inverted.is-outlined{background-color:transparent;border-color:#0a0a0a;color:#0a0a0a}.button.is-white.is-inverted.is-outlined.is-focused,.button.is-white.is-inverted.is-outlined.is-hovered,.button.is-white.is-inverted.is-outlined:focus,.button.is-white.is-inverted.is-outlined:hover{background-color:#0a0a0a;color:#fff}.button.is-white.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-white.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-white.is-inverted.is-outlined.is-loading:focus::after,.button.is-white.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-white.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-white.is-inverted.is-outlined{background-color:transparent;border-color:#0a0a0a;box-shadow:none;color:#0a0a0a}.button.is-black{background-color:#0a0a0a;border-color:transparent;color:#fff}.button.is-black.is-hovered,.button.is-black:hover{background-color:#040404;border-color:transparent;color:#fff}.button.is-black.is-focused,.button.is-black:focus{border-color:transparent;color:#fff}.button.is-black.is-focused:not(:active),.button.is-black:focus:not(:active){box-shadow:0 0 0 .125em rgba(10,10,10,.25)}.button.is-black.is-active,.button.is-black:active{background-color:#000;border-color:transparent;color:#fff}.button.is-black[disabled],fieldset[disabled] .button.is-black{background-color:#0a0a0a;border-color:transparent;box-shadow:none}.button.is-black.is-inverted{background-color:#fff;color:#0a0a0a}.button.is-black.is-inverted.is-hovered,.button.is-black.is-inverted:hover{background-color:#f2f2f2}.button.is-black.is-inverted[disabled],fieldset[disabled] .button.is-black.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#0a0a0a}.button.is-black.is-loading::after{border-color:transparent transparent #fff 
#fff!important}.button.is-black.is-outlined{background-color:transparent;border-color:#0a0a0a;color:#0a0a0a}.button.is-black.is-outlined.is-focused,.button.is-black.is-outlined.is-hovered,.button.is-black.is-outlined:focus,.button.is-black.is-outlined:hover{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}.button.is-black.is-outlined.is-loading::after{border-color:transparent transparent #0a0a0a #0a0a0a!important}.button.is-black.is-outlined.is-loading.is-focused::after,.button.is-black.is-outlined.is-loading.is-hovered::after,.button.is-black.is-outlined.is-loading:focus::after,.button.is-black.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-black.is-outlined[disabled],fieldset[disabled] .button.is-black.is-outlined{background-color:transparent;border-color:#0a0a0a;box-shadow:none;color:#0a0a0a}.button.is-black.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-black.is-inverted.is-outlined.is-focused,.button.is-black.is-inverted.is-outlined.is-hovered,.button.is-black.is-inverted.is-outlined:focus,.button.is-black.is-inverted.is-outlined:hover{background-color:#fff;color:#0a0a0a}.button.is-black.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-black.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-black.is-inverted.is-outlined.is-loading:focus::after,.button.is-black.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #0a0a0a #0a0a0a!important}.button.is-black.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-black.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-light{background-color:#f5f5f5;border-color:transparent;color:#363636}.button.is-light.is-hovered,.button.is-light:hover{background-color:#eee;border-color:transparent;color:#363636}.button.is-light.is-focused,.button.is-light:focus{border-color:transparent;color:#363636}.button.is-light.is-focused:not(:active),.button.is-light:focus:not(:active){box-shadow:0 0 0 .125em rgba(245,245,245,.25)}.button.is-light.is-active,.button.is-light:active{background-color:#e8e8e8;border-color:transparent;color:#363636}.button.is-light[disabled],fieldset[disabled] .button.is-light{background-color:#f5f5f5;border-color:transparent;box-shadow:none}.button.is-light.is-inverted{background-color:#363636;color:#f5f5f5}.button.is-light.is-inverted.is-hovered,.button.is-light.is-inverted:hover{background-color:#292929}.button.is-light.is-inverted[disabled],fieldset[disabled] .button.is-light.is-inverted{background-color:#363636;border-color:transparent;box-shadow:none;color:#f5f5f5}.button.is-light.is-loading::after{border-color:transparent transparent #363636 #363636!important}.button.is-light.is-outlined{background-color:transparent;border-color:#f5f5f5;color:#f5f5f5}.button.is-light.is-outlined.is-focused,.button.is-light.is-outlined.is-hovered,.button.is-light.is-outlined:focus,.button.is-light.is-outlined:hover{background-color:#f5f5f5;border-color:#f5f5f5;color:#363636}.button.is-light.is-outlined.is-loading::after{border-color:transparent transparent #f5f5f5 #f5f5f5!important}.button.is-light.is-outlined.is-loading.is-focused::after,.button.is-light.is-outlined.is-loading.is-hovered::after,.button.is-light.is-outlined.is-loading:focus::after,.button.is-light.is-outlined.is-loading:hover::after{border-color:transparent transparent #363636 
#363636!important}.button.is-light.is-outlined[disabled],fieldset[disabled] .button.is-light.is-outlined{background-color:transparent;border-color:#f5f5f5;box-shadow:none;color:#f5f5f5}.button.is-light.is-inverted.is-outlined{background-color:transparent;border-color:#363636;color:#363636}.button.is-light.is-inverted.is-outlined.is-focused,.button.is-light.is-inverted.is-outlined.is-hovered,.button.is-light.is-inverted.is-outlined:focus,.button.is-light.is-inverted.is-outlined:hover{background-color:#363636;color:#f5f5f5}.button.is-light.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-light.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-light.is-inverted.is-outlined.is-loading:focus::after,.button.is-light.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #f5f5f5 #f5f5f5!important}.button.is-light.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-light.is-inverted.is-outlined{background-color:transparent;border-color:#363636;box-shadow:none;color:#363636}.button.is-dark{background-color:#363636;border-color:transparent;color:#f5f5f5}.button.is-dark.is-hovered,.button.is-dark:hover{background-color:#2f2f2f;border-color:transparent;color:#f5f5f5}.button.is-dark.is-focused,.button.is-dark:focus{border-color:transparent;color:#f5f5f5}.button.is-dark.is-focused:not(:active),.button.is-dark:focus:not(:active){box-shadow:0 0 0 .125em rgba(54,54,54,.25)}.button.is-dark.is-active,.button.is-dark:active{background-color:#292929;border-color:transparent;color:#f5f5f5}.button.is-dark[disabled],fieldset[disabled] .button.is-dark{background-color:#363636;border-color:transparent;box-shadow:none}.button.is-dark.is-inverted{background-color:#f5f5f5;color:#363636}.button.is-dark.is-inverted.is-hovered,.button.is-dark.is-inverted:hover{background-color:#e8e8e8}.button.is-dark.is-inverted[disabled],fieldset[disabled] .button.is-dark.is-inverted{background-color:#f5f5f5;border-color:transparent;box-shadow:none;color:#363636}.button.is-dark.is-loading::after{border-color:transparent transparent #f5f5f5 #f5f5f5!important}.button.is-dark.is-outlined{background-color:transparent;border-color:#363636;color:#363636}.button.is-dark.is-outlined.is-focused,.button.is-dark.is-outlined.is-hovered,.button.is-dark.is-outlined:focus,.button.is-dark.is-outlined:hover{background-color:#363636;border-color:#363636;color:#f5f5f5}.button.is-dark.is-outlined.is-loading::after{border-color:transparent transparent #363636 #363636!important}.button.is-dark.is-outlined.is-loading.is-focused::after,.button.is-dark.is-outlined.is-loading.is-hovered::after,.button.is-dark.is-outlined.is-loading:focus::after,.button.is-dark.is-outlined.is-loading:hover::after{border-color:transparent transparent #f5f5f5 #f5f5f5!important}.button.is-dark.is-outlined[disabled],fieldset[disabled] 
.button.is-dark.is-outlined{background-color:transparent;border-color:#363636;box-shadow:none;color:#363636}.button.is-dark.is-inverted.is-outlined{background-color:transparent;border-color:#f5f5f5;color:#f5f5f5}.button.is-dark.is-inverted.is-outlined.is-focused,.button.is-dark.is-inverted.is-outlined.is-hovered,.button.is-dark.is-inverted.is-outlined:focus,.button.is-dark.is-inverted.is-outlined:hover{background-color:#f5f5f5;color:#363636}.button.is-dark.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-dark.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-dark.is-inverted.is-outlined.is-loading:focus::after,.button.is-dark.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #363636 #363636!important}.button.is-dark.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-dark.is-inverted.is-outlined{background-color:transparent;border-color:#f5f5f5;box-shadow:none;color:#f5f5f5}.button.is-primary{background-color:#00d1b2;border-color:transparent;color:#fff}.button.is-primary.is-hovered,.button.is-primary:hover{background-color:#00c4a7;border-color:transparent;color:#fff}.button.is-primary.is-focused,.button.is-primary:focus{border-color:transparent;color:#fff}.button.is-primary.is-focused:not(:active),.button.is-primary:focus:not(:active){box-shadow:0 0 0 .125em rgba(0,209,178,.25)}.button.is-primary.is-active,.button.is-primary:active{background-color:#00b89c;border-color:transparent;color:#fff}.button.is-primary[disabled],fieldset[disabled] .button.is-primary{background-color:#00d1b2;border-color:transparent;box-shadow:none}.button.is-primary.is-inverted{background-color:#fff;color:#00d1b2}.button.is-primary.is-inverted.is-hovered,.button.is-primary.is-inverted:hover{background-color:#f2f2f2}.button.is-primary.is-inverted[disabled],fieldset[disabled] .button.is-primary.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#00d1b2}.button.is-primary.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-primary.is-outlined{background-color:transparent;border-color:#00d1b2;color:#00d1b2}.button.is-primary.is-outlined.is-focused,.button.is-primary.is-outlined.is-hovered,.button.is-primary.is-outlined:focus,.button.is-primary.is-outlined:hover{background-color:#00d1b2;border-color:#00d1b2;color:#fff}.button.is-primary.is-outlined.is-loading::after{border-color:transparent transparent #00d1b2 #00d1b2!important}.button.is-primary.is-outlined.is-loading.is-focused::after,.button.is-primary.is-outlined.is-loading.is-hovered::after,.button.is-primary.is-outlined.is-loading:focus::after,.button.is-primary.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-primary.is-outlined[disabled],fieldset[disabled] 
.button.is-primary.is-outlined{background-color:transparent;border-color:#00d1b2;box-shadow:none;color:#00d1b2}.button.is-primary.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-primary.is-inverted.is-outlined.is-focused,.button.is-primary.is-inverted.is-outlined.is-hovered,.button.is-primary.is-inverted.is-outlined:focus,.button.is-primary.is-inverted.is-outlined:hover{background-color:#fff;color:#00d1b2}.button.is-primary.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-primary.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-primary.is-inverted.is-outlined.is-loading:focus::after,.button.is-primary.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #00d1b2 #00d1b2!important}.button.is-primary.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-primary.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-link{background-color:#3273dc;border-color:transparent;color:#fff}.button.is-link.is-hovered,.button.is-link:hover{background-color:#276cda;border-color:transparent;color:#fff}.button.is-link.is-focused,.button.is-link:focus{border-color:transparent;color:#fff}.button.is-link.is-focused:not(:active),.button.is-link:focus:not(:active){box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.button.is-link.is-active,.button.is-link:active{background-color:#2366d1;border-color:transparent;color:#fff}.button.is-link[disabled],fieldset[disabled] .button.is-link{background-color:#3273dc;border-color:transparent;box-shadow:none}.button.is-link.is-inverted{background-color:#fff;color:#3273dc}.button.is-link.is-inverted.is-hovered,.button.is-link.is-inverted:hover{background-color:#f2f2f2}.button.is-link.is-inverted[disabled],fieldset[disabled] .button.is-link.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#3273dc}.button.is-link.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-link.is-outlined{background-color:transparent;border-color:#3273dc;color:#3273dc}.button.is-link.is-outlined.is-focused,.button.is-link.is-outlined.is-hovered,.button.is-link.is-outlined:focus,.button.is-link.is-outlined:hover{background-color:#3273dc;border-color:#3273dc;color:#fff}.button.is-link.is-outlined.is-loading::after{border-color:transparent transparent #3273dc #3273dc!important}.button.is-link.is-outlined.is-loading.is-focused::after,.button.is-link.is-outlined.is-loading.is-hovered::after,.button.is-link.is-outlined.is-loading:focus::after,.button.is-link.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-link.is-outlined[disabled],fieldset[disabled] .button.is-link.is-outlined{background-color:transparent;border-color:#3273dc;box-shadow:none;color:#3273dc}.button.is-link.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-link.is-inverted.is-outlined.is-focused,.button.is-link.is-inverted.is-outlined.is-hovered,.button.is-link.is-inverted.is-outlined:focus,.button.is-link.is-inverted.is-outlined:hover{background-color:#fff;color:#3273dc}.button.is-link.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-link.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-link.is-inverted.is-outlined.is-loading:focus::after,.button.is-link.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #3273dc 
#3273dc!important}.button.is-link.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-link.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-info{background-color:#209cee;border-color:transparent;color:#fff}.button.is-info.is-hovered,.button.is-info:hover{background-color:#1496ed;border-color:transparent;color:#fff}.button.is-info.is-focused,.button.is-info:focus{border-color:transparent;color:#fff}.button.is-info.is-focused:not(:active),.button.is-info:focus:not(:active){box-shadow:0 0 0 .125em rgba(32,156,238,.25)}.button.is-info.is-active,.button.is-info:active{background-color:#118fe4;border-color:transparent;color:#fff}.button.is-info[disabled],fieldset[disabled] .button.is-info{background-color:#209cee;border-color:transparent;box-shadow:none}.button.is-info.is-inverted{background-color:#fff;color:#209cee}.button.is-info.is-inverted.is-hovered,.button.is-info.is-inverted:hover{background-color:#f2f2f2}.button.is-info.is-inverted[disabled],fieldset[disabled] .button.is-info.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#209cee}.button.is-info.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-info.is-outlined{background-color:transparent;border-color:#209cee;color:#209cee}.button.is-info.is-outlined.is-focused,.button.is-info.is-outlined.is-hovered,.button.is-info.is-outlined:focus,.button.is-info.is-outlined:hover{background-color:#209cee;border-color:#209cee;color:#fff}.button.is-info.is-outlined.is-loading::after{border-color:transparent transparent #209cee #209cee!important}.button.is-info.is-outlined.is-loading.is-focused::after,.button.is-info.is-outlined.is-loading.is-hovered::after,.button.is-info.is-outlined.is-loading:focus::after,.button.is-info.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-info.is-outlined[disabled],fieldset[disabled] .button.is-info.is-outlined{background-color:transparent;border-color:#209cee;box-shadow:none;color:#209cee}.button.is-info.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-info.is-inverted.is-outlined.is-focused,.button.is-info.is-inverted.is-outlined.is-hovered,.button.is-info.is-inverted.is-outlined:focus,.button.is-info.is-inverted.is-outlined:hover{background-color:#fff;color:#209cee}.button.is-info.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-info.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-info.is-inverted.is-outlined.is-loading:focus::after,.button.is-info.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #209cee #209cee!important}.button.is-info.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-info.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-success{background-color:#23d160;border-color:transparent;color:#fff}.button.is-success.is-hovered,.button.is-success:hover{background-color:#22c65b;border-color:transparent;color:#fff}.button.is-success.is-focused,.button.is-success:focus{border-color:transparent;color:#fff}.button.is-success.is-focused:not(:active),.button.is-success:focus:not(:active){box-shadow:0 0 0 .125em rgba(35,209,96,.25)}.button.is-success.is-active,.button.is-success:active{background-color:#20bc56;border-color:transparent;color:#fff}.button.is-success[disabled],fieldset[disabled] 
.button.is-success{background-color:#23d160;border-color:transparent;box-shadow:none}.button.is-success.is-inverted{background-color:#fff;color:#23d160}.button.is-success.is-inverted.is-hovered,.button.is-success.is-inverted:hover{background-color:#f2f2f2}.button.is-success.is-inverted[disabled],fieldset[disabled] .button.is-success.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#23d160}.button.is-success.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-success.is-outlined{background-color:transparent;border-color:#23d160;color:#23d160}.button.is-success.is-outlined.is-focused,.button.is-success.is-outlined.is-hovered,.button.is-success.is-outlined:focus,.button.is-success.is-outlined:hover{background-color:#23d160;border-color:#23d160;color:#fff}.button.is-success.is-outlined.is-loading::after{border-color:transparent transparent #23d160 #23d160!important}.button.is-success.is-outlined.is-loading.is-focused::after,.button.is-success.is-outlined.is-loading.is-hovered::after,.button.is-success.is-outlined.is-loading:focus::after,.button.is-success.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-success.is-outlined[disabled],fieldset[disabled] .button.is-success.is-outlined{background-color:transparent;border-color:#23d160;box-shadow:none;color:#23d160}.button.is-success.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-success.is-inverted.is-outlined.is-focused,.button.is-success.is-inverted.is-outlined.is-hovered,.button.is-success.is-inverted.is-outlined:focus,.button.is-success.is-inverted.is-outlined:hover{background-color:#fff;color:#23d160}.button.is-success.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-success.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-success.is-inverted.is-outlined.is-loading:focus::after,.button.is-success.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #23d160 #23d160!important}.button.is-success.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-success.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-warning{background-color:#ffdd57;border-color:transparent;color:rgba(0,0,0,.7)}.button.is-warning.is-hovered,.button.is-warning:hover{background-color:#ffdb4a;border-color:transparent;color:rgba(0,0,0,.7)}.button.is-warning.is-focused,.button.is-warning:focus{border-color:transparent;color:rgba(0,0,0,.7)}.button.is-warning.is-focused:not(:active),.button.is-warning:focus:not(:active){box-shadow:0 0 0 .125em rgba(255,221,87,.25)}.button.is-warning.is-active,.button.is-warning:active{background-color:#ffd83d;border-color:transparent;color:rgba(0,0,0,.7)}.button.is-warning[disabled],fieldset[disabled] .button.is-warning{background-color:#ffdd57;border-color:transparent;box-shadow:none}.button.is-warning.is-inverted{background-color:rgba(0,0,0,.7);color:#ffdd57}.button.is-warning.is-inverted.is-hovered,.button.is-warning.is-inverted:hover{background-color:rgba(0,0,0,.7)}.button.is-warning.is-inverted[disabled],fieldset[disabled] .button.is-warning.is-inverted{background-color:rgba(0,0,0,.7);border-color:transparent;box-shadow:none;color:#ffdd57}.button.is-warning.is-loading::after{border-color:transparent transparent rgba(0,0,0,.7) 
rgba(0,0,0,.7)!important}.button.is-warning.is-outlined{background-color:transparent;border-color:#ffdd57;color:#ffdd57}.button.is-warning.is-outlined.is-focused,.button.is-warning.is-outlined.is-hovered,.button.is-warning.is-outlined:focus,.button.is-warning.is-outlined:hover{background-color:#ffdd57;border-color:#ffdd57;color:rgba(0,0,0,.7)}.button.is-warning.is-outlined.is-loading::after{border-color:transparent transparent #ffdd57 #ffdd57!important}.button.is-warning.is-outlined.is-loading.is-focused::after,.button.is-warning.is-outlined.is-loading.is-hovered::after,.button.is-warning.is-outlined.is-loading:focus::after,.button.is-warning.is-outlined.is-loading:hover::after{border-color:transparent transparent rgba(0,0,0,.7) rgba(0,0,0,.7)!important}.button.is-warning.is-outlined[disabled],fieldset[disabled] .button.is-warning.is-outlined{background-color:transparent;border-color:#ffdd57;box-shadow:none;color:#ffdd57}.button.is-warning.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,.7);color:rgba(0,0,0,.7)}.button.is-warning.is-inverted.is-outlined.is-focused,.button.is-warning.is-inverted.is-outlined.is-hovered,.button.is-warning.is-inverted.is-outlined:focus,.button.is-warning.is-inverted.is-outlined:hover{background-color:rgba(0,0,0,.7);color:#ffdd57}.button.is-warning.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-warning.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-warning.is-inverted.is-outlined.is-loading:focus::after,.button.is-warning.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #ffdd57 #ffdd57!important}.button.is-warning.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-warning.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,.7);box-shadow:none;color:rgba(0,0,0,.7)}.button.is-danger{background-color:#ff3860;border-color:transparent;color:#fff}.button.is-danger.is-hovered,.button.is-danger:hover{background-color:#ff2b56;border-color:transparent;color:#fff}.button.is-danger.is-focused,.button.is-danger:focus{border-color:transparent;color:#fff}.button.is-danger.is-focused:not(:active),.button.is-danger:focus:not(:active){box-shadow:0 0 0 .125em rgba(255,56,96,.25)}.button.is-danger.is-active,.button.is-danger:active{background-color:#ff1f4b;border-color:transparent;color:#fff}.button.is-danger[disabled],fieldset[disabled] .button.is-danger{background-color:#ff3860;border-color:transparent;box-shadow:none}.button.is-danger.is-inverted{background-color:#fff;color:#ff3860}.button.is-danger.is-inverted.is-hovered,.button.is-danger.is-inverted:hover{background-color:#f2f2f2}.button.is-danger.is-inverted[disabled],fieldset[disabled] .button.is-danger.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#ff3860}.button.is-danger.is-loading::after{border-color:transparent transparent #fff #fff!important}.button.is-danger.is-outlined{background-color:transparent;border-color:#ff3860;color:#ff3860}.button.is-danger.is-outlined.is-focused,.button.is-danger.is-outlined.is-hovered,.button.is-danger.is-outlined:focus,.button.is-danger.is-outlined:hover{background-color:#ff3860;border-color:#ff3860;color:#fff}.button.is-danger.is-outlined.is-loading::after{border-color:transparent transparent #ff3860 
#ff3860!important}.button.is-danger.is-outlined.is-loading.is-focused::after,.button.is-danger.is-outlined.is-loading.is-hovered::after,.button.is-danger.is-outlined.is-loading:focus::after,.button.is-danger.is-outlined.is-loading:hover::after{border-color:transparent transparent #fff #fff!important}.button.is-danger.is-outlined[disabled],fieldset[disabled] .button.is-danger.is-outlined{background-color:transparent;border-color:#ff3860;box-shadow:none;color:#ff3860}.button.is-danger.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-danger.is-inverted.is-outlined.is-focused,.button.is-danger.is-inverted.is-outlined.is-hovered,.button.is-danger.is-inverted.is-outlined:focus,.button.is-danger.is-inverted.is-outlined:hover{background-color:#fff;color:#ff3860}.button.is-danger.is-inverted.is-outlined.is-loading.is-focused::after,.button.is-danger.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-danger.is-inverted.is-outlined.is-loading:focus::after,.button.is-danger.is-inverted.is-outlined.is-loading:hover::after{border-color:transparent transparent #ff3860 #ff3860!important}.button.is-danger.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-danger.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-small{border-radius:2px;font-size:.75rem}.button.is-normal{font-size:1rem}.button.is-medium{font-size:1.25rem}.button.is-large{font-size:1.5rem}.button[disabled],fieldset[disabled] .button{background-color:#fff;border-color:#dbdbdb;box-shadow:none;opacity:.5}.button.is-fullwidth{display:flex;width:100%}.button.is-loading{color:transparent!important;pointer-events:none}.button.is-loading::after{position:absolute;left:calc(50% - (1em / 2));top:calc(50% - (1em / 2));position:absolute!important}.button.is-static{background-color:#f5f5f5;border-color:#dbdbdb;color:#7a7a7a;box-shadow:none;pointer-events:none}.button.is-rounded{border-radius:290486px;padding-left:1em;padding-right:1em}.buttons{align-items:center;display:flex;flex-wrap:wrap;justify-content:flex-start}.buttons .button{margin-bottom:.5rem}.buttons .button:not(:last-child):not(.is-fullwidth){margin-right:.5rem}.buttons:last-child{margin-bottom:-.5rem}.buttons:not(:last-child){margin-bottom:1rem}.buttons.are-small .button:not(.is-normal):not(.is-medium):not(.is-large){border-radius:2px;font-size:.75rem}.buttons.are-medium .button:not(.is-small):not(.is-normal):not(.is-large){font-size:1.25rem}.buttons.are-large .button:not(.is-small):not(.is-normal):not(.is-medium){font-size:1.5rem}.buttons.has-addons .button:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.buttons.has-addons .button:not(:last-child){border-bottom-right-radius:0;border-top-right-radius:0;margin-right:-1px}.buttons.has-addons .button:last-child{margin-right:0}.buttons.has-addons .button.is-hovered,.buttons.has-addons .button:hover{z-index:2}.buttons.has-addons .button.is-active,.buttons.has-addons .button.is-focused,.buttons.has-addons .button.is-selected,.buttons.has-addons .button:active,.buttons.has-addons .button:focus{z-index:3}.buttons.has-addons .button.is-active:hover,.buttons.has-addons .button.is-focused:hover,.buttons.has-addons .button.is-selected:hover,.buttons.has-addons .button:active:hover,.buttons.has-addons .button:focus:hover{z-index:4}.buttons.has-addons .button.is-expanded{flex-grow:1;flex-shrink:1}.buttons.is-centered{justify-content:center}.buttons.is-centered:not(.has-addons) 
.button:not(.is-fullwidth){margin-left:.25rem;margin-right:.25rem}.buttons.is-right{justify-content:flex-end}.buttons.is-right:not(.has-addons) .button:not(.is-fullwidth){margin-left:.25rem;margin-right:.25rem}.container{flex-grow:1;margin:0 auto;position:relative;width:auto}@media screen and (min-width:1024px){.container{max-width:960px}.container.is-fluid{margin-left:32px;margin-right:32px;max-width:none}}@media screen and (max-width:1215px){.container.is-widescreen{max-width:1152px}}@media screen and (max-width:1407px){.container.is-fullhd{max-width:1344px}}@media screen and (min-width:1216px){.container{max-width:1152px}}@media screen and (min-width:1408px){.container{max-width:1344px}}.content li+li{margin-top:.25em}.content blockquote:not(:last-child),.content dl:not(:last-child),.content ol:not(:last-child),.content p:not(:last-child),.content pre:not(:last-child),.content table:not(:last-child),.content ul:not(:last-child){margin-bottom:1em}.content h1,.content h2,.content h3,.content h4,.content h5,.content h6{color:#363636;font-weight:600;line-height:1.125}.content h1{font-size:2em;margin-bottom:.5em}.content h1:not(:first-child){margin-top:1em}.content h2{font-size:1.75em;margin-bottom:.5714em}.content h2:not(:first-child){margin-top:1.1428em}.content h3{font-size:1.5em;margin-bottom:.6666em}.content h3:not(:first-child){margin-top:1.3333em}.content h4{font-size:1.25em;margin-bottom:.8em}.content h5{font-size:1.125em;margin-bottom:.8888em}.content h6{font-size:1em;margin-bottom:1em}.content blockquote{background-color:#f5f5f5;border-left:5px solid #dbdbdb;padding:1.25em 1.5em}.content ol{list-style-position:outside;margin-left:2em;margin-top:1em}.content ol:not([type]){list-style-type:decimal}.content ol:not([type]).is-lower-alpha{list-style-type:lower-alpha}.content ol:not([type]).is-lower-roman{list-style-type:lower-roman}.content ol:not([type]).is-upper-alpha{list-style-type:upper-alpha}.content ol:not([type]).is-upper-roman{list-style-type:upper-roman}.content ul{list-style:disc outside;margin-left:2em;margin-top:1em}.content ul ul{list-style-type:circle;margin-top:.5em}.content ul ul ul{list-style-type:square}.content dd{margin-left:2em}.content figure{margin-left:2em;margin-right:2em;text-align:center}.content figure:not(:first-child){margin-top:2em}.content figure:not(:last-child){margin-bottom:2em}.content figure img{display:inline-block}.content figure figcaption{font-style:italic}.content pre{-webkit-overflow-scrolling:touch;overflow-x:auto;padding:1.25em 1.5em;white-space:pre;word-wrap:normal}.content sub,.content sup{font-size:75%}.content table{width:100%}.content table td,.content table th{border:1px solid #dbdbdb;border-width:0 0 1px;padding:.5em .75em;vertical-align:top}.content table th{color:#363636}.content table th:not([align]){text-align:left}.content table thead td,.content table thead th{border-width:0 0 2px;color:#363636}.content table tfoot td,.content table tfoot th{border-width:2px 0 0;color:#363636}.content table tbody tr:last-child td,.content table tbody tr:last-child th{border-bottom-width:0}.content .tabs li+li{margin-top:0}.content.is-small{font-size:.75rem}.content.is-medium{font-size:1.25rem}.content.is-large{font-size:1.5rem}.icon{align-items:center;display:inline-flex;justify-content:center;height:1.5rem;width:1.5rem}.icon.is-small{height:1rem;width:1rem}.icon.is-medium{height:2rem;width:2rem}.icon.is-large{height:3rem;width:3rem}.image{display:block;position:relative}.image img{display:block;height:auto;width:100%}.image 
img.is-rounded{border-radius:290486px}.image.is-16by9 .has-ratio,.image.is-16by9 img,.image.is-1by1 .has-ratio,.image.is-1by1 img,.image.is-1by2 .has-ratio,.image.is-1by2 img,.image.is-1by3 .has-ratio,.image.is-1by3 img,.image.is-2by1 .has-ratio,.image.is-2by1 img,.image.is-2by3 .has-ratio,.image.is-2by3 img,.image.is-3by1 .has-ratio,.image.is-3by1 img,.image.is-3by2 .has-ratio,.image.is-3by2 img,.image.is-3by4 .has-ratio,.image.is-3by4 img,.image.is-3by5 .has-ratio,.image.is-3by5 img,.image.is-4by3 .has-ratio,.image.is-4by3 img,.image.is-4by5 .has-ratio,.image.is-4by5 img,.image.is-5by3 .has-ratio,.image.is-5by3 img,.image.is-5by4 .has-ratio,.image.is-5by4 img,.image.is-9by16 .has-ratio,.image.is-9by16 img,.image.is-square .has-ratio,.image.is-square img{height:100%;width:100%}.image.is-1by1,.image.is-square{padding-top:100%}.image.is-5by4{padding-top:80%}.image.is-4by3{padding-top:75%}.image.is-3by2{padding-top:66.6666%}.image.is-5by3{padding-top:60%}.image.is-16by9{padding-top:56.25%}.image.is-2by1{padding-top:50%}.image.is-3by1{padding-top:33.3333%}.image.is-4by5{padding-top:125%}.image.is-3by4{padding-top:133.3333%}.image.is-2by3{padding-top:150%}.image.is-3by5{padding-top:166.6666%}.image.is-9by16{padding-top:177.7777%}.image.is-1by2{padding-top:200%}.image.is-1by3{padding-top:300%}.image.is-16x16{height:16px;width:16px}.image.is-24x24{height:24px;width:24px}.image.is-32x32{height:32px;width:32px}.image.is-48x48{height:48px;width:48px}.image.is-64x64{height:64px;width:64px}.image.is-96x96{height:96px;width:96px}.image.is-128x128{height:128px;width:128px}.notification{background-color:#f5f5f5;border-radius:4px;padding:1.25rem 2.5rem 1.25rem 1.5rem;position:relative}.notification a:not(.button):not(.dropdown-item){color:currentColor;text-decoration:underline}.notification strong{color:currentColor}.notification code,.notification pre{background:#fff}.notification pre code{background:0 0}.notification>.delete{position:absolute;right:.5rem;top:.5rem}.notification .content,.notification .subtitle,.notification .title{color:currentColor}.notification.is-white{background-color:#fff;color:#0a0a0a}.notification.is-black{background-color:#0a0a0a;color:#fff}.notification.is-light{background-color:#f5f5f5;color:#363636}.notification.is-dark{background-color:#363636;color:#f5f5f5}.notification.is-primary{background-color:#00d1b2;color:#fff}.notification.is-link{background-color:#3273dc;color:#fff}.notification.is-info{background-color:#209cee;color:#fff}.notification.is-success{background-color:#23d160;color:#fff}.notification.is-warning{background-color:#ffdd57;color:rgba(0,0,0,.7)}.notification.is-danger{background-color:#ff3860;color:#fff}.progress{-moz-appearance:none;-webkit-appearance:none;border:none;border-radius:290486px;display:block;height:1rem;overflow:hidden;padding:0;width:100%}.progress::-webkit-progress-bar{background-color:#dbdbdb}.progress::-webkit-progress-value{background-color:#4a4a4a}.progress::-moz-progress-bar{background-color:#4a4a4a}.progress::-ms-fill{background-color:#4a4a4a;border:none}.progress.is-white::-webkit-progress-value{background-color:#fff}.progress.is-white::-moz-progress-bar{background-color:#fff}.progress.is-white::-ms-fill{background-color:#fff}.progress.is-white:indeterminate{background-image:linear-gradient(to right,#fff 30%,#dbdbdb 
30%)}.progress.is-black::-webkit-progress-value{background-color:#0a0a0a}.progress.is-black::-moz-progress-bar{background-color:#0a0a0a}.progress.is-black::-ms-fill{background-color:#0a0a0a}.progress.is-black:indeterminate{background-image:linear-gradient(to right,#0a0a0a 30%,#dbdbdb 30%)}.progress.is-light::-webkit-progress-value{background-color:#f5f5f5}.progress.is-light::-moz-progress-bar{background-color:#f5f5f5}.progress.is-light::-ms-fill{background-color:#f5f5f5}.progress.is-light:indeterminate{background-image:linear-gradient(to right,#f5f5f5 30%,#dbdbdb 30%)}.progress.is-dark::-webkit-progress-value{background-color:#363636}.progress.is-dark::-moz-progress-bar{background-color:#363636}.progress.is-dark::-ms-fill{background-color:#363636}.progress.is-dark:indeterminate{background-image:linear-gradient(to right,#363636 30%,#dbdbdb 30%)}.progress.is-primary::-webkit-progress-value{background-color:#00d1b2}.progress.is-primary::-moz-progress-bar{background-color:#00d1b2}.progress.is-primary::-ms-fill{background-color:#00d1b2}.progress.is-primary:indeterminate{background-image:linear-gradient(to right,#00d1b2 30%,#dbdbdb 30%)}.progress.is-link::-webkit-progress-value{background-color:#3273dc}.progress.is-link::-moz-progress-bar{background-color:#3273dc}.progress.is-link::-ms-fill{background-color:#3273dc}.progress.is-link:indeterminate{background-image:linear-gradient(to right,#3273dc 30%,#dbdbdb 30%)}.progress.is-info::-webkit-progress-value{background-color:#209cee}.progress.is-info::-moz-progress-bar{background-color:#209cee}.progress.is-info::-ms-fill{background-color:#209cee}.progress.is-info:indeterminate{background-image:linear-gradient(to right,#209cee 30%,#dbdbdb 30%)}.progress.is-success::-webkit-progress-value{background-color:#23d160}.progress.is-success::-moz-progress-bar{background-color:#23d160}.progress.is-success::-ms-fill{background-color:#23d160}.progress.is-success:indeterminate{background-image:linear-gradient(to right,#23d160 30%,#dbdbdb 30%)}.progress.is-warning::-webkit-progress-value{background-color:#ffdd57}.progress.is-warning::-moz-progress-bar{background-color:#ffdd57}.progress.is-warning::-ms-fill{background-color:#ffdd57}.progress.is-warning:indeterminate{background-image:linear-gradient(to right,#ffdd57 30%,#dbdbdb 30%)}.progress.is-danger::-webkit-progress-value{background-color:#ff3860}.progress.is-danger::-moz-progress-bar{background-color:#ff3860}.progress.is-danger::-ms-fill{background-color:#ff3860}.progress.is-danger:indeterminate{background-image:linear-gradient(to right,#ff3860 30%,#dbdbdb 30%)}.progress:indeterminate{-webkit-animation-duration:1.5s;animation-duration:1.5s;-webkit-animation-iteration-count:infinite;animation-iteration-count:infinite;-webkit-animation-name:moveIndeterminate;animation-name:moveIndeterminate;-webkit-animation-timing-function:linear;animation-timing-function:linear;background-color:#dbdbdb;background-image:linear-gradient(to right,#4a4a4a 30%,#dbdbdb 30%);background-position:top left;background-repeat:no-repeat;background-size:150% 150%}.progress:indeterminate::-webkit-progress-bar{background-color:transparent}.progress:indeterminate::-moz-progress-bar{background-color:transparent}.progress.is-small{height:.75rem}.progress.is-medium{height:1.25rem}.progress.is-large{height:1.5rem}@-webkit-keyframes moveIndeterminate{from{background-position:200% 0}to{background-position:-200% 0}}@keyframes moveIndeterminate{from{background-position:200% 0}to{background-position:-200% 
0}}.table{background-color:#fff;color:#363636}.table td,.table th{border:1px solid #dbdbdb;border-width:0 0 1px;padding:.5em .75em;vertical-align:top}.table td.is-white,.table th.is-white{background-color:#fff;border-color:#fff;color:#0a0a0a}.table td.is-black,.table th.is-black{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}.table td.is-light,.table th.is-light{background-color:#f5f5f5;border-color:#f5f5f5;color:#363636}.table td.is-dark,.table th.is-dark{background-color:#363636;border-color:#363636;color:#f5f5f5}.table td.is-primary,.table th.is-primary{background-color:#00d1b2;border-color:#00d1b2;color:#fff}.table td.is-link,.table th.is-link{background-color:#3273dc;border-color:#3273dc;color:#fff}.table td.is-info,.table th.is-info{background-color:#209cee;border-color:#209cee;color:#fff}.table td.is-success,.table th.is-success{background-color:#23d160;border-color:#23d160;color:#fff}.table td.is-warning,.table th.is-warning{background-color:#ffdd57;border-color:#ffdd57;color:rgba(0,0,0,.7)}.table td.is-danger,.table th.is-danger{background-color:#ff3860;border-color:#ff3860;color:#fff}.table td.is-narrow,.table th.is-narrow{white-space:nowrap;width:1%}.table td.is-selected,.table th.is-selected{background-color:#00d1b2;color:#fff}.table td.is-selected a,.table td.is-selected strong,.table th.is-selected a,.table th.is-selected strong{color:currentColor}.table th{color:#363636}.table th:not([align]){text-align:left}.table tr.is-selected{background-color:#00d1b2;color:#fff}.table tr.is-selected a,.table tr.is-selected strong{color:currentColor}.table tr.is-selected td,.table tr.is-selected th{border-color:#fff;color:currentColor}.table thead{background-color:transparent}.table thead td,.table thead th{border-width:0 0 2px;color:#363636}.table tfoot{background-color:transparent}.table tfoot td,.table tfoot th{border-width:2px 0 0;color:#363636}.table tbody{background-color:transparent}.table tbody tr:last-child td,.table tbody tr:last-child th{border-bottom-width:0}.table.is-bordered td,.table.is-bordered th{border-width:1px}.table.is-bordered tr:last-child td,.table.is-bordered tr:last-child th{border-bottom-width:1px}.table.is-fullwidth{width:100%}.table.is-hoverable tbody tr:not(.is-selected):hover{background-color:#fafafa}.table.is-hoverable.is-striped tbody tr:not(.is-selected):hover{background-color:#fafafa}.table.is-hoverable.is-striped tbody tr:not(.is-selected):hover:nth-child(even){background-color:#f5f5f5}.table.is-narrow td,.table.is-narrow th{padding:.25em .5em}.table.is-striped tbody tr:not(.is-selected):nth-child(even){background-color:#fafafa}.table-container{-webkit-overflow-scrolling:touch;overflow:auto;overflow-y:hidden;max-width:100%}.tags{align-items:center;display:flex;flex-wrap:wrap;justify-content:flex-start}.tags .tag{margin-bottom:.5rem}.tags .tag:not(:last-child){margin-right:.5rem}.tags:last-child{margin-bottom:-.5rem}.tags:not(:last-child){margin-bottom:1rem}.tags.are-medium .tag:not(.is-normal):not(.is-large){font-size:1rem}.tags.are-large .tag:not(.is-normal):not(.is-medium){font-size:1.25rem}.tags.is-centered{justify-content:center}.tags.is-centered .tag{margin-right:.25rem;margin-left:.25rem}.tags.is-right{justify-content:flex-end}.tags.is-right .tag:not(:first-child){margin-left:.5rem}.tags.is-right .tag:not(:last-child){margin-right:0}.tags.has-addons .tag{margin-right:0}.tags.has-addons .tag:not(:first-child){margin-left:0;border-bottom-left-radius:0;border-top-left-radius:0}.tags.has-addons 
.tag:not(:last-child){border-bottom-right-radius:0;border-top-right-radius:0}.tag:not(body){align-items:center;background-color:#f5f5f5;border-radius:4px;color:#4a4a4a;display:inline-flex;font-size:.75rem;height:2em;justify-content:center;line-height:1.5;padding-left:.75em;padding-right:.75em;white-space:nowrap}.tag:not(body) .delete{margin-left:.25rem;margin-right:-.375rem}.tag:not(body).is-white{background-color:#fff;color:#0a0a0a}.tag:not(body).is-black{background-color:#0a0a0a;color:#fff}.tag:not(body).is-light{background-color:#f5f5f5;color:#363636}.tag:not(body).is-dark{background-color:#363636;color:#f5f5f5}.tag:not(body).is-primary{background-color:#00d1b2;color:#fff}.tag:not(body).is-link{background-color:#3273dc;color:#fff}.tag:not(body).is-info{background-color:#209cee;color:#fff}.tag:not(body).is-success{background-color:#23d160;color:#fff}.tag:not(body).is-warning{background-color:#ffdd57;color:rgba(0,0,0,.7)}.tag:not(body).is-danger{background-color:#ff3860;color:#fff}.tag:not(body).is-normal{font-size:.75rem}.tag:not(body).is-medium{font-size:1rem}.tag:not(body).is-large{font-size:1.25rem}.tag:not(body) .icon:first-child:not(:last-child){margin-left:-.375em;margin-right:.1875em}.tag:not(body) .icon:last-child:not(:first-child){margin-left:.1875em;margin-right:-.375em}.tag:not(body) .icon:first-child:last-child{margin-left:-.375em;margin-right:-.375em}.tag:not(body).is-delete{margin-left:1px;padding:0;position:relative;width:2em}.tag:not(body).is-delete::after,.tag:not(body).is-delete::before{background-color:currentColor;content:"";display:block;left:50%;position:absolute;top:50%;-webkit-transform:translateX(-50%) translateY(-50%) rotate(45deg);transform:translateX(-50%) translateY(-50%) rotate(45deg);-webkit-transform-origin:center center;transform-origin:center center}.tag:not(body).is-delete::before{height:1px;width:50%}.tag:not(body).is-delete::after{height:50%;width:1px}.tag:not(body).is-delete:focus,.tag:not(body).is-delete:hover{background-color:#e8e8e8}.tag:not(body).is-delete:active{background-color:#dbdbdb}.tag:not(body).is-rounded{border-radius:290486px}a.tag:hover{text-decoration:underline}.subtitle,.title{word-break:break-word}.subtitle em,.subtitle span,.title em,.title span{font-weight:inherit}.subtitle sub,.title sub{font-size:.75em}.subtitle sup,.title sup{font-size:.75em}.subtitle .tag,.title .tag{vertical-align:middle}.title{color:#363636;font-size:2rem;font-weight:600;line-height:1.125}.title strong{color:inherit;font-weight:inherit}.title+.highlight{margin-top:-.75rem}.title:not(.is-spaced)+.subtitle{margin-top:-1.25rem}.title.is-1{font-size:3rem}.title.is-2{font-size:2.5rem}.title.is-3{font-size:2rem}.title.is-4{font-size:1.5rem}.title.is-5{font-size:1.25rem}.title.is-6{font-size:1rem}.title.is-7{font-size:.75rem}.subtitle{color:#4a4a4a;font-size:1.25rem;font-weight:400;line-height:1.25}.subtitle strong{color:#363636;font-weight:600}.subtitle:not(.is-spaced)+.title{margin-top:-1.25rem}.subtitle.is-1{font-size:3rem}.subtitle.is-2{font-size:2.5rem}.subtitle.is-3{font-size:2rem}.subtitle.is-4{font-size:1.5rem}.subtitle.is-5{font-size:1.25rem}.subtitle.is-6{font-size:1rem}.subtitle.is-7{font-size:.75rem}.heading{display:block;font-size:11px;letter-spacing:1px;margin-bottom:5px;text-transform:uppercase}.highlight{font-weight:400;max-width:100%;overflow:hidden;padding:0}.highlight 
pre{overflow:auto;max-width:100%}.number{align-items:center;background-color:#f5f5f5;border-radius:290486px;display:inline-flex;font-size:1.25rem;height:2em;justify-content:center;margin-right:1.5rem;min-width:2.5em;padding:.25rem .5rem;text-align:center;vertical-align:top}.input,.select select,.textarea{background-color:#fff;border-color:#dbdbdb;border-radius:4px;color:#363636}.input::-moz-placeholder,.select select::-moz-placeholder,.textarea::-moz-placeholder{color:rgba(54,54,54,.3)}.input::-webkit-input-placeholder,.select select::-webkit-input-placeholder,.textarea::-webkit-input-placeholder{color:rgba(54,54,54,.3)}.input:-moz-placeholder,.select select:-moz-placeholder,.textarea:-moz-placeholder{color:rgba(54,54,54,.3)}.input:-ms-input-placeholder,.select select:-ms-input-placeholder,.textarea:-ms-input-placeholder{color:rgba(54,54,54,.3)}.input:hover,.is-hovered.input,.is-hovered.textarea,.select select.is-hovered,.select select:hover,.textarea:hover{border-color:#b5b5b5}.input:active,.input:focus,.is-active.input,.is-active.textarea,.is-focused.input,.is-focused.textarea,.select select.is-active,.select select.is-focused,.select select:active,.select select:focus,.textarea:active,.textarea:focus{border-color:#3273dc;box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.input[disabled],.select fieldset[disabled] select,.select select[disabled],.textarea[disabled],fieldset[disabled] .input,fieldset[disabled] .select select,fieldset[disabled] .textarea{background-color:#f5f5f5;border-color:#f5f5f5;box-shadow:none;color:#7a7a7a}.input[disabled]::-moz-placeholder,.select fieldset[disabled] select::-moz-placeholder,.select select[disabled]::-moz-placeholder,.textarea[disabled]::-moz-placeholder,fieldset[disabled] .input::-moz-placeholder,fieldset[disabled] .select select::-moz-placeholder,fieldset[disabled] .textarea::-moz-placeholder{color:rgba(122,122,122,.3)}.input[disabled]::-webkit-input-placeholder,.select fieldset[disabled] select::-webkit-input-placeholder,.select select[disabled]::-webkit-input-placeholder,.textarea[disabled]::-webkit-input-placeholder,fieldset[disabled] .input::-webkit-input-placeholder,fieldset[disabled] .select select::-webkit-input-placeholder,fieldset[disabled] .textarea::-webkit-input-placeholder{color:rgba(122,122,122,.3)}.input[disabled]:-moz-placeholder,.select fieldset[disabled] select:-moz-placeholder,.select select[disabled]:-moz-placeholder,.textarea[disabled]:-moz-placeholder,fieldset[disabled] .input:-moz-placeholder,fieldset[disabled] .select select:-moz-placeholder,fieldset[disabled] .textarea:-moz-placeholder{color:rgba(122,122,122,.3)}.input[disabled]:-ms-input-placeholder,.select fieldset[disabled] select:-ms-input-placeholder,.select select[disabled]:-ms-input-placeholder,.textarea[disabled]:-ms-input-placeholder,fieldset[disabled] .input:-ms-input-placeholder,fieldset[disabled] .select select:-ms-input-placeholder,fieldset[disabled] .textarea:-ms-input-placeholder{color:rgba(122,122,122,.3)}.input,.textarea{box-shadow:inset 0 1px 2px rgba(10,10,10,.1);max-width:100%;width:100%}.input[readonly],.textarea[readonly]{box-shadow:none}.is-white.input,.is-white.textarea{border-color:#fff}.is-white.input:active,.is-white.input:focus,.is-white.is-active.input,.is-white.is-active.textarea,.is-white.is-focused.input,.is-white.is-focused.textarea,.is-white.textarea:active,.is-white.textarea:focus{box-shadow:0 0 0 .125em 
rgba(255,255,255,.25)}.is-black.input,.is-black.textarea{border-color:#0a0a0a}.is-black.input:active,.is-black.input:focus,.is-black.is-active.input,.is-black.is-active.textarea,.is-black.is-focused.input,.is-black.is-focused.textarea,.is-black.textarea:active,.is-black.textarea:focus{box-shadow:0 0 0 .125em rgba(10,10,10,.25)}.is-light.input,.is-light.textarea{border-color:#f5f5f5}.is-light.input:active,.is-light.input:focus,.is-light.is-active.input,.is-light.is-active.textarea,.is-light.is-focused.input,.is-light.is-focused.textarea,.is-light.textarea:active,.is-light.textarea:focus{box-shadow:0 0 0 .125em rgba(245,245,245,.25)}.is-dark.input,.is-dark.textarea{border-color:#363636}.is-dark.input:active,.is-dark.input:focus,.is-dark.is-active.input,.is-dark.is-active.textarea,.is-dark.is-focused.input,.is-dark.is-focused.textarea,.is-dark.textarea:active,.is-dark.textarea:focus{box-shadow:0 0 0 .125em rgba(54,54,54,.25)}.is-primary.input,.is-primary.textarea{border-color:#00d1b2}.is-primary.input:active,.is-primary.input:focus,.is-primary.is-active.input,.is-primary.is-active.textarea,.is-primary.is-focused.input,.is-primary.is-focused.textarea,.is-primary.textarea:active,.is-primary.textarea:focus{box-shadow:0 0 0 .125em rgba(0,209,178,.25)}.is-link.input,.is-link.textarea{border-color:#3273dc}.is-link.input:active,.is-link.input:focus,.is-link.is-active.input,.is-link.is-active.textarea,.is-link.is-focused.input,.is-link.is-focused.textarea,.is-link.textarea:active,.is-link.textarea:focus{box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.is-info.input,.is-info.textarea{border-color:#209cee}.is-info.input:active,.is-info.input:focus,.is-info.is-active.input,.is-info.is-active.textarea,.is-info.is-focused.input,.is-info.is-focused.textarea,.is-info.textarea:active,.is-info.textarea:focus{box-shadow:0 0 0 .125em rgba(32,156,238,.25)}.is-success.input,.is-success.textarea{border-color:#23d160}.is-success.input:active,.is-success.input:focus,.is-success.is-active.input,.is-success.is-active.textarea,.is-success.is-focused.input,.is-success.is-focused.textarea,.is-success.textarea:active,.is-success.textarea:focus{box-shadow:0 0 0 .125em rgba(35,209,96,.25)}.is-warning.input,.is-warning.textarea{border-color:#ffdd57}.is-warning.input:active,.is-warning.input:focus,.is-warning.is-active.input,.is-warning.is-active.textarea,.is-warning.is-focused.input,.is-warning.is-focused.textarea,.is-warning.textarea:active,.is-warning.textarea:focus{box-shadow:0 0 0 .125em rgba(255,221,87,.25)}.is-danger.input,.is-danger.textarea{border-color:#ff3860}.is-danger.input:active,.is-danger.input:focus,.is-danger.is-active.input,.is-danger.is-active.textarea,.is-danger.is-focused.input,.is-danger.is-focused.textarea,.is-danger.textarea:active,.is-danger.textarea:focus{box-shadow:0 0 0 .125em 
rgba(255,56,96,.25)}.is-small.input,.is-small.textarea{border-radius:2px;font-size:.75rem}.is-medium.input,.is-medium.textarea{font-size:1.25rem}.is-large.input,.is-large.textarea{font-size:1.5rem}.is-fullwidth.input,.is-fullwidth.textarea{display:block;width:100%}.is-inline.input,.is-inline.textarea{display:inline;width:auto}.input.is-rounded{border-radius:290486px;padding-left:1em;padding-right:1em}.input.is-static{background-color:transparent;border-color:transparent;box-shadow:none;padding-left:0;padding-right:0}.textarea{display:block;max-width:100%;min-width:100%;padding:.625em;resize:vertical}.textarea:not([rows]){max-height:600px;min-height:120px}.textarea[rows]{height:initial}.textarea.has-fixed-size{resize:none}.checkbox,.radio{cursor:pointer;display:inline-block;line-height:1.25;position:relative}.checkbox input,.radio input{cursor:pointer}.checkbox:hover,.radio:hover{color:#363636}.checkbox[disabled],.radio[disabled],fieldset[disabled] .checkbox,fieldset[disabled] .radio{color:#7a7a7a;cursor:not-allowed}.radio+.radio{margin-left:.5em}.select{display:inline-block;max-width:100%;position:relative;vertical-align:top}.select:not(.is-multiple){height:2.25em}.select:not(.is-multiple):not(.is-loading)::after{border-color:#3273dc;right:1.125em;z-index:4}.select.is-rounded select{border-radius:290486px;padding-left:1em}.select select{cursor:pointer;display:block;font-size:1em;max-width:100%;outline:0}.select select::-ms-expand{display:none}.select select[disabled]:hover,fieldset[disabled] .select select:hover{border-color:#f5f5f5}.select select:not([multiple]){padding-right:2.5em}.select select[multiple]{height:auto;padding:0}.select select[multiple] option{padding:.5em 1em}.select:not(.is-multiple):not(.is-loading):hover::after{border-color:#363636}.select.is-white:not(:hover)::after{border-color:#fff}.select.is-white select{border-color:#fff}.select.is-white select.is-hovered,.select.is-white select:hover{border-color:#f2f2f2}.select.is-white select.is-active,.select.is-white select.is-focused,.select.is-white select:active,.select.is-white select:focus{box-shadow:0 0 0 .125em rgba(255,255,255,.25)}.select.is-black:not(:hover)::after{border-color:#0a0a0a}.select.is-black select{border-color:#0a0a0a}.select.is-black select.is-hovered,.select.is-black select:hover{border-color:#000}.select.is-black select.is-active,.select.is-black select.is-focused,.select.is-black select:active,.select.is-black select:focus{box-shadow:0 0 0 .125em rgba(10,10,10,.25)}.select.is-light:not(:hover)::after{border-color:#f5f5f5}.select.is-light select{border-color:#f5f5f5}.select.is-light select.is-hovered,.select.is-light select:hover{border-color:#e8e8e8}.select.is-light select.is-active,.select.is-light select.is-focused,.select.is-light select:active,.select.is-light select:focus{box-shadow:0 0 0 .125em rgba(245,245,245,.25)}.select.is-dark:not(:hover)::after{border-color:#363636}.select.is-dark select{border-color:#363636}.select.is-dark select.is-hovered,.select.is-dark select:hover{border-color:#292929}.select.is-dark select.is-active,.select.is-dark select.is-focused,.select.is-dark select:active,.select.is-dark select:focus{box-shadow:0 0 0 .125em rgba(54,54,54,.25)}.select.is-primary:not(:hover)::after{border-color:#00d1b2}.select.is-primary select{border-color:#00d1b2}.select.is-primary select.is-hovered,.select.is-primary select:hover{border-color:#00b89c}.select.is-primary select.is-active,.select.is-primary select.is-focused,.select.is-primary select:active,.select.is-primary 
select:focus{box-shadow:0 0 0 .125em rgba(0,209,178,.25)}.select.is-link:not(:hover)::after{border-color:#3273dc}.select.is-link select{border-color:#3273dc}.select.is-link select.is-hovered,.select.is-link select:hover{border-color:#2366d1}.select.is-link select.is-active,.select.is-link select.is-focused,.select.is-link select:active,.select.is-link select:focus{box-shadow:0 0 0 .125em rgba(50,115,220,.25)}.select.is-info:not(:hover)::after{border-color:#209cee}.select.is-info select{border-color:#209cee}.select.is-info select.is-hovered,.select.is-info select:hover{border-color:#118fe4}.select.is-info select.is-active,.select.is-info select.is-focused,.select.is-info select:active,.select.is-info select:focus{box-shadow:0 0 0 .125em rgba(32,156,238,.25)}.select.is-success:not(:hover)::after{border-color:#23d160}.select.is-success select{border-color:#23d160}.select.is-success select.is-hovered,.select.is-success select:hover{border-color:#20bc56}.select.is-success select.is-active,.select.is-success select.is-focused,.select.is-success select:active,.select.is-success select:focus{box-shadow:0 0 0 .125em rgba(35,209,96,.25)}.select.is-warning:not(:hover)::after{border-color:#ffdd57}.select.is-warning select{border-color:#ffdd57}.select.is-warning select.is-hovered,.select.is-warning select:hover{border-color:#ffd83d}.select.is-warning select.is-active,.select.is-warning select.is-focused,.select.is-warning select:active,.select.is-warning select:focus{box-shadow:0 0 0 .125em rgba(255,221,87,.25)}.select.is-danger:not(:hover)::after{border-color:#ff3860}.select.is-danger select{border-color:#ff3860}.select.is-danger select.is-hovered,.select.is-danger select:hover{border-color:#ff1f4b}.select.is-danger select.is-active,.select.is-danger select.is-focused,.select.is-danger select:active,.select.is-danger select:focus{box-shadow:0 0 0 .125em rgba(255,56,96,.25)}.select.is-small{border-radius:2px;font-size:.75rem}.select.is-medium{font-size:1.25rem}.select.is-large{font-size:1.5rem}.select.is-disabled::after{border-color:#7a7a7a}.select.is-fullwidth{width:100%}.select.is-fullwidth select{width:100%}.select.is-loading::after{margin-top:0;position:absolute;right:.625em;top:.625em;-webkit-transform:none;transform:none}.select.is-loading.is-small:after{font-size:.75rem}.select.is-loading.is-medium:after{font-size:1.25rem}.select.is-loading.is-large:after{font-size:1.5rem}.file{align-items:stretch;display:flex;justify-content:flex-start;position:relative}.file.is-white .file-cta{background-color:#fff;border-color:transparent;color:#0a0a0a}.file.is-white.is-hovered .file-cta,.file.is-white:hover .file-cta{background-color:#f9f9f9;border-color:transparent;color:#0a0a0a}.file.is-white.is-focused .file-cta,.file.is-white:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(255,255,255,.25);color:#0a0a0a}.file.is-white.is-active .file-cta,.file.is-white:active .file-cta{background-color:#f2f2f2;border-color:transparent;color:#0a0a0a}.file.is-black .file-cta{background-color:#0a0a0a;border-color:transparent;color:#fff}.file.is-black.is-hovered .file-cta,.file.is-black:hover .file-cta{background-color:#040404;border-color:transparent;color:#fff}.file.is-black.is-focused .file-cta,.file.is-black:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(10,10,10,.25);color:#fff}.file.is-black.is-active .file-cta,.file.is-black:active .file-cta{background-color:#000;border-color:transparent;color:#fff}.file.is-light 
.file-cta{background-color:#f5f5f5;border-color:transparent;color:#363636}.file.is-light.is-hovered .file-cta,.file.is-light:hover .file-cta{background-color:#eee;border-color:transparent;color:#363636}.file.is-light.is-focused .file-cta,.file.is-light:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(245,245,245,.25);color:#363636}.file.is-light.is-active .file-cta,.file.is-light:active .file-cta{background-color:#e8e8e8;border-color:transparent;color:#363636}.file.is-dark .file-cta{background-color:#363636;border-color:transparent;color:#f5f5f5}.file.is-dark.is-hovered .file-cta,.file.is-dark:hover .file-cta{background-color:#2f2f2f;border-color:transparent;color:#f5f5f5}.file.is-dark.is-focused .file-cta,.file.is-dark:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(54,54,54,.25);color:#f5f5f5}.file.is-dark.is-active .file-cta,.file.is-dark:active .file-cta{background-color:#292929;border-color:transparent;color:#f5f5f5}.file.is-primary .file-cta{background-color:#00d1b2;border-color:transparent;color:#fff}.file.is-primary.is-hovered .file-cta,.file.is-primary:hover .file-cta{background-color:#00c4a7;border-color:transparent;color:#fff}.file.is-primary.is-focused .file-cta,.file.is-primary:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(0,209,178,.25);color:#fff}.file.is-primary.is-active .file-cta,.file.is-primary:active .file-cta{background-color:#00b89c;border-color:transparent;color:#fff}.file.is-link .file-cta{background-color:#3273dc;border-color:transparent;color:#fff}.file.is-link.is-hovered .file-cta,.file.is-link:hover .file-cta{background-color:#276cda;border-color:transparent;color:#fff}.file.is-link.is-focused .file-cta,.file.is-link:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(50,115,220,.25);color:#fff}.file.is-link.is-active .file-cta,.file.is-link:active .file-cta{background-color:#2366d1;border-color:transparent;color:#fff}.file.is-info .file-cta{background-color:#209cee;border-color:transparent;color:#fff}.file.is-info.is-hovered .file-cta,.file.is-info:hover .file-cta{background-color:#1496ed;border-color:transparent;color:#fff}.file.is-info.is-focused .file-cta,.file.is-info:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(32,156,238,.25);color:#fff}.file.is-info.is-active .file-cta,.file.is-info:active .file-cta{background-color:#118fe4;border-color:transparent;color:#fff}.file.is-success .file-cta{background-color:#23d160;border-color:transparent;color:#fff}.file.is-success.is-hovered .file-cta,.file.is-success:hover .file-cta{background-color:#22c65b;border-color:transparent;color:#fff}.file.is-success.is-focused .file-cta,.file.is-success:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(35,209,96,.25);color:#fff}.file.is-success.is-active .file-cta,.file.is-success:active .file-cta{background-color:#20bc56;border-color:transparent;color:#fff}.file.is-warning .file-cta{background-color:#ffdd57;border-color:transparent;color:rgba(0,0,0,.7)}.file.is-warning.is-hovered .file-cta,.file.is-warning:hover .file-cta{background-color:#ffdb4a;border-color:transparent;color:rgba(0,0,0,.7)}.file.is-warning.is-focused .file-cta,.file.is-warning:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(255,221,87,.25);color:rgba(0,0,0,.7)}.file.is-warning.is-active .file-cta,.file.is-warning:active .file-cta{background-color:#ffd83d;border-color:transparent;color:rgba(0,0,0,.7)}.file.is-danger 
.file-cta{background-color:#ff3860;border-color:transparent;color:#fff}.file.is-danger.is-hovered .file-cta,.file.is-danger:hover .file-cta{background-color:#ff2b56;border-color:transparent;color:#fff}.file.is-danger.is-focused .file-cta,.file.is-danger:focus .file-cta{border-color:transparent;box-shadow:0 0 .5em rgba(255,56,96,.25);color:#fff}.file.is-danger.is-active .file-cta,.file.is-danger:active .file-cta{background-color:#ff1f4b;border-color:transparent;color:#fff}.file.is-small{font-size:.75rem}.file.is-medium{font-size:1.25rem}.file.is-medium .file-icon .fa{font-size:21px}.file.is-large{font-size:1.5rem}.file.is-large .file-icon .fa{font-size:28px}.file.has-name .file-cta{border-bottom-right-radius:0;border-top-right-radius:0}.file.has-name .file-name{border-bottom-left-radius:0;border-top-left-radius:0}.file.has-name.is-empty .file-cta{border-radius:4px}.file.has-name.is-empty .file-name{display:none}.file.is-boxed .file-label{flex-direction:column}.file.is-boxed .file-cta{flex-direction:column;height:auto;padding:1em 3em}.file.is-boxed .file-name{border-width:0 1px 1px}.file.is-boxed .file-icon{height:1.5em;width:1.5em}.file.is-boxed .file-icon .fa{font-size:21px}.file.is-boxed.is-small .file-icon .fa{font-size:14px}.file.is-boxed.is-medium .file-icon .fa{font-size:28px}.file.is-boxed.is-large .file-icon .fa{font-size:35px}.file.is-boxed.has-name .file-cta{border-radius:4px 4px 0 0}.file.is-boxed.has-name .file-name{border-radius:0 0 4px 4px;border-width:0 1px 1px}.file.is-centered{justify-content:center}.file.is-fullwidth .file-label{width:100%}.file.is-fullwidth .file-name{flex-grow:1;max-width:none}.file.is-right{justify-content:flex-end}.file.is-right .file-cta{border-radius:0 4px 4px 0}.file.is-right .file-name{border-radius:4px 0 0 4px;border-width:1px 0 1px 1px;order:-1}.file-label{align-items:stretch;display:flex;cursor:pointer;justify-content:flex-start;overflow:hidden;position:relative}.file-label:hover .file-cta{background-color:#eee;color:#363636}.file-label:hover .file-name{border-color:#d5d5d5}.file-label:active .file-cta{background-color:#e8e8e8;color:#363636}.file-label:active .file-name{border-color:#cfcfcf}.file-input{height:100%;left:0;opacity:0;outline:0;position:absolute;top:0;width:100%}.file-cta,.file-name{border-color:#dbdbdb;border-radius:4px;font-size:1em;padding-left:1em;padding-right:1em;white-space:nowrap}.file-cta{background-color:#f5f5f5;color:#4a4a4a}.file-name{border-color:#dbdbdb;border-style:solid;border-width:1px 1px 1px 0;display:block;max-width:16em;overflow:hidden;text-align:left;text-overflow:ellipsis}.file-icon{align-items:center;display:flex;height:1em;justify-content:center;margin-right:.5em;width:1em}.file-icon .fa{font-size:14px}.label{color:#363636;display:block;font-size:1rem;font-weight:700}.label:not(:last-child){margin-bottom:.5em}.label.is-small{font-size:.75rem}.label.is-medium{font-size:1.25rem}.label.is-large{font-size:1.5rem}.help{display:block;font-size:.75rem;margin-top:.25rem}.help.is-white{color:#fff}.help.is-black{color:#0a0a0a}.help.is-light{color:#f5f5f5}.help.is-dark{color:#363636}.help.is-primary{color:#00d1b2}.help.is-link{color:#3273dc}.help.is-info{color:#209cee}.help.is-success{color:#23d160}.help.is-warning{color:#ffdd57}.help.is-danger{color:#ff3860}.field:not(:last-child){margin-bottom:.75rem}.field.has-addons{display:flex;justify-content:flex-start}.field.has-addons .control:not(:last-child){margin-right:-1px}.field.has-addons .control:not(:first-child):not(:last-child) .button,.field.has-addons 
.control:not(:first-child):not(:last-child) .input,.field.has-addons .control:not(:first-child):not(:last-child) .select select{border-radius:0}.field.has-addons .control:first-child:not(:only-child) .button,.field.has-addons .control:first-child:not(:only-child) .input,.field.has-addons .control:first-child:not(:only-child) .select select{border-bottom-right-radius:0;border-top-right-radius:0}.field.has-addons .control:last-child:not(:only-child) .button,.field.has-addons .control:last-child:not(:only-child) .input,.field.has-addons .control:last-child:not(:only-child) .select select{border-bottom-left-radius:0;border-top-left-radius:0}.field.has-addons .control .button:not([disabled]).is-hovered,.field.has-addons .control .button:not([disabled]):hover,.field.has-addons .control .input:not([disabled]).is-hovered,.field.has-addons .control .input:not([disabled]):hover,.field.has-addons .control .select select:not([disabled]).is-hovered,.field.has-addons .control .select select:not([disabled]):hover{z-index:2}.field.has-addons .control .button:not([disabled]).is-active,.field.has-addons .control .button:not([disabled]).is-focused,.field.has-addons .control .button:not([disabled]):active,.field.has-addons .control .button:not([disabled]):focus,.field.has-addons .control .input:not([disabled]).is-active,.field.has-addons .control .input:not([disabled]).is-focused,.field.has-addons .control .input:not([disabled]):active,.field.has-addons .control .input:not([disabled]):focus,.field.has-addons .control .select select:not([disabled]).is-active,.field.has-addons .control .select select:not([disabled]).is-focused,.field.has-addons .control .select select:not([disabled]):active,.field.has-addons .control .select select:not([disabled]):focus{z-index:3}.field.has-addons .control .button:not([disabled]).is-active:hover,.field.has-addons .control .button:not([disabled]).is-focused:hover,.field.has-addons .control .button:not([disabled]):active:hover,.field.has-addons .control .button:not([disabled]):focus:hover,.field.has-addons .control .input:not([disabled]).is-active:hover,.field.has-addons .control .input:not([disabled]).is-focused:hover,.field.has-addons .control .input:not([disabled]):active:hover,.field.has-addons .control .input:not([disabled]):focus:hover,.field.has-addons .control .select select:not([disabled]).is-active:hover,.field.has-addons .control .select select:not([disabled]).is-focused:hover,.field.has-addons .control .select select:not([disabled]):active:hover,.field.has-addons .control .select select:not([disabled]):focus:hover{z-index:4}.field.has-addons .control.is-expanded{flex-grow:1;flex-shrink:1}.field.has-addons.has-addons-centered{justify-content:center}.field.has-addons.has-addons-right{justify-content:flex-end}.field.has-addons.has-addons-fullwidth 
.control{flex-grow:1;flex-shrink:0}.field.is-grouped{display:flex;justify-content:flex-start}.field.is-grouped>.control{flex-shrink:0}.field.is-grouped>.control:not(:last-child){margin-bottom:0;margin-right:.75rem}.field.is-grouped>.control.is-expanded{flex-grow:1;flex-shrink:1}.field.is-grouped.is-grouped-centered{justify-content:center}.field.is-grouped.is-grouped-right{justify-content:flex-end}.field.is-grouped.is-grouped-multiline{flex-wrap:wrap}.field.is-grouped.is-grouped-multiline>.control:last-child,.field.is-grouped.is-grouped-multiline>.control:not(:last-child){margin-bottom:.75rem}.field.is-grouped.is-grouped-multiline:last-child{margin-bottom:-.75rem}.field.is-grouped.is-grouped-multiline:not(:last-child){margin-bottom:0}@media screen and (min-width:769px),print{.field.is-horizontal{display:flex}}.field-label .label{font-size:inherit}@media screen and (max-width:768px){.field-label{margin-bottom:.5rem}}@media screen and (min-width:769px),print{.field-label{flex-basis:0;flex-grow:1;flex-shrink:0;margin-right:1.5rem;text-align:right}.field-label.is-small{font-size:.75rem;padding-top:.375em}.field-label.is-normal{padding-top:.375em}.field-label.is-medium{font-size:1.25rem;padding-top:.375em}.field-label.is-large{font-size:1.5rem;padding-top:.375em}}.field-body .field .field{margin-bottom:0}@media screen and (min-width:769px),print{.field-body{display:flex;flex-basis:0;flex-grow:5;flex-shrink:1}.field-body .field{margin-bottom:0}.field-body>.field{flex-shrink:1}.field-body>.field:not(.is-narrow){flex-grow:1}.field-body>.field:not(:last-child){margin-right:.75rem}}.control{box-sizing:border-box;clear:both;font-size:1rem;position:relative;text-align:left}.control.has-icons-left .input:focus~.icon,.control.has-icons-left .select:focus~.icon,.control.has-icons-right .input:focus~.icon,.control.has-icons-right .select:focus~.icon{color:#7a7a7a}.control.has-icons-left .input.is-small~.icon,.control.has-icons-left .select.is-small~.icon,.control.has-icons-right .input.is-small~.icon,.control.has-icons-right .select.is-small~.icon{font-size:.75rem}.control.has-icons-left .input.is-medium~.icon,.control.has-icons-left .select.is-medium~.icon,.control.has-icons-right .input.is-medium~.icon,.control.has-icons-right .select.is-medium~.icon{font-size:1.25rem}.control.has-icons-left .input.is-large~.icon,.control.has-icons-left .select.is-large~.icon,.control.has-icons-right .input.is-large~.icon,.control.has-icons-right .select.is-large~.icon{font-size:1.5rem}.control.has-icons-left .icon,.control.has-icons-right .icon{color:#dbdbdb;height:2.25em;pointer-events:none;position:absolute;top:0;width:2.25em;z-index:4}.control.has-icons-left .input,.control.has-icons-left .select select{padding-left:2.25em}.control.has-icons-left .icon.is-left{left:0}.control.has-icons-right .input,.control.has-icons-right .select select{padding-right:2.25em}.control.has-icons-right .icon.is-right{right:0}.control.is-loading::after{position:absolute!important;right:.625em;top:.625em;z-index:4}.control.is-loading.is-small:after{font-size:.75rem}.control.is-loading.is-medium:after{font-size:1.25rem}.control.is-loading.is-large:after{font-size:1.5rem}.breadcrumb{font-size:1rem;white-space:nowrap}.breadcrumb a{align-items:center;color:#3273dc;display:flex;justify-content:center;padding:0 .75em}.breadcrumb a:hover{color:#363636}.breadcrumb li{align-items:center;display:flex}.breadcrumb li:first-child a{padding-left:0}.breadcrumb li.is-active a{color:#363636;cursor:default;pointer-events:none}.breadcrumb 
li+li::before{color:#b5b5b5;content:"\0002f"}.breadcrumb ol,.breadcrumb ul{align-items:flex-start;display:flex;flex-wrap:wrap;justify-content:flex-start}.breadcrumb .icon:first-child{margin-right:.5em}.breadcrumb .icon:last-child{margin-left:.5em}.breadcrumb.is-centered ol,.breadcrumb.is-centered ul{justify-content:center}.breadcrumb.is-right ol,.breadcrumb.is-right ul{justify-content:flex-end}.breadcrumb.is-small{font-size:.75rem}.breadcrumb.is-medium{font-size:1.25rem}.breadcrumb.is-large{font-size:1.5rem}.breadcrumb.has-arrow-separator li+li::before{content:"\02192"}.breadcrumb.has-bullet-separator li+li::before{content:"\02022"}.breadcrumb.has-dot-separator li+li::before{content:"\000b7"}.breadcrumb.has-succeeds-separator li+li::before{content:"\0227B"}.card{background-color:#fff;box-shadow:0 2px 3px rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.1);color:#4a4a4a;max-width:100%;position:relative}.card-header{background-color:transparent;align-items:stretch;box-shadow:0 1px 2px rgba(10,10,10,.1);display:flex}.card-header-title{align-items:center;color:#363636;display:flex;flex-grow:1;font-weight:700;padding:.75rem}.card-header-title.is-centered{justify-content:center}.card-header-icon{align-items:center;cursor:pointer;display:flex;justify-content:center;padding:.75rem}.card-image{display:block;position:relative}.card-content{background-color:transparent;padding:1.5rem}.card-footer{background-color:transparent;border-top:1px solid #dbdbdb;align-items:stretch;display:flex}.card-footer-item{align-items:center;display:flex;flex-basis:0;flex-grow:1;flex-shrink:0;justify-content:center;padding:.75rem}.card-footer-item:not(:last-child){border-right:1px solid #dbdbdb}.card .media:not(:last-child){margin-bottom:1.5rem}.dropdown{display:inline-flex;position:relative;vertical-align:top}.dropdown.is-active .dropdown-menu,.dropdown.is-hoverable:hover .dropdown-menu{display:block}.dropdown.is-right .dropdown-menu{left:auto;right:0}.dropdown.is-up .dropdown-menu{bottom:100%;padding-bottom:4px;padding-top:initial;top:auto}.dropdown-menu{display:none;left:0;min-width:12rem;padding-top:4px;position:absolute;top:100%;z-index:20}.dropdown-content{background-color:#fff;border-radius:4px;box-shadow:0 2px 3px rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.1);padding-bottom:.5rem;padding-top:.5rem}.dropdown-item{color:#4a4a4a;display:block;font-size:.875rem;line-height:1.5;padding:.375rem 1rem;position:relative}a.dropdown-item,button.dropdown-item{padding-right:3rem;text-align:left;white-space:nowrap;width:100%}a.dropdown-item:hover,button.dropdown-item:hover{background-color:#f5f5f5;color:#0a0a0a}a.dropdown-item.is-active,button.dropdown-item.is-active{background-color:#3273dc;color:#fff}.dropdown-divider{background-color:#dbdbdb;border:none;display:block;height:1px;margin:.5rem 0}.level{align-items:center;justify-content:space-between}.level code{border-radius:4px}.level img{display:inline-block;vertical-align:top}.level.is-mobile{display:flex}.level.is-mobile .level-left,.level.is-mobile .level-right{display:flex}.level.is-mobile .level-left+.level-right{margin-top:0}.level.is-mobile .level-item:not(:last-child){margin-bottom:0;margin-right:.75rem}.level.is-mobile .level-item:not(.is-narrow){flex-grow:1}@media screen and (min-width:769px),print{.level{display:flex}.level>.level-item:not(.is-narrow){flex-grow:1}}.level-item{align-items:center;display:flex;flex-basis:auto;flex-grow:0;flex-shrink:0;justify-content:center}.level-item .subtitle,.level-item .title{margin-bottom:0}@media screen and 
(max-width:768px){.level-item:not(:last-child){margin-bottom:.75rem}}.level-left,.level-right{flex-basis:auto;flex-grow:0;flex-shrink:0}.level-left .level-item.is-flexible,.level-right .level-item.is-flexible{flex-grow:1}@media screen and (min-width:769px),print{.level-left .level-item:not(:last-child),.level-right .level-item:not(:last-child){margin-right:.75rem}}.level-left{align-items:center;justify-content:flex-start}@media screen and (max-width:768px){.level-left+.level-right{margin-top:1.5rem}}@media screen and (min-width:769px),print{.level-left{display:flex}}.level-right{align-items:center;justify-content:flex-end}@media screen and (min-width:769px),print{.level-right{display:flex}}.list{background-color:#fff;border-radius:4px;box-shadow:0 2px 3px rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.1)}.list-item{display:block;padding:.5em 1em}.list-item:not(a){color:#4a4a4a}.list-item:first-child{border-top-left-radius:4px;border-top-right-radius:4px}.list-item:last-child{border-bottom-left-radius:4px;border-bottom-right-radius:4px}.list-item:not(:last-child){border-bottom:1px solid #dbdbdb}.list-item.is-active{background-color:#3273dc;color:#fff}a.list-item{background-color:#f5f5f5;cursor:pointer}.media{align-items:flex-start;display:flex;text-align:left}.media .content:not(:last-child){margin-bottom:.75rem}.media .media{border-top:1px solid rgba(219,219,219,.5);display:flex;padding-top:.75rem}.media .media .content:not(:last-child),.media .media .control:not(:last-child){margin-bottom:.5rem}.media .media .media{padding-top:.5rem}.media .media .media+.media{margin-top:.5rem}.media+.media{border-top:1px solid rgba(219,219,219,.5);margin-top:1rem;padding-top:1rem}.media.is-large+.media{margin-top:1.5rem;padding-top:1.5rem}.media-left,.media-right{flex-basis:auto;flex-grow:0;flex-shrink:0}.media-left{margin-right:1rem}.media-right{margin-left:1rem}.media-content{flex-basis:auto;flex-grow:1;flex-shrink:1;text-align:left}@media screen and (max-width:768px){.media-content{overflow-x:auto}}.menu{font-size:1rem}.menu.is-small{font-size:.75rem}.menu.is-medium{font-size:1.25rem}.menu.is-large{font-size:1.5rem}.menu-list{line-height:1.25}.menu-list a{border-radius:2px;color:#4a4a4a;display:block;padding:.5em .75em}.menu-list a:hover{background-color:#f5f5f5;color:#363636}.menu-list a.is-active{background-color:#3273dc;color:#fff}.menu-list li ul{border-left:1px solid #dbdbdb;margin:.75em;padding-left:.75em}.menu-label{color:#7a7a7a;font-size:.75em;letter-spacing:.1em;text-transform:uppercase}.menu-label:not(:first-child){margin-top:1em}.menu-label:not(:last-child){margin-bottom:1em}.message{background-color:#f5f5f5;border-radius:4px;font-size:1rem}.message strong{color:currentColor}.message a:not(.button):not(.tag):not(.dropdown-item){color:currentColor;text-decoration:underline}.message.is-small{font-size:.75rem}.message.is-medium{font-size:1.25rem}.message.is-large{font-size:1.5rem}.message.is-white{background-color:#fff}.message.is-white .message-header{background-color:#fff;color:#0a0a0a}.message.is-white .message-body{border-color:#fff;color:#4d4d4d}.message.is-black{background-color:#fafafa}.message.is-black .message-header{background-color:#0a0a0a;color:#fff}.message.is-black .message-body{border-color:#0a0a0a;color:#090909}.message.is-light{background-color:#fafafa}.message.is-light .message-header{background-color:#f5f5f5;color:#363636}.message.is-light .message-body{border-color:#f5f5f5;color:#505050}.message.is-dark{background-color:#fafafa}.message.is-dark 
.message-header{background-color:#363636;color:#f5f5f5}.message.is-dark .message-body{border-color:#363636;color:#2a2a2a}.message.is-primary{background-color:#f5fffd}.message.is-primary .message-header{background-color:#00d1b2;color:#fff}.message.is-primary .message-body{border-color:#00d1b2;color:#021310}.message.is-link{background-color:#f6f9fe}.message.is-link .message-header{background-color:#3273dc;color:#fff}.message.is-link .message-body{border-color:#3273dc;color:#22509a}.message.is-info{background-color:#f6fbfe}.message.is-info .message-header{background-color:#209cee;color:#fff}.message.is-info .message-body{border-color:#209cee;color:#12537e}.message.is-success{background-color:#f6fef9}.message.is-success .message-header{background-color:#23d160;color:#fff}.message.is-success .message-body{border-color:#23d160;color:#0e301a}.message.is-warning{background-color:#fffdf5}.message.is-warning .message-header{background-color:#ffdd57;color:rgba(0,0,0,.7)}.message.is-warning .message-body{border-color:#ffdd57;color:#3b3108}.message.is-danger{background-color:#fff5f7}.message.is-danger .message-header{background-color:#ff3860;color:#fff}.message.is-danger .message-body{border-color:#ff3860;color:#cd0930}.message-header{align-items:center;background-color:#4a4a4a;border-radius:4px 4px 0 0;color:#fff;display:flex;font-weight:700;justify-content:space-between;line-height:1.25;padding:.75em 1em;position:relative}.message-header .delete{flex-grow:0;flex-shrink:0;margin-left:.75em}.message-header+.message-body{border-width:0;border-top-left-radius:0;border-top-right-radius:0}.message-body{border-color:#dbdbdb;border-radius:4px;border-style:solid;border-width:0 0 0 4px;color:#4a4a4a;padding:1.25em 1.5em}.message-body code,.message-body pre{background-color:#fff}.message-body pre code{background-color:transparent}.modal{align-items:center;display:none;flex-direction:column;justify-content:center;overflow:hidden;position:fixed;z-index:40}.modal.is-active{display:flex}.modal-background{background-color:rgba(10,10,10,.86)}.modal-card,.modal-content{margin:0 20px;max-height:calc(100vh - 160px);overflow:auto;position:relative;width:100%}@media screen and (min-width:769px),print{.modal-card,.modal-content{margin:0 auto;max-height:calc(100vh - 40px);width:640px}}.modal-close{background:0 0;height:40px;position:fixed;right:20px;top:20px;width:40px}.modal-card{display:flex;flex-direction:column;max-height:calc(100vh - 40px);overflow:hidden;-ms-overflow-y:visible}.modal-card-foot,.modal-card-head{align-items:center;background-color:#f5f5f5;display:flex;flex-shrink:0;justify-content:flex-start;padding:20px;position:relative}.modal-card-head{border-bottom:1px solid #dbdbdb;border-top-left-radius:6px;border-top-right-radius:6px}.modal-card-title{color:#363636;flex-grow:1;flex-shrink:0;font-size:1.5rem;line-height:1}.modal-card-foot{border-bottom-left-radius:6px;border-bottom-right-radius:6px;border-top:1px solid #dbdbdb}.modal-card-foot .button:not(:last-child){margin-right:.5em}.modal-card-body{-webkit-overflow-scrolling:touch;background-color:#fff;flex-grow:1;flex-shrink:1;overflow:auto;padding:20px}.navbar{background-color:#fff;min-height:3.25rem;position:relative;z-index:30}.navbar.is-white{background-color:#fff;color:#0a0a0a}.navbar.is-white .navbar-brand .navbar-link,.navbar.is-white .navbar-brand>.navbar-item{color:#0a0a0a}.navbar.is-white .navbar-brand .navbar-link.is-active,.navbar.is-white .navbar-brand .navbar-link:focus,.navbar.is-white .navbar-brand .navbar-link:hover,.navbar.is-white 
.navbar-brand>a.navbar-item.is-active,.navbar.is-white .navbar-brand>a.navbar-item:focus,.navbar.is-white .navbar-brand>a.navbar-item:hover{background-color:#f2f2f2;color:#0a0a0a}.navbar.is-white .navbar-brand .navbar-link::after{border-color:#0a0a0a}.navbar.is-white .navbar-burger{color:#0a0a0a}@media screen and (min-width:1024px){.navbar.is-white .navbar-end .navbar-link,.navbar.is-white .navbar-end>.navbar-item,.navbar.is-white .navbar-start .navbar-link,.navbar.is-white .navbar-start>.navbar-item{color:#0a0a0a}.navbar.is-white .navbar-end .navbar-link.is-active,.navbar.is-white .navbar-end .navbar-link:focus,.navbar.is-white .navbar-end .navbar-link:hover,.navbar.is-white .navbar-end>a.navbar-item.is-active,.navbar.is-white .navbar-end>a.navbar-item:focus,.navbar.is-white .navbar-end>a.navbar-item:hover,.navbar.is-white .navbar-start .navbar-link.is-active,.navbar.is-white .navbar-start .navbar-link:focus,.navbar.is-white .navbar-start .navbar-link:hover,.navbar.is-white .navbar-start>a.navbar-item.is-active,.navbar.is-white .navbar-start>a.navbar-item:focus,.navbar.is-white .navbar-start>a.navbar-item:hover{background-color:#f2f2f2;color:#0a0a0a}.navbar.is-white .navbar-end .navbar-link::after,.navbar.is-white .navbar-start .navbar-link::after{border-color:#0a0a0a}.navbar.is-white .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-white .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-white .navbar-item.has-dropdown:hover .navbar-link{background-color:#f2f2f2;color:#0a0a0a}.navbar.is-white .navbar-dropdown a.navbar-item.is-active{background-color:#fff;color:#0a0a0a}}.navbar.is-black{background-color:#0a0a0a;color:#fff}.navbar.is-black .navbar-brand .navbar-link,.navbar.is-black .navbar-brand>.navbar-item{color:#fff}.navbar.is-black .navbar-brand .navbar-link.is-active,.navbar.is-black .navbar-brand .navbar-link:focus,.navbar.is-black .navbar-brand .navbar-link:hover,.navbar.is-black .navbar-brand>a.navbar-item.is-active,.navbar.is-black .navbar-brand>a.navbar-item:focus,.navbar.is-black .navbar-brand>a.navbar-item:hover{background-color:#000;color:#fff}.navbar.is-black .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-black .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-black .navbar-end .navbar-link,.navbar.is-black .navbar-end>.navbar-item,.navbar.is-black .navbar-start .navbar-link,.navbar.is-black .navbar-start>.navbar-item{color:#fff}.navbar.is-black .navbar-end .navbar-link.is-active,.navbar.is-black .navbar-end .navbar-link:focus,.navbar.is-black .navbar-end .navbar-link:hover,.navbar.is-black .navbar-end>a.navbar-item.is-active,.navbar.is-black .navbar-end>a.navbar-item:focus,.navbar.is-black .navbar-end>a.navbar-item:hover,.navbar.is-black .navbar-start .navbar-link.is-active,.navbar.is-black .navbar-start .navbar-link:focus,.navbar.is-black .navbar-start .navbar-link:hover,.navbar.is-black .navbar-start>a.navbar-item.is-active,.navbar.is-black .navbar-start>a.navbar-item:focus,.navbar.is-black .navbar-start>a.navbar-item:hover{background-color:#000;color:#fff}.navbar.is-black .navbar-end .navbar-link::after,.navbar.is-black .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-black .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-black .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-black .navbar-item.has-dropdown:hover .navbar-link{background-color:#000;color:#fff}.navbar.is-black .navbar-dropdown 
a.navbar-item.is-active{background-color:#0a0a0a;color:#fff}}.navbar.is-light{background-color:#f5f5f5;color:#363636}.navbar.is-light .navbar-brand .navbar-link,.navbar.is-light .navbar-brand>.navbar-item{color:#363636}.navbar.is-light .navbar-brand .navbar-link.is-active,.navbar.is-light .navbar-brand .navbar-link:focus,.navbar.is-light .navbar-brand .navbar-link:hover,.navbar.is-light .navbar-brand>a.navbar-item.is-active,.navbar.is-light .navbar-brand>a.navbar-item:focus,.navbar.is-light .navbar-brand>a.navbar-item:hover{background-color:#e8e8e8;color:#363636}.navbar.is-light .navbar-brand .navbar-link::after{border-color:#363636}.navbar.is-light .navbar-burger{color:#363636}@media screen and (min-width:1024px){.navbar.is-light .navbar-end .navbar-link,.navbar.is-light .navbar-end>.navbar-item,.navbar.is-light .navbar-start .navbar-link,.navbar.is-light .navbar-start>.navbar-item{color:#363636}.navbar.is-light .navbar-end .navbar-link.is-active,.navbar.is-light .navbar-end .navbar-link:focus,.navbar.is-light .navbar-end .navbar-link:hover,.navbar.is-light .navbar-end>a.navbar-item.is-active,.navbar.is-light .navbar-end>a.navbar-item:focus,.navbar.is-light .navbar-end>a.navbar-item:hover,.navbar.is-light .navbar-start .navbar-link.is-active,.navbar.is-light .navbar-start .navbar-link:focus,.navbar.is-light .navbar-start .navbar-link:hover,.navbar.is-light .navbar-start>a.navbar-item.is-active,.navbar.is-light .navbar-start>a.navbar-item:focus,.navbar.is-light .navbar-start>a.navbar-item:hover{background-color:#e8e8e8;color:#363636}.navbar.is-light .navbar-end .navbar-link::after,.navbar.is-light .navbar-start .navbar-link::after{border-color:#363636}.navbar.is-light .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-light .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-light .navbar-item.has-dropdown:hover .navbar-link{background-color:#e8e8e8;color:#363636}.navbar.is-light .navbar-dropdown a.navbar-item.is-active{background-color:#f5f5f5;color:#363636}}.navbar.is-dark{background-color:#363636;color:#f5f5f5}.navbar.is-dark .navbar-brand .navbar-link,.navbar.is-dark .navbar-brand>.navbar-item{color:#f5f5f5}.navbar.is-dark .navbar-brand .navbar-link.is-active,.navbar.is-dark .navbar-brand .navbar-link:focus,.navbar.is-dark .navbar-brand .navbar-link:hover,.navbar.is-dark .navbar-brand>a.navbar-item.is-active,.navbar.is-dark .navbar-brand>a.navbar-item:focus,.navbar.is-dark .navbar-brand>a.navbar-item:hover{background-color:#292929;color:#f5f5f5}.navbar.is-dark .navbar-brand .navbar-link::after{border-color:#f5f5f5}.navbar.is-dark .navbar-burger{color:#f5f5f5}@media screen and (min-width:1024px){.navbar.is-dark .navbar-end .navbar-link,.navbar.is-dark .navbar-end>.navbar-item,.navbar.is-dark .navbar-start .navbar-link,.navbar.is-dark .navbar-start>.navbar-item{color:#f5f5f5}.navbar.is-dark .navbar-end .navbar-link.is-active,.navbar.is-dark .navbar-end .navbar-link:focus,.navbar.is-dark .navbar-end .navbar-link:hover,.navbar.is-dark .navbar-end>a.navbar-item.is-active,.navbar.is-dark .navbar-end>a.navbar-item:focus,.navbar.is-dark .navbar-end>a.navbar-item:hover,.navbar.is-dark .navbar-start .navbar-link.is-active,.navbar.is-dark .navbar-start .navbar-link:focus,.navbar.is-dark .navbar-start .navbar-link:hover,.navbar.is-dark .navbar-start>a.navbar-item.is-active,.navbar.is-dark .navbar-start>a.navbar-item:focus,.navbar.is-dark .navbar-start>a.navbar-item:hover{background-color:#292929;color:#f5f5f5}.navbar.is-dark .navbar-end .navbar-link::after,.navbar.is-dark 
.navbar-start .navbar-link::after{border-color:#f5f5f5}.navbar.is-dark .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-dark .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-dark .navbar-item.has-dropdown:hover .navbar-link{background-color:#292929;color:#f5f5f5}.navbar.is-dark .navbar-dropdown a.navbar-item.is-active{background-color:#363636;color:#f5f5f5}}.navbar.is-primary{background-color:#00d1b2;color:#fff}.navbar.is-primary .navbar-brand .navbar-link,.navbar.is-primary .navbar-brand>.navbar-item{color:#fff}.navbar.is-primary .navbar-brand .navbar-link.is-active,.navbar.is-primary .navbar-brand .navbar-link:focus,.navbar.is-primary .navbar-brand .navbar-link:hover,.navbar.is-primary .navbar-brand>a.navbar-item.is-active,.navbar.is-primary .navbar-brand>a.navbar-item:focus,.navbar.is-primary .navbar-brand>a.navbar-item:hover{background-color:#00b89c;color:#fff}.navbar.is-primary .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-primary .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-primary .navbar-end .navbar-link,.navbar.is-primary .navbar-end>.navbar-item,.navbar.is-primary .navbar-start .navbar-link,.navbar.is-primary .navbar-start>.navbar-item{color:#fff}.navbar.is-primary .navbar-end .navbar-link.is-active,.navbar.is-primary .navbar-end .navbar-link:focus,.navbar.is-primary .navbar-end .navbar-link:hover,.navbar.is-primary .navbar-end>a.navbar-item.is-active,.navbar.is-primary .navbar-end>a.navbar-item:focus,.navbar.is-primary .navbar-end>a.navbar-item:hover,.navbar.is-primary .navbar-start .navbar-link.is-active,.navbar.is-primary .navbar-start .navbar-link:focus,.navbar.is-primary .navbar-start .navbar-link:hover,.navbar.is-primary .navbar-start>a.navbar-item.is-active,.navbar.is-primary .navbar-start>a.navbar-item:focus,.navbar.is-primary .navbar-start>a.navbar-item:hover{background-color:#00b89c;color:#fff}.navbar.is-primary .navbar-end .navbar-link::after,.navbar.is-primary .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-primary .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-primary .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-primary .navbar-item.has-dropdown:hover .navbar-link{background-color:#00b89c;color:#fff}.navbar.is-primary .navbar-dropdown a.navbar-item.is-active{background-color:#00d1b2;color:#fff}}.navbar.is-link{background-color:#3273dc;color:#fff}.navbar.is-link .navbar-brand .navbar-link,.navbar.is-link .navbar-brand>.navbar-item{color:#fff}.navbar.is-link .navbar-brand .navbar-link.is-active,.navbar.is-link .navbar-brand .navbar-link:focus,.navbar.is-link .navbar-brand .navbar-link:hover,.navbar.is-link .navbar-brand>a.navbar-item.is-active,.navbar.is-link .navbar-brand>a.navbar-item:focus,.navbar.is-link .navbar-brand>a.navbar-item:hover{background-color:#2366d1;color:#fff}.navbar.is-link .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-link .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-link .navbar-end .navbar-link,.navbar.is-link .navbar-end>.navbar-item,.navbar.is-link .navbar-start .navbar-link,.navbar.is-link .navbar-start>.navbar-item{color:#fff}.navbar.is-link .navbar-end .navbar-link.is-active,.navbar.is-link .navbar-end .navbar-link:focus,.navbar.is-link .navbar-end .navbar-link:hover,.navbar.is-link .navbar-end>a.navbar-item.is-active,.navbar.is-link .navbar-end>a.navbar-item:focus,.navbar.is-link .navbar-end>a.navbar-item:hover,.navbar.is-link .navbar-start .navbar-link.is-active,.navbar.is-link .navbar-start 
.navbar-link:focus,.navbar.is-link .navbar-start .navbar-link:hover,.navbar.is-link .navbar-start>a.navbar-item.is-active,.navbar.is-link .navbar-start>a.navbar-item:focus,.navbar.is-link .navbar-start>a.navbar-item:hover{background-color:#2366d1;color:#fff}.navbar.is-link .navbar-end .navbar-link::after,.navbar.is-link .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-link .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-link .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-link .navbar-item.has-dropdown:hover .navbar-link{background-color:#2366d1;color:#fff}.navbar.is-link .navbar-dropdown a.navbar-item.is-active{background-color:#3273dc;color:#fff}}.navbar.is-info{background-color:#209cee;color:#fff}.navbar.is-info .navbar-brand .navbar-link,.navbar.is-info .navbar-brand>.navbar-item{color:#fff}.navbar.is-info .navbar-brand .navbar-link.is-active,.navbar.is-info .navbar-brand .navbar-link:focus,.navbar.is-info .navbar-brand .navbar-link:hover,.navbar.is-info .navbar-brand>a.navbar-item.is-active,.navbar.is-info .navbar-brand>a.navbar-item:focus,.navbar.is-info .navbar-brand>a.navbar-item:hover{background-color:#118fe4;color:#fff}.navbar.is-info .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-info .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-info .navbar-end .navbar-link,.navbar.is-info .navbar-end>.navbar-item,.navbar.is-info .navbar-start .navbar-link,.navbar.is-info .navbar-start>.navbar-item{color:#fff}.navbar.is-info .navbar-end .navbar-link.is-active,.navbar.is-info .navbar-end .navbar-link:focus,.navbar.is-info .navbar-end .navbar-link:hover,.navbar.is-info .navbar-end>a.navbar-item.is-active,.navbar.is-info .navbar-end>a.navbar-item:focus,.navbar.is-info .navbar-end>a.navbar-item:hover,.navbar.is-info .navbar-start .navbar-link.is-active,.navbar.is-info .navbar-start .navbar-link:focus,.navbar.is-info .navbar-start .navbar-link:hover,.navbar.is-info .navbar-start>a.navbar-item.is-active,.navbar.is-info .navbar-start>a.navbar-item:focus,.navbar.is-info .navbar-start>a.navbar-item:hover{background-color:#118fe4;color:#fff}.navbar.is-info .navbar-end .navbar-link::after,.navbar.is-info .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-info .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-info .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-info .navbar-item.has-dropdown:hover .navbar-link{background-color:#118fe4;color:#fff}.navbar.is-info .navbar-dropdown a.navbar-item.is-active{background-color:#209cee;color:#fff}}.navbar.is-success{background-color:#23d160;color:#fff}.navbar.is-success .navbar-brand .navbar-link,.navbar.is-success .navbar-brand>.navbar-item{color:#fff}.navbar.is-success .navbar-brand .navbar-link.is-active,.navbar.is-success .navbar-brand .navbar-link:focus,.navbar.is-success .navbar-brand .navbar-link:hover,.navbar.is-success .navbar-brand>a.navbar-item.is-active,.navbar.is-success .navbar-brand>a.navbar-item:focus,.navbar.is-success .navbar-brand>a.navbar-item:hover{background-color:#20bc56;color:#fff}.navbar.is-success .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-success .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-success .navbar-end .navbar-link,.navbar.is-success .navbar-end>.navbar-item,.navbar.is-success .navbar-start .navbar-link,.navbar.is-success .navbar-start>.navbar-item{color:#fff}.navbar.is-success .navbar-end .navbar-link.is-active,.navbar.is-success .navbar-end 
.navbar-link:focus,.navbar.is-success .navbar-end .navbar-link:hover,.navbar.is-success .navbar-end>a.navbar-item.is-active,.navbar.is-success .navbar-end>a.navbar-item:focus,.navbar.is-success .navbar-end>a.navbar-item:hover,.navbar.is-success .navbar-start .navbar-link.is-active,.navbar.is-success .navbar-start .navbar-link:focus,.navbar.is-success .navbar-start .navbar-link:hover,.navbar.is-success .navbar-start>a.navbar-item.is-active,.navbar.is-success .navbar-start>a.navbar-item:focus,.navbar.is-success .navbar-start>a.navbar-item:hover{background-color:#20bc56;color:#fff}.navbar.is-success .navbar-end .navbar-link::after,.navbar.is-success .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-success .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-success .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-success .navbar-item.has-dropdown:hover .navbar-link{background-color:#20bc56;color:#fff}.navbar.is-success .navbar-dropdown a.navbar-item.is-active{background-color:#23d160;color:#fff}}.navbar.is-warning{background-color:#ffdd57;color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-brand .navbar-link,.navbar.is-warning .navbar-brand>.navbar-item{color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-brand .navbar-link.is-active,.navbar.is-warning .navbar-brand .navbar-link:focus,.navbar.is-warning .navbar-brand .navbar-link:hover,.navbar.is-warning .navbar-brand>a.navbar-item.is-active,.navbar.is-warning .navbar-brand>a.navbar-item:focus,.navbar.is-warning .navbar-brand>a.navbar-item:hover{background-color:#ffd83d;color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-brand .navbar-link::after{border-color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-burger{color:rgba(0,0,0,.7)}@media screen and (min-width:1024px){.navbar.is-warning .navbar-end .navbar-link,.navbar.is-warning .navbar-end>.navbar-item,.navbar.is-warning .navbar-start .navbar-link,.navbar.is-warning .navbar-start>.navbar-item{color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-end .navbar-link.is-active,.navbar.is-warning .navbar-end .navbar-link:focus,.navbar.is-warning .navbar-end .navbar-link:hover,.navbar.is-warning .navbar-end>a.navbar-item.is-active,.navbar.is-warning .navbar-end>a.navbar-item:focus,.navbar.is-warning .navbar-end>a.navbar-item:hover,.navbar.is-warning .navbar-start .navbar-link.is-active,.navbar.is-warning .navbar-start .navbar-link:focus,.navbar.is-warning .navbar-start .navbar-link:hover,.navbar.is-warning .navbar-start>a.navbar-item.is-active,.navbar.is-warning .navbar-start>a.navbar-item:focus,.navbar.is-warning .navbar-start>a.navbar-item:hover{background-color:#ffd83d;color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-end .navbar-link::after,.navbar.is-warning .navbar-start .navbar-link::after{border-color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-warning .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-warning .navbar-item.has-dropdown:hover .navbar-link{background-color:#ffd83d;color:rgba(0,0,0,.7)}.navbar.is-warning .navbar-dropdown a.navbar-item.is-active{background-color:#ffdd57;color:rgba(0,0,0,.7)}}.navbar.is-danger{background-color:#ff3860;color:#fff}.navbar.is-danger .navbar-brand .navbar-link,.navbar.is-danger .navbar-brand>.navbar-item{color:#fff}.navbar.is-danger .navbar-brand .navbar-link.is-active,.navbar.is-danger .navbar-brand .navbar-link:focus,.navbar.is-danger .navbar-brand .navbar-link:hover,.navbar.is-danger .navbar-brand>a.navbar-item.is-active,.navbar.is-danger 
.navbar-brand>a.navbar-item:focus,.navbar.is-danger .navbar-brand>a.navbar-item:hover{background-color:#ff1f4b;color:#fff}.navbar.is-danger .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-danger .navbar-burger{color:#fff}@media screen and (min-width:1024px){.navbar.is-danger .navbar-end .navbar-link,.navbar.is-danger .navbar-end>.navbar-item,.navbar.is-danger .navbar-start .navbar-link,.navbar.is-danger .navbar-start>.navbar-item{color:#fff}.navbar.is-danger .navbar-end .navbar-link.is-active,.navbar.is-danger .navbar-end .navbar-link:focus,.navbar.is-danger .navbar-end .navbar-link:hover,.navbar.is-danger .navbar-end>a.navbar-item.is-active,.navbar.is-danger .navbar-end>a.navbar-item:focus,.navbar.is-danger .navbar-end>a.navbar-item:hover,.navbar.is-danger .navbar-start .navbar-link.is-active,.navbar.is-danger .navbar-start .navbar-link:focus,.navbar.is-danger .navbar-start .navbar-link:hover,.navbar.is-danger .navbar-start>a.navbar-item.is-active,.navbar.is-danger .navbar-start>a.navbar-item:focus,.navbar.is-danger .navbar-start>a.navbar-item:hover{background-color:#ff1f4b;color:#fff}.navbar.is-danger .navbar-end .navbar-link::after,.navbar.is-danger .navbar-start .navbar-link::after{border-color:#fff}.navbar.is-danger .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-danger .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-danger .navbar-item.has-dropdown:hover .navbar-link{background-color:#ff1f4b;color:#fff}.navbar.is-danger .navbar-dropdown a.navbar-item.is-active{background-color:#ff3860;color:#fff}}.navbar>.container{align-items:stretch;display:flex;min-height:3.25rem;width:100%}.navbar.has-shadow{box-shadow:0 2px 0 0 #f5f5f5}.navbar.is-fixed-bottom,.navbar.is-fixed-top{left:0;position:fixed;right:0;z-index:30}.navbar.is-fixed-bottom{bottom:0}.navbar.is-fixed-bottom.has-shadow{box-shadow:0 -2px 0 0 #f5f5f5}.navbar.is-fixed-top{top:0}body.has-navbar-fixed-top,html.has-navbar-fixed-top{padding-top:3.25rem}body.has-navbar-fixed-bottom,html.has-navbar-fixed-bottom{padding-bottom:3.25rem}.navbar-brand,.navbar-tabs{align-items:stretch;display:flex;flex-shrink:0;min-height:3.25rem}.navbar-brand a.navbar-item:focus,.navbar-brand a.navbar-item:hover{background-color:transparent}.navbar-tabs{-webkit-overflow-scrolling:touch;max-width:100vw;overflow-x:auto;overflow-y:hidden}.navbar-burger{color:#4a4a4a;cursor:pointer;display:block;height:3.25rem;position:relative;width:3.25rem;margin-left:auto}.navbar-burger span{background-color:currentColor;display:block;height:1px;left:calc(50% - 8px);position:absolute;-webkit-transform-origin:center;transform-origin:center;transition-duration:86ms;transition-property:background-color,opacity,-webkit-transform;transition-property:background-color,opacity,transform;transition-property:background-color,opacity,transform,-webkit-transform;transition-timing-function:ease-out;width:16px}.navbar-burger span:nth-child(1){top:calc(50% - 6px)}.navbar-burger span:nth-child(2){top:calc(50% - 1px)}.navbar-burger span:nth-child(3){top:calc(50% + 4px)}.navbar-burger:hover{background-color:rgba(0,0,0,.05)}.navbar-burger.is-active span:nth-child(1){-webkit-transform:translateY(5px) rotate(45deg);transform:translateY(5px) rotate(45deg)}.navbar-burger.is-active span:nth-child(2){opacity:0}.navbar-burger.is-active span:nth-child(3){-webkit-transform:translateY(-5px) rotate(-45deg);transform:translateY(-5px) rotate(-45deg)}.navbar-menu{display:none}.navbar-item,.navbar-link{color:#4a4a4a;display:block;line-height:1.5;padding:.5rem 
.75rem;position:relative}.navbar-item .icon:only-child,.navbar-link .icon:only-child{margin-left:-.25rem;margin-right:-.25rem}.navbar-link,a.navbar-item{cursor:pointer}.navbar-link.is-active,.navbar-link:focus,.navbar-link:focus-within,.navbar-link:hover,a.navbar-item.is-active,a.navbar-item:focus,a.navbar-item:focus-within,a.navbar-item:hover{background-color:#fafafa;color:#3273dc}.navbar-item{display:block;flex-grow:0;flex-shrink:0}.navbar-item img{max-height:1.75rem}.navbar-item.has-dropdown{padding:0}.navbar-item.is-expanded{flex-grow:1;flex-shrink:1}.navbar-item.is-tab{border-bottom:1px solid transparent;min-height:3.25rem;padding-bottom:calc(.5rem - 1px)}.navbar-item.is-tab:focus,.navbar-item.is-tab:hover{background-color:transparent;border-bottom-color:#3273dc}.navbar-item.is-tab.is-active{background-color:transparent;border-bottom-color:#3273dc;border-bottom-style:solid;border-bottom-width:3px;color:#3273dc;padding-bottom:calc(.5rem - 3px)}.navbar-content{flex-grow:1;flex-shrink:1}.navbar-link:not(.is-arrowless){padding-right:2.5em}.navbar-link:not(.is-arrowless)::after{border-color:#3273dc;margin-top:-.375em;right:1.125em}.navbar-dropdown{font-size:.875rem;padding-bottom:.5rem;padding-top:.5rem}.navbar-dropdown .navbar-item{padding-left:1.5rem;padding-right:1.5rem}.navbar-divider{background-color:#f5f5f5;border:none;display:none;height:2px;margin:.5rem 0}@media screen and (max-width:1023px){.navbar>.container{display:block}.navbar-brand .navbar-item,.navbar-tabs .navbar-item{align-items:center;display:flex}.navbar-link::after{display:none}.navbar-menu{background-color:#fff;box-shadow:0 8px 16px rgba(10,10,10,.1);padding:.5rem 0}.navbar-menu.is-active{display:block}.navbar.is-fixed-bottom-touch,.navbar.is-fixed-top-touch{left:0;position:fixed;right:0;z-index:30}.navbar.is-fixed-bottom-touch{bottom:0}.navbar.is-fixed-bottom-touch.has-shadow{box-shadow:0 -2px 3px rgba(10,10,10,.1)}.navbar.is-fixed-top-touch{top:0}.navbar.is-fixed-top .navbar-menu,.navbar.is-fixed-top-touch .navbar-menu{-webkit-overflow-scrolling:touch;max-height:calc(100vh - 3.25rem);overflow:auto}body.has-navbar-fixed-top-touch,html.has-navbar-fixed-top-touch{padding-top:3.25rem}body.has-navbar-fixed-bottom-touch,html.has-navbar-fixed-bottom-touch{padding-bottom:3.25rem}}@media screen and (min-width:1024px){.navbar,.navbar-end,.navbar-menu,.navbar-start{align-items:stretch;display:flex}.navbar{min-height:3.25rem}.navbar.is-spaced{padding:1rem 2rem}.navbar.is-spaced .navbar-end,.navbar.is-spaced .navbar-start{align-items:center}.navbar.is-spaced .navbar-link,.navbar.is-spaced a.navbar-item{border-radius:4px}.navbar.is-transparent .navbar-link.is-active,.navbar.is-transparent .navbar-link:focus,.navbar.is-transparent .navbar-link:hover,.navbar.is-transparent a.navbar-item.is-active,.navbar.is-transparent a.navbar-item:focus,.navbar.is-transparent a.navbar-item:hover{background-color:transparent!important}.navbar.is-transparent .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:focus .navbar-link,.navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:focus-within .navbar-link,.navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:hover .navbar-link{background-color:transparent!important}.navbar.is-transparent .navbar-dropdown a.navbar-item:focus,.navbar.is-transparent .navbar-dropdown a.navbar-item:hover{background-color:#f5f5f5;color:#0a0a0a}.navbar.is-transparent .navbar-dropdown 
a.navbar-item.is-active{background-color:#f5f5f5;color:#3273dc}.navbar-burger{display:none}.navbar-item,.navbar-link{align-items:center;display:flex}.navbar-item{display:flex}.navbar-item.has-dropdown{align-items:stretch}.navbar-item.has-dropdown-up .navbar-link::after{-webkit-transform:rotate(135deg) translate(.25em,-.25em);transform:rotate(135deg) translate(.25em,-.25em)}.navbar-item.has-dropdown-up .navbar-dropdown{border-bottom:2px solid #dbdbdb;border-radius:6px 6px 0 0;border-top:none;bottom:100%;box-shadow:0 -8px 8px rgba(10,10,10,.1);top:auto}.navbar-item.is-active .navbar-dropdown,.navbar-item.is-hoverable:focus .navbar-dropdown,.navbar-item.is-hoverable:focus-within .navbar-dropdown,.navbar-item.is-hoverable:hover .navbar-dropdown{display:block}.navbar-item.is-active .navbar-dropdown.is-boxed,.navbar-item.is-hoverable:focus .navbar-dropdown.is-boxed,.navbar-item.is-hoverable:focus-within .navbar-dropdown.is-boxed,.navbar-item.is-hoverable:hover .navbar-dropdown.is-boxed,.navbar.is-spaced .navbar-item.is-active .navbar-dropdown,.navbar.is-spaced .navbar-item.is-hoverable:focus .navbar-dropdown,.navbar.is-spaced .navbar-item.is-hoverable:focus-within .navbar-dropdown,.navbar.is-spaced .navbar-item.is-hoverable:hover .navbar-dropdown{opacity:1;pointer-events:auto;-webkit-transform:translateY(0);transform:translateY(0)}.navbar-menu{flex-grow:1;flex-shrink:0}.navbar-start{justify-content:flex-start;margin-right:auto}.navbar-end{justify-content:flex-end;margin-left:auto}.navbar-dropdown{background-color:#fff;border-bottom-left-radius:6px;border-bottom-right-radius:6px;border-top:2px solid #dbdbdb;box-shadow:0 8px 8px rgba(10,10,10,.1);display:none;font-size:.875rem;left:0;min-width:100%;position:absolute;top:100%;z-index:20}.navbar-dropdown .navbar-item{padding:.375rem 1rem;white-space:nowrap}.navbar-dropdown a.navbar-item{padding-right:3rem}.navbar-dropdown a.navbar-item:focus,.navbar-dropdown a.navbar-item:hover{background-color:#f5f5f5;color:#0a0a0a}.navbar-dropdown a.navbar-item.is-active{background-color:#f5f5f5;color:#3273dc}.navbar-dropdown.is-boxed,.navbar.is-spaced .navbar-dropdown{border-radius:6px;border-top:none;box-shadow:0 8px 8px rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.1);display:block;opacity:0;pointer-events:none;top:calc(100% + (-4px));-webkit-transform:translateY(-5px);transform:translateY(-5px);transition-duration:86ms;transition-property:opacity,-webkit-transform;transition-property:opacity,transform;transition-property:opacity,transform,-webkit-transform}.navbar-dropdown.is-right{left:auto;right:0}.navbar-divider{display:block}.container>.navbar .navbar-brand,.navbar>.container .navbar-brand{margin-left:-.75rem}.container>.navbar .navbar-menu,.navbar>.container .navbar-menu{margin-right:-.75rem}.navbar.is-fixed-bottom-desktop,.navbar.is-fixed-top-desktop{left:0;position:fixed;right:0;z-index:30}.navbar.is-fixed-bottom-desktop{bottom:0}.navbar.is-fixed-bottom-desktop.has-shadow{box-shadow:0 -2px 3px 
rgba(10,10,10,.1)}.navbar.is-fixed-top-desktop{top:0}body.has-navbar-fixed-top-desktop,html.has-navbar-fixed-top-desktop{padding-top:3.25rem}body.has-navbar-fixed-bottom-desktop,html.has-navbar-fixed-bottom-desktop{padding-bottom:3.25rem}body.has-spaced-navbar-fixed-top,html.has-spaced-navbar-fixed-top{padding-top:5.25rem}body.has-spaced-navbar-fixed-bottom,html.has-spaced-navbar-fixed-bottom{padding-bottom:5.25rem}.navbar-link.is-active,a.navbar-item.is-active{color:#0a0a0a}.navbar-link.is-active:not(:focus):not(:hover),a.navbar-item.is-active:not(:focus):not(:hover){background-color:transparent}.navbar-item.has-dropdown.is-active .navbar-link,.navbar-item.has-dropdown:focus .navbar-link,.navbar-item.has-dropdown:hover .navbar-link{background-color:#fafafa}}.hero.is-fullheight-with-navbar{min-height:calc(100vh - 3.25rem)}.pagination{font-size:1rem;margin:-.25rem}.pagination.is-small{font-size:.75rem}.pagination.is-medium{font-size:1.25rem}.pagination.is-large{font-size:1.5rem}.pagination.is-rounded .pagination-next,.pagination.is-rounded .pagination-previous{padding-left:1em;padding-right:1em;border-radius:290486px}.pagination.is-rounded .pagination-link{border-radius:290486px}.pagination,.pagination-list{align-items:center;display:flex;justify-content:center;text-align:center}.pagination-ellipsis,.pagination-link,.pagination-next,.pagination-previous{font-size:1em;justify-content:center;margin:.25rem;padding-left:.5em;padding-right:.5em;text-align:center}.pagination-link,.pagination-next,.pagination-previous{border-color:#dbdbdb;color:#363636;min-width:2.25em}.pagination-link:hover,.pagination-next:hover,.pagination-previous:hover{border-color:#b5b5b5;color:#363636}.pagination-link:focus,.pagination-next:focus,.pagination-previous:focus{border-color:#3273dc}.pagination-link:active,.pagination-next:active,.pagination-previous:active{box-shadow:inset 0 1px 2px rgba(10,10,10,.2)}.pagination-link[disabled],.pagination-next[disabled],.pagination-previous[disabled]{background-color:#dbdbdb;border-color:#dbdbdb;box-shadow:none;color:#7a7a7a;opacity:.5}.pagination-next,.pagination-previous{padding-left:.75em;padding-right:.75em;white-space:nowrap}.pagination-link.is-current{background-color:#3273dc;border-color:#3273dc;color:#fff}.pagination-ellipsis{color:#b5b5b5;pointer-events:none}.pagination-list{flex-wrap:wrap}@media screen and (max-width:768px){.pagination{flex-wrap:wrap}.pagination-next,.pagination-previous{flex-grow:1;flex-shrink:1}.pagination-list li{flex-grow:1;flex-shrink:1}}@media screen and (min-width:769px),print{.pagination-list{flex-grow:1;flex-shrink:1;justify-content:flex-start;order:1}.pagination-previous{order:2}.pagination-next{order:3}.pagination{justify-content:space-between}.pagination.is-centered .pagination-previous{order:1}.pagination.is-centered .pagination-list{justify-content:center;order:2}.pagination.is-centered .pagination-next{order:3}.pagination.is-right .pagination-previous{order:1}.pagination.is-right .pagination-next{order:2}.pagination.is-right .pagination-list{justify-content:flex-end;order:3}}.panel{font-size:1rem}.panel:not(:last-child){margin-bottom:1.5rem}.panel-block,.panel-heading,.panel-tabs{border-bottom:1px solid #dbdbdb;border-left:1px solid #dbdbdb;border-right:1px solid #dbdbdb}.panel-block:first-child,.panel-heading:first-child,.panel-tabs:first-child{border-top:1px solid #dbdbdb}.panel-heading{background-color:#f5f5f5;border-radius:4px 4px 0 0;color:#363636;font-size:1.25em;font-weight:300;line-height:1.25;padding:.5em 
.75em}.panel-tabs{align-items:flex-end;display:flex;font-size:.875em;justify-content:center}.panel-tabs a{border-bottom:1px solid #dbdbdb;margin-bottom:-1px;padding:.5em}.panel-tabs a.is-active{border-bottom-color:#4a4a4a;color:#363636}.panel-list a{color:#4a4a4a}.panel-list a:hover{color:#3273dc}.panel-block{align-items:center;color:#363636;display:flex;justify-content:flex-start;padding:.5em .75em}.panel-block input[type=checkbox]{margin-right:.75em}.panel-block>.control{flex-grow:1;flex-shrink:1;width:100%}.panel-block.is-wrapped{flex-wrap:wrap}.panel-block.is-active{border-left-color:#3273dc;color:#363636}.panel-block.is-active .panel-icon{color:#3273dc}a.panel-block,label.panel-block{cursor:pointer}a.panel-block:hover,label.panel-block:hover{background-color:#f5f5f5}.panel-icon{display:inline-block;font-size:14px;height:1em;line-height:1em;text-align:center;vertical-align:top;width:1em;color:#7a7a7a;margin-right:.75em}.panel-icon .fa{font-size:inherit;line-height:inherit}.tabs{-webkit-overflow-scrolling:touch;align-items:stretch;display:flex;font-size:1rem;justify-content:space-between;overflow:hidden;overflow-x:auto;white-space:nowrap}.tabs a{align-items:center;border-bottom-color:#dbdbdb;border-bottom-style:solid;border-bottom-width:1px;color:#4a4a4a;display:flex;justify-content:center;margin-bottom:-1px;padding:.5em 1em;vertical-align:top}.tabs a:hover{border-bottom-color:#363636;color:#363636}.tabs li{display:block}.tabs li.is-active a{border-bottom-color:#3273dc;color:#3273dc}.tabs ul{align-items:center;border-bottom-color:#dbdbdb;border-bottom-style:solid;border-bottom-width:1px;display:flex;flex-grow:1;flex-shrink:0;justify-content:flex-start}.tabs ul.is-left{padding-right:.75em}.tabs ul.is-center{flex:none;justify-content:center;padding-left:.75em;padding-right:.75em}.tabs ul.is-right{justify-content:flex-end;padding-left:.75em}.tabs .icon:first-child{margin-right:.5em}.tabs .icon:last-child{margin-left:.5em}.tabs.is-centered ul{justify-content:center}.tabs.is-right ul{justify-content:flex-end}.tabs.is-boxed a{border:1px solid transparent;border-radius:4px 4px 0 0}.tabs.is-boxed a:hover{background-color:#f5f5f5;border-bottom-color:#dbdbdb}.tabs.is-boxed li.is-active a{background-color:#fff;border-color:#dbdbdb;border-bottom-color:transparent!important}.tabs.is-fullwidth li{flex-grow:1;flex-shrink:0}.tabs.is-toggle a{border-color:#dbdbdb;border-style:solid;border-width:1px;margin-bottom:0;position:relative}.tabs.is-toggle a:hover{background-color:#f5f5f5;border-color:#b5b5b5;z-index:2}.tabs.is-toggle li+li{margin-left:-1px}.tabs.is-toggle li:first-child a{border-radius:4px 0 0 4px}.tabs.is-toggle li:last-child a{border-radius:0 4px 4px 0}.tabs.is-toggle li.is-active a{background-color:#3273dc;border-color:#3273dc;color:#fff;z-index:1}.tabs.is-toggle ul{border-bottom:none}.tabs.is-toggle.is-toggle-rounded li:first-child a{border-bottom-left-radius:290486px;border-top-left-radius:290486px;padding-left:1.25em}.tabs.is-toggle.is-toggle-rounded li:last-child 
a{border-bottom-right-radius:290486px;border-top-right-radius:290486px;padding-right:1.25em}.tabs.is-small{font-size:.75rem}.tabs.is-medium{font-size:1.25rem}.tabs.is-large{font-size:1.5rem}.column{display:block;flex-basis:0;flex-grow:1;flex-shrink:1;padding:.75rem}.columns.is-mobile>.column.is-narrow{flex:none}.columns.is-mobile>.column.is-full{flex:none;width:100%}.columns.is-mobile>.column.is-three-quarters{flex:none;width:75%}.columns.is-mobile>.column.is-two-thirds{flex:none;width:66.6666%}.columns.is-mobile>.column.is-half{flex:none;width:50%}.columns.is-mobile>.column.is-one-third{flex:none;width:33.3333%}.columns.is-mobile>.column.is-one-quarter{flex:none;width:25%}.columns.is-mobile>.column.is-one-fifth{flex:none;width:20%}.columns.is-mobile>.column.is-two-fifths{flex:none;width:40%}.columns.is-mobile>.column.is-three-fifths{flex:none;width:60%}.columns.is-mobile>.column.is-four-fifths{flex:none;width:80%}.columns.is-mobile>.column.is-offset-three-quarters{margin-left:75%}.columns.is-mobile>.column.is-offset-two-thirds{margin-left:66.6666%}.columns.is-mobile>.column.is-offset-half{margin-left:50%}.columns.is-mobile>.column.is-offset-one-third{margin-left:33.3333%}.columns.is-mobile>.column.is-offset-one-quarter{margin-left:25%}.columns.is-mobile>.column.is-offset-one-fifth{margin-left:20%}.columns.is-mobile>.column.is-offset-two-fifths{margin-left:40%}.columns.is-mobile>.column.is-offset-three-fifths{margin-left:60%}.columns.is-mobile>.column.is-offset-four-fifths{margin-left:80%}.columns.is-mobile>.column.is-0{flex:none;width:0%}.columns.is-mobile>.column.is-offset-0{margin-left:0}.columns.is-mobile>.column.is-1{flex:none;width:8.33333%}.columns.is-mobile>.column.is-offset-1{margin-left:8.33333%}.columns.is-mobile>.column.is-2{flex:none;width:16.66667%}.columns.is-mobile>.column.is-offset-2{margin-left:16.66667%}.columns.is-mobile>.column.is-3{flex:none;width:25%}.columns.is-mobile>.column.is-offset-3{margin-left:25%}.columns.is-mobile>.column.is-4{flex:none;width:33.33333%}.columns.is-mobile>.column.is-offset-4{margin-left:33.33333%}.columns.is-mobile>.column.is-5{flex:none;width:41.66667%}.columns.is-mobile>.column.is-offset-5{margin-left:41.66667%}.columns.is-mobile>.column.is-6{flex:none;width:50%}.columns.is-mobile>.column.is-offset-6{margin-left:50%}.columns.is-mobile>.column.is-7{flex:none;width:58.33333%}.columns.is-mobile>.column.is-offset-7{margin-left:58.33333%}.columns.is-mobile>.column.is-8{flex:none;width:66.66667%}.columns.is-mobile>.column.is-offset-8{margin-left:66.66667%}.columns.is-mobile>.column.is-9{flex:none;width:75%}.columns.is-mobile>.column.is-offset-9{margin-left:75%}.columns.is-mobile>.column.is-10{flex:none;width:83.33333%}.columns.is-mobile>.column.is-offset-10{margin-left:83.33333%}.columns.is-mobile>.column.is-11{flex:none;width:91.66667%}.columns.is-mobile>.column.is-offset-11{margin-left:91.66667%}.columns.is-mobile>.column.is-12{flex:none;width:100%}.columns.is-mobile>.column.is-offset-12{margin-left:100%}@media screen and 
(max-width:768px){.column.is-narrow-mobile{flex:none}.column.is-full-mobile{flex:none;width:100%}.column.is-three-quarters-mobile{flex:none;width:75%}.column.is-two-thirds-mobile{flex:none;width:66.6666%}.column.is-half-mobile{flex:none;width:50%}.column.is-one-third-mobile{flex:none;width:33.3333%}.column.is-one-quarter-mobile{flex:none;width:25%}.column.is-one-fifth-mobile{flex:none;width:20%}.column.is-two-fifths-mobile{flex:none;width:40%}.column.is-three-fifths-mobile{flex:none;width:60%}.column.is-four-fifths-mobile{flex:none;width:80%}.column.is-offset-three-quarters-mobile{margin-left:75%}.column.is-offset-two-thirds-mobile{margin-left:66.6666%}.column.is-offset-half-mobile{margin-left:50%}.column.is-offset-one-third-mobile{margin-left:33.3333%}.column.is-offset-one-quarter-mobile{margin-left:25%}.column.is-offset-one-fifth-mobile{margin-left:20%}.column.is-offset-two-fifths-mobile{margin-left:40%}.column.is-offset-three-fifths-mobile{margin-left:60%}.column.is-offset-four-fifths-mobile{margin-left:80%}.column.is-0-mobile{flex:none;width:0%}.column.is-offset-0-mobile{margin-left:0}.column.is-1-mobile{flex:none;width:8.33333%}.column.is-offset-1-mobile{margin-left:8.33333%}.column.is-2-mobile{flex:none;width:16.66667%}.column.is-offset-2-mobile{margin-left:16.66667%}.column.is-3-mobile{flex:none;width:25%}.column.is-offset-3-mobile{margin-left:25%}.column.is-4-mobile{flex:none;width:33.33333%}.column.is-offset-4-mobile{margin-left:33.33333%}.column.is-5-mobile{flex:none;width:41.66667%}.column.is-offset-5-mobile{margin-left:41.66667%}.column.is-6-mobile{flex:none;width:50%}.column.is-offset-6-mobile{margin-left:50%}.column.is-7-mobile{flex:none;width:58.33333%}.column.is-offset-7-mobile{margin-left:58.33333%}.column.is-8-mobile{flex:none;width:66.66667%}.column.is-offset-8-mobile{margin-left:66.66667%}.column.is-9-mobile{flex:none;width:75%}.column.is-offset-9-mobile{margin-left:75%}.column.is-10-mobile{flex:none;width:83.33333%}.column.is-offset-10-mobile{margin-left:83.33333%}.column.is-11-mobile{flex:none;width:91.66667%}.column.is-offset-11-mobile{margin-left:91.66667%}.column.is-12-mobile{flex:none;width:100%}.column.is-offset-12-mobile{margin-left:100%}}@media screen and 
(min-width:769px),print{.column.is-narrow,.column.is-narrow-tablet{flex:none}.column.is-full,.column.is-full-tablet{flex:none;width:100%}.column.is-three-quarters,.column.is-three-quarters-tablet{flex:none;width:75%}.column.is-two-thirds,.column.is-two-thirds-tablet{flex:none;width:66.6666%}.column.is-half,.column.is-half-tablet{flex:none;width:50%}.column.is-one-third,.column.is-one-third-tablet{flex:none;width:33.3333%}.column.is-one-quarter,.column.is-one-quarter-tablet{flex:none;width:25%}.column.is-one-fifth,.column.is-one-fifth-tablet{flex:none;width:20%}.column.is-two-fifths,.column.is-two-fifths-tablet{flex:none;width:40%}.column.is-three-fifths,.column.is-three-fifths-tablet{flex:none;width:60%}.column.is-four-fifths,.column.is-four-fifths-tablet{flex:none;width:80%}.column.is-offset-three-quarters,.column.is-offset-three-quarters-tablet{margin-left:75%}.column.is-offset-two-thirds,.column.is-offset-two-thirds-tablet{margin-left:66.6666%}.column.is-offset-half,.column.is-offset-half-tablet{margin-left:50%}.column.is-offset-one-third,.column.is-offset-one-third-tablet{margin-left:33.3333%}.column.is-offset-one-quarter,.column.is-offset-one-quarter-tablet{margin-left:25%}.column.is-offset-one-fifth,.column.is-offset-one-fifth-tablet{margin-left:20%}.column.is-offset-two-fifths,.column.is-offset-two-fifths-tablet{margin-left:40%}.column.is-offset-three-fifths,.column.is-offset-three-fifths-tablet{margin-left:60%}.column.is-offset-four-fifths,.column.is-offset-four-fifths-tablet{margin-left:80%}.column.is-0,.column.is-0-tablet{flex:none;width:0%}.column.is-offset-0,.column.is-offset-0-tablet{margin-left:0}.column.is-1,.column.is-1-tablet{flex:none;width:8.33333%}.column.is-offset-1,.column.is-offset-1-tablet{margin-left:8.33333%}.column.is-2,.column.is-2-tablet{flex:none;width:16.66667%}.column.is-offset-2,.column.is-offset-2-tablet{margin-left:16.66667%}.column.is-3,.column.is-3-tablet{flex:none;width:25%}.column.is-offset-3,.column.is-offset-3-tablet{margin-left:25%}.column.is-4,.column.is-4-tablet{flex:none;width:33.33333%}.column.is-offset-4,.column.is-offset-4-tablet{margin-left:33.33333%}.column.is-5,.column.is-5-tablet{flex:none;width:41.66667%}.column.is-offset-5,.column.is-offset-5-tablet{margin-left:41.66667%}.column.is-6,.column.is-6-tablet{flex:none;width:50%}.column.is-offset-6,.column.is-offset-6-tablet{margin-left:50%}.column.is-7,.column.is-7-tablet{flex:none;width:58.33333%}.column.is-offset-7,.column.is-offset-7-tablet{margin-left:58.33333%}.column.is-8,.column.is-8-tablet{flex:none;width:66.66667%}.column.is-offset-8,.column.is-offset-8-tablet{margin-left:66.66667%}.column.is-9,.column.is-9-tablet{flex:none;width:75%}.column.is-offset-9,.column.is-offset-9-tablet{margin-left:75%}.column.is-10,.column.is-10-tablet{flex:none;width:83.33333%}.column.is-offset-10,.column.is-offset-10-tablet{margin-left:83.33333%}.column.is-11,.column.is-11-tablet{flex:none;width:91.66667%}.column.is-offset-11,.column.is-offset-11-tablet{margin-left:91.66667%}.column.is-12,.column.is-12-tablet{flex:none;width:100%}.column.is-offset-12,.column.is-offset-12-tablet{margin-left:100%}}@media screen and 
(max-width:1023px){.column.is-narrow-touch{flex:none}.column.is-full-touch{flex:none;width:100%}.column.is-three-quarters-touch{flex:none;width:75%}.column.is-two-thirds-touch{flex:none;width:66.6666%}.column.is-half-touch{flex:none;width:50%}.column.is-one-third-touch{flex:none;width:33.3333%}.column.is-one-quarter-touch{flex:none;width:25%}.column.is-one-fifth-touch{flex:none;width:20%}.column.is-two-fifths-touch{flex:none;width:40%}.column.is-three-fifths-touch{flex:none;width:60%}.column.is-four-fifths-touch{flex:none;width:80%}.column.is-offset-three-quarters-touch{margin-left:75%}.column.is-offset-two-thirds-touch{margin-left:66.6666%}.column.is-offset-half-touch{margin-left:50%}.column.is-offset-one-third-touch{margin-left:33.3333%}.column.is-offset-one-quarter-touch{margin-left:25%}.column.is-offset-one-fifth-touch{margin-left:20%}.column.is-offset-two-fifths-touch{margin-left:40%}.column.is-offset-three-fifths-touch{margin-left:60%}.column.is-offset-four-fifths-touch{margin-left:80%}.column.is-0-touch{flex:none;width:0%}.column.is-offset-0-touch{margin-left:0}.column.is-1-touch{flex:none;width:8.33333%}.column.is-offset-1-touch{margin-left:8.33333%}.column.is-2-touch{flex:none;width:16.66667%}.column.is-offset-2-touch{margin-left:16.66667%}.column.is-3-touch{flex:none;width:25%}.column.is-offset-3-touch{margin-left:25%}.column.is-4-touch{flex:none;width:33.33333%}.column.is-offset-4-touch{margin-left:33.33333%}.column.is-5-touch{flex:none;width:41.66667%}.column.is-offset-5-touch{margin-left:41.66667%}.column.is-6-touch{flex:none;width:50%}.column.is-offset-6-touch{margin-left:50%}.column.is-7-touch{flex:none;width:58.33333%}.column.is-offset-7-touch{margin-left:58.33333%}.column.is-8-touch{flex:none;width:66.66667%}.column.is-offset-8-touch{margin-left:66.66667%}.column.is-9-touch{flex:none;width:75%}.column.is-offset-9-touch{margin-left:75%}.column.is-10-touch{flex:none;width:83.33333%}.column.is-offset-10-touch{margin-left:83.33333%}.column.is-11-touch{flex:none;width:91.66667%}.column.is-offset-11-touch{margin-left:91.66667%}.column.is-12-touch{flex:none;width:100%}.column.is-offset-12-touch{margin-left:100%}}@media screen and 
(min-width:1024px){.column.is-narrow-desktop{flex:none}.column.is-full-desktop{flex:none;width:100%}.column.is-three-quarters-desktop{flex:none;width:75%}.column.is-two-thirds-desktop{flex:none;width:66.6666%}.column.is-half-desktop{flex:none;width:50%}.column.is-one-third-desktop{flex:none;width:33.3333%}.column.is-one-quarter-desktop{flex:none;width:25%}.column.is-one-fifth-desktop{flex:none;width:20%}.column.is-two-fifths-desktop{flex:none;width:40%}.column.is-three-fifths-desktop{flex:none;width:60%}.column.is-four-fifths-desktop{flex:none;width:80%}.column.is-offset-three-quarters-desktop{margin-left:75%}.column.is-offset-two-thirds-desktop{margin-left:66.6666%}.column.is-offset-half-desktop{margin-left:50%}.column.is-offset-one-third-desktop{margin-left:33.3333%}.column.is-offset-one-quarter-desktop{margin-left:25%}.column.is-offset-one-fifth-desktop{margin-left:20%}.column.is-offset-two-fifths-desktop{margin-left:40%}.column.is-offset-three-fifths-desktop{margin-left:60%}.column.is-offset-four-fifths-desktop{margin-left:80%}.column.is-0-desktop{flex:none;width:0%}.column.is-offset-0-desktop{margin-left:0}.column.is-1-desktop{flex:none;width:8.33333%}.column.is-offset-1-desktop{margin-left:8.33333%}.column.is-2-desktop{flex:none;width:16.66667%}.column.is-offset-2-desktop{margin-left:16.66667%}.column.is-3-desktop{flex:none;width:25%}.column.is-offset-3-desktop{margin-left:25%}.column.is-4-desktop{flex:none;width:33.33333%}.column.is-offset-4-desktop{margin-left:33.33333%}.column.is-5-desktop{flex:none;width:41.66667%}.column.is-offset-5-desktop{margin-left:41.66667%}.column.is-6-desktop{flex:none;width:50%}.column.is-offset-6-desktop{margin-left:50%}.column.is-7-desktop{flex:none;width:58.33333%}.column.is-offset-7-desktop{margin-left:58.33333%}.column.is-8-desktop{flex:none;width:66.66667%}.column.is-offset-8-desktop{margin-left:66.66667%}.column.is-9-desktop{flex:none;width:75%}.column.is-offset-9-desktop{margin-left:75%}.column.is-10-desktop{flex:none;width:83.33333%}.column.is-offset-10-desktop{margin-left:83.33333%}.column.is-11-desktop{flex:none;width:91.66667%}.column.is-offset-11-desktop{margin-left:91.66667%}.column.is-12-desktop{flex:none;width:100%}.column.is-offset-12-desktop{margin-left:100%}}@media screen and 
(min-width:1216px){.column.is-narrow-widescreen{flex:none}.column.is-full-widescreen{flex:none;width:100%}.column.is-three-quarters-widescreen{flex:none;width:75%}.column.is-two-thirds-widescreen{flex:none;width:66.6666%}.column.is-half-widescreen{flex:none;width:50%}.column.is-one-third-widescreen{flex:none;width:33.3333%}.column.is-one-quarter-widescreen{flex:none;width:25%}.column.is-one-fifth-widescreen{flex:none;width:20%}.column.is-two-fifths-widescreen{flex:none;width:40%}.column.is-three-fifths-widescreen{flex:none;width:60%}.column.is-four-fifths-widescreen{flex:none;width:80%}.column.is-offset-three-quarters-widescreen{margin-left:75%}.column.is-offset-two-thirds-widescreen{margin-left:66.6666%}.column.is-offset-half-widescreen{margin-left:50%}.column.is-offset-one-third-widescreen{margin-left:33.3333%}.column.is-offset-one-quarter-widescreen{margin-left:25%}.column.is-offset-one-fifth-widescreen{margin-left:20%}.column.is-offset-two-fifths-widescreen{margin-left:40%}.column.is-offset-three-fifths-widescreen{margin-left:60%}.column.is-offset-four-fifths-widescreen{margin-left:80%}.column.is-0-widescreen{flex:none;width:0%}.column.is-offset-0-widescreen{margin-left:0}.column.is-1-widescreen{flex:none;width:8.33333%}.column.is-offset-1-widescreen{margin-left:8.33333%}.column.is-2-widescreen{flex:none;width:16.66667%}.column.is-offset-2-widescreen{margin-left:16.66667%}.column.is-3-widescreen{flex:none;width:25%}.column.is-offset-3-widescreen{margin-left:25%}.column.is-4-widescreen{flex:none;width:33.33333%}.column.is-offset-4-widescreen{margin-left:33.33333%}.column.is-5-widescreen{flex:none;width:41.66667%}.column.is-offset-5-widescreen{margin-left:41.66667%}.column.is-6-widescreen{flex:none;width:50%}.column.is-offset-6-widescreen{margin-left:50%}.column.is-7-widescreen{flex:none;width:58.33333%}.column.is-offset-7-widescreen{margin-left:58.33333%}.column.is-8-widescreen{flex:none;width:66.66667%}.column.is-offset-8-widescreen{margin-left:66.66667%}.column.is-9-widescreen{flex:none;width:75%}.column.is-offset-9-widescreen{margin-left:75%}.column.is-10-widescreen{flex:none;width:83.33333%}.column.is-offset-10-widescreen{margin-left:83.33333%}.column.is-11-widescreen{flex:none;width:91.66667%}.column.is-offset-11-widescreen{margin-left:91.66667%}.column.is-12-widescreen{flex:none;width:100%}.column.is-offset-12-widescreen{margin-left:100%}}@media screen and 
(min-width:1408px){.column.is-narrow-fullhd{flex:none}.column.is-full-fullhd{flex:none;width:100%}.column.is-three-quarters-fullhd{flex:none;width:75%}.column.is-two-thirds-fullhd{flex:none;width:66.6666%}.column.is-half-fullhd{flex:none;width:50%}.column.is-one-third-fullhd{flex:none;width:33.3333%}.column.is-one-quarter-fullhd{flex:none;width:25%}.column.is-one-fifth-fullhd{flex:none;width:20%}.column.is-two-fifths-fullhd{flex:none;width:40%}.column.is-three-fifths-fullhd{flex:none;width:60%}.column.is-four-fifths-fullhd{flex:none;width:80%}.column.is-offset-three-quarters-fullhd{margin-left:75%}.column.is-offset-two-thirds-fullhd{margin-left:66.6666%}.column.is-offset-half-fullhd{margin-left:50%}.column.is-offset-one-third-fullhd{margin-left:33.3333%}.column.is-offset-one-quarter-fullhd{margin-left:25%}.column.is-offset-one-fifth-fullhd{margin-left:20%}.column.is-offset-two-fifths-fullhd{margin-left:40%}.column.is-offset-three-fifths-fullhd{margin-left:60%}.column.is-offset-four-fifths-fullhd{margin-left:80%}.column.is-0-fullhd{flex:none;width:0%}.column.is-offset-0-fullhd{margin-left:0}.column.is-1-fullhd{flex:none;width:8.33333%}.column.is-offset-1-fullhd{margin-left:8.33333%}.column.is-2-fullhd{flex:none;width:16.66667%}.column.is-offset-2-fullhd{margin-left:16.66667%}.column.is-3-fullhd{flex:none;width:25%}.column.is-offset-3-fullhd{margin-left:25%}.column.is-4-fullhd{flex:none;width:33.33333%}.column.is-offset-4-fullhd{margin-left:33.33333%}.column.is-5-fullhd{flex:none;width:41.66667%}.column.is-offset-5-fullhd{margin-left:41.66667%}.column.is-6-fullhd{flex:none;width:50%}.column.is-offset-6-fullhd{margin-left:50%}.column.is-7-fullhd{flex:none;width:58.33333%}.column.is-offset-7-fullhd{margin-left:58.33333%}.column.is-8-fullhd{flex:none;width:66.66667%}.column.is-offset-8-fullhd{margin-left:66.66667%}.column.is-9-fullhd{flex:none;width:75%}.column.is-offset-9-fullhd{margin-left:75%}.column.is-10-fullhd{flex:none;width:83.33333%}.column.is-offset-10-fullhd{margin-left:83.33333%}.column.is-11-fullhd{flex:none;width:91.66667%}.column.is-offset-11-fullhd{margin-left:91.66667%}.column.is-12-fullhd{flex:none;width:100%}.column.is-offset-12-fullhd{margin-left:100%}}.columns{margin-left:-.75rem;margin-right:-.75rem;margin-top:-.75rem}.columns:last-child{margin-bottom:-.75rem}.columns:not(:last-child){margin-bottom:calc(1.5rem - .75rem)}.columns.is-centered{justify-content:center}.columns.is-gapless{margin-left:0;margin-right:0;margin-top:0}.columns.is-gapless>.column{margin:0;padding:0!important}.columns.is-gapless:not(:last-child){margin-bottom:1.5rem}.columns.is-gapless:last-child{margin-bottom:0}.columns.is-mobile{display:flex}.columns.is-multiline{flex-wrap:wrap}.columns.is-vcentered{align-items:center}@media screen and (min-width:769px),print{.columns:not(.is-desktop){display:flex}}@media screen and (min-width:1024px){.columns.is-desktop{display:flex}}.columns.is-variable{--columnGap:0.75rem;margin-left:calc(-1 * var(--columnGap));margin-right:calc(-1 * var(--columnGap))}.columns.is-variable .column{padding-left:var(--columnGap);padding-right:var(--columnGap)}.columns.is-variable.is-0{--columnGap:0rem}@media screen and (max-width:768px){.columns.is-variable.is-0-mobile{--columnGap:0rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-0-tablet{--columnGap:0rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-0-tablet-only{--columnGap:0rem}}@media screen and (max-width:1023px){.columns.is-variable.is-0-touch{--columnGap:0rem}}@media 
screen and (min-width:1024px){.columns.is-variable.is-0-desktop{--columnGap:0rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-0-desktop-only{--columnGap:0rem}}@media screen and (min-width:1216px){.columns.is-variable.is-0-widescreen{--columnGap:0rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-0-widescreen-only{--columnGap:0rem}}@media screen and (min-width:1408px){.columns.is-variable.is-0-fullhd{--columnGap:0rem}}.columns.is-variable.is-1{--columnGap:0.25rem}@media screen and (max-width:768px){.columns.is-variable.is-1-mobile{--columnGap:0.25rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-1-tablet{--columnGap:0.25rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-1-tablet-only{--columnGap:0.25rem}}@media screen and (max-width:1023px){.columns.is-variable.is-1-touch{--columnGap:0.25rem}}@media screen and (min-width:1024px){.columns.is-variable.is-1-desktop{--columnGap:0.25rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-1-desktop-only{--columnGap:0.25rem}}@media screen and (min-width:1216px){.columns.is-variable.is-1-widescreen{--columnGap:0.25rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-1-widescreen-only{--columnGap:0.25rem}}@media screen and (min-width:1408px){.columns.is-variable.is-1-fullhd{--columnGap:0.25rem}}.columns.is-variable.is-2{--columnGap:0.5rem}@media screen and (max-width:768px){.columns.is-variable.is-2-mobile{--columnGap:0.5rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-2-tablet{--columnGap:0.5rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-2-tablet-only{--columnGap:0.5rem}}@media screen and (max-width:1023px){.columns.is-variable.is-2-touch{--columnGap:0.5rem}}@media screen and (min-width:1024px){.columns.is-variable.is-2-desktop{--columnGap:0.5rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-2-desktop-only{--columnGap:0.5rem}}@media screen and (min-width:1216px){.columns.is-variable.is-2-widescreen{--columnGap:0.5rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-2-widescreen-only{--columnGap:0.5rem}}@media screen and (min-width:1408px){.columns.is-variable.is-2-fullhd{--columnGap:0.5rem}}.columns.is-variable.is-3{--columnGap:0.75rem}@media screen and (max-width:768px){.columns.is-variable.is-3-mobile{--columnGap:0.75rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-3-tablet{--columnGap:0.75rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-3-tablet-only{--columnGap:0.75rem}}@media screen and (max-width:1023px){.columns.is-variable.is-3-touch{--columnGap:0.75rem}}@media screen and (min-width:1024px){.columns.is-variable.is-3-desktop{--columnGap:0.75rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-3-desktop-only{--columnGap:0.75rem}}@media screen and (min-width:1216px){.columns.is-variable.is-3-widescreen{--columnGap:0.75rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-3-widescreen-only{--columnGap:0.75rem}}@media screen and (min-width:1408px){.columns.is-variable.is-3-fullhd{--columnGap:0.75rem}}.columns.is-variable.is-4{--columnGap:1rem}@media screen and (max-width:768px){.columns.is-variable.is-4-mobile{--columnGap:1rem}}@media screen and 
(min-width:769px),print{.columns.is-variable.is-4-tablet{--columnGap:1rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-4-tablet-only{--columnGap:1rem}}@media screen and (max-width:1023px){.columns.is-variable.is-4-touch{--columnGap:1rem}}@media screen and (min-width:1024px){.columns.is-variable.is-4-desktop{--columnGap:1rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-4-desktop-only{--columnGap:1rem}}@media screen and (min-width:1216px){.columns.is-variable.is-4-widescreen{--columnGap:1rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-4-widescreen-only{--columnGap:1rem}}@media screen and (min-width:1408px){.columns.is-variable.is-4-fullhd{--columnGap:1rem}}.columns.is-variable.is-5{--columnGap:1.25rem}@media screen and (max-width:768px){.columns.is-variable.is-5-mobile{--columnGap:1.25rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-5-tablet{--columnGap:1.25rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-5-tablet-only{--columnGap:1.25rem}}@media screen and (max-width:1023px){.columns.is-variable.is-5-touch{--columnGap:1.25rem}}@media screen and (min-width:1024px){.columns.is-variable.is-5-desktop{--columnGap:1.25rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-5-desktop-only{--columnGap:1.25rem}}@media screen and (min-width:1216px){.columns.is-variable.is-5-widescreen{--columnGap:1.25rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-5-widescreen-only{--columnGap:1.25rem}}@media screen and (min-width:1408px){.columns.is-variable.is-5-fullhd{--columnGap:1.25rem}}.columns.is-variable.is-6{--columnGap:1.5rem}@media screen and (max-width:768px){.columns.is-variable.is-6-mobile{--columnGap:1.5rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-6-tablet{--columnGap:1.5rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-6-tablet-only{--columnGap:1.5rem}}@media screen and (max-width:1023px){.columns.is-variable.is-6-touch{--columnGap:1.5rem}}@media screen and (min-width:1024px){.columns.is-variable.is-6-desktop{--columnGap:1.5rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-6-desktop-only{--columnGap:1.5rem}}@media screen and (min-width:1216px){.columns.is-variable.is-6-widescreen{--columnGap:1.5rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-6-widescreen-only{--columnGap:1.5rem}}@media screen and (min-width:1408px){.columns.is-variable.is-6-fullhd{--columnGap:1.5rem}}.columns.is-variable.is-7{--columnGap:1.75rem}@media screen and (max-width:768px){.columns.is-variable.is-7-mobile{--columnGap:1.75rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-7-tablet{--columnGap:1.75rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-7-tablet-only{--columnGap:1.75rem}}@media screen and (max-width:1023px){.columns.is-variable.is-7-touch{--columnGap:1.75rem}}@media screen and (min-width:1024px){.columns.is-variable.is-7-desktop{--columnGap:1.75rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-7-desktop-only{--columnGap:1.75rem}}@media screen and (min-width:1216px){.columns.is-variable.is-7-widescreen{--columnGap:1.75rem}}@media screen and (min-width:1216px) and 
(max-width:1407px){.columns.is-variable.is-7-widescreen-only{--columnGap:1.75rem}}@media screen and (min-width:1408px){.columns.is-variable.is-7-fullhd{--columnGap:1.75rem}}.columns.is-variable.is-8{--columnGap:2rem}@media screen and (max-width:768px){.columns.is-variable.is-8-mobile{--columnGap:2rem}}@media screen and (min-width:769px),print{.columns.is-variable.is-8-tablet{--columnGap:2rem}}@media screen and (min-width:769px) and (max-width:1023px){.columns.is-variable.is-8-tablet-only{--columnGap:2rem}}@media screen and (max-width:1023px){.columns.is-variable.is-8-touch{--columnGap:2rem}}@media screen and (min-width:1024px){.columns.is-variable.is-8-desktop{--columnGap:2rem}}@media screen and (min-width:1024px) and (max-width:1215px){.columns.is-variable.is-8-desktop-only{--columnGap:2rem}}@media screen and (min-width:1216px){.columns.is-variable.is-8-widescreen{--columnGap:2rem}}@media screen and (min-width:1216px) and (max-width:1407px){.columns.is-variable.is-8-widescreen-only{--columnGap:2rem}}@media screen and (min-width:1408px){.columns.is-variable.is-8-fullhd{--columnGap:2rem}}.tile{align-items:stretch;display:block;flex-basis:0;flex-grow:1;flex-shrink:1;min-height:-webkit-min-content;min-height:-moz-min-content;min-height:min-content}.tile.is-ancestor{margin-left:-.75rem;margin-right:-.75rem;margin-top:-.75rem}.tile.is-ancestor:last-child{margin-bottom:-.75rem}.tile.is-ancestor:not(:last-child){margin-bottom:.75rem}.tile.is-child{margin:0!important}.tile.is-parent{padding:.75rem}.tile.is-vertical{flex-direction:column}.tile.is-vertical>.tile.is-child:not(:last-child){margin-bottom:1.5rem!important}@media screen and (min-width:769px),print{.tile:not(.is-child){display:flex}.tile.is-1{flex:none;width:8.33333%}.tile.is-2{flex:none;width:16.66667%}.tile.is-3{flex:none;width:25%}.tile.is-4{flex:none;width:33.33333%}.tile.is-5{flex:none;width:41.66667%}.tile.is-6{flex:none;width:50%}.tile.is-7{flex:none;width:58.33333%}.tile.is-8{flex:none;width:66.66667%}.tile.is-9{flex:none;width:75%}.tile.is-10{flex:none;width:83.33333%}.tile.is-11{flex:none;width:91.66667%}.tile.is-12{flex:none;width:100%}}.hero{align-items:stretch;display:flex;flex-direction:column;justify-content:space-between}.hero .navbar{background:0 0}.hero .tabs ul{border-bottom:none}.hero.is-white{background-color:#fff;color:#0a0a0a}.hero.is-white a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-white strong{color:inherit}.hero.is-white .title{color:#0a0a0a}.hero.is-white .subtitle{color:rgba(10,10,10,.9)}.hero.is-white .subtitle a:not(.button),.hero.is-white .subtitle strong{color:#0a0a0a}@media screen and (max-width:1023px){.hero.is-white .navbar-menu{background-color:#fff}}.hero.is-white .navbar-item,.hero.is-white .navbar-link{color:rgba(10,10,10,.7)}.hero.is-white .navbar-link.is-active,.hero.is-white .navbar-link:hover,.hero.is-white a.navbar-item.is-active,.hero.is-white a.navbar-item:hover{background-color:#f2f2f2;color:#0a0a0a}.hero.is-white .tabs a{color:#0a0a0a;opacity:.9}.hero.is-white .tabs a:hover{opacity:1}.hero.is-white .tabs li.is-active a{opacity:1}.hero.is-white .tabs.is-boxed a,.hero.is-white .tabs.is-toggle a{color:#0a0a0a}.hero.is-white .tabs.is-boxed a:hover,.hero.is-white .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-white .tabs.is-boxed li.is-active a,.hero.is-white .tabs.is-boxed li.is-active a:hover,.hero.is-white .tabs.is-toggle li.is-active a,.hero.is-white .tabs.is-toggle li.is-active 
a:hover{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}.hero.is-white.is-bold{background-image:linear-gradient(141deg,#e6e6e6 0,#fff 71%,#fff 100%)}@media screen and (max-width:768px){.hero.is-white.is-bold .navbar-menu{background-image:linear-gradient(141deg,#e6e6e6 0,#fff 71%,#fff 100%)}}.hero.is-black{background-color:#0a0a0a;color:#fff}.hero.is-black a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-black strong{color:inherit}.hero.is-black .title{color:#fff}.hero.is-black .subtitle{color:rgba(255,255,255,.9)}.hero.is-black .subtitle a:not(.button),.hero.is-black .subtitle strong{color:#fff}@media screen and (max-width:1023px){.hero.is-black .navbar-menu{background-color:#0a0a0a}}.hero.is-black .navbar-item,.hero.is-black .navbar-link{color:rgba(255,255,255,.7)}.hero.is-black .navbar-link.is-active,.hero.is-black .navbar-link:hover,.hero.is-black a.navbar-item.is-active,.hero.is-black a.navbar-item:hover{background-color:#000;color:#fff}.hero.is-black .tabs a{color:#fff;opacity:.9}.hero.is-black .tabs a:hover{opacity:1}.hero.is-black .tabs li.is-active a{opacity:1}.hero.is-black .tabs.is-boxed a,.hero.is-black .tabs.is-toggle a{color:#fff}.hero.is-black .tabs.is-boxed a:hover,.hero.is-black .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-black .tabs.is-boxed li.is-active a,.hero.is-black .tabs.is-boxed li.is-active a:hover,.hero.is-black .tabs.is-toggle li.is-active a,.hero.is-black .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#0a0a0a}.hero.is-black.is-bold{background-image:linear-gradient(141deg,#000 0,#0a0a0a 71%,#181616 100%)}@media screen and (max-width:768px){.hero.is-black.is-bold .navbar-menu{background-image:linear-gradient(141deg,#000 0,#0a0a0a 71%,#181616 100%)}}.hero.is-light{background-color:#f5f5f5;color:#363636}.hero.is-light a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-light strong{color:inherit}.hero.is-light .title{color:#363636}.hero.is-light .subtitle{color:rgba(54,54,54,.9)}.hero.is-light .subtitle a:not(.button),.hero.is-light .subtitle strong{color:#363636}@media screen and (max-width:1023px){.hero.is-light .navbar-menu{background-color:#f5f5f5}}.hero.is-light .navbar-item,.hero.is-light .navbar-link{color:rgba(54,54,54,.7)}.hero.is-light .navbar-link.is-active,.hero.is-light .navbar-link:hover,.hero.is-light a.navbar-item.is-active,.hero.is-light a.navbar-item:hover{background-color:#e8e8e8;color:#363636}.hero.is-light .tabs a{color:#363636;opacity:.9}.hero.is-light .tabs a:hover{opacity:1}.hero.is-light .tabs li.is-active a{opacity:1}.hero.is-light .tabs.is-boxed a,.hero.is-light .tabs.is-toggle a{color:#363636}.hero.is-light .tabs.is-boxed a:hover,.hero.is-light .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-light .tabs.is-boxed li.is-active a,.hero.is-light .tabs.is-boxed li.is-active a:hover,.hero.is-light .tabs.is-toggle li.is-active a,.hero.is-light .tabs.is-toggle li.is-active a:hover{background-color:#363636;border-color:#363636;color:#f5f5f5}.hero.is-light.is-bold{background-image:linear-gradient(141deg,#dfd8d9 0,#f5f5f5 71%,#fff 100%)}@media screen and (max-width:768px){.hero.is-light.is-bold .navbar-menu{background-image:linear-gradient(141deg,#dfd8d9 0,#f5f5f5 71%,#fff 100%)}}.hero.is-dark{background-color:#363636;color:#f5f5f5}.hero.is-dark a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-dark strong{color:inherit}.hero.is-dark 
.title{color:#f5f5f5}.hero.is-dark .subtitle{color:rgba(245,245,245,.9)}.hero.is-dark .subtitle a:not(.button),.hero.is-dark .subtitle strong{color:#f5f5f5}@media screen and (max-width:1023px){.hero.is-dark .navbar-menu{background-color:#363636}}.hero.is-dark .navbar-item,.hero.is-dark .navbar-link{color:rgba(245,245,245,.7)}.hero.is-dark .navbar-link.is-active,.hero.is-dark .navbar-link:hover,.hero.is-dark a.navbar-item.is-active,.hero.is-dark a.navbar-item:hover{background-color:#292929;color:#f5f5f5}.hero.is-dark .tabs a{color:#f5f5f5;opacity:.9}.hero.is-dark .tabs a:hover{opacity:1}.hero.is-dark .tabs li.is-active a{opacity:1}.hero.is-dark .tabs.is-boxed a,.hero.is-dark .tabs.is-toggle a{color:#f5f5f5}.hero.is-dark .tabs.is-boxed a:hover,.hero.is-dark .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-dark .tabs.is-boxed li.is-active a,.hero.is-dark .tabs.is-boxed li.is-active a:hover,.hero.is-dark .tabs.is-toggle li.is-active a,.hero.is-dark .tabs.is-toggle li.is-active a:hover{background-color:#f5f5f5;border-color:#f5f5f5;color:#363636}.hero.is-dark.is-bold{background-image:linear-gradient(141deg,#1f191a 0,#363636 71%,#46403f 100%)}@media screen and (max-width:768px){.hero.is-dark.is-bold .navbar-menu{background-image:linear-gradient(141deg,#1f191a 0,#363636 71%,#46403f 100%)}}.hero.is-primary{background-color:#00d1b2;color:#fff}.hero.is-primary a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-primary strong{color:inherit}.hero.is-primary .title{color:#fff}.hero.is-primary .subtitle{color:rgba(255,255,255,.9)}.hero.is-primary .subtitle a:not(.button),.hero.is-primary .subtitle strong{color:#fff}@media screen and (max-width:1023px){.hero.is-primary .navbar-menu{background-color:#00d1b2}}.hero.is-primary .navbar-item,.hero.is-primary .navbar-link{color:rgba(255,255,255,.7)}.hero.is-primary .navbar-link.is-active,.hero.is-primary .navbar-link:hover,.hero.is-primary a.navbar-item.is-active,.hero.is-primary a.navbar-item:hover{background-color:#00b89c;color:#fff}.hero.is-primary .tabs a{color:#fff;opacity:.9}.hero.is-primary .tabs a:hover{opacity:1}.hero.is-primary .tabs li.is-active a{opacity:1}.hero.is-primary .tabs.is-boxed a,.hero.is-primary .tabs.is-toggle a{color:#fff}.hero.is-primary .tabs.is-boxed a:hover,.hero.is-primary .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-primary .tabs.is-boxed li.is-active a,.hero.is-primary .tabs.is-boxed li.is-active a:hover,.hero.is-primary .tabs.is-toggle li.is-active a,.hero.is-primary .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#00d1b2}.hero.is-primary.is-bold{background-image:linear-gradient(141deg,#009e6c 0,#00d1b2 71%,#00e7eb 100%)}@media screen and (max-width:768px){.hero.is-primary.is-bold .navbar-menu{background-image:linear-gradient(141deg,#009e6c 0,#00d1b2 71%,#00e7eb 100%)}}.hero.is-link{background-color:#3273dc;color:#fff}.hero.is-link a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-link strong{color:inherit}.hero.is-link .title{color:#fff}.hero.is-link .subtitle{color:rgba(255,255,255,.9)}.hero.is-link .subtitle a:not(.button),.hero.is-link .subtitle strong{color:#fff}@media screen and (max-width:1023px){.hero.is-link .navbar-menu{background-color:#3273dc}}.hero.is-link .navbar-item,.hero.is-link .navbar-link{color:rgba(255,255,255,.7)}.hero.is-link .navbar-link.is-active,.hero.is-link .navbar-link:hover,.hero.is-link a.navbar-item.is-active,.hero.is-link 
a.navbar-item:hover{background-color:#2366d1;color:#fff}.hero.is-link .tabs a{color:#fff;opacity:.9}.hero.is-link .tabs a:hover{opacity:1}.hero.is-link .tabs li.is-active a{opacity:1}.hero.is-link .tabs.is-boxed a,.hero.is-link .tabs.is-toggle a{color:#fff}.hero.is-link .tabs.is-boxed a:hover,.hero.is-link .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-link .tabs.is-boxed li.is-active a,.hero.is-link .tabs.is-boxed li.is-active a:hover,.hero.is-link .tabs.is-toggle li.is-active a,.hero.is-link .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#3273dc}.hero.is-link.is-bold{background-image:linear-gradient(141deg,#1577c6 0,#3273dc 71%,#4366e5 100%)}@media screen and (max-width:768px){.hero.is-link.is-bold .navbar-menu{background-image:linear-gradient(141deg,#1577c6 0,#3273dc 71%,#4366e5 100%)}}.hero.is-info{background-color:#209cee;color:#fff}.hero.is-info a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-info strong{color:inherit}.hero.is-info .title{color:#fff}.hero.is-info .subtitle{color:rgba(255,255,255,.9)}.hero.is-info .subtitle a:not(.button),.hero.is-info .subtitle strong{color:#fff}@media screen and (max-width:1023px){.hero.is-info .navbar-menu{background-color:#209cee}}.hero.is-info .navbar-item,.hero.is-info .navbar-link{color:rgba(255,255,255,.7)}.hero.is-info .navbar-link.is-active,.hero.is-info .navbar-link:hover,.hero.is-info a.navbar-item.is-active,.hero.is-info a.navbar-item:hover{background-color:#118fe4;color:#fff}.hero.is-info .tabs a{color:#fff;opacity:.9}.hero.is-info .tabs a:hover{opacity:1}.hero.is-info .tabs li.is-active a{opacity:1}.hero.is-info .tabs.is-boxed a,.hero.is-info .tabs.is-toggle a{color:#fff}.hero.is-info .tabs.is-boxed a:hover,.hero.is-info .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-info .tabs.is-boxed li.is-active a,.hero.is-info .tabs.is-boxed li.is-active a:hover,.hero.is-info .tabs.is-toggle li.is-active a,.hero.is-info .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#209cee}.hero.is-info.is-bold{background-image:linear-gradient(141deg,#04a6d7 0,#209cee 71%,#3287f5 100%)}@media screen and (max-width:768px){.hero.is-info.is-bold .navbar-menu{background-image:linear-gradient(141deg,#04a6d7 0,#209cee 71%,#3287f5 100%)}}.hero.is-success{background-color:#23d160;color:#fff}.hero.is-success a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-success strong{color:inherit}.hero.is-success .title{color:#fff}.hero.is-success .subtitle{color:rgba(255,255,255,.9)}.hero.is-success .subtitle a:not(.button),.hero.is-success .subtitle strong{color:#fff}@media screen and (max-width:1023px){.hero.is-success .navbar-menu{background-color:#23d160}}.hero.is-success .navbar-item,.hero.is-success .navbar-link{color:rgba(255,255,255,.7)}.hero.is-success .navbar-link.is-active,.hero.is-success .navbar-link:hover,.hero.is-success a.navbar-item.is-active,.hero.is-success a.navbar-item:hover{background-color:#20bc56;color:#fff}.hero.is-success .tabs a{color:#fff;opacity:.9}.hero.is-success .tabs a:hover{opacity:1}.hero.is-success .tabs li.is-active a{opacity:1}.hero.is-success .tabs.is-boxed a,.hero.is-success .tabs.is-toggle a{color:#fff}.hero.is-success .tabs.is-boxed a:hover,.hero.is-success .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-success .tabs.is-boxed li.is-active a,.hero.is-success .tabs.is-boxed li.is-active a:hover,.hero.is-success .tabs.is-toggle 
li.is-active a,.hero.is-success .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#23d160}.hero.is-success.is-bold{background-image:linear-gradient(141deg,#12af2f 0,#23d160 71%,#2ce28a 100%)}@media screen and (max-width:768px){.hero.is-success.is-bold .navbar-menu{background-image:linear-gradient(141deg,#12af2f 0,#23d160 71%,#2ce28a 100%)}}.hero.is-warning{background-color:#ffdd57;color:rgba(0,0,0,.7)}.hero.is-warning a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-warning strong{color:inherit}.hero.is-warning .title{color:rgba(0,0,0,.7)}.hero.is-warning .subtitle{color:rgba(0,0,0,.9)}.hero.is-warning .subtitle a:not(.button),.hero.is-warning .subtitle strong{color:rgba(0,0,0,.7)}@media screen and (max-width:1023px){.hero.is-warning .navbar-menu{background-color:#ffdd57}}.hero.is-warning .navbar-item,.hero.is-warning .navbar-link{color:rgba(0,0,0,.7)}.hero.is-warning .navbar-link.is-active,.hero.is-warning .navbar-link:hover,.hero.is-warning a.navbar-item.is-active,.hero.is-warning a.navbar-item:hover{background-color:#ffd83d;color:rgba(0,0,0,.7)}.hero.is-warning .tabs a{color:rgba(0,0,0,.7);opacity:.9}.hero.is-warning .tabs a:hover{opacity:1}.hero.is-warning .tabs li.is-active a{opacity:1}.hero.is-warning .tabs.is-boxed a,.hero.is-warning .tabs.is-toggle a{color:rgba(0,0,0,.7)}.hero.is-warning .tabs.is-boxed a:hover,.hero.is-warning .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-warning .tabs.is-boxed li.is-active a,.hero.is-warning .tabs.is-boxed li.is-active a:hover,.hero.is-warning .tabs.is-toggle li.is-active a,.hero.is-warning .tabs.is-toggle li.is-active a:hover{background-color:rgba(0,0,0,.7);border-color:rgba(0,0,0,.7);color:#ffdd57}.hero.is-warning.is-bold{background-image:linear-gradient(141deg,#ffaf24 0,#ffdd57 71%,#fffa70 100%)}@media screen and (max-width:768px){.hero.is-warning.is-bold .navbar-menu{background-image:linear-gradient(141deg,#ffaf24 0,#ffdd57 71%,#fffa70 100%)}}.hero.is-danger{background-color:#ff3860;color:#fff}.hero.is-danger a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-danger strong{color:inherit}.hero.is-danger .title{color:#fff}.hero.is-danger .subtitle{color:rgba(255,255,255,.9)}.hero.is-danger .subtitle a:not(.button),.hero.is-danger .subtitle strong{color:#fff}@media screen and (max-width:1023px){.hero.is-danger .navbar-menu{background-color:#ff3860}}.hero.is-danger .navbar-item,.hero.is-danger .navbar-link{color:rgba(255,255,255,.7)}.hero.is-danger .navbar-link.is-active,.hero.is-danger .navbar-link:hover,.hero.is-danger a.navbar-item.is-active,.hero.is-danger a.navbar-item:hover{background-color:#ff1f4b;color:#fff}.hero.is-danger .tabs a{color:#fff;opacity:.9}.hero.is-danger .tabs a:hover{opacity:1}.hero.is-danger .tabs li.is-active a{opacity:1}.hero.is-danger .tabs.is-boxed a,.hero.is-danger .tabs.is-toggle a{color:#fff}.hero.is-danger .tabs.is-boxed a:hover,.hero.is-danger .tabs.is-toggle a:hover{background-color:rgba(10,10,10,.1)}.hero.is-danger .tabs.is-boxed li.is-active a,.hero.is-danger .tabs.is-boxed li.is-active a:hover,.hero.is-danger .tabs.is-toggle li.is-active a,.hero.is-danger .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#ff3860}.hero.is-danger.is-bold{background-image:linear-gradient(141deg,#ff0561 0,#ff3860 71%,#ff5257 100%)}@media screen and (max-width:768px){.hero.is-danger.is-bold .navbar-menu{background-image:linear-gradient(141deg,#ff0561 0,#ff3860 71%,#ff5257 
100%)}}.hero.is-small .hero-body{padding-bottom:1.5rem;padding-top:1.5rem}@media screen and (min-width:769px),print{.hero.is-medium .hero-body{padding-bottom:9rem;padding-top:9rem}}@media screen and (min-width:769px),print{.hero.is-large .hero-body{padding-bottom:18rem;padding-top:18rem}}.hero.is-fullheight .hero-body,.hero.is-fullheight-with-navbar .hero-body,.hero.is-halfheight .hero-body{align-items:center;display:flex}.hero.is-fullheight .hero-body>.container,.hero.is-fullheight-with-navbar .hero-body>.container,.hero.is-halfheight .hero-body>.container{flex-grow:1;flex-shrink:1}.hero.is-halfheight{min-height:50vh}.hero.is-fullheight{min-height:100vh}.hero-video{overflow:hidden}.hero-video video{left:50%;min-height:100%;min-width:100%;position:absolute;top:50%;-webkit-transform:translate3d(-50%,-50%,0);transform:translate3d(-50%,-50%,0)}.hero-video.is-transparent{opacity:.3}@media screen and (max-width:768px){.hero-video{display:none}}.hero-buttons{margin-top:1.5rem}@media screen and (max-width:768px){.hero-buttons .button{display:flex}.hero-buttons .button:not(:last-child){margin-bottom:.75rem}}@media screen and (min-width:769px),print{.hero-buttons{display:flex;justify-content:center}.hero-buttons .button:not(:last-child){margin-right:1.5rem}}.hero-foot,.hero-head{flex-grow:0;flex-shrink:0}.hero-body{flex-grow:1;flex-shrink:0;padding:3rem 1.5rem}.section{padding:3rem 1.5rem}@media screen and (min-width:1024px){.section.is-medium{padding:9rem 1.5rem}.section.is-large{padding:18rem 1.5rem}}.footer{background-color:#fafafa;padding:3rem 1.5rem 6rem} \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/www/css/fontawesome-5.7.2.min.css b/testing/web-platform/tests/tools/wave/www/css/fontawesome-5.7.2.min.css
new file mode 100644
index 0000000000..a74835e25b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/css/fontawesome-5.7.2.min.css
@@ -0,0 +1 @@
+.fa,.fab,.fal,.far,.fas{-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;display:inline-block;font-style:normal;font-variant:normal;text-rendering:auto;line-height:1}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-.0667em}.fa-xs{font-size:.75em}.fa-sm{font-size:.875em}.fa-1x{font-size:1em}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-6x{font-size:6em}.fa-7x{font-size:7em}.fa-8x{font-size:8em}.fa-9x{font-size:9em}.fa-10x{font-size:10em}.fa-fw{text-align:center;width:1.25em}.fa-ul{list-style-type:none;margin-left:2.5em;padding-left:0}.fa-ul>li{position:relative}.fa-li{left:-2em;position:absolute;text-align:center;width:2em;line-height:inherit}.fa-border{border:.08em solid #eee;border-radius:.1em;padding:.2em .25em .15em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa.fa-pull-left,.fab.fa-pull-left,.fal.fa-pull-left,.far.fa-pull-left,.fas.fa-pull-left{margin-right:.3em}.fa.fa-pull-right,.fab.fa-pull-right,.fal.fa-pull-right,.far.fa-pull-right,.fas.fa-pull-right{margin-left:.3em}.fa-spin{animation:fa-spin 2s infinite linear}.fa-pulse{animation:fa-spin 1s infinite steps(8)}@keyframes fa-spin{0%{transform:rotate(0deg)}to{transform:rotate(1turn)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";transform:scaleX(-1)}.fa-flip-vertical{transform:scaleY(-1)}.fa-flip-both,.fa-flip-horizontal.fa-flip-vertical,.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)"}.fa-flip-both,.fa-flip-horizontal.fa-flip-vertical{transform:scale(-1)}:root .fa-flip-both,:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root 
.fa-rotate-270{filter:none}.fa-stack{display:inline-block;height:2em;line-height:2em;position:relative;vertical-align:middle;width:2.5em}.fa-stack-1x,.fa-stack-2x{left:0;position:absolute;text-align:center;width:100%}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-500px:before{content:"\f26e"}.fa-accessible-icon:before{content:"\f368"}.fa-accusoft:before{content:"\f369"}.fa-acquisitions-incorporated:before{content:"\f6af"}.fa-ad:before{content:"\f641"}.fa-address-book:before{content:"\f2b9"}.fa-address-card:before{content:"\f2bb"}.fa-adjust:before{content:"\f042"}.fa-adn:before{content:"\f170"}.fa-adobe:before{content:"\f778"}.fa-adversal:before{content:"\f36a"}.fa-affiliatetheme:before{content:"\f36b"}.fa-air-freshener:before{content:"\f5d0"}.fa-algolia:before{content:"\f36c"}.fa-align-center:before{content:"\f037"}.fa-align-justify:before{content:"\f039"}.fa-align-left:before{content:"\f036"}.fa-align-right:before{content:"\f038"}.fa-alipay:before{content:"\f642"}.fa-allergies:before{content:"\f461"}.fa-amazon:before{content:"\f270"}.fa-amazon-pay:before{content:"\f42c"}.fa-ambulance:before{content:"\f0f9"}.fa-american-sign-language-interpreting:before{content:"\f2a3"}.fa-amilia:before{content:"\f36d"}.fa-anchor:before{content:"\f13d"}.fa-android:before{content:"\f17b"}.fa-angellist:before{content:"\f209"}.fa-angle-double-down:before{content:"\f103"}.fa-angle-double-left:before{content:"\f100"}.fa-angle-double-right:before{content:"\f101"}.fa-angle-double-up:before{content:"\f102"}.fa-angle-down:before{content:"\f107"}.fa-angle-left:before{content:"\f104"}.fa-angle-right:before{content:"\f105"}.fa-angle-up:before{content:"\f106"}.fa-angry:before{content:"\f556"}.fa-angrycreative:before{content:"\f36e"}.fa-angular:before{content:"\f420"}.fa-ankh:before{content:"\f644"}.fa-app-store:before{content:"\f36f"}.fa-app-store-ios:before{content:"\f370"}.fa-apper:before{content:"\f371"}.fa-apple:before{content:"\f179"}.fa-apple-alt:before{content:"\f5d1"}.fa-apple-pay:before{content:"\f415"}.fa-archive:before{content:"\f187"}.fa-archway:before{content:"\f557"}.fa-arrow-alt-circle-down:before{content:"\f358"}.fa-arrow-alt-circle-left:before{content:"\f359"}.fa-arrow-alt-circle-right:before{content:"\f35a"}.fa-arrow-alt-circle-up:before{content:"\f35b"}.fa-arrow-circle-down:before{content:"\f0ab"}.fa-arrow-circle-left:before{content:"\f0a8"}.fa-arrow-circle-right:before{content:"\f0a9"}.fa-arrow-circle-up:before{content:"\f0aa"}.fa-arrow-down:before{content:"\f063"}.fa-arrow-left:before{content:"\f060"}.fa-arrow-right:before{content:"\f061"}.fa-arrow-up:before{content:"\f062"}.fa-arrows-alt:before{content:"\f0b2"}.fa-arrows-alt-h:before{content:"\f337"}.fa-arrows-alt-v:before{content:"\f338"}.fa-artstation:before{content:"\f77a"}.fa-assistive-listening-systems:before{content:"\f2a2"}.fa-asterisk:before{content:"\f069"}.fa-asymmetrik:before{content:"\f372"}.fa-at:before{content:"\f1fa"}.fa-atlas:before{content:"\f558"}.fa-atlassian:before{content:"\f77b"}.fa-atom:before{content:"\f5d2"}.fa-audible:before{content:"\f373"}.fa-audio-description:before{content:"\f29e"}.fa-autoprefixer:before{content:"\f41c"}.fa-avianex:before{content:"\f374"}.fa-aviato:before{content:"\f421"}.fa-award:before{content:"\f559"}.fa-aws:before{content:"\f375"}.fa-baby:before{content:"\f77c"}.fa-baby-carriage:before{content:"\f77d"}.fa-backspace:before{content:"\f55a"}.fa-backward:before{content:"\f04a"}.fa-bacon:before{content:"\f7e5"}.fa-balance-scale:before{content:"\f24e"}.fa-ban:befor
e{content:"\f05e"}.fa-band-aid:before{content:"\f462"}.fa-bandcamp:before{content:"\f2d5"}.fa-barcode:before{content:"\f02a"}.fa-bars:before{content:"\f0c9"}.fa-baseball-ball:before{content:"\f433"}.fa-basketball-ball:before{content:"\f434"}.fa-bath:before{content:"\f2cd"}.fa-battery-empty:before{content:"\f244"}.fa-battery-full:before{content:"\f240"}.fa-battery-half:before{content:"\f242"}.fa-battery-quarter:before{content:"\f243"}.fa-battery-three-quarters:before{content:"\f241"}.fa-bed:before{content:"\f236"}.fa-beer:before{content:"\f0fc"}.fa-behance:before{content:"\f1b4"}.fa-behance-square:before{content:"\f1b5"}.fa-bell:before{content:"\f0f3"}.fa-bell-slash:before{content:"\f1f6"}.fa-bezier-curve:before{content:"\f55b"}.fa-bible:before{content:"\f647"}.fa-bicycle:before{content:"\f206"}.fa-bimobject:before{content:"\f378"}.fa-binoculars:before{content:"\f1e5"}.fa-biohazard:before{content:"\f780"}.fa-birthday-cake:before{content:"\f1fd"}.fa-bitbucket:before{content:"\f171"}.fa-bitcoin:before{content:"\f379"}.fa-bity:before{content:"\f37a"}.fa-black-tie:before{content:"\f27e"}.fa-blackberry:before{content:"\f37b"}.fa-blender:before{content:"\f517"}.fa-blender-phone:before{content:"\f6b6"}.fa-blind:before{content:"\f29d"}.fa-blog:before{content:"\f781"}.fa-blogger:before{content:"\f37c"}.fa-blogger-b:before{content:"\f37d"}.fa-bluetooth:before{content:"\f293"}.fa-bluetooth-b:before{content:"\f294"}.fa-bold:before{content:"\f032"}.fa-bolt:before{content:"\f0e7"}.fa-bomb:before{content:"\f1e2"}.fa-bone:before{content:"\f5d7"}.fa-bong:before{content:"\f55c"}.fa-book:before{content:"\f02d"}.fa-book-dead:before{content:"\f6b7"}.fa-book-medical:before{content:"\f7e6"}.fa-book-open:before{content:"\f518"}.fa-book-reader:before{content:"\f5da"}.fa-bookmark:before{content:"\f02e"}.fa-bowling-ball:before{content:"\f436"}.fa-box:before{content:"\f466"}.fa-box-open:before{content:"\f49e"}.fa-boxes:before{content:"\f468"}.fa-braille:before{content:"\f2a1"}.fa-brain:before{content:"\f5dc"}.fa-bread-slice:before{content:"\f7ec"}.fa-briefcase:before{content:"\f0b1"}.fa-briefcase-medical:before{content:"\f469"}.fa-broadcast-tower:before{content:"\f519"}.fa-broom:before{content:"\f51a"}.fa-brush:before{content:"\f55d"}.fa-btc:before{content:"\f15a"}.fa-bug:before{content:"\f188"}.fa-building:before{content:"\f1ad"}.fa-bullhorn:before{content:"\f0a1"}.fa-bullseye:before{content:"\f140"}.fa-burn:before{content:"\f46a"}.fa-buromobelexperte:before{content:"\f37f"}.fa-bus:before{content:"\f207"}.fa-bus-alt:before{content:"\f55e"}.fa-business-time:before{content:"\f64a"}.fa-buysellads:before{content:"\f20d"}.fa-calculator:before{content:"\f1ec"}.fa-calendar:before{content:"\f133"}.fa-calendar-alt:before{content:"\f073"}.fa-calendar-check:before{content:"\f274"}.fa-calendar-day:before{content:"\f783"}.fa-calendar-minus:before{content:"\f272"}.fa-calendar-plus:before{content:"\f271"}.fa-calendar-times:before{content:"\f273"}.fa-calendar-week:before{content:"\f784"}.fa-camera:before{content:"\f030"}.fa-camera-retro:before{content:"\f083"}.fa-campground:before{content:"\f6bb"}.fa-canadian-maple-leaf:before{content:"\f785"}.fa-candy-cane:before{content:"\f786"}.fa-cannabis:before{content:"\f55f"}.fa-capsules:before{content:"\f46b"}.fa-car:before{content:"\f1b9"}.fa-car-alt:before{content:"\f5de"}.fa-car-battery:before{content:"\f5df"}.fa-car-crash:before{content:"\f5e1"}.fa-car-side:before{content:"\f5e4"}.fa-caret-down:before{content:"\f0d7"}.fa-caret-left:before{content:"\f0d9"}.fa-caret-right:before{content:"\f
0da"}.fa-caret-square-down:before{content:"\f150"}.fa-caret-square-left:before{content:"\f191"}.fa-caret-square-right:before{content:"\f152"}.fa-caret-square-up:before{content:"\f151"}.fa-caret-up:before{content:"\f0d8"}.fa-carrot:before{content:"\f787"}.fa-cart-arrow-down:before{content:"\f218"}.fa-cart-plus:before{content:"\f217"}.fa-cash-register:before{content:"\f788"}.fa-cat:before{content:"\f6be"}.fa-cc-amazon-pay:before{content:"\f42d"}.fa-cc-amex:before{content:"\f1f3"}.fa-cc-apple-pay:before{content:"\f416"}.fa-cc-diners-club:before{content:"\f24c"}.fa-cc-discover:before{content:"\f1f2"}.fa-cc-jcb:before{content:"\f24b"}.fa-cc-mastercard:before{content:"\f1f1"}.fa-cc-paypal:before{content:"\f1f4"}.fa-cc-stripe:before{content:"\f1f5"}.fa-cc-visa:before{content:"\f1f0"}.fa-centercode:before{content:"\f380"}.fa-centos:before{content:"\f789"}.fa-certificate:before{content:"\f0a3"}.fa-chair:before{content:"\f6c0"}.fa-chalkboard:before{content:"\f51b"}.fa-chalkboard-teacher:before{content:"\f51c"}.fa-charging-station:before{content:"\f5e7"}.fa-chart-area:before{content:"\f1fe"}.fa-chart-bar:before{content:"\f080"}.fa-chart-line:before{content:"\f201"}.fa-chart-pie:before{content:"\f200"}.fa-check:before{content:"\f00c"}.fa-check-circle:before{content:"\f058"}.fa-check-double:before{content:"\f560"}.fa-check-square:before{content:"\f14a"}.fa-cheese:before{content:"\f7ef"}.fa-chess:before{content:"\f439"}.fa-chess-bishop:before{content:"\f43a"}.fa-chess-board:before{content:"\f43c"}.fa-chess-king:before{content:"\f43f"}.fa-chess-knight:before{content:"\f441"}.fa-chess-pawn:before{content:"\f443"}.fa-chess-queen:before{content:"\f445"}.fa-chess-rook:before{content:"\f447"}.fa-chevron-circle-down:before{content:"\f13a"}.fa-chevron-circle-left:before{content:"\f137"}.fa-chevron-circle-right:before{content:"\f138"}.fa-chevron-circle-up:before{content:"\f139"}.fa-chevron-down:before{content:"\f078"}.fa-chevron-left:before{content:"\f053"}.fa-chevron-right:before{content:"\f054"}.fa-chevron-up:before{content:"\f077"}.fa-child:before{content:"\f1ae"}.fa-chrome:before{content:"\f268"}.fa-church:before{content:"\f51d"}.fa-circle:before{content:"\f111"}.fa-circle-notch:before{content:"\f1ce"}.fa-city:before{content:"\f64f"}.fa-clinic-medical:before{content:"\f7f2"}.fa-clipboard:before{content:"\f328"}.fa-clipboard-check:before{content:"\f46c"}.fa-clipboard-list:before{content:"\f46d"}.fa-clock:before{content:"\f017"}.fa-clone:before{content:"\f24d"}.fa-closed-captioning:before{content:"\f20a"}.fa-cloud:before{content:"\f0c2"}.fa-cloud-download-alt:before{content:"\f381"}.fa-cloud-meatball:before{content:"\f73b"}.fa-cloud-moon:before{content:"\f6c3"}.fa-cloud-moon-rain:before{content:"\f73c"}.fa-cloud-rain:before{content:"\f73d"}.fa-cloud-showers-heavy:before{content:"\f740"}.fa-cloud-sun:before{content:"\f6c4"}.fa-cloud-sun-rain:before{content:"\f743"}.fa-cloud-upload-alt:before{content:"\f382"}.fa-cloudscale:before{content:"\f383"}.fa-cloudsmith:before{content:"\f384"}.fa-cloudversify:before{content:"\f385"}.fa-cocktail:before{content:"\f561"}.fa-code:before{content:"\f121"}.fa-code-branch:before{content:"\f126"}.fa-codepen:before{content:"\f1cb"}.fa-codiepie:before{content:"\f284"}.fa-coffee:before{content:"\f0f4"}.fa-cog:before{content:"\f013"}.fa-cogs:before{content:"\f085"}.fa-coins:before{content:"\f51e"}.fa-columns:before{content:"\f0db"}.fa-comment:before{content:"\f075"}.fa-comment-alt:before{content:"\f27a"}.fa-comment-dollar:before{content:"\f651"}.fa-comment-dots:before{content:"\f4ad"}.
fa-comment-medical:before{content:"\f7f5"}.fa-comment-slash:before{content:"\f4b3"}.fa-comments:before{content:"\f086"}.fa-comments-dollar:before{content:"\f653"}.fa-compact-disc:before{content:"\f51f"}.fa-compass:before{content:"\f14e"}.fa-compress:before{content:"\f066"}.fa-compress-arrows-alt:before{content:"\f78c"}.fa-concierge-bell:before{content:"\f562"}.fa-confluence:before{content:"\f78d"}.fa-connectdevelop:before{content:"\f20e"}.fa-contao:before{content:"\f26d"}.fa-cookie:before{content:"\f563"}.fa-cookie-bite:before{content:"\f564"}.fa-copy:before{content:"\f0c5"}.fa-copyright:before{content:"\f1f9"}.fa-couch:before{content:"\f4b8"}.fa-cpanel:before{content:"\f388"}.fa-creative-commons:before{content:"\f25e"}.fa-creative-commons-by:before{content:"\f4e7"}.fa-creative-commons-nc:before{content:"\f4e8"}.fa-creative-commons-nc-eu:before{content:"\f4e9"}.fa-creative-commons-nc-jp:before{content:"\f4ea"}.fa-creative-commons-nd:before{content:"\f4eb"}.fa-creative-commons-pd:before{content:"\f4ec"}.fa-creative-commons-pd-alt:before{content:"\f4ed"}.fa-creative-commons-remix:before{content:"\f4ee"}.fa-creative-commons-sa:before{content:"\f4ef"}.fa-creative-commons-sampling:before{content:"\f4f0"}.fa-creative-commons-sampling-plus:before{content:"\f4f1"}.fa-creative-commons-share:before{content:"\f4f2"}.fa-creative-commons-zero:before{content:"\f4f3"}.fa-credit-card:before{content:"\f09d"}.fa-critical-role:before{content:"\f6c9"}.fa-crop:before{content:"\f125"}.fa-crop-alt:before{content:"\f565"}.fa-cross:before{content:"\f654"}.fa-crosshairs:before{content:"\f05b"}.fa-crow:before{content:"\f520"}.fa-crown:before{content:"\f521"}.fa-crutch:before{content:"\f7f7"}.fa-css3:before{content:"\f13c"}.fa-css3-alt:before{content:"\f38b"}.fa-cube:before{content:"\f1b2"}.fa-cubes:before{content:"\f1b3"}.fa-cut:before{content:"\f0c4"}.fa-cuttlefish:before{content:"\f38c"}.fa-d-and-d:before{content:"\f38d"}.fa-d-and-d-beyond:before{content:"\f6ca"}.fa-dashcube:before{content:"\f210"}.fa-database:before{content:"\f1c0"}.fa-deaf:before{content:"\f2a4"}.fa-delicious:before{content:"\f1a5"}.fa-democrat:before{content:"\f747"}.fa-deploydog:before{content:"\f38e"}.fa-deskpro:before{content:"\f38f"}.fa-desktop:before{content:"\f108"}.fa-dev:before{content:"\f6cc"}.fa-deviantart:before{content:"\f1bd"}.fa-dharmachakra:before{content:"\f655"}.fa-dhl:before{content:"\f790"}.fa-diagnoses:before{content:"\f470"}.fa-diaspora:before{content:"\f791"}.fa-dice:before{content:"\f522"}.fa-dice-d20:before{content:"\f6cf"}.fa-dice-d6:before{content:"\f6d1"}.fa-dice-five:before{content:"\f523"}.fa-dice-four:before{content:"\f524"}.fa-dice-one:before{content:"\f525"}.fa-dice-six:before{content:"\f526"}.fa-dice-three:before{content:"\f527"}.fa-dice-two:before{content:"\f528"}.fa-digg:before{content:"\f1a6"}.fa-digital-ocean:before{content:"\f391"}.fa-digital-tachograph:before{content:"\f566"}.fa-directions:before{content:"\f5eb"}.fa-discord:before{content:"\f392"}.fa-discourse:before{content:"\f393"}.fa-divide:before{content:"\f529"}.fa-dizzy:before{content:"\f567"}.fa-dna:before{content:"\f471"}.fa-dochub:before{content:"\f394"}.fa-docker:before{content:"\f395"}.fa-dog:before{content:"\f6d3"}.fa-dollar-sign:before{content:"\f155"}.fa-dolly:before{content:"\f472"}.fa-dolly-flatbed:before{content:"\f474"}.fa-donate:before{content:"\f4b9"}.fa-door-closed:before{content:"\f52a"}.fa-door-open:before{content:"\f52b"}.fa-dot-circle:before{content:"\f192"}.fa-dove:before{content:"\f4ba"}.fa-download:before{content:"\f019"}.fa-draf
t2digital:before{content:"\f396"}.fa-drafting-compass:before{content:"\f568"}.fa-dragon:before{content:"\f6d5"}.fa-draw-polygon:before{content:"\f5ee"}.fa-dribbble:before{content:"\f17d"}.fa-dribbble-square:before{content:"\f397"}.fa-dropbox:before{content:"\f16b"}.fa-drum:before{content:"\f569"}.fa-drum-steelpan:before{content:"\f56a"}.fa-drumstick-bite:before{content:"\f6d7"}.fa-drupal:before{content:"\f1a9"}.fa-dumbbell:before{content:"\f44b"}.fa-dumpster:before{content:"\f793"}.fa-dumpster-fire:before{content:"\f794"}.fa-dungeon:before{content:"\f6d9"}.fa-dyalog:before{content:"\f399"}.fa-earlybirds:before{content:"\f39a"}.fa-ebay:before{content:"\f4f4"}.fa-edge:before{content:"\f282"}.fa-edit:before{content:"\f044"}.fa-egg:before{content:"\f7fb"}.fa-eject:before{content:"\f052"}.fa-elementor:before{content:"\f430"}.fa-ellipsis-h:before{content:"\f141"}.fa-ellipsis-v:before{content:"\f142"}.fa-ello:before{content:"\f5f1"}.fa-ember:before{content:"\f423"}.fa-empire:before{content:"\f1d1"}.fa-envelope:before{content:"\f0e0"}.fa-envelope-open:before{content:"\f2b6"}.fa-envelope-open-text:before{content:"\f658"}.fa-envelope-square:before{content:"\f199"}.fa-envira:before{content:"\f299"}.fa-equals:before{content:"\f52c"}.fa-eraser:before{content:"\f12d"}.fa-erlang:before{content:"\f39d"}.fa-ethereum:before{content:"\f42e"}.fa-ethernet:before{content:"\f796"}.fa-etsy:before{content:"\f2d7"}.fa-euro-sign:before{content:"\f153"}.fa-exchange-alt:before{content:"\f362"}.fa-exclamation:before{content:"\f12a"}.fa-exclamation-circle:before{content:"\f06a"}.fa-exclamation-triangle:before{content:"\f071"}.fa-expand:before{content:"\f065"}.fa-expand-arrows-alt:before{content:"\f31e"}.fa-expeditedssl:before{content:"\f23e"}.fa-external-link-alt:before{content:"\f35d"}.fa-external-link-square-alt:before{content:"\f360"}.fa-eye:before{content:"\f06e"}.fa-eye-dropper:before{content:"\f1fb"}.fa-eye-slash:before{content:"\f070"}.fa-facebook:before{content:"\f09a"}.fa-facebook-f:before{content:"\f39e"}.fa-facebook-messenger:before{content:"\f39f"}.fa-facebook-square:before{content:"\f082"}.fa-fantasy-flight-games:before{content:"\f6dc"}.fa-fast-backward:before{content:"\f049"}.fa-fast-forward:before{content:"\f050"}.fa-fax:before{content:"\f1ac"}.fa-feather:before{content:"\f52d"}.fa-feather-alt:before{content:"\f56b"}.fa-fedex:before{content:"\f797"}.fa-fedora:before{content:"\f798"}.fa-female:before{content:"\f182"}.fa-fighter-jet:before{content:"\f0fb"}.fa-figma:before{content:"\f799"}.fa-file:before{content:"\f15b"}.fa-file-alt:before{content:"\f15c"}.fa-file-archive:before{content:"\f1c6"}.fa-file-audio:before{content:"\f1c7"}.fa-file-code:before{content:"\f1c9"}.fa-file-contract:before{content:"\f56c"}.fa-file-csv:before{content:"\f6dd"}.fa-file-download:before{content:"\f56d"}.fa-file-excel:before{content:"\f1c3"}.fa-file-export:before{content:"\f56e"}.fa-file-image:before{content:"\f1c5"}.fa-file-import:before{content:"\f56f"}.fa-file-invoice:before{content:"\f570"}.fa-file-invoice-dollar:before{content:"\f571"}.fa-file-medical:before{content:"\f477"}.fa-file-medical-alt:before{content:"\f478"}.fa-file-pdf:before{content:"\f1c1"}.fa-file-powerpoint:before{content:"\f1c4"}.fa-file-prescription:before{content:"\f572"}.fa-file-signature:before{content:"\f573"}.fa-file-upload:before{content:"\f574"}.fa-file-video:before{content:"\f1c8"}.fa-file-word:before{content:"\f1c2"}.fa-fill:before{content:"\f575"}.fa-fill-drip:before{content:"\f576"}.fa-film:before{content:"\f008"}.fa-filter:before{content:"\f0b0"
}.fa-fingerprint:before{content:"\f577"}.fa-fire:before{content:"\f06d"}.fa-fire-alt:before{content:"\f7e4"}.fa-fire-extinguisher:before{content:"\f134"}.fa-firefox:before{content:"\f269"}.fa-first-aid:before{content:"\f479"}.fa-first-order:before{content:"\f2b0"}.fa-first-order-alt:before{content:"\f50a"}.fa-firstdraft:before{content:"\f3a1"}.fa-fish:before{content:"\f578"}.fa-fist-raised:before{content:"\f6de"}.fa-flag:before{content:"\f024"}.fa-flag-checkered:before{content:"\f11e"}.fa-flag-usa:before{content:"\f74d"}.fa-flask:before{content:"\f0c3"}.fa-flickr:before{content:"\f16e"}.fa-flipboard:before{content:"\f44d"}.fa-flushed:before{content:"\f579"}.fa-fly:before{content:"\f417"}.fa-folder:before{content:"\f07b"}.fa-folder-minus:before{content:"\f65d"}.fa-folder-open:before{content:"\f07c"}.fa-folder-plus:before{content:"\f65e"}.fa-font:before{content:"\f031"}.fa-font-awesome:before{content:"\f2b4"}.fa-font-awesome-alt:before{content:"\f35c"}.fa-font-awesome-flag:before{content:"\f425"}.fa-font-awesome-logo-full:before{content:"\f4e6"}.fa-fonticons:before{content:"\f280"}.fa-fonticons-fi:before{content:"\f3a2"}.fa-football-ball:before{content:"\f44e"}.fa-fort-awesome:before{content:"\f286"}.fa-fort-awesome-alt:before{content:"\f3a3"}.fa-forumbee:before{content:"\f211"}.fa-forward:before{content:"\f04e"}.fa-foursquare:before{content:"\f180"}.fa-free-code-camp:before{content:"\f2c5"}.fa-freebsd:before{content:"\f3a4"}.fa-frog:before{content:"\f52e"}.fa-frown:before{content:"\f119"}.fa-frown-open:before{content:"\f57a"}.fa-fulcrum:before{content:"\f50b"}.fa-funnel-dollar:before{content:"\f662"}.fa-futbol:before{content:"\f1e3"}.fa-galactic-republic:before{content:"\f50c"}.fa-galactic-senate:before{content:"\f50d"}.fa-gamepad:before{content:"\f11b"}.fa-gas-pump:before{content:"\f52f"}.fa-gavel:before{content:"\f0e3"}.fa-gem:before{content:"\f3a5"}.fa-genderless:before{content:"\f22d"}.fa-get-pocket:before{content:"\f265"}.fa-gg:before{content:"\f260"}.fa-gg-circle:before{content:"\f261"}.fa-ghost:before{content:"\f6e2"}.fa-gift:before{content:"\f06b"}.fa-gifts:before{content:"\f79c"}.fa-git:before{content:"\f1d3"}.fa-git-square:before{content:"\f1d2"}.fa-github:before{content:"\f09b"}.fa-github-alt:before{content:"\f113"}.fa-github-square:before{content:"\f092"}.fa-gitkraken:before{content:"\f3a6"}.fa-gitlab:before{content:"\f296"}.fa-gitter:before{content:"\f426"}.fa-glass-cheers:before{content:"\f79f"}.fa-glass-martini:before{content:"\f000"}.fa-glass-martini-alt:before{content:"\f57b"}.fa-glass-whiskey:before{content:"\f7a0"}.fa-glasses:before{content:"\f530"}.fa-glide:before{content:"\f2a5"}.fa-glide-g:before{content:"\f2a6"}.fa-globe:before{content:"\f0ac"}.fa-globe-africa:before{content:"\f57c"}.fa-globe-americas:before{content:"\f57d"}.fa-globe-asia:before{content:"\f57e"}.fa-globe-europe:before{content:"\f7a2"}.fa-gofore:before{content:"\f3a7"}.fa-golf-ball:before{content:"\f450"}.fa-goodreads:before{content:"\f3a8"}.fa-goodreads-g:before{content:"\f3a9"}.fa-google:before{content:"\f1a0"}.fa-google-drive:before{content:"\f3aa"}.fa-google-play:before{content:"\f3ab"}.fa-google-plus:before{content:"\f2b3"}.fa-google-plus-g:before{content:"\f0d5"}.fa-google-plus-square:before{content:"\f0d4"}.fa-google-wallet:before{content:"\f1ee"}.fa-gopuram:before{content:"\f664"}.fa-graduation-cap:before{content:"\f19d"}.fa-gratipay:before{content:"\f184"}.fa-grav:before{content:"\f2d6"}.fa-greater-than:before{content:"\f531"}.fa-greater-than-equal:before{content:"\f532"}.fa-grimace:before{cont
ent:"\f57f"}.fa-grin:before{content:"\f580"}.fa-grin-alt:before{content:"\f581"}.fa-grin-beam:before{content:"\f582"}.fa-grin-beam-sweat:before{content:"\f583"}.fa-grin-hearts:before{content:"\f584"}.fa-grin-squint:before{content:"\f585"}.fa-grin-squint-tears:before{content:"\f586"}.fa-grin-stars:before{content:"\f587"}.fa-grin-tears:before{content:"\f588"}.fa-grin-tongue:before{content:"\f589"}.fa-grin-tongue-squint:before{content:"\f58a"}.fa-grin-tongue-wink:before{content:"\f58b"}.fa-grin-wink:before{content:"\f58c"}.fa-grip-horizontal:before{content:"\f58d"}.fa-grip-lines:before{content:"\f7a4"}.fa-grip-lines-vertical:before{content:"\f7a5"}.fa-grip-vertical:before{content:"\f58e"}.fa-gripfire:before{content:"\f3ac"}.fa-grunt:before{content:"\f3ad"}.fa-guitar:before{content:"\f7a6"}.fa-gulp:before{content:"\f3ae"}.fa-h-square:before{content:"\f0fd"}.fa-hacker-news:before{content:"\f1d4"}.fa-hacker-news-square:before{content:"\f3af"}.fa-hackerrank:before{content:"\f5f7"}.fa-hamburger:before{content:"\f805"}.fa-hammer:before{content:"\f6e3"}.fa-hamsa:before{content:"\f665"}.fa-hand-holding:before{content:"\f4bd"}.fa-hand-holding-heart:before{content:"\f4be"}.fa-hand-holding-usd:before{content:"\f4c0"}.fa-hand-lizard:before{content:"\f258"}.fa-hand-middle-finger:before{content:"\f806"}.fa-hand-paper:before{content:"\f256"}.fa-hand-peace:before{content:"\f25b"}.fa-hand-point-down:before{content:"\f0a7"}.fa-hand-point-left:before{content:"\f0a5"}.fa-hand-point-right:before{content:"\f0a4"}.fa-hand-point-up:before{content:"\f0a6"}.fa-hand-pointer:before{content:"\f25a"}.fa-hand-rock:before{content:"\f255"}.fa-hand-scissors:before{content:"\f257"}.fa-hand-spock:before{content:"\f259"}.fa-hands:before{content:"\f4c2"}.fa-hands-helping:before{content:"\f4c4"}.fa-handshake:before{content:"\f2b5"}.fa-hanukiah:before{content:"\f6e6"}.fa-hard-hat:before{content:"\f807"}.fa-hashtag:before{content:"\f292"}.fa-hat-wizard:before{content:"\f6e8"}.fa-haykal:before{content:"\f666"}.fa-hdd:before{content:"\f0a0"}.fa-heading:before{content:"\f1dc"}.fa-headphones:before{content:"\f025"}.fa-headphones-alt:before{content:"\f58f"}.fa-headset:before{content:"\f590"}.fa-heart:before{content:"\f004"}.fa-heart-broken:before{content:"\f7a9"}.fa-heartbeat:before{content:"\f21e"}.fa-helicopter:before{content:"\f533"}.fa-highlighter:before{content:"\f591"}.fa-hiking:before{content:"\f6ec"}.fa-hippo:before{content:"\f6ed"}.fa-hips:before{content:"\f452"}.fa-hire-a-helper:before{content:"\f3b0"}.fa-history:before{content:"\f1da"}.fa-hockey-puck:before{content:"\f453"}.fa-holly-berry:before{content:"\f7aa"}.fa-home:before{content:"\f015"}.fa-hooli:before{content:"\f427"}.fa-hornbill:before{content:"\f592"}.fa-horse:before{content:"\f6f0"}.fa-horse-head:before{content:"\f7ab"}.fa-hospital:before{content:"\f0f8"}.fa-hospital-alt:before{content:"\f47d"}.fa-hospital-symbol:before{content:"\f47e"}.fa-hot-tub:before{content:"\f593"}.fa-hotdog:before{content:"\f80f"}.fa-hotel:before{content:"\f594"}.fa-hotjar:before{content:"\f3b1"}.fa-hourglass:before{content:"\f254"}.fa-hourglass-end:before{content:"\f253"}.fa-hourglass-half:before{content:"\f252"}.fa-hourglass-start:before{content:"\f251"}.fa-house-damage:before{content:"\f6f1"}.fa-houzz:before{content:"\f27c"}.fa-hryvnia:before{content:"\f6f2"}.fa-html5:before{content:"\f13b"}.fa-hubspot:before{content:"\f3b2"}.fa-i-cursor:before{content:"\f246"}.fa-ice-cream:before{content:"\f810"}.fa-icicles:before{content:"\f7ad"}.fa-id-badge:before{content:"\f2c1"}.fa-id-card:before{conte
nt:"\f2c2"}.fa-id-card-alt:before{content:"\f47f"}.fa-igloo:before{content:"\f7ae"}.fa-image:before{content:"\f03e"}.fa-images:before{content:"\f302"}.fa-imdb:before{content:"\f2d8"}.fa-inbox:before{content:"\f01c"}.fa-indent:before{content:"\f03c"}.fa-industry:before{content:"\f275"}.fa-infinity:before{content:"\f534"}.fa-info:before{content:"\f129"}.fa-info-circle:before{content:"\f05a"}.fa-instagram:before{content:"\f16d"}.fa-intercom:before{content:"\f7af"}.fa-internet-explorer:before{content:"\f26b"}.fa-invision:before{content:"\f7b0"}.fa-ioxhost:before{content:"\f208"}.fa-italic:before{content:"\f033"}.fa-itunes:before{content:"\f3b4"}.fa-itunes-note:before{content:"\f3b5"}.fa-java:before{content:"\f4e4"}.fa-jedi:before{content:"\f669"}.fa-jedi-order:before{content:"\f50e"}.fa-jenkins:before{content:"\f3b6"}.fa-jira:before{content:"\f7b1"}.fa-joget:before{content:"\f3b7"}.fa-joint:before{content:"\f595"}.fa-joomla:before{content:"\f1aa"}.fa-journal-whills:before{content:"\f66a"}.fa-js:before{content:"\f3b8"}.fa-js-square:before{content:"\f3b9"}.fa-jsfiddle:before{content:"\f1cc"}.fa-kaaba:before{content:"\f66b"}.fa-kaggle:before{content:"\f5fa"}.fa-key:before{content:"\f084"}.fa-keybase:before{content:"\f4f5"}.fa-keyboard:before{content:"\f11c"}.fa-keycdn:before{content:"\f3ba"}.fa-khanda:before{content:"\f66d"}.fa-kickstarter:before{content:"\f3bb"}.fa-kickstarter-k:before{content:"\f3bc"}.fa-kiss:before{content:"\f596"}.fa-kiss-beam:before{content:"\f597"}.fa-kiss-wink-heart:before{content:"\f598"}.fa-kiwi-bird:before{content:"\f535"}.fa-korvue:before{content:"\f42f"}.fa-landmark:before{content:"\f66f"}.fa-language:before{content:"\f1ab"}.fa-laptop:before{content:"\f109"}.fa-laptop-code:before{content:"\f5fc"}.fa-laptop-medical:before{content:"\f812"}.fa-laravel:before{content:"\f3bd"}.fa-lastfm:before{content:"\f202"}.fa-lastfm-square:before{content:"\f203"}.fa-laugh:before{content:"\f599"}.fa-laugh-beam:before{content:"\f59a"}.fa-laugh-squint:before{content:"\f59b"}.fa-laugh-wink:before{content:"\f59c"}.fa-layer-group:before{content:"\f5fd"}.fa-leaf:before{content:"\f06c"}.fa-leanpub:before{content:"\f212"}.fa-lemon:before{content:"\f094"}.fa-less:before{content:"\f41d"}.fa-less-than:before{content:"\f536"}.fa-less-than-equal:before{content:"\f537"}.fa-level-down-alt:before{content:"\f3be"}.fa-level-up-alt:before{content:"\f3bf"}.fa-life-ring:before{content:"\f1cd"}.fa-lightbulb:before{content:"\f0eb"}.fa-line:before{content:"\f3c0"}.fa-link:before{content:"\f0c1"}.fa-linkedin:before{content:"\f08c"}.fa-linkedin-in:before{content:"\f0e1"}.fa-linode:before{content:"\f2b8"}.fa-linux:before{content:"\f17c"}.fa-lira-sign:before{content:"\f195"}.fa-list:before{content:"\f03a"}.fa-list-alt:before{content:"\f022"}.fa-list-ol:before{content:"\f0cb"}.fa-list-ul:before{content:"\f0ca"}.fa-location-arrow:before{content:"\f124"}.fa-lock:before{content:"\f023"}.fa-lock-open:before{content:"\f3c1"}.fa-long-arrow-alt-down:before{content:"\f309"}.fa-long-arrow-alt-left:before{content:"\f30a"}.fa-long-arrow-alt-right:before{content:"\f30b"}.fa-long-arrow-alt-up:before{content:"\f30c"}.fa-low-vision:before{content:"\f2a8"}.fa-luggage-cart:before{content:"\f59d"}.fa-lyft:before{content:"\f3c3"}.fa-magento:before{content:"\f3c4"}.fa-magic:before{content:"\f0d0"}.fa-magnet:before{content:"\f076"}.fa-mail-bulk:before{content:"\f674"}.fa-mailchimp:before{content:"\f59e"}.fa-male:before{content:"\f183"}.fa-mandalorian:before{content:"\f50f"}.fa-map:before{content:"\f279"}.fa-map-marked:before{content:"\f
59f"}.fa-map-marked-alt:before{content:"\f5a0"}.fa-map-marker:before{content:"\f041"}.fa-map-marker-alt:before{content:"\f3c5"}.fa-map-pin:before{content:"\f276"}.fa-map-signs:before{content:"\f277"}.fa-markdown:before{content:"\f60f"}.fa-marker:before{content:"\f5a1"}.fa-mars:before{content:"\f222"}.fa-mars-double:before{content:"\f227"}.fa-mars-stroke:before{content:"\f229"}.fa-mars-stroke-h:before{content:"\f22b"}.fa-mars-stroke-v:before{content:"\f22a"}.fa-mask:before{content:"\f6fa"}.fa-mastodon:before{content:"\f4f6"}.fa-maxcdn:before{content:"\f136"}.fa-medal:before{content:"\f5a2"}.fa-medapps:before{content:"\f3c6"}.fa-medium:before{content:"\f23a"}.fa-medium-m:before{content:"\f3c7"}.fa-medkit:before{content:"\f0fa"}.fa-medrt:before{content:"\f3c8"}.fa-meetup:before{content:"\f2e0"}.fa-megaport:before{content:"\f5a3"}.fa-meh:before{content:"\f11a"}.fa-meh-blank:before{content:"\f5a4"}.fa-meh-rolling-eyes:before{content:"\f5a5"}.fa-memory:before{content:"\f538"}.fa-mendeley:before{content:"\f7b3"}.fa-menorah:before{content:"\f676"}.fa-mercury:before{content:"\f223"}.fa-meteor:before{content:"\f753"}.fa-microchip:before{content:"\f2db"}.fa-microphone:before{content:"\f130"}.fa-microphone-alt:before{content:"\f3c9"}.fa-microphone-alt-slash:before{content:"\f539"}.fa-microphone-slash:before{content:"\f131"}.fa-microscope:before{content:"\f610"}.fa-microsoft:before{content:"\f3ca"}.fa-minus:before{content:"\f068"}.fa-minus-circle:before{content:"\f056"}.fa-minus-square:before{content:"\f146"}.fa-mitten:before{content:"\f7b5"}.fa-mix:before{content:"\f3cb"}.fa-mixcloud:before{content:"\f289"}.fa-mizuni:before{content:"\f3cc"}.fa-mobile:before{content:"\f10b"}.fa-mobile-alt:before{content:"\f3cd"}.fa-modx:before{content:"\f285"}.fa-monero:before{content:"\f3d0"}.fa-money-bill:before{content:"\f0d6"}.fa-money-bill-alt:before{content:"\f3d1"}.fa-money-bill-wave:before{content:"\f53a"}.fa-money-bill-wave-alt:before{content:"\f53b"}.fa-money-check:before{content:"\f53c"}.fa-money-check-alt:before{content:"\f53d"}.fa-monument:before{content:"\f5a6"}.fa-moon:before{content:"\f186"}.fa-mortar-pestle:before{content:"\f5a7"}.fa-mosque:before{content:"\f678"}.fa-motorcycle:before{content:"\f21c"}.fa-mountain:before{content:"\f6fc"}.fa-mouse-pointer:before{content:"\f245"}.fa-mug-hot:before{content:"\f7b6"}.fa-music:before{content:"\f001"}.fa-napster:before{content:"\f3d2"}.fa-neos:before{content:"\f612"}.fa-network-wired:before{content:"\f6ff"}.fa-neuter:before{content:"\f22c"}.fa-newspaper:before{content:"\f1ea"}.fa-nimblr:before{content:"\f5a8"}.fa-nintendo-switch:before{content:"\f418"}.fa-node:before{content:"\f419"}.fa-node-js:before{content:"\f3d3"}.fa-not-equal:before{content:"\f53e"}.fa-notes-medical:before{content:"\f481"}.fa-npm:before{content:"\f3d4"}.fa-ns8:before{content:"\f3d5"}.fa-nutritionix:before{content:"\f3d6"}.fa-object-group:before{content:"\f247"}.fa-object-ungroup:before{content:"\f248"}.fa-odnoklassniki:before{content:"\f263"}.fa-odnoklassniki-square:before{content:"\f264"}.fa-oil-can:before{content:"\f613"}.fa-old-republic:before{content:"\f510"}.fa-om:before{content:"\f679"}.fa-opencart:before{content:"\f23d"}.fa-openid:before{content:"\f19b"}.fa-opera:before{content:"\f26a"}.fa-optin-monster:before{content:"\f23c"}.fa-osi:before{content:"\f41a"}.fa-otter:before{content:"\f700"}.fa-outdent:before{content:"\f03b"}.fa-page4:before{content:"\f3d7"}.fa-pagelines:before{content:"\f18c"}.fa-pager:before{content:"\f815"}.fa-paint-brush:before{content:"\f1fc"}.fa-paint-roller:bef
ore{content:"\f5aa"}.fa-palette:before{content:"\f53f"}.fa-palfed:before{content:"\f3d8"}.fa-pallet:before{content:"\f482"}.fa-paper-plane:before{content:"\f1d8"}.fa-paperclip:before{content:"\f0c6"}.fa-parachute-box:before{content:"\f4cd"}.fa-paragraph:before{content:"\f1dd"}.fa-parking:before{content:"\f540"}.fa-passport:before{content:"\f5ab"}.fa-pastafarianism:before{content:"\f67b"}.fa-paste:before{content:"\f0ea"}.fa-patreon:before{content:"\f3d9"}.fa-pause:before{content:"\f04c"}.fa-pause-circle:before{content:"\f28b"}.fa-paw:before{content:"\f1b0"}.fa-paypal:before{content:"\f1ed"}.fa-peace:before{content:"\f67c"}.fa-pen:before{content:"\f304"}.fa-pen-alt:before{content:"\f305"}.fa-pen-fancy:before{content:"\f5ac"}.fa-pen-nib:before{content:"\f5ad"}.fa-pen-square:before{content:"\f14b"}.fa-pencil-alt:before{content:"\f303"}.fa-pencil-ruler:before{content:"\f5ae"}.fa-penny-arcade:before{content:"\f704"}.fa-people-carry:before{content:"\f4ce"}.fa-pepper-hot:before{content:"\f816"}.fa-percent:before{content:"\f295"}.fa-percentage:before{content:"\f541"}.fa-periscope:before{content:"\f3da"}.fa-person-booth:before{content:"\f756"}.fa-phabricator:before{content:"\f3db"}.fa-phoenix-framework:before{content:"\f3dc"}.fa-phoenix-squadron:before{content:"\f511"}.fa-phone:before{content:"\f095"}.fa-phone-slash:before{content:"\f3dd"}.fa-phone-square:before{content:"\f098"}.fa-phone-volume:before{content:"\f2a0"}.fa-php:before{content:"\f457"}.fa-pied-piper:before{content:"\f2ae"}.fa-pied-piper-alt:before{content:"\f1a8"}.fa-pied-piper-hat:before{content:"\f4e5"}.fa-pied-piper-pp:before{content:"\f1a7"}.fa-piggy-bank:before{content:"\f4d3"}.fa-pills:before{content:"\f484"}.fa-pinterest:before{content:"\f0d2"}.fa-pinterest-p:before{content:"\f231"}.fa-pinterest-square:before{content:"\f0d3"}.fa-pizza-slice:before{content:"\f818"}.fa-place-of-worship:before{content:"\f67f"}.fa-plane:before{content:"\f072"}.fa-plane-arrival:before{content:"\f5af"}.fa-plane-departure:before{content:"\f5b0"}.fa-play:before{content:"\f04b"}.fa-play-circle:before{content:"\f144"}.fa-playstation:before{content:"\f3df"}.fa-plug:before{content:"\f1e6"}.fa-plus:before{content:"\f067"}.fa-plus-circle:before{content:"\f055"}.fa-plus-square:before{content:"\f0fe"}.fa-podcast:before{content:"\f2ce"}.fa-poll:before{content:"\f681"}.fa-poll-h:before{content:"\f682"}.fa-poo:before{content:"\f2fe"}.fa-poo-storm:before{content:"\f75a"}.fa-poop:before{content:"\f619"}.fa-portrait:before{content:"\f3e0"}.fa-pound-sign:before{content:"\f154"}.fa-power-off:before{content:"\f011"}.fa-pray:before{content:"\f683"}.fa-praying-hands:before{content:"\f684"}.fa-prescription:before{content:"\f5b1"}.fa-prescription-bottle:before{content:"\f485"}.fa-prescription-bottle-alt:before{content:"\f486"}.fa-print:before{content:"\f02f"}.fa-procedures:before{content:"\f487"}.fa-product-hunt:before{content:"\f288"}.fa-project-diagram:before{content:"\f542"}.fa-pushed:before{content:"\f3e1"}.fa-puzzle-piece:before{content:"\f12e"}.fa-python:before{content:"\f3e2"}.fa-qq:before{content:"\f1d6"}.fa-qrcode:before{content:"\f029"}.fa-question:before{content:"\f128"}.fa-question-circle:before{content:"\f059"}.fa-quidditch:before{content:"\f458"}.fa-quinscape:before{content:"\f459"}.fa-quora:before{content:"\f2c4"}.fa-quote-left:before{content:"\f10d"}.fa-quote-right:before{content:"\f10e"}.fa-quran:before{content:"\f687"}.fa-r-project:before{content:"\f4f7"}.fa-radiation:before{content:"\f7b9"}.fa-radiation-alt:before{content:"\f7ba"}.fa-rainbow:before{content:
"\f75b"}.fa-random:before{content:"\f074"}.fa-raspberry-pi:before{content:"\f7bb"}.fa-ravelry:before{content:"\f2d9"}.fa-react:before{content:"\f41b"}.fa-reacteurope:before{content:"\f75d"}.fa-readme:before{content:"\f4d5"}.fa-rebel:before{content:"\f1d0"}.fa-receipt:before{content:"\f543"}.fa-recycle:before{content:"\f1b8"}.fa-red-river:before{content:"\f3e3"}.fa-reddit:before{content:"\f1a1"}.fa-reddit-alien:before{content:"\f281"}.fa-reddit-square:before{content:"\f1a2"}.fa-redhat:before{content:"\f7bc"}.fa-redo:before{content:"\f01e"}.fa-redo-alt:before{content:"\f2f9"}.fa-registered:before{content:"\f25d"}.fa-renren:before{content:"\f18b"}.fa-reply:before{content:"\f3e5"}.fa-reply-all:before{content:"\f122"}.fa-replyd:before{content:"\f3e6"}.fa-republican:before{content:"\f75e"}.fa-researchgate:before{content:"\f4f8"}.fa-resolving:before{content:"\f3e7"}.fa-restroom:before{content:"\f7bd"}.fa-retweet:before{content:"\f079"}.fa-rev:before{content:"\f5b2"}.fa-ribbon:before{content:"\f4d6"}.fa-ring:before{content:"\f70b"}.fa-road:before{content:"\f018"}.fa-robot:before{content:"\f544"}.fa-rocket:before{content:"\f135"}.fa-rocketchat:before{content:"\f3e8"}.fa-rockrms:before{content:"\f3e9"}.fa-route:before{content:"\f4d7"}.fa-rss:before{content:"\f09e"}.fa-rss-square:before{content:"\f143"}.fa-ruble-sign:before{content:"\f158"}.fa-ruler:before{content:"\f545"}.fa-ruler-combined:before{content:"\f546"}.fa-ruler-horizontal:before{content:"\f547"}.fa-ruler-vertical:before{content:"\f548"}.fa-running:before{content:"\f70c"}.fa-rupee-sign:before{content:"\f156"}.fa-sad-cry:before{content:"\f5b3"}.fa-sad-tear:before{content:"\f5b4"}.fa-safari:before{content:"\f267"}.fa-sass:before{content:"\f41e"}.fa-satellite:before{content:"\f7bf"}.fa-satellite-dish:before{content:"\f7c0"}.fa-save:before{content:"\f0c7"}.fa-schlix:before{content:"\f3ea"}.fa-school:before{content:"\f549"}.fa-screwdriver:before{content:"\f54a"}.fa-scribd:before{content:"\f28a"}.fa-scroll:before{content:"\f70e"}.fa-sd-card:before{content:"\f7c2"}.fa-search:before{content:"\f002"}.fa-search-dollar:before{content:"\f688"}.fa-search-location:before{content:"\f689"}.fa-search-minus:before{content:"\f010"}.fa-search-plus:before{content:"\f00e"}.fa-searchengin:before{content:"\f3eb"}.fa-seedling:before{content:"\f4d8"}.fa-sellcast:before{content:"\f2da"}.fa-sellsy:before{content:"\f213"}.fa-server:before{content:"\f233"}.fa-servicestack:before{content:"\f3ec"}.fa-shapes:before{content:"\f61f"}.fa-share:before{content:"\f064"}.fa-share-alt:before{content:"\f1e0"}.fa-share-alt-square:before{content:"\f1e1"}.fa-share-square:before{content:"\f14d"}.fa-shekel-sign:before{content:"\f20b"}.fa-shield-alt:before{content:"\f3ed"}.fa-ship:before{content:"\f21a"}.fa-shipping-fast:before{content:"\f48b"}.fa-shirtsinbulk:before{content:"\f214"}.fa-shoe-prints:before{content:"\f54b"}.fa-shopping-bag:before{content:"\f290"}.fa-shopping-basket:before{content:"\f291"}.fa-shopping-cart:before{content:"\f07a"}.fa-shopware:before{content:"\f5b5"}.fa-shower:before{content:"\f2cc"}.fa-shuttle-van:before{content:"\f5b6"}.fa-sign:before{content:"\f4d9"}.fa-sign-in-alt:before{content:"\f2f6"}.fa-sign-language:before{content:"\f2a7"}.fa-sign-out-alt:before{content:"\f2f5"}.fa-signal:before{content:"\f012"}.fa-signature:before{content:"\f5b7"}.fa-sim-card:before{content:"\f7c4"}.fa-simplybuilt:before{content:"\f215"}.fa-sistrix:before{content:"\f3ee"}.fa-sitemap:before{content:"\f0e8"}.fa-sith:before{content:"\f512"}.fa-skating:before{content:"\f7c5"}.fa-sketch:
before{content:"\f7c6"}.fa-skiing:before{content:"\f7c9"}.fa-skiing-nordic:before{content:"\f7ca"}.fa-skull:before{content:"\f54c"}.fa-skull-crossbones:before{content:"\f714"}.fa-skyatlas:before{content:"\f216"}.fa-skype:before{content:"\f17e"}.fa-slack:before{content:"\f198"}.fa-slack-hash:before{content:"\f3ef"}.fa-slash:before{content:"\f715"}.fa-sleigh:before{content:"\f7cc"}.fa-sliders-h:before{content:"\f1de"}.fa-slideshare:before{content:"\f1e7"}.fa-smile:before{content:"\f118"}.fa-smile-beam:before{content:"\f5b8"}.fa-smile-wink:before{content:"\f4da"}.fa-smog:before{content:"\f75f"}.fa-smoking:before{content:"\f48d"}.fa-smoking-ban:before{content:"\f54d"}.fa-sms:before{content:"\f7cd"}.fa-snapchat:before{content:"\f2ab"}.fa-snapchat-ghost:before{content:"\f2ac"}.fa-snapchat-square:before{content:"\f2ad"}.fa-snowboarding:before{content:"\f7ce"}.fa-snowflake:before{content:"\f2dc"}.fa-snowman:before{content:"\f7d0"}.fa-snowplow:before{content:"\f7d2"}.fa-socks:before{content:"\f696"}.fa-solar-panel:before{content:"\f5ba"}.fa-sort:before{content:"\f0dc"}.fa-sort-alpha-down:before{content:"\f15d"}.fa-sort-alpha-up:before{content:"\f15e"}.fa-sort-amount-down:before{content:"\f160"}.fa-sort-amount-up:before{content:"\f161"}.fa-sort-down:before{content:"\f0dd"}.fa-sort-numeric-down:before{content:"\f162"}.fa-sort-numeric-up:before{content:"\f163"}.fa-sort-up:before{content:"\f0de"}.fa-soundcloud:before{content:"\f1be"}.fa-sourcetree:before{content:"\f7d3"}.fa-spa:before{content:"\f5bb"}.fa-space-shuttle:before{content:"\f197"}.fa-speakap:before{content:"\f3f3"}.fa-spider:before{content:"\f717"}.fa-spinner:before{content:"\f110"}.fa-splotch:before{content:"\f5bc"}.fa-spotify:before{content:"\f1bc"}.fa-spray-can:before{content:"\f5bd"}.fa-square:before{content:"\f0c8"}.fa-square-full:before{content:"\f45c"}.fa-square-root-alt:before{content:"\f698"}.fa-squarespace:before{content:"\f5be"}.fa-stack-exchange:before{content:"\f18d"}.fa-stack-overflow:before{content:"\f16c"}.fa-stamp:before{content:"\f5bf"}.fa-star:before{content:"\f005"}.fa-star-and-crescent:before{content:"\f699"}.fa-star-half:before{content:"\f089"}.fa-star-half-alt:before{content:"\f5c0"}.fa-star-of-david:before{content:"\f69a"}.fa-star-of-life:before{content:"\f621"}.fa-staylinked:before{content:"\f3f5"}.fa-steam:before{content:"\f1b6"}.fa-steam-square:before{content:"\f1b7"}.fa-steam-symbol:before{content:"\f3f6"}.fa-step-backward:before{content:"\f048"}.fa-step-forward:before{content:"\f051"}.fa-stethoscope:before{content:"\f0f1"}.fa-sticker-mule:before{content:"\f3f7"}.fa-sticky-note:before{content:"\f249"}.fa-stop:before{content:"\f04d"}.fa-stop-circle:before{content:"\f28d"}.fa-stopwatch:before{content:"\f2f2"}.fa-store:before{content:"\f54e"}.fa-store-alt:before{content:"\f54f"}.fa-strava:before{content:"\f428"}.fa-stream:before{content:"\f550"}.fa-street-view:before{content:"\f21d"}.fa-strikethrough:before{content:"\f0cc"}.fa-stripe:before{content:"\f429"}.fa-stripe-s:before{content:"\f42a"}.fa-stroopwafel:before{content:"\f551"}.fa-studiovinari:before{content:"\f3f8"}.fa-stumbleupon:before{content:"\f1a4"}.fa-stumbleupon-circle:before{content:"\f1a3"}.fa-subscript:before{content:"\f12c"}.fa-subway:before{content:"\f239"}.fa-suitcase:before{content:"\f0f2"}.fa-suitcase-rolling:before{content:"\f5c1"}.fa-sun:before{content:"\f185"}.fa-superpowers:before{content:"\f2dd"}.fa-superscript:before{content:"\f12b"}.fa-supple:before{content:"\f3f9"}.fa-surprise:before{content:"\f5c2"}.fa-suse:before{content:"\f7d6"}.fa-swatch
book:before{content:"\f5c3"}.fa-swimmer:before{content:"\f5c4"}.fa-swimming-pool:before{content:"\f5c5"}.fa-synagogue:before{content:"\f69b"}.fa-sync:before{content:"\f021"}.fa-sync-alt:before{content:"\f2f1"}.fa-syringe:before{content:"\f48e"}.fa-table:before{content:"\f0ce"}.fa-table-tennis:before{content:"\f45d"}.fa-tablet:before{content:"\f10a"}.fa-tablet-alt:before{content:"\f3fa"}.fa-tablets:before{content:"\f490"}.fa-tachometer-alt:before{content:"\f3fd"}.fa-tag:before{content:"\f02b"}.fa-tags:before{content:"\f02c"}.fa-tape:before{content:"\f4db"}.fa-tasks:before{content:"\f0ae"}.fa-taxi:before{content:"\f1ba"}.fa-teamspeak:before{content:"\f4f9"}.fa-teeth:before{content:"\f62e"}.fa-teeth-open:before{content:"\f62f"}.fa-telegram:before{content:"\f2c6"}.fa-telegram-plane:before{content:"\f3fe"}.fa-temperature-high:before{content:"\f769"}.fa-temperature-low:before{content:"\f76b"}.fa-tencent-weibo:before{content:"\f1d5"}.fa-tenge:before{content:"\f7d7"}.fa-terminal:before{content:"\f120"}.fa-text-height:before{content:"\f034"}.fa-text-width:before{content:"\f035"}.fa-th:before{content:"\f00a"}.fa-th-large:before{content:"\f009"}.fa-th-list:before{content:"\f00b"}.fa-the-red-yeti:before{content:"\f69d"}.fa-theater-masks:before{content:"\f630"}.fa-themeco:before{content:"\f5c6"}.fa-themeisle:before{content:"\f2b2"}.fa-thermometer:before{content:"\f491"}.fa-thermometer-empty:before{content:"\f2cb"}.fa-thermometer-full:before{content:"\f2c7"}.fa-thermometer-half:before{content:"\f2c9"}.fa-thermometer-quarter:before{content:"\f2ca"}.fa-thermometer-three-quarters:before{content:"\f2c8"}.fa-think-peaks:before{content:"\f731"}.fa-thumbs-down:before{content:"\f165"}.fa-thumbs-up:before{content:"\f164"}.fa-thumbtack:before{content:"\f08d"}.fa-ticket-alt:before{content:"\f3ff"}.fa-times:before{content:"\f00d"}.fa-times-circle:before{content:"\f057"}.fa-tint:before{content:"\f043"}.fa-tint-slash:before{content:"\f5c7"}.fa-tired:before{content:"\f5c8"}.fa-toggle-off:before{content:"\f204"}.fa-toggle-on:before{content:"\f205"}.fa-toilet:before{content:"\f7d8"}.fa-toilet-paper:before{content:"\f71e"}.fa-toolbox:before{content:"\f552"}.fa-tools:before{content:"\f7d9"}.fa-tooth:before{content:"\f5c9"}.fa-torah:before{content:"\f6a0"}.fa-torii-gate:before{content:"\f6a1"}.fa-tractor:before{content:"\f722"}.fa-trade-federation:before{content:"\f513"}.fa-trademark:before{content:"\f25c"}.fa-traffic-light:before{content:"\f637"}.fa-train:before{content:"\f238"}.fa-tram:before{content:"\f7da"}.fa-transgender:before{content:"\f224"}.fa-transgender-alt:before{content:"\f225"}.fa-trash:before{content:"\f1f8"}.fa-trash-alt:before{content:"\f2ed"}.fa-trash-restore:before{content:"\f829"}.fa-trash-restore-alt:before{content:"\f82a"}.fa-tree:before{content:"\f1bb"}.fa-trello:before{content:"\f181"}.fa-tripadvisor:before{content:"\f262"}.fa-trophy:before{content:"\f091"}.fa-truck:before{content:"\f0d1"}.fa-truck-loading:before{content:"\f4de"}.fa-truck-monster:before{content:"\f63b"}.fa-truck-moving:before{content:"\f4df"}.fa-truck-pickup:before{content:"\f63c"}.fa-tshirt:before{content:"\f553"}.fa-tty:before{content:"\f1e4"}.fa-tumblr:before{content:"\f173"}.fa-tumblr-square:before{content:"\f174"}.fa-tv:before{content:"\f26c"}.fa-twitch:before{content:"\f1e8"}.fa-twitter:before{content:"\f099"}.fa-twitter-square:before{content:"\f081"}.fa-typo3:before{content:"\f42b"}.fa-uber:before{content:"\f402"}.fa-ubuntu:before{content:"\f7df"}.fa-uikit:before{content:"\f403"}.fa-umbrella:before{content:"\f0e9"}.fa-umbrella
-beach:before{content:"\f5ca"}.fa-underline:before{content:"\f0cd"}.fa-undo:before{content:"\f0e2"}.fa-undo-alt:before{content:"\f2ea"}.fa-uniregistry:before{content:"\f404"}.fa-universal-access:before{content:"\f29a"}.fa-university:before{content:"\f19c"}.fa-unlink:before{content:"\f127"}.fa-unlock:before{content:"\f09c"}.fa-unlock-alt:before{content:"\f13e"}.fa-untappd:before{content:"\f405"}.fa-upload:before{content:"\f093"}.fa-ups:before{content:"\f7e0"}.fa-usb:before{content:"\f287"}.fa-user:before{content:"\f007"}.fa-user-alt:before{content:"\f406"}.fa-user-alt-slash:before{content:"\f4fa"}.fa-user-astronaut:before{content:"\f4fb"}.fa-user-check:before{content:"\f4fc"}.fa-user-circle:before{content:"\f2bd"}.fa-user-clock:before{content:"\f4fd"}.fa-user-cog:before{content:"\f4fe"}.fa-user-edit:before{content:"\f4ff"}.fa-user-friends:before{content:"\f500"}.fa-user-graduate:before{content:"\f501"}.fa-user-injured:before{content:"\f728"}.fa-user-lock:before{content:"\f502"}.fa-user-md:before{content:"\f0f0"}.fa-user-minus:before{content:"\f503"}.fa-user-ninja:before{content:"\f504"}.fa-user-nurse:before{content:"\f82f"}.fa-user-plus:before{content:"\f234"}.fa-user-secret:before{content:"\f21b"}.fa-user-shield:before{content:"\f505"}.fa-user-slash:before{content:"\f506"}.fa-user-tag:before{content:"\f507"}.fa-user-tie:before{content:"\f508"}.fa-user-times:before{content:"\f235"}.fa-users:before{content:"\f0c0"}.fa-users-cog:before{content:"\f509"}.fa-usps:before{content:"\f7e1"}.fa-ussunnah:before{content:"\f407"}.fa-utensil-spoon:before{content:"\f2e5"}.fa-utensils:before{content:"\f2e7"}.fa-vaadin:before{content:"\f408"}.fa-vector-square:before{content:"\f5cb"}.fa-venus:before{content:"\f221"}.fa-venus-double:before{content:"\f226"}.fa-venus-mars:before{content:"\f228"}.fa-viacoin:before{content:"\f237"}.fa-viadeo:before{content:"\f2a9"}.fa-viadeo-square:before{content:"\f2aa"}.fa-vial:before{content:"\f492"}.fa-vials:before{content:"\f493"}.fa-viber:before{content:"\f409"}.fa-video:before{content:"\f03d"}.fa-video-slash:before{content:"\f4e2"}.fa-vihara:before{content:"\f6a7"}.fa-vimeo:before{content:"\f40a"}.fa-vimeo-square:before{content:"\f194"}.fa-vimeo-v:before{content:"\f27d"}.fa-vine:before{content:"\f1ca"}.fa-vk:before{content:"\f189"}.fa-vnv:before{content:"\f40b"}.fa-volleyball-ball:before{content:"\f45f"}.fa-volume-down:before{content:"\f027"}.fa-volume-mute:before{content:"\f6a9"}.fa-volume-off:before{content:"\f026"}.fa-volume-up:before{content:"\f028"}.fa-vote-yea:before{content:"\f772"}.fa-vr-cardboard:before{content:"\f729"}.fa-vuejs:before{content:"\f41f"}.fa-walking:before{content:"\f554"}.fa-wallet:before{content:"\f555"}.fa-warehouse:before{content:"\f494"}.fa-water:before{content:"\f773"}.fa-weebly:before{content:"\f5cc"}.fa-weibo:before{content:"\f18a"}.fa-weight:before{content:"\f496"}.fa-weight-hanging:before{content:"\f5cd"}.fa-weixin:before{content:"\f1d7"}.fa-whatsapp:before{content:"\f232"}.fa-whatsapp-square:before{content:"\f40c"}.fa-wheelchair:before{content:"\f193"}.fa-whmcs:before{content:"\f40d"}.fa-wifi:before{content:"\f1eb"}.fa-wikipedia-w:before{content:"\f266"}.fa-wind:before{content:"\f72e"}.fa-window-close:before{content:"\f410"}.fa-window-maximize:before{content:"\f2d0"}.fa-window-minimize:before{content:"\f2d1"}.fa-window-restore:before{content:"\f2d2"}.fa-windows:before{content:"\f17a"}.fa-wine-bottle:before{content:"\f72f"}.fa-wine-glass:before{content:"\f4e3"}.fa-wine-glass-alt:before{content:"\f5ce"}.fa-wix:before{content:"\f5cf"}.fa-wizar
ds-of-the-coast:before{content:"\f730"}.fa-wolf-pack-battalion:before{content:"\f514"}.fa-won-sign:before{content:"\f159"}.fa-wordpress:before{content:"\f19a"}.fa-wordpress-simple:before{content:"\f411"}.fa-wpbeginner:before{content:"\f297"}.fa-wpexplorer:before{content:"\f2de"}.fa-wpforms:before{content:"\f298"}.fa-wpressr:before{content:"\f3e4"}.fa-wrench:before{content:"\f0ad"}.fa-x-ray:before{content:"\f497"}.fa-xbox:before{content:"\f412"}.fa-xing:before{content:"\f168"}.fa-xing-square:before{content:"\f169"}.fa-y-combinator:before{content:"\f23b"}.fa-yahoo:before{content:"\f19e"}.fa-yandex:before{content:"\f413"}.fa-yandex-international:before{content:"\f414"}.fa-yarn:before{content:"\f7e3"}.fa-yelp:before{content:"\f1e9"}.fa-yen-sign:before{content:"\f157"}.fa-yin-yang:before{content:"\f6ad"}.fa-yoast:before{content:"\f2b1"}.fa-youtube:before{content:"\f167"}.fa-youtube-square:before{content:"\f431"}.fa-zhihu:before{content:"\f63f"}.sr-only{border:0;clip:rect(0,0,0,0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.sr-only-focusable:active,.sr-only-focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}@font-face{font-family:"Font Awesome 5 Brands";font-style:normal;font-weight:normal;font-display:auto;src:url(../webfonts/fa-brands-400.eot);src:url(../webfonts/fa-brands-400.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-brands-400.woff2) format("woff2"),url(../webfonts/fa-brands-400.woff) format("woff"),url(../webfonts/fa-brands-400.ttf) format("truetype"),url(../webfonts/fa-brands-400.svg#fontawesome) format("svg")}.fab{font-family:"Font Awesome 5 Brands"}@font-face{font-family:"Font Awesome 5 Free";font-style:normal;font-weight:400;font-display:auto;src:url(../webfonts/fa-regular-400.eot);src:url(../webfonts/fa-regular-400.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-regular-400.woff2) format("woff2"),url(../webfonts/fa-regular-400.woff) format("woff"),url(../webfonts/fa-regular-400.ttf) format("truetype"),url(../webfonts/fa-regular-400.svg#fontawesome) format("svg")}.far{font-weight:400}@font-face{font-family:"Font Awesome 5 Free";font-style:normal;font-weight:900;font-display:auto;src:url(../webfonts/fa-solid-900.eot);src:url(../webfonts/fa-solid-900.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-solid-900.woff2) format("woff2"),url(../webfonts/fa-solid-900.woff) format("woff"),url(../webfonts/fa-solid-900.ttf) format("truetype"),url(../webfonts/fa-solid-900.svg#fontawesome) format("svg")}.fa,.far,.fas{font-family:"Font Awesome 5 Free"}.fa,.fas{font-weight:900} \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/www/css/fontawesome.min.css b/testing/web-platform/tests/tools/wave/www/css/fontawesome.min.css
new file mode 100644
index 0000000000..812418ae90
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/css/fontawesome.min.css
@@ -0,0 +1,5 @@
+/*!
+ * Font Awesome Free 5.2.0 by @fontawesome - https://fontawesome.com
+ * License - https://fontawesome.com/license (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)
+ */
+.fa,.fab,.fal,.far,.fas{-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;display:inline-block;font-style:normal;font-variant:normal;text-rendering:auto;line-height:1}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-.0667em}.fa-xs{font-size:.75em}.fa-sm{font-size:.875em}.fa-1x{font-size:1em}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-6x{font-size:6em}.fa-7x{font-size:7em}.fa-8x{font-size:8em}.fa-9x{font-size:9em}.fa-10x{font-size:10em}.fa-fw{text-align:center;width:1.25em}.fa-ul{list-style-type:none;margin-left:2.5em;padding-left:0}.fa-ul>li{position:relative}.fa-li{left:-2em;position:absolute;text-align:center;width:2em;line-height:inherit}.fa-border{border:.08em solid #eee;border-radius:.1em;padding:.2em .25em .15em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa.fa-pull-left,.fab.fa-pull-left,.fal.fa-pull-left,.far.fa-pull-left,.fas.fa-pull-left{margin-right:.3em}.fa.fa-pull-right,.fab.fa-pull-right,.fal.fa-pull-right,.far.fa-pull-right,.fas.fa-pull-right{margin-left:.3em}.fa-spin{animation:a 2s infinite linear}.fa-pulse{animation:a 1s infinite steps(8)}@keyframes a{0%{transform:rotate(0deg)}to{transform:rotate(1turn)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";transform:scaleX(-1)}.fa-flip-vertical{transform:scaleY(-1)}.fa-flip-horizontal.fa-flip-vertical,.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)"}.fa-flip-horizontal.fa-flip-vertical{transform:scale(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root 
.fa-rotate-270{-webkit-filter:none;filter:none}.fa-stack{display:inline-block;height:2em;line-height:2em;position:relative;vertical-align:middle;width:2em}.fa-stack-1x,.fa-stack-2x{left:0;position:absolute;text-align:center;width:100%}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-500px:before{content:"\f26e"}.fa-accessible-icon:before{content:"\f368"}.fa-accusoft:before{content:"\f369"}.fa-address-book:before{content:"\f2b9"}.fa-address-card:before{content:"\f2bb"}.fa-adjust:before{content:"\f042"}.fa-adn:before{content:"\f170"}.fa-adversal:before{content:"\f36a"}.fa-affiliatetheme:before{content:"\f36b"}.fa-air-freshener:before{content:"\f5d0"}.fa-algolia:before{content:"\f36c"}.fa-align-center:before{content:"\f037"}.fa-align-justify:before{content:"\f039"}.fa-align-left:before{content:"\f036"}.fa-align-right:before{content:"\f038"}.fa-allergies:before{content:"\f461"}.fa-amazon:before{content:"\f270"}.fa-amazon-pay:before{content:"\f42c"}.fa-ambulance:before{content:"\f0f9"}.fa-american-sign-language-interpreting:before{content:"\f2a3"}.fa-amilia:before{content:"\f36d"}.fa-anchor:before{content:"\f13d"}.fa-android:before{content:"\f17b"}.fa-angellist:before{content:"\f209"}.fa-angle-double-down:before{content:"\f103"}.fa-angle-double-left:before{content:"\f100"}.fa-angle-double-right:before{content:"\f101"}.fa-angle-double-up:before{content:"\f102"}.fa-angle-down:before{content:"\f107"}.fa-angle-left:before{content:"\f104"}.fa-angle-right:before{content:"\f105"}.fa-angle-up:before{content:"\f106"}.fa-angry:before{content:"\f556"}.fa-angrycreative:before{content:"\f36e"}.fa-angular:before{content:"\f420"}.fa-app-store:before{content:"\f36f"}.fa-app-store-ios:before{content:"\f370"}.fa-apper:before{content:"\f371"}.fa-apple:before{content:"\f179"}.fa-apple-alt:before{content:"\f5d1"}.fa-apple-pay:before{content:"\f415"}.fa-archive:before{content:"\f187"}.fa-archway:before{content:"\f557"}.fa-arrow-alt-circle-down:before{content:"\f358"}.fa-arrow-alt-circle-left:before{content:"\f359"}.fa-arrow-alt-circle-right:before{content:"\f35a"}.fa-arrow-alt-circle-up:before{content:"\f35b"}.fa-arrow-circle-down:before{content:"\f0ab"}.fa-arrow-circle-left:before{content:"\f0a8"}.fa-arrow-circle-right:before{content:"\f0a9"}.fa-arrow-circle-up:before{content:"\f0aa"}.fa-arrow-down:before{content:"\f063"}.fa-arrow-left:before{content:"\f060"}.fa-arrow-right:before{content:"\f061"}.fa-arrow-up:before{content:"\f062"}.fa-arrows-alt:before{content:"\f0b2"}.fa-arrows-alt-h:before{content:"\f337"}.fa-arrows-alt-v:before{content:"\f338"}.fa-assistive-listening-systems:before{content:"\f2a2"}.fa-asterisk:before{content:"\f069"}.fa-asymmetrik:before{content:"\f372"}.fa-at:before{content:"\f1fa"}.fa-atlas:before{content:"\f558"}.fa-atom:before{content:"\f5d2"}.fa-audible:before{content:"\f373"}.fa-audio-description:before{content:"\f29e"}.fa-autoprefixer:before{content:"\f41c"}.fa-avianex:before{content:"\f374"}.fa-aviato:before{content:"\f421"}.fa-award:before{content:"\f559"}.fa-aws:before{content:"\f375"}.fa-backspace:before{content:"\f55a"}.fa-backward:before{content:"\f04a"}.fa-balance-scale:before{content:"\f24e"}.fa-ban:before{content:"\f05e"}.fa-band-aid:before{content:"\f462"}.fa-bandcamp:before{content:"\f2d5"}.fa-barcode:before{content:"\f02a"}.fa-bars:before{content:"\f0c9"}.fa-baseball-ball:before{content:"\f433"}.fa-basketball-ball:before{content:"\f434"}.fa-bath:before{content:"\f2cd"}.fa-battery-empty:before{content:"\f244"}.fa-battery-full:before{content
:"\f240"}.fa-battery-half:before{content:"\f242"}.fa-battery-quarter:before{content:"\f243"}.fa-battery-three-quarters:before{content:"\f241"}.fa-bed:before{content:"\f236"}.fa-beer:before{content:"\f0fc"}.fa-behance:before{content:"\f1b4"}.fa-behance-square:before{content:"\f1b5"}.fa-bell:before{content:"\f0f3"}.fa-bell-slash:before{content:"\f1f6"}.fa-bezier-curve:before{content:"\f55b"}.fa-bicycle:before{content:"\f206"}.fa-bimobject:before{content:"\f378"}.fa-binoculars:before{content:"\f1e5"}.fa-birthday-cake:before{content:"\f1fd"}.fa-bitbucket:before{content:"\f171"}.fa-bitcoin:before{content:"\f379"}.fa-bity:before{content:"\f37a"}.fa-black-tie:before{content:"\f27e"}.fa-blackberry:before{content:"\f37b"}.fa-blender:before{content:"\f517"}.fa-blind:before{content:"\f29d"}.fa-blogger:before{content:"\f37c"}.fa-blogger-b:before{content:"\f37d"}.fa-bluetooth:before{content:"\f293"}.fa-bluetooth-b:before{content:"\f294"}.fa-bold:before{content:"\f032"}.fa-bolt:before{content:"\f0e7"}.fa-bomb:before{content:"\f1e2"}.fa-bone:before{content:"\f5d7"}.fa-bong:before{content:"\f55c"}.fa-book:before{content:"\f02d"}.fa-book-open:before{content:"\f518"}.fa-book-reader:before{content:"\f5da"}.fa-bookmark:before{content:"\f02e"}.fa-bowling-ball:before{content:"\f436"}.fa-box:before{content:"\f466"}.fa-box-open:before{content:"\f49e"}.fa-boxes:before{content:"\f468"}.fa-braille:before{content:"\f2a1"}.fa-brain:before{content:"\f5dc"}.fa-briefcase:before{content:"\f0b1"}.fa-briefcase-medical:before{content:"\f469"}.fa-broadcast-tower:before{content:"\f519"}.fa-broom:before{content:"\f51a"}.fa-brush:before{content:"\f55d"}.fa-btc:before{content:"\f15a"}.fa-bug:before{content:"\f188"}.fa-building:before{content:"\f1ad"}.fa-bullhorn:before{content:"\f0a1"}.fa-bullseye:before{content:"\f140"}.fa-burn:before{content:"\f46a"}.fa-buromobelexperte:before{content:"\f37f"}.fa-bus:before{content:"\f207"}.fa-bus-alt:before{content:"\f55e"}.fa-buysellads:before{content:"\f20d"}.fa-calculator:before{content:"\f1ec"}.fa-calendar:before{content:"\f133"}.fa-calendar-alt:before{content:"\f073"}.fa-calendar-check:before{content:"\f274"}.fa-calendar-minus:before{content:"\f272"}.fa-calendar-plus:before{content:"\f271"}.fa-calendar-times:before{content:"\f273"}.fa-camera:before{content:"\f030"}.fa-camera-retro:before{content:"\f083"}.fa-cannabis:before{content:"\f55f"}.fa-capsules:before{content:"\f46b"}.fa-car:before{content:"\f1b9"}.fa-car-alt:before{content:"\f5de"}.fa-car-battery:before{content:"\f5df"}.fa-car-crash:before{content:"\f5e1"}.fa-car-side:before{content:"\f5e4"}.fa-caret-down:before{content:"\f0d7"}.fa-caret-left:before{content:"\f0d9"}.fa-caret-right:before{content:"\f0da"}.fa-caret-square-down:before{content:"\f150"}.fa-caret-square-left:before{content:"\f191"}.fa-caret-square-right:before{content:"\f152"}.fa-caret-square-up:before{content:"\f151"}.fa-caret-up:before{content:"\f0d8"}.fa-cart-arrow-down:before{content:"\f218"}.fa-cart-plus:before{content:"\f217"}.fa-cc-amazon-pay:before{content:"\f42d"}.fa-cc-amex:before{content:"\f1f3"}.fa-cc-apple-pay:before{content:"\f416"}.fa-cc-diners-club:before{content:"\f24c"}.fa-cc-discover:before{content:"\f1f2"}.fa-cc-jcb:before{content:"\f24b"}.fa-cc-mastercard:before{content:"\f1f1"}.fa-cc-paypal:before{content:"\f1f4"}.fa-cc-stripe:before{content:"\f1f5"}.fa-cc-visa:before{content:"\f1f0"}.fa-centercode:before{content:"\f380"}.fa-certificate:before{content:"\f0a3"}.fa-chalkboard:before{content:"\f51b"}.fa-chalkboard-teacher:before{content:"\f51c"}.fa-cha
rging-station:before{content:"\f5e7"}.fa-chart-area:before{content:"\f1fe"}.fa-chart-bar:before{content:"\f080"}.fa-chart-line:before{content:"\f201"}.fa-chart-pie:before{content:"\f200"}.fa-check:before{content:"\f00c"}.fa-check-circle:before{content:"\f058"}.fa-check-double:before{content:"\f560"}.fa-check-square:before{content:"\f14a"}.fa-chess:before{content:"\f439"}.fa-chess-bishop:before{content:"\f43a"}.fa-chess-board:before{content:"\f43c"}.fa-chess-king:before{content:"\f43f"}.fa-chess-knight:before{content:"\f441"}.fa-chess-pawn:before{content:"\f443"}.fa-chess-queen:before{content:"\f445"}.fa-chess-rook:before{content:"\f447"}.fa-chevron-circle-down:before{content:"\f13a"}.fa-chevron-circle-left:before{content:"\f137"}.fa-chevron-circle-right:before{content:"\f138"}.fa-chevron-circle-up:before{content:"\f139"}.fa-chevron-down:before{content:"\f078"}.fa-chevron-left:before{content:"\f053"}.fa-chevron-right:before{content:"\f054"}.fa-chevron-up:before{content:"\f077"}.fa-child:before{content:"\f1ae"}.fa-chrome:before{content:"\f268"}.fa-church:before{content:"\f51d"}.fa-circle:before{content:"\f111"}.fa-circle-notch:before{content:"\f1ce"}.fa-clipboard:before{content:"\f328"}.fa-clipboard-check:before{content:"\f46c"}.fa-clipboard-list:before{content:"\f46d"}.fa-clock:before{content:"\f017"}.fa-clone:before{content:"\f24d"}.fa-closed-captioning:before{content:"\f20a"}.fa-cloud:before{content:"\f0c2"}.fa-cloud-download-alt:before{content:"\f381"}.fa-cloud-upload-alt:before{content:"\f382"}.fa-cloudscale:before{content:"\f383"}.fa-cloudsmith:before{content:"\f384"}.fa-cloudversify:before{content:"\f385"}.fa-cocktail:before{content:"\f561"}.fa-code:before{content:"\f121"}.fa-code-branch:before{content:"\f126"}.fa-codepen:before{content:"\f1cb"}.fa-codiepie:before{content:"\f284"}.fa-coffee:before{content:"\f0f4"}.fa-cog:before{content:"\f013"}.fa-cogs:before{content:"\f085"}.fa-coins:before{content:"\f51e"}.fa-columns:before{content:"\f0db"}.fa-comment:before{content:"\f075"}.fa-comment-alt:before{content:"\f27a"}.fa-comment-dots:before{content:"\f4ad"}.fa-comment-slash:before{content:"\f4b3"}.fa-comments:before{content:"\f086"}.fa-compact-disc:before{content:"\f51f"}.fa-compass:before{content:"\f14e"}.fa-compress:before{content:"\f066"}.fa-concierge-bell:before{content:"\f562"}.fa-connectdevelop:before{content:"\f20e"}.fa-contao:before{content:"\f26d"}.fa-cookie:before{content:"\f563"}.fa-cookie-bite:before{content:"\f564"}.fa-copy:before{content:"\f0c5"}.fa-copyright:before{content:"\f1f9"}.fa-couch:before{content:"\f4b8"}.fa-cpanel:before{content:"\f388"}.fa-creative-commons:before{content:"\f25e"}.fa-creative-commons-by:before{content:"\f4e7"}.fa-creative-commons-nc:before{content:"\f4e8"}.fa-creative-commons-nc-eu:before{content:"\f4e9"}.fa-creative-commons-nc-jp:before{content:"\f4ea"}.fa-creative-commons-nd:before{content:"\f4eb"}.fa-creative-commons-pd:before{content:"\f4ec"}.fa-creative-commons-pd-alt:before{content:"\f4ed"}.fa-creative-commons-remix:before{content:"\f4ee"}.fa-creative-commons-sa:before{content:"\f4ef"}.fa-creative-commons-sampling:before{content:"\f4f0"}.fa-creative-commons-sampling-plus:before{content:"\f4f1"}.fa-creative-commons-share:before{content:"\f4f2"}.fa-credit-card:before{content:"\f09d"}.fa-crop:before{content:"\f125"}.fa-crop-alt:before{content:"\f565"}.fa-crosshairs:before{content:"\f05b"}.fa-crow:before{content:"\f520"}.fa-crown:before{content:"\f521"}.fa-css3:before{content:"\f13c"}.fa-css3-alt:before{content:"\f38b"}.fa-cube:before{content:"\
f1b2"}.fa-cubes:before{content:"\f1b3"}.fa-cut:before{content:"\f0c4"}.fa-cuttlefish:before{content:"\f38c"}.fa-d-and-d:before{content:"\f38d"}.fa-dashcube:before{content:"\f210"}.fa-database:before{content:"\f1c0"}.fa-deaf:before{content:"\f2a4"}.fa-delicious:before{content:"\f1a5"}.fa-deploydog:before{content:"\f38e"}.fa-deskpro:before{content:"\f38f"}.fa-desktop:before{content:"\f108"}.fa-deviantart:before{content:"\f1bd"}.fa-diagnoses:before{content:"\f470"}.fa-dice:before{content:"\f522"}.fa-dice-five:before{content:"\f523"}.fa-dice-four:before{content:"\f524"}.fa-dice-one:before{content:"\f525"}.fa-dice-six:before{content:"\f526"}.fa-dice-three:before{content:"\f527"}.fa-dice-two:before{content:"\f528"}.fa-digg:before{content:"\f1a6"}.fa-digital-ocean:before{content:"\f391"}.fa-digital-tachograph:before{content:"\f566"}.fa-directions:before{content:"\f5eb"}.fa-discord:before{content:"\f392"}.fa-discourse:before{content:"\f393"}.fa-divide:before{content:"\f529"}.fa-dizzy:before{content:"\f567"}.fa-dna:before{content:"\f471"}.fa-dochub:before{content:"\f394"}.fa-docker:before{content:"\f395"}.fa-dollar-sign:before{content:"\f155"}.fa-dolly:before{content:"\f472"}.fa-dolly-flatbed:before{content:"\f474"}.fa-donate:before{content:"\f4b9"}.fa-door-closed:before{content:"\f52a"}.fa-door-open:before{content:"\f52b"}.fa-dot-circle:before{content:"\f192"}.fa-dove:before{content:"\f4ba"}.fa-download:before{content:"\f019"}.fa-draft2digital:before{content:"\f396"}.fa-drafting-compass:before{content:"\f568"}.fa-draw-polygon:before{content:"\f5ee"}.fa-dribbble:before{content:"\f17d"}.fa-dribbble-square:before{content:"\f397"}.fa-dropbox:before{content:"\f16b"}.fa-drum:before{content:"\f569"}.fa-drum-steelpan:before{content:"\f56a"}.fa-drupal:before{content:"\f1a9"}.fa-dumbbell:before{content:"\f44b"}.fa-dyalog:before{content:"\f399"}.fa-earlybirds:before{content:"\f39a"}.fa-ebay:before{content:"\f4f4"}.fa-edge:before{content:"\f282"}.fa-edit:before{content:"\f044"}.fa-eject:before{content:"\f052"}.fa-elementor:before{content:"\f430"}.fa-ellipsis-h:before{content:"\f141"}.fa-ellipsis-v:before{content:"\f142"}.fa-ello:before{content:"\f5f1"}.fa-ember:before{content:"\f423"}.fa-empire:before{content:"\f1d1"}.fa-envelope:before{content:"\f0e0"}.fa-envelope-open:before{content:"\f2b6"}.fa-envelope-square:before{content:"\f199"}.fa-envira:before{content:"\f299"}.fa-equals:before{content:"\f52c"}.fa-eraser:before{content:"\f12d"}.fa-erlang:before{content:"\f39d"}.fa-ethereum:before{content:"\f42e"}.fa-etsy:before{content:"\f2d7"}.fa-euro-sign:before{content:"\f153"}.fa-exchange-alt:before{content:"\f362"}.fa-exclamation:before{content:"\f12a"}.fa-exclamation-circle:before{content:"\f06a"}.fa-exclamation-triangle:before{content:"\f071"}.fa-expand:before{content:"\f065"}.fa-expand-arrows-alt:before{content:"\f31e"}.fa-expeditedssl:before{content:"\f23e"}.fa-external-link-alt:before{content:"\f35d"}.fa-external-link-square-alt:before{content:"\f360"}.fa-eye:before{content:"\f06e"}.fa-eye-dropper:before{content:"\f1fb"}.fa-eye-slash:before{content:"\f070"}.fa-facebook:before{content:"\f09a"}.fa-facebook-f:before{content:"\f39e"}.fa-facebook-messenger:before{content:"\f39f"}.fa-facebook-square:before{content:"\f082"}.fa-fast-backward:before{content:"\f049"}.fa-fast-forward:before{content:"\f050"}.fa-fax:before{content:"\f1ac"}.fa-feather:before{content:"\f52d"}.fa-feather-alt:before{content:"\f56b"}.fa-female:before{content:"\f182"}.fa-fighter-jet:before{content:"\f0fb"}.fa-file:before{content:"\f15b"}.fa-fil
e-alt:before{content:"\f15c"}.fa-file-archive:before{content:"\f1c6"}.fa-file-audio:before{content:"\f1c7"}.fa-file-code:before{content:"\f1c9"}.fa-file-contract:before{content:"\f56c"}.fa-file-download:before{content:"\f56d"}.fa-file-excel:before{content:"\f1c3"}.fa-file-export:before{content:"\f56e"}.fa-file-image:before{content:"\f1c5"}.fa-file-import:before{content:"\f56f"}.fa-file-invoice:before{content:"\f570"}.fa-file-invoice-dollar:before{content:"\f571"}.fa-file-medical:before{content:"\f477"}.fa-file-medical-alt:before{content:"\f478"}.fa-file-pdf:before{content:"\f1c1"}.fa-file-powerpoint:before{content:"\f1c4"}.fa-file-prescription:before{content:"\f572"}.fa-file-signature:before{content:"\f573"}.fa-file-upload:before{content:"\f574"}.fa-file-video:before{content:"\f1c8"}.fa-file-word:before{content:"\f1c2"}.fa-fill:before{content:"\f575"}.fa-fill-drip:before{content:"\f576"}.fa-film:before{content:"\f008"}.fa-filter:before{content:"\f0b0"}.fa-fingerprint:before{content:"\f577"}.fa-fire:before{content:"\f06d"}.fa-fire-extinguisher:before{content:"\f134"}.fa-firefox:before{content:"\f269"}.fa-first-aid:before{content:"\f479"}.fa-first-order:before{content:"\f2b0"}.fa-first-order-alt:before{content:"\f50a"}.fa-firstdraft:before{content:"\f3a1"}.fa-fish:before{content:"\f578"}.fa-flag:before{content:"\f024"}.fa-flag-checkered:before{content:"\f11e"}.fa-flask:before{content:"\f0c3"}.fa-flickr:before{content:"\f16e"}.fa-flipboard:before{content:"\f44d"}.fa-flushed:before{content:"\f579"}.fa-fly:before{content:"\f417"}.fa-folder:before{content:"\f07b"}.fa-folder-open:before{content:"\f07c"}.fa-font:before{content:"\f031"}.fa-font-awesome:before{content:"\f2b4"}.fa-font-awesome-alt:before{content:"\f35c"}.fa-font-awesome-flag:before{content:"\f425"}.fa-font-awesome-logo-full:before{content:"\f4e6"}.fa-fonticons:before{content:"\f280"}.fa-fonticons-fi:before{content:"\f3a2"}.fa-football-ball:before{content:"\f44e"}.fa-fort-awesome:before{content:"\f286"}.fa-fort-awesome-alt:before{content:"\f3a3"}.fa-forumbee:before{content:"\f211"}.fa-forward:before{content:"\f04e"}.fa-foursquare:before{content:"\f180"}.fa-free-code-camp:before{content:"\f2c5"}.fa-freebsd:before{content:"\f3a4"}.fa-frog:before{content:"\f52e"}.fa-frown:before{content:"\f119"}.fa-frown-open:before{content:"\f57a"}.fa-fulcrum:before{content:"\f50b"}.fa-futbol:before{content:"\f1e3"}.fa-galactic-republic:before{content:"\f50c"}.fa-galactic-senate:before{content:"\f50d"}.fa-gamepad:before{content:"\f11b"}.fa-gas-pump:before{content:"\f52f"}.fa-gavel:before{content:"\f0e3"}.fa-gem:before{content:"\f3a5"}.fa-genderless:before{content:"\f22d"}.fa-get-pocket:before{content:"\f265"}.fa-gg:before{content:"\f260"}.fa-gg-circle:before{content:"\f261"}.fa-gift:before{content:"\f06b"}.fa-git:before{content:"\f1d3"}.fa-git-square:before{content:"\f1d2"}.fa-github:before{content:"\f09b"}.fa-github-alt:before{content:"\f113"}.fa-github-square:before{content:"\f092"}.fa-gitkraken:before{content:"\f3a6"}.fa-gitlab:before{content:"\f296"}.fa-gitter:before{content:"\f426"}.fa-glass-martini:before{content:"\f000"}.fa-glass-martini-alt:before{content:"\f57b"}.fa-glasses:before{content:"\f530"}.fa-glide:before{content:"\f2a5"}.fa-glide-g:before{content:"\f2a6"}.fa-globe:before{content:"\f0ac"}.fa-globe-africa:before{content:"\f57c"}.fa-globe-americas:before{content:"\f57d"}.fa-globe-asia:before{content:"\f57e"}.fa-gofore:before{content:"\f3a7"}.fa-golf-ball:before{content:"\f450"}.fa-goodreads:before{content:"\f3a8"}.fa-goodreads-g:before{con
tent:"\f3a9"}.fa-google:before{content:"\f1a0"}.fa-google-drive:before{content:"\f3aa"}.fa-google-play:before{content:"\f3ab"}.fa-google-plus:before{content:"\f2b3"}.fa-google-plus-g:before{content:"\f0d5"}.fa-google-plus-square:before{content:"\f0d4"}.fa-google-wallet:before{content:"\f1ee"}.fa-graduation-cap:before{content:"\f19d"}.fa-gratipay:before{content:"\f184"}.fa-grav:before{content:"\f2d6"}.fa-greater-than:before{content:"\f531"}.fa-greater-than-equal:before{content:"\f532"}.fa-grimace:before{content:"\f57f"}.fa-grin:before{content:"\f580"}.fa-grin-alt:before{content:"\f581"}.fa-grin-beam:before{content:"\f582"}.fa-grin-beam-sweat:before{content:"\f583"}.fa-grin-hearts:before{content:"\f584"}.fa-grin-squint:before{content:"\f585"}.fa-grin-squint-tears:before{content:"\f586"}.fa-grin-stars:before{content:"\f587"}.fa-grin-tears:before{content:"\f588"}.fa-grin-tongue:before{content:"\f589"}.fa-grin-tongue-squint:before{content:"\f58a"}.fa-grin-tongue-wink:before{content:"\f58b"}.fa-grin-wink:before{content:"\f58c"}.fa-grip-horizontal:before{content:"\f58d"}.fa-grip-vertical:before{content:"\f58e"}.fa-gripfire:before{content:"\f3ac"}.fa-grunt:before{content:"\f3ad"}.fa-gulp:before{content:"\f3ae"}.fa-h-square:before{content:"\f0fd"}.fa-hacker-news:before{content:"\f1d4"}.fa-hacker-news-square:before{content:"\f3af"}.fa-hackerrank:before{content:"\f5f7"}.fa-hand-holding:before{content:"\f4bd"}.fa-hand-holding-heart:before{content:"\f4be"}.fa-hand-holding-usd:before{content:"\f4c0"}.fa-hand-lizard:before{content:"\f258"}.fa-hand-paper:before{content:"\f256"}.fa-hand-peace:before{content:"\f25b"}.fa-hand-point-down:before{content:"\f0a7"}.fa-hand-point-left:before{content:"\f0a5"}.fa-hand-point-right:before{content:"\f0a4"}.fa-hand-point-up:before{content:"\f0a6"}.fa-hand-pointer:before{content:"\f25a"}.fa-hand-rock:before{content:"\f255"}.fa-hand-scissors:before{content:"\f257"}.fa-hand-spock:before{content:"\f259"}.fa-hands:before{content:"\f4c2"}.fa-hands-helping:before{content:"\f4c4"}.fa-handshake:before{content:"\f2b5"}.fa-hashtag:before{content:"\f292"}.fa-hdd:before{content:"\f0a0"}.fa-heading:before{content:"\f1dc"}.fa-headphones:before{content:"\f025"}.fa-headphones-alt:before{content:"\f58f"}.fa-headset:before{content:"\f590"}.fa-heart:before{content:"\f004"}.fa-heartbeat:before{content:"\f21e"}.fa-helicopter:before{content:"\f533"}.fa-highlighter:before{content:"\f591"}.fa-hips:before{content:"\f452"}.fa-hire-a-helper:before{content:"\f3b0"}.fa-history:before{content:"\f1da"}.fa-hockey-puck:before{content:"\f453"}.fa-home:before{content:"\f015"}.fa-hooli:before{content:"\f427"}.fa-hornbill:before{content:"\f592"}.fa-hospital:before{content:"\f0f8"}.fa-hospital-alt:before{content:"\f47d"}.fa-hospital-symbol:before{content:"\f47e"}.fa-hot-tub:before{content:"\f593"}.fa-hotel:before{content:"\f594"}.fa-hotjar:before{content:"\f3b1"}.fa-hourglass:before{content:"\f254"}.fa-hourglass-end:before{content:"\f253"}.fa-hourglass-half:before{content:"\f252"}.fa-hourglass-start:before{content:"\f251"}.fa-houzz:before{content:"\f27c"}.fa-html5:before{content:"\f13b"}.fa-hubspot:before{content:"\f3b2"}.fa-i-cursor:before{content:"\f246"}.fa-id-badge:before{content:"\f2c1"}.fa-id-card:before{content:"\f2c2"}.fa-id-card-alt:before{content:"\f47f"}.fa-image:before{content:"\f03e"}.fa-images:before{content:"\f302"}.fa-imdb:before{content:"\f2d8"}.fa-inbox:before{content:"\f01c"}.fa-indent:before{content:"\f03c"}.fa-industry:before{content:"\f275"}.fa-infinity:before{content:"\f534"}.fa-info:be
fore{content:"\f129"}.fa-info-circle:before{content:"\f05a"}.fa-instagram:before{content:"\f16d"}.fa-internet-explorer:before{content:"\f26b"}.fa-ioxhost:before{content:"\f208"}.fa-italic:before{content:"\f033"}.fa-itunes:before{content:"\f3b4"}.fa-itunes-note:before{content:"\f3b5"}.fa-java:before{content:"\f4e4"}.fa-jedi-order:before{content:"\f50e"}.fa-jenkins:before{content:"\f3b6"}.fa-joget:before{content:"\f3b7"}.fa-joint:before{content:"\f595"}.fa-joomla:before{content:"\f1aa"}.fa-js:before{content:"\f3b8"}.fa-js-square:before{content:"\f3b9"}.fa-jsfiddle:before{content:"\f1cc"}.fa-kaggle:before{content:"\f5fa"}.fa-key:before{content:"\f084"}.fa-keybase:before{content:"\f4f5"}.fa-keyboard:before{content:"\f11c"}.fa-keycdn:before{content:"\f3ba"}.fa-kickstarter:before{content:"\f3bb"}.fa-kickstarter-k:before{content:"\f3bc"}.fa-kiss:before{content:"\f596"}.fa-kiss-beam:before{content:"\f597"}.fa-kiss-wink-heart:before{content:"\f598"}.fa-kiwi-bird:before{content:"\f535"}.fa-korvue:before{content:"\f42f"}.fa-language:before{content:"\f1ab"}.fa-laptop:before{content:"\f109"}.fa-laptop-code:before{content:"\f5fc"}.fa-laravel:before{content:"\f3bd"}.fa-lastfm:before{content:"\f202"}.fa-lastfm-square:before{content:"\f203"}.fa-laugh:before{content:"\f599"}.fa-laugh-beam:before{content:"\f59a"}.fa-laugh-squint:before{content:"\f59b"}.fa-laugh-wink:before{content:"\f59c"}.fa-layer-group:before{content:"\f5fd"}.fa-leaf:before{content:"\f06c"}.fa-leanpub:before{content:"\f212"}.fa-lemon:before{content:"\f094"}.fa-less:before{content:"\f41d"}.fa-less-than:before{content:"\f536"}.fa-less-than-equal:before{content:"\f537"}.fa-level-down-alt:before{content:"\f3be"}.fa-level-up-alt:before{content:"\f3bf"}.fa-life-ring:before{content:"\f1cd"}.fa-lightbulb:before{content:"\f0eb"}.fa-line:before{content:"\f3c0"}.fa-link:before{content:"\f0c1"}.fa-linkedin:before{content:"\f08c"}.fa-linkedin-in:before{content:"\f0e1"}.fa-linode:before{content:"\f2b8"}.fa-linux:before{content:"\f17c"}.fa-lira-sign:before{content:"\f195"}.fa-list:before{content:"\f03a"}.fa-list-alt:before{content:"\f022"}.fa-list-ol:before{content:"\f0cb"}.fa-list-ul:before{content:"\f0ca"}.fa-location-arrow:before{content:"\f124"}.fa-lock:before{content:"\f023"}.fa-lock-open:before{content:"\f3c1"}.fa-long-arrow-alt-down:before{content:"\f309"}.fa-long-arrow-alt-left:before{content:"\f30a"}.fa-long-arrow-alt-right:before{content:"\f30b"}.fa-long-arrow-alt-up:before{content:"\f30c"}.fa-low-vision:before{content:"\f2a8"}.fa-luggage-cart:before{content:"\f59d"}.fa-lyft:before{content:"\f3c3"}.fa-magento:before{content:"\f3c4"}.fa-magic:before{content:"\f0d0"}.fa-magnet:before{content:"\f076"}.fa-mailchimp:before{content:"\f59e"}.fa-male:before{content:"\f183"}.fa-mandalorian:before{content:"\f50f"}.fa-map:before{content:"\f279"}.fa-map-marked:before{content:"\f59f"}.fa-map-marked-alt:before{content:"\f5a0"}.fa-map-marker:before{content:"\f041"}.fa-map-marker-alt:before{content:"\f3c5"}.fa-map-pin:before{content:"\f276"}.fa-map-signs:before{content:"\f277"}.fa-markdown:before{content:"\f60f"}.fa-marker:before{content:"\f5a1"}.fa-mars:before{content:"\f222"}.fa-mars-double:before{content:"\f227"}.fa-mars-stroke:before{content:"\f229"}.fa-mars-stroke-h:before{content:"\f22b"}.fa-mars-stroke-v:before{content:"\f22a"}.fa-mastodon:before{content:"\f4f6"}.fa-maxcdn:before{content:"\f136"}.fa-medal:before{content:"\f5a2"}.fa-medapps:before{content:"\f3c6"}.fa-medium:before{content:"\f23a"}.fa-medium-m:before{content:"\f3c7"}.fa-medkit:before{conte
nt:"\f0fa"}.fa-medrt:before{content:"\f3c8"}.fa-meetup:before{content:"\f2e0"}.fa-megaport:before{content:"\f5a3"}.fa-meh:before{content:"\f11a"}.fa-meh-blank:before{content:"\f5a4"}.fa-meh-rolling-eyes:before{content:"\f5a5"}.fa-memory:before{content:"\f538"}.fa-mercury:before{content:"\f223"}.fa-microchip:before{content:"\f2db"}.fa-microphone:before{content:"\f130"}.fa-microphone-alt:before{content:"\f3c9"}.fa-microphone-alt-slash:before{content:"\f539"}.fa-microphone-slash:before{content:"\f131"}.fa-microscope:before{content:"\f610"}.fa-microsoft:before{content:"\f3ca"}.fa-minus:before{content:"\f068"}.fa-minus-circle:before{content:"\f056"}.fa-minus-square:before{content:"\f146"}.fa-mix:before{content:"\f3cb"}.fa-mixcloud:before{content:"\f289"}.fa-mizuni:before{content:"\f3cc"}.fa-mobile:before{content:"\f10b"}.fa-mobile-alt:before{content:"\f3cd"}.fa-modx:before{content:"\f285"}.fa-monero:before{content:"\f3d0"}.fa-money-bill:before{content:"\f0d6"}.fa-money-bill-alt:before{content:"\f3d1"}.fa-money-bill-wave:before{content:"\f53a"}.fa-money-bill-wave-alt:before{content:"\f53b"}.fa-money-check:before{content:"\f53c"}.fa-money-check-alt:before{content:"\f53d"}.fa-monument:before{content:"\f5a6"}.fa-moon:before{content:"\f186"}.fa-mortar-pestle:before{content:"\f5a7"}.fa-motorcycle:before{content:"\f21c"}.fa-mouse-pointer:before{content:"\f245"}.fa-music:before{content:"\f001"}.fa-napster:before{content:"\f3d2"}.fa-neos:before{content:"\f612"}.fa-neuter:before{content:"\f22c"}.fa-newspaper:before{content:"\f1ea"}.fa-nimblr:before{content:"\f5a8"}.fa-nintendo-switch:before{content:"\f418"}.fa-node:before{content:"\f419"}.fa-node-js:before{content:"\f3d3"}.fa-not-equal:before{content:"\f53e"}.fa-notes-medical:before{content:"\f481"}.fa-npm:before{content:"\f3d4"}.fa-ns8:before{content:"\f3d5"}.fa-nutritionix:before{content:"\f3d6"}.fa-object-group:before{content:"\f247"}.fa-object-ungroup:before{content:"\f248"}.fa-odnoklassniki:before{content:"\f263"}.fa-odnoklassniki-square:before{content:"\f264"}.fa-oil-can:before{content:"\f613"}.fa-old-republic:before{content:"\f510"}.fa-opencart:before{content:"\f23d"}.fa-openid:before{content:"\f19b"}.fa-opera:before{content:"\f26a"}.fa-optin-monster:before{content:"\f23c"}.fa-osi:before{content:"\f41a"}.fa-outdent:before{content:"\f03b"}.fa-page4:before{content:"\f3d7"}.fa-pagelines:before{content:"\f18c"}.fa-paint-brush:before{content:"\f1fc"}.fa-paint-roller:before{content:"\f5aa"}.fa-palette:before{content:"\f53f"}.fa-palfed:before{content:"\f3d8"}.fa-pallet:before{content:"\f482"}.fa-paper-plane:before{content:"\f1d8"}.fa-paperclip:before{content:"\f0c6"}.fa-parachute-box:before{content:"\f4cd"}.fa-paragraph:before{content:"\f1dd"}.fa-parking:before{content:"\f540"}.fa-passport:before{content:"\f5ab"}.fa-paste:before{content:"\f0ea"}.fa-patreon:before{content:"\f3d9"}.fa-pause:before{content:"\f04c"}.fa-pause-circle:before{content:"\f28b"}.fa-paw:before{content:"\f1b0"}.fa-paypal:before{content:"\f1ed"}.fa-pen:before{content:"\f304"}.fa-pen-alt:before{content:"\f305"}.fa-pen-fancy:before{content:"\f5ac"}.fa-pen-nib:before{content:"\f5ad"}.fa-pen-square:before{content:"\f14b"}.fa-pencil-alt:before{content:"\f303"}.fa-pencil-ruler:before{content:"\f5ae"}.fa-people-carry:before{content:"\f4ce"}.fa-percent:before{content:"\f295"}.fa-percentage:before{content:"\f541"}.fa-periscope:before{content:"\f3da"}.fa-phabricator:before{content:"\f3db"}.fa-phoenix-framework:before{content:"\f3dc"}.fa-phoenix-squadron:before{content:"\f511"}.fa-phone:before{co
ntent:"\f095"}.fa-phone-slash:before{content:"\f3dd"}.fa-phone-square:before{content:"\f098"}.fa-phone-volume:before{content:"\f2a0"}.fa-php:before{content:"\f457"}.fa-pied-piper:before{content:"\f2ae"}.fa-pied-piper-alt:before{content:"\f1a8"}.fa-pied-piper-hat:before{content:"\f4e5"}.fa-pied-piper-pp:before{content:"\f1a7"}.fa-piggy-bank:before{content:"\f4d3"}.fa-pills:before{content:"\f484"}.fa-pinterest:before{content:"\f0d2"}.fa-pinterest-p:before{content:"\f231"}.fa-pinterest-square:before{content:"\f0d3"}.fa-plane:before{content:"\f072"}.fa-plane-arrival:before{content:"\f5af"}.fa-plane-departure:before{content:"\f5b0"}.fa-play:before{content:"\f04b"}.fa-play-circle:before{content:"\f144"}.fa-playstation:before{content:"\f3df"}.fa-plug:before{content:"\f1e6"}.fa-plus:before{content:"\f067"}.fa-plus-circle:before{content:"\f055"}.fa-plus-square:before{content:"\f0fe"}.fa-podcast:before{content:"\f2ce"}.fa-poo:before{content:"\f2fe"}.fa-poop:before{content:"\f619"}.fa-portrait:before{content:"\f3e0"}.fa-pound-sign:before{content:"\f154"}.fa-power-off:before{content:"\f011"}.fa-prescription:before{content:"\f5b1"}.fa-prescription-bottle:before{content:"\f485"}.fa-prescription-bottle-alt:before{content:"\f486"}.fa-print:before{content:"\f02f"}.fa-procedures:before{content:"\f487"}.fa-product-hunt:before{content:"\f288"}.fa-project-diagram:before{content:"\f542"}.fa-pushed:before{content:"\f3e1"}.fa-puzzle-piece:before{content:"\f12e"}.fa-python:before{content:"\f3e2"}.fa-qq:before{content:"\f1d6"}.fa-qrcode:before{content:"\f029"}.fa-question:before{content:"\f128"}.fa-question-circle:before{content:"\f059"}.fa-quidditch:before{content:"\f458"}.fa-quinscape:before{content:"\f459"}.fa-quora:before{content:"\f2c4"}.fa-quote-left:before{content:"\f10d"}.fa-quote-right:before{content:"\f10e"}.fa-r-project:before{content:"\f4f7"}.fa-random:before{content:"\f074"}.fa-ravelry:before{content:"\f2d9"}.fa-react:before{content:"\f41b"}.fa-readme:before{content:"\f4d5"}.fa-rebel:before{content:"\f1d0"}.fa-receipt:before{content:"\f543"}.fa-recycle:before{content:"\f1b8"}.fa-red-river:before{content:"\f3e3"}.fa-reddit:before{content:"\f1a1"}.fa-reddit-alien:before{content:"\f281"}.fa-reddit-square:before{content:"\f1a2"}.fa-redo:before{content:"\f01e"}.fa-redo-alt:before{content:"\f2f9"}.fa-registered:before{content:"\f25d"}.fa-rendact:before{content:"\f3e4"}.fa-renren:before{content:"\f18b"}.fa-reply:before{content:"\f3e5"}.fa-reply-all:before{content:"\f122"}.fa-replyd:before{content:"\f3e6"}.fa-researchgate:before{content:"\f4f8"}.fa-resolving:before{content:"\f3e7"}.fa-retweet:before{content:"\f079"}.fa-rev:before{content:"\f5b2"}.fa-ribbon:before{content:"\f4d6"}.fa-road:before{content:"\f018"}.fa-robot:before{content:"\f544"}.fa-rocket:before{content:"\f135"}.fa-rocketchat:before{content:"\f3e8"}.fa-rockrms:before{content:"\f3e9"}.fa-route:before{content:"\f4d7"}.fa-rss:before{content:"\f09e"}.fa-rss-square:before{content:"\f143"}.fa-ruble-sign:before{content:"\f158"}.fa-ruler:before{content:"\f545"}.fa-ruler-combined:before{content:"\f546"}.fa-ruler-horizontal:before{content:"\f547"}.fa-ruler-vertical:before{content:"\f548"}.fa-rupee-sign:before{content:"\f156"}.fa-sad-cry:before{content:"\f5b3"}.fa-sad-tear:before{content:"\f5b4"}.fa-safari:before{content:"\f267"}.fa-sass:before{content:"\f41e"}.fa-save:before{content:"\f0c7"}.fa-schlix:before{content:"\f3ea"}.fa-school:before{content:"\f549"}.fa-screwdriver:before{content:"\f54a"}.fa-scribd:before{content:"\f28a"}.fa-search:before{content:"
\f002"}.fa-search-minus:before{content:"\f010"}.fa-search-plus:before{content:"\f00e"}.fa-searchengin:before{content:"\f3eb"}.fa-seedling:before{content:"\f4d8"}.fa-sellcast:before{content:"\f2da"}.fa-sellsy:before{content:"\f213"}.fa-server:before{content:"\f233"}.fa-servicestack:before{content:"\f3ec"}.fa-shapes:before{content:"\f61f"}.fa-share:before{content:"\f064"}.fa-share-alt:before{content:"\f1e0"}.fa-share-alt-square:before{content:"\f1e1"}.fa-share-square:before{content:"\f14d"}.fa-shekel-sign:before{content:"\f20b"}.fa-shield-alt:before{content:"\f3ed"}.fa-ship:before{content:"\f21a"}.fa-shipping-fast:before{content:"\f48b"}.fa-shirtsinbulk:before{content:"\f214"}.fa-shoe-prints:before{content:"\f54b"}.fa-shopping-bag:before{content:"\f290"}.fa-shopping-basket:before{content:"\f291"}.fa-shopping-cart:before{content:"\f07a"}.fa-shopware:before{content:"\f5b5"}.fa-shower:before{content:"\f2cc"}.fa-shuttle-van:before{content:"\f5b6"}.fa-sign:before{content:"\f4d9"}.fa-sign-in-alt:before{content:"\f2f6"}.fa-sign-language:before{content:"\f2a7"}.fa-sign-out-alt:before{content:"\f2f5"}.fa-signal:before{content:"\f012"}.fa-signature:before{content:"\f5b7"}.fa-simplybuilt:before{content:"\f215"}.fa-sistrix:before{content:"\f3ee"}.fa-sitemap:before{content:"\f0e8"}.fa-sith:before{content:"\f512"}.fa-skull:before{content:"\f54c"}.fa-skyatlas:before{content:"\f216"}.fa-skype:before{content:"\f17e"}.fa-slack:before{content:"\f198"}.fa-slack-hash:before{content:"\f3ef"}.fa-sliders-h:before{content:"\f1de"}.fa-slideshare:before{content:"\f1e7"}.fa-smile:before{content:"\f118"}.fa-smile-beam:before{content:"\f5b8"}.fa-smile-wink:before{content:"\f4da"}.fa-smoking:before{content:"\f48d"}.fa-smoking-ban:before{content:"\f54d"}.fa-snapchat:before{content:"\f2ab"}.fa-snapchat-ghost:before{content:"\f2ac"}.fa-snapchat-square:before{content:"\f2ad"}.fa-snowflake:before{content:"\f2dc"}.fa-solar-panel:before{content:"\f5ba"}.fa-sort:before{content:"\f0dc"}.fa-sort-alpha-down:before{content:"\f15d"}.fa-sort-alpha-up:before{content:"\f15e"}.fa-sort-amount-down:before{content:"\f160"}.fa-sort-amount-up:before{content:"\f161"}.fa-sort-down:before{content:"\f0dd"}.fa-sort-numeric-down:before{content:"\f162"}.fa-sort-numeric-up:before{content:"\f163"}.fa-sort-up:before{content:"\f0de"}.fa-soundcloud:before{content:"\f1be"}.fa-spa:before{content:"\f5bb"}.fa-space-shuttle:before{content:"\f197"}.fa-speakap:before{content:"\f3f3"}.fa-spinner:before{content:"\f110"}.fa-splotch:before{content:"\f5bc"}.fa-spotify:before{content:"\f1bc"}.fa-spray-can:before{content:"\f5bd"}.fa-square:before{content:"\f0c8"}.fa-square-full:before{content:"\f45c"}.fa-squarespace:before{content:"\f5be"}.fa-stack-exchange:before{content:"\f18d"}.fa-stack-overflow:before{content:"\f16c"}.fa-stamp:before{content:"\f5bf"}.fa-star:before{content:"\f005"}.fa-star-half:before{content:"\f089"}.fa-star-half-alt:before{content:"\f5c0"}.fa-star-of-life:before{content:"\f621"}.fa-staylinked:before{content:"\f3f5"}.fa-steam:before{content:"\f1b6"}.fa-steam-square:before{content:"\f1b7"}.fa-steam-symbol:before{content:"\f3f6"}.fa-step-backward:before{content:"\f048"}.fa-step-forward:before{content:"\f051"}.fa-stethoscope:before{content:"\f0f1"}.fa-sticker-mule:before{content:"\f3f7"}.fa-sticky-note:before{content:"\f249"}.fa-stop:before{content:"\f04d"}.fa-stop-circle:before{content:"\f28d"}.fa-stopwatch:before{content:"\f2f2"}.fa-store:before{content:"\f54e"}.fa-store-alt:before{content:"\f54f"}.fa-strava:before{content:"\f428"}.fa-stream:before{
content:"\f550"}.fa-street-view:before{content:"\f21d"}.fa-strikethrough:before{content:"\f0cc"}.fa-stripe:before{content:"\f429"}.fa-stripe-s:before{content:"\f42a"}.fa-stroopwafel:before{content:"\f551"}.fa-studiovinari:before{content:"\f3f8"}.fa-stumbleupon:before{content:"\f1a4"}.fa-stumbleupon-circle:before{content:"\f1a3"}.fa-subscript:before{content:"\f12c"}.fa-subway:before{content:"\f239"}.fa-suitcase:before{content:"\f0f2"}.fa-suitcase-rolling:before{content:"\f5c1"}.fa-sun:before{content:"\f185"}.fa-superpowers:before{content:"\f2dd"}.fa-superscript:before{content:"\f12b"}.fa-supple:before{content:"\f3f9"}.fa-surprise:before{content:"\f5c2"}.fa-swatchbook:before{content:"\f5c3"}.fa-swimmer:before{content:"\f5c4"}.fa-swimming-pool:before{content:"\f5c5"}.fa-sync:before{content:"\f021"}.fa-sync-alt:before{content:"\f2f1"}.fa-syringe:before{content:"\f48e"}.fa-table:before{content:"\f0ce"}.fa-table-tennis:before{content:"\f45d"}.fa-tablet:before{content:"\f10a"}.fa-tablet-alt:before{content:"\f3fa"}.fa-tablets:before{content:"\f490"}.fa-tachometer-alt:before{content:"\f3fd"}.fa-tag:before{content:"\f02b"}.fa-tags:before{content:"\f02c"}.fa-tape:before{content:"\f4db"}.fa-tasks:before{content:"\f0ae"}.fa-taxi:before{content:"\f1ba"}.fa-teamspeak:before{content:"\f4f9"}.fa-teeth:before{content:"\f62e"}.fa-teeth-open:before{content:"\f62f"}.fa-telegram:before{content:"\f2c6"}.fa-telegram-plane:before{content:"\f3fe"}.fa-tencent-weibo:before{content:"\f1d5"}.fa-terminal:before{content:"\f120"}.fa-text-height:before{content:"\f034"}.fa-text-width:before{content:"\f035"}.fa-th:before{content:"\f00a"}.fa-th-large:before{content:"\f009"}.fa-th-list:before{content:"\f00b"}.fa-theater-masks:before{content:"\f630"}.fa-themeco:before{content:"\f5c6"}.fa-themeisle:before{content:"\f2b2"}.fa-thermometer:before{content:"\f491"}.fa-thermometer-empty:before{content:"\f2cb"}.fa-thermometer-full:before{content:"\f2c7"}.fa-thermometer-half:before{content:"\f2c9"}.fa-thermometer-quarter:before{content:"\f2ca"}.fa-thermometer-three-quarters:before{content:"\f2c8"}.fa-thumbs-down:before{content:"\f165"}.fa-thumbs-up:before{content:"\f164"}.fa-thumbtack:before{content:"\f08d"}.fa-ticket-alt:before{content:"\f3ff"}.fa-times:before{content:"\f00d"}.fa-times-circle:before{content:"\f057"}.fa-tint:before{content:"\f043"}.fa-tint-slash:before{content:"\f5c7"}.fa-tired:before{content:"\f5c8"}.fa-toggle-off:before{content:"\f204"}.fa-toggle-on:before{content:"\f205"}.fa-toolbox:before{content:"\f552"}.fa-tooth:before{content:"\f5c9"}.fa-trade-federation:before{content:"\f513"}.fa-trademark:before{content:"\f25c"}.fa-traffic-light:before{content:"\f637"}.fa-train:before{content:"\f238"}.fa-transgender:before{content:"\f224"}.fa-transgender-alt:before{content:"\f225"}.fa-trash:before{content:"\f1f8"}.fa-trash-alt:before{content:"\f2ed"}.fa-tree:before{content:"\f1bb"}.fa-trello:before{content:"\f181"}.fa-tripadvisor:before{content:"\f262"}.fa-trophy:before{content:"\f091"}.fa-truck:before{content:"\f0d1"}.fa-truck-loading:before{content:"\f4de"}.fa-truck-monster:before{content:"\f63b"}.fa-truck-moving:before{content:"\f4df"}.fa-truck-pickup:before{content:"\f63c"}.fa-tshirt:before{content:"\f553"}.fa-tty:before{content:"\f1e4"}.fa-tumblr:before{content:"\f173"}.fa-tumblr-square:before{content:"\f174"}.fa-tv:before{content:"\f26c"}.fa-twitch:before{content:"\f1e8"}.fa-twitter:before{content:"\f099"}.fa-twitter-square:before{content:"\f081"}.fa-typo3:before{content:"\f42b"}.fa-uber:before{content:"\f402"}.fa-uikit:be
fore{content:"\f403"}.fa-umbrella:before{content:"\f0e9"}.fa-umbrella-beach:before{content:"\f5ca"}.fa-underline:before{content:"\f0cd"}.fa-undo:before{content:"\f0e2"}.fa-undo-alt:before{content:"\f2ea"}.fa-uniregistry:before{content:"\f404"}.fa-universal-access:before{content:"\f29a"}.fa-university:before{content:"\f19c"}.fa-unlink:before{content:"\f127"}.fa-unlock:before{content:"\f09c"}.fa-unlock-alt:before{content:"\f13e"}.fa-untappd:before{content:"\f405"}.fa-upload:before{content:"\f093"}.fa-usb:before{content:"\f287"}.fa-user:before{content:"\f007"}.fa-user-alt:before{content:"\f406"}.fa-user-alt-slash:before{content:"\f4fa"}.fa-user-astronaut:before{content:"\f4fb"}.fa-user-check:before{content:"\f4fc"}.fa-user-circle:before{content:"\f2bd"}.fa-user-clock:before{content:"\f4fd"}.fa-user-cog:before{content:"\f4fe"}.fa-user-edit:before{content:"\f4ff"}.fa-user-friends:before{content:"\f500"}.fa-user-graduate:before{content:"\f501"}.fa-user-lock:before{content:"\f502"}.fa-user-md:before{content:"\f0f0"}.fa-user-minus:before{content:"\f503"}.fa-user-ninja:before{content:"\f504"}.fa-user-plus:before{content:"\f234"}.fa-user-secret:before{content:"\f21b"}.fa-user-shield:before{content:"\f505"}.fa-user-slash:before{content:"\f506"}.fa-user-tag:before{content:"\f507"}.fa-user-tie:before{content:"\f508"}.fa-user-times:before{content:"\f235"}.fa-users:before{content:"\f0c0"}.fa-users-cog:before{content:"\f509"}.fa-ussunnah:before{content:"\f407"}.fa-utensil-spoon:before{content:"\f2e5"}.fa-utensils:before{content:"\f2e7"}.fa-vaadin:before{content:"\f408"}.fa-vector-square:before{content:"\f5cb"}.fa-venus:before{content:"\f221"}.fa-venus-double:before{content:"\f226"}.fa-venus-mars:before{content:"\f228"}.fa-viacoin:before{content:"\f237"}.fa-viadeo:before{content:"\f2a9"}.fa-viadeo-square:before{content:"\f2aa"}.fa-vial:before{content:"\f492"}.fa-vials:before{content:"\f493"}.fa-viber:before{content:"\f409"}.fa-video:before{content:"\f03d"}.fa-video-slash:before{content:"\f4e2"}.fa-vimeo:before{content:"\f40a"}.fa-vimeo-square:before{content:"\f194"}.fa-vimeo-v:before{content:"\f27d"}.fa-vine:before{content:"\f1ca"}.fa-vk:before{content:"\f189"}.fa-vnv:before{content:"\f40b"}.fa-volleyball-ball:before{content:"\f45f"}.fa-volume-down:before{content:"\f027"}.fa-volume-off:before{content:"\f026"}.fa-volume-up:before{content:"\f028"}.fa-vuejs:before{content:"\f41f"}.fa-walking:before{content:"\f554"}.fa-wallet:before{content:"\f555"}.fa-warehouse:before{content:"\f494"}.fa-weebly:before{content:"\f5cc"}.fa-weibo:before{content:"\f18a"}.fa-weight:before{content:"\f496"}.fa-weight-hanging:before{content:"\f5cd"}.fa-weixin:before{content:"\f1d7"}.fa-whatsapp:before{content:"\f232"}.fa-whatsapp-square:before{content:"\f40c"}.fa-wheelchair:before{content:"\f193"}.fa-whmcs:before{content:"\f40d"}.fa-wifi:before{content:"\f1eb"}.fa-wikipedia-w:before{content:"\f266"}.fa-window-close:before{content:"\f410"}.fa-window-maximize:before{content:"\f2d0"}.fa-window-minimize:before{content:"\f2d1"}.fa-window-restore:before{content:"\f2d2"}.fa-windows:before{content:"\f17a"}.fa-wine-glass:before{content:"\f4e3"}.fa-wine-glass-alt:before{content:"\f5ce"}.fa-wix:before{content:"\f5cf"}.fa-wolf-pack-battalion:before{content:"\f514"}.fa-won-sign:before{content:"\f159"}.fa-wordpress:before{content:"\f19a"}.fa-wordpress-simple:before{content:"\f411"}.fa-wpbeginner:before{content:"\f297"}.fa-wpexplorer:before{content:"\f2de"}.fa-wpforms:before{content:"\f298"}.fa-wrench:before{content:"\f0ad"}.fa-x-ray:before{content:
"\f497"}.fa-xbox:before{content:"\f412"}.fa-xing:before{content:"\f168"}.fa-xing-square:before{content:"\f169"}.fa-y-combinator:before{content:"\f23b"}.fa-yahoo:before{content:"\f19e"}.fa-yandex:before{content:"\f413"}.fa-yandex-international:before{content:"\f414"}.fa-yelp:before{content:"\f1e9"}.fa-yen-sign:before{content:"\f157"}.fa-yoast:before{content:"\f2b1"}.fa-youtube:before{content:"\f167"}.fa-youtube-square:before{content:"\f431"}.fa-zhihu:before{content:"\f63f"}.sr-only{border:0;clip:rect(0,0,0,0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.sr-only-focusable:active,.sr-only-focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}@font-face{font-family:"Font Awesome 5 Brands";font-style:normal;font-weight:normal;src:url(../webfonts/fa-brands-400.eot);src:url(../webfonts/fa-brands-400.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-brands-400.woff2) format("woff2"),url(../webfonts/fa-brands-400.woff) format("woff"),url(../webfonts/fa-brands-400.ttf) format("truetype"),url(../webfonts/fa-brands-400.svg#fontawesome) format("svg")}.fab{font-family:"Font Awesome 5 Brands"}@font-face{font-family:"Font Awesome 5 Free";font-style:normal;font-weight:400;src:url(../webfonts/fa-regular-400.eot);src:url(../webfonts/fa-regular-400.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-regular-400.woff2) format("woff2"),url(../webfonts/fa-regular-400.woff) format("woff"),url(../webfonts/fa-regular-400.ttf) format("truetype"),url(../webfonts/fa-regular-400.svg#fontawesome) format("svg")}.far{font-weight:400}@font-face{font-family:"Font Awesome 5 Free";font-style:normal;font-weight:900;src:url(../webfonts/fa-solid-900.eot);src:url(../webfonts/fa-solid-900.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-solid-900.woff2) format("woff2"),url(../webfonts/fa-solid-900.woff) format("woff"),url(../webfonts/fa-solid-900.ttf) format("truetype"),url(../webfonts/fa-solid-900.svg#fontawesome) format("svg")}.fa,.far,.fas{font-family:"Font Awesome 5 Free"}.fa,.fas{font-weight:900} \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/www/css/main.css b/testing/web-platform/tests/tools/wave/www/css/main.css
new file mode 100644
index 0000000000..7edbb0f3ba
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/css/main.css
@@ -0,0 +1,101 @@
+h2 {
+ font-weight: bold;
+ margin-bottom: 10px;
+}
+
+.site-logo {
+ max-width: 300px;
+ margin-bottom: 30px;
+ margin-left: -15px;
+}
+
+.page-description {
+ max-width: 700px;
+ margin-top: 50px;
+}
+
+.field-controls {
+ padding-top: 25px;
+}
+
+#api-controls {
+ margin-top: 15px;
+ max-width: 850px;
+}
+
+#select-none {
+ margin-right: 25px;
+}
+
+.test-count-container {
+ vertical-align: sub;
+ margin-left: 10px;
+}
+
+.section-heading {
+ font-size: 18px;
+ margin-top: 25px;
+ margin-bottom: 0;
+ font-weight: bold;
+}
+
+.button-group {
+ margin-top: 30px;
+}
+
+.mb-1 {
+ margin-bottom: 10px;
+}
+
+.form-group {
+ display: inline-block;
+}
+
+.form-group--offset {
+ margin-left: 25px;
+}
+
+.filter-group {
+ margin-top: 30px;
+ padding-top: 15px;
+ padding-bottom: 20px;
+ border-top: 1px solid #d2d2d2;
+ border-bottom: 1px solid #d2d2d2;
+}
+
+.focused {
+ /* box-shadow: inset -3px -3px 0 red, inset 3px 3px 0 red; */
+}
+
+.focused:after {
+ display: block;
+ position: absolute;
+ border: solid 3px red;
+ content: "";
+ width: 100%;
+ height: 100%;
+ top: 0;
+ left: 0;
+}
+
+.button.is-light.is-focused:not(:active),
+.button.is-light:focus:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(0, 0, 0, 0.2);
+}
+
+.button.is-info.is-focused:not(:active),
+.button.is-info:focus:not(:active) {
+ box-shadow: 0 0 0 0.125em rgba(50, 115, 220, 0.25);
+}
+
+@media screen and (min-width: 560px) {
+ #api-controls {
+ columns: 2;
+ }
+}
+
+@media screen and (min-width: 780px) {
+ #api-controls {
+ columns: 3;
+ }
+}
diff --git a/testing/web-platform/tests/tools/wave/www/css/result.css b/testing/web-platform/tests/tools/wave/www/css/result.css
new file mode 100644
index 0000000000..6f944a0a83
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/css/result.css
@@ -0,0 +1,75 @@
+body {
+ margin: 0;
+ padding: 0;
+ display: flex;
+ justify-content: center;
+ font-family: "Noto Sans", sans-serif;
+ background-color: white;
+ color: #000;
+}
+
+.header {
+ display: flex;
+ margin: 50px 0 30px 0;
+}
+
+.header :first-child {
+ flex: 1;
+}
+
+.site-logo {
+ max-width: 300px;
+ margin-left: -15px;
+}
+
+.content {
+ width: 1000px;
+}
+
+#test-path,
+#token {
+ font-family: monospace;
+ font-size: 12pt;
+}
+
+.pass {
+ color: green;
+}
+
+.fail {
+ color: red;
+}
+
+.timeout {
+ color: rgb(224, 127, 0);
+}
+
+.not-run {
+ color: blue;
+}
+
+.api-result-timeoutfiles {
+ display: none; /* don't display for now */
+ flex-basis: 100%;
+}
+
+#header {
+ display: flex;
+ align-items: center;
+}
+
+#header > :first-child {
+ flex: 1;
+}
+
+#controls-wrapper {
+ display: flex;
+}
+
+.no-border-radius {
+ border-radius: 0;
+}
+
+#results-table .button {
+ margin: 0 2px;
+}
diff --git a/testing/web-platform/tests/tools/wave/www/css/style.css b/testing/web-platform/tests/tools/wave/www/css/style.css
new file mode 100644
index 0000000000..9264a47bc5
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/css/style.css
@@ -0,0 +1,86 @@
+body {
+ width: 100vw;
+ height: 100vh;
+ padding: 0;
+ margin: 0;
+ color: #000;
+}
+
+.section {
+ max-width: 1000px;
+ margin: 0 auto;
+ padding-left: 0;
+ padding-right: 0;
+}
+
+.site-logo {
+ max-width: 300px;
+ margin-bottom: 30px;
+ margin-left: -15px;
+}
+
+.site-header {
+ margin-bottom: 50px;
+}
+
+#content {
+ display: flex;
+ flex-direction: column;
+ justify-content: flex-start;
+ align-items: flex-start;
+}
+
+#qr-code {
+ padding: 10px;
+ border: 1px solid rgb(200, 200, 200);
+ border-radius: 3px;
+}
+
+#button-wrapper {
+ display: flex;
+ margin: 20px 0;
+}
+
+#button-wrapper :not(:first-child) {
+ margin-left: 15px;
+}
+
+#token,
+#test-path {
+ font-family: monospace;
+ font-size: 12pt;
+}
+
+#details-wrapper {
+ display: flex;
+ flex-direction: column;
+ max-width: 600px;
+ margin-top: 20px;
+}
+
+.prompt {
+ display: flex;
+ flex-direction: column;
+ justify-content: center;
+ width: 700px;
+}
+
+.detail {
+ margin: 2px 0;
+ display: flex;
+ width: 100%;
+}
+
+.detail :first-child {
+ width: 150px;
+ flex: none;
+ font-weight: bold;
+}
+
+.detail div {
+ flex: 1;
+}
+
+.mb-2 {
+ margin-bottom: 20px;
+}
diff --git a/testing/web-platform/tests/tools/wave/www/favicon.ico b/testing/web-platform/tests/tools/wave/www/favicon.ico
new file mode 100644
index 0000000000..46d0d17d7b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/favicon.ico
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/finish.html b/testing/web-platform/tests/tools/wave/www/finish.html
new file mode 100644
index 0000000000..403cb2879e
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/finish.html
@@ -0,0 +1,215 @@
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8" />
+ <title>Session Finished - Web Platform Test Runner</title>
+ <link rel="stylesheet" href="css/bulma-0.7.5/bulma.min.css" />
+ <link rel="stylesheet" href="css/style.css" />
+ <link rel="stylesheet" href="css/main.css" />
+ <script src="lib/davidshimjs/qrcode.js"></script>
+ <script src="lib/keycodes.js"></script>
+ <script src="lib/wave-service.js"></script>
+ </head>
+ <body>
+ <section class="section">
+ <div class="container site-header">
+ <img src="res/wavelogo_2016.jpg" alt="WAVE Logo" class="site-logo" />
+ <h1 class="title is-spaced" id="title"></h1>
+ </div>
+
+ <div class="container">
+ <div id="content">
+ <div id="qr-code"></div>
+ <div id="button-wrapper">
+ <div id="new-button" class="button is-success is-large tabbable">
+ Create New Session
+ </div>
+ <button id="results-button" class="button is-large tabbable">
+ View Result Page
+ </button>
+ </div>
+ <div id="details-wrapper">
+ <h3 class="title is-5 is-spaced">Details</h3>
+ <div class="detail">
+ <div>Token:</div>
+ <div id="token"></div>
+ </div>
+ <div class="detail">
+ <div>User Agent:</div>
+ <div id="user-agent"></div>
+ </div>
+ <div class="detail">
+ <div>Test Types:</div>
+ <div id="test-types"></div>
+ </div>
+ <div class="detail">
+ <div>Total Test Files:</div>
+ <div id="total-tests"></div>
+ </div>
+ <div class="detail">
+ <div>Reference Tokens:</div>
+ <div id="reference-tokens"></div>
+ </div>
+ <div class="detail">
+ <div>Test Timeouts:</div>
+ <div id="test-timeout"></div>
+ </div>
+ <div class="detail">
+ <div>Test Paths:</div>
+ <div id="test-path"></div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </section>
+
+ <script>
+ var HOSTNAME = location.hostname;
+ var PORT = location.port;
+ var PROTOCOL = location.protocol.replace(/:/, "");
+ var QUERY = location.search.replace(/\?/, "");
+ var match = QUERY.match(/token=([^&]+)/);
+ var TOKEN = match ? match[1] : null;
+ var selectedTabbable = -1;
+
+ function displaySessionConfiguration(configuration) {
+ var userAgent = document.getElementById("user-agent");
+ userAgent.innerText = configuration.userAgent;
+ var testPath = document.getElementById("test-path");
+ for (var i = 0; i < configuration.tests.include.length; i++) {
+ var path = configuration.tests.include[i];
+ testPath.innerText += path + "\n";
+ }
+ var testTypes = document.getElementById("test-types");
+ testTypes.innerText = configuration.types.join(", ");
+ var testTimeout = document.getElementById("test-timeout");
+ for (var timeout in configuration.timeouts) {
+ testTimeout.innerText +=
+ timeout + ": " + configuration.timeouts[timeout] / 1000 + "s\n";
+ }
+ var referenceTokens = document.getElementById("reference-tokens");
+ if (configuration.referenceTokens.length === 0) {
+ referenceTokens.innerText = "none";
+ } else {
+ for (var i = 0; i < configuration.referenceTokens.length; i++) {
+ var token = configuration.referenceTokens[i];
+ referenceTokens.innerText += token + "\n";
+ }
+ }
+ }
+
+ function displaySessionStatus(status) {
+ var title = document.getElementById("title");
+ if (status.status === "aborted") {
+ title.innerText = "Session Aborted";
+ } else {
+ title.innerText = "Session Complete";
+ }
+
+ var testTypes = document.getElementById("total-tests");
+ var count = 0;
+ for (var api in status.testFilesCount) {
+ count += status.testFilesCount[api];
+ }
+ testTypes.innerText = count;
+ }
+
+ function startTests() {
+ sendRequest("GET", WEB_ROOT + "api/next", null, function(response) {
+ location.href = response;
+ });
+ }
+
+ var resultsUrl =
+ PROTOCOL + "://" + location.host + WEB_ROOT + "overview.html" + location.search;
+ new QRCode(document.getElementById("qr-code"), resultsUrl);
+
+ var resultsButton = document.getElementById("results-button");
+ resultsButton.onclick = function() {
+ window.open(resultsUrl, "_blank");
+ };
+
+ var newButton = document.getElementById("new-button");
+ newButton.onclick = function() {
+ location.href = WEB_ROOT + "index.html";
+ };
+
+ function removeClass(element, className) {
+ var elementClass = element.className;
+ var index = elementClass.indexOf(className);
+ if (index !== -1) {
+ element.className = elementClass.replace(className, "");
+ }
+ }
+
+ function addClass(element, className) {
+ element.className += " " + className;
+ }
+
+ function skipFocus(steps) {
+ var tabbables = document.getElementsByClassName("tabbable");
+ if (selectedTabbable === -1) {
+ selectedTabbable = 0;
+ } else {
+ removeClass(tabbables[selectedTabbable], "focused");
+ selectedTabbable += steps;
+ }
+
+ if (selectedTabbable >= tabbables.length) {
+ selectedTabbable = 0;
+ }
+
+ if (selectedTabbable < 0) {
+ selectedTabbable = tabbables.length - 1;
+ }
+
+ tabbables[selectedTabbable].focus();
+ addClass(tabbables[selectedTabbable], "focused");
+ }
+
+ function focusNext() {
+ skipFocus(1);
+ }
+
+ function focusPrevious() {
+ skipFocus(-1);
+ }
+
+ document.onkeydown = function(event) {
+ event = event || window.event;
+ var charCode =
+ typeof event.which === "number" ? event.which : event.keyCode;
+
+ if (ACTION_KEYS.indexOf(charCode) !== -1) {
+ event.preventDefault();
+ if (selectedTabbable === -1) {
+ return;
+ }
+ var tabbables = document.getElementsByClassName("tabbable");
+ var element = tabbables[selectedTabbable];
+ if (element.type === "checkbox") {
+ element.checked = !element.checked;
+ } else {
+ element.click();
+ }
+ }
+
+ if (PREV_KEYS.indexOf(charCode) !== -1) {
+ focusPrevious();
+ }
+
+ if (NEXT_KEYS.indexOf(charCode) !== -1) {
+ focusNext();
+ }
+ };
+
+ // Reuse the token already parsed above (guards against a missing token parameter).
+ var token = TOKEN;
+ var tokenView = document.getElementById("token");
+ tokenView.innerText = token;
+
+ WaveService.readSession(token, displaySessionConfiguration);
+ WaveService.readSessionStatus(token, displaySessionStatus);
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/wave/www/index.html b/testing/web-platform/tests/tools/wave/www/index.html
new file mode 100644
index 0000000000..8ba94dfac0
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/index.html
@@ -0,0 +1,263 @@
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8" />
+ <meta name="viewport" content="width=device-width, initial-scale=1" />
+ <title>Web Media API Snapshot Test Suite</title>
+ <link rel="stylesheet" href="css/bulma-0.7.5/bulma.min.css" />
+ <link rel="stylesheet" href="css/fontawesome.min.css" />
+ <script src="lib/keycodes.js"></script>
+ <script src="lib/wave-service.js"></script>
+ <script src="lib/davidshimjs/qrcode.js"></script>
+ </head>
+
+ <body>
+ <section class="section">
+ <div class="container">
+ <img
+ style="max-width: 300px; margin-bottom: 30px; margin-left: -15px;"
+ src="res/wavelogo_2016.jpg"
+ alt="WAVE (Web Application Video Ecosystem) Project Logo"
+ />
+
+ <h1 class="title is-spaced">
+ WAVE WMAS Test Suite
+ </h1>
+ <p class="subtitle">
+ <a href="https://github.com/w3c/webmediaapi/">GitHub</a> -
+ </p>
+ </div>
+
+ <div class="container" style="margin-top: 2em;">
+ <h2 class="title is-5">
+ New test session
+ </h2>
+ <div class="columns is-vcentered" style="margin-top: 20px;">
+ <div class="column is-narrow">
+ <div
+ id="qr-code"
+ style="
+ width: 256px;
+ height: 256px;
+ padding: 5px;
+ border: 1px gray solid;
+ border-radius: 3px;
+ "
+ ></div>
+ </div>
+ <div class="column">
+ <table style="margin-bottom: 1.5em;">
+ <tr>
+ <td class="has-text-weight-bold" style="padding-right: 1rem;">
+ Token:
+ </td>
+ <td id="session-token" class="is-family-monospace"></td>
+ </tr>
+ <tr>
+ <td class="has-text-weight-bold" style="padding-right: 1rem;">
+ Expires:
+ </td>
+ <td id="expiary-date"></td>
+ </tr>
+ <tr>
+ <td></td>
+ <td>
+ <p class="is-size-7">(Session start revokes expiration.)</p>
+ </td>
+ </tr>
+ </table>
+
+ <p style="max-width: 32rem; margin-bottom: 1rem;">
+ Configure a new session on a second device by scanning the
+ QR-Code, or click the button:
+ </p>
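+ <!--
+ The QR code above and the "Configure Session" button below open the same
+ configuration URL, assembled by the inline script at the bottom of this page,
+ roughly: location.origin + WEB_ROOT + "configuration.html?token=" + token + "&resume=" + resumeToken
+ (token and resumeToken are resolved at runtime).
+ -->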
+ <div
+ class="button"
+ style="margin-bottom: 1rem;"
+ id="configure-button"
+ >
+ <span class="icon"><i class="fas fa-sliders-h"></i></span>
+ <span>Configure Session</span>
+ </div>
+ <p style="max-width: 32rem;">
+ The tests will start running in this window as soon as the
+ session is started from the configuration view.
+ </p>
+ </div>
+ </div>
+ </div>
+
+ <div class="container" style="margin-top: 2em;">
+ <h2 class="title is-5">
+ Resume running session
+ </h2>
+ <article
+ id="unknown_token_error"
+ style="max-width: 30em; display: none;"
+ class="message is-danger"
+ >
+ <div class="message-body">
+ Unknown token
+ </div>
+ </article>
+ <div class="columns is-vcentered">
+ <div id="resume_token" class="column is-narrow"></div>
+ <div class="button-group column">
+ <button
+ id="resume-button"
+ class="button is-success tabbable"
+ type="submit"
+ data-uid="100"
+ autofocus
+ >
+ <span class="icon"><i class="fas fa-redo"></i></span>
+ <span>Resume</span>
+ </button>
+ </div>
+ </div>
+ </div>
+ </section>
+
+ <script>
+ var selectedTabbable = -1;
+
+ function removeClass(element, className) {
+ var elementClass = element.className;
+ var index = elementClass.indexOf(className);
+ if (index !== -1) {
+ element.className = elementClass.replace(className, "");
+ }
+ }
+
+ function addClass(element, className) {
+ element.className += " " + className;
+ }
+
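+ // Roving focus for remote-control navigation: skipFocus() moves a "focused"
+ // marker across the elements tagged with the "tabbable" class, wrapping at
+ // either end, while the document-level keydown handler below maps PREV_KEYS /
+ // NEXT_KEYS / ACTION_KEYS (presumably defined in lib/keycodes.js) to
+ // previous / next / activate.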
+ function skipFocus(steps) {
+ var tabbables = document.getElementsByClassName("tabbable");
+ if (selectedTabbable === -1) {
+ selectedTabbable = 0;
+ } else {
+ removeClass(tabbables[selectedTabbable], "focused");
+ selectedTabbable += steps;
+ }
+
+ if (selectedTabbable >= tabbables.length) {
+ selectedTabbable = 0;
+ }
+
+ if (selectedTabbable < 0) {
+ selectedTabbable = tabbables.length - 1;
+ }
+
+ tabbables[selectedTabbable].focus();
+ addClass(tabbables[selectedTabbable], "focused");
+ }
+
+ function focusNext() {
+ skipFocus(1);
+ }
+
+ function focusPrevious() {
+ skipFocus(-1);
+ }
+
+ // Resume
+ var resumeToken = "";
+ var cookies = document.cookie.split(";");
+ for (var i = 0; i < cookies.length; i++) {
+ var cookie = cookies[i];
+ if (cookie.split("=")[0].replace(/ /g, "") === "resume_token") {
+ resumeToken = cookie.split("=")[1];
+ break;
+ }
+ }
+ if (!resumeToken) resumeToken = "";
+
+ var resumeButton = document.getElementById("resume-button");
+
+ var tokenText = document.getElementById("resume_token");
+ if (resumeToken) {
+ tokenText.innerText = "Last session: " + resumeToken;
+ } else {
+ tokenText.innerText = "No recent session.";
+ resumeButton.setAttribute("disabled", "");
+ }
+ var unknownTokenError = document.getElementById("unknown_token_error");
+ resumeButton.onclick = function (event) {
+ location.href = WEB_ROOT + "next.html?token=" + resumeToken;
+ };
+
+ resumeButton.onkeydown = function (event) {
+ var charCode =
+ typeof event.which === "number" ? event.which : event.keyCode;
+ if (ACTION_KEYS.indexOf(charCode) === -1) return;
+ location.href = WEB_ROOT + "next.html?token=" + resumeToken;
+ };
+
+ document.onkeydown = function (event) {
+ event = event || window.event;
+ var charCode =
+ typeof event.which === "number" ? event.which : event.keyCode;
+
+ if (ACTION_KEYS.indexOf(charCode) !== -1) {
+ event.preventDefault();
+ var tabbables = document.getElementsByClassName("tabbable");
+ var element = tabbables[selectedTabbable];
+ if (!element) return;
+ if (element.type === "checkbox") {
+ element.checked = !element.checked;
+ } else {
+ element.click();
+ }
+ }
+
+ if (PREV_KEYS.indexOf(charCode) !== -1) {
+ focusPrevious();
+ }
+
+ if (NEXT_KEYS.indexOf(charCode) !== -1) {
+ focusNext();
+ }
+ };
+
+ var lifeTime = 30 * 60 * 1000; // 30min
+ WaveService.createSession(
+ { expirationDate: new Date().getTime() + lifeTime },
+ function (token) {
+ var sessionTokenElement = document.getElementById("session-token");
+ sessionTokenElement.innerText = token;
+
+ WaveService.readSessionStatus(token, function (config) {
+ var expiryDateElement = document.getElementById("expiry-date");
+ expiryDateElement.innerText = config.expirationDate.toLocaleString();
+ });
+
+ var configurationUrl =
+ location.origin +
+ WEB_ROOT +
+ "configuration.html?token=" +
+ token +
+ "&resume=" +
+ resumeToken;
+ new QRCode(document.getElementById("qr-code"), configurationUrl);
+ document.getElementById("configure-button").onclick = function () {
+ window.open(configurationUrl, "_blank");
+ };
+
+ WaveService.addSessionEventListener(token, function (message) {
+ if (message.type === "resume") {
+ var resumeToken = message.data;
+ location.href = WEB_ROOT + "next.html?token=" + resumeToken;
+ }
+
+ if (message.type !== "status") return;
+ if (message.data === "pending") return;
+ location.href = WEB_ROOT + "next.html?token=" + token;
+ });
+ }
+ );
+ document.getElementById("resume-button").focus();
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/wave/www/lib/davidshimjs/LICENSE b/testing/web-platform/tests/tools/wave/www/lib/davidshimjs/LICENSE
new file mode 100644
index 0000000000..93c33233fa
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/lib/davidshimjs/LICENSE
@@ -0,0 +1,14 @@
+The MIT License (MIT)
+---------------------
+Copyright (c) 2012 davidshimjs
+
+Permission is hereby granted, free of charge,
+to any person obtaining a copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/www/lib/davidshimjs/qrcode.js b/testing/web-platform/tests/tools/wave/www/lib/davidshimjs/qrcode.js
new file mode 100644
index 0000000000..45e5d7b974
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/lib/davidshimjs/qrcode.js
@@ -0,0 +1,1533 @@
+/**
+ * @fileoverview
+ * - Using the 'QRCode for Javascript library'
+ * - Fixed dataset of 'QRCode for Javascript library' to support the full spec.
+ * - this library has no dependencies.
+ *
+ * @author davidshimjs
+ * @see <a href="http://www.d-project.com/" target="_blank">http://www.d-project.com/</a>
+ * @see <a href="http://jeromeetienne.github.com/jquery-qrcode/" target="_blank">http://jeromeetienne.github.com/jquery-qrcode/</a>
+ */
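+// Usage sketch: the WAVE pages in this patch (finish.html, index.html) call this
+// library as shown below; the URL here is a placeholder.
+//
+//   new QRCode(document.getElementById("qr-code"), "https://example.test/overview.html?token=...");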
+var QRCode
+;(function () {
+ // ---------------------------------------------------------------------
+ // QRCode for JavaScript
+ //
+ // Copyright (c) 2009 Kazuhiko Arase
+ //
+ // URL: http://www.d-project.com/
+ //
+ // Licensed under the MIT license:
+ // http://www.opensource.org/licenses/mit-license.php
+ //
+ // The word "QR Code" is registered trademark of
+ // DENSO WAVE INCORPORATED
+ // http://www.denso-wave.com/qrcode/faqpatent-e.html
+ //
+ // ---------------------------------------------------------------------
+ function QR8bitByte (data) {
+ this.mode = QRMode.MODE_8BIT_BYTE
+ this.data = data
+ this.parsedData = []
+
+ // Added to support UTF-8 Characters
+ for (var i = 0, l = this.data.length; i < l; i++) {
+ var byteArray = []
+ var code = this.data.charCodeAt(i)
+
+ if (code > 0x10000) {
+ byteArray[0] = 0xf0 | ((code & 0x1c0000) >>> 18)
+ byteArray[1] = 0x80 | ((code & 0x3f000) >>> 12)
+ byteArray[2] = 0x80 | ((code & 0xfc0) >>> 6)
+ byteArray[3] = 0x80 | (code & 0x3f)
+ } else if (code > 0x800) {
+ byteArray[0] = 0xe0 | ((code & 0xf000) >>> 12)
+ byteArray[1] = 0x80 | ((code & 0xfc0) >>> 6)
+ byteArray[2] = 0x80 | (code & 0x3f)
+ } else if (code > 0x80) {
+ byteArray[0] = 0xc0 | ((code & 0x7c0) >>> 6)
+ byteArray[1] = 0x80 | (code & 0x3f)
+ } else {
+ byteArray[0] = code
+ }
+
+ this.parsedData.push(byteArray)
+ }
+
+ this.parsedData = Array.prototype.concat.apply([], this.parsedData)
+
+ if (this.parsedData.length != this.data.length) {
+ this.parsedData.unshift(191)
+ this.parsedData.unshift(187)
+ this.parsedData.unshift(239)
+ }
+ }
+
+ QR8bitByte.prototype = {
+ getLength: function (buffer) {
+ return this.parsedData.length
+ },
+ write: function (buffer) {
+ for (var i = 0, l = this.parsedData.length; i < l; i++) {
+ buffer.put(this.parsedData[i], 8)
+ }
+ }
+ }
+
+ function QRCodeModel (typeNumber, errorCorrectLevel) {
+ this.typeNumber = typeNumber
+ this.errorCorrectLevel = errorCorrectLevel
+ this.modules = null
+ this.moduleCount = 0
+ this.dataCache = null
+ this.dataList = []
+ }
+
+ QRCodeModel.prototype = {
+ addData: function (data) {
+ var newData = new QR8bitByte(data)
+ this.dataList.push(newData)
+ this.dataCache = null
+ },
+ isDark: function (row, col) {
+ if (
+ row < 0 ||
+ this.moduleCount <= row ||
+ col < 0 ||
+ this.moduleCount <= col
+ ) {
+ throw new Error(row + ',' + col)
+ }
+ return this.modules[row][col]
+ },
+ getModuleCount: function () {
+ return this.moduleCount
+ },
+ make: function () {
+ this.makeImpl(false, this.getBestMaskPattern())
+ },
+ makeImpl: function (test, maskPattern) {
+ this.moduleCount = this.typeNumber * 4 + 17
+ this.modules = new Array(this.moduleCount)
+ for (var row = 0; row < this.moduleCount; row++) {
+ this.modules[row] = new Array(this.moduleCount)
+ for (var col = 0; col < this.moduleCount; col++) {
+ this.modules[row][col] = null
+ }
+ }
+ this.setupPositionProbePattern(0, 0)
+ this.setupPositionProbePattern(this.moduleCount - 7, 0)
+ this.setupPositionProbePattern(0, this.moduleCount - 7)
+ this.setupPositionAdjustPattern()
+ this.setupTimingPattern()
+ this.setupTypeInfo(test, maskPattern)
+ if (this.typeNumber >= 7) {
+ this.setupTypeNumber(test)
+ }
+ if (this.dataCache == null) {
+ this.dataCache = QRCodeModel.createData(
+ this.typeNumber,
+ this.errorCorrectLevel,
+ this.dataList
+ )
+ }
+ this.mapData(this.dataCache, maskPattern)
+ },
+ setupPositionProbePattern: function (row, col) {
+ for (var r = -1; r <= 7; r++) {
+ if (row + r <= -1 || this.moduleCount <= row + r) continue
+ for (var c = -1; c <= 7; c++) {
+ if (col + c <= -1 || this.moduleCount <= col + c) continue
+ if (
+ (r >= 0 && r <= 6 && (c == 0 || c == 6)) ||
+ (c >= 0 && c <= 6 && (r == 0 || r == 6)) ||
+ (r >= 2 && r <= 4 && c >= 2 && c <= 4)
+ ) {
+ this.modules[row + r][col + c] = true
+ } else {
+ this.modules[row + r][col + c] = false
+ }
+ }
+ }
+ },
+ getBestMaskPattern: function () {
+ var minLostPoint = 0
+ var pattern = 0
+ for (var i = 0; i < 8; i++) {
+ this.makeImpl(true, i)
+ var lostPoint = QRUtil.getLostPoint(this)
+ if (i == 0 || minLostPoint > lostPoint) {
+ minLostPoint = lostPoint
+ pattern = i
+ }
+ }
+ return pattern
+ },
+ createMovieClip: function (target_mc, instance_name, depth) {
+ var qr_mc = target_mc.createEmptyMovieClip(instance_name, depth)
+ var cs = 1
+ this.make()
+ for (var row = 0; row < this.modules.length; row++) {
+ var y = row * cs
+ for (var col = 0; col < this.modules[row].length; col++) {
+ var x = col * cs
+ var dark = this.modules[row][col]
+ if (dark) {
+ qr_mc.beginFill(0, 100)
+ qr_mc.moveTo(x, y)
+ qr_mc.lineTo(x + cs, y)
+ qr_mc.lineTo(x + cs, y + cs)
+ qr_mc.lineTo(x, y + cs)
+ qr_mc.endFill()
+ }
+ }
+ }
+ return qr_mc
+ },
+ setupTimingPattern: function () {
+ for (var r = 8; r < this.moduleCount - 8; r++) {
+ if (this.modules[r][6] != null) {
+ continue
+ }
+ this.modules[r][6] = r % 2 == 0
+ }
+ for (var c = 8; c < this.moduleCount - 8; c++) {
+ if (this.modules[6][c] != null) {
+ continue
+ }
+ this.modules[6][c] = c % 2 == 0
+ }
+ },
+ setupPositionAdjustPattern: function () {
+ var pos = QRUtil.getPatternPosition(this.typeNumber)
+ for (var i = 0; i < pos.length; i++) {
+ for (var j = 0; j < pos.length; j++) {
+ var row = pos[i]
+ var col = pos[j]
+ if (this.modules[row][col] != null) {
+ continue
+ }
+ for (var r = -2; r <= 2; r++) {
+ for (var c = -2; c <= 2; c++) {
+ if (
+ r == -2 ||
+ r == 2 ||
+ c == -2 ||
+ c == 2 ||
+ (r == 0 && c == 0)
+ ) {
+ this.modules[row + r][col + c] = true
+ } else {
+ this.modules[row + r][col + c] = false
+ }
+ }
+ }
+ }
+ }
+ },
+ setupTypeNumber: function (test) {
+ var bits = QRUtil.getBCHTypeNumber(this.typeNumber)
+ for (var i = 0; i < 18; i++) {
+ var mod = !test && ((bits >> i) & 1) == 1
+ this.modules[Math.floor(i / 3)][i % 3 + this.moduleCount - 8 - 3] = mod
+ }
+ for (var i = 0; i < 18; i++) {
+ var mod = !test && ((bits >> i) & 1) == 1
+ this.modules[i % 3 + this.moduleCount - 8 - 3][Math.floor(i / 3)] = mod
+ }
+ },
+ setupTypeInfo: function (test, maskPattern) {
+ var data = (this.errorCorrectLevel << 3) | maskPattern
+ var bits = QRUtil.getBCHTypeInfo(data)
+ for (var i = 0; i < 15; i++) {
+ var mod = !test && ((bits >> i) & 1) == 1
+ if (i < 6) {
+ this.modules[i][8] = mod
+ } else if (i < 8) {
+ this.modules[i + 1][8] = mod
+ } else {
+ this.modules[this.moduleCount - 15 + i][8] = mod
+ }
+ }
+ for (var i = 0; i < 15; i++) {
+ var mod = !test && ((bits >> i) & 1) == 1
+ if (i < 8) {
+ this.modules[8][this.moduleCount - i - 1] = mod
+ } else if (i < 9) {
+ this.modules[8][15 - i - 1 + 1] = mod
+ } else {
+ this.modules[8][15 - i - 1] = mod
+ }
+ }
+ this.modules[this.moduleCount - 8][8] = !test
+ },
+ mapData: function (data, maskPattern) {
+ var inc = -1
+ var row = this.moduleCount - 1
+ var bitIndex = 7
+ var byteIndex = 0
+ for (var col = this.moduleCount - 1; col > 0; col -= 2) {
+ if (col == 6) col--
+ while (true) {
+ for (var c = 0; c < 2; c++) {
+ if (this.modules[row][col - c] == null) {
+ var dark = false
+ if (byteIndex < data.length) {
+ dark = ((data[byteIndex] >>> bitIndex) & 1) == 1
+ }
+ var mask = QRUtil.getMask(maskPattern, row, col - c)
+ if (mask) {
+ dark = !dark
+ }
+ this.modules[row][col - c] = dark
+ bitIndex--
+ if (bitIndex == -1) {
+ byteIndex++
+ bitIndex = 7
+ }
+ }
+ }
+ row += inc
+ if (row < 0 || this.moduleCount <= row) {
+ row -= inc
+ inc = -inc
+ break
+ }
+ }
+ }
+ }
+ }
+ QRCodeModel.PAD0 = 0xec
+ QRCodeModel.PAD1 = 0x11
+ QRCodeModel.createData = function (typeNumber, errorCorrectLevel, dataList) {
+ var rsBlocks = QRRSBlock.getRSBlocks(typeNumber, errorCorrectLevel)
+ var buffer = new QRBitBuffer()
+ for (var i = 0; i < dataList.length; i++) {
+ var data = dataList[i]
+ buffer.put(data.mode, 4)
+ buffer.put(
+ data.getLength(),
+ QRUtil.getLengthInBits(data.mode, typeNumber)
+ )
+ data.write(buffer)
+ }
+ var totalDataCount = 0
+ for (var i = 0; i < rsBlocks.length; i++) {
+ totalDataCount += rsBlocks[i].dataCount
+ }
+ if (buffer.getLengthInBits() > totalDataCount * 8) {
+ throw new Error(
+ 'code length overflow. (' +
+ buffer.getLengthInBits() +
+ '>' +
+ totalDataCount * 8 +
+ ')'
+ )
+ }
+ if (buffer.getLengthInBits() + 4 <= totalDataCount * 8) {
+ buffer.put(0, 4)
+ }
+ while (buffer.getLengthInBits() % 8 != 0) {
+ buffer.putBit(false)
+ }
+ while (true) {
+ if (buffer.getLengthInBits() >= totalDataCount * 8) {
+ break
+ }
+ buffer.put(QRCodeModel.PAD0, 8)
+ if (buffer.getLengthInBits() >= totalDataCount * 8) {
+ break
+ }
+ buffer.put(QRCodeModel.PAD1, 8)
+ }
+ return QRCodeModel.createBytes(buffer, rsBlocks)
+ }
+ QRCodeModel.createBytes = function (buffer, rsBlocks) {
+ var offset = 0
+ var maxDcCount = 0
+ var maxEcCount = 0
+ var dcdata = new Array(rsBlocks.length)
+ var ecdata = new Array(rsBlocks.length)
+ for (var r = 0; r < rsBlocks.length; r++) {
+ var dcCount = rsBlocks[r].dataCount
+ var ecCount = rsBlocks[r].totalCount - dcCount
+ maxDcCount = Math.max(maxDcCount, dcCount)
+ maxEcCount = Math.max(maxEcCount, ecCount)
+ dcdata[r] = new Array(dcCount)
+ for (var i = 0; i < dcdata[r].length; i++) {
+ dcdata[r][i] = 0xff & buffer.buffer[i + offset]
+ }
+ offset += dcCount
+ var rsPoly = QRUtil.getErrorCorrectPolynomial(ecCount)
+ var rawPoly = new QRPolynomial(dcdata[r], rsPoly.getLength() - 1)
+ var modPoly = rawPoly.mod(rsPoly)
+ ecdata[r] = new Array(rsPoly.getLength() - 1)
+ for (var i = 0; i < ecdata[r].length; i++) {
+ var modIndex = i + modPoly.getLength() - ecdata[r].length
+ ecdata[r][i] = modIndex >= 0 ? modPoly.get(modIndex) : 0
+ }
+ }
+ var totalCodeCount = 0
+ for (var i = 0; i < rsBlocks.length; i++) {
+ totalCodeCount += rsBlocks[i].totalCount
+ }
+ var data = new Array(totalCodeCount)
+ var index = 0
+ for (var i = 0; i < maxDcCount; i++) {
+ for (var r = 0; r < rsBlocks.length; r++) {
+ if (i < dcdata[r].length) {
+ data[index++] = dcdata[r][i]
+ }
+ }
+ }
+ for (var i = 0; i < maxEcCount; i++) {
+ for (var r = 0; r < rsBlocks.length; r++) {
+ if (i < ecdata[r].length) {
+ data[index++] = ecdata[r][i]
+ }
+ }
+ }
+ return data
+ }
+ var QRMode = {
+ MODE_NUMBER: 1 << 0,
+ MODE_ALPHA_NUM: 1 << 1,
+ MODE_8BIT_BYTE: 1 << 2,
+ MODE_KANJI: 1 << 3
+ }
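+ // The error-correction values are the 2-bit level indicators from the QR
+ // format information (L=01, M=00, Q=11, H=10), hence the unusual ordering.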
+ var QRErrorCorrectLevel = { L: 1, M: 0, Q: 3, H: 2 }
+ var QRMaskPattern = {
+ PATTERN000: 0,
+ PATTERN001: 1,
+ PATTERN010: 2,
+ PATTERN011: 3,
+ PATTERN100: 4,
+ PATTERN101: 5,
+ PATTERN110: 6,
+ PATTERN111: 7
+ }
+ var QRUtil = {
+ PATTERN_POSITION_TABLE: [
+ [],
+ [6, 18],
+ [6, 22],
+ [6, 26],
+ [6, 30],
+ [6, 34],
+ [6, 22, 38],
+ [6, 24, 42],
+ [6, 26, 46],
+ [6, 28, 50],
+ [6, 30, 54],
+ [6, 32, 58],
+ [6, 34, 62],
+ [6, 26, 46, 66],
+ [6, 26, 48, 70],
+ [6, 26, 50, 74],
+ [6, 30, 54, 78],
+ [6, 30, 56, 82],
+ [6, 30, 58, 86],
+ [6, 34, 62, 90],
+ [6, 28, 50, 72, 94],
+ [6, 26, 50, 74, 98],
+ [6, 30, 54, 78, 102],
+ [6, 28, 54, 80, 106],
+ [6, 32, 58, 84, 110],
+ [6, 30, 58, 86, 114],
+ [6, 34, 62, 90, 118],
+ [6, 26, 50, 74, 98, 122],
+ [6, 30, 54, 78, 102, 126],
+ [6, 26, 52, 78, 104, 130],
+ [6, 30, 56, 82, 108, 134],
+ [6, 34, 60, 86, 112, 138],
+ [6, 30, 58, 86, 114, 142],
+ [6, 34, 62, 90, 118, 146],
+ [6, 30, 54, 78, 102, 126, 150],
+ [6, 24, 50, 76, 102, 128, 154],
+ [6, 28, 54, 80, 106, 132, 158],
+ [6, 32, 58, 84, 110, 136, 162],
+ [6, 26, 54, 82, 110, 138, 166],
+ [6, 30, 58, 86, 114, 142, 170]
+ ],
+ G15:
+ (1 << 10) |
+ (1 << 8) |
+ (1 << 5) |
+ (1 << 4) |
+ (1 << 2) |
+ (1 << 1) |
+ (1 << 0),
+ G18:
+ (1 << 12) |
+ (1 << 11) |
+ (1 << 10) |
+ (1 << 9) |
+ (1 << 8) |
+ (1 << 5) |
+ (1 << 2) |
+ (1 << 0),
+ G15_MASK: (1 << 14) | (1 << 12) | (1 << 10) | (1 << 4) | (1 << 1),
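+ // BCH-protect the format and version information: extend the shifted input
+ // with the remainder of division by the generator polynomials G15 / G18,
+ // and XOR the 15 format bits with G15_MASK as required by the QR spec.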
+ getBCHTypeInfo: function (data) {
+ var d = data << 10
+ while (QRUtil.getBCHDigit(d) - QRUtil.getBCHDigit(QRUtil.G15) >= 0) {
+ d ^=
+ QRUtil.G15 << (QRUtil.getBCHDigit(d) - QRUtil.getBCHDigit(QRUtil.G15))
+ }
+ return ((data << 10) | d) ^ QRUtil.G15_MASK
+ },
+ getBCHTypeNumber: function (data) {
+ var d = data << 12
+ while (QRUtil.getBCHDigit(d) - QRUtil.getBCHDigit(QRUtil.G18) >= 0) {
+ d ^=
+ QRUtil.G18 << (QRUtil.getBCHDigit(d) - QRUtil.getBCHDigit(QRUtil.G18))
+ }
+ return (data << 12) | d
+ },
+ getBCHDigit: function (data) {
+ var digit = 0
+ while (data != 0) {
+ digit++
+ data >>>= 1
+ }
+ return digit
+ },
+ getPatternPosition: function (typeNumber) {
+ return QRUtil.PATTERN_POSITION_TABLE[typeNumber - 1]
+ },
+ getMask: function (maskPattern, i, j) {
+ switch (maskPattern) {
+ case QRMaskPattern.PATTERN000:
+ return (i + j) % 2 == 0
+ case QRMaskPattern.PATTERN001:
+ return i % 2 == 0
+ case QRMaskPattern.PATTERN010:
+ return j % 3 == 0
+ case QRMaskPattern.PATTERN011:
+ return (i + j) % 3 == 0
+ case QRMaskPattern.PATTERN100:
+ return (Math.floor(i / 2) + Math.floor(j / 3)) % 2 == 0
+ case QRMaskPattern.PATTERN101:
+ return (i * j) % 2 + (i * j) % 3 == 0
+ case QRMaskPattern.PATTERN110:
+ return ((i * j) % 2 + (i * j) % 3) % 2 == 0
+ case QRMaskPattern.PATTERN111:
+ return ((i * j) % 3 + (i + j) % 2) % 2 == 0
+ default:
+ throw new Error('bad maskPattern:' + maskPattern)
+ }
+ },
+ getErrorCorrectPolynomial: function (errorCorrectLength) {
+ var a = new QRPolynomial([1], 0)
+ for (var i = 0; i < errorCorrectLength; i++) {
+ a = a.multiply(new QRPolynomial([1, QRMath.gexp(i)], 0))
+ }
+ return a
+ },
+ getLengthInBits: function (mode, type) {
+ if (type >= 1 && type < 10) {
+ switch (mode) {
+ case QRMode.MODE_NUMBER:
+ return 10
+ case QRMode.MODE_ALPHA_NUM:
+ return 9
+ case QRMode.MODE_8BIT_BYTE:
+ return 8
+ case QRMode.MODE_KANJI:
+ return 8
+ default:
+ throw new Error('mode:' + mode)
+ }
+ } else if (type < 27) {
+ switch (mode) {
+ case QRMode.MODE_NUMBER:
+ return 12
+ case QRMode.MODE_ALPHA_NUM:
+ return 11
+ case QRMode.MODE_8BIT_BYTE:
+ return 16
+ case QRMode.MODE_KANJI:
+ return 10
+ default:
+ throw new Error('mode:' + mode)
+ }
+ } else if (type < 41) {
+ switch (mode) {
+ case QRMode.MODE_NUMBER:
+ return 14
+ case QRMode.MODE_ALPHA_NUM:
+ return 13
+ case QRMode.MODE_8BIT_BYTE:
+ return 16
+ case QRMode.MODE_KANJI:
+ return 12
+ default:
+ throw new Error('mode:' + mode)
+ }
+ } else {
+ throw new Error('type:' + type)
+ }
+ },
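+ // Mask evaluation score combining the four penalty rules of the QR spec:
+ // runs of five or more same-coloured modules, 2x2 blocks of one colour,
+ // finder-like 1:1:3:1:1 patterns, and deviation of the dark-module ratio
+ // from 50%.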
+ getLostPoint: function (qrCode) {
+ var moduleCount = qrCode.getModuleCount()
+ var lostPoint = 0
+ for (var row = 0; row < moduleCount; row++) {
+ for (var col = 0; col < moduleCount; col++) {
+ var sameCount = 0
+ var dark = qrCode.isDark(row, col)
+ for (var r = -1; r <= 1; r++) {
+ if (row + r < 0 || moduleCount <= row + r) {
+ continue
+ }
+ for (var c = -1; c <= 1; c++) {
+ if (col + c < 0 || moduleCount <= col + c) {
+ continue
+ }
+ if (r == 0 && c == 0) {
+ continue
+ }
+ if (dark == qrCode.isDark(row + r, col + c)) {
+ sameCount++
+ }
+ }
+ }
+ if (sameCount > 5) {
+ lostPoint += 3 + sameCount - 5
+ }
+ }
+ }
+ for (var row = 0; row < moduleCount - 1; row++) {
+ for (var col = 0; col < moduleCount - 1; col++) {
+ var count = 0
+ if (qrCode.isDark(row, col)) count++
+ if (qrCode.isDark(row + 1, col)) count++
+ if (qrCode.isDark(row, col + 1)) count++
+ if (qrCode.isDark(row + 1, col + 1)) count++
+ if (count == 0 || count == 4) {
+ lostPoint += 3
+ }
+ }
+ }
+ for (var row = 0; row < moduleCount; row++) {
+ for (var col = 0; col < moduleCount - 6; col++) {
+ if (
+ qrCode.isDark(row, col) &&
+ !qrCode.isDark(row, col + 1) &&
+ qrCode.isDark(row, col + 2) &&
+ qrCode.isDark(row, col + 3) &&
+ qrCode.isDark(row, col + 4) &&
+ !qrCode.isDark(row, col + 5) &&
+ qrCode.isDark(row, col + 6)
+ ) {
+ lostPoint += 40
+ }
+ }
+ }
+ for (var col = 0; col < moduleCount; col++) {
+ for (var row = 0; row < moduleCount - 6; row++) {
+ if (
+ qrCode.isDark(row, col) &&
+ !qrCode.isDark(row + 1, col) &&
+ qrCode.isDark(row + 2, col) &&
+ qrCode.isDark(row + 3, col) &&
+ qrCode.isDark(row + 4, col) &&
+ !qrCode.isDark(row + 5, col) &&
+ qrCode.isDark(row + 6, col)
+ ) {
+ lostPoint += 40
+ }
+ }
+ }
+ var darkCount = 0
+ for (var col = 0; col < moduleCount; col++) {
+ for (var row = 0; row < moduleCount; row++) {
+ if (qrCode.isDark(row, col)) {
+ darkCount++
+ }
+ }
+ }
+ var ratio = Math.abs(100 * darkCount / moduleCount / moduleCount - 50) / 5
+ lostPoint += ratio * 10
+ return lostPoint
+ }
+ }
+ var QRMath = {
+ glog: function (n) {
+ if (n < 1) {
+ throw new Error('glog(' + n + ')')
+ }
+ return QRMath.LOG_TABLE[n]
+ },
+ gexp: function (n) {
+ while (n < 0) {
+ n += 255
+ }
+ while (n >= 256) {
+ n -= 255
+ }
+ return QRMath.EXP_TABLE[n]
+ },
+ EXP_TABLE: new Array(256),
+ LOG_TABLE: new Array(256)
+ }
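+ // Build the GF(256) antilog/log tables used by the Reed-Solomon math; the
+ // recurrence reflects the QR primitive polynomial x^8 + x^4 + x^3 + x^2 + 1.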
+ for (var i = 0; i < 8; i++) {
+ QRMath.EXP_TABLE[i] = 1 << i
+ }
+ for (var i = 8; i < 256; i++) {
+ QRMath.EXP_TABLE[i] =
+ QRMath.EXP_TABLE[i - 4] ^
+ QRMath.EXP_TABLE[i - 5] ^
+ QRMath.EXP_TABLE[i - 6] ^
+ QRMath.EXP_TABLE[i - 8]
+ }
+ for (var i = 0; i < 255; i++) {
+ QRMath.LOG_TABLE[QRMath.EXP_TABLE[i]] = i
+ }
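+ // Polynomial over GF(256); mod() cancels the leading term by XOR-ing a
+ // scaled copy of the divisor and recursing, which is how the
+ // error-correction codewords are derived in QRCodeModel.createBytes.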
+ function QRPolynomial (num, shift) {
+ if (num.length == undefined) {
+ throw new Error(num.length + '/' + shift)
+ }
+ var offset = 0
+ while (offset < num.length && num[offset] == 0) {
+ offset++
+ }
+ this.num = new Array(num.length - offset + shift)
+ for (var i = 0; i < num.length - offset; i++) {
+ this.num[i] = num[i + offset]
+ }
+ }
+ QRPolynomial.prototype = {
+ get: function (index) {
+ return this.num[index]
+ },
+ getLength: function () {
+ return this.num.length
+ },
+ multiply: function (e) {
+ var num = new Array(this.getLength() + e.getLength() - 1)
+ for (var i = 0; i < this.getLength(); i++) {
+ for (var j = 0; j < e.getLength(); j++) {
+ num[i + j] ^= QRMath.gexp(
+ QRMath.glog(this.get(i)) + QRMath.glog(e.get(j))
+ )
+ }
+ }
+ return new QRPolynomial(num, 0)
+ },
+ mod: function (e) {
+ if (this.getLength() - e.getLength() < 0) {
+ return this
+ }
+ var ratio = QRMath.glog(this.get(0)) - QRMath.glog(e.get(0))
+ var num = new Array(this.getLength())
+ for (var i = 0; i < this.getLength(); i++) {
+ num[i] = this.get(i)
+ }
+ for (var i = 0; i < e.getLength(); i++) {
+ num[i] ^= QRMath.gexp(QRMath.glog(e.get(i)) + ratio)
+ }
+ return new QRPolynomial(num, 0).mod(e)
+ }
+ }
+ function QRRSBlock (totalCount, dataCount) {
+ this.totalCount = totalCount
+ this.dataCount = dataCount
+ }
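+ // Rows are indexed by (version - 1) * 4 plus 0..3 for levels L, M, Q, H
+ // (see getRsBlockTable); each group of three numbers is (block count,
+ // total codewords per block, data codewords per block).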
+ QRRSBlock.RS_BLOCK_TABLE = [
+ [1, 26, 19],
+ [1, 26, 16],
+ [1, 26, 13],
+ [1, 26, 9],
+ [1, 44, 34],
+ [1, 44, 28],
+ [1, 44, 22],
+ [1, 44, 16],
+ [1, 70, 55],
+ [1, 70, 44],
+ [2, 35, 17],
+ [2, 35, 13],
+ [1, 100, 80],
+ [2, 50, 32],
+ [2, 50, 24],
+ [4, 25, 9],
+ [1, 134, 108],
+ [2, 67, 43],
+ [2, 33, 15, 2, 34, 16],
+ [2, 33, 11, 2, 34, 12],
+ [2, 86, 68],
+ [4, 43, 27],
+ [4, 43, 19],
+ [4, 43, 15],
+ [2, 98, 78],
+ [4, 49, 31],
+ [2, 32, 14, 4, 33, 15],
+ [4, 39, 13, 1, 40, 14],
+ [2, 121, 97],
+ [2, 60, 38, 2, 61, 39],
+ [4, 40, 18, 2, 41, 19],
+ [4, 40, 14, 2, 41, 15],
+ [2, 146, 116],
+ [3, 58, 36, 2, 59, 37],
+ [4, 36, 16, 4, 37, 17],
+ [4, 36, 12, 4, 37, 13],
+ [2, 86, 68, 2, 87, 69],
+ [4, 69, 43, 1, 70, 44],
+ [6, 43, 19, 2, 44, 20],
+ [6, 43, 15, 2, 44, 16],
+ [4, 101, 81],
+ [1, 80, 50, 4, 81, 51],
+ [4, 50, 22, 4, 51, 23],
+ [3, 36, 12, 8, 37, 13],
+ [2, 116, 92, 2, 117, 93],
+ [6, 58, 36, 2, 59, 37],
+ [4, 46, 20, 6, 47, 21],
+ [7, 42, 14, 4, 43, 15],
+ [4, 133, 107],
+ [8, 59, 37, 1, 60, 38],
+ [8, 44, 20, 4, 45, 21],
+ [12, 33, 11, 4, 34, 12],
+ [3, 145, 115, 1, 146, 116],
+ [4, 64, 40, 5, 65, 41],
+ [11, 36, 16, 5, 37, 17],
+ [11, 36, 12, 5, 37, 13],
+ [5, 109, 87, 1, 110, 88],
+ [5, 65, 41, 5, 66, 42],
+ [5, 54, 24, 7, 55, 25],
+ [11, 36, 12],
+ [5, 122, 98, 1, 123, 99],
+ [7, 73, 45, 3, 74, 46],
+ [15, 43, 19, 2, 44, 20],
+ [3, 45, 15, 13, 46, 16],
+ [1, 135, 107, 5, 136, 108],
+ [10, 74, 46, 1, 75, 47],
+ [1, 50, 22, 15, 51, 23],
+ [2, 42, 14, 17, 43, 15],
+ [5, 150, 120, 1, 151, 121],
+ [9, 69, 43, 4, 70, 44],
+ [17, 50, 22, 1, 51, 23],
+ [2, 42, 14, 19, 43, 15],
+ [3, 141, 113, 4, 142, 114],
+ [3, 70, 44, 11, 71, 45],
+ [17, 47, 21, 4, 48, 22],
+ [9, 39, 13, 16, 40, 14],
+ [3, 135, 107, 5, 136, 108],
+ [3, 67, 41, 13, 68, 42],
+ [15, 54, 24, 5, 55, 25],
+ [15, 43, 15, 10, 44, 16],
+ [4, 144, 116, 4, 145, 117],
+ [17, 68, 42],
+ [17, 50, 22, 6, 51, 23],
+ [19, 46, 16, 6, 47, 17],
+ [2, 139, 111, 7, 140, 112],
+ [17, 74, 46],
+ [7, 54, 24, 16, 55, 25],
+ [34, 37, 13],
+ [4, 151, 121, 5, 152, 122],
+ [4, 75, 47, 14, 76, 48],
+ [11, 54, 24, 14, 55, 25],
+ [16, 45, 15, 14, 46, 16],
+ [6, 147, 117, 4, 148, 118],
+ [6, 73, 45, 14, 74, 46],
+ [11, 54, 24, 16, 55, 25],
+ [30, 46, 16, 2, 47, 17],
+ [8, 132, 106, 4, 133, 107],
+ [8, 75, 47, 13, 76, 48],
+ [7, 54, 24, 22, 55, 25],
+ [22, 45, 15, 13, 46, 16],
+ [10, 142, 114, 2, 143, 115],
+ [19, 74, 46, 4, 75, 47],
+ [28, 50, 22, 6, 51, 23],
+ [33, 46, 16, 4, 47, 17],
+ [8, 152, 122, 4, 153, 123],
+ [22, 73, 45, 3, 74, 46],
+ [8, 53, 23, 26, 54, 24],
+ [12, 45, 15, 28, 46, 16],
+ [3, 147, 117, 10, 148, 118],
+ [3, 73, 45, 23, 74, 46],
+ [4, 54, 24, 31, 55, 25],
+ [11, 45, 15, 31, 46, 16],
+ [7, 146, 116, 7, 147, 117],
+ [21, 73, 45, 7, 74, 46],
+ [1, 53, 23, 37, 54, 24],
+ [19, 45, 15, 26, 46, 16],
+ [5, 145, 115, 10, 146, 116],
+ [19, 75, 47, 10, 76, 48],
+ [15, 54, 24, 25, 55, 25],
+ [23, 45, 15, 25, 46, 16],
+ [13, 145, 115, 3, 146, 116],
+ [2, 74, 46, 29, 75, 47],
+ [42, 54, 24, 1, 55, 25],
+ [23, 45, 15, 28, 46, 16],
+ [17, 145, 115],
+ [10, 74, 46, 23, 75, 47],
+ [10, 54, 24, 35, 55, 25],
+ [19, 45, 15, 35, 46, 16],
+ [17, 145, 115, 1, 146, 116],
+ [14, 74, 46, 21, 75, 47],
+ [29, 54, 24, 19, 55, 25],
+ [11, 45, 15, 46, 46, 16],
+ [13, 145, 115, 6, 146, 116],
+ [14, 74, 46, 23, 75, 47],
+ [44, 54, 24, 7, 55, 25],
+ [59, 46, 16, 1, 47, 17],
+ [12, 151, 121, 7, 152, 122],
+ [12, 75, 47, 26, 76, 48],
+ [39, 54, 24, 14, 55, 25],
+ [22, 45, 15, 41, 46, 16],
+ [6, 151, 121, 14, 152, 122],
+ [6, 75, 47, 34, 76, 48],
+ [46, 54, 24, 10, 55, 25],
+ [2, 45, 15, 64, 46, 16],
+ [17, 152, 122, 4, 153, 123],
+ [29, 74, 46, 14, 75, 47],
+ [49, 54, 24, 10, 55, 25],
+ [24, 45, 15, 46, 46, 16],
+ [4, 152, 122, 18, 153, 123],
+ [13, 74, 46, 32, 75, 47],
+ [48, 54, 24, 14, 55, 25],
+ [42, 45, 15, 32, 46, 16],
+ [20, 147, 117, 4, 148, 118],
+ [40, 75, 47, 7, 76, 48],
+ [43, 54, 24, 22, 55, 25],
+ [10, 45, 15, 67, 46, 16],
+ [19, 148, 118, 6, 149, 119],
+ [18, 75, 47, 31, 76, 48],
+ [34, 54, 24, 34, 55, 25],
+ [20, 45, 15, 61, 46, 16]
+ ]
+ QRRSBlock.getRSBlocks = function (typeNumber, errorCorrectLevel) {
+ var rsBlock = QRRSBlock.getRsBlockTable(typeNumber, errorCorrectLevel)
+ if (rsBlock == undefined) {
+ throw new Error(
+ 'bad rs block @ typeNumber:' +
+ typeNumber +
+ '/errorCorrectLevel:' +
+ errorCorrectLevel
+ )
+ }
+ var length = rsBlock.length / 3
+ var list = []
+ for (var i = 0; i < length; i++) {
+ var count = rsBlock[i * 3 + 0]
+ var totalCount = rsBlock[i * 3 + 1]
+ var dataCount = rsBlock[i * 3 + 2]
+ for (var j = 0; j < count; j++) {
+ list.push(new QRRSBlock(totalCount, dataCount))
+ }
+ }
+ return list
+ }
+ QRRSBlock.getRsBlockTable = function (typeNumber, errorCorrectLevel) {
+ switch (errorCorrectLevel) {
+ case QRErrorCorrectLevel.L:
+ return QRRSBlock.RS_BLOCK_TABLE[(typeNumber - 1) * 4 + 0]
+ case QRErrorCorrectLevel.M:
+ return QRRSBlock.RS_BLOCK_TABLE[(typeNumber - 1) * 4 + 1]
+ case QRErrorCorrectLevel.Q:
+ return QRRSBlock.RS_BLOCK_TABLE[(typeNumber - 1) * 4 + 2]
+ case QRErrorCorrectLevel.H:
+ return QRRSBlock.RS_BLOCK_TABLE[(typeNumber - 1) * 4 + 3]
+ default:
+ return undefined
+ }
+ }
+ function QRBitBuffer () {
+ this.buffer = []
+ this.length = 0
+ }
+ QRBitBuffer.prototype = {
+ get: function (index) {
+ var bufIndex = Math.floor(index / 8)
+ return ((this.buffer[bufIndex] >>> (7 - index % 8)) & 1) == 1
+ },
+ put: function (num, length) {
+ for (var i = 0; i < length; i++) {
+ this.putBit(((num >>> (length - i - 1)) & 1) == 1)
+ }
+ },
+ getLengthInBits: function () {
+ return this.length
+ },
+ putBit: function (bit) {
+ var bufIndex = Math.floor(this.length / 8)
+ if (this.buffer.length <= bufIndex) {
+ this.buffer.push(0)
+ }
+ if (bit) {
+ this.buffer[bufIndex] |= 0x80 >>> (this.length % 8)
+ }
+ this.length++
+ }
+ }
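+ // Maximum 8-bit byte capacity per QR version (rows 1-40) at error
+ // correction levels [L, M, Q, H]; _getTypeNumber scans this table for the
+ // smallest version that fits the input. A 40-byte UTF-8 string at level H,
+ // for example, first fits version 5 (limit 44).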
+ var QRCodeLimitLength = [
+ [17, 14, 11, 7],
+ [32, 26, 20, 14],
+ [53, 42, 32, 24],
+ [78, 62, 46, 34],
+ [106, 84, 60, 44],
+ [134, 106, 74, 58],
+ [154, 122, 86, 64],
+ [192, 152, 108, 84],
+ [230, 180, 130, 98],
+ [271, 213, 151, 119],
+ [321, 251, 177, 137],
+ [367, 287, 203, 155],
+ [425, 331, 241, 177],
+ [458, 362, 258, 194],
+ [520, 412, 292, 220],
+ [586, 450, 322, 250],
+ [644, 504, 364, 280],
+ [718, 560, 394, 310],
+ [792, 624, 442, 338],
+ [858, 666, 482, 382],
+ [929, 711, 509, 403],
+ [1003, 779, 565, 439],
+ [1091, 857, 611, 461],
+ [1171, 911, 661, 511],
+ [1273, 997, 715, 535],
+ [1367, 1059, 751, 593],
+ [1465, 1125, 805, 625],
+ [1528, 1190, 868, 658],
+ [1628, 1264, 908, 698],
+ [1732, 1370, 982, 742],
+ [1840, 1452, 1030, 790],
+ [1952, 1538, 1112, 842],
+ [2068, 1628, 1168, 898],
+ [2188, 1722, 1228, 958],
+ [2303, 1809, 1283, 983],
+ [2431, 1911, 1351, 1051],
+ [2563, 1989, 1423, 1093],
+ [2699, 2099, 1499, 1139],
+ [2809, 2213, 1579, 1219],
+ [2953, 2331, 1663, 1273]
+ ]
+
+ function _isSupportCanvas () {
+ return typeof CanvasRenderingContext2D !== 'undefined'
+ }
+
+ // Android 2.x doesn't support the Data-URI spec
+ function _getAndroid () {
+ var android = false
+ var sAgent = navigator.userAgent
+
+ if (/android/i.test(sAgent)) {
+ // android
+ android = true
+ var aMat = sAgent.toString().match(/android ([0-9]\.[0-9])/i)
+
+ if (aMat && aMat[1]) {
+ android = parseFloat(aMat[1])
+ }
+ }
+
+ return android
+ }
+
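+ // SVG renderer: draws a background rect plus one <use> element per dark
+ // module, all referencing a 1x1 template rect, inside a viewBox sized to
+ // the module count.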
+ var svgDrawer = (function () {
+ var Drawing = function (el, htOption) {
+ this._el = el
+ this._htOption = htOption
+ }
+
+ Drawing.prototype.draw = function (oQRCode) {
+ var _htOption = this._htOption
+ var _el = this._el
+ var nCount = oQRCode.getModuleCount()
+ var nWidth = Math.floor(_htOption.width / nCount)
+ var nHeight = Math.floor(_htOption.height / nCount)
+
+ this.clear()
+
+ function makeSVG (tag, attrs) {
+ var el = document.createElementNS('http://www.w3.org/2000/svg', tag)
+ for (var k in attrs) {
+ if (attrs.hasOwnProperty(k)) el.setAttribute(k, attrs[k])
+ }
+ return el
+ }
+
+ var svg = makeSVG('svg', {
+ viewBox: '0 0 ' + String(nCount) + ' ' + String(nCount),
+ width: '100%',
+ height: '100%',
+ fill: _htOption.colorLight
+ })
+ svg.setAttributeNS(
+ 'http://www.w3.org/2000/xmlns/',
+ 'xmlns:xlink',
+ 'http://www.w3.org/1999/xlink'
+ )
+ _el.appendChild(svg)
+
+ svg.appendChild(
+ makeSVG('rect', {
+ fill: _htOption.colorLight,
+ width: '100%',
+ height: '100%'
+ })
+ )
+ svg.appendChild(
+ makeSVG('rect', {
+ fill: _htOption.colorDark,
+ width: '1',
+ height: '1',
+ id: 'template'
+ })
+ )
+
+ for (var row = 0; row < nCount; row++) {
+ for (var col = 0; col < nCount; col++) {
+ if (oQRCode.isDark(row, col)) {
+ var child = makeSVG('use', { x: String(col), y: String(row) })
+ child.setAttributeNS(
+ 'http://www.w3.org/1999/xlink',
+ 'href',
+ '#template'
+ )
+ svg.appendChild(child)
+ }
+ }
+ }
+ }
+ Drawing.prototype.clear = function () {
+ while (this._el.hasChildNodes()) this._el.removeChild(this._el.lastChild)
+ }
+ return Drawing
+ })()
+
+ var useSVG = document.documentElement.tagName.toLowerCase() === 'svg'
+
+ // Pick a renderer: SVG when the document root is an SVG element, an HTML
+ // table fallback when canvas is unavailable, otherwise canvas.
+ var Drawing = useSVG
+ ? svgDrawer
+ : !_isSupportCanvas()
+ ? (function () {
+ var Drawing = function (el, htOption) {
+ this._el = el
+ this._htOption = htOption
+ }
+
+ /**
+ * Draw the QRCode
+ *
+ * @param {QRCode} oQRCode
+ */
+ Drawing.prototype.draw = function (oQRCode) {
+ var _htOption = this._htOption
+ var _el = this._el
+ var nCount = oQRCode.getModuleCount()
+ var nWidth = Math.floor(_htOption.width / nCount)
+ var nHeight = Math.floor(_htOption.height / nCount)
+ var aHTML = ['<table style="border:0;border-collapse:collapse;">']
+
+ for (var row = 0; row < nCount; row++) {
+ aHTML.push('<tr>')
+
+ for (var col = 0; col < nCount; col++) {
+ aHTML.push(
+ '<td style="border:0;border-collapse:collapse;padding:0;margin:0;width:' +
+ nWidth +
+ 'px;height:' +
+ nHeight +
+ 'px;background-color:' +
+ (oQRCode.isDark(row, col)
+ ? _htOption.colorDark
+ : _htOption.colorLight) +
+ ';"></td>'
+ )
+ }
+
+ aHTML.push('</tr>')
+ }
+
+ aHTML.push('</table>')
+ _el.innerHTML = aHTML.join('')
+
+ // Fix the margin values as real size.
+ var elTable = _el.childNodes[0]
+ var nLeftMarginTable = (_htOption.width - elTable.offsetWidth) / 2
+ var nTopMarginTable = (_htOption.height - elTable.offsetHeight) / 2
+
+ if (nLeftMarginTable > 0 && nTopMarginTable > 0) {
+ elTable.style.margin =
+ nTopMarginTable + 'px ' + nLeftMarginTable + 'px'
+ }
+ }
+
+ /**
+ * Clear the QRCode
+ */
+ Drawing.prototype.clear = function () {
+ this._el.innerHTML = ''
+ }
+
+ return Drawing
+ })()
+ : (function () {
+ // Drawing in Canvas
+ function _onMakeImage () {
+ this._elImage.src = this._elCanvas.toDataURL('image/png')
+ this._elImage.style.display = 'block'
+ this._elCanvas.style.display = 'none'
+ }
+
+ // Android 2.1 bug workaround
+ // http://code.google.com/p/android/issues/detail?id=5141
+ var android = _getAndroid()
+ if (android && android <= 2.1) {
+ var factor = 1 / window.devicePixelRatio
+ var drawImage = CanvasRenderingContext2D.prototype.drawImage
+ CanvasRenderingContext2D.prototype.drawImage = function (
+ image,
+ sx,
+ sy,
+ sw,
+ sh,
+ dx,
+ dy,
+ dw,
+ dh
+ ) {
+ if ('nodeName' in image && /img/i.test(image.nodeName)) {
+ for (var i = arguments.length - 1; i >= 1; i--) {
+ arguments[i] = arguments[i] * factor
+ }
+ } else if (typeof dw === 'undefined') {
+ arguments[1] *= factor
+ arguments[2] *= factor
+ arguments[3] *= factor
+ arguments[4] *= factor
+ }
+
+ drawImage.apply(this, arguments)
+ }
+ }
+
+ /**
+ * Check whether the user's browser supports Data URI or not
+ *
+ * @private
+ * @param {Function} fSuccess Occurs if it supports Data URI
+ * @param {Function} fFail Occurs if it doesn't support Data URI
+ */
+ function _safeSetDataURI (fSuccess, fFail) {
+ var self = this
+ self._fFail = fFail
+ self._fSuccess = fSuccess
+
+ // Check it just once
+ if (self._bSupportDataURI === null) {
+ var el = document.createElement('img')
+ var fOnError = function () {
+ self._bSupportDataURI = false
+
+ if (self._fFail) {
+ self._fFail.call(self)
+ }
+ }
+ var fOnSuccess = function () {
+ self._bSupportDataURI = true
+
+ if (self._fSuccess) {
+ self._fSuccess.call(self)
+ }
+ }
+
+ el.onabort = fOnError
+ el.onerror = fOnError
+ el.onload = fOnSuccess
+ el.src =
+ 'data:image/gif;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==' // the Image contains 1px data.
+ } else if (self._bSupportDataURI === true && self._fSuccess) {
+ self._fSuccess.call(self)
+ } else if (self._bSupportDataURI === false && self._fFail) {
+ self._fFail.call(self)
+ }
+ }
+
+ /**
+ * Drawing QRCode by using canvas
+ *
+ * @constructor
+ * @param {HTMLElement} el
+ * @param {Object} htOption QRCode Options
+ */
+ var Drawing = function (el, htOption) {
+ this._bIsPainted = false
+ this._android = _getAndroid()
+
+ this._htOption = htOption
+ this._elCanvas = document.createElement('canvas')
+ this._elCanvas.width = htOption.width
+ this._elCanvas.height = htOption.height
+ el.appendChild(this._elCanvas)
+ this._el = el
+ this._oContext = this._elCanvas.getContext('2d')
+ this._bIsPainted = false
+ this._elImage = document.createElement('img')
+ this._elImage.alt = 'Scan me!'
+ this._elImage.style.display = 'none'
+ this._el.appendChild(this._elImage)
+ this._bSupportDataURI = null
+ }
+
+ /**
+ * Draw the QRCode
+ *
+ * @param {QRCode} oQRCode
+ */
+ Drawing.prototype.draw = function (oQRCode) {
+ var _elImage = this._elImage
+ var _oContext = this._oContext
+ var _htOption = this._htOption
+
+ var nCount = oQRCode.getModuleCount()
+ var nWidth = _htOption.width / nCount
+ var nHeight = _htOption.height / nCount
+ var nRoundedWidth = Math.round(nWidth)
+ var nRoundedHeight = Math.round(nHeight)
+
+ _elImage.style.display = 'none'
+ this.clear()
+
+ for (var row = 0; row < nCount; row++) {
+ for (var col = 0; col < nCount; col++) {
+ var bIsDark = oQRCode.isDark(row, col)
+ var nLeft = col * nWidth
+ var nTop = row * nHeight
+ _oContext.strokeStyle = bIsDark
+ ? _htOption.colorDark
+ : _htOption.colorLight
+ _oContext.lineWidth = 1
+ _oContext.fillStyle = bIsDark
+ ? _htOption.colorDark
+ : _htOption.colorLight
+ _oContext.fillRect(nLeft, nTop, nWidth, nHeight)
+
+ // Stroke the module edges to prevent anti-aliasing seams between cells
+ _oContext.strokeRect(
+ Math.floor(nLeft) + 0.5,
+ Math.floor(nTop) + 0.5,
+ nRoundedWidth,
+ nRoundedHeight
+ )
+
+ _oContext.strokeRect(
+ Math.ceil(nLeft) - 0.5,
+ Math.ceil(nTop) - 0.5,
+ nRoundedWidth,
+ nRoundedHeight
+ )
+ }
+ }
+
+ this._bIsPainted = true
+ }
+
+ /**
+ * Make the image from Canvas if the browser supports Data URI.
+ */
+ Drawing.prototype.makeImage = function () {
+ if (this._bIsPainted) {
+ _safeSetDataURI.call(this, _onMakeImage)
+ }
+ }
+
+ /**
+ * Return whether the QRCode is painted or not
+ *
+ * @return {Boolean}
+ */
+ Drawing.prototype.isPainted = function () {
+ return this._bIsPainted
+ }
+
+ /**
+ * Clear the QRCode
+ */
+ Drawing.prototype.clear = function () {
+ this._oContext.clearRect(
+ 0,
+ 0,
+ this._elCanvas.width,
+ this._elCanvas.height
+ )
+ this._bIsPainted = false
+ }
+
+ /**
+ * @private
+ * @param {Number} nNumber
+ */
+ Drawing.prototype.round = function (nNumber) {
+ if (!nNumber) {
+ return nNumber
+ }
+
+ return Math.floor(nNumber * 1000) / 1000
+ }
+
+ return Drawing
+ })()
+
+ /**
+ * Get the type by string length
+ *
+ * @private
+ * @param {String} sText
+ * @param {Number} nCorrectLevel
+ * @return {Number} type
+ */
+ function _getTypeNumber (sText, nCorrectLevel) {
+ var nType = 1
+ var length = _getUTF8Length(sText)
+
+ for (var i = 0, len = QRCodeLimitLength.length; i < len; i++) {
+ var nLimit = 0
+
+ switch (nCorrectLevel) {
+ case QRErrorCorrectLevel.L:
+ nLimit = QRCodeLimitLength[i][0]
+ break
+ case QRErrorCorrectLevel.M:
+ nLimit = QRCodeLimitLength[i][1]
+ break
+ case QRErrorCorrectLevel.Q:
+ nLimit = QRCodeLimitLength[i][2]
+ break
+ case QRErrorCorrectLevel.H:
+ nLimit = QRCodeLimitLength[i][3]
+ break
+ }
+
+ if (length <= nLimit) {
+ break
+ } else {
+ nType++
+ }
+ }
+
+ if (nType > QRCodeLimitLength.length) {
+ throw new Error('Too long data')
+ }
+
+ return nType
+ }
+
+ function _getUTF8Length (sText) {
+ var replacedText = encodeURI(sText)
+ .toString()
+ .replace(/\%[0-9a-fA-F]{2}/g, 'a')
+ return replacedText.length + (replacedText.length != sText.length ? 3 : 0)
+ }
+
+ /**
+ * @class QRCode
+ * @constructor
+ * @example
+ * new QRCode(document.getElementById("test"), "http://jindo.dev.naver.com/collie");
+ *
+ * @example
+ * var oQRCode = new QRCode("test", {
+ * text : "http://naver.com",
+ * width : 128,
+ * height : 128
+ * });
+ *
+ * oQRCode.clear(); // Clear the QRCode.
+ * oQRCode.makeCode("http://map.naver.com"); // Re-create the QRCode.
+ *
+ * @param {HTMLElement|String} el target element or 'id' attribute of element.
+ * @param {Object|String} vOption
+ * @param {String} vOption.text QRCode link data
+ * @param {Number} [vOption.width=256]
+ * @param {Number} [vOption.height=256]
+ * @param {String} [vOption.colorDark="#000000"]
+ * @param {String} [vOption.colorLight="#ffffff"]
+ * @param {QRCode.CorrectLevel} [vOption.correctLevel=QRCode.CorrectLevel.H] [L|M|Q|H]
+ */
+ QRCode = function (el, vOption) {
+ this._htOption = {
+ width: 256,
+ height: 256,
+ typeNumber: 4,
+ colorDark: '#000000',
+ colorLight: '#ffffff',
+ correctLevel: QRErrorCorrectLevel.H
+ }
+
+ if (typeof vOption === 'string') {
+ vOption = {
+ text: vOption
+ }
+ }
+
+ // Overwrites options
+ if (vOption) {
+ for (var i in vOption) {
+ this._htOption[i] = vOption[i]
+ }
+ }
+
+ if (typeof el === 'string') {
+ el = document.getElementById(el)
+ }
+
+ if (this._htOption.useSVG) {
+ Drawing = svgDrawer
+ }
+
+ this._android = _getAndroid()
+ this._el = el
+ this._oQRCode = null
+ this._oDrawing = new Drawing(this._el, this._htOption)
+
+ if (this._htOption.text) {
+ this.makeCode(this._htOption.text)
+ }
+ }
+
+ /**
+ * Make the QRCode
+ *
+ * @param {String} sText link data
+ */
+ QRCode.prototype.makeCode = function (sText) {
+ this._oQRCode = new QRCodeModel(
+ _getTypeNumber(sText, this._htOption.correctLevel),
+ this._htOption.correctLevel
+ )
+ this._oQRCode.addData(sText)
+ this._oQRCode.make()
+ this._el.title = sText
+ this._oDrawing.draw(this._oQRCode)
+ this.makeImage()
+ }
+
+ /**
+ * Make the Image from Canvas element
+ * - It occurs automatically
+ * - Android below 3 doesn't support Data-URI spec.
+ *
+ * @private
+ */
+ QRCode.prototype.makeImage = function () {
+ if (
+ typeof this._oDrawing.makeImage === 'function' &&
+ (!this._android || this._android >= 3)
+ ) {
+ this._oDrawing.makeImage()
+ }
+ }
+
+ /**
+ * Clear the QRCode
+ */
+ QRCode.prototype.clear = function () {
+ this._oDrawing.clear()
+ }
+
+ /**
+ * @name QRCode.CorrectLevel
+ */
+ QRCode.CorrectLevel = QRErrorCorrectLevel
+})()
diff --git a/testing/web-platform/tests/tools/wave/www/lib/jszip.min.js b/testing/web-platform/tests/tools/wave/www/lib/jszip.min.js
new file mode 100644
index 0000000000..b9188736b7
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/lib/jszip.min.js
@@ -0,0 +1,15 @@
+/*!
+
+JSZip v3.1.5 - A JavaScript class for generating and reading zip files
+<http://stuartk.com/jszip>
+
+(c) 2009-2016 Stuart Knightley <stuart [at] stuartk.com>
+Dual licenced under the MIT license or GPLv3. See https://raw.github.com/Stuk/jszip/master/LICENSE.markdown.
+
+JSZip uses the library pako released under the MIT license :
+https://github.com/nodeca/pako/blob/master/LICENSE
+*/
+!function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,b.JSZip=a()}}(function(){return function a(b,c,d){function e(g,h){if(!c[g]){if(!b[g]){var i="function"==typeof require&&require;if(!h&&i)return i(g,!0);if(f)return f(g,!0);var j=new Error("Cannot find module '"+g+"'");throw j.code="MODULE_NOT_FOUND",j}var k=c[g]={exports:{}};b[g][0].call(k.exports,function(a){var c=b[g][1][a];return e(c?c:a)},k,k.exports,a,b,c,d)}return c[g].exports}for(var f="function"==typeof require&&require,g=0;g<d.length;g++)e(d[g]);return e}({1:[function(a,b,c){"use strict";var d=a("./utils"),e=a("./support"),f="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";c.encode=function(a){for(var b,c,e,g,h,i,j,k=[],l=0,m=a.length,n=m,o="string"!==d.getTypeOf(a);l<a.length;)n=m-l,o?(b=a[l++],c=l<m?a[l++]:0,e=l<m?a[l++]:0):(b=a.charCodeAt(l++),c=l<m?a.charCodeAt(l++):0,e=l<m?a.charCodeAt(l++):0),g=b>>2,h=(3&b)<<4|c>>4,i=n>1?(15&c)<<2|e>>6:64,j=n>2?63&e:64,k.push(f.charAt(g)+f.charAt(h)+f.charAt(i)+f.charAt(j));return k.join("")},c.decode=function(a){var b,c,d,g,h,i,j,k=0,l=0,m="data:";if(a.substr(0,m.length)===m)throw new Error("Invalid base64 input, it looks like a data url.");a=a.replace(/[^A-Za-z0-9\+\/\=]/g,"");var n=3*a.length/4;if(a.charAt(a.length-1)===f.charAt(64)&&n--,a.charAt(a.length-2)===f.charAt(64)&&n--,n%1!==0)throw new Error("Invalid base64 input, bad content length.");var o;for(o=e.uint8array?new Uint8Array(0|n):new Array(0|n);k<a.length;)g=f.indexOf(a.charAt(k++)),h=f.indexOf(a.charAt(k++)),i=f.indexOf(a.charAt(k++)),j=f.indexOf(a.charAt(k++)),b=g<<2|h>>4,c=(15&h)<<4|i>>2,d=(3&i)<<6|j,o[l++]=b,64!==i&&(o[l++]=c),64!==j&&(o[l++]=d);return o}},{"./support":30,"./utils":32}],2:[function(a,b,c){"use strict";function d(a,b,c,d,e){this.compressedSize=a,this.uncompressedSize=b,this.crc32=c,this.compression=d,this.compressedContent=e}var e=a("./external"),f=a("./stream/DataWorker"),g=a("./stream/DataLengthProbe"),h=a("./stream/Crc32Probe"),g=a("./stream/DataLengthProbe");d.prototype={getContentWorker:function(){var a=new f(e.Promise.resolve(this.compressedContent)).pipe(this.compression.uncompressWorker()).pipe(new g("data_length")),b=this;return a.on("end",function(){if(this.streamInfo.data_length!==b.uncompressedSize)throw new Error("Bug : uncompressed data size mismatch")}),a},getCompressedWorker:function(){return new f(e.Promise.resolve(this.compressedContent)).withStreamInfo("compressedSize",this.compressedSize).withStreamInfo("uncompressedSize",this.uncompressedSize).withStreamInfo("crc32",this.crc32).withStreamInfo("compression",this.compression)}},d.createWorkerFrom=function(a,b,c){return a.pipe(new h).pipe(new g("uncompressedSize")).pipe(b.compressWorker(c)).pipe(new g("compressedSize")).withStreamInfo("compression",b)},b.exports=d},{"./external":6,"./stream/Crc32Probe":25,"./stream/DataLengthProbe":26,"./stream/DataWorker":27}],3:[function(a,b,c){"use strict";var d=a("./stream/GenericWorker");c.STORE={magic:"\0\0",compressWorker:function(a){return new d("STORE compression")},uncompressWorker:function(){return new d("STORE decompression")}},c.DEFLATE=a("./flate")},{"./flate":7,"./stream/GenericWorker":28}],4:[function(a,b,c){"use strict";function d(){for(var a,b=[],c=0;c<256;c++){a=c;for(var d=0;d<8;d++)a=1&a?3988292384^a>>>1:a>>>1;b[c]=a}return b}function e(a,b,c,d){var 
e=h,f=d+c;a^=-1;for(var g=d;g<f;g++)a=a>>>8^e[255&(a^b[g])];return a^-1}function f(a,b,c,d){var e=h,f=d+c;a^=-1;for(var g=d;g<f;g++)a=a>>>8^e[255&(a^b.charCodeAt(g))];return a^-1}var g=a("./utils"),h=d();b.exports=function(a,b){if("undefined"==typeof a||!a.length)return 0;var c="string"!==g.getTypeOf(a);return c?e(0|b,a,a.length,0):f(0|b,a,a.length,0)}},{"./utils":32}],5:[function(a,b,c){"use strict";c.base64=!1,c.binary=!1,c.dir=!1,c.createFolders=!0,c.date=null,c.compression=null,c.compressionOptions=null,c.comment=null,c.unixPermissions=null,c.dosPermissions=null},{}],6:[function(a,b,c){"use strict";var d=null;d="undefined"!=typeof Promise?Promise:a("lie"),b.exports={Promise:d}},{lie:58}],7:[function(a,b,c){"use strict";function d(a,b){h.call(this,"FlateWorker/"+a),this._pako=null,this._pakoAction=a,this._pakoOptions=b,this.meta={}}var e="undefined"!=typeof Uint8Array&&"undefined"!=typeof Uint16Array&&"undefined"!=typeof Uint32Array,f=a("pako"),g=a("./utils"),h=a("./stream/GenericWorker"),i=e?"uint8array":"array";c.magic="\b\0",g.inherits(d,h),d.prototype.processChunk=function(a){this.meta=a.meta,null===this._pako&&this._createPako(),this._pako.push(g.transformTo(i,a.data),!1)},d.prototype.flush=function(){h.prototype.flush.call(this),null===this._pako&&this._createPako(),this._pako.push([],!0)},d.prototype.cleanUp=function(){h.prototype.cleanUp.call(this),this._pako=null},d.prototype._createPako=function(){this._pako=new f[this._pakoAction]({raw:!0,level:this._pakoOptions.level||-1});var a=this;this._pako.onData=function(b){a.push({data:b,meta:a.meta})}},c.compressWorker=function(a){return new d("Deflate",a)},c.uncompressWorker=function(){return new d("Inflate",{})}},{"./stream/GenericWorker":28,"./utils":32,pako:59}],8:[function(a,b,c){"use strict";function d(a,b,c,d){f.call(this,"ZipFileWorker"),this.bytesWritten=0,this.zipComment=b,this.zipPlatform=c,this.encodeFileName=d,this.streamFiles=a,this.accumulate=!1,this.contentBuffer=[],this.dirRecords=[],this.currentSourceOffset=0,this.entriesCount=0,this.currentFile=null,this._sources=[]}var e=a("../utils"),f=a("../stream/GenericWorker"),g=a("../utf8"),h=a("../crc32"),i=a("../signature"),j=function(a,b){var c,d="";for(c=0;c<b;c++)d+=String.fromCharCode(255&a),a>>>=8;return d},k=function(a,b){var c=a;return a||(c=b?16893:33204),(65535&c)<<16},l=function(a,b){return 63&(a||0)},m=function(a,b,c,d,f,m){var n,o,p=a.file,q=a.compression,r=m!==g.utf8encode,s=e.transformTo("string",m(p.name)),t=e.transformTo("string",g.utf8encode(p.name)),u=p.comment,v=e.transformTo("string",m(u)),w=e.transformTo("string",g.utf8encode(u)),x=t.length!==p.name.length,y=w.length!==u.length,z="",A="",B="",C=p.dir,D=p.date,E={crc32:0,compressedSize:0,uncompressedSize:0};b&&!c||(E.crc32=a.crc32,E.compressedSize=a.compressedSize,E.uncompressedSize=a.uncompressedSize);var F=0;b&&(F|=8),r||!x&&!y||(F|=2048);var G=0,H=0;C&&(G|=16),"UNIX"===f?(H=798,G|=k(p.unixPermissions,C)):(H=20,G|=l(p.dosPermissions,C)),n=D.getUTCHours(),n<<=6,n|=D.getUTCMinutes(),n<<=5,n|=D.getUTCSeconds()/2,o=D.getUTCFullYear()-1980,o<<=4,o|=D.getUTCMonth()+1,o<<=5,o|=D.getUTCDate(),x&&(A=j(1,1)+j(h(s),4)+t,z+="up"+j(A.length,2)+A),y&&(B=j(1,1)+j(h(v),4)+w,z+="uc"+j(B.length,2)+B);var I="";I+="\n\0",I+=j(F,2),I+=q.magic,I+=j(n,2),I+=j(o,2),I+=j(E.crc32,4),I+=j(E.compressedSize,4),I+=j(E.uncompressedSize,4),I+=j(s.length,2),I+=j(z.length,2);var 
J=i.LOCAL_FILE_HEADER+I+s+z,K=i.CENTRAL_FILE_HEADER+j(H,2)+I+j(v.length,2)+"\0\0\0\0"+j(G,4)+j(d,4)+s+z+v;return{fileRecord:J,dirRecord:K}},n=function(a,b,c,d,f){var g="",h=e.transformTo("string",f(d));return g=i.CENTRAL_DIRECTORY_END+"\0\0\0\0"+j(a,2)+j(a,2)+j(b,4)+j(c,4)+j(h.length,2)+h},o=function(a){var b="";return b=i.DATA_DESCRIPTOR+j(a.crc32,4)+j(a.compressedSize,4)+j(a.uncompressedSize,4)};e.inherits(d,f),d.prototype.push=function(a){var b=a.meta.percent||0,c=this.entriesCount,d=this._sources.length;this.accumulate?this.contentBuffer.push(a):(this.bytesWritten+=a.data.length,f.prototype.push.call(this,{data:a.data,meta:{currentFile:this.currentFile,percent:c?(b+100*(c-d-1))/c:100}}))},d.prototype.openedSource=function(a){this.currentSourceOffset=this.bytesWritten,this.currentFile=a.file.name;var b=this.streamFiles&&!a.file.dir;if(b){var c=m(a,b,!1,this.currentSourceOffset,this.zipPlatform,this.encodeFileName);this.push({data:c.fileRecord,meta:{percent:0}})}else this.accumulate=!0},d.prototype.closedSource=function(a){this.accumulate=!1;var b=this.streamFiles&&!a.file.dir,c=m(a,b,!0,this.currentSourceOffset,this.zipPlatform,this.encodeFileName);if(this.dirRecords.push(c.dirRecord),b)this.push({data:o(a),meta:{percent:100}});else for(this.push({data:c.fileRecord,meta:{percent:0}});this.contentBuffer.length;)this.push(this.contentBuffer.shift());this.currentFile=null},d.prototype.flush=function(){for(var a=this.bytesWritten,b=0;b<this.dirRecords.length;b++)this.push({data:this.dirRecords[b],meta:{percent:100}});var c=this.bytesWritten-a,d=n(this.dirRecords.length,c,a,this.zipComment,this.encodeFileName);this.push({data:d,meta:{percent:100}})},d.prototype.prepareNextSource=function(){this.previous=this._sources.shift(),this.openedSource(this.previous.streamInfo),this.isPaused?this.previous.pause():this.previous.resume()},d.prototype.registerPrevious=function(a){this._sources.push(a);var b=this;return a.on("data",function(a){b.processChunk(a)}),a.on("end",function(){b.closedSource(b.previous.streamInfo),b._sources.length?b.prepareNextSource():b.end()}),a.on("error",function(a){b.error(a)}),this},d.prototype.resume=function(){return!!f.prototype.resume.call(this)&&(!this.previous&&this._sources.length?(this.prepareNextSource(),!0):this.previous||this._sources.length||this.generatedError?void 0:(this.end(),!0))},d.prototype.error=function(a){var b=this._sources;if(!f.prototype.error.call(this,a))return!1;for(var c=0;c<b.length;c++)try{b[c].error(a)}catch(a){}return!0},d.prototype.lock=function(){f.prototype.lock.call(this);for(var a=this._sources,b=0;b<a.length;b++)a[b].lock()},b.exports=d},{"../crc32":4,"../signature":23,"../stream/GenericWorker":28,"../utf8":31,"../utils":32}],9:[function(a,b,c){"use strict";var d=a("../compressions"),e=a("./ZipFileWorker"),f=function(a,b){var c=a||b,e=d[c];if(!e)throw new Error(c+" is not a valid compression method !");return e};c.generateWorker=function(a,b,c){var d=new e(b.streamFiles,c,b.platform,b.encodeFileName),g=0;try{a.forEach(function(a,c){g++;var e=f(c.options.compression,b.compression),h=c.options.compressionOptions||b.compressionOptions||{},i=c.dir,j=c.date;c._compressWorker(e,h).withStreamInfo("file",{name:a,dir:i,date:j,comment:c.comment||"",unixPermissions:c.unixPermissions,dosPermissions:c.dosPermissions}).pipe(d)}),d.entriesCount=g}catch(h){d.error(h)}return d}},{"../compressions":3,"./ZipFileWorker":8}],10:[function(a,b,c){"use strict";function d(){if(!(this instanceof d))return new d;if(arguments.length)throw new Error("The 
constructor with parameters has been removed in JSZip 3.0, please check the upgrade guide.");this.files={},this.comment=null,this.root="",this.clone=function(){var a=new d;for(var b in this)"function"!=typeof this[b]&&(a[b]=this[b]);return a}}d.prototype=a("./object"),d.prototype.loadAsync=a("./load"),d.support=a("./support"),d.defaults=a("./defaults"),d.version="3.1.5",d.loadAsync=function(a,b){return(new d).loadAsync(a,b)},d.external=a("./external"),b.exports=d},{"./defaults":5,"./external":6,"./load":11,"./object":15,"./support":30}],11:[function(a,b,c){"use strict";function d(a){return new f.Promise(function(b,c){var d=a.decompressed.getContentWorker().pipe(new i);d.on("error",function(a){c(a)}).on("end",function(){d.streamInfo.crc32!==a.decompressed.crc32?c(new Error("Corrupted zip : CRC32 mismatch")):b()}).resume()})}var e=a("./utils"),f=a("./external"),g=a("./utf8"),e=a("./utils"),h=a("./zipEntries"),i=a("./stream/Crc32Probe"),j=a("./nodejsUtils");b.exports=function(a,b){var c=this;return b=e.extend(b||{},{base64:!1,checkCRC32:!1,optimizedBinaryString:!1,createFolders:!1,decodeFileName:g.utf8decode}),j.isNode&&j.isStream(a)?f.Promise.reject(new Error("JSZip can't accept a stream when loading a zip file.")):e.prepareContent("the loaded zip file",a,!0,b.optimizedBinaryString,b.base64).then(function(a){var c=new h(b);return c.load(a),c}).then(function(a){var c=[f.Promise.resolve(a)],e=a.files;if(b.checkCRC32)for(var g=0;g<e.length;g++)c.push(d(e[g]));return f.Promise.all(c)}).then(function(a){for(var d=a.shift(),e=d.files,f=0;f<e.length;f++){var g=e[f];c.file(g.fileNameStr,g.decompressed,{binary:!0,optimizedBinaryString:!0,date:g.date,dir:g.dir,comment:g.fileCommentStr.length?g.fileCommentStr:null,unixPermissions:g.unixPermissions,dosPermissions:g.dosPermissions,createFolders:b.createFolders})}return d.zipComment.length&&(c.comment=d.zipComment),c})}},{"./external":6,"./nodejsUtils":14,"./stream/Crc32Probe":25,"./utf8":31,"./utils":32,"./zipEntries":33}],12:[function(a,b,c){"use strict";function d(a,b){f.call(this,"Nodejs stream input adapter for "+a),this._upstreamEnded=!1,this._bindStream(b)}var e=a("../utils"),f=a("../stream/GenericWorker");e.inherits(d,f),d.prototype._bindStream=function(a){var b=this;this._stream=a,a.pause(),a.on("data",function(a){b.push({data:a,meta:{percent:0}})}).on("error",function(a){b.isPaused?this.generatedError=a:b.error(a)}).on("end",function(){b.isPaused?b._upstreamEnded=!0:b.end()})},d.prototype.pause=function(){return!!f.prototype.pause.call(this)&&(this._stream.pause(),!0)},d.prototype.resume=function(){return!!f.prototype.resume.call(this)&&(this._upstreamEnded?this.end():this._stream.resume(),!0)},b.exports=d},{"../stream/GenericWorker":28,"../utils":32}],13:[function(a,b,c){"use strict";function d(a,b,c){e.call(this,b),this._helper=a;var d=this;a.on("data",function(a,b){d.push(a)||d._helper.pause(),c&&c(b)}).on("error",function(a){d.emit("error",a)}).on("end",function(){d.push(null)})}var e=a("readable-stream").Readable,f=a("../utils");f.inherits(d,e),d.prototype._read=function(){this._helper.resume()},b.exports=d},{"../utils":32,"readable-stream":16}],14:[function(a,b,c){"use strict";b.exports={isNode:"undefined"!=typeof Buffer,newBufferFrom:function(a,b){return new Buffer(a,b)},allocBuffer:function(a){return Buffer.alloc?Buffer.alloc(a):new Buffer(a)},isBuffer:function(a){return Buffer.isBuffer(a)},isStream:function(a){return a&&"function"==typeof a.on&&"function"==typeof a.pause&&"function"==typeof a.resume}}},{}],15:[function(a,b,c){"use 
strict";function d(a){return"[object RegExp]"===Object.prototype.toString.call(a)}var e=a("./utf8"),f=a("./utils"),g=a("./stream/GenericWorker"),h=a("./stream/StreamHelper"),i=a("./defaults"),j=a("./compressedObject"),k=a("./zipObject"),l=a("./generate"),m=a("./nodejsUtils"),n=a("./nodejs/NodejsStreamInputAdapter"),o=function(a,b,c){var d,e=f.getTypeOf(b),h=f.extend(c||{},i);h.date=h.date||new Date,null!==h.compression&&(h.compression=h.compression.toUpperCase()),"string"==typeof h.unixPermissions&&(h.unixPermissions=parseInt(h.unixPermissions,8)),h.unixPermissions&&16384&h.unixPermissions&&(h.dir=!0),h.dosPermissions&&16&h.dosPermissions&&(h.dir=!0),h.dir&&(a=q(a)),h.createFolders&&(d=p(a))&&r.call(this,d,!0);var l="string"===e&&h.binary===!1&&h.base64===!1;c&&"undefined"!=typeof c.binary||(h.binary=!l);var o=b instanceof j&&0===b.uncompressedSize;(o||h.dir||!b||0===b.length)&&(h.base64=!1,h.binary=!0,b="",h.compression="STORE",e="string");var s=null;s=b instanceof j||b instanceof g?b:m.isNode&&m.isStream(b)?new n(a,b):f.prepareContent(a,b,h.binary,h.optimizedBinaryString,h.base64);var t=new k(a,s,h);this.files[a]=t},p=function(a){"/"===a.slice(-1)&&(a=a.substring(0,a.length-1));var b=a.lastIndexOf("/");return b>0?a.substring(0,b):""},q=function(a){return"/"!==a.slice(-1)&&(a+="/"),a},r=function(a,b){return b="undefined"!=typeof b?b:i.createFolders,a=q(a),this.files[a]||o.call(this,a,null,{dir:!0,createFolders:b}),this.files[a]},s={load:function(){throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.")},forEach:function(a){var b,c,d;for(b in this.files)this.files.hasOwnProperty(b)&&(d=this.files[b],c=b.slice(this.root.length,b.length),c&&b.slice(0,this.root.length)===this.root&&a(c,d))},filter:function(a){var b=[];return this.forEach(function(c,d){a(c,d)&&b.push(d)}),b},file:function(a,b,c){if(1===arguments.length){if(d(a)){var e=a;return this.filter(function(a,b){return!b.dir&&e.test(a)})}var f=this.files[this.root+a];return f&&!f.dir?f:null}return a=this.root+a,o.call(this,a,b,c),this},folder:function(a){if(!a)return this;if(d(a))return this.filter(function(b,c){return c.dir&&a.test(b)});var b=this.root+a,c=r.call(this,b),e=this.clone();return e.root=c.name,e},remove:function(a){a=this.root+a;var b=this.files[a];if(b||("/"!==a.slice(-1)&&(a+="/"),b=this.files[a]),b&&!b.dir)delete this.files[a];else for(var c=this.filter(function(b,c){return c.name.slice(0,a.length)===a}),d=0;d<c.length;d++)delete this.files[c[d].name];return this},generate:function(a){throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.")},generateInternalStream:function(a){var b,c={};try{if(c=f.extend(a||{},{streamFiles:!1,compression:"STORE",compressionOptions:null,type:"",platform:"DOS",comment:null,mimeType:"application/zip",encodeFileName:e.utf8encode}),c.type=c.type.toLowerCase(),c.compression=c.compression.toUpperCase(),"binarystring"===c.type&&(c.type="string"),!c.type)throw new Error("No output type specified.");f.checkSupport(c.type),"darwin"!==c.platform&&"freebsd"!==c.platform&&"linux"!==c.platform&&"sunos"!==c.platform||(c.platform="UNIX"),"win32"===c.platform&&(c.platform="DOS");var d=c.comment||this.comment||"";b=l.generateWorker(this,c,d)}catch(i){b=new g("error"),b.error(i)}return new h(b,c.type||"string",c.mimeType)},generateAsync:function(a,b){return this.generateInternalStream(a).accumulate(b)},generateNodeStream:function(a,b){return 
a=a||{},a.type||(a.type="nodebuffer"),this.generateInternalStream(a).toNodejsStream(b)}};b.exports=s},{"./compressedObject":2,"./defaults":5,"./generate":9,"./nodejs/NodejsStreamInputAdapter":12,"./nodejsUtils":14,"./stream/GenericWorker":28,"./stream/StreamHelper":29,"./utf8":31,"./utils":32,"./zipObject":35}],16:[function(a,b,c){b.exports=a("stream")},{stream:void 0}],17:[function(a,b,c){"use strict";function d(a){e.call(this,a);for(var b=0;b<this.data.length;b++)a[b]=255&a[b]}var e=a("./DataReader"),f=a("../utils");f.inherits(d,e),d.prototype.byteAt=function(a){return this.data[this.zero+a]},d.prototype.lastIndexOfSignature=function(a){for(var b=a.charCodeAt(0),c=a.charCodeAt(1),d=a.charCodeAt(2),e=a.charCodeAt(3),f=this.length-4;f>=0;--f)if(this.data[f]===b&&this.data[f+1]===c&&this.data[f+2]===d&&this.data[f+3]===e)return f-this.zero;return-1},d.prototype.readAndCheckSignature=function(a){var b=a.charCodeAt(0),c=a.charCodeAt(1),d=a.charCodeAt(2),e=a.charCodeAt(3),f=this.readData(4);return b===f[0]&&c===f[1]&&d===f[2]&&e===f[3]},d.prototype.readData=function(a){if(this.checkOffset(a),0===a)return[];var b=this.data.slice(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./DataReader":18}],18:[function(a,b,c){"use strict";function d(a){this.data=a,this.length=a.length,this.index=0,this.zero=0}var e=a("../utils");d.prototype={checkOffset:function(a){this.checkIndex(this.index+a)},checkIndex:function(a){if(this.length<this.zero+a||a<0)throw new Error("End of data reached (data length = "+this.length+", asked index = "+a+"). Corrupted zip ?")},setIndex:function(a){this.checkIndex(a),this.index=a},skip:function(a){this.setIndex(this.index+a)},byteAt:function(a){},readInt:function(a){var b,c=0;for(this.checkOffset(a),b=this.index+a-1;b>=this.index;b--)c=(c<<8)+this.byteAt(b);return this.index+=a,c},readString:function(a){return e.transformTo("string",this.readData(a))},readData:function(a){},lastIndexOfSignature:function(a){},readAndCheckSignature:function(a){},readDate:function(){var a=this.readInt(4);return new Date(Date.UTC((a>>25&127)+1980,(a>>21&15)-1,a>>16&31,a>>11&31,a>>5&63,(31&a)<<1))}},b.exports=d},{"../utils":32}],19:[function(a,b,c){"use strict";function d(a){e.call(this,a)}var e=a("./Uint8ArrayReader"),f=a("../utils");f.inherits(d,e),d.prototype.readData=function(a){this.checkOffset(a);var b=this.data.slice(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./Uint8ArrayReader":21}],20:[function(a,b,c){"use strict";function d(a){e.call(this,a)}var e=a("./DataReader"),f=a("../utils");f.inherits(d,e),d.prototype.byteAt=function(a){return this.data.charCodeAt(this.zero+a)},d.prototype.lastIndexOfSignature=function(a){return this.data.lastIndexOf(a)-this.zero},d.prototype.readAndCheckSignature=function(a){var b=this.readData(4);return a===b},d.prototype.readData=function(a){this.checkOffset(a);var b=this.data.slice(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./DataReader":18}],21:[function(a,b,c){"use strict";function d(a){e.call(this,a)}var e=a("./ArrayReader"),f=a("../utils");f.inherits(d,e),d.prototype.readData=function(a){if(this.checkOffset(a),0===a)return new Uint8Array(0);var b=this.data.subarray(this.zero+this.index,this.zero+this.index+a);return this.index+=a,b},b.exports=d},{"../utils":32,"./ArrayReader":17}],22:[function(a,b,c){"use strict";var 
d=a("../utils"),e=a("../support"),f=a("./ArrayReader"),g=a("./StringReader"),h=a("./NodeBufferReader"),i=a("./Uint8ArrayReader");b.exports=function(a){var b=d.getTypeOf(a);return d.checkSupport(b),"string"!==b||e.uint8array?"nodebuffer"===b?new h(a):e.uint8array?new i(d.transformTo("uint8array",a)):new f(d.transformTo("array",a)):new g(a)}},{"../support":30,"../utils":32,"./ArrayReader":17,"./NodeBufferReader":19,"./StringReader":20,"./Uint8ArrayReader":21}],23:[function(a,b,c){"use strict";c.LOCAL_FILE_HEADER="PK",c.CENTRAL_FILE_HEADER="PK",c.CENTRAL_DIRECTORY_END="PK",c.ZIP64_CENTRAL_DIRECTORY_LOCATOR="PK",c.ZIP64_CENTRAL_DIRECTORY_END="PK",c.DATA_DESCRIPTOR="PK\b"},{}],24:[function(a,b,c){"use strict";function d(a){e.call(this,"ConvertWorker to "+a),this.destType=a}var e=a("./GenericWorker"),f=a("../utils");f.inherits(d,e),d.prototype.processChunk=function(a){this.push({data:f.transformTo(this.destType,a.data),meta:a.meta})},b.exports=d},{"../utils":32,"./GenericWorker":28}],25:[function(a,b,c){"use strict";function d(){e.call(this,"Crc32Probe"),this.withStreamInfo("crc32",0)}var e=a("./GenericWorker"),f=a("../crc32"),g=a("../utils");g.inherits(d,e),d.prototype.processChunk=function(a){this.streamInfo.crc32=f(a.data,this.streamInfo.crc32||0),this.push(a)},b.exports=d},{"../crc32":4,"../utils":32,"./GenericWorker":28}],26:[function(a,b,c){"use strict";function d(a){f.call(this,"DataLengthProbe for "+a),this.propName=a,this.withStreamInfo(a,0)}var e=a("../utils"),f=a("./GenericWorker");e.inherits(d,f),d.prototype.processChunk=function(a){if(a){var b=this.streamInfo[this.propName]||0;this.streamInfo[this.propName]=b+a.data.length}f.prototype.processChunk.call(this,a)},b.exports=d},{"../utils":32,"./GenericWorker":28}],27:[function(a,b,c){"use strict";function d(a){f.call(this,"DataWorker");var b=this;this.dataIsReady=!1,this.index=0,this.max=0,this.data=null,this.type="",this._tickScheduled=!1,a.then(function(a){b.dataIsReady=!0,b.data=a,b.max=a&&a.length||0,b.type=e.getTypeOf(a),b.isPaused||b._tickAndRepeat()},function(a){b.error(a)})}var e=a("../utils"),f=a("./GenericWorker"),g=16384;e.inherits(d,f),d.prototype.cleanUp=function(){f.prototype.cleanUp.call(this),this.data=null},d.prototype.resume=function(){return!!f.prototype.resume.call(this)&&(!this._tickScheduled&&this.dataIsReady&&(this._tickScheduled=!0,e.delay(this._tickAndRepeat,[],this)),!0)},d.prototype._tickAndRepeat=function(){this._tickScheduled=!1,this.isPaused||this.isFinished||(this._tick(),this.isFinished||(e.delay(this._tickAndRepeat,[],this),this._tickScheduled=!0))},d.prototype._tick=function(){if(this.isPaused||this.isFinished)return!1;var a=g,b=null,c=Math.min(this.max,this.index+a);if(this.index>=this.max)return this.end();switch(this.type){case"string":b=this.data.substring(this.index,c);break;case"uint8array":b=this.data.subarray(this.index,c);break;case"array":case"nodebuffer":b=this.data.slice(this.index,c)}return this.index=c,this.push({data:b,meta:{percent:this.max?this.index/this.max*100:0}})},b.exports=d},{"../utils":32,"./GenericWorker":28}],28:[function(a,b,c){"use strict";function 
d(a){this.name=a||"default",this.streamInfo={},this.generatedError=null,this.extraStreamInfo={},this.isPaused=!0,this.isFinished=!1,this.isLocked=!1,this._listeners={data:[],end:[],error:[]},this.previous=null}d.prototype={push:function(a){this.emit("data",a)},end:function(){if(this.isFinished)return!1;this.flush();try{this.emit("end"),this.cleanUp(),this.isFinished=!0}catch(a){this.emit("error",a)}return!0},error:function(a){return!this.isFinished&&(this.isPaused?this.generatedError=a:(this.isFinished=!0,this.emit("error",a),this.previous&&this.previous.error(a),this.cleanUp()),!0)},on:function(a,b){return this._listeners[a].push(b),this},cleanUp:function(){this.streamInfo=this.generatedError=this.extraStreamInfo=null,this._listeners=[]},emit:function(a,b){if(this._listeners[a])for(var c=0;c<this._listeners[a].length;c++)this._listeners[a][c].call(this,b)},pipe:function(a){return a.registerPrevious(this)},registerPrevious:function(a){if(this.isLocked)throw new Error("The stream '"+this+"' has already been used.");this.streamInfo=a.streamInfo,this.mergeStreamInfo(),this.previous=a;var b=this;return a.on("data",function(a){b.processChunk(a)}),a.on("end",function(){b.end()}),a.on("error",function(a){b.error(a)}),this},pause:function(){return!this.isPaused&&!this.isFinished&&(this.isPaused=!0,this.previous&&this.previous.pause(),!0)},resume:function(){if(!this.isPaused||this.isFinished)return!1;this.isPaused=!1;var a=!1;return this.generatedError&&(this.error(this.generatedError),a=!0),this.previous&&this.previous.resume(),!a},flush:function(){},processChunk:function(a){this.push(a)},withStreamInfo:function(a,b){return this.extraStreamInfo[a]=b,this.mergeStreamInfo(),this},mergeStreamInfo:function(){for(var a in this.extraStreamInfo)this.extraStreamInfo.hasOwnProperty(a)&&(this.streamInfo[a]=this.extraStreamInfo[a])},lock:function(){if(this.isLocked)throw new Error("The stream '"+this+"' has already been used.");this.isLocked=!0,this.previous&&this.previous.lock()},toString:function(){var a="Worker "+this.name;return this.previous?this.previous+" -> "+a:a}},b.exports=d},{}],29:[function(a,b,c){"use strict";function d(a,b,c){switch(a){case"blob":return h.newBlob(h.transformTo("arraybuffer",b),c);case"base64":return k.encode(b);default:return h.transformTo(a,b)}}function e(a,b){var c,d=0,e=null,f=0;for(c=0;c<b.length;c++)f+=b[c].length;switch(a){case"string":return b.join("");case"array":return Array.prototype.concat.apply([],b);case"uint8array":for(e=new Uint8Array(f),c=0;c<b.length;c++)e.set(b[c],d),d+=b[c].length;return e;case"nodebuffer":return Buffer.concat(b);default:throw new Error("concat : unsupported type '"+a+"'")}}function f(a,b){return new m.Promise(function(c,f){var g=[],h=a._internalType,i=a._outputType,j=a._mimeType;a.on("data",function(a,c){g.push(a),b&&b(c)}).on("error",function(a){g=[],f(a)}).on("end",function(){try{var a=d(i,e(h,g),j);c(a)}catch(b){f(b)}g=[]}).resume()})}function g(a,b,c){var d=b;switch(b){case"blob":case"arraybuffer":d="uint8array";break;case"base64":d="string"}try{this._internalType=d,this._outputType=b,this._mimeType=c,h.checkSupport(d),this._worker=a.pipe(new i(d)),a.lock()}catch(e){this._worker=new j("error"),this._worker.error(e)}}var h=a("../utils"),i=a("./ConvertWorker"),j=a("./GenericWorker"),k=a("../base64"),l=a("../support"),m=a("../external"),n=null;if(l.nodestream)try{n=a("../nodejs/NodejsStreamOutputAdapter")}catch(o){}g.prototype={accumulate:function(a){return f(this,a)},on:function(a,b){var 
c=this;return"data"===a?this._worker.on(a,function(a){b.call(c,a.data,a.meta)}):this._worker.on(a,function(){h.delay(b,arguments,c)}),this},resume:function(){return h.delay(this._worker.resume,[],this._worker),this},pause:function(){return this._worker.pause(),this},toNodejsStream:function(a){if(h.checkSupport("nodestream"),"nodebuffer"!==this._outputType)throw new Error(this._outputType+" is not supported by this method");return new n(this,{objectMode:"nodebuffer"!==this._outputType},a)}},b.exports=g},{"../base64":1,"../external":6,"../nodejs/NodejsStreamOutputAdapter":13,"../support":30,"../utils":32,"./ConvertWorker":24,"./GenericWorker":28}],30:[function(a,b,c){"use strict";if(c.base64=!0,c.array=!0,c.string=!0,c.arraybuffer="undefined"!=typeof ArrayBuffer&&"undefined"!=typeof Uint8Array,c.nodebuffer="undefined"!=typeof Buffer,c.uint8array="undefined"!=typeof Uint8Array,"undefined"==typeof ArrayBuffer)c.blob=!1;else{var d=new ArrayBuffer(0);try{c.blob=0===new Blob([d],{type:"application/zip"}).size}catch(e){try{var f=self.BlobBuilder||self.WebKitBlobBuilder||self.MozBlobBuilder||self.MSBlobBuilder,g=new f;g.append(d),c.blob=0===g.getBlob("application/zip").size}catch(e){c.blob=!1}}}try{c.nodestream=!!a("readable-stream").Readable}catch(e){c.nodestream=!1}},{"readable-stream":16}],31:[function(a,b,c){"use strict";function d(){i.call(this,"utf-8 decode"),this.leftOver=null}function e(){i.call(this,"utf-8 encode")}for(var f=a("./utils"),g=a("./support"),h=a("./nodejsUtils"),i=a("./stream/GenericWorker"),j=new Array(256),k=0;k<256;k++)j[k]=k>=252?6:k>=248?5:k>=240?4:k>=224?3:k>=192?2:1;j[254]=j[254]=1;var l=function(a){var b,c,d,e,f,h=a.length,i=0;for(e=0;e<h;e++)c=a.charCodeAt(e),55296===(64512&c)&&e+1<h&&(d=a.charCodeAt(e+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),e++)),i+=c<128?1:c<2048?2:c<65536?3:4;for(b=g.uint8array?new Uint8Array(i):new Array(i),f=0,e=0;f<i;e++)c=a.charCodeAt(e),55296===(64512&c)&&e+1<h&&(d=a.charCodeAt(e+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),e++)),c<128?b[f++]=c:c<2048?(b[f++]=192|c>>>6,b[f++]=128|63&c):c<65536?(b[f++]=224|c>>>12,b[f++]=128|c>>>6&63,b[f++]=128|63&c):(b[f++]=240|c>>>18,b[f++]=128|c>>>12&63,b[f++]=128|c>>>6&63,b[f++]=128|63&c);return b},m=function(a,b){var c;for(b=b||a.length,b>a.length&&(b=a.length),c=b-1;c>=0&&128===(192&a[c]);)c--;return c<0?b:0===c?b:c+j[a[c]]>b?c:b},n=function(a){var b,c,d,e,g=a.length,h=new Array(2*g);for(c=0,b=0;b<g;)if(d=a[b++],d<128)h[c++]=d;else if(e=j[d],e>4)h[c++]=65533,b+=e-1;else{for(d&=2===e?31:3===e?15:7;e>1&&b<g;)d=d<<6|63&a[b++],e--;e>1?h[c++]=65533:d<65536?h[c++]=d:(d-=65536,h[c++]=55296|d>>10&1023,h[c++]=56320|1023&d)}return h.length!==c&&(h.subarray?h=h.subarray(0,c):h.length=c),f.applyFromCharCode(h)};c.utf8encode=function(a){return g.nodebuffer?h.newBufferFrom(a,"utf-8"):l(a)},c.utf8decode=function(a){return g.nodebuffer?f.transformTo("nodebuffer",a).toString("utf-8"):(a=f.transformTo(g.uint8array?"uint8array":"array",a),n(a))},f.inherits(d,i),d.prototype.processChunk=function(a){var b=f.transformTo(g.uint8array?"uint8array":"array",a.data);if(this.leftOver&&this.leftOver.length){if(g.uint8array){var d=b;b=new Uint8Array(d.length+this.leftOver.length),b.set(this.leftOver,0),b.set(d,this.leftOver.length)}else b=this.leftOver.concat(b);this.leftOver=null}var 
e=m(b),h=b;e!==b.length&&(g.uint8array?(h=b.subarray(0,e),this.leftOver=b.subarray(e,b.length)):(h=b.slice(0,e),this.leftOver=b.slice(e,b.length))),this.push({data:c.utf8decode(h),meta:a.meta})},d.prototype.flush=function(){this.leftOver&&this.leftOver.length&&(this.push({data:c.utf8decode(this.leftOver),meta:{}}),this.leftOver=null)},c.Utf8DecodeWorker=d,f.inherits(e,i),e.prototype.processChunk=function(a){this.push({data:c.utf8encode(a.data),meta:a.meta})},c.Utf8EncodeWorker=e},{"./nodejsUtils":14,"./stream/GenericWorker":28,"./support":30,"./utils":32}],32:[function(a,b,c){"use strict";function d(a){var b=null;return b=i.uint8array?new Uint8Array(a.length):new Array(a.length),f(a,b)}function e(a){return a}function f(a,b){for(var c=0;c<a.length;++c)b[c]=255&a.charCodeAt(c);return b}function g(a){var b=65536,d=c.getTypeOf(a),e=!0;if("uint8array"===d?e=n.applyCanBeUsed.uint8array:"nodebuffer"===d&&(e=n.applyCanBeUsed.nodebuffer),e)for(;b>1;)try{return n.stringifyByChunk(a,d,b)}catch(f){b=Math.floor(b/2)}return n.stringifyByChar(a)}function h(a,b){for(var c=0;c<a.length;c++)b[c]=a[c];
+return b}var i=a("./support"),j=a("./base64"),k=a("./nodejsUtils"),l=a("core-js/library/fn/set-immediate"),m=a("./external");c.newBlob=function(a,b){c.checkSupport("blob");try{return new Blob([a],{type:b})}catch(d){try{var e=self.BlobBuilder||self.WebKitBlobBuilder||self.MozBlobBuilder||self.MSBlobBuilder,f=new e;return f.append(a),f.getBlob(b)}catch(d){throw new Error("Bug : can't construct the Blob.")}}};var n={stringifyByChunk:function(a,b,c){var d=[],e=0,f=a.length;if(f<=c)return String.fromCharCode.apply(null,a);for(;e<f;)"array"===b||"nodebuffer"===b?d.push(String.fromCharCode.apply(null,a.slice(e,Math.min(e+c,f)))):d.push(String.fromCharCode.apply(null,a.subarray(e,Math.min(e+c,f)))),e+=c;return d.join("")},stringifyByChar:function(a){for(var b="",c=0;c<a.length;c++)b+=String.fromCharCode(a[c]);return b},applyCanBeUsed:{uint8array:function(){try{return i.uint8array&&1===String.fromCharCode.apply(null,new Uint8Array(1)).length}catch(a){return!1}}(),nodebuffer:function(){try{return i.nodebuffer&&1===String.fromCharCode.apply(null,k.allocBuffer(1)).length}catch(a){return!1}}()}};c.applyFromCharCode=g;var o={};o.string={string:e,array:function(a){return f(a,new Array(a.length))},arraybuffer:function(a){return o.string.uint8array(a).buffer},uint8array:function(a){return f(a,new Uint8Array(a.length))},nodebuffer:function(a){return f(a,k.allocBuffer(a.length))}},o.array={string:g,array:e,arraybuffer:function(a){return new Uint8Array(a).buffer},uint8array:function(a){return new Uint8Array(a)},nodebuffer:function(a){return k.newBufferFrom(a)}},o.arraybuffer={string:function(a){return g(new Uint8Array(a))},array:function(a){return h(new Uint8Array(a),new Array(a.byteLength))},arraybuffer:e,uint8array:function(a){return new Uint8Array(a)},nodebuffer:function(a){return k.newBufferFrom(new Uint8Array(a))}},o.uint8array={string:g,array:function(a){return h(a,new Array(a.length))},arraybuffer:function(a){return a.buffer},uint8array:e,nodebuffer:function(a){return k.newBufferFrom(a)}},o.nodebuffer={string:g,array:function(a){return h(a,new Array(a.length))},arraybuffer:function(a){return o.nodebuffer.uint8array(a).buffer},uint8array:function(a){return h(a,new Uint8Array(a.length))},nodebuffer:e},c.transformTo=function(a,b){if(b||(b=""),!a)return b;c.checkSupport(a);var d=c.getTypeOf(b),e=o[d][a](b);return e},c.getTypeOf=function(a){return"string"==typeof a?"string":"[object Array]"===Object.prototype.toString.call(a)?"array":i.nodebuffer&&k.isBuffer(a)?"nodebuffer":i.uint8array&&a instanceof Uint8Array?"uint8array":i.arraybuffer&&a instanceof ArrayBuffer?"arraybuffer":void 0},c.checkSupport=function(a){var b=i[a.toLowerCase()];if(!b)throw new Error(a+" is not supported by this platform")},c.MAX_VALUE_16BITS=65535,c.MAX_VALUE_32BITS=-1,c.pretty=function(a){var b,c,d="";for(c=0;c<(a||"").length;c++)b=a.charCodeAt(c),d+="\\x"+(b<16?"0":"")+b.toString(16).toUpperCase();return d},c.delay=function(a,b,c){l(function(){a.apply(c||null,b||[])})},c.inherits=function(a,b){var c=function(){};c.prototype=b.prototype,a.prototype=new c},c.extend=function(){var a,b,c={};for(a=0;a<arguments.length;a++)for(b in arguments[a])arguments[a].hasOwnProperty(b)&&"undefined"==typeof c[b]&&(c[b]=arguments[a][b]);return c},c.prepareContent=function(a,b,e,f,g){var h=m.Promise.resolve(b).then(function(a){var b=i.blob&&(a instanceof Blob||["[object File]","[object Blob]"].indexOf(Object.prototype.toString.call(a))!==-1);return b&&"undefined"!=typeof FileReader?new m.Promise(function(b,c){var d=new 
FileReader;d.onload=function(a){b(a.target.result)},d.onerror=function(a){c(a.target.error)},d.readAsArrayBuffer(a)}):a});return h.then(function(b){var h=c.getTypeOf(b);return h?("arraybuffer"===h?b=c.transformTo("uint8array",b):"string"===h&&(g?b=j.decode(b):e&&f!==!0&&(b=d(b))),b):m.Promise.reject(new Error("Can't read the data of '"+a+"'. Is it in a supported JavaScript type (String, Blob, ArrayBuffer, etc) ?"))})}},{"./base64":1,"./external":6,"./nodejsUtils":14,"./support":30,"core-js/library/fn/set-immediate":36}],33:[function(a,b,c){"use strict";function d(a){this.files=[],this.loadOptions=a}var e=a("./reader/readerFor"),f=a("./utils"),g=a("./signature"),h=a("./zipEntry"),i=(a("./utf8"),a("./support"));d.prototype={checkSignature:function(a){if(!this.reader.readAndCheckSignature(a)){this.reader.index-=4;var b=this.reader.readString(4);throw new Error("Corrupted zip or bug: unexpected signature ("+f.pretty(b)+", expected "+f.pretty(a)+")")}},isSignature:function(a,b){var c=this.reader.index;this.reader.setIndex(a);var d=this.reader.readString(4),e=d===b;return this.reader.setIndex(c),e},readBlockEndOfCentral:function(){this.diskNumber=this.reader.readInt(2),this.diskWithCentralDirStart=this.reader.readInt(2),this.centralDirRecordsOnThisDisk=this.reader.readInt(2),this.centralDirRecords=this.reader.readInt(2),this.centralDirSize=this.reader.readInt(4),this.centralDirOffset=this.reader.readInt(4),this.zipCommentLength=this.reader.readInt(2);var a=this.reader.readData(this.zipCommentLength),b=i.uint8array?"uint8array":"array",c=f.transformTo(b,a);this.zipComment=this.loadOptions.decodeFileName(c)},readBlockZip64EndOfCentral:function(){this.zip64EndOfCentralSize=this.reader.readInt(8),this.reader.skip(4),this.diskNumber=this.reader.readInt(4),this.diskWithCentralDirStart=this.reader.readInt(4),this.centralDirRecordsOnThisDisk=this.reader.readInt(8),this.centralDirRecords=this.reader.readInt(8),this.centralDirSize=this.reader.readInt(8),this.centralDirOffset=this.reader.readInt(8),this.zip64ExtensibleData={};for(var a,b,c,d=this.zip64EndOfCentralSize-44,e=0;e<d;)a=this.reader.readInt(2),b=this.reader.readInt(4),c=this.reader.readData(b),this.zip64ExtensibleData[a]={id:a,length:b,value:c}},readBlockZip64EndOfCentralLocator:function(){if(this.diskWithZip64CentralDirStart=this.reader.readInt(4),this.relativeOffsetEndOfZip64CentralDir=this.reader.readInt(8),this.disksCount=this.reader.readInt(4),this.disksCount>1)throw new Error("Multi-volumes zip are not supported")},readLocalFiles:function(){var a,b;for(a=0;a<this.files.length;a++)b=this.files[a],this.reader.setIndex(b.localHeaderOffset),this.checkSignature(g.LOCAL_FILE_HEADER),b.readLocalPart(this.reader),b.handleUTF8(),b.processAttributes()},readCentralDir:function(){var a;for(this.reader.setIndex(this.centralDirOffset);this.reader.readAndCheckSignature(g.CENTRAL_FILE_HEADER);)a=new h({zip64:this.zip64},this.loadOptions),a.readCentralPart(this.reader),this.files.push(a);if(this.centralDirRecords!==this.files.length&&0!==this.centralDirRecords&&0===this.files.length)throw new Error("Corrupted zip or bug: expected "+this.centralDirRecords+" records in central dir, got "+this.files.length)},readEndOfCentral:function(){var a=this.reader.lastIndexOfSignature(g.CENTRAL_DIRECTORY_END);if(a<0){var b=!this.isSignature(0,g.LOCAL_FILE_HEADER);throw b?new Error("Can't find end of central directory : is this a zip file ? 
If it is, see https://stuk.github.io/jszip/documentation/howto/read_zip.html"):new Error("Corrupted zip: can't find end of central directory")}this.reader.setIndex(a);var c=a;if(this.checkSignature(g.CENTRAL_DIRECTORY_END),this.readBlockEndOfCentral(),this.diskNumber===f.MAX_VALUE_16BITS||this.diskWithCentralDirStart===f.MAX_VALUE_16BITS||this.centralDirRecordsOnThisDisk===f.MAX_VALUE_16BITS||this.centralDirRecords===f.MAX_VALUE_16BITS||this.centralDirSize===f.MAX_VALUE_32BITS||this.centralDirOffset===f.MAX_VALUE_32BITS){if(this.zip64=!0,a=this.reader.lastIndexOfSignature(g.ZIP64_CENTRAL_DIRECTORY_LOCATOR),a<0)throw new Error("Corrupted zip: can't find the ZIP64 end of central directory locator");if(this.reader.setIndex(a),this.checkSignature(g.ZIP64_CENTRAL_DIRECTORY_LOCATOR),this.readBlockZip64EndOfCentralLocator(),!this.isSignature(this.relativeOffsetEndOfZip64CentralDir,g.ZIP64_CENTRAL_DIRECTORY_END)&&(this.relativeOffsetEndOfZip64CentralDir=this.reader.lastIndexOfSignature(g.ZIP64_CENTRAL_DIRECTORY_END),this.relativeOffsetEndOfZip64CentralDir<0))throw new Error("Corrupted zip: can't find the ZIP64 end of central directory");this.reader.setIndex(this.relativeOffsetEndOfZip64CentralDir),this.checkSignature(g.ZIP64_CENTRAL_DIRECTORY_END),this.readBlockZip64EndOfCentral()}var d=this.centralDirOffset+this.centralDirSize;this.zip64&&(d+=20,d+=12+this.zip64EndOfCentralSize);var e=c-d;if(e>0)this.isSignature(c,g.CENTRAL_FILE_HEADER)||(this.reader.zero=e);else if(e<0)throw new Error("Corrupted zip: missing "+Math.abs(e)+" bytes.")},prepareReader:function(a){this.reader=e(a)},load:function(a){this.prepareReader(a),this.readEndOfCentral(),this.readCentralDir(),this.readLocalFiles()}},b.exports=d},{"./reader/readerFor":22,"./signature":23,"./support":30,"./utf8":31,"./utils":32,"./zipEntry":34}],34:[function(a,b,c){"use strict";function d(a,b){this.options=a,this.loadOptions=b}var e=a("./reader/readerFor"),f=a("./utils"),g=a("./compressedObject"),h=a("./crc32"),i=a("./utf8"),j=a("./compressions"),k=a("./support"),l=0,m=3,n=function(a){for(var b in j)if(j.hasOwnProperty(b)&&j[b].magic===a)return j[b];return null};d.prototype={isEncrypted:function(){return 1===(1&this.bitFlag)},useUTF8:function(){return 2048===(2048&this.bitFlag)},readLocalPart:function(a){var b,c;if(a.skip(22),this.fileNameLength=a.readInt(2),c=a.readInt(2),this.fileName=a.readData(this.fileNameLength),a.skip(c),this.compressedSize===-1||this.uncompressedSize===-1)throw new Error("Bug or corrupted zip : didn't get enough informations from the central directory (compressedSize === -1 || uncompressedSize === -1)");if(b=n(this.compressionMethod),null===b)throw new Error("Corrupted zip : compression "+f.pretty(this.compressionMethod)+" unknown (inner file : "+f.transformTo("string",this.fileName)+")");this.decompressed=new g(this.compressedSize,this.uncompressedSize,this.crc32,b,a.readData(this.compressedSize))},readCentralPart:function(a){this.versionMadeBy=a.readInt(2),a.skip(2),this.bitFlag=a.readInt(2),this.compressionMethod=a.readString(2),this.date=a.readDate(),this.crc32=a.readInt(4),this.compressedSize=a.readInt(4),this.uncompressedSize=a.readInt(4);var b=a.readInt(2);if(this.extraFieldsLength=a.readInt(2),this.fileCommentLength=a.readInt(2),this.diskNumberStart=a.readInt(2),this.internalFileAttributes=a.readInt(2),this.externalFileAttributes=a.readInt(4),this.localHeaderOffset=a.readInt(4),this.isEncrypted())throw new Error("Encrypted zip are not 
supported");a.skip(b),this.readExtraFields(a),this.parseZIP64ExtraField(a),this.fileComment=a.readData(this.fileCommentLength)},processAttributes:function(){this.unixPermissions=null,this.dosPermissions=null;var a=this.versionMadeBy>>8;this.dir=!!(16&this.externalFileAttributes),a===l&&(this.dosPermissions=63&this.externalFileAttributes),a===m&&(this.unixPermissions=this.externalFileAttributes>>16&65535),this.dir||"/"!==this.fileNameStr.slice(-1)||(this.dir=!0)},parseZIP64ExtraField:function(a){if(this.extraFields[1]){var b=e(this.extraFields[1].value);this.uncompressedSize===f.MAX_VALUE_32BITS&&(this.uncompressedSize=b.readInt(8)),this.compressedSize===f.MAX_VALUE_32BITS&&(this.compressedSize=b.readInt(8)),this.localHeaderOffset===f.MAX_VALUE_32BITS&&(this.localHeaderOffset=b.readInt(8)),this.diskNumberStart===f.MAX_VALUE_32BITS&&(this.diskNumberStart=b.readInt(4))}},readExtraFields:function(a){var b,c,d,e=a.index+this.extraFieldsLength;for(this.extraFields||(this.extraFields={});a.index<e;)b=a.readInt(2),c=a.readInt(2),d=a.readData(c),this.extraFields[b]={id:b,length:c,value:d}},handleUTF8:function(){var a=k.uint8array?"uint8array":"array";if(this.useUTF8())this.fileNameStr=i.utf8decode(this.fileName),this.fileCommentStr=i.utf8decode(this.fileComment);else{var b=this.findExtraFieldUnicodePath();if(null!==b)this.fileNameStr=b;else{var c=f.transformTo(a,this.fileName);this.fileNameStr=this.loadOptions.decodeFileName(c)}var d=this.findExtraFieldUnicodeComment();if(null!==d)this.fileCommentStr=d;else{var e=f.transformTo(a,this.fileComment);this.fileCommentStr=this.loadOptions.decodeFileName(e)}}},findExtraFieldUnicodePath:function(){var a=this.extraFields[28789];if(a){var b=e(a.value);return 1!==b.readInt(1)?null:h(this.fileName)!==b.readInt(4)?null:i.utf8decode(b.readData(a.length-5))}return null},findExtraFieldUnicodeComment:function(){var a=this.extraFields[25461];if(a){var b=e(a.value);return 1!==b.readInt(1)?null:h(this.fileComment)!==b.readInt(4)?null:i.utf8decode(b.readData(a.length-5))}return null}},b.exports=d},{"./compressedObject":2,"./compressions":3,"./crc32":4,"./reader/readerFor":22,"./support":30,"./utf8":31,"./utils":32}],35:[function(a,b,c){"use strict";var d=a("./stream/StreamHelper"),e=a("./stream/DataWorker"),f=a("./utf8"),g=a("./compressedObject"),h=a("./stream/GenericWorker"),i=function(a,b,c){this.name=a,this.dir=c.dir,this.date=c.date,this.comment=c.comment,this.unixPermissions=c.unixPermissions,this.dosPermissions=c.dosPermissions,this._data=b,this._dataBinary=c.binary,this.options={compression:c.compression,compressionOptions:c.compressionOptions}};i.prototype={internalStream:function(a){var b=null,c="string";try{if(!a)throw new Error("No output type specified.");c=a.toLowerCase();var e="string"===c||"text"===c;"binarystring"!==c&&"text"!==c||(c="string"),b=this._decompressWorker();var g=!this._dataBinary;g&&!e&&(b=b.pipe(new f.Utf8EncodeWorker)),!g&&e&&(b=b.pipe(new f.Utf8DecodeWorker))}catch(i){b=new h("error"),b.error(i)}return new d(b,c,"")},async:function(a,b){return this.internalStream(a).accumulate(b)},nodeStream:function(a,b){return this.internalStream(a||"nodebuffer").toNodejsStream(b)},_compressWorker:function(a,b){if(this._data instanceof g&&this._data.compression.magic===a.magic)return this._data.getCompressedWorker();var c=this._decompressWorker();return this._dataBinary||(c=c.pipe(new f.Utf8EncodeWorker)),g.createWorkerFrom(c,a,b)},_decompressWorker:function(){return this._data instanceof g?this._data.getContentWorker():this._data instanceof 
h?this._data:new e(this._data)}};for(var j=["asText","asBinary","asNodeBuffer","asUint8Array","asArrayBuffer"],k=function(){throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.")},l=0;l<j.length;l++)i.prototype[j[l]]=k;b.exports=i},{"./compressedObject":2,"./stream/DataWorker":27,"./stream/GenericWorker":28,"./stream/StreamHelper":29,"./utf8":31}],36:[function(a,b,c){a("../modules/web.immediate"),b.exports=a("../modules/_core").setImmediate},{"../modules/_core":40,"../modules/web.immediate":56}],37:[function(a,b,c){b.exports=function(a){if("function"!=typeof a)throw TypeError(a+" is not a function!");return a}},{}],38:[function(a,b,c){var d=a("./_is-object");b.exports=function(a){if(!d(a))throw TypeError(a+" is not an object!");return a}},{"./_is-object":51}],39:[function(a,b,c){var d={}.toString;b.exports=function(a){return d.call(a).slice(8,-1)}},{}],40:[function(a,b,c){var d=b.exports={version:"2.3.0"};"number"==typeof __e&&(__e=d)},{}],41:[function(a,b,c){var d=a("./_a-function");b.exports=function(a,b,c){if(d(a),void 0===b)return a;switch(c){case 1:return function(c){return a.call(b,c)};case 2:return function(c,d){return a.call(b,c,d)};case 3:return function(c,d,e){return a.call(b,c,d,e)}}return function(){return a.apply(b,arguments)}}},{"./_a-function":37}],42:[function(a,b,c){b.exports=!a("./_fails")(function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a})},{"./_fails":45}],43:[function(a,b,c){var d=a("./_is-object"),e=a("./_global").document,f=d(e)&&d(e.createElement);b.exports=function(a){return f?e.createElement(a):{}}},{"./_global":46,"./_is-object":51}],44:[function(a,b,c){var d=a("./_global"),e=a("./_core"),f=a("./_ctx"),g=a("./_hide"),h="prototype",i=function(a,b,c){var j,k,l,m=a&i.F,n=a&i.G,o=a&i.S,p=a&i.P,q=a&i.B,r=a&i.W,s=n?e:e[b]||(e[b]={}),t=s[h],u=n?d:o?d[b]:(d[b]||{})[h];n&&(c=b);for(j in c)k=!m&&u&&void 0!==u[j],k&&j in s||(l=k?u[j]:c[j],s[j]=n&&"function"!=typeof u[j]?c[j]:q&&k?f(l,d):r&&u[j]==l?function(a){var b=function(b,c,d){if(this instanceof a){switch(arguments.length){case 0:return new a;case 1:return new a(b);case 2:return new a(b,c)}return new a(b,c,d)}return a.apply(this,arguments)};return b[h]=a[h],b}(l):p&&"function"==typeof l?f(Function.call,l):l,p&&((s.virtual||(s.virtual={}))[j]=l,a&i.R&&t&&!t[j]&&g(t,j,l)))};i.F=1,i.G=2,i.S=4,i.P=8,i.B=16,i.W=32,i.U=64,i.R=128,b.exports=i},{"./_core":40,"./_ctx":41,"./_global":46,"./_hide":47}],45:[function(a,b,c){b.exports=function(a){try{return!!a()}catch(b){return!0}}},{}],46:[function(a,b,c){var d=b.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=d)},{}],47:[function(a,b,c){var d=a("./_object-dp"),e=a("./_property-desc");b.exports=a("./_descriptors")?function(a,b,c){return d.f(a,b,e(1,c))}:function(a,b,c){return a[b]=c,a}},{"./_descriptors":42,"./_object-dp":52,"./_property-desc":53}],48:[function(a,b,c){b.exports=a("./_global").document&&document.documentElement},{"./_global":46}],49:[function(a,b,c){b.exports=!a("./_descriptors")&&!a("./_fails")(function(){return 7!=Object.defineProperty(a("./_dom-create")("div"),"a",{get:function(){return 7}}).a})},{"./_descriptors":42,"./_dom-create":43,"./_fails":45}],50:[function(a,b,c){b.exports=function(a,b,c){var d=void 0===c;switch(b.length){case 0:return d?a():a.call(c);case 1:return d?a(b[0]):a.call(c,b[0]);case 2:return d?a(b[0],b[1]):a.call(c,b[0],b[1]);case 3:return 
d?a(b[0],b[1],b[2]):a.call(c,b[0],b[1],b[2]);case 4:return d?a(b[0],b[1],b[2],b[3]):a.call(c,b[0],b[1],b[2],b[3])}return a.apply(c,b)}},{}],51:[function(a,b,c){b.exports=function(a){return"object"==typeof a?null!==a:"function"==typeof a}},{}],52:[function(a,b,c){var d=a("./_an-object"),e=a("./_ie8-dom-define"),f=a("./_to-primitive"),g=Object.defineProperty;c.f=a("./_descriptors")?Object.defineProperty:function(a,b,c){if(d(a),b=f(b,!0),d(c),e)try{return g(a,b,c)}catch(h){}if("get"in c||"set"in c)throw TypeError("Accessors not supported!");return"value"in c&&(a[b]=c.value),a}},{"./_an-object":38,"./_descriptors":42,"./_ie8-dom-define":49,"./_to-primitive":55}],53:[function(a,b,c){b.exports=function(a,b){return{enumerable:!(1&a),configurable:!(2&a),writable:!(4&a),value:b}}},{}],54:[function(a,b,c){var d,e,f,g=a("./_ctx"),h=a("./_invoke"),i=a("./_html"),j=a("./_dom-create"),k=a("./_global"),l=k.process,m=k.setImmediate,n=k.clearImmediate,o=k.MessageChannel,p=0,q={},r="onreadystatechange",s=function(){var a=+this;if(q.hasOwnProperty(a)){var b=q[a];delete q[a],b()}},t=function(a){s.call(a.data)};m&&n||(m=function(a){for(var b=[],c=1;arguments.length>c;)b.push(arguments[c++]);return q[++p]=function(){h("function"==typeof a?a:Function(a),b)},d(p),p},n=function(a){delete q[a]},"process"==a("./_cof")(l)?d=function(a){l.nextTick(g(s,a,1))}:o?(e=new o,f=e.port2,e.port1.onmessage=t,d=g(f.postMessage,f,1)):k.addEventListener&&"function"==typeof postMessage&&!k.importScripts?(d=function(a){k.postMessage(a+"","*")},k.addEventListener("message",t,!1)):d=r in j("script")?function(a){i.appendChild(j("script"))[r]=function(){i.removeChild(this),s.call(a)}}:function(a){setTimeout(g(s,a,1),0)}),b.exports={set:m,clear:n}},{"./_cof":39,"./_ctx":41,"./_dom-create":43,"./_global":46,"./_html":48,"./_invoke":50}],55:[function(a,b,c){var d=a("./_is-object");b.exports=function(a,b){if(!d(a))return a;var c,e;if(b&&"function"==typeof(c=a.toString)&&!d(e=c.call(a)))return e;if("function"==typeof(c=a.valueOf)&&!d(e=c.call(a)))return e;if(!b&&"function"==typeof(c=a.toString)&&!d(e=c.call(a)))return e;throw TypeError("Can't convert object to primitive value")}},{"./_is-object":51}],56:[function(a,b,c){var d=a("./_export"),e=a("./_task");d(d.G+d.B,{setImmediate:e.set,clearImmediate:e.clear})},{"./_export":44,"./_task":54}],57:[function(a,b,c){(function(a){"use strict";function c(){k=!0;for(var a,b,c=l.length;c;){for(b=l,l=[],a=-1;++a<c;)b[a]();c=l.length}k=!1}function d(a){1!==l.push(a)||k||e()}var e,f=a.MutationObserver||a.WebKitMutationObserver;if(f){var g=0,h=new f(c),i=a.document.createTextNode("");h.observe(i,{characterData:!0}),e=function(){i.data=g=++g%2}}else if(a.setImmediate||"undefined"==typeof a.MessageChannel)e="document"in a&&"onreadystatechange"in a.document.createElement("script")?function(){var b=a.document.createElement("script");b.onreadystatechange=function(){c(),b.onreadystatechange=null,b.parentNode.removeChild(b),b=null},a.document.documentElement.appendChild(b)}:function(){setTimeout(c,0)};else{var j=new a.MessageChannel;j.port1.onmessage=c,e=function(){j.port2.postMessage(0)}}var k,l=[];b.exports=d}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}],58:[function(a,b,c){"use strict";function d(){}function e(a){if("function"!=typeof a)throw new TypeError("resolver must be a function");this.state=s,this.queue=[],this.outcome=void 0,a!==d&&i(this,a)}function f(a,b,c){this.promise=a,"function"==typeof 
b&&(this.onFulfilled=b,this.callFulfilled=this.otherCallFulfilled),"function"==typeof c&&(this.onRejected=c,this.callRejected=this.otherCallRejected)}function g(a,b,c){o(function(){var d;try{d=b(c)}catch(e){return p.reject(a,e)}d===a?p.reject(a,new TypeError("Cannot resolve promise with itself")):p.resolve(a,d)})}function h(a){var b=a&&a.then;if(a&&("object"==typeof a||"function"==typeof a)&&"function"==typeof b)return function(){b.apply(a,arguments)}}function i(a,b){function c(b){f||(f=!0,p.reject(a,b))}function d(b){f||(f=!0,p.resolve(a,b))}function e(){b(d,c)}var f=!1,g=j(e);"error"===g.status&&c(g.value)}function j(a,b){var c={};try{c.value=a(b),c.status="success"}catch(d){c.status="error",c.value=d}return c}function k(a){return a instanceof this?a:p.resolve(new this(d),a)}function l(a){var b=new this(d);return p.reject(b,a)}function m(a){function b(a,b){function d(a){g[b]=a,++h!==e||f||(f=!0,p.resolve(j,g))}c.resolve(a).then(d,function(a){f||(f=!0,p.reject(j,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var g=new Array(e),h=0,i=-1,j=new this(d);++i<e;)b(a[i],i);return j}function n(a){function b(a){c.resolve(a).then(function(a){f||(f=!0,p.resolve(h,a))},function(a){f||(f=!0,p.reject(h,a))})}var c=this;if("[object Array]"!==Object.prototype.toString.call(a))return this.reject(new TypeError("must be an array"));var e=a.length,f=!1;if(!e)return this.resolve([]);for(var g=-1,h=new this(d);++g<e;)b(a[g]);return h}var o=a("immediate"),p={},q=["REJECTED"],r=["FULFILLED"],s=["PENDING"];b.exports=e,e.prototype["catch"]=function(a){return this.then(null,a)},e.prototype.then=function(a,b){if("function"!=typeof a&&this.state===r||"function"!=typeof b&&this.state===q)return this;var c=new this.constructor(d);if(this.state!==s){var e=this.state===r?a:b;g(c,e,this.outcome)}else this.queue.push(new f(c,a,b));return c},f.prototype.callFulfilled=function(a){p.resolve(this.promise,a)},f.prototype.otherCallFulfilled=function(a){g(this.promise,this.onFulfilled,a)},f.prototype.callRejected=function(a){p.reject(this.promise,a)},f.prototype.otherCallRejected=function(a){g(this.promise,this.onRejected,a)},p.resolve=function(a,b){var c=j(h,b);if("error"===c.status)return p.reject(a,c.value);var d=c.value;if(d)i(a,d);else{a.state=r,a.outcome=b;for(var e=-1,f=a.queue.length;++e<f;)a.queue[e].callFulfilled(b)}return a},p.reject=function(a,b){a.state=q,a.outcome=b;for(var c=-1,d=a.queue.length;++c<d;)a.queue[c].callRejected(b);return a},e.resolve=k,e.reject=l,e.all=m,e.race=n},{immediate:57}],59:[function(a,b,c){"use strict";var d=a("./lib/utils/common").assign,e=a("./lib/deflate"),f=a("./lib/inflate"),g=a("./lib/zlib/constants"),h={};d(h,e,f,g),b.exports=h},{"./lib/deflate":60,"./lib/inflate":61,"./lib/utils/common":62,"./lib/zlib/constants":65}],60:[function(a,b,c){"use strict";function d(a){if(!(this instanceof d))return new d(a);this.options=i.assign({level:s,method:u,chunkSize:16384,windowBits:15,memLevel:8,strategy:t,to:""},a||{});var b=this.options;b.raw&&b.windowBits>0?b.windowBits=-b.windowBits:b.gzip&&b.windowBits>0&&b.windowBits<16&&(b.windowBits+=16),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new l,this.strm.avail_out=0;var c=h.deflateInit2(this.strm,b.level,b.method,b.windowBits,b.memLevel,b.strategy);if(c!==p)throw new Error(k[c]);if(b.header&&h.deflateSetHeader(this.strm,b.header),b.dictionary){var e;if(e="string"==typeof 
b.dictionary?j.string2buf(b.dictionary):"[object ArrayBuffer]"===m.call(b.dictionary)?new Uint8Array(b.dictionary):b.dictionary,c=h.deflateSetDictionary(this.strm,e),c!==p)throw new Error(k[c]);this._dict_set=!0}}function e(a,b){var c=new d(b);if(c.push(a,!0),c.err)throw c.msg||k[c.err];return c.result}function f(a,b){return b=b||{},b.raw=!0,e(a,b)}function g(a,b){return b=b||{},b.gzip=!0,e(a,b)}var h=a("./zlib/deflate"),i=a("./utils/common"),j=a("./utils/strings"),k=a("./zlib/messages"),l=a("./zlib/zstream"),m=Object.prototype.toString,n=0,o=4,p=0,q=1,r=2,s=-1,t=0,u=8;d.prototype.push=function(a,b){var c,d,e=this.strm,f=this.options.chunkSize;if(this.ended)return!1;d=b===~~b?b:b===!0?o:n,"string"==typeof a?e.input=j.string2buf(a):"[object ArrayBuffer]"===m.call(a)?e.input=new Uint8Array(a):e.input=a,e.next_in=0,e.avail_in=e.input.length;do{if(0===e.avail_out&&(e.output=new i.Buf8(f),e.next_out=0,e.avail_out=f),c=h.deflate(e,d),c!==q&&c!==p)return this.onEnd(c),this.ended=!0,!1;0!==e.avail_out&&(0!==e.avail_in||d!==o&&d!==r)||("string"===this.options.to?this.onData(j.buf2binstring(i.shrinkBuf(e.output,e.next_out))):this.onData(i.shrinkBuf(e.output,e.next_out)))}while((e.avail_in>0||0===e.avail_out)&&c!==q);return d===o?(c=h.deflateEnd(this.strm),this.onEnd(c),this.ended=!0,c===p):d!==r||(this.onEnd(p),e.avail_out=0,!0)},d.prototype.onData=function(a){this.chunks.push(a)},d.prototype.onEnd=function(a){a===p&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=i.flattenChunks(this.chunks)),this.chunks=[],this.err=a,this.msg=this.strm.msg},c.Deflate=d,c.deflate=e,c.deflateRaw=f,c.gzip=g},{"./utils/common":62,"./utils/strings":63,"./zlib/deflate":67,"./zlib/messages":72,"./zlib/zstream":74}],61:[function(a,b,c){"use strict";function d(a){if(!(this instanceof d))return new d(a);this.options=h.assign({chunkSize:16384,windowBits:0,to:""},a||{});var b=this.options;b.raw&&b.windowBits>=0&&b.windowBits<16&&(b.windowBits=-b.windowBits,0===b.windowBits&&(b.windowBits=-15)),!(b.windowBits>=0&&b.windowBits<16)||a&&a.windowBits||(b.windowBits+=32),b.windowBits>15&&b.windowBits<48&&0===(15&b.windowBits)&&(b.windowBits|=15),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new l,this.strm.avail_out=0;var c=g.inflateInit2(this.strm,b.windowBits);if(c!==j.Z_OK)throw new Error(k[c]);this.header=new m,g.inflateGetHeader(this.strm,this.header)}function e(a,b){var c=new d(b);if(c.push(a,!0),c.err)throw c.msg||k[c.err];return c.result}function f(a,b){return b=b||{},b.raw=!0,e(a,b)}var g=a("./zlib/inflate"),h=a("./utils/common"),i=a("./utils/strings"),j=a("./zlib/constants"),k=a("./zlib/messages"),l=a("./zlib/zstream"),m=a("./zlib/gzheader"),n=Object.prototype.toString;d.prototype.push=function(a,b){var c,d,e,f,k,l,m=this.strm,o=this.options.chunkSize,p=this.options.dictionary,q=!1;if(this.ended)return!1;d=b===~~b?b:b===!0?j.Z_FINISH:j.Z_NO_FLUSH,"string"==typeof a?m.input=i.binstring2buf(a):"[object ArrayBuffer]"===n.call(a)?m.input=new Uint8Array(a):m.input=a,m.next_in=0,m.avail_in=m.input.length;do{if(0===m.avail_out&&(m.output=new h.Buf8(o),m.next_out=0,m.avail_out=o),c=g.inflate(m,j.Z_NO_FLUSH),c===j.Z_NEED_DICT&&p&&(l="string"==typeof p?i.string2buf(p):"[object ArrayBuffer]"===n.call(p)?new Uint8Array(p):p,c=g.inflateSetDictionary(this.strm,l)),c===j.Z_BUF_ERROR&&q===!0&&(c=j.Z_OK,q=!1),c!==j.Z_STREAM_END&&c!==j.Z_OK)return 
this.onEnd(c),this.ended=!0,!1;m.next_out&&(0!==m.avail_out&&c!==j.Z_STREAM_END&&(0!==m.avail_in||d!==j.Z_FINISH&&d!==j.Z_SYNC_FLUSH)||("string"===this.options.to?(e=i.utf8border(m.output,m.next_out),f=m.next_out-e,k=i.buf2string(m.output,e),m.next_out=f,m.avail_out=o-f,f&&h.arraySet(m.output,m.output,e,f,0),this.onData(k)):this.onData(h.shrinkBuf(m.output,m.next_out)))),0===m.avail_in&&0===m.avail_out&&(q=!0)}while((m.avail_in>0||0===m.avail_out)&&c!==j.Z_STREAM_END);return c===j.Z_STREAM_END&&(d=j.Z_FINISH),d===j.Z_FINISH?(c=g.inflateEnd(this.strm),this.onEnd(c),this.ended=!0,c===j.Z_OK):d!==j.Z_SYNC_FLUSH||(this.onEnd(j.Z_OK),m.avail_out=0,!0)},d.prototype.onData=function(a){this.chunks.push(a)},d.prototype.onEnd=function(a){a===j.Z_OK&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=h.flattenChunks(this.chunks)),this.chunks=[],this.err=a,this.msg=this.strm.msg},c.Inflate=d,c.inflate=e,c.inflateRaw=f,c.ungzip=e},{"./utils/common":62,"./utils/strings":63,"./zlib/constants":65,"./zlib/gzheader":68,"./zlib/inflate":70,"./zlib/messages":72,"./zlib/zstream":74}],62:[function(a,b,c){"use strict";var d="undefined"!=typeof Uint8Array&&"undefined"!=typeof Uint16Array&&"undefined"!=typeof Int32Array;c.assign=function(a){for(var b=Array.prototype.slice.call(arguments,1);b.length;){var c=b.shift();if(c){if("object"!=typeof c)throw new TypeError(c+"must be non-object");for(var d in c)c.hasOwnProperty(d)&&(a[d]=c[d])}}return a},c.shrinkBuf=function(a,b){return a.length===b?a:a.subarray?a.subarray(0,b):(a.length=b,a)};var e={arraySet:function(a,b,c,d,e){if(b.subarray&&a.subarray)return void a.set(b.subarray(c,c+d),e);for(var f=0;f<d;f++)a[e+f]=b[c+f]},flattenChunks:function(a){var b,c,d,e,f,g;for(d=0,b=0,c=a.length;b<c;b++)d+=a[b].length;for(g=new Uint8Array(d),e=0,b=0,c=a.length;b<c;b++)f=a[b],g.set(f,e),e+=f.length;return g}},f={arraySet:function(a,b,c,d,e){for(var f=0;f<d;f++)a[e+f]=b[c+f]},flattenChunks:function(a){return[].concat.apply([],a)}};c.setTyped=function(a){a?(c.Buf8=Uint8Array,c.Buf16=Uint16Array,c.Buf32=Int32Array,c.assign(c,e)):(c.Buf8=Array,c.Buf16=Array,c.Buf32=Array,c.assign(c,f))},c.setTyped(d)},{}],63:[function(a,b,c){"use strict";function d(a,b){if(b<65537&&(a.subarray&&g||!a.subarray&&f))return String.fromCharCode.apply(null,e.shrinkBuf(a,b));for(var c="",d=0;d<b;d++)c+=String.fromCharCode(a[d]);return c}var e=a("./common"),f=!0,g=!0;try{String.fromCharCode.apply(null,[0])}catch(h){f=!1}try{String.fromCharCode.apply(null,new Uint8Array(1))}catch(h){g=!1}for(var i=new e.Buf8(256),j=0;j<256;j++)i[j]=j>=252?6:j>=248?5:j>=240?4:j>=224?3:j>=192?2:1;i[254]=i[254]=1,c.string2buf=function(a){var b,c,d,f,g,h=a.length,i=0;for(f=0;f<h;f++)c=a.charCodeAt(f),55296===(64512&c)&&f+1<h&&(d=a.charCodeAt(f+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),f++)),i+=c<128?1:c<2048?2:c<65536?3:4;for(b=new e.Buf8(i),g=0,f=0;g<i;f++)c=a.charCodeAt(f),55296===(64512&c)&&f+1<h&&(d=a.charCodeAt(f+1),56320===(64512&d)&&(c=65536+(c-55296<<10)+(d-56320),f++)),c<128?b[g++]=c:c<2048?(b[g++]=192|c>>>6,b[g++]=128|63&c):c<65536?(b[g++]=224|c>>>12,b[g++]=128|c>>>6&63,b[g++]=128|63&c):(b[g++]=240|c>>>18,b[g++]=128|c>>>12&63,b[g++]=128|c>>>6&63,b[g++]=128|63&c);return b},c.buf2binstring=function(a){return d(a,a.length)},c.binstring2buf=function(a){for(var b=new e.Buf8(a.length),c=0,d=b.length;c<d;c++)b[c]=a.charCodeAt(c);return b},c.buf2string=function(a,b){var c,e,f,g,h=b||a.length,j=new Array(2*h);for(e=0,c=0;c<h;)if(f=a[c++],f<128)j[e++]=f;else 
if(g=i[f],g>4)j[e++]=65533,c+=g-1;else{for(f&=2===g?31:3===g?15:7;g>1&&c<h;)f=f<<6|63&a[c++],g--;g>1?j[e++]=65533:f<65536?j[e++]=f:(f-=65536,j[e++]=55296|f>>10&1023,j[e++]=56320|1023&f)}return d(j,e)},c.utf8border=function(a,b){var c;for(b=b||a.length,b>a.length&&(b=a.length),c=b-1;c>=0&&128===(192&a[c]);)c--;return c<0?b:0===c?b:c+i[a[c]]>b?c:b}},{"./common":62}],64:[function(a,b,c){"use strict";function d(a,b,c,d){for(var e=65535&a|0,f=a>>>16&65535|0,g=0;0!==c;){g=c>2e3?2e3:c,c-=g;do e=e+b[d++]|0,f=f+e|0;while(--g);e%=65521,f%=65521}return e|f<<16|0;
+}b.exports=d},{}],65:[function(a,b,c){"use strict";b.exports={Z_NO_FLUSH:0,Z_PARTIAL_FLUSH:1,Z_SYNC_FLUSH:2,Z_FULL_FLUSH:3,Z_FINISH:4,Z_BLOCK:5,Z_TREES:6,Z_OK:0,Z_STREAM_END:1,Z_NEED_DICT:2,Z_ERRNO:-1,Z_STREAM_ERROR:-2,Z_DATA_ERROR:-3,Z_BUF_ERROR:-5,Z_NO_COMPRESSION:0,Z_BEST_SPEED:1,Z_BEST_COMPRESSION:9,Z_DEFAULT_COMPRESSION:-1,Z_FILTERED:1,Z_HUFFMAN_ONLY:2,Z_RLE:3,Z_FIXED:4,Z_DEFAULT_STRATEGY:0,Z_BINARY:0,Z_TEXT:1,Z_UNKNOWN:2,Z_DEFLATED:8}},{}],66:[function(a,b,c){"use strict";function d(){for(var a,b=[],c=0;c<256;c++){a=c;for(var d=0;d<8;d++)a=1&a?3988292384^a>>>1:a>>>1;b[c]=a}return b}function e(a,b,c,d){var e=f,g=d+c;a^=-1;for(var h=d;h<g;h++)a=a>>>8^e[255&(a^b[h])];return a^-1}var f=d();b.exports=e},{}],67:[function(a,b,c){"use strict";function d(a,b){return a.msg=I[b],b}function e(a){return(a<<1)-(a>4?9:0)}function f(a){for(var b=a.length;--b>=0;)a[b]=0}function g(a){var b=a.state,c=b.pending;c>a.avail_out&&(c=a.avail_out),0!==c&&(E.arraySet(a.output,b.pending_buf,b.pending_out,c,a.next_out),a.next_out+=c,b.pending_out+=c,a.total_out+=c,a.avail_out-=c,b.pending-=c,0===b.pending&&(b.pending_out=0))}function h(a,b){F._tr_flush_block(a,a.block_start>=0?a.block_start:-1,a.strstart-a.block_start,b),a.block_start=a.strstart,g(a.strm)}function i(a,b){a.pending_buf[a.pending++]=b}function j(a,b){a.pending_buf[a.pending++]=b>>>8&255,a.pending_buf[a.pending++]=255&b}function k(a,b,c,d){var e=a.avail_in;return e>d&&(e=d),0===e?0:(a.avail_in-=e,E.arraySet(b,a.input,a.next_in,e,c),1===a.state.wrap?a.adler=G(a.adler,b,e,c):2===a.state.wrap&&(a.adler=H(a.adler,b,e,c)),a.next_in+=e,a.total_in+=e,e)}function l(a,b){var c,d,e=a.max_chain_length,f=a.strstart,g=a.prev_length,h=a.nice_match,i=a.strstart>a.w_size-la?a.strstart-(a.w_size-la):0,j=a.window,k=a.w_mask,l=a.prev,m=a.strstart+ka,n=j[f+g-1],o=j[f+g];a.prev_length>=a.good_match&&(e>>=2),h>a.lookahead&&(h=a.lookahead);do if(c=b,j[c+g]===o&&j[c+g-1]===n&&j[c]===j[f]&&j[++c]===j[f+1]){f+=2,c++;do;while(j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&j[++f]===j[++c]&&f<m);if(d=ka-(m-f),f=m-ka,d>g){if(a.match_start=b,g=d,d>=h)break;n=j[f+g-1],o=j[f+g]}}while((b=l[b&k])>i&&0!==--e);return g<=a.lookahead?g:a.lookahead}function m(a){var b,c,d,e,f,g=a.w_size;do{if(e=a.window_size-a.lookahead-a.strstart,a.strstart>=g+(g-la)){E.arraySet(a.window,a.window,g,g,0),a.match_start-=g,a.strstart-=g,a.block_start-=g,c=a.hash_size,b=c;do d=a.head[--b],a.head[b]=d>=g?d-g:0;while(--c);c=g,b=c;do d=a.prev[--b],a.prev[b]=d>=g?d-g:0;while(--c);e+=g}if(0===a.strm.avail_in)break;if(c=k(a.strm,a.window,a.strstart+a.lookahead,e),a.lookahead+=c,a.lookahead+a.insert>=ja)for(f=a.strstart-a.insert,a.ins_h=a.window[f],a.ins_h=(a.ins_h<<a.hash_shift^a.window[f+1])&a.hash_mask;a.insert&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[f+ja-1])&a.hash_mask,a.prev[f&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=f,f++,a.insert--,!(a.lookahead+a.insert<ja)););}while(a.lookahead<la&&0!==a.strm.avail_in)}function n(a,b){var c=65535;for(c>a.pending_buf_size-5&&(c=a.pending_buf_size-5);;){if(a.lookahead<=1){if(m(a),0===a.lookahead&&b===J)return ua;if(0===a.lookahead)break}a.strstart+=a.lookahead,a.lookahead=0;var d=a.block_start+c;if((0===a.strstart||a.strstart>=d)&&(a.lookahead=a.strstart-d,a.strstart=d,h(a,!1),0===a.strm.avail_out))return ua;if(a.strstart-a.block_start>=a.w_size-la&&(h(a,!1),0===a.strm.avail_out))return ua}return 
a.insert=0,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.strstart>a.block_start&&(h(a,!1),0===a.strm.avail_out)?ua:ua}function o(a,b){for(var c,d;;){if(a.lookahead<la){if(m(a),a.lookahead<la&&b===J)return ua;if(0===a.lookahead)break}if(c=0,a.lookahead>=ja&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart),0!==c&&a.strstart-c<=a.w_size-la&&(a.match_length=l(a,c)),a.match_length>=ja)if(d=F._tr_tally(a,a.strstart-a.match_start,a.match_length-ja),a.lookahead-=a.match_length,a.match_length<=a.max_lazy_match&&a.lookahead>=ja){a.match_length--;do a.strstart++,a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart;while(0!==--a.match_length);a.strstart++}else a.strstart+=a.match_length,a.match_length=0,a.ins_h=a.window[a.strstart],a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+1])&a.hash_mask;else d=F._tr_tally(a,0,a.window[a.strstart]),a.lookahead--,a.strstart++;if(d&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=a.strstart<ja-1?a.strstart:ja-1,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}function p(a,b){for(var c,d,e;;){if(a.lookahead<la){if(m(a),a.lookahead<la&&b===J)return ua;if(0===a.lookahead)break}if(c=0,a.lookahead>=ja&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart),a.prev_length=a.match_length,a.prev_match=a.match_start,a.match_length=ja-1,0!==c&&a.prev_length<a.max_lazy_match&&a.strstart-c<=a.w_size-la&&(a.match_length=l(a,c),a.match_length<=5&&(a.strategy===U||a.match_length===ja&&a.strstart-a.match_start>4096)&&(a.match_length=ja-1)),a.prev_length>=ja&&a.match_length<=a.prev_length){e=a.strstart+a.lookahead-ja,d=F._tr_tally(a,a.strstart-1-a.prev_match,a.prev_length-ja),a.lookahead-=a.prev_length-1,a.prev_length-=2;do++a.strstart<=e&&(a.ins_h=(a.ins_h<<a.hash_shift^a.window[a.strstart+ja-1])&a.hash_mask,c=a.prev[a.strstart&a.w_mask]=a.head[a.ins_h],a.head[a.ins_h]=a.strstart);while(0!==--a.prev_length);if(a.match_available=0,a.match_length=ja-1,a.strstart++,d&&(h(a,!1),0===a.strm.avail_out))return ua}else if(a.match_available){if(d=F._tr_tally(a,0,a.window[a.strstart-1]),d&&h(a,!1),a.strstart++,a.lookahead--,0===a.strm.avail_out)return ua}else a.match_available=1,a.strstart++,a.lookahead--}return a.match_available&&(d=F._tr_tally(a,0,a.window[a.strstart-1]),a.match_available=0),a.insert=a.strstart<ja-1?a.strstart:ja-1,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}function q(a,b){for(var c,d,e,f,g=a.window;;){if(a.lookahead<=ka){if(m(a),a.lookahead<=ka&&b===J)return ua;if(0===a.lookahead)break}if(a.match_length=0,a.lookahead>=ja&&a.strstart>0&&(e=a.strstart-1,d=g[e],d===g[++e]&&d===g[++e]&&d===g[++e])){f=a.strstart+ka;do;while(d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&d===g[++e]&&e<f);a.match_length=ka-(f-e),a.match_length>a.lookahead&&(a.match_length=a.lookahead)}if(a.match_length>=ja?(c=F._tr_tally(a,1,a.match_length-ja),a.lookahead-=a.match_length,a.strstart+=a.match_length,a.match_length=0):(c=F._tr_tally(a,0,a.window[a.strstart]),a.lookahead--,a.strstart++),c&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=0,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}function r(a,b){for(var 
c;;){if(0===a.lookahead&&(m(a),0===a.lookahead)){if(b===J)return ua;break}if(a.match_length=0,c=F._tr_tally(a,0,a.window[a.strstart]),a.lookahead--,a.strstart++,c&&(h(a,!1),0===a.strm.avail_out))return ua}return a.insert=0,b===M?(h(a,!0),0===a.strm.avail_out?wa:xa):a.last_lit&&(h(a,!1),0===a.strm.avail_out)?ua:va}function s(a,b,c,d,e){this.good_length=a,this.max_lazy=b,this.nice_length=c,this.max_chain=d,this.func=e}function t(a){a.window_size=2*a.w_size,f(a.head),a.max_lazy_match=D[a.level].max_lazy,a.good_match=D[a.level].good_length,a.nice_match=D[a.level].nice_length,a.max_chain_length=D[a.level].max_chain,a.strstart=0,a.block_start=0,a.lookahead=0,a.insert=0,a.match_length=a.prev_length=ja-1,a.match_available=0,a.ins_h=0}function u(){this.strm=null,this.status=0,this.pending_buf=null,this.pending_buf_size=0,this.pending_out=0,this.pending=0,this.wrap=0,this.gzhead=null,this.gzindex=0,this.method=$,this.last_flush=-1,this.w_size=0,this.w_bits=0,this.w_mask=0,this.window=null,this.window_size=0,this.prev=null,this.head=null,this.ins_h=0,this.hash_size=0,this.hash_bits=0,this.hash_mask=0,this.hash_shift=0,this.block_start=0,this.match_length=0,this.prev_match=0,this.match_available=0,this.strstart=0,this.match_start=0,this.lookahead=0,this.prev_length=0,this.max_chain_length=0,this.max_lazy_match=0,this.level=0,this.strategy=0,this.good_match=0,this.nice_match=0,this.dyn_ltree=new E.Buf16(2*ha),this.dyn_dtree=new E.Buf16(2*(2*fa+1)),this.bl_tree=new E.Buf16(2*(2*ga+1)),f(this.dyn_ltree),f(this.dyn_dtree),f(this.bl_tree),this.l_desc=null,this.d_desc=null,this.bl_desc=null,this.bl_count=new E.Buf16(ia+1),this.heap=new E.Buf16(2*ea+1),f(this.heap),this.heap_len=0,this.heap_max=0,this.depth=new E.Buf16(2*ea+1),f(this.depth),this.l_buf=0,this.lit_bufsize=0,this.last_lit=0,this.d_buf=0,this.opt_len=0,this.static_len=0,this.matches=0,this.insert=0,this.bi_buf=0,this.bi_valid=0}function v(a){var b;return a&&a.state?(a.total_in=a.total_out=0,a.data_type=Z,b=a.state,b.pending=0,b.pending_out=0,b.wrap<0&&(b.wrap=-b.wrap),b.status=b.wrap?na:sa,a.adler=2===b.wrap?0:1,b.last_flush=J,F._tr_init(b),O):d(a,Q)}function w(a){var b=v(a);return b===O&&t(a.state),b}function x(a,b){return a&&a.state?2!==a.state.wrap?Q:(a.state.gzhead=b,O):Q}function y(a,b,c,e,f,g){if(!a)return Q;var h=1;if(b===T&&(b=6),e<0?(h=0,e=-e):e>15&&(h=2,e-=16),f<1||f>_||c!==$||e<8||e>15||b<0||b>9||g<0||g>X)return d(a,Q);8===e&&(e=9);var i=new u;return a.state=i,i.strm=a,i.wrap=h,i.gzhead=null,i.w_bits=e,i.w_size=1<<i.w_bits,i.w_mask=i.w_size-1,i.hash_bits=f+7,i.hash_size=1<<i.hash_bits,i.hash_mask=i.hash_size-1,i.hash_shift=~~((i.hash_bits+ja-1)/ja),i.window=new E.Buf8(2*i.w_size),i.head=new E.Buf16(i.hash_size),i.prev=new E.Buf16(i.w_size),i.lit_bufsize=1<<f+6,i.pending_buf_size=4*i.lit_bufsize,i.pending_buf=new E.Buf8(i.pending_buf_size),i.d_buf=1*i.lit_bufsize,i.l_buf=3*i.lit_bufsize,i.level=b,i.strategy=g,i.method=c,w(a)}function z(a,b){return y(a,b,$,aa,ba,Y)}function A(a,b){var c,h,k,l;if(!a||!a.state||b>N||b<0)return a?d(a,Q):Q;if(h=a.state,!a.output||!a.input&&0!==a.avail_in||h.status===ta&&b!==M)return 
d(a,0===a.avail_out?S:Q);if(h.strm=a,c=h.last_flush,h.last_flush=b,h.status===na)if(2===h.wrap)a.adler=0,i(h,31),i(h,139),i(h,8),h.gzhead?(i(h,(h.gzhead.text?1:0)+(h.gzhead.hcrc?2:0)+(h.gzhead.extra?4:0)+(h.gzhead.name?8:0)+(h.gzhead.comment?16:0)),i(h,255&h.gzhead.time),i(h,h.gzhead.time>>8&255),i(h,h.gzhead.time>>16&255),i(h,h.gzhead.time>>24&255),i(h,9===h.level?2:h.strategy>=V||h.level<2?4:0),i(h,255&h.gzhead.os),h.gzhead.extra&&h.gzhead.extra.length&&(i(h,255&h.gzhead.extra.length),i(h,h.gzhead.extra.length>>8&255)),h.gzhead.hcrc&&(a.adler=H(a.adler,h.pending_buf,h.pending,0)),h.gzindex=0,h.status=oa):(i(h,0),i(h,0),i(h,0),i(h,0),i(h,0),i(h,9===h.level?2:h.strategy>=V||h.level<2?4:0),i(h,ya),h.status=sa);else{var m=$+(h.w_bits-8<<4)<<8,n=-1;n=h.strategy>=V||h.level<2?0:h.level<6?1:6===h.level?2:3,m|=n<<6,0!==h.strstart&&(m|=ma),m+=31-m%31,h.status=sa,j(h,m),0!==h.strstart&&(j(h,a.adler>>>16),j(h,65535&a.adler)),a.adler=1}if(h.status===oa)if(h.gzhead.extra){for(k=h.pending;h.gzindex<(65535&h.gzhead.extra.length)&&(h.pending!==h.pending_buf_size||(h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),g(a),k=h.pending,h.pending!==h.pending_buf_size));)i(h,255&h.gzhead.extra[h.gzindex]),h.gzindex++;h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),h.gzindex===h.gzhead.extra.length&&(h.gzindex=0,h.status=pa)}else h.status=pa;if(h.status===pa)if(h.gzhead.name){k=h.pending;do{if(h.pending===h.pending_buf_size&&(h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),g(a),k=h.pending,h.pending===h.pending_buf_size)){l=1;break}l=h.gzindex<h.gzhead.name.length?255&h.gzhead.name.charCodeAt(h.gzindex++):0,i(h,l)}while(0!==l);h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),0===l&&(h.gzindex=0,h.status=qa)}else h.status=qa;if(h.status===qa)if(h.gzhead.comment){k=h.pending;do{if(h.pending===h.pending_buf_size&&(h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),g(a),k=h.pending,h.pending===h.pending_buf_size)){l=1;break}l=h.gzindex<h.gzhead.comment.length?255&h.gzhead.comment.charCodeAt(h.gzindex++):0,i(h,l)}while(0!==l);h.gzhead.hcrc&&h.pending>k&&(a.adler=H(a.adler,h.pending_buf,h.pending-k,k)),0===l&&(h.status=ra)}else h.status=ra;if(h.status===ra&&(h.gzhead.hcrc?(h.pending+2>h.pending_buf_size&&g(a),h.pending+2<=h.pending_buf_size&&(i(h,255&a.adler),i(h,a.adler>>8&255),a.adler=0,h.status=sa)):h.status=sa),0!==h.pending){if(g(a),0===a.avail_out)return h.last_flush=-1,O}else if(0===a.avail_in&&e(b)<=e(c)&&b!==M)return d(a,S);if(h.status===ta&&0!==a.avail_in)return d(a,S);if(0!==a.avail_in||0!==h.lookahead||b!==J&&h.status!==ta){var o=h.strategy===V?r(h,b):h.strategy===W?q(h,b):D[h.level].func(h,b);if(o!==wa&&o!==xa||(h.status=ta),o===ua||o===wa)return 0===a.avail_out&&(h.last_flush=-1),O;if(o===va&&(b===K?F._tr_align(h):b!==N&&(F._tr_stored_block(h,0,0,!1),b===L&&(f(h.head),0===h.lookahead&&(h.strstart=0,h.block_start=0,h.insert=0))),g(a),0===a.avail_out))return h.last_flush=-1,O}return b!==M?O:h.wrap<=0?P:(2===h.wrap?(i(h,255&a.adler),i(h,a.adler>>8&255),i(h,a.adler>>16&255),i(h,a.adler>>24&255),i(h,255&a.total_in),i(h,a.total_in>>8&255),i(h,a.total_in>>16&255),i(h,a.total_in>>24&255)):(j(h,a.adler>>>16),j(h,65535&a.adler)),g(a),h.wrap>0&&(h.wrap=-h.wrap),0!==h.pending?O:P)}function B(a){var b;return a&&a.state?(b=a.state.status,b!==na&&b!==oa&&b!==pa&&b!==qa&&b!==ra&&b!==sa&&b!==ta?d(a,Q):(a.state=null,b===sa?d(a,R):O)):Q}function C(a,b){var 
c,d,e,g,h,i,j,k,l=b.length;if(!a||!a.state)return Q;if(c=a.state,g=c.wrap,2===g||1===g&&c.status!==na||c.lookahead)return Q;for(1===g&&(a.adler=G(a.adler,b,l,0)),c.wrap=0,l>=c.w_size&&(0===g&&(f(c.head),c.strstart=0,c.block_start=0,c.insert=0),k=new E.Buf8(c.w_size),E.arraySet(k,b,l-c.w_size,c.w_size,0),b=k,l=c.w_size),h=a.avail_in,i=a.next_in,j=a.input,a.avail_in=l,a.next_in=0,a.input=b,m(c);c.lookahead>=ja;){d=c.strstart,e=c.lookahead-(ja-1);do c.ins_h=(c.ins_h<<c.hash_shift^c.window[d+ja-1])&c.hash_mask,c.prev[d&c.w_mask]=c.head[c.ins_h],c.head[c.ins_h]=d,d++;while(--e);c.strstart=d,c.lookahead=ja-1,m(c)}return c.strstart+=c.lookahead,c.block_start=c.strstart,c.insert=c.lookahead,c.lookahead=0,c.match_length=c.prev_length=ja-1,c.match_available=0,a.next_in=i,a.input=j,a.avail_in=h,c.wrap=g,O}var D,E=a("../utils/common"),F=a("./trees"),G=a("./adler32"),H=a("./crc32"),I=a("./messages"),J=0,K=1,L=3,M=4,N=5,O=0,P=1,Q=-2,R=-3,S=-5,T=-1,U=1,V=2,W=3,X=4,Y=0,Z=2,$=8,_=9,aa=15,ba=8,ca=29,da=256,ea=da+1+ca,fa=30,ga=19,ha=2*ea+1,ia=15,ja=3,ka=258,la=ka+ja+1,ma=32,na=42,oa=69,pa=73,qa=91,ra=103,sa=113,ta=666,ua=1,va=2,wa=3,xa=4,ya=3;D=[new s(0,0,0,0,n),new s(4,4,8,4,o),new s(4,5,16,8,o),new s(4,6,32,32,o),new s(4,4,16,16,p),new s(8,16,32,32,p),new s(8,16,128,128,p),new s(8,32,128,256,p),new s(32,128,258,1024,p),new s(32,258,258,4096,p)],c.deflateInit=z,c.deflateInit2=y,c.deflateReset=w,c.deflateResetKeep=v,c.deflateSetHeader=x,c.deflate=A,c.deflateEnd=B,c.deflateSetDictionary=C,c.deflateInfo="pako deflate (from Nodeca project)"},{"../utils/common":62,"./adler32":64,"./crc32":66,"./messages":72,"./trees":73}],68:[function(a,b,c){"use strict";function d(){this.text=0,this.time=0,this.xflags=0,this.os=0,this.extra=null,this.extra_len=0,this.name="",this.comment="",this.hcrc=0,this.done=!1}b.exports=d},{}],69:[function(a,b,c){"use strict";var d=30,e=12;b.exports=function(a,b){var c,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A,B,C;c=a.state,f=a.next_in,B=a.input,g=f+(a.avail_in-5),h=a.next_out,C=a.output,i=h-(b-a.avail_out),j=h+(a.avail_out-257),k=c.dmax,l=c.wsize,m=c.whave,n=c.wnext,o=c.window,p=c.hold,q=c.bits,r=c.lencode,s=c.distcode,t=(1<<c.lenbits)-1,u=(1<<c.distbits)-1;a:do{q<15&&(p+=B[f++]<<q,q+=8,p+=B[f++]<<q,q+=8),v=r[p&t];b:for(;;){if(w=v>>>24,p>>>=w,q-=w,w=v>>>16&255,0===w)C[h++]=65535&v;else{if(!(16&w)){if(0===(64&w)){v=r[(65535&v)+(p&(1<<w)-1)];continue b}if(32&w){c.mode=e;break a}a.msg="invalid literal/length code",c.mode=d;break a}x=65535&v,w&=15,w&&(q<w&&(p+=B[f++]<<q,q+=8),x+=p&(1<<w)-1,p>>>=w,q-=w),q<15&&(p+=B[f++]<<q,q+=8,p+=B[f++]<<q,q+=8),v=s[p&u];c:for(;;){if(w=v>>>24,p>>>=w,q-=w,w=v>>>16&255,!(16&w)){if(0===(64&w)){v=s[(65535&v)+(p&(1<<w)-1)];continue c}a.msg="invalid distance code",c.mode=d;break a}if(y=65535&v,w&=15,q<w&&(p+=B[f++]<<q,q+=8,q<w&&(p+=B[f++]<<q,q+=8)),y+=p&(1<<w)-1,y>k){a.msg="invalid distance too far back",c.mode=d;break a}if(p>>>=w,q-=w,w=h-i,y>w){if(w=y-w,w>m&&c.sane){a.msg="invalid distance too far back",c.mode=d;break a}if(z=0,A=o,0===n){if(z+=l-w,w<x){x-=w;do C[h++]=o[z++];while(--w);z=h-y,A=C}}else if(n<w){if(z+=l+n-w,w-=n,w<x){x-=w;do C[h++]=o[z++];while(--w);if(z=0,n<x){w=n,x-=w;do C[h++]=o[z++];while(--w);z=h-y,A=C}}}else if(z+=n-w,w<x){x-=w;do C[h++]=o[z++];while(--w);z=h-y,A=C}for(;x>2;)C[h++]=A[z++],C[h++]=A[z++],C[h++]=A[z++],x-=3;x&&(C[h++]=A[z++],x>1&&(C[h++]=A[z++]))}else{z=h-y;do 
C[h++]=C[z++],C[h++]=C[z++],C[h++]=C[z++],x-=3;while(x>2);x&&(C[h++]=C[z++],x>1&&(C[h++]=C[z++]))}break}}break}}while(f<g&&h<j);x=q>>3,f-=x,q-=x<<3,p&=(1<<q)-1,a.next_in=f,a.next_out=h,a.avail_in=f<g?5+(g-f):5-(f-g),a.avail_out=h<j?257+(j-h):257-(h-j),c.hold=p,c.bits=q}},{}],70:[function(a,b,c){"use strict";function d(a){return(a>>>24&255)+(a>>>8&65280)+((65280&a)<<8)+((255&a)<<24)}function e(){this.mode=0,this.last=!1,this.wrap=0,this.havedict=!1,this.flags=0,this.dmax=0,this.check=0,this.total=0,this.head=null,this.wbits=0,this.wsize=0,this.whave=0,this.wnext=0,this.window=null,this.hold=0,this.bits=0,this.length=0,this.offset=0,this.extra=0,this.lencode=null,this.distcode=null,this.lenbits=0,this.distbits=0,this.ncode=0,this.nlen=0,this.ndist=0,this.have=0,this.next=null,this.lens=new s.Buf16(320),this.work=new s.Buf16(288),this.lendyn=null,this.distdyn=null,this.sane=0,this.back=0,this.was=0}function f(a){var b;return a&&a.state?(b=a.state,a.total_in=a.total_out=b.total=0,a.msg="",b.wrap&&(a.adler=1&b.wrap),b.mode=L,b.last=0,b.havedict=0,b.dmax=32768,b.head=null,b.hold=0,b.bits=0,b.lencode=b.lendyn=new s.Buf32(pa),b.distcode=b.distdyn=new s.Buf32(qa),b.sane=1,b.back=-1,D):G}function g(a){var b;return a&&a.state?(b=a.state,b.wsize=0,b.whave=0,b.wnext=0,f(a)):G}function h(a,b){var c,d;return a&&a.state?(d=a.state,b<0?(c=0,b=-b):(c=(b>>4)+1,b<48&&(b&=15)),b&&(b<8||b>15)?G:(null!==d.window&&d.wbits!==b&&(d.window=null),d.wrap=c,d.wbits=b,g(a))):G}function i(a,b){var c,d;return a?(d=new e,a.state=d,d.window=null,c=h(a,b),c!==D&&(a.state=null),c):G}function j(a){return i(a,sa)}function k(a){if(ta){var b;for(q=new s.Buf32(512),r=new s.Buf32(32),b=0;b<144;)a.lens[b++]=8;for(;b<256;)a.lens[b++]=9;for(;b<280;)a.lens[b++]=7;for(;b<288;)a.lens[b++]=8;for(w(y,a.lens,0,288,q,0,a.work,{bits:9}),b=0;b<32;)a.lens[b++]=5;w(z,a.lens,0,32,r,0,a.work,{bits:5}),ta=!1}a.lencode=q,a.lenbits=9,a.distcode=r,a.distbits=5}function l(a,b,c,d){var e,f=a.state;return null===f.window&&(f.wsize=1<<f.wbits,f.wnext=0,f.whave=0,f.window=new s.Buf8(f.wsize)),d>=f.wsize?(s.arraySet(f.window,b,c-f.wsize,f.wsize,0),f.wnext=0,f.whave=f.wsize):(e=f.wsize-f.wnext,e>d&&(e=d),s.arraySet(f.window,b,c-d,e,f.wnext),d-=e,d?(s.arraySet(f.window,b,c-d,d,0),f.wnext=d,f.whave=f.wsize):(f.wnext+=e,f.wnext===f.wsize&&(f.wnext=0),f.whave<f.wsize&&(f.whave+=e))),0}function m(a,b){var c,e,f,g,h,i,j,m,n,o,p,q,r,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,Aa=0,Ba=new s.Buf8(4),Ca=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15];if(!a||!a.state||!a.output||!a.input&&0!==a.avail_in)return G;c=a.state,c.mode===W&&(c.mode=X),h=a.next_out,f=a.output,j=a.avail_out,g=a.next_in,e=a.input,i=a.avail_in,m=c.hold,n=c.bits,o=i,p=j,xa=D;a:for(;;)switch(c.mode){case L:if(0===c.wrap){c.mode=X;break}for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(2&c.wrap&&35615===m){c.check=0,Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0),m=0,n=0,c.mode=M;break}if(c.flags=0,c.head&&(c.head.done=!1),!(1&c.wrap)||(((255&m)<<8)+(m>>8))%31){a.msg="incorrect header check",c.mode=ma;break}if((15&m)!==K){a.msg="unknown compression method",c.mode=ma;break}if(m>>>=4,n-=4,wa=(15&m)+8,0===c.wbits)c.wbits=wa;else if(wa>c.wbits){a.msg="invalid window size",c.mode=ma;break}c.dmax=1<<wa,a.adler=c.check=1,c.mode=512&m?U:W,m=0,n=0;break;case M:for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(c.flags=m,(255&c.flags)!==K){a.msg="unknown compression method",c.mode=ma;break}if(57344&c.flags){a.msg="unknown header flags 
set",c.mode=ma;break}c.head&&(c.head.text=m>>8&1),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0)),m=0,n=0,c.mode=N;case N:for(;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.head&&(c.head.time=m),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,Ba[2]=m>>>16&255,Ba[3]=m>>>24&255,c.check=u(c.check,Ba,4,0)),m=0,n=0,c.mode=O;case O:for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.head&&(c.head.xflags=255&m,c.head.os=m>>8),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0)),m=0,n=0,c.mode=P;case P:if(1024&c.flags){for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.length=m,c.head&&(c.head.extra_len=m),512&c.flags&&(Ba[0]=255&m,Ba[1]=m>>>8&255,c.check=u(c.check,Ba,2,0)),m=0,n=0}else c.head&&(c.head.extra=null);c.mode=Q;case Q:if(1024&c.flags&&(q=c.length,q>i&&(q=i),q&&(c.head&&(wa=c.head.extra_len-c.length,c.head.extra||(c.head.extra=new Array(c.head.extra_len)),s.arraySet(c.head.extra,e,g,q,wa)),512&c.flags&&(c.check=u(c.check,e,q,g)),i-=q,g+=q,c.length-=q),c.length))break a;c.length=0,c.mode=R;case R:if(2048&c.flags){if(0===i)break a;q=0;do wa=e[g+q++],c.head&&wa&&c.length<65536&&(c.head.name+=String.fromCharCode(wa));while(wa&&q<i);if(512&c.flags&&(c.check=u(c.check,e,q,g)),i-=q,g+=q,wa)break a}else c.head&&(c.head.name=null);c.length=0,c.mode=S;case S:if(4096&c.flags){if(0===i)break a;q=0;do wa=e[g+q++],c.head&&wa&&c.length<65536&&(c.head.comment+=String.fromCharCode(wa));while(wa&&q<i);if(512&c.flags&&(c.check=u(c.check,e,q,g)),i-=q,g+=q,wa)break a}else c.head&&(c.head.comment=null);c.mode=T;case T:if(512&c.flags){for(;n<16;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(m!==(65535&c.check)){a.msg="header crc mismatch",c.mode=ma;break}m=0,n=0}c.head&&(c.head.hcrc=c.flags>>9&1,c.head.done=!0),a.adler=c.check=0,c.mode=W;break;case U:for(;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}a.adler=c.check=d(m),m=0,n=0,c.mode=V;case V:if(0===c.havedict)return a.next_out=h,a.avail_out=j,a.next_in=g,a.avail_in=i,c.hold=m,c.bits=n,F;a.adler=c.check=1,c.mode=W;case W:if(b===B||b===C)break a;case X:if(c.last){m>>>=7&n,n-=7&n,c.mode=ja;break}for(;n<3;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}switch(c.last=1&m,m>>>=1,n-=1,3&m){case 0:c.mode=Y;break;case 1:if(k(c),c.mode=ca,b===C){m>>>=2,n-=2;break a}break;case 2:c.mode=_;break;case 3:a.msg="invalid block type",c.mode=ma}m>>>=2,n-=2;break;case Y:for(m>>>=7&n,n-=7&n;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if((65535&m)!==(m>>>16^65535)){a.msg="invalid stored block lengths",c.mode=ma;break}if(c.length=65535&m,m=0,n=0,c.mode=Z,b===C)break a;case Z:c.mode=$;case $:if(q=c.length){if(q>i&&(q=i),q>j&&(q=j),0===q)break a;s.arraySet(f,e,g,q,h),i-=q,g+=q,j-=q,h+=q,c.length-=q;break}c.mode=W;break;case _:for(;n<14;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(c.nlen=(31&m)+257,m>>>=5,n-=5,c.ndist=(31&m)+1,m>>>=5,n-=5,c.ncode=(15&m)+4,m>>>=4,n-=4,c.nlen>286||c.ndist>30){a.msg="too many length or distance symbols",c.mode=ma;break}c.have=0,c.mode=aa;case aa:for(;c.have<c.ncode;){for(;n<3;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.lens[Ca[c.have++]]=7&m,m>>>=3,n-=3}for(;c.have<19;)c.lens[Ca[c.have++]]=0;if(c.lencode=c.lendyn,c.lenbits=7,ya={bits:c.lenbits},xa=w(x,c.lens,0,19,c.lencode,0,c.work,ya),c.lenbits=ya.bits,xa){a.msg="invalid code lengths set",c.mode=ma;break}c.have=0,c.mode=ba;case ba:for(;c.have<c.nlen+c.ndist;){for(;Aa=c.lencode[m&(1<<c.lenbits)-1],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(sa<16)m>>>=qa,n-=qa,c.lens[c.have++]=sa;else{if(16===sa){for(za=qa+2;n<za;){if(0===i)break 
a;i--,m+=e[g++]<<n,n+=8}if(m>>>=qa,n-=qa,0===c.have){a.msg="invalid bit length repeat",c.mode=ma;break}wa=c.lens[c.have-1],q=3+(3&m),m>>>=2,n-=2}else if(17===sa){for(za=qa+3;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=qa,n-=qa,wa=0,q=3+(7&m),m>>>=3,n-=3}else{for(za=qa+7;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=qa,n-=qa,wa=0,q=11+(127&m),m>>>=7,n-=7}if(c.have+q>c.nlen+c.ndist){a.msg="invalid bit length repeat",c.mode=ma;break}for(;q--;)c.lens[c.have++]=wa}}if(c.mode===ma)break;if(0===c.lens[256]){a.msg="invalid code -- missing end-of-block",c.mode=ma;break}if(c.lenbits=9,ya={bits:c.lenbits},xa=w(y,c.lens,0,c.nlen,c.lencode,0,c.work,ya),c.lenbits=ya.bits,xa){a.msg="invalid literal/lengths set",c.mode=ma;break}if(c.distbits=6,c.distcode=c.distdyn,ya={bits:c.distbits},xa=w(z,c.lens,c.nlen,c.ndist,c.distcode,0,c.work,ya),c.distbits=ya.bits,xa){a.msg="invalid distances set",c.mode=ma;break}if(c.mode=ca,b===C)break a;case ca:c.mode=da;case da:if(i>=6&&j>=258){a.next_out=h,a.avail_out=j,a.next_in=g,a.avail_in=i,c.hold=m,c.bits=n,v(a,p),h=a.next_out,f=a.output,j=a.avail_out,g=a.next_in,e=a.input,i=a.avail_in,m=c.hold,n=c.bits,c.mode===W&&(c.back=-1);break}for(c.back=0;Aa=c.lencode[m&(1<<c.lenbits)-1],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(ra&&0===(240&ra)){for(ta=qa,ua=ra,va=sa;Aa=c.lencode[va+((m&(1<<ta+ua)-1)>>ta)],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(ta+qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=ta,n-=ta,c.back+=ta}if(m>>>=qa,n-=qa,c.back+=qa,c.length=sa,0===ra){c.mode=ia;break}if(32&ra){c.back=-1,c.mode=W;break}if(64&ra){a.msg="invalid literal/length code",c.mode=ma;break}c.extra=15&ra,c.mode=ea;case ea:if(c.extra){for(za=c.extra;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.length+=m&(1<<c.extra)-1,m>>>=c.extra,n-=c.extra,c.back+=c.extra}c.was=c.length,c.mode=fa;case fa:for(;Aa=c.distcode[m&(1<<c.distbits)-1],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(0===(240&ra)){for(ta=qa,ua=ra,va=sa;Aa=c.distcode[va+((m&(1<<ta+ua)-1)>>ta)],qa=Aa>>>24,ra=Aa>>>16&255,sa=65535&Aa,!(ta+qa<=n);){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}m>>>=ta,n-=ta,c.back+=ta}if(m>>>=qa,n-=qa,c.back+=qa,64&ra){a.msg="invalid distance code",c.mode=ma;break}c.offset=sa,c.extra=15&ra,c.mode=ga;case ga:if(c.extra){for(za=c.extra;n<za;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}c.offset+=m&(1<<c.extra)-1,m>>>=c.extra,n-=c.extra,c.back+=c.extra}if(c.offset>c.dmax){a.msg="invalid distance too far back",c.mode=ma;break}c.mode=ha;case ha:if(0===j)break a;if(q=p-j,c.offset>q){if(q=c.offset-q,q>c.whave&&c.sane){a.msg="invalid distance too far back",c.mode=ma;break}q>c.wnext?(q-=c.wnext,r=c.wsize-q):r=c.wnext-q,q>c.length&&(q=c.length),pa=c.window}else pa=f,r=h-c.offset,q=c.length;q>j&&(q=j),j-=q,c.length-=q;do f[h++]=pa[r++];while(--q);0===c.length&&(c.mode=da);break;case ia:if(0===j)break a;f[h++]=c.length,j--,c.mode=da;break;case ja:if(c.wrap){for(;n<32;){if(0===i)break a;i--,m|=e[g++]<<n,n+=8}if(p-=j,a.total_out+=p,c.total+=p,p&&(a.adler=c.check=c.flags?u(c.check,f,p,h-p):t(c.check,f,p,h-p)),p=j,(c.flags?m:d(m))!==c.check){a.msg="incorrect data check",c.mode=ma;break}m=0,n=0}c.mode=ka;case ka:if(c.wrap&&c.flags){for(;n<32;){if(0===i)break a;i--,m+=e[g++]<<n,n+=8}if(m!==(4294967295&c.total)){a.msg="incorrect length check",c.mode=ma;break}m=0,n=0}c.mode=la;case la:xa=E;break a;case ma:xa=H;break a;case na:return I;case oa:default:return G}return 
a.next_out=h,a.avail_out=j,a.next_in=g,a.avail_in=i,c.hold=m,c.bits=n,(c.wsize||p!==a.avail_out&&c.mode<ma&&(c.mode<ja||b!==A))&&l(a,a.output,a.next_out,p-a.avail_out)?(c.mode=na,I):(o-=a.avail_in,p-=a.avail_out,a.total_in+=o,a.total_out+=p,c.total+=p,c.wrap&&p&&(a.adler=c.check=c.flags?u(c.check,f,p,a.next_out-p):t(c.check,f,p,a.next_out-p)),a.data_type=c.bits+(c.last?64:0)+(c.mode===W?128:0)+(c.mode===ca||c.mode===Z?256:0),(0===o&&0===p||b===A)&&xa===D&&(xa=J),xa)}function n(a){if(!a||!a.state)return G;var b=a.state;return b.window&&(b.window=null),a.state=null,D}function o(a,b){var c;return a&&a.state?(c=a.state,0===(2&c.wrap)?G:(c.head=b,b.done=!1,D)):G}function p(a,b){var c,d,e,f=b.length;return a&&a.state?(c=a.state,0!==c.wrap&&c.mode!==V?G:c.mode===V&&(d=1,d=t(d,b,f,0),d!==c.check)?H:(e=l(a,b,f,f))?(c.mode=na,I):(c.havedict=1,D)):G}var q,r,s=a("../utils/common"),t=a("./adler32"),u=a("./crc32"),v=a("./inffast"),w=a("./inftrees"),x=0,y=1,z=2,A=4,B=5,C=6,D=0,E=1,F=2,G=-2,H=-3,I=-4,J=-5,K=8,L=1,M=2,N=3,O=4,P=5,Q=6,R=7,S=8,T=9,U=10,V=11,W=12,X=13,Y=14,Z=15,$=16,_=17,aa=18,ba=19,ca=20,da=21,ea=22,fa=23,ga=24,ha=25,ia=26,ja=27,ka=28,la=29,ma=30,na=31,oa=32,pa=852,qa=592,ra=15,sa=ra,ta=!0;c.inflateReset=g,c.inflateReset2=h,c.inflateResetKeep=f,c.inflateInit=j,c.inflateInit2=i,c.inflate=m,c.inflateEnd=n,c.inflateGetHeader=o,c.inflateSetDictionary=p,c.inflateInfo="pako inflate (from Nodeca project)"},{"../utils/common":62,"./adler32":64,"./crc32":66,"./inffast":69,"./inftrees":71}],71:[function(a,b,c){"use strict";var d=a("../utils/common"),e=15,f=852,g=592,h=0,i=1,j=2,k=[3,4,5,6,7,8,9,10,11,13,15,17,19,23,27,31,35,43,51,59,67,83,99,115,131,163,195,227,258,0,0],l=[16,16,16,16,16,16,16,16,17,17,17,17,18,18,18,18,19,19,19,19,20,20,20,20,21,21,21,21,16,72,78],m=[1,2,3,4,5,7,9,13,17,25,33,49,65,97,129,193,257,385,513,769,1025,1537,2049,3073,4097,6145,8193,12289,16385,24577,0,0],n=[16,16,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,64,64];b.exports=function(a,b,c,o,p,q,r,s){var t,u,v,w,x,y,z,A,B,C=s.bits,D=0,E=0,F=0,G=0,H=0,I=0,J=0,K=0,L=0,M=0,N=null,O=0,P=new d.Buf16(e+1),Q=new d.Buf16(e+1),R=null,S=0;for(D=0;D<=e;D++)P[D]=0;for(E=0;E<o;E++)P[b[c+E]]++;for(H=C,G=e;G>=1&&0===P[G];G--);if(H>G&&(H=G),0===G)return p[q++]=20971520,p[q++]=20971520,s.bits=1,0;for(F=1;F<G&&0===P[F];F++);for(H<F&&(H=F),K=1,D=1;D<=e;D++)if(K<<=1,K-=P[D],K<0)return-1;if(K>0&&(a===h||1!==G))return-1;for(Q[1]=0,D=1;D<e;D++)Q[D+1]=Q[D]+P[D];for(E=0;E<o;E++)0!==b[c+E]&&(r[Q[b[c+E]]++]=E);if(a===h?(N=R=r,y=19):a===i?(N=k,O-=257,R=l,S-=257,y=256):(N=m,R=n,y=-1),M=0,E=0,D=F,x=q,I=H,J=0,v=-1,L=1<<H,w=L-1,a===i&&L>f||a===j&&L>g)return 1;for(;;){z=D-J,r[E]<y?(A=0,B=r[E]):r[E]>y?(A=R[S+r[E]],B=N[O+r[E]]):(A=96,B=0),t=1<<D-J,u=1<<I,F=u;do u-=t,p[x+(M>>J)+u]=z<<24|A<<16|B|0;while(0!==u);for(t=1<<D-1;M&t;)t>>=1;if(0!==t?(M&=t-1,M+=t):M=0,E++,0===--P[D]){if(D===G)break;D=b[c+r[E]]}if(D>H&&(M&w)!==v){for(0===J&&(J=H),x+=F,I=D-J,K=1<<I;I+J<G&&(K-=P[I+J],!(K<=0));)I++,K<<=1;if(L+=1<<I,a===i&&L>f||a===j&&L>g)return 1;v=M&w,p[v]=H<<24|I<<16|x-q|0}}return 0!==M&&(p[x+M]=D-J<<24|64<<16|0),s.bits=H,0}},{"../utils/common":62}],72:[function(a,b,c){"use strict";b.exports={2:"need dictionary",1:"stream end",0:"","-1":"file error","-2":"stream error","-3":"data error","-4":"insufficient memory","-5":"buffer error","-6":"incompatible version"}},{}],73:[function(a,b,c){"use strict";function d(a){for(var b=a.length;--b>=0;)a[b]=0}function 
e(a,b,c,d,e){this.static_tree=a,this.extra_bits=b,this.extra_base=c,this.elems=d,this.max_length=e,this.has_stree=a&&a.length}function f(a,b){this.dyn_tree=a,this.max_code=0,this.stat_desc=b}function g(a){return a<256?ia[a]:ia[256+(a>>>7)]}function h(a,b){a.pending_buf[a.pending++]=255&b,a.pending_buf[a.pending++]=b>>>8&255}function i(a,b,c){a.bi_valid>X-c?(a.bi_buf|=b<<a.bi_valid&65535,h(a,a.bi_buf),a.bi_buf=b>>X-a.bi_valid,a.bi_valid+=c-X):(a.bi_buf|=b<<a.bi_valid&65535,a.bi_valid+=c)}function j(a,b,c){i(a,c[2*b],c[2*b+1])}function k(a,b){var c=0;do c|=1&a,a>>>=1,c<<=1;while(--b>0);return c>>>1}function l(a){16===a.bi_valid?(h(a,a.bi_buf),a.bi_buf=0,a.bi_valid=0):a.bi_valid>=8&&(a.pending_buf[a.pending++]=255&a.bi_buf,a.bi_buf>>=8,a.bi_valid-=8)}function m(a,b){var c,d,e,f,g,h,i=b.dyn_tree,j=b.max_code,k=b.stat_desc.static_tree,l=b.stat_desc.has_stree,m=b.stat_desc.extra_bits,n=b.stat_desc.extra_base,o=b.stat_desc.max_length,p=0;for(f=0;f<=W;f++)a.bl_count[f]=0;for(i[2*a.heap[a.heap_max]+1]=0,
+c=a.heap_max+1;c<V;c++)d=a.heap[c],f=i[2*i[2*d+1]+1]+1,f>o&&(f=o,p++),i[2*d+1]=f,d>j||(a.bl_count[f]++,g=0,d>=n&&(g=m[d-n]),h=i[2*d],a.opt_len+=h*(f+g),l&&(a.static_len+=h*(k[2*d+1]+g)));if(0!==p){do{for(f=o-1;0===a.bl_count[f];)f--;a.bl_count[f]--,a.bl_count[f+1]+=2,a.bl_count[o]--,p-=2}while(p>0);for(f=o;0!==f;f--)for(d=a.bl_count[f];0!==d;)e=a.heap[--c],e>j||(i[2*e+1]!==f&&(a.opt_len+=(f-i[2*e+1])*i[2*e],i[2*e+1]=f),d--)}}function n(a,b,c){var d,e,f=new Array(W+1),g=0;for(d=1;d<=W;d++)f[d]=g=g+c[d-1]<<1;for(e=0;e<=b;e++){var h=a[2*e+1];0!==h&&(a[2*e]=k(f[h]++,h))}}function o(){var a,b,c,d,f,g=new Array(W+1);for(c=0,d=0;d<Q-1;d++)for(ka[d]=c,a=0;a<1<<ba[d];a++)ja[c++]=d;for(ja[c-1]=d,f=0,d=0;d<16;d++)for(la[d]=f,a=0;a<1<<ca[d];a++)ia[f++]=d;for(f>>=7;d<T;d++)for(la[d]=f<<7,a=0;a<1<<ca[d]-7;a++)ia[256+f++]=d;for(b=0;b<=W;b++)g[b]=0;for(a=0;a<=143;)ga[2*a+1]=8,a++,g[8]++;for(;a<=255;)ga[2*a+1]=9,a++,g[9]++;for(;a<=279;)ga[2*a+1]=7,a++,g[7]++;for(;a<=287;)ga[2*a+1]=8,a++,g[8]++;for(n(ga,S+1,g),a=0;a<T;a++)ha[2*a+1]=5,ha[2*a]=k(a,5);ma=new e(ga,ba,R+1,S,W),na=new e(ha,ca,0,T,W),oa=new e(new Array(0),da,0,U,Y)}function p(a){var b;for(b=0;b<S;b++)a.dyn_ltree[2*b]=0;for(b=0;b<T;b++)a.dyn_dtree[2*b]=0;for(b=0;b<U;b++)a.bl_tree[2*b]=0;a.dyn_ltree[2*Z]=1,a.opt_len=a.static_len=0,a.last_lit=a.matches=0}function q(a){a.bi_valid>8?h(a,a.bi_buf):a.bi_valid>0&&(a.pending_buf[a.pending++]=a.bi_buf),a.bi_buf=0,a.bi_valid=0}function r(a,b,c,d){q(a),d&&(h(a,c),h(a,~c)),G.arraySet(a.pending_buf,a.window,b,c,a.pending),a.pending+=c}function s(a,b,c,d){var e=2*b,f=2*c;return a[e]<a[f]||a[e]===a[f]&&d[b]<=d[c]}function t(a,b,c){for(var d=a.heap[c],e=c<<1;e<=a.heap_len&&(e<a.heap_len&&s(b,a.heap[e+1],a.heap[e],a.depth)&&e++,!s(b,d,a.heap[e],a.depth));)a.heap[c]=a.heap[e],c=e,e<<=1;a.heap[c]=d}function u(a,b,c){var d,e,f,h,k=0;if(0!==a.last_lit)do d=a.pending_buf[a.d_buf+2*k]<<8|a.pending_buf[a.d_buf+2*k+1],e=a.pending_buf[a.l_buf+k],k++,0===d?j(a,e,b):(f=ja[e],j(a,f+R+1,b),h=ba[f],0!==h&&(e-=ka[f],i(a,e,h)),d--,f=g(d),j(a,f,c),h=ca[f],0!==h&&(d-=la[f],i(a,d,h)));while(k<a.last_lit);j(a,Z,b)}function v(a,b){var c,d,e,f=b.dyn_tree,g=b.stat_desc.static_tree,h=b.stat_desc.has_stree,i=b.stat_desc.elems,j=-1;for(a.heap_len=0,a.heap_max=V,c=0;c<i;c++)0!==f[2*c]?(a.heap[++a.heap_len]=j=c,a.depth[c]=0):f[2*c+1]=0;for(;a.heap_len<2;)e=a.heap[++a.heap_len]=j<2?++j:0,f[2*e]=1,a.depth[e]=0,a.opt_len--,h&&(a.static_len-=g[2*e+1]);for(b.max_code=j,c=a.heap_len>>1;c>=1;c--)t(a,f,c);e=i;do c=a.heap[1],a.heap[1]=a.heap[a.heap_len--],t(a,f,1),d=a.heap[1],a.heap[--a.heap_max]=c,a.heap[--a.heap_max]=d,f[2*e]=f[2*c]+f[2*d],a.depth[e]=(a.depth[c]>=a.depth[d]?a.depth[c]:a.depth[d])+1,f[2*c+1]=f[2*d+1]=e,a.heap[1]=e++,t(a,f,1);while(a.heap_len>=2);a.heap[--a.heap_max]=a.heap[1],m(a,b),n(f,j,a.bl_count)}function w(a,b,c){var d,e,f=-1,g=b[1],h=0,i=7,j=4;for(0===g&&(i=138,j=3),b[2*(c+1)+1]=65535,d=0;d<=c;d++)e=g,g=b[2*(d+1)+1],++h<i&&e===g||(h<j?a.bl_tree[2*e]+=h:0!==e?(e!==f&&a.bl_tree[2*e]++,a.bl_tree[2*$]++):h<=10?a.bl_tree[2*_]++:a.bl_tree[2*aa]++,h=0,f=e,0===g?(i=138,j=3):e===g?(i=6,j=3):(i=7,j=4))}function x(a,b,c){var d,e,f=-1,g=b[1],h=0,k=7,l=4;for(0===g&&(k=138,l=3),d=0;d<=c;d++)if(e=g,g=b[2*(d+1)+1],!(++h<k&&e===g)){if(h<l){do j(a,e,a.bl_tree);while(0!==--h)}else 0!==e?(e!==f&&(j(a,e,a.bl_tree),h--),j(a,$,a.bl_tree),i(a,h-3,2)):h<=10?(j(a,_,a.bl_tree),i(a,h-3,3)):(j(a,aa,a.bl_tree),i(a,h-11,7));h=0,f=e,0===g?(k=138,l=3):e===g?(k=6,l=3):(k=7,l=4)}}function y(a){var 
b;for(w(a,a.dyn_ltree,a.l_desc.max_code),w(a,a.dyn_dtree,a.d_desc.max_code),v(a,a.bl_desc),b=U-1;b>=3&&0===a.bl_tree[2*ea[b]+1];b--);return a.opt_len+=3*(b+1)+5+5+4,b}function z(a,b,c,d){var e;for(i(a,b-257,5),i(a,c-1,5),i(a,d-4,4),e=0;e<d;e++)i(a,a.bl_tree[2*ea[e]+1],3);x(a,a.dyn_ltree,b-1),x(a,a.dyn_dtree,c-1)}function A(a){var b,c=4093624447;for(b=0;b<=31;b++,c>>>=1)if(1&c&&0!==a.dyn_ltree[2*b])return I;if(0!==a.dyn_ltree[18]||0!==a.dyn_ltree[20]||0!==a.dyn_ltree[26])return J;for(b=32;b<R;b++)if(0!==a.dyn_ltree[2*b])return J;return I}function B(a){pa||(o(),pa=!0),a.l_desc=new f(a.dyn_ltree,ma),a.d_desc=new f(a.dyn_dtree,na),a.bl_desc=new f(a.bl_tree,oa),a.bi_buf=0,a.bi_valid=0,p(a)}function C(a,b,c,d){i(a,(L<<1)+(d?1:0),3),r(a,b,c,!0)}function D(a){i(a,M<<1,3),j(a,Z,ga),l(a)}function E(a,b,c,d){var e,f,g=0;a.level>0?(a.strm.data_type===K&&(a.strm.data_type=A(a)),v(a,a.l_desc),v(a,a.d_desc),g=y(a),e=a.opt_len+3+7>>>3,f=a.static_len+3+7>>>3,f<=e&&(e=f)):e=f=c+5,c+4<=e&&b!==-1?C(a,b,c,d):a.strategy===H||f===e?(i(a,(M<<1)+(d?1:0),3),u(a,ga,ha)):(i(a,(N<<1)+(d?1:0),3),z(a,a.l_desc.max_code+1,a.d_desc.max_code+1,g+1),u(a,a.dyn_ltree,a.dyn_dtree)),p(a),d&&q(a)}function F(a,b,c){return a.pending_buf[a.d_buf+2*a.last_lit]=b>>>8&255,a.pending_buf[a.d_buf+2*a.last_lit+1]=255&b,a.pending_buf[a.l_buf+a.last_lit]=255&c,a.last_lit++,0===b?a.dyn_ltree[2*c]++:(a.matches++,b--,a.dyn_ltree[2*(ja[c]+R+1)]++,a.dyn_dtree[2*g(b)]++),a.last_lit===a.lit_bufsize-1}var G=a("../utils/common"),H=4,I=0,J=1,K=2,L=0,M=1,N=2,O=3,P=258,Q=29,R=256,S=R+1+Q,T=30,U=19,V=2*S+1,W=15,X=16,Y=7,Z=256,$=16,_=17,aa=18,ba=[0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0],ca=[0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13],da=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7],ea=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15],fa=512,ga=new Array(2*(S+2));d(ga);var ha=new Array(2*T);d(ha);var ia=new Array(fa);d(ia);var ja=new Array(P-O+1);d(ja);var ka=new Array(Q);d(ka);var la=new Array(T);d(la);var ma,na,oa,pa=!1;c._tr_init=B,c._tr_stored_block=C,c._tr_flush_block=E,c._tr_tally=F,c._tr_align=D},{"../utils/common":62}],74:[function(a,b,c){"use strict";function d(){this.input=null,this.next_in=0,this.avail_in=0,this.total_in=0,this.output=null,this.next_out=0,this.avail_out=0,this.total_out=0,this.msg="",this.state=null,this.data_type=2,this.adler=0}b.exports=d},{}]},{},[10])(10)}); \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/www/lib/keycodes.js b/testing/web-platform/tests/tools/wave/www/lib/keycodes.js
new file mode 100644
index 0000000000..2d01b035ab
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/lib/keycodes.js
@@ -0,0 +1,88 @@
+if (typeof KeyEvent != "undefined") {
+ if (typeof KeyEvent.VK_LEFT != "undefined") {
+ var VK_LEFT = KeyEvent.VK_LEFT;
+ var VK_UP = KeyEvent.VK_UP;
+ var VK_RIGHT = KeyEvent.VK_RIGHT;
+ var VK_DOWN = KeyEvent.VK_DOWN;
+ }
+ if (typeof KeyEvent.VK_ENTER != "undefined") {
+ var VK_ENTER = KeyEvent.VK_ENTER;
+ }
+ if (typeof KeyEvent.VK_RED != "undefined") {
+ var VK_RED = KeyEvent.VK_RED;
+ var VK_GREEN = KeyEvent.VK_GREEN;
+ var VK_YELLOW = KeyEvent.VK_YELLOW;
+ var VK_BLUE = KeyEvent.VK_BLUE;
+ }
+ if (typeof KeyEvent.VK_PLAY != "undefined") {
+ var VK_PLAY = KeyEvent.VK_PLAY;
+ var VK_PAUSE = KeyEvent.VK_PAUSE;
+ var VK_STOP = KeyEvent.VK_STOP;
+ }
+ if (typeof KeyEvent.VK_BACK != "undefined") {
+ var VK_BACK = KeyEvent.VK_BACK;
+ }
+ if (typeof KeyEvent.VK_0 != "undefined") {
+ var VK_0 = KeyEvent.VK_0;
+ var VK_1 = KeyEvent.VK_1;
+ var VK_2 = KeyEvent.VK_2;
+ var VK_3 = KeyEvent.VK_3;
+ var VK_4 = KeyEvent.VK_4;
+ var VK_5 = KeyEvent.VK_5;
+ var VK_6 = KeyEvent.VK_6;
+ var VK_7 = KeyEvent.VK_7;
+ var VK_8 = KeyEvent.VK_8;
+ var VK_9 = KeyEvent.VK_9;
+ }
+}
+if (typeof VK_LEFT == "undefined") {
+ var VK_LEFT = 132;
+ var VK_UP = 130;
+ var VK_RIGHT = 133;
+ var VK_DOWN = 131;
+}
+if (typeof VK_ENTER == "undefined") {
+ var VK_ENTER = 13;
+}
+if (typeof VK_RED == "undefined") {
+ var VK_RED = 403;
+ var VK_GREEN = 404;
+ var VK_YELLOW = 502;
+ var VK_BLUE = 406;
+}
+if (typeof VK_PLAY == "undefined") {
+ var VK_PLAY = 19;
+ var VK_PAUSE = 19;
+ var VK_STOP = 413;
+}
+if (typeof VK_BACK == "undefined") {
+ var VK_BACK = 0xa6;
+}
+if (typeof VK_0 == "undefined") {
+ var VK_0 = 48;
+ var VK_1 = 49;
+ var VK_2 = 50;
+ var VK_3 = 51;
+ var VK_4 = 52;
+ var VK_5 = 53;
+ var VK_6 = 54;
+ var VK_7 = 55;
+ var VK_8 = 56;
+ var VK_9 = 57;
+}
+
+var NEXT_KEYS = [39, 133, 131];
+var PREV_KEYS = [37, 132, 130];
+var ACTION_KEYS = [13, 32];
+
+if (typeof KeyEvent != "undefined") {
+ if (typeof KeyEvent.VK_LEFT != "undefined") {
+ PREV_KEYS.push(KeyEvent.VK_LEFT);
+ PREV_KEYS.push(KeyEvent.VK_UP);
+ NEXT_KEYS.push(KeyEvent.VK_RIGHT);
+ NEXT_KEYS.push(KeyEvent.VK_DOWN);
+ }
+ if (typeof KeyEvent.VK_ENTER != "undefined") {
+ ACTION_KEYS.push(KeyEvent.VK_ENTER);
+ }
+}
\ No newline at end of file
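The NEXT_KEYS / PREV_KEYS / ACTION_KEYS arrays above collect both standard and device-specific key codes; a minimal sketch of how such a table can be consumed in a keydown handler (the handler below is illustrative only, not part of the added file):

document.addEventListener("keydown", function (event) {
  var keyCode = event.keyCode;
  if (NEXT_KEYS.indexOf(keyCode) !== -1) {
    console.log("navigate forward");      // e.g. focus the next control
  } else if (PREV_KEYS.indexOf(keyCode) !== -1) {
    console.log("navigate backward");     // e.g. focus the previous control
  } else if (ACTION_KEYS.indexOf(keyCode) !== -1) {
    document.activeElement.click();       // treat ENTER/SPACE (and VK_ENTER) as activation
  }
});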
diff --git a/testing/web-platform/tests/tools/wave/www/lib/qrcode.js b/testing/web-platform/tests/tools/wave/www/lib/qrcode.js
new file mode 100644
index 0000000000..45e5d7b974
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/lib/qrcode.js
@@ -0,0 +1,1533 @@
+/**
+ * @fileoverview
+ * - Using the 'QRCode for Javascript library'
+ * - Fixed dataset of the 'QRCode for Javascript library' to support the full spec.
+ * - This library has no dependencies.
+ *
+ * @author davidshimjs
+ * @see <a href="http://www.d-project.com/" target="_blank">http://www.d-project.com/</a>
+ * @see <a href="http://jeromeetienne.github.com/jquery-qrcode/" target="_blank">http://jeromeetienne.github.com/jquery-qrcode/</a>
+ */
+var QRCode
+;(function () {
+ // ---------------------------------------------------------------------
+ // QRCode for JavaScript
+ //
+ // Copyright (c) 2009 Kazuhiko Arase
+ //
+ // URL: http://www.d-project.com/
+ //
+ // Licensed under the MIT license:
+ // http://www.opensource.org/licenses/mit-license.php
+ //
+ // The word "QR Code" is registered trademark of
+ // DENSO WAVE INCORPORATED
+ // http://www.denso-wave.com/qrcode/faqpatent-e.html
+ //
+ // ---------------------------------------------------------------------
+ function QR8bitByte (data) {
+ this.mode = QRMode.MODE_8BIT_BYTE
+ this.data = data
+ this.parsedData = []
+
+ // Added to support UTF-8 Characters
+ for (var i = 0, l = this.data.length; i < l; i++) {
+ var byteArray = []
+ var code = this.data.charCodeAt(i)
+
+ if (code > 0x10000) {
+ byteArray[0] = 0xf0 | ((code & 0x1c0000) >>> 18)
+ byteArray[1] = 0x80 | ((code & 0x3f000) >>> 12)
+ byteArray[2] = 0x80 | ((code & 0xfc0) >>> 6)
+ byteArray[3] = 0x80 | (code & 0x3f)
+ } else if (code > 0x800) {
+ byteArray[0] = 0xe0 | ((code & 0xf000) >>> 12)
+ byteArray[1] = 0x80 | ((code & 0xfc0) >>> 6)
+ byteArray[2] = 0x80 | (code & 0x3f)
+ } else if (code > 0x80) {
+ byteArray[0] = 0xc0 | ((code & 0x7c0) >>> 6)
+ byteArray[1] = 0x80 | (code & 0x3f)
+ } else {
+ byteArray[0] = code
+ }
+
+ this.parsedData.push(byteArray)
+ }
+
+ this.parsedData = Array.prototype.concat.apply([], this.parsedData)
+
+ if (this.parsedData.length != this.data.length) {
+ this.parsedData.unshift(191)
+ this.parsedData.unshift(187)
+ this.parsedData.unshift(239)
+ }
+ }
+
+ QR8bitByte.prototype = {
+ getLength: function (buffer) {
+ return this.parsedData.length
+ },
+ write: function (buffer) {
+ for (var i = 0, l = this.parsedData.length; i < l; i++) {
+ buffer.put(this.parsedData[i], 8)
+ }
+ }
+ }
+
+ function QRCodeModel (typeNumber, errorCorrectLevel) {
+ this.typeNumber = typeNumber
+ this.errorCorrectLevel = errorCorrectLevel
+ this.modules = null
+ this.moduleCount = 0
+ this.dataCache = null
+ this.dataList = []
+ }
+
+ QRCodeModel.prototype = {
+ addData: function (data) {
+ var newData = new QR8bitByte(data)
+ this.dataList.push(newData)
+ this.dataCache = null
+ },
+ isDark: function (row, col) {
+ if (
+ row < 0 ||
+ this.moduleCount <= row ||
+ col < 0 ||
+ this.moduleCount <= col
+ ) {
+ throw new Error(row + ',' + col)
+ }
+ return this.modules[row][col]
+ },
+ getModuleCount: function () {
+ return this.moduleCount
+ },
+ make: function () {
+ this.makeImpl(false, this.getBestMaskPattern())
+ },
+ makeImpl: function (test, maskPattern) {
+ this.moduleCount = this.typeNumber * 4 + 17
+ this.modules = new Array(this.moduleCount)
+ for (var row = 0; row < this.moduleCount; row++) {
+ this.modules[row] = new Array(this.moduleCount)
+ for (var col = 0; col < this.moduleCount; col++) {
+ this.modules[row][col] = null
+ }
+ }
+ this.setupPositionProbePattern(0, 0)
+ this.setupPositionProbePattern(this.moduleCount - 7, 0)
+ this.setupPositionProbePattern(0, this.moduleCount - 7)
+ this.setupPositionAdjustPattern()
+ this.setupTimingPattern()
+ this.setupTypeInfo(test, maskPattern)
+ if (this.typeNumber >= 7) {
+ this.setupTypeNumber(test)
+ }
+ if (this.dataCache == null) {
+ this.dataCache = QRCodeModel.createData(
+ this.typeNumber,
+ this.errorCorrectLevel,
+ this.dataList
+ )
+ }
+ this.mapData(this.dataCache, maskPattern)
+ },
+ setupPositionProbePattern: function (row, col) {
+ for (var r = -1; r <= 7; r++) {
+ if (row + r <= -1 || this.moduleCount <= row + r) continue
+ for (var c = -1; c <= 7; c++) {
+ if (col + c <= -1 || this.moduleCount <= col + c) continue
+ if (
+ (r >= 0 && r <= 6 && (c == 0 || c == 6)) ||
+ (c >= 0 && c <= 6 && (r == 0 || r == 6)) ||
+ (r >= 2 && r <= 4 && c >= 2 && c <= 4)
+ ) {
+ this.modules[row + r][col + c] = true
+ } else {
+ this.modules[row + r][col + c] = false
+ }
+ }
+ }
+ },
+ getBestMaskPattern: function () {
+ var minLostPoint = 0
+ var pattern = 0
+ for (var i = 0; i < 8; i++) {
+ this.makeImpl(true, i)
+ var lostPoint = QRUtil.getLostPoint(this)
+ if (i == 0 || minLostPoint > lostPoint) {
+ minLostPoint = lostPoint
+ pattern = i
+ }
+ }
+ return pattern
+ },
+ createMovieClip: function (target_mc, instance_name, depth) {
+ var qr_mc = target_mc.createEmptyMovieClip(instance_name, depth)
+ var cs = 1
+ this.make()
+ for (var row = 0; row < this.modules.length; row++) {
+ var y = row * cs
+ for (var col = 0; col < this.modules[row].length; col++) {
+ var x = col * cs
+ var dark = this.modules[row][col]
+ if (dark) {
+ qr_mc.beginFill(0, 100)
+ qr_mc.moveTo(x, y)
+ qr_mc.lineTo(x + cs, y)
+ qr_mc.lineTo(x + cs, y + cs)
+ qr_mc.lineTo(x, y + cs)
+ qr_mc.endFill()
+ }
+ }
+ }
+ return qr_mc
+ },
+ setupTimingPattern: function () {
+ for (var r = 8; r < this.moduleCount - 8; r++) {
+ if (this.modules[r][6] != null) {
+ continue
+ }
+ this.modules[r][6] = r % 2 == 0
+ }
+ for (var c = 8; c < this.moduleCount - 8; c++) {
+ if (this.modules[6][c] != null) {
+ continue
+ }
+ this.modules[6][c] = c % 2 == 0
+ }
+ },
+ setupPositionAdjustPattern: function () {
+ var pos = QRUtil.getPatternPosition(this.typeNumber)
+ for (var i = 0; i < pos.length; i++) {
+ for (var j = 0; j < pos.length; j++) {
+ var row = pos[i]
+ var col = pos[j]
+ if (this.modules[row][col] != null) {
+ continue
+ }
+ for (var r = -2; r <= 2; r++) {
+ for (var c = -2; c <= 2; c++) {
+ if (
+ r == -2 ||
+ r == 2 ||
+ c == -2 ||
+ c == 2 ||
+ (r == 0 && c == 0)
+ ) {
+ this.modules[row + r][col + c] = true
+ } else {
+ this.modules[row + r][col + c] = false
+ }
+ }
+ }
+ }
+ }
+ },
+ setupTypeNumber: function (test) {
+ var bits = QRUtil.getBCHTypeNumber(this.typeNumber)
+ for (var i = 0; i < 18; i++) {
+ var mod = !test && ((bits >> i) & 1) == 1
+ this.modules[Math.floor(i / 3)][i % 3 + this.moduleCount - 8 - 3] = mod
+ }
+ for (var i = 0; i < 18; i++) {
+ var mod = !test && ((bits >> i) & 1) == 1
+ this.modules[i % 3 + this.moduleCount - 8 - 3][Math.floor(i / 3)] = mod
+ }
+ },
+ setupTypeInfo: function (test, maskPattern) {
+ var data = (this.errorCorrectLevel << 3) | maskPattern
+ var bits = QRUtil.getBCHTypeInfo(data)
+ for (var i = 0; i < 15; i++) {
+ var mod = !test && ((bits >> i) & 1) == 1
+ if (i < 6) {
+ this.modules[i][8] = mod
+ } else if (i < 8) {
+ this.modules[i + 1][8] = mod
+ } else {
+ this.modules[this.moduleCount - 15 + i][8] = mod
+ }
+ }
+ for (var i = 0; i < 15; i++) {
+ var mod = !test && ((bits >> i) & 1) == 1
+ if (i < 8) {
+ this.modules[8][this.moduleCount - i - 1] = mod
+ } else if (i < 9) {
+ this.modules[8][15 - i - 1 + 1] = mod
+ } else {
+ this.modules[8][15 - i - 1] = mod
+ }
+ }
+ this.modules[this.moduleCount - 8][8] = !test
+ },
+ mapData: function (data, maskPattern) {
+ var inc = -1
+ var row = this.moduleCount - 1
+ var bitIndex = 7
+ var byteIndex = 0
+ for (var col = this.moduleCount - 1; col > 0; col -= 2) {
+ if (col == 6) col--
+ while (true) {
+ for (var c = 0; c < 2; c++) {
+ if (this.modules[row][col - c] == null) {
+ var dark = false
+ if (byteIndex < data.length) {
+ dark = ((data[byteIndex] >>> bitIndex) & 1) == 1
+ }
+ var mask = QRUtil.getMask(maskPattern, row, col - c)
+ if (mask) {
+ dark = !dark
+ }
+ this.modules[row][col - c] = dark
+ bitIndex--
+ if (bitIndex == -1) {
+ byteIndex++
+ bitIndex = 7
+ }
+ }
+ }
+ row += inc
+ if (row < 0 || this.moduleCount <= row) {
+ row -= inc
+ inc = -inc
+ break
+ }
+ }
+ }
+ }
+ }
+ QRCodeModel.PAD0 = 0xec
+ QRCodeModel.PAD1 = 0x11
+ QRCodeModel.createData = function (typeNumber, errorCorrectLevel, dataList) {
+ var rsBlocks = QRRSBlock.getRSBlocks(typeNumber, errorCorrectLevel)
+ var buffer = new QRBitBuffer()
+ for (var i = 0; i < dataList.length; i++) {
+ var data = dataList[i]
+ buffer.put(data.mode, 4)
+ buffer.put(
+ data.getLength(),
+ QRUtil.getLengthInBits(data.mode, typeNumber)
+ )
+ data.write(buffer)
+ }
+ var totalDataCount = 0
+ for (var i = 0; i < rsBlocks.length; i++) {
+ totalDataCount += rsBlocks[i].dataCount
+ }
+ if (buffer.getLengthInBits() > totalDataCount * 8) {
+ throw new Error(
+ 'code length overflow. (' +
+ buffer.getLengthInBits() +
+ '>' +
+ totalDataCount * 8 +
+ ')'
+ )
+ }
+ if (buffer.getLengthInBits() + 4 <= totalDataCount * 8) {
+ buffer.put(0, 4)
+ }
+ while (buffer.getLengthInBits() % 8 != 0) {
+ buffer.putBit(false)
+ }
+ while (true) {
+ if (buffer.getLengthInBits() >= totalDataCount * 8) {
+ break
+ }
+ buffer.put(QRCodeModel.PAD0, 8)
+ if (buffer.getLengthInBits() >= totalDataCount * 8) {
+ break
+ }
+ buffer.put(QRCodeModel.PAD1, 8)
+ }
+ return QRCodeModel.createBytes(buffer, rsBlocks)
+ }
+ QRCodeModel.createBytes = function (buffer, rsBlocks) {
+ var offset = 0
+ var maxDcCount = 0
+ var maxEcCount = 0
+ var dcdata = new Array(rsBlocks.length)
+ var ecdata = new Array(rsBlocks.length)
+ for (var r = 0; r < rsBlocks.length; r++) {
+ var dcCount = rsBlocks[r].dataCount
+ var ecCount = rsBlocks[r].totalCount - dcCount
+ maxDcCount = Math.max(maxDcCount, dcCount)
+ maxEcCount = Math.max(maxEcCount, ecCount)
+ dcdata[r] = new Array(dcCount)
+ for (var i = 0; i < dcdata[r].length; i++) {
+ dcdata[r][i] = 0xff & buffer.buffer[i + offset]
+ }
+ offset += dcCount
+ var rsPoly = QRUtil.getErrorCorrectPolynomial(ecCount)
+ var rawPoly = new QRPolynomial(dcdata[r], rsPoly.getLength() - 1)
+ var modPoly = rawPoly.mod(rsPoly)
+ ecdata[r] = new Array(rsPoly.getLength() - 1)
+ for (var i = 0; i < ecdata[r].length; i++) {
+ var modIndex = i + modPoly.getLength() - ecdata[r].length
+ ecdata[r][i] = modIndex >= 0 ? modPoly.get(modIndex) : 0
+ }
+ }
+ var totalCodeCount = 0
+ for (var i = 0; i < rsBlocks.length; i++) {
+ totalCodeCount += rsBlocks[i].totalCount
+ }
+ var data = new Array(totalCodeCount)
+ var index = 0
+ for (var i = 0; i < maxDcCount; i++) {
+ for (var r = 0; r < rsBlocks.length; r++) {
+ if (i < dcdata[r].length) {
+ data[index++] = dcdata[r][i]
+ }
+ }
+ }
+ for (var i = 0; i < maxEcCount; i++) {
+ for (var r = 0; r < rsBlocks.length; r++) {
+ if (i < ecdata[r].length) {
+ data[index++] = ecdata[r][i]
+ }
+ }
+ }
+ return data
+ }
+ var QRMode = {
+ MODE_NUMBER: 1 << 0,
+ MODE_ALPHA_NUM: 1 << 1,
+ MODE_8BIT_BYTE: 1 << 2,
+ MODE_KANJI: 1 << 3
+ }
+ var QRErrorCorrectLevel = { L: 1, M: 0, Q: 3, H: 2 }
+ var QRMaskPattern = {
+ PATTERN000: 0,
+ PATTERN001: 1,
+ PATTERN010: 2,
+ PATTERN011: 3,
+ PATTERN100: 4,
+ PATTERN101: 5,
+ PATTERN110: 6,
+ PATTERN111: 7
+ }
+ var QRUtil = {
+ PATTERN_POSITION_TABLE: [
+ [],
+ [6, 18],
+ [6, 22],
+ [6, 26],
+ [6, 30],
+ [6, 34],
+ [6, 22, 38],
+ [6, 24, 42],
+ [6, 26, 46],
+ [6, 28, 50],
+ [6, 30, 54],
+ [6, 32, 58],
+ [6, 34, 62],
+ [6, 26, 46, 66],
+ [6, 26, 48, 70],
+ [6, 26, 50, 74],
+ [6, 30, 54, 78],
+ [6, 30, 56, 82],
+ [6, 30, 58, 86],
+ [6, 34, 62, 90],
+ [6, 28, 50, 72, 94],
+ [6, 26, 50, 74, 98],
+ [6, 30, 54, 78, 102],
+ [6, 28, 54, 80, 106],
+ [6, 32, 58, 84, 110],
+ [6, 30, 58, 86, 114],
+ [6, 34, 62, 90, 118],
+ [6, 26, 50, 74, 98, 122],
+ [6, 30, 54, 78, 102, 126],
+ [6, 26, 52, 78, 104, 130],
+ [6, 30, 56, 82, 108, 134],
+ [6, 34, 60, 86, 112, 138],
+ [6, 30, 58, 86, 114, 142],
+ [6, 34, 62, 90, 118, 146],
+ [6, 30, 54, 78, 102, 126, 150],
+ [6, 24, 50, 76, 102, 128, 154],
+ [6, 28, 54, 80, 106, 132, 158],
+ [6, 32, 58, 84, 110, 136, 162],
+ [6, 26, 54, 82, 110, 138, 166],
+ [6, 30, 58, 86, 114, 142, 170]
+ ],
+ G15:
+ (1 << 10) |
+ (1 << 8) |
+ (1 << 5) |
+ (1 << 4) |
+ (1 << 2) |
+ (1 << 1) |
+ (1 << 0),
+ G18:
+ (1 << 12) |
+ (1 << 11) |
+ (1 << 10) |
+ (1 << 9) |
+ (1 << 8) |
+ (1 << 5) |
+ (1 << 2) |
+ (1 << 0),
+ G15_MASK: (1 << 14) | (1 << 12) | (1 << 10) | (1 << 4) | (1 << 1),
+ getBCHTypeInfo: function (data) {
+ var d = data << 10
+ while (QRUtil.getBCHDigit(d) - QRUtil.getBCHDigit(QRUtil.G15) >= 0) {
+ d ^=
+ QRUtil.G15 << (QRUtil.getBCHDigit(d) - QRUtil.getBCHDigit(QRUtil.G15))
+ }
+ return ((data << 10) | d) ^ QRUtil.G15_MASK
+ },
+ getBCHTypeNumber: function (data) {
+ var d = data << 12
+ while (QRUtil.getBCHDigit(d) - QRUtil.getBCHDigit(QRUtil.G18) >= 0) {
+ d ^=
+ QRUtil.G18 << (QRUtil.getBCHDigit(d) - QRUtil.getBCHDigit(QRUtil.G18))
+ }
+ return (data << 12) | d
+ },
+ getBCHDigit: function (data) {
+ var digit = 0
+ while (data != 0) {
+ digit++
+ data >>>= 1
+ }
+ return digit
+ },
+ getPatternPosition: function (typeNumber) {
+ return QRUtil.PATTERN_POSITION_TABLE[typeNumber - 1]
+ },
+ getMask: function (maskPattern, i, j) {
+ switch (maskPattern) {
+ case QRMaskPattern.PATTERN000:
+ return (i + j) % 2 == 0
+ case QRMaskPattern.PATTERN001:
+ return i % 2 == 0
+ case QRMaskPattern.PATTERN010:
+ return j % 3 == 0
+ case QRMaskPattern.PATTERN011:
+ return (i + j) % 3 == 0
+ case QRMaskPattern.PATTERN100:
+ return (Math.floor(i / 2) + Math.floor(j / 3)) % 2 == 0
+ case QRMaskPattern.PATTERN101:
+ return (i * j) % 2 + (i * j) % 3 == 0
+ case QRMaskPattern.PATTERN110:
+ return ((i * j) % 2 + (i * j) % 3) % 2 == 0
+ case QRMaskPattern.PATTERN111:
+ return ((i * j) % 3 + (i + j) % 2) % 2 == 0
+ default:
+ throw new Error('bad maskPattern:' + maskPattern)
+ }
+ },
+ getErrorCorrectPolynomial: function (errorCorrectLength) {
+ var a = new QRPolynomial([1], 0)
+ for (var i = 0; i < errorCorrectLength; i++) {
+ a = a.multiply(new QRPolynomial([1, QRMath.gexp(i)], 0))
+ }
+ return a
+ },
+ getLengthInBits: function (mode, type) {
+ if (type >= 1 && type < 10) {
+ switch (mode) {
+ case QRMode.MODE_NUMBER:
+ return 10
+ case QRMode.MODE_ALPHA_NUM:
+ return 9
+ case QRMode.MODE_8BIT_BYTE:
+ return 8
+ case QRMode.MODE_KANJI:
+ return 8
+ default:
+ throw new Error('mode:' + mode)
+ }
+ } else if (type < 27) {
+ switch (mode) {
+ case QRMode.MODE_NUMBER:
+ return 12
+ case QRMode.MODE_ALPHA_NUM:
+ return 11
+ case QRMode.MODE_8BIT_BYTE:
+ return 16
+ case QRMode.MODE_KANJI:
+ return 10
+ default:
+ throw new Error('mode:' + mode)
+ }
+ } else if (type < 41) {
+ switch (mode) {
+ case QRMode.MODE_NUMBER:
+ return 14
+ case QRMode.MODE_ALPHA_NUM:
+ return 13
+ case QRMode.MODE_8BIT_BYTE:
+ return 16
+ case QRMode.MODE_KANJI:
+ return 12
+ default:
+ throw new Error('mode:' + mode)
+ }
+ } else {
+ throw new Error('type:' + type)
+ }
+ },
+ getLostPoint: function (qrCode) {
+ var moduleCount = qrCode.getModuleCount()
+ var lostPoint = 0
+ for (var row = 0; row < moduleCount; row++) {
+ for (var col = 0; col < moduleCount; col++) {
+ var sameCount = 0
+ var dark = qrCode.isDark(row, col)
+ for (var r = -1; r <= 1; r++) {
+ if (row + r < 0 || moduleCount <= row + r) {
+ continue
+ }
+ for (var c = -1; c <= 1; c++) {
+ if (col + c < 0 || moduleCount <= col + c) {
+ continue
+ }
+ if (r == 0 && c == 0) {
+ continue
+ }
+ if (dark == qrCode.isDark(row + r, col + c)) {
+ sameCount++
+ }
+ }
+ }
+ if (sameCount > 5) {
+ lostPoint += 3 + sameCount - 5
+ }
+ }
+ }
+ for (var row = 0; row < moduleCount - 1; row++) {
+ for (var col = 0; col < moduleCount - 1; col++) {
+ var count = 0
+ if (qrCode.isDark(row, col)) count++
+ if (qrCode.isDark(row + 1, col)) count++
+ if (qrCode.isDark(row, col + 1)) count++
+ if (qrCode.isDark(row + 1, col + 1)) count++
+ if (count == 0 || count == 4) {
+ lostPoint += 3
+ }
+ }
+ }
+ for (var row = 0; row < moduleCount; row++) {
+ for (var col = 0; col < moduleCount - 6; col++) {
+ if (
+ qrCode.isDark(row, col) &&
+ !qrCode.isDark(row, col + 1) &&
+ qrCode.isDark(row, col + 2) &&
+ qrCode.isDark(row, col + 3) &&
+ qrCode.isDark(row, col + 4) &&
+ !qrCode.isDark(row, col + 5) &&
+ qrCode.isDark(row, col + 6)
+ ) {
+ lostPoint += 40
+ }
+ }
+ }
+ for (var col = 0; col < moduleCount; col++) {
+ for (var row = 0; row < moduleCount - 6; row++) {
+ if (
+ qrCode.isDark(row, col) &&
+ !qrCode.isDark(row + 1, col) &&
+ qrCode.isDark(row + 2, col) &&
+ qrCode.isDark(row + 3, col) &&
+ qrCode.isDark(row + 4, col) &&
+ !qrCode.isDark(row + 5, col) &&
+ qrCode.isDark(row + 6, col)
+ ) {
+ lostPoint += 40
+ }
+ }
+ }
+ var darkCount = 0
+ for (var col = 0; col < moduleCount; col++) {
+ for (var row = 0; row < moduleCount; row++) {
+ if (qrCode.isDark(row, col)) {
+ darkCount++
+ }
+ }
+ }
+ var ratio = Math.abs(100 * darkCount / moduleCount / moduleCount - 50) / 5
+ lostPoint += ratio * 10
+ return lostPoint
+ }
+ }
+ var QRMath = {
+ glog: function (n) {
+ if (n < 1) {
+ throw new Error('glog(' + n + ')')
+ }
+ return QRMath.LOG_TABLE[n]
+ },
+ gexp: function (n) {
+ while (n < 0) {
+ n += 255
+ }
+ while (n >= 256) {
+ n -= 255
+ }
+ return QRMath.EXP_TABLE[n]
+ },
+ EXP_TABLE: new Array(256),
+ LOG_TABLE: new Array(256)
+ }
+ for (var i = 0; i < 8; i++) {
+ QRMath.EXP_TABLE[i] = 1 << i
+ }
+ for (var i = 8; i < 256; i++) {
+ QRMath.EXP_TABLE[i] =
+ QRMath.EXP_TABLE[i - 4] ^
+ QRMath.EXP_TABLE[i - 5] ^
+ QRMath.EXP_TABLE[i - 6] ^
+ QRMath.EXP_TABLE[i - 8]
+ }
+ for (var i = 0; i < 255; i++) {
+ QRMath.LOG_TABLE[QRMath.EXP_TABLE[i]] = i
+ }
+ function QRPolynomial (num, shift) {
+ if (num.length == undefined) {
+ throw new Error(num.length + '/' + shift)
+ }
+ var offset = 0
+ while (offset < num.length && num[offset] == 0) {
+ offset++
+ }
+ this.num = new Array(num.length - offset + shift)
+ for (var i = 0; i < num.length - offset; i++) {
+ this.num[i] = num[i + offset]
+ }
+ }
+ QRPolynomial.prototype = {
+ get: function (index) {
+ return this.num[index]
+ },
+ getLength: function () {
+ return this.num.length
+ },
+ multiply: function (e) {
+ var num = new Array(this.getLength() + e.getLength() - 1)
+ for (var i = 0; i < this.getLength(); i++) {
+ for (var j = 0; j < e.getLength(); j++) {
+ num[i + j] ^= QRMath.gexp(
+ QRMath.glog(this.get(i)) + QRMath.glog(e.get(j))
+ )
+ }
+ }
+ return new QRPolynomial(num, 0)
+ },
+ mod: function (e) {
+ if (this.getLength() - e.getLength() < 0) {
+ return this
+ }
+ var ratio = QRMath.glog(this.get(0)) - QRMath.glog(e.get(0))
+ var num = new Array(this.getLength())
+ for (var i = 0; i < this.getLength(); i++) {
+ num[i] = this.get(i)
+ }
+ for (var i = 0; i < e.getLength(); i++) {
+ num[i] ^= QRMath.gexp(QRMath.glog(e.get(i)) + ratio)
+ }
+ return new QRPolynomial(num, 0).mod(e)
+ }
+ }
+ function QRRSBlock (totalCount, dataCount) {
+ this.totalCount = totalCount
+ this.dataCount = dataCount
+ }
+ QRRSBlock.RS_BLOCK_TABLE = [
+ [1, 26, 19],
+ [1, 26, 16],
+ [1, 26, 13],
+ [1, 26, 9],
+ [1, 44, 34],
+ [1, 44, 28],
+ [1, 44, 22],
+ [1, 44, 16],
+ [1, 70, 55],
+ [1, 70, 44],
+ [2, 35, 17],
+ [2, 35, 13],
+ [1, 100, 80],
+ [2, 50, 32],
+ [2, 50, 24],
+ [4, 25, 9],
+ [1, 134, 108],
+ [2, 67, 43],
+ [2, 33, 15, 2, 34, 16],
+ [2, 33, 11, 2, 34, 12],
+ [2, 86, 68],
+ [4, 43, 27],
+ [4, 43, 19],
+ [4, 43, 15],
+ [2, 98, 78],
+ [4, 49, 31],
+ [2, 32, 14, 4, 33, 15],
+ [4, 39, 13, 1, 40, 14],
+ [2, 121, 97],
+ [2, 60, 38, 2, 61, 39],
+ [4, 40, 18, 2, 41, 19],
+ [4, 40, 14, 2, 41, 15],
+ [2, 146, 116],
+ [3, 58, 36, 2, 59, 37],
+ [4, 36, 16, 4, 37, 17],
+ [4, 36, 12, 4, 37, 13],
+ [2, 86, 68, 2, 87, 69],
+ [4, 69, 43, 1, 70, 44],
+ [6, 43, 19, 2, 44, 20],
+ [6, 43, 15, 2, 44, 16],
+ [4, 101, 81],
+ [1, 80, 50, 4, 81, 51],
+ [4, 50, 22, 4, 51, 23],
+ [3, 36, 12, 8, 37, 13],
+ [2, 116, 92, 2, 117, 93],
+ [6, 58, 36, 2, 59, 37],
+ [4, 46, 20, 6, 47, 21],
+ [7, 42, 14, 4, 43, 15],
+ [4, 133, 107],
+ [8, 59, 37, 1, 60, 38],
+ [8, 44, 20, 4, 45, 21],
+ [12, 33, 11, 4, 34, 12],
+ [3, 145, 115, 1, 146, 116],
+ [4, 64, 40, 5, 65, 41],
+ [11, 36, 16, 5, 37, 17],
+ [11, 36, 12, 5, 37, 13],
+ [5, 109, 87, 1, 110, 88],
+ [5, 65, 41, 5, 66, 42],
+ [5, 54, 24, 7, 55, 25],
+ [11, 36, 12],
+ [5, 122, 98, 1, 123, 99],
+ [7, 73, 45, 3, 74, 46],
+ [15, 43, 19, 2, 44, 20],
+ [3, 45, 15, 13, 46, 16],
+ [1, 135, 107, 5, 136, 108],
+ [10, 74, 46, 1, 75, 47],
+ [1, 50, 22, 15, 51, 23],
+ [2, 42, 14, 17, 43, 15],
+ [5, 150, 120, 1, 151, 121],
+ [9, 69, 43, 4, 70, 44],
+ [17, 50, 22, 1, 51, 23],
+ [2, 42, 14, 19, 43, 15],
+ [3, 141, 113, 4, 142, 114],
+ [3, 70, 44, 11, 71, 45],
+ [17, 47, 21, 4, 48, 22],
+ [9, 39, 13, 16, 40, 14],
+ [3, 135, 107, 5, 136, 108],
+ [3, 67, 41, 13, 68, 42],
+ [15, 54, 24, 5, 55, 25],
+ [15, 43, 15, 10, 44, 16],
+ [4, 144, 116, 4, 145, 117],
+ [17, 68, 42],
+ [17, 50, 22, 6, 51, 23],
+ [19, 46, 16, 6, 47, 17],
+ [2, 139, 111, 7, 140, 112],
+ [17, 74, 46],
+ [7, 54, 24, 16, 55, 25],
+ [34, 37, 13],
+ [4, 151, 121, 5, 152, 122],
+ [4, 75, 47, 14, 76, 48],
+ [11, 54, 24, 14, 55, 25],
+ [16, 45, 15, 14, 46, 16],
+ [6, 147, 117, 4, 148, 118],
+ [6, 73, 45, 14, 74, 46],
+ [11, 54, 24, 16, 55, 25],
+ [30, 46, 16, 2, 47, 17],
+ [8, 132, 106, 4, 133, 107],
+ [8, 75, 47, 13, 76, 48],
+ [7, 54, 24, 22, 55, 25],
+ [22, 45, 15, 13, 46, 16],
+ [10, 142, 114, 2, 143, 115],
+ [19, 74, 46, 4, 75, 47],
+ [28, 50, 22, 6, 51, 23],
+ [33, 46, 16, 4, 47, 17],
+ [8, 152, 122, 4, 153, 123],
+ [22, 73, 45, 3, 74, 46],
+ [8, 53, 23, 26, 54, 24],
+ [12, 45, 15, 28, 46, 16],
+ [3, 147, 117, 10, 148, 118],
+ [3, 73, 45, 23, 74, 46],
+ [4, 54, 24, 31, 55, 25],
+ [11, 45, 15, 31, 46, 16],
+ [7, 146, 116, 7, 147, 117],
+ [21, 73, 45, 7, 74, 46],
+ [1, 53, 23, 37, 54, 24],
+ [19, 45, 15, 26, 46, 16],
+ [5, 145, 115, 10, 146, 116],
+ [19, 75, 47, 10, 76, 48],
+ [15, 54, 24, 25, 55, 25],
+ [23, 45, 15, 25, 46, 16],
+ [13, 145, 115, 3, 146, 116],
+ [2, 74, 46, 29, 75, 47],
+ [42, 54, 24, 1, 55, 25],
+ [23, 45, 15, 28, 46, 16],
+ [17, 145, 115],
+ [10, 74, 46, 23, 75, 47],
+ [10, 54, 24, 35, 55, 25],
+ [19, 45, 15, 35, 46, 16],
+ [17, 145, 115, 1, 146, 116],
+ [14, 74, 46, 21, 75, 47],
+ [29, 54, 24, 19, 55, 25],
+ [11, 45, 15, 46, 46, 16],
+ [13, 145, 115, 6, 146, 116],
+ [14, 74, 46, 23, 75, 47],
+ [44, 54, 24, 7, 55, 25],
+ [59, 46, 16, 1, 47, 17],
+ [12, 151, 121, 7, 152, 122],
+ [12, 75, 47, 26, 76, 48],
+ [39, 54, 24, 14, 55, 25],
+ [22, 45, 15, 41, 46, 16],
+ [6, 151, 121, 14, 152, 122],
+ [6, 75, 47, 34, 76, 48],
+ [46, 54, 24, 10, 55, 25],
+ [2, 45, 15, 64, 46, 16],
+ [17, 152, 122, 4, 153, 123],
+ [29, 74, 46, 14, 75, 47],
+ [49, 54, 24, 10, 55, 25],
+ [24, 45, 15, 46, 46, 16],
+ [4, 152, 122, 18, 153, 123],
+ [13, 74, 46, 32, 75, 47],
+ [48, 54, 24, 14, 55, 25],
+ [42, 45, 15, 32, 46, 16],
+ [20, 147, 117, 4, 148, 118],
+ [40, 75, 47, 7, 76, 48],
+ [43, 54, 24, 22, 55, 25],
+ [10, 45, 15, 67, 46, 16],
+ [19, 148, 118, 6, 149, 119],
+ [18, 75, 47, 31, 76, 48],
+ [34, 54, 24, 34, 55, 25],
+ [20, 45, 15, 61, 46, 16]
+ ]
+ QRRSBlock.getRSBlocks = function (typeNumber, errorCorrectLevel) {
+ var rsBlock = QRRSBlock.getRsBlockTable(typeNumber, errorCorrectLevel)
+ if (rsBlock == undefined) {
+ throw new Error(
+ 'bad rs block @ typeNumber:' +
+ typeNumber +
+ '/errorCorrectLevel:' +
+ errorCorrectLevel
+ )
+ }
+ var length = rsBlock.length / 3
+ var list = []
+ for (var i = 0; i < length; i++) {
+ var count = rsBlock[i * 3 + 0]
+ var totalCount = rsBlock[i * 3 + 1]
+ var dataCount = rsBlock[i * 3 + 2]
+ for (var j = 0; j < count; j++) {
+ list.push(new QRRSBlock(totalCount, dataCount))
+ }
+ }
+ return list
+ }
+ QRRSBlock.getRsBlockTable = function (typeNumber, errorCorrectLevel) {
+ switch (errorCorrectLevel) {
+ case QRErrorCorrectLevel.L:
+ return QRRSBlock.RS_BLOCK_TABLE[(typeNumber - 1) * 4 + 0]
+ case QRErrorCorrectLevel.M:
+ return QRRSBlock.RS_BLOCK_TABLE[(typeNumber - 1) * 4 + 1]
+ case QRErrorCorrectLevel.Q:
+ return QRRSBlock.RS_BLOCK_TABLE[(typeNumber - 1) * 4 + 2]
+ case QRErrorCorrectLevel.H:
+ return QRRSBlock.RS_BLOCK_TABLE[(typeNumber - 1) * 4 + 3]
+ default:
+ return undefined
+ }
+ }
+ function QRBitBuffer () {
+ this.buffer = []
+ this.length = 0
+ }
+ QRBitBuffer.prototype = {
+ get: function (index) {
+ var bufIndex = Math.floor(index / 8)
+ return ((this.buffer[bufIndex] >>> (7 - index % 8)) & 1) == 1
+ },
+ put: function (num, length) {
+ for (var i = 0; i < length; i++) {
+ this.putBit(((num >>> (length - i - 1)) & 1) == 1)
+ }
+ },
+ getLengthInBits: function () {
+ return this.length
+ },
+ putBit: function (bit) {
+ var bufIndex = Math.floor(this.length / 8)
+ if (this.buffer.length <= bufIndex) {
+ this.buffer.push(0)
+ }
+ if (bit) {
+ this.buffer[bufIndex] |= 0x80 >>> (this.length % 8)
+ }
+ this.length++
+ }
+ }
+ var QRCodeLimitLength = [
+ [17, 14, 11, 7],
+ [32, 26, 20, 14],
+ [53, 42, 32, 24],
+ [78, 62, 46, 34],
+ [106, 84, 60, 44],
+ [134, 106, 74, 58],
+ [154, 122, 86, 64],
+ [192, 152, 108, 84],
+ [230, 180, 130, 98],
+ [271, 213, 151, 119],
+ [321, 251, 177, 137],
+ [367, 287, 203, 155],
+ [425, 331, 241, 177],
+ [458, 362, 258, 194],
+ [520, 412, 292, 220],
+ [586, 450, 322, 250],
+ [644, 504, 364, 280],
+ [718, 560, 394, 310],
+ [792, 624, 442, 338],
+ [858, 666, 482, 382],
+ [929, 711, 509, 403],
+ [1003, 779, 565, 439],
+ [1091, 857, 611, 461],
+ [1171, 911, 661, 511],
+ [1273, 997, 715, 535],
+ [1367, 1059, 751, 593],
+ [1465, 1125, 805, 625],
+ [1528, 1190, 868, 658],
+ [1628, 1264, 908, 698],
+ [1732, 1370, 982, 742],
+ [1840, 1452, 1030, 790],
+ [1952, 1538, 1112, 842],
+ [2068, 1628, 1168, 898],
+ [2188, 1722, 1228, 958],
+ [2303, 1809, 1283, 983],
+ [2431, 1911, 1351, 1051],
+ [2563, 1989, 1423, 1093],
+ [2699, 2099, 1499, 1139],
+ [2809, 2213, 1579, 1219],
+ [2953, 2331, 1663, 1273]
+ ]
+
+ function _isSupportCanvas () {
+ return typeof CanvasRenderingContext2D !== 'undefined'
+ }
+
+ // android 2.x doesn't support Data-URI spec
+ function _getAndroid () {
+ var android = false
+ var sAgent = navigator.userAgent
+
+ if (/android/i.test(sAgent)) {
+ // android
+ android = true
+ var aMat = sAgent.toString().match(/android ([0-9]\.[0-9])/i)
+
+ if (aMat && aMat[1]) {
+ android = parseFloat(aMat[1])
+ }
+ }
+
+ return android
+ }
+
+ var svgDrawer = (function () {
+ var Drawing = function (el, htOption) {
+ this._el = el
+ this._htOption = htOption
+ }
+
+ Drawing.prototype.draw = function (oQRCode) {
+ var _htOption = this._htOption
+ var _el = this._el
+ var nCount = oQRCode.getModuleCount()
+ var nWidth = Math.floor(_htOption.width / nCount)
+ var nHeight = Math.floor(_htOption.height / nCount)
+
+ this.clear()
+
+ function makeSVG (tag, attrs) {
+ var el = document.createElementNS('http://www.w3.org/2000/svg', tag)
+ for (var k in attrs) {
+ if (attrs.hasOwnProperty(k)) el.setAttribute(k, attrs[k])
+ }
+ return el
+ }
+
+ var svg = makeSVG('svg', {
+ viewBox: '0 0 ' + String(nCount) + ' ' + String(nCount),
+ width: '100%',
+ height: '100%',
+ fill: _htOption.colorLight
+ })
+ svg.setAttributeNS(
+ 'http://www.w3.org/2000/xmlns/',
+ 'xmlns:xlink',
+ 'http://www.w3.org/1999/xlink'
+ )
+ _el.appendChild(svg)
+
+ svg.appendChild(
+ makeSVG('rect', {
+ fill: _htOption.colorLight,
+ width: '100%',
+ height: '100%'
+ })
+ )
+ svg.appendChild(
+ makeSVG('rect', {
+ fill: _htOption.colorDark,
+ width: '1',
+ height: '1',
+ id: 'template'
+ })
+ )
+
+ for (var row = 0; row < nCount; row++) {
+ for (var col = 0; col < nCount; col++) {
+ if (oQRCode.isDark(row, col)) {
+ var child = makeSVG('use', { x: String(col), y: String(row) })
+ child.setAttributeNS(
+ 'http://www.w3.org/1999/xlink',
+ 'href',
+ '#template'
+ )
+ svg.appendChild(child)
+ }
+ }
+ }
+ }
+ Drawing.prototype.clear = function () {
+ while (this._el.hasChildNodes()) this._el.removeChild(this._el.lastChild)
+ }
+ return Drawing
+ })()
+
+ var useSVG = document.documentElement.tagName.toLowerCase() === 'svg'
+
+ // Drawing in DOM by using Table tag
+ var Drawing = useSVG
+ ? svgDrawer
+ : !_isSupportCanvas()
+ ? (function () {
+ var Drawing = function (el, htOption) {
+ this._el = el
+ this._htOption = htOption
+ }
+
+ /**
+ * Draw the QRCode
+ *
+ * @param {QRCode} oQRCode
+ */
+ Drawing.prototype.draw = function (oQRCode) {
+ var _htOption = this._htOption
+ var _el = this._el
+ var nCount = oQRCode.getModuleCount()
+ var nWidth = Math.floor(_htOption.width / nCount)
+ var nHeight = Math.floor(_htOption.height / nCount)
+ var aHTML = ['<table style="border:0;border-collapse:collapse;">']
+
+ for (var row = 0; row < nCount; row++) {
+ aHTML.push('<tr>')
+
+ for (var col = 0; col < nCount; col++) {
+ aHTML.push(
+ '<td style="border:0;border-collapse:collapse;padding:0;margin:0;width:' +
+ nWidth +
+ 'px;height:' +
+ nHeight +
+ 'px;background-color:' +
+ (oQRCode.isDark(row, col)
+ ? _htOption.colorDark
+ : _htOption.colorLight) +
+ ';"></td>'
+ )
+ }
+
+ aHTML.push('</tr>')
+ }
+
+ aHTML.push('</table>')
+ _el.innerHTML = aHTML.join('')
+
+ // Fix the margin values as real size.
+ var elTable = _el.childNodes[0]
+ var nLeftMarginTable = (_htOption.width - elTable.offsetWidth) / 2
+ var nTopMarginTable = (_htOption.height - elTable.offsetHeight) / 2
+
+ if (nLeftMarginTable > 0 && nTopMarginTable > 0) {
+ elTable.style.margin =
+ nTopMarginTable + 'px ' + nLeftMarginTable + 'px'
+ }
+ }
+
+ /**
+ * Clear the QRCode
+ */
+ Drawing.prototype.clear = function () {
+ this._el.innerHTML = ''
+ }
+
+ return Drawing
+ })()
+ : (function () {
+ // Drawing in Canvas
+ function _onMakeImage () {
+ this._elImage.src = this._elCanvas.toDataURL('image/png')
+ this._elImage.style.display = 'block'
+ this._elCanvas.style.display = 'none'
+ }
+
+ // Android 2.1 bug workaround
+ // http://code.google.com/p/android/issues/detail?id=5141
+ if (this._android && this._android <= 2.1) {
+ var factor = 1 / window.devicePixelRatio
+ var drawImage = CanvasRenderingContext2D.prototype.drawImage
+ CanvasRenderingContext2D.prototype.drawImage = function (
+ image,
+ sx,
+ sy,
+ sw,
+ sh,
+ dx,
+ dy,
+ dw,
+ dh
+ ) {
+ if ('nodeName' in image && /img/i.test(image.nodeName)) {
+ for (var i = arguments.length - 1; i >= 1; i--) {
+ arguments[i] = arguments[i] * factor
+ }
+ } else if (typeof dw === 'undefined') {
+ arguments[1] *= factor
+ arguments[2] *= factor
+ arguments[3] *= factor
+ arguments[4] *= factor
+ }
+
+ drawImage.apply(this, arguments)
+ }
+ }
+
+ /**
+ * Check whether the user's browser supports Data URI or not
+ *
+ * @private
+ * @param {Function} fSuccess Occurs if it supports Data URI
+ * @param {Function} fFail Occurs if it doesn't support Data URI
+ */
+ function _safeSetDataURI (fSuccess, fFail) {
+ var self = this
+ self._fFail = fFail
+ self._fSuccess = fSuccess
+
+ // Check it just once
+ if (self._bSupportDataURI === null) {
+ var el = document.createElement('img')
+ var fOnError = function () {
+ self._bSupportDataURI = false
+
+ if (self._fFail) {
+ self._fFail.call(self)
+ }
+ }
+ var fOnSuccess = function () {
+ self._bSupportDataURI = true
+
+ if (self._fSuccess) {
+ self._fSuccess.call(self)
+ }
+ }
+
+ el.onabort = fOnError
+ el.onerror = fOnError
+ el.onload = fOnSuccess
+ el.src =
+ 'data:image/gif;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==' // the Image contains 1px data.
+ } else if (self._bSupportDataURI === true && self._fSuccess) {
+ self._fSuccess.call(self)
+ } else if (self._bSupportDataURI === false && self._fFail) {
+ self._fFail.call(self)
+ }
+ }
+
+ /**
+ * Drawing QRCode by using canvas
+ *
+ * @constructor
+ * @param {HTMLElement} el
+ * @param {Object} htOption QRCode Options
+ */
+ var Drawing = function (el, htOption) {
+ this._bIsPainted = false
+ this._android = _getAndroid()
+
+ this._htOption = htOption
+ this._elCanvas = document.createElement('canvas')
+ this._elCanvas.width = htOption.width
+ this._elCanvas.height = htOption.height
+ el.appendChild(this._elCanvas)
+ this._el = el
+ this._oContext = this._elCanvas.getContext('2d')
+ this._bIsPainted = false
+ this._elImage = document.createElement('img')
+ this._elImage.alt = 'Scan me!'
+ this._elImage.style.display = 'none'
+ this._el.appendChild(this._elImage)
+ this._bSupportDataURI = null
+ }
+
+ /**
+ * Draw the QRCode
+ *
+ * @param {QRCode} oQRCode
+ */
+ Drawing.prototype.draw = function (oQRCode) {
+ var _elImage = this._elImage
+ var _oContext = this._oContext
+ var _htOption = this._htOption
+
+ var nCount = oQRCode.getModuleCount()
+ var nWidth = _htOption.width / nCount
+ var nHeight = _htOption.height / nCount
+ var nRoundedWidth = Math.round(nWidth)
+ var nRoundedHeight = Math.round(nHeight)
+
+ _elImage.style.display = 'none'
+ this.clear()
+
+ for (var row = 0; row < nCount; row++) {
+ for (var col = 0; col < nCount; col++) {
+ var bIsDark = oQRCode.isDark(row, col)
+ var nLeft = col * nWidth
+ var nTop = row * nHeight
+ _oContext.strokeStyle = bIsDark
+ ? _htOption.colorDark
+ : _htOption.colorLight
+ _oContext.lineWidth = 1
+ _oContext.fillStyle = bIsDark
+ ? _htOption.colorDark
+ : _htOption.colorLight
+ _oContext.fillRect(nLeft, nTop, nWidth, nHeight)
+
+ // Prevent anti-aliasing
+ _oContext.strokeRect(
+ Math.floor(nLeft) + 0.5,
+ Math.floor(nTop) + 0.5,
+ nRoundedWidth,
+ nRoundedHeight
+ )
+
+ _oContext.strokeRect(
+ Math.ceil(nLeft) - 0.5,
+ Math.ceil(nTop) - 0.5,
+ nRoundedWidth,
+ nRoundedHeight
+ )
+ }
+ }
+
+ this._bIsPainted = true
+ }
+
+ /**
+ * Make the image from Canvas if the browser supports Data URI.
+ */
+ Drawing.prototype.makeImage = function () {
+ if (this._bIsPainted) {
+ _safeSetDataURI.call(this, _onMakeImage)
+ }
+ }
+
+ /**
+ * Return whether the QRCode is painted or not
+ *
+ * @return {Boolean}
+ */
+ Drawing.prototype.isPainted = function () {
+ return this._bIsPainted
+ }
+
+ /**
+ * Clear the QRCode
+ */
+ Drawing.prototype.clear = function () {
+ this._oContext.clearRect(
+ 0,
+ 0,
+ this._elCanvas.width,
+ this._elCanvas.height
+ )
+ this._bIsPainted = false
+ }
+
+ /**
+ * @private
+ * @param {Number} nNumber
+ */
+ Drawing.prototype.round = function (nNumber) {
+ if (!nNumber) {
+ return nNumber
+ }
+
+ return Math.floor(nNumber * 1000) / 1000
+ }
+
+ return Drawing
+ })()
+
+ /**
+ * Get the type by string length
+ *
+ * @private
+ * @param {String} sText
+ * @param {Number} nCorrectLevel
+ * @return {Number} type
+ */
+ function _getTypeNumber (sText, nCorrectLevel) {
+ var nType = 1
+ var length = _getUTF8Length(sText)
+
+ for (var i = 0, len = QRCodeLimitLength.length; i <= len; i++) {
+ var nLimit = 0
+
+ switch (nCorrectLevel) {
+ case QRErrorCorrectLevel.L:
+ nLimit = QRCodeLimitLength[i][0]
+ break
+ case QRErrorCorrectLevel.M:
+ nLimit = QRCodeLimitLength[i][1]
+ break
+ case QRErrorCorrectLevel.Q:
+ nLimit = QRCodeLimitLength[i][2]
+ break
+ case QRErrorCorrectLevel.H:
+ nLimit = QRCodeLimitLength[i][3]
+ break
+ }
+
+ if (length <= nLimit) {
+ break
+ } else {
+ nType++
+ }
+ }
+
+ if (nType > QRCodeLimitLength.length) {
+ throw new Error('Too long data')
+ }
+
+ return nType
+ }
+
+ function _getUTF8Length (sText) {
+ var replacedText = encodeURI(sText)
+ .toString()
+ .replace(/\%[0-9a-fA-F]{2}/g, 'a')
+ return replacedText.length + (replacedText.length != sText ? 3 : 0)
+ }
+
+ /**
+ * @class QRCode
+ * @constructor
+ * @example
+ * new QRCode(document.getElementById("test"), "http://jindo.dev.naver.com/collie");
+ *
+ * @example
+ * var oQRCode = new QRCode("test", {
+ * text : "http://naver.com",
+ * width : 128,
+ * height : 128
+ * });
+ *
+ * oQRCode.clear(); // Clear the QRCode.
+ * oQRCode.makeCode("http://map.naver.com"); // Re-create the QRCode.
+ *
+ * @param {HTMLElement|String} el target element or 'id' attribute of element.
+ * @param {Object|String} vOption
+ * @param {String} vOption.text QRCode link data
+ * @param {Number} [vOption.width=256]
+ * @param {Number} [vOption.height=256]
+ * @param {String} [vOption.colorDark="#000000"]
+ * @param {String} [vOption.colorLight="#ffffff"]
+ * @param {QRCode.CorrectLevel} [vOption.correctLevel=QRCode.CorrectLevel.H] [L|M|Q|H]
+ */
+ QRCode = function (el, vOption) {
+ this._htOption = {
+ width: 256,
+ height: 256,
+ typeNumber: 4,
+ colorDark: '#000000',
+ colorLight: '#ffffff',
+ correctLevel: QRErrorCorrectLevel.H
+ }
+
+ if (typeof vOption === 'string') {
+ vOption = {
+ text: vOption
+ }
+ }
+
+ // Overwrites options
+ if (vOption) {
+ for (var i in vOption) {
+ this._htOption[i] = vOption[i]
+ }
+ }
+
+ if (typeof el === 'string') {
+ el = document.getElementById(el)
+ }
+
+ if (this._htOption.useSVG) {
+ Drawing = svgDrawer
+ }
+
+ this._android = _getAndroid()
+ this._el = el
+ this._oQRCode = null
+ this._oDrawing = new Drawing(this._el, this._htOption)
+
+ if (this._htOption.text) {
+ this.makeCode(this._htOption.text)
+ }
+ }
+
+ /**
+ * Make the QRCode
+ *
+ * @param {String} sText link data
+ */
+ QRCode.prototype.makeCode = function (sText) {
+ this._oQRCode = new QRCodeModel(
+ _getTypeNumber(sText, this._htOption.correctLevel),
+ this._htOption.correctLevel
+ )
+ this._oQRCode.addData(sText)
+ this._oQRCode.make()
+ this._el.title = sText
+ this._oDrawing.draw(this._oQRCode)
+ this.makeImage()
+ }
+
+ /**
+ * Make the Image from Canvas element
+ * - It occurs automatically
+ * - Android below 3 doesn't support Data-URI spec.
+ *
+ * @private
+ */
+ QRCode.prototype.makeImage = function () {
+ if (
+ typeof this._oDrawing.makeImage === 'function' &&
+ (!this._android || this._android >= 3)
+ ) {
+ this._oDrawing.makeImage()
+ }
+ }
+
+ /**
+ * Clear the QRCode
+ */
+ QRCode.prototype.clear = function () {
+ this._oDrawing.clear()
+ }
+
+ /**
+ * @name QRCode.CorrectLevel
+ */
+ QRCode.CorrectLevel = QRErrorCorrectLevel
+})()
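Beyond the JSDoc example above, the constructor also honors a useSVG option (checked just before the drawer is instantiated); a brief sketch, assuming a container element with id "qr" exists on the page:

var qr = new QRCode(document.getElementById("qr"), {
  text: "https://example.com/",
  width: 128,
  height: 128,
  useSVG: true,                         // selects the SVG drawer instead of canvas/table
  correctLevel: QRCode.CorrectLevel.H   // alias of QRErrorCorrectLevel exported above
});
qr.makeCode("https://example.com/other"); // re-renders with new data
qr.clear();                               // removes the drawn modules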
diff --git a/testing/web-platform/tests/tools/wave/www/lib/query-parser.js b/testing/web-platform/tests/tools/wave/www/lib/query-parser.js
new file mode 100644
index 0000000000..c8ab8333d2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/lib/query-parser.js
@@ -0,0 +1,12 @@
+var QueryParser = {};
+
+QueryParser.parseQuery = function () {
+ var queryParameters = {};
+ var keysAndValues = location.search.replace("?", "").split("&");
+ for (var i = 0; i < keysAndValues.length; i++) {
+ var key = keysAndValues[i].split("=")[0];
+ var value = keysAndValues[i].split("=")[1];
+ queryParameters[key] = value;
+ }
+ return queryParameters;
+};
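A usage sketch for QueryParser.parseQuery; with, say, location.search === "?token=abc&details=true" (values illustrative) it returns a plain key/value object:

var query = QueryParser.parseQuery();
console.log(query.token);   // "abc"
console.log(query.details); // "true" -- values stay strings and are not URI-decoded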
diff --git a/testing/web-platform/tests/tools/wave/www/lib/screen-console.js b/testing/web-platform/tests/tools/wave/www/lib/screen-console.js
new file mode 100644
index 0000000000..0e13b963a6
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/lib/screen-console.js
@@ -0,0 +1,16 @@
+function ScreenConsole(element) {
+ this._element = element;
+}
+
+ScreenConsole.prototype.log = function () {
+ var text = "";
+ for (var i = 0; i < arguments.length; i++) {
+ text += arguments[i] + " ";
+ }
+ console.log(text);
+ this._element.innerText += text + "\n";
+};
+
+ScreenConsole.prototype.clear = function () {
+ this._element.innerText = "";
+};
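A short sketch of ScreenConsole, assuming some element (for example a <pre> with id "console") is used to mirror log output on screen:

var screenConsole = new ScreenConsole(document.getElementById("console")); // element id is illustrative
screenConsole.log("status:", 200); // joins arguments with spaces, logs to console and appends to the element
screenConsole.clear();             // empties the mirrored output again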
diff --git a/testing/web-platform/tests/tools/wave/www/lib/ui.js b/testing/web-platform/tests/tools/wave/www/lib/ui.js
new file mode 100644
index 0000000000..4abbece985
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/lib/ui.js
@@ -0,0 +1,100 @@
+const UI = {
+ createElement: config => {
+ if (!config) return;
+ const elementType = config.element || "div";
+ const element = document.createElement(elementType);
+
+ Object.keys(config).forEach(property => {
+ const value = config[property];
+ switch (property.toLowerCase()) {
+ case "id":
+ case "src":
+ case "style":
+ case "placeholder":
+ case "title":
+ case "accept":
+ element.setAttribute(property, value);
+ return;
+ case "classname":
+ element.setAttribute("class", value);
+ return;
+ case "colspan":
+ element.setAttribute("colspan", value);
+ return;
+ case "text":
+ element.innerText = value;
+ return;
+ case "value":
+ element.value = value;
+ return;
+ case "html":
+ element.innerHTML = value;
+ return;
+ case "onclick":
+ element.onclick = value.bind(element);
+ return;
+ case "onchange":
+ element.onchange = value.bind(element);
+ return;
+ case "onkeydown":
+ element.onkeydown = value.bind(element);
+ return;
+ case "onkeyup":
+ element.onkeyup = value.bind(element);
+ return;
+ case "type":
+ if (elementType === "input") element.setAttribute("type", value);
+ return;
+ case "children":
+ if (value instanceof Array) {
+ value.forEach(child => {
+ const childElement =
+ child instanceof Element ? child : UI.createElement(child);
+ if (!childElement) return;
+ element.appendChild(childElement);
+ });
+ } else {
+ const child = value;
+ const childElement =
+ child instanceof Element ? child : UI.createElement(child);
+ if (!childElement) return;
+          element.appendChild(childElement);
+ }
+ return;
+ case "disabled":
+ if (value) element.setAttribute("disabled", true);
+ return;
+ case "checked":
+ if (value) element.setAttribute("checked", true);
+ return;
+ case "indeterminate":
+ element.indeterminate = value;
+ return;
+ }
+ });
+ return element;
+ },
+ getElement: id => {
+ return document.getElementById(id);
+ },
+ getRoot: () => {
+ return document.getElementsByTagName("body")[0];
+ },
+ scrollPositions: {},
+ saveScrollPosition: elementId => {
+ let scrollElement = UI.getElement(elementId);
+ if (!scrollElement) return;
+ UI.scrollPositions[elementId] = {
+ scrollLeft: scrollElement.scrollLeft,
+      scrollTop: scrollElement.scrollTop
+ };
+ },
+ loadScrollPosition: elementId => {
+ let scrollElement = UI.getElement(elementId);
+ if (!scrollElement) return;
+ if (!UI.scrollPositions[elementId]) return;
+ scrollElement.scrollLeft = UI.scrollPositions[elementId].scrollLeft;
+    scrollElement.scrollTop = UI.scrollPositions[elementId].scrollTop;
+ }
+};
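+
+// Illustrative sketch of how UI.createElement is used elsewhere in this app
+// (the config below is made up):
+//   var button = UI.createElement({
+//     element: "button",
+//     className: "button is-small",
+//     text: "Reload",
+//     onclick: () => location.reload()
+//   });
+//   UI.getRoot().appendChild(button);
+// Config keys not handled above are silently ignored; "children" accepts a
+// single child config/Element or an array of them.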
diff --git a/testing/web-platform/tests/tools/wave/www/lib/utils.js b/testing/web-platform/tests/tools/wave/www/lib/utils.js
new file mode 100644
index 0000000000..d84a2cb69c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/lib/utils.js
@@ -0,0 +1,57 @@
+const utils = {
+ parseQuery: queryString => {
+ if (queryString.indexOf("?") === -1) return {};
+ queryString = queryString.split("?")[1];
+ const query = {};
+ for (let part of queryString.split("&")) {
+ const keyValue = part.split("=");
+ query[keyValue[0]] = keyValue[1] ? keyValue[1] : null;
+ }
+ return query;
+ },
+ percent: (count, total) => {
+ const percent = Math.floor((count / total) * 10000) / 100;
+ if (!percent) {
+ return 0;
+ }
+ return percent;
+ },
+ saveBlobAsFile: (blob, filename) => {
+ const url = URL.createObjectURL(blob);
+ const a = document.createElement("a");
+ a.style.display = "none";
+ document.body.appendChild(a);
+ a.href = url;
+ a.download = filename;
+ a.click();
+ document.body.removeChild(a);
+ },
+ millisToTimeString(totalMilliseconds) {
+ let milliseconds = (totalMilliseconds % 1000) + "";
+ milliseconds = milliseconds.padStart(3, "0");
+ let seconds = (Math.floor(totalMilliseconds / 1000) % 60) + "";
+ seconds = seconds.padStart(2, "0");
+ let minutes = (Math.floor(totalMilliseconds / 60000) % 60) + "";
+ minutes = minutes.padStart(2, "0");
+ let hours = Math.floor(totalMilliseconds / 3600000) + "";
+ hours = hours.padStart(2, "0");
+ return `${hours}:${minutes}:${seconds}`;
+ },
+ getBrowserIcon(browser) {
+ switch (browser.toLowerCase()) {
+ case "firefox":
+ return "fab fa-firefox";
+ case "edge":
+ return "fab fa-edge";
+ case "chrome":
+ case "chromium":
+ return "fab fa-chrome";
+ case "safari":
+ case "webkit":
+ return "fab fa-safari";
+ }
+ },
+ copyObject(object) {
+ return JSON.parse(JSON.stringify(object));
+ }
+};
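+
+// Illustrative examples (inputs are invented):
+//   utils.parseQuery("?token=abc&x=1")   // => { token: "abc", x: "1" }
+//   utils.percent(3, 8)                  // => 37.5
+//   utils.millisToTimeString(3723000)    // => "01:02:03"
+//   utils.getBrowserIcon("Chrome")       // => "fab fa-chrome"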
diff --git a/testing/web-platform/tests/tools/wave/www/lib/wave-service.js b/testing/web-platform/tests/tools/wave/www/lib/wave-service.js
new file mode 100644
index 0000000000..f7a60e153d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/lib/wave-service.js
@@ -0,0 +1,866 @@
+function sendRequest(method, uri, headers, data, onSuccess, onError) {
+ var xhr = new XMLHttpRequest();
+ xhr.onload = function () {
+ if (xhr.status === 200) {
+ onSuccess(xhr.response);
+ } else {
+ if (onError) onError(xhr.status, xhr.response);
+ }
+ };
+ xhr.onerror = function () {
+ if (onError) onError();
+ };
+ xhr.open(method, WaveService.uriPrefix + uri, true);
+ for (var header in headers) {
+ xhr.setRequestHeader(header, headers[header]);
+ }
+ xhr.send(data);
+ return xhr;
+}
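+// sendRequest is a thin XMLHttpRequest wrapper used by all WaveService calls.
+// A hypothetical call (the endpoint appears below in readStatus) looks like:
+//   sendRequest(
+//     "GET",
+//     "api/status",   // resolved against WaveService.uriPrefix
+//     null,           // headers
+//     null,           // request body
+//     function (response) { console.log(JSON.parse(response)); },
+//     function (status, response) { console.error("failed with status", status); }
+//   );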
+
+var WEB_ROOT = "{{WEB_ROOT}}";
+var HTTP_PORT = "{{HTTP_PORT}}";
+var HTTPS_PORT = "{{HTTPS_PORT}}";
+var OPEN = "open";
+var CLOSED = "closed";
+
+var WaveService = {
+ uriPrefix: WEB_ROOT,
+ socket: {
+ state: CLOSED,
+ onMessage: function () {},
+ onOpen: function () {},
+ onClose: function () {},
+ send: function () {},
+ close: function () {},
+ onStateChange: function () {},
+ },
+ // SESSIONS API
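+  // The configuration object handed to createSession (and updateSession below)
+  // is expected to look roughly like this; all values are invented for
+  // illustration:
+  //   {
+  //     tests: { include: ["/2dcontext", "/fetch"] },
+  //     types: ["automatic"],
+  //     timeouts: { automatic: 60000 },
+  //     referenceTokens: [],
+  //     expirationDate: null,
+  //     labels: ["nightly"]
+  //   }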
+ createSession: function (configuration, onSuccess, onError) {
+ var data = JSON.stringify({
+ tests: configuration.tests,
+ types: configuration.types,
+ timeouts: configuration.timeouts,
+ reference_tokens: configuration.referenceTokens,
+ expiration_date: configuration.expirationDate,
+ labels: configuration.labels,
+ });
+ sendRequest(
+ "POST",
+ "api/sessions",
+ { "Content-Type": "application/json" },
+ data,
+ function (response) {
+ var jsonObject = JSON.parse(response);
+ onSuccess(jsonObject.token);
+ },
+ onError
+ );
+ },
+ readSession: function (token, onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/sessions/" + token,
+ null,
+ null,
+ function (response) {
+ var jsonObject = JSON.parse(response);
+ onSuccess({
+ token: jsonObject.token,
+ tests: jsonObject.tests,
+ types: jsonObject.types,
+ userAgent: jsonObject.user_agent,
+ labels: jsonObject.labels,
+ timeouts: jsonObject.timeouts,
+ browser: jsonObject.browser,
+ isPublic: jsonObject.is_public,
+ referenceTokens: jsonObject.reference_tokens,
+ });
+ },
+ onError
+ );
+ },
+ readMultipleSessions: function (tokens, onSuccess, onError) {
+ var requestsLeft = tokens.length;
+ if (requestsLeft === 0) onSuccess([]);
+ var configurations = [];
+ for (var i = 0; i < tokens.length; i++) {
+ var token = tokens[i];
+ WaveService.readSession(
+ token,
+ function (configuration) {
+ requestsLeft--;
+ configurations.push(configuration);
+ if (requestsLeft === 0) onSuccess(configurations);
+ },
+ function (status) {
+ if (status === 404) requestsLeft--;
+ if (status !== 404 && onError) onError();
+ if (requestsLeft === 0) onSuccess(configurations);
+ }
+ );
+ }
+ },
+ readSessionStatus: function (token, onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/sessions/" + token + "/status",
+ null,
+ null,
+ function (response) {
+ var jsonObject = JSON.parse(response);
+ var dateStarted = null;
+ if (jsonObject.date_started) {
+ dateStarted = new Date(jsonObject.date_started);
+ }
+ var dateFinished = null;
+ if (jsonObject.date_finished) {
+ dateFinished = new Date(jsonObject.date_finished);
+ }
+ var expirationDate = null;
+ if (jsonObject.expiration_date) {
+ expirationDate = new Date(jsonObject.expiration_date);
+ }
+ onSuccess({
+ token: jsonObject.token,
+ dateStarted: dateStarted,
+ dateFinished: dateFinished,
+ testFilesCount: jsonObject.test_files_count,
+ testFilesCompleted: jsonObject.test_files_completed,
+ status: jsonObject.status,
+ expirationDate: expirationDate,
+ });
+ },
+ function () {
+ if (onError) onError();
+ }
+ );
+ },
+ readMultipleSessionStatuses: function (tokens, onSuccess, onError) {
+ var requestsLeft = tokens.length;
+ if (requestsLeft === 0) onSuccess([]);
+ var statuses = [];
+ for (var i = 0; i < tokens.length; i++) {
+ var token = tokens[i];
+ WaveService.readSessionStatus(
+ token,
+ function (status) {
+ requestsLeft--;
+ statuses.push(status);
+ if (requestsLeft === 0) onSuccess(statuses);
+ },
+ function () {
+ requestsLeft--;
+ if (requestsLeft === 0) onSuccess(statuses);
+ }
+ );
+ }
+ },
+ readPublicSessions: function (onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/sessions/public",
+ null,
+ null,
+ function (response) {
+ var jsonObject = JSON.parse(response);
+ onSuccess(jsonObject);
+ },
+ onError
+ );
+ },
+ updateSession: function (token, configuration, onSuccess, onError) {
+ var data = JSON.stringify({
+ tests: configuration.tests,
+ types: configuration.types,
+ timeouts: configuration.timeouts,
+ reference_tokens: configuration.referenceTokens,
+ expiration_date: configuration.expirationDate,
+ type: configuration.type,
+ });
+ sendRequest(
+ "PUT",
+ "api/sessions/" + token,
+ { "Content-Type": "application/json" },
+ data,
+ function () {
+ onSuccess();
+ },
+ onError
+ );
+ },
+ updateLabels: function (token, labels, onSuccess, onError) {
+ var data = JSON.stringify({ labels: labels });
+ sendRequest(
+ "PUT",
+ "api/sessions/" + token + "/labels",
+ { "Content-Type": "application/json" },
+ data,
+ function () {
+ if (onSuccess) onSuccess();
+ },
+ onError
+ );
+ },
+ findToken: function (fragment, onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/sessions/" + fragment,
+ null,
+ null,
+ function (response) {
+ var jsonObject = JSON.parse(response);
+ onSuccess(jsonObject.token);
+ },
+ onError
+ );
+ },
+ startSession: function (token, onSuccess, onError) {
+ sendRequest(
+ "POST",
+ "api/sessions/" + token + "/start",
+ null,
+ null,
+ function () {
+ onSuccess();
+ },
+ onError
+ );
+ },
+ pauseSession: function (token, onSuccess, onError) {
+ sendRequest(
+ "POST",
+ "api/sessions/" + token + "/pause",
+ null,
+ null,
+ function () {
+ onSuccess();
+ },
+ onError
+ );
+ },
+ stopSession: function (token, onSuccess, onError) {
+ sendRequest(
+ "POST",
+ "api/sessions/" + token + "/stop",
+ null,
+ null,
+ function () {
+ onSuccess();
+ },
+ onError
+ );
+ },
+ resumeSession: function (token, resumeToken, onSuccess, onError) {
+ var data = JSON.stringify({ resume_token: resumeToken });
+ sendRequest(
+ "POST",
+ "api/sessions/" + token + "/resume",
+ { "Content-Type": "application/json" },
+ data,
+ function () {
+ if (onSuccess) onSuccess();
+ },
+ function (response) {
+ if (onError) onError(response);
+ }
+ );
+ },
+ deleteSession: function (token, onSuccess, onError) {
+ sendRequest(
+ "DELETE",
+ "api/sessions/" + token,
+ null,
+ null,
+ function () {
+ onSuccess();
+ },
+ onError
+ );
+ },
+
+ // TESTS API
+ readTestList: function (onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/tests",
+ null,
+ null,
+ function (response) {
+ var jsonObject = JSON.parse(response);
+ onSuccess(jsonObject);
+ },
+ onError
+ );
+ },
+ readNextTest: function (token, onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/tests/" + token + "/next",
+ null,
+ null,
+ function (response) {
+ var jsonObject = JSON.parse(response);
+ onSuccess(jsonObject.next_test);
+ },
+ onError
+ );
+ },
+ readLastCompletedTests: function (token, resultTypes, onSuccess, onError) {
+ var status = "";
+ if (resultTypes) {
+ for (var i = 0; i < resultTypes.length; i++) {
+ var type = resultTypes[i];
+ status += type + ",";
+ }
+ }
+ sendRequest(
+ "GET",
+ "api/tests/" + token + "/last_completed?status=" + status,
+ null,
+ null,
+ function (response) {
+ var tests = JSON.parse(response);
+ var parsedTests = [];
+ for (var status in tests) {
+ for (var i = 0; i < tests[status].length; i++) {
+ var path = tests[status][i];
+ parsedTests.push({ path: path, status: status });
+ }
+ }
+ onSuccess(parsedTests);
+ },
+ onError
+ );
+ },
+ readMalfunctioningTests: function (token, onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/tests/" + token + "/malfunctioning",
+ null,
+ null,
+ function (response) {
+ var tests = JSON.parse(response);
+ onSuccess(tests);
+ },
+ function (response) {
+ var errorMessage = JSON.parse(response).error;
+ onError(errorMessage);
+ }
+ );
+ },
+ updateMalfunctioningTests: function (
+ token,
+ malfunctioningTests,
+ onSuccess,
+ onError
+ ) {
+ var data = JSON.stringify(malfunctioningTests);
+ sendRequest(
+ "PUT",
+ "api/tests/" + token + "/malfunctioning",
+ { "Content-Type": "application/json" },
+ data,
+ function () {
+ onSuccess();
+ },
+ function (response) {
+ var errorMessage = JSON.parse(response).error;
+ onError(errorMessage);
+ }
+ );
+ },
+ readAvailableApis: function (onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/tests/apis",
+ null,
+ null,
+ function (response) {
+ var apis = JSON.parse(response);
+ onSuccess(apis);
+ },
+ function (response) {
+ if (!onError) return;
+ var errorMessage = JSON.parse(response).error;
+ onError(errorMessage);
+ }
+ );
+ },
+
+ // RESULTS API
+ createResult: function (token, result, onSuccess, onError) {
+ sendRequest(
+ "POST",
+ "api/results/" + token,
+ { "Content-Type": "application/json" },
+ JSON.stringify(result),
+ function () {
+ onSuccess();
+ },
+              columnB = sessionB[column].name + sessionB[column].version;
+ );
+ },
+ readResults: function (token, onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/results/" + token,
+ null,
+ null,
+ function (response) {
+ onSuccess(JSON.parse(response));
+ },
+ onError
+ );
+ },
+ readResultsCompact: function (token, onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/results/" + token + "/compact",
+ null,
+ null,
+ function (response) {
+ var jsonObject = JSON.parse(response);
+ onSuccess(jsonObject);
+ },
+ onError
+ );
+ },
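+  // readResultComparison (below) builds an object keyed by session token plus a
+  // "total" entry, for example (tokens and counts invented):
+  //   { total: { apiA: 120 }, "token-1": { apiA: 98 }, "token-2": { apiA: 102 } }
+  // Per-token values are pass counts; "total" is the overall result count per API.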
+ readResultComparison: function (tokens, onSuccess, onError) {
+ var comparison = {};
+ var fetchComplete = function (results) {
+ comparison.total = {};
+ for (var i = 0; i < results.length; i++) {
+ var result = results[i];
+ var token = result.token;
+ comparison[token] = {};
+ for (var api in result) {
+ if (api === "token") continue;
+ comparison[token][api] = result[api].pass;
+ if (!comparison.total[api]) {
+ var total = 0;
+ for (var status in result[api]) {
+ total = total + result[api][status];
+ }
+ comparison.total[api] = total;
+ }
+ }
+ }
+ onSuccess(comparison);
+ };
+ var requestsLeft = tokens.length;
+ if (requestsLeft === 0) onSuccess([]);
+ var results = [];
+ for (var i = 0; i < tokens.length; i++) {
+ var token = tokens[i];
+ (function (token) {
+ WaveService.readResultsCompact(
+ token,
+ function (result) {
+ requestsLeft--;
+ result.token = token;
+ results.push(result);
+ if (requestsLeft === 0) fetchComplete(results);
+ },
+ function (responseStatus) {
+ if (responseStatus === 404) requestsLeft--;
+            if (responseStatus !== 404 && onError) onError();
+ if (requestsLeft === 0) fetchComplete(results);
+ }
+ );
+ })(token);
+ }
+ },
+ downloadResults: function (token) {
+ location.href = "api/results/" + token + "/export";
+ },
+ downloadApiResult: function (token, api) {
+ location.href = "api/results/" + token + "/" + api + "/json";
+ },
+ downloadAllApiResults: function (token, api) {
+ location.href = "api/results/" + token + "/json";
+ },
+ downloadReport: function (token, api) {
+ location.href = "api/results/" + token + "/" + api + "/report";
+ },
+ importResults: function (data, onSuccess, onError) {
+ sendRequest(
+ "POST",
+ "api/results/import",
+ { "Content-Type": "application/octet-stream" },
+ data,
+ function (response) {
+ var token = JSON.parse(response).token;
+ onSuccess(token);
+ },
+ function (status, response) {
+ var errorMessage;
+ if (status === 500) {
+ errorMessage = "Internal server error.";
+ } else {
+ errorMessage = JSON.parse(response).error;
+ }
+ onError(errorMessage);
+ }
+ );
+ },
+ readReportUri: function (token, api, onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/results/" + token + "/" + api + "/reporturl",
+ null,
+ null,
+ function (response) {
+ var jsonObject = JSON.parse(response);
+ onSuccess(jsonObject.uri);
+ },
+ onError
+ );
+ },
+ downloadMultiReport: function (tokens, api) {
+ location.href = "api/results/" + api + "/report?tokens=" + tokens.join(",");
+ },
+ readMultiReportUri: function (tokens, api, onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/results/" + api + "/reporturl?tokens=" + tokens.join(","),
+ null,
+ null,
+ function (response) {
+ var jsonObject = JSON.parse(response);
+ onSuccess(jsonObject.uri);
+ },
+ onError
+ );
+ },
+ downloadResultsOverview: function (token) {
+ location.href = "api/results/" + token + "/overview";
+ },
+
+ // DEVICES API
+ _device_token: null,
+ _deviceEventListeners: {},
+ _deviceEventNumbers: {},
+ registerDevice: function (onSuccess, onError) {
+ sendRequest(
+ "POST",
+ "api/devices",
+ null,
+ null,
+ function (response) {
+ var data = JSON.parse(response);
+ WaveService._device_token = data.token;
+ onSuccess(data.token);
+ },
+ onError
+ );
+ },
+ readDevice: function (token, onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/devices/" + token,
+ null,
+ null,
+ function (response) {
+ if (!onSuccess) return;
+ var data = JSON.parse(response);
+ onSuccess(data);
+ },
+ function (error) {
+ if (!onError) return;
+ onError(error);
+ }
+ );
+ },
+ DEVICE_ADDED_EVENT: "device_added",
+ DEVICE_REMOVED_EVENT: "device_removed",
+ START_SESSION: "start_session",
+ addDeviceEventListener: function (token, callback) {
+ var listeners = WaveService._deviceEventListeners;
+ if (!listeners[token]) listeners[token] = [];
+ listeners[token].push(callback);
+ WaveService._deviceEventListeners = listeners;
+ WaveService.listenDeviceEvents(token);
+ },
+ removeDeviceEventListener: function (callback) {
+ var listeners = WaveService._deviceEventListeners;
+ for (var token of Object.keys(listeners)) {
+ var index = listeners[token].indexOf(callback);
+ if (index === -1) continue;
+ listeners[token].splice(index, 1);
+ break;
+ }
+ WaveService._deviceEventListeners = listeners;
+ },
+ listenDeviceEvents: function (token) {
+ var listeners = WaveService._deviceEventListeners;
+    if (!listeners[token] || listeners[token].length === 0) return;
+ var url = "api/devices/" + token + "/events";
+ var lastEventNumber = WaveService._deviceEventNumbers[token];
+ if (lastEventNumber) {
+ url += "?last_active=" + lastEventNumber;
+ }
+ WaveService.listenHttpPolling(
+ url,
+ function (response) {
+ if (!response) {
+ WaveService.listenDeviceEvents(token);
+ return;
+ }
+ for (var listener of listeners[token]) {
+ listener(response);
+ }
+ WaveService._deviceEventNumbers[token] = lastEventNumber;
+ WaveService.listenDeviceEvents(token);
+ },
+ function () {
+ setTimeout(function () {
+          WaveService.listenDeviceEvents(token);
+ }, 1000);
+ }
+ );
+ },
+ sendDeviceEvent: function (device_token, event, onSuccess, onError) {
+ var data = JSON.stringify({
+ type: event.type,
+ data: event.data,
+ });
+ sendRequest(
+ "POST",
+ "api/devices/" + device_token + "/events",
+ { "Content-Type": "application/json" },
+ data,
+ onSuccess,
+ onError
+ );
+ },
+ addGlobalDeviceEventListener: function (callback) {
+ WaveService._globalDeviceEventListeners.push(callback);
+ WaveService.listenGlobalDeviceEvents();
+ },
+ removeGlobalDeviceEventListener: function (callback) {
+ var index = WaveService._globalDeviceEventListeners.indexOf(callback);
+ WaveService._globalDeviceEventListeners.splice(index, 1);
+ },
+ listenGlobalDeviceEvents: function () {
+ var listeners = WaveService._globalDeviceEventListeners;
+ if (listeners.length === 0) return;
+ var query = "";
+ if (WaveService._device_token) {
+ query = "?device_token=" + WaveService._device_token;
+ }
+ WaveService.listenHttpPolling(
+ "api/devices/events" + query,
+ function (response) {
+ if (!response) {
+ WaveService.listenGlobalDeviceEvents();
+ return;
+ }
+ for (var listener of listeners) {
+ listener(response);
+ }
+ WaveService.listenGlobalDeviceEvents();
+ },
+ function () {
+ setTimeout(function () {
+ WaveService.listenGlobalDeviceEvents();
+ }, 1000);
+ }
+ );
+ },
+ sendGlobalDeviceEvent: function (event, onSuccess, onError) {
+ var data = JSON.stringify({
+ type: event.type,
+ data: event.data,
+ });
+ sendRequest(
+ "POST",
+ "api/devices/events",
+ { "Content-Type": "application/json" },
+ data,
+ onSuccess,
+ onError
+ );
+ },
+
+ // GENERAL API
+ readStatus: function (onSuccess, onError) {
+ sendRequest(
+ "GET",
+ "api/status",
+ null,
+ null,
+ function (response) {
+ var data = JSON.parse(response);
+ var configuration = {
+ readSessionsEnabled: data.read_sessions_enabled,
+ importResultsEnabled: data.import_results_enabled,
+ reportsEnabled: data.reports_enabled,
+ versionString: data.version_string,
+ testTypeSelectionEnabled: data.test_type_selection_enabled,
+ testFileSelectionEnabled: data.test_file_selection_enabled
+ };
+ onSuccess(configuration);
+ },
+ onError
+ );
+ },
+
+ // UTILITY
+ addRecentSession: function (token) {
+ if (!token) return;
+ var state = WaveService.getState();
+ if (!state.recent_sessions) state.recent_sessions = [];
+ if (state.recent_sessions.indexOf(token) !== -1) return;
+ state.recent_sessions.unshift(token);
+ WaveService.setState(state);
+ },
+ addRecentSessions: function (tokens) {
+ for (var i = 0; i < tokens.length; i++) {
+ var token = tokens[i];
+ WaveService.addRecentSession(token);
+ }
+ },
+ getPinnedSessions: function () {
+ var state = WaveService.getState();
+ if (!state || !state.pinned_sessions) return [];
+ return state.pinned_sessions;
+ },
+ addPinnedSession: function (token) {
+ if (!token) return;
+ var state = WaveService.getState();
+ if (!state.pinned_sessions) state.pinned_sessions = [];
+ if (state.pinned_sessions.indexOf(token) !== -1) return;
+ state.pinned_sessions.unshift(token);
+ WaveService.setState(state);
+ },
+ getRecentSessions: function () {
+ var state = WaveService.getState();
+ if (!state || !state.recent_sessions) return [];
+ return state.recent_sessions;
+ },
+ setRecentSessions: function (sessionTokens) {
+ var state = WaveService.getState();
+ state.recent_sessions = sessionTokens;
+ WaveService.setState(state);
+ },
+ removePinnedSession: function (token) {
+ if (!token) return;
+ var state = WaveService.getState();
+ if (!state.pinned_sessions) return;
+ var index = state.pinned_sessions.indexOf(token);
+ if (index === -1) return;
+ state.pinned_sessions.splice(index, 1);
+ WaveService.setState(state);
+ },
+ removeRecentSession: function (token) {
+ var state = WaveService.getState();
+ if (!state.recent_sessions) return;
+ var index = state.recent_sessions.indexOf(token);
+ if (index === -1) return;
+ state.recent_sessions.splice(index, 1);
+ WaveService.setState(state);
+ },
+ getState: function () {
+ if (!window.localStorage) return null;
+ var storage = window.localStorage;
+ var state = JSON.parse(storage.getItem("wave"));
+ if (!state) return {};
+ return state;
+ },
+ setState: function (state) {
+ if (!window.localStorage) return null;
+ var storage = window.localStorage;
+ storage.setItem("wave", JSON.stringify(state));
+ },
+ _globalDeviceEventListeners: [],
+ _sessionEventListeners: {},
+ _sessionEventNumbers: {},
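+  // listenHttpPolling (below) performs a single long-poll GET with a unique
+  // "id" query parameter appended, hands the parsed JSON body to onSuccess, or
+  // null when the response is empty; callers such as listenSessionEvents
+  // re-invoke it to keep the polling loop alive.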
+ listenHttpPolling: function (url, onSuccess, onError) {
+ var uniqueId = new Date().getTime();
+ if (url.indexOf("?") === -1) {
+ url = url + "?id=" + uniqueId;
+ } else {
+ url = url + "&id=" + uniqueId;
+ }
+ sendRequest(
+ "GET",
+ url,
+ null,
+ null,
+ function (response) {
+ if (!response) {
+ onSuccess(null);
+ return;
+ }
+ onSuccess(JSON.parse(response));
+ },
+ onError
+ );
+ },
+ addSessionEventListener: function (token, callback) {
+ var listeners = WaveService._sessionEventListeners;
+ if (!listeners[token]) listeners[token] = [];
+ if (listeners[token].indexOf(callback) >= 0) return;
+ listeners[token].push(callback);
+ WaveService._sessionEventListeners = listeners;
+ WaveService.listenSessionEvents(token);
+ },
+ removeSessionEventListener: function (callback) {
+ var listeners = WaveService._sessionEventListeners;
+ for (var token of Object.keys(listeners)) {
+ var index = listeners[token].indexOf(callback);
+ if (index === -1) continue;
+ listeners[token].splice(index, 1);
+ break;
+ }
+ WaveService._sessionEventListeners = listeners;
+ },
+ listenSessionEvents: function (token) {
+ var listeners = WaveService._sessionEventListeners;
+    if (!listeners[token] || listeners[token].length === 0) return;
+ var url = "api/sessions/" + token + "/events";
+ var lastEventNumber = WaveService._sessionEventNumbers[token];
+ if (lastEventNumber) {
+ url += "?last_event=" + lastEventNumber;
+ }
+ WaveService.listenHttpPolling(
+ url,
+ function (response) {
+ if (!response) {
+ WaveService.listenSessionEvents(token);
+ return;
+ }
+ var lastEventNumber = 0;
+ for (var listener of listeners[token]) {
+ for (var event of response) {
+ if (event.number > lastEventNumber) {
+ lastEventNumber = event.number;
+ }
+ listener(event);
+ }
+ }
+ WaveService._sessionEventNumbers[token] = lastEventNumber;
+ WaveService.listenSessionEvents(token);
+ },
+ function () {
+ setTimeout(function () {
+          WaveService.listenSessionEvents(token);
+ }, 1000);
+ }
+ );
+ },
+ openSession: function (token) {
+ location.href = "/results.html?token=" + token;
+ },
+};
+
+if (!Object.keys)
+ Object.keys = function (o) {
+ if (o !== Object(o))
+ throw new TypeError("Object.keys called on a non-object");
+ var k = [],
+ p;
+ for (p in o) if (Object.prototype.hasOwnProperty.call(o, p)) k.push(p);
+ return k;
+ };
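+
+// Typical read-only usage from a page script (the token is a placeholder):
+//   WaveService.readSessionStatus("<session-token>", function (status) {
+//     console.log("Session is " + status.status);
+//   });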
diff --git a/testing/web-platform/tests/tools/wave/www/newsession.html b/testing/web-platform/tests/tools/wave/www/newsession.html
new file mode 100644
index 0000000000..0ff308c8b4
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/newsession.html
@@ -0,0 +1,257 @@
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8" />
+ <title>New Session - Web Platform Test Runner</title>
+ <link rel="stylesheet" href="css/bulma-0.7.5/bulma.min.css" />
+ <link rel="stylesheet" href="css/style.css" />
+ <link rel="stylesheet" href="css/main.css" />
+ <script src="lib/davidshimjs/qrcode.js"></script>
+ <script src="lib/keycodes.js"></script>
+ <script src="lib/wave-service.js"></script>
+ </head>
+ <body>
+ <section class="section">
+ <div class="container site-header">
+ <img src="res/wavelogo_2016.jpg" alt="WAVE Logo" class="site-logo" />
+ <h1 class="title is-spaced">New Session</h1>
+ </div>
+
+ <div class="container">
+ <div id="content">
+ <div id="qr-code"></div>
+ <br />
+ <p>
+          Scan the QR code or visit
+ <a
+ id="results-page-url"
+ style="font-weight: bold; color: #23d160;"
+ target="_blank"
+ ></a>
+          and follow the instructions.
+ </p>
+ <div id="button-wrapper">
+ <button
+ id="start-button"
+ class="button is-large is-success tabbable"
+ >
+ Start
+ </button>
+ <button
+ id="results-button"
+ class="button is-large is-light tabbable"
+ >
+ View Result Page
+ </button>
+ </div>
+
+          <div>(Alternatively, you can start the test by pressing Return.)</div>
+
+ <div id="details-wrapper">
+ <h3 class="title is-5 is-spaced">Details</h3>
+ <div class="detail">
+ <div>Token:</div>
+ <div id="token"></div>
+ </div>
+ <div class="detail">
+ <div>User Agent:</div>
+ <div id="user-agent"></div>
+ </div>
+ <div class="detail">
+ <div>Test Types:</div>
+ <div id="test-types"></div>
+ </div>
+ <div class="detail">
+ <div>Total Test Files:</div>
+ <div id="total-tests"></div>
+ </div>
+ <div class="detail">
+ <div>Reference Tokens:</div>
+ <div id="reference-tokens"></div>
+ </div>
+ <div class="detail">
+ <div>Test Timeouts:</div>
+ <div id="test-timeout"></div>
+ </div>
+ <div class="detail">
+ <div>Test Paths:</div>
+ <div id="test-path"></div>
+ </div>
+ </div>
+ </div>
+ </div>
+ </section>
+
+ <script>
+ var HOSTNAME = location.hostname;
+ var PORT = location.port;
+ var PROTOCOL = location.protocol.replace(/:/, "");
+ var QUERY = location.search.replace(/\?/, "");
+ var match = QUERY.match(/token=([^&]+)/);
+ var TOKEN = match ? match[1] : null;
+ var RESUME = /[\?&]resume=/.test(location.search);
+ if (TOKEN)
+ document.cookie = "resume_token=" + TOKEN + "; expires=Fri, 31 Dec 9999 23:59:59 GMT";
+ var selectedTabbable = -1;
+
+      // The "load" event fires on window, not document; attaching the handler
+      // to window ensures the results page link actually gets populated.
+      window.onload = function() {
+        if (RESUME) {
+          document.getElementById("start-button").innerHTML = "Resume";
+          var newButton = document.getElementById("new-button");
+          if (newButton) newButton.style.display = "block";
+        }
+        document.getElementById("results-page-url").innerHTML =
+          "http://" + location.host + WEB_ROOT + "overview.html";
+        document.getElementById("results-page-url").href =
+          "http://" + location.host + WEB_ROOT + "overview.html";
+      };
+
+ function displaySessionConfiguration(configuration) {
+ var userAgent = document.getElementById("user-agent");
+ userAgent.innerText = configuration.userAgent;
+ var testPath = document.getElementById("test-path");
+ for (var i = 0; i < configuration.tests.include.length; i++) {
+ var path = configuration.tests.include[i];
+ testPath.innerText += path + "\n";
+ }
+ var testTypes = document.getElementById("test-types");
+ testTypes.innerText = configuration.types.join(", ");
+ var testTimeout = document.getElementById("test-timeout");
+ for (var timeout in configuration.timeouts) {
+ testTimeout.innerText +=
+ timeout + ": " + configuration.timeouts[timeout] / 1000 + "s\n";
+ }
+ var referenceTokens = document.getElementById("reference-tokens");
+ if (configuration.referenceTokens.length === 0) {
+ referenceTokens.innerText = "none";
+ } else {
+ for (var i = 0; i < configuration.referenceTokens.length; i++) {
+ var token = configuration.referenceTokens[i];
+ referenceTokens.innerText += token + "\n";
+ }
+ }
+ }
+
+ function displaySessionStatus(status) {
+ var testTypes = document.getElementById("total-tests");
+ var count = 0;
+ for (var api in status.testFilesCount) {
+ count += status.testFilesCount[api];
+ }
+ testTypes.innerText = count;
+ }
+
+ function startTests() {
+ WaveService.startSession(token, function() {
+ WaveService.readNextTest(token, function(url) {
+ location.href = url;
+ });
+ });
+ }
+
+ var resultsUrl =
+ "http://" + location.host + WEB_ROOT + "results.html" + location.search;
+ new QRCode(document.getElementById("qr-code"), resultsUrl);
+
+ var resultsButton = document.getElementById("results-button");
+ resultsButton.onclick = function() {
+ window.open(resultsUrl, "_blank");
+ };
+
+ var startButton = document.getElementById("start-button");
+ startButton.onclick = startTests;
+
+ function removeClass(element, className) {
+ var elementClass = element.className;
+ var index = elementClass.indexOf(className);
+ if (index !== -1) {
+ element.className = elementClass.replace(className, "");
+ }
+ }
+
+ function addClass(element, className) {
+ element.className += " " + className;
+ }
+
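+      // skipFocus cycles the visual focus through every element carrying the
+      // "tabbable" class, wrapping around at both ends; focusNext and
+      // focusPrevious further down simply step by +1 / -1.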
+ function skipFocus(steps) {
+ var tabbables = document.getElementsByClassName("tabbable");
+ if (selectedTabbable === -1) {
+ selectedTabbable = 0;
+ } else {
+ removeClass(tabbables[selectedTabbable], "focused");
+ selectedTabbable += steps;
+ }
+
+ if (selectedTabbable >= tabbables.length) {
+ selectedTabbable = 0;
+ }
+
+ if (selectedTabbable < 0) {
+ selectedTabbable = tabbables.length - 1;
+ }
+
+ tabbables[selectedTabbable].focus();
+ addClass(tabbables[selectedTabbable], "focused");
+ }
+
+ function focusNext() {
+ skipFocus(1);
+ }
+
+ function focusPrevious() {
+ skipFocus(-1);
+ }
+
+ document.onkeydown = function(event) {
+ event = event || window.event;
+ var charCode =
+ typeof event.which === "number" ? event.which : event.keyCode;
+
+ if (ACTION_KEYS.indexOf(charCode) !== -1) {
+ event.preventDefault();
+ if (selectedTabbable === -1) {
+ startTests();
+ return;
+ }
+ var tabbables = document.getElementsByClassName("tabbable");
+ var element = tabbables[selectedTabbable];
+ if (element.type === "checkbox") {
+ element.checked = !element.checked;
+ } else {
+ element.click();
+ }
+ }
+
+ if (PREV_KEYS.indexOf(charCode) !== -1) {
+ focusPrevious();
+ }
+
+ if (NEXT_KEYS.indexOf(charCode) !== -1) {
+ focusNext();
+ }
+ };
+
+ var match = location.search.match(/token=([^&]+)/);
+ var token = match[1];
+ var tokenView = document.getElementById("token");
+ tokenView.innerText = token;
+
+ WaveService.readSession(token, displaySessionConfiguration);
+ WaveService.readSessionStatus(token, displaySessionStatus);
+
+ if (window.localStorage) {
+ var storage = window.localStorage;
+ var state = JSON.parse(storage.getItem("wave"));
+ if (!state) {
+ state = {};
+ }
+ if (!state.recent_sessions) {
+ state.recent_sessions = [];
+ }
+ if (state.recent_sessions.indexOf(token) === -1) {
+ state.recent_sessions.unshift(token);
+ }
+ storage.setItem("wave", JSON.stringify(state));
+ }
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/wave/www/next.html b/testing/web-platform/tests/tools/wave/www/next.html
new file mode 100644
index 0000000000..8412b9234f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/next.html
@@ -0,0 +1,33 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <script src="lib/wave-service.js"></script>
+ <script src="lib/screen-console.js"></script>
+ <script src="lib/query-parser.js"></script>
+ </head>
+ <body>
+ <p id="console" style="font-family: monospace;"></p>
+ <script>
+ var consoleElement = document.getElementById("console");
+ var screenConsole = new ScreenConsole(consoleElement);
+ var queryParameters = QueryParser.parseQuery();
+ var TOKEN = queryParameters["token"];
+ if (TOKEN)
+ document.cookie =
+ "resume_token=" + TOKEN + "; expires=Fri, 31 Dec 9999 23:59:59 GMT";
+
+ screenConsole.log("Loading next test ...");
+
+ WaveService.readNextTest(
+ TOKEN,
+ function (url) {
+ screenConsole.log("Redirecting to " + url);
+ location.href = url;
+ },
+ function () {
+ screenConsole.log("Connection failed.");
+ }
+ );
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/wave/www/overview.html b/testing/web-platform/tests/tools/wave/www/overview.html
new file mode 100644
index 0000000000..48ec3a2514
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/overview.html
@@ -0,0 +1,1315 @@
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8" />
+ <meta name="viewport" content="width=device-width, initial-scale=1" />
+ <title>Overview - Web Platform Test</title>
+ <link rel="stylesheet" href="css/bulma-0.7.5/bulma.min.css" />
+ <link rel="stylesheet" href="css/fontawesome-5.7.2.min.css" />
+ <script src="lib/utils.js"></script>
+ <script src="lib/wave-service.js"></script>
+ <script src="lib/ui.js"></script>
+ <style>
+ .site-logo {
+ max-width: 300px;
+ margin: 0 0 30px -15px;
+ }
+
+ .disabled-row {
+ color: gray;
+ background: lightgray;
+ }
+ </style>
+ </head>
+ <body>
+ <script>
+ window.onload = () => {
+ const query = utils.parseQuery(location.search);
+ if (query.token) {
+ location.href = WEB_ROOT + "results.html" + location.search;
+ }
+ resultsUi.render();
+ resultsUi.loadData();
+ };
+ var sortDetail = {};
+      const defaultSortDetail = { sortColumn: "dateStarted", ascending: true };
+      // Give each table its own copy of the default sort state so sorting one
+      // table does not change the sort order of the others.
+      sortDetail["recentSessions"] = Object.assign({}, defaultSortDetail);
+      sortDetail["pinnedSessions"] = Object.assign({}, defaultSortDetail);
+      sortDetail["publicSessions"] = Object.assign({}, defaultSortDetail);
+
+ const resultsUi = {
+ state: {
+ comparison: [],
+ recentSessions: null,
+ importResultsEnabled: false,
+ filterLabels: []
+ },
+ loadData() {
+ const pinnedSessions = WaveService.getPinnedSessions().filter(
+ token => !!token
+ );
+ const recentSessions = WaveService.getRecentSessions().filter(
+ token => !!token
+ );
+
+ pinnedSessions.forEach(token => {
+ const index = recentSessions.indexOf(token);
+ if (index !== -1) recentSessions.splice(index, 1);
+ });
+ WaveService.setRecentSessions(recentSessions);
+
+ let allSessions = [];
+ allSessions = allSessions.concat(pinnedSessions);
+ allSessions = allSessions.concat(recentSessions);
+
+ WaveService.readPublicSessions(publicSessions => {
+ publicSessions.forEach(token => {
+ const index = recentSessions.indexOf(token);
+ if (index !== -1) recentSessions.splice(index, 1);
+ });
+ WaveService.setRecentSessions(recentSessions);
+ allSessions = allSessions.concat(publicSessions);
+ WaveService.readMultipleSessions(allSessions, configurations =>
+ WaveService.readMultipleSessionStatuses(allSessions, statuses => {
+ configurations.forEach(configuration => {
+ const status = statuses.find(
+ status => status.token === configuration.token
+ );
+ configuration.dateStarted = status.dateStarted;
+ configuration.dateFinished = status.dateFinished;
+ configuration.status = status.status;
+ });
+
+ configurations = configurations.filter(
+ configuration => !!configuration
+ );
+ allSessions
+ .filter(
+ token =>
+ !configurations.some(
+ configuration => configuration.token === token
+ )
+ )
+ .forEach(token => {
+ WaveService.removePinnedSession(token);
+ WaveService.removeRecentSession(token);
+ });
+ resultsUi.state.publicSessions = publicSessions;
+ resultsUi.state.pinnedSessions = WaveService.getPinnedSessions();
+ resultsUi.state.recentSessions = WaveService.getRecentSessions();
+
+ const sessions = {};
+ configurations.forEach(
+ configuration =>
+ (sessions[configuration.token] = configuration)
+ );
+ resultsUi.state.sessions = sessions;
+
+ const referenceTokens = [];
+ const loadedSessionsTokens = Object.keys(sessions);
+ configurations.forEach(configuration =>
+ configuration.referenceTokens
+ .filter(token => loadedSessionsTokens.indexOf(token) === -1)
+ .forEach(token => referenceTokens.push(token))
+ );
+ WaveService.readMultipleSessions(
+ referenceTokens,
+ configurations => {
+ const { sessions } = resultsUi.state;
+ configurations.forEach(
+ configuration =>
+ (sessions[configuration.token] = configuration)
+ );
+ resultsUi.renderPublicSessions();
+ resultsUi.renderPinnedSessions();
+ resultsUi.renderRecentSessions();
+ }
+ );
+ })
+ );
+ });
+ WaveService.readStatus(function(config) {
+ resultsUi.state.importResultsEnabled = config.importResultsEnabled;
+ resultsUi.state.reportsEnabled = config.reportsEnabled;
+ resultsUi.renderManageSessions();
+ });
+ },
+ findSession(fragment, callback) {
+ if (!fragment || fragment.length < 8) return;
+ WaveService.findToken(
+ fragment,
+ token => {
+ WaveService.readSession(token, session => {
+ WaveService.readSessionStatus(token, status => {
+ session.status = status.status;
+ session.dateStarted = status.dateStarted;
+ session.dateFinished = status.dateFinished;
+ callback(session);
+ });
+ });
+ },
+ () => callback(null)
+ );
+ },
+ addSession(session) {
+ const token = session.token;
+ if (resultsUi.state.sessions[token]) return;
+ resultsUi.state.sessions[token] = session;
+ resultsUi.pinSession(token);
+ },
+ removeSession(token) {
+ delete resultsUi.state.sessions[token];
+ WaveService.removeRecentSession(token);
+ WaveService.removePinnedSession(token);
+ resultsUi.updateSessionState();
+ },
+ showAddSessionError() {
+ const errorBox = UI.getElement("find-error");
+ errorBox.setAttribute("style", "display: block");
+ },
+ hideAddSessionError() {
+ const errorBox = UI.getElement("find-error");
+ errorBox.setAttribute("style", "display: none");
+ },
+ pinSession(token) {
+ WaveService.addPinnedSession(token);
+ WaveService.removeRecentSession(token);
+ resultsUi.updateSessionState();
+ },
+ unpinSession(token) {
+ WaveService.removePinnedSession(token);
+ WaveService.addRecentSession(token);
+ resultsUi.updateSessionState();
+ },
+ updateSessionState() {
+ resultsUi.state.pinnedSessions = WaveService.getPinnedSessions();
+ resultsUi.state.recentSessions = WaveService.getRecentSessions();
+ resultsUi.renderPinnedSessions();
+ resultsUi.renderRecentSessions();
+ },
+ openSessionResult(token) {
+ location.href = `${WEB_ROOT}results.html?token=${token}`;
+ },
+ sortSessions(tableType, column) {
+ if (tableType in sortDetail) {
+ if (sortDetail[tableType].sortColumn == column) {
+ sortDetail[tableType].ascending = !sortDetail[tableType]
+ .ascending;
+ } else {
+ sortDetail[tableType].sortColumn = column;
+ sortDetail[tableType].ascending = true;
+ }
+ switch (tableType) {
+ case "recentSessions":
+ resultsUi.renderRecentSessions();
+ break;
+ case "pinnedSessions":
+ resultsUi.renderPinnedSessions();
+ break;
+ case "publicSessions":
+ resultsUi.renderPublicSessions();
+ break;
+ }
+ }
+ },
+ sortSessionsByColumn(sessions, recentSessions, column, ascending) {
+ var resultArray = recentSessions
+ .map(token => sessions[token])
+ .sort(function(sessionA, sessionB) {
+ let columnA = sessionA[column];
+ if (column === "browser")
+ columnA = sessionA[column].name + sessionA[column].version;
+ if (column === "dateStarted" && !columnA) {
+ columnA = Date.now();
+ }
+ let columnB = sessionB[column];
+ if (column === "browser")
+ columnB = sessionB[column].name + sessionA[column].version;
+ if (column === "dateStarted" && !columnB) {
+ columnB = Date.now();
+ }
+ if (columnA < columnB) {
+ return -1;
+ }
+ if (columnA > columnB) {
+ return 1;
+ }
+ return 0;
+ });
+ if (ascending) {
+ resultArray.reverse();
+ }
+ return resultArray;
+ },
+ compareSessions(reftokens) {
+ if (!resultsUi.isComparisonValid()) return;
+ const tokens = resultsUi.state.comparison;
+ if (!tokens || tokens.length === 0) return;
+ const refQuery = reftokens ? `&reftokens=${reftokens}` : "";
+ location.href = `${WEB_ROOT}comparison.html?tokens=${tokens.join(
+ ","
+ )}${refQuery}`;
+ },
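+        // Sessions are comparable only when at least two are selected and all of
+        // them share exactly the same set of reference tokens; per-session
+        // eligibility (finished status) is handled by isSessionValidForComparison
+        // below.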
+ isComparisonValid() {
+ const { comparison, sessions } = resultsUi.state;
+ if (!comparison) return false;
+ if (comparison.length <= 1) return false;
+ const comparingSessions = comparison.map(token => sessions[token]);
+ const referenceTokens = comparingSessions[0].referenceTokens;
+ for (let comparingSession of comparingSessions) {
+ const comparingReferenceTokens = comparingSession.referenceTokens;
+ if (referenceTokens.length !== comparingReferenceTokens.length)
+ return false;
+ for (let token of comparingReferenceTokens) {
+ if (referenceTokens.indexOf(token) === -1) return false;
+ }
+ }
+ return true;
+ },
+ isSessionValidForComparison(session) {
+ if (!session) return false;
+ if (session.status !== "completed" && session.status !== "aborted")
+ return false;
+ const sessionRefTokens = session.reference_tokens;
+ const comparisonSession =
+ resultsUi.state.sessions[resultsUi.state.comparison[0]];
+ if (!comparisonSession) return true;
+ const comparisonRefTokens = comparisonSession.reference_tokens;
+ if (!comparisonRefTokens) return true;
+ if (sessionRefTokens.length !== comparisonRefTokens.length)
+ return false;
+ if (
+ sessionRefTokens.some(
+ token => comparisonRefTokens.indexOf(token) === -1
+ )
+ )
+ return false;
+ return true;
+ },
+ isSessionSelectedForComparison(session) {
+ return resultsUi.state.comparison.indexOf(session.token) !== -1;
+ },
+ isSessionDisabled(session) {
+ return (
+ resultsUi.state.comparison.length > 0 &&
+ !resultsUi.isSessionValidForComparison(session)
+ );
+ },
+ addSessionToComparison(token) {
+ if (resultsUi.state.comparison.indexOf(token) !== -1) return;
+ resultsUi.state.comparison.push(token);
+ resultsUi.updateCompareButton();
+ resultsUi.renderSessions();
+ },
+ removeSessionFromComparison(token) {
+ const index = resultsUi.state.comparison.indexOf(token);
+ if (index === -1) return;
+ resultsUi.state.comparison.splice(index, 1);
+ resultsUi.updateCompareButton();
+ resultsUi.renderSessions();
+ },
+ handleAddSession() {
+ const tokenFragmentInput = UI.getElement("token-fragment");
+ const fragment = tokenFragmentInput.value;
+ resultsUi.findSession(fragment, session => {
+ if (!session) {
+ resultsUi.showAddSessionError();
+ return;
+ }
+ tokenFragmentInput.value = "";
+ resultsUi.hideAddSessionError();
+ resultsUi.addSession(session);
+ });
+ },
+ handleImportSession() {
+ resultsUi.state.importError = null;
+ resultsUi.state.importInProgress = true;
+ resultsUi.renderManageSessions();
+ const { importSessionFile: file } = resultsUi.state;
+ const reader = new FileReader();
+ reader.readAsArrayBuffer(file);
+ reader.onload = () => {
+ const data = reader.result;
+ WaveService.importResults(
+ data,
+ function(token) {
+ location.href = WEB_ROOT + "results.html?token=" + token;
+ },
+ function(error) {
+ resultsUi.state.importError = error;
+ resultsUi.state.importInProgress = false;
+ resultsUi.renderManageSessions();
+ }
+ );
+ };
+ },
+ handleImportSessionSelection() {
+ const file = UI.getElement("import-session-file").files[0];
+ resultsUi.state.importSessionFile = file;
+ resultsUi.renderManageSessions();
+ },
+ addFilterLabel() {
+ const label = UI.getElement("filter-label-input").value;
+ if (!label) return;
+ const { filterLabels } = resultsUi.state;
+ if (filterLabels.indexOf(label) !== -1) return;
+ filterLabels.push(label);
+ resultsUi.renderSessions();
+ UI.getElement("filter-label-input").focus();
+ },
+ removeFilterLabel(index) {
+ resultsUi.state.filterLabels.splice(index, 1);
+ resultsUi.renderSessions();
+ },
+ showAddFilterLabel() {
+ resultsUi.state.addFilterLabelVisible = true;
+ resultsUi.renderSessions();
+ UI.getElement("filter-label-input").focus();
+ },
+ hideAddFilterLabel() {
+ resultsUi.state.addFilterLabelVisible = false;
+ resultsUi.renderSessions();
+ },
+ render() {
+ const { getRoot, createElement, getElement } = UI;
+ const resultsView = UI.createElement({
+ className: "section",
+ children: [
+ {
+ className: "container",
+ style: "margin-bottom: 2em",
+ children: [
+ {
+ element: "img",
+ src: "res/wavelogo_2016.jpg",
+ className: "site-logo"
+ },
+ { text: "Results Overview", className: "title" }
+ ]
+ },
+ {
+ id: "manage-sessions",
+ className: "container",
+ style: "margin-bottom: 2em"
+ },
+ { id: "sessions", className: "container" }
+ ]
+ });
+
+ const root = UI.getRoot();
+ root.innerHTML = "";
+ root.appendChild(resultsView);
+
+ resultsUi.renderManageSessions();
+ resultsUi.renderSessions();
+ },
+ renderManageSessions() {
+ const manageSessionsView = UI.getElement("manage-sessions");
+ manageSessionsView.innerHTML = "";
+ const heading = { text: "Manage Sessions", className: "title is-4" };
+ const addCompareSessions = {
+ className: "columns",
+ children: [
+ {
+ className: "column",
+ children: [
+ { text: "Add Sessions", className: "title is-5" },
+ {
+ element: "article",
+ className: "message is-danger",
+ id: "find-error",
+ children: [
+ {
+ text:
+ "Could not find any sessions! Try adding more characters of the token.",
+ className: "message-body"
+ }
+ ],
+ style: "display: none"
+ },
+ {
+ className: "field",
+ children: [
+ {
+ className: "label has-text-weight-normal",
+ text: "Session token:"
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ children: {
+ style: "display: flex; margin-bottom: 10px;",
+ children: [
+ {
+ element: "input",
+ inputType: "text",
+ className: "input is-family-monospace",
+ id: "token-fragment",
+ placeholder:
+ "First 8 characters or more of session token",
+ onKeyDown: event =>
+ event.key === "Enter"
+ ? resultsUi.handleAddSession()
+ : null
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ className: "field is-grouped is-grouped-right",
+ children: {
+ className: "control",
+ children: {
+ className: "button is-dark is-outlined",
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-plus"
+ }
+ ]
+ },
+ { text: "Add Session", element: "span" }
+ ],
+ onclick: resultsUi.handleAddSession
+ }
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ className: "column",
+ children: [
+ { text: "Compare Sessions", className: "title is-5" },
+ {
+ element: "label",
+ text:
+ "Compare sessions by selecting them in the list below. " +
+ "Only sessions with the same set of reference sessions can be compared. " +
+ "Sessions have to be finished."
+ },
+ {
+ style: "text-align: right",
+ children: [
+ {
+ className: "button is-dark is-outlined",
+ disabled: true,
+ id: "compare-button",
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-balance-scale"
+ }
+ ]
+ },
+ { text: "Compare Selected", element: "span" }
+ ],
+ onClick: () => resultsUi.compareSessions()
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ };
+ const {
+ importSessionFile,
+ importError,
+ importInProgress
+ } = resultsUi.state;
+ const importSessions = {
+ className: "columns",
+ style: "margin-bottom: 2em",
+ children: [
+ {
+ className: "column is-half",
+ children: [
+ { text: "Import Sessions", className: "title is-5" },
+ {
+ element: "article",
+ className: "message is-danger",
+ children: [
+ {
+ className: "message-body",
+ text: "Could not import session: " + importError
+ }
+ ],
+ style: importError ? "" : "display: none"
+ },
+ {
+ className: "field file has-name",
+ children: [
+ {
+ element: "label",
+ className: "file-label",
+ style: "width: 100%",
+ children: [
+ {
+ element: "input",
+ className: "file-input",
+ type: "file",
+ accept: ".zip",
+ id: "import-session-file",
+ onChange: resultsUi.handleImportSessionSelection
+ },
+ {
+ element: "span",
+ className: "file-cta",
+ children: [
+ {
+ element: "span",
+ className: "file-icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-upload"
+ }
+ ]
+ },
+ {
+ element: "span",
+ className: "file-label",
+ text: "Choose ZIP file"
+ }
+ ]
+ },
+ {
+ element: "span",
+ className: "file-name",
+ style: "width: 100%; max-width: unset",
+ text: importSessionFile
+ ? importSessionFile.name
+ : ""
+ }
+ ]
+ }
+ ]
+ },
+ {
+ className: "field is-grouped is-grouped-right",
+ children: {
+ className: "control",
+ children: {
+ className: "button is-dark is-outlined",
+ disabled: !importSessionFile,
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: importInProgress
+ ? "fas fa-spinner fa-pulse"
+ : "fas fa-plus"
+ }
+ ]
+ },
+ { text: "Import Session", element: "span" }
+ ],
+ onclick: resultsUi.handleImportSession
+ }
+ }
+ }
+ ]
+ },
+ {
+ className: "column",
+ children: []
+ }
+ ]
+ };
+ const { importResultsEnabled } = resultsUi.state;
+ manageSessionsView.appendChild(UI.createElement(heading));
+ manageSessionsView.appendChild(UI.createElement(addCompareSessions));
+ if (!importResultsEnabled) return;
+ manageSessionsView.appendChild(UI.createElement(importSessions));
+ },
+ renderSessions() {
+ const sessionsView = UI.getElement("sessions");
+ sessionsView.innerHTML = "";
+ sessionsView.appendChild(
+ UI.createElement({ text: "Sessions", className: "title is-4" })
+ );
+
+ const sessionFilters = resultsUi.createSessionFilters();
+ sessionsView.appendChild(sessionFilters);
+
+ sessionsView.appendChild(UI.createElement({ id: "public-sessions" }));
+ sessionsView.appendChild(UI.createElement({ id: "pinned-sessions" }));
+ sessionsView.appendChild(UI.createElement({ id: "recent-sessions" }));
+ sessionsView.appendChild(UI.createElement({ id: "session-status" }));
+ resultsUi.renderPublicSessions();
+ resultsUi.renderPinnedSessions();
+ resultsUi.renderRecentSessions();
+ },
+ renderPublicSessions() {
+ resultsUi.renderSessionStatus();
+ const { sessions, publicSessions, filterLabels } = resultsUi.state;
+
+ UI.saveScrollPosition("public-sessions-overflow");
+
+ const publicSessionsView = UI.getElement("public-sessions");
+ publicSessionsView.innerHTML = "";
+
+ if (!publicSessions || publicSessions.length === 0) return;
+ const sortedPublicSessions = resultsUi.sortSessionsByColumn(
+ sessions,
+ publicSessions,
+ sortDetail["publicSessions"].sortColumn,
+ sortDetail["publicSessions"].ascending
+ );
+
+ const filteredPublicSessions = sortedPublicSessions.filter(
+ session =>
+ filterLabels.length === 0 ||
+ filterLabels.reduce(
+ (match, label) =>
+ match &&
+ session.labels
+ .map(label => label.toLowerCase())
+ .indexOf(label.toLowerCase()) !== -1,
+ true
+ )
+ );
+
+ if (filteredPublicSessions.length === 0) return;
+
+ publicSessionsView.appendChild(
+ UI.createElement({
+ text: "Reference Browsers",
+ className: "title is-5"
+ })
+ );
+
+ const sessionsTable = UI.createElement({
+ style: "overflow-x: auto",
+ id: "public-sessions-overflow",
+ children: resultsUi.createSessionsTable(
+ "publicSessions",
+ filteredPublicSessions,
+ { static: true }
+ )
+ });
+ publicSessionsView.appendChild(sessionsTable);
+
+ publicSessionsView.appendChild(
+ UI.createElement({ style: "content: ''; margin-bottom: 40px" })
+ );
+
+ UI.loadScrollPosition("public-sessions-overflow")
+ },
+ renderPinnedSessions() {
+ resultsUi.renderSessionStatus();
+ const { sessions, pinnedSessions, filterLabels } = resultsUi.state;
+
+ UI.saveScrollPosition("pinned-sessions-overflow");
+ const pinnedSessionsView = UI.getElement("pinned-sessions");
+ pinnedSessionsView.innerHTML = "";
+ if (!pinnedSessions || pinnedSessions.length === 0) return;
+ const sortedPinnedSessions = resultsUi.sortSessionsByColumn(
+ sessions,
+ pinnedSessions,
+ sortDetail["pinnedSessions"].sortColumn,
+ sortDetail["pinnedSessions"].ascending
+ );
+ const filteredPinnedSessions = sortedPinnedSessions.filter(
+ session =>
+ filterLabels.length === 0 ||
+ filterLabels.reduce(
+ (match, label) =>
+ match &&
+ session.labels
+ .map(label => label.toLowerCase())
+ .indexOf(label.toLowerCase()) !== -1,
+ true
+ )
+ );
+
+ if (filteredPinnedSessions.length === 0) return;
+
+ pinnedSessionsView.appendChild(
+ UI.createElement({ text: "Pinned", className: "title is-5" })
+ );
+
+ const sessionsTable = UI.createElement({
+ style: "overflow-x: auto",
+ id: "pinned-sessions-overflow",
+ children: resultsUi.createSessionsTable(
+ "pinnedSessions",
+ filteredPinnedSessions,
+ { pinned: true }
+ )
+ });
+ pinnedSessionsView.appendChild(sessionsTable);
+
+ pinnedSessionsView.appendChild(
+ UI.createElement({ style: "content: ''; margin-bottom: 40px" })
+ );
+ UI.loadScrollPosition("pinned-sessions-overflow");
+ },
+ renderRecentSessions() {
+ resultsUi.renderSessionStatus();
+ const {
+ sessions,
+ recentSessions,
+ pinnedSessions,
+ filterLabels
+ } = resultsUi.state;
+ UI.saveScrollPosition("recent-sessions-overflow");
+ const recentSessionsView = UI.getElement("recent-sessions");
+ recentSessionsView.innerHTML = "";
+ if (!recentSessions || recentSessions.length === 0) return;
+
+ const sortedRecentSessions = resultsUi.sortSessionsByColumn(
+ sessions,
+ recentSessions,
+ sortDetail["recentSessions"].sortColumn,
+ sortDetail["recentSessions"].ascending
+ );
+ const filteredRecentSessions = sortedRecentSessions.filter(
+ session =>
+ filterLabels.length === 0 ||
+ filterLabels.reduce(
+ (match, label) =>
+ match &&
+ session.labels
+ .map(label => label.toLowerCase())
+ .indexOf(label.toLowerCase()) !== -1,
+ true
+ )
+ );
+
+ if (filteredRecentSessions.length === 0) return;
+
+ recentSessionsView.appendChild(
+ UI.createElement({ text: "Recent", className: "title is-5" })
+ );
+
+ const sessionsTable = UI.createElement({
+ style: "overflow-x: auto",
+ id: "recent-sessions-overflow",
+ children: resultsUi.createSessionsTable(
+ "recentSessions",
+ filteredRecentSessions,
+ { pinned: false }
+ )
+ });
+ recentSessionsView.appendChild(sessionsTable);
+
+ recentSessionsView.appendChild(
+ UI.createElement({ style: "content: ''; margin-bottom: 40px" })
+ );
+ UI.loadScrollPosition("recent-sessions-overflow");
+ },
+ renderSessionStatus() {
+ const {
+ recentSessions,
+ pinnedSessions,
+ publicSessions
+ } = resultsUi.state;
+ const sessionStatusView = UI.getElement("session-status");
+ sessionStatusView.innerHTML = "";
+ if (!recentSessions && !pinnedSessions && !publicSessions) {
+ sessionStatusView.appendChild(
+ UI.createElement({
+ className: "level",
+ children: {
+ element: "span",
+ className: "level-item",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-spinner fa-pulse"
+ },
+ {
+ style: "margin-left: 0.4em;",
+ text: "Loading sessions ..."
+ }
+ ]
+ }
+ })
+ );
+ return;
+ } else if (
+ (!recentSessions || recentSessions.length === 0) &&
+ (!pinnedSessions || pinnedSessions.length === 0) &&
+ (!publicSessions || publicSessions.length === 0)
+ ) {
+ sessionStatusView.appendChild(
+ UI.createElement({
+ className: "level",
+ children: {
+ element: "span",
+ className: "level-item",
+ text: "No sessions available."
+ }
+ })
+ );
+ return;
+ }
+ },
+ createSessionFilters() {
+ const { filterLabels, addFilterLabelVisible } = resultsUi.state;
+
+ const filters = UI.createElement({
+ className: "field is-horizontal",
+ style: "margin-bottom: 2em",
+ children: [
+ {
+ className: "field-label",
+ style: "flex: unset",
+ children: {
+ className: "label has-text-weight-normal",
+ text: "Filter by labels:"
+ }
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "control",
+ children: {
+ className: "field is-grouped is-grouped-multiline",
+ children: filterLabels
+ .map((label, index) => ({
+ className: "control",
+ children: {
+ className: "tags has-addons",
+ children: [
+ {
+ element: "span",
+ className: "tag is-info",
+ text: label
+ },
+ {
+ element: "a",
+ className: "tag is-delete",
+ onClick: () => resultsUi.removeFilterLabel(index)
+ }
+ ]
+ }
+ }))
+ .concat(
+ addFilterLabelVisible
+ ? [
+ {
+ className: "control field is-grouped",
+ children: [
+ {
+ element: "input",
+ className: "input is-small control",
+ style: "width: 10rem",
+ id: "filter-label-input",
+ type: "text",
+ onKeyUp: event =>
+ event.keyCode === 13
+ ? resultsUi.addFilterLabel()
+ : null
+ },
+ {
+ className:
+ "button is-dark is-outlined is-small is-rounded control",
+ text: "save",
+ onClick: resultsUi.addFilterLabel
+ },
+ {
+ className:
+ "button is-dark is-outlined is-small is-rounded control",
+ text: "cancel",
+ onClick: resultsUi.hideAddFilterLabel
+ }
+ ]
+ }
+ ]
+ : [
+ {
+ className: "button is-rounded is-small",
+ text: "Add",
+ onClick: resultsUi.showAddFilterLabel
+ }
+ ]
+ )
+ }
+ }
+ }
+ ]
+ });
+ return filters;
+ },
+ createSessionsTable(
+ tableType,
+ sessions,
+ { pinned = false, static = false } = {}
+ ) {
+ const getTagStyle = status => {
+ switch (status) {
+ case "completed":
+ return "is-success";
+ case "running":
+ return "is-info";
+ case "aborted":
+ return "is-danger";
+ case "paused":
+ return "is-warning";
+ case "pending":
+ return "is-primary";
+ }
+ };
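+      // Pick the sort direction icon for this table; each sortable column
+      // header shows it only when it is the active sort column.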
+ var sortIcon = null;
+ if (tableType in sortDetail) {
+ sortIcon = sortDetail[tableType].ascending
+ ? "fas fa-sort-down"
+ : "fas fa-sort-up";
+ }
+ return UI.createElement({
+ element: "table",
+ className: "table is-bordered is-hoverable is-fullwidth",
+ children: [
+ {
+ element: "thead",
+ children: {
+ element: "tr",
+ children: [
+ {
+ element: "td",
+ style: "text-decoration: underline dotted;",
+ text: "Cp",
+ className: "is-narrow",
+ title: "Select for comparison"
+ },
+ {
+ element: "td",
+ text: "Token",
+ className: "is-narrow",
+ onclick: () => resultsUi.sortSessions(tableType, "token"),
+ style: "cursor: pointer;",
+ children: [
+ {
+ element: "i",
+ className: sortIcon,
+ style:
+ "padding-left: 20px; visibility:" +
+ (sortIcon &&
+ sortDetail[tableType].sortColumn == "token"
+ ? "visible;"
+ : "hidden;")
+ }
+ ]
+ },
+ {
+ element: "td",
+ text: "Browser",
+ onclick: () =>
+ resultsUi.sortSessions(tableType, "browser"),
+ style: "cursor: pointer;",
+ className: "is-narrow",
+ children: [
+ {
+ element: "i",
+ className: sortIcon,
+ style:
+ "padding-left: 20px; visibility:" +
+ (sortIcon &&
+ sortDetail[tableType].sortColumn == "browser"
+ ? "visible;"
+ : "hidden;")
+ }
+ ]
+ },
+ {
+ element: "td",
+ text: "Status",
+ onclick: () =>
+ resultsUi.sortSessions(tableType, "status"),
+ style: "cursor: pointer",
+ className: "is-narrow",
+ children: [
+ {
+ element: "i",
+ className: sortIcon,
+ style:
+ "padding-left: 20px; visibility:" +
+ (sortIcon &&
+ sortDetail[tableType].sortColumn == "status"
+ ? "visible;"
+ : "hidden;")
+ }
+ ]
+ },
+ {
+ element: "td",
+ text: "Date Started",
+ onclick: () =>
+ resultsUi.sortSessions(tableType, "dateStarted"),
+ style: "cursor: pointer;",
+ className: "is-narrow",
+ children: [
+ {
+ element: "i",
+ className: sortIcon,
+ style:
+ "padding-left: 20px; visibility:" +
+ (sortIcon &&
+ sortDetail[tableType].sortColumn == "dateStarted"
+ ? "visible;"
+ : "hidden;")
+ }
+ ]
+ },
+ {
+ element: "td",
+ text: "Labels",
+              style: "width: 18rem"
+ },
+ static
+ ? null
+ : {
+ element: "td",
+ text: "RefS",
+ title: "Reference Sessions",
+ style: "text-decoration: underline dotted;",
+ className: "is-narrow"
+ },
+ static
+ ? null
+ : {
+ element: "td",
+ colspan: 2,
+ text: "Options",
+ className: "is-narrow"
+ }
+ ]
+ }
+ },
+ {
+ element: "tbody",
+ children: sessions.map(session => ({
+ element: "tr",
+ className: resultsUi.isSessionDisabled(session)
+ ? "disabled-row"
+ : "",
+ style: "cursor: pointer",
+ onclick: () => resultsUi.openSessionResult(session.token),
+ children: [
+ {
+ element: "td",
+ onclick: event => event.stopPropagation(),
+ style: "vertical-align: middle;",
+ children: [
+ {
+ element: "input",
+ className: "checkbox",
+ style:
+ "width: 18px; height: 18px; margin-top: 0.55em",
+ type: "checkbox",
+ disabled: !resultsUi.isSessionValidForComparison(
+ session
+ ),
+ checked: resultsUi.isSessionSelectedForComparison(
+ session
+ ),
+ onchange: event =>
+ event.target.checked
+ ? resultsUi.addSessionToComparison(session.token)
+ : resultsUi.removeSessionFromComparison(
+ session.token
+ )
+ }
+ ]
+ },
+ {
+ element: "td",
+ className: "is-family-monospace",
+ style: "vertical-align: middle;",
+ text: session.token.split("-").shift()
+ },
+ {
+ element: "td",
+ style: "vertical-align: middle; white-space: nowrap",
+ text: session.browser.name + " " + session.browser.version
+ },
+ {
+ element: "td",
+ style: "vertical-align: middle; text-align: center",
+ children: [
+ {
+ className: `tag ${getTagStyle(session.status)}`,
+ text: session.status
+ }
+ ]
+ },
+ {
+ element: "td",
+ style: "vertical-align: middle; white-space: nowrap",
+ text: session.dateStarted
+ ? new Date(session.dateStarted).toLocaleString()
+ : "not started"
+ },
+ {
+ element: "td",
+ children: {
+                  className: "tags field is-grouped is-grouped-multiline",
+ style: "min-width: 10em",
+ children: session.labels.map(label => ({
+ className: "control",
+ children: {
+ element: "span",
+ className: "tag is-info",
+ text: label
+ }
+ }))
+ }
+ },
+ static
+ ? null
+ : {
+ element: "td",
+ title: session.referenceTokens
+ .map(token => token.split("-").shift())
+                    .sort((tokenA, tokenB) => tokenA.localeCompare(tokenB))
+ .join("\n"),
+ style: "white-space:nowrap",
+ children: (() => {
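+                        // Show icons for up to three reference sessions; with
+                        // more than three, only the first two are shown plus a
+                        // "+N" counter for the rest.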
+ const tokens = session.referenceTokens.slice();
+ let overflow = 0;
+ if (tokens.length > 3) {
+ overflow = tokens.length - 2;
+ }
+ if (overflow > 0) tokens.splice(2, overflow + 2);
+ const children = tokens.map(token => {
+ let icon = "";
+ const session = resultsUi.state.sessions[token];
+ switch (session.browser.name.toLowerCase()) {
+ case "firefox":
+ icon = "fab fa-firefox";
+ break;
+ case "edge":
+ icon = "fab fa-edge";
+ break;
+ case "chrome":
+ case "chromium":
+ icon = "fab fa-chrome";
+ break;
+ case "safari":
+ case "webkit":
+ icon = "fab fa-safari";
+ break;
+ }
+ return {
+ element: "span",
+ style:
+ "margin-right: 5px; vertical-align: middle;",
+ children: { element: "i", className: icon }
+ };
+ });
+
+ if (overflow > 0)
+ children.push({
+ element: "span",
+ style: "vertical-align: middle",
+ className: "is-size-7",
+ text: `+${overflow}`
+ });
+
+ return children;
+ })()
+ },
+ static
+ ? null
+ : {
+ element: "td",
+ style: "vertical-align: middle; text-align: center",
+ className: "is-paddingless",
+ children: [
+ {
+ className: "button is-dark is-outlined is-small",
+ title: pinned ? "Unpin session" : "Pin session",
+ style: "margin: 5px",
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-thumbtack",
+ style: pinned
+ ? ""
+ : "transform: rotate(45deg)"
+ }
+ ]
+ }
+ ],
+ onclick: event => {
+ event.stopPropagation();
+ if (pinned) {
+ resultsUi.unpinSession(session.token);
+ } else {
+ resultsUi.pinSession(session.token);
+ }
+ }
+ }
+ ]
+ },
+ static
+ ? null
+ : {
+ element: "td",
+ style: "vertical-align: middle; text-align: center",
+ className: "is-paddingless",
+ children: [
+ {
+ className: "button is-dark is-outlined is-small",
+ title: "Remove session from list",
+ style: "margin: 5px",
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-trash-alt"
+ }
+ ]
+ }
+ ],
+ onclick: event => {
+ event.stopPropagation();
+ resultsUi.removeSession(session.token);
+ }
+ }
+ ]
+ }
+ ]
+ }))
+ }
+ ]
+ });
+ },
+ updateCompareButton: () => {
+ const compareButton = UI.getElement("compare-button");
+ if (resultsUi.isComparisonValid()) {
+ compareButton.removeAttribute("disabled");
+ } else {
+ compareButton.setAttribute("disabled", true);
+ }
+ }
+ };
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/wave/www/pause.html b/testing/web-platform/tests/tools/wave/www/pause.html
new file mode 100644
index 0000000000..4e52b6673d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/pause.html
@@ -0,0 +1,224 @@
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8" />
+ <title>Session Paused - Web Platform Test Runner</title>
+ <link rel="stylesheet" href="css/bulma-0.7.5/bulma.min.css" />
+ <link rel="stylesheet" href="css/style.css" />
+ <link rel="stylesheet" href="css/main.css" />
+ <script src="lib/davidshimjs/qrcode.js"></script>
+ <script src="lib/keycodes.js"></script>
+ <script src="lib/wave-service.js"></script>
+ </head>
+ <body>
+ <section class="section">
+ <div class="container site-header">
+ <img src="res/wavelogo_2016.jpg" alt="WAVE Logo" class="site-logo" />
+ <h1 class="title is-spaced">Session Paused</h1>
+ </div>
+
+ <div id="content">
+ <div id="qr-code"></div>
+ <div id="button-wrapper">
+ <button
+ id="start-button"
+ class="button is-large is-success tabbable"
+ >
+ Continue
+ </button>
+ <button
+ id="results-button"
+ class="button is-large is-light tabbable"
+ >
+ View Result Page
+ </button>
+ </div>
+        <div>(Alternatively, you can just press Return to resume the tests)</div>
+ <div id="details-wrapper">
+ <h3 class="title is-5 is-spaced">Details</h3>
+ <div class="detail">
+ <div>Token:</div>
+ <div id="token"></div>
+ </div>
+ <div class="detail">
+ <div>User Agent:</div>
+ <div id="user-agent"></div>
+ </div>
+ <div class="detail">
+ <div>Test Types:</div>
+ <div id="test-types"></div>
+ </div>
+ <div class="detail">
+ <div>Total Test Files:</div>
+ <div id="total-tests"></div>
+ </div>
+ <div class="detail">
+ <div>Reference Tokens:</div>
+ <div id="reference-tokens"></div>
+ </div>
+ <div class="detail">
+ <div>Test Timeouts:</div>
+ <div id="test-timeout"></div>
+ </div>
+ <div class="detail">
+ <div>Test Paths:</div>
+ <div id="test-path"></div>
+ </div>
+ </div>
+ </div>
+ </section>
+
+ <script>
+ var HOSTNAME = location.hostname;
+ var PORT = location.port;
+ var PROTOCOL = location.protocol.replace(/:/, "");
+ var QUERY = location.search.replace(/\?/, "");
+ var match = QUERY.match(/token=([^&]+)/);
+ var TOKEN = match ? match[1] : null;
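+      // Store the token in a long-lived "resume_token" cookie so the session
+      // can be picked up and resumed again later.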
+ if (TOKEN)
+ document.cookie = "resume_token=" + TOKEN + "; expires=Fri, 31 Dec 9999 23:59:59 GMT";
+ var selectedTabbable = -1;
+
+ function displaySessionConfiguration(configuration) {
+ var userAgent = document.getElementById("user-agent");
+ userAgent.innerText = configuration.userAgent;
+ var testPath = document.getElementById("test-path");
+ for (var i = 0; i < configuration.tests.include.length; i++) {
+ var path = configuration.tests.include[i];
+ testPath.innerText += path + "\n";
+ }
+ var testTypes = document.getElementById("test-types");
+ testTypes.innerText = configuration.types.join(", ");
+ var testTimeout = document.getElementById("test-timeout");
+ for (var timeout in configuration.timeouts) {
+ testTimeout.innerText +=
+ timeout + ": " + configuration.timeouts[timeout] / 1000 + "s\n";
+ }
+ var referenceTokens = document.getElementById("reference-tokens");
+ if (configuration.referenceTokens.length === 0) {
+ referenceTokens.innerText = "none";
+ } else {
+ for (var i = 0; i < configuration.referenceTokens.length; i++) {
+ var token = configuration.referenceTokens[i];
+ referenceTokens.innerText += token + "\n";
+ }
+ }
+ }
+
+ function displaySessionStatus(status) {
+ var testTypes = document.getElementById("total-tests");
+ var count = 0;
+ for (var api in status.testFilesCount) {
+ count += status.testFilesCount[api];
+ }
+ testTypes.innerText = count;
+ }
+
+ function continueTests() {
+ WaveService.startSession(TOKEN, function() {
+ WaveService.readNextTest(TOKEN, function(url) {
+ location.href = url;
+ });
+ });
+ }
+
+ WaveService.addSessionEventListener(TOKEN, function(message) {
+ if (message.type !== "status") return;
+ if (message.data !== "running") return;
+ WaveService.readNextTest(TOKEN, function(url) {
+ location.href = url;
+ });
+ });
+
+ var resultsUrl =
+        PROTOCOL + "://" + location.host + WEB_ROOT + "overview.html" + location.search;
+ new QRCode(document.getElementById("qr-code"), resultsUrl);
+
+ var resultsButton = document.getElementById("results-button");
+ resultsButton.onclick = function() {
+ window.open(resultsUrl, "_blank");
+ };
+
+ var continueButton = document.getElementById("start-button");
+ continueButton.onclick = continueTests;
+
+ function removeClass(element, className) {
+ var elementClass = element.className;
+ var index = elementClass.indexOf(className);
+ if (index !== -1) {
+ element.className = elementClass.replace(className, "");
+ }
+ }
+
+ function addClass(element, className) {
+ element.className += " " + className;
+ }
+
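+      // Move the visual focus between elements marked "tabbable", wrapping
+      // around at either end.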
+ function skipFocus(steps) {
+ var tabbables = document.getElementsByClassName("tabbable");
+ if (selectedTabbable === -1) {
+ selectedTabbable = 0;
+ } else {
+ removeClass(tabbables[selectedTabbable], "focused");
+ selectedTabbable += steps;
+ }
+
+ if (selectedTabbable >= tabbables.length) {
+ selectedTabbable = 0;
+ }
+
+ if (selectedTabbable < 0) {
+ selectedTabbable = tabbables.length - 1;
+ }
+
+ tabbables[selectedTabbable].focus();
+ addClass(tabbables[selectedTabbable], "focused");
+ }
+
+ function focusNext() {
+ skipFocus(1);
+ }
+
+ function focusPrevious() {
+ skipFocus(-1);
+ }
+
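+      // Action keys activate the focused element (or resume the session when
+      // nothing is focused); previous/next keys move the focus.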
+ document.onkeydown = function(event) {
+ event = event || window.event;
+ var charCode =
+ typeof event.which === "number" ? event.which : event.keyCode;
+
+ if (ACTION_KEYS.indexOf(charCode) !== -1) {
+ event.preventDefault();
+ if (selectedTabbable === -1) {
+ continueTests();
+ return;
+ }
+ var tabbables = document.getElementsByClassName("tabbable");
+ var element = tabbables[selectedTabbable];
+ if (element.type === "checkbox") {
+ element.checked = !element.checked;
+ } else {
+ element.click();
+ }
+ }
+
+ if (PREV_KEYS.indexOf(charCode) !== -1) {
+ focusPrevious();
+ }
+
+ if (NEXT_KEYS.indexOf(charCode) !== -1) {
+ focusNext();
+ }
+ };
+
+      var token = TOKEN;
+ var tokenView = document.getElementById("token");
+ tokenView.innerText = token;
+
+ WaveService.readSession(token, displaySessionConfiguration);
+ WaveService.readSessionStatus(token, displaySessionStatus);
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/wave/www/res/spinner-solid.svg b/testing/web-platform/tests/tools/wave/www/res/spinner-solid.svg
new file mode 100644
index 0000000000..f795980b3d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/res/spinner-solid.svg
@@ -0,0 +1 @@
+<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="spinner" class="svg-inline--fa fa-spinner fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M304 48c0 26.51-21.49 48-48 48s-48-21.49-48-48 21.49-48 48-48 48 21.49 48 48zm-48 368c-26.51 0-48 21.49-48 48s21.49 48 48 48 48-21.49 48-48-21.49-48-48-48zm208-208c-26.51 0-48 21.49-48 48s21.49 48 48 48 48-21.49 48-48-21.49-48-48-48zM96 256c0-26.51-21.49-48-48-48S0 229.49 0 256s21.49 48 48 48 48-21.49 48-48zm12.922 99.078c-26.51 0-48 21.49-48 48s21.49 48 48 48 48-21.49 48-48c0-26.509-21.491-48-48-48zm294.156 0c-26.51 0-48 21.49-48 48s21.49 48 48 48 48-21.49 48-48c0-26.509-21.49-48-48-48zM108.922 60.922c-26.51 0-48 21.49-48 48s21.49 48 48 48 48-21.49 48-48-21.491-48-48-48z"></path></svg> \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wave/www/res/wavelogo_2016.jpg b/testing/web-platform/tests/tools/wave/www/res/wavelogo_2016.jpg
new file mode 100644
index 0000000000..3881409597
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/res/wavelogo_2016.jpg
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/results.html b/testing/web-platform/tests/tools/wave/www/results.html
new file mode 100644
index 0000000000..c9b2c028c1
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/results.html
@@ -0,0 +1,1565 @@
+<!DOCTYPE html>
+<html lang="en" style="overflow: auto;">
+ <head>
+ <meta charset="UTF-8" />
+ <meta name="viewport" content="width=device-width, initial-scale=1" />
+ <title>Session Results - Web Platform Test</title>
+ <link rel="stylesheet" href="css/bulma-0.7.5/bulma.min.css" />
+ <link rel="stylesheet" href="css/fontawesome-5.7.2.min.css" />
+ <!-- <link rel="stylesheet" href="css/result.css" /> -->
+ <script src="lib/utils.js"></script>
+ <script src="lib/wave-service.js"></script>
+ <script src="lib/ui.js"></script>
+ <style>
+ .site-logo {
+ max-width: 300px;
+ margin: 0 0 30px -15px;
+ }
+ </style>
+ </head>
+ <body>
+ <script>
+ let token = null;
+ window.onload = () => {
+ const query = utils.parseQuery(location.search);
+ token = query.token;
+        if (token) {
+          resultUi.render();
+          resultUi.refreshData();
+          WaveService.addRecentSession(token);
+        } else {
+          location.href = WEB_ROOT + "overview.html" + location.search;
+        }
+ };
+ const resultUi = {
+ state: {
+ details: null,
+ results: null,
+ referenceSessions: [],
+ lastCompletedTests: [],
+ malfunctioningTests: [],
+ addLabelVisible: false,
+ },
+ refreshData: (toUpdate) => {
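+        // Refresh the view; depending on the event type only the affected
+        // data is re-fetched, while an empty type reloads everything.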
+ WaveService.readStatus(function (config) {
+ resultUi.state.reportsEnabled = config.reportsEnabled;
+ resultUi.renderApiResults();
+ });
+ switch (toUpdate) {
+ case "test_completed":
+ resultUi.refreshSessionStatus(() => {
+ resultUi.refreshSessionResults(() => {
+ resultUi.renderApiResults();
+ });
+ });
+ resultUi.refreshLastCompletedTests(() => {
+ resultUi.renderLastCompletedTests();
+ });
+ break;
+ case "status":
+ resultUi.refreshSessionStatus(() => {
+ resultUi.renderControls();
+ resultUi.renderSessionDetails();
+ });
+ break;
+ case "":
+ case null:
+ case undefined:
+ resultUi.refreshSessionConfiguration(() => {
+ resultUi.refreshSessionStatus(() => {
+ resultUi.refreshSessionResults(() => {
+ resultUi.refreshReferenceSessions(() =>
+ resultUi.renderReferenceSessions()
+ );
+ resultUi.renderControls();
+ resultUi.renderSessionDetails();
+ resultUi.renderApiResults();
+ resultUi.renderExportView();
+ resultUi.refreshLastCompletedTests(() => {
+ resultUi.renderLastCompletedTests();
+ });
+ resultUi.refreshMalfunctioningTests(() => {
+ resultUi.renderMalfunctioningTests();
+ });
+ });
+ });
+ });
+ break;
+ }
+ },
+ refreshSessionConfiguration(callback = () => {}) {
+ WaveService.readSession(token, (configuration) => {
+ resultUi.state.configuration = configuration;
+ callback(configuration);
+ });
+ },
+ refreshSessionStatus(callback = () => {}) {
+ WaveService.readSessionStatus(token, (status) => {
+ resultUi.state.status = status;
+ if (status.status !== "completed" && status.status !== "aborted")
+ WaveService.addSessionEventListener(
+ token,
+ resultUi.handleSessionEvent
+ );
+ callback(status);
+ });
+ },
+ refreshReferenceSessions(callback = () => {}) {
+ const { configuration } = resultUi.state;
+ if (!configuration) return;
+ const { referenceTokens } = configuration;
+ if (!referenceTokens) return;
+ WaveService.readMultipleSessions(referenceTokens, (configuration) => {
+ resultUi.state.referenceSessions = configuration;
+ resultUi.renderReferenceSessions();
+ callback(configuration);
+ });
+ },
+ refreshSessionResults(callback = () => {}) {
+ WaveService.readResultsCompact(token, (results) => {
+ resultUi.state.results = results;
+ callback(results);
+ });
+ },
+ refreshLastCompletedTests(callback = () => {}) {
+ if (resultUi.state.configuration.isPublic) return;
+ WaveService.readLastCompletedTests(token, ["timeout"], (tests) => {
+ resultUi.state.lastCompletedTests = tests;
+ callback();
+ });
+ },
+ refreshMalfunctioningTests(callback = () => {}) {
+ WaveService.readMalfunctioningTests(token, (tests) => {
+ resultUi.state.malfunctioningTests = tests;
+ callback();
+ });
+ },
+ handleSessionEvent(message) {
+ resultUi.refreshData(message.type);
+ },
+ openResultsOverview() {
+ location.href = WEB_ROOT + "overview.html";
+ },
+ stopSession() {
+ WaveService.stopSession(token, resultUi.refreshData);
+ },
+ deleteSession() {
+ WaveService.deleteSession(token, () =>
+ resultUi.openResultsOverview()
+ );
+ },
+ showDeleteModal() {
+ const modal = UI.getElement("delete-modal");
+ const className = modal.getAttribute("class");
+ modal.setAttribute("class", className + " is-active");
+ },
+ hideDeleteModal() {
+ const modal = UI.getElement("delete-modal");
+ let className = modal.getAttribute("class");
+ className = className.replace(" is-active", "");
+ modal.setAttribute("class", className);
+ },
+ downloadApiResultJson(api) {
+ const { results } = resultUi.state;
+ WaveService.downloadApiResult(token, api);
+ },
+ openHtmlReport(api) {
+ const { results } = resultUi.state;
+ if (results[api].complete != results[api].total) return;
+ WaveService.readReportUri(token, api, function (uri) {
+ window.open(uri, "_blank");
+ });
+ },
+ downloadFinishedApiJsons() {
+ WaveService.downloadAllApiResults(token);
+ },
+ downloadHtmlZip() {
+ WaveService.downloadResultsOverview(token);
+ },
+ downloadResults() {
+ if (resultUi.state.status.status !== "completed") return;
+ WaveService.downloadResults(token);
+ },
+ addMalfunctioningTest(testPath) {
+ const { malfunctioningTests } = resultUi.state;
+ if (malfunctioningTests.indexOf(testPath) !== -1) return;
+ malfunctioningTests.push(testPath);
+ WaveService.updateMalfunctioningTests(
+ token,
+ malfunctioningTests,
+ () => {
+ resultUi.renderMalfunctioningTests();
+ }
+ );
+ resultUi.renderLastCompletedTests();
+ },
+ removeMalfunctioningTest(testPath) {
+ const { malfunctioningTests } = resultUi.state;
+ malfunctioningTests.splice(malfunctioningTests.indexOf(testPath), 1);
+ WaveService.updateMalfunctioningTests(
+ token,
+ malfunctioningTests,
+ () => {
+ resultUi.renderMalfunctioningTests();
+ }
+ );
+ resultUi.renderLastCompletedTests();
+ },
+ isTestOnMalfunctioningList(test) {
+ const { malfunctioningTests } = resultUi.state;
+ return malfunctioningTests.indexOf(test) !== -1;
+ },
+ showExcluded() {
+ resultUi.state.showExcluded = true;
+ resultUi.renderSessionDetails();
+ },
+ hideExcluded() {
+ resultUi.state.showExcluded = false;
+ resultUi.renderSessionDetails();
+ },
+ addLabel() {
+ const label = UI.getElement("session-label-input").value;
+ if (!label) return;
+ const { configuration } = resultUi.state;
+ configuration.labels.push(label);
+ WaveService.updateLabels(token, configuration.labels);
+ resultUi.renderSessionDetails();
+ UI.getElement("session-label-input").focus();
+ },
+ removeLabel(index) {
+ const { configuration } = resultUi.state;
+ configuration.labels.splice(index, 1);
+ WaveService.updateLabels(token, configuration.labels);
+ resultUi.renderSessionDetails();
+ },
+ showAddLabel() {
+ resultUi.state.addLabelVisible = true;
+ resultUi.renderSessionDetails();
+ UI.getElement("session-label-input").focus();
+ },
+ hideAddLabel() {
+ resultUi.state.addLabelVisible = false;
+ resultUi.renderSessionDetails();
+ },
+ render() {
+ const resultView = UI.createElement({
+ className: "section",
+ children: [
+ {
+ className: "container",
+ style: "margin-bottom: 2em",
+ children: [
+ {
+ className: "columns",
+ children: [
+ {
+ className: "column",
+ children: [
+ {
+ element: "img",
+ src: "res/wavelogo_2016.jpg",
+ className: "site-logo",
+ },
+ ],
+ },
+ {
+ className: "column is-narrow",
+ children: {
+ className: "button is-dark is-outlined",
+ onclick: resultUi.openResultsOverview,
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-arrow-left",
+ },
+ ],
+ },
+ {
+ text: "Results Overview",
+ element: "span",
+ },
+ ],
+ },
+ },
+ ],
+ },
+ {
+ className: "container",
+ children: {
+ className: "columns",
+ children: [
+ {
+ className: "column",
+ children: { className: "title", text: "Result" },
+ },
+ {
+ className: "column is-narrow",
+ children: { id: "controls" },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ {
+ id: "session-details",
+ className: "container",
+ style: "margin-bottom: 2em",
+ },
+ {
+ id: "last-completed-tests",
+ className: "container",
+ style: "margin-bottom: 2em",
+ },
+ {
+ id: "api-results",
+ className: "container",
+ style: "margin-bottom: 2em",
+ },
+ {
+ id: "timeout-files",
+ className: "container",
+ style: "margin-bottom: 2em",
+ },
+ {
+ id: "export",
+ className: "container",
+ style: "margin-bottom: 2em",
+ },
+ {
+ id: "malfunctioning-tests",
+ className: "container",
+ style: "margin-bottom: 2em",
+ },
+ ],
+ });
+ const root = UI.getRoot();
+ root.innerHTML = "";
+ root.appendChild(resultView);
+ resultUi.renderControls();
+ resultUi.renderSessionDetails();
+ resultUi.renderApiResults();
+ resultUi.renderExportView();
+ },
+ renderControls() {
+ const { state } = resultUi;
+ if (!state.status) return;
+ const { status } = state.status;
+ const { isPublic } = state.configuration;
+ const controlsView = UI.createElement({
+ className: "field is-grouped is-grouped-multiline",
+ });
+ if (
+ status &&
+ status !== "aborted" &&
+ status !== "completed" &&
+ status !== "pending"
+ ) {
+ const pauseResumeButton = UI.createElement({
+ id: "pause-resume-button",
+ className: "control button is-dark is-outlined",
+ onclick: function () {
+ if (status === "running") {
+ WaveService.pauseSession(token, resultUi.refreshData);
+ } else {
+ WaveService.startSession(token, resultUi.refreshData);
+ }
+ },
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className:
+ status === "running" ? "fas fa-pause" : "fas fa-play",
+ },
+ ],
+ },
+ {
+ text: status === "running" ? "Pause" : "Resume",
+ element: "span",
+ },
+ ],
+ });
+ controlsView.appendChild(pauseResumeButton);
+ }
+
+ if (status && status !== "aborted" && status !== "completed") {
+ const stopButton = UI.createElement({
+ id: "stop-button",
+ className: "control button is-dark is-outlined",
+ onclick: resultUi.stopSession,
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-square",
+ },
+ ],
+ },
+ {
+ text: "Stop",
+ element: "span",
+ },
+ ],
+ });
+ controlsView.appendChild(stopButton);
+ }
+ if (!isPublic) {
+ const deleteButton = UI.createElement({
+ id: "delete-button",
+ className: "control button is-dark is-outlined",
+ onclick: resultUi.showDeleteModal,
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-trash-alt",
+ },
+ ],
+ },
+ {
+ text: "Delete",
+ element: "span",
+ },
+ ],
+ });
+ controlsView.appendChild(deleteButton);
+ }
+
+ const deleteModal = UI.createElement({
+ id: "delete-modal",
+ className: "modal",
+ children: [
+ {
+ className: "modal-background",
+ onclick: resultUi.hideDeleteModal,
+ },
+ {
+ className: "modal-card",
+ children: [
+ {
+ className: "modal-card-head",
+ children: [
+ {
+ element: "p",
+ className: "modal-card-title",
+ text: "Delete Session",
+ },
+ ],
+ },
+ {
+ className: "modal-card-body",
+ children: [
+ {
+ element: "p",
+ text: "Are you sure you want to delete this session?",
+ },
+ { element: "p", text: "This action cannot be undone." },
+ ],
+ },
+ {
+ className: "modal-card-foot",
+ children: [
+ {
+ className: "button is-danger",
+ text: "Delete Session",
+ onclick: resultUi.deleteSession,
+ },
+ {
+ className: "button",
+ text: "Cancel",
+ onclick: resultUi.hideDeleteModal,
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ });
+ controlsView.appendChild(deleteModal);
+
+ const controls = UI.getElement("controls");
+ controls.innerHTML = "";
+ controls.appendChild(controlsView);
+ },
+ renderSessionDetails() {
+ const { state } = resultUi;
+ const { configuration, status, results } = state;
+ if (!configuration || !status) return;
+ const sessionDetailsView = UI.createElement({
+ style: "margin-bottom: 20px",
+ });
+
+ const heading = UI.createElement({
+ text: "Session details",
+ className: "title is-4",
+ });
+ sessionDetailsView.appendChild(heading);
+
+ const getTagStyle = (status) => {
+ switch (status) {
+ case "completed":
+ return "is-success";
+ case "running":
+ return "is-info";
+ case "aborted":
+ return "is-danger";
+ case "paused":
+ return "is-warning";
+ case "pending":
+ return "is-primary";
+ }
+ };
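+        // While the session is running, update the displayed duration once
+        // per second; stop the timer once the session has finished.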
+ if (status.dateFinished) {
+ if (state.durationInterval) clearInterval(state.durationInterval);
+ } else if (status.dateStarted) {
+ if (!state.durationInterval)
+ state.durationInterval = setInterval(() => {
+ UI.getElement("duration").innerHTML = utils.millisToTimeString(
+ Date.now() - status.dateStarted.getTime()
+ );
+ }, 1000);
+ }
+
+ const { addLabelVisible } = state;
+ const { showExcluded } = state;
+
+ const tokenField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Token" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ text: configuration.token,
+ },
+ },
+ },
+ ],
+ });
+ sessionDetailsView.appendChild(tokenField);
+
+ const userAgentField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "User Agent" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ text: configuration.userAgent || "",
+ },
+ },
+ },
+ ],
+ });
+ sessionDetailsView.appendChild(userAgentField);
+
+ const testPathsField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Test Paths" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+                  text: configuration.tests.include.join(", "),
+ },
+ },
+ },
+ ],
+ });
+ sessionDetailsView.appendChild(testPathsField);
+
+ const excludedTestsField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Excluded Test Paths" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ children: [
+ {
+ element: "span",
+ text: configuration.tests.exclude.length,
+ },
+ {
+ element: "span",
+ className: "button is-small is-rounded",
+ style: "margin-left: 10px",
+ text: showExcluded ? "hide" : "show",
+ onClick: showExcluded
+ ? resultUi.hideExcluded
+ : resultUi.showExcluded,
+ },
+ showExcluded
+ ? {
+ style:
+ "max-height: 250px; overflow: auto; margin-bottom: 10px",
+ children: configuration.tests.exclude.map(
+ (test) => ({
+ text: test,
+ })
+ ),
+ }
+ : null,
+ ],
+ },
+ },
+ },
+ ],
+ });
+ sessionDetailsView.appendChild(excludedTestsField);
+
+ const referenceSessionField = UI.createElement({
+ style: "display: none",
+ id: "reference-session-field",
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Reference Sessions" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ children: { id: "reference-sessions" },
+ },
+ },
+ },
+ ],
+ });
+ sessionDetailsView.appendChild(referenceSessionField);
+
+ const totalTestFilesField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Total Test Files" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: "control",
+ text: Object.keys(results).reduce(
+ (sum, api) => (sum += results[api].total),
+ 0
+ ),
+ },
+ },
+ },
+ ],
+ });
+ sessionDetailsView.appendChild(totalTestFilesField);
+
+ const statusField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Status" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: `control tag ${getTagStyle(status.status)}`,
+ text: status.status,
+ },
+ },
+ },
+ ],
+ });
+ sessionDetailsView.appendChild(statusField);
+
+ const timeoutsField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Test Timeouts" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: `control`,
+ text: Object.keys(configuration.timeouts).reduce(
+ (text, timeout) =>
+ `${text}${timeout}: ${
+ configuration.timeouts[timeout] / 1000
+ }s\n`,
+ ""
+ ),
+ },
+ },
+ },
+ ],
+ });
+ sessionDetailsView.appendChild(timeoutsField);
+
+ if (status.dateStarted) {
+ const startedField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Date Started" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: `control`,
+ text: new Date(status.dateStarted).toLocaleString(),
+ },
+ },
+ },
+ ],
+ });
+ sessionDetailsView.appendChild(startedField);
+ }
+
+ if (status.dateFinished) {
+ const finishedField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Date Finished" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: `control`,
+ text: new Date(status.dateFinished).toLocaleString(),
+ },
+ },
+ },
+ ],
+ });
+ sessionDetailsView.appendChild(finishedField);
+ }
+
+ if (status.dateStarted) {
+ const durationField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Duration" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field",
+ children: {
+ className: `control`,
+ id: "duration",
+ text: utils.millisToTimeString(
+ status.dateFinished
+ ? status.dateFinished.getTime() -
+ status.dateStarted.getTime()
+ : Date.now() - status.dateStarted.getTime()
+ ),
+ },
+ },
+ },
+ ],
+ });
+ sessionDetailsView.appendChild(durationField);
+ }
+
+ const labelsField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Labels" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "field is-grouped is-grouped-multiline",
+ children: configuration.labels
+ .map((label, index) => ({
+ className: "control",
+ children: {
+ className: "tags has-addons",
+ children: [
+ {
+ element: "span",
+ className: "tag is-info",
+ text: label,
+ },
+ {
+ element: "a",
+ className: "tag is-delete",
+ onClick: () => resultUi.removeLabel(index),
+ },
+ ],
+ },
+ }))
+ .concat(
+ resultUi.state.configuration.isPublic
+ ? []
+ : addLabelVisible
+ ? [
+ {
+ className: "control field is-grouped",
+ children: [
+ {
+ element: "input",
+ className: "input is-small control",
+ style: "width: 10rem",
+ id: "session-label-input",
+ type: "text",
+ onKeyUp: (event) =>
+ event.keyCode === 13
+ ? resultUi.addLabel()
+ : null,
+ },
+ {
+ className:
+ "button is-dark is-outlined is-small is-rounded control",
+ text: "save",
+ onClick: resultUi.addLabel,
+ },
+ {
+ className:
+ "button is-dark is-outlined is-small is-rounded control",
+ text: "cancel",
+ onClick: resultUi.hideAddLabel,
+ },
+ ],
+ },
+ ]
+ : [
+ {
+ className: "button is-rounded is-small",
+ text: "Add",
+ onClick: resultUi.showAddLabel,
+ },
+ ]
+ ),
+ },
+ },
+ ],
+ });
+ sessionDetailsView.appendChild(labelsField);
+
+ const sessionDetails = UI.getElement("session-details");
+ sessionDetails.innerHTML = "";
+ sessionDetails.appendChild(sessionDetailsView);
+ resultUi.renderReferenceSessions();
+ },
+ renderReferenceSessions() {
+ const { referenceSessions } = resultUi.state;
+ if (!referenceSessions || referenceSessions.length === 0) return;
+ const referenceSessionsList = UI.createElement({
+ className: "field is-grouped is-grouped-multiline",
+ });
+ const getBrowserIcon = (browser) => {
+ switch (browser.toLowerCase()) {
+ case "firefox":
+ return "fab fa-firefox";
+ case "edge":
+ return "fab fa-edge";
+ case "chrome":
+ case "chromium":
+ return "fab fa-chrome";
+ case "safari":
+ case "webkit":
+ return "fab fa-safari";
+ }
+ };
+ referenceSessions.forEach((session) => {
+ const { token, browser } = session;
+ const referenceSessionItem = UI.createElement({
+ className:
+ "control button is-dark is-small is-rounded is-outlined",
+ onClick: () => WaveService.openSession(token),
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: {
+ element: "i",
+ className: getBrowserIcon(browser.name),
+ },
+ },
+ {
+ element: "span",
+ text: token.split("-").shift(),
+ },
+ ],
+ });
+ referenceSessionsList.appendChild(referenceSessionItem);
+ });
+ const referenceSessionsTarget = UI.getElement("reference-sessions");
+ referenceSessionsTarget.innerHTML = "";
+ referenceSessionsTarget.appendChild(referenceSessionsList);
+ const field = UI.getElement("reference-session-field");
+ field.style["display"] = "flex";
+ },
+ renderLastCompletedTests() {
+ if (resultUi.state.configuration.isPublic) return;
+ const lastCompletedTestsView = UI.createElement({});
+
+ const heading = UI.createElement({
+ className: "title is-4",
+ children: [
+ { element: "span", text: "Last Timed-Out Test Files" },
+ {
+ element: "span",
+ className: "title is-7",
+ text: " (most recent first)",
+ },
+ ],
+ });
+ lastCompletedTestsView.appendChild(heading);
+
+ const { lastCompletedTests } = resultUi.state;
+ const testsTable = UI.createElement({
+ element: "table",
+ className: "table",
+ style: "min-width: 100%",
+ children: [
+ {
+ element: "thead",
+ children: [
+ {
+ element: "tr",
+ children: [
+ { element: "td", text: "Test File" },
+ { element: "td", text: "Malfunctioning List" },
+ ],
+ },
+ ],
+ },
+ {
+ element: "tbody",
+ children: lastCompletedTests.map(({ path, status }) => ({
+ element: "tr",
+ children: [
+ { element: "td", text: path },
+ {
+ element: "td",
+ children: [
+ {
+ element: "button",
+ className: "button is-dark is-outlined is-small",
+ onClick: () => resultUi.addMalfunctioningTest(path),
+ title: "Add to malfunctioning tests list.",
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: resultUi.isTestOnMalfunctioningList(
+ path
+ )
+ ? "fas fa-check"
+ : "fas fa-plus",
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ })),
+ },
+ ],
+ });
+ if (lastCompletedTests.length > 0) {
+ lastCompletedTestsView.appendChild(
+ UI.createElement({
+ className: "container",
+ style: "overflow-x: auto;",
+ id: "last-completed-overflow",
+ children: testsTable,
+ })
+ );
+ } else {
+ const noTestsLabel = UI.createElement({
+ text: "- No Timed-Out Tests -",
+ style: "text-align: center",
+ });
+ lastCompletedTestsView.appendChild(noTestsLabel);
+ }
+
+ UI.saveScrollPosition("last-completed-overflow");
+
+ const lastCompletedTestsElement = UI.getElement(
+ "last-completed-tests"
+ );
+ lastCompletedTestsElement.innerHTML = "";
+ lastCompletedTestsElement.appendChild(lastCompletedTestsView);
+
+ UI.loadScrollPosition("last-completed-overflow");
+ },
+ renderApiResults() {
+ const { results, status } = resultUi.state;
+
+ const apiResultsView = UI.createElement({
+ style: "margin-bottom: 20px",
+ });
+
+ const heading = UI.createElement({
+ text: "API Results",
+ className: "title is-4",
+ });
+ apiResultsView.appendChild(heading);
+
+ if (!results) {
+ const loadingIndicator = UI.createElement({
+ className: "level",
+ children: {
+ element: "span",
+ className: "level-item",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-spinner fa-pulse",
+ },
+ {
+ style: "margin-left: 0.4em;",
+ text: "Loading results ...",
+ },
+ ],
+ },
+ });
+ apiResultsView.appendChild(loadingIndicator);
+
+ const apiResults = UI.getElement("api-results");
+ apiResults.innerHTML = "";
+ apiResults.appendChild(apiResultsView);
+ return;
+ }
+
+ const width = status.status === "running" ? "7.5em" : "auto";
+ const header = UI.createElement({
+ element: "thead",
+ children: [
+ {
+ element: "tr",
+ children: [
+ { element: "th", text: "API" },
+ { element: "th", text: "Pass", style: `min-width: ${width}` },
+ { element: "th", text: "Fail", style: `min-width: ${width}` },
+ {
+ element: "th",
+ text: "Timeout",
+ style: `min-width: ${width}`,
+ },
+ {
+ element: "th",
+ text: "Not Run",
+ style: `min-width: ${width}`,
+ },
+ {
+ element: "th",
+ text: "Test Files Run",
+ style: `min-width: ${width}`,
+ },
+ { element: "th", text: "Export" },
+ ],
+ },
+ ],
+ });
+
+ const apis = Object.keys(results).sort((apiA, apiB) =>
+ apiA.toLowerCase() > apiB.toLowerCase() ? 1 : -1
+ );
+
+ const rows = apis.map((api) => {
+ const {
+ complete = 0,
+ pass = 0,
+ fail = 0,
+ timeout = 0,
+ timeoutfiles = [],
+ not_run: notRun = 0,
+ total,
+ } = results[api];
+        const isDone = results[api].complete == results[api].total;
+ const totalTestResults = pass + fail + timeout + notRun;
+ return UI.createElement({
+ element: "tr",
+ style: "white-space: nowrap",
+ children: [
+ { element: "td", text: api },
+ {
+ element: "td",
+ children: {
+ style: `color: hsl(141, 71%, 38%); overflow: visible; white-space: nowrap; width: ${width}`,
+ text: `${pass} (${utils.percent(pass, totalTestResults)}%)`,
+ },
+ },
+ {
+ element: "td",
+ children: {
+ className: "has-text-danger",
+ style: `overflow: visible; white-space: nowrap; width: ${width}`,
+ text: `${fail} (${utils.percent(fail, totalTestResults)}%)`,
+ },
+ },
+ {
+ element: "td",
+ children: {
+ style: `color: hsl(48, 100%, 40%); overflow: visible; white-space: nowrap; width: ${width}`,
+ text: `${timeout} (${utils.percent(
+ timeout,
+ totalTestResults
+ )}%)`,
+ },
+ },
+ {
+ element: "td",
+ children: {
+ className: "has-text-info",
+ style: `overflow: visible; white-space: nowrap; width: ${width}`,
+ text: `${notRun} (${utils.percent(
+ notRun,
+ totalTestResults
+ )}%)`,
+ },
+ },
+ {
+ element: "td",
+ children: {
+ style: `overflow: visible; white-space: nowrap; width: ${width}`,
+ text: `${complete}/${total} (${utils.percent(
+ complete,
+ total
+ )}%)`,
+ },
+ },
+ {
+ element: "td",
+ children: {
+ className: "field has-addons",
+ children: [
+ {
+ className: "control",
+ children: {
+ className: "button is-dark is-outlined is-small",
+ onclick: () => resultUi.downloadApiResultJson(api),
+ text: "json",
+ title: `Download results of ${api} API as JSON file.`,
+ },
+ },
+ resultUi.state.reportsEnabled
+ ? {
+ className: "control",
+ children: {
+ className: "button is-dark is-outlined is-small",
+ disabled: !isDone,
+ onclick: () => resultUi.openHtmlReport(api),
+ text: "report",
+ title: `Show results of ${api} API in WPT Report format.`,
+ },
+ }
+ : null,
+ ],
+ },
+ },
+ ],
+ });
+ });
+
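+        // Sum the per-API counts into session-wide totals for the table footer.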
+ const { pass, fail, timeout, not_run, complete, total } = apis.reduce(
+ (sum, api) => {
+ Object.keys(sum).forEach(
+ (key) => (sum[key] += results[api][key] ? results[api][key] : 0)
+ );
+ return sum;
+ },
+ { complete: 0, total: 0, pass: 0, fail: 0, timeout: 0, not_run: 0 }
+ );
+ const totalTestResults = pass + fail + timeout + not_run;
+
+ const footer = UI.createElement({
+ element: "tfoot",
+ children: [
+ {
+ element: "tr",
+ children: [
+ { element: "th", text: "Total" },
+ {
+ element: "th",
+ children: {
+ style: `color: hsl(141, 71%, 38%); overflow: visible; white-space: nowrap; width: ${width}`,
+ text: `${pass} (${utils.percent(
+ pass,
+ totalTestResults
+ )}%)`,
+ },
+ },
+ {
+ element: "th",
+ children: {
+ style: `overflow: visible; white-space: nowrap; width: ${width}`,
+ className: "has-text-danger",
+ text: `${fail} (${utils.percent(
+ fail,
+ totalTestResults
+ )}%)`,
+ },
+ },
+ {
+ element: "th",
+ children: {
+ style: `color: hsl(48, 100%, 40%); overflow: visible; white-space: nowrap; width: ${width}`,
+ text: `${timeout} (${utils.percent(
+ timeout,
+ totalTestResults
+ )}%)`,
+ },
+ },
+ {
+ element: "th",
+ children: {
+ style: `overflow: visible; white-space: nowrap; width: ${width}`,
+ className: "has-text-info",
+ text: `${not_run} (${utils.percent(
+ not_run,
+ totalTestResults
+ )}%)`,
+ },
+ },
+ {
+ element: "th",
+ children: {
+ style: `overflow: visible; white-space: nowrap; width: ${width}`,
+ text: `${complete}/${total} (${utils.percent(
+ complete,
+ total
+ )}%)`,
+ },
+ },
+ { element: "th" },
+ ],
+ },
+ ],
+ });
+
+ const resultsTable = UI.createElement({
+ className: "container",
+ style: "overflow-x: auto",
+ id: "results-overflow",
+ children: {
+ element: "table",
+ className: "table",
+ id: "results-table",
+ style:
+ "width: 100%; min-width: 30em; border-radius: 3px; border: 2px solid hsl(0, 0%, 86%);",
+ children: [header, { element: "tbody", children: rows }, footer],
+ },
+ });
+ apiResultsView.appendChild(resultsTable);
+
+ UI.saveScrollPosition("results-overflow");
+
+ const apiResults = UI.getElement("api-results");
+ apiResults.innerHTML = "";
+ apiResults.appendChild(apiResultsView);
+
+ UI.loadScrollPosition("results-overflow");
+ },
+ renderExportView() {
+ const { status } = resultUi.state;
+ if (!status) return;
+
+ const exportElement = UI.getElement("export");
+ exportElement.innerHTML = "";
+
+ const heading = UI.createElement({
+ className: "title is-4",
+ text: "Export",
+ });
+ exportElement.appendChild(heading);
+
+ const resultsField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: { className: "label", text: "Results" },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "control columns",
+ style: "width: 100%",
+ children: [
+ {
+ className: "column is-9",
+ text:
+ "Download results for import into other WMAS Test Suite instances.",
+ },
+ {
+ className: "column is-3",
+ children: {
+ className:
+ "button is-dark is-outlined is-small is-fullwidth",
+ onClick: resultUi.downloadResults,
+ disabled: status.status !== "completed",
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: {
+ element: "i",
+ className: "fas fa-file-archive",
+ },
+ },
+ { element: "span", text: "Download Zip" },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ ],
+ });
+ exportElement.appendChild(resultsField);
+
+ const jsonField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: {
+ className: "label",
+ text: "All JSON Files",
+ },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "control columns",
+ style: "width: 100%",
+ children: [
+ {
+ className: "column is-9",
+ text:
+ "Download JSON files containing results of completed test files.",
+ },
+ {
+ className: "column is-3",
+ children: {
+ className:
+ "button is-dark is-outlined is-small is-fullwidth",
+ onclick: resultUi.downloadFinishedApiJsons,
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: {
+ element: "i",
+ className: "fas fa-file-archive",
+ },
+ },
+ { element: "span", text: "Download Zip" },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ ],
+ });
+ exportElement.appendChild(jsonField);
+
+ const htmlField = UI.createElement({
+ className: "field is-horizontal",
+ children: [
+ {
+ className: "field-label",
+ children: {
+ className: "label",
+ text: "Session result HTML",
+ },
+ },
+ {
+ className: "field-body",
+ children: {
+ className: "control columns",
+ style: "width: 100%",
+ children: [
+ {
+ className: "column is-9",
+ text:
+                    "Download this session's results as a standalone HTML page, similar to this page.",
+ },
+ {
+ className: "column is-3",
+ children: {
+ className:
+ "button is-dark is-outlined is-small is-fullwidth",
+ onClick: resultUi.downloadHtmlZip,
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: {
+ element: "i",
+ className: "fas fa-code",
+ },
+ },
+ { element: "span", text: "Download HTML" },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ ],
+ });
+ exportElement.appendChild(htmlField);
+ },
+ renderMalfunctioningTests() {
+ const malfunctioningTestsView = UI.createElement({});
+ const heading = UI.createElement({
+ className: "title is-4",
+ text: "Malfunctioning Tests",
+ });
+ malfunctioningTestsView.appendChild(heading);
+
+ const { malfunctioningTests } = resultUi.state;
+ const testsTable = UI.createElement({
+ element: "table",
+ className: "table",
+ style: "min-width: 100%",
+ children: [
+ {
+ element: "thead",
+ children: [
+ {
+ element: "tr",
+ children: [
+ { element: "td", text: "Test File" },
+ { element: "td", text: "" },
+ ],
+ },
+ ],
+ },
+ {
+ element: "tbody",
+ children: malfunctioningTests.map((path) => ({
+ element: "tr",
+ children: [
+ { element: "td", text: path },
+ {
+ element: "td",
+ children: resultUi.state.configuration.isPublic
+ ? null
+ : {
+ element: "button",
+ className: "button is-dark is-outlined is-small",
+ onClick: () =>
+ resultUi.removeMalfunctioningTest(path),
+ title: "Remove from malfunctioning tests list.",
+ children: [
+ {
+ element: "span",
+ className: "icon",
+ children: [
+ {
+ element: "i",
+ className: "fas fa-trash-alt",
+ },
+ ],
+ },
+ ],
+ },
+ },
+ ],
+ })),
+ },
+ ],
+ });
+ if (malfunctioningTests.length > 0) {
+ malfunctioningTestsView.appendChild(
+ UI.createElement({
+ className: "container",
+ style: "overflow-x: auto",
+ id: "malfunctioning-overflow",
+ children: testsTable,
+ })
+ );
+ } else {
+ const noTestsLabel = UI.createElement({
+ text: "- No Tests Available -",
+ style: "text-align: center",
+ });
+ malfunctioningTestsView.appendChild(noTestsLabel);
+ }
+
+ UI.saveScrollPosition("malfunctioning-overflow");
+
+ const malfunctioningTestsElement = UI.getElement(
+ "malfunctioning-tests"
+ );
+ malfunctioningTestsElement.innerHTML = "";
+ malfunctioningTestsElement.appendChild(malfunctioningTestsView);
+
+ UI.loadScrollPosition("malfunctioning-overflow");
+ },
+ };
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/wave/www/submitresult.html b/testing/web-platform/tests/tools/wave/www/submitresult.html
new file mode 100644
index 0000000000..07e9f35235
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/submitresult.html
@@ -0,0 +1,63 @@
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8" />
+ <title>Web Platform Tests</title>
+ <style>
+ #console {
+ font-family: monospace;
+ }
+ </style>
+ <script src="lib/wave-service.js"></script>
+ </head>
+ <body>
+ <div id="console"></div>
+ <script>
+ var screenConsole = document.getElementById("console");
+ var log = function() {
+ var text = "";
+ for (var i = 0; i < arguments.length; i++) {
+ text += arguments[i] + " ";
+ }
+ text = text.replace(/ /gm, "&nbsp;");
+ text = text.replace(/\n/gm, "<br/>");
+ screenConsole.innerHTML += "<br/>" + text;
+ };
+
+ window.onerror = function(error) {
+ log(error);
+ };
+
+ var HOSTNAME = location.hostname;
+ var PORT = location.port;
+ var PROTOCOL = location.protocol.replace(/:/, "");
+ var QUERY = (location.search.replace(/\?/, ""));
+ var match = QUERY.match(/token=([^&]+)/);
+ var TOKEN = match ? match[1] : null;
+ QUERY += /[\?&]path=/.test(location.search) ? "" : "&resume=1";
+ match = QUERY.match(/data=([^&]+)/);
+
+ var parsedQuery = {};
+
+ var parts = QUERY.split("&");
+ for (var i = 0; i < parts.length; i++) {
+ var part = parts[i];
+ var key = part.split("=")[0];
+ var value = part.split("=")[1];
+ parsedQuery[key] = value;
+ }
+
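+      // The completed test reports its result URL-encoded in the "result"
+      // query parameter; forward it to the server, then load the next test.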
+ var resultData;
+ var rawResult = parsedQuery.result;
+ if (rawResult) {
+ resultData = JSON.parse(decodeURIComponent(rawResult));
+ }
+
+ WaveService.createResult(TOKEN, resultData, function() {
+ WaveService.readNextTest(TOKEN, function(url) {
+ location.href = url;
+ });
+ });
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/wave/www/test.html b/testing/web-platform/tests/tools/wave/www/test.html
new file mode 100644
index 0000000000..d06b18fef6
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/test.html
@@ -0,0 +1,155 @@
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8" />
+ <title>Creating Session - Web Platform Tests</title>
+ <link rel="stylesheet" href="css/bulma-0.7.5/bulma.min.css" />
+ <link rel="stylesheet" href="css/style.css" />
+ <link rel="stylesheet" href="css/main.css" />
+ <script src="lib/wave-service.js"></script>
+ <style>
+ #console {
+ font-family: monospace;
+ }
+ .spinner {
+ height: 0.7em;
+ width: 0.7em;
+ }
+ .fa-pulse {
+ -webkit-animation: fa-spin 1s steps(8) infinite;
+ animation: fa-spin 1s steps(8) infinite;
+ }
+ @-webkit-keyframes fa-spin {
+ 0% {
+ -webkit-transform: rotate(0deg);
+ transform: rotate(0deg);
+ }
+ to {
+ -webkit-transform: rotate(1turn);
+ transform: rotate(1turn);
+ }
+ }
+ @keyframes fa-spin {
+ 0% {
+ -webkit-transform: rotate(0deg);
+ transform: rotate(0deg);
+ }
+ to {
+ -webkit-transform: rotate(1turn);
+ transform: rotate(1turn);
+ }
+ }
+ </style>
+ </head>
+ <body>
+ <section class="section">
+ <div class="container">
+ <img src="res/wavelogo_2016.jpg" alt="WAVE Logo" class="site-logo" />
+
+ <h1 class="title is-spaced">
+ <span>
+ <img src="res/spinner-solid.svg" class="spinner fa-pulse" />
+ </span>
+ Creating Session
+ </h1>
+ <div id="content">
+ <div id="details-wrapper">
+ <div class="detail">
+ <div>Reference Tokens:</div>
+ <div id="reference-tokens"></div>
+ </div>
+ <div class="detail">
+ <div>Test Paths:</div>
+ <div id="test-path"></div>
+ </div>
+ </div>
+
+ <div id="console"></div>
+ </div>
+ </div>
+ </section>
+ <script>
+ var screenConsole = document.getElementById("console");
+ var log = function() {
+ var text = "";
+ for (var i = 0; i < arguments.length; i++) {
+ text += arguments[i] + " ";
+ }
+ text = text.replace(/ /gm, "&nbsp;");
+ text = text.replace(/\n/gm, "<br/>");
+ screenConsole.innerHTML += "<br/>" + text;
+ };
+
+ window.onerror = function(error) {
+ log(error);
+ };
+
+ var HOSTNAME = location.hostname;
+ var PORT = location.port;
+ var PROTOCOL = location.protocol.replace(/:/, "");
+ var QUERY = decodeURIComponent(location.search.replace(/\?/, ""));
+ var match = QUERY.match(/token=([^&]+)/);
+ var TOKEN = match ? match[1] : null;
+ QUERY += /[\?&]path=/.test(location.search) ? "" : "&resume=1";
+
+ var parsedQuery = {};
+
+ var parts = QUERY.split("&");
+ for (var i = 0; i < parts.length; i++) {
+ var part = parts[i];
+ var key = part.split("=")[0];
+ var value = part.split("=")[1];
+ parsedQuery[key] = value;
+ }
+
+ var includedTests = [];
+ var paths = parsedQuery["path"].split(",");
+ for (var i = 0; i < paths.length; i++) {
+ var path = paths[i];
+ includedTests.push(path.trim());
+ }
+
+ var excludedTests = [
+ "/html/semantics/scripting-1/the-script-element/module/dynamic-import/no-active-script-manual-classic.html",
+ "/html/semantics/scripting-1/the-script-element/module/dynamic-import/no-active-script-manual-module.html"
+ ];
+
+ var referenceTokens = [];
+ if (parsedQuery["reftoken"]) {
+ var paths = parsedQuery["reftoken"].split(",");
+ for (var i = 0; i < paths.length; i++) {
+ var path = paths[i];
+ referenceTokens.push(path.trim());
+ }
+ }
+
+ var testPath = document.getElementById("test-path");
+ var paths = includedTests;
+ for (var i = 0; i < paths.length; i++) {
+ var path = paths[i];
+ testPath.innerText += path + "\n";
+ }
+ var referenceTokensElement = document.getElementById("reference-tokens");
+ if (referenceTokens.length === 0) {
+ referenceTokensElement.innerText = "none";
+ } else {
+ for (var i = 0; i < referenceTokens.length; i++) {
+ var token = referenceTokens[i];
+ referenceTokensElement.innerText += token + "\n";
+ }
+ }
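+      // Create the session from the parsed include/exclude lists and
+      // reference tokens, then redirect to the new session's page.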
+ log("Please wait ...");
+ WaveService.createSession(
+ {
+ tests: { include: includedTests, exclude: excludedTests },
+ referenceTokens: referenceTokens
+ },
+ function(token) {
+ log("Session created successfully! Token: " + token);
+ log("Redirecting ...");
+ location.href = "/newsession.html?token=" + token;
+ }
+ );
+ </script>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.eot b/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.eot
new file mode 100644
index 0000000000..da7bd5eb70
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.eot
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.svg b/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.svg
new file mode 100644
index 0000000000..caa8cc43ca
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.svg
@@ -0,0 +1,3296 @@
+<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1">
+<metadata>
+Created by FontForge 20190112 at Tue Feb 12 10:24:59 2019
+ By Robert Madole
+Copyright (c) Font Awesome
+</metadata>
+<defs>
+<font id="FontAwesome5Brands-Regular" horiz-adv-x="448" >
+ <font-face
+ font-family="Font Awesome 5 Brands Regular"
+ font-weight="400"
+ font-stretch="normal"
+ units-per-em="512"
+ panose-1="2 0 5 3 0 0 0 0 0 0"
+ ascent="448"
+ descent="-64"
+ bbox="-0.200195 -66.9505 641.5 448.3"
+ underline-thickness="25"
+ underline-position="-51"
+ unicode-range="U+0020-F7E3"
+ />
+ <missing-glyph />
+ <glyph glyph-name="twitter-square" unicode="&#xf081;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM351.1 257.2c12.8008 9.2998 24 20.8994 32.9004 34c-11.7998 -5.10059 -24.5996 -8.7998 -37.7998 -10.2002
+c13.5996 8.09961 23.8994 20.9004 28.7998 36.0996c-12.5996 -7.5 -26.7998 -13 -41.5996 -15.7998c-12 12.7998 -29 20.7002 -47.9004 20.7002c-40 0 -73.2998 -36.0996 -64 -80.5996c-54.4004 2.7998 -102.9 28.7998 -135.2 68.5996
+c-5.7002 -9.7002 -8.89941 -20.9004 -8.89941 -33.0996v-0.107422c0 -19.3584 13.0811 -43.7715 29.1992 -54.4932c-10.6992 0.400391 -20.8994 3.40039 -29.5996 8.2998v-0.799805c0 -31.8994 22.5 -58.2998 52.5 -64.3994
+c-10.4004 -2.7002 -19.5 -2.7002 -29.5996 -1.2002c8.2998 -26 32.5 -44.9004 61.2998 -45.5c-22.5 -17.6006 -50.7002 -28 -81.4004 -28c-5.39941 0 -10.5 0.200195 -15.7998 0.799805c29 -18.5996 63.5 -29.4004 100.7 -29.4004c120.6 0 186.6 99.9004 186.6 186.601
+c0 2.7998 0 5.7002 -0.200195 8.5z" />
+ <glyph glyph-name="facebook-square" unicode="&#xf082;"
+d="M448 368v-352c0 -26.5 -21.5 -48 -48 -48h-85.2998v177.2h60.5996l8.7002 67.5996h-69.2998v43.2002c0 19.5996 5.39941 32.9004 33.5 32.9004h35.7998v60.3994c-6.2002 0.799805 -27.4004 2.7002 -52.2002 2.7002c-51.5996 0 -87 -31.5 -87 -89.4004v-49.8994h-60.7998
+v-67.6006h60.9004v-177.1h-196.9c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48z" />
+ <glyph glyph-name="linkedin" unicode="&#xf08c;"
+d="M416 416c17.5996 0 32 -14.5 32 -32.2998v-383.4c0 -17.7998 -14.4004 -32.2998 -32 -32.2998h-384.1c-17.6006 0 -31.9004 14.5 -31.9004 32.2998v383.4c0 17.7998 14.2998 32.2998 31.9004 32.2998h384.1zM135.4 32h0.0996094v213.8h-66.5v-213.8h66.4004zM102.2 275
+c21.2998 0 38.5 17.2002 38.5 38.5c0 21.2002 -17.2998 38.5 -38.5 38.5c-21.2998 0 -38.5 -17.2998 -38.5 -38.5s17.2002 -38.5 38.5 -38.5zM384.3 32v117.2c0 57.5996 -12.5 101.899 -79.7002 101.899c-32.2998 0 -54 -17.6992 -62.8994 -34.5h-0.900391v29.2002h-63.7002
+v-213.8h66.4004v105.8c0 27.9004 5.2998 54.9004 39.9004 54.9004c34 0 34.5 -31.9004 34.5 -56.7002v-104h66.3994z" />
+ <glyph glyph-name="github-square" unicode="&#xf092;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM277.3 32.2998c66 22 110.8 84.9004 110.7 158.3c0 91.8008 -74.4004 161.5 -166.2 161.5s-162 -69.6992 -162 -161.5
+c0 -73.3994 46.2002 -136.199 112.2 -158.3c8.5 -1.5 11.5 3.7002 11.5 8c0 4.10059 -0.200195 26.7002 -0.200195 40.6006c0 0 -46.3994 -10 -56.0996 19.6992c0 0 -7.60059 19.2002 -18.4004 24.2002c0 0 -15.0996 10.4004 1.10059 10.2002
+c0 0 16.3994 -1.2998 25.5 -17.0996c14.5 -25.6006 38.7998 -18.2002 48.2998 -13.9004c1.5 10.5996 5.7998 18 10.5996 22.2998c-37 4.10059 -74.2998 9.5 -74.2998 73.1006c0 18.1992 5 27.2998 15.5996 39c-1.7998 4.39941 -7.39941 22.0996 1.7002 45
+c13.9004 4.2998 45.7002 -17.9004 45.7002 -17.9004c13.2002 3.7002 27.5 5.59961 41.5996 5.59961c14.1006 0 28.4004 -1.89941 41.6006 -5.59961c0 0 31.7998 22.2002 45.7002 17.9004c9.09961 -23 3.39941 -40.7002 1.69922 -45
+c10.6006 -11.7002 17.1006 -20.8008 17.1006 -39c0 -63.9004 -39 -69 -76 -73.1006c6.09961 -5.2002 11.2998 -15.0996 11.2998 -30.7002c0 -22.2998 -0.200195 -49.8994 -0.200195 -55.2998c0 -4.2998 3.10059 -9.5 11.5 -8zM179.2 93.4004
+c-1.90039 -0.400391 -3.7002 0.399414 -3.90039 1.69922c-0.200195 1.5 1.10059 2.80078 3 3.2002c1.90039 0.200195 3.7002 -0.599609 3.90039 -1.89941c0.299805 -1.30078 -1 -2.60059 -3 -3zM169.7 94.2998c0 1.5 -1.7998 2.60059 -3.7002 2.40039
+c-2 0 -3.5 -1.10059 -3.5 -2.40039c0 -1.5 1.5 -2.59961 3.7002 -2.39941c2 0 3.5 1.09961 3.5 2.39941zM156 95.4004c-0.400391 -1.30078 -2.40039 -1.90039 -4.09961 -1.30078c-1.90039 0.400391 -3.2002 1.90039 -2.80078 3.2002
+c0.400391 1.2998 2.40039 1.90039 4.10059 1.5c2 -0.599609 3.2998 -2.09961 2.7998 -3.39941zM143.7 100.8c0.899414 0.799805 0.399414 2.7998 -0.900391 4.10059c-1.5 1.5 -3.39941 1.69922 -4.2998 0.599609c-1 -0.900391 -0.599609 -2.7998 0.900391 -4.09961
+c1.5 -1.5 3.39941 -1.7002 4.2998 -0.600586zM134.6 109.9c1.10059 0.799805 1.10059 2.59961 0 4.09961c-0.899414 1.5 -2.59961 2.2002 -3.69922 1.2998c-1.10059 -0.700195 -1.10059 -2.39941 0 -3.89941c1.09961 -1.5 2.7998 -2.10059 3.69922 -1.5zM128.1 119.6
+c0.900391 0.700195 0.700195 2.2002 -0.399414 3.5c-1.10059 1 -2.60059 1.5 -3.5 0.600586c-0.900391 -0.700195 -0.700195 -2.2002 0.399414 -3.5c1.10059 -1 2.60059 -1.5 3.5 -0.600586zM121.4 127c0.399414 0.799805 -0.200195 1.90039 -1.5 2.59961
+c-1.30078 0.5 -2.40039 0.200195 -2.80078 -0.399414c-0.399414 -0.900391 0.200195 -2 1.5 -2.60059c1.10059 -0.699219 2.40039 -0.5 2.80078 0.400391z" />
+ <glyph glyph-name="twitter" unicode="&#xf099;" horiz-adv-x="511"
+d="M459.37 296.284c0.325195 -4.54785 0.325195 -9.09766 0.325195 -13.6455c0 -138.72 -105.583 -298.558 -298.559 -298.558c-59.4521 0 -114.68 17.2188 -161.137 47.1055c8.44727 -0.973633 16.5684 -1.29883 25.3398 -1.29883
+c49.0547 0 94.2129 16.5684 130.274 44.832c-46.1318 0.975586 -84.792 31.1885 -98.1123 72.7725c6.49805 -0.974609 12.9951 -1.62402 19.8184 -1.62402c9.4209 0 18.8428 1.2998 27.6133 3.57324c-48.0811 9.74707 -84.1426 51.9795 -84.1426 102.984v1.29883
+c13.9688 -7.79688 30.2139 -12.6699 47.4307 -13.3184c-28.2637 18.8428 -46.7803 51.0049 -46.7803 87.3906c0 19.4922 5.19629 37.3604 14.2939 52.9541c51.6543 -63.6748 129.3 -105.258 216.364 -109.807c-1.62402 7.79688 -2.59863 15.918 -2.59863 24.04
+c0 57.8271 46.7822 104.934 104.934 104.934c30.2139 0 57.502 -12.6699 76.6709 -33.1367c23.7148 4.54785 46.4551 13.3193 66.5986 25.3398c-7.79785 -24.3662 -24.3662 -44.833 -46.1318 -57.8271c21.1172 2.27344 41.584 8.12207 60.4258 16.2432
+c-14.292 -20.791 -32.1611 -39.3086 -52.6279 -54.2529z" />
+ <glyph glyph-name="facebook" unicode="&#xf09a;"
+d="M448 391.3v-398.5c0 -13.7002 -11.0996 -24.7002 -24.7002 -24.7002h-114.2v173.4h58.2002l8.7002 67.5996h-67v43.2002c0 19.6006 5.40039 32.9004 33.5 32.9004h35.7998v60.5c-6.2002 0.799805 -27.3994 2.7002 -52.2002 2.7002c-51.5996 0 -87 -31.5 -87 -89.4004
+v-49.9004h-58.3994v-67.5996h58.3994v-173.5h-214.399c-13.6006 0 -24.7002 11.0996 -24.7002 24.7002v398.6c0 13.6006 11.0996 24.7002 24.7002 24.7002h398.5c13.7002 0 24.7998 -11.0996 24.7998 -24.7002z" />
+ <glyph glyph-name="github" unicode="&#xf09b;" horiz-adv-x="496"
+d="M165.9 50.5996c0 -2 -2.30078 -3.59961 -5.2002 -3.59961c-3.2998 -0.299805 -5.60059 1.2998 -5.60059 3.59961c0 2 2.30078 3.60059 5.2002 3.60059c3 0.299805 5.60059 -1.2998 5.60059 -3.60059zM134.8 55.0996c0.700195 2 3.60059 3 6.2002 2.30078
+c3 -0.900391 4.90039 -3.2002 4.2998 -5.2002c-0.599609 -2 -3.59961 -3 -6.2002 -2c-3 0.599609 -5 2.89941 -4.2998 4.89941zM179 56.7998c2.90039 0.299805 5.59961 -1 5.90039 -2.89941c0.299805 -2 -1.7002 -3.90039 -4.60059 -4.60059
+c-3 -0.700195 -5.59961 0.600586 -5.89941 2.60059c-0.300781 2.2998 1.69922 4.19922 4.59961 4.89941zM244.8 440c138.7 0 251.2 -105.3 251.2 -244c0 -110.9 -67.7998 -205.8 -167.8 -239c-12.7002 -2.2998 -17.2998 5.59961 -17.2998 12.0996
+c0 8.2002 0.299805 49.9004 0.299805 83.6006c0 23.5 -7.7998 38.5 -17 46.3994c55.8994 6.30078 114.8 14 114.8 110.5c0 27.4004 -9.7998 41.2002 -25.7998 58.9004c2.59961 6.5 11.0996 33.2002 -2.60059 67.9004c-20.8994 6.59961 -69 -27 -69 -27
+c-20 5.59961 -41.5 8.5 -62.7998 8.5s-42.7998 -2.90039 -62.7998 -8.5c0 0 -48.0996 33.5 -69 27c-13.7002 -34.6006 -5.2002 -61.4004 -2.59961 -67.9004c-16 -17.5996 -23.6006 -31.4004 -23.6006 -58.9004c0 -96.1992 56.4004 -104.3 112.3 -110.5
+c-7.19922 -6.59961 -13.6992 -17.6992 -16 -33.6992c-14.2998 -6.60059 -51 -17.7002 -72.8994 20.8994c-13.7002 23.7998 -38.6006 25.7998 -38.6006 25.7998c-24.5 0.300781 -1.59961 -15.3994 -1.59961 -15.3994c16.4004 -7.5 27.7998 -36.6006 27.7998 -36.6006
+c14.7002 -44.7998 84.7002 -29.7998 84.7002 -29.7998c0 -21 0.299805 -55.2002 0.299805 -61.3994c0 -6.5 -4.5 -14.4004 -17.2998 -12.1006c-99.7002 33.4004 -169.5 128.3 -169.5 239.2c0 138.7 106.1 244 244.8 244zM97.2002 95.0996
+c1.2998 1.30078 3.59961 0.600586 5.2002 -1c1.69922 -1.89941 2 -4.19922 0.699219 -5.19922c-1.2998 -1.30078 -3.59961 -0.600586 -5.19922 1c-1.7002 1.89941 -2 4.19922 -0.700195 5.19922zM86.4004 103.2c0.699219 1 2.2998 1.2998 4.2998 0.700195
+c2 -1 3 -2.60059 2.2998 -3.90039c-0.700195 -1.40039 -2.7002 -1.7002 -4.2998 -0.700195c-2 1 -3 2.60059 -2.2998 3.90039zM118.8 67.5996c1.2998 1.60059 4.2998 1.30078 6.5 -1c2 -1.89941 2.60059 -4.89941 1.2998 -6.19922
+c-1.2998 -1.60059 -4.19922 -1.30078 -6.5 1c-2.2998 1.89941 -2.89941 4.89941 -1.2998 6.19922zM107.4 82.2998c1.59961 1.2998 4.19922 0.299805 5.59961 -2c1.59961 -2.2998 1.59961 -4.89941 0 -6.2002c-1.2998 -1 -4 0 -5.59961 2.30078
+c-1.60059 2.2998 -1.60059 4.89941 0 5.89941z" />
+ <glyph glyph-name="pinterest" unicode="&#xf0d2;" horiz-adv-x="496"
+d="M496 192c0 -137 -111 -248 -248 -248c-25.5996 0 -50.2002 3.90039 -73.4004 11.0996c10.1006 16.5 25.2002 43.5 30.8008 65c3 11.6006 15.3994 59 15.3994 59c8.10059 -15.3994 31.7002 -28.5 56.7998 -28.5c74.8008 0 128.7 68.8008 128.7 154.301
+c0 81.8994 -66.8994 143.199 -152.899 143.199c-107 0 -163.9 -71.7998 -163.9 -150.1c0 -36.4004 19.4004 -81.7002 50.2998 -96.0996c4.7002 -2.2002 7.2002 -1.2002 8.2998 3.2998c0.800781 3.39941 5 20.2998 6.90039 28.0996
+c0.599609 2.5 0.299805 4.7002 -1.7002 7.10059c-10.0996 12.5 -18.2998 35.2998 -18.2998 56.5996c0 54.7002 41.4004 107.6 112 107.6c60.9004 0 103.6 -41.5 103.6 -100.899c0 -67.1006 -33.8994 -113.601 -78 -113.601c-24.2998 0 -42.5996 20.1006 -36.6992 44.8008
+c7 29.5 20.5 61.2998 20.5 82.5996c0 19 -10.2002 34.9004 -31.4004 34.9004c-24.9004 0 -44.9004 -25.7002 -44.9004 -60.2002c0 -22 7.40039 -36.7998 7.40039 -36.7998s-24.5 -103.801 -29 -123.2c-5 -21.4004 -3 -51.6006 -0.900391 -71.2002
+c-92.1992 36.0996 -157.6 125.9 -157.6 231c0 137 111 248 248 248s248 -111 248 -248z" />
+ <glyph glyph-name="pinterest-square" unicode="&#xf0d3;"
+d="M448 368v-352c0 -26.5 -21.5 -48 -48 -48h-245.6c9.7998 16.4004 22.3994 40 27.3994 59.2998c3 11.5 15.2998 58.4004 15.2998 58.4004c8 -15.2998 31.4004 -28.2002 56.3008 -28.2002c74.0996 0 127.399 68.0996 127.399 152.7
+c0 81.0996 -66.2002 141.8 -151.399 141.8c-106 0 -162.2 -71.0996 -162.2 -148.6c0 -36 19.2002 -80.8008 49.7998 -95.1006c4.7002 -2.2002 7.09961 -1.2002 8.2002 3.2998c0.799805 3.40039 5 20.1006 6.7998 27.8008c0.599609 2.5 0.299805 4.59961 -1.7002 7
+c-10.0996 12.2998 -18.2998 34.8994 -18.2998 56c0 54.1992 41 106.6 110.9 106.6c60.2998 0 102.6 -41.0996 102.6 -99.9004c0 -66.3994 -33.5 -112.399 -77.2002 -112.399c-24.0996 0 -42.0996 19.8994 -36.3994 44.3994c6.89941 29.2002 20.2998 60.7002 20.2998 81.8008
+c0 53 -75.5 45.6992 -75.5 -25c0 -21.7002 7.2998 -36.5 7.2998 -36.5c-31.4004 -132.801 -36.0996 -134.5 -29.5996 -192.601l2.19922 -0.799805h-88.5996c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48z" />
+ <glyph glyph-name="google-plus-square" unicode="&#xf0d4;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM164 92c57.7002 0 96 40.5 96 97.5996c0 6.5 -0.599609 11.6006 -1.59961 16.6006h-94.4004v-34.4004h56.9004
+c-2.40039 -14.5996 -17.2002 -43.0996 -56.8008 -43.0996c-34.0996 0 -61.8994 28.2998 -61.8994 63.2002c0 35 27.7998 63.1992 61.8994 63.1992c19.5 0 32.4004 -8.2998 39.8008 -15.3994l27.0996 26.0996c-17.5 16.4004 -40 26.2002 -67 26.2002
+c-55.2998 0 -100 -44.7002 -100 -100s44.7002 -100 100 -100zM384 173.8v29.2002h-29v29h-29.2002v-29h-29v-29.2002h29v-29h29.2002v29h29z" />
+ <glyph glyph-name="google-plus-g" unicode="&#xf0d5;" horiz-adv-x="640"
+d="M386.061 219.504c1.83398 -9.69238 3.14355 -19.3838 3.14355 -31.9561c0 -109.753 -73.6055 -187.548 -184.404 -187.548c-106.084 0 -192 85.915 -192 192s85.916 192 192 192c51.8643 0 95.083 -18.8594 128.611 -50.292l-52.126 -50.0303
+c-14.1455 13.6211 -39.0283 29.5996 -76.4854 29.5996c-65.4834 0 -118.92 -54.2217 -118.92 -121.277s53.4365 -121.277 118.92 -121.277c75.9609 0 104.514 54.7451 108.965 82.7734h-108.965v66.0088h181.261v-0.000976562zM571.467 213.067h55.7334v-56.001h-55.7334
+v-55.7334h-56.001v55.7334h-55.7324v56.001h55.7324v55.7324h56.001v-55.7324z" />
+ <glyph glyph-name="linkedin-in" unicode="&#xf0e1;"
+d="M100.3 0h-92.8994v299.1h92.8994v-299.1zM53.7998 339.9c-29.7002 0 -53.7998 24.5996 -53.7998 54.2998s24.0996 53.7998 53.7998 53.7998s53.7998 -24.0996 53.7998 -53.7998s-24.0996 -54.2998 -53.7998 -54.2998zM448 0h-92.7002v145.6
+c0 34.7002 -0.700195 79.2002 -48.2998 79.2002c-48.2998 0 -55.7002 -37.7002 -55.7002 -76.7002v-148.1h-92.7998v299.1h89.0996v-40.7998h1.30078c12.3994 23.5 42.6992 48.2998 87.8994 48.2998c94 0 111.3 -61.8994 111.3 -142.3v-164.3h-0.0996094z" />
+ <glyph glyph-name="github-alt" unicode="&#xf113;" horiz-adv-x="480"
+d="M186.1 119.3c0 -20.8994 -10.8994 -55.0996 -36.6992 -55.0996c-25.8008 0 -36.7002 34.2002 -36.7002 55.0996c0 20.9004 10.8994 55.1006 36.7002 55.1006c25.7998 0 36.6992 -34.2002 36.6992 -55.1006zM480 169.8c0 -31.8994 -3.2002 -65.7002 -17.5 -95
+c-37.9004 -76.5996 -142.1 -74.7998 -216.7 -74.7998c-75.7998 0 -186.2 -2.7002 -225.6 74.7998c-14.6006 29 -20.2002 63.1006 -20.2002 95c0 41.9004 13.9004 81.5 41.5 113.601c-5.2002 15.7998 -7.7002 32.3994 -7.7002 48.7998
+c0 21.5 4.90039 32.2998 14.6006 51.7998c45.2998 0 74.2998 -9 108.8 -36c29 6.90039 58.7998 10 88.7002 10c27 0 54.1992 -2.90039 80.3994 -9.2002c34 26.7002 63 35.2002 107.8 35.2002c9.80078 -19.5 14.6006 -30.2998 14.6006 -51.7998
+c0 -16.4004 -2.60059 -32.7002 -7.7002 -48.2002c27.5 -32.4004 39 -72.2998 39 -114.2zM415.7 119.3c0 43.9004 -26.7002 82.6006 -73.5 82.6006c-18.9004 0 -37 -3.40039 -56 -6c-14.9004 -2.30078 -29.7998 -3.2002 -45.1006 -3.2002
+c-15.1992 0 -30.0996 0.899414 -45.0996 3.2002c-18.7002 2.59961 -37 6 -56 6c-46.7998 0 -73.5 -38.7002 -73.5 -82.6006c0 -87.7998 80.4004 -101.3 150.4 -101.3h48.1992c70.3008 0 150.601 13.4004 150.601 101.3zM333.1 174.4
+c25.8008 0 36.7002 -34.2002 36.7002 -55.1006c0 -20.8994 -10.8994 -55.0996 -36.7002 -55.0996c-25.7998 0 -36.6992 34.2002 -36.6992 55.0996c0 20.9004 10.8994 55.1006 36.6992 55.1006z" />
+ <glyph glyph-name="maxcdn" unicode="&#xf136;" horiz-adv-x="511"
+d="M461.1 5.2998h-97.3994l51.8994 242.7c2.30078 10.2002 0.900391 19.5 -4.39941 25.7002c-5 6.09961 -13.7002 9.59961 -24.2002 9.59961h-49.2998l-59.5 -278h-97.4004l59.5 278h-83.3994l-59.5 -278h-97.4004l59.5 278l-44.5996 95.4004h372.1
+c39.4004 0 75.2998 -16.2998 98.2998 -44.9004c23.2998 -28.5996 31.7998 -67.3994 23.6006 -105.899z" />
+ <glyph glyph-name="html5" unicode="&#xf13b;" horiz-adv-x="384"
+d="M0 416h384l-34.9004 -395.8l-157.6 -52.2002l-156.6 52.2002zM308.2 288.1l4.39941 47.7002h-241.1l12.7998 -145.6h166.9l-6 -62.2002l-53.7002 -14.5l-53.5 14.5l-3.5 38.0996h-47.7002l6 -75.7998l98.7002 -27.2998h1.09961v0.299805l97.9004 27l13.5996 148.4h-175.6
+l-4.09961 49.3994h183.8z" />
+ <glyph glyph-name="css3" unicode="&#xf13c;" horiz-adv-x="480"
+d="M480 416l-64 -368l-223.3 -80l-192.7 80l19.5996 94.7998h82l-8 -40.5996l116.4 -44.4004l134.1 44.4004l18.8008 97.0996h-333.4l16 82h333.7l10.5 52.7002h-333.4l16.2998 82h407.4z" />
+ <glyph glyph-name="btc" unicode="&#xf15a;" horiz-adv-x="383"
+d="M310.204 205.362c46.0059 -11.0283 74.9971 -38.4443 69.3262 -99.8906c-7.24805 -76.5723 -61.5967 -97.0547 -142.896 -101.467v-68.0049h-48.5273v66.7451c-12.29 0 -25.21 0 -38.4443 0.314453v-67.0596h-48.5283v68.0049s-8.88867 0.31543 -97.3701 0.31543
+l9.76758 57.666c34.7305 -0.614258 50.3301 -3.4209 53.2549 16.0703v217.43c-4.60645 24.5664 -24.709 22.1045 -63.0234 21.4268v51.6777c58.748 -0.275391 79.5283 -0.539062 97.3701 0v79.4092h48.5283v-77.833c12.9189 0.31543 25.8389 0.629883 38.4443 0.629883
+v77.2031h48.5273v-79.4092c62.3926 -5.35547 109.492 -24.5781 114.851 -81.9287c4.09668 -41.9102 -13.5508 -67.1201 -41.2803 -81.2998zM150.608 313.447v-96.7402c27.416 0 113.126 -6.30273 113.126 48.2119c0 57.0352 -85.7109 48.5283 -113.126 48.5283z
+M150.608 61.6709c32.7715 0 133.126 -6.93262 133.127 53.2529c0 62.3936 -100.355 53.2549 -133.127 53.2549v-106.508z" />
+ <glyph glyph-name="youtube" unicode="&#xf167;" horiz-adv-x="576"
+d="M549.655 323.917c11.4121 -42.8672 11.4121 -132.305 11.4121 -132.305s0 -89.4385 -11.4121 -132.306c-6.28125 -23.6494 -24.7871 -41.5 -48.2842 -47.8203c-42.5908 -11.4863 -213.371 -11.4863 -213.371 -11.4863s-170.78 0 -213.371 11.4863
+c-23.4971 6.32031 -42.0029 24.1709 -48.2842 47.8203c-11.4121 42.8672 -11.4121 132.306 -11.4121 132.306s0 89.4375 11.4121 132.305c6.28125 23.6504 24.7871 42.2754 48.2842 48.5967c42.5908 11.4863 213.371 11.4863 213.371 11.4863s170.781 0 213.371 -11.4863
+c23.4971 -6.32031 42.0029 -24.9463 48.2842 -48.5967zM232.145 110.409l142.739 81.2012l-142.739 81.2051v-162.406z" />
+ <glyph glyph-name="xing" unicode="&#xf168;" horiz-adv-x="384"
+d="M162.7 238c-1.7998 -3.2998 -25.2002 -44.4004 -70.1006 -123.5c-4.89941 -8.2998 -10.7998 -12.5 -17.6992 -12.5h-65.1006c-7.7002 0 -12.0996 7.5 -8.5 14.4004l69 121.3c0.200195 0 0.200195 0.0996094 0 0.299805l-43.8994 75.5996
+c-4.30078 7.80078 0.299805 14.1006 8.5 14.1006h65.0996c7.2998 0 13.2998 -4.10059 18 -12.2002zM382.6 401.9l-144 -253v-0.300781l91.6006 -166.6c3.89941 -7.09961 0.200195 -14.0996 -8.5 -14.0996h-65.2002c-7.59961 0 -13.5996 4 -18 12.1992l-92.4004 168.5
+c3.30078 5.80078 51.5 90.8008 144.801 255.2c4.59961 8.10059 10.3994 12.2002 17.5 12.2002h65.6992c8 0 12.3008 -6.7002 8.5 -14.0996z" />
+ <glyph glyph-name="xing-square" unicode="&#xf169;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM140.4 127.8c4.89941 0 9.09961 2.90039 12.5996 9.10059c32.0996 56.5 48.7998 85.8994 50.0996 88.1992l-31.8994 55.3008
+c-3.40039 5.7998 -7.7002 8.69922 -12.9004 8.69922h-46.5996c-5.7998 0 -9 -4.5 -6 -10.0996l31.3994 -54c0.100586 -0.0996094 0.100586 -0.200195 0 -0.200195l-49.2998 -86.7002c-2.7002 -5 0.5 -10.2998 6 -10.2998h46.6006zM360.1 341.9
+c2.80078 5.2998 -0.299805 10.0996 -6 10h-46.8994c-5.10059 0 -9.2002 -2.90039 -12.5 -8.7002c-66.6006 -117.4 -101.101 -178.2 -103.4 -182.3l66 -120.301c3.2002 -5.7998 7.40039 -8.69922 12.9004 -8.69922h46.5996c6.10059 0 8.7998 5 6 10.0996l-65.5 119v0.200195z
+" />
+ <glyph glyph-name="dropbox" unicode="&#xf16b;" horiz-adv-x="528"
+d="M264.4 331.7l-132 -84.2998l132 -84.3008l-132 -84.2998l-132.4 85.1006l132.3 84.2998l-132.3 83.5l132.3 84.2998zM131.6 52.2998l132 84.2998l132 -84.2998l-132 -84.2998zM264.4 163.9l132 84.2998l-132 83.5996l131.3 84.2002l132.3 -84.2998l-132.3 -84.2998
+l132.3 -84.2002l-132.3 -84.2998z" />
+ <glyph glyph-name="stack-overflow" unicode="&#xf16c;" horiz-adv-x="384"
+d="M290.7 137l-8.2002 -39l-195.7 41l8.2002 39.2998zM341.7 224l-25.5 -30.7998l-153.5 128.3l25.5 30.7998zM310.5 184.3l-16.7998 -36.2998l-181.2 84.5l16.7002 36.5zM262 416l119.3 -160.3l-32 -24l-119.3 160.3zM282.5 88v-39.7002h-200v39.7002h200zM322.2 8v120h40
+v-160h-359.5v160h40v-120h279.5z" />
+ <glyph glyph-name="instagram" unicode="&#xf16d;"
+d="M224.1 307c63.6006 0 114.9 -51.2998 114.9 -114.9c0 -63.5996 -51.2998 -114.899 -114.9 -114.899c-63.5996 0 -114.899 51.2998 -114.899 114.899c0 63.6006 51.2998 114.9 114.899 114.9zM224.1 117.4c41.1006 0 74.7002 33.5 74.7002 74.6992
+c0 41.2002 -33.5 74.7002 -74.7002 74.7002c-41.1992 0 -74.6992 -33.5 -74.6992 -74.7002c0 -41.1992 33.5996 -74.6992 74.6992 -74.6992zM370.5 311.7c0 -14.9004 -12 -26.7998 -26.7998 -26.7998c-14.9004 0 -26.7998 12 -26.7998 26.7998s12 26.7998 26.7998 26.7998
+s26.7998 -12 26.7998 -26.7998zM446.6 284.5c2.10059 -37 2.10059 -147.8 0 -184.8c-1.7998 -35.9004 -10 -67.7002 -36.1992 -93.9004c-26.2002 -26.2998 -58 -34.5 -93.9004 -36.2002c-37 -2.09961 -147.9 -2.09961 -184.9 0
+c-35.8994 1.80078 -67.5996 10 -93.8994 36.2002s-34.5 58 -36.2002 93.9004c-2.09961 37 -2.09961 147.899 0 184.899c1.7998 35.9004 9.90039 67.7002 36.2002 93.9004s58.0996 34.4004 93.8994 36.0996c37 2.10059 147.9 2.10059 184.9 0
+c35.9004 -1.7998 67.7002 -10 93.9004 -36.1992c26.2998 -26.2002 34.5 -58 36.1992 -93.9004zM398.8 60c11.7002 29.4004 9 99.5 9 132.1c0 32.6006 2.7002 102.601 -9 132.101c-7.89941 19.7002 -23 34.7998 -42.5996 42.5996c-29.4004 11.6006 -99.5 9 -132.101 9
+c-32.5996 0 -102.6 2.7002 -132.1 -9c-19.7002 -7.89941 -34.7998 -23 -42.5996 -42.5996c-11.6006 -29.4004 -9 -99.5 -9 -132.101c0 -32.5996 -2.7002 -102.6 9 -132.1c7.89941 -19.7002 23 -34.7998 42.5996 -42.5996c29.4004 -11.6006 99.5 -9 132.1 -9
+c32.6006 0 102.601 -2.7002 132.101 9c19.7002 7.89941 34.7998 23 42.5996 42.5996z" />
+ <glyph glyph-name="flickr" unicode="&#xf16e;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM144.5 129c35.0996 0 63.5 28.4004 63.5 63.5s-28.4004 63.5 -63.5 63.5s-63.5 -28.4004 -63.5 -63.5s28.4004 -63.5 63.5 -63.5z
+M303.5 129c35.0996 0 63.5 28.4004 63.5 63.5s-28.4004 63.5 -63.5 63.5s-63.5 -28.4004 -63.5 -63.5s28.4004 -63.5 63.5 -63.5z" />
+ <glyph glyph-name="adn" unicode="&#xf170;" horiz-adv-x="496"
+d="M248 280.5l64.9004 -98.7998h-129.801zM496 192c0 -136.9 -111.1 -248 -248 -248s-248 111.1 -248 248s111.1 248 248 248s248 -111.1 248 -248zM396.2 109.3l-148.2 223.2l-148.2 -223.2h30.4004l33.5996 51.7002h168.601l33.5996 -51.7002h30.2002z" />
+ <glyph glyph-name="bitbucket" unicode="&#xf171;" horiz-adv-x="499"
+d="M16.2002 416.4l466.8 -0.200195c1 0 1.90039 -0.100586 2.7998 -0.200195c8.7002 -1.40039 14.6006 -9.59961 13.2002 -18.2998l-67.9004 -416.8c-1.2998 -7.80078 -8.09961 -13.5 -16 -13.4004h-325.699c-10.6006 0.0996094 -19.6006 7.7998 -21.3008 18.2002
+l-67.8994 412.1c-0.100586 0.900391 -0.200195 1.90039 -0.200195 2.7998c0.0996094 8.90039 7.40039 15.9004 16.2002 15.8008zM302.1 118.6l25.2002 147h-157.3l28.0996 -147h104z" />
+ <glyph glyph-name="tumblr" unicode="&#xf173;" horiz-adv-x="319"
+d="M309.8 -32.2998c-13.5996 -14.5 -50 -31.7002 -97.3994 -31.7002c-120.801 0 -147 88.7998 -147 140.6v144h-47.5c-5.5 0 -10 4.5 -10 10v68c0 7.2002 4.5 13.6006 11.2998 16c62 21.8008 81.5 76 84.2998 117.101c0.799805 11 6.5 16.2998 16.0996 16.2998h70.9004
+c5.5 0 10 -4.5 10 -10v-115.2h83c5.5 0 10 -4.39941 10 -9.89941v-81.7002c0 -5.5 -4.5 -10 -10 -10h-83.4004v-133.2c0 -34.2002 23.7002 -53.5996 68 -35.7998c4.80078 1.89941 9 3.2002 12.7002 2.2002c3.5 -0.900391 5.7998 -3.40039 7.40039 -7.90039l22 -64.2998
+c1.7998 -5 3.2998 -10.6006 -0.400391 -14.5z" />
+ <glyph glyph-name="tumblr-square" unicode="&#xf174;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM317.7 51.7998c2.2998 2.40039 1.2998 5.90039 0.299805 9.10059l-13.7998 40.1992c-1 2.80078 -2.40039 4.40039 -4.60059 4.90039
+c-2.39941 0.599609 -5 -0.200195 -8 -1.40039c-27.6992 -11.0996 -42.5 1 -42.5 22.4004v83.2998h52.1006c3.39941 0 6.2002 2.7998 6.2002 6.2002v51.0996c0 3.40039 -2.80078 6.2002 -6.2002 6.2002h-51.9004v72c0 3.40039 -2.7998 6.2002 -6.2002 6.2002h-44.2998
+c-5.89941 0 -9.5 -3.2998 -10 -10.2002c-1.7998 -25.7002 -13.8994 -59.5 -52.7002 -73.2002c-4.2998 -1.5 -7.09961 -5.5 -7.09961 -10v-42.5c0 -3.39941 2.7998 -6.19922 6.2002 -6.19922h29.7002v-90c0 -32.4004 16.3994 -87.9004 91.8994 -87.9004
+c29.7002 0 52.4004 10.7002 60.9004 19.7998z" />
+ <glyph glyph-name="apple" unicode="&#xf179;" horiz-adv-x="376"
+d="M314.7 179.3c0 -1.89941 -3.5 -61.2002 61.7002 -91.8994c-12.2002 -36.8008 -54 -118.601 -102.601 -119.301c-28.0996 0 -44.5996 17.9004 -76.3994 17.9004c-32.8008 0 -50.6006 -17.2998 -75.8008 -17.9004c-48.1992 -1.5 -94.3994 88.5 -107.199 125.2
+c-9.60059 27.9004 -14.4004 55 -14.4004 81.2002c0 88.7002 59.2998 132.3 115.1 133.2c27 0 61.4004 -19.7002 76.4004 -19.7002c14.2002 0 53 23.5 88.5 20.7002c37.5 -2.90039 65.9004 -17.7002 84.7002 -44.6006c-33.6006 -20.3994 -50.2002 -48.0996 -50 -84.7998z
+M258.1 343.5c-19.5996 -22.9004 -43.3994 -36.2998 -69.5 -34.2998c-2.19922 27.5996 8.10059 52.0996 25.6006 71.8994c15.8994 18.5 43.7998 33.5 67.8994 34.9004c0.800781 -10.5996 3.30078 -40.0996 -24 -72.5z" />
+ <glyph glyph-name="windows" unicode="&#xf17a;"
+d="M0 354.3l183.6 25.2998v-177.399h-183.6v152.1zM0 29.7002v149.899h183.6v-175.199zM203.8 1.7002v177.899h244.2v-211.6zM203.8 382.3l244.2 33.7002v-213.8h-244.2v180.1z" />
+ <glyph glyph-name="android" unicode="&#xf17b;"
+d="M89.5996 243.5v-115.8c0 -15.4004 -12.0996 -27.7002 -27.5 -27.7002c-15.2998 0 -30.0996 12.4004 -30.0996 27.7002v115.8c0 15.0996 14.7998 27.5 30.0996 27.5c15.1006 0 27.5 -12.4004 27.5 -27.5zM100.4 86.5v179.4h247.3v-179.4
+c0 -16.4004 -13.2002 -29.5996 -29.4004 -29.5996h-20.2002v-61.1006c0 -36.7998 -55.5 -36.7002 -55.5 0v61.1006h-37.1992v-61.1006c0 -36.5996 -55.2002 -36.8994 -55.2002 0l-0.299805 61.1006h-19.9004c-16.4004 0 -29.5996 13.1992 -29.5996 29.5996zM348.4 275.6
+h-249.101c0 42.8008 25.6006 80 63.6006 99.4004l-19.1006 35.2998c-2.7998 4.90039 4.2998 8 6.7002 3.7998l19.4004 -35.5996c34.8994 15.5 75 14.7002 108.3 0l19.2998 35.5c2.5 4.2998 9.5 1.09961 6.7002 -3.7998l-19.1006 -35.2002
+c37.7002 -19.4004 63.3008 -56.5996 63.3008 -99.4004zM177.7 331.1c0 5.7002 -4.60059 10.5 -10.5 10.5c-5.7002 0 -10.2002 -4.7998 -10.2002 -10.5c0 -5.69922 4.59961 -10.5 10.2002 -10.5c5.89941 0 10.5 4.80078 10.5 10.5zM291.1 331.1
+c0 5.7002 -4.59961 10.5 -10.1992 10.5c-5.90039 0 -10.5 -4.7998 -10.5 -10.5c0 -5.69922 4.59961 -10.5 10.5 -10.5c5.59961 0 10.1992 4.80078 10.1992 10.5zM385.9 271c15.2998 0 30.0996 -12.0996 30.0996 -27.5v-115.8
+c0 -15.2998 -14.7002 -27.7002 -30.0996 -27.7002c-15.1006 0 -27.5 12.2998 -27.5 27.7002v115.8c0 15.4004 12.3994 27.5 27.5 27.5z" />
+ <glyph glyph-name="linux" unicode="&#xf17c;"
+d="M220.8 324.7c-1.09961 0.599609 -3.09961 0.399414 -3.39941 1.7002c-0.200195 0.399414 0.199219 0.899414 0.599609 1.09961c1.59961 0.900391 3.7998 0.599609 5.5 -0.0996094c1.2998 -0.600586 3.40039 -1.5 3.2002 -2.90039
+c-0.100586 -1.09961 -1.7998 -1.5 -2.90039 -1.5c-1.2002 0 -2 1.2002 -3 1.7002zM198.9 323c-1 -0.0996094 -2.7002 0.400391 -2.80078 1.40039c-0.199219 1.39941 1.90039 2.2998 3.2002 2.89941c1.7002 0.700195 3.90039 1 5.5 0.100586
+c0.400391 -0.200195 0.799805 -0.700195 0.600586 -1.10059c-0.400391 -1.2002 -2.40039 -1 -3.5 -1.59961c-1 -0.5 -1.80078 -1.7002 -3 -1.7002zM420 44.2002c11.0996 -12.4004 15.9004 -21.5 15.5 -29.7002c-0.5 -8.2002 -6.5 -13.7998 -13.9004 -18.2998
+c-14.8994 -9 -37.2998 -15.7998 -50.8994 -32.2002c-14.2002 -16.9004 -31.7002 -26.5996 -48.2998 -27.9004c-16.5 -1.2998 -32 6.30078 -40.3008 23v0.100586c-1.09961 2.09961 -1.89941 4.39941 -2.5 6.7002c-21.5 -1.2002 -40.1992 5.2998 -55.0996 4.09961
+c-22 -1.2002 -35.7998 -6.5 -48.2998 -6.59961c-4.7998 -10.6006 -14.2998 -17.6006 -25.9004 -20.2002c-16 -3.7002 -36.0996 0 -55.8994 10.3994c-18.5 9.80078 -42 8.90039 -59.3008 12.5c-8.69922 1.80078 -16.2998 5 -20.0996 12.3008
+c-3.7002 7.2998 -3 17.2998 2.2002 31.6992c1.7002 5.10059 0.399414 12.7002 -0.799805 20.8008c-0.600586 3.89941 -1.2002 7.89941 -1.2002 11.7998c0 4.2998 0.700195 8.5 2.7998 12.3994c4.5 8.5 11.7998 12.1006 18.5 14.5c6.7002 2.40039 12.7998 4 17 8.30078
+c5.2002 5.5 10.0996 14.3994 16.5996 20.1992c-2.59961 17.2002 0.200195 35.4004 6.2002 53.3008c12.6006 37.8994 39.2002 74.1992 58.1006 96.6992c16.0996 22.9004 20.7998 41.3008 22.5 64.7002c1.09961 31.7998 -24.5 135.4 77.8994 135.2
+c80.9004 -0.0996094 76.2998 -85.4004 75.7998 -131.3c-0.299805 -30.1006 16.3008 -50.5 33.4004 -72c15.2002 -18 35.0996 -44.2998 46.5 -74.4004c9.2998 -24.5996 12.9004 -51.7998 3.7002 -79.0996c1.39941 -0.5 2.7998 -1.2002 4.09961 -2
+c1.40039 -0.799805 2.7002 -1.7998 4 -2.90039c6.60059 -5.59961 8.7002 -14.2998 10.5 -22.3994c1.90039 -8.10059 3.60059 -15.7002 7.2002 -19.7002zM223.7 360.7c-3.2002 -7.2002 -3.90039 -14.9004 -2.90039 -21.7998c3.60059 -0.900391 8.90039 -2.40039 13 -4.40039
+c-2.09961 12.2002 4.5 23.5 11.7998 23c8.90039 -0.299805 13.9004 -15.5 9.10059 -27.2998c-0.799805 -1.90039 -2.7998 -3.40039 -3.90039 -4.60059c6.7002 -2.2998 11 -4.09961 12.6006 -4.89941c7.89941 9.5 10.7998 26.2002 4.2998 40.3994
+c-9.7998 21.4004 -34.2002 21.8008 -44 -0.399414zM183 372.2c-18.9004 0 -24 -37.5 -8.40039 -52.1006c7.80078 5.7002 6.90039 4.7002 5.90039 5.5c-8 6.90039 -6.59961 27.4004 1.7998 28.1006c6.2998 0.5 10.7998 -10.7002 9.60059 -19.6006
+c3.09961 2.10059 6.69922 3.60059 10.1992 4.60059c1.7002 19.2998 -9 33.5 -19.0996 33.5zM169.4 311.5c-4.2002 -3.2998 -5.60059 -7.40039 -4.2002 -12.2998c1.5 -4.90039 6.09961 -10.5 14.7002 -15.2998c7.7998 -4.60059 12 -11.5 20 -15
+c2.59961 -1.10059 5.69922 -1.90039 9.59961 -2.10059c18.4004 -1.09961 27.0996 11.2998 38.2002 14.9004c11.7002 3.7002 20.0996 11 22.7002 18.0996c3.19922 8.5 -2.10059 14.7002 -10.5 18.2002c-11.3008 4.90039 -16.3008 5.2002 -22.6006 9.2998
+c-10.2998 6.60059 -18.7998 8.90039 -25.8994 8.90039c-14.4004 0 -23.2002 -9.7998 -27.9004 -14.2002c-0.5 -0.5 -7.90039 -5.90039 -14.0996 -10.5zM172.7 -22.5c2.09961 20.5 -31.5 49 -41 68.9004l-19.6006 35.5996c-6.7998 9.2002 -13.7998 14.7998 -21.8994 16
+c-7.7002 1.2002 -12.6006 -1.40039 -17.7002 -6.90039c-4.7998 -5.09961 -8.7998 -12.2998 -14.2998 -18c-7.7998 -6.5 -9.2998 -6.19922 -19.6006 -9.89941c-6.2998 -2.2002 -11.2998 -4.60059 -14.7998 -11.2998c-2.7002 -5 -2.09961 -12.2002 -0.899414 -20
+c1.19922 -7.90039 3 -16.3008 0.599609 -23.9004v-0.200195c-5 -13.7002 -5 -21.7002 -2.59961 -26.3994c7.89941 -15.4004 46.5996 -6.10059 76.5 -21.9004c31.3994 -16.4004 72.5996 -17.0996 75.2998 18zM171.3 3.40039c37.6006 -25.7002 82.2002 -15.7002 114.3 7.19922
+c3.2002 11 6.30078 21.3008 6.80078 29c0.799805 15.2002 1.59961 28.7002 4.39941 39.9004c3.10059 12.5996 9.2998 23.0996 21.4004 27.2998c2.2998 21.1006 18.7002 21.1006 38.2998 12.5c18.9004 -8.5 26 -16 22.7998 -26.0996c1 0 2 0.0996094 4.2002 0
+c5.2002 16.8994 -14.2998 28 -30.7002 34.7998c2.90039 12 2.40039 24.0996 -0.399414 35.7002c-6 25.2998 -22.6006 47.7998 -35.2002 59c-2.2998 0.0996094 -2.10059 -1.90039 2.59961 -6.5c11.6006 -10.7002 37.1006 -49.2002 23.2998 -84.9004
+c-3.89941 1 -7.59961 1.5 -10.8994 1.40039c-5.2998 29.0996 -17.5 53.2002 -23.6006 64.5996c-11.5 21.4004 -29.5 65.2998 -37.1992 95.7002c-4.5 -6.40039 -12.4004 -11.9004 -22.3008 -15c-4.69922 -1.5 -9.69922 -5.5 -15.8994 -9
+c-13.9004 -8 -30 -8.7998 -42.4004 1.2002c-4.5 3.59961 -8 7.59961 -12.5996 10.2998c-1.60059 0.900391 -5.10059 3.2998 -6.2002 4.09961c-2 -37.7998 -27.2998 -85.2998 -39.2998 -112.699c-8.2998 -19.7002 -13.2002 -40.8008 -13.7998 -61.5
+c-21.8008 29.0996 -5.90039 66.2998 2.59961 82.3994c9.5 17.6006 11 22.5 8.7002 20.7998c-8.60059 -14 -22 -36.2998 -27.2002 -59.1992c-2.7002 -11.9004 -3.2002 -24 0.299805 -35.2002s11.1006 -21.5 24.6006 -29.9004c0 0 24.7998 -14.2998 38.2998 -32.5
+c7.39941 -10 9.7002 -18.7002 7.39941 -24.8994c-2.5 -6.7002 -9.59961 -8.90039 -16.6992 -8.90039c4.7998 -6 10.2998 -13 14.3994 -19.5996zM428.7 14.9004c0.299805 5.09961 -3.10059 13 -13.7002 24.5996c-10 11.2998 -7.2002 33.0996 -17.0996 41.5996
+c-6.90039 6 -13.6006 5.40039 -22.6006 5.10059c-7.7002 -8.7998 -25.7998 -19.6006 -38.3994 -16.2998c-11.5 2.89941 -18 16.2998 -18.8008 29.5c-0.299805 -0.200195 -0.699219 -0.300781 -1 -0.5c-7.09961 -3.90039 -11.0996 -10.8008 -13.6992 -21.1006
+c-2.5 -10.2002 -3.40039 -23.5 -4.2002 -38.7002c-0.700195 -11.7998 -6.2002 -26.3994 -9.90039 -40.5996c-3.5 -13.2002 -5.7998 -25.2002 -1.09961 -36.2998c7.2002 -14.5 19.5 -20.4004 33.7002 -19.2998c14.1992 1.09961 30.3994 9.7998 43.5996 25.5
+c22 26.5996 62.2998 29.6992 63.2002 46.5zM173.3 299.3c-3.5 2.7998 -3.09961 6.60059 -1.7002 6.5c2.40039 -0.299805 2.80078 -3.5 4.30078 -4.89941c2 -1.90039 4.59961 -4.40039 7.69922 -6.90039c6.2002 -4.90039 14.5 -9.7002 24.9004 -9.7002
+s22.5 6 29.9004 10.2002c4.19922 2.40039 9.5 6.59961 13.8994 9.7998c3.40039 2.5 3.2002 5.40039 6 5.10059c2.7998 -0.300781 0.799805 -3.2002 -3.09961 -6.60059c-3.90039 -3.39941 -9.90039 -7.7998 -14.7998 -10.3994
+c-9.30078 -4.90039 -20.2002 -10.8008 -31.8008 -10.8008c-11.5 0 -20.6992 5.40039 -27.2998 10.6006c-3.2998 2.59961 -6 5.2002 -8 7.09961z" />
+ <glyph glyph-name="dribbble" unicode="&#xf17d;" horiz-adv-x="512"
+d="M256 440c136.748 0 248 -111.252 248 -248s-111.252 -248 -248 -248s-248 111.252 -248 248s111.252 248 248 248zM419.97 325.634c-4.46582 -6.04102 -39.9629 -51.5459 -118.284 -83.5225c7.43652 -15.2217 12.8652 -27.5732 18.6172 -41.6143
+c70.4844 8.86426 140.519 -5.34082 147.502 -6.81836c-0.46582 49.998 -18.332 95.9092 -47.835 131.955zM396.421 350.13c-52.0947 46.2188 -122.885 63.6816 -190.061 47.4893c5.85449 -7.83984 44.3281 -60.2324 79.04 -124.008
+c75.3232 28.2324 107.211 71.0918 111.021 76.5186zM165.941 383.38c-59.2637 -27.9531 -103.562 -82.585 -117.298 -148.318c9.47461 -0.125 96.7471 -0.503906 195.834 25.8096c-35.0986 62.3926 -72.9512 114.85 -78.5361 122.509zM44.1699 191.677
+c0 -54.4072 20.624 -104.082 54.457 -141.636c34.3369 58.7793 103.932 120.731 180.531 142.306c-5.31738 12.0342 -11.1104 24.0811 -17.1738 35.9492c-105.786 -31.6592 -208.438 -30.3359 -217.706 -30.1455c-0.0654297 -2.15137 -0.108398 -4.30762 -0.108398 -6.47363
+zM125.977 24.5645c62.7539 -48.9355 144.656 -56.8955 212.769 -27.8828c-3.15039 18.585 -15.4492 83.3555 -45.1895 160.639c-85.4004 -29.1348 -145.452 -87.5234 -167.579 -132.756zM374.357 16.0752c47.5215 32.1338 81.3525 83.0371 90.7949 141.978
+c-7.24707 2.28711 -65.5674 19.6816 -131.947 9.05566c27.7061 -76.1367 38.9805 -138.147 41.1523 -151.033z" />
+ <glyph glyph-name="skype" unicode="&#xf17e;" horiz-adv-x="447"
+d="M424.7 148.2c14.5996 -18.9004 23.2998 -42.5 23.2002 -68.1006c0 -61.7998 -50.2002 -112 -112 -112c-25.6006 0 -49.2002 8.7002 -68.2002 23.3008c-14.1006 -3 -28.9004 -4.7002 -43.7998 -4.7002c-113.4 0 -205.301 91.7998 -205.301 205.3
+c0 14.9004 1.80078 29.7998 4.7002 43.7998c-14.5996 18.9004 -23.2998 42.5 -23.2998 68.2002c0 61.7998 50.2002 112 112 112c25.7002 0 49.2998 -8.7002 68.2998 -23.4004c14.1006 3 28.9004 4.7002 43.7998 4.7002c113.4 0 205.301 -91.7998 205.301 -205.3
+c0 -14.9004 -1.80078 -29.7998 -4.7002 -43.7998zM230.1 56.7002c54.9004 0 112 27.3994 112 86.5c0 50.7998 -49.2998 68.2998 -90.6992 77.5996c-48.3008 11.2002 -69.1006 13.2002 -69.1006 33c0 15.5 16.2998 22.5 42 22.5c45.7998 0 46.7002 -33.5 75 -33.5
+c18.9004 0 30.2998 14.9004 30.2998 31.7998c0 33.5 -55.6992 55.4004 -110.8 55.4004c-50.5 0 -109.1 -21.9004 -109.1 -81.0996c0 -65.2002 55.2998 -71.8008 117.8 -87.2002c26 -6.40039 42 -9.2998 42 -28c0 -14.9004 -16.5996 -26.2998 -42.2998 -26.2998
+c-54 0 -56.9004 44.8994 -88.1006 44.8994c-20.5 0 -29.5 -14.5996 -29.5 -30.5996c0 -35.7998 54.9004 -65 120.5 -65z" />
+ <glyph glyph-name="foursquare" unicode="&#xf180;" horiz-adv-x="368"
+d="M323.1 445c40 0 50.7002 -22.7998 42.2002 -65.2002l-48.5996 -243c-3.7002 -14.5 -9.2002 -39.7002 -44.2998 -39.7002h-83.4004c-3.40039 0 -3.7002 0.300781 -6.7998 -3.09961c0 0 -2.2002 -2.5 -131.101 -151.9
+c-10.0996 -11.6992 -26.6992 -9.59961 -32.8994 -7.09961c-6.10059 2.40039 -18.2002 9.7998 -18.2002 30.0996v433.801c0 17.7998 12.4004 46.0996 49.9004 46.0996h273.199zM306.8 371.2c2.10059 9.7998 -5.2998 17.5 -13.5 17.5h-219
+c-9.7998 0 -16.5996 -8.90039 -16.5996 -16.6006v-338.8c0 -0.899414 0.899414 -1.2002 1.7998 -0.299805c80.5996 96.9004 89.5 108.3 89.5 108.3c9.2998 10.7998 13 12.6006 26.5 12.6006h73.5c10.0996 0 16 8.59961 16.9004 13.5
+c0.899414 5 9.59961 49.8994 11.3994 58.7998c1.7998 9 -6.5 18.2002 -14.7998 18.2002h-90.4004c-12 0 -20.5996 8.59961 -20.5996 20.5996v13c0 12 8.59961 20.2998 20.5996 20.2998h106.4c7.40039 0 15.7002 6.7002 16.9004 13.2002z" />
+ <glyph glyph-name="trello" unicode="&#xf181;" horiz-adv-x="447"
+d="M392.3 416c30.7998 -0.200195 55.7002 -25.2002 55.6006 -56v-336c0 -30.7998 -24.9004 -55.7998 -55.7002 -56h-336.2c-30.9004 0 -56 25.0996 -56 56c0 340 -0.0996094 336 0 336c0 30.9004 25.0996 56 56.0996 56h336.2zM197 76.7002h0.0996094v254.2
+c0 14.8994 -12.0996 26.8994 -26.8994 26.8994h-82.9004c-14.8994 0 -26.8994 -12.0996 -26.8994 -26.8994v-254.2c0.0996094 -14.7998 12.1992 -26.7002 27 -26.6006h82.6992c14.8008 0 26.7002 11.9004 26.9004 26.6006zM390.1 188.7v142.1
+c0 14.9004 -12.0996 26.9004 -26.8994 26.9004h-81.1006c-14.7998 0 -26.7998 -12.1006 -26.7998 -26.9004v-142.1c0 -14.9004 12.1006 -26.9004 26.9004 -26.9004h81c14.8994 0 26.8994 12.1006 26.8994 26.9004z" />
+ <glyph glyph-name="gratipay" unicode="&#xf184;" horiz-adv-x="496"
+d="M248 440c136.9 0 248 -111.1 248 -248s-111.1 -248 -248 -248s-248 111.1 -248 248s111.1 248 248 248zM362.6 213.6c8.80078 12 19.1006 50.4004 -13.7998 72c-27.7002 18.1006 -54.2002 4.2002 -68.0996 -11.8994c-15.1006 -16.9004 -45.7998 -17.9004 -61.7002 0
+c-13.9004 16.0996 -40.4004 30 -68.5 11.8994c-32.7002 -21.5996 -22.2998 -60.0996 -13.5996 -72l112.699 -152.699z" />
+ <glyph glyph-name="vk" unicode="&#xf189;" horiz-adv-x="575"
+d="M545 330.3c-7.40039 -34.2998 -79.2998 -135.5 -79.4004 -135.6c-6.19922 -10 -8.69922 -15 0 -26.2002c3.40039 -4.7998 79.1006 -76.5996 90.3008 -111.5c4.89941 -16.5996 -3.60059 -25 -20.4004 -25h-58.9004c-22.3994 0 -29 17.9004 -69 57.9004
+c-35 33.6992 -50 38.0996 -58.6992 38.0996c-18.8008 0 -15.4004 -6.2998 -15.4004 -73.0996c0 -14.5 -4.59961 -22.9004 -42.0996 -22.9004c-62.4004 0 -131 37.9004 -179.7 107.8c-73.1006 102.4 -93.1006 179.9 -93.1006 195.5c0 8.7998 3.40039 16.7002 20.2002 16.7002
+h58.9004c15.0996 0 20.7998 -6.59961 26.5996 -22.9004c28.7998 -84 77.4004 -157.399 97.4004 -157.399c7.5 0 10.8994 3.5 10.8994 22.5v86.7998c-2.19922 40 -23.3994 43.2998 -23.3994 57.5c0 6.5 5.59961 13.5 15 13.5h92.5996
+c12.4004 0 16.6006 -6.7002 16.6006 -21.7002v-116.7c0 -12.5 5.69922 -16.8994 9.39941 -16.8994c7.5 0 13.7998 4.39941 27.5 18.0996c42.4004 47.4004 72.4004 120.5 72.4004 120.5c3.7002 8.7998 10.5996 16.7002 25.5996 16.7002h58.9004
+c17.7998 0 21.5 -9.2002 17.7998 -21.7002z" />
+ <glyph glyph-name="weibo" unicode="&#xf18a;" horiz-adv-x="511"
+d="M407 270.4c7.59961 24 -13.4004 46.7998 -37.4004 41.6992c-22 -4.7998 -28.7998 28.1006 -7.09961 32.8008c50.0996 10.8994 92.2998 -37.1006 76.5 -84.8008c-6.7998 -21.1992 -38.7998 -10.7998 -32 10.3008zM214.8 1.2998c-106.3 0 -214.8 51.4004 -214.8 136.3
+c0 44.3008 28 95.4004 76.2998 143.7c99.7002 99.7002 203.2 100.9 173.601 5.7002c-4 -13.0996 12.2998 -5.7002 12.2998 -6c79.5 33.5996 140.5 16.7998 114 -51.4004c-3.7002 -9.39941 1.09961 -10.8994 8.2998 -13.0996c135.7 -42.2998 34.7998 -215.2 -169.7 -215.2z
+M358.5 147.6c-5.40039 55.7002 -78.5 94 -163.4 85.7002c-84.7998 -8.59961 -148.8 -60.2998 -143.399 -116c5.39941 -55.7002 78.5 -94 163.399 -85.7002c84.8008 8.60059 148.801 60.3008 143.4 116zM347.9 412.9c102.3 21.5996 189.3 -74.5 157.399 -174.301
+c-8.2998 -25 -44.7998 -12.1992 -37.3994 12c23.0996 71.2002 -39.4004 139.2 -111.7 124c-25.1006 -5.39941 -34.2002 32.7002 -8.2998 38.3008zM269.4 101.9c-17.1006 -38.8008 -66.8008 -60 -109.101 -46.3008c-40.7998 13.1006 -58 53.4004 -40.2998 89.7002
+c17.7002 35.4004 63.0996 55.4004 103.4 45.1006c42 -10.8008 63.0996 -50.2002 46 -88.5zM183.1 131.9c-12.8994 5.39941 -30 -0.300781 -38 -12.9004c-8.2998 -12.9004 -4.2998 -28 8.60059 -34c13.0996 -6 30.7998 -0.299805 39.0996 12.9004
+c8 13.0996 3.7002 28.2998 -9.7002 34zM215.7 145.3c-5.10059 1.7002 -11.4004 -0.599609 -14.2998 -5.39941c-2.90039 -5.10059 -1.40039 -10.6006 3.69922 -12.9004c5.10059 -2 11.7002 0.299805 14.6006 5.40039c2.7998 5.19922 1.09961 10.8994 -4 12.8994z" />
+ <glyph glyph-name="renren" unicode="&#xf18b;" horiz-adv-x="512"
+d="M214 278.9c0 -110.4 -61 -205.4 -147.6 -247.4c-36.4004 43.2998 -58.4004 98.7998 -58.4004 159.9c0 122.699 89.0996 224.399 206 244.1v-156.6zM255 -56c-42.9004 0 -83.2998 11 -118.5 30.4004c57.2002 36.0996 103.4 90.6992 118.5 154.6
+c15.5 -63.9004 61.7002 -118.5 118.8 -154.7c-35.0996 -19.2998 -75.5 -30.2998 -118.8 -30.2998zM445.6 31.5c-86.5996 42 -147.6 136.9 -147.6 247.4v156.6c116.9 -19.7002 206 -121.4 206 -244.1c0 -61.1006 -22 -116.601 -58.4004 -159.9z" />
+ <glyph glyph-name="pagelines" unicode="&#xf18c;" horiz-adv-x="384"
+d="M384 135.3c-55.0996 -136.7 -187.1 -54 -187.1 -54c-40.5 -81.7998 -107.4 -134.399 -184.601 -134.7c-16.0996 0 -16.5996 24.4004 0 24.4004c64.4004 0.299805 120.5 42.7002 157.2 110.1c-41.0996 -15.8994 -118.6 -27.8994 -161.6 82.2002
+c109 44.9004 159.1 -11.2002 178.3 -45.5c9.89941 24.4004 17 50.9004 21.5996 79.7002c0 0 -139.7 -21.9004 -149.5 98.0996c119.101 47.9004 152.601 -76.6992 152.601 -76.6992c1.59961 16.6992 3.2998 52.5996 3.2998 53.3994c0 0 -106.3 73.7002 -38.1006 165.2
+c124.601 -43 61.4004 -162.4 61.4004 -162.4c0.5 -1.59961 0.5 -23.7998 0 -33.3994c0 0 45.2002 89 136.4 57.5c-4.2002 -134 -141.9 -106.4 -141.9 -106.4c-4.40039 -27.3994 -11.2002 -53.3994 -20 -77.5c0 0 83 91.7998 172 20z" />
+ <glyph glyph-name="stack-exchange" unicode="&#xf18d;"
+d="M17.7002 115.7h412.7v-22c0 -37.7002 -29.3008 -68 -65.3008 -68h-19l-86.7998 -89.7002v89.7002h-176.3c-36 0 -65.2998 30.2998 -65.2998 68v22zM17.7002 139.3v85h412.7v-85h-412.7zM17.7002 248.7v85h412.7v-85h-412.7zM365 448
+c36 0 65.2998 -30.2998 65.4004 -67.7002v-22.2998h-412.7v22.2998c0 37.4004 29.2998 67.7002 65.2998 67.7002h282z" />
+ <glyph glyph-name="vimeo-square" unicode="&#xf194;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM383.8 266.4c1.90039 41.5996 -13.5996 63 -46.5 64c-44.2998 1.39941 -74.3994 -23.6006 -90.0996 -75.1006
+c19.5996 8.40039 48.5996 10.6006 45.2002 -22.2002c-0.900391 -11.0996 -8.10059 -27.0996 -21.5 -48.2998c-37.2002 -58.7002 -46.3008 -39.0996 -66.8008 90.5c-5.7998 36.5 -21.0996 53.5 -46 51.1006c-22 -2 -57.1992 -38 -94.0996 -70.4004l15 -19.4004
+c14.2998 10.1006 22.7002 15.1006 25.0996 15.1006c20.8008 0 31.5 -54.1006 56.7002 -146.4c12.9004 -34.3994 28.6006 -51.5996 47.2998 -51.5996c30.1006 0 66.9004 28.2998 110.4 84.7998c42.0996 54.0996 63.9004 96.7998 65.2998 127.9z" />
+ <glyph glyph-name="slack" unicode="&#xf198;"
+d="M94.1201 132.9c0 -25.9004 -21.1602 -47.0605 -47.0605 -47.0605c-25.8994 0 -47.0596 21.1602 -47.0596 47.0605c0 25.8994 21.1602 47.0596 47.0596 47.0596h47.0605v-47.0596zM117.84 132.9c0 25.8994 21.1602 47.0596 47.0605 47.0596
+c25.8994 0 47.0596 -21.1602 47.0596 -47.0596v-117.841c0 -25.8994 -21.1602 -47.0596 -47.0596 -47.0596c-25.9004 0 -47.0605 21.1602 -47.0605 47.0596v117.841zM164.9 321.88c-25.9004 0 -47.0605 21.1602 -47.0605 47.0605c0 25.8994 21.1602 47.0596 47.0605 47.0596
+c25.8994 0 47.0596 -21.1602 47.0596 -47.0596v-47.0605h-47.0596zM164.9 298.16c25.8994 0 47.0596 -21.1602 47.0596 -47.0605c0 -25.8994 -21.1602 -47.0596 -47.0596 -47.0596h-117.841c-25.8994 0 -47.0596 21.1602 -47.0596 47.0596
+c0 25.9004 21.1602 47.0605 47.0596 47.0605h117.841zM353.88 251.1c0 25.9004 21.1602 47.0605 47.0605 47.0605c25.8994 0 47.0596 -21.1602 47.0596 -47.0605c0 -25.8994 -21.1602 -47.0596 -47.0596 -47.0596h-47.0605v47.0596zM330.16 251.1
+c0 -25.8994 -21.1602 -47.0596 -47.0605 -47.0596c-25.8994 0 -47.0596 21.1602 -47.0596 47.0596v117.841c0 25.8994 21.1602 47.0596 47.0596 47.0596c25.9004 0 47.0605 -21.1602 47.0605 -47.0596v-117.841zM283.1 62.1201c25.9004 0 47.0605 -21.1602 47.0605 -47.0605
+c0 -25.8994 -21.1602 -47.0596 -47.0605 -47.0596c-25.8994 0 -47.0596 21.1602 -47.0596 47.0596v47.0605h47.0596zM283.1 85.8398c-25.8994 0 -47.0596 21.1602 -47.0596 47.0605c0 25.8994 21.1602 47.0596 47.0596 47.0596h117.841
+c25.8994 0 47.0596 -21.1602 47.0596 -47.0596c0 -25.9004 -21.1602 -47.0605 -47.0596 -47.0605h-117.841z" />
+ <glyph glyph-name="wordpress" unicode="&#xf19a;" horiz-adv-x="512"
+d="M61.7002 278.6l101.5 -278c-71 34.4004 -119.9 107.2 -119.9 191.4c0 30.9004 6.60059 60.0996 18.4004 86.5996zM399.6 202.7c0 -18.2002 -7 -39.2998 -16 -68.7002l-21.1992 -70.9004l-76.9004 228.7c12.7998 0.700195 24.2998 2 24.2998 2
+c11.4004 1.2998 10.1006 18.2002 -1.39941 17.5c0 0 -34.5 -2.7002 -56.7002 -2.7002c-20.9004 0 -56 2.7002 -56 2.7002c-11.4004 0.700195 -12.7998 -16.7998 -1.2998 -17.5c0 0 10.7998 -1.2998 22.2998 -2l33.0996 -90.7998l-46.5996 -139.6l-77.5 230.399
+c12.7998 0.700195 24.2998 2 24.2998 2c11.4004 1.2998 10.0996 18.2002 -1.40039 17.5c0 0 -34.5 -2.7002 -56.6992 -2.7002c-4 0 -8.7002 0.100586 -13.7002 0.300781c38.0996 57.7998 103.5 95.8994 177.8 95.8994c55.4004 0 105.8 -21.2002 143.7 -55.8994
+c-1 0.0996094 -1.90039 0.199219 -2.7998 0.199219c-20.9004 0 -35.7002 -18.1992 -35.7002 -37.7998c0 -17.5 10.0996 -32.3994 20.8994 -49.8994c8.10059 -14.2002 17.5 -32.4004 17.5 -58.7002zM259.7 173.4l65.3994 -179.2c0.400391 -1 0.900391 -2 1.5 -2.90039
+c-22.0996 -7.7998 -45.7998 -12.0996 -70.5996 -12.0996c-20.9004 0 -41 3.09961 -60.0996 8.7002zM442.7 294.1c16.5996 -30.2998 26 -65.0996 26 -102.1c0 -78.5 -42.5 -147 -105.8 -183.9l65 187.9c12.1992 30.4004 16.1992 54.5996 16.1992 76.2002
+c0 7.89941 -0.5 15.0996 -1.39941 21.8994zM504 192c0 -136.8 -111.3 -248 -248 -248c-136.8 0 -248 111.3 -248 248c0 136.8 111.2 248 248 248c136.7 0 248 -111.2 248 -248zM492.6 192c0 130.5 -106.199 236.6 -236.6 236.6c-130.5 0 -236.6 -106.1 -236.6 -236.6
+s106.199 -236.6 236.6 -236.6c130.5 0 236.6 106.1 236.6 236.6z" />
+ <glyph glyph-name="openid" unicode="&#xf19b;"
+d="M271.5 16l-68 -32c-115 10.2998 -203.5 71.5 -203.5 145.8c0 71.5 82.5 131 191.7 144.3v-43c-71.5 -12.5 -124 -53 -124 -101.3c0 -51 58.5 -93.2998 135.7 -103v340l68 33.2002v-384h0.0996094zM448 157l-131.3 28.5l36.7998 20.7002c-19.5 11.5 -43.5 20 -70 24.7998
+v43c46.2002 -5.5 87.7002 -19.5 120.3 -39.2998l35 19.7998z" />
+ <glyph glyph-name="yahoo" unicode="&#xf19e;" horiz-adv-x="447"
+d="M252 156l4 -220c-12.7002 2.2002 -23.5 3.90039 -32.2998 3.90039c-8.40039 0 -19.2002 -1.7002 -32.2998 -3.90039l4 220c-55 94.7998 -110.4 196.8 -174 292c11.8994 -3.09961 23 -3.90039 33.1992 -3.90039c9 0 20.4004 0.800781 34.1006 3.90039
+c40.8994 -72.2002 82.0996 -138.7 135 -225.5c37.2998 61.5996 91.0996 144.1 134.899 225.5c11.1006 -2.90039 22 -3.90039 32.9004 -3.90039c11.5 0 23.2002 1 35 3.90039c-34.4004 -47.9004 -131.6 -216.9 -174.5 -292z" />
+ <glyph glyph-name="google" unicode="&#xf1a0;" horiz-adv-x="488"
+d="M488 186.2c0 -141.5 -96.9004 -242.2 -240 -242.2c-137.2 0 -248 110.8 -248 248s110.8 248 248 248c66.7998 0 123 -24.5 166.3 -64.9004l-67.5 -64.8994c-88.2998 85.2002 -252.5 21.2002 -252.5 -118.2c0 -86.5 69.1006 -156.6 153.7 -156.6
+c98.2002 0 135 70.3994 140.8 106.899h-140.8v85.2998h236.1c2.30078 -12.6992 3.90039 -24.8994 3.90039 -41.3994z" />
+ <glyph glyph-name="reddit" unicode="&#xf1a1;" horiz-adv-x="512"
+d="M201.5 142.5c-13.7998 0 -24.9004 11.0996 -24.9004 24.5996c0 13.8008 11.1006 24.9004 24.9004 24.9004c13.5996 0 24.5996 -11.0996 24.5996 -24.9004c0 -13.5996 -11.0996 -24.5996 -24.5996 -24.5996zM504 192c0 -137 -111 -248 -248 -248s-248 111 -248 248
+s111 248 248 248s248 -111 248 -248zM371.7 233.2c-9.40039 0 -17.7002 -3.90039 -23.7998 -10c-22.4004 15.5 -52.6006 25.5 -86.1006 26.5996l17.4004 78.2998l55.3994 -12.5c0 -13.5996 11.1006 -24.5996 24.6006 -24.5996c13.7998 0 24.8994 11.2998 24.8994 24.9004
+c0 13.5996 -11.0996 24.8994 -24.8994 24.8994c-9.7002 0 -18 -5.7998 -22.1006 -13.7998l-61.1992 13.5996c-3 0.800781 -6.10059 -1.39941 -6.90039 -4.39941l-19.0996 -86.4004c-33.2002 -1.39941 -63.1006 -11.2998 -85.5 -26.7998
+c-6.10059 6.40039 -14.7002 10.2002 -24.1006 10.2002c-34.8994 0 -46.2998 -46.9004 -14.3994 -62.7998c-1.10059 -5 -1.7002 -10.2002 -1.7002 -15.5c0 -52.6006 59.2002 -95.2002 132 -95.2002c73.0996 0 132.3 42.5996 132.3 95.2002
+c0 5.2998 -0.599609 10.7998 -1.90039 15.7998c31.3008 16 19.8008 62.5 -14.8994 62.5zM302.8 117c2.2002 2.2002 6.10059 2.2002 8.2998 0c2.5 -2.5 2.5 -6.40039 0 -8.59961c-22.8994 -22.8008 -87.3994 -22.8008 -110.199 0c-2.5 2.19922 -2.5 6.09961 0 8.59961
+c2.19922 2.2002 6.09961 2.2002 8.2998 0c17.5 -17.9004 75.3994 -18.2002 93.5996 0zM310.5 192c13.9004 0 24.9004 -11.0996 24.9004 -24.9004c0 -13.5 -11.1006 -24.5996 -24.9004 -24.5996c-13.5 0 -24.5996 11 -24.5996 24.5996c0 13.8008 11 24.9004 24.5996 24.9004z
+" />
+ <glyph glyph-name="reddit-square" unicode="&#xf1a2;"
+d="M283.2 102.5c2.7002 -2.7002 2.7002 -6.7998 0 -9.2002c-24.5 -24.5 -93.7998 -24.5996 -118.4 0c-2.7002 2.40039 -2.7002 6.5 0 9.2002c2.40039 2.40039 6.5 2.40039 8.90039 0c18.7002 -19.2002 81 -19.5996 100.5 0c2.39941 2.2998 6.59961 2.2998 9 0zM191.9 156.3
+c0 -14.5996 -11.9004 -26.5 -26.5 -26.5c-14.9004 0 -26.8008 11.9004 -26.8008 26.5c0 14.9004 11.9004 26.7998 26.8008 26.7998c14.5996 0 26.5 -11.8994 26.5 -26.7998zM282.6 183.1c14.9004 0 26.8008 -11.8994 26.8008 -26.7998
+c0 -14.5996 -11.9004 -26.5 -26.8008 -26.5c-14.5996 0 -26.5 11.9004 -26.5 26.5c0 14.9004 11.9004 26.7998 26.5 26.7998zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM348.3 227.4
+c-10.0996 0 -19 -4.2002 -25.5996 -10.7002c-24.1006 16.7002 -56.5 27.3994 -92.5 28.5996l18.7002 84.2002l59.5 -13.4004c0 -14.5996 11.8994 -26.5 26.5 -26.5c14.8994 0 26.7998 12.2002 26.7998 26.8008c0 14.5996 -11.9004 26.7998 -26.7998 26.7998
+c-10.4004 0 -19.3008 -6.2002 -23.8008 -14.9004l-65.6992 14.6006c-3.30078 0.899414 -6.5 -1.5 -7.40039 -4.80078l-20.5 -92.7998c-35.7002 -1.5 -67.7998 -12.2002 -91.9004 -28.8994c-6.5 6.7998 -15.7998 11 -25.8994 11c-37.5 0 -49.7998 -50.4004 -15.5 -67.5
+c-1.2002 -5.40039 -1.7998 -11 -1.7998 -16.7002c0 -56.5 63.6992 -102.3 141.899 -102.3c78.5 0 142.2 45.7998 142.2 102.3c0 5.7002 -0.599609 11.5996 -2.09961 17c33.5996 17.2002 21.1992 67.2002 -16.1006 67.2002z" />
+ <glyph glyph-name="stumbleupon-circle" unicode="&#xf1a3;" horiz-adv-x="512"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM256 262.5c9.7998 0 17.7998 -8 17.7002 -17.5996v-20.6006l22.8994 -10.7002l34.1006 10.1006v23.7002c0 40.2998 -34 72.5996 -74.7002 72.5996
+c-40.5 0 -74.7002 -32.0996 -74.7002 -72.0996v-108.4c0 -9.90039 -8 -17.7998 -17.7998 -17.7998s-17.7998 7.7998 -17.7998 17.7998v45.7998h-57.2998v-46.5c0 -41.3994 33.5 -74.8994 74.8994 -74.8994c41 0 74.9004 33 74.9004 73.8994v106.9
+c0 9.7998 8 17.7998 17.7998 17.7998zM423.6 138.9c0 0 0 0.5 0.100586 46.3994h-57.2998v-48c0 -9.7002 -8 -17.5996 -17.8008 -17.5996c-9.7998 0 -17.7998 7.7998 -17.7998 17.5996v47.1006l-34.0996 -10.1006l-22.9004 10.7002v-46.7998
+c0 -41 33.7002 -74.2002 74.9004 -74.2002c41.3994 0 74.8994 33.5 74.8994 74.9004z" />
+ <glyph glyph-name="stumbleupon" unicode="&#xf1a4;" horiz-adv-x="502"
+d="M502.9 182v-69.7002c0 -62.0996 -50.3008 -112.399 -112.4 -112.399c-61.7998 0 -112.4 49.7998 -112.4 111.3v70.2002l34.3008 -16l51.0996 15.1992v-70.5996c0 -14.7002 12 -26.5 26.7002 -26.5s26.7998 11.7998 26.7998 26.5v72h85.9004zM278.2 240.2v30.8994
+c0 14.7002 -12 26.7002 -26.7002 26.7002s-26.7002 -12 -26.7002 -26.7002v-160.3c0 -61.2998 -50.7998 -110.8 -112.399 -110.8c-62.1006 0 -112.4 50.2998 -112.4 112.3v69.7002h86v-68.5996c0 -14.9004 12 -26.7002 26.7002 -26.7002s26.7002 11.7998 26.7002 26.7002
+v162.399c0 60 51.2998 108.2 112.1 108.2c61 0 112.1 -48.5 112.1 -109v-35.5996l-51.0996 -15.2002z" />
+ <glyph glyph-name="delicious" unicode="&#xf1a5;"
+d="M446.5 380c1 -3.7998 1.5 -7.90039 1.59961 -12v-352.1c0 -26.5 -21.5 -48 -48 -48h-352c-4.09961 0 -8.19922 0.5 -12 1.5c-7.69922 2 -14.5996 5.7998 -20.2998 11c-1.2002 1.09961 -2.2998 2.19922 -3.2998 3.2998c-5.2002 5.7002 -9 12.5996 -11 20.2998
+c-1 3.7998 -1.5 7.90039 -1.5 12v352c0 26.5 21.5 48 48 47.9004h352c4.09961 0 8.2002 -0.5 12 -1.5c1.90039 -0.400391 3.7002 -1 5.40039 -1.7002c1.89941 -0.700195 3.69922 -1.5 5.5 -2.5c1.39941 -0.700195 2.69922 -1.5 4 -2.40039
+c1.09961 -0.799805 2.19922 -1.59961 3.2998 -2.5c2.5 -2 4.7998 -4.2998 6.89941 -6.7998c1.7002 -2.09961 3.30078 -4.5 4.7002 -6.90039c1.2998 -2.2998 2.40039 -4.59961 3.2998 -7.09961c0.5 -1.5 1 -3 1.40039 -4.5zM416 16v176h-192v192h-176
+c-8.7998 0 -16 -7.2002 -16 -16v-176h192v-192h176c8.7998 0 16 7.2002 16 16z" />
+ <glyph glyph-name="digg" unicode="&#xf1a6;" horiz-adv-x="512"
+d="M81.7002 275.7v76.2998h51v-250.7h-132.7v174.4h81.7002zM81.7002 142.3v92.2998h-30.7998v-92.2998h30.7998zM378.9 275.7h133.1v-243.7h-133.1v40.7998h81.7998v28.5h-81.7998v174.4zM460.7 142.3v92.2998h-30.7998v-92.2998h30.7998zM225.1 101.3v174.4h133.301
+v-243.7h-133.301v40.7998h82.1006v28.5h-82.1006zM276.3 234.6v-92.2998h30.7998v92.2998h-30.7998zM153.3 352h51.2998v-51h-51.2998v51zM153.3 275.7h51.2998v-174.4h-51.2998v174.4z" />
+ <glyph glyph-name="pied-piper-pp" unicode="&#xf1a7;"
+d="M205.3 273.4c0 -21.1006 -14.2002 -38.1006 -31.7002 -38.1006c-7.09961 0 -12.7998 1.2002 -17.1992 3.7002v68c4.39941 2.7002 10.0996 4.2002 17.1992 4.2002c17.5 0 31.7002 -16.9004 31.7002 -37.7998zM257.9 206.4c17.3994 0 31.6992 -17 31.6992 -38.1006
+c0 -20.8994 -14.2998 -37.7998 -31.6992 -37.7998c-7.10059 0 -12.8008 1.2002 -17.2002 3.7002v68c4.39941 2.7002 10.0996 4.2002 17.2002 4.2002zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352
+c26.5 0 48 -21.5 48 -48zM185 192.9c41 0 74.2002 35.5996 74.2002 79.5996s-33.2002 79.5996 -74.2002 79.5996c-12 0 -24.0996 -3.19922 -34.5996 -8.7998h-45.7002v-206.3l51.7998 10.0996v50.6006c8.59961 -3.10059 18.0996 -4.7998 28.5 -4.7998zM343.4 167.6
+c0 44 -33.2002 79.6006 -73.9004 79.6006c-3.2002 0 -6.40039 -0.200195 -9.59961 -0.700195c-3.7002 -12.5 -10.1006 -23.7998 -19.2002 -33.4004c-13.7998 -15 -32.2002 -23.7998 -51.7998 -24.7998v-156.3l51.7998 10.0996v50.6006
+c8.59961 -3.2002 18.2002 -4.7002 28.7002 -4.7002c40.7998 0 74 35.5996 74 79.5996z" />
+ <glyph glyph-name="pied-piper-alt" unicode="&#xf1a8;" horiz-adv-x="576"
+d="M244 202l-27.7002 -5.7002l-1.7002 4.90039c6.7002 0.5 12.7002 3.7002 19.3008 3.7002c3.7998 0 6.89941 -0.900391 10.0996 -2.90039zM379.9 4.09961c9.5 0 28.1992 -45.0996 33 -55.0996c-35.9004 -13.4004 -70.3008 -15.9004 -106 -9.7998l-6.90039 45.0996
+c15.7998 10.2998 60.9004 19.7998 79.9004 19.7998zM340.8 271c-7.59961 3.5 -63.8994 6.40039 -98.7998 -10c6.2998 11.7998 13.2002 17 25.9004 21.7998c27.2998 10.2998 40.1992 30.5 58.8994 51.1006c11.9004 -8.40039 12 -24.6006 31.6006 -23v-21.8008
+l6.2998 -0.299805c37.3994 14.4004 74.7002 30.2002 106.6 54.6006c48.2998 36.7998 52.9004 50 81.2998 100l2 2.59961c-0.599609 -14.0996 -6.2998 -27.2998 -12.3994 -39.9004c-30.5 -63.7998 -78.7002 -100.3 -146.8 -116.699
+c-12.4004 -2.90039 -26.4004 -3.2002 -37.6006 -8.90039c1.40039 -9.7998 13.2002 -18.0996 13.2002 -23c0 -3.40039 -5.5 -7.2002 -7.5 -8.59961c-11.2002 12.8994 -16.0996 19.2998 -22.7002 22.0996zM555.5 448l-0.299805 -1.40039l-0.600586 -0.599609
+l0.300781 0.900391zM496.3 65.9004c20.1006 -34.2002 43.7002 -54.3008 72.7002 -79.9004c-31 -19.2998 -70.4004 -32.2002 -103.5 -47.2002c-55.2002 46.2998 -23 229.9 -111.5 229.9c-3.5 -0.700195 -2.40039 -0.299805 -4.59961 -1.7002
+c1.09961 -1.40039 2.59961 -2.90039 3.69922 -4c23.9004 -20.0996 33.4004 -24.4004 34.8008 -58.5996l0.299805 -9.5c0.799805 -21.6006 -5.5 -42.5 -9.7998 -63.5c-25.9004 0.699219 -51.2002 -11 -77.9004 -2.90039c-0.700195 5.90039 -1.09961 30.9004 0.299805 41.0996
+c1.40039 9.5 33.6006 29.9004 33 43.7002c-5.5 0.600586 -9.2002 -2.59961 -12.3994 -6.89941c-13.3008 -19.5 -47.2002 -41.9004 -71.3008 -41.9004c-16.5996 0 -56.2998 71.5 -76.3994 85.9004c-3.2002 2.2998 -5.2002 5.39941 -7.7998 8.59961
+c-16.1006 -3.7998 -139.4 -32.2002 -147.4 -32.2002c-6 0 -11.5 4.90039 -11.5 10.9004c0 5.5 3.40039 10.7002 8.90039 11.7998l139.6 30.4004c-9.5 17.1992 12.2998 17.5 21.5 20.0996c3.2002 0.799805 6.2998 4 9.5 4c6.2998 0 11.7998 -8.90039 13.7998 -14.0996
+c6.2998 1.39941 45.7002 10.5996 49.4004 10.5996c15.2002 0 15.8994 -20.0996 2.89941 -22.7002l-52.2998 -11.5l-0.299805 -4.59961c-0.299805 -10.1006 45.4004 -60.1006 53.4004 -60.1006c18.0996 0 54.8994 41.7002 54.8994 60.1006
+c0 30.7002 -42.7998 12.5996 -42.7998 33.5996c0 3.5 1.2002 6.60059 2.90039 9.7998l-19.5 5.5c13.0996 13.6006 13.7998 31.7002 10.8994 50.3008c14.7002 2.89941 26.7002 4.59961 41.4004 4.59961c56.8994 0 45.7002 -8.59961 65.5 -54.2998l14.3994 7.2002
+c-2.2998 -34.2002 -36.1992 -17.5 -35.0996 -31l0.299805 -6c74.7002 2.89941 116.101 -58.6006 150 -115.5zM300.1 19.7998h8.90039l2.90039 -23.7998l-11.8008 -3.40039v27.2002zM231.4 170.2l13.7998 3.5l31.2998 -50.9004l-21 -13.7998zM315.8 15.2998
+c22.6006 2.5 32.7002 6.2998 59.5 6.2998c0.299805 -1.39941 0.900391 -3.19922 0.900391 -4.59961c0 -7.5 -49.4004 -12.5996 -58.4004 -14.0996z" />
+ <glyph glyph-name="drupal" unicode="&#xf1a9;"
+d="M319.5 333.3c13.5 -8.2998 96.5 -67 96.5 -179.3c0 -112 -88.5 -186 -190.2 -186c-102 0 -193.8 80.2998 -193.8 189.5c0 109 85 167.5 100.8 175.8c18.7002 10.1006 32.2002 15.2998 53.5 32.2998c10.5 8.30078 19.2998 20.2002 22 49.5
+c15.2002 -18.2998 33.5 -39.5 46.5 -48.2998c21.2002 -14 42.5 -19.5 64.7002 -33.5zM322 7.7002c4.2002 4.2002 1.90039 13.0996 -4.2002 8.5c-8.5 -6.2998 -27.5 -14 -54.5 -14c-34.5 0 -51.5 13.2998 -51.5 13.2998c-6.2002 0 -11.2998 -7.2002 -6.5 -12
+c26.6006 -24.5 96.6006 -15.9004 116.7 4.2002zM267.5 60.2998c-6.5 -2.7002 -28.4004 -16.7998 -22.4004 -25c2.40039 -3.2998 5.2002 -1.2998 12.2002 4.7002c7.2002 5.7998 12 11 26.7002 11c25.2998 0 18.0996 -19.9004 26.5 -15.7002
+c9.90039 4.90039 -2.09961 20.9004 -6.2002 23.7002c-7.7998 5.09961 -28.0996 4.90039 -36.7998 1.2998zM360 43c39.0996 -3.2998 64.5 106 15.7998 106c-20 0 -60.5 -41.5 -81.7998 -41.7998c-24.7002 -0.5 -59 49 -108.5 48.5
+c-66.4004 -0.400391 -90.5996 -78.6006 -51.7998 -105.2c57.2002 -38.7002 130.399 42.9004 161.3 42c19.5 -0.700195 49.7998 -48.5 65 -49.5z" />
+ <glyph glyph-name="joomla" unicode="&#xf1aa;"
+d="M0.599609 355.9c0 33.2998 26.8008 60.0996 59.8008 60.0996c30 0 54.5 -21.9004 59.1992 -50.2002c32.6006 7.60059 67.1006 -0.599609 96.5 -30l-44.2998 -44.2998c-20.5 20.5 -42.5996 16.2998 -55.3994 3.5c-14.3008 -14.2998 -14.3008 -37.9004 0 -52.2002
+l99.5 -99.5l-44 -44.2998c-87.7002 87.2002 -49.7002 49.7002 -99.8008 99.7002c-26.7998 26.5 -35 64.7998 -24.7998 98.8994c-26.8994 5.80078 -46.7002 29.7002 -46.7002 58.3008zM130.1 239.5c28.5 28.4004 81.3008 80.7998 99.6006 99.9004
+c26.5996 26.5996 64.5 35 98.2998 25.0996c4.09961 29.0996 29.2002 51.5996 59.5 51.5996c33 0 59.7998 -26.8994 59.7998 -60.0996c0 -30.2998 -22.7002 -55.4004 -51.8994 -59.5c9.59961 -33.5996 2.2998 -70 -28.9004 -101.2l-44 44.2998
+c20.5 20.4004 16.2998 42.6006 3.5 55.4004c-14.2998 14.2998 -37.5996 14.2998 -51.9004 0c-10 -10.0996 -89.6992 -89.7998 -99.6992 -99.7998zM396.4 87.2998c29.0996 -4.09961 51.5996 -28.8994 51.5996 -59.0996c0 -33.2998 -26.7998 -60.1006 -59.7998 -60.1006
+c-29.2002 0 -53.4004 20.7002 -58.9004 48.1006c-34.7002 -10.7998 -75.0996 -2.2002 -102.7 28l44 44.2998c20.4004 -20.5 42.6006 -16.2998 55.4004 -3.5c14.2998 14.2998 14.2998 37.5996 0 51.9004l-99.7002 99.6992l44.2998 44.3008
+c104.5 -104.4 87.7002 -87.5 99.5 -99.7002c25.4004 -25.4004 34.5 -61.2002 26.3008 -93.9004zM312.1 140.4c-87.2998 -87.3008 -67.3994 -67.7002 -99.5 -99.7002c-25.6992 -25.4004 -61.5 -34.2002 -94.1992 -26c-6.10059 -26.9004 -30 -46.7002 -58.6006 -46.7002
+c-33 0 -59.7998 26.7998 -59.7998 60.0996c0 28.3008 19.5 52.2002 46.2002 58.2002c-8.5 33.1006 -0.700195 68.1006 29.5 98.2998l44 -44.2998c-20.1006 -20.0996 -16.2998 -42 -3.2002 -55.3994c14.2998 -14.3008 37.5996 -14.3008 51.9004 0
+c49.2998 49.3994 12.6992 13.3994 99.6992 99.7998z" />
+ <glyph glyph-name="behance" unicode="&#xf1b4;" horiz-adv-x="576"
+d="M232 210.8c43.5996 -12.2998 64.7002 -45.2002 64.7002 -89.7002c0 -72 -60.5 -102.899 -124.9 -102.899h-171.8v354.399h167.1c60.7002 0 113.301 -17.1992 113.301 -87.7998c0 -35.7998 -16.6006 -58.7998 -48.4004 -74zM77.9004 312.1v-82.6992h79
+c27.7998 0 47.5 12.0996 47.5 42.1992c0 32.6006 -25.3008 40.5 -53.4004 40.5h-73.0996zM161.2 78.4004c31.7002 0 57.5996 11.1992 57.5996 47c0 36.2998 -21.7002 50.5996 -56 50.5996h-84.8994v-97.5996h83.2998zM519.7 319.1h-143.7v34.9004h143.7v-34.9004zM576 142.8
+c0 -4.5 -0.299805 -9 -0.599609 -13.2002h-185.101c0 -41.0996 21.7002 -65.2998 63 -65.2998c21.4004 0 49 11.6006 55.7002 33.5h62.2002c-19.1006 -58.7002 -58.7998 -86.2998 -120.101 -86.2998c-81 0 -131.3 54.7998 -131.3 134.7c0 77 53.1006 135.8 131.3 135.8
+c80.5 0 124.9 -63.2998 124.9 -139.2zM390.4 174h114.699c-3 34 -20.7998 54.7998 -56.1992 54.7998c-33.8008 0 -56.2002 -21.0996 -58.5 -54.7998z" />
+ <glyph glyph-name="behance-square" unicode="&#xf1b5;"
+d="M186.5 155c0 -19.2998 -14 -25.4004 -31.2002 -25.4004h-45.0996v52.9004h46c18.5996 -0.0996094 30.2998 -7.7998 30.2998 -27.5zM178.8 237.3c0 -16.2998 -10.7002 -22.8994 -25.7998 -22.8994h-42.7002v44.7998h39.6006c15.1992 0 28.8994 -4.2002 28.8994 -21.9004z
+M311.1 214.1c19.2002 0 28.8008 -11.1992 30.5 -29.6992h-62.1992c1.19922 18.2998 13.3994 29.6992 31.6992 29.6992zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM271.7 263h77.7998
+v18.9004h-77.7998v-18.9004zM228.7 152.7c0 24.0996 -11.4004 44.8994 -35 51.5996c17.2002 8.2002 26.2002 17.7002 26.2002 37c0 38.2002 -28.5 47.5 -61.4004 47.5h-90.5v-192h93.0996c34.9004 0.200195 67.6006 16.9004 67.6006 55.9004zM380 167.5
+c0 41.0996 -24.0996 75.4004 -67.5996 75.4004c-42.4004 0 -71.1006 -31.8008 -71.1006 -73.6006c0 -43.2998 27.2998 -73 71.1006 -73c33.1992 0 54.6992 14.9004 65.0996 46.7998h-33.7002c-3.7002 -11.8994 -18.5996 -18.0996 -30.2002 -18.0996
+c-22.3994 0 -34.0996 13.0996 -34.0996 35.2998h100.2c0.0996094 2.2998 0.299805 4.7998 0.299805 7.2002z" />
+ <glyph glyph-name="steam" unicode="&#xf1b6;" horiz-adv-x="496"
+d="M496 192c0 -137 -111.2 -248 -248.4 -248c-113.8 0 -209.6 76.2998 -239 180.4l95.2002 -39.3008c6.40039 -32.0996 34.9004 -56.3994 68.9004 -56.3994c39.2002 0 71.8994 32.3994 70.2002 73.5l84.5 60.2002c52.0996 -1.30078 95.7998 40.8994 95.7998 93.5
+c0 51.5996 -42 93.5 -93.7002 93.5s-93.7002 -42 -93.7002 -93.5v-1.2002l-59.2002 -85.7002c-15.5 0.900391 -30.6992 -3.40039 -43.5 -12.0996l-133.1 55c10.2002 127.699 117.1 228.1 247.6 228.1c137.2 0 248.4 -111 248.4 -248zM155.7 63.7002
+c19.7998 -8.2002 42.5 1.09961 50.7998 21c8.2998 19.7998 -1.09961 42.5 -20.9004 50.7002l-31.5 13c12.2002 4.59961 26 4.7998 38.9004 -0.600586c13 -5.39941 23.0996 -15.5996 28.5 -28.5996s5.2998 -27.2998 -0.0996094 -40.2998
+c-11.2002 -26.8008 -42.1006 -39.6006 -69 -28.4004c-10.2119 4.26953 -22.3975 15.8281 -27.2002 25.7998zM329.5 193.6c-34.4004 0 -62.4004 28 -62.4004 62.3008c0 34.2998 28 62.2998 62.4004 62.2998s62.4004 -28 62.4004 -62.2998
+c0 -34.3008 -27.9004 -62.3008 -62.4004 -62.3008zM329.6 209.2c25.9004 0 46.9004 21 46.9004 46.7998c0 25.9004 -21 46.7998 -46.9004 46.7998c-25.8994 0 -46.8994 -21 -46.8994 -46.7998c0.0996094 -25.7998 21.0996 -46.7998 46.8994 -46.7998z" />
+ <glyph glyph-name="steam-square" unicode="&#xf1b7;"
+d="M185.2 91.5c7.7002 18.5 -1 39.7002 -19.6006 47.4004l-29.5 12.1992c11.4004 4.30078 24.3008 4.5 36.4004 -0.5c12.2002 -5.09961 21.5996 -14.5996 26.7002 -26.6992c5 -12.2002 5 -25.6006 -0.100586 -37.7002c-10.5 -25.1006 -39.3994 -37 -64.5996 -26.5
+c-11.5996 4.7998 -20.4004 13.5996 -25.4004 24.2002l28.5 -11.8008c18.6006 -7.7998 39.9004 0.900391 47.6006 19.4004zM400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v112.8l89.0996 -36.8994
+c6 -30 32.7002 -52.7002 64.5 -52.7002c36.6006 0 67.3008 30.2998 65.7002 68.7998l79 56.2998c48.7002 -1.2002 89.6006 38.2998 89.6006 87.5c0 48.2002 -39.3008 87.5 -87.6006 87.5s-87.5996 -39.2998 -87.5996 -87.5v-1.09961l-55.4004 -80.2002
+c-14.5 0.799805 -28.7002 -3.09961 -40.7002 -11.2998l-116.6 48.0996v160.7c0 26.5 21.5 48 48 48h352zM300.3 193.5c-32.2002 0 -58.3994 26.0996 -58.3994 58.2998s26.1992 58.2998 58.3994 58.2998s58.4004 -26.1992 58.4004 -58.2998
+c0 -32.0996 -26.2002 -58.2998 -58.4004 -58.2998zM300.4 208.1c24.1992 0 43.8994 19.6006 43.8994 43.8008c0 24.1992 -19.5996 43.7998 -43.8994 43.7998c-24.2002 0 -43.9004 -19.6006 -43.9004 -43.7998c0 -24.2002 19.7002 -43.8008 43.9004 -43.8008z" />
+ <glyph glyph-name="spotify" unicode="&#xf1bc;" horiz-adv-x="496"
+d="M248 440c136.9 0 248 -111.1 248 -248s-111.1 -248 -248 -248s-248 111.1 -248 248s111.1 248 248 248zM348.7 75.0996c8.09961 0 15.2002 6.30078 15.2002 15.4004s-3.60059 12.5996 -9.7002 16.5c-71.4004 42.7002 -155.101 44.2998 -237 26.2002
+c-7.5 -1.60059 -13.6006 -6.5 -13.6006 -16.7998c0 -8.10059 6.10059 -15.8008 15.8008 -15.8008c2.89941 0 8 1.60059 11.8994 2.60059c71.7002 14.7002 144.3 13.0996 206.7 -24.5c3.90039 -2.2998 6.5 -3.60059 10.7002 -3.60059zM375.6 140.7
+c10.9004 0 19.3008 8.7002 19.4004 19.5c0 8.7002 -3.2002 14.8994 -11.2998 19.7002c-49.4004 29.3994 -112.101 45.5 -177 45.5c-41.6006 0 -70 -5.80078 -97.7998 -13.6006c-10.3008 -2.89941 -15.5 -10 -15.5 -20.7002c0 -10.6992 8.69922 -19.3994 19.3994 -19.3994
+c4.5 0 7.10059 1.2998 11.9004 2.59961c82.8994 22.5 176.1 7.60059 238.6 -29.3994c3.60059 -1.90039 7.10059 -4.2002 12.2998 -4.2002zM406.6 216.9c12.2002 0 23.2002 9.69922 23.2002 23.2998c0 11.8994 -5.09961 18.0996 -12.8994 22.5996
+c-55.9004 32.6006 -132.4 47.7998 -205.4 47.7998c-42.9004 0 -82.2998 -4.89941 -117.5 -15.1992c-9 -2.60059 -17.4004 -10.3008 -17.4004 -23.9004c0 -13.2998 10.1006 -23.5996 23.3008 -23.5996c4.7998 0 9.2998 1.59961 12.8994 2.59961
+c82.4004 23 209.7 12.7998 280.9 -29.7002c4.5 -2.59961 7.7002 -3.89941 12.8994 -3.89941z" />
+ <glyph glyph-name="deviantart" unicode="&#xf1bd;" horiz-adv-x="320"
+d="M320 354.8l-98.2002 -179.1l7.40039 -9.5h90.7998v-127.7h-160.9l-13.5 -9.2002l-43.6992 -84c-0.300781 0 -8.60059 -8.59961 -9.2002 -9.2002h-92.7002v93.2002l93.2002 179.4l-7.40039 9.2002h-85.7998v127.6h156l13.5 9.2002l43.7002 84
+c0.299805 0 8.59961 8.59961 9.2002 9.2002h97.5996v-93.1006z" />
+ <glyph glyph-name="soundcloud" unicode="&#xf1be;" horiz-adv-x="639"
+d="M111.4 191.7l5.7998 -65l-5.7998 -68.2998c-0.300781 -2.5 -2.2002 -4.40039 -4.40039 -4.40039s-4.2002 1.90039 -4.2002 4.40039l-5.59961 68.2998l5.59961 65c0 2.2002 1.90039 4.2002 4.2002 4.2002c2.2002 0 4.09961 -2 4.40039 -4.2002zM132.8 237.3
+c2.5 0 4.7002 -2.2002 4.7002 -5l5.7998 -105.6l-5.7998 -68.2998c0 -2.80078 -2.2002 -5 -4.7002 -5c-2.7998 0 -4.7002 2.19922 -5 5l-5 68.2998l5 105.6c0.299805 2.7998 2.2002 5 5 5zM158.3 261.4c2.7998 0 5.2998 -2.2002 5.2998 -5.30078l5.30078 -130
+l-5.30078 -67.7998c0 -3.09961 -2.5 -5.2998 -5.2998 -5.2998c-3.09961 0 -5.2998 2.2002 -5.59961 5.2998l-4.40039 67.7998l4.40039 130c0.299805 3.10059 2.5 5.30078 5.59961 5.30078zM7.2002 164.8c1.39941 0 2.2002 -1.09961 2.5 -2.5l5.59961 -35.5996l-5.59961 -35
+c-0.299805 -1.40039 -1.10059 -2.5 -2.5 -2.5c-1.40039 0 -2.2002 1.09961 -2.5 2.5l-4.7002 35l4.7002 35.5996c0.299805 1.40039 1.09961 2.5 2.5 2.5zM30.7998 186.7c1.40039 0 2.5 -1.10059 2.7998 -2.5l7.2002 -57.5l-7.2002 -56.4004
+c-0.299805 -1.39941 -1.39941 -2.5 -2.7998 -2.5c-1.39941 0 -2.5 1.10059 -2.5 2.7998l-6.39941 56.1006l6.39941 57.5c0 1.39941 1.10059 2.5 2.5 2.5zM56.0996 198.1c1.7002 0 3.10059 -1.39941 3.10059 -3.2998l6.89941 -68.0996l-6.89941 -65.7998
+c0 -1.7002 -1.40039 -3.10059 -3.10059 -3.10059c-1.59961 0 -3 1.40039 -3.2998 3.10059l-5.7998 65.7998l5.7998 68.0996c0.200195 1.90039 1.60059 3.2998 3.2998 3.2998zM81.4004 200.3c1.89941 0 3.59961 -1.39941 3.89941 -3.59961l6.40039 -70l-6.40039 -67.7998
+c-0.299805 -2.2002 -2 -3.60059 -3.89941 -3.60059c-1.90039 0 -3.60059 1.40039 -3.60059 3.60059l-5.7998 67.7998l5.7998 70c0 2.2002 1.7002 3.59961 3.60059 3.59961zM322.8 311.2c2.5 -1.40039 4.10059 -4.2002 4.5 -7.2002l3.90039 -177.5l-3.90039 -64.2002
+c0 -4.7002 -3.89941 -8.59961 -8.59961 -8.59961s-8.60059 3.89941 -8.90039 8.59961l-1.7002 31.7002l-1.69922 32.5l3.2998 176.7v0.799805c0.200195 2.5 1.39941 5 3.2998 6.7002c1.40039 1.09961 3.40039 1.89941 5.59961 1.89941
+c1.40039 0 3.10059 -0.599609 4.2002 -1.39941zM296.1 295.9c2.2002 -1.40039 3.60059 -3.90039 3.90039 -6.7002l3.2998 -162.8l-3.09961 -58.6006l-0.299805 -6.7002c0 -2.2998 -0.800781 -4.19922 -2.5 -5.59961c-1.40039 -1.40039 -3.40039 -2.5 -5.60059 -2.5
+c-2.5 0 -4.7002 1.2002 -6.39941 3.09961c-1.10059 1.40039 -1.7002 3 -1.7002 4.7002v0.299805c-3.10059 65.3008 -3.10059 65.6006 -3.10059 65.6006l2.80078 160.8l0.299805 1.7002c0 2.7998 1.39941 5.2998 3.59961 6.7002
+c1.2998 0.799805 2.7998 1.39941 4.40039 1.39941c1.59961 0 3 -0.599609 4.39941 -1.39941zM184.7 273.4c3.39941 0 5.89941 -2.80078 6.09961 -6.10059l5 -140.6l-5 -67.2002c-0.299805 -3.2998 -2.7998 -5.7998 -6.09961 -5.7998c-3 0 -5.5 2.5 -5.7998 5.7998
+l-4.40039 67.2002l4.40039 140.6c0 3.2998 2.69922 6.10059 5.7998 6.10059zM561.4 210.6c43.2998 0 78.5996 -35.2998 78.5 -78.8994c0 -43.2998 -35.3008 -78.2998 -78.6006 -78.2998h-218.3c-4.7002 0.599609 -8.59961 4.19922 -8.59961 9.19922v249.7
+c0 4.7998 1.69922 7 7.7998 9.2002c15.2998 6.09961 32.5 9.40039 50.2998 9.40039c72.5 0 131.9 -55.6006 138.3 -126.4c9.5 3.90039 19.7998 6.09961 30.6006 6.09961zM264.7 270.9c4.2002 0 7.2002 -3.30078 7.5 -7.80078l3.89941 -136.699l-3.89941 -65.6006
+c0 -4.2002 -3.2998 -7.5 -7.5 -7.5s-7.5 3.2998 -7.7998 7.5l-3.30078 65.6006l3.30078 136.699c0.299805 4.5 3.59961 7.80078 7.7998 7.80078zM211.1 278.7c3.60059 0 6.40039 -3.10059 6.7002 -6.7002l4.40039 -145.3l-4.40039 -66.9004
+c-0.299805 -3.59961 -3.09961 -6.39941 -6.7002 -6.39941c-3.2998 0 -6.09961 2.7998 -6.39941 6.39941l-3.90039 66.9004l3.90039 145.3c0 3.59961 3.09961 6.7002 6.39941 6.7002zM237.8 275.3c3.90039 0 6.90039 -3 6.90039 -6.89941l4.2002 -141.7l-4.2002 -66.4004
+c0 -3.7998 -3.10059 -6.89941 -6.90039 -6.89941s-6.59961 3 -6.89941 6.89941l-3.90039 66.4004l3.90039 141.7c0 3.7998 3 6.89941 6.89941 6.89941z" />
+ <glyph glyph-name="vine" unicode="&#xf1ca;" horiz-adv-x="384"
+d="M384 193.3v-52.0996c-18.4004 -4.2002 -36.9004 -6.10059 -52.0996 -6.10059c-36.9004 -77.3994 -103 -143.8 -125.101 -156.199c-14 -7.90039 -27.0996 -8.40039 -42.7002 0.799805c-27.0996 16.2998 -129.899 100.6 -164.1 365.6h74.5
+c18.7002 -159.1 64.5 -240.7 114.8 -301.8c27.9004 27.9004 54.7998 65.0996 75.6006 106.9c-49.8008 25.2998 -80.1006 80.8994 -80.1006 145.6c0 65.5996 37.7002 115.1 102.2 115.1c114.9 0 106.2 -127.899 81.5996 -181.5c0 0 -46.3994 -9.19922 -63.5 20.5
+c3.40039 11.3008 8.2002 30.8008 8.2002 48.5c0 31.3008 -11.2998 46.6006 -28.3994 46.6006c-18.2002 0 -30.8008 -17.1006 -30.8008 -50c0.100586 -79.2002 59.4004 -118.7 129.9 -101.9z" />
+ <glyph glyph-name="codepen" unicode="&#xf1cb;" horiz-adv-x="512"
+d="M502.285 288.296c6.00098 -3.99902 9.71484 -11.1426 9.71582 -18.2852v-155.999c0 -7.14258 -3.71484 -14.2871 -9.71484 -18.2861l-234 -156.021c-8.06055 -4.95996 -16.584 -4.91504 -24.5713 0l-234 156.021c-6.00098 4 -9.71484 11.1436 -9.71484 18.2861v155.999
+c0 7.14258 3.71387 14.2861 9.71387 18.2852l234 156c8.06055 4.95996 16.584 4.91504 24.5713 0zM278 384.869v-102.572l95.4287 -63.7148l76.8574 51.4287zM234 384.869l-172.286 -114.858l76.8574 -51.4287l95.4287 63.7148v102.572zM44 228.868v-73.7139
+l55.1426 36.8564zM234 -0.84668v102.571l-95.4287 63.7158l-76.8574 -51.4297zM256 140.011l77.7148 52l-77.7148 52l-77.7148 -52zM278 -0.84668l172.286 114.857l-76.8574 51.4297l-95.4287 -63.7158v-102.571zM468 155.154v73.7139l-55.1426 -36.8574z" />
+ <glyph glyph-name="jsfiddle" unicode="&#xf1cc;" horiz-adv-x="575"
+d="M510.634 210.538c45.6885 -25.334 68.3721 -74.5605 56.832 -122.634c-12.1035 -50.4199 -55.5479 -86.6592 -108.212 -87.293c-84.0303 -1.01172 -168.079 -0.458984 -252.12 -0.480469c-30.3223 -0.00683594 -60.668 -0.492188 -90.959 0.539062
+c-48.0938 1.63672 -91.7764 35.8643 -105.607 81.4326c-14.1289 46.5508 2.18945 94.623 41.9014 124.615c2.54688 1.92383 4.86914 6.52051 4.51465 9.54492c-3.74609 31.8604 7.14453 57.6709 32.6758 76.4082c26.2822 19.2881 55.2285 21.5879 85.3311 9.16699
+c2.36621 -0.975586 4.63965 -2.17773 7.82422 -3.68555c16.5215 27.5332 38.1221 48.6523 65.4922 63.9023c92.8594 51.7402 210.954 8.31152 246.85 -91.6455c5.55762 -15.4766 6.74512 -32.6074 9.09668 -49.0947c0.716797 -5.02832 1.6543 -8.15527 6.38086 -10.7764z
+M531.741 53.6582c39.3135 48.375 22.418 117.668 -35.1426 144.497c-7.43555 3.46582 -9.72559 7.74414 -9.84766 15.8936c-1.87012 125.129 -132.78 187.063 -230.24 132.697c-26.1133 -14.5674 -46.4492 -34.8955 -60.6709 -61.2939
+c-7.59082 -14.0908 -11.9287 -7.97754 -22.1982 -2.52734c-24.6113 13.0635 -49.0469 12.6406 -72.0332 -3.08301c-21.9678 -15.0244 -31.9102 -36.6201 -26.4199 -62.9805c2.4082 -11.5703 -0.914062 -17.0635 -10.0967 -23.1367
+c-38.1895 -25.2578 -53.0879 -74.8604 -34.1855 -116.105c18.4355 -40.2295 51.3135 -59.6631 95.1748 -59.9951c0.700195 -0.00488281 163.728 -0.545898 163.728 0.154297c56.8857 0 113.778 -0.551758 170.652 0.229492
+c28.9375 0.397461 53.0498 13.2178 71.2803 35.6504zM443.952 134.157c-5.84863 -31.1572 -34.6221 -55.0967 -66.666 -55.0957c-16.9531 0.00195312 -32.0586 6.5459 -44.0791 17.7051c-27.6973 25.7139 -71.1406 74.9805 -95.9375 93.3877
+c-20.0557 14.8877 -41.9893 12.333 -60.2715 -3.78223c-49.9961 -44.0713 15.8594 -121.775 67.0625 -77.1885c4.54883 3.95996 7.84082 9.54297 12.7441 12.8447c8.18457 5.50879 20.7666 0.883789 13.168 -10.6221c-17.3574 -26.2842 -49.3301 -38.1973 -78.8623 -29.3008
+c-28.8975 8.70312 -48.8408 35.9678 -48.626 70.1787c1.22461 22.4844 12.3633 43.0596 35.4141 55.9648c22.5742 12.6377 46.3682 13.1455 66.9902 -2.47363c50.791 -38.4756 75.5781 -81.7451 107.296 -101.245c24.5586 -15.0996 54.2549 -7.36328 68.8232 17.5059
+c28.8301 49.209 -34.5918 105.016 -78.8682 63.46c-3.98828 -3.74414 -6.91699 -8.93164 -11.4092 -11.7197c-10.9756 -6.81152 -17.333 4.1123 -12.8096 10.3525c20.7031 28.5537 50.4639 40.4404 83.2715 28.2139c31.4287 -11.7139 49.1074 -44.3662 42.7598 -78.1855z
+" />
+ <glyph glyph-name="rebel" unicode="&#xf1d0;" horiz-adv-x="512"
+d="M256.5 -56c-139.3 0 -247.5 116.2 -243.3 254.1c2.7998 79.2002 43.2002 152.2 116.5 200.4c0.299805 0 1.89941 0.599609 1.09961 -0.799805c-5.7998 -5.5 -111.3 -129.8 -14.0996 -226.4c49.7998 -49.5 90 -2.5 90 -2.5c38.5 50.1006 -0.600586 125.9 -0.600586 125.9
+c-10 24.8994 -45.6992 40.0996 -45.6992 40.0996l28.7998 31.7998c24.3994 -10.5 43.2002 -38.6992 43.2002 -38.6992c0.799805 29.5996 -21.9004 61.3994 -21.9004 61.3994l44.5996 50.7002l44.3008 -50.0996c-20.5 -28.8008 -21.9004 -62.6006 -21.9004 -62.6006
+c13.7998 23 43.5 39.2998 43.5 39.2998l28.5 -31.7998c-27.4004 -8.89941 -45.4004 -39.8994 -45.4004 -39.8994c-15.7998 -28.5 -27.0996 -89.4004 0.600586 -127.301c32.3994 -44.5996 87.7002 2.80078 87.7002 2.80078c102.699 91.8994 -10.5 225 -10.5 225
+c-6.10059 5.5 0.799805 2.7998 0.799805 2.7998c50.0996 -36.5 114.6 -84.4004 116.2 -204.8c2 -145.601 -99.9004 -249.4 -242.4 -249.4z" />
+ <glyph glyph-name="empire" unicode="&#xf1d1;" horiz-adv-x="496"
+d="M287.6 393.8c-10.7998 2.2002 -22.0996 3.2998 -33.5 3.60059v18.1992c78.1006 -2.19922 146.101 -44 184.601 -106.6l-15.7998 -9.09961c-6.10059 9.69922 -12.7002 18.7998 -20.2002 27.0996l-18 -15.5c-26 29.5996 -61.4004 50.7002 -101.9 58.4004zM53.4004 125.6
+c3.89941 -10.7998 8.2998 -21.0996 13.5996 -31.0996l-15.7998 -9.09961c-17.1006 31.5996 -27.1006 68.0996 -27.1006 106.6s9.90039 75 27.1006 106.5l15.7998 -9.09961c-5.2998 -9.7002 -10 -20.2002 -13.5996 -31l22.6992 -7.7002
+c-6.39941 -18.2998 -9.69922 -38.2002 -9.69922 -58.7002s3.59961 -40.4004 10 -58.7002zM213.1 14l-4.69922 -23.7998c10.7998 -1.90039 22.1992 -3.2998 33.5 -3.60059v-18.2998c-78.1006 2.2998 -146.4 44.2998 -184.9 106.601l16 9.39941
+c5.7998 -9.7002 12.7002 -18.7998 20.2002 -27.3994l18 15.7998c26.0996 -29.6006 61.5 -50.7002 101.899 -58.7002zM93.2998 327.1c-7.5 -8.2998 -14.0996 -17.5 -20.0996 -27.1992l-15.7998 9.09961c38.5 62.5996 106.5 104.4 184.6 106.6v-18.1992
+c-11.4004 -0.300781 -22.7002 -1.40039 -33.5 -3.60059l4.7002 -23.7998c-40.5 -7.7002 -75.9004 -28.7998 -101.9 -58.4004zM402.7 56.9004c7.5 8.59961 14.3994 17.6992 20.0996 27.3994l16.1006 -9.39941c-38.5 -62.3008 -106.801 -104.4 -184.9 -106.601v18.2998
+c11.4004 0.300781 22.7002 1.7002 33.5 3.60059l-4.7002 23.7998c40.5 8 75.9004 29.0996 101.9 58.7002zM496 192c0 -137 -111 -248 -248 -248s-248 111 -248 248s111 248 248 248s248 -111 248 -248zM483.8 192c0 130.1 -105.7 235.8 -235.8 235.8
+s-235.8 -105.7 -235.8 -235.8s105.7 -235.8 235.8 -235.8s235.8 105.7 235.8 235.8zM444.8 298.6c17.2002 -31.5996 27.1006 -68.0996 27.1006 -106.6s-9.90039 -75 -27.1006 -106.4l-15.7998 9.10059c5.2998 10 9.7002 20.2002 13.5996 31l-23 7.7002
+c6.40039 18.2998 10 38.1992 10 58.6992s-3.2998 40.4004 -9.69922 58.7002l22.6992 7.7002c-3.59961 10.7998 -8.2998 21.2998 -13.5996 31zM261.8 120.9l13.2998 -66.7002c-8.59961 -1.7002 -17.6992 -2.7998 -27.0996 -2.7998s-18.5 1.09961 -27.0996 2.7998
+l13.2998 66.7002c-16.2998 3.2998 -30.5 11.5996 -40.7002 23.5l-51.2002 -44.8008c-11.8994 13.6006 -21.2998 29.4004 -27.0996 46.8008l64.2002 22.0996c-2.5 7.40039 -3.90039 15.2002 -3.90039 23.5s1.40039 16 3.90039 23.5l-64.5 22.0996
+c6.09961 17.5 15.5 33.2002 27.3994 46.8008l51.2002 -44.8008c10.2998 11.9004 24.4004 20.5 40.7002 23.8008l-13.2998 66.3994c8.59961 2 17.6992 2.7998 27.0996 2.7998s18.5 -0.899414 27.0996 -2.7998l-13.2998 -66.3994
+c16.2998 -3.30078 30.5 -11.9004 40.7002 -23.8008l51.2002 44.8008c11.8994 -13.6006 21.2998 -29.4004 27.3994 -46.8008l-64.5 -22.0996c2.5 -7.40039 3.90039 -15.2002 3.90039 -23.5s-1.40039 -16 -3.90039 -23.5l64.2002 -22.0996
+c-5.7998 -17.5 -15.2002 -33.2002 -27.0996 -46.8008l-51.2002 44.8008c-10.2998 -11.9004 -24.4004 -20.2002 -40.7002 -23.5z" />
+ <glyph glyph-name="git-square" unicode="&#xf1d2;"
+d="M140.1 99.5c12.1006 0 29.5 -2.09961 29.5 -17.9004c0 -15.5 -13.8994 -18.7998 -27 -18.7998c-12.2998 0 -30.8994 2 -30.8994 18s15.7002 18.7002 28.3994 18.7002zM115.4 216.1c0 35.2002 43.5996 34.8008 43.5 0.400391c0 -14.5 -5.7002 -24.5 -21.2002 -24.5
+c-15.7002 0 -22.2998 9.2998 -22.2998 24.0996zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM207 274.3c-12.2998 -4.7998 -25.5 -8.39941 -38.9004 -8.39941
+c-38.5 22.0996 -90.6992 -0.900391 -90.6992 -49.5c0 -18 11.5996 -42.9004 29.5996 -48.9004v-0.700195c-13.4004 -5.89941 -17.0996 -36.0996 0.700195 -47v-0.700195c-19.5 -6.39941 -32.2998 -18.7998 -32.2998 -40.1992c0 -36.8008 34.7998 -47 65.3994 -47
+c37.1006 0 64.7998 13.3994 64.7998 54.2998c0 50.3994 -72.0996 31.7998 -72.0996 59.0996c0 9.60059 5.2002 13.4004 14.0996 15.2002c29.6006 5.5 48.2002 28.5996 48.2002 58.7002c0 5.2002 -1.09961 10.2002 -2.89941 15c4.7998 1.09961 9.5 2.2998 14.0996 3.89941
+v36.2002zM263.8 272.5h-39.5996c1.2998 -10.5996 1.09961 -142.6 0 -155.5h39.5996c-1.09961 12.7998 -1.2002 145.1 0 155.5zM268.3 325.8c0 13.4004 -10 26.2002 -24.0996 26.2002c-14.2998 0 -24.6006 -12.5 -24.6006 -26.2002
+c0 -13.5996 10.5 -25.8994 24.6006 -25.8994c13.7002 0 24.0996 12.5 24.0996 25.8994zM372.6 272.5h-36.3994c0 9.7998 -0.400391 19.5996 1.09961 29.5h-40.5c1.2998 -7.2998 1.10059 -13.5996 1.10059 -29.5h-17.1006v-33.9004
+c11.9004 1.90039 12.1006 0.700195 17.1006 0.700195v-0.700195h-0.400391v-62.6992c0 -31.1006 4.5 -61.2002 42.7002 -61.2002c10.7002 0 23 1.7998 32.5 7v35c-5.7002 -3.90039 -13.9004 -5.90039 -20.9004 -5.90039c-13.2002 0 -15.5 12.9004 -15.5 23.4004v65.2002
+c22.7002 0 36.4004 -0.700195 36.4004 -0.700195v33.7998h-0.100586z" />
+ <glyph glyph-name="git" unicode="&#xf1d3;"
+d="M18.7998 226.3c0 67.9004 73 100.2 126.8 69.2002c18.8008 0 37.3008 5 54.5 11.7998v-50.5c-6.5 -2.2002 -13 -4 -19.6992 -5.5c2.5 -6.7998 4 -13.7002 4 -21c0 -42.2002 -26 -74.5 -67.5 -82.2002c-12.5 -2.59961 -19.7002 -7.7998 -19.7002 -21.2998
+c0 -38.2998 101 -12.2998 101 -82.7998c0 -57.2998 -38.7002 -76 -90.7002 -76c-42.7002 0 -91.5 14.2998 -91.5 65.7998c0 29.9004 18 47.2002 45.2998 56.2002v1c-25 15.2002 -19.7998 57.5 -1 65.7998v1c-25.2998 8.5 -41.5 43.2002 -41.5 68.5zM106.5 62.5
+c-17.7002 0 -39.7002 -3.7998 -39.7998 -26.0996c0 -22.5 26 -25.2002 43.2998 -25.2002c18.2002 0 37.7002 4.5 37.7002 26.2998c0 22 -24.2002 25 -41.2002 25zM103.3 192c21.7002 0 29.7002 14 29.7002 34.2998c0 48.2998 -61 48.7998 -61 -0.5
+c0 -20.7998 9.2998 -33.7998 31.2998 -33.7998zM432 142.5v-49c-13.2998 -7.2998 -30.5 -9.7998 -45.5 -9.7998c-53.5 0 -59.7998 42.2002 -59.7998 85.7002v87.6992h0.5v1c-7 0 -7.2998 1.60059 -24 -1v47.5h24c0 22.3008 0.299805 31 -1.5 41.2002h56.7002
+c-2 -13.7998 -1.5 -27.5 -1.5 -41.2002h51v-47.5s-19.3008 1 -51 1v-91.0996c0 -14.7998 3.2998 -32.7998 21.7998 -32.7998c9.7998 0 21.2998 2.7998 29.2998 8.2998zM286 379.3c0 -18.7002 -14.5 -36.2002 -33.7998 -36.2002c-19.7998 0 -34.5 17.2002 -34.5 36.2002
+c0 19.2998 14.5 36.7002 34.5 36.7002c19.7998 0 33.7998 -18 33.7998 -36.7002zM279.8 304.8c-1.7998 -14.5996 -1.59961 -199.8 0 -217.8h-55.5c1.60059 18.0996 1.7998 203 0 217.8h55.5z" />
+ <glyph glyph-name="hacker-news" unicode="&#xf1d4;"
+d="M0 416h448v-448h-448v448zM21.2002 218.8h-0.200195c0.0996094 0.100586 0.200195 0.299805 0.299805 0.400391c0 -0.100586 0 -0.299805 -0.0996094 -0.400391zM239.2 164.9l80.7998 155.1h-34.7998c-54.7998 -101.2 -48.2998 -98.5996 -60.6006 -125.6
+c-10.0996 24.3994 -6.7998 27.2998 -59.2998 125.6h-37.2998l79.7998 -153.3v-102.7h31.4004v100.9z" />
+ <glyph glyph-name="tencent-weibo" unicode="&#xf1d5;" horiz-adv-x="383"
+d="M72.2998 -47.7998c1.40039 -19.9004 -27.5996 -22.2002 -29.7002 -2.90039c-11.5996 129.9 31.1006 239.5 101.4 313.2c-15.5996 34 9.2002 77.0996 50.5996 77.0996c30.3008 0 55.1006 -24.5996 55.1006 -55.0996c0 -44 -49.5 -70.7998 -86.9004 -45.0996
+c-65.7002 -71.3008 -101.399 -169.801 -90.5 -287.2zM192 447.9c92 0 166.6 -74.6006 166.6 -166.5c0 -102.301 -93.2998 -185.5 -204 -162.301c-19 4.7002 -12.5 33.2002 6.60059 29.1006c80.7998 -20.7998 167.7 42.2998 167.7 133.1c0 75.5 -61.5 136.9 -136.9 136.9
+c-101 0 -168.3 -106.601 -122 -199.2c9 -17.9004 -17.5996 -30.7998 -26.2998 -13.4004c-56 108.101 22.3994 242.301 148.3 242.301z" />
+ <glyph glyph-name="qq" unicode="&#xf1d6;"
+d="M433.754 27.5547c-11.5264 -1.39258 -44.8604 52.7412 -44.8604 52.7412c0 -31.3447 -16.1357 -72.2471 -51.0508 -101.786c16.8418 -5.19141 54.8428 -19.167 45.8037 -34.4209c-7.31641 -12.3428 -125.511 -7.88086 -159.633 -4.03711
+c-34.1221 -3.84375 -152.315 -8.30566 -159.632 4.03711c-9.04492 15.25 28.918 29.2139 45.7832 34.415c-34.9199 29.5391 -51.0586 70.4453 -51.0586 101.792c0 0 -33.334 -54.1338 -44.8594 -52.7412c-5.37012 0.650391 -12.4238 29.6445 9.34668 99.7041
+c10.2617 33.0244 21.9951 60.4785 40.1445 105.779c-3.05566 116.898 45.2441 214.956 160.262 214.962c113.737 -0.00585938 163.156 -96.1328 160.264 -214.963c18.1182 -45.2227 29.9121 -72.8506 40.1445 -105.778c21.7676 -70.0596 14.7158 -99.0527 9.3457 -99.7041z
+" />
+ <glyph glyph-name="weixin" unicode="&#xf1d7;" horiz-adv-x="576"
+d="M385.2 280.4c-92.4004 0 -165.4 -69.1006 -165.3 -154c0 -14.2002 2.19922 -27.9004 6.19922 -40.8008c-6.19922 -0.5 -12.0996 -0.799805 -18.2998 -0.799805c-24.3994 0 -43.7998 4.90039 -68.2002 9.7002l-68 -34.0996l19.3008 58.5996
+c-48.6006 34.0996 -77.9004 78.2002 -77.9004 131.6c0 92.6006 87.5 165.4 194.7 165.4c95.5996 0 179.7 -58.2998 196.3 -136.7c-6.2002 0.799805 -12.4004 1.10059 -18.7998 1.10059zM280.7 333.3c-14.7002 0 -29.2002 -9.7002 -29.2998 -24.3994
+c0 -14.5 14.5 -24.2002 29.2998 -24.2002c14.5 0 24.2002 9.7002 24.2002 24.2002c0 14.6992 -9.7002 24.3994 -24.2002 24.3994zM144.3 284.7c14.7998 0 24.4004 9.59961 24.4004 24.2002c0 14.6992 -9.60059 24.3994 -24.4004 24.3994
+c-14.5 0 -29.2998 -9.59961 -29.2998 -24.3994c0 -14.5 14.7998 -24.2002 29.2998 -24.2002zM563 128.6c0 -43.7998 -29 -82.6992 -68.2002 -112.1l14.7998 -48.5996l-53.3994 29.2998c-19.7002 -4.7998 -39.2998 -9.90039 -58.6006 -9.90039
+c-92.5996 0 -165.399 63.4004 -165.399 141.3c0 77.9004 72.7002 141.301 165.399 141.301c87.5 0 165.4 -63.4004 165.4 -141.301zM343.9 153.1c14.6992 0 24.3994 9.60059 24.3994 19.6006c0 9.59961 -9.59961 19.2998 -24.3994 19.2998
+c-9.60059 0 -19.3008 -9.59961 -19.3008 -19.2998c0 -9.90039 9.60059 -19.6006 19.3008 -19.6006zM451 153.1c14.5 0 24.5 9.60059 24.4004 19.6006c0 9.59961 -9.90039 19.2998 -24.4004 19.2998c-9.59961 0 -19.2998 -9.59961 -19.2998 -19.2998
+c0 -9.90039 9.59961 -19.6006 19.2998 -19.6006z" />
+ <glyph glyph-name="slideshare" unicode="&#xf1e7;" horiz-adv-x="512"
+d="M187.7 294.3c34 0 61.7002 -25.7002 61.7002 -57.7002c0 -31.6992 -27.7002 -57.6992 -61.7002 -57.6992s-61.7002 26 -61.7002 57.6992c0 32 27.7002 57.7002 61.7002 57.7002zM331.1 294.3c34.3008 0 61.8008 -25.7002 61.7002 -57.7002
+c0 -31.6992 -27.3994 -57.6992 -61.7002 -57.6992c-34 0 -61.6992 26 -61.6992 57.6992c0 32 27.6992 57.7002 61.6992 57.7002zM487.7 204.3c15.2002 10.5 25.2002 -4 16.0996 -17.7998c-18.2998 -22.5996 -53.2002 -50.2998 -106.3 -72
+c56.2998 -191.7 -137.4 -222.3 -134.3 -124c0 0.700195 -0.299805 53.7998 -0.299805 93.5c-4.30078 0.799805 -8.60059 2 -13.7002 3.09961c0 -40 -0.299805 -95.8994 -0.299805 -96.5996c3.09961 -98.2002 -190.601 -67.5996 -134.301 124.1
+c-53.1992 21.7002 -88 49.4004 -106.3 72c-9.09961 13.7002 0.900391 28.3008 16 17.7002c2 -1.39941 4.2998 -2.89941 6.2998 -4.2998v198.3c0 27.4004 20.6006 49.7002 46 49.7002h359.101c25.3994 0 46 -22.2998 46 -49.7002v-198.3zM457.2 185.1h0.0996094v190.601
+c0 32.7998 -10.5996 45.7002 -40.8994 45.7002h-317.7c-31.7002 0 -40.6006 -10.8008 -40.6006 -45.7002v-192.4c67.7002 -35.3994 125.7 -29.0996 157.4 -28c13.4004 0.299805 22 -2.2998 27.0996 -7.7002c1.7002 -1.59961 10 -9.39941 20.3008 -17.0996
+c1.09961 15.7998 10 25.7998 33.6992 24.9004c32.3008 -1.40039 91.7002 -7.7002 160.601 29.6992z" />
+ <glyph glyph-name="twitch" unicode="&#xf1e8;"
+d="M40.0996 416h397.9v-274.2l-117 -117h-87l-56.7998 -56.7998h-60.2002v56.7998h-107v314.3zM397.9 161.9v214h-321v-280.9h90.2998v-56.7998l56.7998 56.7998h107zM331 299v-116.9h-40.0996v116.9h40.0996zM224 299v-116.9h-40.0996v116.9h40.0996z" />
+ <glyph glyph-name="yelp" unicode="&#xf1e9;" horiz-adv-x="381"
+d="M41.9004 207.68l99.6191 -48.6094c19.2002 -9.40039 16.2002 -37.5107 -4.5 -42.71l-107.52 -26.8105c-13.4004 -3.39941 -26.71 5.90039 -28.21 19.6006c-3.40039 29.4092 0 58.4092 9 85.3193c4.41016 13.1104 19.3096 19.21 31.6104 13.21zM85.9102 -31.5703
+c-11.4004 7.80078 -13.2998 23.8105 -4.08984 34.0898l74.21 82.4209c14.3096 15.8096 40.5098 5.19922 39.8096 -16.1006l-3.89941 -110.82c-0.410156 -13.7998 -13.0107 -24 -26.6104 -21.6992c-28.6104 4.7998 -55.7197 15.9102 -79.4199 32.1094zM231.24 78.3496
+c-11.2998 18.1104 6.2002 40.4102 26.5098 33.9102l105.42 -34.21c13.0996 -4.2998 19.5 -19.1094 13.5996 -31.6094c-12.3994 -26 -30.3096 -49.1104 -52.71 -67.6104c-10.71 -8.7998 -26.71 -6.2002 -34.0098 5.5zM379.57 210.58
+c5.5 -12.71 -1.30078 -27.3105 -14.6104 -31.21l-106.62 -30.5098c-20.5 -5.90039 -37.0996 17.0098 -25.2002 34.71l62.0107 91.9199c7.70996 11.3994 23.71 13.5 34.0098 4.39941c21.2002 -18.6992 38.6094 -42.1992 50.4102 -69.3096zM61.1104 417.82
+c47 22.3994 89.21 28.2998 112.33 30.0996c13.2998 1.09961 24.5996 -9.5 24.5 -22.7998v-208.34c0 -23.2998 -30.9102 -31.6006 -42.6104 -11.4004l-104.12 180.43c-6.59961 11.5107 -2.09961 26.3105 9.90039 32.0107z" />
+ <glyph glyph-name="paypal" unicode="&#xf1ed;" horiz-adv-x="384"
+d="M111.4 152.1c-3.5 -19.1992 -17.4004 -108.699 -21.5 -134c-0.300781 -1.7998 -1 -2.5 -3 -2.5h-74.6006c-7.59961 0 -13.0996 6.60059 -12.0996 13.9004l58.5996 371.9c1.5 9.59961 10.1006 16.8994 20 16.8994c152.3 0 165.101 3.7002 204 -11.3994
+c60.1006 -23.3008 65.6006 -79.5 44 -140.301c-21.5 -62.5996 -72.5 -89.5 -140.1 -90.2998c-43.4004 -0.700195 -69.5 7 -75.2998 -24.2002zM357.1 296c28.4004 -21.2002 30.3008 -57.7998 23.8008 -92.5996c-16.5 -83.5 -71.9004 -112.301 -142.9 -112.301
+c-15 0 -24.7002 2.30078 -29.2998 -19.6992c-15.5 -97.4004 -13.7002 -85.9004 -14.4004 -91.3008c-1.7002 -8.59961 -8.7998 -14.8994 -17.3994 -14.8994h-63.5c-7.10059 0 -11.6006 5.7998 -10.6006 12.8994c0 0 4.5 29.3008 27.1006 169.7
+c0.799805 6.10059 4.7998 9.40039 10.8994 9.40039c54 0 164.601 -9.90039 204.5 103.899c3.7002 11.1006 6.7998 22.2002 8.7998 33.6006c0.5 3.09961 1.2002 2.59961 3 1.2998z" />
+ <glyph glyph-name="google-wallet" unicode="&#xf1ee;"
+d="M156.8 321.2c37.6006 -60.6006 64.2002 -113.101 84.2998 -162.5c-8.2998 -33.7998 -18.7998 -66.5 -31.2998 -98.2998c-13.2002 52.2998 -26.5 101.3 -56 148.5c6.5 36.3994 2.2998 73.5996 3 112.3zM109.3 248c5 0 10 -2.5 13 -6.5
+c43.7998 -59.7998 66.2998 -123.8 82.5 -193.5h-103.5c-20 69.5 -49.5 133 -91.7002 187.3c-4 5.2002 0 12.7002 6.5 12.7002h93.2002zM157.1 336h108.7c74.7998 -103 131.2 -230 143.2 -368h-113.7c-8.2002 133.5 -69.7002 260 -138.2 368zM408.9 404.5
+c19 -67.5 31.0996 -139 31.0996 -212.6c0 -69.5 -9.5 -142.5 -25.2998 -203c-10.9004 92.5 -42.4004 184.6 -90.6006 270.8c-4.19922 50.5 -13.2998 99.5 -26.5 146c-1.19922 5.2998 2.5 10.2998 7.80078 10.2998h88.2998c7 0 13.3994 -4.7002 15.2002 -11.5z" />
+ <glyph glyph-name="cc-visa" unicode="&#xf1f0;" horiz-adv-x="576"
+d="M470.1 216.7c0 0 7.60059 -37.2002 9.30078 -45h-33.4004c3.2998 8.89941 16 43.5 16 43.5c-0.200195 -0.299805 3.2998 9.09961 5.2998 14.8994zM576 368v-352c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h480
+c26.5 0 48 -21.5 48 -48zM152.5 116.8l63.2002 155.2h-42.5l-39.2998 -106l-4.30078 21.5l-14 71.4004c-2.2998 9.89941 -9.39941 12.6992 -18.1992 13.0996h-64.7002l-0.700195 -3.09961c15.7998 -4 29.9004 -9.80078 42.2002 -17.1006l35.7998 -135h42.5zM246.9 116.6
+l25.1992 155.4h-40.1992l-25.1006 -155.4h40.1006zM386.8 167.4c0.200195 17.6992 -10.5996 31.1992 -33.7002 42.2998c-14.0996 7.09961 -22.6992 11.8994 -22.6992 19.2002c0.199219 6.59961 7.2998 13.3994 23.0996 13.3994
+c13.0996 0.299805 22.7002 -2.7998 29.9004 -5.89941l3.59961 -1.7002l5.5 33.5996c-7.90039 3.10059 -20.5 6.60059 -36 6.60059c-39.7002 0 -67.5996 -21.2002 -67.7998 -51.4004c-0.299805 -22.2998 20 -34.7002 35.2002 -42.2002
+c15.5 -7.59961 20.7998 -12.5996 20.7998 -19.2998c-0.200195 -10.4004 -12.6006 -15.2002 -24.1006 -15.2002c-16 0 -24.5996 2.5 -37.6992 8.2998l-5.30078 2.5l-5.59961 -34.8994c9.40039 -4.2998 26.7998 -8.10059 44.7998 -8.2998
+c42.2002 -0.100586 69.7002 20.7998 70 53zM528 116.6l-32.4004 155.4h-31.0996c-9.59961 0 -16.9004 -2.7998 -21 -12.9004l-59.7002 -142.5h42.2002s6.90039 19.2002 8.40039 23.3008h51.5996c1.2002 -5.5 4.7998 -23.3008 4.7998 -23.3008h37.2002z" />
+ <glyph glyph-name="cc-mastercard" unicode="&#xf1f1;" horiz-adv-x="576"
+d="M482.9 37.7002c0 -6.7998 -4.60059 -11.7002 -11.2002 -11.7002c-6.7998 0 -11.2002 5.2002 -11.2002 11.7002s4.40039 11.7002 11.2002 11.7002c6.59961 0 11.2002 -5.2002 11.2002 -11.7002zM172.1 49.4004c6.5 0 10.8008 -5.2002 10.9004 -11.7002
+c0 -6.7998 -4.40039 -11.7002 -10.9004 -11.7002c-7.09961 0 -11.1992 5.2002 -11.1992 11.7002s4.09961 11.7002 11.1992 11.7002zM289.6 49.7002c5.2002 0 8.7002 -3 9.60059 -8.7002h-19.1006c0.800781 5.2002 4.10059 8.7002 9.5 8.7002zM397.4 49.4004
+c6.7998 0 11.1992 -5.2002 11.1992 -11.7002c0 -6.7998 -4.39941 -11.7002 -11.1992 -11.7002c-6.80078 0 -10.9004 5.2002 -10.9004 11.7002s4.09961 11.7002 10.9004 11.7002zM503.3 23.2998c0 -0.299805 0.299805 -0.5 0.299805 -1.09961
+c0 -0.299805 -0.299805 -0.5 -0.299805 -1.10059c-0.299805 -0.299805 -0.299805 -0.5 -0.5 -0.799805c-0.299805 -0.299805 -0.5 -0.5 -1.09961 -0.5c-0.299805 -0.299805 -0.5 -0.299805 -1.10059 -0.299805c-0.299805 0 -0.5 0 -1.09961 0.299805
+c-0.299805 0 -0.5 0.299805 -0.799805 0.5c-0.299805 0.299805 -0.5 0.5 -0.5 0.799805c-0.299805 0.5 -0.299805 0.800781 -0.299805 1.10059c0 0.5 0 0.799805 0.299805 1.09961c0 0.5 0.299805 0.799805 0.5 1.10059c0.299805 0.299805 0.5 0.299805 0.799805 0.5
+c0.5 0.299805 0.799805 0.299805 1.09961 0.299805c0.5 0 0.800781 0 1.10059 -0.299805c0.5 -0.300781 0.799805 -0.300781 1.09961 -0.5c0.299805 -0.200195 0.200195 -0.600586 0.5 -1.10059zM501.1 21.9004c0.5 0 0.5 0.299805 0.800781 0.299805
+c0.299805 0.299805 0.299805 0.5 0.299805 0.799805s0 0.5 -0.299805 0.799805c-0.300781 0 -0.5 0.299805 -1.10059 0.299805h-1.59961v-3.5h0.799805v1.40039h0.299805l1.10059 -1.40039h0.799805zM576 367v-352c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48
+v352c0 26.5 21.5 48 48 48h480c26.5 0 48 -21.5 48 -48zM64 227.4c0 -76.5 62.0996 -138.5 138.5 -138.5c27.2002 0 53.9004 8.19922 76.5 23.0996c-72.9004 59.2998 -72.4004 171.2 0 230.5c-22.5996 15 -49.2998 23.0996 -76.5 23.0996
+c-76.4004 0.100586 -138.5 -62 -138.5 -138.199zM288 118.6c70.5 55 70.2002 162.2 0 217.5c-70.2002 -55.2998 -70.5 -162.6 0 -217.5zM145.7 42.2998c0 8.7002 -5.7002 14.4004 -14.7002 14.7002c-4.59961 0 -9.5 -1.40039 -12.7998 -6.5
+c-2.40039 4.09961 -6.5 6.5 -12.2002 6.5c-3.7998 0 -7.59961 -1.40039 -10.5996 -5.40039v4.40039h-8.2002v-36.7002h8.2002c0 18.9004 -2.5 30.2002 9 30.2002c10.1992 0 8.19922 -10.2002 8.19922 -30.2002h7.90039c0 18.2998 -2.5 30.2002 9 30.2002
+c10.2002 0 8.2002 -10 8.2002 -30.2002h8.2002v23h-0.200195zM190.6 56h-7.89941v-4.40039c-2.7002 3.30078 -6.5 5.40039 -11.7002 5.40039c-10.2998 0 -18.2002 -8.2002 -18.2002 -19.2998c0 -11.2002 7.90039 -19.2998 18.2002 -19.2998
+c5.2002 0 9 1.89941 11.7002 5.39941v-4.59961h7.89941v36.7998zM231.1 30.4004c0 15 -22.8994 8.19922 -22.8994 15.1992c0 5.7002 11.8994 4.80078 18.5 1.10059l3.2998 6.5c-9.40039 6.09961 -30.2002 6 -30.2002 -8.2002c0 -14.2998 22.9004 -8.2998 22.9004 -15
+c0 -6.2998 -13.5 -5.7998 -20.7002 -0.799805l-3.5 -6.2998c11.2002 -7.60059 32.5996 -6 32.5996 7.5zM266.5 21.0996l-2.2002 6.80078c-3.7998 -2.10059 -12.2002 -4.40039 -12.2002 4.09961v16.5996h13.1006v7.40039h-13.1006v11.2002h-8.19922v-11.2002h-7.60059
+v-7.2998h7.60059v-16.7002c0 -17.5996 17.2998 -14.4004 22.5996 -10.9004zM279.8 34.5h27.5c0 16.2002 -7.39941 22.5996 -17.3994 22.5996c-10.6006 0 -18.2002 -7.89941 -18.2002 -19.2998c0 -20.5 22.5996 -23.8994 33.7998 -14.2002l-3.7998 6
+c-7.7998 -6.39941 -19.6006 -5.7998 -21.9004 4.90039zM338.9 56c-4.60059 2 -11.6006 1.7998 -15.2002 -4.40039v4.40039h-8.2002v-36.7002h8.2002v20.7002c0 11.5996 9.5 10.0996 12.7998 8.40039zM349.5 37.7002c0 11.3994 11.5996 15.0996 20.7002 8.39941l3.7998 6.5
+c-11.5996 9.10059 -32.7002 4.10059 -32.7002 -15c0 -19.7998 22.4004 -23.7998 32.7002 -15l-3.7998 6.5c-9.2002 -6.5 -20.7002 -2.59961 -20.7002 8.60059zM416.2 56h-8.2002v-4.40039c-8.2998 11 -29.9004 4.80078 -29.9004 -13.8994
+c0 -19.2002 22.4004 -24.7002 29.9004 -13.9004v-4.59961h8.2002v36.7998zM449.9 56c-2.40039 1.2002 -11 2.90039 -15.2002 -4.40039v4.40039h-7.90039v-36.7002h7.90039v20.7002c0 11 9 10.2998 12.7998 8.40039zM490.2 70.9004h-7.90039v-19.3008
+c-8.2002 10.9004 -29.8994 5.10059 -29.8994 -13.8994c0 -19.4004 22.5 -24.6006 29.8994 -13.9004v-4.59961h7.90039v51.7002zM497.8 146v-4.59961h0.799805v4.59961h1.90039v0.799805h-4.59961v-0.799805h1.89941zM504.4 22.2002c0 0.5 0 1.09961 -0.300781 1.59961
+c-0.299805 0.299805 -0.5 0.799805 -0.799805 1.10059c-0.299805 0.299805 -0.799805 0.5 -1.09961 0.799805c-0.5 0 -1.10059 0.299805 -1.60059 0.299805c-0.299805 0 -0.799805 -0.299805 -1.39941 -0.299805c-0.5 -0.299805 -0.799805 -0.5 -1.10059 -0.799805
+c-0.5 -0.300781 -0.799805 -0.800781 -0.799805 -1.10059c-0.299805 -0.5 -0.299805 -1.09961 -0.299805 -1.59961c0 -0.299805 0 -0.799805 0.299805 -1.40039c0 -0.299805 0.299805 -0.799805 0.799805 -1.09961c0.300781 -0.299805 0.5 -0.5 1.10059 -0.799805
+c0.5 -0.300781 1.09961 -0.300781 1.39941 -0.300781c0.5 0 1.10059 0 1.60059 0.300781c0.299805 0.299805 0.799805 0.5 1.09961 0.799805s0.5 0.799805 0.799805 1.09961c0.300781 0.600586 0.300781 1.10059 0.300781 1.40039zM507.6 146.9h-1.39941l-1.60059 -3.5
+l-1.59961 3.5h-1.40039v-5.40039h0.800781v4.09961l1.59961 -3.5h1.09961l1.40039 3.5v-4.09961h1.09961v5.40039zM512 227.4c0 76.1992 -62.0996 138.3 -138.5 138.3c-27.2002 0 -53.9004 -8.2002 -76.5 -23.1006c72.0996 -59.2998 73.2002 -171.5 0 -230.5
+c22.5996 -15 49.5 -23.0996 76.5 -23.0996c76.4004 -0.0996094 138.5 61.9004 138.5 138.4z" />
+ <glyph glyph-name="cc-discover" unicode="&#xf1f2;" horiz-adv-x="576"
+d="M520.4 251.9c0 -8.40039 -5.5 -12.8008 -15.8008 -12.8008h-4.69922v24.9004h4.89941c10.1006 0 15.6006 -4.2002 15.6006 -12.0996zM528 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h480z
+M483.9 277.1v-82h16v32.8008h2.19922l22.2002 -32.8008h19.6006l-25.8008 34.4004c12.1006 2.5 18.7002 10.5996 18.7002 23.2002c0 28.5 -30.2998 24.3994 -52.8994 24.3994zM428 277v-82h45.2998v13.7998h-29.2998v22.2002h28.2998v13.7998h-28.2998v18.2002h29.2998v14
+h-45.2998zM359.3 277h-17.5l35 -84.2002h8.60059l35.5 84.2002h-17.5l-22.2002 -55.2002zM303.4 280c-24.6006 0 -44.6006 -19.9004 -44.6006 -44.5996c0 -24.6006 19.9004 -44.6006 44.6006 -44.6006c24.5996 0 44.5996 19.9004 44.5996 44.6006
+c0 24.5996 -19.9004 44.5996 -44.5996 44.5996zM254.1 273.9c-30.1992 15 -63.2998 -6.80078 -63.2998 -38c0 -32.5 33.6006 -52.5 63.2998 -38.2002v19c-19.2998 -19.2998 -46.7998 -5.7998 -46.7998 19.2002c0 23.6992 26.7002 39.0996 46.7998 19v19zM156.9 207.6
+c-7.60059 0 -13.8008 3.7002 -17.5 10.8008l-10.3008 -9.90039c17.8008 -26.0996 56.6006 -18.2002 56.6006 11.2998c0 13.1006 -5.40039 19 -23.6006 25.6006c-9.59961 3.39941 -12.2998 5.89941 -12.2998 10.2998c0 8.7002 14.5 14.0996 24.9004 2.5l8.39941 10.7998
+c-19.0996 17.0996 -49.6992 8.90039 -49.6992 -14.2998c0 -11.2998 5.19922 -17.2002 20.1992 -22.7002c25.7002 -9.09961 14.7002 -24.4004 3.30078 -24.4004zM55.4004 195c30.8994 0 44.0996 22.4004 44.0996 40.9004c0 24.0996 -18 41.0996 -44.0996 41.0996h-23.4004
+v-82h23.4004zM122.9 195v82h-16v-82h16zM544 15v145c-33.2998 -20.7998 -226.4 -124.4 -416 -160h401c8.2002 0 15 6.7998 15 15zM74.0996 256.4c5.7002 -5 8.90039 -12.6006 8.90039 -20.5c0 -7.90039 -3.2002 -15.5 -8.90039 -20.7002
+c-4.89941 -4.40039 -11.5996 -6.40039 -21.8994 -6.40039h-4.2002v54.2002h4.2002c10.2998 0 16.7002 -1.7002 21.8994 -6.59961z" />
+ <glyph glyph-name="cc-amex" unicode="&#xf1f3;" horiz-adv-x="576"
+d="M325.1 280.2c0.100586 -8 -4.2998 -15.7002 -11.6992 -18.7002c9.5 -3.2998 11 -9.2002 11 -18.4004v-13.5h-16.6006c-0.299805 14.8008 3.60059 25.1006 -14.7998 25.1006h-18v-25.1006h-16.4004v69.3008l39.1006 -0.300781c13.2998 0 27.3994 -2 27.3994 -18.3994z
+M295.7 268.9c5.7002 0 11 1.2998 11 7.89941c0 6.40039 -5.60059 7.40039 -10.7002 7.40039h-21v-15.2998h20.7002zM279 179.4c15.5996 0 27.9004 -5.40039 27.9004 -22.7002c0 -27.9004 -30.4004 -23.2998 -49.3008 -23.2998l-0.0996094 -23.3008h-32.2002l-20.3994 23
+l-21.3008 -23h-65.3994l0.0996094 69.3008h66.5l20.5 -22.8008l21 22.8008h52.7002zM175.2 124.7l19 20.2002l-17.9004 20.1992h-41.7002v-12.5h36.3008v-14.0996h-36.3008v-13.7998h40.6006zM241 116.5v55.5l-25.2998 -27.4004zM278.8 147.5
+c5.90039 0 10.5 2.7998 10.5 9.2002c0 6.09961 -4.59961 8.39941 -10.2002 8.39941h-21.5v-17.5996h21.2002zM247.2 284.2h-38.9004v-12.5h37.7998v-14.1006h-37.7998v-13.7998h38.9004v-14.2998h-55.5v69.2998h55.5v-14.5996zM576 192.6h-0.200195h0.200195zM381.4 160.7
+c-0.100586 -7.60059 -4.2002 -15.2998 -11.9004 -18.4004c9.2002 -3.2998 11 -9.5 11 -18.3994l-0.0996094 -13.8008h-16.6006l0.100586 11.5c0 11.8008 -3.80078 13.8008 -14.8008 13.8008h-17.5996l-0.0996094 -25.3008h-16.6006l0.100586 69.3008h39.3994
+c13 0 27.1006 -2.30078 27.1006 -18.7002zM352.2 149.5c5.59961 0 11 1.2998 11 8.2002c0 6.39941 -5.60059 7.39941 -10.7002 7.39941h-21v-15.5996h20.7002zM179.4 229.5h-16.8008v54.2002l-24 -54.2002h-14.5996l-24 54.2002v-54.2002h-33.7998l-6.40039 15.2998h-34.5
+l-6.39941 -15.2998h-17.9004l29.7002 69.2998h24.5l28.0996 -65.7002v65.7002h27.1006l21.6992 -47l19.7002 47h27.6006v-69.2998zM31.2002 259.2h22.7002l-11.5 27.5996zM508.6 100.3c34.8008 0 54.8008 -2.2002 67.5 6.10059v-90.4004c0 -26.5 -21.5 -48 -48 -48h-480.1
+c-26.5 0 -48 21.5 -48 48v203.7h26.5996c4.2002 10.0996 2.2002 5.2998 6.40039 15.2998h19.2002c4.2002 -10 2.2002 -5.2002 6.39941 -15.2998h52.9004v11.3994c2.2002 -5 1.09961 -2.5 5.09961 -11.3994h29.5c2.40039 5.5 2.60059 5.7998 5.10059 11.3994v-11.3994h135.5
+v25.0996c6.39941 0 8 0.100586 9.7998 -0.200195c0 0 -0.200195 -10.8994 0.0996094 -24.7998h66.5v8.90039c7.40039 -5.90039 17.4004 -8.90039 29.7002 -8.90039h26.7998c4.2002 10.1006 2.2002 5.2998 6.40039 15.2998h19c6.5 -15 0.200195 -0.5 6.59961 -15.2998
+h52.8008v21.9004c11.7998 -19.7002 7.7998 -12.9004 13.1992 -21.9004h41.6006v92h-39.9004v-18.3994c-12.2002 20.1992 -6.2998 10.3994 -11.2002 18.3994h-43.2998v-20.5996c-6.2002 14.5996 -4.59961 10.7998 -8.7998 20.5996h-32.4004
+c-0.399414 0 -2.2998 -0.200195 -2.2998 0.299805h-27.5996c-12.7998 0 -23.1006 -3.19922 -30.7002 -9.2998v9.2998h-39.9004v-5.2998c-10.7998 6.10059 -20.6992 5.10059 -64.3994 5.2998c-0.100586 0 -11.6006 0.100586 -11.6006 0h-103
+c-2.5 -6.09961 -6.7998 -16.3994 -12.5996 -30c-2.7998 6 -11 23.8008 -13.9004 30h-46v-21.0996c-7.39941 17.4004 -4.69922 11 -9 21.0996h-39.6992c-3.40039 -7.89941 -13.7002 -32 -23.1006 -53.8994v109.8c0 26.5 21.5 48 48 48h480c26.5 0 48 -21.5 48 -48v-175.4
+c-37.7002 0.200195 -44 0.900391 -54.2998 -5v5c-45.2998 0 -53.5 1.7002 -64.9004 -5.19922v5.19922h-78.1992v-5.09961c-11.4004 6.5 -21.4004 5.09961 -75.7002 5.09961v-5.59961c-6.2998 3.7002 -14.5 5.59961 -24.2998 5.59961h-58
+c-3.5 -3.7998 -12.5 -13.6992 -15.7002 -17.1992c-12.7002 14.0996 -10.5 11.5996 -15.5 17.1992h-83.1006v-92.2998h82c3.30078 3.5 12.9004 13.9004 16.1006 17.4004c12.7002 -14.2998 10.2998 -11.7002 15.3994 -17.4004h48.9004
+c0 14.7002 0.0996094 8.2998 0.0996094 23c11.5 -0.200195 24.3008 0.200195 34.3008 6.2002c0 -13.9004 -0.100586 -17.0996 -0.100586 -29.2002h39.6006c0 18.5 0.0996094 7.40039 0.0996094 25.2998c6.2002 0 7.7002 0 9.40039 -0.0996094
+c0.0996094 -1.2998 0 0 0 -25.2002c152.8 0 145.899 -1.09961 156.699 4.5v-4.5zM544.9 164.8c-4.60059 0 -9.2002 -0.700195 -9.2002 -6.5c0 -12.2002 28.7998 0.299805 39.2998 -13.5v-25.7998c-4.90039 -7.09961 -14.0996 -8.90039 -22.5 -8.90039h-32l0.0996094 14.8008
+h32c4.10059 0 8.40039 1.2998 8.40039 6.39941c0 14.6006 -42.7002 -5.59961 -42.7002 27.4004c0 14.0996 11 20.7002 23.7998 20.7002h32.9004v-14.6006h-30.0996zM487.9 125c4.09961 0 8.69922 1 8.7998 6.40039c0 14.8994 -42.7002 -5.60059 -42.7002 27.3994
+c0 14.1006 10.7002 20.7002 23.5 20.7002h33.2002v-14.5996h-30.4004c-4.2998 0 -9.2002 -0.800781 -9.2002 -6.40039c0 -15.0996 42.9004 6.90039 42.9004 -26.2998c0 -16.4004 -11.4004 -22 -26.2002 -22h-32.2002l0.100586 14.7998h32.2002zM445.7 165.1h-38.5v-12.5
+h37.7998v-14.0996h-37.9004v-13.7998l38.6006 -0.299805l-0.100586 -14.3008h-55.1992l0.0996094 69.3008h55.2002v-14.3008zM389.4 273.2c0.299805 0.299805 1.69922 1 7.2998 1c1 0 2 -0.100586 3.09961 -0.100586l-7.2998 -16.8994
+c-2.2998 0 -3.2002 0.399414 -3.40039 0.5c-0.199219 0.200195 -1.09961 1.89941 -1.09961 7.89941c0 5.40039 1.09961 7.40039 1.40039 7.60059zM409.8 283.7h-0.0996094h0.0996094zM393.6 298.9h16.1006v-15.2002c-17.4004 0.299805 -33.1006 4.09961 -33.1006 -19.7002
+c0 -11.7998 2.80078 -19.9004 16.1006 -19.9004h7.39941l23.5 54.5h24.8008l27.8994 -65.3994v65.3994h25.2998l29.1006 -48.0996v48.0996h16.8994v-69h-23.5996l-31.2002 51.9004v-51.9004h-33.7002l-6.59961 15.3008h-34.2998l-6.40039 -15.3008h-19.2002
+c-22.7998 0 -33 11.8008 -33 34c0 23.3008 10.5 35.3008 34 35.3008zM435.7 286.8l-11.6006 -27.5996h22.8008zM334.6 298.8h16.9004v-69.2998h-16.9004v69.2998z" />
+ <glyph glyph-name="cc-paypal" unicode="&#xf1f4;" horiz-adv-x="576"
+d="M186.3 189.8c0 -12.2002 -9.7002 -21.5 -22 -21.5c-9.2002 0 -16 5.2002 -16 15c0 12.2002 9.5 22 21.7002 22c9.2998 0 16.2998 -5.7002 16.2998 -15.5zM80.5 238.3c11.2998 0 19.7998 -1.5 17.5 -14.8994c-2 -12.7002 -10.5 -14.2002 -21.5 -14.2002l-8.2002 -0.299805
+l4.2998 26.6992c0.200195 1.7002 1.7002 2.7002 3.2002 2.7002h4.7002zM364.5 238.3c8.5 0 18 -0.5 18.0996 -11.0996c0 -15 -9 -18 -22 -18l-8 -0.299805l4.2002 26.6992c0.200195 1.7002 1.40039 2.7002 3.2002 2.7002h4.5zM576 368v-352c0 -26.5 -21.5 -48 -48 -48h-480
+c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h480c26.5 0 48 -21.5 48 -48zM128.3 232.6c0 21 -16.2002 28 -34.7002 28h-40c-2.5 0 -5 -2 -5.19922 -4.69922l-16.4004 -102.101c-0.299805 -2 1.2002 -4 3.2002 -4h19c2.7002 0 5.2002 2.90039 5.5 5.7002l4.5 26.5996
+c1 7.2002 13.2002 4.7002 18 4.7002c28.5996 0 46.0996 17 46.0996 45.7998zM212.5 223.8h-19c-3.7998 0 -4 -5.5 -4.2002 -8.2002c-5.7998 8.5 -14.2002 10 -23.7002 10c-24.5 0 -43.1992 -21.5 -43.1992 -45.1992c0 -19.5 12.1992 -32.2002 31.6992 -32.2002
+c9 0 20.2002 4.89941 26.5 11.8994c-0.5 -1.5 -1 -4.69922 -1 -6.19922c0 -2.30078 1 -4 3.2002 -4h17.2002c2.7002 0 5 2.89941 5.5 5.69922l10.2002 64.3008c0.299805 1.89941 -1.2002 3.89941 -3.2002 3.89941zM253 125.9l63.7002 92.5996c0.5 0.5 0.5 1 0.5 1.7002
+c0 1.7002 -1.5 3.5 -3.2002 3.5h-19.2002c-1.7002 0 -3.5 -1 -4.5 -2.5l-26.5 -39l-11 37.5c-0.799805 2.2002 -3 4 -5.5 4h-18.7002c-1.69922 0 -3.19922 -1.7998 -3.19922 -3.5c0 -1.2002 19.5 -56.7998 21.1992 -62.1006c-2.69922 -3.7998 -20.5 -28.5996 -20.5 -31.5996
+c0 -1.7998 1.5 -3.2002 3.2002 -3.2002h19.2002c1.7998 0.100586 3.5 1.10059 4.5 2.60059zM412.3 232.6c0 21 -16.2002 28 -34.7002 28h-39.6992c-2.7002 0 -5.2002 -2 -5.5 -4.69922l-16.2002 -102c-0.200195 -2 1.2998 -4 3.2002 -4h20.5c2 0 3.5 1.5 4 3.19922l4.5 29
+c1 7.2002 13.1992 4.7002 18 4.7002c28.3994 0 45.8994 17 45.8994 45.7998zM496.5 223.8h-19c-3.7998 0 -4 -5.5 -4.2998 -8.2002c-5.5 8.5 -14 10 -23.7002 10c-24.5 0 -43.2002 -21.5 -43.2002 -45.1992c0 -19.5 12.2002 -32.2002 31.7002 -32.2002
+c9.2998 0 20.5 4.89941 26.5 11.8994c-0.299805 -1.5 -1 -4.69922 -1 -6.19922c0 -2.30078 1 -4 3.2002 -4h17.2998c2.7002 0 5 2.89941 5.5 5.69922l10.2002 64.3008c0.299805 1.89941 -1.2002 3.89941 -3.2002 3.89941zM544 257.1c0 2 -1.5 3.5 -3.2002 3.5h-18.5
+c-1.5 0 -3 -1.19922 -3.2002 -2.69922l-16.1992 -104l-0.300781 -0.5c0 -1.80078 1.5 -3.5 3.5 -3.5h16.5c2.5 0 5 2.89941 5.2002 5.69922l16.2002 101.2v0.299805zM454 205.3c9.2998 0 16.2998 -5.7002 16.2002 -15.5c0 -12.2998 -9.7002 -21.5 -21.7002 -21.5
+c-9.2002 0 -16.2002 5.2998 -16.2002 15c0 12.2998 9.5 22 21.7002 22z" />
+ <glyph glyph-name="cc-stripe" unicode="&#xf1f5;" horiz-adv-x="576"
+d="M492.4 227.2c8.69922 0 18 -6.7002 18 -22.7002h-36.7002c0 16 9.7998 22.7002 18.7002 22.7002zM375 224.6c12.9004 0.100586 21.9004 -14.5 21.9004 -33.0996c0 -19.0996 -8.80078 -33.4004 -21.9004 -33.4004c-8.2998 0 -13.2998 3 -16.7998 6.7002l-0.200195 52.7998
+c3.7002 4.10059 8.7998 7 17 7zM528 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h480zM122.2 166.9c0 42.2998 -54.2998 34.6992 -54.2998 50.6992c0 5.5 4.59961 7.7002 12.0996 7.7002
+c10.7998 0 24.5 -3.2998 35.2998 -9.09961v33.3994c-11.7998 4.7002 -23.5 6.5 -35.2998 6.5c-28.7998 0 -48 -15 -48 -40.1992c0 -39.3008 54 -32.9004 54 -49.9004c0 -6.59961 -5.7002 -8.7002 -13.5996 -8.7002c-11.8008 0 -26.9004 4.90039 -38.9004 11.2998v-33.8994
+c13.2002 -5.7002 26.5996 -8.10059 38.7998 -8.10059c29.6006 0.200195 49.9004 14.7002 49.9004 40.3008zM191 223.5v30.2998h-26.9004v30.7998l-34.6992 -7.39941l-0.200195 -113.9c0 -21 15.7998 -36.5 36.8994 -36.5c11.6006 0 20.2002 2.10059 24.9004 4.7002v28.9004
+c-4.5 -1.80078 -27 -8.30078 -27 12.5996v50.5h27zM265 221.1v32.7002h-0.0996094c-4.7002 1.7002 -21.3008 4.7998 -29.6006 -10.5l-2.2002 10.5h-30.6992v-124.5h35.5v84.4004c8.39941 11 22.5996 8.89941 27.0996 7.39941zM309.1 129.3v124.5h-35.6992v-124.5h35.6992z
+M309.1 272.2v28.8994l-35.6992 -7.59961v-28.9004zM383.2 126.7c25.3994 0.0996094 48.5996 20.5 48.5996 65.5996c0 41.2998 -23.5 63.7998 -48.3994 63.7998c-13.9004 0 -22.9004 -6.59961 -27.8008 -11.0996l-1.7998 8.7998h-31.2998v-165.8l35.5 7.5l0.0996094 40.2002
+c5.10059 -3.7002 12.7002 -9 25.1006 -9zM543.6 178.2c0.100586 2 0.400391 9.39941 0.400391 12.8994c0 36.4004 -17.5996 65.1006 -51.2998 65.1006c-33.7998 0 -54.2998 -28.7002 -54.2998 -64.9004c0 -42.7998 24.1992 -64.5 58.7998 -64.5
+c17 0 29.7002 3.90039 39.3994 9.2002v28.5996c-9.69922 -4.89941 -20.7998 -7.89941 -34.8994 -7.89941c-13.7998 0 -26 4.89941 -27.6006 21.5h69.5z" />
+ <glyph glyph-name="lastfm" unicode="&#xf202;" horiz-adv-x="512"
+d="M225.8 80.9004c0 0 -31.7002 -31.1006 -97.8994 -31.1006c-82.2002 0 -127.9 48.1006 -127.9 137.2c0 92.7002 45.7002 147.2 131.8 147.2c117.7 0 129.3 -66.2002 161.3 -163c14 -42.7998 38.7002 -73.9004 97.9004 -73.9004c39.9004 0 61 8.7998 61 30.5
+c0 31.9004 -34.9004 35.1006 -79.7998 45.7002c-48.6006 11.7002 -68 36.9004 -68 76.7998c0 64 51.5996 83.9004 104.399 83.9004c59.8008 0 96.2002 -21.7002 100.9 -74.5l-58.5996 -7c-2.30078 25.2002 -17.5 35.7998 -45.7002 35.7998
+c-25.7998 0 -41.6006 -11.7998 -41.6006 -31.7002c0 -17.5996 7.60059 -28.0996 33.4004 -34c52.2998 -11.5 115 -19.2002 115 -92.0996c0 -58.6006 -49.2998 -80.9004 -122 -80.9004c-101.4 0 -136.6 45.7002 -155.4 102.601
+c-26.0996 81.5996 -34.3994 134.899 -100.899 134.899c-35.7002 0 -72.1006 -25.7998 -72.1006 -97.8994c0 -56.3008 28.7002 -91.5 69.2002 -91.5c45.7002 0 76.2002 34 76.2002 34z" />
+ <glyph glyph-name="lastfm-square" unicode="&#xf203;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM307.8 103.1c45.4004 0 76.2002 13.9004 76.1006 50.6006c0 45.5 -39.1006 50.3994 -71.8008 57.5
+c-16.0996 3.7002 -20.8994 10.2998 -20.8994 21.2998c0 12.5 9.89941 19.7998 26 19.7998c17.5996 0 27.0996 -6.59961 28.5996 -22.3994l36.7002 4.39941c-2.90039 33 -25.5996 46.6006 -63 46.6006c-32.9004 0 -65.2002 -12.4004 -65.2002 -52.4004
+c0 -24.9004 12.1006 -40.7002 42.5 -48c28.1006 -6.59961 49.9004 -8.7002 49.9004 -28.5996c0 -13.6006 -13.2002 -19.1006 -38.1006 -19.1006c-37 0 -52.3994 19.4004 -61.1992 46.2002c-20 60.5 -27.3008 101.9 -100.801 101.9c-53.8994 0 -82.5 -34.1006 -82.5 -92
+c0 -55.7002 28.6006 -85.8008 79.9004 -85.8008c41.4004 0 61.2002 19.4004 61.2002 19.4004l-11.7002 31.9004s-19 -21.3008 -47.5996 -21.3008c-25.3008 0 -43.3008 22 -43.3008 57.2002c0 45.1006 22.7002 61.2002 45.1006 61.2002c41.5 0 46.7002 -33.2998 63 -84.2998
+c11.7002 -35.5 33.7002 -64.1006 97.0996 -64.1006z" />
+ <glyph glyph-name="ioxhost" unicode="&#xf208;" horiz-adv-x="640"
+d="M616 288c13.2998 0 24 -10.7002 24 -24c0 -13.2002 -10.7002 -24 -24 -24h-52.7002c3.10059 -15.5 4.7002 -31.5996 4.7002 -48c0 -137 -111 -248 -248 -248c-102.9 0 -191.2 62.7002 -228.7 152h-67.2998c-13.2998 0 -24 10.7002 -24 24c0 13.2002 10.7002 24 24 24
+h52.7002c-3.10059 15.5 -4.7002 31.5996 -4.7002 48c0 137 111 248 248 248c102.9 0 191.2 -62.7002 228.7 -152h67.2998zM520 192c0 16.5996 -2 32.5996 -5.7998 48h-298.2c-13.2998 0 -24 10.7002 -24 24c0 13.2002 10.7002 24 24 24h279.5
+c-33.9004 62 -99.7998 104 -175.5 104c-110.5 0 -200 -89.5 -200 -200c0 -16.5996 2 -32.5996 5.7998 -48h298.2c13.2998 0 24 -10.7002 24 -24c0 -13.2002 -10.7002 -24 -24 -24h-279.5c33.9004 -62 99.7998 -104 175.5 -104c110.5 0 200 89.5 200 200zM216 216h208
+c13.2998 0 24 -10.7002 24 -24c0 -13.2002 -10.7002 -24 -24 -24h-208c-13.2998 0 -24 10.7002 -24 24c0 13.2002 10.7002 24 24 24z" />
+ <glyph glyph-name="angellist" unicode="&#xf209;"
+d="M347.1 232.6c48 -11.6992 54.9004 -50.5996 54.9004 -93.6992c0 -114.301 -73.4004 -202.9 -191.4 -202.9c-96.1992 0 -164.6 76.4004 -164.5 148.6c0 37.1006 14.2002 61.7002 51.1006 71.7002c-3.10059 8.2998 -8 20.7998 -8 29.7002
+c0 23.5 24.8994 52.5996 48.2998 52.5996c6.90039 0 13.7002 -2 20 -4.2998c-12.4004 35.2002 -46.5996 126.7 -46.5996 162c0 28.7998 14.5996 51.7002 45.6992 51.7002c40 0 85.4004 -144 95.1006 -172.5c12.5 31.4004 52.5 163.1 97.0996 163.1
+c28 0 43.7002 -22.2998 43.7002 -48.8994c0 -30.2002 -33.7002 -124.5 -45.4004 -157.101zM311.7 340l-33.1006 -93.7002l34 -6c8.5 23.4004 47.1006 128.9 47.1006 148c0 7.10059 -2.2998 16 -10.9004 16c-16 0 -33.0996 -52 -37.0996 -64.2998zM142.3 399.7
+c0 -29.1006 34.6006 -120 45.5 -148.8c7.7002 4.39941 19.7998 2.69922 35.4004 1.39941l-34.6006 100.3c-31.7998 92.8008 -46.2998 59 -46.2998 47.1006zM140 204c-7.7002 0 -20.2998 -13.4004 -20.4004 -21.0996c0 -20.8008 56 -97.7002 76.9004 -97.7002
+c5.7002 0 10.5996 6.2998 10.5996 11.3994c0 12.8008 -37.7998 107.4 -67.0996 107.4zM324.3 17.7002c55.2998 61.5 49.1006 158.6 31 174.7c-24 21.0996 -106 29.0996 -138.3 29.0996c-17.2998 0 -17.4004 -6.40039 -17.4004 -13.0996
+c0 -43.7002 92.9004 -39.7002 120.601 -39.7002c11.2002 0 15.7998 -9.90039 16.8994 -21.1006c-7.39941 -7.39941 -17.6992 -11.6992 -27.3994 -15.3994c-9.40039 -3.40039 -19.1006 -7.10059 -27.1006 -13.1006c-22 -16 -43.6992 -43.3994 -43.6992 -71.6992
+c0 -17.7002 10.5996 -32.9004 10.5996 -50.3008c0 -0.299805 -2 -6.5 -2 -7.39941c-32.5996 2.2998 -40.5996 34.5996 -41.7002 61.7002c-3.39941 -0.900391 -8 -0.600586 -11.7002 -0.600586c5.10059 -17.7998 -11.8994 -42 -38 -42
+c-37.7998 0 -88 57.2002 -58.2998 86.9004c28.7002 -35.9004 35 -51.4004 51.1006 -51.4004c4 0 11.6992 3.40039 11.6992 8.2998c0 12.8008 -42.8994 73.1006 -54.2998 73.1006c-16.7998 0 -37.7002 -24.9004 -20.5996 -68.2998
+c22.5996 -55.7002 69.5 -88.3008 128.899 -88.3008c43.4004 0 80.6006 16.6006 109.7 48.6006zM225.7 143.7c3.2002 -8.2998 6.59961 -16.6006 9.39941 -25.1006c6.30078 7.10059 12.9004 13.7002 20.3008 19.1006c-10 2 -20 2.89941 -29.7002 6z" />
+ <glyph glyph-name="buysellads" unicode="&#xf20d;"
+d="M224 297.3l42.9004 -160.7h-85.8008zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM382.7 42.7002l-94.5 298.7h-128.4l-94.5 -298.7h90.7002l111.7 91.5996l24.2002 -91.5996h90.7998z
+" />
+ <glyph glyph-name="connectdevelop" unicode="&#xf20e;" horiz-adv-x="576"
+d="M550.5 207c6.69629 -1.33887 11.7861 -7.5 11.7881 -14.7324c0 -7.5 -5.3584 -13.6602 -12.3223 -15l-54.9111 -95.3574c0.536133 -1.60742 0.804688 -3.21387 0.804688 -4.82129c0 -7.23145 -5.09082 -13.3926 -12.0547 -14.7314l-51.6963 -90.2686
+c0.535156 -1.33887 0.802734 -2.67773 0.802734 -4.28516c0 -8.30371 -6.69727 -15.2676 -15.2686 -15.2676c-4.28516 0 -8.30371 1.875 -10.9814 4.82129h-107.144c-2.67871 -3.21484 -6.96484 -5.35742 -11.5176 -5.35742s-8.83887 2.14258 -11.5166 5.35645h-106.875
+c-2.67969 -3.21484 -6.69727 -5.35742 -11.5186 -5.35742c-8.30371 0 -15.2676 6.69727 -15.2676 15.2676c0 1.875 0.535156 3.75 1.07031 5.35742l-51.6963 89.7324c-6.96484 1.33887 -12.0547 7.5 -12.0547 14.7314c0 1.875 0.268555 3.21387 0.804688 4.82129
+l-55.1797 95.3574c-6.96484 1.60742 -12.0537 7.76855 -12.0537 15c0 7.5 5.3584 13.6611 12.5898 15l53.3047 92.1436c0 0.536133 -0.268555 1.07227 -0.268555 1.60645c0 6.16113 3.75098 11.251 9.10742 13.6611l55.9824 97.2334
+c-0.536133 1.33887 -1.07129 3.21387 -1.07129 4.82129c0 8.57129 6.96484 15.2676 15.2676 15.2676c4.82227 0 8.83887 -2.14258 11.7861 -5.625h106.071c2.67871 3.48242 6.69629 5.625 11.5176 5.625s8.83887 -2.14258 11.5176 -5.62402h106.606
+c2.94727 3.48242 6.96484 5.625 11.7861 5.625c8.30371 0 15.2676 -6.69727 15.2676 -15.2676c0 -1.60742 -0.535156 -3.21484 -1.07031 -4.82129l55.4463 -95.8936c8.03613 -0.267578 14.7324 -6.96484 14.7324 -15.001c0 -2.67871 -0.803711 -5.08984 -1.875 -7.23145z
+M153.535 -2.73242v75.8037h-43.6602zM153.535 81.1074v50.624l-44.999 -47.4102c0.535156 -1.07227 1.07129 -2.14355 1.33887 -3.21387h43.6602zM153.535 143.518l0.000976562 92.9463l-50.0889 51.9648c-2.41113 -1.60645 -5.08887 -2.41113 -7.76855 -2.67871
+l-51.9648 -90c0.268555 -1.07227 0.268555 -2.14258 0.268555 -3.48242c0 -1.33887 0 -2.67871 -0.535156 -4.01758l55.7129 -96.4287c1.33887 -0.267578 2.67871 -1.07129 4.01758 -1.60742zM153.535 245.84v72.0527l-43.9277 -15.8037
+c0 -0.267578 0.267578 -0.803711 0.267578 -1.07227c0 -2.94531 -0.803711 -5.62402 -2.14258 -7.7666zM153.535 326.465v59.7324l-43.6602 -75.5361zM480.054 287.357l-0.267578 0.267578l-98.0361 -101.518l63.75 -67.2324l35.3584 167.143zM291.75 92.8926
+l-11.25 -11.7852h22.7676zM291.482 104.143l79.2852 82.2324l-83.0352 87.5889l-79.5537 -84.375zM296.839 98.25l16.875 -17.1426h124.02l5.8916 28.125l-67.5 71.25zM410.411 403.607l-117.053 -124.019l83.0342 -87.5889l97.5 101.25
+c-1.33984 2.14258 -2.14258 4.82129 -2.14258 7.7666v0.536133l-57.8574 100.714c-1.33984 0.268555 -2.41016 0.804688 -3.48145 1.34082zM401.304 405.75h-4.28711l-166.339 -60l57.0547 -60.2676zM277.821 405.75h-103.929l50.8936 -53.5713l148.393 53.5713h-75
+c-2.67871 -2.67773 -6.16016 -4.28516 -10.1787 -4.28516s-7.50098 1.60742 -10.1787 4.28516zM161.572 400.125v-70.7148l54.9111 19.8213l-51.1611 53.8398c-0.730469 -0.25293 -1.93066 -0.613281 -2.67969 -0.804688zM161.572 320.839v-83.3037l40.9814 -42.0527
+l79.5537 84.1064l-59.7324 63.2139zM161.572 228.161v-76.0723l36.4277 38.3037zM161.572 140.303v-59.1953h107.678l17.1426 17.6777l-82.7676 85.9814zM168.536 -21.75h1.33887l91.6074 94.8213h-99.9102v-89.7324l1.07031 -1.60645
+c2.41113 -0.804688 4.28613 -1.875 5.89355 -3.48242zM298.447 -21.75h104.194l-91.6064 94.8213h-38.3037l-91.6074 -94.8213h96.4287c2.68066 2.41016 6.42871 4.28516 10.4473 4.28516s7.76758 -1.875 10.4473 -4.28516zM418.447 -9.96387l17.4121 83.0361h-114.376
+l89.1953 -91.875c1.07227 0.536133 2.14355 1.07031 3.48242 1.33887zM431.303 12.2676l34.8223 60.8037h-21.9639zM466.125 81.1074c0.267578 1.07129 0.803711 2.14258 1.33887 2.94531l-17.1426 18.2139l-4.55371 -21.1592h20.3574zM532.286 188.518
+c-0.268555 1.33984 -0.536133 2.41113 -0.536133 3.75c0 1.60742 0.536133 2.94629 0.802734 4.28516l-45.8027 79.2861l-34.5537 -163.928l20.625 -21.9639c1.33887 0.802734 2.67871 1.33887 4.01758 1.87402z" />
+ <glyph glyph-name="dashcube" unicode="&#xf210;"
+d="M326.6 344l102.2 104v-427c0 -50.5 -40.0996 -85 -91.2002 -85h-227.199c-51.1006 0 -91.2002 34.5 -91.2002 85v229.5c0 50.2002 40.0996 93.5 91.2002 93.5h216.199zM153.9 31.5v-0.0996094h223.8l-51.1006 52.2998v123.5c0 17.7002 -14.2998 32.5 -32 32.5h-140.699
+c-17.7002 0 -32.4004 -14.7998 -32.4004 -32.5v-142.9c0 -17.7002 14.7002 -32.7998 32.4004 -32.7998z" />
+ <glyph glyph-name="forumbee" unicode="&#xf211;"
+d="M5.7998 138.3c-3.7998 17 -5.7998 34.2002 -5.7998 51.4004c0 123.3 99.7998 223.3 223.1 223.3c16.6006 0 33.3008 -2 49.3008 -5.5c-123.4 -47 -220.5 -145.5 -266.601 -269.2zM398.7 327.5c-151.101 -44 -269.2 -164.4 -312.3 -315.7
+c-17.2002 13.4004 -32.7002 30.9004 -45.2002 49c43.3994 149.9 160.1 267.7 309.7 312c18.0996 -12.5996 34.0996 -27.7998 47.7998 -45.2998zM414.5 74.7998c13.0996 -35.2998 24.2002 -73.2998 33.5 -109.8c-36.0996 9.2998 -72 20.5 -107 33.5996
+c-25.7002 -16 -54.5996 -26.8994 -84.5996 -31.2998c42.5996 79.7002 108.199 147.4 187.6 190.3c-4.09961 -29.0996 -14.2998 -57.6992 -29.5 -82.7998zM444.2 220.3c-113.7 -46.7002 -204.2 -139.399 -250.5 -253.5c-19.6006 2.7002 -38.5 7.60059 -56.6006 15.2002
+c44.9004 138.5 153.4 249.3 291.301 295.1c7.89941 -18.0996 13.1992 -37.2998 15.7998 -56.7998z" />
+ <glyph glyph-name="leanpub" unicode="&#xf212;" horiz-adv-x="576"
+d="M386.539 336.515l15.0957 -248.955l-10.9785 0.275391c-36.2324 0.824219 -71.6406 -8.7832 -102.657 -27.9971c-31.0156 19.2139 -66.4238 27.9971 -102.657 27.9971c-45.5635 0 -82.0693 -10.7051 -123.516 -27.7227l31.291 258.288
+c28.5459 11.8027 61.4834 18.1143 92.2256 18.1143c41.1729 0 73.8359 -13.1748 102.657 -42.5439c27.7227 28.2715 59.0127 41.7217 98.5391 42.5439zM569.07 0c-25.5264 0 -47.4854 5.21484 -70.542 15.6445c-34.3105 15.6455 -69.9932 24.9785 -107.871 24.9785
+c-38.9775 0 -74.9346 -12.9014 -102.657 -40.623c-27.7227 27.7227 -63.6797 40.623 -102.657 40.623c-37.8779 0 -73.5605 -9.33301 -107.871 -24.9785c-22.2324 -9.88086 -44.7402 -15.6445 -69.1689 -15.6445h-1.37305l42.5449 349.141
+c39.251 22.2334 87.0117 34.8594 132.301 34.8594c37.0547 0 75.209 -7.68457 106.225 -29.0947c31.0156 21.4102 69.1699 29.0947 106.225 29.0947c45.2891 0 93.0498 -12.626 132.301 -34.8594zM525.702 44.7412l-34.0361 280.246
+c-30.7422 13.999 -67.248 21.4102 -101.009 21.4102c-38.4287 0 -74.3848 -12.0771 -102.657 -38.7021c-28.2725 26.625 -64.2275 38.7021 -102.657 38.7021c-33.7607 0 -70.2666 -7.41113 -101.009 -21.4102l-34.0361 -280.246
+c47.2109 19.4863 82.8945 33.4854 135.045 33.4854c37.6045 0 70.8174 -9.60547 102.657 -29.6436c31.8398 20.0381 65.0518 29.6436 102.657 29.6436c52.1504 0 87.834 -13.999 135.045 -33.4854z" />
+ <glyph glyph-name="sellsy" unicode="&#xf213;" horiz-adv-x="640"
+d="M539.71 210.692c55.1572 -13.4834 94.0742 -63.124 94.0732 -119.509c0 -68.0264 -55.4639 -123.184 -123.185 -123.184h-381.197c-67.7217 0 -123.186 55.1572 -123.185 123.185c0 47.4961 27.8848 91.0098 70.7852 111.234
+c-2.14453 7.35449 -3.06543 15.0146 -3.06543 22.3691c0 46.2705 37.6914 83.9609 83.9629 83.9609c20.2227 0 39.835 -7.35449 55.1562 -20.5303c18.3867 74.7695 85.8008 127.781 163.021 127.781c92.542 0 167.924 -75.3818 167.924 -167.924
+c0 -12.5635 -1.22559 -25.127 -4.29004 -37.3838zM199.88 46.4463v110.928c0 8.27344 -7.04688 15.3213 -15.3213 15.3213h-30.9482c-8.27344 0 -15.3213 -7.04785 -15.3213 -15.3213v-110.928c0 -8.27344 7.04688 -15.3213 15.3213 -15.3213h30.9482
+c8.27344 0 15.3213 7.04688 15.3213 15.3213zM289.357 46.4463v131.458c0 8.27246 -7.04883 15.3203 -15.3223 15.3203h-30.9492c-8.27246 0 -15.3213 -7.04688 -15.3213 -15.3203v-131.458c0 -8.27344 7.04688 -15.3213 15.3213 -15.3213h30.9492
+c8.27344 0 15.3223 7.04688 15.3223 15.3213zM378.834 46.4463v162.714c0 8.27246 -7.04688 15.3213 -15.3213 15.3213h-30.9482c-8.27441 0 -15.3223 -7.04785 -15.3223 -15.3213v-162.714c0 -8.27344 7.04785 -15.3213 15.3223 -15.3213h30.9482
+c8.27441 0 15.3213 7.04688 15.3213 15.3213zM465.861 46.4463v224.612c0 8.58008 -7.04785 15.6279 -15.3223 15.6279h-28.4971c-8.27441 0 -15.3213 -7.04883 -15.3213 -15.6279v-224.612c0 -8.27344 7.04688 -15.3213 15.3213 -15.3213h28.4971
+c8.27441 0 15.3223 7.04688 15.3223 15.3213z" />
+ <glyph glyph-name="shirtsinbulk" unicode="&#xf214;"
+d="M100 37.7002l4.40039 9.89941l30.5996 -13.3994l-4.40039 -9.90039zM139.4 20.2002l4.39941 9.89941l30.6006 -13.3994l-4.40039 -9.90039zM311.5 34.2002l30.5996 13.3994l4.40039 -9.89941l-30.5996 -13.4004zM179.1 3l4.40039 9.59961l30.2998 -13.3994
+l-4.39941 -9.90039zM60.4004 55.2002l4.39941 9.89941l30.6006 -13.6992l-4.40039 -9.60059zM271.8 16.7002l30.6006 13.3994l4.39941 -9.89941l-30.5996 -13.4004zM232.5 -0.799805l30.5996 13.3994l4.40039 -9.59961l-30.5996 -13.7002zM350.9 51.4004l30.5996 13.6992
+l4.40039 -9.89941l-30.6006 -13.4004zM170 401.4v-10.5h-33.5v10.5h33.5zM122.8 401.4l-0.0996094 -10.5h-33.5v10.5h33.5996zM75.5 401.4l0.0996094 -10.5h-33.2998v10.5h33.2002zM217 401.4v-10.5h-33.2002v10.5h33.2002zM311.5 401.4v-10.5h-33.5v10.5h33.5zM358.8 401.4
+v-10.5h-33.5v10.5h33.5zM264.2 401.4v-10.5h-33.2002v10.5h33.2002zM405.7 401.4v-10.5h-33.2998v10.5h33.2998zM52.7998 96.9004v-33.5h-10.7998v33.5h10.7998zM122.8 312.8l-0.0996094 -10.5h-33.5v10.5h33.5996zM52.7998 302.2v-23h-10.7998v33.5h33.5996v-10.5h-22.7998
+zM221.7 73.5996c-50.2002 0 -91.2998 40.8008 -91.2998 91.3008c0 50.1992 41.0996 91.2998 91.2998 91.2998c50.5 0 91.2998 -41.1006 91.2998 -91.2998c0 -50.5 -40.7998 -91.3008 -91.2998 -91.3008zM173.5 184.7c0 -44.2998 77.5996 -11.9004 77.5996 -38
+c0 -13.1006 -24 -14.2998 -32.6992 -14.2998c-12.3008 0 -29.8008 2.69922 -35.9004 14.8994h-0.900391l-9 -18.3994c14.8008 -9.30078 29.1006 -12.2002 47.2002 -12.2002c19.5 0 51 5.7998 51 31.2002c0 48.0996 -78.5 16.2998 -78.5 37.8994
+c0 13.1006 20.7998 14.9004 29.7998 14.9004c10.8008 0 29.2002 -3.2002 35.6006 -13.1006h0.899414l8.80078 16.9004c-15.1006 6.2002 -27.4004 12 -44.3008 12c-20.0996 0 -49.5996 -6.40039 -49.5996 -31.7998zM52.7998 269.6v-33.5996h-10.7998v33.5996h10.7998z
+M395.2 63.4004v33.5h10.7998v-33.5h-10.7998zM52.7998 140.1v-33.5h-10.7998v33.5h10.7998zM0 444.3h448v-406l-226.3 -98.5996l-221.7 98.5996v406zM418.8 57.2002h0.100586v270.1h-389.7v-270.1l192.8 -85.7002zM418.8 356.5h0.100586v58.5996h-389.7v-58.5996h389.6z
+M52.7998 226.4v-33.5h-10.7998v33.5h10.7998zM52.7998 183.2v-33.5h-10.7998v33.5h10.7998zM170 312.8v-10.5h-33.5v10.5h33.5zM395.2 149.7v33.5h10.7998v-33.5h-10.7998zM395.2 192.9v33.5h10.7998v-33.5h-10.7998zM217 312.8v-10.5h-33.2002v10.5h33.2002zM395.2 236
+v33.5h10.7998v-33.5h-10.7998zM395.2 106.5v33.5h10.7998v-33.5h-10.7998zM264.2 312.8v-10.5h-33.2002v10.5h33.2002zM311.5 312.8v-10.5h-33.5v10.5h33.5zM395.2 279.2l0.0996094 23h-22.7998v10.5h33.5v-33.5h-10.7998zM358.8 312.8v-10.5h-33.5v10.5h33.5z" />
+ <glyph glyph-name="simplybuilt" unicode="&#xf215;" horiz-adv-x="512"
+d="M481.2 384c14.7002 0 26.5 -11.7998 26.7002 -26.2998v-331.4c0 -14.5 -11.8008 -26.2998 -26.6006 -26.2998h-450.399c-14.8008 0 -26.6006 11.7998 -26.6006 26.2998v331.4c0 14.5 11.7998 26.2998 26.4004 26.2998h106c14.5996 0 26.5996 -11.7998 26.5996 -26.2998
+v-39.6006h185.3v39.6006c0 14.5 12.1006 26.2998 26.6006 26.2998h106zM149.8 92.2002c36.9004 0 66.6006 29.7002 66.6006 66.3994c0 36.9004 -29.7002 66.6006 -66.6006 66.6006c-36.7002 0 -66.3994 -29.7002 -66.3994 -66.6006
+c0 -36.6992 29.7998 -66.3994 66.3994 -66.3994zM362.2 92.2002c36.5996 0 66.3994 29.7002 66.3994 66.5996c0 36.7002 -29.7998 66.4004 -66.3994 66.4004c-36.9004 0 -66.6006 -29.7998 -66.6006 -66.4004c0 -36.8994 29.7002 -66.5996 66.6006 -66.5996z" />
+ <glyph glyph-name="skyatlas" unicode="&#xf216;" horiz-adv-x="640"
+d="M640 118.7c0 -65.9004 -52.5 -114.4 -117.5 -114.4c-165.9 0 -196.6 249.7 -359.7 249.7c-146.899 0 -147.1 -212.2 5.60059 -212.2c42.5 0 90.8994 17.7998 125.3 42.5c5.59961 4.10059 16.8994 16.2998 22.7998 16.2998s10.9004 -5 10.9004 -10.8994
+c0 -7.7998 -13.1006 -19.1006 -18.7002 -24.1006c-40.9004 -35.5996 -100.3 -61.1992 -154.7 -61.1992c-83.4004 -0.100586 -154 59 -154 144.899c0 85.9004 67.5 149.101 152.8 149.101c185.3 0 222.5 -245.9 361.9 -245.9c99.8994 0 94.7998 139.7 3.39941 139.7
+c-17.5 0 -35 -11.6006 -46.8994 -11.6006c-8.40039 0 -15.9004 7.2002 -15.9004 15.6006c0 11.5996 5.2998 23.7002 5.2998 36.2998c0 66.5996 -50.8994 114.7 -116.899 114.7c-53.1006 0 -80 -36.9004 -88.7998 -36.9004c-6.2002 0 -11.2002 5 -11.2002 11.2002
+c0 5.59961 4.09961 10.2998 7.7998 14.4004c25.2998 28.7998 64.7002 43.6992 102.8 43.6992c79.4004 0 139.101 -58.3994 139.101 -137.8c0 -6.89941 -0.300781 -13.7002 -1.2002 -20.5996c11.8994 3.09961 24.0996 4.7002 35.8994 4.7002
+c60.7002 0 111.9 -45.3008 111.9 -107.2z" />
+ <glyph glyph-name="pinterest-p" unicode="&#xf231;" horiz-adv-x="384"
+d="M204 441.5c94.2002 0 180 -64.7998 180 -164.1c0 -93.3008 -47.7002 -196.801 -153.9 -196.801c-25.1992 0 -57 12.6006 -69.2998 36c-22.7998 -90.2998 -21 -103.8 -71.3994 -172.8c-5.2002 -1.89941 -3.5 -2.2998 -6.90039 1.5c-1.7998 18.9004 -4.5 37.5 -4.5 56.4004
+c0 61.2002 28.2002 149.7 42 209.1c-7.5 15.2998 -9.59961 33.9004 -9.59961 50.7002c0 80 93.8994 92 93.8994 25.7998c0 -39 -26.3994 -75.5996 -26.3994 -113.399c0 -25.8008 21.2998 -43.8008 46.1992 -43.8008c69 0 90.3008 99.6006 90.3008 152.7
+c0 71.1006 -50.4004 109.8 -118.5 109.8c-79.2002 0 -140.4 -57 -140.4 -137.399c0 -38.7002 23.7002 -58.5 23.7002 -67.7998c0 -7.80078 -5.7002 -35.4004 -15.6006 -35.4004c-24 0 -63.5996 40 -63.5996 110.4c0 110.699 101.4 179.1 204 179.1z" />
+ <glyph glyph-name="whatsapp" unicode="&#xf232;"
+d="M380.9 350.9c41.8994 -42 67.0996 -97.7002 67.0996 -157c0 -122.4 -101.8 -222 -224.1 -222h-0.100586c-37.2002 0 -73.7002 9.2998 -106.1 27l-117.7 -30.9004l31.5 115c-19.4004 33.7002 -29.5996 71.9004 -29.5996 111c0 122.4 99.5996 222 222 222
+c59.2998 0 115.1 -23.0996 157 -65.0996zM223.9 9.2998c101.699 0 186.6 82.7998 186.6 184.601c0.0996094 49.2998 -21.2998 95.5996 -56.0996 130.5c-34.8008 34.8994 -81.1006 54.0996 -130.4 54.0996c-101.8 0 -184.6 -82.7998 -184.6 -184.5
+c0 -34.9004 9.69922 -68.7998 28.1992 -98.2002l4.40039 -7l-18.5996 -68.0996l69.7998 18.2998l6.7002 -4c28.2998 -16.7998 60.7998 -25.7002 94 -25.7002zM325.1 147.5c5.5 -2.7002 9.2002 -4.09961 10.5 -6.59961c1.40039 -2.30078 1.40039 -13.4004 -3.19922 -26.4004
+c-4.60059 -13 -26.7002 -24.7998 -37.4004 -26.4004c-17.5996 -2.59961 -31.4004 -1.2998 -66.5996 13.9004c-55.7002 24.0996 -92 80.0996 -94.8008 83.7998c-2.69922 3.7002 -22.5996 30.1006 -22.5996 57.4004s14.2998 40.7002 19.4004 46.2998
+c5.09961 5.5 11.0996 6.90039 14.7998 6.90039s7.39941 0 10.5996 -0.200195c3.40039 -0.200195 8 1.2998 12.5 -9.5c4.60059 -11.1006 15.7002 -38.4004 17.1006 -41.2002c1.39941 -2.7998 2.2998 -6 0.5 -9.7002c-10.6006 -21.2002 -22 -20.5 -16.3008 -30.2998
+c21.5 -36.9004 42.9004 -49.7002 75.5 -66c5.5 -2.7998 8.80078 -2.2998 12 1.40039c3.30078 3.7998 13.9004 16.1992 17.6006 21.7998c3.7002 5.59961 7.39941 4.7002 12.5 2.7998c5.09961 -1.7998 32.3994 -15.2002 37.8994 -18z" />
+ <glyph glyph-name="viacoin" unicode="&#xf237;" horiz-adv-x="384"
+d="M384 416l-48 -112h48v-48h-68.5l-13.7998 -32h82.2998v-48h-102.8l-89.2002 -208l-89.2002 208h-102.8v48h82.2998l-13.7998 32h-68.5v48h48l-48 112h64l80.7998 -192h94.5l80.7002 192h64zM192 112l27 64h-54z" />
+ <glyph glyph-name="medium" unicode="&#xf23a;"
+d="M0 416h448v-448h-448v448zM372.2 309.9v5h-83.2002l-59.2998 -147.9l-67.4004 148h-87.2998v-5.09961l28.0996 -33.9004c2.80078 -2.5 4.2002 -6.09961 3.80078 -9.7998v-133c0.799805 -4.7998 -0.700195 -9.7002 -4.10059 -13.2002l-31.5996 -38.2998v-5.10059h89.7998
+v5.10059l-31.5996 38.2998c-3.40039 3.5 -5.10059 8.40039 -4.40039 13.2002v115l78.7002 -171.601h9.09961l67.6006 171.601v-136.9c0 -3.59961 0 -4.2998 -2.40039 -6.7002l-24.2998 -23.5996v-4.90039h118v5.10059l-23.5 23
+c-2.10059 1.5 -3.10059 4.09961 -2.7002 6.7002v169.3c-0.400391 2.5 0.599609 5.09961 2.7002 6.7002z" />
+ <glyph glyph-name="y-combinator" unicode="&#xf23b;"
+d="M448 416v-448h-448v448h448zM236 160.5l77.5 145.5h-32.7002l-45.7998 -91c-4.7002 -9.2998 -9 -18.2998 -12.7998 -26.7998l-12.2002 26.7998l-45.2002 91h-35l76.7002 -143.8v-94.5h29.5v92.7998z" />
+ <glyph glyph-name="optin-monster" unicode="&#xf23c;" horiz-adv-x="576"
+d="M572.6 26.5996c1 -3.5 1.90039 -7 1.7002 -10.6992c0.799805 -31.6006 -44.2998 -64 -73.5 -65.1006c-17.2998 -0.799805 -34.5996 8.40039 -42.7002 23.5c-113.5 -4.09961 -227 -4.89941 -340.199 0c-8.40039 -15.0996 -25.7002 -24 -43 -23.5
+c-28.9004 1.10059 -74 33.5 -73.5 65.1006c0.299805 3.7998 0.799805 7.2998 1.89941 10.7998c-5.59961 9.39941 -4.7998 15.2998 5.40039 11.5996c3.2998 5.2002 7 9.5 11.0996 13.7998c-2.5 10.9004 1.2998 14.1006 11.1006 9.2002c4.5 3.2998 10 6.5 15.8994 9.2002
+c0 15.7998 11.7998 11.2002 17.2998 5.7002c12.5 1.7998 20.2002 -0.700195 26.8008 -5.7002v19.7002c-12.9004 0 -40.6006 11.3994 -45.9004 36.2002c-5 20.7998 2.59961 38.0996 25.0996 47.5996c0.800781 5.90039 8.10059 14 14.9004 15.9004
+c7.59961 1.89941 12.5 -4.60059 14.0996 -10.3008c7.40039 0 17.8008 -1.5 21.1006 -8.09961c5.39941 0.5 11.0996 1.40039 16.5 1.90039c-2.40039 1.89941 -5.10059 3.5 -8.10059 4.59961c-5.09961 8.90039 -13.7998 11.0996 -24.5996 11.5996
+c0 0.800781 0 1.60059 0.299805 2.7002c-19.7998 0.5 -44.0996 5.60059 -54.8994 17.7998c-21.3008 23.6006 -15.9004 83.6006 12.1992 103.5c8.40039 5.7002 21.6006 0.800781 22.7002 -9.69922c2.40039 -20.6006 0.400391 -26.8008 26.2002 -25.9004
+c8.09961 7.7998 16.7998 14.5996 26.5 20c-14.9004 1.2998 -28.9004 -1.59961 -43.7998 -3.7998c12.7002 12.5 23.8994 25.3994 56.7002 42.3994c23.5 11.9004 50 20.8008 76.1992 23.2002c-18.5996 7.90039 -40 11.9004 -59.6992 16.5
+c76.5 16.2002 174.6 22.1006 244.199 -37.5996c18.1006 -15.4004 32.4004 -36.2002 42.7002 -60c39.7998 -4.90039 36.4004 5.5 38.6006 25.0996c1.09961 10.2998 14.2998 15.4004 22.6992 9.5c14.9004 -10.5 22.2002 -30.7998 24.6006 -48.0996
+c2.2002 -17.7998 0.299805 -41.2998 -12.4004 -55.1006c-10.7998 -12.1992 -34.2998 -17.5996 -53.7998 -18.0996v-2.7998c-11.0996 -0.200195 -20.2998 -2.40039 -25.7002 -11.6006c-3 -1.09961 -5.7002 -2.69922 -8.39941 -4.59961
+c5.69922 -0.5 11.3994 -1.40039 16.7998 -1.90039c1.89941 5.60059 12.5996 8.40039 21.0996 8.40039c1.7002 5.40039 6.7998 11.9004 14.1006 10.2998c7.2998 -1.59961 14.0996 -10 14.8994 -15.8994c10.7998 -4.40039 22.1006 -12.2002 25.1006 -25.7002
+c1.89941 -8.10059 1.69922 -15.1006 0.299805 -21.9004c-5.7002 -25.2002 -33.2998 -36.2002 -45.9004 -36.2002c0 -6.69922 0 -13.1992 -0.299805 -19.6992c8.09961 6 16.4004 7.19922 26.7998 5.69922c6 5.90039 17.6006 9.40039 17.6006 -5.69922
+c5.59961 -2.7002 11.2998 -6 15.8994 -9.2002c10.1006 5 13.7002 0.5 10.7998 -9.2002c4.10059 -4.2998 8.10059 -8.90039 11.1006 -13.7998c10.0996 3.59961 11 -2.10059 5.39941 -11.6006zM498.8 280.6c17.2998 -6.69922 26.2002 -22.0996 30.2998 -35.6992
+c1.10059 10.5996 -2.69922 39.5 -13.7998 51.0996c-7.2998 7.2998 -14.0996 5.09961 -14.0996 -0.799805c0 -6.2002 -1.2998 -11.6006 -2.40039 -14.6006zM494.2 273.9c-3.2002 -3.30078 -9.2002 -4.90039 -14.1006 -5.7002c13 -15.7002 17 -41.7002 12.7002 -63
+c10.7998 2.2002 20.5 6.2998 26.2002 12.2002c1.90039 2.19922 3.7998 4.89941 4.90039 7.59961c-1.10059 21.2998 -10.2002 42.7002 -29.7002 48.9004zM470.1 267.1c-3.69922 0 -8.09961 0 -11.7998 0.300781c7.5 -20.6006 12.4004 -42.7002 14.2998 -64.6006
+c3.5 0 7.5 0.299805 11.6006 0.799805c5.89941 24.3008 -0.299805 51.6006 -14.1006 63.5zM47.5 245c4.09961 13.5 13 28.9004 30.2998 35.7002c-1 3 -2.39941 8.39941 -2.39941 14.5996c0 5.90039 -7.10059 8.10059 -14.1006 0.799805
+c-11.3994 -11.5996 -14.8994 -40.5996 -13.7998 -51.0996zM57.2002 217.4c5.7002 -6.2002 15.3994 -10 26.2002 -12.2002c-4.30078 21.3994 -0.300781 47.2998 12.6992 63c-4.89941 0.799805 -10.8994 2.5 -14.0996 5.7002
+c-19.4004 -6.2002 -28.2998 -27.6006 -29.7002 -48.9004c1.40039 -2.7002 3 -5.40039 4.90039 -7.59961zM105.1 202.8c2.40039 22.2002 9.10059 43.7998 19.8008 63.5c-5.2002 -1.09961 -10 -3 -14.9004 -4.89941l-12.2002 -5.10059v0.299805
+c-7.2998 -14.0996 -10 -34.3994 -5.39941 -53c4.59961 -0.5 8.59961 -0.799805 12.6992 -0.799805zM289.1 365.5c-41.8994 0 -76.1992 -34.0996 -76.1992 -75.9004c0 -42.1992 34.2998 -76.1992 76.1992 -76.1992c41.9004 0 76.2002 34 76.2002 76.1992
+c0 41.9004 -34.2998 75.9004 -76.2002 75.9004zM404.7 191.2c-12.9004 0.799805 -26.2002 0.799805 -39.5 1.09961c10 -50.5996 3.2998 -64.7002 16.5 -58.0996c16 8.09961 22.7002 39.2002 23 57zM350.7 192.8c-18.9004 0.299805 -38.1006 0.299805 -57 0v0.299805
+c-0.299805 -5.19922 0.200195 -38.0996 4.2998 -41.0996c11.0996 -5.40039 39.5 -4.59961 51.0996 -1.09961c5.40039 1.59961 2.40039 37 1.60059 41.8994zM278.3 139c4.60059 2.5 2.40039 45.4004 1.2998 53.7002v0.299805
+c-19.3994 -0.299805 -38.5996 -0.299805 -57.7998 -0.799805c-1.89941 -9.2002 -4.59961 -48.9004 1.90039 -51.6006c13 -5.69922 41.5996 -5.09961 54.5996 -1.59961zM171.8 190.1c-5.39941 -19.6992 0.299805 -45.0996 22.2002 -54.8994
+c5.40039 -2.5 8.59961 -2.5 9.7002 4.2998c1.89941 8.7002 2.5 36.7998 4.89941 52.2002c-12.1992 -0.200195 -24.5996 -0.799805 -36.7998 -1.60059zM136.4 158.8c2.39941 -3.7002 1.59961 -9.09961 -8 -12.5c43.7998 -47 92.6992 -85.7002 155.899 -106.5
+c67.5 19.2002 115.601 60 163.2 107c-11.0996 4.2998 -7.7002 10.2998 -7.2998 11.6006c-8.90039 0.799805 -17.9004 1.89941 -26.5 2.69922c-9.5 -33 -36 -52.8994 -46.7998 -31.5996c-2.7002 5.2002 -3.5 11.7002 -4.60059 16.7998
+c-3.7998 -8.39941 -13.2998 -8.09961 -24.5996 -8.89941c-13.2002 -1.10059 -31.6006 -1.30078 -44 3c-3 -12.9004 -11.1006 -12.9004 -26.7998 -14.3008c-14.1006 -1.39941 -48.7002 -4.09961 -54.9004 10.8008c-1.09961 -28.7002 -35.0996 -10 -45.0996 7
+c-3.2002 5.69922 -5.40039 11.3994 -7 17.5996c-7.80078 -0.799805 -15.7002 -1.59961 -23.5 -2.7002zM114.8 -13.7002c0.5 2.5 0.799805 5.2002 0.799805 8.2002c-5.69922 23.2002 -18.5996 49.7002 -33.5 54c-22.3994 6.7002 -68.8994 -23.5 -66.1992 -54.5996
+c12.6992 -19.5 40 -35.7002 59.1992 -36.5c17.8008 -0.800781 35.9004 11.0996 39.7002 28.8994zM106.1 52.2998c9 -16 15.5 -33.2998 16.7002 -51.8994c33.5 19.3994 69.1006 35.6992 105.9 47c-38.7002 20.5 -68.1006 47.7998 -97.2998 77
+c-2.10059 -1.30078 -5.10059 -2.40039 -7.80078 -3.5c-1.59961 -4.90039 8.7002 -5.30078 5.40039 -12.4004c-2.09961 -4.09961 -8.59961 -7.59961 -15.0996 -9.2002c-2.10059 -2.7002 -5.10059 -4.89941 -7.80078 -6.5h-0.299805
+c-0.200195 -13.5 -0.200195 -27 0.299805 -40.5zM443.7 -12.2998c-36.7998 21.2998 -74.1006 41.2998 -115.601 53c-13.7998 -6.2002 -27.8994 -11.2998 -42.1992 -15.4004c-2.10059 -0.799805 -2.10059 -0.799805 -4.30078 0
+c-11.8994 3.7002 -23.2998 8.10059 -34.8994 13.2002c-40.2002 -11.5996 -77.2998 -29.2002 -112.4 -50.7998h-0.299805v-0.299805c0.299805 0 0.299805 0 0.299805 0.299805c103.2 -4.10059 206.4 -3.5 309.4 0zM454.2 0.0996094c1 14.7002 7.2002 35.8008 16.5 51.7002
+l-0.299805 -0.299805c0.5 13.7002 0.799805 27.5 0.799805 41.2998c-3 1.7002 -5.7002 4.10059 -8.10059 6.7998c-6.5 1.30078 -12.8994 5.10059 -15.0996 8.90039c-1.90039 4.09961 1.2998 7.59961 5.90039 10.2998c-0.200195 0.5 -0.5 1.60059 -0.5 2.40039
+c-3 0.799805 -5.40039 1.7998 -7.60059 3.2002c-31.5996 -29.4004 -65.3994 -56.7002 -103.5 -76.7002c38.9004 -11.7002 76 -28.1006 111.9 -47.6006zM560.1 -6.09961c3 31.0996 -43.5 61.3994 -66.1992 54.5c-14.6006 -4.30078 -27.8008 -30.8008 -33.5 -54
+c0 -23.8008 21.1992 -37.9004 40.5 -37c19.1992 0.799805 46.5 17 59.1992 36.5zM372.9 372.8c-35.7002 39.2002 -81.4004 47.7998 -126 23.5c25.1992 56.2002 122.199 48.6006 126 -23.5zM74.7998 40.9004c14.9004 1.89941 24.6006 -19.2002 18.6006 -30.8008
+c-4.80078 -9.69922 -23.7002 -24.0996 -35.9004 -27.2998c-16.5 -4.59961 -32.2002 3.2998 -32.2002 14.9004c0 17.7998 33.7998 41.5996 49.5 43.2002zM290.7 217.1c-30.9004 0 -57.6006 25.7002 -50.2998 59.8008c13.1992 -20.7002 46.5 -12 46.5 11.2998
+c0 10 -7 18.5996 -16.5 21.5996c31.6992 13.7998 72.1992 -8.2002 72.1992 -44.2998c0 -26.7998 -23.2998 -48.4004 -51.8994 -48.4004zM68 -26.0996c-0.5 8.39941 20.2998 23.5 29.2002 25.0996c8.59961 1.59961 12.7002 -11.4004 9.7002 -18.4004
+c-2.7002 -5.69922 -10.5 -13.5 -17.3008 -16.1992c-9.39941 -3.2002 -21.0996 3 -21.5996 9.5zM501.2 40.9004c15.7002 -1.60059 49.5 -25.4004 49.5 -43.2002c0 -11.7002 -15.7002 -19.5 -32.2002 -14.9004c-12.0996 3.2002 -31.2998 17.6006 -36.2002 27.2998
+c-5.7002 11.6006 4 32.7002 18.9004 30.8008zM478.8 -1c8.90039 -1.59961 30 -16.7002 29.1006 -25.0996c-0.200195 -6.5 -12.1006 -12.7002 -21.3008 -9.5c-7 2.69922 -14.8994 10.5 -17.2998 16.1992c-2.89941 7.10059 1.10059 20 9.5 18.4004z" />
+ <glyph glyph-name="opencart" unicode="&#xf23d;" horiz-adv-x="639"
+d="M423.3 7.2998c0 -25.2998 -20.2998 -45.5996 -45.5996 -45.5996s-45.7998 20.2998 -45.7998 45.5996s20.5996 45.7998 45.7998 45.7998c25.3994 0 45.5996 -20.5 45.5996 -45.7998zM169.4 53.0996c25.2998 0 45.7998 -20.5 45.7998 -45.7998
+s-20.5 -45.5996 -45.7998 -45.5996c-25.3008 0 -45.6006 20.3994 -45.6006 45.5996s20.2998 45.7998 45.6006 45.7998zM461.1 323.1c302.2 0 169.5 -67.1992 -17.1992 -233.899c59.1992 102.8 262.5 193.899 -70.8008 188.899c-319.8 -4.69922 -338.699 92.5 -373.1 144.2
+c81.9004 -86.3994 158.9 -99.2002 461.1 -99.2002z" />
+ <glyph glyph-name="expeditedssl" unicode="&#xf23e;" horiz-adv-x="496"
+d="M248 404.6c117.4 0 212.6 -95.1992 212.6 -212.6s-95.1992 -212.6 -212.6 -212.6s-212.6 95.1992 -212.6 212.6s95.1992 212.6 212.6 212.6zM150.6 271.7h-0.199219v-26.6006c0 -5 3.89941 -8.89941 8.89941 -8.89941h17.7002c5 0 8.90039 3.89941 8.90039 8.89941
+v26.6006c0 82.0996 124 82.0996 124 0v-26.6006c0 -5 3.89941 -8.89941 8.89941 -8.89941h17.7002c5 0 8.90039 3.89941 8.90039 8.89941v26.6006c0 53.7002 -43.7002 97.3994 -97.4004 97.3994s-97.4004 -43.6992 -97.4004 -97.3994zM389.7 68v141.7
+c0 9.7002 -8 17.7002 -17.7002 17.7002h-248c-9.7002 0 -17.7002 -8 -17.7002 -17.7002v-141.7c0 -9.7002 8 -17.7002 17.7002 -17.7002h248c9.7002 0 17.7002 8 17.7002 17.7002zM141.7 205.3v-132.899c0 -2.5 -1.90039 -4.40039 -4.40039 -4.40039h-8.89941
+c-2.5 0 -4.40039 1.90039 -4.40039 4.40039v132.899c0 2.5 1.90039 4.40039 4.40039 4.40039h8.89941c2.5 0 4.40039 -1.90039 4.40039 -4.40039zM283.4 156.6c0 -13 -7.2002 -24.3994 -17.7002 -30.3994v-31.6006c0 -5 -3.90039 -8.89941 -8.90039 -8.89941h-17.7002
+c-5 0 -8.89941 3.89941 -8.89941 8.89941v31.6006c-10.5 6.09961 -17.7002 17.3994 -17.7002 30.3994c0 19.7002 15.7998 35.4004 35.4004 35.4004c19.5996 0 35.5 -15.7998 35.5 -35.4004zM248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248
+s111 248 248 248zM248 -38.2998c127 0 230.3 103.3 230.3 230.3s-103.3 230.3 -230.3 230.3s-230.3 -103.3 -230.3 -230.3s103.3 -230.3 230.3 -230.3z" />
+ <glyph glyph-name="cc-jcb" unicode="&#xf24b;" horiz-adv-x="576"
+d="M431.5 203.7v32.2998c41.2002 0 38.5 -0.200195 38.5 -0.200195c7.2998 -1.2998 13.2998 -7.2998 13.2998 -16c0 -8.7998 -6 -14.5 -13.2998 -15.7998c-1.2002 -0.400391 -3.2998 -0.299805 -38.5 -0.299805zM474.3 183.5c7.5 -1.5 13.5 -8.2998 13.5 -17
+c0 -9 -6 -15.5 -13.5 -17c-2.7998 -0.700195 -3.2002 -0.5 -42.7998 -0.5v35c39.5 0 40 0.200195 42.7998 -0.5zM576 368v-352c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h480c26.5 0 48 -21.5 48 -48zM182 255.7h-57
+c0 -67.1006 10.7002 -109.7 -35.7998 -109.7c-19.5 0 -38.7998 5.7002 -57.2002 14.7998v-28c30 -8.2998 68 -8.2998 68 -8.2998c97.9004 0 82 47.7002 82 131.2zM360.5 251.2c-63.4004 16 -165 14.8994 -165 -59.2998c0 -77.1006 108.2 -73.6006 165 -59.2002v28.2998
+c-47.5996 -24.7002 -107.5 -22 -107.5 31s59.7998 55.5996 107.5 31.2002v28zM544 161.5c0 18.5 -16.5 30.5 -38 32v0.799805c19.5 2.7002 30.2998 15.5 30.2998 30.2002c0 19 -15.7002 30 -37 31c0 0 6.2998 0.299805 -120.3 0.299805v-127.5h122.7
+c24.2998 -0.0996094 42.2998 12.9004 42.2998 33.2002z" />
+ <glyph glyph-name="cc-diners-club" unicode="&#xf24c;" horiz-adv-x="576"
+d="M239.7 368.1c97.2002 0 175.8 -78.5996 175.8 -175.8c0 -96.8994 -78.5996 -175.8 -175.8 -175.8c-96.9004 0 -175.8 78.9004 -175.8 175.8c0 97.2002 78.8994 175.8 175.8 175.8zM199.8 88.5v207.9c-41.7002 -16.2002 -71.3994 -56.7002 -71.3994 -104.101
+c0 -47.3994 29.6992 -87.8994 71.3994 -103.8zM279.6 88.2002c41.7002 16.2002 71.4004 56.7002 71.4004 104.1c0 47.4004 -29.7002 87.9004 -71.4004 104.101v-208.2zM528 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48v352
+c0 26.5 21.5 48 48 48h480zM329.7 0c105 0 200.7 85.5 200.7 190.2c0 114.6 -95.7002 193.8 -200.7 193.8h-90.2998c-106.2 0 -193.801 -79.2002 -193.801 -193.8c0 -104.7 87.6006 -190.2 193.801 -190.2h90.2998z" />
+ <glyph glyph-name="creative-commons" unicode="&#xf25e;" horiz-adv-x="496"
+d="M245.83 233.13l-33.2197 -17.2803c-9.43066 19.5801 -25.2402 19.9307 -27.46 19.9307c-22.1309 0 -33.2207 -14.6104 -33.2207 -43.8398c0 -23.5703 9.20996 -43.8408 33.2207 -43.8408c14.4697 0 24.6494 7.09082 30.5693 21.2607l30.5498 -15.5
+c-6.16992 -11.5107 -25.6895 -38.9805 -65.0996 -38.9805c-22.5996 0 -73.96 10.3203 -73.96 77.0498c0 58.6904 43 77.0605 72.6299 77.0605c30.7197 0.00976562 52.7002 -11.9502 65.9902 -35.8604zM388.88 233.13l-32.7803 -17.2803
+c-9.5 19.7705 -25.7197 19.9307 -27.8994 19.9307c-22.1406 0 -33.2197 -14.6104 -33.2197 -43.8398c0 -23.5508 9.22949 -43.8408 33.2197 -43.8408c14.4502 0 24.6494 7.09082 30.54 21.2607l31 -15.5c-2.10059 -3.75 -21.3906 -38.9805 -65.0898 -38.9805
+c-22.6904 0 -73.96 9.87012 -73.96 77.0498c0 58.6699 42.9697 77.0605 72.6299 77.0605c30.71 0.00976562 52.5801 -11.9502 65.5596 -35.8604zM247.56 439.95c141.82 0 248.44 -110.13 248.44 -248c0 -147.13 -118.51 -248 -248.44 -248
+c-133.96 0 -247.56 109.51 -247.56 248c0 132.939 104.74 248 247.56 248zM248.43 -10.8604c103.16 0 202.83 81.1299 202.84 202.82c0 113.8 -90.2891 203.26 -202.819 203.26c-118.29 0 -203.72 -97.8496 -203.72 -203.27c0 -109.771 91.1592 -202.811 203.699 -202.811z
+" />
+ <glyph glyph-name="gg" unicode="&#xf260;" horiz-adv-x="512"
+d="M179.2 217.6l102.399 -102.399l-102.399 -102.4l-179.2 179.2l179.2 179.2l44.7998 -44.7998l-25.5996 -25.6006l-19.2002 19.2002l-128 -128l128 -128l51.5 51.5l-77.1006 76.5zM332.8 371.2l179.2 -179.2l-179.2 -179.2l-44.7998 44.7998l25.5996 25.6006
+l19.2002 -19.2002l128 128l-128 128l-51.5 -51.5l77.1006 -76.5l-25.6006 -25.5996l-102.399 102.399z" />
+ <glyph glyph-name="gg-circle" unicode="&#xf261;" horiz-adv-x="514"
+d="M257 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM207.5 65.2002l75 75.2002l-77.2002 77.1992l-24.3994 -24.3994l53.0996 -52.9004l-26.5996 -26.5996l-77.2002 77.2002l77.2002 77.1992l11.0996 -11.0996l24.2002 24.2002
+l-35.2002 35.3994l-125.7 -125.699zM306.5 67.4004l125.7 125.6l-125.7 125.7l-75 -75l77.2002 -77.2002l24.3994 24.4004l-53.0996 52.8994l26.5 26.5l77.2002 -77.2002l-77.2002 -77.1992l-11.0996 11.0996l-24.1006 -24.4004z" />
+ <glyph glyph-name="tripadvisor" unicode="&#xf262;" horiz-adv-x="576"
+d="M166.4 167.479c0 -13.2354 -10.7305 -23.9658 -23.9668 -23.9658c-13.2354 0 -23.9658 10.7305 -23.9658 23.9658c0 13.2363 10.7305 23.9668 23.9658 23.9668c13.2363 0 23.9668 -10.7295 23.9668 -23.9668zM431.362 191.435
+c13.2295 0 23.9551 -10.7246 23.9561 -23.9561c0 -13.2305 -10.7266 -23.9551 -23.9561 -23.9551c-13.2314 0 -23.9561 10.7256 -23.9561 23.9551c0 13.2314 10.7256 23.9561 23.9561 23.9561zM520.75 51.9453c-62.667 -49.1045 -153.276 -38.1094 -202.379 24.5586
+l-30.9795 -46.3252l-30.6826 45.9395c-48.2773 -60.3906 -135.622 -71.8916 -197.885 -26.0547c-64.0586 47.1572 -77.7588 137.315 -30.6016 201.373c-5.05762 17.1221 -17.7021 42.7236 -28.2227 57.1475l90.2861 0.0498047
+c48.0039 29.8701 132.851 54.1123 189.389 54.1123c2.11914 0 5.55762 -0.0371094 7.67578 -0.0820312c1.72363 0.0302734 4.52246 0.0556641 6.24609 0.0556641c55.5518 0 138.851 -23.9258 185.936 -53.4043l96.2178 -0.0742188
+c-10.6191 -14.5371 -23.3213 -40.3643 -28.3516 -57.6494c46.793 -62.7471 34.9639 -151.37 -26.6484 -199.646zM259.366 166.239c-0.00683594 63.5566 -51.5352 115.075 -115.092 115.067c-63.5576 -0.00683594 -115.074 -51.5342 -115.068 -115.092
+c0.00683594 -63.5566 51.5352 -115.075 115.092 -115.067c63.5127 0.0742188 114.984 51.5381 115.068 115.052v0.0400391zM287.957 176.694c5.43262 73.4395 65.5098 130.884 139.12 133.021c-35.5576 15.374 -95.8555 27.8506 -134.594 27.8506
+c-1.41699 0 -3.7168 -0.0166016 -5.13379 -0.0380859c-0.953125 0.00878906 -2.50098 0.0166016 -3.45508 0.0166016c-39.2324 0 -100.479 -12.2168 -136.709 -27.2695c74.3447 -1.58203 135.3 -59.4248 140.771 -133.581zM539.663 205.461
+c-21.9922 59.6338 -88.1621 90.1484 -147.795 68.1572c-59.6338 -21.9922 -90.1484 -88.1621 -68.1572 -147.795v-0.0322266c22.0381 -59.6074 88.1982 -90.0908 147.827 -68.1133c59.6152 22.0039 90.1133 88.1621 68.125 147.783zM213.624 167.486v-0.115234
+c-0.0566406 -39.3281 -31.9863 -71.1631 -71.3145 -71.1064c-39.3271 0.0576172 -71.1621 31.9863 -71.1055 71.3145s31.9863 71.1631 71.3135 71.1055c39.2598 -0.115234 71.042 -31.9395 71.1064 -71.1982zM189.112 167.486v0.0839844
+c-0.0517578 25.7832 -20.9941 46.6445 -46.7783 46.5938s-46.6445 -20.9941 -46.5938 -46.7773c0.0507812 -25.7842 20.9941 -46.6445 46.7764 -46.5938c25.7266 0.113281 46.5371 20.9678 46.5957 46.6934zM502.535 167.486
+c-0.0205078 -39.3281 -31.918 -71.2422 -71.2471 -71.2217c-39.3291 0.0214844 -71.1943 31.918 -71.1729 71.2471c0.0195312 39.3281 31.918 71.1943 71.2471 71.1729c39.29 -0.0654297 71.1211 -31.9082 71.1729 -71.1982zM478.031 167.494
+c-0.00878906 25.7842 -20.918 46.6787 -46.7021 46.6699s-46.6787 -20.918 -46.6699 -46.7021s20.918 -46.6777 46.7021 -46.6699c25.7646 0.0458984 46.6357 20.9277 46.6699 46.6934v0.00878906z" />
+ <glyph glyph-name="odnoklassniki" unicode="&#xf263;" horiz-adv-x="320"
+d="M275.1 114c-27.3994 -17.4004 -65.0996 -24.2998 -90 -26.9004l20.9004 -20.5996l76.2998 -76.2998c27.9004 -28.6006 -17.5 -73.2998 -45.7002 -45.7002c-19.0996 19.4004 -47.0996 47.4004 -76.2998 76.5996l-76.2998 -76.5
+c-28.2002 -27.5 -73.5996 17.6006 -45.4004 45.7002c19.4004 19.4004 47.1006 47.4004 76.3008 76.2998l20.5996 20.6006c-24.5996 2.59961 -62.9004 9.09961 -90.5996 26.8994c-32.6006 21 -46.9004 33.3008 -34.3008 59c7.40039 14.6006 27.7002 26.9004 54.6006 5.7002
+c0 0 36.2998 -28.8994 94.8994 -28.8994c58.6006 0 94.9004 28.8994 94.9004 28.8994c26.9004 21.1006 47.0996 8.90039 54.5996 -5.7002c12.4004 -25.6992 -1.89941 -38 -34.5 -59.0996zM30.2998 318.3c0 71.7002 58.2998 129.7 129.7 129.7s129.7 -58 129.7 -129.7
+c0 -71.3994 -58.2998 -129.399 -129.7 -129.399s-129.7 58 -129.7 129.399zM96.2998 318.3c0 -35.0996 28.6006 -63.7002 63.7002 -63.7002s63.7002 28.6006 63.7002 63.7002c0 35.4004 -28.6006 64 -63.7002 64s-63.7002 -28.5996 -63.7002 -64z" />
+ <glyph glyph-name="odnoklassniki-square" unicode="&#xf264;"
+d="M184.2 270.9c0 22.0996 17.8994 40 39.7998 40s39.7998 -17.9004 39.7998 -40c0 -22 -17.8994 -39.8008 -39.7998 -39.8008s-39.7998 17.9004 -39.7998 39.8008zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352
+c26.5 0 48 -21.5 48 -48zM142.9 270.9c0 -44.6006 36.3994 -80.9004 81.0996 -80.9004s81.0996 36.2002 81.0996 80.9004c0 44.7998 -36.3994 81.0996 -81.0996 81.0996s-81.0996 -36.2002 -81.0996 -81.0996zM317.4 180.2
+c-4.60059 9.09961 -17.3008 16.7998 -34.1006 3.59961c0 0 -22.7002 -18 -59.2998 -18s-59.2998 18 -59.2998 18c-16.7998 13.2002 -29.5 5.5 -34.1006 -3.59961c-7.89941 -16.1006 1.10059 -23.7002 21.4004 -37c17.2998 -11.1006 41.2002 -15.2002 56.5996 -16.7998
+l-12.8994 -12.9004c-18.2002 -18 -35.5 -35.5 -47.7002 -47.7002c-17.5996 -17.5996 10.7002 -45.7998 28.4004 -28.5996l47.6992 47.8994c18.2002 -18.1992 35.7002 -35.6992 47.7002 -47.8994c17.6006 -17.2002 46 10.7002 28.6006 28.5996l-47.7002 47.7002l-13 12.9004
+c15.5 1.59961 39.0996 5.89941 56.2002 16.7998c20.3994 13.2998 29.2998 21 21.5 37z" />
+ <glyph glyph-name="get-pocket" unicode="&#xf265;"
+d="M407.6 384c22.7002 0 40.4004 -18.2002 40.4004 -40.5996v-135.2c0 -124.7 -99.7998 -224.2 -223.8 -224.2c-124.5 0 -224.2 99.5 -224.2 224.2v135.2c0 22.0996 18.5 40.5996 40.5996 40.5996h367zM245.6 115.5c111.9 107.5 114.801 105.4 114.801 123.2
+c0 16.8994 -13.8008 30.7002 -30.7002 30.7002c-16.9004 0 -14.9004 -2.40039 -105.5 -89.3008c-89.1006 85.5 -88.2002 89.3008 -105.2 89.3008c-16.9004 0 -30.7002 -13.8008 -30.7002 -30.7002c0 -18.1006 1.2002 -14.2998 114.9 -123.2
+c11 -11.0996 30 -11.7998 42.3994 0z" />
+ <glyph glyph-name="wikipedia-w" unicode="&#xf266;" horiz-adv-x="640"
+d="M640 396.8l-0.299805 -12.2002c-28.1006 -0.799805 -45 -15.7998 -55.7998 -40.2998c-25 -57.7998 -103.301 -240 -155.301 -358.6h-13.5996l-81.9004 193.1c-32.5 -63.5996 -68.2998 -130 -99.1992 -193.1c-0.300781 -0.299805 -15 0 -15 0.299805
+c-46.9004 109.7 -96.1006 218.6 -143.101 328.6c-11.3994 26.7002 -49.3994 70 -75.5996 69.7002c0 3.10059 -0.299805 10 -0.299805 14.2002h161.899v-13.9004c-19.2002 -1.09961 -52.7998 -13.2998 -43.2998 -34.1992c21.9004 -49.7002 103.6 -240.301 125.6 -288.601
+c15 29.7002 57.8008 109.2 75.3008 142.8c-13.9004 28.3008 -58.6006 133.9 -72.8008 160c-9.69922 17.8008 -36.0996 19.4004 -55.7998 19.7002v13.9004l142.5 -0.299805v-13.1006c-19.3994 -0.599609 -38.0996 -7.7998 -29.3994 -26.0996
+c18.8994 -40 30.5996 -68.1006 48.0996 -104.7c5.59961 10.7998 34.7002 69.4004 48.0996 100.8c8.90039 20.6006 -3.89941 28.6006 -38.5996 29.4004c0.299805 3.59961 0 10.2998 0.299805 13.5996c44.4004 0.299805 111.101 0.299805 123.101 0.600586v-13.6006
+c-22.5 -0.799805 -45.8008 -12.7998 -58.1006 -31.7002l-59.2002 -122.8c6.40039 -16.0996 63.3008 -142.8 69.2002 -156.7l122.4 282.601c-8.60059 23.0996 -36.4004 28.0996 -47.2002 28.2998v13.9004l127.8 -1.10059z" />
+ <glyph glyph-name="safari" unicode="&#xf267;" horiz-adv-x="512"
+d="M236.9 191.2c0 9.09961 6.59961 17.7002 16.2998 17.7002c8.89941 0 17.3994 -6.40039 17.3994 -16.1006c0 -9.09961 -6.39941 -17.7002 -16.0996 -17.7002c-9 0 -17.5996 6.7002 -17.5996 16.1006zM504 192c0 -137 -111 -248 -248 -248s-248 111 -248 248
+s111 248 248 248s248 -111 248 -248zM477.4 192c0 122.3 -99.1006 221.4 -221.4 221.4s-221.4 -99.1006 -221.4 -221.4s99.1006 -221.4 221.4 -221.4s221.4 99.1006 221.4 221.4zM404.9 95.4004c0 -3.60059 13 -10.2002 16.2998 -12.2002
+c-27.4004 -41.5 -69.7998 -71.4004 -117.9 -83.2998l-4.39941 18.5c-0.300781 2.5 -1.90039 2.7998 -4.2002 2.7998c-1.90039 0 -3 -2.7998 -2.7998 -4.2002l4.39941 -18.7998c-13.2998 -2.7998 -26.7998 -4.2002 -40.3994 -4.2002c-36.3008 0 -72 10.2002 -103 29.0996
+c1.69922 2.80078 12.1992 18 12.1992 20.2002c0 1.90039 -1.69922 3.60059 -3.59961 3.60059c-3.90039 0 -12.2002 -16.6006 -14.7002 -19.9004c-41.7998 27.7002 -72 70.5996 -83.5996 119.6l19.0996 4.2002c2.2002 0.600586 2.7998 2.2002 2.7998 4.2002
+c0 1.90039 -2.7998 3 -4.39941 2.7998l-18.7002 -4.2998c-2.5 12.7002 -3.90039 25.5 -3.90039 38.5c0 37.0996 10.5 73.5996 30.2002 104.9c2.7998 -1.7002 16.1006 -10.8008 18.2998 -10.8008c1.90039 0 3.60059 1.40039 3.60059 3.30078
+c0 3.89941 -14.7002 11.2998 -18 13.5996c28.2002 41.2002 71.0996 70.9004 119.8 81.9004l4.2002 -18.5c0.599609 -2.2002 2.2002 -2.80078 4.2002 -2.80078s3 2.80078 2.7998 4.40039l-4.2002 18.2998c12.2002 2.2002 24.5996 3.60059 37.0996 3.60059
+c37.1006 0 73.3008 -10.5 104.9 -30.2002c-1.90039 -2.7998 -10.7998 -15.7998 -10.7998 -18c0 -1.90039 1.39941 -3.60059 3.2998 -3.60059c3.90039 0 11.2998 14.4004 13.2998 17.7002c41 -27.7002 70.2998 -70 81.7002 -118.2l-15.5 -3.2998
+c-2.5 -0.599609 -2.7998 -2.2002 -2.7998 -4.39941c0 -1.90039 2.7998 -3 4.2002 -2.80078l15.7998 3.60059c2.5 -12.7002 3.89941 -25.7002 3.89941 -38.7002c0 -36.2998 -10 -72 -28.7998 -102.7c-2.7998 1.40039 -14.3994 9.7002 -16.5996 9.7002
+c-2.10059 0 -3.7998 -1.7002 -3.7998 -3.59961zM371.7 337.6c-13 -12.1992 -134.2 -123.699 -137.601 -129.5l-96.5996 -160.5c12.7002 11.9004 134.2 124 137.3 129.301z" />
+ <glyph glyph-name="chrome" unicode="&#xf268;" horiz-adv-x="495"
+d="M131.5 230.5l-76.4004 117.4c47.6006 59.1992 119 91.7998 192 92.0996c42.3008 0.299805 85.5 -10.5 124.801 -33.2002c43.3994 -25.2002 76.3994 -61.3994 97.3994 -103l-205.3 10.7998c-58.0996 3.40039 -113.4 -29.2998 -132.5 -84.0996zM164.4 192
+c0 46.2998 37.3994 83.5996 83.5996 83.5996s83.5996 -37.3994 83.5996 -83.5996s-37.3994 -83.5996 -83.5996 -83.5996s-83.5996 37.3994 -83.5996 83.5996zM479.3 281.2c43.5 -111.9 0 -241.9 -107.399 -303.9c-43.4004 -25.2002 -91.3008 -35.3994 -137.801 -32.8994
+l112.101 172.399c31.8994 49 31.2998 112.9 -6.60059 157.2zM133.7 144.4c26.2998 -51.7002 81.8994 -83.3008 139.5 -72.5l-63.7002 -124.801c-118.7 18.2002 -209.5 120.9 -209.5 244.9c0 50.0996 14.9004 96.9004 40.4004 135.9z" />
+ <glyph glyph-name="firefox" unicode="&#xf269;" horiz-adv-x="480"
+d="M478.1 212.7c1.30078 -7.10059 1.90039 -14.2998 1.90039 -21.6006v-2.7998c-1.40039 -34 -11.5996 -67 -29.5996 -95.8994c-1 -1.5 -1.80078 -2.90039 -2.7002 -4.30078c2.7002 -7.19922 2.59961 -15.0996 -0.400391 -22.1992
+c-5 -19.4004 -16.5996 -36.4004 -32.8994 -48.1006c-10.8008 -8.7002 -22.7002 -16.2002 -35.3008 -22.0996l-1.89941 -0.900391l-1 -0.5c-1.7002 -0.700195 -3.2998 -1.39941 -4.90039 -2.09961c-2.39941 -5.10059 -5.7998 -9.60059 -9.89941 -13.2998
+c-2.5 -3.10059 -30.1006 -35 -113.801 -35c-23.5996 0 -47.1992 3.5 -69.7998 10.2998c0.799805 -0.299805 1.60059 -0.700195 2.40039 -1c-2.60059 0.899414 -5.2002 1.7998 -7.7002 2.7002c-19.0996 5.89941 -37.2002 14.5996 -53.7998 25.7998
+c-40.7002 24.7002 -72.9004 61.2002 -92.2998 104.7c-14.5 31.3994 -21.1006 65.7998 -19.4004 100.3c-2.7998 -8.2998 -5.2002 -16.7002 -7 -25.2998c0 29.1992 5.5 58.0996 16.2002 85.1992c-5.5 -7.89941 -10.2998 -16.2998 -14.2998 -25.0996
+c5.69922 23.0996 14.6992 45.2002 26.7998 65.5996c3.7002 6.10059 7.89941 11.9004 12.7002 17.1006v0.200195c-0.100586 2.69922 0.0996094 5.5 0.5 8.2998c1.5 16.2998 5.69922 32.2002 12.3994 47.0996l0.299805 0.700195c0.100586 0.299805 0 -1 0 -1.7002
+s-0.0996094 -1.2998 0 -1c0.600586 2 1.40039 4 2.30078 5.90039c1 2.09961 2.39941 4.09961 3.89941 5.7998c0.100586 0.0996094 0.200195 0.200195 0.299805 0.400391c0.100586 0.199219 -0.399414 -2 -0.5 -3.10059v-0.5c0.600586 1.2002 1.30078 2.40039 2.2002 4.5
+c2.10059 5.90039 6 11 11.1006 14.5l0.199219 0.100586c-0.299805 -9 1.2002 -17.9004 4.40039 -26.2002v-0.100586c0.299805 -0.399414 0.5 1.30078 0.900391 1.30078c0.0996094 0 0.199219 -0.100586 0.199219 -0.200195
+c0.900391 -1.7998 1.80078 -3.60059 2.7002 -5.2002c1.2998 -2.2002 2.5 -4.2002 3.7002 -6l0.400391 -0.200195l0.199219 0.100586c2.60059 -4.2002 5.90039 -7.80078 9.7002 -10.9004h-0.200195l0.200195 -0.0996094c18.2998 3.59961 37.2002 2 54.6006 -4.7002
+l0.0996094 0.0996094c2.09961 2.60059 4.59961 4.90039 7.2998 6.90039c0 -0.900391 -0.0996094 -1.7998 -0.200195 -2.7002c4 5 9.10059 9 15 11.5c-0.399414 -0.700195 -0.5 -1.40039 -0.5 -2.2002c7.40039 4.2998 15.5 7.40039 23.9004 9
+c1.09961 0 -3.5 -1.7998 -5.09961 -3.09961c3.69922 1.59961 7.69922 2.59961 11.6992 2.7998c6.60059 0.700195 14 -2.09961 12.6006 -2.7002c-2.7998 -1 -5.5 -2.2002 -8.2002 -3.5c-0.799805 -0.700195 3.2002 0.200195 2.40039 -0.5
+c-14 -9.2002 -24.8008 -22.5996 -30.8008 -38.2998v-0.0996094c2.5 -11 11.4004 -19.3008 22.5 -21.1006c31.5 -3 37.5 -5.59961 38.4004 -9.09961v-1.5c-0.0996094 -1 -0.200195 -1.90039 -0.299805 -2.7998c-1.2002 -6.90039 -4.90039 -13.2002 -10.2002 -17.7002
+c-1.40039 -1.2998 -2.90039 -2.5 -4.5 -3.5c-1.09961 -0.700195 -6.40039 -2.7998 -12.7998 -5.60059c-7.90039 -3.19922 -15.5 -7.09961 -22.7002 -11.5996c-1.2998 -0.799805 -2.40039 -1.7002 -3.40039 -2.7002c-0.399414 -0.399414 -1.19922 -1.5 -1.19922 -1.5
+v-0.0996094c0.5 -1.2002 1 -2.40039 1.19922 -3.7002c-1.39941 1.7002 -2.69922 1.09961 -1.89941 -0.5c0.899414 -2.5 1.2998 -5.2002 1.09961 -7.7998c0.200195 -4.7998 -0.700195 -9.60059 -2.59961 -14c-2.10059 1.5 -4.2998 2.89941 -6.60059 4.09961h-0.199219
+c2.5 -1.59961 4.2998 -3.89941 5.39941 -6.59961c0.700195 -2.2002 -0.299805 -2.7002 -0.299805 -2.7002c-1.40039 2 -3.09961 3.59961 -5.2002 4.7002c-3.09961 1.7998 -8.7998 4.7002 -11.3994 5.7998c-0.300781 -0.200195 -0.5 -0.0996094 -0.800781 -0.200195
+c0.800781 -1.2998 2.10059 -3.7998 2.10059 -3.7998s-1.7998 1.09961 -4.7998 2.59961c-3.90039 -1.7998 -7.2002 -4.89941 -9.30078 -8.69922c-3.5 -7.7002 -3.09961 -16.7002 1 -24.1006c4 -6 9.10059 -11.2002 15 -15.2002
+c0.400391 -0.299805 -3.39941 1.10059 -3.09961 0.800781c4.59961 -3.2002 9.40039 -6.10059 14.4004 -8.60059c1.5 -1 -5 1.2002 -3.40039 0.299805c1.40039 -0.899414 2.7998 -1.69922 4.2998 -2.5c22.9004 -12.0996 38.9004 0.400391 56.4004 2.90039
+c16.7998 3 33.7998 -3.59961 44.2002 -17c6 -8.5 -0.600586 -16.7002 -9 -14h-0.200195c-8.60059 2.90039 -19.1006 -4.2998 -36.6006 -14c-17.2998 -8.2998 -36.8994 -10.5996 -55.5996 -6.59961c-4.7998 0.899414 -9.40039 2.09961 -14 3.69922l-2 0.700195
+l0.200195 -0.299805c8.7998 -12.2002 19.8994 -22.5 32.7998 -30.2998c8.7002 -4.40039 17.9004 -7.5 27.4004 -9.2998c8 -1.90039 16.1992 -2.80078 24.5 -2.80078c61 -0.0996094 110.6 49.4004 110.6 110.4c0.0996094 15.9004 -3.09961 31.7998 -9.2998 46.5
+c20.7002 -12.2998 37.5996 -30.2002 48.7998 -51.5c-13.9004 40.5996 -40.2998 56.4004 -64.7002 76.5996c-19.5996 14.8008 -34.7002 34.9004 -43.3994 57.9004c-25.2002 67.7998 33.0996 132.9 33.0996 132.9s-3.59961 -15.1006 27.4004 -44.3008
+c6.39941 -5.89941 16.7998 -14.5 28.8994 -26.6992c1.7002 9.2998 4.2002 18.3994 7.40039 27.2998c2.5 -14.7002 7.7998 -28.7998 15.3994 -41.6006c11.7002 -16.6992 21.9004 -25.5996 30.7002 -40c1.90039 -2.5 3.7998 -5.19922 5.60059 -7.89941
+c5.09961 -7.2002 9.5 -14.7998 13.2998 -22.7998c6 -12 10.7998 -24.5 14.5 -37.4004c3 -10.4004 4.89941 -20.9004 5.7998 -31.5996c2.90039 3.89941 4.7002 5.89941 4.7002 5.89941s0.700195 -2.59961 1.39941 -7.09961zM179.1 310.3
+c-0.5 -1.2002 -0.899414 -2.2998 -1.2998 -3.5c0.400391 1.2002 0.900391 2.40039 1.2998 3.5z" />
+ <glyph glyph-name="opera" unicode="&#xf26a;" horiz-adv-x="496"
+d="M313.9 415.3c-170.2 0 -252.601 -223.8 -147.5 -355.1c36.5 -45.4004 88.5996 -75.6006 147.5 -75.6006c36.2998 0 70.2998 11.1006 99.3994 30.4004c-43.7998 -39.2002 -101.899 -63 -165.3 -63c-3.90039 0 -8 0 -11.9004 0.299805
+c-131.5 6.10059 -236.1 114.601 -236.1 247.7c0 137 111 248 248 248h0.799805c63.1006 -0.299805 120.7 -24.0996 164.4 -63.0996c-29 19.3994 -63.1006 30.3994 -99.2998 30.3994zM415.7 17.5996c-40.9004 -24.6992 -90.7002 -23.5996 -132 5.80078
+c56.2002 20.5 97.7002 91.5996 97.7002 176.6c0 84.7002 -41.2002 155.8 -97.4004 176.6c41.7998 29.2002 91.2002 30.3008 132.9 5c105.899 -98.6992 105.5 -265.699 -1.2002 -364z" />
+ <glyph glyph-name="internet-explorer" unicode="&#xf26b;" horiz-adv-x="511"
+d="M483.049 288.294c25.1963 -45.4473 33.2578 -97.5811 26.8516 -141.162h-328.792c0 -100.432 144.31 -136.029 196.818 -47.4355h120.833c-32.5645 -91.7285 -119.689 -146.022 -216.813 -146.022c-35.1367 0 -70.2725 0.143555 -101.695 15.5732
+c-87.3975 -44.4941 -180.251 -56.5693 -180.251 42.0059c0 45.8066 23.2461 107.096 43.9922 145.022c35.1357 63.7227 81.4121 124.875 135.687 173.168c-43.7061 -18.8604 -91.125 -66.2959 -121.977 -101.158c25.877 112.787 129.466 193.638 237.098 186.457
+c130.032 59.7939 209.673 34.1445 209.673 -38.5771c0 -27.4326 -10.5684 -63.2959 -21.4238 -87.8711zM64.5586 101.123c-73.001 -152.4 11.5254 -172.244 100.267 -123.304c-46.5635 27.4326 -82.5557 72.1533 -100.267 123.304zM180.536 209.996h207.961
+c-2 55.1514 -50.5635 94.8711 -103.981 94.8711c-53.7041 0 -101.979 -39.7197 -103.979 -94.8711zM365.072 397.596c46.2764 -18.002 85.9824 -57.2939 112.263 -99.5859c7.1416 18.8604 14.5693 47.8643 14.5693 67.8672c0 32.0049 -22.8525 53.7217 -54.2744 53.7217
+c-23.9951 0 -51.1328 -11.7158 -72.5576 -22.0029z" />
+ <glyph glyph-name="contao" unicode="&#xf26d;" horiz-adv-x="512"
+d="M45.4004 143c14.3994 -67.0996 26.3994 -129 68.1992 -175h-79.5996c-18.7002 0 -34 15.2002 -34 34v380c0 18.7002 15.2002 34 34 34h57.7002c-13.7998 -12.5996 -26.1006 -27.2002 -36.9004 -43.5996c-45.3994 -70 -27 -146.801 -9.39941 -229.4zM478 416
+c18.7998 0 34 -15.2002 34 -34v-380.1c0 -18.8008 -15.2998 -34 -34 -34h-52.0996c38.6992 38.3994 60.5996 92.0996 57.3994 163.6l-137.399 -29.5996c-1.7002 -32.5 -12.9004 -63.8008 -57.4004 -73.2002c-24.9004 -5.2998 -45.4004 0.599609 -58.2998 11.7002
+c-15.7998 13.5 -28.4004 31 -49.5 131.199c-21.4004 100.5 -17 121.601 -8.2002 140.301c7.2998 15.2998 23.7002 29.2998 48.2998 34.5996c44.7998 9.40039 67.7002 -14.9004 82.6006 -43.9004l137.1 29.3008c-13.5 34.5996 -31.2998 62.6992 -52.7002 84.0996h90.2002z
+" />
+ <glyph glyph-name="500px" unicode="&#xf26e;" horiz-adv-x="447"
+d="M103.3 103.7c-6.5 14.2002 -6.89941 18.2998 7.40039 23.0996c25.5996 8 8 -9.2002 43.2002 -49.2002h0.299805v93.9004c1.2002 50.2002 44 92.2002 97.7002 92.2002c53.8994 0 97.6992 -43.5 97.6992 -96.7998c0 -63.4004 -60.7998 -113.2 -128.5 -93.3008
+c-10.5 4.2002 -2.09961 31.7002 8.5 28.6006c53 0 89.4004 10.0996 89.4004 64.3994c0 61 -77.0996 89.6006 -116.9 44.6006c-23.5 -26.4004 -17.5996 -42.1006 -17.5996 -157.601c50.7002 -31 118.3 -22 160.4 20.1006c24.7998 24.7998 38.5 58 38.5 93
+c0 35.2002 -13.8008 68.2002 -38.8008 93.2998c-24.7998 24.7998 -57.7998 38.5 -93.2998 38.5s-68.7998 -13.7998 -93.5 -38.5c-0.299805 -0.299805 -16 -16.5 -21.2002 -23.9004l-0.5 -0.599609c-3.2998 -4.7002 -6.2998 -9.09961 -20.0996 -6.09961
+c-6.90039 1.69922 -14.2998 5.7998 -14.2998 11.7998v186.8c0 5 3.89941 10.5 10.5 10.5h241.3c8.2998 0 8.2998 -11.5996 8.2998 -15.0996c0 -3.90039 0 -15.1006 -8.2998 -15.1006h-223.2v-132.899h0.299805c104.2 109.8 282.801 36 282.801 -108.9
+c0 -178.1 -244.801 -220.3 -310.101 -62.7998zM166.6 364.5c3.80078 18.7998 145.101 50.7998 238.301 -38.2002c8.5 -7.5 -9.5 -22.7998 -14.3008 -22.7998c-6.59961 0 -84.5996 87.9004 -209.399 40.4004c-10 -3.90039 -15.1006 16.3994 -14.6006 20.5996zM393 33.2998
+c8.09961 8 27.5996 -12.5996 20.7002 -20.3994c-135.601 -135.601 -357.601 -52.1006 -381.601 121.3c-1.5 10.7002 28.9004 15.5 28.9004 3.2998c33 -165 222 -214.1 332 -104.2zM213.6 141.4c0 3.39941 2.30078 4.69922 20.4004 22.5996l-18.2002 18.2002
+c-5.59961 5.59961 7.40039 17.2998 12.4004 17.2998c3.09961 0 2.89941 -0.700195 21.5 -19.5l17.8994 17.9004c6.10059 6.09961 22.5 -8.90039 16.2002 -15.7002l-18.2002 -18.2002l17.3008 -17.2998c7.7998 -7.7998 -5.30078 -18.2002 -10.7002 -18.2002
+c-3.2002 0 -2.7002 0.200195 -22.2998 19.5c-19.7002 -19.7002 -18.5 -19.5 -22.3008 -19.5c-2.39941 0 -5.5 1.40039 -8.5 4.40039c-1.19922 1.19922 -5.5 4.5 -5.5 8.5z" />
+ <glyph glyph-name="amazon" unicode="&#xf270;" horiz-adv-x="447"
+d="M257.2 285.3c0 39.2998 5.2002 69.2002 -35.5 69.1006c0 0 -37.9004 0 -54.2002 -49.5l-73.5 6.7998c0 49.2998 46.7002 104.3 134.7 104.3c87.7998 0 112.3 -57 112.3 -82.2998v-147.101c0 -27.5 32.2998 -52.7998 32.2998 -52.7998l-56.7998 -56
+c-9.90039 9.2998 -38.7998 36.6006 -45.2998 46.7998c-45.2002 -70.7998 -183.5 -66.2998 -183.5 43.2002c0 102 120.8 115.7 169.5 117.5zM257.2 198.5v40.5996c-33.7002 -1.09961 -84.2002 -10.5996 -84.2002 -57.7998c0 -50.7998 84.2002 -62.7998 84.2002 17.2002z
+M393.2 35c-7.7002 -10 -70 -67 -174.5 -67s-184.5 71.5 -209 101c-6.7998 7.7002 1 11.2998 5.5 8.2998c73.2998 -44.5 187.8 -117.8 372.5 -30.2998c7.5 3.7002 13.2998 -2 5.5 -12zM433 32.7998c-6.5 -15.7998 -16 -26.7998 -21.2002 -31
+c-5.5 -4.5 -9.5 -2.7002 -6.5 3.7998s19.2998 46.5 12.7002 55c-6.5 8.30078 -37 4.30078 -48 3.2002c-10.7998 -1 -13 -2 -14 0.299805c-2.2998 5.7002 21.7002 15.5 37.5 17.5c15.7002 1.80078 41 0.800781 46 -5.69922c3.7002 -5.10059 0 -27.1006 -6.5 -43.1006z" />
+ <glyph glyph-name="houzz" unicode="&#xf27c;" horiz-adv-x="414"
+d="M258.9 117.3h-104.601v-149.3h-154.3v448h109.5v-104.5l305.1 -85.5996v-257.9h-155.699v149.3z" />
+ <glyph glyph-name="vimeo-v" unicode="&#xf27d;"
+d="M447.8 294.4c-2 -43.6006 -32.3994 -103.301 -91.3994 -179.101c-60.9004 -79.2002 -112.4 -118.8 -154.601 -118.8c-26.0996 0 -48.2002 24.0996 -66.2998 72.2998c-35.2002 129.2 -50.2002 204.9 -79.2998 204.9c-3.40039 0 -15.1006 -7.10059 -35.2002 -21.1006
+l-21 27.2002c51.5996 45.2998 100.9 95.7002 131.8 98.5c34.9004 3.40039 56.2998 -20.5 64.4004 -71.5c28.7002 -181.5 41.3994 -208.899 93.5996 -126.7c18.7002 29.6006 28.7998 52.1006 30.2002 67.6006c4.7998 45.8994 -35.7998 42.7998 -63.2998 31
+c22 72.0996 64.0996 107.1 126.2 105.1c45.7998 -1.2002 67.5 -31.0996 64.8994 -89.3994z" />
+ <glyph glyph-name="black-tie" unicode="&#xf27e;"
+d="M0 416h448v-448h-448v448zM316.5 90.7998l-64.5 184l64.4004 86.6006h-184.9l64.5 -86.6006l-64.5 -184l92.5 -88.7002z" />
+ <glyph glyph-name="fonticons" unicode="&#xf280;"
+d="M0 416h448v-448h-448v448zM187 275.1c11.9004 0 16.5996 -4.2998 16.2998 -23l50.7002 6.10059c0 44.5996 -30.5996 52.7998 -64.7002 52.7998c-50.7998 0 -77.2998 -20.4004 -77.2998 -70v-21h-28v-37.4004h22.2002c2.89941 0 5.7998 0 5.7998 -2.2998v-111.399
+c0 -5.60059 -1.5 -7.30078 -6.7002 -7.90039l-21.2998 -2v-25.7002h130.7v25.1006l-43.5 4.09961c-5.2002 0.599609 -3.2002 1.5 -3.2002 7.2998v112.9h55.7002l11.0996 37.2998h-67.3994c-2.90039 0 0.599609 2 0.599609 4.40039v23.2998
+c0 17.5 0.599609 27.3994 19 27.3994zM261.3 33.2998h102.601v25.1006l-15.7002 2.59961c-5.5 0.900391 -2.90039 1.5 -2.90039 7.2998v151.7h-80.2002l-6.69922 -29.5l24.1992 -6.40039c3.80078 -1.19922 6.7002 -3.7998 6.7002 -7.89941v-107.9
+c0 -5.59961 -2.39941 -6.7002 -7.59961 -7.2998l-20.4004 -2.59961v-25.1006zM342.1 288.8l21.9004 24.2002l-3.5 9.59961h-27.7002l-15.5 28h-9.2998l-15.5 -28h-27.7002l-3.5 -9.59961l21.7998 -24.2002l-9 -33.2002l7.30078 -7.2998l31.1992 16.6006l31.2002 -16.6006
+l7.2998 7.2998z" />
+ <glyph glyph-name="reddit-alien" unicode="&#xf281;" horiz-adv-x="511"
+d="M440.3 244.5c55.2998 0 73.7002 -74.0996 23.7998 -99.7002c2.2002 -7.89941 3.10059 -16.7002 3.10059 -25.0996c0 -83.7998 -94.4004 -151.7 -210.8 -151.7c-115.9 0 -210.301 67.9004 -210.301 151.7c0 8.39941 0.800781 16.7998 2.60059 24.7002
+c-50.9004 25.5 -32.7002 100.1 22.8994 100.1c15 0 28.7002 -6.2002 38.4004 -16.2998c35.7998 24.7002 83.4004 40.5996 136.3 42.7998l30.4004 137.6c1.2998 4.90039 6.09961 8.40039 11 7.10059l97.3994 -21.6006c6.60059 12.7002 19.9004 22 35.3008 22
+c22.0996 0 39.6992 -18.0996 39.6992 -39.6992c0 -21.6006 -17.6992 -39.7002 -39.6992 -39.7002c-21.6006 0 -39.2002 17.5996 -39.2002 39.2002l-88.2002 19.7998l-27.7002 -124.8c53.2998 -1.7002 101.4 -17.6006 137.101 -42.3008
+c9.69922 9.7002 22.8994 15.9004 37.8994 15.9004zM129.4 139.1c0 -21.5996 17.6992 -39.2998 39.6992 -39.1992c21.6006 0 39.2002 17.5996 39.2002 39.1992c0 22.1006 -17.5996 39.7002 -39.2002 39.7002c-22.0996 0 -39.6992 -17.7002 -39.6992 -39.7002zM343.7 45.5996
+c4 3.5 4 9.7002 -0.100586 13.7002c-3.5 3.5 -9.69922 3.5 -13.1992 0c-29 -29 -121.2 -28.5 -149 0c-3.5 3.5 -9.7002 3.5 -13.2002 0c-4 -4 -4 -10.2002 0 -13.7002c36.3994 -36.3994 139.1 -36.3994 175.5 0zM342.9 99.7998c22 0 39.5996 17.7002 39.6992 39.2002
+c0 22.0996 -17.6992 39.7002 -39.6992 39.7002c-21.6006 0 -39.2002 -17.7002 -39.2002 -39.7002c0 -21.5996 17.5996 -39.2002 39.2002 -39.2002z" />
+ <glyph glyph-name="edge" unicode="&#xf282;" horiz-adv-x="512"
+d="M25.7139 219.837c0.111328 0.162109 0.230469 0.323242 0.341797 0.485352c-0.0205078 -0.162109 -0.0449219 -0.323242 -0.0644531 -0.485352h-0.277344zM486.286 204.329l0.000976562 -52.0645h-314.073c1.38379 -128.497 191.392 -124.065 272.255 -67.5713v-104.404
+c-47.3555 -28.5244 -156.774 -53.1709 -240.132 -21.3242c-70.6191 27.1406 -119.913 100.528 -120.743 171.977c-1.10742 92.2188 45.6943 153.422 120.742 188.314c-15.7852 -19.9395 -27.9697 -41.54 -34.3389 -78.9258h175.853
+c10.2471 104.957 -99.4189 104.957 -99.4189 104.957c-103.302 -3.58984 -177.945 -63.6543 -220.375 -124.966c14.5615 114.465 92.9062 219.955 232.837 219.678c85.0195 0 157.605 -39.8779 198.593 -113.265c21.0469 -37.9404 28.8008 -78.373 28.8008 -122.405z" />
+ <glyph glyph-name="codiepie" unicode="&#xf284;" horiz-adv-x="472"
+d="M422.5 245.1c30.7002 0 33.5 -53.0996 -0.299805 -53.0996h-10.7998v-44.2998h-26.6006v97.3994h37.7002zM472 95.4004c-42.0996 -91.9004 -121.6 -151.4 -224 -151.4c-137 0 -248 111 -248 248s111 248 248 248c97.4004 0 172.8 -53.7002 218.2 -138.4l-186 -108.8z
+M433.5 82.9004l-60.2998 30.6992c-27.1006 -44.2998 -70.4004 -71.3994 -122.4 -71.3994c-82.5 0 -149.2 66.7002 -149.2 148.899c0 82.5 66.7002 149.2 149.2 149.2c48.4004 0 88.9004 -23.5 116.9 -63.3994l59.5 34.5996c-40.7002 62.5996 -104.7 100 -179.2 100
+c-121.2 0 -219.5 -98.2998 -219.5 -219.5s98.2998 -219.5 219.5 -219.5c78.5996 0 146.5 42.0996 185.5 110.4z" />
+ <glyph glyph-name="modx" unicode="&#xf285;"
+d="M356 206.2l36.7002 -23.7002v-214.5l-133 83.7998zM440 373l-83.2002 -134.3l-153.5 96.5l23 37.7998h213.7zM351 230.2l-249.8 -57.7002l-46 29v214.5zM97 153.8l249.7 57.7002l-125 -200.5h-213.7z" />
+ <glyph glyph-name="fort-awesome" unicode="&#xf286;" horiz-adv-x="511"
+d="M489.2 160.1c2.59961 0 4.59961 -2 4.5 -4.59961v-219.5h-182.9v96c0 72.5996 -109.7 72.5996 -109.7 0v-96h-182.899v219.5c0 2.59961 2 4.59961 4.59961 4.59961h27.4004c2.59961 0 4.59961 -2 4.59961 -4.59961v-32h36.6006v178.3
+c0 2.60059 2 4.60059 4.59961 4.60059h27.4004c2.59961 0 4.59961 -2 4.59961 -4.60059v-32h36.2998v32c0 2.60059 2 4.60059 4.60059 4.60059h27.3994c2.60059 0 4.60059 -2 4.60059 -4.60059v-32h36.5996v32c0 6 8 4.60059 11.7002 4.60059v111.699
+c-5.40039 2.60059 -9.10059 8.30078 -9.10059 14.3008c0 20.7998 31.4004 20.6992 31.4004 0c0 -6 -3.7002 -11.7002 -9.09961 -14.3008v-4.89941c7.69922 1.7998 15.6992 2.89941 23.6992 2.89941c11.7002 0 22.9004 -4.2998 32.6006 -4.2998
+c8.89941 0 18.8994 4.2998 24 4.2998c2.59961 0 4.59961 -2 4.59961 -4.59961v-60c0 -6.90039 -23.0996 -8 -27.7002 -8c-10.5 0 -20.5 4.2998 -31.3994 4.2998c-8.60059 0 -17.4004 -1.39941 -25.7002 -3.39941v-38c3.7002 0 11.7002 1.39941 11.7002 -4.60059v-32h36.5996
+v32c0 2.60059 2 4.60059 4.60059 4.60059h27.3994c2.60059 0 4.60059 -2 4.60059 -4.60059v-32h36.5996v32c0 2.60059 2 4.60059 4.59961 4.60059h27.4004c2.59961 0 4.59961 -2 4.59961 -4.60059v-178.3h36.6006v32c0 2.59961 2 4.59961 4.59961 4.59961h27.4004z
+M201.1 164.6v64c0 2.60059 -2 4.60059 -4.59961 4.60059h-27.4004c-2.59961 0 -4.59961 -2 -4.59961 -4.60059v-64c0 -2.59961 2 -4.59961 4.59961 -4.59961h27.4004c2.59961 0 4.59961 2 4.59961 4.59961zM347.5 164.6v64c0 2.60059 -2 4.60059 -4.59961 4.60059h-27.4004
+c-2.59961 0 -4.59961 -2 -4.59961 -4.60059v-64c0 -2.59961 2 -4.59961 4.59961 -4.59961h27.4004c2.59961 0 4.59961 2 4.59961 4.59961z" />
+ <glyph glyph-name="usb" unicode="&#xf287;" horiz-adv-x="641"
+d="M641.5 192c0 -3.09961 -1.7002 -6.09961 -4.5 -7.5l-89.0996 -53.5c-1.40039 -0.799805 -2.80078 -1.40039 -4.5 -1.40039c-1.40039 0 -3.10059 0.300781 -4.5 1.10059c-2.80078 1.7002 -4.5 4.5 -4.5 7.7998v35.5996h-238.7
+c25.2998 -39.5996 40.5 -106.899 69.5996 -106.899h26.7002v26.7998c0 5 3.90039 8.90039 8.90039 8.90039h89.0996c5 0 8.90039 -3.90039 8.90039 -8.90039v-89.0996c0 -5 -3.90039 -8.90039 -8.90039 -8.90039h-89.0996c-5 0 -8.90039 3.90039 -8.90039 8.90039v26.6992
+h-26.7002c-75.3994 0 -81.0996 142.5 -124.7 142.5h-100.3c-8.09961 -30.5996 -35.8994 -53.5 -69 -53.5c-39.2998 0.100586 -71.2998 32.1006 -71.2998 71.4004s32 71.2998 71.2998 71.2998c33.1006 0 61 -22.7998 69 -53.5c39.1006 0 43.9004 -9.5 74.6006 60.4004
+c40.0996 89.0996 58.0996 82.0996 108.899 82.0996c7.5 20.9004 27 35.6006 50.4004 35.6006c29.5 0 53.5 -23.9004 53.5 -53.5c0 -29.6006 -23.9004 -53.5 -53.5 -53.5c-23.4004 0 -42.9004 14.7998 -50.4004 35.5996h-29.7998
+c-29.0996 0 -44.2998 -67.4004 -69.5996 -106.9h310.1v35.6006c0 3.2998 1.7002 6.09961 4.5 7.7998s6.40039 1.40039 8.90039 -0.299805l89.0996 -53.5c2.7998 -1.10059 4.5 -4.10059 4.5 -7.2002z" />
+ <glyph glyph-name="product-hunt" unicode="&#xf288;" horiz-adv-x="512"
+d="M326.3 229.2c0 -20.5 -16.7002 -37.2002 -37.2002 -37.2002h-70.2998v74.4004h70.2998c20.5 0 37.2002 -16.7002 37.2002 -37.2002zM504 192c0 -137 -111 -248 -248 -248s-248 111 -248 248s111 248 248 248s248 -111 248 -248zM375.9 229.2
+c0 47.8994 -38.9004 86.7998 -86.8008 86.7998h-119.899v-248h49.5996v74.4004h70.2998c47.9004 0 86.8008 38.8994 86.8008 86.7998z" />
+ <glyph glyph-name="mixcloud" unicode="&#xf289;" horiz-adv-x="640"
+d="M424.43 228.271c42.3623 -9.1377 74.4805 -47.0693 74.4805 -92.2002c0 -52.3311 -42.6406 -94.6934 -94.9688 -94.6934h-289.614c-62.5752 0 -113.243 50.668 -113.243 112.966c0 56.7598 42.085 103.554 96.6299 111.582
+c22.9814 67.5586 86.9395 114.074 159.205 114.074c87.2158 0 159.205 -66.7266 167.511 -151.729zM403.941 83.7412c29.0713 0 52.6064 23.5352 52.6064 52.3301c0 22.1494 -14.1211 40.9766 -33.502 48.4531c-1.38477 -8.58301 -3.59961 -17.166 -6.36914 -25.4727
+c-8.01367 -25.6484 -49.0898 -14.2266 -40.1465 13.29c4.15332 12.7373 6.36914 26.0264 6.36914 39.5938c0 69.2197 -56.4834 125.702 -125.979 125.702c-49.8379 0 -94.6934 -29.626 -114.628 -73.9258c19.3809 -4.98438 37.3779 -14.9512 52.0527 -29.3486
+c19.9531 -19.9531 -10.2168 -50.1436 -30.1797 -30.1807c-13.29 13.291 -31.0107 20.7666 -49.8379 20.7666c-39.04 0 -70.8809 -31.5645 -70.8809 -70.6045s31.8408 -70.6035 70.8809 -70.6035h289.614zM639.01 136.071c0 -44.0244 -12.7363 -86.3867 -37.1016 -122.657
+c-4.15332 -6.0918 -10.7979 -9.41406 -17.7197 -9.41406c-16.3174 0 -27.1279 18.8262 -17.4434 32.9492c19.3809 29.3486 29.9033 63.6816 29.9033 99.1221c0 35.4395 -10.5215 69.7725 -29.9033 98.8447c-15.6553 22.8311 19.3613 47.2402 35.1631 23.5342
+c24.3662 -35.9932 37.1016 -78.3564 37.1016 -122.379zM568.13 136.071c0 -31.5654 -9.13672 -62.0215 -26.8564 -88.3252c-4.15332 -6.09082 -10.7988 -9.13574 -17.7207 -9.13574c-17.2012 0 -27.0215 18.9785 -17.4424 32.9473
+c13.0127 19.1045 19.6572 41.2559 19.6572 64.5137c0 22.9805 -6.64453 45.4072 -19.6572 64.5117c-15.7617 22.9863 19.0078 47.0947 35.1631 23.5352c17.7188 -26.0264 26.8564 -56.4834 26.8564 -88.0469z" />
+ <glyph glyph-name="scribd" unicode="&#xf28a;" horiz-adv-x="384"
+d="M42.2998 195.3c-16.0996 19 -24.7002 45.9004 -24.7998 79.9004c0 100.399 75.2002 153.1 167.2 153.1c98.5996 1.60059 156.8 -49 184.3 -70.5996l-50.5 -72.1006l-37.2998 24.6006l26.8994 38.5996c-36.5 24 -79.3994 36.5 -123 35.7998
+c-50.6992 0.800781 -111.699 -27.1992 -111.699 -76.1992c0 -18.7002 11.1992 -20.7002 28.5996 -15.6006c23.2998 5.2998 41.9004 -0.599609 55.7998 -14c26.4004 -24.2998 23.2002 -67.5996 -0.700195 -91.8994c-29.1992 -29.5 -85.1992 -27.3008 -114.8 8.39941z
+M360 189.4c33.9004 -40.4004 36.7998 -138.2 -20.2998 -189.601c-39.2002 -33.5996 -82.2002 -44.0996 -133.601 -44.0996c-70.2998 -0.299805 -138.199 25.3994 -190.699 72.2002l-15.4004 13.7998l60.7998 71.7998l35.6006 -27.4004l-33.7002 -39.3994
+c41.7002 -30.9004 92.2002 -47.5 144.1 -47.2998c61.9004 0 104.7 23.5 121.4 64.3994c0.899414 4.2002 1.39941 8.40039 1.39941 12.7002c0 18.7002 -11.1992 20.7002 -28.5996 15.5996c-23.2998 -5.2998 -42.2002 0.5 -56.2998 14.4004
+c-12.4004 11.2998 -19.1006 27.5 -18.4004 44.2998c-0.599609 39.2002 32.4004 69.2002 70.5 67.2002c24.2998 0.799805 47.7002 -9.7998 63.2002 -28.5996z" />
+ <glyph glyph-name="bluetooth" unicode="&#xf293;"
+d="M292.6 276.9l-42.8994 -42.9004l-0.299805 86zM249.4 57.0996l0.199219 86l42.9004 -42.8994zM416 188.6c0 -205.6 -71.9004 -252.6 -185.1 -252.6c-113.2 0 -198.9 47 -198.9 252.6c0 205.601 83.4004 259.4 196.6 259.4c113.2 0 187.4 -53.9004 187.4 -259.4z
+M257.5 188.6l79.4004 88.6006l-125.101 134.3v-176.9l-73.7998 73.8008l-27 -26.9004l92.7002 -93l-92.7002 -93l26.9004 -26.9004l73.7998 73.8008l2.2998 -170l127.4 127.5z" />
+ <glyph glyph-name="bluetooth-b" unicode="&#xf294;" horiz-adv-x="320"
+d="M196.48 187.977l97.9111 -103.333l-148.552 -148.644l-2.71484 198.284l-86.1113 -86.1113l-31.4053 31.4053l108.061 108.398l-108.061 108.399l31.4053 31.4053l86.1113 -86.1113v206.33l145.981 -156.69zM237.34 290.973l-50.3145 50.3174l0.337891 -100.295z
+M187.363 134.96l-0.337891 -100.294l50.3145 50.3164z" />
+ <glyph glyph-name="gitlab" unicode="&#xf296;" horiz-adv-x="512"
+d="M105.2 423.1c0 0 56.5 -174.8 56.5996 -174.8h-132l56.5 174.8c3.2002 8.90039 15.7998 8.90039 18.9004 0zM0.900391 160.3l28.7998 88l226.2 -294l-247.9 184c-6.7998 5.10059 -9.7002 14 -7.09961 22zM161.7 248.3h188.6l-94.2998 -294zM511.1 160.3
+c2.5 -8 -0.299805 -16.8994 -7.19922 -22l-247.9 -184l226.3 294zM425.7 423.1l56.5 -174.8h-132l56.5996 174.8c3.2002 8.90039 15.7998 8.90039 18.9004 0z" />
+ <glyph glyph-name="wpbeginner" unicode="&#xf297;" horiz-adv-x="511"
+d="M462.799 125.626c56.2109 -64.3076 4.16211 -157.626 -91.8545 -157.626c-39.6025 0 -78.8242 17.6865 -100.143 50.04c-6.88672 -0.356445 -22.7021 -0.356445 -29.5898 0c-21.3643 -32.4209 -60.624 -50.04 -100.143 -50.04
+c-95.4902 0 -148.349 92.9961 -91.8555 157.626c-79.1387 131.851 31.2646 290.374 206.792 290.374c175.632 0 285.87 -158.626 206.793 -290.374zM123.152 208.598h41.5283v58.0752h-41.5283v-58.0752zM340.332 122.526v23.8389
+c-60.5059 -20.915 -132.355 -9.19824 -187.589 33.9707l0.246094 -24.8965c51.1006 -46.3672 131.746 -57.875 187.343 -32.9131zM189.579 208.598h166.058v58.0752h-166.058v-58.0752z" />
+ <glyph glyph-name="wpforms" unicode="&#xf298;"
+d="M448 372.8v-361.7c0 -24.2998 -19 -43.1992 -43.2002 -43.1992h-361.6c-23.9004 0.0996094 -43.2002 18.6992 -43.2002 43.2998v361.6c0 24.1006 18.7998 43.2002 43.2002 43.2002h361.7c24 0 43.0996 -18.7998 43.0996 -43.2002zM410.7 11.2002v361.6
+c0 3 -2.60059 5.7998 -5.7998 5.7998h-9.30078l-110.3 -74.5996l-61.2998 49.9004l-61.2002 -49.9004l-110.3 74.7002h-9.2998c-3.2002 0 -5.7998 -2.7998 -5.7998 -5.7998v-361.7c0 -3 2.59961 -5.7998 5.7998 -5.7998h361.7
+c3.19922 -0.100586 5.7998 2.69922 5.7998 5.7998zM150.2 262v-37h-73.5v37h73.5zM150.2 187.6v-37.2998h-73.5v37.2998h73.5zM161.3 334.9l54 43.6992h-118.5zM371.3 262v-37h-196v37h196zM371.3 187.6v-37.2998h-196v37.2998h196zM286.7 334.9l64.5 43.6992h-118.4z
+M371.3 113v-37.2998h-99.3994v37.2998h99.3994z" />
+ <glyph glyph-name="envira" unicode="&#xf299;"
+d="M0 416c477.6 0 366.6 -317.3 367.1 -366.3l80.9004 -81.7002h-26l-70.4004 71.2002c-39 -4.2002 -124.399 -34.5 -214.399 37c-90.2002 71.5 -85.2002 157.1 -137.2 339.8zM79.7002 370c-49.7002 23.5 -5.2002 -9.2002 -5.2002 -9.2002
+c45.2002 -31.2002 66 -73.7002 90.2002 -119.899c31.5 -60.2002 79 -139.7 144.2 -167.7c65 -28 34.1992 -12.5 6 8.5c-28.2002 21.2002 -68.2002 87 -91 130.2c-31.7002 60 -61 118.6 -144.2 158.1z" />
+ <glyph glyph-name="glide" unicode="&#xf2a5;"
+d="M252.8 299.4c0 -8.80078 -1.59961 -17.7002 -3.39941 -26.4004c-5.80078 -27.7998 -11.6006 -55.7998 -17.3008 -83.5996c-1.39941 -6.30078 -8.2998 -4.90039 -13.6992 -4.90039c-23.8008 0 -30.5 26 -30.5 45.5c0 29.2998 11.1992 68.0996 38.5 83.0996
+c4.2998 2.5 9.19922 4.2002 14.0996 4.2002c11.4004 0 12.2998 -8.2998 12.2998 -17.8994zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM384 181c0 5.09961 -20.7998 37.7002 -25.5 39.5
+c-2.2002 0.900391 -7.2002 2.2998 -9.59961 2.2998c-23.1006 0 -38.7002 -10.5 -58.2002 -21.5l-0.5 0.5c4.2998 29.4004 14.5996 57.2002 14.5996 87.4004c0 44.5996 -23.7998 62.7002 -67.5 62.7002c-71.7002 0 -108 -70.8008 -108 -123.5c0 -54.7002 32 -85 86.2998 -85
+c7.5 0 6.90039 0.599609 6.90039 -2.30078c-10.5 -80.2998 -56.5 -82.8994 -56.5 -58.8994c0 24.3994 28 36.5 28.2998 38c-0.200195 7.59961 -29.2998 17.2002 -36.7002 17.2002c-21.0996 0 -32.6992 -33 -32.6992 -50.6006c0 -32.2998 20.3994 -54.7002 53.2998 -54.7002
+c48.2002 0 83.3994 49.7002 94.2998 91.7002c9.40039 37.7002 7 39.4004 12.2998 42.1006c20 10.0996 35.7998 16.7998 58.4004 16.7998c11.0996 0 19 -2.2998 36.7002 -5.2002c1.7998 -0.0996094 4.09961 1.7002 4.09961 3.5z" />
+ <glyph glyph-name="glide-g" unicode="&#xf2a6;" horiz-adv-x="480"
+d="M407.1 236.8c7.5 -2.89941 40.9004 -55.3994 40.9004 -63.3994c0 -2.90039 -3.7998 -5.80078 -6.7002 -5.80078c-28.3994 4.7002 -41.0996 8.40039 -58.8994 8.40039c-36.3008 0 -61.6006 -10.7998 -93.8008 -27c-8.5 -4.2998 -4.59961 -7.09961 -19.6992 -67.5996
+c-17.4004 -67.6006 -74 -145.4 -151.4 -145.4c-52.7002 0 -85.5 36 -85.5 87.9004c0 28.0996 18.5 79.1992 52.4004 79.2998c11.8994 0 58.5996 -15.4004 58.8994 -27.6006c-0.5 -2.39941 -45.5 -21.7998 -45.5 -61c0 -38.5 73.9004 -34.2998 90.7998 94.6006
+c0 4.7998 1 3.7998 -11 3.7998c-87.2998 0 -138.6 48.7002 -138.6 136.6c0 84.7002 58.2998 198.4 173.4 198.4c70.1992 0 108.399 -29.0996 108.399 -100.6c0 -48.5 -16.5 -93.1006 -23.5 -140.4l0.900391 -0.900391c31.2998 17.7002 56.3994 34.5 93.5 34.5
+c3.7998 0 11.8994 -2.39941 15.3994 -3.7998zM231.8 321.2c2.90039 13.8994 5.5 28.0996 5.60059 42.3994c0 15.4004 -1.40039 28.7002 -20 28.7002c-7.80078 0 -15.6006 -2.59961 -22.6006 -6.7002c-43.7998 -24.0996 -61.7998 -86.3994 -61.7998 -133.399
+c0 -31.2998 10.7002 -73.1006 49 -73.1006c8.7002 0 19.7002 -2.39941 22 7.80078c9.2002 44.6992 18.5 89.5996 27.7998 134.3z" />
+ <glyph glyph-name="viadeo" unicode="&#xf2a9;" horiz-adv-x="447"
+d="M276.2 297.5v-0.700195c-17.9004 52.6006 -42.6006 103.4 -70.7998 151.2c43.2998 -29.2002 67 -100 70.7998 -150.5zM308.9 175.8c15.0996 3.10059 29.5 9 42.1992 17c24.5 -58.5996 20.2002 -139.7 -36.3994 -201c-67.7998 -73.8994 -191.9 -74.5996 -259.8 0
+c-108.801 117.8 -31.6006 313.7 129.899 313.7c21.2998 0 42.6006 -3.5 62.5 -10.7002c-6.89941 -13.3994 -11.7002 -28.2002 -13.3994 -43.2998c-15.4004 6.5 -32.3008 9.59961 -49.1006 9.59961c-78 0 -135.399 -66.6992 -135.399 -142.3
+c0 -68.7998 45.5996 -126 111.3 -137.399c98.5 38.3994 116.6 188.199 116.6 280c0 11.6992 0 23.6992 -1 35.3994c12.4004 -36.0996 18.9004 -73.8994 18.9004 -112c0 -86.5 -35.1006 -158.399 -109.3 -205.1l-3.80078 -0.299805
+c80 -1.60059 137.801 61.6992 137.801 139.399c0 19.5 -3.40039 38.7998 -11 57zM418.1 436.3c52 -74 20.9004 -208.6 -58.0996 -208.6c-21.2998 0 -40.2002 11.3994 -55 25.7998c35.0996 19.2998 79.4004 49.2002 99.7002 84.9004
+c2.39941 4.7998 6.5 13.6992 7.2002 19.1992c-19.9004 -44.6992 -70.8008 -79.6992 -118.2 -90.6992c-7.5 11.6992 -12 24.6992 -12 38.7998c0 16.5 8.2002 38.5 20.5996 50.5c34.5 32.8994 84.7998 13.5996 115.8 80.0996z" />
+ <glyph glyph-name="viadeo-square" unicode="&#xf2aa;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM280.7 66.7998c35.3994 38.2998 38.0996 89 22.7998 125.601c-7.90039 -4.90039 -16.9004 -8.60059 -26.4004 -10.5
+c4.80078 -11.4004 6.90039 -23.5 6.90039 -35.7002c0 -48.6006 -36.2002 -88.2002 -86.2002 -87.2002l2.40039 0.200195c46.3994 29.2002 68.2998 74.0996 68.2998 128.2c0 23.7998 -4.09961 47.5 -11.7998 70v0.399414c-2.2998 31.6006 -17.1006 75.7998 -44.2002 94.1006
+c17.5996 -29.9004 33 -61.6006 44.2002 -94.5c0.599609 -7.30078 0.599609 -14.8008 0.599609 -22.1006c0 -57.3994 -11.3994 -151 -72.8994 -175c-41 7.2002 -69.5 42.9004 -69.5 85.9004c0 47.2002 35.7998 88.8994 84.5996 88.8994c10.5 0 21 -1.89941 30.7002 -6
+c1.09961 9.5 4.09961 18.7002 8.39941 27.1006c-12.5 4.59961 -25.7998 6.7002 -39.0996 6.7002c-101 0 -149.2 -122.5 -81.2002 -196.101c42.4004 -46.5996 120 -46.2002 162.4 0zM309 214.3c49.4004 0 68.7998 84.1006 36.2998 130.3
+c-19.3994 -41.5 -50.7998 -29.5 -72.3994 -50c-7.7002 -7.5 -12.9004 -21.2998 -12.9004 -31.5996c0 -8.7998 2.7998 -17 7.5 -24.2998c29.7002 6.89941 61.4004 28.7998 73.9004 56.7002c-0.400391 -3.40039 -3 -9 -4.5 -12c-12.7002 -22.3008 -40.4004 -41 -62.3008 -53
+c9.30078 -9 21.1006 -16.1006 34.4004 -16.1006z" />
+ <glyph glyph-name="snapchat" unicode="&#xf2ab;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM417.5 101.1c2.2002 5.30078 -0.900391 9.80078 -4.90039 10.8008c-46.2998 7.59961 -67.0996 55.0996 -68 57.0996
+c-0.0996094 0.0996094 -0.0996094 0.200195 -0.199219 0.299805c-2.40039 5 -3 9.2002 -1.60059 12.5c2.60059 6.2998 12.5 9.40039 19 11.5c1.7998 0.600586 3.5 1.10059 4.90039 1.7002c11.5 4.5 17.2998 10.0996 17.2002 16.5996
+c-0.100586 5.10059 -4.10059 9.60059 -10.4004 11.9004c-4 1.59961 -9.59961 1.90039 -13.5996 0c-5.5 -2.59961 -10.4004 -4 -14.7002 -4.2002c-2.7998 0.100586 -4.60059 0.799805 -5.7002 1.40039c1.40039 24 4.7002 58 -3.7998 77.0996
+c-16.2998 36.5 -49.6006 54.2998 -84.2998 54.2998c-0.600586 0 -6.10059 -0.0996094 -6.7002 -0.0996094c-14 0 -61.6006 -4 -84.1006 -54.2998c-8.5 -19.1006 -5.19922 -53.2002 -3.7998 -77.1006c-1.09961 -0.599609 -3.2998 -1.39941 -6.59961 -1.39941
+c-4.5 0 -9.7998 1.39941 -15.7002 4.2002c-7.5 3.5 -20.2998 -1.80078 -21.9004 -10.3008c-1 -4.89941 1.2002 -12.0996 17 -18.2998c6.10059 -2.5 20.6006 -5.2998 24 -13.2002c1.40039 -3.2998 0.900391 -7.5 -1.59961 -12.5
+c-0.0996094 -0.0996094 -0.200195 -0.199219 -0.200195 -0.299805c-0.899414 -2 -21.7002 -49.5 -68 -57.0996c-3.59961 -0.600586 -6.09961 -3.7998 -5.89941 -7.40039c0.699219 -13.8994 31.6992 -19.2998 45.5 -21.3994c1.39941 -1.90039 2.5 -9.90039 4.2998 -16
+c0.799805 -2.7002 2.89941 -6 8.2998 -6s13.2998 3.09961 25.7998 3.09961c17.6006 0 23.6006 -4 37.4004 -13.7002c9.89941 -7 27.5 -19.7998 48.5 -18.2002c20.7998 -0.899414 34.7002 7.90039 49.2002 18.2002c13.6992 9.7002 19.7998 13.7002 37.3994 13.7002
+c13 0 19.6006 -2.90039 25.7998 -2.90039h0.200195c4.40039 0 7 2.2002 8.10059 5.90039c1.7998 6.09961 2.89941 14 4.2998 15.9004c26.7002 4.19922 41.2998 10.0996 44.7998 18.1992z" />
+ <glyph glyph-name="snapchat-ghost" unicode="&#xf2ac;" horiz-adv-x="512"
+d="M510.846 55.3271c-5.21094 -12.1572 -27.2383 -21.0889 -67.3594 -27.3184c-2.06445 -2.78613 -3.77539 -14.6855 -6.50781 -23.9561c-1.625 -5.56543 -5.62207 -8.86914 -12.1279 -8.86914l-0.296875 0.00585938c-9.39453 0 -19.2031 4.32227 -38.8516 4.32227
+c-26.5215 0 -35.6621 -6.04297 -56.2539 -20.5879c-21.832 -15.4375 -42.7715 -28.7637 -74.0273 -27.3984c-31.6455 -2.33398 -58.0244 16.9072 -72.8711 27.4033c-20.7139 14.6436 -29.8281 20.582 -56.2412 20.582c-18.8633 0 -30.7354 -4.71973 -38.8516 -4.71973
+c-8.07324 0 -11.2129 4.92188 -12.4219 9.04004c-2.70312 9.18848 -4.4043 21.2627 -6.52344 24.1299c-20.6787 3.20898 -67.3096 11.3438 -68.498 32.1504c-0.00878906 0.161133 -0.015625 0.422852 -0.015625 0.583984c0 4.97559 3.98438 9.67285 8.89258 10.4844
+c69.583 11.4551 100.925 82.9014 102.228 85.9346c0.0742188 0.175781 0.155273 0.34375 0.237305 0.514648c3.71289 7.53711 4.54395 13.8486 2.46289 18.7529c-5.05078 11.8965 -26.8721 16.1641 -36.0537 19.7959c-23.7148 9.36621 -27.0146 20.1279 -25.6113 27.5039
+c2.43652 12.8359 21.7246 20.7354 33.002 15.4531c8.91895 -4.18066 16.8428 -6.29688 23.5469 -6.29688c5.02148 0 8.21191 1.2041 9.95996 2.1709c-2.04297 35.9365 -7.10156 87.29 5.68652 115.969c33.7734 75.7188 105.356 81.6025 126.478 81.6025
+c0.943359 0 9.14062 0.0888672 10.1094 0.0888672c52.1484 0 102.255 -26.7803 126.724 -81.6426c12.7764 -28.6504 7.74902 -79.792 5.69434 -116.01c1.58203 -0.87207 4.35742 -1.94141 8.59961 -2.13867c6.39648 0.286133 13.8145 2.38867 22.0693 6.25684
+c6.08496 2.84668 14.4053 2.46094 20.4795 -0.0576172l0.0292969 -0.00976562c9.47559 -3.38574 15.4385 -10.2158 15.5889 -17.8701c0.183594 -9.74707 -8.52246 -18.165 -25.8779 -25.0186c-2.11816 -0.834961 -4.69434 -1.6543 -7.43457 -2.52441
+c-9.79688 -3.10645 -24.5996 -7.80566 -28.6152 -17.2715c-2.0791 -4.9043 -1.25684 -11.2109 2.45996 -18.748c0.0869141 -0.167969 0.166016 -0.341797 0.238281 -0.514648c1.30176 -3.03027 32.6152 -74.46 102.23 -85.9346
+c6.42676 -1.05762 11.1631 -7.87695 7.72461 -15.8584z" />
+ <glyph glyph-name="snapchat-square" unicode="&#xf2ad;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM393.5 101.1c2.2002 5.30078 -0.900391 9.80078 -4.90039 10.8008c-46.2998 7.59961 -67.0996 55.0996 -68 57.0996
+c-0.0996094 0.0996094 -0.0996094 0.200195 -0.199219 0.299805c-2.40039 5 -3 9.2002 -1.60059 12.5c2.60059 6.2998 12.5 9.40039 19 11.5c1.7998 0.600586 3.5 1.10059 4.90039 1.7002c11.5 4.5 17.2998 10.0996 17.2002 16.5996
+c-0.100586 5.10059 -4.10059 9.60059 -10.4004 11.9004c-4 1.59961 -9.59961 1.90039 -13.5996 0c-5.5 -2.59961 -10.4004 -4 -14.7002 -4.2002c-2.7998 0.100586 -4.60059 0.799805 -5.7002 1.40039c1.40039 24 4.7002 58 -3.7998 77.0996
+c-16.2998 36.5 -49.6006 54.2998 -84.2998 54.2998c-0.600586 0 -6.10059 -0.0996094 -6.7002 -0.0996094c-14 0 -61.6006 -4 -84.1006 -54.2998c-8.5 -19.1006 -5.19922 -53.2002 -3.7998 -77.1006c-1.09961 -0.599609 -3.2998 -1.39941 -6.59961 -1.39941
+c-4.5 0 -9.7998 1.39941 -15.7002 4.2002c-7.5 3.5 -20.2998 -1.80078 -21.9004 -10.3008c-1 -4.89941 1.2002 -12.0996 17 -18.2998c6.10059 -2.5 20.6006 -5.2998 24 -13.2002c1.40039 -3.2998 0.900391 -7.5 -1.59961 -12.5
+c-0.0996094 -0.0996094 -0.200195 -0.199219 -0.200195 -0.299805c-0.899414 -2 -21.7002 -49.5 -68 -57.0996c-3.59961 -0.600586 -6.09961 -3.7998 -5.89941 -7.40039c0.699219 -13.8994 31.6992 -19.2998 45.5 -21.3994c1.39941 -1.90039 2.5 -9.90039 4.2998 -16
+c0.799805 -2.7002 2.89941 -6 8.2998 -6s13.2998 3.09961 25.7998 3.09961c17.6006 0 23.6006 -4 37.4004 -13.7002c9.89941 -7 27.5 -19.7998 48.5 -18.2002c20.7998 -0.899414 34.7002 7.90039 49.2002 18.2002c13.6992 9.7002 19.7998 13.7002 37.3994 13.7002
+c13 0 19.6006 -2.90039 25.7998 -2.90039h0.200195c4.40039 0 7 2.2002 8.10059 5.90039c1.7998 6.09961 2.89941 14 4.2998 15.9004c26.7002 4.19922 41.2998 10.0996 44.7998 18.1992z" />
+ <glyph glyph-name="pied-piper" unicode="&#xf2ae;"
+d="M32 29l-32 -60.2002l0.799805 328c0 65.9004 53.2002 119.2 119.2 119.2h327.2c-93 -28.9004 -189.9 -94.2002 -253.9 -168.6c-70.5996 -81.4004 -110.7 -137.4 -161.3 -218.4zM448 416c0 0 0 -328.8 0.0996094 -328.8c0 -65.9004 -53.2998 -119.2 -119.3 -119.2
+h-328.399c18.5 25.5 61.6992 54 84.8994 66c35.5 18.0996 76.4004 28.5 105.3 56.2998c42.1006 40.5 47.8008 105 71 158.601c43.6006 100.3 186.4 167.1 186.4 167.1z" />
+ <glyph glyph-name="first-order" unicode="&#xf2b0;"
+d="M12.9004 218.8c0.0996094 0.100586 0.199219 0.299805 0.299805 0.400391c0 -0.100586 0 -0.299805 -0.100586 -0.400391h-0.199219zM224 351.4c7.40039 0 14.5996 -0.5 21.7002 -1.7002l-4 -67.7002l22.2998 64.2998c14.2998 -3.7998 27.7002 -9.5 40 -16.8994
+l-29.4004 -61.1006l45.1006 50.9004c11.5 -8.90039 21.7002 -19.2002 30.5996 -30.9004l-50.5996 -45.3994l60.8994 29.6992c7.5 -12.2998 12.9004 -26 16.6006 -40.2998l-64 -22.2998l67.7002 4c1.09961 -7.09961 1.39941 -14.5996 1.39941 -22
+s-0.299805 -14.5996 -1.39941 -21.7002l-67.4004 4l64 -22.2998c-3.7002 -14.5996 -9.5 -28 -16.5996 -40.2998l-61.1006 29.3994l50.6006 -45.0996c-8.60059 -11.7998 -18.9004 -22 -30.6006 -30.9004l-44.8994 50.9004l29.3994 -61.2998
+c-12.2998 -7.5 -25.7002 -12.9004 -40 -16.9004l-22.5996 65.1006l4 -68.6006c-7.10059 -1.09961 -14.2998 -1.7002 -21.7002 -1.7002c-7.09961 0 -14.5996 0.600586 -21.7002 1.7002l4 68l-22.2998 -64.5996c-14.2998 3.7998 -27.7002 9.5 -40 16.8994l29.5 61.4004
+l-44.9004 -50.9004c-11.7998 8.60059 -22 19.2002 -30.8994 30.9004l50.8994 45.0996l-61.0996 -29.6992c-7.2002 12.5996 -12.9004 26 -16.5996 40.2998l64 22.5996l-67.7002 -4c-0.799805 7.10059 -1.40039 14.2998 -1.40039 21.7002s0.5 14.9004 1.40039 22l68 -4
+l-64.2998 22.5996c3.69922 14.3008 9.5 27.7002 16.5996 40l61.0996 -29.6992l-50.5996 45.3994c8.90039 11.7998 19.2002 22 30.5996 30.9004l45.1006 -50.9004l-29.4004 61.4004c12.2998 7.2002 25.7002 12.8994 40 16.5996l22 -64l-3.7002 67.4004
+c6.80078 1.09961 14.3008 1.7002 21.4004 1.7002zM443.4 320v-256l-219.4 -128l-219.4 128v256l219.4 128zM426.3 309.7l-202.3 117.399l-202.3 -117.399v-235.101l202.3 -117.699l202.3 117.699v235.101zM224 410.9l187.7 -109.4v-218.9l-187.7 -109.5l-187.7 109.5
+v218.801zM224 360c-92.2998 0 -166.9 -75.0996 -166.9 -168c0 -92.5996 74.6006 -167.7 166.9 -167.7c92 0 166.9 75.1006 166.9 167.7c0 92.9004 -74.9004 168 -166.9 168z" />
+ <glyph glyph-name="yoast" unicode="&#xf2b1;"
+d="M91.2998 372h186l-7 -18.9004h-179c-39.7002 0 -71.8994 -31.5996 -71.8994 -70.2998v-205.399c0 -35.4004 24.8994 -70.3008 84 -70.3008v-19.0996h-12.1006c-50.0996 0 -91.2998 40.2002 -91.2998 89.5v205.3c0 49.2998 40.7002 89.2002 91.2998 89.2002zM320.4 428
+h66.5c-143.801 -378.1 -145.7 -398.9 -184.7 -439.3c-20.7998 -21.6006 -49.2998 -31.7002 -78.2998 -32.7002v51.0996c49.1992 7.7002 64.5996 49.9004 64.5996 75.3008c0 20.0996 0.599609 12.5996 -82.0996 223.199h61.3994l50.4004 -156.6zM448 286.5v-298.5h-214
+c6.59961 9.59961 10.7002 16.2998 12.0996 19.4004h182.5v279.1c0 32.5 -17.0996 51.9004 -48.1992 62.9004l6.69922 17.5996c41.7002 -13.5996 60.9004 -43.0996 60.9004 -80.5z" />
+ <glyph glyph-name="themeisle" unicode="&#xf2b2;" horiz-adv-x="512"
+d="M208 359.714c0 10 6.28613 21.7139 17.7148 21.7139c11.1426 0 17.7139 -11.7139 17.7139 -21.7139c0 -10.2852 -6.57129 -21.7139 -17.7139 -21.7139c-11.4287 0 -17.7148 11.4287 -17.7148 21.7139zM512 199.714c0 -36.001 -11.4287 -102.286 -36.2861 -129.714
+c-22.8574 -24.8584 -87.4277 -61.1426 -120.856 -70.5723l-1.14355 -0.286133v-32.5703c0 -16.2861 -12.5723 -30.5713 -29.1426 -30.5713c-10 0 -19.4297 5.71387 -24.5723 14.2861c-5.42676 -8.57227 -14.8564 -14.2861 -24.8564 -14.2861
+s-19.4287 5.71387 -24.8574 14.2861c-5.14258 -8.57227 -14.5713 -14.2861 -24.5703 -14.2861c-10.2861 0 -19.4287 5.71387 -24.8574 14.2861c-5.14355 -8.57227 -14.5713 -14.2861 -24.5713 -14.2861c-18.8574 0 -29.4287 15.7139 -29.4287 32.8574
+c-16.2861 -12.2852 -35.7158 -19.4287 -56.5713 -19.4287c-22 0 -43.4287 8.28516 -60.2861 22.8574c10.2852 0.286133 20.5713 2.28613 30.2852 5.71387c-20.8574 5.71387 -39.4277 18.8574 -52 36.2861c21.3701 -4.64551 46.209 -1.67285 67.1426 11.1426
+c-22 22 -56.5703 58.8574 -68.5713 87.4287c-5.71387 13.4287 -6.85645 31.4287 -6.85645 45.7139c0 49.7139 20.2861 160 86.2861 160c10.5713 0 18.8564 -4.8584 23.1426 -14.8574c3.0498 4.46289 8.42578 11.374 12 15.4277c2 2.57227 5.71387 5.42969 7.14355 8.28613
+c7.99902 12.5713 11.7139 21.1426 21.7139 34c32.2852 41.1445 81.7139 69.4297 134.856 69.4297c6 0 12 -0.285156 17.7148 -1.14355c10.8564 11.7148 26 18.2861 41.7148 18.2861c14.5703 0 29.7139 -6 40 -16.2861c0.856445 -0.857422 1.42773 -2.28613 1.42773 -3.42773
+c0 -3.71387 -10.2852 -13.4287 -12.8574 -16.2861c4.28613 -1.42871 15.7148 -6.8584 15.7148 -12c0 -2.85742 -2.85742 -5.14258 -4.57129 -7.14258c31.4287 -27.7148 49.4287 -67.1436 56.2861 -108c4.28613 5.14258 10.2852 8.57129 17.1426 8.57129
+c10.5713 0 20.8574 -7.14355 28.5713 -14.001c20.8564 -18.5703 25.7139 -53.1416 25.7139 -79.7139zM188 358.572c0 -18.2861 12.5713 -37.1436 32.2861 -37.1436c19.7139 0 32.2852 18.8574 32.2852 37.1436c0 18 -12.5713 36.8564 -32.2852 36.8564
+c-19.7148 0 -32.2861 -18.8574 -32.2861 -36.8564zM237.714 254c0 19.7139 3.71387 39.1426 8.57129 58.2861c-52.0391 -79.5342 -13.5312 -184.571 68.8574 -184.571c21.4287 0 42.5713 7.71387 60 20c2 7.42871 3.71484 14.8574 3.71484 22.5723
+c0 14.2861 -6.28613 21.4277 -20.5723 21.4277c-4.57129 0 -9.14355 -0.856445 -13.4287 -1.71387c-63.3438 -12.668 -107.143 -3.66895 -107.143 63.999zM196.572 -0.858398c0 11.1436 -8.8584 20.8574 -20.2861 20.8574c-11.4287 0 -20 -9.71484 -20 -20.8574v-32.5703
+c0 -11.1436 8.57129 -21.1426 20 -21.1426c11.4277 0 20.2861 9.71484 20.2861 21.1426v32.5703zM245.715 -0.858398c0 11.1436 -8.57227 20.8574 -20 20.8574c-11.4287 0 -20.2861 -9.71484 -20.2861 -20.8574v-32.5703c0 -11.1436 8.85742 -21.1426 20.2861 -21.1426
+c11.4277 0 20 10 20 21.1426v32.5703zM295.428 -0.858398c0 11.1436 -8.85645 20.8574 -20.2852 20.8574s-20.2852 -9.71484 -20.2852 -20.8574v-32.5703c0 -11.1436 8.85645 -21.1426 20.2852 -21.1426s20.2852 9.71484 20.2852 21.1426v32.5703zM345.143 -0.858398
+c0 11.1436 -8.85645 20.8574 -20.2852 20.8574s-20.2861 -9.71484 -20.2861 -20.8574v-32.5703c0 -11.1436 8.85742 -21.1426 20.2861 -21.1426s20.2852 10 20.2852 21.1426v32.5703zM421.714 162c-30.8564 -59.1416 -90.2852 -102.572 -158.571 -102.572
+c-96.5703 0 -160.57 84.5723 -160.57 176.572c0 16.8574 2 33.4287 6 49.7139c-20 -33.7148 -29.7139 -72.5723 -29.7139 -111.429c0 -60.2861 24.8564 -121.715 71.4287 -160.857c5.14258 9.71387 14.8564 16.2861 26 16.2861c10 0 19.4277 -5.71387 24.5713 -14.2861
+c5.42871 8.57129 14.5703 14.2861 24.8574 14.2861c10 0 19.4277 -5.71387 24.5713 -14.2861c5.42871 8.57129 14.8564 14.2861 24.8574 14.2861c10 0 19.4287 -5.71387 24.8574 -14.2861c5.14258 8.57129 14.5713 14.2861 24.5723 14.2861
+c10.8564 0 20.8564 -6.57227 25.7139 -16c43.4268 36.2861 68.5693 92 71.4258 148.286zM432.286 261.714c0 53.7139 -34.5713 105.714 -92.5723 105.714c-30.2852 0 -58.5713 -15.1426 -78.8564 -36.8564c-19.9951 -66.3828 -27.4473 -136.571 41.4287 -136.571
+c28.8047 0 97.3564 28.5381 84.2861 -36.8574c28.8564 26 45.7139 65.7148 45.7139 104.571z" />
+ <glyph glyph-name="google-plus" unicode="&#xf2b3;" horiz-adv-x="496"
+d="M248 440c136.9 0 248 -111.1 248 -248s-111.1 -248 -248 -248s-248 111.1 -248 248s111.1 248 248 248zM177.3 68c71.2998 0 118.8 50.4004 118.8 121.2c0 7.09961 -0.599609 13.8994 -1.89941 20.7002h-116.9v-42.6006h70.1006
+c-5.2002 -34.2002 -37.5 -53.2998 -70.1006 -53.2998c-43 0 -77.2002 35.5 -77.2002 78.0996c0 42.6006 34.3008 78.1006 77.2002 78.1006c18.1006 0 36.2002 -6.2002 49.4004 -19.1006l33.5996 32.6006c-22.8994 21.2998 -51.7002 32.2998 -83 32.2998
+c-68.7998 0 -124 -55.5 -124 -124s55.2002 -124 124 -124zM407.5 174.2h35.2002v35.5h-35.2002v35.5h-35.5v-35.5h-35.5v-35.5h35.5v-35.5h35.5v35.5z" />
+ <glyph glyph-name="font-awesome" unicode="&#xf2b4;"
+d="M397.8 416c27.5 0 50.2002 -22.7002 50.2002 -50.2002v-347.6c0 -27.5 -22.7002 -50.2002 -50.2002 -50.2002h-347.6c-27.5 0 -50.2002 22.7002 -50.2002 50.2002v347.6c0 27.5 22.7002 50.2002 50.2002 50.2002h347.6zM352.4 131.7h0.0996094v140.3
+c0 4.2002 -4.2002 7.7998 -9 7.7998c-6 0 -31.0996 -16.0996 -53.7998 -16.0996c-4.7002 0 -8.90039 0.599609 -13.1006 2.39941c-20.2998 7.7002 -38.1992 13.7002 -60.8994 13.7002c-20.9004 0 -43 -6.5 -61.5 -14.2998
+c-1.7998 -1.2002 -3.60059 -1.7998 -5.40039 -2.40039v18.5c8.2998 6 13.1006 15.5 13.1006 26.3008c0 18.5996 -15 33.5 -33.5 33.5c-18.6006 0 -33.5 -15 -33.5 -33.5c0 -10.8008 5.2998 -20.3008 13.0996 -26.3008v-218.6c0 -11.2998 9 -20.2998 20.2998 -20.2998
+c8.90039 0 16.7002 5.89941 19.1006 14.2998v1.2002c0.599609 1.2002 0.599609 3 0.599609 4.7998v45.4004c1.2002 0.599609 2.40039 0.599609 3.59961 1.19922c19.7002 8.90039 44.2002 17.3008 67.5 17.3008c32.3008 0 44.8008 -16.7002 71.7002 -16.7002
+c19.2002 0 37.1006 6.5 53.7998 13.7002c4.2002 1.7998 7.80078 3.59961 7.80078 7.7998z" />
+ <glyph glyph-name="linode" unicode="&#xf2b8;" horiz-adv-x="447"
+d="M437.4 221.7c0.599609 -2 -8.80078 -66.2998 -9.7002 -72.7998c0 -0.900391 -0.5 -1.7002 -1.10059 -2l-54.5996 -43.7002c-1.09961 -0.900391 -2.59961 -0.900391 -3.7002 0l-20.2998 14l-2.2998 -33.4004c0 -0.899414 -0.200195 -1.7002 -1.10059 -2.2998
+l-66.8994 -53.4004c-1.10059 -0.899414 -2.90039 -0.899414 -4 0l-28 23.7002l2 -46c0 -0.899414 -0.200195 -1.7002 -1.10059 -2.2998l-83.6992 -66.9004c-0.600586 -0.299805 -1.10059 -0.599609 -1.7002 -0.599609c-0.900391 0.299805 -1.7002 0.299805 -2.2998 0.900391
+l-65.1006 69.0996c-1.5 1.40039 -15.5 72 -16.8994 79.0996c-0.300781 1.10059 0.5 2.5 1.39941 3.10059l17.4004 10.5996c-3.40039 3.2002 -26.5 23.4004 -27.1006 26.2998l-20.5996 100.301c-0.299805 1.09961 0.299805 2.5 1.7002 3.39941l26.8994 12.9004
+c-4.59961 3.5 -37.6992 27.5 -38.5996 30.8994l-27.4004 133.101c-0.299805 1.7002 0.600586 3.09961 2 3.7002l123.7 38.5996c0.600586 0 1.40039 0 2.2998 -0.299805l90.6006 -43.7002c0.799805 -0.599609 1.7002 -1.7002 1.7002 -2.59961l5.69922 -132.301
+c0 -1.19922 -0.599609 -2.2998 -1.69922 -2.89941l-33.7002 -17.4004l36 -24.2998c0.799805 -0.299805 1.39941 -1.40039 1.39941 -2.2998l1.40039 -35.1006l34.5996 21.2002c0.800781 0.600586 2.2002 0.600586 3.10059 0l24 -16l0.899414 31.4004
+c0 0.899414 0.5 2 1.40039 2.59961l58.9004 36c1.09961 0.600586 2.19922 0.600586 3.09961 0l70 -38.5996c0.5 -0.600586 1.09961 -1.10059 1.40039 -2zM232.6 216.9l-100.6 -57.2002l14 -96.6006l90.5996 61.2002zM224.9 396.9l-120.9 -46.6006l19.7002 -134.8
+l106.6 55.4004zM44 274.9l73.0996 -57.2002l-19.3994 132.899l-79.7002 49.4004zM74.5996 127.1l64.8008 -60.7998l-13.7002 93.4004l-70 58.2998zM98.9004 9.40039l57.6992 -61.2002l-9.69922 67.3994l-61.7002 60.9004zM163.4 -55.0996l78.1992 62.2998l-3.09961 70
+l-85.7002 -61.4004zM245.4 60l27.0996 -22.9004l-0.599609 68.3008l-29.4004 22.5996c0 -2.2998 1.2002 -6.2998 -1.09961 -8l-22.3008 -14.9004l24.3008 -20c2.89941 -2.19922 2 -21.6992 2 -25.0996zM339.7 85.4004l4.2002 66.8994l-65.7002 -46.8994l0.599609 -68.6006z
+M367.4 111.1l5.7998 66.6006l-64.6006 40.5996l-0.599609 -30l41.2002 -27.2002c0.799805 -0.599609 1.39941 -1.69922 1.09961 -2.59961l-2 -34zM422 150.9l8.5 63.3994l-51.0996 -36.5996l-5.7002 -65.1006z" />
+ <glyph glyph-name="quora" unicode="&#xf2c4;" horiz-adv-x="447"
+d="M440.5 61.2998c1.7998 -18 -7.2002 -93.2998 -89 -93.2998c-49.5 0 -75.5 28.7002 -95.2002 62.2998c-117.7 -32.5996 -249 54.9004 -249 189c0 117 98 196.7 197.7 196.7c101.8 0 198.5 -79.2002 198.4 -196.7c0 -65.5 -30.5 -118.8 -74.7002 -153
+c14.2002 -21.5996 29 -35.7998 49.5 -35.7998c22.5 0 31.5 17.2998 33 30.7998h29.2998zM297 118.8c11.2998 24.9004 16.7998 58.7002 16.7002 100.5c0 104.2 -32.5 157.7 -108.7 157.7c-75 0 -107.5 -53.5 -107.5 -157.9c0 -103.699 32.5 -156.699 107.5 -156.699
+c12 0 22.7002 1.19922 32.7002 4.19922c-15.5 30.5 -33.7002 61.3008 -69.2002 61.3008c-6.7998 0 -13.5996 -1 -19.7998 -4l-12.2002 24.2998c14.7002 12.7998 38.5 22.7998 69 22.7998c47.7998 0 72 -23 91.5 -52.2002z" />
+ <glyph glyph-name="free-code-camp" unicode="&#xf2c5;" horiz-adv-x="575"
+d="M69.2998 303.5c-41 -68.5 -36.3994 -163 1 -227c22.2002 -38.2002 49.7002 -52.4004 49.7002 -66.5c0 -6.7998 -6 -13 -12.7998 -13c-19.5 0 -99.2002 75.5 -99.2002 197.8c0 111.5 78 186 97.0996 186c6 0 14.9004 -4.7998 14.9004 -11.0996
+c0 -12.7002 -28.2998 -28.6006 -50.7002 -66.2002zM265.1 89.7002c-37.1992 13.5996 -65.5 45.8994 -65.2998 86.2002c0 48 57.7002 90.0996 57.7002 136.199c0 16.8008 -10.4004 32.6006 -19.5996 38.2002c-1.90039 1 -4.60059 2.7002 -4.60059 5.10059
+c0 9.59961 26.1006 2.7998 36.5 -2.2002c33.6006 -15.9004 40.6006 -40.2998 46.4004 -74.1006c1.39941 -7.89941 4.2998 -33.2998 15.8994 -33.2998c7.5 0 12.3008 5.10059 12.3008 12.2998c0 12.6006 -15.4004 31.2002 -7.2002 31.2002
+c6.09961 0 18.5996 -12.7998 22.5 -16.8994c23.3994 -24.9004 32.0996 -49 32.0996 -82.6006c0 -42.2002 -23.3994 -74.7002 -53.0996 -89.7998c-9.2002 -5.7998 -12.1006 0.900391 -12.1006 1.90039c0 7 29.5 23.5996 29.5 56c0 10.5996 -2.69922 22.5 -8.5 31.3994
+c-1.69922 2.40039 -7.69922 10.1006 -11.0996 10.1006c-0.700195 0 -0.700195 -0.5 -0.700195 -1.2002c0 -5.7998 3.60059 -11.4004 3.60059 -17.4004c0 -13 -31.9004 -20.2002 -31.9004 6.7998c0 7.10059 0.700195 14.3008 0.700195 21.3008
+c0 5.09961 -0.200195 6.5 -2.40039 11.0996c-3.39941 6.5 -14.5 19.7998 -22.5 19.7998c-2.2002 0 -2.89941 0 -2.89941 -2.2002c0 -3.39941 7.69922 -7 7.69922 -19.2998c0 -32.0996 -44.1992 -37.8994 -44.1992 -70c0 -14.3994 1.89941 -26.5 10.0996 -38.5996
+c5.09961 -7.5 10.5996 -11.7998 19.0996 -15.2002c2.10059 -0.700195 4.30078 -0.900391 4.30078 -3.59961c0 -6.40039 -7.80078 -3 -12.3008 -1.2002zM470.4 381c21.3994 0 97.5996 -78.9004 97.5 -198.2c0 -104.899 -73.4004 -185.7 -98.8008 -185.7
+c-5 0 -13.1992 6.30078 -13.1992 11.4004c0 8.2002 28.2998 34.5996 35.2998 43.5c61 76.7002 64 205.9 -17.6006 291c-5.5 5.7998 -17.5996 16.7002 -17.5996 25.4004c0 6.09961 8.40039 12.5996 14.4004 12.5996zM428.1 57.9004c8.40039 0 11.9004 -7 11.9004 -15.5
+c0 -8.90039 -2.5 -16.4004 -11.9004 -16.4004h-261.1c-8.5 0 -15.5 7 -15.5 15.5c0 8.90039 6.09961 16.4004 15.5 16.4004h261.1z" />
+ <glyph glyph-name="telegram" unicode="&#xf2c6;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM369.8 270.1c3.60059 16.8008 -6.09961 23.5 -17.2002 19.5l-239.1 -92.1992c-16.4004 -6.40039 -16.0996 -15.5 -2.7998 -19.7002l61.2002 -19.1006l142 89.4004
+c6.59961 4.40039 12.6992 1.90039 7.69922 -2.5l-114.899 -103.8l-4.40039 -63.1006c6.40039 0 9.2002 2.80078 12.5 6.10059l29.9004 28.7998l62 -45.7002c11.2998 -6.39941 19.3994 -3.09961 22.3994 10.5z" />
+ <glyph glyph-name="bandcamp" unicode="&#xf2d5;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM296.2 113.9l84.7002 156.1h-181l-84.7002 -156.1h181z" />
+ <glyph glyph-name="grav" unicode="&#xf2d6;" horiz-adv-x="512"
+d="M301.1 236c4.40039 -4.40039 4.40039 -11.9004 0 -16.2998l-9.69922 -9.7002c-4.40039 -4.7002 -11.9004 -4.7002 -16.6006 0l-10.5 10.5c-4.39941 4.7002 -4.39941 11.9004 0 16.5996l9.7002 9.7002c4.40039 4.40039 11.9004 4.40039 16.5996 0zM270.9 255.7
+c-2.7002 -2.7998 -7.40039 -2.7998 -10.5 0c-2.80078 3 -2.80078 7.7002 0 10.5c3 3 7.69922 3 10.5 0c3 -2.7002 3 -7.5 0 -10.5zM244.9 250.4c2.7998 3 7.5 3 10.5 0c2.7998 -2.7002 2.7998 -7.40039 0 -10.2002c-3 -3 -7.7002 -3 -10.5 0c-3 2.7002 -3 7.39941 0 10.2002
+zM317.4 263.7c-19.9004 14.3994 -33.8008 43.2002 -11.9004 68.0996c21.5996 24.9004 40.7002 17.2002 59.7998 -0.799805c11.9004 -11.2998 29.2998 -24.9004 17.2002 -48.2002c-12.5 -23.5 -45.0996 -33.2002 -65.0996 -19.0996zM365.1 308.2
+c-8.89941 10 -23.2998 -6.90039 -15.5 -16.1006c7.40039 -9 32.1006 -2.39941 15.5 16.1006zM504 192c0 -137 -111 -248 -248 -248s-248 111 -248 248s111 248 248 248s248 -111 248 -248zM437.8 149.4c2.5 16.0996 -20.2002 16.5996 -25.2002 25.6992
+c-13.5996 24.1006 -27.6992 36.8008 -54.5 30.4004c11.6006 8 23.5 6.09961 23.5 6.09961c0.300781 6.40039 0 13 -9.39941 24.9004c3.89941 12.5 0.299805 22.4004 0.299805 22.4004c15.5 8.59961 26.7998 24.3994 29.0996 43.1992
+c3.60059 31 -18.7998 59.2002 -49.7998 62.8008c-22.0996 2.5 -43.7002 -7.7002 -54.2998 -25.7002c-23.2002 -40.1006 1.40039 -70.9004 22.4004 -81.4004c-14.4004 1.40039 -34.3008 11.9004 -40.1006 34.2998c-6.59961 25.7002 2.7998 49.8008 8.90039 61.4004
+c0 0 -4.40039 5.7998 -8 8.90039c0 0 -13.7998 0 -24.6006 -5.30078c11.9004 15.2002 25.2002 14.4004 25.2002 14.4004c0 6.40039 -0.599609 14.9004 -3.59961 21.5996c-5.40039 11 -23.7998 12.9004 -31.7002 -2.7998c0.0996094 0.200195 0.299805 0.400391 0.400391 0.5
+c-5 -11.8994 -1.10059 -55.8994 16.8994 -87.2002c-2.5 -1.39941 -9.09961 -6.09961 -13 -10c-21.5996 -9.69922 -56.2002 -60.2998 -56.2002 -60.2998c-28.1992 -10.7998 -77.1992 -50.8994 -70.5996 -79.7002c0.299805 -3 1.40039 -5.5 3 -7.5
+c-2.7998 -2.19922 -5.5 -5 -8.2998 -8.2998c-11.9004 -13.7998 -5.2998 -35.2002 17.7002 -24.3994c15.7998 7.19922 29.5996 20.1992 36.2998 30.3994c0 0 -5.5 5 -16.2998 4.40039c27.6992 6.59961 34.2998 9.39941 46.1992 9.09961c8 -3.89941 8 34.2998 8 34.2998
+c0 14.7002 -2.19922 31 -11.0996 41.5c12.5 -12.1992 29.0996 -32.6992 28 -60.5996c-0.799805 -18.2998 -15.2002 -23 -15.2002 -23c-9.09961 -16.5996 -43.2002 -65.9004 -30.3994 -106c0 0 -9.7002 14.9004 -10.2002 22.0996
+c-17.4004 -19.3994 -46.5 -52.2998 -24.6006 -64.5c26.6006 -14.6992 108.801 88.6006 126.2 142.301c34.6006 20.7998 55.4004 47.2998 63.9004 65c22 -43.5 95.2998 -94.5 101.1 -59z" />
+ <glyph glyph-name="etsy" unicode="&#xf2d7;" horiz-adv-x="384"
+d="M384 100c-1.75 -10.75 -13.75 -110 -15.5 -132c-117.879 4.29883 -219.895 4.74316 -368.5 0v25.5c45.457 8.94824 60.627 8.01855 61 35.25c1.79297 72.3223 3.52441 244.143 0 322c-1.0293 28.46 -12.1299 26.7646 -61 36v25.5
+c73.8857 -2.3584 255.933 -8.55078 362.999 3.75c-3.5 -38.25 -7.75 -126.5 -7.75 -126.5h-23.249c-11.0527 42.835 -18.7588 90.5 -54.75 90.5h-137c-10.25 0 -10.75 -3.5 -10.75 -9.75v-163.75c58 -0.5 88.5 2.5 88.5 2.5c29.7695 0.951172 27.5596 8.50195 40.75 65.251
+h25.75c-4.40723 -101.351 -3.91016 -61.8291 -1.75 -160.25h-25.75c-9.15527 40.0859 -9.06543 61.0449 -39.501 61.5c0 0 -21.5 2 -88 2v-139c0 -26 14.25 -38.25 44.25 -38.25h89.251c63.6357 0 66.5645 24.9961 98.751 99.75h22.249v-0.000976562z" />
+ <glyph glyph-name="imdb" unicode="&#xf2d8;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM21.2998 218.8h-0.299805c0.0996094 0.100586 0.200195 0.299805 0.299805 0.400391v-0.400391zM97 128.2v127.8h-33v-127.8h33z
+M210.2 128.2v127.8h-43l-7.60059 -59.9004c-2.69922 20 -5.39941 40.1006 -8.69922 59.9004h-42.8008v-127.8h29v84.5l12.2002 -84.5h20.6006l11.5996 86.3994v-86.3994h28.7002zM221.6 128.2c86.1006 -0.100586 75 -6 75 82.5c0 8.09961 0.300781 16.7998 -1.39941 24.3994
+c-4.2998 22.5 -31.4004 20.9004 -49 20.9004h-24.6006v-127.8zM382.5 157.4v36c0 17.2998 -0.799805 30.0996 -22.2002 30.0996c-8.89941 0 -14.8994 -2.7002 -20.8994 -9.2002v41.7002h-31.7002v-127.8h29.7998l1.90039 8.09961
+c5.69922 -6.7998 11.8994 -9.7998 20.8994 -9.7998c19.7998 0 22.2002 15.2002 22.2002 30.9004zM265 218.1v-49.2998c0 -9.7002 1.90039 -18.7002 -10.2998 -18.3994v83.6992c11.8994 0 10.2998 -6.2998 10.2998 -16zM350.5 192v-32.7002
+c0 -5.39941 1.59961 -14.3994 -6.2002 -14.3994c-1.59961 0 -3 0.799805 -3.7998 2.39941c-2.2002 5.10059 -1.09961 44.1006 -1.09961 44.7002c0 3.7998 -1.10059 12.7002 4.89941 12.7002c7.2998 0 6.2002 -7.2998 6.2002 -12.7002z" />
+ <glyph glyph-name="ravelry" unicode="&#xf2d9;" horiz-adv-x="511"
+d="M407.4 386.5c72.6992 -37.9004 112 -117.2 103.3 -199.5c-1.7002 -16.7002 -4.40039 -36.2002 -9.7998 -52.2002c-22.2002 -65.7002 -52.9004 -108.6 -123.101 -147.7c-6.39941 -4.39941 -13.2998 -8.59961 -20.2002 -10.7998
+c-12.5 -4.39941 -26.0996 -5.39941 -40.0996 -3.89941c-5.90039 -0.5 -11.7998 -0.700195 -18 -0.700195c-93.7002 0 -173 64 -196.9 151.399c-0.699219 0 -1.5 0.200195 -2.19922 0.200195c-5.60059 -44.2998 27.0996 -104.1 27.0996 -104.1s2 -3 13.2998 -20.2002
+c-62.7998 33.2002 -64.5 131.2 -64.5 131.2c-15 5.59961 -67.2002 23.3994 -76.2998 37.8994c0 0 40.9004 -22.3994 76.2002 -27c-0.200195 0.300781 0.5 7.90039 0.5 7.90039c2.2002 30 12.5 53.4004 23.0996 71.4004c6.90039 33.7998 22.1006 64.2998 43.2998 89.8994
+c3.7002 15.2998 9.60059 33.5 19.9004 52.7002c4.40039 8.40039 8.59961 13.7998 19.9004 19c74.8994 35 148.699 43.9004 224.5 4.5zM138.8 284.8c-7.59961 -11.2998 -13.7002 -23.5996 -18.8994 -36.3994c8.09961 8.59961 14.7998 14.1992 18.1992 16.6992
+c-0.5 7.40039 0.700195 19.7002 0.700195 19.7002zM107.6 162.9c0.700195 -9.60059 2 -18.9004 4.2002 -28.1006l41.4004 -6.89941c-14.1006 42.0996 -15.7998 90.0996 -15.7998 90.0996c-16.5 -16 -25.4004 -37.9004 -29.8008 -55.0996zM115.5 120.1
+c21.4004 -69.6992 81 -122.8 154.1 -134.399c-1 0.299805 -1.69922 0.5 -2.69922 1c0 0 -81 47.5 -108.301 124.3c-9.09961 1.5 -28.2998 5.90039 -43.0996 9.09961zM386 3.90039c63 32 106.6 98 106.8 174c0 107.399 -86.5996 194.5 -193 194.5
+c-49.2998 0 -94.0996 -18.7002 -128.3 -49.5c-5.2002 -10.1006 -8.59961 -22.9004 -11.0996 -39.4004c52.5 44.5996 146 33.5 146 33.5c23.3994 -1 20.5996 -21.7002 20.3994 -28.0996c-85.2002 7.19922 -127 -17.2002 -168.399 -52.4004
+c0 0 8.09961 -78.7998 26.7998 -110.8c107.8 -4.90039 189.8 53.7002 189.8 53.7002c10.2998 7.39941 19.4004 8.09961 21.4004 -4.7002c1.5 -10.4004 2.19922 -24.4004 -9.60059 -29.7998c-36 -16.8008 -75.5996 -27.3008 -115 -33
+c-25.5996 -3.7002 -39.7998 -4.60059 -78 -3.90039c36.4004 -84.7002 127.5 -107.8 127.5 -107.8c28.5 -4.7002 50.2002 -1 64.7002 3.7002z" />
+ <glyph glyph-name="sellcast" unicode="&#xf2da;"
+d="M353.4 416c52.0996 0 94.6992 -42.5996 94.6992 -94.5996v-258.801c0 -52 -42.5996 -94.5996 -94.6992 -94.5996h-258.7c-52.1006 0 -94.7002 42.5996 -94.7002 94.7002v258.7c0 52 42.5996 94.5996 94.7002 94.5996h258.7zM303.4 99.5996
+c27.8994 48.2002 11.1992 110.5 -37.2002 138.5c-18.6006 10.8008 0.0996094 -0.0996094 -18.5 10.7002c-25 14.4004 -46.2002 -23.2998 -21.6006 -37.5c18 -10.2002 0.800781 -0.399414 18.6006 -10.5996c27.5996 -16 37.2002 -51.7998 21.2998 -79.4004
+c-16 -27.5996 -51.7998 -37.2002 -79.4004 -21.2998c-18.5996 10.7998 0.100586 -0.0996094 -18.5 10.7002c-10.2998 6 -23.5996 2.39941 -29.5 -7.90039l-15.6992 -27.2002c-12.6006 -21.7998 19.3994 -53 42.2998 -13.1992c48.2998 -27.7002 110.3 -11 138.2 37.1992z
+M325.2 308.4c14.2998 24.7998 -23.4004 46.3994 -37.7002 21.5l-4.7998 -8.40039c-48.2998 27.7002 -110.3 11 -138.2 -37.2002c-27.7998 -48.2998 -11.0996 -110.6 37.0996 -138.399c18.6006 -10.8008 -0.0996094 0.0996094 18.5 -10.7002
+c25 -14.4004 46.2002 23.2998 21.6006 37.5c-0.100586 0 -18.6006 10.5996 -18.6006 10.5996c-27.5996 16 -37.2998 51.7998 -21.2998 79.4004c16 27.5996 51.7998 37.2002 79.4004 21.2998c18.5996 -10.7998 -0.100586 0.0996094 18.5 -10.7002
+c10.2002 -5.09961 20 -2.89941 26.5 3.60059c2.7002 2.69922 2 2 19 31.5z" />
+ <glyph glyph-name="superpowers" unicode="&#xf2dd;"
+d="M448 416l-87.2002 -87c39.7002 -38.7002 61.2002 -92.7002 57.7002 -148.2c-5.40039 -93 -76.9004 -167.3 -168.7 -179.8c-83.2998 -11 -166.5 -22 -249.8 -33l86.7998 86.7998c-39.7998 38.7002 -61.0996 92.7002 -57.7998 148.2c5.7002 93.2998 77 167.5 169 180
+c83.2002 11 166.7 22 250 33zM368.3 183.7c4.40039 80 -56.7998 146.3 -136.1 151c-78.7002 4.7998 -148.5 -55.2998 -153 -134.5c-4.40039 -80 56.7998 -146.3 136.3 -151c78.7998 -4.7002 148.6 55 152.8 134.5z" />
+ <glyph glyph-name="wpexplorer" unicode="&#xf2de;" horiz-adv-x="512"
+d="M512 192c0 -141.2 -114.7 -256 -256 -256c-141.2 0 -256 114.7 -256 256s114.7 256 256 256s256 -114.7 256 -256zM480 192c0 123.2 -100.3 224 -224 224c-123.5 0 -224 -100.5 -224 -224s100.5 -224 224 -224s224 100.5 224 224zM160.9 323.4l86.8994 -37.1006
+l-37.0996 -86.8994l-86.9004 37.0996zM270.9 154.3l46.5996 -94h-14.5996l-50 100l-48.9004 -100h-14l51.0996 106.9l-22.2998 9.39941l6 14l68.6006 -29.0996l-6 -14.2998zM259.1 270.6l68.6006 -29.3994l-29.4004 -68.2998l-68.2998 29.0996zM339.4 227.7
+l54.5996 -23.1006l-23.4004 -54.2998l-54.2998 23.1006z" />
+ <glyph glyph-name="meetup" unicode="&#xf2e0;" horiz-adv-x="527"
+d="M99 33.7002c1.09961 -5.7002 -2.2998 -11.1006 -8 -12.2998c-5.40039 -1.10059 -10.9004 2.2998 -12 8c-1.09961 5.39941 2.2998 11.0996 7.7002 12.2998c5.39941 1.2002 11.0996 -2.2998 12.2998 -8zM242.1 -37.7002c6.60059 4.60059 15.5 2.7998 19.7002 -3.7002
+c4.60059 -6.59961 2.90039 -15.3994 -3.39941 -20c-6.60059 -4.59961 -15.4004 -2.89941 -20 3.7002c-4.30078 6.60059 -2.60059 15.4004 3.69922 20zM156.1 424.6c-6.2998 -1.5 -12.5 2.5 -13.8994 9.10059c-1.2002 6.2998 2.7998 12.5996 9.09961 14
+c6.2998 1.5 12.6006 -2.5 13.7002 -9.10059c1.40039 -6.2998 -2.59961 -12.5996 -8.90039 -14zM34.4004 221.7c10 -7.10059 12.5996 -20.7998 5.69922 -31.2002c-6.89941 -10.2998 -20.5996 -12.7998 -30.5996 -5.7002c-10 6.90039 -12.5996 20.9004 -5.7002 30.9004
+c6.90039 10.2998 20.6006 12.8994 30.6006 6zM306.4 392.6c-10.3008 -6.2998 -23.7002 -2.89941 -29.7002 7.40039c-6.2998 10.5996 -2.90039 24.2998 7.39941 30.5996c10.3008 6.30078 23.7002 2.90039 30 -7.69922c6 -10.3008 2.90039 -24 -7.69922 -30.3008zM115.3 334.6
+c-7.5 -5.19922 -18 -3.5 -23.0996 4.30078c-5.10059 7.69922 -3.40039 18.2998 4.2998 23.6992c7.40039 5.10059 18 3.40039 23.0996 -4.2998c5.10059 -7.7002 3.40039 -18.2998 -4.2998 -23.7002zM487.6 178.6c7.40039 1.40039 14.8008 -3.5 16.3008 -10.8994
+c1.69922 -7.7002 -3.2002 -15.2002 -10.6006 -16.6006c-7.39941 -1.69922 -14.8994 3.2002 -16.2998 10.6006c-1.7002 7.7998 3.2002 15.2002 10.5996 16.8994zM527.3 235.4c1.40039 -5.7002 -2.2998 -11.1006 -7.7002 -12.6006
+c-5.69922 -1.09961 -11.1992 2.60059 -12.2998 8c-1.09961 5.7002 2.2998 11.5 8 12.6006c5.40039 1.09961 10.9004 -2.30078 12 -8zM447 309.1c8.2998 6 20 3.80078 25.7002 -4.89941c5.7002 -8.60059 3.7002 -20.2998 -4.60059 -26.2998
+c-8.59961 -5.7002 -20.2998 -3.7002 -26 4.89941c-5.69922 8.60059 -3.69922 20.2998 4.90039 26.2998zM440.7 169.7c26.2998 -43.1006 15.0996 -100 -26.2998 -129.101c-17.4004 -12.2998 -37.1006 -17.6992 -56.9004 -17.0996
+c-12 -47.0996 -69.4004 -64.5996 -105.1 -32.5996c-1.10059 -0.900391 -2.60059 -1.7002 -3.7002 -2.90039c-39.1006 -27.0996 -92.2998 -17.4004 -119.4 22.2998c-9.7002 14.2998 -14.5996 30.6006 -15.0996 46.9004c-65.4004 10.8994 -90 94 -41.1006 139.7
+c-28.2998 46.8994 0.600586 107.399 53.4004 114.899c25.0996 66.2002 107.6 97.6006 163.6 54.2002c67.4004 22.2998 136.301 -29.4004 130.9 -101.1c41.0996 -12.6006 52.7998 -66.9004 19.7002 -95.2002zM370.7 95.4004
+c-3.10059 20.5996 -40.9004 4.59961 -43.1006 27.0996c-3.09961 32 43.7002 101.1 40 128c-3.39941 24 -19.3994 29.0996 -33.3994 29.4004c-13.4004 0.299805 -16.9004 -2 -21.4004 -4.60059c-2.89941 -1.7002 -6.59961 -4.89941 -11.7002 0.299805
+c-6.2998 6 -11.0996 11.7002 -19.3994 12.9004c-12.2998 2 -17.7002 -2 -26.6006 -9.7002c-3.39941 -2.89941 -12 -12.8994 -20 -9.09961c-3.39941 1.7002 -15.3994 7.7002 -24 11.3994c-16.2998 7.10059 -40 -4.59961 -48.5996 -20
+c-12.9004 -22.8994 -38 -113.1 -41.7002 -125.1c-8.59961 -26.5996 10.9004 -48.5996 36.9004 -47.0996c11.0996 0.599609 18.2998 4.59961 25.3994 17.3994c4 7.40039 41.7002 107.7 44.6006 112.601c2 3.39941 8.89941 8 14.5996 5.09961
+c5.7002 -3.09961 6.90039 -9.40039 6 -15.0996c-1.09961 -9.7002 -28 -70.9004 -28.8994 -77.7002c-3.40039 -22.9004 26.8994 -26.6006 38.5996 -4c3.7002 7.09961 45.7002 92.5996 49.4004 98.2998c4.2998 6.2998 7.39941 8.2998 11.6992 8
+c3.10059 0 8.30078 -0.900391 7.10059 -10.9004c-1.40039 -9.39941 -35.1006 -72.2998 -38.9004 -87.6992c-4.59961 -20.6006 6.60059 -41.4004 24.9004 -50.6006c11.3994 -5.7002 62.5 -15.7002 58.5 11.1006zM376.4 3.09961c10.5996 7.5 24.8994 4.60059 32.2998 -6
+c7.09961 -10.5996 4.59961 -25.1992 -6 -32.5996c-10.6006 -7.09961 -24.9004 -4.59961 -32 6c-7.2002 10.5996 -4.60059 25.2002 5.7002 32.5996z" />
+ <glyph glyph-name="font-awesome-alt" unicode="&#xf35c;"
+d="M339.3 276.8c5.40039 0 9.5 -3 7.7002 -7.09961v-134.4c0 -4.2002 -3 -6 -7.2002 -7.7998c-15.5996 -7.09961 -33.5 -13.7002 -52 -13.7002c-26.2998 0 -38.2002 16.1006 -69.2998 16.1006c-22.7002 0 -46 -8.30078 -65.7002 -16.7002
+c-0.599609 -0.600586 -1.7998 -1.2002 -3 -1.2002v-44.2002c0 -1.7998 0 -3 -0.599609 -4.7998v-1.2998c-2.40039 -7.7002 -9.5 -13.7002 -18.5 -13.7002c-10.7002 0 -19.7002 8.90039 -19.7002 19.7002v212.1c-7.7002 6 -12.5 15.5 -12.5 25.7002
+c0 18 14.2998 32.2998 32.2998 32.2998s32.2998 -14.3994 32.2998 -32.2998c0 -10.7998 -4.69922 -19.7002 -12.5 -25.7002v-17.8994c1.2002 0.599609 3 1.19922 4.80078 1.7998c17.8994 7.09961 39.3994 13.7002 59.6992 13.7002
+c22.1006 0 39.4004 -5.90039 59.1006 -13.7002c4.09961 -1.7998 8.2998 -2.40039 12.5 -2.40039c22.7002 0 46.5996 15.5 52.5996 15.5zM397.8 416c27.5 0 50.2002 -22.7002 50.2002 -50.2002v-347.6c0 -27.5 -22.7002 -50.2002 -50.2002 -50.2002h-347.6
+c-27.5 0 -50.2002 22.7002 -50.2002 50.2002v347.6c0 27.5 22.7002 50.2002 50.2002 50.2002h347.6zM412.1 18.2998v347.601c0 7.69922 -6.5 14.2998 -14.2998 14.2998v-0.100586h-347.6c-7.7002 0 -14.2998 -6.5 -14.2998 -14.2998v-347.5
+c0 -7.7002 6.5 -14.2998 14.2998 -14.2998h347.6c7.7002 0 14.2998 6.5 14.2998 14.2998z" />
+ <glyph glyph-name="accessible-icon" unicode="&#xf368;"
+d="M423.9 192.2l-12.9004 -157.3c-3.2998 -40.7002 -63.9004 -35.1006 -60.5996 4.89941l10 122.5l-41.1006 -2.2998c10.1006 -20.7002 15.7998 -43.9004 15.7998 -68.5c0 -41.2002 -16.0996 -78.7002 -42.2998 -106.5l-39.2998 39.2998
+c57.9004 63.7002 13.0996 167.2 -74 167.2c-25.9004 0 -49.5 -9.90039 -67.2002 -26l-39.2998 39.2998c22 20.7002 50.0996 35.1006 81.4004 40.2002l75.2998 85.7002l-42.6006 24.7998l-51.5996 -46c-30 -26.7998 -70.5996 18.5 -40.5 45.4004l68 60.6992
+c9.7998 8.80078 24.0996 10.2002 35.5 3.60059c0 0 139.3 -80.9004 139.5 -81.1006c16.2002 -10.0996 20.7002 -36 6.09961 -52.5996l-58.3994 -66.5l106.1 5.90039c18.5 1.09961 33.6006 -14.4004 32.1006 -32.7002zM359 346.2
+c-28.0996 0 -50.9004 22.7998 -50.9004 50.8994c0 28.1006 22.8008 50.9004 50.9004 50.9004s50.9004 -22.7998 50.9004 -50.9004c0 -28.0996 -22.8008 -50.8994 -50.9004 -50.8994zM179.6 -8.5c20.8008 0 40.1006 6.40039 56.1006 17.2998l39.7002 -39.7002
+c-100.7 -78.8994 -251.4 -8.19922 -251.4 122.5c0 36.1006 12.4004 69.4004 33.2002 95.7002l39.7002 -39.7002c-44.7002 -65.5 2.09961 -156.1 82.6992 -156.1z" />
+ <glyph glyph-name="accusoft" unicode="&#xf369;" horiz-adv-x="640"
+d="M322.1 196c-1.69922 -1.59961 -89.5996 -82.5 -90.1992 -83.2998l-92.6006 -33.7998c-4.7998 -2 -7.59961 -3.7002 -7 -8.90039c0.200195 -1.5 0.600586 -22.5996 1 -27.7002c-0.700195 -0.5 -0.0996094 0 -0.599609 -0.599609c0 0 -113.7 -36.6006 -114.5 -36.6006
+c-14.1006 -5.09961 -22.7002 -8.2998 -15.7002 1.7002c1.2998 1.7998 234.4 231.601 243.4 240.9c13 13.5 25 15.0996 25 15.0996l51.1992 -65.7998v-1zM482.2 75.9004c-5.7002 6.89941 -232.2 297.1 -239.9 306.6c-13.7002 17.2002 0 16.7998 19.2002 16.9004
+c9.7002 0.0996094 106.3 0.599609 116.5 0.599609c24.0996 0.0996094 28.7002 -0.599609 38.4004 -12.7998c2.09961 -2.7002 205.1 -245.8 207.199 -248.3c5.5 -6.7002 15.2002 -19.1006 7.2002 -23.4004c-2.39941 -1.2998 -114.6 -47.7002 -117.8 -48.9004
+c-10.0996 -4 -17.5 -6.7998 -30.7998 9.30078zM634.9 74.2998c6 -1.39941 7.09961 -4.2002 1.69922 -8.2002c-2 -1.39941 -123.699 -76.5996 -125.8 -77.7998c-15.0996 -8.7998 -38 -1.59961 -53.5996 1.7002c-7.10059 1.5 -305.3 68.2998 -308 69.0996
+c-2.60059 0.900391 -4.40039 1 -4.60059 3.5c-0.299805 4 6 5.60059 11.1006 7.60059c5 1.89941 145.3 52.5996 150.2 54.7002c4.7998 2.09961 11.2998 2.69922 14.3994 2.89941c4.90039 0.299805 59.9004 -8.39941 65.2998 -9.2998l57.1006 -74
+c9.7998 -11.4004 20.7002 -21.9004 36.7002 -14.5996c2.5 1.19922 117.5 51.5996 117.5 51.5996c13.3994 -2.5 35.6992 -6.90039 38 -7.2002z" />
+ <glyph glyph-name="adversal" unicode="&#xf36a;" horiz-adv-x="512"
+d="M482.1 416c24.5 0 29.9004 -5.59961 29.9004 -30.2002v-388.1c0 -24.5 -5.5 -29.7002 -29.9004 -29.7002h-453.399c-22.9004 0 -28.7002 5.59961 -28.7002 28.9004v390.199c0 23 5.7998 28.9004 28.7002 28.9004h453.399zM178.4 227.7
+c9.39941 -7.2002 12.3994 -17.1006 11.2998 -27.2998c-1.7998 -19.1006 -75.7998 -11.4004 -114 -30.9004c-27.2002 -13.9004 -42.7002 -41.7002 -39.6006 -71c6.7002 -64.7002 89.6006 -79.7002 147 -43.2998c4.60059 3.2002 8.30078 4.89941 11.9004 1
+c2.09961 -2.60059 2 -4 3.90039 -6.2002c7.2998 -9.59961 38.1992 -14.0996 46.5996 -7.40039c3.09961 2.80078 4.59961 6.30078 2.7002 10.7002c-13.6006 30.5 -6.60059 63 -9.2998 88.7998c0 69.3008 6.39941 111.7 -34.5 128.5
+c-41.9004 17.4004 -84.2002 16.6006 -125.301 -4.7998c-16.2998 -9 -53.6992 -52.8994 -24.8994 -64.2998c5.2998 -2.2998 12.7998 -4 22.5 -5.5c8.2002 -1.2002 13.2002 -2.7998 17.5 8.2998c12.0996 32.1006 56.7002 43.6006 84.2002 23.4004zM465.1 5.7002
+c0 14.2998 -9.7998 9.89941 -16.5996 9.89941c-132.3 0.400391 -264.5 0.400391 -396.8 0c-6.60059 0 -16.7002 4.80078 -17.1006 -9.09961c-0.399414 -15.5 10.4004 -10.7002 17.8008 -10.7002h394.899c6.7002 0 17.7998 -5.2002 17.7998 9.90039zM468.9 346.2
+c0 0.200195 0 0.299805 0.0996094 0.5c0 9.89941 -3.5 15.0996 -13.5996 14.2998c-3.10059 -0.400391 -6.60059 0 -9.7002 0c-26.1006 0 -26 0 -26 -26.2002v-71c-79.2002 45.6006 -124.3 -6.59961 -136.101 -30.5c-16.3994 -32.8994 -21.7998 -66.5996 -15.6992 -100
+c16.2998 -92.2998 91 -114.899 144.399 -85.2002c4.60059 2.80078 6.60059 7.5 12.4004 -1.19922c8.59961 -12.7002 23.7002 -5.2002 36.0996 -5.60059c7.40039 0 8.10059 8.2002 8.10059 13.9004v291zM417.4 113.9c-19.5 -47.6006 -72.9004 -43.3008 -90 -5.2002
+c-15.1006 33.2998 -15.5 68.2002 0.399414 101.5c16.2998 34.0996 59.7002 35.7002 81.5 4.7998c20.6006 -28.7998 14.9004 -84.5996 8.10059 -101.1zM122.6 78.5996c-7.5 1.30078 -33 3.30078 -33.6992 27.8008c-0.400391 13.8994 7.7998 23 19.7998 25.7998
+c24.3994 5.89941 49.2998 9.89941 73.7002 14.7002c8.89941 2 7.39941 -4.40039 7.7998 -9.5c1.39941 -33 -26.1006 -59.2002 -67.6006 -58.8008z" />
+ <glyph glyph-name="affiliatetheme" unicode="&#xf36b;" horiz-adv-x="511"
+d="M159.7 210.6c-51.2998 -70.8994 -116.601 -110.8 -145.7 -89.1992c-29.2002 21.6992 -11.2002 96.5996 40.2002 167.5c51.2998 70.8994 116.6 110.8 145.7 89.1992c29.0996 -21.5996 11.0996 -96.5996 -40.2002 -167.5zM510.9 267.9
+c0.699219 -8.2002 1.09961 -16.5 1 -25c0 -151.801 -121.601 -274.9 -271.601 -274.9c-82.8994 0 -157.2 37.5996 -207 96.9004c71.2998 19.3994 130.5 68.3994 164.101 133.199c7.69922 -32.5996 24 -58.5996 49 -73.7998c72.5996 -44.0996 190.699 20.2002 264.5 143.601z
+" />
+ <glyph glyph-name="algolia" unicode="&#xf36c;" horiz-adv-x="447"
+d="M229.3 265.4c49.2002 0 89.2002 -39.9004 89.2002 -89.2002s-39.9004 -89.2002 -89.2002 -89.2002s-89.2002 39.9004 -89.2002 89.2002s39.9004 89.2002 89.2002 89.2002zM292 208.8c1.2998 0.700195 1.7998 2.40039 1.09961 3.7002
+c-12.1992 21.4004 -34.8994 36.0996 -61.0996 37.0996c-1.40039 0.100586 -2.7002 -1.09961 -2.7002 -2.59961v-66.5c0 -1.90039 2 -3.2002 3.7998 -2.2998zM389.1 416c32.5 0 58.9004 -26.4004 58.8008 -58.9004v-330.199c0 -32.5 -26.3008 -58.9004 -58.9004 -58.9004
+h-330.1c-32.5 0 -58.9004 26.4004 -58.9004 59v330.1c0 32.5 26.4004 58.9004 58.9004 58.9004h330.199zM186.5 331.3h0.0996094v-15.7998c0 -1.7002 1.7002 -3 3.40039 -2.5c12.7002 3.7002 25.9004 5.5 39.4004 5.5c13 0 25.7998 -1.7002 38.0996 -5.09961
+c1.59961 -0.5 3.2998 0.699219 3.2998 2.5v15.3994c0 10.7998 -8.7002 19.5 -19.5 19.5h-45.2998c-10.7998 0 -19.5 -8.7002 -19.5 -19.5zM102.1 294.3c-7.59961 -7.59961 -7.59961 -19.8994 0 -27.3994l7.7002 -7.7002c1.10059 -1.2002 3 -1 4 0.299805
+c4.40039 6.09961 9.40039 12 14.7998 17.4004c5.5 5.5 11.4004 10.3994 17.6006 14.8994c1.2998 1 1.39941 2.90039 0.299805 4l-7.7002 7.7002c-7.59961 7.59961 -19.8994 7.59961 -27.5 0zM229.3 49.5c69.9004 0 126.601 56.7998 126.601 126.6
+c0 70 -56.6006 126.601 -126.601 126.601c-69.8994 0 -126.6 -56.7002 -126.6 -126.601c0 -69.8994 56.5996 -126.6 126.6 -126.6z" />
+ <glyph glyph-name="amilia" unicode="&#xf36d;"
+d="M240.1 416c134.101 0 191.9 -55.7002 192 -136v-296.6c0 -3 -1 -8.10059 -5.09961 -9.10059c-4 -1 -57.2998 -0.700195 -66.5 -0.700195s-56.7998 1 -59.9004 2c-4 0.900391 -6.09961 6.10059 -6.09961 9.10059v25.3994
+c-39.5996 -21.3994 -105.5 -42.0996 -153.3 -42.0996c-109.7 0 -124.9 85.7002 -124.9 104s-5.09961 95.5 30.4004 111.8c31.5 13.2002 156.3 36.5 243.7 47.7998v38.5c0 44.7002 -1 73.1006 -58.9004 73.1006c-55.7998 0 -119.8 -25.4004 -152.3 -47.7002
+c-6.10059 -4.09961 -16.2002 -4.09961 -20.2998 6.09961c-5.10059 12.2002 -9.10059 34.5 -10.2002 39.6006c-1.90039 10.2002 2.09961 16.2998 7.2002 19.3994c52.6992 38.5 122.3 55.4004 184.199 55.4004zM290.3 68v106.7c-44.7002 -4.10059 -95.5 -20.2998 -119.8 -33.5
+c-21.2998 -10.2002 -18.2998 -40.7002 -18.2998 -52.9004c0.0996094 -11.2002 6.2002 -44.7002 59 -44.7002c30.3994 0 57.7002 11.2002 79.0996 24.4004z" />
+ <glyph glyph-name="angrycreative" unicode="&#xf36e;" horiz-adv-x="640"
+d="M640 209.8l-3.2002 -28.2002l-34.5 -2.2998l-2 -18.0996l34.5 2.2998l-3.2002 -28.2002l-34.3994 -2.2002l-2.2998 -20.0996l34.3994 2.2002l-3 -26.1006l-64.7002 -4.09961l12.7002 113.2l-47.2998 -115.4l-31.9004 -2l-23.7998 117.8l30.2998 2l13.6006 -79.3994
+l31.7002 82.3994zM426.8 76.5l12.7998 120l28.4004 1.90039l-12.9004 -120.101zM162 59.9004l-19.4004 36l-3.5 -37.4004l-28.1992 -1.7002l2.69922 29.1006c-11 -18 -32 -34.3008 -56.8994 -35.8008c-32.7998 -2 -59.7002 20.9004 -56.4004 58.2002
+c2.60059 29.2998 26.7002 62.7998 67.5 65.4004c37.7002 2.39941 47.6006 -23.2002 51.2998 -28.7998l2.80078 30.7998l38.8994 2.5c20.1006 1.2998 38.7002 -3.7002 42.5 -23.7002l2.60059 26.5996l64.7998 4.2002l-2.7002 -27.8994l-36.4004 -2.40039l-1.69922 -17.9004
+l36.3994 2.30078l-2.7002 -27.9004l-36.3994 -2.2998l-1.90039 -19.9004l36.2998 2.2998l-2.09961 -20.7998l55 117.2l23.7998 1.59961l32.1006 -110.6l8.89941 85.5996l-22.2998 -1.39941l2.90039 27.8994l75 4.90039l-3 -28l-24.3008 -1.59961l-9.69922 -91.9004
+l-58 -3.7002l-4.30078 15.6006l-39.3994 -2.5l-8 -16.3008zM117.7 130.1l-26.4004 -1.69922c-6.7002 12.3994 -14.3994 16.5996 -26.2998 15.7998c-19 -1.2002 -33.2998 -17.5 -34.5996 -33.2998c-1.40039 -16 7.2998 -32.5 28.6992 -31.2002
+c12.8008 0.799805 21.3008 8.59961 28.9004 18.8994l27 1.7002zM173.8 137.8c1.2002 12.9004 -7.59961 13.6006 -26.0996 12.4004l-2.7002 -28.5c14.2002 0.899414 27.5 2.09961 28.7998 16.0996zM194.9 67l5.7998 60c-5 -13.5 -14.7002 -21.0996 -27.9004 -26.5996z
+M330.3 112l-7.89941 37.7998l-15.8008 -39.2998zM160.2 186.6l-4.2998 17.5l-39.6006 -2.59961l-8.09961 -18.2002l-31.9004 -2.09961l57 121.899l23.9004 1.60059l30.7002 -102l9.89941 104.7l27 1.7998l37.7998 -63.6006l6.5 66.6006l28.5 1.89941l-4 -41.1992
+c7.40039 13.5 22.9004 44.6992 63.6006 47.5c40.5 2.7998 52.3994 -29.3008 53.3994 -30.3008l3.30078 32l39.2998 2.7002c12.7002 0.900391 27.7998 -0.299805 36.2998 -9.7002l-4.40039 11.9004l32.2002 2.2002l12.9004 -43.2002l23 45.7002l31 2.2002l-43.6006 -78.4004
+l-4.7998 -44.2998l-28.3994 -1.90039l4.7998 44.2998l-15.7998 43c1 -22.2998 -9.2002 -40.0996 -32 -49.5996l25.1992 -38.7998l-36.3994 -2.40039l-19.2002 36.7998l-4 -38.2998l-28.4004 -1.89941l3.30078 31.5c-6.7002 -9.30078 -19.7002 -35.4004 -59.6006 -38
+c-26.2002 -1.7002 -45.5996 10.2998 -55.3994 39.1992l-4 -40.2998l-25 -1.59961l-37.6006 63.2998l-6.2998 -66.2002zM436.8 268.7c10.2002 0.700195 17.5 2.09961 21.6006 4.2998c4.5 2.40039 7 6.40039 7.59961 12.0996
+c0.599609 5.30078 -0.599609 8.80078 -3.40039 10.4004c-3.59961 2.09961 -10.5996 2.7998 -22.8994 2zM327.7 234c5.59961 -5.90039 12.7002 -8.5 21.2998 -7.90039c4.7002 0.300781 9.09961 1.80078 13.2998 4.10059c5.5 3 10.6006 8 15.1006 14.2998l-34.2002 -2.2998
+l2.39941 23.8994l63.1006 4.30078l1.2002 12l-31.2002 -2.10059c-4.10059 3.7002 -7.7998 6.60059 -11.1006 8.10059c-4 1.69922 -8.09961 2.7998 -12.1992 2.5c-8 -0.5 -15.3008 -3.60059 -22 -9.2002c-7.7002 -6.40039 -12 -14.5 -12.9004 -24.4004
+c-1.09961 -9.59961 1.40039 -17.2998 7.2002 -23.2998zM126.4 225.8l23.7998 1.60059l-8.2998 37.5996z" />
+ <glyph glyph-name="app-store" unicode="&#xf36f;" horiz-adv-x="512"
+d="M255.9 327.1l9.09961 15.7002c5.59961 9.7998 18.0996 13.1006 27.9004 7.5c9.7998 -5.59961 13.0996 -18.0996 7.5 -27.8994l-87.5 -151.5h63.2998c20.5 0 32 -24.1006 23.0996 -40.8008h-185.5c-11.2998 0 -20.3994 9.10059 -20.3994 20.4004
+s9.09961 20.4004 20.3994 20.4004h52l66.6006 115.399l-20.8008 36.1006c-5.59961 9.7998 -2.2998 22.1992 7.5 27.8994c9.80078 5.60059 22.2002 2.2998 27.9004 -7.5zM177.2 109.1l-19.6006 -34c-5.59961 -9.7998 -18.0996 -13.0996 -27.8994 -7.5
+c-9.7998 5.60059 -13.1006 18.1006 -7.5 27.9004l14.5996 25.2002c16.4004 5.09961 29.7998 1.2002 40.4004 -11.6006zM346.1 170.8h53.1006c11.2998 0 20.3994 -9.09961 20.3994 -20.3994c0 -11.3008 -9.09961 -20.4004 -20.3994 -20.4004h-29.5l19.8994 -34.5
+c5.60059 -9.7998 2.30078 -22.2002 -7.5 -27.9004c-9.7998 -5.59961 -22.1992 -2.2998 -27.8994 7.5c-33.5 58.1006 -58.7002 101.601 -75.4004 130.601c-17.0996 29.5 -4.89941 59.0996 7.2002 69.0996c13.4004 -23 33.4004 -57.7002 60.0996 -104zM256 440
+c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM472 192c0 119.9 -97.2998 216 -216 216c-119.9 0 -216 -97.2998 -216 -216c0 -119.9 97.2998 -216 216 -216c119.9 0 216 97.2998 216 216z" />
+ <glyph glyph-name="app-store-ios" unicode="&#xf370;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM127 63.5l19.2998 33.2998c-10.2998 12.5 -23.5 16.2998 -39.5996 11.4004l-14.2998 -24.7002
+c-5.5 -9.5 -2.30078 -21.7998 7.2998 -27.2998c9.5 -5.5 21.7998 -2.2998 27.2998 7.2998zM265.9 117.4c8.7998 16.2998 -2.5 40 -22.7002 40h-62.1006l85.8008 148.6c5.5 9.5 2.2998 21.7998 -7.30078 27.2998c-9.5 5.5 -21.7998 2.2998 -27.2998 -7.2998
+l-8.89941 -15.4004l-8.90039 15.4004c-5.5 9.5 -17.7002 12.7998 -27.2998 7.2998c-9.5 -5.5 -12.7998 -17.7002 -7.2998 -27.2998l20.5 -35.4004l-65.4004 -113.199h-51c-11 0 -20 -9 -20 -20s9 -20 20 -20h181.9zM364 117.4c11 0 20 8.89941 20 20c0 11 -9 20 -20 20h-52
+c-26.2002 45.2998 -45.7998 79.2998 -58.9004 102c-11.8994 -9.80078 -23.7998 -38.8008 -7.09961 -67.8008c16.5 -28.3994 41.0996 -71.1992 74 -128.1c5.5 -9.5 17.7002 -12.7998 27.2998 -7.2998c9.5 5.5 12.7998 17.7002 7.2998 27.2998l-19.5996 33.9004h29z" />
+ <glyph glyph-name="apper" unicode="&#xf371;" horiz-adv-x="639"
+d="M42.0996 208.9c22.2002 0 29 -2.80078 33.5 -14.6006h0.800781v22.9004c0 11.2998 -4.80078 15.3994 -17.9004 15.3994c-11.2998 0 -14.4004 -2.5 -15.0996 -12.7998h-38.6006c0.299805 13.9004 1.5 19.1006 5.7998 24.4004
+c7.30078 8.7998 18.9004 11.7998 46.1006 11.7998c33 0 47.0996 -5 53.8994 -18.9004c2 -4.2998 4 -15.5996 4 -23.6992v-76.3008h-38.2998l1.2998 19.1006h-1c-5.2998 -15.6006 -13.5996 -20.4004 -35.5 -20.4004c-30.2998 0 -41.0996 10.1006 -41.0996 37.2998
+c0 25.2002 12.2998 35.8008 42.0996 35.8008zM59.2002 160.8c13.0996 0 16.8994 3 16.8994 13.4004c0 9.09961 -4.2998 11.5996 -19.5996 11.5996c-13.0996 0 -17.9004 -3 -17.9004 -12.0996c-0.0996094 -10.4004 3.7002 -12.9004 20.6006 -12.9004zM137 255.7h38.2998
+l-1.5 -20.6006h0.799805c9.10059 17.1006 15.9004 20.9004 37.5 20.9004c14.4004 0 24.7002 -3 31.5 -9.09961c9.80078 -8.60059 12.8008 -20.4004 12.8008 -48.1006c0 -30 -3 -43.0996 -12.1006 -52.8994c-6.7998 -7.30078 -16.3994 -10.1006 -33.2002 -10.1006
+c-20.3994 0 -29.1992 5.5 -33.7998 21.2002h-0.799805v-70.2998h-39.5v169zM217.9 195c0 27.5 -3.30078 32.5 -20.7002 32.5c-16.9004 0 -20.7002 -5 -20.7002 -28.7002c0 -28 3.5 -33.5 21.2002 -33.5c16.3994 0 20.2002 5.60059 20.2002 29.7002zM275.8 255.7h38.2998
+l-1.5 -20.6006h0.800781c9.09961 17.1006 15.8994 20.9004 37.5 20.9004c14.3994 0 24.6992 -3 31.5 -9.09961c9.7998 -8.60059 12.7998 -20.4004 12.7998 -48.1006c0 -30 -3 -43.0996 -12.1006 -52.8994c-6.7998 -7.30078 -16.3994 -10.1006 -33.2998 -10.1006
+c-20.3994 0 -29.2002 5.5 -33.7998 21.2002h-0.799805v-70.2998h-39.5v169h0.0996094zM356.7 195c0 27.5 -3.2998 32.5 -20.7002 32.5c-16.9004 0 -20.7002 -5 -20.7002 -28.7002c0 -28 3.5 -33.5 21.2002 -33.5c16.4004 0 20.2002 5.60059 20.2002 29.7002zM410.5 198.8
+c0 25.4004 3.2998 37.7998 12.2998 45.7998c8.7998 8.10059 22.2002 11.3008 45.1006 11.3008c42.7998 0 55.6992 -12.8008 55.6992 -55.7002v-11.1006h-75.2998c-0.299805 -2 -0.299805 -4 -0.299805 -4.7998c0 -16.8994 4.5 -21.8994 20.0996 -21.8994
+c13.9004 0 17.9004 3 17.9004 13.8994h37.5v-2.2998c0 -9.7998 -2.5 -18.9004 -6.7998 -24.7002c-7.2998 -9.7998 -19.6006 -13.5996 -44.2998 -13.5996c-27.5 0 -41.6006 3.2998 -50.6006 12.2998c-8.5 8.5 -11.2998 21.2998 -11.2998 50.7998zM486.9 210.4
+c-0.300781 1.7998 -0.300781 3.2998 -0.300781 3.7998c0 12.2998 -3.2998 14.5996 -19.5996 14.5996c-14.4004 0 -17.0996 -3 -18.0996 -15.0996l-0.300781 -3.2998h38.3008zM542.5 255.7h38.2998l-1.7998 -19.9004h0.700195
+c6.7998 14.9004 14.3994 20.2002 29.7002 20.2002c10.7998 0 19.0996 -3.2998 23.3994 -9.2998c5.2998 -7.2998 6.7998 -14.4004 6.7998 -34c0 -1.5 0 -5 0.200195 -9.2998h-35c0.299805 1.7998 0.299805 3.2998 0.299805 4c0 15.3994 -2 19.3994 -10.2998 19.3994
+c-6.2998 0 -10.7998 -3.2998 -13.0996 -9.2998c-1 -3 -1 -4.2998 -1 -12.2998v-68h-38.2998v118.5h0.0996094z" />
+ <glyph glyph-name="asymmetrik" unicode="&#xf372;" horiz-adv-x="576"
+d="M517.5 138.8c-13.9004 -14.2998 -30.4004 -27.7002 -48.9004 -39.7998l73.4004 -110.4h-101.6l-45.9004 71.8008c-17.5996 -7.2002 -35.9004 -13.4004 -54.5 -18.7002l32.5996 -53.1006h-135.5l22.8008 37.1006c-23.3008 -2.7002 -46.4004 -3.7002 -68.6006 -2.7002
+l-22 -34.4004h-101.6l34.5 51.7002c-45 17.9004 -68.9004 47.9004 -68.4004 83c0.299805 25.7998 14 54.2998 41.7002 82.9004c38.9004 40 96.5 72.5996 161.6 92.8994c-22.2998 -8.09961 -42 -18.5 -62 -30.6992c-31.1992 -16.2002 -58.6992 -35.9004 -79.5 -58.1006
+c-57.3994 -61 -46.5 -121.8 19.1006 -151.2l190.2 285.5l150.899 -226.399c13 9.5 24.7998 19.7998 35 30.5996c98 104.2 53.7002 207.9 -98.7998 231.7c-68.2998 10.5996 -146.8 5.7002 -221.3 -14.7998c-60.1006 -10 -118.7 -31.7002 -170.7 -58.2002
+c118.1 66.9004 277.9 102.1 406.6 82.4004c110 -16.8008 170.2 -69.5 169.4 -135c-0.400391 -36.1006 -19.7002 -76.1006 -58.5 -116.101zM329.9 58.2998c18.3994 5.2998 36.5 11.7998 53.6992 19.2002l-78.6992 123l-101.9 -159.3
+c22.5 -0.700195 45.7998 0.899414 69.2002 4.39941l32.7002 53.3008z" />
+ <glyph glyph-name="audible" unicode="&#xf373;" horiz-adv-x="640"
+d="M640 248.1v-54l-320 -200l-320 199.9v54l320 -200zM445.5 176.1c-70.7998 94.4004 -200.5 110.7 -290.2 36.3008c-2.59961 -2.2002 -5.2002 -4.40039 -7.7002 -6.7002h-0.299805c37.1006 55.7002 100.601 92.3994 172.601 92.3994s135.5 -36.7998 172.699 -92.5996z
+M225.4 157.3c21 29.6006 55.5 49 94.3994 49c39.2002 0 73.9004 -19.5996 94.7998 -49.5l-45.3994 -28.3994c-21.2002 29.1992 -52 47.5996 -86.4004 47.5996c-20.8994 0 -40.5 -6.7998 -57.3994 -18.7002zM103.6 286.9c-11.5 -9.10059 -24.2998 -22.1006 -34.1992 -32.6006
+c53.8994 82.1006 147 135.601 250.5 135.601c104.899 0 197.199 -54 250.699 -135.7l-48.7998 -30.4004l-0.700195 1c-99.2998 138.5 -285.699 166.4 -417.5 62.1006zM570.6 254.2z" />
+ <glyph glyph-name="avianex" unicode="&#xf374;" horiz-adv-x="512"
+d="M453.1 416c39 0 64.8008 -31.2002 57.8008 -69.7998l-56.7002 -308.5c-7.10059 -38.5 -44.4004 -69.7002 -83.2998 -69.7002h-312c-39 0 -64.8008 31.2002 -57.7002 69.7002l56.5996 308.6c7.10059 38.5 44.4004 69.7002 83.2998 69.7002h312zM394.9 68.7002
+l6.2998 7.89941l-94.9004 119.4l-4.5 7.2998c19.7998 14.2002 33.5 24.2998 35.2998 25.6006c7.90039 6.59961 6.30078 20.7998 -2.69922 31.2998c-9.2002 10.7998 -23 14.3994 -30.7002 7.89941c0 0 -14.4004 -13.5996 -33.7998 -32.3994l-4.90039 4.5l-103.1 112.399
+l-8.90039 -4.7998l-18.7998 -28.8994l68.7998 -99.8008l20.5 -29.5996c-12 -12.2998 -23.5 -24.4004 -32.7998 -34.9004l-58 31.1006l-15.7002 -15.4004l52.4004 -48.0996l40.5996 -61l17.9004 12.7002l-22.1006 64.1992c12.5 7.60059 27 17.1006 41.7002 27.1006
+l115.4 -110z" />
+ <glyph glyph-name="aws" unicode="&#xf375;" horiz-adv-x="640"
+d="M180.41 244.99c-0.719727 -22.6504 10.5996 -32.6807 10.8799 -39.0498c-0.238281 -2.31543 -2.0752 -5.12402 -4.09961 -6.27051l-12.8008 -8.95996c-1.39941 -0.981445 -3.92188 -1.8418 -5.62988 -1.91992c-0.429688 0.0195312 -8.18945 -1.83008 -20.4795 25.6104
+c-13.0283 -16.2627 -40.5127 -29.4609 -61.3496 -29.4609c-0.347656 0 -0.913086 0.00488281 -1.26074 0.0107422c-16.2803 -0.890625 -60.4004 9.24023 -58.1299 56.21c-1.58984 38.2803 34.0596 62.0596 70.9297 60.0498
+c7.10059 -0.0195312 21.6006 -0.370117 46.9902 -6.26953v15.6191c2.69043 26.46 -14.7002 46.9902 -44.8096 43.9102c-2.40039 -0.00976562 -19.4004 0.5 -45.8408 -10.1094c-7.35938 -3.37988 -8.2998 -2.82031 -10.75 -2.82031
+c-7.40918 0 -4.35938 21.4795 -2.93945 24.2002c5.20996 6.39941 35.8604 18.3496 65.9395 18.1797c1.86523 0.165039 4.89844 0.298828 6.77148 0.298828c15.2451 0 37.1611 -7.875 48.9189 -17.5791c9.87305 -11.0439 17.8867 -32.0303 17.8867 -46.8438
+c0 -1.52539 -0.0966797 -3.99609 -0.216797 -5.51562zM93.9902 212.6c32.4297 0.470703 46.1602 19.9707 49.29 30.4707c2.45996 10.0498 2.0498 16.4102 2.0498 27.3994c-9.66992 2.32031 -23.5898 4.85059 -39.5605 4.87012
+c-15.1494 1.14062 -42.8193 -5.62988 -41.7393 -32.2598c-1.24023 -16.79 11.1201 -31.4004 29.96 -30.4805zM264.91 189.55c-7.86035 -0.719727 -11.5205 4.86035 -12.6797 10.3701l-49.8008 164.65c-0.969727 2.7793 -1.60938 5.64941 -1.91992 8.58008
+c-0.0283203 0.189453 -0.0517578 0.5 -0.0517578 0.692383c0 2.18555 1.75195 4.22656 3.91211 4.55762h22.25c8.78027 0.879883 11.6396 -6.03027 12.5498 -10.3701l35.7197 -140.83l33.1602 140.83c0.530273 3.21973 2.94043 11.0693 12.7998 10.2393h17.1602
+c2.16992 0.180664 11.1104 0.5 12.6807 -10.3691l33.4199 -142.631l36.8701 142.631c0.479492 2.17969 2.71973 11.3691 12.6797 10.3691h19.7197c0.850586 0.130859 6.15039 0.810547 5.25 -8.5791c-0.429688 -1.85059 3.41016 10.6592 -52.75 -169.9
+c-1.14941 -5.50977 -4.82031 -11.0898 -12.6797 -10.3701h-18.6904c-10.9395 -1.15039 -12.5098 9.66016 -12.6797 10.75l-33.1602 137.13l-32.7803 -136.99c-0.15918 -1.08984 -1.72949 -11.8994 -12.6797 -10.75h-18.2998v-0.00976562zM538.39 183.92
+c-5.87988 -0.00976562 -33.9199 0.299805 -57.3594 12.29c-4.31152 1.8252 -7.81055 7.10645 -7.81055 11.7891v0.121094v10.75c0 8.4502 6.2002 6.89941 8.83008 5.88965c10.04 -4.05957 16.4805 -7.13965 28.8105 -9.59961
+c36.6494 -7.53027 52.7695 2.2998 56.7197 4.47949c13.1504 7.81055 14.1895 25.6807 5.25 34.9502c-10.4805 8.79004 -15.4805 9.12012 -53.1299 21c-4.64062 1.29004 -43.7002 13.6104 -43.79 52.3604c-0.610352 28.2402 25.0498 56.1797 69.5195 55.9502
+c12.6699 0.00976562 46.4307 -4.13086 55.5703 -15.6201c1.34961 -2.08984 2.01953 -4.5498 1.91992 -7.04004v-10.1104c0 -4.43945 -1.62012 -6.66016 -4.87012 -6.66016c-7.70996 0.860352 -21.3896 11.1699 -49.1602 10.75
+c-6.88965 0.360352 -39.8896 -0.910156 -38.4092 -24.9697c-0.430664 -18.96 26.6094 -26.0703 29.6992 -26.8896c36.46 -10.9707 48.6504 -12.79 63.1201 -29.5801c17.1406 -22.25 7.90039 -48.2998 4.35059 -55.4404
+c-19.0801 -37.4902 -68.4199 -34.4395 -69.2607 -34.4199zM578.59 79.0596c-70.0303 -51.7197 -171.689 -79.25 -258.49 -79.25c-0.853516 -0.00488281 -2.23926 -0.00976562 -3.09277 -0.00976562c-99.5195 0 -240.271 54.0918 -314.177 120.74
+c-6.53027 5.88965 -0.770508 13.96 7.16992 9.46973c81.1748 -46.4336 222.955 -84.1201 316.473 -84.1201h0.407227c69.4072 0.373047 177.64 22.5713 241.59 49.5508c11.7803 5 21.7705 -7.80078 10.1201 -16.3809zM607.78 112.35
+c-8.95996 11.5205 -59.2803 5.38086 -81.8105 2.69043c-6.79004 -0.770508 -7.93945 5.12012 -1.79004 9.46973c40.0703 28.1699 105.88 20.1006 113.44 10.6299c7.5498 -9.46973 -2.0498 -75.4092 -39.5605 -106.909c-5.75977 -4.87012 -11.2695 -2.30078 -8.70996 4.09961
+c8.44043 21.25 27.3906 68.4902 18.4307 80.0195z" />
+ <glyph glyph-name="bimobject" unicode="&#xf378;"
+d="M416 416c17.5996 0 32 -14.4004 32 -32v-384c0 -17.5996 -14.4004 -32 -32 -32h-384c-17.5996 0 -32 14.4004 -32 32v384c0 17.5996 14.4004 32 32 32h384zM352 158.6h-0.0996094v35c0 49.4004 -11.4004 82.5 -103.801 82.5h-17.2998
+c-30 0 -65.0996 -8.2998 -69.7002 -38.7998h-1.09961v74.7002h-64v-232h64v34.7998h0.900391c8 -23.8994 26.2998 -38.7998 70.3994 -38.7998h16.9004c92.3994 0 103.8 33.2002 103.8 82.5996zM288 187.5v-22.9004c0 -21.6992 -3.40039 -33.7998 -38.4004 -33.7998h-45.2998
+c-28.8994 0 -44.0996 6.5 -44.0996 35.7002v19c0 29.2998 15.2002 35.7002 44.0996 35.7002h45.2998c35 0.200195 38.4004 -12 38.4004 -33.7002z" />
+ <glyph glyph-name="bitcoin" unicode="&#xf379;" horiz-adv-x="512"
+d="M504 192c0 -136.967 -111.033 -248 -248 -248s-248 111.033 -248 248s111.033 248 248 248s248 -111.033 248 -248zM362.349 227.33c4.9375 32.999 -20.1904 50.7393 -54.5498 62.5732l11.1465 44.7021l-27.2129 6.78027l-10.8516 -43.5234
+c-7.1543 1.78223 -14.502 3.46387 -21.8027 5.12988l10.9287 43.8096l-27.1982 6.78125l-11.1523 -44.6855c-5.92188 1.34863 -11.7354 2.68164 -17.377 4.08398l0.0302734 0.139648l-37.5293 9.37012l-7.23926 -29.0625s20.1914 -4.62695 19.7646 -4.91309
+c11.0225 -2.75098 13.0146 -10.0439 12.6807 -15.8242l-12.6963 -50.9258c0.759766 -0.193359 1.74414 -0.472656 2.8291 -0.90625c-0.907227 0.224609 -1.87598 0.472656 -2.87598 0.712891l-17.7959 -71.3379c-1.34961 -3.34863 -4.76758 -8.37012 -12.4717 -6.46484
+c0.271484 -0.394531 -19.7793 4.9375 -19.7793 4.9375l-13.5107 -31.1475l35.4141 -8.82617c6.58887 -1.65137 13.0449 -3.37988 19.4004 -5.00684l-11.2617 -45.2129l27.1816 -6.78027l11.1533 44.7324c5.96875 -1.61719 15.6846 -4.13867 21.6865 -5.62695
+l-11.1152 -44.5225l27.2139 -6.78125l11.2617 45.1279c46.4043 -8.78125 81.2988 -5.23926 95.9863 36.7266c11.8359 33.79 -0.589844 53.2812 -25.0049 65.9912c17.7803 4.09766 31.1748 15.792 34.7471 39.9492zM300.172 140.151
+c-8.41016 -33.79 -65.3076 -15.5234 -83.7549 -10.9434l14.9443 59.8994c18.4453 -4.60352 77.5996 -13.7178 68.8105 -48.9561zM308.589 227.818c-7.67285 -30.7363 -55.0312 -15.1201 -70.3926 -11.292l13.5479 54.3262
+c15.3633 -3.82715 64.8359 -10.9727 56.8447 -43.0342z" />
+ <glyph glyph-name="bity" unicode="&#xf37a;" horiz-adv-x="496"
+d="M78.4004 380.8c95.3994 89.2002 246.1 91.2002 343.1 -3.7998c14.2998 -14.0996 -6.40039 -37.0996 -22.4004 -21.5c-84.7998 82.4004 -215.8 80.2998 -298.899 3.2002c-16.2998 -15.1006 -36.5 8.2998 -21.7998 22.0996zM177.3 -37.7998
+c-128.7 38.2998 -201.899 170.7 -169.8 298.1c5.2998 21 35.2002 12.5 30.2002 -7.09961c-28.2998 -111.3 35.2998 -227.101 147.5 -261c21.3994 -6.40039 11.3994 -35.7002 -7.90039 -30zM325.4 -35.7998c-19.2002 -6.2998 -30 22.7002 -8.80078 29.7002
+c106.101 35.5 167.4 145.699 143.2 253.399c-4.89941 21.7002 25.5 27.6006 30 7.90039c28.5 -124.101 -42.5 -250.8 -164.399 -291zM262.5 43.2002c0 -8.2002 -6.59961 -14.7998 -14.7998 -14.7998s-14.7998 6.59961 -14.7998 14.7998l0.199219 71.7998
+c0 8.09961 6.60059 14.7998 14.8008 14.7998c8.19922 0 14.7998 -6.59961 14.7998 -14.7998zM333.5 312.2c0 21.7998 32.5 19.5996 32.5 0v-71.6006c0 -69.2998 -60.7002 -90.8994 -118 -90.0996c-57.2998 -0.799805 -118 20.7998 -118 90.0996v71.6006
+c0 19.5996 32.5 21.7998 32.5 0c-1.40039 -88.2002 -7 -131.8 85.5 -132.5c90.2002 0.599609 87.5996 41.5996 85.5 132.5z" />
+ <glyph glyph-name="blackberry" unicode="&#xf37b;" horiz-adv-x="511"
+d="M166 331.1c0 -23.3994 -16.4004 -49.0996 -72.5 -49.0996h-70.0996l21 88.7998h67.7998c42.0996 0 53.7998 -23.2998 53.7998 -39.7002zM292.2 370.8c42.0996 0 53.7998 -23.2998 53.7002 -39.7002c0 -23.3994 -16.3008 -49.0996 -70.1006 -49.0996h-70.0996
+l18.7002 88.7998h67.7998zM88.7998 239.9c42.1006 0 53.7998 -23.4004 53.7998 -39.7002c0 -25.7002 -16.3994 -49.1006 -72.5 -49.1006h-70.0996l21 88.8008h67.7998zM268.9 239.9c42 0 53.6992 -23.4004 53.6992 -39.7002c0 -25.7002 -16.2998 -49.1006 -70.0996 -49.1006
+h-70.0996l18.6992 88.8008h67.8008zM458.2 293.7c42.0996 0 53.7998 -23.4004 53.7002 -39.7002c0 -25.7002 -16.3008 -49.0996 -70.1006 -49.0996h-70.0996l18.7002 88.7998h67.7998zM430.2 155.8c42.0996 0 53.7002 -23.3994 53.7002 -39.7002
+c0 -25.6992 -14 -49.0996 -70.1006 -49.0996h-70.0996l18.7002 88.7998h67.7998zM240.8 102c42.1006 0 53.7998 -23.4004 53.7002 -39.7002c0 -23.3994 -14 -49.0996 -70.0996 -49.0996h-70.1006l18.7002 88.7998h67.7998z" />
+ <glyph glyph-name="blogger" unicode="&#xf37c;" horiz-adv-x="447"
+d="M162.4 252c4.7998 4.90039 6.19922 5.09961 36.3994 5.09961c27.2002 0 28.1006 -0.0996094 32.1006 -2.09961c5.7998 -2.90039 8.2998 -7 8.2998 -13.5996c0 -5.90039 -2.40039 -10 -7.60059 -13.4004c-2.7998 -1.7998 -4.5 -1.90039 -31.0996 -2.09961
+c-16.4004 -0.100586 -29.5 0.199219 -31.5 0.799805c-10.2998 2.89941 -14.0996 17.7002 -6.59961 25.2998zM223.8 157.5c55.4004 0 55.1006 0 60.4004 -4.7002c7.39941 -7 5.89941 -19.2998 -3.10059 -24.3994l-9.19922 -1.5l-47.9004 -0.600586
+c-42.2002 -0.5 -54.0996 0.200195 -56.2998 1.2002c-4.40039 1.90039 -8.5 7.2998 -9.2002 12c-0.599609 4.5 1.59961 10.7998 5.09961 13.9004c4.40039 3.89941 6.30078 4.09961 60.2002 4.09961zM447.2 27.4004c-3.5 -28.4004 -23 -50.4004 -51.1006 -57.5
+c-7.19922 -1.80078 -9.69922 -1.90039 -172.899 -1.80078c-157.8 0 -165.9 0.100586 -172 1.80078c-8.40039 2.19922 -15.6006 5.5 -22.2998 10c-5.60059 3.7998 -13.9004 11.7998 -17 16.3994c-3.80078 5.60059 -8.2002 15.2998 -10 22
+c-1.80078 6.7002 -1.90039 9.40039 -1.90039 173.4c0 163.1 0 166.6 1.7998 173.7c6.2998 24.6992 25.9004 43.5996 51.2002 49.1992c7.2998 1.60059 332.1 1.90039 340 0.300781c21.2002 -4.30078 37.9004 -17.1006 47.5996 -36.4004c7.7002 -15.2998 7 1.5 7.30078 -180.6
+c0.199219 -115.801 0 -164.5 -0.700195 -170.5zM361.8 212.6c-1.09961 5 -4.2002 9.60059 -7.7002 11.5c-1.09961 0.600586 -8 1.30078 -15.5 1.7002c-12.3994 0.600586 -13.7998 0.799805 -17.7998 3.10059c-6.2002 3.59961 -7.89941 7.59961 -8 18.2998
+c0 20.3994 -8.5 39.3994 -25.2998 56.5c-12 12.2002 -25.2998 20.5 -40.5996 25.0996c-3.60059 1.10059 -11.8008 1.5 -39.2002 1.7998c-42.9004 0.5 -52.5 -0.399414 -67.1006 -6.19922c-27 -10.7002 -46.2998 -33.4004 -53.3994 -62.4004
+c-1.2998 -5.40039 -1.60059 -14.2002 -1.90039 -64.2998c-0.399414 -62.7998 0 -72.1006 4 -84.5c9.7002 -30.7002 37.1006 -53.4004 64.6006 -58.4004c9.19922 -1.7002 122.199 -2.09961 133.699 -0.5c20.1006 2.7002 35.9004 10.7998 50.7002 25.9004
+c10.7002 10.8994 17.4004 22.7998 21.7998 38.5c3.2002 10.8994 2.90039 88.3994 1.7002 93.8994z" />
+ <glyph glyph-name="blogger-b" unicode="&#xf37d;"
+d="M446.6 225.3c2 -8.89941 2.40039 -134.1 -2.5 -151.7c-7.09961 -25.2998 -17.8994 -44.3994 -35.1992 -62.0996c-23.9004 -24.4004 -49.4004 -37.5 -81.9004 -41.9004c-18.7002 -2.5 -201.2 -1.89941 -216 0.800781c-44.5 8 -88.7998 44.6992 -104.4 94.2998
+c-6.2998 20.0996 -7 35 -6.39941 136.5c0.5 81 1 95.0996 3.09961 103.899c11.4004 46.8008 42.6006 83.4004 86.1006 100.601c23.5996 9.39941 39 10.7998 108.399 10c44.2002 -0.5 57.4004 -1.10059 63.2998 -2.90039c24.6006 -7.5 46.2002 -20.7998 65.5 -40.5
+c27.1006 -27.5996 40.8008 -58.2998 40.9004 -91.2998c0.0996094 -17.2002 2.7998 -23.5996 12.9004 -29.5c6.39941 -3.7002 8.59961 -4.09961 28.6992 -5c12 -0.5 23.2002 -1.7002 25 -2.7002c5.7002 -3.09961 10.7002 -10.5 12.5 -18.5zM124.5 288.9
+c-12.2002 -12.3008 -6 -36.1006 10.5996 -40.8008c3.10059 -0.799805 24.3008 -1.39941 50.8008 -1.19922c43 0.199219 45.6992 0.399414 50.2998 3.2998c8.5 5.39941 12.2998 12.0996 12.2998 21.5996c0 10.6006 -4.09961 17.2002 -13.4004 21.9004
+c-6.39941 3.2998 -7.89941 3.39941 -51.7998 3.39941c-48.7998 0 -51 -0.299805 -58.7998 -8.19922zM316.3 89.0996c14.4004 8.2002 17 28.1006 4.90039 39.4004c-8.5 7.90039 -8 7.90039 -97.6006 7.7998c-87.0996 -0.0996094 -90.1992 -0.299805 -97.2998 -6.7002
+c-5.59961 -5.09961 -9.2998 -15.0996 -8.2002 -22.3994c1.10059 -7.7002 7.80078 -16.2998 14.9004 -19.4004c3.59961 -1.59961 22.7998 -2.7998 90.9004 -2l77.5 0.900391z" />
+ <glyph glyph-name="buromobelexperte" unicode="&#xf37f;"
+d="M0 416h128v-128h-128v128zM120 296v112h-112v-112h112zM160 416h128v-128h-128v128zM280 296v112h-112v-112h112zM320 416h128v-128h-128v128zM440 296v112h-112v-112h112zM0 256h128v-128h-128v128zM120 136v112h-112v-112h112zM160 256h128v-128h-128v128zM280 136v112
+h-112v-112h112zM320 256h128v-128h-128v128zM440 136v112h-112v-112h112zM0 96h128v-128h-128v128zM120 -24v112h-112v-112h112zM160 96h128v-128h-128v128zM280 -24v112h-112v-112h112zM320 96h128v-128h-128v128z" />
+ <glyph glyph-name="centercode" unicode="&#xf380;" horiz-adv-x="512"
+d="M329.2 179.4c-3.7998 -35.2002 -35.4004 -60.6006 -70.6006 -56.8008c-35.1992 3.80078 -60.5996 35.4004 -56.7998 70.6006s35.4004 60.5996 70.6006 56.7998c35.0996 -3.7998 60.5996 -35.4004 56.7998 -70.5996zM243.4 -55.7002
+c-146.7 7.7002 -251.601 138.2 -233.301 279.4c11.2002 86.5996 65.8008 156.899 139.101 192c161 77.0996 349.7 -37.4004 354.7 -216.601c4.09961 -147 -118.4 -262.199 -260.5 -254.8zM423.3 124.3c27.9004 118 -160.5 205.9 -237.2 234.2
+c-57.5 -56.2998 -69.0996 -188.6 -33.7998 -344.4c68.7998 -15.7998 169.101 26.4004 271 110.2z" />
+ <glyph glyph-name="cloudscale" unicode="&#xf383;"
+d="M318.1 294c6.2002 6.2998 15.8008 -3.09961 9.5 -9.59961l-75.1992 -88.8008c0.899414 -8.19922 -1.80078 -16.7998 -8.10059 -23.0996c-11.0996 -11 -28.8994 -11 -40 0c-11.0996 11.0996 -11.0996 29 0 40c6.2998 6.2998 14.7998 9 23.1006 8.09961l25.1992 20.4004
+c-16.3994 15.2998 -38.3994 24.7002 -62.5996 24.7002c-50.7998 0 -94.5996 -41.4004 -92.5996 -97.4004c-1 6.2998 -1.40039 12.7998 -1.40039 19.4004c0 71.5 57.7998 132.3 129.4 132.3c31.7998 0 60.7998 -14.2998 83.2998 -33.5996zM234.3 182.5
+c5.60059 5.5 5.60059 14.5996 0 20.2002c-5.59961 5.59961 -14.5996 5.59961 -20.2002 0c-5.59961 -5.60059 -5.59961 -14.6006 0 -20.2002c5.60059 -5.5 14.6006 -5.5 20.2002 0zM224 416c123.5 0 224 -100.5 224 -224s-100.5 -224 -224 -224s-224 100.5 -224 224
+s100.5 224 224 224zM224 32c88.2002 0 160 71.7998 160 160s-71.7998 160 -160 160s-160 -71.7998 -160 -160s71.7998 -160 160 -160z" />
+ <glyph glyph-name="cloudsmith" unicode="&#xf384;" horiz-adv-x="333"
+d="M332.5 28.0996c0 -46.3994 -37.5996 -84.0996 -84 -84.0996s-84 37.7002 -84 84.0996c0 46.4004 37.5996 84 84 84s84 -37.5996 84 -84zM248.5 272c-46.4004 0 -80 -33.5996 -80 -80s-37.5996 -80 -84 -80s-84 33.5996 -84 80s37.5996 88 84 88s76 29.5996 76 76
+s41.5996 84 88 84s80 -37.5996 80 -84s-33.5996 -84 -80 -84z" />
+ <glyph glyph-name="cloudversify" unicode="&#xf385;" horiz-adv-x="615"
+d="M148.6 144v-0.0996094h-48.8994c-6.40039 0 -11.7002 5.39941 -11.7002 11.7998v40.3994c0 7.60059 7 11.9004 10.7998 11.9004h46.7998v-6.59961c0 -10.7002 8.80078 -16.7002 19.5 -16.7002h20.2002c10.7998 0 19.5 8.7998 19.5 19.5v20.3994
+c0 10.6006 -3.5 19.5 -15.2002 19.5c18.5 15.2002 37.2002 21.4004 45 24.1006c15 56.5 42 92.3994 99.3008 109.7c55.0996 16.5 153.5 3.09961 186.5 -85c73.8994 -22.6006 106.899 -92.6006 92.0996 -155.101c-13 -54.8994 -62.2998 -100.6 -131.5 -99.5
+c-49.5996 -51.3994 -135.2 -48.8994 -186.4 -5.59961c-78.5996 -4.2002 -137.8 42.7998 -146 111.3zM376 136c8.7002 -54.0996 59.7002 -65.5 91.7998 -59.2002c39.1006 7.7002 70.5 37.5 79.7002 76.5c5.7998 24.4004 2.40039 50 -9.40039 72l-10.5 19.6006
+c1.2002 -22.5 -12.5 -60.6006 -47.5 -76.9004c65.5 67.7002 2.10059 141.2 -67.6992 150.5c-49.8008 6.59961 -83.3008 -13 -114.2 -43.7002c48 -4.7002 87.7002 -26.7998 101.8 -74.7998c-30.0996 49.2998 -103 56.5996 -133.6 40.7998
+c-35.5 -18.2002 -60 -54 -57 -93.8994c3.59961 -47.4004 39.5 -67.4004 57.3994 -79.8008c-4.5 21.7002 -4 71.3008 29.2002 92.9004c-36.2998 -60 28.0996 -144.6 135.3 -110.8c-33.5996 14.3994 -66 40.5 -55.2998 86.7998zM128 240h-39.7998
+c-8.90039 0 -16.2002 7.2998 -16.2002 16.2002v39.5996c0 8.90039 7.2998 16.2002 16.2002 16.2002h39.7998c8.90039 0 16.2002 -7.2998 16.2002 -16.2002v-39.5996c0 -8.90039 -7.2998 -16.2002 -16.2002 -16.2002zM10.0996 280c-5.59961 0 -10.0996 4.5 -10.0996 10.0996
+v27.8008c0 5.59961 4.5 10.0996 10.0996 10.0996h27.7002c5.5 0 10.1006 -4.5 10.1006 -10.0996v-27.8008c0 -5.59961 -4.5 -10.0996 -10.1006 -10.0996h-27.7002zM168 305.3v21.4004c0 5.09961 4.2002 9.2998 9.2998 9.2998h21.4004
+c5.09961 0 9.2998 -4.2002 9.2998 -9.2998v-21.4004c0 -5.09961 -4.2002 -9.2998 -9.2998 -9.2998h-21.4004c-5.09961 0 -9.2998 4.2002 -9.2998 9.2998zM56 212.5v-25c0 -6.2998 -5.09961 -11.5 -11.4004 -11.5h-25.1992c-6.30078 0 -11.4004 5.2002 -11.4004 11.5v25
+c0 6.2998 5.09961 11.5 11.4004 11.5h25.0996c6.40039 0 11.5 -5.2002 11.5 -11.5z" />
+ <glyph glyph-name="cpanel" unicode="&#xf388;" horiz-adv-x="639"
+d="M210.3 227.8c6.60059 -29.0996 -14.5 -65.2998 -51.7002 -65.2998h-32l6.40039 23.7998c1.7998 6.2002 7.2998 10.7998 14.2998 10.7998h10.2998c12.4004 0 20.8008 11.7002 18.3008 22.6006c-2.10059 9.2002 -9.90039 14.7998 -18.3008 14.7998h-19.7998
+l-25.7998 -95.7002c-1.90039 -6.2002 -7.40039 -10.7002 -14.2002 -10.7002l-24.7002 -0.0996094l34.9004 130.1c1.7998 6.40039 7.2002 10.9004 14.2998 10.9004h37c24.1006 0 45.4004 -16.4004 51 -41.2002zM53.7998 199.8c-24.8994 0 -24.7002 -37.3994 0 -37.3994
+h11.2998c4.2002 0 7.60059 -3.90039 6.40039 -8.30078l-7.09961 -26.0996h-12.4004c-33.5 0 -59 31.4004 -50.2998 65.2002c7.2998 27 28.2998 41.0996 51.2002 41.0996h40l-6.2002 -23.5996c-1.90039 -6.5 -7.40039 -10.9004 -14.2998 -10.9004h-18.6006zM301.3 234.6
+c18.7998 0 33.2998 -17.5996 28.5 -36.7998l-14 -51.7998c-2.7998 -10.5996 -12.2002 -17.7998 -23.3994 -17.7998l-57.5 0.200195c-42.9004 0 -38.5 63.7998 0.699219 63.7998h48.4004l-3.5 -13.2002c-1.90039 -6.2002 -7.40039 -10.7998 -14.2002 -10.7998h-21.5996
+c-5.2998 0 -5.2998 -7.90039 0 -7.90039h34.8994c4.60059 0 5.10059 3.90039 5.5 5.2998l8.60059 31.8008c0.299805 1 1.89941 5.2998 -2.10059 5.2998h-57.5c-9.69922 0 -16.5996 8.89941 -14.1992 18.5l3.5 13.3994h77.8994zM633.1 269c4.5 0 7.7002 -4 6.5 -8.2998
+l-26.5 -98.2002c-5.09961 -20.7002 -24.1992 -34.5 -44.8994 -34.5l35.5996 133.1c1.2002 4.7002 5.5 7.90039 10.4004 7.90039h18.8994zM396.8 234.3c34.4004 0 59.2998 -32.2998 50.2998 -65.3994l-8.7998 -33.1006c-1.2002 -4.89941 -5.7002 -7.7998 -10.2998 -7.7998
+h-19.0996c-4.5 0 -7.60059 4 -6.40039 8.2998l10.5996 40c3.30078 11.6006 -5.59961 23.4004 -18.0996 23.4004h-19.7998l-17.2002 -64c-1.2002 -4.7998 -5.59961 -7.7998 -10.4004 -7.7998h-18.8994c-4.2002 0 -7.60059 3.89941 -6.40039 8.2998l26.2002 98h48.2998
+v0.0996094zM495.1 159.7h73.3008l-5.7002 -21c-1.90039 -6.2002 -7.40039 -10.7002 -14.2002 -10.7002h-66.7002c-20 0 -33.2998 19 -28.2998 36.7002l10.7998 40c4.7998 17.5996 20.7002 29.5996 38.6006 29.5996h47.2998c19 0 33.2002 -17.7002 28.2998 -36.7998
+l-3.2002 -12c-2.89941 -11 -12.7002 -17.5996 -23.2002 -17.5996h-53.3994l3.5 13c1.59961 6.19922 7.2002 10.7998 14.2002 10.7998h21.5996c2 0 3.2998 1 3.90039 3l0.699219 2.59961c0.700195 2.7002 -1.2998 5.10059 -3.89941 5.10059h-32.9004
+c-4.09961 0 -6.89941 -2.10059 -7.7998 -6l-8 -30c-0.900391 -3.30078 1.5 -6.7002 5.09961 -6.7002z" />
+ <glyph glyph-name="css3-alt" unicode="&#xf38b;" horiz-adv-x="384"
+d="M0 416h384l-34.9004 -395.8l-157.1 -52.2002l-157.1 52.2002zM313.1 336h-242.199l5.7998 -47.2998h122.899l-6.5 -2.7002l-112.1 -46.7002l3.59961 -46.2998l0.200195 0.0996094v-0.0996094l166.3 -0.5l-3.69922 -61.5996l-54.7002 -15.4004l-52.6006 13.2998
+l-3.19922 38.2998h-48.9004l6.40039 -73.8994l98.7998 -29.2002l98.2002 28.7002l12.7998 146.6h-111.5l0.299805 0.100586l115.3 49.2998z" />
+ <glyph glyph-name="cuttlefish" unicode="&#xf38c;" horiz-adv-x="431"
+d="M344 142.5c13.7002 -50.9004 41.7002 -93.2998 87 -117.8c-45.2998 -49.6006 -110.5 -80.7002 -183 -80.7002c-137 0 -248 111 -248 248s111 248 248 248c72.5 0 137.7 -31.0996 183 -80.7002c-45.2998 -24.5 -73.2998 -66.8994 -87 -117.8
+c-17.5 31.5996 -57.4004 54.5 -96 54.5c-56.5996 0 -104 -47.4004 -104 -104s47.4004 -104 104 -104c38.5996 0 78.5 22.9004 96 54.5z" />
+ <glyph glyph-name="d-and-d" unicode="&#xf38d;" horiz-adv-x="575"
+d="M82.5 349.1c-0.599609 17.2002 2 33.8008 12.7002 48.2002c0.299805 -7.39941 1.2002 -14.5 4.2002 -21.5996c5.89941 27.5 19.6992 49.2998 42.2998 65.5c-1.90039 -5.90039 -3.5 -11.7998 -3 -17.7002c8.7002 7.40039 18.7998 17.7998 44.3994 22.7002
+c14.7002 2.7998 29.7002 2 42.1006 -1c38.5 -9.2998 61 -34.2998 69.7002 -72.2998c5.2998 -23.1006 0.699219 -45 -8.30078 -66.4004c-5.19922 -12.4004 -12 -24.4004 -20.6992 -35.0996c-2 1.89941 -3.90039 3.7998 -5.80078 5.59961
+c-42.7998 40.7998 -26.7998 25.2002 -37.3994 37.4004c-1.10059 1.19922 -1 2.19922 -0.100586 3.59961c8.30078 13.5 11.8008 28.2002 10 44c-1.09961 9.7998 -4.2998 18.9004 -11.2998 26.2002c-14.5 15.2998 -39.2002 15 -53.5 -0.600586
+c-11.3994 -12.5 -14.0996 -27.3994 -10.8994 -43.5996c0.199219 -1.2998 0.399414 -2.7002 0 -3.90039c-3.40039 -13.6992 -4.60059 -27.5996 -2.5 -41.5996c0.0996094 -0.5 0.0996094 -1.09961 0.0996094 -1.59961c0 -0.300781 -0.0996094 -0.5 -0.200195 -1.10059
+c-21.7998 11 -36 28.2998 -43.2002 52.2002c-8.2998 -17.7998 -11.0996 -35.5 -6.59961 -54.0996c-15.5996 15.1992 -21.2998 34.2998 -22 55.1992zM552.1 225.9c0.5 -0.600586 1.2002 -1 1.7002 -1.40039v-0.5c-15 3.59961 -29.7998 1.7998 -44.5 -1.2998
+c-9.2998 -2 -18.2998 -4.7002 -26.7002 -9c-2.89941 -1.5 -5.69922 -3.2998 -8 -4.7002c-5.7998 2.40039 -11.2998 5.5 -17.1992 6.7998c-24.5 5.2998 -45.8008 -1.2002 -62.5 -20c-19.7002 -22.2002 -34.5 -47.5996 -46.7002 -74.5l-1.2002 -2.7002
+c-0.0996094 -0.199219 -0.200195 -0.299805 -0.400391 -0.399414c-12.0996 8.2998 -21.5996 20.2998 -36.0996 25.5996c0.299805 0.400391 0.400391 0.900391 0.700195 1.2998c20.5996 28.2002 44.8994 52.5 75.0996 70.4004c16 9.5 33 16.0996 51.5 18.5
+c1.7998 0.200195 3.5 0.400391 5.2998 1.09961c-4.39941 0 -8.7998 0.300781 -13.0996 -0.0996094c-21.2002 -1.90039 -40.5 -9.59961 -58.7002 -20.2002c-13.7998 -8 -26.2002 -17.7002 -36.5996 -29.7998c-0.400391 -0.5 -0.600586 -1.09961 -0.900391 -1.7002
+c-0.299805 0.299805 -0.700195 0.600586 -1 0.900391c11 30.8994 30.7002 55 57.7002 73.2998c0.200195 -0.200195 0.5 -0.299805 0.700195 -0.5c-1.2002 -1.7002 -2.5 -3.2998 -3.5 -5.09961c-1.7998 -3.30078 -3.7002 -6.5 -5.10059 -10
+c-1.7998 -4.30078 1.60059 -8.60059 12 -0.5c18.2002 14.0996 29.6006 26.2998 48.9004 29.5996c0.700195 0.0996094 1.2998 0.299805 1.90039 0.299805h2.5c-1 -0.700195 -1.60059 -1.09961 -2.2002 -1.5c-11.6006 -7.7998 -11.7998 -7.39941 -15 -12
+c-2.60059 -3.7002 -0.200195 -8 4.7002 -6.7998c2.59961 0.599609 5.19922 1.2998 7.69922 2.2002c9.40039 3.2998 19 5.7998 29 6.39941c13.9004 0.800781 27.1006 -1.89941 39.9004 -7.09961c15.0996 -6.2002 28.5 -15 40.0996 -26.5996zM316.7 50.4004
+c1.5 -1.30078 1.89941 -2.40039 0.899414 -4.2002c-25.2998 -50.2002 -61.0996 -89.1006 -116 -98.7998c-26.7998 -4.7002 -52.8994 -2.7002 -77.8994 8.59961c-18.5 8.2002 -34.6006 19.5996 -47.2002 35.5996c-2 2.60059 -3.7002 5.40039 -5.90039 8.60059
+c-0.699219 -7.7998 0.100586 -14.9004 1.5 -21.9004c-0.199219 -0.200195 -0.399414 -0.299805 -0.599609 -0.5c-3.2002 3.40039 -6.59961 6.60059 -9.5 10.2998c-12.2002 15.5 -19.5 33.3008 -24.0996 52.3008c-11.8008 48.2998 -0.5 78.7998 7.7998 101.1
+c-8.7002 -4.7998 -16.2002 -10.2998 -23.6006 -16.2002c11.6006 32.7998 31.9004 59.9004 56.1006 84.6006c2.39941 -2.10059 3.2998 -4.7002 3 -7.40039c-0.200195 -1 -5.90039 -38.9004 -5.60059 -44.7002c18.9004 18.9004 40.5 33.2998 64.8008 43.9004
+c-7.5 -11.1006 -11 -23.4004 -11.8008 -37.2998c13.4004 12.1992 27.7002 20.0996 46.4004 13.8994c-8.5 -9.09961 -30.7998 -30.5 -38.5996 -64.2998c-5.10059 -21.9004 -3.80078 -43.0996 8.19922 -62.5996c11.2002 -18.3008 27.8008 -27.8008 49.4004 -27.8008
+c12.5996 0 23.7998 5 34.0996 11.8008c18.5 12.2998 32.8008 28.5 44 47.5996c1.90039 3.2002 1.10059 2.09961 1.90039 3c19.9004 -16.0996 3.2998 -2.59961 42.7002 -35.5996zM488.7 96.7998c20.2002 -6.59961 35.5 -18.7998 43.7998 -38.8994
+c9.2002 -23.1006 2.09961 -49.4004 -17.4004 -66c-16.3994 -14 -35.6992 -19.2002 -57 -17.4004c-0.599609 0 -1.19922 0 -1.89941 -0.299805c15.0996 -10.7002 31.5996 -15.2002 50.8994 -10.6006c-2.19922 -2.39941 -3.89941 -4.69922 -5.89941 -6.5
+c-12.2998 -10.8994 -26.9004 -16.8994 -42.9004 -19.7998c-39.5996 -7.2998 -75.5996 12.7998 -85 56.9004c-0.5 2.09961 -0.599609 4.2002 -0.899414 6.39941c-10.8008 -8.19922 -16.4004 -34.0996 -0.700195 -52.2998c-1.60059 0.5 -2.60059 0.700195 -3.60059 1.10059
+c-21.2998 8.2998 -34.3994 28.2998 -33.5 51.1992c0.900391 23.2002 4.90039 41 -13 56c-16.5 13.8008 -33 27.4004 -49.5 41.1006c-8.09961 6.7002 -14.7998 14.5 -17 25.0996c-1 4.60059 -1.39941 9.40039 -1.7998 14.1006c-0.5 6.09961 -3.2998 11 -7.89941 14.7998
+c-4.5 3.89941 -9.30078 7.39941 -13.8008 11.2002c-8.89941 7.5 -12.2998 18.8994 -7.2998 29.8994c2.7998 -12.8994 9.60059 -18.8994 22.6006 -20.2998c4.39941 -0.5 8.89941 -0.799805 13.2998 -1.5c8.09961 -1.2002 12.7998 -6.09961 14.2998 -14.2002
+c0.700195 -3.39941 1.2998 -6.7998 2.2002 -10.2002c1.59961 -5.59961 4.5 -8 10.3994 -8.39941c4.60059 -0.299805 9.30078 -0.5 13.9004 -0.900391c7.59961 -0.599609 14.2002 -3.7998 20.0996 -8.7002c19.4004 -16.1992 39 -32.1992 58.5 -48.2998
+c5.7002 -4.7002 12 -8.2002 19.6006 -8.5c16.7002 -0.599609 29 15.2002 24.7998 31.7998c-0.200195 0.700195 -0.400391 1.5 -0.0996094 2.80078c2.39941 -2 4.89941 -3.80078 7 -5.90039c14.0996 -14 18.0996 -39.2998 8.69922 -56.0996
+c-2.09961 -3.80078 -5.2998 -7.10059 -8.09961 -10.8008c0.700195 -0.199219 1.7998 -0.5 3 -0.599609c14 -1.40039 27.2002 1 38.9004 9.09961c15.7998 10.9004 18 31.2002 5.39941 45.6006c-4.7002 5.39941 -8.89941 8 -18.7998 12
+c6.5 1.2998 19.2002 0.200195 28.7002 -2.90039zM99.4004 268.7c-5.30078 9.2002 -13.2002 15.5996 -22.1006 21.2998c13.7002 0.5 26.6006 -0.200195 39.6006 -3.7002c-7 12.2002 -8.5 24.7002 -5 38.7002c5.2998 -11.9004 13.6992 -20.0996 23.5996 -26.7998
+c19.7002 -13.2002 35.7002 -19.6006 46.7002 -30.2002c3.39941 -3.2998 6.2998 -7.09961 9.59961 -10.9004c-0.799805 2.10059 -1.39941 4.10059 -2.2002 6c-5 10.6006 -13 18.6006 -22.5996 25c-1.7998 1.2002 -2.7998 2.5 -3.40039 4.5
+c-3.2998 12.5 -3 25.1006 -0.699219 37.6006c1 5.5 2.7998 10.8994 4.5 16.2998c0.799805 2.40039 2.2998 4.59961 4 6.59961c0.599609 -6.89941 0 -25.5 19.5996 -46c10.7998 -11.2998 22.4004 -21.8994 33.9004 -32.6992c9 -8.5 18.2998 -16.7002 25.5 -26.8008
+c1.09961 -1.59961 2.19922 -3.2998 3.7998 -4.69922c-5 13 -14.2002 24.0996 -24.2002 33.7998c-9.59961 9.2998 -19.4004 18.3994 -29.2002 27.3994c-3.2998 3 -4.59961 6.7002 -5.09961 10.9004c-1.2002 10.4004 0 20.5996 4.2998 30.2002c0.5 1 1.09961 2 1.90039 3.2998
+c0.5 -4.2002 0.599609 -7.90039 1.39941 -11.5996c4.7998 -23.1006 20.4004 -36.3008 49.2998 -63.5c10 -9.40039 19.3008 -19.2002 25.6006 -31.6006c4.7998 -9.2998 7.2998 -19 5.7002 -29.5996c-0.100586 -0.600586 0.5 -1.7002 1.09961 -2
+c6.2002 -2.60059 10 -6.90039 9.7002 -14.2998c7.7002 2.59961 12.5 8 16.3994 14.5c4.2002 -20.2002 -9.09961 -50.3008 -27.1992 -58.7002c0.399414 4.5 5 23.3994 -16.5 27.7002c-6.80078 1.2998 -12.8008 1.2998 -22.9004 2.09961c4.7002 9 10.4004 20.5996 0.5 22.4004
+c-24.9004 4.59961 -52.7998 -1.90039 -57.7998 -4.60059c8.2002 -0.399414 16.2998 -1 23.5 -3.2998c-2 -6.5 -4 -12.7002 -5.7998 -18.9004c-1.90039 -6.5 2.09961 -14.5996 9.2998 -9.59961c1.2002 0.900391 2.2998 1.90039 3.2998 2.7002
+c-3.09961 -17.9004 -2.90039 -15.9004 -2.7998 -18.2998c0.299805 -10.2002 9.5 -7.80078 15.7002 -7.30078c-2.5 -11.7998 -29.5 -27.2998 -45.4004 -25.7998c7 4.7002 12.7002 10.2998 15.9004 17.9004c-6.5 -0.799805 -12.9004 -1.60059 -19.2002 -2.40039
+l-0.299805 0.900391c4.69922 3.39941 8 7.7998 10.1992 13.0996c8.7002 21.1006 -3.59961 38 -25 39.9004c-9.09961 0.799805 -17.7998 -0.799805 -25.8994 -5.5c6.2002 15.5996 17.2002 26.5996 32.5996 34.5c-15.2002 4.2998 -8.89941 2.7002 -24.5996 6.2998
+c14.5996 9.2998 30.2002 13.2002 46.5 14.5996c-5.2002 3.2002 -48.1006 3.60059 -70.2002 -20.8994c7.90039 -1.40039 15.5 -2.7998 23.2002 -4.2002c-23.7998 -7 -44 -19.7002 -62.4004 -35.5996c1.10059 4.7998 2.7002 9.5 3.2998 14.2998
+c0.600586 4.5 0.800781 9.2002 0.100586 13.5996c-1.5 9.40039 -8.90039 15.1006 -19.7002 16.2998c-7.90039 0.900391 -15.5996 -0.0996094 -23.2998 -1.2998c-0.900391 -0.0996094 -1.7002 -0.299805 -2.90039 0c15.7998 14.7998 36 21.7002 53.1006 33.5
+c6 4.5 6.7998 8.2002 3 14.9004zM227.8 241.9c3.2998 -16 12.6006 -25.5 23.7998 -24.3008c-4.59961 11.3008 -12.0996 19.5 -23.7998 24.3008z" />
+ <glyph glyph-name="deploydog" unicode="&#xf38e;" horiz-adv-x="512"
+d="M382.2 312h51.7002v-239.6h-51.7002v20.6992c-19.7998 -24.7998 -52.7998 -24.0996 -73.7998 -14.6992c-26.2002 11.6992 -44.3008 38.0996 -44.3008 71.7998c0 29.7998 14.8008 57.8994 43.3008 70.7998c20.1992 9.09961 52.6992 10.5996 74.7998 -12.9004v103.9z
+M317.5 150.2c0 -18.2002 13.5996 -33.5 33.2002 -33.5c19.7998 0 33.2002 16.3994 33.2002 32.8994c0 17.1006 -13.7002 33.2002 -33.2002 33.2002c-19.6006 0 -33.2002 -16.3994 -33.2002 -32.5996zM188.5 312h51.7002v-239.6h-51.7002v20.6992
+c-19.7998 -24.7998 -52.7998 -24.0996 -73.7998 -14.6992c-26.2002 11.6992 -44.2998 38.0996 -44.2998 71.7998c0 29.7998 14.7998 57.8994 43.2998 70.7998c20.2002 9.09961 52.7002 10.5996 74.7998 -12.9004v103.9zM123.8 150.2c0 -18.2002 13.6006 -33.5 33.2002 -33.5
+c19.7998 0 33.2002 16.3994 33.2002 32.8994c0 17.1006 -13.7002 33.2002 -33.2002 33.2002c-19.7002 0 -33.2002 -16.3994 -33.2002 -32.5996zM448 352h-384c-17.5996 0 -32 -14.5 -32 -32v-256c0 -17.5996 14.5 -32 32 -32h384c17.5996 0 32 14.5 32 32v256
+c0 17.5996 -14.5 32 -32 32zM448 384c35.2002 0 64 -28.7998 64 -64v-256c0 -35.2002 -28.7998 -64 -64 -64h-384c-35.2002 0 -64 28.7998 -64 64v256c0 35.2002 28.7998 64 64 64h384z" />
+ <glyph glyph-name="deskpro" unicode="&#xf38f;" horiz-adv-x="480"
+d="M205.9 -64l31.0996 38.4004c12.2998 0.199219 25.5996 1.39941 36.5 6.59961c38.9004 18.5996 38.4004 61.9004 38.2998 63.7998c-0.0996094 5 -0.799805 4.40039 -28.8994 37.4004h79.0996c-0.200195 -50.1006 -7.2998 -68.5 -10.2002 -75.7002
+c-9.39941 -23.7002 -43.8994 -62.7998 -95.2002 -69.4004c-8.69922 -1.09961 -32.7998 -1.19922 -50.6992 -1.09961zM406.3 103.7l-119.2 -0.100586l17.4004 31.3008l175.5 -0.300781c-15.2002 -17.2998 -35.0996 -30.8994 -73.7002 -30.8994zM362.7 327.6v-168.3h-73.5
+l-32.7002 -55.5h-6.5c-52.2998 0 -58.0996 56.5 -58.2998 58.9004c-1.2002 13.2002 -21.2998 11.5996 -20.1006 -1.7998c1.40039 -15.8008 8.80078 -40 26.4004 -57.1006h-91c-25.5 0 -110.8 26.7998 -107 114v213.3c0 16 9.7002 16.6006 15 16.8008h82
+c0.200195 0 0.299805 -0.100586 0.5 -0.100586c4.2998 0.400391 50.0996 2.10059 50.0996 -43.7002c0 -13.2998 20.2002 -13.3994 20.2002 0c0 18.2002 -5.5 32.8008 -15.7998 43.7002h84.2002c108.7 0.400391 126.5 -79.3994 126.5 -120.2zM230.2 271.6l64 -29.2998
+c13.2998 45.5 -42.2002 71.7002 -64 29.2998z" />
+ <glyph glyph-name="digital-ocean" unicode="&#xf391;" horiz-adv-x="511"
+d="M87 -33.7998v73.5996h73.7002v-73.5996h-73.7002zM25.4004 101.4h61.5996v-61.6006h-61.5996v61.6006zM491.6 271.1c53.2002 -170.3 -73 -327.1 -235.6 -327.1v95.7998h0.299805v0.299805c101.7 0.200195 180.5 101 141.4 208
+c-14.2998 39.6006 -46.1006 71.4004 -85.7998 85.7002c-107.101 38.7998 -208.101 -39.8994 -208.101 -141.7h-95.7998c0 162.2 156.9 288.7 327 235.601c74.2002 -23.2998 133.6 -82.4004 156.6 -156.601zM256.3 40.0996h-0.299805v-0.299805h-95.2998v95.6006h95.5996
+v-95.3008z" />
+ <glyph glyph-name="discord" unicode="&#xf392;"
+d="M297.216 204.8c0 -15.6162 -11.5195 -28.416 -26.1123 -28.416c-14.3359 0 -26.1113 12.7998 -26.1113 28.416s11.5195 28.416 26.1113 28.416c14.5928 0 26.1123 -12.7998 26.1123 -28.416zM177.664 233.216c14.5918 0 26.3682 -12.7998 26.1123 -28.416
+c0 -15.6162 -11.5205 -28.416 -26.1123 -28.416c-14.3359 0 -26.1123 12.7998 -26.1123 28.416s11.5205 28.416 26.1123 28.416zM448 395.264v-459.264c-64.4941 56.9941 -43.8682 38.1279 -118.784 107.776l13.5684 -47.3604h-290.304
+c-28.9287 0 -52.4805 23.5518 -52.4805 52.7363v346.111c0 29.1846 23.5518 52.7363 52.4805 52.7363h343.039c28.9287 0 52.4805 -23.5518 52.4805 -52.7363zM375.04 152.576c0 82.4316 -36.8643 149.248 -36.8643 149.248
+c-36.8643 27.6475 -71.9355 26.8799 -71.9355 26.8799l-3.58398 -4.0957c43.5195 -13.3125 63.7441 -32.5127 63.7441 -32.5127c-60.8115 33.3291 -132.244 33.335 -191.232 7.42383c-9.47168 -4.35156 -15.1035 -7.42383 -15.1035 -7.42383
+s21.2471 20.2246 67.3271 33.5361l-2.55957 3.07227s-35.0723 0.767578 -71.9355 -26.8799c0 0 -36.8643 -66.8164 -36.8643 -149.248c0 0 21.5039 -37.1201 78.0801 -38.9121c0 0 9.47168 11.5195 17.1514 21.248c-32.5117 9.72754 -44.7998 30.208 -44.7998 30.208
+c3.7666 -2.63574 9.97656 -6.05273 10.4961 -6.40039c43.21 -24.1973 104.588 -32.126 159.744 -8.95996c8.95996 3.32812 18.9443 8.19238 29.4395 15.1045c0 0 -12.7998 -20.9922 -46.3359 -30.4639c7.68066 -9.72852 16.8965 -20.7363 16.8965 -20.7363
+c56.5762 1.79199 78.3359 38.9121 78.3359 38.9121z" />
+ <glyph glyph-name="discourse" unicode="&#xf393;"
+d="M225.9 416c122.699 0 222.1 -102.3 222.1 -223.9c0 -121.6 -99.4004 -223.899 -222.1 -223.899l-225.801 -0.200195s-0.0996094 224 -0.0996094 227.9c0 121.6 103.3 220.1 225.9 220.1zM224 64c70.7002 0 128 57.2998 128 128s-57.2998 128 -128 128
+s-128 -57.2998 -128 -128c0 -22.0996 5.59961 -42.9004 15.4004 -61l-22.9004 -75l81.0996 20.0996c16.5 -7.7998 35 -12.0996 54.4004 -12.0996z" />
+ <glyph glyph-name="dochub" unicode="&#xf394;" horiz-adv-x="400"
+d="M397.9 288h-141.9v140.4zM304 256h96v-126.1c0 -129.301 -70.2998 -193.9 -210.8 -193.9h-189.2v512h189.2c12.2002 0 23.7002 -1.09961 34.5996 -3.2998v-84c-10 1.7002 -21.0996 2.5 -33.0996 2.5h-94.7002v-337.3h94.7002c76.7998 0 113.3 33.2998 113.3 100.1v130z
+" />
+ <glyph glyph-name="docker" unicode="&#xf395;" horiz-adv-x="640"
+d="M349.9 211.7h-66.1006v59.3994h66.1006v-59.3994zM349.9 416v-60.7002h-66.1006v60.7002h66.1006zM428.1 271.2v-59.4004h-66.0996v59.4004h66.0996zM271.8 343.3v-60.0996h-66.0996v60.0996h66.0996zM349.9 343.3v-60.0996h-66.1006v60.0996h66.1006zM626.7 243.3
+l13.2998 -8.89941c-1.90039 -3.90039 -7 -14.6006 -8.5 -17.1006c-23.7002 -45.2998 -69.9004 -45.5996 -91.2998 -45.2002c-54.5 -131.699 -171 -204.199 -328.4 -204.199c-72.7002 0 -128.3 22.2998 -165.399 66.1992c-38.2002 45.3008 -52.7002 111.301 -44 162.101
+h434.699c22.6006 -0.400391 39.7002 6 48.4004 10.7002c-19.7002 30.1992 -14.7002 76 3.7002 103.8l9.2998 14l14 -9.2998c24.4004 -18.8008 37.7998 -39.7002 41.0996 -63.7002c25.5 4.7998 58.7002 1.2998 73.1006 -8.40039zM115.6 271.2h0.100586v-59.4004h-66.1006
+v59.4004h66zM193.7 271.2v-59.4004h-66.1006v59.4004h66.1006zM271.8 271.2v-59.4004h-66.0996v59.4004h66.0996zM193.7 343.3v-60.0996h-66.1006v60.0996h66.1006z" />
+ <glyph glyph-name="draft2digital" unicode="&#xf396;" horiz-adv-x="480"
+d="M480 49.9004l-144 -81.9004v64.2002l-336 -0.100586c18.2998 19.1006 84.5 87.8008 161.1 174.801c32.6006 37.1992 78 83.2998 69.7002 127.6c-5.2998 28.2998 -42.2002 50.7998 -83.2998 33.5c-8.59961 -3.59961 -24.5 -17.4004 -26.2998 -24.7002
+c28.2998 -4.7002 48 -29.7002 48 -56.7998c0 -31.7002 -25.6006 -57.4004 -57.2998 -57.4004c-37.3008 0 -62.2002 34.1006 -56.7002 67.1006c1.2002 7.89941 5.09961 26.7998 18.2002 47.7002c14.8994 23.8994 45.1992 54.8994 104.3 67.2998
+c103.8 21.7002 161.6 -36.6006 166 -41.2002c28.8994 -29.9004 48 -90.7002 12.7998 -153.3c-30 -53.4004 -81 -114.3 -111.8 -149.3h91.2998v64.6992zM369.9 77v-54.4004l47.0996 27.2002zM134.2 286.6c0 12.3008 -10 22.4004 -22.4004 22.4004
+c-12.3994 0 -22.3994 -10 -22.3994 -22.4004c0 -12.3994 10 -22.3994 22.3994 -22.3994c12.4004 0 22.4004 10 22.4004 22.3994zM82.5 67.5h114.4c17.5996 19.2002 91.5 100.8 128.5 166.7c36.5996 65.0996 -5.80078 113.3 -5.80078 113.3
+c-14.1992 14.9004 -36.8994 36.2002 -82.1992 38.2998c6.7998 -5.5 16.8994 -16.8994 24.2998 -35.7002c11.8994 -30.2998 6.7002 -69.5996 -28.4004 -112.699c-53.0996 -65.2002 -125.2 -142.5 -150.8 -169.9z" />
+ <glyph glyph-name="dribbble-square" unicode="&#xf397;"
+d="M90.2002 219.8c8.89941 42.4004 37.3994 77.7002 75.7002 95.7002c3.59961 -4.90039 28 -38.7998 50.6992 -79c-64 -17 -120.3 -16.7998 -126.399 -16.7002zM314.6 294c-2.5 -3.5 -23 -31.0996 -71.5996 -49.4004c-22.4004 41.1006 -47.2002 74.9004 -51 80
+c43.2998 10.5 89 -0.799805 122.6 -30.5996zM140.1 84c14.3008 29.2002 53 66.7998 108.101 85.5996c19.2002 -49.7998 27.2002 -91.5996 29.2002 -103.6c-44 -18.7002 -96.8008 -13.5996 -137.301 18zM238.9 192.2c-49.4004 -13.9004 -94.3008 -53.9004 -116.5 -91.7998
+c-21.8008 24.2998 -35.1006 56.2998 -35.1006 91.3994c0 1.40039 0.100586 2.7998 0.100586 4.2002c6 -0.200195 72.1992 -1 140.399 19.4004c3.90039 -7.7002 7.7002 -15.4004 11.1006 -23.2002zM273.8 175.9c42.7998 6.89941 80.5 -4.30078 85.1006 -5.80078
+c-6.10059 -38 -27.9004 -70.8994 -58.6006 -91.5996c-1.39941 8.2998 -8.59961 48.2998 -26.5 97.4004zM253.5 224.3c50.5 20.7002 73.4004 50 76.2998 53.9004c19.1006 -23.2002 30.6006 -52.7998 30.9004 -85.1006c-4.5 1 -49.7002 10.1006 -95.2002 4.40039
+c-3.7002 9 -7.2002 17 -12 26.7998zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM384 192c0 88.2002 -71.7998 160 -160 160s-160 -71.7998 -160 -160s71.7998 -160 160 -160
+s160 71.7998 160 160z" />
+ <glyph glyph-name="dyalog" unicode="&#xf399;" horiz-adv-x="416"
+d="M0 416h171.2c74.5 0 137.7 -24 182.5 -69.5996c40.2002 -40.9004 62.2998 -95.6006 62.2998 -154.301c0 -111.399 -84.0996 -224.1 -244.8 -224.1h-171.2v64h171.2c122.2 0 180.8 84 180.8 160.1c0 79.7002 -67.4004 159.9 -180.8 159.9h-107.2v-55.2002h-64v119.2z" />
+ <glyph glyph-name="earlybirds" unicode="&#xf39a;" horiz-adv-x="480"
+d="M313.2 400.5c1.2002 13 21.2998 14 36.5996 8.7002c0.900391 -0.299805 26.2002 -9.7002 19 -15.2002c-27.8994 7.40039 -56.3994 -18.2002 -55.5996 6.5zM112.2 393.6c-7.7998 6.2002 19.8994 16.4004 20.8994 16.7002c16.8008 5.7002 38.9004 4.60059 40.2002 -9.59961
+c0.900391 -27.1006 -30.3994 1 -61.0996 -7.10059zM319.4 288c8.7998 0 16 -7.2002 16 -16s-7.2002 -16 -16 -16c-8.80078 0 -16 7.2002 -16 16s7.19922 16 16 16zM159.7 288c8.7998 0 16 -7.2002 16 -16s-7.2002 -16 -16 -16s-16 7.2002 -16 16s7.2002 16 16 16z
+M478.2 124.8c-9.90039 -24 -40.7002 -11 -63.9004 1.2002c-13.5 -69.0996 -58.0996 -111.4 -126.3 -124.2c0.299805 -0.899414 -2 0.100586 24 -1c33.5996 -1.39941 63.7998 3.10059 97.4004 8c-19.8008 13.7998 -11.4004 37.1006 -9.80078 38.1006
+c1.40039 0.899414 14.7002 -1.7002 21.6006 -11.5c8.59961 12.5 28.3994 14.7998 30.2002 13.5996c1.59961 -1.09961 6.59961 -20.9004 -6.90039 -34.5996c4.7002 0.899414 8.2002 1.59961 9.7998 2.09961c2.60059 0.799805 17.7002 -11.2998 3.10059 -13.2998
+c-14.3008 -2.2998 -22.6006 -5.10059 -47.1006 -10.7998c-45.8994 -10.7002 -85.8994 -11.8008 -117.7 -12.8008l1 -11.5996c3.80078 -18.0996 -23.3994 -24.2998 -27.5996 -6.2002c0.799805 -17.8994 -27.0996 -21.7998 -28.4004 1l-0.5 -5.2998
+c-0.699219 -18.4004 -28.3994 -17.9004 -28.2998 0.599609c-7.5 -13.5 -28.0996 -6.7998 -26.3994 8.5l1.19922 12.4004c-36.6992 -0.900391 -59.6992 -3.09961 -61.7998 -3.09961c-20.8994 0 -20.8994 31.5996 0 31.5996c2.40039 0 27.7002 -1.2998 63.2002 -2.7998
+c-61.0996 15.5 -103.7 55 -114.9 118.2c-25 -12.8008 -57.5 -26.8008 -68.1992 -0.800781c-10.5 25.4004 21.5 42.6006 66.7998 73.4004c0.700195 6.59961 1.59961 13.2998 2.7002 19.7998c-14.4004 19.6006 -11.6006 36.2998 -16.1006 60.4004
+c-16.7998 -2.40039 -23.2002 9.09961 -23.5996 23.0996c0.299805 7.2998 2.09961 14.9004 2.39941 15.4004c1.10059 1.7998 10.1006 2 12.7002 2.59961c6 31.7002 50.6006 33.2002 90.9004 34.5c19.7002 21.7998 45.2002 41.5 80.8994 48.2998
+c-15.2998 19.4004 -3.39941 39.9004 -2.39941 40.4004c1.7002 0.799805 21.2002 -4.2998 26.2998 -23.2002c5.2002 8.7998 18.2998 11.4004 19.5996 10.7002c1.10059 -0.599609 6.40039 -15 -4.89941 -25.9004c40.2998 -3.5 72.2002 -24.6992 96 -50.6992
+c36.0996 -1.5 71.7998 -5.90039 77.0996 -34c2.7002 -0.600586 11.6006 -0.800781 12.7002 -2.60059c0.299805 -0.5 2.09961 -8.09961 2.40039 -15.3994c-0.5 -13.9004 -6.80078 -25.4004 -23.6006 -23.1006c-3.2002 -17.2998 -2.7002 -32.8994 -8.7002 -47.7002
+c2.40039 -11.6992 4 -23.7998 4.80078 -36.3994c37 -25.4004 70.2998 -42.5 60.2998 -66.9004zM207.4 288.1c0.899414 44 -37.9004 42.2002 -78.6006 40.3008c-21.7002 -1 -38.8994 -1.90039 -45.5 -13.9004c-11.3994 -20.9004 5.90039 -92.9004 23.2002 -101.2
+c9.7998 -4.7002 73.4004 -7.89941 86.2998 7.10059c8.2002 9.39941 15 49.3994 14.6006 67.6992zM259.4 229.8c-4.30078 12.4004 -6 30.1006 -15.3008 32.7002c-2 0.5 -9 0.5 -11 0c-10 -2.7998 -10.7998 -22.0996 -17 -37.2002c15.4004 0 19.3008 -9.7002 23.7002 -9.7002
+c4.2998 0 6.2998 11.3008 19.6006 14.2002zM395.1 314.5c-6.59961 12.0996 -24.7998 12.9004 -46.5 13.9004c-40.1992 1.89941 -78.1992 3.7998 -77.2998 -40.3008c-0.5 -18.2998 5 -58.2998 13.2002 -67.7998c13 -14.8994 76.5996 -11.7998 86.2998 -7.09961
+c15.7998 7.59961 36.5 78.8994 24.2998 101.3z" />
+ <glyph glyph-name="erlang" unicode="&#xf39d;" horiz-adv-x="640"
+d="M87.2002 394.5c-41.5 -50.2002 -65.6006 -116.2 -65.5 -192.9c-0.100586 -86.7998 29 -159.5 78.7002 -212.1h-100.4v405h87.2002zM325.4 384.8c46.1992 -0.0996094 79.5996 -33.5 80.6992 -83.2002h-169.899c4.09961 49.7002 43.2998 83.1006 89.2002 83.2002z
+M556.1 394.4h0.300781l-0.100586 0.0996094zM556.4 394.4h83.5996v-405h-80.7998c21.3994 23 40.5 49.8994 57.8994 80.7998l-96.3994 48.2002c-33.9004 -55.1006 -83.4004 -105.801 -151.9 -106.101c-99.7002 0.400391 -138.8 85.6006 -138.6 195.3h372.399
+c0.5 12.4004 0.5 18.1006 0 24.1006c2.5 65.2002 -14.7998 120 -46.1992 162.7z" />
+ <glyph glyph-name="facebook-f" unicode="&#xf39e;" horiz-adv-x="264"
+d="M215.8 363c-37.7998 0 -45.0996 -18 -45.0996 -44.2998v-62.7002h85.2998l-11.7002 -91h-73.5996v-229h-94v229h-76.7002v91h76.7002v71.7002c0 77.8994 47.5996 120.3 117.1 120.3c33.2998 0 61.9004 -2.5 70.2002 -3.59961v-81.4004h-48.2002z" />
+ <glyph glyph-name="facebook-messenger" unicode="&#xf39f;" horiz-adv-x="447"
+d="M224 416c122.5 0 221.8 -92.7998 221.7 -207.3c0 -138.8 -143.2 -238.5 -285.4 -198.7l-75.7002 -42v79.4004c-162.1 122.6 -68.6992 368.6 139.4 368.6zM247.4 137.9l121.1 128.5l-110.4 -61.1006l-57.3994 59.6006l-121.101 -128.5l110.4 61.0996z" />
+ <glyph glyph-name="firstdraft" unicode="&#xf3a1;" horiz-adv-x="384"
+d="M384 256h-64v-128h-128v-128h-192v25.5996h166.4v128h128v128h89.5996v-25.5996zM358.4 217.6h25.5996v-153.6h-128v-128h-192v25.5996h166.4v128h128v128zM384 25.5996v-25.5996h-64v-64h-25.5996v89.5996h89.5996zM0 448h384v-128h-128v-128h-128v-128h-128v384z" />
+ <glyph glyph-name="fonticons-fi" unicode="&#xf3a2;" horiz-adv-x="384"
+d="M114.4 224h92.3994l-15.2002 -51.2002h-76.3994v-157.8c0 -8 -2.7998 -9.2002 4.39941 -10l59.6006 -5.59961v-34.4004h-179.2v35.2002l29.2002 2.7998c7.2002 0.799805 9.2002 3.2002 9.2002 10.7998v155.8c0 3.2002 -4 3.2002 -8 3.2002h-30.4004v51.2002h38.4004
+v28.7998c0 68 36.3994 96 106 96c46.7998 0 88.7998 -11.2002 88.7998 -72.3994l-69.6006 -8.40039c0.400391 25.5996 -6 31.5996 -22.3994 31.5996c-25.2002 0 -26 -13.5996 -26 -37.5996v-32c0 -3.2002 -4.7998 -6 -0.799805 -6zM384 -35h-140.8v34.4004l28 3.59961
+c7.2002 0.799805 10.3994 2.40039 10.3994 10v148c0 5.59961 -4 9.2002 -9.19922 10.7998l-33.2002 8.7998l9.2002 40.4004h110v-208c0 -8 -3.60059 -8.7998 4 -10l21.5996 -3.59961v-34.4004zM354 312.2l12.4004 -45.6006l-10 -10l-42.8008 22.8008l-42.7998 -22.8008
+l-10 10l12.4004 45.6006l-30 36.3994l4.7998 10h38l21.2002 38.4004h12.7998l21.2002 -38.4004h38l4.7998 -13.1992z" />
+ <glyph glyph-name="fort-awesome-alt" unicode="&#xf3a3;" horiz-adv-x="512"
+d="M208 210.6c2.09961 0 3.7002 -1.59961 3.7002 -3.69922v-51.7002c0 -2.10059 -1.60059 -3.7002 -3.7002 -3.7002h-22.2002c-2.09961 0 -3.7002 1.59961 -3.7002 3.7002v51.7002c0 2.09961 1.60059 3.69922 3.7002 3.69922h22.2002zM326.2 210.6
+c2 0 3.59961 -1.59961 3.7002 -3.69922v-51.7002c0 -2.10059 -1.60059 -3.7002 -3.7002 -3.7002h-22.2002c-2.09961 0 -3.7002 1.59961 -3.7002 3.7002v51.7002c0 2.09961 1.60059 3.69922 3.7002 3.69922h22.2002zM458.2 335.7
+c28.8994 -40.7002 45.7998 -90.2002 45.7998 -143.7c0 -2 0 -4 -0.0996094 -6c0 -0.700195 0 -1.2998 -0.100586 -2c0 -1.2998 -0.0996094 -2.7002 -0.200195 -4c0 -0.799805 -0.0996094 -1.5 -0.0996094 -2.2998
+c-0.0996094 -1.2002 -0.0996094 -2.40039 -0.200195 -0.700195c-0.0996094 -0.799805 -0.0996094 -1.59961 -0.200195 -2.40039c-0.0996094 -1.19922 -0.199219 -2.39941 -0.299805 -3.5c-0.0996094 -0.799805 -0.200195 -1.59961 -0.200195 -2.39941
+c-0.0996094 -1.2002 -0.299805 -2.40039 -0.399414 -3.60059c-0.100586 -0.799805 -0.200195 -1.5 -0.299805 -2.2998c-0.200195 -1.2998 -0.400391 -2.59961 -0.5 -3.89941c-0.100586 -0.600586 -0.200195 -1.30078 -0.300781 -1.90039l-0.899414 -5.7002
+c-0.100586 -0.599609 -0.200195 -1.09961 -0.299805 -1.7002c-0.200195 -1.2998 -0.5 -2.69922 -0.800781 -4c-0.199219 -0.799805 -0.299805 -1.59961 -0.5 -2.39941c-0.199219 -1.10059 -0.5 -2.2002 -0.699219 -3.2002
+c-0.200195 -0.900391 -0.400391 -1.7002 -0.600586 -2.59961c-0.200195 -1 -0.5 -2 -0.700195 -3c-0.199219 -0.900391 -0.5 -1.80078 -0.699219 -2.7002c-0.300781 -1 -0.5 -1.90039 -0.800781 -2.90039c-0.199219 -0.899414 -0.5 -1.7998 -0.799805 -2.7002
+c-0.299805 -0.899414 -0.599609 -1.89941 -0.799805 -2.7998c-0.299805 -0.899414 -0.5 -1.7998 -0.799805 -2.7002c-0.299805 -0.899414 -0.600586 -1.7998 -0.900391 -2.7998c-0.5 -1.59961 -1.09961 -3.2998 -1.7002 -4.89941
+c-0.299805 -0.900391 -0.599609 -1.80078 -1 -2.80078c-0.399414 -1 -0.699219 -2 -1.09961 -3c-0.299805 -0.799805 -0.599609 -1.5 -0.900391 -2.2998l-1.19922 -3c-0.300781 -0.700195 -0.600586 -1.5 -0.900391 -2.2002c-0.400391 -1 -0.799805 -2 -1.2998 -3
+l-0.900391 -2.09961c-0.399414 -1 -0.899414 -2 -1.39941 -3c-0.300781 -0.700195 -0.600586 -1.2998 -0.900391 -2c-0.5 -1 -1 -2.09961 -1.5 -3.09961c-0.299805 -0.600586 -0.599609 -1.10059 -0.799805 -1.7002c-0.600586 -1.10059 -1.10059 -2.2002 -1.7002 -3.2998
+c-0.0996094 -0.200195 -0.200195 -0.300781 -0.299805 -0.5c-2.2002 -4.10059 -4.40039 -8.2002 -6.7998 -12.2002c-0.200195 -0.400391 -0.5 -0.799805 -0.700195 -1.2002c-0.700195 -1.09961 -1.2998 -2.2002 -2 -3.2998
+c-0.299805 -0.5 -0.600586 -0.900391 -0.900391 -1.40039c-0.700195 -1.09961 -1.39941 -2.09961 -2 -3.2002c-0.299805 -0.5 -0.599609 -0.899414 -0.899414 -1.39941c-0.700195 -1.10059 -1.40039 -2.10059 -2.10059 -3.2002
+c-0.299805 -0.400391 -0.599609 -0.799805 -0.799805 -1.2002c-0.799805 -1.09961 -1.5 -2.2002 -2.2998 -3.2998c-0.200195 -0.200195 -0.299805 -0.5 -0.5 -0.700195c-37.6006 -54.7002 -94.5 -91.3994 -160.101 -102.399
+c-0.899414 -0.100586 -1.69922 -0.300781 -2.59961 -0.400391c-1 -0.200195 -2.09961 -0.299805 -3.09961 -0.5c-0.900391 -0.0996094 -1.80078 -0.299805 -2.80078 -0.400391c-1 -0.0996094 -2 -0.299805 -3 -0.399414c-1 -0.100586 -2 -0.200195 -2.89941 -0.299805
+c-1 -0.100586 -1.90039 -0.200195 -2.90039 -0.300781c-1 -0.0996094 -2.09961 -0.199219 -3.09961 -0.299805c-0.900391 -0.0996094 -1.7998 -0.200195 -2.7002 -0.200195c-1.09961 -0.0996094 -2.2998 -0.0996094 -3.40039 -0.199219
+c-0.799805 0 -1.69922 -0.100586 -2.5 -0.100586c-1.2998 -0.0996094 -2.59961 -0.0996094 -3.89941 -0.0996094c-0.700195 0 -1.40039 -0.100586 -2.10059 -0.100586c-2 0 -4 -0.0996094 -6 -0.0996094s-4 0 -6 0.0996094c-0.699219 0 -1.39941 0 -2.09961 0.100586
+c-1.2998 0 -2.59961 0.0996094 -3.90039 0.0996094c-0.799805 0 -1.69922 0.100586 -2.5 0.100586c-1.09961 0.0996094 -2.2998 0.0996094 -3.39941 0.199219c-0.900391 0.100586 -1.7998 0.100586 -2.7002 0.200195c-1 0.100586 -2.09961 0.200195 -3.09961 0.299805
+c-1 0.100586 -1.90039 0.200195 -2.90039 0.300781c-1 0.0996094 -2 0.199219 -2.90039 0.299805c-1 0.0996094 -2 0.200195 -3 0.399414c-0.899414 0.100586 -1.7998 0.300781 -2.7998 0.400391s-2.09961 0.299805 -3.09961 0.5
+c-0.900391 0.0996094 -1.7002 0.299805 -2.60059 0.400391c-65.5996 10.8994 -122.5 47.6992 -160 99.3994c-0.199219 0.200195 -0.299805 0.5 -0.5 0.700195c-0.799805 1.09961 -1.59961 2.2002 -2.2998 3.2998c-0.299805 0.400391 -0.599609 0.799805 -0.799805 1.2002
+c-0.700195 1.09961 -1.40039 2.09961 -2.09961 3.2002c-0.300781 0.5 -0.600586 0.899414 -0.900391 1.39941c-0.700195 1.10059 -1.40039 2.10059 -2 3.2002c-0.299805 0.5 -0.599609 0.900391 -0.900391 1.40039c-0.699219 1.09961 -1.2998 2.2002 -2 3.2998
+c-0.199219 0.400391 -0.5 0.799805 -0.699219 1.2002c-2.40039 4 -4.60059 8.09961 -6.80078 12.2002c-0.0996094 0.199219 -0.199219 0.299805 -0.299805 0.5c-0.599609 1.09961 -1.09961 2.19922 -1.7002 3.2998c-0.299805 0.599609 -0.599609 1.09961 -0.799805 1.7002
+c-0.5 1 -1 2.09961 -1.5 3.09961c-0.299805 0.700195 -0.599609 1.2998 -0.899414 2c-0.5 1 -0.900391 2 -1.40039 3l-0.900391 2.09961c-0.399414 1 -0.899414 2 -1.2998 3c-0.299805 0.700195 -0.599609 1.5 -0.899414 2.2002l-1.2002 3
+c-0.299805 0.799805 -0.600586 1.5 -0.900391 2.2998c-0.399414 1 -0.799805 2 -1.09961 3c-0.299805 0.900391 -0.600586 1.80078 -1 2.80078c-0.600586 1.59961 -1.10059 3.2998 -1.7002 4.89941c-0.299805 0.900391 -0.599609 1.7998 -0.900391 2.7998
+c-0.299805 0.900391 -0.5 1.80078 -0.799805 2.7002c-0.299805 0.900391 -0.599609 1.90039 -0.799805 2.7998c-0.299805 0.900391 -0.5 1.80078 -0.799805 2.7002c-0.299805 1 -0.5 1.90039 -0.799805 2.90039c-0.200195 0.899414 -0.5 1.7998 -0.700195 2.7002
+c-0.299805 1 -0.5 2 -0.700195 3c-0.200195 0.899414 -0.400391 1.69922 -0.599609 2.59961c-0.200195 1.09961 -0.5 2.2002 -0.700195 3.2002c-0.200195 0.799805 -0.299805 1.59961 -0.5 2.39941c-0.299805 1.30078 -0.5 2.7002 -0.799805 4
+c-0.100586 0.600586 -0.200195 1.10059 -0.300781 1.7002l-0.899414 5.7002c-0.100586 0.599609 -0.200195 1.2998 -0.299805 1.90039c-0.200195 1.2998 -0.400391 2.59961 -0.5 3.89941c-0.100586 0.799805 -0.200195 1.5 -0.300781 2.2998
+c-0.0996094 1.2002 -0.299805 2.40039 -0.399414 3.60059c-0.100586 0.799805 -0.200195 1.59961 -0.200195 2.39941c-0.0996094 1.2002 -0.200195 2.40039 -0.299805 3.5c-0.100586 0.800781 -0.100586 1.60059 -0.200195 2.40039
+c-0.0996094 1.2002 -0.200195 2.40039 -0.200195 3.7002c0 0.799805 -0.0996094 1.5 -0.0996094 2.2998c-0.100586 1.2998 -0.100586 2.7002 -0.200195 4c0 0.700195 0 1.2998 -0.0996094 2c0 2 -0.100586 4 -0.100586 6c0 53.5 16.9004 103 45.7998 143.6
+c2.30078 3.2002 4.7002 6.40039 7.10059 9.5c4.89941 6.2002 10.0996 12.3008 15.5996 18c2.7002 2.90039 5.5 5.7002 8.40039 8.40039c2.89941 2.7002 5.7998 5.40039 8.7998 8c4.5 3.90039 9.09961 7.59961 13.9004 11.2002c1.59961 1.2002 3.19922 2.39941 4.7998 3.5
+c27.2998 19.5996 59 33.7002 93.2998 40.7998c16.0996 3.2998 32.9004 5 50 5s33.7998 -1.7002 50 -5c34.2998 -7 66 -21.0996 93.5996 -40.7002c1.60059 -1.2002 3.2002 -2.2998 4.80078 -3.5c4.7998 -3.59961 9.39941 -7.2998 13.8994 -11.2002
+c12 -10.3994 23 -21.8994 32.7998 -34.3994c2.5 -3.10059 4.80078 -6.2998 7.10059 -9.5zM448 76.5v71.2998c0 2.10059 -1.59961 3.7002 -3.7002 3.7002h-22.2002c-2.09961 0 -3.69922 -1.59961 -3.69922 -3.7002v-25.7998h-29.5v144
+c0 2.09961 -1.60059 3.7002 -3.7002 3.7002h-22.1006c-2.09961 0 -3.69922 -1.60059 -3.69922 -3.7002v-25.9004h-29.5v25.9004c0 2.09961 -1.60059 3.7002 -3.7002 3.7002h-22.2002c-2.09961 0 -3.7002 -1.60059 -3.7002 -3.7002v-25.9004h-29.5v25.9004
+c0 4.7998 -6.5 3.7002 -9.5 3.7002v30.7002c6.7002 1.59961 13.7998 2.7998 20.7998 2.7998c8.80078 0 16.8008 -3.5 25.4004 -3.5c3.7002 0 22.4004 0.899414 22.4004 6.5v48.3994c0 2.10059 -1.60059 3.7002 -3.7002 3.7002c-4.2002 0 -12.2002 -3.5 -19.4004 -3.5
+c-7.89941 0 -16.8994 3.5 -26.2998 3.5c-6.5 0 -12.9004 -0.899414 -19.2002 -2.2998v3.90039c4.40039 2.09961 7.40039 6.69922 7.40039 11.5c0 16.7998 -25.4004 16.7998 -25.4004 0c0 -4.80078 3 -9.5 7.40039 -11.5v-90.2002c-3 0 -9.5 1.09961 -9.5 -3.7002v-25.9004
+h-29.5v25.9004c0 2.09961 -1.60059 3.7002 -3.7002 3.7002h-22.2002c-2.09961 0 -3.7002 -1.60059 -3.7002 -3.7002v-25.9004h-29.5v25.9004c0 2.09961 -1.59961 3.7002 -3.69922 3.7002h-22.1006c-2.09961 0 -3.7002 -1.60059 -3.7002 -3.7002v-144h-29.5996v25.7998
+c0 2.10059 -1.59961 3.7002 -3.7002 3.7002h-22.0996c-2.10059 0 -3.7002 -1.59961 -3.7002 -3.7002v-71.2998c9.40039 -15.5 20.5996 -29.9004 33.5996 -42.9004c20.6006 -20.5996 44.5 -36.6992 71.2002 -48c13.9004 -5.89941 28.2002 -10.2998 42.9004 -13.1992v75.7998
+c0 58.5996 88.5996 58.5996 88.5996 0v-75.7998c14.7002 2.89941 29 7.39941 42.9004 13.1992c26.7002 11.3008 50.5996 27.4004 71.2002 48c13 13 24.1992 27.4004 33.5996 42.9004z" />
+ <glyph glyph-name="freebsd" unicode="&#xf3a4;"
+d="M303.7 351.8c11.0996 11.1006 115.5 77 139.2 53.2002c23.6992 -23.7002 -42.1006 -128.1 -53.2002 -139.2c-11.1006 -11.0996 -39.4004 -0.899414 -63.1006 22.9004c-23.7998 23.7002 -34.0996 52 -22.8994 63.0996zM109.9 379.9
+c-31.6006 -19.4004 -57.9004 -46.5 -76.4004 -78.7002c-20.7998 36.2998 -44.5 89.0996 -27.9004 105.7c16.4004 16.5 68 -6.40039 104.301 -27zM406.7 274c3.2998 5.5 7 11.7998 10.8994 18.7998c17.6006 -31.2998 27.7002 -67.3994 27.7002 -105.8
+c0 -119.1 -96.5 -215.6 -215.6 -215.6c-119.101 0 -215.601 96.5996 -215.601 215.6c0 119.1 96.5 215.6 215.601 215.6c35.8994 0 69.7002 -8.7998 99.5 -24.2998c-7.2998 -4 -13.9004 -8 -19.6006 -11.5996c-26 4.7002 -32.8994 -16.4004 -14.8994 -48.7002
+c21.7998 -43.0996 89 -90.4004 109.3 -70.0996c5.40039 5.39941 6 14.7998 2.7002 26.0996z" />
+ <glyph glyph-name="gitkraken" unicode="&#xf3a6;" horiz-adv-x="592"
+d="M565.7 329.9c11.7998 -31.6006 18.2998 -65.7002 18.2998 -101.4c0 -155.1 -122.6 -281.6 -276.3 -287.7v145.8c-8.40039 -0.5 -16.6006 -0.399414 -23.4004 0v-145.899c-153.7 6.2002 -276.3 132.7 -276.3 287.8c0 35.7002 6.5 69.7998 18.2998 101.3
+c2.2998 6.2002 9.2998 9.2002 15.2998 6.60059c5.7002 -2.40039 8.5 -8.80078 6.30078 -14.6006c-10.9004 -29 -16.9004 -60.5 -16.9004 -93.2998c0 -134.6 100.4 -245.7 230.2 -262.7v123.7c-7.90039 1.59961 -15.4004 3.7002 -23 6.2002v-104
+c-106.7 26 -185.9 122.1 -185.9 236.8c0 91.7998 50.7998 171.8 125.8 213.3c5.80078 3.2002 13 0.900391 15.9004 -5c2.7002 -5.5 0.700195 -12.0996 -4.7002 -15.0996c-67.8994 -37.7002 -113.899 -110.101 -113.899 -193.2c0 -93.4004 57.8994 -173.2 139.8 -205.4
+v92.2002c-14.2002 4.5 -24.7998 17.7002 -24.7998 33.5c0 13.1006 6.69922 24.4004 17.2998 30.5c-8.2002 79.6006 -44.5 58.6006 -44.5 83.9004v14.7998c0 38 87.8994 161.7 129.1 164.7c2.60059 0.200195 5.10059 0.200195 7.60059 0
+c41.0996 -2.90039 129 -126.7 129 -164.7v-14.7002c0 -25.2998 -36.2002 -4.39941 -44.5 -83.8994c10.5 -6.10059 17.2998 -17.4004 17.2998 -30.5c0 -15.8008 -10.7002 -29 -24.9004 -33.5v-92.2002c81.9004 32.2998 139.8 112.1 139.8 205.399
+c0 83.2002 -46 155.601 -113.899 193.2c-5.2998 2.90039 -7.40039 9.60059 -4.7002 15.1006c2.90039 5.89941 10.2002 8.19922 15.9004 5c75 -41.5 125.8 -121.5 125.8 -213.301c0 -114.699 -79.2002 -210.899 -185.9 -236.8v104
+c-7.5 -2.59961 -15.0996 -4.7002 -23 -6.2002v-123.699c129.9 17 230.2 128.1 230.2 262.699c0 32.8008 -6 64.3008 -16.9004 93.3008c-2.19922 5.69922 0.600586 12.1992 6.30078 14.5996c6 2.59961 13 -0.5 15.2998 -6.59961zM365.9 172.5
+c-13.1006 0 -23.7002 -10.5996 -23.7002 -23.7002c0 -13.2002 10.7002 -23.7002 23.7002 -23.7002c13.0996 0 23.6992 10.6006 23.6992 23.7002c0 13.2002 -10.6992 23.7002 -23.6992 23.7002zM226.1 125.2c13.2002 0 23.7002 10.7002 23.7002 23.7002
+c0 13.0996 -10.5996 23.6992 -23.7002 23.6992c-13.1992 0 -23.6992 -10.6992 -23.6992 -23.6992s10.5 -23.7002 23.6992 -23.7002z" />
+ <glyph glyph-name="gofore" unicode="&#xf3a7;" horiz-adv-x="400"
+d="M324 128.2c54.2998 0 65.7002 -50.1006 67.7002 -77.7002c-46.5 -56.2998 -107.8 -82.5 -171 -82.5c-123.7 0 -220.7 101.5 -220.7 224c0 123.4 98 224 220.7 224c59 0 114.3 -23.2998 156.1 -65.5996l-62.2998 -63.3008c-25 25.4004 -58.2998 39.4004 -93.5996 39.4004
+c-73.2002 0 -132.4 -60.2998 -132.4 -134.4c0 -74.1992 59.2002 -134.399 132.4 -134.399c33.5996 0 65.3994 12.7002 89.8994 35.7998v34.7002h13.2002zM311.9 240.7c47.6992 0 88.0996 -35 88.0996 -100.2v-30.5996c-15.5 26.6992 -42.5 41.7998 -76 41.7998h-118.4v89
+h106.301z" />
+ <glyph glyph-name="goodreads" unicode="&#xf3a8;"
+d="M299.9 256.8c5.09961 -37.2998 -4.7002 -79 -35.9004 -100.7c-22.2998 -15.5 -52.7998 -14.0996 -70.7998 -5.69922c-37.1006 17.2998 -49.5 58.5996 -46.7998 97.1992c4.2998 60.9004 40.8994 87.9004 75.2998 87.5c46.8994 0.200195 71.7998 -31.7998 78.2002 -78.2998
+zM448 360v-336c0 -30.9004 -25.0996 -56 -56 -56h-336c-30.9004 0 -56 25.0996 -56 56v336c0 30.9004 25.0996 56 56 56h336c30.9004 0 56 -25.0996 56 -56zM330 134.8c0 0 -0.0996094 34 -0.0996094 217.3h-29v-40.2998c-0.800781 -0.299805 -1.2002 0.5 -1.60059 1.2002
+c-9.59961 20.7002 -35.8994 46.2998 -76 46c-51.8994 -0.400391 -87.2002 -31.2002 -100.6 -77.7998c-4.2998 -14.9004 -5.7998 -30.1006 -5.5 -45.6006c1.7002 -77.8994 45.0996 -117.8 112.399 -115.199c28.9004 1.09961 54.5 17 69 45.1992
+c0.5 1 1.10059 1.90039 1.7002 2.90039c0.200195 -0.0996094 0.400391 -0.0996094 0.600586 -0.200195c0.299805 -3.7998 0.199219 -30.7002 0.0996094 -34.5c-0.200195 -14.7998 -2 -29.5 -7.2002 -43.5c-7.7998 -21 -22.2998 -34.7002 -44.5 -39.5
+c-17.7998 -3.89941 -35.5996 -3.7998 -53.2002 1.2002c-21.5 6.09961 -36.5 19 -41.0996 41.7998c-0.299805 1.60059 -1.2998 1.2998 -2.2998 1.2998h-26.7998c0.799805 -10.5996 3.19922 -20.2998 8.5 -29.1992c24.1992 -40.5 82.6992 -48.5 128.199 -37.4004
+c49.9004 12.2998 67.3008 54.9004 67.4004 106.3z" />
+ <glyph glyph-name="goodreads-g" unicode="&#xf3a9;" horiz-adv-x="383"
+d="M42.5996 44.7002h2.80078c12.6992 0 25.5 0 38.1992 -0.100586c1.60059 0 3.10059 0.400391 3.60059 -2.09961c7.09961 -34.9004 30 -54.5996 62.8994 -63.9004c26.9004 -7.59961 54.1006 -7.7998 81.3008 -1.7998c33.7998 7.40039 56 28.2998 68 60.4004
+c8 21.5 10.6992 43.7998 11 66.5c0.0996094 5.7998 0.299805 47 -0.200195 52.7998l-0.900391 0.299805c-0.799805 -1.5 -1.7002 -2.89941 -2.5 -4.39941c-22.0996 -43.1006 -61.2998 -67.4004 -105.399 -69.1006c-103 -4 -169.4 57 -172 176.2
+c-0.5 23.7002 1.7998 46.9004 8.2998 69.7002c20.5996 71.0996 74.5996 118.2 153.899 118.8c61.3008 0.400391 101.5 -38.7002 116.2 -70.2998c0.5 -1.10059 1.2998 -2.2998 2.40039 -1.90039v61.6006h44.2998c0 -280.301 0.0996094 -332.2 0.0996094 -332.2
+c-0.0996094 -78.5 -26.6992 -143.7 -103 -162.2c-69.5 -16.9004 -159 -4.7998 -196 57.2002c-8 13.5 -11.7998 28.2998 -13 44.5zM188.9 411.5c-52.5 0.5 -108.5 -40.7002 -115 -133.8c-4.10059 -59 14.7998 -122.2 71.5 -148.601
+c27.5996 -12.8994 74.2998 -15 108.3 8.7002c47.5996 33.2002 62.7002 97 54.7998 154c-9.7002 71.1006 -47.7998 120 -119.6 119.7z" />
+ <glyph glyph-name="google-drive" unicode="&#xf3aa;" horiz-adv-x="512"
+d="M339 133.1l-163.6 282.9h161.199l163.601 -282.9h-161.2zM201.5 109.5h310.5l-80.5996 -141.5h-310.5zM154.1 380.6l82.9004 -141.399l-156.4 -271.2l-80.5996 141.5z" />
+ <glyph glyph-name="google-play" unicode="&#xf3ab;" horiz-adv-x="512"
+d="M325.3 213.7l-220.7 221.3l280.801 -161.2zM47 448l256.6 -255.9l-256.6 -256c-13 6.80078 -21.7002 19.2002 -21.7002 35.3008v441.3c0 16.0996 8.7002 28.5 21.7002 35.2998zM472.2 222.4c19.2002 -14.3008 19.2002 -46.5 1.2002 -60.8008l-60.1006 -34.0996
+l-65.7002 64.5l65.7002 64.5zM104.6 -51l220.7 221.3l60.1006 -60.0996z" />
+ <glyph glyph-name="gripfire" unicode="&#xf3ac;" horiz-adv-x="352"
+d="M112.5 146.6c0 -26.8994 16.5996 -47.1992 32.5996 -69.5c22.5 -30.1992 44.2002 -56.8994 44.2002 -86.5c-0.0996094 -14.5 -4.39941 -29.6992 -17.5 -46.3994c0 5.2998 4.7998 12.2002 4.7998 22.2998c0 15.2002 -13 39.9004 -78.0996 86.5996
+c-34.2998 29.1006 -66.5 58.5 -66.5 108.301c0 114.699 147.1 176.5 147.1 268.6c0 3.2998 -0.199219 6.7002 -0.599609 10c5.09961 -2.40039 39.0996 -43.2998 39.0996 -90.4004c0 -80.5 -105.1 -129.199 -105.1 -203zM317.8 185.6
+c1.5 -8.39941 2.2002 -16.5996 2.2002 -24.5996c0 -51.7998 -29.4004 -97.5 -67.2998 -136.8c-1 -1 -2.2002 -2.40039 -3.2002 -2.40039c-3.59961 0 -35.5 41.6006 -35.5 53.2002c0 0 41.7998 55.7002 41.7998 96.9004c0 10.7998 -2.7002 21.6992 -9.09961 33.3994
+c-1.5 -32.2998 -55.7002 -87.7002 -58.1006 -87.7002c-2.69922 0 -17.8994 22 -17.8994 42.1006c0 5.2998 1 10.7002 3.2002 15.7998c2.39941 5.5 56.5996 72 56.5996 116.7c0 6.2002 -1 12 -3.40039 17.0996l-4 7.2002c16.7002 -6.5 82.6006 -64.0996 94.7002 -130.9z" />
+ <glyph glyph-name="grunt" unicode="&#xf3ad;" horiz-adv-x="384"
+d="M61.2998 258.7c0.5 4.89941 2.7998 10 7 12h0.100586c-4.60059 1.7002 -9.2002 3.09961 -13.5 4.09961c42.1992 10.2002 73.3994 -20.5996 83.0996 -31.7998c16.5996 -19.2002 35.5 -8.7998 35.5 -8.7998c0.299805 -11.1006 -10.2998 -19 -21.0996 -19.5
+c1.19922 -15.4004 -13.9004 -32.5 -13.9004 -32.5s5.59961 15 2.7002 25.2998c-0.900391 3.2002 -2 6.09961 -3 8.5c-19.2998 -17.2002 -48 -1.5 -54.9004 6.09961c-9.59961 10.6006 -12.3994 23.8008 -12.7998 34.1006c-1.7998 -3.7998 -3.2998 -9.10059 -4 -16.6006
+c0 0 -6.2998 9.10059 -5.2002 19.1006zM89.5996 260.5c-2.89941 -9.09961 -3.39941 -27.7002 6.90039 -35.2998c16.2998 -12.1006 32.2998 -5 38 -1.7002c-7.5 11.2998 -25.4004 26 -44.9004 37zM231.7 214.7c-10.7998 0.399414 -21.4004 8.39941 -21.2002 19.2998
+c0 0 18.7998 -10.4004 35.5 8.7998c9.7002 11.2002 40.7998 42 83.0996 31.7998c-4.2998 -0.899414 -8.89941 -2.2998 -13.5 -4.09961h0.100586c4.09961 -1.7998 6.39941 -6.7998 7 -11.7998c1.2002 -10 -5.2002 -19.1006 -5.2002 -19.1006
+c-0.599609 7.5 -2.2002 12.8008 -4 16.6006c-0.5 -10.2998 -3.2002 -23.5 -12.7998 -34.1006c-6.7998 -7.59961 -35.5 -23.3994 -54.7998 -6.09961c-1 -2.5 -2.10059 -5.2998 -3 -8.5c-2.90039 -10.2998 2.69922 -25.2998 2.69922 -25.2998s-15.0996 17 -13.8994 32.5z
+M294.4 260.5c-19.5 -11 -37.4004 -25.5996 -44.9004 -37c5.7002 -3.40039 21.5996 -10.5 37.9004 1.59961c10.3994 7.7002 10 26.3008 7 35.4004zM160 29.5c4.09961 0 7 -0.900391 8.7998 -2.7002c2.2002 -2.2998 1.5 -5.2998 0.900391 -6.7998
+c-1.10059 -2.7002 -5.5 -11.5996 -13 -19.7998c-2.7002 -2.90039 -6.60059 -4.60059 -11 -4.60059c-4.2998 0 -8.7002 1.60059 -11.7998 4.30078c-2.30078 2.09961 -10.2002 9.5 -13.7002 18.5996c-1.2998 3.40039 -1 6.09961 0.899414 8.09961
+c1.30078 1.30078 4 2.90039 9.5 2.90039h29.4004zM349.2 130.7c0 0 29.2998 -22.5 21.0996 -70.9004c-5.2998 -29.5 -23.2002 -46 -47 -54.7002c-8.7998 -19.0996 -29.3994 -45.6992 -67.2998 -49.5996c-14.5 -11.7998 -34.5 -19.5 -63.5996 -19.5h-0.200195
+c-29.2002 0 -49.2002 7.7002 -63.6006 19.5c-37.8994 3.90039 -58.5 30.5 -67.2998 49.5996c-23.7998 8.60059 -41.7998 25.2002 -47 54.7002c-8.59961 48.2002 20.6006 70.7998 20.6006 70.7998c2.39941 -17.8994 13 -33.8994 24.5996 -43.7998
+c3.09961 22.7002 3.7002 55.5 3.7002 62.4004c0 14.7002 -9.5 24.5 -12.2002 26.0996c-2.5 1.5 -5.2998 3 -8.2998 4.60059c-18 9.59961 -40.4004 21.5996 -40.4004 43.6992c0 16.1006 9.2998 23.2002 15.4004 27.8008c0.799805 0.599609 1.5 1.19922 2.2002 1.69922
+c2.09961 1.7002 3.69922 3 4.2998 4.40039c4.39941 9.7998 3.59961 34.2002 1.7002 37.5996c-0.600586 0.700195 -16.8008 21 -11.8008 39.2002c2 7.40039 6.90039 13.2998 14.1006 17c5.2998 2.7002 11.7998 4.2002 19.5 4.5c0.0996094 2 0.5 4 0.899414 5.90039
+c0.5 2.59961 1.10059 5.2998 0.900391 8.09961c-0.400391 4.7002 -0.799805 9.10059 -2.2002 11.2998c-8.39941 13.3008 -28.7998 17.6006 -29 17.6006l-12.2998 2.39941l8.09961 9.40039c0.200195 0.200195 17.3008 17.5 46.3008 17.5c7.89941 0 16 -1.2998 23.8994 -3.5
+c24.2998 -7.7998 42.9004 -30.5 49.4004 -39.2998c2 0.599609 3.89941 1.2002 5.89941 1.7002c-1 26.3994 20.7002 47.3994 28.2002 48.2998c0.5 -4.5 -0.399414 -22.2002 7.2002 -27.6006c2.2002 14.4004 9.59961 30.3008 39.0996 40.7002
+c-6.2998 -16.7002 -0.799805 -30.7002 1.80078 -37.2002c20.0996 18.2002 33.6992 15.2002 33.6992 15.2002s-13.1992 -22.7002 -9 -38.5c3.30078 -0.799805 6.5 -1.7002 9.60059 -2.7002c6.5 8.80078 25.2002 31.5 49.3994 39.3008
+c8.10059 2.59961 16.2002 3.89941 24.1006 3.89941c29 0 46.2002 -17.2998 46.2998 -17.5l8.09961 -9.5l-12.2998 -2.39941c-0.200195 0 -20.5996 -4.30078 -29 -17.6006c-1.39941 -2.2998 -1.7998 -6.59961 -2.2002 -11.2998
+c-0.199219 -2.7998 0.300781 -5.5 0.900391 -8.09961c0.400391 -2 0.799805 -3.90039 0.900391 -5.90039c7.59961 -0.299805 14.1992 -1.7998 19.5 -4.5c7.19922 -3.7002 12.0996 -9.59961 14.0996 -17c4.90039 -18.2998 -11.2002 -38.5996 -11.7998 -39.2002
+c-1.90039 -3.39941 -2.7002 -27.7998 1.7002 -37.5996c0.599609 -1.40039 2.19922 -2.7002 4.2998 -4.40039c0.700195 -0.599609 1.39941 -1.09961 2.2002 -1.7002c6.09961 -4.59961 15.3994 -11.5996 15.3994 -27.7998c0 -22.0996 -22.3994 -34.0996 -40.3994 -43.7002
+c-2.90039 -1.59961 -5.80078 -3.09961 -8.30078 -4.59961c-2.69922 -1.59961 -12.1992 -11.4004 -12.1992 -26.0996c0 -6.90039 0.599609 -39.7002 3.69922 -62.4004c11.6006 9.90039 22.2002 25.7998 24.6006 43.7002zM305.7 410.3
+c-17.7998 -5.7002 -31.6006 -23.0996 -37.7002 -32.2002c1.59961 -0.699219 3.09961 -1.39941 4.7002 -2.19922c2.59961 -1.2002 4.89941 -2.40039 7.09961 -3.7002c2.7002 5.5 8.40039 13.7002 20.7002 22.3994c8.2002 5.80078 18.2002 8.90039 28.7002 8.90039
+c3.59961 0 6.7998 -0.400391 9.2002 -0.799805c3.2998 2.09961 6.59961 3.89941 9.69922 5.2998c-4.7998 2 -13.6992 5 -24.6992 5c-6.10059 0 -12.1006 -0.900391 -17.7002 -2.7002zM326.7 392.1c-7.40039 -0.299805 -14 -2.69922 -19.6006 -7
+c-8 -6.39941 -12.0996 -17.6992 -13.5 -22.5c4.90039 -4.19922 8.2002 -8.09961 10.5 -11.1992c3.40039 1 7.30078 1.89941 11.5 2.69922c3.30078 4.5 3.90039 10.6006 4.40039 17c0.5 6.2002 1.09961 12.6006 4.40039 17.8008c0.699219 1.09961 1.5 2.19922 2.2998 3.19922
+zM45.5996 402.7c2.40039 0.399414 5.60059 0.799805 9 0.899414c10.6006 0 20.5 -3.09961 28.8008 -8.89941c12.3994 -8.7002 18.0996 -17 20.6992 -22.4004c2.2002 1.2002 4.60059 2.5 7.10059 3.7002c1.59961 0.799805 3.2002 1.5 4.7998 2.2002
+c-6.09961 8.89941 -19.9004 26.2998 -37.7002 32.0996c-5.7002 1.7998 -11.5996 2.7002 -17.7002 2.7002c-11 0 -19.8994 -3 -24.6992 -5c3.09961 -1.2998 6.39941 -3.09961 9.69922 -5.2998zM90.2998 362.6c-1.39941 4.80078 -5.5 16.1006 -13.5 22.4004
+c-5.5 4.40039 -12.0996 6.7002 -19.5 7c0.799805 -1 1.60059 -2.09961 2.2998 -3.2002c3.30078 -5.2002 3.90039 -11.5996 4.40039 -17.7998c0.5 -6.40039 1 -12.5 4.2998 -16.9004c4.2002 -0.799805 8.10059 -1.7998 11.5 -2.69922c2.2002 3.19922 5.60059 7 10.5 11.1992z
+M58.0996 188.1c8.7002 -5 18.1006 -16.7998 19 -34.1992c0.900391 -14.7002 -0.899414 -49.9004 -3.39941 -75.9004c12.5 -4.7998 26.7002 -6.40039 39.7002 -6.7998c2 4.09961 3.89941 8.5 5.5 13.0996c0.699219 1.90039 19.5996 51 26.3994 62.2002
+c-5.39941 -39 -17.5 -73.7002 -23.5 -89.5996c3.40039 0.399414 7.2998 0.699219 11.7002 0.699219h117c4.40039 0 8.2002 -0.199219 11.7002 -0.699219c-6 15.8994 -18 50.5996 -23.5 89.5996c6.7998 -11.0996 25.7002 -60.2002 26.3994 -62.2002
+c1.60059 -4.59961 3.5 -9 5.5 -13.0996c13 0.399414 27.3008 2 39.7002 6.7998c-2.5 26 -4.2998 61.2998 -3.39941 75.9004c1.09961 17.5 10.3994 29.1992 19.0996 34.1992c2.7002 1.5 5.5 3.10059 8.40039 4.60059c14.7998 8 30.1992 16.2998 30.1992 30.5
+c0 11.0996 -4.2998 14.5 -8.89941 18.2002l-0.5 0.399414c-0.700195 0.600586 -1.5 1.2002 -2.2002 1.7998c0.900391 -7.19922 1.90039 -13.2998 2.7002 -14.8994c0 0 -12.1006 15 -15.7002 44.2998c-1.40039 11.5 1.09961 34.2002 5.09961 43
+c-0.199219 -4.90039 0 -9.7998 0.300781 -14.4004c0.399414 0.900391 0.799805 1.60059 1.2998 2.2002c3.2998 4 11.8994 17.5 9.39941 26.6006c-1 3.39941 -3.19922 6 -6.69922 7.7998c-3.80078 1.89941 -8.80078 2.89941 -15.1006 2.89941
+c-12.2998 0 -25.8994 -3.7998 -32.8994 -6c-25.1006 -7.89941 -55.4004 -30.8994 -64.1006 -37.6992c-0.200195 -0.200195 -0.399414 -0.300781 -0.399414 -0.300781l-5.60059 -3.89941l3.5 5.7998c0.200195 0.299805 19.1006 31.4004 53.1006 46.5
+c-2 2.90039 -7.40039 8.2002 -21.6006 15.0996c-21.3994 10.5 -46.3994 15.8008 -74.2998 15.8008c-27.7998 0 -52.9004 -5.30078 -74.2998 -15.8008c-14.2002 -7 -19.6006 -12.1992 -21.6006 -15.0996c34.1006 -15.0996 53 -46.2002 53.2002 -46.5l3.5 -5.7998
+l-5.59961 3.89941s-0.200195 0.100586 -0.400391 0.300781c-8.7002 6.7998 -39 29.6992 -64.0996 37.6992c-7 2.30078 -20.6006 6 -32.9004 6c-6.2998 0 -11.2998 -1 -15.0996 -2.89941c-3.60059 -1.7998 -5.7998 -4.2998 -6.7002 -7.7998
+c-2.40039 -9.10059 6.2002 -22.6006 9.40039 -26.6006c0.5 -0.599609 0.899414 -1.39941 1.2998 -2.2002c0.299805 4.60059 0.5 9.5 0.299805 14.4004c4 -8.7002 6.5 -31.5 5.09961 -43c-3.59961 -29.2998 -15.6992 -44.2998 -15.6992 -44.2998
+c0.799805 1.59961 1.7998 7.7002 2.69922 14.8994c-0.799805 -0.599609 -1.5 -1.19922 -2.19922 -1.7998l-0.5 -0.399414c-4.60059 -3.60059 -8.90039 -7.10059 -8.90039 -18.2002c0 -14.2002 15.2998 -22.5 30.2002 -30.5c2.7998 -1.5 5.7002 -3 8.39941 -4.60059z
+M34.7998 43.4004c11.9004 -19.7002 35.5 -29.4004 58.2002 -29.5c-4.5 13.2998 -3.09961 24 4.09961 31.7998l1.40039 1.39941c1.7998 2.40039 4.2998 5.80078 7 10c-27.2002 1.10059 -63.5 11 -74.4004 45.4004c-5 -5 -8.39941 -39.0996 3.7002 -59.0996zM80.5 -0.0996094
+c6.5 -9.5 16.5 -19.6006 30.9004 -25.5c-4.90039 7.19922 -8.80078 15.0996 -12.3008 23.0996c-6.39941 0.5 -12.5996 1.2998 -18.5996 2.40039zM192 -50.2002c60.5996 0.100586 78.2998 45.9004 84.9004 64.7002c3.59961 10.5 3.2998 18.2998 -0.900391 23.0996
+c-2.7998 3.30078 -9.5 7.2002 -24.5996 7.2002h-118.801c-15.0996 0 -21.6992 -3.89941 -24.5996 -7.2002c-4.2998 -4.89941 -4.59961 -12.5996 -0.900391 -23.0996c6.60059 -18.9004 24.3008 -64.5996 84.9004 -64.7002zM272.6 -25.5996
+c14.4004 5.89941 24.4004 16 30.9004 25.5c-6 -1.10059 -12.2002 -1.90039 -18.5996 -2.40039c-3.5 -8 -7.40039 -15.9004 -12.3008 -23.0996zM349.2 43.4004c12.2002 19.8994 8.7998 54 3.7998 59c-10.9004 -34.4004 -47.2002 -44.2002 -74.4004 -45.4004
+c2.7002 -4.2002 5.2002 -7.59961 7 -10c0.5 -0.5 1 -1 1.40039 -1.5c7.2002 -7.7002 8.59961 -18.5 4.09961 -31.7998c22.5 0.399414 46.1006 10 58.1006 29.7002zM191.9 260.3c-12.7002 0.200195 -27.2002 17.7998 -27.2002 17.7998
+c9.89941 -6 18.7998 -8.09961 27.2998 -8.2998c8.5 0.200195 17.4004 2.2998 27.2998 8.2998c0 0 -14.5 -17.6992 -27.2002 -17.7998h-0.199219zM253.6 29.5996c5.40039 -0.0996094 8.10059 -1.69922 9.40039 -3c1.90039 -1.89941 2.2002 -4.59961 0.900391 -7.89941
+c-3.5 -8.90039 -11.4004 -16.1006 -13.7002 -18.1006c-3.10059 -2.59961 -7.40039 -4.19922 -11.7998 -4.19922c-4.40039 0 -8.30078 1.59961 -11 4.5c-7.5 8 -12 16.6992 -13 19.2998c-0.600586 1.5 -1.30078 4.39941 0.899414 6.7002
+c1.7002 1.7998 4.7002 2.69922 8.90039 2.69922h29.3994z" />
+ <glyph glyph-name="gulp" unicode="&#xf3ae;" horiz-adv-x="255"
+d="M209.8 56.9004l-14.0996 -24.6006l-4.60059 -80.2002c0 -8.89941 -28.2998 -16.0996 -63.0996 -16.0996s-63.0996 7.2002 -63.0996 16.0996l-5.80078 79.4004l-14.8994 25.4004c41.2002 -17.3008 126 -16.7002 165.6 0zM13.7998 310.2
+c30.7002 -17 197.8 -16.9004 228.3 0.200195l-14.7998 -136.801c-4.7998 -4.19922 -11.5996 -10.1992 -16.5996 -14.0996c-1.60059 -1.2002 -6 -4.7002 -8 -4.7002c-1.2998 0 -2.2002 0.5 -2.2002 1.7998c0.0996094 1 3.40039 4.5 5 6.40039
+c4.90039 5.7002 13.7998 16 13.7998 23.4004c0 7 -10.7002 14.0996 -25.7002 0.199219c-1.59961 -1.5 -3.09961 -3 -4.5 -4.5c0.400391 1.10059 1.10059 5.10059 1.10059 6.2002c0 2.7998 -1.40039 4 -4.2002 4c-1 0 -1.90039 -0.599609 -2.7002 -1.59961
+c-2.59961 -3.10059 -3.89941 -7.5 -5.2998 -11.2998c-0.5 -1.80078 -1.09961 -3.60059 -1.7002 -5.5c-0.399414 -0.200195 -0.700195 -0.300781 -0.899414 -0.600586c-3.80078 -3.89941 -17.7002 -17 -23.1006 -17c-2.2998 0 -1.59961 3.60059 -1 5.7998
+c1 3.40039 6.7998 17.7002 8.7002 22.3008c4.59961 11.0996 8 19.7998 13.2002 31.8994c3.89941 9.2002 3.7998 8.60059 4.5 10.5c0.700195 2.10059 0.700195 4.90039 -1 6.2002c-1 0.700195 -2 1.09961 -3.2002 1.09961c-2.40039 0 -4.7998 -1.39941 -6.09961 -4.69922
+c-25.5 -64.4004 -25.2002 -63.3008 -26.4004 -68.2002c-2 -1.7002 -4.40039 -3.40039 -6.7998 -4.5c-3.10059 -1.40039 -6.7998 -2.2002 -6.7998 1.2002c0 3.69922 1.39941 8.19922 2.69922 11.6992c2.2002 6.10059 4.90039 11.1006 6.90039 16.7002
+c0.900391 2.40039 1.2998 4.7002 -0.400391 6.90039c-0.799805 1 -1.89941 1.5 -3.19922 1.5c-2.60059 0 -4.10059 -2.60059 -5.2002 -5.10059c-0.700195 -1.5 -1.2998 -3.09961 -1.7998 -4.7998c-1.2002 -4 -3.60059 -8.7002 -5.60059 -12.2998
+c-2.7998 -5 -6.5 -10.0996 -11.0996 -13.5c-2.2002 -1.59961 -4.5 -2.40039 -6.90039 -2.40039c-3.5 0 -2.39941 5.7002 -1.5 9c2.2002 7.80078 5.5 13.3008 9.2998 20.8008c1.30078 2.69922 2.30078 5.39941 -0.299805 7.19922c-0.5 0.300781 -1 0.5 -1.59961 0.700195
+c-3.40039 0.900391 -6 -1.09961 -7.60059 -4.5c-3.09961 -6.2998 -5.39941 -11.7002 -7.09961 -16.2002c-3.2998 -8.89941 -6.90039 -18.2998 -4.59961 -23.7998c1.5 -3.7002 4.5 -5.09961 8.59961 -5.09961c9.7998 0 17.7998 6.7002 22.4004 14.8994
+c-4.30078 -19.7998 8.19922 -17.2998 20 -8.09961c0.0996094 -0.400391 0.0996094 -0.799805 0.199219 -1.2002c1.5 -6.7002 8.7002 -6.7002 14.5 -4.09961c3.5 1.59961 8.2002 4.5 14.4004 10.5c0.200195 0.299805 0.799805 1.39941 -0.799805 -2.2998
+c-7.2002 -16.2002 -13.5 -28.2002 -15 -34.3008c-0.200195 -0.899414 -0.299805 -1.7998 -0.299805 -2.69922c0 -1.80078 0.399414 -3.10059 1.2998 -3.7002c1.59961 -1.2002 4.2002 -1.2998 6.09961 -0.299805c1.7998 1 3.10059 2.59961 4 4.5
+c1 2.19922 0.200195 0.699219 5.2002 14c5 13.3994 2.90039 7.7998 9.09961 22c1.90039 4.2998 4.2002 9.5 8.5 15.5c2.5 3.39941 5.5 7 8.7002 9.69922c5.7002 4.7002 11.7002 5.40039 11.7002 2.5c0 -2.19922 -3.2998 -6.39941 -4.7002 -8.09961
+c-5.2998 -6.7002 -14.3994 -16.2998 -14.3994 -21.5c0 -9.5 12 -8 17.3994 -5.7002c7.2998 3.2002 13.9004 9.60059 19.6006 14.7998l-10.9004 -94.5996c-1.90039 -4.90039 -39.0996 -17.0996 -88.2002 -17.0996c-49 0 -86.2002 12.0996 -88.2002 17.0996l-7.59961 79.5996
+c2.09961 -1.5 4.2998 -2.39941 7.7002 -2.39941c7.39941 0 16.0996 6.7002 21.5 11.7998c2.2998 2.2002 4.39941 4.40039 6.39941 6.59961c-1 -3 -7.09961 -22 -7.2998 -25.1992c-0.0996094 -1 -0.200195 -4.90039 0.799805 -6.30078
+c0.5 -0.799805 1.40039 -1.19922 2.60059 -1.19922c2.89941 0 5.59961 4.69922 6.2998 7.5c0 0 1.7998 6.2998 7.59961 25.7998c6.30078 21.0996 10 24.5 10 34.7002c0 5.59961 -7.2998 6.7998 -9.89941 0l-5.2002 -15.5c-2.2002 -4.5 -8 -11.5 -12.5 -16
+c-3.5 -3.5 -10.7998 -10.1006 -15.7998 -10.1006c-2.40039 0 -3.90039 1.40039 -4.90039 3.60059c-2.2998 5.2998 -0.899414 14.2998 0.600586 19.8994c2.59961 9.7002 6.89941 19.4004 12 28.2002c4.19922 7.2998 10.1992 15.7002 17.0996 20.7002
+c6.59961 4.7998 12.7998 4.5 16.9004 -2.7998c1.5 -2.7002 3.7998 -7.30078 6.7998 -7.30078c2.5 0 5.7002 2.60059 4.5 9.10059c-0.5 2.5 -4.90039 8.7998 -10.1006 11.7998c-6 3.59961 -12.3994 3.59961 -18.6992 0.900391
+c-19.2002 -8.2002 -34.1006 -35.2002 -40 -55.2002zM243.5 318.7c0 -21 -231.2 -21 -231.2 0c0 8.7998 51.7998 15.8994 115.601 15.8994c9 0 17.7998 -0.0996094 26.2998 -0.399414l12.5996 48.7002l61.2998 64.5c1.40039 1.39941 5.80078 0.199219 9.90039 -3.5
+c4.09961 -3.7002 6.59961 -7.90039 5.2998 -9.30078l-0.0996094 -0.0996094l-57.2998 -60.5l-10 -40.7002c39.8994 -2.59961 67.5996 -8.09961 67.5996 -14.5996zM174.1 314.1c0 0.800781 -0.899414 1.5 -2.5 2.10059l-0.199219 -0.799805
+c0 -1.30078 -5 -2.40039 -11.1006 -2.40039c-6.09961 0 -11.0996 1.09961 -11.0996 2.40039c0 0.0996094 0 0.199219 0.0996094 0.299805l0.200195 0.700195c-1.7998 -0.600586 -3 -1.40039 -3 -2.30078c0 -2.09961 6.2002 -3.69922 13.7002 -3.69922
+c7.7002 -0.100586 13.8994 1.59961 13.8994 3.69922z" />
+ <glyph glyph-name="hacker-news-square" unicode="&#xf3af;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM21.2002 218.8h-0.200195c0.0996094 0.100586 0.200195 0.299805 0.299805 0.400391c0 -0.100586 0 -0.299805 -0.0996094 -0.400391z
+M239.2 164.9l80.7998 155.1h-34.7998c-54.7998 -101.2 -48.2998 -98.5996 -60.6006 -125.6c-10.0996 24.3994 -6.7998 27.2998 -59.2998 125.6h-37.2998l79.7998 -153.3v-102.7h31.4004v100.9z" />
+ <glyph glyph-name="hire-a-helper" unicode="&#xf3b0;" horiz-adv-x="512"
+d="M443.1 448c3.90039 -36.4004 32.5 -65.7998 68.9004 -71.7002v-370.5c-35.4004 -4 -64.9004 -33.3994 -67.9004 -69.7998h-372.199c-5.90039 36.4004 -34.5 63.9004 -71.9004 68.7998v371.5c37.4004 3.90039 67.9004 34.4004 71.9004 71.7002h371.199zM406.1 43.0996
+c7.80078 0 5.80078 10.8008 0 10.8008c-10.2998 3.39941 -13.5 3.59961 -21.6992 13.7998c-7.80078 12.8994 -7.90039 44.3994 -7.90039 127.8v101.2c0 22.0996 12.2002 28.2998 28.5996 32.3994c8.90039 2.2002 3.90039 11.8008 -1 11.8008
+c-36.5 0 -20.5996 -2 -57.0996 -2c-32.7002 0 -16.5 2 -49.2002 2c-3.2998 0 -8.5 -8.30078 -1 -10.8008c4.90039 -1.59961 27.6006 -3.69922 27.6006 -39.2998c0 -45.5996 0.199219 -55.7998 -1 -68.7998c0 -1.2998 -2.30078 -12.7998 -12.8008 -12.7998h-109.199
+c-10.5 0 -12.8008 11.5 -12.8008 12.7998c-1.19922 13 -1 23.2002 -1 68.7998c0 35.6006 22.7002 37.7002 27.6006 39.2998c7.5 2.5 2.2998 10.8008 -1 10.8008c-32.7002 0 -16.5 -2 -49.2002 -2c-36.5 0 -20.5996 2 -57.0996 2c-5 0 -9.80078 -9.60059 -1 -11.8008
+c16.3994 -4.09961 28.5996 -10.1992 28.5996 -32.3994v-101.2c0 -83.4004 -0.200195 -114.9 -7.90039 -127.8c-8.19922 -10.2998 -11.5 -10.4004 -21.6992 -13.7998c-5.80078 0 -7.90039 -10.8008 0 -10.8008c36.2998 0 18.7998 2 55.0996 2c35.7998 0 21 -2 56.0996 -2
+c6 0 4.90039 8.2002 0 9.80078c-22.7998 7.59961 -22.8994 10.2998 -24.5996 12.7998c-10.4004 15.5996 -5.90039 83 -5.90039 113c0 5.2998 6.40039 12.7998 13.8008 12.7998h111.199c7.40039 0 13.8008 -7.5 13.8008 -12.7998c0 -30 4.5 -97.4004 -5.90039 -113
+c-1.7002 -2.60059 -1.7998 -5.2002 -24.5996 -12.7998c-4.90039 -1.60059 -5.90039 -9.80078 0 -9.80078c35.0996 0 20.2998 2 56.0996 2c36.2998 0 18.7998 -2 55.0996 -2z" />
+ <glyph glyph-name="hotjar" unicode="&#xf3b1;"
+d="M414.9 286.5c30 -53 41.7998 -121.6 26.2998 -180.9c-14.7002 -56.6992 -68.2998 -120.3 -148.8 -145.6c54.5 76.9004 43.8994 200.1 -27.1006 215.5c54.2002 -93.9004 -53.7002 -180.3 -110.8 -93.9004c-2.5 -7.19922 -25.0996 -74.5 4.09961 -129.6
+c-61.0996 9.09961 -117.8 33.5 -144.6 93.4004c-35 78.1992 -2.7002 149.8 79 204.899c129.2 87.2998 28.0996 197.7 28.0996 197.7s219.101 -29 293.801 -161.5z" />
+ <glyph glyph-name="hubspot" unicode="&#xf3b2;" horiz-adv-x="512"
+d="M267.4 236.4l-163.2 114.699c-7.90039 -4.69922 -17 -7.59961 -26.7998 -7.59961c-28.8008 0 -52.2002 23.4004 -52.2002 52.2998c0 28.7998 23.3994 52.2002 52.2002 52.2002c28.8994 0 52.3994 -23.4004 52.3994 -52.2002c0 -4.7998 -0.799805 -9.39941 -2 -13.7998
+c51.4004 -39.0996 141.3 -103.9 168.9 -124.8c13.0996 6.89941 27.5 11.5 42.7002 13.5996v61.2002c-17.5 7.40039 -28.2002 23.7998 -28.2002 42.9004c0 26.0996 20.5996 47.8994 46.7002 47.8994c26.0996 0 47 -21.7998 47 -47.8994
+c0 -19.1006 -10.7002 -35.5 -28.2002 -42.9004v-61.5996c62.5 -9.5 110.2 -63.5 110.2 -128.7c0 -71.9004 -58.1006 -130.2 -130 -130.2c-29.9004 0 -57.3008 10 -79.3008 26.9004l-50 -50.2002c1.30078 -3.90039 1.90039 -7.90039 1.90039 -12.1006
+c0 -10.6992 -4.2002 -20.8994 -11.7998 -28.5c-7.7002 -7.69922 -17.7998 -11.5996 -28.6006 -11.5996c-10.6992 0 -20.8994 4 -28.5 11.5996c-7.59961 7.60059 -11.7998 17.7002 -11.7998 28.5c0 10.8008 4.2002 21 11.7998 28.6006
+c7.60059 7.59961 17.7002 11.7998 28.5 11.7998c4.90039 0 9.60059 -0.900391 14 -2.5l49.5 49.7998c-16.2998 21.7002 -26 48.7002 -26 78c0 37.2998 15.7002 70.9004 40.8008 94.6006zM356.9 72.7998c38.0996 0 69 30.9004 69 69c0 38.1006 -30.9004 69 -69 69
+c-38.1006 0 -69 -30.8994 -69 -69c0 -38.0996 30.8994 -69 69 -69z" />
+ <glyph glyph-name="itunes" unicode="&#xf3b4;"
+d="M223.6 367.7c94.5 0 171.2 -76.7002 171.2 -171.3c0 -94.5 -76.5996 -171.2 -171.2 -171.2c-94.5996 0 -171.1 76.7998 -171.1 171.3s76.5 171.2 171.1 171.2zM303 127.7c1.40039 6.2002 0.900391 -3 1 167.6c0 5.7002 -3.2998 9.10059 -9 8.7002
+c-1.7998 0 -14.0996 -2.40039 -115.1 -21.4004c-0.900391 0 -4.60059 -1 -6.7002 -2.69922c-2 -1.60059 -3.10059 -3.80078 -3.5 -6.40039c-1.7002 -6.7002 2.39941 -128 -2.60059 -133.7c-2.09961 -2.5 -4.69922 -3.2002 -7.69922 -3.7002
+c-17.7002 -3.19922 -29.6006 -4.7998 -38 -12.7998c-14.5 -14.2002 -7 -38.8994 14.3994 -42.8994c8 -1.40039 23.1006 0.599609 31.4004 5.19922c7.2998 3.80078 12.7998 10.6006 14.8994 19.6006c1.7002 7.7002 1.2002 2.39941 1.2002 118.5
+c0 5.7002 1.7002 7.2002 6.7002 8.2998c0 0 87.9004 16.4004 91.9004 17.0996c5.69922 1 8.39941 -0.5 8.39941 -6.09961c0 -78.7998 1 -77.2002 -2.2002 -80.7998c-2.09961 -2.5 -4.69922 -3.2002 -7.69922 -3.7002c-17.7002 -3.2002 -29.6006 -4.7998 -38 -12.7998
+c-10.6006 -10.4004 -10.4004 -26.7998 1.39941 -36.7998c9.7002 -7.80078 19.7998 -7.2002 31.9004 -5c13.7998 2.59961 24.0996 10.1992 27.2998 23.7998zM345.2 416c56.8994 0 102.8 -45.9004 102.8 -102.8v-242.4c0 -56.8994 -45.7998 -102.8 -102.8 -102.8h-242.4
+c-56.8994 0 -102.8 45.9004 -102.8 102.8v242.4c0 56.8994 45.9004 102.8 102.8 102.8h242.4zM223.6 4c106.301 0 192.5 86.2002 192.5 192.5s-86.1992 192.5 -192.5 192.5c-106.3 0 -192.5 -86.2002 -192.5 -192.5s86.2002 -192.5 192.5 -192.5z" />
+ <glyph glyph-name="itunes-note" unicode="&#xf3b5;" horiz-adv-x="384"
+d="M381.9 59.7998c-6.40039 -27.3994 -27.2002 -42.7998 -55.1006 -48c-24.5 -4.5 -44.8994 -5.59961 -64.5 10.2002c-23.8994 20.0996 -24.2002 53.4004 -2.7002 74.4004c17 16.1992 40.9004 19.5 76.8008 25.7998c6 1.09961 11.1992 2.5 15.5996 7.39941
+c6.40039 7.2002 4.40039 4.10059 4.40039 163.2c0 11.2002 -5.5 14.2998 -17 12.2998c-8.2002 -1.39941 -185.7 -34.5996 -185.7 -34.5996c-10.2002 -2.2002 -13.4004 -5.2002 -13.4004 -16.7002c0 -234.7 1.10059 -223.899 -2.5 -239.5
+c-4.2002 -18.2002 -15.3994 -31.8994 -30.2002 -39.5c-16.7998 -9.2998 -47.1992 -13.3994 -63.3994 -10.3994c-43.2002 8.09961 -58.4004 58 -29.1006 86.5996c17 16.2002 40.9004 19.5 76.8008 25.7998c6 1.10059 11.1992 2.5 15.5996 7.40039
+c10.0996 11.5 1.7998 256.6 5.2002 270.2c0.799805 5.19922 3 9.59961 7.09961 12.8994c4.2002 3.5 11.7998 5.5 13.4004 5.5c204 38.2002 228.899 43.1006 232.399 43.1006c11.5 0.799805 18.1006 -6 18.1006 -17.6006c0.200195 -344.5 1.09961 -326 -1.7998 -338.5z" />
+ <glyph glyph-name="jenkins" unicode="&#xf3b6;" horiz-adv-x="511"
+d="M487.1 23c1.5 -11.9004 -5.2998 -28.2998 -8.69922 -39.7002c-4.90039 -16.2998 -9.7002 -31.8994 -14.6006 -47.2002h-422c-0.700195 1.90039 -1.39941 4 -2.09961 6c-4.60059 14.2002 -12.6006 31.7002 -14.7002 45.8008
+c-3.09961 20.8994 16.5996 22.0996 29.2002 31.0996c19.5 14 34.7998 21.7998 55.8994 34.2998c6.30078 3.7998 25.1006 13.2002 27.3008 17.6006c4.2998 8.69922 -7.30078 20.8994 -10.4004 27.6992c-4.90039 10.7002 -7.5 19.8008 -8.2002 30.4004
+c-17.7002 2.7998 -31.0996 13.2998 -39.2002 25.2002c-13.3994 19.7002 -22.6992 56 -11.0996 83.7002c0.900391 2.19922 5.40039 6.5 6.09961 9.7998c1.40039 6.59961 -2.5 15.3994 -2.69922 22.3994c-1.2002 36 6.09961 67 30.2998 77.8008
+c9.7998 39.0996 45 52.1992 78.0996 71.5996c12.2998 7.2998 26 11.9004 40.1006 17.0996c50.5 18.7002 128.1 15.1006 170.1 -16.5996c17.7998 -13.5 46.2002 -41.9004 56.4004 -62.5c26.8994 -54.2998 25 -145.1 6.19922 -211.2
+c-2.5 -8.89941 -6.19922 -21.8994 -11.2998 -32.5996c-3.59961 -7.40039 -14.7002 -22.2998 -13.2998 -28.9004c1.40039 -6.7998 25.2998 -24.8994 30.4004 -29.8994c9.19922 -8.80078 26.7998 -20.7002 28.1992 -31.9004zM205.9 414.3
+c-33.2002 -9.39941 -75.7002 -33.5 -89.3008 -63.3994c10.6006 1.5 17.9004 6.7998 28.3008 7.5c3.89941 0.299805 9.09961 -1.60059 13.5996 -0.5c9 2.2998 16.5996 22.5 23.4004 30c6.59961 7.39941 14.5996 10.5 20 17.1992c3.5 1.7002 8.69922 1.60059 8.89941 6.80078
+c-1.5 1.69922 -3.09961 2.89941 -4.89941 2.39941zM101.1 320.7c-14.6992 -16.1006 -11.5996 -46.2998 -9.7998 -67.7998c26.5 16.6992 61.6006 -1.30078 61.2998 -29.6006c12.6006 0.299805 4.7002 15.7998 2.40039 25.7002c-7.5 32.5996 12.5996 67.9004 0.900391 97.5996
+c-22.7002 -1.7998 -41.3008 -11 -54.8008 -25.8994zM137.8 120.5c4.90039 -20 15.7002 -46 26.2998 -61.4004c13.6006 -19.3994 40.1006 -22.2998 68.7002 -24.1992c5.10059 11 23.9004 10.0996 36.2002 7.19922c-14.7002 5.80078 -28.4004 19.9004 -39.7002 32.4004
+c-13 14.2998 -26.0996 29.7002 -26.7998 48.4004c24.5 -34 44.7998 -63.8008 89.5 -78.8008c33.7998 -11.2998 73.2002 5.2002 99.2002 23.4004c10.7998 7.59961 17.2002 19.5996 24.8994 30.5996c28.7002 41.2002 42 100.101 39.1006 157.101
+c-1.2002 23.5 -1.10059 47 -9 62.7998c-8.2998 16.5996 -36.2002 31.2998 -52.5 16.4004c-3 16.0996 13.5996 26.0996 33.0996 20.2998c-13.8994 18 -28.5996 39.5996 -48.2998 50.7002c-34.4004 19.5 -92.7002 34.0996 -129.3 15.7998
+c-29.6006 -14.7002 -69.5 -39.1006 -83.1006 -70c12.7002 -29.7998 -3.7998 -57.1006 -4.7998 -87.4004c-0.599609 -16.0996 7.60059 -30.2002 8.2002 -47.7002c-4.40039 -7.19922 -17.7002 -8.09961 -26.9004 -7.59961c-3.09961 15.5 -8.5 32.9004 -24.5 34.7002
+c-22.5 2.39941 -39.0996 -16.2998 -40.0996 -35.7998c-1.2002 -23 17.7002 -61 44.4004 -58.4004c10.2998 1.09961 12.7998 11.4004 24.0996 11.2998c6.09961 -12.2002 -9.40039 -16 -11 -24.7002c-0.400391 -2.19922 1.2998 -11 2.2998 -15.0996zM359.8 -3.59961
+c-1.59961 -4.40039 0.299805 -10.4004 -0.599609 -16.5c14.8994 -4.2002 31.8994 -6.40039 50.7002 -7c3.69922 4.7998 4.89941 13.7998 4.5 22.7998c-0.600586 10.7998 -3.40039 33.0996 -10.1006 37c-14.0996 8.2002 -39 -16.5 -49.5996 -20.2998
+c1.2002 -3.40039 3.09961 -6 3.2002 -10.2002c6.2998 1.5 13.8994 0.5 19.2998 -2.2002c-6.2998 -0.700195 -13.2998 -0.599609 -17.4004 -3.59961zM342.6 16.4004c7.60059 5.5 14.3008 12 22.2002 17.0996c-18.2002 -1.59961 -41 -12.9004 -59 -4.90039
+c-0.0996094 -0.899414 -1.2998 -0.599609 -1.5 -1.39941c12.2998 -9.60059 21.5 -11.6006 38.2998 -10.7998zM330.5 -16.7998c26.9004 -8.40039 22.2002 36.7998 -2.7998 20.2002c-0.700195 -8.2002 1.2002 -10.8008 2.7998 -20.2002zM226 9.40039
+c0 6.19922 3.59961 12 2.7998 16.3994c-13.7998 2.40039 -31.8994 0.799805 -41.2998 7.2998c-9.59961 -9.69922 26.9004 -23 38.5 -23.6992zM57.7002 -49.0996v-0.100586h180.7c-0.800781 2.5 -1.5 4.90039 -2.2002 7.2002c-4.7998 15.2998 -7.5 26.7002 -8.7002 35.5
+c-19.2002 9.2002 -39.7002 18.5 -56.2002 30.2002c-3 2.2002 -23.3994 28.7002 -26.2002 27.5996c-36.8994 -14.5996 -71.3994 -39.7002 -102.199 -63.5c5.59961 -11.7998 10.5 -24.2002 14.7998 -36.8994zM298.3 -54.7998h-0.799805
+c0.299805 0.200195 0.5 0.399414 0.799805 0.5v-0.5zM305.8 -49.0996h9.60059c-1 1.5 -2.10059 2.89941 -3.2002 4.2998c-2.10059 -1.5 -4.2998 -2.90039 -6.40039 -4.2998zM320.9 -24.4004c0.0996094 3.60059 0.299805 7.2002 0.399414 10.6006
+c-6.5 3.2002 -14 5.5 -23.5 5.89941c6.5 3.30078 15.9004 3.2002 21.7998 7.10059c0.100586 1.5 0.100586 2.89941 0.200195 4.2998c-10.7998 0.900391 -14.7998 5.59961 -21.8994 9.5c-11.6006 6.40039 -29 13.2002 -43.9004 16.0996
+c-18.5 3.60059 -16.7998 -25.1992 -16 -42.3994c0.700195 -13.6006 7.7002 -28 10.7998 -37c1.5 -4.2002 1.7998 -8.7002 5.40039 -9.5c6.39941 -1.5 27.3994 6.89941 33.3994 10.2002c12.7002 6.89941 22.5 17.8994 33.3008 25.1992zM374.3 -49.0996l0.600586 12.5996
+c-11.2002 -0.700195 -17.5 10.2002 -25.4004 11c-6.90039 0.700195 -12.7002 -7.90039 -21.7002 -4.2002c-2 -2.2002 -3.89941 -4.7002 -6 -6.89941c3.2002 -3.90039 6.10059 -8.10059 8.90039 -12.5h17.3994c0.200195 3.19922 2.80078 5.7998 6.10059 5.7998
+s6 -2.60059 6.09961 -5.7998h14zM383 -49.0996h36.2998c-6.7002 10.1992 -20.0996 18.7998 -35.7002 11.5c-0.199219 -3.7002 -0.399414 -7.5 -0.599609 -11.5zM466.4 -12.0996c1.19922 6.19922 4.59961 19.5996 3.7998 25.0996
+c-1.40039 9.7998 -14.6006 17.0996 -21.4004 23.0996c-12.3994 11.1006 -20.2002 21 -33.2002 31.4004c-5.19922 -7.7998 -16.5 -13 -20.7998 -19.2998c30.7002 14.8994 36.2998 -55.7998 24.2002 -78.5c1.90039 -6.7998 8.2998 -9.40039 10.9004 -15.5
+c-0.700195 -1.10059 -1.30078 -2.2002 -1.90039 -3.2998h27.9004c0.199219 0 0.399414 0 0.599609 -0.100586c4.09961 13.1006 7.59961 25.9004 9.90039 37.1006zM222.2 317.5c5.39941 14.9004 27.2002 34.7002 45 32c7.7002 -1.2002 18 -8.2002 12.2002 -17.7002
+c-30.2002 7 -45.2002 -12.5996 -54.4004 -33.0996c-8.09961 2 -4.90039 13.0996 -2.7998 18.7998zM406.3 254.4c8.2002 3.59961 22.4004 0.699219 29.6006 5.2998c-4.2002 11.5 -10.3008 21.3994 -9.30078 37.7002c0.5 0 1 0 1.40039 -0.100586
+c6.7998 -14.2002 12.7002 -29.2002 21.4004 -41.7002c-5.7002 -13.5 -43.6006 -25.3994 -43.1006 -1.19922zM309.5 251.7c-6.7998 10.8994 -19 32.5 -14.5 45.2998c6.5 -11.9004 8.59961 -24.4004 17.7998 -33.2998c4.10059 -4 12.2002 -9 8.2002 -20.2002
+c-0.900391 -2.7002 -7.7998 -8.59961 -11.7002 -9.7002c-14.3994 -4.2998 -47.8994 -0.899414 -36.5996 17.1006c11.8994 -0.700195 27.8994 -7.80078 36.7998 0.799805zM336.8 181.7c3.7998 -6.60059 1.40039 -18.7002 12.1006 -20.6006
+c20.1992 -3.39941 43.5996 12.3008 58.0996 17.8008c9 15.1992 -0.799805 20.6992 -8.90039 30.5c-16.5996 20 -38.7998 44.7998 -38 74.6992c6.7002 4.90039 7.30078 -7.39941 8.2002 -9.69922c8.7002 -20.3008 30.4004 -46.2002 46.2998 -63.5
+c3.90039 -4.30078 10.3008 -8.40039 11 -11.2002c2.10059 -8.2002 -5.39941 -18 -4.5 -23.5c-21.6992 -13.9004 -45.7998 -29.1006 -81.3994 -25.6006c-7.40039 6.7002 -10.2998 21.4004 -2.90039 31.1006zM135.5 190.9c-6.7998 3.89941 -8.40039 21 -16.4004 21.3994
+c-11.3994 0.700195 -9.2998 -22.2002 -9.2998 -35.5c-7.7998 7.10059 -9.2002 29.1006 -3.5 40.2998c-6.59961 3.2002 -9.5 -3.59961 -13.0996 -5.89941c4.7002 34.0996 49.7998 15.7998 42.2998 -20.2998zM435.1 162.1c-10.0996 -19.1992 -24.3994 -40.3994 -54 -41
+c-0.599609 6.2002 -1.09961 15.6006 0 19.4004c22.7002 2.2002 36.6006 13.7002 54 21.5996zM293.2 149.7c18.8994 -9.90039 53.5996 -11 79.2998 -10.2002c1.40039 -5.59961 1.2998 -12.5996 1.40039 -19.4004c-33 -1.7998 -72 6.40039 -80.7002 29.6006zM385.4 103
+c-1.7002 -4.2998 -5.30078 -9.2998 -9.80078 -11.0996c-12.0996 -4.90039 -45.5996 -8.7002 -62.3994 0.299805c-10.7002 5.7002 -17.5 18.5 -23.4004 26c-2.7998 3.59961 -16.8994 12.8994 -0.200195 12.8994c13.1006 -32.6992 58 -29 95.8008 -28.0996z" />
+ <glyph glyph-name="joget" unicode="&#xf3b7;" horiz-adv-x="496"
+d="M378.1 403c116.601 -71.7998 152.9 -224.6 81 -341.2c-71.8994 -116.5 -224.6 -152.8 -341.199 -80.8994c-116.601 71.8994 -152.9 224.6 -81 341.199c46.8994 76 128.1 117.9 211.3 117.9c44.3994 0 89.3994 -11.9004 129.899 -37zM429.9 79.7998
+c5.2998 8.7002 9.89941 17.6006 13.8994 26.6006c-32.0996 -1.10059 -157.1 1.5 -208.8 -17.6006c-58.4004 -21.5 -36.9004 -53.3994 -31.2002 -67.0996c3.7998 -9.10059 14.7002 -28.7998 23.7002 -42.4004c6.7998 -0.599609 13.5996 -1 20.4004 -1
+c71.5996 0 141.6 36 182 101.5zM229.1 166.1c51 -1.2998 205.4 -4.39941 230.301 -4.89941c11.8994 81.7998 -24.5 166.6 -99.3008 212.7c-100.5 61.8994 -232.1 30.6992 -294 -69.8008c-28.5996 -46.3994 -37.2998 -99.3994 -28.5 -149.1
+c11 40.9004 49.7002 131.5 178.301 140.2c50.8994 4 41.5 -19.2002 23.5996 -29.7002c-17.7998 -10.5 -45.7002 -23.7998 -68.9004 -51.2002c-23.1992 -27.3994 3 -46.7998 58.5 -48.2002zM412.9 220.9c22.6992 -6 19.0996 -15.5 19.0996 -15.5l-46.5 -23.4004
+l-169.5 -1.59961s33.7998 10.7998 65.2998 31.2998c26 16.8994 49.7002 35.5996 67.5 35.5996c3.7002 0 7.2002 -0.899414 10.4004 -2.7002c18.5 -10.5996 -2.90039 -18.1992 -13.4004 -24.5996s-50.7002 -34.5 -50.7002 -34.5s1.40039 -7.59961 31.1006 8.2002
+c29.7002 15.8994 64 33.2002 86.7002 27.2002z" />
+ <glyph glyph-name="js" unicode="&#xf3b8;"
+d="M0 416h448v-448h-448v448zM243.8 66.5996v143.7h-42.0996v-143.1c0 -21.1006 -8.7998 -26.5 -22.6006 -26.5c-14.5 0 -20.5 9.89941 -27.0996 21.5996l-34.2998 -20.7002c10 -21.0996 29.5 -38.5 63.2002 -38.5c37.2998 0 62.8994 19.9004 62.8994 63.5zM343.4 3.09961
+c39.8994 0 69.6992 20.8008 69.6992 58.6006c0 35.2002 -20.0996 50.8994 -55.8994 66.2002l-10.5 4.5c-18.1006 7.89941 -25.9004 13 -25.9004 25.5996c0 10.2002 7.7998 18 20.1006 18c12.0996 0 19.8994 -5.09961 27.0996 -18l32.7998 21
+c-13.7998 24.4004 -33 33.7002 -59.7998 33.7002c-37.5 0 -61.5996 -24 -61.5996 -55.6006c0 -34.2998 20.0996 -50.5996 50.5 -63.5l10.5 -4.5c19.2998 -8.5 30.6992 -13.5996 30.6992 -28c0 -12.0996 -11.1992 -20.7998 -28.5996 -20.7998
+c-20.7002 0 -32.5 10.9004 -41.5 25.6006l-34.2998 -19.8008c12.2998 -24.3994 37.5996 -43 76.7002 -43z" />
+ <glyph glyph-name="js-square" unicode="&#xf3b9;"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM243.8 66.5996v143.7h-42.0996v-143.1c0 -21.1006 -8.7998 -26.5 -22.6006 -26.5c-14.5 0 -20.5 9.89941 -27.0996 21.5996
+l-34.2998 -20.7002c10 -21.0996 29.5 -38.5 63.2002 -38.5c37.2998 0 62.8994 19.9004 62.8994 63.5zM343.4 3.09961c39.8994 0 69.6992 20.8008 69.6992 58.6006c0 35.2002 -20.0996 50.8994 -55.8994 66.2002l-10.5 4.5c-18.1006 7.89941 -25.9004 13 -25.9004 25.5996
+c0 10.2002 7.7998 18 20.1006 18c12.0996 0 19.8994 -5.09961 27.0996 -18l32.7998 21c-13.7998 24.4004 -33 33.7002 -59.7998 33.7002c-37.5 0 -61.5996 -24 -61.5996 -55.6006c0 -34.2998 20.0996 -50.5996 50.5 -63.5l10.5 -4.5
+c19.2998 -8.5 30.6992 -13.5996 30.6992 -28c0 -12.0996 -11.1992 -20.7998 -28.5996 -20.7998c-20.7002 0 -32.5 10.9004 -41.5 25.6006l-34.2998 -19.8008c12.2998 -24.3994 37.5996 -43 76.7002 -43z" />
+ <glyph glyph-name="keycdn" unicode="&#xf3ba;" horiz-adv-x="512"
+d="M63.7998 38.7002l60.5 59c32.1006 -42.7998 71.1006 -66 126.601 -67.4004c30.5 -0.700195 60.2998 7 86.3994 22.4004c5.10059 -5.2998 18.5 -19.5 20.9004 -22c-32.2002 -20.7002 -69.6006 -31.1006 -108.101 -30.2002
+c-43.2998 1.09961 -84.5996 16.7002 -117.699 44.4004c0.299805 0.599609 -38.2002 -37.5 -38.6006 -37.9004c9.5 -29.7998 -13.0996 -62.4004 -46.2998 -62.4004c-26.7998 0.100586 -47.5 21.7002 -47.5 48.5c0 34.3008 33.0996 56.6006 63.7998 45.6006zM418.7 291.1
+c19.0996 -31.2998 29.5996 -67.3994 28.7002 -104c-1.10059 -44.7998 -19 -87.5 -48.6006 -121c0.299805 -0.299805 23.7998 -25.1992 24.1006 -25.5c9.59961 1.30078 19.1992 -2 25.8994 -9.09961c11.2998 -12 10.9004 -30.9004 -1.09961 -42.4004
+c-12 -11.2998 -30.9004 -10.8994 -42.4004 1.10059c-6.7002 7 -9.39941 16.7998 -7.59961 26.2998c-24.9004 26.5996 -44.4004 47.2002 -44.4004 47.2002c42.7002 34.0996 63.2998 79.5996 64.4004 124.2c0.700195 28.8994 -7.2002 57.1992 -21.1006 82.1992zM104 394.9
+c6.7002 -7 9.40039 -16.8008 7.59961 -26.3008l45.9004 -48.0996c-4.7002 -3.7998 -13.2998 -10.4004 -22.7998 -21.2998c-25.4004 -28.5 -39.6006 -64.7998 -40.7002 -102.9c-0.700195 -28.8994 6.09961 -57.2002 20 -82.3994l-22 -21.5
+c-19.2998 31.5996 -28.9004 67.6992 -27.7998 104.699c1 44.6006 18.2998 87.6006 47.5 121.101l-25.2998 26.3994c-9.60059 -1.2998 -19.2002 2 -25.9004 9.10059c-11.2998 12 -10.9004 30.8994 1.09961 42.3994c11.9004 11.2002 30.6006 10.9004 42.4004 -1.19922z
+M464.9 440c26 0 47.0996 -22.4004 47.0996 -48.2998c0 -25.9004 -21.0996 -47.7002 -47.0996 -47.7002c-6.30078 -0.0996094 -14 1.09961 -15.9004 1.7998l-62.9004 -59.7002c-32.6992 43.6006 -76.6992 65.9004 -126.899 67.2002
+c-30.5 0.700195 -60.2998 -6.7998 -86.2002 -22.3994l-21.0996 22c32.1992 20.7998 69.5996 31.0996 108.1 30.1992c43.2998 -1.09961 84.5996 -16.6992 117.7 -44.5996l41.0996 38.5996c-1.5 4.7002 -2.2002 9.60059 -2.2002 14.5
+c-0.0996094 26.7002 22.3008 48.4004 48.3008 48.4004zM256.7 334.6c5.5 0 10.8994 -0.399414 16.3994 -1.09961c78.1006 -9.7998 133.4 -81.0996 123.801 -159.1c-9.80078 -78.1006 -81.1006 -133.4 -159.101 -123.801c-78.0996 9.80078 -133.399 81.1006 -123.8 159.2
+c9.2998 72.4004 70.0996 124.601 142.7 124.8zM197.7 215.2c0.599609 -22.7002 12.2002 -41.7998 32.3994 -52.2002l-11 -51.7002h73.7002l-11 51.7002c20.1006 10.9004 32.1006 29 32.4004 52.2002c-0.400391 32.7998 -25.7998 57.5 -58.2998 58.2998
+c-32.1006 -0.799805 -57.3008 -24.7998 -58.2002 -58.2998zM256 288z" />
+ <glyph glyph-name="kickstarter" unicode="&#xf3bb;"
+d="M400 -32h-352c-26.4004 0 -48 21.5996 -48 48v352c0 26.4004 21.5996 48 48 48h352c26.4004 0 48 -21.5996 48 -48v-352c0 -26.4004 -21.5996 -48 -48 -48zM199.6 269.5c0 30.7002 -17.5996 45.0996 -39.6992 45.0996c-25.8008 0 -40 -19.7998 -40 -44.5v-154.8
+c0 -25.7998 13.6992 -45.5996 40.5 -45.5996c21.5 0 39.1992 14 39.1992 45.5996v41.7998l60.6006 -75.6992c12.2998 -14.9004 39 -16.8008 55.7998 0c14.5996 15.0996 14.7998 36.7998 4 50.3994l-49.0996 62.7998l40.5 58.7002c9.39941 13.5 9.5 34.5 -5.60059 49.1006
+c-16.3994 15.8994 -44.5996 17.2998 -61.3994 -7l-44.8008 -64.7002v38.7998z" />
+ <glyph glyph-name="kickstarter-k" unicode="&#xf3bc;" horiz-adv-x="384"
+d="M147.3 333.6v-70.5996l82.7998 118.2c31.2002 44.3994 83.3008 41.7998 113.601 12.7998c27.8994 -26.7002 27.7998 -65.0996 10.3994 -89.7998l-74.8994 -107.4l90.7998 -114.8c19.9004 -24.7998 19.5996 -64.5996 -7.40039 -92.2002
+c-31.0996 -30.7002 -80.5 -27.2002 -103.199 0l-112.101 138.3v-76.5c0 -57.7998 -32.5996 -83.3994 -72.3994 -83.3994c-49.6006 0 -74.9004 36.0996 -74.9004 83.3994v283c0 45.2002 26.2002 81.4004 73.9004 81.4004c40.8994 0 73.3994 -26.2002 73.3994 -82.4004z" />
+ <glyph glyph-name="laravel" unicode="&#xf3bd;" horiz-adv-x="640"
+d="M637.5 206.4c4.2998 -4.80078 3.2002 -8.60059 -4.7002 -10.6006c-6.7002 -1.89941 -69.5996 -18.5996 -87.2998 -23.2998c25.7998 -34.5996 75.0996 -100.6 79.2998 -106.8c5.7002 -8.5 0.5 -10.9004 -7.89941 -14.4004c-8.40039 -3.39941 -195.2 -70.5996 -208 -74.5
+c-16.3008 -5 -23.7002 -7.5 -34.3008 7.40039c-8 11.0996 -51.0996 88.7002 -72.1992 127c-40 -10.5 -113.2 -29.6006 -134.301 -34.7002c-20.5996 -5 -29.3994 7.40039 -32.7998 15c-3.39941 7.59961 -124.8 269.2 -132.399 287.2c-7.60059 18 0.799805 21.3994 8.39941 22
+c7.60059 0.700195 114.5 9.59961 128.5 10.2002c14 0.699219 15.2998 -2.5 21.4004 -11.6006l154.2 -257.5l193.699 46.4004c-10.7998 15.2002 -59.5 84.2998 -64.1992 90.8994c-5.30078 7.40039 0.0996094 10.8008 8.69922 12.3008
+c8.60059 1.39941 82.7002 13.8994 89.1006 14.7998c6.2998 0.899414 11.3994 3.09961 21.7002 -9.2998c10.2998 -12.4004 68.8994 -85.7002 73.0996 -90.5zM285.3 134.4c2.2998 0.5 3.7998 1.7998 1.2002 6.09961c-2.40039 4.2998 -144.6 249.7 -144.6 249.7
+c-1.30078 2.2002 -0.900391 3 -4.5 2.7998c-3.5 -0.200195 -104.301 -9.2002 -106 -9.2002c-1.7002 0 -1.80078 -2.59961 0 -5.89941c1.7998 -3.30078 130.1 -268 130.8 -270s0.700195 -2.60059 6.5 -1.30078c5.7998 1.30078 114.3 27.3008 116.6 27.8008zM591.3 77
+c-1.7002 2.7002 -61.2002 83.4004 -64.0996 88.2002c-3 4.7002 -4.5 3.7002 -9.2002 2.2002l-188.8 -49.1006s58 -100.3 62.3994 -106.8c4.40039 -6.5 7.10059 -6 10.6006 -4.5c3.39941 1.5 181.7 61.5996 187.1 63.5996c5.5 1.90039 3.7002 3.7002 2 6.40039zM603.4 211.1
+c4.19922 1 7.39941 2.40039 5.59961 4.7002c-1.90039 2.40039 -50.9004 64.5 -54.5 69.4004c-3.59961 4.89941 -6.09961 4.09961 -9 3.39941c-2.90039 -0.599609 -67.2998 -12.2998 -71.2998 -12.7998s-2.7002 -2.7002 -1.10059 -5l56.7002 -77.7998
+s69.4004 17.2002 73.6006 18.0996z" />
+ <glyph glyph-name="line" unicode="&#xf3c0;"
+d="M272.1 243.8v-71.0996c0 -1.7998 -1.39941 -3.2002 -3.19922 -3.2002h-11.4004c-1.09961 0 -2.09961 0.599609 -2.59961 1.2998l-32.6006 44v-42.2002c0 -1.7998 -1.39941 -3.19922 -3.2002 -3.19922h-11.3994c-1.7998 0 -3.2002 1.39941 -3.2002 3.19922v71.1006
+c0 1.7998 1.40039 3.2002 3.2002 3.2002h11.2998c1 0 2.09961 -0.5 2.59961 -1.40039l32.6006 -44v42.2002c0 1.7998 1.39941 3.2002 3.2002 3.2002h11.3994c1.7998 0.0996094 3.2998 -1.40039 3.2998 -3.10059zM190.1 247c1.80078 0 3.2002 -1.5 3.2002 -3.2002v-71.0996
+c0 -1.7998 -1.39941 -3.2002 -3.2002 -3.2002h-11.3994c-1.7998 0 -3.2002 1.40039 -3.2002 3.2002v71.0996c0 1.7998 1.40039 3.2002 3.2002 3.2002h11.3994zM162.6 187.4c1.7002 0 3.10059 -1.5 3.10059 -3.2002v-11.4004c0 -1.7998 -1.40039 -3.2002 -3.2002 -3.2002
+h-45.7002c-0.899414 0 -1.59961 0.400391 -2.2002 0.900391c-0.599609 0.599609 -0.899414 1.2998 -0.899414 2.2002v71.0996c0 1.7998 1.39941 3.2002 3.2002 3.2002h11.3994c1.7998 0 3.2002 -1.40039 3.2002 -3.2002v-56.3994h31.0996zM332.1 247
+c1.7002 0 3.10059 -1.5 3.2002 -3.2002v-11.3994c0 -1.80078 -1.39941 -3.2002 -3.2002 -3.2002h-31.0996v-12h31.0996c1.80078 0 3.2002 -1.40039 3.2002 -3.2002v-11.5c0 -1.7998 -1.39941 -3.2002 -3.2002 -3.2002h-31.0996v-12h31.0996
+c1.80078 0 3.2002 -1.39941 3.2002 -3.2002v-11.3994c0 -1.7998 -1.39941 -3.2002 -3.2002 -3.2002h-45.6992c-1.80078 0 -3.2002 1.5 -3.2002 3.2002v71.0996c0 1.7998 1.5 3.2002 3.2002 3.2002h45.6992zM448 334.3v-285.3
+c-0.0996094 -44.7998 -36.7998 -81.0996 -81.7002 -81h-285.3c-44.7998 0.0996094 -81.0996 36.9004 -81 81.7002v285.3c0.0996094 44.7998 36.9004 81.0996 81.7002 81h285.3c44.7998 -0.0996094 81.0996 -36.7998 81 -81.7002zM386.4 211.7
+c0 73 -73.2002 132.399 -163.101 132.399c-89.8994 0 -163.1 -59.3994 -163.1 -132.399c0 -65.4004 58 -120.2 136.399 -130.601c19.1006 -4.09961 16.9004 -11.0996 12.6006 -36.7998c-0.700195 -4.09961 -3.2998 -16.0996 14.0996 -8.7998
+c17.4004 7.2998 93.9004 55.2998 128.2 94.7002c23.5996 26 34.9004 52.2998 34.9004 81.5z" />
+ <glyph glyph-name="lyft" unicode="&#xf3c3;" horiz-adv-x="512"
+d="M0 366.9h77.7998v-208.7c0 -33.1006 15 -52.7998 27.2002 -61c-12.7002 -11.1006 -51.2002 -20.9004 -80.2002 2.7998c-17 14 -24.7998 37.2998 -24.7998 59v207.9zM485.9 193.4c0 -14.2002 11.5996 -25.9004 26.0996 -25.9004v-76.5
+c-56.7002 0 -102.7 46.0996 -102.7 102.7v77.0996c0 34.6006 -52.2002 34.6006 -52.2002 0v-23.2998h38.8008v-76.7998h-38.8008v-6.7002c0 -21.7998 -7.69922 -45 -24.7998 -59c-16.2998 -13.7002 -35.7002 -16.2998 -51.7002 -14v179.2
+c0 56.7002 46.1006 102.7 102.7 102.7c49.1006 0 90.2002 -34.4004 100.3 -80.7002h26.1006v-76.7998h-23.7998v-22zM191.6 292.4v0.5h77.1006v-178.2c0 -52.4004 -29.7002 -91.7002 -76.7998 -100.8c-26.1006 -5.10059 -52.5 -2.80078 -77.6006 4.69922v70.3008
+c9.7998 -4.2002 29.5 -9.40039 45 -7.80078c20.4004 2 32.7998 11.9004 34.9004 25.3008c0 0 -21.2002 -20.4004 -58.2002 -10.6006c-37 9.90039 -45 40.1006 -45 63.9004v132.7h76.7998v-113c0 -15.4004 23.7998 -15.4004 23.7998 0v113z" />
+ <glyph glyph-name="magento" unicode="&#xf3c4;"
+d="M445.7 320.1v-256.1l-63.4004 -36.5v255.8l-158.5 91.6006l-158.6 -91.6006l0.399414 -255.899l-63.2998 36.5996v255.9l221.9 128.1zM255.6 27.5v255.9l63.4004 -36.6006v-256l-95.0996 -54.8994l-94.9004 54.8994l-0.0996094 255.9l63.2998 36.5996v-256
+l31.7998 -18.2002z" />
+ <glyph glyph-name="medapps" unicode="&#xf3c6;" horiz-adv-x="320"
+d="M118.3 209.6c3.5 12.5 6.90039 33.6006 13.2002 33.6006c8.2998 -1.7998 9.59961 -23.4004 18.5996 -36.6006c4.60059 23.5 5.30078 85.1006 14.1006 86.7002c9 0.700195 19.7002 -66.5 22 -77.5c9.89941 -4.09961 48.8994 -6.59961 48.8994 -6.59961
+c1.90039 -7.2998 -24 -7.60059 -40 -7.7998c-4.59961 -14.8008 -5.39941 -27.7002 -11.3994 -28c-4.7002 -0.200195 -8.2002 28.7998 -17.5 49.5996l-9.40039 -65.5c-4.39941 -13 -15.5 22.5 -21.8994 39.2998c-3.30078 0.100586 -62.4004 1.60059 -47.6006 7.7998zM228 0
+h-136c-21.2002 0 -21.2002 32 0 32h136c21.2002 0 21.2002 -32 0 -32zM204 -64h-88c-21.2002 0 -21.2002 32 0 32h88c21.2002 0 21.2002 -32 0 -32zM238.2 77.5c-3.60059 -21.2998 -36 -15.5 -32.6006 5.09961c3.60059 21.2002 5.60059 40.6006 15.3008 58.6006
+c32.5996 60.2998 66.0996 95.5 66.0996 151.6c0 67.9004 -57 123.2 -127 123.2s-127 -55.2998 -127 -123.2c0 -56.0996 33.5 -91.2998 66.0996 -151.7c9.7002 -17.8994 11.7002 -36.8994 15.3008 -58.5996c3.5 -20.7998 -29.1006 -26.0996 -32.6006 -5.09961
+c-3.2002 19.0996 -5.2002 36.3994 -11.8994 48.8994c-8 14.7002 -16.1006 28.1006 -24 41c-24.6006 40.4004 -45.9004 75.2998 -45.9004 125.5c0 85.6006 71.7998 155.2 160 155.2s160 -69.5996 160 -155.2c0 -50.2998 -21.2998 -85.0996 -45.9004 -125.5
+c-7.89941 -12.8994 -16.0996 -26.2998 -24 -41c-6.69922 -12.3994 -8.69922 -29.8994 -11.8994 -48.7998z" />
+ <glyph glyph-name="medium-m" unicode="&#xf3c7;" horiz-adv-x="512"
+d="M71.5 305.7c0.599609 5.89941 -1.7002 11.7998 -6.09961 15.7998l-45.1006 54.4004v8.09961h140.2l108.4 -237.7l95.2998 237.7h133.7v-8.09961l-38.6006 -37c-3.2998 -2.5 -5 -6.7002 -4.2998 -10.8008v-272c-0.700195 -4.09961 1 -8.2998 4.2998 -10.7998l37.7002 -37
+v-8.09961h-189.7v8.09961l39.1006 37.9004c3.7998 3.7998 3.7998 5 3.7998 10.7998v219.8l-108.7 -275.899h-14.7002l-126.399 275.899v-184.899c-1.10059 -7.80078 1.5 -15.6006 7 -21.2002l50.7998 -61.6006v-8.09961h-144v8l50.7998 61.7002
+c5.40039 5.59961 7.90039 13.5 6.5 21.2002v213.8z" />
+ <glyph glyph-name="medrt" unicode="&#xf3c8;" horiz-adv-x="544"
+d="M113.7 192c0 -121.8 83.8994 -222.8 193.5 -241.1c-18.7002 -4.5 -38.2002 -6.90039 -58.2002 -6.90039c-137.6 0 -249 111 -249 248s111.4 248 248.9 248c20.0996 0 39.5996 -2.40039 58.1992 -6.90039c-109.6 -18.2998 -193.399 -119.3 -193.399 -241.1zM411.1 91.7002
+c77.7002 55.3994 104.4 155.1 67 233.899c11.2002 -9.89941 21.5 -21.2998 30.5 -34.1992c61.6006 -88.3008 40.8008 -210.301 -46.5 -272.601c-87.2998 -62.2998 -208.1 -41.2002 -269.699 47c-9 12.7998 -16.2002 26.4004 -21.7002 40.5
+c60.7998 -62.0996 162.7 -70 240.399 -14.5996zM192.3 335.7c72.5 54.5996 171.601 45.7002 221.601 -19.7998c45.2998 -59.7002 34.3994 -145.601 -22.3008 -201.801c18.5 51.4004 11.3008 111 -24.3994 158c-43 56.5 -114.601 78.3008 -178.9 60.5
+c1.2998 1 2.60059 2.10059 4 3.10059zM296 224h40c4.40039 0 8 -3.59961 8 -8v-48c0 -4.40039 -3.59961 -8 -8 -8h-40c-4.40039 0 -8 -3.59961 -8 -8v-40c0 -4.40039 -3.59961 -8 -8 -8h-48c-4.40039 0 -8 3.59961 -8 8v40c0 4.40039 -3.59961 8 -8 8h-40
+c-4.40039 0 -8 3.59961 -8 8v48c0 4.40039 3.59961 8 8 8h40c4.40039 0 8 3.59961 8 8v40c0 4.40039 3.59961 8 8 8h48c4.40039 0 8 -3.59961 8 -8v-40c0 -4.40039 3.59961 -8 8 -8z" />
+ <glyph glyph-name="microsoft" unicode="&#xf3ca;"
+d="M0 416h214.6v-214.6h-214.6v214.6zM233.4 416h214.6v-214.6h-214.6v214.6zM0 182.6h214.6v-214.6h-214.6v214.6zM233.4 182.6h214.6v-214.6h-214.6v214.6z" />
+ <glyph glyph-name="mix" unicode="&#xf3cb;"
+d="M0 384h448v-204.1c0 -56.6006 -88 -59.9004 -88 0v23.7998c0 56.7998 -82.7002 59 -88 4.2998v-116.1c0 -58 -96 -57.9004 -96 0v175.3c0 56.8994 -80.0996 59.3994 -88 6.5v-238.601c0 -58.0996 -88 -56.1992 -88 0v348.9z" />
+ <glyph glyph-name="mizuni" unicode="&#xf3cc;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111.1 248 -248c0 -137 -111 -248 -248 -248s-248 111 -248 248c0 136.9 111 248 248 248zM168 88.0996v223.9c0 22.0996 -17.9004 40 -40 40s-40 -17.9004 -40 -40v-272.1c21.2002 20.8994 48.5996 37.5996 80 48.1992zM288 98v214
+c0 22.0996 -17.9004 40 -40 40s-40 -17.9004 -40 -40v-214c13 2 26.4004 3.09961 40.2002 3.09961c13.5996 0 26.8994 -1.09961 39.7998 -3.09961zM408 40.2998v271.7c0 22.0996 -17.9004 40 -40 40s-40 -17.9004 -40 -40v-223.7c31.4004 -10.5996 58.7998 -27.2002 80 -48z
+" />
+ <glyph glyph-name="monero" unicode="&#xf3d0;" horiz-adv-x="496"
+d="M352 64h108.4c-43.4004 -71.9004 -122.301 -120 -212.4 -120s-169 48.0996 -212.4 120h108.4v127.8l104 -104.8l104 105v-128zM88 112h-74.7998c-8.60059 25.0996 -13.2002 52 -13.2002 80c0 137 111 248 248 248s248 -111 248 -248c0 -28 -4.7002 -54.9004 -13.2002 -80
+h-74.7998v208l-160.6 -159.4l-159.4 159.4v-208z" />
+ <glyph glyph-name="napster" unicode="&#xf3d2;" horiz-adv-x="495"
+d="M298.3 74.4004c-14.2002 -13.6006 -31.2998 -24.1006 -50.3994 -30.5c-19 6.39941 -36.2002 16.8994 -50.3008 30.5h100.7zM342.3 274c-56.3994 39.7998 -132.1 39.9004 -188.899 -0.0996094c-19.9004 16.7998 -43.6006 29.5 -69.5 36.3994v-161.6
+c0 -217.3 328 -219.101 328 0.299805v161.2c-26 -7 -49.6006 -19.2998 -69.6006 -36.2002zM133.5 332.5c6.5 -3.2002 14.0996 -7.40039 20.4004 -11.4004c58.6992 30.5 129.199 30.6006 187.899 0.100586c6.7002 4.2002 13.5 8 20.6006 11.5
+c-64.6006 59.8994 -164.5 59.7998 -228.9 -0.200195zM43.7998 354.8c17.5 -0.5 34.2998 -3.09961 50.6006 -7.5c82 91.6006 225.5 91.6006 307.5 0.100586c16.0996 4.39941 32.7998 6.89941 50.0996 7.39941v-69.2002c58.7002 -36.5 58.5 -121.899 -0.200195 -158.199
+l-0.299805 -1.7002c-25.9004 -238.8 -381.2 -243.601 -407.6 1.5c-58.5 37.2002 -58.5 121.8 -0.100586 158.3v69.2998zM259.2 96c13.0996 59.2998 33.5 56 113 55.4004c-0.799805 -8.2002 0.0996094 -32.3008 -26.2002 -47.4004c-4.40039 -2.5 -15.2998 -6 -25.5 -6.5
+c-25.2998 -1.2002 -61.2998 -1.5 -61.2998 -1.5zM123.7 151.3c79.2998 0.700195 99.7998 4 113 -55.3994c0 0 -36 0.399414 -61.2998 1.5c-10.3008 0.5 -21.1006 4 -25.5 6.5c-26.3008 15.0996 -25.4004 39.1992 -26.2002 47.3994zM292.8 27.9004
+c3 -4.90039 3.2002 -8.80078 3.2998 -8.90039c-29.0996 -17.5996 -67.0996 -17.5996 -96.1992 0c0 0 0.899414 5.5 3.69922 9.59961c3.5 5.10059 6.40039 6.60059 6.40039 6.60059c23.7002 -6.90039 51.0996 -7.2998 75.9004 0c0 0 3.69922 -2 6.89941 -7.2998z" />
+ <glyph glyph-name="node-js" unicode="&#xf3d3;"
+d="M224 -60c-6.7002 0 -13.5 1.7998 -19.4004 5.2002l-61.6992 36.5c-9.2002 5.2002 -4.7002 7 -1.7002 8c12.2998 4.2998 14.7998 5.2002 27.8994 12.7002c1.40039 0.799805 3.2002 0.5 4.60059 -0.400391l47.3994 -28.0996c1.7002 -1 4.10059 -1 5.7002 0l184.7 106.6
+c1.7002 1 2.7998 3 2.7998 5v213.2c0 2.09961 -1.09961 4 -2.89941 5.09961l-184.601 106.5c-1.7002 1 -4 1 -5.7002 0l-184.5 -106.6c-1.7998 -1 -2.89941 -3 -2.89941 -5.10059v-213.1c0 -2 1.09961 -4 2.89941 -4.90039l50.6006 -29.1992
+c27.5 -13.7002 44.2998 2.39941 44.2998 18.6992v210.4c0 3 2.40039 5.2998 5.40039 5.2998h23.3994c2.90039 0 5.40039 -2.2998 5.40039 -5.2998v-210.5c0 -36.5996 -20 -57.5996 -54.7002 -57.5996c-10.7002 0 -19.0996 0 -42.5 11.5996l-48.4004 27.9004
+c-12 6.89941 -19.3994 19.7998 -19.3994 33.6992v213.101c0 13.7998 7.39941 26.7998 19.3994 33.7002l184.5 106.6c11.7002 6.59961 27.2002 6.59961 38.8008 0l184.699 -106.7c12 -6.89941 19.4004 -19.7998 19.4004 -33.7002v-213.1
+c0 -13.7998 -7.40039 -26.7002 -19.4004 -33.7002l-184.699 -106.6c-5.90039 -3.40039 -12.6006 -5.2002 -19.4004 -5.2002zM373.1 150.1c0 -40.1992 -33.5996 -63.2998 -92 -63.3994c-80.8994 0 -97.7998 37.0996 -97.7998 68.2002c0 2.89941 2.2998 5.2998 5.2998 5.2998
+h23.9004c2.7002 0 4.90039 -1.90039 5.2998 -4.5c3.60059 -24.2998 14.2998 -36.6006 63.2002 -36.6006c38.9004 0 55.5 8.80078 55.5 29.4004c0 11.9004 -4.7002 20.7998 -65.2002 26.7002c-50.5 5 -81.7998 16.2002 -81.7998 56.5996c0 37.2998 31.4004 59.5 84.0996 59.5
+c59.2002 0 88.5 -20.5 92.2002 -64.5996c0.100586 -1.5 -0.399414 -3 -1.39941 -4.10059c-1 -1.09961 -2.40039 -1.69922 -3.90039 -1.69922h-24c-2.5 0 -4.7002 1.7998 -5.2002 4.19922c-5.7998 25.6006 -19.7998 33.8008 -57.7002 33.8008
+c-42.5 0 -47.3994 -14.8008 -47.3994 -25.9004c0 -13.4004 5.7998 -17.2998 63.2002 -24.9004c56.6992 -7.5 83.6992 -18.0996 83.6992 -58z" />
+ <glyph glyph-name="npm" unicode="&#xf3d4;" horiz-adv-x="576"
+d="M288 160h-32v64h32v-64zM576 288v-192h-288v-32h-128v32h-160v192h576zM160 256h-128v-128h64v96h32v-96h32v128zM320 256h-128v-160h64v32h64v128zM544 256h-192v-128h64v96h32v-96h32v96h32v-96h32v128z" />
+ <glyph glyph-name="ns8" unicode="&#xf3d5;" horiz-adv-x="639"
+d="M187.1 288.1h44.9004l-48.5 -160.1h-56.9004l-50.5996 106.5l-31.0996 -106.5h-44.9004l49 160.1h49.4004l54.5 -113.699zM639.6 289c4.60059 -28.5996 -36.0996 -44.7002 -65.6992 -50.5996h-0.100586c17.5 -29.3008 22.1006 -69.3008 3.40039 -105.5
+c-26.4004 -51.2002 -86.5 -79.9004 -135.101 -68c-29.3994 7.19922 -51.3994 29 -56.7998 59.5c-0.700195 3.5 -1 7.09961 -1.2002 10.7998c-5.5 -2.7998 -11.8994 -4.2002 -18.5 -4.90039c-15.5996 -1.7002 -21 -2.2998 -160.899 -2.2998l11.5996 39.5h126.8
+c9.10059 0 12.2002 3.2002 13.8008 7.40039c1.69922 4.59961 3.39941 10.1992 4.5 14.5996c1.09961 3.90039 0.0996094 6.59961 -7.7002 6.59961h-87.2998c-33.4004 0 -38.2002 9.2002 -32.8008 28.6006c3.2002 11.5 10.8008 37.2002 17.6006 47.0996
+c7.09961 10.2002 18.2998 13.7002 30.5996 15c15.6006 1.7002 20.4004 1.2002 160.101 1.2002l-9.7002 -31.5h-133.5c-5.5 0 -11.2002 -0.700195 -13.2998 -7.09961c-1.80078 -5.40039 -2.10059 -6.7002 -3.7002 -12.2002c-1.40039 -5.10059 2.2002 -7.40039 11.5 -7.40039
+h87.5996c20.4004 0 31 -6.7998 34 -16.5996c19.9004 21.3994 50.4004 39.5 94.2002 48.2002v0.0996094c-13.4004 42.5 43.9004 66.5996 88.5 58.7998c18.2002 -3.2002 39.2002 -13.2998 42.0996 -31.2998zM530.7 184.3c3.09961 15.7998 -0.5 33.7002 -7.2002 47.7998
+c-23.2998 -2.89941 -52.2998 -10.0996 -68.5 -26.8994c-24.4004 -25.2998 -16.7998 -60 14.0996 -64.7998c25 -3.90039 55.7002 14.3994 61.6006 43.8994zM552.5 267.4c10.5996 1.5 23.5 3.5 34.2002 9.59961c14.7998 8.5 10.3994 21 -4.90039 24.4004
+c-10.8994 2.39941 -25.0996 -0.5 -31.7998 -7.7002c-7.2998 -7.7998 -1.7002 -20.2998 2.5 -26.2998z" />
+ <glyph glyph-name="nutritionix" unicode="&#xf3d6;" horiz-adv-x="400"
+d="M88 439.9c0 0 133.4 8.19922 121 -104.4c0 0 19.0996 74.9004 103 40.5996c0 0 -17.7002 -74 -88 -56c0 0 14.5996 54.6006 66.0996 56.6006c0 0 -39.8994 10.2998 -82.0996 -48.7998c0 0 -19.7998 94.5 -93.5996 99.6992c0 0 75.1992 -19.3994 77.5996 -107.5
+c0 -0.0996094 -106.4 -7 -104 119.801zM400 124.3c0 -48.5 -9.7002 -95.2998 -32 -132.3c-42.2002 -30.9004 -105 -48 -168 -48c-62.9004 0 -125.8 17.0996 -168 48c-22.2998 37 -32 83.7998 -32 132.3c0 48.4004 17.7002 94.7002 40 131.7
+c42.2002 30.9004 97.0996 48.5996 160 48.5996c63 0 117.8 -17.5996 160 -48.5996c22.2998 -37 40 -83.2998 40 -131.7zM120 20c0 15.5 -12.5 28 -28 28s-28 -12.5 -28 -28s12.5 -28 28 -28s28 12.5 28 28zM120 86.2002c0 15.5 -12.5 28 -28 28s-28 -12.5 -28 -28
+s12.5 -28 28 -28s28 12.5 28 28zM120 152.4c0 15.5 -12.5 28 -28 28s-28 -12.5 -28 -28s12.5 -28 28 -28s28 12.5 28 28zM192 20c0 15.5 -12.5 28 -28 28s-28 -12.5 -28 -28s12.5 -28 28 -28s28 12.5 28 28zM192 86.2002c0 15.5 -12.5 28 -28 28s-28 -12.5 -28 -28
+s12.5 -28 28 -28s28 12.5 28 28zM192 152.4c0 15.5 -12.5 28 -28 28s-28 -12.5 -28 -28s12.5 -28 28 -28s28 12.5 28 28zM264 20c0 15.5 -12.5 28 -28 28s-28 -12.5 -28 -28s12.5 -28 28 -28s28 12.5 28 28zM264 86.2002c0 15.5 -12.5 28 -28 28s-28 -12.5 -28 -28
+s12.5 -28 28 -28s28 12.5 28 28zM264 152.4c0 15.5 -12.5 28 -28 28s-28 -12.5 -28 -28s12.5 -28 28 -28s28 12.5 28 28zM336 20c0 15.5 -12.5 28 -28 28s-28 -12.5 -28 -28s12.5 -28 28 -28s28 12.5 28 28zM336 86.2002c0 15.5 -12.5 28 -28 28s-28 -12.5 -28 -28
+s12.5 -28 28 -28s28 12.5 28 28zM336 152.4c0 15.5 -12.5 28 -28 28s-28 -12.5 -28 -28s12.5 -28 28 -28s28 12.5 28 28zM360 192c-4.7998 22.2998 -7.40039 36.9004 -16 56c-38.7998 19.9004 -90.5 32 -144 32s-105.2 -12.0996 -144 -32
+c-8.7998 -19.5 -11.2002 -33.9004 -16 -56c42.2002 7.90039 98.7002 14.7998 160 14.7998s117.8 -6.89941 160 -14.7998z" />
+ <glyph glyph-name="page4" unicode="&#xf3d7;" horiz-adv-x="496"
+d="M248 -56c-137 0 -248 111 -248 248s111 248 248 248c20.9004 0 41.2998 -2.59961 60.7002 -7.5l-266.4 -376.5h205.7v-112zM248 87.5996h-149.4l149.4 213.601v-213.601zM344 56h111.4c-26.9004 -41 -65.7002 -73.5 -111.4 -92.7002v92.7002zM401.4 194.2v-16.7002
+l-21.2002 8.2998zM381.1 139.7c5.90039 0 8.2002 -4.7002 8.2002 -10.6006v-10h-16.2002v7.7002c0 6.60059 1.30078 12.9004 8 12.9004zM496 192c0 -37.2998 -8.2002 -72.7002 -23 -104.4h-129v333.101c89.2998 -37.5 152 -125.8 152 -228.7zM360.4 304.4h68.1992v47.5996
+h-13.8994v-32.5996h-13.9004v29.5996h-13.8994v-29.5996h-12.7002v32.5996h-13.9004v-47.5996h0.100586zM428.5 119.1h-26.5v11c0 15.4004 -5.59961 25.2002 -20.9004 25.2002c-15.3994 0 -20.6992 -10.5996 -20.6992 -25.8994v-25.3008h68.1992v15h-0.0996094zM428.5 222.1
+l-68.2002 -29.6992v-12.4004l68.2002 -29.5v16.5996l-14.4004 5.7002v26.5l14.4004 5.90039v16.8994zM423.7 290.6h-35.6006v-26.5996h13.9004v12.2002h11c8.59961 -15.7998 1.2998 -35.2998 -18.5996 -35.2998c-22.5 0 -28.3008 25.2998 -15.5 37.6992l-11.6006 10.6006
+c-16.2002 -17.5 -12.2002 -63.9004 27.1006 -63.9004c34 0 44.6992 35.9004 29.2998 65.2998z" />
+ <glyph glyph-name="palfed" unicode="&#xf3d8;" horiz-adv-x="575"
+d="M384.9 254.1c0.0996094 -53.3994 -46.5 -96.1992 -83.3008 -96.1992c-12.5 0 -14.3994 3.39941 -15.0996 6.19922c0.5 39.1006 1.7002 80.4004 3 119.801c40.2002 14.3994 95.4004 17.5996 95.4004 -29.8008zM190.4 181.9
+c-0.200195 0.599609 -0.400391 2.09961 -0.600586 4.59961c0 25.5996 37 60.9004 58.5 75.9004c-1.2002 -36.4004 -5.5 -198.101 -1.39941 -242.5c3 -32.3008 26.7998 -32.9004 36.3994 -22.3008c5.90039 6.60059 5.5 15.7002 5.2998 19.1006v0.200195
+c-1.7998 25.5996 -2.7998 60.5996 -2.69922 100c60.7998 -14.4004 140.1 60.2998 140.1 138.199c0 71 -63 94.2002 -135.2 72c-2.89941 14.6006 -18.2998 20.1006 -29.5 11.1006c-7.5 -6.2002 -9.5 -15.7998 -10.5 -28.2002c-57.7998 -30.9004 -100.7 -84.5 -100.7 -126.5
+c0 -24.9004 15.6006 -43 37.1006 -43c35.0996 0 41 44.0996 14.3994 44.0996c-4.69922 0 -11 -2.69922 -11.1992 -2.69922zM8 266.9c0 38.5996 38.4004 37.3994 38.4004 37.3994h29c15.5 70.1006 120.5 74.2998 120.5 74.2998h28.0996v19.1006
+c0 18.3994 21.0996 18.3994 21.0996 18.3994h85.8008c18.3994 0 21.0996 -18.3994 21.0996 -18.3994v-19.1006h28c89.2002 0 112.1 -48.6992 119.4 -74.2998h30.0996c38.5 0 38.4004 -37.3994 38.4004 -37.3994c0 -38.6006 -38.4004 -37.4004 -38.4004 -37.4004h-30
+l-22.4004 -217.2c0 -43.8994 -44.6992 -44.2998 -44.6992 -44.2998h-288.9c-44.7002 0 -44.7002 44.2998 -44.7002 44.2998l-22.3994 217.2h-30c-38.5 0 -38.4004 37.4004 -38.4004 37.4004z" />
+ <glyph glyph-name="patreon" unicode="&#xf3d9;" horiz-adv-x="512"
+d="M512 253.2c0 -101.3 -82.4004 -183.8 -183.8 -183.8c-101.7 0 -184.4 82.3994 -184.4 183.8c0 101.6 82.7002 184.3 184.4 184.3c101.399 0 183.8 -82.7002 183.8 -184.3zM0 -53.5v491h90v-491h-90z" />
+ <glyph glyph-name="periscope" unicode="&#xf3da;"
+d="M370 384.4c38.4004 -40.7002 59.5 -94.3008 59.5 -150.801c0 -74.2998 -57.4004 -159.5 -82 -192.6c-8 -10.7998 -79.2998 -105 -120.9 -105c-34 0 -88.7998 56.5 -125.399 104.9c-24.9004 32.8994 -82.7002 117.6 -82.7002 192.699c0 118.2 93.4004 214.4 208.1 214.4
+c53.9004 0 104.801 -22.5996 143.4 -63.5996zM226.6 -45.9004c37.3008 0 184.801 167.301 184.7 279.4c0 107.3 -83.8994 196.3 -184.7 196.3c-106.1 0 -190 -88.8994 -190 -196.3c0 -112.1 147.5 -279.4 190 -279.4zM338 241.2c0 -59.1006 -51.0996 -109.7 -110.8 -109.7
+c-100.601 0 -150.7 108.2 -92.9004 181.8v-0.399414c0 -24.5 20.1006 -44.4004 44.7998 -44.4004c24.7002 0 44.8008 19.9004 44.8008 44.4004c0 18.1992 -11.1006 33.7998 -26.9004 40.6992c76.5996 19.2002 141 -39.2998 141 -112.399z" />
+ <glyph glyph-name="phabricator" unicode="&#xf3db;" horiz-adv-x="496"
+d="M323 185.9c0 0 21.5996 -19.6006 20.9004 -20.7002l-8.10059 -19.7998c-0.5 -1.40039 -29.7002 -0.5 -29.7002 -0.5l-9.09961 -9.10059s1.59961 -31.5 0.200195 -32.0996l-20 -7.5c-1.2998 -0.5 -21.7998 23.2998 -21.7998 23.2998l-13.1006 0.200195
+s-19.2998 -24.1006 -20.7002 -23.5l-20.0996 8.2998c-1.40039 0.5 -1.2002 32.2998 -1.2002 32.2998l-9.39941 9.2998s-28.9004 -0.899414 -29.5 0.5l-9.5 20c-0.600586 1.40039 21.0996 21.2002 21.0996 21.2002l-0.0996094 12.9004s-21.6006 19.5996 -21 21
+l8.09961 19.7998c0.5 1.2998 29.7002 0.400391 29.7002 0.400391l9.09961 9.09961s-1.59961 28.4004 -0.200195 28.9004l20 8.2998c1.40039 0.599609 21.9004 -20.7998 21.9004 -20.7998l13.0996 -0.200195s19.3008 21.5996 20.7002 21l20.1006 -9.2002
+c1.39941 -0.599609 1.19922 -29.0996 1.19922 -29.0996l9.40039 -9.30078s28.9004 0.900391 29.5 -0.5l9.5 -20c0.599609 -1.39941 -21.0996 -21.1992 -21.0996 -21.1992zM278.1 194.6c-0.699219 17 -15.5 30.3008 -32.7998 29.5
+c-17.2998 -0.699219 -30.7998 -15.1992 -30.0996 -32.2998c0.700195 -17.0996 15.5 -30.3994 32.7998 -29.5996s30.7998 15.2998 30.0996 32.3994zM479.3 232.5c22.2998 -22.2998 22.2998 -58.7002 0 -81c-67.3994 -67.4004 -44.2998 -44.4004 -95.2998 -95.2998
+c-74.4004 -74.5 -194.7 -74.9004 -269.8 -1.60059l-0.100586 -0.0996094c-51 51 -27.5 27.5996 -97.3994 97c-22.2998 22.2998 -22.2998 58.7002 0 81c67.8994 67.4004 44.7998 44.2998 95.7002 95.2998c74.3994 74.4004 194.699 74.9004 269.8 1.60059l0.0996094 0.0996094
+zM140.4 84.2002c59.5996 -59.5 156 -59.6006 215.6 -0.100586c59.5996 59.6006 59.5 156.101 0 215.601c-59.5996 59.5 -156.1 59.5996 -215.6 0c-59.6006 -59.5 -59.6006 -156 0 -215.5z" />
+ <glyph glyph-name="phoenix-framework" unicode="&#xf3dc;" horiz-adv-x="640"
+d="M212.9 103.7c-36.7002 -1.2002 -108.7 29.2998 -127.7 106.399c-8.7002 35.3008 -2.7002 51.8008 -8 86.1006c-8.2002 53.3994 -32.1006 72.2002 -55.9004 76.5c-6.2002 1.09961 -12.3994 1.2998 -18.7002 0.299805
+c-0.799805 -0.0996094 -1.59961 -0.200195 -2.39941 -0.200195c-0.100586 0.200195 -0.100586 0.299805 -0.200195 0.5c0.700195 0.600586 1.40039 1.2002 2.2002 1.7998c36.8994 26.9004 92 38.4004 136.3 35c123.6 -9.5 141.3 -156.6 252.5 -173.1
+c6.09961 -0.900391 12.2998 -1.09961 18.5 -1.7002c0.700195 -0.0996094 1.40039 -0.0996094 2.5 -0.200195c-2.09961 -2.19922 -21.5996 -11.7998 -36.5 -14.5c-18.4004 -3.39941 -35.7002 -0.0996094 -51.2998 10.3008c-14.5 9.7998 -24.5 23.5 -38.9004 27.3994
+c-13 3.60059 -34.0996 1.7002 -35.8994 -19.5996c-1.30078 -15.9004 14.1992 -51.7998 51.7998 -74.6006c40.3994 -24.5 101.399 -26.8994 134.7 -14.7998c0.299805 0.100586 0.699219 0.200195 1.09961 0.299805c0.200195 0.100586 0.400391 0 1 -0.0996094
+c-23.5996 -28.4004 -71.2002 -49.9004 -108.2 -45.4004c-50.3994 6.2002 -77.7002 75.9004 -113.7 97.5c-19.0996 11.5 -49.0996 7 -52 -18.5c-1.09961 -10 2.10059 -19 6.40039 -27.5996c24.4004 -48.5996 65.5996 -47 68 -49.5996
+c-2.7998 -0.800781 -21.7998 -2.10059 -25.5996 -2.2002zM75.2998 383.1c13.1006 -14.5 34.2002 -7.89941 35.2998 6.80078c-12.3994 -0.700195 -24.5 -2.2002 -36.5996 -4.80078c0.400391 -0.799805 0.400391 -1 1.2998 -2zM272.2 32.5996
+c-42.7998 -1.19922 -92 26.7002 -123.5 61.4004c-4.60059 5 -16.7998 20.2002 -18.6006 23.4004l0.400391 0.399414c6.59961 -4.09961 25.7002 -18.5996 54.7998 -27c24.2002 -7 48.1006 -6.2998 71.6006 3.2998c22.6992 9.30078 41 0.5 43.0996 -2.89941
+c-18.5 -3.7998 -20.0996 -4.40039 -24 -7.90039c-5.09961 -4.39941 -4.59961 -11.7002 7 -17.2002c26.2002 -12.3994 63 2.80078 97.2002 -25.3994c2.39941 -2 8.09961 -7.7998 10.0996 -10.7002c-0.0996094 -0.200195 -0.299805 -0.299805 -0.399414 -0.5
+c-4.80078 1.5 -16.4004 7.5 -40.2002 9.2998c-24.7002 2 -46.2998 -5.2998 -77.5 -6.2002zM447 284.6c16.4004 5.2002 41.2998 13.4004 66.5 3.30078c16.0996 -6.5 26.2002 -18.7002 32.0996 -34.6006c3.5 -9.39941 5.10059 -19.7002 5.10059 -28.7002
+c-0.200195 0 -0.400391 0 -0.600586 -0.0996094c-0.199219 0.400391 -0.399414 0.900391 -0.5 1.2998c-5 22 -29.8994 43.7998 -67.5996 29.9004c-50.2002 -18.6006 -130.4 -9.7002 -176.9 48c-0.699219 0.899414 -2.39941 1.7002 -1.2998 3.2002
+c0.100586 0.199219 2.10059 -0.600586 3 -1.30078c18.1006 -13.3994 38.2998 -21.8994 60.2998 -26.1992c30.5 -6.10059 54.6006 -2.90039 79.9004 5.19922zM549.7 167.1c-32.4004 -0.199219 -33.7998 -50.0996 -103.601 -64.3994
+c-18.1992 -3.7002 -38.6992 -4.60059 -44.8994 -4.2002v0.400391c2.7998 1.5 14.7002 2.59961 29.7002 16.5996c7.89941 7.2998 15.2998 15.0996 22.7998 22.9004c19.5 20.1992 41.3994 42.1992 81.8994 39c23.1006 -1.80078 29.3008 -8.2002 36.1006 -12.7002
+c0.299805 -0.200195 0.399414 -0.5 0.700195 -0.900391c-0.5 0 -0.700195 -0.0996094 -0.900391 0c-7 2.7002 -14.2998 3.2998 -21.7998 3.2998zM537.4 191.2c-0.100586 -0.200195 -0.100586 -0.400391 -0.200195 -0.600586c-28.9004 4.40039 -48 7.90039 -68.5 -4
+c-17 -9.89941 -31.4004 -20.5 -62 -24.3994c-27.1006 -3.40039 -45.1006 -2.40039 -66.1006 8c-0.299805 0.200195 -0.599609 0.399414 -1 0.599609c0 0.200195 0.100586 0.299805 0.100586 0.5c24.8994 -3.7998 36.3994 -5.09961 55.5 5.7998
+c22.2998 12.9004 40.0996 26.6006 71.2998 31c29.5996 4.10059 51.2998 -2.5 70.9004 -16.8994zM268.6 350.7c-0.599609 0.599609 -1.09961 1.2002 -2.09961 2.2998c7.59961 0 29.7002 1.2002 53.4004 -8.40039c19.6992 -8 32.1992 -21 50.1992 -32.8994
+c11.1006 -7.2998 23.4004 -9.2998 36.4004 -8.10059c4.2998 0.400391 8.5 1.2002 12.7998 1.7002c0.400391 0.100586 0.900391 0 1.5 -0.299805c-0.599609 -0.400391 -1.2002 -0.900391 -1.7998 -1.2002c-8.09961 -4 -16.7002 -6.2998 -25.5996 -7.09961
+c-26.1006 -2.60059 -50.3008 3.7002 -73.4004 15.3994c-19.2998 9.90039 -36.4004 22.9004 -51.4004 38.6006zM640 112.3c-3.5 -3.09961 -22.7002 -11.5996 -42.7002 -5.2998c-12.2998 3.90039 -19.5 14.9004 -31.5996 24.0996
+c-10 7.60059 -20.9004 7.90039 -28.1006 8.40039c0.600586 0.799805 0.900391 1.2002 1.2002 1.40039c14.7998 9.19922 30.5 12.1992 47.2998 6.5c12.5 -4.2002 19.2002 -13.5 30.4004 -24.2002c10.7998 -10.4004 21 -9.90039 23.0996 -10.5
+c0.100586 0.0996094 0.200195 0 0.400391 -0.400391zM427.5 -24.7002c2.2002 -1.2002 1.59961 -1.5 1.5 -2c-18.5 1.40039 -33.9004 7.60059 -46.7998 22.2002c-21.7998 24.7002 -41.7002 27.9004 -48.6006 29.7002c0.5 0.200195 0.800781 0.399414 1.10059 0.399414
+c13.0996 -0.0996094 26.0996 -0.699219 38.8994 -3.89941c25.3008 -6.40039 35 -25.4004 41.6006 -35.2998c3.2002 -4.80078 7.2998 -8.30078 12.2998 -11.1006z" />
+ <glyph glyph-name="playstation" unicode="&#xf3df;" horiz-adv-x="576"
+d="M570.9 75.7002c-11.3008 -14.2002 -38.8008 -24.2998 -38.8008 -24.2998l-205.1 -73.6006v54.2998l150.9 53.8008c17.0996 6.09961 19.7998 14.7998 5.7998 19.3994c-13.9004 4.60059 -39.1006 3.2998 -56.2002 -2.89941l-100.5 -35.5v56.3994
+c23.2002 7.7998 47.0996 13.6006 75.7002 16.7998c40.8994 4.5 90.8994 -0.599609 130.2 -15.5c44.1992 -14 49.1992 -34.6992 38 -48.8994zM346.5 168.2v139c0 16.2998 -3 31.2998 -18.2998 35.5996c-11.7002 3.7998 -19 -7.09961 -19 -23.3994v-347.9l-93.7998 29.7998
+v414.7c39.8994 -7.40039 98 -24.9004 129.199 -35.4004c79.5 -27.2998 106.4 -61.2998 106.4 -137.8c0 -74.5 -46 -102.8 -104.5 -74.5996zM43.2002 37.7998c-45.4004 12.7998 -53 39.5 -32.2998 54.7998c19.0996 14.2002 51.6992 24.9004 51.6992 24.9004l134.5 47.7998
+v-54.5l-96.7998 -34.5996c-17.0996 -6.10059 -19.7002 -14.7998 -5.7998 -19.4004c13.9004 -4.59961 39.0996 -3.2998 56.2002 2.90039l46.3994 16.8994v-48.7998c-51.5996 -9.2998 -101.399 -7.2998 -153.899 10z" />
+ <glyph glyph-name="pushed" unicode="&#xf3e1;" horiz-adv-x="432"
+d="M407 336.1c21.7002 -1.89941 33.7998 -28 17.4004 -44.7998l-235.2 -231.3l-35.2998 -80.7998c-11 -17.2002 -41.2002 -14.2998 -47.7002 7l-105.101 348.3c-4.59961 18.2998 6.30078 33.9004 21.4004 36.5996l271.3 44.4004c17.9004 3.40039 39.1006 -13.5 28.7002 -37
+l-14 -33.4004zM297.6 394.4l-189 -31l177.4 -16.3008l16.7998 39.9004c2.2998 4.90039 -0.0996094 8.09961 -5.2002 7.40039zM22.7002 340.1l157.899 -244.3l96.9004 230.7l-248.7 22.7002c-5.09961 0.899414 -9.2002 -4 -6.09961 -9.10059zM136 -8.40039
+c0 0 28.2002 64.1006 35.2002 79.1006l-127.7 197.6l83.0996 -275.5c1.5 -4.2998 6.80078 -5.2002 9.40039 -1.2002zM408.8 306.1c3.10059 3.30078 1.40039 7.5 -2.59961 8.60059l-106.4 9.7002l-89.7002 -213.7z" />
+ <glyph glyph-name="python" unicode="&#xf3e2;"
+d="M439.8 247.5c10.7002 -42.9004 11.2002 -75.0996 0 -108.6c-10.7998 -32.5 -22.2998 -54.2002 -53.3994 -54.2002h-160.2v-13.6006h106.7v-40.6992c0 -30.8008 -26.5 -46.5 -53.4004 -54.3008c-40.5 -11.6992 -73 -9.89941 -106.8 0
+c-28.2002 8.30078 -53.4004 25.3008 -53.4004 54.3008v101.8c0 29.2998 24.2002 54.2998 53.4004 54.2998h106.8c35.5996 0 66.7998 31 66.7998 67.7998v47.4004h40.1006c31.0996 0 45.6992 -23.2998 53.3994 -54.2002zM286.2 44c-11 0 -20 -9 -20.1006 -20.2998
+c0 -11.2002 9.10059 -20.4004 20.1006 -20.4004c11.0996 0 20.0996 9.10059 20.0996 20.4004c0 11.2002 -9 20.2998 -20.0996 20.2998zM167.8 199.9c-36.2998 0 -66.7998 -31.1006 -66.7998 -66.4004v-48.7998h-36.7002c-31.0996 0 -49.2002 22.5996 -56.7998 54.2002
+c-10.2002 42.5 -9.7998 67.8994 0 108.6c8.5 35.5 35.7002 54.2002 66.7998 54.2002h147v13.5996h-106.899v40.7002c0 30.9004 8.19922 47.5996 53.3994 55.5996c32.1006 5.7002 71 6 106.8 0.100586c29 -4.90039 53.4004 -26.6006 53.4004 -55.6006v-101.899
+c0 -29.7998 -23.7002 -54.2998 -53.4004 -54.2998h-106.8zM161.1 342.5c11.1006 0 20.1006 9.09961 20.1006 20.2998s-9.10059 20.4004 -20.1006 20.4004c-11.0996 0 -20 -9.10059 -20.0996 -20.4004c0 -11.2002 9 -20.2998 20.0996 -20.2998z" />
+ <glyph glyph-name="red-river" unicode="&#xf3e3;"
+d="M353.2 416c52.3994 0 94.7998 -42.4004 94.7998 -94.7998v-258.4c0 -52.3994 -42.4004 -94.7998 -94.7998 -94.7998h-258.4c-52.3994 0 -94.7998 42.4004 -94.7998 94.7998v258.4c0 52.3994 42.4004 94.7998 94.7998 94.7998h258.4zM144.9 247.1
+c-0.600586 12.4004 11.6992 24.6006 24 24h56.2998c27 0 48.8994 21.9004 48.8994 48.9004h-154.199c-13.2002 0 -23.9004 -10.7002 -23.9004 -23.9004v-154.199c27 0 48.9004 21.8994 48.9004 48.8994v56.2998zM321.2 175.1c27 0 48.8994 21.9004 48.8994 48.9004h-154.199
+c-13.2002 0 -23.9004 -10.7002 -23.9004 -23.9004v-154.199c27 0 48.9004 21.8994 48.9004 48.8994v56.2998c-0.600586 12.4004 11.6992 24.6006 24 24h56.2998z" />
+ <glyph glyph-name="wpressr" unicode="&#xf3e4;" horiz-adv-x="496"
+d="M248 440c136.97 0 248 -111.03 248 -248s-111.03 -248 -248 -248s-248 111.03 -248 248s111.03 248 248 248zM419.33 281.4c2.41016 5.47949 0.459961 8.2793 -5.62012 8.26953c-104.8 0.00976562 -107.69 -0.0302734 -130.78 0.0302734
+c-4.31934 0.00976562 -7.10938 -1.82031 -8.83984 -5.78027c-5.70996 -13.0996 -11.5195 -26.1504 -17.2998 -39.21c-2.57031 -5.7998 -1 -8.26953 5.26953 -8.26953c25.2607 0 50.5205 -0.0107422 75.7803 0.0195312
+c10.0303 0.00976562 8.54004 -13.6602 -3.89941 -13.6396c-26.4307 0.0498047 -52.8604 0 -79.29 0.0498047c-4.91016 0.00976562 -8.33008 -1.88965 -10.3506 -6.5c-4.2998 -9.83008 -32.1494 -73.0801 -32.1895 -73.1602
+c-3.2002 -7.16016 -16.2607 -6.09961 -11.2803 5.33008c8.26953 18.9902 16.6504 37.9297 24.9795 56.8896c2.25 5.11035 -0.0996094 8.74023 -5.65918 8.75c-15.21 0.0205078 -30.4307 -0.0400391 -45.6406 0.0400391
+c-3.35938 0.0107422 -5.41016 -1.29004 -6.76953 -4.38965c-31.4307 -71.8701 -29.7803 -67.3203 -30.0098 -67.6904c-3.87012 -6.37012 -14.8604 -3.34961 -10.9502 5.60059c5.66992 13.0098 11.3701 26.0098 17.0898 39c13.5703 30.7793 27.1396 61.5596 40.7402 92.3301
+c2.54004 5.75 -0.419922 10.5801 -6.66016 10.5898c-14.2402 0.0302734 -28.4805 -0.0498047 -42.7197 0.0498047c-4.26074 0.0302734 -6.84082 -1.76953 -8.54004 -5.65039c-12.8604 -29.3896 -25.8203 -58.7295 -38.75 -88.0791
+c-8.62012 -19.5605 -17.2305 -39.1201 -25.8906 -58.6602c-1.58008 -3.55078 -1.47949 -6.78027 1.20996 -9.73047c11.2207 -12.3096 22.4707 -24.6094 33.6807 -36.9395c2.08984 -2.30078 4.58984 -3.4502 7.71973 -3.4502c45.9395 0.0195312 91.8701 0.00976562 137.81 0
+c3.86035 0 6.37988 1.78027 7.91992 5.29004c10.3203 23.5 20.7607 46.9395 30.9502 70.5c2.08984 4.83008 5.21973 6.75 10.3398 6.71973c23.0205 -0.110352 46.0303 -0.0400391 69.0508 -0.0498047c6.0791 0 10.5293 2.72949 12.9697 8.24023
+c15.2598 34.4795 30.4502 68.9893 45.6299 103.5z" />
+ <glyph glyph-name="replyd" unicode="&#xf3e6;"
+d="M320 -32h-192c-70.4004 0 -128 57.5996 -128 128v192c0 70.4004 57.5996 128 128 128h192c70.4004 0 128 -57.5996 128 -128v-192c0 -70.4004 -57.5996 -128 -128 -128zM193.4 174.8c-6.10059 2 -11.6006 3.10059 -16.4004 3.10059
+c-7.2002 0 -13.5 -1.90039 -18.9004 -5.60059c-5.39941 -3.7002 -9.59961 -9 -12.7998 -15.7998h-1.09961l-4.2002 18.2998h-28v-138.899h36.0996v89.6992c1.5 5.40039 4.40039 9.80078 8.7002 13.2002c4.2998 3.40039 9.7998 5.10059 16.2002 5.10059
+c4.59961 0 9.7998 -1 15.5996 -3.10059zM308.6 71.4004c-3.19922 -2.40039 -7.69922 -4.80078 -13.6992 -7.10059s-12.8008 -3.5 -20.4004 -3.5c-12.2002 0 -21.0996 3 -26.5 8.90039c-5.5 5.89941 -8.5 14.7002 -9 26.3994h83.2998
+c0.900391 4.80078 1.60059 9.40039 2.10059 13.9004c0.5 4.40039 0.699219 8.59961 0.699219 12.5c0 10.7002 -1.59961 19.7002 -4.69922 26.9004c-3.2002 7.19922 -7.30078 13 -12.5 17.1992c-5.2002 4.30078 -11.1006 7.30078 -17.8008 9.2002
+c-6.69922 1.7998 -13.5 2.7998 -20.5996 2.7998c-21.0996 0 -37.5 -6.09961 -49.2002 -18.2998s-17.5 -30.5 -17.5 -55c0 -22.7998 5.2002 -40.7002 15.6006 -53.7002c10.3994 -13.0996 26.7998 -19.5996 49.1992 -19.5996c10.7002 0 20.9004 1.5 30.4004 4.59961
+c9.5 3.10059 17.0996 6.80078 22.5996 11.2002zM286.8 141.7c3.7998 -5.40039 5.2998 -13.1006 4.60059 -23.1006h-51.7002c0.899414 9.40039 3.7002 17 8.2002 22.6006c4.5 5.59961 11.5 8.5 21 8.5c8.19922 0.0996094 14.0996 -2.60059 17.8994 -8zM366.7 139.2
+c4.09961 -3.90039 9.39941 -5.7998 16.0996 -5.7998c7 0 12.6006 1.89941 16.7002 5.7998c4.09961 3.89941 6.09961 9.09961 6.09961 15.5996s-2 11.6006 -6.09961 15.4004s-9.59961 5.7002 -16.7002 5.7002c-6.7002 0 -12 -1.90039 -16.0996 -5.7002
+c-4.10059 -3.7998 -6.10059 -8.90039 -6.10059 -15.4004s2 -11.7002 6.10059 -15.5996zM366.7 38.7002c4.09961 -3.90039 9.39941 -5.7998 16.0996 -5.7998c7 0 12.6006 1.89941 16.7002 5.7998c4.09961 3.89941 6.09961 9.09961 6.09961 15.5996
+s-2 11.6006 -6.09961 15.4004s-9.59961 5.7002 -16.7002 5.7002c-6.7002 0 -12 -1.90039 -16.0996 -5.7002c-4.10059 -3.7998 -6.10059 -8.90039 -6.10059 -15.4004c0 -6.59961 2 -11.7002 6.10059 -15.5996z" />
+ <glyph glyph-name="resolving" unicode="&#xf3e7;" horiz-adv-x="496"
+d="M281.2 169.8l-197.9 -57.2002l-28.5996 98.6006l188.2 54.0996c52.6992 15.2998 65 8.10059 71.0996 -12.7998l11.2002 -39.2998c5.59961 -19.9004 2 -30.1006 -44 -43.4004zM248.5 440c137 0 248.5 -111.4 247.5 -247.7c0 -136.899 -111.5 -248.3 -248.5 -248.3
+c-46 0 -89.5 12.7002 -126.3 34.7002l-23 80.2002l286.8 -37.3008l48.0996 13.3008l-9.69922 34.1992l-220.4 27.1006l92.5996 26.5996c30.2002 8.7002 42 15.7998 61.4004 33.2002c24.5 23 31.7002 45.5 23.5 73.5996l-10.7002 37.8008
+c-8.7002 30.1992 -25.0996 49.0996 -61.3994 55.1992c-25.1006 3.5 -44.5 2 -79.3008 -8.19922l-221.899 -63.9004c26 108.8 124.2 189.5 241.3 189.5zM38.2998 59.4004c-24 38.3994 -38.2998 83.2998 -38.2998 131.8z" />
+ <glyph glyph-name="rocketchat" unicode="&#xf3e8;" horiz-adv-x="582"
+d="M491.9 342.1c121 -77.3994 120.6 -223.5 0 -300.5c-78 -51.5 -181.4 -63.0996 -267.101 -47.6992c-96.5996 -92.2002 -204 -49.7002 -224.8 -37.4004c0 0 73.9004 62.7998 61.9004 117.8c-45.9004 46.9004 -87.5 146.2 0 235.4c12 55 -61.9004 117.8 -61.9004 117.8
+c21 12.2998 128.7 54.9004 224.8 -37.7998c85.9004 15.5 189.3 3.7998 267.101 -47.6006zM297.5 42.0996c128.1 0 232 67 232 149.9s-103.9 150.1 -232 150.1s-232 -67.1992 -232 -150.1c0 -36.2002 19.7002 -69.2998 52.5996 -95.2002
+c9.2002 -29.5 3.80078 -62.7002 -16.1992 -99.7998c-0.900391 -1.7002 -1.80078 -3.5 -2.80078 -5.2998c18.2002 1.59961 36 6.7002 52.3008 14.8994c13.3994 7.60059 25.8994 16.9004 37 27.7002l20 19.2998c29.0996 -7.69922 59 -11.5996 89.0996 -11.5zM186.2 156.3
+c-19.1006 -0.299805 -34.9004 15 -35.2002 34.1006c-0.700195 45.8994 68.5996 46.8994 69.2998 1.09961v-0.5c0.200195 -19.2998 -15.5 -34.7002 -34.0996 -34.7002zM260.8 190.4c-0.799805 45.8994 68.5 47 69.2998 1.19922v-0.599609
+c0.400391 -45.5996 -68.5 -46.0996 -69.2998 -0.599609zM405.8 156.3c-19.0996 -0.299805 -34.8994 15 -35.2002 34.1006c-0.699219 45.8994 68.6006 46.8994 69.3008 1.09961v-0.5c0.199219 -19 -15 -34.5996 -34.1006 -34.7002z" />
+ <glyph glyph-name="rockrms" unicode="&#xf3e9;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM405.4 20.5l-101.5 118.9s73.5996 0.199219 74.1992 0.199219c29.6006 -1.09961 46.6006 33.3008 27.6006 56.1006l-157.7 185.1c-13.2002 17.2998 -40.0996 18.4004 -54.5 0
+l-147.1 -172.5h90l84.2998 98.9004l84.5996 -99.2998h-75.2998c-30.5 0 -44.5 -35.7002 -26.5996 -56.1006l112 -131.3h90z" />
+ <glyph glyph-name="schlix" unicode="&#xf3ea;"
+d="M350.5 290.3l-54.2002 46.1006l73.4004 39l78.2998 -44.2002zM192 325.9l45.7002 28.1992l34.7002 -34.5996l-55.4004 -29zM126.9 319.3l31.8994 22.1006l17.2002 -28.4004l-36.7002 -22.5zM103.6 231.1l-8.7998 34.8008l29.6006 18.2998l13.0996 -35.2998z
+M82.4004 314.8l23.8994 18.1006l8.90039 -24l-26.7002 -18.3008zM59 241.5l-3.59961 28.4004l22.2998 15.5l6.09961 -28.7002zM28.4004 224.9l20.7998 12.7998l3.2998 -33.4004l-22.9004 -12zM1.40039 180l19.1992 10.2002l0.400391 -38.2002l-21 -8.7998zM60.5 120.7
+l-28.2998 -8.2998l-1.60059 46.7998l25.1006 10.7002zM99 184.8l-31.0996 -13l-5.2002 40.7998l27.3994 14.4004zM123.2 71l-41.6006 -5.90039l-8.09961 63.5l35.2002 10.8008zM151.7 210.9l21.2002 -57.1006l-46.2002 -13.5996l-13.7002 54.0996zM237.4 -19.5996
+l-70.9004 3.2998l-24.2998 95.7998l55.2002 8.59961zM152.5 260.1l42.2002 22.4004l28 -45.9004l-50.7998 -21.2998zM193.5 165.2l61.2998 18.7002l52.7998 -86.6006l-79.7998 -11.2998zM244.9 250.8l67.2998 28.7998l65.5 -65.3994l-88.6006 -26.2002z" />
+ <glyph glyph-name="searchengin" unicode="&#xf3eb;" horiz-adv-x="459"
+d="M220.6 317.7l-67.1992 -209.3v130.3l-54.7002 -24.2002l54.7002 190.3v-115.3zM137.4 414.4l-1.30078 -4.7002l-15.1992 -52.9004c-40.3008 -15.5 -68.9004 -54.5996 -68.9004 -100.3c0 -52.2998 34.2998 -95.9004 83.4004 -105.5v-53.5996
+c-77.9004 10.5 -135.4 78.1992 -135.4 159c0 80.5 59.7998 147.199 137.4 158zM448.8 -32.7998c-11.2002 -11.2002 -23.0996 -12.2998 -28.5996 -10.5c-5.40039 1.7998 -27.1006 19.8994 -60.4004 44.3994c-33.2998 24.6006 -33.5996 35.7002 -43 56.7002
+c-9.39941 20.9004 -30.3994 42.6006 -57.5 52.4004l-9.7002 14.7002c-24.6992 -16.9004 -53 -26.9004 -81.2998 -28.7002l2.10059 6.59961l15.8994 49.5c46.5 11.9004 80.9004 54 80.9004 104.2c0 54.5 -38.4004 102.1 -96 107.1v52.1006
+c83.2002 -5.10059 148.8 -74.5 148.8 -159.3c0 -33.6006 -11.2002 -64.7002 -29 -90.4004l14.5996 -9.59961c9.80078 -27.1006 31.5 -48 52.4004 -57.4004s32.2002 -9.7002 56.7998 -43c24.6006 -33.2002 42.7002 -54.9004 44.5 -60.2998
+c1.7998 -5.40039 0.700195 -17.2998 -10.5 -28.5zM438.9 -14.9004c0 4.40039 -3.60059 8 -8 8c-4.40039 0 -8 -3.59961 -8 -8c0 -4.39941 3.59961 -8 8 -8c4.39941 0 8 3.60059 8 8z" />
+ <glyph glyph-name="servicestack" unicode="&#xf3ec;" horiz-adv-x="496"
+d="M88 232c81.7002 -10.2002 273.7 -102.3 304 -232h-392c99.5 8.09961 184.5 137 88 232zM120 384c102.8 -15.5 335.3 -167.9 376 -384h-96c-26.2998 126.7 -150.7 216.7 -233.6 250.4c1.2998 49.6992 -14.1006 98 -46.4004 133.6z" />
+ <glyph glyph-name="sistrix" unicode="&#xf3ee;"
+d="M448 -1l-30.5 -31l-146 148.1c-28.7002 -23.6992 -65.2002 -37.8994 -105 -37.8994c-91.7998 0 -166.5 75.7998 -166.5 168.899c0 93.1006 74.7002 168.9 166.6 168.801c91.8008 0 166.5 -75.8008 166.5 -168.9c0 -37 -11.8994 -71.2998 -31.8994 -99.2002zM166.5 117.2
+c70.7002 0 128.1 58.2998 128.1 129.899c0 71.6006 -57.5 129.9 -128.1 129.9s-128.1 -58.2998 -128.1 -129.9c0 -71.5996 57.5 -129.899 128.1 -129.899z" />
+ <glyph glyph-name="slack-hash" unicode="&#xf3ef;"
+d="M446.2 177.6c6.2002 -19 -3.90039 -39.6992 -22.9004 -45.6992l-45.3994 -15.1006l15.6992 -47c6.10059 -19.0996 -3.89941 -39.7002 -23 -45.8994c-21.2998 -6.10059 -40.0996 6 -46 22.8994l-15.6992 47l-93.6006 -31.2998l15.7002 -47
+c6.09961 -19.0996 -3.90039 -39.7002 -23 -45.9004c-21.2998 -6.09961 -40.0996 6 -46 22.9004l-15.7002 47c-45.7002 -15.2002 -50.8994 -17.7998 -57.7002 -16.7998c-14.5 0.599609 -28.5996 10.0996 -33.5996 24.5996c-6.09961 19 4 39.7002 23 45.9004l45.4004 15.0996
+l-30.3008 90c-45.6992 -15.2002 -50.8994 -17.7998 -57.6992 -16.7998c-14.5 0.599609 -28.6006 10.0996 -33.6006 24.5996c-6.09961 19.1006 3.90039 39.7002 23 45.9004l45.2998 15l-15.6992 47c-6.10059 19.0996 3.89941 39.7002 23 45.9004
+c19.0996 6.19922 39.7998 -3.90039 46 -22.9004l15.6992 -47l93.4004 31.2002l-15.7002 47c-6.09961 19.0996 3.90039 39.7002 23 45.8994c19.1006 6.2002 39.7998 -3.89941 46 -22.8994l15.7002 -47l45.4004 15.0996c19.0996 6.2002 39.7998 -3.89941 46 -22.8994
+c6.09961 -19.1006 -3.90039 -39.7002 -23 -45.9004l-45.4004 -15.0996l30.2998 -90l45.4004 15.0996c19.0996 6.2002 39.7998 -3.90039 46 -22.9004zM192.1 130.4l93.5 31.2998l-30.2998 90.2002l-93.5 -31.3008z" />
+ <glyph glyph-name="speakap" unicode="&#xf3f3;" horiz-adv-x="430"
+d="M55.0098 56.2197c-79.4102 88.1904 -71.96 224.36 16.6406 304.141c88.5996 79.7793 224.8 72.96 304.21 -15.2402c79.4092 -88.2002 71.96 -224.36 -16.6406 -304.14c-18.7393 -16.8701 64.0303 -43.0908 42.0205 -52.2607c-82.0605 -34.21 -253.91 -35.04 -346.23 67.5
+zM268.32 267.82l38.5 40.8594c-9.61035 8.89062 -32.0205 26.8301 -76.1699 27.6006c-52.3301 0.910156 -95.8604 -28.2998 -96.7705 -79.96c-0.200195 -11.3301 0.290039 -36.7207 29.4199 -54.8301c34.46 -21.4199 86.5205 -21.5098 85.9805 -52.2598
+c-0.370117 -21.2803 -26.4199 -25.8105 -38.5898 -25.6006c-2.98047 0.0498047 -30.2305 0.459961 -47.6104 24.6201l-39.9502 -42.6104c28.1602 -27.0098 58.9902 -32.6201 83.4902 -33.0498c10.2295 -0.179688 96.4199 -0.330078 97.8398 81
+c0.280273 15.8105 -2.07031 39.7197 -28.8604 56.5898c-34.3594 21.6406 -84.96 19.4502 -84.4297 49.75c0.410156 23.25 30.96 25.3701 37.5303 25.2607c0.429688 0 26.6201 -0.260742 39.6201 -17.3701z" />
+ <glyph glyph-name="staylinked" unicode="&#xf3f5;" horiz-adv-x="439"
+d="M382.7 155.5l44.2998 -41.2998c3.7002 -3.5 3.2998 -9 -0.700195 -12.2002l-198 -163.9c-9.89941 -7.59961 -17.2998 -0.799805 -17.2998 -0.799805l-208.7 196.101c-3.5 3.5 -3 9 1.2002 12.1992l45.7998 34.9004c4.2002 3.2002 10.4004 3 13.9004 -0.5l151.899 -147.5
+c3.7002 -3.5 10 -3.7002 14.2002 -0.400391l93.2002 74c4.09961 3.2002 4.5 8.7002 0.900391 12.2002l-84 81.2998c-3.60059 3.5 -9.90039 3.7002 -14 0.5l-0.100586 -0.0996094c-4.09961 -3.2002 -10.3994 -3 -14 0.5l-68.0996 64.2998
+c-3.5 3.5 -3.10059 9 1.09961 12.2002l57.2998 43.5996c4.10059 3.2002 10.3008 3 13.8008 -0.5l170 -167.3zM437.2 238.9c3.7002 -3.5 3.39941 -9 -0.700195 -12.2002l-45.7998 -35.7998c-4.10059 -3.2002 -10.4004 -3 -14.1006 0.5l-160.399 159
+c-3.60059 3.5 -9.7998 3.69922 -13.9004 0.5l-92.2002 -71.5c-4.19922 -3.30078 -4.69922 -8.7002 -1.09961 -12.2002l94.5996 -91.7998c3.7002 -3.5 10 -3.60059 14.2002 -0.400391l0.100586 0.0996094c4.19922 3.2002 10.5996 3 14.1992 -0.5l57.1006 -54.3994
+c3.7002 -3.5 3.2998 -9 -0.900391 -12.2002l-7.7002 -6l0.300781 -0.299805l-50.2002 -38.7998c-4.2002 -3.30078 -10.6006 -3.10059 -14.2998 0.399414l-171.7 165.101l-42.2998 41.6992c-3.60059 3.5 -3 9 1.19922 12.2002l206.801 162.101
+c8.2998 6.59961 14.7998 2.2998 16.2998 1.09961z" />
+ <glyph glyph-name="steam-symbol" unicode="&#xf3f6;"
+d="M395.5 270.5c0 -33.7998 -27.5 -61 -61 -61c-33.7998 0 -61 27.2998 -61 61s27.2998 61 61 61c33.5 0 61 -27.2002 61 -61zM448 270.3c0 -63 -51 -113.8 -113.7 -113.8l-109.3 -79.7998c-4 -43 -40.5 -76.7998 -84.5 -76.7998c-40.5 0 -74.7002 28.7998 -83 67
+l-57.5 23.0996v107.3l97.2002 -39.2998c15.0996 9.2002 32.2002 13.2998 52 11.5l71 101.7c0.5 62.2998 51.5 112.8 114 112.8c62.7998 0 113.8 -51 113.8 -113.7zM203 85c0 34.7002 -27.7998 62.5 -62.5 62.5c-4.5 0 -9 -0.5 -13.5 -1.5l26 -10.5
+c25.5 -10.2002 38 -39 27.7002 -64.5c-10.2002 -25.5 -39.2002 -38 -64.7002 -27.5c-10.2002 4 -20.5 8.2998 -30.7002 12.2002c10.5 -19.7002 31.2002 -33.2002 55.2002 -33.2002c34.7002 0 62.5 27.7998 62.5 62.5zM410.5 270.3c0 42 -34.2998 76.2002 -76.2002 76.2002
+c-42.2998 0 -76.5 -34.2002 -76.5 -76.2002c0 -42.2002 34.2998 -76.2002 76.5 -76.2002c41.9004 -0.0996094 76.2002 33.9004 76.2002 76.2002z" />
+ <glyph glyph-name="sticker-mule" unicode="&#xf3f7;" horiz-adv-x="576"
+d="M561.7 248.4c-1.2998 -0.300781 0.299805 0 0 0zM555.5 325.8c20.2002 -50.0996 20.5996 -45.2002 20.5996 -52.8994c0 -7.5 -4.09961 -11 -7.19922 -16.5c-1.5 -3 -4.60059 -7.5 -7.2002 -8c-0.400391 0 -3 -0.5 -13.4004 -2.5c-7.2002 -1 -13.3994 4.5 -14.8994 9.5
+c-1.60059 4.69922 2.7998 10.0996 -11.8008 22.8994c-10.2998 10 -21.0996 11.2998 -31.8994 17c-9.7998 5.7002 -11.9004 -1 -18 -8c-18 -22.8994 -34 -46.8994 -52 -69.7998c-11.7998 -15 -24.2002 -30.4004 -33.5 -47.4004
+c-3.90039 -6.7998 -9.5 -28.0996 -10.2998 -29.8994c-6.2002 -17.7002 -5.5 -25.7998 -16.5 -68.2998c-3.10059 -10 -5.7002 -21.4004 -8.7002 -32.4004c-2.2002 -6.7998 -7.40039 -49.2998 -0.5 -59.4004c2.09961 -3.5 8.7002 -4.5 11.2998 -8
+c0.0996094 -0.0996094 9.59961 -18.1992 9.2998 -20c0 -6.09961 -9.39941 -5.59961 -11.2998 -6.5c-4.7998 -2.89941 -3.7998 -5.89941 -6.40039 -7.39941c-5.89941 -2.90039 -32.0996 -3.2002 -36.5 0.5c-4.09961 3 -2.19922 11.8994 -1.5 15
+c2.2002 15 -2.5 7.89941 -9.7998 11.5c-3.09961 1.5 -4.09961 5.5 -4.59961 10c-0.5 1.5 -1 2.5 -1.5 3.5c-1.7002 10.7002 6.7998 33.5996 8.2002 43.3994c4.89941 23.7002 -0.700195 37.2002 1.5 46.9004c3.69922 16.2002 4.09961 3.5 4.09961 29.9004
+c-1.40039 25.8994 3.2998 36.8994 0.5 38.8994c-14.7998 0 -64.2998 -10.7002 -112.2 -2c-46.0996 8.90039 -59.3994 29 -65.3994 30.9004c-10.3008 4.5 -23.2002 -0.5 -27.3008 -7c-0.0996094 -0.100586 -35 -70.6006 -39.5996 -87.7998
+c-6.2002 -20.5 -0.5 -47.4004 4.09961 -66.8008c0 -0.0996094 4.5 -14.5996 10.3008 -19.5c2.09961 -1.5 5.09961 -2.5 7.19922 -4.5c2.80078 -2.69922 9.40039 -15.1992 9.80078 -16c2.59961 -4.5 3.59961 -8 -1.5 -10.5c-3.60059 -2 -9.30078 -2.5 -14.4004 -2.5
+c-2.59961 -0.5 -1.5 -3.5 -3.09961 -5c-2.90039 -2.7998 -20.7002 -6.09961 -29.9004 -2.5c-2.59961 1 -5.7002 3 -6.2002 5c-1.5 4 2.10059 9 -1 12.5c-4.5 2.90039 -13.0996 2 -17 12c-2.2002 5.40039 -2.59961 7.60059 -2.59961 49.4004
+c0 9.7002 -5.90039 38.7002 -8.2002 46.9004c-1.5 5.5 -1.5 11.5 0 16c0.299805 0.899414 4.09961 4.59961 4.09961 13c-1 1.5 -4.59961 0.5 -5.09961 1.5c-10.4004 80.5996 -5.90039 79 -7.7002 98.2998c-1.5 16 -10.8994 43.8994 -6.7002 64.2998
+c0.5 2.40039 3.40039 21 24.2002 38.9004c31 26.6992 48.4004 38.2998 159 11.5c1.10059 -0.400391 66.2998 -21.1006 110.7 9c15.5 11.2998 28.7998 11.2998 35.5 16c0.0996094 0.0996094 61.7002 52.0996 87 65.2998c47.2002 29.3994 69.9004 16.7002 75.0996 18
+c4.7002 1 13.4004 25.7998 17 25.7998c5.5 0 1.60059 -20.2002 3.60059 -25.9004c0.5 -2 3.59961 -5 6.2002 -5c2.2998 0 1.69922 0.800781 10.2998 5c8.39941 5.40039 14.8994 17.6006 20.5996 17c11.7002 -1.59961 -19 -41.5996 -19 -46.8994
+c0 -2 0.200195 -0.799805 4.60059 -9.5c2.59961 -5.5 4.59961 -13.5 6.19922 -20c8.30078 -29.7002 5.7002 -14.6006 13.4004 -36.9004z" />
+ <glyph glyph-name="studiovinari" unicode="&#xf3f8;" horiz-adv-x="511"
+d="M480.3 260.3l4.2002 -28v-28l-25.0996 -44.0996l-39.8008 -78.4004l-56.0996 -67.5l-79.0996 -37.7998l-17.7002 -24.5l-7.7002 -12l-9.59961 -4s17.2998 63.5996 19.3994 63.5996c2.10059 0 20.2998 -0.699219 20.2998 -0.699219l66.7002 38.5996l-92.5 -26.0996
+l-55.8994 -36.8008l-22.8008 -28l-6.59961 -1.39941l20.7998 73.5996l6.90039 5.5l20.7002 -12.8994l88.2998 45.1992l56.7998 51.5l14.7998 68.4004l-125.399 -23.2998l15.1992 18.2002l-173.399 53.2998l81.8994 10.5l-166 122.899l114.9 -18.0996l-101.3 108
+l252.899 -126.6l-31.5 38l124.4 -74.4004l-143.3 99l18.7002 -38.4004l-49.6006 18.1006l-45.5 84.2998l194.601 -122l-42.9004 55.7998l108 -96.3994l12 8.89941l-21 16.4004l4.2002 37.7998l37.7998 10.4004l29.2002 -24.7002l11.5 -4.2002l-7 -6.2002l8.5 -12
+l-13.1006 -7.39941l-10.2998 -20.2002z" />
+ <glyph glyph-name="supple" unicode="&#xf3f9;" horiz-adv-x="640"
+d="M640 185.5c0 -64.0996 -109 -116.1 -243.5 -116.1c-24.7998 0 -48.5996 1.7998 -71.0996 5c7.69922 -0.400391 15.5 -0.600586 23.3994 -0.600586c134.5 0 243.5 56.9004 243.5 127.101c0 29.3994 -19.0996 56.3994 -51.2002 78
+c60 -21.1006 98.9004 -55.1006 98.9004 -93.4004zM47.7002 220.1c0.0996094 -29.3994 19.2998 -56.5 51.5996 -78c-60.2002 21 -99.2002 55 -99.2998 93.3008c-0.0996094 64.0996 108.8 116.3 243.3 116.699c24.7002 0 48.5 -1.69922 71 -4.89941
+c-7.7002 0.299805 -15.3994 0.5 -23.2998 0.5c-134.5 -0.299805 -243.4 -57.4004 -243.3 -127.601zM107.9 180.2l8.7998 10.8994s8.7998 -10.0996 20.7002 -10.0996c6.5 0 12.2998 3.5 12.2998 10.0996c0 14.5 -40.2002 13.3008 -40.2002 39.9004
+c0 13.9004 12 24.0996 28.5 24.0996c10 0 25.4004 -4.69922 25.4004 -16.7998v-7.89941h-14.2002v3.89941c0 4 -5.60059 6.60059 -11.2998 6.60059c-7.2002 0 -12.5 -3.7002 -12.5 -9.10059c0 -14.5996 40.1992 -11.7002 40.1992 -39.7002
+c0 -13.5996 -10.5 -25.0996 -28.3994 -25.0996c-18.7998 0 -29.2998 13.2002 -29.2998 13.2002zM228.7 253.8h15.7002v-55c0 -18.8994 -13.3008 -31.8994 -33.4004 -31.8994c-20.2998 0 -33.7002 13 -33.7002 31.8994v55h15.7998v-54.5
+c0 -11.2002 7.10059 -17.7002 17.8008 -17.7002c10.6992 0 17.7998 6.5 17.7998 17.8008v54.3994zM263.1 168.4v72h-7.7998v13.3994h39.1006c16 0 27.1992 -11.2002 27.1992 -27.7998s-11.1992 -28.0996 -27.1992 -28.0996h-15.5v-29.5h-15.8008zM278.9 211.4h12.5996
+c8.90039 0 14 5.7998 14 14.6992c0 8.7002 -5 14.4004 -13.7002 14.4004h-12.8994v-29.0996zM335.9 168.4v72h-7.80078v13.3994h39.1006c16 0 27.2002 -11.2002 27.2002 -27.7998s-11.2002 -28.0996 -27.2002 -28.0996h-15.5v-29.5h-15.7998zM351.6 211.4h12.6006
+c9 0 14 5.7998 14 14.6992c0 8.7002 -5 14.4004 -13.7002 14.4004h-12.9004v-29.0996zM408.7 176.6h0.0996094v61.2002c0 1.60059 -0.899414 2.60059 -2.59961 2.60059h-5.2002v13.3994h15.4004c5.7998 0 8.19922 -2.5 8.19922 -8.2002v-61.1992
+c0 -1.60059 0.900391 -2.60059 2.60059 -2.60059h18.5996c1.60059 0 2.60059 0.900391 2.60059 2.60059v5.19922h14.2998v-13c0 -5.7998 -2.40039 -8.19922 -8.2002 -8.19922h-37.5996c-5.80078 0 -8.2002 2.39941 -8.2002 8.19922zM472.1 176.6h-0.0996094v63.9004h-7.7998
+v13.4004h51.5996c5.7002 0 8.2002 -2.5 8.2002 -8.2002v-13h-14.2002v5.2002c0 1.59961 -0.899414 2.59961 -2.59961 2.59961h-19.2002v-22.4004h27.7002v-13.3994h-27.7002v-20.2998c0 -1.60059 0.900391 -2.60059 2.59961 -2.60059h19.7002
+c1.60059 0 2.60059 0.900391 2.60059 2.60059v5.19922h14.2998v-13c0 -5.7998 -2.5 -8.19922 -8.2002 -8.19922h-38.7002c-5.7998 0 -8.2002 2.39941 -8.2002 8.19922zM531 252.6h-2.7002v1.2002h7v-1.2002h-2.7002v-5.89941h-1.59961v5.89941zM536.7 253.8h2.39941
+l2.10059 -5.09961l2.09961 5.09961h2.2998v-7.09961h-1.5v5.7002l-2.2998 -5.7002h-1.2998l-2.2998 5.7002v-5.7002h-1.5v7.09961z" />
+ <glyph glyph-name="telegram-plane" unicode="&#xf3fe;" horiz-adv-x="447"
+d="M446.7 349.4l-67.6006 -318.801c-5.09961 -22.5 -18.3994 -28.0996 -37.2998 -17.5l-103 75.9004l-49.7002 -47.7998c-5.5 -5.5 -10.0996 -10.1006 -20.6992 -10.1006l7.39941 104.9l190.9 172.5c8.2998 7.40039 -1.7998 11.5 -12.9004 4.09961l-236 -148.6
+l-101.6 31.7998c-22.1006 6.90039 -22.5 22.1006 4.59961 32.7002l397.4 153.1c18.3994 6.90039 34.5 -4.09961 28.5 -32.1992z" />
+ <glyph glyph-name="uber" unicode="&#xf402;" horiz-adv-x="447"
+d="M414.1 416c18.7002 0 33.9004 -15.2002 33.8008 -33.9004v-380.199c0 -18.7002 -15.2002 -33.9004 -33.9004 -33.9004h-380.1c-18.7002 0 -33.9004 15.2002 -33.9004 34v380.1c0 18.7002 15.2002 33.9004 33.9004 33.9004h380.199zM237.6 56.9004
+c74.6006 7.5 129 74.0996 121.5 148.6c-7 69.4004 -65.3994 122.2 -135.1 122.2s-128.1 -52.7998 -135.1 -122.2h94.3994v20.4004c0 3.7998 3.10059 6.7998 6.7998 6.7998h67.9004c3.7998 0 6.7998 -3.10059 6.7998 -6.7998v-67.9004
+c0 -3.7998 -3.09961 -6.7998 -6.7998 -6.7998h-67.9004c-3.7998 0 -6.7998 3.09961 -6.7998 6.7998v20.4004h-94.3994c7.5 -74.6006 74.0996 -129 148.699 -121.5z" />
+ <glyph glyph-name="uikit" unicode="&#xf403;" horiz-adv-x="443"
+d="M443.9 320v-256l-225.9 -128l-218 128v214.3l87.5996 -45.0996v-117l133.5 -75.5l135.801 75.5v151l-101.101 57.5996l87.6006 53.1006zM308.6 398.9l-87.3994 -53l-86 47.2998l88.5996 54.7998z" />
+ <glyph glyph-name="uniregistry" unicode="&#xf404;" horiz-adv-x="384"
+d="M192 -32c-39.5 0 -76.2002 11.7998 -106.7 32.2002h213.5c-30.5996 -20.4004 -67.2998 -32.2002 -106.8 -32.2002zM102.9 161.1c0 -2.5 0.0996094 -5 0.299805 -7.39941h-103.101c-0.0996094 2.39941 -0.0996094 4.89941 -0.0996094 7.39941v12.4004h102.9v-12.4004z
+M123.4 104.1c8.89941 -10.5996 20.0996 -19.0996 33 -24.7998h-138.301c-3.7998 8 -7 16.2998 -9.59961 24.7998h114.9zM105.7 138.8c2 -7.89941 5.2002 -15.3994 9.2002 -22.2998h-109.7c-1.7002 7.2998 -3 14.7002 -3.90039 22.2998h104.4zM102.9 208.1v-17.2998h-102.9
+v17.2998h102.9zM102.9 381.3v-4.89941h-102.9v4.89941h102.9zM102.9 416v-2.5h-102.9v2.5h102.9zM102.9 346.7v-7.40039h-102.9v7.40039h102.9zM102.9 242.7v-14.7998h-102.9v14.7998h102.9zM102.9 312v-9.90039h-102.9v9.90039h102.9zM102.9 277.4v-12.4004h-102.9v12.4004
+h102.9zM269.1 116.5c4 6.90039 7.10059 14.4004 9.2002 22.2998h104.4c-0.799805 -7.59961 -2.10059 -15 -3.90039 -22.2998h-109.7zM281.1 302.2v9.7998h102.9v-9.7998h-102.9zM281.1 265v12.4004h102.9v-12.4004h-102.9zM281.1 339.3v7.40039h102.9v-7.40039h-102.9z
+M281.1 416h102.9v-2.5h-102.9v2.5zM78.0996 5.09961c-11.7998 8.7002 -23.5996 18.7002 -33.1992 29.7002h293.1c-9.5 -11.0996 -20.4004 -21 -32.2002 -29.7002h-227.7zM281.1 376.4v4.89941h102.9v-4.89941h-102.9zM281.1 227.9v14.7998h102.9v-14.7998h-102.9z
+M38.7998 42.2998c-6.59961 8.5 -10.5996 17.6006 -15.7998 27.2002h338.9c-5.2002 -9.59961 -11.1006 -18.7002 -17.8008 -27.2002h-305.3zM227.6 79.4004c12.8008 5.59961 24.1006 14.0996 32.9004 24.7998h115c-2.7002 -8.60059 -4.7998 -16.7998 -8.5 -24.7998h-139.4z
+M281.1 161.1v12.4004h102.9v-12.4004c0 -2.5 -0.0996094 -4.89941 -0.200195 -7.39941h-103.1c0.299805 2.39941 0.399414 4.89941 0.399414 7.39941zM281.1 190.8v17.2998h102.9v-17.2998h-102.9z" />
+ <glyph glyph-name="untappd" unicode="&#xf405;" horiz-adv-x="640"
+d="M401.3 398.1c-79.7998 -160.1 -84.5996 -152.5 -87.8994 -173.199l-5.2002 -32.8008c-1.90039 -12 -6.60059 -23.5 -13.7002 -33.3994l-148.9 -207.8c-7.59961 -10.6006 -20.3994 -16.2002 -33.3994 -14.6006c-40.2998 5 -77.7998 32.2002 -95.2998 68.5
+c-5.7002 11.7998 -4.5 25.7998 3.09961 36.4004l148.9 207.899c7.09961 9.90039 16.3994 18 27.1992 23.7002l29.3008 15.5c18.5 9.7998 9.69922 11.9004 135.6 138.9c1 4.7998 1 7.2998 3.59961 8c3 0.700195 6.60059 1 6.30078 4.59961l-0.400391 4.60059
+c-0.200195 1.89941 1.2998 3.59961 3.2002 3.59961c4.5 0.0996094 13.2002 -1.2002 25.5996 -10c12.2998 -8.90039 16.4004 -16.7998 17.7002 -21.0996c0.599609 -1.80078 -0.599609 -3.7002 -2.40039 -4.2002l-4.5 -1.10059
+c-3.39941 -0.899414 -2.5 -4.39941 -2.2998 -7.39941c0.100586 -2.7998 -2.2998 -3.60059 -6.5 -6.10059zM230.1 411.6c-3.19922 0.800781 -8.19922 1.2002 -6.7998 5.40039c1.2998 4.2998 5.40039 12.2002 17.7002 21.0996c12.4004 8.90039 21.0996 10.1006 25.5996 10
+c4.2002 -0.0996094 3.10059 -4.89941 2.80078 -8.19922c-0.300781 -3.60059 3.2998 -3.80078 6.2998 -4.60059c2.59961 -0.700195 2.59961 -3.2998 3.59961 -8c9.10059 -9.2002 17.6006 -17.8994 25.6006 -26.0996c1.2998 -1.40039 1.19922 -3.5 -0.100586 -4.90039
+c-15.8994 -16.3994 -29.2998 -30.5996 -40.5 -42.5996c-1 -1 -2.59961 -0.799805 -3.2998 0.5c-6.90039 13.5 -14.2998 28.0996 -22.2002 44c-4.2998 2.5 -6.59961 3.2998 -6.39941 6c0.199219 3 1.09961 6.5 -2.30078 7.39941zM620 41.2998
+c7.7002 -10.7002 8.7998 -24.7002 3.40039 -36.5996c-17.7002 -36.6006 -55.4004 -63.7002 -95.7002 -68.6006c-12.9004 -1.5 -25.5 4.10059 -33.1006 14.7002l-148.899 207.9c-7.10059 9.89941 -11.7998 21.3994 -13.7002 33.3994
+c-1.59961 9.80078 -2 19.1006 -0.299805 29.8008c1.89941 12 2.7002 6 49 94.7998c0.700195 1.39941 2.59961 1.59961 3.59961 0.5c16.2998 -18 19.2998 -23 30.5 -28.9004c29.7998 -15.7002 43.2002 -20.5996 56.4004 -39.0996z" />
+ <glyph glyph-name="ussunnah" unicode="&#xf407;" horiz-adv-x="512"
+d="M156.8 162.9l5.7002 -14.4004h-8.2002c-1.2998 3.2002 -3.09961 7.7002 -3.7998 9.5c-2.5 6.2998 -1.09961 8.40039 0 10c1.90039 2.7002 3.2002 4.40039 3.59961 5.2002c0 -2.2002 0.800781 -5.7002 2.7002 -10.2998zM454.1 144.1
+c-2.09961 -13.7998 -5.69922 -27.0996 -10.5 -39.6992l43 -23.4004l-44.7998 18.7998c-5.2998 -13.2002 -12 -25.5996 -19.8994 -37.2002l34.1992 -30.1992l-36.7998 26.3994c-8.39941 -11.7998 -18 -22.5996 -28.7002 -32.2998l24.9004 -34.7002l-28.0996 31.7998
+c-11 -9.59961 -23.1006 -18 -36.1006 -25.0996l15.7002 -37.2002l-19.2998 35.2998c-13.1006 -6.7998 -27 -12.0996 -41.6006 -15.8994l6.7002 -38.4004l-10.5 37.4004c-14.2998 -3.40039 -29.2002 -5.2998 -44.5 -5.40039l-1.7998 -38.2998l-1.90039 38.4004
+c-15.2998 0.0996094 -30.1992 2 -44.5 5.2998l-10.5996 -37.2998l6.7002 38.1992c-14.6006 3.7002 -28.6006 9.10059 -41.7002 15.8008l-19.2002 -35.1006l15.6006 37c-13 7 -25.2002 15.4004 -36.2002 25.1006l-27.9004 -31.6006l24.7002 34.4004
+c-10.7002 9.7002 -20.4004 20.5 -28.7998 32.2998l-36.5 -26.2002l33.8994 29.9004c-7.89941 11.5996 -14.5996 24.0996 -20 37.2998l-44.3994 -18.7002l42.5996 23.2002c-4.7998 12.7002 -8.39941 26.0996 -10.5 39.9004l-51 -9l50.2998 14.1992
+c-1.09961 8.5 -1.69922 17.1006 -1.69922 25.9004c0 4.7002 0.199219 9.40039 0.5 14.0996l-55.4004 2.90039l56 2.7998c1.2998 13.1006 3.7998 25.7998 7.5 38.1006l-57.0996 16.0996l58.8994 -10.4004c4 12 9.10059 23.5 15.2002 34.4004l-55.0996 30l58.2998 -24.5996
+c6.2998 10.5996 13.5 20.3994 21.5996 29.5996l-49.5 43.5996l53.9004 -38.6992c8.09961 8.59961 17 16.5 26.5996 23.5996l-40 55.5996l45.6006 -51.5996c9.5 6.59961 19.6992 12.2998 30.2998 17.2002l-27.2998 64.8994l33.7998 -62.0996
+c10.5 4.40039 21.3994 7.90039 32.7002 10.4004l-12.4004 70.6992l19.5 -69.1992c11 2.09961 22.2998 3.19922 33.7998 3.39941l3.7002 72.2002l3.59961 -72.2002c11.5 -0.200195 22.8008 -1.39941 33.8008 -3.5l19.5996 69.2998l-12.4004 -70.6992
+c11.3008 -2.60059 22.2002 -6.10059 32.6006 -10.5l33.8994 62.1992l-27.3994 -65.0996c10.5996 -4.90039 20.7002 -10.7002 30.2002 -17.2002l45.7998 51.7998l-40.1006 -55.8994c9.5 -7.10059 18.4004 -15 26.5 -23.6006l54.2002 38.9004l-49.7002 -43.9004
+c8 -9.09961 15.2002 -18.8994 21.5 -29.3994l58.7002 24.7002l-55.5 -30.2002c6.10059 -10.9004 11.1006 -22.2998 15.1006 -34.2998l59.2998 10.3994l-57.5 -16.2002c3.7002 -12.1992 6.2002 -24.8994 7.5 -37.8994l56.2998 -2.7002l-56 -2.7998
+c0.299805 -4.60059 0.5 -9.2998 0.5 -14.1006c0 -8.69922 -0.599609 -17.2998 -1.59961 -25.7998l50.6992 -14.2998zM432.3 175.1c0 97.5 -79 176.5 -176.5 176.5s-176.5 -79 -176.5 -176.5s79 -176.5 176.5 -176.5s176.5 79 176.5 176.5zM408.3 175.1
+c0 -84.2998 -68.2998 -152.6 -152.6 -152.6s-152.601 68.2998 -152.601 152.6c0 84.3008 68.3008 152.601 152.601 152.601s152.6 -68.2998 152.6 -152.601zM195 207c0 -2.09961 1.2998 -3.7998 3.59961 -5.09961c3.30078 -1.90039 6.2002 -4.60059 8.2002 -8.2002
+c2.7998 5.7002 4.2998 9.5 4.2998 11.2002c0 2.19922 -1.09961 4.39941 -3.19922 7c-2.10059 2.5 -3.2002 5.19922 -3.30078 7.69922c-6.5 -6.7998 -9.59961 -10.8994 -9.59961 -12.5996zM154.3 226c0 -2.09961 1.2998 -3.7998 3.60059 -5.09961
+c3.5 -1.90039 6.19922 -4.60059 8.19922 -8.2002c2.80078 5.7002 4.30078 9.5 4.30078 11.2002c0 2.19922 -1.10059 4.39941 -3.2002 7c-2.10059 2.5 -3.2002 5.19922 -3.2998 7.69922c-6.5 -6.7998 -9.60059 -10.8994 -9.60059 -12.5996zM135.3 226
+c0 -2.09961 1.2998 -3.7998 3.60059 -5.09961c3.2998 -1.90039 6.19922 -4.60059 8.19922 -8.2002c2.80078 5.7002 4.30078 9.5 4.30078 11.2002c0 2.19922 -1.10059 4.39941 -3.2002 7c-2.10059 2.5 -3.2002 5.19922 -3.2998 7.69922
+c-6.40039 -6.7998 -9.60059 -10.8994 -9.60059 -12.5996zM340.2 138.1c-8.40039 3 -8.7002 6.80078 -8.7002 15.6006v112.3c-8.2002 -12.5 -14.2002 -18.5996 -18 -18.5996c6.2998 -14.4004 9.5 -23.9004 9.5 -28.3008v-64.2998c0 -2.2002 -2.2002 -6.5 -4.7002 -6.5h-18
+c-2.7998 7.5 -10.2002 26.9004 -15.2998 40.2998c-2 -2.5 -7.2002 -9.19922 -10.7002 -13.6992c2.40039 -1.60059 4.10059 -3.60059 5.2002 -6.30078c2.59961 -6.69922 6.40039 -16.5 7.90039 -20.1992h-9.2002c-3.90039 10.3994 -9.60059 25.3994 -11.7998 31.0996
+c-2 -2.5 -7.2002 -9.2002 -10.7002 -13.7002c2.39941 -1.59961 4.09961 -3.59961 5.2002 -6.2998c0.799805 -2 2.7998 -7.2998 4.2998 -10.9004h-9.2002c-1.5 4.10059 -5.59961 14.6006 -8.40039 22c-2 -2.5 -7.19922 -9.19922 -10.6992 -13.6992
+c2.5 -1.60059 4.2998 -3.60059 5.19922 -6.30078c0.200195 -0.599609 0.5 -1.39941 0.600586 -1.69922h-17.7002c-4.59961 13.8994 -11.4004 27.6992 -11.4004 34.0996c0 2.2002 0.300781 5.09961 1.10059 8.2002c-8.7998 -10.7998 -14 -15.9004 -14 -25
+c0 -7.5 10.3994 -28.2998 10.3994 -33.2998c0 -1.7002 -0.5 -3.30078 -1.39941 -4.90039c-9.60059 12.7002 -15.5 20.7002 -18.7998 20.7002h-12l-11.2002 28c-3.7998 9.59961 -5.7002 16 -5.7002 18.7998c0 3.7998 0.5 7.7002 1.7002 12.2002
+c-1 -1.2998 -3.7002 -4.7002 -5.5 -7.10059c-0.799805 2.10059 -3.10059 7.7002 -4.60059 11.5c-2.09961 -2.5 -7.5 -9.09961 -11.1992 -13.5996c0.899414 -2.2998 3.2998 -8.09961 4.89941 -12.2002c-2.5 -3.2998 -9.09961 -11.7998 -13.5996 -17.7002
+c-4 -5.2998 -5.7998 -13.2998 -2.7002 -21.7998c2.5 -6.7002 2 -7.89941 -1.7002 -14.0996h61.7002c5.5 0 14.2998 -14 15.5 -22c13.2002 16 15.4004 19.5996 16.7998 21.5996h107c3.90039 0 7.2002 1.90039 9.90039 5.7998zM360.3 164.7v101.6
+c-9 -12.5 -15.8994 -18.5996 -20.7002 -18.5996c7.10059 -14.4004 10.7002 -23.9004 10.7002 -28.2998v-66.3008c0 -17.5 8.60059 -20.3994 24 -20.3994c8.10059 0 12.5 0.799805 13.7002 2.7002c-4.2998 1.59961 -7.59961 2.5 -9.90039 3.2998
+c-8.09961 3.2002 -17.7998 7.39941 -17.7998 26z" />
+ <glyph glyph-name="vaadin" unicode="&#xf408;"
+d="M224.5 307.3c1.5 17.6006 4.90039 52.7002 49.7998 52.7002h98.6006c20.6992 0 32.0996 7.7998 32.0996 21.5996v12.3008c0 12.1992 9.2998 22.0996 21.5 22.0996s21.5 -9.90039 21.5 -22.0996v-36.5c0 -42.9004 -21.5 -62 -66.7998 -62h-100.5
+c-30.1006 0 -33 -14.7002 -33 -27.1006c0 -1.2998 -0.100586 -2.5 -0.200195 -3.7002c-0.700195 -12.2998 -10.9004 -22.1992 -23.4004 -22.1992s-22.6992 9.7998 -23.3994 22.1992c-0.100586 1.2002 -0.200195 2.40039 -0.200195 3.7002c0 12.2998 -3 27.1006 -33 27.1006
+h-100.7c-45.2998 0 -66.7998 19.0996 -66.7998 62v36.5c0 12.1992 9.40039 22.0996 21.5996 22.0996c12.2002 0 21.5 -9.90039 21.5 -22.0996v-12.3008c0 -13.7998 11.4004 -21.5996 32.1006 -21.5996h98.5996c44.7998 0 48.2998 -35.0996 49.7998 -52.7002h0.900391z
+M224 -8c-11.5 0 -21.4004 7 -25.7002 16.2998c-1.09961 1.7998 -97.0996 169.5 -98.2002 171.4c-11.8994 19.7002 3.2002 44.2998 27.2002 44.2998c13.9004 0 23.4004 -6.40039 29.7998 -20.2998l66.9004 -117.7l66.9004 117.7c6.5 13.8994 15.8994 20.2998 29.7998 20.2998
+c24 0 39.0996 -24.7002 27.2002 -44.2998c-1.10059 -1.7998 -97.1006 -169.601 -98.2002 -171.4c-4.2998 -9.2998 -14.2002 -16.2998 -25.7002 -16.2998z" />
+ <glyph glyph-name="viber" unicode="&#xf409;" horiz-adv-x="511"
+d="M444 398.1c42.2002 -36.6992 65.5996 -117.899 49.7998 -246.5c-15.2002 -124.6 -109.1 -136.6 -125.7 -142c-7.19922 -2.2998 -70.2998 -18.0996 -152.5 -11.1992c-9.09961 -10.5 -21.0996 -24.3008 -29.7998 -33.7002
+c-15.8994 -17.1006 -25.7002 -33 -42.2998 -27.7998c-13.7998 4.19922 -13 25.0996 -13 25.0996l0.0996094 51.5996h-0.0996094c-120.1 33.8008 -118.4 158.4 -117 224.9s14.2998 120.2 50.9004 156.8c65.7998 60.4004 200.899 52.2998 200.899 52.2998
+c114.601 -0.5 166 -37.7998 178.7 -49.5zM457.9 161c13.2998 107.3 -4.90039 180.5 -40.6006 211.1c-10.7998 9.80078 -57.2002 39 -154.1 39.4004c0 0 -114.7 7.5 -170.4 -43c-31 -30.5996 -41.5 -76.0996 -42.5996 -131.6
+c-1.10059 -55.5 -7.10059 -161.601 94.7002 -189.801c-0.100586 0 -0.100586 0 0 0c0 0 -0.400391 -78.7998 -0.400391 -85.6992c-0.0996094 -10.5 5.7002 -11 11 -5.7002c16.2002 16.2998 68.2002 79 68.2002 79c69.7002 -4.5 125.2 9.2998 131.2 11.2002
+c14 4.5 90.0996 11.0996 103 115.1zM318.9 241.8c0.399414 -8.59961 -12.5 -9.2002 -12.9004 -0.599609c-1.09961 22 -11.4004 32.7002 -32.5996 33.8994c-8.60059 0.5 -7.80078 13.4004 0.699219 12.9004c27.9004 -1.5 43.4004 -17.5 44.8008 -46.2002zM339.2 230.5
+c1 42.4004 -25.5 75.5996 -75.7998 79.2998c-8.5 0.600586 -7.60059 13.5 0.899414 12.9004c58 -4.2002 88.9004 -44.1006 87.7998 -92.5c-0.0996094 -8.60059 -13.0996 -8.2002 -12.8994 0.299805zM386.2 217.1c0.0996094 -8.59961 -12.9004 -8.69922 -12.9004 -0.0996094
+c-0.599609 81.5 -54.8994 125.9 -120.8 126.4c-8.5 0.0996094 -8.5 12.8994 0 12.8994c73.7002 -0.5 133 -51.3994 133.7 -139.2zM374.9 119v-0.200195c-10.8008 -19 -31 -40 -51.8008 -33.2998l-0.199219 0.299805c-21.1006 5.90039 -70.8008 31.5 -102.2 56.5
+c-16.2002 12.7998 -31 27.9004 -42.4004 42.4004c-10.2998 12.8994 -20.7002 28.2002 -30.7998 46.5996c-21.2998 38.5 -26 55.7002 -26 55.7002c-6.7002 20.7998 14.2002 41 33.2998 51.7998h0.200195c9.2002 4.7998 18 3.2002 23.9004 -3.89941
+c0 0 12.3994 -14.8008 17.6992 -22.1006c5 -6.7998 11.7002 -17.7002 15.2002 -23.7998c6.10059 -10.9004 2.2998 -22 -3.7002 -26.5996l-12 -9.60059c-6.09961 -4.89941 -5.2998 -14 -5.2998 -14s17.7998 -67.2998 84.2998 -84.2998c0 0 9.10059 -0.799805 14 5.2998
+l9.60059 12c4.59961 6 15.7002 9.7998 26.5996 3.7002c14.7002 -8.2998 33.4004 -21.2002 45.7998 -32.9004c7 -5.69922 8.60059 -14.3994 3.80078 -23.5996z" />
+ <glyph glyph-name="vimeo" unicode="&#xf40a;"
+d="M403.2 416c24.7002 0 44.7998 -20.0996 44.7998 -44.7998v-358.4c0 -24.7002 -20.0996 -44.7998 -44.7998 -44.7998h-358.4c-24.7002 0 -44.7998 20.0996 -44.7998 44.7998v358.4c0 24.7002 20.0996 44.7998 44.7998 44.7998h358.4zM377 267.2
+c1.90039 42.2002 -13.7998 63.7998 -47.0996 64.7002c-44.9004 1.39941 -75.3008 -23.9004 -91.2002 -76c19.8994 8.5 49.2998 10.7998 45.7998 -22.4004c-1 -11.2002 -8.2998 -27.5 -21.7998 -48.9004c-37.7002 -59.3994 -46.9004 -39.5996 -67.6006 91.6006
+c-5.7998 36.8994 -21.2998 54.0996 -46.5 51.7002c-22.2998 -2 -57.8994 -38.4004 -95.1992 -71.2002l15.1992 -19.6006c14.5 10.1006 23 15.2002 25.4004 15.2002c21 0 31.9004 -54.7002 57.4004 -148c13.0996 -34.8994 29 -52.2998 47.8994 -52.2998
+c30.4004 0 67.7002 28.5996 111.7 85.7998c42.5996 54.7002 64.5996 97.9004 66 129.4z" />
+ <glyph glyph-name="vnv" unicode="&#xf40b;" horiz-adv-x="640"
+d="M104.9 96c-34.1006 0 -46.4004 30.4004 -46.4004 30.4004l-55.9004 111.5s-10.3994 18.0996 10.4004 18.0996h32.7998c10.4004 0 13.2002 -8.7002 18.7998 -18.0996l36.7002 -74.5s5.2002 -13.1006 21.1006 -13.1006c15.8994 0 21.0996 13.1006 21.0996 13.1006
+l36.7002 74.5c5.59961 9.5 8.39941 18.0996 18.7998 18.0996h32.7998c20.7998 0 10.4004 -18.0996 10.4004 -18.0996l-55.7998 -111.5s-12.2002 -30.4004 -46.4004 -30.4004h-35.0996zM499.9 96c-34.1006 0 -46.4004 30.4004 -46.4004 30.4004l-55.9004 111.5
+s-10.3994 18.0996 10.4004 18.0996h32.7998c10.4004 0 13.2002 -8.7002 18.7998 -18.0996l36.7002 -74.5s5.2002 -13.1006 21.1006 -13.1006c15.8994 0 21.0996 13.1006 21.0996 13.1006l36.7998 74.5c5.60059 9.5 8.40039 18.0996 18.7998 18.0996h32.9004
+c20.7998 0 10.4004 -18.0996 10.4004 -18.0996l-55.9004 -111.5s-12.2002 -30.4004 -46.4004 -30.4004h-35.1992zM337.6 256c34.1006 0 46.4004 -30.4004 46.4004 -30.4004l55.9004 -111.5s10.3994 -18.0996 -10.4004 -18.0996h-32.7998
+c-10.4004 0 -13.2002 8.7002 -18.7998 18.0996l-36.7002 74.5s-5.2002 13.1006 -21.1006 13.1006c-15.8994 0 -21.0996 -13.1006 -21.0996 -13.1006l-36.7002 -74.5c-5.59961 -9.39941 -8.39941 -18.0996 -18.7998 -18.0996h-32.9004
+c-20.7998 0 -10.3994 18.0996 -10.3994 18.0996l55.8994 111.5s12.2002 30.4004 46.4004 30.4004h35.0996z" />
+ <glyph glyph-name="whatsapp-square" unicode="&#xf40c;"
+d="M224 325.2c35.2002 0 68.2002 -13.7002 93.2002 -38.7002c24.8994 -24.9004 40.0996 -58 40.0996 -93.2002c0 -72.7002 -60.7002 -131.8 -133.3 -131.8h-0.0996094c-23.7002 0 -46.9004 6.40039 -67.1006 18.4004l-4.7998 2.89941l-49.9004 -13.0996l13.3008 48.5996
+l-3.10059 5c-13.2002 20.9004 -20.2002 45.2002 -20.2002 70.1006c0.100586 72.6992 59.2002 131.8 131.9 131.8zM301.5 136.8c3.2998 9.2002 3.2998 17.2002 2.40039 19.1006c-1 1.59961 -3.60059 2.59961 -7.60059 4.59961s-23.5 11.5996 -27.0996 12.9004
+c-3.60059 1.2998 -6.2998 2 -8.90039 -2c-2.59961 -3.90039 -10.2002 -12.9004 -12.5 -15.5c-2.2998 -2.7002 -4.59961 -3 -8.59961 -1c-23.2998 11.6992 -38.6006 20.7998 -53.9004 47.0996c-4.09961 7 4 6.40039 11.6006 21.5996
+c1.39941 2.60059 0.699219 4.90039 -0.300781 6.90039s-8.89941 21.5 -12.1992 29.4004c-3.2002 7.69922 -6.5 6.69922 -8.90039 6.7998c-2.2998 0.0996094 -5 0.0996094 -7.59961 0.0996094c-2.7002 0 -7 -1 -10.6006 -5c-3.7002 -4 -13.8994 -13.5996 -13.8994 -33.0996
+s14.1992 -38.4004 16.1992 -41c2 -2.60059 28 -42.6006 67.7002 -59.7998c25.1006 -10.8008 34.9004 -11.8008 47.5 -9.90039c7.60059 1.09961 23.4004 9.5 26.7002 18.7998zM400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48
+v352c0 26.5 21.5 48 48 48h352zM223.9 34.7998c87.3994 0 160.1 71.1006 160.1 158.5c0 42.4004 -18 82.2002 -47.9004 112.2c-30 30 -69.7998 46.5 -112.199 46.5c-87.4004 0 -158.5 -71.0996 -158.601 -158.5c0 -28 7.2998 -55.2998 21.2002 -79.2998l-22.5 -82.2002
+l84.0996 22.0996c23.1006 -12.5996 49.2002 -19.2998 75.8008 -19.2998z" />
+ <glyph glyph-name="whmcs" unicode="&#xf40d;"
+d="M448 287l-29.0996 -7l-2.2002 -12.0996l20.8994 -18.8008l-10.2998 -20.0996l-28.7998 8.7998l-7.7998 -8.09961l8.7998 -28l-20.4004 -12.1006l-20.6992 21.6006l-11.6006 -3.5l-6.7002 -28.7998l-22.5996 0.299805l-6.7002 28.5l-11.5996 2.89941l-19.4004 -20.3994
+l-19.8994 11.5996l8.09961 26.9004l-7.2002 8.59961l-29.5996 -7.5l-10.4004 18.5l20.1006 19.9004l-2.40039 12.0996l-28.7998 7.5l0.299805 21.7002l28.5 7.7998l2.90039 10.4004l-20.7002 21l11 19.0996l28.5 -7.5l8.09961 8.40039l-8.09961 27.7002l19.3994 11
+l19.7002 -21l12.1006 3.19922l6.19922 26.4004h22.6006l7 -26.4004l10.7002 -3.19922l21.2998 21l19.0996 -11.6006l-7.5 -28.2002l7.2002 -7.5l29 7.5l10.4004 -19.3994l-20.1006 -20.7002l2.2002 -10.4004l28.5 -8.7998v-21.2998zM328.8 241.8
+c31.4004 0 56.7998 25.2998 56.7998 56.7998c0 31.4004 -25.3994 56.8008 -56.7998 56.8008c-31.3994 0 -56.7998 -25.4004 -56.7998 -56.8008c0 -31.3994 25.5 -56.7998 56.7998 -56.7998zM401.1 225.4l46.9004 -14.5v-39.9004l-55.0996 -13.4004l-4.10059 -22.6992
+l38.9004 -35.3008l-19.2002 -37.8994l-54 16.7002l-14.5996 -15.2002l16.6992 -52.5l-38.2998 -22.7002l-38.8994 40.5l-21.7002 -6.59961l-12.6006 -54l-42.3994 0.5l-12.6006 53.5996l-21.6992 5.59961l-36.4004 -38.3994l-37.4004 21.7002l15.2002 50.5l-13.7002 16.0996
+l-55.5 -14.0996l-19.6992 34.7998l37.8994 37.3994l-4.7998 22.8008l-54 14.0996l0.5 40.9004l53.5 14.6992l5.7002 19.7002l-38.9004 39.4004l20.7002 35.7998l53.5996 -14.0996l15.2002 15.6992l-15.2002 52l36.4004 20.7002l36.7998 -39.3994l22.7002 6.09961l11.5996 52
+h42.4004l11.5996 -45.9004l-22.5996 5.90039l-6.2998 1.7002l-3.2998 -5.7002l-11 -19.0996l-3.30078 -5.60059l4.60059 -4.59961l17.2002 -17.4004l-0.300781 -1l-23.7998 -6.5l-6.2002 -1.7002l-0.0996094 -6.39941l-0.200195 -12.9004
+c-47.5 -10.3994 -83.2998 -52.7998 -83.2998 -103.5c0 -58.2998 47.2998 -105.7 105.7 -105.7c50.5 0 92.7002 35.4004 103.2 82.8008l13.1992 -0.200195l6.90039 -0.100586l1.59961 6.7002l5.60059 24l1.89941 0.600586l17.1006 -17.8008l4.7002 -4.89941l5.7998 3.39941
+l20.3994 12.1006l5.80078 3.5l-2 6.5z" />
+ <glyph glyph-name="wordpress-simple" unicode="&#xf411;" horiz-adv-x="512"
+d="M256 440c136.7 0 248 -111.2 248 -248c0 -136.7 -111.3 -248 -248 -248s-248 111.3 -248 248c0 136.8 111.3 248 248 248zM33 192c0 -88.2002 51.2998 -164.5 125.7 -200.7l-106.4 291.4c-12.3994 -27.7002 -19.2998 -58.4004 -19.2998 -90.7002zM256 -31
+c26 0 50.9004 4.5 74 12.5996c-0.599609 1 -1.09961 2 -1.59961 3.10059l-68.5 187.8l-66.9004 -194.4c20 -5.89941 41.0996 -9.09961 63 -9.09961zM286.7 296.5l80.7002 -239.6l22.1992 74.2998c9.7002 30.8994 17 53 17 72.0996c0 27.6006 -9.89941 46.7002 -18.3994 61.5
+c-11.2998 18.4004 -21.9004 33.9004 -21.9004 52.2998c0 20.5 15.5 39.6006 37.4004 39.6006c1 0 1.89941 -0.100586 2.89941 -0.200195c-39.6992 36.2998 -92.5996 58.5 -150.6 58.5c-77.9004 0 -146.4 -40 -186.3 -100.5
+c5.2998 -0.200195 10.2002 -0.299805 14.3994 -0.299805c23.3008 0 59.4004 2.7998 59.4004 2.7998c12 0.700195 13.4004 -17 1.40039 -18.4004c0 0 -12.1006 -1.39941 -25.5 -2.09961l81.1992 -241.5l48.8008 146.3l-34.7002 95.2002
+c-12 0.700195 -23.4004 2.09961 -23.4004 2.09961c-12 0.700195 -10.5996 19.1006 1.40039 18.4004c0 0 36.7998 -2.7998 58.7002 -2.7998c23.2998 0 59.3994 2.7998 59.3994 2.7998c12 0.700195 13.4004 -17 1.40039 -18.4004c0 0 -12.1006 -1.39941 -25.5 -2.09961z
+M368.1 -0.700195c66.3008 38.6006 110.9 110.4 110.9 192.7c0 38.7998 -9.90039 75.2002 -27.2998 107c1 -7.09961 1.5 -14.7002 1.5 -22.9004c0 -22.6992 -4.2998 -48.0996 -17 -79.8994z" />
+ <glyph glyph-name="xbox" unicode="&#xf412;" horiz-adv-x="512"
+d="M369.9 129.8c44.2998 -54.2998 64.6992 -98.7998 54.3994 -118.7c-7.89941 -15.0996 -56.7002 -44.5996 -92.5996 -55.8994c-29.6006 -9.2998 -68.4004 -13.2998 -100.4 -10.2002c-38.2002 3.7002 -76.8994 17.4004 -110.1 39
+c-27.9004 18.2002 -34.2002 25.7002 -34.2002 40.5996c0 29.9004 32.9004 82.3008 89.2002 142.101c32 33.8994 76.5 73.7002 81.3994 72.5996c9.40039 -2.09961 84.3008 -75.0996 112.301 -109.5zM188.6 304.2c-66.3994 -81.5 -106 -155.4 -120.3 -194.4
+c-9.7998 -26.5 -13.7002 -53 -9.5 -64c2.7998 -7.39941 0.200195 -4.7002 -9.2998 9.90039c-23.2002 35.5 -34.9004 70.3994 -40.5 120.899c-1.90039 16.7002 -1.2002 26.3008 4.2002 60.5c6.7998 42.7002 31.0996 92 60.2998 122.4
+c12.4004 12.9004 13.5 13.2002 28.7002 8.09961c28.2998 -9.5 56.7002 -36.5 86.3994 -63.3994zM500.2 240.7c4.7002 -22.6006 5.09961 -70.9004 0.799805 -93.4004c-3.59961 -18.5 -11.2002 -42.5 -18.5996 -58.7002c-5.5 -12.1992 -19.3008 -35.7998 -25.4004 -43.5
+c-3.09961 -3.89941 -3.09961 -3.89941 -1.40039 4.60059c2.30078 11.2002 -0.599609 31.5996 -7.39941 52.2998c-20.7002 62.9004 -80.5 149 -122.9 202.3c23.2998 21.4004 41 38.2998 64.2998 52.7998c11.8008 7.40039 28.7002 13.9004 36 13.9004
+c7.10059 0 57.7002 -50.2998 74.6006 -130.3zM141.3 405c-14.5996 -0.700195 -14 0.0996094 9.40039 11.2002c81.2002 38.2998 170 27.5996 233.899 -11.7002c-13.3994 0.599609 -43.5 5.90039 -107.399 -25.2002c-11.2002 -5.5 -20.9004 -9.7998 -21.6006 -9.7002
+c-4.59961 0.900391 -66.5996 37.9004 -114.3 35.4004z" />
+ <glyph glyph-name="yandex" unicode="&#xf413;" horiz-adv-x="256"
+d="M153.1 132.2l-87.3994 -196.2h-63.7002l96 209.8c-45.0996 22.9004 -75.2002 64.4004 -75.2002 141.101c-0.0996094 107.399 68 161.1 148.9 161.1h82.2998v-512h-55.0996v196.2h-45.8008zM198.9 401.5h-29.4004c-44.4004 0 -87.4004 -29.4004 -87.4004 -114.6
+c0 -82.3008 39.4004 -108.801 87.4004 -108.801h29.4004v223.4z" />
+ <glyph glyph-name="yandex-international" unicode="&#xf414;" horiz-adv-x="320"
+d="M129.5 -64v166.1l-111 297.9h55.7998l81.7998 -229.7l94.1006 277.7h51.2998l-120.7 -347.8v-164.2h-51.2998z" />
+ <glyph glyph-name="apple-pay" unicode="&#xf415;" horiz-adv-x="640"
+d="M116.9 289.5c-7.5 -8.90039 -19.5 -15.9004 -31.5 -14.9004c-1.5 12 4.39941 24.8008 11.2998 32.6006c7.5 9.09961 20.5996 15.5996 31.2998 16.0996c1.2002 -12.3994 -3.7002 -24.7002 -11.0996 -33.7998zM127.8 272.3c6.7998 -0.5 26.2998 -2.5 38.7998 -21.0996
+c-1 -0.799805 -23.1992 -13.5 -22.8994 -40.2998c0.299805 -32 28 -42.6006 28.2998 -42.9004c-0.200195 -0.799805 -4.40039 -15.0996 -14.5 -29.9004c-8.90039 -13 -18 -25.6992 -32.5 -26c-14 -0.199219 -18.7002 8.40039 -34.7998 8.40039
+c-16 0 -21.2002 -8.09961 -34.5 -8.59961c-14 -0.5 -24.6006 13.7998 -33.5 26.7998c-18.2002 26.2998 -32.1006 74 -13.2998 106.3c9.09961 16.0996 25.6992 26.2002 43.5996 26.5c13.7998 0.299805 26.4004 -9.09961 34.7998 -9.09961
+c8.2002 0 23.1006 10.8994 40.5 9.89941zM228.2 308.5h73.2002c37.6992 0 64.0996 -26 64.0996 -64s-26.7998 -64.2998 -65.0996 -64.2998h-41.9004v-66.6006h-30.2998v194.9zM258.5 283v-77.4004h34.7998c26.4004 0 41.4004 14.2002 41.4004 38.8008
+c0 24.5996 -15 38.5996 -41.2998 38.5996h-34.9004zM420.7 112.1c-28.1006 0 -47.7002 16.8008 -47.7998 42c0 25 19 39.4004 54.0996 41.5l37.7998 2.30078v10.7998c0 15.8994 -10.3994 24.5 -28.8994 24.5c-15.2002 0 -26.3008 -7.90039 -28.6006 -19.9004h-27.2998
+c0.900391 25.2002 24.7002 43.6006 56.7998 43.6006c34.6006 0 57.1006 -18.2002 57.1006 -46.3008v-97h-28v23.4004h-0.600586c-8 -15.2998 -25.5996 -24.9004 -44.5996 -24.9004zM428.9 135.2c20.5 0 36 13 36 31.2002v11l-33.6006 -2.10059
+c-18.8994 -1.09961 -28.7998 -8.2002 -28.7998 -20.5c0 -11.7998 10.2998 -19.5996 26.4004 -19.5996zM531.4 60.5996c-2.30078 0 -9.80078 0.300781 -11.6006 0.700195v23.4004c1.90039 -0.200195 6.5 -0.5 8.90039 -0.5c13.3994 0 20.8994 5.7002 25.5 20.2998
+l2.7998 8.59961l-51.2002 141.9h31.6006l35.5996 -115.1h0.599609l35.6006 115.1h30.7998l-53.0996 -149c-12.1006 -34.0996 -26 -45.4004 -55.5 -45.4004z" />
+ <glyph glyph-name="cc-apple-pay" unicode="&#xf416;" horiz-adv-x="576"
+d="M302.2 229.6c0 -17.1992 -10.5 -27.0996 -29 -27.0996h-24.2998v54.2002h24.3994c18.4004 0 28.9004 -9.7998 28.9004 -27.1006zM349.7 167c0 8.59961 6.89941 13.5 20.2002 14.4004l23.5 1.5v-7.7002c0 -12.7998 -10.8008 -21.9004 -25.2002 -21.9004
+c-11.2998 0 -18.5 5.40039 -18.5 13.7002zM576 369v-352c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h480c26.5 0 48 -21.5 48 -48zM127.8 250.8c8.40039 -0.700195 16.7998 4.2002 22.1006 10.4004
+c5.19922 6.39941 8.59961 15 7.69922 23.7002c-7.39941 -0.300781 -16.5996 -4.90039 -21.8994 -11.3008c-4.7998 -5.5 -8.90039 -14.3994 -7.90039 -22.7998zM188.4 176.3c-0.200195 0.200195 -19.6006 7.60059 -19.8008 30c-0.199219 18.7002 15.3008 27.7002 16 28.2002
+c-8.7998 13 -22.3994 14.4004 -27.0996 14.7002c-12.2002 0.700195 -22.5996 -6.90039 -28.4004 -6.90039c-5.89941 0 -14.6992 6.60059 -24.2998 6.40039c-12.5 -0.200195 -24.2002 -7.2998 -30.5 -18.6006c-13.0996 -22.5996 -3.39941 -56 9.2998 -74.3994
+c6.2002 -9.10059 13.7002 -19.1006 23.5 -18.7002c9.30078 0.400391 13 6 24.2002 6c11.2998 0 14.5 -6 24.2998 -5.90039c10.2002 0.200195 16.5 9.10059 22.8008 18.2002c6.89941 10.4004 9.7998 20.4004 10 21zM323.8 229.7c0 26.5996 -18.5 44.7998 -44.8994 44.7998
+h-51.2002v-136.4h21.2002v46.6006h29.2998c26.7998 0 45.5996 18.3994 45.5996 45zM413.8 206c0 19.7002 -15.7998 32.4004 -40 32.4004c-22.5 0 -39.0996 -12.9004 -39.7002 -30.5h19.1006c1.59961 8.39941 9.39941 13.8994 20 13.8994c13 0 20.2002 -6 20.2002 -17.2002
+v-7.5l-26.4004 -1.59961c-24.5996 -1.5 -37.9004 -11.5996 -37.9004 -29.0996c0 -17.7002 13.7002 -29.4004 33.4004 -29.4004c13.2998 0 25.5996 6.7002 31.2002 17.4004h0.399414v-16.4004h19.6006v68h0.0996094zM516 237.1h-21.5l-24.9004 -80.5996h-0.399414
+l-24.9004 80.5996h-22.2998l35.9004 -99.2998l-1.90039 -6c-3.2002 -10.2002 -8.5 -14.2002 -17.9004 -14.2002c-1.69922 0 -4.89941 0.200195 -6.19922 0.300781v-16.4004c1.19922 -0.400391 6.5 -0.5 8.09961 -0.5c20.7002 0 30.4004 7.90039 38.9004 31.7998z" />
+ <glyph glyph-name="fly" unicode="&#xf417;" horiz-adv-x="383"
+d="M197.8 20.2002c12.9004 -11.7002 33.7002 -33.2998 33.2002 -50.7002c0 -0.799805 -0.0996094 -1.59961 -0.0996094 -2.5c-1.80078 -19.7998 -18.8008 -31.0996 -39.1006 -31c-25 0.0996094 -39.8994 16.7998 -38.7002 35.7998c1 16.2002 20.5 36.7002 32.4004 47.6006
+c2.2998 2.09961 2.7002 2.69922 5.59961 3.59961c3.40039 0 3.90039 -0.299805 6.7002 -2.7998zM331.9 380.7c23.8994 -40 27.7998 -73.2998 20.7998 -112.5c-15.2002 -69.9004 -103.601 -166.5 -155.9 -215.7c-1.7002 -1.59961 -1.39941 -1.40039 -3.5 -2.09961
+l-3.2998 0.0996094c-1.7002 0.599609 -4.5 3.5 -6.2002 5.09961c-58.7998 57.8008 -148.7 151.601 -155.8 233.801c-1.5 71.3994 29.2998 113.399 82.9004 141.3c9.89941 4.09961 37 17.2998 81.0996 17.2998c22 0.200195 51.0996 -4.5 76.5996 -15.2002
+c24.7002 -11.5 47 -26.3994 63.3008 -52.0996zM186.8 96.0996v325.7c-57.8994 -5.5 -72.7002 -89.2002 -69.2998 -136.7c4.09961 -58.2998 41.2998 -137.899 69.2998 -189zM328.7 268c15.7998 54.9004 -10.9004 134.7 -99.7002 153
+c38.2002 -25.5996 49.5996 -85.5 48 -131.4c-2 -58.5996 -39.4004 -140 -67.2002 -191.899c41.6006 42.2998 102.5 113.5 118.9 170.3z" />
+ <glyph glyph-name="nintendo-switch" unicode="&#xf418;"
+d="M95.9004 414.5c5.7998 1 19.0996 1.2998 63.2998 1.40039c54.3994 0.0996094 56 0 56.8994 -1.7002c0.600586 -1.2998 0.900391 -72.1006 0.900391 -222.601c0 -202.899 -0.0996094 -220.8 -1.5 -222.199c-1.40039 -1.40039 -6.7998 -1.5 -58.7002 -1.10059
+c-50.2002 0.200195 -58 0.5 -64.0996 1.90039c-47 10.8994 -80 44 -90.2998 90.5c-1.90039 8.7002 -2 11.2998 -2.2002 122.899c-0.5 121.601 -0.200195 130.801 3.89941 146.5c11.3008 43.4004 47.2002 76.4004 91.8008 84.4004zM179.8 191.9h-0.200195v188
+l-35.8994 -0.100586c-39.7002 -0.0996094 -45.4004 -0.700195 -57.6006 -5.59961c-24.2998 -9.7002 -42.3994 -31.2998 -48 -57.4004c-1.89941 -9.2002 -1.89941 -239.899 0.100586 -249.3c6.2998 -30 28.3994 -53 58.2998 -60.7002
+c7 -1.7002 10.7002 -1.89941 45.5 -2.2998l37.7998 -0.5v187.9zM103.9 323.1c23.2998 4.7002 46 -11.2998 49.8994 -35c3.5 -20.0996 -8.7002 -39.5996 -28.7998 -46.3994c-4.90039 -1.7002 -7.5 -2 -14.7998 -1.7002c-8.10059 0.299805 -9.7002 0.599609 -15.9004 3.7002
+c-9 4.39941 -15.5 11 -19.8994 20c-3 5.89941 -3.40039 7.89941 -3.80078 15.2002c-0.799805 14.0996 4.10059 26.0996 13.8008 34.5c4.7998 4.09961 13.6992 8.59961 19.5 9.69922zM262.1 415.4c0.300781 0.299805 18.8008 0.599609 41.2002 0.5
+c32.2998 0 42.2998 -0.300781 48.7998 -1.40039c48.4004 -8.2998 85.3008 -44.7998 94.5 -93.0996c1.30078 -6.7002 1.5 -22.6006 1.5 -129c0 -133.301 0.100586 -128.7 -5.69922 -146.101c-1.60059 -4.5 -4.7002 -11.7998 -7 -16.2002
+c-18 -34 -51.3008 -56.8994 -89 -61.1992c-8.90039 -1 -80.9004 -1.30078 -83.4004 -0.400391c-1.40039 0.599609 -1.5 21.0996 -1.5 223.4c0 122.699 0.200195 223.199 0.599609 223.5zM361.2 213.3c-23.2002 6.10059 -47.2998 -7.2998 -54.2002 -30
+c-2.2998 -7.2998 -2.40039 -19.2002 -0.400391 -26.2002c6.7002 -23 31.4004 -37 54.6006 -31c14.2998 3.80078 25.3994 14.1006 31.0996 28.8008c3.10059 8 3.2002 20.7998 0.100586 29.5c-4.90039 14.0996 -16.7002 25.0996 -31.2002 28.8994z" />
+ <glyph glyph-name="node" unicode="&#xf419;" horiz-adv-x="640"
+d="M316.3 -4c-2.09961 0 -4.2002 0.599609 -6.09961 1.59961l-19.2002 11.4004c-2.90039 1.59961 -1.5 2.2002 -0.5 2.5c3.7998 1.2998 4.59961 1.59961 8.7002 4c0.399414 0.200195 1 0.0996094 1.39941 -0.0996094l14.8008 -8.80078
+c0.5 -0.299805 1.2998 -0.299805 1.7998 0l57.7998 33.4004c0.5 0.299805 0.900391 0.900391 0.900391 1.59961v66.7002c0 0.700195 -0.300781 1.2998 -0.900391 1.60059l-57.7998 33.2998c-0.5 0.299805 -1.2002 0.299805 -1.7998 0l-57.8008 -33.2998
+c-0.599609 -0.300781 -0.899414 -1 -0.899414 -1.60059v-66.7002c0 -0.599609 0.399414 -1.19922 0.899414 -1.5l15.8008 -9.09961c8.59961 -4.2998 13.8994 0.799805 13.8994 5.7998v65.9004c0 0.899414 0.700195 1.7002 1.7002 1.7002h7.2998
+c0.900391 0 1.7002 -0.700195 1.7002 -1.7002v-65.9004c0 -11.5 -6.2002 -18 -17.0996 -18c-3.30078 0 -6 0 -13.3008 3.60059l-15.1992 8.69922c-3.7002 2.2002 -6.10059 6.2002 -6.10059 10.5v66.7002c0 4.2998 2.2998 8.40039 6.10059 10.5l57.7998 33.4004
+c3.7002 2.09961 8.5 2.09961 12.0996 0l57.7998 -33.4004c3.7002 -2.2002 6.10059 -6.2002 6.10059 -10.5v-66.7002c0 -4.2998 -2.2998 -8.39941 -6.10059 -10.5l-57.7998 -33.3994c-1.7002 -1.10059 -3.7998 -1.7002 -6 -1.7002zM363 61.7998
+c0 -12.5996 -10.5 -19.7998 -29 -19.7998c-25.2998 0 -30.5996 11.5996 -30.5996 21.2998c0 1 0.799805 1.7002 1.69922 1.7002h7.5c0.900391 0 1.60059 -0.599609 1.7002 -1.40039c1.10059 -7.59961 4.5 -11.3994 19.7998 -11.3994
+c12.2002 0 17.4004 2.7002 17.4004 9.2002c0 3.69922 -1.5 6.39941 -20.4004 8.2998c-15.7998 1.59961 -25.5996 5 -25.5996 17.7002c0 11.5996 9.7998 18.5996 26.2998 18.5996c18.5 0 27.6006 -6.40039 28.7998 -20.2002
+c0.100586 -0.5 -0.0996094 -0.899414 -0.399414 -1.2998c-0.299805 -0.299805 -0.700195 -0.5 -1.2002 -0.5h-7.5c-0.799805 0 -1.40039 0.5 -1.59961 1.2998c-1.80078 8 -6.2002 10.6006 -18.1006 10.6006c-13.2998 0 -14.7998 -4.60059 -14.7998 -8.10059
+c0 -4.2002 1.7998 -5.39941 19.7998 -7.7998c17.7998 -2.40039 26.2002 -5.7002 26.2002 -18.2002zM417.5 111.9c0 -6.10059 -5 -11.1006 -11.0996 -11.1006c-6.10059 0 -11.1006 5 -11.1006 11.1006c0 6.2998 5.2002 11.0996 11.1006 11.0996
+c6 0.0996094 11.0996 -4.7998 11.0996 -11.0996zM415.7 111.9c0 5.19922 -4.2002 9.2998 -9.40039 9.2998c-5.09961 0 -9.2998 -4.10059 -9.2998 -9.2998c0 -5.2002 4.2002 -9.40039 9.2998 -9.40039c5.2002 0.0996094 9.40039 4.2998 9.40039 9.40039zM411.2 105.7
+h-2.60059c-0.0996094 0.599609 -0.5 3.7998 -0.5 3.89941c-0.199219 0.700195 -0.399414 1.10059 -1.2998 1.10059h-2.2002v-5h-2.39941v12.5h4.2998c1.5 0 4.40039 0 4.40039 -3.2998c0 -2.30078 -1.5 -2.80078 -2.40039 -3.10059
+c1.7002 -0.0996094 1.7998 -1.2002 2.09961 -2.7998c0.100586 -1 0.300781 -2.7002 0.600586 -3.2998zM408.4 114.5c0 1.7002 -1.2002 1.7002 -1.80078 1.7002h-2v-3.5h1.90039c1.59961 0 1.90039 1.09961 1.90039 1.7998zM137.3 257l-0.200195 -95
+c0 -1.2998 -0.699219 -2.59961 -1.7998 -3.2002c-1.09961 -0.700195 -2.59961 -0.700195 -3.7002 0l-36.3994 20.9004c-2.2998 1.2998 -3.7002 3.7998 -3.7002 6.39941v44.4004c0 2.59961 -1.40039 5.09961 -3.7002 6.40039l-15.5 8.89941
+c-1.09961 0.700195 -2.39941 1 -3.7002 1c-1.2998 0 -2.5 -0.299805 -3.69922 -1l-15.5 -8.89941c-2.30078 -1.30078 -3.7002 -3.80078 -3.7002 -6.40039v-44.4004c0 -2.59961 -1.40039 -5 -3.7002 -6.39941l-36.4004 -20.9004
+c-1.19922 -0.700195 -2.59961 -0.700195 -3.69922 0c-1.10059 0.700195 -1.80078 1.90039 -1.80078 3.2002l-0.0996094 95c0 2.59961 1.40039 5.09961 3.7002 6.40039l61.2002 35.2998c1.09961 0.599609 2.19922 1 3.39941 1h0.600586
+c1.19922 -0.100586 2.39941 -0.400391 3.39941 -1l61.2998 -35.2998c2.30078 -1.30078 3.7002 -3.7002 3.7002 -6.40039zM472.5 360.7v-176.4c0 -2.59961 -1.40039 -5.09961 -3.7002 -6.39941l-61.2998 -35.4004c-2.2998 -1.2998 -5.09961 -1.2998 -7.40039 0
+l-61.2998 35.4004c-2.2998 1.2998 -3.7002 3.7998 -3.7002 6.39941v70.7998c0 2.60059 1.40039 5.10059 3.7002 6.40039l61.2998 35.4004c2.30078 1.2998 5.10059 1.2998 7.40039 0l15.2998 -8.80078c1.7002 -1 3.90039 0.300781 3.90039 2.2002v94
+c0 2.7998 3 4.60059 5.5 3.2002l36.5 -20.4004c2.2998 -1.19922 3.7998 -3.69922 3.7998 -6.39941zM426.5 231.8c0 0.700195 -0.400391 1.2998 -0.900391 1.60059l-21 12.1992c-0.599609 0.300781 -1.2998 0.300781 -1.89941 0l-21 -12.1992
+c-0.600586 -0.300781 -0.900391 -0.900391 -0.900391 -1.60059v-24.2998c0 -0.700195 0.400391 -1.2998 0.900391 -1.59961l21 -12.1006c0.599609 -0.299805 1.2998 -0.299805 1.7998 0l21 12.1006c0.599609 0.299805 0.900391 0.899414 0.900391 1.59961v24.2998h0.0996094
+zM636.3 232.5l-36.7002 -21.2998c-2.5 -1.40039 -5.59961 0.399414 -5.59961 3.2002v17.3994c0 1.2998 -0.799805 2.5 -1.90039 3.2002l-19.1992 11.0996c-1.10059 0.700195 -2.60059 0.700195 -3.7002 0l-19.2002 -11.0996
+c-1.2002 -0.700195 -1.90039 -1.90039 -1.90039 -3.2002v-22.2002c0 -1.2998 0.700195 -2.5 1.90039 -3.19922l61.7002 -35.4004c2.5 -1.40039 2.5 -5 0 -6.40039l-36.7998 -20.5c-2.30078 -1.2998 -5.10059 -1.2998 -7.30078 0l-60.8994 34.7002
+c-2.2998 1.2998 -3.7002 3.7002 -3.7002 6.40039v70.7998c0 2.59961 1.40039 5.09961 3.7002 6.40039l61.2998 35.3994c2.2998 1.2998 5.09961 1.2998 7.40039 0l60.8994 -35.3994c2.2998 -1.30078 3.7002 -3.80078 3.7002 -6.40039v-17.0996
+c0 -2.60059 -1.40039 -5.10059 -3.7002 -6.40039zM559 229l11.7998 6.7998c0.400391 0.299805 1 0.299805 1.40039 0l11.7998 -6.7998c0.400391 -0.200195 0.700195 -0.700195 0.700195 -1.2002v-13.5996c0 -0.5 -0.299805 -0.900391 -0.700195 -1.2002l-11.7998 -6.7998
+c-0.400391 -0.299805 -1 -0.299805 -1.40039 0l-11.7998 6.7998c-0.400391 0.200195 -0.700195 0.700195 -0.700195 1.2002v13.5996c0 0.5 0.299805 0.900391 0.700195 1.2002zM304.8 185.5c0 -0.599609 -0.0996094 -1.2002 -0.200195 -1.7002
+c-0.5 -2 -1.7998 -3.7002 -3.59961 -4.7002l-61 -35.1992c-2.2002 -1.30078 -5 -1.40039 -7.40039 0l-61.1992 35.1992c-2.10059 1.2002 -4 3.60059 -4 6.40039v70.4004c0 2.69922 1.59961 5.09961 3.89941 6.39941l61.1006 35.2002
+c2.39941 1.40039 5.2998 1.2002 7.39941 0l61.1006 -35.2002c2.2998 -1.2998 3.89941 -3.7998 3.89941 -6.39941v-70.4004zM230.5 310.4l-0.799805 -0.5h1.09961zM306.7 180.2l-0.400391 0.700195v-0.900391z" />
+ <glyph glyph-name="osi" unicode="&#xf41a;" horiz-adv-x="495"
+d="M0 188.8c2.2998 135.8 97.4004 232.4 213.8 248.101c138.8 18.5996 255.601 -75.8008 278 -201.101c21.2998 -118.8 -44 -230 -151.6 -274c-9.2998 -3.7998 -14.4004 -1.7002 -18 7.7002c-17.7998 46.2998 -35.6006 92.7002 -53.4004 139
+c-3.09961 8.09961 -1 13.2002 7 16.7998c24.2002 11 39.2998 29.4004 43.2998 55.7998c6.40039 42.4004 -24.5 78.7002 -64.5 82.2002c-39 3.40039 -71.7998 -23.7002 -77.5 -59.7002c-5.19922 -33 11.1006 -63.6992 41.9004 -77.6992
+c9.59961 -4.40039 11.5 -8.60059 7.7998 -18.4004c-17.8994 -46.5996 -35.7998 -93.2002 -53.7002 -139.9c-2.59961 -6.89941 -8.2998 -9.2998 -15.5 -6.5c-52.5996 20.3008 -101.399 61 -130.8 119c-24.8994 49.2002 -25.2002 87.7002 -26.7998 108.7zM20.9004 190.7
+c0.399414 -6.60059 0.599609 -14.2998 1.2998 -22.1006c6.2998 -71.8994 49.5996 -143.5 131 -183.1c3.2002 -1.5 4.39941 -0.799805 5.59961 2.2998c14.9004 39.1006 29.9004 78.2002 45 117.3c1.2998 3.30078 0.600586 4.80078 -2.39941 6.7002
+c-31.6006 19.9004 -47.3008 48.5 -45.6006 86c1 21.6006 9.2998 40.5 23.7998 56.2998c30 32.7002 77 39.8008 115.5 17.6006c31.9004 -18.4004 49.5 -53.7998 45.2002 -90.4004c-3.59961 -30.5996 -19.2998 -53.8994 -45.7002 -69.7998
+c-2.69922 -1.59961 -3.5 -2.90039 -2.2998 -6c15.2002 -39.2002 30.2002 -78.4004 45.2002 -117.7c1.2002 -3.09961 2.40039 -3.7998 5.59961 -2.2998c35.5 16.5996 65.2002 40.2998 88.1006 72c34.7998 48.2002 49.0996 101.9 42.2998 161
+c-13.7002 117.5 -119.4 214.8 -255.5 198c-106.1 -13 -195.3 -102.5 -197.1 -225.8z" />
+ <glyph glyph-name="react" unicode="&#xf41b;" horiz-adv-x="512"
+d="M418.2 270.8c54.3994 -18.7002 93.7998 -48.0996 93.7998 -78.3994c0 -31.7002 -41.7998 -62.6006 -99.5 -81.7002c-3.09961 -1 -6.2002 -2 -9.40039 -2.90039c1.10059 -4.59961 2.10059 -9.09961 3 -13.5c11.4004 -57.5996 2.60059 -104.899 -24.3994 -120.5
+c-26.1006 -15.0996 -68.4004 -0.200195 -111.2 36.6006c-4.59961 4 -9.2002 8.09961 -13.5996 12.3994c-3.5 -3.39941 -7 -6.59961 -10.5 -9.7002c-44.2002 -38.6992 -89.6006 -54.6992 -116.601 -39.0996c-26.2002 15.0996 -34.3994 59.0996 -23.8994 114.6
+c1.19922 6.10059 2.5 12 4 18c-4.60059 1.30078 -9.10059 2.80078 -13.6006 4.30078c-55.5 19 -96.2998 50.2998 -96.2998 81.5c0 30.1992 38.2998 59.3994 91.7002 77.8994c5.89941 2.10059 12.2002 4.10059 18.5996 5.90039
+c-1.39941 5.59961 -2.59961 11.0996 -3.7002 16.7002c-11 56.3994 -3.19922 101.5 23 116.699c27.3008 15.9004 72.9004 -1.09961 118.4 -41.5c2.7998 -2.5 5.59961 -5.09961 8.2998 -7.69922c4 3.89941 8.2002 7.7998 12.5 11.5
+c43.4004 37.7998 86.2998 53.5 112.601 38.3994c27.2998 -15.7998 35.3994 -63.7002 23.0996 -123.3c-0.799805 -3.7002 -1.59961 -7.40039 -2.5 -11.0996c5.40039 -1.60059 10.7998 -3.30078 16.2002 -5.10059zM282.9 355.7c-4 -3.5 -7.80078 -7 -11.7002 -10.7002
+c15.3994 -16.7002 29.5996 -34.5 42.5996 -53.0996c22.6006 -2 45.1006 -5.60059 67.2998 -10.6006c0.900391 3.2998 1.60059 6.60059 2.30078 10c10.5996 51.5 4.09961 90.7002 -12.8008 100.4c-15.7998 9.09961 -50.5 -3.60059 -87.6992 -36zM167.2 140.5
+c-5 8.59961 -9.7002 17.2998 -14.2998 26.0996c-6.40039 -15.1992 -11.9004 -30.0996 -16.3008 -44.5c15.3008 -3.2998 30.8008 -5.7998 46.4004 -7.5c-5.5 8.5 -10.7002 17.2002 -15.7998 25.9004zM136.9 260.8c4.39941 -14.0996 9.69922 -28.7002 16 -43.5996
+c4.5 8.7998 9.2998 17.5 14.1992 26c4.90039 8.59961 10.1006 17.0996 15.4004 25.3994c-15.9004 -2 -31.2002 -4.59961 -45.5996 -7.7998zM164.3 191.9c6.7002 -13.8008 13.7998 -27.3008 21.5 -40.6006s15.9004 -26.2998 24.6006 -39
+c14.6992 -0.899414 29.8994 -1.39941 45.5996 -1.39941s31.2002 0.5 46.0996 1.59961c8.5 12.7998 16.6006 25.7002 24.2002 39c7.7002 13.4004 14.9004 27 21.6006 40.7998c-6.80078 13.7002 -14 27.2002 -21.7002 40.4004s-15.7998 26.0996 -24.2998 38.7002
+c-14.9004 1.09961 -30.3008 1.69922 -45.9004 1.69922s-30.9004 -0.599609 -45.9004 -1.69922c-8.59961 -12.7002 -16.7998 -25.6006 -24.3994 -38.9004c-7.60059 -13.2998 -14.7998 -26.7998 -21.4004 -40.5996zM344.9 140.7c-5 -8.60059 -10.1006 -17.2002 -15.5 -25.6006
+c15.7998 1.80078 31.5 4.5 47 8c-4.90039 15.1006 -10.5 29.8008 -16.9004 44.3008c-4.7002 -9 -9.5 -17.9004 -14.5996 -26.7002zM359.3 217.2c6.10059 14.2002 11.5 28.5996 16.1006 43.3994c-14.4004 3.30078 -29.8008 6 -45.9004 8
+c5.2998 -8.2998 10.4004 -16.6992 15.2998 -25.1992c5 -8.60059 9.7998 -17.4004 14.5 -26.2002zM256.2 329.7c-10 -10.9004 -20.1006 -22.9004 -29.9004 -35.7998c19.7998 0.899414 39.7002 0.899414 59.5 0c-9.2002 12.3994 -19.0996 24.3994 -29.5996 35.7998zM140.2 391
+c-15.7998 -9.09961 -22 -45.5996 -12.6006 -94c1.10059 -5.2002 2.2002 -10.4004 3.5 -15.5c22.2002 4.90039 44.6006 8.40039 67.2002 10.4004c13.1006 18.5996 27.4004 36.3994 42.9004 53.0996c-2.60059 2.40039 -5.10059 4.7998 -7.60059 7
+c-39.2998 34.7998 -76.5996 48.7998 -93.3994 39zM115.7 127.4c6.89941 22 15.2002 43.5996 24.7998 64.5c-9.5 20.5996 -17.7002 41.8994 -24.5 63.5996c-5.7998 -1.7002 -11.5996 -3.5 -17.2998 -5.5c-45.6006 -15.9004 -77.2002 -39.2998 -77.2002 -57.5996
+c1.90039 -12.1006 8.7002 -22.9004 18.7998 -29.9004c17.5 -13.9004 41.7002 -24.5 63 -31.2002c4.10059 -1.39941 8.2002 -2.7002 12.4004 -3.89941zM232.3 29.4004c3.2002 2.7998 6.40039 5.7998 9.60059 8.89941c-15.5 16.7998 -30 34.7002 -43.2002 53.4004
+c-22.9004 1.7002 -45.5 5 -67.9004 9.7998c-1.39941 -5.5 -2.59961 -11.0996 -3.7002 -16.7002c-9 -47.5 -2.39941 -82.7998 13.5 -92c11.4004 -4.5 24.2002 -4 35.3008 1.2998c20.7998 8.2002 39.8994 20.2002 56.3994 35.3008zM256.8 53.7002
+c10.5 11.5996 20.4004 23.7002 29.6006 36.3994c-10 -0.5 -20.1006 -0.699219 -30.4004 -0.699219c-10 0 -19.9004 0.199219 -29.5 0.599609c9.90039 -13.0996 20.0996 -25.2998 30.2998 -36.2998zM387.5 23.7002c3.2002 22.2002 2.40039 44.7002 -2.5 66.2998
+c-0.799805 4 -1.7002 8.09961 -2.7002 12.2002c-22.5 -5.10059 -45.2998 -8.60059 -68.2002 -10.5c-12.7998 -18.7998 -26.8994 -36.7002 -42.1992 -53.6006c4.2998 -4 8.5 -7.89941 12.6992 -11.5c36.6006 -31.3994 70.5 -43.3994 86.4004 -34.1992
+c9.59961 7.69922 15.5996 19.0996 16.5 31.2998zM405.7 131.2c49.8994 16.5 84.7998 41.7998 84.7998 61.3994c0 18.2002 -32.7002 42 -79.2998 58c-4.7998 1.60059 -9.7998 3.2002 -15 4.7002c-6.7998 -21.5 -14.9004 -42.5 -24.5 -62.8994
+c9.89941 -20.7002 18.5 -42 25.5 -63.8008c2.89941 0.800781 5.7002 1.7002 8.5 2.60059zM256 146.2c-25.2998 0 -45.7998 20.5 -45.7998 45.7998s20.5 45.7998 45.7998 45.7998s45.7998 -20.5 45.7998 -45.7998s-20.5 -45.7998 -45.7998 -45.7998z" />
+ <glyph glyph-name="autoprefixer" unicode="&#xf41c;" horiz-adv-x="640"
+d="M318.4 432l164.1 -480h-77.5l-25.2002 81.4004h-119.5l-25.3994 -81.4004h-77.5zM278.1 90.0996h83.6006l-40.9004 130.4h-1.5zM640 43l-158.5 -9.5l-19.4004 56.5l167.9 -15.5996zM177.9 90l-19.4004 -56.4004l-158.5 9.40039l10 31.2998z" />
+ <glyph glyph-name="less" unicode="&#xf41d;" horiz-adv-x="640"
+d="M612.7 229c0 -11 6.7998 -22.5996 27.2998 -23.2998v-27.2998c-20.5 -1 -27.2998 -12.6006 -27.2998 -23.6006c0 -20.3994 3.2002 -32 3.2002 -54.5996c0 -34.2002 -12.7002 -45.2002 -40.5 -45.2002h-20.5v25.2002h6.2998v0.5c13.5996 0 17.2998 4.7002 17.2998 22.5996
+c0 17.2998 -1.59961 32.6006 -1.59961 51.5c0 24.2002 7.7998 33.6006 23.5996 37.2998v1.60059c-15.7002 3.7002 -23.5996 13.0996 -23.5996 37.2998c0 18.9004 1.59961 35.2002 1.59961 51.5c0 17.4004 -3.09961 22.0996 -17.2998 22.0996h-6.2998v24.2002h20.5
+c27.8994 0 40.5 -11 40.5 -45.2002c0 -22 -3.2002 -34.0996 -3.2002 -54.5996zM507.1 197c20.5 -6.7998 43 -18.9004 43 -47.7998c0 -28.9004 -22.5996 -51 -64.5996 -51c-20 0 -44.0996 9 -59.9004 22.0996l21 30.5c14.2002 -11 27.4004 -16.2998 40.5 -16.2998
+c14.2002 0 20.5 5.2002 20.5 13.0996c0 10.5 -15.7998 15.8008 -32.0996 22.1006c-18.9004 7.2998 -41.5 20.5 -41.5 46.2002c0 28.8994 24.2002 49.3994 59.9004 49.3994c24.1992 0 42.0996 -10.5 55.1992 -20.5l-21 -27.7998c-11.5 8.40039 -22 13.0996 -33.5996 13.0996
+s-17.9004 -4.69922 -17.9004 -12.5996c0 -10.5 14.7002 -14.2002 30.5 -20.5zM148.2 137.6c1.59961 0 3.09961 0 6.2002 0.800781l5.2998 -34.2002c-5.7002 -2.10059 -13.6006 -3.7002 -23.6006 -3.7002c-32.0996 0 -43.0996 21 -43.0996 53.0996v150.801h-14.0996
+c-13.6006 0 -17.3008 -4.80078 -17.3008 -22.1006s1.60059 -32.5996 1.60059 -51.5c0 -24.2002 -7.7998 -33.5996 -23.6006 -37.2998v-1.59961c15.7002 -3.7002 23.6006 -13.1006 23.6006 -37.3008c0 -19.3994 -1.60059 -34.1992 -1.60059 -51.5
+c0 -17.2998 4.2002 -22.5996 17.3008 -22.5996h6.2998v-24.2002h-20.5c-27.9004 0 -40.5 11 -40.5 45.2002c0 22.5996 3.2002 34.2002 3.2002 53.5996c0 11 -6.80078 22.6006 -27.3008 23.1006v27.2998c20.5 1 27.3008 12.5996 27.3008 23.5996
+c0 19.4004 -3.2002 32 -3.2002 54.6006c0 34.2002 12.5996 45.2002 41 45.2002h74.5996v-178.2c0 -9.90039 4.7002 -13.1006 8.40039 -13.1006zM379.9 197c20.5 -6.7998 43.0996 -18.9004 43 -47.7998c0 -28.9004 -22.6006 -51 -64.6006 -51
+c-20 0 -44.0996 9 -59.8994 22.0996l20.5 30.5c14.1992 -11 27.3994 -16.2998 40.5 -16.2998c14.1992 0 20.5 5.2002 20.5 13.0996c0 10.5 -15.8008 15.8008 -32.1006 22.1006c-18.8994 7.2998 -41.5 20.5 -41.5 46.2002c0 28.8994 24.2002 49.3994 59.9004 49.3994
+c24.2002 0 42.0996 -10.5 55.2002 -20.5l-21 -27.7998c-11.5 8.40039 -22 13.0996 -33.6006 13.0996c-11.5996 0 -17.8994 -4.69922 -17.8994 -12.5996c0 -10.5 14.6992 -14.2002 31 -20.5zM224.9 265.8c44.0996 0 67.2998 -33.0996 66.6992 -75.7002
+c0 -8.39941 -1.09961 -15.6992 -1.59961 -19.3994h-95.2002c4.2002 -24.2002 20.5 -34.2002 41.5 -34.2002c11.6006 0 22.6006 3.2002 34.2002 10l15.7998 -27.7998c-16.2998 -11.1006 -37.2998 -17.9004 -56.2002 -17.9004c-45.0996 0 -79.2998 30.5 -79.2998 82.5
+c-1 50.4004 35.7002 82.5 74.1006 82.5zM194.9 199.6h56.7998c0 17.9004 -7.40039 31 -26.2998 31c-14.7002 0 -27.3008 -10 -30.5 -31z" />
+ <glyph glyph-name="sass" unicode="&#xf41e;" horiz-adv-x="640"
+d="M301.9 69.0996c-0.300781 -0.599609 -0.600586 -1.09961 0 0zM551.1 156.1c57.9004 0.300781 90.6006 -37.0996 89 -71.0996c-1.09961 -26.9004 -25.6992 -37.9004 -30.2998 -38.7002c-3.2998 -0.599609 -5.09961 -0.700195 -5.59961 1.90039
+c-0.299805 1.7998 0.899414 2.7002 4.7998 5.09961c3.90039 2.40039 15.5996 10.4004 17.7002 25c2.09961 14.5 -8.7998 49.2998 -64.5 55.7998c-26 3 -46.4004 -0.599609 -62.1006 -7.19922c2.90039 -7.60059 5.10059 -15.5 5.40039 -23.4004
+c0.799805 -17.5 -11.2998 -30.4004 -23.7998 -39.5996c-7.2998 -5.40039 -15.1006 -8.90039 -21.6006 -11.1006c-5.19922 -2.2002 -12.1992 -4.5 -17.0996 -3.5c-10.9004 2.2002 -16.7002 11.7998 -9.2998 33.1006c4 11.5 15.5 29 34.0996 44.0996
+c-4.2998 8.7002 -9 17.5996 -11.3994 25.7002c-4.80078 16.0996 -6.2002 25.7998 -6.2002 25.7998s-15.2998 -31.7002 -35.1006 -60.5996c-1.09961 -1.7002 -2.2998 -3.40039 -3.39941 -5c3.7998 -9 6.89941 -18.6006 7.2998 -28.2002
+c0.700195 -17.4004 -6.90039 -30.6006 -19.4004 -39.7998c-6.7998 -4.90039 -14 -8.40039 -20.1992 -10.6006c-3.90039 -1.7998 -12 -4.59961 -23.5 -5.39941c-6.30078 -0.5 -12.3008 -0.100586 -15.7002 2.5c-4.60059 3.39941 -5.2002 7.7998 -2.7998 13.6992
+c2 5 17.1992 22.4004 30 37.6006c3.5 4.2002 6.89941 8.5 9.89941 12.5c-0.0996094 0.0996094 -0.0996094 0.200195 -0.0996094 0.200195s2.2998 3 6.09961 8.19922c-4.7002 10.1006 -10.5996 20.5 -13.3994 30c-4.80078 16.1006 -6.2002 25.8008 -6.2002 25.8008
+s-15.5 -39.7002 -31.7002 -71.5c-12.5 -24.6006 -20.7998 -39.5 -24.5996 -46v-0.300781s-0.5 -0.899414 -1.5 -2.39941c-0.5 -0.799805 -0.700195 -1.2002 -0.700195 -1.2002v0.0996094c-4.2002 -6.19922 -13.6006 -18.2998 -23 -18.2998
+c-25.7002 0 -16.2998 52.2002 -16.2998 52.2002s-7.5 -19.2998 -16 -35.9004c-6.90039 -13.5996 -13.1006 -25 -26.9004 -25c-3.90039 0 -10.2002 0.100586 -15.4004 5c-11.7998 11.2002 -20.8994 39.7002 -19.0996 61.7002c1.5 18.7998 4.40039 31.7998 8.40039 42.6006
+c-7.10059 -3.90039 -15.2002 -8.40039 -23.5 -13.3008c-4.30078 -2.5 -8.60059 -5 -12.8008 -7.5c0.100586 -0.299805 0.300781 -0.5 0.400391 -0.799805c10.5996 -20.3994 13.4004 -65.2002 -9.59961 -99.5s-65.8008 -55.2002 -107.601 -43.5996
+c-13.3994 3.7998 -33.7998 31.5996 -16.2998 70.3994c15.5 34.2002 77.4004 66.6006 93.7002 74.7002c1.39941 0.799805 2.89941 1.60059 4.5 2.5c-32.5 28.4004 -113.7 66.7998 -125.101 125.7c-3.19922 16.5996 4.60059 56.2998 53.3008 101.9
+c41 38.2998 98 67.6992 150.699 86.3994c88.5 31.4004 182 12.9004 196.4 -43.5c14.0996 -55.5 -34 -121.8 -95.7998 -145.6c-55 -21.2998 -100.5 -17.9004 -119.2 -11.7998c-21.2998 7 -33.7998 21 -36.7998 28.8994c-1.2002 3.10059 -3.2998 8.2998 0 10.1006
+c2 1.09961 2.7998 0.799805 8.09961 -5.10059c5.10059 -5.59961 25.5 -20.5996 64.2998 -16.2998c101.801 11.4004 163.101 90.5 143.7 133c-13.5 29.7998 -91.8994 43.2002 -189.899 -5.59961c-119.601 -59.6006 -126.101 -108.7 -127.101 -127.4
+c-2.7998 -51.2998 63.2998 -78.2998 99.1006 -116.5c0.5 -0.5 0.899414 -1 1.39941 -1.5c6.7002 3.7002 13.7998 7.59961 20.7002 11.4004c18 9.89941 35.0996 19.1992 43 23.5c12.5996 18.2998 38.2002 38.5 56.5996 38.5c29.5 0 19.4004 -42.4004 19.4004 -42.4004
+s0.599609 2 1.40039 2c0.799805 0 4.09961 5.5 13.1992 2.2002c9.40039 -3.5 7.2002 -10 7.30078 -10.7002c0.0996094 -1.2998 -11 -38.9004 -15.7002 -63.0996c-2.2002 -11.5 -0.900391 -19.9004 -0.299805 -19.9004c0.899414 0 2.7998 2.90039 4.5 6.09961v0.100586
+s1.2998 2.39941 3.5 6.7002c0 0.199219 -0.200195 -0.300781 -0.5 -0.800781c0.199219 0.400391 0.5 0.900391 0.899414 1.7002c2.60059 5 6.2002 12.4004 10.4004 21.6006c8.2002 18.0996 39.5 87.6992 42.0996 95.3994c2.60059 7.7002 4 15.7002 5.2998 19.1006
+c1.30078 3.39941 12.4004 6 25.3008 5.89941c12.8994 -0.0996094 14.1992 -5.59961 14.2998 -6.7002c0.0996094 -1.09961 -6.2002 -16.3994 -7.60059 -27.1992c-1.39941 -10.8008 -0.0996094 -16.2002 1.10059 -25.3008c0.799805 -6 4.5 -13.5 8.89941 -22
+c13.3008 21.8008 36.8008 63.6006 39.1006 75.3008c1.5 8 4 15.6992 5.2998 19.0996s12.4004 6 25.2998 5.90039c12.9004 -0.100586 14.2002 -5.60059 14.2998 -6.7002c0.100586 -1.10059 -6.19922 -16.4004 -7.59961 -27.2002s-0.0996094 -16.2002 1.09961 -25.2998
+c1 -7.7998 7.10059 -18.2002 13 -30.1006c16.2002 8 35.6006 13.4004 58 13.5zM121.8 11.4004c19.4004 21.0996 27.4004 47.8994 19.1006 78.2998c-1 -0.600586 -2 -1.10059 -2.90039 -1.7002c0 0 -0.400391 -0.200195 -1.2002 -0.700195
+c-4.7998 -2.89941 -8.7002 -5.2998 -11.3994 -6.89941c-11.8008 -7.40039 -29.6006 -19.4004 -43.4004 -32.4004c-22.7002 -21.4004 -27.4004 -51 -15.5 -57.9004c11.0996 -6.39941 36.9004 1.2002 55.2998 21.3008zM256.2 102.8c4 9.7998 19.7002 53.2998 16.2002 59.2002
+c-2.60059 4.5 -13.7002 0.900391 -23.8008 -10.4004c-6.2998 -7 -16.8994 -25 -21.8994 -40.0996c-9.90039 -30 -5.60059 -60.5 1.39941 -62.2998c8.2002 -2.10059 21.7002 37.8994 28.1006 53.5996zM367.2 49.7998c7.7998 4.7998 25 16.9004 25.0996 34.7998
+c0 0.600586 -0.0996094 1.10059 -0.0996094 1.60059c-4 -5.2002 -7.7002 -9.90039 -10.9004 -13.9004c-5.5 -6.7998 -19.3994 -21.7002 -19.3994 -21.7002s-2 -1.89941 -1.10059 -2.39941c1.2002 -0.700195 3.7002 0.200195 6.40039 1.59961zM452.8 69.2998
+c9.7002 3.5 25.7998 11.9004 25.9004 34.4004c0 3.39941 -0.700195 7 -1.90039 10.7998c-10.3994 -9.2002 -16.3994 -18.7998 -19 -24.5c-6.7002 -14.5996 -7 -19.2998 -5 -20.7002z" />
+ <glyph glyph-name="vuejs" unicode="&#xf41f;"
+d="M356.9 383.7h91.0996l-224 -383.7l-224 383.7h176l48 -88.6006l56 88.6006h76.9004zM55.7002 351.7l168.3 -288.2l168.2 288.2h-53.7998l-114.4 -198.2l-114.5 198.2h-53.7998z" />
+ <glyph glyph-name="angular" unicode="&#xf420;" horiz-adv-x="415"
+d="M169.7 179.9l38.0996 91.5996l38.1006 -91.5996h-76.2002zM207.8 416l207.8 -74.4004l-31.7998 -275.699l-176 -97.9004l-176 97.9004l-31.7998 275.699zM338 74.2002l-130.2 292.3l-130.1 -292.3h48.7002l26.1992 65.3994h110.601l26.2002 -65.3994h48.5996z" />
+ <glyph glyph-name="aviato" unicode="&#xf421;" horiz-adv-x="640"
+d="M107.2 164.5l-19 41.7998h-52.1006l-19 -41.7998h-17.0996l62.2002 131.4l62.2002 -131.4h-17.2002zM62.2002 262.6l-19.6006 -42.5h39.2002zM174.9 160.2l-62.2002 131.399h17.0996l45.1006 -96l45.0996 96h17zM255.5 164.5v127.1h15.5v-127.1h-15.5zM464.6 280.1
+v-115.6h-17.2998v115.6h-41.2002v11.5h99.6006v-11.5h-41.1006zM640 229.2c0 -9.2002 -1.7002 -17.7998 -5.09961 -25.7998c-3.40039 -8 -8.2002 -15.1006 -14.2002 -21.1006s-13.1006 -10.7998 -21.1006 -14.2002c-8 -3.39941 -16.5996 -5.09961 -25.7998 -5.09961
+s-17.7998 1.7002 -25.7998 5.09961c-8 3.40039 -15.0996 8.2002 -21.0996 14.2002s-10.8008 13 -14.2002 21.1006c-3.40039 8 -5.10059 16.5996 -5.10059 25.7998s1.7002 17.7998 5.10059 25.7998c3.39941 8 8.2002 15.0996 14.2002 21.0996s13 8.40039 21.0996 11.9004
+c8 3.40039 16.5996 5.09961 25.7998 5.09961s17.7998 -1.69922 25.7998 -5.09961s15.1006 -5.7998 21.1006 -11.9004c6 -6 10.7002 -13.0996 14.2002 -21.0996c3.39941 -8 5.09961 -16.5996 5.09961 -25.7998zM624.5 229.2c0 7.2998 -1.2998 14 -3.90039 20.2998
+c-2.59961 6.2998 -6.19922 11.7002 -10.7998 16.2998c-4.59961 4.60059 -10 8.2002 -16.2002 10.9004c-6.19922 2.7002 -12.7998 4 -19.7998 4s-13.5996 -1.2998 -19.7998 -4s-11.5996 -6.2998 -16.2002 -10.9004c-4.59961 -4.59961 -8.2002 -10 -10.7998 -16.2998
+s-3.90039 -13.0996 -3.90039 -20.2998c0 -7.2998 1.30078 -14 3.90039 -20.2998c2.59961 -6.30078 6.2002 -11.7002 10.7998 -16.3008c4.60059 -4.59961 10 -8.19922 16.2002 -10.8994s12.7998 -4 19.7998 -4s13.6006 1.2998 19.7998 4
+c6.2002 2.7002 11.6006 6.2998 16.2002 10.8994c4.60059 4.60059 8.2002 10 10.7998 16.3008c2.60059 6.2998 3.90039 13.0996 3.90039 20.2998zM529.7 132.5c6 -0.900391 10.5 -6 10.7002 -12.2998c0 -6.7998 -5.60059 -12.4004 -12.4004 -12.4004
+s-12.4004 5.60059 -12.4004 12.4004c0 6.2002 4.60059 11.2998 10.5 12.2002v5.7998l-80.2998 -9v-5.40039c5.60059 -1.09961 9.90039 -6.09961 9.90039 -12.0996c0 -6.7998 -5.60059 -10.2002 -12.4004 -10.2002s-12.3994 3.40039 -12.3994 10.2002
+c0 5.89941 4.19922 11 9.89941 12.0996v4.90039l-28.3994 -3.2002v-23.7002h5.89941v-13.7998h-5.89941v6.59961h-5v-6.59961h-5.90039v13.7998h5.90039v23.2002l-38.3008 -4.2998c-8.09961 -11.5 -19 -13.6006 -19 -13.6006l0.100586 -6.69922l5.09961 -0.200195
+l0.100586 -12.1006h-4.10059l-0.0996094 5h-5.2002l-0.0996094 -5h-4.10059l0.100586 12.1006l5.09961 0.200195l0.0996094 6.69922s-10.8994 2.2002 -19 13.6006l-38.2998 4.2998v-23.2002h5.90039v-13.7998h-5.90039v6.59961h-5v-6.59961h-5.89941v13.9004h5.89941
+v23.6992l-28.3994 3.2002v-4.89941c5.59961 -1.10059 9.89941 -6.10059 9.89941 -12.1006c0 -6.7998 -5.59961 -10.2002 -12.3994 -10.2002c-6.80078 0 -12.4004 3.40039 -12.4004 10.2002c0 5.90039 4.2002 11 9.90039 12.1006v5.39941l-80.3008 9v-5.7998
+c5.90039 -0.900391 10.5 -6 10.5 -12.2002c0 -6.7998 -5.59961 -12.3994 -12.3994 -12.3994s-12.4004 5.59961 -12.4004 12.3994c0 6.2002 4.60059 11.2998 10.5 12.2002v6.2998l-88.8994 10l242.899 -13.5c-0.599609 2.2002 -1.09961 4.60059 -1.39941 7.2002
+c-0.300781 2.09961 -0.5 4.2002 -0.600586 6.5l-64.7998 8.09961l64.9004 -1.89941c0 0.399414 0 0.799805 0.0996094 1.09961c2.7998 17.2002 25.5 23.7002 25.5 23.7002l1.09961 26.4004h-23.5996l-19 -41.8008h-17.0996l62.1992 131.4l62.2002 -131.4h-17.0996
+l-19 41.8008h-23.7998l1.09961 -26.3008s22.7002 -6.5 25.5 -23.6992c0 -0.400391 0.0996094 -0.700195 0.0996094 -1.10059l64.9004 1.90039l-64.7998 -8.10059c-0.100586 -2.2998 -0.299805 -4.5 -0.600586 -6.5c-0.299805 -2.59961 -0.799805 -5 -1.39941 -7.19922
+l242.899 13.3994l-88.8994 -10v-6.2998zM328.9 220.1h17.8994l1.7002 40.3008l1.7002 -40.3008h17.8994l-19.5996 42.5z" />
+ <glyph glyph-name="ember" unicode="&#xf423;" horiz-adv-x="640"
+d="M639.9 193.4c1.09961 -10.8008 -5.30078 -14.3008 -5.30078 -14.3008s-26.5996 -19.5996 -47 -13.6992c-20.3994 5.89941 -21.5 43.1992 -21.5 43.1992h-1.89941l-20.7002 -57.1992s-8.2998 -27.9004 -20.7002 -22.8008
+c-12.3994 5.10059 -12.0996 18.6006 -12.0996 18.6006s-19.2998 -21.2998 -54.7998 -18.6006c-31.1006 2.30078 -41.1006 26.7002 -41.1006 26.7002s-20.7998 -14.3994 -79.0996 -25.8994c-26.1006 -2.90039 -44.6006 12.8994 -44.6006 12.8994
+c-2.39941 -2.39941 -18 -10.2002 -18 -10.2002s-22.2998 -10.2998 -30.8994 5.30078c-8.60059 15.5996 -3 63.6992 -3 63.6992h-1.60059s-12.8994 -26.2998 -19.5996 -49.8994c-6.7002 -23.6006 -15 -21.2002 -15 -21.2002s-15.2998 -1.40039 -18.7998 11.4004
+c-3.5 12.8994 5.59961 59.6992 5.59961 59.6992l-1.2998 -0.299805s-0.799805 1.40039 -12.5996 -23.5996c-20.1006 -48.9004 -24.9004 -50 -36.5 -47.9004c-11.6006 2.10059 -12.1006 16.7002 -12.1006 16.7002l-15.8994 -8.7998s-38.6006 -16.6006 -58.8008 -1.2998
+c-13.3994 10.1992 -18 22.1992 -19.5996 29.6992c0 0 -17 1.80078 -28.0996 6.10059c-11.1006 4.2998 0.0996094 18.2998 0.0996094 18.2998s3.5 5.2998 10 0s18.7998 -2.90039 18.7998 -2.90039c1 8.5 2.5 19.7002 7.7998 31.5c11 24.7002 27.6006 33 41.3008 33.3008
+c13.6992 0.199219 23.3994 -3.5 31.6992 -15.3008c18.6006 -45.8994 -49.3994 -69.1992 -49.3994 -69.1992s-1.7998 -12.1006 16.7002 -11.8008c18.5996 0.200195 46.7998 20.4004 46.7998 20.4004c1.2998 15.4004 12.0996 63.5 15 70.7002
+c2.89941 7.2002 14.2002 5.89941 14.2002 5.89941s8.89941 1.90039 10.5 -7.5c1.69922 -9.39941 -6.40039 -47.5996 -6.40039 -47.5996l1.2998 -1.59961c0.799805 3.69922 20.4004 36.5 20.4004 36.5s11.2998 19.5996 28.5 18.7998s-0.799805 -53.5 -0.799805 -53.5
+l1.2998 -1.60059l1.2998 2.40039c2.2002 5.90039 27.7002 44.5996 27.7002 44.5996s9.59961 11.3008 18.5 8.60059c8.7998 -2.60059 9.39941 -6.7002 9.89941 -14.2002s-7 -52.0996 -7 -52.0996s-4.2998 -29.2002 5.40039 -28.7002s20.2002 10.7002 20.2002 10.7002
+s7.5 57.5996 12.5996 105.1c5.10059 47.5 27.1006 79.5 27.1006 79.5s6.5 10 23.5 16.7002c11.1992 4 23.3994 1.2998 29.1992 -23.1006c9.5 -41 -23.2998 -87.8994 -36.8994 -105.199c5.89941 5.7998 15.7998 12.0996 27.2002 5.2998
+c40.2998 -25.2998 7.2998 -80.9004 7.2998 -80.9004c11.7998 3.7998 33 18 33 18s0.5 6.10059 0.700195 7.5c7.19922 41.2998 32 56.2002 36.5996 59.7002c4.7998 3.59961 47.0996 19.7998 49 -24s-52.9004 -59.0996 -52.9004 -59.0996s4.80078 -12.6006 25 -9.40039
+c20.2002 3.2002 43.3008 22.7998 43.3008 22.7998c0.799805 18 12.5996 61 15 67.2002c2.39941 6.2002 17.1992 6.5 18.7998 3c2.2002 -7 0.299805 -37.5996 0.299805 -37.5996l1.59961 0.5c5.90039 17.5 18.3008 31.1992 18.3008 31.1992s9.89941 9.7002 18 7.30078
+c8.09961 -2.30078 5.09961 -30.4004 5.09961 -30.4004s-4.2998 -30.7002 9.40039 -32c13.6992 -1.40039 29.2998 10.7002 29.2998 10.7002s9.59961 3.89941 10.7002 -6.7998zM61.9004 188.1c0 0 6.19922 -1.89941 19.8994 7.60059
+c13.7002 9.39941 16.4004 24.3994 9.10059 31.3994c-7.2002 6.90039 -28.2002 -7 -29 -39zM334.7 311.9c0 0 -15.9004 -54.5 -16.4004 -70.7002c0 0 44.5 72 40 96.2002c-4.5 24.1992 -23.5996 -25.5 -23.5996 -25.5zM357.5 173.5
+c12.5996 33.0996 -3.59961 45.5 -3.59961 45.5s-23.4004 12.9004 -33.3008 -20.2002c-9.89941 -33.0996 -6.39941 -44.8994 -6.39941 -44.8994s30.7002 -13.4004 43.2998 19.5996zM442.1 188.1c0 0 15.7002 -1.09961 26.4004 14.2002s1.2998 25.5 1.2998 25.5
+s-8.59961 11.1006 -19.5996 -9.09961c-11.1006 -20.1006 -8.10059 -30.6006 -8.10059 -30.6006z" />
+ <glyph glyph-name="font-awesome-flag" unicode="&#xf425;" horiz-adv-x="447"
+d="M444.373 88.5762c0 -7.16797 -6.14453 -10.2402 -13.3125 -13.3125c-28.6719 -12.2881 -59.3916 -23.5518 -92.1592 -23.5518c-46.0801 0 -67.584 28.6719 -122.88 28.6719c-39.9365 0 -81.9209 -14.3359 -115.713 -29.6953
+c-2.04785 -1.02441 -4.0957 -1.02441 -6.14355 -2.04883v-77.8232c0 -21.4053 -16.1221 -34.8164 -33.792 -34.8164c-19.4561 0 -34.8164 15.3604 -34.8164 34.8164v374.783c-13.3115 10.2402 -22.5273 26.624 -22.5273 45.0566c0 31.7441 25.5996 57.3438 57.3438 57.3438
+s57.3438 -25.5996 57.3438 -57.3438c0 -18.4326 -8.19141 -34.8164 -22.5273 -45.0566v-31.7432c4.12402 1.37402 58.7676 28.6719 114.688 28.6719c65.2705 0 97.6758 -27.6484 126.976 -27.6484c38.9121 0 81.9209 27.6484 92.1602 27.6484
+c8.19238 0 15.3604 -6.14453 15.3604 -13.3125v-240.64z" />
+ <glyph glyph-name="gitter" unicode="&#xf426;" horiz-adv-x="384"
+d="M66.4004 125.5h-50.4004v322.5h50.4004v-322.5zM166.9 371.9v-435.9h-50.4004v435.9h50.4004zM267.5 371.9v-435.9h-50.4004v435.9h50.4004zM368 372v-247h-50.4004v247h50.4004z" />
+ <glyph glyph-name="hooli" unicode="&#xf427;" horiz-adv-x="640"
+d="M144.5 96v16c12.2998 -6.59961 25.0996 -12.2002 38.2998 -16.7998zM202.2 101.3c29.5 -10.7002 55.3994 -13.5 75.2998 -13.2998c-24.7998 -7 -58.2002 -5.2998 -94.7002 7.2002l19.4004 0.799805v5.2998zM611.1 216.5c-16 0 -28.8994 13 -28.8994 28.9004
+c0 15.8994 13 24.5 28.8994 24.5c16 0 28.9004 -8.5 28.9004 -24.5s-13 -28.9004 -28.9004 -28.9004zM582.1 96v110.5h57.9004v-110.5h-57.9004zM508.4 96v168l57.8994 27.2998v-195.3h-57.8994zM477.4 215.4c18.0996 -18.1006 16.6992 -33.8008 16.7998 -52.6006
+c0 -18.7002 1.39941 -34.2998 -16.7998 -52.5c-18.1006 -18.2002 -50.4004 -17.0996 -50.4004 -17.0996s-32.2002 -1.10059 -50.4004 17.0996c-18.1992 18.2002 -16.7998 33.7998 -16.7998 52.5s-1.39941 34.4004 16.7998 52.6006
+c18.1006 18.1992 50.4004 17.0996 50.4004 17.0996s32.2002 1.09961 50.4004 -17.0996zM437.6 143.5v40.4004c0 8.7998 -7.2998 10.8994 -10.6992 10.8994c-3.40039 0 -10.7002 -2.2002 -10.7002 -10.8994v-40.4004c0 -3.59961 1.7998 -12.5 10.7002 -12.5
+c8.89941 0 10.6992 8.90039 10.6992 12.5zM331.4 215.4c18.1992 -18.1006 16.6992 -33.8008 16.6992 -52.3008c0 -18.6992 1.5 -34.2998 -16.6992 -52.5c-18.1006 -18.1992 -50.4004 -17.0996 -50.4004 -17.0996s-32.2002 -1.09961 -50.4004 17.0996
+c-18.1992 18.2002 -16.7998 33.8008 -16.7998 52.5c0 15.6006 -0.899414 29.1006 9.2998 43.7002c-16 11.7998 -58 37.4004 -99.8994 58.2998v-54.2998c8 13.7002 22.7002 22 38.5 21.9004c27.2002 0 40.5996 -18.7002 40.5996 -37.4004v-93.8994
+c-20.3994 7.5 -39.7002 17.3994 -57.7002 29.5996v48.7002c0 8.09961 -1.5 15 -10.5996 15s-10.7998 -11.2998 -10.7998 -18.2002v-29.7998l-4.5 3.59961c-22.9004 18.9004 -40.2998 35.6006 -53.4004 50.2998v-31c11 -9.7998 23.6006 -20.1992 38.4004 -31.3994
+c6.39941 -4.90039 12.8994 -9.40039 19.3994 -13.6006v-28.5996h-57.8994v73.7002c-86.7002 78 -61.7998 110.8 -61.7998 110.8c8.2998 18.2998 42.8994 22.2002 97.2998 0.0996094l22.5 10.6006v-20.7002c29.5996 -14.5996 63.8994 -31.5 102.1 -61.0996
+c1.60059 2.09961 3.40039 4.09961 5.2998 6c18.2002 18.1992 50.4004 17.0996 50.4004 17.0996s32.2002 1.09961 50.4004 -17.0996zM65.2002 264l29.2002 13.7002c-26.9004 10.0996 -50.9004 13.5 -64.4004 2.09961c-3.7002 -3.09961 -13.5 -24.5996 35.2002 -79.0996
+v63.2998zM291.7 143.5v40.4004c0 8.7998 -7.2998 10.8994 -10.7002 10.8994s-10.7002 -2.2002 -10.7002 -10.8994v-40.4004c0 -3.59961 1.7998 -12.5 10.7002 -12.5s10.7002 8.90039 10.7002 12.5z" />
+ <glyph glyph-name="strava" unicode="&#xf428;" horiz-adv-x="369"
+d="M151.4 448l150.199 -292h-88.5l-61.6992 116.1l-62.2002 -116.1h-89.2002zM301.6 156h67.6006l-111.5 -220l-112.2 220h67.5996l44.6006 -88.2002z" />
+ <glyph glyph-name="stripe" unicode="&#xf429;" horiz-adv-x="640"
+d="M165 303.3l0.0996094 -38.5h33.7002v-37.7998h-33.7002v-63.2002c0 -26.2002 28 -18 33.7002 -15.7002v-33.7998c-5.89941 -3.2002 -16.5996 -5.89941 -31.2002 -5.89941c-26.2998 0 -46.0996 17 -46.0996 43.2998l0.200195 142.399zM254.1 251.7
+c10.4004 19.0996 31.1006 15.2002 37.1006 13.0996v-40.7998c-5.7002 1.7998 -23.4004 4.5 -33.9004 -9.2998v-103.101h-44.2998v153.2h38.4004zM346.4 324v-36.2002l-44.6006 -9.5v36.2002zM44.9004 219.7c0 -20 67.8994 -10.5 67.8994 -63.4004
+c0 -32 -25.3994 -47.7998 -62.2998 -47.7998c-15.2998 0 -32 3 -48.5 10.0996v40c14.9004 -8.09961 33.9004 -14.1992 48.5996 -14.1992c9.90039 0 17 2.69922 17 10.8994c0 21.2002 -67.5 13.2002 -67.5 62.4004c0 31.3994 24 50.2002 60 50.2002
+c14.7002 0 29.4004 -2.30078 44.1006 -8.10059v-41.7998c-13.5 7.2998 -30.7002 11.4004 -44.2002 11.4004c-9.2998 -0.100586 -15.0996 -2.80078 -15.0996 -9.7002zM640 186.4c0 -4.30078 -0.400391 -13.6006 -0.599609 -15.9004h-86.9004
+c2 -20.7998 17.2002 -26.9004 34.5 -26.9004c17.5996 0 31.5 3.7002 43.5996 9.80078v-33.4004c-12.0996 -6.7002 -28 -11.5 -49.1992 -11.5c-43.2002 0 -73.5 24.7002 -73.5 78.2002c0 45.2002 25.6992 81.0996 67.8994 81.0996s64.2002 -35.8994 64.2002 -81.3994z
+M552.1 203.2h45.9004c0 20 -11.5996 28.3994 -22.5 28.3994c-11.0996 0 -23.4004 -8.39941 -23.4004 -28.3994zM439.2 267.8c31.2002 0 60.5996 -28.0996 60.5 -79.7002c0 -56.3994 -29 -79.5996 -60.7998 -79.5996c-15.5 0 -25 6.5 -31.4004 11.2002l-0.0996094 -50.2002
+l-44.4004 -9.40039v204.801h39.0996l2.30078 -11c6.19922 5.69922 17.3994 13.8994 34.7998 13.8994zM428.6 145.3c16.5 0 27.5 17.9004 27.4004 41.7998c0 23.2002 -11.2002 41.4004 -27.4004 41.4004c-10.1992 0 -16.5996 -3.7002 -21.1992 -8.7998l0.299805 -66
+c4.2998 -4.60059 10.5 -8.40039 20.8994 -8.40039zM301.9 111.6v153.2h44.5996v-153.2h-44.5996z" />
+ <glyph glyph-name="stripe-s" unicode="&#xf42a;" horiz-adv-x="362"
+d="M144.3 293.4c0 -64.2002 218 -33.7002 218 -203.9c0 -102.6 -81.7002 -153.6 -200.3 -153.6c-49.2002 0 -103 9.59961 -156 32.3994v128.5c47.9004 -26 108.9 -45.5 156.1 -45.5c31.8008 0 54.7002 8.5 54.7002 34.9004c0 68.0996 -216.8 42.5 -216.8 200.399
+c0 101 77.0996 161.4 192.8 161.4c47.2998 0 94.5 -7.2002 141.8 -26.0996v-134.301c-43.3994 23.4004 -98.5 36.7002 -141.899 36.7002c-29.7998 0 -48.4004 -8.59961 -48.4004 -30.8994z" />
+ <glyph glyph-name="typo3" unicode="&#xf42b;" horiz-adv-x="433"
+d="M171.7 369.6c0 -66.3994 83.3994 -264.899 140.6 -264.899c6.90039 0 11.5 0 18.5 2.2998c-49.3994 -79.5 -110.399 -139 -146.7 -139c-77.2998 0 -184.1 234 -184.1 337.5c0 16.2998 3.90039 29.4004 9.2998 37.0996c27 32.4004 106.8 57.9004 176.3 66.4004
+c-8.5 -7 -13.8994 -14.7002 -13.8994 -39.4004zM294.5 416c71.7998 0 138.8 -11.5996 138.8 -52.5c0 -82.5996 -52.5 -182.3 -78.7998 -182.3c-47.9004 0 -101.7 132.1 -101.7 198.5c0 30.8994 11.6006 36.2998 41.7002 36.2998z" />
+ <glyph glyph-name="amazon-pay" unicode="&#xf42c;" horiz-adv-x="611"
+d="M0 122.8c2.2998 4.2002 5.2002 4.90039 9.7002 2.5c10.3994 -5.59961 20.5996 -11.3994 31.2002 -16.7002c40.6992 -20.3994 83.1992 -35.5996 127.399 -46.2998c20.9004 -5 41.9004 -9 63.2002 -11.7998c31.5 -4.2002 63.2002 -6 95 -5.2002
+c17.4004 0.400391 34.7998 1.7998 52.0996 3.7998c56.4004 6.7002 110.9 20.8008 163.301 42.8008c2.89941 1.19922 5.89941 2 9.09961 1.19922c6.7002 -1.7998 9 -9 4.09961 -13.8994c-2.7998 -2.7998 -6.2998 -5.10059 -9.59961 -7.40039
+c-30.7002 -21.0996 -64.2002 -36.3994 -99.5996 -47.8994c-24.6006 -7.90039 -49.6006 -13.8008 -75.1006 -17.6006c-17.5996 -2.59961 -35.3994 -4.39941 -53.2002 -4.7998c-0.799805 0 -1.69922 -0.200195 -2.5 -0.299805h-21.0996
+c-0.799805 0.0996094 -1.7002 0.299805 -2.5 0.299805c-3.59961 0.200195 -7.2002 0.299805 -10.7002 0.400391c-16.8994 0.699219 -33.7002 2.59961 -50.3994 5.2998c-27.4004 4.5 -54.2002 11.3994 -80.4004 20.8994c-54.0996 19.6006 -102.6 48.6006 -145.6 87
+c-1.80078 1.60059 -3 3.80078 -4.40039 5.7002v2zM158 383c2.7998 0 5.5 0 8.2998 -0.0996094c3.2998 -0.5 6.60059 -0.800781 9.7998 -1.5c21.3008 -4.40039 35.4004 -17.3008 43.9004 -36.9004c6.90039 -15.9004 8.59961 -32.7002 8.09961 -49.7998
+c-0.399414 -15.4004 -3.2998 -30.2002 -10.2998 -44.1006c-9.2002 -18.3994 -23.3994 -30.8994 -43.7998 -34.8994c-22.5 -4.40039 -43.0996 0.5 -61 15.3994c-0.5 0.5 -1.09961 1 -2.2002 1.90039v-72.4004c0 -1 0 -2 -0.0996094 -3c-0.299805 -3 -2.10059 -5 -5 -5
+c-7 -0.0996094 -14.1006 -0.0996094 -21.1006 0c-2.89941 0.100586 -4.69922 2 -4.89941 5c-0.100586 1 -0.100586 2 -0.100586 3v209.301c0 6.89941 1.30078 8.19922 8.2002 8.19922h11.5c4.60059 0 6.90039 -2 7.60059 -6.59961c0.5 -2.7002 0.899414 -5.5 1.2998 -8.2002
+c0 -0.399414 0.200195 -0.899414 0.399414 -1.39941c2.5 1.89941 4.7002 3.69922 7.10059 5.39941c11.3994 8.2002 24 13.4004 38 15.1006c1.39941 0.199219 2.89941 0.399414 4.2998 0.599609zM110.6 341.1c0.100586 -14.0996 0 -28.0996 0 -42.0996
+c0 -14.0996 0.100586 -28.0996 0 -42.2002c0 -1.59961 0.5 -2.39941 1.7002 -3.2998c11.2002 -7.90039 23.4004 -13.2998 37.4004 -13.9004c20.2002 -0.899414 35.7998 7.2002 42.5996 28.5c3.2002 10 4 20.2002 4 30.6006c0 11.2002 -1 22.2998 -4.89941 33
+c-6.40039 17.5 -18.6006 24.7998 -33.5 25.8994c-16.8008 1.30078 -31.9004 -3.69922 -45.6006 -13.1992c-1.2998 -0.900391 -1.7002 -1.80078 -1.7002 -3.30078zM316.3 383c4 0 8 0 11.9004 0.0996094c3.59961 -0.5 7.2002 -0.799805 10.7998 -1.2998
+c7.7002 -1.09961 15.0996 -3.09961 21.7998 -7.09961c11.6006 -6.90039 17.1006 -17.5 19 -30.4004c0.700195 -4.2998 0.900391 -8.59961 0.900391 -12.8994v-106c0 -0.900391 0 -1.90039 -0.100586 -2.80078c-0.299805 -2.7998 -2.09961 -4.59961 -4.7998 -4.69922
+c-5.39941 -0.100586 -10.8994 -0.100586 -16.2998 0c-2.90039 0.0996094 -4.7998 2.09961 -5.40039 5.19922c-0.699219 3.60059 -1.19922 7.2002 -1.7998 11c-0.700195 -0.399414 -1.2002 -0.699219 -1.59961 -1.09961
+c-11.7998 -9.7002 -25.2002 -16.0996 -40.2998 -18.4004c-13.1006 -2 -26 -1.19922 -37.9004 5.40039c-12.4004 6.90039 -19.4004 17.7002 -21.4004 31.7002c-1.5 10.5 -0.799805 20.8994 3.90039 30.7002c6.09961 12.5996 16.5 20.3994 29.4004 24.8994
+c10.7998 3.7998 22 4.5 33.2998 3.90039c10.8994 -0.600586 21.5 -2.40039 32 -4.90039c0.399414 -0.0996094 0.799805 0 1.2998 -0.0996094c0.0996094 0.5 0.200195 1 0.200195 1.39941c-0.100586 8.30078 0 16.6006 -0.299805 24.9004
+c-0.200195 5.90039 -1.60059 11.5996 -5.30078 16.4004c-4.19922 5.5 -10.2998 7.39941 -16.7998 8.39941c-12.5 1.90039 -24.8994 0.900391 -37.2002 -1.39941c-7.89941 -1.5 -15.6992 -3.7002 -23.5 -5.7002c-4.69922 -1.2002 -6.69922 0.0996094 -6.7998 4.89941
+c-0.0996094 3.30078 0.100586 6.60059 0 9.90039c-0.0996094 3.90039 1.7002 6.5 5.2998 7.7002c5.90039 2 11.8008 4.2002 17.9004 5.7998c9.5 2.40039 19.0996 3.59961 28.9004 4.09961c0.899414 0.100586 1.89941 0.300781 2.89941 0.400391zM351.3 255.3
+c-0.0996094 4.7002 0 9.40039 0.100586 14.1006c0 4.7998 -0.100586 9.5 0 14.2998c0 1.59961 -0.5 2.39941 -2.10059 2.59961c-8.39941 1.10059 -16.5996 2.7002 -25 3.40039c-8.2998 0.799805 -16.7002 0.399414 -24.7998 -2.2002
+c-8 -2.59961 -13.9004 -7.2998 -16.4004 -15.5996c-1.89941 -6.40039 -1.89941 -13 0.100586 -19.4004s6.09961 -10.7998 12.3994 -13.2998c5.40039 -2.2002 11.1006 -2.40039 16.8008 -1.7998c13.8994 1.39941 26.1992 6.7998 37.3994 14.8994
+c1.10059 0.799805 1.5 1.7002 1.5 3zM611.2 125.9v-17.3008c-0.700195 -3.59961 -1.2998 -7.2998 -2.10059 -10.8994c-4.39941 -20.2998 -11.8994 -39.2002 -24.6992 -55.6006c-3.80078 -4.7998 -8.30078 -9.09961 -12.7002 -13.3994
+c-1.2002 -1.2002 -3.10059 -2.10059 -4.7998 -2.5c-2.90039 -0.700195 -4.60059 1.2002 -4.10059 4.09961c0.200195 1 0.600586 2 1 3c5.7998 14.7998 11.7002 29.7002 15.7998 45.1006c2.10059 7.59961 3.90039 15.2998 3.5 23.2998
+c-0.199219 5.2002 -2.5 9 -7.59961 10.3994c-4.7002 1.30078 -9.59961 2.40039 -14.4004 2.7002c-11.3994 0.900391 -22.8994 0.200195 -34.2998 -0.899414c-7.7998 -0.800781 -15.5 -1.7002 -23.2998 -2.5c-0.700195 -0.100586 -1.5 -0.100586 -2.2002 -0.100586
+c-1.5 -0.0996094 -3.2002 0.299805 -3.59961 1.7998c-0.400391 1.2002 -0.200195 2.90039 0.5 4c0.899414 1.40039 2.2998 2.5 3.7002 3.5c12.0996 8.30078 25.6992 12.9004 40 15.6006c13 2.39941 26.0996 3 39.1992 1.89941
+c7.10059 -0.599609 14.2002 -1.59961 21 -3.89941c4.30078 -1.40039 8.10059 -3.2998 9.10059 -8.2998zM479.1 249.1c0.300781 -0.699219 0.5 -1.2998 0.900391 -2.39941c2.59961 7.7002 5.2002 15 7.7002 22.2002c11.5996 33.3994 23.2002 66.6992 34.7998 100
+c0.5 1.39941 1.09961 2.69922 1.59961 4.09961c1.40039 3.59961 4 5.2002 7.7002 5.2002c6.60059 0 13.2998 0.0996094 19.9004 0c2.7998 0 4.09961 -1.60059 3.7002 -4.40039c-0.300781 -1.89941 -0.900391 -3.7002 -1.60059 -5.5
+c-23.5 -59.8994 -46.8994 -119.8 -70.5996 -179.6c-2.5 -6.2998 -5.7002 -12.5 -9.2002 -18.4004c-8.7998 -14.8994 -22.4004 -21.7998 -39.5 -21.3994c-5.59961 0.0996094 -11.2998 1 -16.9004 2c-5.39941 0.899414 -7.2998 3.39941 -7.39941 8.89941
+c-0.100586 3.2998 -0.100586 6.60059 0 9.90039c0.0996094 3.5 1.7998 5 5.2002 4.7998c2.5 -0.200195 5 -0.799805 7.5 -1c5.19922 -0.5 10.2998 -0.299805 15.3994 1.09961c7.2002 1.90039 12.2002 6.80078 15.2002 13.3008c3.40039 7.2998 6 15 9.2998 22.2998
+c1.90039 4.2002 1.5 7.7002 -0.200195 11.7998c-19.7998 48.4004 -39.5 97 -59.0996 145.5c-0.799805 2 -1.40039 4 -1.90039 6.09961c-0.5 2.5 0.700195 4.5 3.2002 4.5c7.7002 0.100586 15.2998 0 22.9004 -0.0996094c3.2002 0 5.2998 -1.90039 6.39941 -4.7998
+c2.10059 -5.60059 4.30078 -11.2002 6.30078 -16.9004c12.8994 -35.7002 25.7998 -71.5 38.6992 -107.2z" />
+ <glyph glyph-name="cc-amazon-pay" unicode="&#xf42d;" horiz-adv-x="576"
+d="M124.7 246.2c0.0996094 11.7998 0 23.5 0 35.2998v35.2998c0 1.2998 0.399414 2 1.39941 2.7002c11.5 8 24.1006 12.0996 38.2002 11.0996c12.5 -0.899414 22.7002 -7 28.1006 -21.6992c3.2998 -8.90039 4.09961 -18.2002 4.09961 -27.7002
+c0 -8.7002 -0.700195 -17.2998 -3.40039 -25.6006c-5.69922 -17.7998 -18.6992 -24.6992 -35.6992 -23.8994c-11.7002 0.5 -21.9004 5 -31.4004 11.7002c-0.900391 0.799805 -1.40039 1.59961 -1.2998 2.7998zM279.6 231.6c-5.19922 2 -8.7998 5.7002 -10.3994 11.2002
+c-1.7002 5.40039 -1.7002 10.7998 -0.100586 16.2002c2 6.90039 7 10.9004 13.7002 13.0996c6.7998 2.2002 13.7998 2.5 20.7998 1.90039c7 -0.700195 13.9004 -2 20.9004 -2.90039c1.40039 -0.199219 1.7998 -0.799805 1.7998 -2.19922c-0.0996094 -4 0 -8 0 -12
+c0 -3.90039 -0.0996094 -7.90039 0 -11.8008c0 -1.19922 -0.399414 -1.89941 -1.2998 -2.5c-9.40039 -6.7998 -19.7002 -11.2998 -31.2998 -12.5c-4.7998 -0.5 -9.5 -0.299805 -14.1006 1.5zM576 368v-352c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48v352
+c0 26.5 21.5 48 48 48h480c26.5 0 48 -21.5 48 -48zM368.5 344.1c0.400391 -1.69922 0.900391 -3.39941 1.59961 -5.09961c16.5 -40.5996 32.9004 -81.2998 49.5 -121.9c1.40039 -3.5 1.7002 -6.39941 0.200195 -9.89941
+c-2.7998 -6.2002 -4.89941 -12.6006 -7.7998 -18.7002c-2.59961 -5.5 -6.7002 -9.5 -12.7002 -11.2002c-4.2002 -1.09961 -8.5 -1.2998 -12.8994 -0.899414c-2.10059 0.199219 -4.2002 0.699219 -6.30078 0.799805c-2.7998 0.200195 -4.19922 -1.10059 -4.2998 -4
+c-0.0996094 -2.7998 -0.0996094 -5.60059 0 -8.2998c0.100586 -4.60059 1.60059 -6.7002 6.2002 -7.5c4.7002 -0.800781 9.40039 -1.60059 14.2002 -1.7002c14.2998 -0.299805 25.7002 5.39941 33.0996 17.8994c2.90039 4.90039 5.60059 10.1006 7.7002 15.4004
+c19.7998 50.0996 39.5 100.3 59.2002 150.5c0.599609 1.5 1.09961 3 1.2998 4.59961c0.400391 2.40039 -0.700195 3.60059 -3.09961 3.7002c-5.60059 0.100586 -11.1006 0 -16.7002 0c-3.10059 0 -5.2998 -1.39941 -6.40039 -4.2998
+c-0.399414 -1.09961 -0.899414 -2.2998 -1.2998 -3.40039l-29.0996 -83.6992c-2.10059 -6.10059 -4.2002 -12.1006 -6.5 -18.6006c-0.400391 0.900391 -0.600586 1.40039 -0.800781 1.90039c-10.7998 29.8994 -21.5996 59.8994 -32.3994 89.7998
+c-1.7002 4.7002 -3.5 9.5 -5.2998 14.2002c-0.900391 2.5 -2.7002 4 -5.40039 4c-6.40039 0.0996094 -12.7998 0.200195 -19.2002 0.0996094c-2.2002 0 -3.2998 -1.59961 -2.7998 -3.7002zM242.4 242c1.69922 -11.7002 7.59961 -20.7998 18 -26.5996
+c9.89941 -5.5 20.6992 -6.2002 31.6992 -4.60059c12.7002 1.90039 23.9004 7.2998 33.8008 15.5c0.399414 0.299805 0.799805 0.600586 1.39941 1c0.5 -3.2002 0.900391 -6.2002 1.5 -9.2002c0.5 -2.59961 2.10059 -4.2998 4.5 -4.39941
+c4.60059 -0.100586 9.10059 -0.100586 13.7002 0c2.2998 0.0996094 3.7998 1.59961 4 3.89941c0.0996094 0.800781 0.0996094 1.60059 0.0996094 2.30078v88.7998c0 3.59961 -0.199219 7.2002 -0.699219 10.7998c-1.60059 10.7998 -6.2002 19.7002 -15.9004 25.4004
+c-5.59961 3.2998 -11.7998 5 -18.2002 5.89941c-3 0.400391 -6 0.700195 -9.09961 1.10059h-10c-0.799805 -0.100586 -1.60059 -0.300781 -2.5 -0.300781c-8.2002 -0.399414 -16.2998 -1.39941 -24.2002 -3.5c-5.09961 -1.2998 -10 -3.19922 -15 -4.89941
+c-3 -1 -4.5 -3.2002 -4.40039 -6.5c0.100586 -2.7998 -0.0996094 -5.60059 0 -8.2998c0.100586 -4.10059 1.80078 -5.2002 5.7002 -4.10059c6.5 1.7002 13.1006 3.5 19.7002 4.7998c10.2998 1.90039 20.7002 2.7002 31.0996 1.2002
+c5.40039 -0.799805 10.5 -2.39941 14.1006 -7c3.09961 -4 4.2002 -8.7998 4.39941 -13.7002c0.300781 -6.89941 0.200195 -13.8994 0.300781 -20.7998c0 -0.399414 -0.100586 -0.700195 -0.200195 -1.2002c-0.400391 0 -0.799805 0 -1.10059 0.100586
+c-8.7998 2.09961 -17.6992 3.59961 -26.7998 4.09961c-9.5 0.5 -18.8994 -0.0996094 -27.8994 -3.2002c-10.8008 -3.7998 -19.5 -10.2998 -24.6006 -20.7998c-4.09961 -8.2998 -4.59961 -17 -3.39941 -25.7998zM98.7002 341.1v-175.3c0 -0.799805 0 -1.7002 0.0996094 -2.5
+c0.200195 -2.5 1.7002 -4.09961 4.10059 -4.2002c5.89941 -0.0996094 11.7998 -0.0996094 17.6992 0c2.5 0 4 1.7002 4.10059 4.10059c0.0996094 0.799805 0.0996094 1.7002 0.0996094 2.5v60.7002c0.900391 -0.700195 1.40039 -1.2002 1.90039 -1.60059
+c15 -12.5 32.2002 -16.5996 51.0996 -12.8994c17.1006 3.39941 28.9004 13.8994 36.7002 29.1992c5.7998 11.6006 8.2998 24.1006 8.7002 37c0.5 14.3008 -1 28.4004 -6.7998 41.7002c-7.10059 16.4004 -18.9004 27.2998 -36.7002 30.9004
+c-2.7002 0.599609 -5.5 0.799805 -8.2002 1.2002h-7c-1.2002 -0.200195 -2.40039 -0.300781 -3.59961 -0.5c-11.7002 -1.40039 -22.3008 -5.80078 -31.8008 -12.7002c-2 -1.40039 -3.89941 -3 -5.89941 -4.5c-0.100586 0.5 -0.299805 0.799805 -0.400391 1.2002
+c-0.399414 2.2998 -0.700195 4.59961 -1.09961 6.89941c-0.600586 3.90039 -2.5 5.5 -6.40039 5.60059h-9.7002c-5.89941 0.0996094 -6.89941 -1 -6.89941 -6.80078zM493.6 109c-2.69922 0.700195 -5.09961 0 -7.59961 -1c-43.9004 -18.4004 -89.5 -30.2002 -136.8 -35.7998
+c-14.5 -1.7002 -29.1006 -2.7998 -43.7002 -3.2002c-26.5996 -0.700195 -53.2002 0.799805 -79.5996 4.2998c-17.8008 2.40039 -35.5 5.7002 -53 9.90039c-37 8.89941 -72.7002 21.7002 -106.7 38.7998c-8.7998 4.40039 -17.4004 9.2998 -26.1006 14
+c-3.7998 2.09961 -6.19922 1.5 -8.19922 -2.09961v-1.7002c1.19922 -1.60059 2.19922 -3.40039 3.69922 -4.7998c36 -32.2002 76.6006 -56.5 122 -72.9004c21.9004 -7.90039 44.4004 -13.7002 67.3008 -17.5c14 -2.2998 28 -3.7998 42.1992 -4.5
+c3 -0.0996094 6 -0.200195 9 -0.400391c0.700195 0 1.40039 -0.199219 2.10059 -0.299805h17.7002c0.699219 0.100586 1.39941 0.299805 2.09961 0.299805c14.9004 0.400391 29.7998 1.80078 44.5996 4c21.4004 3.2002 42.4004 8.10059 62.9004 14.7002
+c29.5996 9.60059 57.7002 22.4004 83.4004 40.1006c2.7998 1.89941 5.69922 3.7998 8 6.19922c4.2998 4.40039 2.2998 10.4004 -3.30078 11.9004zM544 136.7c-0.799805 4.2002 -4 5.7998 -7.59961 7c-5.7002 1.89941 -11.6006 2.7998 -17.6006 3.2998
+c-11 0.900391 -22 0.400391 -32.7998 -1.59961c-12 -2.2002 -23.4004 -6.10059 -33.5 -13.1006c-1.2002 -0.799805 -2.40039 -1.7998 -3.09961 -3c-0.600586 -0.899414 -0.700195 -2.2998 -0.5 -3.39941c0.299805 -1.30078 1.69922 -1.60059 3 -1.5
+c0.599609 0 1.19922 0 1.7998 0.0996094l19.5 2.09961c9.59961 0.900391 19.2002 1.5 28.7998 0.800781c4.09961 -0.300781 8.09961 -1.2002 12 -2.2002c4.2998 -1.10059 6.2002 -4.40039 6.40039 -8.7002c0.299805 -6.7002 -1.2002 -13.0996 -2.90039 -19.5
+c-3.5 -12.9004 -8.2998 -25.4004 -13.2998 -37.7998c-0.299805 -0.799805 -0.700195 -1.7002 -0.799805 -2.5c-0.400391 -2.5 1 -4 3.39941 -3.5c1.40039 0.299805 3 1.09961 4 2.09961c3.7002 3.60059 7.5 7.2002 10.6006 11.2002
+c10.6992 13.7998 17 29.5996 20.6992 46.5996c0.700195 3 1.2002 6.10059 1.7002 9.10059c0.200195 4.7002 0.200195 9.59961 0.200195 14.5z" />
+ <glyph glyph-name="ethereum" unicode="&#xf42e;" horiz-adv-x="320"
+d="M311.9 187.2l-151.9 -92.7998l-152 92.7998l152 260.8zM160 64.5996l152 92.8008l-152 -221.4l-152 221.4z" />
+ <glyph glyph-name="korvue" unicode="&#xf42f;" horiz-adv-x="446"
+d="M386.5 414c32.7002 0 59.5 -26.7998 59.5996 -59.5v-327c0 -32.7002 -26.5 -59.5 -59.5 -59.5h-327.1c-32.7002 0 -59.5 26.7998 -59.5 59.4004v327.1c0 32.7002 26.7998 59.5 59.5 59.5h327zM87.0996 327.2v-132h187.5l81.2002 132h-110.899l-61.8008 -116v116h-96z
+M248.9 55.0996h118.399l-88.5996 130.801h-191.5v-130.801h96v113.601z" />
+ <glyph glyph-name="elementor" unicode="&#xf430;"
+d="M425.6 416c12.4004 0 22.4004 -10 22.4004 -22.4004v-403.199c0 -12.4004 -10 -22.4004 -22.4004 -22.4004h-403.199c-12.4004 0 -22.4004 10 -22.4004 22.4004v403.199c0 12.4004 10 22.4004 22.4004 22.4004h403.199zM164.3 92.5v199h-39.7998v-199h39.7998z
+M323.6 92.5v39.7998h-119.5v-39.7998h119.5zM323.6 172.1v39.8008h-119.5v-39.8008h119.5zM323.6 251.8v39.7998h-119.5v-39.7998h119.5z" />
+ <glyph glyph-name="youtube-square" unicode="&#xf431;"
+d="M186.8 245.9l95.2002 -54.1006l-95.2002 -54.0996v108.2zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM406 191.7c0 0 0 59.5996 -7.59961 88.2002
+c-4.2002 15.7998 -16.5 28.1992 -32.2002 32.3994c-28.2998 7.7002 -142.2 7.7002 -142.2 7.7002s-113.9 0 -142.2 -7.7002c-15.7002 -4.2002 -28 -16.5996 -32.2002 -32.3994c-7.59961 -28.5 -7.59961 -88.2002 -7.59961 -88.2002s0 -59.6006 7.59961 -88.2002
+c4.2002 -15.7998 16.5 -27.7002 32.2002 -31.9004c28.2998 -7.59961 142.2 -7.59961 142.2 -7.59961s113.9 0 142.2 7.7002c15.7002 4.2002 28 16.0996 32.2002 31.8994c7.59961 28.5 7.59961 88.1006 7.59961 88.1006z" />
+ <glyph glyph-name="flipboard" unicode="&#xf44d;"
+d="M0 416h448v-448h-448v448zM358.4 236.8v89.6006h-268.801v-268.801h89.6006v89.6006h89.5996v89.5996h89.6006z" />
+ <glyph glyph-name="hips" unicode="&#xf452;" horiz-adv-x="640"
+d="M251.6 290.4v-201.801c0 -1.89941 -0.899414 -2.7998 -2.7998 -2.7998h-40.8994c-1.60059 0 -2.7002 1.40039 -2.7002 2.7998v201.801c0 1.39941 1.09961 2.7998 2.7002 2.7998h40.8994c1.90039 0 2.7998 -0.900391 2.7998 -2.7998zM156.5 280
+c18.7002 -13.5 28 -31.9004 28 -55.2998v-136.101c0 -1.89941 -0.900391 -2.7998 -2.7002 -2.7998h-27.2998c-9.09961 0 -16.4004 7.2998 -16.4004 16.2998v122.601c0 0.899414 2.7002 27 -45.7998 27c-48.5996 0 -45.7998 -26.2002 -45.7998 -27v-136.101
+c0 -1.89941 -0.900391 -2.7998 -2.7998 -2.7998h-41c-1.7998 0 -2.7002 0.900391 -2.7002 2.7998v279.2c0 1.7998 0.900391 2.7002 2.7002 2.7002h40.8994c1.90039 0 2.80078 -0.900391 2.80078 -2.7002v-81.2002c15.1992 7.7002 31.6992 11.5 49.7998 11.4004
+c24 -0.0996094 44.2002 -6.2002 60.2998 -18zM634.9 169.9c5.5 -12.6006 6.59961 -25.6006 3.09961 -39.1006c-9.59961 -36.8994 -44.9004 -45.5 -45.5996 -45.7998c-10.5 -3.09961 -23.6006 -4.2998 -36.3008 -4.2998c-16.5996 0 -32.5996 2.7002 -48.1992 8.2002
+c-9.7002 3.39941 -14.6006 10.2998 -14.6006 20.6992v34.4004c0 2.09961 2.2998 3.7002 4.40039 2.2998c13.7002 -10.2002 34.0996 -19.0996 58.3994 -19.0996c23.3008 0 32.8008 4.5 36.5 13.5996c3 7.90039 -0.599609 16.1006 -12.1992 21.2002l-53.6006 23.5
+c-21.3994 9.40039 -33.7998 24 -37.2002 43.5996c-5.69922 33.7002 22.2002 53.3008 22.7002 53.7002c13.2002 9.60059 32 15.4004 58.5 15.4004c19 0 37.4004 -3.2998 55.1006 -9.90039c1.2998 -0.5 1.89941 -1.2998 1.89941 -2.59961v-44.7002
+c0 -2.09961 -2.2998 -3.40039 -4 -2.40039c-39.7002 20.7002 -76.5996 12.3008 -84 6.80078c-6.59961 -4.90039 -6 -12.5 2.60059 -16.1006l57.5996 -25.2998c16.5 -7.09961 28.0996 -18.4004 34.9004 -34.0996zM376.2 298.2c60.3994 0 108.7 -48.2998 108.6 -108.601
+c0 -60.1992 -48.2002 -108.699 -108.7 -108.699c-21.8994 0 -41.1992 6.39941 -57.6992 19.0996v-88.7998c0 -1.7998 -0.900391 -2.7002 -2.80078 -2.7002h-40.8994c-2.10059 0 -2.7002 1.90039 -2.7002 2.7002v183.5c0 83.3994 72.5 103.5 104.2 103.5zM376.2 127.3
+c34.8994 0 62.2998 27.9004 62.2002 62.2002c0 34.5996 -27.7002 62.2002 -62.2002 62.2002c-34.6006 0 -62.2002 -27.7002 -62.2002 -62.2002c0 -17.2002 6 -31.7998 18.2002 -44c12.0996 -12.0996 26.7998 -18.2002 44 -18.2002zM228.3 375.5
+c15.9004 0 28.9004 -12.7002 28.9004 -28.9004c0 -15.7998 -12.7002 -28.8994 -28.9004 -28.8994s-28.8994 13.2998 -28.8994 28.8994c0.0996094 16 13 28.9004 28.8994 28.9004z" />
+ <glyph glyph-name="php" unicode="&#xf457;" horiz-adv-x="640"
+d="M320 343.5c-171.3 0 -303.2 -72.2002 -303.2 -151.5s131.8 -151.5 303.2 -151.5c171.3 0 303.2 72.2002 303.2 151.5s-131.8 151.5 -303.2 151.5zM320 360.3c176.7 0 320 -75.2998 320 -168.3s-143.3 -168.3 -320 -168.3s-320 75.2998 -320 168.3s143.3 168.3 320 168.3z
+M218.2 205.5c7.39941 38.4004 -18.4004 34.2998 -56.4004 34.2998l-13.7002 -70.5996c34.3008 0 62.2002 -4.2002 70.1006 36.2998zM97.4004 97.7002l32.6992 168.7h70.7002c21.2002 0 36.7998 -5.5 46.5 -16.7002c18.6006 -21.4004 11.7998 -64.1006 -14.2998 -88.1006
+c-23.5996 -22.0996 -49.0996 -19.0996 -90.2002 -19.0996l-8.7002 -44.7998h-36.6992zM283.1 311.3h36.5l-8.69922 -44.7998c31.5 0 60.6992 2.2998 74.7998 -10.7002c14.7998 -13.5996 7.7002 -31 -8.2998 -113.1h-37c15.3994 79.3994 18.2998 86 12.6992 92
+c-5.39941 5.7998 -17.6992 4.59961 -47.3994 4.59961l-18.7998 -96.5996h-36.5zM505 205.5c7.40039 38.4004 -18.2002 34.2998 -56.4004 34.2998l-13.6992 -70.5996c33.3994 0 62.0996 -4.7998 70.0996 36.2998zM384.2 97.7002l32.7998 168.7h70.7002
+c21.2002 0 36.7998 -5.5 46.5 -16.7002c18.5996 -21.4004 11.7998 -64.1006 -14.2998 -88.1006c-23.1006 -21.5996 -47 -19.0996 -90.2002 -19.0996l-8.7002 -44.7998h-36.7998z" />
+ <glyph glyph-name="quinscape" unicode="&#xf459;" horiz-adv-x="489"
+d="M301.6 -26.5996c4.40039 -4.40039 8.10059 -9 13.3008 -12.5c-22.6006 -6.90039 -45.6006 -10.1006 -70.4004 -10.1006c-135 0 -244.5 109.5 -244.5 244.601c0 135.1 109.5 244.6 244.5 244.6c135.1 0 244.6 -109.5 244.6 -244.6
+c0 -35.3008 -6.89941 -67.4004 -20.2998 -97.7002c-3 5.7002 -7.2002 10.2002 -11.2002 15.2998c11.2002 93.5 -62.0996 176.6 -157 176.6c-87.2998 0 -158.1 -70.7998 -158.1 -158.1s70.7998 -158.1 158.1 -158.1h1zM301.5 -26.5c0 0 0.0996094 -0.0996094 0 0
+l0.400391 -0.0996094zM379.9 142.4c54.7998 0 99.1992 -44.4004 99.1992 -99.2002s-44.3994 -99.2002 -99.1992 -99.2002c-54.8008 0 -99.2002 44.4004 -99.2002 99.2002s44.3994 99.2002 99.2002 99.2002z" />
+ <glyph glyph-name="readme" unicode="&#xf4d5;" horiz-adv-x="576"
+d="M528.3 401.5c26.4004 -0.200195 47.7002 -21.7002 47.7002 -48.0996v-245.7c0 -26.5 -21.5 -48 -48 -48h-89.7002c-102.1 0 -132.6 -24.4004 -147.3 -75c-0.799805 -2.7998 -5.2998 -2.7998 -6 0c-14.5996 50.5996 -45.0996 75 -147.3 75h-89.7002
+c-26.5 0 -48 21.5 -48 48v245.8c0 26.5 21.5 48 48 48h139.7c48.0996 0 89.7998 -33.2998 100.399 -80.2998c10.5 47 52.3008 80.2998 100.4 80.2998h139.8zM242 136.1h0.0996094v22.9004c0 2 -1.59961 3.5 -3.5 3.5h-160.399c-2 0 -3.5 -1.59961 -3.5 -3.5v-22.9004
+c0 -2 1.59961 -3.5 3.5 -3.5h160.3c2 0 3.5 1.60059 3.5 3.5zM242 197h0.0996094v22.9004c0 2 -1.59961 3.5 -3.5 3.5h-160.399c-2 0 -3.5 -1.60059 -3.5 -3.5v-22.9004c0 -2 1.59961 -3.5 3.5 -3.5h160.3c2 0 3.5 1.59961 3.5 3.5zM242 257.9h0.0996094v22.8994
+c0 2 -1.59961 3.5 -3.5 3.5h-160.399c-2 0 -3.5 -1.59961 -3.5 -3.5v-22.8994c0 -2 1.59961 -3.5 3.5 -3.5h160.3c2 0 3.5 1.59961 3.5 3.5zM501.3 136.2h0.100586v22.8994c0 2 -1.60059 3.5 -3.5 3.5h-160.4c-2 0 -3.5 -1.59961 -3.5 -3.5v-22.8994
+c0 -2 1.59961 -3.5 3.5 -3.5h160.3c2 0 3.5 1.59961 3.5 3.5zM501.3 197.1h0.100586v22.9004c0 2 -1.60059 3.5 -3.5 3.5h-160.4c-2 0 -3.5 -1.59961 -3.5 -3.5v-22.9004c0 -2 1.59961 -3.5 3.5 -3.5h160.3c2 0 3.5 1.60059 3.5 3.5zM501.3 258h0.100586v22.7998
+c0 2 -1.60059 3.5 -3.5 3.5h-160.4c-2 0 -3.5 -1.59961 -3.5 -3.5v-22.7998c0 -2 1.59961 -3.5 3.5 -3.5h160.3c2 0 3.5 1.59961 3.5 3.5z" />
+ <glyph glyph-name="java" unicode="&#xf4e4;" horiz-adv-x="377"
+d="M274.9 135.1c-94.5 -24.8994 -277 -13.2998 -224.5 12.1006c44.5 21.3994 80.5996 19 80.5996 19s-93.0996 -22.1006 -33 -30.1006c25.4004 -3.39941 76 -2.59961 123.1 1.30078c38.5 3.19922 77.2002 10.1992 77.2002 10.1992s-13.5996 -5.7998 -23.3994 -12.5z
+M189.5 167.2c-48.5 43.7998 -84.0996 82.2998 -60.2002 118.2c35.1006 52.5 132.2 78.0996 110.7 162.6c0 0 53.2002 -53.2002 -50.5 -135c-83.0996 -65.5996 -19 -103.1 0 -145.8zM304.1 343.4c-111.6 -64.7002 -91 -83.5 -64.0996 -121.301
+c28.7998 -40.5 -33.9004 -72.8994 -33.9004 -72.8994s31.2002 25.5996 6.5 54c-83.6992 96.3994 91.6006 140.2 91.5 140.2zM298 72.9004c96.0996 49.8994 51.5996 97.8994 20.5996 91.3994c-7.59961 -1.59961 -11 -3 -11 -3s2.80078 4.40039 8.2002 6.2998
+c61.2998 21.6006 108.5 -63.5996 -19.7998 -97.2998c0 0.100586 1.5 1.40039 2 2.60059zM345.2 10.5996c53 -23.8994 -115.2 -72 -319.4 -38.7998c-74.8994 12.1006 36.1006 54.5 56.4004 40.2002c0 0 -6.5 0.400391 -17.7002 -2
+c-10.7998 -2.2998 -45.0996 -13.4004 -26.7998 -21.2998c50.7998 -22.1006 233.7 -16.7998 291.6 0.700195c30.4004 9.2998 15.9004 21.1992 15.9004 21.1992zM121.6 52c0 0 -19.5996 -11.4004 13.9004 -15.2002c40.5996 -4.59961 61.2998 -4 106 4.5
+c0 0 11.7998 -7.39941 28.2002 -13.7998c-100.2 -42.9004 -226.8 2.5 -148.101 24.5zM301.4 -45.2002c69.7998 13.2002 76.1992 29.7002 76.1992 29.7002c-3.2998 -43.5996 -144.899 -52.7998 -237.1 -46.9004c-60.5996 3.90039 -72.4004 13.7002 -72.4004 13.6006
+c57.5 -9.5 154.601 -11.2002 233.301 3.59961zM257.8 95c0 0 8.2002 -8.2998 21.1006 -12.7998c-121.301 -35.5 -256.301 -2.90039 -169.5 25.8994c0 0 -21.9004 -16.1992 11.5996 -19.6992c43.2998 -4.5 77.5996 -4.80078 136.8 6.59961z" />
+ <glyph glyph-name="pied-piper-hat" unicode="&#xf4e5;" horiz-adv-x="640"
+d="M640 423.1c-80.7998 -53.5996 -89.4004 -92.5 -96.4004 -104.399c-6.69922 -12.2002 -11.6992 -60.2998 -23.2998 -83.6006c-11.7002 -23.5996 -54.2002 -42.1992 -66.0996 -50c-11.7002 -7.7998 -28.2998 -38.0996 -41.9004 -64.1992
+c-108.1 4.39941 -167.399 -38.8008 -259.2 -93.6006c29.4004 9.7002 43.3008 16.7002 43.3008 16.7002c94.1992 36 139.3 68.2998 281.1 49.2002c1.09961 0 1.90039 -0.600586 2.7998 -0.799805c3.90039 -2.2002 5.2998 -6.90039 3.10059 -10.8008l-53.9004 -95.7998
+c-2.5 -4.7002 -7.7998 -7.2002 -13.0996 -6.09961c-126.801 23.7998 -226.9 -17.2998 -318.9 -18.6006c-73.4004 -1.09961 -97.5 33.5 -97.5 35.1006c0 1.09961 0.599609 1.7002 1.7002 1.7002c0 0 38.2998 0 103.1 15.2998c73.6006 140.3 139.2 189.399 210.601 189.399
+c0 0 71.6992 0 90.5996 -61.8994c22.7998 39.7002 28.2998 49.2002 28.2998 49.2002c5.2998 9.39941 35 77.1992 86.4004 141.399c51.5 64 90.3994 79.9004 119.3 91.7998z" />
+ <glyph glyph-name="creative-commons-by" unicode="&#xf4e7;" horiz-adv-x="496"
+d="M314.9 253.6v-101.399h-28.3008v-120.5h-77.0996v120.399h-28.2998v101.5c0 4.40039 1.59961 8.2002 4.59961 11.3008c3.10059 3.09961 6.90039 4.69922 11.2998 4.69922h101.9c4.09961 0 7.7998 -1.59961 11.0996 -4.69922
+c3.10059 -3.2002 4.80078 -6.90039 4.80078 -11.3008zM213.4 317.3c0 23.2998 11.5 35 34.5 35s34.5 -11.7002 34.5 -35c0 -23 -11.5 -34.5 -34.5 -34.5s-34.5 11.5 -34.5 34.5zM247.6 440c141.801 0 248.4 -110.1 248.4 -248c0 -147.1 -118.5 -248 -248.4 -248
+c-134 0 -247.6 109.5 -247.6 248c0 132.9 104.7 248 247.6 248zM248.4 395.3c-118.2 0 -203.7 -97.8994 -203.7 -203.3c0 -109.8 91.2002 -202.8 203.7 -202.8c103.199 0 202.8 81.0996 202.8 202.8c0.0996094 113.8 -90.2002 203.3 -202.8 203.3z" />
+ <glyph glyph-name="creative-commons-nc" unicode="&#xf4e8;" horiz-adv-x="496"
+d="M247.6 440c139.801 0 248.4 -107.9 248.4 -248c0 -147.2 -118.5 -248 -248.4 -248c-134.5 0 -247.6 110.8 -247.6 248c0 132.9 104.7 248 247.6 248zM55.7998 258.9c-7.39941 -20.4004 -11.0996 -42.7002 -11.0996 -66.9004c0 -110.9 92.0996 -202.4 203.7 -202.4
+c122.399 0 177.199 101.801 178.5 104.101l-93.4004 41.5996c-7.7002 -37.0996 -41.2002 -53 -68.2002 -55.3994v-38.1006h-28.7998v38.2002c-27.5 0.299805 -52.5996 10.2002 -75.2998 29.7002l34.0996 34.5c31.7002 -29.4004 86.4004 -31.7998 86.4004 2.2002
+c0 6.19922 -2.2002 11.1992 -6.60059 15.0996c-14.1992 6 -1.7998 0.0996094 -219.3 97.4004zM248.4 395.7c-38.4004 0 -112.4 -8.7002 -170.5 -93l94.7998 -42.5c10 31.2998 40.3994 42.8994 63.7998 44.2998v38.0996h28.7998v-38.0996
+c22.7002 -1.2002 43.4004 -8.90039 62 -23l-32.2998 -33.2002c-42.7002 29.9004 -83.5 8 -70 -11.0996c53.4004 -24.1006 43.7998 -19.7998 93 -41.6006l127.1 -56.6992c4.10059 17.3994 6.2002 35.0996 6.2002 53.0996c0 57 -19.7998 105 -59.2998 143.9
+c-39.2998 39.8994 -87.2002 59.7998 -143.6 59.7998z" />
+ <glyph glyph-name="creative-commons-nc-eu" unicode="&#xf4e9;" horiz-adv-x="496"
+d="M247.7 440c140.7 0 248.3 -109 248.3 -248c0 -147.1 -118.1 -248 -248.3 -248c-136 0 -247.7 111.7 -247.7 248c0 131.2 103.6 248 247.7 248zM248.3 -10.7002c122.601 0 177.3 102.2 178.8 104.3l-128.3 56.8008h-90.2998
+c9.2002 -39.3008 39.0996 -50.2002 67.2998 -50.2002c19.1006 0 38.6006 6.2002 47.2998 10.7998l10 -46.0996c-14.1992 -7.90039 -38.1992 -15.8008 -64.7998 -15.8008c-57.3994 0 -113.2 34.3008 -124.6 101.301h-27v29.5h22.7998
+c0 16.2998 0.400391 13.2998 0.400391 19.5h-23.3008v29.5h4.7002l-65.7002 29.0996c-7.19922 -20.7998 -10.8994 -42.7998 -10.8994 -66c0 -110.2 91.5996 -202.7 203.6 -202.7zM231.6 179.9l-0.5 0.399414l0.900391 -0.399414h-0.400391zM308.8 199.4l136.101 -60.5
+c4.19922 16.5996 6.2998 34.1992 6.2998 52.8994c0 113.2 -90 203.4 -203 203.4c-13 0 -106.101 3.2002 -170.7 -93.6006l81.5996 -36.0996c4.10059 7.2002 8.60059 14 13.9004 20.0996c23.7002 26.5 56.9004 42.3008 95.9004 42.3008
+c25.2998 0 47.2998 -5.80078 62.2998 -12.4004l-11.6006 -47.2998c-10.7998 4.59961 -27.7998 10 -46.0996 10c-20 0 -38.2002 -6.60059 -51.0996 -22.4004c-3.40039 -3.7998 -6.30078 -8.7998 -8.80078 -14.2998l28.6006 -12.5996h70.2998v-29.5h-3.7002z" />
+ <glyph glyph-name="creative-commons-nc-jp" unicode="&#xf4ea;" horiz-adv-x="496"
+d="M247.7 440c140.8 0 248.3 -109.2 248.3 -248c0 -147.2 -118.1 -248 -248.3 -248c-135.9 0 -247.7 111.6 -247.7 248c0 131.2 103.6 248 247.7 248zM248.3 -10.7002c118.101 0 173.7 96.1006 175.2 98.2998l-81 36.1006v-35.7002h-64.2002v-56h-61.7002v56h-63.7998
+v38.7002h63.7998v18.7002l-5.69922 11.7998h-58.1006v38.5996h27.9004l-127 56.5c-6 -19.0996 -9 -39.2002 -9 -60.2998c0 -110.2 91.5996 -202.7 203.6 -202.7zM335.9 126.6l-54.7002 24.3008l-2.90039 -5.60059v-18.7002h57.6006zM342.4 178l101 -45.0996
+c5.19922 18.3994 7.89941 38 7.89941 59c0 113.399 -90.2002 203.399 -203 203.399c-91.0996 0 -145.899 -54 -173.7 -98.0996l81.9004 -36.5l-27.2998 51h65.7998l39.5996 -85.7002l23 -10.2002l43.4004 96h65.7998l-63 -116h38.6006v-17.7998z" />
+ <glyph glyph-name="creative-commons-nd" unicode="&#xf4eb;" horiz-adv-x="496"
+d="M247.6 440c141.801 0 248.4 -110.1 248.4 -248c0 -147.1 -118.5 -248 -248.4 -248c-134 0 -247.6 109.5 -247.6 248c0 132.9 104.7 248 247.6 248zM248.4 395.3c-118.2 0 -203.7 -97.8994 -203.7 -203.3c0 -109.8 91.2002 -202.8 203.7 -202.8
+c103.199 0 202.8 81.0996 202.8 202.8c0.0996094 113.8 -90.2002 203.3 -202.8 203.3zM342.4 251v-42.5h-180.301v42.5h180.301zM342.4 171.2v-42.5h-180.301v42.5h180.301z" />
+ <glyph glyph-name="creative-commons-pd" unicode="&#xf4ec;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111.1 248 -248c0 -137 -111 -248 -248 -248s-248 111 -248 248c0 136.9 111 248 248 248zM248 -9.5c76.0996 0 142.4 42.4004 176.7 104.8c-1.40039 0.299805 12.5 -5.7998 -217.9 96.7998c0.200195 -32 16.1006 -71.8994 53.9004 -71.8994
+c18.7002 0 30.7998 10.3994 36.2998 16.7002l36.0996 -43.9004c-25.8994 -22.7998 -56.5 -29.5 -79.3994 -29.5c-46.5 0 -120.4 27.9004 -120.4 126.9c0 11.3994 1.2002 22.3994 3.2998 32.8994l-78.7998 35.1006c-45.5996 -129.9 51 -267.9 190.2 -267.9zM442.2 140.5
+c0.200195 -0.200195 0.299805 -0.299805 0.599609 -0.400391c4.40039 16.6006 6.7998 34 6.7998 52c0 111.101 -90.3994 201.5 -201.5 201.5c-70.3994 0 -132.399 -36.2998 -168.5 -91.1992l74.9004 -33.4004c19.7998 31.0996 53.2998 51.5996 100.7 51.5996
+c20.0996 0 51 -4.19922 78.0996 -27.5l-40.3994 -41.5996c-19.8008 19.7002 -55.9004 23 -74.7002 -11z" />
+ <glyph glyph-name="creative-commons-pd-alt" unicode="&#xf4ed;" horiz-adv-x="496"
+d="M247.6 440c141.801 0 248.4 -110.1 248.4 -248c0 -147.1 -118.5 -248 -248.4 -248c-134 0 -247.6 109.5 -247.6 248c0 132.9 104.7 248 247.6 248zM248.4 -10.7998c103.199 0 202.8 81.0996 202.8 202.8c0.0996094 113.8 -90.2002 203.3 -202.8 203.3
+c-118.2 0 -203.7 -97.8994 -203.7 -203.3c0 -109.8 91.2002 -202.8 203.7 -202.8zM316.7 262c21.3994 0 70 -5.2002 70 -68.5996c0 -63.5 -48.6006 -68.6006 -70 -68.6006h-53.2002v137.2h53.2002zM317.5 153.5c24 0 34.5 15.2998 34.5 39.9004
+c0 42 -31.2002 39.8994 -35 39.8994l-19.4004 -0.0996094v-79.7002h19.9004zM203.7 262c33.7002 0 50.5 -15.5 50.5 -46.5c0 -9 -3 -46.5 -57.1006 -46.5h-27v-44.2998h-34.5996v137.3h68.2002zM198.8 194.7c27.9004 0 30 41.5996 -0.899414 41.5996h-28.3008v-41.5996
+h29.2002z" />
+ <glyph glyph-name="creative-commons-remix" unicode="&#xf4ee;" horiz-adv-x="496"
+d="M247.6 440c141.801 0 248.4 -110.1 248.4 -248c0 -147.1 -118.5 -248 -248.4 -248c-134 0 -247.6 109.5 -247.6 248c0 132.9 104.7 248 247.6 248zM248.4 395.3c-118.2 0 -203.7 -97.8994 -203.7 -203.3c0 -109.8 91.2002 -202.8 203.7 -202.8
+c103.199 0 202.8 81.0996 202.8 202.8c0.0996094 113.8 -90.2002 203.3 -202.8 203.3zM410.1 187.6l4.90039 -2.19922v-70c-7.2002 -3.60059 -63.4004 -27.5 -67.2998 -28.8008c-6.5 1.80078 -113.7 46.8008 -137.3 56.2002l-64.2002 -26.5996l-63.2998 27.5v63.7998
+l59.2998 24.7998c-0.700195 0.700195 -0.400391 -5 -0.400391 70.4004l67.2998 29.7002l151.9 -62.9004v-61.5996zM339.7 106.1v43.8008h-0.400391v1.7998l-113.8 46.5v-45.2002l113.8 -46.9004v0.400391zM347.2 163.7l39.8994 16.3994l-36.7998 15.5l-39 -16.3994z
+M399.5 125.6v43l-44.2998 -18.5996v-43.4004z" />
+ <glyph glyph-name="creative-commons-sa" unicode="&#xf4ef;" horiz-adv-x="496"
+d="M247.6 440c141.801 0 248.4 -110.1 248.4 -248c0 -147.1 -118.5 -248 -248.4 -248c-134 0 -247.6 109.5 -247.6 248c0 132.9 104.7 248 247.6 248zM248.4 395.3c-118.2 0 -203.7 -97.8994 -203.7 -203.3c0 -109.8 91.2002 -202.8 203.7 -202.8
+c103.199 0 202.8 81.0996 202.8 202.8c0.0996094 113.8 -90.2002 203.3 -202.8 203.3zM137.7 227c13 83.9004 80.5 95.7002 108.899 95.7002c99.8008 0 127.5 -82.5 127.5 -134.2c0 -63.5996 -41 -132.9 -128.899 -132.9c-38.9004 0 -99.1006 20 -109.4 97h62.5
+c1.5 -30.0996 19.6006 -45.1992 54.5 -45.1992c23.2998 0 58 18.1992 58 82.7998c0 82.5 -49.0996 80.5996 -56.7002 80.5996c-33.0996 0 -51.6992 -14.5996 -55.7998 -43.7998h18.2002l-49.2002 -49.2002l-49 49.2002h19.4004z" />
+ <glyph glyph-name="creative-commons-sampling" unicode="&#xf4f0;" horiz-adv-x="496"
+d="M247.6 440c141.801 0 248.4 -110.1 248.4 -248c0 -147.1 -118.5 -248 -248.4 -248c-134 0 -247.6 109.5 -247.6 248c0 132.9 104.7 248 247.6 248zM248.4 395.3c-118.2 0 -203.7 -97.8994 -203.7 -203.3c0 -109.8 91.2002 -202.8 203.7 -202.8
+c103.199 0 202.8 81.0996 202.8 202.8c0.0996094 113.8 -90.2002 203.3 -202.8 203.3zM252 342.1c2.7998 0.300781 11.5 -1 11.5 -11.5l6.59961 -107.199l4.90039 59.2998c0 6 4.7002 10.5996 10.5996 10.5996c5.90039 0 10.6006 -4.7002 10.6006 -10.5996
+c0 -2.5 -0.5 5.7002 5.7002 -81.5l5.7998 64.2002c0.299805 2.89941 2.89941 9.2998 10.2002 9.2998c3.7998 0 9.89941 -2.2998 10.5996 -8.90039l11.5 -96.5l5.2998 12.7998c1.7998 4.40039 5.2002 6.60059 10.2002 6.60059h58v-21.2998h-50.9004l-18.1992 -44.3008
+c-3.90039 -9.89941 -19.5 -9.09961 -20.8008 3.10059l-4 31.8994l-7.5 -92.5996c-0.299805 -3 -3 -9.2998 -10.1992 -9.2998c-3 0 -9.80078 2.09961 -10.6006 9.2998c0 1.90039 0.600586 -5.7998 -6.2002 77.9004l-5.2998 -72.2002
+c-1.09961 -4.7998 -4.7998 -9.2998 -10.5996 -9.2998c-2.90039 0 -9.7998 2 -10.6006 9.2998c0 1.89941 0.5 -6.7002 -5.7998 87.7002l-5.7998 -94.8008c0 -6.2998 -3.59961 -12.3994 -10.5996 -12.3994c-5.2002 0 -10.6006 4.09961 -10.6006 12l-5.7998 87.7002
+c-5.7998 -92.5 -5.2998 -84 -5.2998 -85.9004c-1.10059 -4.7998 -4.7998 -9.2998 -10.6006 -9.2998c-3 0 -9.7998 2.09961 -10.5996 9.2998c0 0.700195 -0.400391 1.09961 -0.400391 2.59961l-6.19922 88.6006l-4.90039 -56.7002
+c-0.700195 -6.5 -6.7002 -9.2998 -10.5996 -9.2998c-5.80078 0 -9.60059 4.09961 -10.6006 8.89941l-11.0996 76.4004c-2 -4 -3.5 -8.40039 -11.1006 -8.40039h-51.3994v21.3008h44.7998l13.7002 27.8994c4.39941 9.90039 18.2002 7.2002 19.8994 -2.7002l3.10059 -20.3994
+l8.39941 97.8994c0 6 4.80078 10.6006 10.6006 10.6006c0.5 0 10.5996 0.200195 10.5996 -12.4004l4.90039 -69.0996l6.59961 92.5996c0 10.1006 9.5 10.6006 10.2002 10.6006c0.599609 0 10.5996 -0.700195 10.5996 -10.6006l5.30078 -80.5996l6.19922 97.8994
+c0.100586 1.10059 -0.599609 10.3008 9.90039 11.5z" />
+ <glyph glyph-name="creative-commons-sampling-plus" unicode="&#xf4f1;" horiz-adv-x="496"
+d="M247.6 440c141.801 0 248.4 -110.1 248.4 -248c0 -147.1 -118.5 -248 -248.4 -248c-134 0 -247.6 109.5 -247.6 248c0 132.9 104.7 248 247.6 248zM248.4 395.3c-118.2 0 -203.7 -97.8994 -203.7 -203.3c0 -109.8 91.2002 -202.8 203.7 -202.8
+c103.199 0 202.8 81.0996 202.8 202.8c0.0996094 113.8 -90.2002 203.3 -202.8 203.3zM355.4 189.7l58.3994 0.299805v-23.2002h-50.5l-18 -43.3994c-4.59961 -11 -20.8994 -8.7002 -22.2998 3.09961l-2.7002 22.2998l-6.7998 -83
+c-1.09961 -14.0996 -22 -14.2002 -23.0996 0.100586l-4.90039 64.3994l-4.59961 -58.5996c-1.10059 -14.2998 -22.3008 -14.1006 -23.2002 0.200195l-4.5 71.7998l-4.90039 -80.5c-0.899414 -14.5 -22.2998 -14.5 -23.2002 -0.100586l-4.7998 73.3008l-4.59961 -70.4004
+c-0.900391 -14.2998 -22.1006 -14.5 -23.2002 -0.0996094l-5.7002 78.2998l-3.7998 -43.6006c-1.2002 -13.6992 -21.0996 -14.1992 -23.0996 -0.699219l-10.7002 73.0996c-2 -3.90039 -6 -6.40039 -10.4004 -6.40039h-51.2998v23.2002h43.9004l13.1992 27.7002
+c4.90039 10.2998 20.3008 8.09961 22 -3.2998l1.80078 -12.2002l7.69922 89.7998c1.2002 14.1006 22.1006 14.1006 23.2002 -0.200195l4.10059 -57l5.2998 80.2002c1 14.4004 22.2998 14.4004 23.2002 0l4.2998 -66.2998l5.09961 83.7002
+c0.900391 14.3994 22.2998 14.5 23.2002 0l5.90039 -94.2998l3.5 44.8994c1.09961 14.2002 22.0996 14.2998 23.1992 0l5.2002 -68.7998l4.2998 51.4004c1.10059 13.7998 21.4004 14.2998 23.1006 0.399414l11 -92.7998l4 9.5c1.7002 4.40039 6 7.2002 10.7002 7.2002z
+M277.4 184.5c4.09961 0 7.5 3.40039 7.5 7.5c0 4.2002 -3.40039 7.5 -7.5 7.5h-21.9004v21.9004c0 4.19922 -3.40039 7.5 -7.5 7.5s-7.5 -3.40039 -7.5 -7.5v-21.9004h-21.9004c-4.09961 0 -7.5 -3.40039 -7.5 -7.5c0 -4.2002 3.40039 -7.5 7.5 -7.5h21.9004v-21.9004
+c0 -4.19922 3.40039 -7.5 7.5 -7.5c4.2002 0 7.5 3.40039 7.5 7.5v21.9004h21.9004z" />
+ <glyph glyph-name="creative-commons-share" unicode="&#xf4f2;" horiz-adv-x="496"
+d="M247.6 440c141.801 0 248.4 -110.1 248.4 -248c0 -147.1 -118.5 -248 -248.4 -248c-134 0 -247.6 109.5 -247.6 248c0 132.9 104.7 248 247.6 248zM248.4 395.3c-118.2 0 -203.7 -97.8994 -203.7 -203.3c0 -109.8 91.2002 -202.8 203.7 -202.8
+c103.199 0 202.8 81.0996 202.8 202.8c0.0996094 113.8 -90.2002 203.3 -202.8 203.3zM349.4 262.9c7.7998 0 13.6992 -6.10059 13.6992 -13.7002v-182.5c0 -7.7002 -6.09961 -13.7002 -13.6992 -13.7002h-135.101c-7.7002 0 -13.7002 6 -13.7002 13.7002v54h-54
+c-7.7998 0 -13.6992 6 -13.6992 13.7002v182.5c0 8.19922 6.59961 12.6992 12.3994 13.6992h136.4c7.7002 0 13.7002 -6 13.7002 -13.6992v-54h54zM159.9 147.7h40.6992v101.399c0 7.40039 5.80078 12.6006 12 13.7002h55.8008v40.2998h-108.5v-155.399zM336.1 235.8h-108.5
+v-155.399h108.5v155.399z" />
+ <glyph glyph-name="creative-commons-zero" unicode="&#xf4f3;" horiz-adv-x="496"
+d="M247.6 440c141.801 0 248.4 -110.1 248.4 -248c0 -147.1 -118.5 -248 -248.4 -248c-134 0 -247.6 109.5 -247.6 248c0 132.9 104.7 248 247.6 248zM248.4 395.3c-118.2 0 -203.7 -97.8994 -203.7 -203.3c0 -109.8 91.2002 -202.8 203.7 -202.8
+c103.199 0 202.8 81.0996 202.8 202.8c0.0996094 113.8 -90.2002 203.3 -202.8 203.3zM248 334.8c81.9004 0 102.5 -77.2998 102.5 -142.8s-20.5996 -142.8 -102.5 -142.8s-102.5 77.2998 -102.5 142.8s20.5996 142.8 102.5 142.8zM248 280.9
+c-42.0996 0 -44.0996 -60.1006 -44.0996 -88.9004c0 -9.2998 0.199219 -21.7002 1.89941 -34.4004l54.5 100.2c5.7002 9.7998 2.7998 16.7998 -3.09961 21.9004c-2.7998 0.700195 -5.90039 1.2002 -9.2002 1.2002zM288.8 234.7l-60.8994 -105.2
+c-12.5 -18.7002 6.59961 -26.4004 20.0996 -26.4004c42.0996 0 44.0996 60 44.0996 88.9004c0 11.2998 -0.399414 27.2998 -3.2998 42.7002z" />
+ <glyph glyph-name="ebay" unicode="&#xf4f4;" horiz-adv-x="640"
+d="M606 258.5h34l-99.2002 -194.8h-35.8994l28.5 54.0996l-61.5 116.101c3.09961 -6.60059 4.7998 -14.5 4.7998 -23.8008v-65.5996c0 -9.2998 0.299805 -18.5996 1 -26.7998h-29.7998c-0.800781 6.89941 -1.10059 13.5996 -1.10059 20.2002
+c-16.0996 -19.8008 -35.2998 -25.5 -61.8994 -25.5c-39.5 0 -60.6006 20.8994 -60.6006 45c0 3.19922 0.200195 6.19922 0.700195 9c-8.40039 -32.3008 -36.4004 -54.2002 -73.2998 -54.2002c-23.2998 0 -45.1006 8.2998 -58.7002 24.8994
+c0 -6.59961 -0.400391 -13.1992 -1.09961 -19.5h-31.5c0.5 10.2002 1.09961 22.8008 1.09961 33.1006v169.5h32.0996v-80.6006c15.7002 18.7002 37.4004 24.2002 58.7002 24.2002c35.7002 0 75.4004 -24.0996 75.4004 -76.2002c0 -5.59961 -0.5 -11 -1.5 -16.1992
+c7.09961 24.3994 34.2998 33.5 76.7002 34.3994c13.6992 0.299805 29 0.400391 41.6992 0.400391v3.39941c0 23.4004 -15 33 -41 33c-19.2998 0 -33.5996 -8 -35 -21.7998h-33.6992c3.59961 34.4004 39.6992 43.1006 71.5 43.1006c27.3994 0 51.7998 -7 63.2998 -26
+l-10.9004 20.5996h37.5l54.9004 -109.9zM243.7 134.2c29.7998 0 50.2002 21.5 50.2002 53.7998c0 32.4004 -20.4004 53.7998 -50.2002 53.7998c-29.6006 0 -50.2002 -21.3994 -50.2002 -53.7998c0 -32.2998 20.5996 -53.7998 50.2002 -53.7998zM444.6 181.5v3.2998
+c-11.7998 0 -26.2998 -0.0996094 -39.3994 -0.599609c-29.1006 -0.900391 -47.2002 -6.2002 -47.2002 -25.2998c0 -12.4004 9.90039 -25.8008 35 -25.8008c33.7002 0 51.5996 18.4004 51.5996 48.4004zM32.7002 179.9c3.5 -58.3008 79.2002 -57.4004 91.2002 -21.6006
+h33.0996c-6.40039 -34.3994 -43 -46.0996 -74.4004 -46.0996c-57.1992 0 -82.5 31.5 -82.5 74c0 46.7998 26.2002 77.5996 83 77.5996c45.3008 0 78.4004 -23.7002 78.4004 -75.3994v-8.5h-128.8zM127.7 201.3c-2.2998 54.7002 -87.5 56.6006 -94.4004 0h94.4004z" />
+ <glyph glyph-name="keybase" unicode="&#xf4f5;" horiz-adv-x="412"
+d="M177.2 17.0996c0 -9.7998 -8 -17.7998 -17.7998 -17.7998c-9.80078 0 -17.8008 8 -17.8008 17.7998c0 9.80078 8 17.8008 17.8008 17.8008c9.7998 0.0996094 17.7998 -7.90039 17.7998 -17.8008zM270 35c9.7998 0 17.7998 -8 17.7998 -17.7998
+s-8 -17.7998 -17.7998 -17.7998s-17.7998 8 -17.7998 17.7998s8 17.7998 17.7998 17.7998zM412.3 71c0 -38.9004 -7.59961 -73.9004 -22.2002 -103h-27.2998c23.5 38.7002 30.5 94.7998 22.4004 134.3c-16.1006 -29.5 -52.1006 -38.5996 -85.9004 -28.7998
+c-127.8 37.5 -192.5 -19.7002 -234.6 -50.2998l18.8994 59.2998l-39.8994 -42.2998c4.7998 -26.7002 15.7002 -51.2998 31.2002 -72.2998h-28.8008c-9.69922 15.7998 -17.1992 33 -22.1992 51.2998l-23.8008 -25.2002c0 74.9004 -5.5 147.6 61.5 215.2
+c20.2002 20.3994 43.7002 36.2002 69.1006 46.7002c-6.7998 13.5 -9.5 29.1992 -7.7998 46l-19.9004 1.19922c-17.9004 1.10059 -31.5996 16.5 -30.5996 34.4004v0.0996094l1.59961 26.2002c1.09961 17.1006 15.4004 30.6006 32.5 30.6006
+c1.2998 0 -0.299805 0.0996094 28.2002 -1.7002c13.8994 -0.799805 21.5 -9.7998 22.7998 -11.4004c7.09961 10.4004 14.5 20.5 24.5996 34.5l20.6006 -12.0996c-13.6006 -29 -9.10059 -36.2002 -9 -36.2998c3.89941 0 13.8994 0.5 32.3994 -5.7002
+c19.9004 -6.60059 35.9004 -20.7002 44.9004 -39.7002c0.400391 -0.900391 15.5 -29 1.2002 -62.5996c19 -6.10059 51.2998 -19.9004 82.3994 -51.8008c36.6006 -37.5996 57.7002 -87.3994 57.7002 -136.6zM128 325.7c3.2002 10 7.7002 19.7002 13.0996 29.3994
+c0.100586 2 2.2002 13.1006 -7.7998 13.8008c-28.5 1.7998 -26.2998 1.59961 -26.7002 1.59961c-4.59961 0 -8.2998 -3.5 -8.59961 -8.09961l-1.59961 -26.2002c-0.300781 -4.7002 3.39941 -8.7998 8.09961 -9.10059zM153.8 263.9
+c5.60059 -9.40039 14.1006 -16.1006 22.2998 -20c0 21.1992 28.5 41.8994 52.8008 17.5l8.39941 -10.3008c20.7998 18.8008 19.4004 45.3008 12.1006 60.9004c-13.8008 29.0996 -46.9004 32 -54.3008 31.7002c-10.2998 -0.400391 -19.6992 5.39941 -23.6992 15.2998
+c-13.7002 -21.2002 -37.2002 -62.5 -17.6006 -95.0996zM236.7 195.5l-19.7002 -16.0996c-1.90039 -1.60059 -2.2002 -4.40039 -0.599609 -6.30078l8.89941 -10.8994c1 -1.2002 3.7998 -2.7002 6.2998 -0.600586l19.6006 16l5.5 -6.7998c4.89941 -6 13.7998 1.40039 9 7.2998
+c-63.6006 78.3008 -41.5 51.1006 -55.2998 68.1006c-4.7002 6 -13.9004 -1.40039 -9 -7.2998c1.89941 -2.30078 18.3994 -22.6006 19.7998 -24.3008l-9.60059 -7.89941c-4.59961 -3.7998 2.60059 -13.2998 7.40039 -9.40039l9.7002 8zM355.1 169.8
+c-16.8994 23.7002 -42.5996 46.7002 -73.3994 60.4004c-7.90039 3.5 -15 6.09961 -22.9004 8.59961c-2 -2.2002 -4.09961 -4.2998 -6.39941 -6.2002l31.8994 -39.1992c10.4004 -12.7002 8.5 -31.5 -4.2002 -41.9004c-1.2998 -1.09961 -13.0996 -10.7002 -29 -4.90039
+c-2.89941 -2.2998 -10.0996 -9.89941 -22.1992 -9.89941c-8.60059 0 -16.6006 3.7998 -22.1006 10.5l-8.89941 10.8994c-6.30078 7.80078 -7.90039 17.9004 -5 26.8008c-8.2002 9.89941 -8.30078 21.2998 -4.60059 30c-7.2002 1.2998 -26.7002 6.19922 -42.7002 21.3994
+c-55.7998 -20.7002 -88 -64.3994 -101.3 -91.2002c-14.8994 -30.1992 -18.7998 -60.8994 -19.8994 -90.1992c8.19922 8.69922 -3.90039 -4.10059 114 120.899l-29.9004 -93.5996c57.7998 31.0996 124 36 197.4 14.3994c23.5996 -6.89941 45.0996 -1.59961 56 13.9004
+c11.0996 15.5996 8.5 37.7002 -6.80078 59.2998zM110.6 340.7l1 15.5996l15.6006 -1l-1 -15.5996z" />
+ <glyph glyph-name="mastodon" unicode="&#xf4f6;" horiz-adv-x="417"
+d="M417.8 268.9c0 0 0.799805 -71.7002 -9 -121.5c-6.2002 -31.6006 -55.0996 -66.2002 -111.2 -72.9004c-20.0996 -2.40039 -93.0996 -14.2002 -178.699 6.7002c0 -4.90039 0.299805 -9.5 0.899414 -13.9004c6.60059 -49.5996 49.2002 -52.5996 89.6006 -54
+c40.7998 -1.2998 77.0996 10.1006 77.0996 10.1006l1.7002 -36.9004s-28.5 -15.2998 -79.2998 -18.0996c-28 -1.60059 -62.8008 0.699219 -103.301 11.3994c-112.199 29.7002 -105.6 173.4 -105.6 289.101c0 97.1992 63.7002 125.699 63.7002 125.699
+c61.8994 28.4004 227.899 28.7002 290.399 0c0 0 63.7002 -28.5 63.7002 -125.699zM342.7 143.7c0 122 5.2998 147.7 -18.4004 175c-25.7002 28.7002 -79.7002 31 -103.8 -6.10059l-11.5996 -19.5l-11.6006 19.5c-24 36.9004 -77.8994 35 -103.8 6.10059
+c-23.5996 -27.1006 -18.4004 -52.9004 -18.4004 -175h46.7002v114.2c0 49.6992 64 51.5996 64 -6.90039v-62.5h46.2998v62.5c0 58.5 64 56.5996 64 6.90039v-114.2h46.6006z" />
+ <glyph glyph-name="r-project" unicode="&#xf4f7;" horiz-adv-x="581"
+d="M581 221.4c0 -54.8008 -33.9004 -104.301 -88.4004 -139.7l67.4004 -113.7h-112l-40.0996 75.4004c-21.8008 -6.5 -45.1006 -11.2002 -69.4004 -13.9004v-61.5h-99.0996v61.9004c-136.101 16.0996 -239.4 95.6992 -239.4 191.5c0 107.5 130.1 194.6 290.5 194.6
+s290.5 -87.0996 290.5 -194.6zM114.2 206.9c0 -52.8008 51.0996 -98.4004 125.2 -119.9v208.3h199s90.5996 -1.59961 90.5996 -87.8994c0 -86.3008 -86.5996 -92.7002 -86.5996 -92.7002s17.5996 -5.2998 27.7998 -10.5c1.7002 -0.799805 4 -2.10059 6.39941 -3.7002
+c43.8008 21.4004 70.3008 56.2998 70.3008 106.4c0 92.2998 -90 133 -211.9 133s-220.8 -59.5 -220.8 -133zM339.3 168.6c49.6006 0 87.7998 -8.19922 87.7998 28.3008c0 34.0996 -30 27.2998 -87.7998 27.2998v-55.6006zM338.4 96.0996v-22.0996
+c17.5996 0.0996094 34.5 1 50.5996 2.90039c-5.09961 7.5 -13.2002 19.1992 -24 19.1992h-26.5996z" />
+ <glyph glyph-name="researchgate" unicode="&#xf4f8;"
+d="M0 416h448v-448h-448v448zM262.2 81.5996v7.30078c-10 0 -20 6.89941 -27.2002 14.6992c-12.2002 13.3008 -28.5996 34.7002 -42.2002 58.9004c22.5 5.2998 39.2002 26.4004 39.2002 47.5c0 31.2002 -24.2002 45.5996 -55.9004 45.5996
+c-17.7998 0 -45.0996 -1.59961 -70.8994 -0.599609v-8.09961c15.5996 -2.90039 22 -1.30078 22 -23.9004v-109.4c0 -22.5996 -6.5 -21 -22 -23.8994v-8.10059c7.5 0.200195 20.5 0.800781 33.5996 0.800781c12.5 0 28.7002 -0.5 35.6006 -0.800781v8.10059
+c-19.8008 2.7002 -25.8008 0.399414 -25.8008 23.8994v46.4004c6.7002 -0.599609 12.5 -0.599609 21.4004 -0.599609c16.9004 -30.3008 33 -53 42.2002 -63.6006c16.7998 -20.2002 43.3994 -17.2002 50 -14.2002zM285.1 216.6c38.7002 0 34 29.4004 34 49.9004h-30.3994
+v-10.7002h17.8994c0 -15.8994 -7.39941 -26.7998 -21.5 -26.7998c-11.2998 0 -17.8994 9.90039 -17.8994 23.2998v26.7998c0 12.4004 11.7998 19.7002 19.7002 19.7002c14.1992 0 19.6992 -12.5 19.6992 -12.5l10.7002 7.2002s-5.2002 17.9004 -30.3994 17.9004
+c-25.2002 0 -34 -18.2002 -34 -30.4004v-32.2002c0 -16.5 8.89941 -32.2002 32.1992 -32.2002zM168.6 171.9c-9.39941 0 -13.5996 0.299805 -20 0.799805v69.7002c6.40039 0.599609 15 0.599609 22.5 0.599609c23.3008 0 37.2002 -12.2002 37.2002 -34.5
+c0 -21.9004 -15 -36.5996 -39.7002 -36.5996z" />
+ <glyph glyph-name="teamspeak" unicode="&#xf4f9;" horiz-adv-x="511"
+d="M244.2 101.2c-2.40039 -12.5 -10.6006 -20 -22.5 -24.2998c-9.2002 -3.2002 -50.1006 -1.60059 -61.7002 -1c-18 1.2998 -33.2002 8.5 -43.4004 24c-14.5 22.5 -19.5 47.6992 -14.5 73.8994c4.60059 24.5 24.6006 34.7002 46.3008 22.7002
+c15.1992 -7.5 42.5 -27.4004 63.3994 -46.5996c20.4004 -18.7002 34.7998 -36.4004 32.4004 -48.7002zM449.2 80.4004c6.7002 -5.40039 11.2002 -22 11.5996 -32.1006c1 -50.3994 -23.8994 -68 -46.5996 -85.3994c-65.1006 -50 -295.101 -16.9004 -145.4 -6.40039
+c127.4 9 164.101 96.0996 172.101 121.5c1.09961 3.40039 5.2998 4.7998 8.2998 2.40039zM511.2 202.8c0 -17.2002 1.89941 -34.5996 -1 -51.7002c-4 -24.6992 -29.1006 -41.6992 -53.2002 -36.6992c-7.2002 1.69922 -9.40039 7.19922 -9.40039 14.1992
+c0 28.1006 0.800781 56.4004 0 84.6006c-1.89941 75.7998 -36.1992 132.8 -102.3 169.399c-111 60.4004 -253.2 -7 -277.8 -131.5c-6.09961 -30.3994 -1.7002 -48.2998 -3.7002 -125.8c-0.299805 -7.2002 -4.2998 -11.2002 -12 -11.5
+c-30.7998 -1.39941 -51.7998 18.2002 -51.7998 49v20.9004l0.799805 26.5c2.40039 15.5 10.7002 27 24.9004 34c3.5 1.7998 5.7002 3.5 6.39941 7.7998c6.10059 33.4004 19.5 64 39.3008 91.7002c2.2998 3.09961 4 5.2998 1 9.2998c-3.7002 5.40039 -1 10.2002 3 14.5
+c28.0996 31.7998 61.8994 55.0996 102 67.4004c95.7998 29.3994 180.1 9.2998 252.3 -60.5c6.7002 -6.40039 15.5 -12.9004 7 -24.4004c-1.2998 -1.7998 1.09961 -3.5 2.2002 -5c20.6992 -28 34.1992 -59.4004 40.3994 -93.5996
+c0.900391 -3.7002 3 -5.10059 5.90039 -6.40039c17.3994 -8.7998 25.7002 -23.2998 26 -42.2002zM351.6 71.2998l-51.5996 7.7002c-22.7998 5.90039 -51 32.7002 22.2002 60.7998c21.5996 8.5 85.7002 37.2002 87.7998 -8c0.900391 -32 -21.9004 -63.2998 -58.4004 -60.5z
+" />
+ <glyph glyph-name="first-order-alt" unicode="&#xf50a;" horiz-adv-x="496"
+d="M248 440c136.97 0 248 -111.03 248 -248s-111.03 -248 -248 -248s-248 111.03 -248 248s111.03 248 248 248zM248 -48.21c132.66 0 240.21 107.55 240.21 240.21s-107.55 240.21 -240.21 240.21s-240.21 -107.55 -240.21 -240.21s107.55 -240.21 240.21 -240.21z
+M248 411.71c121.34 0 219.71 -98.3701 219.71 -219.71s-98.3701 -219.71 -219.71 -219.71s-219.71 98.3701 -219.71 219.71s98.3701 219.71 219.71 219.71zM248 -19.5098c116.81 0 211.51 94.7002 211.51 211.51s-94.7002 211.51 -211.51 211.51
+s-211.51 -94.6895 -211.51 -211.51s94.7002 -211.51 211.51 -211.51zM434.23 143.47c-3.69141 -14.209 -12.709 -36.0225 -20.1309 -48.6895l-74.1299 35.8799l61.4805 -54.8203c-8.85352 -11.7021 -25.5195 -28.4082 -37.2002 -37.29l-54.7998 61.5703l35.8799 -74.2705
+c-12.6445 -7.45215 -34.4307 -16.5156 -48.6299 -20.2295l-27.29 78.4697l4.79004 -82.9297c-8.61035 -1.17969 -17.4004 -1.7998 -26.3301 -1.7998s-17.7197 0.620117 -26.3301 1.7998l4.75977 82.46l-27.1494 -78.0303c-14.2021 3.70996 -35.998 12.7588 -48.6504 20.2002
+l35.9297 74.3398l-54.8701 -61.6396c-11.6836 8.87988 -28.3584 25.582 -37.2197 37.2793l61.5898 54.9004l-74.2598 -35.9297c-7.42383 12.667 -16.4463 34.4795 -20.1396 48.6895l77.8398 27.1104l-82.2305 -4.75977c-1.15918 8.56934 -1.7793 17.3193 -1.7793 26.21
+c0 9 0.629883 17.8398 1.81934 26.5098l82.3799 -4.76953l-77.9395 27.1592c3.71973 14.208 12.7822 36.0127 20.2295 48.6699l74.2207 -35.9199l-61.5205 54.8604c8.88086 11.6836 25.582 28.3584 37.2803 37.2197l54.7598 -61.5293l-35.8301 74.1699
+c12.6562 7.41895 34.4521 16.4375 48.6504 20.1299l26.8701 -77.25l-4.70996 81.6094c8.60938 1.18066 17.3896 1.80078 26.3193 1.80078c8.93066 0 17.71 -0.620117 26.3203 -1.80078l-4.74023 -82.1592l27.0498 77.7598c17.2705 -4.5 33.6006 -11.3506 48.6309 -20.1699
+l-35.8203 -74.1201l54.7197 61.4697c11.6924 -8.86133 28.376 -25.54 37.2402 -37.2295l-61.4502 -54.7705l74.1201 35.8604c7.43945 -12.6533 16.4893 -34.4492 20.2002 -48.6504l-77.8105 -27.0996l82.2402 4.75c1.19043 -8.66016 1.82031 -17.5 1.82031 -26.4902
+c0 -8.87988 -0.610352 -17.6299 -1.78027 -26.1904l-82.1201 4.75z" />
+ <glyph glyph-name="fulcrum" unicode="&#xf50b;" horiz-adv-x="269"
+d="M70.75 283.86l-35.3799 -43.5508l-35.3701 43.5508l35.3799 43.5498zM119.23 448v-211.11l-41.0801 -44.8896l41.0801 -44.8896v-211.11l-20.5303 198.18l-50.9805 57.8203l50.9707 57.8203zM198.9 283.86l35.3799 43.5498l35.3799 -43.5498l-35.3799 -43.5508z
+M150.42 236.89v211.11l20.54 -198.18l50.9805 -57.8203l-50.9805 -57.8203l-20.54 -198.18v211.11l41.0801 44.8896z" />
+ <glyph glyph-name="galactic-republic" unicode="&#xf50c;" horiz-adv-x="496"
+d="M248 -56c-136.75 0 -248 111.25 -248 248s111.25 248 248 248s248 -111.25 248 -248s-111.25 -248 -248 -248zM248 423.47c-127.63 0 -231.47 -103.84 -231.47 -231.47s103.84 -231.47 231.47 -231.47s231.47 103.84 231.47 231.47s-103.84 231.47 -231.47 231.47z
+M275.62 401.66c37.6602 -4.91016 72.21 -19.7402 100.96 -41.7998l-17.3896 -17.3604c-20.6758 15.3154 -58.1152 30.7891 -83.5703 34.54v24.6201zM220.25 401.59v-24.54c-30.9697 -4.60938 -59.4502 -16.8301 -83.5195 -34.6699h-0.0800781l-17.2803 17.3604
+c28.7197 22.0498 63.2402 36.9102 100.88 41.8496zM232.5 351.42h31v-82.8604c10.0498 -2.0293 19.3701 -6.00977 27.6201 -11.5l58.6699 58.6709l21.9297 -21.9307l-58.6699 -58.6699c5.46973 -8.24023 9.48047 -17.5996 11.5 -27.6201h82.8701v-31h-82.8701
+c-2.03027 -10.0195 -6.04004 -19.3096 -11.5 -27.54l58.6699 -58.6895l-21.9297 -21.9307l-58.6699 58.6904c-8.25 -5.49023 -17.5703 -9.52051 -27.6201 -11.5498v-82.9004h-31v82.9004c-8.25781 1.66895 -20.6533 6.80762 -27.6699 11.4697l-58.6201 -58.6201
+l-21.9297 21.9297l58.6699 58.6904c-5.45996 8.23047 -9.4502 17.5205 -11.4697 27.54h-82.9004v31h82.9004c2.01953 10.0303 6 19.3896 11.4697 27.6201l-58.6699 58.6699l21.9297 21.9297l58.6201 -58.5898c8.25 5.48047 17.6299 9.38965 27.6699 11.4199v82.8701z
+M415.74 320.7c22.0996 -28.7402 36.9795 -63.3398 41.9297 -101.03h-24.6201c-3.7832 25.4902 -19.3154 62.9746 -34.6699 83.6699zM80.1904 320.57l17.3896 -17.3906c-17.8301 -24.0693 -29.9902 -52.5596 -34.5898 -83.5195h-24.6504
+c4.94043 37.6494 19.79 72.1895 41.8506 100.91zM38.3398 164.33l24.6504 0.00976562c4.58984 -30.9502 16.7002 -59.4502 34.5098 -83.5195l-17.3604 -17.3906c-22.0498 28.7207 -36.8799 63.2607 -41.7998 100.9zM433.04 164.33h24.6201
+c-4.9502 -37.6699 -19.8506 -72.2197 -41.9297 -100.96l-17.3604 17.3604c17.8701 24.0996 30.0596 52.6094 34.6699 83.5996zM136.66 41.6201c24.0703 -17.8604 52.6094 -30.0205 83.5996 -34.6504v-24.6396c-37.6602 4.9502 -72.2295 19.8398 -100.96 41.9297z
+M359.19 41.5703h0.0791016l17.3105 -17.3906c-28.75 -22.0596 -63.29 -36.9297 -100.96 -41.8496v24.5703c30.9902 4.58984 59.4795 16.8301 83.5703 34.6699z" />
+ <glyph glyph-name="galactic-senate" unicode="&#xf50d;" horiz-adv-x="512"
+d="M249.86 414.52h12.2793v-26.0693c13.5801 -20.6201 23.8604 -108.59 24.4902 -215.351c-11.7402 15.6201 -19.1299 33.3301 -19.1299 48.2402v16.8799c0.0302734 5.32031 -0.75 10.5303 -2.19043 15.6504c-0.649414 2.13965 -1.38965 4.07031 -2.61914 5.82031
+c-1.23047 1.73926 -3.44043 3.79004 -6.68066 3.79004c-3.25 0 -5.4502 -2.04004 -6.67969 -3.79004c-1.23047 -1.74023 -1.96973 -3.68066 -2.62012 -5.82031c-1.44043 -5.12012 -2.21973 -10.3301 -2.19043 -15.6504v-16.8799
+c0 -14.9102 -7.38965 -32.6201 -19.1299 -48.2402c0.610352 106.761 10.8906 194.73 24.4707 215.351v26.0693zM223.52 266.75c-1.59961 -22.4004 -2.75 -46.5195 -3.47949 -72.0703c-23.2998 -11.2793 -40.7705 -33.1602 -46.3203 -59.5098
+c-7.71973 -2.25977 -22.71 -3.91992 -40.4893 -4.21973c-7.51074 3.66016 -16.5 5.85938 -26.1807 6.04004c1.90039 14.9102 5.87012 29.1699 11.6504 42.4199c15.4395 -8.10059 30.9297 -8.66016 35.4697 -0.959961c4.57031 7.74023 -3.58984 21.04 -18.3203 30.6602
+c8.68066 11.7695 18.9805 22.2998 30.5605 31.0898c9.50977 -15.5898 23.3594 -24.4404 31.3594 -19.8203c8.05078 4.65039 7.19043 21.1699 -1.70996 37.29c8.76074 3.88965 17.9404 6.92969 27.46 9.08008zM288.48 266.75
+c7.82227 -1.75977 20.1201 -5.82812 27.4492 -9.08008c-8.89941 -16.1299 -9.75977 -32.6396 -1.70996 -37.29c8 -4.62012 21.8506 4.23047 31.3604 19.8203c11.5801 -8.79004 21.8799 -19.3203 30.5596 -31.0898c-14.7197 -9.61035 -22.8896 -22.9199 -18.3193 -30.6602
+c4.54004 -7.7002 20.0293 -7.14062 35.4697 0.959961c5.79004 -13.25 9.75 -27.5098 11.6504 -42.4199c-9.68066 -0.19043 -18.6709 -2.37988 -26.1807 -6.04004c-17.7793 0.299805 -32.7695 1.95996 -40.4902 4.21973c-5.5498 26.3496 -23.0293 48.2305 -46.3193 59.5098
+c-0.719727 25.5508 -1.87988 49.6699 -3.46973 72.0703zM256 258.15c3.23047 0 5.86035 -8.81055 6.08984 -19.9307h0.0498047v-16.8799c0 -41.4199 49.0107 -95.04 93.4902 -95.04c52 0 122.76 1.4502 156.37 -29.1699v-2.50977
+c-9.41992 -17.1104 -20.5801 -33.1699 -33.1797 -47.9697c-12.5303 21.0898 -51.5898 40.96 -108.021 41.3496c-45.6797 -1.01953 -79.0195 -20.3301 -90.7598 -40.8701c-0.00976562 -0.00976562 0.00976562 -0.0400391 0 -0.0498047
+c-7.66992 -2.13965 -15.8496 -3.23047 -24.04 -3.20996c-8.19043 -0.0205078 -16.3701 1.07031 -24.04 3.20996c-0.00976562 0.00976562 0.00976562 0.0400391 0 0.0498047c-11.7295 20.54 -45.0801 39.8506 -90.7598 40.8701
+c-56.4307 -0.400391 -95.5 -20.2598 -108.021 -41.3496c-12.5996 14.7998 -23.7598 30.8496 -33.1797 47.9697v2.50977c33.6201 30.6201 104.37 29.1699 156.37 29.1699c44.4795 0 93.4902 53.6201 93.4902 95.04v16.8799h0.0498047
+c0.229492 11.1201 2.85938 19.9307 6.08984 19.9307zM256 161.56c-22.4199 0 -40.5996 -18.1797 -40.5996 -40.5996s18.1797 -40.6504 40.5996 -40.6504s40.5996 18.2305 40.5996 40.6504s-18.1797 40.5996 -40.5996 40.5996zM256 153.92
+c18.1904 0 32.96 -14.7695 32.96 -32.96s-14.7695 -32.96 -32.96 -32.96s-32.96 14.7695 -32.96 32.96s14.7695 32.96 32.96 32.96zM256 147.78c-14.8096 0 -26.8203 -12.0107 -26.8203 -26.8203s12.0107 -26.8203 26.8203 -26.8203s26.8203 12.0107 26.8203 26.8203
+s-12.0107 26.8203 -26.8203 26.8203zM141.2 81.1104c18.75 -0.419922 35.1895 -4.18066 48.6094 -9.66992c12.5508 -16.0303 29.1602 -30.04 49.5801 -33.0703c0.100586 -0.00976562 0.169922 -0.0302734 0.270508 -0.0498047
+c0.0498047 -0.0107422 0.109375 -0.0400391 0.160156 -0.0507812c5.23926 -1.06934 10.6396 -1.59961 16.1895 -1.59961c5.56055 0 10.9502 0.530273 16.1904 1.59961c0.0498047 0.0107422 0.109375 0.0400391 0.160156 0.0507812
+c0.0996094 0.00976562 0.179688 0.0292969 0.269531 0.0498047c20.4199 3.04004 37.04 17.04 49.5801 33.0703c13.4199 5.5 29.8496 9.25 48.6104 9.66992c10.1797 -0.0800781 21.5996 -0.360352 30.5 -1.66016c-0.430664 -4.41992 -1.51074 -18.6299 -7.11035 -29.7598
+c-9.11035 2.55957 -18.3604 3.89941 -27.6201 3.89941c-41.2803 -0.939453 -71.4795 -34.3496 -78.2598 -74.4697l-0.110352 -4.7002c-10.3994 -1.91992 -21.1797 -2.93945 -32.21 -2.93945c-11.0195 0 -21.8096 1.0293 -32.21 2.93945l-0.109375 4.7002
+c-6.78027 40.1201 -36.9805 73.5303 -78.2607 74.4697c-9.25977 0 -18.5098 -1.33984 -27.6201 -3.89941c-5.59961 11.1299 -6.67969 25.3398 -7.10938 29.7598c8.89941 1.2998 20.3096 1.58984 30.5 1.66016z" />
+ <glyph glyph-name="jedi-order" unicode="&#xf50e;"
+d="M398.5 74.4004c0 0 26.2998 16.1992 49.9004 77.6992c0 0 -17 -183.3 -222 -185.699h-4.10059c-205.1 2.39941 -222 185.699 -222 185.699c23.2002 -61.5996 49.4004 -77.6992 49.4004 -77.6992c-95.9004 122.1 -17.2002 233.1 -17.2002 233.1
+c-45.4004 -85.7002 41.4004 -170.5 41.4004 -170.5c-105 171.6 60.5 271.5 60.5 271.5c-96.9004 -72.5996 10.0996 -190.7 10.0996 -190.7c-85.7998 -158.399 68.5996 -230.1 68.5996 -230.1s0.400391 16.8994 2.2002 85.7002l-34.5 -36.2002l24.2002 47.3994
+l-62.5996 9.10059l62.5996 9.09961l-20.2002 55.5l31.4004 -45.8994c2.2998 87.8994 7.89941 305.899 7.89941 306.899v2.40039v-1v1v-2.40039c0.100586 -1.7998 5.7002 -219.2 7.90039 -306.899l31.4004 45.8994l-20.2002 -55.5l62.5996 -9.09961l-62.5996 -9.10059
+l24.2002 -47.3994s-30.2002 31.7002 -34.5 36.2002c1.7998 -68.8008 2.19922 -85.7002 2.19922 -85.7002s154.4 71.7002 68.6006 230.1c0 0 107 118 10.0996 190.7c0 0 165.5 -100 60.5 -271.5c0 0 86.7998 84.7002 41.4004 170.5c0 0 78.7002 -111 -17.2002 -233.1z" />
+ <glyph glyph-name="mandalorian" unicode="&#xf50f;" horiz-adv-x="390"
+d="M203.28 -63.8896c-0.980469 3.25977 -1.69043 15.8301 -1.39062 24.5801c0.550781 15.8896 0.980469 24.7197 1.40039 28.7598c0.639648 6.2002 2.87012 20.7197 3.28027 21.3799c0.599609 0.959961 0.399414 27.8701 -0.240234 33.1299
+c-0.310547 2.58008 -0.629883 11.9004 -0.69043 20.7305c-0.129883 16.4697 -0.529297 20.1191 -2.72949 24.7598c-1.10059 2.31934 -1.23047 3.83984 -0.990234 11.4297c0.160156 4.81055 0 10.5303 -0.339844 12.71c-2.0498 12.9697 -3.45996 27.7002 -3.25 33.9004
+c0.209961 6.12012 0.429688 7.14941 2.05957 9.66992c3.05078 4.70996 6.51074 14.04 8.62012 23.2695c2.25977 9.86035 3.87988 17.1807 4.58984 20.7402c0.890625 4.41992 2.43066 9.71973 4.36035 15.0498c2.27051 6.25 2.49023 15.3906 0.370117 15.3906
+c-0.299805 0 -1.37988 -1.2207 -2.41016 -2.70996c-1.03027 -1.49023 -4.75977 -4.80078 -8.29004 -7.36035c-8.37012 -6.08008 -11.7002 -9.38965 -12.6602 -12.5801c-0.929688 -3.11035 -1.01953 -7.22949 -0.160156 -7.75977
+c0.34082 -0.209961 1.29004 -2.40039 2.11035 -4.87988c1.62012 -4.87988 1.87012 -10.1201 0.719727 -15.3604c-0.389648 -1.76953 -1.0498 -5.46973 -1.45996 -8.22949c-0.410156 -2.76074 -0.979492 -6.46094 -1.25 -8.2207
+c-0.279297 -1.75977 -0.969727 -3.67969 -1.5498 -4.25977c-0.959961 -0.959961 -1.13965 -0.910156 -2.0498 0.530273c-0.549805 0.870117 -1.2002 3.00977 -1.44043 4.75c-0.25 1.73926 -1.62988 7.10938 -3.08008 11.9297
+c-3.2793 10.9004 -3.51953 16.1504 -0.959961 20.96c0.919922 1.73047 1.66992 3.81055 1.66992 4.61035c0 2.38965 -2.19922 5.31934 -7.40918 9.88965c-7.05078 6.17969 -8.62988 7.91992 -10.2305 11.2998c-1.70996 3.60059 -3.05957 4.06055 -4.54004 1.54004
+c-1.78027 -3.00977 -2.59961 -9.10938 -2.96973 -22.0195l-0.350586 -12.1299l1.9502 -2.25c3.20996 -3.7002 12.0703 -16.4502 13.7803 -19.8301c3.41016 -6.74023 4.33984 -11.6904 4.41016 -23.5605c0.0703125 -11.8398 0.950195 -22.75 2 -24.71
+c0.359375 -0.660156 0.509766 -1.34961 0.339844 -1.51953s0.410156 -2.08984 1.29004 -4.27051c0.879883 -2.17969 1.80957 -6.21973 2.05957 -8.97949s1.02051 -7.43066 1.70996 -10.3701c2.23047 -9.56055 2.77051 -14.0801 2.39062 -20.1396
+c-0.200195 -3.27051 -0.530273 -11.0703 -0.730469 -17.3203c-1.30957 -41.7598 -1.84961 -57.9805 -2.04004 -61.21c-0.120117 -2.02051 -0.389648 -11.5098 -0.599609 -21.0703c-0.360352 -16.2998 -1.2998 -27.3701 -2.41992 -28.6494
+c-0.640625 -0.730469 -8.07031 4.90918 -12.5205 9.48926c-3.75 3.87012 -4.01953 4.79004 -2.83008 9.9502c0.700195 3.00977 2.26074 18.29 3.33008 32.6201c0.360352 4.78027 0.810547 10.5 1.01074 12.71c0.830078 9.37012 1.65918 20.3496 2.60938 34.7803
+c0.560547 8.45996 1.33008 16.4395 1.7207 17.7295c0.379883 1.29004 0.889648 9.89062 1.12988 19.1104l0.429688 16.7695l-2.25977 4.30078c-1.7207 3.2793 -4.87012 6.93945 -13.2207 15.3398c-6.0293 6.06934 -11.8398 12.2998 -12.9092 13.8496l-1.9502 2.81055
+l0.75 10.8994c1.08984 15.71 1.09961 48.5703 0.0195312 59.0605l-0.889648 8.69922l-3.28027 4.52051c-5.85938 8.08008 -5.7998 7.75 -6.21973 33.2695c-0.0996094 6.07031 -0.379883 11.5 -0.629883 12.0605c-0.830078 1.87012 -3.0498 2.66016 -8.54004 3.0498
+c-8.86035 0.620117 -10.96 1.90039 -23.8506 14.5498c-6.14941 6.04004 -12.3398 11.9697 -13.75 13.1904c-2.80957 2.41992 -2.79004 1.99023 -0.55957 9.62988l1.34961 4.64941l-1.68945 3.04004c-0.929688 1.6709 -2.08984 3.51074 -2.58984 4.07031
+c-1.33008 1.50977 -5.5 10.8896 -5.99023 13.4902c-0.310547 1.66016 -0.0898438 2.66992 0.870117 3.89941c2.22949 2.86035 3.39941 5.68066 4.4502 10.7305c2.33008 11.1904 7.73926 26.0898 10.5996 29.2197c3.17969 3.4707 7.7002 1.0498 9.41016 -5.0293
+c1.33984 -4.79004 1.37012 -9.79004 0.0996094 -18.5508c-0.529297 -3.67969 -0.979492 -8.67969 -0.990234 -11.1094c-0.0195312 -4.01074 0.19043 -4.69043 2.25 -7.39062c3.33008 -4.37012 7.73047 -7.40918 15.2002 -10.5195
+c1.7002 -0.709961 3.82031 -1.99023 4.7207 -2.85059c11.1699 -10.7197 18.6191 -16.1797 22.9492 -16.8496c5.18066 -0.799805 7.98047 -4.54004 10.04 -13.3896c1.31055 -5.65039 4 -11.1406 5.45996 -11.1406c0.59082 0 2.09082 0.629883 3.33008 1.39062
+c1.98047 1.21973 2.25 1.72949 2.25 4.17969c-0.00976562 3.70996 -1.16992 14.0801 -2 17.8398c-0.370117 1.66016 -0.779297 4.06055 -0.929688 5.35059c-0.139648 1.29004 -0.610352 3.84961 -1.03027 5.68945c-2.5498 11.1602 -3.64941 15.46 -4.09961 16.0498
+c-1.5498 2.02051 -4.08008 10.2002 -4.92969 15.9199c-1.64062 11.1104 -3.96094 14.2305 -12.9102 17.3906c-4.64062 1.63965 -8.89062 4.12012 -13.3203 7.78027c-1.15039 0.949219 -4.00977 3.21973 -6.34961 5.05957
+c-2.35059 1.83008 -4.41016 3.53027 -4.60059 3.75977c-0.179688 0.230469 -1.38965 1.14062 -2.68945 2.02051c-6.24023 4.21973 -8.84082 6.97949 -11.2607 11.96l-2.43945 5.01953l-0.219727 12.9805l-0.220703 12.9795l6.91016 6.5498
+c3.9502 3.75 8.48047 7.35059 10.5898 8.43066c3.31055 1.68945 4.4502 1.88965 11.3701 2.0498c8.53027 0.19043 10.1201 -0.0205078 11.6602 -1.55957c1.53027 -1.53027 1.36035 -6.40039 -0.290039 -8.5c-0.740234 -0.94043 -1.33984 -1.98047 -1.33984 -2.32031
+c0 -0.580078 -2.61035 -4.91016 -5.41992 -8.99023c-0.680664 -0.990234 -2.12988 -5.34961 -2.37012 -6.82031c20.4395 -13.3896 21.5498 -3.76953 14.0703 -28.9795l11.3994 -2.54004c3.11035 8.66016 6.46973 17.2598 8.61035 26.2197
+c0.290039 7.62988 -11.9805 4.19043 -15.4004 8.68066c-2.33008 5.92969 3.12988 14.1797 6.06055 19.1992c1.59961 2.33984 6.62012 4.7002 8.81934 4.15039c0.879883 -0.219727 4.16016 0.349609 7.37012 1.28027c3.18066 0.919922 6.58008 1.67969 7.5498 1.67969
+c0.970703 0 3.66016 0.580078 5.98047 1.29004c3.65039 1.11035 4.5 1.16992 6.34961 0.400391c1.16992 -0.480469 3.79004 -1.09082 5.82031 -1.36035c2.02051 -0.259766 4.71973 -1.12012 6 -1.91016s3.53027 -1.76953 5.02051 -2.16992
+c2.50977 -0.679688 3 -0.570312 7.0498 1.66992l4.34961 2.40039l10.7002 0.40918c10.4395 0.400391 10.8096 0.470703 15.2598 2.68066l4.58008 2.2998l2.45996 -1.42969c1.75977 -1.02051 3.13965 -2.73047 4.85059 -5.98047
+c2.35938 -4.50977 2.37988 -4.58008 1.36914 -7.37012c-0.879883 -2.43945 -0.889648 -3.2998 -0.0996094 -6.38965c0.5 -1.95996 1.4502 -4.62012 2.09961 -5.91016c0.650391 -1.29004 1.24023 -3.08984 1.31055 -4.00977
+c0.30957 -4.33008 -0.0302734 -5.2998 -2.41016 -6.91992c-2.16992 -1.4707 -6.98047 -7.91016 -6.98047 -9.33984c0 -0.320312 -0.479492 -1.69043 -1.06934 -3.03027c-5.04004 -11.5098 -6.75977 -13.5605 -14.2598 -16.9805
+c-9.2002 -4.19922 -12.3008 -5.18945 -16.21 -5.18945c-3.10059 0 -4 -0.25 -4.54004 -1.25977c-0.370117 -0.69043 -2.20996 -2.37012 -4.09082 -3.70996c-2.04004 -1.4707 -3.7998 -3.38086 -4.37988 -4.78027c-0.540039 -1.28027 -1.66016 -2.58984 -2.48926 -2.91016
+c-0.830078 -0.320312 -1.94043 -1.08008 -2.4502 -1.70996c-0.520508 -0.620117 -3.66016 -3.04004 -7 -5.37988c-3.33008 -2.33984 -6.87012 -5.02051 -7.87012 -5.95996c-1 -0.94043 -2.07031 -1.70996 -2.39062 -1.70996
+c-0.319336 0 -1.2793 -0.740234 -2.12988 -1.65039c-1.30957 -1.38965 -1.48926 -2.11035 -1.13965 -4.59961c0.219727 -1.62988 0.860352 -4.27051 1.41992 -5.87988c1.32031 -3.80078 1.31055 -7.86035 -0.0498047 -10.5703
+c-1.42969 -2.86035 -0.889648 -6.65039 1.34961 -9.58984c2.01074 -2.62988 2.16016 -4.56055 0.709961 -8.83984c-0.609375 -1.80078 -1.0498 -5.4502 -1.05957 -8.91016c-0.0205078 -4.87988 0.219727 -6.28027 1.45996 -8.37988
+c1.2002 -2.04004 1.82031 -2.48047 3.24023 -2.32031c1.97949 0.229492 2.2998 1.0498 4.70996 12.1201c2.17969 10.0303 3.70996 11.9199 13.7598 17.0801c2.94043 1.50977 7.45996 3.95996 10.0303 5.43945c2.58008 1.48047 6.79004 3.69043 9.37012 4.91016
+c6.66992 3.16016 11.0498 6.52051 15.2197 11.6699c7.10938 8.79004 9.97949 16.2207 12.8496 33.2998c0.549805 3.28027 1.43066 5.65039 2.86035 7.73047c1.29004 1.87012 2.37012 4.62012 2.88965 7.30957c1.02051 5.30078 2.85059 9.08008 5.58008 11.5107
+c4.7002 4.17969 6 1.08984 4.58984 -10.8701c-0.459961 -3.86035 -1.09961 -10.3301 -1.43945 -14.3799l-0.610352 -7.36035l4.4502 -4.08984l4.4502 -4.08984l0.109375 -8.41992c0.0605469 -4.63086 0.470703 -9.53027 0.919922 -10.8906l0.820312 -2.46973
+l-6.42969 -6.28027c-8.54004 -8.33008 -12.8799 -13.9297 -16.7598 -21.6094c-1.77051 -3.49023 -3.74023 -7.11035 -4.38086 -8.03027c-2.17969 -3.11035 -6.45996 -13.0098 -8.75977 -20.2598l-2.29004 -7.2207l-6.96973 -6.48926
+c-3.83008 -3.57031 -7.95996 -7.25 -9.16992 -8.16992c-3.0498 -2.32031 -4.25977 -5.15039 -4.25977 -9.99023c0 -2.98047 0.429688 -4.95996 1.58984 -7.25977c0.870117 -1.74023 1.80957 -3.91016 2.08984 -4.83008s0.979492 -2.2207 1.57031 -2.89062
+c1.39941 -1.58984 1.91992 -16.1201 0.830078 -23.2197c-0.680664 -4.48047 -3.63086 -12.0205 -4.7002 -12.0205c-1.79004 0 -4.06055 -9.26953 -5.07031 -20.7393c-0.179688 -2.02051 -0.620117 -5.94043 -0.979492 -8.7002
+c-0.360352 -2.75977 -0.959961 -9.98047 -1.35059 -16.0498c-0.769531 -12.2207 -0.189453 -18.7705 2.0498 -23.1504c3.41016 -6.68945 0.520508 -12.6895 -11.0293 -22.8398l-3.9707 -3.49023l0.0703125 -5.18945c0.0400391 -2.86035 0.549805 -6.85059 1.13965 -8.87012
+c4.61035 -15.9805 4.73047 -16.9199 4.38086 -37.1299c-0.460938 -26.4004 -0.260742 -40.2705 0.629883 -44.1504c0.419922 -1.83984 0.90918 -5 1.08008 -7.01953c0.169922 -2.02051 0.65918 -5.33008 1.0791 -7.36035
+c0.470703 -2.25977 0.780273 -11.0205 0.790039 -22.7402l0.0205078 -19.0596l-1.81055 -2.62988c-2.70996 -3.91016 -15.1094 -13.54 -15.4893 -12.29zM232.81 -18.7803c-0.179688 0.299805 -0.329102 6.87012 -0.329102 14.5898
+c0 14.0605 -0.890625 27.54 -2.26074 34.4502c-0.399414 2.02051 -0.80957 9.7002 -0.899414 17.0605c-0.150391 11.9297 -1.40039 24.3701 -2.64062 26.3799c-0.660156 1.06934 -3.01953 17.6602 -3.0293 21.2998c-0.0107422 4.23047 1.01953 6 5.2793 9.12988
+c4.14062 3.04004 4.86035 3.13965 5.48047 0.719727c0.280273 -1.09961 1.4502 -5.61914 2.59961 -10.0293c3.93066 -15.1201 4.14062 -16.2705 4.0498 -21.7402c-0.0996094 -5.78027 -0.129883 -6.12988 -1.73926 -17.7305
+c-0.980469 -7.06934 -1.16992 -12.3896 -1.04004 -28.4297c0.169922 -19.4004 -0.640625 -35.7295 -2.04004 -41.2695c-0.709961 -2.78027 -2.7998 -5.48047 -3.43066 -4.43066zM161.82 18.7998c-0.240234 0.379883 -1.01074 5.24023 -1.73047 10.79
+c-0.719727 5.56055 -1.49023 10.4102 -1.72949 10.79c-0.230469 0.379883 -0.680664 3.2998 -0.990234 6.49023c-0.310547 3.18945 -0.910156 7.45996 -1.33008 9.47949c-0.990234 4.79004 -3.34961 19.3506 -3.41992 21.0703
+c-0.0302734 0.740234 -0.339844 4.0498 -0.700195 7.36035c-0.669922 6.20996 -0.839844 27.6699 -0.219727 28.29c0.959961 0.959961 6.62988 -2.76074 11.3301 -7.43066l5.2793 -5.25l-0.449219 -6.46973c-0.25 -3.55957 -0.600586 -10.2295 -0.780273 -14.8301
+c-0.179688 -4.59961 -0.490234 -9.87012 -0.669922 -11.71s-0.610352 -9.36035 -0.94043 -16.7197c-0.790039 -17.4102 -1.93945 -31.29 -2.64941 -32c-0.320312 -0.299805 -0.760742 -0.240234 -1 0.139648zM74.6299 285.39
+c21.0703 -12.79 17.8398 -14.1494 28.4902 -17.6592c13.0098 -4.29004 18.8701 -7.13086 23.1494 -16.8701c-43.6592 -36.1406 -69.0098 -57.9004 -76.71 -70.8203c-31.0195 -52.0098 -5.98926 -101.59 62.75 -87.21c-14.1797 -29.2305 -77.9697 -28.6299 -98.6797 4.90039
+c-24.6797 39.9492 -22.0898 118.3 61 187.659zM285.42 106.37c56.6602 -6.87988 82.3203 37.7402 46.54 89.2295c0 0 -26.8701 29.3408 -64.2803 67.96c2.98047 15.4502 9.49023 32.1201 30.5703 53.8203c89.2002 -63.5098 92 -141.61 92.46 -149.36
+c4.27051 -70.5791 -78.6602 -91.1191 -105.29 -61.6494z" />
+ <glyph glyph-name="old-republic" unicode="&#xf510;" horiz-adv-x="496"
+d="M235.76 437.77c7.5 0.310547 15 0.280273 22.5 0.0908203c3.61035 -0.140625 7.2002 -0.400391 10.79 -0.730469c4.91992 -0.269531 9.79004 -1.03027 14.6699 -1.62012c2.93066 -0.429688 5.83008 -0.979492 8.75 -1.45996
+c7.90039 -1.33008 15.6699 -3.28027 23.3906 -5.39941c12.2393 -3.4707 24.1895 -7.91992 35.7598 -13.21c26.5596 -12.2402 50.9395 -29.21 71.6299 -49.8809c20.0303 -20.0898 36.7197 -43.5498 48.8896 -69.1895c1.12988 -2.58984 2.44043 -5.10059 3.4707 -7.74023
+c2.80957 -6.42969 5.38965 -12.9697 7.58008 -19.6299c4.13965 -12.3301 7.33984 -24.9902 9.41992 -37.8301c0.569336 -3.13965 1.04004 -6.2998 1.39941 -9.46973c0.549805 -3.83008 0.94043 -7.69043 1.18066 -11.5605
+c0.829102 -8.33984 0.839844 -16.7295 0.769531 -25.0996c-0.0703125 -4.96973 -0.259766 -9.94043 -0.75 -14.8896c-0.240234 -3.38086 -0.509766 -6.76074 -0.979492 -10.1201c-0.390625 -2.7207 -0.630859 -5.45996 -1.11035 -8.16992
+c-0.900391 -5.15039 -1.7002 -10.3105 -2.87012 -15.4102c-4.09961 -18.5 -10.2998 -36.5498 -18.5098 -53.6299c-15.7705 -32.8301 -38.8301 -62.1699 -67.1201 -85.1201c-14.3926 -11.7676 -39.8887 -27.3848 -56.9102 -34.8604
+c-6.20996 -2.67969 -12.46 -5.25 -18.8701 -7.41016c-3.50977 -1.16016 -7.00977 -2.37988 -10.5703 -3.38965c-6.61914 -1.87988 -13.2891 -3.63965 -20.0391 -5c-4.66016 -0.910156 -9.34082 -1.73047 -14.0303 -2.48047c-5.25 -0.65918 -10.5 -1.43945 -15.79 -1.73926
+c-6.69043 -0.660156 -13.4102 -0.839844 -20.1201 -0.810547c-6.82031 -0.0292969 -13.6504 0.120117 -20.4502 0.790039c-3.29004 0.230469 -6.57031 0.5 -9.83008 0.950195c-2.71973 0.389648 -5.45996 0.629883 -8.16992 1.11035
+c-4.12012 0.719727 -8.25 1.37012 -12.3496 2.21973c-4.25 0.939453 -8.49023 1.88965 -12.6904 3.01953c-8.62988 2.16992 -17.0801 5.01074 -25.4102 8.13086c-10.4893 4.11914 -20.79 8.75 -30.6396 14.25c-2.13965 1.14941 -4.28027 2.28906 -6.34961 3.56934
+c-11.2207 6.58008 -21.8604 14.1006 -31.9199 22.3398c-34.6807 28.4102 -61.4102 66.4307 -76.3506 108.7c-3.08984 8.74023 -5.70996 17.6504 -7.7998 26.6797c-1.48047 6.16016 -2.52051 12.4209 -3.58008 18.6602
+c-0.400391 2.35059 -0.610352 4.73047 -0.950195 7.08984c-0.599609 3.96094 -0.75 7.96094 -1.16992 11.9404c-0.799805 9.46973 -0.709961 18.9902 -0.509766 28.4902c0.139648 3.50977 0.339844 7.00977 0.700195 10.5098
+c0.30957 3.16992 0.459961 6.37012 0.919922 9.52051c0.410156 2.80957 0.649414 5.64941 1.16016 8.43945c0.699219 3.94043 1.2998 7.90039 2.11914 11.8203c3.43066 16.5195 8.4707 32.7295 15.2607 48.1797c1.14941 2.91992 2.58984 5.71973 3.85938 8.58984
+c8.05078 16.71 17.9004 32.5605 29.4902 47.0605c20 25.3799 45.1006 46.6797 73.2705 62.4697c7.5 4.15039 15.1592 8.0498 23.0693 11.3701c15.8203 6.87988 32.4102 11.9502 49.3105 15.3799c3.50977 0.669922 7.04004 1.24023 10.5596 1.84961
+c2.62012 0.470703 5.28027 0.700195 7.91016 1.08008c3.53027 0.530273 7.09961 0.680664 10.6504 1.04004c2.45996 0.240234 4.90918 0.360352 7.35938 0.509766zM244.4 413.36c-9.23047 -0.100586 -18.4307 -0.990234 -27.5703 -2.23047
+c-7.2998 -1.08008 -14.5303 -2.59961 -21.71 -4.2998c-13.9102 -3.5 -27.4805 -8.33984 -40.46 -14.4199c-10.46 -4.99023 -20.5898 -10.7002 -30.1797 -17.2197c-4.18066 -2.9209 -8.40039 -5.80078 -12.3408 -9.03027
+c-5.08008 -3.96973 -9.97949 -8.16992 -14.6797 -12.5898c-2.50977 -2.24023 -4.80957 -4.7002 -7.21973 -7.06055c-28.2207 -28.79 -48.4404 -65.3896 -57.5 -104.689c-2.04004 -8.44043 -3.54004 -17.0205 -4.44043 -25.6504
+c-1.09961 -8.88965 -1.43945 -17.8496 -1.41016 -26.7998c0.110352 -7.13965 0.379883 -14.2803 1.2207 -21.3701c0.620117 -7.12012 1.87012 -14.1602 3.19922 -21.1797c1.07031 -4.65039 2.03027 -9.32031 3.33008 -13.9102
+c6.29004 -23.3799 16.5 -45.7002 30.0703 -65.75c8.63965 -12.9805 18.7803 -24.9297 29.9805 -35.7705c16.2793 -15.8193 35.0498 -29.04 55.3398 -39.2197c7.2793 -3.51953 14.6602 -6.87012 22.2695 -9.62988c5.04004 -1.75977 10.0605 -3.57031 15.2197 -4.98047
+c11.2607 -3.22949 22.7705 -5.59961 34.3906 -7.05957c2.91016 -0.290039 5.80957 -0.610352 8.71973 -0.900391c13.8203 -1.08008 27.7402 -1 41.54 0.430664c4.4502 0.599609 8.91992 0.989258 13.3496 1.7793c3.63086 0.670898 7.28027 1.25 10.8701 2.10059
+c4.12988 0.979492 8.28027 1.91016 12.3604 3.07031c26.5 7.33984 51.5801 19.71 73.5801 36.1992c15.7803 11.8203 29.96 25.7607 42.1201 41.2803c3.25977 4.02051 6.16992 8.30957 9.12988 12.5498c3.38965 5.06055 6.58008 10.25 9.59961 15.54
+c2.40039 4.44043 4.74023 8.91016 6.9502 13.4502c5.69043 12.0498 10.2803 24.6201 13.75 37.4902c2.58984 10.0098 4.75 20.1602 5.90039 30.4502c1.76953 13.4697 1.93945 27.0996 1.29004 40.6494c-0.290039 3.89062 -0.669922 7.77051 -1 11.6602
+c-2.23047 19.0801 -6.79004 37.9102 -13.8203 55.7998c-5.9502 15.1299 -13.5303 29.6299 -22.6104 43.1299c-12.6895 18.8008 -28.2393 35.6807 -45.9697 49.8301c-25.0498 20 -54.4697 34.5498 -85.6504 42.0801c-7.7793 1.92969 -15.6895 3.33984 -23.6299 4.4502
+c-3.90918 0.589844 -7.84961 0.820312 -11.7695 1.24023c-7.38965 0.569336 -14.8105 0.719727 -22.2197 0.580078zM139.26 364.47c13.2998 8.89062 28.0801 15.3799 43.2998 20.1807c-3.16992 -1.77051 -6.43945 -3.38086 -9.5293 -5.29004
+c-11.21 -6.68066 -21.5205 -14.9004 -30.3799 -24.4902c-6.80078 -7.42969 -12.7607 -15.7305 -17.0107 -24.8896c-3.29004 -6.86035 -5.63965 -14.1904 -6.85938 -21.7109c-0.930664 -4.84961 -1.2998 -9.80957 -1.16992 -14.75
+c0.129883 -13.6592 4.43945 -27.0791 11.29 -38.8193c5.91992 -10.2197 13.6299 -19.3301 22.3594 -27.2598c4.85059 -4.36035 10.2402 -8.09082 14.9502 -12.6006c2.25977 -2.18945 4.49023 -4.41992 6.42969 -6.91016c2.62012 -3.30957 4.89062 -6.98926 5.99023 -11.0996
+c0.900391 -3.02051 0.660156 -6.2002 0.69043 -9.31055c0.0195312 -4.09961 -0.0400391 -8.19922 0.0292969 -12.2998c0.140625 -3.54004 -0.0195312 -7.08984 0.110352 -10.6299c0.0800781 -2.37988 0.0205078 -4.75977 0.0498047 -7.13965
+c0.160156 -5.77051 0.0605469 -11.5303 0.150391 -17.2998c0.109375 -2.91016 0.0195312 -5.82031 0.129883 -8.74023c0.0302734 -1.62988 0.129883 -3.28027 -0.0302734 -4.91016c-0.910156 -0.120117 -1.81934 -0.179688 -2.72949 -0.160156
+c-10.9902 0 -21.8799 2.62988 -31.9502 6.92969c-6 2.7002 -11.8105 5.89062 -17.0898 9.83008c-5.75 4.19043 -11.0898 8.95996 -15.79 14.3105c-6.53027 7.24023 -11.9805 15.3896 -16.6201 23.9502c-1.07031 2.0293 -2.24023 4.01953 -3.17969 6.12012
+c-1.16016 2.63965 -2.62012 5.13965 -3.66992 7.81934c-4.05078 9.68066 -6.57031 19.9404 -8.08008 30.3105c-0.490234 4.43945 -1.09082 8.87988 -1.2002 13.3496c-0.700195 15.7305 0.839844 31.5498 4.66992 46.8203c2.12012 8.14941 4.76953 16.1797 8.30957 23.8301
+c6.32031 14.1992 15.3398 27.1797 26.3008 38.1895c6.2793 6.2002 13.1299 11.8398 20.5293 16.6699zM314.63 384.59c2.74023 -0.740234 5.41016 -1.74023 8.08984 -2.67969c6.36035 -2.33008 12.6807 -4.83984 18.71 -7.95996
+c13.1104 -6.44043 25.3105 -14.8105 35.8203 -24.9697c10.2002 -9.9502 18.7402 -21.6006 25.1396 -34.3408c1.28027 -2.75 2.64062 -5.45996 3.81055 -8.25977c6.30957 -15.0996 10 -31.2598 11.2295 -47.5703c0.410156 -4.54004 0.44043 -9.08984 0.450195 -13.6396
+c0.0703125 -11.6396 -1.49023 -23.25 -4.2998 -34.5303c-1.96973 -7.26953 -4.34961 -14.4893 -7.86035 -21.1797c-3.17969 -6.63965 -6.67969 -13.1602 -10.8398 -19.2402c-6.93945 -10.4697 -15.5996 -19.8701 -25.8203 -27.2197
+c-10.4795 -7.63965 -22.6396 -13.0195 -35.3994 -15.3799c-3.50977 -0.69043 -7.08008 -1.08008 -10.6602 -1.20996c-1.84961 -0.0605469 -3.71973 -0.160156 -5.55957 0.0996094c-0.280273 2.15039 0 4.31055 -0.0107422 6.45996
+c-0.0292969 3.73047 0.140625 7.4502 0.100586 11.1699c0.189453 7.02051 0.0195312 14.0508 0.209961 21.0703c0.0292969 2.37988 -0.0302734 4.75977 0.0292969 7.13965c0.170898 5.07031 -0.0390625 10.1406 0.140625 15.21
+c0.0996094 2.99023 -0.240234 6.04004 0.509766 8.95996c0.660156 2.5 1.78027 4.86035 3.08984 7.08008c4.45996 7.31055 11.0605 12.96 17.6807 18.2607c5.37988 4.17969 10.4697 8.76953 15.0195 13.8398c7.67969 8.37012 14.1699 17.8799 18.7803 28.2695
+c2.5 5.93066 4.51953 12.1006 5.5498 18.46c0.860352 4.37012 1.05957 8.83008 1.00977 13.2705c-0.0195312 7.84961 -1.39941 15.6494 -3.63965 23.1699c-1.75 5.72949 -4.27051 11.1797 -7.08984 16.4502c-3.87012 6.92969 -8.65039 13.3096 -13.96 19.1992
+c-9.94043 10.8506 -21.75 19.9404 -34.6006 27.1006c-1.84961 1.01953 -3.83984 1.82031 -5.62988 2.96973zM213.83 326.14c0.979492 1.18066 1.99023 2.33008 3.12012 3.37988c-0.610352 -0.929688 -1.27051 -1.80957 -1.9502 -2.67969
+c-3.09961 -3.87988 -5.54004 -8.30957 -7.03027 -13.0596c-0.870117 -3.27051 -1.67969 -6.60059 -1.72949 -10c-0.0703125 -2.52051 -0.0800781 -5.07031 0.319336 -7.57031c1.13086 -7.62988 4.33008 -14.8496 8.77051 -21.1201c2 -2.7002 4.25 -5.26953 6.91992 -7.33008
+c1.62012 -1.26953 3.53027 -2.08984 5.33984 -3.0498c3.11035 -1.67969 6.32031 -3.22949 9.07031 -5.47949c2.66992 -2.09082 4.5498 -5.33008 4.39941 -8.79004c-0.00976562 -73.6709 0 -147.341 -0.00976562 -221.021c0 -1.34961 -0.0800781 -2.7002 0.0400391 -4.04004
+c0.129883 -1.47949 0.820312 -2.83008 1.46973 -4.14941c0.860352 -1.66016 1.78027 -3.34082 3.18066 -4.62012c0.849609 -0.770508 1.96973 -1.40039 3.14941 -1.24023c1.5 0.200195 2.66016 1.34961 3.4502 2.57031c0.959961 1.50977 1.67969 3.15918 2.28027 4.84961
+c0.759766 2.12988 0.439453 4.41992 0.540039 6.62988c0.139648 4.03027 -0.0205078 8.06055 0.139648 12.0898c0.0302734 5.89062 0.0302734 11.7705 0.0605469 17.6602c0.139648 3.62012 0.0292969 7.24023 0.109375 10.8604
+c0.150391 4.0293 -0.0195312 8.05957 0.140625 12.0898c0.0292969 5.99023 0.0292969 11.9795 0.0693359 17.9697c0.140625 3.62012 0.0205078 7.24023 0.110352 10.8604c0.139648 3.92969 -0.0205078 7.85938 0.139648 11.7803
+c0.0302734 5.98926 0.0302734 11.9795 0.0605469 17.9697c0.160156 3.93945 -0.00976562 7.87988 0.189453 11.8193c0.290039 -1.43945 0.129883 -2.91992 0.220703 -4.37988c0.189453 -3.60938 0.419922 -7.22949 0.759766 -10.8398
+c0.320312 -3.43945 0.439453 -6.88965 0.859375 -10.3193c0.370117 -3.10059 0.510742 -6.2207 0.950195 -9.31055c0.570312 -4.08984 0.870117 -8.20996 1.54004 -12.29c1.45996 -9.04004 2.83008 -18.1104 5.08984 -26.9902c1.13086 -4.81934 2.40039 -9.60938 4 -14.2998
+c2.54004 -7.89941 5.7207 -15.6699 10.3105 -22.6201c1.72949 -2.63965 3.87012 -4.97949 6.09961 -7.20996c0.270508 -0.25 0.549805 -0.509766 0.879883 -0.709961c0.600586 -0.25 1.31055 0.0703125 1.7002 0.570312c0.709961 0.879883 1.16992 1.93945 1.7002 2.92969
+c4.0498 7.7998 8.17969 15.5605 12.3398 23.3105c0.700195 1.30957 1.44043 2.62012 2.56055 3.60938c1.75 1.57031 3.83984 2.69043 5.97949 3.62988c2.87988 1.2207 5.90039 2.19043 9.03027 2.41992c6.58008 0.620117 13.1094 -0.75 19.5596 -1.84961
+c3.69043 -0.580078 7.40039 -1.16992 11.1299 -1.41016c3.74023 -0.0996094 7.48047 -0.0498047 11.21 0.280273c8.55078 0.919922 16.9902 2.95996 24.9404 6.25c5.2998 2.24023 10.46 4.83008 15.3096 7.92969c11.46 7.20996 21.46 16.5703 30.04 27.0107
+c1.16992 1.41992 2.25 2.89941 3.45996 4.2793c-1.19922 -3.24023 -2.66992 -6.37012 -4.15918 -9.47949c-1.25 -2.90039 -2.84082 -5.61035 -4.27051 -8.41992c-5.16016 -9.62988 -11.0195 -18.9102 -17.75 -27.5205
+c-4.03027 -5.20996 -8.53027 -10.0498 -13.3301 -14.5703c-6.63965 -6.0498 -14.0703 -11.3691 -22.4297 -14.7598c-8.20996 -3.37012 -17.3105 -4.62988 -26.0898 -3.29004c-3.56055 0.580078 -7.01074 1.69043 -10.4102 2.87988
+c-2.79004 0.970703 -5.39062 2.38086 -8.03027 3.69043c-3.42969 1.70996 -6.63965 3.80957 -9.70996 6.08008c2.70996 -3.06055 5.69043 -5.86035 8.7002 -8.61035c4.26953 -3.75977 8.74023 -7.30957 13.6299 -10.2295c3.98047 -2.4502 8.29004 -4.40039 12.8398 -5.51074
+c1.45996 -0.369141 2.95996 -0.459961 4.4502 -0.599609c-1.25 -1.09961 -2.62988 -2.04004 -3.99023 -2.97949c-9.60938 -6.54004 -20.0098 -11.8604 -30.6895 -16.4307c-20.8604 -8.7002 -43.1699 -13.9697 -65.7402 -15.3398
+c-4.66016 -0.240234 -9.32031 -0.360352 -13.9805 -0.360352c-4.97949 0.110352 -9.96973 0.130859 -14.9199 0.650391c-11.2002 0.759766 -22.29 2.73047 -33.1699 5.42969c-10.3496 2.70996 -20.5498 6.12012 -30.2998 10.5508
+c-8.70996 3.85938 -17.1201 8.41992 -24.9902 13.79c-1.83008 1.30957 -3.74023 2.5293 -5.37012 4.0791c6.60059 1.19043 13.0303 3.39062 18.9902 6.48047c5.74023 2.86035 10.9902 6.66016 15.6299 11.0703c2.24023 2.18945 4.29004 4.58984 6.19043 7.08984
+c-3.43066 -2.12988 -6.93066 -4.15039 -10.6201 -5.78027c-4.41016 -2.16016 -9.07031 -3.76953 -13.8105 -5.01953c-5.72949 -1.52051 -11.7393 -1.73047 -17.6094 -1.14062c-8.12988 0.950195 -15.8604 4.27051 -22.5098 8.98047
+c-4.32031 2.93945 -8.2207 6.42969 -11.96 10.0596c-9.93066 10.1602 -18.2002 21.8105 -25.6602 33.8604c-3.94043 6.26953 -7.53027 12.75 -11.1201 19.2197c-1.0498 2.04004 -2.15039 4.0498 -3.17969 6.10059c2.84961 -2.9209 5.56934 -5.9707 8.42969 -8.88086
+c8.99023 -8.96973 18.5596 -17.4395 29.1602 -24.4795c7.5498 -4.90039 15.6699 -9.23047 24.5596 -11.0303c3.11035 -0.729492 6.32031 -0.469727 9.46973 -0.80957c2.77051 -0.280273 5.56055 -0.200195 8.34082 -0.299805
+c5.0498 -0.0605469 10.1094 -0.0400391 15.1592 0.15918c3.65039 0.160156 7.27051 0.660156 10.8906 1.09082c2.06934 0.25 4.10938 0.709961 6.13965 1.19922c3.87988 0.950195 8.11035 0.959961 11.8301 -0.609375c4.75977 -1.85059 8.44043 -5.64062 11.3799 -9.70996
+c2.16016 -3.02051 4.06055 -6.2207 5.66016 -9.58008c1.16016 -2.43066 2.45996 -4.79004 3.5498 -7.26074c1 -2.23926 2.15039 -4.41992 3.41992 -6.51953c0.669922 -1.01953 1.40039 -2.15039 2.62012 -2.5498c1.06055 0.75 1.70996 1.91016 2.28027 3.03027
+c2.09961 4.15918 3.41992 8.64941 4.88965 13.0498c2.02051 6.58984 3.78027 13.2695 5.19043 20.0195c2.20996 9.25 3.25 18.7197 4.54004 28.1299c0.55957 3.98047 0.830078 7.99023 1.30957 11.9707c0.870117 10.6396 1.90039 21.2695 2.24023 31.9395
+c0.0800781 1.86035 0.240234 3.70996 0.25 5.57031c0.00976562 4.34961 0.25 8.68945 0.219727 13.0303c-0.00976562 2.37988 -0.00976562 4.75977 0 7.12988c0.0498047 5.06934 -0.200195 10.1396 -0.219727 15.21c-0.200195 6.60938 -0.709961 13.2002 -1.29004 19.7793
+c-0.730469 5.88086 -1.5498 11.7803 -3.12012 17.5107c-2.0498 7.75 -5.58984 15.0293 -9.7998 21.8193c-3.16016 5.07031 -6.79004 9.87988 -11.0898 14.0303c-3.87988 3.86035 -8.58008 7.08008 -13.9404 8.4502c-1.5 0.410156 -3.05957 0.450195 -4.58984 0.639648
+c0.0703125 2.99023 0.700195 5.93066 1.25977 8.85059c1.58984 7.70996 3.7998 15.2998 6.76074 22.5996c1.51953 4.03027 3.40918 7.90039 5.38965 11.7197c3.4502 6.56055 7.62012 12.79 12.46 18.46zM245.1 324.44
+c0.350586 0.0595703 0.709961 0.119141 1.07031 0.189453c0.19043 -1.79004 0.0898438 -3.58008 0.0996094 -5.37012v-38.1299c-0.00976562 -1.74023 0.130859 -3.49023 -0.149414 -5.21973c-0.360352 0.0302734 -0.709961 0.0498047 -1.06055 0.0498047
+c-0.949219 3.75 -1.71973 7.5498 -2.61914 11.3096c-0.380859 1.53027 -0.580078 3.09082 -1.07031 4.59082c-1.7002 0.239258 -3.42969 0.169922 -5.15039 0.199219c-5.05957 0.0107422 -10.1299 0 -15.1895 0.0107422
+c-1.66016 0.00976562 -3.32031 -0.0898438 -4.98047 0.0292969c-0.0302734 0.390625 -0.259766 0.910156 0.160156 1.18066c1.28027 0.649414 2.71973 0.879883 4.05957 1.34961c3.43066 1.13965 6.88086 2.16016 10.3105 3.31055
+c1.38965 0.479492 2.90039 0.719727 4.16016 1.54004c0.0400391 0.55957 0.0195312 1.12988 -0.0498047 1.67969c-1.23047 0.549805 -2.53027 0.870117 -3.81055 1.28027c-3.12988 1.0293 -6.29004 1.95996 -9.41016 3.01953c-1.79004 0.620117 -3.66992 1 -5.41016 1.79004
+c-0.0292969 0.370117 -0.0693359 0.730469 -0.109375 1.08984c5.08984 0.19043 10.2002 -0.0595703 15.2998 0.120117c3.36035 0.129883 6.73047 -0.0800781 10.0898 0.0703125c0.120117 0.389648 0.259766 0.769531 0.370117 1.16016
+c1.08008 4.93945 2.33008 9.8291 3.38965 14.75zM251.07 324.64c0.359375 -0.0498047 0.719727 -0.120117 1.08008 -0.199219c0.979492 -3.85059 1.72949 -7.76074 2.70996 -11.6104c0.359375 -1.41992 0.55957 -2.87988 1.0293 -4.27051
+c2.53027 -0.179688 5.07031 0.0107422 7.61035 -0.0498047c5.16016 -0.120117 10.3301 -0.120117 15.4902 -0.0693359c0.759766 0.00976562 1.51953 -0.0302734 2.2793 -0.0800781c-0.0390625 -0.360352 -0.0693359 -0.720703 -0.0996094 -1.08008
+c-1.82031 -0.830078 -3.78027 -1.25 -5.66992 -1.89062c-3.73047 -1.22949 -7.48047 -2.38965 -11.2197 -3.56934c-0.570312 -0.169922 -1.12012 -0.419922 -1.66992 -0.640625c-0.150391 -0.549805 -0.180664 -1.12012 -0.120117 -1.68945
+c0.870117 -0.480469 1.81934 -0.810547 2.76953 -1.08984c4.87988 -1.52051 9.73047 -3.14062 14.6299 -4.60059c0.379883 -0.129883 0.780273 -0.269531 1.12988 -0.490234c0.400391 -0.269531 0.230469 -0.790039 0.150391 -1.17969
+c-1.66016 -0.129883 -3.30957 -0.0302734 -4.96973 -0.0400391c-5.16992 -0.00976562 -10.3301 0.00976562 -15.5 -0.00976562c-1.61035 -0.0302734 -3.21973 0.0195312 -4.82031 -0.209961c-0.519531 -1.66992 -0.719727 -3.41992 -1.16992 -5.11035
+c-0.94043 -3.56934 -1.51953 -7.24023 -2.54004 -10.7793c-0.360352 -0.0107422 -0.709961 -0.0205078 -1.05957 -0.0605469c-0.290039 1.73047 -0.150391 3.48047 -0.150391 5.21973v38.1299c0.0205078 1.78027 -0.0800781 3.58008 0.110352 5.37012zM65.0498 279.67
+c1.12012 2.15039 2.08008 4.40039 3.37012 6.45996c-1.82031 -7.55957 -2.91016 -15.2695 -3.62012 -23c-0.799805 -7.70996 -0.849609 -15.4902 -0.540039 -23.2295c1.0498 -19.9404 5.54004 -39.8301 14.2305 -57.8809c2.99023 -5.98926 6.34961 -11.8291 10.5 -17.1094
+c6.12012 -7.46973 12.5293 -14.7598 19.8398 -21.0898c4.7998 -4.10059 9.99023 -7.78027 15.54 -10.8008c3.26953 -1.64941 6.50977 -3.38965 9.93945 -4.67969c5.01074 -2.03027 10.1904 -3.60938 15.4209 -4.93945c3.8291 -0.959961 7.7793 -1.41016 11.5195 -2.70996
+c5 -1.57031 9.46973 -4.61035 13.0303 -8.43066c4.92969 -5.22949 8.08984 -11.8701 10.2002 -18.6699c0.989258 -2.89941 1.58984 -5.91016 2.16992 -8.91992c0.149414 -0.75 0.219727 -1.51953 0.15918 -2.29004c-6.5 -2.78027 -13.2598 -5.05957 -20.2598 -6.17969
+c-4.10938 -0.780273 -8.29004 -0.990234 -12.46 -1.08008c-10.25 -0.240234 -20.4697 1.75977 -30.1201 5.12012c-3.73926 1.41992 -7.48926 2.84961 -11.0293 4.71973c-8.06055 3.83984 -15.6406 8.7002 -22.46 14.46c-2.9209 2.5498 -5.83008 5.12988 -8.40039 8.03027
+c-9.16016 9.83008 -16.2998 21.4102 -21.79 33.6494c-2.38965 5.55078 -4.61035 11.1807 -6.37012 16.96c-1.16992 3.94043 -2.36035 7.89062 -3.25977 11.9102c-0.75 2.94043 -1.21973 5.9502 -1.87012 8.91992c-0.459961 2.14062 -0.69043 4.32031 -1.03027 6.48047
+c-0.849609 5.42969 -1.2793 10.9297 -1.33008 16.4297c0.110352 6.18066 0.25 12.3701 1.07031 18.5c0.400391 2.86035 0.669922 5.74023 1.15039 8.60059c0.979492 5.69922 2.13965 11.3691 3.70996 16.9297c3.08984 11.6504 7.47949 22.9502 12.6895 33.8398z
+M428.78 286.11c1.09961 -1.66016 1.91016 -3.48047 2.7793 -5.26074c2.10059 -4.44922 4.24023 -8.89941 6.02051 -13.4893c7.61035 -18.7607 12.2998 -38.79 13.04 -59.0508c0.0195312 -1.75977 0.0703125 -3.51953 0.110352 -5.29004
+c0.129883 -9.56934 -1.27051 -19.0898 -3.18066 -28.4492c-0.729492 -3.58984 -1.54004 -7.16992 -2.58008 -10.6904c-4.04004 -14.7197 -10 -29 -18.4102 -41.7803c-8.20996 -12.5693 -19.0098 -23.5498 -31.8398 -31.4092
+c-5.72949 -3.59082 -11.79 -6.64062 -18.0498 -9.19043c-5.78027 -2.19043 -11.71 -4.03027 -17.7998 -5.11035c-6.40039 -1.0498 -12.9102 -1.51953 -19.4004 -1.22949c-7.91992 0.479492 -15.7793 2.07031 -23.21 4.84961
+c-1.93945 0.799805 -3.93945 1.45996 -5.83984 2.33008c-0.209961 1.50977 0.25 2.99023 0.530273 4.45996c1.16016 5.74023 3.03027 11.3604 5.7002 16.5801c2.36914 4.50977 5.51953 8.65039 9.45996 11.9004c2.42969 2.0498 5.23926 3.60938 8.15918 4.83008
+c3.58008 1.5 7.4707 1.96973 11.2402 2.83008c7.23047 1.70996 14.3701 3.92969 21.1504 7c10.3496 4.64941 19.71 11.3799 27.6494 19.46c1.59082 1.60938 3.23047 3.17969 4.74023 4.86914c3.37012 3.76074 6.70996 7.57031 9.85059 11.5303
+c7.47949 10.0703 12.8193 21.5898 16.71 33.4805c1.58008 5.2998 3.20996 10.5996 4.20996 16.0498c0.629883 2.87012 1.04004 5.78027 1.51953 8.67969c0.870117 6.08984 1.58984 12.2207 1.67969 18.3799c0.120117 6.65039 0.140625 13.3203 -0.529297 19.9404
+c-0.730469 7.99023 -1.87012 15.96 -3.70996 23.7803z" />
+ <glyph glyph-name="phoenix-squadron" unicode="&#xf511;" horiz-adv-x="513"
+d="M96.2402 385.19c46.6699 36.2393 105.91 56.2393 165.04 54.7295c29.6699 0.379883 59.29 -5.37988 87.1699 -15.3701c-24.2002 4.64062 -49.1807 6.35059 -73.6006 2.4502c-43 -5.34961 -83.2598 -27.2305 -112.159 -59.3496
+c5.68945 0.989258 10.8096 3.67969 16.0693 5.87988c18.1904 7.88965 37.6006 13.29 57.4004 14.8701c19.7998 2.13965 39.75 0.429688 59.4502 -1.93066c-14.46 -2.79004 -29.2002 -4.58008 -43.1104 -9.60938c-34.5303 -11.1104 -65.46 -33.2607 -86.5498 -62.8203
+c-13.8398 -19.7705 -23.7002 -42.9902 -24.7402 -67.3301c-0.349609 -16.54 5.23047 -34.9102 19.8896 -44.1699c11.1309 -6.66016 24.8506 -9.38965 37.6309 -6.75977c15.4893 2.46973 30.1592 8.66992 43.7295 16.3799c11.5498 6.83984 22.7305 14.5898 32.0498 24.3203
+c3.7998 3.22949 2.54004 8.47949 2.62988 12.8291c-2.12988 0.34082 -4.39941 1.11035 -6.31934 -0.299805c-9.50684 -5.21094 -25.6035 -12.1191 -35.9307 -15.4199c-20.0693 -6.18945 -42.2793 -8.47949 -62.2793 -0.780273
+c12.8301 -1.72949 26.1396 -0.30957 37.8496 5.45996c20.29 9.75 36.9199 25.2705 54.5996 38.8809c27.8408 21.29 57.6406 40.1094 89.1709 55.4697c25.7793 12.0098 53.0898 22.8496 81.8096 24.2002c-15.6797 -13.7607 -32.25 -26.6006 -46.9199 -41.5107
+c-14.5498 -14.04 -27.54 -29.5791 -40.2305 -45.3096c-3.5293 -4.61035 -8.97949 -6.95996 -13.6201 -10.1904c-22.2393 -15.0293 -40.5996 -35.96 -52.0391 -60.2793c-9.36035 -19.7402 -14.5508 -41.9707 -11.8105 -63.8398
+c1.9502 -13.7305 8.74023 -27.6709 20.96 -35.0107c12.9404 -7.98926 29.1396 -8.08984 43.6104 -5.10938c32.8994 7.46973 61.6094 28.9697 81.2793 56c20.5 27.5996 30.6104 62.3799 29.25 96.6396c-0.519531 7.52051 -1.5791 15 -1.66992 22.5498
+c8.02051 -19.54 14.8701 -39.8301 16.7002 -61.0098c2.00977 -14.3203 0.75 -28.8398 -1.62012 -43.0195c-1.91992 -11.0205 -5.68945 -21.5801 -7.80957 -32.5303c20.3604 22.7295 34.1699 51.2402 39.46 81.3096c5.71973 35.3701 0.580078 72.3604 -14.25 104.95
+c20.8398 -32.1201 32.4297 -69.79 35.8096 -107.8c0.5 -12.7705 0.5 -25.5801 0 -38.3398c-2.90918 -34.2607 -12.9697 -67.9502 -29.7598 -98c-26.2002 -47.4805 -68.2002 -85.8906 -117.54 -108.32c-78.5195 -36.3398 -175.2 -31.4102 -248.72 14.7197
+c-38.8398 23.7803 -71.0605 58.3203 -91.6797 98.96c-14.7207 28.8906 -23.4004 60.8203 -25.4404 93.1904v31.4297c3.94043 69.9697 40.9902 136.32 96.2402 178.891zM318.89 304.62c5.51074 0.799805 10.8203 2.57031 16.0205 4.5
+c4.99023 1.76953 9.26953 5.9502 10.3496 11.25c-8.91016 -5 -17.9502 -9.9502 -26.3701 -15.75z" />
+ <glyph glyph-name="sith" unicode="&#xf512;"
+d="M0 416l118.75 -69.71l-11.5195 58.9004l91.0596 -69.8701c8.5 1.50977 17.0996 2.29004 25.71 2.29004s17.21 -0.770508 25.71 -2.29004l91.0596 69.8701l-11.5195 -58.9004l118.75 69.71l-69.71 -118.75l58.8604 11.5195l-69.8408 -91.0293
+c3.04004 -17.0098 3.03027 -34.4404 0 -51.4502l69.8408 -91.0303l-58.8604 11.5205l69.71 -118.78l-118.75 69.71l11.5195 -58.8604l-91.0293 69.8408c-17.0098 -3.04004 -34.46 -3.04004 -51.4805 0l-91.0293 -69.8408l11.5195 58.8604l-118.75 -69.71l69.71 118.78
+l-58.8604 -11.5205l69.8408 91.0303c-1.25488 7.04492 -2.27246 18.5693 -2.27246 25.7246c0 7.15625 1.01758 18.6807 2.27246 25.7256l-69.8408 91.0293l58.8604 -11.5195zM224 316.22c-31.7998 0 -63.6104 -12.0898 -87.8496 -36.3398
+c-48.4902 -48.4902 -48.5 -127.2 0 -175.7c48.5 -48.4893 127.21 -48.5195 175.699 -0.0292969c48.4902 48.4893 48.5 127.199 0 175.699c-24.25 24.25 -56.0498 36.3701 -87.8496 36.3701zM224 279.56c22.4199 0 44.8301 -8.51953 61.9199 -25.6094
+c34.1904 -34.1904 34.1797 -89.6904 0 -123.87c-34.1895 -34.1797 -89.6504 -34.1904 -123.84 0c-34.1904 34.1895 -34.1797 89.6895 0 123.87c17.0898 17.0898 39.5 25.6094 61.9199 25.6094z" />
+ <glyph glyph-name="trade-federation" unicode="&#xf513;" horiz-adv-x="496"
+d="M248 439.2c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -43.5996c129.7 0 234.8 105.1 234.8 234.8s-105.1 234.8 -234.8 234.8s-234.8 -105.1 -234.8 -234.8s105.1 -234.8 234.8 -234.8zM403.1 284.9v-0.100586h-145.699
+v-34.7998h83.2998v-47h-83.2998v-195.8h-48.8008v196.8h-117.699l-36.7002 46h155.1v81.7002h193.8v-46.7998zM329.8 239.8h-82.8994v56.2002h145v24.4004h-171.801v-80.6006h-143.899l20.0996 -23.8994h123.8v-197.4h26.8008v197.4h82.8994v23.8994zM168.5 308.8l22 9.2998
+l-15.7998 -18.0996l15.7002 -18.0996l-22.2002 9.5l-12.2998 -20.5l2.09961 24l-23.2998 5.39941l23.5 5.40039l-2.10059 23.7998zM138.9 328.5l9.5 -10.2002l-13.8008 5.2998l-6.7998 -12.1992l0.799805 14.6992l-13.6992 2.7002l14.2998 3.7998l-1.7002 13.9004
+l8 -12.4004l12.7002 5.90039zM304.3 183.3l-9.2998 -10.7998l9.40039 -10.7002l-13.1006 5.5l-7.2998 -12.2002l1.2002 14.2002l-13.9004 3.2002l13.9004 3.2002l-1.2998 14.2002l7.2998 -12.2002zM411.2 260.5l-15 -17.5996l15.0996 -17l-21.2002 8.7998l-11.5 -19.6006
+l1.80078 22.9004l-22.2002 4.90039l22.2998 5.39941l-2.2002 22.7002l12 -19.5996zM248 418.1c125.3 0 226.9 -101.6 226.9 -226.899s-101.601 -226.9 -226.9 -226.9s-226.9 101.601 -226.9 226.9s101.601 226.899 226.9 226.899zM342.6 252h-83.1992v30.9004h145.699
+v50.6992h-197.8v-81.5996h-157.399l40 -49.9004h116.699v-196.8h52.7002v195.7h83.2998v51zM248 404.8c-94.5996 0 -174.9 -61.5996 -202.9 -146.8h157.4v81.5996h199.1c-38.7998 40.2002 -93.2998 65.2002 -153.6 65.2002zM248 -22.2998c117.9 0 213.5 95.5996 213.4 213.5
+c0 51.8994 -18.5 99.5 -49.3008 136.5v-50.7998h-145.6v-19.2002h83.2002v-62.7002h-83.2998v-195.8h-64.6006v196.8h-114.7l-43.7998 56.2998c-5.7998 -19.2998 -8.89941 -39.8994 -8.89941 -61.0996c0 -117.9 95.6992 -213.5 213.6 -213.5zM178.8 173l22.7002 9.2998
+l-16.9004 -17.0996l15.8008 -18.7998l-21.5 10.7998l-13 -20.9004l3.69922 23.7998l-23.7998 5.90039l23.7002 3.90039l-1.7002 24.5z" />
+ <glyph glyph-name="wolf-pack-battalion" unicode="&#xf514;" horiz-adv-x="456"
+d="M239.73 -23.5303l-11.4307 -21.0996l-11.4395 21.1104l-10.5605 -15.8408l-5.28027 12.3203l-5.2793 -7.04004v-29.9102c-21.0605 7.91992 -21.1104 66.8604 -25.5098 97.21c-4.62012 31.8799 0.879883 92.8105 -81.3701 149.11
+c8.87988 23.5996 12 49.4297 2.63965 80.0498c-27.8701 -3.33008 -53.9404 -10.5801 -63.3398 -54.0996l30.3496 -8.36035c-11.1494 -23.04 -17.0195 -46.7598 -13.2002 -72.1396l27.2705 7.04004l6.16016 -33.4307l18.4697 7.04004l8.7998 -33.4297l19.3398 7.0498
+l-26.3896 -21.1094l-8.7998 28.1494l-24.6299 -5.28027l-7.04004 35.6309l-26.3906 -14.5205c-0.25 20.0205 -6.95996 58.0605 8.80078 84.4502l-26.3906 -5.28027c-3.99023 22.0703 2.37988 39.21 7.91992 56.7402l-22.4297 -9.67969
+c0.44043 25.0693 29.9404 56.79 61.5898 58.5098c20.2197 1.08984 56.7305 25.1602 54.1006 51.8994c-1.95996 19.8701 -17.4502 42.6201 -43.1104 49.7002c43.9795 -36.5098 9.67969 -67.2998 -5.28027 -73.46c-4.39941 11.4404 -17.54 69.0801 0 130.2
+c40.4697 -22.8701 89.7305 -65.0996 93.25 -147.81l58.0605 -38.71l3.51953 -93.25l-107.33 59.8193l-7.04004 -7.04004l17.5898 -3.51953l43.9902 -38.71l15.8398 5.2793l28.1504 -49.2598l3.51953 -119.64l-21.1094 -15.8398l32.5498 -15.8398l32.5498 15.8398
+l-21.1094 15.8398l3.51953 119.64l28.1504 49.2598l15.8398 -5.2793l43.9902 38.71l17.5898 3.51953l-7.04004 7.04004l-107.33 -59.8193l3.51953 93.25l58.0605 38.71c3.51953 82.6895 52.7793 124.92 93.25 147.79c17.54 -61.1201 4.39941 -118.761 0 -130.2
+c-14.96 6.16016 -49.2705 36.9502 -5.28027 73.46c-25.6602 -7.08008 -41.1504 -29.8301 -43.1104 -49.7002c-2.63965 -26.7305 33.8799 -50.8096 54.1006 -51.9004c31.6396 -1.70996 61.1396 -33.4297 61.5801 -58.5l-22.4307 9.68066
+c5.54004 -17.5303 11.9102 -34.6699 7.91992 -56.7402l-26.3896 5.28027c15.7705 -26.3906 9.0498 -64.4307 8.7998 -84.4502l-26.3896 14.5195l-7.04004 -35.6299l-24.6299 5.28027l-8.7998 -28.1504l-26.3906 21.1104l19.3506 -7.04004l8.7998 33.4297l18.4697 -7.04004
+l6.16016 33.4307l27.2803 -7.05078c3.80957 25.3809 -2.0498 49.1006 -13.2002 72.1406l30.3496 8.35938c-9.39941 43.5205 -35.4697 50.7607 -63.3398 54.1006c-9.35938 -30.6201 -6.24023 -56.4404 2.64062 -80.0498c-82.25 -56.3008 -76.75 -117.221 -81.3701 -149.11
+c-4.40039 -30.3496 -4.4502 -89.29 -25.5107 -97.21v29.9102l-5.2793 7.04004l-5.28027 -12.3203zM318.9 71.4805l-15.8408 10.5596c7.4707 4.36035 13.7607 8.41992 19.3506 12.3203c-0.600586 -7.26074 -0.270508 -13.8799 -3.50977 -22.8799zM347.05 120.74
+c-0.399414 -10.9404 -0.899414 -21.6602 -1.75977 -31.6699c-7.84961 1.85938 -15.5703 3.7998 -21.1104 7.04004c8.24023 7.89941 15.5508 16.2695 22.8701 24.6299zM371.68 115.46l-23.75 6.16016c7.24023 9.08984 13.0898 18.1797 18.4707 27.2695
+c3.22949 -9.21973 5.25977 -20 5.2793 -33.4297zM375.2 196.4c19.4395 -12.8105 27.7998 -33.6602 29.9102 -56.3008c-12.3203 4.53027 -24.6299 9.31055 -36.9502 10.5605c5.05957 11.9902 6.64941 28.1396 7.04004 45.7402zM373.44 242.14
+c18.5293 -2.62988 35.1494 -9.19922 45.75 -28.1494c-14.21 -4.36035 -24.7705 -5.9707 -43.9902 -14.0801c0.0800781 13.4102 -0.950195 27.9297 -1.75977 42.2295zM137.68 71.4805c-3.23926 9 -2.91016 15.6191 -3.50977 22.8799
+c5.58984 -3.90039 11.8799 -7.95996 19.3496 -12.3203zM109.53 120.74c7.31934 -8.36035 14.6299 -16.7305 22.8701 -24.6299c-5.54004 -3.24023 -13.2607 -5.18066 -21.1104 -7.04004c-0.860352 10.0098 -1.36035 20.7295 -1.75977 31.6699zM84.8896 115.46
+c0.0205078 13.4297 2.05078 24.21 5.28027 33.4297c5.37988 -9.08984 11.2305 -18.1797 18.4697 -27.2695zM81.3701 196.4c0.389648 -17.6006 1.99023 -33.75 7.04004 -45.7402c-12.3203 -1.25 -24.6299 -6.03027 -36.9502 -10.5605
+c2.11035 22.6406 10.4697 43.4902 29.9102 56.3008zM83.1299 242.14c-0.80957 -14.2998 -1.83984 -28.8193 -1.75977 -42.2295c-19.2197 8.10938 -29.7803 9.71973 -43.9902 14.0801c10.6104 18.9502 27.2197 25.5195 45.75 28.1494z" />
+ <glyph glyph-name="hornbill" unicode="&#xf592;" horiz-adv-x="509"
+d="M75.3701 77.7002c2.13965 -15.8301 -5.77051 -31.9805 -20.9404 -39.29c-18.8496 -9.10059 -41.5498 -1.16992 -50.6797 17.6797c-9.08008 18.8301 -1.12988 41.5801 17.7002 50.6504c7.0498 3.39941 14.6299 4.41992 21.8496 3.37988
+c-78.2803 111.35 52 190.53 52 190.53c-5.85938 -43.04 -8.24023 -91.1602 -8.24023 -91.1602c-67.3096 -41.4502 0.920898 -64.0605 39.8105 -72.8701c19.7695 -53.6201 71.1797 -91.9404 131.66 -91.9404c1.91992 0 3.76953 0.209961 5.66992 0.280273l0.109375 -18.8604
+c-99.2197 -1.38965 -158.699 29.1406 -188.939 51.6006zM183.38 405.4c109.75 73.9697 187.59 -54.0508 187.59 -54.0605c-43.04 5.86035 -91.1797 8.24023 -91.1797 8.24023c-43.0996 70.0098 -65.8301 -6.54004 -73.8398 -44.29
+c-51.4805 -20.8301 -87.8506 -71.21 -87.8506 -130.16c0 -0.910156 0.120117 -1.78027 0.140625 -2.67969l-21.8398 -0.150391c-1.41016 100.46 29.8691 160.12 52.4199 190.03c-15.1602 -1.19043 -30.2002 6.75977 -37.1807 21.2295
+c-9.10938 18.8506 -1.16992 41.5801 17.6904 50.6807c18.8398 9.08984 41.5596 1.14941 50.6602 -17.6904c3.29004 -6.81934 4.2793 -14.1494 3.38965 -21.1494zM487.56 271.23c-6.23926 -3.01074 -12.8799 -4 -19.3096 -3.5c84.4902 -113.45 -48.96 -194.61 -48.96 -194.61
+c5.87012 43.0303 8.20996 91.1602 8.20996 91.1602c66.5996 40.96 0.639648 63.5195 -38.46 72.54c-20.5898 51.96 -71.1904 88.7598 -130.49 88.7598c-2.75 0 -5.43945 -0.259766 -8.13965 -0.410156l-0.140625 22.5c93.6104 1.31055 151.74 -25.7998 183.45 -47.7402
+c-2.31934 15.9502 5.60059 32.2705 20.8701 39.6406c18.8398 9.08008 41.5703 1.16016 50.6699 -17.6904c9.11035 -18.8398 1.14062 -41.5596 -17.7002 -50.6494zM373.05 11.7598c14.1904 0.0800781 27.8906 -7.72949 34.4502 -21.3496
+c9.08984 -18.8203 1.16016 -41.5498 -17.7002 -50.6504c-18.8398 -9.06934 -41.5801 -1.17969 -50.6396 17.71c-2.19043 4.52051 -3.33008 9.25 -3.64062 13.9707c-111.979 -80.3701 -191.899 50.9697 -191.899 50.9697c43.0703 -5.87988 91.1895 -8.21973 91.1895 -8.21973
+c41.3301 -67.1709 63.9209 0.540039 72.7705 39.4893c53.3398 19.9004 91.3896 71.1807 91.3896 131.45c0 2.08008 -0.219727 4.08984 -0.299805 6.15039l19.5205 0.139648c1.2793 -89.9697 -23.71 -147.2 -45.1406 -179.66z" />
+ <glyph glyph-name="mailchimp" unicode="&#xf59e;" horiz-adv-x="428"
+d="M222.7 374.8c-2.2002 -1.39941 -4.40039 -2.7998 -6.40039 -4.2002l-3.59961 12.6006zM100.9 133.6c3.5 -2.69922 12.7998 -4.69922 14.6992 -14.1992c1.90039 -10.8008 -6 -20.5 -15.5 -20.7002c-6.69922 -0.200195 -10.3994 4 -9.69922 4.89941
+c0.299805 0.400391 1.2998 0.200195 2.89941 0c8.5 -1.2998 13.7002 3.90039 14.9004 9.30078c0 0 0.299805 1.5 0.299805 2.5c0 0.899414 -0.0996094 1.7998 -0.200195 2.59961c-1 5.7002 -7.39941 6.7002 -11.5996 11.2002c-3.7998 4 -3 9.2002 -0.700195 11.7002
+c2.7998 2.7998 6.7998 1.7998 6.7998 0.799805c0 -0.5 -1 -0.900391 -2.2002 -1.7002c-1.59961 -1.09961 -1.7998 -2.2002 -1.39941 -4c0.299805 -1 0.700195 -1.59961 1.7002 -2.40039zM105.4 144c-3.2002 4.5 -10.4004 8.7998 -20.2002 6.59961
+c-1.7998 -0.399414 -0.799805 -0.199219 -2.60059 -0.699219c-0.299805 0 -0.5 -0.100586 -0.799805 -0.200195c-0.599609 -0.200195 -1.09961 -0.400391 -1.59961 -0.700195c-0.400391 -0.299805 -4 -1.90039 -7 -5.5c-4 -5 -5.40039 -11.5 -5.2002 -17.7002
+c0.200195 -6 2 -9.39941 2.2998 -10.2002c1.40039 -3 -1.89941 -3.59961 -4.7998 -0.399414c-2.40039 2.5 -3.90039 6.2998 -4.59961 9.7002c-3 14 3.19922 28 17.5996 33.5996c0.799805 0.299805 1.7002 0.5 2.5 0.700195c1.5 0.5 6.7002 1.5 12.0996 0.700195
+c5.80078 -0.900391 11 -3.90039 14.3008 -7.7002c2.5 -2.7998 4.39941 -6.90039 4.09961 -10.5c-0.0996094 -1.5 -0.799805 -3.60059 -2.09961 -4.2002c-0.5 -0.299805 -1.10059 -0.200195 -1.40039 0.200195c-1 1 -0.200195 2.89941 -2.59961 6.2998zM201.7 379.4
+l-6.10059 2l-1.69922 6.89941l2.89941 8.7998zM212.4 367.8c-2.7002 -1.7998 -5 -3.59961 -7 -5.09961l-13.6006 11.3994zM303.8 182.5c-3.5 1.90039 -5.2002 5.59961 -3.89941 8.2002c1.39941 2.59961 5.5 3.2002 9 1.2002
+c3.59961 -1.90039 5.2998 -5.60059 3.89941 -8.2002c-1.39941 -2.60059 -5.5 -3.2002 -9 -1.2002zM266.1 184.2c-5.19922 0.399414 -10.6992 0.299805 -18.3994 -2.90039c-2.2998 -1 -3.7998 -1.7002 -4.40039 -1.2002c-0.700195 0.5 -0.0996094 2.40039 2.10059 4.5
+c1.89941 1.80078 3.89941 2.90039 6.09961 3.80078c0.299805 0.199219 0.700195 0.299805 1.09961 0.399414c1 0.299805 2.10059 0.600586 3.2002 0.799805c9.10059 1.5 15.7998 -3.5 14.9004 -5c-0.400391 -0.699219 -2.10059 -0.599609 -4.60059 -0.399414zM426.6 124.3
+c5.30078 -12.8994 -5.19922 -28.3994 -5.19922 -28.3994c-0.400391 -1.2002 -33.3008 -127.9 -162.9 -127.9c-112 0 -165.1 97.4004 -165.1 97.4004c-8.2002 -0.5 -16 0.799805 -23.3008 3.5c-32.8994 12.0996 -48.5996 49.6992 -37.2998 81.1992l-18.0996 13.8008
+c-69.9004 53.2998 128.3 297.199 198.5 242c0.299805 -0.300781 16.7002 -13.5 16.7002 -13.5c25.0996 15.1992 50 23.5 70.2998 23.5c14.3994 0 26.3994 -4.10059 34.5 -12.9004c18.2002 -19.7002 11.5996 -57.2998 -13.2002 -95.4004
+c5.2998 -5.09961 9.7002 -12 12.9004 -18.7998c7.89941 -6 13.0996 -15.2998 15.5996 -28.0996c2.7998 -14.2002 3.40039 -34.5 5.2002 -43.2002c6.7002 -3 3.89941 -1.59961 11 -5.2002c7.39941 -3.7002 16.5 -8.2998 26.5 -17.8994
+c17.7998 -4.40039 24.5996 -23.4004 14.2002 -36.3008c-0.200195 -0.299805 -1.5 -1.7998 -2.80078 -3.2998c0.300781 -0.799805 3.7002 -6.2998 6.2002 -17.5996c7.2998 -1.7998 13.2002 -6.2998 16.2998 -12.9004zM20.0996 180.1
+c2.40039 -4.09961 19.3008 -16.5 19.4004 -16.5c6.59961 9.10059 14.0996 15.8008 21.7002 20h-0.700195c15.2998 53.3008 56.2998 108 98.0996 143.7c10.4004 8.90039 31.4004 23 31.4004 23l-23.9004 20.7998l-1.69922 12.1006l32.5996 -26.9004
+c-1.40039 -1.2002 -2.90039 -2.39941 -4.40039 -3.7002c-3.2998 -2.69922 -6.5 -5.59961 -9.69922 -8.59961c-4.60059 -4.2998 -9.30078 -8.90039 -13.9004 -13.7002c-9.90039 -10.2002 -19.5996 -21.3994 -28.7002 -32.7998
+c-16 -20.2002 -22.7002 -29.7002 -32.8994 -48.9004c-0.100586 -0.0996094 55.5996 66.6006 69.3994 80.3008c25.4004 25.1992 67 53.2998 67.4004 53.5c17.7998 10.7998 32.7998 17.1992 45.0996 20c-20.2002 -2 -41.7998 -12.4004 -58.7998 -22.6006
+c0 0.100586 -24.7002 20.9004 -24.7002 20.9004l-9 -3.5c-32.7998 7.7002 -105.1 -46.7998 -148.6 -115.9c-17.6006 -28 -42.2998 -77.2002 -28.1006 -101.2zM89.2002 80c28.2002 0 49.5996 26.7998 44.3994 56.2002c0 0 -2.7998 -6.7002 -5.5 -10.2998
+c0.700195 10.5 -1 20.8994 -4.19922 30.7998c0 0 -1.40039 -5.90039 -4.2002 -13.1006c-2.7002 22.9004 -12.2998 27.8008 -12.2998 27.8008c-5.5 2.59961 -11.7002 4 -18.2002 4c-25 0 -45.2998 -21.3008 -45.2998 -47.7002c0 -26.2998 20.2998 -47.7002 45.2998 -47.7002z
+M145 248.5c12.7002 16.5 29 32.2998 51.4004 46.9004c48.7998 31.5996 97.5 35.1992 114.199 22.3994c0 0.100586 -0.299805 0.799805 -0.399414 1c-4.2998 9 -14.2002 15.2998 -23 17.7998c1.59961 -1.59961 3.7998 -4.5 4.7002 -6.2998
+c-6.60059 4.60059 -15.5 8.60059 -24.7002 10.5c0 0 1.09961 -0.799805 1.2998 -1c1.7998 -1.7002 4.2998 -4.39941 5.2998 -6.7002c-8.7998 3.60059 -20 5.5 -29.5 3.7002l-1.2002 -0.299805s1.2002 -0.299805 1.5 -0.400391c3.2002 -0.899414 7.7002 -2.89941 9.7002 -5.5
+c-15.8994 2.80078 -33.2998 -0.299805 -42.7002 -5.5h2.2002c3.40039 -0.0996094 10.4004 -0.5 13.2998 -2.39941c-10 -2 -24.3994 -6.5 -32.1992 -13.2002c1.39941 0.0996094 9.19922 1.09961 12.3994 0.599609c-42.7998 -24.5 -62.2998 -61.5996 -62.2998 -61.5996z
+M408.4 102.2c8.59961 10.2998 6.19922 20.5 -5.10059 20.5c-3.7002 0 -6.39941 -1 -6.39941 -1s0 15.0996 -7 26.8994c-5.40039 -6.09961 -20.4004 -18.0996 -40 -27.3994c-18.4004 -8.7002 -43 -16.4004 -73.5 -17.4004
+c-8.60059 -0.299805 -13.8008 1.10059 -16.8008 -8.89941c-0.299805 -0.900391 -0.399414 -1.90039 -0.5 -2.90039c33.4004 -10.5 86.3008 10 91.4004 11.2002c0.200195 0.0996094 0.400391 0.0996094 0.5 0.0996094c3.2998 -0.200195 -28.2998 -20.2002 -72.2998 -20.2002
+c-7.40039 0 -13.9004 0.700195 -18.9004 1.7002c2.5 -8.2002 8.7002 -11.8994 17 -13.7998c6.2002 -1.5 12.9004 -1.59961 12.9004 -1.59961c60.8994 -1.7002 111 45.5 113.1 47.8994c0 0 -0.5 -1.2002 -0.599609 -1.39941c-9 -20 -60.6006 -56.4004 -112.101 -55.4004
+l-0.199219 -0.0996094c-12 0.0996094 -26.5 3.09961 -34 12.5c-11.9004 14.7998 -5.7002 39.3994 13.2998 40c0 0 4.39941 0.0996094 6.2998 0.0996094c47 1.40039 89.2998 18.7002 119.5 54.7998c4.09961 5.2002 -0.5 12.2998 -9.5 12.5h-0.0996094l-0.100586 0.100586
+c-10.7002 11.3994 -20 15.5 -28.3994 19.6992c-17.5 8.90039 -15.8008 1.2002 -19.9004 44.5c-1.09961 11.7002 -3.40039 27.6006 -13.5996 33.7002c-2.7002 1.60059 -5.7002 2.2998 -8.7002 2.2998s-4.5 -0.699219 -5.10059 -0.799805
+c-5.59961 -1.2002 -8.7998 -4.39941 -12.7998 -8.09961c-18.8994 -17.4004 -34.0996 -12.7002 -56.5996 -12.5c-21.1006 0.0996094 -39.7998 -14.6006 -43.4004 -37.2998c-1.7998 -11.9004 -0.700195 -24 1.2002 -29.4004c0 0 -5.7998 3.7998 -8.5 7.2002
+c3.2998 -20.7998 22.2002 -34.2998 22.2002 -34.2998c-3 -0.700195 -7.2998 -0.400391 -7.2998 -0.400391s10.6992 -8.5 20.0996 -11.5c-2.40039 -1.5 -14.7002 -13.7002 -21.0996 -30.4004c-5.90039 -15.6992 -3.5 -34.5 -3.5 -34.5l5.19922 7.7002
+s-3.2998 -17.2002 3.2002 -33.7998c2.2002 5 6.90039 13.7998 6.90039 13.7998s-0.799805 -18.3994 8.09961 -33.5c0.299805 3.40039 1.60059 12.2998 1.60059 12.2998s5.09961 -15.6992 16.8994 -26.6992c22 -19.5 80.2998 -23.1006 124.7 11.7998
+c35.2002 27.5996 41.2998 60.7998 41.9004 62zM259 89v2v-2zM259.7 84.9004c0 0.0996094 -0.100586 0.0996094 -0.100586 0.199219c0 -0.0996094 0 -0.0996094 0.100586 -0.199219c0 0.0996094 -0.100586 0.199219 -0.100586 0.399414
+c0 -0.200195 0 -0.299805 0.100586 -0.399414zM322.6 195.3c0.600586 3.5 3.10059 6.10059 5.7002 5.7002c2.60059 -0.400391 4.2002 -3.59961 3.7002 -7.09961c-0.599609 -3.60059 -3.09961 -6.10059 -5.7002 -5.7002c-2.59961 0.399414 -4.2002 3.59961 -3.7002 7.09961z
+M264.6 196.5c-10.3994 3.7998 -18 4.40039 -31.5 -0.700195c-3.39941 -1.2998 -5.7998 -2.2998 -7 -2.09961c-1.89941 0.299805 0 3.7002 4.10059 7.09961c8.2998 6.7002 19.7002 8.7002 29.5 5.10059c4.2998 -1.60059 9.09961 -4.7002 11.5996 -8.40039
+c0.900391 -1.40039 1.2002 -2.5 0.799805 -2.90039c-0.699219 -0.899414 -3.5 0.300781 -7.5 1.90039zM311.3 225.7c-0.0996094 3.39941 0.5 8.89941 3.7002 10c5.40039 1.89941 12.5996 -12 12.9004 -24.2998c-4.30078 2.09961 -9.30078 3.09961 -14.5 2.59961
+c-1.30078 3.90039 -1.90039 7.7002 -2.10059 11.7002z" />
+ <glyph glyph-name="megaport" unicode="&#xf5a3;" horiz-adv-x="496"
+d="M214.5 238.4l33.4004 33.3994l33.3994 -33.3994v-66.4004l-33.2998 -33.2998l-33.5 33.5v66.2002zM248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM393.1 25.5996h0.100586v87.1006l-59.7002 59.7002v87.5996l-59.5 59.5
+v75.5996l-26.0996 19.2002l-26.1006 -19.2002v-75.5996l-59.5 -59.5v-87.9004l-59.5 -59.5v-87l26.1006 -19.1992l26.0996 19.1992v65.5l33.5 33.4004l33.4004 -33.4004v-65.5l26.0996 -19.1992l26.2002 19.1992v65.5l33.3994 33.4004l33.4004 -33.4004v-65.5l26 -19.1992z
+" />
+ <glyph glyph-name="nimblr" unicode="&#xf5a8;" horiz-adv-x="355"
+d="M232.6 148.71c15.5703 0 27.1504 -11.46 27.1504 -26.96c0 -15.5498 -11.6201 -26.96 -27.1504 -26.96c-15.6992 0 -27.1494 11.5703 -27.1494 26.96c0 15.5098 11.5801 26.96 27.1494 26.96zM99.0098 121.75c0 15.6104 11.6807 26.96 27.1504 26.96
+c15.5703 0 27.1494 -11.46 27.1494 -26.96c0 -15.4102 -11.4697 -26.96 -27.1494 -26.96c-15.4404 0 -27.1504 11.3096 -27.1504 26.96zM177.76 289.05c98.3701 0 177.76 -79.0693 177.76 -176.53c0 -97.5693 -79.5195 -176.52 -177.76 -176.52
+c-98.1494 0 -177.76 78.8701 -177.76 176.52v335.48l45.25 -227c30.2002 48.2305 97.75 68.0498 132.51 68.0498zM177.76 -19.0703c73.2598 0 132.51 58.9102 132.51 131.59c0 72.6807 -59.2393 131.591 -132.51 131.591c-73.2695 0 -132.51 -58.9102 -132.51 -131.591
+c0 -72.6895 59.2402 -131.59 132.51 -131.59z" />
+ <glyph glyph-name="rev" unicode="&#xf5b2;" horiz-adv-x="410"
+d="M270.67 173.11c0 -36.1602 -29.4102 -65.5703 -65.5596 -65.5703c-36.1504 0 -65.5703 29.4102 -65.5703 65.5703c0 36.1592 29.4102 65.5596 65.5703 65.5596c36.1592 0 65.5596 -29.4004 65.5596 -65.5596zM410.22 178.16v-210.16h-210.16v0.129883
+c-110.939 2.69043 -200.06 93.3896 -200.06 204.98c0 108.529 84.3096 197.319 191.01 204.569v38.3203l108.76 -62.7803l-108.76 -62.79v39.1201c-80.0293 -7.16016 -142.99 -74.5693 -142.99 -156.43c0 -86.6201 70.4707 -157.09 157.091 -157.09
+s157.09 70.4697 157.09 157.09c0 55.1895 -28.6406 103.79 -71.8105 131.82l45.3799 26.1992c44.2207 -36.6299 72.8301 -91.4297 74.3203 -152.979h0.129883z" />
+ <glyph glyph-name="shopware" unicode="&#xf5b5;" horiz-adv-x="495"
+d="M395.5 -7.26953c-42.9502 -31.79 -93.9502 -48.5908 -147.48 -48.5908c-137.21 0 -248.02 111 -248.02 248c0 137.19 111.04 248 248.02 248c61.3008 0 120.141 -22.5498 165.681 -63.5c2.62012 -2.35938 0.580078 -6.63965 -2.86035 -6.17969
+c-17.6699 2.42969 -36.75 3.66016 -56.71 3.66016c-129.36 0 -222.399 -53.4697 -222.399 -155.351c0 -109.039 92.1299 -145.88 176.829 -178.729c33.6406 -13.04 65.4004 -25.3604 86.96 -41.5898c1.90039 -1.44043 1.89062 -4.31055 -0.0195312 -5.71973zM494.96 215.05
+c2.00977 -23.0801 2.95996 -64.9199 -15.9297 -113.31c-0.790039 -2.00977 -3.16992 -2.87012 -5.0498 -1.82031c-29.4902 16.3604 -61.6104 28.3398 -92.6807 39.9297c-60.2803 22.4805 -112.34 41.8906 -112.34 84.4902c0 1.45996 -3.87988 53.6299 80.25 53.6299
+c50.8604 0 92.7197 -17.5195 144.48 -60.4795c0.719727 -0.610352 1.18945 -1.5 1.26953 -2.44043z" />
+ <glyph glyph-name="squarespace" unicode="&#xf5be;" horiz-adv-x="512"
+d="M186.12 104.66l157.22 157.2c38.5703 38.5898 101.13 38.5898 139.72 0c38.5908 -38.5801 38.5908 -101.13 0 -139.721l-119.25 -119.239l-0.0400391 -0.0400391c-19.2891 -19.2705 -50.5498 -19.25 -69.8193 0.0400391l154.149 154.14
+c19.29 19.29 19.29 50.5703 0 69.8604s-50.5693 19.29 -69.8594 0l-157.181 -157.181c-9.64941 -9.64941 -25.29 -9.64941 -34.9395 0c-9.65039 9.65039 -9.65039 25.29 0 34.9404zM430.65 209.46c9.63965 -9.63965 9.63965 -25.2803 -0.0107422 -34.9297l-157.199 -157.2
+c-38.5801 -38.5703 -101.141 -38.5703 -139.721 0l-0.0195312 0.0195312c-9.64062 9.65039 -9.62988 25.29 0.0195312 34.9307l0.0107422 0.00976562c9.64941 9.63965 25.2793 9.62988 34.9199 -0.00976562l0.0498047 -0.0498047
+c19.29 -19.2607 50.5498 -19.2402 69.8193 0.0498047l157.2 157.18c9.64062 9.65039 25.2803 9.65039 34.9307 0zM168.66 122.13c-38.6006 -38.5801 -101.13 -38.5801 -139.73 0.00976562c-38.5801 38.5801 -38.5801 101.13 0 139.721l119.23 119.25l0.0195312 0.0195312
+c19.3008 19.2803 50.5703 19.2705 69.8506 -0.0195312l-154.17 -154.17l-0.0302734 -0.0302734c-19.2803 -19.2998 -19.2598 -50.5605 0.0302734 -69.8398l0.00976562 -0.0107422c19.29 -19.29 50.5703 -19.2793 69.8496 0.0107422l157.21 157.18
+c9.64062 9.63965 25.2705 9.63965 34.9102 0c9.64062 -9.65039 9.64062 -25.29 0 -34.9404zM81.3301 174.53c-9.64062 9.64941 -9.65039 25.29 0 34.9297l157.189 157.19c38.5908 38.5898 101.131 38.5898 139.721 0c9.64941 -9.64062 9.64941 -25.2803 0 -34.9307
+c-9.64062 -9.64941 -25.2803 -9.64941 -34.9307 0l-0.0195312 0.0205078c-19.29 19.2793 -50.5596 19.2695 -69.8398 -0.0205078l-157.21 -157.189c-9.64062 -9.64062 -25.2705 -9.64062 -34.9102 0z" />
+ <glyph glyph-name="themeco" unicode="&#xf5c6;" horiz-adv-x="441"
+d="M199.74 435.71c9.74023 5.63965 25.5898 5.73047 35.3896 0.209961l188.13 -105.95c9.81055 -5.51953 17.7598 -19.1396 17.7598 -30.3799v-213.87c0 -11.2598 -7.92969 -24.8896 -17.71 -30.46l-188.22 -107.14c-9.78027 -5.57031 -25.5801 -5.48047 -35.29 0.209961
+l-182.22 106.72c-9.70996 5.69043 -17.5801 19.4307 -17.5801 30.6807v213.859c0 11.2598 7.90039 24.96 17.6299 30.5898zM123.54 239c-15.6904 0 -31.3896 -0.139648 -47.0801 -0.139648v-99.8701h18.8301v29.3896h28.25c48.9404 0 48.79 70.6201 0 70.6201zM261.5 140.27
+l-30.25 34.1006c36.4004 7.38965 34.2598 64.21 -10.7002 64.4902c-15.8398 0 -31.6699 0.139648 -47.5098 0.139648v-100.01h18.8301v33.3799h18.1299l29.0996 -33.3799h22.4004v1.2793zM220.56 221.31c22.9805 0 22.9004 -31.96 0 -31.96h-28.6797v31.96h28.6797z
+M126.49 222.88c20.8496 0 20.7793 -38.2402 0 -38.2402h-31.8105v38.2305zM316.14 240.85c-67.3994 0 -69.8594 -104.149 0 -104.149c68.3906 0.00976562 68.3301 104.149 0 104.149zM316.14 223.73c43.4307 0 44.1006 -69.7607 0 -69.7607
+c-44.1201 0 -43.7393 69.7607 0 69.7607z" />
+ <glyph glyph-name="weebly" unicode="&#xf5cc;" horiz-adv-x="512"
+d="M425.09 382.17c50.9102 0 87.5498 -35.1504 86.9199 -83.4697c0 -21.6201 -0.950195 -18.5498 -77.5 -227.2c-22.3799 -60.5703 -67.7695 -69.6699 -92.7402 -69.6699c-39.2393 0 -70.0391 19.46 -85.9297 54.29c-15.8896 -34.5205 -46.7002 -53.9805 -85.9297 -53.9805
+c-24.9697 0 -70.3701 8.78027 -92.7402 69.3506c-72.9902 200.21 -77.1699 204.52 -77.1699 233.479c0 43.3105 38.5898 77.2002 87.54 77.2002c40.21 0 73.2803 -25.7295 83.6602 -64.3301c18.4795 58.0498 65.5 64.3301 85.2803 64.3301
+c19.4492 0 66.7891 -6.26953 84.9492 -64.3301c10.3799 38.6006 43.7803 64.3301 83.6602 64.3301zM451.43 267.36c3.49023 11.1992 7.29004 19.3701 7.61035 27.2393c0 22.3906 -16.1602 35.71 -38.3301 35.71c-18.6904 0 -31.9902 -11.7998 -36.1104 -29.0498
+l-44.0293 -139.819h-0.950195l-44.6602 136.79c-6.01953 19.9697 -16.4697 32.0791 -38.96 32.0791s-32.9404 -12.4092 -38.96 -32.0791l-44.6602 -136.79h-0.950195l-44.0293 139.819c-4.12012 17.25 -17.4199 29.0498 -36.1104 29.0498
+c-22.4902 0 -38.3301 -13.0195 -38.3301 -29.3594c0 -10.5898 2.54004 -19.6699 7.91992 -34.5l64.9404 -175.23c7.91016 -21.4795 21.2197 -37.2197 46.2393 -37.2197c23.1201 0 37.0605 12.0996 44.0205 33.5996l39.2803 117.42h0.949219l39.2803 -117.42
+c6.65039 -21.4893 20.5898 -33.8994 44.0303 -33.8994c25.0195 0 38.3203 15.7295 46.2402 37.2197z" />
+ <glyph glyph-name="wix" unicode="&#xf5cf;" horiz-adv-x="640"
+d="M393.38 316.31c0 -13.0293 2.08008 -32.6895 -28.6797 -43.8291c-9.52051 -3.4502 -15.9502 -9.66016 -15.9502 -9.66016c0 31 4.71973 42.2197 17.4004 48.8594c9.75 5.11035 27.2295 4.62988 27.2295 4.62988zM277.58 280.77
+c5.47949 26.3408 30.8799 38.3408 55.2998 35.2705l-65.5703 -247.93s-21.6396 -1.56055 -32.46 3.95996c-14.2197 7.25 -20.9893 12.8398 -29.5898 46.5693c-7.66992 30.0703 -29.1494 118.4 -31.1201 124.7c-4.30957 13.8105 -10.6396 14.9404 -15.3994 0
+c-2.00977 -6.29004 -23.4502 -94.6299 -31.1201 -124.7c-8.61035 -33.7295 -15.3701 -39.3193 -29.5898 -46.5693c-10.8301 -5.52051 -32.46 -3.95996 -32.46 -3.95996l-65.5703 247.93c23.8604 3 49.7305 -8.5498 55.2803 -35.2705l34.2393 -132.659l28.4805 108.569
+c7.76953 32.3506 21.0596 48.5303 48.4297 48.5303c27.6201 0 40.7402 -16.54 48.4307 -48.5303l28.4795 -108.569zM393.36 275.56v-8.97949l0.0195312 0.00976562v-150.27c-0.129883 -30.8301 -3.33008 -37.6807 -17.2598 -44.7803
+c-10.8203 -5.52051 -27.3701 -3.42969 -27.3701 -3.42969v152.069c0 21.25 -1.95996 27.9404 13.1797 35.2002c6.19043 2.96973 11.96 5.25 17.9707 8.61035c9.35938 5.22949 13.46 11.5693 13.46 11.5693zM556.8 191.48l82.9902 -123.36s-35.9297 -4.62012 -53.3203 11.21
+c-13.9102 12.6602 -23.7393 28.3398 -53.1396 70.7197c-0.5 0.770508 -6.25977 10.5205 -13.0703 0c-34.9297 -50.3496 -41.0195 -60.2598 -52.5098 -70.7197c-17.3799 -15.8301 -53.9502 -11.21 -53.9502 -11.21l82.9697 123.36l-83.1992 123.739
+s35.1094 5.98047 52.5 -9.84961c13.3799 -12.1797 24.8896 -30.2402 54.1797 -72.4697c6.82031 -10.54 12.5996 -0.730469 13.0703 0c29.7695 42.9199 40.8799 60.3691 54.1797 72.4697c17.3896 15.8301 52.5 9.84961 52.5 9.84961z" />
+ <glyph glyph-name="ello" unicode="&#xf5f1;" horiz-adv-x="496"
+d="M248 440c136.97 0 248 -111.03 248 -248s-111.03 -248 -248 -248s-248 111.03 -248 248s111.03 248 248 248zM391.84 154.8c2.48047 7.44043 -2.47949 15.71 -9.91992 17.3604c-7.43945 2.47949 -15.71 -2.48047 -17.3604 -9.91992
+c-14.0498 -52.9102 -62 -90.1104 -116.56 -90.1104s-102.51 37.2002 -116.56 90.1104c-1.65039 7.43945 -9.9209 11.5693 -17.3604 9.91992c-7.44043 -1.65039 -11.5703 -9.91992 -9.91992 -17.3604c16.5303 -65.3096 76.0498 -111.6 143.84 -111.6
+s127.31 46.29 143.84 111.6z" />
+ <glyph glyph-name="hackerrank" unicode="&#xf5f7;" horiz-adv-x="464"
+d="M453.5 320c14.4805 -24.9502 14.4697 -230.92 -0.00976562 -256c-14.4805 -25.0801 -192.391 -128 -221.33 -128c-28.9404 0 -206.83 102.79 -221.32 127.99c-14.4902 25.21 -14.4102 230.8 0 256.01s192.36 128 221.32 128c28.9697 0 206.85 -103.05 221.34 -128z
+M292.13 33.7803c3.95996 0 40.46 35.7793 37.5801 38.6895c-0.870117 0.879883 -8.82031 1.49023 -17.6904 1.83984c0 32.4004 -2.98926 19.0508 0.660156 210.341c0.0703125 3.65918 -1.04004 5.35938 -4.5 5.37988
+c-11.0801 0.0693359 -22.1602 0.0195312 -33.2295 -0.0605469c-3.25977 -0.0292969 -4.31055 -1.80957 -4.20996 -5.2002c1.58984 -48.8994 1.2002 -79.0898 1.2002 -83.6396h-80.2607c0.600586 25.7998 0.209961 79.6396 2.62988 105.39v3.16016
+c8.87012 0.350586 15.9004 0.970703 16.7705 1.83984c2.90039 2.91016 -34.3105 38.6904 -38.2705 38.6904c-3.94922 0 -41.4092 -35.7695 -38.4893 -38.6904c0.879883 -0.879883 7.58984 -1.48926 17.2598 -1.83984v-3.16992
+c3.11035 -128.649 1.07031 -179.229 0.150391 -212.67c-0.130859 -4.58008 1.63965 -6.12012 5.73926 -6.10938c10.1406 0.0292969 20.2803 -0.0507812 30.4102 -0.0800781c4.16016 -0.0205078 5.96973 1.39941 5.74023 5.93945
+c-1.83008 36.6797 -1.37012 65.7803 -1.37012 72.8799h79.9297c0 -2.41992 0.480469 -3.83008 0.44043 -5.84961c-0.350586 -17.7305 -0.94043 -60.0898 -0.94043 -86.3203c-11.29 -0.349609 -16.6797 -0.959961 -17.5498 -1.83008
+c-2.91016 -2.91992 34.04 -38.6895 38 -38.6895z" />
+ <glyph glyph-name="kaggle" unicode="&#xf5fa;" horiz-adv-x="291"
+d="M290.2 -53.5l1.39941 -7.59961c-0.5 -2 -2.5 -3 -6 -3h-66.8994c-4 0 -7.5 1.7998 -10.5 5.2998l-110.5 140.6l-30.7998 -29.2998v-109c0 -5 -2.5 -7.5 -7.5 -7.5h-51.9004c-5 0 -7.5 2.5 -7.5 7.5v497c0 5 2.5 7.5 7.5 7.5h51.9004c5 0 7.5 -2.5 7.5 -7.5v-306
+l132.3 133.7c3.5 3.5 7 5.2998 10.5 5.2998h69.2002c7 0 7.89941 -7.7998 5.2998 -10.5l-139.8 -135.3z" />
+ <glyph glyph-name="markdown" unicode="&#xf60f;" horiz-adv-x="640"
+d="M593.8 388.9c25.5 0 46.2002 -20.7002 46.2002 -46.1006v-301.6c0.0996094 -25.4004 -20.5996 -46.1006 -46.0996 -46.1006h-547.7c-25.5 0 -46.2002 20.7002 -46.2002 46.2002v301.5c0 25.4004 20.7002 46.1006 46.2002 46.1006h547.6zM338.5 87.4004h-0.200195v209.199
+h-61.5l-61.5 -76.8994l-61.5 76.8994h-61.5v-209.199h61.7002v120l61.5 -76.9004l61.5 76.9004v-120h61.5zM473.8 84.2998l92.2002 107.7h-61.5v104.6h-61.5v-104.6h-61.5z" />
+ <glyph glyph-name="neos" unicode="&#xf612;" horiz-adv-x="456"
+d="M387.44 -64h-95.1104l-108.21 154.54v-91.0996l-86.4297 -63.4404h-97.6904v482.18l40.4697 29.8203h108.05l123.74 -176.13v112.68l86.4307 63.4502h97.6895v-461.5zM10.7695 412.73v-460.721l72.0107 52.8799v249.16l215.489 -307.689h84.79l52.3506 38.1699h-78.2705
+l-316.18 450.489zM93.3096 -53.8799l80.04 58.7803v101.04l-79.7998 114.359v-220.939l-72.5801 -53.25h72.3398v0.00976562zM52.6299 437.23l310.601 -442.57h82.3691v442.57h-79.75v-317.561l-222.939 317.561h-90.2803zM283.03 256.35l72.0098 -102.81v278.53
+l-72.0098 -52.96v-122.761z" />
+ <glyph glyph-name="zhihu" unicode="&#xf63f;" horiz-adv-x="640"
+d="M170.54 299.87h122.68v-217.55h-49.5293l-42.0107 -26.3701l-7.70996 26.3701l-23.4297 0.00976562v217.54zM268.29 105.94v170.31h-72.8203v-170.31l11.9004 -0.0400391l5.08008 -17.4707l27.8994 17.5107h27.9404zM149.83 200.33
+c7.5 0 7.58984 -23.6104 7.58984 -23.6104h-61.6504c-0.879883 -13.1201 -3.50977 -26.6895 -7.86914 -40.6699l14.6191 11.6201c8.73047 -8.75 29.2109 -32.8896 36.79 -41.8096c9.15039 -13.1006 1.24023 -39.9902 1.24023 -39.9902l-53.96 64.9395
+c-12.6094 -48.3496 -35.5898 -69.25 -35.5898 -69.25c-10.0898 -8.96973 -30.5098 -15.75 -51 -9.89941c42.8301 33.2197 66.4502 75.2402 70.8496 125.1h-65.5801s3.82031 23.6201 15.5605 23.6201h52.2695c0.480469 6.56055 1.68066 62.9404 1.68066 73.4404h-28.8701
+c-2.62988 -7.87012 -3.03027 -8.64062 -5.14062 -14.5303c-11.4697 -21.0303 -30.9492 -21.5703 -36.8398 -22.21c17.4902 34.9795 27.3105 69.2197 30.7002 78.1201c8.2002 21.5693 32.2705 21.5693 32.2705 21.5693c-5.25 -14.0098 -9.63086 -27.5498 -13.1201 -40.6699
+h88.5c10.5498 0.25 8.58008 -22.3096 8.58008 -22.3096h-51.1602c0 -21.8701 -0.459961 -46.3604 -2.2002 -73.46h52.3301zM561.85 201.93l-19.2295 14.4307s30.8301 40.0498 36.8301 48.1992c8.72949 10.7402 27.3799 -4.05957 27.3799 -4.05957
+s-24.1504 -32.9297 -44.9805 -58.5703zM411.76 261.02l0.00976562 0.0107422c8.99023 -8.25 34.6602 -45.8604 34.6602 -45.8604l-19.46 -13.7295c-1.59961 2.40918 -41.1201 57.4492 -41.1201 57.4492s16.9004 10.3799 25.9102 2.12988zM640 189.65
+c0 0 0.950195 -23.79 -8.73047 -23.79h-122.359v-73.3203c0.780273 -28.0303 -15.3301 -45.3096 -44.8906 -45.3096c-9.84961 0 -16.1396 1.75977 -26.0195 6.56934c-12.9805 7.4502 -17.3203 17.8701 -19.3096 21.8398c15.6094 -0.65918 27.6094 -1.91992 41.6895 -1.80957
+c13.29 -0.870117 24.4805 7.15039 24.4805 21.1201v70.9199h-107.94c-22.6895 0.540039 -25.5098 22.8496 -25.5098 22.8496h133.47v99.8105c-12.8301 0 -31.6797 -0.830078 -56.5098 -2.43066c-26.46 -0.80957 -35.8398 -2.58984 -49.1504 0.890625
+c-8.16016 2.46973 -14.1797 10.7295 -15.7793 19.5498c67.1396 1.55957 232.359 18.0498 232.359 18.0498s20.1006 5.75977 23.1699 4.58008c12.8105 -6.25 0.589844 -33.4395 0.589844 -33.4395c-17.6396 -0.810547 -46.8896 -2.40039 -87.7695 -4.81055
+c-10.4297 -0.799805 -18.04 -1.2002 -22.8496 -1.2002v-101c0.149414 0 111.279 0.930664 131.06 0.930664z" />
+ <glyph glyph-name="alipay" unicode="&#xf642;"
+d="M377.74 416c38.6895 0 70.0898 -31.5703 69.9297 -70.2598v-234.41c-48.6104 16.7002 -99.6895 36.04 -148.62 52.7402c23.1406 44.2998 38.3506 90.9199 38.3506 90.9199h-88.7705v31.2402h109.45v19.0098h-109.44v50.4199h-50.9199v-50.4199h-109.439v-19.0098h109.439
+v-31.2402h-92.0801v-16.7002h178.2s-9.91992 -30.25 -26.4502 -60.3398c-47.7793 14.71 -91.75 24.96 -127.13 24.96c-84.6396 0 -103.49 -42.4902 -99.5195 -81.5c3.30957 -31.0703 26.4502 -76.3701 97.04 -76.3701c64.4795 0 116.55 37.0303 148.62 81
+c61.0098 -28.0996 125.64 -62.8203 171.6 -88.4404c-0.5 -38.5195 -31.7402 -69.5996 -70.2598 -69.5996h-307.48c-38.8496 0 -70.2598 31.4102 -70.2598 70.2598v307.48c0 38.8496 31.4102 70.2598 70.2598 70.2598h307.48zM47.2803 125.05
+c-0.990234 17.5205 10.9102 50.5801 78.3594 50.5801c24.96 0 64.8105 -12.7295 109.44 -31.4102c-25.29 -33.2197 -65.7998 -72.8994 -117.87 -72.8994c-59.6797 0 -68.9404 33.5596 -69.9297 53.7295z" />
+ <glyph glyph-name="the-red-yeti" unicode="&#xf69d;" horiz-adv-x="505"
+d="M484.4 206.3c2.19922 -3 4.69922 -6.89941 6.7998 -13.2998c4.2002 -11.7998 7.2002 -22.9004 8.89941 -34.2002l-2.5 -0.5l-13 14.2998c-17.8994 -28.0996 -9.89941 -15.3994 -16.6992 -25.0996c0 -124.2 -101.301 -211.5 -223 -211.5
+c-61.5 0 -113.9 20.2002 -157.5 60.2002c-64.5 60.8994 -64.9004 125 -64.9004 150.5c-0.5 1.7998 -0.700195 3.5 -1.2002 5.2002l-20.2002 -22.4004c-6.7998 43 25.7002 74.2998 33 80.7002c0.5 1 0.700195 2.2002 1.2002 3.2002l-28.7998 1l-3 3.39941
+c8.5 3.5 25.2998 13.2998 40.2998 14.2998c7.40039 14.2002 16.5 27.5 27.7998 40.3008c1.30078 6.39941 3.30078 14.1992 6.60059 25.7998l-7.60059 -4.7002l-1.69922 1.7002l1.69922 8.39941c10.8008 25.6006 26 48 46.7002 67.4004l-33 14.2998h3.7002
+c20.9004 4.90039 33.2002 3.2998 49.2002 0c-2.5 4.10059 -5.40039 10.5 -8.40039 18.9004c-2.89941 8.09961 -2.89941 16.5 0.5 25.2998c8.90039 -7.40039 14.2998 -24.5996 15.2002 -27c0.700195 3.59961 2.09961 21.2998 33.7002 45.5l1.7998 -0.5l-12 -44.2002
+c30 17.7002 63 21.9004 97.9004 11.7998c-12.7002 -12.1992 -24.3008 -28.8994 -42.5 -33c7.39941 -2.2998 28.6992 -9.69922 34.1992 -15.1992l-24.7998 7.09961c6.5 -6 19.6006 -16.4004 25.1006 -25.0996c24.3994 -1.30078 47.1992 -5.7002 68.3994 -13.3008l-0.5 0.5
+c29.4004 14.7002 37.7002 27.3008 74.7998 3c0 -30.1992 -2.2998 -23.3994 3 -29.7998c7.60059 6.40039 16.5 12.2998 25.3008 16.5c13 6.40039 23.0996 4.7002 30.6992 -5.89941c11.8008 0 17.8008 -15.7002 18.4004 -27c14.7998 -2.90039 2.7002 -30.7002 2.5 -30.7002
+l-7.09961 -18.2002c7.7998 -7.7998 22.0996 -20.9004 31.6992 -44.7998zM394.2 336.8c-13.1006 8.90039 -22.7002 11.9004 -28.2998 8.5c8.09961 -7.2002 13 -14.2998 13.5 -20.7002c1.19922 -7.59961 -2.2002 -14.7998 -10.6006 -21.8994l-4.2002 -3.40039
+c4.2002 -7.09961 7.2002 -14.7002 8.40039 -23.0996h2.5c-2.09961 13.8994 -2.5 11 0.700195 14.7998c11 -6.40039 14.8994 -14.5 16 -19.9004c21.7998 10.1006 29.5 12.7002 54.7998 20.9004l-18.2002 -16c11.4004 0 25.6006 0.299805 46.5 -8.40039
+c7 24.3008 7.10059 20.7002 2.5 20.7002l-4.7002 -11.2998c-1.69922 10.5 -2.89941 18.9004 -3.39941 25.2998c-0.5 6.7002 -3.90039 9.60059 -9.2998 10.1006c0 -5.90039 0.5 -11 1.69922 -15.2002l-1.69922 -5.90039c-2.90039 10.6006 -5.90039 20.2002 -9.30078 27.7998
+c-9.69922 17.7002 -30.1992 -9.19922 -43 -11.2998c8.10059 -0.5 16.9004 -0.5 27 0l-22.3994 -5.39941l3.39941 -4.7002c-5.5 0 -16.8994 -0.900391 -22.3994 17.2002zM354.6 346.9l-20.2998 -11.8008c11.2998 -7.59961 20.2002 -18.1992 27.7998 -31.1992
+c6.40039 2.89941 10.1006 5.09961 11.8008 7.59961c2.5 2.7998 2.5 4.7002 3 7.09961c0.599609 1.30078 0.799805 2.7002 -3.40039 11.1006c-7.5 11.7998 -16.2002 15.2998 -18.9004 17.2002zM87.2002 304.9c-7.7998 -24.1006 -11.7002 -49.4004 -13.2002 -74.6006
+l13.2002 -5l1.2002 27c9.5 -16.3994 11.1992 -23.2998 12.2998 -28.7998c2.7998 2.09961 7.7002 7 22.5996 11.2998l1.2002 -1.7002l-7.59961 -10.5996c10.0996 3.5 19.5 3.5 28.2998 0.5l-10.6006 -8.40039c22.8008 -8.39941 26.6006 -7.59961 38.4004 -26.0996
+l-11.7998 1.2002c34.8994 -20.5 66 -47.9004 141.2 -63.2002c15.5996 24.0996 14 21.0996 14 22.9004l0.199219 0.199219l-0.199219 0.200195c-0.700195 1.90039 -14.1006 16.6006 -18.2002 20.7002c7.2998 -1.7998 6 -0.900391 10.7998 -3.7002
+c1.7002 -0.899414 -5.40039 5.40039 -21.9004 20.2002c16.5 -6.7002 27.6006 -15.5 33 -27.7998l1.7002 30.7002l-22.3994 17.6992l6.39941 5.90039c-7.2998 0 -31 3.7002 -49.2002 -16l-2.5 0.5c8.2002 16.4004 13.7002 35.2002 16 46c1.80078 8.90039 3 18.2002 3 28.2998
+c0 19.5 -4.69922 38.4004 -13.5 56.6006c-6.39941 13.5 -16.5 25.2998 -30 35.3994c-6.39941 4.7002 -13.0996 9.2998 -20.6992 13.5c3 0.700195 1 1.2002 -5.40039 1.2002c-6.40039 0.200195 -13 0.700195 -19.4004 1.2002v-3
+c-10.0996 -1.7002 -18.8994 -6.7998 -25.2998 -15.2002h-1.2002l-5.39941 -3.40039c-1.2002 2.90039 0 6.30078 4.2002 9.30078l10.5996 11.2998l-3.40039 -0.5l2 3.39941c-2.2998 0.200195 -4.19922 0.5 -6.19922 0.700195l-0.5 1.2002l2.5 1.7002
+c2.19922 -0.200195 4.59961 -0.5 7.09961 -0.700195c4.2002 2.2002 8.7998 3 14 2l2.5 -1.2002l0.200195 -0.5c7.2002 0.400391 14.7998 1.40039 23.3994 2.90039c20.7002 2.89941 36.7002 11.2998 48.5 24.7998l-21.0996 0.5
+c-25.7998 0.5 -49.4004 -5.40039 -71.2998 -18.9004l-2.5 2.5l0.5 4.7002l1.7002 7.10059c1.69922 8.09961 3.5 16.3994 6.39941 25.2998c-1.7002 -0.700195 -4.59961 -4.90039 -9.2998 -11.2998c-4.7002 -6.40039 -8.40039 -13 -10.0996 -19.4004
+c-1.7002 -7.2002 -4.2002 -11.7998 -5.90039 -14.2998l-13.5 29l8.40039 -35.7998l-0.5 -1.7002c-5.40039 0 -10.6006 0.799805 -16 2.5c-3.40039 0.700195 -10.6006 1.2002 -20.9004 1.2002c0.5 0 -0.700195 0 -3.2002 -0.5
+c5.40039 -1.30078 13.5 -4.2002 24.7998 -8.40039l6.40039 1.2002c-4.2002 -3.40039 -10.9004 -10.1006 -20.2002 -19.4004c-9.39941 -8.89941 -20.2002 -26.0996 -32.5 -50.2002l4.2002 1.2002l10.0996 9.2998l-5.39941 -4.69922l13 12.2998l-2.5 -3.40039
+c-5.10059 -7.59961 -8.10059 -12.2998 -9.2998 -15.2002zM363.7 -25.0996c8.2998 40.2998 3.59961 55.1992 -0.700195 89.5c-35.5 -11.8008 -20.2998 -6 -32 -10.8008l10.5 -14.1992l-1.2002 -1.2002c-20.2002 6 -23.2002 10.7998 -27.7998 15
+c6 -22.2002 13.9004 -26.4004 29.5 -31.7002c-9.5 -9.59961 -25.4004 4 -34.4004 13l2.5 -23.5996l-4.19922 -3c-5 22.0996 -22 39.0996 -25.3008 39.0996c-44 -13 -79.0996 -5.7998 -113.899 10.5996c-1.60059 -0.399414 -70.6006 -18 -120.5 37.1006
+c13.7002 -35 32.2998 -63.7002 71.2998 -82.6006c-4.7002 10.1006 -11.9004 19 -20.7002 26.6006c0 0 0.700195 3.7002 1.2002 10.0996c19.4004 -19.3994 50.7002 -39.5 93.2002 -60.2002c-59.6006 24.5 -59.9004 24.8008 -69.1006 29l16 -20.6992
+c-3 -1.30078 -6.69922 -0.5 -10.0996 1.19922c-12.2998 7.10059 -24.0996 15.5 -35.4004 24.8008c1.90039 -2.2002 80.1006 -98.5 200.9 -74.3008c-43.0996 21.8008 -52.4004 52.4004 -66.5996 73.5l17.6992 -7.59961l-11.7998 23.0996
+c20.1006 -27.7998 28.6006 -35 38.4004 -44.2998l-30 16.5c12.5996 -27.0996 33.7002 -47 63.5 -58.7998c2.89941 1.5 9.09961 -1.09961 59 23.9004zM479 189.3l8.90039 -12.7998l-12.3008 32.5c10.9004 0 10 -0.0996094 21.2002 -3.40039
+c-8.09961 11.3008 -16.8994 21.9004 -27 32l-26.5996 23.1006l1.2002 3l23.5996 2.5c-11.0996 2.5 -21.7002 3.7002 -33 4.2002l-17.7002 -0.5l-0.5 2.89941l14.7998 13l-41.7998 -20.2002l-12.2998 18.9004l3.40039 -16l-2.5 -1.2002l-5.90039 4.2002h-10.0996
+l5.39941 -4.2002v-2l-13.5 -27.7998c-10.0996 -31.2002 -21.8994 -67.9004 -35.3994 -109.7l1.19922 16l-1.19922 -3v-0.5c-6.40039 -16 -13.6006 -29.5 -21.2002 -39.5996l9.2998 21.8994l-46.7002 -20.1992c11.7998 13.5 23.6006 19.3994 34.9004 18.8994
+c-71.2002 11.4004 -106.2 41 -110.4 46c3.60059 -6.2002 13.2002 -17.7998 16 -40.0996l-1.7002 -1.2002c-4.2998 15.5996 -16.3994 46.5996 -55.7998 69.5996l23.6006 -2.5c-10.5 12.6006 -36.3008 17.8008 -40.8008 16l-2.5 2.5l8.40039 8.40039l-22.2998 -5.7998
+l5.39941 13.5c-8.09961 -4.40039 -4.2998 -2.40039 -17 -8.90039l-1.69922 0.5c0.599609 0.600586 0.899414 -0.700195 -3 9.2998c-0.600586 -11 -0.400391 -8.59961 -1 -11.7998c-1.60059 -0.599609 -0.800781 -0.200195 -3.7002 -1.7002
+c-40 20.6006 -57.2002 11 -73 5.2002c36.7998 -6 29.2998 -4 38.3994 -9.2998c-25.7998 -12.2002 -31.8994 -12.5996 -51.3994 -70.0996l22.2998 22.2998l2.5 -16.4004c13.4004 -58 68.7002 -92.5 126.4 -83.3994l-26.1006 22.3994l44.7998 -22.3994l-1.19922 -3
+c4.59961 -1.7002 9.2998 -3 13.5 -4.2002c30 -8.90039 61.1992 -11.1006 93.1992 -6.40039l-32.5 21.2002c35.8008 -7 50.7002 -31.4004 56.8008 -39.5996l-7.60059 29l1.2002 2.5l19 -27.9004l-9.2998 26.5996l21.8994 -13.5h1.2002l-3.39941 4.2002l7.09961 -4.7002
+l-14.2998 16l1.2002 3l7.59961 -7.09961c4.2998 1.2002 41.4004 10.5 80.9004 40.2998c47.8994 35.4004 68.0996 73.7998 71.5996 79.7002l-3 9.2998zM472.9 260.6l-18.2002 -1.19922l14.2998 -11.8008zM218.1 253.5c2.7002 -5.09961 5.7002 -12.4004 18.4004 -18.7998
+c-7.5 -10.9004 -8.2998 -10.5 -20.2002 -16c-7.59961 -7.7002 -13.5 -13.1006 -17.7002 -14.7998l7.10059 13c-8.2998 -3 -16.4004 -3.7002 -24.7998 -2.5l-0.5 1.19922c19 2.10059 37.1992 9.40039 46.5 16c-4.10059 4.2002 -7.10059 11.3008 -8.80078 21.9004z
+M221.8 355.8c6.40039 -3.7002 11 -8.39941 14.2998 -13.7998c14.7002 -24.0996 19.2002 -40.0996 11.3008 -47.7002c-7.90039 -7.59961 -16.8008 -7.09961 -26.1006 3c-9.2998 10.1006 -13.5 23.6006 -11.7998 39.6006c1.7002 15.8994 5.90039 22.2998 12.2998 18.8994z
+M217.1 309.5c7.10059 -21.2998 33.4004 -23.0996 26.9004 4.90039c-3.90039 16.5 -8.7998 27.0996 -15.2002 32.5c-6.59961 5.39941 -10.0996 6.69922 -11.2998 4.19922c-2.5 -2.89941 -3.5 -11.2998 -3 -24.7998c7.5 12.7998 11.5996 5.90039 12.5 4.7002l-0.5 -0.5
+c-0.799805 -1.7002 -2.59961 -3.09961 1.7002 -6.2002l1.2002 0.5v-4.7002c-1.80078 -12.5 -6.90039 -12.7998 -12.3008 -10.5996zM172.1 315c-2.19922 0.5 -4.19922 1.7002 -5.39941 4.2002c-3.5 8.5 0 21.2002 8.09961 21.2002c2 -0.5 3.7002 -1.7002 5.40039 -4.7002
+c-1.5 -0.400391 -4.7002 -4.7998 0.700195 -5.90039h0.5c0 -13.7002 -7.7002 -15.0996 -9.30078 -14.7998zM212.2 365.1l-3.7002 2.40039l-0.5 2.5c18.2998 0 25.7998 -8.7998 28.2998 -14.2998c-8.89941 4.7002 -17.7002 6.39941 -26.0996 5.89941l-0.5 3zM140.4 315.7
+c1.59961 -1.60059 0.599609 -0.299805 4.89941 -6.60059c-25.3994 -4.69922 -23.2002 -12.2998 -30 -12.2998c0.299805 0.600586 7.10059 16 23.6006 16l-7.10059 7.60059c9.40039 0.5 15.2002 2.09961 19.9004 -5.90039c0 8.7998 1.2002 16.5 2.89941 23.5996
+c2 7.60059 3.7002 11.8008 5.40039 13.5c1 1.5 16.2998 15.7002 29 22.4004c4.2002 2.90039 8.7998 3.40039 13.5 1.7002c0.5 -0.5 0.5 -1.2002 0.5 -1.7002l-13 -7.59961c7.59961 -11.8008 10.5 -25.3008 8.7998 -41.3008c-1.2998 -11.0996 -6 -20.6992 -14.7998 -28.2998
+l2.90039 -4.7002c-30 2.2002 -24.8008 6.80078 -46.5 23.6006zM159.1 334.4c-1.7998 -7.2002 -2.2998 -16 -3.09961 -26l5.40039 -6.40039l7.09961 -3.40039c2.90039 -0.5 6.59961 -1 11.2998 -0.5c1 1.7002 3.5 4.2002 6.40039 7.60059
+c5 5.89941 7.89941 13.7998 8.39941 23.0996c0.400391 8.7998 -0.5 17.7002 -3 25.2998c-3 8.10059 -5.89941 11 -10.0996 9.30078c-5.40039 -1.7002 -10.5996 -5.40039 -16 -11.8008c-3 -4.19922 -5.2002 -9.59961 -6.40039 -17.1992zM201.1 278.3l-3.09961 -6.5
+c7.09961 4.2002 13.5 7.2002 19.4004 8.40039l7.09961 0.5l11.7998 -7.60059h-2.5c-8.7998 3.7002 -19.3994 1.2002 -30.7002 -7.59961c-0.5 -4.7002 1.7002 -14.7002 5.90039 -29.5l9.2002 0.5c-21.9004 -6.59961 -37.6006 -8.40039 -48.9004 -5.40039
+c-24.8994 6.7002 -27.3994 23.6006 -27.5 24.1006c-2.89941 11.2998 -3.39941 22.3994 -1.7002 33.7002c-6.39941 -0.5 -11 -4.2002 -15.1992 -10.6006c-2.90039 5.90039 -5.40039 8.7998 -5.90039 9.2998c1.5 0.700195 12.2998 7.5 32.5 4.90039l0.5 -2.5l-5.90039 -1.2002
+c-0.0996094 -0.399414 -1.89941 -29.5 18.9004 -24.7998c1.40039 0.299805 1.2998 -0.0996094 36.0996 14.2998z" />
+ <glyph glyph-name="acquisitions-incorporated" unicode="&#xf6af;" horiz-adv-x="345"
+d="M338.5 -20.2002c2.2002 -14.2998 4.09961 -28.7002 6.59961 -43.7002h-337.1c-4 0 -6.09961 0.700195 -5.2998 5.7002c2.09961 12.9004 3.5 25.9004 5 38.7998c0.5 4.80078 2.2998 6.80078 7.59961 6.80078c118.101 -1 114.9 -0.300781 121.4 2.39941
+c9.39941 4 14.8994 12.9004 14.8994 23.1006c-0.0996094 42.8994 -0.299805 85.8994 -0.199219 128.8c0 3.7998 -1.2002 5.89941 -4.60059 6.7998c-15.7002 3.90039 -31.2998 7.7002 -47.5996 11.7002c-5.2998 -12.2998 -10.4004 -24.4004 -15.7002 -36.7002
+c1.7998 -3.2998 28.4004 -2.90039 35.2998 -2.90039v-27.5996h-114.3c1 8.59961 1.7002 16.7998 3.2002 24.9004c0.299805 1.39941 3.59961 3.09961 5.5 3.19922c8.39941 0.400391 16.8994 0.300781 25.3994 0.100586c4 0 5.90039 1.09961 7.60059 5.2002
+c16.5996 40.6992 13.5 31.1992 67.2998 161c31.5 76.0996 33 76 32.5996 87.3994c-0.699219 18.6006 -25.3994 22.2998 -37.6992 22.1006c-30 -0.400391 -38.4004 0.5 -101.801 0.5c-7.19922 44.5 -4.19922 32.0996 -6.39941 45.2998
+c-0.700195 4.2002 1 5.2998 4.59961 5.2998l339.101 -0.200195c-0.800781 -5.39941 -1.60059 -10.7998 -2.40039 -16.0996c-1.2998 -9.7002 -2.7998 -19.4004 -4 -29.2002c-0.299805 -2.90039 -1.2002 -4.2998 -4.2998 -4.2998
+c-20.6006 -0.100586 -41.2002 -0.100586 -61.7998 -0.5c-18.7002 -0.400391 -37.6006 -0.299805 -56.2002 -2c-13.4004 -1.2002 -23.2998 -12.6006 -18.9004 -26.6006c8.60059 -27.0996 27.7002 -69.0996 36.5 -89.1992c65.7002 -154.2 61.4004 -157 84 -158.601
+c6.60059 -0.5 13.4004 -0.0996094 20.4004 -0.0996094c1.2998 -9.40039 2.59961 -18 4 -27.5h-116v27c10.3994 0 20.3994 0.0996094 30.3994 -0.100586c3.5 0 5 0.700195 3.40039 4.40039c-4.40039 10.2998 -8.7002 20.5996 -13.2002 30.9004
+c-1.59961 3.69922 -4.09961 4.7998 -8.39941 3.5c-12.4004 -3.60059 -24.8008 -6.7002 -37.3008 -9.7002c-4.2998 -1.10059 -6 -2.7998 -5.89941 -7.5c0.799805 -57.5 0.899414 -127.5 1 -129.101c0.399414 -12.5996 8.7002 -21.3994 21 -23.0996
+c0.899414 -0.200195 12.8994 -2.7998 112.7 -2.59961c8.2998 0 8.39941 0.0996094 9.59961 -7.60059zM163.6 185.5c6.2002 -2 12 -2.2002 18.4004 0c13 4.2002 26.2998 7.7998 39.4004 11.7002c1.39941 0.5 2.69922 1.5 3.89941 2.09961
+c-6.7002 17.4004 -13.0996 34.2002 -19.7002 50.9004c-8.89941 22.7002 -17.6992 60.2998 -27 82.7998c-1.5 0.799805 -1.89941 -2.40039 -9.39941 0c-17.1006 -44 -34.1006 -87.7998 -51.2998 -132.1c2.2998 -1.2002 4 -2.30078 5.7998 -2.90039
+c13.2998 -4.2998 26.5996 -8.2998 39.8994 -12.5z" />
+ <glyph glyph-name="critical-role" unicode="&#xf6c9;" horiz-adv-x="445"
+d="M224.82 448c0.259766 -0.150391 216.569 -124.51 217.12 -124.72c3.04004 -1.18066 3.69922 -3.45996 3.69922 -6.56055c-0.0693359 -83.4502 -0.0595703 -166.899 -0.00976562 -250.359c0 -2.77051 -0.979492 -4.43066 -3.37988 -5.78027
+c-21.3701 -11.9902 -207.86 -118.29 -218.93 -124.58h-3c-79.3203 45.6602 -218.24 125.44 -218.391 125.52c-1.29004 0.740234 -1.95996 1.75 -1.87988 3.24023c0.0400391 0.870117 -0.0302734 225.94 -0.0498047 253.101c0 2.43945 0.889648 3.79004 2.92969 4.92969
+c23.2607 13.0996 209.271 119.229 220.141 125.21h1.75zM214.4 427.58l-0.220703 0.160156c-64.75 -36.8604 -129.489 -73.7402 -194.229 -110.61c0.0400391 -0.120117 0.0800781 -0.229492 0.129883 -0.349609c10.1895 -3.83984 20.3896 -7.69043 30.8604 -11.6406
+c-7.70996 -5.98926 -8.32031 -6.0293 -10.6504 -5.12988c-0.0996094 0.0400391 -24.1699 9.28027 -26.7998 9.99023v-230.42c0.879883 1.41016 64.0693 110.91 64.1299 111.01c1.62012 2.82031 3.03027 1.91992 9.12012 1.52051
+c1.39941 -0.0908203 1.47949 -0.220703 0.779297 -1.41992c-41.1895 -71.3301 -36.3994 -62.9902 -67.4795 -116.94c-0.80957 -1.40039 -0.610352 -1.12988 1.25 -1.12988c227.729 0 176.4 0 186.5 -0.0302734c1.44043 0 1.69043 0.230469 1.7002 1.64062
+c0.00976562 2.95996 0.00976562 5.91992 0 8.87988c0 1.33984 2.35938 0.80957 -18.3701 1.00977c-7.45996 0.0703125 -14.1396 3.21973 -21.3799 12.7002c-7.37988 9.66016 -14.6201 19.4297 -21.8506 29.21c-2.2793 3.08008 -3.44922 2.37988 -16.7598 2.37988
+c-1.75 0 -1.78027 0 -1.75977 -1.82031c0.290039 -26.21 0.149414 -25.2695 1.04004 -32.6602c0.519531 -4.37012 2.16016 -4.19922 9.68945 -4.80957c3.14062 -0.259766 3.88086 -4.08008 0.520508 -4.91992c-1.57031 -0.390625 -31.6006 -0.509766 -33.6699 0.0996094
+c-2.27051 0.660156 -2.5498 4.08008 0.299805 4.73047c3.29004 0.759766 6.16016 -0.810547 6.66016 4.43945c1.2998 13.6602 1.16992 9.04004 1.09961 79.4199c-0.00976562 10.8203 -0.349609 12.5801 -5.35938 13.5508
+c-1.2207 0.239258 -3.54004 0.15918 -4.69043 0.549805c-2.87988 0.969727 -2 4.83984 1.77051 4.84961c33.6699 0.0302734 46.0791 1.07031 56.0596 -4.85938c7.74023 -4.61035 11.9795 -11.4805 12.5098 -20.4004c0.879883 -14.5898 -6.50977 -22.3496 -14.9902 -32.5898
+c-0.679688 -0.820312 -0.719727 -1.37988 -0.0400391 -2.2207c2.60059 -3.25 5.05078 -6.62988 7.71094 -9.8291c27.5596 -33.2305 24.1094 -30.54 41.2793 -33.0605c0.890625 -0.129883 1.02051 0.419922 1.00977 1.15039
+c-0.0195312 3.66992 0.0107422 7.33008 -0.0195312 11c-0.00976562 1.01953 0.320312 1.42969 1.41016 1.25977c12.54 -1.91016 21.8496 0.0703125 23.5801 0.299805c1.08008 0.150391 1.5 -0.200195 1.47949 -1.33008c0 -0.109375 0.879883 -26.6895 0.870117 -26.7998
+c-0.0498047 -1.51953 0.669922 -1.62012 1.89062 -1.62012c62.3799 0.0205078 125.149 0.0205078 186.71 0.0205078c-27.1201 47.0293 -54.1104 93.8496 -81.1807 140.81c2.25977 0.660156 -0.399414 0.0302734 6.69043 1.38965
+c2.03027 0.390625 2.0498 0.410156 3.10938 -1.43945c7.31055 -12.6396 77.3105 -133.96 77.3701 -134.061v230.46c-1.71973 -0.5 -103.3 -38.7197 -105.76 -39.6797c-1.08008 -0.419922 -1.5498 -0.200195 -1.91016 0.879883
+c-0.629883 1.90039 -1.33984 3.76074 -2.08984 5.62012c-0.320312 0.790039 -0.0898438 1.12988 0.650391 1.39062c0.0996094 0.0390625 95.5293 35.8496 103.04 38.7695c-65.4199 37.5703 -130.561 75 -196.011 112.6c29.0703 -50.3594 57.9502 -100.369 86.8203 -150.39
+c-0.0898438 -0.110352 -0.179688 -0.219727 -0.280273 -0.330078c-9.56934 0.900391 -10.46 1.60059 -11.7998 3.94043c-0.959961 1.68945 -73.5 127.71 -82 142.16c-9.09961 -14.6709 -83.5596 -146.211 -85.3701 -146.32
+c-2.92969 -0.169922 -5.87988 -0.0800781 -9.25 -0.0800781c28.8301 49.8301 57.5 99.4199 86.1807 149zM266.33 297.66c1.84961 0.0498047 3.7002 0.519531 5.54004 0.849609c1.68945 0.299805 2.53027 -0.200195 2.59961 -1.91992
+c0 -0.109375 0.0703125 -19.0596 -0.859375 -20.4502c-0.870117 -1.2998 -1.87988 -1.21973 -2.60059 0.19043c-4.95996 9.68945 6.2207 9.66016 -39.1201 12.0498c-0.699219 0.0400391 -1 -0.229492 -0.969727 -0.929688c0 -0.129883 3.71973 -121.98 3.73047 -122.11
+c0.0195312 -0.889648 0.519531 -1.2002 1.20996 -1.50977c2.91992 -1.31055 5.95996 -2.41992 8.69922 -4.0498c7.31055 -4.33008 11.3809 -10.8398 12.4102 -19.3105c1.44043 -11.7998 -2.76953 -35.7695 -32.21 -37.1396
+c-2.75 -0.129883 -28.2598 -1.08008 -34.1396 23.25c-4.66016 19.2598 8.25977 32.7002 19.8896 36.4004c1.49023 0.469727 1.9502 1.25977 1.98047 2.65918c0.0996094 5.62988 3 107.101 3.70996 121.351c0.0498047 1.08008 -0.620117 1.16016 -1.35059 1.14941
+c-32.3496 -0.519531 -36.75 0.34082 -40.2197 -8.51953c-2.41992 -6.17969 -4.13965 -1.32031 -3.9502 -0.230469c0.879883 4.98242 2.36328 13.0605 3.31055 18.0303c0.399414 2.11035 1.42969 2.61035 3.42969 1.86035c5.58984 -2.11035 6.71973 -1.7002 37.25 -1.91992
+c1.73047 -0.0107422 1.78027 0.0800781 1.82031 1.84961c0.679688 27.4902 0.580078 22.5898 0.969727 29.5498c0.0703125 1.29004 -0.410156 2.16992 -1.62988 2.7998c-5.59961 2.91016 -8.75 7.55078 -8.90039 13.8701c-0.349609 14.8105 17.7207 21.6699 27.3799 11.5107
+c6.84082 -7.19043 5.80078 -18.9102 -2.44922 -24.1504c-1.66016 -1.05957 -2.31055 -2.33008 -2.2207 -4.33984c0.0302734 -0.589844 -0.109375 4.30957 0.980469 -30.0498c0.0302734 -0.900391 0.429688 -1.12012 1.24023 -1.11035
+c0.0996094 0 23.0098 0.0898438 34.4697 0.370117zM67.2695 306.3c19.8408 4.50977 32.6807 0.560547 52.4902 -1.68945c2.75977 -0.310547 3.74023 -1.2207 3.62012 -3.99023c-0.209961 -4.99023 -1.16016 -22.3301 -1.24023 -23.1504
+c-0.0996094 -1.04004 -0.599609 -1.91016 -1.62988 -2.33984c-4.05957 -1.7002 -3.60938 4.4502 -4.00977 7.29004c-3.12988 22.4297 -73.8701 32.7002 -74.6299 -25.4004c-0.310547 -23.9199 17.0098 -53.6299 54.0801 -50.8799
+c27.2402 2.01074 19.0498 20.1904 24.8398 20.4707c1.97949 0.0996094 3.33008 -1.33008 2.97949 -3.36035c-1.8291 -10.8496 -3.41992 -18.9502 -3.44922 -19.1504c-1.51074 -9.17969 -86.6699 -22.0801 -93.3506 42.0508
+c-2.67969 25.8691 10.4707 53.3691 40.2998 60.1494zM147.26 218.63c-6.5 0.0302734 -12.9902 0 -19.4902 0.0400391c-1.95996 0.00976562 -2.7793 1.61035 -2.65918 1.79004c2.37988 3.75 5.88965 -0.919922 5.85938 6.13965
+c-0.0800781 25.75 0.209961 37.9902 0.230469 40.1006c0.0302734 3.41992 -0.530273 4.64941 -3.32031 4.93945c-7 0.720703 -3.11035 3.37012 -1.11035 3.37988c11.8408 0.100586 22.6201 0.180664 30.0508 -0.719727c8.76953 -1.06934 16.71 -12.6299 7.92969 -22.6201
+c-1.98047 -2.25 -4.03027 -4.41992 -6.13965 -6.72949c0.949219 -1.15039 6.89941 -8.82031 17.2793 -19.6807c2.66016 -2.7793 6.15039 -3.50977 9.87988 -3.12988c1.2207 0.120117 2.11035 0.75 2.23047 2.12012c0.299805 3.41992 0.259766 -4.72949 0.450195 40.5801
+c0.0195312 5.65039 -0.339844 6.58008 -3.23047 6.83008c-3.9502 0.349609 -4.0293 2.25977 -0.689453 3.37012c0.120117 0.0400391 18.9795 0.0898438 19.0898 0.0898438c0.320312 0 4.49023 -0.530273 1.0498 -3.37988
+c-0.0498047 -0.0498047 -0.160156 -0.0302734 -0.240234 -0.0400391c-3.60938 -0.259766 -3.93945 -0.979492 -3.95996 -4.62012c-0.269531 -43.9297 0.0703125 -40.2295 0.410156 -42.8203c0.110352 -0.839844 0.270508 -2.22949 5.10059 -2.13965
+c2.48926 0.0400391 3.85938 -3.37012 -0.0205078 -3.39941c-10.3701 -0.0800781 -20.7402 -0.0302734 -31.1104 -0.0703125c-10.6699 -0.0400391 -13.4697 6.2002 -24.21 20.8203c-1.59961 2.17969 -8.30957 2.35938 -8.19922 0.369141
+c0.879883 -16.4697 0 -17.7793 3.98926 -17.6699c4.75 0.100586 4.73047 -3.56934 0.830078 -3.5498zM422.23 228.78c-1.21094 -7.12988 0.169922 -10.3799 -5.30078 -10.3398c-61.5498 0.419922 -47.8193 0.219727 -50.7197 0.30957
+c-1.21973 0.0400391 -2.42969 0.44043 -3.62988 0.730469c-2.53027 0.599609 1.47949 1.22949 -0.379883 5.59961c-1.43066 3.37012 -2.78027 6.78027 -4.11035 10.1895c-0.379883 0.980469 -0.939453 1.44043 -2.04004 1.44043
+c-3.12012 0 -7.26953 0.44043 -14.5801 -0.0703125c-0.580078 -0.0400391 -1.39941 -0.549805 -1.62012 -1.05957c-1.58008 -3.62012 -3.06934 -7.29004 -4.50977 -10.96c-1.26953 -3.23047 7.86035 -1.32031 12.1904 -2.16016
+c2.96973 -0.570312 4.5293 -3.71973 0.660156 -3.72949c-8.79004 -0.0302734 -17.5801 -0.0605469 -26.3701 -0.0507812c-2.91992 0 -3.08984 3.15039 -0.740234 3.20996c2.66992 0.0703125 4.74023 1.12988 5.91992 3.4707c1.5 2.96973 2.7998 6.04004 4.11035 9.08984
+c18.1797 42.1396 17.0596 40.1699 18.4199 41.6094c1 1.06055 2.06934 1.05078 3.0293 -0.0400391c2.93066 -3.33984 18.4004 -44.71 23.6201 -51.9199c1.95996 -2.69922 5.74023 -1.97949 6.36035 -2.00977c3.61035 -0.129883 3.96973 1.11035 4.12988 4.29004
+c0.0898438 1.87012 0.0800781 -1.16992 0.0703125 41.2402c0 4.45996 -2.36035 3.74023 -5.5498 4.26953c-0.260742 0.0400391 -2.56055 0.629883 -0.0800781 3.06055c0.209961 0.200195 -0.890625 0.240234 21.6992 0.149414
+c2.32031 -0.00976562 5.32031 -2.75 -1.20996 -3.44922c-1.80957 -0.19043 -2.58008 -1 -2.65918 -2.83008c-0.0703125 -1.62988 -0.19043 -38.8906 0.290039 -41.21c0.349609 -1.73047 1.72949 -2.37988 3.22949 -2.43066c13.25 -0.429688 14.9199 -0.439453 16.04 3.41016
+c1.66992 5.78027 4.12988 2.52051 3.73047 0.19043zM317.55 164.41c-4.24023 0 -4.41992 3.38965 -0.609375 3.41016c35.9092 0.160156 28.1094 -0.379883 37.1895 0.649414c1.67969 0.19043 2.37988 -0.239258 2.25 -1.88965
+c-0.259766 -3.38965 -0.639648 -6.78027 -1.03027 -10.1602c-0.25 -2.16016 -3.19922 -2.61035 -3.39941 0.150391c-0.379883 5.30957 -2.15039 4.44922 -15.6299 5.08008c-1.58008 0.0693359 -1.64062 0.0195312 -1.64062 -1.52051v-16.1201
+c0 -1.64941 0 -1.59961 1.62012 -1.46973c3.12012 0.25 10.3105 -0.339844 15.6904 1.51953c0.469727 0.160156 3.2998 1.79004 3.06934 -1.75977c-0.00976562 -0.209961 -0.759766 -10.3496 -1.17969 -11.3896c-0.530273 -1.29004 -1.87988 -1.50977 -2.58008 -0.320312
+c-1.16992 1.9502 0 5.08008 -3.70996 5.2998c-15.4199 0.900391 -12.9102 2.55078 -12.9102 -5.98926c0 -12.25 -0.759766 -16.1104 3.89062 -16.2402c16.6396 -0.480469 14.3994 0 16.4297 5.70996c0.839844 2.37012 3.5 1.76953 3.17969 -0.580078
+c-0.439453 -3.20996 -0.849609 -6.42969 -1.22949 -9.63965c-0.0400391 -0.360352 -0.160156 -2.40039 -4.66016 -2.39062c-37.1602 0.0800781 -34.54 0.19043 -35.21 0.310547c-2.71973 0.509766 -2.2002 3.04004 0.219727 3.44922
+c1.10059 0.19043 4.03027 -0.539062 4.16016 2.56055c2.44043 56.2197 -0.0703125 51.3398 -3.91016 51.3301zM317.14 273.93c2.45996 -0.609375 3.12988 -1.75977 2.9502 -4.64941c-0.330078 -5.2998 -0.339844 -8.98047 -0.549805 -9.69043
+c-0.660156 -2.22949 -3.15039 -2.12012 -3.33984 0.270508c-0.379883 4.80957 -3.0498 7.81934 -7.57031 9.14941c-26.2803 7.73047 -32.8096 -15.46 -27.1699 -30.2197c5.87988 -15.4102 21.9902 -15.9199 28.8604 -13.7803c5.91992 1.85059 5.87988 6.5 6.91016 7.58008
+c1.22949 1.2998 2.25 1.83984 3.11914 -1.09961c0.0302734 -0.100586 0.570312 -11.8906 -5.96973 -12.75c-1.59961 -0.209961 -19.3799 -3.69043 -32.6797 3.38965c-21.0098 11.1904 -16.7402 35.4697 -6.87988 45.3301c14.0293 14.0596 39.9102 7.05957 42.3193 6.46973z
+M288.8 167.86c3.28027 0 3.66016 -3 0.160156 -3.43066c-2.61035 -0.319336 -4.96973 0.419922 -5 -5.45996c-0.00976562 -1.97949 -0.19043 -29.0498 0.400391 -41.4502c0.109375 -2.28906 1.14941 -3.51953 3.43945 -3.64941
+c22.0303 -1.20996 14.9502 1.64941 18.79 6.33984c1.83008 2.24023 2.75977 -0.839844 2.75977 -1.08008c0.350586 -13.6201 -3.95996 -12.3896 -5.18945 -12.3994c-0.100586 0 -38.0801 0.179688 -38.1797 0.189453c-1.93066 0.230469 -2.06055 2.99023 -0.420898 3.37988
+c1.99023 0.480469 4.94043 -0.399414 5.13086 2.7998c0.959961 15.8701 0.569336 44.6504 0.339844 47.8105c-0.270508 3.76953 -2.7998 3.26953 -5.68066 3.70996c-2.46973 0.379883 -1.98926 3.21973 0.34082 3.21973
+c1.46973 0.0205078 17.9893 0.0302734 23.1094 0.0205078zM257.17 225.65c0.0703125 -4.08008 2.86035 -3.45996 6.00977 -3.58008c2.61035 -0.100586 2.53027 -3.41016 -0.0693359 -3.43066c-6.48047 -0.0400391 -13.7002 0.0205078 -21.6104 0.0605469
+c-3.83984 0.0195312 -3.37988 3.34961 0.0400391 3.37012c4.49023 0.0292969 3.24023 -1.61035 3.41016 45.54c0.0195312 5.08008 -3.27051 3.54004 -4.71973 4.22949c-2.58008 1.23047 -1.36035 3.08984 0.40918 3.15039
+c1.29004 0.0498047 20.1904 0.410156 21.1699 -0.209961c1.13086 -0.720703 1.87012 -1.65039 -0.419922 -2.86035c-0.989258 -0.519531 -3.85938 0.280273 -4.14941 -2.46973c-0.0205078 -0.209961 -0.820312 -1.62988 -0.0703125 -43.7998zM220.26 -48.6201
+c1.14062 -0.660156 2.12012 -0.660156 3.25977 0c16.9902 9.79004 181.971 103.57 197.421 112.51c-0.140625 0.430664 11.2598 0.180664 -181.521 0.270508c-1.21973 0 -1.57031 -0.370117 -1.53027 -1.56055c0 -0.0996094 1.25 -44.5098 1.2207 -50.3799
+c-0.0205078 -2.58008 -0.620117 -5.21973 -1.36035 -7.70996c-0.549805 -1.83008 0.379883 0.5 -13.5 -32.2295c-0.730469 -1.7207 -1.04004 -2.20996 -1.96973 0.0800781c-4.19043 10.3398 -8.28027 20.7197 -12.5703 31.0098c-1.45996 3.5 -2.24023 7 -2 10.79
+c0.160156 2.45996 0.799805 16.1201 1.50977 48.0195c0.0400391 1.9502 0.0107422 1.95996 -1.95996 1.95996h-183c2.58008 -1.62988 178.32 -102.569 196 -112.76zM129.36 140.13c0 -2.39941 0.359375 -2.79004 2.75977 -3.03027
+c11.54 -1.16992 21.04 -3.73926 25.6396 7.32031c6.00977 14.46 2.66016 34.4102 -12.4795 38.8398c-2.01074 0.589844 -15.96 2.75977 -15.9404 -1.50977c0.0498047 -8.00977 0.00976562 -11.5801 0.0205078 -41.6201zM235.11 155.18
+c0 -2.12988 1.06934 -38.6797 1.08984 -39.1299c0.339844 -9.93945 -25.5801 -5.76953 -25.2305 2.58984c0.0800781 2.02051 1.37012 37.4199 1.10059 39.4307c-14.1006 -7.44043 -14.4199 -40.21 6.43945 -48.8008c8.43066 -3.46973 17.54 -0.599609 22.3906 7.07031
+c4.90918 7.75977 6.83984 29.4697 -5.43066 38.96c-0.120117 -0.0498047 -0.239258 -0.0898438 -0.359375 -0.120117zM222.83 353.22c-9.83008 0.0302734 -9.73047 -14.75 -0.0703125 -14.8701c9.58984 -0.109375 10.1006 14.8408 0.0703125 14.8701zM142.68 249.35
+c0.0205078 -1.7998 0.410156 -2.39941 2.16992 -2.58008c13.6201 -1.38965 12.5107 10.9902 12.1602 13.3604c-1.68945 11.2197 -14.3799 10.2002 -14.3496 7.81055c0.0498047 -4.5 -0.0302734 -13.6807 0.0195312 -18.5908zM354.99 242.95
+c-2.03027 5.28027 -4 10.3896 -6.10059 15.8398c-2.15918 -5.48047 -4.15918 -10.5703 -6.22949 -15.8398h12.3301z" />
+ <glyph glyph-name="d-and-d-beyond" unicode="&#xf6ca;" horiz-adv-x="640"
+d="M313.8 206.5c-9.89941 0 -16 7 -15.7002 7.09961c-4.2998 5.7002 -3 -0.299805 -2.39941 -1.89941c-10.9004 10.2998 -5.2998 25.3994 -5.10059 26c0.700195 1.89941 0 2.2002 -0.599609 1.89941c-1 -0.299805 -2.09961 -1.89941 -2.09961 -1.89941
+c0.799805 9.09961 9.2998 14.7002 9.2998 14.7002l0.200195 -0.200195c1 -1.5 -0.400391 -3.2002 -0.600586 -9c1.60059 2.2998 7.90039 6.59961 11.4004 7.89941c-1.10059 -1.5 -2.10059 -3.59961 -2.10059 -6.59961c3.7002 4.2002 7.5 2.59961 8 2.40039
+c-12.1992 -11.9004 -7 -26.6006 3.2002 -26.6006c5.7002 0 11.5 6.40039 13.9004 10.7002c2.39941 -2.40039 6.39941 -5.5 7.39941 -6.59961c-3.7998 -7.80078 -11 -17.9004 -24.7998 -17.9004zM366.2 227.6c0 -2.89941 -2.90039 -4.09961 -5.40039 -4.5
+c0.700195 1.5 1.7998 5.10059 -0.200195 9c0.700195 -0.0996094 5.60059 -0.5 5.60059 -4.5zM376.5 222.4c-0.400391 -6.5 -6.90039 -11.6006 -14.5996 -10.6006c2 -1.7002 6.59961 -3 9 -1.89941c-3.90039 -6.90039 -23.1006 -7.5 -23.1006 6.39941
+c-2.89941 -2.89941 -2.09961 -7.39941 0 -9.2998c-2.2002 0.700195 -5.7998 3.09961 -6.39941 7.40039c-1.30078 10.0996 4.39941 6.5 -10.4004 18.0996c-4.7998 3.7002 -3 6.59961 -4 8.5c-1.09961 2.2002 -7 4.09961 -4.5 8.5
+c-0.0996094 -1.59961 1 -2.90039 2.59961 -3.5c1.80078 -0.700195 3.2002 -0.200195 4.80078 -1c1.69922 -1.2002 0.899414 -3.90039 2.19922 -5c1.10059 -0.799805 4.2002 0.299805 6.60059 -1.7998c2.59961 -2 8.2002 -6.7002 10.5996 -8.60059
+c4.40039 -3.59961 8.7998 0.400391 7.40039 4.60059c4.5 -2.60059 5 -9.90039 1.2998 -12.5c10.5996 -2.40039 13 10.0996 5 11.3994c7.2998 0.700195 13.5 -4.2998 13.5 -10.6992zM337.1 240.8c4.30078 6.10059 13.3008 15.2998 23.8008 15.7998
+c-5.90039 0.800781 -15.1006 -3.19922 -19.7002 -9c0.899414 3.90039 5.09961 10.1006 10.2002 13c0 0 -2.5 -3.19922 -1.40039 -3.69922c1.59961 -0.800781 5.7998 5.69922 11.2002 5.89941c0 0 -4 -2 -3.2002 -3.39941c0.599609 -0.900391 3.2998 1.2998 8 1.2998
+c5.7998 0 10.9004 -3.5 13.2998 -6.2002c-4 1.09961 -11.5996 -0.799805 -13.7998 -2.7002c-0.299805 0.200195 -11.7998 9 -22 -15.5c-4.7998 3.7998 -4.40039 3.7002 -6.40039 4.5zM579.6 188.9c37.2002 0 60.4004 -19.6006 60.4004 -48.9004
+c0 -28.2002 -17 -48.9004 -59.0996 -48.9004c-20.7002 0 -41.2002 1.30078 -51.6006 2.10059l7.40039 8.2002v77.1992l-7.40039 8.2002c10.2998 0.799805 29.6006 2.10059 50.2998 2.10059zM564.5 113.3c25.4004 -3.2002 46.7998 1.40039 46.7998 27
+c0 22.5 -16.7002 29.6006 -46.7998 26.2998v-53.2998zM301.6 267c0.100586 -0.299805 -2.7998 2.2998 -3.2998 7.5c-0.200195 2.2998 0 19.7998 20 18.9004c11.2002 -0.600586 16.7002 -8.30078 16.7002 -16.5c0 -4.30078 -2.2998 -10.1006 -5.5 -13.8008
+c-2.2002 2.2002 -5.59961 4.60059 -7.7002 7.80078c3.7998 5.59961 2.2002 14.3994 -4.7002 14.3994c-4.2998 0 -7.7998 -4.5 -6.39941 -9.89941c-0.700195 -2.40039 -1 -5.60059 -0.5 -8c-4.90039 2.59961 -6.5 6 -7.5 9c-1.2998 -2.5 -2.10059 -6 -1.10059 -9.40039z
+M301.2 261c0.299805 1.7002 -3.10059 4.59961 -4.7998 5.2002c4.7998 0.200195 7 -0.600586 7 -0.600586c-1.30078 1.7002 -1.60059 4.5 -1 6.7002c2.5 -6.09961 11.6992 -7.09961 13.8994 -12.2002c-0.299805 2.30078 -2.39941 4.7002 -4.7998 6.10059
+c-1.2998 3.2002 -0.299805 9.39941 1.2998 11c-0.5 -8.7998 12 -13.7998 14.6006 -20.2002c-1.40039 5.5 -7.40039 9 -10.1006 12.2002c-1 2.09961 -0.200195 5.7998 0.799805 7.09961c-0.5 -9.7002 15.8008 -14.2998 14.1006 -23.8994
+c0.899414 -0.400391 2.09961 -1.2002 1.89941 -2.60059c1.30078 0.299805 2.60059 1.7002 2.90039 2.7002c0.700195 -4.5 -1.90039 -9 -4.7998 -10.4004c1.59961 4 -2.7002 5.60059 -6.7002 5.10059c0 0 1.59961 2.2998 1 3.39941
+c-0.799805 1.5 -8 0.800781 -11.2002 -0.299805c1.10059 0.100586 3.60059 -0.200195 4.60059 -0.5c-2.10059 -2.89941 -1 -7.09961 1.2998 -4.2002c0 0 -1.10059 -3.5 -0.299805 -4.2998c0.799805 -0.799805 2.59961 -0.200195 2.59961 -0.200195
+c-1.2002 -2.69922 -5.2998 -4.59961 -8.2002 -4.59961c1.10059 0.400391 2.7002 2.2998 3 3.40039c-0.799805 -0.5 -2.7002 -0.700195 -3.5 -0.5c6.10059 3 0 13.1992 -7 8.19922c1 2.7002 3.7002 5.30078 5.7998 6.10059c-1.2998 0.5 -2.69922 0.799805 -4.2998 1.09961
+c1.7998 1.5 6.2998 2.7998 8.5 2.60059c-3.5 0.799805 -9.89941 -0.300781 -12.7998 -3.7002c0.900391 0 3.2998 -0.5 4.2998 -0.799805c-4 -0.700195 -9.39941 -4.40039 -11 -6.2002c0.299805 2.2002 1 4.2002 0.5 5.59961c-0.799805 2 -3 2.7998 -7.7998 1.7998
+c3.2002 3.2002 9.7002 5.10059 10.2002 6.90039zM327.1 253.6c0 0 -0.899414 3 -4.19922 4.30078c0.699219 -2.2002 1.5 -4.30078 4.19922 -4.30078zM366 249.9l0.700195 0.699219c0.5 0.400391 1.59961 0.900391 2.7002 1.40039v-18.4004
+c-1.7002 0.800781 -3.5 1.10059 -5.60059 1.10059c-2.39941 0 -5 -0.5 -5 -0.5c-0.5 0.5 -3.59961 2.89941 -5.09961 3.2002c4.09961 -4.30078 0.5 -9.80078 -3 -7.2002v15.7002c0.700195 0.799805 1.2998 1.7998 2.09961 2.59961
+c1.7002 2.09961 4.60059 3.40039 7.5 3.40039c1.7998 0 3.60059 -0.400391 4.7002 -1.40039zM79.9004 142.1c22 -6.39941 19.3994 -20.0996 19.3994 -25.1992c0 -7.80078 -3.2002 -13.6006 -9.89941 -17.6006c-12.6006 -7.39941 -24.7002 -5.89941 -86.4004 -5.89941
+l8.40039 8.59961v32.2998l-11.4004 14.6006h11.2998v29.5l-8.2998 8.59961h56.0996c12.9004 0 37 -4.40039 37 -25c0 -1.90039 1 -15.2998 -16.1992 -19.9004zM38.5996 169.6v-20.8994c10.6006 0 29.6006 -3.2998 29.6006 8.7998v3
+c0 9.90039 -9.60059 9.09961 -29.6006 9.09961zM38.5996 110.4c20.4004 0 32.9004 -1.90039 32.9004 9.2998h-0.200195v4.5c0 11.0996 -20.5 8.7998 -32.7002 8.7998v-22.5996zM139.8 129.7v-15.4004l60.1006 0.200195l-14.1006 -21.2002h-81.2002l7.40039 8.2002v77.0996
+l-7.40039 8.2002l73.5 0.200195v-0.200195l14.1006 -21h-52.4004v-14.8994h37.2002l-14.0996 -21.2002v-0.200195zM354.5 189.8c73.7998 0 77.5996 -99.2998 -0.299805 -99.2998c-77.2002 0 -73.6006 99.2998 0.299805 99.2998zM354.2 112.3
+c39 0 37 55.2002 0.200195 55.2998c-37.1006 0 -37.6006 -55.2998 -0.200195 -55.2998zM262.9 120.6l0.199219 -19l7.2002 -8.19922h-42.5996l7.7002 8.19922l-0.200195 19.4004l-44.1006 65.7998h44.9004l-6.40039 -7.2002l21 -37.1992h0.300781l20.5 37.1992
+l-6.10059 7.2002h41.7002zM234.5 271.9c-9.09961 6.69922 -9.5 14.0996 -9.59961 14.8994c7.2998 -4.2998 9 -4 39.8994 -4c-5.7998 0 24 3.10059 32.2002 -22.8994c-0.400391 0 -8.40039 -4.80078 -10.4004 -7.90039c5.30078 1.90039 8.90039 1.09961 9 1.09961
+c-8 -5.09961 -9.59961 -14.7998 -9.59961 -20.5c0.900391 2.10059 2.7002 3.7002 2.7002 3.5c-0.600586 -2.5 -1.40039 -7 -0.799805 -12c-8.60059 -7.09961 -16 -8.59961 -26 -8.59961h-35.1006c0.400391 0.0996094 7.7998 4.5 7.90039 4.59961
+c1.89941 1.10059 2.7002 2.2002 2.7002 6.40039v38.7998c0 4.2002 -1.30078 5.2998 -2.90039 6.60059zM256 266.4v-34.6006c4.7002 0 23.0996 -3.39941 23.0996 17.2998c0 20.6006 -18.5 17.3008 -23.0996 17.3008zM484.9 186.8l39.1992 -0.0996094l-7.39941 -8.2998
+v-85.2002h-21.2998c-4 12.7002 -44.8008 45 -48.5 55.5996h-0.300781v-47.3994l7.40039 -8.2002h-39l7.2002 8.2998v76.9004l-7.40039 8.5h31.6006c2.89941 -9.40039 39.7998 -36.5 45.1992 -50.9004h0.300781v42.5zM378.2 282.9
+c32.7002 -1.60059 33.7998 -29.8008 33.7998 -33.6006c0 -6.7002 -3.2998 -34 -36.7002 -34h-0.299805c3.59961 4.2998 3.5 11.9004 -2.2002 16.2998c1.2002 0 19.7002 -3.19922 19.7002 17.3008c0 20.6992 -18.4004 17.2998 -23.0996 17.2998v-4.2998
+c-5.40039 0.799805 -7.40039 -0.300781 -7.5 -0.300781c2.09961 1.80078 4.5 2.60059 6.09961 2.90039c-7.09961 1.59961 -13.5996 -2.40039 -14.5996 -3.5c0.799805 1.7998 2.39941 3.40039 3.5 4.5c-2.30078 -0.799805 -4.30078 -1.90039 -6.10059 -3
+c0 5.2002 0.200195 7.5 -2.89941 9.5c-9.10059 6.59961 -9.5 14.2002 -9.60059 14.9004c7.10059 -4.2002 7.7002 -4 39.9004 -4z" />
+ <glyph glyph-name="dev" unicode="&#xf6cc;"
+d="M120.12 239.71c3.87012 -2.90039 5.82031 -7.25977 5.83008 -13.0596v-69.6504c0 -5.80957 -1.94043 -10.1602 -5.82031 -13.0596c-3.87988 -2.90039 -7.76953 -4.35059 -11.6494 -4.35059h-17.4502v104.47h17.4395c3.87988 0 7.77051 -1.44922 11.6504 -4.34961z
+M404.1 416c24.2002 0 43.8408 -19.5898 43.9004 -43.7998v-360.4c-0.0595703 -24.21 -19.6904 -43.7998 -43.9004 -43.7998h-360.199c-24.2002 0 -43.8408 19.5898 -43.9004 43.7998v360.4c0.0595703 24.21 19.7002 43.7998 43.9004 43.7998h360.199zM154.2 156.81
+l-0.00976562 70.9307c-0.0107422 18.8193 -11.9307 47.2793 -47.3701 47.2793h-47.3799v-165.46h46.3994c36.75 -0.0595703 48.3604 28.4404 48.3604 47.25zM254.88 245.47l0.00976562 29.5205h-63.1895c-11.1504 -0.280273 -19.9805 -9.54004 -19.71 -20.6904v-125.109
+c0.279297 -11.1602 9.55957 -19.9805 20.7197 -19.6904h62.1797v29.5703h-53.29v38.4102h32.5703v29.5693h-32.5703v38.4199h53.2803zM358.52 130.18l38.4609 144.801h-32.5801l-29.5703 -113.721l-29.71 113.721h-32.5703l38.5303 -144.801
+c10.5898 -24.6299 34.2402 -30.75 47.4395 0z" />
+ <glyph glyph-name="fantasy-flight-games" unicode="&#xf6dc;" horiz-adv-x="512"
+d="M256 415.14l223.14 -223.14l-223.14 -223.14l-223.14 223.14zM88.3398 192.17c11.3447 -11.2461 29.7705 -29.4893 41.1299 -40.7197c20.1602 19.8799 40.46 39.8994 61.8506 60.9902c12.0596 -12.5801 24.5195 -25.5703 36.54 -38.1104
+c12.0293 11.6895 23.7393 23.0596 35.6895 34.6602c-6.99023 7.4502 -32.1494 32.8301 -35.0898 35.7793c-1.91016 1.9209 -2.29004 3.2207 -0.120117 5.35059c15.5801 15.2295 39.21 17.79 56.9805 5.09961c7.98926 -5.70996 14.2998 -11.6396 48.5098 -43.9502
+c10.8203 11.1504 22.2295 22.8506 33.5 34.6904c0.490234 0.520508 0.0996094 2.63965 -0.580078 3.37988c-0.0898438 0.100586 -37.5195 40.6006 -62.1504 59c-33.5801 25.0801 -78.3193 23.0605 -119.77 -18.6895c-84.5703 -85.1807 -94.5303 -95.4805 -96.4902 -97.4805z
+M323.16 90.5703c18.8203 18.79 80.3301 80.6396 100.5 101.5c-13.7305 13.4492 -27.1797 26.6299 -40.8604 40.0293c-20.0098 -19.7393 -40.2402 -39.6895 -61.25 -60.4199c-12.3301 12.8301 -24.8799 25.8799 -37.25 38.75
+c-1.25977 -0.689453 -1.64941 -0.80957 -1.91016 -1.06934c-10.7295 -10.7705 -21.4199 -21.5801 -32.21 -32.29c-2.22949 -2.20996 -0.519531 -3.35059 0.800781 -4.69043c10.5791 -10.7402 21.1797 -21.4502 31.7695 -32.1797
+c3.5498 -3.60059 3.54004 -3.85059 -0.139648 -7.24023c-16.8008 -15.4697 -40.8408 -16.54 -59.3203 -1.7998c-7.62012 6.08008 -11.6602 10.1797 -44.6797 42.0898c-11.5801 -11.8896 -23.3203 -23.9404 -35.3701 -36.3096
+c33.5498 -34.7607 50.8496 -53.3408 72.9297 -66.8408c28.9004 -17.6699 71.5 -14.96 106.99 20.4707zM256 448l256 -256l-256 -256l-256 256zM16 192l240 -240l240 240l-240 240z" />
+ <glyph glyph-name="penny-arcade" unicode="&#xf704;" horiz-adv-x="639"
+d="M421.91 283.73c7.33984 -16.2705 2.29004 -5.07031 24.6299 -54.6807l-39.7305 -10.6094c13.7002 59.2295 10.6104 45.8398 15.1006 65.29zM215.82 232.62c32.5 8.99023 41.9492 -37.6396 -0.350586 -47.4297c-14.2002 -3.77051 -6.64941 -1.75 -34.8193 -9.34082
+l-4.45996 46.1904c28.3193 7.5498 19.4395 5.17969 39.6299 10.5801zM541.98 258.81c75.7998 -37.9092 98 -76.3193 97.9893 -104.47c2.10059 -78.8496 -183.3 -130.33 -399.89 -84.8301c0.540039 -13 -8.00977 -24.6494 -20.5801 -28.0195
+c-125.54 -33.54 -117.35 -31.75 -122.53 -31.7598c-14.3701 -0.0107422 -26.4102 10.8896 -27.7998 25.1992l-4.2998 44.4805c-0.0683594 0.724609 -0.125 1.90332 -0.125 2.63184c0 10.5811 8.01758 22.2461 17.8945 26.0381l-1.73926 17.8799
+c-50.2305 28.2598 -80.9004 61.8701 -80.9004 95.3701c0 72.9199 144.26 113.4 309.41 98.3701c2.68945 7.54395 11.1514 15.3438 18.8896 17.4102c96.8701 25.9092 65.3203 17.4795 135.59 36.2295c13.1602 3.50977 26.9307 -2.95996 32.6201 -15.3301zM255.14 149.7
+c17.5 4.0498 40.2363 19.1562 50.75 33.7197c21.6006 32.5898 14.1104 105.561 -42.5498 104.43c-16.04 -0.229492 -8.07031 0.890625 -186.22 -46.6494l4.34961 -44.5l20.1201 5.38965l11.1104 -114.64l-20.0205 -5.35059l4.30078 -44.5195l115.31 30.7803
+l-4.50977 44.5098l-20.5303 -5.50977l-2.45996 23.5498l48.4404 12.9102zM454.32 133.08l108.55 28.96l-4.2998 44.4795l-20.79 -5.55957l-66.6699 145.47c-70.5801 -18.8301 -42.2305 -11.25 -135.591 -36.2393l4.2002 -44.4805l17.1504 4.55957l-33.0801 -126.47
+l-20.9902 -5.58984l4.45996 -44.4297l112.851 30.0693l-4.05078 39.54l-19.1992 -5.12012l4.09961 17.54l57.7598 15.4209l6.61035 -14.6807l-14.9004 -3.97949z" />
+ <glyph glyph-name="wizards-of-the-coast" unicode="&#xf730;" horiz-adv-x="640"
+d="M219.19 102.31c7.44922 5.80078 16.2598 0.680664 21.7295 -7.0791c7.08984 -10.1201 6.24023 -18.1602 -0.259766 -23.04c-7.62012 -6.24023 -17.0898 0.129883 -21.7305 6.5498c-10.8096 15.1299 -1.63965 22.1895 0.260742 23.5693zM555.94 26.3701
+c1.30957 4.4502 3.92969 10.21 3.93945 20.1699c0 34.04 -41.6299 64.4102 -100.03 68.0801c-53.1592 3.39941 -120.46 -15.4502 -184.35 -73.8506l-0.790039 0.260742c1.58008 10.4697 -0.780273 16.2295 -3.40039 21.21l0.260742 1.56934
+c64.4199 51.3203 134.069 66.5107 188.8 60.4902c61.0098 -6.54004 104.479 -39.54 101.34 -78.0303c-0.790039 -9.68945 -2.88965 -15.71 -4.97949 -19.8994c-1.34082 -1.66992 -1.13086 -1.7002 -0.790039 0zM392.28 207.58
+c-0.530273 7.07031 3.13965 11.7803 6.7998 15.46c3.66992 3.91992 14.9297 10.4697 14.9297 10.4697s-1.2998 -26.4502 -2.08984 -29.8496c-1.04004 -3.92969 -4.96973 -6.81055 -10.4697 -6.5498c-4.98047 0.259766 -8.37988 3.39941 -9.16992 10.4697zM342.26 358.68
+c147.17 0 275.48 -86.6797 291.21 -196.939c0 0 -3.66992 -1.31055 -9.68945 -4.4502c0 -0.259766 1.0498 -10.7402 0.259766 -16.5c-0.259766 -1.83008 -1.0498 -1.0498 -1.0498 0c-0.270508 5.24023 -1.57031 11.5303 -2.36035 14.9297
+c-4.70996 -2.60938 -10.21 -6.54004 -15.9697 -11.7793c0 0 4.70996 -10.21 4.70996 -25.9209c0 -21.21 -8.37988 -32.9893 -16.5 -37.9697l-0.259766 0.520508c9.16992 9.16992 12.5693 21.4795 12.5693 31.9492c0 13.8701 -6.80957 33.25 -14.3994 41.3701
+c0 0 4.4502 -8.12012 6.80957 -17.8096c0 0 -21.21 -21.4697 -26.9697 -62.3203c0 0 -3.66992 9.16992 -10.7402 16.2402c0 0 12.0498 -15.4502 12.0498 -38.2305c0 -19.3799 -12.8398 -37.4395 -27.5 -48.1797c-0.989258 0 -0.790039 -0.169922 -0.790039 0.790039
+c15.71 12.8301 22.2607 28.0205 22.2607 46.3506c0 38.2295 -49.2305 80.3896 -130.15 80.3896c-96.1104 0 -181.74 -58.1299 -236.99 -128.05l-1.0498 0.259766c-40.3203 120.979 -135.64 185.66 -196.13 202.16c-2.09961 0.519531 -1.83984 0.790039 -0.790039 1.30957
+c12.3096 14.4004 136.96 151.88 341.47 151.88zM243.02 69.0596c16.8408 14.5908 4.99023 30.7705 4.71094 31.1602c-4.08008 5.99023 -16.3105 16.8506 -31.1602 5.5c-10.9502 -8.37988 -11.6406 -22.8896 -4.19043 -32.4697
+c6.44043 -8.26953 19.5801 -13.1797 30.6396 -4.19043zM245.11 205.49l1.83008 -8.11035l-3.6709 4.4502l-14.1396 -26.71l24.6201 -28.7998l12.5703 6.01953l-11.7803 70.96zM263.7 87.9102c3.41016 2.35938 7.33984 4.97949 9.67969 6.57031l-0.259766 0.259766
+c-1.56055 -0.780273 -3.11035 -1.0498 -12.5703 15.9697v0.259766c6.87012 5.16016 8.45996 4.89062 11.5205 5.5l0.259766 0.260742c-1.31055 3.66992 -1.31055 3.66992 -1.83008 5.5h-0.259766c-3.95996 -3.31055 -1.4707 -1.58008 -11.5205 -7.86035h-0.259766
+c-1.83008 3.13965 -4.19043 7.33008 -5.75977 9.68945v1.31055c4.4502 3.91992 10.2197 6.7998 12.3096 7.58984c2.87988 1.0498 4.19043 0.520508 5.24023 0.259766l0.259766 0.520508c-1.30957 1.83008 -2.08984 2.87988 -3.39941 4.70996l-0.520508 0.259766
+c-9.9502 -5.5 -17.54 -9.9502 -25.3994 -15.71l0.259766 -0.519531c1.30957 0.259766 3.13965 -0.260742 4.4502 -2.62012c15.04 -25.0801 19.5898 -27.5908 17.54 -31.6904zM318.96 120.38v0.25c-1.99023 0 -2.34961 -1.37012 -14.6602 30.6396v0.260742
+c4.95996 1.85938 8.78027 4.37988 12.3105 2.62012l0.259766 0.519531l-3.13965 4.98047l-0.520508 0.259766c-2.22949 -0.929688 -20.4697 -8.00977 -27.7598 -12.5703l-0.259766 -0.519531l1.0498 -5.76074h0.519531c1.0498 3.68066 9.7998 7.33008 9.9502 7.33008
+l0.259766 -0.259766c12.9404 -29.7598 13.0703 -29.8799 11.7803 -32.4697l0.259766 -0.259766c3.93066 2.09961 6.81055 3.40918 9.9502 4.97949zM363.73 136.88c-0.780273 0.520508 -2.09082 1.31055 -2.63086 3.92969c-1.56934 6.02051 -4.70996 20.1709 -6.2793 26.4502
+c-0.530273 1.57031 -0.530273 3.14062 0.519531 4.4502l-0.259766 0.259766c-3.41016 -0.529297 -6.29004 -1.30957 -10.7402 -2.35938v-0.260742c1.57031 -0.529297 2.10059 -2.09961 2.62012 -3.92969l2.62012 -9.42969l-0.259766 -0.259766
+c-3.40039 -1.05078 -8.90039 -2.62012 -12.8301 -3.93066h-0.259766c-0.780273 2.10059 -1.83008 5.75977 -3.14062 9.69043l0.259766 4.70996l-0.259766 0.259766c-4.71973 -1.30957 -7.59961 -2.34961 -10.7402 -3.40039v-0.519531
+c1.05078 0 2.10059 -1.30957 2.62012 -3.13965c1.0498 -3.40039 8.12012 -24.0908 9.16992 -27.2305c0.790039 -2.09961 0.790039 -3.66992 -0.259766 -4.97949l0.259766 -0.260742c3.14062 1.31055 6.54004 2.87988 10.21 3.93066v0.519531
+c-1.0498 0.259766 -2.08984 0.780273 -2.87988 3.13965c-1.0498 3.93066 -3.39941 11.2607 -4.18945 13.8809l0.259766 0.259766c3.92969 1.30957 9.42969 3.13965 12.8301 3.92969l0.259766 -0.259766c0.530273 -2.09961 2.62012 -10.2197 3.66992 -13.6201
+l-0.519531 -4.4502l0.259766 -0.259766c4.4502 1.57031 5.5 1.83008 9.69043 2.87988zM395.94 143.69c0.529297 1.8291 1.0498 3.65918 1.5791 6.04004h-0.259766c-2.0293 -4.06055 -15.0898 -5.09082 -16.2402 -4.71094l-0.259766 0.260742
+c-0.519531 3.13965 -1.83008 10.4795 -2.08984 12.5693l0.259766 0.260742c8.06055 0.899414 5.40039 1.0293 10.21 0h0.260742c0 3.40918 0.259766 3.66992 0.259766 5.23926h-0.259766c-5.98047 -2.2998 -1.2207 -0.679688 -10.7402 -2.35938l-0.259766 0.259766
+c-0.520508 3.40039 -1.31055 8.37988 -1.57031 9.9502l0.259766 0.259766c12.9004 2.41016 15.1006 0.349609 16.2402 -0.790039l0.259766 0.259766c-0.780273 2.36035 -1.0498 3.14062 -1.57031 5.5l-0.259766 0.260742
+c-4.71973 -0.260742 -15.71 -1.05078 -24.8799 -2.62012l-0.790039 -0.520508c1.83008 -0.790039 2.36035 -1.83984 2.62012 -3.66992c1.58008 -7.59961 3.41016 -18.3301 4.98047 -26.1895l-0.790039 -4.19043l0.259766 -0.259766
+c8.37988 1.83008 17.8096 3.66992 22.5195 4.18945zM406.68 188.2c3.14062 1.56934 7.33008 5.5 7.33008 5.50977c1.95996 -4.58008 0.970703 -2.70996 4.19043 -7.86035c10.1494 -0.459961 8.60938 0.0205078 20.4297 -1.0498l0.790039 4.70996
+s-4.18945 0 -5.75977 1.83008c-1.0498 1.31055 -1.31055 3.14062 -1.57031 5.5c0 2.36035 0.270508 16.5 0.790039 20.6904c0.259766 4.18945 2.08984 20.4199 2.08984 23.04c0.260742 2.62012 1.0498 8.91016 0.260742 12.0498
+c-4.82031 19.2803 -24.4307 17.8096 -50.0205 16.2402l-5.24023 -16.2402l2.62012 -2.87988c16.5498 16.5498 37.6201 4.56934 29.5898 -5.75977c-5.18945 -6.9209 -19.7393 -8.90039 -28.54 -17.0205c-6.47949 -6.49023 -12.2393 -20.9004 -5.5 -31.6904
+c6.12988 -11.0391 17.29 -9.96973 17.54 -9.94922c2.87988 0 6.55078 0.519531 11 2.87988zM443.86 166.99c0 1.83984 0.269531 4.18945 0.269531 5.25l-0.259766 0.519531c-14.3604 8.98047 -26.8604 0.919922 -28.7998 -9.9502
+c-2.83984 -16.0898 15.3594 -25.46 25.6602 -18.5898l0.519531 0.520508c0 0.259766 1.30957 4.4502 1.83008 6.2793l-0.259766 0.260742c-6.39062 -9.58008 -23.3203 -6.87012 -20.6904 10.21c1.91016 12.6602 15.3799 16.0801 21.7305 5.5zM449.63 254.72
+c0 0 4.96973 -0.790039 4.99023 -3.66016c0 -2.08984 -4.98047 -55.25 -4.98047 -55.25c-0.109375 -1.48926 -0.339844 -6.80957 -7.58984 -6.80957l-0.790039 -4.70996c18.3906 -2.83008 19.3701 -3.04004 36.9199 -7.33008l0.520508 4.70996
+c-13.0498 3.91992 -9.74023 7.37012 -4.4502 46.0898c1.09961 0.870117 8.62012 7.14062 20.6904 0.790039l11.2598 11.2598s-9.69043 8.90039 -14.9307 7.33008c-5.23926 -1.30957 -15.4492 -10.7393 -15.4492 -10.7393l1.56934 17.54
+c-8.10938 4.0498 -27.0693 7.3291 -27.7598 7.3291v-6.5498zM460.62 140.28c9.42969 -2.35059 16.2402 2.62012 18.8496 11.5195c2.08984 7.60059 -1.56934 16.7598 -10.7393 19.3799c-6.54004 2.10059 -15.7109 -0.779297 -18.8506 -10.21
+c-3.39941 -9.68945 2.62012 -18.5996 10.7402 -20.6895zM502.78 130.59c-0.780273 1.31055 -1.04004 2.10059 -0.799805 3.91016c1.22949 27.0098 1.5293 24.6602 1.0498 25.1396c-2.08984 0.790039 -5.5 2.09082 -7.58984 2.87988l-0.520508 -0.259766v-2.08984
+c-3.92969 -6.01953 -10.4795 -15.4502 -13.8799 -20.1602l-2.62012 -1.83008v-0.259766c2.08984 -0.259766 4.70996 -1.30957 6.02051 -1.57031v0.260742l0.790039 3.39941c0.789062 1.0498 2.35938 3.66992 3.66992 5.5c0.40918 0 2.25 -0.549805 7.06934 -2.35938
+c0.330078 -0.320312 0.330078 0.649414 -0.259766 -7.59082l-1.57031 -1.8291v-0.260742c1.57031 -0.519531 6.28027 -2.35938 8.64062 -2.87988zM498.07 220.41c-13.2207 -21.1504 -9.39062 -51.6006 9.66992 -52.9004c5.75977 -0.259766 9.42969 3.93066 9.68945 3.66992
+l-2.08984 -6.80957c8.91016 -4.21973 11.4404 -5.29004 17.8105 -8.63965l1.83008 4.44922c-6.14062 3.51074 -1.29004 11.25 24.6191 84.3203c-6.13965 6.45996 -10.2998 10.0596 -22.5195 20.4297l-1.83008 -3.66992c1.62988 -1.35938 6.79004 -5.00977 4.4502 -11.2598
+l-7.58984 -26.1904c-3.28027 12.79 -22.79 14.8701 -34.04 -3.39941zM527.4 141.07l2.35938 3.39941v0.520508c-3.41016 6.83008 -11.9395 7.41992 -14.6602 2.35938c-1.83984 -3.40918 0.260742 -7.06934 1.83008 -9.68945
+c1.57031 -2.87988 3.14062 -6.29004 2.08984 -8.37988c-2.31934 -4.62988 -8.94922 -0.680664 -8.37988 4.97949l-0.790039 -0.259766c-2.09961 -4.7998 -1.83008 -4.00977 -1.83008 -4.70996c3.05078 -6.09961 12.8105 -7.12988 15.4502 -0.790039
+c1.57031 3.15039 0.520508 6.80957 -1.0498 9.42969c-1.83008 3.40039 -4.18945 6.29004 -2.87988 8.37988c1.51953 2.65039 7.86035 0.470703 7.86035 -5.23926zM548.61 127.71l1.30957 3.91016l-0.259766 0.259766c-2.36035 2.08984 -8.64062 6.54004 -12.3105 8.90039
+h-0.259766l-3.13965 -3.40039v-0.259766c4.7998 -0.320312 3.37988 0.149414 6.01953 -1.83008v-0.259766c-2.62012 -4.9707 -6.0293 -11.2607 -9.16992 -17.0205l-2.08984 -1.30957l-0.259766 -0.259766l5.75977 -4.4502l0.259766 0.259766
+c-0.259766 0.530273 -0.519531 1.57031 0.790039 3.92969c2.87988 5.77051 6.28027 12.0508 8.64062 16.2402h0.259766c3.54004 -2.57031 2.49023 -1.43945 4.4502 -4.70996zM575.84 171.97l7.85059 10.46s-9.4209 18.8604 -23.04 16.5
+c-20.8408 -4.0293 -3.15039 -34.21 -2.09082 -38.2295c4.33008 -15.1299 -16.3193 -12.5605 -13.3496 5.24023l-2.87988 2.08984l-4.98047 -14.4004s11.7803 -11.2598 20.1602 -10.4697c8.12012 0.790039 13.8799 6.29004 13.8799 16.5
+c0 8.37988 -7.85938 22.7803 -7.85938 27.7598c0 6.86035 12.2695 4.75977 11.5195 -4.97949c-0.259766 -2.61035 -1.2998 -5.23047 -2.08984 -7.59082zM611.46 182.18c0.780273 -2.35938 1.57031 -1.83008 0.790039 0.270508
+c-32.4697 98.9795 -132.76 138.78 -199.8 139.83c-50.54 0.779297 -89.5605 -11.79 -131.98 -35.8799l20.6904 61.0098l-33.7803 -65.7305l-8.89941 20.9502c3.13965 1.04004 6.2793 2.08984 6.2793 2.08984l-2.62012 8.64062s-3.13965 -0.780273 -7.33008 -2.09082
+l-12.0498 28.2803l13.6201 -61.0098c-5.12012 2.55957 -19.0996 6.83008 -6.5498 19.3799l-2.62012 11c-6.97949 -2.21973 -13.2295 -3.62012 -32.21 -9.68945l-23.0801 11.5l59.1797 -42.6807l-4.70996 -2.08984l-17.2793 13.8799
+c2.23926 -5.13965 3.2998 -12.1699 4.70996 -19.6396l-28.54 -13.0898l-30.1104 36.1396l-17.2803 -9.16992l13.6201 -42.4199l-11.2598 -4.98047l94.2695 29.3301l-3.66992 -10.4697l-0.519531 3.13965l-13.0898 -3.39941l4.97949 -24.6201l-4.4502 -12.3105
+l-25.6592 30.6406l-39.8008 -10.21l18.8506 -58.9199c-60.1299 62.3994 -67.7002 66.3994 -61.7998 75.6797c2.09961 2.87988 7.85938 7.07031 7.85938 7.07031l-4.18945 7.06934c-26.7803 -18.3496 -27.8398 -19.1494 -58.4004 -42.6797l4.98047 -6.01953
+s8.12012 5.75977 13.6201 5.5c7.81934 -0.350586 1.76953 2.93945 113.659 -98.7305l11.7803 8.37988l-27.7598 93.4805l35.8799 -42.1602l-4.70996 -13.8799l41.9004 88.5098c34.6699 -80.5098 29.1494 -66.9502 32.9893 -78.8203l-33.5195 67.2998l-2.36035 -4.44922
+c1.2998 -1.30078 -0.919922 3.05957 22.7803 -59.4404c3.22949 -8.88965 -1.10059 -9.88965 -5.5 -12.8301l2.36035 -4.70996c15.3594 6.79004 22.9395 9.54004 39.0195 14.4004l-1.0498 4.97949c-8.89062 -1.33008 -10.1006 0.169922 -12.0498 4.4502
+c-1.05078 2.09961 -14.1504 40.0703 -20.4307 58.6602l-10.21 4.97949l-2.35938 8.12012l61.54 -36.6602l-13.0908 -43.21c12.1904 3.26074 27.0303 6.74023 49.4902 9.9502l-0.259766 26.71l-4.98047 -1.0498c-0.669922 -13.7998 -6.0293 -22.0801 -19.6396 -22.7803
+l22.2598 80.3906c-27.6201 -0.450195 -59.2695 -7.19043 -66.7695 -8.90039l3.92969 -16.5l-25.1396 19.6396l91.3896 20.6904l-85.6299 -9.16992c38.4902 22.5195 79.3398 39.0195 132.76 37.9697c131.46 -2.08984 180.95 -99.2402 191.95 -129.62zM203.48 295.57
+l2.35938 -8.64062c7.82031 2.61035 10.8604 2.36035 11.2598 2.36035l-9.42969 7.58984c-2.36035 -0.790039 -4.18945 -1.30957 -4.18945 -1.30957zM347.24 257.07l-11.5303 -37.71l-21.7295 17.0195c6.7998 25.5 31.6895 21.29 33.2598 20.6904zM318.43 380.93
+c224.94 0 321.83 -143.76 321.57 -227.55c0 -11 -0.269531 -17.5498 -0.790039 -19.6396c-0.259766 -2.10059 -1.0498 -0.790039 -1.0498 0.519531v9.9502c0 106.58 -121.51 223.37 -301.67 223.37c-61.2705 0 -103.69 -12.0498 -110.24 -13.8799l-1.57031 0.259766
+c-6.80957 7.58984 -12.8301 9.69043 -21.21 11.7803v0.790039c8.91016 2.34961 56.5605 14.3994 114.96 14.3994zM529.49 211.25c-8.61035 -34.4502 -13.6504 -35.3496 -18.3301 -35.3604c-7.33008 0 -6.81055 9.43066 -6.02051 14.9307
+c0.879883 9.72949 7.40039 34.6494 17.0205 33.5195c7.33008 -0.780273 8.63965 -7.33008 7.33008 -13.0898zM467.96 168.3c3.40039 -0.780273 7.84961 -4.4502 5.23047 -14.3896c-2.88086 -11.2598 -8.11035 -11.79 -11.7803 -10.7402
+c-5.5 1.31055 -7.85059 7.84961 -6.02051 14.6602c3.14062 11.2598 9.9502 11.2598 12.5703 10.4697zM491 147.35v0.270508c1.0498 1.83008 5.5 8.63965 6.5498 9.9502c-0.269531 -3.66992 -0.790039 -10.2207 -0.790039 -12.0508
+c-2.62012 0.780273 -3.92969 1.31055 -5.75977 1.83008z" />
+ <glyph glyph-name="think-peaks" unicode="&#xf731;" horiz-adv-x="529"
+d="M442.4 38.5996l-206.2 353.801l-204.2 -352.101l-32 0.299805l236.2 407.4l206.2 -353.9l55.0996 95l32 -0.299805zM87.0996 82.7002l149.601 257.899l235.8 -404.6l-32.5 0.0996094l-203.4 349.101l-117.399 -202.5h-32.1006z" />
+ <glyph glyph-name="reacteurope" unicode="&#xf75d;" horiz-adv-x="570"
+d="M247.6 236.2l2 6.7998l-5.69922 4.2998l7.19922 0.100586l2.30078 6.7998l2.2998 -6.7998l7.09961 -0.100586l-5.7002 -4.2998l2.10059 -6.7998l-5.7998 4.09961zM311.3 236.2l1.90039 6.7998l-5.7002 4.2998l7.2002 0.100586l2.2998 6.7998l2.2998 -6.7998
+l7.2002 -0.100586l-5.7002 -4.2998l2.10059 -6.7998l-5.80078 4.09961zM220 185.7c4.90039 0 3.7998 -3.90039 3.7998 -13.7998c0 -10.3008 -6.7002 -14.1006 -16.7998 -14.1006h-0.200195c-10.0996 0 -16.7998 3.7002 -16.7998 14.1006v40.0996
+c0 9.90039 6.7002 14.0996 16.7998 14.0996h0.200195c10.0996 0 16.7998 -4.19922 16.7998 -14.0996c0 -8.40039 0.900391 -12.2002 -3.7998 -12.2998h-3.40039c-4.5 0 -3.7998 3.2998 -3.7998 10.5c0 4.7002 -2.2998 6.09961 -5.7998 6.09961
+s-5.7998 -1.39941 -5.7998 -6.09961v-36.6006c0 -4.69922 2.2998 -6.09961 5.7998 -6.09961s5.7998 1.40039 5.7998 6.09961c0 8.10059 -1 12.1006 3.7998 12.1006h3.40039zM139.3 168.3c2.5 0 3.7998 -1.2998 3.7998 -3.7998v-2.09961
+c0 -2.5 -1.2998 -3.80078 -3.7998 -3.80078h-21.8994c-2.5 0 -3.80078 1.30078 -3.80078 3.80078v59.0996c0 2.5 1.30078 3.90039 3.7002 3.7998h21.7002c2.5 0 3.7998 -1.2998 3.7998 -3.7998v-2.09961c0 -2.5 -1.2998 -3.80078 -3.7998 -3.80078h-14.4004v-18.2998
+h11.4004c2.5 0 3.7998 -1.2998 3.7998 -3.7998v-2.09961c0 -2.5 -1.2998 -3.80078 -3.7998 -3.80078h-11.4004v-19.2998h14.7002zM97.2998 186.8l8.10059 -23.8994c0.799805 -2.60059 -0.400391 -4.40039 -3.2002 -4.40039h-3.2998c-2.2002 0 -3.60059 1 -4.30078 3.2002
+l-7.39941 23.5h-5.60059v-22.9004c0 -2.5 -1.2998 -3.7998 -3.7998 -3.7998h-3.39941c-2.5 0 -3.80078 1.2998 -3.80078 3.7998v59.1006c0 2.5 1.30078 3.7998 3.80078 3.7998h13.3994c10.1006 0 16.7998 -4 16.7998 -14.1006v-11.8994
+c0 -6.40039 -2.69922 -10.4004 -7.2998 -12.4004zM93.5 200.8v8.7002c0 4.7998 -2.5 6.09961 -6.09961 6.09961h-5.80078v-20.8994h5.80078c3.59961 0 6.09961 1.2998 6.09961 6.09961zM173 221.9l11.2002 -59.2002c0.5 -2.7002 -0.799805 -4.10059 -3.40039 -4.10059h-3.5
+c-2.2998 0 -3.7002 1.10059 -4.09961 3.5l-1.7998 11.3008h-12.2002l-1.7998 -11.3008c-0.400391 -2.39941 -1.80078 -3.5 -4.10059 -3.5h-3c-2.5 0 -3.89941 1.40039 -3.39941 4.10059l11 59.2002c0.399414 2.2998 1.89941 3.5 4 3.39941h6.89941
+c2.2998 0 3.7998 -1.09961 4.2002 -3.39941zM160.7 182.6h9.39941l-4.69922 29.7002zM250 162.4c0 -2.5 -1.2998 -3.80078 -3.7998 -3.80078h-3.40039c-2.5 0 -3.7998 1.30078 -3.7998 3.80078v53.1992h-7.2998c-2.5 0 -3.7998 1.30078 -3.7998 3.80078v2.09961
+c0 2.5 1.2998 3.7998 3.7998 3.7998h25.7998c2.5 0 3.7998 -1.2998 3.7998 -3.7998v-2.09961c0 -2.5 -1.2998 -3.80078 -3.7998 -3.80078h-7.5v-53.1992zM498 163.2c1.2002 0 2 -0.700195 2 -2v-0.799805c0 -1.2002 -0.799805 -1.90039 -2 -1.90039h-22.5
+c-1.2002 0 -2 0.700195 -2 1.90039v63c0 1.2998 0.799805 2 2 1.7998h22.2002c1.2002 0 2 -0.600586 2 -1.90039v-0.799805c0 -1.2002 -0.799805 -2 -2 -2h-19.1006v-25.7998h16.1006c1.2002 0 2 -0.799805 2 -2v-0.799805c0 -1.2002 -0.799805 -2 -2 -2h-16.1006v-26.7002
+h19.4004zM404.9 226.1c10.0996 0 15.2998 -4.69922 15.2998 -14.0996v-40.0996c0 -9.30078 -5.2002 -14.1006 -15.2998 -14.1006h-0.800781c-10.0996 0 -15.2998 4.7998 -15.2998 14.1006v40.0996c0 9.40039 5.2002 14.0996 15.2998 14.0996h0.800781zM415.1 173.7v36.5996
+c0 7.90039 -3 11.1006 -10.5 11.1006s-10.5 -3.2002 -10.5 -11.1006v-36.5996c0 -8 3 -11.1006 10.5 -11.1006s10.4004 3.10059 10.5 11.1006zM368.6 188.2l10.6006 -27.2998c0.5 -1.30078 -0.100586 -2.30078 -1.5 -2.30078h-1.5c-1 0 -1.7998 0.400391 -2.2998 1.5
+l-10.4004 27.2002h-11.5996v-26.8994c0 -1.2002 -0.700195 -1.90039 -1.90039 -1.90039h-1.2002c-1.2002 0 -2 0.700195 -2 1.90039v63c0 1.19922 0.799805 1.89941 2 1.89941h13.7002c10.0996 0 15.2998 -4.7002 15.2998 -14.0996v-9.7002
+c0 -7.2002 -3.09961 -11.7002 -9.2002 -13.2998zM362.2 192.1c7.5 0 10.5 3.2002 10.5 11v6.40039c0 8 -3 11.0996 -10.5 11.0996h-10.2002v-28.5h10.2002zM448.1 225.2c10.1006 0 15.3008 -4.7002 15.3008 -14.1006v-10.5c0 -9.2998 -5.2002 -14.0996 -15.3008 -14.0996
+h-10.5996v-26.0996c0 -1.2002 -0.700195 -1.90039 -1.90039 -1.90039h-1.19922c-1.2002 0 -2 0.700195 -2 1.90039v63c0 1.2998 0.799805 2 2 1.7998h13.6992zM458.3 202.4v7.09961c0 7.90039 -3 11.0996 -10.5 11h-10.2002v-29.2002h10.2002
+c7.5 0 10.5 3.2002 10.5 11.1006zM256.5 139.9l7.09961 -0.100586l-5.69922 -4.2998l2.09961 -6.7998l-5.7998 4.09961l-5.7998 -4.09961l2.09961 6.7998l-5.7002 4.2998l7.10059 0.100586l2.2998 6.7998zM484.1 276c122.301 -46.0996 118.4 -132.5 -33.8994 -176.3
+c13.3994 -49.7002 18.0996 -101.9 0.0996094 -133.8c-3.7998 -6.7002 -16.7998 -27.7002 -47.5996 -27.7002c-41.5 0 -110.2 41.5996 -182.101 142c-42.7998 3.5 -72.1992 10.0996 -84.5996 13c-20.5 -82.2998 -6.7998 -125.3 15.5 -137.9
+c1.2002 -0.700195 38.4004 -27.2002 120.9 52.7998c3.39941 -3.5 6.7998 -6.89941 10.1992 -10.1992c-63.0996 -61.2002 -110.199 -71 -138.199 -55.2002c-32.4004 18.2998 -42.8008 72 -22.3008 153.899c-18.8994 5 -121.6 33.2002 -122.1 92.8008
+c-0.400391 40.8994 49.7998 74.6992 120.3 95c-13.3994 49.5996 -18.2002 101.8 -0.0996094 133.8c3.7998 6.7002 16.7998 27.7002 47.5996 27.7002c41.6006 0 110.3 -41.6006 182.2 -142.101c21.2998 -1.7998 51.2998 -5.2002 84.5996 -13
+c20.5 82 6.90039 125.101 -15.5 137.8c-1.2998 0.700195 -38.3994 27.2002 -120.899 -52.7998c-3.40039 3.5 -6.7998 6.90039 -10.2002 10.2002c52.2998 50.9004 103.7 74.5996 138.2 55.2002c33.8994 -19.2002 41.8994 -75.9004 22.2998 -153.9
+c1.90039 -0.5 17.9004 -4.59961 35.5996 -11.2998zM132.9 411.1c-23.1006 -40.7998 1 -121.6 1.19922 -123c12.4004 3.10059 41.6006 10.3008 84.4004 14.6006c16.2002 23.5 33.2998 45.2002 50.7002 64.2002c3.39941 -3.30078 6.7998 -6.7002 10.2002 -10.2002
+c-14.4004 -15.7002 -28.6006 -33.2998 -42.2002 -52.4004c42.7002 3.2002 75.0996 1.90039 93.8994 0.900391c-64.5 86.5996 -126.5 126.2 -163.3 126.2c-23 0 -32 -15.2002 -34.8994 -20.3008zM437.7 -27.2002c3.2998 6 21.5 38.6006 -1.2002 123
+c-4.09961 -1.09961 -37.0996 -9.89941 -84.4004 -14.5996c-16.1992 -23.6006 -33.2998 -45.2002 -50.6992 -64.2002c-3.40039 3.2998 -6.80078 6.7002 -10.2002 10.2002c14.3994 15.7002 28.5996 33.2998 42.2002 52.3994
+c-9.80078 -0.799805 -46.8008 -3.59961 -93.9004 -0.899414c64.5996 -86.7998 126.6 -126.2 163.3 -126.2c23.1006 0 32 15.2002 34.9004 20.2998zM446.8 111.4c25.6006 7.2998 85.9004 27.3994 105.7 62.5c1.40039 2.5 33.5 50.5 -72.5996 90.3994
+c-15.3008 5.7998 -29.2002 9.60059 -34.5 11c-3.60059 -12.8994 -7.90039 -26.0996 -12.8008 -39.5c-2 -0.299805 -6.19922 -1.39941 -8.89941 -5.59961l-0.100586 0.0996094c-1.89941 1.7998 -4.19922 3.2998 -6.7998 4.2998c5.7002 15 10.6006 29.8008 14.6006 44.2002
+c-7.2002 1.7002 -31.8008 7.60059 -72.2002 11.7002c16.7002 -24.5 27.8994 -44.0996 34.2998 -55.5c-6.09961 -2.09961 -9.2002 -5.7998 -10.4004 -7.40039c-13.5996 16.4004 -11 19.9004 -42.5 64.5c-44.8994 3.10059 -84.5 1.60059 -112.399 -1
+c-16.9004 -25 -28.2998 -45.1992 -34.7998 -56.8994c-1.30078 -0.600586 -6.5 -2.7998 -10.2002 -8.40039c-1.10059 3.5 -3.40039 6.2002 -6.40039 8c6.10059 11.4004 16.9004 31 32.7998 55.2998c-39.5996 -4.59961 -65 -11.1992 -72 -13
+c4.30078 -14.0996 9.40039 -28.5996 15.2002 -43.2998c-0.899414 -0.599609 -1.59961 -1.39941 -2.2998 -2.2002c-1.5 1.90039 -4 5.30078 -14.4004 5.30078c-4.69922 12.2998 -8.7998 24.5 -12.3994 36.3994c-138.8 -40.3994 -158.4 -121.399 1.5 -164
+c3.59961 12.9004 7.7998 26 12.7002 39.4004c1 0 3.69922 -0.200195 6.89941 1c3 -1.2002 5.2002 -1 8.40039 -1c-5.5 -14.6006 -10.2002 -28.9004 -14.1006 -42.9004c3.60059 -0.799805 32 -7.7998 72.2002 -11.7002c-16.2998 23.9004 -27.5 43.4004 -33.7998 54.6006
+c8.7002 0 10.7002 1.59961 12.5996 3.2002c1 -0.600586 2 -1.10059 3 -1.5c15.3008 -26.7002 28.9004 -46.6006 36.8008 -57.8008c28.0996 -2.09961 67.2998 -2.89941 112.399 1c16.5 24.3008 27.7002 44 33.9004 55.2002c7.2998 0 9.7998 3 10.8994 4.2002
+c1.80078 -1.2998 3.80078 -2.40039 6 -3.2002c-15 -28 -28.6992 -48.8994 -32.1992 -54.2002c40.1992 4.5 71.2998 12.8008 72 13c-4.10059 13.8008 -9 27.9004 -14.7002 42.2002c2 0.900391 3.7998 2.10059 5.39941 3.60059l0.100586 0.0996094
+c2 -3.09961 5.7998 -5.7002 10.7002 -5.7002c4.69922 -12.2998 8.7998 -24.5 12.3994 -36.3994zM332.4 225.4c1.2998 0 2 -0.700195 2.19922 -1.90039v-51.5c0 -9.5 -5 -14.0996 -15.0996 -14.0996h-0.400391c-10.0996 0 -15.0996 4.5 -15.0996 14.0996v51.5
+c0 1.2002 0.799805 1.90039 2 1.90039h1.2002c1.2002 0 1.89941 -0.600586 1.89941 -1.90039v-49.7998c0 -8 2.60059 -11.1006 10.1006 -11.1006s10.0996 3.2002 10.0996 11.1006v49.7998c0 1.2002 0.700195 1.90039 1.90039 1.90039h1.2002zM318.7 139.9l7.09961 -0.100586
+l-5.7002 -4.2998l2.10059 -6.7998l-5.7998 4.09961l-5.80078 -4.09961l2.10059 6.7998l-5.7002 4.2998l7.09961 0.100586l2.30078 6.7998zM287.6 132.5l7.10059 -0.0996094l-5.7002 -4.30078l2.09961 -6.7998l-5.7998 4.10059l-5.7998 -4.10059l2.09961 6.7998
+l-5.69922 4.30078l7.09961 0.0996094l2.2998 6.7998zM292.7 163.3c1.2998 0 2 -0.700195 2 -2.09961v-0.799805c0 -1.2002 -0.700195 -1.90039 -2 -1.90039h-22.5c-1.2002 0 -2 0.700195 -2 1.90039v63c0 1.19922 0.799805 1.89941 2 1.89941h22.2002
+c1.19922 0 2 -0.599609 2 -1.89941v-0.800781c0 -1.19922 -0.800781 -2 -2 -2h-19.1006v-25.7998h16.1006c1.19922 0 2 -0.799805 2 -2v-0.799805c0 -1.2002 -0.800781 -2 -2 -2h-16.1006v-26.7002h19.4004zM285.3 262.7l2.2998 -6.7998l7.10059 -0.100586l-5.7002 -4.2998
+l2.09961 -6.7998l-5.7998 4.09961l-5.7998 -4.09961l2.09961 6.7998l-5.69922 4.2998l7.09961 0.100586z" />
+ <glyph glyph-name="adobe" unicode="&#xf778;" horiz-adv-x="460"
+d="M289.9 383.7h170.899v-384zM170.9 383.7l-170.9 -384v384h170.9zM230.4 241.6l107.5 -241.899h-73l-30.7002 76.7998h-78.7002z" />
+ <glyph glyph-name="artstation" unicode="&#xf77a;" horiz-adv-x="508"
+d="M0 70.5996h315.1l59.2002 -102.6h-285.399c-20.1006 0 -37.4004 11.5 -45.9004 28.2998zM499.8 98c19 -29.4004 -0.0996094 -55.9004 -2 -59.0996l-40.7002 -70.5l-257.3 447.6h88.4004c19.7002 0 36.7998 -11.0996 45.3994 -27.2998zM273 143.5h-231l115.5 200z" />
+ <glyph glyph-name="atlassian" unicode="&#xf77b;" horiz-adv-x="512"
+d="M152.2 211.6c66.2998 -70.7998 89.0996 -189.3 51.2002 -267.1c-2.40039 -5.2002 -7.60059 -8.5 -13.4004 -8.40039h-175c-11 0 -18.4004 11.7002 -13.4004 21.7002l125.801 251c5.09961 10.5 17.0996 11 24.7998 2.7998zM244.4 439.9
+c6.7998 10.8994 20.2998 10.6992 25.5996 0.0996094c5.90039 -11.7002 240.4 -482.3 240.4 -482.3c5 -9.90039 -2.2002 -21.7002 -13.4004 -21.7002h-174.2c-5.7002 0 -10.8994 3.2998 -13.3994 8.40039c-73.5 146.899 -187.301 302.1 -65 495.5z" />
+ <glyph glyph-name="canadian-maple-leaf" unicode="&#xf785;" horiz-adv-x="466"
+d="M360.8 96.2998c-5 -5 -10 -7.5 -5 -22.5s10 -35.0996 10 -35.0996s-95.2002 20.0996 -105.2 22.5996c-8.89941 0.900391 -18.3994 -2.5 -18.3994 -12.5c0 -10.0996 5.7998 -112.8 5.7998 -112.8h-30s5.7998 102.7 5.7998 112.8c0 10 -9.59961 13.4004 -18.2998 12.5
+c-10.0996 -2.5 -105.3 -22.5996 -105.3 -22.5996s5 20.0996 10.0996 35.0996c4.90039 15 0 17.5 -5.09961 22.5c-2.60059 2.5 -105.2 92.4004 -105.2 92.4004l17.5 7.59961c10 4.90039 7.40039 11.4004 5 17.4004c-2.5 7.59961 -20.0996 67.2998 -20.0996 67.2998
+s47.5996 -10 57.6992 -12.5c7.5 -2.40039 10 2.5 12.5 7.5s15 32.2998 15 32.2998s52.6006 -59.7998 55.1006 -62.2998c10.0996 -7.5 20.0996 0 17.5996 10c0 10 -27.5996 129.6 -27.5996 129.6s30.0996 -17.3994 40.0996 -22.3994c7.60059 -5 12.6006 -5 17.6006 5
+c5 7.5 42.5 79.7998 42.5 79.7998s37.5996 -72.2998 42.6992 -79.7998c5 -10 10.1006 -10 17.6006 -5c10 5 40.0996 22.3994 40.0996 22.3994s-27.5996 -119.6 -27.5996 -129.6c-2.5 -10 7.59961 -17.5 17.5996 -10c2.5 2.40039 55.1006 62.2998 55.1006 62.2998
+s12.5 -27.3994 15 -32.3994s5 -9.90039 12.5 -7.5c10 2.5 57.6992 12.5 57.6992 12.5s-17.6992 -59.7002 -20.0996 -67.3008c-2.40039 -5.89941 -5 -12.5 5 -17.3994l17.5 -7.5s-102.7 -89.9004 -105.2 -92.4004z" />
+ <glyph glyph-name="centos" unicode="&#xf789;" horiz-adv-x="447"
+d="M289.6 350.2l31.6006 -31.7002l-76.2998 -76.5v108.2h44.6992zM127.2 318.5l31.5996 31.7002h44.7002v-108.2zM168.7 360.1l55.5 55.6006l55.5 -55.6006h-44.7002v-127.899l-10.7998 -10.7998l-10.7998 10.7998v127.899h-44.7002zM194.9 192l-10.8008 -10.7998h-128.6
+v-44.7998l-55.5 55.5996l55.5 55.5996v-44.7998h128.6zM274.2 212.7l76.2998 76.5l31.5996 -31.7002v-44.7998h-107.899zM447.5 192l-55.5 -55.5996v44.7998h-127.7l-10.7998 10.7998l10.7998 10.7998h127.7v44.7998zM65.4004 271.5v78.7002h79.3994l-31.5996 -31.7002
+l90.2998 -90.5v-15.2998h-15.2998l-90.2998 90.5zM382.1 350.2v-78.7002l-31.5996 31.7002l-90.2998 -90.5h-15.2998v15.2998l90.2998 90.5l-31.6006 31.7002h78.5zM203.5 33.7998v-0.0996094h-44.7002l-31.5996 31.7002l76.2998 76.5v-108.101zM65.4004 212.7v44.7998
+l32.5 31.7002l76.2998 -76.5h-108.8zM382.1 112.5v-78.7002h-78.5l31.6006 31.7002l-90.2998 90.5v15.2998h15.2998l90.2998 -90.5zM382.1 171.3v-44.7998l-31.5996 -31.7002l-76.2998 76.5h107.899zM321.2 65.5l-31.6006 -31.5996h-44.6992v108.1zM97.9004 94.7998
+l-32.5 31.7002v44.7998h108.8zM279.7 23.9004l-55.5 -55.6006l-55.5 55.6006h44.7002v127.899l10.7998 10.7998l10.7998 -10.7998v-127.899h44.7002zM113.2 65.5l31.5996 -31.7002h-79.3994v78.7002l32.5 -31.7002l90.2998 90.5h15.2998v-15.2998z" />
+ <glyph glyph-name="confluence" unicode="&#xf78d;" horiz-adv-x="512"
+d="M2.2998 35.7998c42.2998 66.9004 125.2 233.2 373.101 112.601c39.6992 -19.1006 83.6992 -39.9004 105.899 -50.3008c8 -3.69922 11.7002 -13.1992 8.10059 -21.2998l-50.4004 -114.1c-0.0996094 -0.100586 -0.0996094 -0.299805 -0.200195 -0.400391
+c-3.89941 -8.09961 -13.5996 -11.5996 -21.7002 -7.7002c-200.399 95.2002 -213.8 111.5 -280.899 -0.699219c0 0 -0.100586 -0.100586 -0.100586 -0.200195c-4.69922 -7.7002 -14.6992 -10 -22.3994 -5.2998l-105.9 65.1992c-7.59961 4.7002 -10 14.6006 -5.5 22.2002z
+M509.7 347.9c-42.6006 -67.5 -125.4 -232.9 -373.4 -112.9c-39.7002 19.2002 -83.7998 40 -106 50.4004c-8 3.69922 -11.7002 13.1992 -8.09961 21.2998l50.5 114.1c0.0996094 0.100586 0.0996094 0.299805 0.200195 0.400391
+c3.89941 8.09961 13.5996 11.5996 21.6992 7.7002c199.5 -94.7002 213.301 -111.7 280.601 0.899414c0.200195 0.400391 0.399414 0.700195 0.599609 1c5 7.5 15.1006 9.40039 22.6006 4.40039l105.8 -65.1006c7.59961 -4.69922 10 -14.5996 5.5 -22.1992z" />
+ <glyph glyph-name="dhl" unicode="&#xf790;" horiz-adv-x="640"
+d="M238 146.8l22.2998 30.2002h58.7002l-22.2998 -30.2002h-58.7002zM0 165.1h86.5l-4.7002 -6.39941h-81.7998v6.39941zM172.9 177h68.1992c-5.69922 -7.7998 -24.0996 -30.2998 -57.1992 -30.2998h-100.101l41.1006 55.7998h51c5.59961 0 5.59961 -2.2002 2.7998 -5.90039
+c-2.7998 -3.69922 -7.60059 -10.2998 -10.4004 -14.0996c-1.39941 -1.90039 -4.09961 -5.5 4.60059 -5.5zM490.4 183.9h-62.2002l39.2998 53.3994h62.2002zM95.2998 177l-4.7002 -6.40039h-90.5996v6.40039h95.2998zM206.3 203.6
+c2.7998 3.7002 2.90039 5.90039 -2.7002 5.90039h-111.399l20.3994 27.7998h117.9c29.9004 0 37.5996 -23.5996 29.2002 -35c-6.2002 -8.39941 -13.5 -18.3994 -13.5 -18.3994h-45.6006c-8.69922 0 -6 3.5 -4.59961 5.5c2.7998 3.7998 7.5 10.3994 10.2998 14.1992zM0 146.8
+v6.40039h77.7998l-4.7002 -6.40039h-73.0996zM323 146.8c0 0 22.2002 30.2002 22.2998 30.2002h58.7002l-22.2998 -30.2002h-58.7002zM545 146.7l4.7002 6.39941h90.2998v-6.39941h-95zM567.3 177h72.7002v-6.40039h-77.4004zM553.8 158.7l4.7002 6.39941h81.5v-6.39941
+h-86.2002zM389.6 237.3h58.7002l-39.2998 -53.3994h-143.6l39.2998 53.3994h58.7002l-22.5 -30.5996h26.1992zM423.1 177h133.4l-22.2998 -30.2998h-94.2998c-24.1006 0 -30.6006 11.5996 -23.2002 21.5996c2.09961 2.7998 6.39941 8.7002 6.39941 8.7002z" />
+ <glyph glyph-name="diaspora" unicode="&#xf791;" horiz-adv-x="461"
+d="M226.6 93.2998c-1.39941 0 -88 -119.899 -88.6992 -119.899c-0.700195 0 -86.6006 60.5 -86.9004 61.1992c-0.299805 0.800781 86.5996 125.7 86.5996 127.4c0 2.2002 -129.6 44 -137.6 47.0996c-1.2998 0.5 31.4004 101.801 31.7002 102.101
+c0.599609 0.700195 144.399 -47 145.5 -47c0.399414 0 0.899414 0.599609 1 1.2998c0.399414 2 1 148.6 1.7002 149.6c0.799805 1.2002 104.5 0.700195 105.1 0.300781c1.5 -1 3.5 -156.101 6.09961 -156.101c1.40039 0 138.7 47 139.301 46.2998
+c0.799805 -0.899414 31.8994 -102.199 31.5 -102.6c-0.900391 -0.900391 -140.2 -47.0996 -140.601 -48.7998c-0.299805 -1.40039 82.7998 -122.101 82.5 -122.9c-0.200195 -0.700195 -85.5 -63.5 -86.2998 -63.5c-1 0.200195 -89 125.5 -90.9004 125.5z" />
+ <glyph glyph-name="fedex" unicode="&#xf797;" horiz-adv-x="640"
+d="M586 163.5l54 -60.5h-64.4004l-22.2998 25l-22.0996 -25h-212.2v11.9004h-0.5c-7.90039 -11.7002 -20.7998 -18.6006 -34.9004 -18.6006c-32.6992 0 -56.3994 26.4004 -60.0996 56.9004h-85.5c0 -23.5 31.0996 -35.5 45.7998 -14.6006h42
+c-27.5996 -67.6992 -130.2 -49.3994 -130.2 23.7002c0 6.40039 0.800781 12.5 2.30078 18.2002h-48.9004v-77.5h-49v184.4h109v-41.1006h-60v-26.2002h54.7998v-24.1992c24.5 43.5996 103.9 45.3994 121.9 -14c7.5 25.5 28.8994 44.8994 57.2998 44.8994
+c13.9004 0 25.7998 -3.7998 35.4004 -14.7998h0.5v75.5h151.199v-48.0996h-56.0996v-16h118.7l22.5 -24.8008l21.7002 24.8008h62.3994zM139.3 180.1h46.5c-4.7998 25.6006 -40.3994 26.3008 -46.5 0zM292.7 131.2c34.5 0 32.5996 62.7998 0 62.7998
+c-34 0 -34.6006 -62.7998 0 -62.7998zM460.5 112.1v29.6006h-56.0996v44.7002h56.0996v28.0996h-55.5v33.9004h56.0996v30.1992h-95v-166.5h94.4004zM414.6 151.9h56.1006v-45.6006l50.7002 57l-50.7002 57v-44h-56.1006v-24.3994zM553.2 141.6l26.2998 -29.5h40.5
+l-46 51.4004l45.4004 51h-38.5l-25.6006 -29.2998l-26.5996 29.2998h-39.7002l45.5996 -51.2002l-45.5996 -51.2002h38.0996z" />
+ <glyph glyph-name="fedora" unicode="&#xf798;"
+d="M225 416c123.7 -0.299805 223.7 -100.9 223.4 -224.6c-0.300781 -123.7 -100.9 -223.7 -224.601 -223.4l-170.2 0.400391c-29.7998 0.0996094 -53.6992 24.0996 -53.5996 53.8994l0.400391 170.3c0.399414 123.7 100.899 223.7 224.6 223.4zM394.8 258.8
+c-0.200195 8 -1 15.2002 -2.7998 22.4004l-55.2002 56.0996v-1.59961c0 -5.10059 -1.5 -9.60059 -3.7998 -14.2998zM331 353.7c2 -2.90039 3.40039 -5.7998 4.2002 -9.2002l54.2998 -54.5996c-9.5 28.6992 -31 51.8994 -58.5 63.7998zM118.1 200.8
+c-5.59961 -0.399414 -11 -1.5 -16.1992 -2.89941l8.5 -8.5c2.09961 4.19922 4.59961 8 7.69922 11.3994zM97 196.6c-4.7002 -1.2998 -9.40039 -3.09961 -13.7002 -5.19922l27 -27.2002c-1.5 4 -2.39941 8 -2.39941 12.5l0.899414 8zM78.7998 189.2
+c-3.7998 -2.2002 -7.7998 -4.60059 -11.2002 -7.2002l35.3008 -35.9004c4.5 2.30078 9.39941 3.80078 14.0996 4.7002zM63.5996 179.4c-3.59961 -2.90039 -7.19922 -5.80078 -10.0996 -9.40039l34.9004 -34.5996c3.09961 3.09961 6.69922 6 10.5 8.5zM50.2998 167.1
+c-2.89941 -3.2998 -5.7998 -6.69922 -8.59961 -10.5l35.7998 -35.8994c2.2998 4.5 4.7998 8 7.7002 11.3994zM39.2998 152.8c-2.5 -3.7998 -4.7002 -7.7998 -6.7002 -12l39.5 -39.7998c0.400391 5.2998 1.60059 10.5 3.60059 15.2002zM30.5 136.5
+c-1.7998 -4.90039 -3.2998 -9.59961 -4.7002 -14.5l52.7002 -53.5c-3.7998 7.7998 -6.5 16.7998 -6.7002 26.2002zM22.5996 93.5c0 -7.59961 1.10059 -14.7998 2.90039 -21.9004l55.4004 -55.6992v1.09961c0 5.09961 1.39941 10.0996 3.59961 14.2998zM27.9004 62.7998
+c9.5 -28.7002 31 -51.7998 58.5 -63.7998c-2 2.90039 -3.30078 5.7998 -4.2002 9.2002zM22.5996 99.7998l64.4004 -64.2002c2.7002 3.30078 6.09961 6.5 9.90039 8.7002l-72.2002 72.5c-1.2002 -5.59961 -2.10059 -11.2002 -2.10059 -17zM275.9 151.6
+c32.5996 -0.0996094 32.6992 49.2002 0.199219 49.4004l-33.5996 0.0996094c-4.90039 0 -8.90039 4 -8.90039 9l0.100586 47c0.0996094 40.5 38.5996 60.8008 66 54.9004c15.3994 -3.90039 30.2998 8.40039 30.2998 23.9004c0 12.0996 -8.7002 22.1992 -19.9004 24
+c-6.7998 1.59961 -13.2998 2.2998 -20.1992 2.2998c-58.3008 0.200195 -105.4 -46.7998 -105.601 -105l-0.0996094 -56l-42.6006 0.0996094c-32.5996 0.100586 -32.6992 -49.2002 -0.0996094 -49.2998l33.5996 -0.0996094c4.40039 0 8.90039 -4.5 8.90039 -9l-0.0996094 -47
+c0 -31 -25.2002 -56 -56.1006 -55.9004c-9.39941 0 -9.39941 1.59961 -15.7002 1.59961c-13.1992 0 -24.5 -11.1992 -24.5 -24.5996c0 -15.5 14.2002 -24.2002 19.9004 -24.2002c61.2998 -12.8994 125.5 33.6006 125.7 102.9l0.0996094 56zM299.4 151.9
+c5.59961 0.5 11 1.59961 16.0996 2.89941l-8.5 8.5c-1.7998 -4.2002 -4.2002 -8 -7.59961 -11.3994zM320.4 156.1c4.7998 1.40039 9.5 3.10059 13.6992 5.30078l-27 27.1992c1.5 -4 2.40039 -8 2.40039 -12.5l-0.900391 -8.09961zM338.4 163.5
+c4 2.2002 8.09961 4.7002 11.8994 7.2002l-36.2002 35.8994c-4.09961 -2.2998 -8.7998 -3.59961 -13.6992 -4.69922zM353.9 173.3c3.59961 2.90039 6.89941 5.7998 10.0996 9l-34.9004 35c-3.19922 -3.2002 -6.69922 -6.09961 -10.5 -8.5zM367.1 185.6
+c2.90039 3.30078 5.90039 6.7002 8.60059 10.5l-35.7998 35.9004c-2.30078 -4.2002 -4.80078 -8.09961 -7.7002 -11.4004zM378.1 199.9c2.5 3.7998 4.80078 7.7998 6.7002 12l-39.5 39.7998c-0.399414 -5.2998 -1.59961 -10.5 -3.59961 -15.2002zM391.6 230.8
+l-53.0996 53.4004c4.2002 -7.7998 6.90039 -16.7998 7.09961 -26.2002l41.3008 -41.5c1.7998 4.7002 3.39941 9.40039 4.69922 14.2998zM392.6 236.4c1.30078 5.09961 2 11.1992 2.30078 16.5996l-64.3008 64.7002c-2.69922 -3.7998 -6.09961 -7 -9.89941 -9.2002z" />
+ <glyph glyph-name="figma" unicode="&#xf799;" horiz-adv-x="341"
+d="M256 277.3h-85.4004v-256c0 -47.0996 -38.1992 -85.2998 -85.2998 -85.2998c-47.0996 0 -85.2998 38.0996 -85.2998 85.2998s38.2002 85.4004 85.2998 85.4004c-47.0996 0 -85.2998 38.2002 -85.2998 85.2998s38.2002 85.2998 85.2998 85.4004
+c-47.0996 0 -85.2998 38.1992 -85.2998 85.2998c0 47.0996 38.2002 85.2998 85.2998 85.2998h170.7c47.0996 0 85.2998 -38.2998 85.2998 -85.4004c0 -47.0996 -38.2002 -85.2998 -85.2998 -85.2998zM256 277.3c47.0996 0 85.2998 -38.2002 85.2998 -85.2998
+s-38.2002 -85.2998 -85.2998 -85.2998s-85.2998 38.2002 -85.2998 85.2998s38.2002 85.2998 85.2998 85.2998z" />
+ <glyph glyph-name="intercom" unicode="&#xf7af;"
+d="M392 416c30.9004 0 56 -25.0996 56 -56v-336c0 -30.9004 -25.0996 -56 -56 -56h-336c-30.9004 0 -56 25.0996 -56 56v336c0 30.9004 25.0996 56 56 56h336zM283.7 333.9v-199.5c0 -19.8008 29.8994 -19.8008 29.8994 0v199.5c0 19.7998 -29.8994 19.7998 -29.8994 0z
+M209.1 341.4v-216.5c0 -19.8008 29.9004 -19.8008 29.9004 0v216.5c0 19.7998 -29.9004 19.7998 -29.9004 0zM134.4 333.9v-199.5c0 -19.8008 29.8994 -19.8008 29.8994 0v199.5c0 19.7998 -29.8994 19.7998 -29.8994 0zM59.7002 304v-134.3
+c0 -19.7998 29.8994 -19.7998 29.8994 0v134.3c0 19.7998 -29.8994 19.7998 -29.8994 0zM383.1 76.2002c14.9004 12.8994 -4.5 35.5996 -19.3994 22.7002c-63.2002 -53.9004 -213.4 -55.3008 -279.3 0c-15 12.7998 -34.4004 -9.90039 -19.4004 -22.7002
+c76.4004 -65.4004 245.3 -63 318.1 0zM388.3 169.7v134.3c0 19.7998 -29.8994 19.7998 -29.8994 0v-134.3c0 -19.7998 29.8994 -19.7998 29.8994 0z" />
+ <glyph glyph-name="invision" unicode="&#xf7b0;"
+d="M407.4 416c22.3994 0 40.5996 -18.2002 40.5996 -40.5996v-366.801c0 -22.3994 -18.2002 -40.5996 -40.5996 -40.5996h-366.801c-22.3994 0 -40.5996 18.2002 -40.5996 40.5996v366.801c0 22.3994 18.2002 40.5996 40.5996 40.5996h366.801zM176.1 302.4
+c-0.599609 35.0996 -53.5996 34.7998 -53.6992 -0.400391c0 -15 12.1992 -27 27.0996 -27c4.2002 0.0996094 27 4 26.5996 27.4004zM332.8 71c23.7998 0 42.7002 15.2998 53.2002 52l-17.9004 6.7002c-14.2998 -39.5 -31.7998 -32.4004 -31.7998 -16.9004
+c0.299805 8.10059 0.700195 7.7002 14.9004 58.7998c26.0996 85.8008 -61.2998 113.5 -101.8 38l8.89941 40.5h-68.7998l-9.7002 -35.5996h32.2998l-19.7998 -79.4004c-16.5 -36.6992 -57.3994 -44.0996 -57.3994 -23.1992c0.299805 11.2998 -0.700195 4.5 32.8994 138.199
+h-76.3994l-9.7002 -35.5996h31.7998c-22.0996 -90.0996 -22.9004 -89.7998 -23 -104.1c0 -48.7002 63.0996 -56.1006 94.5996 -4.30078l-8.09961 -32.5h45.0996l25.8008 103.301c14.6992 59.6992 74 47.0996 59.8994 0.699219c-9.09961 -32.5996 -40.5996 -106.6 25 -106.6z
+" />
+ <glyph glyph-name="jira" unicode="&#xf7b1;" horiz-adv-x="496"
+d="M490 206.3c8 -7.89941 8 -20.7002 0 -28.5996c-225.8 -225 137.9 136.3 -241.5 -241.7c-180.7 180.1 -109.7 109.3 -242.5 241.6c-7.90039 8 -7.90039 20.8008 0 28.7002c0 0 77 76.7998 242.5 241.7c72.0996 -71.7998 168.6 -169 241.5 -241.7zM248.5 116.3l76 75.7002
+l-76 75.7002l-76 -75.7002z" />
+ <glyph glyph-name="mendeley" unicode="&#xf7b3;" horiz-adv-x="640"
+d="M624.6 122.8c23.1006 -22.7002 17.8008 -73.5 0 -88.2998c-36.1992 -38.9004 -100 -18.2002 -104.899 35.2002c-1 11.7002 1.09961 23 5.7002 33c47.2998 103.7 -185.9 106.1 -146.5 8.2002c0.0996094 -0.100586 0.199219 -0.200195 0.299805 -0.400391
+c26.5996 -42.5996 -6.7002 -97.2998 -58.7998 -95.2002c-52 -2.2002 -85.6006 52.4004 -58.8008 95.2002c0.100586 0.200195 0.200195 0.299805 0.300781 0.400391c39.3994 97.8994 -193.801 95.5 -146.5 -8.2002c20.3994 -44.9004 -14.1006 -93.7002 -61.2002 -87.7998
+c-61.9004 7.7998 -62.5 82.8994 -42.6006 102.6c16 16 31.8008 24.7998 53 22.5c43.3008 1 49.7002 34.9004 37.5 98.7998c-22.6992 57.5 14.5 131.601 87.4004 130.8c76.9004 -0.699219 82.7998 -82 130.9 -82c49.1992 0 53.5 81.3008 130.899 82
+c72.5 0.700195 110.2 -73.2998 87.4004 -130.8c-12.2002 -63.8994 -5.7998 -97.7998 37.5 -98.7998c18.7002 2 36.0996 -4.7998 48.3994 -17.2002zM320.7 141.9c43.5996 0 62.7998 37.7998 62.7998 62.7998c0 34.7002 -28.0996 62.7998 -62.7998 62.7998h-0.600586
+c-34.5996 0 -62.7998 -28.0996 -62.7998 -62.7998c0 -25.2998 19.4004 -62.7998 62.7998 -62.7998h0.600586z" />
+ <glyph glyph-name="raspberry-pi" unicode="&#xf7bb;" horiz-adv-x="406"
+d="M372 215.5c28.7002 -17.2002 54.5996 -72.5996 14 -117.7c-2.59961 -14.0996 -7.09961 -24.2002 -11 -35.3994c-5.90039 -45.2002 -44.4004 -66.3008 -54.5996 -68.8008c-14.9004 -11.1992 -30.7002 -21.7998 -52.2002 -29.1992
+c-20.2002 -20.6006 -42.1006 -28.4004 -64.2002 -28.4004h-1c-22 0 -44 7.7998 -64.2998 28.4004c-21.4004 7.39941 -37.2998 18 -52.2002 29.1992c-10.0996 2.5 -48.7002 23.6006 -54.5996 68.8008c-3.90039 11.1992 -8.40039 21.2998 -11 35.3994
+c-40.5 45 -14.6006 100.5 14.1992 117.7l3.7002 6.5c-0.0996094 46.4004 21.4004 65.2998 46.5 79.7002c-7.59961 2 -15.3994 3.7002 -17.5996 13.2002c-13.1006 3.39941 -15.7998 9.39941 -17.1006 15.7998c-3.39941 2.2998 -14.7998 8.7002 -13.5996 19.7002
+c-6.2998 4.39941 -9.90039 10.0996 -8.09961 18.0996c-6.90039 7.5 -8.7002 13.7002 -5.80078 19.4004c-8.2998 10.1992 -4.59961 15.5 -1.09961 20.8994c-6.2002 11.2002 -0.799805 23.2002 16.5 21.2002c6.90039 10.0996 21.9004 7.7998 24.2002 7.7998
+c2.5 3.2998 6 6 16.5 4.7002c6.7998 6.09961 14.3994 5.09961 22.2998 2.09961c3.2002 2.5 6.09961 3.40039 8.7002 3.5c5 0.100586 9.2998 -2.89941 13.8994 -4.2998c11.3008 3.60059 13.8008 -1.39941 19.4004 -3.39941c12.2998 2.59961 16.0996 -3 22 -8.90039
+l6.90039 0.0996094c18.5996 -10.7998 27.7998 -32.7998 31.0996 -44.0996c3.2998 11.2998 12.5 33.2998 31.0996 44.0996l6.90039 -0.0996094c5.90039 5.90039 9.7002 11.5 22 8.90039c5.5 2.09961 8.09961 7 19.4004 3.39941
+c7.09961 2.2002 13.2998 8.10059 22.5996 0.799805c7.90039 2.90039 15.5 4 22.2998 -2.09961c10.5 1.2998 13.9004 -1.5 16.5 -4.7002c2.2998 0 17.2998 2.2998 24.2002 -7.7998c17.2998 2 22.7998 -10 16.5996 -21.2002c3.5 -5.2998 7.2002 -10.5996 -1.09961 -20.8994
+c2.90039 -5.7002 1.09961 -11.9004 -5.7998 -19.4004c1.89941 -8 -1.7002 -13.7002 -8.10059 -18.0996c1.2002 -11 -10.1992 -17.4004 -13.5996 -19.7002c-1.2998 -6.40039 -4 -12.5 -17.0996 -15.7998c-2.2002 -9.60059 -10 -11.2002 -17.6006 -13.2002
+c25.1006 -14.4004 46.6006 -33.2998 46.5 -79.7002zM349.8 223.5c1.5 48.7002 -36.3994 75.4004 -82.0996 67.9004c-16.7998 -2.80078 80.5996 -86.6006 82.0996 -67.9004zM306.8 130.4c24.5 15.7998 28.9004 51.5996 9.90039 80
+c-19 28.3994 -54.2998 38.5996 -78.7998 22.7998s-28.9004 -51.6006 -9.90039 -80c19 -28.4004 54.2998 -38.6006 78.7998 -22.7998zM238.9 418.7c-16.6006 -15.9004 -40.1006 -55.9004 -5.80078 -71.7998c29 23.5 63.6006 40.6992 102 53.5
+c-49.2998 -25.1006 -78 -45.3008 -93.6992 -62.6006c8.09961 -31.7002 50 -33.2002 65.3994 -32.2998c-3.09961 1.40039 -5.7998 3.09961 -6.7002 5.7998c3.80078 2.7002 17.5 0.299805 27 5.60059c-3.69922 0.699219 -5.39941 1.39941 -7.09961 4.09961
+c9 2.90039 18.7002 5.2998 24.4004 10c-3.10059 0 -6 -0.599609 -10 2.09961c8.09961 4.30078 16.6992 7.7002 23.3994 14.2002c-4.2002 0.100586 -8.7002 0.100586 -10 1.60059c7.40039 4.5 13.6006 9.5 18.7998 15c-5.7998 -0.700195 -8.2998 -0.100586 -9.69922 0.899414
+c5.59961 5.60059 12.5996 10.4004 16 17.2998c-4.40039 -1.5 -8.30078 -2.09961 -11.2002 0.100586c1.89941 4.2998 10 6.7002 14.7002 16.5996c-4.60059 -0.399414 -9.40039 -1 -10.4004 0c2.09961 8.5 5.7002 13.2002 9.2998 18.2002
+c-9.7998 0.200195 -24.5996 0 -23.8994 0.799805l6 6.10059c-9.5 2.5 -19.3008 -0.400391 -26.4004 -2.60059c-3.2002 2.5 0 5.60059 3.90039 8.7998c-8.10059 -1 -15.5 -2.89941 -22.1006 -5.39941c-3.59961 3.09961 2.2998 6.2998 5.10059 9.39941
+c-12.5 -2.2998 -17.8008 -5.59961 -23.1006 -8.89941c-3.7998 3.59961 -0.200195 6.7002 2.40039 9.7998c-9.40039 -3.5 -14.2998 -7.90039 -19.4004 -12.2998c-1.7998 2.2998 -4.39941 4 -1.2002 9.59961c-6.69922 -3.7998 -11.7998 -8.2998 -15.5 -13.2998
+c-4.19922 2.59961 -2.5 6.09961 -2.5 9.40039c-7 -5.60059 -11.3994 -11.5 -16.7998 -17.3008c-1.09961 0.800781 -2.09961 3.40039 -2.89941 7.60059zM204.5 304.1c-27.2002 0.700195 -53.4004 -19.8994 -53.4004 -31.7998
+c-0.0996094 -14.5996 21.5 -29.3994 53.6006 -29.7998c32.7998 -0.200195 53.7002 11.9004 53.7998 26.9004c0.0996094 16.8994 -29.7998 35 -54 34.6992zM81.5 316.9c9.59961 -5.2002 23.2002 -2.80078 27.2002 -5.60059
+c-0.900391 -2.59961 -3.60059 -4.39941 -6.7002 -5.7998c15.4004 -0.900391 57.4004 0.5 65.4004 32.2998c-15.7002 17.2998 -44.4004 37.6006 -93.7002 62.6006c38.3994 -12.8008 73 -30 102 -53.5c34.0996 15.8994 10.5996 55.8994 -6 71.7998
+c-0.900391 -4.2002 -1.7998 -6.7998 -2.90039 -7.60059c-5.39941 5.80078 -9.7998 11.7002 -16.7998 17.3008c0 -3.2002 1.59961 -6.80078 -2.5 -9.40039c-3.7002 5 -8.7998 9.5 -15.5 13.2998c3.2002 -5.59961 0.5 -7.2998 -1.2002 -9.59961
+c-5.09961 4.39941 -10 8.89941 -19.3994 12.2998c2.59961 -3.09961 6.19922 -6.2002 2.39941 -9.7998c-5.2998 3.2998 -10.5996 6.59961 -23.0996 8.89941c2.7998 -3.09961 8.59961 -6.2998 5.09961 -9.39941c-6.7002 2.5 -14 4.2998 -22.0996 5.39941
+c3.7998 -3.19922 7.09961 -6.2998 3.89941 -8.7998c-7.09961 2.2002 -16.8994 5.10059 -26.3994 2.60059l6 -6.10059c0.700195 -0.799805 -14.1006 -0.700195 -23.9004 -0.799805c3.5 -4.90039 7.2002 -9.7002 9.2998 -18.2002c-1 -1 -5.7998 -0.399414 -10.3994 0
+c4.7002 -9.89941 12.7998 -12.3994 14.7002 -16.5996c-2.90039 -2.10059 -6.90039 -1.60059 -11.2002 -0.100586c3.2998 -6.89941 10.3994 -11.6992 16 -17.2998c-1.40039 -1 -3.90039 -1.59961 -9.7002 -0.899414c5.2002 -5.5 11.4004 -10.5 18.7998 -15
+c-1.2998 -1.60059 -5.7998 -1.5 -10 -1.60059c6.7002 -6.5 15.2998 -9.89941 23.4004 -14.2002c-4 -2.7998 -6.90039 -2.09961 -10 -2.09961c5.7002 -4.7002 15.3994 -7.2002 24.3994 -10c-1.69922 -2.59961 -3.39941 -3.40039 -7.09961 -4.09961zM141.6 292.8
+c-45.5996 7.60059 -83.5996 -19.2002 -82.0996 -67.8994c1.5 -18.6006 98.9004 65.0996 82.0996 67.8994zM38.2002 106c21.5996 -9.5 39.0996 105.3 12.5996 98.2998c-43.8994 -24.5996 -36.2998 -79.5 -12.5996 -98.2998zM129.2 7.7998
+c14.0996 10.4004 6.39941 45.7002 -10.5 65.7002c-19.4004 22.2002 -44.6006 35.4004 -60.9004 25.5996c-10.8994 -8.19922 -12.8994 -36 2.60059 -63.3994c23 -32.6006 55.5 -35.7998 68.7998 -27.9004zM102.8 127.5c24.4004 -15.7998 59.7002 -5.59961 78.7998 22.7998
+c19 28.4004 14.6006 64.2002 -9.89941 80s-59.7998 5.60059 -78.7998 -22.7998s-14.6006 -64.2002 9.89941 -80zM205 -48c28.4004 -0.5 57.7002 24.4004 57.2002 35.4004c-0.100586 11.3994 -32 19.8994 -55.7002 18.8994c-23.5 0.600586 -59.7002 -9.2998 -59.2998 -22
+c-0.400391 -8.59961 28.3994 -33.5 57.7998 -32.2998zM263.9 76.9004v0.599609c-0.200195 29.5996 -26.8008 53.5996 -59.4004 53.4004c-32.5996 -0.200195 -59 -24.3008 -58.7998 -54v-0.600586c0.200195 -29.5996 26.7998 -53.5996 59.3994 -53.3994
+c32.6006 0.199219 59 24.2998 58.8008 54zM346.1 34.2002c18.4004 23.2002 12.2002 62 1.7002 72.2998c-15.5996 11.9004 -38 -3.2998 -58.8994 -26.5996c-18.3008 -21.1006 -28.4004 -59.6006 -15.1006 -72c12.7002 -9.60059 47 -8.30078 72.2998 26.2998zM369 107.4
+c23.7002 18.6992 31.2998 73.5996 -12.5996 98.2998c-26.4004 7 -8.90039 -107.7 12.5996 -98.2998z" />
+ <glyph glyph-name="redhat" unicode="&#xf7bc;" horiz-adv-x="512"
+d="M312.4 46.7998c1.2998 -1.2998 3.59961 -5.59961 0.799805 -11.0996c-1.60059 -2.90039 -3.2002 -4.90039 -6.2002 -7.2998c-3.59961 -2.90039 -10.5996 -6.2002 -20.2998 -0.100586c-5.2002 3.2998 -5.5 4.40039 -12.7002 3.40039
+c-5.09961 -0.700195 -7.09961 4.5 -5.2998 8.7998c1.89941 4.2998 9.39941 7.7002 18.7998 2.2002c4.2002 -2.5 10.7998 -7.7002 16.5996 -3.10059c2.40039 1.90039 3.80078 3.2002 7.2002 7c0.299805 0.5 0.799805 0.5 1.10059 0.200195zM256 418.3
+c141.4 0 256 -114.6 256.3 -255.899c0 -51.3008 -15.0996 -99 -41.0996 -139.101c-34.4004 7.90039 -63.2998 1.5 -77.7002 -4.5c-1.40039 -0.700195 -2.2998 -2.09961 -2.2998 -3.7002c0 -1.89941 1.09961 -2.09961 0.599609 -4.7998
+c-0.700195 -3.09961 -4.2002 -6.09961 -13 -8.09961c-17.3994 -3.7998 -28.3994 -21.9004 -34.7002 -27.7998c-6.5 -6.10059 -23.2998 -10.2002 -25.2998 -8.30078c-0.299805 0.300781 -0.299805 0.700195 0.100586 1.2002c2.5 3.2998 11.8994 13.5 17.5996 24.5
+c5.2002 9.90039 9.7002 12.7002 16 22.1006c1.90039 2.69922 9 12.5 11.0996 20.0996c2.30078 7.59961 1.5 16.9004 2.40039 20.7998c1.2002 5.60059 6.5 17.7002 6.90039 24.5c0.199219 3.90039 -16.1006 -5.5 -23.9004 -5.5s-15.4004 4.7002 -22.2998 5
+c-8.60059 0.400391 -14.1006 -6.59961 -21.9004 -5.39941c-4.39941 0.699219 -8.2002 4.59961 -15.8994 4.89941c-11.1006 0.400391 -24.6006 -6.09961 -49.9004 -5.2998c-24.9004 0.799805 -48 31.5 -51.0996 36.4004c-3.7002 5.69922 -8.2002 5.69922 -13.1006 1.19922
+c-4.89941 -4.39941 -11 -0.899414 -12.7002 2.10059c-2.39941 4.2998 -8 14.7998 -16.2998 21.2002c-8.7002 6.7998 -17.5 6.09961 -21.5 5c-10.5 -2.80078 -15.5996 -13.6006 -14.7002 -22.7002c0.900391 -9.2998 5.60059 -13.5 9.5 -17.7998
+c6.30078 -7.2002 6.10059 -14.1006 15.8008 -18.2002c5 -2.10059 6.69922 -5.10059 5.2998 -9.10059c-1.2998 -3.5 -6.40039 -4.2998 -9.7998 -4.5c-8.30078 -0.399414 -15.1006 3.5 -15.8008 3.90039c-12 7.59961 -11.1992 22.9004 -30.2998 22.9004
+c-3.7998 0 -7.2998 -1 -10.3994 -2.60059c-12.4004 -6.39941 -27 -10.2002 -42.8008 -10.2002h-17.7998c-8.59961 25.7002 -13.2998 53.2002 -13.2998 81.8008c0 141.3 114.6 255.899 256 255.899zM464.2 167.5c5.5 30.5996 -20.2002 53.2002 -63.9004 63.0996
+c-2.5 -8.5 -6.09961 -19.3994 -22.0996 -27.5996c-2.40039 -1.2002 -3.2998 0.799805 -2.2002 2.59961c6.09961 10.3008 7.09961 12.9004 8.90039 16.9004c2.5 6 3.7998 14.4004 -1.10059 32c-9.59961 34.5996 -29.7002 80.9004 -44.2998 95.9004
+c-14.0996 14.5 -39.5996 18.5996 -62.7002 12.6992c-8.39941 -2.19922 -25.0996 -10.7998 -55.8994 -3.89941c-53.3008 12 -61.2002 -14.7002 -64.3008 -26.2998c-3 -11.6006 -10.3994 -44.7002 -10.3994 -44.7002c-2.40039 -13.5 -5.7002 -36.9004 77.2002 -52.7002
+c62.6992 -12 30.8994 -31.7998 55.6992 -48.4004c5.5 -3.59961 0 -6.69922 -6.09961 -7.2998c-16.4004 -1.7002 -77.0996 15.7002 -113.1 36.1006c-29.4004 17.8994 -29.9004 34.0996 -23.2002 47.7998c-44.4004 4.7998 -77.7998 -4.2002 -83.7998 -25.2002
+c-10.3008 -36.0996 79.3994 -97.7002 181.6 -128.6c107.2 -32.4004 217.4 -9.80078 229.7 57.5996zM229.6 313c19.9004 0 34.4004 -14.5 6.5 -16.5996c-29.5996 -2.10059 -32.6992 -5.30078 -38.1992 -11.2002c-7.80078 -8.2998 -18.1006 10.7998 -18.1006 10.7998
+c-6.2002 1.2998 -13.5996 11.2002 -9.59961 20.5c4 9.2002 11.2998 6.5 13.5996 3.59961c11.4004 -14.0996 19.5 -7.09961 45.7998 -7.09961zM290.7 353.2c27 2.7998 39.5996 -7.7002 39.3994 -14.1006c-0.199219 -7.59961 -13.6992 -13.5 -30.0996 -13.1992
+c-5.2998 0.0996094 -10.2998 0.899414 -14.5996 2.09961c-1.2002 0.299805 -1.2002 1.90039 0 2.2002c30.5996 7 15.8994 20.5 -18.3008 15.0996c-1.19922 0 -1.19922 1.2002 -0.5 1.5c5.80078 3 14.3008 5.40039 24.1006 6.40039z" />
+ <glyph glyph-name="sketch" unicode="&#xf7c6;" horiz-adv-x="494"
+d="M18.5 285.8l78.9004 105.8l-6.90039 -130.699h-90.5zM387.3 402.3l7.10059 -133.5l-135.7 147.2zM103.2 229.7l122.6 -239.7l-224.899 261.7h91.0996zM105.2 260.9l142.1 154.1l60.4004 -65.5996l81.5 -88.5h-284zM402.5 251.8v-0.0996094h90.9004l-224.801 -261.7z
+M406.4 379l87.8994 -118.1h-90.2998l-6.09961 113.399l-0.900391 17.2998zM104.5 354.5l2.59961 47.7998l128.601 13.7002l-135.8 -147.1zM392.2 251.8l-144.9 -283.8l-102.7 200.9l-42.3994 82.8994h290z" />
+ <glyph glyph-name="sourcetree" unicode="&#xf7d3;" horiz-adv-x="406"
+d="M406.2 245c-0.100586 -88.0996 -57 -166.1 -140.7 -193.1v-101.601c0 -7.89941 -6.40039 -14.2998 -14.2998 -14.2998h-96.4004c-7.89941 0 -14.2998 6.40039 -14.2998 14.2998v101.7c-83.9004 27.0996 -140.6 105.2 -140.5 193.4
+c0.200195 112 91.0996 202.8 203.2 202.6c112.1 0 203 -90.9004 203 -203zM134.6 245c0 -91 137.2 -89.9004 137.301 0c0 90.7998 -137.301 90.7998 -137.301 0z" />
+ <glyph glyph-name="suse" unicode="&#xf7d6;" horiz-adv-x="631"
+d="M467.1 345.2c0 1 1 0.799805 0.900391 0.700195c4.2002 -0.600586 92.2002 -13.7002 129.2 -35.3008c12.0996 -6.89941 20.7002 -21.5 34.5 -69.5996c0.700195 -2.7998 -2.7998 -5.09961 -3.60059 -5.7002c-26.8994 -18.7998 -56.7998 -36.5996 -145.199 21.7002
+c11.0996 -26.7002 10.6992 -25.0996 11 -25.5c5.39941 -2.7998 56.2998 -28.7002 81.5996 -28.2002c20.4004 0.400391 42.2002 10.4004 50.9004 15.6006c0 0 4.5 2.69922 3.19922 -2.40039c-0.5 -1.90039 -3.2998 -14.5996 -7 -17.7002c-1 -1 -36.5996 -28.3994 -103.1 -28
+c-42.7002 1 -76.7998 16.4004 -82.4004 -17.3994c-2.7998 -17.1006 6.10059 -37.5 18.9004 -55.8008h-48.5996c-15.9004 28.4004 -23.1006 68.3008 -79.8008 68.3008c-51.5996 0 -48.3994 -50.7002 -42.6992 -68.3008h-45.9004
+c-18.7998 68.8008 -69.2002 98.6006 -102.4 104.301c-74.2998 12.7998 -113 -49.2002 -87 -98.4004c23.1006 -43.5996 81.4004 -44.0996 99.7002 -20.4004c19.2002 25 3.7998 62.9004 -28.5 65.7002c-12.2998 1.10059 -25.2998 -4.59961 -25.5 -16.8994v-0.100586
+c0 -10.8994 10 -13.7998 12 -14c7.2998 0.400391 8.90039 1.90039 11.7998 2.40039c5.2002 0.899414 16.8008 -1.7998 16.8008 -13.9004c0 -8.39941 -6.80078 -13.0996 -13.3008 -14.8994c-5.69922 -1.90039 -10.7998 -2.7002 -15.7998 -2.7002
+c-17.2002 0 -41.2002 14.7998 -41.5 42.5996c-0.200195 15.7002 7.7002 30 21.6006 39.2002c15.8994 10.5 43.5996 14.0996 69.1992 -1.59961c31.5 -19.3008 39.7002 -57.1006 31.5 -82.9004c-11.8994 -37.2998 -45.5996 -57.2002 -90 -53.2998
+c-31.5 2.7998 -60.6992 19.3994 -78.0996 44.3994c-7.7002 11.1006 -13.4004 23.4004 -16.5 35.7002c-7.09961 28.1006 0 58.1006 5.5 71.4004c25.7002 63.5996 80.2002 97.2998 99.5996 109.7c100.2 61.5996 171.101 59.0996 223 57.2998
+c61.3008 -2.2002 126.601 -23.2998 135.7 -26.2998c0 2 0.299805 20.2998 0.299805 20.2998zM527.5 272.4c-1.09961 -33.5 39.0996 -51.6006 63.2998 -28.9004c24.4004 22.9004 9 64 -24.3994 65.2002c-21.4004 0.700195 -38.3008 -16.1006 -38.9004 -36.2998zM566.1 297.7
+c14.5 -0.600586 25.9004 -12.7002 25.4004 -27.2002c-0.5 -14.2002 -12.4004 -25.7998 -27.2002 -25.2998c-14.5 0.5 -25.7998 12.7002 -25.2998 27.2002c0.400391 14.0996 12.2998 25.7998 27.0996 25.2998zM570.4 268.9c15.3994 0 15.3994 15.5996 0 15.5996
+c-15.4004 0 -15.4004 -15.5996 0 -15.5996z" />
+ <glyph glyph-name="ubuntu" unicode="&#xf7df;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM300.7 347c-8.7998 -15.2002 -3.60059 -34.7002 11.7002 -43.5996c15.1992 -8.80078 34.6992 -3.5 43.5 11.6992c8.7998 15.3008 3.59961 34.8008 -11.7002 43.6006
+c-15.2002 8.7998 -34.7002 3.5 -43.5 -11.7002zM87.4004 160.1c17.5996 0 31.8994 14.3008 31.8994 31.9004s-14.2998 31.9004 -31.8994 31.9004c-17.6006 0 -31.9004 -14.3008 -31.9004 -31.9004s14.2998 -31.9004 31.9004 -31.9004zM115.5 157
+c8.7002 -32.7002 29.0996 -60.7002 56.5 -79l23.7002 39.5996c-51.5 36.3008 -51.5 112.5 0 148.801l-23.7002 39.5996c-27.4004 -18.4004 -47.9004 -46.2998 -56.5 -79.0996c22.4004 -18 22.2998 -52 0 -69.9004zM344.2 25.2998
+c15.2998 8.7998 20.5 28.2998 11.7002 43.6006c-8.80078 15.2998 -28.3008 20.5 -43.5 11.6992c-15.3008 -8.7998 -20.5 -28.2998 -11.7002 -43.5996s28.2002 -20.5 43.5 -11.7002zM344.5 94.7998c23 22.9004 38 53.9004 40.2998 88.4004l-46.0996 0.700195
+c-5.5 -62.7002 -71.9004 -100.9 -128.9 -74.4004l-22.5 -40.2998c47.7998 -23.7002 91.5 -10.7998 96.7002 -9.40039c4.40039 28.4004 33.7998 45.2998 60.5 35zM338.6 200.1l46 0.600586c-2.19922 34.5996 -17.0996 65.5996 -40.1992 88.5
+c-26.7002 -10.2998 -56.1006 6.7002 -60.5 35c-5.2002 1.39941 -48.8008 14.2998 -96.7002 -9.40039l22.5 -40.2998c57.5996 26.7998 123.5 -12.4004 128.899 -74.4004z" />
+ <glyph glyph-name="ups" unicode="&#xf7e0;" horiz-adv-x="375"
+d="M99.2002 145.4v123h32.5996v-141.601c-17.7002 -12.0996 -97.8994 -35.8994 -97.8994 39v102.601h32.6992v-104c0 -32.1006 27.4004 -22.6006 32.6006 -19zM0 373.6c93.5996 49.6006 259.1 61.6006 375.4 0v-220.899c0 -103.9 -75.3008 -135.2 -187.7 -184.101
+c-112.8 48.9004 -187.7 80.4004 -187.7 184.101v220.899zM358.1 152.7v216.2c-109.3 10.1992 -238.6 4 -340.899 -89.8008v-126.399c0 -86.6006 53 -113.5 170.5 -165.3c117.2 51.6992 170.399 78.6992 170.399 165.3zM148.5 260.1
+c23.7002 15.3008 104.4 31.8008 104.4 -65.5996c0 -75.9004 -47.3008 -85.7998 -71.7002 -78.5v-68.7002h-32.7002v212.8zM181.2 142.8c2 -0.799805 38.3994 -16.8994 38.3994 51c0 62 -30 53.5 -38.3994 49.2998v-100.3zM260.3 229.2
+c-0.200195 41.0996 51.2002 53.8994 79.7002 31.8994v-28.3994c-17.9004 18.2998 -47.9004 18.0996 -48.5 -2.2002c-0.700195 -26.5996 55.0996 -21.7002 53.4004 -73.2002c-1.30078 -41.5996 -47 -55 -83.2002 -33.5v30.1006
+c20.3994 -18.1006 51.3994 -18.6006 50.7998 4.89941c-0.599609 27.9004 -52.0996 23.1006 -52.2002 70.4004z" />
+ <glyph glyph-name="usps" unicode="&#xf7e1;" horiz-adv-x="522"
+d="M433.3 206.2c-1.39941 -2.2002 -4.2998 -4.40039 -3.7998 0.0996094c2.90039 11.6006 13.9004 30.5 4.40039 32c-17.8008 3.10059 -88.1006 -4.5 -88.1006 0c0 2.40039 26.7002 3.10059 37.9004 8.7002c9.39941 9.40039 10.7998 8 10.8994 8h27
+c26.9004 0 37.5 -7.5 11.7002 -48.7998zM321.2 183.6c-62.2998 -21.1992 -124.8 -54.1992 -321.2 -151.5l52.4004 245.5c185 0 335.199 5.60059 337.899 -22.5996h-196.6l30.7002 -93.2998c35 16.7998 120.899 51.7002 172.6 53.8994
+c21.7998 0.800781 30.5996 -2.89941 26.2002 -5.7998c-4.7002 -2.2998 -38 -4.89941 -102 -26.2002zM67.7002 351.9h454.3l-67.7002 -319.7h-423.1s402 157.3 406.399 160.2c0 0 35.7002 48 14.6006 67.6992c-6.5 6.60059 -16 6.60059 -52.4004 6.60059
+c-2.09961 19.5 -78.7998 31.5 -332.1 85.2002z" />
+ <glyph glyph-name="yarn" unicode="&#xf7e3;" horiz-adv-x="496"
+d="M393.9 102.8c-39 -9.2998 -48.4004 -32.0996 -104 -47.3994c0 0 -2.7002 -4 -10.4004 -5.80078c-13.4004 -3.2998 -63.9004 -6 -68.5 -6.09961c-12.4004 -0.0996094 -19.9004 3.2002 -22 8.2002c-6.40039 15.2998 9.2002 22 9.2002 22
+c-8.10059 5 -9 9.89941 -9.7998 8.09961c-2.40039 -5.7998 -3.60059 -20.0996 -10.1006 -26.5c-8.7998 -8.89941 -25.5 -5.89941 -35.2998 -0.799805c-10.7998 5.7002 0.799805 19.2002 0.799805 19.2002s-5.7998 -3.40039 -10.5 3.59961
+c-6 9.2998 -17.0996 37.2998 11.5 62c-1.2998 10.1006 -4.59961 53.7002 40.6006 85.6006c0 0 -20.6006 22.7998 -12.9004 43.2998c5 13.3994 7 13.2998 8.59961 13.8994c5.7002 2.2002 11.3008 4.60059 15.4004 9.10059c20.5996 22.2002 46.7998 18 46.7998 18
+s12.4004 37.7998 23.9004 30.3994c3.5 -2.2998 16.2998 -30.5996 16.2998 -30.5996s13.5996 7.90039 15.0996 5c8.2002 -16 9.2002 -46.5 5.60059 -65.0996c-6.10059 -30.6006 -21.4004 -47.1006 -27.6006 -57.5c-1.39941 -2.40039 16.5 -10 27.8008 -41.3008
+c10.3994 -28.5996 1.09961 -52.6992 2.7998 -55.2998c0.799805 -1.39941 13.7002 -0.799805 36.3994 13.2002c12.8008 7.90039 28.1006 16.9004 45.4004 17c16.7002 0.5 17.5996 -19.2002 4.90039 -22.2002zM496 192c0 -136.9 -111.1 -248 -248 -248s-248 111.1 -248 248
+s111.1 248 248 248s248 -111.1 248 -248zM416.7 116.8c-1.7002 13.6006 -13.2002 23 -28 22.7998c-22 -0.299805 -40.5 -11.6992 -52.7998 -19.1992c-4.80078 -3 -8.90039 -5.2002 -12.4004 -6.80078c3.09961 44.5 -22.5 73.1006 -28.7002 79.4004
+c7.7998 11.2998 18.4004 27.7998 23.4004 53.2002c4.2998 21.7002 3 55.5 -6.90039 74.5c-1.59961 3.09961 -7.39941 11.2002 -21 7.39941c-9.7002 20 -13 22.1006 -15.5996 23.8008c-1.10059 0.699219 -23.6006 16.3994 -41.4004 -28
+c-12.2002 -0.900391 -31.2998 -5.30078 -47.5 -22.8008c-2 -2.19922 -5.89941 -3.7998 -10.0996 -5.39941h0.0996094c-8.39941 -3 -12.2998 -9.90039 -16.8994 -22.2998c-6.5 -17.4004 0.199219 -34.6006 6.7998 -45.7002c-17.7998 -15.9004 -37 -39.7998 -35.7002 -82.5
+c-34 -36 -11.7998 -73 -5.59961 -79.6006c-1.60059 -11.0996 3.69922 -19.3994 12 -23.7998c12.5996 -6.7002 30.2998 -9.59961 43.8994 -2.7998c4.90039 -5.2002 13.7998 -10.0996 30 -10.0996c6.7998 0 58 2.89941 72.6006 6.5c6.7998 1.59961 11.5 4.5 14.5996 7.09961
+c9.7998 3.09961 36.7998 12.2998 62.2002 28.7002c18 11.7002 24.2002 14.2002 37.5996 17.3994c12.9004 3.2002 21 15.1006 19.4004 28.2002z" />
+ </font>
+</defs></svg>
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.ttf b/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.ttf
new file mode 100644
index 0000000000..5f72e9127f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.ttf
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.woff b/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.woff
new file mode 100644
index 0000000000..c64755a525
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.woff
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.woff2 b/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.woff2
new file mode 100644
index 0000000000..b5a956765b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-brands-400.woff2
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.eot b/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.eot
new file mode 100644
index 0000000000..55085ca95d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.eot
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.svg b/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.svg
new file mode 100644
index 0000000000..bba54466b9
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.svg
@@ -0,0 +1,799 @@
+<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1">
+<metadata>
+Created by FontForge 20190112 at Tue Feb 12 10:24:59 2019
+ By Robert Madole
+Copyright (c) Font Awesome
+</metadata>
+<defs>
+<font id="FontAwesome5Free-Regular" horiz-adv-x="512" >
+ <font-face
+ font-family="Font Awesome 5 Free Regular"
+ font-weight="400"
+ font-stretch="normal"
+ units-per-em="512"
+ panose-1="2 0 5 3 0 0 0 0 0 0"
+ ascent="448"
+ descent="-64"
+ bbox="-0.0663408 -64.0662 640.01 448.1"
+ underline-thickness="25"
+ underline-position="-51"
+ unicode-range="U+0020-F5C8"
+ />
+ <missing-glyph />
+ <glyph glyph-name="heart" unicode="&#xf004;"
+d="M458.4 383.7c75.2998 -63.4004 64.0996 -166.601 10.5996 -221.3l-175.4 -178.7c-10 -10.2002 -23.2998 -15.7998 -37.5996 -15.7998c-14.2002 0 -27.5996 5.69922 -37.5996 15.8994l-175.4 178.7c-53.5996 54.7002 -64.5996 157.9 10.5996 221.2
+c57.8008 48.7002 147.101 41.2998 202.4 -15c55.2998 56.2998 144.6 63.5996 202.4 15zM434.8 196.2c36.2002 36.8994 43.7998 107.7 -7.2998 150.8c-38.7002 32.5996 -98.7002 27.9004 -136.5 -10.5996l-35 -35.7002l-35 35.7002
+c-37.5996 38.2998 -97.5996 43.1992 -136.5 10.5c-51.2002 -43.1006 -43.7998 -113.5 -7.2998 -150.7l175.399 -178.7c2.40039 -2.40039 4.40039 -2.40039 6.80078 0z" />
+ <glyph glyph-name="star" unicode="&#xf005;" horiz-adv-x="576"
+d="M528.1 276.5c26.2002 -3.7998 36.7002 -36.0996 17.7002 -54.5996l-105.7 -103l25 -145.5c4.5 -26.3008 -23.1992 -45.9004 -46.3994 -33.7002l-130.7 68.7002l-130.7 -68.7002c-23.2002 -12.2998 -50.8994 7.39941 -46.3994 33.7002l25 145.5l-105.7 103
+c-19 18.5 -8.5 50.7998 17.7002 54.5996l146.1 21.2998l65.2998 132.4c11.7998 23.8994 45.7002 23.5996 57.4004 0l65.2998 -132.4zM388.6 135.7l100.601 98l-139 20.2002l-62.2002 126l-62.2002 -126l-139 -20.2002l100.601 -98l-23.7002 -138.4l124.3 65.2998
+l124.3 -65.2998z" />
+ <glyph glyph-name="user" unicode="&#xf007;" horiz-adv-x="448"
+d="M313.6 144c74.2002 0 134.4 -60.2002 134.4 -134.4v-25.5996c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v25.5996c0 74.2002 60.2002 134.4 134.4 134.4c28.7998 0 42.5 -16 89.5996 -16s60.9004 16 89.5996 16zM400 -16v25.5996
+c0 47.6006 -38.7998 86.4004 -86.4004 86.4004c-14.6992 0 -37.8994 -16 -89.5996 -16c-51.2998 0 -75 16 -89.5996 16c-47.6006 0 -86.4004 -38.7998 -86.4004 -86.4004v-25.5996h352zM224 160c-79.5 0 -144 64.5 -144 144s64.5 144 144 144s144 -64.5 144 -144
+s-64.5 -144 -144 -144zM224 400c-52.9004 0 -96 -43.0996 -96 -96s43.0996 -96 96 -96s96 43.0996 96 96s-43.0996 96 -96 96z" />
+ <glyph glyph-name="clock" unicode="&#xf017;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM256 -8c110.5 0 200 89.5 200 200s-89.5 200 -200 200s-200 -89.5 -200 -200s89.5 -200 200 -200zM317.8 96.4004l-84.8994 61.6992
+c-3.10059 2.30078 -4.90039 5.90039 -4.90039 9.7002v164.2c0 6.59961 5.40039 12 12 12h32c6.59961 0 12 -5.40039 12 -12v-141.7l66.7998 -48.5996c5.40039 -3.90039 6.5 -11.4004 2.60059 -16.7998l-18.8008 -25.9004c-3.89941 -5.2998 -11.3994 -6.5 -16.7998 -2.59961z
+" />
+ <glyph glyph-name="list-alt" unicode="&#xf022;"
+d="M464 416c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h416zM458 16c3.31152 0 6 2.68848 6 6v340c0 3.31152 -2.68848 6 -6 6h-404c-3.31152 0 -6 -2.68848 -6 -6v-340
+c0 -3.31152 2.68848 -6 6 -6h404zM416 108v-24c0 -6.62695 -5.37305 -12 -12 -12h-200c-6.62695 0 -12 5.37305 -12 12v24c0 6.62695 5.37305 12 12 12h200c6.62695 0 12 -5.37305 12 -12zM416 204v-24c0 -6.62695 -5.37305 -12 -12 -12h-200c-6.62695 0 -12 5.37305 -12 12
+v24c0 6.62695 5.37305 12 12 12h200c6.62695 0 12 -5.37305 12 -12zM416 300v-24c0 -6.62695 -5.37305 -12 -12 -12h-200c-6.62695 0 -12 5.37305 -12 12v24c0 6.62695 5.37305 12 12 12h200c6.62695 0 12 -5.37305 12 -12zM164 288c0 -19.8818 -16.1182 -36 -36 -36
+s-36 16.1182 -36 36s16.1182 36 36 36s36 -16.1182 36 -36zM164 192c0 -19.8818 -16.1182 -36 -36 -36s-36 16.1182 -36 36s16.1182 36 36 36s36 -16.1182 36 -36zM164 96c0 -19.8818 -16.1182 -36 -36 -36s-36 16.1182 -36 36s16.1182 36 36 36s36 -16.1182 36 -36z" />
+ <glyph glyph-name="flag" unicode="&#xf024;"
+d="M336.174 368c35.4668 0 73.0195 12.6914 108.922 28.1797c31.6406 13.6514 66.9043 -9.65723 66.9043 -44.1162v-239.919c0 -16.1953 -8.1543 -31.3057 -21.7129 -40.1631c-26.5762 -17.3643 -70.0693 -39.9814 -128.548 -39.9814c-68.6084 0 -112.781 32 -161.913 32
+c-56.5674 0 -89.957 -11.2803 -127.826 -28.5566v-83.4434c0 -8.83691 -7.16309 -16 -16 -16h-16c-8.83691 0 -16 7.16309 -16 16v406.438c-14.3428 8.2998 -24 23.7979 -24 41.5615c0 27.5693 23.2422 49.71 51.2012 47.8965
+c22.9658 -1.49023 41.8662 -19.4717 44.4805 -42.3379c0.177734 -1.52441 0.321289 -4.00781 0.321289 -5.54199c0 -4.30176 -1.10352 -11.1035 -2.46289 -15.1846c22.418 8.68555 49.4199 15.168 80.7207 15.168c68.6084 0 112.781 -32 161.913 -32zM464 112v240
+c-31.5059 -14.6338 -84.5547 -32 -127.826 -32c-59.9111 0 -101.968 32 -161.913 32c-41.4365 0 -80.4766 -16.5879 -102.261 -32v-232c31.4473 14.5967 84.4648 24 127.826 24c59.9111 0 101.968 -32 161.913 -32c41.4365 0 80.4775 16.5879 102.261 32z" />
+ <glyph glyph-name="bookmark" unicode="&#xf02e;" horiz-adv-x="384"
+d="M336 448c26.5098 0 48 -21.4902 48 -48v-464l-192 112l-192 -112v464c0 26.5098 21.4902 48 48 48h288zM336 19.5703v374.434c0 3.31348 -2.68555 5.99609 -6 5.99609h-276c-3.31152 0 -6 -2.68848 -6 -6v-374.43l144 84z" />
+ <glyph glyph-name="image" unicode="&#xf03e;"
+d="M464 384c26.5098 0 48 -21.4902 48 -48v-288c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v288c0 26.5098 21.4902 48 48 48h416zM458 48c3.31152 0 6 2.68848 6 6v276c0 3.31152 -2.68848 6 -6 6h-404c-3.31152 0 -6 -2.68848 -6 -6v-276
+c0 -3.31152 2.68848 -6 6 -6h404zM128 296c22.0908 0 40 -17.9092 40 -40s-17.9092 -40 -40 -40s-40 17.9092 -40 40s17.9092 40 40 40zM96 96v48l39.5137 39.5146c4.6875 4.68652 12.2852 4.68652 16.9717 0l39.5146 -39.5146l119.514 119.515
+c4.6875 4.68652 12.2852 4.68652 16.9717 0l87.5146 -87.5146v-80h-320z" />
+ <glyph glyph-name="edit" unicode="&#xf044;" horiz-adv-x="575"
+d="M402.3 103.1l32 32c5 5 13.7002 1.5 13.7002 -5.69922v-145.4c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h273.5c7.09961 0 10.7002 -8.59961 5.7002 -13.7002l-32 -32c-1.5 -1.5 -3.5 -2.2998 -5.7002 -2.2998h-241.5v-352h352
+v113.5c0 2.09961 0.799805 4.09961 2.2998 5.59961zM558.9 304.9l-262.601 -262.601l-90.3994 -10c-26.2002 -2.89941 -48.5 19.2002 -45.6006 45.6006l10 90.3994l262.601 262.601c22.8994 22.8994 59.8994 22.8994 82.6992 0l43.2002 -43.2002
+c22.9004 -22.9004 22.9004 -60 0.100586 -82.7998zM460.1 274l-58.0996 58.0996l-185.8 -185.899l-7.2998 -65.2998l65.2998 7.2998zM524.9 353.7l-43.2002 43.2002c-4.10059 4.09961 -10.7998 4.09961 -14.7998 0l-30.9004 -30.9004l58.0996 -58.0996l30.9004 30.8994
+c4 4.2002 4 10.7998 -0.0996094 14.9004z" />
+ <glyph glyph-name="times-circle" unicode="&#xf057;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM256 -8c110.5 0 200 89.5 200 200s-89.5 200 -200 200s-200 -89.5 -200 -200s89.5 -200 200 -200zM357.8 254.2l-62.2002 -62.2002l62.2002 -62.2002
+c4.7002 -4.7002 4.7002 -12.2998 0 -17l-22.5996 -22.5996c-4.7002 -4.7002 -12.2998 -4.7002 -17 0l-62.2002 62.2002l-62.2002 -62.2002c-4.7002 -4.7002 -12.2998 -4.7002 -17 0l-22.5996 22.5996c-4.7002 4.7002 -4.7002 12.2998 0 17l62.2002 62.2002l-62.2002 62.2002
+c-4.7002 4.7002 -4.7002 12.2998 0 17l22.5996 22.5996c4.7002 4.7002 12.2998 4.7002 17 0l62.2002 -62.2002l62.2002 62.2002c4.7002 4.7002 12.2998 4.7002 17 0l22.5996 -22.5996c4.7002 -4.7002 4.7002 -12.2998 0 -17z" />
+ <glyph glyph-name="check-circle" unicode="&#xf058;"
+d="M256 440c136.967 0 248 -111.033 248 -248s-111.033 -248 -248 -248s-248 111.033 -248 248s111.033 248 248 248zM256 392c-110.549 0 -200 -89.4678 -200 -200c0 -110.549 89.4678 -200 200 -200c110.549 0 200 89.4678 200 200c0 110.549 -89.4678 200 -200 200z
+M396.204 261.733c4.66699 -4.70508 4.63672 -12.3037 -0.0673828 -16.9717l-172.589 -171.204c-4.70508 -4.66797 -12.3027 -4.63672 -16.9697 0.0683594l-90.7812 91.5156c-4.66797 4.70605 -4.63672 12.3047 0.0683594 16.9717l22.7188 22.5361
+c4.70508 4.66699 12.3027 4.63574 16.9697 -0.0693359l59.792 -60.2773l141.353 140.216c4.70508 4.66797 12.3027 4.6377 16.9697 -0.0673828z" />
+ <glyph glyph-name="question-circle" unicode="&#xf059;"
+d="M256 440c136.957 0 248 -111.083 248 -248c0 -136.997 -111.043 -248 -248 -248s-248 111.003 -248 248c0 136.917 111.043 248 248 248zM256 -8c110.569 0 200 89.4697 200 200c0 110.529 -89.5088 200 -200 200c-110.528 0 -200 -89.5049 -200 -200
+c0 -110.569 89.4678 -200 200 -200zM363.244 247.2c0 -67.0518 -72.4209 -68.084 -72.4209 -92.8633v-6.33691c0 -6.62695 -5.37305 -12 -12 -12h-45.6475c-6.62695 0 -12 5.37305 -12 12v8.65918c0 35.7451 27.1006 50.0342 47.5791 61.5156
+c17.5615 9.84473 28.3242 16.541 28.3242 29.5791c0 17.2461 -21.999 28.6934 -39.7842 28.6934c-23.1885 0 -33.8936 -10.9775 -48.9424 -29.9697c-4.05664 -5.11914 -11.46 -6.07031 -16.666 -2.12402l-27.8232 21.0986
+c-5.10742 3.87207 -6.25098 11.0654 -2.64453 16.3633c23.627 34.6934 53.7217 54.1846 100.575 54.1846c49.0713 0 101.45 -38.3037 101.45 -88.7998zM298 80c0 -23.1592 -18.8408 -42 -42 -42s-42 18.8408 -42 42s18.8408 42 42 42s42 -18.8408 42 -42z" />
+ <glyph glyph-name="eye" unicode="&#xf06e;" horiz-adv-x="576"
+d="M288 304c0.0927734 0 0.244141 0.000976562 0.336914 0.000976562c61.6641 0 111.71 -50.0469 111.71 -111.711c0 -61.6631 -50.0459 -111.71 -111.71 -111.71s-111.71 50.0469 -111.71 111.71c0 8.71289 1.95898 22.5781 4.37305 30.9502
+c6.93066 -3.94141 19.0273 -7.18457 27 -7.24023c30.9121 0 56 25.0879 56 56c-0.0556641 7.97266 -3.29883 20.0693 -7.24023 27c8.42383 2.62207 22.4189 4.8623 31.2402 5zM572.52 206.6c1.9209 -3.79883 3.47949 -10.3379 3.47949 -14.5947
+s-1.55859 -10.7959 -3.47949 -14.5947c-54.1992 -105.771 -161.59 -177.41 -284.52 -177.41s-230.29 71.5898 -284.52 177.4c-1.9209 3.79883 -3.47949 10.3379 -3.47949 14.5947s1.55859 10.7959 3.47949 14.5947c54.1992 105.771 161.59 177.41 284.52 177.41
+s230.29 -71.5898 284.52 -177.4zM288 48c98.6602 0 189.1 55 237.93 144c-48.8398 89 -139.27 144 -237.93 144s-189.09 -55 -237.93 -144c48.8398 -89 139.279 -144 237.93 -144z" />
+ <glyph glyph-name="eye-slash" unicode="&#xf070;" horiz-adv-x="640"
+d="M634 -23c3.31738 -2.65137 6.00977 -8.25098 6.00977 -12.498c0 -3.10449 -1.57715 -7.58984 -3.51953 -10.0117l-10 -12.4902c-2.65234 -3.31152 -8.24707 -6 -12.4902 -6c-3.09961 0 -7.58008 1.57227 -10 3.50977l-598 467.49
+c-3.31738 2.65137 -6.00977 8.25098 -6.00977 12.498c0 3.10449 1.57715 7.58984 3.51953 10.0117l10 12.4902c2.65234 3.31152 8.24707 6 12.4902 6c3.09961 0 7.58008 -1.57227 10 -3.50977zM296.79 301.53c6.33496 1.35059 16.7324 2.45801 23.21 2.46973
+c60.4805 0 109.36 -47.9102 111.58 -107.85zM343.21 82.46c-6.33496 -1.34375 -16.7334 -2.44629 -23.21 -2.45996c-60.4697 0 -109.35 47.9102 -111.58 107.84zM320 336c-19.8799 0 -39.2803 -2.7998 -58.2197 -7.09961l-46.4102 36.29
+c32.9199 11.8096 67.9297 18.8096 104.63 18.8096c122.93 0 230.29 -71.5898 284.57 -177.4c1.91992 -3.79883 3.47949 -10.3379 3.47949 -14.5947s-1.55957 -10.7959 -3.47949 -14.5947c-11.7197 -22.7598 -35.4189 -56.4092 -52.9004 -75.1104l-37.7402 29.5
+c14.333 15.0156 34.0449 41.9854 44 60.2002c-48.8398 89 -139.279 144 -237.93 144zM320 48c19.8896 0 39.2803 2.7998 58.2197 7.08984l46.4102 -36.2803c-32.9199 -11.7598 -67.9297 -18.8096 -104.63 -18.8096c-122.92 0 -230.28 71.5898 -284.51 177.4
+c-1.9209 3.79883 -3.47949 10.3379 -3.47949 14.5947s1.55859 10.7959 3.47949 14.5947c11.7168 22.7568 35.4111 56.4014 52.8896 75.1006l37.7402 -29.5c-14.3467 -15.0107 -34.0811 -41.9756 -44.0498 -60.1904c48.8496 -89 139.279 -144 237.93 -144z" />
+ <glyph glyph-name="calendar-alt" unicode="&#xf073;" horiz-adv-x="448"
+d="M148 160h-40c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12zM256 172c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h40
+c6.59961 0 12 -5.40039 12 -12v-40zM352 172c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-40zM256 76c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v40
+c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-40zM160 76c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-40zM352 76c0 -6.59961 -5.40039 -12 -12 -12h-40
+c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-40zM448 336v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h48v52c0 6.59961 5.40039 12 12 12h40
+c6.59961 0 12 -5.40039 12 -12v-52h128v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h48c26.5 0 48 -21.5 48 -48zM400 -10v298h-352v-298c0 -3.2998 2.7002 -6 6 -6h340c3.2998 0 6 2.7002 6 6z" />
+ <glyph glyph-name="comment" unicode="&#xf075;"
+d="M256 416c141.4 0 256 -93.0996 256 -208s-114.6 -208 -256 -208c-32.7998 0 -64 5.2002 -92.9004 14.2998c-29.0996 -20.5996 -77.5996 -46.2998 -139.1 -46.2998c-9.59961 0 -18.2998 5.7002 -22.0996 14.5c-3.80078 8.7998 -2 19 4.59961 26
+c0.5 0.400391 31.5 33.7998 46.4004 73.2002c-33 35.0996 -52.9004 78.7002 -52.9004 126.3c0 114.9 114.6 208 256 208zM256 48c114.7 0 208 71.7998 208 160s-93.2998 160 -208 160s-208 -71.7998 -208 -160c0 -42.2002 21.7002 -74.0996 39.7998 -93.4004
+l20.6006 -21.7998l-10.6006 -28.0996c-5.5 -14.5 -12.5996 -28.1006 -19.8994 -40.2002c23.5996 7.59961 43.1992 18.9004 57.5 29l19.5 13.7998l22.6992 -7.2002c25.3008 -8 51.7002 -12.0996 78.4004 -12.0996z" />
+ <glyph glyph-name="folder" unicode="&#xf07b;"
+d="M464 320c26.5098 0 48 -21.4902 48 -48v-224c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v288c0 26.5098 21.4902 48 48 48h146.74c8.49023 0 16.6299 -3.37012 22.6299 -9.37012l54.6299 -54.6299h192zM464 48v224h-198.62
+c-8.49023 0 -16.6299 3.37012 -22.6299 9.37012l-54.6299 54.6299h-140.12v-288h416z" />
+ <glyph glyph-name="folder-open" unicode="&#xf07c;" horiz-adv-x="575"
+d="M527.9 224c37.6992 0 60.6992 -41.5 40.6992 -73.4004l-79.8994 -128c-8.7998 -14.0996 -24.2002 -22.5996 -40.7002 -22.5996h-400c-26.5 0 -48 21.5 -48 48v288c0 26.5 21.5 48 48 48h160l64 -64h160c26.5 0 48 -21.5 48 -48v-48h47.9004zM48 330v-233.4l62.9004 104.2
+c8.69922 14.4004 24.2998 23.2002 41.0996 23.2002h280v42c0 3.2998 -2.7002 6 -6 6h-173.9l-64 64h-134.1c-3.2998 0 -6 -2.7002 -6 -6zM448 48l80 128h-378.8l-77.2002 -128h376z" />
+ <glyph glyph-name="chart-bar" unicode="&#xf080;"
+d="M396.8 96c-6.39941 0 -12.7998 6.40039 -12.7998 12.7998v230.4c0 6.39941 6.40039 12.7998 12.7998 12.7998h22.4004c6.39941 0 12.7998 -6.40039 12.7998 -12.7998v-230.4c0 -6.39941 -6.40039 -12.7998 -12.7998 -12.7998h-22.4004zM204.8 96
+c-6.39941 0 -12.7998 6.40039 -12.7998 12.7998v198.4c0 6.39941 6.40039 12.7998 12.7998 12.7998h22.4004c6.39941 0 12.7998 -6.40039 12.7998 -12.7998v-198.4c0 -6.39941 -6.40039 -12.7998 -12.7998 -12.7998h-22.4004zM300.8 96
+c-6.39941 0 -12.7998 6.40039 -12.7998 12.7998v134.4c0 6.39941 6.40039 12.7998 12.7998 12.7998h22.4004c6.39941 0 12.7998 -6.40039 12.7998 -12.7998v-134.4c0 -6.39941 -6.40039 -12.7998 -12.7998 -12.7998h-22.4004zM496 48c8.83984 0 16 -7.16016 16 -16v-16
+c0 -8.83984 -7.16016 -16 -16 -16h-464c-17.6699 0 -32 14.3301 -32 32v336c0 8.83984 7.16016 16 16 16h16c8.83984 0 16 -7.16016 16 -16v-320h448zM108.8 96c-6.39941 0 -12.7998 6.40039 -12.7998 12.7998v70.4004c0 6.39941 6.40039 12.7998 12.7998 12.7998h22.4004
+c6.39941 0 12.7998 -6.40039 12.7998 -12.7998v-70.4004c0 -6.39941 -6.40039 -12.7998 -12.7998 -12.7998h-22.4004z" />
+ <glyph glyph-name="comments" unicode="&#xf086;" horiz-adv-x="576"
+d="M532 61.7998c15.2998 -30.7002 37.4004 -54.5 37.7998 -54.7998c6.2998 -6.7002 8 -16.5 4.40039 -25c-3.7002 -8.5 -12 -14 -21.2002 -14c-53.5996 0 -96.7002 20.2998 -125.2 38.7998c-19 -4.39941 -39 -6.7998 -59.7998 -6.7998
+c-86.2002 0 -159.9 40.4004 -191.3 97.7998c-9.7002 1.2002 -19.2002 2.7998 -28.4004 4.90039c-28.5 -18.6006 -71.7002 -38.7998 -125.2 -38.7998c-9.19922 0 -17.5996 5.5 -21.1992 14c-3.7002 8.5 -1.90039 18.2998 4.39941 25
+c0.400391 0.399414 22.4004 24.1992 37.7002 54.8994c-27.5 27.2002 -44 61.2002 -44 98.2002c0 88.4004 93.0996 160 208 160c86.2998 0 160.3 -40.5 191.8 -98.0996c99.7002 -11.8008 176.2 -77.9004 176.2 -157.9c0 -37.0996 -16.5 -71.0996 -44 -98.2002zM139.2 154.1
+l19.7998 -4.5c16 -3.69922 32.5 -5.59961 49 -5.59961c86.7002 0 160 51.2998 160 112s-73.2998 112 -160 112s-160 -51.2998 -160 -112c0 -28.7002 16.2002 -50.5996 29.7002 -64l24.7998 -24.5l-15.5 -31.0996c-2.59961 -5.10059 -5.2998 -10.1006 -8 -14.8008
+c14.5996 5.10059 29 12.3008 43.0996 21.4004zM498.3 96c13.5 13.4004 29.7002 35.2998 29.7002 64c0 49.2002 -48.2998 91.5 -112.7 106c0.299805 -3.2998 0.700195 -6.59961 0.700195 -10c0 -80.9004 -78 -147.5 -179.3 -158.3
+c29.0996 -29.6006 77.2998 -49.7002 131.3 -49.7002c16.5 0 33 1.90039 49 5.59961l19.9004 4.60059l17.0996 -11.1006c14.0996 -9.09961 28.5 -16.2998 43.0996 -21.3994c-2.69922 4.7002 -5.39941 9.7002 -8 14.7998l-15.5 31.0996z" />
+ <glyph glyph-name="star-half" unicode="&#xf089;" horiz-adv-x="308"
+d="M288 62.7002v-54.2998l-130.7 -68.6006c-23.3994 -12.2998 -50.8994 7.60059 -46.3994 33.7002l25 145.5l-105.7 103c-19 18.5 -8.5 50.7998 17.7002 54.5996l146.1 21.2002l65.2998 132.4c5.90039 11.8994 17.2998 17.7998 28.7002 17.7998v-68.0996l-62.2002 -126
+l-139 -20.2002l100.601 -98l-23.7002 -138.4z" />
+ <glyph glyph-name="lemon" unicode="&#xf094;"
+d="M484.112 420.111c28.1221 -28.123 35.9434 -68.0039 19.0215 -97.0547c-23.0576 -39.584 50.1436 -163.384 -82.3311 -295.86c-132.301 -132.298 -256.435 -59.3594 -295.857 -82.3291c-29.0459 -16.917 -68.9219 -9.11426 -97.0576 19.0205
+c-28.1221 28.1221 -35.9434 68.0029 -19.0215 97.0547c23.0566 39.5859 -50.1436 163.386 82.3301 295.86c132.308 132.309 256.407 59.3496 295.862 82.332c29.0498 16.9219 68.9307 9.09863 97.0537 -19.0234zM461.707 347.217
+c13.5166 23.2031 -27.7578 63.7314 -50.4883 50.4912c-66.6025 -38.7939 -165.646 45.5898 -286.081 -74.8457c-120.444 -120.445 -36.0449 -219.472 -74.8447 -286.08c-13.542 -23.2471 27.8145 -63.6953 50.4932 -50.4883
+c66.6006 38.7949 165.636 -45.5996 286.076 74.8428c120.444 120.445 36.0449 219.472 74.8447 286.08zM291.846 338.481c1.37012 -10.96 -6.40332 -20.957 -17.3643 -22.3271c-54.8467 -6.85547 -135.779 -87.7871 -142.636 -142.636
+c-1.37305 -10.9883 -11.3984 -18.7334 -22.3262 -17.3643c-10.9609 1.37012 -18.7344 11.3652 -17.3643 22.3262c9.16211 73.2852 104.167 168.215 177.364 177.364c10.9531 1.36816 20.9561 -6.40234 22.3262 -17.3633z" />
+ <glyph glyph-name="credit-card" unicode="&#xf09d;" horiz-adv-x="576"
+d="M527.9 416c26.5996 0 48.0996 -21.5 48.0996 -48v-352c0 -26.5 -21.5 -48 -48.0996 -48h-479.801c-26.5996 0 -48.0996 21.5 -48.0996 48v352c0 26.5 21.5 48 48.0996 48h479.801zM54.0996 368c-3.2998 0 -6 -2.7002 -6 -6v-42h479.801v42c0 3.2998 -2.7002 6 -6 6
+h-467.801zM521.9 16c3.2998 0 6 2.7002 6 6v170h-479.801v-170c0 -3.2998 2.7002 -6 6 -6h467.801zM192 116v-40c0 -6.59961 -5.40039 -12 -12 -12h-72c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h72c6.59961 0 12 -5.40039 12 -12zM384 116v-40
+c0 -6.59961 -5.40039 -12 -12 -12h-136c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h136c6.59961 0 12 -5.40039 12 -12z" />
+ <glyph glyph-name="hdd" unicode="&#xf0a0;" horiz-adv-x="576"
+d="M567.403 212.358c5.59668 -8.04688 8.59668 -17.6113 8.59668 -27.4121v-136.946c0 -26.5098 -21.4902 -48 -48 -48h-480c-26.5098 0 -48 21.4902 -48 48v136.946c0 8.30957 3.85156 20.5898 8.59668 27.4121l105.08 151.053
+c7.90625 11.3652 25.5596 20.5889 39.4033 20.5889h0.000976562h269.838h0.000976562c13.8438 0 31.4971 -9.22363 39.4033 -20.5889zM153.081 336l-77.9131 -112h425.664l-77.9131 112h-269.838zM528 48v128h-480v-128h480zM496 112c0 -17.6729 -14.3271 -32 -32 -32
+s-32 14.3271 -32 32s14.3271 32 32 32s32 -14.3271 32 -32zM400 112c0 -17.6729 -14.3271 -32 -32 -32s-32 14.3271 -32 32s14.3271 32 32 32s32 -14.3271 32 -32z" />
+ <glyph glyph-name="hand-point-right" unicode="&#xf0a4;"
+d="M428.8 310.4c45.0996 0 83.2002 -38.1016 83.2002 -83.2002c0 -45.6162 -37.7646 -83.2002 -83.2002 -83.2002h-35.6475c-1.41602 -6.36719 -4.96875 -16.252 -7.92969 -22.0645c2.50586 -22.0059 -3.50293 -44.9775 -15.9844 -62.791
+c-1.14062 -52.4863 -37.3984 -91.1445 -99.9404 -91.1445h-21.2988c-60.0635 0 -98.5117 40 -127.2 40h-2.67871c-5.74707 -4.95215 -13.5361 -8 -22.1201 -8h-64c-17.6729 0 -32 12.8936 -32 28.7998v230.4c0 15.9062 14.3271 28.7998 32 28.7998h64.001
+c8.58398 0 16.373 -3.04785 22.1201 -8h2.67871c6.96387 0 14.8623 6.19336 30.1816 23.6689l0.128906 0.148438l0.130859 0.145508c8.85645 9.93652 18.1162 20.8398 25.8506 33.2529c18.7051 30.2471 30.3936 78.7842 75.707 78.7842c56.9277 0 92 -35.2861 92 -83.2002
+v-0.0839844c0 -6.21777 -0.974609 -16.2148 -2.17578 -22.3154h86.1768zM428.8 192c18.9756 0 35.2002 16.2246 35.2002 35.2002c0 18.7002 -16.7754 35.2002 -35.2002 35.2002h-158.399c0 17.3242 26.3994 35.1992 26.3994 70.3994c0 26.4004 -20.625 35.2002 -44 35.2002
+c-8.79395 0 -20.4443 -32.7119 -34.9258 -56.0996c-9.07422 -14.5752 -19.5244 -27.2256 -30.7988 -39.875c-16.1094 -18.374 -33.8359 -36.6328 -59.0752 -39.5967v-176.753c42.79 -3.7627 74.5088 -39.6758 120 -39.6758h21.2988
+c40.5244 0 57.124 22.1973 50.6006 61.3252c14.6113 8.00098 24.1514 33.9785 12.9248 53.625c19.3652 18.2246 17.7871 46.3809 4.9502 61.0498h91.0254zM88 64c0 13.2549 -10.7451 24 -24 24s-24 -10.7451 -24 -24s10.7451 -24 24 -24s24 10.7451 24 24z" />
+ <glyph glyph-name="hand-point-left" unicode="&#xf0a5;" horiz-adv-x="511"
+d="M0 227.2c0 45.0986 38.1006 83.2002 83.2002 83.2002h86.1758c-1.3623 6.91016 -2.17578 14.374 -2.17578 22.3994c0 47.9141 35.0723 83.2002 92 83.2002c45.3135 0 57.002 -48.5371 75.7061 -78.7852c7.73438 -12.4121 16.9951 -23.3154 25.8506 -33.2529
+l0.130859 -0.145508l0.128906 -0.148438c15.3213 -17.4746 23.2197 -23.668 30.1836 -23.668h2.67871c5.74707 4.95215 13.5361 8 22.1201 8h64c17.6729 0 32 -12.8936 32 -28.7998v-230.4c0 -15.9062 -14.3271 -28.7998 -32 -28.7998h-64
+c-8.58398 0 -16.373 3.04785 -22.1201 8h-2.67871c-28.6885 0 -67.1367 -40 -127.2 -40h-21.2988c-62.542 0 -98.8008 38.6582 -99.9404 91.1445c-12.4814 17.8135 -18.4922 40.7852 -15.9844 62.791c-2.96094 5.8125 -6.51367 15.6973 -7.92969 22.0645h-35.6465
+c-45.4355 0 -83.2002 37.584 -83.2002 83.2002zM48 227.2c0 -18.9756 16.2246 -35.2002 35.2002 -35.2002h91.0244c-12.8369 -14.6689 -14.415 -42.8252 4.9502 -61.0498c-11.2256 -19.6465 -1.68652 -45.624 12.9248 -53.625
+c-6.52246 -39.1279 10.0771 -61.3252 50.6016 -61.3252h21.2988c45.4912 0 77.21 35.9131 120 39.6768v176.752c-25.2393 2.96289 -42.9658 21.2227 -59.0752 39.5967c-11.2744 12.6494 -21.7246 25.2998 -30.7988 39.875
+c-14.4814 23.3877 -26.1318 56.0996 -34.9258 56.0996c-23.375 0 -44 -8.7998 -44 -35.2002c0 -35.2002 26.3994 -53.0752 26.3994 -70.3994h-158.399c-18.4248 0 -35.2002 -16.5 -35.2002 -35.2002zM448 88c-13.2549 0 -24 -10.7451 -24 -24s10.7451 -24 24 -24
+s24 10.7451 24 24s-10.7451 24 -24 24z" />
+ <glyph glyph-name="hand-point-up" unicode="&#xf0a6;" horiz-adv-x="448"
+d="M105.6 364.8c0 45.0996 38.1016 83.2002 83.2002 83.2002c45.6162 0 83.2002 -37.7646 83.2002 -83.2002v-35.6465c6.36719 -1.41602 16.252 -4.96875 22.0645 -7.92969c22.0059 2.50684 44.9775 -3.50293 62.791 -15.9844
+c52.4863 -1.14062 91.1445 -37.3984 91.1445 -99.9404v-21.2988c0 -60.0635 -40 -98.5117 -40 -127.2v-2.67871c4.95215 -5.74707 8 -13.5361 8 -22.1201v-64c0 -17.6729 -12.8936 -32 -28.7998 -32h-230.4c-15.9062 0 -28.7998 14.3271 -28.7998 32v64
+c0 8.58398 3.04785 16.373 8 22.1201v2.67871c0 6.96387 -6.19336 14.8623 -23.6689 30.1816l-0.148438 0.128906l-0.145508 0.130859c-9.93652 8.85645 -20.8398 18.1162 -33.2529 25.8506c-30.2471 18.7051 -78.7842 30.3936 -78.7842 75.707
+c0 56.9277 35.2861 92 83.2002 92h0.0839844c6.21777 0 16.2148 -0.974609 22.3154 -2.17578v86.1768zM224 364.8c0 18.9756 -16.2246 35.2002 -35.2002 35.2002c-18.7002 0 -35.2002 -16.7754 -35.2002 -35.2002v-158.399c-17.3242 0 -35.1992 26.3994 -70.3994 26.3994
+c-26.4004 0 -35.2002 -20.625 -35.2002 -44c0 -8.79395 32.7119 -20.4443 56.0996 -34.9258c14.5752 -9.07422 27.2256 -19.5244 39.875 -30.7988c18.374 -16.1094 36.6328 -33.8359 39.5967 -59.0752h176.753c3.7627 42.79 39.6758 74.5088 39.6758 120v21.2988
+c0 40.5244 -22.1973 57.124 -61.3252 50.6006c-8.00098 14.6113 -33.9785 24.1514 -53.625 12.9248c-18.2246 19.3652 -46.3809 17.7871 -61.0498 4.9502v91.0254zM352 24c-13.2549 0 -24 -10.7451 -24 -24s10.7451 -24 24 -24s24 10.7451 24 24s-10.7451 24 -24 24z" />
+ <glyph glyph-name="hand-point-down" unicode="&#xf0a7;" horiz-adv-x="448"
+d="M188.8 -64c-45.0986 0 -83.2002 38.1006 -83.2002 83.2002v86.1758c-6.91016 -1.3623 -14.374 -2.17578 -22.3994 -2.17578c-47.9141 0 -83.2002 35.0723 -83.2002 92c0 45.3135 48.5371 57.002 78.7852 75.707c12.4121 7.73438 23.3154 16.9951 33.2529 25.8506
+l0.145508 0.130859l0.148438 0.128906c17.4746 15.3213 23.668 23.2197 23.668 30.1836v2.67871c-4.95215 5.74707 -8 13.5361 -8 22.1201v64c0 17.6729 12.8936 32 28.7998 32h230.4c15.9062 0 28.7998 -14.3271 28.7998 -32v-64.001
+c0 -8.58398 -3.04785 -16.373 -8 -22.1201v-2.67871c0 -28.6885 40 -67.1367 40 -127.2v-21.2988c0 -62.542 -38.6582 -98.8008 -91.1445 -99.9404c-17.8135 -12.4814 -40.7852 -18.4922 -62.791 -15.9844c-5.8125 -2.96094 -15.6973 -6.51367 -22.0645 -7.92969v-35.6465
+c0 -45.4355 -37.584 -83.2002 -83.2002 -83.2002zM188.8 -16c18.9756 0 35.2002 16.2246 35.2002 35.2002v91.0244c14.6689 -12.8369 42.8252 -14.415 61.0498 4.9502c19.6465 -11.2256 45.624 -1.68652 53.625 12.9248c39.1279 -6.52246 61.3252 10.0771 61.3252 50.6016
+v21.2988c0 45.4912 -35.9131 77.21 -39.6768 120h-176.752c-2.96289 -25.2393 -21.2227 -42.9658 -39.5967 -59.0752c-12.6494 -11.2744 -25.2998 -21.7246 -39.875 -30.7988c-23.3877 -14.4814 -56.0996 -26.1318 -56.0996 -34.9258c0 -23.375 8.7998 -44 35.2002 -44
+c35.2002 0 53.0752 26.3994 70.3994 26.3994v-158.399c0 -18.4248 16.5 -35.2002 35.2002 -35.2002zM328 384c0 -13.2549 10.7451 -24 24 -24s24 10.7451 24 24s-10.7451 24 -24 24s-24 -10.7451 -24 -24z" />
+ <glyph glyph-name="copy" unicode="&#xf0c5;" horiz-adv-x="448"
+d="M433.941 382.059c7.75977 -7.75977 14.0586 -22.9658 14.0586 -33.9404v-268.118c0 -26.5098 -21.4902 -48 -48 -48h-80v-48c0 -26.5098 -21.4902 -48 -48 -48h-224c-26.5098 0 -48 21.4902 -48 48v320c0 26.5098 21.4902 48 48 48h80v48c0 26.5098 21.4902 48 48 48
+h172.118c10.9746 0 26.1807 -6.29883 33.9404 -14.0586zM266 -16c3.31152 0 6 2.68848 6 6v42h-96c-26.5098 0 -48 21.4902 -48 48v224h-74c-3.31152 0 -6 -2.68848 -6 -6v-308c0 -3.31152 2.68848 -6 6 -6h212zM394 80c3.31152 0 6 2.68848 6 6v202h-88
+c-13.2549 0 -24 10.7451 -24 24v88h-106c-3.31152 0 -6 -2.68848 -6 -6v-308c0 -3.31152 2.68848 -6 6 -6h212zM400 336v9.63184v0.000976562c0 1.37207 -0.787109 3.27246 -1.75684 4.24219l-48.3682 48.3682c-1.12598 1.125 -2.65234 1.75684 -4.24316 1.75684h-9.63184
+v-64h64z" />
+ <glyph glyph-name="save" unicode="&#xf0c7;" horiz-adv-x="448"
+d="M433.941 318.059c7.75977 -7.75977 14.0586 -22.9658 14.0586 -33.9404v-268.118c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h268.118c10.9746 0 26.1807 -6.29883 33.9404 -14.0586zM272 368h-128v-80h128v80
+zM394 16c3.31152 0 6 2.68848 6 6v259.632v0.000976562c0 1.37207 -0.787109 3.27246 -1.75684 4.24219l-78.2432 78.2432v-100.118c0 -13.2549 -10.7451 -24 -24 -24h-176c-13.2549 0 -24 10.7451 -24 24v104h-42c-3.31152 0 -6 -2.68848 -6 -6v-340
+c0 -3.31152 2.68848 -6 6 -6h340zM224 216c48.5234 0 88 -39.4766 88 -88s-39.4766 -88 -88 -88s-88 39.4766 -88 88s39.4766 88 88 88zM224 88c22.0557 0 40 17.9443 40 40s-17.9443 40 -40 40s-40 -17.9443 -40 -40s17.9443 -40 40 -40z" />
+ <glyph glyph-name="square" unicode="&#xf0c8;" horiz-adv-x="448"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM394 16c3.2998 0 6 2.7002 6 6v340c0 3.2998 -2.7002 6 -6 6h-340c-3.2998 0 -6 -2.7002 -6 -6v-340c0 -3.2998 2.7002 -6 6 -6h340z" />
+ <glyph glyph-name="envelope" unicode="&#xf0e0;"
+d="M464 384c26.5098 0 48 -21.4902 48 -48v-288c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v288c0 26.5098 21.4902 48 48 48h416zM464 336h-416v-40.8047c22.4248 -18.2627 58.1797 -46.6602 134.587 -106.49
+c16.834 -13.2422 50.2051 -45.0762 73.4131 -44.7012c23.2119 -0.371094 56.5723 31.4541 73.4131 44.7012c76.4189 59.8389 112.165 88.2305 134.587 106.49v40.8047zM48 48h416v185.601c-22.915 -18.252 -55.4189 -43.8691 -104.947 -82.6523
+c-22.5439 -17.748 -60.3359 -55.1787 -103.053 -54.9473c-42.9277 -0.231445 -81.2051 37.75 -103.062 54.9551c-49.5293 38.7842 -82.0244 64.3945 -104.938 82.6455v-185.602z" />
+ <glyph glyph-name="lightbulb" unicode="&#xf0eb;" horiz-adv-x="352"
+d="M176 368c8.83984 0 16 -7.16016 16 -16s-7.16016 -16 -16 -16c-35.2803 0 -64 -28.7002 -64 -64c0 -8.83984 -7.16016 -16 -16 -16s-16 7.16016 -16 16c0 52.9404 43.0596 96 96 96zM96.0596 -11.1699l-0.0400391 43.1797h159.961l-0.0507812 -43.1797
+c-0.00976562 -3.13965 -0.939453 -6.21973 -2.67969 -8.83984l-24.5098 -36.8398c-2.95996 -4.45996 -7.95996 -7.14062 -13.3203 -7.14062h-78.8496c-5.35059 0 -10.3506 2.68066 -13.3203 7.14062l-24.5098 36.8398c-1.75 2.62012 -2.68066 5.68945 -2.68066 8.83984z
+M176 448c97.2002 0 176 -78.7998 176 -176c0 -44.3701 -16.4502 -84.8496 -43.5498 -115.79c-16.6406 -18.9795 -42.7402 -58.79 -52.4199 -92.1602v-0.0498047h-48v0.0996094c0.00390625 4.04199 0.999023 10.4482 2.21973 14.3008
+c5.67969 17.9893 22.9902 64.8496 62.0996 109.46c20.4102 23.29 31.6504 53.1699 31.6504 84.1396c0 70.5801 -57.4199 128 -128 128c-68.2803 0 -128.15 -54.3604 -127.95 -128c0.0898438 -30.9902 11.0703 -60.71 31.6104 -84.1396
+c39.3496 -44.9004 56.5801 -91.8604 62.1699 -109.67c1.42969 -4.56055 2.13965 -9.30078 2.15039 -14.0703v-0.120117h-48v0.0595703c-9.68066 33.3604 -35.7803 73.1709 -52.4209 92.1602c-27.1094 30.9307 -43.5596 71.4102 -43.5596 115.78
+c0 93.0303 73.7197 176 176 176z" />
+ <glyph glyph-name="bell" unicode="&#xf0f3;" horiz-adv-x="448"
+d="M439.39 85.71c6 -6.44043 8.66016 -14.1602 8.61035 -21.71c-0.0996094 -16.4004 -12.9805 -32 -32.0996 -32h-383.801c-19.1191 0 -31.9893 15.5996 -32.0996 32c-0.0498047 7.5498 2.61035 15.2598 8.61035 21.71c19.3193 20.7598 55.4697 51.9902 55.4697 154.29
+c0 77.7002 54.4795 139.9 127.939 155.16v20.8398c0 17.6699 14.3203 32 31.9805 32s31.9805 -14.3301 31.9805 -32v-20.8398c73.46 -15.2598 127.939 -77.46 127.939 -155.16c0 -102.3 36.1504 -133.53 55.4697 -154.29zM67.5303 80h312.939
+c-21.2197 27.96 -44.4199 74.3203 -44.5293 159.42c0 0.200195 0.0595703 0.379883 0.0595703 0.580078c0 61.8604 -50.1396 112 -112 112s-112 -50.1396 -112 -112c0 -0.200195 0.0595703 -0.379883 0.0595703 -0.580078
+c-0.109375 -85.0898 -23.3096 -131.45 -44.5293 -159.42zM224 -64c-35.3203 0 -63.9697 28.6504 -63.9697 64h127.939c0 -35.3496 -28.6494 -64 -63.9697 -64z" />
+ <glyph glyph-name="hospital" unicode="&#xf0f8;" horiz-adv-x="448"
+d="M128 204v40c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12zM268 192c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40
+c0 -6.62695 -5.37305 -12 -12 -12h-40zM192 108c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40zM268 96c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40
+c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-40zM448 -28v-36h-448v36c0 6.62695 5.37305 12 12 12h19.5v378.965c0 11.6172 10.7451 21.0352 24 21.0352h88.5v40c0 13.2549 10.7451 24 24 24h112c13.2549 0 24 -10.7451 24 -24v-40h88.5
+c13.2549 0 24 -9.41797 24 -21.0352v-378.965h19.5c6.62695 0 12 -5.37305 12 -12zM79.5 -15h112.5v67c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-67h112.5v351h-64.5v-24c0 -13.2549 -10.7451 -24 -24 -24h-112c-13.2549 0 -24 10.7451 -24 24v24
+h-64.5v-351zM266 384h-26v26c0 3.31152 -2.68848 6 -6 6h-20c-3.31152 0 -6 -2.68848 -6 -6v-26h-26c-3.31152 0 -6 -2.68848 -6 -6v-20c0 -3.31152 2.68848 -6 6 -6h26v-26c0 -3.31152 2.68848 -6 6 -6h20c3.31152 0 6 2.68848 6 6v26h26c3.31152 0 6 2.68848 6 6v20
+c0 3.31152 -2.68848 6 -6 6z" />
+ <glyph glyph-name="plus-square" unicode="&#xf0fe;" horiz-adv-x="448"
+d="M352 208v-32c0 -6.59961 -5.40039 -12 -12 -12h-88v-88c0 -6.59961 -5.40039 -12 -12 -12h-32c-6.59961 0 -12 5.40039 -12 12v88h-88c-6.59961 0 -12 5.40039 -12 12v32c0 6.59961 5.40039 12 12 12h88v88c0 6.59961 5.40039 12 12 12h32c6.59961 0 12 -5.40039 12 -12
+v-88h88c6.59961 0 12 -5.40039 12 -12zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM400 22v340c0 3.2998 -2.7002 6 -6 6h-340c-3.2998 0 -6 -2.7002 -6 -6v-340
+c0 -3.2998 2.7002 -6 6 -6h340c3.2998 0 6 2.7002 6 6z" />
+ <glyph glyph-name="circle" unicode="&#xf111;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM256 -8c110.5 0 200 89.5 200 200s-89.5 200 -200 200s-200 -89.5 -200 -200s89.5 -200 200 -200z" />
+ <glyph glyph-name="smile" unicode="&#xf118;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM168 208c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32
+s32 -14.2998 32 -32s-14.2998 -32 -32 -32zM328 208c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32s32 -14.2998 32 -32s-14.2998 -32 -32 -32zM332 135.4c8.5 10.1992 23.7002 11.5 33.7998 3.09961c10.2002 -8.5 11.6006 -23.5996 3.10059 -33.7998
+c-30 -36 -74.1006 -56.6006 -120.9 -56.6006s-90.9004 20.6006 -120.9 56.6006c-8.39941 10.2002 -7.09961 25.2998 3.10059 33.7998c10.0996 8.40039 25.2998 7.09961 33.7998 -3.09961c20.7998 -25.1006 51.5 -39.4004 84 -39.4004s63.2002 14.4004 84 39.4004z" />
+ <glyph glyph-name="frown" unicode="&#xf119;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM168 208c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32
+s32 -14.2998 32 -32s-14.2998 -32 -32 -32zM328 272c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32zM248 144c40.2002 0 78 -17.7002 103.8 -48.5996c8.40039 -10.2002 7.10059 -25.3008 -3.09961 -33.8008
+c-10.7002 -8.7998 -25.7002 -6.59961 -33.7998 3.10059c-16.6006 20 -41 31.3994 -66.9004 31.3994s-50.2998 -11.5 -66.9004 -31.3994c-8.5 -10.2002 -23.5996 -11.5 -33.7998 -3.10059c-10.2002 8.5 -11.5996 23.6006 -3.09961 33.8008
+c25.7998 30.8994 63.5996 48.5996 103.8 48.5996z" />
+ <glyph glyph-name="meh" unicode="&#xf11a;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM168 208c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32
+s32 -14.2998 32 -32s-14.2998 -32 -32 -32zM328 272c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32zM336 128c13.2002 0 24 -10.7998 24 -24s-10.7998 -24 -24 -24h-176c-13.2002 0 -24 10.7998 -24 24s10.7998 24 24 24h176z
+" />
+ <glyph glyph-name="keyboard" unicode="&#xf11c;" horiz-adv-x="576"
+d="M528 384c26.5098 0 48 -21.4902 48 -48v-288c0 -26.5098 -21.4902 -48 -48 -48h-480c-26.5098 0 -48 21.4902 -48 48v288c0 26.5098 21.4902 48 48 48h480zM536 48v288c0 4.41113 -3.58887 8 -8 8h-480c-4.41113 0 -8 -3.58887 -8 -8v-288c0 -4.41113 3.58887 -8 8 -8
+h480c4.41113 0 8 3.58887 8 8zM170 178c0 -6.62695 -5.37305 -12 -12 -12h-28c-6.62695 0 -12 5.37305 -12 12v28c0 6.62695 5.37305 12 12 12h28c6.62695 0 12 -5.37305 12 -12v-28zM266 178c0 -6.62695 -5.37305 -12 -12 -12h-28c-6.62695 0 -12 5.37305 -12 12v28
+c0 6.62695 5.37305 12 12 12h28c6.62695 0 12 -5.37305 12 -12v-28zM362 178c0 -6.62695 -5.37305 -12 -12 -12h-28c-6.62695 0 -12 5.37305 -12 12v28c0 6.62695 5.37305 12 12 12h28c6.62695 0 12 -5.37305 12 -12v-28zM458 178c0 -6.62695 -5.37305 -12 -12 -12h-28
+c-6.62695 0 -12 5.37305 -12 12v28c0 6.62695 5.37305 12 12 12h28c6.62695 0 12 -5.37305 12 -12v-28zM122 96c0 -6.62695 -5.37305 -12 -12 -12h-28c-6.62695 0 -12 5.37305 -12 12v28c0 6.62695 5.37305 12 12 12h28c6.62695 0 12 -5.37305 12 -12v-28zM506 96
+c0 -6.62695 -5.37305 -12 -12 -12h-28c-6.62695 0 -12 5.37305 -12 12v28c0 6.62695 5.37305 12 12 12h28c6.62695 0 12 -5.37305 12 -12v-28zM122 260c0 -6.62695 -5.37305 -12 -12 -12h-28c-6.62695 0 -12 5.37305 -12 12v28c0 6.62695 5.37305 12 12 12h28
+c6.62695 0 12 -5.37305 12 -12v-28zM218 260c0 -6.62695 -5.37305 -12 -12 -12h-28c-6.62695 0 -12 5.37305 -12 12v28c0 6.62695 5.37305 12 12 12h28c6.62695 0 12 -5.37305 12 -12v-28zM314 260c0 -6.62695 -5.37305 -12 -12 -12h-28c-6.62695 0 -12 5.37305 -12 12v28
+c0 6.62695 5.37305 12 12 12h28c6.62695 0 12 -5.37305 12 -12v-28zM410 260c0 -6.62695 -5.37305 -12 -12 -12h-28c-6.62695 0 -12 5.37305 -12 12v28c0 6.62695 5.37305 12 12 12h28c6.62695 0 12 -5.37305 12 -12v-28zM506 260c0 -6.62695 -5.37305 -12 -12 -12h-28
+c-6.62695 0 -12 5.37305 -12 12v28c0 6.62695 5.37305 12 12 12h28c6.62695 0 12 -5.37305 12 -12v-28zM408 102c0 -6.62695 -5.37305 -12 -12 -12h-216c-6.62695 0 -12 5.37305 -12 12v16c0 6.62695 5.37305 12 12 12h216c6.62695 0 12 -5.37305 12 -12v-16z" />
+ <glyph glyph-name="calendar" unicode="&#xf133;" horiz-adv-x="448"
+d="M400 384c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h48v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h128v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12
+v-52h48zM394 -16c3.2998 0 6 2.7002 6 6v298h-352v-298c0 -3.2998 2.7002 -6 6 -6h340z" />
+ <glyph glyph-name="play-circle" unicode="&#xf144;"
+d="M371.7 210c16.3994 -9.2002 16.3994 -32.9004 0 -42l-176 -101c-15.9004 -8.7998 -35.7002 2.59961 -35.7002 21v208c0 18.5 19.9004 29.7998 35.7002 21zM504 192c0 -137 -111 -248 -248 -248s-248 111 -248 248s111 248 248 248s248 -111 248 -248zM56 192
+c0 -110.5 89.5 -200 200 -200s200 89.5 200 200s-89.5 200 -200 200s-200 -89.5 -200 -200z" />
+ <glyph glyph-name="minus-square" unicode="&#xf146;" horiz-adv-x="448"
+d="M108 164c-6.59961 0 -12 5.40039 -12 12v32c0 6.59961 5.40039 12 12 12h232c6.59961 0 12 -5.40039 12 -12v-32c0 -6.59961 -5.40039 -12 -12 -12h-232zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352
+c26.5 0 48 -21.5 48 -48zM400 22v340c0 3.2998 -2.7002 6 -6 6h-340c-3.2998 0 -6 -2.7002 -6 -6v-340c0 -3.2998 2.7002 -6 6 -6h340c3.2998 0 6 2.7002 6 6z" />
+ <glyph glyph-name="check-square" unicode="&#xf14a;" horiz-adv-x="448"
+d="M400 416c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h352zM400 16v352h-352v-352h352zM364.136 257.724l-172.589 -171.204
+c-4.70508 -4.66699 -12.3027 -4.63672 -16.9697 0.0683594l-90.7812 91.5156c-4.66699 4.70508 -4.63672 12.3037 0.0693359 16.9717l22.7188 22.5361c4.70508 4.66699 12.3027 4.63672 16.9697 -0.0693359l59.792 -60.2773l141.353 140.217
+c4.70508 4.66699 12.3027 4.63672 16.9697 -0.0683594l22.5361 -22.7178c4.66699 -4.70605 4.63672 -12.3047 -0.0683594 -16.9717z" />
+ <glyph glyph-name="share-square" unicode="&#xf14d;" horiz-adv-x="576"
+d="M561.938 289.94c18.75 -18.7402 18.75 -49.1406 0 -67.8809l-143.998 -144c-29.9727 -29.9727 -81.9404 -9.05273 -81.9404 33.9404v53.7998c-101.266 -7.83691 -99.625 -31.6406 -84.1104 -78.7598c14.2285 -43.0889 -33.4736 -79.248 -71.0195 -55.7402
+c-51.6924 32.3057 -84.8701 83.0635 -84.8701 144.76c0 39.3408 12.2197 72.7402 36.3301 99.3008c19.8398 21.8398 47.7402 38.4697 82.9102 49.4199c36.7295 11.4395 78.3096 16.1094 120.76 17.9893v57.1982c0 42.9355 51.9258 63.9541 81.9404 33.9404zM384 112l144 144
+l-144 144v-104.09c-110.86 -0.90332 -240 -10.5166 -240 -119.851c0 -52.1396 32.79 -85.6094 62.3096 -104.06c-39.8174 120.65 48.999 141.918 177.69 143.84v-103.84zM408.74 27.5068c6.14844 1.75684 15.5449 5.92383 20.9736 9.30273
+c7.97656 4.95215 18.2861 -0.825195 18.2861 -10.2139v-42.5957c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h132c6.62695 0 12 -5.37305 12 -12v-4.48633c0 -4.91699 -2.9873 -9.36914 -7.56934 -11.1514
+c-13.7021 -5.33105 -26.3955 -11.5371 -38.0498 -18.585c-1.59668 -0.974609 -4.41016 -1.77051 -6.28027 -1.77734h-86.1006c-3.31152 0 -6 -2.68848 -6 -6v-340c0 -3.31152 2.68848 -6 6 -6h340c3.31152 0 6 2.68848 6 6v25.9658c0 5.37012 3.5791 10.0596 8.74023 11.541
+z" />
+ <glyph glyph-name="compass" unicode="&#xf14e;" horiz-adv-x="496"
+d="M347.94 318.14c16.6592 7.61035 33.8096 -9.54004 26.1992 -26.1992l-65.9697 -144.341c-2.73047 -5.97363 -9.7959 -13.0391 -15.7695 -15.7695l-144.341 -65.9697c-16.6592 -7.61035 -33.8096 9.5498 -26.1992 26.1992l65.9697 144.341
+c2.73047 5.97363 9.7959 13.0391 15.7695 15.7695zM270.58 169.42c12.4697 12.4697 12.4697 32.6904 0 45.1602s-32.6904 12.4697 -45.1602 0s-12.4697 -32.6904 0 -45.1602s32.6904 -12.4697 45.1602 0zM248 440c136.97 0 248 -111.03 248 -248s-111.03 -248 -248 -248
+s-248 111.03 -248 248s111.03 248 248 248zM248 -8c110.28 0 200 89.7197 200 200s-89.7197 200 -200 200s-200 -89.7197 -200 -200s89.7197 -200 200 -200z" />
+ <glyph glyph-name="caret-square-down" unicode="&#xf150;" horiz-adv-x="448"
+d="M125.1 240h197.801c10.6992 0 16.0996 -13 8.5 -20.5l-98.9004 -98.2998c-4.7002 -4.7002 -12.2002 -4.7002 -16.9004 0l-98.8994 98.2998c-7.7002 7.5 -2.2998 20.5 8.39941 20.5zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352
+c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM400 22v340c0 3.2998 -2.7002 6 -6 6h-340c-3.2998 0 -6 -2.7002 -6 -6v-340c0 -3.2998 2.7002 -6 6 -6h340c3.2998 0 6 2.7002 6 6z" />
+ <glyph glyph-name="caret-square-up" unicode="&#xf151;" horiz-adv-x="448"
+d="M322.9 144h-197.801c-10.6992 0 -16.0996 13 -8.5 20.5l98.9004 98.2998c4.7002 4.7002 12.2002 4.7002 16.9004 0l98.8994 -98.2998c7.7002 -7.5 2.2998 -20.5 -8.39941 -20.5zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352
+c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM400 22v340c0 3.2998 -2.7002 6 -6 6h-340c-3.2998 0 -6 -2.7002 -6 -6v-340c0 -3.2998 2.7002 -6 6 -6h340c3.2998 0 6 2.7002 6 6z" />
+ <glyph glyph-name="caret-square-right" unicode="&#xf152;" horiz-adv-x="448"
+d="M176 93.0996v197.801c0 10.6992 13 16.0996 20.5 8.5l98.2998 -98.9004c4.7002 -4.7002 4.7002 -12.2002 0 -16.9004l-98.2998 -98.8994c-7.5 -7.7002 -20.5 -2.2998 -20.5 8.39941zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352
+c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM400 22v340c0 3.2998 -2.7002 6 -6 6h-340c-3.2998 0 -6 -2.7002 -6 -6v-340c0 -3.2998 2.7002 -6 6 -6h340c3.2998 0 6 2.7002 6 6z" />
+ <glyph glyph-name="file" unicode="&#xf15b;" horiz-adv-x="384"
+d="M369.9 350.1c9 -9 14.0996 -21.2998 14.0996 -34v-332.1c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48.0996h204.1c12.7002 0 24.9004 -5.09961 33.9004 -14.0996zM332.1 320l-76.0996 76.0996v-76.0996h76.0996zM48 -16h288v288
+h-104c-13.2998 0 -24 10.7002 -24 24v104h-160v-416z" />
+ <glyph glyph-name="file-alt" unicode="&#xf15c;" horiz-adv-x="384"
+d="M288 200v-28c0 -6.59961 -5.40039 -12 -12 -12h-168c-6.59961 0 -12 5.40039 -12 12v28c0 6.59961 5.40039 12 12 12h168c6.59961 0 12 -5.40039 12 -12zM276 128c6.59961 0 12 -5.40039 12 -12v-28c0 -6.59961 -5.40039 -12 -12 -12h-168c-6.59961 0 -12 5.40039 -12 12
+v28c0 6.59961 5.40039 12 12 12h168zM384 316.1v-332.1c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48h204.1c12.7002 0 24.9004 -5.09961 33.9004 -14.0996l83.9004 -83.9004c9 -8.90039 14.0996 -21.2002 14.0996 -33.9004z
+M256 396.1v-76.0996h76.0996zM336 -16v288h-104c-13.2998 0 -24 10.7002 -24 24v104h-160v-416h288z" />
+ <glyph glyph-name="thumbs-up" unicode="&#xf164;" horiz-adv-x="480"
+d="M466.27 161.31c4.6748 -22.6465 0.864258 -44.5371 -8.98926 -62.9893c2.95898 -23.8682 -4.02148 -48.5654 -17.3398 -66.9902c-0.954102 -55.9072 -35.8232 -95.3301 -112.94 -95.3301c-7 0 -15 0.00976562 -22.2197 0.00976562
+c-102.742 0 -133.293 38.9395 -177.803 39.9404c-3.56934 -13.7764 -16.085 -23.9502 -30.9775 -23.9502h-64c-17.6729 0 -32 14.3271 -32 32v240c0 17.6729 14.3271 32 32 32h98.7598c19.1455 16.9531 46.0137 60.6533 68.7598 83.4004
+c13.667 13.667 10.1533 108.6 71.7607 108.6c57.5801 0 95.2695 -31.9355 95.2695 -104.73c0 -18.4092 -3.92969 -33.7295 -8.84961 -46.5391h36.4795c48.6025 0 85.8203 -41.5654 85.8203 -85.5801c0 -19.1504 -4.95996 -34.9902 -13.7305 -49.8408zM404.52 107.48
+c21.5811 20.3838 18.6992 51.0645 5.21094 65.6191c9.44922 0 22.3594 18.9102 22.2695 37.8105c-0.0898438 18.9102 -16.71 37.8203 -37.8203 37.8203h-103.989c0 37.8193 28.3594 55.3691 28.3594 94.5391c0 23.75 0 56.7305 -47.2695 56.7305
+c-18.9102 -18.9102 -9.45996 -66.1797 -37.8203 -94.54c-26.5596 -26.5703 -66.1797 -97.46 -94.54 -97.46h-10.9199v-186.17c53.6113 0 100.001 -37.8203 171.64 -37.8203h37.8203c35.5117 0 60.8203 17.1201 53.1201 65.9004
+c15.2002 8.16016 26.5 36.4395 13.9395 57.5703zM88 16c0 13.2549 -10.7451 24 -24 24s-24 -10.7451 -24 -24s10.7451 -24 24 -24s24 10.7451 24 24z" />
+ <glyph glyph-name="thumbs-down" unicode="&#xf165;" horiz-adv-x="480"
+d="M466.27 222.69c8.77051 -14.8506 13.7305 -30.6904 13.7305 -49.8408c0 -44.0146 -37.2178 -85.5801 -85.8203 -85.5801h-36.4795c4.91992 -12.8096 8.84961 -28.1299 8.84961 -46.5391c0 -72.7949 -37.6895 -104.73 -95.2695 -104.73
+c-61.6074 0 -58.0938 94.9326 -71.7607 108.6c-22.7461 22.7471 -49.6133 66.4473 -68.7598 83.4004h-7.05176c-5.5332 -9.56152 -15.8662 -16 -27.708 -16h-64c-17.6729 0 -32 14.3271 -32 32v240c0 17.6729 14.3271 32 32 32h64c8.11328 0 15.5146 -3.02539 21.1553 -8
+h10.8447c40.9971 0 73.1953 39.9902 176.78 39.9902c7.21973 0 15.2197 0.00976562 22.2197 0.00976562c77.1172 0 111.986 -39.4229 112.94 -95.3301c13.3184 -18.4248 20.2979 -43.1221 17.3398 -66.9902c9.85352 -18.4521 13.6641 -40.3428 8.98926 -62.9893zM64 152
+c13.2549 0 24 10.7451 24 24s-10.7451 24 -24 24s-24 -10.7451 -24 -24s10.7451 -24 24 -24zM394.18 135.27c21.1104 0 37.7305 18.9102 37.8203 37.8203c0.0898438 18.9004 -12.8203 37.8105 -22.2695 37.8105c13.4883 14.5547 16.3701 45.2354 -5.21094 65.6191
+c12.5605 21.1309 1.26074 49.4102 -13.9395 57.5703c7.7002 48.7803 -17.6084 65.9004 -53.1201 65.9004h-37.8203c-71.6387 0 -118.028 -37.8203 -171.64 -37.8203v-186.17h10.9199c28.3604 0 67.9805 -70.8896 94.54 -97.46
+c28.3604 -28.3604 18.9102 -75.6299 37.8203 -94.54c47.2695 0 47.2695 32.9805 47.2695 56.7305c0 39.1699 -28.3594 56.7197 -28.3594 94.5391h103.989z" />
+ <glyph glyph-name="sun" unicode="&#xf185;"
+d="M494.2 226.1c11.2002 -7.59961 17.7998 -20.0996 17.8994 -33.6992c0 -13.4004 -6.69922 -26 -17.7998 -33.5l-59.7998 -40.5l13.7002 -71c2.5 -13.2002 -1.60059 -26.8008 -11.1006 -36.3008s-22.8994 -13.7998 -36.2998 -11.0996l-70.8994 13.7002l-40.4004 -59.9004
+c-7.5 -11.0996 -20.0996 -17.7998 -33.5 -17.7998s-26 6.7002 -33.5 17.9004l-40.4004 59.8994l-70.7998 -13.7002c-13.3994 -2.59961 -26.7998 1.60059 -36.2998 11.1006s-13.7002 23.0996 -11.0996 36.2998l13.6992 71l-59.7998 40.5
+c-11.0996 7.5 -17.7998 20 -17.7998 33.5s6.59961 26 17.7998 33.5996l59.7998 40.5l-13.6992 71c-2.60059 13.2002 1.59961 26.7002 11.0996 36.3008c9.5 9.59961 23 13.6992 36.2998 11.1992l70.7998 -13.6992l40.4004 59.8994c15.0996 22.2998 51.9004 22.2998 67 0
+l40.4004 -59.8994l70.8994 13.6992c13 2.60059 26.6006 -1.59961 36.2002 -11.0996c9.5 -9.59961 13.7002 -23.2002 11.0996 -36.4004l-13.6992 -71zM381.3 140.5l76.7998 52.0996l-76.7998 52l17.6006 91.1006l-91 -17.6006l-51.9004 76.9004l-51.7998 -76.7998
+l-91 17.5996l17.5996 -91.2002l-76.7998 -52l76.7998 -52l-17.5996 -91.1992l90.8994 17.5996l51.9004 -77l51.9004 76.9004l91 -17.6006zM256 296c57.2998 0 104 -46.7002 104 -104s-46.7002 -104 -104 -104s-104 46.7002 -104 104s46.7002 104 104 104zM256 136
+c30.9004 0 56 25.0996 56 56s-25.0996 56 -56 56s-56 -25.0996 -56 -56s25.0996 -56 56 -56z" />
+ <glyph glyph-name="moon" unicode="&#xf186;" horiz-adv-x="511"
+d="M279.135 -64c-141.424 0 -256 114.64 -256 256c0 141.425 114.641 256 256 256c13.0068 -0.00195312 33.9443 -1.91797 46.7354 -4.27734c44.0205 -8.13086 53.7666 -66.8691 15.0215 -88.9189c-41.374 -23.5439 -67.4336 -67.4121 -67.4336 -115.836
+c0 -83.5234 75.9238 -146.475 158.272 -130.792c43.6904 8.32129 74.5186 -42.5693 46.248 -77.4004c-47.8613 -58.9717 -120.088 -94.7754 -198.844 -94.7754zM279.135 400c-114.875 0 -208 -93.125 -208 -208s93.125 -208 208 -208
+c65.2314 0 123.439 30.0361 161.575 77.0244c-111.611 -21.2568 -215.252 64.0957 -215.252 177.943c0 67.5127 36.9326 126.392 91.6934 157.555c-12.3271 2.27637 -25.0312 3.47754 -38.0166 3.47754z" />
+ <glyph glyph-name="caret-square-left" unicode="&#xf191;" horiz-adv-x="448"
+d="M272 290.9v-197.801c0 -10.6992 -13 -16.0996 -20.5 -8.5l-98.2998 98.9004c-4.7002 4.7002 -4.7002 12.2002 0 16.9004l98.2998 98.8994c7.5 7.7002 20.5 2.2998 20.5 -8.39941zM448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352
+c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM400 22v340c0 3.2998 -2.7002 6 -6 6h-340c-3.2998 0 -6 -2.7002 -6 -6v-340c0 -3.2998 2.7002 -6 6 -6h340c3.2998 0 6 2.7002 6 6z" />
+ <glyph glyph-name="dot-circle" unicode="&#xf192;"
+d="M256 392c-110.549 0 -200 -89.4678 -200 -200c0 -110.549 89.4678 -200 200 -200c110.549 0 200 89.4678 200 200c0 110.549 -89.4678 200 -200 200zM256 440c136.967 0 248 -111.033 248 -248s-111.033 -248 -248 -248s-248 111.033 -248 248s111.033 248 248 248z
+M256 272c44.1826 0 80 -35.8174 80 -80s-35.8174 -80 -80 -80s-80 35.8174 -80 80s35.8174 80 80 80z" />
+ <glyph glyph-name="building" unicode="&#xf1ad;" horiz-adv-x="448"
+d="M128 300v40c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12zM268 288c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-40
+c0 -6.59961 -5.40039 -12 -12 -12h-40zM140 192c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-40zM268 192c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h40
+c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-40zM192 108c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-40zM268 96c-6.59961 0 -12 5.40039 -12 12v40
+c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-40zM448 -28v-36h-448v36c0 6.59961 5.40039 12 12 12h19.5v440c0 13.2998 10.7002 24 24 24h337c13.2998 0 24 -10.7002 24 -24v-440h19.5
+c6.59961 0 12 -5.40039 12 -12zM79.5 -15h112.5v67c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-67h112.5v414l-288.5 1z" />
+ <glyph glyph-name="file-pdf" unicode="&#xf1c1;" horiz-adv-x="384"
+d="M369.9 350.1c9 -9 14.0996 -21.2998 14.0996 -34v-332.1c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48.0996h204.1c12.7002 0 24.9004 -5.09961 33.9004 -14.0996zM332.1 320l-76.0996 76.0996v-76.0996h76.0996zM48 -16h288v288
+h-104c-13.2998 0 -24 10.7002 -24 24v104h-160v-416zM298.2 127.7c10.5 -10.5 8 -38.7002 -17.5 -38.7002c-14.7998 0 -36.9004 6.7998 -55.7998 17c-21.6006 -3.59961 -46 -12.7002 -68.4004 -20.0996c-50.0996 -86.4004 -79.4004 -47 -76.0996 -31.2002
+c4 20 31 35.8994 51 46.2002c10.5 18.3994 25.3994 50.5 35.3994 74.3994c-7.39941 28.6006 -11.3994 51 -7 67.1006c4.7998 17.6992 38.4004 20.2998 42.6006 -5.90039c4.69922 -15.4004 -1.5 -39.9004 -5.40039 -56c8.09961 -21.2998 19.5996 -35.7998 36.7998 -46.2998
+c17.4004 2.2002 52.2002 5.5 64.4004 -6.5zM100.1 49.9004c0 -0.700195 11.4004 4.69922 30.4004 35c-5.90039 -5.5 -25.2998 -21.3008 -30.4004 -35zM181.7 240.5c-2.5 0 -2.60059 -26.9004 1.7998 -40.7998c4.90039 8.7002 5.59961 40.7998 -1.7998 40.7998zM157.3 103.9
+c15.9004 6.09961 34 14.8994 54.7998 19.1992c-11.1992 8.30078 -21.7998 20.4004 -30.0996 35.5c-6.7002 -17.6992 -15 -37.7998 -24.7002 -54.6992zM288.9 108.9c3.59961 2.39941 -2.2002 10.3994 -37.3008 7.7998c32.3008 -13.7998 37.3008 -7.7998 37.3008 -7.7998z" />
+ <glyph glyph-name="file-word" unicode="&#xf1c2;" horiz-adv-x="384"
+d="M369.9 350.1c9 -9 14.0996 -21.2998 14.0996 -34v-332.1c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48.0996h204.1c12.7002 0 24.9004 -5.09961 33.9004 -14.0996zM332.1 320l-76.0996 76.0996v-76.0996h76.0996zM48 -16h288v288
+h-104c-13.2998 0 -24 10.7002 -24 24v104h-160v-416zM268.1 192v0.200195h15.8008c7.7998 0 13.5 -7.2998 11.5996 -14.9004c-4.2998 -17 -13.7002 -54.0996 -34.5 -136c-1.2998 -5.39941 -6.09961 -9.09961 -11.5996 -9.09961h-24.7002
+c-5.5 0 -10.2998 3.7998 -11.6006 9.09961c-5.2998 20.9004 -17.7998 71 -17.8994 71.4004l-2.90039 17.2998c-0.5 -5.2998 -1.5 -11.0996 -3 -17.2998l-17.8994 -71.4004c-1.30078 -5.39941 -6.10059 -9.09961 -11.6006 -9.09961h-25.2002
+c-5.59961 0 -10.3994 3.7002 -11.6992 9.09961c-6.5 26.5 -25.2002 103.4 -33.2002 136c-1.7998 7.5 3.89941 14.7998 11.7002 14.7998h16.7998c5.7998 0 10.7002 -4.09961 11.7998 -9.69922c5 -25.7002 18.4004 -93.8008 19.0996 -99
+c0.300781 -1.7002 0.400391 -3.10059 0.5 -4.2002c0.800781 7.5 0.400391 4.7002 24.8008 103.7c1.39941 5.2998 6.19922 9.09961 11.6992 9.09961h13.3008c5.59961 0 10.3994 -3.7998 11.6992 -9.2002c23.9004 -99.7002 22.8008 -94.3994 23.6006 -99.5
+c0.299805 -1.7002 0.5 -3.09961 0.700195 -4.2998c0.599609 8.09961 0.399414 5.7998 21 103.5c1.09961 5.5 6 9.5 11.6992 9.5z" />
+ <glyph glyph-name="file-excel" unicode="&#xf1c3;" horiz-adv-x="384"
+d="M369.9 350.1c9 -9 14.0996 -21.2998 14.0996 -34v-332.1c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48.0996h204.1c12.7002 0 24.9004 -5.09961 33.9004 -14.0996zM332.1 320l-76.0996 76.0996v-76.0996h76.0996zM48 -16h288v288
+h-104c-13.2998 0 -24 10.7002 -24 24v104h-160v-416zM260 224c9.2002 0 15 -10 10.2998 -18c-16 -27.5 -45.5996 -76.9004 -46.2998 -78l46.4004 -78c4.59961 -8 -1.10059 -18 -10.4004 -18h-28.7998c-4.40039 0 -8.5 2.40039 -10.6006 6.2998
+c-22.6992 41.7998 -13.6992 27.5 -28.5996 57.7002c-5.59961 -12.7002 -6.90039 -17.7002 -28.5996 -57.7002c-2.10059 -3.89941 -6.10059 -6.2998 -10.5 -6.2998h-28.9004c-9.2998 0 -15.0996 10 -10.4004 18l46.3008 78l-46.3008 78c-4.59961 8 1.10059 18 10.4004 18
+h28.9004c4.39941 0 8.5 -2.40039 10.5996 -6.2998c21.7002 -40.4004 14.7002 -28.6006 28.5996 -57.7002c6.40039 15.2998 10.6006 24.5996 28.6006 57.7002c2.09961 3.89941 6.09961 6.2998 10.5 6.2998h28.7998z" />
+ <glyph glyph-name="file-powerpoint" unicode="&#xf1c4;" horiz-adv-x="384"
+d="M369.9 350.1c9 -9 14.0996 -21.2998 14.0996 -34v-332.1c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48.0996h204.1c12.7002 0 24.9004 -5.09961 33.9004 -14.0996zM332.1 320l-76.0996 76.0996v-76.0996h76.0996zM48 -16h288v288
+h-104c-13.2998 0 -24 10.7002 -24 24v104h-160v-416zM120 44v168c0 6.59961 5.40039 12 12 12h69.2002c36.7002 0 62.7998 -27 62.7998 -66.2998c0 -74.2998 -68.7002 -66.5 -95.5 -66.5v-47.2002c0 -6.59961 -5.40039 -12 -12 -12h-24.5c-6.59961 0 -12 5.40039 -12 12z
+M168.5 131.4h23c7.90039 0 13.9004 2.39941 18.0996 7.19922c8.5 9.80078 8.40039 28.5 0.100586 37.8008c-4.10059 4.59961 -9.90039 7 -17.4004 7h-23.8994v-52h0.0996094z" />
+ <glyph glyph-name="file-image" unicode="&#xf1c5;" horiz-adv-x="384"
+d="M369.9 350.1c9 -9 14.0996 -21.2998 14.0996 -34v-332.1c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48.0996h204.1c12.7002 0 24.9004 -5.09961 33.9004 -14.0996zM332.1 320l-76.0996 76.0996v-76.0996h76.0996zM48 -16h288v288
+h-104c-13.2998 0 -24 10.7002 -24 24v104h-160v-416zM80 32v64l39.5 39.5c4.7002 4.7002 12.2998 4.7002 17 0l39.5 -39.5l87.5 87.5c4.7002 4.7002 12.2998 4.7002 17 0l23.5 -23.5v-128h-224zM128 272c26.5 0 48 -21.5 48 -48s-21.5 -48 -48 -48s-48 21.5 -48 48
+s21.5 48 48 48z" />
+ <glyph glyph-name="file-archive" unicode="&#xf1c6;" horiz-adv-x="384"
+d="M128.3 288h32v-32h-32v32zM192.3 384v-32h-32v32h32zM128.3 352h32v-32h-32v32zM192.3 320v-32h-32v32h32zM369.9 350.1c9 -9 14.0996 -21.2998 14.0996 -34v-332.1c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48.0996h204.1
+c12.7002 0 24.9004 -5.09961 33.9004 -14.0996zM256 396.1v-76.0996h76.0996zM336 -16v288h-104c-13.2998 0 -24 10.7002 -24 24v104h-48.2998v-16h-32v16h-79.7002v-416h288zM194.2 182.3l17.2998 -87.7002c6.40039 -32.3994 -18.4004 -62.5996 -51.5 -62.5996
+c-33.2002 0 -58 30.4004 -51.4004 62.9004l19.7002 97.0996v32h32v-32h22.1006c5.7998 0 10.6992 -4.09961 11.7998 -9.7002zM160.3 57.9004c17.9004 0 32.4004 12.0996 32.4004 27c0 14.8994 -14.5 27 -32.4004 27c-17.8994 0 -32.3994 -12.1006 -32.3994 -27
+c0 -14.9004 14.5 -27 32.3994 -27zM192.3 256v-32h-32v32h32z" />
+ <glyph glyph-name="file-audio" unicode="&#xf1c7;" horiz-adv-x="384"
+d="M369.941 350.059c7.75977 -7.75977 14.0586 -22.9658 14.0586 -33.9404v-332.118c0 -26.5098 -21.4902 -48 -48 -48h-288c-26.5098 0 -48 21.4902 -48 48v416c0 26.5098 21.4902 48 48 48h204.118c10.9746 0 26.1807 -6.29883 33.9404 -14.0586zM332.118 320
+l-76.1182 76.1182v-76.1182h76.1182zM48 -16h288v288h-104c-13.2549 0 -24 10.7451 -24 24v104h-160v-416zM192 60.0244c0 -10.6914 -12.9258 -16.0459 -20.4854 -8.48535l-35.5146 35.9746h-28c-6.62695 0 -12 5.37305 -12 12v56c0 6.62695 5.37305 12 12 12h28
+l35.5146 36.9473c7.56055 7.56055 20.4854 2.20605 20.4854 -8.48535v-135.951zM233.201 107.154c9.05078 9.29688 9.05957 24.1328 0.000976562 33.4385c-22.1494 22.752 12.2344 56.2461 34.3945 33.4814c27.1982 -27.9404 27.2119 -72.4443 0.000976562 -100.401
+c-21.793 -22.3857 -56.9463 10.3154 -34.3965 33.4814z" />
+ <glyph glyph-name="file-video" unicode="&#xf1c8;" horiz-adv-x="384"
+d="M369.941 350.059c7.75977 -7.75977 14.0586 -22.9658 14.0586 -33.9404v-332.118c0 -26.5098 -21.4902 -48 -48 -48h-288c-26.5098 0 -48 21.4902 -48 48v416c0 26.5098 21.4902 48 48 48h204.118c10.9746 0 26.1807 -6.29883 33.9404 -14.0586zM332.118 320
+l-76.1182 76.1182v-76.1182h76.1182zM48 -16h288v288h-104c-13.2549 0 -24 10.7451 -24 24v104h-160v-416zM276.687 195.303c10.0049 10.0049 27.3135 2.99707 27.3135 -11.3135v-111.976c0 -14.2939 -17.2959 -21.332 -27.3135 -11.3135l-52.6865 52.6738v-37.374
+c0 -11.0459 -8.9541 -20 -20 -20h-104c-11.0459 0 -20 8.9541 -20 20v104c0 11.0459 8.9541 20 20 20h104c11.0459 0 20 -8.9541 20 -20v-37.374z" />
+ <glyph glyph-name="file-code" unicode="&#xf1c9;" horiz-adv-x="384"
+d="M149.9 98.9004c3.5 -3.30078 3.69922 -8.90039 0.399414 -12.4004l-17.3994 -18.5996c-1.60059 -1.80078 -4 -2.80078 -6.40039 -2.80078c-2.2002 0 -4.40039 0.900391 -6 2.40039l-57.7002 54.0996c-3.7002 3.40039 -3.7002 9.30078 0 12.8008l57.7002 54.0996
+c3.40039 3.2998 9 3.2002 12.4004 -0.400391l17.3994 -18.5996l0.200195 -0.200195c3.2002 -3.59961 2.7998 -9.2002 -0.799805 -12.3994l-32.7998 -28.9004l32.7998 -28.9004zM369.9 350.1c9 -9 14.0996 -21.2998 14.0996 -34v-332.1c0 -26.5 -21.5 -48 -48 -48h-288
+c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48.0996h204.1c12.7002 0 24.9004 -5.09961 33.9004 -14.0996zM256 396.1v-76.0996h76.0996zM336 -16v288h-104c-13.2998 0 -24 10.7002 -24 24v104h-160v-416h288zM209.6 234l24.4004 -7
+c4.7002 -1.2998 7.40039 -6.2002 6 -10.9004l-54.7002 -188.199c-1.2998 -4.60059 -6.2002 -7.40039 -10.8994 -6l-24.4004 7.09961c-4.7002 1.2998 -7.40039 6.2002 -6 10.9004l54.7002 188.1c1.39941 4.7002 6.2002 7.40039 10.8994 6zM234.1 157.1
+c-3.5 3.30078 -3.69922 8.90039 -0.399414 12.4004l17.3994 18.5996c3.30078 3.60059 8.90039 3.7002 12.4004 0.400391l57.7002 -54.0996c3.7002 -3.40039 3.7002 -9.30078 0 -12.8008l-57.7002 -54.0996c-3.5 -3.2998 -9.09961 -3.09961 -12.4004 0.400391
+l-17.3994 18.5996l-0.200195 0.200195c-3.2002 3.59961 -2.7998 9.2002 0.799805 12.3994l32.7998 28.9004l-32.7998 28.9004z" />
+ <glyph glyph-name="life-ring" unicode="&#xf1cd;"
+d="M256 -56c-136.967 0 -248 111.033 -248 248s111.033 248 248 248s248 -111.033 248 -248s-111.033 -248 -248 -248zM152.602 20.7197c63.2178 -38.3184 143.579 -38.3184 206.797 0l-53.4111 53.4111c-31.8467 -13.5215 -68.168 -13.5059 -99.9746 0zM336 192
+c0 44.1123 -35.8877 80 -80 80s-80 -35.8877 -80 -80s35.8877 -80 80 -80s80 35.8877 80 80zM427.28 88.6016c38.3184 63.2178 38.3184 143.579 0 206.797l-53.4111 -53.4111c13.5215 -31.8467 13.5049 -68.168 0 -99.9746zM359.397 363.28
+c-63.2168 38.3184 -143.578 38.3184 -206.796 0l53.4111 -53.4111c31.8457 13.5215 68.167 13.5049 99.9736 0zM84.7197 295.398c-38.3184 -63.2178 -38.3184 -143.579 0 -206.797l53.4111 53.4111c-13.5215 31.8467 -13.5059 68.168 0 99.9746z" />
+ <glyph glyph-name="paper-plane" unicode="&#xf1d8;"
+d="M440 441.5c34.5996 19.9004 77.5996 -8.7998 71.5 -48.9004l-59.4004 -387.199c-2.2998 -14.5 -11.0996 -27.3008 -23.8994 -34.5c-7.2998 -4.10059 -15.4004 -6.2002 -23.6006 -6.2002c-6.19922 0 -12.3994 1.2002 -18.2998 3.59961l-111.899 46.2002l-43.8008 -59.0996
+c-27.3994 -36.9004 -86.5996 -17.8008 -86.5996 28.5996v84.4004l-114.3 47.2998c-36.7998 15.0996 -40.1006 66 -5.7002 85.8994zM192 -16l36.5996 49.5l-36.5996 15.0996v-64.5996zM404.6 12.7002l59.4004 387.3l-416 -240l107.8 -44.5996l211.5 184.3
+c14.2002 12.2998 34.4004 -5.7002 23.7002 -21.2002l-140.2 -202.3z" />
+ <glyph glyph-name="futbol" unicode="&#xf1e3;" horiz-adv-x="496"
+d="M483.8 268.6c42.2998 -130.199 -29 -270.1 -159.2 -312.399c-25.5 -8.2998 -51.2998 -12.2002 -76.6992 -12.2002c-104.5 0 -201.7 66.5996 -235.7 171.4c-42.2998 130.199 29 270.1 159.2 312.399c25.5 8.2998 51.2998 12.2002 76.6992 12.2002
+c104.5 0 201.7 -66.5996 235.7 -171.4zM409.3 74.9004c6.10059 8.39941 12.1006 16.8994 16.7998 26.1992c14.3008 28.1006 21.5 58.5 21.7002 89.2002l-38.8994 36.4004l-71.1006 -22.1006l-24.3994 -75.1992l43.6992 -60.9004zM409.3 310.3
+c-24.5 33.4004 -58.7002 58.4004 -97.8994 71.4004l-47.4004 -26.2002v-73.7998l64.2002 -46.5l70.7002 22zM184.9 381.6c-39.9004 -13.2998 -73.5 -38.5 -97.8008 -71.8994l10.1006 -52.5l70.5996 -22l64.2002 46.5v73.7998zM139 68.5l43.5 61.7002l-24.2998 74.2998
+l-71.1006 22.2002l-39 -36.4004c0.5 -55.7002 23.4004 -95.2002 37.8008 -115.3zM187.2 1.5c64.0996 -20.4004 115.5 -1.7998 121.7 0l22.3994 48.0996l-44.2998 61.7002h-78.5996l-43.6006 -61.7002z" />
+ <glyph glyph-name="newspaper" unicode="&#xf1ea;" horiz-adv-x="576"
+d="M552 384c13.2549 0 24 -10.7451 24 -24v-336c0 -13.2549 -10.7451 -24 -24 -24h-496c-30.9277 0 -56 25.0723 -56 56v272c0 13.2549 10.7451 24 24 24h42.752c6.60547 18.623 24.3896 32 45.248 32h440zM48 56c0 -4.41113 3.58887 -8 8 -8s8 3.58887 8 8v248h-16v-248z
+M528 48v288h-416v-280c0 -2.7168 -0.204102 -5.38574 -0.578125 -8h416.578zM172 168c-6.62695 0 -12 5.37305 -12 12v96c0 6.62695 5.37305 12 12 12h136c6.62695 0 12 -5.37305 12 -12v-96c0 -6.62695 -5.37305 -12 -12 -12h-136zM200 248v-40h80v40h-80zM160 108v24
+c0 6.62695 5.37305 12 12 12h136c6.62695 0 12 -5.37305 12 -12v-24c0 -6.62695 -5.37305 -12 -12 -12h-136c-6.62695 0 -12 5.37305 -12 12zM352 108v24c0 6.62695 5.37305 12 12 12h104c6.62695 0 12 -5.37305 12 -12v-24c0 -6.62695 -5.37305 -12 -12 -12h-104
+c-6.62695 0 -12 5.37305 -12 12zM352 252v24c0 6.62695 5.37305 12 12 12h104c6.62695 0 12 -5.37305 12 -12v-24c0 -6.62695 -5.37305 -12 -12 -12h-104c-6.62695 0 -12 5.37305 -12 12zM352 180v24c0 6.62695 5.37305 12 12 12h104c6.62695 0 12 -5.37305 12 -12v-24
+c0 -6.62695 -5.37305 -12 -12 -12h-104c-6.62695 0 -12 5.37305 -12 12z" />
+ <glyph glyph-name="bell-slash" unicode="&#xf1f6;" horiz-adv-x="640"
+d="M633.99 -23.0195c6.91016 -5.52051 8.01953 -15.5908 2.5 -22.4902l-10 -12.4902c-5.53027 -6.88965 -15.5898 -8.00977 -22.4902 -2.49023l-598 467.51c-6.90039 5.52051 -8.01953 15.5908 -2.49023 22.4902l10 12.4902
+c5.52051 6.90039 15.5898 8.00977 22.4902 2.49023zM163.53 80h182.84l61.3994 -48h-279.659c-19.1201 0 -31.9902 15.5996 -32.1006 32c-0.0498047 7.5498 2.61035 15.2598 8.61035 21.71c18.3701 19.7402 51.5703 49.6904 54.8398 140.42l45.4697 -35.5498
+c-6.91992 -54.7803 -24.6895 -88.5498 -41.3994 -110.58zM320 352c-23.3496 0 -45 -7.17969 -62.9404 -19.4004l-38.1699 29.8408c19.6807 15.7793 43.1104 27.3096 69.1299 32.7197v20.8398c0 17.6699 14.3203 32 31.9805 32s31.9805 -14.3301 31.9805 -32v-20.8398
+c73.46 -15.2598 127.939 -77.46 127.939 -155.16c0 -41.3604 6.03027 -70.7197 14.3398 -92.8496l-59.5293 46.54c-1.63086 13.96 -2.77051 28.8896 -2.79004 45.7295c0 0.200195 0.0595703 0.379883 0.0595703 0.580078c0 61.8604 -50.1396 112 -112 112zM320 -64
+c-35.3203 0 -63.9697 28.6504 -63.9697 64h127.939c0 -35.3496 -28.6494 -64 -63.9697 -64z" />
+ <glyph glyph-name="copyright" unicode="&#xf1f9;"
+d="M256 440c136.967 0 248 -111.033 248 -248s-111.033 -248 -248 -248s-248 111.033 -248 248s111.033 248 248 248zM256 -8c110.549 0 200 89.4678 200 200c0 110.549 -89.4678 200 -200 200c-110.549 0 -200 -89.4688 -200 -200c0 -110.549 89.4678 -200 200 -200z
+M363.351 93.0645c-9.61328 -9.71289 -45.5293 -41.3965 -104.064 -41.3965c-82.4297 0 -140.484 61.4248 -140.484 141.567c0 79.1514 60.2754 139.4 139.763 139.4c55.5303 0 88.7373 -26.6201 97.5928 -34.7783c2.13379 -1.96289 3.86523 -5.9082 3.86523 -8.80762
+c0 -1.95508 -0.864258 -4.87402 -1.92969 -6.51465l-18.1543 -28.1133c-3.8418 -5.9502 -11.9668 -7.28223 -17.499 -2.9209c-8.5957 6.77637 -31.8145 22.5381 -61.708 22.5381c-48.3037 0 -77.916 -35.3301 -77.916 -80.082c0 -41.5889 26.8877 -83.6924 78.2764 -83.6924
+c32.6572 0 56.8428 19.0391 65.7266 27.2256c5.26953 4.85645 13.5957 4.03906 17.8193 -1.73828l19.8652 -27.1699c1.28613 -1.74512 2.33008 -4.91992 2.33008 -7.08789c0 -2.72363 -1.56055 -6.5 -3.48242 -8.42969z" />
+ <glyph glyph-name="closed-captioning" unicode="&#xf20a;"
+d="M464 384c26.5 0 48 -21.5 48 -48v-288c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v288c0 26.5 21.5 48 48 48h416zM458 48c3.2998 0 6 2.7002 6 6v276c0 3.2998 -2.7002 6 -6 6h-404c-3.2998 0 -6 -2.7002 -6 -6v-276c0 -3.2998 2.7002 -6 6 -6h404z
+M246.9 133.7c1.69922 -2.40039 1.5 -5.60059 -0.5 -7.7002c-53.6006 -56.7998 -172.801 -32.0996 -172.801 67.9004c0 97.2998 121.7 119.5 172.5 70.0996c2.10059 -2 2.5 -3.2002 1 -5.7002l-17.5 -30.5c-1.89941 -3.09961 -6.19922 -4 -9.09961 -1.7002
+c-40.7998 32 -94.5996 14.9004 -94.5996 -31.1992c0 -48 51 -70.5 92.1992 -32.6006c2.80078 2.5 7.10059 2.10059 9.2002 -0.899414zM437.3 133.7c1.7002 -2.40039 1.5 -5.60059 -0.5 -7.7002c-53.5996 -56.9004 -172.8 -32.0996 -172.8 67.9004
+c0 97.2998 121.7 119.5 172.5 70.0996c2.09961 -2 2.5 -3.2002 1 -5.7002l-17.5 -30.5c-1.90039 -3.09961 -6.2002 -4 -9.09961 -1.7002c-40.8008 32 -94.6006 14.9004 -94.6006 -31.1992c0 -48 51 -70.5 92.2002 -32.6006c2.7998 2.5 7.09961 2.10059 9.2002 -0.899414z
+" />
+ <glyph glyph-name="object-group" unicode="&#xf247;"
+d="M500 320h-12v-256h12c6.62695 0 12 -5.37305 12 -12v-72c0 -6.62695 -5.37305 -12 -12 -12h-72c-6.62695 0 -12 5.37305 -12 12v12h-320v-12c0 -6.62695 -5.37305 -12 -12 -12h-72c-6.62695 0 -12 5.37305 -12 12v72c0 6.62695 5.37305 12 12 12h12v256h-12
+c-6.62695 0 -12 5.37305 -12 12v72c0 6.62695 5.37305 12 12 12h72c6.62695 0 12 -5.37305 12 -12v-12h320v12c0 6.62695 5.37305 12 12 12h72c6.62695 0 12 -5.37305 12 -12v-72c0 -6.62695 -5.37305 -12 -12 -12zM448 384v-32h32v32h-32zM32 384v-32h32v32h-32zM64 0v32
+h-32v-32h32zM480 0v32h-32v-32h32zM440 64v256h-12c-6.62695 0 -12 5.37305 -12 12v12h-320v-12c0 -6.62695 -5.37305 -12 -12 -12h-12v-256h12c6.62695 0 12 -5.37305 12 -12v-12h320v12c0 6.62695 5.37305 12 12 12h12zM404 256c6.62695 0 12 -5.37207 12 -12v-168
+c0 -6.62793 -5.37305 -12 -12 -12h-200c-6.62695 0 -12 5.37207 -12 12v52h-84c-6.62695 0 -12 5.37207 -12 12v168c0 6.62793 5.37305 12 12 12h200c6.62695 0 12 -5.37207 12 -12v-52h84zM136 280v-112h144v112h-144zM376 104v112h-56v-76
+c0 -6.62793 -5.37305 -12 -12 -12h-76v-24h144z" />
+ <glyph glyph-name="object-ungroup" unicode="&#xf248;" horiz-adv-x="576"
+d="M564 224h-12v-160h12c6.62695 0 12 -5.37305 12 -12v-72c0 -6.62695 -5.37305 -12 -12 -12h-72c-6.62695 0 -12 5.37305 -12 12v12h-224v-12c0 -6.62695 -5.37305 -12 -12 -12h-72c-6.62695 0 -12 5.37305 -12 12v72c0 6.62695 5.37305 12 12 12h12v24h-88v-12
+c0 -6.62695 -5.37305 -12 -12 -12h-72c-6.62695 0 -12 5.37305 -12 12v72c0 6.62695 5.37305 12 12 12h12v160h-12c-6.62695 0 -12 5.37305 -12 12v72c0 6.62695 5.37305 12 12 12h72c6.62695 0 12 -5.37305 12 -12v-12h224v12c0 6.62695 5.37305 12 12 12h72
+c6.62695 0 12 -5.37305 12 -12v-72c0 -6.62695 -5.37305 -12 -12 -12h-12v-24h88v12c0 6.62695 5.37305 12 12 12h72c6.62695 0 12 -5.37305 12 -12v-72c0 -6.62695 -5.37305 -12 -12 -12zM352 384v-32h32v32h-32zM352 128v-32h32v32h-32zM64 96v32h-32v-32h32zM64 352v32
+h-32v-32h32zM96 136h224v12c0 6.62695 5.37305 12 12 12h12v160h-12c-6.62695 0 -12 5.37305 -12 12v12h-224v-12c0 -6.62695 -5.37305 -12 -12 -12h-12v-160h12c6.62695 0 12 -5.37305 12 -12v-12zM224 0v32h-32v-32h32zM504 64v160h-12c-6.62695 0 -12 5.37305 -12 12v12
+h-88v-88h12c6.62695 0 12 -5.37305 12 -12v-72c0 -6.62695 -5.37305 -12 -12 -12h-72c-6.62695 0 -12 5.37305 -12 12v12h-88v-24h12c6.62695 0 12 -5.37305 12 -12v-12h224v12c0 6.62695 5.37305 12 12 12h12zM544 0v32h-32v-32h32zM544 256v32h-32v-32h32z" />
+ <glyph glyph-name="sticky-note" unicode="&#xf249;" horiz-adv-x="448"
+d="M448 99.8936c0 -10.9746 -6.29883 -26.1797 -14.0586 -33.9404l-83.8828 -83.8818c-7.75977 -7.76074 -22.9658 -14.0596 -33.9404 -14.0596h-268.118c-26.5098 0 -48 21.4902 -48 48v351.988c0 26.5098 21.4902 48 48 48h352c26.5098 0 48 -21.4902 48 -48v-268.106z
+M320 19.8936l76.1182 76.1182h-76.1182v-76.1182zM400 368h-352v-351.988h224v104c0 13.2549 10.7451 24 24 24h104v223.988z" />
+ <glyph glyph-name="clone" unicode="&#xf24d;"
+d="M464 448c26.5098 0 48 -21.4902 48 -48v-320c0 -26.5098 -21.4902 -48 -48 -48h-48v-48c0 -26.5098 -21.4902 -48 -48 -48h-320c-26.5098 0 -48 21.4902 -48 48v320c0 26.5098 21.4902 48 48 48h48v48c0 26.5098 21.4902 48 48 48h320zM362 -16c3.31152 0 6 2.68848 6 6
+v42h-224c-26.5098 0 -48 21.4902 -48 48v224h-42c-3.31152 0 -6 -2.68848 -6 -6v-308c0 -3.31152 2.68848 -6 6 -6h308zM458 80c3.31152 0 6 2.68848 6 6v308c0 3.31152 -2.68848 6 -6 6h-308c-3.31152 0 -6 -2.68848 -6 -6v-308c0 -3.31152 2.68848 -6 6 -6h308z" />
+ <glyph glyph-name="hourglass" unicode="&#xf254;" horiz-adv-x="384"
+d="M368 400c0 -80.0996 -31.8984 -165.619 -97.1797 -208c64.9912 -42.1934 97.1797 -127.436 97.1797 -208h4c6.62695 0 12 -5.37305 12 -12v-24c0 -6.62695 -5.37305 -12 -12 -12h-360c-6.62695 0 -12 5.37305 -12 12v24c0 6.62695 5.37305 12 12 12h4
+c0 80.0996 31.8994 165.619 97.1797 208c-64.9912 42.1934 -97.1797 127.436 -97.1797 208h-4c-6.62695 0 -12 5.37305 -12 12v24c0 6.62695 5.37305 12 12 12h360c6.62695 0 12 -5.37305 12 -12v-24c0 -6.62695 -5.37305 -12 -12 -12h-4zM64 400
+c0 -101.621 57.3066 -184 128 -184s128 82.3799 128 184h-256zM320 -16c0 101.62 -57.3076 184 -128 184s-128 -82.3799 -128 -184h256z" />
+ <glyph glyph-name="hand-rock" unicode="&#xf255;"
+d="M408.864 368.948c48.8213 20.751 103.136 -15.0723 103.136 -67.9111v-114.443c0 -15.3955 -3.08887 -30.3906 -9.18262 -44.5674l-42.835 -99.6562c-4.99707 -11.625 -3.98242 -18.8574 -3.98242 -42.3701c0 -17.6729 -14.3271 -32 -32 -32h-252
+c-17.6729 0 -32 14.3271 -32 32c0 27.3301 1.1416 29.2012 -3.11035 32.9033l-97.71 85.0811c-24.8994 21.6797 -39.1797 52.8926 -39.1797 85.6338v56.9531c0 47.4277 44.8457 82.0215 91.0459 71.1807c1.96094 55.751 63.5107 87.8262 110.671 60.8057
+c29.1895 31.0713 78.8604 31.4473 108.334 -0.0214844c32.7051 18.6846 76.4121 10.3096 98.8135 -23.5879zM464 186.594v114.445c0 34.29 -52 33.8232 -52 0.676758c0 -8.83594 -7.16309 -16 -16 -16h-7c-8.83691 0 -16 7.16406 -16 16v26.751
+c0 34.457 -52 33.707 -52 0.676758v-27.4287c0 -8.83594 -7.16309 -16 -16 -16h-7c-8.83691 0 -16 7.16406 -16 16v40.4658c0 34.3525 -52 33.8115 -52 0.677734v-41.1436c0 -8.83594 -7.16406 -16 -16 -16h-7c-8.83594 0 -16 7.16406 -16 16v26.751
+c0 34.4023 -52 33.7744 -52 0.676758v-116.571c0 -8.83203 -7.16797 -16 -16 -16c-3.30664 0 -8.01367 1.7627 -10.5068 3.93359l-7 6.09473c-3.03223 2.64062 -5.49316 8.04688 -5.49316 12.0674v0v41.2275c0 34.2148 -52 33.8857 -52 0.677734v-56.9531
+c0 -18.8555 8.27441 -36.874 22.7002 -49.4365l97.71 -85.0801c12.4502 -10.8398 19.5898 -26.4463 19.5898 -42.8164v-10.2861h220v7.07617c0 13.21 2.65332 26.0791 7.88281 38.25l42.835 99.6553c2.91602 6.75391 5.28223 18.207 5.28223 25.5635v0.0488281z" />
+ <glyph glyph-name="hand-paper" unicode="&#xf256;" horiz-adv-x="448"
+d="M372.57 335.359c39.9062 5.63281 75.4297 -25.7393 75.4297 -66.3594v-131.564c-0.00195312 -12.7666 -2.33008 -33.2246 -5.19531 -45.666l-30.1836 -130.958c-3.34668 -14.5234 -16.2783 -24.8125 -31.1816 -24.8125h-222.897
+c-9.10352 0 -20.7793 6.01758 -26.0615 13.4316l-119.97 168.415c-21.2441 29.8203 -14.8047 71.3574 14.5498 93.1533c18.7754 13.9395 42.1309 16.2979 62.083 8.87109v126.13c0 44.0547 41.125 75.5439 82.4053 64.9834c23.8926 48.1963 92.3535 50.2471 117.982 0.74707
+c42.5186 11.1445 83.0391 -21.9346 83.0391 -65.5469v-10.8242zM399.997 137.437l-0.00195312 131.563c0 24.9492 -36.5703 25.5508 -36.5703 -0.691406v-76.3086c0 -8.83691 -7.16309 -16 -16 -16h-6.85645c-8.83691 0 -16 7.16309 -16 16v154.184
+c0 25.501 -36.5703 26.3633 -36.5703 0.691406v-154.875c0 -8.83691 -7.16309 -16 -16 -16h-6.85645c-8.83691 0 -16 7.16309 -16 16v188.309c0 25.501 -36.5703 26.3545 -36.5703 0.691406v-189c0 -8.83691 -7.16309 -16 -16 -16h-6.85645c-8.83691 0 -16 7.16309 -16 16
+v153.309c0 25.501 -36.5713 26.3359 -36.5713 0.691406v-206.494c0 -15.5703 -20.0352 -21.9092 -29.0303 -9.2832l-27.1279 38.0791c-14.3711 20.1709 -43.833 -2.33496 -29.3945 -22.6045l115.196 -161.697h201.92l27.3252 118.551
+c2.63086 11.417 3.96484 23.1553 3.96484 34.8857z" />
+ <glyph glyph-name="hand-scissors" unicode="&#xf257;"
+d="M256 -32c-44.9561 0 -77.3428 43.2627 -64.0244 85.8535c-21.6484 13.71 -34.0156 38.7617 -30.3408 65.0068h-87.6348c-40.8037 0 -74 32.8105 -74 73.1406c0 40.3291 33.1963 73.1396 74 73.1396l94 -9.14062l-78.8496 18.6787
+c-38.3076 14.7422 -57.04 57.4707 -41.9424 95.1123c15.0303 37.4736 57.7549 55.7803 95.6416 41.2012l144.929 -55.7568c24.9551 30.5566 57.8086 43.9932 92.2178 24.7324l97.999 -54.8525c20.9746 -11.7393 34.0049 -33.8457 34.0049 -57.6904v-205.702
+c0 -30.7422 -21.4404 -57.5576 -51.7979 -64.5537l-118.999 -27.4268c-4.97168 -1.14648 -10.0889 -1.72949 -15.2031 -1.72949zM256 16.0127l70 -0.000976562c1.23633 0 3.21777 0.225586 4.42285 0.501953l119.001 27.4277
+c8.58203 1.97754 14.5762 9.29102 14.5762 17.7812v205.701c0 6.4873 -3.62109 12.542 -9.44922 15.8047l-98 54.8545c-8.13965 4.55566 -18.668 2.61914 -24.4873 -4.50781l-21.7646 -26.6475c-2.65039 -3.24512 -8.20215 -5.87891 -12.3926 -5.87891
+c-1.64062 0 -4.21484 0.477539 -5.74609 1.06738l-166.549 64.0908c-32.6543 12.5664 -50.7744 -34.5771 -19.2227 -46.7168l155.357 -59.7852c5.66016 -2.17773 10.2539 -8.86816 10.2539 -14.9326v0v-11.6328c0 -8.83691 -7.16309 -16 -16 -16h-182
+c-34.375 0 -34.4297 -50.2803 0 -50.2803h182c8.83691 0 16 -7.16309 16 -16v-6.85645c0 -8.83691 -7.16309 -16 -16 -16h-28c-25.1221 0 -25.1592 -36.5674 0 -36.5674h28c8.83691 0 16 -7.16211 16 -16v-6.85547c0 -8.83691 -7.16309 -16 -16 -16
+c-25.1201 0 -25.1602 -36.5674 0 -36.5674z" />
+ <glyph glyph-name="hand-lizard" unicode="&#xf258;" horiz-adv-x="576"
+d="M556.686 157.458c12.6357 -19.4863 19.3145 -42.0615 19.3145 -65.2871v-124.171h-224v71.582l-99.751 38.7871c-2.7832 1.08203 -5.70996 1.63086 -8.69727 1.63086h-131.552c-30.8789 0 -56 25.1211 -56 56c0 48.5234 39.4766 88 88 88h113.709l18.333 48h-196.042
+c-44.1123 0 -80 35.8877 -80 80v8c0 30.8779 25.1211 56 56 56h293.917c24.5 0 47.084 -12.2725 60.4111 -32.8291zM528 16v76.1709v0.0478516c0 11.7461 -5.19141 29.2734 -11.5879 39.124l-146.358 225.715c-4.44336 6.85254 -11.9707 10.9424 -20.1367 10.9424h-293.917
+c-4.41113 0 -8 -3.58887 -8 -8v-8c0 -17.6445 14.3555 -32 32 -32h213.471c25.2021 0 42.626 -25.293 33.6299 -48.8457l-24.5518 -64.2812c-7.05371 -18.4658 -25.0732 -30.873 -44.8398 -30.873h-113.709c-22.0557 0 -40 -17.9443 -40 -40c0 -4.41113 3.58887 -8 8 -8
+h131.552h0.0517578c7.44141 0 19.1074 -2.19238 26.041 -4.89355l99.752 -38.7881c18.5898 -7.22852 30.6035 -24.7881 30.6035 -44.7363v-23.582h128z" />
+ <glyph glyph-name="hand-spock" unicode="&#xf259;"
+d="M21.0957 66.21c-26.9688 25.3818 -28.2471 67.7461 -2.87109 94.707c24.1982 25.7139 64.2881 28.2373 91.4824 5.72168l-31.04 136.509c-9.38379 41.2803 21.4336 81.0127 64.0713 81.8438c1.74414 28.9062 22.2656 54.4912 51.8818 61.2949
+c36.001 8.27539 72.0176 -14.2266 80.3037 -50.2959l21.6748 -131.99l16.9014 105.25c9.02344 36.0947 45.4473 57.7021 81.25 48.75c27.3066 -6.82715 45.7061 -29.1357 49.8496 -53.9922c43.2285 0.212891 75.6436 -40.1133 65.5439 -82.5244l-31.7295 -133.41
+c-0.938477 -3.94141 -1.41406 -7.99414 -1.41406 -12.0449v-36.8389v-0.00683594c0 -9.29102 -2.14355 -24.0596 -4.78516 -32.9668l-31.8145 -107.312c-4.02734 -13.585 -16.5107 -22.9043 -30.6807 -22.9043h-237.6c-7.00586 0 -16.8311 3.89648 -21.9316 8.69824z
+M53.1641 128.021c-7.17969 -7.62891 -6.81543 -19.6777 0.813477 -26.8574l124.487 -117.164h219.311l28.4199 95.8613c1.86133 6.27637 2.80469 12.7793 2.80469 19.3281v36.8389c0.000976562 6.48047 1.21973 16.8574 2.71973 23.1621l31.7549 133.407
+c5.83105 24.4893 -31.1445 33.25 -36.9658 8.80273l-26.9229 -113.105c-1.61523 -6.78711 -8.58887 -12.2949 -15.5645 -12.2949h-9.69434c-10.4072 0 -18.043 9.79199 -15.5225 19.8799l38.127 152.512c6.09766 24.376 -30.7607 33.6396 -36.8643 9.21777l-42.3721 -169.49
+c-1.67285 -6.68945 -8.62695 -12.1191 -15.5225 -12.1191h-13.2168v0c-7.0332 0 -14.0195 5.5625 -15.5938 12.417l-45.2207 196.828c-5.64453 24.5684 -42.6572 15.9609 -37.0342 -8.50781l41.6191 -181.153c2.30078 -10.0156 -5.31738 -19.583 -15.5938 -19.583h-8.60352
+h-0.000976562c-7.0498 0 -14.04 5.5791 -15.6025 12.4541l-30.3984 133.757c-5.55273 24.4395 -42.6504 16.1963 -37.0547 -8.4209l34.1299 -150.172c0.263672 -1.16309 0.397461 -2.35352 0.397461 -3.5459v-69.4795c0 -13.9941 -16.7754 -21.2432 -26.9658 -11.6523
+l-53.0117 49.8936c-7.61523 7.16699 -19.6377 6.85938 -26.8564 -0.8125z" />
+ <glyph glyph-name="hand-pointer" unicode="&#xf25a;" horiz-adv-x="448"
+d="M358.182 268.639c43.1934 16.6348 89.8184 -15.7949 89.8184 -62.6387v-84c-0.000976562 -4.25 -0.775391 -11.0615 -1.72754 -15.2041l-27.4297 -118.999c-6.98242 -30.2969 -33.7549 -51.7969 -64.5566 -51.7969h-178.286c-21.2588 0 -41.3682 10.4102 -53.791 27.8457
+l-109.699 154.001c-21.2432 29.8193 -14.8047 71.3574 14.5498 93.1523c18.8115 13.9658 42.1748 16.2822 62.083 8.87207v161.129c0 36.9443 29.7363 67 66.2861 67s66.2861 -30.0557 66.2861 -67v-73.6338c20.4131 2.85742 41.4678 -3.94238 56.5947 -19.6289
+c27.1934 12.8467 60.3799 5.66992 79.8721 -19.0986zM80.9854 168.303c-14.4004 20.2119 -43.8008 -2.38281 -29.3945 -22.6055l109.712 -154c3.43457 -4.81934 8.92871 -7.69727 14.6973 -7.69727h178.285c8.49219 0 15.8037 5.99414 17.7822 14.5762l27.4297 119.001
+c0.333008 1.44629 0.501953 2.93457 0.501953 4.42285v84c0 25.1602 -36.5713 25.1211 -36.5713 0c0 -8.83594 -7.16309 -16 -16 -16h-6.85645c-8.83691 0 -16 7.16406 -16 16v21c0 25.1602 -36.5713 25.1201 -36.5713 0v-21c0 -8.83594 -7.16309 -16 -16 -16h-6.85938
+c-8.83691 0 -16 7.16406 -16 16v35c0 25.1602 -36.5703 25.1201 -36.5703 0v-35c0 -8.83594 -7.16309 -16 -16 -16h-6.85742c-8.83691 0 -16 7.16406 -16 16v175c0 25.1602 -36.5713 25.1201 -36.5713 0v-241.493c0 -15.5703 -20.0352 -21.9092 -29.0303 -9.2832z
+M176.143 48v96c0 8.83691 6.26855 16 14 16h6c7.73242 0 14 -7.16309 14 -16v-96c0 -8.83691 -6.26758 -16 -14 -16h-6c-7.73242 0 -14 7.16309 -14 16zM251.571 48v96c0 8.83691 6.26758 16 14 16h6c7.73145 0 14 -7.16309 14 -16v-96c0 -8.83691 -6.26855 -16 -14 -16h-6
+c-7.73242 0 -14 7.16309 -14 16zM327 48v96c0 8.83691 6.26758 16 14 16h6c7.73242 0 14 -7.16309 14 -16v-96c0 -8.83691 -6.26758 -16 -14 -16h-6c-7.73242 0 -14 7.16309 -14 16z" />
+ <glyph glyph-name="hand-peace" unicode="&#xf25b;" horiz-adv-x="448"
+d="M362.146 256.024c42.5908 13.3184 85.8535 -19.0684 85.8535 -64.0244l-0.0117188 -70.001c-0.000976562 -4.25 -0.775391 -11.0615 -1.72949 -15.2031l-27.4268 -118.999c-6.99707 -30.3564 -33.8105 -51.7969 -64.5547 -51.7969h-205.702
+c-23.8447 0 -45.9502 13.0303 -57.6904 34.0059l-54.8525 97.999c-19.2607 34.4092 -5.82422 67.2617 24.7324 92.2178l-55.7568 144.928c-14.5791 37.8867 3.72754 80.6113 41.2012 95.6416c37.6406 15.0977 80.3691 -3.63477 95.1123 -41.9424l18.6787 -78.8496
+l-9.14062 94c0 40.8037 32.8096 74 73.1396 74s73.1406 -33.1963 73.1406 -74v-87.6348c26.2451 3.6748 51.2959 -8.69238 65.0068 -30.3408zM399.987 122l-0.000976562 70c0 25.1602 -36.5674 25.1201 -36.5674 0c0 -8.83691 -7.16309 -16 -16 -16h-6.85547
+c-8.83789 0 -16 7.16309 -16 16v28c0 25.1592 -36.5674 25.1221 -36.5674 0v-28c0 -8.83691 -7.16309 -16 -16 -16h-6.85645c-8.83691 0 -16 7.16309 -16 16v182c0 34.4297 -50.2803 34.375 -50.2803 0v-182c0 -8.83691 -7.16309 -16 -16 -16h-11.6328v0
+c-6.06445 0 -12.7549 4.59375 -14.9326 10.2539l-59.7842 155.357c-12.1396 31.5518 -59.2842 13.4326 -46.7168 -19.2227l64.0898 -166.549c0.589844 -1.53125 1.06738 -4.10547 1.06738 -5.74609c0 -4.19043 -2.63379 -9.74219 -5.87891 -12.3926l-26.6475 -21.7646
+c-7.12695 -5.81934 -9.06445 -16.3467 -4.50781 -24.4873l54.8535 -98c3.26367 -5.82812 9.31934 -9.44922 15.8057 -9.44922h205.701c8.49121 0 15.8037 5.99414 17.7812 14.5762l27.4277 119.001c0.333008 1.44629 0.501953 2.93457 0.501953 4.42285z" />
+ <glyph glyph-name="registered" unicode="&#xf25d;"
+d="M256 440c136.967 0 248 -111.033 248 -248s-111.033 -248 -248 -248s-248 111.033 -248 248s111.033 248 248 248zM256 -8c110.549 0 200 89.4678 200 200c0 110.549 -89.4678 200 -200 200c-110.549 0 -200 -89.4688 -200 -200c0 -110.549 89.4678 -200 200 -200z
+M366.442 73.791c4.40332 -7.99219 -1.37012 -17.791 -10.5107 -17.791h-42.8096h-0.0126953c-3.97559 0 -8.71582 2.84961 -10.5801 6.36035l-47.5156 89.3027h-31.958v-83.6631c0 -6.61719 -5.38281 -12 -12 -12h-38.5674c-6.61719 0 -12 5.38281 -12 12v248.304
+c0 6.61719 5.38281 12 12 12h78.667c71.251 0 101.498 -32.749 101.498 -85.252c0 -31.6123 -15.2148 -59.2969 -39.4824 -73.1758c3.02148 -4.61719 0.225586 0.199219 53.2715 -96.085zM256.933 208.094c20.9131 0 32.4307 11.5186 32.4316 32.4316
+c0 19.5752 -6.5127 31.709 -38.9297 31.709h-27.377v-64.1406h33.875z" />
+ <glyph glyph-name="calendar-plus" unicode="&#xf271;" horiz-adv-x="448"
+d="M336 156v-24c0 -6.59961 -5.40039 -12 -12 -12h-76v-76c0 -6.59961 -5.40039 -12 -12 -12h-24c-6.59961 0 -12 5.40039 -12 12v76h-76c-6.59961 0 -12 5.40039 -12 12v24c0 6.59961 5.40039 12 12 12h76v76c0 6.59961 5.40039 12 12 12h24c6.59961 0 12 -5.40039 12 -12
+v-76h76c6.59961 0 12 -5.40039 12 -12zM448 336v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h48v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h128v52c0 6.59961 5.40039 12 12 12h40
+c6.59961 0 12 -5.40039 12 -12v-52h48c26.5 0 48 -21.5 48 -48zM400 -10v298h-352v-298c0 -3.2998 2.7002 -6 6 -6h340c3.2998 0 6 2.7002 6 6z" />
+ <glyph glyph-name="calendar-minus" unicode="&#xf272;" horiz-adv-x="448"
+d="M124 120c-6.59961 0 -12 5.40039 -12 12v24c0 6.59961 5.40039 12 12 12h200c6.59961 0 12 -5.40039 12 -12v-24c0 -6.59961 -5.40039 -12 -12 -12h-200zM448 336v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h48v52
+c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h128v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h48c26.5 0 48 -21.5 48 -48zM400 -10v298h-352v-298c0 -3.2998 2.7002 -6 6 -6h340c3.2998 0 6 2.7002 6 6z" />
+ <glyph glyph-name="calendar-times" unicode="&#xf273;" horiz-adv-x="448"
+d="M311.7 73.2998l-17 -17c-4.7002 -4.7002 -12.2998 -4.7002 -17 0l-53.7002 53.7998l-53.7002 -53.6992c-4.7002 -4.7002 -12.2998 -4.7002 -17 0l-17 17c-4.7002 4.69922 -4.7002 12.2998 0 17l53.7002 53.6992l-53.7002 53.7002c-4.7002 4.7002 -4.7002 12.2998 0 17
+l17 17c4.7002 4.7002 12.2998 4.7002 17 0l53.7002 -53.7002l53.7002 53.7002c4.7002 4.7002 12.2998 4.7002 17 0l17 -17c4.7002 -4.7002 4.7002 -12.2998 0 -17l-53.7998 -53.7998l53.6992 -53.7002c4.80078 -4.7002 4.80078 -12.2998 0.100586 -17zM448 336v-352
+c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h48v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h128v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h48c26.5 0 48 -21.5 48 -48zM400 -10
+v298h-352v-298c0 -3.2998 2.7002 -6 6 -6h340c3.2998 0 6 2.7002 6 6z" />
+ <glyph glyph-name="calendar-check" unicode="&#xf274;" horiz-adv-x="448"
+d="M400 384c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h48v52c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-52h128v52c0 6.62695 5.37305 12 12 12h40
+c6.62695 0 12 -5.37305 12 -12v-52h48zM394 -16c3.31152 0 6 2.68848 6 6v298h-352v-298c0 -3.31152 2.68848 -6 6 -6h340zM341.151 184.65l-142.31 -141.169c-4.70508 -4.66699 -12.3027 -4.6377 -16.9707 0.0673828l-75.0908 75.6992
+c-4.66699 4.70508 -4.6377 12.3027 0.0673828 16.9707l22.7197 22.5361c4.70508 4.66699 12.3027 4.63672 16.9697 -0.0693359l44.1035 -44.4609l111.072 110.182c4.70508 4.66699 12.3027 4.63672 16.9707 -0.0683594l22.5361 -22.7178
+c4.66699 -4.70508 4.63672 -12.3027 -0.0683594 -16.9697z" />
+ <glyph glyph-name="map" unicode="&#xf279;" horiz-adv-x="576"
+d="M560.02 416c8.4502 0 15.9805 -6.83008 15.9805 -16.0195v-346.32c0 -11.9609 -9.01367 -25.2705 -20.1201 -29.71l-151.83 -52.8105c-5.32617 -1.7334 -14.1953 -3.13965 -19.7969 -3.13965c-5.7373 0 -14.8105 1.47363 -20.2529 3.29004l-172 60.71l-170.05 -62.8398
+c-1.99023 -0.790039 -4 -1.16016 -5.95996 -1.16016c-8.45996 0 -15.9902 6.83008 -15.9902 16.0195v346.32c0.00292969 11.959 9.0166 25.2686 20.1201 29.71l151.83 52.8105c6.43945 2.08984 13.1201 3.13965 19.8096 3.13965
+c5.73242 -0.00195312 14.8008 -1.47168 20.2402 -3.28027l172 -60.7197h0.00976562l170.05 62.8398c1.98047 0.790039 4 1.16016 5.95996 1.16016zM224 357.58v-285.97l128 -45.1904v285.97zM48 29.9502l127.36 47.0801l0.639648 0.229492v286.2l-128 -44.5303v-288.979z
+M528 65.0801v288.97l-127.36 -47.0693l-0.639648 -0.240234v-286.19z" />
+ <glyph glyph-name="comment-alt" unicode="&#xf27a;"
+d="M448 448c35.2998 0 64 -28.7002 64 -64v-288c0 -35.2998 -28.7002 -64 -64 -64h-144l-124.9 -93.5996c-2.19922 -1.7002 -4.69922 -2.40039 -7.09961 -2.40039c-6.2002 0 -12 4.90039 -12 12v84h-96c-35.2998 0 -64 28.7002 -64 64v288c0 35.2998 28.7002 64 64 64h384z
+M464 96v288c0 8.7998 -7.2002 16 -16 16h-384c-8.7998 0 -16 -7.2002 -16 -16v-288c0 -8.7998 7.2002 -16 16 -16h144v-60l67.2002 50.4004l12.7998 9.59961h160c8.7998 0 16 7.2002 16 16z" />
+ <glyph glyph-name="pause-circle" unicode="&#xf28b;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM256 -8c110.5 0 200 89.5 200 200s-89.5 200 -200 200s-200 -89.5 -200 -200s89.5 -200 200 -200zM352 272v-160c0 -8.7998 -7.2002 -16 -16 -16h-48
+c-8.7998 0 -16 7.2002 -16 16v160c0 8.7998 7.2002 16 16 16h48c8.7998 0 16 -7.2002 16 -16zM240 272v-160c0 -8.7998 -7.2002 -16 -16 -16h-48c-8.7998 0 -16 7.2002 -16 16v160c0 8.7998 7.2002 16 16 16h48c8.7998 0 16 -7.2002 16 -16z" />
+ <glyph glyph-name="stop-circle" unicode="&#xf28d;"
+d="M504 192c0 -137 -111 -248 -248 -248s-248 111 -248 248s111 248 248 248s248 -111 248 -248zM56 192c0 -110.5 89.5 -200 200 -200s200 89.5 200 200s-89.5 200 -200 200s-200 -89.5 -200 -200zM352 272v-160c0 -8.7998 -7.2002 -16 -16 -16h-160
+c-8.7998 0 -16 7.2002 -16 16v160c0 8.7998 7.2002 16 16 16h160c8.7998 0 16 -7.2002 16 -16z" />
+ <glyph glyph-name="handshake" unicode="&#xf2b5;" horiz-adv-x="640"
+d="M519.2 320.1h120.8v-255.699h-64c-17.5 0 -31.7998 14.1992 -31.9004 31.6992h-57.8994c-1.7998 -8.19922 -5.2998 -16.0996 -10.9004 -23l-26.2002 -32.2998c-15.7998 -19.3994 -41.8994 -25.5 -64 -16.7998c-13.5 -16.5996 -30.5996 -24 -48.7998 -24
+c-15.0996 0 -28.5996 5.09961 -41.0996 15.9004c-31.7998 -21.9004 -74.7002 -21.3008 -105.601 3.7998l-84.5996 76.3994h-9.09961c-0.100586 -17.5 -14.3008 -31.6992 -31.9004 -31.6992h-64v255.699h118l47.5996 47.6006c10.5 10.3994 24.8008 16.2998 39.6006 16.2998
+h226.8v0c12.7812 0 30.5225 -7.30273 39.5996 -16.2998zM48 96.4004c8.7998 0 16 7.09961 16 16c0 8.7998 -7.2002 16 -16 16s-16 -7.2002 -16 -16c0 -8.80078 7.2002 -16 16 -16zM438 103.3c2.7002 3.40039 2.2002 8.5 -1.2002 11.2998l-108.2 87.8008l-8.19922 -7.5
+c-40.3008 -36.8008 -86.7002 -11.8008 -101.5 4.39941c-26.7002 29 -25 74.4004 4.39941 101.3l38.7002 35.5h-56.7002c-2 -0.799805 -3.7002 -1.5 -5.7002 -2.2998l-61.6992 -61.5996h-41.9004v-128.101h27.7002l97.2998 -88
+c16.0996 -13.0996 41.4004 -10.5 55.2998 6.60059l15.6006 19.2002l36.7998 -31.5c3 -2.40039 12 -4.90039 18 2.39941l30 36.5l23.8994 -19.3994c3.5 -2.80078 8.5 -2.2002 11.3008 1.19922zM544 144.1v128h-44.7002l-61.7002 61.6006
+c-1.39941 1.5 -3.39941 2.2998 -5.5 2.2998l-83.6992 -0.200195c-10 0 -19.6006 -3.7002 -27 -10.5l-65.6006 -60.0996c-9.7002 -8.7998 -10.5 -24 -1.2002 -33.9004c8.90039 -9.39941 25.1006 -8.7002 34.6006 0l55.2002 50.6006c6.5 5.89941 16.5996 5.5 22.5996 -1
+l10.9004 -11.7002c6 -6.5 5.5 -16.6006 -1 -22.6006l-12.5 -11.3994l102.699 -83.4004c2.80078 -2.2998 5.40039 -4.89941 7.7002 -7.7002h69.2002zM592 96.4004c8.7998 0 16 7.09961 16 16c0 8.7998 -7.2002 16 -16 16s-16 -7.2002 -16 -16c0 -8.80078 7.2002 -16 16 -16z
+" />
+ <glyph glyph-name="envelope-open" unicode="&#xf2b6;"
+d="M494.586 283.484c9.6123 -7.94824 17.4141 -24.5205 17.4141 -36.9932v-262.491c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v262.515c0 12.5166 7.84668 29.1279 17.5146 37.0771c4.08008 3.35449 110.688 89.0996 135.15 108.549
+c22.6992 18.1426 60.1299 55.8594 103.335 55.8594c43.4365 0 81.2314 -38.1914 103.335 -55.8594c23.5283 -18.707 130.554 -104.773 135.251 -108.656zM464 -10v253.632v0.00488281c0 1.5791 -0.996094 3.66602 -2.22363 4.6582
+c-15.8633 12.8232 -108.793 87.5752 -132.366 106.316c-17.5527 14.0195 -49.7168 45.3887 -73.4102 45.3887c-23.6016 0 -55.2451 -30.8799 -73.4102 -45.3887c-23.5713 -18.7393 -116.494 -93.4795 -132.364 -106.293
+c-1.40918 -1.13965 -2.22559 -2.85254 -2.22559 -4.66504v-253.653c0 -3.31152 2.68848 -6 6 -6h404c3.31152 0 6 2.68848 6 6zM432.009 177.704c4.24902 -5.15918 3.46484 -12.7949 -1.74512 -16.9814c-28.9746 -23.2822 -59.2734 -47.5967 -70.9287 -56.8623
+c-22.6992 -18.1436 -60.1299 -55.8604 -103.335 -55.8604c-43.4521 0 -81.2871 38.2373 -103.335 55.8604c-11.2793 8.9668 -41.7441 33.4131 -70.9268 56.8643c-5.20996 4.1875 -5.99316 11.8223 -1.74512 16.9814l15.2578 18.5283
+c4.17773 5.07227 11.6572 5.84277 16.7793 1.72559c28.6182 -23.001 58.5654 -47.0352 70.5596 -56.5713c17.5527 -14.0195 49.7168 -45.3887 73.4102 -45.3887c23.6016 0 55.2461 30.8799 73.4102 45.3887c11.9941 9.53516 41.9434 33.5703 70.5625 56.5684
+c5.12207 4.11621 12.6016 3.3457 16.7783 -1.72656z" />
+ <glyph glyph-name="address-book" unicode="&#xf2b9;" horiz-adv-x="448"
+d="M436 288h-20v-64h20c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-20v-64h20c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-20v-48c0 -26.5 -21.5 -48 -48 -48h-320c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48
+h320c26.5 0 48 -21.5 48 -48v-48h20c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12zM368 -16v416h-320v-416h320zM208 192c-35.2998 0 -64 28.7002 -64 64s28.7002 64 64 64s64 -28.7002 64 -64s-28.7002 -64 -64 -64zM118.4 64
+c-12.4004 0 -22.4004 8.59961 -22.4004 19.2002v19.2002c0 31.7998 30.0996 57.5996 67.2002 57.5996c11.3994 0 17.8994 -8 44.7998 -8c26.0996 0 34 8 44.7998 8c37.1006 0 67.2002 -25.7998 67.2002 -57.5996v-19.2002c0 -10.6006 -10 -19.2002 -22.4004 -19.2002
+h-179.199z" />
+ <glyph glyph-name="address-card" unicode="&#xf2bb;" horiz-adv-x="576"
+d="M528 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h480zM528 16v352h-480v-352h480zM208 192c-35.2998 0 -64 28.7002 -64 64s28.7002 64 64 64s64 -28.7002 64 -64s-28.7002 -64 -64 -64z
+M118.4 64c-12.4004 0 -22.4004 8.59961 -22.4004 19.2002v19.2002c0 31.7998 30.0996 57.5996 67.2002 57.5996c11.3994 0 17.8994 -8 44.7998 -8c26.0996 0 34 8 44.7998 8c37.1006 0 67.2002 -25.7998 67.2002 -57.5996v-19.2002
+c0 -10.6006 -10 -19.2002 -22.4004 -19.2002h-179.199zM360 128c-4.40039 0 -8 3.59961 -8 8v16c0 4.40039 3.59961 8 8 8h112c4.40039 0 8 -3.59961 8 -8v-16c0 -4.40039 -3.59961 -8 -8 -8h-112zM360 192c-4.40039 0 -8 3.59961 -8 8v16c0 4.40039 3.59961 8 8 8h112
+c4.40039 0 8 -3.59961 8 -8v-16c0 -4.40039 -3.59961 -8 -8 -8h-112zM360 256c-4.40039 0 -8 3.59961 -8 8v16c0 4.40039 3.59961 8 8 8h112c4.40039 0 8 -3.59961 8 -8v-16c0 -4.40039 -3.59961 -8 -8 -8h-112z" />
+ <glyph glyph-name="user-circle" unicode="&#xf2bd;" horiz-adv-x="496"
+d="M248 344c53 0 96 -43 96 -96s-43 -96 -96 -96s-96 43 -96 96s43 96 96 96zM248 200c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48zM248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8
+c49.7002 0 95.0996 18.2998 130.1 48.4004c-14.8994 23 -40.3994 38.5 -69.5996 39.5c-20.7998 -6.5 -40.5996 -9.60059 -60.5 -9.60059s-39.7002 3.2002 -60.5 9.60059c-29.2002 -0.900391 -54.7002 -16.5 -69.5996 -39.5c35 -30.1006 80.3994 -48.4004 130.1 -48.4004z
+M410.7 76.0996c23.3994 32.7002 37.2998 72.7002 37.2998 115.9c0 110.3 -89.7002 200 -200 200s-200 -89.7002 -200 -200c0 -43.2002 13.9004 -83.2002 37.2998 -115.9c24.5 31.4004 62.2002 51.9004 105.101 51.9004c10.1992 0 26.0996 -9.59961 57.5996 -9.59961
+c31.5996 0 47.4004 9.59961 57.5996 9.59961c43 0 80.7002 -20.5 105.101 -51.9004z" />
+ <glyph glyph-name="id-badge" unicode="&#xf2c1;" horiz-adv-x="384"
+d="M336 448c26.5 0 48 -21.5 48 -48v-416c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48h288zM336 -16v416h-288v-416h288zM144 336c-8.7998 0 -16 7.2002 -16 16s7.2002 16 16 16h96c8.7998 0 16 -7.2002 16 -16s-7.2002 -16 -16 -16
+h-96zM192 160c-35.2998 0 -64 28.7002 -64 64s28.7002 64 64 64s64 -28.7002 64 -64s-28.7002 -64 -64 -64zM102.4 32c-12.4004 0 -22.4004 8.59961 -22.4004 19.2002v19.2002c0 31.7998 30.0996 57.5996 67.2002 57.5996c11.3994 0 17.8994 -8 44.7998 -8
+c26.0996 0 34 8 44.7998 8c37.1006 0 67.2002 -25.7998 67.2002 -57.5996v-19.2002c0 -10.6006 -10 -19.2002 -22.4004 -19.2002h-179.199z" />
+ <glyph glyph-name="id-card" unicode="&#xf2c2;" horiz-adv-x="576"
+d="M528 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h480zM528 16v288h-480v-288h32.7998c-1 4.5 -0.799805 -3.59961 -0.799805 22.4004c0 31.7998 30.0996 57.5996 67.2002 57.5996
+c11.3994 0 17.8994 -8 44.7998 -8c26.0996 0 34 8 44.7998 8c37.1006 0 67.2002 -25.7998 67.2002 -57.5996c0 -26 0.0996094 -17.9004 -0.799805 -22.4004h224.8zM360 96c-4.40039 0 -8 3.59961 -8 8v16c0 4.40039 3.59961 8 8 8h112c4.40039 0 8 -3.59961 8 -8v-16
+c0 -4.40039 -3.59961 -8 -8 -8h-112zM360 160c-4.40039 0 -8 3.59961 -8 8v16c0 4.40039 3.59961 8 8 8h112c4.40039 0 8 -3.59961 8 -8v-16c0 -4.40039 -3.59961 -8 -8 -8h-112zM360 224c-4.40039 0 -8 3.59961 -8 8v16c0 4.40039 3.59961 8 8 8h112
+c4.40039 0 8 -3.59961 8 -8v-16c0 -4.40039 -3.59961 -8 -8 -8h-112zM192 128c-35.2998 0 -64 28.7002 -64 64s28.7002 64 64 64s64 -28.7002 64 -64s-28.7002 -64 -64 -64z" />
+ <glyph glyph-name="window-maximize" unicode="&#xf2d0;"
+d="M464 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h416zM464 22v234h-416v-234c0 -3.2998 2.7002 -6 6 -6h404c3.2998 0 6 2.7002 6 6z" />
+ <glyph glyph-name="window-minimize" unicode="&#xf2d1;"
+d="M480 -32h-448c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32h448c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32z" />
+ <glyph glyph-name="window-restore" unicode="&#xf2d2;"
+d="M464 448c26.5 0 48 -21.5 48 -48v-320c0 -26.5 -21.5 -48 -48 -48h-48v-48c0 -26.5 -21.5 -48 -48 -48h-320c-26.5 0 -48 21.5 -48 48v320c0 26.5 21.5 48 48 48h48v48c0 26.5 21.5 48 48 48h320zM368 -16v208h-320v-208h320zM464 80v320h-320v-48h224
+c26.5 0 48 -21.5 48 -48v-224h48z" />
+ <glyph glyph-name="snowflake" unicode="&#xf2dc;" horiz-adv-x="448"
+d="M440.1 92.7998c7.60059 -4.39941 10.1006 -14.2002 5.5 -21.7002l-7.89941 -13.8994c-4.40039 -7.7002 -14 -10.2998 -21.5 -5.90039l-39.2002 23l9.09961 -34.7002c2.30078 -8.5 -2.69922 -17.2998 -11.0996 -19.5996l-15.2002 -4.09961
+c-8.39941 -2.30078 -17.0996 2.7998 -19.2998 11.2998l-21.2998 81l-71.9004 42.2002v-84.5l58.2998 -59.3008c6.10059 -6.19922 6.10059 -16.3994 0 -22.5996l-11.0996 -11.2998c-6.09961 -6.2002 -16.0996 -6.2002 -22.2002 0l-24.8994 25.3994v-46.0996
+c0 -8.7998 -7 -16 -15.7002 -16h-15.7002c-8.7002 0 -15.7002 7.2002 -15.7002 16v45.9004l-24.8994 -25.4004c-6.10059 -6.2002 -16.1006 -6.2002 -22.2002 0l-11.1006 11.2998c-6.09961 6.2002 -6.09961 16.4004 0 22.6006l58.3008 59.2998v84.5l-71.9004 -42.2002
+l-21.2998 -81c-2.2998 -8.5 -10.9004 -13.5996 -19.2998 -11.2998l-15.2002 4.09961c-8.40039 2.2998 -13.2998 11.1006 -11.1006 19.6006l9.10059 34.6992l-39.2002 -23c-7.5 -4.39941 -17.2002 -1.7998 -21.5 5.90039l-7.90039 13.9004
+c-4.2998 7.69922 -1.69922 17.5 5.80078 21.8994l39.1992 23l-34.0996 9.2998c-8.40039 2.30078 -13.2998 11.1006 -11.0996 19.6006l4.09961 15.5c2.2998 8.5 10.9004 13.5996 19.2998 11.2998l79.7002 -21.7002l71.9004 42.2002l-71.9004 42.2002l-79.7002 -21.7002
+c-8.39941 -2.2998 -17.0996 2.7998 -19.2998 11.2998l-4.09961 15.5c-2.30078 8.5 2.69922 17.2998 11.0996 19.6006l34.0996 9.09961l-39.1992 23c-7.60059 4.5 -10.1006 14.2002 -5.80078 21.9004l7.90039 13.8994c4.40039 7.7002 14 10.2998 21.5 5.90039l39.2002 -23
+l-9.10059 34.7002c-2.2998 8.5 2.7002 17.2998 11.1006 19.5996l15.2002 4.09961c8.39941 2.30078 17.0996 -2.7998 19.2998 -11.2998l21.2998 -81l71.9004 -42.2002v84.5l-58.3008 59.3008c-6.09961 6.19922 -6.09961 16.3994 0 22.5996l11.5 11.2998
+c6.10059 6.2002 16.1006 6.2002 22.2002 0l24.9004 -25.3994v46.0996c0 8.7998 7 16 15.7002 16h15.6992c8.7002 0 15.7002 -7.2002 15.7002 -16v-45.9004l24.9004 25.4004c6.09961 6.2002 16.0996 6.2002 22.2002 0l11.0996 -11.2998
+c6.09961 -6.2002 6.09961 -16.4004 0 -22.6006l-58.2998 -59.2998v-84.5l71.8994 42.2002l21.3008 81c2.2998 8.5 10.8994 13.5996 19.2998 11.2998l15.2002 -4.09961c8.39941 -2.2998 13.2998 -11.1006 11.0996 -19.6006l-9.09961 -34.6992l39.1992 23
+c7.5 4.39941 17.2002 1.7998 21.5 -5.90039l7.90039 -13.9004c4.2998 -7.69922 1.7002 -17.5 -5.7998 -21.8994l-39.2002 -23l34.0996 -9.2998c8.40039 -2.30078 13.3008 -11.1006 11.1006 -19.6006l-4.10059 -15.5c-2.2998 -8.5 -10.8994 -13.5996 -19.2998 -11.2998
+l-79.7002 21.7002l-71.8994 -42.2002l71.7998 -42.2002l79.7002 21.7002c8.39941 2.2998 17.0996 -2.7998 19.2998 -11.2998l4.09961 -15.5c2.30078 -8.5 -2.69922 -17.2998 -11.0996 -19.6006l-34.0996 -9.2998z" />
+ <glyph glyph-name="trash-alt" unicode="&#xf2ed;" horiz-adv-x="448"
+d="M268 32c-6.62402 0 -12 5.37598 -12 12v216c0 6.62402 5.37598 12 12 12h24c6.62402 0 12 -5.37598 12 -12v-216c0 -6.62402 -5.37598 -12 -12 -12h-24zM432 368c8.83203 0 16 -7.16797 16 -16v-16c0 -8.83203 -7.16797 -16 -16 -16h-16v-336
+c0 -26.4961 -21.5039 -48 -48 -48h-288c-26.4961 0 -48 21.5039 -48 48v336h-16c-8.83203 0 -16 7.16797 -16 16v16c0 8.83203 7.16797 16 16 16h82.4102l34.0195 56.7002c7.71875 12.8613 26.1572 23.2998 41.1572 23.2998h0.00292969h100.82h0.0224609
+c15 0 33.4385 -10.4385 41.1572 -23.2998l34 -56.7002h82.4102zM171.84 397.09l-17.4502 -29.0898h139.221l-17.46 29.0898c-0.96582 1.60645 -3.26953 2.91016 -5.14355 2.91016h-0.00683594h-94h-0.0166016c-1.87402 0 -4.17871 -1.30371 -5.14355 -2.91016zM368 -16v336
+h-288v-336h288zM156 32c-6.62402 0 -12 5.37598 -12 12v216c0 6.62402 5.37598 12 12 12h24c6.62402 0 12 -5.37598 12 -12v-216c0 -6.62402 -5.37598 -12 -12 -12h-24z" />
+ <glyph glyph-name="images" unicode="&#xf302;" horiz-adv-x="576"
+d="M480 32v-16c0 -26.5098 -21.4902 -48 -48 -48h-384c-26.5098 0 -48 21.4902 -48 48v256c0 26.5098 21.4902 48 48 48h16v-48h-10c-3.31152 0 -6 -2.68848 -6 -6v-244c0 -3.31152 2.68848 -6 6 -6h372c3.31152 0 6 2.68848 6 6v10h48zM522 368h-372
+c-3.31152 0 -6 -2.68848 -6 -6v-244c0 -3.31152 2.68848 -6 6 -6h372c3.31152 0 6 2.68848 6 6v244c0 3.31152 -2.68848 6 -6 6zM528 416c26.5098 0 48 -21.4902 48 -48v-256c0 -26.5098 -21.4902 -48 -48 -48h-384c-26.5098 0 -48 21.4902 -48 48v256
+c0 26.5098 21.4902 48 48 48h384zM264 304c0 -22.0908 -17.9092 -40 -40 -40s-40 17.9092 -40 40s17.9092 40 40 40s40 -17.9092 40 -40zM192 208l39.5146 39.5146c4.68652 4.68652 12.2842 4.68652 16.9717 0l39.5137 -39.5146l103.515 103.515
+c4.68652 4.68652 12.2842 4.68652 16.9717 0l71.5137 -71.5146v-80h-288v48z" />
+ <glyph glyph-name="clipboard" unicode="&#xf328;" horiz-adv-x="384"
+d="M336 384c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h80c0 35.2998 28.7002 64 64 64s64 -28.7002 64 -64h80zM192 408c-13.2998 0 -24 -10.7002 -24 -24s10.7002 -24 24 -24s24 10.7002 24 24
+s-10.7002 24 -24 24zM336 -10v340c0 3.2998 -2.7002 6 -6 6h-42v-36c0 -6.59961 -5.40039 -12 -12 -12h-168c-6.59961 0 -12 5.40039 -12 12v36h-42c-3.2998 0 -6 -2.7002 -6 -6v-340c0 -3.2998 2.7002 -6 6 -6h276c3.2998 0 6 2.7002 6 6z" />
+ <glyph glyph-name="arrow-alt-circle-down" unicode="&#xf358;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM256 -8c110.5 0 200 89.5 200 200s-89.5 200 -200 200s-200 -89.5 -200 -200s89.5 -200 200 -200zM224 308c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-116
+h67c10.7002 0 16.0996 -12.9004 8.5 -20.5l-99 -99c-4.7002 -4.7002 -12.2998 -4.7002 -17 0l-99 99c-7.5 7.59961 -2.2002 20.5 8.5 20.5h67v116z" />
+ <glyph glyph-name="arrow-alt-circle-left" unicode="&#xf359;"
+d="M8 192c0 137 111 248 248 248s248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248zM456 192c0 110.5 -89.5 200 -200 200s-200 -89.5 -200 -200s89.5 -200 200 -200s200 89.5 200 200zM384 212v-40c0 -6.59961 -5.40039 -12 -12 -12h-116v-67
+c0 -10.7002 -12.9004 -16 -20.5 -8.5l-99 99c-4.7002 4.7002 -4.7002 12.2998 0 17l99 99c7.59961 7.59961 20.5 2.2002 20.5 -8.5v-67h116c6.59961 0 12 -5.40039 12 -12z" />
+ <glyph glyph-name="arrow-alt-circle-right" unicode="&#xf35a;"
+d="M504 192c0 -137 -111 -248 -248 -248s-248 111 -248 248s111 248 248 248s248 -111 248 -248zM56 192c0 -110.5 89.5 -200 200 -200s200 89.5 200 200s-89.5 200 -200 200s-200 -89.5 -200 -200zM128 172v40c0 6.59961 5.40039 12 12 12h116v67
+c0 10.7002 12.9004 16 20.5 8.5l99 -99c4.7002 -4.7002 4.7002 -12.2998 0 -17l-99 -99c-7.59961 -7.59961 -20.5 -2.2002 -20.5 8.5v67h-116c-6.59961 0 -12 5.40039 -12 12z" />
+ <glyph glyph-name="arrow-alt-circle-up" unicode="&#xf35b;"
+d="M256 -56c-137 0 -248 111 -248 248s111 248 248 248s248 -111 248 -248s-111 -248 -248 -248zM256 392c-110.5 0 -200 -89.5 -200 -200s89.5 -200 200 -200s200 89.5 200 200s-89.5 200 -200 200zM276 64h-40c-6.59961 0 -12 5.40039 -12 12v116h-67
+c-10.7002 0 -16 12.9004 -8.5 20.5l99 99c4.7002 4.7002 12.2998 4.7002 17 0l99 -99c7.59961 -7.59961 2.2002 -20.5 -8.5 -20.5h-67v-116c0 -6.59961 -5.40039 -12 -12 -12z" />
+ <glyph glyph-name="gem" unicode="&#xf3a5;" horiz-adv-x="576"
+d="M464 448c4.09961 0 7.7998 -2 10.0996 -5.40039l99.9004 -147.199c2.90039 -4.40039 2.59961 -10.1006 -0.700195 -14.2002l-276 -340.8c-4.7998 -5.90039 -13.7998 -5.90039 -18.5996 0l-276 340.8c-3.2998 4 -3.60059 9.7998 -0.700195 14.2002l100 147.199
+c2.2002 3.40039 6 5.40039 10 5.40039h352zM444.7 400h-56.7998l51.6992 -96h68.4004zM242.6 400l-51.5996 -96h194l-51.7002 96h-90.7002zM131.3 400l-63.2998 -96h68.4004l51.6992 96h-56.7998zM88.2998 256l119.7 -160l-68.2998 160h-51.4004zM191.2 256l96.7998 -243.3
+l96.7998 243.3h-193.6zM368 96l119.6 160h-51.3994z" />
+ <glyph glyph-name="money-bill-alt" unicode="&#xf3d1;" horiz-adv-x="640"
+d="M320 304c53.0195 0 96 -50.1396 96 -112c0 -61.8701 -43 -112 -96 -112c-53.0195 0 -96 50.1504 -96 112c0 61.8604 42.9805 112 96 112zM360 136v16c0 4.41992 -3.58008 8 -8 8h-16v88c0 4.41992 -3.58008 8 -8 8h-13.5801h-0.000976562
+c-4.01074 0 -9.97266 -1.80566 -13.3086 -4.03027l-15.3301 -10.2197c-1.96777 -1.30957 -3.56445 -4.29004 -3.56445 -6.65332c0 -1.33691 0.601562 -3.32422 1.34375 -4.43652l8.88086 -13.3105c1.30859 -1.9668 4.29004 -3.56445 6.65332 -3.56445
+c1.33691 0 3.32422 0.602539 4.43652 1.34473l0.469727 0.310547v-55.4404h-16c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h64c4.41992 0 8 3.58008 8 8zM608 384c17.6699 0 32 -14.3301 32 -32v-320c0 -17.6699 -14.3301 -32 -32 -32h-576
+c-17.6699 0 -32 14.3301 -32 32v320c0 17.6699 14.3301 32 32 32h576zM592 112v160c-35.3496 0 -64 28.6504 -64 64h-416c0 -35.3496 -28.6504 -64 -64 -64v-160c35.3496 0 64 -28.6504 64 -64h416c0 35.3496 28.6504 64 64 64z" />
+ <glyph glyph-name="window-close" unicode="&#xf410;"
+d="M464 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h416zM464 22v340c0 3.2998 -2.7002 6 -6 6h-404c-3.2998 0 -6 -2.7002 -6 -6v-340c0 -3.2998 2.7002 -6 6 -6h404c3.2998 0 6 2.7002 6 6z
+M356.5 253.4l-61.4004 -61.4004l61.4004 -61.4004c4.59961 -4.59961 4.59961 -12.0996 0 -16.7998l-22.2998 -22.2998c-4.60059 -4.59961 -12.1006 -4.59961 -16.7998 0l-61.4004 61.4004l-61.4004 -61.4004c-4.59961 -4.59961 -12.0996 -4.59961 -16.7998 0
+l-22.2998 22.2998c-4.59961 4.60059 -4.59961 12.1006 0 16.7998l61.4004 61.4004l-61.4004 61.4004c-4.59961 4.59961 -4.59961 12.0996 0 16.7998l22.2998 22.2998c4.60059 4.59961 12.1006 4.59961 16.7998 0l61.4004 -61.4004l61.4004 61.4004
+c4.59961 4.59961 12.0996 4.59961 16.7998 0l22.2998 -22.2998c4.7002 -4.60059 4.7002 -12.1006 0 -16.7998z" />
+ <glyph glyph-name="comment-dots" unicode="&#xf4ad;"
+d="M144 240c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32zM256 240c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32zM368 240c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32
+s-32 14.2998 -32 32s14.2998 32 32 32zM256 416c141.4 0 256 -93.0996 256 -208s-114.6 -208 -256 -208c-32.7998 0 -64 5.2002 -92.9004 14.2998c-29.0996 -20.5996 -77.5996 -46.2998 -139.1 -46.2998c-9.59961 0 -18.2998 5.7002 -22.0996 14.5
+c-3.80078 8.7998 -2 19 4.59961 26c0.5 0.400391 31.5 33.7998 46.4004 73.2002c-33 35.0996 -52.9004 78.7002 -52.9004 126.3c0 114.9 114.6 208 256 208zM256 48c114.7 0 208 71.7998 208 160s-93.2998 160 -208 160s-208 -71.7998 -208 -160
+c0 -42.2002 21.7002 -74.0996 39.7998 -93.4004l20.6006 -21.7998l-10.6006 -28.0996c-5.5 -14.5 -12.5996 -28.1006 -19.8994 -40.2002c23.5996 7.59961 43.1992 18.9004 57.5 29l19.5 13.7998l22.6992 -7.2002c25.3008 -8 51.7002 -12.0996 78.4004 -12.0996z" />
+ <glyph glyph-name="smile-wink" unicode="&#xf4da;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM365.8 138.4c10.2002 -8.5 11.6006 -23.6006 3.10059 -33.8008
+c-30 -36 -74.1006 -56.5996 -120.9 -56.5996s-90.9004 20.5996 -120.9 56.5996c-8.39941 10.2002 -7.09961 25.3008 3.10059 33.8008c10.0996 8.39941 25.2998 7.09961 33.7998 -3.10059c20.7998 -25.0996 51.5 -39.3994 84 -39.3994s63.2002 14.3994 84 39.3994
+c8.5 10.2002 23.5996 11.6006 33.7998 3.10059zM168 208c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32s32 -14.2998 32 -32s-14.2998 -32 -32 -32zM328 268c25.7002 0 55.9004 -16.9004 59.7002 -42.0996c1.7998 -11.1006 -11.2998 -18.2002 -19.7998 -10.8008l-9.5 8.5
+c-14.8008 13.2002 -46.2002 13.2002 -61 0l-9.5 -8.5c-8.30078 -7.39941 -21.5 -0.399414 -19.8008 10.8008c4 25.1992 34.2002 42.0996 59.9004 42.0996z" />
+ <glyph glyph-name="angry" unicode="&#xf556;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM248 136c33.5996 0 65.2002 -14.7998 86.7998 -40.5996
+c8.40039 -10.2002 7.10059 -25.3008 -3.09961 -33.8008c-10.6006 -8.89941 -25.7002 -6.69922 -33.7998 3c-24.8008 29.7002 -75 29.7002 -99.8008 0c-8.5 -10.1992 -23.5996 -11.5 -33.7998 -3s-11.5996 23.6006 -3.09961 33.8008
+c21.5996 25.7998 53.2002 40.5996 86.7998 40.5996zM200 208c0 -17.7002 -14.2998 -32.0996 -32 -32.0996s-32 14.2998 -32 32c0 6.19922 2.2002 11.6992 5.2998 16.5996l-28.2002 8.5c-12.6992 3.7998 -19.8994 17.2002 -16.0996 29.9004
+c3.7998 12.6992 17.0996 20 29.9004 16.0996l80 -24c12.6992 -3.7998 19.8994 -17.2002 16.0996 -29.9004c-3.09961 -10.3994 -12.7002 -17.0996 -23 -17.0996zM399 262.9c3.7998 -12.7002 -3.40039 -26.1006 -16.0996 -29.8008l-28.2002 -8.5
+c3.09961 -4.89941 5.2998 -10.3994 5.2998 -16.5996c0 -17.7002 -14.2998 -32 -32 -32s-32 14.2998 -32 32c-10.2998 0 -19.9004 6.7002 -23 17.0996c-3.7998 12.7002 3.40039 26.1006 16.0996 29.9004l80 24c12.8008 3.7998 26.1006 -3.40039 29.9004 -16.0996z" />
+ <glyph glyph-name="dizzy" unicode="&#xf567;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM214.2 209.9
+c-7.90039 -7.90039 -20.5 -7.90039 -28.4004 -0.200195l-17.7998 17.7998l-17.7998 -17.7998c-7.7998 -7.7998 -20.5 -7.7998 -28.2998 0c-7.80078 7.7998 -7.80078 20.5 0 28.2998l17.8994 17.9004l-17.8994 17.8994c-7.80078 7.7998 -7.80078 20.5 0 28.2998
+c7.7998 7.80078 20.5 7.80078 28.2998 0l17.7998 -17.7998l17.9004 17.9004c7.7998 7.7998 20.5 7.7998 28.2998 0s7.7998 -20.5 0 -28.2998l-17.9004 -17.9004l17.9004 -17.7998c7.7998 -7.7998 7.7998 -20.5 0 -28.2998zM374.2 302.1
+c7.7002 -7.7998 7.7002 -20.3994 0 -28.1992l-17.9004 -17.9004l17.7998 -18c7.80078 -7.7998 7.80078 -20.5 0 -28.2998c-7.7998 -7.7998 -20.5 -7.7998 -28.2998 0l-17.7998 17.7998l-17.7998 -17.7998c-7.7998 -7.7998 -20.5 -7.7998 -28.2998 0
+c-7.80078 7.7998 -7.80078 20.5 0 28.2998l17.8994 17.9004l-17.8994 17.8994c-7.80078 7.7998 -7.80078 20.5 0 28.2998c7.7998 7.80078 20.5 7.80078 28.2998 0l17.7998 -17.7998l17.9004 17.7998c7.7998 7.80078 20.5 7.80078 28.2998 0zM248 176
+c35.2998 0 64 -28.7002 64 -64s-28.7002 -64 -64 -64s-64 28.7002 -64 64s28.7002 64 64 64z" />
+ <glyph glyph-name="flushed" unicode="&#xf579;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM344 304c44.2002 0 80 -35.7998 80 -80s-35.7998 -80 -80 -80
+s-80 35.7998 -80 80s35.7998 80 80 80zM344 176c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48zM344 248c13.2998 0 24 -10.7002 24 -24s-10.7002 -24 -24 -24s-24 10.7002 -24 24s10.7002 24 24 24zM232 224c0 -44.2002 -35.7998 -80 -80 -80
+s-80 35.7998 -80 80s35.7998 80 80 80s80 -35.7998 80 -80zM152 176c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48zM152 248c13.2998 0 24 -10.7002 24 -24s-10.7002 -24 -24 -24s-24 10.7002 -24 24s10.7002 24 24 24zM312 104
+c13.2002 0 24 -10.7998 24 -24s-10.7998 -24 -24 -24h-128c-13.2002 0 -24 10.7998 -24 24s10.7998 24 24 24h128z" />
+ <glyph glyph-name="frown-open" unicode="&#xf57a;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM200 240c0 -17.7002 -14.2998 -32 -32 -32s-32 14.2998 -32 32
+s14.2998 32 32 32s32 -14.2998 32 -32zM328 272c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32zM248 160c35.5996 0 88.7998 -21.2998 95.7998 -61.2002c2 -11.7998 -9.09961 -21.5996 -20.5 -18.0996
+c-31.2002 9.59961 -59.3994 15.2998 -75.2998 15.2998s-44.0996 -5.7002 -75.2998 -15.2998c-11.5 -3.40039 -22.5 6.2998 -20.5 18.0996c7 39.9004 60.2002 61.2002 95.7998 61.2002z" />
+ <glyph glyph-name="grimace" unicode="&#xf57f;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM168 208c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32
+s32 -14.2998 32 -32s-14.2998 -32 -32 -32zM328 208c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32s32 -14.2998 32 -32s-14.2998 -32 -32 -32zM344 192c26.5 0 48 -21.5 48 -48v-32c0 -26.5 -21.5 -48 -48 -48h-192c-26.5 0 -48 21.5 -48 48v32c0 26.5 21.5 48 48 48
+h192zM176 96v24h-40v-8c0 -8.7998 7.2002 -16 16 -16h24zM176 136v24h-24c-8.7998 0 -16 -7.2002 -16 -16v-8h40zM240 96v24h-48v-24h48zM240 136v24h-48v-24h48zM304 96v24h-48v-24h48zM304 136v24h-48v-24h48zM360 112v8h-40v-24h24c8.7998 0 16 7.2002 16 16zM360 136v8
+c0 8.7998 -7.2002 16 -16 16h-24v-24h40z" />
+ <glyph glyph-name="grin" unicode="&#xf580;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM353.6 143.4c10 3.09961 19.3008 -5.5 17.7002 -15.3008
+c-8 -47.0996 -71.2998 -80 -123.3 -80s-115.4 32.9004 -123.3 80c-1.7002 9.90039 7.7998 18.4004 17.7002 15.3008c26 -8.30078 64.3994 -13.1006 105.6 -13.1006s79.7002 4.7998 105.6 13.1006zM168 208c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32
+s32 -14.2998 32 -32s-14.2998 -32 -32 -32zM328 208c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32s32 -14.2998 32 -32s-14.2998 -32 -32 -32z" />
+ <glyph glyph-name="grin-alt" unicode="&#xf581;" horiz-adv-x="496"
+d="M200.3 200c-7.5 -11.4004 -24.5996 -12 -32.7002 0c-12.3994 18.7002 -15.1992 37.2998 -15.6992 56c0.599609 18.7002 3.2998 37.2998 15.6992 56c7.60059 11.4004 24.7002 12 32.7002 0c12.4004 -18.7002 15.2002 -37.2998 15.7002 -56
+c-0.599609 -18.7002 -3.2998 -37.2998 -15.7002 -56zM328.3 200c-7.5 -11.4004 -24.5996 -12 -32.7002 0c-12.3994 18.7002 -15.1992 37.2998 -15.6992 56c0.599609 18.7002 3.2998 37.2998 15.6992 56c7.60059 11.4004 24.7002 12 32.7002 0
+c12.4004 -18.7002 15.2002 -37.2998 15.7002 -56c-0.599609 -18.7002 -3.2998 -37.2998 -15.7002 -56zM248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200
+s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM353.6 143.4c10 3.09961 19.3008 -5.5 17.7002 -15.3008c-8 -47.0996 -71.2998 -80 -123.3 -80s-115.4 32.8008 -123.3 80c-1.7002 10 7.7998 18.4004 17.7002 15.3008c26 -8.30078 64.3994 -13.1006 105.6 -13.1006
+s79.7002 4.7998 105.6 13.1006z" />
+ <glyph glyph-name="grin-beam" unicode="&#xf582;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM353.6 143.4c10 3.09961 19.3008 -5.5 17.7002 -15.3008
+c-8 -47.0996 -71.2998 -80 -123.3 -80s-115.4 32.9004 -123.3 80c-1.7002 10 7.89941 18.4004 17.7002 15.3008c26 -8.30078 64.3994 -13.1006 105.6 -13.1006s79.7002 4.7998 105.6 13.1006zM117.7 216.3c-3.60059 1.10059 -6 4.60059 -5.7002 8.2998
+c3.2998 42.1006 32.2002 71.4004 56 71.4004s52.7002 -29.2998 56 -71.4004c0.299805 -3.7998 -2.09961 -7.19922 -5.7002 -8.2998c-3.09961 -1 -7.2002 0 -9.2998 3.7002l-9.5 17c-7.7002 13.7002 -19.2002 21.5996 -31.5 21.5996s-23.7998 -7.89941 -31.5 -21.5996
+l-9.5 -17c-1.90039 -3.2002 -5.7998 -4.7998 -9.2998 -3.7002zM277.7 216.3c-3.60059 1.10059 -6 4.60059 -5.7002 8.2998c3.2998 42.1006 32.2002 71.4004 56 71.4004s52.7002 -29.2998 56 -71.4004c0.299805 -3.7998 -2.09961 -7.19922 -5.7002 -8.2998
+c-3.09961 -1 -7.2002 0 -9.2998 3.7002l-9.5 17c-7.7002 13.7002 -19.2002 21.5996 -31.5 21.5996s-23.7998 -7.89941 -31.5 -21.5996l-9.5 -17c-1.90039 -3.2002 -5.7998 -4.7998 -9.2998 -3.7002z" />
+ <glyph glyph-name="grin-beam-sweat" unicode="&#xf583;" horiz-adv-x="496"
+d="M440 288c-29.5 0 -53.2998 26.2998 -53.2998 58.7002c0 25 31.7002 75.5 46.2002 97.2998c3.5 5.2998 10.5996 5.2998 14.1992 0c14.5 -21.7998 46.2002 -72.2998 46.2002 -97.2998c0 -32.4004 -23.7998 -58.7002 -53.2998 -58.7002zM248 48
+c-51.9004 0 -115.3 32.9004 -123.3 80c-1.7002 10 7.89941 18.4004 17.7002 15.2998c26 -8.2998 64.3994 -13.0996 105.6 -13.0996s79.7002 4.7998 105.6 13.0996c10 3.2002 19.4004 -5.39941 17.7002 -15.2998c-8 -47.0996 -71.3994 -80 -123.3 -80zM378.3 216.3
+c-3.09961 -0.899414 -7.2002 0.100586 -9.2998 3.7002l-9.5 17c-7.7002 13.7002 -19.2002 21.5996 -31.5 21.5996s-23.7998 -7.89941 -31.5 -21.5996l-9.5 -17c-1.90039 -3.2002 -5.7998 -4.7998 -9.2998 -3.7002c-3.60059 1.10059 -6 4.60059 -5.7002 8.2998
+c3.2998 42.1006 32.2002 71.4004 56 71.4004s52.7002 -29.2998 56 -71.4004c0.299805 -3.7998 -2.09961 -7.19922 -5.7002 -8.2998zM483.6 269.2c8 -24.2998 12.4004 -50.2002 12.4004 -77.2002c0 -137 -111 -248 -248 -248s-248 111 -248 248s111 248 248 248
+c45.7002 0 88.4004 -12.5996 125.2 -34.2002c-10.9004 -21.5996 -15.5 -36.2002 -17.2002 -45.7002c-31.2002 20.1006 -68.2002 31.9004 -108 31.9004c-110.3 0 -200 -89.7002 -200 -200s89.7002 -200 200 -200s200 89.7002 200 200
+c0 22.5 -3.90039 44.0996 -10.7998 64.2998c0.399414 0 21.7998 -2.7998 46.3994 12.9004zM168 258.6c-12.2998 0 -23.7998 -7.7998 -31.5 -21.5996l-9.5 -17c-1.90039 -3.2002 -5.7998 -4.7998 -9.2998 -3.7002c-3.60059 1.10059 -6 4.60059 -5.7002 8.2998
+c3.2998 42.1006 32.2002 71.4004 56 71.4004s52.7002 -29.2998 56 -71.4004c0.299805 -3.7998 -2.09961 -7.19922 -5.7002 -8.2998c-3.09961 -1 -7.2002 0 -9.2998 3.7002l-9.5 17c-7.7002 13.7002 -19.2002 21.5996 -31.5 21.5996z" />
+ <glyph glyph-name="grin-hearts" unicode="&#xf584;" horiz-adv-x="496"
+d="M353.6 143.4c10 3.09961 19.3008 -5.5 17.7002 -15.3008c-8 -47.0996 -71.2998 -80 -123.3 -80s-115.4 32.8008 -123.3 80c-1.7002 10 7.89941 18.4004 17.7002 15.3008c26 -8.30078 64.3994 -13.1006 105.6 -13.1006s79.7002 4.7998 105.6 13.1006zM200.8 192.3
+l-70.2002 18.1006c-20.3994 5.2998 -31.8994 27 -24.1992 47.1992c6.69922 17.7002 26.6992 26.7002 44.8994 22l7.10059 -1.89941l2 7.09961c5.09961 18.1006 22.8994 30.9004 41.5 27.9004c21.3994 -3.40039 34.3994 -24.2002 28.7998 -44.5l-19.4004 -69.9004
+c-1.2998 -4.5 -6 -7.2002 -10.5 -6zM389.6 257.6c7.7002 -20.1992 -3.7998 -41.7998 -24.1992 -47.0996l-70.2002 -18.2002c-4.60059 -1.2002 -9.2998 1.5 -10.5 6l-19.4004 69.9004c-5.59961 20.2998 7.40039 41.0996 28.7998 44.5c18.7002 3 36.5 -9.7998 41.5 -27.9004
+l2 -7.09961l7.10059 1.89941c18.2002 4.7002 38.2002 -4.39941 44.8994 -22zM248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200
+s89.7002 -200 200 -200z" />
+ <glyph glyph-name="grin-squint" unicode="&#xf585;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM353.6 143.4c10 3.09961 19.3008 -5.5 17.7002 -15.3008
+c-8 -47.0996 -71.2998 -80 -123.3 -80s-115.4 32.9004 -123.3 80c-1.7002 9.90039 7.7998 18.4004 17.7002 15.3008c26 -8.30078 64.3994 -13.1006 105.6 -13.1006s79.7002 4.7998 105.6 13.1006zM118.9 184.2c-3.80078 4.39941 -3.90039 11 -0.100586 15.5l33.6006 40.2998
+l-33.6006 40.2998c-3.7002 4.5 -3.7002 11 0.100586 15.5c3.89941 4.40039 10.1992 5.5 15.2998 2.5l80 -48c3.59961 -2.2002 5.7998 -6.09961 5.7998 -10.2998s-2.2002 -8.09961 -5.7998 -10.2998l-80 -48c-5.40039 -3.2002 -11.7002 -1.7002 -15.2998 2.5zM361.8 181.7
+l-80 48c-3.59961 2.2002 -5.7998 6.09961 -5.7998 10.2998s2.2002 8.09961 5.7998 10.2998l80 48c5.10059 2.90039 11.5 1.90039 15.2998 -2.5c3.80078 -4.5 3.90039 -11 0.100586 -15.5l-33.6006 -40.2998l33.6006 -40.2998c3.7002 -4.5 3.7002 -11 -0.100586 -15.5
+c-3.59961 -4.2002 -9.89941 -5.7002 -15.2998 -2.5z" />
+ <glyph glyph-name="grin-squint-tears" unicode="&#xf586;"
+d="M117.1 63.9004c6.30078 0.899414 11.7002 -4.5 10.9004 -10.9004c-3.7002 -25.7998 -13.7002 -84 -30.5996 -100.9c-22 -21.8994 -57.9004 -21.5 -80.3008 0.900391c-22.3994 22.4004 -22.7998 58.4004 -0.899414 80.2998
+c16.8994 16.9004 75.0996 26.9004 100.899 30.6006zM75.9004 105.6c-19.6006 -3.89941 -35.1006 -8.09961 -47.3008 -12.1992c-39.2998 90.5996 -22.0996 199.899 52 274c48.5 48.3994 111.9 72.5996 175.4 72.5996c38.9004 0 77.7998 -9.2002 113.2 -27.4004
+c-4 -12.1992 -8.2002 -28 -12 -48.2998c-30.4004 17.9004 -65 27.7002 -101.2 27.7002c-53.4004 0 -103.6 -20.7998 -141.4 -58.5996c-61.5996 -61.5 -74.2998 -153.4 -38.6992 -227.801zM428.2 293.2c20.2998 3.89941 36.2002 8 48.5 12
+c47.8994 -93.2002 32.8994 -210.5 -45.2002 -288.601c-48.5 -48.3994 -111.9 -72.5996 -175.4 -72.5996c-33.6992 0 -67.2998 7 -98.6992 20.5996c4.19922 12.2002 8.2998 27.7002 12.1992 47.2002c26.6006 -12.7998 55.9004 -19.7998 86.4004 -19.7998
+c53.4004 0 103.6 20.7998 141.4 58.5996c65.6992 65.7002 75.7998 166 30.7998 242.601zM394.9 320.1c-6.30078 -0.899414 -11.7002 4.5 -10.9004 10.9004c3.7002 25.7998 13.7002 84 30.5996 100.9c22 21.8994 57.9004 21.5 80.3008 -0.900391
+c22.3994 -22.4004 22.7998 -58.4004 0.899414 -80.2998c-16.8994 -16.9004 -75.0996 -26.9004 -100.899 -30.6006zM207.9 211.8c3 -3 4.19922 -7.2998 3.19922 -11.5l-22.5996 -90.5c-1.40039 -5.39941 -6.2002 -9.09961 -11.7002 -9.09961h-0.899414
+c-5.80078 0.5 -10.5 5.09961 -11 10.8994l-4.80078 52.3008l-52.2998 4.7998c-5.7998 0.5 -10.3994 5.2002 -10.8994 11c-0.400391 5.89941 3.39941 11.2002 9.09961 12.5996l90.5 22.7002c4.2002 1 8.40039 -0.200195 11.4004 -3.2002zM247.6 236.9
+c-0.0996094 0 -6.39941 -1.80078 -11.3994 3.19922c-3 3 -4.2002 7.30078 -3.2002 11.4004l22.5996 90.5c1.40039 5.7002 7 9.2002 12.6006 9.09961c5.7998 -0.5 10.5 -5.09961 11 -10.8994l4.7998 -52.2998l52.2998 -4.80078c5.7998 -0.5 10.4004 -5.19922 10.9004 -11
+c0.399414 -5.89941 -3.40039 -11.1992 -9.10059 -12.5996zM299.6 148.4c29.1006 29.0996 53 59.5996 65.3008 83.7998c4.89941 9.2998 17.5996 9.89941 23.3994 1.7002c27.7002 -38.9004 6.10059 -106.9 -30.5996 -143.7s-104.8 -58.2998 -143.7 -30.6006
+c-8.2998 5.90039 -7.5 18.6006 1.7002 23.4004c24.2002 12.5 54.7998 36.2998 83.8994 65.4004z" />
+ <glyph glyph-name="grin-stars" unicode="&#xf587;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM353.6 143.4c10 3.09961 19.3008 -5.5 17.7002 -15.3008
+c-8 -47.0996 -71.2998 -80 -123.3 -80s-115.4 32.8008 -123.3 80c-1.7002 10 7.89941 18.4004 17.7002 15.3008c26 -8.30078 64.3994 -13.1006 105.6 -13.1006s79.7002 4.7998 105.6 13.1006zM125.7 200.9l6.09961 34.8994l-25.3994 24.6006
+c-4.60059 4.59961 -1.90039 12.2998 4.2998 13.1992l34.8994 5l15.5 31.6006c2.90039 5.7998 11 5.7998 13.9004 0l15.5 -31.6006l34.9004 -5c6.19922 -1 8.7998 -8.69922 4.2998 -13.1992l-25.4004 -24.6006l6 -34.8994c1 -6.2002 -5.39941 -11 -11 -7.90039
+l-31.2998 16.2998l-31.2998 -16.2998c-5.60059 -3.09961 -12 1.7002 -11 7.90039zM385.4 273.6c6.19922 -1 8.89941 -8.59961 4.39941 -13.1992l-25.3994 -24.6006l6 -34.8994c1 -6.2002 -5.40039 -11 -11 -7.90039l-31.3008 16.2998l-31.2998 -16.2998
+c-5.59961 -3.09961 -12 1.7002 -11 7.90039l6 34.8994l-25.3994 24.6006c-4.60059 4.59961 -1.90039 12.2998 4.2998 13.1992l34.8994 5l15.5 31.6006c2.90039 5.7998 11 5.7998 13.9004 0l15.5 -31.6006z" />
+ <glyph glyph-name="grin-tears" unicode="&#xf588;" horiz-adv-x="640"
+d="M117.1 191.9c6.30078 0.899414 11.7002 -4.5 10.9004 -10.9004c-3.7002 -25.7998 -13.7002 -84 -30.5996 -100.9c-22 -21.8994 -57.9004 -21.5 -80.3008 0.900391c-22.3994 22.4004 -22.7998 58.4004 -0.899414 80.2998c16.8994 16.9004 75.0996 26.9004 100.899 30.6006
+zM623.8 161.3c21.9004 -21.8994 21.5 -57.8994 -0.799805 -80.2002c-22.4004 -22.3994 -58.4004 -22.7998 -80.2998 -0.899414c-16.9004 16.8994 -26.9004 75.0996 -30.6006 100.899c-0.899414 6.30078 4.5 11.7002 10.8008 10.8008
+c25.7998 -3.7002 84 -13.7002 100.899 -30.6006zM497.2 99.5996c12.3994 -37.2998 25.0996 -43.7998 28.2998 -46.5c-44.5996 -65.7998 -120 -109.1 -205.5 -109.1s-160.9 43.2998 -205.5 109.1c3.09961 2.60059 15.7998 9.10059 28.2998 46.5
+c33.4004 -63.8994 100.3 -107.6 177.2 -107.6s143.8 43.7002 177.2 107.6zM122.7 223.5c-2.40039 0.299805 -5 2.5 -49.5 -6.90039c12.3994 125.4 118.1 223.4 246.8 223.4s234.4 -98 246.8 -223.5c-44.2998 9.40039 -47.3994 7.2002 -49.5 7
+c-15.2002 95.2998 -97.7998 168.5 -197.3 168.5s-182.1 -73.2002 -197.3 -168.5zM320 48c-51.9004 0 -115.3 32.9004 -123.3 80c-1.7002 10 7.89941 18.4004 17.7002 15.2998c26 -8.2998 64.3994 -13.0996 105.6 -13.0996s79.7002 4.7998 105.6 13.0996
+c10 3.2002 19.4004 -5.39941 17.7002 -15.2998c-8 -47.0996 -71.3994 -80 -123.3 -80zM450.3 216.3c-3.09961 -0.899414 -7.2002 0.100586 -9.2998 3.7002l-9.5 17c-7.7002 13.7002 -19.2002 21.5996 -31.5 21.5996s-23.7998 -7.89941 -31.5 -21.5996l-9.5 -17
+c-1.90039 -3.2002 -5.7998 -4.7998 -9.2998 -3.7002c-3.60059 1.10059 -6 4.60059 -5.7002 8.2998c3.2998 42.1006 32.2002 71.4004 56 71.4004s52.7002 -29.2998 56 -71.4004c0.299805 -3.7998 -2.09961 -7.19922 -5.7002 -8.2998zM240 258.6
+c-12.2998 0 -23.7998 -7.7998 -31.5 -21.5996l-9.5 -17c-1.90039 -3.2002 -5.7998 -4.7998 -9.2998 -3.7002c-3.60059 1.10059 -6 4.60059 -5.7002 8.2998c3.2998 42.1006 32.2002 71.4004 56 71.4004s52.7002 -29.2998 56 -71.4004
+c0.299805 -3.7998 -2.09961 -7.19922 -5.7002 -8.2998c-3.09961 -1 -7.2002 0 -9.2998 3.7002l-9.5 17c-7.7002 13.7002 -19.2002 21.5996 -31.5 21.5996z" />
+ <glyph glyph-name="grin-tongue" unicode="&#xf589;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM312 40h0.0996094v43.7998l-17.6992 8.7998c-15.1006 7.60059 -31.5 -1.69922 -34.9004 -16.5l-2.7998 -12.0996c-2.10059 -9.2002 -15.2002 -9.2002 -17.2998 0
+l-2.80078 12.0996c-3.39941 14.8008 -19.8994 24 -34.8994 16.5l-17.7002 -8.7998v-42.7998c0 -35.2002 28 -64.5 63.0996 -65c35.8008 -0.5 64.9004 28.4004 64.9004 64zM340.2 14.7002c64 33.3994 107.8 100.3 107.8 177.3c0 110.3 -89.7002 200 -200 200
+s-200 -89.7002 -200 -200c0 -77 43.7998 -143.9 107.8 -177.3c-2.2002 8.09961 -3.7998 16.5 -3.7998 25.2998v43.5c-14.2002 12.4004 -24.4004 27.5 -27.2998 44.5c-1.7002 10 7.7998 18.4004 17.7002 15.2998c26 -8.2998 64.3994 -13.0996 105.6 -13.0996
+s79.7002 4.7998 105.6 13.0996c10 3.2002 19.4004 -5.39941 17.7002 -15.2998c-2.89941 -17 -13.0996 -32.0996 -27.2998 -44.5v-43.5c0 -8.7998 -1.59961 -17.2002 -3.7998 -25.2998zM168 272c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32
+s14.2998 32 32 32zM328 272c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32z" />
+ <glyph glyph-name="grin-tongue-squint" unicode="&#xf58a;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM312 40h0.0996094v43.7998l-17.6992 8.7998c-15.1006 7.60059 -31.5 -1.69922 -34.9004 -16.5l-2.7998 -12.0996c-2.10059 -9.2002 -15.2002 -9.2002 -17.2998 0
+l-2.80078 12.0996c-3.39941 14.8008 -19.8994 24 -34.8994 16.5l-17.7002 -8.7998v-42.7998c0 -35.2002 28 -64.5 63.0996 -65c35.8008 -0.5 64.9004 28.4004 64.9004 64zM340.2 14.7002c64 33.3994 107.8 100.3 107.8 177.3c0 110.3 -89.7002 200 -200 200
+s-200 -89.7002 -200 -200c0 -77 43.7998 -143.9 107.8 -177.3c-2.2002 8.09961 -3.7998 16.5 -3.7998 25.2998v43.5c-14.2002 12.4004 -24.4004 27.5 -27.2998 44.5c-1.7002 10 7.7998 18.4004 17.7002 15.2998c26 -8.2998 64.3994 -13.0996 105.6 -13.0996
+s79.7002 4.7998 105.6 13.0996c10 3.2002 19.4004 -5.39941 17.7002 -15.2998c-2.89941 -17 -13.0996 -32.0996 -27.2998 -44.5v-43.5c0 -8.7998 -1.59961 -17.2002 -3.7998 -25.2998zM377.1 295.8c3.80078 -4.39941 3.90039 -11 0.100586 -15.5l-33.6006 -40.2998
+l33.6006 -40.2998c3.7002 -4.5 3.7002 -11 -0.100586 -15.5c-3.59961 -4.2002 -9.89941 -5.7002 -15.2998 -2.5l-80 48c-3.59961 2.2002 -5.7998 6.09961 -5.7998 10.2998s2.2002 8.09961 5.7998 10.2998l80 48c5 3 11.5 1.90039 15.2998 -2.5zM214.2 250.3
+c3.59961 -2.2002 5.7998 -6.09961 5.7998 -10.2998s-2.2002 -8.09961 -5.7998 -10.2998l-80 -48c-5.40039 -3.2002 -11.7002 -1.7002 -15.2998 2.5c-3.80078 4.5 -3.90039 11 -0.100586 15.5l33.6006 40.2998l-33.6006 40.2998c-3.7002 4.5 -3.7002 11 0.100586 15.5
+c3.89941 4.5 10.2998 5.5 15.2998 2.5z" />
+ <glyph glyph-name="grin-tongue-wink" unicode="&#xf58b;" horiz-adv-x="496"
+d="M152 268c25.7002 0 55.9004 -16.9004 59.7998 -42.0996c0.799805 -5 -1.7002 -10 -6.09961 -12.4004c-5.7002 -3.09961 -11.2002 -0.599609 -13.7002 1.59961l-9.5 8.5c-14.7998 13.2002 -46.2002 13.2002 -61 0l-9.5 -8.5
+c-3.7998 -3.39941 -9.2998 -4 -13.7002 -1.59961c-4.39941 2.40039 -6.89941 7.40039 -6.09961 12.4004c3.89941 25.1992 34.0996 42.0996 59.7998 42.0996zM328 320c44.2002 0 80 -35.7998 80 -80s-35.7998 -80 -80 -80s-80 35.7998 -80 80s35.7998 80 80 80zM328 192
+c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48zM328 264c13.2998 0 24 -10.7002 24 -24s-10.7002 -24 -24 -24s-24 10.7002 -24 24s10.7002 24 24 24zM248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248z
+M312 40h0.0996094v43.7998l-17.6992 8.7998c-15.1006 7.60059 -31.5 -1.69922 -34.9004 -16.5l-2.7998 -12.0996c-2.10059 -9.2002 -15.2002 -9.2002 -17.2998 0l-2.80078 12.0996c-3.39941 14.8008 -19.8994 24 -34.8994 16.5l-17.7002 -8.7998v-42.7998
+c0 -35.2002 28 -64.5 63.0996 -65c35.8008 -0.5 64.9004 28.4004 64.9004 64zM340.2 14.7002c64 33.3994 107.8 100.3 107.8 177.3c0 110.3 -89.7002 200 -200 200s-200 -89.7002 -200 -200c0 -77 43.7998 -143.9 107.8 -177.3
+c-2.2002 8.09961 -3.7998 16.5 -3.7998 25.2998v43.5c-14.2002 12.4004 -24.4004 27.5 -27.2998 44.5c-1.7002 10 7.7998 18.4004 17.7002 15.2998c26 -8.2998 64.3994 -13.0996 105.6 -13.0996s79.7002 4.7998 105.6 13.0996c10 3.2002 19.4004 -5.39941 17.7002 -15.2998
+c-2.89941 -17 -13.0996 -32.0996 -27.2998 -44.5v-43.5c0 -8.7998 -1.59961 -17.2002 -3.7998 -25.2998z" />
+ <glyph glyph-name="grin-wink" unicode="&#xf58c;" horiz-adv-x="496"
+d="M328 268c25.6904 0 55.8799 -16.9199 59.8701 -42.1201c1.72949 -11.0898 -11.3506 -18.2695 -19.8301 -10.8398l-9.5498 8.47949c-14.8105 13.1904 -46.1602 13.1904 -60.9707 0l-9.5498 -8.47949c-8.33008 -7.40039 -21.5801 -0.379883 -19.8301 10.8398
+c3.98047 25.2002 34.1699 42.1201 59.8604 42.1201zM168 208c-17.6699 0 -32 14.3301 -32 32s14.3301 32 32 32s32 -14.3301 32 -32s-14.3301 -32 -32 -32zM353.55 143.36c10.04 3.13965 19.3906 -5.4502 17.71 -15.3408
+c-7.92969 -47.1494 -71.3193 -80.0195 -123.26 -80.0195s-115.33 32.8701 -123.26 80.0195c-1.69043 9.9707 7.76953 18.4707 17.71 15.3408c25.9297 -8.31055 64.3994 -13.0605 105.55 -13.0605s79.6201 4.75977 105.55 13.0605zM248 440c136.97 0 248 -111.03 248 -248
+s-111.03 -248 -248 -248s-248 111.03 -248 248s111.03 248 248 248zM248 -8c110.28 0 200 89.7197 200 200s-89.7197 200 -200 200s-200 -89.7197 -200 -200s89.7197 -200 200 -200z" />
+ <glyph glyph-name="kiss" unicode="&#xf596;" horiz-adv-x="496"
+d="M168 272c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32zM304 140c0 -13 -13.4004 -27.2998 -35.0996 -36.4004c21.7998 -8.69922 35.1992 -23 35.1992 -36c0 -19.1992 -28.6992 -41.5 -71.5 -44h-0.5
+c-3.69922 0 -7 2.60059 -7.7998 6.2002c-0.899414 3.7998 1.10059 7.7002 4.7002 9.2002l17 7.2002c12.9004 5.5 20.7002 13.5 20.7002 21.5s-7.7998 16 -20.7998 21.5l-16.9004 7.2002c-6 2.59961 -5.7002 12.3994 0 14.7998l17 7.2002
+c12.9004 5.5 20.7002 13.5 20.7002 21.5s-7.7998 16 -20.7998 21.5l-16.9004 7.19922c-3.59961 1.5 -5.59961 5.40039 -4.7002 9.2002c0.799805 3.7998 4.40039 6.60059 8.2002 6.2002c42.7002 -2.5 71.5 -24.7998 71.5 -44zM248 440c137 0 248 -111 248 -248
+s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM328 272c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32z
+" />
+ <glyph glyph-name="kiss-beam" unicode="&#xf597;" horiz-adv-x="496"
+d="M168 296c23.7998 0 52.7002 -29.2998 55.7998 -71.4004c0.299805 -3.7998 -2 -7.19922 -5.59961 -8.2998c-3.10059 -1 -7.2002 0 -9.2998 3.7002l-9.5 17c-7.7002 13.7002 -19.2002 21.5996 -31.5 21.5996c-12.3008 0 -23.8008 -7.89941 -31.5 -21.5996l-9.5 -17
+c-1.80078 -3.2002 -5.80078 -4.7002 -9.30078 -3.7002c-3.59961 1.10059 -5.89941 4.60059 -5.59961 8.2998c3.2998 42.1006 32.2002 71.4004 56 71.4004zM248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8
+c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM304 140c0 -13 -13.4004 -27.2998 -35.0996 -36.4004c21.7998 -8.69922 35.1992 -23 35.1992 -36c0 -19.1992 -28.6992 -41.5 -71.5 -44h-0.5
+c-3.69922 0 -7 2.60059 -7.7998 6.2002c-0.899414 3.7998 1.10059 7.7002 4.7002 9.2002l17 7.2002c12.9004 5.5 20.7002 13.5 20.7002 21.5s-7.7998 16 -20.7998 21.5l-16.9004 7.2002c-6 2.59961 -5.7002 12.3994 0 14.7998l17 7.2002
+c12.9004 5.5 20.7002 13.5 20.7002 21.5s-7.7998 16 -20.7998 21.5l-16.9004 7.19922c-3.59961 1.5 -5.59961 5.40039 -4.7002 9.2002c0.799805 3.7998 4.40039 6.60059 8.2002 6.2002c42.7002 -2.5 71.5 -24.7998 71.5 -44zM328 296
+c23.7998 0 52.7002 -29.2998 55.7998 -71.4004c0.299805 -3.7998 -2 -7.19922 -5.59961 -8.2998c-3.10059 -1 -7.2002 0 -9.2998 3.7002l-9.5 17c-7.7002 13.7002 -19.2002 21.5996 -31.5 21.5996c-12.3008 0 -23.8008 -7.89941 -31.5 -21.5996l-9.5 -17
+c-1.80078 -3.2002 -5.80078 -4.7002 -9.30078 -3.7002c-3.59961 1.10059 -5.89941 4.60059 -5.59961 8.2998c3.2998 42.1006 32.2002 71.4004 56 71.4004z" />
+ <glyph glyph-name="kiss-wink-heart" unicode="&#xf598;" horiz-adv-x="503"
+d="M304 139.5c0 -13 -13.4004 -27.2998 -35.0996 -36.4004c21.7998 -8.69922 35.1992 -23 35.1992 -36c0 -19.1992 -28.6992 -41.5 -71.5 -44h-0.5c-3.69922 0 -7 2.60059 -7.7998 6.2002c-0.899414 3.7998 1.10059 7.7002 4.7002 9.2002l17 7.2002
+c12.9004 5.5 20.7002 13.5 20.7002 21.5s-7.7998 16 -20.7998 21.5l-16.9004 7.2002c-6 2.59961 -5.7002 12.3994 0 14.7998l17 7.2002c12.9004 5.5 20.7002 13.5 20.7002 21.5s-7.7998 16 -20.7998 21.5l-16.9004 7.19922c-3.59961 1.5 -5.59961 5.40039 -4.7002 9.2002
+c0.799805 3.7998 4.40039 6.60059 8.2002 6.2002c42.7002 -2.5 71.5 -24.7998 71.5 -44zM374.5 223c-14.7998 13.2002 -46.2002 13.2002 -61 0l-9.5 -8.5c-2.5 -2.2998 -7.90039 -4.7002 -13.7002 -1.59961c-4.39941 2.39941 -6.89941 7.39941 -6.09961 12.3994
+c3.89941 25.2002 34.2002 42.1006 59.7998 42.1006s55.7998 -16.9004 59.7998 -42.1006c0.799805 -5 -1.7002 -10 -6.09961 -12.3994c-4.40039 -2.40039 -9.90039 -1.7002 -13.7002 1.59961zM136 239.5c0 17.7002 14.2998 32 32 32s32 -14.2998 32 -32s-14.2998 -32 -32 -32
+s-32 14.2998 -32 32zM501.1 45.5c9.2002 -23.9004 -4.39941 -49.4004 -28.5 -55.7002l-83 -21.5c-5.39941 -1.39941 -10.8994 1.7998 -12.3994 7.10059l-22.9004 82.5996c-6.59961 24 8.7998 48.5996 34 52.5996c22 3.5 43.1006 -11.5996 49 -33l2.2998 -8.39941
+l8.40039 2.2002c21.5996 5.59961 45.0996 -5.10059 53.0996 -25.9004zM334 11.7002c17.7002 -64 10.9004 -39.5 13.4004 -46.7998c-30.5 -13.4004 -64 -20.9004 -99.4004 -20.9004c-137 0 -248 111 -248 248s111 248 248 248s248 -111 247.9 -248
+c0 -31.7998 -6.2002 -62.0996 -17.1006 -90c-6 1.5 -12.2002 2.7998 -18.5996 2.90039c-5.60059 9.69922 -13.6006 17.5 -22.6006 23.8994c6.7002 19.9004 10.4004 41.1006 10.4004 63.2002c0 110.3 -89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200
+c30.7998 0 59.9004 7.2002 86 19.7002z" />
+ <glyph glyph-name="laugh" unicode="&#xf599;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM389.4 50.5996c37.7998 37.8008 58.5996 88 58.5996 141.4s-20.7998 103.6 -58.5996 141.4c-37.8008 37.7998 -88 58.5996 -141.4 58.5996s-103.6 -20.7998 -141.4 -58.5996
+c-37.7998 -37.8008 -58.5996 -88 -58.5996 -141.4s20.7998 -103.6 58.5996 -141.4c37.8008 -37.7998 88 -58.5996 141.4 -58.5996s103.6 20.7998 141.4 58.5996zM328 224c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32s32 -14.2998 32 -32s-14.2998 -32 -32 -32zM168 224
+c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32s32 -14.2998 32 -32s-14.2998 -32 -32 -32zM362.4 160c8.19922 0 14.5 -7 13.5 -15c-7.5 -59.2002 -58.9004 -105 -121.101 -105h-13.5996c-62.2002 0 -113.601 45.7998 -121.101 105c-1 8 5.30078 15 13.5 15h228.801z" />
+ <glyph glyph-name="laugh-beam" unicode="&#xf59a;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM389.4 50.5996c37.7998 37.8008 58.5996 88 58.5996 141.4s-20.7998 103.6 -58.5996 141.4c-37.8008 37.7998 -88 58.5996 -141.4 58.5996s-103.6 -20.7998 -141.4 -58.5996
+c-37.7998 -37.8008 -58.5996 -88 -58.5996 -141.4s20.7998 -103.6 58.5996 -141.4c37.8008 -37.7998 88 -58.5996 141.4 -58.5996s103.6 20.7998 141.4 58.5996zM328 296c23.7998 0 52.7002 -29.2998 55.7998 -71.4004c0.700195 -8.5 -10.7998 -11.8994 -14.8994 -4.5
+l-9.5 17c-7.7002 13.7002 -19.2002 21.6006 -31.5 21.6006c-12.3008 0 -23.8008 -7.90039 -31.5 -21.6006l-9.5 -17c-4.10059 -7.39941 -15.6006 -4.09961 -14.9004 4.5c3.2998 42.1006 32.2002 71.4004 56 71.4004zM127 220.1c-4.2002 -7.39941 -15.7002 -4 -15.0996 4.5
+c3.2998 42.1006 32.1992 71.4004 56 71.4004c23.7998 0 52.6992 -29.2998 56 -71.4004c0.699219 -8.5 -10.8008 -11.8994 -14.9004 -4.5l-9.5 17c-7.7002 13.7002 -19.2002 21.6006 -31.5 21.6006s-23.7998 -7.90039 -31.5 -21.6006zM362.4 160c8.19922 0 14.5 -7 13.5 -15
+c-7.5 -59.2002 -58.9004 -105 -121.101 -105h-13.5996c-62.2002 0 -113.601 45.7998 -121.101 105c-1 8 5.30078 15 13.5 15h228.801z" />
+ <glyph glyph-name="laugh-squint" unicode="&#xf59b;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM389.4 50.5996c37.7998 37.8008 58.5996 88 58.5996 141.4s-20.7998 103.6 -58.5996 141.4c-37.8008 37.7998 -88 58.5996 -141.4 58.5996s-103.6 -20.7998 -141.4 -58.5996
+c-37.7998 -37.8008 -58.5996 -88 -58.5996 -141.4s20.7998 -103.6 58.5996 -141.4c37.8008 -37.7998 88 -58.5996 141.4 -58.5996s103.6 20.7998 141.4 58.5996zM343.6 252l33.6006 -40.2998c8.59961 -10.4004 -3.90039 -24.7998 -15.4004 -18l-80 48
+c-7.7998 4.7002 -7.7998 15.8994 0 20.5996l80 48c11.6006 6.7998 24 -7.7002 15.4004 -18zM134.2 193.7c-11.6006 -6.7998 -24.1006 7.59961 -15.4004 18l33.6006 40.2998l-33.6006 40.2998c-8.59961 10.2998 3.7998 24.9004 15.4004 18l80 -48
+c7.7998 -4.7002 7.7998 -15.8994 0 -20.5996zM362.4 160c8.19922 0 14.5 -7 13.5 -15c-7.5 -59.2002 -58.9004 -105 -121.101 -105h-13.5996c-62.2002 0 -113.601 45.7998 -121.101 105c-1 8 5.30078 15 13.5 15h228.801z" />
+ <glyph glyph-name="laugh-wink" unicode="&#xf59c;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM389.4 50.5996c37.7998 37.8008 58.5996 88 58.5996 141.4s-20.7998 103.6 -58.5996 141.4c-37.8008 37.7998 -88 58.5996 -141.4 58.5996s-103.6 -20.7998 -141.4 -58.5996
+c-37.7998 -37.8008 -58.5996 -88 -58.5996 -141.4s20.7998 -103.6 58.5996 -141.4c37.8008 -37.7998 88 -58.5996 141.4 -58.5996s103.6 20.7998 141.4 58.5996zM328 284c25.7002 0 55.9004 -16.9004 59.7002 -42.0996c1.7998 -11.1006 -11.2998 -18.2002 -19.7998 -10.8008
+l-9.5 8.5c-14.8008 13.2002 -46.2002 13.2002 -61 0l-9.5 -8.5c-8.30078 -7.39941 -21.5 -0.399414 -19.8008 10.8008c4 25.1992 34.2002 42.0996 59.9004 42.0996zM168 224c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32s32 -14.2998 32 -32s-14.2998 -32 -32 -32z
+M362.4 160c8.19922 0 14.5 -7 13.5 -15c-7.5 -59.2002 -58.9004 -105 -121.101 -105h-13.5996c-62.2002 0 -113.601 45.7998 -121.101 105c-1 8 5.30078 15 13.5 15h228.801z" />
+ <glyph glyph-name="meh-blank" unicode="&#xf5a4;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM168 272c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32
+s-32 14.2998 -32 32s14.2998 32 32 32zM328 272c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32z" />
+ <glyph glyph-name="meh-rolling-eyes" unicode="&#xf5a5;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM336 296c39.7998 0 72 -32.2002 72 -72s-32.2002 -72 -72 -72
+s-72 32.2002 -72 72s32.2002 72 72 72zM336 184c22.0996 0 40 17.9004 40 40c0 13.5996 -7.2998 25.0996 -17.7002 32.2998c1 -2.59961 1.7002 -5.39941 1.7002 -8.2998c0 -13.2998 -10.7002 -24 -24 -24s-24 10.7002 -24 24c0 3 0.700195 5.7002 1.7002 8.2998
+c-10.4004 -7.2002 -17.7002 -18.7002 -17.7002 -32.2998c0 -22.0996 17.9004 -40 40 -40zM232 224c0 -39.7998 -32.2002 -72 -72 -72s-72 32.2002 -72 72s32.2002 72 72 72s72 -32.2002 72 -72zM120 224c0 -22.0996 17.9004 -40 40 -40s40 17.9004 40 40
+c0 13.5996 -7.2998 25.0996 -17.7002 32.2998c1 -2.59961 1.7002 -5.39941 1.7002 -8.2998c0 -13.2998 -10.7002 -24 -24 -24s-24 10.7002 -24 24c0 3 0.700195 5.7002 1.7002 8.2998c-10.4004 -7.2002 -17.7002 -18.7002 -17.7002 -32.2998zM312 96
+c13.2002 0 24 -10.7998 24 -24s-10.7998 -24 -24 -24h-128c-13.2002 0 -24 10.7998 -24 24s10.7998 24 24 24h128z" />
+ <glyph glyph-name="sad-cry" unicode="&#xf5b3;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM392 53.5996c34.5996 35.9004 56 84.7002 56 138.4c0 110.3 -89.7002 200 -200 200s-200 -89.7002 -200 -200c0 -53.7002 21.4004 -102.4 56 -138.4v114.4
+c0 13.2002 10.7998 24 24 24s24 -10.7998 24 -24v-151.4c28.5 -15.5996 61.2002 -24.5996 96 -24.5996s67.5 9 96 24.5996v151.4c0 13.2002 10.7998 24 24 24s24 -10.7998 24 -24v-114.4zM205.8 213.5c-5.7998 -3.2002 -11.2002 -0.700195 -13.7002 1.59961l-9.5 8.5
+c-14.7998 13.2002 -46.1992 13.2002 -61 0l-9.5 -8.5c-3.7998 -3.39941 -9.2998 -4 -13.6992 -1.59961c-4.40039 2.40039 -6.90039 7.40039 -6.10059 12.4004c3.90039 25.1992 34.2002 42.0996 59.7998 42.0996c25.6006 0 55.8008 -16.9004 59.8008 -42.0996
+c0.799805 -5 -1.7002 -10 -6.10059 -12.4004zM344 268c25.7002 0 55.9004 -16.9004 59.7998 -42.0996c0.799805 -5 -1.7002 -10 -6.09961 -12.4004c-5.7002 -3.09961 -11.2002 -0.599609 -13.7002 1.59961l-9.5 8.5c-14.7998 13.2002 -46.2002 13.2002 -61 0l-9.5 -8.5
+c-3.7998 -3.39941 -9.2002 -4 -13.7002 -1.59961c-4.39941 2.40039 -6.89941 7.40039 -6.09961 12.4004c3.89941 25.1992 34.0996 42.0996 59.7998 42.0996zM248 176c30.9004 0 56 -28.7002 56 -64s-25.0996 -64 -56 -64s-56 28.7002 -56 64s25.0996 64 56 64z" />
+ <glyph glyph-name="sad-tear" unicode="&#xf5b4;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM256 144c38.0996 0 74 -16.7998 98.5 -46.0996
+c8.5 -10.2002 7.09961 -25.3008 -3.09961 -33.8008c-10.6006 -8.7998 -25.7002 -6.69922 -33.8008 3.10059c-15.2998 18.2998 -37.7998 28.7998 -61.5996 28.7998c-13.2002 0 -24 10.7998 -24 24s10.7998 24 24 24zM168 208c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32
+s32 -14.2998 32 -32s-14.2998 -32 -32 -32zM328 272c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32zM162.4 173.2c2.7998 3.7002 8.39941 3.7002 11.1992 0c11.4004 -15.2998 36.4004 -50.6006 36.4004 -68.1006
+c0 -22.6992 -18.7998 -41.0996 -42 -41.0996s-42 18.4004 -42 41.0996c0 17.5 25 52.8008 36.4004 68.1006z" />
+ <glyph glyph-name="smile-beam" unicode="&#xf5b8;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM332 135.4c8.5 10.1992 23.5996 11.5 33.7998 3.09961
+c10.2002 -8.5 11.6006 -23.5996 3.10059 -33.7998c-30 -36 -74.1006 -56.6006 -120.9 -56.6006s-90.9004 20.6006 -120.9 56.6006c-8.39941 10.2002 -7.09961 25.2998 3.10059 33.7998c10.2002 8.40039 25.2998 7.09961 33.7998 -3.09961
+c20.7998 -25.1006 51.5 -39.4004 84 -39.4004s63.2002 14.4004 84 39.4004zM136.5 237l-9.5 -17c-1.90039 -3.2002 -5.90039 -4.7998 -9.2998 -3.7002c-3.60059 1.10059 -6 4.60059 -5.7002 8.2998c3.2998 42.1006 32.2002 71.4004 56 71.4004s52.7002 -29.2998 56 -71.4004
+c0.299805 -3.7998 -2.09961 -7.19922 -5.7002 -8.2998c-3.09961 -1 -7.2002 0 -9.2998 3.7002l-9.5 17c-7.7002 13.7002 -19.2002 21.5996 -31.5 21.5996s-23.7998 -7.89941 -31.5 -21.5996zM328 296c23.7998 0 52.7002 -29.2998 56 -71.4004
+c0.299805 -3.7998 -2.09961 -7.19922 -5.7002 -8.2998c-3.09961 -1 -7.2002 0 -9.2998 3.7002l-9.5 17c-7.7002 13.7002 -19.2002 21.5996 -31.5 21.5996s-23.7998 -7.89941 -31.5 -21.5996l-9.5 -17c-1.90039 -3.2002 -5.7998 -4.7998 -9.2998 -3.7002
+c-3.60059 1.10059 -6 4.60059 -5.7002 8.2998c3.2998 42.1006 32.2002 71.4004 56 71.4004z" />
+ <glyph glyph-name="surprise" unicode="&#xf5c2;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM248 168c35.2998 0 64 -28.7002 64 -64s-28.7002 -64 -64 -64
+s-64 28.7002 -64 64s28.7002 64 64 64zM200 240c0 -17.7002 -14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32s32 -14.2998 32 -32zM328 272c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32z" />
+ <glyph glyph-name="tired" unicode="&#xf5c8;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 -8c110.3 0 200 89.7002 200 200s-89.7002 200 -200 200s-200 -89.7002 -200 -200s89.7002 -200 200 -200zM377.1 295.8c3.80078 -4.39941 3.90039 -11 0.100586 -15.5
+l-33.6006 -40.2998l33.6006 -40.2998c3.7998 -4.5 3.7002 -11 -0.100586 -15.5c-3.5 -4.10059 -9.89941 -5.7002 -15.2998 -2.5l-80 48c-3.59961 2.2002 -5.7998 6.09961 -5.7998 10.2998s2.2002 8.09961 5.7998 10.2998l80 48c5 2.90039 11.5 1.90039 15.2998 -2.5z
+M220 240c0 -4.2002 -2.2002 -8.09961 -5.7998 -10.2998l-80 -48c-5.40039 -3.2002 -11.7998 -1.60059 -15.2998 2.5c-3.80078 4.5 -3.90039 11 -0.100586 15.5l33.6006 40.2998l-33.6006 40.2998c-3.7998 4.5 -3.7002 11 0.100586 15.5
+c3.7998 4.40039 10.2998 5.5 15.2998 2.5l80 -48c3.59961 -2.2002 5.7998 -6.09961 5.7998 -10.2998zM248 176c45.4004 0 100.9 -38.2998 107.8 -93.2998c1.5 -11.9004 -7 -21.6006 -15.5 -17.9004c-22.7002 9.7002 -56.2998 15.2002 -92.2998 15.2002
+s-69.5996 -5.5 -92.2998 -15.2002c-8.60059 -3.7002 -17 6.10059 -15.5 17.9004c6.89941 55 62.3994 93.2998 107.8 93.2998z" />
+ </font>
+</defs></svg>
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.ttf b/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.ttf
new file mode 100644
index 0000000000..a309313d5f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.ttf
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.woff b/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.woff
new file mode 100644
index 0000000000..2578261897
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.woff
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.woff2 b/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.woff2
new file mode 100644
index 0000000000..3ef9c3edb0
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-regular-400.woff2
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.eot b/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.eot
new file mode 100644
index 0000000000..68c010a862
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.eot
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.svg b/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.svg
new file mode 100644
index 0000000000..4ef85aa379
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.svg
@@ -0,0 +1,4516 @@
+<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1">
+<metadata>
+Created by FontForge 20190112 at Tue Feb 12 10:24:59 2019
+ By Robert Madole
+Copyright (c) Font Awesome
+</metadata>
+<defs>
+<font id="FontAwesome5Free-Solid" horiz-adv-x="512" >
+ <font-face
+ font-family="Font Awesome 5 Free Solid"
+ font-weight="900"
+ font-stretch="normal"
+ units-per-em="512"
+ panose-1="2 0 5 3 0 0 0 0 0 0"
+ ascent="448"
+ descent="-64"
+ bbox="-0.983398 -64.9834 640.104 448.427"
+ underline-thickness="25"
+ underline-position="-51"
+ unicode-range="U+0020-F82F"
+ />
+ <missing-glyph />
+ <glyph glyph-name="glass-martini" unicode="&#xf000;"
+d="M502.05 390.4l-214.05 -214.04v-192.36h56c22.0898 0 40 -17.9102 40 -40c0 -4.41992 -3.58008 -8 -8 -8h-240c-4.41992 0 -8 3.58008 -8 8c0 22.0898 17.9102 40 40 40h56v192.36l-214.05 214.04c-21.25 21.2598 -6.2002 57.5996 23.8496 57.5996h444.4
+c30.0498 0 45.0996 -36.3398 23.8496 -57.5996z" />
+ <glyph glyph-name="music" unicode="&#xf001;"
+d="M511.99 415.99l0.00976562 -351.99c0 -35.3496 -42.9805 -64 -96 -64s-96 28.6504 -96 64s42.9805 64 96 64c11.2803 0 21.9502 -1.54004 32 -3.91992v184.63l-256 -75.0195v-233.69c0 -35.3496 -42.9805 -64 -96 -64s-96 28.6504 -96 64s42.9805 64 96 64
+c11.2803 0 21.9502 -1.54004 32 -3.91992v261.42c0 14 9.09961 26.2998 22.4004 30.5l319.989 94.5c20.5 6.5 41.6006 -8.7998 41.6006 -30.5098z" />
+ <glyph glyph-name="search" unicode="&#xf002;" horiz-adv-x="511"
+d="M505 5.2998c9.2998 -9.39941 9.2998 -24.5996 -0.0996094 -34l-28.3008 -28.2998c-9.2998 -9.40039 -24.5 -9.40039 -33.8994 0l-99.7002 99.7002c-4.5 4.5 -7 10.5996 -7 17v16.2998c-35.2998 -27.5996 -79.7002 -44 -128 -44c-114.9 0 -208 93.0996 -208 208
+s93.0996 208 208 208s208 -93.0996 208 -208c0 -48.2998 -16.4004 -92.7002 -44 -128h16.2998c6.40039 0 12.5 -2.5 17 -7zM208 112c70.7998 0 128 57.2998 128 128c0 70.7998 -57.2998 128 -128 128c-70.7998 0 -128 -57.2998 -128 -128c0 -70.7998 57.2998 -128 128 -128z
+" />
+ <glyph glyph-name="heart" unicode="&#xf004;"
+d="M462.3 385.4c62.7998 -53.6006 66.1006 -149.801 9.7998 -207.9l-193.5 -199.8c-12.5 -12.9004 -32.7998 -12.9004 -45.2998 0l-193.5 199.8c-56.2002 58.0996 -52.8994 154.3 9.90039 207.9c54.7998 46.6992 136.399 38.2998 186.6 -13.6006l19.7002 -20.2998
+l19.7002 20.2998c50.2998 51.9004 131.8 60.2998 186.6 13.6006z" />
+ <glyph glyph-name="star" unicode="&#xf005;" horiz-adv-x="576"
+d="M259.3 430.2c11.7998 23.8994 45.7002 23.5996 57.4004 0l65.2998 -132.4l146.1 -21.2998c26.2002 -3.7998 36.7002 -36.0996 17.7002 -54.5996l-105.7 -103l25 -145.5c4.5 -26.3008 -23.1992 -45.9004 -46.3994 -33.7002l-130.7 68.7002l-130.7 -68.7002
+c-23.2002 -12.2998 -50.8994 7.39941 -46.3994 33.7002l25 145.5l-105.7 103c-19 18.5 -8.5 50.7998 17.7002 54.5996l146.1 21.2998z" />
+ <glyph glyph-name="user" unicode="&#xf007;" horiz-adv-x="448"
+d="M224 192c-70.7002 0 -128 57.2998 -128 128s57.2998 128 128 128s128 -57.2998 128 -128s-57.2998 -128 -128 -128zM313.6 160c74.2002 0 134.4 -60.2002 134.4 -134.4v-41.5996c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v41.5996
+c0 74.2002 60.2002 134.4 134.4 134.4h16.6992c22.3008 -10.2002 46.9004 -16 72.9004 -16s50.7002 5.7998 72.9004 16h16.6992z" />
+ <glyph glyph-name="film" unicode="&#xf008;"
+d="M488 384c13.2998 0 24 -10.7002 24 -24v-336c0 -13.2998 -10.7002 -24 -24 -24h-8v20c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-20h-320v20c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-20h-8
+c-13.2998 0 -24 10.7002 -24 24v336c0 13.2998 10.7002 24 24 24h8v-20c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12v20h320v-20c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12v20h8zM96 76v40c0 6.59961 -5.40039 12 -12 12h-40
+c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12zM96 172v40c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12zM96 268v40
+c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12zM368 60v96c0 6.59961 -5.40039 12 -12 12h-200c-6.59961 0 -12 -5.40039 -12 -12v-96c0 -6.59961 5.40039 -12 12 -12h200
+c6.59961 0 12 5.40039 12 12zM368 228v96c0 6.59961 -5.40039 12 -12 12h-200c-6.59961 0 -12 -5.40039 -12 -12v-96c0 -6.59961 5.40039 -12 12 -12h200c6.59961 0 12 5.40039 12 12zM480 76v40c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-40
+c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12zM480 172v40c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12zM480 268v40c0 6.59961 -5.40039 12 -12 12h-40
+c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12z" />
+ <glyph glyph-name="th-large" unicode="&#xf009;"
+d="M296 416h192c13.2549 0 24 -10.7451 24 -24v-160c0 -13.2549 -10.7451 -24 -24 -24h-192c-13.2549 0 -24 10.7451 -24 24v160c0 13.2549 10.7451 24 24 24zM216 416c13.2549 0 24 -10.7451 24 -24v-160c0 -13.2549 -10.7451 -24 -24 -24h-192
+c-13.2549 0 -24 10.7451 -24 24v160c0 13.2549 10.7451 24 24 24h192zM0 152c0 13.2549 10.7451 24 24 24h192c13.2549 0 24 -10.7451 24 -24v-160c0 -13.2549 -10.7451 -24 -24 -24h-192c-13.2549 0 -24 10.7451 -24 24v160zM296 -32c-13.2549 0 -24 10.7451 -24 24v160
+c0 13.2549 10.7451 24 24 24h192c13.2549 0 24 -10.7451 24 -24v-160c0 -13.2549 -10.7451 -24 -24 -24h-192z" />
+ <glyph glyph-name="th" unicode="&#xf00a;"
+d="M149.333 392v-80c0 -13.2549 -10.7451 -24 -24 -24h-101.333c-13.2549 0 -24 10.7451 -24 24v80c0 13.2549 10.7451 24 24 24h101.333c13.2549 0 24 -10.7451 24 -24zM330.667 152c0 -13.2549 -10.7451 -24 -24.001 -24h-101.333c-13.2549 0 -24 10.7451 -24 24v80
+c0 13.2549 10.7451 24 24 24h101.334c13.2549 0 24 -10.7451 24 -24v-80zM362.667 392c0 13.2549 10.7451 24 24 24h101.333c13.2549 0 24 -10.7451 24 -24v-80c0 -13.2549 -10.7451 -24 -24 -24h-101.333c-13.2549 0 -24 10.7451 -24 24v80zM330.667 312
+c0 -13.2549 -10.7451 -24 -24.001 -24h-101.333c-13.2549 0 -24 10.7451 -24 24v80c0 13.2549 10.7451 24 24 24h101.334c13.2549 0 24 -10.7451 24 -24v-80zM125.333 256c13.2549 0 24 -10.7451 24 -24v-80c0 -13.2549 -10.7451 -24 -24 -24h-101.333
+c-13.2549 0 -24 10.7451 -24 24v80c0 13.2549 10.7451 24 24 24h101.333zM0 72c0 13.2549 10.7451 24 24 24h101.333c13.2549 0 24 -10.7451 24 -24v-80c0 -13.2549 -10.7451 -24 -24 -24h-101.333c-13.2549 0 -24 10.7451 -24 24v80zM386.667 128
+c-13.2549 0 -24 10.7451 -24 24v80c0 13.2549 10.7451 24 24 24h101.333c13.2549 0 24 -10.7451 24 -24v-80c0 -13.2549 -10.7451 -24 -24 -24h-101.333zM386.667 -32c-13.2549 0 -24 10.7451 -24 24v80c0 13.2549 10.7451 24 24 24h101.333c13.2549 0 24 -10.7451 24 -24
+v-80c0 -13.2549 -10.7451 -24 -24 -24h-101.333zM181.333 72c0 13.2549 10.7451 24 24 24h101.333c13.2549 0 24 -10.7451 24 -24v-80c0 -13.2549 -10.7451 -24 -24 -24h-101.333c-13.2549 0 -24 10.7451 -24 24v80z" />
+ <glyph glyph-name="th-list" unicode="&#xf00b;"
+d="M149.333 232v-80c0 -13.2549 -10.7451 -24 -24 -24h-101.333c-13.2549 0 -24 10.7451 -24 24v80c0 13.2549 10.7451 24 24 24h101.333c13.2549 0 24 -10.7451 24 -24zM0 72c0 13.2549 10.7451 24 24 24h101.333c13.2549 0 24 -10.7451 24 -24v-80
+c0 -13.2549 -10.7451 -24 -24 -24h-101.333c-13.2549 0 -24 10.7451 -24 24v80zM125.333 416c13.2549 0 24 -10.7451 24 -24v-80c0 -13.2549 -10.7451 -24 -24 -24h-101.333c-13.2549 0 -24 10.7451 -24 24v80c0 13.2549 10.7451 24 24 24h101.333zM205.333 -32
+c-13.2549 0 -24 10.7451 -24 24v80c0 13.2549 10.7451 24 24 24h282.667c13.2549 0 24 -10.7451 24 -24v-80c0 -13.2549 -10.7451 -24 -24 -24h-282.667zM181.333 392c0 13.2549 10.7451 24 24 24h282.667c13.2549 0 24 -10.7451 24 -24v-80
+c0 -13.2549 -10.7451 -24 -24 -24h-282.667c-13.2549 0 -24 10.7451 -24 24v80zM205.333 128c-13.2549 0 -24 10.7451 -24 24v80c0 13.2549 10.7451 24 24 24h282.667c13.2549 0 24 -10.7451 24 -24v-80c0 -13.2549 -10.7451 -24 -24 -24h-282.667z" />
+ <glyph glyph-name="check" unicode="&#xf00c;"
+d="M173.898 8.5957l-166.4 166.4c-9.99707 9.99707 -9.99707 26.2061 0 36.2041l36.2031 36.2041c9.99707 9.99805 26.207 9.99805 36.2041 0l112.095 -112.095l240.095 240.095c9.99707 9.99707 26.207 9.99707 36.2041 0l36.2031 -36.2041
+c9.99707 -9.99707 9.99707 -26.2061 0 -36.2041l-294.4 -294.401c-9.99805 -9.99707 -26.207 -9.99707 -36.2031 0.000976562z" />
+ <glyph glyph-name="times" unicode="&#xf00d;" horiz-adv-x="352"
+d="M242.72 192l100.07 -100.07c12.2803 -12.29 12.2803 -32.1992 0 -44.4795l-22.2402 -22.2402c-12.2803 -12.2803 -32.2002 -12.2803 -44.4795 0l-100.07 100.07l-100.07 -100.07c-12.2793 -12.2803 -32.1992 -12.2803 -44.4795 0l-22.2402 22.2402
+c-12.2803 12.29 -12.2803 32.2002 0 44.4795l100.07 100.07l-100.07 100.07c-12.2803 12.29 -12.2803 32.1992 0 44.4795l22.2402 22.2402c12.29 12.2803 32.2002 12.2803 44.4795 0l100.07 -100.07l100.07 100.07c12.29 12.2803 32.1992 12.2803 44.4795 0
+l22.2402 -22.2402c12.2803 -12.29 12.2803 -32.2002 0 -44.4795z" />
+ <glyph glyph-name="search-plus" unicode="&#xf00e;" horiz-adv-x="511"
+d="M304 256v-32c0 -6.59961 -5.40039 -12 -12 -12h-56v-56c0 -6.59961 -5.40039 -12 -12 -12h-32c-6.59961 0 -12 5.40039 -12 12v56h-56c-6.59961 0 -12 5.40039 -12 12v32c0 6.59961 5.40039 12 12 12h56v56c0 6.59961 5.40039 12 12 12h32c6.59961 0 12 -5.40039 12 -12
+v-56h56c6.59961 0 12 -5.40039 12 -12zM505 -28.7002l-28.2998 -28.2998c-9.40039 -9.40039 -24.6006 -9.40039 -33.9004 0l-99.7998 99.7002c-4.5 4.5 -7 10.5996 -7 17v16.2998c-35.2998 -27.5996 -79.7002 -44 -128 -44c-114.9 0 -208 93.0996 -208 208
+s93.0996 208 208 208s208 -93.0996 208 -208c0 -48.2998 -16.4004 -92.7002 -44 -128h16.2998c6.40039 0 12.5 -2.5 17 -7l99.7002 -99.7002c9.2998 -9.39941 9.2998 -24.5996 0 -34zM344 240c0 75.2002 -60.7998 136 -136 136s-136 -60.7998 -136 -136
+s60.7998 -136 136 -136s136 60.7998 136 136z" />
+ <glyph glyph-name="search-minus" unicode="&#xf010;" horiz-adv-x="511"
+d="M304 256v-32c0 -6.59961 -5.40039 -12 -12 -12h-168c-6.59961 0 -12 5.40039 -12 12v32c0 6.59961 5.40039 12 12 12h168c6.59961 0 12 -5.40039 12 -12zM505 -28.7002l-28.2998 -28.2998c-9.40039 -9.40039 -24.6006 -9.40039 -33.9004 0l-99.7998 99.7002
+c-4.5 4.5 -7 10.5996 -7 17v16.2998c-35.2998 -27.5996 -79.7002 -44 -128 -44c-114.9 0 -208 93.0996 -208 208s93.0996 208 208 208s208 -93.0996 208 -208c0 -48.2998 -16.4004 -92.7002 -44 -128h16.2998c6.40039 0 12.5 -2.5 17 -7l99.7002 -99.7002
+c9.2998 -9.39941 9.2998 -24.5996 0 -34zM344 240c0 75.2002 -60.7998 136 -136 136s-136 -60.7998 -136 -136s60.7998 -136 136 -136s136 60.7998 136 136z" />
+ <glyph glyph-name="power-off" unicode="&#xf011;" horiz-adv-x="511"
+d="M400 393.9c63 -45 104 -118.601 104 -201.9c0 -136.8 -110.8 -247.7 -247.5 -248c-136.5 -0.299805 -248.3 111 -248.5 247.6c-0.0996094 83.3008 40.9004 157.101 103.8 202.2c11.7002 8.2998 28 4.7998 35 -7.7002l15.7998 -28.0996
+c5.90039 -10.5 3.10059 -23.7998 -6.59961 -31c-41.5 -30.7998 -68 -79.5996 -68 -134.9c-0.0996094 -92.2998 74.5 -168.1 168 -168.1c91.5996 0 168.6 74.2002 168 169.1c-0.299805 51.8008 -24.7002 101.801 -68.0996 134c-9.7002 7.2002 -12.4004 20.5 -6.5 30.9004
+l15.7998 28.0996c7 12.4004 23.2002 16.1006 34.7998 7.80078zM296 184c0 -13.2998 -10.7002 -24 -24 -24h-32c-13.2998 0 -24 10.7002 -24 24v240c0 13.2998 10.7002 24 24 24h32c13.2998 0 24 -10.7002 24 -24v-240z" />
+ <glyph glyph-name="signal" unicode="&#xf012;" horiz-adv-x="640"
+d="M216 160c8.83984 0 16 -7.16016 16 -16v-192c0 -8.83984 -7.16016 -16 -16 -16h-48c-8.83984 0 -16 7.16016 -16 16v192c0 8.83984 7.16016 16 16 16h48zM88 64c8.83984 0 16 -7.16016 16 -16v-96c0 -8.83984 -7.16016 -16 -16 -16h-48c-8.83984 0 -16 7.16016 -16 16v96
+c0 8.83984 7.16016 16 16 16h48zM344 256c8.83984 0 16 -7.16016 16 -16v-288c0 -8.83984 -7.16016 -16 -16 -16h-48c-8.83984 0 -16 7.16016 -16 16v288c0 8.83984 7.16016 16 16 16h48zM472 352c8.83984 0 16 -7.16016 16 -16v-384c0 -8.83984 -7.16016 -16 -16 -16h-48
+c-8.83984 0 -16 7.16016 -16 16v384c0 8.83984 7.16016 16 16 16h48zM600 448c8.83984 0 16 -7.16016 16 -16v-480c0 -8.83984 -7.16016 -16 -16 -16h-48c-8.83984 0 -16 7.16016 -16 16v480c0 8.83984 7.16016 16 16 16h48z" />
+ <glyph glyph-name="cog" unicode="&#xf013;"
+d="M487.4 132.3c4.89941 -2.7998 7.09961 -8.59961 5.59961 -14.0996c-11.0996 -35.7002 -30 -67.9004 -54.7002 -94.6006c-3.7998 -4.19922 -9.89941 -5.09961 -14.7998 -2.2998l-42.5996 24.6006c-18 -15.3008 -38.6006 -27.2002 -60.8008 -35.1006v-49.2002
+c0 -5.59961 -3.89941 -10.5 -9.39941 -11.6992c-34.9004 -7.80078 -72.5 -8.2002 -109.2 0c-5.5 1.19922 -9.40039 6.09961 -9.40039 11.6992v49.2002c-22.2998 7.7998 -42.8994 19.7002 -60.7998 35.1006l-42.5996 -24.6006c-4.7998 -2.7998 -11 -1.7998 -14.7998 2.2998
+c-24.7002 26.8008 -43.6006 59 -54.7002 94.6006c-1.60059 5.39941 0.599609 11.2002 5.5 14l42.5996 24.5996c-4.2998 23.2002 -4.2998 47 0 70.2002l-42.5996 24.5996c-4.90039 2.80078 -7.2002 8.60059 -5.5 14c11.0996 35.7002 30 67.9004 54.7002 94.6006
+c3.7998 4.2002 9.89941 5.09961 14.7998 2.2998l42.5 -24.5996c18 15.2998 38.5996 27.1992 60.7998 35.0996v49.2002c0 5.59961 3.90039 10.5 9.40039 11.7002c34.8994 7.7998 72.5 8.19922 109.199 0c5.5 -1.2002 9.40039 -6.10059 9.40039 -11.7002v-49.1006
+c22.2998 -7.7998 42.9004 -19.6992 60.7998 -35.0996l42.6006 24.5996c4.7998 2.80078 11 1.80078 14.7998 -2.2998c24.7002 -26.7998 43.5996 -59 54.7002 -94.5996c1.59961 -5.40039 -0.600586 -11.2002 -5.5 -14l-42.6006 -24.6006
+c4.2998 -23.1992 4.2998 -47 0 -70.1992zM256 112c44.0996 0 80 35.9004 80 80s-35.9004 80 -80 80s-80 -35.9004 -80 -80s35.9004 -80 80 -80z" />
+ <glyph glyph-name="home" unicode="&#xf015;" horiz-adv-x="576"
+d="M280.37 299.74c1.84863 1.49023 5.27539 2.69922 7.64941 2.69922c2.375 0 5.80176 -1.20898 7.65039 -2.69922l184.33 -151.74v-164c0 -8.83203 -7.16797 -16 -16 -16l-112.02 0.30957h-0.000976562c-8.83203 0 -16 7.16797 -16 16
+c0 0.0146484 0 0.0371094 0.000976562 0.0507812v95.6396c0 8.83203 -7.16895 16 -16 16h-64c-8.83203 0 -16 -7.16797 -16 -16v-95.71v0c0 -8.78809 -7.13281 -15.9561 -15.9209 -16l-112.06 -0.290039c-8.83203 0 -16 7.16797 -16 16v163.89zM571.6 196.53
+c2.44531 -1.98828 4.42969 -6.15918 4.42969 -9.31055c0 -2.37305 -1.22266 -5.78613 -2.72949 -7.62012l-25.5 -31c-1.98633 -2.40332 -6.13086 -4.35449 -9.24902 -4.35449c-2.38574 0 -5.81348 1.23438 -7.65039 2.75488l-235.23 193.74
+c-1.84863 1.49023 -5.27539 2.69922 -7.65039 2.69922c-2.37402 0 -5.80078 -1.20898 -7.64941 -2.69922l-235.22 -193.74c-1.83691 -1.5166 -5.26074 -2.74805 -7.64258 -2.74805c-3.12793 0 -7.28027 1.96191 -9.26758 4.37793l-25.5 31
+c-1.52051 1.83789 -2.75488 5.26562 -2.75488 7.65039c0 3.11914 1.95117 7.2627 4.35449 9.25l253.13 208.47c7.33594 6.03613 21 10.9355 30.5 10.9355c9.50098 0 23.1641 -4.89941 30.5 -10.9355l89.5303 -73.6602v72.6104c0 6.62402 5.37598 12 12 12h56
+c6.62402 0 12 -5.37598 12 -12v-138.51z" />
+ <glyph glyph-name="clock" unicode="&#xf017;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM313.1 89.9004c5.40039 -3.90039 12.9004 -2.7002 16.8008 2.59961l28.1992 38.7998c3.90039 5.40039 2.80078 12.9004 -2.59961 16.7998l-63.5 46.2002v137.7
+c0 6.59961 -5.40039 12 -12 12h-48c-6.59961 0 -12 -5.40039 -12 -12v-168.3c0 -3.7998 1.7998 -7.40039 4.90039 -9.7002z" />
+ <glyph glyph-name="road" unicode="&#xf018;" horiz-adv-x="576"
+d="M573.19 45.3301c9.25977 -21.1904 -5.5 -45.3301 -27.7305 -45.3301h-196.84l-10.3105 97.6797c-0.859375 8.14062 -7.71973 14.3203 -15.9092 14.3203h-68.8008c-8.18945 0 -15.0498 -6.17969 -15.9092 -14.3203l-10.3105 -97.6797h-196.84
+c-22.2305 0 -36.9902 24.1396 -27.7402 45.3301l139.79 320c4.96973 11.3799 15.7998 18.6699 27.7305 18.6699h97.5898l-2.4502 -23.1602c-0.5 -4.71973 3.20996 -8.83984 7.95996 -8.83984h29.1602c4.75 0 8.45996 4.12012 7.95996 8.83984l-2.4502 23.1602h97.5898
+c11.9199 0 22.75 -7.29004 27.7207 -18.6699zM260.4 312.84l-4.59082 -43.5801c-0.75 -7.08984 4.80078 -13.2598 11.9307 -13.2598h40.54c7.12012 0 12.6797 6.16992 11.9297 13.2598l-4.59961 43.5801c-0.430664 4.07031 -3.87012 7.16016 -7.95996 7.16016h-39.29
+h-0.00488281c-3.97363 0 -7.53809 -3.20801 -7.95508 -7.16016zM315.64 144c9.5 0 16.9102 8.23047 15.9102 17.6797l-5.06934 48c-0.860352 8.14062 -7.7207 14.3203 -15.9102 14.3203h-45.1504c-8.18945 0 -15.0498 -6.17969 -15.9102 -14.3203l-5.06934 -48
+c-1 -9.44922 6.40918 -17.6797 15.9092 -17.6797h55.29z" />
+ <glyph glyph-name="download" unicode="&#xf019;"
+d="M216 448h80c13.2998 0 24 -10.7002 24 -24v-168h87.7002c17.7998 0 26.7002 -21.5 14.0996 -34.0996l-152.1 -152.2c-7.5 -7.5 -19.7998 -7.5 -27.2998 0l-152.301 152.2c-12.5996 12.5996 -3.69922 34.0996 14.1006 34.0996h87.7998v168c0 13.2998 10.7002 24 24 24z
+M512 72v-112c0 -13.2998 -10.7002 -24 -24 -24h-464c-13.2998 0 -24 10.7002 -24 24v112c0 13.2998 10.7002 24 24 24h146.7l49 -49c20.0996 -20.0996 52.5 -20.0996 72.5996 0l49 49h146.7c13.2998 0 24 -10.7002 24 -24zM388 -16c0 11 -9 20 -20 20s-20 -9 -20 -20
+s9 -20 20 -20s20 9 20 20zM452 -16c0 11 -9 20 -20 20s-20 -9 -20 -20s9 -20 20 -20s20 9 20 20z" />
+ <glyph glyph-name="inbox" unicode="&#xf01c;" horiz-adv-x="576"
+d="M567.938 204.092c4.4502 -6.6748 8.06152 -18.6025 8.06152 -26.624v-0.000976562v-129.467c0 -26.5098 -21.4902 -48 -48 -48h-480c-26.5098 0 -48 21.4902 -48 48v129.467v0.000976562c0 8.02148 3.61133 19.9492 8.06152 26.624l105.689 158.534
+c7.86621 11.7988 25.7578 21.374 39.9385 21.374h268.621c14.1807 0 32.0732 -9.57617 39.9395 -21.374zM162.252 320l-85.334 -128h123.082l32 -64h112l32 64h123.082l-85.333 128h-251.497z" />
+ <glyph glyph-name="redo" unicode="&#xf01e;" horiz-adv-x="520"
+d="M500.333 448c6.62695 0 12 -5.37305 12 -12v-200.332c0 -6.62695 -5.37305 -12 -12 -12h-200.333c-6.62695 0 -12 5.37305 -12 12v47.4111c0 6.85254 5.72852 12.3135 12.5742 11.9863l101.524 -4.86523c-31.5928 46.9414 -85.207 77.7998 -146.099 77.7998
+c-97.2842 0 -176 -78.7334 -176 -176c0 -97.2842 78.7334 -176 176 -176c44.6064 0 85.3076 16.5518 116.309 43.8555c4.74023 4.17383 11.9141 3.92285 16.3809 -0.543945l33.9707 -33.9707c4.87207 -4.87207 4.63086 -12.8145 -0.482422 -17.4326
+c-43.9756 -39.7217 -102.252 -63.9082 -166.178 -63.9082c-136.904 0 -247.899 110.932 -248 247.813c-0.100586 136.66 111.34 248.187 248.001 248.187c75.6709 0 143.415 -33.8994 188.901 -87.333l-3.9668 82.7588c-0.327148 6.8457 5.13379 12.5742 11.9863 12.5742
+h47.4111z" />
+ <glyph glyph-name="sync" unicode="&#xf021;"
+d="M440.935 435.426c-0.327148 6.8457 5.13477 12.5742 11.9873 12.5742h47.4111c6.62695 0 12 -5.37305 12 -12v-200.333c0 -6.62695 -5.37305 -12 -12 -12h-200.333c-6.62695 0 -12 5.37305 -12 12v47.4111c0 6.85254 5.72852 12.3135 12.5742 11.9863l101.529 -4.86523
+c-31.5918 46.9443 -85.2109 77.8008 -146.104 77.8008c-83.0693 0 -152.593 -57.4082 -171.137 -134.741c-1.29883 -5.41699 -6.10645 -9.25879 -11.6768 -9.25879h-49.084c-7.49902 0 -13.1943 6.80664 -11.8066 14.1758c21.6367 114.9 122.518 201.824 243.704 201.824
+c75.6738 0 143.416 -33.9043 188.901 -87.3398zM256 16c83.0693 0 152.593 57.4082 171.137 134.741c1.29883 5.41699 6.10645 9.25879 11.6768 9.25879h49.084c7.49902 0 13.1943 -6.80664 11.8066 -14.1758c-21.6367 -114.9 -122.518 -201.824 -243.704 -201.824
+c-75.5947 0 -143.275 33.834 -188.759 87.1738l4.12891 -82.5752c0.342773 -6.85352 -5.12207 -12.5986 -11.9854 -12.5986h-47.3848c-6.62695 0 -12 5.37305 -12 12v200.332c0 6.62695 5.37305 12 12 12h200.332c6.62695 0 12 -5.37305 12 -12v-47.4121
+c0 -6.85156 -5.72852 -12.3125 -12.5732 -11.9863l-101.868 4.87109c31.5928 -46.9463 85.2148 -77.8047 146.109 -77.8047z" />
+ <glyph glyph-name="list-alt" unicode="&#xf022;"
+d="M464 -32h-416c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h416c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48zM128 328c-22.0908 0 -40 -17.9092 -40 -40s17.9092 -40 40 -40s40 17.9092 40 40s-17.9092 40 -40 40zM128 232
+c-22.0908 0 -40 -17.9092 -40 -40s17.9092 -40 40 -40s40 17.9092 40 40s-17.9092 40 -40 40zM128 136c-22.0908 0 -40 -17.9092 -40 -40s17.9092 -40 40 -40s40 17.9092 40 40s-17.9092 40 -40 40zM416 272v32c0 6.62695 -5.37305 12 -12 12h-200
+c-6.62695 0 -12 -5.37305 -12 -12v-32c0 -6.62695 5.37305 -12 12 -12h200c6.62695 0 12 5.37305 12 12zM416 176v32c0 6.62695 -5.37305 12 -12 12h-200c-6.62695 0 -12 -5.37305 -12 -12v-32c0 -6.62695 5.37305 -12 12 -12h200c6.62695 0 12 5.37305 12 12zM416 80v32
+c0 6.62695 -5.37305 12 -12 12h-200c-6.62695 0 -12 -5.37305 -12 -12v-32c0 -6.62695 5.37305 -12 12 -12h200c6.62695 0 12 5.37305 12 12z" />
+ <glyph glyph-name="lock" unicode="&#xf023;" horiz-adv-x="448"
+d="M400 224c26.5 0 48 -21.5 48 -48v-192c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v192c0 26.5 21.5 48 48 48h24v72c0 83.7998 68.2002 152 152 152s152 -68.2002 152 -152v-72h24zM296 224v72c0 39.7002 -32.2998 72 -72 72s-72 -32.2998 -72 -72v-72h144
+z" />
+ <glyph glyph-name="flag" unicode="&#xf024;" horiz-adv-x="520"
+d="M349.565 349.217c40.4951 0 82.6113 15.9062 116.949 31.8545c21.2168 9.85352 45.4854 -5.62305 45.4854 -29.0166v-243.1c0 -10.5264 -5.16016 -20.4072 -13.8428 -26.3584c-35.8379 -24.5635 -74.3359 -40.8574 -122.505 -40.8574
+c-67.373 0 -111.629 34.7832 -165.218 34.7832c-50.8525 0 -86.124 -10.0586 -114.435 -22.1221v-94.4004c0 -13.2549 -10.7451 -24 -24 -24h-16c-13.2549 0 -24 10.7451 -24 24v386.055c-14.5029 10.1201 -24 26.9189 -24 45.9453
+c0 31.7041 26.3447 57.2539 58.3379 55.9521c28.4678 -1.1582 51.7793 -23.9668 53.5508 -52.4033c0.0625 -0.980469 0.113281 -2.57324 0.113281 -3.55566c0 -5.71094 -1.65723 -14.6738 -3.69922 -20.0059c20.7363 7.62891 43.0898 12.0127 68.0449 12.0127
+c67.373 0 111.63 -34.7832 165.218 -34.7832z" />
+ <glyph glyph-name="headphones" unicode="&#xf025;"
+d="M256 416c141.504 0 256 -114.521 256 -256v-48c0 -10.917 -7.9248 -23.7402 -17.6904 -28.6221l-14.3818 -7.19141c-2.01074 -60.0889 -51.3486 -108.187 -111.928 -108.187h-24c-13.2549 0 -24 10.7451 -24 24v176c0 13.2549 10.7451 24 24 24h24
+c31.3418 0 59.6709 -12.8789 80 -33.627v1.62695c0 105.869 -86.1309 192 -192 192s-192 -86.1309 -192 -192v-1.62695c20.3291 20.748 48.6582 33.627 80 33.627h24c13.2549 0 24 -10.7451 24 -24v-176c0 -13.2549 -10.7451 -24 -24 -24h-24
+c-60.5791 0 -109.917 48.0967 -111.928 108.187l-14.3828 7.19141c-9.76465 4.88184 -17.6895 17.7051 -17.6895 28.6221v0v48c0 141.504 114.52 256 256 256z" />
+ <glyph glyph-name="volume-off" unicode="&#xf026;" horiz-adv-x="258"
+d="M216.36 376.96c15.0098 15 40.9697 4.49023 40.9697 -16.9795v-335.961c0 -21.4395 -25.9404 -32 -40.9697 -16.9697l-88.9707 88.9502h-102.06c-13.2598 0 -24 10.75 -24 24v144c0 13.2598 10.7402 24 24 24h102.06z" />
+ <glyph glyph-name="volume-down" unicode="&#xf027;" horiz-adv-x="384"
+d="M215.03 375.96c15.0098 15 40.9697 4.49023 40.9697 -16.9795v-335.961c0 -21.4395 -25.9404 -32 -40.9697 -16.9697l-88.9707 88.9502h-102.06c-13.2598 0 -24 10.75 -24 24v144c0 13.2598 10.7402 24 24 24h102.06zM338.23 267.88
+c28.2393 -15.5498 45.7793 -44.9902 45.7793 -76.8701s-17.54 -61.3301 -45.7695 -76.8799c-11.5605 -6.34961 -26.1807 -2.20996 -32.6104 9.4502c-6.38965 11.6104 -2.16016 26.2002 9.4502 32.6104c12.9004 7.08984 20.9199 20.4297 20.9199 34.8096
+s-8.01953 27.7197 -20.9297 34.8203c-11.6104 6.41016 -15.8398 21 -9.4502 32.6094c6.41992 11.6104 21.0303 15.7803 32.6104 9.4502z" />
+ <glyph glyph-name="volume-up" unicode="&#xf028;" horiz-adv-x="576"
+d="M215.03 376.95c15.0098 15.0098 40.9697 4.49023 40.9697 -16.9697v-335.961c0 -21.4395 -25.9404 -32 -40.9697 -16.9697l-88.9707 88.9502h-102.06c-13.2598 0 -24 10.75 -24 24v144c0 13.2598 10.7402 24 24 24h102.06zM448.35 428.03
+c79.9199 -52.46 127.65 -140.7 127.65 -236.03s-47.7305 -183.58 -127.65 -236.04c-11.5801 -7.61035 -26.4697 -3.75977 -33.5098 6.9502c-7.33984 11.1602 -4.21973 26.1797 6.9502 33.5c66.2695 43.4902 105.82 116.6 105.82 195.58
+c0 78.9795 -39.5508 152.09 -105.82 195.58c-11.1699 7.33008 -14.29 22.3398 -6.9502 33.5098c7.33008 11.1895 22.3398 14.2803 33.5098 6.9502zM480 192c0 -63.54 -32.0596 -121.94 -85.7695 -156.24c-12 -7.67969 -26.6104 -2.89941 -33.1201 7.45996
+c-7.09082 11.29 -3.78027 26.2207 7.40918 33.3604c39.75 25.3896 63.4805 68.5303 63.4805 115.42s-23.7305 90.0303 -63.4805 115.42c-11.1895 7.15039 -14.5 22.0801 -7.40918 33.3604c7.08984 11.2793 21.9297 14.5996 33.1201 7.45996
+c53.71 -34.2998 85.7695 -92.71 85.7695 -156.24zM338.23 268.87c28.2393 -15.54 45.7793 -44.9805 45.7793 -76.8604s-17.54 -61.3301 -45.7695 -76.8799c-11.5605 -6.34961 -26.1807 -2.20996 -32.6104 9.4502c-6.38965 11.6104 -2.16016 26.2002 9.4502 32.6104
+c12.9004 7.08984 20.9199 20.4297 20.9199 34.8096c0 14.3701 -8.01953 27.7197 -20.9297 34.8096c-11.6104 6.41016 -15.8398 21 -9.4502 32.6104c6.41992 11.6104 21.0303 15.7803 32.6104 9.4502z" />
+ <glyph glyph-name="qrcode" unicode="&#xf029;" horiz-adv-x="448"
+d="M0 224v192h192v-192h-192zM64 352v-64h64v64h-64zM256 416h192v-192h-192v192zM384 288v64h-64v-64h64zM0 -32v192h192v-192h-192zM64 96v-64h64v64h-64zM416 160h32v-128h-96v32h-32v-96h-64v192h96v-32h64v32zM416 0h32v-32h-32v32zM352 0h32v-32h-32v32z" />
+ <glyph glyph-name="barcode" unicode="&#xf02a;"
+d="M0 0v384h18v-384h-18zM26.8574 0.273438v383.727h9.14258v-383.727h-9.14258zM54 0.273438v383.727h8.85742v-383.727h-8.85742zM98.8574 0.273438v383.727h8.85645v-383.727h-8.85645zM134.857 0.273438v383.727h17.7139v-383.727h-17.7139zM179.714 0.273438v383.727
+h8.85742v-383.727h-8.85742zM197.714 0.273438v383.727h8.85742v-383.727h-8.85742zM215.714 0.273438v383.727h8.85742v-383.727h-8.85742zM251.429 0.273438v383.727h18v-383.727h-18zM296.286 0.273438v383.727h18v-383.727h-18zM332.285 0.273438v383.727h18.001
+v-383.727h-18.001zM368.286 0.273438v383.727h18.001v-383.727h-18.001zM395.143 0.273438v383.727h18v-383.727h-18zM440.286 0.273438v383.727h26.8564v-383.727h-26.8564zM476 0.273438v383.727h9.14258v-383.727h-9.14258zM494 0v384h18v-384h-18z" />
+ <glyph glyph-name="tag" unicode="&#xf02b;" horiz-adv-x="511"
+d="M0 195.882v204.118c0 26.5098 21.4902 48 48 48h204.118c10.9746 0 26.1807 -6.29883 33.9404 -14.0586l211.883 -211.883c18.7441 -18.7441 18.7441 -49.1367 0 -67.8818l-204.118 -204.118c-18.7451 -18.7441 -49.1377 -18.7441 -67.8818 0l-211.883 211.883
+c-7.75977 7.75977 -14.0586 22.9658 -14.0586 33.9404zM112 384c-26.5098 0 -48 -21.4902 -48 -48s21.4902 -48 48 -48s48 21.4902 48 48s-21.4902 48 -48 48z" />
+ <glyph glyph-name="tags" unicode="&#xf02c;" horiz-adv-x="639"
+d="M497.941 222.059c18.7441 -18.7441 18.7441 -49.1367 0 -67.8818l-204.118 -204.118c-18.7461 -18.7451 -49.1387 -18.7441 -67.8818 0l-211.883 211.883c-7.75977 7.75977 -14.0586 22.9658 -14.0586 33.9404v204.118c0 26.5098 21.4902 48 48 48h204.118
+c10.9746 0 26.1807 -6.29883 33.9404 -14.0586zM112 288c26.5098 0 48 21.4902 48 48s-21.4902 48 -48 48s-48 -21.4902 -48 -48s21.4902 -48 48 -48zM625.941 154.177l-204.118 -204.118c-18.7451 -18.7441 -49.1377 -18.7441 -67.8818 0l-0.360352 0.360352
+l174.059 174.059c16.999 16.999 26.3604 39.6006 26.3604 63.6406s-9.3623 46.6406 -26.3604 63.6396l-196.242 196.242h48.7207c10.9746 0 26.1807 -6.29883 33.9404 -14.0586l211.883 -211.883c18.7441 -18.7441 18.7441 -49.1367 0 -67.8818z" />
+ <glyph glyph-name="book" unicode="&#xf02d;" horiz-adv-x="448"
+d="M448 88c0 -7.5 -3.5 -14.2998 -8.90039 -18.5996c-4.19922 -15.4004 -4.19922 -59.3008 0 -74.7002c5.40039 -4.40039 8.90039 -11.2002 8.90039 -18.7002v-16c0 -13.2998 -10.7002 -24 -24 -24h-328c-53 0 -96 43 -96 96v320c0 53 43 96 96 96h328
+c13.2998 0 24 -10.7002 24 -24v-336zM128 314v-20c0 -3.2998 2.7002 -6 6 -6h212c3.2998 0 6 2.7002 6 6v20c0 3.2998 -2.7002 6 -6 6h-212c-3.2998 0 -6 -2.7002 -6 -6zM128 250v-20c0 -3.2998 2.7002 -6 6 -6h212c3.2998 0 6 2.7002 6 6v20c0 3.2998 -2.7002 6 -6 6h-212
+c-3.2998 0 -6 -2.7002 -6 -6zM381.4 0c-1.90039 17.0996 -1.90039 46.9004 0 64h-285.4c-17.5996 0 -32 -14.4004 -32 -32c0 -17.7002 14.2998 -32 32 -32h285.4z" />
+ <glyph glyph-name="bookmark" unicode="&#xf02e;" horiz-adv-x="384"
+d="M0 -64v464c0 26.5098 21.4902 48 48 48h288c26.5098 0 48 -21.4902 48 -48v-464l-192 112z" />
+ <glyph glyph-name="print" unicode="&#xf02f;"
+d="M448 256c35.3496 0 64 -28.6504 64 -64v-112c0 -8.83984 -7.16016 -16 -16 -16h-48v-96c0 -17.6699 -14.3301 -32 -32 -32h-320c-17.6699 0 -32 14.3301 -32 32v96h-48c-8.83984 0 -16 7.16016 -16 16v112c0 35.3496 28.6504 64 64 64v160c0 17.6699 14.3301 32 32 32
+h274.74c8.49023 0 16.6299 -3.37012 22.6299 -9.37012l45.2598 -45.25c6 -6.00977 9.37012 -14.1396 9.37012 -22.6299v-114.75zM384 0v96h-256v-96h256zM384 224v96h-48c-8.83984 0 -16 7.16016 -16 16v48h-192v-160h256zM432 152c13.25 0 24 10.75 24 24
+c0 13.2598 -10.75 24 -24 24s-24 -10.7402 -24 -24c0 -13.25 10.75 -24 24 -24z" />
+ <glyph glyph-name="camera" unicode="&#xf030;"
+d="M512 304v-288c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v288c0 26.5 21.5 48 48 48h88l12.2998 32.9004c7 18.6992 24.9004 31.0996 44.9004 31.0996h125.5c20 0 37.8994 -12.4004 44.8994 -31.0996l12.4004 -32.9004h88c26.5 0 48 -21.5 48 -48zM376 160
+c0 66.2002 -53.7998 120 -120 120s-120 -53.7998 -120 -120s53.7998 -120 120 -120s120 53.7998 120 120zM344 160c0 -48.5 -39.5 -88 -88 -88s-88 39.5 -88 88s39.5 88 88 88s88 -39.5 88 -88z" />
+ <glyph glyph-name="font" unicode="&#xf031;" horiz-adv-x="448"
+d="M432 32c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-136c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h23.4004l-26.6006 80.7998h-138.2l-26.5996 -80.7998h24c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-136
+c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h26.7002l129.8 373.3c2.2002 6.40039 8.2998 10.7002 15.0996 10.7002h72.8008c6.7998 0 12.8994 -4.2998 15.0996 -10.7002l129.8 -373.3h26.7002zM174.4 179.7h98.5996l-41.2998 124
+c-4.7998 15.2002 -6.90039 28.2002 -7.7002 34.7002c-1 -6.40039 -3.2998 -19.4004 -7.59961 -34.6006z" />
+ <glyph glyph-name="bold" unicode="&#xf032;" horiz-adv-x="384"
+d="M304.793 204.109c44.6338 -14.4395 71.207 -53.9551 71.207 -106.123c0 -60.6338 -38.4805 -107.18 -89.4443 -122.402c-22.0312 -6.29492 -42.6631 -7.58398 -66.8652 -7.58398h-195.69c-8.83691 0 -16 7.16309 -16 16v32.4209c0 8.83691 7.16309 16 16 16h33.1133
+v318.53h-33.1133c-8.83691 0 -16 7.16309 -16 16v33.0488c0 8.83691 7.16309 16 16 16h185.661c31.1875 0 55.4766 -2.00977 80.1631 -12.0186c42.376 -16.5537 68.626 -55.9434 68.626 -104.18c0 -41.5332 -20.0186 -77.1553 -53.6572 -95.6924zM142.217 347.191v-112.953
+h76.8467c32.709 0 53.0283 21.7607 53.0283 56.79c0 22.9434 -9.07812 40.9668 -24.9062 49.4453c-9.98828 4.69824 -21.2305 6.71777 -37.5244 6.71777h-67.4443zM254.859 41.7158c21.1221 8.80078 34.7822 32.8594 34.7812 61.2842
+c0 37.6768 -23.4121 63.0566 -63.0566 63.0566h-84.3672v-129.248h81.2334c8.73145 0 21.2686 0.851562 31.4092 4.90723z" />
+ <glyph glyph-name="italic" unicode="&#xf033;" horiz-adv-x="320"
+d="M204.758 32c10.0664 0 17.6309 -9.18457 15.7041 -19.0654l-6.24512 -32c-1.39355 -7.13965 -8.42871 -12.9346 -15.7031 -12.9346h-0.000976562h-158.479c-10.0674 0 -17.6318 9.18457 -15.7051 19.0654l6.24512 32c1.39355 7.13965 8.42871 12.9346 15.7031 12.9346
+h0.000976562h39.4707l62.0898 320h-33.8457c-10.0664 0 -17.6309 9.18359 -15.7041 19.0635l6.24219 32c1.39258 7.14062 8.42773 12.9365 15.7041 12.9365h159.732c10.0664 0 17.6309 -9.18359 15.7041 -19.0635l-6.24219 -32
+c-1.39258 -7.14062 -8.42773 -12.9365 -15.7041 -12.9365h-40.7246l-62.0918 -320h33.8486z" />
+ <glyph glyph-name="text-height" unicode="&#xf034;" horiz-adv-x="576"
+d="M16 416h288c8.83691 0 16 -7.16309 16 -16v-96c0 -8.83691 -7.16309 -16 -16 -16h-35.4961c-8.83691 0 -16 7.16309 -16 16v48h-54.7607v-320h34.2568c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-144c-8.83691 0 -16 7.16309 -16 16v32
+c0 8.83691 7.16309 16 16 16h34.2568v320h-54.7607v-48c0 -8.83691 -7.16309 -16 -16 -16h-35.4961c-8.83691 0 -16 7.16309 -16 16v96c0 8.83691 7.16309 16 16 16zM491.308 411.315l79.9951 -80.001c10.0059 -10.0078 2.99414 -27.3145 -11.3125 -27.3145h-47.9902v-224
+h48c15.6387 0 20.6348 -17.9912 11.3135 -27.3145l-79.9951 -80.001c-6.24707 -6.24609 -16.3818 -6.24414 -22.626 0l-79.9951 80.001c-10.0059 10.0078 -2.99414 27.3145 11.3125 27.3145h47.9902v224h-48c-15.6387 0 -20.6348 17.9912 -11.3135 27.3145l79.9951 80.001
+c6.24707 6.24805 16.3818 6.24414 22.626 0z" />
+ <glyph glyph-name="text-width" unicode="&#xf035;" horiz-adv-x="448"
+d="M16 416h416c8.83691 0 16 -7.16309 16 -16v-96c0 -8.83691 -7.16309 -16 -16 -16h-35.4961c-8.83691 0 -16 7.16309 -16 16v48h-118.761v-128h34.2568c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-144c-8.83691 0 -16 7.16309 -16 16v32
+c0 8.83691 7.16309 16 16 16h34.2568v128h-118.761v-48c0 -8.83691 -7.16309 -16 -16 -16h-35.4961c-8.83691 0 -16 7.16309 -16 16v96c0 8.83691 7.16309 16 16 16zM443.315 75.3184c6.24805 -6.24707 6.24414 -16.3818 0 -22.626l-80.001 -79.9951
+c-10.0078 -10.0059 -27.3145 -2.99414 -27.3145 11.3125v47.9902h-224v-48c0 -15.6377 -17.9912 -20.6348 -27.3145 -11.3135l-80.001 79.9951c-6.24609 6.24707 -6.24414 16.3818 0 22.626l80.001 79.9951c10.0078 10.0059 27.3145 2.99414 27.3145 -11.3125v-47.9902h224
+v48c0 15.6377 17.9912 20.6348 27.3145 11.3135z" />
+ <glyph glyph-name="align-left" unicode="&#xf036;" horiz-adv-x="448"
+d="M288 404v-40c0 -8.83691 -7.16309 -16 -16 -16h-256c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h256c8.83691 0 16 -7.16309 16 -16zM0 276c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416
+c-8.83691 0 -16 7.16309 -16 16v40zM16 -36c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416zM272 164c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-256
+c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h256z" />
+ <glyph glyph-name="align-center" unicode="&#xf037;" horiz-adv-x="448"
+d="M352 404v-40c0 -8.83691 -7.16309 -16 -16 -16h-224c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h224c8.83691 0 16 -7.16309 16 -16zM16 220c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40
+c0 -8.83691 -7.16309 -16 -16 -16h-416zM16 -36c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416zM336 164c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16
+h-224c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h224z" />
+ <glyph glyph-name="align-right" unicode="&#xf038;" horiz-adv-x="448"
+d="M160 364v40c0 8.83691 7.16309 16 16 16h256c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-256c-8.83691 0 -16 7.16309 -16 16zM16 220c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40
+c0 -8.83691 -7.16309 -16 -16 -16h-416zM16 -36c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416zM176 92c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h256
+c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-256z" />
+ <glyph glyph-name="align-justify" unicode="&#xf039;" horiz-adv-x="448"
+d="M0 364v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416c-8.83691 0 -16 7.16309 -16 16zM16 220c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40
+c0 -8.83691 -7.16309 -16 -16 -16h-416zM16 -36c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416zM16 92c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416
+c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416z" />
+ <glyph glyph-name="list" unicode="&#xf03a;"
+d="M128 332v40c0 8.83691 7.16309 16 16 16h352c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-352c-8.83691 0 -16 7.16309 -16 16zM144 156c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h352c8.83691 0 16 -7.16309 16 -16v-40
+c0 -8.83691 -7.16309 -16 -16 -16h-352zM144 -4c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h352c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-352zM16 304c-8.83691 0 -16 7.16309 -16 16v64c0 8.83691 7.16309 16 16 16h64
+c8.83691 0 16 -7.16309 16 -16v-64c0 -8.83691 -7.16309 -16 -16 -16h-64zM16 144c-8.83691 0 -16 7.16309 -16 16v64c0 8.83691 7.16309 16 16 16h64c8.83691 0 16 -7.16309 16 -16v-64c0 -8.83691 -7.16309 -16 -16 -16h-64zM16 -16c-8.83691 0 -16 7.16309 -16 16v64
+c0 8.83691 7.16309 16 16 16h64c8.83691 0 16 -7.16309 16 -16v-64c0 -8.83691 -7.16309 -16 -16 -16h-64z" />
+ <glyph glyph-name="outdent" unicode="&#xf03b;" horiz-adv-x="448"
+d="M0 364v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416c-8.83691 0 -16 7.16309 -16 16zM208 220c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h224c8.83691 0 16 -7.16309 16 -16v-40
+c0 -8.83691 -7.16309 -16 -16 -16h-224zM16 -36c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416zM208 92c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h224
+c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-224zM4.68652 180.687c-6.24805 6.24805 -6.24805 16.3789 0 22.627l96 95.9912c9.98828 9.99121 27.3135 3.0166 27.3135 -11.3125v-191.977c0 -14.2393 -17.2656 -21.3633 -27.3135 -11.3125z" />
+ <glyph glyph-name="indent" unicode="&#xf03c;" horiz-adv-x="448"
+d="M0 364v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416c-8.83691 0 -16 7.16309 -16 16zM176 220c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h256c8.83691 0 16 -7.16309 16 -16v-40
+c0 -8.83691 -7.16309 -16 -16 -16h-256zM16 -36c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416zM176 92c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h256
+c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-256zM123.313 203.313c6.24805 -6.24805 6.24805 -16.3789 0 -22.625l-96 -95.9922c-9.98828 -9.99121 -27.3135 -3.0166 -27.3135 11.3125v191.976c0 14.2393 17.2656 21.3633 27.3135 11.3125z" />
+ <glyph glyph-name="video" unicode="&#xf03d;" horiz-adv-x="576"
+d="M336.2 384c26.3994 0 47.7998 -21.4004 47.7998 -47.7998v-288.4c0 -26.3994 -21.4004 -47.7998 -47.7998 -47.7998h-288.4c-26.3994 0 -47.7998 21.4004 -47.7998 47.7998v288.4c0 26.3994 21.4004 47.7998 47.7998 47.7998h288.4zM525.6 346.3
+c21.3008 14.6006 50.4004 -0.399414 50.4004 -25.7998v-256.9c0 -25.5 -29.2002 -40.3994 -50.4004 -25.7998l-109.6 75.5v157.4z" />
+ <glyph glyph-name="image" unicode="&#xf03e;"
+d="M464 0h-416c-26.5098 0 -48 21.4902 -48 48v288c0 26.5098 21.4902 48 48 48h416c26.5098 0 48 -21.4902 48 -48v-288c0 -26.5098 -21.4902 -48 -48 -48zM112 328c-30.9277 0 -56 -25.0723 -56 -56s25.0723 -56 56 -56s56 25.0723 56 56s-25.0723 56 -56 56zM64 64h384
+v112l-87.5146 87.5146c-4.68652 4.68652 -12.2842 4.68652 -16.9717 0l-135.514 -135.515l-55.5146 55.5146c-4.68652 4.68652 -12.2842 4.68652 -16.9717 0l-71.5137 -71.5146v-48z" />
+ <glyph glyph-name="map-marker" unicode="&#xf041;" horiz-adv-x="384"
+d="M172.268 -53.6699c-145.298 210.639 -172.268 232.257 -172.268 309.67c0 106.039 85.9609 192 192 192s192 -85.9609 192 -192c0 -77.4131 -26.9697 -99.0312 -172.268 -309.67c-9.53516 -13.7744 -29.9307 -13.7734 -39.4648 0z" />
+ <glyph glyph-name="adjust" unicode="&#xf042;"
+d="M8 192c0 136.967 111.034 248 248 248s248 -111.034 248 -248s-111.033 -248 -248 -248s-248 111.034 -248 248zM256 8c101.689 0 184 82.2949 184 184c0 101.689 -82.2949 184 -184 184v-368z" />
+ <glyph glyph-name="tint" unicode="&#xf043;" horiz-adv-x="352"
+d="M205.22 425.91c46.9902 -158.48 146.78 -200.07 146.78 -311.82c0 -98.4395 -78.7197 -178.09 -176 -178.09s-176 79.6504 -176 178.09c0 111.19 100.01 154.061 146.78 311.82c9 30.1201 50.5 28.7803 58.4395 0zM176 0c8.83984 0 16 7.16016 16 16s-7.16016 16 -16 16
+c-44.1104 0 -80 35.8896 -80 80c0 8.83984 -7.16016 16 -16 16s-16 -7.16016 -16 -16c0 -61.75 50.25 -112 112 -112z" />
+ <glyph glyph-name="edit" unicode="&#xf044;" horiz-adv-x="575"
+d="M402.6 364.8l90.2002 -90.2002c3.7998 -3.7998 3.7998 -10 0 -13.7998l-218.399 -218.399l-92.8008 -10.3008c-12.3994 -1.39941 -22.8994 9.10059 -21.5 21.5l10.3008 92.8008l218.399 218.399c3.7998 3.7998 10 3.7998 13.7998 0zM564.6 387.7
+c15.2002 -15.2002 15.2002 -39.9004 0 -55.2002l-35.3994 -35.4004c-3.7998 -3.7998 -10 -3.7998 -13.7998 0l-90.2002 90.2002c-3.7998 3.7998 -3.7998 10 0 13.7998l35.3994 35.4004c15.3008 15.2002 40 15.2002 55.2002 0zM384 101.8c0 3.2002 1.2998 6.2002 3.5 8.5
+l40 40c7.59961 7.5 20.5 2.2002 20.5 -8.5v-157.8c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h285.8c10.7002 0 16.1006 -12.9004 8.5 -20.5l-40 -40c-2.2998 -2.2002 -5.2998 -3.5 -8.5 -3.5h-229.8v-320h320v101.8z" />
+ <glyph glyph-name="step-backward" unicode="&#xf048;" horiz-adv-x="448"
+d="M64 -20v424c0 6.59961 5.40039 12 12 12h48c6.59961 0 12 -5.40039 12 -12v-176.4l195.5 181c20.5996 17.1006 52.5 2.80078 52.5 -24.5996v-384c0 -27.4004 -31.9004 -41.7002 -52.5 -24.5996l-195.5 179.899v-175.3c0 -6.59961 -5.40039 -12 -12 -12h-48
+c-6.59961 0 -12 5.40039 -12 12z" />
+ <glyph glyph-name="fast-backward" unicode="&#xf049;"
+d="M0 12v360c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-151.9l171.5 156.5c20.5996 17.1006 52.5 2.80078 52.5 -24.5996v-131.9l171.5 156.5c20.5996 17.1006 52.5 2.80078 52.5 -24.5996v-320c0 -27.4004 -31.9004 -41.7002 -52.5 -24.5996
+l-171.5 155.3v-130.7c0 -27.4004 -31.9004 -41.7002 -52.5 -24.5996l-171.5 155.3v-150.7c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12z" />
+ <glyph glyph-name="backward" unicode="&#xf04a;"
+d="M11.5 167.4c-15.2998 12.7998 -15.2998 36.3994 0 49.1992l192 160c20.5996 17.2002 52.5 2.80078 52.5 -24.5996v-320c0 -27.4004 -31.9004 -41.7998 -52.5 -24.5996zM267.5 167.4c-15.2998 12.7998 -15.2998 36.3994 0 49.1992l192 160
+c20.5996 17.2002 52.5 2.80078 52.5 -24.5996v-320c0 -27.4004 -31.9004 -41.7998 -52.5 -24.5996z" />
+ <glyph glyph-name="play" unicode="&#xf04b;" horiz-adv-x="447"
+d="M424.4 233.3c31.5 -18.5 31.3994 -64.0996 0 -82.5996l-352 -208c-31.7002 -18.7998 -72.4004 3.7998 -72.4004 41.2998v416.1c0 41.8008 43.7998 58.2002 72.4004 41.3008z" />
+ <glyph glyph-name="pause" unicode="&#xf04c;" horiz-adv-x="448"
+d="M144 -31h-96c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h96c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48zM448 17c0 -26.5 -21.5 -48 -48 -48h-96c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h96c26.5 0 48 -21.5 48 -48v-352z" />
+ <glyph glyph-name="stop" unicode="&#xf04d;" horiz-adv-x="448"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352z" />
+ <glyph glyph-name="forward" unicode="&#xf04e;" horiz-adv-x="511"
+d="M500.5 216.6c15.2998 -12.7998 15.2998 -36.3994 0 -49.1992l-192 -160c-20.5996 -17.2002 -52.5 -2.80078 -52.5 24.5996v320c0 27.4004 31.9004 41.7002 52.5 24.5996zM244.5 216.6c15.2998 -12.7998 15.2998 -36.3994 0 -49.1992l-192 -160
+c-20.5996 -17.2002 -52.5 -2.80078 -52.5 24.5996v320c0 27.4004 31.9004 41.7002 52.5 24.5996z" />
+ <glyph glyph-name="fast-forward" unicode="&#xf050;"
+d="M512 372v-360c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v151.9l-171.5 -156.5c-20.5996 -17.2002 -52.5 -2.80078 -52.5 24.5996v131.9l-171.5 -156.5c-20.5996 -17.2002 -52.5 -2.80078 -52.5 24.5996v320
+c0 27.4004 31.9004 41.7002 52.5 24.5996l171.5 -155.399v130.8c0 27.4004 31.9004 41.7002 52.5 24.5996l171.5 -155.399v150.8c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12z" />
+ <glyph glyph-name="step-forward" unicode="&#xf051;" horiz-adv-x="448"
+d="M384 404v-424c0 -6.59961 -5.40039 -12 -12 -12h-48c-6.59961 0 -12 5.40039 -12 12v176.4l-195.5 -181c-20.5996 -17.1006 -52.5 -2.80078 -52.5 24.5996v384c0 27.4004 31.9004 41.7002 52.5 24.5996l195.5 -179.899v175.3c0 6.59961 5.40039 12 12 12h48
+c6.59961 0 12 -5.40039 12 -12z" />
+ <glyph glyph-name="eject" unicode="&#xf052;" horiz-adv-x="448"
+d="M448 64v-64c0 -17.6729 -14.3271 -32 -32 -32h-384c-17.6729 0 -32 14.3271 -32 32v64c0 17.6729 14.3271 32 32 32h384c17.6729 0 32 -14.3271 32 -32zM48.0527 128c-41.7285 0 -63.5273 49.7324 -35.3828 80.4346l175.946 192.008
+c19.0156 20.7432 51.7529 20.7422 70.7666 0l175.939 -192.008c28.1973 -30.7607 6.26758 -80.4346 -35.3828 -80.4346h-351.887z" />
+ <glyph glyph-name="chevron-left" unicode="&#xf053;" horiz-adv-x="320"
+d="M34.5195 208.97l194.351 194.34c9.37012 9.37012 24.5703 9.37012 33.9395 0l22.6709 -22.6699c9.35938 -9.35938 9.36914 -24.5195 0.0390625 -33.8994l-154.029 -154.74l154.02 -154.75c9.33984 -9.37988 9.32031 -24.54 -0.0400391 -33.9004l-22.6699 -22.6699
+c-9.37012 -9.37012 -24.5693 -9.37012 -33.9395 0l-194.341 194.351c-9.36914 9.37012 -9.36914 24.5693 0 33.9395z" />
+ <glyph glyph-name="chevron-right" unicode="&#xf054;" horiz-adv-x="319"
+d="M285.476 175.029l-194.344 -194.344c-9.37305 -9.37207 -24.5684 -9.37207 -33.9404 0l-22.667 22.667c-9.35742 9.35742 -9.375 24.5225 -0.0400391 33.9014l154.021 154.746l-154.021 154.745c-9.33496 9.37891 -9.31738 24.5439 0.0400391 33.9014l22.667 22.667
+c9.37305 9.37207 24.5684 9.37207 33.9404 0l194.343 -194.344c9.37305 -9.37207 9.37305 -24.5674 0.000976562 -33.9404z" />
+ <glyph glyph-name="plus-circle" unicode="&#xf055;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM400 164v56c0 6.59961 -5.40039 12 -12 12h-92v92c0 6.59961 -5.40039 12 -12 12h-56c-6.59961 0 -12 -5.40039 -12 -12v-92h-92c-6.59961 0 -12 -5.40039 -12 -12v-56
+c0 -6.59961 5.40039 -12 12 -12h92v-92c0 -6.59961 5.40039 -12 12 -12h56c6.59961 0 12 5.40039 12 12v92h92c6.59961 0 12 5.40039 12 12z" />
+ <glyph glyph-name="minus-circle" unicode="&#xf056;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM124 152h264c6.59961 0 12 5.40039 12 12v56c0 6.59961 -5.40039 12 -12 12h-264c-6.59961 0 -12 -5.40039 -12 -12v-56c0 -6.59961 5.40039 -12 12 -12z" />
+ <glyph glyph-name="times-circle" unicode="&#xf057;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM377.6 126.9l-65.5996 65.0996l65.7002 65c4.7002 4.7002 4.7002 12.2998 0 17l-39.6006 39.5996c-4.69922 4.7002 -12.2998 4.7002 -17 0l-65.0996 -65.5996l-65 65.7002
+c-4.7002 4.7002 -12.2998 4.7002 -17 0l-39.5996 -39.6006c-4.7002 -4.69922 -4.7002 -12.2998 0 -17l65.5996 -65.0996l-65.5996 -65c-4.7002 -4.7002 -4.7002 -12.2998 0 -17l39.5 -39.5996c4.69922 -4.7002 12.2998 -4.7002 17 0l65.0996 65.5996l65 -65.5996
+c4.7002 -4.7002 12.2998 -4.7002 17 0l39.5996 39.5c4.7002 4.69922 4.7002 12.2998 0 17z" />
+ <glyph glyph-name="check-circle" unicode="&#xf058;"
+d="M504 192c0 -136.967 -111.033 -248 -248 -248s-248 111.033 -248 248s111.033 248 248 248s248 -111.033 248 -248zM227.314 60.6855l184 184c6.24707 6.24805 6.24707 16.3799 0 22.6279l-22.6279 22.627c-6.24707 6.24902 -16.3789 6.24902 -22.6279 0
+l-150.059 -150.059l-70.0586 70.0596c-6.24805 6.24805 -16.3799 6.24805 -22.6279 0l-22.6279 -22.627c-6.24707 -6.24805 -6.24707 -16.3799 0 -22.6279l104 -104c6.24902 -6.24805 16.3799 -6.24805 22.6289 -0.000976562z" />
+ <glyph glyph-name="question-circle" unicode="&#xf059;"
+d="M504 192c0 -136.997 -111.043 -248 -248 -248s-248 111.003 -248 248c0 136.917 111.043 248 248 248s248 -111.083 248 -248zM262.655 358c-54.4971 0 -89.2549 -22.957 -116.549 -63.7578c-3.53613 -5.28613 -2.35352 -12.415 2.71484 -16.2578l34.6982 -26.3105
+c5.20508 -3.94727 12.6211 -3.00781 16.665 2.12207c17.8643 22.6582 30.1133 35.7969 57.3037 35.7969c20.4287 0 45.6973 -13.1475 45.6973 -32.958c0 -14.9756 -12.3623 -22.667 -32.5332 -33.9756c-23.5244 -13.1875 -54.6523 -29.6006 -54.6523 -70.6592v-4
+c0 -6.62695 5.37305 -12 12 -12h56c6.62695 0 12 5.37305 12 12v1.33301c0 28.4619 83.1855 29.6475 83.1855 106.667c0 58.002 -60.1641 102 -116.53 102zM256 110c-25.3652 0 -46 -20.6348 -46 -46c0 -25.3643 20.6348 -46 46 -46s46 20.6357 46 46
+c0 25.3652 -20.6348 46 -46 46z" />
+ <glyph glyph-name="info-circle" unicode="&#xf05a;"
+d="M256 440c136.957 0 248 -111.083 248 -248c0 -136.997 -111.043 -248 -248 -248s-248 111.003 -248 248c0 136.917 111.043 248 248 248zM256 330c-23.1963 0 -42 -18.8037 -42 -42s18.8037 -42 42 -42s42 18.8037 42 42s-18.8037 42 -42 42zM312 76v24
+c0 6.62695 -5.37305 12 -12 12h-12v100c0 6.62695 -5.37305 12 -12 12h-64c-6.62695 0 -12 -5.37305 -12 -12v-24c0 -6.62695 5.37305 -12 12 -12h12v-64h-12c-6.62695 0 -12 -5.37305 -12 -12v-24c0 -6.62695 5.37305 -12 12 -12h88c6.62695 0 12 5.37305 12 12z" />
+ <glyph glyph-name="crosshairs" unicode="&#xf05b;"
+d="M500 224c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-30.3643c-13.9121 -93.6748 -87.9609 -167.724 -181.636 -181.636v-30.3643c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v30.3643
+c-93.6748 13.9121 -167.724 87.9609 -181.636 181.636h-30.3643c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h30.3643c13.9121 93.6748 87.9609 167.724 181.636 181.636v30.3643c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-30.3643
+c93.6748 -13.9121 167.724 -87.9609 181.636 -181.636h30.3643zM288 43.3662c58.2432 12.417 104.232 58.46 116.634 116.634h-40.6338c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40.6338c-12.417 58.2432 -58.46 104.232 -116.634 116.634v-40.6338
+c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v40.6338c-58.2432 -12.417 -104.232 -58.46 -116.634 -116.634h40.6338c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-40.6338
+c12.417 -58.2432 58.46 -104.232 116.634 -116.634v40.6338c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40.6338zM288 192c0 -17.6729 -14.3271 -32 -32 -32s-32 14.3271 -32 32s14.3271 32 32 32s32 -14.3271 32 -32z" />
+ <glyph glyph-name="ban" unicode="&#xf05e;"
+d="M256 440c136.967 0 248 -111.034 248 -248s-111.034 -248 -248 -248s-248 111.033 -248 248s111.034 248 248 248zM386.108 322.108c-65.4121 65.4102 -165.435 70.0312 -235.639 20.6758l256.315 -256.313c49.3232 70.1562 44.7705 170.189 -20.6768 235.638z
+M125.892 61.8916c65.4121 -65.4111 165.436 -70.0312 235.639 -20.6758l-256.315 256.313c-49.3232 -70.1562 -44.7705 -170.189 20.6768 -235.638z" />
+ <glyph glyph-name="arrow-left" unicode="&#xf060;" horiz-adv-x="448"
+d="M257.5 2.90039l-22.2002 -22.2002c-9.39941 -9.40039 -24.5996 -9.40039 -33.8994 0l-194.4 194.3c-9.40039 9.40039 -9.40039 24.5996 0 33.9004l194.4 194.399c9.39941 9.40039 24.5996 9.40039 33.8994 0l22.2002 -22.2002c9.5 -9.5 9.2998 -25 -0.400391 -34.2998
+l-120.5 -114.8h287.4c13.2998 0 24 -10.7002 24 -24v-32c0 -13.2998 -10.7002 -24 -24 -24h-287.4l120.5 -114.8c9.80078 -9.2998 10 -24.7998 0.400391 -34.2998z" />
+ <glyph glyph-name="arrow-right" unicode="&#xf061;" horiz-adv-x="448"
+d="M190.5 381.1l22.2002 22.2002c9.39941 9.40039 24.5996 9.40039 33.8994 0l194.4 -194.3c9.40039 -9.40039 9.40039 -24.5996 0 -33.9004l-194.4 -194.399c-9.39941 -9.40039 -24.5996 -9.40039 -33.8994 0l-22.2002 22.2002c-9.5 9.5 -9.2998 25 0.400391 34.2998
+l120.5 114.8h-287.4c-13.2998 0 -24 10.7002 -24 24v32c0 13.2998 10.7002 24 24 24h287.4l-120.5 114.8c-9.80078 9.2998 -10 24.7998 -0.400391 34.2998z" />
+ <glyph glyph-name="arrow-up" unicode="&#xf062;" horiz-adv-x="447"
+d="M34.9004 158.5l-22.2002 22.2002c-9.40039 9.39941 -9.40039 24.5996 0 33.8994l194.3 194.4c9.40039 9.40039 24.5996 9.40039 33.9004 0l194.3 -194.3c9.39941 -9.40039 9.39941 -24.6006 0 -33.9004l-22.2002 -22.2002c-9.5 -9.5 -25 -9.2998 -34.2998 0.400391
+l-114.7 120.4v-287.4c0 -13.2998 -10.7002 -24 -24 -24h-32c-13.2998 0 -24 10.7002 -24 24v287.4l-114.8 -120.5c-9.2998 -9.80078 -24.7998 -10 -34.2998 -0.400391z" />
+ <glyph glyph-name="arrow-down" unicode="&#xf063;" horiz-adv-x="448"
+d="M413.1 225.5l22.2002 -22.2002c9.40039 -9.39941 9.40039 -24.5996 0 -33.8994l-194.3 -194.4c-9.40039 -9.40039 -24.5996 -9.40039 -33.9004 0l-194.399 194.4c-9.40039 9.39941 -9.40039 24.5996 0 33.8994l22.2002 22.2002c9.5 9.5 25 9.2998 34.2998 -0.400391
+l114.8 -120.5v287.4c0 13.2998 10.7002 24 24 24h32c13.2998 0 24 -10.7002 24 -24v-287.4l114.8 120.5c9.2998 9.80078 24.7998 10 34.2998 0.400391z" />
+ <glyph glyph-name="share" unicode="&#xf064;"
+d="M503.691 258.164c11.0859 -9.5752 11.0703 -26.7656 0 -36.3281l-176.005 -152c-15.3867 -13.2891 -39.6865 -2.53613 -39.6865 18.1641v87.915c-155.083 -2.23145 -221.934 -40.7295 -176.59 -185.742c5.03418 -16.0977 -14.4238 -28.5615 -28.0771 -18.6309
+c-43.752 31.8232 -83.333 92.6914 -83.333 154.132c0 152.227 127.371 184.419 288 186.258v80.0537c0 20.668 24.2812 31.4688 39.6865 18.1641z" />
+ <glyph glyph-name="expand" unicode="&#xf065;" horiz-adv-x="448"
+d="M0 268v124c0 13.2998 10.7002 24 24 24h124c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-84v-84c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12zM288 404c0 6.59961 5.40039 12 12 12h124c13.2998 0 24 -10.7002 24 -24
+v-124c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v84h-84c-6.59961 0 -12 5.40039 -12 12v40zM436 128c6.59961 0 12 -5.40039 12 -12v-124c0 -13.2998 -10.7002 -24 -24 -24h-124c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h84
+v84c0 6.59961 5.40039 12 12 12h40zM160 -20c0 -6.59961 -5.40039 -12 -12 -12h-124c-13.2998 0 -24 10.7002 -24 24v124c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-84h84c6.59961 0 12 -5.40039 12 -12v-40z" />
+ <glyph glyph-name="compress" unicode="&#xf066;" horiz-adv-x="448"
+d="M436 256h-124c-13.2998 0 -24 10.7002 -24 24v124c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-84h84c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12zM160 280c0 -13.2998 -10.7002 -24 -24 -24h-124
+c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h84v84c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-124zM160 -20c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v84h-84c-6.59961 0 -12 5.40039 -12 12v40
+c0 6.59961 5.40039 12 12 12h124c13.2998 0 24 -10.7002 24 -24v-124zM352 -20c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v124c0 13.2998 10.7002 24 24 24h124c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-84v-84z" />
+ <glyph glyph-name="plus" unicode="&#xf067;" horiz-adv-x="448"
+d="M416 240c17.6699 0 32 -14.3301 32 -32v-32c0 -17.6699 -14.3301 -32 -32 -32h-144v-144c0 -17.6699 -14.3301 -32 -32 -32h-32c-17.6699 0 -32 14.3301 -32 32v144h-144c-17.6699 0 -32 14.3301 -32 32v32c0 17.6699 14.3301 32 32 32h144v144
+c0 17.6699 14.3301 32 32 32h32c17.6699 0 32 -14.3301 32 -32v-144h144z" />
+ <glyph glyph-name="minus" unicode="&#xf068;" horiz-adv-x="448"
+d="M416 240c17.6699 0 32 -14.3301 32 -32v-32c0 -17.6699 -14.3301 -32 -32 -32h-384c-17.6699 0 -32 14.3301 -32 32v32c0 17.6699 14.3301 32 32 32h384z" />
+ <glyph glyph-name="asterisk" unicode="&#xf069;"
+d="M478.21 113.907c11.7949 -6.47754 15.96 -21.3828 9.23242 -33.0361l-19.4805 -33.7412c-6.72754 -11.6533 -21.7207 -15.499 -33.2266 -8.52246l-138.735 84.1104l3.47559 -162.204c0.288086 -13.4531 -10.5391 -24.5137 -23.9941 -24.5137h-38.9619
+c-13.4551 0 -24.2822 11.0605 -23.9941 24.5137l3.47461 162.204l-138.735 -84.1113c-11.5059 -6.97656 -26.499 -3.13086 -33.2266 8.52246l-19.4805 33.7412c-6.72852 11.6533 -2.5625 26.5596 9.23242 33.0371l142.21 78.0928l-142.209 78.0918
+c-11.7949 6.47754 -15.9609 21.3838 -9.2334 33.0371l19.4805 33.7412c6.72754 11.6533 21.7207 15.499 33.2266 8.52246l138.735 -84.1104l-3.47363 162.204c-0.289062 13.4531 10.5381 24.5137 23.9932 24.5137h38.9609c13.4561 0 24.2822 -11.0605 23.9941 -24.5137
+l-3.47461 -162.204l138.735 84.1113c11.5068 6.97656 26.499 3.13086 33.2266 -8.52246l19.4805 -33.7412c6.72852 -11.6533 2.5625 -26.5596 -9.23242 -33.0371l-142.21 -78.0928z" />
+ <glyph glyph-name="exclamation-circle" unicode="&#xf06a;"
+d="M504 192c0 -136.997 -111.043 -248 -248 -248s-248 111.003 -248 248c0 136.917 111.043 248 248 248s248 -111.083 248 -248zM256 142c-25.4053 0 -46 -20.5947 -46 -46s20.5947 -46 46 -46s46 20.5947 46 46s-20.5947 46 -46 46zM212.327 307.346l7.41797 -136
+c0.34668 -6.36328 5.6084 -11.3457 11.9814 -11.3457h48.5469c6.37305 0 11.6348 4.98242 11.9814 11.3457l7.41797 136c0.375 6.87402 -5.09766 12.6543 -11.9814 12.6543h-63.3838c-6.88379 0 -12.3555 -5.78027 -11.9805 -12.6543z" />
+ <glyph glyph-name="gift" unicode="&#xf06b;"
+d="M32 0v128h192v-160h-160c-17.7002 0 -32 14.2998 -32 32zM288 -32v160h192v-128c0 -17.7002 -14.2998 -32 -32 -32h-160zM480 288c17.7002 0 32 -14.2998 32 -32v-80c0 -8.7998 -7.2002 -16 -16 -16h-480c-8.7998 0 -16 7.2002 -16 16v80c0 17.7002 14.2998 32 32 32
+h44.0996c-6.2998 12.0996 -10.0996 25.5 -10.0996 40c0 48.5 39.5 88 88 88c41.5996 0 68.5 -21.2998 103 -68.2998c34.5 47 61.4004 68.2998 103 68.2998c48.5 0 88 -39.5 88 -88c0 -14.5 -3.90039 -27.9004 -10.0996 -40h42.0996zM153.9 288h86.0996
+c-51.5 76.7002 -66.2002 80 -86.0996 80c-22.1006 0 -40 -17.9004 -40 -40s17.8994 -40 40 -40zM360 288c22.0996 0 40 17.9004 40 40s-17.9004 40 -40 40c-20.4004 0 -34.7002 -3.5 -86.0996 -80h86.0996z" />
+ <glyph glyph-name="leaf" unicode="&#xf06c;" horiz-adv-x="576"
+d="M546.2 438.3c19 -42.3994 29.7998 -94.3994 29.7998 -144.6c0 -172.4 -110.5 -313.2 -267.5 -324.601c-80.9004 -8.59961 -142.5 33.3008 -174.9 77.2002c-51 -42.7002 -70.3994 -87 -71.8994 -90.5996c-6.7998 -16.2002 -25.4004 -24.1006 -41.7998 -17.2998
+c-16.3008 6.69922 -24.1006 25.2998 -17.5 41.5996c23.5996 57.9004 130.199 212 381.6 212c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16c-130.6 0 -222.7 -38.7998 -286.5 -84.5c-0.700195 6.7998 -1.5 13.5 -1.5 20.5c0 106 86 192 192 192h80
+c63.4004 0 118.9 33.5996 149.9 87.5c6.69922 11.7998 22.6992 11.2998 28.2998 -1.2002z" />
+ <glyph glyph-name="fire" unicode="&#xf06d;" horiz-adv-x="384"
+d="M216 424.14c0 -103.14 168 -125.85 168 -296.14c0 -105.87 -86.1299 -192 -192 -192s-192 86.1299 -192 192c0 58.6699 27.7998 106.84 54.5703 134.96c14.96 15.7305 41.4297 5.2002 41.4297 -16.5v-85.5098c0 -35.1699 27.9805 -64.4902 63.1504 -64.9404
+c35.7393 -0.469727 64.8496 28.3604 64.8496 63.9902c0 88 -176 96.1504 -52.1504 277.18c13.5 19.7305 44.1504 10.7607 44.1504 -13.04z" />
+ <glyph glyph-name="eye" unicode="&#xf06e;" horiz-adv-x="576"
+d="M572.52 206.6c1.9209 -3.79883 3.47949 -10.3379 3.47949 -14.5947s-1.55859 -10.7959 -3.47949 -14.5947c-54.1992 -105.771 -161.59 -177.41 -284.52 -177.41s-230.29 71.5898 -284.52 177.4c-1.9209 3.79883 -3.47949 10.3379 -3.47949 14.5947
+s1.55859 10.7959 3.47949 14.5947c54.1992 105.771 161.59 177.41 284.52 177.41s230.29 -71.5898 284.52 -177.4zM288 48h0.0703125c79.4492 0 143.93 64.4805 143.93 143.93v0.0703125c0 79.4883 -64.5117 144 -144 144s-144 -64.5117 -144 -144s64.5117 -144 144 -144z
+M288 288h0.225586c52.8701 0 95.7803 -42.9092 95.7803 -95.7793c0 -52.8711 -42.9102 -95.7803 -95.7803 -95.7803c-52.8711 0 -95.7803 42.9092 -95.7803 95.7803c0 7.04785 1.49805 18.2871 3.34473 25.0889c6.9834 -5.13867 19.6895 -9.30957 28.3604 -9.30957
+c26.4131 0 47.8496 21.4365 47.8496 47.8496c0 8.6709 -4.1709 21.377 -9.30957 28.3604c6.84375 1.99219 18.1826 3.69043 25.3096 3.79004z" />
+ <glyph glyph-name="eye-slash" unicode="&#xf070;" horiz-adv-x="640"
+d="M320 48c7.24121 0.0673828 18.8896 1.23633 26 2.61035l51.8896 -40.1504c-25.0195 -6.45996 -50.9795 -10.46 -77.8896 -10.46c-122.93 0 -230.29 71.5898 -284.52 177.4c-1.9209 3.79883 -3.47949 10.3379 -3.47949 14.5947s1.55859 10.7959 3.47949 14.5947
+c10.2393 20 22.9297 38.29 36.7197 55.5898l104.899 -81.0693c5.65039 -74.4004 67.0508 -133.11 142.9 -133.11zM633.82 -10.0996c3.41309 -2.65234 6.18359 -8.3125 6.18359 -12.6357c0 -3.02734 -1.50684 -7.42383 -3.36426 -9.81445l-19.6396 -25.2705
+c-2.65234 -3.41211 -8.31152 -6.18262 -12.6338 -6.18262c-3.03125 0 -7.43359 1.51172 -9.82617 3.37305l-588.36 454.729c-3.41016 2.65234 -6.17773 8.31055 -6.17773 12.6309c0 3.0293 1.50879 7.42773 3.36816 9.81934l19.6299 25.2705
+c2.65234 3.41211 8.31152 6.18262 12.6338 6.18262c3.03125 0 7.43359 -1.51172 9.82617 -3.37305l127.22 -98.3301c38.0117 20.7578 104.011 37.6475 147.32 37.7002c122.93 0 230.29 -71.5898 284.52 -177.4c1.9209 -3.79883 3.47949 -10.3379 3.47949 -14.5947
+s-1.55859 -10.7959 -3.47949 -14.5947c-16.7666 -32.6758 -53.166 -78.4033 -81.25 -102.07zM450.1 131.9c8.61035 18.3203 13.9004 38.4697 13.9004 60.0996v0.0800781c0 79.4434 -64.4766 143.92 -143.92 143.92h-0.0800781
+c-28.4697 -0.0214844 -69.3047 -14.8545 -91.1504 -33.1104l73.6104 -56.8896c0.726562 2.71387 1.41602 7.19336 1.54004 10c-0.015625 8.62891 -4.18652 21.2666 -9.30957 28.21c7.17969 2.09668 19.0781 3.79785 26.5576 3.79785
+c52.3076 0 94.7598 -42.4521 94.7598 -94.7598c0 -0.344727 -0.00292969 -0.90332 -0.0078125 -1.24805c-0.112305 -8.43457 -2.44238 -21.749 -5.2002 -29.7197z" />
+ <glyph glyph-name="exclamation-triangle" unicode="&#xf071;" horiz-adv-x="576"
+d="M569.517 7.9873c18.458 -31.9941 -4.71094 -71.9873 -41.5762 -71.9873h-479.887c-36.9365 0 -59.999 40.0547 -41.5771 71.9873l239.946 416.027c18.4668 32.0098 64.7197 31.9512 83.1543 0zM288 94c-25.4053 0 -46 -20.5947 -46 -46s20.5947 -46 46 -46
+s46 20.5947 46 46s-20.5947 46 -46 46zM244.327 259.346l7.41797 -136c0.34668 -6.36328 5.6084 -11.3457 11.9814 -11.3457h48.5469c6.37305 0 11.6348 4.98242 11.9814 11.3457l7.41797 136c0.375 6.87402 -5.09766 12.6543 -11.9814 12.6543h-63.3838
+c-6.88379 0 -12.3555 -5.78027 -11.9805 -12.6543z" />
+ <glyph glyph-name="plane" unicode="&#xf072;" horiz-adv-x="576"
+d="M480 256c35.3496 0 96 -28.6504 96 -64s-60.6504 -64 -96 -64h-114.29l-105.11 -183.94c-2.84961 -4.97949 -8.14941 -8.05957 -13.8896 -8.05957h-65.5c-10.6299 0 -18.2998 10.1797 -15.3799 20.4004l49.0303 171.6h-102.86l-43.2002 -57.5996
+c-3.01953 -4.03027 -7.75977 -6.40039 -12.7998 -6.40039h-39.9902c-10.4102 0 -18.0498 9.78027 -15.5195 19.8799l31.5098 108.12l-31.5098 108.12c-2.53027 10.0996 5.10938 19.8799 15.5195 19.8799h39.9902c5.03027 0 9.78027 -2.37012 12.7998 -6.40039
+l43.2002 -57.5996h102.86l-49.0303 171.61c-2.91992 10.2197 4.75 20.3896 15.3799 20.3896h65.5h0.000976562c5.12598 0 11.3525 -3.61133 13.8994 -8.05957l105.1 -183.94h114.29z" />
+ <glyph glyph-name="calendar-alt" unicode="&#xf073;" horiz-adv-x="448"
+d="M0 -16v272h448v-272c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48zM320 180v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12v40c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12zM320 52v-40
+c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12v40c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12zM192 180v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12v40c0 6.59961 -5.40039 12 -12 12h-40
+c-6.59961 0 -12 -5.40039 -12 -12zM192 52v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12v40c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12zM64 180v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12v40
+c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12zM64 52v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12v40c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12zM400 384c26.5 0 48 -21.5 48 -48v-48h-448v48
+c0 26.5 21.5 48 48 48h48v48c0 8.7998 7.2002 16 16 16h32c8.7998 0 16 -7.2002 16 -16v-48h128v48c0 8.7998 7.2002 16 16 16h32c8.7998 0 16 -7.2002 16 -16v-48h48z" />
+ <glyph glyph-name="random" unicode="&#xf074;"
+d="M504.971 88.9707c9.37305 -9.37305 9.37305 -24.5684 0 -33.9404l-80 -79.9844c-15.0098 -15.0098 -40.9707 -4.49023 -40.9707 16.9707v39.9834h-58.7852c-2.87793 0 -6.80859 1.70801 -8.77246 3.81152l-70.5566 75.5967l53.333 57.1426l52.7812 -56.5508h32v39.9814
+c0 21.4375 25.9434 31.9971 40.9707 16.9707zM12 272c-6.62695 0 -12 5.37305 -12 12v56c0 6.62695 5.37305 12 12 12h110.785h0.000976562c2.87793 0 6.80762 -1.70801 8.77148 -3.81152l70.5566 -75.5967l-53.333 -57.1426l-52.7812 56.5508h-84zM384 272h-32
+l-220.442 -236.188c-2.26953 -2.43066 -5.44629 -3.81152 -8.77246 -3.81152h-110.785c-6.62695 0 -12 5.37305 -12 12v56c0 6.62695 5.37305 12 12 12h84l220.442 236.188c1.96387 2.10352 5.89453 3.81152 8.77246 3.81152h58.7852v39.9814
+c0 21.4365 25.9434 31.9971 40.9707 16.9697l80 -79.9814c9.37305 -9.37207 9.37305 -24.5674 0 -33.9404l-80 -79.9844c-15.0098 -15.0088 -40.9707 -4.48926 -40.9707 16.9707v39.9844z" />
+ <glyph glyph-name="comment" unicode="&#xf075;"
+d="M256 416c141.4 0 256 -93.0996 256 -208s-114.6 -208 -256 -208c-38.4004 0 -74.7002 7.09961 -107.4 19.4004c-24.5996 -19.6006 -74.2998 -51.4004 -140.6 -51.4004c-3.2002 0 -6 1.7998 -7.2998 4.7998s-0.700195 6.40039 1.5 8.7002
+c0.5 0.5 42.2998 45.4004 54.7998 95.7998c-35.5996 35.7002 -57 81.1006 -57 130.7c0 114.9 114.6 208 256 208z" />
+ <glyph glyph-name="magnet" unicode="&#xf076;"
+d="M164.1 288h-152.1c-6.59961 0 -12 5.40039 -12 12v80c0 19.9004 16.0996 36 36 36h104c19.9004 0 36 -16.0996 36 -36v-80c0.0996094 -6.59961 -5.2998 -12 -11.9004 -12zM512.1 300c0 -6.59961 -5.39941 -12 -12 -11.9004h-152c-6.59961 0 -12 5.40039 -12 12v80
+c0 19.9004 16.1006 36 36 36h104c19.9004 0 36 -16.0996 36 -36v-80.0996zM348.1 256h151.9c6.7002 0 12 -5.40039 12 -12.0996c-0.200195 -20.2002 -0.599609 -40.4004 0 -53.2002c0 -150.7 -134.5 -246.7 -255.1 -246.7c-120.601 0 -256.801 96 -256.801 246.6
+c0.600586 13 0.100586 31.9004 0 53.3008c0 6.69922 5.30078 12.0996 12 12.0996h152c6.60059 0 12 -5.40039 12 -12v-52c0 -127.9 160 -128.1 160 0v52c0 6.59961 5.40039 12 12 12z" />
+ <glyph glyph-name="chevron-up" unicode="&#xf077;" horiz-adv-x="448"
+d="M240.971 317.476l194.344 -194.343c9.37207 -9.37305 9.37207 -24.5684 0 -33.9404l-22.667 -22.667c-9.35742 -9.35742 -24.5225 -9.375 -33.9014 -0.0400391l-154.746 154.02l-154.745 -154.021c-9.37891 -9.33496 -24.5439 -9.31738 -33.9014 0.0400391
+l-22.667 22.667c-9.37207 9.37305 -9.37207 24.5684 0 33.9404l194.344 194.343c9.37207 9.37305 24.5674 9.37305 33.9404 0.000976562z" />
+ <glyph glyph-name="chevron-down" unicode="&#xf078;" horiz-adv-x="447"
+d="M207.029 66.5244l-194.344 194.344c-9.37207 9.37305 -9.37207 24.5684 0 33.9404l22.667 22.667c9.35742 9.35742 24.5225 9.375 33.9014 0.0400391l154.746 -154.021l154.745 154.021c9.37891 9.33496 24.5439 9.31738 33.9014 -0.0400391l22.667 -22.667
+c9.37207 -9.37305 9.37207 -24.5684 0 -33.9404l-194.343 -194.344c-9.37305 -9.37207 -24.5684 -9.37207 -33.9414 0z" />
+ <glyph glyph-name="retweet" unicode="&#xf079;" horiz-adv-x="640"
+d="M629.657 104.402l-100.687 -100.687c-9.37305 -9.37207 -24.5674 -9.37207 -33.9404 0l-100.688 100.687c-9.37305 9.37305 -9.37305 24.5684 0 33.9404l10.8232 10.8232c9.56152 9.56152 25.1328 9.33984 34.4189 -0.492188l40.415 -42.792v182.118h-187.549
+c-5.4873 0 -13.0908 3.14941 -16.9707 7.0293l-16 16c-15.1191 15.1201 -4.41113 40.9707 16.9707 40.9707h243.549c13.2549 0 24 -10.7451 24 -24v-222.118l40.416 42.792c9.28516 9.83105 24.8564 10.0537 34.4189 0.492188l10.8232 -10.8232
+c9.37207 -9.37207 9.37207 -24.5684 -0.000976562 -33.9404zM364.519 88.9707l16.001 -16c15.1191 -15.1201 4.41113 -40.9707 -16.9707 -40.9707h-243.549c-13.2549 0 -24 10.7451 -24 24v222.119l-40.416 -42.793c-9.28613 -9.83105 -24.8574 -10.0527 -34.4189 -0.491211
+l-10.8223 10.8223c-9.37305 9.37207 -9.37305 24.5674 0 33.9404l100.688 100.687c9.37207 9.37305 24.5674 9.37305 33.9404 0l100.687 -100.686c9.37305 -9.37207 9.37305 -24.5674 0 -33.9404l-10.8223 -10.8223c-9.5625 -9.5625 -25.1328 -9.33984 -34.4189 0.491211
+l-40.416 42.792v-182.119h187.548h0.000976562c5.4873 0 13.0898 -3.14941 16.9697 -7.0293z" />
+ <glyph glyph-name="shopping-cart" unicode="&#xf07a;" horiz-adv-x="575"
+d="M528.12 146.681c-2.4834 -10.9268 -12.1973 -18.6807 -23.4033 -18.6807h-293.145l6.54492 -32h268.418c15.4004 0 26.8154 -14.3008 23.4033 -29.3193l-5.51758 -24.2754c18.6914 -9.07324 31.5791 -28.2334 31.5791 -50.4053c0 -30.9277 -25.0723 -56 -56 -56
+s-56 25.0723 -56 56c0 15.6738 6.44727 29.835 16.8232 40h-209.647c10.377 -10.165 16.8242 -24.3262 16.8242 -40c0 -30.9277 -25.0723 -56 -56 -56s-56 25.0723 -56 56c0 20.7783 11.3252 38.9004 28.1309 48.5654l-70.248 343.435h-69.8828
+c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24h102.529c11.4004 0 21.2285 -8.02148 23.5127 -19.1904l9.16602 -44.8096h392.782c15.4004 0 26.8154 -14.3008 23.4023 -29.3193z" />
+ <glyph glyph-name="folder" unicode="&#xf07b;"
+d="M464 320c26.5098 0 48 -21.4902 48 -48v-224c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v288c0 26.5098 21.4902 48 48 48h160l64 -64h192z" />
+ <glyph glyph-name="folder-open" unicode="&#xf07c;" horiz-adv-x="576"
+d="M572.694 155.907l-72.4248 -124.155c-10.2236 -17.5273 -34.9883 -31.752 -55.2793 -31.752h-0.000976562h-399.964c-18.5234 0 -30.0645 20.0928 -20.7314 36.0928l72.4238 124.155c10.2246 17.5273 34.9902 31.752 55.2822 31.752v0h399.964
+c18.5234 0 30.0645 -20.0928 20.7305 -36.0928zM152 224c-34.0107 0 -65.7861 -18.25 -82.9229 -47.6279l-69.0771 -118.418v278.046c0 26.5098 21.4902 48 48 48h160l64 -64h160c26.5098 0 48 -21.4902 48 -48v-48h-328z" />
+ <glyph glyph-name="chart-bar" unicode="&#xf080;"
+d="M332.8 128c-6.39941 0 -12.7998 6.40039 -12.7998 12.7998v134.4c0 6.39941 6.40039 12.7998 12.7998 12.7998h38.4004c6.39941 0 12.7998 -6.40039 12.7998 -12.7998v-134.4c0 -6.39941 -6.40039 -12.7998 -12.7998 -12.7998h-38.4004zM428.8 128
+c-6.39941 0 -12.7998 6.40039 -12.7998 12.7998v230.4c0 6.39941 6.40039 12.7998 12.7998 12.7998h38.4004c6.39941 0 12.7998 -6.40039 12.7998 -12.7998v-230.4c0 -6.39941 -6.40039 -12.7998 -12.7998 -12.7998h-38.4004zM140.8 128
+c-6.39941 0 -12.7998 6.40039 -12.7998 12.7998v70.4004c0 6.39941 6.40039 12.7998 12.7998 12.7998h38.4004c6.39941 0 12.7998 -6.40039 12.7998 -12.7998v-70.4004c0 -6.39941 -6.40039 -12.7998 -12.7998 -12.7998h-38.4004zM236.8 128
+c-6.39941 0 -12.7998 6.40039 -12.7998 12.7998v198.4c0 6.39941 6.40039 12.7998 12.7998 12.7998h38.4004c6.39941 0 12.7998 -6.40039 12.7998 -12.7998v-198.4c0 -6.39941 -6.40039 -12.7998 -12.7998 -12.7998h-38.4004zM496 64c8.83984 0 16 -7.16016 16 -16v-32
+c0 -8.83984 -7.16016 -16 -16 -16h-464c-17.6699 0 -32 14.3301 -32 32v336c0 8.83984 7.16016 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-304h432z" />
+ <glyph glyph-name="camera-retro" unicode="&#xf083;"
+d="M48 416h416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48zM48 384c-8.7998 0 -16 -7.2002 -16 -16v-10c0 -3.2998 2.7002 -6 6 -6h116c3.2998 0 6 2.7002 6 6v20c0 3.2998 -2.7002 6 -6 6h-106z
+M474 288c3.2998 0 6 2.7002 6 6v74c0 8.7998 -7.2002 16 -16 16h-252.8c-2 0 -3.90039 -1 -5 -2.7002l-30.2002 -45.2998h-138c-3.2998 0 -6 -2.7002 -6 -6v-36c0 -3.2998 2.7002 -6 6 -6h436zM256 24c66.2002 0 120 53.7998 120 120s-53.7998 120 -120 120
+s-120 -53.7998 -120 -120s53.7998 -120 120 -120zM256 232c48.5 0 88 -39.5 88 -88s-39.5 -88 -88 -88s-88 39.5 -88 88s39.5 88 88 88zM208 128c8.7998 0 16 7.2002 16 16c0 17.5996 14.4004 32 32 32c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16
+c-35.2998 0 -64 -28.7002 -64 -64c0 -8.7998 7.2002 -16 16 -16z" />
+ <glyph glyph-name="key" unicode="&#xf084;"
+d="M512 271.999c0 -97.2021 -78.7979 -175.999 -176 -175.999c-11.2197 0 -22.1904 1.06152 -32.8271 3.06934l-24.0117 -27.0146c-3.95215 -4.44629 -11.9883 -8.05469 -17.9375 -8.05469h-0.000976562h-37.2227v-40c0 -13.2549 -10.7451 -24 -24 -24h-40v-40
+c0 -13.2549 -10.7451 -24 -24 -24h-112c-13.2549 0 -24 10.7451 -24 24v78.0586c0 6.36523 2.5293 12.4707 7.0293 16.9717l161.802 161.802c-5.72266 17.3535 -8.83105 35.8965 -8.83105 55.168c0 97.2021 78.7969 175.999 175.999 176
+c97.4893 0.000976562 176.001 -78.5107 176.001 -176.001zM336 320c0 -26.5098 21.4902 -48 48 -48s48 21.4902 48 48s-21.4902 48 -48 48s-48 -21.4902 -48 -48z" />
+ <glyph glyph-name="cogs" unicode="&#xf085;" horiz-adv-x="639"
+d="M512.1 257l-8.19922 -14.2998c-3 -5.2998 -9.40039 -7.5 -15.1006 -5.40039c-11.7998 4.40039 -22.5996 10.7002 -32.0996 18.6006c-4.60059 3.7998 -5.7998 10.5 -2.7998 15.6992l8.19922 14.3008c-6.89941 8 -12.2998 17.2998 -15.8994 27.3994h-16.5
+c-6 0 -11.2002 4.2998 -12.2002 10.2998c-2 12 -2.09961 24.6006 0 37.1006c1 6 6.2002 10.3994 12.2002 10.3994h16.5c3.59961 10.1006 9 19.4004 15.8994 27.4004l-8.19922 14.2998c-3 5.2002 -1.90039 11.9004 2.7998 15.7002
+c9.5 7.90039 20.3994 14.2002 32.0996 18.5996c5.7002 2.10059 12.1006 -0.0996094 15.1006 -5.39941l8.19922 -14.2998c10.5 1.89941 21.2002 1.89941 31.7002 0l8.2002 14.2998c3 5.2998 9.40039 7.5 15.0996 5.39941c11.8008 -4.39941 22.6006 -10.6992 32.1006 -18.5996
+c4.59961 -3.7998 5.7998 -10.5 2.7998 -15.7002l-8.2002 -14.2998c6.90039 -8 12.2998 -17.2998 15.9004 -27.4004h16.5c6 0 11.2002 -4.2998 12.2002 -10.2998c2 -12 2.09961 -24.5996 0 -37.0996c-1 -6 -6.2002 -10.4004 -12.2002 -10.4004h-16.5
+c-3.60059 -10.0996 -9 -19.3994 -15.9004 -27.3994l8.2002 -14.3008c3 -5.19922 1.90039 -11.8994 -2.7998 -15.6992c-9.5 -7.90039 -20.4004 -14.2002 -32.1006 -18.6006c-5.69922 -2.09961 -12.0996 0.100586 -15.0996 5.40039l-8.2002 14.2998
+c-10.3994 -1.90039 -21.2002 -1.90039 -31.7002 0zM501.6 315.8c38.5 -29.5996 82.4004 14.2998 52.8008 52.7998c-38.5 29.7002 -82.4004 -14.2998 -52.8008 -52.7998zM386.3 161.9l33.7002 -16.8008c10.0996 -5.7998 14.5 -18.0996 10.5 -29.0996
+c-8.90039 -24.2002 -26.4004 -46.4004 -42.5996 -65.7998c-7.40039 -8.90039 -20.2002 -11.1006 -30.3008 -5.2998l-29.0996 16.7998c-16 -13.7002 -34.5996 -24.6006 -54.9004 -31.7002v-33.5996c0 -11.6006 -8.2998 -21.6006 -19.6992 -23.6006
+c-24.6006 -4.2002 -50.4004 -4.39941 -75.9004 0c-11.5 2 -20 11.9004 -20 23.6006v33.5996c-20.2998 7.2002 -38.9004 18 -54.9004 31.7002l-29.0996 -16.7002c-10 -5.7998 -22.9004 -3.59961 -30.2998 5.2998c-16.2002 19.4004 -33.2998 41.6006 -42.2002 65.7002
+c-4 10.9004 0.400391 23.2002 10.5 29.0996l33.2998 16.8008c-3.89941 20.8994 -3.89941 42.3994 0 63.3994l-33.2998 16.9004c-10.0996 5.7998 -14.5996 18.0996 -10.5 29c8.90039 24.2002 26 46.3994 42.2002 65.7998c7.39941 8.90039 20.2002 11.0996 30.2998 5.2998
+l29.0996 -16.7998c16 13.7002 34.6006 24.5996 54.9004 31.7002v33.7002c0 11.5 8.2002 21.5 19.5996 23.5c24.6006 4.19922 50.5 4.39941 76 0.0996094c11.5 -2 20 -11.9004 20 -23.5996v-33.6006c20.3008 -7.2002 38.9004 -18 54.9004 -31.7002l29.0996 16.8008
+c10 5.7998 22.9004 3.59961 30.3008 -5.30078c16.1992 -19.3994 33.1992 -41.5996 42.0996 -65.7998c4 -10.8994 0.0996094 -23.2002 -10 -29.0996l-33.7002 -16.7998c3.90039 -21 3.90039 -42.5 0 -63.5zM268.7 140.8c59.2002 77 -28.7002 164.9 -105.7 105.7
+c-59.2002 -77 28.7002 -164.9 105.7 -105.7zM512.1 -41.9004l-8.19922 -14.2998c-3 -5.2998 -9.40039 -7.5 -15.1006 -5.39941c-11.7998 4.39941 -22.5996 10.6992 -32.0996 18.5996c-4.60059 3.7998 -5.7998 10.5 -2.7998 15.7002l8.19922 14.2998
+c-6.89941 8 -12.2998 17.2998 -15.8994 27.4004h-16.5c-6 0 -11.2002 4.2998 -12.2002 10.2998c-2 12 -2.09961 24.5996 0 37.0996c1 6 6.2002 10.4004 12.2002 10.4004h16.5c3.59961 10.0996 9 19.3994 15.8994 27.3994l-8.19922 14.3008
+c-3 5.19922 -1.90039 11.8994 2.7998 15.6992c9.5 7.90039 20.3994 14.2002 32.0996 18.6006c5.7002 2.09961 12.1006 -0.100586 15.1006 -5.40039l8.19922 -14.2998c10.5 1.90039 21.2002 1.90039 31.7002 0l8.2002 14.2998c3 5.2998 9.40039 7.5 15.0996 5.40039
+c11.8008 -4.40039 22.6006 -10.7002 32.1006 -18.6006c4.59961 -3.7998 5.7998 -10.5 2.7998 -15.6992l-8.2002 -14.3008c6.90039 -8 12.2998 -17.2998 15.9004 -27.3994h16.5c6 0 11.2002 -4.2998 12.2002 -10.2998c2 -12 2.09961 -24.6006 0 -37.1006
+c-1 -6 -6.2002 -10.3994 -12.2002 -10.3994h-16.5c-3.60059 -10.1006 -9 -19.4004 -15.9004 -27.4004l8.2002 -14.2998c3 -5.2002 1.90039 -11.9004 -2.7998 -15.7002c-9.5 -7.90039 -20.4004 -14.2002 -32.1006 -18.5996
+c-5.69922 -2.10059 -12.0996 0.0996094 -15.0996 5.39941l-8.2002 14.2998c-10.3994 -1.89941 -21.2002 -1.89941 -31.7002 0zM501.6 17c38.5 -29.5996 82.4004 14.2998 52.8008 52.7998c-38.5 29.6006 -82.4004 -14.2998 -52.8008 -52.7998z" />
+ <glyph glyph-name="comments" unicode="&#xf086;" horiz-adv-x="576"
+d="M416 256c0 -88.4004 -93.0996 -160 -208 -160c-41 0 -79.0996 9.2998 -111.3 25c-21.7998 -12.7002 -52.1006 -25 -88.7002 -25c-3.2002 0 -6 1.7998 -7.2998 4.7998s-0.700195 6.40039 1.5 8.7002c0.299805 0.299805 22.3994 24.2998 35.7998 54.5
+c-23.9004 26.0996 -38 57.7002 -38 92c0 88.4004 93.0996 160 208 160s208 -71.5996 208 -160zM538 36c13.4004 -30.2998 35.5 -54.2002 35.7998 -54.5c2.2002 -2.40039 2.7998 -5.7998 1.5 -8.7002c-1.2002 -2.89941 -4.09961 -4.7998 -7.2998 -4.7998
+c-36.5996 0 -66.9004 12.2998 -88.7002 25c-32.2002 -15.7998 -70.2998 -25 -111.3 -25c-86.2002 0 -160.2 40.4004 -191.7 97.9004c10.4004 -1.10059 20.9004 -1.90039 31.7002 -1.90039c132.3 0 240 86.0996 240 192c0 6.7998 -0.400391 13.5 -1.2998 20.0996
+c75.7998 -23.8994 129.3 -81.1992 129.3 -148.1c0 -34.2998 -14.0996 -66 -38 -92z" />
+ <glyph glyph-name="star-half" unicode="&#xf089;" horiz-adv-x="308"
+d="M288 448v-439.6l-130.7 -68.6006c-23.3994 -12.2998 -50.8994 7.60059 -46.3994 33.7002l25 145.5l-105.7 103c-19 18.5 -8.5 50.7998 17.7002 54.5996l146.1 21.2002l65.2998 132.4c5.90039 11.8994 17.2998 17.7998 28.7002 17.7998z" />
+ <glyph glyph-name="thumbtack" unicode="&#xf08d;" horiz-adv-x="384"
+d="M298.028 233.733c47.9893 -22.3135 85.9717 -62.5508 85.9727 -113.733c0 -13.2549 -10.7451 -24 -24 -24h-136v-104.007c0 -1.04297 -0.378906 -2.64551 -0.844727 -3.57812l-24 -48c-2.94727 -5.89258 -11.3701 -5.88184 -14.3115 0l-24 48
+c-0.555664 1.11133 -0.844727 2.33594 -0.844727 3.57812v104.007h-136c-13.2549 0 -24 10.7451 -24 24c0 50.7393 37.4648 91.1797 85.9717 113.733l12.2354 118.267h-42.207c-13.2549 0 -24 10.7451 -24 24v48c0 13.2549 10.7451 24 24 24h272
+c13.2549 0 24 -10.7451 24 -24v-48c0 -13.2549 -10.7451 -24 -24 -24h-42.207z" />
+ <glyph glyph-name="trophy" unicode="&#xf091;" horiz-adv-x="576"
+d="M552 384c13.2998 0 24 -10.7002 24 -24v-56c0 -35.7002 -22.5996 -72.4004 -61.9004 -100.7c-31.3994 -22.7002 -69.6992 -37.0996 -110 -41.7002c-31.3994 -52.0996 -68.0996 -73.5996 -68.0996 -73.5996v-72h48c35.2998 0 64 -20.7002 64 -56v-12
+c0 -6.59961 -5.40039 -12 -12 -12h-296c-6.59961 0 -12 5.40039 -12 12v12c0 35.2998 28.7002 56 64 56h48v72s-36.7002 21.5 -68.0996 73.5996c-40.2002 4.60059 -78.5 19 -110 41.7002c-39.4004 28.2998 -61.9004 65 -61.9004 100.7v56c0 13.2998 10.7002 24 24 24h104v40
+c0 13.2998 10.7002 24 24 24h272c13.2998 0 24 -10.7002 24 -24v-40h104zM99.2998 255.2c12.5 -9 26.6006 -16.2002 41.7002 -21.4004c-7 25 -11.7998 53.6006 -12.7998 86.2002h-64.2002v-16c0 -11.5996 10.9004 -31.2002 35.2998 -48.7998zM512 304v16h-64.2998
+c-1 -32.5996 -5.7998 -61.2002 -12.7998 -86.2002c15.0996 5.2002 29.2998 12.4004 41.7998 21.4004c17.5996 12.7002 35.2998 32.7002 35.2998 48.7998z" />
+ <glyph glyph-name="upload" unicode="&#xf093;"
+d="M296 64h-80c-13.2998 0 -24 10.7002 -24 24v168h-87.7002c-17.7998 0 -26.7002 21.5 -14.0996 34.0996l152.1 152.2c7.5 7.5 19.7998 7.5 27.2998 0l152.2 -152.2c12.6006 -12.5996 3.7002 -34.0996 -14.0996 -34.0996h-87.7002v-168c0 -13.2998 -10.7002 -24 -24 -24z
+M512 72v-112c0 -13.2998 -10.7002 -24 -24 -24h-464c-13.2998 0 -24 10.7002 -24 24v112c0 13.2998 10.7002 24 24 24h136v-8c0 -30.9004 25.0996 -56 56 -56h80c30.9004 0 56 25.0996 56 56v8h136c13.2998 0 24 -10.7002 24 -24zM388 -16c0 11 -9 20 -20 20s-20 -9 -20 -20
+s9 -20 20 -20s20 9 20 20zM452 -16c0 11 -9 20 -20 20s-20 -9 -20 -20s9 -20 20 -20s20 9 20 20z" />
+ <glyph glyph-name="lemon" unicode="&#xf094;"
+d="M489.038 425.037c23.0938 -23.0938 28.8916 -54.3906 16.833 -75.0928c-34.3115 -58.9043 53.0762 -181.249 -86.7461 -321.071s-262.167 -52.4326 -321.068 -86.7432c-20.7031 -12.0586 -52 -6.2627 -75.0947 16.832c-23.0928 23.0938 -28.8916 54.3906 -16.833 75.0928
+c34.3125 58.9043 -53.0781 181.247 86.7451 321.07s262.167 52.4336 321.073 86.7461c20.7012 12.0586 51.9971 6.25879 75.0908 -16.834zM243.881 352.478c8.57227 2.14355 13.7832 10.8291 11.6416 19.4023c-2.14258 8.57324 -10.8281 13.7852 -19.4033 11.6426
+c-69.8027 -17.4521 -154.218 -101.949 -171.643 -171.643c-2.1416 -8.57324 3.07031 -17.2588 11.6426 -19.4033c1.30273 -0.324219 2.6084 -0.480469 3.89258 -0.480469c7.16895 0 13.6943 4.85352 15.5117 12.124c14.5498 58.2031 90.1689 133.811 148.357 148.357z" />
+ <glyph glyph-name="phone" unicode="&#xf095;"
+d="M493.4 423.4c10.8994 -2.5 18.5996 -12.2002 18.5996 -23.4004c0 -256.5 -207.9 -464 -464 -464c-11.2998 0 -20.9004 7.7998 -23.4004 18.5996l-24 104c-2.59961 11.3008 3.30078 22.9004 14 27.6006l112 48c9.80078 4.2002 21.2002 1.39941 28 -6.90039
+l49.6006 -60.5996c78.2998 36.7002 141.2 100.5 177.2 177.2l-60.6006 49.5996c-8.2998 6.7002 -11.0996 18.2002 -6.89941 28l48 112c4.59961 10.5996 16.1992 16.5 27.5 13.9004z" />
+ <glyph glyph-name="phone-square" unicode="&#xf098;" horiz-adv-x="448"
+d="M400 416c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h352zM94 32c160.055 0 290 129.708 290 290v0c0 6.58691 -5.20898 13.1338 -11.6279 14.6143l-65 14.998
+c-0.918945 0.211914 -2.42969 0.383789 -3.37305 0.383789c-5.45996 0 -11.6367 -4.07324 -13.7871 -9.09082l-30 -69.998c-0.668945 -1.5625 -1.21191 -4.20898 -1.21191 -5.9082c0 -3.92383 2.46387 -9.125 5.50098 -11.6104l37.8857 -30.9971
+c-22.4834 -47.9219 -61.8369 -87.8164 -110.78 -110.779l-30.9971 37.8848c-2.48535 3.03711 -7.68652 5.50195 -11.6104 5.50195c-1.69922 0 -4.3457 -0.543945 -5.9082 -1.21289l-69.998 -29.999c-5.01855 -2.15039 -9.09082 -8.32715 -9.09082 -13.7871
+c0 -0.943359 0.171875 -2.4541 0.383789 -3.37305l14.998 -65c1.55957 -6.75391 7.58301 -11.627 14.6162 -11.627z" />
+ <glyph glyph-name="unlock" unicode="&#xf09c;" horiz-adv-x="448"
+d="M400 192c26.5 0 48 -21.5 48 -48v-160c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v160c0 26.5 21.5 48 48 48h24v102.5c0 84 67.5 153.2 151.5 153.5s152.5 -68 152.5 -152v-16c0 -13.2998 -10.7002 -24 -24 -24h-32c-13.2998 0 -24 10.7002 -24 24v16
+c0 39.9004 -32.7002 72.4004 -72.7002 72c-39.5996 -0.400391 -71.2998 -33.2998 -71.2998 -72.9004v-103.1h248z" />
+ <glyph glyph-name="credit-card" unicode="&#xf09d;" horiz-adv-x="576"
+d="M0 16v176h576v-176c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48zM192 84v-40c0 -6.59961 5.40039 -12 12 -12h136c6.59961 0 12 5.40039 12 12v40c0 6.59961 -5.40039 12 -12 12h-136c-6.59961 0 -12 -5.40039 -12 -12zM64 84v-40
+c0 -6.59961 5.40039 -12 12 -12h72c6.59961 0 12 5.40039 12 12v40c0 6.59961 -5.40039 12 -12 12h-72c-6.59961 0 -12 -5.40039 -12 -12zM576 368v-48h-576v48c0 26.5 21.5 48 48 48h480c26.5 0 48 -21.5 48 -48z" />
+ <glyph glyph-name="rss" unicode="&#xf09e;" horiz-adv-x="448"
+d="M128.081 32.041c0 -35.3691 -28.6719 -64.041 -64.041 -64.041s-64.04 28.6719 -64.04 64.041s28.6719 64.041 64.041 64.041s64.04 -28.6729 64.04 -64.041zM303.741 -15.209c0.494141 -9.13477 -6.84668 -16.791 -15.9951 -16.79h-48.0693
+c-8.41406 0 -15.4707 6.49023 -16.0176 14.8867c-7.29883 112.07 -96.9404 201.488 -208.772 208.772c-8.39648 0.545898 -14.8867 7.60254 -14.8867 16.0176v48.0693c0 9.14746 7.65625 16.4883 16.791 15.9941c154.765 -8.36328 278.596 -132.351 286.95 -286.95z
+M447.99 -15.4971c0.324219 -9.03027 -6.97168 -16.5029 -16.0049 -16.5039h-48.0684c-8.62598 0 -15.6455 6.83496 -15.999 15.4531c-7.83789 191.148 -161.286 344.626 -352.465 352.465c-8.61816 0.354492 -15.4531 7.37402 -15.4531 15.999v48.0684
+c0 9.03418 7.47266 16.3301 16.5029 16.0059c234.962 -8.43555 423.093 -197.667 431.487 -431.487z" />
+ <glyph glyph-name="hdd" unicode="&#xf0a0;" horiz-adv-x="576"
+d="M576 144v-96c0 -26.5098 -21.4902 -48 -48 -48h-480c-26.5098 0 -48 21.4902 -48 48v96c0 26.5098 21.4902 48 48 48h480c26.5098 0 48 -21.4902 48 -48zM528 224h-480h-0.0693359c-8.81738 0 -22.5742 -2.76172 -30.708 -6.16504l96.5283 144.791
+c7.86621 11.7988 25.7578 21.374 39.9385 21.374h268.621c14.1807 0 32.0732 -9.57617 39.9395 -21.374l96.5273 -144.791c-8.13379 3.40332 -21.8906 6.16504 -30.708 6.16504h-0.0693359zM480 128c-17.6729 0 -32 -14.3271 -32 -32s14.3271 -32 32 -32s32 14.3271 32 32
+s-14.3271 32 -32 32zM384 128c-17.6729 0 -32 -14.3271 -32 -32s14.3271 -32 32 -32s32 14.3271 32 32s-14.3271 32 -32 32z" />
+ <glyph glyph-name="bullhorn" unicode="&#xf0a1;" horiz-adv-x="576"
+d="M576 208c0 -23.6299 -12.9502 -44.04 -32.0098 -55.1299v-152.87c0 -9.21973 -7.08008 -32 -32 -32c-6.19336 0.00585938 -15.1445 3.15039 -19.9805 7.01953l-85.0293 68.0303c-42.7002 34.1406 -96.3203 52.9502 -150.98 52.9502h-28.0801
+c-2.79004 -10.21 -4.41016 -20.8896 -4.41016 -32c0 -29.0801 9.75 -55.9199 26.1504 -77.4404c15.79 -20.7197 0.149414 -50.5596 -25.9004 -50.5596h-74.2793c-11.8809 0 -23.2109 6.37012 -28.4004 17.0596c-16.2998 33.5908 -25.5605 71.1709 -25.5605 110.94
+c0 10.8604 0.790039 21.5195 2.18066 32h-33.7002c-35.3496 0 -64 28.6504 -64 64v96c0 35.3496 28.6504 64 64 64h192c54.6602 0 108.28 18.8096 150.99 52.9502l85.0293 68.0303c5.79004 4.63965 12.8604 7.01953 19.9805 7.01953c25.0195 0 32 -23.2598 32 -32.0098
+v-152.87c19.0498 -11.0801 32 -31.4902 32 -55.1201zM480 66.5801v282.84l-33.0498 -26.4395c-54 -43.2002 -121.83 -66.9805 -190.95 -66.9805v-96c69.1201 0 136.95 -23.7803 190.95 -66.9805z" />
+ <glyph glyph-name="certificate" unicode="&#xf0a3;" horiz-adv-x="511"
+d="M458.622 192.08l45.9844 -45.0039c13.7012 -12.9727 7.32227 -36.0371 -10.6641 -40.3389l-62.6504 -15.9902l17.6611 -62.0146c4.99023 -17.834 -11.8252 -34.665 -29.6611 -29.6719l-61.9941 17.667l-15.9834 -62.6709
+c-4.33887 -18.1533 -27.8252 -24.1553 -40.3252 -10.668l-44.9893 46.001l-44.9912 -46.001c-12.6289 -13.3496 -35.8857 -7.90625 -40.3252 10.668l-15.9834 62.6709l-61.9941 -17.667c-17.832 -4.99121 -34.6523 11.833 -29.6611 29.6719l17.6611 62.0146
+l-62.6504 15.9902c-17.9795 4.2998 -24.3721 27.3613 -10.6641 40.3389l45.9854 45.0039l-45.9854 45.0049c-13.7012 12.9707 -7.32227 36.0371 10.665 40.3379l62.6504 15.9902l-17.6611 62.0146c-4.99023 17.834 11.8242 34.665 29.6611 29.6709l61.9951 -17.667
+l15.9834 62.6709c4.27832 17.9023 27.6953 24.0195 40.3252 10.6689l44.9893 -46.3418l44.9902 46.3428c12.7744 13.5039 36.0947 7.03027 40.3252 -10.6689l15.9834 -62.6709l61.9941 17.667c17.832 4.99219 34.6523 -11.833 29.6611 -29.6709l-17.6611 -62.0146
+l62.6504 -15.9902c17.9795 -4.2998 24.3721 -27.3623 10.6641 -40.3389z" />
+ <glyph glyph-name="hand-point-right" unicode="&#xf0a4;"
+d="M512 248.348c0 -23.625 -20.6504 -43.8252 -44.7998 -43.8252h-99.8516c16.3408 -17.0488 18.3467 -49.7666 -6.29883 -70.9443c14.2881 -22.8291 2.14746 -53.0176 -16.4502 -62.3154c8.97461 -49.1406 -21.9453 -71.2627 -72.5996 -71.2627
+c-2.74609 0 -13.2764 0.203125 -16 0.195312c-61.9707 -0.167969 -76.8936 31.0645 -123.731 38.3145c-11.6729 1.80762 -20.2686 11.8916 -20.2686 23.7041v171.525l0.00195312 0.000976562c0.0107422 18.3662 10.6074 35.8887 28.4639 43.8447
+c28.8857 12.9941 95.4131 49.0381 107.534 77.3232c7.79688 18.1934 21.3838 29.084 40 29.0918c34.2217 0.0136719 57.752 -35.0977 44.1191 -66.9082c-3.58301 -8.3584 -8.3125 -16.6699 -14.1533 -24.918h149.234c23.4502 0 44.7998 -20.543 44.7998 -43.8262zM96 248
+v-192c0 -13.2549 -10.7451 -24 -24 -24h-48c-13.2549 0 -24 10.7451 -24 24v192c0 13.2549 10.7451 24 24 24h48c13.2549 0 24 -10.7451 24 -24zM68 80c0 11.0459 -8.9541 20 -20 20s-20 -8.9541 -20 -20s8.9541 -20 20 -20s20 8.9541 20 20z" />
+ <glyph glyph-name="hand-point-left" unicode="&#xf0a5;"
+d="M44.7998 292.174h149.234c-5.84082 8.24805 -10.5703 16.5586 -14.1533 24.918c-13.6328 31.8105 9.89746 66.9219 44.1191 66.9082c18.6162 -0.0078125 32.2031 -10.8975 40 -29.0918c12.1221 -28.2861 78.6484 -64.3291 107.534 -77.3232
+c17.8564 -7.95605 28.4531 -25.4785 28.4639 -43.8447l0.00195312 -0.000976562v-171.526c0 -11.8115 -8.5957 -21.8965 -20.2686 -23.7031c-46.8379 -7.25 -61.7607 -38.4824 -123.731 -38.3145c-2.72363 0.00683594 -13.2539 -0.195312 -16 -0.195312
+c-50.6543 0 -81.5742 22.1221 -72.5996 71.2627c-18.5977 9.29688 -30.7383 39.4863 -16.4502 62.3154c-24.6455 21.1768 -22.6396 53.8955 -6.29883 70.9443h-99.8516c-24.1494 0 -44.7998 20.2002 -44.7998 43.8252c0 23.2832 21.3496 43.8262 44.7998 43.8262zM440 272
+h48c13.2549 0 24 -10.7451 24 -24v-192c0 -13.2549 -10.7451 -24 -24 -24h-48c-13.2549 0 -24 10.7451 -24 24v192c0 13.2549 10.7451 24 24 24zM464 60c11.0459 0 20 8.9541 20 20s-8.9541 20 -20 20s-20 -8.9541 -20 -20s8.9541 -20 20 -20z" />
+ <glyph glyph-name="hand-point-up" unicode="&#xf0a6;" horiz-adv-x="384"
+d="M135.652 448c23.625 0 43.8252 -20.6504 43.8252 -44.7998v-99.8516c17.0488 16.3408 49.7666 18.3467 70.9443 -6.29883c22.8291 14.2881 53.0176 2.14746 62.3154 -16.4502c49.1406 8.97461 71.2627 -21.9453 71.2627 -72.5996
+c0 -2.74609 -0.203125 -13.2764 -0.195312 -16c0.167969 -61.9707 -31.0645 -76.8936 -38.3145 -123.731c-1.80762 -11.6729 -11.8916 -20.2686 -23.7041 -20.2686h-171.525l-0.000976562 0.00195312c-18.3662 0.0107422 -35.8887 10.6074 -43.8447 28.4639
+c-12.9941 28.8857 -49.0381 95.4121 -77.3232 107.534c-18.1943 7.79688 -29.084 21.3838 -29.0918 40c-0.0136719 34.2217 35.0977 57.752 66.9082 44.1191c8.3584 -3.58301 16.6699 -8.3125 24.918 -14.1533v149.234c0 23.4502 20.543 44.7998 43.8262 44.7998zM136 32
+h192c13.2549 0 24 -10.7451 24 -24v-48c0 -13.2549 -10.7451 -24 -24 -24h-192c-13.2549 0 -24 10.7451 -24 24v48c0 13.2549 10.7451 24 24 24zM304 4c-11.0459 0 -20 -8.9541 -20 -20s8.9541 -20 20 -20s20 8.9541 20 20s-8.9541 20 -20 20z" />
+ <glyph glyph-name="hand-point-down" unicode="&#xf0a7;" horiz-adv-x="384"
+d="M91.8262 -19.2002v149.234c-8.24805 -5.84082 -16.5586 -10.5703 -24.918 -14.1533c-31.8105 -13.6328 -66.9219 9.89746 -66.9082 44.1191c0.0078125 18.6162 10.8975 32.2031 29.0918 40c28.2861 12.1221 64.3291 78.6484 77.3232 107.534
+c7.95605 17.8564 25.4785 28.4531 43.8447 28.4639l0.000976562 0.00195312h171.526c11.8115 0 21.8965 -8.5957 23.7031 -20.2686c7.25 -46.8379 38.4824 -61.7607 38.3145 -123.731c-0.00683594 -2.72363 0.195312 -13.2539 0.195312 -16
+c0 -50.6543 -22.1221 -81.5742 -71.2627 -72.5996c-9.29688 -18.5977 -39.4863 -30.7383 -62.3154 -16.4502c-21.1768 -24.6455 -53.8955 -22.6396 -70.9443 -6.29883v-99.8516c0 -24.1494 -20.2002 -44.7998 -43.8252 -44.7998
+c-23.2832 0 -43.8262 21.3496 -43.8262 44.7998zM112 376v48c0 13.2549 10.7451 24 24 24h192c13.2549 0 24 -10.7451 24 -24v-48c0 -13.2549 -10.7451 -24 -24 -24h-192c-13.2549 0 -24 10.7451 -24 24zM324 400c0 11.0459 -8.9541 20 -20 20s-20 -8.9541 -20 -20
+s8.9541 -20 20 -20s20 8.9541 20 20z" />
+ <glyph glyph-name="arrow-circle-left" unicode="&#xf0a8;"
+d="M256 -56c-137 0 -248 111 -248 248s111 248 248 248s248 -111 248 -248s-111 -248 -248 -248zM284.9 87.5996l-75.5 72.4004h182.6c13.2998 0 24 10.7002 24 24v16c0 13.2998 -10.7002 24 -24 24h-182.6l75.5 72.4004c9.69922 9.2998 9.89941 24.7998 0.399414 34.2998
+l-11 10.8994c-9.39941 9.40039 -24.5996 9.40039 -33.8994 0l-132.7 -132.6c-9.40039 -9.40039 -9.40039 -24.5996 0 -33.9004l132.7 -132.699c9.39941 -9.40039 24.5996 -9.40039 33.8994 0l11 10.8994c9.5 9.5 9.2998 25 -0.399414 34.2998z" />
+ <glyph glyph-name="arrow-circle-right" unicode="&#xf0a9;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM227.1 296.4l75.5 -72.4004h-182.6c-13.2998 0 -24 -10.7002 -24 -24v-16c0 -13.2998 10.7002 -24 24 -24h182.6l-75.5 -72.4004
+c-9.69922 -9.2998 -9.89941 -24.7998 -0.399414 -34.2998l11 -10.8994c9.39941 -9.40039 24.5996 -9.40039 33.8994 0l132.7 132.6c9.40039 9.40039 9.40039 24.5996 0 33.9004l-132.7 132.8c-9.39941 9.39941 -24.5996 9.39941 -33.8994 0l-11 -10.9004
+c-9.5 -9.59961 -9.2998 -25.0996 0.399414 -34.3994z" />
+ <glyph glyph-name="arrow-circle-up" unicode="&#xf0aa;"
+d="M8 192c0 137 111 248 248 248s248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248zM151.6 163.1l72.4004 75.5v-182.6c0 -13.2998 10.7002 -24 24 -24h16c13.2998 0 24 10.7002 24 24v182.6l72.4004 -75.5c9.2998 -9.69922 24.7998 -9.89941 34.2998 -0.399414
+l10.8994 11c9.40039 9.39941 9.40039 24.5996 0 33.8994l-132.6 132.7c-9.40039 9.40039 -24.5996 9.40039 -33.9004 0l-132.8 -132.7c-9.39941 -9.39941 -9.39941 -24.5996 0 -33.8994l10.9004 -11c9.59961 -9.5 25.0996 -9.2998 34.3994 0.399414z" />
+ <glyph glyph-name="arrow-circle-down" unicode="&#xf0ab;"
+d="M504 192c0 -137 -111 -248 -248 -248s-248 111 -248 248s111 248 248 248s248 -111 248 -248zM360.4 220.9l-72.4004 -75.5v182.6c0 13.2998 -10.7002 24 -24 24h-16c-13.2998 0 -24 -10.7002 -24 -24v-182.6l-72.4004 75.5
+c-9.2998 9.69922 -24.7998 9.89941 -34.2998 0.399414l-10.8994 -11c-9.40039 -9.39941 -9.40039 -24.5996 0 -33.8994l132.6 -132.7c9.40039 -9.40039 24.5996 -9.40039 33.9004 0l132.699 132.7c9.40039 9.39941 9.40039 24.5996 0 33.8994l-10.8994 11
+c-9.5 9.5 -25 9.2998 -34.2998 -0.399414z" />
+ <glyph glyph-name="globe" unicode="&#xf0ac;" horiz-adv-x="495"
+d="M336.5 288h-177c14.5 89.2998 48.7002 152 88.5 152s74 -62.7002 88.5 -152zM152 192c0 22.2002 1.2002 43.5 3.2998 64h185.3c2.10059 -20.5 3.30078 -41.7998 3.30078 -64s-1.2002 -43.5 -3.30078 -64h-185.3c-2.09961 20.5 -3.2998 41.7998 -3.2998 64zM476.7 288
+h-108c-8.7998 56.9004 -25.6006 107.8 -50 141.6c71.5 -21.1992 129.399 -73.6992 158 -141.6zM177.2 429.6c-24.4004 -33.7998 -41.2002 -84.6992 -49.9004 -141.6h-108c28.5 67.9004 86.5 120.4 157.9 141.6zM487.4 256c5.39941 -20.5 8.5 -41.7998 8.5 -64
+s-3.10059 -43.5 -8.60059 -64h-114.6c2.09961 21 3.2998 42.5 3.2998 64s-1.2002 43 -3.2998 64h114.7zM120 192c0 -21.5 1.2002 -43 3.2002 -64h-114.601c-5.39941 20.5 -8.59961 41.7998 -8.59961 64s3.2002 43.5 8.59961 64h114.7
+c-2.09961 -21 -3.2998 -42.5 -3.2998 -64zM159.5 96h177c-14.5 -89.2998 -48.7002 -152 -88.5 -152s-74 62.7002 -88.5 152zM318.8 -45.5996c24.4004 33.7998 41.2002 84.6992 50 141.6h108c-28.5996 -67.9004 -86.5996 -120.4 -158 -141.6zM19.2998 96h108
+c8.7998 -56.9004 25.6006 -107.8 50 -141.6c-71.5 21.1992 -129.399 73.6992 -158 141.6z" />
+ <glyph glyph-name="wrench" unicode="&#xf0ad;"
+d="M507.73 338.9c11.7891 -47.4102 -0.84082 -99.6602 -37.9102 -136.73c-39.9004 -39.9004 -97.25 -50.9297 -147.37 -34.2197l-213.21 -213.21c-24.9902 -24.9902 -65.5098 -24.9902 -90.5 0s-24.9902 65.5098 0 90.5l213.39 213.39
+c-16.5 50.1006 -5.58984 107.561 34.0498 147.2c37.0303 37.0195 89.2002 49.6699 136.58 37.9297c9.08984 -2.25977 12.2803 -13.54 5.66016 -20.1602l-74.3604 -74.3594l11.3105 -67.8799l67.8799 -11.3105l74.3604 74.3604
+c6.58008 6.58008 17.8799 3.51953 20.1201 -5.50977zM64 -24c13.25 0 24 10.75 24 24c0 13.2598 -10.75 24 -24 24s-24 -10.7402 -24 -24c0 -13.25 10.75 -24 24 -24z" />
+ <glyph glyph-name="tasks" unicode="&#xf0ae;"
+d="M208 316c-8.7998 0 -16 7.2002 -16 16v40c0 8.7998 7.2002 16 16 16h288c8.7998 0 16 -7.2002 16 -16v-40c0 -8.7998 -7.2002 -16 -16 -16h-288zM208 156c-8.7998 0 -16 7.2002 -16 16v40c0 8.7998 7.2002 16 16 16h288c8.7998 0 16 -7.2002 16 -16v-40
+c0 -8.7998 -7.2002 -16 -16 -16h-288zM208 -4c-8.7998 0 -16 7.2002 -16 16v40c0 8.7998 7.2002 16 16 16h288c8.7998 0 16 -7.2002 16 -16v-40c0 -8.7998 -7.2002 -16 -16 -16h-288zM64 80c26.5 0 48 -21.5 48 -48s-21.5 -48 -48 -48s-48.5996 21.5 -48.5996 48
+s22.0996 48 48.5996 48zM156.5 379l-72.2002 -72.2002l-15.5996 -15.5996c-4.7002 -4.7002 -12.9004 -4.7002 -17.6006 0l-47.5996 47.3994c-4.7002 4.7002 -4.7002 12.3008 0 17l15.7002 15.7002c4.7002 4.7002 12.2998 4.7002 17 0l22.7002 -22.0996l63.6992 63.2998
+c4.7002 4.7002 12.3008 4.7002 17 0l17 -16.5c4.60059 -4.7002 4.60059 -12.2998 -0.0996094 -17zM156.5 219.4l-72.2002 -72.2002l-15.7002 -15.7002c-4.69922 -4.7002 -12.8994 -4.7002 -17.5996 0l-47.5 47.5c-4.7002 4.7002 -4.7002 12.2998 0 17l15.7002 15.7002
+c4.7002 4.7002 12.2998 4.7002 17 0l22.7002 -22.1006l63.6992 63.7002c4.7002 4.7002 12.3008 4.7002 17 0l17 -17c4.60059 -4.59961 4.60059 -12.2002 -0.0996094 -16.8994z" />
+ <glyph glyph-name="filter" unicode="&#xf0b0;"
+d="M487.976 448c21.3623 0 32.0459 -25.8965 16.9717 -40.9707l-184.947 -184.971v-262.039c0 -19.5127 -21.9805 -30.71 -37.7627 -19.6611l-80 55.9795c-6.41602 4.49219 -10.2373 11.8311 -10.2373 19.6621v206.059l-184.942 184.971
+c-15.1045 15.1045 -4.34766 40.9707 16.9707 40.9707h463.947z" />
+ <glyph glyph-name="briefcase" unicode="&#xf0b1;"
+d="M320 112v48h192v-144c0 -25.5996 -22.4004 -48 -48 -48h-416c-25.5996 0 -48 22.4004 -48 48v144h192v-48c0 -8.83984 7.16016 -16 16 -16h96c8.83984 0 16 7.16016 16 16zM464 320c25.5996 0 48 -22.4004 48 -48v-80h-512v80c0 25.5996 22.4004 48 48 48h80v48
+c0 25.5996 22.4004 48 48 48h160c25.5996 0 48 -22.4004 48 -48v-48h80zM320 320v32h-128v-32h128z" />
+ <glyph glyph-name="arrows-alt" unicode="&#xf0b2;" horiz-adv-x="511"
+d="M352.201 22.2246l-79.1963 -79.1953c-9.37305 -9.37305 -24.5684 -9.37305 -33.9404 0l-79.1963 79.1953c-15.1191 15.1191 -4.41113 40.9717 16.9707 40.9707h51.1621l-0.000976562 100.805h-100.804v-51.1621c0 -21.3818 -25.8516 -32.0898 -40.9717 -16.9707
+l-79.1953 79.1963c-9.37305 9.37207 -9.37305 24.5684 0 33.9404l79.1953 79.1963c15.1191 15.1191 40.9717 4.41113 40.9717 -16.9717v-51.2285h100.804v100.804h-51.2305c-21.3818 0 -32.0898 25.8516 -16.9707 40.9717l79.1963 79.1953
+c9.37305 9.37305 24.5684 9.37305 33.9404 0l79.1963 -79.1953c15.1191 -15.1191 4.41113 -40.9717 -16.9707 -40.9717h-51.1621v-100.804h100.804v51.1621c0 21.3818 25.8516 32.0898 40.9707 16.9707l79.1953 -79.1963c9.37305 -9.37207 9.37305 -24.5684 0 -33.9404
+l-79.1953 -79.1963c-15.1191 -15.1191 -40.9717 -4.41113 -40.9707 16.9717v51.2285h-100.803v-100.804h51.2305c21.3818 0 32.0898 -25.8516 16.9707 -40.9717z" />
+ <glyph glyph-name="users" unicode="&#xf0c0;" horiz-adv-x="640"
+d="M96 224c-35.2998 0 -64 28.7002 -64 64s28.7002 64 64 64s64 -28.7002 64 -64s-28.7002 -64 -64 -64zM544 224c-35.2998 0 -64 28.7002 -64 64s28.7002 64 64 64s64 -28.7002 64 -64s-28.7002 -64 -64 -64zM576 192c35.2998 0 64 -28.7002 64 -64v-32
+c0 -17.7002 -14.2998 -32 -32 -32h-66c-6.2002 47.4004 -34.7998 87.2998 -75.0996 109.4c11.5996 11.5 27.5 18.5996 45.0996 18.5996h64zM320 192c-61.9004 0 -112 50.0996 -112 112s50.0996 112 112 112s112 -50.0996 112 -112s-50.0996 -112 -112 -112zM396.8 160
+c63.6006 0 115.2 -51.5996 115.2 -115.2v-28.7998c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v28.7998c0 63.6006 51.5996 115.2 115.2 115.2h8.2998c20.9004 -10 43.9004 -16 68.5 -16s47.7002 6 68.5 16h8.2998zM173.1 173.4
+c-40.2998 -22.1006 -68.8994 -62 -75.1992 -109.4h-65.9004c-17.7002 0 -32 14.2998 -32 32v32c0 35.2998 28.7002 64 64 64h64c17.5996 0 33.5 -7.09961 45.0996 -18.5996z" />
+ <glyph glyph-name="link" unicode="&#xf0c1;"
+d="M326.612 262.609c59.7471 -59.8096 58.9268 -155.698 0.359375 -214.591c-0.109375 -0.119141 -0.239258 -0.25 -0.359375 -0.369141l-67.2002 -67.2002c-59.2705 -59.2705 -155.699 -59.2627 -214.96 0c-59.2705 59.2598 -59.2705 155.7 0 214.96l37.1055 37.1055
+c9.84082 9.84082 26.7861 3.30078 27.2939 -10.6055c0.648438 -17.7227 3.82617 -35.5273 9.69043 -52.7207c1.98633 -5.82227 0.567383 -12.2627 -3.7832 -16.6123l-13.0869 -13.0869c-28.0254 -28.0264 -28.9053 -73.6602 -1.15527 -101.96
+c28.0244 -28.5791 74.0859 -28.749 102.325 -0.510742l67.2002 67.1904c28.1914 28.1914 28.0732 73.7568 0 101.83c-3.70117 3.69434 -7.42871 6.56348 -10.3408 8.56934c-3.66504 2.51562 -6.77734 8.16309 -6.94727 12.6055
+c-0.395508 10.5674 3.34766 21.4561 11.6982 29.8057l21.0537 21.0557c5.52148 5.52051 14.1826 6.19922 20.584 1.73047c6.08301 -4.24707 15.2764 -11.9512 20.5225 -17.1963zM467.547 403.551c59.2705 -59.2598 59.2705 -155.7 -0.000976562 -214.959l-37.1055 -37.1055
+c-9.84082 -9.83984 -26.7852 -3.30078 -27.2939 10.6055c-0.648438 17.7227 -3.82617 35.5273 -9.69043 52.7217c-1.98633 5.82129 -0.567383 12.2617 3.7832 16.6113l13.0869 13.0869c28.0264 28.0264 28.9053 73.6602 1.15527 101.96
+c-28.0254 28.5791 -74.0869 28.749 -102.325 0.510742l-67.2002 -67.1904c-28.1914 -28.1914 -28.0732 -73.7568 0 -101.83c3.70117 -3.69434 7.42871 -6.56348 10.3408 -8.56934c3.66504 -2.51562 6.77734 -8.16309 6.94727 -12.6055
+c0.395508 -10.5674 -3.34766 -21.4561 -11.6982 -29.8057l-21.0537 -21.0557c-5.52051 -5.51953 -14.1826 -6.19922 -20.584 -1.73047c-6.08203 4.24609 -15.2754 11.9502 -20.5215 17.1953c-59.7471 59.8096 -58.9258 155.698 -0.359375 214.591
+c0.109375 0.119141 0.239258 0.25 0.359375 0.369141l67.2002 67.2002c59.2705 59.2705 155.699 59.2627 214.96 0z" />
+ <glyph glyph-name="cloud" unicode="&#xf0c2;" horiz-adv-x="640"
+d="M537.6 221.4c58.4004 -11.8008 102.4 -63.5 102.4 -125.4c0 -70.7002 -57.2998 -128 -128 -128h-368c-79.5 0 -144 64.5 -144 144c0 62.7998 40.2002 116.2 96.2002 135.9c-0.100586 2.69922 -0.200195 5.39941 -0.200195 8.09961c0 88.4004 71.5996 160 160 160
+c59.2998 0 111 -32.2002 138.7 -80.2002c15.2002 10.2002 33.5996 16.2002 53.2998 16.2002c53 0 96 -43 96 -96c0 -12.2002 -2.2998 -23.9004 -6.40039 -34.5996z" />
+ <glyph glyph-name="flask" unicode="&#xf0c3;" horiz-adv-x="448"
+d="M437.2 44.5c29.2998 -47 -4.40039 -108.5 -60.1006 -108.5h-306.199c-55.6006 0 -89.4004 61.4004 -60.1006 108.5l117.2 188.5v151h-8c-13.2998 0 -24 10.7002 -24 24v16c0 13.2998 10.7002 24 24 24h208c13.2998 0 24 -10.7002 24 -24v-16
+c0 -13.2998 -10.7002 -24 -24 -24h-8v-151zM137.9 128h172l-48.2002 77.5996c-3.60059 5.2002 -5.7998 11.5 -5.7998 18.4004v160h-64v-160c0 -6.7998 -2.10059 -13.2002 -5.80078 -18.4004z" />
+ <glyph glyph-name="cut" unicode="&#xf0c4;" horiz-adv-x="447"
+d="M278.06 192l166.421 -166.43c4.68945 -4.69043 4.68945 -12.29 0 -16.9707c-32.8008 -32.7998 -85.9902 -32.7998 -118.79 0l-115.511 115.521l-24.8594 -24.8604c4.30957 -10.9199 6.67969 -22.8096 6.67969 -35.2598c0 -53.0195 -42.9805 -96 -96 -96
+s-96 42.9805 -96 96s42.9805 96 96 96c4.53027 0 8.99023 -0.320312 13.3604 -0.929688l32.9297 32.9297l-32.9297 32.9297c-4.37012 -0.609375 -8.82031 -0.929688 -13.3604 -0.929688c-53.0195 0 -96 42.9805 -96 96s42.9805 96 96 96s96 -42.9805 96 -96
+c0 -12.4502 -2.37012 -24.3398 -6.67969 -35.2598l24.8594 -24.8604l115.511 115.521c32.7998 32.7998 85.9893 32.7998 118.79 0c4.68945 -4.68066 4.68945 -12.2803 0 -16.9707zM96 288c17.6396 0 32 14.3604 32 32s-14.3604 32 -32 32s-32 -14.3604 -32 -32
+s14.3604 -32 32 -32zM96 32c17.6396 0 32 14.3604 32 32s-14.3604 32 -32 32s-32 -14.3604 -32 -32s14.3604 -32 32 -32z" />
+ <glyph glyph-name="copy" unicode="&#xf0c5;" horiz-adv-x="448"
+d="M320 0v-40c0 -13.2549 -10.7451 -24 -24 -24h-272c-13.2549 0 -24 10.7451 -24 24v368c0 13.2549 10.7451 24 24 24h72v-296c0 -30.8789 25.1211 -56 56 -56h168zM320 344c0 -13.2002 10.7998 -24 24 -24h104v-264c0 -13.2549 -10.7451 -24 -24 -24h-272
+c-13.2549 0 -24 10.7451 -24 24v368c0 13.2549 10.7451 24 24 24h168v-104zM440.971 375.029c3.87988 -3.88086 7.0293 -11.4834 7.0293 -16.9707v-6.05859h-96v96h6.05859c5.4873 0 13.0898 -3.14941 16.9707 -7.0293z" />
+ <glyph glyph-name="paperclip" unicode="&#xf0c6;" horiz-adv-x="448"
+d="M43.2461 -18.1416c-58.4297 60.2891 -57.3408 157.511 1.38574 217.581l209.76 214.561c44.3164 45.332 116.352 45.3359 160.672 0c43.8896 -44.8936 43.9424 -117.329 0 -162.276l-182.85 -186.852c-29.8545 -30.5371 -78.6328 -30.1113 -107.981 0.998047
+c-28.2754 29.9697 -27.3682 77.4727 1.45117 106.953l143.743 146.835c6.18262 6.31348 16.3125 6.42188 22.626 0.241211l22.8613 -22.3799c6.31445 -6.18164 6.42188 -16.3115 0.241211 -22.626l-143.729 -146.82c-4.93164 -5.04492 -5.23535 -13.4287 -0.647461 -18.292
+c4.37207 -4.63379 11.2451 -4.71094 15.6875 -0.165039l182.85 186.851c19.6123 20.0625 19.6123 52.7256 -0.0117188 72.7979c-19.1885 19.627 -49.957 19.6377 -69.1533 0l-209.762 -214.56c-34.7627 -35.5605 -35.2988 -93.1201 -1.19043 -128.313
+c34.0098 -35.0928 88.9844 -35.1367 123.058 -0.285156l172.061 175.999c6.17676 6.31836 16.3066 6.43262 22.626 0.255859l22.877 -22.3643c6.31836 -6.17676 6.43359 -16.3066 0.255859 -22.626l-172.061 -175.998c-59.5752 -60.9385 -155.942 -60.2158 -214.77 0.485352
+z" />
+ <glyph glyph-name="save" unicode="&#xf0c7;" horiz-adv-x="448"
+d="M433.941 318.059c7.75977 -7.75977 14.0586 -22.9658 14.0586 -33.9404v-268.118c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h268.118c10.9746 0 26.1807 -6.29883 33.9404 -14.0586zM224 32
+c35.3457 0 64 28.6543 64 64s-28.6543 64 -64 64s-64 -28.6543 -64 -64s28.6543 -64 64 -64zM320 336.52c0 2.74316 -1.5752 6.5459 -3.51465 8.48535l-3.48047 3.48047c-2.25 2.25098 -5.30176 3.51465 -8.48535 3.51465h-228.52c-6.62695 0 -12 -5.37305 -12 -12v-104
+c0 -6.62695 5.37305 -12 12 -12h232c6.62695 0 12 5.37305 12 12v100.52z" />
+ <glyph glyph-name="square" unicode="&#xf0c8;" horiz-adv-x="448"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352z" />
+ <glyph glyph-name="bars" unicode="&#xf0c9;" horiz-adv-x="448"
+d="M16 316c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416zM16 156c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40
+c0 -8.83691 -7.16309 -16 -16 -16h-416zM16 -4c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h416c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-416z" />
+ <glyph glyph-name="list-ul" unicode="&#xf0ca;"
+d="M96 352c0 -26.5098 -21.4902 -48 -48 -48s-48 21.4902 -48 48s21.4902 48 48 48s48 -21.4902 48 -48zM48 240c26.5098 0 48 -21.4902 48 -48s-21.4902 -48 -48 -48s-48 21.4902 -48 48s21.4902 48 48 48zM48 80c26.5098 0 48 -21.4902 48 -48s-21.4902 -48 -48 -48
+s-48 21.4902 -48 48s21.4902 48 48 48zM144 316c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h352c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-352zM144 156c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h352
+c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-352zM144 -4c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h352c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-352z" />
+ <glyph glyph-name="list-ol" unicode="&#xf0cb;" horiz-adv-x="514"
+d="M3.2627 308.473c0 7.47754 3.91699 11.5723 11.5732 11.5723h15.1309v39.877c0 5.16309 0.53418 10.5029 0.53418 10.5029h-0.356445s-1.77832 -2.66992 -2.84766 -3.73828c-4.45117 -4.27246 -10.5039 -4.4502 -15.666 1.06836l-5.51758 6.23145
+c-5.3418 5.34082 -4.98438 11.2158 0.53418 16.3789l21.7197 19.9375c4.44824 4.0957 8.36523 5.69727 14.418 5.69727h12.1045c7.65625 0 11.749 -3.91602 11.749 -11.5723v-84.3838h15.4883c7.65527 0 11.5723 -4.09375 11.5723 -11.5723v-8.90039
+c0 -7.47754 -3.91699 -11.5723 -11.5723 -11.5723h-67.291c-7.65625 0 -11.5732 4.09473 -11.5732 11.5723v8.90137zM2.21094 143.409c0 47.2773 50.9551 56.3828 50.9551 69.165c0 7.17969 -5.9541 8.75488 -9.28027 8.75488
+c-3.15234 0 -6.47852 -1.05078 -9.45508 -3.85254c-5.0791 -4.90234 -10.5068 -7.00391 -16.1104 -2.4502l-8.5791 6.8291c-5.7793 4.55273 -7.18066 9.80469 -2.80273 15.4082c6.65332 8.75488 19.0869 18.7363 40.4482 18.7363
+c19.4375 0 44.4766 -10.5059 44.4766 -39.5732c0 -38.3467 -46.7529 -46.4014 -48.6797 -56.9092h39.0498c7.52832 0 11.5566 -4.02637 11.5566 -11.3818v-8.75488c0 -7.35352 -4.02832 -11.3818 -11.5566 -11.3818h-67.9404c-7.00488 0 -12.083 4.02832 -12.083 11.3818
+v4.02832h0.000976562zM5.6543 -6.61035l5.60254 9.28027c3.85352 6.6543 9.10547 7.00391 15.584 3.15234c4.90332 -2.10156 9.62988 -3.15234 14.3594 -3.15234c10.1553 0 14.3574 3.50195 14.3574 8.23047c0 6.65332 -5.60352 9.10547 -15.9336 9.10547h-4.72754
+c-5.9541 0 -9.28027 2.10156 -12.2588 7.87988l-1.0498 1.92578c-2.45117 4.72852 -1.22559 9.80664 2.80078 14.8848l5.60449 7.00391c6.8291 8.40527 12.2568 13.4824 12.2568 13.4824v0.350586s-4.20312 -1.05078 -12.6084 -1.05078h-12.957
+c-7.5293 0 -11.3828 4.02734 -11.3828 11.3818v8.75488c0 7.53027 3.85352 11.3818 11.3828 11.3818h58.4844c7.5293 0 11.3818 -4.02734 11.3818 -11.3818v-3.32715c0 -5.77832 -1.40039 -9.80566 -5.0791 -14.1826l-17.5088 -20.1377
+c19.6113 -5.07812 28.7158 -20.4863 28.7158 -34.8447c0 -21.3633 -14.3574 -44.126 -48.5029 -44.126c-16.6357 0 -28.1914 4.72754 -35.8955 9.45508c-5.7793 4.20215 -6.30469 9.80469 -2.62598 15.9336zM144 316c-8.83691 0 -16 7.16309 -16 16v40
+c0 8.83691 7.16309 16 16 16h352c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-352zM144 156c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h352c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-352zM144 -4
+c-8.83691 0 -16 7.16309 -16 16v40c0 8.83691 7.16309 16 16 16h352c8.83691 0 16 -7.16309 16 -16v-40c0 -8.83691 -7.16309 -16 -16 -16h-352z" />
+ <glyph glyph-name="strikethrough" unicode="&#xf0cc;"
+d="M496 160h-480c-8.83691 0 -16 7.16309 -16 16v32c0 8.83691 7.16309 16 16 16h480c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16zM281.334 144h122.773c6.52441 -14.1826 10.1426 -30.7852 10.1426 -50.5996
+c0 -88.9863 -73.4346 -134.425 -162.013 -134.425c-76.7705 0 -154.487 34.9756 -154.487 101.82v19.2041c0 8.83691 7.16406 16 16 16h45.6133c8.83594 0 16 -7.16309 16 -16v-1.64844c0 -32.1904 44.5488 -44.2705 76.874 -44.2705
+c46.6436 0 75.6211 20.5684 75.6211 53.6758c0 27.5605 -19.2666 43.3066 -46.5244 56.2432zM250.382 240h-129.823c-8.11133 15.3711 -12.7764 33.8369 -12.7764 56.5469c0 84.6738 65.5225 125.646 150.726 125.646c70.8506 0 142.575 -27.4375 142.575 -79.875v-30.3184
+c0 -8.83691 -7.16406 -16 -16 -16h-45.6133c-8.83594 0 -16 7.16309 -16 16v2.10352c0 16.3926 -22.3145 32.9854 -64.9619 32.9854c-34.0605 0 -64.9619 -13.4863 -64.9619 -47.4062c0 -30.7363 24.4141 -46.1777 56.8359 -59.6826z" />
+ <glyph glyph-name="underline" unicode="&#xf0cd;" horiz-adv-x="448"
+d="M224.264 59.7598c-91.6689 0 -156.603 51.165 -156.603 151.393v172.848h-28.291c-8.83691 0 -16 7.16309 -16 16v32c0 8.83691 7.16309 16 16 16h137.39c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-28.8125v-172.848
+c0 -53.6992 28.3135 -79.4443 76.3164 -79.4443c46.9668 0 75.7959 25.4336 75.7959 79.9648v172.327h-28.291c-8.83691 0 -16 7.16309 -16 16v32c0 8.83691 7.16309 16 16 16h136.868c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-28.291v-172.848
+c0 -99.4053 -64.8809 -151.393 -156.082 -151.393zM16 0h416c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-416c-8.83691 0 -16 7.16309 -16 16v32c0 8.83691 7.16309 16 16 16z" />
+ <glyph glyph-name="table" unicode="&#xf0ce;"
+d="M464 416c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h416zM224 32v96h-160v-96h160zM224 192v96h-160v-96h160zM448 32v96h-160v-96h160zM448 192v96h-160v-96h160z" />
+ <glyph glyph-name="magic" unicode="&#xf0d0;"
+d="M224 352l-16 32l-32 16l32 16l16 32l16 -32l32 -16l-32 -16zM80 288l-26.6602 53.3301l-53.3398 26.6699l53.3398 26.6699l26.6602 53.3301l26.6602 -53.3301l53.3398 -26.6699l-53.3398 -26.6699zM432 160l26.6602 -53.3301l53.3398 -26.6699l-53.3398 -26.6699
+l-26.6602 -53.3301l-26.6602 53.3301l-53.3398 26.6699l53.3398 26.6699zM502.62 353.77c12.5 -12.4893 12.5 -32.7598 0 -45.2393l-363.14 -363.15c-6.25 -6.25 -14.4404 -9.37012 -22.6309 -9.37012c-8.17969 0 -16.3691 3.12012 -22.6191 9.37012l-84.8506 84.8506
+c-12.5 12.4893 -12.5 32.75 0 45.25l363.14 363.14c6.25 6.25977 14.4404 9.37988 22.6309 9.37988c8.18945 0 16.3799 -3.12012 22.6191 -9.37988zM359.45 244.54l86.5996 86.5996l-50.9102 50.9102l-86.5996 -86.5996z" />
+ <glyph glyph-name="truck" unicode="&#xf0d1;" horiz-adv-x="640"
+d="M624 96c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-48c0 -53 -43 -96 -96 -96s-96 43 -96 96h-128c0 -53 -43 -96 -96 -96s-96 43 -96 96h-16c-26.5 0 -48 21.5 -48 48v320c0 26.5 21.5 48 48 48h320c26.5 0 48 -21.5 48 -48v-48h44.0996
+c12.7002 0 24.9004 -5.09961 33.9004 -14.0996l99.9004 -99.9004c9 -9 14.0996 -21.2002 14.0996 -33.9004v-108.1h16zM160 -16c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48zM480 -16c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48
+s21.5 -48 48 -48zM560 192v12.0996l-99.9004 99.9004h-44.0996v-112h144z" />
+ <glyph glyph-name="money-bill" unicode="&#xf0d6;" horiz-adv-x="640"
+d="M608 384c17.6699 0 32 -14.3301 32 -32v-320c0 -17.6699 -14.3301 -32 -32 -32h-576c-17.6699 0 -32 14.3301 -32 32v320c0 17.6699 14.3301 32 32 32h576zM48 48h64c0 35.3496 -28.6504 64 -64 64v-64zM48 272c35.3496 0 64 28.6504 64 64h-64v-64zM320 96
+c44.1699 0 80 42.9697 80 96c0 53.0195 -35.8203 96 -80 96s-80 -42.9805 -80 -96c0 -53.0098 35.8096 -96 80 -96zM592 48v64c-35.3496 0 -64 -28.6504 -64 -64h64zM592 272v64h-64c0 -35.3496 28.6504 -64 64 -64z" />
+ <glyph glyph-name="caret-down" unicode="&#xf0d7;" horiz-adv-x="319"
+d="M31.2998 256h257.3c17.8008 0 26.7002 -21.5 14.1006 -34.0996l-128.601 -128.7c-7.7998 -7.7998 -20.5 -7.7998 -28.2998 0l-128.6 128.7c-12.6006 12.5996 -3.7002 34.0996 14.0996 34.0996z" />
+ <glyph glyph-name="caret-up" unicode="&#xf0d8;" horiz-adv-x="320"
+d="M288.662 96h-257.324c-17.8184 0 -26.7412 21.543 -14.1416 34.1416l128.662 128.662c7.80957 7.81055 20.4736 7.81055 28.2832 0l128.662 -128.662c12.6006 -12.5986 3.67676 -34.1416 -14.1416 -34.1416z" />
+ <glyph glyph-name="caret-left" unicode="&#xf0d9;" horiz-adv-x="215"
+d="M192 320.662v-257.324c0 -17.8184 -21.543 -26.7412 -34.1416 -14.1416l-128.662 128.662c-7.81055 7.80957 -7.81055 20.4736 0 28.2832l128.662 128.662c12.5986 12.6006 34.1416 3.67676 34.1416 -14.1416z" />
+ <glyph glyph-name="caret-right" unicode="&#xf0da;" horiz-adv-x="168"
+d="M0 63.3379v257.324c0 17.8184 21.543 26.7412 34.1416 14.1416l128.662 -128.662c7.81055 -7.80957 7.81055 -20.4736 0 -28.2832l-128.662 -128.662c-12.5986 -12.6006 -34.1416 -3.67676 -34.1416 14.1416z" />
+ <glyph glyph-name="columns" unicode="&#xf0db;"
+d="M464 416c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h416zM224 32v256h-160v-256h160zM448 32v256h-160v-256h160z" />
+ <glyph glyph-name="sort" unicode="&#xf0dc;" horiz-adv-x="320"
+d="M41 160h238c21.4004 0 32.0996 -25.9004 17 -41l-119 -119c-9.40039 -9.40039 -24.5996 -9.40039 -33.9004 0l-119.1 119c-15.0996 15.0996 -4.40039 41 17 41zM296 265c15.0996 -15.0996 4.40039 -41 -17 -41h-238c-21.4004 0 -32.0996 25.9004 -17 41l119.1 119
+c9.30078 9.40039 24.5 9.40039 33.9004 0z" />
+ <glyph glyph-name="sort-down" unicode="&#xf0dd;" horiz-adv-x="320"
+d="M41 160h238c21.4004 0 32.0996 -25.9004 17 -41l-119 -119c-9.40039 -9.40039 -24.5996 -9.40039 -33.9004 0l-119.1 119c-15.0996 15.0996 -4.40039 41 17 41z" />
+ <glyph glyph-name="sort-up" unicode="&#xf0de;" horiz-adv-x="319"
+d="M279 224h-238c-21.4004 0 -32.0996 25.9004 -17 41l119 119c9.40039 9.40039 24.5996 9.40039 33.9004 0l119 -119c15.1992 -15.0996 4.5 -41 -16.9004 -41z" />
+ <glyph glyph-name="envelope" unicode="&#xf0e0;"
+d="M502.3 257.2c3.90039 3.09961 9.7002 0.200195 9.7002 -4.7002v-204.5c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v204.4c0 5 5.7002 7.7998 9.7002 4.69922c22.3994 -17.3994 52.0996 -39.5 154.1 -113.6
+c21.1006 -15.4004 56.7002 -47.7998 92.2002 -47.5996c35.7002 -0.300781 72 32.7998 92.2998 47.5996c102 74.0996 131.601 96.2998 154 113.7zM256 128c-23.2002 -0.400391 -56.5996 29.2002 -73.4004 41.4004c-132.699 96.2998 -142.8 104.8 -173.399 128.699
+c-5.7998 4.60059 -9.2002 11.5 -9.2002 18.9004v19c0 26.5 21.5 48 48 48h416c26.5 0 48 -21.5 48 -48v-19c0 -7.40039 -3.40039 -14.4004 -9.2002 -18.9004c-30.5996 -24 -40.7002 -32.3994 -173.399 -128.699c-16.8008 -12.2002 -50.2002 -41.8008 -73.4004 -41.4004z" />
+ <glyph glyph-name="undo" unicode="&#xf0e2;" horiz-adv-x="504"
+d="M212.333 223.667h-200.333c-6.62695 0 -12 5.37305 -12 12v200.333c0 6.62695 5.37305 12 12 12h48c6.62695 0 12 -5.37305 12 -12v-78.1123c45.7734 50.833 112.26 82.6426 186.175 82.1055c136.906 -0.994141 246.448 -111.623 246.157 -248.532
+c-0.291016 -136.719 -111.212 -247.461 -247.999 -247.461c-64.0889 0 -122.496 24.3135 -166.51 64.2148c-5.09961 4.62207 -5.33398 12.5537 -0.466797 17.4199l33.9668 33.9668c4.47363 4.47461 11.6621 4.71777 16.4004 0.525391
+c31.0361 -27.4629 71.8564 -44.127 116.609 -44.127c97.2676 0 176 78.7158 176 176c0 97.2666 -78.7158 176 -176 176c-58.4961 0 -110.28 -28.4756 -142.274 -72.333h98.2744c6.62695 0 12 -5.37305 12 -12v-48c0 -6.62695 -5.37305 -12 -12 -12z" />
+ <glyph glyph-name="gavel" unicode="&#xf0e3;"
+d="M504.971 248.638c9.37207 -9.37305 9.37207 -24.5684 0 -33.9404l-124.451 -124.451c-9.37109 -9.37305 -24.5674 -9.37305 -33.9404 0l-22.627 22.627c-9.37305 9.37207 -9.37305 24.5684 0 33.9414l5.65723 5.65625l-39.5986 39.5986l-81.04 -81.04l5.65723 -5.65723
+c12.4971 -12.4971 12.4971 -32.7578 0 -45.2549l-114.745 -114.745c-12.4971 -12.4971 -32.7578 -12.4971 -45.2549 0l-45.2549 45.2549c-12.4971 12.4971 -12.4971 32.7578 0 45.2549l114.744 114.746c12.4971 12.4971 32.7578 12.4971 45.2549 0l5.65723 -5.65723
+l81.04 81.04l-39.5986 39.5986l-5.65625 -5.65723c-9.37207 -9.37305 -24.5684 -9.37305 -33.9414 0l-22.627 22.627c-9.37305 9.37207 -9.37305 24.5684 0 33.9404l124.451 124.451c9.37207 9.37305 24.5674 9.37305 33.9404 0l22.627 -22.6279
+c9.37305 -9.37207 9.37305 -24.5674 0 -33.9404l-5.65625 -5.65723l113.138 -113.137l5.65723 5.65625c9.37207 9.37305 24.5674 9.37305 33.9404 0z" />
+ <glyph glyph-name="bolt" unicode="&#xf0e7;" horiz-adv-x="320"
+d="M296 288c18.5 0 30 -20.0996 20.7002 -36l-176 -304c-4.40039 -7.59961 -12.4004 -12 -20.7998 -12c-15.3008 0 -26.9004 14.2998 -23.3008 29.5l46.1006 194.5h-118.7c-14.5 0 -25.7002 12.7998 -23.7998 27.2002l32 240
+c1.59961 11.8994 11.7998 20.7998 23.7998 20.7998h144c15.7002 0 27.2002 -15 23.2002 -30.2002l-42.6006 -129.8h115.4z" />
+ <glyph glyph-name="sitemap" unicode="&#xf0e8;" horiz-adv-x="640"
+d="M128 96c17.6699 0 32 -14.3301 32 -32v-96c0 -17.6699 -14.3301 -32 -32 -32h-96c-17.6699 0 -32 14.3301 -32 32v96c0 17.6699 14.3301 32 32 32h96zM104 176v-48h-48v57.5898c0 21.1797 17.2305 38.4102 38.4102 38.4102h201.59v64h-40c-17.6699 0 -32 14.3301 -32 32
+v96c0 17.6699 14.3301 32 32 32h128c17.6699 0 32 -14.3301 32 -32v-96c0 -17.6699 -14.3301 -32 -32 -32h-40v-64h201.59c21.1797 0 38.4102 -17.2402 38.4102 -38.4102v-57.5898h-48v48h-192v-48h-48v48h-192zM368 96c17.6699 0 32 -14.3301 32 -32v-96
+c0 -17.6699 -14.3301 -32 -32 -32h-96c-17.6699 0 -32 14.3301 -32 32v96c0 17.6699 14.3301 32 32 32h96zM608 96c17.6699 0 32 -14.3301 32 -32v-96c0 -17.6699 -14.3301 -32 -32 -32h-96c-17.6699 0 -32 14.3301 -32 32v96c0 17.6699 14.3301 32 32 32h96z" />
+ <glyph glyph-name="umbrella" unicode="&#xf0e9;" horiz-adv-x="576"
+d="M575.7 167.2c2.2002 -10.1006 -8.40039 -21.4004 -18.7002 -11.4004c-51.5 54.4004 -107.6 52.5 -158.6 -37c-5.30078 -9.5 -14.9004 -8.59961 -19.7002 0c-2.5 4.40039 -32.2002 73.2002 -90.7002 73.2002c-45.7998 0 -70.5 -37.7998 -90.7002 -73.2002
+c-4.7998 -8.59961 -14.3994 -9.5 -19.7002 0c-50.8994 89.4004 -106.6 92 -158.6 37c-10.2002 -9.89941 -20.9004 1.2998 -18.7002 11.4004c29.2002 136.3 138 218.2 255.7 230.899v17.9004c0 17.7002 14.2998 32 32 32s32 -14.2998 32 -32v-17.9004
+c117.3 -12.6992 227.1 -94.5996 255.7 -230.899zM256 146.3c9.59961 8.90039 19.7002 13.6006 32 13.7002c12.2002 0 22.9004 -5.7002 32 -13.5996v-130.301c0 -44.0996 -35.9004 -80 -80 -80c-33.7998 0 -64.2002 21.4004 -75.4004 53.3008
+c-5.89941 16.5996 2.80078 34.8994 19.5 40.7998c16.7002 5.89941 34.9004 -2.7998 40.8008 -19.5c1.89941 -5.40039 7.2998 -10.7002 15.0996 -10.7002c8.7998 0 16 7.2002 16 16v130.3z" />
+ <glyph glyph-name="paste" unicode="&#xf0ea;" horiz-adv-x="448"
+d="M128 264v-232h-104c-13.2549 0 -24 10.7451 -24 24v336c0 13.2549 10.7451 24 24 24h80.6104c11.084 19.1104 31.7529 32 55.3896 32s44.3057 -12.8896 55.3896 -32h80.6104c13.2549 0 24 -10.7451 24 -24v-72h-136c-30.8779 0 -56 -25.1211 -56 -56zM160 408
+c-13.2549 0 -24 -10.7451 -24 -24s10.7451 -24 24 -24s24 10.7451 24 24s-10.7451 24 -24 24zM344 160h104v-200c0 -13.2549 -10.7451 -24 -24 -24h-240c-13.2549 0 -24 10.7451 -24 24v304c0 13.2549 10.7451 24 24 24h136v-104c0 -13.2002 10.7998 -24 24 -24z
+M448 198.059v-6.05859h-96v96h6.05859c5.4873 0 13.0898 -3.14941 16.9707 -7.0293l65.9404 -65.9404c3.88086 -3.88086 7.03027 -11.4834 7.03027 -16.9717z" />
+ <glyph glyph-name="lightbulb" unicode="&#xf0eb;" horiz-adv-x="352"
+d="M96.0596 -6.34961l-0.0498047 38.3496h159.98l-0.0400391 -38.3496c-0.00585938 -5.3291 -2.40723 -13.2549 -5.36035 -17.6904l-17.0898 -25.6904c-5.24023 -7.88184 -17.1748 -14.2793 -26.6396 -14.2793h-61.71c-9.46582 0 -21.4004 6.39746 -26.6406 14.2793
+l-17.0898 25.6904c-3.49023 5.24023 -5.34961 11.4004 -5.36035 17.6904zM0 272c0 93.0303 73.4404 175.69 175.45 176c97.46 0.299805 176.55 -78.6104 176.55 -176c0 -44.3701 -16.4502 -84.8496 -43.5596 -115.78c-16.5205 -18.8496 -42.3604 -58.2295 -52.21 -91.4502
+c-0.0400391 -0.269531 -0.0703125 -0.519531 -0.110352 -0.779297h-160.24c-0.0400391 0.259766 -0.0703125 0.519531 -0.110352 0.779297c-9.84961 33.2207 -35.6895 72.6006 -52.21 91.4502c-27.1094 30.9307 -43.5596 71.4102 -43.5596 115.78zM176 352
+c8.83984 0 16 7.16016 16 16s-7.16016 16 -16 16c-61.7598 0 -112 -50.2402 -112 -112c0 -8.83984 7.16016 -16 16 -16s16 7.16016 16 16c0 44.1104 35.8896 80 80 80z" />
+ <glyph glyph-name="user-md" unicode="&#xf0f0;" horiz-adv-x="448"
+d="M224 192c-70.7002 0 -128 57.2998 -128 128s57.2998 128 128 128s128 -57.2998 128 -128s-57.2998 -128 -128 -128zM104 24c0 13.2998 10.7002 24 24 24s24 -10.7002 24 -24s-10.7002 -24 -24 -24s-24 10.7002 -24 24zM320 159.4c71.2002 -3.5 128 -61.8008 128 -133.801
+v-44.7998c0 -24.7002 -20.0996 -44.7998 -44.7998 -44.7998h-358.4c-24.7002 0 -44.7998 20.0996 -44.7998 44.7998v44.7998c0 66.6006 48.5 121.4 112.1 132.101v-80.4004c-23.0996 -6.89941 -40 -28.0996 -40 -53.3994c0 -30.9004 25.1006 -56 56 -56
+c30.9004 0 56 25.0996 56 56c0 25.2998 -16.8994 46.5 -40 53.3994v81.6006c7.7002 -1 15.2002 -2.60059 22.6006 -5.2002c18 -6.2998 37.2998 -9.7998 57.3994 -9.7998c20.1006 0 39.4004 3.5 57.4004 9.7998c2.2002 0.799805 4.40039 1.2002 6.59961 1.89941v-45.1992
+c-36.5 -7.5 -64 -39.8008 -64 -78.4004v-43.7998c0 -8.10059 5.90039 -14.7998 13.8008 -15.9004l31.1992 -4.2002c4.30078 -0.799805 8.5 2 9.40039 6.30078l3.09961 15.6992c0.800781 4.30078 -2 8.5 -6.2998 9.40039l-19.2998 3.90039v26.6992
+c0 67 96 64.7002 96 1.90039v-28.5l-19.2998 -3.90039c-4.40039 -0.799805 -7.2002 -5.09961 -6.2998 -9.39941l3.09961 -15.7002c0.900391 -4.40039 5.09961 -7.2002 9.40039 -6.2998l32.1992 6.39941c7.5 1.5 12.9004 8.10059 12.9004 15.7002v41.7002
+c0 38.5996 -27.5 71 -64 78.4004v49z" />
+ <glyph glyph-name="stethoscope" unicode="&#xf0f1;"
+d="M447.1 336c35.7002 0.5 64.9004 -28.2998 64.9004 -64c0 -23.7002 -12.9004 -44.2998 -32 -55.4004v-112.6c0 -92.5996 -79 -168 -176 -168c-95.4004 0 -173.3 72.7998 -175.9 163.2c-73 14.7998 -128.1 79.5 -128.1 156.8v155.4c0 11.5 8.09961 21.2998 19.2998 23.5996
+l62.7998 12.4004c13 2.59961 25.6006 -5.80078 28.2002 -18.8008l3.10059 -15.6992c2.59961 -13 -5.80078 -25.6006 -18.8008 -28.2002l-30.6992 -6.10059v-122.5c0 -53.2998 43.5 -96.5 96.8994 -96c52.9004 0.5 95.1006 44.3008 95.1006 97.2002v121.4l-30.7002 6.09961
+c-13 2.60059 -21.4004 15.2002 -18.7998 28.2002l3.19922 15.7002c2.60059 13 15.2002 21.3994 28.2002 18.7998l62.9004 -12.5996c11.2002 -2.2002 19.2998 -12.1006 19.2998 -23.5v-155.4c0 -77.2002 -55 -141.8 -127.8 -156.8
+c2.7002 -55.1006 51.8994 -99.2002 111.899 -99.2002c61.8008 0 112 46.7002 112 104v112.6c-19.5 11.2002 -32.5 32.5 -32 56.8008c0.700195 34.1992 28.8008 62.0996 63 62.5996zM448 256c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16
+s7.2002 -16 16 -16z" />
+ <glyph glyph-name="suitcase" unicode="&#xf0f2;"
+d="M128 -32v400c0 26.5 21.5 48 48 48h160c26.5 0 48 -21.5 48 -48v-400h-256zM192 352v-32h128v32h-128zM512 272v-256c0 -26.5 -21.5 -48 -48 -48h-48v352h48c26.5 0 48 -21.5 48 -48zM96 -32h-48c-26.5 0 -48 21.5 -48 48v256c0 26.5 21.5 48 48 48h48v-352z" />
+ <glyph glyph-name="bell" unicode="&#xf0f3;" horiz-adv-x="448"
+d="M224 -64c-35.3203 0 -63.9697 28.6504 -63.9697 64h127.939c0 -35.3496 -28.6494 -64 -63.9697 -64zM439.39 85.71c6 -6.44043 8.66016 -14.1602 8.61035 -21.71c-0.0996094 -16.4004 -12.9805 -32 -32.0996 -32h-383.801c-19.1191 0 -31.9893 15.5996 -32.0996 32
+c-0.0498047 7.5498 2.61035 15.2598 8.61035 21.71c19.3193 20.7598 55.4697 51.9902 55.4697 154.29c0 77.7002 54.4795 139.9 127.939 155.16v20.8398c0 17.6699 14.3203 32 31.9805 32s31.9805 -14.3301 31.9805 -32v-20.8398
+c73.46 -15.2598 127.939 -77.46 127.939 -155.16c0 -102.3 36.1504 -133.53 55.4697 -154.29z" />
+ <glyph glyph-name="coffee" unicode="&#xf0f4;" horiz-adv-x="640"
+d="M192 64c-53 0 -96 43 -96 96v232c0 13.2998 10.7002 24 24 24h392c70.5996 0 128 -57.4004 128 -128s-57.4004 -128 -128 -128h-32c0 -53 -43 -96 -96 -96h-192zM512 352h-32v-128h32c35.2998 0 64 28.7002 64 64s-28.7002 64 -64 64zM559.7 -32h-511.4
+c-47.5996 0 -61 64 -36 64h583.3c25 0 11.8008 -64 -35.8994 -64z" />
+ <glyph glyph-name="hospital" unicode="&#xf0f8;" horiz-adv-x="448"
+d="M448 -44v-20h-448v20c0 6.62695 5.37305 12 12 12h20v360c0 13.2549 10.7451 24 24 24h88v72c0 13.2549 10.7451 24 24 24h112c13.2549 0 24 -10.7451 24 -24v-72h88c13.2549 0 24 -10.7451 24 -24v-360h20c6.62695 0 12 -5.37305 12 -12zM308 256h-40
+c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12v40c0 6.62695 -5.37305 12 -12 12zM140 192h40c6.62695 0 12 5.37305 12 12v40c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40
+c0 -6.62695 5.37305 -12 12 -12zM244 64h-40c-6.62695 0 -12 -5.37305 -12 -12v-84h64v84c0 6.62695 -5.37305 12 -12 12zM308 160h-40c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12v40c0 6.62695 -5.37305 12 -12 12
+zM192 148c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12v40zM182 352h26v-26c0 -3.31152 2.68848 -6 6 -6h20c3.31152 0 6 2.68848 6 6v26h26c3.31152 0 6 2.68848 6 6v20
+c0 3.31152 -2.68848 6 -6 6h-26v26c0 3.31152 -2.68848 6 -6 6h-20c-3.31152 0 -6 -2.68848 -6 -6v-26h-26c-3.31152 0 -6 -2.68848 -6 -6v-20c0 -3.31152 2.68848 -6 6 -6z" />
+ <glyph glyph-name="ambulance" unicode="&#xf0f9;" horiz-adv-x="640"
+d="M624 96c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-48c0 -53 -43 -96 -96 -96s-96 43 -96 96h-128c0 -53 -43 -96 -96 -96s-96 43 -96 96h-16c-26.5 0 -48 21.5 -48 48v320c0 26.5 21.5 48 48 48h320c26.5 0 48 -21.5 48 -48v-48h44.0996
+c12.7002 0 24.9004 -5.09961 33.9004 -14.0996l99.9004 -99.9004c9 -9 14.0996 -21.2002 14.0996 -33.9004v-108.1h16zM160 -16c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48zM304 232v48c0 4.40039 -3.59961 8 -8 8h-56v56
+c0 4.40039 -3.59961 8 -8 8h-48c-4.40039 0 -8 -3.59961 -8 -8v-56h-56c-4.40039 0 -8 -3.59961 -8 -8v-48c0 -4.40039 3.59961 -8 8 -8h56v-56c0 -4.40039 3.59961 -8 8 -8h48c4.40039 0 8 3.59961 8 8v56h56c4.40039 0 8 3.59961 8 8zM480 -16c26.5 0 48 21.5 48 48
+s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48zM560 192v12.0996l-99.9004 99.9004h-44.0996v-112h144z" />
+ <glyph glyph-name="medkit" unicode="&#xf0fa;"
+d="M96 -32v352h32v48c0 26.5098 21.4902 48 48 48h160c26.5098 0 48 -21.4902 48 -48v-48h32v-352h-320zM192 352v-32h128v32h-128zM512 272v-256c0 -26.5098 -21.4902 -48 -48 -48h-16v352h16c26.5098 0 48 -21.4902 48 -48zM64 -32h-16c-26.5098 0 -48 21.4902 -48 48v256
+c0 26.5098 21.4902 48 48 48h16v-352zM352 176c0 8.83691 -7.16309 16 -16 16h-48v48c0 8.83691 -7.16309 16 -16 16h-32c-8.83691 0 -16 -7.16309 -16 -16v-48h-48c-8.83691 0 -16 -7.16309 -16 -16v-32c0 -8.83691 7.16309 -16 16 -16h48v-48
+c0 -8.83691 7.16309 -16 16 -16h32c8.83691 0 16 7.16309 16 16v48h48c8.83691 0 16 7.16309 16 16v32z" />
+ <glyph glyph-name="fighter-jet" unicode="&#xf0fb;" horiz-adv-x="640"
+d="M544 224c96 -21.333 96 -26.583 96 -32s0 -10.667 -96 -32l-128 -16l-48 -16h-24l-116.842 -148h39.5088c11.666 0 21.333 -2.625 21.333 -6s-9.66602 -6 -21.333 -6h-114.667v12h16v164h-48l-66.666 -80h-34.667l-10.667 10.667v69.333h8v16h48v2.66699l-64 8v42.667
+l64 8v2.66602h-48v16h-8v69.333l10.667 10.667h34.666l66.667 -80h48v164h-16v12h114.667c11.666 0 21.333 -2.625 21.333 -6s-9.66699 -6 -21.333 -6h-39.5088l116.842 -148h24l48 -16z" />
+ <glyph glyph-name="beer" unicode="&#xf0fc;" horiz-adv-x="448"
+d="M368 352c44.1123 0 80 -35.8877 80 -80v-128.86c0 -31.5273 -18.6035 -60.2031 -47.3936 -73.0527l-80.6064 -35.9766v-42.1104c0 -13.2549 -10.7451 -24 -24 -24h-272c-13.2549 0 -24 10.7451 -24 24v400c0 13.2549 10.7451 24 24 24h272c13.2549 0 24 -10.7451 24 -24
+v-40h48zM384 143.14v128.86c0 8.82227 -7.17773 16 -16 16h-48v-183.805l54.5215 24.334c5.22754 2.33789 9.47461 8.88379 9.47852 14.6104zM208 64c8.83594 0 16 7.16406 16 16v224c0 8.83594 -7.16406 16 -16 16s-16 -7.16406 -16 -16v-224
+c0 -8.83594 7.16406 -16 16 -16zM112 64c8.83594 0 16 7.16406 16 16v224c0 8.83594 -7.16406 16 -16 16s-16 -7.16406 -16 -16v-224c0 -8.83594 7.16406 -16 16 -16z" />
+ <glyph glyph-name="h-square" unicode="&#xf0fd;" horiz-adv-x="448"
+d="M448 368v-352c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h352c26.5098 0 48 -21.4902 48 -48zM336 320h-32c-8.83691 0 -16 -7.16309 -16 -16v-80h-128v80c0 8.83691 -7.16309 16 -16 16h-32
+c-8.83691 0 -16 -7.16309 -16 -16v-224c0 -8.83691 7.16309 -16 16 -16h32c8.83691 0 16 7.16309 16 16v80h128v-80c0 -8.83691 7.16309 -16 16 -16h32c8.83691 0 16 7.16309 16 16v224c0 8.83691 -7.16309 16 -16 16z" />
+ <glyph glyph-name="plus-square" unicode="&#xf0fe;" horiz-adv-x="448"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM368 164v56c0 6.59961 -5.40039 12 -12 12h-92v92c0 6.59961 -5.40039 12 -12 12h-56c-6.59961 0 -12 -5.40039 -12 -12v-92h-92
+c-6.59961 0 -12 -5.40039 -12 -12v-56c0 -6.59961 5.40039 -12 12 -12h92v-92c0 -6.59961 5.40039 -12 12 -12h56c6.59961 0 12 5.40039 12 12v92h92c6.59961 0 12 5.40039 12 12z" />
+ <glyph glyph-name="angle-double-left" unicode="&#xf100;" horiz-adv-x="448"
+d="M223.7 209l136 136c9.39941 9.40039 24.5996 9.40039 33.8994 0l22.6006 -22.5996c9.39941 -9.40039 9.39941 -24.6006 0 -33.9004l-96.2998 -96.5l96.3994 -96.4004c9.40039 -9.39941 9.40039 -24.5996 0 -33.8994l-22.5996 -22.7002
+c-9.40039 -9.40039 -24.6006 -9.40039 -33.9004 0l-136 136c-9.5 9.40039 -9.5 24.5996 -0.0996094 34zM31.7002 175c-9.40039 9.40039 -9.40039 24.5996 0.0996094 34l136 136c9.2998 9.40039 24.5 9.40039 33.9004 0l22.5996 -22.7002
+c9.40039 -9.2998 9.40039 -24.5 0 -33.8994l-96.3994 -96.4004l96.2998 -96.5c9.39941 -9.2998 9.39941 -24.5 0 -33.9004l-22.6006 -22.5996c-9.2998 -9.40039 -24.5 -9.40039 -33.8994 0z" />
+ <glyph glyph-name="angle-double-right" unicode="&#xf101;" horiz-adv-x="448"
+d="M224.3 175l-136 -136c-9.39941 -9.40039 -24.5996 -9.40039 -33.8994 0l-22.6006 22.5996c-9.39941 9.40039 -9.39941 24.6006 0 33.9004l96.4004 96.4004l-96.4004 96.3994c-9.39941 9.40039 -9.39941 24.6006 0 33.9004l22.5 22.7998
+c9.40039 9.40039 24.6006 9.40039 33.9004 0l136 -136c9.5 -9.40039 9.5 -24.5996 0.0996094 -34zM416.3 209c9.40039 -9.40039 9.40039 -24.5996 0 -33.7998l-136 -136c-9.2998 -9.40039 -24.5 -9.40039 -33.8994 0l-22.6006 22.5996
+c-9.39941 9.2998 -9.39941 24.5 0 33.9004l96.4004 96.3994l-96.4004 96.4004c-9.39941 9.2998 -9.39941 24.5 0 33.9004l22.6006 22.5996c9.2998 9.40039 24.5 9.40039 33.8994 0z" />
+ <glyph glyph-name="angle-double-up" unicode="&#xf102;" horiz-adv-x="320"
+d="M177 192.3l136 -136c9.40039 -9.39941 9.40039 -24.5996 0 -33.8994l-22.5996 -22.6006c-9.40039 -9.39941 -24.6006 -9.39941 -33.9004 0l-96.5 96.2998l-96.4004 -96.3994c-9.39941 -9.40039 -24.5996 -9.40039 -33.8994 0l-22.7002 22.5996
+c-9.40039 9.40039 -9.40039 24.6006 0 33.9004l136 136c9.40039 9.5 24.5996 9.5 34 0.0996094zM143 384.3c9.40039 9.40039 24.5996 9.40039 33.7998 0l136 -136c9.40039 -9.2998 9.40039 -24.5 0 -33.8994l-22.5996 -22.6006c-9.2998 -9.39941 -24.5 -9.39941 -33.9004 0
+l-96.3994 96.4004l-96.4004 -96.4004c-9.2998 -9.39941 -24.5 -9.39941 -33.9004 0l-22.5996 22.6006c-9.40039 9.2998 -9.40039 24.5 0 33.8994z" />
+ <glyph glyph-name="angle-double-down" unicode="&#xf103;" horiz-adv-x="320"
+d="M143 191.7l-136 136c-9.40039 9.39941 -9.40039 24.5996 0 33.8994l22.5996 22.6006c9.40039 9.39941 24.6006 9.39941 33.9004 0l96.4004 -96.4004l96.3994 96.4004c9.40039 9.39941 24.6006 9.39941 33.9004 0l22.7998 -22.5
+c9.40039 -9.40039 9.40039 -24.6006 0 -33.9004l-136 -136c-9.40039 -9.5 -24.5996 -9.5 -34 -0.0996094zM177 -0.299805c-9.40039 -9.40039 -24.5996 -9.40039 -34 0.0996094l-136 136c-9.40039 9.2998 -9.40039 24.5 0 33.9004l22.7002 22.5996
+c9.2998 9.40039 24.5 9.40039 33.8994 0l96.4004 -96.3994l96.5 96.2998c9.2998 9.39941 24.5 9.39941 33.9004 0l22.5996 -22.6006c9.40039 -9.2998 9.40039 -24.5 0 -33.8994z" />
+ <glyph glyph-name="angle-left" unicode="&#xf104;" horiz-adv-x="256"
+d="M31.7002 209l136 136c9.39941 9.40039 24.5996 9.40039 33.8994 0l22.6006 -22.5996c9.39941 -9.40039 9.39941 -24.6006 0 -33.9004l-96.2998 -96.5l96.3994 -96.4004c9.40039 -9.39941 9.40039 -24.5996 0 -33.8994l-22.5996 -22.7002
+c-9.40039 -9.40039 -24.6006 -9.40039 -33.9004 0l-136 136c-9.5 9.40039 -9.5 24.5996 -0.0996094 34z" />
+ <glyph glyph-name="angle-right" unicode="&#xf105;" horiz-adv-x="256"
+d="M224.3 175l-136 -136c-9.39941 -9.40039 -24.5996 -9.40039 -33.8994 0l-22.6006 22.5996c-9.39941 9.40039 -9.39941 24.6006 0 33.9004l96.4004 96.4004l-96.4004 96.3994c-9.39941 9.40039 -9.39941 24.6006 0 33.9004l22.5 22.7998
+c9.40039 9.40039 24.6006 9.40039 33.9004 0l136 -136c9.5 -9.40039 9.5 -24.5996 0.0996094 -34z" />
+ <glyph glyph-name="angle-up" unicode="&#xf106;" horiz-adv-x="320"
+d="M177 288.3l136 -136c9.40039 -9.39941 9.40039 -24.5996 0 -33.8994l-22.5996 -22.6006c-9.40039 -9.39941 -24.6006 -9.39941 -33.9004 0l-96.5 96.2998l-96.4004 -96.3994c-9.39941 -9.40039 -24.5996 -9.40039 -33.8994 0l-22.7002 22.5996
+c-9.40039 9.40039 -9.40039 24.6006 0 33.9004l136 136c9.40039 9.5 24.5996 9.5 34 0.0996094z" />
+ <glyph glyph-name="angle-down" unicode="&#xf107;" horiz-adv-x="319"
+d="M143 95.7002l-136 136c-9.40039 9.39941 -9.40039 24.5996 0 33.8994l22.5996 22.6006c9.40039 9.39941 24.6006 9.39941 33.9004 0l96.4004 -96.4004l96.3994 96.4004c9.40039 9.39941 24.6006 9.39941 33.9004 0l22.5996 -22.6006
+c9.40039 -9.39941 9.40039 -24.5996 0 -33.8994l-136 -136c-9.2002 -9.40039 -24.3994 -9.40039 -33.7998 0z" />
+ <glyph glyph-name="desktop" unicode="&#xf108;" horiz-adv-x="576"
+d="M528 448c26.5 0 48 -21.5 48 -48v-320c0 -26.5 -21.5 -48 -48 -48h-192l16 -48h72c13.2998 0 24 -10.7002 24 -24s-10.7002 -24 -24 -24h-272c-13.2998 0 -24 10.7002 -24 24s10.7002 24 24 24h72l16 48h-192c-26.5 0 -48 21.5 -48 48v320c0 26.5 21.5 48 48 48h480z
+M512 96v288h-448v-288h448z" />
+ <glyph glyph-name="laptop" unicode="&#xf109;" horiz-adv-x="640"
+d="M624 32c8.7998 0 16 -7.2002 16 -16v-16c0 -35.2002 -28.7998 -64 -64 -64h-512c-35.2002 0 -64 28.7998 -64 64v16c0 8.7998 7.2002 16 16 16h239.23c-0.25 -14.5303 14.0791 -32 32.7695 -32h60.7998c18.0303 0 32 12.1904 32.7402 32h242.46zM576 400v-336h-512v336
+c0 26.4004 21.5996 48 48 48h416c26.4004 0 48 -21.5996 48 -48zM512 128v256h-384v-256h384z" />
+ <glyph glyph-name="tablet" unicode="&#xf10a;" horiz-adv-x="448"
+d="M400 448c26.5 0 48 -21.5 48 -48v-416c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48h352zM224 -32c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32z" />
+ <glyph glyph-name="mobile" unicode="&#xf10b;" horiz-adv-x="320"
+d="M272 448c26.5 0 48 -21.5 48 -48v-416c0 -26.5 -21.5 -48 -48 -48h-224c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48h224zM160 -32c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32z" />
+ <glyph glyph-name="quote-left" unicode="&#xf10d;"
+d="M464 192c26.5 0 48 -21.5 48 -48v-128c0 -26.5 -21.5 -48 -48 -48h-128c-26.5 0 -48 21.5 -48 48v240c0 88.4004 71.5996 160 160 160h8c13.2998 0 24 -10.7002 24 -24v-48c0 -13.2998 -10.7002 -24 -24 -24h-8c-35.2998 0 -64 -28.7002 -64 -64v-64h80zM176 192
+c26.5 0 48 -21.5 48 -48v-128c0 -26.5 -21.5 -48 -48 -48h-128c-26.5 0 -48 21.5 -48 48v240c0 88.4004 71.5996 160 160 160h8c13.2998 0 24 -10.7002 24 -24v-48c0 -13.2998 -10.7002 -24 -24 -24h-8c-35.2998 0 -64 -28.7002 -64 -64v-64h80z" />
+ <glyph glyph-name="quote-right" unicode="&#xf10e;"
+d="M464 416c26.5 0 48 -21.5 48 -48v-240c0 -88.4004 -71.5996 -160 -160 -160h-8c-13.2998 0 -24 10.7002 -24 24v48c0 13.2998 10.7002 24 24 24h8c35.2998 0 64 28.7002 64 64v64h-80c-26.5 0 -48 21.5 -48 48v128c0 26.5 21.5 48 48 48h128zM176 416
+c26.5 0 48 -21.5 48 -48v-240c0 -88.4004 -71.5996 -160 -160 -160h-8c-13.2998 0 -24 10.7002 -24 24v48c0 13.2998 10.7002 24 24 24h8c35.2998 0 64 28.7002 64 64v64h-80c-26.5 0 -48 21.5 -48 48v128c0 26.5 21.5 48 48 48h128z" />
+ <glyph glyph-name="spinner" unicode="&#xf110;"
+d="M304 400c0 -26.5098 -21.4902 -48 -48 -48s-48 21.4902 -48 48s21.4902 48 48 48s48 -21.4902 48 -48zM256 32c26.5098 0 48 -21.4902 48 -48s-21.4902 -48 -48 -48s-48 21.4902 -48 48s21.4902 48 48 48zM464 240c26.5098 0 48 -21.4902 48 -48s-21.4902 -48 -48 -48
+s-48 21.4902 -48 48s21.4902 48 48 48zM96 192c0 -26.5098 -21.4902 -48 -48 -48s-48 21.4902 -48 48s21.4902 48 48 48s48 -21.4902 48 -48zM108.922 92.9219c26.5088 0 48 -21.4912 48 -48c0 -26.5098 -21.4902 -48 -48 -48s-48 21.4902 -48 48s21.4902 48 48 48z
+M403.078 92.9219c26.5098 0 48 -21.4912 48 -48c0 -26.5098 -21.4902 -48 -48 -48s-48 21.4902 -48 48s21.4902 48 48 48zM108.922 387.078c26.5088 0 48 -21.4902 48 -48s-21.4902 -48 -48 -48s-48 21.4902 -48 48s21.4902 48 48 48z" />
+ <glyph glyph-name="circle" unicode="&#xf111;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248z" />
+ <glyph glyph-name="smile" unicode="&#xf118;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM328 272c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32s32 14.2998 32 32s-14.2998 32 -32 32zM168 272c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32
+s32 14.2998 32 32s-14.2998 32 -32 32zM362.8 101.8c13.5 16.2998 -11.2002 36.7002 -24.5996 20.5c-22.4004 -26.7998 -55.2002 -42.2002 -90.2002 -42.2002s-67.7998 15.3008 -90.2002 42.2002c-13.5996 16.2002 -38.2002 -4.2002 -24.5996 -20.5
+c28.5 -34.2002 70.2998 -53.7998 114.8 -53.7998s86.2998 19.5996 114.8 53.7998z" />
+ <glyph glyph-name="frown" unicode="&#xf119;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM328 272c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32s32 14.2998 32 32s-14.2998 32 -32 32zM168 272c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32
+s32 14.2998 32 32s-14.2998 32 -32 32zM338.2 53.7998c13.5 -16.2998 38.0996 4.2002 24.5 20.4004c-28.4004 34.2002 -70.2998 53.7998 -114.7 53.7998s-86.2998 -19.5996 -114.8 -53.7002c-13.5 -16.2998 11.0996 -36.7998 24.5996 -20.5
+c22.4004 26.7998 55.2998 42.2002 90.2002 42.2002s67.7998 -15.4004 90.2002 -42.2002z" />
+ <glyph glyph-name="meh" unicode="&#xf11a;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM168 272c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32s32 14.2998 32 32s-14.2998 32 -32 32zM344 80c21.2002 0 21.2002 32 0 32h-192c-21.2002 0 -21.2002 -32 0 -32
+h192zM328 208c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32z" />
+ <glyph glyph-name="gamepad" unicode="&#xf11b;" horiz-adv-x="639"
+d="M480 352c88.4004 0 159.9 -71.5996 159.9 -160s-71.6006 -160 -160 -160c-44.7002 0 -85.2002 18.4004 -114.2 48h-91.5c-29 -29.5996 -69.4004 -48 -114.2 -48c-88.4004 0 -160 71.5996 -160 160s71.5996 160 160 160h320zM256 172v40c0 6.59961 -5.40039 12 -12 12h-52
+v52c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-52h-52c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h52v-52c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12v52h52c6.59961 0 12 5.40039 12 12zM440 104
+c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48zM520 184c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48z" />
+ <glyph glyph-name="keyboard" unicode="&#xf11c;" horiz-adv-x="576"
+d="M528 0h-480c-26.5098 0 -48 21.4902 -48 48v288c0 26.5098 21.4902 48 48 48h480c26.5098 0 48 -21.4902 48 -48v-288c0 -26.5098 -21.4902 -48 -48 -48zM128 268v40c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40
+c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12zM224 268v40c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12zM320 268v40c0 6.62695 -5.37305 12 -12 12h-40
+c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12zM416 268v40c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12zM512 268v40
+c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12zM176 172v40c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40
+c6.62695 0 12 5.37305 12 12zM272 172v40c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12zM368 172v40c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40
+c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12zM464 172v40c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12zM128 76v40c0 6.62695 -5.37305 12 -12 12h-40
+c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12zM416 76v40c0 6.62695 -5.37305 12 -12 12h-232c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h232c6.62695 0 12 5.37305 12 12zM512 76v40
+c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12z" />
+ <glyph glyph-name="flag-checkered" unicode="&#xf11e;" horiz-adv-x="519"
+d="M243.2 258.1c24.2002 -6.69922 47.5996 -16.3994 73.5996 -22.1992v-68.2002c-24.2998 6.7002 -47.5 16.3994 -73.5996 22.2998v68.0996zM466.5 381.1c21.2002 9.80078 45.5 -5.69922 45.4004 -29v-243.1c0 -10.5996 -5.10059 -20.4004 -13.8008 -26.4004
+c-35.7998 -24.5996 -74.2998 -40.8994 -122.5 -40.8994c-67.3994 0 -111.6 34.7998 -165.199 34.7998c-50.8008 0 -86.1006 -10 -114.4 -22.0996v-94.4004c0 -13.2998 -10.7002 -24 -24 -24h-16c-13.2998 0 -24 10.7002 -24 24v386.1c-14.5 10.1006 -24 26.9004 -24 45.9004
+c0 31.7002 26.2998 57.2998 58.2998 56c28.5 -1.2002 51.7998 -24 53.6006 -52.4004c0.5 -8.39941 -0.800781 -16.2998 -3.60059 -23.5996c20.7002 7.59961 43 12 68 12c67.4004 0 111.7 -34.7998 165.2 -34.7998c40.5 0 82.7002 16 117 31.8994zM169.6 122.5v71.2998
+c-26.0996 -2.39941 -47.3994 -8.09961 -73.5996 -17.3994v-70.5c23.5996 8.39941 47.7998 13.8994 73.5996 16.5996zM464 257v70.5c-21.2998 -8.90039 -46.5996 -17.7002 -73.5996 -22.5v-71.9004c-26 -4.19922 -49.9004 -2.59961 -73.6006 2.7002v68.4004
+c-26.3994 4.59961 -49.8994 13.8994 -73.5996 21.2998v-67.4004c-25.2002 7 -46.6006 9.40039 -73.6006 5.7002v71.6006c-23.5 -2.2002 -40.3994 -9.80078 -73.5996 -22v-70.5c29 10.6992 51.2002 17.7998 73.5996 20.8994v-70c32.8008 3 53.9004 0.600586 73.6006 -3.7998
+v-68.5c26.2998 -4.59961 49.7002 -13.9004 73.5996 -21.2998v67.3994c25.7002 -7.09961 46.6006 -9.2998 73.6006 -5.59961v-71.5996c25.0996 2.39941 48.5 11 73.5996 27.0996v70.5c-22.2002 -14.2002 -48.7998 -22.5996 -73.5996 -26v71.0996
+c27.2998 4.40039 50 14.1006 73.5996 23.9004z" />
+ <glyph glyph-name="terminal" unicode="&#xf120;" horiz-adv-x="640"
+d="M257.981 175.029l-194.344 -194.344c-9.37305 -9.37207 -24.5684 -9.37207 -33.9404 0l-22.668 22.667c-9.35742 9.35742 -9.375 24.5225 -0.0400391 33.9014l154.021 154.746l-154.021 154.745c-9.33496 9.37891 -9.31738 24.5439 0.0400391 33.9014l22.667 22.667
+c9.37305 9.37207 24.5684 9.37207 33.9404 0l194.344 -194.344c9.37207 -9.37207 9.37207 -24.5674 0 -33.9404zM640 -8c0 -13.2549 -10.7451 -24 -24 -24h-304c-13.2549 0 -24 10.7451 -24 24v32c0 13.2549 10.7451 24 24 24h304c13.2549 0 24 -10.7451 24 -24v-32z" />
+ <glyph glyph-name="code" unicode="&#xf121;" horiz-adv-x="640"
+d="M278.9 -63.5l-61 17.7002c-6.40039 1.7998 -10 8.5 -8.2002 14.8994l136.5 470.2c1.7998 6.40039 8.5 10 14.8994 8.2002l61 -17.7002c6.40039 -1.7998 10 -8.5 8.2002 -14.8994l-136.5 -470.2c-1.89941 -6.40039 -8.5 -10.1006 -14.8994 -8.2002zM164.9 48.7002
+c-4.5 -4.90039 -12.1006 -5.10059 -17 -0.5l-144.101 135.1c-5.09961 4.7002 -5.09961 12.7998 0 17.5l144.101 135c4.89941 4.60059 12.5 4.2998 17 -0.5l43.5 -46.3994c4.69922 -4.90039 4.2998 -12.7002 -0.800781 -17.2002l-90.5996 -79.7002l90.5996 -79.7002
+c5.10059 -4.5 5.40039 -12.2998 0.800781 -17.2002zM492.1 48.0996c-4.89941 -4.5 -12.5 -4.2998 -17 0.600586l-43.5 46.3994c-4.69922 4.90039 -4.2998 12.7002 0.800781 17.2002l90.5996 79.7002l-90.5996 79.7998c-5.10059 4.5 -5.40039 12.2998 -0.800781 17.2002
+l43.5 46.4004c4.60059 4.7998 12.2002 5 17 0.5l144.101 -135.2c5.09961 -4.7002 5.09961 -12.7998 0 -17.5z" />
+ <glyph glyph-name="reply-all" unicode="&#xf122;" horiz-adv-x="576"
+d="M136.309 258.164l176.005 151.985c15.4062 13.3047 39.6865 2.50293 39.6865 -18.1641v-82.7637c129.182 -10.2305 224 -52.2119 224 -183.548c0 -61.4404 -39.582 -122.309 -83.333 -154.132c-13.6533 -9.93066 -33.1113 2.5332 -28.0771 18.6309
+c38.5117 123.162 -3.92188 169.482 -112.59 182.016v-84.1758c0 -20.7012 -24.2998 -31.4531 -39.6865 -18.1641l-176.005 151.987c-11.0703 9.56152 -11.0859 26.7529 0 36.3281zM8.30859 221.836c-11.0703 9.56152 -11.0859 26.7529 0 36.3281l176.005 151.985
+c15.4062 13.3047 39.6865 2.50293 39.6865 -18.1641v-15.8174l-108.607 -93.7861c-10.7041 -9.23926 -19.3926 -28.2158 -19.3926 -42.3564v-0.0234375v-0.0244141c0 -14.1416 8.68848 -33.1191 19.3936 -42.3604l108.606 -93.7852v-15.8184
+c0 -20.7002 -24.2998 -31.4531 -39.6865 -18.1641z" />
+ <glyph glyph-name="location-arrow" unicode="&#xf124;"
+d="M444.52 444.48c38.3809 16 79.9609 -25.5801 63.9707 -63.9707l-191.9 -415.779c-22.3896 -47.9805 -92.75 -31.9805 -92.75 19.1895v175.91h-175.91c-51.1699 0 -67.1602 70.3604 -19.1895 92.75z" />
+ <glyph glyph-name="crop" unicode="&#xf125;"
+d="M488 96c13.25 0 24 -10.7402 24 -24v-48c0 -13.25 -10.75 -24 -24 -24h-40v-40c0 -13.25 -10.75 -24 -24 -24h-48c-13.25 0 -24 10.75 -24 24v282.75l-146.75 -146.75h114.75v-96h-232c-13.25 0 -24 10.75 -24 24v264h-40c-13.25 0 -24 10.75 -24 24v48
+c0 13.2598 10.75 24 24 24h40v40c0 13.2598 10.75 24 24 24h48c13.25 0 24 -10.7402 24 -24v-282.75l146.75 146.75h-114.75v96h210.75l59.3096 59.3096c6.25 6.25 16.3809 6.25 22.6309 0l22.6191 -22.6191c6.25 -6.25 6.25 -16.3809 0 -22.6309l-59.3096 -59.3096v-242.75
+h40z" />
+ <glyph glyph-name="code-branch" unicode="&#xf126;" horiz-adv-x="384"
+d="M384 304c0 -35.2002 -22.7998 -65.0996 -54.4004 -75.9004c-0.5 -28.0996 -7.59961 -50.5 -21.5996 -67.8994c-28.2002 -35 -76 -39.5 -118.2 -43.4004c-25.7002 -2.39941 -49.8994 -4.59961 -66.0996 -12.7998c-7.10059 -3.59961 -11.7998 -8.2002 -14.9004 -13.4004
+c30 -11.5 51.2002 -40.5996 51.2002 -74.5996c0 -44.2002 -35.7998 -80 -80 -80s-80 35.7998 -80 80c0 35.7998 23.5 66.0996 56 76.4004v199.3c-32.5 10.2002 -56 40.5 -56 76.2998c0 44.2002 35.7998 80 80 80s80 -35.7998 80 -80c0 -35.7998 -23.5 -66.0996 -56 -76.2998
+v-144c23.9004 11.5 53.0996 14.2998 81.2998 16.8994c35.9004 3.30078 69.7998 6.5 85.2002 25.7002c6.7998 8.40039 10.4004 20.7998 11 36.9004c-33.2002 9.7002 -57.5 40.3994 -57.5 76.7998c0 44.2002 35.7998 80 80 80s80 -35.7998 80 -80zM80 384
+c-8.7998 0 -16 -7.2002 -16 -16s7.2002 -16 16 -16s16 7.2002 16 16s-7.2002 16 -16 16zM80 0c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16zM304 320c-8.7998 0 -16 -7.2002 -16 -16s7.2002 -16 16 -16s16 7.2002 16 16
+s-7.2002 16 -16 16z" />
+ <glyph glyph-name="unlink" unicode="&#xf127;"
+d="M304.083 42.0928c4.68555 -4.68555 4.68555 -12.2842 0 -16.9707l-44.6738 -44.6738c-59.2627 -59.2627 -155.693 -59.2666 -214.961 0c-59.2646 59.2646 -59.2646 155.695 0 214.96l44.6748 44.6748c4.68555 4.68555 12.2842 4.68555 16.9707 0l39.5986 -39.5977
+c4.68555 -4.68652 4.68555 -12.2842 0 -16.9717l-44.6758 -44.6738c-28.0713 -28.0732 -28.0713 -73.75 0 -101.823c28.0723 -28.0713 73.75 -28.0723 101.824 0l44.6738 44.6748c4.68652 4.68555 12.2842 4.68555 16.9717 0zM247.515 302.309l-39.5967 39.5986
+c-4.68555 4.68652 -4.68555 12.2852 0 16.9707l44.6738 44.6738c59.2666 59.2646 155.695 59.2646 214.961 0s59.2656 -155.694 0 -214.96l-44.6748 -44.6748c-4.68652 -4.68555 -12.2852 -4.68555 -16.9707 0l-39.5986 39.5977c-4.68555 4.6875 -4.68555 12.2852 0 16.9717
+l44.6758 44.6738c28.0713 28.0732 28.0713 73.75 0 101.823c-28.0742 28.0723 -73.752 28.0742 -101.824 0l-44.6738 -44.6748c-4.6875 -4.68555 -12.2852 -4.68555 -16.9717 0zM482.343 -56.9707c-9.37207 -9.37207 -24.5674 -9.37207 -33.9404 0l-441.373 441.373
+c-9.37305 9.37207 -9.37305 24.5674 0 33.9404l22.6279 22.6279c9.37207 9.37305 24.5674 9.37305 33.9404 0l441.372 -441.374c9.37305 -9.37207 9.37305 -24.5674 0 -33.9404z" />
+ <glyph glyph-name="question" unicode="&#xf128;" horiz-adv-x="403"
+d="M202.021 448c84.8809 0 175.482 -66.2559 175.481 -153.6c0 -115.982 -125.268 -117.768 -125.268 -160.627v-5.77344c0 -13.2549 -10.7451 -24 -24 -24h-72.4717c-13.2549 0 -24 10.7451 -24 24v9.78809c0 61.8291 46.876 86.5449 82.2998 106.405
+c30.376 17.0293 48.9922 28.6113 48.9922 51.1641c0 29.832 -38.0518 49.6309 -68.8154 49.6309c-39.127 0 -57.708 -18.0684 -82.7568 -49.4492c-8.12109 -10.1738 -22.8809 -12.0127 -33.2529 -4.14844l-43.1387 32.709c-10.2705 7.78809 -12.541 22.2939 -5.17773 32.874
+c40.5889 58.3232 92.2881 91.0264 172.107 91.0264zM192 74.541c38.1963 0 69.2715 -31.0742 69.2715 -69.2695c0 -38.1963 -31.0752 -69.2715 -69.2715 -69.2715s-69.2715 31.0752 -69.2715 69.2695c0 38.1963 31.0752 69.2715 69.2715 69.2715z" />
+ <glyph glyph-name="info" unicode="&#xf129;" horiz-adv-x="192"
+d="M20 23.7715h20v144.457h-20c-11.0459 0 -20 8.9541 -20 20v47.7715c0 11.0459 8.9541 20 20 20h112c11.0459 0 20 -8.9541 20 -20v-212.229h20c11.0459 0 20 -8.9541 20 -20v-47.7715c0 -11.0459 -8.9541 -20 -20 -20h-152c-11.0459 0 -20 8.9541 -20 20v47.7715
+c0 11.0459 8.9541 20 20 20zM96 448c39.7637 0 72 -32.2354 72 -72s-32.2354 -72 -72 -72s-72 32.2354 -72 72s32.2354 72 72 72z" />
+ <glyph glyph-name="exclamation" unicode="&#xf12a;" horiz-adv-x="192"
+d="M176 16c0 -44.1123 -35.8877 -80 -80 -80s-80 35.8877 -80 80s35.8877 80 80 80s80 -35.8877 80 -80zM25.2598 422.801c-0.68457 13.709 10.2441 25.1992 23.9707 25.1992h93.5391c13.7266 0 24.6553 -11.4902 23.9707 -25.1992l-13.6006 -272
+c-0.638672 -12.7725 -11.1807 -22.8008 -23.9697 -22.8008h-66.3398c-12.7891 0 -23.3311 10.0283 -23.9697 22.8008z" />
+ <glyph glyph-name="superscript" unicode="&#xf12b;"
+d="M272 256c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-28l-52.5996 -75.7002l58.5996 -84.2998h22c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-62.4004c-5.5 0 -10.5996 2.7998 -13.5 7.5l-45.6992 72.5996
+c-2.30078 3.30078 -4.40039 7 -6.2002 10.2002c-1.7002 -3.2998 -3.7002 -7 -5.90039 -10.5996l-44.7002 -72.1006c-2.89941 -4.69922 -8.09961 -7.59961 -13.5996 -7.59961h-64c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h26.2998l56.6006 82.7002
+l-52.8008 77.2998h-30.0996c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h68.9004c5.5 0 10.6992 -2.90039 13.5996 -7.59961l39.9004 -65.1006c2 -3.59961 4 -7.2002 5.69922 -10.3994c1.7002 3.19922 3.90039 6.89941 6.2002 10.5l40.2998 65
+c2.90039 4.69922 8.10059 7.59961 13.6006 7.59961h67.7998zM496 192c8.7998 0 16 -7.2002 16 -15.9004v-32c0 -8.7998 -7.2002 -16 -16 -16h-168.1c-8.10059 0 -14.8008 5.90039 -15.9004 13.9004c-0.799805 6.2998 -1.40039 12.5996 -1.40039 19.2998
+c0 103 119.4 123.8 119.4 160c0 11 -6.90039 23.9004 -26.2998 23.9004c-12.9004 0 -23.7002 -7.7998 -31.7002 -18.2002c-5.09961 -6.7002 -14.5996 -8.2998 -21.5996 -3.59961l-30.3008 20.2998c-7.19922 4.89941 -9.2998 14.5996 -4.59961 21.8994
+c18 28.4004 51.9004 50.4004 94.4004 50.4004c47.0996 0 97.7998 -27.5 97.7998 -88.0996c0 -24.4004 -8.40039 -45.5 -25.9004 -64.6006c-14.7998 -16.2998 -33.7002 -28.2998 -50.2998 -38.8994c-17.5 -11.1006 -32.7998 -21.9004 -36.2998 -32.4004h100.8z" />
+ <glyph glyph-name="subscript" unicode="&#xf12c;"
+d="M272 416c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-28l-52.5996 -75.7002l58.5996 -84.2998h22c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-62.4004c-5.5 0 -10.5996 2.7998 -13.5 7.5l-45.6992 72.5996
+c-2.30078 3.30078 -4.40039 7 -6.2002 10.2002c-1.7002 -3.2998 -3.7002 -7 -5.90039 -10.5996l-44.7002 -72.1006c-2.89941 -4.69922 -8.09961 -7.59961 -13.5996 -7.59961h-64c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h26.2998l56.6006 82.7002
+l-52.8008 77.2998h-30.0996c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h68.9004c5.5 0 10.6992 -2.90039 13.5996 -7.59961l39.9004 -65.1006c2 -3.59961 4 -7.2002 5.69922 -10.3994c1.7002 3.19922 3.90039 6.89941 6.2002 10.5l40.2998 65
+c2.90039 4.69922 8.10059 7.59961 13.6006 7.59961h67.7998zM496 32c8.7998 0 16 -7.2002 16 -15.9004v-32c0 -8.7998 -7.2002 -16 -16 -16h-168.1c-8.10059 0 -14.8008 5.90039 -15.9004 13.9004c-0.799805 6.2998 -1.40039 12.5996 -1.40039 19.2998
+c0 103 119.4 123.8 119.4 160c0 11 -6.90039 23.9004 -26.2998 23.9004c-12.9004 0 -23.7002 -7.7998 -31.7002 -18.2002c-5.09961 -6.7002 -14.5996 -8.2998 -21.5996 -3.59961l-30.3008 20.2998c-7.19922 4.89941 -9.2998 14.5996 -4.59961 21.8994
+c18 28.4004 51.9004 50.4004 94.4004 50.4004c47.0996 0 97.7998 -27.5 97.7998 -88.0996c0 -24.4004 -8.40039 -45.5 -25.9004 -64.6006c-14.7998 -16.2998 -33.7002 -28.2998 -50.2998 -38.8994c-17.5 -11.1006 -32.7998 -21.9004 -36.2998 -32.4004h100.8z" />
+ <glyph glyph-name="eraser" unicode="&#xf12d;"
+d="M497.941 174.059l-142.059 -142.059h144.117c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-356c-10.9756 0 -26.1816 6.29883 -33.9424 14.0586l-96 96c-18.7441 18.7451 -18.7441 49.1377 0 67.8828l256 256
+c18.7471 18.7451 49.1387 18.7441 67.8838 0l160 -160c18.7441 -18.7451 18.7441 -49.1377 0 -67.8828zM195.314 236.686l-124.687 -124.686l80 -80h114.745l67.3135 67.3135z" />
+ <glyph glyph-name="puzzle-piece" unicode="&#xf12e;" horiz-adv-x="576"
+d="M519.442 159.349c37.5957 0 56.5576 -31.5928 56.5576 -65.792c0 -33.5469 -19.2881 -61.5566 -54.9229 -61.5557c-39.8848 0 -50.3457 36.1523 -86.3086 36.1523c-60.5518 0 -25.8262 -120.102 -25.8262 -120.102c-51.5557 0 -181.23 -35.0732 -181.23 25.7305
+c0 35.8271 36.2881 46.25 36.2881 85.9844c0 35.501 -28.1152 54.7178 -61.7881 54.7178c-34.3271 0 -63.5771 -18.8906 -63.5771 -56.3467c0 -41.3633 40 -58.998 40 -81.4707c0 -69.709 -178.635 -28.6621 -178.635 -28.6621v333.237s175.885 -40.9609 175.884 28.6621
+c0 22.4727 -31.7109 40.3857 -31.7109 81.75c0 37.4551 31.7119 56.3457 66.3662 56.3457c33.3457 0 61.4609 -19.2158 61.4609 -54.7178c0 -39.7354 -36.2881 -50.1582 -36.2881 -85.9854c0 -83.2969 196.288 -3.29688 196.288 -3.29688
+s-54.5908 -176.244 5.38379 -176.244c22.5586 0 40.5391 31.5928 82.0586 31.5928z" />
+ <glyph glyph-name="microphone" unicode="&#xf130;" horiz-adv-x="352"
+d="M176 96c-53.0195 0 -96 42.9805 -96 96v160c0 53.0195 42.9805 96 96 96s96 -42.9805 96 -96v-160c0 -53.0195 -42.9805 -96 -96 -96zM336 256c8.83984 0 16 -7.16016 16 -16v-48c0 -88.9004 -66.29 -162.47 -152 -174.23v-33.7695h56c8.83984 0 16 -7.16016 16 -16v-16
+c0 -8.83984 -7.16016 -16 -16 -16h-160c-8.83984 0 -16 7.16016 -16 16v16c0 8.83984 7.16016 16 16 16h56v34.1504c-88.0303 12.1396 -152 92.0498 -152 181.689v40.1602c0 8.83984 7.16016 16 16 16h16c8.83984 0 16 -7.16016 16 -16v-42.2998
+c0 -66.8105 48.71 -126.59 115.21 -133.08c76.2998 -7.44043 140.79 52.5801 140.79 127.38v48c0 8.83984 7.16016 16 16 16h16z" />
+ <glyph glyph-name="microphone-slash" unicode="&#xf131;" horiz-adv-x="640"
+d="M633.82 -10.0996c6.97949 -5.43066 8.22949 -15.4805 2.81934 -22.4502l-19.6396 -25.2705c-5.42969 -6.97949 -15.4805 -8.23926 -22.46 -2.80957l-588.36 454.729c-6.97949 5.43066 -8.22949 15.4805 -2.80957 22.4502l19.6396 25.2705
+c5.41992 6.97949 15.4805 8.22949 22.46 2.80957l178.54 -137.99v45.3604c0 53.0195 42.9805 96 96 96c53.0205 0 96 -42.9805 96 -96v-160.01c0 -10.4502 -2.17969 -20.2705 -5.2793 -29.6699l26.5498 -20.5205c6.75977 15.4004 10.7197 32.2803 10.7197 50.2002v48
+c0 8.83984 7.16016 16 16 16h16c8.83984 0 16 -7.16016 16 -16v-48c0 -29.0098 -7.38965 -56.1299 -19.9805 -80.1396zM400 -16c8.83984 0 16 -7.16016 16 -16v-16c0 -8.83984 -7.16016 -16 -16 -16h-160c-8.83984 0 -16 7.16016 -16 16v16c0 8.83984 7.16016 16 16 16h56
+v34.1504c-88.0303 12.1396 -152 92.0498 -152 181.689v6.85059l52.0303 -40.2305c12.4395 -53.2197 55.3301 -96.4004 111.18 -101.85c6.94043 -0.669922 13.6396 -0.200195 20.3496 0.199219l50.1104 -38.7295c-10.8203 -3.77051 -22.0098 -6.70996 -33.6699 -8.31055
+v-33.7695h56z" />
+ <glyph glyph-name="calendar" unicode="&#xf133;" horiz-adv-x="448"
+d="M12 256h424c6.59961 0 12 -5.40039 12 -12v-260c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v260c0 6.59961 5.40039 12 12 12zM448 300c0 -6.59961 -5.40039 -12 -12 -12h-424c-6.59961 0 -12 5.40039 -12 12v36c0 26.5 21.5 48 48 48h48v52
+c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h128v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h48c26.5 0 48 -21.5 48 -48v-36z" />
+ <glyph glyph-name="fire-extinguisher" unicode="&#xf134;" horiz-adv-x="448"
+d="M434.027 421.671c7.31445 1.21875 13.9727 -4.4209 13.9727 -11.8369v-115.668c0 -7.41602 -6.6582 -13.0557 -13.9727 -11.8369l-168 28c-11.7305 1.95508 -10.0273 14.6973 -10.0273 17.6709h-40v-27.0303c41.4043 -10.6582 72 -48.2383 72 -92.9697v-248
+c0 -13.2549 -10.7451 -24 -24 -24h-144c-13.2549 0 -24 10.7451 -24 24v246.795c0 44.8945 30.457 83.2666 72 94.1289v27.0762c-61.0361 0 -92.9424 7.00977 -121.711 -64.9141c-4.91699 -12.2949 -18.8789 -18.2959 -31.1963 -13.3701
+c-12.3066 4.92285 -18.293 18.8906 -13.3701 31.1973c14.668 36.6709 38.0107 77.833 90.0498 90.8838c-14.1406 36.5273 12.793 76.2031 52.2275 76.2031c37.4463 0 64.3525 -36.1084 53.668 -72h58.332c0 4.2002 -1.30664 15.7822 10.0273 17.6709zM144 376
+c8.82227 0 16 7.17773 16 16s-7.17773 16 -16 16s-16 -7.17773 -16 -16s7.17773 -16 16 -16z" />
+ <glyph glyph-name="rocket" unicode="&#xf135;" horiz-adv-x="511"
+d="M505.05 428.9c6.9502 -32.2002 6.9502 -57.4004 6.85059 -82.6006c0 -102.689 -55.4102 -164.79 -128 -211.09v-104.41v-0.0400391c0 -16.3516 -11.8721 -35.5527 -26.5 -42.8594l-98.7002 -49.3906c-2.79004 -1.38965 -7.58398 -2.5166 -10.7002 -2.5166
+c-13.248 0 -24 10.752 -24 24v0.00683594v103.84l-22.4697 -22.4697c-5.17383 -5.1748 -15.3125 -9.375 -22.6299 -9.375c-7.31836 0 -17.4561 4.2002 -22.6309 9.375l-50.8994 50.9102c-5.17285 5.17285 -9.37012 15.3096 -9.37012 22.625s4.19727 17.4512 9.37012 22.625
+l22.4697 22.4697h-103.77h-0.0126953c-13.248 0 -24 10.752 -24 24c0 3.12012 1.12988 7.91797 2.52246 10.71l49.4199 98.7998c7.32324 14.6094 26.5283 26.4766 42.8701 26.4902h104.2c46.1895 72.7998 108.09 128 211.29 128c25.0996 0 50.29 0 82.4893 -6.90039
+c5.54395 -1.19043 11.0098 -6.65527 12.2002 -12.1992zM384 280c22.0801 0 40 17.9199 40 40s-17.9199 40 -40 40s-40 -17.9199 -40 -40s17.9199 -40 40 -40z" />
+ <glyph glyph-name="chevron-circle-left" unicode="&#xf137;"
+d="M256 -56c-137 0 -248 111 -248 248s111 248 248 248s248 -111 248 -248s-111 -248 -248 -248zM142.1 175l135.5 -135.5c9.40039 -9.40039 24.6006 -9.40039 33.9004 0l17 17c9.40039 9.40039 9.40039 24.5996 0 33.9004l-101.6 101.6l101.6 101.6
+c9.40039 9.40039 9.40039 24.6006 0 33.9004l-17 17c-9.40039 9.40039 -24.5996 9.40039 -33.9004 0l-135.5 -135.5c-9.39941 -9.40039 -9.39941 -24.5996 0 -34z" />
+ <glyph glyph-name="chevron-circle-right" unicode="&#xf138;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM369.9 209l-135.5 135.5c-9.40039 9.40039 -24.6006 9.40039 -33.9004 0l-17 -17c-9.40039 -9.40039 -9.40039 -24.5996 0 -33.9004l101.6 -101.6l-101.6 -101.6
+c-9.40039 -9.40039 -9.40039 -24.6006 0 -33.9004l17 -17c9.40039 -9.40039 24.5996 -9.40039 33.9004 0l135.5 135.5c9.39941 9.40039 9.39941 24.5996 0 34z" />
+ <glyph glyph-name="chevron-circle-up" unicode="&#xf139;"
+d="M8 192c0 137 111 248 248 248s248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248zM239 305.9l-135.5 -135.5c-9.40039 -9.40039 -9.40039 -24.6006 0 -33.9004l17 -17c9.40039 -9.40039 24.5996 -9.40039 33.9004 0l101.6 101.6l101.6 -101.6
+c9.40039 -9.40039 24.6006 -9.40039 33.9004 0l17 17c9.40039 9.40039 9.40039 24.5996 0 33.9004l-135.5 135.5c-9.40039 9.39941 -24.5996 9.39941 -34 0z" />
+ <glyph glyph-name="chevron-circle-down" unicode="&#xf13a;"
+d="M504 192c0 -137 -111 -248 -248 -248s-248 111 -248 248s111 248 248 248s248 -111 248 -248zM273 78.0996l135.5 135.5c9.40039 9.40039 9.40039 24.6006 0 33.9004l-17 17c-9.40039 9.40039 -24.5996 9.40039 -33.9004 0l-101.6 -101.6l-101.6 101.6
+c-9.40039 9.40039 -24.6006 9.40039 -33.9004 0l-17 -17c-9.40039 -9.40039 -9.40039 -24.5996 0 -33.9004l135.5 -135.5c9.40039 -9.39941 24.5996 -9.39941 34 0z" />
+ <glyph glyph-name="anchor" unicode="&#xf13d;" horiz-adv-x="575"
+d="M12.9707 96c-10.6904 0 -16.0449 12.9258 -8.48535 20.4854l67.0283 67.0283c4.6875 4.68652 12.2852 4.68652 16.9717 0l67.0283 -67.0283c7.56055 -7.55957 2.20605 -20.4854 -8.48438 -20.4854h-35.1465c20.2969 -54.3359 85.1816 -86.6162 144.117 -94.0146v190.015
+h-52c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h52v5.46973c-37.2842 13.1807 -64 48.7324 -64 90.5303c0 53.4746 43.7227 96.7393 97.3701 95.9902c52.2354 -0.728516 94.6348 -43.7627 94.6289 -96.002
+c-0.00488281 -41.793 -26.7188 -77.3398 -64 -90.5186v-5.46973h52c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-52v-190.015c59.1543 7.42676 123.827 39.6973 144.117 94.0146h-35.1465c-10.6904 0 -16.0449 12.9248 -8.48438 20.4854
+l67.0283 67.0283c4.6875 4.68652 12.2852 4.68652 16.9717 0l67.0283 -67.0283c7.56055 -7.55957 2.20605 -20.4854 -8.48438 -20.4854h-32.3945c-21.7822 -102.62 -136.406 -160 -242.635 -160c-106.056 0 -220.828 57.2646 -242.635 160h-32.3945zM288 384
+c-17.6445 0 -32 -14.3555 -32 -32s14.3555 -32 32 -32s32 14.3555 32 32s-14.3555 32 -32 32z" />
+ <glyph glyph-name="unlock-alt" unicode="&#xf13e;" horiz-adv-x="448"
+d="M400 192c26.5 0 48 -21.5 48 -48v-160c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v160c0 26.5 21.5 48 48 48h24v102.5c0 84 67.5 153.2 151.5 153.5s152.5 -68 152.5 -152v-16c0 -13.2998 -10.7002 -24 -24 -24h-32c-13.2998 0 -24 10.7002 -24 24v16
+c0 39.9004 -32.7002 72.4004 -72.7002 72c-39.5996 -0.400391 -71.2998 -33.2998 -71.2998 -72.9004v-103.1h248zM264 40v48c0 22.0996 -17.9004 40 -40 40s-40 -17.9004 -40 -40v-48c0 -22.0996 17.9004 -40 40 -40s40 17.9004 40 40z" />
+ <glyph glyph-name="bullseye" unicode="&#xf140;" horiz-adv-x="496"
+d="M248 440c136.97 0 248 -111.03 248 -248s-111.03 -248 -248 -248s-248 111.03 -248 248s111.03 248 248 248zM248 8c101.71 0 184 82.3096 184 184c0 101.71 -82.3096 184 -184 184c-101.71 0 -184 -82.3096 -184 -184c0 -101.71 82.3096 -184 184 -184zM248 320
+c70.6904 0 128 -57.3096 128 -128s-57.3096 -128 -128 -128s-128 57.3096 -128 128s57.3096 128 128 128zM248 128c35.29 0 64 28.71 64 64s-28.71 64 -64 64s-64 -28.71 -64 -64s28.71 -64 64 -64z" />
+ <glyph glyph-name="ellipsis-h" unicode="&#xf141;"
+d="M328 192c0 -39.7998 -32.2002 -72 -72 -72s-72 32.2002 -72 72s32.2002 72 72 72s72 -32.2002 72 -72zM432 264c39.7998 0 72 -32.2002 72 -72s-32.2002 -72 -72 -72s-72 32.2002 -72 72s32.2002 72 72 72zM80 264c39.7998 0 72 -32.2002 72 -72s-32.2002 -72 -72 -72
+s-72 32.2002 -72 72s32.2002 72 72 72z" />
+ <glyph glyph-name="ellipsis-v" unicode="&#xf142;" horiz-adv-x="192"
+d="M96 264c39.7998 0 72 -32.2002 72 -72s-32.2002 -72 -72 -72s-72 32.2002 -72 72s32.2002 72 72 72zM24 368c0 39.7998 32.2002 72 72 72s72 -32.2002 72 -72s-32.2002 -72 -72 -72s-72 32.2002 -72 72zM24 16c0 39.7998 32.2002 72 72 72s72 -32.2002 72 -72
+s-32.2002 -72 -72 -72s-72 32.2002 -72 72z" />
+ <glyph glyph-name="rss-square" unicode="&#xf143;" horiz-adv-x="448"
+d="M400 416c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h352zM112 32c26.5098 0 48 21.4902 48 48s-21.4902 48 -48 48s-48 -21.4902 -48 -48s21.4902 -48 48 -48zM269.533 32
+c6.53516 0 11.7764 5.46777 11.4248 11.9941c-5.9668 110.428 -94.418 198.99 -204.964 204.964c-6.52637 0.351562 -11.9941 -4.88965 -11.9941 -11.4248v-34.335c0 -6.00977 4.63574 -11.0508 10.6328 -11.4414c79.8799 -5.20312 143.909 -69.0732 149.123 -149.123
+c0.391602 -5.99805 5.43066 -10.6338 11.4424 -10.6338h34.335zM372.56 32c6.4541 0 11.6641 5.33789 11.4326 11.7871c-5.99512 167.014 -140.375 302.18 -308.205 308.205c-6.44922 0.231445 -11.7871 -4.97852 -11.7871 -11.4326v-34.334
+c0 -6.16016 4.88184 -11.1748 11.0391 -11.4277c136.556 -5.59863 246.162 -115.225 251.76 -251.76c0.251953 -6.15625 5.2666 -11.0381 11.4268 -11.0381h34.334z" />
+ <glyph glyph-name="play-circle" unicode="&#xf144;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM371.7 168c16.3994 9.09961 16.3994 32.7998 0 42l-176 107c-15.9004 8.7998 -35.7002 -2.59961 -35.7002 -21v-208c0 -18.5 19.9004 -29.7998 35.7002 -21z" />
+ <glyph glyph-name="minus-square" unicode="&#xf146;" horiz-adv-x="448"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM92 152h264c6.59961 0 12 5.40039 12 12v56c0 6.59961 -5.40039 12 -12 12h-264c-6.59961 0 -12 -5.40039 -12 -12v-56
+c0 -6.59961 5.40039 -12 12 -12z" />
+ <glyph glyph-name="check-square" unicode="&#xf14a;" horiz-adv-x="448"
+d="M400 -32h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h352c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48zM195.314 66.0586l184 184c6.24707 6.24805 6.24707 16.3799 0 22.627l-22.6279 22.6279
+c-6.24707 6.24707 -16.3789 6.24805 -22.6279 0l-150.059 -150.059l-70.0586 70.0596c-6.24805 6.24707 -16.3799 6.24707 -22.6279 0l-22.6279 -22.6279c-6.24707 -6.24707 -6.24707 -16.3789 0 -22.627l104 -104c6.24902 -6.25 16.3799 -6.25 22.6289 -0.000976562z" />
+ <glyph glyph-name="pen-square" unicode="&#xf14b;" horiz-adv-x="448"
+d="M400 -32h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48zM238.1 270.1l-135.699 -135.699l-6.30078 -57.1006c-0.799805 -7.59961 5.60059 -14.0996 13.3008 -13.2998l57.0996 6.2998l135.7 135.7
+c2.2998 2.2998 2.2998 6.09961 0 8.5l-55.5 55.5c-2.5 2.40039 -6.2998 2.40039 -8.60059 0.0996094zM345 282.9l-30.0996 30.0996c-9.40039 9.40039 -24.6006 9.40039 -33.9004 0l-23.0996 -23.0996c-2.30078 -2.30078 -2.30078 -6.10059 0 -8.5l55.5 -55.5
+c2.2998 -2.30078 6.09961 -2.30078 8.5 0l23.0996 23.0996c9.2998 9.2998 9.2998 24.5 0 33.9004z" />
+ <glyph glyph-name="share-square" unicode="&#xf14d;" horiz-adv-x="576"
+d="M568.482 270.552l-144.004 -135.984c-15.1787 -14.335 -40.4785 -3.70703 -40.4785 17.4473v71.9629c-144.575 -0.969727 -205.566 -35.1123 -164.775 -171.353c4.4834 -14.9727 -12.8457 -26.5674 -25.0059 -17.3301
+c-38.9668 29.5996 -74.2188 86.2168 -74.2188 143.366c0 143.937 117.599 172.5 264 173.312v72.0156c0 21.1738 25.3174 31.7676 40.4785 17.4473l144.004 -135.987c10.0195 -9.46289 10.0273 -25.4248 0 -34.8965zM384 68.8721c0 7.34473 6.53027 12.9053 13.7998 11.8594
+c2.81152 -0.405273 7.39844 -0.734375 10.2393 -0.734375c6.80469 0 17.5342 1.8418 23.9502 4.11133c7.81348 2.76367 16.0107 -3.01465 16.0107 -11.3027v-88.8057c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48
+h121.033c12.5508 0 16.6748 -16.8301 5.54492 -22.6309c-18.7773 -9.78613 -36.0615 -22.1084 -51.0137 -37.6758c-1.95312 -2.03711 -5.82715 -3.69141 -8.64844 -3.69336h-50.916v-320h320v68.8721z" />
+ <glyph glyph-name="compass" unicode="&#xf14e;" horiz-adv-x="496"
+d="M225.38 214.63c12.4902 12.4902 32.75 12.4902 45.25 0s12.5 -32.75 0 -45.25c-12.4902 -12.5 -32.7598 -12.5 -45.25 0c-12.5 12.4902 -12.5 32.75 0 45.25zM248 440c136.97 0 248 -111.03 248 -248s-111.03 -248 -248 -248s-248 111.03 -248 248s111.03 248 248 248z
+M374.14 291.95c7.61035 16.6494 -9.54004 33.7998 -26.1895 26.2002l-144.34 -65.9707c-5.97461 -2.73047 -13.04 -9.7959 -15.7705 -15.7695l-65.9795 -144.351c-7.61035 -16.6494 9.5498 -33.8096 26.1992 -26.1992l144.341 65.9697
+c5.97363 2.73047 13.0391 9.7959 15.7695 15.7695z" />
+ <glyph glyph-name="caret-square-down" unicode="&#xf150;" horiz-adv-x="448"
+d="M448 368v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48zM92.5 227.5l123 -123c4.7002 -4.7002 12.2998 -4.7002 17 0l123 123c7.59961 7.59961 2.2002 20.5 -8.5 20.5h-246
+c-10.7002 0 -16.0996 -12.9004 -8.5 -20.5z" />
+ <glyph glyph-name="caret-square-up" unicode="&#xf151;" horiz-adv-x="448"
+d="M0 16v352c0 26.5098 21.4902 48 48 48h352c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48zM355.515 156.485l-123.029 123.029c-4.68652 4.68652 -12.2842 4.68652 -16.9717 0l-123.028 -123.029
+c-7.56055 -7.56055 -2.20605 -20.4854 8.48438 -20.4854h246.06c10.6904 0 16.0449 12.9258 8.48535 20.4854z" />
+ <glyph glyph-name="caret-square-right" unicode="&#xf152;" horiz-adv-x="448"
+d="M48 416h352c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48zM188.485 60.4854l123.028 123.028c4.68652 4.68652 4.68652 12.2842 0 16.9717l-123.028 123.029
+c-7.56055 7.56055 -20.4854 2.20605 -20.4854 -8.48438v-246.06c0 -10.6904 12.9258 -16.0449 20.4854 -8.48535z" />
+ <glyph glyph-name="euro-sign" unicode="&#xf153;" horiz-adv-x="319"
+d="M310.706 34.2354l8.81836 -44.4902c1.23828 -6.24902 -2.62109 -12.3623 -8.78809 -13.957c-12.5391 -3.24414 -34.8008 -7.78809 -61.1016 -7.78809c-104.371 0 -182.496 65.3076 -207.521 155.64h-30.1143c-6.62695 0 -12 5.37305 -12 12v28.3604
+c0 6.62695 5.37305 12 12 12h21.3877c-1 12.958 -0.828125 28.6377 0.181641 42.2451h-21.5693c-6.62695 0 -12 5.37305 -12 12v29.7549c0 6.62695 5.37305 12 12 12h33.0752c28.9551 83.748 107.376 144 204.56 144c21.0752 0 40.582 -2.91211 52.6865 -5.20703
+c6.86035 -1.30078 11.1475 -8.17578 9.32617 -14.917l-11.9912 -44.3682c-1.65527 -6.125 -7.78613 -9.89062 -14.002 -8.62305c-9.28711 1.89551 -23.3652 4.14551 -37.8516 4.14551c-54.9287 0 -96.9854 -30.0391 -117.619 -75.0303h138.278
+c7.66211 0 13.3613 -7.08203 11.7227 -14.5664l-6.51172 -29.7549c-1.13965 -5.20703 -6.3916 -9.43359 -11.7227 -9.43359v0h-146.593c-1.55176 -13.958 -1.34766 -27.917 -0.137695 -42.2451h134.237c7.68945 0 13.3936 -7.12891 11.708 -14.6309l-6.37305 -28.3604
+c-1.16211 -5.17188 -6.40723 -9.36914 -11.708 -9.36914h-113.689c19.5322 -50.6582 64.6982 -85.4482 121.462 -85.4482c18.0039 0 34.7334 2.97363 45.4258 5.41211c6.58887 1.50391 13.1094 -2.73828 14.4238 -9.36816z" />
+ <glyph glyph-name="pound-sign" unicode="&#xf154;" horiz-adv-x="320"
+d="M308 96c6.62695 0 12 -5.37305 12 -12v-104c0 -6.62695 -5.37305 -12 -12 -12h-296c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h36v128h-28c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h28v66.0391
+c0 73.2188 58.0264 125.961 139.931 125.961c48.6455 0 85.1934 -22.5596 101.575 -34.9277c5.39844 -4.07617 6.35254 -11.8057 2.11914 -17.0811l-28.4932 -35.5137c-3.7998 -4.73535 -10.5371 -5.89746 -15.6875 -2.68457
+c-11.7744 7.34375 -33.9941 18.8486 -57.6523 18.8486c-37.2305 0 -61.792 -24.8193 -61.792 -57.0859v-63.5557h84c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-84v-126.848h122.505v50.8477c0 6.62695 5.37305 12 12 12h45.4951z" />
+ <glyph glyph-name="dollar-sign" unicode="&#xf155;" horiz-adv-x="288"
+d="M209.2 214.6c57.8994 -16.8994 94 -80.0996 72.5 -141.699c-15.4004 -44.1006 -59.1006 -71.8008 -105.7 -72.7002v-48.2002c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v48c-31.4004 0.0996094 -62 10.7998 -86.5 30
+c-7.90039 6.09961 -8.90039 17.5996 -1.7998 24.5l34.7998 34c5.2002 5.09961 13.4004 6.09961 19.5 2c10 -6.7998 22 -10.5 34.2002 -10.5h66.2998c16.2998 0 29.5 13.2002 29.5 29.5c0 13 -8.7002 24.5996 -21.2002 28.2998l-102.5 30
+c-44.3994 13 -79.5996 50.5 -83.7998 96.6006c-5.90039 64.8994 45.2998 119.6 109 119.6h2.5v48c0 8.7998 7.2002 16 16 16h32c8.7998 0 16 -7.2002 16 -16v-48c31.4004 -0.0996094 62 -10.7998 86.5 -30c7.90039 -6.09961 8.90039 -17.5996 1.7998 -24.5l-34.7998 -34
+c-5.2002 -5.09961 -13.4004 -6.09961 -19.5 -2c-10 6.7998 -22 10.5 -34.2002 10.5h-66.2998c-16.2998 0 -29.5 -13.2002 -29.5 -29.5c0 -13 8.7002 -24.7002 21.2002 -28.2998z" />
+ <glyph glyph-name="rupee-sign" unicode="&#xf156;" horiz-adv-x="320"
+d="M308 352h-72.9424c5.97266 -9.75391 10.7666 -20.459 14.252 -32h58.6904c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-52.8105c-7.1748 -74.5107 -61.8193 -125.566 -138.318 -127.906l150.882 -139.275
+c8.02734 -7.41016 2.78516 -20.8184 -8.13867 -20.8184h-82.5625c-2.58984 0 -6.23535 1.42578 -8.13867 3.18164l-165.052 152.356c-2.46094 2.27148 -3.86133 5.46875 -3.86133 8.81836v53.0117c0 6.62695 5.37305 12 12 12h84c41.7959 0 68.54 22.5459 74.7568 58.6318
+h-158.757c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h146.25c-12.709 17.2939 -33.6621 27.252 -60.9697 27.252h-85.2803c-6.62695 0 -12 5.37305 -12 12v44.748c0 6.62695 5.37305 12 12 12h296c6.62695 0 12 -5.37305 12 -12v-40
+c0 -6.62695 -5.37305 -12 -12 -12z" />
+ <glyph glyph-name="yen-sign" unicode="&#xf157;" horiz-adv-x="384"
+d="M351.2 416c9.09961 0 14.8994 -9.7002 10.5996 -17.5996l-80.0996 -150.4h58.2998c6.59961 0 12 -5.40039 12 -12v-32c0 -6.59961 -5.40039 -12 -12 -12h-88.2002l-19.7998 -37.2002v-26.7998h108c6.59961 0 12 -5.40039 12 -12v-32c0 -6.59961 -5.40039 -12 -12 -12
+h-108v-92c0 -6.59961 -5.40039 -12 -12 -12h-56c-6.59961 0 -12 5.40039 -12 12v92h-108c-6.59961 0 -12 5.40039 -12 12v32c0 6.59961 5.40039 12 12 12h108v26.7998l-19.7998 37.2002h-88.2002c-6.59961 0 -12 5.40039 -12 12v32c0 6.59961 5.40039 12 12 12h58.2998
+l-80.0996 150.4c-4.2002 7.89941 1.5 17.5996 10.5996 17.5996h65.2002c4.59961 0 8.7998 -2.59961 10.7998 -6.7002l55.4004 -113.2c14.5 -34.6992 27.0996 -71.8994 27.0996 -71.8994h1.2998s12.6006 37.2002 27.1006 71.8994l55.3994 113.2
+c2 4.10059 6.2002 6.7002 10.8008 6.7002h65.2998z" />
+ <glyph glyph-name="ruble-sign" unicode="&#xf158;" horiz-adv-x="384"
+d="M239.36 128h-92.8008v-32h161.44c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-161.44v-52c0 -6.62695 -5.37305 -12 -12 -12h-58.5596c-6.62695 0 -12 5.37305 -12 12v52h-52c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h52
+v32h-52c-6.62695 0 -12 5.37305 -12 12v45.3682c0 6.62695 5.37305 12 12 12h52v206.632c0 6.62695 5.37305 12 12 12h163.36c85.1201 0 144.64 -57.5996 144.64 -143.071c0 -85.4707 -59.5195 -144.929 -144.64 -144.929zM146.56 347.252v-149.884h77.4404
+c48 0 76.1602 29.7285 76.1602 75.5605c0 45.2129 -28.1602 74.3232 -74.8799 74.3232h-78.7207z" />
+ <glyph glyph-name="won-sign" unicode="&#xf159;" horiz-adv-x="576"
+d="M564 256h-62.7002l-7.39941 -32h70.0996c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-84.9004l-42.0996 -182.7c-1.2998 -5.39941 -6.09961 -9.2998 -11.7002 -9.2998h-56.7998c-5.59961 0 -10.4004 3.90039 -11.7002 9.2998l-42.3994 182.7
+h-55.1006l-42.2998 -182.7c-1.2998 -5.39941 -6.09961 -9.2998 -11.7002 -9.2998h-56.7998c-5.59961 0 -10.5 3.90039 -11.7002 9.40039l-40.8994 182.6h-83.9004c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h69.5l-7.2002 32h-62.2998
+c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h48l-18.0996 80.7002c-1.7002 7.5 4 14.5996 11.6992 14.5996h42.1006c5.7002 0 10.7002 -4 11.7998 -9.59961l17.5 -85.7002h108.7l20 86c1.2998 5.5 6.09961 9.2998 11.7002 9.2998h44
+c5.59961 0 10.3994 -3.7998 11.6992 -9.2998l19.7002 -86h109.9l14.3994 85.7998c1.10059 5.5 6 9.5 11.7002 9.5h46.1006c7.69922 0 13.3994 -7.2002 11.6992 -14.7002l-18.5996 -80.5996h48c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12zM183.8 106
+l12.6006 54h-38.8008l11 -54c5.10059 -25.2002 6.80078 -47.2002 6.80078 -47.2002h1.09961c0.5 0 1.09961 21.4004 7.2998 47.2002zM211.3 224l7.5 32h-80.7998l6.5 -32h66.7998zM274.2 224h25.3994l-2 8.59961c-1.89941 8 -3.5 16 -4.7998 23.4004h-11.7998
+c-1.2998 -7.40039 -2.90039 -15.4004 -4.7998 -23.4004zM405.1 106l11.5 54h-39.0996l12.4004 -54c6.19922 -25.7998 6.69922 -47.2002 7.2998 -47.2002h1.09961s1.7002 22 6.7998 47.2002zM430.3 224l6.90039 32h-81.6006l7.30078 -32h67.3994z" />
+ <glyph glyph-name="file" unicode="&#xf15b;" horiz-adv-x="384"
+d="M224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136zM384 326.1v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7l97.9004 -98
+c4.5 -4.5 7 -10.5996 7 -16.9004z" />
+ <glyph glyph-name="file-alt" unicode="&#xf15c;" horiz-adv-x="384"
+d="M224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136zM288 76v8c0 6.59961 -5.40039 12 -12 12h-168c-6.59961 0 -12 -5.40039 -12 -12v-8
+c0 -6.59961 5.40039 -12 12 -12h168c6.59961 0 12 5.40039 12 12zM288 140v8c0 6.59961 -5.40039 12 -12 12h-168c-6.59961 0 -12 -5.40039 -12 -12v-8c0 -6.59961 5.40039 -12 12 -12h168c6.59961 0 12 5.40039 12 12zM288 212c0 6.59961 -5.40039 12 -12 12h-168
+c-6.59961 0 -12 -5.40039 -12 -12v-8c0 -6.59961 5.40039 -12 12 -12h168c6.59961 0 12 5.40039 12 12v8zM384 326.1v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7l97.9004 -98c4.5 -4.5 7 -10.5996 7 -16.9004z" />
+ <glyph glyph-name="sort-alpha-down" unicode="&#xf15d;" horiz-adv-x="424"
+d="M400.7 20.9004c6.59961 0 12 -5.30078 12 -12v-28.9004c0 -6.59961 -5.40039 -12 -12 -12h-129.4c-6.59961 0 -12 5.40039 -12 12v21.9004c0 2.5 0.799805 4.89941 2.2002 6.89941l67.2002 95.2002c0.799805 1.09961 1.59961 2.09961 2.2998 3.09961h-56.5
+c-6.59961 0 -12 5.40039 -12 12v28.9004c0 6.59961 5.40039 12 12 12h125.1c6.60059 0 12 -5.40039 12 -12v-21.4004c0 -2.5 -0.799805 -4.89941 -2.19922 -6.89941l-67.5 -95.7002c-0.800781 -1.09961 -1.60059 -2.09961 -2.30078 -3.09961h61.1006zM176 80
+c14.2002 0 21.2998 -17.2998 11.2998 -27.2998l-80 -80c-6.2002 -6.2002 -16.3994 -6.2002 -22.5996 0l-80 80c-10.1006 10.0996 -2.90039 27.2998 11.2998 27.2998h48v320c0 8.7998 7.2002 16 16 16h32c8.7998 0 16 -7.2002 16 -16v-320h48zM424.2 239.9
+c2.7002 -7.80078 -3.10059 -15.9004 -11.5 -15.9004h-35.7002c-4.94629 0.00292969 -10.0986 3.85645 -11.5 8.59961l-8.2998 28.3008h-42.9004l-8.09961 -28.2002c-1.40039 -5.2002 -6.10059 -8.7002 -11.5 -8.7002h-35.7002c-8.2998 0 -14 8.09961 -11.4004 15.9004
+l57.1006 168c1.7002 4.7998 6.2998 8.09961 11.3994 8.09961h39.6006c5.2002 0 9.7002 -3.2002 11.3994 -8.09961zM329.2 311.4h13.3994l-6.59961 22.8994z" />
+ <glyph glyph-name="sort-alpha-up" unicode="&#xf15e;" horiz-adv-x="424"
+d="M107.3 411.3l80 -80c10.1006 -10.0996 2.90039 -27.2998 -11.2998 -27.2998h-48v-320c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v320h-48c-14.2002 0 -21.2998 17.2998 -11.2998 27.2998l80 80c6.2002 6.2002 16.3994 6.2002 22.5996 0z
+M400.7 20.9004c6.59961 0 12 -5.30078 12 -12v-28.9004c0 -6.59961 -5.40039 -12 -12 -12h-129.4c-6.59961 0 -12 5.40039 -12 12v21.9004c0 2.5 0.799805 4.89941 2.2002 6.89941l67.2002 95.2002c0.799805 1.09961 1.59961 2.09961 2.2998 3.09961h-56.5
+c-6.59961 0 -12 5.40039 -12 12v28.9004c0 6.59961 5.40039 12 12 12h125.1c6.60059 0 12 -5.40039 12 -12v-21.4004c0 -2.5 -0.799805 -4.89941 -2.19922 -6.89941l-67.5 -95.7002c-0.800781 -1.09961 -1.60059 -2.09961 -2.30078 -3.09961h61.1006zM424.2 239.9
+c2.7002 -7.80078 -3.10059 -15.9004 -11.5 -15.9004h-35.7002c-4.94629 0.00292969 -10.0986 3.85645 -11.5 8.59961l-8.2998 28.3008h-42.9004l-8.09961 -28.2002c-1.40039 -5.2002 -6.10059 -8.7002 -11.5 -8.7002h-35.7002c-8.2998 0 -14 8.09961 -11.4004 15.9004
+l57.1006 168c1.7002 4.7998 6.2998 8.09961 11.3994 8.09961h39.6006c5.2002 0 9.7002 -3.2002 11.3994 -8.09961zM329.2 311.4h13.3994l-6.59961 22.8994z" />
+ <glyph glyph-name="sort-amount-down" unicode="&#xf160;"
+d="M187.298 52.6855l-79.9834 -80.002c-6.24805 -6.24707 -16.3838 -6.24414 -22.6279 0l-79.9814 80.002c-10.0703 10.0703 -2.89844 27.3145 11.3135 27.3145h47.9814v320c0 8.83691 7.16309 16 16 16h32c8.83691 0 16 -7.16309 16 -16v-320h47.9844
+c14.2402 0 21.3623 -17.2637 11.3135 -27.3145zM240 352c-8.83691 0 -16 7.16309 -16 16v32c0 8.83691 7.16309 16 16 16h256c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-256zM224 240v32c0 8.83691 7.16309 16 16 16h192
+c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-192c-8.83691 0 -16 7.16309 -16 16zM224 -16v32c0 8.83691 7.16309 16 16 16h64c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-64c-8.83691 0 -16 7.16309 -16 16zM224 112v32
+c0 8.83691 7.16309 16 16 16h128c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-128c-8.83691 0 -16 7.16309 -16 16z" />
+ <glyph glyph-name="sort-amount-up" unicode="&#xf161;"
+d="M4.70215 331.314l79.9834 80.002c6.24805 6.24707 16.3838 6.24414 22.6279 0l79.9805 -80.002c10.0703 -10.0703 2.89941 -27.3145 -11.3135 -27.3145h-47.9805v-320c0 -8.83691 -7.16309 -16 -16 -16h-32c-8.83691 0 -16 7.16309 -16 16v320h-47.9844
+c-14.2402 0 -21.3623 17.2637 -11.3135 27.3145zM240 352c-8.83691 0 -16 7.16309 -16 16v32c0 8.83691 7.16309 16 16 16h256c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-256zM224 240v32c0 8.83691 7.16309 16 16 16h192
+c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-192c-8.83691 0 -16 7.16309 -16 16zM224 -16v32c0 8.83691 7.16309 16 16 16h64c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-64c-8.83691 0 -16 7.16309 -16 16zM224 112v32
+c0 8.83691 7.16309 16 16 16h128c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-128c-8.83691 0 -16 7.16309 -16 16z" />
+ <glyph glyph-name="sort-numeric-down" unicode="&#xf162;" horiz-adv-x="425"
+d="M308.811 334.213l-19.4473 20.7949c-4.52246 4.83594 -4.27441 12.4209 0.555664 16.9502l43.4434 40.7412c1.91113 1.79199 5.58789 3.24707 8.20801 3.24707h0.000976562h31.5908c6.62695 0 12 -5.37305 12 -12v-127.07h25.6602c6.62695 0 12 -5.37305 12 -12v-28.9297
+c0 -6.62695 -5.37305 -12 -12 -12h-109.173c-6.62695 0 -12 5.37305 -12 12v28.9297c0 6.62695 5.37305 12 12 12h25.4141v57.9385c-7.25488 -6.58008 -14.2119 -4.92188 -18.2529 -0.601562zM278.241 95.6436c0 32.6533 23.8652 67.3564 68.0938 67.3564
+c38.2529 0 79.4238 -28.8613 79.4238 -92.2275c0 -51.2764 -32.2363 -105.772 -91.9824 -105.772c-17.8359 0 -30.5459 3.55664 -38.5488 6.78125c-5.78906 2.33301 -8.78809 8.74609 -6.92188 14.7031l9.2373 29.4795c2.03516 6.49609 9.04883 9.9834 15.4668 7.71582
+c13.0293 -4.60156 27.8779 -5.27441 38.1035 4.13867c-38.7422 -5.07227 -72.8721 25.3594 -72.8721 67.8252zM370.514 76.3057c0 22.2852 -15.3018 36.5049 -25.835 36.5049c-8.6416 0 -13.1641 -7.96484 -13.1641 -15.832c0 -5.66895 1.81543 -24.168 25.168 -24.168
+c9.97363 0 13.377 2.1543 13.7441 2.73145c0.0214844 0.0458984 0.0869141 0.291016 0.0869141 0.763672zM175.984 80c14.2402 0 21.3623 -17.2637 11.3125 -27.3145l-79.9834 -80.002c-6.24707 -6.24707 -16.3828 -6.24414 -22.6279 0l-79.9805 80.002
+c-10.0703 10.0703 -2.89844 27.3145 11.3135 27.3145h47.9814v320c0 8.83691 7.16309 16 16 16h32c8.83691 0 16 -7.16309 16 -16v-320h47.9844z" />
+ <glyph glyph-name="sort-numeric-up" unicode="&#xf163;" horiz-adv-x="425"
+d="M308.811 334.213l-19.4473 20.7949c-4.52246 4.83594 -4.27441 12.4209 0.555664 16.9502l43.4434 40.7412c1.91113 1.79199 5.58789 3.24707 8.20801 3.24707h0.000976562h31.5908c6.62695 0 12 -5.37305 12 -12v-127.07h25.6602c6.62695 0 12 -5.37305 12 -12v-28.9297
+c0 -6.62695 -5.37305 -12 -12 -12h-109.173c-6.62695 0 -12 5.37305 -12 12v28.9297c0 6.62695 5.37305 12 12 12h25.4141v57.9385c-7.25488 -6.58008 -14.2119 -4.92188 -18.2529 -0.601562zM278.241 95.6436c0 32.6533 23.8652 67.3564 68.0938 67.3564
+c38.2529 0 79.4238 -28.8613 79.4238 -92.2275c0 -51.2764 -32.2363 -105.772 -91.9824 -105.772c-17.8359 0 -30.5459 3.55664 -38.5488 6.78125c-5.78906 2.33301 -8.78809 8.74609 -6.92188 14.7031l9.2373 29.4795c2.03516 6.49609 9.04883 9.9834 15.4668 7.71582
+c13.0293 -4.60156 27.8779 -5.27441 38.1035 4.13867c-38.7422 -5.07227 -72.8721 25.3594 -72.8721 67.8252zM370.514 76.3057c0 22.2852 -15.3018 36.5049 -25.835 36.5049c-8.6416 0 -13.1641 -7.96484 -13.1641 -15.832c0 -5.66895 1.81543 -24.168 25.168 -24.168
+c9.97363 0 13.377 2.1543 13.7441 2.73145c0.0214844 0.0458984 0.0869141 0.291016 0.0869141 0.763672zM16.0156 304c-14.2402 0 -21.3623 17.2637 -11.3135 27.3145l79.9844 80.002c6.24707 6.24707 16.3828 6.24414 22.6279 0l79.9805 -80.002
+c10.0703 -10.0703 2.89844 -27.3145 -11.3135 -27.3145h-47.9814v-320c0 -8.83691 -7.16309 -16 -16 -16h-32c-8.83691 0 -16 7.16309 -16 16v320h-47.9844z" />
+ <glyph glyph-name="thumbs-up" unicode="&#xf164;" horiz-adv-x="511"
+d="M104 224c13.2549 0 24 -10.7451 24 -24v-240c0 -13.2549 -10.7451 -24 -24 -24h-80c-13.2549 0 -24 10.7451 -24 24v240c0 13.2549 10.7451 24 24 24h80zM64 -24c13.2549 0 24 10.7451 24 24s-10.7451 24 -24 24s-24 -10.7451 -24 -24s10.7451 -24 24 -24zM384 366.548
+c0 -42.416 -25.9697 -66.208 -33.2773 -94.5479h101.724c33.3965 0 59.3965 -27.7461 59.5527 -58.0977c0.0839844 -17.9385 -7.5459 -37.249 -19.4395 -49.1973l-0.109375 -0.110352c9.83594 -23.3369 8.23633 -56.0371 -9.30859 -79.4688
+c8.68164 -25.8945 -0.0683594 -57.7041 -16.3818 -74.7568c4.29785 -17.5977 2.24414 -32.5752 -6.14746 -44.6318c-20.4102 -29.3242 -70.9961 -29.7373 -113.773 -29.7373l-2.84473 0.000976562c-48.2871 0.0166016 -87.8057 17.5977 -119.561 31.7246
+c-15.957 7.09961 -36.8203 15.8877 -52.6504 16.1787c-6.54004 0.120117 -11.7832 5.45703 -11.7832 11.998v213.77c0 3.2002 1.28223 6.27148 3.55762 8.52148c39.6143 39.1436 56.6484 80.5869 89.1172 113.11c14.8037 14.832 20.1885 37.2363 25.3936 58.9023
+c4.44629 18.501 13.749 57.7939 33.9316 57.7939c24 0 72 -8 72 -81.4521z" />
+ <glyph glyph-name="thumbs-down" unicode="&#xf165;"
+d="M0 392c0 13.2549 10.7451 24 24 24h80c13.2549 0 24 -10.7451 24 -24v-240c0 -13.2549 -10.7451 -24 -24 -24h-80c-13.2549 0 -24 10.7451 -24 24v240zM40 192c0 -13.2549 10.7451 -24 24 -24s24 10.7451 24 24s-10.7451 24 -24 24s-24 -10.7451 -24 -24zM312 -64
+c-20.1826 0 -29.4854 39.293 -33.9307 57.7949c-5.20605 21.666 -10.5889 44.0703 -25.3936 58.9023c-32.4688 32.5234 -49.5029 73.9668 -89.1172 113.11c-1.96387 1.94141 -3.55762 5.75879 -3.55762 8.52051v0.000976562v213.77
+c0 6.54102 5.24316 11.8779 11.7832 11.998c15.8311 0.290039 36.6934 9.0791 52.6504 16.1787c31.7549 14.127 71.2744 31.708 119.561 31.7246h2.84375c42.7773 0 93.3633 -0.413086 113.774 -29.7373c8.3916 -12.0566 10.4453 -27.0342 6.14746 -44.6318
+c16.3125 -17.0527 25.0635 -48.8633 16.3818 -74.7568c17.5439 -23.4316 19.1436 -56.1318 9.30859 -79.4688l0.109375 -0.110352c11.8936 -11.9492 19.5234 -31.2588 19.4395 -49.1973c-0.15625 -30.3516 -26.1572 -58.0977 -59.5527 -58.0977h-101.725
+c7.30762 -28.3398 33.2773 -52.1318 33.2773 -94.5479c0 -73.4521 -48 -81.4521 -72 -81.4521z" />
+ <glyph glyph-name="female" unicode="&#xf182;" horiz-adv-x="256"
+d="M128 448c35.3457 0 64 -28.6543 64 -64s-28.6543 -64 -64 -64s-64 28.6543 -64 64s28.6543 64 64 64zM247.283 93.8213c3.78809 -15.1504 -7.69238 -29.8213 -23.2832 -29.8213h-56v-104c0 -13.2549 -10.7451 -24 -24 -24h-32c-13.2549 0 -24 10.7451 -24 24v104h-56
+c-15.6172 0 -27.0654 14.6953 -23.2832 29.8213l48 192c2.50879 10.0342 12.9395 18.1787 23.2832 18.1787h11.3604c23.6895 -10.8936 50.5684 -10.4434 73.2793 0h11.3604c10.3438 0 20.7744 -8.14453 23.2832 -18.1787z" />
+ <glyph glyph-name="male" unicode="&#xf183;" horiz-adv-x="192"
+d="M96 448c35.3457 0 64 -28.6543 64 -64s-28.6543 -64 -64 -64s-64 28.6543 -64 64s28.6543 64 64 64zM144 304c26.5098 0 48 -21.4902 48 -48v-136c0 -13.2549 -10.7451 -24 -24 -24h-16v-136c0 -13.2549 -10.7451 -24 -24 -24h-64c-13.2549 0 -24 10.7451 -24 24v136h-16
+c-13.2549 0 -24 10.7451 -24 24v136c0 26.5098 21.4902 48 48 48h11.3604c23.6895 -10.8936 50.5684 -10.4434 73.2793 0h11.3604z" />
+ <glyph glyph-name="sun" unicode="&#xf185;" horiz-adv-x="511"
+d="M256 288c52.9004 0 96 -43.0996 96 -96s-43.0996 -96 -96 -96s-96 43.0996 -96 96s43.0996 96 96 96zM502.4 207.5c12.7998 -6.40039 12.7998 -24.5996 -0.200195 -31.0996l-94.7002 -47.3008l33.5 -100.399c4.59961 -13.5 -8.2998 -26.4004 -21.9004 -21.9004
+l-100.399 33.5l-47.2998 -94.7002c-6.40039 -12.7998 -24.6006 -12.7998 -31 0l-47.3008 94.7002l-100.399 -33.5c-13.5 -4.59961 -26.4004 8.2998 -21.9004 21.9004l33.5 100.5l-94.7002 47.2998c-12.7998 6.40039 -12.7998 24.5996 0 31l94.7002 47.4004l-33.5 100.399
+c-4.59961 13.5 8.2998 26.4004 21.9004 21.9004l100.5 -33.5l47.2998 94.7002c6.40039 12.7998 24.5996 12.7998 31 0l47.4004 -94.8008l100.399 33.5c13.5 4.60059 26.4004 -8.2998 21.9004 -21.8994l-33.5 -100.4zM346.5 101.5c49.9004 49.9004 49.9004 131.1 0 181
+s-131.1 49.9004 -181 0s-49.9004 -131.1 0 -181s131.1 -49.9004 181 0z" />
+ <glyph glyph-name="moon" unicode="&#xf186;"
+d="M283.211 -64c-141.489 0 -256 114.691 -256 256c0 141.489 114.691 256 256 256c13.0176 -0.00195312 33.9727 -1.91895 46.7754 -4.28027c11.0059 -2.0332 13.4414 -16.7178 3.75586 -22.2295c-62.8359 -35.7588 -101.498 -102.172 -101.498 -174.395
+c0 -125.378 114.059 -220.607 238.262 -196.954c10.9229 2.08008 18.6299 -10.6416 11.5625 -19.3496c-47.7783 -58.8672 -119.896 -94.792 -198.857 -94.792z" />
+ <glyph glyph-name="archive" unicode="&#xf187;"
+d="M32 0v288h448v-288c0 -17.7002 -14.2998 -32 -32 -32h-384c-17.7002 0 -32 14.2998 -32 32zM192 212v-8c0 -6.59961 5.40039 -12 12 -12h104c6.59961 0 12 5.40039 12 12v8c0 6.59961 -5.40039 12 -12 12h-104c-6.59961 0 -12 -5.40039 -12 -12zM480 416
+c17.7002 0 32 -14.2998 32 -32v-48c0 -8.7998 -7.2002 -16 -16 -16h-480c-8.7998 0 -16 7.2002 -16 16v48c0 17.7002 14.2998 32 32 32h448z" />
+ <glyph glyph-name="bug" unicode="&#xf188;"
+d="M511.988 159.1c-0.478516 -17.4297 -15.2168 -31.0996 -32.6533 -31.0996h-55.335v-16c0 -21.8643 -4.88184 -42.584 -13.5996 -61.1445l60.2275 -60.2285c12.4961 -12.4971 12.4961 -32.7578 0 -45.2549c-12.498 -12.4971 -32.7588 -12.4961 -45.2559 0
+l-54.7363 54.7363c-24.75 -20.0732 -56.2852 -32.1084 -90.6357 -32.1084v244c0 6.62695 -5.37305 12 -12 12h-24c-6.62695 0 -12 -5.37305 -12 -12v-244c-34.3506 0 -65.8857 12.0352 -90.6357 32.1084l-54.7363 -54.7363c-12.498 -12.4971 -32.7588 -12.4961 -45.2559 0
+c-12.4961 12.4971 -12.4961 32.7578 0 45.2549l60.2275 60.2285c-8.71777 18.5605 -13.5996 39.2803 -13.5996 61.1445v16h-55.334c-17.4355 0 -32.1748 13.6699 -32.6533 31.0996c-0.49707 18.084 14.0156 32.9004 31.9873 32.9004h56v58.7451l-46.6279 46.6279
+c-12.4961 12.4971 -12.4961 32.7578 0 45.2549c12.498 12.4971 32.7578 12.4971 45.2559 0l54.627 -54.6279h229.489l54.627 54.627c12.498 12.4971 32.7578 12.4971 45.2559 0c12.4961 -12.4971 12.4961 -32.7578 0 -45.2549l-46.627 -46.627v-58.7451h56
+c17.9717 0 32.4844 -14.8164 31.9883 -32.9004zM257 448c61.8564 0 112 -50.1436 112 -112h-224c0 61.8564 50.1436 112 112 112z" />
+ <glyph glyph-name="caret-square-left" unicode="&#xf191;" horiz-adv-x="448"
+d="M400 -32h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h352c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48zM259.515 323.515l-123.029 -123.029c-4.68652 -4.68652 -4.68652 -12.2842 0 -16.9717l123.028 -123.028
+c7.56055 -7.56055 20.4854 -2.20605 20.4854 8.48438v246.06c0.000976562 10.6904 -12.9248 16.0449 -20.4844 8.48535z" />
+ <glyph glyph-name="dot-circle" unicode="&#xf192;"
+d="M256 440c136.967 0 248 -111.033 248 -248s-111.033 -248 -248 -248s-248 111.033 -248 248s111.033 248 248 248zM336 192c0 44.1123 -35.8877 80 -80 80s-80 -35.8877 -80 -80s35.8877 -80 80 -80s80 35.8877 80 80z" />
+ <glyph glyph-name="wheelchair" unicode="&#xf193;"
+d="M496.101 62.3311l14.2275 -28.6631c3.92871 -7.91504 0.697266 -17.5156 -7.21777 -21.4453l-65.4658 -32.8857c-16.0488 -7.9668 -35.5557 -1.19434 -43.1885 15.0547l-62.7773 133.608h-139.679c-15.9248 0 -29.4258 11.71 -31.6787 27.4746
+c-33.8887 237.218 -31.9414 222.481 -32.3213 228.525c0 36.3584 30.3184 65.6348 67.0518 63.9287c33.2715 -1.54492 60.0479 -28.9043 60.9248 -62.2012c0.868164 -32.9326 -23.1514 -60.4229 -54.6074 -65.0381l4.66992 -32.6904h129.961c8.83691 0 16 -7.16309 16 -16
+v-32c0 -8.83691 -7.16309 -16 -16 -16h-120.818l4.57227 -32h132.246c11.2168 0 24.1924 -8.24023 28.9619 -18.3916l57.5146 -122.407l36.1787 18.3486c7.91504 3.92871 17.5166 0.697266 21.4453 -7.21777zM311.358 96l25.752 -54.8076
+c-27.3047 -61.8848 -89.2402 -105.192 -161.11 -105.192c-97.0469 0 -176 78.9531 -176 176c0 74.0371 45.9561 137.536 110.836 163.489c2.64453 -18.4736 5.77637 -40.3682 9.48828 -66.333c-33.6299 -19.3477 -56.3242 -55.6514 -56.3242 -97.1562
+c0 -61.7568 50.2432 -112 112 -112c56.3242 0 103.064 41.7959 110.852 96h24.5068z" />
+ <glyph glyph-name="lira-sign" unicode="&#xf195;" horiz-adv-x="384"
+d="M371.994 192c6.78613 0 12.2578 -5.62598 11.9971 -12.4082c-5.15332 -133.758 -94.3174 -211.592 -228.408 -211.592h-79.583c-6.62695 0 -12 5.37305 -12 12v193.442l-49.3975 -10.9775c-7.49316 -1.66602 -14.6025 4.03711 -14.6025 11.7139v40.9766
+c0 5.31348 4.20996 10.5615 9.39746 11.7139l54.6025 12.1338v30.4395l-49.3975 -10.9775c-7.49316 -1.66602 -14.6025 4.03711 -14.6025 11.7139v40.9766c0 5.31348 4.20996 10.5615 9.39746 11.7139l54.6025 12.1338v68.9971c0 6.62695 5.37305 12 12 12h56
+c6.62695 0 12 -5.37305 12 -12v-51.2188l129.397 28.7539c7.49316 1.66602 14.6025 -4.03711 14.6025 -11.7139v-40.9756c0 -5.31348 -4.20996 -10.5615 -9.39746 -11.7139l-134.603 -29.9121v-30.4385l129.397 28.7539c7.49316 1.66602 14.6025 -4.03711 14.6025 -11.7139
+v-40.9766c0 -5.31348 -4.20996 -10.5615 -9.39746 -11.7139l-134.603 -29.9121v-159.219c86.1787 0 168 48 168 148.754c0 6.33398 5.63965 11.2461 11.9746 11.2461h48.0195z" />
+ <glyph glyph-name="space-shuttle" unicode="&#xf197;" horiz-adv-x="640"
+d="M592.604 239.756c29.6787 -13.9111 47.3965 -31.7637 47.3965 -47.7559s-17.7178 -33.8447 -47.3965 -47.7559c-32.8682 -15.4082 -76.8262 -24.2441 -120.604 -24.2441h-285.674c-4.95215 -6.55469 -10.585 -11.9775 -16.7197 -16h206.394
+c-146.843 -30.2529 -156.597 -136 -279.997 -136h-0.00292969v128h-16v-128c-26.5098 0 -48 28.6543 -48 64v64c-23.1807 0 -32 10.0166 -32 24v40c0 13.9678 8.80273 24 32 24v16c-23.1807 0 -32 10.0166 -32 24v40c0 13.9678 8.80273 24 32 24v64
+c0 35.3457 21.4902 64 48 64v-128h16v128h0.00292969c123.4 0 133.154 -105.747 279.997 -136h-206.393c6.13477 -4.02246 11.7676 -9.44531 16.7197 -16h285.673c43.7773 0 87.7354 -8.83594 120.604 -24.2441zM488 152c31.9424 0 31.9092 80 0 80
+c-4.41602 0 -8 -3.58398 -8 -8v-64c0 -4.41602 3.58398 -8 8 -8z" />
+ <glyph glyph-name="envelope-square" unicode="&#xf199;" horiz-adv-x="448"
+d="M400 416c26.5098 0 48 -21.4902 48 -48v-352c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h352zM178.117 185.896c10.5156 -7.66895 31.3799 -26.1133 45.8828 -25.8955
+c14.4912 -0.225586 35.3828 18.2393 45.8828 25.8936c90.6836 65.8145 89.7461 65.9697 114.117 84.9385v25.167c0 13.2549 -10.7451 24 -24 24h-272c-13.2549 0 -24 -10.7451 -24 -24v-25.167c24.3525 -18.9541 23.4287 -19.1201 114.117 -84.9365zM384 230.225
+c-13.958 -10.793 -33.3252 -25.2334 -95.2832 -70.1982c-13.6826 -9.98438 -37.833 -32.1592 -64.7197 -32.0254c-26.7188 -0.134766 -50.5322 21.6689 -64.6943 32.0098c-61.9736 44.9785 -81.3447 59.4199 -95.3027 70.2139v-142.225c0 -13.2549 10.7451 -24 24 -24h272
+c13.2549 0 24 10.7451 24 24v142.225z" />
+ <glyph glyph-name="university" unicode="&#xf19c;"
+d="M496 320v-16c0 -4.41602 -3.58398 -8 -8 -8h-24v-12c0 -6.62695 -5.37305 -12 -12 -12h-392c-6.62695 0 -12 5.37305 -12 12v12h-24c-4.41602 0 -8 3.58398 -8 8v16c0 2.95215 2.21387 6.26367 4.94141 7.3916l232 88
+c0.810547 0.335938 2.18066 0.608398 3.05859 0.608398s2.24805 -0.272461 3.05859 -0.608398l232 -88c2.72754 -1.12793 4.94141 -4.43945 4.94141 -7.3916zM472 16c13.2549 0 24 -10.7451 24 -24v-16c0 -4.41602 -3.58398 -8 -8 -8h-464c-4.41602 0 -8 3.58398 -8 8v16
+c0 13.2549 10.7451 24 24 24h432zM96 256h64v-192h64v192h64v-192h64v192h64v-192h36c6.62695 0 12 -5.37305 12 -12v-20h-416v20c0 6.62695 5.37305 12 12 12h36v192z" />
+ <glyph glyph-name="graduation-cap" unicode="&#xf19d;" horiz-adv-x="640"
+d="M622.34 294.8c23.5498 -7.24023 23.5498 -38.3594 0 -45.5996l-278.95 -85.7002c-20.3496 -6.25 -37.7295 -2.78027 -46.79 0l-195.569 60.0898c-12.25 -8.41992 -19.9307 -21.7002 -20.6904 -36.7197c9.19043 -5.62012 15.6602 -15.2998 15.6602 -26.8701
+c0 -10.7803 -5.67969 -19.8496 -13.8604 -25.6504l25.5303 -114.88c2.21973 -9.98926 -5.37988 -19.4697 -15.6201 -19.4697h-56.1094c-10.2305 0 -17.8301 9.48047 -15.6104 19.4697l25.5303 114.88c-8.18066 5.80078 -13.8604 14.8701 -13.8604 25.6504
+c0 11.8896 6.78027 21.8496 16.4102 27.3701c0.649414 17.6201 7.20996 33.71 17.8799 46.8994l-48.6299 14.9404c-23.54 7.23047 -23.54 38.3604 0 45.5898l278.95 85.7002c15.1895 4.66992 31.5898 4.66992 46.79 0zM352.79 132.91l145.03 44.5596l14.1797 -113.47
+c0 -35.3496 -85.96 -64 -192 -64s-192 28.6504 -192 64l14.1797 113.46l145.021 -44.5498c12.75 -3.91992 37.0596 -8.75977 65.5898 0z" />
+ <glyph glyph-name="language" unicode="&#xf1ab;" horiz-adv-x="640"
+d="M152.1 211.8l10.9004 -37.5h-38.0996l11.0996 37.5c3.5 12.1006 7.7998 33.2002 7.7998 33.2002h0.5s4.2998 -21.0996 7.7998 -33.2002zM616 352c13.2998 0 24 -10.7002 24 -24v-272c0 -13.2998 -10.7002 -24 -24 -24h-280v320h280zM592 232v16
+c0 6.59961 -5.40039 12 -12 12h-64v16c0 6.59961 -5.40039 12 -12 12h-16c-6.59961 0 -12 -5.40039 -12 -12v-16h-64c-6.59961 0 -12 -5.40039 -12 -12v-16c0 -6.59961 5.40039 -12 12 -12h114.3c-6.2002 -14.2998 -16.5 -29 -30 -43.2002
+c-6.59961 6.90039 -12.3994 13.9004 -17.3994 20.9004c-3.60059 5.09961 -10.6006 6.59961 -16 3.39941l-7.30078 -4.2998l-6.5 -3.89941c-5.89941 -3.5 -7.69922 -11.4004 -3.69922 -17.1006c6.09961 -8.7002 13.0996 -17.2998 21 -25.7002
+c-8.10059 -6.2998 -16.8008 -12.2998 -26.1006 -18c-5.59961 -3.39941 -7.39941 -10.5996 -4.2002 -16.1992l7.90039 -13.9004c3.40039 -5.90039 10.9004 -7.7998 16.7002 -4.2998c12.7002 7.7998 24.5 16.2002 35.3994 24.8994
+c10.9004 -8.7998 22.8008 -17.0996 35.4004 -24.8994c5.7998 -3.5 13.2998 -1.60059 16.7002 4.2998l7.89941 13.9004c3.2002 5.69922 1.40039 12.7998 -4.09961 16.1992c-9 5.5 -17.7002 11.6006 -26.0996 18c21 22.5 35.7998 46.3008 42.6992 69.9004h11.4004
+c6.59961 0 12 5.40039 12 12zM0 328c0 13.2998 10.7002 24 24 24h280v-320h-280c-13.2998 0 -24 10.7002 -24 24v272zM58.9004 111.9c-2.60059 -7.80078 3.19922 -15.9004 11.3994 -15.9004h22.9004c5.2998 0 10 3.59961 11.5 8.7002l9.09961 31.7998h60.2002
+l9.40039 -31.9004c1.40137 -4.74316 6.55273 -8.59668 11.5 -8.59961h22.8994c8.2998 0 14 8.09961 11.4004 15.9004l-57.5 169.1c-1.7002 4.7998 -6.2998 8.09961 -11.4004 8.09961h-32.5c-5.2002 0 -9.7002 -3.19922 -11.3994 -8.09961z" />
+ <glyph glyph-name="fax" unicode="&#xf1ac;"
+d="M64 320c17.6699 0 32 -14.3301 32 -32v-320c0 -17.6699 -14.3301 -32 -32 -32h-32c-17.6699 0 -32 14.3301 -32 32v320c0 17.6699 14.3301 32 32 32h32zM480 288c17.6699 0 32 -14.3301 32 -32v-288c0 -17.6699 -14.3301 -32 -32 -32h-320c-17.6699 0 -32 14.3301 -32 32
+v448c0 17.6699 14.3301 32 32 32h242.74c8.49023 0 16.6299 -3.37012 22.6299 -9.37012l45.2598 -45.25c6 -6.00977 9.37012 -14.1396 9.37012 -22.6299v-82.75zM288 16v32c0 8.83984 -7.16016 16 -16 16h-32c-8.83984 0 -16 -7.16016 -16 -16v-32
+c0 -8.83984 7.16016 -16 16 -16h32c8.83984 0 16 7.16016 16 16zM288 144v32c0 8.83984 -7.16016 16 -16 16h-32c-8.83984 0 -16 -7.16016 -16 -16v-32c0 -8.83984 7.16016 -16 16 -16h32c8.83984 0 16 7.16016 16 16zM416 16v32c0 8.83984 -7.16016 16 -16 16h-32
+c-8.83984 0 -16 -7.16016 -16 -16v-32c0 -8.83984 7.16016 -16 16 -16h32c8.83984 0 16 7.16016 16 16zM416 144v32c0 8.83984 -7.16016 16 -16 16h-32c-8.83984 0 -16 -7.16016 -16 -16v-32c0 -8.83984 7.16016 -16 16 -16h32c8.83984 0 16 7.16016 16 16zM432 256v96h-32
+c-8.83984 0 -16 7.16016 -16 16v32h-208v-144h256z" />
+ <glyph glyph-name="building" unicode="&#xf1ad;" horiz-adv-x="448"
+d="M436 -32c6.62695 0 12 -5.37305 12 -12v-20h-448v20c0 6.62695 5.37305 12 12 12h20v456c0 13.2549 10.7451 24 24 24h336c13.2549 0 24 -10.7451 24 -24v-456h20zM128 372v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12v40
+c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12zM128 276v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12v40c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12zM180 128c6.62695 0 12 5.37305 12 12v40
+c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40zM256 -32v84c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-84h64zM320 140v40c0 6.62695 -5.37305 12 -12 12h-40
+c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12zM320 236v40c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12zM320 332v40
+c0 6.62695 -5.37305 12 -12 12h-40c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h40c6.62695 0 12 5.37305 12 12z" />
+ <glyph glyph-name="child" unicode="&#xf1ae;" horiz-adv-x="384"
+d="M120 376c0 39.7646 32.2354 72 72 72s72 -32.2354 72 -72c0 -39.7637 -32.2354 -72 -72 -72s-72 32.2363 -72 72zM374.627 374.627c12.4971 -12.4971 12.4971 -32.7568 0 -45.2539l-94.627 -94.627v-266.746c0 -17.6729 -14.3271 -32 -32 -32h-16
+c-17.6729 0 -32 14.3271 -32 32v112h-16v-112c0 -17.6729 -14.3271 -32 -32 -32h-16c-17.6729 0 -32 14.3271 -32 32v266.746l-94.627 94.626c-12.4971 12.4971 -12.4971 32.7578 0 45.2549c12.4961 12.4971 32.7578 12.4971 45.2539 0l86.627 -86.627h101.491
+l86.6279 86.627c12.4961 12.4971 32.7578 12.4971 45.2539 0z" />
+ <glyph glyph-name="paw" unicode="&#xf1b0;"
+d="M256 224c79.4102 0 192 -122.76 192 -200.25c0 -34.9004 -26.8096 -55.75 -71.7402 -55.75c-48.4102 0 -80.75 25.0801 -120.26 25.0801c-39.1699 0 -71.4199 -25.0801 -120.26 -25.0801c-44.9307 0 -71.7402 20.8496 -71.7402 55.75c0 77.4902 112.59 200.25 192 200.25
+zM108.72 236.61c10.4004 -34.6504 -4.76953 -68.3799 -33.8896 -75.3408c-29.1201 -6.95996 -61.1602 15.4805 -71.5605 50.1309c-10.3994 34.6494 4.77051 68.3799 33.8906 75.3398s61.1602 -15.4805 71.5596 -50.1299zM193.44 257.39
+c-30.9307 -8.14941 -65.6201 20.4502 -77.46 63.8701c-11.8408 43.4199 3.63965 85.2207 34.5791 93.3604c30.9404 8.13965 65.6201 -20.4502 77.46 -63.8701c11.8408 -43.4199 -3.63965 -85.2197 -34.5791 -93.3604zM474.83 286.73
+c29.1201 -6.96094 44.29 -40.6904 33.8896 -75.3408c-10.4102 -34.6494 -42.4395 -57.0898 -71.5596 -50.1299s-44.29 40.6904 -33.8906 75.3398c10.4102 34.6504 42.4404 57.0908 71.5605 50.1309zM318.56 257.39c-30.9395 8.14062 -46.4199 49.9404 -34.5791 93.3604
+c11.8398 43.4199 46.5195 72.0195 77.46 63.8701c30.9395 -8.15039 46.4199 -49.9404 34.5791 -93.3604c-11.8398 -43.4199 -46.5195 -72.0098 -77.46 -63.8701z" />
+ <glyph glyph-name="cube" unicode="&#xf1b2;" horiz-adv-x="511"
+d="M239.1 441.7c10.9004 4.09961 22.9004 4.09961 33.7002 -0.100586l208 -78c18.7002 -7 31.1006 -24.8994 31.1006 -44.8994v-225.101c0 -18.0996 -10.2002 -34.7998 -26.5 -42.8994l-208 -104c-13.5 -6.7998 -29.4004 -6.7998 -42.9004 0l-208 104
+c-16.2002 8.09961 -26.5 24.7002 -26.5 42.8994v225.101c0 20 12.4004 38 31.0996 45zM256 379.6l-192 -72v-1.09961l192 -78l192 78v1.09961zM288 23.5996l160 80v133.9l-160 -65v-148.9z" />
+ <glyph glyph-name="cubes" unicode="&#xf1b3;"
+d="M488.6 197.8c14.1006 -5.2998 23.4004 -18.7002 23.4004 -33.7002v-110.1c0 -13.5996 -7.7002 -26.0996 -19.9004 -32.2002l-100 -50c-10.0996 -5.09961 -22.0996 -5.09961 -32.1992 0l-103.9 52l-103.9 -52c-10.0996 -5.09961 -22.0996 -5.09961 -32.1992 0l-100 50
+c-12.2002 6.10059 -19.9004 18.6006 -19.9004 32.2002v110.1c0 15 9.2998 28.4004 23.2998 33.7002l96.6006 36.2002v108.5c0 15 9.2998 28.4004 23.3994 33.7002l100 37.5c8.2002 3.09961 17.2002 3.09961 25.2998 0l100 -37.5
+c14.1006 -5.2998 23.4004 -18.7002 23.4004 -33.7002v-108.5zM358 233.2v73.2998l-85 -37v-68.2002zM154 343.9v-0.600586l102 -41.3994l102 41.3994v0.600586l-102 38.1992zM238 52.7998v75.4004l-85 -38.7998v-79.1006zM238 164.8v0.600586l-102 38.1992l-102 -38.1992
+v-0.600586l102 -41.3994zM478 52.7998v75.4004l-85 -38.7998v-79.1006zM478 164.8v0.600586l-102 38.1992l-102 -38.1992v-0.600586l102 -41.3994z" />
+ <glyph glyph-name="recycle" unicode="&#xf1b8;"
+d="M184.561 186.097c3.23242 -13.9971 -12.1221 -24.6348 -24.0674 -17.168l-40.7363 25.4551l-50.8672 -81.4014c-13.2832 -21.2559 2.07031 -48.9824 27.1221 -48.9824h51.9883c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-51.8848
+c-75.334 0 -121.302 83.0479 -81.4082 146.88l50.8223 81.3877l-40.7256 25.4482c-12.0811 7.54688 -8.96582 25.9609 4.87891 29.1582l110.237 25.4502c8.61133 1.9873 17.2012 -3.38086 19.1895 -11.9902zM283.122 369.012
+c-12.2842 19.6543 -41.5449 20.3193 -54.2568 -0.0214844l-17.9609 -28.7432c-3.5127 -5.62012 -10.916 -7.3291 -16.5361 -3.81738l-33.9189 21.1953c-5.62012 3.51172 -7.33008 10.9131 -3.82031 16.5332l17.9629 28.7656c37.6074 60.1709 125.295 60.0332 162.816 0
+l41.2627 -66.082l40.6875 25.4238c12.0771 7.55176 27.2646 -3.32324 24.0674 -17.168l-25.4502 -110.236c-1.97363 -8.55273 -10.5166 -13.9893 -19.1885 -11.9902l-110.237 25.4502c-13.8789 3.20508 -16.9297 21.6299 -4.87891 29.1572l40.7402 25.457zM497.288 146.88
+c39.9268 -63.8828 -6.13379 -146.88 -81.4082 -146.88h-95.8799v-47.9893c0 -14.3105 -17.3105 -21.3184 -27.3135 -11.3145l-80 79.9883c-6.24805 6.24805 -6.24805 16.3799 0 22.627l80 79.9814c10.0713 10.0703 27.3135 2.90039 27.3135 -11.3125v-47.9814h95.9844
+c25.0791 0 40.3926 27.749 27.1357 48.96l-27.5645 44.1123c-3.51172 5.62109 -1.80176 13.0234 3.81836 16.5361l33.8613 21.1582c5.62207 3.51367 13.0264 1.80273 16.5381 -3.82031z" />
+ <glyph glyph-name="car" unicode="&#xf1b9;"
+d="M499.99 272c7.80957 0 13.54 -7.33984 11.6494 -14.9102l-6 -24c-1.33008 -5.33984 -6.12988 -9.08984 -11.6396 -9.08984h-20.0703c13.4199 -11.7305 22.0703 -28.7803 22.0703 -48v-48c0 -16.1299 -6.16016 -30.6797 -16 -41.9297v-54.0703
+c0 -17.6699 -14.3301 -32 -32 -32h-32c-17.6699 0 -32 14.3301 -32 32v32h-256v-32c0 -17.6699 -14.3301 -32 -32 -32h-32c-17.6699 0 -32 14.3301 -32 32v54.0703c-9.83984 11.2598 -16 25.8096 -16 41.9297v48c0 19.2197 8.65039 36.2695 22.0801 48h-20.0703
+c-5.50977 0 -10.3096 3.75 -11.6396 9.08984l-6 24c-1.90039 7.57031 3.83008 14.9102 11.6396 14.9102h59.8604l16.6396 41.5996c17.1104 42.7705 57.9307 70.4004 103.99 70.4004h127c46.0703 0 86.8799 -27.6299 103.98 -70.4004l16.6396 -41.5996h59.8701z
+M147.93 289.83l-19.9297 -49.8301h256l-19.9297 49.8301c-7.29004 18.2197 -24.9404 30.1699 -44.5703 30.1699h-127c-19.6299 0 -37.2803 -11.9502 -44.5703 -30.1699zM96 128.2c19.2002 0 48 -3.19043 48 15.9502c0 19.1396 -28.7998 47.8496 -48 47.8496
+s-32 -12.7598 -32 -31.9004c0 -19.1396 12.7998 -31.8994 32 -31.8994zM416 128.2c19.2002 0 32 12.7598 32 31.8994c0 19.1406 -12.7998 31.9004 -32 31.9004s-48 -28.71 -48 -47.8496c0 -19.1406 28.7998 -15.9502 48 -15.9502z" />
+ <glyph glyph-name="taxi" unicode="&#xf1ba;"
+d="M462 206.36c28.5898 -6.40039 50 -31.8301 50 -62.3604v-48c0 -23.6201 -12.9502 -44.0303 -32 -55.1201v-40.8799c0 -17.6699 -14.3301 -32 -32 -32h-32c-17.6699 0 -32 14.3301 -32 32v32h-256v-32c0 -17.6699 -14.3301 -32 -32 -32h-32c-17.6699 0 -32 14.3301 -32 32
+v40.8799c-19.0498 11.0801 -32 31.4902 -32 55.1201v48c0 30.5303 21.4102 55.96 50 62.3604l22 84.8398c9.59961 35.2002 41.5996 60.7998 76.7998 60.7998h11.2002v32c0 17.6699 14.3301 32 32 32h128c17.6699 0 32 -14.3301 32 -32v-32h11.2002
+c35.2002 0 67.2002 -25.5996 76.7998 -60.7998zM96 96c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM116.55 208h278.9l-17.3906 67.1396c-2.01953 7.37988 -9.37988 12.8604 -14.8496 12.8604h-214.4
+c-5.46973 0 -12.8291 -5.48047 -15.0596 -13.6396zM416 96c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="tree" unicode="&#xf1bb;" horiz-adv-x="384"
+d="M378.31 69.5098c6 -6.79004 7.36035 -16.0898 3.56055 -24.2598c-3.75 -8.0498 -12 -13.25 -21.0098 -13.25h-136.86v-24.4502l30.29 -48.3994c5.32031 -10.6406 -2.41992 -23.1602 -14.3096 -23.1602h-95.9609c-11.8896 0 -19.6299 12.5195 -14.3096 23.1602
+l30.29 48.3994v24.4502h-136.86c-9.00977 0 -17.2598 5.2002 -21.0098 13.25c-3.7998 8.16992 -2.43945 17.4697 3.56055 24.2598l79.8896 90.4902h-30.6299c-9.02051 0 -16.9805 5 -20.7803 13.0498c-3.79004 8.0498 -2.54004 17.2598 3.27051 24.04l78.1396 90.9102
+h-28.8896c-9.10059 0 -17.3105 5.34961 -20.8701 13.6104c-3.51074 8.12988 -1.86035 17.5898 4.23926 24.0801l110.28 117.479c6.04004 6.4502 17.29 6.4502 23.3203 0l110.27 -117.479c6.10059 -6.49023 7.76074 -15.9502 4.24023 -24.0801
+c-3.55957 -8.26074 -11.7598 -13.6104 -20.8604 -13.6104h-28.8896l78.1299 -90.8896c5.83008 -6.79004 7.08008 -16.0107 3.28027 -24.0508c-3.7998 -8.05957 -11.7705 -13.0596 -20.7803 -13.0596h-30.6299z" />
+ <glyph glyph-name="database" unicode="&#xf1c0;" horiz-adv-x="448"
+d="M448 374.857v-45.7148c0 -40.2852 -100.333 -73.1426 -224 -73.1426s-224 32.8574 -224 73.1426v45.7148c0 40.2852 100.333 73.1426 224 73.1426s224 -32.8574 224 -73.1426zM448 272v-102.857c0 -40.2852 -100.333 -73.1426 -224 -73.1426s-224 32.8574 -224 73.1426
+v102.857c48.125 -33.1426 136.208 -48.5723 224 -48.5723s175.874 15.4297 224 48.5723zM448 112v-102.857c0 -40.2852 -100.333 -73.1426 -224 -73.1426s-224 32.8574 -224 73.1426v102.857c48.125 -33.1426 136.208 -48.5723 224 -48.5723s175.874 15.4297 224 48.5723z
+" />
+ <glyph glyph-name="file-pdf" unicode="&#xf1c1;" horiz-adv-x="384"
+d="M181.9 191.9c-5 16 -4.90039 46.8994 -2 46.8994c8.39941 0 7.59961 -36.8994 2 -46.8994zM180.2 144.7c9.59961 -17.4004 21.7998 -31.2002 34.5 -40.7998c-23.9004 -4.7002 -44.6006 -14.9004 -62.9004 -21.9004c11.1006 19.4004 20.7002 42.5 28.4004 62.7002z
+M86.0996 19.9004c5.80078 15.6992 28.2002 33.8994 34.9004 40.1992c-21.7002 -34.7998 -34.9004 -41 -34.9004 -40.1992zM248 288h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136
+c0 -13.2002 10.7998 -24 24 -24zM240 116.2c-20 12.2002 -33.2998 29 -42.7002 53.7998c4.5 18.5 11.6006 46.5996 6.2002 64.2002c-4.7002 29.3994 -42.4004 26.5 -47.7998 6.7998c-5 -18.2998 -0.400391 -44.0996 8.09961 -77
+c-11.5996 -27.5996 -28.7002 -64.5996 -40.7998 -85.7998c-0.0996094 0 -0.0996094 -0.100586 -0.200195 -0.100586c-27.0996 -13.8994 -73.5996 -44.5 -54.5 -68c5.60059 -6.89941 16 -10 21.5 -10c17.9004 0 35.7002 18 61.1006 61.8008
+c25.7998 8.5 54.0996 19.0996 79 23.1992c21.6992 -11.7998 47.0996 -19.5 64 -19.5c29.1992 0 31.1992 32 19.6992 43.4004c-13.8994 13.5996 -54.2998 9.7002 -73.5996 7.2002zM377 343c4.5 -4.5 7 -10.5996 7 -16.9004v-6.09961h-128v128h6c6.40039 0 12.5 -2.5 17 -7z
+M302.9 87.7002c0 0 -5.7002 -6.7998 -42.8008 9c40.3008 2.89941 46.9004 -6.2998 42.8008 -9z" />
+ <glyph glyph-name="file-word" unicode="&#xf1c2;" horiz-adv-x="384"
+d="M224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136zM281.1 192c-5.59961 0 -10.5 -3.90039 -11.5 -9.5c-24.1992 -111.4 -21.7998 -118 -21.5996 -129.2
+c-0.799805 5.40039 -5.59961 29 -29.5996 129.4c-1.30078 5.39941 -6.10059 9.2002 -11.7002 9.2002h-29.1006c-5.59961 0 -10.3994 -3.7002 -11.6992 -9.10059c-22.1006 -90 -27.8008 -112.5 -29.4004 -122.7c-0.900391 12.7002 -5.40039 44.2002 -21 122.2
+c-1.09961 5.7002 -6.09961 9.7002 -11.7998 9.7002h-24.5c-7.7002 0 -13.4004 -7.09961 -11.7002 -14.5996l37.7998 -168c1.2002 -5.5 6.10059 -9.40039 11.7002 -9.40039h37.0996c5.5 0 10.3008 3.7998 11.6006 9.09961c23.2002 93.1006 24.5 96.2002 25.5996 110.5h0.5
+c4.7998 -29.2998 -0.200195 -7 25.6006 -110.5c1.2998 -5.2998 6.09961 -9.09961 11.5996 -9.09961h38c5.59961 0 10.5 3.7998 11.7002 9.2998l38 168c1.7002 7.60059 -4 14.7002 -11.7002 14.7002h-23.9004zM384 326.1v-6.09961h-128v128h6.09961
+c6.40039 0 12.5 -2.5 17 -7l97.9004 -98c4.5 -4.5 7 -10.5996 7 -16.9004z" />
+ <glyph glyph-name="file-excel" unicode="&#xf1c3;" horiz-adv-x="384"
+d="M224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136zM284.1 205.5c5.10059 8 -0.599609 18.5 -10.0996 18.4004h-34.7998
+c-4.40039 0 -8.5 -2.40039 -10.6006 -6.30078c-30.5 -56.7998 -36.5996 -68.5 -36.5996 -68.5c-16.5996 34.9004 -10.5 19.7002 -36.5996 68.5c-2.10059 3.90039 -6.2002 6.30078 -10.6006 6.30078h-34.7998c-9.5 0 -15.2998 -10.5 -10.0996 -18.5l60.2998 -93.5
+l-60.2998 -93.5c-5.10059 -8 0.599609 -18.5 10.0996 -18.5h34.9004c4.39941 0 8.39941 2.39941 10.5 6.2998c26.5996 48.7998 30.1992 54 36.5996 68.7998c0 0 16.9004 -32.5 36.5 -68.7002c2.09961 -3.89941 6.2002 -6.2998 10.5996 -6.2998h34.9004
+c9.5 0 15.2002 10.5 10.0996 18.5l-60.0996 93.5zM384 326.1v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7l97.9004 -98c4.5 -4.5 7 -10.5996 7 -16.9004z" />
+ <glyph glyph-name="file-powerpoint" unicode="&#xf1c4;" horiz-adv-x="384"
+d="M193.7 176.8c8.7998 0 15.5 -2.7002 20.2998 -8.09961c9.59961 -10.9004 9.7998 -32.7002 -0.200195 -44.1006c-4.89941 -5.59961 -11.8994 -8.5 -21.0996 -8.5h-26.9004v60.7002h27.9004zM377 343c4.5 -4.5 7 -10.5996 7 -16.9004v-6.09961h-128v128h6
+c6.40039 0 12.5 -2.5 17 -7zM224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136zM277 146.8c0 44.2002 -28.4004 77 -72.9004 77h-81
+c-6.59961 0 -12 -5.39941 -12 -12v-199.8c0 -6.59961 5.40039 -12 12 -12h30.8008c6.59961 0 12 5.40039 12 12v57.2002c22.2998 0 111.1 -12.7002 111.1 77.5996z" />
+ <glyph glyph-name="file-image" unicode="&#xf1c5;" horiz-adv-x="384"
+d="M384 326.059v-6.05859h-128v128h6.05859c5.4873 0 13.0898 -3.14941 16.9707 -7.0293l97.9404 -97.9404c3.88086 -3.88086 7.03027 -11.4834 7.03027 -16.9717zM248 288h136v-328c0 -13.2549 -10.7451 -24 -24 -24h-336c-13.2549 0 -24 10.7451 -24 24v464
+c0 13.2549 10.7451 24 24 24h200v-136c0 -13.2002 10.7998 -24 24 -24zM112.545 272c-26.5088 0 -48 -21.4902 -48 -48s21.4902 -48 48 -48s48 21.4902 48 48s-21.4902 48 -48 48zM320.545 32v112l-39.5137 39.5146c-4.6875 4.68652 -12.2852 4.68652 -16.9717 0
+l-103.515 -103.515l-39.5146 39.5146c-4.68652 4.68652 -11.7988 5.1709 -16.4854 0.485352l-39.5146 -39.5146l-0.485352 -48.4854h256z" />
+ <glyph glyph-name="file-archive" unicode="&#xf1c6;" horiz-adv-x="384"
+d="M377 343c4.5 -4.5 7 -10.5996 7 -16.9004v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7zM128.4 112c17.8994 0 32.5 -12.0996 32.5 -27s-14.5 -27 -32.4004 -27s-32.5 12 -32.5 27c0 14.9004 14.5 27 32.4004 27zM224 312c0 -13.2002 10.7998 -24 24 -24h136
+v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h104.4v-32h32v32h63.5996v-136zM95.9004 416v-32h32v32h-32zM128.2 32c33 0 57.7998 30.2002 51.3994 62.5996l-17.2998 87.7002
+c-1.09961 5.60059 -6.09961 9.7002 -11.7998 9.7002h-22.0996v32h32v32h-32v32h32v32h-32v32h32v32h-32v-32h-32v-32h32v-32h-32v-32h32v-32h-32v-32l-19.6006 -97.0996c-6.59961 -32.5 18.2002 -62.9004 51.4004 -62.9004z" />
+ <glyph glyph-name="file-audio" unicode="&#xf1c7;" horiz-adv-x="384"
+d="M224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136zM160 44v136c0 10.7002 -12.9004 16.0996 -20.5 8.5l-35.5 -36.5h-28c-6.59961 0 -12 -5.40039 -12 -12
+v-56c0 -6.59961 5.40039 -12 12 -12h28l35.5 -36.5c7.59961 -7.5 20.5 -2.2002 20.5 8.5zM193.2 91.5996c-22.5 -23.0996 12.5996 -55.7998 34.3994 -33.5c27.2002 28 27.2002 72.5 0 100.4c-22.1992 22.7002 -56.5 -10.7002 -34.3994 -33.5
+c9.09961 -9.2998 9.09961 -24.0996 0 -33.4004zM279.2 208.7c-22.1006 22.7998 -56.5 -10.7002 -34.4004 -33.5c36.2998 -37.2998 36.2002 -96.6006 0 -133.8c-22.5996 -23.2002 12.6006 -55.9004 34.4004 -33.5c54.3994 56 54.3994 144.899 0 200.8zM384 326.1v-6.09961
+h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7l97.9004 -98c4.5 -4.5 7 -10.5996 7 -16.9004z" />
+ <glyph glyph-name="file-video" unicode="&#xf1c8;" horiz-adv-x="384"
+d="M384 326.059v-6.05859h-128v128h6.05859c6.36523 0 12.4707 -2.5293 16.9717 -7.0293l97.9404 -97.9404c3.87988 -3.88086 7.0293 -11.4834 7.0293 -16.9717zM224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2549 -10.7451 -24 -24 -24h-336
+c-13.2549 0 -24 10.7451 -24 24v464c0 13.2549 10.7451 24 24 24h200v-136zM320 167.984c0 21.4609 -25.96 31.9795 -40.9707 16.9697l-55.0293 -55.0127v38.0586c0 13.2549 -10.7451 24 -24 24h-112c-13.2549 0 -24 -10.7451 -24 -24v-112c0 -13.2549 10.7451 -24 24 -24
+h112c13.2549 0 24 10.7451 24 24v38.0586l55.0293 -55.0088c15.0273 -15.0264 40.9707 -4.47363 40.9707 16.9717v111.963z" />
+ <glyph glyph-name="file-code" unicode="&#xf1c9;" horiz-adv-x="384"
+d="M384 326.059v-6.05859h-128v128h6.05859c6.36523 0 12.4707 -2.5293 16.9717 -7.0293l97.9404 -97.9404c3.87988 -3.88086 7.0293 -11.4834 7.0293 -16.9717zM248 288h136v-328c0 -13.2549 -10.7451 -24 -24 -24h-336c-13.2549 0 -24 10.7451 -24 24v464
+c0 13.2549 10.7451 24 24 24h200v-136c0 -13.2002 10.7998 -24 24 -24zM123.206 47.4951l19.5791 20.8838c0.805664 0.860352 1.45996 2.51465 1.45996 3.69336c0 1.34766 -0.820312 3.16309 -1.83203 4.05371l-40.7627 35.874l40.7627 35.874
+c1.01172 0.890625 1.83203 2.70605 1.83203 4.05371c0 1.17871 -0.654297 2.83301 -1.45996 3.69336l-19.5791 20.8848c-0.882812 0.942383 -2.64844 1.70703 -3.93945 1.70703c-1.17871 0 -2.83398 -0.654297 -3.69336 -1.46094l-64.8662 -60.8115
+c-0.942383 -0.883789 -1.70703 -2.64844 -1.70703 -3.93945c0 -1.29199 0.764648 -3.05664 1.70703 -3.94043l64.8662 -60.8115c0.859375 -0.806641 2.51465 -1.46094 3.69336 -1.46094c1.29102 0 3.05664 0.764648 3.93945 1.70703zM174.501 -2.98438
+c0.407227 -0.118164 1.08203 -0.213867 1.50586 -0.213867c2.23926 0 4.56152 1.74512 5.18457 3.89551l61.4395 211.626c0.118164 0.407227 0.214844 1.08203 0.214844 1.50586c0 2.23828 -1.74512 4.56152 -3.89453 5.18555l-27.4521 7.9707
+c-0.407227 0.117188 -1.08105 0.213867 -1.50488 0.213867c-2.23828 0 -4.5625 -1.74512 -5.1875 -3.89551l-61.4395 -211.626c-0.118164 -0.40625 -0.213867 -1.08105 -0.213867 -1.50391c0 -2.23926 1.74512 -4.56348 3.89453 -5.1875zM335.293 108.061
+c0.942383 0.883789 1.70703 2.64844 1.70703 3.94043c0 1.29102 -0.764648 3.05566 -1.70605 3.93945l-64.8662 60.8115c-0.859375 0.806641 -2.51465 1.46094 -3.69336 1.46094c-1.29102 0 -3.05566 -0.764648 -3.93945 -1.70703l-19.5801 -20.8848
+c-0.805664 -0.860352 -1.45996 -2.51465 -1.45996 -3.69336c0 -1.34766 0.820312 -3.16309 1.83203 -4.05371l40.7627 -35.874l-40.7637 -35.873c-1.01172 -0.890625 -1.83203 -2.70605 -1.83203 -4.05371c0 -1.17871 0.654297 -2.83301 1.45996 -3.69336l19.5801 -20.8848
+c0.882812 -0.942383 2.64844 -1.70703 3.93945 -1.70703c1.17871 0 2.83398 0.654297 3.69336 1.46094z" />
+ <glyph glyph-name="life-ring" unicode="&#xf1cd;"
+d="M256 440c136.967 0 248 -111.033 248 -248s-111.033 -248 -248 -248s-248 111.033 -248 248s111.033 248 248 248zM429.696 320.441c-10.6406 14.3398 -30.915 34.6143 -45.2549 45.2549l-63.3994 -63.3994c18.585 -11.0029 34.2676 -26.6963 45.2549 -45.2549zM256 96
+c53.0186 0 96 42.9814 96 96s-42.9814 96 -96 96s-96 -42.9814 -96 -96s42.9814 -96 96 -96zM127.559 365.696c-14.3398 -10.6406 -34.6143 -30.915 -45.2549 -45.2549l63.3994 -63.3994c11.0029 18.585 26.6963 34.2676 45.2549 45.2549zM82.3037 63.5586
+c10.6406 -14.3398 30.915 -34.6143 45.2549 -45.2549l63.3994 63.3994c-18.585 11.0029 -34.2676 26.6963 -45.2549 45.2549zM384.441 18.3037c14.3398 10.6406 34.6143 30.915 45.2549 45.2549l-63.3994 63.3994c-11.0029 -18.585 -26.6963 -34.2676 -45.2549 -45.2549z
+" />
+ <glyph glyph-name="circle-notch" unicode="&#xf1ce;" horiz-adv-x="511"
+d="M288 408.944c0 15.5996 14.6777 27.167 29.7891 23.292c107.071 -27.457 186.211 -124.604 186.211 -240.236c0 -136.788 -110.745 -247.711 -247.466 -247.999c-137.054 -0.289062 -247.812 109.615 -248.531 246.667c-0.609375 116.126 78.5996 213.85 185.951 241.502
+c15.2119 3.91895 30.0459 -7.52539 30.0459 -23.2344v-16.6475c0 -10.8047 -7.28125 -20.1621 -17.6885 -23.0693c-77.5254 -21.6543 -134.312 -92.749 -134.312 -177.219c0 -101.705 82.3105 -184 184 -184c101.705 0 184 82.3105 184 184
+c0 84.4824 -56.7959 155.566 -134.314 177.219c-10.4043 2.90723 -17.6855 12.2627 -17.6855 23.0664v16.6592z" />
+ <glyph glyph-name="paper-plane" unicode="&#xf1d8;" horiz-adv-x="511"
+d="M476 444.8c17.2998 10 39 -4.59961 35.5996 -24.7998l-72 -432c-2.59961 -15.2998 -18.7998 -24.2002 -33 -18.2002l-124.6 52.2002l-63.5 -77.2998c-14 -17.1006 -42.5 -7.7998 -42.5 15.7998v80.5l240.9 293.5c4.69922 5.7002 -3.10059 13.2002 -8.60059 8.2998
+l-287.3 -253.2l-106.3 44.6006c-18 7.59961 -20.2998 32.7998 -2.2002 43.2002z" />
+ <glyph glyph-name="history" unicode="&#xf1da;"
+d="M504 192.469c0.25293 -136.64 -111.18 -248.372 -247.82 -248.468c-59.0146 -0.0419922 -113.223 20.5303 -155.821 54.9111c-11.0771 8.93945 -11.9053 25.541 -1.83984 35.6064l11.2676 11.2676c8.6084 8.6084 22.3525 9.55078 31.8906 1.9834
+c31.3848 -24.9043 71.1045 -39.7695 114.323 -39.7695c101.705 0 184 82.3105 184 184c0 101.705 -82.3105 184 -184 184c-48.8145 0 -93.1494 -18.9688 -126.068 -49.9316l50.7539 -50.7539c10.0801 -10.0801 2.94141 -27.3145 -11.3125 -27.3145h-145.373
+c-8.83691 0 -16 7.16309 -16 16v145.373c0 14.2539 17.2344 21.3926 27.3145 11.3135l49.3711 -49.3711c44.5234 42.5488 104.866 68.6846 171.314 68.6846c136.81 0 247.747 -110.78 248 -247.531zM323.088 113.685c-8.1377 -10.4629 -23.2158 -12.3467 -33.6787 -4.20996
+l-65.4092 50.874v135.651c0 13.2549 10.7451 24 24 24h16c13.2549 0 24 -10.7451 24 -24v-104.349l40.7012 -31.6572c10.4629 -8.13672 12.3477 -23.2158 4.20996 -33.6787z" />
+ <glyph glyph-name="heading" unicode="&#xf1dc;"
+d="M496 368c0 -8.83691 -7.16309 -16 -16 -16h-37.2754v-320h37.2754c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-160c-8.83691 0 -16 7.16309 -16 16v32c0 8.83691 7.16309 16 16 16h37.6201v128h-203.241v-128h37.6211
+c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-160c-8.83691 0 -16 7.16309 -16 16v32c0 8.83691 7.16309 16 16 16h37.2754v320h-37.2754c-8.83691 0 -16 7.16309 -16 16v32c0 8.83691 7.16309 16 16 16h160c8.83691 0 16 -7.16309 16 -16v-32
+c0 -8.83691 -7.16309 -16 -16 -16h-37.6211v-128h203.242v128h-37.6211c-8.83691 0 -16 7.16309 -16 16v32c0 8.83691 7.16309 16 16 16h160c8.83691 0 16 -7.16309 16 -16v-32z" />
+ <glyph glyph-name="paragraph" unicode="&#xf1dd;" horiz-adv-x="447"
+d="M408 416c13.2549 0 24 -10.7451 24 -24v-32c0 -13.2549 -10.7451 -24 -24 -24h-40v-344c0 -13.2549 -10.7451 -24 -24 -24h-32c-13.2549 0 -24 10.7451 -24 24v344h-32v-344c0 -13.2549 -10.7451 -24 -24 -24h-32c-13.2549 0 -24 10.7451 -24 24v104
+c-88.3926 0 -160.044 71.6787 -160 160.082c0.0449219 88.583 72.9482 159.918 161.531 159.918h230.469z" />
+ <glyph glyph-name="sliders-h" unicode="&#xf1de;"
+d="M496 64c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-336v-16c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v16h-80c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h80v16c0 8.7998 7.2002 16 16 16h32
+c8.7998 0 16 -7.2002 16 -16v-16h336zM496 224c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-80v-16c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v16h-336c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h336v16
+c0 8.7998 7.2002 16 16 16h32c8.7998 0 16 -7.2002 16 -16v-16h80zM496 384c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-208v-16c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v16h-208c-8.7998 0 -16 7.2002 -16 16v32
+c0 8.7998 7.2002 16 16 16h208v16c0 8.7998 7.2002 16 16 16h32c8.7998 0 16 -7.2002 16 -16v-16h208z" />
+ <glyph glyph-name="share-alt" unicode="&#xf1e0;" horiz-adv-x="448"
+d="M352 128c53.0186 0 96 -42.9814 96 -96s-42.9814 -96 -96 -96s-96 42.9814 -96 96v0.0283203c0 5.8125 1.01953 15.1367 2.27637 20.8125l-102.486 64.0537c-16.4033 -13.0752 -37.1816 -20.8945 -59.79 -20.8945c-53.0186 0 -96 42.9814 -96 96s42.9814 96 96 96
+c22.6084 0 43.3867 -7.81934 59.79 -20.8945l102.486 64.0537c-1.48633 6.71094 -2.27637 13.6826 -2.27637 20.8408c0 53.0186 42.9814 96 96 96s96 -42.9814 96 -96s-42.9814 -96 -96 -96c-22.6084 0 -43.3867 7.81934 -59.79 20.8965l-102.486 -64.0547
+c1.25684 -5.68359 2.27637 -15.0205 2.27637 -20.8408c0 -5.82129 -1.01953 -15.1582 -2.27637 -20.8418l102.486 -64.0537c16.4033 13.0752 37.1816 20.8945 59.79 20.8945z" />
+ <glyph glyph-name="share-alt-square" unicode="&#xf1e1;" horiz-adv-x="448"
+d="M448 368v-352c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h352c26.5098 0 48 -21.4902 48 -48zM304 152c-14.5615 0 -27.8232 -5.56055 -37.7832 -14.6709l-67.958 40.7744
+c0.960938 3.77539 1.74121 10.001 1.74121 13.8975c0 3.89551 -0.780273 10.1211 -1.74121 13.8965l67.958 40.7744c9.95996 -9.11133 23.2217 -14.6719 37.7832 -14.6719c30.9277 0 56 25.0723 56 56s-25.0723 56 -56 56s-56 -25.0723 -56 -56
+c0 -4.79688 0.605469 -9.45312 1.74023 -13.8975l-67.958 -40.7744c-9.95898 9.11133 -23.2207 14.6719 -37.7822 14.6719c-30.9277 0 -56 -25.0723 -56 -56s25.0723 -56 56 -56c14.5615 0 27.8232 5.56055 37.7832 14.6709l67.958 -40.7744
+c-0.960938 -3.7666 -1.74023 -9.97656 -1.74023 -13.8623v-0.0351562c0 -30.9277 25.0723 -56 56 -56s56 25.0723 56 56c-0.000976562 30.9287 -25.0732 56.001 -56.001 56.001z" />
+ <glyph glyph-name="bomb" unicode="&#xf1e2;"
+d="M440.5 359.5l-52 -52l26.5 -26.5c9.40039 -9.40039 9.40039 -24.5996 0 -33.9004l-17.4004 -17.3994c11.8008 -26.1006 18.4004 -55.1006 18.4004 -85.6006c0 -114.899 -93.0996 -208 -208 -208s-208 93 -208 207.9s93.0996 208 208 208
+c30.5 0 59.5 -6.59961 85.5996 -18.4004l17.4004 17.4004c9.40039 9.40039 24.5996 9.40039 33.9004 0l26.5 -26.5l52 52zM500 388c6.59961 0 12 -5.40039 12 -12s-5.40039 -12 -12 -12h-24c-6.59961 0 -12 5.40039 -12 12s5.40039 12 12 12h24zM440 448
+c6.59961 0 12 -5.40039 12 -12v-24c0 -6.59961 -5.40039 -12 -12 -12s-12 5.40039 -12 12v24c0 6.59961 5.40039 12 12 12zM473.9 393c-4.60059 -4.7002 -12.2002 -4.7002 -17 0c-4.7002 4.7002 -4.7002 12.2998 0 17l17 17c4.69922 4.7002 12.2998 4.7002 17 0
+c4.69922 -4.7002 4.69922 -12.2998 0 -17zM406.1 393l-17 17c-4.69922 4.7002 -4.69922 12.2998 0 17c4.7002 4.7002 12.3008 4.7002 17 0l17 -17c4.7002 -4.7002 4.7002 -12.2998 0 -17c-4.69922 -4.7002 -12.2998 -4.7002 -17 0zM473.9 359l17 -17
+c4.69922 -4.7002 4.69922 -12.2998 0 -17c-4.7002 -4.7002 -12.3008 -4.7002 -17 0l-17 17c-4.7002 4.7002 -4.7002 12.2998 0 17c4.69922 4.7002 12.2998 4.7002 17 0zM112 176c0 35.2998 28.7002 64 64 64c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16
+c-52.9004 0 -96 -43.0996 -96 -96c0 -8.7998 7.2002 -16 16 -16s16 7.2002 16 16z" />
+ <glyph glyph-name="futbol" unicode="&#xf1e3;"
+d="M504 192c0 -136.967 -111.033 -248 -248 -248s-248 111.033 -248 248s111.033 248 248 248s248 -111.033 248 -248zM456 192l-0.00292969 0.282227l-26.0645 -22.7412l-62.6787 58.5l16.4541 84.3555l34.3027 -3.07227
+c-24.8887 34.2158 -60.0039 60.0889 -100.709 73.1406l13.6514 -31.9385l-74.9531 -41.5264l-74.9531 41.5254l13.6514 31.9385c-40.6309 -13.0283 -75.7803 -38.8701 -100.709 -73.1406l34.5645 3.07324l16.1924 -84.3555l-62.6777 -58.5l-26.0645 22.7412
+l-0.00292969 -0.282227c0 -43.0146 13.4971 -83.9521 38.4717 -117.991l7.7041 33.8975l85.1387 -10.4473l36.3008 -77.8262l-29.9023 -17.7861c40.2021 -13.1221 84.29 -13.1475 124.572 0l-29.9023 17.7861l36.3008 77.8262l85.1387 10.4473l7.7041 -33.8975
+c24.9756 34.0391 38.4727 74.9766 38.4727 117.991zM207.898 122.429l-29.8945 91.3125l77.9961 56.5264l77.9961 -56.5264l-29.6221 -91.3125h-96.4756z" />
+ <glyph glyph-name="tty" unicode="&#xf1e4;"
+d="M5.37012 344.178c138.532 138.532 362.936 138.326 501.262 0c6.07812 -6.07812 7.07422 -15.4961 2.58301 -22.6807l-43.2139 -69.1377c-2.97266 -4.75684 -9.9375 -8.61719 -15.5459 -8.61719c-1.94922 0 -5 0.587891 -6.81055 1.31152l-86.4219 34.5693
+c-6.36133 2.54492 -11.5244 10.1719 -11.5244 17.0234c0 0.503906 0.0410156 1.32031 0.0908203 1.82227l5.95215 59.5312c-62.1455 22.4541 -130.636 21.9863 -191.483 0l5.95312 -59.5322c0.0507812 -0.501953 0.0908203 -1.32031 0.0908203 -1.8252
+c0 -6.85156 -5.16309 -14.4766 -11.5244 -17.0205l-86.4238 -34.5684c-1.80957 -0.723633 -4.85938 -1.31152 -6.80859 -1.31152c-5.60938 0 -12.5742 3.86035 -15.5469 8.61719l-43.2109 69.1387c-1.53809 2.46094 -2.78711 6.81445 -2.78711 9.7168
+c0 4.19141 2.40625 9.99902 5.37012 12.9629zM96 140c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40zM192 140c0 -6.62695 -5.37305 -12 -12 -12h-40
+c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40zM288 140c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40zM384 140
+c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40zM480 140c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40
+c6.62695 0 12 -5.37305 12 -12v-40zM144 44c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40zM240 44c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v40
+c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40zM336 44c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40zM432 44c0 -6.62695 -5.37305 -12 -12 -12h-40
+c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40zM96 -52c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40zM384 -52
+c0 -6.62695 -5.37305 -12 -12 -12h-232c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h232c6.62695 0 12 -5.37305 12 -12v-40zM480 -52c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40
+c6.62695 0 12 -5.37305 12 -12v-40z" />
+ <glyph glyph-name="binoculars" unicode="&#xf1e5;"
+d="M416 400v-48h-96v48c0 8.83984 7.16016 16 16 16h64c8.83984 0 16 -7.16016 16 -16zM63.9102 288.01c0.479492 17.6201 14.2998 31.9902 31.9297 31.9902h96.1602v-160h-32v-160c0 -17.6699 -14.3301 -32 -32 -32h-96c-17.6699 0 -32 14.3301 -32 32v44
+c3.45996 129.78 61.4004 150.16 63.9102 244.01zM448.09 288.01c2.50977 -93.8496 60.4502 -114.229 63.9102 -244.01v-44c0 -17.6699 -14.3301 -32 -32 -32h-96c-17.6699 0 -32 14.3301 -32 32v160h-32v160h96.1602c17.6299 0 31.4502 -14.3701 31.9297 -31.9902zM176 416
+c8.83984 0 16 -7.16016 16 -16v-48h-96v48c0 8.83984 7.16016 16 16 16h64zM224 160v160h64v-160h-64z" />
+ <glyph glyph-name="plug" unicode="&#xf1e6;" horiz-adv-x="384"
+d="M256 304v112c0 17.6729 14.3271 32 32 32s32 -14.3271 32 -32v-112h-64zM368 288c8.83691 0 16 -7.16309 16 -16v-32c0 -8.83691 -7.16309 -16 -16 -16h-16v-32c0 -77.4062 -54.9688 -141.971 -128 -156.796v-99.2041h-64v99.2041
+c-73.0312 14.8252 -128 79.3896 -128 156.796v32h-16c-8.83691 0 -16 7.16309 -16 16v32c0 8.83691 7.16309 16 16 16h352zM128 304h-64v112c0 17.6729 14.3271 32 32 32s32 -14.3271 32 -32v-112z" />
+ <glyph glyph-name="newspaper" unicode="&#xf1ea;" horiz-adv-x="576"
+d="M552 384c13.2549 0 24 -10.7451 24 -24v-312c0 -26.5098 -21.4902 -48 -48 -48h-472c-30.9277 0 -56 25.0723 -56 56v272c0 13.2549 10.7451 24 24 24h40v8c0 13.2549 10.7451 24 24 24h464zM56 48c4.41602 0 8 3.58398 8 8v248h-16v-248c0 -4.41602 3.58398 -8 8 -8z
+M292 64c6.62695 0 12 5.37305 12 12v8c0 6.62695 -5.37305 12 -12 12h-152c-6.62695 0 -12 -5.37305 -12 -12v-8c0 -6.62695 5.37305 -12 12 -12h152zM500 64c6.62695 0 12 5.37305 12 12v8c0 6.62695 -5.37305 12 -12 12h-152c-6.62695 0 -12 -5.37305 -12 -12v-8
+c0 -6.62695 5.37305 -12 12 -12h152zM292 160c6.62695 0 12 5.37305 12 12v8c0 6.62695 -5.37305 12 -12 12h-152c-6.62695 0 -12 -5.37305 -12 -12v-8c0 -6.62695 5.37305 -12 12 -12h152zM500 160c6.62695 0 12 5.37305 12 12v8c0 6.62695 -5.37305 12 -12 12h-152
+c-6.62695 0 -12 -5.37305 -12 -12v-8c0 -6.62695 5.37305 -12 12 -12h152zM500 256c6.62695 0 12 5.37305 12 12v40c0 6.62695 -5.37305 12 -12 12h-360c-6.62695 0 -12 -5.37305 -12 -12v-40c0 -6.62695 5.37305 -12 12 -12h360z" />
+ <glyph glyph-name="wifi" unicode="&#xf1eb;" horiz-adv-x="640"
+d="M634.91 293.12c6.66016 -6.16016 6.79004 -16.5898 0.359375 -22.9805l-34.2393 -33.9697c-6.14062 -6.08984 -16.0205 -6.22949 -22.4004 -0.379883c-145.95 133.71 -371.33 133.68 -517.25 0c-6.37988 -5.84961 -16.2598 -5.71973 -22.3994 0.379883l-34.2402 33.9697
+c-6.44043 6.39062 -6.31055 16.8203 0.349609 22.9805c177.101 163.81 452.65 163.87 629.82 0zM320 96c35.3496 0 64 -28.6504 64 -64s-28.6504 -64 -64 -64s-64 28.6504 -64 64s28.6504 64 64 64zM522.67 179.59c6.88965 -6.08984 7.12012 -16.6895 0.560547 -23.1494
+l-34.4404 -33.9902c-6 -5.93066 -15.6602 -6.32031 -22.0498 -0.799805c-83.75 72.4092 -209.54 72.5693 -293.49 0c-6.38965 -5.52051 -16.0498 -5.12012 -22.0498 0.799805l-34.4404 33.9902c-6.5498 6.45996 -6.33008 17.0498 0.570312 23.1494
+c115.13 101.82 290.08 101.93 405.34 0z" />
+ <glyph glyph-name="calculator" unicode="&#xf1ec;" horiz-adv-x="448"
+d="M400 448c25.5996 0 48 -22.4004 48 -48v-416c0 -25.5996 -22.4004 -48 -48 -48h-352c-25.5996 0 -48 22.4004 -48 48v416c0 25.5996 22.4004 48 48 48h352zM128 12.7998v38.4004c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-38.4004
+c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998v-38.4004c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h38.4004c6.39941 0 12.7998 6.40039 12.7998 12.7998zM128 140.8v38.4004c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-38.4004
+c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998v-38.4004c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h38.4004c6.39941 0 12.7998 6.40039 12.7998 12.7998zM256 12.7998v38.4004c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-38.4004
+c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998v-38.4004c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h38.4004c6.39941 0 12.7998 6.40039 12.7998 12.7998zM256 140.8v38.4004c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-38.4004
+c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998v-38.4004c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h38.4004c6.39941 0 12.7998 6.40039 12.7998 12.7998zM384 12.7998v166.4c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-38.4004
+c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998v-166.4c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h38.4004c6.39941 0 12.7998 6.40039 12.7998 12.7998zM384 268.8v102.4c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-294.4
+c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998v-102.4c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h294.4c6.39941 0 12.7998 6.40039 12.7998 12.7998z" />
+ <glyph glyph-name="bell-slash" unicode="&#xf1f6;" horiz-adv-x="640"
+d="M633.82 -10.0996c6.97949 -5.43066 8.22949 -15.4805 2.80957 -22.4502l-19.6396 -25.2705c-5.43066 -6.97949 -15.4805 -8.23926 -22.46 -2.80957l-588.351 454.729c-6.97949 5.43066 -8.22949 15.4805 -2.80957 22.4502l19.6396 25.2705
+c5.41992 6.97949 15.4805 8.22949 22.46 2.80957l144.96 -112.04c22.9307 31.5 57.2607 54.1904 97.5898 62.5703v20.8398c0 17.6699 14.3203 32 31.9805 32s31.9805 -14.3301 31.9805 -32v-20.8301c73.46 -15.2598 127.939 -77.46 127.939 -155.16
+c0 -102.3 36.1504 -133.529 55.4697 -154.29c6 -6.43945 8.66016 -14.1602 8.61035 -21.71c0 -1.39941 -0.610352 -2.67969 -0.799805 -4.05957zM157.23 196.46l212.789 -164.46h-241.92c-19.1191 0 -31.9893 15.5996 -32.0996 32
+c-0.0498047 7.5498 2.61035 15.2598 8.61035 21.71c16.21 17.4199 44.0098 42.79 52.6201 110.75zM320 -64c-35.3203 0 -63.9697 28.6504 -63.9697 64h127.939c0 -35.3496 -28.6494 -64 -63.9697 -64z" />
+ <glyph glyph-name="trash" unicode="&#xf1f8;" horiz-adv-x="448"
+d="M432 416c8.83203 0 16 -7.16797 16 -16v-32c0 -8.83203 -7.16797 -16 -16 -16h-416c-8.83203 0 -16 7.16797 -16 16v32c0 8.83203 7.16797 16 16 16h120l9.40039 18.7002c3.58984 7.3418 13.1357 13.2998 21.3086 13.2998h0.0908203h114.3h0.0175781
+c8.20215 0 17.8262 -5.95801 21.4824 -13.2998l9.40039 -18.7002h120zM53.2002 -19l-21.2002 339h384l-21.2002 -339c-1.55469 -24.8369 -23.0146 -44.9971 -47.8994 -45h-245.801c-24.8848 0.00292969 -46.3447 20.1631 -47.8994 45z" />
+ <glyph glyph-name="copyright" unicode="&#xf1f9;"
+d="M256 440c136.967 0 248 -111.033 248 -248s-111.033 -248 -248 -248s-248 111.033 -248 248s111.033 248 248 248zM373.134 93.2471c1.58203 1.85645 2.86621 5.34375 2.86621 7.7832c0 2.16309 -1.03613 5.33594 -2.31348 7.08301l-24.5469 33.5713
+c-4.44824 6.08691 -13.376 6.61816 -18.5078 1.05371c-0.263672 -0.287109 -26.8467 -28.625 -65.5439 -28.625c-48.7627 0 -74.2773 40.0898 -74.2773 79.6914c0 42.5068 27.8008 76.082 73.916 76.082c35.3516 0 61.6475 -23.666 61.8809 -23.8799
+c5.45996 -5.05566 14.1846 -3.97168 18.2334 2.29492l22.3799 34.6553c1.05957 1.64062 1.91992 4.55762 1.91992 6.51074c0 2.57812 -1.41504 6.21191 -3.15723 8.1123c-1.45703 1.58887 -36.4658 38.9043 -103.423 38.9043
+c-81.7578 0 -143.762 -62.0986 -143.762 -143.401c0 -82.3066 59.792 -145.567 144.484 -145.567c70.0752 0 108.259 43.8643 109.851 45.7314z" />
+ <glyph glyph-name="at" unicode="&#xf1fa;"
+d="M256 440c138.023 0 248 -87.6533 248 -224c0 -75.7979 -41.3906 -147.41 -150.299 -147.41c-30.0977 0 -61.1885 -0.000976562 -70.71 34.1035c-17.6221 -22.6963 -48.0068 -38.333 -74.9912 -38.333c-59.2148 0 -96 40.5664 -96 105.87
+c0 89.2256 63.251 151.46 137.831 151.46c19.5225 0 45.2744 -3.87402 59.9707 -21.7754l0.00976562 0.0917969c0.751953 6.62012 6.76953 11.9932 13.4326 11.9932v0h44.9805c15.083 0 26.4287 -13.75 23.5625 -28.5586l-23.4336 -121.11
+c-3.43359 -17.167 -3.87207 -29.5703 13.4766 -30.0244c37.0771 3.95117 58.1699 44.9072 58.1699 83.6934c0 102.381 -83.8613 160 -184 160c-101.458 0 -184 -82.542 -184 -184s82.542 -184 184 -184c35.3145 0 69.9199 10.2432 99.4102 29.1572
+c10.1934 6.53809 23.7021 4.24219 31.373 -5.12891l10.1768 -12.4336c9.07324 -11.084 6.45312 -27.5566 -5.55176 -35.3721c-40.0664 -26.083 -87.2539 -40.2227 -135.408 -40.2227c-137.081 0 -248 110.941 -248 248c0 137.081 110.941 248 248 248zM234.32 135.57
+c24.2861 0 58.1611 27.6689 58.1611 72.7295c0 25.5293 -13.3096 40.7705 -35.6016 40.7705c-27.8506 0 -58.6299 -27.7363 -58.6299 -72.7295c0 -25.1475 13.8213 -40.7705 36.0703 -40.7705z" />
+ <glyph glyph-name="eye-dropper" unicode="&#xf1fb;"
+d="M50.75 114.75l126.63 126.61l128 -128l-126.64 -126.62c-12 -12 -28.2803 -18.7402 -45.25 -18.7402h-45.4902l-56 -32l-32 32l32 56v45.4902c0 16.9795 6.75 33.2598 18.75 45.2598zM483.88 419.88c37.5 -37.4902 37.5 -98.2695 -0.00976562 -135.75l-77.0898 -77.0898
+l13.0996 -13.0996c9.37012 -9.37012 9.37012 -24.5703 0 -33.9404l-40.9697 -40.96c-9.29004 -9.30957 -24.5 -9.44043 -33.9404 0l-161.939 161.94c-9.37012 9.36914 -9.37012 24.5693 0 33.9395l40.9697 40.9697c9.29004 9.31055 24.5 9.44043 33.9404 0l13.0996 -13.0996
+l77.0898 77.0898c37.4697 37.5 98.2803 37.5 135.75 0z" />
+ <glyph glyph-name="paint-brush" unicode="&#xf1fc;"
+d="M167.02 138.66l88.0107 -73.3398c0.319336 -3.05078 0.969727 -6.02051 0.969727 -9.12988c0 -76.4209 -52.1396 -120.19 -128 -120.19c-90.0703 0 -128 72.3799 -128.01 154.73c9.79004 -6.68066 44.1396 -34.3506 55.25 -34.3506
+c6.58984 0 12.2402 3.77051 14.5898 9.98047c20.6602 54.4395 57.0703 69.7197 97.1895 72.2998zM457.89 448c28.1104 0 54.1104 -20.6396 54.1104 -49.5498c0 -16.1406 -6.51953 -31.6406 -13.9004 -45.9902c-113.05 -210.99 -149.05 -256.46 -211.159 -256.46
+c-7.75 0 -15.1807 1.23047 -22.3906 3.03027l-63.8193 53.1797c-5.48047 11.9404 -8.73047 25 -8.73047 38.7002c0 53.75 21.2695 58.04 225.68 240.64c10.8398 9.74023 25.0508 16.4502 40.21 16.4502z" />
+ <glyph glyph-name="birthday-cake" unicode="&#xf1fd;" horiz-adv-x="448"
+d="M448 64c-28.0195 0 -31.2598 32 -74.5 32c-43.4297 0 -46.8252 -32 -74.75 -32c-27.6953 0 -31.4541 32 -74.75 32c-42.8418 0 -47.2178 -32 -74.5 -32c-28.1484 0 -31.2021 32 -74.75 32c-43.5469 0 -46.6533 -32 -74.75 -32v80c0 26.5 21.5 48 48 48h16v144h64v-144h64
+v144h64v-144h64v144h64v-144h16c26.5 0 48 -21.5 48 -48v-80zM448 -64h-448v96c43.3564 0 46.7666 32 74.75 32c27.9512 0 31.2529 -32 74.75 -32c42.8428 0 47.2168 32 74.5 32c28.1484 0 31.2012 -32 74.75 -32c43.3574 0 46.7666 32 74.75 32
+c27.4883 0 31.252 -32 74.5 -32v-96zM96 352c-17.75 0 -32 14.25 -32 32c0 31 32 23 32 64c12 0 32 -29.5 32 -56s-14.25 -40 -32 -40zM224 352c-17.75 0 -32 14.25 -32 32c0 31 32 23 32 64c12 0 32 -29.5 32 -56s-14.25 -40 -32 -40zM352 352c-17.75 0 -32 14.25 -32 32
+c0 31 32 23 32 64c12 0 32 -29.5 32 -56s-14.25 -40 -32 -40z" />
+ <glyph glyph-name="chart-area" unicode="&#xf1fe;"
+d="M500 64c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-488c-6.59961 0 -12 5.40039 -12 12v360c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-308h436zM372.7 288.5c6 4 14.2002 1.7998 17.3994 -4.7002l89.9004 -187.8h-384v104
+l86.7998 144.7c4.40039 7.2998 14.7998 7.7998 19.9004 1l85.2998 -113.7z" />
+ <glyph glyph-name="chart-pie" unicode="&#xf200;" horiz-adv-x="544"
+d="M527.79 160c9.5498 0 17.4004 -8.38965 16.0596 -17.8496c-7.80957 -55.25 -34.4297 -104.4 -73.1299 -140.86c-6.20996 -5.84961 -16.1494 -5.36035 -22.1895 0.679688l-158.03 158.03h237.29zM511.96 224.8c0.629883 -9.12012 -7.0498 -16.7998 -16.1904 -16.7998
+h-223.77v223.76c0 9.14062 7.67969 16.8301 16.7998 16.2002c119.46 -8.24023 214.92 -103.7 223.16 -223.16zM224 160l155.86 -155.87c6.84961 -6.84961 6.33008 -18.4795 -1.57031 -24.0801c-38.29 -27.1602 -84.8604 -43.3994 -135.26 -44.0303
+c-128.2 -1.60938 -238.53 103.471 -242.891 231.61c-4.23926 124.771 86.8506 228.88 206.021 245.72c9.4502 1.34082 17.8398 -6.50977 17.8398 -16.0596v-237.29z" />
+ <glyph glyph-name="chart-line" unicode="&#xf201;"
+d="M496 64c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-464c-17.6699 0 -32 14.3301 -32 32v336c0 8.83984 7.16016 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-304h432zM464 352c8.83984 0 16 -7.16016 15.9902 -16v-118.05
+c0 -21.3799 -25.8506 -32.0898 -40.9707 -16.9697l-32.3994 32.3994l-96 -96c-12.4902 -12.5 -32.75 -12.5 -45.25 0l-73.3701 73.3701l-46.0596 -46.0703c-6.25 -6.25 -16.3809 -6.25 -22.6309 0l-22.6191 22.6201c-6.25 6.25 -6.25 16.3799 0 22.6299l68.6895 68.6904
+c12.4902 12.5 32.75 12.5 45.25 0l73.3701 -73.3701l73.3701 73.3799l-32.4004 32.4004c-15.1201 15.1201 -4.41016 40.9697 16.9707 40.9697h118.06z" />
+ <glyph glyph-name="toggle-off" unicode="&#xf204;" horiz-adv-x="576"
+d="M384 384c106.039 0 192 -85.9609 192 -192s-85.9609 -192 -192 -192h-192c-106.039 0 -192 85.9609 -192 192s85.9609 192 192 192h192zM64 192c0 -70.751 57.2588 -128 128 -128c70.751 0 128 57.2588 128 128c0 70.751 -57.2588 128 -128 128
+c-70.751 0 -128 -57.2588 -128 -128zM384 64c70.751 0 128 57.2598 128 128c0 70.751 -57.2588 128 -128 128h-48.9053c65.2363 -72.8799 65.2168 -183.142 0 -256h48.9053z" />
+ <glyph glyph-name="toggle-on" unicode="&#xf205;" horiz-adv-x="576"
+d="M384 384c106 0 192 -86 192 -192s-86 -192 -192 -192h-192c-106 0 -192 86 -192 192s86 192 192 192h192zM384 64c70.7002 0 128 57.2002 128 128c0 70.7002 -57.2002 128 -128 128c-70.7002 0 -128 -57.2002 -128 -128c0 -70.7002 57.2002 -128 128 -128z" />
+ <glyph glyph-name="bicycle" unicode="&#xf206;" horiz-adv-x="640"
+d="M512.509 255.999c70.9502 -0.276367 128.562 -59.0547 127.477 -129.996c-1.07422 -70.1934 -58.6494 -126.681 -129.255 -125.996c-68.8301 0.667969 -126.584 58.8857 -126.729 127.719c-0.078125 37.3564 15.8516 70.9893 41.3066 94.4375l-14.9375 24.0674
+l-85.9619 -138.863c-3.88477 -6.27441 -13.0264 -11.3672 -20.4062 -11.3672h-52.0332c-14.209 -55.207 -64.3252 -96 -123.967 -96c-70.9473 0 -128.415 57.7207 -128 128.764c0.410156 70.2246 58.0918 127.406 128.317 127.236
+c15.0879 -0.0371094 29.5586 -2.69043 42.9912 -7.51953l29.417 47.5195h-48.7256c-13.1191 0 -23.7627 10.5186 -23.9951 23.5635c-0.241211 13.4375 11.0947 24.4365 24.5361 24.4365h87.459c8.83691 0 16 -7.16309 16 -16v-16h113.544l-14.8955 24h-50.6494
+c-8.83691 0 -16 7.16309 -16 16v16c0 8.83691 7.16309 16 16 16h64h0.000976562c7.36914 0 16.5049 -5.08203 20.3906 -11.3428l77.6807 -125.153c14.4053 5.54004 30.0625 8.55957 46.4355 8.49512zM186.75 182.228l-23.6641 -38.2275h43.3057
+c-2.96875 14.5674 -9.91504 27.6992 -19.6416 38.2275zM128.002 48c32.7383 0 60.9297 19.7754 73.2998 48h-81.2998c-18.7891 0 -30.2871 20.6729 -20.4062 36.6318l45.5049 73.5088c-5.5127 1.20605 -11.2295 1.85938 -17.0986 1.85938c-44.1123 0 -80 -35.8877 -80 -80
+s35.8877 -80 80 -80zM290.632 144l74.2861 120h-127.547l-24.7461 -39.9736c22.8271 -20.1328 38.4229 -48.2705 42.3828 -80.0264h35.624zM507.689 48.1143c46.0605 -2.43164 84.3115 34.3447 84.3125 79.8848c0 44.1123 -35.8877 80 -80 80h-0.0390625
+c-5.55664 0 -14.4355 -1.11914 -19.8193 -2.49707l44.4688 -71.6426c4.66113 -7.50879 2.35156 -17.3721 -5.15625 -22.0322l-13.5938 -8.4375c-7.50879 -4.65918 -17.3721 -2.35156 -22.0322 5.15625l-44.4326 71.5859
+c-12.7021 -14.7451 -20.1475 -34.1416 -19.3359 -55.2627c1.57812 -41.0635 34.5918 -74.5898 75.6279 -76.7549z" />
+ <glyph glyph-name="bus" unicode="&#xf207;"
+d="M488 320c13.25 0 24 -10.7402 24 -24v-80c0 -13.25 -10.75 -24 -24 -24h-8v-166.4c0 -12.7998 -9.59961 -25.5996 -25.5996 -25.5996h-6.40039v-32c0 -17.6699 -14.3301 -32 -32 -32h-32c-17.6699 0 -32 14.3301 -32 32v32h-192v-32c0 -17.6699 -14.3301 -32 -32 -32h-32
+c-17.6699 0 -32 14.3301 -32 32v32c-17.6699 0 -32 14.3301 -32 32v160h-8c-13.25 0 -24 10.75 -24 24v80c0 13.2598 10.75 24 24 24h8v48c0 44.7998 99.2002 80 224 80s224 -35.2002 224 -80v-48h8zM112 48c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32
+s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM128 160h256c17.6699 0 32 14.3301 32 32v128c0 17.6699 -14.3301 32 -32 32h-256c-17.6699 0 -32 -14.3301 -32 -32v-128c0 -17.6699 14.3301 -32 32 -32zM400 48c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32
+s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="closed-captioning" unicode="&#xf20a;"
+d="M464 384c26.5 0 48 -21.5 48 -48v-288c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v288c0 26.5 21.5 48 48 48h416zM218.1 160.3c-41.1992 -37.8994 -92.1992 -15.3994 -92.2998 32.6006c0 46.0996 53.7998 63.1992 94.6006 31.1992
+c2.89941 -2.2998 7.19922 -1.39941 9.09961 1.7002l17.5 30.5c1.5 2.5 1.09961 3.7002 -1 5.7002c-50.7998 49.4004 -172.5 27.2002 -172.5 -70.0996c0 -100 119.2 -124.7 172.8 -67.9004c2 2.09961 2.2002 5.2998 0.5 7.7002l-19.5 27.7002
+c-2.09961 3 -6.39941 3.39941 -9.2002 0.899414zM408.5 160.3c-41.2002 -37.8994 -92.2002 -15.3994 -92.2002 32.6006c0 46.0996 53.7998 63.1992 94.6006 31.1992c2.89941 -2.2998 7.19922 -1.39941 9.09961 1.7002l17.5 30.5c1.5 2.5 1.09961 3.7002 -1 5.7002
+c-50.7998 49.4004 -172.5 27.2002 -172.5 -70.0996c0 -100 119.2 -124.801 172.7 -67.9004c2 2.09961 2.2002 5.2998 0.5 7.7002l-19.5 27.7002c-2.10059 3 -6.40039 3.39941 -9.2002 0.899414z" />
+ <glyph glyph-name="shekel-sign" unicode="&#xf20b;" horiz-adv-x="448"
+d="M248 280c0 30.9297 -25.0703 56 -56 56h-112v-352c0 -8.83984 -7.16016 -16 -16 -16h-48c-8.83984 0 -16 7.16016 -16 16v408c0 13.2598 10.75 24 24 24h168c75.1104 0 136 -60.8896 136 -136v-168c0 -8.83984 -7.16016 -16 -16 -16h-48c-8.83984 0 -16 7.16016 -16 16
+v168zM432 416c8.83984 0 16 -7.16016 16 -16v-296c0 -75.1104 -60.8896 -136 -136 -136h-168c-13.25 0 -24 10.75 -24 24v280c0 8.83984 7.16016 16 16 16h48c8.83984 0 16 -7.16016 16 -16v-224h112c30.9297 0 56 25.0703 56 56v296c0 8.83984 7.16016 16 16 16h48z" />
+ <glyph glyph-name="cart-plus" unicode="&#xf217;" horiz-adv-x="575"
+d="M504.717 128h-293.145l6.54492 -32h268.418c15.4004 0 26.8154 -14.3008 23.4033 -29.3193l-5.51758 -24.2754c18.6914 -9.07324 31.5791 -28.2334 31.5791 -50.4053c0 -31.2021 -25.5186 -56.4443 -56.8242 -55.9941
+c-29.8232 0.428711 -54.3496 24.6309 -55.1543 54.4473c-0.44043 16.2871 6.08496 31.0488 16.8027 41.5479h-209.648c10.377 -10.166 16.8242 -24.3271 16.8242 -40.001c0 -31.8135 -26.5283 -57.4307 -58.6699 -55.9385c-28.54 1.3252 -51.751 24.3857 -53.251 52.917
+c-1.1582 22.0342 10.4355 41.4551 28.0508 51.5869l-70.2471 343.435h-69.8828c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24h102.529c11.4004 0 21.2275 -8.02148 23.5127 -19.1904l9.16602 -44.8096h392.782
+c15.4004 0 26.8154 -14.3008 23.4023 -29.3193l-47.2725 -208c-2.4834 -10.9268 -12.1973 -18.6807 -23.4033 -18.6807zM408 280h-48v40c0 8.83691 -7.16309 16 -16 16h-16c-8.83691 0 -16 -7.16309 -16 -16v-40h-48c-8.83691 0 -16 -7.16309 -16 -16v-16
+c0 -8.83691 7.16309 -16 16 -16h48v-40c0 -8.83691 7.16309 -16 16 -16h16c8.83691 0 16 7.16309 16 16v40h48c8.83691 0 16 7.16309 16 16v16c0 8.83691 -7.16309 16 -16 16z" />
+ <glyph glyph-name="cart-arrow-down" unicode="&#xf218;" horiz-adv-x="575"
+d="M504.717 128h-293.145l6.54492 -32h268.418c15.4004 0 26.8154 -14.3008 23.4033 -29.3193l-5.51758 -24.2754c18.6914 -9.07324 31.5791 -28.2334 31.5791 -50.4053c0 -31.2021 -25.5186 -56.4443 -56.8242 -55.9941
+c-29.8232 0.428711 -54.3496 24.6309 -55.1543 54.4473c-0.44043 16.2871 6.08496 31.0488 16.8027 41.5479h-209.648c10.377 -10.166 16.8242 -24.3271 16.8242 -40.001c0 -31.8135 -26.5283 -57.4307 -58.6699 -55.9385c-28.54 1.3252 -51.751 24.3857 -53.251 52.917
+c-1.1582 22.0342 10.4355 41.4551 28.0508 51.5869l-70.2471 343.435h-69.8828c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24h102.529c11.4004 0 21.2275 -8.02148 23.5127 -19.1904l9.16602 -44.8096h392.782
+c15.4004 0 26.8154 -14.3008 23.4023 -29.3193l-47.2725 -208c-2.4834 -10.9268 -12.1973 -18.6807 -23.4033 -18.6807zM403.029 256h-43.0293v60c0 6.62695 -5.37305 12 -12 12h-24c-6.62695 0 -12 -5.37305 -12 -12v-60h-43.0293
+c-10.6904 0 -16.0449 -12.9258 -8.48438 -20.4854l67.0283 -67.0283c4.68652 -4.68652 12.2842 -4.68652 16.9717 0l67.0283 67.0283c7.55957 7.55957 2.20508 20.4854 -8.48535 20.4854z" />
+ <glyph glyph-name="ship" unicode="&#xf21a;" horiz-adv-x="640"
+d="M496.616 75.3613c17.8418 -44.3604 58.5664 -75.3613 119.384 -75.3613c13.2549 0 24 -10.7451 24 -24v-16c0 -13.2549 -10.7451 -24 -24 -24c-61.0322 0 -107.505 20.6162 -143.258 59.3965c-14.4189 -34.8281 -48.7637 -59.3965 -88.7422 -59.3965h-128
+c-39.9785 0 -74.3232 24.5684 -88.7422 59.3965c-35.7588 -38.7861 -82.2344 -59.3965 -143.258 -59.3965c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24c61.5869 0 101.828 31.7129 119.384 75.3613l-70.0117 70.0117
+c-16.918 16.9189 -9.91699 45.7793 12.8359 53.0918l41.792 13.4336v140.102c0 17.6729 14.3271 32 32 32h64v40c0 13.2549 10.7451 24 24 24h144c13.2549 0 24 -10.7451 24 -24v-40h64c17.6729 0 32 -14.3271 32 -32v-140.102l41.792 -13.4336
+c22.7783 -7.32129 29.7354 -36.1914 12.8359 -53.0918zM192 320v-87.5312l118.208 37.9951c2.63574 0.847656 7.02344 1.53516 9.79199 1.53516s7.15625 -0.6875 9.79199 -1.53516l118.208 -37.9951v87.5312h-256z" />
+ <glyph glyph-name="user-secret" unicode="&#xf21b;" horiz-adv-x="448"
+d="M383.9 139.7c38.2998 -23.7002 64.0996 -65.7002 64.0996 -114.101v-44.7998c0 -24.7002 -20.0996 -44.7998 -44.7998 -44.7998h-358.4c-24.7002 0 -44.7998 20.0996 -44.7998 44.7998v44.7998c0 49.7002 27.2998 92.6006 67.4004 115.9l-25.8008 60.2002
+c-4.5 10.5996 3.2002 22.2998 14.7002 22.2998h57.5c-11 18.9004 -17.7998 40.5996 -17.7998 64v0.299805c-39.2002 7.7998 -64 19.1006 -64 31.7002c0 13.2998 27.2998 25.0996 70 33c9.2002 32.7998 27.0996 65.7998 40.5996 82.7998
+c9.5 11.9004 25.9004 15.6006 39.5 8.7998l27.6006 -13.7998c9 -4.5 19.5996 -4.5 28.5996 0l27.6006 13.7998c13.5996 6.80078 30 3.10059 39.5 -8.7998c13.5996 -17 31.3994 -50 40.5996 -82.7998c42.7998 -7.90039 70.0996 -19.7002 70.0996 -33
+c0 -12.5996 -24.7998 -23.9004 -64 -31.7002v-0.299805c0 -23.4004 -6.7998 -45.0996 -17.7998 -64h58.5c11.2998 0 19 -11.2002 15 -21.7002zM176 -32l32 120l-24 40l-49.5996 32zM272 -32l41.5996 192l-49.5996 -32l-24 -40zM313.7 266.5
+c0.799805 2.59961 6.2998 5.7002 6.39941 5.7998v10.7998c-28.2998 -3.69922 -61 -5.7998 -96 -5.7998s-67.6992 2.2002 -96 5.7998v-10.7998c0 -0.0996094 5.5 -3.2998 6.30078 -5.7998c3.7998 -11.9004 7 -24.5996 16.5 -33.4004c8 -7.39941 47 -25.1992 64 25
+c2.89941 8.40039 15.5 8.40039 18.2998 0c16 -47.3994 53.8994 -34.2998 64 -25c9.5 8.80078 12.5996 21.5 16.5 33.4004z" />
+ <glyph glyph-name="motorcycle" unicode="&#xf21c;" horiz-adv-x="640"
+d="M512.9 256c69.5996 -0.5 126.5 -57.2998 127.199 -126.9c0.600586 -71.5996 -57.5996 -129.8 -129.199 -129.1c-69.6006 0.599609 -126.301 57.5 -126.801 127.1c-0.299805 39.3008 17.2002 74.5 44.8008 98.2002l-12.5 20.7998
+c-38.7002 -31.2998 -58.3008 -77.8994 -56.2002 -125c0.599609 -13.6992 -10.2998 -25.0996 -24 -25.0996h-84.2002c-14.2998 -55.2002 -64.4004 -96 -124 -96c-71.7002 0 -129.6 58.9004 -128 131c1.59961 67.4004 55.9004 122.5 123.2 124.9
+c14.3994 0.5 28.2998 -1.30078 41.2998 -5.2002l11.2998 20.5c-9.09961 13.8994 -23.2998 24.7998 -47.7998 24.7998h-56c-13.0996 0 -23.7998 10.5 -24 23.5c-0.299805 13.5 11 24.5 24.5 24.5h55.5c55 0 82.2002 -16.9004 99.9004 -40h153.699l-19.1992 32h-66.4004
+c-8.7998 0 -16 7.2002 -16 16v16c0 8.7998 7.2002 16 16 16h80c8.40039 0 16.2002 -4.40039 20.5996 -11.5996l22.8008 -38l37.5 41.6992c4.5 5 11 7.90039 17.7998 7.90039h45.2998c13.2998 0 24 -10.7002 24 -24v-32c0 -13.2998 -10.7002 -24 -24 -24h-82.4004
+l32.9004 -54.9004c13.2998 4.60059 27.5 7 42.4004 6.90039zM128 48c32.7002 0 60.9004 19.7998 73.2998 48h-81.2998c-18.2002 0 -29.7998 19.5996 -21 35.5996l41.5 75.4004c-4.09961 0.700195 -8.2998 1 -12.5 1c-44.0996 0 -80 -35.9004 -80 -80s35.9004 -80 80 -80z
+M591.9 123.6c2.39941 46.1006 -34.3008 84.4004 -79.9004 84.3008c-5.40039 0 -10.7002 -0.5 -15.9004 -1.60059l48.6006 -80.8994c4.5 -7.60059 2.09961 -17.5 -5.5 -22l-13.7002 -8.2002c-7.59961 -4.5 -17.5 -2.10059 -22 5.5l-49.4004 82.3994
+c-13.6992 -14.2998 -22.0996 -33.6992 -22.0996 -55.0996c0 -45.5996 38.2998 -82.4004 84.4004 -79.9004c40.5 2.10059 73.2998 34.9004 75.5 75.5z" />
+ <glyph glyph-name="street-view" unicode="&#xf21d;"
+d="M367.9 118.24c85.2295 -15.5801 144.1 -48.29 144.1 -86.2402c0 -53.0195 -114.62 -96 -256 -96s-256 42.9805 -256 96c0 37.9502 58.8701 70.6602 144.1 86.2402c4.62012 -5.2998 9.78027 -10.1006 15.9004 -13.6504v-22.9395
+c-66.5195 -9.35059 -112 -28.0508 -112 -49.6504c0 -30.9297 93.1201 -56 208 -56s208 25.0703 208 56c0 21.5996 -45.4805 40.3096 -112 49.6504v22.9395c6.12012 3.5498 11.2803 8.35059 15.9004 13.6504zM256 320c-35.3496 0 -64 28.6504 -64 64s28.6504 64 64 64
+s64 -28.6504 64 -64s-28.6504 -64 -64 -64zM192 128c-17.6699 0 -32 14.3301 -32 32v96c0 26.5098 21.4902 48 48 48h11.7998c11.0703 -5.03027 23.2598 -8 36.2002 -8s25.1299 2.96973 36.2002 8h11.7998c26.5098 0 48 -21.4902 48 -48v-96
+c0 -17.6699 -14.3301 -32 -32 -32v-96c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v96z" />
+ <glyph glyph-name="heartbeat" unicode="&#xf21e;"
+d="M320.2 204.2l22.0996 -44.2002h109.101l-182.601 -186.5c-7.09961 -7.2998 -18.5996 -7.2998 -25.7002 0l-182.5 186.5h94.1006l30 71.7002l56.8994 -126.3c5.5 -12.3008 22.9004 -12.7002 28.9004 -0.600586zM473.7 374.1
+c48.7002 -49.7998 50.7998 -129.1 7.2998 -182.1h-118.9l-27.5996 55.2002c-5.90039 11.7998 -22.7002 11.7998 -28.5996 0l-49 -97.9004l-58.2002 129.3c-5.7998 12.8008 -24 12.5 -29.4004 -0.399414l-35.8994 -86.2002h-102.4c-43.5 53 -41.4004 132.3 7.2998 182.1
+l2.40039 2.40039c51.5 52.7002 135.899 52.7002 187.399 0l27.9004 -28.5l27.9004 28.5996c51.5996 52.6006 135.899 52.6006 187.399 0z" />
+ <glyph glyph-name="venus" unicode="&#xf221;" horiz-adv-x="288"
+d="M288 272c0 -68.5 -47.9004 -125.9 -112 -140.4v-51.5996h36c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-36v-36c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v36h-36c-6.59961 0 -12 5.40039 -12 12v40
+c0 6.59961 5.40039 12 12 12h36v51.5996c-64.0996 14.5 -112 71.9004 -112 140.4c0 79.5 64.5 144 144 144s144 -64.5 144 -144zM64 272c0 -44.0996 35.9004 -80 80 -80s80 35.9004 80 80s-35.9004 80 -80 80s-80 -35.9004 -80 -80z" />
+ <glyph glyph-name="mars" unicode="&#xf222;" horiz-adv-x="384"
+d="M372 384c6.59961 0 12 -5.40039 12 -12v-79c0 -10.7002 -12.9004 -16.0996 -20.5 -8.5l-16.9004 16.9004l-80.6992 -80.7002c14 -22.2002 22.0996 -48.5 22.0996 -76.7002c0 -79.5 -64.5 -144 -144 -144s-144 64.5 -144 144s64.5 144 144 144
+c28.2002 0 54.5 -8.09961 76.7002 -22.0996l80.7002 80.6992l-16.9004 16.9004c-7.5 7.59961 -2.2002 20.5 8.5 20.5h79zM144 64c44.0996 0 80 35.9004 80 80s-35.9004 80 -80 80s-80 -35.9004 -80 -80s35.9004 -80 80 -80z" />
+ <glyph glyph-name="mercury" unicode="&#xf223;" horiz-adv-x="288"
+d="M288 240c0 -68.5 -47.9004 -125.9 -112 -140.4v-51.5996h36c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-36v-36c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v36h-36c-6.59961 0 -12 5.40039 -12 12v40
+c0 6.59961 5.40039 12 12 12h36v51.5996c-64.0996 14.5 -112 71.9004 -112 140.4c0 44.2002 19.9004 83.7002 51.2002 110c-2.5 1.90039 -4.90039 3.7998 -7.2002 5.7998c-24.7998 21.2002 -39.7998 48.7998 -43.2002 78.9004
+c-0.899414 7.09961 4.7002 13.2998 11.9004 13.2998h40.5c5.7002 0 10.5996 -4.09961 11.7002 -9.7998c2.5 -12.5 9.59961 -24.2998 20.6992 -33.7998c15.4004 -13.2002 36.1006 -20.4004 58.4004 -20.4004s43 7.2002 58.2998 20.4004
+c11.1006 9.5 18.2998 21.2998 20.7002 33.7998c1.09961 5.7002 6 9.7998 11.7998 9.7998h40.5c7.2002 0 12.7998 -6.2002 11.9004 -13.2998c-3.40039 -30 -18.5 -57.6006 -43.2002 -78.7998c-2.2998 -2 -4.7002 -4 -7.2002 -5.80078
+c31.2998 -26.3994 51.2002 -65.8994 51.2002 -110.1zM64 240c0 -44.0996 35.9004 -80 80 -80s80 35.9004 80 80s-35.9004 80 -80 80s-80 -35.9004 -80 -80z" />
+ <glyph glyph-name="transgender" unicode="&#xf224;" horiz-adv-x="384"
+d="M372 448c6.59961 0 12 -5.40039 12 -12v-79c0 -10.7002 -12.9004 -16.0996 -20.5 -8.5l-16.9004 16.9004l-80.6992 -80.7002c14 -22.2002 22.0996 -48.5 22.0996 -76.7002c0 -68.5 -47.9004 -125.8 -112 -140.4v-27.5996h36c6.59961 0 12 -5.40039 12 -12v-40
+c0 -6.59961 -5.40039 -12 -12 -12h-36v-28c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v28h-36c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h36v27.5996c-64.0996 14.5 -112 71.9004 -112 140.4c0 79.5 64.5 144 144 144
+c28.2002 0 54.5 -8.09961 76.7002 -22.0996l80.7002 80.6992l-16.9004 16.9004c-7.5 7.59961 -2.2002 20.5 8.5 20.5h79zM144 128c44.0996 0 80 35.9004 80 80s-35.9004 80 -80 80s-80 -35.9004 -80 -80s35.9004 -80 80 -80z" />
+ <glyph glyph-name="transgender-alt" unicode="&#xf225;" horiz-adv-x="480"
+d="M468 448c6.59961 0 12 -5.40039 12 -12v-79c0 -10.7002 -12.9004 -16.0996 -20.5 -8.5l-16.9004 16.9004l-80.6992 -80.7002c14 -22.2002 22.0996 -48.5 22.0996 -76.7002c0 -68.5 -47.9004 -125.8 -112 -140.4v-27.5996h36c6.59961 0 12 -5.40039 12 -12v-40
+c0 -6.59961 -5.40039 -12 -12 -12h-36v-28c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v28h-36c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h36v27.5996c-64.0996 14.5 -112 71.9004 -112 140.4
+c0 28.2002 8.09961 54.5 22.2002 76.5996l-16.5 16.5l-19.7998 -19.7998c-4.7002 -4.7002 -12.3008 -4.7002 -17 0l-28.3008 28.2998c-4.69922 4.7002 -4.69922 12.3008 0 17l19.8008 19.8008l-19 19l-16.9004 -16.9004c-7.59961 -7.5 -20.5 -2.2002 -20.5 8.5v79
+c0 6.59961 5.40039 12 12 12h79c10.7002 0 16.0996 -12.9004 8.40039 -20.4004l-16.9004 -16.8994l19 -19l19.7998 19.7998c4.7002 4.7002 12.2998 4.7002 17 0l28.2998 -28.2998c4.7002 -4.7002 4.7002 -12.2998 0 -17l-19.7998 -19.7998l16.5 -16.5
+c22.2002 14 48.5 22.0996 76.7002 22.0996s54.5 -8.09961 76.7002 -22.0996l80.7002 80.6992l-16.9004 16.9004c-7.5 7.59961 -2.2002 20.5 8.5 20.5h79zM240 128c44.0996 0 80 35.9004 80 80s-35.9004 80 -80 80s-80 -35.9004 -80 -80s35.9004 -80 80 -80z" />
+ <glyph glyph-name="venus-double" unicode="&#xf226;"
+d="M288 272c0 -68.5 -47.9004 -125.9 -112 -140.4v-51.5996h36c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-36v-36c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v36h-36c-6.59961 0 -12 5.40039 -12 12v40
+c0 6.59961 5.40039 12 12 12h36v51.5996c-64.0996 14.5 -112 71.9004 -112 140.4c0 79.5 64.5 144 144 144s144 -64.5 144 -144zM64 272c0 -44.0996 35.9004 -80 80 -80s80 35.9004 80 80s-35.9004 80 -80 80s-80 -35.9004 -80 -80zM400 131.6v-51.5996h36
+c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-36v-36c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v36h-36c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h36v51.5996
+c-21.2002 4.80078 -40.5996 14.3008 -57.2002 27.3008c14 16.6992 25 36 32.1006 57.0996c14.5 -14.7998 34.6992 -24 57.0996 -24c44.0996 0 80 35.9004 80 80s-35.9004 80 -80 80c-22.2998 0 -42.5996 -9.2002 -57.0996 -24
+c-7.10059 21.0996 -18 40.4004 -32.1006 57.0996c24.6006 19.3008 55.5 30.9004 89.2002 30.9004c79.5 0 144 -64.5 144 -144c0 -68.5 -47.9004 -125.9 -112 -140.4z" />
+ <glyph glyph-name="mars-double" unicode="&#xf227;"
+d="M340 448c6.59961 0 12 -5.40039 12 -12v-79c0 -7.2002 -5.90039 -12 -12.0996 -12c-2.90039 0 -6 1.09961 -8.40039 3.5l-16.9004 16.9004l-48.6992 -48.7002c14 -22.2002 22.0996 -48.5 22.0996 -76.7002c0 -79.5 -64.5 -144 -144 -144s-144 64.5 -144 144
+s64.5 144 144 144c28.2002 0 54.5 -8.09961 76.7002 -22.0996l48.7002 48.6992l-16.9004 16.9004c-7.5 7.59961 -2.2002 20.5 8.5 20.5h79zM144 160c44.0996 0 80 35.9004 80 80s-35.9004 80 -80 80s-80 -35.9004 -80 -80s35.9004 -80 80 -80zM500 288.1
+c6.59961 0 12 -5.39941 12 -12.0996v-79c0 -7.2002 -5.90039 -12 -12.0996 -12c-3 0 -6 1.09961 -8.40039 3.5l-16.9004 16.9004l-48.6992 -48.7002c14 -22.2002 22.0996 -48.5 22.0996 -76.7002c0 -79.5 -64.5 -144 -144 -144c-74.4004 0 -135.6 56.4004 -143.2 128.9
+c21.7998 2 43.2998 8.19922 63.2998 18.3994c-0.0996094 -1 -0.0996094 -2.09961 -0.0996094 -3.2002c0 -44.0996 35.9004 -80 80 -80s80 35.9004 80 80c0 44.1006 -35.9004 80 -80 80c-1 0 -2.09961 -0.0996094 -3.2002 -0.0996094
+c10.2002 20 16.2998 41.5 18.4004 63.2998c22.5 -2.39941 43.2998 -9.89941 61.5 -21.2998l48.7002 48.7002l-16.9004 16.8994c-7.5 7.60059 -2.2002 20.5 8.5 20.5h79z" />
+ <glyph glyph-name="venus-mars" unicode="&#xf228;" horiz-adv-x="576"
+d="M564 448c6.59961 0 12 -5.40039 12 -12v-79c0 -7.2002 -5.90039 -12 -12.0996 -12c-3 0 -6 1.09961 -8.40039 3.5l-16.9004 16.9004l-48.6992 -48.7002c14 -22.2002 22.0996 -48.5 22.0996 -76.7002c0 -79.5 -64.5 -144 -144 -144
+c-33.7002 0 -64.7002 11.5 -89.2002 30.9004c14.1006 16.6992 25 36 32.1006 57.0996c14.5 -14.7998 34.7998 -24 57.0996 -24c44.0996 0 80 35.9004 80 80s-35.9004 80 -80 80c-22.4004 0 -42.5996 -9.2002 -57.0996 -24
+c-7.10059 21.0996 -18.1006 40.4004 -32.1006 57.0996c24.6006 19.3008 55.5 30.9004 89.2002 30.9004c28.2002 0 54.5 -8.09961 76.7002 -22.0996l48.7002 48.6992l-16.9004 16.9004c-7.5 7.59961 -2.2002 20.5 8.5 20.5h79zM144 384c79.5 0 144 -64.5 144 -144
+c0 -68.5 -47.9004 -125.8 -112 -140.4v-51.5996h36c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-36v-36c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v36h-36c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12
+h36v51.5996c-64.0996 14.5 -112 71.9004 -112 140.4c0 79.5 64.5 144 144 144zM144 160c44.0996 0 80 35.9004 80 80s-35.9004 80 -80 80s-80 -35.9004 -80 -80s35.9004 -80 80 -80z" />
+ <glyph glyph-name="mars-stroke" unicode="&#xf229;" horiz-adv-x="384"
+d="M372 384c6.59961 0 12 -5.40039 12.0996 -12v-78.9004c0 -10.6992 -12.8994 -16.0996 -20.5 -8.5l-16.8994 16.9004l-17.5 -17.5l14.0996 -14.0996c4.7002 -4.7002 4.7002 -12.3008 0 -17l-28.2998 -28.3008c-4.7002 -4.69922 -12.2998 -4.69922 -17 0l-14.0996 14.1006
+l-18 -18c14 -22.2002 22.0996 -48.5 22.0996 -76.7002c0 -79.5 -64.5 -144 -144 -144s-144 64.5 -144 144s64.5 144 143.9 144c28.1992 0 54.5 -8.09961 76.6992 -22.0996l18 18l-14.0996 14.0996c-4.7002 4.7002 -4.7002 12.2998 0 17l28.2998 28.2002
+c4.7002 4.7002 12.2998 4.7002 17 0l14.1006 -14.1006l17.5 17.5l-16.9004 16.9004c-7.5 7.59961 -2.2002 20.5 8.5 20.5h79zM144 64c44.0996 0 80 35.9004 80 80s-35.9004 80 -80 80s-80 -35.9004 -80 -80s35.9004 -80 80 -80z" />
+ <glyph glyph-name="mars-stroke-v" unicode="&#xf22a;" horiz-adv-x="288"
+d="M245.8 213.8c56.2998 -56.2002 56.2998 -147.399 0 -203.6c-56.2002 -56.2002 -147.399 -56.2002 -203.6 0s-56.2002 147.399 0 203.6c19.8994 19.9004 44.2002 32.7998 69.7998 38.6006v25.3994h-20c-6.59961 0 -12 5.40039 -12 12v40c0 6.60059 5.40039 12 12 12h20
+v24.7002h-23.9004c-10.6992 0 -16.0996 12.9004 -8.5 20.5l55.9004 55.9004c4.7002 4.69922 12.2998 4.69922 17 0l55.9004 -55.8008c7.5 -7.59961 2.19922 -20.5 -8.5 -20.5h-23.9004v-24.7998h20c6.59961 0 12 -5.39941 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-20
+v-25.3994c25.5996 -5.80078 49.9004 -18.7002 69.7998 -38.6006zM200.6 55.4004c31.2002 31.1992 31.2002 82 0 113.1c-31.1992 31.2002 -81.8994 31.2002 -113.1 0s-31.2002 -81.9004 0 -113.1c31.2002 -31.2002 81.9004 -31.2002 113.1 0z" />
+ <glyph glyph-name="mars-stroke-h" unicode="&#xf22b;" horiz-adv-x="479"
+d="M476.2 200.5c4.7002 -4.7002 4.7002 -12.2998 0.0996094 -17l-55.8994 -55.9004c-7.60059 -7.5 -20.5 -2.19922 -20.5 8.5v23.9004h-23.9004v-20c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v20h-27.5996
+c-5.80078 -25.5996 -18.7002 -49.9004 -38.6006 -69.7998c-56.2002 -56.2002 -147.399 -56.2002 -203.6 0s-56.2002 147.399 0 203.6s147.399 56.2002 203.6 0c19.9004 -19.8994 32.7998 -44.2002 38.6006 -69.7998h27.5996v20c0 6.59961 5.40039 12 12 12h40
+c6.59961 0 12 -5.40039 12 -12v-20h23.7998v23.9004c0 10.6992 12.9004 16.0996 20.5 8.5zM200.6 135.4c31.2002 31.1992 31.2002 82 0 113.1c-31.1992 31.2002 -81.8994 31.2002 -113.1 0s-31.2002 -81.9004 0 -113.1c31.2002 -31.2002 81.9004 -31.2002 113.1 0z" />
+ <glyph glyph-name="neuter" unicode="&#xf22c;" horiz-adv-x="288"
+d="M288 272c0 -68.5 -47.9004 -125.9 -112 -140.4v-151.6c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v151.6c-64.0996 14.5 -112 71.9004 -112 140.4c0 79.5 64.5 144 144 144s144 -64.5 144 -144zM144 192c44.0996 0 80 35.9004 80 80
+s-35.9004 80 -80 80s-80 -35.9004 -80 -80s35.9004 -80 80 -80z" />
+ <glyph glyph-name="genderless" unicode="&#xf22d;" horiz-adv-x="288"
+d="M144 272c-44.0996 0 -80 -35.9004 -80 -80s35.9004 -80 80 -80s80 35.9004 80 80s-35.9004 80 -80 80zM144 336c79.5 0 144 -64.5 144 -144s-64.5 -144 -144 -144s-144 64.5 -144 144s64.5 144 144 144z" />
+ <glyph glyph-name="server" unicode="&#xf233;"
+d="M480 288h-448c-17.6729 0 -32 14.3271 -32 32v64c0 17.6729 14.3271 32 32 32h448c17.6729 0 32 -14.3271 32 -32v-64c0 -17.6729 -14.3271 -32 -32 -32zM432 376c-13.2549 0 -24 -10.7451 -24 -24s10.7451 -24 24 -24s24 10.7451 24 24s-10.7451 24 -24 24zM368 376
+c-13.2549 0 -24 -10.7451 -24 -24s10.7451 -24 24 -24s24 10.7451 24 24s-10.7451 24 -24 24zM480 128h-448c-17.6729 0 -32 14.3271 -32 32v64c0 17.6729 14.3271 32 32 32h448c17.6729 0 32 -14.3271 32 -32v-64c0 -17.6729 -14.3271 -32 -32 -32zM432 216
+c-13.2549 0 -24 -10.7451 -24 -24s10.7451 -24 24 -24s24 10.7451 24 24s-10.7451 24 -24 24zM368 216c-13.2549 0 -24 -10.7451 -24 -24s10.7451 -24 24 -24s24 10.7451 24 24s-10.7451 24 -24 24zM480 -32h-448c-17.6729 0 -32 14.3271 -32 32v64
+c0 17.6729 14.3271 32 32 32h448c17.6729 0 32 -14.3271 32 -32v-64c0 -17.6729 -14.3271 -32 -32 -32zM432 56c-13.2549 0 -24 -10.7451 -24 -24s10.7451 -24 24 -24s24 10.7451 24 24s-10.7451 24 -24 24zM368 56c-13.2549 0 -24 -10.7451 -24 -24s10.7451 -24 24 -24
+s24 10.7451 24 24s-10.7451 24 -24 24z" />
+ <glyph glyph-name="user-plus" unicode="&#xf234;" horiz-adv-x="640"
+d="M624 240c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-64v-64c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v64h-64c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h64v64c0 8.7998 7.2002 16 16 16h32
+c8.7998 0 16 -7.2002 16 -16v-64h64zM224 192c-70.7002 0 -128 57.2998 -128 128s57.2998 128 128 128s128 -57.2998 128 -128s-57.2998 -128 -128 -128zM313.6 160c74.2002 0 134.4 -60.2002 134.4 -134.4v-41.5996c0 -26.5 -21.5 -48 -48 -48h-352
+c-26.5 0 -48 21.5 -48 48v41.5996c0 74.2002 60.2002 134.4 134.4 134.4h16.6992c22.3008 -10.2002 46.9004 -16 72.9004 -16s50.7002 5.7998 72.9004 16h16.6992z" />
+ <glyph glyph-name="user-times" unicode="&#xf235;" horiz-adv-x="639"
+d="M589.6 208l45.6006 -45.5996c6.2998 -6.30078 6.2998 -16.5 0 -22.8008l-22.7998 -22.7998c-6.30078 -6.2998 -16.5 -6.2998 -22.8008 0l-45.5996 45.6006l-45.5996 -45.6006c-6.30078 -6.2998 -16.5 -6.2998 -22.8008 0l-22.7998 22.7998
+c-6.2998 6.30078 -6.2998 16.5 0 22.8008l45.6006 45.5996l-45.6006 45.5996c-6.2998 6.30078 -6.2998 16.5 0 22.8008l22.7998 22.7998c6.30078 6.2998 16.5 6.2998 22.8008 0l45.5996 -45.6006l45.5996 45.6006c6.30078 6.2998 16.5 6.2998 22.8008 0l22.7998 -22.7998
+c6.2998 -6.30078 6.2998 -16.5 0 -22.8008zM224 192c-70.7002 0 -128 57.2998 -128 128s57.2998 128 128 128s128 -57.2998 128 -128s-57.2998 -128 -128 -128zM313.6 160c74.2002 0 134.4 -60.2002 134.4 -134.4v-41.5996c0 -26.5 -21.5 -48 -48 -48h-352
+c-26.5 0 -48 21.5 -48 48v41.5996c0 74.2002 60.2002 134.4 134.4 134.4h16.6992c22.3008 -10.2002 46.9004 -16 72.9004 -16s50.7002 5.7998 72.9004 16h16.6992z" />
+ <glyph glyph-name="bed" unicode="&#xf236;" horiz-adv-x="640"
+d="M176 192c-44.1104 0 -80 35.8896 -80 80s35.8896 80 80 80s80 -35.8896 80 -80s-35.8896 -80 -80 -80zM528 320c61.8604 0 112 -50.1396 112 -112v-192c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v48h-512v-48c0 -8.83984 -7.16016 -16 -16 -16
+h-32c-8.83984 0 -16 7.16016 -16 16v352c0 8.83984 7.16016 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-208h224v144c0 8.83984 7.16016 16 16 16h224z" />
+ <glyph glyph-name="train" unicode="&#xf238;" horiz-adv-x="448"
+d="M448 352v-256c0 -51.8154 -61.624 -96 -130.022 -96l62.9805 -49.7207c5.94727 -4.69629 2.60352 -14.2793 -4.95801 -14.2793h-304c-7.57812 0 -10.8916 9.59375 -4.95703 14.2793l62.9795 49.7207c-68.2021 0 -130.022 44.0459 -130.022 96v256c0 53.0186 64 96 128 96
+h192c65 0 128 -42.9814 128 -96zM400 216v112c0 13.2549 -10.7451 24 -24 24h-304c-13.2549 0 -24 -10.7451 -24 -24v-112c0 -13.2549 10.7451 -24 24 -24h304c13.2549 0 24 10.7451 24 24zM224 152c-30.9277 0 -56 -25.0723 -56 -56s25.0723 -56 56 -56s56 25.0723 56 56
+s-25.0723 56 -56 56z" />
+ <glyph glyph-name="subway" unicode="&#xf239;" horiz-adv-x="448"
+d="M448 352v-256c0 -51.8154 -61.624 -96 -130.022 -96l62.9805 -49.7207c5.94727 -4.69629 2.60352 -14.2793 -4.95801 -14.2793h-304c-7.57812 0 -10.8916 9.59375 -4.95703 14.2793l62.9795 49.7207c-68.2021 0 -130.022 44.0459 -130.022 96v256c0 53.0186 64 96 128 96
+h192c65 0 128 -42.9814 128 -96zM200 216v112c0 13.2549 -10.7451 24 -24 24h-104c-13.2549 0 -24 -10.7451 -24 -24v-112c0 -13.2549 10.7451 -24 24 -24h104c13.2549 0 24 10.7451 24 24zM400 216v112c0 13.2549 -10.7451 24 -24 24h-104c-13.2549 0 -24 -10.7451 -24 -24
+v-112c0 -13.2549 10.7451 -24 24 -24h104c13.2549 0 24 10.7451 24 24zM352 160c-26.5098 0 -48 -21.4902 -48 -48s21.4902 -48 48 -48s48 21.4902 48 48s-21.4902 48 -48 48zM96 160c-26.5098 0 -48 -21.4902 -48 -48s21.4902 -48 48 -48s48 21.4902 48 48
+s-21.4902 48 -48 48z" />
+ <glyph glyph-name="battery-full" unicode="&#xf240;" horiz-adv-x="640"
+d="M544 288h-480v-192h480v64h32v64h-32v64zM560 352c26.5098 0 48 -21.4902 48 -48v-16h8c13.2549 0 24 -10.7451 24 -24v-144c0 -13.2549 -10.7451 -24 -24 -24h-8v-16c0 -26.5098 -21.4902 -48 -48 -48h-512c-26.5098 0 -48 21.4902 -48 48v224
+c0 26.5098 21.4902 48 48 48h512zM512 256v-128h-416v128h416z" />
+ <glyph glyph-name="battery-three-quarters" unicode="&#xf241;" horiz-adv-x="640"
+d="M544 288h-480v-192h480v64h32v64h-32v64zM560 352c26.5098 0 48 -21.4902 48 -48v-16h8c13.2549 0 24 -10.7451 24 -24v-144c0 -13.2549 -10.7451 -24 -24 -24h-8v-16c0 -26.5098 -21.4902 -48 -48 -48h-512c-26.5098 0 -48 21.4902 -48 48v224
+c0 26.5098 21.4902 48 48 48h512zM416 256v-128h-320v128h320z" />
+ <glyph glyph-name="battery-half" unicode="&#xf242;" horiz-adv-x="640"
+d="M544 288h-480v-192h480v64h32v64h-32v64zM560 352c26.5098 0 48 -21.4902 48 -48v-16h8c13.2549 0 24 -10.7451 24 -24v-144c0 -13.2549 -10.7451 -24 -24 -24h-8v-16c0 -26.5098 -21.4902 -48 -48 -48h-512c-26.5098 0 -48 21.4902 -48 48v224
+c0 26.5098 21.4902 48 48 48h512zM320 256v-128h-224v128h224z" />
+ <glyph glyph-name="battery-quarter" unicode="&#xf243;" horiz-adv-x="640"
+d="M544 288h-480v-192h480v64h32v64h-32v64zM560 352c26.5098 0 48 -21.4902 48 -48v-16h8c13.2549 0 24 -10.7451 24 -24v-144c0 -13.2549 -10.7451 -24 -24 -24h-8v-16c0 -26.5098 -21.4902 -48 -48 -48h-512c-26.5098 0 -48 21.4902 -48 48v224
+c0 26.5098 21.4902 48 48 48h512zM224 256v-128h-128v128h128z" />
+ <glyph glyph-name="battery-empty" unicode="&#xf244;" horiz-adv-x="640"
+d="M544 288h-480v-192h480v64h32v64h-32v64zM560 352c26.5098 0 48 -21.4902 48 -48v-16h8c13.2549 0 24 -10.7451 24 -24v-144c0 -13.2549 -10.7451 -24 -24 -24h-8v-16c0 -26.5098 -21.4902 -48 -48 -48h-512c-26.5098 0 -48 21.4902 -48 48v224
+c0 26.5098 21.4902 48 48 48h512z" />
+ <glyph glyph-name="mouse-pointer" unicode="&#xf245;" horiz-adv-x="320"
+d="M302.189 118.874h-106.084l55.8301 -135.993c3.88965 -9.42773 -0.554688 -19.999 -9.44336 -23.999l-49.165 -21.4268c-9.16504 -4 -19.4434 0.571289 -23.332 9.71387l-53.0527 129.136l-86.6641 -89.1377c-11.5498 -11.877 -30.2783 -2.7207 -30.2783 12.8564v429.678
+c0 16.3994 19.9209 24.3945 30.2773 12.8555l284.412 -292.542c11.4717 -11.1787 3.00684 -31.1406 -12.5 -31.1406z" />
+ <glyph glyph-name="i-cursor" unicode="&#xf246;" horiz-adv-x="256"
+d="M256 395.952c0 -6.64648 -5.4043 -12.0098 -12.0498 -11.9922c-27.875 0.0712891 -83.9502 -3.20996 -83.9502 -48.1416v-111.818h36c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-36v-112c0 -44.9395 57.8887 -48.5527 83.8555 -48.2422
+c6.68652 0.0800781 12.1445 -5.31055 12.1445 -11.998v-39.6445c0 -6.5957 -5.31836 -11.957 -11.9131 -12c-35.0654 -0.228516 -78.3525 0.62207 -116.087 37.8447c-38.4688 -37.9482 -83.6211 -38.3027 -116.158 -37.8936
+c-6.56738 0.0820312 -11.8418 5.42969 -11.8418 11.999v39.9824c0 6.64648 5.4043 12.0098 12.0498 11.9932c27.875 -0.0722656 83.9502 3.02734 83.9502 47.959v112h-36c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h36v111.818
+c0 44.9385 -57.8887 48.7344 -83.8555 48.4248c-6.68652 -0.0800781 -12.1445 5.31055 -12.1445 11.998v39.6445c0 6.5957 5.31836 11.957 11.9131 12c35.0654 0.228516 78.3525 -0.62207 116.087 -37.8447c38.4688 37.9482 83.6211 38.3027 116.158 37.8926
+c6.56738 -0.0820312 11.8418 -5.42969 11.8418 -11.999v-39.9824z" />
+ <glyph glyph-name="object-group" unicode="&#xf247;"
+d="M480 320v-288h20c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v20h-384v-20c0 -6.62695 -5.37305 -12 -12 -12h-40c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h20v320h-20
+c-6.62695 0 -12 5.37305 -12 12v40c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-20h384v20c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-40c0 -6.62695 -5.37305 -12 -12 -12h-20v-32zM96 172c0 -6.62695 5.37305 -12 12 -12h168
+c6.62695 0 12 5.37305 12 12v136c0 6.62695 -5.37305 12 -12 12h-168c-6.62695 0 -12 -5.37305 -12 -12v-136zM416 76v136c0 6.62695 -5.37305 12 -12 12h-84v-72c0 -13.2549 -10.7451 -24 -24 -24h-72v-52c0 -6.62695 5.37305 -12 12 -12h168c6.62695 0 12 5.37305 12 12z
+" />
+ <glyph glyph-name="object-ungroup" unicode="&#xf248;" horiz-adv-x="576"
+d="M64 128v-26c0 -3.31152 -2.68848 -6 -6 -6h-52c-3.31152 0 -6 2.68848 -6 6v52c0 3.31152 2.68848 6 6 6h26v192h-26c-3.31152 0 -6 2.68848 -6 6v52c0 3.31152 2.68848 6 6 6h52c3.31152 0 6 -2.68848 6 -6v-26h288v26c0 3.31152 2.68848 6 6 6h52
+c3.31152 0 6 -2.68848 6 -6v-52c0 -3.31152 -2.68848 -6 -6 -6h-26v-192h26c3.31152 0 6 -2.68848 6 -6v-52c0 -3.31152 -2.68848 -6 -6 -6h-52c-3.31152 0 -6 2.68848 -6 6v26h-288zM544 192v-160h26c3.31152 0 6 -2.68848 6 -6v-52c0 -3.31152 -2.68848 -6 -6 -6h-52
+c-3.31152 0 -6 2.68848 -6 6v26h-288v-26c0 -3.31152 -2.68848 -6 -6 -6h-52c-3.31152 0 -6 2.68848 -6 6v52c0 3.31152 2.68848 6 6 6h26v72h136v-8c0 -13.2549 10.7451 -24 24 -24h64c13.2549 0 24 10.7451 24 24v64c0 13.2549 -10.7451 24 -24 24h-8v72h104v26
+c0 3.31152 2.68848 6 6 6h52c3.31152 0 6 -2.68848 6 -6v-52c0 -3.31152 -2.68848 -6 -6 -6h-26v-32z" />
+ <glyph glyph-name="sticky-note" unicode="&#xf249;" horiz-adv-x="448"
+d="M312 128c-13.2002 0 -24 -10.7998 -24 -24v-136h-264c-13.2998 0 -24 10.7002 -24 24v400c0 13.2998 10.7002 24 24 24h400c13.2998 0 24 -10.7002 24 -24v-264h-136zM441 73l-98 -98c-4.5 -4.5 -10.5996 -7 -17 -7h-6v128h128v-6.09961
+c0 -6.30078 -2.5 -12.4004 -7 -16.9004z" />
+ <glyph glyph-name="clone" unicode="&#xf24d;"
+d="M464 448c26.5098 0 48 -21.4902 48 -48v-288c0 -26.5098 -21.4902 -48 -48 -48h-288c-26.5098 0 -48 21.4902 -48 48v288c0 26.5098 21.4902 48 48 48h288zM176 32h208v-48c0 -26.5098 -21.4902 -48 -48 -48h-288c-26.5098 0 -48 21.4902 -48 48v288
+c0 26.5098 21.4902 48 48 48h48v-208c0 -44.1123 35.8877 -80 80 -80z" />
+ <glyph glyph-name="balance-scale" unicode="&#xf24e;" horiz-adv-x="640"
+d="M256 112c0 -44.1797 -57.3096 -80 -128 -80s-128 35.8203 -128 80h0.0195312c0 15.6699 -2.0791 7.25 85.04 181.51c17.6807 35.3604 68.2207 35.29 85.8701 0c86.3906 -172.779 85.0508 -165.33 85.0508 -181.51h0.0195312zM128 272l-72 -144h144zM639.98 112
+c0 -44.1797 -57.29 -80 -127.98 -80s-128 35.8203 -128 80h0.0195312c0 15.6699 -2.0791 7.25 85.04 181.51c17.6807 35.3604 68.2207 35.29 85.8701 0c86.3906 -172.779 85.0508 -165.33 85.0508 -181.51zM440 128h144l-72 144zM528 0c8.83984 0 16 -7.16016 16 -16v-32
+c0 -8.83984 -7.16016 -16 -16 -16h-416c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h176v294.75c-23.5195 10.29 -41.1602 31.4902 -46.3896 57.25h-129.61c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h144.36
+c14.5996 19.3203 37.5498 32 63.6396 32s49.04 -12.6797 63.6396 -32h144.36c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-129.61c-5.22949 -25.7695 -22.8799 -46.96 -46.3896 -57.25v-294.75h176z" />
+ <glyph glyph-name="hourglass-start" unicode="&#xf251;" horiz-adv-x="384"
+d="M360 448c13.2549 0 24 -10.7451 24 -24v-16c0 -13.2549 -10.7451 -24 -24 -24c0 -90.9648 -51.0156 -167.734 -120.842 -192c69.8262 -24.2656 120.842 -101.035 120.842 -192c13.2549 0 24 -10.7451 24 -24v-16c0 -13.2549 -10.7451 -24 -24 -24h-336
+c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24c0 90.9648 51.0156 167.734 120.842 192c-69.8262 24.2656 -120.842 101.035 -120.842 192c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24h336zM296 0c0 77.4834 -46.2139 144 -104 144
+c-57.7959 0 -104 -66.542 -104 -144h208z" />
+ <glyph glyph-name="hourglass-half" unicode="&#xf252;" horiz-adv-x="384"
+d="M360 448c13.2549 0 24 -10.7451 24 -24v-16c0 -13.2549 -10.7451 -24 -24 -24c0 -90.9648 -51.0156 -167.734 -120.842 -192c69.8262 -24.2656 120.842 -101.035 120.842 -192c13.2549 0 24 -10.7451 24 -24v-16c0 -13.2549 -10.7451 -24 -24 -24h-336
+c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24c0 90.9648 51.0156 167.734 120.842 192c-69.8262 24.2656 -120.842 101.035 -120.842 192c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24h336zM284.922 64
+c-17.0596 46.8037 -52.1006 80 -92.9219 80c-40.8242 0 -75.8613 -33.2031 -92.9199 -80h185.842zM284.941 320c7.07129 19.4131 11.0586 41.1953 11.0586 64h-208c0 -22.748 3.98828 -44.5479 11.0781 -64h185.863z" />
+ <glyph glyph-name="hourglass-end" unicode="&#xf253;" horiz-adv-x="384"
+d="M360 384c0 -90.9648 -51.0156 -167.734 -120.842 -192c69.8262 -24.2656 120.842 -101.035 120.842 -192c13.2549 0 24 -10.7451 24 -24v-16c0 -13.2549 -10.7451 -24 -24 -24h-336c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24
+c0 90.9648 51.0156 167.734 120.842 192c-69.8262 24.2656 -120.842 101.035 -120.842 192c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24h336c13.2549 0 24 -10.7451 24 -24v-16c0 -13.2549 -10.7451 -24 -24 -24zM192 240c57.4902 0 104 66.0547 104 144
+h-208c0 -77.4824 46.2129 -144 104 -144z" />
+ <glyph glyph-name="hourglass" unicode="&#xf254;" horiz-adv-x="384"
+d="M360 384c0 -90.9648 -51.0156 -167.734 -120.842 -192c69.8262 -24.2656 120.842 -101.035 120.842 -192c13.2549 0 24 -10.7451 24 -24v-16c0 -13.2549 -10.7451 -24 -24 -24h-336c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24
+c0 90.9648 51.0156 167.734 120.842 192c-69.8262 24.2656 -120.842 101.035 -120.842 192c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24h336c13.2549 0 24 -10.7451 24 -24v-16c0 -13.2549 -10.7451 -24 -24 -24z" />
+ <glyph glyph-name="hand-rock" unicode="&#xf255;"
+d="M464.8 368c26.2998 -0.400391 47.2002 -22.5 47.2002 -48.7998v-133.5c0 -12.7998 -2.5 -25.5 -7.5 -37.2998l-49 -116.301c-4.90039 -11.7998 -7.5 -24.5 -7.5 -37.2998v-2.89941c0 -13.3008 -10.7002 -24 -24 -24h-240c-13.2998 0 -24 10.6992 -24 24v6.69922
+c0 13.7002 -5.90039 26.8008 -16.0996 35.9004l-111.7 99.2998c-20.5 18.2998 -32.2002 44.4004 -32.2002 71.7998v66.4004c0 26.7998 21.9004 48.4004 48.7998 48c26.2998 -0.5 47.2002 -22.5 47.2002 -48.7998v-48.1006l8 -7.09961v136
+c0 26.7998 21.9004 48.4004 48.7998 48c26.2998 -0.5 47.2002 -22.5 47.2002 -48.7998v-31.2002h8v48c0 26.7998 21.9004 48.4004 48.7998 48c26.2998 -0.5 47.2002 -22.5 47.2002 -48.7998v-47.2002h8v32c0 26.7998 21.9004 48.4004 48.7998 48
+c26.2998 -0.5 47.2002 -22.5 47.2002 -48.7998v-31.2002h8c0 26.7998 21.9004 48.4004 48.7998 48z" />
+ <glyph glyph-name="hand-paper" unicode="&#xf256;" horiz-adv-x="447"
+d="M408.781 319.993c21.7305 -0.416016 39.2188 -18.1621 39.2178 -39.9932v-150.359c0 -12.2998 -2.28711 -32.001 -5.10449 -43.9746l-26.5078 -112.66c-5.10156 -21.6816 -24.4502 -37.0059 -46.7236 -37.0059h-197.59c-13.4922 0 -30.8838 8.85645 -38.8193 19.7676
+l-125.601 172.705c-12.9932 17.8672 -9.04297 42.8838 8.82129 55.877c17.8682 12.9941 42.8848 9.04297 55.877 -8.82227l31.6484 -43.5195v235.992c0 21.8311 17.4883 39.5771 39.2188 39.9932c22.4248 0.428711 40.7812 -18.3535 40.7812 -40.7832v-175.21h8v216
+c0 21.8311 17.4883 39.5771 39.2188 39.9932c22.4248 0.428711 40.7812 -18.3535 40.7812 -40.7832v-215.21h8v177c0 21.8311 17.4883 39.5771 39.2188 39.9932c22.4248 0.428711 40.7812 -18.3535 40.7812 -40.7832v-176.21h8v87.21
+c0 22.4297 18.3564 41.2119 40.7812 40.7832z" />
+ <glyph glyph-name="hand-scissors" unicode="&#xf257;"
+d="M216 8c0 22.0918 17.9092 40 40 40v8h-32c-22.0908 0 -40 17.9082 -40 40s17.9092 40 40 40h32v8h-208c-26.5098 0 -48 21.4902 -48 48s21.4902 48 48 48h208v13.5723l-177.551 69.7393c-24.6738 9.69434 -36.8184 37.5557 -27.125 62.2285
+c9.69238 24.6738 37.5537 36.8174 62.2275 27.124l190.342 -74.7646l24.8721 31.0898c12.3066 15.3809 33.9785 19.5146 51.0811 9.74121l112 -64c11.125 -6.3584 20.1533 -21.917 20.1533 -34.7305v-240c0 -18.5615 -12.7695 -34.6855 -30.8379 -38.9365l-136 -32
+c-2.49414 -0.586914 -6.59668 -1.06348 -9.1582 -1.06348h-0.00390625h-80c-22.0908 0 -40 17.9082 -40 40z" />
+ <glyph glyph-name="hand-lizard" unicode="&#xf258;" horiz-adv-x="576"
+d="M384 -32v61.4609c0 7.28906 -4.99707 16.3711 -11.1543 20.2734l-111.748 70.8105c-6.49316 4.11523 -18.0029 7.45508 -25.6904 7.45508h-0.000976562h-147.406c-13.2549 0 -24 10.7451 -24 24v8c0 35.3457 28.6543 64 64 64h123.648
+c11.7754 0 25.0088 8.82227 29.5371 19.6924l21.4102 51.3848c4.94141 11.8555 -3.77051 24.9229 -16.6143 24.9229h-229.981c-30.9277 0 -56 25.0723 -56 56v16c0 13.2549 10.7451 24 24 24h333.544c14.6035 0 32.7852 -10.0205 40.583 -22.3682l163.04 -258.146
+c8.1875 -12.9639 14.833 -35.9297 14.833 -51.2627v-0.000976562v-116.222h-192z" />
+ <glyph glyph-name="hand-spock" unicode="&#xf259;"
+d="M481.3 350.9c21.4004 -5.10059 34.7002 -26.7002 29.7002 -48.2002l-36.2998 -152.5c-1.7002 -7.2002 -2.60059 -14.7002 -2.60059 -22.2002v-42c0 -9.2998 -1.39941 -18.4004 -4 -27.2998l-26.1992 -88.2998c-6 -20.4004 -24.7002 -34.4004 -46 -34.4004h-216.7
+c-12.2002 0 -24 4.59961 -32.9004 13l-133.7 125.9c-16.0996 15.0996 -16.7998 40.3994 -1.69922 56.5c15.0996 16.0996 40.3994 16.7998 56.5 1.69922l60.5996 -57v79.4004l-39 171.6c-4.90039 21.6006 8.59961 43 30.0996 47.9004
+c21.6006 4.90039 43 -8.59961 47.9004 -30.0996l34.7998 -152.801h9.7998l-47.5996 207c-5 21.5 8.5 43 30 47.9004c21.5996 4.90039 43 -8.5 48 -30.0996l51.7002 -224.9h15.0996l48.4004 193.7c5.39941 21.3994 27.0996 34.5 48.5 29.0996
+c21.3994 -5.39941 34.5 -27.0996 29.0996 -48.5l-43.5996 -174.3h11.0996l30.7998 129.3c5.10059 21.4004 26.7002 34.7002 48.2002 29.6006z" />
+ <glyph glyph-name="hand-pointer" unicode="&#xf25a;" horiz-adv-x="448"
+d="M448 208v-96c0 -3.08398 -0.356445 -6.15918 -1.06348 -9.16211l-32 -136c-4.25098 -18.0684 -20.375 -30.8379 -38.9365 -30.8379h-208c-11.2432 0 -25.7363 7.37988 -32.3496 16.4727l-127.997 176c-12.9932 17.8662 -9.04297 42.8838 8.82129 55.876
+c17.8672 12.9941 42.8848 9.04297 55.877 -8.82227l31.6484 -43.5186v275.992c0 22.0908 17.9082 40 40 40s40 -17.9092 40 -40v-200h8v40c0 22.0908 17.9082 40 40 40s40 -17.9092 40 -40v-40h8v24c0 22.0908 17.9082 40 40 40s40 -17.9092 40 -40v-24h8
+c0 22.0908 17.9082 40 40 40s40 -17.9092 40 -40zM192 128h-8v-96h8v96zM280 128h-8v-96h8v96zM368 128h-8v-96h8v96z" />
+ <glyph glyph-name="hand-peace" unicode="&#xf25b;" horiz-adv-x="448"
+d="M408 232c22.0918 0 40 -17.9092 40 -40v-80v-0.00488281c0 -2.56152 -0.476562 -6.66406 -1.06348 -9.15723l-32 -136c-4.25098 -18.0684 -20.375 -30.8379 -38.9365 -30.8379h-240h-0.000976562c-12.8125 0 -28.3711 9.0293 -34.7275 20.1543l-64 112
+c-9.77441 17.1025 -5.64062 38.7744 9.74023 51.0811l31.0898 24.8721l-74.7646 190.342c-9.69336 24.6738 2.4502 52.5342 27.124 62.2266c24.6729 9.69434 52.5332 -2.4502 62.2275 -27.125l69.7393 -177.551h13.5723v208c0 26.5098 21.4902 48 48 48s48 -21.4902 48 -48
+v-208h8v32c0 22.0908 17.9082 40 40 40s40 -17.9092 40 -40v-32h8c0 22.0908 17.9082 40 40 40z" />
+ <glyph glyph-name="trademark" unicode="&#xf25c;" horiz-adv-x="640"
+d="M260.6 352c6.60059 0 12 -5.40039 11.9004 -12v-43.0996c0 -6.60059 -5.40039 -12 -12 -12h-85.0996v-240.9c0 -6.59961 -5.40039 -12 -12 -12h-54.3008c-6.59961 0 -12 5.40039 -12 12v240.9h-85.0996c-6.59961 0 -12 5.39941 -12 12v43.0996
+c0 6.59961 5.40039 12 12 12h248.6zM640 45c0.5 -7 -5 -13 -12 -13h-53.9004c-6.2998 0 -11.5996 4.90039 -12 11.2002l-9.09961 132.899c-1.7998 24.2002 0 53.7002 0 53.7002h-0.900391s-10.6992 -33.5996 -17.8994 -53.7002l-30.7002 -84.6992
+c-1.7002 -4.7002 -6.2002 -7.90039 -11.2998 -7.90039h-50.2998c-5.10059 0 -9.60059 3.2002 -11.3008 7.90039l-30.6992 84.6992c-7.2002 20.1006 -17.9004 53.7002 -17.9004 53.7002h-0.900391s1.80078 -29.5 0 -53.7002l-9.09961 -132.899
+c-0.5 -6.2998 -5.7002 -11.2002 -12 -11.2002h-54.5c-7.09961 0 -12.5996 6 -12 13l24.4004 296c0.599609 6.2002 5.7998 11 12 11h65.3994c5.10059 0 9.60059 -3.2998 11.2998 -8.09961l43.8008 -127.101c7.19922 -20.5996 16.0996 -52.7998 16.0996 -52.7998h0.900391
+s8.89941 32.2002 16.0996 52.7998l43.7998 127.101c1.60059 4.7998 6.2002 8.09961 11.2998 8.09961h65.4004c6.2998 0 11.5 -4.7998 12 -11z" />
+ <glyph glyph-name="registered" unicode="&#xf25d;"
+d="M285.363 240.525c0 -18.6006 -9.83105 -28.4316 -28.4316 -28.4316h-29.876v56.1406h23.3779c28.668 0 34.9297 -8.77344 34.9297 -27.709zM504 192c0 -136.967 -111.033 -248 -248 -248s-248 111.033 -248 248s111.033 248 248 248s248 -111.033 248 -248z
+M363.411 87.5859c-46.7295 84.8252 -43.2988 78.6357 -44.7021 80.9805c23.4316 15.1719 37.9453 42.9785 37.9453 74.4854c0 54.2441 -31.5 89.252 -105.498 89.252h-70.667c-13.2549 0 -24 -10.7451 -24 -24v-232.304c0 -13.2549 10.7451 -24 24 -24h22.5664
+c13.2549 0 24 10.7451 24 24v71.6631h25.5566l44.1289 -82.9375c3.73828 -7.02441 13.2305 -12.7266 21.1875 -12.7266h24.4639c18.2617 0.000976562 29.8291 19.5908 21.0186 35.5869z" />
+ <glyph glyph-name="tv" unicode="&#xf26c;" horiz-adv-x="640"
+d="M592 448c26.5 0 48 -21.5 48 -48v-320c0 -26.5 -21.5 -48 -48 -48h-234.9v-32h160c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32h-384c-17.6992 0 -32 14.2998 -32 32s14.3008 32 32 32h160v32h-245.1c-26.5 0 -48 21.5 -48 48v320c0 26.5 21.5 48 48 48h544z
+M576 96v288h-512v-288h512z" />
+ <glyph glyph-name="calendar-plus" unicode="&#xf271;" horiz-adv-x="448"
+d="M436 288h-424c-6.59961 0 -12 5.40039 -12 12v36c0 26.5 21.5 48 48 48h48v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h128v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h48c26.5 0 48 -21.5 48 -48v-36
+c0 -6.59961 -5.40039 -12 -12 -12zM12 256h424c6.59961 0 12 -5.40039 12 -12v-260c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v260c0 6.59961 5.40039 12 12 12zM328 116c0 6.59961 -5.40039 12 -12 12h-60v60c0 6.59961 -5.40039 12 -12 12h-40
+c-6.59961 0 -12 -5.40039 -12 -12v-60h-60c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h60v-60c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12v60h60c6.59961 0 12 5.40039 12 12v40z" />
+ <glyph glyph-name="calendar-minus" unicode="&#xf272;" horiz-adv-x="448"
+d="M436 288h-424c-6.59961 0 -12 5.40039 -12 12v36c0 26.5 21.5 48 48 48h48v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h128v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h48c26.5 0 48 -21.5 48 -48v-36
+c0 -6.59961 -5.40039 -12 -12 -12zM12 256h424c6.59961 0 12 -5.40039 12 -12v-260c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v260c0 6.59961 5.40039 12 12 12zM316 64c6.59961 0 12 5.40039 12 12v40c0 6.59961 -5.40039 12 -12 12h-184
+c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h184z" />
+ <glyph glyph-name="calendar-times" unicode="&#xf273;" horiz-adv-x="448"
+d="M436 288h-424c-6.59961 0 -12 5.40039 -12 12v36c0 26.5 21.5 48 48 48h48v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h128v52c0 6.59961 5.40039 12 12 12h40c6.59961 0 12 -5.40039 12 -12v-52h48c26.5 0 48 -21.5 48 -48v-36
+c0 -6.59961 -5.40039 -12 -12 -12zM12 256h424c6.59961 0 12 -5.40039 12 -12v-260c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v260c0 6.59961 5.40039 12 12 12zM269.3 96l48.1006 48.0996c4.69922 4.7002 4.69922 12.3008 0 17l-28.3008 28.3008
+c-4.69922 4.69922 -12.2998 4.69922 -17 0l-48.0996 -48.1006l-48.0996 48.1006c-4.7002 4.69922 -12.3008 4.69922 -17 0l-28.3008 -28.3008c-4.69922 -4.69922 -4.69922 -12.2998 0 -17l48.1006 -48.0996l-48.1006 -48.0996c-4.69922 -4.7002 -4.69922 -12.3008 0 -17
+l28.3008 -28.3008c4.69922 -4.69922 12.2998 -4.69922 17 0l48.0996 48.1006l48.0996 -48.1006c4.7002 -4.69922 12.3008 -4.69922 17 0l28.3008 28.3008c4.69922 4.69922 4.69922 12.2998 0 17z" />
+ <glyph glyph-name="calendar-check" unicode="&#xf274;" horiz-adv-x="448"
+d="M436 288h-424c-6.62695 0 -12 5.37305 -12 12v36c0 26.5098 21.4902 48 48 48h48v52c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-52h128v52c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-52h48c26.5098 0 48 -21.4902 48 -48v-36
+c0 -6.62695 -5.37305 -12 -12 -12zM12 256h424c6.62695 0 12 -5.37305 12 -12v-260c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v260c0 6.62695 5.37305 12 12 12zM345.296 160.053l-28.1689 28.3984
+c-4.66699 4.70508 -12.2646 4.73535 -16.9697 0.0673828l-106.037 -105.184l-45.9805 46.3516c-4.66699 4.70508 -12.2656 4.73633 -16.9707 0.0683594l-28.3965 -28.1699c-4.70508 -4.66699 -4.73633 -12.2646 -0.0683594 -16.9697l82.6006 -83.2695
+c4.66699 -4.70508 12.2656 -4.73535 16.9707 -0.0673828l142.952 141.805c4.70508 4.66699 4.73633 12.2646 0.0683594 16.9697z" />
+ <glyph glyph-name="industry" unicode="&#xf275;"
+d="M475.115 284.219c15.9541 10.1514 36.8848 -1.33105 36.8848 -20.248v-271.971c0 -13.2549 -10.7451 -24 -24 -24h-464c-13.2549 0 -24 10.7451 -24 24v400c0 13.2549 10.7451 24 24 24h112c13.2549 0 24 -10.7451 24 -24v-196.309l139.115 88.5273
+c15.9541 10.1514 36.8848 -1.33203 36.8848 -20.248v-68.2793z" />
+ <glyph glyph-name="map-pin" unicode="&#xf276;" horiz-adv-x="288"
+d="M112 131.06c10.3896 -1.91992 21.0596 -3.05957 32 -3.05957s21.6104 1.13965 32 3.05957v-156.689l-22.0098 -33.0205c-4.75 -7.11914 -15.2207 -7.11914 -19.9707 0l-22.0195 33.0205v156.689zM144 448c79.5303 0 144 -64.4697 144 -144s-64.4697 -144 -144 -144
+s-144 64.4697 -144 144s64.4697 144 144 144zM144 372c6.62012 0 12 5.37988 12 12s-5.37988 12 -12 12c-50.7197 0 -92 -41.2695 -92 -92c0 -6.62012 5.37988 -12 12 -12s12 5.37988 12 12c0 37.5 30.5 68 68 68z" />
+ <glyph glyph-name="map-signs" unicode="&#xf277;"
+d="M507.31 363.31c6.25 -6.25 6.25 -16.3691 0 -22.6299l-43.3096 -43.3096c-6.00977 -6 -14.1396 -9.37012 -22.6299 -9.37012h-385.37c-13.25 0 -24 10.75 -24 24v80c0 13.25 10.75 24 24 24h168v16c0 8.83984 7.16016 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-16
+h153.37c8.49023 0 16.6299 -3.37012 22.6299 -9.37012zM224 -48v112h64v-112c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16zM456 224c13.25 0 24 -10.75 24 -24v-80c0 -13.25 -10.75 -24 -24 -24h-385.37
+c-8.49023 0 -16.6299 3.37012 -22.6299 9.37012l-43.3096 43.3096c-6.25 6.25 -6.25 16.3799 0 22.6299l43.3096 43.3203c6.00977 6 14.1396 9.37012 22.6299 9.37012h153.37v32h64v-32h168z" />
+ <glyph glyph-name="map" unicode="&#xf279;" horiz-adv-x="576"
+d="M0 330.34c0.00292969 11.959 9.0166 25.2686 20.1201 29.71l139.88 55.9502v-384l-138.06 -62.8398c-10.5107 -4.2002 -21.9404 3.54004 -21.9404 14.8594v346.32zM192 32v384l192 -64v-384zM554.06 414.84c10.5107 4.2002 21.9404 -3.54004 21.9404 -14.8594v-346.32
+c0 -11.9609 -9.01367 -25.2705 -20.1201 -29.71l-139.88 -55.9502v384z" />
+ <glyph glyph-name="comment-alt" unicode="&#xf27a;"
+d="M448 448c35.2998 0 64 -28.7002 64 -64v-288c0 -35.2998 -28.7002 -64 -64 -64h-144l-124.9 -93.7002c-7.89941 -5.7998 -19.0996 -0.0996094 -19.0996 9.7002v84h-96c-35.2998 0 -64 28.7002 -64 64v288c0 35.2998 28.7002 64 64 64h384z" />
+ <glyph glyph-name="pause-circle" unicode="&#xf28b;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM240 112v160c0 8.7998 -7.2002 16 -16 16h-48c-8.7998 0 -16 -7.2002 -16 -16v-160c0 -8.7998 7.2002 -16 16 -16h48c8.7998 0 16 7.2002 16 16zM352 112v160
+c0 8.7998 -7.2002 16 -16 16h-48c-8.7998 0 -16 -7.2002 -16 -16v-160c0 -8.7998 7.2002 -16 16 -16h48c8.7998 0 16 7.2002 16 16z" />
+ <glyph glyph-name="stop-circle" unicode="&#xf28d;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM352 112v160c0 8.7998 -7.2002 16 -16 16h-160c-8.7998 0 -16 -7.2002 -16 -16v-160c0 -8.7998 7.2002 -16 16 -16h160c8.7998 0 16 7.2002 16 16z" />
+ <glyph glyph-name="shopping-bag" unicode="&#xf290;" horiz-adv-x="448"
+d="M352 288h96v-272c0 -44.1826 -35.8174 -80 -80 -80h-288c-44.1826 0 -80 35.8174 -80 80v272h96v32c0 70.5801 57.4199 128 128 128c70.5791 0 128 -57.4199 128 -128v-32zM160 320v-32h128v32c0 35.29 -28.71 64 -64 64s-64 -28.71 -64 -64zM320 200
+c13.2549 0 24 10.7451 24 24s-10.7451 24 -24 24s-24 -10.7451 -24 -24s10.7451 -24 24 -24zM128 200c13.2549 0 24 10.7451 24 24s-10.7451 24 -24 24s-24 -10.7451 -24 -24s10.7451 -24 24 -24z" />
+ <glyph glyph-name="shopping-basket" unicode="&#xf291;" horiz-adv-x="576"
+d="M576 232v-16c0 -13.2549 -10.7451 -24 -24 -24h-8l-26.1133 -182.788c-3.37793 -23.6465 -23.6299 -41.2119 -47.5166 -41.2119h-364.74c-23.8867 0 -44.1387 17.5654 -47.5176 41.2119l-26.1123 182.788h-8c-13.2549 0 -24 10.7451 -24 24v16
+c0 13.2549 10.7451 24 24 24h67.3408l106.78 146.821c10.3945 14.292 30.4072 17.4531 44.7012 7.05762c14.293 -10.3945 17.4531 -30.4082 7.05762 -44.7012l-79.4033 -109.178h235.047l-79.4033 109.179c-10.3955 14.292 -7.23438 34.3066 7.05859 44.7012
+c14.291 10.3955 34.3066 7.23535 44.7012 -7.05762l106.779 -146.822h67.3408c13.2549 0 24 -10.7451 24 -24zM312 56v112c0 13.2549 -10.7451 24 -24 24s-24 -10.7451 -24 -24v-112c0 -13.2549 10.7451 -24 24 -24s24 10.7451 24 24zM424 56v112
+c0 13.2549 -10.7451 24 -24 24s-24 -10.7451 -24 -24v-112c0 -13.2549 10.7451 -24 24 -24s24 10.7451 24 24zM200 56v112c0 13.2549 -10.7451 24 -24 24s-24 -10.7451 -24 -24v-112c0 -13.2549 10.7451 -24 24 -24s24 10.7451 24 24z" />
+ <glyph glyph-name="hashtag" unicode="&#xf292;" horiz-adv-x="448"
+d="M440.667 265.891c-0.974609 -5.45898 -6.2666 -9.89062 -11.8135 -9.89062h-79.0957l-22.8564 -128h74.8096c7.4707 0 13.126 -6.75391 11.8135 -14.1094l-7.14355 -40c-0.974609 -5.45898 -6.2666 -9.89062 -11.8125 -9.89062h-79.0967l-15.377 -86.1094
+c-0.974609 -5.45898 -6.2666 -9.89062 -11.8125 -9.89062h-40.6318c-7.47266 0 -13.127 6.75391 -11.8135 14.1094l14.623 81.8906h-98.6338l-15.3779 -86.1094c-0.974609 -5.45898 -6.26758 -9.89062 -11.8135 -9.89062h-40.6318
+c-7.4707 0 -13.126 6.75391 -11.8125 14.1094l14.623 81.8906h-74.8105c-7.4707 0 -13.126 6.75391 -11.8125 14.1094l7.14258 40c0.974609 5.45898 6.2666 9.89062 11.8135 9.89062h79.0957l22.8564 128h-74.8096c-7.4707 0 -13.126 6.75391 -11.8135 14.1094l7.14355 40
+c0.974609 5.45898 6.2666 9.89062 11.8125 9.89062h79.0967l15.377 86.1094c0.974609 5.45898 6.2666 9.89062 11.8125 9.89062h40.6318c7.47266 0 13.127 -6.75391 11.8135 -14.1094l-14.623 -81.8906h98.6348l15.377 86.1094
+c0.974609 5.45898 6.26758 9.89062 11.8135 9.89062h40.6318c7.4707 0 13.126 -6.75391 11.8125 -14.1094l-14.623 -81.8906h74.8105c7.4707 0 13.126 -6.75391 11.8125 -14.1094zM261.889 128l22.8574 128h-98.6338l-22.8574 -128h98.6338z" />
+ <glyph glyph-name="percent" unicode="&#xf295;" horiz-adv-x="448"
+d="M112 224c-61.9004 0 -112 50.0996 -112 112s50.0996 112 112 112s112 -50.0996 112 -112s-50.0996 -112 -112 -112zM112 384c-26.5 0 -48 -21.5 -48 -48s21.5 -48 48 -48s48 21.5 48 48s-21.5 48 -48 48zM336 160c61.9004 0 112 -50.0996 112 -112
+s-50.0996 -112 -112 -112s-112 50.0996 -112 112s50.0996 112 112 112zM336 0c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48zM392.3 447.8l31.6006 0.100586c19.3994 0.0996094 30.8994 -21.8008 19.6992 -37.8008l-366.199 -463.699
+c-3.94629 -5.62793 -12.7275 -10.1973 -19.6006 -10.2002l-33.3994 -0.100586c-19.5 0 -30.9004 21.9004 -19.7002 37.8008l368 463.699c4.5 6.40039 11.7998 10.2002 19.5996 10.2002z" />
+ <glyph glyph-name="universal-access" unicode="&#xf29a;"
+d="M256 400c-114.971 0 -208 -93.0469 -208 -208c0 -114.971 93.0469 -208 208 -208c114.971 0 208 93.0469 208 208c0 114.971 -93.0469 208 -208 208zM256 440c136.967 0 248 -111.033 248 -248s-111.033 -248 -248 -248s-248 111.033 -248 248s111.033 248 248 248z
+M256 384c106.039 0 192 -85.9609 192 -192s-85.9609 -192 -192 -192s-192 85.9609 -192 192s85.9609 192 192 192zM256 340c-19.8818 0 -36 -16.1182 -36 -36s16.1182 -36 36 -36s36 16.1182 36 36s-16.1182 36 -36 36zM373.741 241.977
+c8.59961 2.03027 13.9258 10.6484 11.8965 19.249c-2.03027 8.60156 -10.6494 13.9258 -19.249 11.8955c-96.4912 -22.7832 -124.089 -22.8291 -220.774 0c-8.60254 2.03125 -17.2178 -3.29395 -19.249 -11.8955c-2.03125 -8.60059 3.29492 -17.2178 11.8945 -19.249
+c28.7129 -6.7793 55.5127 -12.749 82.1416 -15.8066c-0.852539 -101.08 -12.3242 -123.08 -25.0371 -155.621c-3.61719 -9.25879 0.957031 -19.6982 10.2168 -23.3145c9.26465 -3.61914 19.7002 0.961914 23.3154 10.2168c8.72754 22.3408 17.0947 40.6982 22.2617 78.5488
+h9.68555c5.1748 -37.9131 13.5566 -56.2412 22.2617 -78.5488c3.61621 -9.25977 14.0547 -13.834 23.3154 -10.2168c9.25977 3.61621 13.834 14.0547 10.2168 23.3145c-12.7305 32.5693 -24.1855 54.5986 -25.0371 155.621c26.6299 3.05859 53.4287 9.02832 82.1406 15.8066
+z" />
+ <glyph glyph-name="blind" unicode="&#xf29d;" horiz-adv-x="383"
+d="M380.15 -62.8369c-1.05664 -0.640625 -2.91602 -1.16113 -4.15137 -1.16113c-2.48438 0 -5.54785 1.72363 -6.83789 3.84766l-125.33 206.428c4.25684 1.68848 10.0615 5.9375 12.958 9.48438l126.048 -207.607c0.641602 -1.05664 1.16211 -2.91699 1.16211 -4.15234
+c0 -2.48535 -1.72461 -5.5498 -3.84863 -6.83887zM142.803 133.662l62.8145 -153.537c6.69141 -16.3584 -1.14453 -35.042 -17.501 -41.7344c-16.3564 -6.69043 -35.04 1.1416 -41.7334 17.501l-36.1201 88.2852zM96 360c-24.3008 0 -44 19.6992 -44 44s19.6992 44 44 44
+s44 -19.6992 44 -44s-19.6992 -44 -44 -44zM250.837 190.872c8.19336 -10.374 6.44434 -25.4922 -3.96582 -33.708c-9.33984 -7.37402 -24.5635 -7.61914 -33.708 3.96484l-102.3 129.217c-0.663086 0.836914 -2.06738 1.51562 -3.13477 1.51562
+c-2.20801 0 -4 -1.79297 -4 -4.00098c0 -0.769531 0.387695 -1.88281 0.865234 -2.48535l31.4062 -39.8164v-107.196l-65.9258 -181.288c-6.04102 -16.6143 -24.4072 -25.1768 -41.0088 -19.1387c-16.6104 6.04004 -25.1787 24.4004 -19.1387 41.0098l54.0732 148.693
+v140.698l-16 -20.5713v-79.7656c0 -13.0996 -10.4951 -23.748 -23.5361 -23.9961c-13.4531 -0.254883 -24.4639 11.0811 -24.4639 24.5361v95.6943l61.0557 78.5c4.72754 6.0791 11.7979 9.23633 18.9443 9.23926v0.0263672h32v-0.015625
+c7.08691 -0.00390625 14.1035 -3.11719 18.8369 -9.1123z" />
+ <glyph glyph-name="audio-description" unicode="&#xf29e;"
+d="M162.925 209.291l8.82227 -30.6553h-25.6064l9.04102 30.6523c1.27734 4.4209 2.65137 9.99414 3.87207 15.2451c1.2207 -5.25098 2.59473 -10.8232 3.87109 -15.2422zM329.399 241.39c28.6846 0 46.1748 -16.7656 46.1748 -49.0049
+c0 -32.0977 -16.3994 -49.7754 -46.1748 -49.7754h-14.5234v98.7803h14.5234zM512 336v-288c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v288c0 26.5098 21.4902 48 48 48h416c26.5098 0 48 -21.4902 48 -48zM245.459 111.861l-57.0967 168
+c-1.52734 4.49219 -6.61719 8.13867 -11.3623 8.13867h-35.8936c-4.74512 0 -9.83594 -3.64648 -11.3623 -8.13867l-57.0967 -168c-2.64453 -7.7832 3.1416 -15.8613 11.3613 -15.8613h29.1328c4.99219 0 10.1602 3.89453 11.5352 8.69336l8.57422 29.9053h51.3672
+l8.79297 -29.9766c1.39648 -4.75977 6.55469 -8.62207 11.5146 -8.62207v0h29.1719c8.2207 0 14.0059 8.07812 11.3613 15.8613zM430.16 192.386c0 58.9775 -37.9189 95.6143 -98.96 95.6143h-57.3662c-6.62695 0 -12 -5.37305 -12 -12v-168c0 -6.62695 5.37305 -12 12 -12
+h57.3662c61.041 0 98.96 36.9326 98.96 96.3857z" />
+ <glyph glyph-name="phone-volume" unicode="&#xf2a0;" horiz-adv-x="383"
+d="M97.333 -58.9658c-129.874 129.874 -129.681 340.252 0 469.933c5.69824 5.69824 14.5273 6.63184 21.2627 2.42188l64.8174 -40.5127c4.45898 -2.78711 8.07812 -9.31641 8.07812 -14.5752c0 -1.82715 -0.550781 -4.68652 -1.22949 -6.38281l-32.4082 -81.0205
+c-2.38477 -5.96484 -9.53418 -10.8047 -15.958 -10.8047c-0.473633 0 -1.23926 0.0380859 -1.71094 0.0849609l-55.8096 5.58008c-21.0508 -58.2607 -20.6123 -122.471 0 -179.515l55.8105 5.58105c0.47168 0.046875 1.2373 0.0849609 1.71094 0.0849609
+c6.42383 0 13.5732 -4.83984 15.959 -10.8037l32.4072 -81.0225c0.678711 -1.69629 1.22949 -4.55566 1.22949 -6.38281c0 -5.25879 -3.61914 -11.7881 -8.07812 -14.5752l-64.8174 -40.5127c-2.30762 -1.44238 -6.38867 -2.6123 -9.10938 -2.6123
+c-3.92969 0 -9.375 2.25488 -12.1543 5.03418zM247.126 352.527c11.832 -20.0469 11.832 -45.0088 0 -65.0557c-3.9502 -6.69238 -13.1084 -7.95898 -18.7178 -2.58105l-5.97559 5.72656c-3.91016 3.74805 -4.79297 9.62207 -2.26074 14.4102
+c2.04883 3.87793 3.71094 10.5859 3.71094 14.9717c0 4.38672 -1.66211 11.0947 -3.71094 14.9727c-2.5332 4.78809 -1.64941 10.6621 2.26074 14.4102l5.97559 5.72656c5.60938 5.37793 14.7676 4.11133 18.7178 -2.58105zM338.913 443.714
+c60.1396 -71.6035 60.0918 -175.882 0 -247.428c-4.47363 -5.32715 -12.5303 -5.74609 -17.5518 -0.933594l-5.79785 5.55762c-4.56055 4.37109 -4.97754 11.5293 -0.930664 16.3789c49.6875 59.5381 49.6465 145.933 0 205.422
+c-4.04688 4.84961 -3.63086 12.0078 0.930664 16.3789l5.79785 5.55762c5.02148 4.8125 13.0781 4.39355 17.5518 -0.933594zM292.941 398.773c36.0498 -46.3223 36.1074 -111.149 0 -157.547c-4.39062 -5.64062 -12.6973 -6.25098 -17.8564 -1.30371l-5.81836 5.5791
+c-4.39941 4.21875 -4.99805 11.0947 -1.28418 15.9307c26.5352 34.5645 26.5332 82.5723 0 117.135c-3.71387 4.83594 -3.11523 11.7109 1.28418 15.9307l5.81836 5.5791c5.15918 4.94727 13.4658 4.33691 17.8564 -1.30371z" />
+ <glyph glyph-name="braille" unicode="&#xf2a1;" horiz-adv-x="640"
+d="M128 192c0 -35.3457 -28.6543 -64 -64 -64s-64 28.6543 -64 64s28.6543 64 64 64s64 -28.6543 64 -64zM64 64c17.6729 0 32 -14.3271 32 -32s-14.3271 -32 -32 -32s-32 14.3271 -32 32s14.3271 32 32 32zM64 416c35.3457 0 64 -28.6543 64 -64s-28.6543 -64 -64 -64
+s-64 28.6543 -64 64s28.6543 64 64 64zM224 224c17.6729 0 32 -14.3271 32 -32s-14.3271 -32 -32 -32s-32 14.3271 -32 32s14.3271 32 32 32zM224 64c17.6729 0 32 -14.3271 32 -32s-14.3271 -32 -32 -32s-32 14.3271 -32 32s14.3271 32 32 32zM224 416
+c35.3457 0 64 -28.6543 64 -64s-28.6543 -64 -64 -64s-64 28.6543 -64 64s28.6543 64 64 64zM448 224c17.6729 0 32 -14.3271 32 -32s-14.3271 -32 -32 -32s-32 14.3271 -32 32s14.3271 32 32 32zM448 64c17.6729 0 32 -14.3271 32 -32s-14.3271 -32 -32 -32
+s-32 14.3271 -32 32s14.3271 32 32 32zM448 416c35.3457 0 64 -28.6543 64 -64s-28.6543 -64 -64 -64s-64 28.6543 -64 64s28.6543 64 64 64zM608 224c17.6729 0 32 -14.3271 32 -32s-14.3271 -32 -32 -32s-32 14.3271 -32 32s14.3271 32 32 32zM608 64
+c17.6729 0 32 -14.3271 32 -32s-14.3271 -32 -32 -32s-32 14.3271 -32 32s14.3271 32 32 32zM608 384c17.6729 0 32 -14.3271 32 -32s-14.3271 -32 -32 -32s-32 14.3271 -32 32s14.3271 32 32 32z" />
+ <glyph glyph-name="assistive-listening-systems" unicode="&#xf2a2;"
+d="M216 188c0 -15.4639 -12.5361 -28 -28 -28s-28 12.5361 -28 28c0 44.1123 35.8877 80 80 80s80 -35.8877 80 -80c0 -15.4639 -12.5361 -28 -28 -28s-28 12.5361 -28 28c0 13.2344 -10.7666 24 -24 24s-24 -10.7656 -24 -24zM240 364c97.0469 0 176 -78.9531 176 -176
+c0 -95.9863 -71.4053 -109.798 -72 -144.226c-0.124023 -59.4463 -48.5254 -107.774 -108 -107.774c-15.4639 0 -28 12.5361 -28 28s12.5361 28 28 28c28.6729 0 52 23.3271 52 52l0.00292969 0.37793c0.988281 73.3115 71.9971 68.458 71.9971 143.622
+c0 66.168 -53.832 120 -120 120s-120 -53.832 -120 -120c0 -15.4639 -12.5361 -28 -28 -28s-28 12.5361 -28 28c0 97.0469 78.9531 176 176 176zM160 128c17.6729 0 32 -14.3271 32 -32s-14.3271 -32 -32 -32s-32 14.3271 -32 32s14.3271 32 32 32zM32 0
+c17.6729 0 32 -14.3271 32 -32s-14.3271 -32 -32 -32s-32 14.3271 -32 32s14.3271 32 32 32zM512 187.993c0 -15.4639 -12.5371 -28 -28.001 -28s-28 12.5361 -28 28c0 1.12695 -0.00683594 2.24512 -0.0332031 3.36328l-0.00292969 0.1875
+c-1.4668 91.4404 -60.1709 172.599 -146.077 201.953c-14.6318 5.00098 -22.4414 20.917 -17.4414 35.5498c5.00098 14.6328 20.917 22.4404 35.5498 17.4424c108.163 -36.959 182.082 -139.015 183.961 -253.965c0.0332031 -1.50684 0.0449219 -3.01367 0.0449219 -4.53125
+zM152.971 8.9707l-33.9404 -33.9404l-80 80l33.9404 33.9404z" />
+ <glyph glyph-name="american-sign-language-interpreting" unicode="&#xf2a3;" horiz-adv-x="639"
+d="M290.547 258.961c-20.2949 10.1494 -44.1465 11.1992 -64.7393 3.88965c42.6064 0 71.208 -20.4746 85.5781 -50.5752c8.57617 -17.8994 -5.14746 -38.0713 -23.6172 -38.0713c18.4297 0 32.2119 -20.1357 23.6172 -38.0713
+c-14.7246 -30.8457 -46.123 -50.8535 -80.2979 -50.8535c-0.556641 0 -94.4707 8.61426 -94.4707 8.61426l-66.4062 -33.3467c-9.38379 -4.69336 -19.8145 -0.378906 -23.8945 7.78125l-44.4561 88.9248c-4.16699 8.61523 -1.11133 18.8975 6.94531 23.6211l58.0723 33.0693
+l41.1221 74.1953c6.38965 57.2451 34.7314 109.768 79.7432 146.727c11.3906 9.44824 28.3408 7.78125 37.5098 -3.61328c9.44629 -11.3936 7.78027 -28.0674 -3.6123 -37.5156c-12.5029 -10.5596 -23.6172 -22.5098 -32.5088 -35.5703
+c21.6719 14.7285 46.6787 24.7324 74.1865 28.0674c14.7246 1.94434 28.0625 -8.33594 29.7295 -23.0654c1.94531 -14.7275 -8.33594 -28.0674 -23.0615 -29.7344c-16.1162 -1.94434 -31.1201 -7.50293 -44.1787 -15.2832c26.1143 5.71289 58.7119 3.1377 88.0791 -11.1152
+c13.3359 -6.66895 18.8936 -22.5088 12.2246 -35.8486c-6.38965 -13.0596 -22.5039 -18.6162 -35.5645 -12.2256zM263.318 189.489c-6.1123 12.5049 -18.3379 20.2861 -32.2314 20.2861h-0.105469c-19.5732 0 -35.46 -15.8867 -35.46 -35.46
+c0 -0.0302734 0 -0.0800781 0.000976562 -0.110352c0 -21.4277 17.8076 -35.5703 35.5645 -35.5703c13.8936 0 26.1191 7.78125 32.2314 20.2861c4.44531 9.44922 13.6133 15.0059 23.3389 15.2842c-9.72559 0.277344 -18.8936 5.83496 -23.3389 15.2842zM638.139 226.726
+c4.16797 -8.61426 1.11133 -18.8965 -6.94531 -23.6201l-58.0713 -33.0693l-41.1221 -74.1963c-6.38965 -57.2451 -34.7314 -109.767 -79.7432 -146.726c-10.9316 -9.1123 -27.7988 -8.14453 -37.5098 3.6123c-9.44629 11.3945 -7.78027 28.0674 3.61328 37.5166
+c12.5029 10.5586 23.6162 22.5088 32.5078 35.5703c-21.6719 -14.7295 -46.6787 -24.7324 -74.1865 -28.0674c-10.0205 -2.50586 -27.5518 5.64258 -29.7295 23.0645c-1.94531 14.7285 8.33594 28.0674 23.0615 29.7344c16.1162 1.94629 31.1201 7.50293 44.1787 15.2842
+c-26.1143 -5.71289 -58.7119 -3.1377 -88.0791 11.1152c-13.3359 6.66895 -18.8936 22.5088 -12.2246 35.8477c6.38965 13.0605 22.5049 18.6191 35.5654 12.2266c20.2949 -10.1484 44.1465 -11.1982 64.7393 -3.88965c-42.6064 0 -71.208 20.4746 -85.5781 50.5762
+c-8.57617 17.8984 5.14746 38.0713 23.6172 38.0713c-18.4297 0 -32.2109 20.1357 -23.6172 38.0703c14.0332 29.3965 44.0391 50.8877 81.9658 50.8545l92.8027 -8.61523l66.4062 33.3467c9.4082 4.7041 19.8281 0.354492 23.8936 -7.78027zM408.912 245.344
+c-13.8936 0 -26.1191 -7.78027 -32.2314 -20.2861c-4.44531 -9.44824 -13.6133 -15.0059 -23.3389 -15.2832c9.72559 -0.27832 18.8936 -5.83594 23.3389 -15.2842c6.1123 -12.5049 18.3379 -20.2861 32.2314 -20.2861h0.105469c19.5732 0 35.46 15.8857 35.46 35.46
+c0 0.0302734 0 0.0791016 -0.000976562 0.110352c0 21.4287 -17.8076 35.5693 -35.5645 35.5693z" />
+ <glyph glyph-name="deaf" unicode="&#xf2a4;"
+d="M216 188c0 -15.4639 -12.5361 -28 -28 -28s-28 12.5361 -28 28c0 44.1123 35.8877 80 80 80s80 -35.8877 80 -80c0 -15.4639 -12.5361 -28 -28 -28s-28 12.5361 -28 28c0 13.2344 -10.7666 24 -24 24s-24 -10.7656 -24 -24zM240 364c97.0469 0 176 -78.9531 176 -176
+c0 -95.9863 -71.4053 -109.798 -72 -144.226c-0.124023 -59.4463 -48.5254 -107.774 -108 -107.774c-15.4639 0 -28 12.5361 -28 28s12.5361 28 28 28c28.6729 0 52 23.3271 52 52l0.00292969 0.37793c0.988281 73.3115 71.9971 68.458 71.9971 143.622
+c0 66.168 -53.832 120 -120 120s-120 -53.832 -120 -120c0 -15.4639 -12.5361 -28 -28 -28s-28 12.5361 -28 28c0 97.0469 78.9531 176 176 176zM508.485 416.201c4.68652 -4.68652 4.68652 -12.2842 0 -16.9727l-87.0303 -87.0283
+c-4.68652 -4.68652 -12.2842 -4.68652 -16.9697 0l-28.2852 28.2852c-4.68652 4.68652 -4.68652 12.2842 0 16.9707l87.0283 87.0293c4.6875 4.68555 12.2842 4.68555 16.9717 0zM168.97 133.255l28.2861 -28.2842c4.68652 -4.68652 4.68652 -12.2852 0 -16.9707
+l-148.484 -148.485c-4.6875 -4.68555 -12.2842 -4.68555 -16.9717 0l-28.2852 28.2842c-4.68555 4.6875 -4.68555 12.2852 0 16.9707l148.485 148.485c4.68555 4.68652 12.2842 4.68652 16.9697 0z" />
+ <glyph glyph-name="sign-language" unicode="&#xf2a7;" horiz-adv-x="447"
+d="M91.4336 -35.9873c-0.306641 16.0186 13.1094 29.1289 29.1309 29.1289h62.293v5.71484h-125.864c-16.0215 0 -29.4375 13.1104 -29.1299 29.1289c0.296875 15.5234 12.9717 28.0146 28.5645 28.0146h126.43v5.71387h-153.722
+c-16.0205 0 -29.4365 13.1113 -29.1299 29.1289c0.296875 15.5225 12.9727 28.0137 28.5664 28.0137h154.286v5.71387h-125.151c-16.0215 0 -29.4375 13.1104 -29.1299 29.1289c0.296875 15.5215 12.9727 28.0127 28.5654 28.0127h168.566l-31.085 22.6064
+c-12.7617 9.28027 -15.583 27.1484 -6.30176 39.9121c9.28027 12.7607 27.1494 15.5811 39.9121 6.30176l123.36 -89.7148c7.79395 -5.66895 14.1201 -18.0908 14.1201 -27.7285v-141.136c0 -15.9102 -10.9463 -29.7305 -26.4326 -33.374l-80.4717 -18.9346
+c-8.55176 -2.01172 -22.624 -3.64551 -31.4102 -3.64551h-107.4c-15.5928 0.000976562 -28.2686 12.4922 -28.5664 28.0137zM164.683 189.714l-36.3711 46.71c-9.5791 12.3027 -7.51172 29.9795 4.55371 39.75c12.4502 10.083 31.0371 7.55273 40.8799 -5.08789
+l13.0039 -16.7002c-17.1426 -15.6484 -15.4092 -43.0244 3.16992 -56.5361l11.1875 -8.13574h-36.4238zM447.981 191.817c0.00976562 -0.311523 0.0175781 -0.81543 0.0175781 -1.12695c0 -9.24902 -5.92188 -21.3682 -13.2197 -27.0508l-61.0645 -47.5488v16.999
+c0 13.4834 -6.51074 26.2686 -17.415 34.1982l-123.359 89.7139c-12.6357 9.18945 -29.1934 9.16113 -41.6904 0.904297l-52.0527 66.8486c-9.84375 12.6416 -7.74121 31.2822 5.08594 40.8809c12.4297 9.30273 30.0732 6.97559 39.6523 -5.32812l77.6758 -99.7539
+l4.50879 3.51172l-94.4434 121.287c-9.84277 12.6416 -7.74121 31.2822 5.08594 40.8818c12.4307 9.30176 30.0732 6.97461 39.6533 -5.32812l94.79 -121.734l4.50879 3.51074l-76.8887 98.7451c-9.84277 12.6416 -7.74121 31.2822 5.08594 40.8809
+c12.4297 9.30273 30.0732 6.97559 39.6533 -5.32715l103.562 -133.001l-1.26172 38.4141c-0.518555 15.7715 11.8457 28.9756 27.6191 29.4932c15.7705 0.517578 28.9746 -11.8477 29.4922 -27.6191z" />
+ <glyph glyph-name="low-vision" unicode="&#xf2a8;" horiz-adv-x="576"
+d="M569.344 216.369c3.67383 -6.22461 6.65625 -17.1416 6.65625 -24.3691s-2.98242 -18.1455 -6.65723 -24.3701c-31.9746 -54.2607 -79.6484 -98.3232 -136.81 -126.301l0.00683594 -0.00878906l43.1201 -58.377c7.60156 -10.8594 4.95996 -25.8252 -5.90039 -33.4268
+l-13.1133 -9.17773c-10.8594 -7.59863 -25.8223 -4.95801 -33.4238 5.90039l-251.836 356.544c-11.1797 -5.09375 -28.5518 -14.7539 -38.7764 -21.5635l189.979 -271.399c-9.52637 -1.00488 -25.0342 -1.82031 -34.6133 -1.82031
+c-12.29 0 -32.1484 1.33984 -44.3262 2.99023l-40.6309 58.04h-0.00976562l-119.399 170.58c-8.64453 -9.25391 -21.3203 -25.3428 -28.2939 -35.9121l124.19 -177.417c-73.1172 25.4863 -134.358 76.0166 -172.858 141.349c-8.96484 15.2109 -8.76562 33.8643 0 48.7393
+c0.0107422 0.0166016 0.0234375 0.0332031 0.0332031 0.0498047c33.5459 56.8984 82.7676 99.8506 136.79 126.242l-43.1309 58.3945c-7.60156 10.8604 -4.95996 25.8252 5.90039 33.4268l13.1143 9.17773c10.8584 7.59961 25.8213 4.95801 33.4229 -5.90039
+l52.7705 -72.1689c26.3496 6.79004 53.9834 10.4092 82.4512 10.4092c119.81 0 224.96 -63.9492 281.344 -159.631zM390.026 102.06c21.1406 23.9658 33.9736 55.4365 33.9736 89.9404c0 75.1738 -60.8379 136 -136 136c-17.5117 0 -34.2422 -3.30566 -49.6084 -9.32324
+l19.0684 -27.2363c25.9883 7.96289 54.7598 5.56836 79.5098 -7.68066h-0.0292969c-23.6504 0 -42.8203 -19.1699 -42.8203 -42.8193c0 -23.4717 18.9922 -42.8203 42.8203 -42.8203c23.6494 0 42.8193 19.1699 42.8193 42.8203v0.0292969
+c18.9111 -35.3271 15.8818 -79.1123 -8.7998 -111.68z" />
+ <glyph glyph-name="handshake" unicode="&#xf2b5;" horiz-adv-x="640"
+d="M434.7 384c8.5 0 16.7002 -3.40039 22.5996 -9.40039l54.6006 -54.5996v-193.5c-2.40039 2.7002 -5 5.2998 -7.90039 7.7002l-145.6 118.2l26.0996 23.8994c6.5 6 7 16.1006 1 22.6006c-5.90039 6.5 -16.0996 6.89941 -22.5996 1l-79.9004 -73.2002
+c-0.0996094 -0.100586 -0.299805 -0.100586 -0.400391 -0.200195c-16.6992 -14.9004 -43.3994 -11.2002 -56.0996 2.7002c-14.2002 15.5 -14.5 40.3994 2.09961 56c0.100586 0.0996094 0.200195 0.299805 0.300781 0.399414l98.2998 90
+c5.89941 5.40039 13.5996 8.40039 21.5996 8.40039h85.9004zM544 319.8h96v-255.899h-64c-17.7002 0 -32 14.2998 -32 32v223.899zM592 95.9004c8.7998 0 16 7.19922 16 16c0 8.7998 -7.2002 16 -16 16s-16 -7.2002 -16 -16c0 -8.80078 7.2002 -16 16 -16zM0 64v255.8h96
+v-223.8c0 -17.7002 -14.2998 -32 -32 -32h-64zM48 127.9c-8.7998 0 -16 -7.10059 -16 -16c0 -8.80078 7.2002 -16 16 -16s16 7.19922 16 16c0 8.7998 -7.2002 16 -16 16zM483.9 109.3c13.6992 -11.2002 15.7998 -31.2998 4.59961 -45.0996l-9.5 -11.7002
+c-11.0996 -13.7998 -31.2998 -15.7998 -45 -4.7002l-5.40039 4.40039l-31.3994 -38.6006c-12.9004 -15.8994 -36.4004 -18.3994 -52.2998 -5.39941l-17.9004 15.5l-0.200195 -0.200195c-22.2998 -27.4004 -62.5996 -31.5996 -90 -9.2998l-90.5 81.8994h-18.2998v223.9
+l54.7002 54.5996c6 6 14.0996 9.40039 22.5996 9.40039h83.7998l-81.7998 -74.9004c-29.2002 -26.7998 -31.2998 -72.2998 -4.39941 -101.699c26.5 -28.9004 72 -31.5 101.699 -4.40039l30 27.5z" />
+ <glyph glyph-name="envelope-open" unicode="&#xf2b6;"
+d="M512 -16c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v263.276c0 12.8955 8.2373 29.8193 18.3867 37.7754c24.9131 19.5293 45.501 35.3652 164.2 121.512c16.8252 12.2666 50.21 41.7832 73.4131 41.4336
+c23.1982 0.353516 56.5957 -29.1719 73.4131 -41.4326c118.687 -86.1377 139.303 -101.995 164.2 -121.513c10.1494 -7.95605 18.3867 -24.8799 18.3867 -37.7754v-263.276zM446.334 180.605c-2.5625 3.72754 -7.7002 4.59473 -11.3389 1.90625
+c-22.8447 -16.873 -55.4619 -40.7051 -105.582 -77.0791c-16.8252 -12.2656 -50.21 -41.7803 -73.4131 -41.4297c-23.2109 -0.34375 -56.5586 29.1436 -73.4131 41.4297c-50.1143 36.3701 -82.7344 60.2041 -105.582 77.0791
+c-3.63867 2.68848 -8.77637 1.82129 -11.3389 -1.90625l-9.07227 -13.1963c-0.777344 -1.13086 -1.4082 -3.16113 -1.4082 -4.5332c0 -2.22754 1.45508 -5.11035 3.24707 -6.43359c22.8877 -16.8994 55.4541 -40.6904 105.304 -76.8682
+c20.2734 -14.7812 56.5234 -47.8135 92.2637 -47.5732c35.7236 -0.242188 71.9609 32.7715 92.2627 47.5732c49.8506 36.1787 82.418 59.9697 105.304 76.8682c1.79199 1.32324 3.24707 4.20605 3.24707 6.43359c0 1.37207 -0.630859 3.40234 -1.4082 4.5332z" />
+ <glyph glyph-name="address-book" unicode="&#xf2b9;" horiz-adv-x="448"
+d="M436 288h-20v-64h20c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-20v-64h20c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-20v-48c0 -26.5 -21.5 -48 -48 -48h-320c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48
+h320c26.5 0 48 -21.5 48 -48v-48h20c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12zM208 320c-35.2998 0 -64 -28.7002 -64 -64s28.7002 -64 64 -64s64 28.7002 64 64s-28.7002 64 -64 64zM320 83.2002v19.2002
+c0 31.7998 -30.0996 57.5996 -67.2002 57.5996h-5c-12.2002 -5.09961 -25.7002 -8 -39.7998 -8s-27.5 2.90039 -39.7998 8h-5c-37.1006 0 -67.2002 -25.7998 -67.2002 -57.5996v-19.2002c0 -10.6006 10 -19.2002 22.4004 -19.2002h179.199
+c12.4004 0 22.4004 8.59961 22.4004 19.2002z" />
+ <glyph glyph-name="address-card" unicode="&#xf2bb;" horiz-adv-x="576"
+d="M528 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h480zM176 320c-35.2998 0 -64 -28.7002 -64 -64s28.7002 -64 64 -64s64 28.7002 64 64s-28.7002 64 -64 64zM288 83.2002v19.2002
+c0 31.7998 -30.0996 57.5996 -67.2002 57.5996h-5c-12.2002 -5.09961 -25.7002 -8 -39.7998 -8s-27.5 2.90039 -39.7998 8h-5c-37.1006 0 -67.2002 -25.7998 -67.2002 -57.5996v-19.2002c0 -10.6006 10 -19.2002 22.4004 -19.2002h179.199
+c12.4004 0 22.4004 8.59961 22.4004 19.2002zM512 136v16c0 4.40039 -3.59961 8 -8 8h-144c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h144c4.40039 0 8 3.59961 8 8zM512 200v16c0 4.40039 -3.59961 8 -8 8h-144c-4.40039 0 -8 -3.59961 -8 -8v-16
+c0 -4.40039 3.59961 -8 8 -8h144c4.40039 0 8 3.59961 8 8zM512 264v16c0 4.40039 -3.59961 8 -8 8h-144c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h144c4.40039 0 8 3.59961 8 8z" />
+ <glyph glyph-name="user-circle" unicode="&#xf2bd;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 344c-48.5996 0 -88 -39.4004 -88 -88s39.4004 -88 88 -88s88 39.4004 88 88s-39.4004 88 -88 88zM248 0c58.7002 0 111.3 26.5996 146.5 68.2002
+c-18.7998 35.3994 -55.5996 59.7998 -98.5 59.7998c-2.40039 0 -4.7998 -0.400391 -7.09961 -1.09961c-12.9004 -4.2002 -26.6006 -6.90039 -40.9004 -6.90039s-27.9004 2.7002 -40.9004 6.90039c-2.2998 0.699219 -4.69922 1.09961 -7.09961 1.09961
+c-42.9004 0 -79.7002 -24.4004 -98.5 -59.7998c35.2002 -41.6006 87.7998 -68.2002 146.5 -68.2002z" />
+ <glyph glyph-name="id-badge" unicode="&#xf2c1;" horiz-adv-x="384"
+d="M336 448c26.5 0 48 -21.5 48 -48v-416c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48h288zM144 416c-8.7998 0 -16 -7.2002 -16 -16s7.2002 -16 16 -16h96c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16h-96zM192 288
+c-35.2998 0 -64 -28.7002 -64 -64s28.7002 -64 64 -64s64 28.7002 64 64s-28.7002 64 -64 64zM304 51.2002v19.2002c0 31.7998 -30.0996 57.5996 -67.2002 57.5996h-5c-12.2002 -5.09961 -25.7002 -8 -39.7998 -8s-27.5 2.90039 -39.7998 8h-5
+c-37.1006 0 -67.2002 -25.7998 -67.2002 -57.5996v-19.2002c0 -10.6006 10 -19.2002 22.4004 -19.2002h179.199c12.4004 0 22.4004 8.59961 22.4004 19.2002z" />
+ <glyph glyph-name="id-card" unicode="&#xf2c2;" horiz-adv-x="576"
+d="M528 416c26.5 0 48 -21.5 48 -48v-16h-576v16c0 26.5 21.5 48 48 48h480zM0 16v304h576v-304c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48zM352 248v-16c0 -4.40039 3.59961 -8 8 -8h144c4.40039 0 8 3.59961 8 8v16c0 4.40039 -3.59961 8 -8 8h-144
+c-4.40039 0 -8 -3.59961 -8 -8zM352 184v-16c0 -4.40039 3.59961 -8 8 -8h144c4.40039 0 8 3.59961 8 8v16c0 4.40039 -3.59961 8 -8 8h-144c-4.40039 0 -8 -3.59961 -8 -8zM352 120v-16c0 -4.40039 3.59961 -8 8 -8h144c4.40039 0 8 3.59961 8 8v16
+c0 4.40039 -3.59961 8 -8 8h-144c-4.40039 0 -8 -3.59961 -8 -8zM176 256c-35.2998 0 -64 -28.7002 -64 -64s28.7002 -64 64 -64s64 28.7002 64 64s-28.7002 64 -64 64zM67.0996 51.7998c-3.19922 -9.7998 5.2002 -19.7998 15.6006 -19.7998h186.6
+c10.4004 0 18.7998 9.90039 15.6006 19.7998c-8.40039 25.7002 -32.5 44.2002 -60.9004 44.2002h-8.2002c-12.2002 -5.09961 -25.7002 -8 -39.7998 -8s-27.5 2.90039 -39.7998 8h-8.2002c-28.4004 0 -52.5 -18.5 -60.9004 -44.2002z" />
+ <glyph glyph-name="thermometer-full" unicode="&#xf2c7;" horiz-adv-x="256"
+d="M224 352v-203.347c19.9121 -22.5635 32 -52.1943 32 -84.6533c0 -70.6963 -57.3018 -128 -128 -128c-0.298828 0 -0.610352 0.000976562 -0.90918 0.00292969c-70.3018 0.488281 -127.448 58.3613 -127.089 128.664c0.164062 32.1973 12.2227 61.5771 31.998 83.9863
+v203.347c0 53.0186 42.9814 96 96 96s96 -42.9814 96 -96zM128 -16c44.1123 0 80 35.8877 80 80c0 34.3379 -19.3701 52.1904 -32 66.502v221.498c0 26.4668 -21.5332 48 -48 48s-48 -21.5332 -48 -48v-221.498c-12.7334 -14.4277 -31.8262 -32.0996 -31.999 -66.0801
+c-0.223633 -43.876 35.5635 -80.1162 79.4229 -80.4199zM192 64c0 -35.3457 -28.6543 -64 -64 -64s-64 28.6543 -64 64c0 23.6846 12.876 44.3486 32 55.417v232.583c0 17.6729 14.3271 32 32 32s32 -14.3271 32 -32v-232.583c19.124 -11.0684 32 -31.7324 32 -55.417z" />
+ <glyph glyph-name="thermometer-three-quarters" unicode="&#xf2c8;" horiz-adv-x="256"
+d="M192 64c0 -35.3457 -28.6543 -64 -64 -64s-64 28.6543 -64 64c0 23.6846 12.876 44.3486 32 55.417v168.583c0 17.6729 14.3271 32 32 32s32 -14.3271 32 -32v-168.583c19.124 -11.0684 32 -31.7324 32 -55.417zM224 148.653c19.9121 -22.5635 32 -52.1943 32 -84.6533
+c0 -70.6963 -57.3027 -128 -128 -128c-0.298828 0 -0.609375 0.000976562 -0.90918 0.00292969c-70.3018 0.488281 -127.448 58.3613 -127.089 128.664c0.164062 32.1982 12.2227 61.5781 31.998 83.9863v203.347c0 53.0186 42.9814 96 96 96s96 -42.9814 96 -96v-203.347z
+M208 64c0 34.3389 -19.3701 52.1904 -32 66.502v221.498c0 26.4668 -21.5332 48 -48 48s-48 -21.5332 -48 -48v-221.498c-12.7324 -14.4277 -31.8252 -32.0996 -31.999 -66.0801c-0.223633 -43.876 35.5635 -80.1162 79.4229 -80.4199l0.576172 -0.00195312
+c44.1123 0 80 35.8877 80 80z" />
+ <glyph glyph-name="thermometer-half" unicode="&#xf2c9;" horiz-adv-x="256"
+d="M192 64c0 -35.3457 -28.6543 -64 -64 -64s-64 28.6543 -64 64c0 23.6846 12.876 44.3486 32 55.417v104.583c0 17.6729 14.3271 32 32 32s32 -14.3271 32 -32v-104.583c19.124 -11.0684 32 -31.7324 32 -55.417zM224 148.653c19.9121 -22.5635 32 -52.1943 32 -84.6533
+c0 -70.6963 -57.3027 -128 -128 -128c-0.298828 0 -0.609375 0.000976562 -0.90918 0.00292969c-70.3018 0.488281 -127.448 58.3613 -127.089 128.664c0.164062 32.1982 12.2227 61.5781 31.998 83.9863v203.347c0 53.0186 42.9814 96 96 96s96 -42.9814 96 -96v-203.347z
+M208 64c0 34.3389 -19.3701 52.1904 -32 66.502v221.498c0 26.4668 -21.5332 48 -48 48s-48 -21.5332 -48 -48v-221.498c-12.7324 -14.4277 -31.8252 -32.0996 -31.999 -66.0801c-0.223633 -43.876 35.5635 -80.1162 79.4229 -80.4199l0.576172 -0.00195312
+c44.1123 0 80 35.8877 80 80z" />
+ <glyph glyph-name="thermometer-quarter" unicode="&#xf2ca;" horiz-adv-x="256"
+d="M192 64c0 -35.3457 -28.6543 -64 -64 -64s-64 28.6543 -64 64c0 23.6846 12.876 44.3486 32 55.417v40.583c0 17.6729 14.3271 32 32 32s32 -14.3271 32 -32v-40.583c19.124 -11.0684 32 -31.7324 32 -55.417zM224 148.653c19.9121 -22.5635 32 -52.1943 32 -84.6533
+c0 -70.6963 -57.3027 -128 -128 -128c-0.298828 0 -0.609375 0.000976562 -0.90918 0.00292969c-70.3018 0.488281 -127.448 58.3613 -127.089 128.664c0.164062 32.1982 12.2227 61.5781 31.998 83.9863v203.347c0 53.0186 42.9814 96 96 96s96 -42.9814 96 -96v-203.347z
+M208 64c0 34.3389 -19.3701 52.1904 -32 66.502v221.498c0 26.4668 -21.5332 48 -48 48s-48 -21.5332 -48 -48v-221.498c-12.7324 -14.4277 -31.8252 -32.0996 -31.999 -66.0801c-0.223633 -43.876 35.5635 -80.1162 79.4229 -80.4199l0.576172 -0.00195312
+c44.1123 0 80 35.8877 80 80z" />
+ <glyph glyph-name="thermometer-empty" unicode="&#xf2cb;" horiz-adv-x="256"
+d="M192 64c0 -35.3457 -28.6543 -64 -64 -64s-64 28.6543 -64 64s28.6543 64 64 64s64 -28.6543 64 -64zM224 148.653c19.9121 -22.5635 32 -52.1943 32 -84.6533c0 -70.6963 -57.3027 -128 -128 -128c-0.298828 0 -0.609375 0.000976562 -0.90918 0.00292969
+c-70.3018 0.488281 -127.448 58.3613 -127.089 128.664c0.164062 32.1982 12.2227 61.5781 31.998 83.9863v203.347c0 53.0186 42.9814 96 96 96s96 -42.9814 96 -96v-203.347zM208 64c0 34.3389 -19.3701 52.1904 -32 66.502v221.498c0 26.4668 -21.5332 48 -48 48
+s-48 -21.5332 -48 -48v-221.498c-12.7324 -14.4277 -31.8252 -32.0996 -31.999 -66.0801c-0.223633 -43.876 35.5635 -80.1162 79.4229 -80.4199l0.576172 -0.00195312c44.1123 0 80 35.8877 80 80z" />
+ <glyph glyph-name="shower" unicode="&#xf2cc;"
+d="M389.66 312.4l-158.061 -158.061c-9.36914 -9.37012 -24.5693 -9.37012 -33.9395 0l-11.3203 11.3203c-9.37012 9.37012 -9.37012 24.5703 0 33.9395l0.110352 0.110352c-34.0303 40.21 -35.1602 98.9404 -3.39062 140.38
+c-11.9697 7.5498 -26.1396 11.9102 -41.2998 11.9102c-42.8799 0 -77.7598 -34.8799 -77.7598 -77.7598v-306.24h-64v306.24c0 78.1699 63.5898 141.76 141.76 141.76c36.9307 0 70.6104 -14.2002 95.8604 -37.4199c35.8994 11.5098 76.5 4.5 106.67 -21.0303
+l0.110352 0.110352c9.36914 9.37012 24.5693 9.37012 33.9395 0l11.3203 -11.3203c9.37012 -9.37012 9.37012 -24.5703 0 -33.9395zM384 240c0 -8.83691 -7.16309 -16 -16 -16s-16 7.16309 -16 16s7.16309 16 16 16s16 -7.16309 16 -16zM416 240c0 8.83691 7.16309 16 16 16
+s16 -7.16309 16 -16s-7.16309 -16 -16 -16s-16 7.16309 -16 16zM512 240c0 -8.83691 -7.16309 -16 -16 -16s-16 7.16309 -16 16s7.16309 16 16 16s16 -7.16309 16 -16zM352 208c0 -8.83691 -7.16309 -16 -16 -16s-16 7.16309 -16 16s7.16309 16 16 16s16 -7.16309 16 -16z
+M400 224c8.83691 0 16 -7.16309 16 -16s-7.16309 -16 -16 -16s-16 7.16309 -16 16s7.16309 16 16 16zM480 208c0 -8.83691 -7.16309 -16 -16 -16s-16 7.16309 -16 16s7.16309 16 16 16s16 -7.16309 16 -16zM320 176c0 -8.83691 -7.16309 -16 -16 -16s-16 7.16309 -16 16
+s7.16309 16 16 16s16 -7.16309 16 -16zM352 176c0 8.83691 7.16309 16 16 16s16 -7.16309 16 -16s-7.16309 -16 -16 -16s-16 7.16309 -16 16zM448 176c0 -8.83691 -7.16309 -16 -16 -16s-16 7.16309 -16 16s7.16309 16 16 16s16 -7.16309 16 -16zM320 144
+c0 8.83691 7.16309 16 16 16s16 -7.16309 16 -16s-7.16309 -16 -16 -16s-16 7.16309 -16 16zM416 144c0 -8.83691 -7.16309 -16 -16 -16s-16 7.16309 -16 16s7.16309 16 16 16s16 -7.16309 16 -16zM320 112c0 -8.83691 -7.16309 -16 -16 -16s-16 7.16309 -16 16
+s7.16309 16 16 16s16 -7.16309 16 -16zM384 112c0 -8.83691 -7.16309 -16 -16 -16s-16 7.16309 -16 16s7.16309 16 16 16s16 -7.16309 16 -16zM352 80c0 -8.83691 -7.16309 -16 -16 -16s-16 7.16309 -16 16s7.16309 16 16 16s16 -7.16309 16 -16zM320 48
+c0 -8.83691 -7.16309 -16 -16 -16s-16 7.16309 -16 16s7.16309 16 16 16s16 -7.16309 16 -16z" />
+ <glyph glyph-name="bath" unicode="&#xf2cd;"
+d="M488 192c13.2549 0 24 -10.7451 24 -24v-16c0 -13.2549 -10.7451 -24 -24 -24h-8v-32c0 -28.4297 -12.3623 -53.9688 -32 -71.5469v-32.4531c0 -13.2549 -10.7451 -24 -24 -24h-16c-13.2549 0 -24 10.7451 -24 24v8h-256v-8c0 -13.2549 -10.7451 -24 -24 -24h-16
+c-13.2549 0 -24 10.7451 -24 24v32.4531c-19.6377 17.5781 -32 43.1172 -32 71.5469v32h-8c-13.2549 0 -24 10.7451 -24 24v16c0 13.2549 10.7451 24 24 24h8v144c0 44.1123 35.8877 80 80 80c27.2119 0 51.2812 -13.667 65.7393 -34.4873
+c21.8838 6.06445 46.2285 1.10449 64.1777 -15.3643c4.71289 4.1748 11.916 4.02051 16.4277 -0.491211l11.3145 -11.3145c4.68555 -4.68652 4.68555 -12.2852 0 -16.9707l-95.0303 -95.0293c-4.68652 -4.68555 -12.2852 -4.68555 -16.9707 0l-11.3145 11.3145
+c-4.51172 4.51172 -4.66699 11.7148 -0.491211 16.4277c-21.5244 23.459 -23.3291 57.8281 -6.83789 83.0352c-5.68262 8.93457 -15.6641 14.8799 -27.0146 14.8799c-17.6445 0 -32 -14.3555 -32 -32v-144h408z" />
+ <glyph glyph-name="podcast" unicode="&#xf2ce;" horiz-adv-x="448"
+d="M267.429 -40.5635c-5.14258 -19.0098 -24.5703 -23.4365 -43.4287 -23.4365c-18.8574 0 -38.2861 4.42676 -43.4277 23.4365c-7.64551 28.4297 -20.5723 99.665 -20.5723 132.813c0 35.1562 31.1416 43.75 64 43.75s64 -8.59375 64 -43.75
+c0 -32.9492 -12.8711 -104.179 -20.5713 -132.813zM156.867 159.446c2.6748 -2.61914 2.39941 -6.98535 -0.628906 -9.18555c-9.3125 -6.76465 -16.4609 -15.3418 -21.2354 -25.3623c-1.74219 -3.65723 -6.5 -4.6582 -9.45312 -1.8877
+c-28.0176 26.2891 -45.5498 63.6279 -45.5498 104.989c0 80.7852 66.8691 146.247 148.163 143.941c76.1982 -2.16113 137.938 -64.1631 139.793 -140.369c1.04199 -42.7822 -16.6846 -81.5225 -45.5107 -108.565c-2.95215 -2.76855 -7.70801 -1.7627 -9.44922 1.8916
+c-4.77441 10.0195 -11.9219 18.5977 -21.2344 25.3623c-3.02832 2.20117 -3.30273 6.56738 -0.62793 9.1875c17.8018 17.4355 28.8662 41.7246 28.8662 68.5518c0 54.1934 -45.1377 98.042 -99.793 95.9258c-49.7295 -1.9248 -90.0703 -42.1572 -92.124 -91.8809
+c-1.17383 -28.4258 10.0908 -54.291 28.7842 -72.5986zM224 448c123.815 0 224 -100.205 224 -224c0 -90.1865 -52.7734 -165.727 -125.739 -201.407c-4.33203 -2.11914 -9.2666 1.54297 -8.53516 6.31055c2.55566 16.6416 4.625 33.1924 5.62402 47.2295
+c48.4922 31.377 80.6504 85.9355 80.6504 147.867c0 97.2031 -79.207 176.253 -176.468 175.999c-96.2393 -0.250977 -174.938 -78.6621 -175.529 -174.899c-0.376953 -61.3311 30.7871 -115.541 78.1875 -147.347c1.52832 -1.02637 2.53125 -2.67578 2.67773 -4.51172
+c1.06348 -13.3936 3.02344 -28.8271 5.40625 -44.3389c0.731445 -4.7666 -4.20215 -8.42871 -8.53516 -6.31055c-73.1377 35.7607 -125.739 111.416 -125.739 201.408c0 123.815 100.204 224 224 224zM224 288c35.3457 0 64 -28.6543 64 -64s-28.6543 -64 -64 -64
+s-64 28.6543 -64 64s28.6543 64 64 64z" />
+ <glyph glyph-name="window-maximize" unicode="&#xf2d0;"
+d="M464 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h416zM448 256v84c0 6.59961 -5.40039 12 -12 12h-360c-6.59961 0 -12 -5.40039 -12 -12v-84h384z" />
+ <glyph glyph-name="window-minimize" unicode="&#xf2d1;"
+d="M464 96c26.5 0 48 -21.5 48 -48v-32c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v32c0 26.5 21.5 48 48 48h416z" />
+ <glyph glyph-name="window-restore" unicode="&#xf2d2;"
+d="M512 400v-288c0 -26.5 -21.5 -48 -48 -48h-48v208c0 44.0996 -35.9004 80 -80 80h-208v48c0 26.5 21.5 48 48 48h288c26.5 0 48 -21.5 48 -48zM384 272v-288c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v288c0 26.5 21.5 48 48 48h288
+c26.5 0 48 -21.5 48 -48zM316 244c0 6.59961 -5.40039 12 -12 12h-228c-6.59961 0 -12 -5.40039 -12 -12v-52h252v52z" />
+ <glyph glyph-name="microchip" unicode="&#xf2db;"
+d="M416 400v-416c0 -26.5098 -21.4902 -48 -48 -48h-224c-26.5098 0 -48 21.4902 -48 48v416c0 26.5098 21.4902 48 48 48h224c26.5098 0 48 -21.4902 48 -48zM512 342v-12c0 -3.31152 -2.68848 -6 -6 -6h-18v-6c0 -3.31152 -2.68848 -6 -6 -6h-42v48h42
+c3.31152 0 6 -2.68848 6 -6v-6h18c3.31152 0 6 -2.68848 6 -6zM512 246v-12c0 -3.31152 -2.68848 -6 -6 -6h-18v-6c0 -3.31152 -2.68848 -6 -6 -6h-42v48h42c3.31152 0 6 -2.68848 6 -6v-6h18c3.31152 0 6 -2.68848 6 -6zM512 150v-12c0 -3.31152 -2.68848 -6 -6 -6h-18v-6
+c0 -3.31152 -2.68848 -6 -6 -6h-42v48h42c3.31152 0 6 -2.68848 6 -6v-6h18c3.31152 0 6 -2.68848 6 -6zM512 54v-12c0 -3.31152 -2.68848 -6 -6 -6h-18v-6c0 -3.31152 -2.68848 -6 -6 -6h-42v48h42c3.31152 0 6 -2.68848 6 -6v-6h18c3.31152 0 6 -2.68848 6 -6zM30 72h42
+v-48h-42c-3.31152 0 -6 2.68848 -6 6v6h-18c-3.31152 0 -6 2.68848 -6 6v12c0 3.31152 2.68848 6 6 6h18v6c0 3.31152 2.68848 6 6 6zM30 168h42v-48h-42c-3.31152 0 -6 2.68848 -6 6v6h-18c-3.31152 0 -6 2.68848 -6 6v12c0 3.31152 2.68848 6 6 6h18v6
+c0 3.31152 2.68848 6 6 6zM30 264h42v-48h-42c-3.31152 0 -6 2.68848 -6 6v6h-18c-3.31152 0 -6 2.68848 -6 6v12c0 3.31152 2.68848 6 6 6h18v6c0 3.31152 2.68848 6 6 6zM30 360h42v-48h-42c-3.31152 0 -6 2.68848 -6 6v6h-18c-3.31152 0 -6 2.68848 -6 6v12
+c0 3.31152 2.68848 6 6 6h18v6c0 3.31152 2.68848 6 6 6z" />
+ <glyph glyph-name="snowflake" unicode="&#xf2dc;" horiz-adv-x="448"
+d="M440.3 102.8c7.40039 -4.2002 9.90039 -13.7002 5.60059 -21l-15.5 -26.7998c-4.30078 -7.40039 -13.7002 -10 -21.1006 -5.7002l-33.7998 19.5l7 -26c2.2002 -8.2002 -2.7002 -16.7002 -10.9004 -18.8994l-14.8994 -4
+c-8.2002 -2.2002 -16.7002 2.69922 -18.9004 10.8994l-19 70.7998l-62.7998 36.2002v-77.5l53.4004 -53.7002c6.19922 -6.19922 6.19922 -16.3994 0 -22.5996l-11.3008 -11.2998c-6.19922 -6.2002 -16.3994 -6.2002 -22.5996 0l-19.7002 19.7002v-40.4004
+c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v40.2998l-19.7002 -19.7002c-6.19922 -6.19922 -16.3994 -6.19922 -22.5996 0l-11.2998 11.3008c-6.2998 6.19922 -6.2998 16.3994 0 22.5996l53.7002 53.7002v77.5l-62.8008 -36.2002l-19 -70.7998
+c-2.19922 -8.2002 -10.6992 -13.1006 -18.8994 -10.9004l-14.9004 4c-8.2002 2.2002 -13.0996 10.7002 -10.8994 18.9004l7 26l-33.8008 -19.5c-7.39941 -4.2002 -16.7998 -1.7002 -21.0996 5.7002l-15.5 26.7998c-4.2002 7.39941 -1.7002 16.7998 5.7002 21.0996
+l33.7998 19.6006l-26 7c-8.2002 2.19922 -13.0996 10.6992 -10.9004 18.8994l4 14.9004c2.2002 8.2002 10.7002 13.0996 18.9004 10.8994l70.7998 -19l63.7998 36.9004l-63.7998 36.9004l-70.7998 -19c-8.2002 -2.2002 -16.7002 2.69922 -18.9004 10.8994l-4 14.9004
+c-2.19922 8.2998 2.7002 16.7998 11 19l26 7l-33.7998 19.5c-7.39941 4.2998 -10 13.7002 -5.7002 21.0996l15.5 26.7002c4.30078 7.40039 13.7002 10 21.1006 5.7002l33.7998 -19.5l-7 26c-2.2002 8.2002 2.7002 16.7002 10.9004 18.8994l14.8994 4
+c8.2002 2.2002 16.7002 -2.69922 18.9004 -10.8994l19 -70.7998l62.7998 -36.2002v77.5l-53.7002 53.7002c-6.2998 6.19922 -6.2998 16.3994 0 22.5996l11.4004 11.2998c6.2002 6.2002 16.3994 6.2002 22.5996 0l19.7002 -19.7002v40.4004c0 8.7998 7.2002 16 16 16h32
+c8.7998 0 16 -7.2002 16 -16v-40.4004l19.7998 19.7002c6.2002 6.2002 16.4004 6.2002 22.6006 0l11.2998 -11.2998c6.2002 -6.2002 6.2002 -16.4004 0 -22.5996l-53.7002 -53.7002v-77.5l62.7998 36.2002l19 70.7998c2.2002 8.2002 10.7002 13.0996 18.9004 10.8994
+l14.8994 -4c8.2002 -2.19922 13.1006 -10.6992 10.9004 -18.8994l-7 -26l33.7998 19.5c7.40039 4.2002 16.7998 1.7002 21.1006 -5.7002l15.5 -26.7998c4.19922 -7.40039 1.69922 -16.7998 -5.7002 -21.1006l-33.7998 -19.5l26 -7
+c8.19922 -2.19922 13.0996 -10.6992 10.8994 -18.8994l-4 -14.9004c-2.2002 -8.2002 -10.7002 -13.0996 -18.8994 -10.8994l-70.8008 19l-63.7998 -36.9004l63.9004 -37l70.7998 19c8.2002 2.2002 16.7002 -2.7002 18.9004 -10.9004l4 -14.8994
+c2.19922 -8.2002 -2.7002 -16.7002 -10.9004 -18.9004l-26 -7z" />
+ <glyph glyph-name="utensil-spoon" unicode="&#xf2e5;"
+d="M480.1 416.1c55.1006 -55 34.5 -164.899 -28.5 -227.8c-49.2998 -49.2998 -110 -55.0996 -160.399 -28.7998l-192.4 -214.4c-10.3994 -11.5996 -28.5 -12.0996 -39.5 -1.09961l-51.2998 51.2998c-11.0996 11 -10.5996 29 1 39.5l214.5 192.4
+c-26.2998 50.3994 -20.5 111.1 28.7998 160.399c62.9004 63 172.8 83.6006 227.8 28.5z" />
+ <glyph glyph-name="utensils" unicode="&#xf2e7;" horiz-adv-x="416"
+d="M207.9 432.8c0.799805 -4.7002 16.0996 -94.5 16.0996 -128.8c0 -52.2998 -27.7998 -89.5996 -68.9004 -104.6l12.9004 -238.101c0.700195 -13.7002 -10.2002 -25.2998 -24 -25.2998h-64c-13.7002 0 -24.7002 11.5 -24 25.2998l12.9004 238.101
+c-41.2002 15 -68.9004 52.3994 -68.9004 104.6c0 34.4004 15.2998 124.1 16.0996 128.8c3.2002 20.2998 45.3008 20.6006 47.9004 -1.09961v-141.2c1.2998 -3.40039 15.0996 -3.2002 16 0c1.40039 25.2998 7.90039 139.2 8 141.8c3.2998 20.7998 44.7002 20.7998 47.9004 0
+c0.199219 -2.7002 6.59961 -116.5 8 -141.8c0.899414 -3.2002 14.7998 -3.40039 16 0v141.2c2.59961 21.5996 44.7998 21.3994 48 1.09961zM327.1 147.1c-156.5 122.4 -17.5996 300.9 64.9004 300.9c13.2998 0 24 -10.7998 24 -24v-464c0 -13.2998 -10.7002 -24 -24 -24h-56
+c-14 0 -25.0996 12 -23.9004 26z" />
+ <glyph glyph-name="undo-alt" unicode="&#xf2ea;"
+d="M255.545 440c136.809 0.245117 248.456 -111.193 248.455 -248.002c-0.000976562 -136.965 -111.034 -247.998 -248 -247.998c-63.9258 0 -122.202 24.1865 -166.178 63.9082c-5.11328 4.61816 -5.35449 12.5605 -0.482422 17.4326l39.6621 39.6621
+c4.46191 4.46094 11.625 4.71387 16.3682 0.552734c30.6328 -26.8779 69.5029 -41.5557 110.63 -41.5557c93.8164 0 167.236 75.9912 167.994 166.552c0.798828 95.4648 -77.0859 170.24 -169.484 169.442c-42.4287 -0.366211 -82.3662 -16.374 -113.229 -45.2734
+l41.75 -41.75c15.1191 -15.1201 4.41113 -40.9707 -16.9717 -40.9707h-134.059c-13.2549 0 -24 10.7451 -24 24v134.059c0 21.3828 25.8506 32.0908 40.9707 16.9707l35.7139 -35.7139c44.4229 42.4512 104.592 68.5654 170.86 68.6846z" />
+ <glyph glyph-name="trash-alt" unicode="&#xf2ed;" horiz-adv-x="448"
+d="M32 -16v336h384v-336c0 -26.4961 -21.5039 -48 -48 -48h-288c-26.4961 0 -48 21.5039 -48 48zM304 240v-224c0 -8.83203 7.16797 -16 16 -16s16 7.16797 16 16v224c0 8.83203 -7.16797 16 -16 16s-16 -7.16797 -16 -16zM208 240v-224c0 -8.83203 7.16797 -16 16 -16
+s16 7.16797 16 16v224c0 8.83203 -7.16797 16 -16 16s-16 -7.16797 -16 -16zM112 240v-224c0 -8.83203 7.16797 -16 16 -16s16 7.16797 16 16v224c0 8.83203 -7.16797 16 -16 16s-16 -7.16797 -16 -16zM432 416c8.83203 0 16 -7.16797 16 -16v-32
+c0 -8.83203 -7.16797 -16 -16 -16h-416c-8.83203 0 -16 7.16797 -16 16v32c0 8.83203 7.16797 16 16 16h120l9.40039 18.7002c3.58984 7.3418 13.1357 13.2998 21.3086 13.2998h0.0908203h114.3h0.0175781c8.20215 0 17.8262 -5.95801 21.4824 -13.2998l9.40039 -18.7002
+h120z" />
+ <glyph glyph-name="sync-alt" unicode="&#xf2f1;"
+d="M370.72 314.72c-31.2617 29.2725 -71.832 45.3184 -114.872 45.2803c-77.458 -0.0683594 -144.328 -53.1777 -162.791 -126.85c-1.34375 -5.36328 -6.12207 -9.15039 -11.6504 -9.15039h-57.3037c-7.49707 0 -13.1934 6.80664 -11.8066 14.1758
+c21.6367 114.9 122.518 201.824 243.704 201.824c66.4482 0 126.791 -26.1357 171.315 -68.6846l35.7148 35.7148c15.1191 15.1191 40.9697 4.41113 40.9697 -16.9717v-134.059c0 -13.2549 -10.7451 -24 -24 -24h-134.059c-21.3828 0 -32.0908 25.8506 -16.9717 40.9707z
+M32 152h134.059c21.3828 0 32.0908 -25.8506 16.9717 -40.9707l-41.75 -41.75c31.2617 -29.2734 71.835 -45.3193 114.876 -45.2803c77.418 0.0703125 144.314 53.1436 162.787 126.849c1.34375 5.36328 6.12207 9.15039 11.6504 9.15039h57.3047
+c7.49805 0 13.1934 -6.80664 11.8066 -14.1758c-21.6377 -114.898 -122.519 -201.822 -243.705 -201.822c-66.4482 0 -126.791 26.1357 -171.315 68.6846l-35.7148 -35.7148c-15.1191 -15.1191 -40.9697 -4.41113 -40.9697 16.9717v134.059c0 13.2549 10.7451 24 24 24z" />
+ <glyph glyph-name="stopwatch" unicode="&#xf2f2;" horiz-adv-x="448"
+d="M432 144c0 -114.9 -93.0996 -208 -208 -208s-208 93.0996 -208 208c0 104 76.2998 190.2 176 205.5v34.5h-28c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h120c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-28v-34.5
+c37.5 -5.7998 71.7002 -21.5996 99.7002 -44.5996l27.5 27.5c4.7002 4.69922 12.2998 4.69922 17 0l28.2998 -28.3008c4.7002 -4.69922 4.7002 -12.2998 0 -17l-29.4004 -29.3994l-0.599609 -0.600586c21.2002 -32.3994 33.5 -71.2998 33.5 -113.1zM256 108v151.5
+c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-151.5c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12z" />
+ <glyph glyph-name="sign-out-alt" unicode="&#xf2f5;" horiz-adv-x="503"
+d="M497 175l-168 -168c-15 -15 -41 -4.5 -41 17v96h-136c-13.2998 0 -24 10.7002 -24 24v96c0 13.2998 10.7002 24 24 24h136v96c0 21.4004 25.9004 32 41 17l168 -168c9.2998 -9.40039 9.2998 -24.5996 0 -34zM192 12c0 -6.59961 -5.40039 -12 -12 -12h-84
+c-53 0 -96 43 -96 96v192c0 53 43 96 96 96h84c6.59961 0 12 -5.40039 12 -12v-40c0 -6.59961 -5.40039 -12 -12 -12h-84c-17.7002 0 -32 -14.2998 -32 -32v-192c0 -17.7002 14.2998 -32 32 -32h84c6.59961 0 12 -5.40039 12 -12v-40z" />
+ <glyph glyph-name="sign-in-alt" unicode="&#xf2f6;"
+d="M416 0h-84c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h84c17.7002 0 32 14.2998 32 32v192c0 17.7002 -14.2998 32 -32 32h-84c-6.59961 0 -12 5.40039 -12 12v40c0 6.59961 5.40039 12 12 12h84c53 0 96 -43 96 -96v-192c0 -53 -43 -96 -96 -96z
+M369 201c9.2998 -9.40039 9.2998 -24.5996 0 -34l-168 -168c-15 -15 -41 -4.5 -41 17v96h-136c-13.2998 0 -24 10.7002 -24 24v96c0 13.2998 10.7002 24 24 24h136v96c0 21.5 26 32 41 17z" />
+ <glyph glyph-name="redo-alt" unicode="&#xf2f9;" horiz-adv-x="511"
+d="M256.455 440c66.2686 -0.119141 126.437 -26.2334 170.859 -68.6846l35.7148 35.7148c15.1201 15.1191 40.9707 4.41113 40.9707 -16.9717v-134.059c0 -13.2549 -10.7451 -24 -24 -24h-134.059c-21.3828 0 -32.0908 25.8506 -16.9717 40.9707l41.75 41.75
+c-30.8633 28.8994 -70.8008 44.9072 -113.229 45.2734c-92.3984 0.797852 -170.283 -73.9775 -169.484 -169.442c0.757812 -90.5605 74.1777 -166.552 167.994 -166.552c41.127 0 79.9971 14.6777 110.629 41.5557c4.74316 4.16113 11.9062 3.9082 16.3682 -0.552734
+l39.6621 -39.6621c4.87207 -4.87207 4.63086 -12.8145 -0.482422 -17.4326c-43.9746 -39.7217 -102.251 -63.9082 -166.177 -63.9082c-136.966 0 -247.999 111.033 -248 247.998c-0.000976562 136.809 111.646 248.247 248.455 248.002z" />
+ <glyph glyph-name="poo" unicode="&#xf2fe;"
+d="M451.4 78.9004c34.2998 -5.5 60.5996 -35 60.5996 -70.9004c0 -39.7998 -32.2002 -72 -72 -72h-368c-39.7998 0 -72 32.2002 -72 72c0 35.9004 26.2998 65.4004 60.5996 70.9004c-17.2998 13.0996 -28.5996 33.6992 -28.5996 57.0996c0 39.7998 32.2002 72 72 72h14.0996
+c-13.3994 11.7002 -22.0996 28.7998 -22.0996 48c0 35.2998 28.7002 64 64 64h16c44.2002 0 80 35.7998 80 80c0 17.4004 -5.7002 33.4004 -15.0996 46.5c4.89941 0.799805 9.89941 1.5 15.0996 1.5c53 0 96 -43 96 -96c0 -11.2998 -2.2998 -21.9004 -5.90039 -32h5.90039
+c35.2998 0 64 -28.7002 64 -64c0 -19.2002 -8.7002 -36.2998 -22.0996 -48h14.0996c39.7998 0 72 -32.2002 72 -72c0 -23.4004 -11.2998 -44 -28.5996 -57.0996zM192 192c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32s32 14.2998 32 32s-14.2998 32 -32 32z
+M351.5 53c2 5.2998 -2 11 -7.7998 11h-175.4c-5.7998 0 -9.7998 -5.7002 -7.7998 -11c10.5 -27.9004 58.5 -53 95.5 -53s85 25.0996 95.5 53zM320 128c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32z" />
+ <glyph glyph-name="images" unicode="&#xf302;" horiz-adv-x="576"
+d="M480 32v-16c0 -26.5098 -21.4902 -48 -48 -48h-384c-26.5098 0 -48 21.4902 -48 48v256c0 26.5098 21.4902 48 48 48h16v-208c0 -44.1123 35.8877 -80 80 -80h336zM576 112c0 -26.5098 -21.4902 -48 -48 -48h-384c-26.5098 0 -48 21.4902 -48 48v256
+c0 26.5098 21.4902 48 48 48h384c26.5098 0 48 -21.4902 48 -48v-256zM256 320c0 26.5098 -21.4902 48 -48 48s-48 -21.4902 -48 -48s21.4902 -48 48 -48s48 21.4902 48 48zM160 176v-48h352v112l-87.5137 87.5146c-4.6875 4.68652 -12.2852 4.68652 -16.9717 0
+l-135.515 -135.515l-39.5137 39.5146c-4.6875 4.68652 -12.2852 4.68652 -16.9717 0z" />
+ <glyph glyph-name="pencil-alt" unicode="&#xf303;"
+d="M497.9 305.9l-46.1006 -46.1006c-4.7002 -4.7002 -12.2998 -4.7002 -17 0l-111 111c-4.7002 4.7002 -4.7002 12.2998 0 17l46.1006 46.1006c18.6992 18.6992 49.0996 18.6992 67.8994 0l60.1006 -60.1006c18.7998 -18.7002 18.7998 -49.0996 0 -67.8994zM284.2 348.2
+c4.7002 4.7002 12.2998 4.7002 17.0996 0l111 -111c4.7002 -4.7002 4.7002 -12.2998 0 -17l-262.6 -262.601l-121.5 -21.2998c-16.4004 -2.7998 -30.7002 11.4004 -27.7998 27.7998l21.1992 121.5zM124.1 108.1c5.5 -5.5 14.3008 -5.5 19.8008 0l154 154
+c5.5 5.5 5.5 14.3008 0 19.8008s-14.3008 5.5 -19.8008 0l-154 -154c-5.5 -5.5 -5.5 -14.3008 0 -19.8008zM88 24v48h-36.2998l-11.2998 -64.5l31.0996 -31.0996l64.5 11.2998v36.2998h-48z" />
+ <glyph glyph-name="pen" unicode="&#xf304;"
+d="M290.74 354.76l128.02 -128.02l-277.99 -277.99l-114.14 -12.5996c-15.2803 -1.69043 -28.1895 11.2295 -26.4902 26.5098l12.7002 114.22zM497.94 373.82c18.75 -18.75 18.75 -49.1504 0 -67.9102l-56.5508 -56.5498l-128.02 128.02l56.5498 56.5498
+c18.75 18.75 49.1602 18.75 67.9102 0z" />
+ <glyph glyph-name="pen-alt" unicode="&#xf305;"
+d="M497.94 373.83c18.75 -18.7598 18.75 -49.1602 0 -67.9102l-56.5508 -56.5498l-128.02 128.02l56.5498 56.5508c18.75 18.75 49.1602 18.75 67.9102 0zM251.14 394.36l84.8506 -84.8506l82.7695 -82.7695l-196.79 -196.79
+c-44.8223 -44.8203 -132.335 -86.8428 -195.34 -93.7998c-15.2803 -1.69043 -28.1895 11.2295 -26.4902 26.5098l0.0302734 0.229492c7.00195 62.9189 49.0156 150.315 93.7803 195.08l151.56 151.55l-22.6299 22.6201l-101.82 -101.819
+c-6.25 -6.25 -16.3799 -6.25 -22.6299 0l-22.6299 22.6299c-6.25 6.24023 -6.25 16.3701 0 22.6201l118.78 118.79c15.6201 15.6201 40.9395 15.6201 56.5596 0z" />
+ <glyph glyph-name="long-arrow-alt-down" unicode="&#xf309;" horiz-adv-x="255"
+d="M168 102.059h46.0576c21.3828 0 32.0908 -25.8516 16.9717 -40.9707l-86.0596 -86.0586c-9.37207 -9.37305 -24.5674 -9.37305 -33.9404 0l-86.0596 86.0586c-15.1191 15.1201 -4.41113 40.9707 16.9717 40.9707h46.0586v301.941c0 6.62695 5.37305 12 12 12h56
+c6.62695 0 12 -5.37305 12 -12v-301.941z" />
+ <glyph glyph-name="long-arrow-alt-left" unicode="&#xf30a;" horiz-adv-x="448"
+d="M134.059 152v-46.0576c0 -21.3828 -25.8516 -32.0908 -40.9707 -16.9717l-86.0586 86.0596c-9.37305 9.37207 -9.37305 24.5674 0 33.9404l86.0586 86.0596c15.1201 15.1191 40.9707 4.41113 40.9707 -16.9717v-46.0586h301.941c6.62695 0 12 -5.37305 12 -12v-56
+c0 -6.62695 -5.37305 -12 -12 -12h-301.941z" />
+ <glyph glyph-name="long-arrow-alt-right" unicode="&#xf30b;" horiz-adv-x="448"
+d="M313.941 232v46.0576c0 21.3828 25.8516 32.0908 40.9707 16.9717l86.0586 -86.0596c9.37305 -9.37207 9.37305 -24.5674 0 -33.9404l-86.0586 -86.0596c-15.1201 -15.1191 -40.9707 -4.41113 -40.9707 16.9717v46.0586h-301.941c-6.62695 0 -12 5.37305 -12 12v56
+c0 6.62695 5.37305 12 12 12h301.941z" />
+ <glyph glyph-name="long-arrow-alt-up" unicode="&#xf30c;" horiz-adv-x="256"
+d="M88 281.941h-46.0576c-21.3828 0 -32.0908 25.8516 -16.9717 40.9707l86.0596 86.0586c9.37207 9.37305 24.5674 9.37305 33.9404 0l86.0596 -86.0586c15.1191 -15.1201 4.41113 -40.9707 -16.9717 -40.9707h-46.0586v-301.941c0 -6.62695 -5.37305 -12 -12 -12h-56
+c-6.62695 0 -12 5.37305 -12 12v301.941z" />
+ <glyph glyph-name="expand-arrows-alt" unicode="&#xf31e;" horiz-adv-x="448"
+d="M448.1 104v-112c0 -13.2998 -10.6992 -24 -24 -24h-112c-21.3994 0 -32.0996 25.9004 -17 41l36.2002 36.2002l-107.3 107.2l-107.2 -107.301l36.2002 -36.0996c15.0996 -15.0996 4.40039 -41 -17 -41h-112c-13.2998 0 -24 10.7002 -24 24v112
+c0 21.4004 25.9004 32.0996 41 17l36.2002 -36.2002l107.3 107.2l-107.3 107.3l-36.2002 -36.2998c-15.0996 -15.0996 -41 -4.40039 -41 17v112c0 13.2998 10.7002 24 24 24h112c21.4004 0 32.0996 -25.9004 17 -41l-36.2002 -36.2002l107.2 -107.2l107.3 107.301
+l-36.2002 36.0996c-15.0996 15.0996 -4.39941 41 17 41h112c13.3008 0 24 -10.7002 24 -24v-112c0 -21.4004 -25.8994 -32.0996 -41 -17l-36.1992 36.2002l-107.301 -107.2l107.301 -107.3l36.1992 36.2002c15.1006 15.1992 41 4.5 41 -16.9004z" />
+ <glyph glyph-name="clipboard" unicode="&#xf328;" horiz-adv-x="384"
+d="M384 336v-352c0 -26.5098 -21.4902 -48 -48 -48h-288c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h80c0 35.29 28.71 64 64 64s64 -28.71 64 -64h80c26.5098 0 48 -21.4902 48 -48zM192 408c-13.2549 0 -24 -10.7451 -24 -24s10.7451 -24 24 -24
+s24 10.7451 24 24s-10.7451 24 -24 24zM288 294v20c0 3.31152 -2.68848 6 -6 6h-180c-3.31152 0 -6 -2.68848 -6 -6v-20c0 -3.31152 2.68848 -6 6 -6h180c3.31152 0 6 2.68848 6 6z" />
+ <glyph glyph-name="arrows-alt-h" unicode="&#xf337;"
+d="M377.941 278.059c0 21.3828 25.8516 32.0908 40.9707 16.9707l86.0586 -86.0596c9.37305 -9.37305 9.37305 -24.5674 0 -33.9404l-86.0586 -86.0596c-15.1201 -15.1191 -40.9707 -4.41113 -40.9707 16.9717v46.0586h-243.883v-46.0576
+c0 -21.3828 -25.8516 -32.0908 -40.9707 -16.9717l-86.0586 86.0596c-9.37305 9.37305 -9.37305 24.5674 0 33.9404l86.0586 86.0596c15.1201 15.1191 40.9707 4.41113 40.9707 -16.9717v-46.0586h243.883v46.0586z" />
+ <glyph glyph-name="arrows-alt-v" unicode="&#xf338;" horiz-adv-x="256"
+d="M214.059 70.0586c21.3828 0 32.0908 -25.8516 16.9707 -40.9707l-86.0596 -86.0586c-9.37305 -9.37305 -24.5674 -9.37305 -33.9404 0l-86.0596 86.0586c-15.1191 15.1201 -4.41113 40.9707 16.9717 40.9707h46.0586v243.883h-46.0576
+c-21.3828 0 -32.0908 25.8516 -16.9717 40.9707l86.0596 86.0586c9.37305 9.37305 24.5674 9.37305 33.9404 0l86.0596 -86.0586c15.1191 -15.1201 4.41113 -40.9707 -16.9717 -40.9707h-46.0586v-243.883h46.0586z" />
+ <glyph glyph-name="arrow-alt-circle-down" unicode="&#xf358;"
+d="M504 192c0 -137 -111 -248 -248 -248s-248 111 -248 248s111 248 248 248s248 -111 248 -248zM212 308v-116h-70.9004c-10.6992 0 -16.0996 -13 -8.5 -20.5l114.9 -114.3c4.7002 -4.7002 12.2002 -4.7002 16.9004 0l114.899 114.3c7.60059 7.59961 2.2002 20.5 -8.5 20.5
+h-70.7998v116c0 6.59961 -5.40039 12 -12 12h-64c-6.59961 0 -12 -5.40039 -12 -12z" />
+ <glyph glyph-name="arrow-alt-circle-left" unicode="&#xf359;"
+d="M256 -56c-137 0 -248 111 -248 248s111 248 248 248s248 -111 248 -248s-111 -248 -248 -248zM372 236h-116v70.9004c0 10.6992 -13 16.0996 -20.5 8.5l-114.3 -114.9c-4.7002 -4.7002 -4.7002 -12.2002 0 -16.9004l114.3 -114.899
+c7.59961 -7.60059 20.5 -2.2002 20.5 8.5v70.7998h116c6.59961 0 12 5.40039 12 12v64c0 6.59961 -5.40039 12 -12 12z" />
+ <glyph glyph-name="arrow-alt-circle-right" unicode="&#xf35a;"
+d="M256 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM140 148h116v-70.9004c0 -10.6992 13 -16.0996 20.5 -8.5l114.3 114.9c4.7002 4.7002 4.7002 12.2002 0 16.9004l-114.3 115c-7.59961 7.59961 -20.5 2.19922 -20.5 -8.5
+v-70.9004h-116c-6.59961 0 -12 -5.40039 -12 -12v-64c0 -6.59961 5.40039 -12 12 -12z" />
+ <glyph glyph-name="arrow-alt-circle-up" unicode="&#xf35b;"
+d="M8 192c0 137 111 248 248 248s248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248zM300 76v116h70.9004c10.6992 0 16.0996 13 8.5 20.5l-114.9 114.3c-4.7002 4.7002 -12.2002 4.7002 -16.9004 0l-115 -114.3c-7.59961 -7.59961 -2.19922 -20.5 8.5 -20.5
+h70.9004v-116c0 -6.59961 5.40039 -12 12 -12h64c6.59961 0 12 5.40039 12 12z" />
+ <glyph glyph-name="external-link-alt" unicode="&#xf35d;" horiz-adv-x="576"
+d="M576 424v-127.984c0 -21.4609 -25.96 -31.9795 -40.9707 -16.9707l-35.707 35.709l-243.523 -243.522c-9.37305 -9.37305 -24.5674 -9.37305 -33.9404 0l-22.627 22.627c-9.37305 9.37305 -9.37305 24.5684 0 33.9404l243.524 243.525l-35.7031 35.7051
+c-15.0703 15.0703 -4.39648 40.9707 16.9717 40.9707h127.976c13.2549 0 24 -10.7451 24 -24zM407.029 177.206c15.1191 15.1201 40.9707 4.41211 40.9707 -16.9697v-176.236c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352
+c0 26.5098 21.4902 48 48 48h296c21.3809 0 32.0889 -25.8506 16.9697 -40.9707l-16 -16c-3.87988 -3.87988 -11.4824 -7.0293 -16.9697 -7.0293h-264v-320h320v144.235v0.000976562c0 5.4873 3.14941 13.0898 7.0293 16.9697z" />
+ <glyph glyph-name="external-link-square-alt" unicode="&#xf360;" horiz-adv-x="448"
+d="M448 368v-352c0 -26.5098 -21.4902 -48 -48 -48h-352c-26.5098 0 -48 21.4902 -48 48v352c0 26.5098 21.4902 48 48 48h352c26.5098 0 48 -21.4902 48 -48zM360 352h-111.971c-21.3135 0 -32.0801 -25.8613 -16.9717 -40.9707l31.9844 -31.9873l-195.527 -195.527
+c-4.68555 -4.68555 -4.68555 -12.2832 0 -16.9707l31.0293 -31.0293c4.6875 -4.68555 12.2852 -4.68555 16.9707 0l195.526 195.526l31.9883 -31.9912c15.0283 -15.0264 40.9707 -4.47461 40.9707 16.9717v111.979c0 13.2549 -10.7451 24 -24 24z" />
+ <glyph glyph-name="exchange-alt" unicode="&#xf362;"
+d="M0 280v16c0 13.2549 10.7451 24 24 24h360v48c0 21.3672 25.8994 32.042 40.9707 16.9707l80 -80c9.37207 -9.37305 9.37207 -24.5684 0 -33.9404l-80 -80c-15.0146 -15.0127 -40.9707 -4.48633 -40.9707 16.9697v48h-360c-13.2549 0 -24 10.7451 -24 24zM488 128
+c13.2549 0 24 -10.7451 24 -24v-16c0 -13.2549 -10.7451 -24 -24 -24h-360v-48c0 -21.4365 -25.9434 -31.9971 -40.9707 -16.9697l-80 80c-9.37207 9.37207 -9.37207 24.5674 0 33.9404l80 80c15.1084 15.1094 40.9707 4.34375 40.9707 -16.9707v-48h360z" />
+ <glyph glyph-name="cloud-download-alt" unicode="&#xf381;" horiz-adv-x="640"
+d="M537.6 221.4c58.4004 -11.8008 102.4 -63.5 102.4 -125.4c0 -70.7002 -57.2998 -128 -128 -128h-368c-79.5 0 -144 64.5 -144 144c0 62.7998 40.2002 116.2 96.2002 135.9c-0.100586 2.69922 -0.200195 5.39941 -0.200195 8.09961c0 88.4004 71.5996 160 160 160
+c59.2998 0 111 -32.2002 138.7 -80.2002c15.2002 10.2002 33.5996 16.2002 53.2998 16.2002c53 0 96 -43 96 -96c0 -12.2002 -2.2998 -23.9004 -6.40039 -34.5996zM404.7 132.7c10.0996 10.0996 2.89941 27.2998 -11.2998 27.2998h-65.4004v112c0 8.7998 -7.2002 16 -16 16
+h-48c-8.7998 0 -16 -7.2002 -16 -16v-112h-65.4004c-14.1992 0 -21.3994 -17.2002 -11.2998 -27.2998l105.4 -105.4c6.2002 -6.2002 16.3994 -6.2002 22.5996 0z" />
+ <glyph glyph-name="cloud-upload-alt" unicode="&#xf382;" horiz-adv-x="640"
+d="M537.6 221.4c58.4004 -11.8008 102.4 -63.5 102.4 -125.4c0 -70.7002 -57.2998 -128 -128 -128h-368c-79.5 0 -144 64.5 -144 144c0 62.7998 40.2002 116.2 96.2002 135.9c-0.100586 2.69922 -0.200195 5.39941 -0.200195 8.09961c0 88.4004 71.5996 160 160 160
+c59.2998 0 111 -32.2002 138.7 -80.2002c15.2002 10.2002 33.5996 16.2002 53.2998 16.2002c53 0 96 -43 96 -96c0 -12.2002 -2.2998 -23.9004 -6.40039 -34.5996zM393.4 160c14.1992 0 21.3994 17.2002 11.2998 27.2998l-105.4 105.4
+c-6.2002 6.2002 -16.3994 6.2002 -22.5996 0l-105.4 -105.4c-10.0996 -10.0996 -3 -27.2998 11.2998 -27.2998h65.4004v-112c0 -8.7998 7.2002 -16 16 -16h48c8.7998 0 16 7.2002 16 16v112h65.4004z" />
+ <glyph glyph-name="gem" unicode="&#xf3a5;" horiz-adv-x="576"
+d="M485.5 448l90.5 -160h-101.1l-69.2002 160h79.7998zM357.5 448l69.2002 -160h-277.4l69.2002 160h139zM90.5 448h79.7998l-69.2002 -160h-101.1zM0 256h100.7l123 -251.7c1.5 -3.09961 -2.7002 -5.89941 -5 -3.2998zM148.2 256h279.6l-137 -318.2
+c-1 -2.39941 -4.5 -2.39941 -5.5 0zM352.3 4.2998l123 251.7h100.7l-218.7 -254.9c-2.2998 -2.69922 -6.5 0.100586 -5 3.2002z" />
+ <glyph glyph-name="level-down-alt" unicode="&#xf3be;" horiz-adv-x="320"
+d="M313.553 55.6689l-103.966 -112.003c-9.48535 -10.2139 -25.6758 -10.2295 -35.1738 0l-103.975 112.003c-14.2061 15.2998 -3.37695 40.3311 17.5869 40.3311h63.9746v272h-83.9756h-0.00195312c-2.74316 0 -6.54395 1.5752 -8.4834 3.51465l-56 56
+c-7.56055 7.55957 -2.20605 20.4854 8.48535 20.4854h195.976c13.2549 0 24 -10.7451 24 -24v-328h63.9658c20.8779 0 31.8516 -24.9688 17.5869 -40.3311z" />
+ <glyph glyph-name="level-up-alt" unicode="&#xf3bf;" horiz-adv-x="320"
+d="M313.553 328.331c14.2646 -15.3623 3.29102 -40.3311 -17.5869 -40.3311h-63.9658v-328c0 -13.2549 -10.7451 -24 -24 -24h-195.976c-10.6914 0 -16.0459 12.9258 -8.48535 20.4854l56 56c1.93945 1.93945 5.74023 3.51465 8.4834 3.51465h0.00195312h83.9756v272
+h-63.9746c-20.9639 0 -31.793 25.0312 -17.5869 40.3311l103.975 112.003c9.49805 10.2295 25.6885 10.2139 35.1738 0z" />
+ <glyph glyph-name="lock-open" unicode="&#xf3c1;" horiz-adv-x="576"
+d="M423.5 448c84 0.299805 152.5 -68 152.5 -152v-80c0 -13.2998 -10.7002 -24 -24 -24h-32c-13.2998 0 -24 10.7002 -24 24v80c0 39.9004 -32.7002 72.4004 -72.7002 72c-39.5996 -0.400391 -71.2998 -33.2998 -71.2998 -72.9004v-71.0996h48c26.5 0 48 -21.5 48 -48v-192
+c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v192c0 26.5 21.5 48 48 48h224v70.5c0 84 67.5 153.2 151.5 153.5z" />
+ <glyph glyph-name="map-marker-alt" unicode="&#xf3c5;" horiz-adv-x="384"
+d="M172.268 -53.6699c-145.298 210.639 -172.268 232.257 -172.268 309.67c0 106.039 85.9609 192 192 192s192 -85.9609 192 -192c0 -77.4131 -26.9697 -99.0312 -172.268 -309.67c-9.53516 -13.7744 -29.9307 -13.7734 -39.4648 0zM192 176c44.1826 0 80 35.8174 80 80
+s-35.8174 80 -80 80s-80 -35.8174 -80 -80s35.8174 -80 80 -80z" />
+ <glyph glyph-name="microphone-alt" unicode="&#xf3c9;" horiz-adv-x="352"
+d="M336 256c8.83984 0 16 -7.16016 16 -16v-48c0 -88.9004 -66.29 -162.47 -152 -174.23v-33.7695h56c8.83984 0 16 -7.16016 16 -16v-16c0 -8.83984 -7.16016 -16 -16 -16h-160c-8.83984 0 -16 7.16016 -16 16v16c0 8.83984 7.16016 16 16 16h56v34.1504
+c-88.0303 12.1396 -152 92.0498 -152 181.689v40.1602c0 8.83984 7.16016 16 16 16h16c8.83984 0 16 -7.16016 16 -16v-42.2998c0 -66.8105 48.71 -126.59 115.21 -133.08c76.2998 -7.44043 140.79 52.5801 140.79 127.38v48c0 8.83984 7.16016 16 16 16h16zM176 96
+c-53.0195 0 -96 42.9805 -96 96v160c0 53.0195 42.9805 96 96 96s96 -42.9805 96 -96h-85.3301c-5.88965 0 -10.6699 -3.58008 -10.6699 -8v-16c0 -4.41992 4.78027 -8 10.6699 -8h85.3301v-32h-85.3301c-5.88965 0 -10.6699 -3.58008 -10.6699 -8v-16
+c0 -4.41992 4.78027 -8 10.6699 -8h85.3301v-32h-85.3301c-5.88965 0 -10.6699 -3.58008 -10.6699 -8v-16c0 -4.41992 4.78027 -8 10.6699 -8h85.3301c0 -53.0195 -42.9805 -96 -96 -96z" />
+ <glyph glyph-name="mobile-alt" unicode="&#xf3cd;" horiz-adv-x="320"
+d="M272 448c26.5 0 48 -21.5 48 -48v-416c0 -26.5 -21.5 -48 -48 -48h-224c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48h224zM160 -32c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM272 76v312
+c0 6.59961 -5.40039 12 -12 12h-200c-6.59961 0 -12 -5.40039 -12 -12v-312c0 -6.59961 5.40039 -12 12 -12h200c6.59961 0 12 5.40039 12 12z" />
+ <glyph glyph-name="money-bill-alt" unicode="&#xf3d1;" horiz-adv-x="640"
+d="M352 160c4.41992 0 8 -3.58008 8 -8v-16c0 -4.41992 -3.58008 -8 -8 -8h-64c-4.41992 0 -8 3.58008 -8 8v16c0 4.41992 3.58008 8 8 8h16v55.4404l-0.469727 -0.310547c-1.1123 -0.741211 -3.09961 -1.34375 -4.43652 -1.34375
+c-2.36328 0 -5.34375 1.59668 -6.65332 3.56348l-8.88086 13.3105c-0.741211 1.1123 -1.34375 3.09961 -1.34375 4.43555c0 2.36328 1.59668 5.34473 3.56445 6.6543l15.3301 10.2197c3.93945 2.62988 8.56934 4.03027 13.3096 4.03027h13.5801c4.41992 0 8 -3.58008 8 -8
+v-88h16zM608 384c17.6699 0 32 -14.3301 32 -32v-320c0 -17.6699 -14.3301 -32 -32 -32h-576c-17.6699 0 -32 14.3301 -32 32v320c0 17.6699 14.3301 32 32 32h576zM48 48h64c0 35.3496 -28.6504 64 -64 64v-64zM48 272c35.3496 0 64 28.6504 64 64h-64v-64zM320 80
+c53 0 96 50.1299 96 112c0 61.8604 -42.9805 112 -96 112s-96 -50.1396 -96 -112c0 -61.8496 42.9805 -112 96 -112zM592 48v64c-35.3496 0 -64 -28.6504 -64 -64h64zM592 272v64h-64c0 -35.3496 28.6504 -64 64 -64z" />
+ <glyph glyph-name="phone-slash" unicode="&#xf3dd;" horiz-adv-x="640"
+d="M268.2 66.5996c11.5 5.40039 22.7002 11.6006 33.5996 18.1006l80 -61.7998c-76 -54.5 -169 -86.9004 -269.7 -86.9004c-11.2998 0 -20.8994 7.7998 -23.3994 18.5996l-24 104c-2.60059 11.4004 3.2002 22.9004 13.8994 27.5l112 48
+c9.80078 4.2002 21.2002 1.40039 28 -6.89941zM633.8 -10.0996c7 -5.40039 8.2998 -15.5 2.90039 -22.3008l-19.6006 -25.2998c-5.5 -7 -15.5 -8.2002 -22.5 -2.7998l-588.399 454.7c-7 5.39941 -8.2002 15.3994 -2.7998 22.3994l19.5996 25.2002
+c5.5 7 15.5 8.2002 22.5 2.7998l353.9 -273.5c18.0996 22.5 33.7998 46.7002 46 72.8008l-60.6006 49.5996c-8.2998 6.7002 -11.0996 18.2002 -6.89941 28l48 112c4.59961 10.5996 16.1992 16.5 27.5 13.9004l104 -24c10.8994 -2.5 18.5996 -12.2002 18.5996 -23.4004
+c0 -108.9 -38.0996 -208.4 -100.9 -287.5z" />
+ <glyph glyph-name="portrait" unicode="&#xf3e0;" horiz-adv-x="384"
+d="M336 448c26.5 0 48 -21.5 48 -48v-416c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48h288zM192 320c-35.2998 0 -64 -28.7002 -64 -64s28.7002 -64 64 -64s64 28.7002 64 64s-28.7002 64 -64 64zM304 83.2002v19.2002
+c0 31.7998 -30.0996 57.5996 -67.2002 57.5996h-5c-12.2002 -5.09961 -25.7002 -8 -39.7998 -8s-27.5 2.90039 -39.7998 8h-5c-37.1006 0 -67.2002 -25.7998 -67.2002 -57.5996v-19.2002c0 -10.6006 10 -19.2002 22.4004 -19.2002h179.199
+c12.4004 0 22.4004 8.59961 22.4004 19.2002z" />
+ <glyph glyph-name="reply" unicode="&#xf3e5;"
+d="M8.30859 258.164l176.005 151.985c15.4053 13.3047 39.6865 2.50391 39.6865 -18.1641v-80.0537c160.629 -1.83887 288 -34.0312 288 -186.258c0 -61.4404 -39.5811 -122.309 -83.333 -154.132c-13.6533 -9.93066 -33.1113 2.5332 -28.0771 18.6309
+c45.3438 145.012 -21.5068 183.51 -176.59 185.742v-87.915c0 -20.7002 -24.2998 -31.4531 -39.6865 -18.1641l-176.005 152c-11.0703 9.5625 -11.0859 26.7529 0 36.3281z" />
+ <glyph glyph-name="shield-alt" unicode="&#xf3ed;"
+d="M466.5 364.3c17.7998 -7.39941 29.5 -24.8994 29.5 -44.2998c0 -221.3 -135.9 -344.6 -221.6 -380.3c-11.8008 -4.90039 -25.1006 -4.90039 -36.9004 0c-107 44.5996 -221.5 181.8 -221.5 380.3c0 19.4004 11.7002 36.9004 29.5996 44.2998l192 80
+c4.89062 2.0293 13.1562 3.6748 18.4502 3.6748c5.29492 0 13.5596 -1.64551 18.4502 -3.6748zM256.1 1.7002c93.7002 46.5996 172.5 156.3 175.801 307.7l-175.9 73.2998z" />
+ <glyph glyph-name="tablet-alt" unicode="&#xf3fa;" horiz-adv-x="448"
+d="M400 448c26.5 0 48 -21.5 48 -48v-416c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v416c0 26.5 21.5 48 48 48h352zM224 -32c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM400 76v312
+c0 6.59961 -5.40039 12 -12 12h-328c-6.59961 0 -12 -5.40039 -12 -12v-312c0 -6.59961 5.40039 -12 12 -12h328c6.59961 0 12 5.40039 12 12z" />
+ <glyph glyph-name="tachometer-alt" unicode="&#xf3fd;" horiz-adv-x="576"
+d="M288 416c159.06 0 288 -128.94 288 -288c0 -52.7998 -14.25 -102.26 -39.0596 -144.8c-5.61035 -9.62012 -16.3008 -15.2002 -27.4404 -15.2002h-443c-11.1396 0 -21.8301 5.58008 -27.4404 15.2002c-24.8096 42.54 -39.0596 92 -39.0596 144.8
+c0 159.06 128.94 288 288 288zM288 352c-17.6699 0 -31.9902 -14.3301 -31.9902 -32s14.3301 -32 32 -32c6.66992 0 12.5098 2.51953 17.6406 6.00977l9.21973 27.6699c0.80957 2.44043 2.33984 4.41016 3.4502 6.66992c-3.74023 13.5205 -15.6104 23.6504 -30.3203 23.6504
+zM96 64c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM144 224c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM390.77 296.41c4.18066 12.5703 -2.59961 26.1699 -15.1699 30.3594
+c-12.6299 4.28027 -26.1895 -2.60938 -30.3594 -15.1699l-61.3398 -184.01c-33.4004 -2.16016 -59.9004 -29.6494 -59.9004 -63.5898c0 -11.7197 3.37988 -22.5498 8.87988 -32h110.24c5.5 9.4502 8.87988 20.2803 8.87988 32c0 19.46 -8.87012 36.6699 -22.5596 48.4102z
+M405.43 239.21c5.68066 -8.94043 15.1904 -15.21 26.5703 -15.2197c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32c-3.91992 0 -7.58008 -0.94043 -11.0498 -2.23047zM480 64c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z
+" />
+ <glyph glyph-name="ticket-alt" unicode="&#xf3ff;" horiz-adv-x="576"
+d="M128 288h320v-192h-320v192zM528 192c0 -26.5098 21.4902 -48 48 -48v-96c0 -26.5098 -21.4902 -48 -48 -48h-480c-26.5098 0 -48 21.4902 -48 48v96c26.5098 0 48 21.4902 48 48s-21.4902 48 -48 48v96c0 26.5098 21.4902 48 48 48h480c26.5098 0 48 -21.4902 48 -48
+v-96c-26.5098 0 -48 -21.4902 -48 -48zM480 296c0 13.2549 -10.7451 24 -24 24h-336c-13.2549 0 -24 -10.7451 -24 -24v-208c0 -13.2549 10.7451 -24 24 -24h336c13.2549 0 24 10.7451 24 24v208z" />
+ <glyph glyph-name="user-alt" unicode="&#xf406;"
+d="M256 160c-79.5 0 -144 64.5 -144 144s64.5 144 144 144s144 -64.5 144 -144s-64.5 -144 -144 -144zM384 128c70.7002 0 128 -57.2998 128 -128v-16c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v16c0 70.7002 57.2998 128 128 128h55.0996
+c22.3008 -10.2002 46.9004 -16 72.9004 -16s50.7002 5.7998 72.9004 16h55.0996z" />
+ <glyph glyph-name="window-close" unicode="&#xf410;"
+d="M464 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h416zM380.4 125.5l-67.1006 66.5l67.1006 66.5c4.7998 4.7998 4.7998 12.5996 0 17.4004l-40.5 40.5
+c-4.80078 4.7998 -12.6006 4.7998 -17.4004 0l-66.5 -67.1006l-66.5 67.1006c-4.7998 4.7998 -12.5996 4.7998 -17.4004 0l-40.5 -40.5c-4.7998 -4.80078 -4.7998 -12.6006 0 -17.4004l67.1006 -66.5l-67.1006 -66.5c-4.7998 -4.7998 -4.7998 -12.5996 0 -17.4004
+l40.5 -40.5c4.80078 -4.7998 12.6006 -4.7998 17.4004 0l66.5 67.1006l66.5 -67.1006c4.7998 -4.7998 12.5996 -4.7998 17.4004 0l40.5 40.5c4.7998 4.80078 4.7998 12.6006 0 17.4004z" />
+ <glyph glyph-name="baseball-ball" unicode="&#xf433;" horiz-adv-x="495"
+d="M368.5 84.0996c12.9004 -26.6992 30.2998 -50.1992 51.4004 -70.5996c-44.6006 -43 -105.101 -69.5 -171.9 -69.5c-66.9004 0 -127.5 26.5996 -172 69.7002c21.2002 20.3994 38.5996 44 51.5 70.7002l-28.7998 13.8994c-11.1006 -23 -26.1006 -43.2998 -44.2998 -61
+c-34 42.4004 -54.4004 96.1006 -54.4004 154.7s20.4004 112.3 54.4004 154.8c17.7998 -17.2998 32.5 -37.0996 43.5 -59.3994l28.6992 14.0996c-12.7998 25.9004 -30 48.9004 -50.6992 68.7998c44.5996 43.1006 105.199 69.7002 172.1 69.7002
+c67 0 127.6 -26.7002 172.2 -69.7998c-20.7998 -20 -38 -43 -50.7998 -69l28.6992 -14.1006c11 22.4004 25.8008 42.2002 43.6006 59.5c33.7998 -42.3994 54.2002 -96.0996 54.2002 -154.6c0 -58.5996 -20.5 -112.4 -54.5 -154.9c-18.1006 17.7002 -33 38 -44.1006 60.9004z
+M140.2 116.1c17.2998 53.9004 14.2998 108.2 -0.700195 153.801l-30.4004 -10c13.3008 -40.2002 15.5 -87.6006 0.600586 -134zM356.5 269.5c-15 -45.5 -18 -99.7998 -0.700195 -153.8l30.5 9.7998c-14.8994 46.5 -12.5996 93.9004 0.600586 134z" />
+ <glyph glyph-name="basketball-ball" unicode="&#xf434;" horiz-adv-x="496"
+d="M212.3 437.7c-1.5 -50 -17 -95.4004 -44.7998 -131.2l-77.4004 77.4004c36 29.6992 78.4004 47.5 122.2 53.7998zM248 226l-46.2998 46.2998c37.2002 45.4004 57.5 103.8 58.7002 167.7c51.8994 -2.59961 103.1 -21.0996 145.5 -56.0996zM56.0996 349.9l77.4004 -77.4004
+c-35.7002 -27.7998 -81.2002 -43.2998 -131.2 -44.7998c6.2998 43.7998 24.1006 86.2002 53.7998 122.2zM328.3 145.7l-46.2998 46.2998l157.9 157.9c35 -42.4004 53.5 -93.6006 56.0996 -145.5c-64 -1.30078 -122.4 -21.6006 -167.7 -58.7002zM248 158l46.2998 -46.2998
+c-37.0996 -45.2998 -57.3994 -103.7 -58.7002 -167.7c-51.8994 2.59961 -103.1 21.2002 -145.5 56.0996zM439.9 34.0996l-77.4004 77.4004c35.7002 27.7002 81.0996 43.2002 131.2 44.7998c-6.2998 -43.7998 -24.1006 -86.2002 -53.7998 -122.2zM167.7 238.3
+l46.2998 -46.2998l-157.9 -157.9c-35 42.4004 -53.5 93.6006 -56.0996 145.5c63.9004 1.10059 122.3 21.5 167.7 58.7002zM283.7 -53.7002c1.59961 50 17.0996 95.5 44.7998 131.2l77.4004 -77.4004c-36 -29.6992 -78.4004 -47.5 -122.2 -53.7998z" />
+ <glyph glyph-name="bowling-ball" unicode="&#xf436;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM120 256c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM184 352c0 -17.7002 14.2998 -32 32 -32s32 14.2998 32 32
+s-14.2998 32 -32 32s-32 -14.2998 -32 -32zM232 208c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32z" />
+ <glyph glyph-name="chess" unicode="&#xf439;" horiz-adv-x="514"
+d="M76.0996 237.8l-34.5996 95.1006c-2.7998 7.7998 3 16.0996 11.2998 16.0996h51.2002v30.2002h-21.5996c-3.30078 0 -6 2.7002 -6 6v22.3994c0 3.30078 2.69922 6 6 6h28.3994v28.4004c0 3.2998 2.7002 6 6 6h22.4004c3.2998 0 6 -2.7002 6 -6v-28.4004h28.3994
+c3.30078 0 6 -2.69922 6 -6v-22.3994c0 -3.2998 -2.69922 -6 -6 -6h-21.5996v-30.2002h51.0996c8.30078 0 14.1006 -8.2998 11.3008 -16.0996l-34.6006 -95.1006h-103.7zM313.3 147.1c-1.09961 0.700195 -1.7002 1.90039 -1.89941 3.10059v69.8994
+c0 2.2002 1.7998 3.90039 3.89941 3.90039h27.7002c2.2002 0 3.90039 -1.7998 3.90039 -3.90039v-31.5h22.6992v31.5c0 2.2002 1.80078 3.90039 3.90039 3.90039h52.9004c2.19922 0 3.89941 -1.7998 3.89941 -3.90039v-31.5h22.7002v31.5
+c0 2.2002 1.7998 3.90039 3.90039 3.90039h27.6992c2.2002 0 3.90039 -1.7998 3.90039 -3.90039v-69.8994c0 -1.2998 -0.599609 -2.5 -1.7002 -3.2002l-24.8994 -16.9004c-0.100586 -18.1992 0.199219 -64 11.8994 -114.5h-147.5c11.7002 50.6006 12 96.4004 11.9004 114.5z
+M384.8 95.0996h0.100586v-30.2998h30.2998v30.2998c0 8.40039 -6.7998 15.2002 -15.2002 15.2002s-15.2002 -6.7998 -15.2002 -15.2002zM504.6 -22.0996c1 -0.700195 1.5 -1.80078 1.5 -2.90039v-35c0 -2.2002 -1.7998 -3.90039 -3.89941 -3.90039h-204.5
+c-2.2002 0 -3.90039 1.80078 -3.90039 3.90039v35c0 1.09961 0.600586 2.2998 1.5 3l13.7002 10.5996v15.6006c0 2.2002 1.7998 3.89941 3.90039 3.89941h174.199c2.2002 0 3.90039 -1.7998 3.90039 -3.89941v-15.6006zM244.8 -11.2998
+c1.7998 -1.10059 2.7998 -3 2.7998 -5.10059v-41.5996c0 -3.2998 -2.69922 -6 -6 -6h-227.199c-3.30078 0 -6 2.7002 -6 6v41.5996c0 2.10059 1.09961 4 2.7998 5.10059l22.0996 14.2002v28.0996c0 3.2998 2.7002 6 6 6h177.4c3.2998 0 6 -2.7002 6 -6v-28.0996z
+M199.8 197.6h-25.7998c0 -31.6992 -2.2002 -96.1992 17.4004 -153.199h-126.801c19.9004 58.0996 17.4004 124.399 17.4004 153.199h-25.7998c-3.2998 0 -6 2.7002 -6 6v20.8008c0 3.2998 2.7002 6 6 6h143.6c3.2998 0 6 -2.7002 6 -6v-20.8008c0 -3.2998 -2.7002 -6 -6 -6z
+" />
+ <glyph glyph-name="chess-bishop" unicode="&#xf43a;" horiz-adv-x="320"
+d="M123.158 370.119c-15.7891 5.35059 -27.1582 20.2842 -27.1582 37.8809c0 22.0908 17.9092 40 40 40h47.7959c22.0908 0 40 -17.9092 40 -40c0 -17.541 -11.2949 -32.4336 -27.0049 -37.8291c23.9932 -16.6572 48.5771 -46.8389 68.7031 -82.0498l-120.565 -120.564
+c-0.969727 -0.969727 -1.75684 -2.87012 -1.75684 -4.24219s0.787109 -3.27246 1.75684 -4.24219l14.1426 -14.1426c0.969727 -0.969727 2.87012 -1.75684 4.24219 -1.75684s3.27246 0.787109 4.24219 1.75684l113.345 113.345
+c17.7578 -38.2979 29.3711 -79.4434 29.3711 -114.273c0 -53.7861 -22.8975 -75.7881 -58.4463 -86.0332v-57.9678h-183.651v57.9697c-35.543 10.2461 -58.4346 32.249 -58.4346 86.0303c0 78.0293 58.2803 187.766 113.419 226.119zM320 -52
+c0 -6.62695 -5.37305 -12 -12 -12h-296c-6.62695 0 -12 5.37305 -12 12v24c0 6.62695 5.37305 12 12 12h296c6.62695 0 12 -5.37305 12 -12v-24z" />
+ <glyph glyph-name="chess-board" unicode="&#xf43c;"
+d="M192 192h64v-64h-64v64zM512 128h-64v64h64v-64zM0 256h64v-64h-64v64zM512 256h-64v64h64v-64zM512 0h-64v64h64v-64zM256 192v64h64v-64h-64zM0 -64v64h64v-64h-64zM128 -64v64h64v-64h-64zM384 448v-64h-64v64h64zM128 448v-64h-64v64h64zM256 -64v64h64v-64h-64z
+M0 384h64v-64h-64v64zM0 128h64v-64h-64v64zM256 448v-64h-64v64h64zM384 -64v64h64v-64h-64zM64 64h64v-64h-64v64zM320 64h64v-64h-64v64zM256 256h-64v64h64v-64zM384 64v64h64v-64h-64zM128 192v-64h-64v64h64zM384 192v64h64v-64h-64zM512 448v-64h-64v64h64zM128 320
+v-64h-64v64h64zM384 384h64v-64h-64v64zM192 64h64v-64h-64v64zM128 128h64v-64h-64v64zM256 128h64v-64h-64v64zM192 256v-64h-64v64h64zM320 320h-64v64h64v-64zM192 320h-64v64h64v-64zM320 256v64h64v-64h-64zM320 128v64h64v-64h-64z" />
+ <glyph glyph-name="chess-king" unicode="&#xf43f;" horiz-adv-x="448"
+d="M416 -28v-24c0 -6.62695 -5.37305 -12 -12 -12h-360c-6.62695 0 -12 5.37305 -12 12v24c0 6.62695 5.37305 12 12 12h360c6.62695 0 12 -5.37305 12 -12zM407.967 296c27.5742 0 46.8789 -27.2441 37.7383 -53.2588l-85.2871 -242.741h-272.836l-85.2871 242.741
+c-9.14062 26.0146 10.1641 53.2588 37.7383 53.2588h159.967v48h-50c-3.31152 0 -6 2.68848 -6 6v36c0 3.31152 2.68848 6 6 6h50v50c0 3.31152 2.68848 6 6 6h36c3.31152 0 6 -2.68848 6 -6v-50h50c3.31152 0 6 -2.68848 6 -6v-36c0 -3.31152 -2.68848 -6 -6 -6h-50v-48
+h159.967z" />
+ <glyph glyph-name="chess-knight" unicode="&#xf441;" horiz-adv-x="384"
+d="M352 224v-224h-320v46.5566c0 30.3027 17.1201 58.0029 44.2227 71.5547l57.2432 28.6221c14.6465 7.32324 26.5342 26.5566 26.5342 42.9326v0v50.334l-22.127 -11.0635c-5.4502 -2.72461 -11.0723 -9.7334 -12.5498 -15.6445l-11.835 -47.3379
+c-0.822266 -3.28613 -4.04102 -6.97363 -7.18555 -8.23145l-29.6006 -11.8398c-1.18457 -0.473633 -3.18066 -0.858398 -4.45703 -0.858398c-1.40527 0 -3.58887 0.463867 -4.87305 1.03418l-60.2461 26.7744c-3.93359 1.74805 -7.12598 6.66113 -7.12598 10.9658v0v158.26
+c0 6.36523 2.5293 12.4707 7.03027 16.9717l8.96973 8.96973l-14.2109 28.4219c-0.987305 1.97461 -1.78906 5.36914 -1.78906 7.57617v0.00195312c0 6.62695 5.37305 12 12 12h148c106.039 0 192 -85.9609 192 -192zM372 -16c6.62695 0 12 -5.37305 12 -12v-24
+c0 -6.62695 -5.37305 -12 -12 -12h-360c-6.62695 0 -12 5.37305 -12 12v24c0 6.62695 5.37305 12 12 12h360zM52 320c-11.0459 0 -20 -8.9541 -20 -20s8.9541 -20 20 -20s20 8.9541 20 20s-8.9541 20 -20 20z" />
+ <glyph glyph-name="chess-pawn" unicode="&#xf443;" horiz-adv-x="320"
+d="M264 0h-208s60 42.7432 60 176h-32c-6.62695 0 -12 5.37305 -12 12v24c0 6.62695 5.37305 12 12 12h40.209c-28.4883 13.4404 -48.209 42.4121 -48.209 76c0 46.3916 37.6084 84 84 84s84 -37.6084 84 -84c0 -33.5879 -19.7207 -62.5596 -48.209 -76h40.209
+c6.62695 0 12 -5.37305 12 -12v-24c0 -6.62695 -5.37305 -12 -12 -12h-32c0 -133.257 60 -176 60 -176zM292 -16c6.62695 0 12 -5.37305 12 -12v-24c0 -6.62695 -5.37305 -12 -12 -12h-264c-6.62695 0 -12 5.37305 -12 12v24c0 6.62695 5.37305 12 12 12h264z" />
+ <glyph glyph-name="chess-queen" unicode="&#xf445;"
+d="M436 -64h-360c-6.62695 0 -12 5.37305 -12 12v24c0 6.62695 5.37305 12 12 12h360c6.62695 0 12 -5.37305 12 -12v-24c0 -6.62695 -5.37305 -12 -12 -12zM255.579 448c30.9277 0 56 -25.0723 56 -56s-25.0723 -56 -56 -56s-56 25.0723 -56 56s25.0723 56 56 56z
+M460.147 293.366l39.3564 -20.7734c3.53223 -1.86426 6.39941 -6.61816 6.39941 -10.6123c0 -1.29883 -0.398438 -3.33008 -0.889648 -4.53223l-105.014 -257.448h-288l-105.014 257.449c-0.491211 1.20215 -0.889648 3.2334 -0.889648 4.53223
+c0 3.99414 2.86719 8.74805 6.39941 10.6123l39.1787 20.6797c5.6416 2.97754 12.6328 1.02637 15.8994 -4.45312c10.6074 -17.8018 23.7334 -38.0254 44.4248 -38.0254c28.7539 0 30.6357 19.8975 31.6875 57.5391c0.182617 6.49316 5.5 11.667 11.9951 11.667h41.0049
+c5.17578 0 9.75488 -3.32812 11.3887 -8.23828c8.89062 -26.709 26.0732 -40.9912 47.9248 -40.9912s39.0352 14.2822 47.9248 40.9912c1.63379 4.91016 6.21289 8.23828 11.3887 8.23828h41.0059c6.49414 0 11.8115 -5.17188 11.9951 -11.6641
+c1.06055 -37.7383 2.97168 -57.542 31.6855 -57.542c21.3184 0 35.4492 22.2852 44.0654 37.8008c3.16602 5.70215 10.3135 7.81543 16.082 4.77051z" />
+ <glyph glyph-name="chess-rook" unicode="&#xf447;" horiz-adv-x="384"
+d="M81.2412 232.973l-45.4209 42.3154c-2.43652 2.26953 -3.82031 5.4502 -3.82031 8.78027v119.932c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-44h47.999v44c0 6.62695 5.37305 12 12 12h72c6.62695 0 12 -5.37305 12 -12v-44h48v44
+c0 6.62695 5.37305 12 12 12h40c6.62695 0 12 -5.37305 12 -12v-119.933v-0.000976562c0 -2.88184 -1.71094 -6.81543 -3.82031 -8.7793l-45.418 -42.3125c0.285156 -44.0625 3.82129 -133.371 33.2217 -232.975h-287.982c29.4111 99.9238 32.957 189.08 33.2412 232.973z
+M160 192v-64.0039h64v64.0039c0 17.6729 -14.3271 32 -32 32s-32 -14.3271 -32 -32zM384 -28v-24c0 -6.62695 -5.37305 -12 -12 -12h-360c-6.62695 0 -12 5.37305 -12 12v24c0 6.62695 5.37305 12 12 12h360c6.62695 0 12 -5.37305 12 -12z" />
+ <glyph glyph-name="dumbbell" unicode="&#xf44b;" horiz-adv-x="640"
+d="M104 352c13.2998 0 24 -10.7002 24 -24v-272c0 -13.2998 -10.7002 -24 -24 -24h-48c-13.2998 0 -24 10.7002 -24 24v104h-24c-4.40039 0 -8 3.59961 -8 8v48c0 4.40039 3.59961 8 8 8h24v104c0 13.2998 10.7002 24 24 24h48zM632 224c4.40039 0 8 -3.59961 8 -8v-48
+c0 -4.40039 -3.59961 -8 -8 -8h-24v-104c0 -13.2998 -10.7002 -24 -24 -24h-48c-13.2998 0 -24 10.7002 -24 24v272c0 13.2998 10.7002 24 24 24h48c13.2998 0 24 -10.7002 24 -24v-104h24zM456 416c13.2998 0 24 -10.7002 24 -24v-400c0 -13.2998 -10.7002 -24 -24 -24h-48
+c-13.2998 0 -24 10.7002 -24 24v168h-128v-168c0 -13.2998 -10.7002 -24 -24 -24h-48c-13.2998 0 -24 10.7002 -24 24v400c0 13.2998 10.7002 24 24 24h48c13.2998 0 24 -10.7002 24 -24v-168h128v168c0 13.2998 10.7002 24 24 24h48z" />
+ <glyph glyph-name="football-ball" unicode="&#xf44e;" horiz-adv-x="496"
+d="M481.5 387.7c6.2998 -23.9004 13.7002 -61 14.5 -104.5l-156.6 156.8c43.5996 -0.900391 80.8994 -8.5 104.8 -14.9004c18.2002 -4.89941 32.5 -19.1992 37.2998 -37.3994zM14.5 -3.7002c-6.2998 23.9004 -13.7002 61 -14.5 104.5l156.6 -156.8
+c-43.5996 0.900391 -80.8994 8.5 -104.8 14.9004c-18.2002 4.89941 -32.5 19.1992 -37.2998 37.3994zM4.2002 164.6c22.5996 152.7 138.899 252 271.399 271.4l216.301 -216.6c-22.7002 -152.7 -139 -252 -271.5 -271.4zM321.5 288.2l-28.2998 -28.5l-28.2998 28.2998
+c-3.10059 3.09961 -8.2002 3.09961 -11.3008 0l-11.2998 -11.2998c-3.09961 -3.10059 -3.09961 -8.2002 0 -11.2998l28.2998 -28.3008l-22.5996 -22.5996l-28.2998 28.2998c-3.10059 3.10059 -8.2002 3.10059 -11.2998 0l-11.3008 -11.2998
+c-3.09961 -3.09961 -3.09961 -8.2002 0 -11.2998l28.3008 -28.2998l-22.6006 -22.6006l-28.2998 28.2998c-3.09961 3.10059 -8.2002 3.10059 -11.2998 0l-11.2998 -11.2998c-3.10059 -3.09961 -3.10059 -8.2002 0 -11.2998l28.2998 -28.2002l-28.2998 -28.2998
+c-3.10059 -3.09961 -3.10059 -8.2002 0 -11.2998l11.2998 -11.2998c3.09961 -3.10059 8.2002 -3.10059 11.2998 0l28.2998 28.2998l28.2998 -28.2998c3.10059 -3.10059 8.2002 -3.10059 11.3008 0l11.2998 11.2998c3.09961 3.09961 3.09961 8.2002 0 11.2998
+l-28.2998 28.2998l22.5996 22.6006l28.2998 -28.3008c3.10059 -3.09961 8.2002 -3.09961 11.2998 0l11.3008 11.3008c3.09961 3.09961 3.09961 8.19922 0 11.2998l-28.3008 28.2998l22.6006 22.7002l28.2998 -28.2998c3.09961 -3.10059 8.2002 -3.10059 11.2998 0
+l11.2998 11.2998c3.10059 3.09961 3.10059 8.2002 0 11.2998l-28.2998 28.2998l28.2998 28.2998c3.10059 3.10059 3.10059 8.2002 0 11.3008l-11.2998 11.2998c-3.09961 3.09961 -8.2002 3.09961 -11.2998 0z" />
+ <glyph glyph-name="golf-ball" unicode="&#xf450;" horiz-adv-x="416"
+d="M96 32h224c0 -17.7002 -14.2998 -32 -32 -32h-16c-17.7002 0 -32 -14.2998 -32 -32v-20c0 -6.59961 -5.40039 -12 -12 -12h-40c-6.59961 0 -12 5.40039 -12 12v20c0 17.7002 -14.2998 32 -32 32h-16c-17.7002 0 -32 14.2998 -32 32zM416 240
+c0 -74.2002 -39 -139.2 -97.5 -176h-221c-58.5 36.7998 -97.5 101.8 -97.5 176c0 114.9 93.0996 208 208 208s208 -93.0996 208 -208zM235.9 196.1c18.2998 0 33.0996 14.8008 33.0996 33.1006c0 14.3994 -9.2998 26.2998 -22.0996 30.8994
+c9.59961 -26.7998 -15.6006 -51.2998 -41.9004 -41.8994c4.59961 -12.7998 16.5 -22.1006 30.9004 -22.1006zM285 149.2c0 14.3994 -9.2998 26.2998 -22.0996 30.8994c9.59961 -26.7998 -15.6006 -51.2998 -41.9004 -41.8994
+c4.59961 -12.7998 16.5 -22.1006 30.9004 -22.1006c18.2998 0 33.0996 14.9004 33.0996 33.1006zM349 213.2c0 14.3994 -9.2998 26.2998 -22.0996 30.8994c9.59961 -26.7998 -15.6006 -51.2998 -41.9004 -41.8994c4.59961 -12.7998 16.5 -22.1006 30.9004 -22.1006
+c18.2998 0 33.0996 14.9004 33.0996 33.1006z" />
+ <glyph glyph-name="hockey-puck" unicode="&#xf453;"
+d="M0 288c0 53 114.6 96 256 96s256 -43 256 -96s-114.6 -96 -256 -96s-256 43 -256 96zM0 205.8c113.5 -82.3994 398.6 -82.2998 512 0v-109.8c0 -53 -114.6 -96 -256 -96s-256 43 -256 96v109.8z" />
+ <glyph glyph-name="quidditch" unicode="&#xf458;" horiz-adv-x="639"
+d="M256.5 231.2l86.7002 -109.2s-16.6006 -102.4 -76.6006 -150.1c-59.8994 -47.7002 -266.6 -34.1006 -266.6 -34.1006s3.7998 23.1006 11 55.4004l94.5996 112.2c4 4.69922 -0.899414 11.5996 -6.59961 9.5l-60.4004 -22.1006c14.4004 41.7002 32.7002 80 54.6006 97.5
+c59.8994 47.7998 163.3 40.9004 163.3 40.9004zM494.5 96.2002c44 0 79.7998 -35.7002 79.7998 -79.9004c0 -44.0996 -35.7002 -79.8994 -79.7998 -79.8994s-79.7998 35.7998 -79.7998 79.8994c0 44.1006 35.7998 79.9004 79.7998 79.9004zM636.5 417
+c5.5 -6.90039 4.40039 -17 -2.5 -22.5l-232.5 -177.9l34.0996 -42.8994c5.10059 -6.40039 1.7002 -15.9004 -6.2998 -17.6006l-58.7998 -12.3994l-86.7002 109.2l25.2998 54.5996c3.5 7.40039 13.5 8.59961 18.6006 2.2002l34.0996 -43l232.5 177.899
+c6.90039 5.40039 16.9004 4.30078 22.4004 -2.59961z" />
+ <glyph glyph-name="square-full" unicode="&#xf45c;"
+d="M512 -64h-512v512h512v-512z" />
+ <glyph glyph-name="table-tennis" unicode="&#xf45d;"
+d="M496.2 151.5c-64.1006 43.2002 -149.5 27.9004 -195.601 -34.2002l-211.5 211.5l56 56.1006c83.9004 84.0996 220 84.0996 303.9 0c63 -63.1006 78.7002 -155.601 47.2002 -233.4zM278.3 71.7998c-3.7002 -12.7002 -6.2998 -25.8994 -6.2002 -39.7002
+c0 -19.5 3.90039 -38.0996 11 -55.0996c-25.6992 2.7998 -50.5996 13.5996 -70.2998 33.2998l-35.7002 35.7002l-89.2998 -103.3c-7.5 -8.60059 -20.7002 -9.10059 -28.7002 -1l-53.3994 53.5c-8.10059 8.09961 -7.60059 21.2998 1 28.7998l103 89.4004l-34.5 34.5996
+c-39 39.0996 -44.6006 98.7998 -17.2998 144.1zM416 128c53 0 96 -43 96 -96s-43 -96 -96 -96s-96 43 -96 96s43 96 96 96z" />
+ <glyph glyph-name="volleyball-ball" unicode="&#xf45f;" horiz-adv-x="495"
+d="M223.3 204.6c-71.8994 -44.3994 -123.399 -113 -146.1 -192.199c-24 22.7998 -43.5 50.2998 -56.9004 81.0996c22.7998 94.4004 89.5 174.4 180.3 216.8c14.1006 -32.8994 21.8008 -68.7002 22.7002 -105.7zM186.4 339c-84 -39.5 -149 -108.4 -182.4 -191.5
+c-19.7998 109.3 34 212.4 125 262.2c22.9004 -20.6006 42.4004 -44.2998 57.4004 -70.7002zM374 173.9c-35.5996 4.19922 -70.4004 15.5 -102.9 33.1992c-2.5 84.5 -36.1992 163.4 -93.5 222.7c47.3008 14 84.2002 10.2002 98.9004 8.5
+c70.2002 -66.8994 106.1 -164.6 97.5 -264.399zM249.3 164.4c49.7002 -26.8008 104 -40.8008 158.601 -40.9004c27.1992 0 54.2998 3.7998 80.8994 10.4004c-8 -33.1006 -22.5 -63.5 -42.2002 -89.9004c-93 -27.2998 -195.5 -9.5 -277.5 47.7998
+c21.5 28.7002 48.6006 53.2998 80.2002 72.6006zM151 64.7002c53.5996 -37.2998 144 -78.2002 256.9 -62.1006c-43.2002 -36.5 -98.9004 -58.5996 -159.9 -58.5996c-47.4004 0 -91.5 13.5 -129.1 36.5c6.39941 29.7998 16.8994 58.2002 32.0996 84.2002zM331.3 425.3
+c95.9004 -34.2998 164.601 -125.6 164.601 -233.399c0 -2 -0.300781 -4 -0.300781 -6c-29.1992 -9.40039 -59.3994 -14.4004 -89.6992 -14.2002c7.89941 92.3994 -19.3008 183.2 -74.6006 253.6z" />
+ <glyph glyph-name="allergies" unicode="&#xf461;" horiz-adv-x="448"
+d="M416 336c17.5996 0 32 -14.4004 32 -32v-176.1c-0.200195 -14 -1.90039 -28.6006 -5.09961 -42.3008l-26.5 -112.699c-5.10059 -21.7002 -24.4004 -37 -46.7002 -37h-197.601c-15.2998 0 -29.7998 7.39941 -38.7998 19.7998l-125.6 172.7
+c-13 17.8994 -9.10059 42.8994 8.7998 55.8994s42.9004 9.10059 55.9004 -8.7998l23.5996 -32.5v241c0 17.5996 14.4004 32 32 32s32 -14.4004 32 -32v-152c0 -4.40039 3.59961 -8 8 -8h16c4.40039 0 8 3.59961 8 8v184c0 17.5996 14.4004 32 32 32s32 -14.4004 32 -32v-184
+c0 -4.40039 3.59961 -8 8 -8h16c4.40039 0 8 3.59961 8 8v152c0 17.5996 14.4004 32 32 32s32 -14.4004 32 -32v-152c0 -4.40039 3.59961 -8 8 -8h16c4.40039 0 8 3.59961 8 8v72c0 17.5996 14.4004 32 32 32zM176 32c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16
+s-16 -7.2002 -16 -16s7.2002 -16 16 -16zM176 128c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16zM240 0c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16zM240 96c8.7998 0 16 7.2002 16 16
+s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16zM304 64c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16zM336 0c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16zM368 128
+c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16z" />
+ <glyph glyph-name="band-aid" unicode="&#xf462;" horiz-adv-x="640"
+d="M0 288c0 35.2998 28.7002 64 64 64h96v-320h-96c-35.2998 0 -64 28.7002 -64 64v192zM576 352c35.2998 0 64 -28.7002 64 -64v-192c0 -35.2998 -28.7002 -64 -64 -64h-96v320h96zM192 32v320h256v-320h-256zM368 264c-13.2998 0 -24 -10.7002 -24 -24s10.7002 -24 24 -24
+s24 10.7002 24 24s-10.7002 24 -24 24zM368 168c-13.2998 0 -24 -10.7002 -24 -24s10.7002 -24 24 -24s24 10.7002 24 24s-10.7002 24 -24 24zM272 264c-13.2998 0 -24 -10.7002 -24 -24s10.7002 -24 24 -24s24 10.7002 24 24s-10.7002 24 -24 24zM272 168
+c-13.2998 0 -24 -10.7002 -24 -24s10.7002 -24 24 -24s24 10.7002 24 24s-10.7002 24 -24 24z" />
+ <glyph glyph-name="box" unicode="&#xf466;"
+d="M509.5 263.4c0.799805 -2.40039 0.799805 -4.90039 1.2002 -7.40039h-238.7v192h141.4c20.6992 0 39 -13.2002 45.5 -32.7998zM240 448v-192h-238.7c0.400391 2.5 0.400391 5 1.2002 7.40039l50.5996 151.8c6.5 19.5996 24.8008 32.7998 45.5 32.7998h141.4zM0 224h512
+v-240c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v240z" />
+ <glyph glyph-name="boxes" unicode="&#xf468;" horiz-adv-x="576"
+d="M560 160c8.7998 0 16 -7.2002 16 -16v-192c0 -8.7998 -7.2002 -16 -16 -16h-224c-8.7998 0 -16 7.2002 -16 16v192c0 8.7998 7.2002 16 16 16h80v-96l32 21.2998l32 -21.2998v96h80zM176 224c-8.7998 0 -16 7.2002 -16 16v192c0 8.7998 7.2002 16 16 16h80v-96
+l32 21.2998l32 -21.2998v96h80c8.7998 0 16 -7.2002 16 -16v-192c0 -8.7998 -7.2002 -16 -16 -16h-224zM240 160c8.7998 0 16 -7.2002 16 -16v-192c0 -8.7998 -7.2002 -16 -16 -16h-224c-8.7998 0 -16 7.2002 -16 16v192c0 8.7998 7.2002 16 16 16h80v-96l32 21.2998
+l32 -21.2998v96h80z" />
+ <glyph glyph-name="briefcase-medical" unicode="&#xf469;"
+d="M464 320c26.5 0 48 -21.5 48 -48v-288c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v288c0 26.5 21.5 48 48 48h80v48c0 26.5 21.5 48 48 48h160c26.5 0 48 -21.5 48 -48v-48h80zM192 352v-32h128v32h-128zM352 104v48c0 4.40039 -3.59961 8 -8 8h-56v56
+c0 4.40039 -3.59961 8 -8 8h-48c-4.40039 0 -8 -3.59961 -8 -8v-56h-56c-4.40039 0 -8 -3.59961 -8 -8v-48c0 -4.40039 3.59961 -8 8 -8h56v-56c0 -4.40039 3.59961 -8 8 -8h48c4.40039 0 8 3.59961 8 8v56h56c4.40039 0 8 3.59961 8 8z" />
+ <glyph glyph-name="burn" unicode="&#xf46a;" horiz-adv-x="384"
+d="M192 448c111.8 -100.9 192 -220.6 192 -300.5c0 -124.5 -79 -211.5 -192 -211.5s-192 87 -192 211.5c0 79.5996 79.7002 199.2 192 300.5zM192 0c56.5 0 96 39 96 94.7998c0 13.5 -4.59961 61.5 -96 161.2c-91.4004 -99.7002 -96 -147.7 -96 -161.2
+c0 -55.7998 39.5 -94.7998 96 -94.7998z" />
+ <glyph glyph-name="capsules" unicode="&#xf46b;" horiz-adv-x="575"
+d="M555.3 147.9c36.2002 -51.7002 23.7002 -123 -28 -159.2c-20 -14 -42.7998 -20.7002 -65.5 -20.7002c-36.0996 0 -71.5996 17 -93.7998 48.7998l-131.2 187.3c-5.5 7.90039 -9.5 16.4004 -12.7998 25v-149.1c0 -61.9004 -50.0996 -112 -112 -112s-112 50.0996 -112 112
+v224c0 61.9004 50.0996 112 112 112c60 0 108.5 -47.2002 111.4 -106.5c7.7998 21 21.7998 40 41.5 53.7998c20 14 42.8994 20.7002 65.5 20.7002c36 0 71.5 -17 93.7998 -48.7998zM160 192v112c0 26.5 -21.5 48 -48 48s-48 -21.5 -48 -48v-112h96zM354.8 147.1
+l82.5 57.7002l-65.5996 93.7002c-9.40039 13.5 -24.7998 21.5 -41.2998 21.5c-10.3008 0 -20.3008 -3.09961 -28.8008 -9.09961c-11 -7.7002 -18.3994 -19.3008 -20.6992 -32.5c-2.40039 -13.2002 0.599609 -26.6006 8.2998 -37.6006z" />
+ <glyph glyph-name="clipboard-check" unicode="&#xf46c;" horiz-adv-x="384"
+d="M336 384c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h80c0 35.2998 28.7002 64 64 64s64 -28.7002 64 -64h80zM192 408c-13.2998 0 -24 -10.7002 -24 -24s10.7002 -24 24 -24s24 10.7002 24 24
+s-10.7002 24 -24 24zM313.2 176.2c4.7002 4.7002 4.7998 12.2002 0.0996094 17l-28.2002 28.3994c-4.69922 4.7002 -12.2998 4.80078 -17 0.100586l-106 -105.2l-46 46.4004c-4.69922 4.69922 -12.2998 4.7998 -17 0.0996094l-28.3994 -28.2002
+c-4.7002 -4.7002 -4.7998 -12.2998 -0.100586 -17l82.6006 -83.2998c4.7002 -4.7002 12.2998 -4.7998 17 -0.0996094z" />
+ <glyph glyph-name="clipboard-list" unicode="&#xf46d;" horiz-adv-x="384"
+d="M336 384c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h80c0 35.2998 28.7002 64 64 64s64 -28.7002 64 -64h80zM96 24c13.2998 0 24 10.7002 24 24s-10.7002 24 -24 24s-24 -10.7002 -24 -24
+s10.7002 -24 24 -24zM96 120c13.2998 0 24 10.7002 24 24s-10.7002 24 -24 24s-24 -10.7002 -24 -24s10.7002 -24 24 -24zM96 216c13.2998 0 24 10.7002 24 24s-10.7002 24 -24 24s-24 -10.7002 -24 -24s10.7002 -24 24 -24zM192 408c-13.2998 0 -24 -10.7002 -24 -24
+s10.7002 -24 24 -24s24 10.7002 24 24s-10.7002 24 -24 24zM320 40v16c0 4.40039 -3.59961 8 -8 8h-144c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h144c4.40039 0 8 3.59961 8 8zM320 136v16c0 4.40039 -3.59961 8 -8 8h-144
+c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h144c4.40039 0 8 3.59961 8 8zM320 232v16c0 4.40039 -3.59961 8 -8 8h-144c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h144c4.40039 0 8 3.59961 8 8z" />
+ <glyph glyph-name="diagnoses" unicode="&#xf470;" horiz-adv-x="640"
+d="M496 192c-8.7998 0 -16 7.2002 -16 16s7.2002 16 16 16s16 -7.2002 16 -16s-7.2002 -16 -16 -16zM320 272c-48.5 0 -88 39.5 -88 88s39.5 88 88 88s88 -39.5 88 -88s-39.5 -88 -88 -88zM59.7998 84l-17.7002 26.7002c-8.7998 13.2998 -7.59961 34.5996 10 45.0996
+c7.40039 4.40039 17.5 10 28.7002 16c31.6006 -27.2998 79 -4.2002 79.2002 36c47.0996 17.7002 103 32.2002 160 32.2002c45.0996 0 89.2998 -9.2002 129.2 -21.7998c-11.7002 -52.9004 59.5996 -81.2002 87.7002 -35.1006
+c21.3994 -10.3994 39.1992 -20.2998 51.0996 -27.3994c17.5996 -10.5 18.7998 -31.9004 10 -45.1006l-17.7998 -26.6992c-10.2002 -15.1006 -29.2998 -17.8008 -42.9004 -9.80078c-16.2002 9.60059 -56.2002 31.8008 -105.3 48.6006v-90.7002h-224v90.7998
+c-49.0996 -16.8994 -89.0996 -39 -105.3 -48.5996c-13.6006 -8 -32.7002 -5.5 -42.9004 9.7998zM368 104c-13.2998 0 -24 -10.7002 -24 -24s10.7002 -24 24 -24s24 10.7002 24 24s-10.7002 24 -24 24zM272 200c-13.2998 0 -24 -10.7002 -24 -24s10.7002 -24 24 -24
+s24 10.7002 24 24s-10.7002 24 -24 24zM112 192c-8.7998 0 -16 7.2002 -16 16s7.2002 16 16 16s16 -7.2002 16 -16s-7.2002 -16 -16 -16zM624 0c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-608c-8.7998 0 -16 7.2002 -16 16v32
+c0 8.7998 7.2002 16 16 16h608z" />
+ <glyph glyph-name="dna" unicode="&#xf471;" horiz-adv-x="448"
+d="M0.0996094 -46.0996c5.2002 42.8994 31.4004 153.899 159.9 238.1c-128.6 84.2002 -154.7 195.2 -159.9 238.1c-1.09961 9.5 6.40039 17.8008 16 17.8008l32.3008 0.0996094c8.09961 0.0996094 14.8994 -5.90039 16 -13.9004
+c0.699219 -5 1.7998 -11.0996 3.39941 -18.0996h312.4c1.59961 7 2.7002 13.2002 3.39941 18.0996c1.10059 8 7.90039 13.9004 16 13.9004l32.4004 -0.0996094c9.59961 0 17 -8.30078 15.9004 -17.8008c-5.80078 -47.8994 -37.4004 -181.199 -209.5 -266.699
+c-31.7002 -15.8008 -57.4004 -33.3008 -78.7002 -51.4004h127.6c-5.59961 4.7998 -10.7998 9.59961 -17 14.2002c21.4004 11.2002 40.9004 23 58.5 35.3994c93.2998 -78.6992 114.3 -169.8 118.9 -207.699c1.2002 -9.5 -6.2998 -17.8008 -15.9004 -17.8008
+l-32.2998 -0.0996094c-8.09961 -0.0996094 -14.9004 5.90039 -16 13.9004c-0.599609 4.89941 -1.90039 11.1992 -3.5 18.0996h-312.3c-1.60059 -7 -2.7002 -13.2002 -3.40039 -18.0996c-1.09961 -8 -7.89941 -13.9004 -16 -13.9004l-32.2998 0.0996094
+c-9.59961 0 -17 8.30078 -15.9004 17.8008zM224 228.4c25.0996 13.5996 46.4004 28.3994 64.2002 43.5996h-128.5c17.8994 -15.2002 39.2002 -29.9004 64.2998 -43.5996zM355.1 352h-262.1c5.7002 -10.4004 12.7002 -21.0996 21 -32h220.1
+c8.2002 10.9004 15.2002 21.5996 21 32zM92.9004 32h261.6c-5.7998 10.4004 -12.9004 21.0996 -21.2002 32h-219.399c-8.2002 -10.9004 -15.2002 -21.5996 -21 -32z" />
+ <glyph glyph-name="dolly" unicode="&#xf472;" horiz-adv-x="576"
+d="M294.2 170.3l-53 159.4c-2.7998 8.2998 1.7002 17.3994 10.0996 20.2002l61.6006 20.5l33.0996 -99.4004l60.7002 20.0996l-33.1006 99.4004l61.1006 20.4004c8.2998 2.7998 17.3994 -1.7002 20.2002 -10.1006l60.3994 -181.2
+c2.7998 -8.2998 -1.7002 -17.3994 -10.0996 -20.1992l-161.5 -53.8008c-14.7998 11.3008 -31.5 19.7002 -49.5 24.7002zM575.2 121.6c2.7998 -8.39941 -1.7998 -17.3994 -10.1006 -20.1992l-213.3 -71.2002c-1.09961 -57.7998 -53.2002 -103.3 -113.399 -92.6006
+c-39.4004 6.90039 -71.2002 39.8008 -77.3008 79.2002c-5.69922 36.9004 9.90039 70.1006 36 90.5l-92.1992 276.7h-88.9004c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h112c13.7998 0 26 -8.7998 30.4004 -21.9004l99.3994 -298.199
+c29.9004 -0.600586 56.2998 -15 73.5 -37l213.5 71.1992c8.2998 2.80078 17.4004 -1.69922 20.2002 -10.0996zM256 -16c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48z" />
+ <glyph glyph-name="dolly-flatbed" unicode="&#xf474;" horiz-adv-x="640"
+d="M208 128c-8.7998 0 -16 7.2002 -16 16v256c0 8.7998 7.2002 16 16 16h144v-128l48 32l48 -32v128h144c8.7998 0 16 -7.2002 16 -16v-256c0 -8.7998 -7.2002 -16 -16 -16h-384zM624 64c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-82.7998
+c1.7002 -5 2.89941 -10.4004 2.89941 -16c0 -26.5 -21.5 -48 -48 -48s-48 21.5 -48 48c0 5.59961 1.10059 11 2.90039 16h-197.9c1.7002 -5 2.90039 -10.4004 2.90039 -16c0 -26.5 -21.5 -48 -48 -48s-48 21.5 -48 48c0 5.59961 1.09961 11 2.90039 16h-82.9004
+c-8.7998 0 -16 7.2002 -16 16v368h-48c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h96c8.7998 0 16 -7.2002 16 -16v-368h496z" />
+ <glyph glyph-name="file-medical" unicode="&#xf477;" horiz-adv-x="384"
+d="M377 343c4.5 -4.5 7 -10.5996 7 -16.9004v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7zM224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136z
+M288 152c0 4.40039 -3.59961 8 -8 8h-56v56c0 4.40039 -3.59961 8 -8 8h-48c-4.40039 0 -8 -3.59961 -8 -8v-56h-56c-4.40039 0 -8 -3.59961 -8 -8v-48c0 -4.40039 3.59961 -8 8 -8h56v-56c0 -4.40039 3.59961 -8 8 -8h48c4.40039 0 8 3.59961 8 8v56h56
+c4.40039 0 8 3.59961 8 8v48z" />
+ <glyph glyph-name="file-medical-alt" unicode="&#xf478;" horiz-adv-x="448"
+d="M288 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v168h70.0996l34.8008 -69.5c2.89941 -5.90039 11.3994 -5.90039 14.2998 0l56.7998 113.7l22.0996 -44.2002h89.9004c8.7998 0 16 7.2002 16 16
+s-7.2002 16 -16 16h-70.2002l-34.7002 69.5c-2.89941 5.90039 -11.3994 5.90039 -14.2998 0l-56.7998 -113.7l-19.9004 39.7998c-1.39941 2.7002 -4.19922 4.40039 -7.19922 4.40039h-140.9c-4.40039 0 -8 3.59961 -8 8v16c0 4.40039 3.59961 8 8 8h56v232
+c0 13.2998 10.7002 24 24 24h200v-136zM441 343c4.5 -4.5 7 -10.5996 7 -16.9004v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7z" />
+ <glyph glyph-name="first-aid" unicode="&#xf479;" horiz-adv-x="576"
+d="M0 368c0 26.5 21.5 48 48 48h48v-448h-48c-26.5 0 -48 21.5 -48 48v352zM128 -32v448h320v-448h-320zM192 216v-48c0 -4.40039 3.59961 -8 8 -8h56v-56c0 -4.40039 3.59961 -8 8 -8h48c4.40039 0 8 3.59961 8 8v56h56c4.40039 0 8 3.59961 8 8v48
+c0 4.40039 -3.59961 8 -8 8h-56v56c0 4.40039 -3.59961 8 -8 8h-48c-4.40039 0 -8 -3.59961 -8 -8v-56h-56c-4.40039 0 -8 -3.59961 -8 -8zM528 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-48v448h48z" />
+ <glyph glyph-name="hospital-alt" unicode="&#xf47d;" horiz-adv-x="576"
+d="M544 352c17.7002 0 32 -14.2998 32 -32v-368c0 -8.7998 -7.2002 -16 -16 -16h-544c-8.7998 0 -16 7.2002 -16 16v368c0 17.7002 14.2998 32 32 32h128v64c0 17.7002 14.2998 32 32 32h192c17.7002 0 32 -14.2998 32 -32v-64h128zM160 12v40c0 6.59961 -5.40039 12 -12 12
+h-40c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12zM160 140v40c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12zM320 12v40
+c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12zM320 140v40c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h40
+c6.59961 0 12 5.40039 12 12zM336 310v20c0 3.2998 -2.7002 6 -6 6h-26v26c0 3.2998 -2.7002 6 -6 6h-20c-3.2998 0 -6 -2.7002 -6 -6v-26h-26c-3.2998 0 -6 -2.7002 -6 -6v-20c0 -3.2998 2.7002 -6 6 -6h26v-26c0 -3.2998 2.7002 -6 6 -6h20c3.2998 0 6 2.7002 6 6v26h26
+c3.2998 0 6 2.7002 6 6zM480 12v40c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-40c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12zM480 140v40c0 6.59961 -5.40039 12 -12 12h-40c-6.59961 0 -12 -5.40039 -12 -12v-40
+c0 -6.59961 5.40039 -12 12 -12h40c6.59961 0 12 5.40039 12 12z" />
+ <glyph glyph-name="hospital-symbol" unicode="&#xf47e;"
+d="M256 448c141.4 0 256 -114.6 256 -256s-114.6 -256 -256 -256s-256 114.6 -256 256s114.6 256 256 256zM368 72v240c0 4.40039 -3.59961 8 -8 8h-48c-4.40039 0 -8 -3.59961 -8 -8v-88h-96v88c0 4.40039 -3.59961 8 -8 8h-48c-4.40039 0 -8 -3.59961 -8 -8v-240
+c0 -4.40039 3.59961 -8 8 -8h48c4.40039 0 8 3.59961 8 8v88h96v-88c0 -4.40039 3.59961 -8 8 -8h48c4.40039 0 8 3.59961 8 8z" />
+ <glyph glyph-name="id-card-alt" unicode="&#xf47f;" horiz-adv-x="576"
+d="M528 384c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-480c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h144v-96h192v96h144zM288 224c-35.2998 0 -64 -28.7002 -64 -64s28.7002 -64 64 -64s64 28.7002 64 64s-28.7002 64 -64 64zM381.3 0
+c10.4004 0 18.7998 10 15.6006 19.7998c-8.40039 25.7002 -32.5 44.2002 -60.9004 44.2002h-8.2002c-12.2002 -5.09961 -25.7002 -8 -39.7998 -8s-27.5 2.90039 -39.7998 8h-8.2002c-28.5 0 -52.5996 -18.5996 -60.9004 -44.2002
+c-3.19922 -9.7998 5.2002 -19.7998 15.6006 -19.7998h186.6zM352 416v-96h-128v96c0 17.7002 14.2998 32 32 32h64c17.7002 0 32 -14.2998 32 -32z" />
+ <glyph glyph-name="notes-medical" unicode="&#xf481;" horiz-adv-x="384"
+d="M336 384c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h80c0 35.2998 28.7002 64 64 64s64 -28.7002 64 -64h80zM192 408c-13.2998 0 -24 -10.7002 -24 -24s10.7002 -24 24 -24s24 10.7002 24 24
+s-10.7002 24 -24 24zM288 104v48c0 4.40039 -3.59961 8 -8 8h-56v56c0 4.40039 -3.59961 8 -8 8h-48c-4.40039 0 -8 -3.59961 -8 -8v-56h-56c-4.40039 0 -8 -3.59961 -8 -8v-48c0 -4.40039 3.59961 -8 8 -8h56v-56c0 -4.40039 3.59961 -8 8 -8h48c4.40039 0 8 3.59961 8 8
+v56h56c4.40039 0 8 3.59961 8 8zM288 296v16c0 4.40039 -3.59961 8 -8 8h-176c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h176c4.40039 0 8 3.59961 8 8z" />
+ <glyph glyph-name="pallet" unicode="&#xf482;" horiz-adv-x="640"
+d="M144 192c-8.7998 0 -16 7.2002 -16 16v224c0 8.7998 7.2002 16 16 16h112v-128l64 32l64 -32v128h112c8.7998 0 16 -7.2002 16 -16v-224c0 -8.7998 -7.2002 -16 -16 -16h-352zM624 64h-48v-64h48c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-608
+c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h48v64h-48c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h608c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16zM288 0v64h-160v-64h160zM512 0v64h-160v-64h160z" />
+ <glyph glyph-name="pills" unicode="&#xf484;" horiz-adv-x="575"
+d="M112 416c61.9004 0 112 -50.0996 112 -112v-224c0 -61.9004 -50.0996 -112 -112 -112s-112 50.0996 -112 112v224c0 61.9004 50.0996 112 112 112zM160 192v112c0 26.5 -21.5 48 -48 48s-48 -21.5 -48 -48v-112h96zM299.7 221.7l210.8 -210.8
+c3.5 -3.5 3.2002 -9.40039 -0.799805 -12.3008c-62.5 -45.2998 -150.101 -40.3994 -206.4 15.9004s-61.2002 143.9 -15.8994 206.4c2.89941 3.89941 8.7998 4.2998 12.2998 0.799805zM529.5 240.7c56.4004 -56.2998 61.2002 -143.8 15.9004 -206.4
+c-2.90039 -3.89941 -8.80078 -4.2998 -12.3008 -0.799805l-210.8 210.8c-3.5 3.5 -3.2002 9.40039 0.799805 12.2998c62.5 45.3008 150.101 40.4004 206.4 -15.8994z" />
+ <glyph glyph-name="prescription-bottle" unicode="&#xf485;" horiz-adv-x="384"
+d="M32 256v64h320v-352c0 -17.5996 -14.4004 -32 -32 -32h-256c-17.5996 0 -32 14.4004 -32 32v64h120c4.40039 0 8 3.59961 8 8v16c0 4.40039 -3.59961 8 -8 8h-120v64h120c4.40039 0 8 3.59961 8 8v16c0 4.40039 -3.59961 8 -8 8h-120v64h120c4.40039 0 8 3.59961 8 8v16
+c0 4.40039 -3.59961 8 -8 8h-120zM360 448c13.2002 0 24 -10.7998 24 -24v-48c0 -13.2002 -10.7998 -24 -24 -24h-336c-13.2002 0 -24 10.7998 -24 24v48c0 13.2002 10.7998 24 24 24h336z" />
+ <glyph glyph-name="prescription-bottle-alt" unicode="&#xf486;" horiz-adv-x="384"
+d="M360 448c13.2002 0 24 -10.7998 24 -24v-48c0 -13.2002 -10.7998 -24 -24 -24h-336c-13.2002 0 -24 10.7998 -24 24v48c0 13.2002 10.7998 24 24 24h336zM32 -32v352h320v-352c0 -17.5996 -14.4004 -32 -32 -32h-256c-17.5996 0 -32 14.4004 -32 32zM96 152v-48
+c0 -4.40039 3.59961 -8 8 -8h56v-56c0 -4.40039 3.59961 -8 8 -8h48c4.40039 0 8 3.59961 8 8v56h56c4.40039 0 8 3.59961 8 8v48c0 4.40039 -3.59961 8 -8 8h-56v56c0 4.40039 -3.59961 8 -8 8h-48c-4.40039 0 -8 -3.59961 -8 -8v-56h-56c-4.40039 0 -8 -3.59961 -8 -8z
+" />
+ <glyph glyph-name="procedures" unicode="&#xf487;" horiz-adv-x="640"
+d="M528 224c61.9004 0 112 -50.0996 112 -112v-160c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v48h-512v-48c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v352c0 8.7998 7.2002 16 16 16h32c8.7998 0 16 -7.2002 16 -16v-240h192
+v144c0 8.7998 7.2002 16 16 16h256zM136 352c-4.40039 0 -8 3.59961 -8 8v16c0 4.40039 3.59961 8 8 8h140.9c3.09961 0 5.7998 -1.7002 7.19922 -4.40039l19.9004 -39.7998l49.7002 99.4004c5.89941 11.7998 22.7002 11.7998 28.5996 0l27.6006 -55.2002h102.1
+c8.7998 0 16 -7.2002 16 -16s-7.2002 -16 -16 -16h-121.9l-22.0996 44.2002l-49.7002 -99.4004c-5.89941 -11.7998 -22.7002 -11.7998 -28.5996 0l-27.6006 55.2002h-126.1zM160 96c-35.2998 0 -64 28.7002 -64 64s28.7002 64 64 64s64 -28.7002 64 -64
+s-28.7002 -64 -64 -64z" />
+ <glyph glyph-name="shipping-fast" unicode="&#xf48b;" horiz-adv-x="640"
+d="M624 96c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-48c0 -53 -43 -96 -96 -96s-96 43 -96 96h-128c0 -53 -43 -96 -96 -96s-96 43 -96 96v128h152c4.40039 0 8 3.59961 8 8v16c0 4.40039 -3.59961 8 -8 8h-208c-4.40039 0 -8 3.59961 -8 8v16
+c0 4.40039 3.59961 8 8 8h240c4.40039 0 8 3.59961 8 8v16c0 4.40039 -3.59961 8 -8 8h-208c-4.40039 0 -8 3.59961 -8 8v16c0 4.40039 3.59961 8 8 8h240c4.40039 0 8 3.59961 8 8v16c0 4.40039 -3.59961 8 -8 8h-272c-4.40039 0 -8 3.59961 -8 8v16
+c0 4.40039 3.59961 8 8 8h56v48c0 26.5 21.5 48 48 48h256c26.5 0 48 -21.5 48 -48v-48h44.0996c12.7002 0 24.9004 -5.09961 33.9004 -14.0996l99.9004 -99.9004c9 -9 14.0996 -21.2002 14.0996 -33.9004v-108.1h16zM160 -16c26.5 0 48 21.5 48 48s-21.5 48 -48 48
+s-48 -21.5 -48 -48s21.5 -48 48 -48zM480 -16c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48zM560 192v12.0996l-99.9004 99.9004h-44.0996v-112h144z" />
+ <glyph glyph-name="smoking" unicode="&#xf48d;" horiz-adv-x="640"
+d="M632 96c4.40039 0 8 -3.59961 8 -8v-144c0 -4.40039 -3.59961 -8 -8 -8h-48c-4.40039 0 -8 3.59961 -8 8v144c0 4.40039 3.59961 8 8 8h48zM553.3 360.9c54.2998 -36.4004 86.7002 -97.1006 86.7002 -162.601v-30.2998c0 -4.40039 -3.59961 -8 -8 -8h-48
+c-4.40039 0 -8 3.59961 -8 8v30.2998c0 50.2002 -25.2002 96.7002 -67.4004 124c-18.3994 12 -28.5996 33.4004 -28.5996 55.4004v62.2998c0 4.40039 3.59961 8 8 8h48c4.40039 0 8 -3.59961 8 -8v-62.2998c0 -6.7998 3.59961 -13 9.2998 -16.7998zM432 96
+c8.7998 0 16 -7.2002 16 -16v-128c0 -8.7998 -7.2002 -16 -16 -16h-384c-26.5 0 -48 21.5 -48 48v64c0 26.5 21.5 48 48 48h384zM400 -16v64h-176v-64h176zM487.7 306.4c35.2998 -24.7002 56.2998 -64.8008 56.2998 -108.101v-30.2998c0 -4.40039 -3.59961 -8 -8 -8h-48
+c-4.40039 0 -8 3.59961 -8 8v30.2998c0 27.4004 -13.2998 52.9004 -35.7002 68.6006c-35.7002 25.0996 -60.2998 63 -60.2998 106.699v66.4004c0 4.40039 3.59961 8 8 8h48c4.40039 0 8 -3.59961 8 -8v-62.2998c0 -29 15.7998 -54.7002 39.7002 -71.2998zM536 96
+c4.40039 0 8 -3.59961 8 -8v-144c0 -4.40039 -3.59961 -8 -8 -8h-48c-4.40039 0 -8 3.59961 -8 8v144c0 4.40039 3.59961 8 8 8h48z" />
+ <glyph glyph-name="syringe" unicode="&#xf48e;"
+d="M201.5 273.2l64.9004 65l135.699 -135.7l-181.899 -181.9c-17.5 -17.5996 -41.5 -25.5996 -65.4004 -23l-63.5996 7.10059l-66.2998 -66.2998c-3.10059 -3.10059 -8.2002 -3.10059 -11.3008 0l-11.2998 11.2998c-3.09961 3.09961 -3.09961 8.2002 0 11.2998
+l66.4004 66.4004l-7.10059 63.5996c-2.59961 24.2998 5.7002 48.0996 23 65.4004l26.4004 26.3994l55.7998 -55.8994c3.10059 -3.10059 8.2002 -3.10059 11.2998 0l11.3008 11.2998c3.09961 3.09961 3.09961 8.2002 0 11.2998l-55.8008 55.7998l45.3008 45.2998
+l55.6992 -55.7998c3.10059 -3.09961 8.2002 -3.09961 11.3008 0l11.2998 11.2998c3.09961 3.10059 3.09961 8.2002 0 11.3008zM509.7 366.5c3.09961 -3.2002 3.09961 -8.2002 0 -11.4004l-11.2998 -11.2998c-3.10059 -3.09961 -8.2002 -3.09961 -11.3008 0l-28.2998 28.2998
+l-45.2998 -45.2998l73.5 -73.5c3.09961 -3.09961 3.09961 -8.2002 0 -11.2998l-33.9004 -34c-3.09961 -3.09961 -8.19922 -3.09961 -11.2998 0l-17 17l-135.7 135.9l-17 17c-3.09961 3.09961 -3.09961 8.19922 0 11.2998l33.9004 33.8994
+c3.09961 3.10059 8.2002 3.10059 11.2998 0l17 -17l56.6006 -56.5996l45.2998 45.2998l-28.2998 28.2998c-3.10059 3.10059 -3.10059 8.2002 0 11.3008l11.2998 11.2998c3.09961 3.09961 8.2002 3.09961 11.2998 0z" />
+ <glyph glyph-name="tablets" unicode="&#xf490;" horiz-adv-x="639"
+d="M160 256c81.0996 0 147.5 -58.5 160 -134.7c0.799805 -4.7998 -3.2998 -9.2998 -8.2998 -9.2998h-303.3c-5 0 -9.10059 4.5 -8.30078 9.2998c12.4004 76.2002 78.8008 134.7 159.9 134.7zM311.6 80c5 0 9.10059 -4.5 8.30078 -9.2998
+c-12.4004 -76.2002 -78.8008 -134.7 -159.9 -134.7s-147.5 58.5 -159.9 134.7c-0.799805 4.7998 3.30078 9.2998 8.30078 9.2998h303.199zM593.4 401.4c56.5 -56.5 61.3994 -144.2 15.8994 -206.9c-2.7998 -4 -8.7998 -4.2998 -12.2998 -0.799805l-211.3 211.399
+c-3.5 3.40039 -3.2002 9.40039 0.799805 12.3008c62.7002 45.3994 150.4 40.5 206.9 -16zM363 382.3l211.3 -211.3c3.5 -3.40039 3.2002 -9.40039 -0.799805 -12.2998c-62.7002 -45.5 -150.4 -40.6006 -206.9 15.8994c-56.3994 56.5 -61.2998 144.2 -15.8994 206.9
+c2.7998 4 8.7998 4.2998 12.2998 0.799805z" />
+ <glyph glyph-name="thermometer" unicode="&#xf491;"
+d="M476.8 427.6c49.4004 -40.6992 42.1006 -107.3 7.2002 -142.199l-254.2 -253.301h-99.8994l-89 -89c-9.30078 -9.39941 -24.5 -9.39941 -33.9004 0c-9.40039 9.30078 -9.40039 24.5 0 33.9004l89 89v100.9l45.2998 45.6992l50.1006 -50.1992
+c3.09961 -3.10059 8.19922 -3.10059 11.2998 0l11.2998 11.2998c3.09961 3.09961 3.09961 8.2002 0 11.2998l-50.0996 50.2002l45.0996 45.3994l50.2998 -50.1992c3.10059 -3.10059 8.2002 -3.10059 11.2998 0l11.3008 11.2998c3.09961 3.09961 3.09961 8.2002 0 11.2998
+l-50.3008 50.4004l45.1006 45.3994l50.3994 -50.5c3.10059 -3.09961 8.2002 -3.09961 11.3008 0l11.2998 11.2998c3.09961 3.10059 3.09961 8.2002 0 11.3008l-50.5 50.5l45.7002 46c36.3994 36.5 94.3994 40.8994 131.899 10.1992z" />
+ <glyph glyph-name="vial" unicode="&#xf492;" horiz-adv-x="480"
+d="M477.7 261.9c3.09961 -3.10059 3.09961 -8.2002 0 -11.2002l-34 -33.9004c-3.10059 -3.09961 -8.2002 -3.09961 -11.2998 0l-11.2002 11.1006l-246.3 -245.7c-20.1006 -20.1006 -46.5 -30.1006 -72.9004 -30.1006c-28.9004 -0.0996094 -57.7998 11.9004 -78.4004 35.9004
+c-35.6992 41.5 -29.3994 104.8 9.40039 143.5l242.4 241.9l-11.2002 11.0996c-3.10059 3.09961 -3.10059 8.2002 0 11.2998l34 33.9004c3.09961 3.09961 8.2002 3.09961 11.2998 0zM318 192l69.5 69.4004l-78.5 78.2998l-148 -147.7h157z" />
+ <glyph glyph-name="vials" unicode="&#xf493;" horiz-adv-x="640"
+d="M72 384c-4.40039 0 -8 3.59961 -8 8v48c0 4.40039 3.59961 8 8 8h208c4.40039 0 8 -3.59961 8 -8v-48c0 -4.40039 -3.59961 -8 -8 -8h-24v-240c0 -44.0996 -35.9004 -80 -80 -80s-80 35.9004 -80 80v240h-24zM144 384v-96h64v96h-64zM624 0c8.7998 0 16 -7.2002 16 -16
+v-32c0 -8.7998 -7.2002 -16 -16 -16h-608c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h608zM360 384c-4.40039 0 -8 3.59961 -8 8v48c0 4.40039 3.59961 8 8 8h208c4.40039 0 8 -3.59961 8 -8v-48c0 -4.40039 -3.59961 -8 -8 -8h-24v-240
+c0 -44.0996 -35.9004 -80 -80 -80s-80 35.9004 -80 80v240h-24zM432 384v-96h64v96h-64z" />
+ <glyph glyph-name="warehouse" unicode="&#xf494;" horiz-adv-x="640"
+d="M504 96c4.40039 0 8 -3.59961 8 -8v-48c0 -4.40039 -3.59961 -8 -8 -8h-367.7c-4.39941 0 -8 3.59961 -8 8l0.100586 48c0 4.40039 3.59961 8 8 8h367.6zM504 0c4.40039 0 8 -3.59961 8 -8v-48c0 -4.40039 -3.59961 -8 -8 -8h-368c-4.40039 0 -8 3.59961 -8 8
+l0.0996094 48c0 4.40039 3.60059 8 8 8h367.9zM504 192c4.40039 0 8 -3.59961 8 -8v-48c0 -4.40039 -3.59961 -8 -8 -8h-367.5c-4.40039 0 -8 3.59961 -8 8l0.0996094 48c0 4.40039 3.60059 8 8 8h367.4zM610.5 331c17.7998 -7.5 29.5 -24.9004 29.5 -44.2998v-342.7
+c0 -4.40039 -3.59961 -8 -8 -8h-80c-4.40039 0 -8 3.59961 -8 8v248c0 17.5996 -14.5996 32 -32.5996 32h-382.801c-18 0 -32.5996 -14.4004 -32.5996 -32v-248c0 -4.40039 -3.59961 -8 -8 -8h-80c-4.40039 0 -8 3.59961 -8 8v342.7c0 19.3994 11.7002 36.7998 29.5 44.2998
+l272 113.3c4.89062 2.0293 13.1553 3.6748 18.4502 3.6748c5.29395 0 13.5596 -1.64551 18.4502 -3.6748z" />
+ <glyph glyph-name="weight" unicode="&#xf496;"
+d="M448 384c35.29 0 64 -28.71 64 -64v-320c0 -35.29 -28.71 -64 -64 -64h-384c-35.29 0 -64 28.71 -64 64v320c0 35.29 28.71 64 64 64h25.9805c-16.4209 -28.2803 -25.9805 -61.0098 -25.9805 -96c0 -105.87 86.1299 -192 192 -192s192 86.1299 192 192
+c0 34.9902 -9.55957 67.7197 -25.9805 96h25.9805zM256 128c-88.3701 0 -160 71.6299 -160 160s71.6299 160 160 160s160 -71.6299 160 -160s-71.6299 -160 -160 -160zM255.7 279.94c-21.9404 -0.170898 -39.7002 -17.96 -39.7002 -39.9404c0 -22.0898 17.9102 -40 40 -40
+s40 17.9102 40 40c0 10.5498 -4.26953 20 -10.9502 27.1602l33.6699 78.5498c3.4707 8.11035 -0.290039 17.5205 -8.41016 21c-8.08984 3.50977 -17.5293 -0.240234 -21.0293 -8.41016z" />
+ <glyph glyph-name="x-ray" unicode="&#xf497;" horiz-adv-x="640"
+d="M240 64c8.7998 0 16 -7.2002 16 -16s-7.2002 -16 -16 -16s-16 7.2002 -16 16s7.2002 16 16 16zM400 32c-8.7998 0 -16 7.2002 -16 16s7.2002 16 16 16s16 -7.2002 16 -16s-7.2002 -16 -16 -16zM624 448c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16
+h-608c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h608zM624 0c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-608c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h48v352h512v-352h48zM480 200v16c0 4.40039 -3.59961 8 -8 8
+h-136v32h104c4.40039 0 8 3.59961 8 8v16c0 4.40039 -3.59961 8 -8 8h-104v24c0 4.40039 -3.59961 8 -8 8h-16c-4.40039 0 -8 -3.59961 -8 -8v-24h-104c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h104v-32h-136c-4.40039 0 -8 -3.59961 -8 -8v-16
+c0 -4.40039 3.59961 -8 8 -8h136v-32h-104c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h104v-32h-64c-26.5 0 -48 -21.5 -48 -48s21.5 -48 48 -48s48 21.5 48 48v16h64v-16c0 -26.5 21.5 -48 48 -48s48 21.5 48 48s-21.5 48 -48 48h-64v32h104
+c4.40039 0 8 3.59961 8 8v16c0 4.40039 -3.59961 8 -8 8h-104v32h136c4.40039 0 8 3.59961 8 8z" />
+ <glyph glyph-name="box-open" unicode="&#xf49e;" horiz-adv-x="640"
+d="M425.7 192c4.5 0 9 0.599609 13.2998 1.90039l137 39.0996v-178c0 -14.5996 -10 -27.4004 -24.2002 -31l-216.399 -54.0996c-10.1006 -2.5 -20.8008 -2.5 -31 0l-216.2 54.0996c-14.2002 3.5 -24.2002 16.2998 -24.2002 31v178l137 -39.2002
+c4.2998 -1.2998 8.7998 -1.89941 13.2998 -1.89941c16.9004 0 32.7998 9 41.5 23.5l64.2002 106.6l64.2998 -106.6c8.60059 -14.4004 24.5 -23.4004 41.4004 -23.4004zM638.3 304.2c4.5 -9.2002 -0.299805 -20.2002 -10.2002 -23.1006l-197.899 -56.5
+c-7.10059 -2 -14.7002 1 -18.5 7.30078l-91.7002 152.1l250.1 31.9004c6.90039 0.899414 13.6006 -2.7002 16.7002 -8.90039zM53.2002 407c3.09961 6.2002 9.7002 9.7002 16.5996 8.90039l250.2 -31.9004l-91.7998 -152c-3.7998 -6.2998 -11.4004 -9.2998 -18.5 -7.2998
+l-197.9 56.5c-9.7998 2.7998 -14.7002 13.7998 -10.0996 23z" />
+ <glyph glyph-name="comment-dots" unicode="&#xf4ad;"
+d="M256 416c141.4 0 256 -93.0996 256 -208s-114.6 -208 -256 -208c-38.4004 0 -74.7002 7.09961 -107.4 19.4004c-24.5996 -19.6006 -74.2998 -51.4004 -140.6 -51.4004c-3.2002 0 -6 1.7998 -7.2998 4.7998s-0.700195 6.40039 1.5 8.7002
+c0.5 0.5 42.2998 45.4004 54.7998 95.7998c-35.5996 35.7002 -57 81.1006 -57 130.7c0 114.9 114.6 208 256 208zM128 176c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM256 176c17.7002 0 32 14.2998 32 32
+s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM384 176c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32z" />
+ <glyph glyph-name="comment-slash" unicode="&#xf4b3;" horiz-adv-x="640"
+d="M64 208c0 18.5996 3.2998 36.5 8.90039 53.5996l325.5 -251.6c-24.7002 -6.40039 -51 -10 -78.4004 -10c-38.4004 0 -74.7002 7.09961 -107.4 19.4004c-24.5996 -19.6006 -74.2998 -51.4004 -140.6 -51.4004c-3.2002 0 -6 1.90039 -7.2998 4.7998
+c-1.2998 3 -0.700195 6.40039 1.5 8.7002c0.5 0.599609 42.2002 45.5 54.7998 95.7998c-35.5996 35.7002 -57 81.1006 -57 130.7zM633.8 -10.0996c7 -5.40039 8.2998 -15.5 2.90039 -22.3008l-19.6006 -25.2998c-5.5 -7 -15.5 -8.2002 -22.5 -2.7998l-588.399 454.7
+c-7 5.39941 -8.2002 15.3994 -2.7998 22.3994l19.5996 25.2002c5.5 7 15.5 8.2002 22.5 2.7998l105.1 -81.2998c45.2002 32.6006 104.301 52.7002 169.4 52.7002c141.4 0 256 -93.0996 256 -208c0 -49.2002 -21.4004 -94.0996 -56.5996 -129.7z" />
+ <glyph glyph-name="couch" unicode="&#xf4b8;" horiz-adv-x="640"
+d="M160 224c0 35.2998 -28.7002 64 -64 64h-32c0 53 43 96 96 96h320c53 0 96 -43 96 -96h-32c-35.2998 0 -64 -28.7002 -64 -64v-64h-320v64zM576 256c35.2998 0 64 -28.7002 64 -64c0 -23.5996 -13 -44 -32 -55.0996v-120.9c0 -8.7998 -7.2002 -16 -16 -16h-64
+c-8.7998 0 -16 7.2002 -16 16v16h-384v-16c0 -8.7998 -7.2002 -16 -16 -16h-64c-8.7998 0 -16 7.2002 -16 16v120.9c-19 11.0996 -32 31.5 -32 55.0996c0 35.2998 28.7002 64 64 64h32c17.7002 0 32 -14.2998 32 -32v-96h384v96c0 17.7002 14.2998 32 32 32h32z" />
+ <glyph glyph-name="donate" unicode="&#xf4b9;"
+d="M256 32c-114.9 0 -208 93.0996 -208 208s93.0996 208 208 208s208 -93.0996 208 -208s-93.0996 -208 -208 -208zM233.8 350.6c-32.7002 -0.899414 -59 -28.3994 -59.0996 -62.3994c0 -27.7002 18 -52.4004 43.7002 -60.1006l62.2998 -18.6992
+c7.09961 -2.10059 12.0996 -9.40039 12.0996 -17.6006c0 -10.0996 -7.2998 -18.2998 -16.2998 -18.2998h-38.9004c-6.39941 0 -12.3994 1.7002 -17.7998 5.09961c-4.5 2.80078 -10.2002 2.7002 -14 -1l-16.2998 -15.5c-5 -4.7998 -4.40039 -13 1.2002 -17.0996
+c12.5 -9.2998 27.5 -14.5996 43 -15.4004v-17c0 -9.19922 7.39941 -16.5996 16.5996 -16.5996h11.1006c9.19922 0 16.5996 7.40039 16.5996 16.5996v16.8008c32.7002 0.899414 59.0996 28.3994 59.0996 62.3994c0 27.7002 -18 52.4004 -43.6992 60.1006l-62.3008 18.6992
+c-7.09961 2.10059 -12.0996 9.40039 -12.0996 17.6006c0 10.0996 7.2998 18.2998 16.2998 18.2998h38.9004c6.39941 0 12.3994 -1.7002 17.7998 -5.09961c4.5 -2.80078 10.2002 -2.7002 14 1l16.2998 15.5c5 4.7998 4.40039 13 -1.2002 17.0996
+c-12.5 9.2998 -27.5 14.5996 -43 15.4004v17c0 9.19922 -7.39941 16.5996 -16.5996 16.5996h-11.0996c-9.2002 0 -16.6006 -7.40039 -16.6006 -16.5996v-16.8008zM480 96c17.7002 0 32 -14.2998 32 -32v-96c0 -17.7002 -14.2998 -32 -32 -32h-448
+c-17.7002 0 -32 14.2998 -32 32v96c0 17.7002 14.2998 32 32 32h32.4004c19.6992 -26 44.5996 -47.7002 73 -64h-63.8008c-5.2998 0 -9.59961 -3.59961 -9.59961 -8v-16c0 -4.40039 4.2998 -8 9.59961 -8h364.7c5.2998 0 9.60059 3.59961 9.60059 8v16
+c0 4.40039 -4.30078 8 -9.60059 8h-63.7998c28.4004 16.2998 53.4004 38 73 64h32.5z" />
+ <glyph glyph-name="dove" unicode="&#xf4ba;"
+d="M288 280.8c-50.7998 10.5 -96.5996 36.7998 -130.8 75.1006c11.2002 32.3994 27.7998 61.5996 48.8994 86.5c8.80078 10.5 25.7002 5.19922 27.8008 -8.30078c7 -45.8994 25.8994 -88.8994 54.0996 -125.199v-28.1006zM400 384h112l-32 -64v-160.1
+c0 -88.4004 -71.5996 -159.9 -160 -159.9h-76.9004l-65.1992 -56.0996c-6.10059 -5.30078 -14.1006 -8.2002 -22.1006 -7.90039c-92.7998 3.7998 -135.8 49.4004 -153.2 76.2998c-6 9.2998 -1.19922 21.7002 9.5 24.4004l143.9 36
+c-12.7998 9.59961 -25.7002 20.0996 -38.9004 32.7998c-51 49 -85.0996 115.1 -85.0996 185.9c0 41.3994 9.40039 80.5996 26 115.699c5.7998 12.3008 23.5 11.6006 29 -0.899414c40 -91.2002 128.6 -155.5 233 -161.7v59.4004c0 44.1992 35.7998 80.0996 80 80.0996z
+M400 287.9c8.7998 0 16 7.19922 16 16c0 8.7998 -7.2002 16 -16 16s-16 -7.2002 -16 -16c0 -8.80078 7.2002 -16 16 -16z" />
+ <glyph glyph-name="hand-holding" unicode="&#xf4bd;" horiz-adv-x="576"
+d="M565.3 119.9c15.1006 -13.6006 13.9004 -36.8008 -1.2998 -48.9004l-151.2 -121c-11.3994 -9.09961 -25.5 -14 -40 -14h-356.8c-8.7998 0 -16 7.2002 -16 16v96c0 8.7998 7.2002 16 16 16h55.4004l46.5 37.7002c21 17 47.0996 26.2998 74.0996 26.2998h160
+c19.5 0 34.9004 -17.4004 31.5996 -37.4004c-2.59961 -15.6992 -17.3994 -26.5996 -33.2998 -26.5996h-78.2998c-8.7998 0 -16 -7.2002 -16 -16s7.2002 -16 16 -16h118.3c14.6006 0 28.7002 4.90039 40 14l92.4004 73.9004c12.3994 10 30.7998 10.6992 42.5996 0z" />
+ <glyph glyph-name="hand-holding-heart" unicode="&#xf4be;" horiz-adv-x="576"
+d="M275.3 197.5l-108.899 114.2c-31.6006 33.2002 -29.7002 88.2002 5.59961 118.8c30.7998 26.7002 76.7002 21.9004 104.9 -7.7998l11.0996 -11.6006l11.2002 11.7002c28.2002 29.6006 74.0996 34.4004 104.899 7.7002c35.4004 -30.5996 37.2002 -85.5996 5.60059 -118.8
+l-108.9 -114.2c-7.09961 -7.40039 -18.5 -7.40039 -25.5 0zM565.3 119.9c15.1006 -13.6006 13.9004 -36.8008 -1.2998 -48.9004l-151.2 -121c-11.3994 -9.09961 -25.5 -14 -40 -14h-356.8c-8.7998 0 -16 7.2002 -16 16v96c0 8.7998 7.2002 16 16 16h55.4004l46.5 37.7002
+c21 17 47.0996 26.2998 74.0996 26.2998h160c19.5 0 34.9004 -17.4004 31.5996 -37.4004c-2.59961 -15.6992 -17.3994 -26.5996 -33.2998 -26.5996h-78.2998c-8.7998 0 -16 -7.2002 -16 -16s7.2002 -16 16 -16h118.3c14.6006 0 28.7002 4.90039 40 14l92.4004 73.9004
+c12.3994 10 30.7998 10.6992 42.5996 0z" />
+ <glyph glyph-name="hand-holding-usd" unicode="&#xf4c0;" horiz-adv-x="543"
+d="M257.6 303.7c-22.1992 6.39941 -40 24.7002 -42.8994 47.7002c-4 32 19 59.3994 49.2998 63v17.5996c0 8.7998 7.2002 16 16 16h16c8.7998 0 16 -7.2002 16 -16v-17.7002c11.5 -1.39941 22.2998 -5.2002 31.7998 -11.5c6.2002 -4.09961 6.7998 -13.0996 1.5 -18.3994
+l-17.5 -17.5c-3.7002 -3.7002 -9.2998 -4.2002 -14.0996 -2c-3.2002 1.39941 -6.7002 2.19922 -10.2998 2.19922h-32.8008c-4.59961 0 -8.39941 -3.7998 -8.39941 -8.39941c0 -3.7002 2.5 -7.10059 6.09961 -8.10059l50 -14.2998
+c22.2002 -6.39941 40 -24.7002 42.9004 -47.7002c4 -32 -19 -59.3994 -49.2998 -63v-17.5996c0 -8.7998 -7.2002 -16 -16 -16h-16c-8.80078 0 -16 7.2002 -16 16v17.7002c-11.5 1.39941 -22.3008 5.2002 -31.8008 11.5c-6.19922 4.09961 -6.7998 13.0996 -1.5 18.3994
+l17.5 17.5c3.7002 3.7002 9.30078 4.2002 14.1006 2c3.2002 -1.39941 6.7002 -2.19922 10.2998 -2.19922h32.7998c4.60059 0 8.40039 3.7998 8.40039 8.39941c0 3.7002 -2.5 7.10059 -6.10059 8.10059zM533.9 119.9c14.1992 -13.6006 13.0996 -36.8008 -1.30078 -48.9004
+l-142.8 -121c-10.7998 -9.09961 -24.0996 -14 -37.7998 -14h-336.9c-8.2998 0 -15.0996 7.2002 -15.0996 16v96c0 8.7998 6.7998 16 15.0996 16h52.4004l43.9004 37.7002c19.6992 17 44.3994 26.2998 69.8994 26.2998h151.101c18.2998 0 32.8994 -17.4004 29.7998 -37.4004
+c-2.40039 -15.6992 -16.2998 -26.5996 -31.4004 -26.5996h-73.8994c-8.30078 0 -15.1006 -7.2002 -15.1006 -16s6.7998 -16 15.1006 -16h111.699c13.8008 0 27.1006 4.90039 37.8008 14l87.1992 73.9004c11.8008 10 29.1006 10.6992 40.3008 0z" />
+ <glyph glyph-name="hands" unicode="&#xf4c2;" horiz-adv-x="640"
+d="M204.8 217.6l57.6006 -76.7998c16.5996 -22.2002 25.5996 -49.0996 25.5996 -76.7998v-112c0 -8.7998 -7.2002 -16 -16 -16h-131.7c-7.2002 0 -13.5 4.7002 -15.2998 11.5996c-2 7.80078 -5.40039 15.2002 -10.4004 21.7002l-104.1 134.3
+c-6.7998 8.5 -10.5 19.1006 -10.5 30v218.4c0 17.7002 14.2998 32 32 32s32 -14.2998 32 -32v-148.4l89.7998 -107.8c6 -7.2998 16.9004 -7.7998 23.6006 -1.09961l12.7998 12.7998c5.59961 5.59961 6.2998 14.5 1.5 20.9004l-38.1006 50.7998
+c-10.5996 14.0996 -7.69922 34.2002 6.40039 44.7998s34.2002 7.7002 44.7998 -6.40039zM608 384c17.7002 0 32 -14.2998 32 -32v-218.4c0 -10.8994 -3.7002 -21.5 -10.5 -30l-104.1 -134.3c-5 -6.5 -8.40039 -13.8994 -10.4004 -21.7002
+c-1.7998 -6.89941 -8.2002 -11.5996 -15.2998 -11.5996h-131.7c-8.7998 0 -16 7.2002 -16 16v112c0 27.7002 9 54.5996 25.5996 76.7998l57.6006 76.7998c10.5996 14.1006 30.7002 17 44.7998 6.40039s17 -30.7002 6.40039 -44.7998l-38.1006 -50.7998
+c-4.7998 -6.40039 -4.09961 -15.3008 1.5 -20.9004l12.7998 -12.7998c6.60059 -6.60059 17.6006 -6.10059 23.6006 1.09961l89.7998 107.8v148.4c0 17.7002 14.2998 32 32 32z" />
+ <glyph glyph-name="hands-helping" unicode="&#xf4c4;" horiz-adv-x="640"
+d="M488 256c13.2998 0 24 -10.7002 24 -24v-48c0 -13.2998 -10.7002 -24 -24 -24h-8v-64c0 -17.7002 -14.2998 -32 -32 -32h-16c0 -35.2998 -28.7002 -64 -64 -64h-136.6l-103.4 -59.7002c-15.2998 -8.7998 -34.9004 -3.59961 -43.7002 11.7002l-80 138.6
+c-8.89941 15.3008 -3.59961 34.9004 11.7002 43.7002l80 46.2002v47.2998c0 22.4004 11.7998 43.2998 31.0996 54.7998l64.9004 39v-121.6c0 -39.7002 32.2998 -72 72 -72s72 32.2998 72 72v56h152zM635.7 293.4c8.7998 -15.3008 3.59961 -34.8008 -11.7002 -43.7002
+l-80 -46.2002v28.5c0 30.9004 -25.0996 56 -56 56h-184v-88c0 -22.0996 -17.9004 -40 -40 -40s-40 17.9004 -40 40v126.3c0 11 5.59961 21.2998 15 27.1006l33.5 20.8994c10.2002 6.2998 21.9004 9.7002 33.9004 9.7002h102.199l103.4 59.7002
+c15.2998 8.7998 34.9004 3.59961 43.7002 -11.7002z" />
+ <glyph glyph-name="parachute-box" unicode="&#xf4cd;"
+d="M511.9 273c1.09961 -9.2002 -6.80078 -17 -16.1006 -17h-8.7002l-136.8 -151.9c0.700195 -2.69922 1.60059 -5.19922 1.60059 -8.09961v-128c0 -17.7002 -14.3008 -32 -32 -32h-128c-17.7002 0 -32 14.2998 -32 32v128c0 2.90039 0.899414 5.5 1.59961 8.09961
+l-136.7 151.9h-8.7002c-9.19922 0 -17.0996 7.90039 -16 17c9.10059 75.5 78.4004 132.3 158.301 158.7c-36.4004 -39.4004 -62.4004 -100.601 -62.4004 -175.7h-28.0996l116.6 -129.5c2.5 0.599609 4.7998 1.5 7.5 1.5h48v128h-112c0 115.2 68.9004 192 128 192
+s128 -76.7998 128 -192h-112v-128h48c2.7002 0 5 -0.900391 7.5 -1.5l116.6 129.5h-28.0996c0 75.0996 -26 136.3 -62.4004 175.7c79.9004 -26.2998 149.2 -83.1006 158.301 -158.7z" />
+ <glyph glyph-name="people-carry" unicode="&#xf4ce;" horiz-adv-x="640"
+d="M128 352c-26.5 0 -48 21.5 -48 48s21.5 48 48 48s48 -21.5 48 -48s-21.5 -48 -48 -48zM512 352c-26.5 0 -48 21.5 -48 48s21.5 48 48 48s48 -21.5 48 -48s-21.5 -48 -48 -48zM637.7 -20.0996c6.59961 -16.4004 -1.40039 -35 -17.7998 -41.6006
+c-3.90039 -1.59961 -7.90039 -2.2998 -11.9004 -2.2998c-12.7002 0 -24.7002 7.59961 -29.7002 20.0996l-27.7002 69.2002l2 18.2002l41.1006 46.4004zM603.5 189.7c4.59961 -20 -0.799805 -41.2002 -14.4004 -56.7002l-67.1992 -75.9004l-10.1006 -92.5996
+c-1.7998 -16.4004 -15.7002 -28.5 -31.7998 -28.5c-1.2002 0 -2.2998 0.0996094 -3.5 0.200195c-17.5 1.89941 -30.2002 17.7002 -28.2998 35.2998l10.0996 92.7998c1.5 13 6.90039 25.1006 15.6006 35l43.2998 49l-17.6006 70.2998l-6.7998 -20.3994
+c-4.09961 -12.6006 -11.8994 -23.4004 -24.5 -32.6006l-51.0996 -32.5c-4.60059 -2.89941 -12.1006 -4.59961 -17.2002 -5h-160c-5.09961 0.400391 -12.5996 2.10059 -17.2002 5l-51.0996 32.5c-12.6006 9.2002 -20.4004 20.1006 -24.5 32.6006l-6.7998 20.3994
+l-17.6006 -70.2998l43.2998 -49c8.7002 -9.89941 14.1006 -22 15.6006 -35l10.0996 -92.7998c1.90039 -17.5996 -10.7002 -33.4004 -28.2998 -35.2998c-1.2002 -0.100586 -2.2998 -0.200195 -3.5 -0.200195c-16.2002 0 -30 12.2002 -31.7998 28.5l-10.1006 92.5996
+l-67.1992 75.9004c-13.7002 15.5 -19 36.7002 -14.4004 56.7002l18.4004 80.2002c4.59961 20 18.5996 36.7998 37.5 44.8994c18.5 8 38.8994 6.7002 56.0996 -3.2998c22.7002 -13.4004 39.7998 -34.4004 48.0996 -59.4004l11.3008 -33.8994l16.0996 -10.2002v96
+c0 8.7998 7.2002 16 16 16h160c8.7998 0 16 -7.2002 16 -16v-96l16 10.2002l11.2998 33.8994c8.40039 24.9004 25.4004 46 48.1006 59.4004c17.0996 10 37.5996 11.2998 56.0996 3.2998c18.9004 -8.09961 32.9004 -24.8994 37.5 -44.8994zM46.2998 89.9004l41.1006 -46.4004
+l2 -18.2002l-27.7002 -69.2002c-6.5 -16.0996 -24.7998 -24.3994 -41.6006 -17.7998c-16.3994 6.60059 -24.3994 25.2002 -17.7998 41.6006z" />
+ <glyph glyph-name="piggy-bank" unicode="&#xf4d3;" horiz-adv-x="576"
+d="M560 224c8.7998 0 16 -7.2002 16 -16v-128c0 -8.7998 -7.2002 -16 -16 -16h-48.7002c-9 -11.9004 -19.5 -22.4004 -31.2998 -31.2998v-80.7002c0 -8.7998 -7.2002 -16 -16 -16h-64c-8.7998 0 -16 7.2002 -16 16v48h-128v-48c0 -8.7998 -7.2002 -16 -16 -16h-64
+c-8.7998 0 -16 7.2002 -16 16v80.7002c-38.5996 29.2002 -64 75.0996 -64 127.3h-40c-33.2998 0 -59.9004 29.2002 -55.5 63.4004c3.59961 28.1992 29 48.5996 57.5 48.5996c3.2998 0 6 -2.7002 6 -6v-20c0 -3.2998 -2.7002 -6 -6 -6h-1
+c-11.5996 0 -22.2998 -7.7998 -24.5 -19.2002c-3 -15.2998 8.7002 -28.7998 23.5 -28.7998h43.2002c14.8994 73 79.3994 128 156.8 128h128c7.90039 0 15.4004 -1.2002 23 -2.2998c17.5996 20.7998 43.5996 34.2998 73 34.2998h32l-18.9004 -75.5
+c15.8008 -14.7998 28.6006 -32.5 37.4004 -52.5h29.5zM432 160c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16zM256 352c-16.4004 0 -32.2002 -2.2998 -47.4004 -6.2002c-0.0996094 2.10059 -0.599609 4.10059 -0.599609 6.2002
+c0 53 43 96 96 96s96 -43 96 -96c0 -0.299805 -0.0996094 -0.5 -0.0996094 -0.799805c-5.2002 0.399414 -10.5 0.799805 -15.9004 0.799805h-128z" />
+ <glyph glyph-name="ribbon" unicode="&#xf4d6;" horiz-adv-x="448"
+d="M6.09961 3.7002l117.2 130l79.2002 -87.9004l-91.7998 -101.899c-8 -8.80078 -21.4004 -10.5 -31.2998 -3.80078l-68.8008 27.9004c-12 8.09961 -14.0996 24.9004 -4.5 35.7002zM441.9 3.7002c9.69922 -10.7998 7.59961 -27.6006 -4.30078 -35.6006l-68.7998 -27.8994
+c-9.89941 -6.7002 -23.2998 -5.10059 -31.2998 3.7998l-248.1 275.3c-48.6006 53.7998 -13 113.5 -11.5 116l43.5996 73.2002c4.2998 7.2002 9.90039 13.2998 16.7998 18c44 29.7002 130.7 27.5996 171.4 0c6.89941 -4.7002 12.5 -10.7998 16.7998 -18l43.7002 -73.5
+c21.8994 -36.9004 17.2998 -83.5996 -11.4004 -115.5l-34.2002 -38l-79.0996 87.7002s52.7002 59 56 64.5996c-15.4004 8.40039 -40.2002 17.9004 -77.5 17.9004s-62.0996 -9.5 -77.5 -17.9004c3.40039 -5.5 295.4 -330.1 295.4 -330.1z" />
+ <glyph glyph-name="route" unicode="&#xf4d7;"
+d="M416 128c52.9004 0 96 -43.0996 96 -96s-43.0996 -96 -96 -96h-277.8c13.5 16.2998 31.2998 39.2002 47.2998 64h230.5c17.5996 0 32 14.4004 32 32s-14.4004 32 -32 32h-96c-52.9004 0 -96 43.0996 -96 96s43.0996 96 96 96h45.2998
+c-23.0996 32.5996 -45.2998 70.5 -45.2998 96c0 53 43 96 96 96s96 -43 96 -96s-96 -160 -96 -160h-96c-17.5996 0 -32 -14.4004 -32 -32s14.4004 -32 32 -32h96zM416 384c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32s32 14.2998 32 32s-14.2998 32 -32 32zM96 192
+c53 0 96 -43 96 -96s-96 -160 -96 -160s-96 107 -96 160s43 96 96 96zM96 64c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32z" />
+ <glyph glyph-name="seedling" unicode="&#xf4d8;"
+d="M64 352c123.7 0 224 -100.3 224 -224v-144c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v144c-123.7 0 -224 100.3 -224 224h64zM448 416h64c0 -115.9 -88 -211.1 -200.7 -222.8c-10.7998 40.7002 -31.2998 77.3994 -59 107.6
+c38.2998 68.7002 111.5 115.2 195.7 115.2z" />
+ <glyph glyph-name="sign" unicode="&#xf4d9;"
+d="M496 384c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-368v-368c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v368h-48c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h48v48c0 8.7998 7.2002 16 16 16h32
+c8.7998 0 16 -7.2002 16 -16v-48h368zM160 64v224h320v-224h-320z" />
+ <glyph glyph-name="smile-wink" unicode="&#xf4da;" horiz-adv-x="496"
+d="M0 192c0 137 111 248 248 248s248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248zM200 240c0 17.7002 -14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32s32 14.2998 32 32zM358.5 223.5l9.7002 -8.5c8.39941 -7.5 21.5 -0.299805 19.7998 10.7998
+c-4 25.2002 -34.2002 42.1006 -59.9004 42.1006c-25.6992 0 -55.8994 -16.9004 -59.8994 -42.1006c-1.7998 -11.2002 11.5 -18.2002 19.7998 -10.7998l9.5 8.5c14.7998 13.2002 46.2002 13.2002 61 0zM157.8 122.2c-13.3994 16.2998 -38.0996 -4.10059 -24.5996 -20.4004
+c28.5 -34.2002 70.2998 -53.7998 114.8 -53.7998s86.2998 19.5996 114.8 53.7002c13.5 16.2998 -11 36.7002 -24.5996 20.5c-22.4004 -26.7998 -55.2002 -42.2002 -90.2002 -42.2002s-67.7998 15.2998 -90.2002 42.2002z" />
+ <glyph glyph-name="tape" unicode="&#xf4db;" horiz-adv-x="640"
+d="M224 256c35.2998 0 64 -28.7002 64 -64s-28.7002 -64 -64 -64s-64 28.7002 -64 64s28.7002 64 64 64zM624 32c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-400c-123.7 0 -224 100.3 -224 224s100.3 224 224 224s224 -100.3 224 -224
+c0 -62.7002 -25.9004 -119.3 -67.4004 -160h243.4zM224 96c53 0 96 43 96 96s-43 96 -96 96s-96 -43 -96 -96s43 -96 96 -96z" />
+ <glyph glyph-name="truck-loading" unicode="&#xf4de;" horiz-adv-x="640"
+d="M50.2002 72.4004l-49.6006 185.5c-2.2998 8.5 2.80078 17.2998 11.3008 19.5996l77.2998 20.7002l24.7998 -92.7002l61.7998 16.5l-24.7998 92.7002l77.2002 20.7998c8.5 2.2998 17.2998 -2.7998 19.5996 -11.2998l49.7002 -185.5
+c2.2998 -8.5 -2.7998 -17.2998 -11.2998 -19.6006l-216.4 -58c-8.5 -2.2998 -17.2998 2.80078 -19.5996 11.3008zM384 448h256v-400c0 -61.9004 -50.0996 -112 -112 -112c-60.4004 0 -109.2 47.9004 -111.6 107.7l-393.7 -107.4
+c-4.2002 -1.2002 -8.60059 1.2998 -9.7998 5.60059l-12.6006 46.2998c-1.2002 4.2002 1.2998 8.59961 5.60059 9.7998l346.1 94.4004v323.6c0 17.7002 14.2998 32 32 32zM528 0c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48z" />
+ <glyph glyph-name="truck-moving" unicode="&#xf4df;" horiz-adv-x="640"
+d="M621.3 210.7c12 -12 18.7002 -28.2002 18.7002 -45.2002v-85.5c0 -8.7998 -7.2002 -16 -16 -16h-17.5996c1 -5.2002 1.59961 -10.5 1.59961 -16c0 -44.2002 -35.7998 -80 -80 -80s-80 35.7998 -80 80c0 5.5 0.5 10.7998 1.59961 16h-163.199
+c1 -5.2002 1.59961 -10.5 1.59961 -16c0 -44.2002 -35.7998 -80 -80 -80c-26.2998 0 -49.4004 12.7998 -64 32.4004c-14.5996 -19.5 -37.7002 -32.4004 -64 -32.4004c-44.2002 0 -80 35.7998 -80 80v336c0 17.7002 14.2998 32 32 32h416c17.7002 0 32 -14.2998 32 -32
+v-96.0996h37.5c17 0 33.2998 -6.7002 45.2998 -18.7002zM80 16c17.5996 0 32 14.4004 32 32s-14.4004 32 -32 32s-32 -14.4004 -32 -32s14.4004 -32 32 -32zM208 16c17.5996 0 32 14.4004 32 32s-14.4004 32 -32 32s-32 -14.4004 -32 -32s14.4004 -32 32 -32zM480 240v-48
+h92.0996l-43.2998 43.2998c-3 3 -7 4.7002 -11.2998 4.7002h-37.5zM528 16c17.5996 0 32 14.4004 32 32s-14.4004 32 -32 32s-32 -14.4004 -32 -32s14.4004 -32 32 -32z" />
+ <glyph glyph-name="video-slash" unicode="&#xf4e2;" horiz-adv-x="640"
+d="M633.8 -10.0996c7 -5.40039 8.2998 -15.5 2.7998 -22.4004l-19.5996 -25.2998c-5.5 -7 -15.5 -8.2002 -22.5 -2.7998l-178.5 138l-373.3 288.6l-36.5 28.2002c-7 5.39941 -8.2002 15.3994 -2.7998 22.3994l19.5996 25.2002c5.5 7 15.5 8.2002 22.5 2.7998
+l78.4004 -60.5996h244.3c26.3994 0 47.7998 -21.4004 47.7998 -47.7998v-178l32 -24.7002v137.2l109.6 75.5996c21.3008 14.6006 50.4004 -0.299805 50.4004 -25.7998v-257c0 -17.4004 -13.7998 -29.7002 -29.2002 -31.0996zM32 47.7998v245.5l365.8 -282.8
+c-8.2002 -6.5 -18.3994 -10.5 -29.5996 -10.5h-288.4c-26.3994 0 -47.7998 21.4004 -47.7998 47.7998z" />
+ <glyph glyph-name="wine-glass" unicode="&#xf4e3;" horiz-adv-x="288"
+d="M216 -16c22.0898 0 40 -17.9102 40 -40c0 -4.41992 -3.58008 -8 -8 -8h-208c-4.41992 0 -8 3.58008 -8 8c0 22.0898 17.9102 40 40 40h40v117.18c-68.4697 15.8906 -118.05 79.9102 -111.4 154.16l15.96 178.11c0.730469 8.24023 7.55078 14.5498 15.7002 14.5498h223.48
+c8.16016 0 14.9697 -6.30957 15.71 -14.5498l15.9502 -178.101c6.64941 -74.25 -42.9307 -138.27 -111.4 -154.159v-117.19h40z" />
+ <glyph glyph-name="user-alt-slash" unicode="&#xf4fa;" horiz-adv-x="640"
+d="M633.8 -10.0996c7 -5.40039 8.2998 -15.5 2.90039 -22.3008l-19.6006 -25.2998c-5.5 -7 -15.5 -8.2002 -22.5 -2.7998l-588.399 454.7c-7 5.39941 -8.2002 15.3994 -2.7998 22.3994l19.5996 25.2002c5.5 7 15.5 8.2002 22.5 2.7998l135.5 -104.8
+c16 62.1006 71.9004 108.2 139 108.2c79.5 0 144 -64.5 144 -144c0 -54.0996 -30.2002 -100.7 -74.4004 -125.3zM198.4 128h47.3994l248.4 -192h-382.2c-26.5 0 -48 21.5 -48 48v9.59961c0 74.2002 60.2002 134.4 134.4 134.4z" />
+ <glyph glyph-name="user-astronaut" unicode="&#xf4fb;" horiz-adv-x="448"
+d="M64 224c-8.7998 0 -16 7.2002 -16 16v96c0 8.7998 7.2002 16 16 16h13.5c24.7002 56.5 80.9004 96 146.5 96s121.8 -39.5 146.5 -96h13.5c8.7998 0 16 -7.2002 16 -16v-96c0 -8.7998 -7.2002 -16 -16 -16h-13.5c-24.7002 -56.5 -80.9004 -96 -146.5 -96
+s-121.8 39.5 -146.5 96h-13.5zM104 312v-24c0 -53 43 -96 96 -96h48c53 0 96 43 96 96v24c0 22.0996 -21.5 40 -48 40h-144c-26.5 0 -48 -17.9004 -48 -40zM176 240l-12 36l-36 12l36 12l12 36l12 -36l36 -12l-36 -12zM327.6 126.6c67.5 -7.09961 120.4 -63.5996 120.4 -133
+v-9.59961c0 -26.5 -21.5 -48 -48 -48h-80v64c0 17.7002 -14.2998 32 -32 32h-128c-17.7002 0 -32 -14.2998 -32 -32v-64h-80c-26.5 0 -48 21.5 -48 48v9.59961c0 69.4004 52.9004 125.9 120.4 133c29.8994 -19.2998 65.3994 -30.5996 103.6 -30.5996
+s73.7002 11.2998 103.6 30.5996zM272 0c8.7998 0 16 -7.2002 16 -16s-7.2002 -16 -16 -16s-16 7.2002 -16 16s7.2002 16 16 16zM176 0c8.7998 0 16 -7.2002 16 -16v-48h-32v48c0 8.7998 7.2002 16 16 16z" />
+ <glyph glyph-name="user-check" unicode="&#xf4fc;" horiz-adv-x="640"
+d="M224 192c-70.7002 0 -128 57.2998 -128 128s57.2998 128 128 128s128 -57.2998 128 -128s-57.2998 -128 -128 -128zM313.6 160c74.2002 0 134.4 -60.2002 134.4 -134.4v-41.5996c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v41.5996
+c0 74.2002 60.2002 134.4 134.4 134.4h16.6992c22.3008 -10.2002 46.9004 -16 72.9004 -16s50.7002 5.7998 72.9004 16h16.6992zM636.6 288.4c4.60059 -4.60059 4.5 -12.1006 -0.0996094 -16.8008l-141.3 -140.199c-4.7002 -4.60059 -12.2002 -4.60059 -16.7998 0.0996094
+l-81.7002 82.2998c-4.60059 4.7002 -4.60059 12.2002 0.0996094 16.7998l28.1006 27.9004c4.69922 4.59961 12.1992 4.59961 16.7998 -0.0996094l45.5 -45.8008l104.8 104c4.7002 4.60059 12.2002 4.60059 16.7998 -0.0996094z" />
+ <glyph glyph-name="user-clock" unicode="&#xf4fd;" horiz-adv-x="640"
+d="M496 224c79.5996 0 144 -64.4004 144 -144s-64.4004 -144 -144 -144s-144 64.4004 -144 144s64.4004 144 144 144zM560 73.7002v12.5996c0 5.2998 -4.40039 9.7002 -9.7002 9.7002h-38.2998v54.2998c0 5.2998 -4.40039 9.7002 -9.7002 9.7002h-12.5996
+c-5.2998 0 -9.7002 -4.40039 -9.7002 -9.7002v-76.5996c0 -5.2998 4.40039 -9.7002 9.7002 -9.7002h60.5996c5.2998 0 9.7002 4.40039 9.7002 9.7002zM320 80c0 -59.5 29.7998 -112.1 75.0996 -144h-347.1c-26.5 0 -48 21.5 -48 48v41.5996
+c0 74.2002 60.2002 134.4 134.4 134.4h16.6992c22.3008 -10.2002 46.9004 -16 72.9004 -16s50.7002 5.7998 72.9004 16h16.6992c8.40039 0 16.6006 -1 24.6006 -2.5c-11.5 -23.4004 -18.2002 -49.7002 -18.2002 -77.5zM224 192c-70.7002 0 -128 57.2998 -128 128
+s57.2998 128 128 128s128 -57.2998 128 -128s-57.2998 -128 -128 -128z" />
+ <glyph glyph-name="user-cog" unicode="&#xf4fe;" horiz-adv-x="639"
+d="M610.5 74.7002l25.7998 -14.9004c2.90039 -1.7002 4.2998 -5.2002 3.2998 -8.5c-6.69922 -21.5996 -18.1992 -41.2002 -33.1992 -57.3994c-2.30078 -2.5 -6.10059 -3.10059 -9 -1.40039l-25.8008 14.9004c-10.8994 -9.30078 -23.3994 -16.5 -36.8994 -21.3008v-29.7998
+c0 -3.39941 -2.40039 -6.39941 -5.7002 -7.09961c-21.2002 -4.7998 -43.9004 -5 -66.2002 0c-3.2998 0.700195 -5.7002 3.7002 -5.7002 7.09961v29.7998c-13.5 4.80078 -26 12 -36.8994 21.3008l-25.7998 -14.9004c-3 -1.7002 -6.7002 -1.09961 -9 1.40039
+c-15 16.2998 -26.5 35.7998 -33.2002 57.3994c-1 3.2998 0.299805 6.7998 3.2998 8.5l25.7998 14.9004c-2.59961 14.0996 -2.59961 28.5 0 42.5996l-25.7998 14.9004c-2.90039 1.7002 -4.2998 5.2002 -3.2998 8.5c6.7002 21.5996 18.2002 41.2002 33.2002 57.3994
+c2.2998 2.5 6.09961 3.10059 9 1.40039l25.7998 -14.9004c10.8994 9.30078 23.3994 16.5 36.8994 21.3008v29.7998c0 3.39941 2.40039 6.39941 5.7002 7.09961c21.2002 4.7998 43.9004 5 66.2002 0c3.2998 -0.700195 5.7002 -3.7002 5.7002 -7.09961v-29.7998
+c13.5 -4.80078 26 -12 36.8994 -21.3008l25.8008 14.9004c3 1.7002 6.69922 1.09961 9 -1.40039c15 -16.1992 26.5 -35.7998 33.1992 -57.3994c1 -3.2998 -0.299805 -6.7998 -3.2998 -8.5l-25.7998 -14.9004c2.59961 -14.0996 2.59961 -28.5 0 -42.5996zM496 47.5
+c26.7998 0 48.5 21.7998 48.5 48.5s-21.7998 48.5 -48.5 48.5s-48.5 -21.7998 -48.5 -48.5s21.7002 -48.5 48.5 -48.5zM224 192c-70.7002 0 -128 57.2998 -128 128s57.2998 128 128 128s128 -57.2998 128 -128s-57.2998 -128 -128 -128zM425.2 -34.5v-9.2002
+c0 -4.09961 0.799805 -8 2 -11.7998c-7.7002 -5.2998 -17.1006 -8.5 -27.2002 -8.5h-352c-26.5 0 -48 21.5 -48 48v41.5996c0 74.2002 60.2002 134.4 134.4 134.4h16.6992c22.3008 -10.2002 46.9004 -16 72.9004 -16s50.7002 5.7998 72.9004 16h16.6992
+c3.90039 0 7.60059 -0.900391 11.4004 -1.2002c-1 -2.89941 -2.2998 -5.7998 -3.2002 -8.7002c-5.5 -17.6992 1.90039 -36.5 17.9004 -45.6992l7.89941 -4.60059c-0.0996094 -2.59961 -0.0996094 -5.2002 0 -7.7998l-7.89941 -4.59961
+c-16 -9.30078 -23.4004 -28 -17.9004 -45.7002c7.90039 -25.7002 21.9004 -49.7998 40.2002 -69.6006c7.5 -8 18 -12.5996 28.9004 -12.5996c6.7998 0 13.5996 1.90039 19.5996 5.2998l7.90039 4.60059c2.19922 -1.30078 4.5 -2.7002 6.7998 -3.90039z" />
+ <glyph glyph-name="user-edit" unicode="&#xf4ff;" horiz-adv-x="639"
+d="M224 192c-70.7002 0 -128 57.2998 -128 128s57.2998 128 128 128s128 -57.2998 128 -128s-57.2998 -128 -128 -128zM313.6 160c39.9004 0 75.4004 -17.7998 99.9004 -45.5l-77.2998 -77.2998l-7.90039 -7.90039l-1.2002 -11.0996l-6.7998 -60.9004
+c-0.799805 -7.2998 0.200195 -14.5 2.60059 -21.2998h-274.9c-26.5 0 -48 21.5 -48 48v41.5996c0 74.2002 60.2002 134.4 134.4 134.4h16.6992c22.3008 -10.2002 46.9004 -16 72.9004 -16s50.7002 5.7998 72.9004 16h16.6992zM358.9 14.7002l137.899 137.8l71.7002 -71.7002
+l-137.9 -137.899l-60.8994 -6.80078c-10.1006 -1.19922 -18.7002 7.40039 -17.6006 17.6006zM633 179.1c9.2998 -9.39941 9.2998 -24.5 0 -33.8994l-41.7998 -41.7998l-71.7998 71.6992l4.09961 4.10059l37.7998 37.7998c9.2998 9.2998 24.5 9.2998 33.7998 0z" />
+ <glyph glyph-name="user-friends" unicode="&#xf500;" horiz-adv-x="640"
+d="M192 192c-61.9004 0 -112 50.0996 -112 112s50.0996 112 112 112s112 -50.0996 112 -112s-50.0996 -112 -112 -112zM268.8 160c63.6006 0 115.2 -51.5996 115.2 -115.2v-28.7998c0 -26.5 -21.5 -48 -48 -48h-288c-26.5 0 -48 21.5 -48 48v28.7998
+c0 63.6006 51.5996 115.2 115.2 115.2h8.2998c20.9004 -10 43.9004 -16 68.5 -16s47.7002 6 68.5 16h8.2998zM480 192c-53 0 -96 43 -96 96s43 96 96 96s96 -43 96 -96s-43 -96 -96 -96zM528 160c61.9004 0 112 -50.0996 112 -112c0 -26.5 -21.5 -48 -48 -48h-176.6
+c0.0996094 2.09961 0.599609 4.2002 0.599609 6.40039v38.3994c0 38.6006 -15.2998 73.5 -39.7002 99.7998c16.5 9.5 35.2998 15.4004 55.7002 15.4004h3.7998c13.9004 -4.7998 28.6006 -8 44.2002 -8s30.2998 3.2002 44.2002 8h3.7998z" />
+ <glyph glyph-name="user-graduate" unicode="&#xf501;" horiz-adv-x="448"
+d="M319.4 127.4c71.5 -3.10059 128.6 -61.6006 128.6 -133.801v-9.59961c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v9.59961c0 72.2002 57.0996 130.7 128.6 133.801l95.4004 -95.4004zM13.5996 368.2c-18.0996 4.39941 -18.0996 27.2002 0 31.5996
+l190.601 45.9004c13 3.09961 26.7002 3.09961 39.7002 0l190.399 -46c18.2002 -4.40039 18.2002 -27.1006 0 -31.5l-96.2998 -23.2002c8.7002 -17.2002 14 -36.4004 14 -57c0 -70.7002 -57.2998 -128 -128 -128s-128 57.2998 -128 128c0 20.5996 5.40039 39.7998 14 57
+l-66 15.9004v-52.6006c7 -4.2002 12 -11.5 12 -20.2998c0 -8.40039 -4.59961 -15.4004 -11.0996 -19.7002l15.5996 -62.2998c1.7002 -6.90039 -2.09961 -14 -7.59961 -14h-41.8008c-5.5 0 -9.2998 7.09961 -7.59961 14l15.5996 62.2998
+c-6.5 4.2998 -11.0996 11.2998 -11.0996 19.7002c0 8.7998 5 16.0996 12 20.2998v58.4004z" />
+ <glyph glyph-name="user-lock" unicode="&#xf502;" horiz-adv-x="640"
+d="M320 128v-160c0 -11.7002 3.40039 -22.5 8.90039 -32h-280.9c-26.5 0 -48 21.5 -48 48v41.5996c0 74.2002 60.2002 134.4 134.4 134.4h16.6992c22.3008 -10.2002 46.9004 -16 72.9004 -16s50.7002 5.7998 72.9004 16h16.6992c5 0 9.7002 -1 14.5 -1.5
+c-5 -9.09961 -8.09961 -19.4004 -8.09961 -30.5zM224 192c-70.7002 0 -128 57.2998 -128 128s57.2998 128 128 128s128 -57.2998 128 -128s-57.2998 -128 -128 -128zM608 160c17.7002 0 32 -14.2998 32 -32v-160c0 -17.7002 -14.2998 -32 -32 -32h-224
+c-17.7002 0 -32 14.2998 -32 32v160c0 17.7002 14.2998 32 32 32h32v48c0 44.2002 35.7998 80 80 80s80 -35.7998 80 -80v-48h32zM528 160v48c0 17.5996 -14.4004 32 -32 32s-32 -14.4004 -32 -32v-48h64z" />
+ <glyph glyph-name="user-minus" unicode="&#xf503;" horiz-adv-x="640"
+d="M624 240c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-192c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h192zM224 192c-70.7002 0 -128 57.2998 -128 128s57.2998 128 128 128s128 -57.2998 128 -128s-57.2998 -128 -128 -128z
+M313.6 160c74.2002 0 134.4 -60.2002 134.4 -134.4v-41.5996c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v41.5996c0 74.2002 60.2002 134.4 134.4 134.4h16.6992c22.3008 -10.2002 46.9004 -16 72.9004 -16s50.7002 5.7998 72.9004 16h16.6992z" />
+ <glyph glyph-name="user-ninja" unicode="&#xf504;" horiz-adv-x="448"
+d="M325.4 158.8c68.5996 -6.09961 122.6 -63 122.6 -133.2v-41.5996c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v41.5996c0 70.2002 54 127.101 122.6 133.2l101.4 -101.399zM32 256c0 33.4004 17.0996 62.7998 43.0996 80
+c-26 17.2002 -43.0996 46.5996 -43.0996 80c32 0 60.0996 -15.7998 77.5 -39.7998c20.9004 42.3994 64.0996 71.7998 114.5 71.7998c70.7002 0 128 -57.2998 128 -128s-57.2998 -128 -128 -128c-58.7998 0 -107.7 39.7998 -122.8 93.7002
+c-17.4004 -18.2002 -41.9004 -29.7002 -69.2002 -29.7002zM176 352c-17.7002 0 -32 -14.2998 -32 -32h160c0 17.7002 -14.2998 32 -32 32h-96z" />
+ <glyph glyph-name="user-shield" unicode="&#xf505;" horiz-adv-x="640"
+d="M622.3 176.9c10.7002 -4.2002 17.7002 -14 17.7002 -24.9004c0 -124.5 -81.5996 -193.9 -132.9 -213.9c-4.19922 -1.59961 -12.5996 -3.69922 -22.1992 0c-64.2002 25.1006 -132.9 102.301 -132.9 213.9c0 10.9004 7 20.7002 17.7002 24.9004l115.2 45
+c9.59961 3.69922 18.0996 1.59961 22.1992 0zM496 -14.4004c34.5996 16.4004 89.9004 64.7002 95.5 151.801l-95.5 37.2998v-189.101zM224 192c-70.7002 0 -128 57.2998 -128 128s57.2998 128 128 128s128 -57.2998 128 -128s-57.2998 -128 -128 -128zM320 152
+c0 -95.2998 45.2002 -169.1 99.2002 -212c-5.90039 -2.5 -12.4004 -4 -19.2002 -4h-352c-26.5 0 -48 21.5 -48 48v41.5996c0 74.2002 60.2002 134.4 134.4 134.4h16.6992c22.3008 -10.2002 46.9004 -16 72.9004 -16s50.7002 5.7998 72.9004 16h16.6992
+c2.60059 0 5 -0.700195 7.5 -0.799805c-0.299805 -2.40039 -1.09961 -4.7002 -1.09961 -7.2002z" />
+ <glyph glyph-name="user-slash" unicode="&#xf506;" horiz-adv-x="640"
+d="M633.8 -10.0996c7 -5.40039 8.2998 -15.5 2.90039 -22.3008l-19.6006 -25.2998c-5.5 -7 -15.5 -8.2002 -22.5 -2.7998l-588.399 454.7c-7 5.39941 -8.2002 15.3994 -2.7998 22.3994l19.5996 25.2002c5.5 7 15.5 8.2002 22.5 2.7998l147.6 -114
+c5.40039 65.6006 59.8008 117.4 126.9 117.4c70.7002 0 128 -57.2998 128 -128c0 -55.7998 -35.9004 -102.7 -85.7002 -120.3zM96 25.5996c0 66.4004 48.2002 121.101 111.4 132.101l286.8 -221.7h-350.2c-26.5 0 -48 21.5 -48 48v41.5996z" />
+ <glyph glyph-name="user-tag" unicode="&#xf507;" horiz-adv-x="639"
+d="M630.6 83.0996c12.5 -12.5 12.5 -32.6992 -0.0996094 -45.1992l-92.5 -92.5c-12.5 -12.5 -32.7998 -12.5 -45.2998 0l-90.2998 90.1992c-12 12 -18.7002 28.2002 -18.7002 45.2002v79.2002c0 17.7002 14.2998 32 32 32h79.2998c17 0 33.2998 -6.7002 45.2998 -18.7002z
+M447.8 104.1c13.2998 0 24 10.8008 24 24c0 13.3008 -10.7002 24 -24 24s-24 -10.6992 -24 -24c0 -13.2998 10.7002 -24 24 -24zM224 192.1c-70.7002 0 -128 57.3008 -128 127.9c0 70.7002 57.2998 128 128 128s128 -57.2998 128 -127.9c0 -70.6992 -57.2998 -128 -128 -128
+zM351.8 80.9004c0 -25.7002 10 -49.8008 28.1006 -67.9004l58 -58c-8.80078 -11.4004 -22.4004 -18.9004 -37.9004 -18.9004h-352c-26.5 0 -48 21.5 -48 48v41.6006c0 74.2002 60.2002 134.399 134.4 134.5h16.6992c22.3008 -10.2002 46.9004 -16 72.9004 -16
+s50.7002 5.7998 72.9004 16h16.6992c13.3008 0 26 -2.60059 38.2002 -6.2002v-73.0996z" />
+ <glyph glyph-name="user-tie" unicode="&#xf508;" horiz-adv-x="448"
+d="M224 192c-70.7002 0 -128 57.2998 -128 128s57.2998 128 128 128s128 -57.2998 128 -128s-57.2998 -128 -128 -128zM319.8 159.4c71.2998 -3.40039 128.2 -61.7002 128.2 -133.801v-41.5996c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v41.5996
+c0 72.1006 56.9004 130.4 128.2 133.801l47.7998 -191.4l32 136l-32 56h96l-32 -56l32 -136z" />
+ <glyph glyph-name="users-cog" unicode="&#xf509;" horiz-adv-x="639"
+d="M610.5 106.7l25.7998 -14.9004c2.90039 -1.7002 4.2998 -5.2002 3.2998 -8.5c-6.69922 -21.5996 -18.1992 -41.2002 -33.1992 -57.3994c-2.30078 -2.5 -6.10059 -3.10059 -9 -1.40039l-25.8008 14.9004c-10.8994 -9.30078 -23.3994 -16.5 -36.8994 -21.3008v-29.7998
+c0 -3.39941 -2.40039 -6.39941 -5.7002 -7.09961c-21.2002 -4.7998 -43.9004 -5 -66.2002 0c-3.2998 0.700195 -5.7002 3.7002 -5.7002 7.09961v29.7998c-13.5 4.80078 -26 12 -36.8994 21.3008l-25.7998 -14.9004c-3 -1.7002 -6.7002 -1.09961 -9 1.40039
+c-15 16.2998 -26.5 35.7998 -33.2002 57.3994c-1 3.2998 0.299805 6.7998 3.2998 8.5l25.7998 14.9004c-2.59961 14.0996 -2.59961 28.5 0 42.5996l-25.7998 14.9004c-2.90039 1.7002 -4.2998 5.2002 -3.2998 8.5c6.7002 21.5996 18.2002 41.2002 33.2002 57.3994
+c2.2998 2.5 6.09961 3.10059 9 1.40039l25.7998 -14.9004c10.8994 9.30078 23.3994 16.5 36.8994 21.3008v29.7998c0 3.39941 2.40039 6.39941 5.7002 7.09961c21.2002 4.7998 43.9004 5 66.2002 0c3.2998 -0.700195 5.7002 -3.7002 5.7002 -7.09961v-29.7998
+c13.5 -4.80078 26 -12 36.8994 -21.3008l25.8008 14.9004c3 1.7002 6.69922 1.09961 9 -1.40039c15 -16.1992 26.5 -35.7998 33.1992 -57.3994c1 -3.2998 -0.299805 -6.7998 -3.2998 -8.5l-25.7998 -14.9004c2.59961 -14.0996 2.59961 -28.5 0 -42.5996zM496 79.5
+c26.7998 0 48.5 21.7998 48.5 48.5s-21.7998 48.5 -48.5 48.5s-48.5 -21.7998 -48.5 -48.5s21.7002 -48.5 48.5 -48.5zM96 224c-35.2998 0 -64 28.7002 -64 64s28.7002 64 64 64s64 -28.7002 64 -64s-28.7002 -64 -64 -64zM320 192c-61.9004 0 -112 50.0996 -112 112
+s50 111.9 111.9 111.9c61.8994 0 112 -50.1006 112 -112c0 -17.2002 -4.2002 -33.4004 -11.2002 -48c-0.799805 -0.5 -1.60059 -0.900391 -2.40039 -1.40039l-7.89941 4.59961c-5.90039 3.5 -12.7002 5.30078 -19.6006 5.30078c-11 0 -21.5 -4.60059 -28.8994 -12.6006
+c-15.8008 -17.0996 -28 -37.5 -36.3008 -59.2002c-1.89941 -0.0996094 -3.69922 -0.599609 -5.59961 -0.599609zM425.2 -2.5v-9.2002c0 -7.5 2.5 -14.2998 6.2002 -20.2998h-255.4c-26.5 0 -48 21.5 -48 48v28.7998c0 63.6006 51.5996 115.2 115.3 115.2h8.2998
+c20.9004 -10 43.9004 -16 68.5 -16c3.30078 0 6.5 0.400391 9.80078 0.599609c2.7998 -3.09961 6 -5.89941 9.7998 -8.09961l7.89941 -4.59961c-0.0996094 -2.60059 -0.0996094 -5.2002 0 -7.80078c-0.899414 -0.599609 -36.5 -15.7998 -25.7998 -50.2998
+c7.90039 -25.7002 21.9004 -49.7998 40.2002 -69.5996c7.5 -8 18 -12.6006 28.9004 -12.6006c12.1992 0 19.2998 5 27.5 9.80078c2.19922 -1.30078 4.5 -2.7002 6.7998 -3.90039zM173.1 173.4c-40.2998 -22.1006 -68.8994 -62 -75.1992 -109.4h-65.9004
+c-17.7002 0 -32 14.2998 -32 32v32c0 35.2998 28.7002 64 64 64h64c17.5996 0 33.5 -7.09961 45.0996 -18.5996z" />
+ <glyph glyph-name="blender" unicode="&#xf517;"
+d="M416 64c35.3496 0 64 -28.6504 64 -64v-32c0 -17.6699 -14.3301 -32 -32 -32h-320c-17.6699 0 -32 14.3301 -32 32v32c0 35.3496 28.6504 64 64 64h256zM288 -32c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM328 384
+c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h157.82l-17.46 -64h-140.36c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h131.64l-17.46 -64h-114.18c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h105.46l-17.46 -64
+h-256l-8.73047 96h-103.27c-26.5098 0 -48 21.4902 -48 48v160c0 26.5098 21.4902 48 48 48h464l-17.46 -64h-166.54zM64 256h81.46l-11.6396 128h-69.8203v-128z" />
+ <glyph glyph-name="book-open" unicode="&#xf518;" horiz-adv-x="575"
+d="M542.22 415.95c18.4199 1.04004 33.7803 -12.9902 33.7705 -30.7002v-337.84c0 -16.2305 -13.1299 -29.7705 -30.0205 -30.6602c-49.4697 -2.59961 -149.52 -12.0996 -218.7 -46.9199c-10.6494 -5.36035 -23.2793 1.93945 -23.2793 13.4902v363.87
+c0 5.2793 2.62988 10.3291 7.26953 13.1699c67.2402 41.1592 176.16 52.4795 230.96 55.5898zM264.73 360.36c4.64941 -2.85059 7.26953 -7.7002 7.26953 -12.9902v-364.12c0 -11.5195 -12.5898 -18.8096 -23.21 -13.46c-69.1797 34.8398 -169.28 44.3496 -218.771 46.9502
+c-16.8896 0.879883 -30.0195 14.4199 -30.0195 30.6602v337.85c0 17.71 15.3604 31.7402 33.7803 30.7002c54.7998 -3.12012 163.72 -14.4307 230.95 -55.5898z" />
+ <glyph glyph-name="broadcast-tower" unicode="&#xf519;" horiz-adv-x="640"
+d="M150.94 256c-7.01074 0 -13.46 4.5 -15.4004 11.2402c-4.90039 16.9697 -7.54004 34.6396 -7.54004 52.7598s2.63965 35.79 7.53027 52.7695c1.9502 6.74023 8.39941 11.2305 15.4102 11.2305h33.7295c11.0098 0 18.6201 -10.8301 14.8604 -21.1797
+c-4.93066 -13.5801 -7.5498 -27.9805 -7.5498 -42.8203s2.61914 -29.2402 7.5498 -42.8203c3.75 -10.3496 -3.85059 -21.1797 -14.8604 -21.1797h-33.7295zM89.9199 424.66c-16.54 -31.1406 -49.6104 -115.97 0.169922 -209.29
+c5.66016 -10.6299 -1.92969 -23.3701 -13.9502 -23.3701h-34.8398c-6.18945 0 -11.9902 3.50977 -14.6094 9.13965c-23.5703 50.5303 -26.6904 94.1104 -26.6904 118.86c0 42.3496 9.40039 82.46 25.8896 118.69c2.60059 5.71973 8.4707 9.30957 14.7402 9.30957h35.3301
+c12.0098 0 19.5996 -12.7197 13.96 -23.3398zM614.06 438.71c16.5205 -36.2295 25.9404 -76.3496 25.9404 -118.71s-9.42969 -82.4805 -25.96 -118.71c-2.59961 -5.70996 -8.45996 -9.29004 -14.7305 -9.29004h-35.2295c-12.1104 0 -19.6299 12.8398 -13.9102 23.5195
+c48.1396 89.8105 17.9902 174.94 -0.280273 209.23c-5.63965 10.5898 2.04004 23.25 14.0205 23.25h35.4199c6.26953 0 12.1299 -3.58008 14.7295 -9.29004zM489.06 384c7.01074 0 13.46 -4.49023 15.4004 -11.2402c4.90039 -16.9697 7.54004 -34.6396 7.54004 -52.7598
+s-2.62988 -35.79 -7.53027 -52.7598c-1.9502 -6.75 -8.38965 -11.2402 -15.4102 -11.2402h-33.7295c-11.0098 0 -18.6201 10.8301 -14.8604 21.1797c4.93066 13.5801 7.5498 27.9805 7.5498 42.8203s-2.61914 29.2402 -7.5498 42.8203
+c-3.75 10.3496 3.85059 21.1797 14.8604 21.1797h33.7295zM372.76 283.88l130.5 -313.41c3.39062 -8.14941 -0.459961 -17.5195 -8.60938 -20.9199l-29.5107 -12.3096c-8.14941 -3.40039 -17.5098 0.450195 -20.9092 8.60938l-49.2002 118.15h-150.07l-49.1904 -118.15
+c-3.38965 -8.14941 -12.7598 -12.0098 -20.9092 -8.60938l-29.5107 12.3096c-8.14941 3.40039 -12.0098 12.7705 -8.60938 20.9199l130.5 313.41c-7.0498 10.29 -11.2002 22.71 -11.2002 36.1201c0 35.3496 28.6396 64 63.96 64c35.3301 0 63.96 -28.6504 63.96 -64
+c0 -13.4102 -4.15039 -25.8301 -11.2002 -36.1201zM271.62 128h96.7598l-48.3799 116.19z" />
+ <glyph glyph-name="broom" unicode="&#xf51a;" horiz-adv-x="640"
+d="M256.47 231.23l86.7305 -109.181s-16.6006 -102.359 -76.5703 -150.12c-59.9697 -47.7793 -266.63 -34.1201 -266.63 -34.1201s3.7998 23.1406 11 55.4307l94.6201 112.17c3.96973 4.7002 -0.870117 11.6201 -6.65039 9.5l-60.3994 -22.0898
+c14.4395 41.6602 32.7197 80.04 54.5996 97.4697c59.9697 47.7598 163.3 40.9404 163.3 40.9404zM636.53 416.97c5.48926 -6.91016 4.33984 -16.96 -2.5498 -22.4395l-232.48 -177.8l34.0898 -42.9209c5.08008 -6.39941 1.66016 -15.9092 -6.33984 -17.5996
+l-58.7998 -12.4502l-86.7305 109.181l25.3301 54.5498c3.4502 7.41992 13.5 8.62012 18.5898 2.20996l34.1406 -42.9697l232.479 177.8c6.89062 5.48926 16.9199 4.33984 22.4102 -2.56055z" />
+ <glyph glyph-name="chalkboard" unicode="&#xf51b;" horiz-adv-x="640"
+d="M96 384v-352h-64v376c0 22.0596 17.9404 40 40 40h496c22.0596 0 40 -17.9404 40 -40v-376h-64v352h-448zM624 0c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-608c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h272v64h192v-64
+h144z" />
+ <glyph glyph-name="chalkboard-teacher" unicode="&#xf51c;" horiz-adv-x="640"
+d="M208 96c62.0596 0 112.33 -50.4805 112 -112.62c-0.139648 -26.2598 -21.7305 -47.3799 -48 -47.3799h-224c-26.2695 0 -47.8604 21.1201 -48 47.3799c-0.330078 62.1396 49.9404 112.62 112 112.62c2.38965 0 4.76953 -0.349609 7.0498 -1.08984
+c12.9697 -4.20996 26.6006 -6.91016 40.9502 -6.91016s27.9805 2.7002 40.9404 6.91016c2.2793 0.740234 4.66992 1.08984 7.05957 1.08984zM160 128c-53.0195 0 -96 42.9805 -96 96s42.9805 96 96 96s96 -42.9805 96 -96s-42.9805 -96 -96 -96zM592 448
+c26.4697 0 48 -22.25 48 -49.5898v-316.82c0 -27.3398 -21.5303 -49.5898 -48 -49.5898h-244.55c-6.57031 25.2695 -20.5898 47.3096 -39.6904 64h76.2402v64h128v-64h64v288h-352v-49.7998c-18.9004 11.0195 -40.5801 17.7998 -64 17.7998v46.4102
+c0 27.3398 21.5303 49.5898 48 49.5898h384z" />
+ <glyph glyph-name="church" unicode="&#xf51d;" horiz-adv-x="640"
+d="M464.46 201.32c9.63965 -5.78027 15.54 -16.2002 15.54 -27.4404v-237.88h-96v96c0 35.3496 -28.6504 64 -64 64s-64 -28.6504 -64 -64v-96h-96v237.88c0 10.0029 6.96191 22.2959 15.54 27.4404l112.46 67.4795v51.2002h-48c-8.83984 0 -16 7.16016 -16 16v32
+c0 8.83984 7.16016 16 16 16h48v48c0 8.83984 7.16016 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-48h48c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-48v-51.2002zM0 52.04c0.00195312 11.6455 8.68945 24.8252 19.3896 29.4199l108.61 46.54
+v-192h-112c-8.83984 0 -16 7.16016 -16 16v100.04zM620.61 81.46c11.7598 -5.0498 19.3896 -16.6201 19.3896 -29.4199v-100.04c0 -8.83984 -7.16016 -16 -16 -16h-112v192z" />
+ <glyph glyph-name="coins" unicode="&#xf51e;"
+d="M0 42.7002c41.2998 -29.1006 116.8 -42.7002 192 -42.7002s150.7 13.5996 192 42.7002v-42.7002c0 -35.2998 -86 -64 -192 -64s-192 28.7002 -192 64v42.7002zM320 320c-106 0 -192 28.7002 -192 64s86 64 192 64s192 -28.7002 192 -64s-86 -64 -192 -64zM0 147.6
+c41.2998 -34 116.9 -51.5996 192 -51.5996s150.7 17.5996 192 51.5996v-51.5996c0 -35.2998 -86 -64 -192 -64s-192 28.7002 -192 64v51.5996zM416 136.6v63.6006c38.7002 6.89941 72.7998 18.0996 96 34.5v-42.7002c0 -23.7002 -38.7002 -44.2998 -96 -55.4004zM192 288
+c106 0 192 -35.7998 192 -80s-86 -80 -192 -80s-192 35.7998 -192 80s86 80 192 80zM411.3 231.7c-8.7998 23.7002 -30.5 42.8994 -60 57.2002c64.2002 3.19922 125.2 16.6992 160.7 41.7998v-42.7002c0 -24.2998 -40.7002 -45.5 -100.7 -56.2998z" />
+ <glyph glyph-name="compact-disc" unicode="&#xf51f;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM88 192c0 88.2002 71.7998 160 160 160v32c-105.9 0 -192 -86.0996 -192 -192h32zM248 96c53 0 96 43 96 96s-43 96 -96 96s-96 -43 -96 -96s43 -96 96 -96zM248 224
+c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32z" />
+ <glyph glyph-name="crow" unicode="&#xf520;" horiz-adv-x="640"
+d="M544 416c53.0195 0 96 -28.6504 96 -64l-96 -16v-80c0 -87.2598 -58.2598 -160.8 -137.97 -184.14l41.2393 -111.53c2.27051 -6.23047 -0.939453 -13.1104 -7.16992 -15.3799l-22.5498 -8.20996c-6.22949 -2.27051 -13.1094 0.939453 -15.3799 7.16992l-44.5098 120.38
+c-1.90039 -0.0595703 -3.75 -0.290039 -5.66016 -0.290039h-39.0596l38.3291 -103.68c2.27051 -6.23047 -0.939453 -13.1104 -7.16992 -15.3799l-22.5498 -8.20996c-6.22949 -2.27051 -13.1094 0.939453 -15.3799 7.16992l-44.4102 120.1h-96.4902l-121.539 -60.7695
+c-20.0908 -10.04 -43.7305 4.56934 -43.7305 27.0293c0.00195312 8.33984 5.41797 19.168 12.0898 24.1699l371.91 292.59v20.9805c0 44.1797 35.8203 80 80 80c26.0898 0 49.04 -12.6797 63.6396 -32h16.3604zM464 344c13.25 0 24 10.75 24 24c0 13.2598 -10.75 24 -24 24
+s-24 -10.7402 -24 -24c0 -13.25 10.75 -24 24 -24z" />
+ <glyph glyph-name="crown" unicode="&#xf521;" horiz-adv-x="640"
+d="M528 0c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-416c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h416zM592 320c26.5 0 48 -21.5 48 -48s-21.5 -48 -48 -48c-2.59961 0 -5.2002 0.400391 -7.7002 0.799805l-72.2998 -192.8h-384
+l-72.2998 192.8c-2.5 -0.399414 -5.10059 -0.799805 -7.7002 -0.799805c-26.5 0 -48 21.5 -48 48s21.5996 48 48.0996 48s48 -21.5 48 -48c0 -7.09961 -1.69922 -13.7998 -4.39941 -19.7998l72.2998 -43.4004c15.2998 -9.2002 35.2998 -4 44.2002 11.6006l81.5 142.6
+c-10.7002 8.7998 -17.7002 22 -17.7002 37c0 26.5 21.5 48 48 48s48 -21.5 48 -48c0 -15 -7 -28.2002 -17.7002 -37l81.5 -142.6c8.90039 -15.6006 28.7998 -20.8008 44.2002 -11.6006l72.4004 43.4004c-2.80078 6.09961 -4.40039 12.7002 -4.40039 19.7998
+c0 26.5 21.5 48 48 48z" />
+ <glyph glyph-name="dice" unicode="&#xf522;" horiz-adv-x="640"
+d="M592 256c26.5098 0 48 -21.4902 48 -48v-224c0 -26.5098 -21.4902 -48 -48 -48h-224c-26.5098 0 -48 21.4902 -48 48v46.4199l136.26 136.26c24.1201 24.1201 29.6904 59.7305 17 89.3203h118.74zM480 72c13.25 0 24 10.75 24 24c0 13.2598 -10.75 24 -24 24
+s-24 -10.7402 -24 -24c0 -13.25 10.75 -24 24 -24zM433.63 258.7c19.1602 -19.1602 19.1602 -50.2305 0 -69.4004l-174.939 -174.93c-19.1602 -19.1602 -50.2305 -19.1602 -69.3906 0l-174.93 174.939c-19.1602 19.1602 -19.1602 50.2305 0 69.3906l174.939 174.93
+c19.1602 19.1602 50.2305 19.1602 69.3906 0zM96 200c13.25 0 24 10.75 24 24c0 13.2598 -10.75 24 -24 24s-24 -10.7402 -24 -24c0 -13.25 10.75 -24 24 -24zM224 72c13.25 0 24 10.75 24 24c0 13.2598 -10.75 24 -24 24s-24 -10.7402 -24 -24c0 -13.25 10.75 -24 24 -24z
+M224 200c13.25 0 24 10.75 24 24c0 13.2598 -10.75 24 -24 24s-24 -10.7402 -24 -24c0 -13.25 10.75 -24 24 -24zM224 328c13.25 0 24 10.75 24 24c0 13.2598 -10.75 24 -24 24s-24 -10.7402 -24 -24c0 -13.25 10.75 -24 24 -24zM352 200c13.25 0 24 10.75 24 24
+c0 13.2598 -10.75 24 -24 24s-24 -10.7402 -24 -24c0 -13.25 10.75 -24 24 -24z" />
+ <glyph glyph-name="dice-five" unicode="&#xf523;" horiz-adv-x="448"
+d="M384 416c35.3496 0 64 -28.6504 64 -64v-320c0 -35.3496 -28.6504 -64 -64 -64h-320c-35.3496 0 -64 28.6504 -64 64v320c0 35.3496 28.6504 64 64 64h320zM128 64c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM128 256
+c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM224 160c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM320 64c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32
+s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM320 256c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="dice-four" unicode="&#xf524;" horiz-adv-x="448"
+d="M384 416c35.3496 0 64 -28.6504 64 -64v-320c0 -35.3496 -28.6504 -64 -64 -64h-320c-35.3496 0 -64 28.6504 -64 64v320c0 35.3496 28.6504 64 64 64h320zM128 64c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM128 256
+c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM320 64c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM320 256c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32
+s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="dice-one" unicode="&#xf525;" horiz-adv-x="448"
+d="M384 416c35.3496 0 64 -28.6504 64 -64v-320c0 -35.3496 -28.6504 -64 -64 -64h-320c-35.3496 0 -64 28.6504 -64 64v320c0 35.3496 28.6504 64 64 64h320zM224 160c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="dice-six" unicode="&#xf526;" horiz-adv-x="448"
+d="M384 416c35.3496 0 64 -28.6504 64 -64v-320c0 -35.3496 -28.6504 -64 -64 -64h-320c-35.3496 0 -64 28.6504 -64 64v320c0 35.3496 28.6504 64 64 64h320zM128 64c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM128 160
+c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM128 256c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM320 64c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32
+s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM320 160c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM320 256c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="dice-three" unicode="&#xf527;" horiz-adv-x="448"
+d="M384 416c35.3496 0 64 -28.6504 64 -64v-320c0 -35.3496 -28.6504 -64 -64 -64h-320c-35.3496 0 -64 28.6504 -64 64v320c0 35.3496 28.6504 64 64 64h320zM128 256c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM224 160
+c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM320 64c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="dice-two" unicode="&#xf528;" horiz-adv-x="448"
+d="M384 416c35.3496 0 64 -28.6504 64 -64v-320c0 -35.3496 -28.6504 -64 -64 -64h-320c-35.3496 0 -64 28.6504 -64 64v320c0 35.3496 28.6504 64 64 64h320zM128 256c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM320 64
+c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="divide" unicode="&#xf529;" horiz-adv-x="448"
+d="M224 96c35.3496 0 64 -28.6504 64 -64s-28.6504 -64 -64 -64s-64 28.6504 -64 64s28.6504 64 64 64zM224 288c-35.3496 0 -64 28.6504 -64 64s28.6504 64 64 64s64 -28.6504 64 -64s-28.6504 -64 -64 -64zM416 240c17.6699 0 32 -14.3301 32 -32v-32
+c0 -17.6699 -14.3301 -32 -32 -32h-384c-17.6699 0 -32 14.3301 -32 32v32c0 17.6699 14.3301 32 32 32h384z" />
+ <glyph glyph-name="door-closed" unicode="&#xf52a;" horiz-adv-x="640"
+d="M624 0c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-608c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h111.99v397.2c0 28.0195 21.5293 50.7998 48 50.7998h288.01c26.4697 0 48 -22.7803 48 -50.7998v-397.2h112zM415.99 160
+c17.6797 0 32.0098 14.3301 32 32c0 17.6699 -14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="door-open" unicode="&#xf52b;" horiz-adv-x="640"
+d="M624 0c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-144v384h-96v64h112c26.4697 0 48 -22.1904 48 -49.4502v-334.55h80zM312.24 446.99c20.2002 5.24023 39.7598 -10.5898 39.7598 -32.1699v-478.82h-336c-8.83984 0 -16 7.16016 -16 16v32
+c0 8.83984 7.16016 16 16 16h80v365.08c0 15.2197 9.99023 28.4795 24.2402 32.1699zM264 160c13.25 0 24 14.3301 24 32s-10.75 32 -24 32s-24 -14.3301 -24 -32s10.75 -32 24 -32z" />
+ <glyph glyph-name="equals" unicode="&#xf52c;" horiz-adv-x="448"
+d="M416 144c17.6699 0 32 -14.3301 32 -32v-32c0 -17.6699 -14.3301 -32 -32 -32h-384c-17.6699 0 -32 14.3301 -32 32v32c0 17.6699 14.3301 32 32 32h384zM416 336c17.6699 0 32 -14.3301 32 -32v-32c0 -17.6699 -14.3301 -32 -32 -32h-384c-17.6699 0 -32 14.3301 -32 32
+v32c0 17.6699 14.3301 32 32 32h384z" />
+ <glyph glyph-name="feather" unicode="&#xf52d;"
+d="M467.14 403.16c50.5703 -50.5205 61.7002 -124.9 16.2607 -199.36l-131.54 -43.7998h97.7793c-9.92969 -10.6797 3.68066 3.07031 -46.3096 -46.8604l-147.57 -49.1396h98.1904c-74.9502 -73.1104 -194.53 -70.6504 -246.83 -54.9404l-66.1006 -66.0293
+c-9.37988 -9.37012 -24.5996 -9.37012 -33.9795 0s-9.37988 24.5693 0 33.9395l259.52 259.25c6.25 6.25 6.25 16.3799 0 22.6299s-16.3896 6.25 -22.6494 0l-178.44 -178.25c-6.75 60.3408 3.18066 150.78 63.6406 211.17c24.7695 24.7402 7.13965 7.14062 85.75 85.6602
+c90.6094 90.5107 189.729 88.21 252.279 25.7305z" />
+ <glyph glyph-name="frog" unicode="&#xf52e;" horiz-adv-x="576"
+d="M446.53 350.57c0 0 58.4297 -19.0605 98.9893 -41.2803c18.7607 -10.2803 30.4805 -29.8301 30.4805 -51.2305v-0.00292969c0 -18.8037 -13.3105 -41.5283 -29.71 -50.7266l-154.44 -86.6504l98.5205 -104.68h53.6299c17.6699 0 32 -14.3301 32 -32
+c0 -8.83984 -7.16016 -16 -16 -16h-90.3799l-118.53 125.94c5.07031 54.1494 -29.9297 85.0596 -40.7998 93.21c-36.8496 27.6191 -88.29 27.6592 -125.13 0l-34.7803 -26.0908c-7.07031 -5.2998 -8.49023 -15.3291 -3.18945 -22.4092
+c5.31934 -7.10059 15.3496 -8.5 22.4092 -3.19043l32.7607 24.5898c20.6895 15.5303 48.3496 20.8105 72.2393 10.8799c44.0605 -18.3193 57.8506 -70.3701 33.71 -106.6l-35.7998 -48.3301h79.4902c17.6699 0 32 -14.3301 32 -32c0 -8.83984 -7.16016 -16 -16 -16h-304
+c-34.9199 0 -63.8896 28.0996 -64 63.0195c-0.5 166.86 126.75 304.021 289.46 319.44c6.82031 37.25 39.3096 65.54 78.54 65.54c39.1904 0 71.6699 -28.2305 78.5303 -65.4297zM368 312c13.25 0 24 10.75 24 24c0 13.2598 -10.75 24 -24 24
+c-13.2598 0 -24 -10.7402 -24 -24c0 -13.25 10.7402 -24 24 -24z" />
+ <glyph glyph-name="gas-pump" unicode="&#xf52f;"
+d="M336 0c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-320c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h320zM493.2 340.7c12 -12 18.7998 -28.2998 18.7998 -45.2998v-223.4c0 -42.2002 -36.5 -76 -79.5 -71.7002
+c-37.5 3.90039 -64.5 38.2002 -64.5 75.9004v27.7998c0 22.0996 -17.9004 40 -40 40h-8v-112h-288v352c0 35.2998 28.7002 64 64 64h160c35.2998 0 64 -28.7002 64 -64v-192h8c48.5996 0 88 -39.4004 88 -88v-32c0 -13.2002 10.7998 -24 24 -24s24 10.7998 24 24v160.8
+c-27.0996 3.90039 -48 27.1006 -48 55.2002v62.0996l-37.7002 37.7002c-6.2002 6.2002 -6.2002 16.4004 0 22.6006l11.2998 11.2998c6.2002 6.2002 16.4004 6.2002 22.6006 0zM256 256v128h-160v-128h160z" />
+ <glyph glyph-name="glasses" unicode="&#xf530;" horiz-adv-x="576"
+d="M574.1 167.63c1.26074 -5.07031 1.90039 -10.2695 1.90039 -15.5v-70.25c0 -62.8896 -51.5801 -113.88 -115.2 -113.88h-37.1201c-60.2793 0 -110.37 45.9404 -114.87 105.37l-2.92969 38.6299h-35.75l-2.92969 -38.6299
+c-4.50977 -59.4297 -54.6006 -105.37 -114.88 -105.37h-37.1201c-63.6201 0 -115.2 50.9902 -115.2 113.89v70.25c0.000976562 4.33984 0.852539 11.2793 1.90039 15.4902l45.3398 181.73c5.91016 23.6895 21.5898 44.0293 43 55.7998
+c21.4395 11.7402 46.9697 14.1094 70.1895 6.33008l15.25 -5.08008c8.39062 -2.79004 12.9199 -11.8604 10.1201 -20.2402l-5.05957 -15.1797c-2.79004 -8.37988 -11.8506 -12.9102 -20.2305 -10.1201l-13.1699 4.38965
+c-10.8701 3.62012 -22.9902 3.57031 -33.1494 -1.72949c-10.29 -5.36035 -17.5908 -14.5605 -20.3809 -25.8105l-38.46 -153.83c22.1904 6.81055 49.79 12.46 81.21 12.46c34.7803 0 73.9902 -7.00977 114.86 -26.75h73.1797
+c40.8701 19.7207 80.0801 26.7402 114.851 26.7402c31.4102 0 59.0098 -5.64941 81.2002 -12.46l-38.46 153.82c-2.80078 11.2598 -10.0801 20.4502 -20.3701 25.8193c-10.1602 5.30078 -22.29 5.35059 -33.1602 1.73047l-13.1797 -4.38965
+c-8.38086 -2.79004 -17.4404 1.73926 -20.2305 10.1201l-5.05957 15.1797c-2.80078 8.37988 1.72949 17.4502 10.1191 20.2402l15.25 5.08008c23.2207 7.7793 48.75 5.39941 70.1904 -6.33008c21.4102 -11.7607 37.0898 -32.1104 43 -55.8105zM203.38 78.21l3.12988 41.2197
+c-22.6699 8.58984 -46.0693 12.9199 -69.9297 12.9199c-29.1602 0 -54.46 -6.42969 -72.5801 -12.9199v-37.54c0 -27.5098 22.9697 -49.8896 51.2002 -49.8896h37.1201c26.6602 0 49.0898 20.2998 51.0596 46.21zM512 81.8799l-0.00976562 37.54
+c-18.1201 6.49023 -43.4307 12.9297 -72.5498 12.9297c-23.8701 0 -47.2803 -4.33008 -69.9502 -12.9199l3.12988 -41.2197c1.95996 -25.9102 24.3896 -46.21 51.0596 -46.21h37.1201c28.2305 0 51.2002 22.3701 51.2002 49.8799z" />
+ <glyph glyph-name="greater-than" unicode="&#xf531;" horiz-adv-x="384"
+d="M365.52 238.15c11.2803 -5.25 18.4805 -16.5605 18.4902 -29.0107v-34.2295c-0.00195312 -11.2529 -8.28027 -24.2451 -18.4795 -29l-306.471 -142.91c-16.0195 -7.46973 -35.0596 -0.540039 -42.5293 15.4805l-13.5205 29
+c-7.46973 16.0195 -0.540039 35.0596 15.4805 42.5293l218.47 101.891l-218.43 101.85c-16.0605 7.49023 -23.0107 26.5801 -15.5205 42.6396l13.5703 29.0801c7.49023 16.0605 26.5801 23.0107 42.6396 15.5205z" />
+ <glyph glyph-name="greater-than-equal" unicode="&#xf532;" horiz-adv-x="448"
+d="M55.2197 340.31c-18.29 6 -27.7393 24.2607 -21.0996 40.79l12.0303 29.9199c6.63965 16.5303 26.8594 25.0605 45.1494 19.0605l301.72 -119.98c13.7705 -4.51953 22.9805 -16.6094 22.9805 -30.1699v-15.96c0 -13.5596 -9.20996 -25.6494 -22.9805 -30.1699
+l-301.409 -119.859c-18.3906 -6.04004 -38.7002 2.54004 -45.3799 19.1494l-12.0908 30.0801c-6.67969 16.6104 2.81055 34.9697 21.2002 41l175.44 68.0498zM424 48c13.25 0 24 -10.7402 24 -24v-48c0 -13.25 -10.75 -24 -24 -24h-400c-13.25 0 -24 10.75 -24 24v48
+c0 13.2598 10.75 24 24 24h400z" />
+ <glyph glyph-name="helicopter" unicode="&#xf533;" horiz-adv-x="640"
+d="M304 64c-8.83105 0 -20.3018 5.73438 -25.5996 12.7998l-86.4004 115.2l-160 64l-31.5098 108.12c-2.53027 10.0996 5.10938 19.8799 15.5195 19.8799h39.9902c5.03027 0 9.78027 -2.37012 12.7998 -6.40039l43.2002 -57.5996h208v64h-176c-8.83984 0 -16 7.16016 -16 16
+v32c0 8.83984 7.16016 16 16 16h416c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-176v-64c123.71 0 224 -100.29 224 -224c0 -17.6699 -14.3301 -32 -32 -32h-272zM416 252.49v-124.49h124.79c-12.7598 62.5596 -62.2402 111.7 -124.79 124.49z
+M635.37 -10.8096c6.58984 -6.61035 6.04004 -17.5205 -0.980469 -23.6602c-33.1494 -29.0498 -53.5693 -29.5205 -68.4492 -29.5205h-325.94c-8.83984 0 -16 7.18066 -16 16.0303v32.0596c0 8.85059 7.16016 16.0303 16 16.0303h325.94
+c10.7998 0 17.5498 4.48047 24.6396 11.25c6.40039 6.11035 16.3896 6.27051 22.6396 0.00976562z" />
+ <glyph glyph-name="infinity" unicode="&#xf534;" horiz-adv-x="640"
+d="M471.1 352c93.1006 0 168.9 -71.7998 168.9 -160s-75.7998 -160 -168.9 -160c-66.0996 0 -117.8 41.2998 -151.1 78.5996c-33.2998 -37.2998 -85 -78.5996 -151.1 -78.5996c-93.1006 0 -168.9 71.7998 -168.9 160s75.7998 160 168.9 160
+c66.0996 0 117.8 -41.2998 151.1 -78.5996c33.2998 37.2998 85 78.5996 151.1 78.5996zM168.9 128c38.0996 0 73.5996 36.4004 94 64c-20.6006 27.9004 -55.8008 64 -94 64c-40.2002 0 -72.9004 -28.7002 -72.9004 -64s32.7002 -64 72.9004 -64zM471.1 128
+c40.2002 0 72.9004 28.7002 72.9004 64s-32.7002 64 -72.9004 64c-38.0996 0 -73.5996 -36.4004 -94 -64c20.6006 -27.9004 55.8008 -64 94 -64z" />
+ <glyph glyph-name="kiwi-bird" unicode="&#xf535;" horiz-adv-x="576"
+d="M575.81 230.02c0.300781 -5.7793 0.160156 -246.02 0.160156 -246.02c0 -7.30957 -4.95996 -13.7002 -12.0498 -15.5c-1.30957 -0.339844 -2.63965 -0.5 -3.9502 -0.5c-5.75 0 -11.1895 3.11035 -14.0498 8.33984l-74.4102 136.44
+c-7.04004 -0.470703 2.43066 -0.780273 -23.0498 -0.780273c-54.5996 0 -106.39 -19.25 -152.13 -49.0596c-12.46 -8.12012 -26.0996 -14.4502 -40.3398 -19.5107v-59.4297c0 -8.83984 -7.16016 -16 -16 -16h-16c-8.83984 0 -16 7.16016 -16 16v49.1797
+c-5.32031 -0.449219 -10.5605 -1.17969 -16 -1.17969c-16.6006 0 -32.6406 2.2998 -48 6.25977v-54.2598c0 -8.83984 -7.16016 -16 -16 -16h-16c-8.83984 0 -16 7.16016 -16 16v73.9902c-57.3105 33.21 -95.9502 95.0596 -95.9902 166.01
+c-0.0800781 145.76 129.3 182.88 147.31 186.94c57.1709 12.9199 111.221 0.259766 153.21 -28.7002c43.4902 -29.9902 94.9209 -46.2402 147.74 -46.2402h9.37012c60.6504 0 115.01 -45.4102 118.18 -105.98zM463.97 200c13.25 0 24 10.75 24 24
+c0 13.2598 -10.75 24 -24 24s-24 -10.7402 -24 -24c0 -13.25 10.75 -24 24 -24zM543.97 46.75v99.0596c-11.1299 -11.3799 -24.7393 -20.1494 -39.8594 -25.9795z" />
+ <glyph glyph-name="less-than" unicode="&#xf536;" horiz-adv-x="384"
+d="M365.46 90.2598c16.0703 -7.49023 23.0205 -26.5801 15.5303 -42.6396l-13.5605 -29.0801c-7.48926 -16.0596 -26.5801 -23.0098 -42.6396 -15.5205l-306.31 142.83c-11.2705 5.25 -18.4805 16.5605 -18.4805 29v34.2402c0.00488281 11.252 8.2832 24.2432 18.4805 29
+l306.46 142.91c16.0195 7.46973 35.0596 0.540039 42.5293 -15.4805l13.5205 -29c7.46973 -16.0195 0.540039 -35.0596 -15.4805 -42.5293l-218.47 -101.88z" />
+ <glyph glyph-name="less-than-equal" unicode="&#xf537;" horiz-adv-x="448"
+d="M54.9805 233.8c-13.7705 4.52051 -22.9805 16.6104 -22.9805 30.1699v15.96c0 13.5703 9.20996 25.6602 22.9805 30.1807l301.71 119.96c18.29 6 38.5098 -2.53027 45.1494 -19.0605l12.0303 -29.9199c6.63965 -16.5195 -2.81055 -34.79 -21.1006 -40.79
+l-175.56 -68.0898l175.44 -68.0498c18.3896 -6.03027 27.8896 -24.3906 21.21 -41l-12.0908 -30.0801c-6.66992 -16.6104 -26.9893 -25.1797 -45.3799 -19.1504zM424 48c13.25 0 24 -10.7402 24 -24v-48c0 -13.25 -10.75 -24 -24 -24h-400c-13.25 0 -24 10.75 -24 24v48
+c0 13.2598 10.75 24 24 24h400z" />
+ <glyph glyph-name="memory" unicode="&#xf538;" horiz-adv-x="640"
+d="M640 317.06c-18.5996 -6.60938 -32 -24.1895 -32 -45.0596s13.4004 -38.4502 32 -45.0596v-98.9404h-640v98.9404c18.5996 6.60938 32 24.1895 32 45.0596s-13.4004 38.4502 -32 45.0596v34.9404c0 17.6699 14.3301 32 32 32h576c17.6699 0 32 -14.3301 32 -32v-34.9404z
+M224 192v128h-64v-128h64zM352 192v128h-64v-128h64zM480 192v128h-64v-128h64zM0 0v96h640v-96h-64v26.6699c0 8.83984 -7.16016 16 -16 16s-16 -7.16016 -16 -16v-26.6699h-128v26.6699c0 8.83984 -7.16016 16 -16 16s-16 -7.16016 -16 -16v-26.6699h-128v26.6699
+c0 8.83984 -7.16016 16 -16 16s-16 -7.16016 -16 -16v-26.6699h-128v26.6699c0 8.83984 -7.16016 16 -16 16s-16 -7.16016 -16 -16v-26.6699h-64z" />
+ <glyph glyph-name="microphone-alt-slash" unicode="&#xf539;" horiz-adv-x="640"
+d="M633.82 -10.0996c6.97949 -5.43066 8.22949 -15.4805 2.81934 -22.4502l-19.6396 -25.2705c-5.42969 -6.97949 -15.4805 -8.23926 -22.46 -2.80957l-588.36 454.729c-6.97949 5.43066 -8.22949 15.4805 -2.80957 22.4502l19.6396 25.2705
+c5.41992 6.97949 15.4805 8.22949 22.46 2.80957l178.53 -138v45.3604c0 53.0195 42.9805 96 96 96s96 -42.9805 96 -96h-85.3301c-5.88965 0 -10.6699 -3.58008 -10.6699 -8v-16c0 -4.41992 4.78027 -8 10.6699 -8h85.3301v-32h-85.3301
+c-5.88965 0 -10.6699 -3.58008 -10.6699 -8v-16c0 -4.41992 4.78027 -8 10.6699 -8h85.3301v-32h-85.0596l41.3994 -32h43.6699c0 -10.4502 -2.17969 -20.2705 -5.2793 -29.6699l26.5498 -20.5205c6.75977 15.4004 10.7197 32.2803 10.7197 50.2002v48
+c0 8.83984 7.16016 16 16 16h16c8.83984 0 16 -7.16016 16 -16v-48c0 -28.9805 -7.25977 -56.21 -19.7402 -80.3301zM400 -16c8.83984 0 16 -7.16016 16 -16v-16c0 -8.83984 -7.16016 -16 -16 -16h-160c-8.83984 0 -16 7.16016 -16 16v16c0 8.83984 7.16016 16 16 16h56
+v34.1504c-88.0303 12.1396 -152 92.0498 -152 181.689v6.85059l52.0303 -40.2207c12.4395 -53.2197 55.3301 -96.3994 111.18 -101.85c6.94043 -0.669922 13.6396 -0.200195 20.3496 0.200195l50.4004 -38.96c-10.8604 -3.80078 -22.25 -6.45996 -33.96 -8.08008v-33.7803
+h56z" />
+ <glyph glyph-name="money-bill-wave" unicode="&#xf53a;" horiz-adv-x="640"
+d="M621.16 393.54c11.6094 -4.87012 18.8398 -16.8301 18.8496 -29.4697v-317.25c0 -18.4902 -15.04 -31.8105 -32.25 -31.8105c-3.39941 0 -6.87988 0.520508 -10.3496 1.62012c-30.8906 9.7998 -61.7598 13.7197 -92.6504 13.7197
+c-123.17 0 -246.34 -62.3496 -369.51 -62.3496c-38.7998 0 -77.6201 6.19043 -116.41 22.4697c-11.6094 4.86035 -18.8398 16.8301 -18.8398 29.46v317.261c0 18.4893 15.04 31.8096 32.25 31.8096c3.40039 0 6.87988 -0.519531 10.3496 -1.62012
+c30.8906 -9.7998 61.7607 -13.7197 92.6504 -13.7197c123.17 0 246.33 62.3496 369.5 62.3398c38.7998 0 77.6201 -6.19043 116.41 -22.46zM48 315.78v-60.4707c31 0 56.8398 22.1504 62.7197 51.54c-21.5996 1.36035 -42.5996 3.89062 -62.7197 8.93066zM48 30.7803
+c20.1201 -7.31055 41.1797 -11.8105 63.71 -13.6201c-1.53027 34.1299 -29.3398 61.3994 -63.71 61.3994v-47.7793zM320 96c44.1699 0 80 42.9697 80 96c0 53.0195 -35.8203 96 -80 96s-80 -42.9805 -80 -96c0 -53.0098 35.8096 -96 80 -96zM592 68.2197v57.7207
+c-26.8203 -3.40039 -48.4502 -23.2002 -54.3203 -49.2803c18.6104 -1.58984 36.8008 -4.0498 54.3203 -8.44043zM592 304.33v48.8896c-17.7197 6.43066 -36.2695 10.4902 -55.8096 12.6602c0.949219 -31.8496 24.9199 -57.6396 55.8096 -61.5498z" />
+ <glyph glyph-name="money-bill-wave-alt" unicode="&#xf53b;" horiz-adv-x="640"
+d="M621.16 393.54c11.6094 -4.87012 18.8398 -16.8301 18.8496 -29.4697v-317.25c0 -18.4902 -15.04 -31.8105 -32.25 -31.8105c-3.39941 0 -6.87988 0.520508 -10.3496 1.62012c-30.8906 9.7998 -61.7598 13.7197 -92.6504 13.7197
+c-123.17 0 -246.34 -62.3496 -369.51 -62.3496c-38.7998 0 -77.6201 6.19043 -116.41 22.4697c-11.6094 4.86035 -18.8398 16.8301 -18.8398 29.46v317.261c0 18.4893 15.04 31.8096 32.25 31.8096c3.40039 0 6.87988 -0.519531 10.3496 -1.62012
+c30.8906 -9.7998 61.7607 -13.7197 92.6504 -13.7197c123.17 0 246.33 62.3496 369.5 62.3398c38.7998 0 77.6201 -6.19043 116.41 -22.46zM320 96c44.1699 0 80 42.9697 80 96c0 53.0195 -35.8203 96 -80 96s-80 -42.9805 -80 -96c0 -53.0098 35.8096 -96 80 -96z" />
+ <glyph glyph-name="money-check" unicode="&#xf53c;" horiz-adv-x="640"
+d="M0 0v320h640v-320c0 -17.6699 -14.3301 -32 -32 -32h-576c-17.6699 0 -32 14.3301 -32 32zM448 208v-32c0 -8.83984 7.16016 -16 16 -16h96c8.83984 0 16 7.16016 16 16v32c0 8.83984 -7.16016 16 -16 16h-96c-8.83984 0 -16 -7.16016 -16 -16zM448 88v-16
+c0 -4.41992 3.58008 -8 8 -8h112c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8h-112c-4.41992 0 -8 -3.58008 -8 -8zM64 184v-16c0 -4.41992 3.58008 -8 8 -8h304c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8h-304c-4.41992 0 -8 -3.58008 -8 -8z
+M64 88v-16c0 -4.41992 3.58008 -8 8 -8h176c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8h-176c-4.41992 0 -8 -3.58008 -8 -8zM624 416c8.83984 0 16 -7.16016 16 -16v-48h-640v48c0 8.83984 7.16016 16 16 16h608z" />
+ <glyph glyph-name="money-check-alt" unicode="&#xf53d;" horiz-adv-x="640"
+d="M608 416c17.6699 0 32 -14.3301 32 -32v-384c0 -17.6699 -14.3301 -32 -32 -32h-576c-17.6699 0 -32 14.3301 -32 32v384c0 17.6699 14.3301 32 32 32h576zM176 120.12c23.6201 0.629883 42.6699 20.54 42.6699 45.0703c0 19.9697 -12.9902 37.8096 -31.5801 43.3896
+l-45 13.5c-5.16016 1.54004 -8.76953 6.78027 -8.76953 12.7295c0 7.27051 5.2998 13.1904 11.7998 13.1904h28.1104c4.55957 0 8.94922 -1.29004 12.8193 -3.71973c3.24023 -2.03027 7.36035 -1.91016 10.1299 0.729492l11.75 11.21
+c3.53027 3.37012 3.33008 9.20996 -0.569336 12.1406c-9.10059 6.83984 -20.0801 10.7695 -31.3701 11.3496v16.29c0 4.41992 -3.58008 8 -8 8h-16c-4.41992 0 -8 -3.58008 -8 -8v-16.1201c-23.6201 -0.629883 -42.6699 -20.5498 -42.6699 -45.0703
+c0 -19.9697 12.9893 -37.8096 31.5801 -43.3896l45 -13.5c5.15918 -1.54004 8.76953 -6.78027 8.76953 -12.7295c0 -7.27051 -5.2998 -13.1904 -11.7998 -13.1904h-28.1104c-4.55957 0 -8.9502 1.2998 -12.8193 3.71973
+c-3.24023 2.03027 -7.36035 1.91016 -10.1309 -0.729492l-11.75 -11.21c-3.5293 -3.37012 -3.3291 -9.20996 0.570312 -12.1406c9.10059 -6.83008 20.0801 -10.7695 31.3701 -11.3496v-16.29c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v16.1201zM416 136v16
+c0 4.41992 -3.58008 8 -8 8h-112c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h112c4.41992 0 8 3.58008 8 8zM576 136v16c0 4.41992 -3.58008 8 -8 8h-80c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h80c4.41992 0 8 3.58008 8 8z
+M576 232v16c0 4.41992 -3.58008 8 -8 8h-272c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h272c4.41992 0 8 3.58008 8 8z" />
+ <glyph glyph-name="not-equal" unicode="&#xf53e;" horiz-adv-x="448"
+d="M416 240h-98.4199l-74.5303 -96h172.95c17.6699 0 32 -14.3301 32 -32v-32c0 -17.6699 -14.3301 -32 -32 -32h-247.48l-82.0898 -105.73c-5.37012 -7.00977 -15.4102 -8.34961 -22.4297 -2.96973l-25.4102 19.46c-7.00977 5.37988 -8.33984 15.4199 -2.96973 22.4307
+l51.8701 66.8096h-55.4902c-17.6699 0 -32 14.3301 -32 32v32c0 17.6699 14.3301 32 32 32h130.03l74.5293 96h-204.56c-17.6699 0 -32 14.3301 -32 32v32c0 17.6699 14.3301 32 32 32h279.09l82.0801 105.73c5.37988 7.00977 15.4199 8.34961 22.4404 2.96973
+l25.4092 -19.46c7.01074 -5.37012 8.34082 -15.4102 2.9707 -22.4307l-51.8701 -66.8096h23.8799c17.6699 0 32 -14.3301 32 -32v-32c0 -17.6699 -14.3301 -32 -32 -32z" />
+ <glyph glyph-name="palette" unicode="&#xf53f;"
+d="M204.3 443c163.8 31.9004 307.2 -92.0996 307.7 -249.7c-0.0996094 -35.7002 -29.0996 -65.2998 -64.9004 -65.2998h-79.6992c-51 0 -84 -53 -60.9004 -98.4004c18.9004 -37.0996 -1.2998 -85.2998 -42.5 -91.6992c-127.1 -19.7002 -295.8 119.699 -258.8 306.699
+c19.5996 99.1006 99.7002 179 199.1 198.4zM96 128c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM128 256c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM256 320
+c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM384 256c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32z" />
+ <glyph glyph-name="parking" unicode="&#xf540;" horiz-adv-x="448"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM240 128c52.9004 0 96 43.0996 96 96s-43.0996 96 -96 96h-96c-8.7998 0 -16 -7.2002 -16 -16v-224c0 -8.7998 7.2002 -16 16 -16h32
+c8.7998 0 16 7.2002 16 16v48h48zM240 256c17.5996 0 32 -14.4004 32 -32s-14.4004 -32 -32 -32h-48v64h48z" />
+ <glyph glyph-name="percentage" unicode="&#xf541;" horiz-adv-x="384"
+d="M109.25 274.75c-24.9902 -25 -65.5098 -25 -90.5098 0c-24.9902 24.9902 -24.9902 65.5195 0 90.5098s65.5195 24.9902 90.5098 0s24.9902 -65.5195 0 -90.5098zM365.25 109.26c25 -25 25 -65.5195 0 -90.5098c-24.9902 -24.9902 -65.5195 -24.9902 -90.5098 0
+s-24.9902 65.5195 0 90.5098s65.5195 24.9902 90.5098 0zM363.31 340.69c12.5 -12.5 12.5 -32.7607 0 -45.25l-274.75 -274.75c-12.4893 -12.5 -32.75 -12.5 -45.25 0l-22.6191 22.6191c-12.5 12.4902 -12.5 32.75 0 45.25l274.75 274.75c12.4893 12.5 32.75 12.5 45.25 0z
+" />
+ <glyph glyph-name="project-diagram" unicode="&#xf542;" horiz-adv-x="640"
+d="M384 128c17.6699 0 32 -14.3301 32 -32v-128c0 -17.6699 -14.3301 -32 -32 -32h-128c-17.6699 0 -32 14.3301 -32 32v128c0 17.6699 14.3301 32 32 32h128zM192 416v-32h224v-64h-224v-47.5098l64.2803 -112.49h-0.280273c-23.5996 0 -44.0195 -12.9805 -55.1201 -32.04
+l-73.1602 128.04h-95.7197c-17.6699 0 -32 14.3301 -32 32v128c0 17.6699 14.3301 32 32 32h128c17.6699 0 32 -14.3301 32 -32zM608 448c17.6699 0 32 -14.3301 32 -32v-128c0 -17.6699 -14.3301 -32 -32 -32h-128c-17.6699 0 -32 14.3301 -32 32v128
+c0 17.6699 14.3301 32 32 32h128z" />
+ <glyph glyph-name="receipt" unicode="&#xf543;" horiz-adv-x="384"
+d="M358.4 444.8c10.5996 7.90039 25.5996 0.400391 25.5996 -12.7998v-480c0 -13.2002 -15.0996 -20.7002 -25.5996 -12.7998l-38.4004 44.7998l-54.4004 -44.7998c-2.35059 -1.78027 -6.65137 -3.22559 -9.59961 -3.22559s-7.24902 1.44531 -9.59961 3.22559
+l-54.4004 44.7998l-54.4004 -44.7998c-2.35059 -1.78027 -6.65137 -3.22559 -9.59961 -3.22559s-7.24902 1.44531 -9.59961 3.22559l-54.4004 44.7998l-38.4004 -44.7998c-10.5996 -7.90039 -25.5996 -0.400391 -25.5996 12.7998v480c0 13.2002 15 20.7002 25.5996 12.7998
+l38.4004 -44.7998l54.4004 44.7998c2.35059 1.78027 6.65137 3.22559 9.59961 3.22559s7.24902 -1.44531 9.59961 -3.22559l54.4004 -44.7998l54.4004 44.7998c2.35059 1.78027 6.65137 3.22559 9.59961 3.22559s7.24902 -1.44531 9.59961 -3.22559l54.4004 -44.7998z
+M320 88v16c0 4.40039 -3.59961 8 -8 8h-240c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h240c4.40039 0 8 3.59961 8 8zM320 184v16c0 4.40039 -3.59961 8 -8 8h-240c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h240
+c4.40039 0 8 3.59961 8 8zM320 280v16c0 4.40039 -3.59961 8 -8 8h-240c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h240c4.40039 0 8 3.59961 8 8z" />
+ <glyph glyph-name="robot" unicode="&#xf544;" horiz-adv-x="640"
+d="M0 192c0 17.7002 14.2998 32 32 32h32v-192h-32c-17.7002 0 -32 14.2998 -32 32v128zM464 352c44.2002 0 80 -35.7998 80 -80v-272c0 -35.2998 -28.7002 -64 -64 -64h-320c-35.2998 0 -64 28.7002 -64 64v272c0 44.2002 35.7998 80 80 80h112v64
+c0 17.7002 14.2998 32 32 32s32 -14.2998 32 -32v-64h112zM256 32v32h-64v-32h64zM224 152c22.0996 0 40 17.9004 40 40s-17.9004 40 -40 40s-40 -17.9004 -40 -40s17.9004 -40 40 -40zM352 32v32h-64v-32h64zM448 32v32h-64v-32h64zM416 152c22.0996 0 40 17.9004 40 40
+s-17.9004 40 -40 40s-40 -17.9004 -40 -40s17.9004 -40 40 -40zM608 224c17.7002 0 32 -14.2998 32 -32v-128c0 -17.7002 -14.2998 -32 -32 -32h-32v192h32z" />
+ <glyph glyph-name="ruler" unicode="&#xf545;" horiz-adv-x="640"
+d="M635.7 280.8c8.7998 -15 3.59961 -34.2002 -11.6006 -42.7998l-496.8 -281.9c-15.2002 -8.59961 -34.7002 -3.5 -43.5 11.5l-79.5996 135.601c-8.7998 15 -3.5 34.0996 11.7002 42.7998l69 39.0996l59.6992 -101.399c2.2002 -3.7998 7.10059 -5.10059 10.9004 -2.90039
+l13.7998 7.7998c3.7998 2.2002 5.10059 7 2.90039 10.7002l-59.7002 101.7l55.2002 31.2998l27.8994 -47.5c2.2002 -3.7998 7.10059 -5.09961 10.9004 -2.89941l13.7998 7.7998c3.7998 2.2002 5.10059 6.89941 2.90039 10.7002l-27.9004 47.3994l55.2002 31.2998
+l59.7002 -101.699c2.2002 -3.80078 7.09961 -5.10059 10.8994 -2.90039l13.8008 7.7998c3.7998 2.2002 5.09961 7 2.89941 10.7002l-59.7998 101.7l55.2002 31.2998l27.8994 -47.2998c2.2002 -3.7998 7.10059 -5.10059 10.9004 -2.90039l13.7998 7.7998
+c3.7998 2.2002 5.10059 6.90039 2.90039 10.7002l-27.9004 47.4004l55.2002 31.2998l59.7002 -101.6c2.2002 -3.80078 7.09961 -5.10059 10.8994 -2.90039l13.8008 7.7998c3.7998 2.2002 5.09961 6.90039 2.89941 10.7002l-59.7002 101.7l69 39.0996
+c15.2002 8.60059 34.7002 3.5 43.5 -11.5z" />
+ <glyph glyph-name="ruler-combined" unicode="&#xf546;"
+d="M160 160v-41.3799l-158.43 -158.42c-0.660156 2.55957 -1.57031 5.03027 -1.57031 7.7998v448c0 17.6699 14.3301 32 32 32h96c17.6699 0 32 -14.3301 32 -32v-32h-56c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h56v-64h-56
+c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h56v-64h-56c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h56zM480 96c17.6699 0 32 -14.3301 32 -32v-96c0 -17.6699 -14.3301 -32 -32 -32h-448
+c-2.75977 0 -5.24023 0.910156 -7.7998 1.57031l158.43 158.43h41.3701v-56c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v56h64v-56c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v56h64v-56c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8
+v56h32z" />
+ <glyph glyph-name="ruler-horizontal" unicode="&#xf547;" horiz-adv-x="576"
+d="M544 320c17.6699 0 32 -14.3301 32 -32v-192c0 -17.6699 -14.3301 -32 -32 -32h-512c-17.6699 0 -32 14.3301 -32 32v192c0 17.6699 14.3301 32 32 32h48v-88c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v88h64v-88c0 -4.41992 3.58008 -8 8 -8h16
+c4.41992 0 8 3.58008 8 8v88h64v-88c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v88h64v-88c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v88h64v-88c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v88h48z" />
+ <glyph glyph-name="ruler-vertical" unicode="&#xf548;" horiz-adv-x="256"
+d="M168 32h88v-64c0 -17.6699 -14.3301 -32 -32 -32h-192c-17.6699 0 -32 14.3301 -32 32v448c0 17.6699 14.3301 32 32 32h192c17.6699 0 32 -14.3301 32 -32v-64h-88c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h88v-64h-88
+c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h88v-64h-88c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h88v-64h-88c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8z" />
+ <glyph glyph-name="school" unicode="&#xf549;" horiz-adv-x="640"
+d="M0 224c0 17.6699 14.3301 32 32 32h64v-320h-80c-8.83984 0 -16 7.16016 -16 16v272zM360 272c4.41992 0 8 -3.58008 8 -8v-16c0 -4.41992 -3.58008 -8 -8 -8h-48c-4.41992 0 -8 3.58008 -8 8v64c0 4.41992 3.58008 8 8 8h16c4.41992 0 8 -3.58008 8 -8v-40h24z
+M497.75 335.96c8.90039 -5.92969 14.25 -15.9297 14.25 -26.6299v-373.33h-128v144c0 8.83984 -7.16016 16 -16 16h-96c-8.83984 0 -16 -7.16016 -16 -16v-144h-128v373.34c0.00195312 9.45117 6.38574 21.377 14.25 26.6201l160 106.67
+c4.4502 2.96484 12.4023 5.37012 17.75 5.37012s13.2998 -2.40527 17.75 -5.37012zM320 192c44.1797 0 80 35.8203 80 80s-35.8203 80 -80 80s-80 -35.8203 -80 -80s35.8203 -80 80 -80zM608 256c17.6699 0 32 -14.3301 32 -32v-272c0 -8.83984 -7.16016 -16 -16 -16h-80
+v320h64z" />
+ <glyph glyph-name="screwdriver" unicode="&#xf54a;"
+d="M448 448l64 -64l-96 -128h-62.0596l-83.0303 -83.0303c-4.25 6.79004 -9.07031 13.2705 -14.8701 19.0703c-5.7998 5.80957 -12.2803 10.6201 -19.0703 14.8701l83.0303 83.0303v62.0596zM128 169.41c29.1104 29.1094 76.2998 29.1094 105.41 0
+c29.1094 -29.1104 29.1094 -76.2998 0 -105.41l-117.08 -117.08c-14.5605 -14.5596 -38.1504 -14.5596 -52.71 0l-52.7002 52.7002c-14.5498 14.5596 -14.5498 38.1602 0 52.71z" />
+ <glyph glyph-name="shoe-prints" unicode="&#xf54b;" horiz-adv-x="640"
+d="M192 288c-35.3496 0 -64 28.6504 -64 64s28.6504 64 64 64h32v-128h-32zM0 32c0 35.3496 28.6504 64 64 64h32v-128h-32c-35.3496 0 -64 28.6504 -64 64zM337.46 160c90.3604 0 174.54 -32 174.54 -96c0 -45.2197 -39.0996 -97.2998 -148.58 -120.82
+c-31.9795 -6.87012 -64.8896 -8.50977 -97.5801 -6.19922c-27.2998 1.92969 -54.0996 7.76953 -80.3096 15.0498l-57.5303 15.9697v128c60.21 0 79.9404 15.6201 104.73 32c28.5693 18.8799 69.8193 32 104.729 32zM491.42 440.81
+c109.48 -23.5098 148.58 -75.5898 148.58 -120.81c0 -64 -84.1797 -96 -174.54 -96c-34.9102 0 -76.1602 13.1201 -104.729 32c-24.79 16.3799 -44.5303 32 -104.73 32v128l57.5303 15.9697c26.21 7.27051 53.0098 13.1104 80.3096 15.04
+c32.6904 2.32031 65.6006 0.669922 97.5801 -6.2002z" />
+ <glyph glyph-name="skull" unicode="&#xf54c;"
+d="M256 448c141.4 0 256 -100.3 256 -224c0 -70.0996 -36.9004 -132.6 -94.5 -173.7c-9.7002 -6.89941 -15.2002 -18.2002 -13.5 -29.8994l9.40039 -66.2002c1.39941 -9.60059 -6 -18.2002 -15.7002 -18.2002h-77.7002v56c0 4.40039 -3.59961 8 -8 8h-16
+c-4.40039 0 -8 -3.59961 -8 -8v-56h-64v56c0 4.40039 -3.59961 8 -8 8h-16c-4.40039 0 -8 -3.59961 -8 -8v-56h-77.7002c-9.7002 0 -17.0996 8.59961 -15.7002 18.2002l9.40039 66.2002c1.7002 11.7998 -3.90039 23 -13.5 29.8994
+c-57.5996 41.1006 -94.5 103.601 -94.5 173.7c0 123.7 114.6 224 256 224zM160 128c35.2998 0 64 28.7002 64 64s-28.7002 64 -64 64s-64 -28.7002 -64 -64s28.7002 -64 64 -64zM352 128c35.2998 0 64 28.7002 64 64s-28.7002 64 -64 64s-64 -28.7002 -64 -64
+s28.7002 -64 64 -64z" />
+ <glyph glyph-name="smoking-ban" unicode="&#xf54d;"
+d="M96 144v64c0 8.7998 7.2002 16 16 16h21.5l96 -96h-117.5c-8.7998 0 -16 7.2002 -16 16zM256 448c141.4 0 256 -114.6 256 -256s-114.6 -256 -256 -256s-256 114.6 -256 256s114.6 256 256 256zM256 0c41.4004 0 79.7002 13.2998 111.1 35.7002l-267.399 267.399
+c-22.4004 -31.3994 -35.7002 -69.6992 -35.7002 -111.1c0 -105.9 86.0996 -192 192 -192zM301.2 192l32 -32h50.7998v32h-82.7998zM412.3 80.9004c22.4004 31.3994 35.7002 69.6992 35.7002 111.1c0 105.9 -86.0996 192 -192 192
+c-41.4004 0 -79.7002 -13.2998 -111.1 -35.7002l124.3 -124.3h130.8c8.7998 0 16 -7.2002 16 -16v-64c0 -8.7998 -7.2002 -16 -16 -16h-34.7998zM320.6 320c32.1006 0 58.7002 -23.7002 63.3008 -54.5996c0.699219 -4.90039 -3 -9.40039 -8 -9.40039h-16.2002
+c-3.7002 0 -7 2.5 -7.7002 6.09961c-2.7998 14.7002 -15.7998 25.9004 -31.4004 25.9004c-32.0996 0 -58.6992 23.7002 -63.2998 54.5996c-0.700195 4.90039 3 9.40039 8 9.40039h16.2002c3.7002 0 7 -2.5 7.7002 -6.09961
+c2.7998 -14.7002 15.7998 -25.9004 31.3994 -25.9004z" />
+ <glyph glyph-name="store" unicode="&#xf54e;" horiz-adv-x="616"
+d="M602 329.4c33.5996 -53.6006 3.7998 -128 -59 -136.4c-4.5 -0.599609 -9 -0.900391 -13.7002 -0.900391c-29.5 0 -55.7002 13 -73.7998 33.1006c-18 -20.1006 -44.2002 -33.1006 -73.7998 -33.1006c-29.5 0 -55.7998 13 -73.7998 33.1006
+c-18 -20.1006 -44.2002 -33.1006 -73.8008 -33.1006c-29.5 0 -55.7998 13 -73.7998 33.1006c-18 -20.1006 -44.2002 -33.1006 -73.7998 -33.1006c-4.59961 0 -9.2002 0.300781 -13.7002 0.900391c-62.5996 8.5 -92.2998 82.9004 -58.7998 136.4l64.9004 103.6
+c5.7998 9.2998 16.0996 15 27.0996 15h404c11 0 21.2998 -5.7002 27.0996 -15zM529.5 160c6.09961 0 12.0996 0.400391 18.2002 1.2002c5.59961 0.700195 11 2 16.3994 3.59961v-196.8c0 -17.7002 -14.2998 -32 -32 -32h-448c-17.6992 0 -32 14.2998 -32 32v196.8
+c5.30078 -1.5 10.8008 -2.7998 16.4004 -3.59961c5.90039 -0.799805 12 -1.2002 18 -1.2002c10 0 19.9004 1.59961 29.5 3.7998v-99.7998h384v99.7998c9.59961 -2.2998 19.5 -3.7998 29.5 -3.7998z" />
+ <glyph glyph-name="store-alt" unicode="&#xf54f;" horiz-adv-x="640"
+d="M320 64v160h64v-256c0 -17.7002 -14.2998 -32 -32 -32h-256c-17.7002 0 -32 14.2998 -32 32v256h64v-160h192zM634.6 305.8c14.1006 -21.2998 -1.09961 -49.7998 -26.5996 -49.7998h-575.9c-25.5996 0 -40.7998 28.5 -26.5996 49.7998l85.2998 128
+c5.90039 8.90039 15.9004 14.2002 26.6006 14.2002h405.199c10.7002 0 20.7002 -5.2998 26.7002 -14.2002zM512 -48v272h64v-272c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16z" />
+ <glyph glyph-name="stream" unicode="&#xf550;"
+d="M16 320c-8.83984 0 -16 7.16016 -16 16v64c0 8.83984 7.16016 16 16 16h416c8.83984 0 16 -7.16016 16 -16v-64c0 -8.83984 -7.16016 -16 -16 -16h-416zM496 240c8.83984 0 16 -7.16016 16 -16v-64c0 -8.83984 -7.16016 -16 -16 -16h-416c-8.83984 0 -16 7.16016 -16 16
+v64c0 8.83984 7.16016 16 16 16h416zM432 64c8.83984 0 16 -7.16016 16 -16v-64c0 -8.83984 -7.16016 -16 -16 -16h-416c-8.83984 0 -16 7.16016 -16 16v64c0 8.83984 7.16016 16 16 16h416z" />
+ <glyph glyph-name="stroopwafel" unicode="&#xf551;"
+d="M188.12 237.26l45.25 -45.2598l-45.2598 -45.25l-45.25 45.25zM301.25 259.88l-45.25 -45.25l-45.25 45.2598l45.25 45.25zM210.75 124.12l45.25 45.25l45.2598 -45.25l-45.2598 -45.2598zM256 448c141.38 0 256 -114.62 256 -256s-114.62 -256 -256 -256
+s-256 114.62 -256 256s114.62 256 256 256zM442.68 152.4c1.29199 1.29492 2.34082 3.83008 2.34082 5.65918c0 1.83008 -1.04883 4.36523 -2.34082 5.66016l-28.29 28.2803l28.2705 28.3096c3.12012 3.12012 3.12012 8.19043 0 11.3105l-11.3105 11.3096
+c-3.11914 3.12012 -8.18945 3.12012 -11.3096 0l-28.29 -28.29l-45.25 45.2607l33.9404 33.9395l16.9697 -16.9697c3.12012 -3.12012 8.18945 -3.12012 11.3096 0l11.3105 11.3096c3.12012 3.12012 3.12012 8.19043 0 11.3105l-16.9707 16.9697l16.9707 16.9697
+c3.12012 3.12012 3.12012 8.19043 0 11.3105l-11.3105 11.3096c-3.12012 3.12012 -8.18945 3.12012 -11.3096 0l-16.9697 -16.9697l-16.9707 16.9697c-3.12012 3.12012 -8.18945 3.12012 -11.3096 0l-11.3105 -11.3096c-3.11914 -3.12012 -3.11914 -8.19043 0 -11.3105
+l16.9707 -16.9697l-33.9404 -33.9404l-45.2598 45.25l28.29 28.29c3.12012 3.12012 3.12012 8.19043 0 11.3105l-11.3105 11.3096c-3.11914 3.12012 -8.18945 3.12012 -11.3096 0l-28.29 -28.29l-28.3096 28.2705c-3.12012 3.12012 -8.19043 3.12012 -11.3105 0
+l-11.3096 -11.3105c-3.12012 -3.11914 -3.12012 -8.18945 0 -11.3096l28.29 -28.29l-45.2607 -45.25l-33.9395 33.9404l16.9697 16.9697c3.12012 3.12012 3.12012 8.18945 0 11.3096l-11.3096 11.3105c-3.12012 3.12012 -8.19043 3.12012 -11.3105 0l-16.9697 -16.9707
+l-16.9697 16.9707c-3.12012 3.12012 -8.19043 3.12012 -11.3105 0l-11.3096 -11.3105c-3.12012 -3.12012 -3.12012 -8.18945 0 -11.3096l16.9697 -16.9697l-16.9697 -16.9707c-3.12012 -3.12012 -3.12012 -8.18945 0 -11.3096l11.3096 -11.3105
+c3.12012 -3.11914 8.19043 -3.11914 11.3105 0l16.9697 16.9707l33.9404 -33.9404l-45.25 -45.2598l-28.29 28.29c-3.12012 3.12012 -8.19043 3.12012 -11.3105 0l-11.3096 -11.3105c-3.12012 -3.11914 -3.12012 -8.18945 0 -11.3096l28.29 -28.29l-28.29 -28.29
+c-3.12012 -3.12012 -3.12012 -8.19043 0 -11.3096l11.3398 -11.3301c3.12012 -3.12012 8.18945 -3.12012 11.3096 0l28.29 28.29l45.25 -45.25l-33.9395 -33.9404l-16.9707 16.9697c-3.11914 3.12012 -8.18945 3.12012 -11.3096 0l-11.3096 -11.3096
+c-3.12012 -3.12012 -3.12012 -8.19043 0 -11.3105l16.9697 -16.9697l-16.9697 -16.9697c-3.12012 -3.12012 -3.12012 -8.19043 0 -11.3105l11.3096 -11.3096c3.12012 -3.12012 8.19043 -3.12012 11.3096 0l16.9707 16.9697l16.9697 -16.9697
+c3.12012 -3.12012 8.19043 -3.12012 11.3096 0l11.3105 11.3096c3.12012 3.12012 3.12012 8.19043 0 11.3105l-16.9697 16.9697l33.9395 33.9404l45.25 -45.2598l-28.29 -28.29c-3.12012 -3.12012 -3.12012 -8.19043 0 -11.3105l11.3105 -11.3096
+c3.11914 -3.12012 8.18945 -3.12012 11.3096 0l28.29 28.29l28.3096 -28.2705c3.12012 -3.12012 8.19043 -3.12012 11.3105 0l11.3096 11.3105c3.12012 3.11914 3.12012 8.18945 0 11.3096l-28.29 28.29l45.2607 45.2598l33.9395 -33.9395l-16.9697 -16.9707
+c-3.12012 -3.11914 -3.12012 -8.18945 0 -11.3096l11.3096 -11.3096c3.12012 -3.12012 8.19043 -3.12012 11.3105 0l16.9697 16.9697l16.9697 -16.9697c3.12012 -3.12012 8.19043 -3.12012 11.3105 0l11.3096 11.3096c3.12012 3.12012 3.12012 8.19043 0 11.3096
+l-16.9697 16.9707l16.9697 16.9697c3.12012 3.12012 3.12012 8.19043 0 11.3096l-11.3096 11.3105c-3.12012 3.12012 -8.19043 3.12012 -11.3105 0l-16.9697 -16.9697l-33.9404 33.9395l45.25 45.25l28.29 -28.29c3.12012 -3.12012 8.19043 -3.12012 11.3105 0zM278.63 192
+l45.2598 45.2598l45.25 -45.2598l-45.25 -45.25z" />
+ <glyph glyph-name="toolbox" unicode="&#xf552;"
+d="M502.63 233.37c6 -6 9.37012 -14.1396 9.37012 -22.6201v-82.75h-128v16c0 8.83984 -7.16016 16 -16 16h-32c-8.83984 0 -16 -7.16016 -16 -16v-16h-128v16c0 8.83984 -7.16016 16 -16 16h-32c-8.83984 0 -16 -7.16016 -16 -16v-16h-128v82.7402
+c0 8.49023 3.37012 16.6299 9.37012 22.6299l45.25 45.2598c6.00977 6 14.1396 9.37012 22.6299 9.37012h50.75v80c0 26.5098 21.4902 48 48 48h160c26.5098 0 48 -21.4902 48 -48v-80.0098h50.75c8.49023 0 16.6299 -3.37012 22.6299 -9.37012zM320 288v64h-128v-64h128z
+M384 80v16h128v-96c0 -17.6699 -14.3301 -32 -32 -32h-448c-17.6699 0 -32 14.3301 -32 32v96h128v-16c0 -8.83984 7.16016 -16 16 -16h32c8.83984 0 16 7.16016 16 16v16h128v-16c0 -8.83984 7.16016 -16 16 -16h32c8.83984 0 16 7.16016 16 16z" />
+ <glyph glyph-name="tshirt" unicode="&#xf553;" horiz-adv-x="640"
+d="M631.2 351.5c7.89941 -3.90039 11.0996 -13.5996 7.09961 -21.5l-57.2998 -114.4c-4 -8 -13.5996 -11.1992 -21.5 -7.19922l-56.5996 27.6992c-10.6006 5.2002 -23 -2.59961 -23 -14.3994v-253.7c0 -17.7002 -14.3008 -32 -32 -32h-256c-17.7002 0 -32 14.2998 -32 32
+v253.6c0 11.9004 -12.4004 19.6006 -23 14.4004l-56.6006 -27.7002c-7.89941 -3.89941 -17.5 -0.700195 -21.5 7.2002l-57.2002 114.5c-3.89941 7.90039 -0.699219 17.5 7.2002 21.5l194.7 96.5c20.0996 -27.7998 64.5996 -47.2002 116.5 -47.2002
+s96.4004 19.4004 116.5 47.2002z" />
+ <glyph glyph-name="walking" unicode="&#xf554;" horiz-adv-x="320"
+d="M208 352c-26.5 0 -48 21.5 -48 48s21.5 48 48 48s48 -21.5 48 -48s-21.5 -48 -48 -48zM302.5 202.9c15.7002 -7.90039 22 -27.3008 14.2998 -43c-7.89941 -15.8008 -26.8994 -22.2002 -42.5 -14.3008l-23.2998 11.8008c-15 7.59961 -26.4004 20.8994 -31.7002 37
+l-5.5 16.5l-17.2002 -68.7002l45.5 -49.7002c7.2002 -7.7998 12.3008 -17.4004 14.9004 -27.7002l22.2002 -89c4.2998 -17.0996 -6.2002 -34.5 -23.2998 -38.7998c-17.1006 -4.2998 -34.5 6.2002 -38.8008 23.2998l-18.2998 73.2998
+c-2.59961 10.3008 -7.7002 19.8008 -14.8994 27.7002l-59.9004 65.4004c-14.5 15.8994 -20.0996 38 -14.9004 58.7998l15.2002 60.7002l-26.7998 -10.7998c-7.2002 -2.90039 -13 -8.40039 -16.5 -15.4004l-6.7002 -13.5996
+c-7.89941 -15.8008 -26.8994 -22.2002 -42.5 -14.3008c-15.7002 7.90039 -22 27.1006 -14.2002 42.9004l6.7002 13.5996c10.4004 21 28.1006 37.5 49.7002 46.2002c37.4004 15.1006 57.2998 25.2998 93.2998 25.2002c46.5 -0.0996094 87.5 -31.2998 102.2 -75.9004
+l9.7002 -29.3994zM73.5996 62.2002l20.7002 51.5c8.7002 -11.9004 -7.89941 6.59961 47.4004 -53.7002l-13.5 -33.7998c-3.2998 -8.10059 -8.10059 -15.4004 -14.2002 -21.5l-59.4004 -59.4004c-12.5 -12.5 -32.6992 -12.5 -45.1992 0s-12.5 32.7998 0 45.2998l50 50.1006
+c6.19922 6.09961 11 13.3994 14.1992 21.5z" />
+ <glyph glyph-name="wallet" unicode="&#xf555;"
+d="M461.2 320c28.0195 0 50.7998 -21.5303 50.7998 -48v-256c0 -26.4697 -22.7803 -48 -50.7998 -48h-397.2c-35.3496 0 -64 28.6504 -64 64v320c0 35.3496 28.6504 64 64 64h368c26.5098 0 48 -21.4902 48 -48c0 -8.83984 -7.16016 -16 -16 -16h-384
+c-8.83984 0 -16 -7.16016 -16 -16s7.16016 -16 16 -16h381.2zM416 112c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="angry" unicode="&#xf556;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM136 208c0 -17.7002 14.4004 -32 32.0996 -32c17.7002 0 32 14.2998 32 32c0 2.90039 -1 5.5 -1.69922 8.2002c0.599609 0 1.19922 -0.200195 1.69922 -0.200195
+c6.90039 0 13.2002 4.5 15.3008 11.4004c2.59961 8.39941 -2.2002 17.3994 -10.7002 19.8994l-80 24c-8.5 2.5 -17.4004 -2.2002 -19.9004 -10.7002c-2.59961 -8.39941 2.2002 -17.3994 10.7002 -19.8994l31 -9.2998c-6.40039 -5.90039 -10.5 -14.1006 -10.5 -23.4004z
+M304 53.7998c13.4004 -16.0996 38.2998 4 24.5 20.5c-20 24 -49.4004 37.7998 -80.5996 37.7998c-31.2002 0 -60.6006 -13.7998 -80.6006 -37.7998c-13.5996 -16.2998 11.1006 -36.7998 24.6006 -20.5c27.8994 33.4004 84.2998 33.4004 112.1 0zM380.6 240.7
+c8.5 2.5 13.3008 11.3994 10.8008 19.8994c-2.5 8.40039 -11.5 13.2002 -19.9004 10.7002l-80 -24c-8.40039 -2.5 -13.2002 -11.3994 -10.7002 -19.8994c2.10059 -6.90039 8.40039 -11.4004 15.2998 -11.4004c0.600586 0 1.10059 0.0996094 1.7002 0.200195
+c-0.799805 -2.60059 -1.7002 -5.2998 -1.7002 -8.2002c0 -17.7002 14.3008 -32 32 -32c17.7002 0 32 14.2998 32 32c0 9.2998 -4.19922 17.5996 -10.5 23.4004z" />
+ <glyph glyph-name="archway" unicode="&#xf557;" horiz-adv-x="576"
+d="M560 0c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-159.98c-8.83984 0 -16 7.16016 -16 16v16l-0.0195312 160c0 53.0195 -42.9805 96 -96 96s-96 -42.9805 -96 -96v-176c0 -8.83984 -7.16016 -16 -16 -16h-159.98
+c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h15.9805v352h512v-352h16zM560 448c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-544c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h544z" />
+ <glyph glyph-name="atlas" unicode="&#xf558;" horiz-adv-x="448"
+d="M318.38 240c-5.28027 -31.2197 -25.5898 -57.1699 -53.2998 -70.4102c7.66992 19.0605 12.7197 43.3799 14.21 70.4102h39.0898zM318.38 272h-39.0898c-1.49023 27.0303 -6.53027 51.3496 -14.21 70.4102c27.71 -13.2402 48.0098 -39.1904 53.2998 -70.4102zM224 350.69
+c7.69043 -7.4502 20.7695 -34.4307 23.4404 -78.6904h-46.8701c2.66016 44.2695 15.7393 71.2402 23.4297 78.6904zM182.92 342.41c-7.67969 -19.0605 -12.7197 -43.3799 -14.21 -70.4102h-39.0898c5.28027 31.2197 25.5898 57.1699 53.2998 70.4102zM182.92 169.59
+c-27.71 13.2402 -48.0195 39.1904 -53.2998 70.4102h39.0898c1.49023 -27.0303 6.53027 -51.3496 14.21 -70.4102zM247.43 240c-2.66016 -44.2598 -15.7393 -71.2402 -23.4395 -78.6904c-7.69043 7.4502 -20.7705 34.4307 -23.4307 78.6904h46.8701zM448 89.5996
+c0 -9.59961 -3.2002 -16 -9.59961 -19.1992c-3.2002 -12.8008 -3.2002 -57.6006 0 -73.6006c6.39941 -6.39941 9.59961 -12.7998 9.59961 -19.2002v-16c0 -16 -12.7998 -25.5996 -25.5996 -25.5996h-326.4c-54.4004 0 -96 41.5996 -96 96v320c0 54.4004 41.5996 96 96 96
+h326.4c16 0 25.5996 -9.59961 25.5996 -25.5996v-332.801zM224 384c-70.6904 0 -128 -57.3096 -128 -128s57.3096 -128 128 -128s128 57.3096 128 128s-57.3096 128 -128 128zM384 0v64h-288c-16 0 -32 -12.7998 -32 -32s12.7998 -32 32 -32h288z" />
+ <glyph glyph-name="award" unicode="&#xf559;" horiz-adv-x="384"
+d="M97.1201 85.3701c13.79 -13.7803 32.1104 -21.3701 51.6104 -21.3701c12.4395 0 24.4697 3.54004 35.3096 9.58008l-52.0498 -127.62c-4.39062 -10.7695 -18.4307 -13.4004 -26.4307 -4.95996l-36.2393 38.2803l-52.6904 -2.01074
+c-11.6201 -0.439453 -19.8203 11.2607 -15.4297 22.0303l45.3701 111.24c7.55957 -5.87012 15.9199 -10.7705 25.4297 -13.3203c20.96 -5.60938 16.4297 -3.16016 25.1201 -11.8496zM382.8 -0.700195c4.39062 -10.7598 -3.80957 -22.4697 -15.4297 -22.0303
+l-52.6904 2.01074l-36.25 -38.2803c-7.98926 -8.44043 -22.04 -5.80957 -26.4297 4.95996l-52.0498 127.62c10.8398 -6.03027 22.8701 -9.58008 35.3096 -9.58008c19.5 0 37.8301 7.58984 51.6201 21.3701c8.66992 8.66992 4.0498 6.20996 25.1201 11.8496
+c9.50977 2.5498 17.8701 7.44043 25.4297 13.3203zM263 108c-13.2305 -13.4697 -33.8398 -15.8799 -49.7305 -5.82031c-5.37305 3.41211 -14.8994 6.18066 -21.2646 6.18066c-6.36426 0 -15.8916 -2.76855 -21.2646 -6.18066
+c-15.9004 -10.0596 -36.5098 -7.64941 -49.7402 5.82031c-14.7305 15 -16.4004 14.04 -38.7803 20.1396c-13.8896 3.79004 -24.75 14.8408 -28.4697 28.9805c-7.48047 28.3994 -5.54004 24.9697 -25.9502 45.75c-10.1699 10.3604 -14.1396 25.4502 -10.4199 39.5898
+c7.48047 28.4199 7.46973 24.46 0 52.8203c-3.72949 14.1396 0.25 29.2295 10.4199 39.5801c20.4102 20.7793 18.4805 17.3594 25.9502 45.75c3.71973 14.1396 14.5801 25.1895 28.4697 28.9795c27.8906 7.61035 24.5303 5.62988 44.9404 26.4102
+c10.1699 10.3604 25 14.4004 38.8896 10.6104c27.9199 -7.61035 24.0303 -7.60059 51.9004 0c13.8896 3.79004 28.7197 -0.260742 38.8896 -10.6104c20.4297 -20.79 17.0703 -18.7998 44.9502 -26.4102c13.8896 -3.79004 24.75 -14.8398 28.4697 -28.9795
+c7.48047 -28.3906 5.54004 -24.9707 25.9502 -45.75c10.1699 -10.3506 14.1396 -25.4404 10.4199 -39.5801c-7.47949 -28.4102 -7.46973 -24.4502 0 -52.8301c3.71973 -14.1406 -0.25 -29.2305 -10.4199 -39.5801c-20.4102 -20.7803 -18.4697 -17.3506 -25.9502 -45.75
+c-3.71973 -14.1396 -14.5801 -25.1904 -28.4697 -28.9805c-21.7598 -5.92969 -23.5098 -4.58984 -38.79 -20.1396zM97.6602 272.04c0 -53.0303 42.2402 -96.0205 94.3398 -96.0205s94.3398 42.9902 94.3398 96.0205s-42.2402 96.0195 -94.3398 96.0195
+s-94.3398 -42.9893 -94.3398 -96.0195z" />
+ <glyph glyph-name="backspace" unicode="&#xf55a;" horiz-adv-x="640"
+d="M576 384c35.3496 0 64 -28.6504 64 -64v-256c0 -35.3496 -28.6504 -64 -64 -64h-370.75c-16.9697 0 -33.25 6.75 -45.25 18.75l-150.63 150.63c-12.5 12.4902 -12.5 32.75 0 45.25l150.63 150.62c10.3438 10.3496 30.6143 18.75 45.2471 18.75h0.0126953h370.74z
+M491.31 129.94l-62.0596 62.0596l62.0596 62.0596c6.25 6.25 6.25 16.3809 0 22.6309l-22.6191 22.6191c-6.25 6.25 -16.3809 6.25 -22.6309 0l-62.0596 -62.0596l-62.0596 62.0596c-6.25 6.25 -16.3809 6.25 -22.6309 0l-22.6191 -22.6191
+c-6.25 -6.25 -6.25 -16.3809 0 -22.6309l62.0596 -62.0596l-62.0596 -62.0596c-6.25 -6.25 -6.25 -16.3809 0 -22.6309l22.6191 -22.6191c6.25 -6.25 16.3809 -6.25 22.6309 0l62.0596 62.0596l62.0596 -62.0596c6.25 -6.25 16.3809 -6.25 22.6309 0l22.6191 22.6191
+c6.25 6.25 6.25 16.3809 0 22.6309z" />
+ <glyph glyph-name="bezier-curve" unicode="&#xf55b;" horiz-adv-x="640"
+d="M368 416c17.6699 0 32 -14.3301 32 -32v-96c0 -17.6699 -14.3301 -32 -32 -32h-96c-17.6699 0 -32 14.3301 -32 32v96c0 17.6699 14.3301 32 32 32h96zM208 360v-72c0 -9.4502 2.17969 -18.3604 5.87988 -26.4404c-34.2695 -24.3496 -59.7402 -59.9492 -71.04 -101.56
+h-49.3994c13.6797 64.6797 54.1699 119.48 109.54 152h-79.7305c-9.5 -23.4404 -32.4102 -40 -59.25 -40c-35.3398 0 -64 28.6504 -64 64s28.6602 64 64 64c26.8398 0 49.75 -16.5596 59.25 -40h84.75zM160 128c17.6699 0 32 -14.3301 32 -32v-96
+c0 -17.6699 -14.3301 -32 -32 -32h-96c-17.6699 0 -32 14.3301 -32 32v96c0 17.6699 14.3301 32 32 32h96zM576 400c35.3398 0 64 -28.6504 64 -64s-28.6602 -64 -64 -64c-26.8398 0 -49.75 16.5596 -59.25 40h-79.7305c55.3701 -32.5195 95.8604 -87.3203 109.54 -152
+h-49.3994c-11.2998 41.6104 -36.7705 77.21 -71.04 101.56c3.68945 8.08008 5.87988 16.9902 5.87988 26.4404v72h84.75c9.5 23.4404 32.4102 40 59.25 40zM576 128c17.6699 0 32 -14.3301 32 -32v-96c0 -17.6699 -14.3301 -32 -32 -32h-96c-17.6699 0 -32 14.3301 -32 32
+v96c0 17.6699 14.3301 32 32 32h96z" />
+ <glyph glyph-name="bong" unicode="&#xf55c;" horiz-adv-x="447"
+d="M302.5 -64h-221c-23.1699 0 -44.4199 12.5801 -56 32.6602c-16.1904 28.0703 -25.5 60.5898 -25.5 95.3398c0 71.0303 38.6699 132.9 96 166.11v153.93l-15.9697 -0.0205078c-8.85059 -0.00976562 -16.0303 7.16016 -16.0205 16.0107l0.0400391 31.7295
+c0.00976562 8.82031 7.16016 15.9707 15.9805 15.9805l223.939 0.259766c8.85059 0.00976562 16.0303 -7.16992 16.0205 -16.0195l-0.0400391 -31.7207c-0.00976562 -8.81934 -7.16016 -15.9697 -15.9805 -15.9795l-15.9697 -0.0205078v-154.16
+c14.1201 -8.17969 27.0898 -18.1396 38.6504 -29.5098l39.4092 39.4102l-9.37988 9.37988c-6.25 6.25 -6.25 16.3799 0 22.6299l11.3105 11.3105c6.25 6.25 16.3799 6.25 22.6299 0l52.6895 -52.6904c6.25 -6.25 6.25 -16.3799 0 -22.6299l-11.2998 -11.3203
+c-6.25 -6.25 -16.3799 -6.25 -22.6299 0l-9.37988 9.37988l-43.4404 -43.4395c17.3604 -28.8105 27.4404 -62.5 27.4404 -98.6201c0 -34.75 -9.30957 -67.2598 -25.5 -95.3398c-11.5703 -20.0801 -32.8203 -32.6602 -56 -32.6602zM120.06 188.57
+c-24.8096 -14.3701 -44.1094 -35.7305 -56.5596 -60.5703h257c-12.4404 24.8398 -31.75 46.2002 -56.5596 60.5703l-23.9404 13.8701v181.76l-96 -0.110352v-181.649z" />
+ <glyph glyph-name="brush" unicode="&#xf55d;" horiz-adv-x="384"
+d="M352 448c17.6699 0 32 -14.3301 32 -32v-224h-384v224c0 17.6699 14.3301 32 32 32h320zM0 128v32h384v-32c0 -35.3496 -28.6602 -64 -64 -64h-64v-64c0 -35.3496 -28.6602 -64 -64 -64s-64 28.6504 -64 64v64h-64c-35.3398 0 -64 28.6504 -64 64zM192 24
+c-13.25 0 -24 -10.7402 -24 -24c0 -13.25 10.75 -24 24 -24s24 10.75 24 24c0 13.2598 -10.75 24 -24 24z" />
+ <glyph glyph-name="bus-alt" unicode="&#xf55e;"
+d="M488 320c13.25 0 24 -10.7402 24 -24v-80c0 -13.25 -10.75 -24 -24 -24h-8v-166.4c0 -12.7998 -9.59961 -25.5996 -25.5996 -25.5996h-6.40039v-32c0 -17.6699 -14.3301 -32 -32 -32h-32c-17.6699 0 -32 14.3301 -32 32v32h-192v-32c0 -17.6699 -14.3301 -32 -32 -32h-32
+c-17.6699 0 -32 14.3301 -32 32v32c-17.6699 0 -32 14.3301 -32 32v160h-8c-13.25 0 -24 10.75 -24 24v80c0 13.2598 10.75 24 24 24h8v48c0 44.7998 99.2002 80 224 80s224 -35.2002 224 -80v-48h8zM160 376v-16c0 -4.41992 3.58008 -8 8 -8h176c4.41992 0 8 3.58008 8 8
+v16c0 4.41992 -3.58008 8 -8 8h-176c-4.41992 0 -8 -3.58008 -8 -8zM112 48c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM240 160v160h-112c-17.6699 0 -32 -14.3301 -32 -32v-96c0 -17.6699 14.3301 -32 32 -32h112zM272 160
+h112c17.6699 0 32 14.3301 32 32v96c0 17.6699 -14.3301 32 -32 32h-112v-160zM400 48c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="cannabis" unicode="&#xf55f;"
+d="M503.47 87.75c5.24023 -2.75977 8.52051 -8.20996 8.53027 -14.1504c0.00976562 -5.92969 -3.26953 -11.3691 -8.50977 -14.1494c-2.4502 -1.29004 -60.7705 -31.7197 -133.49 -31.7197c-6.12988 0 -11.96 0.0996094 -17.5 0.30957
+c11.3604 -22.2305 16.5195 -38.3096 16.8096 -39.2197c1.80078 -5.68066 0.290039 -11.8906 -3.90918 -16.1104c-2.5918 -2.60059 -7.67578 -4.71094 -11.3467 -4.71094c-1.34277 0 -3.47168 0.322266 -4.75391 0.720703c-1.83008 0.580078 -37.7197 11.9902 -77.2998 39.29
+v-64.0098c0 -4.41992 -3.58008 -8 -8 -8h-16c-4.41992 0 -8 3.58008 -8 8v64.0195c-39.5801 -27.2998 -75.4697 -38.7197 -77.2998 -39.2891c-1.28223 -0.398438 -3.41113 -0.72168 -4.75391 -0.72168c-3.6709 0 -8.75488 2.11035 -11.3467 4.71094
+c-4.19922 4.2207 -5.70996 10.4307 -3.90918 16.1104c0.290039 0.910156 5.43945 16.9902 16.8096 39.2197c-5.54004 -0.209961 -11.3604 -0.30957 -17.5 -0.30957c-72.7305 0 -131.04 30.4297 -133.49 31.7197c-4.69629 2.48926 -8.50879 8.82422 -8.50879 14.1396
+c0 5.32422 3.82129 11.6641 8.5293 14.1504c1.56934 0.820312 32.3896 16.8896 76.7793 25.8096c-64.25 75.1201 -84.0498 161.671 -84.9297 165.641c-0.210938 0.947266 -0.382812 2.50488 -0.382812 3.47559c0 3.65039 2.09375 8.71094 4.67285 11.2939
+c3.03027 3.04004 7.12012 4.7002 11.3203 4.7002c1.14941 0 2.2998 -0.129883 3.43945 -0.379883c3.89062 -0.860352 86.5508 -19.5996 160.58 -79.7598c0 1.45996 -0.00976562 2.92969 -0.00976562 4.39941c0 118.79 59.9805 213.721 62.5303 217.7
+c2.93945 4.58984 8.01953 7.37012 13.4697 7.37012h0.00195312c4.82812 0 10.8574 -3.30176 13.458 -7.37012c2.5498 -3.97949 62.5303 -98.9102 62.5303 -217.7c0 -1.46973 0 -2.93945 -0.00976562 -4.39941c74.0391 60.1699 156.699 78.9102 160.579 79.7598
+c1.15039 0.259766 2.30078 0.379883 3.44043 0.379883c4.2002 0 8.29004 -1.66016 11.3203 -4.7002c3.85938 -3.87012 5.47949 -9.43945 4.2998 -14.7695c-0.879883 -3.96973 -20.6797 -90.5205 -84.9297 -165.641c44.3896 -8.91992 75.2197 -24.9893 76.7793 -25.8096z" />
+ <glyph glyph-name="check-double" unicode="&#xf560;"
+d="M504.5 276.05c10 -10.0596 10 -26.3594 0 -36.4199l-294.4 -296.09c-9.98926 -10.0596 -26.1992 -10.0596 -36.1992 0l-166.4 167.36c-10 10.0498 -10 26.3594 0 36.4092l36.2002 36.4102c9.99023 10.0605 26.2002 10.0605 36.2002 0l112.1 -112.739l240.1 241.479
+c9.99023 10.0498 26.2002 10.0498 36.2002 0zM166.57 165.29l-122.601 123.5c-6.83984 7.01953 -6.83984 18.3604 0.180664 25.2002l42.1299 41.7695c7.01953 6.83984 18.3594 6.83984 25.2002 -0.179688l68.2295 -68.7705l157.16 156.08
+c7.03027 6.84082 18.3701 6.84082 25.21 -0.179688l41.9502 -42.1201c6.83984 -7.03027 6.83984 -18.3701 -0.180664 -25.21l-212.069 -210.27c-7.03027 -6.84082 -18.3701 -6.84082 -25.21 0.179688z" />
+ <glyph glyph-name="cocktail" unicode="&#xf561;" horiz-adv-x="576"
+d="M296 -16c22.0898 0 40 -17.9102 40 -40c0 -4.41992 -3.58008 -8 -8 -8h-240c-4.41992 0 -8 3.58008 -8 8c0 22.0898 17.9102 40 40 40h56v125.22l-168.74 168.73c-15.5195 15.5195 -4.5293 42.0498 17.4199 42.0498h366.641c21.9492 0 32.9395 -26.5303 17.4199 -42.0498
+l-168.74 -168.73v-125.22h56zM432 448c79.5303 0 144 -64.4697 144 -144s-64.4697 -144 -144 -144c-27.4102 0 -52.7695 8.08008 -74.5801 21.3799l35.2598 35.2598c12.0303 -5.43945 25.2803 -8.63965 39.3203 -8.63965c52.9297 0 96 43.0596 96 96s-43.0703 96 -96 96
+c-35.3701 0 -65.9902 -19.4502 -82.6396 -48h-52.54c19.8301 55.7998 72.5693 96 135.18 96z" />
+ <glyph glyph-name="concierge-bell" unicode="&#xf562;"
+d="M288 317.46c108.51 -15.5703 192 -108.64 192 -221.46h-448c0 112.82 83.4902 205.89 192 221.46v18.54h-16c-8.83984 0 -16 7.16016 -16 16v16c0 8.83984 7.16016 16 16 16h96c8.83984 0 16 -7.16016 16 -16v-16c0 -8.83984 -7.16016 -16 -16 -16h-16v-18.54zM496 64
+c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-480c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h480z" />
+ <glyph glyph-name="cookie" unicode="&#xf563;"
+d="M510.37 193.21c4.37988 -27.6602 -0.110352 -56 -12.8203 -80.96l-35.0996 -68.8701c-10.7695 -21.1328 -36.6592 -47.0273 -57.79 -57.7998l-69.1201 -35.21c-24.8301 -12.6396 -53.0098 -17.1104 -80.5205 -12.75l-76.6992 12.1396
+c-23.3896 3.70801 -55.9639 20.3066 -72.71 37.0498l-54.7607 54.75c-16.7959 16.8027 -33.4443 49.4844 -37.1592 72.9502l-12.0801 76.2705c-0.901367 5.68848 -1.63281 14.9805 -1.63281 20.7412c0 17.5723 6.47461 44.541 14.4521 60.1982l35.1006 68.8799
+c10.7666 21.1357 36.6562 47.0303 57.79 57.8008l69.1201 35.21c24.8291 12.6494 53.0098 17.1201 80.5195 12.7598l76.7002 -12.1504c27.5303 -4.34961 52.9795 -17.3301 72.71 -37.0498l54.7598 -54.75c16.7969 -16.8027 33.4443 -49.4844 37.1602 -72.9502zM176 80
+c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM208 240c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM368 112c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32
+s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="cookie-bite" unicode="&#xf564;"
+d="M510.52 192.18c4.10059 -27.3301 -0.349609 -55.2695 -12.9092 -79.9102l-35.1006 -68.8799c-10.7656 -21.1367 -36.6562 -47.0361 -57.79 -57.8096l-69.1299 -35.21c-24.8301 -12.6504 -53.0195 -17.1201 -80.5303 -12.7598l-76.71 12.1494
+c-23.3916 3.70801 -55.9658 20.3105 -72.71 37.0605l-54.7695 54.7598c-16.7969 16.8057 -33.4443 49.4912 -37.1602 72.96l-12.0801 76.2695c-0.900391 5.68945 -1.63184 14.9824 -1.63184 20.7422c0 17.5752 6.47461 44.5479 14.4521 60.208l35.0996 68.8799
+c10.7646 21.1396 36.6543 47.0391 57.79 57.8105l69.1299 35.2197c24.5898 12.5205 52.46 16.96 79.7207 12.8203c0.859375 -69.96 57.6895 -126.45 127.859 -126.45c0 -70.1699 56.5 -127.01 126.47 -127.86zM176 80c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32
+s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM208 240c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM368 112c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="crop-alt" unicode="&#xf565;"
+d="M488 96c13.25 0 24 -10.7402 24 -24v-48c0 -13.25 -10.75 -24 -24 -24h-40v-40c0 -13.25 -10.75 -24 -24 -24h-48c-13.25 0 -24 10.75 -24 24v328h-160v96h224c17.6699 0 32 -14.3301 32 -32v-256h40zM160 424v-328h160v-96h-224c-17.6699 0 -32 14.3301 -32 32v256h-40
+c-13.25 0 -24 10.75 -24 24v48c0 13.2598 10.75 24 24 24h40v40c0 13.2598 10.75 24 24 24h48c13.25 0 24 -10.7402 24 -24z" />
+ <glyph glyph-name="digital-tachograph" unicode="&#xf566;" horiz-adv-x="640"
+d="M608 352c17.6699 0 32 -14.3301 32 -32v-256c0 -17.6699 -14.3301 -32 -32 -32h-576c-17.6699 0 -32 14.3301 -32 32v256c0 17.6699 14.3301 32 32 32h576zM304 96v8c0 4.41992 -3.58008 8 -8 8h-224c-4.41992 0 -8 -3.58008 -8 -8v-8c0 -4.41992 3.58008 -8 8 -8h224
+c4.41992 0 8 3.58008 8 8zM72 160c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8h-16c-4.41992 0 -8 -3.58008 -8 -8v-16zM136 160c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8h-16
+c-4.41992 0 -8 -3.58008 -8 -8v-16zM200 160c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8h-16c-4.41992 0 -8 -3.58008 -8 -8v-16zM264 160c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8
+h-16c-4.41992 0 -8 -3.58008 -8 -8v-16zM304 224v48c0 8.83984 -7.16016 16 -16 16h-208c-8.83984 0 -16 -7.16016 -16 -16v-48c0 -8.83984 7.16016 -16 16 -16h208c8.83984 0 16 7.16016 16 16zM576 96v8c0 4.41992 -3.58008 8 -8 8h-224c-4.41992 0 -8 -3.58008 -8 -8v-8
+c0 -4.41992 3.58008 -8 8 -8h224c4.41992 0 8 3.58008 8 8z" />
+ <glyph glyph-name="dizzy" unicode="&#xf567;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM152 233.4l28.7002 -28.7002c14.7002 -14.7998 37.7998 7.39941 22.5996 22.5996l-28.7002 28.7002l28.7002 28.7002c15 14.8994 -7.59961 37.5996 -22.5996 22.5996
+l-28.7002 -28.7002l-28.7002 28.7002c-14.8994 15 -37.5996 -7.59961 -22.5996 -22.5996l28.7002 -28.7002l-28.7002 -28.7002c-15.2002 -15.0996 7.7998 -37.3994 22.5996 -22.5996zM248 32c35.2998 0 64 28.7002 64 64s-28.7002 64 -64 64s-64 -28.7002 -64 -64
+s28.7002 -64 64 -64zM395.3 227.3l-28.7002 28.7002l28.7002 28.7002c15 14.8994 -7.59961 37.5996 -22.5996 22.5996l-28.7002 -28.7002l-28.7002 28.7002c-14.8994 15 -37.5996 -7.59961 -22.5996 -22.5996l28.7002 -28.7002l-28.7002 -28.7002
+c-15.2002 -15.0996 7.7998 -37.3994 22.5996 -22.5996l28.7002 28.7002l28.7002 -28.7002c14.7002 -14.7998 37.7998 7.39941 22.5996 22.5996z" />
+ <glyph glyph-name="drafting-compass" unicode="&#xf568;" horiz-adv-x="511"
+d="M457.01 103.58l54.9805 -95.2305l-7.02051 -58.25c-1.2793 -10.5898 -12.3594 -16.9893 -22.1699 -12.7998l-53.9502 23.04l-54.3799 94.1904c29.9102 11.8701 57.4902 28.7197 82.54 49.0498zM499.5 198.14c-52.6201 -83.1299 -144.45 -134.14 -243.5 -134.14
+c-35.3799 0 -69.8701 6.71973 -102.06 18.96l-70.8008 -122.63l-53.9492 -23.04c-9.81055 -4.19043 -20.8906 2.20996 -22.1709 12.7998l-7.01953 58.25l71.2803 123.46c-21.29 17.8105 -40.4102 38.3799 -55.9805 62.0205
+c-4.99023 7.56934 -2.20996 17.9297 5.64062 22.4697l27.75 16.0703c7.40918 4.29004 16.5898 1.76953 21.3799 -5.33008c9.71973 -14.4102 21.1299 -27.3906 33.6797 -39l68.2998 118.31c-7.43945 13.6299 -12.0498 29.0303 -12.0498 45.6602c0 53.0195 42.9805 96 96 96
+s96 -42.9805 96 -96c0 -16.6299 -4.61035 -32.0303 -12.0596 -45.6602l51.79 -89.71c-23.0508 -23.1699 -51.3809 -39.96 -82.6104 -48.9199l-51.0898 88.5c-0.69043 -0.0195312 -1.33984 -0.209961 -2.04004 -0.209961s-1.33984 0.19043 -2.04004 0.209961
+l-67.3604 -116.68c22.1797 -7.28027 45.4805 -11.5303 69.4102 -11.5303c76.25 0 147.01 38.8496 188.12 102.38c4.64941 7.17969 13.7803 9.87012 21.2598 5.71973l28.0703 -15.5693c7.93945 -4.40039 10.9102 -14.7207 6.0498 -22.3906zM256 384
+c-17.6699 0 -32 -14.3301 -32 -32s14.3301 -32 32 -32s32 14.3301 32 32s-14.3301 32 -32 32z" />
+ <glyph glyph-name="drum" unicode="&#xf569;" horiz-adv-x="576"
+d="M458.08 327.12c71.3799 -23.29 117.91 -60.75 117.92 -103.13v-160.83c0 -30.46 -24.0303 -58.4004 -64 -80.3701v96.3701c0 17.5996 -14.4004 32 -32 32s-32 -14.4004 -32 -32v-122.41c-37.4004 -11.1299 -81 -18.4404 -128 -20.75v111.16c0 17.5996 -14.4004 32 -32 32
+s-32 -14.4004 -32 -32v-111.15c-47 2.31055 -90.5996 9.62012 -128 20.75v122.41c0 17.5996 -14.4004 32 -32 32s-32 -14.4004 -32 -32v-96.3701c-39.9697 21.9697 -64 49.9102 -64 80.3701v160.83c0 70.6904 128.94 128 288 128
+c21.8467 -0.00585938 57.167 -2.2373 78.8398 -4.98047l160.69 96.4102c15.1699 9.10059 34.8096 4.18066 43.9102 -10.9697c9.08984 -15.1602 4.18945 -34.8203 -10.9707 -43.9102zM288 144c132.54 0 240 35.8096 240 79.9902c0 30.2695 -50.4502 56.5996 -124.82 70.1895
+l-162.71 -97.6201c-14.3994 -8.63965 -34.3496 -4.95996 -43.9102 10.9707c-9.08984 15.1602 -4.18945 34.8193 10.9707 43.9102l87.4102 52.4395c-2.32031 0.0205078 -4.60059 0.120117 -6.94043 0.120117c-132.55 0 -240 -35.8203 -240 -80s107.45 -80 240 -80z" />
+ <glyph glyph-name="drum-steelpan" unicode="&#xf56a;" horiz-adv-x="576"
+d="M288 416c159.06 0 288 -57.3096 288 -128v-192c0 -70.6904 -128.94 -128 -288 -128s-288 57.3096 -288 128v192c0 70.6904 128.94 128 288 128zM205.01 257.64c5.11035 19.0605 2.49023 38.96 -7.37012 56.0508l-25.5996 44.3398
+c-73.9297 -13.6406 -124.04 -39.8701 -124.04 -70.0303c0 -30.7803 52.2305 -57.46 128.7 -70.8398c13.7695 9.91016 23.8594 23.8701 28.3096 40.4795zM288 208c21.0801 0 41.4102 1 60.8896 2.7002c-8.05957 26.1299 -32.1494 45.2998 -60.8896 45.2998
+s-52.8301 -19.1699 -60.8896 -45.2998c19.4795 -1.7002 39.8096 -2.7002 60.8896 -2.7002zM352 352v13.04c-20.4004 1.87988 -41.7998 2.95996 -64 2.95996s-43.5996 -1.08008 -64 -2.95996v-13.04c0 -35.29 28.71 -64 64 -64s64 28.71 64 64zM398.93 217.1
+c76.6699 13.3604 129.07 40.0703 129.07 70.9004c0 30.21 -50.2803 56.5 -124.44 70.0996l-25.6494 -44.4199c-9.87012 -17.0801 -12.4902 -36.9795 -7.37988 -56.04c4.45996 -16.6396 14.5898 -30.6299 28.3994 -40.54z" />
+ <glyph glyph-name="feather-alt" unicode="&#xf56b;"
+d="M512 448c-1.80957 -26.2598 -11.71 -132.86 -53.6201 -234.79l-106.54 -53.21h81.1406c-9.08008 -16.4102 -19.2002 -32.2305 -30.4502 -47.1201l-146.79 -48.8799h100.95c-35.5605 -30.0703 -79.1006 -51.0996 -132.58 -56.54
+c-41.8105 -4.83008 -83.8701 -7.21973 -125.96 -7.36035l-57.1309 -57.0693c-9.38965 -9.37012 -24.5996 -9.37012 -33.9795 0s-9.37988 24.5693 0 33.9395l259.5 259.24c6.25 6.25 6.25 16.3799 0 22.6299s-16.4004 6.25 -22.6504 0l-178.87 -178.689
+c1.15039 26.7998 2.90039 53.5801 5.99023 80.2393c25.4307 249.41 389.21 284.051 440.99 287.61z" />
+ <glyph glyph-name="file-contract" unicode="&#xf56c;" horiz-adv-x="384"
+d="M224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136zM64 376v-16c0 -4.41992 3.58008 -8 8 -8h80c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8
+h-80c-4.41992 0 -8 -3.58008 -8 -8zM64 312v-16c0 -4.41992 3.58008 -8 8 -8h80c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8h-80c-4.41992 0 -8 -3.58008 -8 -8zM256.81 64c-4.19922 0 -8.13965 2.44043 -10.1592 6.5
+c-11.9502 23.8604 -46.2305 30.3496 -66 14.1602l-13.8809 41.6396c-3.28906 9.82031 -12.4297 16.4102 -22.7695 16.4102s-19.4805 -6.59961 -22.7803 -16.4102l-18.1895 -54.6396c-1.53027 -4.58008 -5.81055 -7.66016 -10.6406 -7.66016h-12.3896
+c-8.83984 0 -16 -7.16016 -16 -16s7.16016 -16 16 -16h12.3896c18.6201 0 35.1104 11.8701 41 29.5303l10.6104 31.8799l16.8301 -50.46c2.03027 -6.14062 7.58008 -10.4404 14.0303 -10.8906c0.389648 -0.0292969 0.759766 -0.0498047 1.13965 -0.0498047h0.00976562
+c5.45508 0 11.8613 3.96094 14.2998 8.83984l7.6709 15.3408c2.7998 5.59961 7.93945 6.18945 10.0195 6.18945s7.21973 -0.599609 10.1699 -6.51953c7.37012 -14.7207 22.1904 -23.8604 38.6396 -23.8604h47.1904c8.83984 0 16 7.16016 16 16s-7.16016 16 -16 16h-47.1904z
+M377 343c4.5 -4.5 7 -10.5996 7 -16.9004v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7z" />
+ <glyph glyph-name="file-download" unicode="&#xf56d;" horiz-adv-x="384"
+d="M224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136zM300.45 100.64c10.1299 10.0703 3.00977 27.3604 -11.2705 27.3604h-65.1797v80
+c0 8.83984 -7.16016 16 -16 16h-32c-8.83984 0 -16 -7.16016 -16 -16v-80h-65.1797c-14.2803 0 -21.4004 -17.29 -11.25 -27.3604l96.4199 -95.6992c6.64941 -6.61035 17.3896 -6.61035 24.04 0zM377 343c4.5 -4.5 7 -10.5996 7 -16.9004v-6.09961h-128v128h6.09961
+c6.40039 0 12.5 -2.5 17 -7z" />
+ <glyph glyph-name="file-export" unicode="&#xf56e;" horiz-adv-x="575"
+d="M384 326.1v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7l97.9004 -98c4.5 -4.5 7 -10.5996 7 -16.9004zM571 140c6.59961 -6.59961 6.59961 -17.4004 0 -24l-95.7002 -96.5c-10.0996 -10.0996 -27.3994 -3 -27.3994 11.2998v65.2002h-64v64h64v65.0996
+c0 14.3008 17.2998 21.4004 27.3994 11.3008zM192 112c0 -8.7998 7.2002 -16 16 -16h176v-136c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136c0 -13.2002 10.7998 -24 24 -24h136v-128h-176
+c-8.7998 0 -16 -7.2002 -16 -16v-32z" />
+ <glyph glyph-name="file-import" unicode="&#xf56f;"
+d="M16 160h112v-64h-112c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16zM505 343c4.5 -4.5 7 -10.5996 7 -16.9004v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7zM352 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24
+h-336c-13.2998 0 -24 10.7002 -24 24v136h127.9v-65.0996c0 -14.3008 17.2998 -21.4004 27.3994 -11.3008l95.7002 96.4004c6.59961 6.59961 6.59961 17.2998 0 24l-95.5996 96.5c-10.1006 10.0996 -27.4004 3 -27.4004 -11.2998v-65.2002h-128v264
+c0 13.2998 10.7002 24 24 24h200v-136z" />
+ <glyph glyph-name="file-invoice" unicode="&#xf570;" horiz-adv-x="384"
+d="M288 192v-64h-192v64h192zM377 343c4.5 -4.5 7 -10.5996 7 -16.9004v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7zM224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464
+c0 13.2998 10.7002 24 24 24h200v-136zM64 376v-16c0 -4.41992 3.58008 -8 8 -8h80c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8h-80c-4.41992 0 -8 -3.58008 -8 -8zM64 312v-16c0 -4.41992 3.58008 -8 8 -8h80c4.41992 0 8 3.58008 8 8v16
+c0 4.41992 -3.58008 8 -8 8h-80c-4.41992 0 -8 -3.58008 -8 -8zM320 8v16c0 4.41992 -3.58008 8 -8 8h-80c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h80c4.41992 0 8 3.58008 8 8zM320 208c0 8.83984 -7.16016 16 -16 16h-224
+c-8.83984 0 -16 -7.16016 -16 -16v-96c0 -8.83984 7.16016 -16 16 -16h224c8.83984 0 16 7.16016 16 16v96z" />
+ <glyph glyph-name="file-invoice-dollar" unicode="&#xf571;" horiz-adv-x="384"
+d="M377 343c4.5 -4.5 7 -10.5996 7 -16.9004v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7zM224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136z
+M64 376v-16c0 -4.41992 3.58008 -8 8 -8h80c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8h-80c-4.41992 0 -8 -3.58008 -8 -8zM64 296c0 -4.41992 3.58008 -8 8 -8h80c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8h-80c-4.41992 0 -8 -3.58008 -8 -8
+v-16zM208 32.1201c23.6201 0.629883 42.6699 20.54 42.6699 45.0703c0 19.9697 -12.9902 37.8096 -31.5801 43.3896l-45 13.5c-5.16016 1.54004 -8.76953 6.78027 -8.76953 12.7295c0 7.27051 5.2998 13.1904 11.7998 13.1904h28.1104
+c4.55957 0 8.94922 -1.29004 12.8193 -3.71973c3.24023 -2.03027 7.36035 -1.91016 10.1299 0.729492l11.75 11.21c3.53027 3.37012 3.33008 9.20996 -0.569336 12.1406c-9.10059 6.83984 -20.0801 10.7695 -31.3701 11.3496v24.29c0 4.41992 -3.58008 8 -8 8h-16
+c-4.41992 0 -8 -3.58008 -8 -8v-24.1201c-23.6201 -0.629883 -42.6699 -20.5498 -42.6699 -45.0703c0 -19.9697 12.9893 -37.8096 31.5801 -43.3896l45 -13.5c5.15918 -1.54004 8.76953 -6.78027 8.76953 -12.7295c0 -7.27051 -5.2998 -13.1904 -11.7998 -13.1904h-28.1104
+c-4.55957 0 -8.9502 1.2998 -12.8193 3.71973c-3.24023 2.03027 -7.36035 1.91016 -10.1309 -0.729492l-11.75 -11.21c-3.5293 -3.37012 -3.3291 -9.20996 0.570312 -12.1406c9.10059 -6.83008 20.0801 -10.7695 31.3701 -11.3496v-24.29c0 -4.41992 3.58008 -8 8 -8h16
+c4.41992 0 8 3.58008 8 8v24.1201z" />
+ <glyph glyph-name="file-prescription" unicode="&#xf572;" horiz-adv-x="384"
+d="M224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136zM292.53 132.52c-6.25 6.25 -16.3799 6.25 -22.6299 0l-29.9004 -29.8994l-33.46 33.46
+c19.8398 10.8701 33.46 31.7002 33.46 55.9199c0 35.3496 -28.6504 64 -64 64h-80c-8.83984 0 -16 -7.16016 -16 -16v-160c0 -8.83984 7.16016 -16 16 -16h16c8.83984 0 16 7.16016 16 16v48h18.7402l59.3193 -59.3096l-30.0596 -30.0605
+c-6.25 -6.25 -6.25 -16.3799 0 -22.6299l11.3096 -11.3096c6.25 -6.25 16.3809 -6.25 22.6309 0l30.0596 30.0596l30.0596 -30.0703c6.25 -6.25 16.3809 -6.25 22.6309 0l11.3096 11.3105c6.25 6.25 6.25 16.3799 0 22.6299l-30.0596 30.0596l29.8994 29.9004
+c6.25 6.25 6.25 16.3799 0 22.6299zM176 176h-48v32h48c8.82031 0 16 -7.17969 16 -16s-7.17969 -16 -16 -16zM384 326.1v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7l97.9004 -98c4.5 -4.5 7 -10.5996 7 -16.9004z" />
+ <glyph glyph-name="file-signature" unicode="&#xf573;" horiz-adv-x="576"
+d="M218.17 23.8604c7.25 -14.4707 21.71 -23.4404 37.8301 -23.75l128 -0.110352v-40c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136c0 -13.2002 10.7998 -24 24 -24h136.01v-46.5498l-128 -127.09
+v-82.1201c-3.87012 0.30957 -7.46973 2.47949 -9.35938 6.25977c-11.9404 23.8604 -46.25 30.3496 -66 14.1602l-13.8809 41.6396c-3.28906 9.82031 -12.4297 16.4102 -22.7695 16.4102s-19.4805 -6.59961 -22.7803 -16.4102l-18.1895 -54.6396
+c-1.53027 -4.58008 -5.81055 -7.66016 -10.6406 -7.66016h-12.3896c-8.83984 0 -16 -7.16016 -16 -16s7.16016 -16 16 -16h12.3896c18.6201 0 35.1104 11.8701 41 29.5303l10.6104 31.8799l16.8301 -50.4697c4.4502 -13.46 23.1104 -14.8701 29.4795 -2.09082
+l7.6709 15.3408c2.7998 5.59961 7.93945 6.18945 10.0195 6.18945s7.21973 -0.599609 10.1699 -6.51953zM384 326.1v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7l97.9004 -98c4.5 -4.5 7 -10.5996 7 -16.9004zM288 101.04l162.79 161.62l67.8799 -67.8799
+l-161.68 -162.78h-68.9902v69.04zM568.54 280.67c9.9502 -9.93945 9.9502 -26.0703 0 -36.0098l-27.25 -27.25l-67.8799 67.8799l27.25 27.25c9.93945 9.94043 26.0703 9.94043 36.0098 0z" />
+ <glyph glyph-name="file-upload" unicode="&#xf574;" horiz-adv-x="384"
+d="M224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136zM289.18 95.9902c14.2803 0 21.4004 17.29 11.25 27.3594l-96.4199 95.7002
+c-6.64941 6.61035 -17.3896 6.61035 -24.04 0l-96.4199 -95.7002c-10.1396 -10.0693 -3.00977 -27.3594 11.2705 -27.3594h65.1797v-80c0 -8.83984 7.16016 -16 16 -16h32c8.83984 0 16 7.16016 16 16v80h65.1797zM377 343c4.5 -4.5 7 -10.5996 7 -16.9004v-6.09961h-128
+v128h6.09961c6.40039 0 12.5 -2.5 17 -7z" />
+ <glyph glyph-name="fill" unicode="&#xf575;"
+d="M502.63 230.94c12.4902 -12.5 12.4902 -32.7607 0 -45.2607l-221.57 -221.569c-18.75 -18.75 -43.3096 -28.1201 -67.8799 -28.1201c-24.5596 0 -49.1201 9.37988 -67.8701 28.1201l-117.189 117.189c-37.4902 37.4902 -37.4902 98.2598 0 135.75l94.7598 94.7598
+l-86.1895 86.1807c-6.24023 6.25 -6.24023 16.3799 0 22.6299l22.6191 22.6104c6.24023 6.25 16.3809 6.25 22.6201 0l86.1807 -86.1807l81.5801 81.5801c6.23926 6.25 14.4297 9.37012 22.6191 9.37012c8.19043 0 16.3809 -3.12012 22.6309 -9.37012zM386.41 159.97
+l48.3496 48.3398l-162.45 162.44l-58.9492 -58.9502l58.6094 -58.5996c12.4902 -12.4902 12.4902 -32.75 0 -45.2402c-12.4893 -12.4902 -32.75 -12.4902 -45.2393 0l-58.6104 58.5996l-81.6104 -81.6094l-13.1494 -13.1504
+c-3.86035 -3.84961 -6.07031 -7.99023 -7.43066 -11.8301h320.48z" />
+ <glyph glyph-name="fill-drip" unicode="&#xf576;" horiz-adv-x="576"
+d="M512 128c0 0 64 -92.6504 64 -128s-28.6602 -64 -64 -64s-64 28.6504 -64 64s64 128 64 128zM502.63 230.94c12.4902 -12.5 12.4902 -32.75 -0.00976562 -45.25l-221.57 -221.57c-18.7402 -18.75 -43.2998 -28.1201 -67.8701 -28.1201
+c-24.5596 0 -49.1299 9.37988 -67.8701 28.1201l-117.189 117.189c-37.4902 37.4902 -37.4902 98.2705 0 135.75l94.7598 94.7607l-86.1895 86.1797c-6.24023 6.24023 -6.24023 16.3701 0 22.6201l22.6191 22.6201c6.24023 6.25 16.3701 6.25 22.6201 0l86.1807 -86.1904
+l81.5801 81.5801c6.23926 6.25 14.4297 9.37012 22.6191 9.37012c8.19043 0 16.3809 -3.12012 22.6309 -9.37012zM386.41 159.97l48.3398 48.3398l-162.44 162.44l-58.9492 -58.9502l58.5996 -58.5996c12.4902 -12.4902 12.4902 -32.75 0 -45.2402
+s-32.75 -12.4902 -45.2402 0l-58.5996 58.5996l-81.6104 -81.6094l-13.1494 -13.1504c-3.86035 -3.84961 -6.07031 -7.99023 -7.43066 -11.8301h320.48z" />
+ <glyph glyph-name="fingerprint" unicode="&#xf577;"
+d="M256.12 202.04c13.2598 0 24 -10.75 23.9902 -24c1.55957 -99.1104 -15.9502 -176.61 -29.4805 -224.521c-2.97949 -10.5996 -12.6104 -17.5195 -23.0898 -17.5195c-20.9697 0 -25.8496 20.7695 -23.1201 30.4902c19.5605 69.6494 28.8398 139.3 27.7002 211.55
+c0 13.2598 10.75 24 24 24zM255.26 283.77c56.9707 -0.839844 104.03 -46.9199 104.91 -102.72c0.790039 -50.8994 -2.99023 -102.08 -11.2695 -152.109c-0.770508 -4.6709 -7.61035 -23.1006 -27.5801 -19.7705c-13.0703 2.16016 -21.9307 14.5205 -19.7607 27.5801
+c7.80078 47.2197 11.3809 95.5 10.6104 143.55c-0.469727 30.1299 -26.3096 55 -57.6201 55.4697c-34.5303 0.150391 -54.8594 -26.3896 -54.4502 -52.3398c0.800781 -51.4395 -4.0293 -102.93 -14.3691 -153.02c-2.69043 -12.9805 -15.3105 -21.3105 -28.3604 -18.6406
+c-12.9697 2.6709 -21.3301 15.3809 -18.6396 28.3604c9.61914 46.6396 14.1191 94.6104 13.3691 142.55c-0.849609 54.0205 41.9004 101.16 103.16 101.09zM144.57 303.55c10.2793 -8.37012 11.8398 -23.4795 3.48926 -33.7598
+c-18.7998 -23.0898 -28.4697 -51.0898 -28 -80.9697c0.640625 -40.6699 -2.66992 -81.4902 -9.85938 -121.33c-0.820312 -4.5498 -7.78027 -22.8604 -27.8906 -19.3604c-13.0498 2.36035 -21.6992 14.8398 -19.3594 27.8896c6.63965 36.7803 9.7002 74.4707 9.10938 112.051
+c-0.639648 40.7197 13.1309 80.5 38.75 112c8.39062 10.2793 23.46 11.8193 33.7607 3.47949zM254.04 365.88c101.09 -1.5 184.6 -83.0801 186.16 -181.83c0.0615234 -3.97852 0.111328 -10.4375 0.111328 -14.417c0 -29.1934 -2.68848 -76.4277 -6.00195 -105.433
+c-1.06934 -9.23047 -9.85938 -23.0605 -26.5791 -21.1104c-13.1602 1.5 -22.6104 13.4102 -21.1104 26.5801c4.2998 37.5205 6.16992 75.75 5.58008 113.63c-1.15039 73.0801 -63.4502 133.45 -138.88 134.58c-11.5303 0.0800781 -22.9502 -1.08008 -34 -3.68945
+c-12.8906 -3 -25.8301 4.93945 -28.8906 17.8291c-3.04004 12.9102 4.94043 25.8301 17.8301 28.8906c14.9102 3.53027 30.3701 5.37988 45.7803 4.96973zM506.11 244.43c6.22949 -28.2295 6.00977 -50.1602 5.83008 -72.3398
+c-0.110352 -13.1895 -10.8301 -23.7998 -24 -23.7998h-0.200195c-13.25 0.110352 -23.9102 10.9297 -23.7998 24.2002c0.209961 24.1895 0.00976562 40.1895 -4.7002 61.5996c-2.86035 12.9502 5.31934 25.75 18.2598 28.6104
+c13.0996 2.92969 25.75 -5.35059 28.6104 -18.2705zM465.99 335.15c7.62012 -10.8408 5 -25.8105 -5.83984 -33.4307c-10.8604 -7.58984 -25.8105 -5 -33.4199 5.86035c-39.8906 56.8301 -105.171 91.3604 -174.62 92.3896
+c-56.3408 0.810547 -108.92 -20.0596 -147.681 -58.8594c-37.1201 -37.1406 -57.1094 -86.5 -56.2793 -139l-0.160156 -23.6406c-0.379883 -13 -11.0498 -23.2998 -23.9707 -23.2998c-0.239258 0 -0.489258 0 -0.719727 0.0205078
+c-13.25 0.379883 -23.6699 11.4395 -23.2803 24.6895l0.120117 21.4697c-1.04004 65.6104 23.9502 127.28 70.3203 173.71c48.0098 48.0303 112.56 73.7607 182.35 72.9209c84.79 -1.27051 164.48 -43.4404 213.181 -112.83z" />
+ <glyph glyph-name="fish" unicode="&#xf578;" horiz-adv-x="576"
+d="M327.1 352c137.46 0 248.9 -128 248.9 -160s-111.44 -160 -248.9 -160c-89.9697 0 -168.55 54.7695 -212.279 101.62l-87.3301 -66.0498c-12.1299 -9.16992 -30.2402 0.599609 -27.1406 14.6602l24.1904 109.77l-24.1797 109.76
+c-3.10059 14.0605 15.0098 23.8398 27.1396 14.6602l87.3301 -66.0498c43.7295 46.8604 122.3 101.63 212.27 101.63zM414.53 168c13.25 0 24 10.75 24 24c0 13.2598 -10.7402 24 -24 24c-13.25 0 -24 -10.7402 -24 -24c0 -13.25 10.75 -24 24 -24z" />
+ <glyph glyph-name="flushed" unicode="&#xf579;" horiz-adv-x="496"
+d="M344 248c13.2998 0 24 -10.7002 24 -24s-10.7002 -24 -24 -24s-24 10.7002 -24 24s10.7002 24 24 24zM152 248c13.2998 0 24 -10.7002 24 -24s-10.7002 -24 -24 -24s-24 10.7002 -24 24s10.7002 24 24 24zM248 440c137 0 248 -111 248 -248s-111 -248 -248 -248
+s-248 111 -248 248s111 248 248 248zM80 224c0 -39.7998 32.2002 -72 72 -72s72 32.2002 72 72s-32.2002 72 -72 72s-72 -32.2002 -72 -72zM312 48c21.2002 0 21.2002 32 0 32h-128c-21.2002 0 -21.2002 -32 0 -32h128zM344 152c39.7998 0 72 32.2002 72 72
+s-32.2002 72 -72 72s-72 -32.2002 -72 -72s32.2002 -72 72 -72z" />
+ <glyph glyph-name="frown-open" unicode="&#xf57a;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM136 240c0 -17.7002 14.2998 -32 32 -32s32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32zM323.3 56.7002c11.4004 -3.5 22.5 6.2002 20.5 18.0996
+c-7 39.9004 -60.0996 61.2002 -95.7998 61.2002s-88.7998 -21.2002 -95.7998 -61.2002c-2 -11.7998 9 -21.5996 20.5 -18.0996c31.2002 9.59961 59.3994 15.2998 75.2998 15.2998s44.0996 -5.7002 75.2998 -15.2998zM328 208c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32
+s-32 -14.2998 -32 -32s14.2998 -32 32 -32z" />
+ <glyph glyph-name="glass-martini-alt" unicode="&#xf57b;"
+d="M502.05 390.4l-214.05 -214.04v-192.36h56c22.0898 0 40 -17.9102 40 -40c0 -4.41992 -3.58008 -8 -8 -8h-240c-4.41992 0 -8 3.58008 -8 8c0 22.0898 17.9102 40 40 40h56v192.36l-214.05 214.04c-21.25 21.2598 -6.2002 57.5996 23.8496 57.5996h444.4
+c30.0498 0 45.0996 -36.3398 23.8496 -57.5996zM443.77 400h-375.529l48 -48h279.529z" />
+ <glyph glyph-name="globe-africa" unicode="&#xf57c;" horiz-adv-x="496"
+d="M248 440c136.97 0 248 -111.03 248 -248s-111.03 -248 -248 -248s-248 111.03 -248 248s111.03 248 248 248zM408 224.5c0 8.55957 6.94043 15.5 15.5 15.5h18.4297c-19.0693 76.9805 -82.6992 136.37 -161.92 149.21v-21.7002c0 -8.55957 -6.93945 -15.5 -15.5 -15.5
+h-24.21c-5.17969 0 -10.0195 -2.58984 -12.8896 -6.89941l-8.08008 -12.1104c-2.13965 -3.20996 -5.40039 -5.5 -9.13965 -6.44043l-14.4502 -3.60938c-6.90039 -1.73047 -11.7402 -7.93066 -11.7402 -15.04v-4.40039c0 -8.55957 6.94043 -15.5 15.5 -15.5h90.0498
+h0.00292969c3.54297 0 8.45215 -2.0332 10.957 -4.54004l6.91992 -6.91992c2.91016 -2.91016 6.85059 -4.54004 10.96 -4.54004h10.0908c8.55957 0 15.5 -6.93945 15.5 -15.5c0 -6.66992 -4.27051 -12.5898 -10.6006 -14.7002l-47.3096 -15.7695
+c-3.90039 -1.2998 -8.15039 -1 -11.8301 0.839844l-14.7207 7.36035c-7.5791 3.7998 -15.9492 5.76953 -24.4297 5.76953h-0.889648c-10.0527 -0.00195312 -24.7383 -4.89941 -32.7803 -10.9297l-27.5801 -20.6904c-13.75 -10.3193 -21.8496 -26.5098 -21.8496 -43.6992
+v-14.0605c0.00195312 -12.4902 7.16992 -29.7959 16 -38.6299c10.25 -10.2402 24.1396 -16 38.6299 -16h25.8799c8.55957 0 15.5 -6.94043 15.5 -15.5v-29.8896v-0.00390625c0 -10.2822 3.73145 -26.0898 8.33008 -35.2861
+c4.7002 -9.40039 14.3096 -15.3398 24.8203 -15.3398c8.19824 0.000976562 18.542 5.53809 23.0898 12.3594l13.0293 19.5498c5.9248 8.88477 17.125 21.9482 25 29.1602c2.4707 2.27051 4.14062 5.27051 4.76074 8.56055l4.2998 22.8301
+c0.439453 2.3291 1.41016 4.5293 2.83008 6.42969l18.7402 24.9795c2.00977 2.68066 3.09961 5.9502 3.09961 9.30078v11.3398c0 8.55957 -6.94043 15.5 -15.5 15.5h-8.20996c-5.17969 0 -10.0205 2.58984 -12.8896 6.89941l-13.2402 19.8604
+c-5.66992 8.50977 -1.70996 20.0703 7.99023 23.2998l2.64941 0.879883c1.31641 0.4375 3.50977 0.792969 4.89746 0.792969c2.5918 0 6.44531 -1.16602 8.60254 -2.60254l18.21 -12.1396c2.15527 -1.43945 6.00781 -2.60742 8.59961 -2.60742
+c2.01953 0 5.125 0.733398 6.93066 1.63672l15.3896 7.7002c5.25 2.62012 8.57031 7.99023 8.57031 13.8604v6.92969z" />
+ <glyph glyph-name="globe-americas" unicode="&#xf57d;" horiz-adv-x="496"
+d="M248 440c136.97 0 248 -111.03 248 -248s-111.03 -248 -248 -248s-248 111.03 -248 248s111.03 248 248 248zM330.29 82.4004c7.56934 7.52832 13.7168 22.3037 13.7197 32.9795v0.00488281c0 6.97266 -4 16.6338 -8.92969 21.5654l-13.6904 13.6895
+c-6.00977 6 -14.1396 9.37012 -22.6299 9.37012h-66.75c-9.41016 4.70996 -21.4795 32 -32 32c-10.5195 0 -20.8994 2.4502 -30.3096 7.16016l-11.0801 5.54004c-4.0498 2.03027 -6.61035 6.16016 -6.61035 10.6904v0.00292969c0 4.75391 3.66016 9.83301 8.16992 11.3369
+l31.1699 10.3896c1.3584 0.452148 3.62305 0.818359 5.05469 0.818359c3.30078 0 8.00293 -1.75488 10.4961 -3.91797l9.2793 -8.06055c1.4502 -1.25977 3.31055 -1.95996 5.24023 -1.95996h5.63965c5.94043 0 9.81055 6.25977 7.15039 11.5801l-15.5898 31.1904
+c-0.464844 0.931641 -0.84082 2.53223 -0.84082 3.57227c0 1.86621 1.08398 4.43555 2.4209 5.7373l9.91992 9.65039c1.5 1.45996 3.5 2.26953 5.58008 2.26953h8.99023h0.00292969c1.82812 0 4.3623 1.04883 5.65625 2.33984l8 8
+c3.12012 3.12012 3.12012 8.19043 0 11.3105l-4.68945 4.68945c-3.12012 3.12012 -3.12012 8.19043 0 11.3105l10.3398 10.3398l4.69043 4.67969c6.25 6.25 6.25 16.3799 0 22.6299l-28.3008 28.3008c-8.26953 -0.310547 -16.3994 -1.12988 -24.3896 -2.41992v-11.3008
+c0 -11.8994 -12.5195 -19.6299 -23.1602 -14.3096l-24.0098 12.0098c-45.8398 -19.8496 -82.7305 -56.3896 -103.2 -101.89c9.92969 -14.7197 25.21 -37.3701 34.5898 -51.1406c4.29395 -6.3252 12.3848 -15.6172 18.0605 -20.7393l0.799805 -0.720703
+c9.5498 -8.60938 20.1699 -15.9697 31.6504 -21.75c14 -7.0498 34.4395 -18.1592 48.8096 -26.1094c10.1904 -5.62988 16.46 -16.3301 16.46 -27.9707v-32.0098c0 -8.49023 3.37012 -16.6299 9.37012 -22.6299c14.9902 -14.9902 24.3203 -38.6299 22.6299 -51.25v-27.3799
+c14.6504 0 28.8896 1.69043 42.6504 4.69043l17.3896 46.8496c2.04004 5.49023 3.25977 11.21 4.76953 16.8701c1.07031 4.00977 3.18066 7.70996 6.1709 10.71c3.31934 3.33008 7.40918 7.39941 11.3096 11.2803zM417 173.75l29.1797 -7.29004
+c1.08008 8.37988 1.82031 16.8701 1.82031 25.54c0 32.1299 -7.7998 62.4102 -21.3203 89.3301l-12.9795 -6.49023c-3.74023 -1.85938 -6.91992 -4.67969 -9.24023 -8.14941l-19.5898 -29.3809c-2.22754 -3.33594 -4.03516 -9.29883 -4.03516 -13.3096
+s1.80762 -9.97363 4.03516 -13.3096l17.9795 -26.9707c3.31055 -4.96973 8.36035 -8.51953 14.1504 -9.96973z" />
+ <glyph glyph-name="globe-asia" unicode="&#xf57e;" horiz-adv-x="496"
+d="M248 440c136.97 0 248 -111.03 248 -248s-111.03 -248 -248 -248s-248 111.03 -248 248s111.03 248 248 248zM236.66 199.77l17.3701 28.9502c1.7998 2.99023 6.2002 2.82031 7.75977 -0.299805c1.34961 -2.70996 4.12012 -4.41992 7.15039 -4.41992h3.05957
+c4.41992 0 8 3.58008 8 8v78.1201c0 6.05957 -3.41992 11.5996 -8.83984 14.3096l-10.8301 5.41016c-5.49023 2.75 -5.96973 10.4004 -0.860352 13.8105l50.1602 38.5293c-19.4297 6.31055 -40.1201 9.82031 -61.6299 9.82031c-110.28 0 -200 -89.7197 -200 -200
+c0 -10.9199 1.12012 -21.5498 2.80957 -31.9902h62.5703c4.24023 0 8.31055 1.69043 11.3105 4.69043l19.4697 19.46c3.85938 3.85938 10.3701 2.7998 12.8096 -2.08008l22.6201 -45.2305c2.70996 -5.42969 8.25 -8.84961 14.3105 -8.84961h6.10938
+c8.83984 0 16 7.16016 16 16v9.37012c0 4.24023 -1.68945 8.30957 -4.68945 11.3096l-5.66016 5.66016c-3.12012 3.12012 -3.12012 8.19043 0 11.3105l5.66016 5.65918c3 3 7.06934 4.69043 11.3096 4.69043h0.310547c5.61914 0 10.8291 2.9502 13.7197 7.76953z
+M408 89.5703l-0.00976562 24.5996c0 4.24023 -1.69043 8.31055 -4.69043 11.3105l-11.9102 11.9092c-1.5 1.5 -2.33984 3.54004 -2.33984 5.66016v12.9307c0 2.20996 -1.79004 4 -4 4h-6.05957c-1.78027 0 -3.33984 -1.1709 -3.83984 -2.88086l-4.2002 -14.4697
+c-0.490234 -1.7002 -2.06055 -2.87988 -3.83984 -2.87988h-3.80078c-1.4707 0.000976562 -3.12305 1.10254 -3.68945 2.45996l-5.35059 12.8496c-1.23926 2.99023 -4.15918 4.93066 -7.38965 4.93066h-12.0898h-0.0117188c-1.4082 0 -3.48145 -0.663086 -4.62793 -1.48047
+l-23.71 -16.8896c-1.73047 -1.23047 -3.61035 -2.25977 -5.59082 -3.0498l-39.3398 -15.7402c-3.04004 -1.21973 -5.0293 -4.16016 -5.0293 -7.42969v-10.2002v-0.00292969c0 -1.8291 1.04785 -4.36328 2.33984 -5.65723l11.9102 -11.9102
+c3 -3 7.06934 -4.68945 11.3096 -4.68945h10.3398c1.31055 0 2.61035 0.15918 3.87988 0.479492l21.2705 5.32031c1.76465 0.441406 4.67383 0.798828 6.49316 0.798828c6.12012 0 14.5986 -3.51172 18.9268 -7.83887l13.0098 -13.0098
+c3 -3 7.07031 -4.69043 11.3096 -4.69043h15.1602c4.24023 0 8.31055 1.69043 11.3105 4.69043l9.56934 9.56934c3 3 4.69043 7.07031 4.69043 11.3105z" />
+ <glyph glyph-name="grimace" unicode="&#xf57f;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM144 48v40h-40v-8c0 -17.7002 14.2998 -32 32 -32h8zM144 104v40h-8c-17.7002 0 -32 -14.2998 -32 -32v-8h40zM136 240c0 -17.7002 14.2998 -32 32 -32s32 14.2998 32 32
+s-14.2998 32 -32 32s-32 -14.2998 -32 -32zM208 48v40h-48v-40h48zM208 104v40h-48v-40h48zM272 48v40h-48v-40h48zM272 104v40h-48v-40h48zM336 48v40h-48v-40h48zM336 104v40h-48v-40h48zM328 208c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32
+s14.2998 -32 32 -32zM392 80v8h-40v-40h8c17.7002 0 32 14.2998 32 32zM392 104v8c0 17.7002 -14.2998 32 -32 32h-8v-40h40z" />
+ <glyph glyph-name="grin" unicode="&#xf580;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM328 272c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32s32 14.2998 32 32s-14.2998 32 -32 32zM168 272c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32
+s32 14.2998 32 32s-14.2998 32 -32 32zM248 16c60.5996 0 134.5 38.2998 143.8 93.2998c1.90039 11.7998 -9.39941 21.6006 -20.7002 17.9004c-30.1992 -9.7002 -75.0996 -15.2002 -123.1 -15.2002s-92.9004 5.5 -123.1 15.2002
+c-11.4004 3.7002 -22.7002 -6.10059 -20.7002 -17.9004c9.2998 -55 83.2002 -93.2998 143.8 -93.2998z" />
+ <glyph glyph-name="grin-alt" unicode="&#xf581;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM311.7 311.3c-12.4004 -18.3994 -15.2002 -36.8994 -15.7002 -55.2998c0.599609 -18.4004 3.2998 -36.9004 15.7002 -55.2998c8 -11.7002 25.0996 -11.4004 32.7002 0
+c12.3994 18.3994 15.1992 36.8994 15.6992 55.2998c-0.599609 18.4004 -3.2998 36.9004 -15.6992 55.2998c-8 11.7002 -25.1006 11.4004 -32.7002 0zM151.7 311.3c-12.4004 -18.3994 -15.2002 -36.8994 -15.7002 -55.2998
+c0.599609 -18.4004 3.2998 -36.9004 15.7002 -55.2998c8 -11.7002 25.0996 -11.4004 32.7002 0c12.3994 18.3994 15.1992 36.8994 15.6992 55.2998c-0.599609 18.4004 -3.2998 36.9004 -15.6992 55.2998c-8 11.7002 -25.1006 11.4004 -32.7002 0zM248 16
+c60.5996 0 134.5 38.2998 143.8 93.2998c1.90039 11.7998 -9.2998 21.6006 -20.7002 17.9004c-30.1992 -9.7002 -75.0996 -15.2002 -123.1 -15.2002s-92.9004 5.5 -123.1 15.2002c-11.4004 3.7002 -22.7002 -6.10059 -20.7002 -17.9004
+c9.2998 -55 83.2002 -93.2998 143.8 -93.2998z" />
+ <glyph glyph-name="grin-beam" unicode="&#xf582;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM328 296c-23.7998 0 -52.7002 -29.2998 -55.7998 -71.4004c-0.700195 -8.5 10.7998 -11.7998 14.8994 -4.5l9.5 17c7.7002 13.7002 19.2002 21.6006 31.5 21.6006
+c12.3008 0 23.8008 -7.90039 31.5 -21.6006l9.5 -17c4.10059 -7.39941 15.6006 -4.09961 14.9004 4.5c-3.2998 42.1006 -32.2002 71.4004 -56 71.4004zM168 296c-23.7998 0 -52.7002 -29.2998 -55.7998 -71.4004c-0.700195 -8.5 10.7002 -11.8994 14.8994 -4.5l9.5 17
+c7.7002 13.7002 19.2002 21.6006 31.5 21.6006c12.3008 0 23.8008 -7.90039 31.5 -21.6006l9.5 -17c4.10059 -7.39941 15.6006 -4.09961 14.9004 4.5c-3.2998 42.1006 -32.2002 71.4004 -56 71.4004zM248 16c60.5996 0 134.5 38.2998 143.8 93.2998
+c1.90039 11.7998 -9.2998 21.6006 -20.7002 17.9004c-30.1992 -9.7002 -75.0996 -15.2002 -123.1 -15.2002s-92.9004 5.5 -123.1 15.2002c-11.3008 3.7002 -22.7002 -6 -20.7002 -17.9004c9.2998 -55 83.2002 -93.2998 143.8 -93.2998z" />
+ <glyph glyph-name="grin-beam-sweat" unicode="&#xf583;" horiz-adv-x="504"
+d="M456 320c-26.5 0 -48 21 -48 47c0 20 28.5 60.4004 41.5996 77.7998c3.2002 4.2998 9.60059 4.2998 12.8008 0c13.0996 -17.3994 41.5996 -57.7998 41.5996 -77.7998c0 -26 -21.5 -47 -48 -47zM456 288c6.7998 0 13.2002 1.09961 19.5 2.59961
+c13.0996 -30.1992 20.5 -63.5 20.5 -98.5996c0 -137 -111 -248 -248 -248s-248 111 -248 248s111 248 248 248c50.2998 0 97 -15.0996 136.1 -40.7998c-7.7998 -18 -8.09961 -27.7998 -8.09961 -32.2002c0 -43.5996 35.9004 -79 80 -79zM328 296
+c-23.7998 0 -52.7002 -29.2998 -55.7998 -71.4004c-0.700195 -8.5 10.7998 -11.8994 14.8994 -4.5l9.5 17c7.7002 13.7002 19.2002 21.6006 31.5 21.6006c12.3008 0 23.8008 -7.90039 31.5 -21.6006l9.5 -17c4.10059 -7.5 15.6006 -4.09961 14.9004 4.5
+c-3.2998 42.1006 -32.2002 71.4004 -56 71.4004zM168 296c-23.7998 0 -52.7002 -29.2998 -55.7998 -71.4004c-0.700195 -8.5 10.7002 -11.8994 14.8994 -4.5l9.5 17c7.7002 13.7002 19.2002 21.6006 31.5 21.6006c12.3008 0 23.8008 -7.90039 31.5 -21.6006l9.5 -17
+c4.10059 -7.5 15.6006 -4.09961 14.9004 4.5c-3.2998 42.1006 -32.2002 71.4004 -56 71.4004zM248 16c60.5996 0 134.5 38.2998 143.8 93.2998c1.90039 11.7002 -9.2002 21.6006 -20.7002 17.9004c-30.1992 -9.7002 -75.0996 -15.2002 -123.1 -15.2002
+s-92.9004 5.5 -123.1 15.2002c-11.4004 3.7002 -22.7002 -6.10059 -20.7002 -17.9004c9.2998 -55 83.2002 -93.2998 143.8 -93.2998z" />
+ <glyph glyph-name="grin-hearts" unicode="&#xf584;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM90.4004 264.4c-7.7002 -20.2002 3.7998 -41.8008 24.1992 -47.2002l70.2002 -18.2002c4.60059 -1.2002 9.2998 1.5 10.5 6l19.4004 69.9004
+c5.59961 20.2998 -7.40039 41.0996 -28.7998 44.5c-18.7002 3 -36.5 -9.80078 -41.5 -27.9004l-2 -7.09961l-7.10059 1.89941c-18.2002 4.7998 -38.2002 -4.2998 -44.8994 -21.8994zM248 16c60.5996 0 134.5 38.2998 143.8 93.2998
+c1.90039 11.7998 -9.2998 21.5 -20.7002 17.9004c-30.1992 -9.7002 -75.0996 -15.2002 -123.1 -15.2002s-92.9004 5.5 -123.1 15.2002c-11.5 3.59961 -22.7002 -6.10059 -20.7002 -17.9004c9.2998 -55 83.2002 -93.2998 143.8 -93.2998zM381.4 217.3
+c20.3994 5.2998 31.8994 26.9004 24.1992 47.2002c-6.69922 17.5996 -26.6992 26.5996 -44.8994 21.9004l-7.10059 -1.90039l-2 7.09961c-5.09961 18.1006 -22.8994 30.9004 -41.5 27.9004c-21.3994 -3.40039 -34.3994 -24.2002 -28.7998 -44.5l19.4004 -69.9004
+c1.2998 -4.5 6 -7.19922 10.5 -6z" />
+ <glyph glyph-name="grin-squint" unicode="&#xf585;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM281.8 250.3c-7.7002 -4.7002 -7.7002 -15.8994 0 -20.5996l80 -48c11.5 -6.7998 24.1006 7.59961 15.4004 18l-33.6006 40.2998l33.6006 40.2998
+c8.59961 10.2998 -3.7998 24.9004 -15.4004 18zM118.8 280.3l33.6006 -40.2998l-33.6006 -40.2998c-8.59961 -10.4004 3.90039 -24.7998 15.4004 -18l80 48c7.7998 4.7002 7.7998 15.8994 0 20.5996l-80 48c-11.6006 6.90039 -24 -7.7002 -15.4004 -18zM248 16
+c60.5996 0 134.5 38.2998 143.8 93.2998c1.90039 11.7002 -9.2002 21.6006 -20.7002 17.9004c-30.1992 -9.7002 -75.0996 -15.2002 -123.1 -15.2002s-92.9004 5.5 -123.1 15.2002c-11.3008 3.7002 -22.7002 -6 -20.7002 -17.9004
+c9.2998 -55 83.2002 -93.2998 143.8 -93.2998z" />
+ <glyph glyph-name="grin-squint-tears" unicode="&#xf586;"
+d="M409.6 336.1c-5.59961 -0.799805 -10.2998 3.90039 -9.5 9.40039c3.30078 22.5996 12 73.5 26.8008 88.2998c19.0996 19.2002 50.6992 18.9004 70.2998 -0.700195c19.5996 -19.5996 19.8994 -51 0.700195 -70.1992
+c-14.8008 -14.8008 -65.7002 -23.6006 -88.3008 -26.8008zM102.4 47.9004c5.59961 0.799805 10.2998 -3.90039 9.5 -9.40039c-3.30078 -22.5996 -12 -73.5 -26.8008 -88.2998c-19.1992 -19.2002 -50.5996 -18.9004 -70.1992 0.700195
+c-19.6006 19.5996 -19.9004 51.0996 -0.800781 70.1992c14.8008 14.8008 65.7002 23.6006 88.3008 26.8008zM414.1 304.4c24 3.5 42.1006 7.39941 56.5 11.5c54.8008 -94.9004 42 -218.2 -39.1992 -299.301c-81.2002 -81.0996 -204.5 -94 -299.301 -39.1992
+c4.10059 14.3994 8.10059 32.5 11.5 56.5c2.90039 20.5 -12.5 49.5996 -45.6992 45.6992c-24.1006 -3.5 -42.1006 -7.39941 -56.5 -11.5c-54.8008 94.9004 -41.9004 218.2 39.1992 299.301c81.2002 81.0996 204.5 94 299.301 39.1992
+c-4.10059 -14.3994 -8.10059 -32.5 -11.5 -56.5c-2.90039 -20.5996 12.6992 -49.5996 45.6992 -45.6992zM255.7 342l-22.5 -90.5996c-2.2002 -8.60059 5.59961 -16.7002 14.5 -14.5l90.5 22.5996c13.0996 3.2998 11.5996 22.4004 -1.7998 23.5996l-52.3008 4.80078
+l-4.7998 52.2998c-1.2002 13.2998 -20.2998 15 -23.5996 1.7998zM164.8 111.7c1.2998 -13.4004 20.4004 -14.9004 23.5 -1.7002l22.6006 90.5c2.19922 8.7002 -5.7002 16.7002 -14.5 14.5l-90.5 -22.5996c-13.1006 -3.30078 -11.6006 -22.4004 1.7998 -23.6006
+l52.2998 -4.7998zM380.5 67.5c42.7998 42.9004 68 122.3 35.7002 167.6c-7.10059 9.90039 -21.9004 8.5 -27.2998 -2c-14.6006 -28.1992 -42.4004 -63.8994 -76.3008 -97.7998c-33.8994 -33.8994 -69.5 -61.7002 -97.7998 -76.2998
+c-10.7002 -5.40039 -11.7998 -20.2998 -2 -27.2002c14.4004 -10.2002 32.1006 -14.7002 51 -14.7002c41 0 87.4004 21.1006 116.7 50.4004z" />
+ <glyph glyph-name="grin-stars" unicode="&#xf587;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM94.5996 279.1c-6.19922 -1 -8.89941 -8.59961 -4.2998 -13.2998l25.4004 -24.5996l-6 -34.9004c-1 -6.2002 5.2998 -11 11 -7.89941l31.2998 16.2998l31.2002 -16.2002
+c5.7002 -3.09961 12 1.7002 11 7.90039l-6 34.8994l25.3994 24.6006c4.60059 4.59961 1.90039 12.1992 -4.2998 13.1992l-34.8994 5l-15.5 31.6006c-2.90039 5.7998 -11 5.7998 -13.9004 0l-15.5 -31.6006zM248 16c60.5996 0 134.5 38.2998 143.8 93.2998
+c1.90039 11.7998 -9.2002 21.6006 -20.7002 17.9004c-30.1992 -9.7002 -75.0996 -15.2002 -123.1 -15.2002s-92.9004 5.5 -123.1 15.2002c-11.4004 3.59961 -22.7002 -6.10059 -20.7002 -17.9004c9.2998 -55 83.2002 -93.2998 143.8 -93.2998zM405.7 265.9
+c4.59961 4.59961 1.89941 12.1992 -4.40039 13.0996l-34.8994 5l-15.5 31.5996c-2.90039 5.80078 -11 5.80078 -13.9004 0l-15.5 -31.5996l-34.9004 -5c-6.19922 -1 -8.7998 -8.59961 -4.2998 -13.2002l25.4004 -24.5996l-6 -34.9004c-1 -6.2002 5.2998 -11 11 -7.89941
+l31.2998 16.2998l31.2998 -16.2002c5.7002 -3.09961 12 1.7002 11 7.90039l-6 34.8994z" />
+ <glyph glyph-name="grin-tears" unicode="&#xf588;" horiz-adv-x="639"
+d="M102.4 191.9c5.59961 0.799805 10.2998 -3.90039 9.5 -9.40039c-3.30078 -22.5996 -12 -73.5 -26.8008 -88.2998c-19.1992 -19.2002 -50.5996 -18.9004 -70.1992 0.700195c-19.6006 19.5996 -19.9004 51.0996 -0.800781 70.1992
+c14.8008 14.8008 65.7002 23.6006 88.3008 26.8008zM625.8 165.1c19.2002 -19.0996 18.7998 -50.6992 -0.799805 -70.2998c-19.5996 -19.5996 -51 -19.8994 -70.2002 -0.700195c-14.7998 14.8008 -23.5996 65.7002 -26.7998 88.3008c-0.799805 5.59961 4 10.2998 9.5 9.5
+c22.5996 -3.30078 73.5 -12 88.2998 -26.8008zM496.4 177.9c11.7998 -82.3008 29.8994 -100.4 35.7998 -106.301c0.899414 -1 2 -1.59961 3 -2.5c-42.7002 -74.6992 -123 -125.1 -215.2 -125.1s-172.5 50.4004 -215.2 125c1 0.900391 2.10059 1.59961 3 2.5
+c5.90039 6 24 24.0996 35.7998 106.4c2.90039 20.3994 -12.5 49.5996 -45.6992 45.6992c-8.90039 -1.2998 -16.8008 -2.69922 -24.3008 -4.09961c13.7002 124 118.7 220.5 246.4 220.5s232.7 -96.5 246.4 -220.5c-7.5 1.40039 -15.4004 2.7998 -24.3008 4.09961
+c-26.5996 3.80078 -49.5 -19.0996 -45.6992 -45.6992zM400 296c-23.7998 0 -52.7002 -29.2998 -55.7998 -71.4004c-0.700195 -8.5 10.7002 -11.8994 14.8994 -4.5l9.5 17c7.7002 13.7002 19.2002 21.6006 31.5 21.6006c12.3008 0 23.8008 -7.90039 31.5 -21.6006l9.5 -17
+c4.10059 -7.5 15.6006 -4.09961 14.9004 4.5c-3.2998 42.1006 -32.2002 71.4004 -56 71.4004zM240 296c-23.7998 0 -52.7002 -29.2998 -55.7998 -71.4004c-0.700195 -8.5 10.7002 -11.8994 14.8994 -4.5l9.5 17c7.7002 13.7002 19.2002 21.6006 31.5 21.6006
+c12.3008 0 23.8008 -7.90039 31.5 -21.6006l9.5 -17c4.10059 -7.5 15.6006 -4.09961 14.9004 4.5c-3.2998 42.1006 -32.2002 71.4004 -56 71.4004zM320 16c60.5996 0 134.5 38.2998 143.8 93.2998c1.90039 11.7998 -9.2998 21.6006 -20.7002 17.9004
+c-30.1992 -9.7002 -75.0996 -15.2002 -123.1 -15.2002s-92.9004 5.5 -123.1 15.2002c-11.5 3.7002 -22.7002 -6.2002 -20.7002 -17.9004c9.2998 -55 83.2002 -93.2998 143.8 -93.2998z" />
+ <glyph glyph-name="grin-tongue" unicode="&#xf589;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248c0 -106.3 -67 -196.7 -161 -232c5.59961 12.2002 9 25.7002 9 40v45.5c24.7002 16.2002 43.5 38.0996 47.7998 63.7998c1.90039 11.7998 -9.2998 21.5 -20.7002 17.9004c-30.1992 -9.7002 -75.0996 -15.2002 -123.1 -15.2002
+s-92.9004 5.5 -123.1 15.2002c-11.4004 3.59961 -22.7002 -6.10059 -20.7002 -17.9004c4.2998 -25.7002 23.0996 -47.5996 47.7998 -63.7998v-45.5c0 -14.2998 3.40039 -27.7998 9 -40c-94 35.2998 -161 125.7 -161 232c0 137 111 248 248 248zM168 208
+c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM328 208c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM293.1 73.4004c0.800781 -0.400391 -0.5 0.299805 19 -9.30078v-64
+c0 -35.5996 -29.1992 -64.5 -64.8994 -64c-35.1006 0.400391 -63.1006 29.7002 -63.1006 64.9004v63c19.2002 9.59961 18 9 18.9004 9.40039c14.4004 6.5 31.0996 -2.2002 34.5996 -17.6006l1.80078 -7.7998c2.09961 -9.2002 15.1992 -9.2002 17.2998 0l1.7998 7.7998
+c3.5 15.4004 20.2002 24.1006 34.5996 17.6006z" />
+ <glyph glyph-name="grin-tongue-squint" unicode="&#xf58a;" horiz-adv-x="496"
+d="M293.1 73.4004c0.800781 -0.400391 -0.5 0.299805 19 -9.30078v-64c0 -35.5996 -29.1992 -64.5 -64.8994 -64c-35.1006 0.400391 -63.1006 29.7002 -63.1006 64.9004v63c19.2002 9.59961 18 9 18.9004 9.40039c14.4004 6.5 31.0996 -2.2002 34.5996 -17.6006
+l1.80078 -7.7998c2.09961 -9.2002 15.1992 -9.2002 17.2998 0l1.7998 7.7998c3.5 15.4004 20.2002 24.1006 34.5996 17.6006zM248 440c137 0 248 -111 248 -248c0 -106.3 -67 -196.7 -161 -232c5.59961 12.2002 9 25.7002 9 40v45.5
+c24.7002 16.2002 43.5 38.0996 47.7998 63.7998c1.90039 11.7998 -9.2998 21.6006 -20.7002 17.9004c-30.1992 -9.7002 -75.0996 -15.2002 -123.1 -15.2002s-92.9004 5.5 -123.1 15.2002c-11.5 3.59961 -22.7002 -6.10059 -20.7002 -17.9004
+c4.2998 -25.7002 23.0996 -47.5996 47.7998 -63.7998v-45.5c0 -14.2998 3.40039 -27.7998 9 -40c-94 35.2998 -161 125.7 -161 232c0 137 111 248 248 248zM214.2 229.7c7.7002 4.7002 7.7002 15.8994 0 20.5996l-80 48c-11.6006 6.90039 -24 -7.7002 -15.4004 -18
+l33.6006 -40.2998l-33.6006 -40.2998c-8.59961 -10.4004 3.90039 -24.7998 15.4004 -18zM377.2 199.7l-33.6006 40.2998l33.6006 40.2998c8.5 10.2998 -3.7002 24.9004 -15.4004 18l-80 -48c-7.7998 -4.7002 -7.7998 -15.8994 0 -20.5996l80 -48
+c11.5 -6.7998 24.1006 7.59961 15.4004 18z" />
+ <glyph glyph-name="grin-tongue-wink" unicode="&#xf58b;" horiz-adv-x="496"
+d="M344 264c13.2998 0 24 -10.7002 24 -24s-10.7002 -24 -24 -24s-24 10.7002 -24 24s10.7002 24 24 24zM248 440c137 0 248 -111 248 -248c0 -106.3 -67 -196.7 -161 -232c5.59961 12.2002 9 25.7002 9 40v45.5c24.7002 16.2002 43.5 38.0996 47.7998 63.7998
+c1.90039 11.7998 -9.2002 21.6006 -20.7002 17.9004c-30.1992 -9.7002 -75.0996 -15.2002 -123.1 -15.2002s-92.9004 5.5 -123.1 15.2002c-11.4004 3.59961 -22.7002 -6.10059 -20.7002 -17.9004c4.2998 -25.7002 23.0996 -47.5996 47.7998 -63.7998v-45.5
+c0 -14.2998 3.40039 -27.7998 9 -40c-94 35.2998 -161 125.7 -161 232c0 137 111 248 248 248zM192 215c8.40039 -7.40039 21.5996 -0.299805 20 10.7998c-4 25.2002 -34.2002 42.1006 -59.9004 42.1006c-25.6992 0 -55.8994 -16.9004 -59.8994 -42.1006
+c-1.7998 -11.0996 11.2998 -18.2002 19.7998 -10.7998l9.5 8.5c14.7998 13.2002 46.2002 13.2002 61 0zM344 176c35.2998 0 64 28.7002 64 64s-28.7002 64 -64 64s-64 -28.7002 -64 -64s28.7002 -64 64 -64zM293.1 73.4004c0.800781 -0.400391 -0.5 0.299805 19 -9.30078
+v-64c0 -35.5996 -29.1992 -64.5 -64.8994 -64c-35.1006 0.400391 -63.1006 29.7002 -63.1006 64.9004v63c19.2002 9.59961 18 9 18.9004 9.40039c14.4004 6.5 31.0996 -2.2002 34.5996 -17.6006l1.80078 -7.7998c2.09961 -9.2002 15.1992 -9.2002 17.2998 0l1.7998 7.7998
+c3.5 15.4004 20.2002 24.1006 34.5996 17.6006z" />
+ <glyph glyph-name="grin-wink" unicode="&#xf58c;" horiz-adv-x="496"
+d="M0 192c0 137 111 248 248 248s248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248zM200 240c0 17.7002 -14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32s32 14.2998 32 32zM368 215c8.5 -7.40039 21.5996 -0.200195 20 10.7998
+c-4 25.2002 -34.2002 42.1006 -59.9004 42.1006c-25.6992 0 -55.8994 -16.9004 -59.8994 -42.1006c-1.7998 -11.2002 11.5 -18.2002 19.7998 -10.7998l9.5 8.5c14.7998 13.2002 46.2002 13.2002 61 0zM124.9 127.2c-11.4004 3.7002 -22.7002 -6 -20.7002 -17.9004
+c9.2998 -55 83.2002 -93.2998 143.8 -93.2998s134.6 38.2998 143.8 93.2998c1.90039 11.9004 -9.39941 21.6006 -20.7002 17.9004c-30.1992 -9.7002 -75.0996 -15.2002 -123.1 -15.2002s-92.9004 5.5 -123.1 15.2002z" />
+ <glyph glyph-name="grip-horizontal" unicode="&#xf58d;" horiz-adv-x="448"
+d="M96 160c17.6699 0 32 -14.3301 32 -32v-64c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v64c0 17.6699 14.3301 32 32 32h64zM256 160c17.6699 0 32 -14.3301 32 -32v-64c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v64
+c0 17.6699 14.3301 32 32 32h64zM416 160c17.6699 0 32 -14.3301 32 -32v-64c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v64c0 17.6699 14.3301 32 32 32h64zM96 352c17.6699 0 32 -14.3301 32 -32v-64c0 -17.6699 -14.3301 -32 -32 -32h-64
+c-17.6699 0 -32 14.3301 -32 32v64c0 17.6699 14.3301 32 32 32h64zM256 352c17.6699 0 32 -14.3301 32 -32v-64c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v64c0 17.6699 14.3301 32 32 32h64zM416 352c17.6699 0 32 -14.3301 32 -32v-64
+c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v64c0 17.6699 14.3301 32 32 32h64z" />
+ <glyph glyph-name="grip-vertical" unicode="&#xf58e;" horiz-adv-x="320"
+d="M96 416c17.6699 0 32 -14.3301 32 -32v-64c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v64c0 17.6699 14.3301 32 32 32h64zM96 256c17.6699 0 32 -14.3301 32 -32v-64c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v64
+c0 17.6699 14.3301 32 32 32h64zM96 96c17.6699 0 32 -14.3301 32 -32v-64c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v64c0 17.6699 14.3301 32 32 32h64zM288 416c17.6699 0 32 -14.3301 32 -32v-64c0 -17.6699 -14.3301 -32 -32 -32h-64
+c-17.6699 0 -32 14.3301 -32 32v64c0 17.6699 14.3301 32 32 32h64zM288 256c17.6699 0 32 -14.3301 32 -32v-64c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v64c0 17.6699 14.3301 32 32 32h64zM288 96c17.6699 0 32 -14.3301 32 -32v-64
+c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v64c0 17.6699 14.3301 32 32 32h64z" />
+ <glyph glyph-name="headphones-alt" unicode="&#xf58f;"
+d="M160 160c17.6699 0 32 -14.3496 32 -32.0596v-127.881c0 -17.6992 -14.3301 -32.0596 -32 -32.0596h-16c-35.3496 0 -64 28.71 -64 64.1201v63.7598c0 35.4199 28.6504 64.1201 64 64.1201h16zM368 160c35.3496 0 64 -28.71 64 -64.1201v-63.7598
+c0 -35.4102 -28.6504 -64.1201 -64 -64.1201h-16c-17.6699 0 -32 14.3604 -32 32.0596v127.881c0 17.71 14.3301 32.0596 32 32.0596h16zM256 416c143.09 0 251.43 -119.13 256 -256v-112c0 -8.83984 -7.16016 -16 -16 -16h-16c-8.83984 0 -16 7.16016 -16 16v112
+c0 114.67 -93.3301 207.8 -208 207.82c-114.67 -0.0205078 -208 -93.1504 -208 -207.82v-112c0 -8.83984 -7.16016 -16 -16 -16h-16c-8.83984 0 -16 7.16016 -16 16v112c4.57031 136.87 112.91 256 256 256z" />
+ <glyph glyph-name="headset" unicode="&#xf590;"
+d="M192 240v-112c0 -17.6699 -14.3301 -32 -32 -32h-16c-35.3496 0 -64 28.6504 -64 64v48c0 35.3496 28.6504 64 64 64h16c17.6699 0 32 -14.3301 32 -32zM368 96h-16c-17.6699 0 -32 14.3301 -32 32v112c0 17.6699 14.3301 32 32 32h16c35.3496 0 64 -28.6504 64 -64v-48
+c0 -35.3496 -28.6504 -64 -64 -64zM256 448c142.82 0 251.42 -118.83 256 -256v-165.72c0 -49.8604 -40.4199 -90.2803 -90.2803 -90.2803h-181.72c-26.5098 0 -48 21.4902 -48 48s21.4902 48 48 48h32c26.5098 0 48 -21.4902 48 -48h101.72
+c23.3506 0 42.2803 18.9297 42.2803 42.2803c0 0 -0.0400391 163.29 -0.120117 165.72h0.120117c0 114.69 -93.3096 208 -208 208s-208 -93.3096 -208 -208v-16c0 -8.83984 -7.16016 -16 -16 -16h-16c-8.83984 0 -16 7.16016 -16 16v16c4.58008 137.17 113.18 256 256 256z
+" />
+ <glyph glyph-name="highlighter" unicode="&#xf591;" horiz-adv-x="544"
+d="M0 -31.9805l68.3301 70.4707l67.04 -67.04l-35.4502 -35.4502zM124.61 208.03l41.5195 35.4395l173.34 -173.34l-35.5498 -41.6396c-9.5 -10.7705 -24.4199 -14.9805 -38.1504 -10.7803l-42.7393 13.0801l-50.8604 -50.8604l-96.2295 96.2305l50.9297 50.9395
+l-13.0498 42.8301c-0.876953 2.87891 -1.58887 7.65625 -1.58887 10.665c0 9.11328 5.5459 21.4043 12.3789 27.4355zM527.92 368.73c20.5 -20.5 21.5303 -53.3906 2.34961 -75.1309l-169.949 -199.06l-169.771 169.78l199.05 169.96
+c21.7402 19.1699 54.6309 18.1396 75.1201 -2.35059z" />
+ <glyph glyph-name="hot-tub" unicode="&#xf593;"
+d="M414.21 270.35c-3.15039 25.3906 -14.6104 47.9707 -31.9697 62.1406c-27.7305 22.6299 -45.79 58.0498 -50.1299 97.1602c-1.09082 9.7793 6.48926 18.3496 16 18.3496h16.1201c7.98926 0 14.7295 -6.13965 15.7393 -14.3398
+c3.16016 -25.4004 14.6104 -47.9805 31.9805 -62.1504c27.7295 -22.6299 45.79 -58.0498 50.1299 -97.1602c1.08008 -9.7793 -6.49023 -18.3496 -16 -18.3496h-16.1201c-8 0 -14.7295 6.13965 -15.75 14.3496zM306.21 270.35
+c-3.15039 25.3906 -14.6104 47.9707 -31.9697 62.1406c-27.7305 22.6299 -45.79 58.0498 -50.1299 97.1602c-1.09082 9.7793 6.48926 18.3496 16 18.3496h16.1201c7.98926 0 14.7295 -6.13965 15.7393 -14.3398c3.16016 -25.4004 14.6104 -47.9805 31.9805 -62.1504
+c27.7295 -22.6299 45.79 -58.0498 50.1299 -97.1602c1.08008 -9.7793 -6.49023 -18.3496 -16 -18.3496h-16.1201c-8 0 -14.7295 6.13965 -15.75 14.3496zM480 192c17.6699 0 32 -14.3301 32 -32v-160c0 -35.3496 -28.6504 -64 -64 -64h-384c-35.3496 0 -64 28.6504 -64 64
+v224c0 35.3496 28.6504 64 64 64h42.6699h0.00292969c11.7754 0 28.9775 -5.73438 38.3975 -12.7998l110.93 -83.2002h224zM128 8v112c0 4.41992 -3.58008 8 -8 8h-16c-4.41992 0 -8 -3.58008 -8 -8v-112c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8zM224 8v112
+c0 4.41992 -3.58008 8 -8 8h-16c-4.41992 0 -8 -3.58008 -8 -8v-112c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8zM320 8v112c0 4.41992 -3.58008 8 -8 8h-16c-4.41992 0 -8 -3.58008 -8 -8v-112c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8zM416 8
+v112c0 4.41992 -3.58008 8 -8 8h-16c-4.41992 0 -8 -3.58008 -8 -8v-112c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8zM64 320c-35.3496 0 -64 28.6504 -64 64s28.6504 64 64 64s64 -28.6504 64 -64s-28.6504 -64 -64 -64z" />
+ <glyph glyph-name="hotel" unicode="&#xf594;" horiz-adv-x="576"
+d="M560 384h-16v-384h16c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-240v80c0 8.7998 -7.2002 16 -16 16h-32c-8.7998 0 -16 -7.2002 -16 -16v-80h-240c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h15.9805v384h-15.9805
+c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h544c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16zM256 339.2v-38.4004c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h38.4004c6.39941 0 12.7998 6.40039 12.7998 12.7998v38.4004
+c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-38.4004c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998zM256 243.2v-38.4004c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h38.4004c6.39941 0 12.7998 6.40039 12.7998 12.7998v38.4004
+c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-38.4004c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998zM128 339.2v-38.4004c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h38.4004c6.39941 0 12.7998 6.40039 12.7998 12.7998v38.4004
+c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-38.4004c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998zM179.2 192c6.39941 0 12.7998 6.40039 12.7998 12.7998v38.4004c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-38.4004
+c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998v-38.4004c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h38.4004zM192 64h192c0 53.0195 -42.9805 96 -96 96s-96 -42.9805 -96 -96zM448 204.8v38.4004c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-38.4004
+c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998v-38.4004c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h38.4004c6.39941 0 12.7998 6.40039 12.7998 12.7998zM448 300.8v38.4004c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-38.4004
+c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998v-38.4004c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h38.4004c6.39941 0 12.7998 6.40039 12.7998 12.7998z" />
+ <glyph glyph-name="joint" unicode="&#xf595;" horiz-adv-x="640"
+d="M444.34 266.9c-35.7803 25.0693 -60.3398 63.0098 -60.3398 106.699v66.4004c0 4.41992 3.58008 8 8 8h48c4.41992 0 8 -3.58008 8 -8v-62.3096c0 -29.0303 15.8496 -54.71 39.6602 -71.3203c35.3301 -24.6504 56.3398 -64.8203 56.3398 -108.061v-30.3096
+c0 -4.41992 -3.58008 -8 -8 -8h-48c-4.41992 0 -8 3.58008 -8 8v30.3096c0 27.4307 -13.2803 52.9102 -35.6602 68.5908zM194.97 89.0195c22.3701 3.60059 45.0801 -4.30957 59.8203 -21.5098l112.72 -131.51h-88.5693c-98.6406 0 -195.29 27.7197 -278.94 80
+c59.6904 37.3096 126.03 61.9297 194.97 73.0195zM553.28 360.91c54.3096 -36.4102 86.7197 -97.1602 86.7197 -162.601v-30.3096c0 -4.41992 -3.58008 -8 -8 -8h-48c-4.41992 0 -8 3.58008 -8 8v30.3096c0 50.1504 -25.21 96.6504 -67.3604 123.99
+c-18.4697 11.9805 -28.6396 33.3701 -28.6396 55.3906v62.3096c0 4.41992 3.58008 8 8 8h48c4.41992 0 8 -3.58008 8 -8v-62.3096c0 -6.82031 3.61035 -12.9805 9.28027 -16.7803zM360.89 95.9502h0.108398c16.2441 0 38.0049 -10.0127 48.5723 -22.3506l117.949 -137.6
+h-88.4492h-0.00292969c-16.248 0 -38.0146 10.0127 -48.5869 22.3496l-117.801 137.431c1.40039 0.0195312 53.8105 0.109375 88.21 0.169922zM616 96c13.25 0 24 -10.7402 24 -24v-112c0 -13.25 -10.75 -24 -24 -24h-17.4199h-0.00292969
+c-16.248 0 -38.0146 10.0127 -48.5869 22.3496l-117.99 137.65h184z" />
+ <glyph glyph-name="kiss" unicode="&#xf596;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM168 208c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM304 52c0 13 -13.4004 27.2998 -35.2002 36.4004
+c21.7998 8.69922 35.2002 23 35.2002 36c0 19.1992 -28.7002 41.5 -71.5 44c-8.40039 1.09961 -12.2002 -11.8008 -3.59961 -15.4004l17 -7.2002c13 -5.5 20.7998 -13.5 20.7998 -21.5s-7.7998 -16 -20.7998 -21.5l-17 -7.2002c-6.10059 -2.59961 -6 -12.2998 0 -14.7998
+l17 -7.2002c13 -5.5 20.7998 -13.5 20.7998 -21.5s-7.7998 -16 -20.7998 -21.5l-17 -7.19922c-8.5 -3.60059 -4.90039 -16.2002 3.59961 -15.4004c42.7998 2.5 71.5 24.7998 71.5 44zM328 208c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32
+s14.2998 -32 32 -32z" />
+ <glyph glyph-name="kiss-beam" unicode="&#xf597;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM209 220.1c4.2002 -7.5 15.5996 -4 15.0996 4.5c-3.2998 42.1006 -32.1992 71.4004 -56 71.4004c-23.7998 0 -52.6992 -29.2998 -56 -71.4004
+c-0.699219 -8.5 10.7002 -11.8994 14.9004 -4.5l9.5 17c7.7002 13.7002 19.2002 21.6006 31.5 21.6006s23.7998 -7.90039 31.5 -21.6006zM304 52c0 13 -13.4004 27.2998 -35.2002 36.4004c21.7998 8.69922 35.2002 23 35.2002 36c0 19.1992 -28.7002 41.5 -71.5 44
+c-8.40039 1.09961 -12.2002 -11.8008 -3.59961 -15.4004l17 -7.2002c13 -5.5 20.7998 -13.5 20.7998 -21.5s-7.7998 -16 -20.7998 -21.5l-17 -7.2002c-6.10059 -2.59961 -6 -12.2998 0 -14.7998l17 -7.2002c13 -5.5 20.7998 -13.5 20.7998 -21.5s-7.7998 -16 -20.7998 -21.5
+l-17 -7.19922c-8.5 -3.60059 -4.90039 -16.2002 3.59961 -15.4004c42.7998 2.5 71.5 24.7998 71.5 44zM369 220.1c4.2002 -7.5 15.5996 -4 15.0996 4.5c-3.2998 42.1006 -32.1992 71.4004 -56 71.4004c-23.7998 0 -52.6992 -29.2998 -56 -71.4004
+c-0.699219 -8.5 10.8008 -11.7998 14.9004 -4.5l9.5 17c7.7002 13.7002 19.2002 21.6006 31.5 21.6006s23.7998 -7.90039 31.5 -21.6006z" />
+ <glyph glyph-name="kiss-wink-heart" unicode="&#xf598;" horiz-adv-x="503"
+d="M501.1 45.5c9.2002 -23.9004 -4.39941 -49.4004 -28.5 -55.7002l-83 -21.5c-5.39941 -1.39941 -10.8994 1.7998 -12.3994 7.10059l-22.9004 82.5996c-6.59961 24 8.7998 48.5996 34 52.5996c22 3.5 43.1006 -11.5996 49 -33l2.2998 -8.39941l8.40039 2.2002
+c21.5996 5.59961 45.0996 -5.10059 53.0996 -25.9004zM323.5 49.5c0 0 23.5996 -83.9004 23.9004 -84.5996c-30.5 -13.4004 -64 -20.9004 -99.4004 -20.9004c-137 0 -248 111 -248 248s111 248 248 248s248 -111 248 -248c0 -31.7998 -6.2002 -62.0996 -17.0996 -90
+c-6 1.5 -12.2002 2.7998 -18.6006 2.90039c-29.0996 49.7998 -98.0996 50.5996 -127.8 4.2998c-11.2998 -17.7002 -14.5996 -39.4004 -9 -59.7002zM168 208c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM288 52
+c0 13 -13.4004 27.2998 -35.2002 36.4004c21.7998 8.69922 35.2002 23 35.2002 36c0 19.1992 -28.7002 41.5 -71.5 44c-8.2002 1.19922 -12.4004 -11.7002 -3.59961 -15.4004l17 -7.2002c13 -5.5 20.7998 -13.5 20.7998 -21.5s-7.7998 -16 -20.7998 -21.5l-17 -7.2002
+c-5.7002 -2.5 -6 -12.2998 0 -14.7998l17 -7.2002c13 -5.5 20.7998 -13.5 20.7998 -21.5s-7.7998 -16 -20.7998 -21.5l-17 -7.19922c-8.5 -3.60059 -4.90039 -16.2002 3.59961 -15.4004c42.7998 2.5 71.5 24.7998 71.5 44zM304 231l9.7002 8.5
+c14.7998 13.2002 46.2002 13.2002 61 0l9.5 -8.5c8.5 -7.5 21.5 -0.299805 19.7998 10.7998c-4 25.2002 -34.2002 42.1006 -59.9004 42.1006c-25.6992 0 -55.8994 -16.9004 -59.8994 -42.1006c-1.7998 -11.2002 11.5 -18.2002 19.7998 -10.7998z" />
+ <glyph glyph-name="laugh" unicode="&#xf599;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM328 288c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32s32 14.2998 32 32s-14.2998 32 -32 32zM168 288c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32
+s32 14.2998 32 32s-14.2998 32 -32 32zM256 16c73.4004 0 134 55 142.9 126c1.19922 9.59961 -6.30078 18 -15.9004 18h-270c-9.59961 0 -17.0996 -8.5 -15.9004 -18c8.90039 -71 69.5 -126 142.9 -126h16z" />
+ <glyph glyph-name="laugh-beam" unicode="&#xf59a;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM272 240.6c-0.700195 -8.59961 10.9004 -11.8994 15.0996 -4.5l9.5 17c7.7002 13.7002 19.2002 21.6006 31.5 21.6006c12.3008 0 23.8008 -7.90039 31.5 -21.6006l9.5 -17
+c4.10059 -7.39941 15.6006 -4.09961 14.9004 4.5c-3.2998 42.1006 -32.2002 71.4004 -56 71.4004s-52.7002 -29.2998 -56 -71.4004zM112 240.6c-0.700195 -8.5 10.7998 -11.8994 15.0996 -4.5l9.5 17c7.7002 13.7002 19.2002 21.6006 31.5 21.6006
+c12.3008 0 23.8008 -7.90039 31.5 -21.6006l9.5 -17c4.10059 -7.39941 15.6006 -4.09961 14.9004 4.5c-3.2998 42.1006 -32.2002 71.4004 -56 71.4004s-52.7002 -29.2998 -56 -71.4004zM398.9 142c1.19922 9.59961 -6.30078 18 -15.9004 18h-270
+c-9.59961 0 -17.0996 -8.5 -15.9004 -18c8.90039 -71 69.5 -126 142.9 -126h16c73.4004 0 134 55 142.9 126z" />
+ <glyph glyph-name="laugh-squint" unicode="&#xf59b;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM281.8 278.3c-7.7002 -4.7002 -7.7002 -15.8994 0 -20.5996l80 -48c11.5 -6.7998 24.1006 7.59961 15.4004 18l-33.6006 40.2998l33.6006 40.2998
+c8.59961 10.2998 -3.7998 24.9004 -15.4004 18zM118.8 308.3l33.6006 -40.2998l-33.6006 -40.2998c-8.59961 -10.4004 3.90039 -24.7998 15.4004 -18l80 48c7.7998 4.7002 7.7998 15.8994 0 20.5996l-80 48c-11.6006 6.90039 -24 -7.7002 -15.4004 -18zM398.9 142
+c1.19922 9.59961 -6.30078 18 -15.9004 18h-270c-9.59961 0 -17.0996 -8.5 -15.9004 -18c8.90039 -71 69.5 -126 142.9 -126h16c73.4004 0 134 55 142.9 126z" />
+ <glyph glyph-name="laugh-wink" unicode="&#xf59c;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM268.1 241.9c-1.69922 -11.2002 11.5 -18.3008 19.9004 -10.9004l9.59961 8.59961c14.8008 13.2002 46.2002 13.2002 61 0l9.5 -8.5
+c8.40039 -7.5 21.5 -0.299805 19.8008 10.8008c-4 25.1992 -34.2002 42.0996 -59.9004 42.0996s-55.9004 -16.9004 -59.9004 -42.0996zM168 288c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32s32 14.2998 32 32s-14.2998 32 -32 32zM398.9 142
+c1.19922 9.59961 -6.30078 18 -15.9004 18h-270c-9.59961 0 -17.0996 -8.5 -15.9004 -18c8.90039 -71 69.5 -126 142.9 -126h16c73.4004 0 134 55 142.9 126z" />
+ <glyph glyph-name="luggage-cart" unicode="&#xf59d;" horiz-adv-x="640"
+d="M224 128c-17.6699 0 -32 14.3301 -32 32v160c0 17.6699 14.3301 32 32 32h32v-224h-32zM576 160c0 -17.6699 -14.3301 -32 -32 -32h-32v224h32c17.6699 0 32 -14.3301 32 -32v-160zM624 64c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-82.9404
+c1.79004 -5.03027 2.94043 -10.3604 2.94043 -16c0 -26.5098 -21.4902 -48 -48 -48s-48 21.4902 -48 48c0 5.63965 1.15039 10.9697 2.94043 16h-197.881c1.79004 -5.03027 2.94043 -10.3604 2.94043 -16c0 -26.5098 -21.4902 -48 -48 -48s-48 21.4902 -48 48
+c0 5.63965 1.15039 10.9697 2.94043 16h-82.9404c-8.83984 0 -16 7.16016 -16 16v368h-48c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h96c8.83984 0 16 -7.16016 16 -16v-368h496zM480 352v-224h-192v272c0 26.5098 21.4902 48 48 48h96
+c26.5098 0 48 -21.4902 48 -48v-48zM432 352v48h-96v-48h96z" />
+ <glyph glyph-name="map-marked" unicode="&#xf59f;" horiz-adv-x="576"
+d="M288 448c69.5898 0 126 -56.4102 126 -126c0 -56.2598 -82.3496 -158.8 -113.9 -196.02c-6.37988 -7.54004 -17.8096 -7.54004 -24.1992 0c-31.5508 37.2197 -113.9 139.76 -113.9 196.02c0 69.5898 56.4102 126 126 126zM20.1201 232.05l118.63 47.4502
+c5.17969 -14.8799 12.4102 -30.4404 21.25 -46.4199v-233.08l-138.06 -62.8398c-10.5107 -4.2002 -21.9404 3.54004 -21.9404 14.8594v250.32c0.00292969 11.959 9.0166 25.2686 20.1201 29.71zM288 88.3301c14.0703 0 27.3799 6.17969 36.5098 16.9502
+c19.6699 23.2002 40.5703 49.6299 59.4902 76.7197v-245.99l-192 64v182c18.9199 -27.0996 39.8301 -53.5195 59.4902 -76.7197c9.12988 -10.7803 22.4395 -16.96 36.5098 -16.96zM554.06 286.84c10.5107 4.2002 21.9404 -3.54004 21.9404 -14.8594v-250.32
+c0 -11.9609 -9.01367 -25.2705 -20.1201 -29.71l-139.88 -55.9502v288z" />
+ <glyph glyph-name="map-marked-alt" unicode="&#xf5a0;" horiz-adv-x="576"
+d="M288 448c69.5898 0 126 -56.4102 126 -126c0 -56.2598 -82.3496 -158.8 -113.9 -196.02c-6.37988 -7.54004 -17.8096 -7.54004 -24.1992 0c-31.5508 37.2197 -113.9 139.76 -113.9 196.02c0 69.5898 56.4102 126 126 126zM288 280c23.2002 0 42 18.7998 42 42
+s-18.7998 42 -42 42s-42 -18.7998 -42 -42s18.7998 -42 42 -42zM20.1201 232.05l118.63 47.4502c5.17969 -14.8799 12.4102 -30.4404 21.25 -46.4199v-233.08l-138.06 -62.8398c-10.5107 -4.2002 -21.9404 3.54004 -21.9404 14.8594v250.32
+c0.00292969 11.959 9.0166 25.2686 20.1201 29.71zM288 88.3301c14.0703 0 27.3799 6.17969 36.5098 16.9502c19.6699 23.2002 40.5703 49.6299 59.4902 76.7197v-245.99l-192 64v182c18.9199 -27.0996 39.8301 -53.5195 59.4902 -76.7197
+c9.12988 -10.7803 22.4395 -16.96 36.5098 -16.96zM554.06 286.84c10.5107 4.2002 21.9404 -3.54004 21.9404 -14.8594v-250.32c0 -11.9609 -9.01367 -25.2705 -20.1201 -29.71l-139.88 -55.9502v288z" />
+ <glyph glyph-name="marker" unicode="&#xf5a1;"
+d="M93.9502 157.97l75.3994 75.4004l128.021 -128.021l-75.4004 -75.3994c-44.8223 -44.8203 -132.335 -86.8428 -195.34 -93.7998c-15.2803 -1.69043 -28.1895 11.2295 -26.4902 26.5098l0.0302734 0.229492c7.00195 62.9189 49.0156 150.315 93.7803 195.08z
+M485.49 421.49c35.3496 -35.3604 35.3496 -92.6699 0 -128.021l-165.49 -165.489l-128.02 128.02l98.4795 98.4697l-19.5898 19.5898l-87.1504 -87.1494c-6.25 -6.25 -16.3799 -6.25 -22.6299 0l-22.6201 22.6201c-6.25 6.25 -6.25 16.3799 0 22.6299l104.12 104.12
+c15.6104 15.6201 40.9404 15.6201 56.5605 0l36.5596 -36.5498l21.7598 21.7598c35.3506 35.3496 92.6699 35.3496 128.021 0z" />
+ <glyph glyph-name="medal" unicode="&#xf5a2;"
+d="M223.75 317.25c-42.04 -6.55957 -79.8398 -25.6201 -109.56 -53.3896l-111.271 158.96c-7.41992 10.6094 0.160156 25.1797 13.1104 25.1797h111.149c10.0029 0 22.2959 -6.96191 27.4404 -15.54zM495.97 448c12.9502 0 20.5303 -14.5703 13.1104 -25.1797
+l-111.271 -158.95c-29.7197 27.7598 -67.5195 46.8203 -109.56 53.3799l69.1299 115.21c5.78027 9.63965 16.2002 15.54 27.4404 15.54h111.149zM256 288c97.2002 0 176 -78.7998 176 -176s-78.7998 -176 -176 -176s-176 78.7998 -176 176s78.7998 176 176 176z
+M348.52 130.74c6.82031 6.63965 3.05078 18.2295 -6.34961 19.5898l-52.4297 7.63965l-23.4307 47.5205c-2.10938 4.25 -6.21973 6.38965 -10.3291 6.38965c-4.09082 0 -8.1709 -2.11035 -10.2803 -6.38965l-23.4307 -47.5205l-52.4297 -7.63965
+c-9.39941 -1.36035 -13.1699 -12.9502 -6.34961 -19.5898l37.9297 -36.96l-8.96973 -52.2207c-1.60059 -9.34961 8.25 -16.54 16.6494 -12.0898l46.9004 24.6504l46.9102 -24.6504c8.38965 -4.41992 18.25 2.73047 16.6494 12.0898l-8.96973 52.2207z" />
+ <glyph glyph-name="meh-blank" unicode="&#xf5a4;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM168 208c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM328 208c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32
+s-32 -14.2998 -32 -32s14.2998 -32 32 -32z" />
+ <glyph glyph-name="meh-rolling-eyes" unicode="&#xf5a5;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM88 224c0 -35.2998 28.7002 -64 64 -64s64 28.7002 64 64c0 24.2998 -13.7002 45.2002 -33.5996 56c0.699219 -2.59961 1.59961 -5.2002 1.59961 -8
+c0 -17.7002 -14.2998 -32 -32 -32s-32 14.2998 -32 32c0 2.7998 0.900391 5.40039 1.59961 8c-19.8994 -10.7998 -33.5996 -31.7002 -33.5996 -56zM312 48c21.2002 0 21.2002 32 0 32h-128c-21.2002 0 -21.2002 -32 0 -32h128zM344 160c35.2998 0 64 28.7002 64 64
+c0 24.2998 -13.7002 45.2002 -33.5996 56c0.699219 -2.59961 1.59961 -5.2002 1.59961 -8c0 -17.7002 -14.2998 -32 -32 -32s-32 14.2998 -32 32c0 2.7998 0.900391 5.40039 1.59961 8c-19.8994 -10.7998 -33.5996 -31.7002 -33.5996 -56c0 -35.2998 28.7002 -64 64 -64z
+" />
+ <glyph glyph-name="monument" unicode="&#xf5a6;" horiz-adv-x="384"
+d="M368 0c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-352c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h352zM289.14 347.26l30.8604 -315.26h-256l30.8701 315.26c0.625 6.27344 4.75098 14.9834 9.20996 19.4404
+l76.5996 76.6094c6.25 6.25 16.3799 6.25 22.6299 0l76.6201 -76.6094c4.45898 -4.45703 8.58496 -13.167 9.20996 -19.4404zM240 140.8v38.4004c0 6.39941 -6.40039 12.7998 -12.7998 12.7998h-70.4004c-6.39941 0 -12.7998 -6.40039 -12.7998 -12.7998v-38.4004
+c0 -6.39941 6.40039 -12.7998 12.7998 -12.7998h70.4004c6.39941 0 12.7998 6.40039 12.7998 12.7998z" />
+ <glyph glyph-name="mortar-pestle" unicode="&#xf5a7;"
+d="M501.54 387.09l-99.0801 -99.0898h-151.37l203.811 152.86c5.25293 3.93848 14.8457 7.13477 21.4121 7.13477c4.64941 0 11.7988 -1.68652 15.957 -3.76465c21.7803 -10.8906 26.4902 -39.9209 9.27051 -57.1406zM496 256c8.83984 0 16 -7.16016 16 -16v-32
+c0 -8.83984 -7.16016 -16 -16 -16h-16c0 -80.9805 -50.2002 -150.11 -121.13 -178.32c12.7695 -16.8701 21.7295 -36.7998 24.9502 -58.6895c1.45996 -9.91992 -6.04004 -18.9805 -16.0703 -18.9805h-223.5c-10.0303 0 -17.5303 9.06055 -16.0703 18.9805
+c3.23047 21.8896 12.1904 41.8193 24.9502 58.6895c-70.9297 28.21 -121.13 97.3398 -121.13 178.32h-16c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h480z" />
+ <glyph glyph-name="paint-roller" unicode="&#xf5aa;"
+d="M416 320c0 -17.6699 -14.3301 -32 -32 -32h-352c-17.6699 0 -32 14.3301 -32 32v96c0 17.6699 14.3301 32 32 32h352c17.6699 0 32 -14.3301 32 -32v-96zM448 384c35.3496 0 64 -28.6504 64 -64v-64c0 -53.0195 -42.9805 -96 -96 -96h-160v-32
+c17.6699 0 32 -14.3301 32 -32v-128c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v128c0 17.6699 14.3301 32 32 32v32c0 35.3496 28.6504 64 64 64h160c17.6699 0 32 14.3301 32 32v128z" />
+ <glyph glyph-name="passport" unicode="&#xf5ab;" horiz-adv-x="448"
+d="M129.62 272c5.28027 31.2197 25.5898 57.1699 53.2998 70.4102c-7.66992 -19.0605 -12.7197 -43.3799 -14.21 -70.4102h-39.0898zM129.62 240h39.0898c1.49023 -27.0303 6.53027 -51.3496 14.21 -70.4102c-27.71 13.2402 -48.0098 39.1904 -53.2998 70.4102zM224 161.31
+c-7.69043 7.4502 -20.7695 34.4307 -23.4404 78.6904h46.8701c-2.66016 -44.2695 -15.7393 -71.2402 -23.4297 -78.6904zM200.57 272c2.66016 44.2598 15.7393 71.2402 23.4395 78.6904c7.69043 -7.4502 20.7705 -34.4307 23.4307 -78.6904h-46.8701zM265.08 169.59
+c7.67969 19.0605 12.7197 43.3799 14.21 70.4102h39.0898c-5.28027 -31.2197 -25.5898 -57.1699 -53.2998 -70.4102zM416 448c17.6699 0 32 -14.3301 32 -32v-448c0 -17.6699 -14.3301 -32 -32 -32h-352c-35.3496 0 -64 28.6504 -64 64v384c0 35.3496 28.6504 64 64 64h352z
+M336 32c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16h-224c-8.7998 0 -16 -7.2002 -16 -16s7.2002 -16 16 -16h224zM224 128c70.6904 0 128 57.3096 128 128s-57.3096 128 -128 128s-128 -57.3096 -128 -128s57.3096 -128 128 -128zM265.08 342.41
+c27.71 -13.2402 48.0195 -39.1904 53.2998 -70.4102h-39.0898c-1.49023 27.0303 -6.53027 51.3496 -14.21 70.4102z" />
+ <glyph glyph-name="pen-fancy" unicode="&#xf5ac;" horiz-adv-x="511"
+d="M79.1797 165.06l84.0703 33.0703l98.8799 -98.8799l-33.0703 -84.0703c-2.79102 -8.38086 -11.8584 -17.4482 -20.2393 -20.2393l-176.82 -58.9404l-4.67969 4.67969l92.8896 92.8906c2.55957 -0.660156 5.03027 -1.57031 7.7998 -1.57031c17.6699 0 32 14.3301 32 32
+s-14.3301 32 -32 32s-32 -14.3301 -32 -32c0 -2.76953 0.910156 -5.24023 1.57031 -7.7998l-92.8896 -92.8906l-4.69043 4.69043l58.9404 176.82c2.79297 8.37891 11.8604 17.4463 20.2393 20.2393zM369.25 419.68c74.4805 84.2607 199.15 -39.1602 114.23 -114.229
+l-199.49 -183.11l-97.8506 97.8506z" />
+ <glyph glyph-name="pen-nib" unicode="&#xf5ad;"
+d="M136.6 309.21l151.4 42.79l128 -128l-42.79 -151.4c-5.08594 -17.9932 -23.6104 -37.3965 -41.3496 -43.3096l-279.86 -93.29l-14.6904 14.6904l150.11 150.109c6.25977 -2.99023 13.1797 -4.7998 20.5801 -4.7998c26.5098 0 48 21.4902 48 48s-21.4902 48 -48 48
+s-48 -21.4902 -48 -48c0 -7.40039 1.80957 -14.3203 4.7998 -20.5801l-150.109 -150.11l-14.6904 14.6904l93.29 279.86c5.91309 17.7393 25.3164 36.2637 43.3096 41.3496zM497.94 373.83c18.75 -18.7598 18.75 -49.1602 0 -67.9102l-56.5508 -56.5498l-128.02 128.02
+l56.5498 56.5508c18.75 18.75 49.1602 18.75 67.9102 0z" />
+ <glyph glyph-name="pencil-ruler" unicode="&#xf5ae;"
+d="M109.46 203.96l-100.17 100.18c-12.3896 12.3906 -12.3799 32.4707 0 44.8604l89.71 89.71c12.3896 12.3896 32.4697 12.3896 44.8604 0l33.6396 -33.6504l-61.6797 -61.6797c-3.10059 -3.08984 -3.10059 -8.11035 0 -11.21l11.21 -11.21
+c1.28027 -1.2832 3.79199 -2.3252 5.60449 -2.3252c1.81348 0 4.32422 1.04199 5.60547 2.3252l61.6797 61.6797l44.1201 -44.1201zM497.93 320.76l-46.0195 -46.0293l-113.2 113.199l46.0205 46.0107c18.7695 18.7598 49.1895 18.7598 67.9492 0l45.25 -45.25
+c18.75 -18.7607 18.7607 -49.1709 0 -67.9307zM316.08 365.29l113.2 -113.19l-296.92 -296.93l-107.45 -18.8398c-14.5 -2.5498 -27.1201 10.0703 -24.5898 24.5596l18.7598 107.44zM502.71 79.8604c12.3896 -12.3906 12.3896 -32.4707 0 -44.8604l-89.71 -89.7002
+c-12.3896 -12.3896 -32.4697 -12.3896 -44.8604 0l-100.21 100.2l134.58 134.56l44.1406 -44.1396l-61.6807 -61.6797c-3.08984 -3.08984 -3.08984 -8.11035 0 -11.21l11.21 -11.21c3.08984 -3.10059 8.11035 -3.10059 11.21 0l61.6807 61.6797z" />
+ <glyph glyph-name="plane-arrival" unicode="&#xf5af;" horiz-adv-x="640"
+d="M624 0c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-608c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h608zM44.8096 242.34c-6.5498 5.91016 -12.3896 14.3398 -12.5791 23.25l-0.230469 101.78
+c0.19043 10.8799 10.3799 18.7002 20.7197 15.8799l39.7305 -10.8301c5.00977 -1.36035 9.08984 -5.04004 11.0195 -9.92969l27.5898 -67.8799l102.2 -27.8408l-47.9199 164.211c-0.189453 11.1191 10.1504 19.3193 20.71 16.4395l65.0898 -17.7295
+c5.70996 -1.56055 10.1504 -6.10059 11.6602 -11.9102l100.36 -191.851l97.5098 -26.5596c26.4805 -7.20996 51.5498 -20.1797 70.8301 -40c21.6396 -22.25 27.2002 -40.46 23.3701 -54.96c-3.81055 -14.5 -17.5801 -27.4404 -47.25 -35.71
+c-26.4404 -7.36035 -54.5205 -5.85059 -81 1.35938l-287.601 78.3506c-7.94531 2.16895 -19.3564 8.41016 -25.4697 13.9297z" />
+ <glyph glyph-name="plane-departure" unicode="&#xf5b0;" horiz-adv-x="640"
+d="M624 0c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-608c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h608zM80.5498 106.73l-76.21 82.9697c-7.62012 8.95996 -4.87012 22.7803 5.57031 28.0801l40.1299 20.3701
+c2.0957 1.0625 5.7041 1.92578 8.05469 1.92578c2.33691 0 5.92773 -0.853516 8.01562 -1.90625l72.3496 -36.4697l103.21 52.3799l-156.22 98.0996c-8.08008 8.87988 -5.5 23.1201 5.16992 28.5303l65.75 33.3701c2.0957 1.06348 5.70508 1.92676 8.05566 1.92676
+c3 0 7.45508 -1.36035 9.94434 -3.03711l218.7 -82.0596l98.5098 49.9902c26.7402 13.5596 56.4297 21.4199 86.2803 19.4795c33.5098 -2.17969 51.04 -12.8799 58.25 -27.4502c7.22949 -14.5596 5.23926 -35.1699 -13.0703 -63.6494
+c-16.3096 -25.3701 -40.2803 -44.7402 -67.0205 -58.3105l-290.96 -147.649c-7.71094 -3.91895 -20.9893 -7.1084 -29.6396 -7.12012l-130.54 -0.180664c-9.22949 -0.00976562 -18.0498 3.87012 -24.3301 10.7109z" />
+ <glyph glyph-name="prescription" unicode="&#xf5b1;" horiz-adv-x="384"
+d="M301.26 96l78.0605 -78.0498c6.25 -6.25 6.25 -16.3799 0 -22.6299l-22.6299 -22.6299c-6.25 -6.25 -16.3809 -6.25 -22.6309 0l-78.0596 78.0596l-78.0498 -78.0703c-6.25 -6.25 -16.3799 -6.25 -22.6299 0l-22.6299 22.6299c-6.25 6.25 -6.25 16.3809 0 22.6309
+l78.0596 78.0596l-128 128h-18.75v-80c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v256c0 8.83984 7.16016 16 16 16h144c53.0195 0 96 -42.9805 96 -96c0 -48.8896 -36.6904 -88.7998 -83.96 -94.7803l83.96 -83.96l78.0596 78.0605
+c6.25 6.25 16.3809 6.25 22.6309 0l22.6299 -22.6299c6.25 -6.25 6.25 -16.3809 0 -22.6309zM64 352v-64h96c17.6396 0 32 14.3604 32 32s-14.3604 32 -32 32h-96z" />
+ <glyph glyph-name="sad-cry" unicode="&#xf5b3;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248c0 -90 -48.2002 -168.7 -120 -212.1v180.1c0 8.7998 -7.2002 16 -16 16s-16 -7.2002 -16 -16v-196.7c-29.5 -12.3994 -62 -19.2998 -96 -19.2998s-66.5 6.90039 -96 19.2998v196.7c0 8.7998 -7.2002 16 -16 16s-16 -7.2002 -16 -16
+v-180.1c-71.7998 43.3994 -120 122 -120 212.1c0 137 111 248 248 248zM182.5 223.5l9.7002 -8.5c2.5 -2.2998 7.89941 -4.7002 13.7002 -1.59961c4.39941 2.39941 6.89941 7.39941 6.09961 12.3994c-4 25.2002 -34.2002 42.1006 -59.9004 42.1006
+c-25.6992 0 -55.8994 -16.9004 -59.8994 -42.1006c-0.799805 -5 1.7002 -10 6.09961 -12.3994c4.40039 -2.40039 9.90039 -1.7002 13.7002 1.59961l9.5 8.5c14.7998 13.2002 46.2002 13.2002 61 0zM248 32c26.5 0 48 28.7002 48 64s-21.5 64 -48 64s-48 -28.7002 -48 -64
+s21.5 -64 48 -64zM397.8 213.5c4.40039 2.40039 6.7998 7.40039 6.2002 12.2998c-4 25.2002 -34.2002 42.1006 -59.9004 42.1006c-25.6992 0 -55.8994 -16.9004 -59.8994 -42.1006c-0.799805 -5 1.7002 -10 6.09961 -12.3994
+c4.40039 -2.40039 9.90039 -1.7002 13.7002 1.59961l9.59961 8.59961c14.8008 13.2002 46.2002 13.2002 61 0l9.5 -8.5c2.5 -2.2998 7.90039 -4.69922 13.7002 -1.59961z" />
+ <glyph glyph-name="sad-tear" unicode="&#xf5b4;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM328 272c-17.7002 0 -32 -14.2998 -32 -32s14.2998 -32 32 -32s32 14.2998 32 32s-14.2998 32 -32 32zM152 32c26.5 0 48 21 48 47c0 20 -28.5 60.4004 -41.5996 77.7998
+c-3.2002 4.2998 -9.60059 4.2998 -12.8008 0c-13.0996 -17.3994 -41.5996 -57.7998 -41.5996 -77.7998c0 -26 21.5 -47 48 -47zM168 208c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM338.2 53.7998
+c13.2998 -16.0996 38.2998 4 24.5 20.4004c-28.4004 34.2002 -70.2998 53.7998 -114.7 53.7998c-21.2002 0 -21.2002 -32 0 -32c34.9004 0 67.7998 -15.4004 90.2002 -42.2002z" />
+ <glyph glyph-name="shuttle-van" unicode="&#xf5b6;" horiz-adv-x="640"
+d="M628.88 237.35c7.17969 -8.62988 11.1201 -19.5 11.1201 -30.7295v-110.62c0 -17.6699 -14.3301 -32 -32 -32h-32c0 -53.0195 -42.9805 -96 -96 -96s-96 42.9805 -96 96h-128c0 -53.0195 -42.9805 -96 -96 -96s-96 42.9805 -96 96h-32c-17.6699 0 -32 14.3301 -32 32v288
+c0 17.6699 14.3301 32 32 32h425.52c12.4082 -0.00195312 28.9258 -7.73926 36.8701 -17.2695zM64 256h96v96h-96v-96zM160 16c26.5098 0 48 21.4902 48 48s-21.4902 48 -48 48s-48 -21.4902 -48 -48s21.4902 -48 48 -48zM320 256v96h-96v-96h96zM480 16
+c26.5098 0 48 21.4902 48 48s-21.4902 48 -48 48s-48 -21.4902 -48 -48s21.4902 -48 48 -48zM384 256h146.02l-80 96h-66.0195v-96z" />
+ <glyph glyph-name="signature" unicode="&#xf5b7;" horiz-adv-x="639"
+d="M623.2 256c9.09961 0.599609 16.7998 -7.09961 16.5996 -16.2002v-32.0996c0 -8.5 -6.7002 -15.1006 -15.2002 -15.7998c-39.3994 -3.2002 -105.399 -51 -138.399 -65.8008c-34.2998 -15.3994 -66.7002 -30 -102.3 -30c-28.2002 0 -50.2002 8.5 -65.5 25.3008
+c-22.7002 24.8994 -22.8008 55.2998 -20.6006 83.7998c-56.5 -45.1006 -169 -153.601 -211.2 -195.8c-6.09961 -6.2002 -14.2998 -9.40039 -22.5996 -9.40039c-27 0 -36.5 27 -29.7002 43.9004l98.2002 245.6c8 19.9004 -14.2998 38.7998 -32.7002 27.0996l-58 -38.8994
+c-7.5 -4.7998 -17.3994 -2.60059 -22.0996 4.89941l-17.2002 27c-4.7002 7.5 -2.5 17.4004 4.90039 22.1006l54.8994 36.8994c76.5 48.7002 160.101 -26.8994 129.7 -102.8l-41.5 -103.7c105.2 101.2 144.4 124.5 169.5 126
+c54.4004 3.10059 43.7998 -68.0996 42.7002 -76.0996c-4.7002 -35.7002 -1.2998 -51.9004 21.2998 -51.9004c21.9004 0 47 11.3008 76.0996 24.4004c37.4004 16.7998 111.301 68 163.101 71.5z" />
+ <glyph glyph-name="smile-beam" unicode="&#xf5b8;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM112 224.6c-0.700195 -8.5 10.7998 -11.8994 15.0996 -4.5l9.5 17c7.7002 13.7002 19.2002 21.6006 31.5 21.6006c12.3008 0 23.8008 -7.90039 31.5 -21.6006l9.5 -17
+c4.10059 -7.39941 15.6006 -4.09961 14.9004 4.5c-3.2998 42.1006 -32.2002 71.4004 -56 71.4004s-52.7002 -29.2998 -56 -71.4004zM362.8 101.8c13.5 16.2002 -11 36.7002 -24.5996 20.5c-22.4004 -26.7998 -55.2002 -42.2002 -90.2002 -42.2002
+s-67.7998 15.3008 -90.2002 42.2002c-13.5996 16.2002 -38.0996 -4.2002 -24.5996 -20.5c28.5 -34.2002 70.2998 -53.7998 114.8 -53.7998s86.2998 19.5996 114.8 53.7998zM369 220.1c4.09961 -7.39941 15.7002 -4.09961 15.0996 4.5
+c-3.2998 42.1006 -32.1992 71.4004 -56 71.4004c-23.7998 0 -52.6992 -29.2998 -56 -71.4004c-0.699219 -8.5 10.8008 -11.7998 14.9004 -4.5l9.5 17c7.7002 13.7002 19.2002 21.6006 31.5 21.6006s23.7998 -7.90039 31.5 -21.6006z" />
+ <glyph glyph-name="solar-panel" unicode="&#xf5ba;" horiz-adv-x="640"
+d="M431.98 -0.00976562c8.84961 0.00976562 16.0293 -7.16992 16.0195 -16.0205l-0.0400391 -31.7295c-0.00976562 -8.82031 -7.16016 -15.9707 -15.9795 -15.9805l-223.961 -0.259766c-8.84961 -0.00976562 -16.0293 7.16992 -16.0195 16.0195l0.0498047 31.7305
+c0.00976562 8.83008 7.16016 15.9805 15.9805 15.9902l47.9795 0.0498047v32.21h128v-32.0596zM585.2 421.26c58.1094 -342.42 54.7803 -321.39 54.7598 -325.47c-0.0800781 -17.2305 -14.3604 -31.79 -32.5898 -31.79h-574.74c-18.3096 0 -32.6299 14.6797 -32.5996 32
+c0.00976562 3.91992 -3.35059 -17.1602 54.7598 325.26c2.62012 15.4307 16.21 26.7402 32.1396 26.7402h466.13c15.9307 0 29.5205 -11.3096 32.1406 -26.7402zM259.83 384l-9.77051 -96h139.87l-9.76953 96h-120.33zM184.66 128l11.4102 112h-105.971l-19.0098 -112
+h113.57zM200.95 288l9.76953 96h-96.1895l-16.29 -96h102.71zM233.77 128h172.45l-11.3994 112h-149.65zM429.27 384l9.77051 -96h102.71l-16.29 96h-96.1904zM455.33 128h113.58l-19.0098 112h-105.971z" />
+ <glyph glyph-name="spa" unicode="&#xf5bb;" horiz-adv-x="576"
+d="M568.25 256c4.41016 0.0195312 7.79004 -3.40039 7.75 -7.82031c-0.230469 -27.9199 -7.12988 -126.13 -88.7695 -199.3c-84.04 -81.8301 -167.23 -80.8799 -199.23 -80.8799s-115.21 -0.94043 -199.23 80.8799c-81.6299 73.1602 -88.5391 171.38 -88.7695 199.3
+c-0.0400391 4.41992 3.33984 7.83984 7.75 7.82031c29.04 -0.129883 135.01 -6.16016 213.84 -83c33.1201 -29.6299 53.3604 -63.2998 66.4102 -94.8604c13.0498 31.5605 33.29 65.2305 66.4102 94.8604c78.8301 76.8398 184.8 82.8701 213.84 83zM287.98 145.4
+c-10.7012 15.7168 -30.8789 38.7705 -45.04 51.46c-18.7207 18.25 -38.8506 32.6895 -59.2207 44.3896c16.4707 70.4404 51.75 132.93 96.7402 172.07c4.12012 3.58008 11.0303 3.58008 15.1396 0c45.0107 -39.1699 80.29 -101.721 96.7305 -172.221
+c-20.6797 -11.8799 -41.1699 -26.5693 -60.2598 -45.1797c-16.4902 -14.7402 -31.2705 -31.6699 -44.0898 -50.5195z" />
+ <glyph glyph-name="splotch" unicode="&#xf5bc;"
+d="M472.29 252.11c48.54 -16.6201 53.8301 -73.8301 8.9502 -96.79l-62 -31.7402c-17.8301 -9.12988 -29.2803 -25.2002 -30.6299 -42.9902l-4.7002 -61.8594c-3.41016 -44.79 -65.1299 -66.7803 -104.45 -37.2207l-54.3203 40.8301
+c-15.6201 11.7305 -36.96 16.1201 -57.0693 11.7305l-69.96 -15.2803c-50.6504 -11.0596 -94.0801 32.5596 -73.4902 73.8096l28.4297 56.9805c8.18066 16.3799 6.44043 35.1699 -4.63965 50.2402l-38.54 52.4199c-27.9004 37.9502 6.97949 86.8994 59.0303 82.8301
+l71.8799 -5.62012c20.6602 -1.62012 40.9395 5.59961 54.2002 19.3096l46.1396 47.6699c33.4102 34.5107 98.3994 21.1504 109.979 -22.6201l15.9902 -60.4492c4.60059 -17.3799 18.8604 -31.7002 38.1406 -38.3008z" />
+ <glyph glyph-name="spray-can" unicode="&#xf5bd;"
+d="M224 416v-96h-128v96c0 17.6699 14.3301 32 32 32h64c17.6699 0 32 -14.3301 32 -32zM480 320c17.6699 0 32 -14.3301 32 -32s-14.3301 -32 -32 -32s-32 14.3301 -32 32s14.3301 32 32 32zM224 288c53.0195 0 96 -42.9805 96 -96v-224c0 -17.6699 -14.3301 -32 -32 -32
+h-256c-17.6699 0 -32 14.3301 -32 32v224c0 53.0195 42.9805 96 96 96h128zM160 32c44.1797 0 80 35.8203 80 80s-35.8203 80 -80 80s-80 -35.8203 -80 -80s35.8203 -80 80 -80zM480 352c-17.6699 0 -32 14.3301 -32 32s14.3301 32 32 32s32 -14.3301 32 -32
+s-14.3301 -32 -32 -32zM384 320c17.6699 0 32 -14.3301 32 -32s-14.3301 -32 -32 -32s-32 14.3301 -32 32s14.3301 32 32 32zM288 416c17.6699 0 32 -14.3301 32 -32s-14.3301 -32 -32 -32s-32 14.3301 -32 32s14.3301 32 32 32zM384 416c17.6699 0 32 -14.3301 32 -32
+s-14.3301 -32 -32 -32s-32 14.3301 -32 32s14.3301 32 32 32zM480 224c17.6699 0 32 -14.3301 32 -32s-14.3301 -32 -32 -32s-32 14.3301 -32 32s14.3301 32 32 32z" />
+ <glyph glyph-name="stamp" unicode="&#xf5bf;"
+d="M32 -64v64h448v-64h-448zM416 192c53.0195 0 96 -42.9805 96 -96v-32c0 -17.6699 -14.3301 -32 -32 -32h-448c-17.6699 0 -32 14.3301 -32 32v32c0 53.0195 42.9805 96 96 96h66.5596c16.2607 0 29.4404 13.1797 29.4404 29.4404v0.0693359
+c0 31.79 -9.98047 62.0605 -23.3096 90.9102c-5.57031 12.04 -8.69043 25.4199 -8.69043 39.5801c0 58.6699 52.6201 105.04 113.25 94.4902c38.79 -6.75 70.4902 -38.6699 77.2598 -77.4502c3.85059 -22.0303 0.0605469 -42.8096 -9.0498 -60.4199
+c-12.5801 -24.3105 -21.46 -50.3506 -21.46 -77.7197v-9.45996c0 -16.2607 13.1797 -29.4404 29.4404 -29.4404h66.5596z" />
+ <glyph glyph-name="star-half-alt" unicode="&#xf5c0;" horiz-adv-x="536"
+d="M508.55 276.49c26.25 -3.7998 36.7705 -36.1006 17.7305 -54.6006l-105.91 -102.979l25.0303 -145.49c3.55957 -20.79 -13.0605 -37.4004 -31.6602 -37.4004c-4.91016 0 -9.9707 1.16016 -14.8301 3.71094l-130.94 68.6992l-130.95 -68.6797
+c-4.86914 -2.58008 -9.93945 -3.75 -14.8691 -3.75c-18.5801 0 -35.1699 16.6699 -31.6104 37.4502l25.0596 145.479l-105.89 103c-19.0303 18.5 -8.50977 50.79 17.7402 54.5898l146.38 21.29l65.4297 132.381c5.90039 11.9092 17.29 17.8096 28.6904 17.8096
+c11.4697 0 22.9395 -5.98047 28.8193 -17.8096l65.4102 -132.391zM386.81 153.29l82.6504 80.3799l-114.229 16.6299l-25.0107 3.64062l-11.1797 22.6299l-51.0303 103.29l-0.0292969 -317.19l22.3799 -11.7402l102.13 -53.5898l-19.5205 113.45l-4.2793 24.8799z" />
+ <glyph glyph-name="suitcase-rolling" unicode="&#xf5c1;" horiz-adv-x="384"
+d="M336 288c26.5098 0 48 -21.4902 48 -48v-224c0 -26.5098 -21.4902 -48 -48 -48h-16v-16c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v16h-128v-16c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v16h-16
+c-26.5098 0 -48 21.4902 -48 48v224c0 26.5098 21.4902 48 48 48h288zM320 72v16c0 4.41992 -3.58008 8 -8 8h-240c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h240c4.41992 0 8 3.58008 8 8zM320 168v16c0 4.41992 -3.58008 8 -8 8h-240
+c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h240c4.41992 0 8 3.58008 8 8zM144 400v-80h-48v80c0 26.5098 21.4902 48 48 48h96c26.5098 0 48 -21.4902 48 -48v-80h-48v80h-96z" />
+ <glyph glyph-name="surprise" unicode="&#xf5c2;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM136 240c0 -17.7002 14.2998 -32 32 -32s32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32zM248 32c35.2998 0 64 28.7002 64 64s-28.7002 64 -64 64
+s-64 -28.7002 -64 -64s28.7002 -64 64 -64zM328 208c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32z" />
+ <glyph glyph-name="swatchbook" unicode="&#xf5c3;" horiz-adv-x="511"
+d="M479.06 128c17.6406 0 31.9404 -14.3301 31.9404 -32v-128c0 -17.6699 -14.2998 -32 -31.9404 -32h-299.579c2.17969 1.91016 4.60938 3.41992 6.66992 5.49023l186.14 186.51h106.77zM434.56 280.9c12.4707 -12.4902 12.4707 -32.7607 0 -45.2607l-211.869 -212.279
+c0.199219 2.90918 0.869141 5.67969 0.869141 8.63965v263.76l75.5 75.6504c12.4805 12.5 32.7002 12.5 45.1709 0zM191.62 416v-384c0 -53.0195 -42.9004 -96 -95.8105 -96c-52.9092 0 -95.8096 42.9805 -95.8096 96v384c0 17.6699 14.2998 32 31.9404 32h127.739
+c17.6406 0 31.9404 -14.3301 31.9404 -32zM95.8096 8c13.2305 0 23.96 10.75 23.9502 24c0 13.2598 -10.7295 24 -23.9502 24c-13.2197 0 -23.9492 -10.7402 -23.9492 -24c0 -13.25 10.7197 -24 23.9492 -24zM127.75 192l0.00976562 64h-63.8799v-64h63.8701zM127.75 320
+l0.00976562 64h-63.8799v-64h63.8701z" />
+ <glyph glyph-name="swimmer" unicode="&#xf5c4;" horiz-adv-x="640"
+d="M189.61 137.42c-5.04004 4.65039 -10.3906 8.34961 -15.8604 11.5801l68.6299 98.04c7.36035 10.5 16.3398 19.5498 26.7197 26.9404l80.0205 57.1699c25.54 18.2598 57.8301 24.96 88.5596 18.3799l100.351 -21.5303c25.9297 -5.55957 42.4297 -31.0801 36.8799 -57
+c-5.56055 -25.9102 -31.0898 -42.4102 -57 -36.8799l-100.351 21.5303c-4.33984 0.90918 -8.97949 -0.0302734 -12.6191 -2.61035l-18 -12.8604l112.84 -80.5996c-17.5107 -1.04004 -34.5303 -8.4502 -49.3906 -22.1602
+c-3.5293 -3.25977 -15.2695 -9.41992 -34.3896 -9.41992s-30.8496 6.16016 -34.3896 9.41992c-16.0107 14.7705 -34.5 22.5801 -53.46 22.5801h-16.3008c-18.96 0 -37.4395 -7.80957 -53.46 -22.5801c-3.5293 -3.25977 -15.2695 -9.41992 -34.3896 -9.41992
+s-30.8496 6.16016 -34.3896 9.41992zM624 96c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-16c-38.6201 0 -72.7197 12.1797 -96 31.8398c-23.2803 -19.6494 -57.3799 -31.8398 -96 -31.8398s-72.7197 12.1797 -96 31.8398
+c-23.2803 -19.6494 -57.3799 -31.8398 -96 -31.8398s-72.7197 12.1797 -96 31.8398c-23.2803 -19.6494 -57.3799 -31.8398 -96 -31.8398h-16c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h16c26.04 0 45.7998 8.41992 56.0703 17.9004
+c8.89941 8.20996 19.6602 14.0996 31.7695 14.0996h16.2998c12.1104 0 22.8701 -5.88965 31.7705 -14.0996c10.29 -9.48047 30.0498 -17.9004 56.0898 -17.9004s45.7998 8.41992 56.0703 17.9004c8.89941 8.20996 19.6602 14.0996 31.7695 14.0996h16.2998
+c12.1104 0 22.8701 -5.88965 31.7705 -14.0996c10.29 -9.48047 30.0498 -17.9004 56.0898 -17.9004s45.7998 8.41992 56.0703 17.9004c8.89941 8.20996 19.6602 14.0996 31.7695 14.0996h16.2998c12.1104 0 22.8701 -5.88965 31.7705 -14.0996
+c10.29 -9.48047 30.0498 -17.9004 56.0898 -17.9004h16zM112 192c-44.1797 0 -80 35.8203 -80 80s35.8203 80 80 80s80 -35.8203 80 -80s-35.8203 -80 -80 -80z" />
+ <glyph glyph-name="swimming-pool" unicode="&#xf5c5;" horiz-adv-x="640"
+d="M624 32c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-16c-38.6201 0 -72.7197 12.1797 -96 31.8398c-23.2803 -19.6494 -57.3799 -31.8398 -96 -31.8398s-72.7197 12.1797 -96 31.8398c-23.2803 -19.6494 -57.3799 -31.8398 -96 -31.8398
+s-72.7197 12.1797 -96 31.8398c-23.2803 -19.6494 -57.3799 -31.8398 -96 -31.8398h-16c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h16c26.04 0 45.7998 8.41992 56.0703 17.9004c8.89941 8.20996 19.6602 14.0996 31.7695 14.0996h16.2998
+c12.1104 0 22.8701 -5.88965 31.7705 -14.0996c10.29 -9.48047 30.0498 -17.9004 56.0898 -17.9004s45.7998 8.41992 56.0703 17.9004c8.89941 8.20996 19.6602 14.0996 31.7695 14.0996h16.2998c12.1104 0 22.8701 -5.88965 31.7705 -14.0996
+c10.29 -9.48047 30.0498 -17.9004 56.0898 -17.9004s45.7998 8.41992 56.0703 17.9004c8.89941 8.20996 19.6602 14.0996 31.7695 14.0996h16.2998c12.1104 0 22.8701 -5.88965 31.7705 -14.0996c10.29 -9.48047 30.0498 -17.9004 56.0898 -17.9004h16zM224 64
+c-19.1201 0 -30.8604 6.16016 -34.3896 9.42969c-9.16992 8.4502 -19.2002 14.3398 -29.6104 18.0703v228.5c0 52.9404 43.0596 96 96 96s96 -43.0596 96 -96v-16c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v16c0 17.6396 -14.3604 32 -32 32
+s-32 -14.3604 -32 -32v-96h192v96c0 52.9404 43.0596 96 96 96s96 -43.0596 96 -96v-16c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v16c0 17.6396 -14.3604 32 -32 32s-32 -14.3604 -32 -32v-228.51
+c-10.4102 -3.73047 -20.4404 -9.61035 -29.6104 -18.0703c-3.5293 -3.25977 -15.2695 -9.41992 -34.3896 -9.41992v96h-192v-96z" />
+ <glyph glyph-name="tint-slash" unicode="&#xf5c7;" horiz-adv-x="640"
+d="M633.82 -10.0996c6.97949 -5.43066 8.22949 -15.4805 2.81934 -22.4502l-19.6396 -25.2705c-5.42969 -6.97949 -15.4805 -8.23926 -22.46 -2.80957l-588.36 454.729c-6.97949 5.43066 -8.22949 15.4805 -2.80957 22.4502l19.6396 25.2705
+c5.41992 6.97949 15.4805 8.22949 22.46 2.80957l186.82 -144.399c21.6201 33.7197 42.9697 73.3398 58.4902 125.68c9 30.1201 50.5 28.7803 58.4395 0c46.9902 -158.48 146.78 -200.061 146.78 -311.82c0 -5.70996 -0.509766 -11.2998 -1.03027 -16.8701zM144 114.09
+c0 29.7803 7.30957 54.6299 18.7197 78.1299l273.681 -211.52c-31.0303 -27.7402 -71.6904 -44.7002 -116.4 -44.7002c-97.2803 0 -176 79.6504 -176 178.09z" />
+ <glyph glyph-name="tired" unicode="&#xf5c8;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM281.8 250.3c-7.7002 -4.7002 -7.7002 -15.8994 0 -20.5996l80 -48c11.5 -6.7998 24.1006 7.59961 15.4004 18l-33.6006 40.2998l33.6006 40.2998
+c8.59961 10.2998 -3.7998 24.9004 -15.4004 18zM118.8 280.3l33.6006 -40.2998l-33.6006 -40.2998c-8.59961 -10.4004 3.90039 -24.7998 15.4004 -18l80 48c7.7998 4.7002 7.7998 15.8994 0 20.5996l-80 48c-11.6006 6.90039 -24 -7.7002 -15.4004 -18zM248 160
+c-51.9004 0 -115.3 -43.7998 -123.2 -106.7c-1.7002 -13.3994 7.90039 -24.5996 17.7002 -20.3994c25.9004 11.0996 64.4004 17.3994 105.5 17.3994s79.5996 -6.2998 105.5 -17.3994c9.7002 -4.2002 19.4004 6.7998 17.7002 20.3994
+c-7.90039 62.9004 -71.2998 106.7 -123.2 106.7z" />
+ <glyph glyph-name="tooth" unicode="&#xf5c9;" horiz-adv-x="448"
+d="M443.98 351.75c10.1299 -41.6299 0.419922 -80.8203 -21.5303 -110.43c-23.3604 -31.5703 -32.6807 -68.6504 -36.29 -107.351c-4.41016 -47.1602 -10.3301 -94.1699 -20.9404 -140.319l-7.7998 -33.9502c-3.18945 -13.8701 -15.4902 -23.7002 -29.6699 -23.7002
+c-13.9697 0 -26.1504 9.5498 -29.54 23.1602l-34.4697 138.42c-4.56055 18.3096 -20.96 31.1602 -39.7598 31.1602c-18.8008 0 -35.2002 -12.8398 -39.7607 -31.1602l-34.4697 -138.42c-3.38965 -13.6104 -15.5703 -23.1602 -29.54 -23.1602
+c-14.1797 0 -26.4795 9.83008 -29.6699 23.7002l-7.7998 33.9502c-10.6104 46.1592 -16.54 93.1592 -20.9404 140.319c-3.60938 38.6904 -12.9297 75.7803 -36.29 107.351c-21.9199 29.6201 -31.6299 68.8096 -21.5 110.43c11.0098 45.2197 47.1104 82.0498 92.0098 93.7197
+c23.4307 6.08984 46.1104 0.540039 66.8105 -10.3096l100.51 -64.6201c7.83984 -5.05957 17.6504 -2.15039 22.1104 4.7998c4.78027 7.44043 2.62012 17.3398 -4.7998 22.1104l-28.3203 18.21c3.54004 1.75 7.25 3.08984 10.5 5.47949
+c26.1396 19.2305 56.9502 32.6904 89.1396 24.3301c44.9004 -11.6602 81 -48.5 92.0107 -93.7197z" />
+ <glyph glyph-name="umbrella-beach" unicode="&#xf5ca;" horiz-adv-x="640"
+d="M115.38 311.1c-10.0801 3.66992 -14.1104 16.3203 -7.41992 24.7207c59.75 74.8398 152.65 116.689 248.53 111.8c-52.79 -29.4102 -103.811 -92.1602 -139 -173.7zM247.63 262.94c44.4902 101.979 114.74 171.14 172.76 171.149c7.95996 0 15.6904 -1.2998 23.0908 -4
+c61.3291 -22.3203 78.3896 -132.6 42.6299 -253.979zM521.48 387.5c76.5293 -57.9199 120.76 -149.67 118.439 -245.36c-0.259766 -10.7393 -11.4795 -17.8398 -21.5703 -14.1699l-102.619 37.3604c17.5293 58.75 24.6895 117.09 18.9492 166.979
+c-2.37012 20.5908 -6.97949 38.8906 -13.1992 55.1904zM560 0.0195312c8.83984 0 16 -7.15918 16 -16v-32.0098c0 -8.83984 -7.16016 -16.0098 -16 -16.0098h-544c-8.83984 0 -16 7.16992 -16 16.0098v32.0098c0 8.85059 7.16016 16.0107 16 16.0107h236.96l72.9004 200.37
+l60.1396 -21.9004l-64.9404 -178.48h238.94z" />
+ <glyph glyph-name="vector-square" unicode="&#xf5cb;"
+d="M512 320c0 -17.6699 -14.3301 -32 -32 -32v-192c17.6699 0 32 -14.3301 32 -32v-96c0 -17.6699 -14.3301 -32 -32 -32h-96c-17.6699 0 -32 14.3301 -32 32h-192c0 -17.6699 -14.3301 -32 -32 -32h-96c-17.6699 0 -32 14.3301 -32 32v96c0 17.6699 14.3301 32 32 32v192
+c-17.6699 0 -32 14.3301 -32 32v96c0 17.6699 14.3301 32 32 32h96c17.6699 0 32 -14.3301 32 -32h192c0 17.6699 14.3301 32 32 32h96c17.6699 0 32 -14.3301 32 -32v-96zM416 384v-32h32v32h-32zM64 384v-32h32v32h-32zM96 0v32h-32v-32h32zM448 0v32h-32v-32h32zM416 96
+v192h-32c-17.6699 0 -32 14.3301 -32 32v32h-192v-32c0 -17.6699 -14.3301 -32 -32 -32h-32v-192h32c17.6699 0 32 -14.3301 32 -32v-32h192v32c0 17.6699 14.3301 32 32 32h32z" />
+ <glyph glyph-name="weight-hanging" unicode="&#xf5cd;"
+d="M510.28 2.13965c8.33008 -33.3096 -14.6602 -66.1396 -46.2998 -66.1396h-415.95c-31.6504 0 -54.6406 32.8301 -46.3105 66.1396l73.0498 292.13c3.79004 15.1807 16.4404 25.7207 30.8701 25.7207h60.25c-3.58008 10.0498 -5.87988 20.7197 -5.87988 32
+c0 53.0195 42.9805 96 96 96c53.0205 0 96 -42.9805 96 -96c0 -11.2803 -2.30957 -21.9502 -5.87988 -32h60.25c14.4297 0 27.0703 -10.5303 30.8701 -25.7207zM256 320c17.6396 0 32 14.3604 32 32s-14.3604 32 -32 32s-32 -14.3604 -32 -32s14.3604 -32 32 -32z" />
+ <glyph glyph-name="wine-glass-alt" unicode="&#xf5ce;" horiz-adv-x="288"
+d="M216 -16c22.0898 0 40 -17.9102 40 -40c0 -4.41992 -3.58008 -8 -8 -8h-208c-4.41992 0 -8 3.58008 -8 8c0 22.0898 17.9102 40 40 40h40v117.18c-68.4697 15.8906 -118.05 79.9102 -111.4 154.16l15.96 178.11c0.730469 8.24023 7.55078 14.5498 15.7002 14.5498h223.48
+c8.16016 0 14.9697 -6.30957 15.71 -14.5498l15.9502 -178.101c6.64941 -74.25 -42.9307 -138.27 -111.4 -154.159v-117.19h40zM61.75 400l-7.16992 -80h178.84l-7.16992 80h-164.5z" />
+ <glyph glyph-name="air-freshener" unicode="&#xf5d0;" horiz-adv-x="384"
+d="M378.94 126.59c11.75 -12.1494 1.71973 -30.5898 -16.6406 -30.5898h-138.3v-32h112c8.83984 0 16 -7.16016 16 -16v-96c0 -8.83984 -7.16016 -16 -16 -16h-288c-8.83984 0 -16 7.16016 -16 16v96c0 8.83984 7.16016 16 16 16h112v32h-138.3
+c-18.3604 0 -28.3906 18.4404 -16.6406 30.5898l94.2402 97.4102h-49.2197c-15.2998 0 -23.6602 16.5996 -13.8604 27.5303l113.33 126.51c-3.42969 6.61035 -5.5498 14 -5.5498 21.96c0 26.5098 21.4902 48 48 48s48 -21.4902 48 -48
+c0 -7.95996 -2.12012 -15.3496 -5.5498 -21.96l113.33 -126.51c9.7998 -10.9307 1.43945 -27.5303 -13.8604 -27.5303h-49.2197zM192 416.02c-8.84961 0 -16.0195 -7.16992 -16.0195 -16.0195c0 -8.83984 7.16992 -16.0195 16.0195 -16.0195
+s16.0195 7.17969 16.0195 16.0195c0 8.84961 -7.16992 16.0195 -16.0195 16.0195zM304 16h-224v-32h224v32z" />
+ <glyph glyph-name="apple-alt" unicode="&#xf5d1;" horiz-adv-x="448"
+d="M350.85 319c25.9707 -4.66992 47.2705 -18.6699 63.9199 -42c14.6504 -20.6699 24.6406 -46.6699 29.9609 -78c4.66992 -28.6699 4.31934 -57.3301 -1 -86c-7.99023 -47.3301 -23.9707 -87 -47.9404 -119c-28.6396 -38.6699 -64.5898 -58 -107.87 -58
+c-10.6602 0 -22.2998 3.33008 -34.96 10c-8.66016 5.33008 -18.3096 8 -28.9697 8s-20.2998 -2.66992 -28.9707 -8c-12.6592 -6.66992 -24.2998 -10 -34.96 -10c-43.2793 0 -79.2295 19.3301 -107.869 58c-23.9707 32 -39.9502 71.6699 -47.9404 119
+c-5.32031 28.6699 -5.66992 57.3301 -1 86c5.32031 31.3301 15.3096 57.3301 29.96 78c16.6504 23.3301 37.9502 37.3301 63.9199 42c15.9805 2.66992 37.9502 0.330078 65.9199 -7c23.9697 -6.66992 44.2803 -14.6699 60.9307 -24
+c16.6494 9.33008 36.96 17.3301 60.9297 24c27.9795 7.33008 49.96 9.66992 65.9395 7zM295.91 360c-9.32031 -8.66992 -21.6504 -15 -36.96 -19c-10.6602 -3.33008 -22.2998 -5 -34.96 -5l-14.9805 1c-1.33008 9.33008 -1.33008 20 0 32
+c2.66992 24 10.3203 42.3301 22.9707 55c9.31934 8.66992 21.6494 15 36.96 19c10.6592 3.33008 22.2998 5 34.96 5l14.9795 -1l1 -15c0 -12.6699 -1.66992 -24.3301 -4.99023 -35c-3.98926 -15.3301 -10.3096 -27.6699 -18.9795 -37z" />
+ <glyph glyph-name="atom" unicode="&#xf5d2;" horiz-adv-x="448"
+d="M413.03 192c40.1396 -54.9102 41.5195 -98.5996 25.1396 -128c-29.2197 -52.3398 -101.689 -43.5801 -116.33 -41.8799c-21.4697 -51.2197 -54.2002 -86.1201 -97.8398 -86.1201s-76.3701 34.9004 -97.8398 86.1201c-14.6504 -1.7002 -87.1201 -10.46 -116.33 41.8799
+c-16.3701 29.3799 -14.9902 73.1104 25.1396 128c-40.1396 54.9102 -41.5195 98.5996 -25.1396 128c10.9004 19.5195 40.5996 50.6602 116.33 41.8799c21.4795 51.2305 54.2002 86.1201 97.8398 86.1201s76.3604 -34.8896 97.8398 -86.1201
+c75.79 8.85059 105.42 -22.3604 116.33 -41.8799c16.3701 -29.3799 14.9902 -73.1104 -25.1396 -128zM63.3799 96c3.69043 -6.59961 19.0205 -11.8604 43.5801 -10.9697c-2.75977 13 -5.0498 26.3701 -6.75977 40.0801c-7.66992 6.29004 -14.9102 12.6494 -21.8701 19.1797
+c-15.1396 -23.4902 -18.9805 -41.0801 -14.9502 -48.29zM100.2 258.88c1.39355 11.1816 4.43555 29.2002 6.79004 40.2197c-1.82031 0.0703125 -3.98047 0.370117 -5.69043 0.370117c-21.5303 0 -34.5098 -5.33008 -37.9199 -11.4697
+c-4.01953 -7.20996 -0.179688 -24.7998 14.9502 -48.2998c6.96973 6.53027 14.21 12.8896 21.8701 19.1797zM224 384c-9.46973 0 -22.2002 -13.5195 -33.8604 -37.2598c11.1904 -3.7002 22.4404 -8 33.8604 -12.8604c11.4199 4.86035 22.6699 9.16016 33.8604 12.8604
+c-11.6602 23.7402 -24.3906 37.2598 -33.8604 37.2598zM224 0c9.46973 0 22.2002 13.5195 33.8604 37.2598c-11.1904 3.7002 -22.4404 8 -33.8604 12.8604c-11.4199 -4.86035 -22.6699 -9.16016 -33.8604 -12.8604c11.6602 -23.7402 24.3906 -37.2598 33.8604 -37.2598z
+M286.5 157.33c1.99023 27.7998 1.98047 41.5498 0 69.3301c-26.6396 19.04 -46.1104 29.3096 -62.5 37.4795c-16.3701 -8.15918 -35.8301 -18.4297 -62.5 -37.4795c-1.99023 -27.79 -1.99023 -41.54 0 -69.3301c26.7002 -19.0703 46.1504 -29.3398 62.5 -37.4805
+c16.3604 8.15039 35.7998 18.4004 62.5 37.4805zM384.62 96c4.01953 7.20996 0.179688 24.7998 -14.9502 48.29c-6.96973 -6.53027 -14.21 -12.8896 -21.8701 -19.1797c-1.70996 -13.6904 -4 -27.0605 -6.75977 -40.0605c24.5801 -0.870117 39.9102 4.33008 43.5801 10.9502
+zM369.67 239.71c15.1299 23.4902 18.9697 41.0801 14.9502 48.2998c-3.41016 6.12988 -16.4004 11.4707 -37.9199 11.4707c-1.71973 0 -3.87012 -0.300781 -5.69043 -0.370117c2.35254 -11.0205 5.39453 -29.0391 6.79004 -40.2207
+c7.66992 -6.29004 14.9102 -12.6494 21.8701 -19.1797zM224 224c17.6699 0 32 -14.3301 32 -32s-14.3301 -32 -32 -32s-32 14.3301 -32 32s14.3301 32 32 32z" />
+ <glyph glyph-name="bone" unicode="&#xf5d7;" horiz-adv-x="640"
+d="M598.88 203.44c-9.42969 -4.70996 -9.42969 -18.1709 -0.00976562 -22.8809c25.2002 -12.5996 41.1201 -38.3496 41.1201 -66.5293v-7.64062c0 -41.0898 -33.2998 -74.3896 -74.3799 -74.3896c-32.0107 0 -60.4404 20.4902 -70.5703 50.8604
+c-6.53027 19.5996 -10.7305 45.1396 -38.1104 45.1396h-273.87c-26.5098 0 -30.4297 -22.1104 -38.1094 -45.1396c-10.1299 -30.3701 -38.5498 -50.8604 -70.5703 -50.8604c-41.0801 0 -74.3799 33.2998 -74.3799 74.3896v7.64062
+c0 28.1699 15.9199 53.9297 41.1201 66.5293c9.42969 4.70996 9.42969 18.1709 0 22.8809c-25.2002 12.5996 -41.1201 38.3594 -41.1201 66.5293v7.64062c0 41.0898 33.2998 74.3896 74.3896 74.3896c32.0107 0 60.4404 -20.4902 70.5605 -50.8604
+c6.53027 -19.5996 10.7295 -45.1396 38.1094 -45.1396h273.87c26.5107 0 30.4307 22.1104 38.1104 45.1396c10.1299 30.3701 38.5498 50.8604 70.5703 50.8604c41.0898 0 74.3896 -33.2998 74.3896 -74.3896v-7.64062c0 -28.1699 -15.9199 -53.9297 -41.1201 -66.5293z" />
+ <glyph glyph-name="book-reader" unicode="&#xf5da;"
+d="M352 352c0 -53.0195 -42.9805 -96 -96 -96s-96 42.9805 -96 96s42.9805 96 96 96s96 -42.9805 96 -96zM233.59 206.9c4.10059 -2.51074 6.41016 -6.79004 6.41992 -11.46v-245.99c0 -10.1602 -11.1094 -16.5898 -20.4795 -11.8701
+c-61.0498 30.75 -149.38 39.1396 -193.04 41.4297c-14.9004 0.770508 -26.4902 12.7207 -26.4902 27.0498v222.801c0 15.6299 13.5498 28.0098 29.7998 27.0898c48.3604 -2.75 144.46 -12.7305 203.79 -49.0498zM482.2 255.95
+c16.25 0.919922 29.7998 -11.46 29.8096 -27.0898v-222.82c0 -14.3301 -11.5898 -26.2803 -26.4902 -27.0596c-43.6494 -2.29004 -131.93 -10.6807 -192.97 -41.4004c-9.39941 -4.73047 -20.54 1.70996 -20.54 11.9004v245.789c0 4.6709 2.31055 9.12012 6.41016 11.6309
+c59.3203 36.3193 155.43 46.3096 203.78 49.0498z" />
+ <glyph glyph-name="brain" unicode="&#xf5dc;" horiz-adv-x="576"
+d="M208 448c35.3398 0 64 -28.6504 64 -64v-74.0703c0 -26.0898 -18 -47.8799 -42.2002 -54.04c-3.4502 -0.879883 -5.7998 -4.08984 -5.7998 -7.65918v-16.1807c0 -5.06934 4.66992 -8.91016 9.63965 -7.88965c14.3799 2.93945 27.4502 9.41016 38.3604 18.3896v-234.55
+c0 -39.7598 -32.2305 -72 -72 -72c-31.75 0 -58.3896 20.6904 -67.9697 49.21c-3.93066 -0.660156 -7.91016 -1.20996 -12.0303 -1.20996c-39.7695 0 -72 32.2402 -72 72c0 4.83008 0.519531 9.54004 1.41992 14.1104c-29 12.0098 -49.4199 40.5498 -49.4199 73.8896
+c0 29.6699 16.3398 55.2803 40.3398 69.0898c-5.17969 10.5898 -8.33984 22.3301 -8.33984 34.9102c0 33.4297 20.54 62 49.6602 73.96c-1.02051 4.53027 -1.66016 9.2002 -1.66016 14.04c0 35.3496 28.6602 64 64 64c0.75 0 1.4502 -0.200195 2.2002 -0.219727
+c7.05957 27.6699 31.9297 48.2197 61.7998 48.2197zM576 144c0 -33.3398 -20.4199 -61.8799 -49.4199 -73.8896c0.900391 -4.57031 1.41992 -9.28027 1.41992 -14.1104c0 -39.7598 -32.2305 -72 -72 -72c-4.12012 0 -8.09961 0.540039 -12.0303 1.20996
+c-9.58008 -28.5195 -36.2197 -49.21 -67.9697 -49.21c-39.7695 0 -72 32.2402 -72 72v234.55c10.9199 -8.97949 23.9805 -15.4502 38.3604 -18.3896c4.95996 -1.02051 9.63965 2.82031 9.63965 7.88965v16.1807c0 3.56934 -2.33984 6.7793 -5.7998 7.65918
+c-24.21 6.16016 -42.2002 27.9502 -42.2002 54.04v74.0703c0 35.3496 28.6602 64 64 64c29.8701 0 54.7402 -20.5498 61.7998 -48.2197c0.75 0.0195312 1.4502 0.219727 2.2002 0.219727c35.3398 0 64 -28.6504 64 -64c0 -4.83984 -0.639648 -9.50977 -1.66016 -14.04
+c29.1201 -11.96 49.6602 -40.5303 49.6602 -73.96c0 -12.5801 -3.16992 -24.3203 -8.33984 -34.9102c24 -13.8096 40.3398 -39.4199 40.3398 -69.0898z" />
+ <glyph glyph-name="car-alt" unicode="&#xf5de;" horiz-adv-x="480"
+d="M438.66 235.67c24.1201 -9.16992 41.3398 -32.3301 41.3398 -59.6699v-48c0 -16.1299 -6.16016 -30.6797 -16 -41.9297v-54.0703c0 -17.6699 -14.3301 -32 -32 -32h-32c-17.6699 0 -32 14.3301 -32 32v32h-256v-32c0 -17.6699 -14.3301 -32 -32 -32h-32
+c-17.6699 0 -32 14.3301 -32 32v54.0703c-9.83984 11.2598 -16 25.8096 -16 41.9297v48c0 27.3398 17.2197 50.5 41.3398 59.6699l11.2402 28.0996l19.9297 49.8301c17.1104 42.7705 57.9307 70.4004 103.99 70.4004h127c46.0703 0 86.8799 -27.6299 103.99 -70.4004
+l19.9297 -49.8301zM131.93 289.83l-19.9297 -49.8301h256l-19.9297 49.8301c-7.29004 18.2197 -24.9404 30.1699 -44.5703 30.1699h-127c-19.6299 0 -37.2803 -11.9502 -44.5703 -30.1699zM80 128.2c19.2002 0 48 -3.19043 48 15.9502
+c0 19.1396 -28.7998 47.8496 -48 47.8496s-32 -12.7598 -32 -31.9004c0 -19.1396 12.7998 -31.8994 32 -31.8994zM400 128.2c19.2002 0 32 12.7598 32 31.8994c0 19.1406 -12.7998 31.9004 -32 31.9004s-48 -28.71 -48 -47.8496c0 -19.1406 28.7998 -15.9502 48 -15.9502z
+" />
+ <glyph glyph-name="car-battery" unicode="&#xf5df;"
+d="M480 320c17.6699 0 32 -14.3301 32 -32v-256c0 -17.6699 -14.3301 -32 -32 -32h-448c-17.6699 0 -32 14.3301 -32 32v256c0 17.6699 14.3301 32 32 32h32v48c0 8.83984 7.16016 16 16 16h96c8.83984 0 16 -7.16016 16 -16v-48h128v48c0 8.83984 7.16016 16 16 16h96
+c8.83984 0 16 -7.16016 16 -16v-48h32zM192 184v16c0 4.41992 -3.58008 8 -8 8h-112c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h112c4.41992 0 8 3.58008 8 8zM448 184v16c0 4.41992 -3.58008 8 -8 8h-40v40c0 4.41992 -3.58008 8 -8 8h-16
+c-4.41992 0 -8 -3.58008 -8 -8v-40h-40c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h40v-40c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v40h40c4.41992 0 8 3.58008 8 8z" />
+ <glyph glyph-name="car-crash" unicode="&#xf5e1;" horiz-adv-x="640"
+d="M143.25 227.19l-12.4199 -46.3701c-3.00977 -11.25 -3.62988 -22.8906 -2.41016 -34.3906l-35.2002 -28.9795c-6.56934 -5.41016 -16.3096 0.429688 -14.6201 8.76953l15.4404 76.6807c1.05957 5.25977 -2.66016 10.2793 -8 10.79l-77.8604 7.5498
+c-8.46973 0.819336 -11.2295 11.8301 -4.13965 16.54l65.1504 43.2998c4.45996 2.96973 5.37988 9.15039 1.97949 13.29l-49.71 60.4102c-5.41016 6.56934 0.429688 16.2998 8.78027 14.6201l76.6797 -15.4404c5.25977 -1.05957 10.2803 2.66016 10.7998 8l7.5498 77.8604
+c0.820312 8.47949 11.8301 11.2295 16.5508 4.13965l43.2998 -65.1396c2.96973 -4.45996 9.14941 -5.37988 13.29 -1.98047l60.3994 49.71c6.57031 5.41016 16.3008 -0.429688 14.6201 -8.76953l-11.3301 -56.1602c-2.70996 -3.0498 -5.42969 -6.08984 -7.90918 -9.40039
+l-32.1504 -42.9697l-10.71 -14.3203c-32.7305 -8.75977 -59.1797 -34.5293 -68.0801 -67.7393zM637.82 94.6797l-12.4199 -46.3594c-3.13086 -11.6807 -9.38086 -21.6104 -17.5508 -29.3604c-2.25488 -2.13574 -6.17969 -5.27148 -8.75977 -7l-13.9902 -52.2295
+c-1.13965 -4.27051 -3.09961 -8.10059 -5.64941 -11.3809c-7.66992 -9.83984 -20.7402 -14.6797 -33.54 -11.25l-30.9102 8.28027c-17.0703 4.57031 -27.2002 22.1201 -22.6299 39.1904l8.28027 30.9102l-247.28 66.2598l-8.28027 -30.9102
+c-4.57031 -17.0703 -22.1201 -27.2002 -39.1895 -22.6299l-30.9102 8.28027c-12.7998 3.42969 -21.7002 14.1592 -23.4199 26.5098c-0.570312 4.12012 -0.350586 8.41992 0.790039 12.6797l13.9893 52.2305c-1.37207 2.78809 -3.2041 7.46973 -4.08984 10.4492
+c-3.2002 10.79 -3.64941 22.5205 -0.519531 34.2002l12.4199 46.3701c5.30957 19.7998 19.3594 34.8301 36.8896 42.21c4.9082 2.06836 13.1914 4.18262 18.4902 4.71973l18.1299 24.2305l32.1504 42.9697c3.44922 4.61035 7.18945 8.90039 11.1992 12.8398
+c8 7.89062 17.0303 14.4404 26.7402 19.5107c4.86035 2.54004 9.89062 4.70996 15.0498 6.48926c10.3301 3.58008 21.1904 5.62988 32.2402 6.04004s22.3105 -0.819336 33.4307 -3.7998l122.68 -32.8701c11.1201 -2.97949 21.4795 -7.54004 30.8496 -13.4297
+c11.7236 -7.36133 27.2646 -22.8174 34.6904 -34.5c8.81934 -13.8799 14.6396 -29.8398 16.6797 -46.9902l6.36035 -53.29l3.58984 -30.0498c8.79297 -6.34863 18.9805 -19.7568 22.7402 -29.9297c4.38965 -11.8799 5.29004 -25.1904 1.75 -38.3906zM255.58 213.66
+c-18.5498 4.96973 -34.21 -4.04004 -39.1699 -22.5303s4.10938 -34.1201 22.6494 -39.0898c18.5508 -4.96973 45.54 -15.5098 50.4902 2.97949c4.95996 18.4902 -15.4297 53.6709 -33.9697 58.6406zM546.19 185.49l-6.36035 53.29
+c-0.580078 4.87012 -1.88965 9.5293 -3.82031 13.8594c-5.7998 12.9902 -17.2002 23.0107 -31.4199 26.8203l-122.68 32.8701c-3.36914 0.902344 -8.93457 1.63477 -12.4229 1.63477c-13.2676 0 -30.4883 -8.62207 -38.4375 -19.2451l-32.1494 -42.9697l172 -46.0801z
+M564.68 130.84c-18.5498 4.96973 -53.7998 -15.3096 -58.75 -33.79c-4.94922 -18.4902 23.6904 -22.8594 42.2402 -27.8301c18.5498 -4.96973 34.21 4.04004 39.1699 22.5303c4.9502 18.4805 -4.10938 34.1201 -22.6602 39.0898z" />
+ <glyph glyph-name="car-side" unicode="&#xf5e4;" horiz-adv-x="640"
+d="M544 256c53.0195 0 96 -42.9805 96 -96v-80c0 -8.83984 -7.16016 -16 -16 -16h-48c0 -53.0195 -42.9805 -96 -96 -96s-96 42.9805 -96 96h-128c0 -53.0195 -42.9805 -96 -96 -96s-96 42.9805 -96 96h-48c-8.83984 0 -16 7.16016 -16 16v112
+c0 29.79 20.4404 54.5996 48 61.7402l47.9102 122.029c9.71973 24.3008 33.25 40.2305 59.4199 40.2305h213.91c16.9795 -0.00195312 39.3701 -10.7627 49.9795 -24.0195l108.78 -135.98h16zM160 16c26.4697 0 48 21.5303 48 48s-21.5303 48 -48 48s-48 -21.5303 -48 -48
+s21.5303 -48 48 -48zM232 256v96h-76.6699l-38.4004 -96h115.07zM280 256h166.04l-76.7998 96h-89.2402v-96zM480 16c26.4697 0 48 21.5303 48 48s-21.5303 48 -48 48s-48 -21.5303 -48 -48s21.5303 -48 48 -48z" />
+ <glyph glyph-name="charging-station" unicode="&#xf5e7;" horiz-adv-x="576"
+d="M336 0c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-320c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h320zM544 320h16c8.83984 0 16 -7.16016 16 -16v-32c0 -35.7598 -23.6201 -65.6904 -56 -75.9297v-120.07
+c0 -44.4102 -38.29 -80.0498 -83.5898 -75.6201c-39.4902 3.85059 -68.4102 39.3398 -68.4102 79.0098v24.6104c0 22.0898 -17.9102 40 -40 40h-8v-112h-288v352c0 35.3496 28.6504 64 64 64h160c35.3496 0 64 -28.6504 64 -64v-192h8c48.5996 0 88 -39.4004 88 -88v-28
+c0 -16.9902 15.2197 -30.5 32.7402 -27.6104c13.7598 2.27051 23.2598 15.2402 23.2598 29.1904v118.49c-32.3799 10.2393 -56 40.1699 -56 75.9297v32c0 8.83984 7.16016 16 16 16h16v48c0 8.83984 7.16016 16 16 16s16 -7.16016 16 -16v-48h32v48
+c0 8.83984 7.16016 16 16 16s16 -7.16016 16 -16v-48zM260.09 272.24c4.62012 6.97949 -1.14941 15.7598 -10.3896 15.7598h-57.7002l11.5996 50.79c2.02051 6.66992 -3.71973 13.21 -11.5996 13.21h-68c-6.01953 0 -11.0996 -3.90039 -11.8896 -9.11035l-16 -107
+c-0.959961 -6.2998 4.63965 -11.8896 11.8896 -11.8896h59.3496l-23.0195 -83.0801c-1.7998 -6.63965 4 -12.9199 11.6699 -12.9199c4.17969 0 8.19043 1.91016 10.3896 5.24023z" />
+ <glyph glyph-name="directions" unicode="&#xf5eb;"
+d="M502.61 214.68c12.5195 -12.5293 12.5195 -32.8301 0 -45.3594l-223.931 -223.931c-12.5293 -12.5293 -32.8398 -12.5293 -45.3594 0l-223.931 223.931c-12.5195 12.5293 -12.5195 32.8301 0 45.3594l223.931 223.931c12.5293 12.5195 32.8398 12.5195 45.3594 0z
+M401.63 202.12c3.42969 3.16992 3.42969 8.58984 0 11.7598l-84.21 77.7305c-5.12988 4.72949 -13.4297 1.08984 -13.4297 -5.87988v-53.7305h-112c-17.6699 0 -32 -14.3301 -32 -32v-80c0 -4.41992 3.58008 -8 8 -8h32c4.41992 0 8 3.58008 8 8v64h96v-53.7305
+c0 -6.97949 8.30957 -10.6094 13.4297 -5.87988z" />
+ <glyph glyph-name="draw-polygon" unicode="&#xf5ee;" horiz-adv-x="448"
+d="M384 96c35.3496 0 64 -28.6504 63.9902 -64c0 -35.3496 -28.6504 -64 -64 -64c-23.6299 0 -44.0303 12.9502 -55.1201 32h-209.75c-11.0801 -19.0498 -31.4902 -32 -55.1201 -32c-35.3496 0 -64 28.6504 -64 64c0 23.6299 12.9502 44.04 32 55.1299v209.75
+c-19.0498 11.0801 -32 31.4902 -32 55.1201c0 35.3496 28.6504 64 64 64c23.6299 0 44.04 -12.9502 55.1201 -32h209.76c11.0801 19.0498 31.4902 32 55.1201 32c35.3496 0 64 -28.6504 64 -64s-28.6504 -64 -64 -64c-0.349609 0 -0.669922 0.0898438 -1.01953 0.0996094
+l-39.2002 -65.3193c5.08008 -9.16992 8.21973 -19.5605 8.21973 -30.7803s-3.15039 -21.6104 -8.21973 -30.7803l39.2002 -65.3193c0.349609 0 0.669922 0.0996094 1.01953 0.0996094zM96 87.1201c8.07422 -4.68848 18.4316 -15.0459 23.1201 -23.1201h208.36
+l-38.4609 64.0996c-0.349609 0 -0.669922 -0.0996094 -1.01953 -0.0996094c-35.3496 0 -64 28.6504 -64 64s28.6504 64 64 64c0.349609 0 0.669922 -0.0898438 1.01953 -0.0996094l38.4609 64.0996h-208.36c-4.69043 -8.07227 -15.0479 -18.4297 -23.1201 -23.1201v-209.76z
+M272 192c0 -8.82031 7.17969 -16 16 -16s16 7.17969 16 16s-7.17969 16 -16 16s-16 -7.17969 -16 -16zM400 352c0 8.82031 -7.17969 16 -16 16s-16 -7.17969 -16 -16s7.17969 -16 16 -16s16 7.17969 16 16zM64 368c-8.82031 0 -16 -7.17969 -16 -16s7.17969 -16 16 -16
+s16 7.17969 16 16s-7.17969 16 -16 16zM48 32c0 -8.82031 7.17969 -16 16 -16s16 7.17969 16 16s-7.17969 16 -16 16s-16 -7.17969 -16 -16zM384 16c8.82031 0 16 7.17969 16 16s-7.17969 16 -16 16s-16 -7.17969 -16 -16s7.17969 -16 16 -16z" />
+ <glyph glyph-name="laptop-code" unicode="&#xf5fc;" horiz-adv-x="640"
+d="M255.03 186.35l-58.3506 58.3408c-6.25 6.25 -6.25 16.3799 0 22.6299l58.3398 58.3398c6.25 6.25 16.3809 6.25 22.6309 0l11.3096 -11.3105c6.25 -6.25 6.25 -16.3799 0 -22.6299l-35.71 -35.7197l35.7197 -35.71c6.25 -6.25 6.25 -16.3799 0 -22.6299
+l-11.3096 -11.3105c-6.25 -6.25 -16.3799 -6.25 -22.6299 0zM351.04 197.65c-6.25 6.25 -6.25 16.3799 0 22.6299l35.71 35.7197l-35.71 35.71c-6.25 6.25 -6.25 16.3799 0 22.6299l11.3096 11.3105c6.25 6.25 16.3809 6.25 22.6309 0l58.3398 -58.3408
+c6.25 -6.25 6.25 -16.3799 0 -22.6299l-58.3398 -58.3398c-6.25 -6.25 -16.3809 -6.25 -22.6309 0zM624 32c8.7998 0 16 -7.2002 16 -16v-16c0 -35.2002 -28.7998 -64 -64 -64h-512c-35.2002 0 -64 28.7998 -64 64v16c0 8.7998 7.2002 16 16 16h239.23
+c-0.25 -14.5303 14.0791 -32 32.7695 -32h60.7998c18.0303 0 32 12.1904 32.7402 32h242.46zM576 400v-336h-512v336c0 26.4004 21.5996 48 48 48h416c26.4004 0 48 -21.5996 48 -48zM512 128v256h-384v-256h384z" />
+ <glyph glyph-name="layer-group" unicode="&#xf5fd;"
+d="M12.4102 299.98c-16.5498 7.50977 -16.5498 32.5293 0 40.0391l232.95 105.671c2.79883 1.2793 7.56738 2.31738 10.6445 2.31738s7.84668 -1.03809 10.6455 -2.31738l232.93 -105.681c16.5498 -7.50977 16.5498 -32.5195 0 -40.0293l-232.94 -105.671
+c-6.7998 -3.08984 -14.4893 -3.08984 -21.29 0zM499.59 211.7c16.5498 -7.5 16.5498 -32.5 0 -40l-232.95 -105.59c-6.7998 -3.08008 -14.4893 -3.08008 -21.29 0l-232.939 105.59c-16.5498 7.5 -16.5498 32.5 0 40l58.0996 26.3301l161.63 -73.2705
+c7.57031 -3.42969 15.5908 -5.16992 23.8604 -5.16992s16.2998 1.74023 23.8604 5.16992l161.64 73.2705zM499.59 83.9004c16.5498 -7.5 16.5498 -32.5 0 -40l-232.95 -105.591c-6.7998 -3.0791 -14.4893 -3.0791 -21.29 0l-232.939 105.591
+c-16.5498 7.5 -16.5498 32.5 0 40l57.8799 26.2295l161.85 -73.3701c7.57031 -3.42969 15.5908 -5.16992 23.8604 -5.16992s16.2998 1.74023 23.8604 5.16992l161.859 73.3701z" />
+ <glyph glyph-name="microscope" unicode="&#xf610;"
+d="M160 128c-17.6699 0 -32 14.3301 -32 32v224c0 17.6699 14.3301 32 32 32v16c0 8.83984 7.16016 16 16 16h64c8.83984 0 16 -7.16016 16 -16v-16c17.6699 0 32 -14.3301 32 -32v-224c0 -17.6699 -14.3301 -32 -32 -32h-12v-16c0 -8.83984 -7.16016 -16 -16 -16h-40
+c-8.83984 0 -16 7.16016 -16 16v16h-12zM464 0c26.5098 0 48 -21.4902 48 -48c0 -8.83984 -7.16016 -16 -16 -16h-480c-8.83984 0 -16 7.16016 -16 16c0 26.5098 21.4902 48 48 48h272c70.5801 0 128 57.4199 128 128s-57.4199 128 -128 128v64
+c105.88 0 192 -86.1201 192 -192c0 -49.2002 -18.7598 -93.9902 -49.29 -128h1.29004zM104 32c-4.41992 0 -8 3.58008 -8 8v16c0 4.41992 3.58008 8 8 8h208c4.41992 0 8 -3.58008 8 -8v-16c0 -4.41992 -3.58008 -8 -8 -8h-208z" />
+ <glyph glyph-name="oil-can" unicode="&#xf613;" horiz-adv-x="639"
+d="M629.8 287.69c5.11035 1.45996 10.2002 -2.38086 10.1904 -7.69043v-18.0801c0 -2.12012 -0.839844 -4.16016 -2.33984 -5.66016l-212.261 -214.75c-6.00977 -6.08984 -14.21 -9.50977 -22.7598 -9.50977h-274.63c-17.6699 0 -32 14.3301 -32 32v46.54l-69.7197 12.6904
+c-15.2207 2.75977 -26.2803 16.0195 -26.2803 31.4795v94.9502c0 17.9795 14.6699 32.0098 31.9297 32.0098c1.61133 0 4.20605 -0.237305 5.79004 -0.530273l138.28 -25.1396h56v48h-56c-8.83984 0 -16 7.16016 -16 16v16c0 8.83984 7.16016 16 16 16h160
+c8.83984 0 16 -7.16016 16 -16v-16c0 -8.83984 -7.16016 -16 -16 -16h-56v-48h56.8896c8.33984 -0.00292969 21.1611 -3.03125 28.6201 -6.75977l50.4902 -25.2402zM96 159.33v62.4297l-48 8.73047v-62.4307zM549.33 74.6699c0 23.5605 42.6699 85.3301 42.6699 85.3301
+s42.6699 -61.7598 42.6699 -85.3301s-19.0996 -42.6699 -42.6699 -42.6699s-42.6699 19.1104 -42.6699 42.6699z" />
+ <glyph glyph-name="poop" unicode="&#xf619;"
+d="M451.36 78.8604c34.3301 -5.48047 60.6396 -34.9805 60.6396 -70.8604c0 -39.7598 -32.2402 -72 -72 -72h-368c-39.7598 0 -72 32.2402 -72 72c0 35.8799 26.3096 65.3799 60.6396 70.8604c-17.2998 13.1494 -28.6396 33.7295 -28.6396 57.1396
+c0 39.7695 32.2402 72 72 72h14.0703c-13.4199 11.7305 -22.0703 28.7803 -22.0703 48c0 35.3496 28.6504 64 64 64h16c44.1797 0 80 35.8203 80 80c0 17.3799 -5.69043 33.3604 -15.1104 46.4805c4.95996 0.779297 9.94043 1.51953 15.1104 1.51953
+c53.0195 0 96 -42.9805 96 -96c0 -11.2803 -2.30957 -21.9502 -5.87988 -32h5.87988c35.3496 0 64 -28.6504 64 -64c0 -19.2197 -8.65039 -36.2695 -22.0703 -48h14.0703c39.7598 0 72 -32.2305 72 -72c0 -23.4102 -11.3398 -43.9902 -28.6396 -57.1396z" />
+ <glyph glyph-name="shapes" unicode="&#xf61f;"
+d="M512 128v-160c0 -17.6699 -14.3301 -32 -32 -32h-160c-17.6699 0 -32 14.3301 -32 32v160c0 17.6699 14.3301 32 32 32h160c17.6699 0 32 -14.3301 32 -32zM128 192c70.6904 0 128 -57.3096 128 -128s-57.3096 -128 -128 -128s-128 57.3096 -128 128s57.3096 128 128 128
+zM479.03 224h-190.061c-25.3398 0 -41.1797 26.6699 -28.5098 48l95.0303 160c12.6699 21.3301 44.3496 21.3301 57.0195 0l95.0303 -160c12.6699 -21.3301 -3.16992 -48 -28.5098 -48z" />
+ <glyph glyph-name="star-of-life" unicode="&#xf621;" horiz-adv-x="480"
+d="M471.99 113.57c7.66016 -4.41992 10.2793 -14.2002 5.85938 -21.8506l-32.0195 -55.4297c-4.41992 -7.66016 -14.21 -10.2803 -21.8701 -5.86035l-135.93 78.4307v-156.86c0 -8.83984 -7.16992 -16 -16.0107 -16h-64.0391c-8.84082 0 -16.0107 7.16016 -16.0107 16
+v156.85l-135.93 -78.4297c-7.66016 -4.41016 -17.4502 -1.79004 -21.8701 5.86035l-32.0195 55.4297c-4.41992 7.65039 -1.80078 17.4404 5.85938 21.8604l135.931 78.4297l-135.931 78.4297c-7.66016 4.41992 -10.2793 14.21 -5.85938 21.8604l32.0195 55.4199
+c4.41992 7.65039 14.21 10.2803 21.8701 5.86035l135.93 -78.4307v156.86c0 8.83984 7.16992 16 16.0107 16h64.0391c8.84082 0 16.0107 -7.16016 16.0107 -16v-156.85l135.93 78.4297c7.66016 4.41992 17.4502 1.79004 21.8701 -5.86035l32.0195 -55.4297
+c4.41992 -7.66016 1.80078 -17.4404 -5.85938 -21.8604l-135.931 -78.4297z" />
+ <glyph glyph-name="teeth" unicode="&#xf62e;" horiz-adv-x="640"
+d="M544 448c53.0195 0 96 -42.9805 96 -96v-320c0 -53.0195 -42.9805 -96 -96 -96h-448c-53.0195 0 -96 42.9805 -96 96v320c0 53.0195 42.9805 96 96 96h448zM160 80v64c0 8.83984 -7.16016 16 -16 16h-64c-8.83984 0 -16 -7.16016 -16 -16v-64
+c0 -26.5098 21.4902 -48 48 -48s48 21.4902 48 48zM160 208v64c0 26.5098 -21.4902 48 -48 48s-48 -21.4902 -48 -48v-64c0 -8.83984 7.16016 -16 16 -16h64c8.83984 0 16 7.16016 16 16zM304 88v56c0 8.83984 -7.16016 16 -16 16h-80c-8.83984 0 -16 -7.16016 -16 -16v-56
+c0 -30.9297 25.0703 -56 56 -56s56 25.0703 56 56zM304 208v88c0 30.9297 -25.0703 56 -56 56s-56 -25.0703 -56 -56v-88c0 -8.83984 7.16016 -16 16 -16h80c8.83984 0 16 7.16016 16 16zM448 88v56c0 8.83984 -7.16016 16 -16 16h-80c-8.83984 0 -16 -7.16016 -16 -16v-56
+c0 -30.9297 25.0703 -56 56 -56s56 25.0703 56 56zM448 208v88c0 30.9297 -25.0703 56 -56 56s-56 -25.0703 -56 -56v-88c0 -8.83984 7.16016 -16 16 -16h80c8.83984 0 16 7.16016 16 16zM576 80v64c0 8.83984 -7.16016 16 -16 16h-64c-8.83984 0 -16 -7.16016 -16 -16v-64
+c0 -26.5098 21.4902 -48 48 -48s48 21.4902 48 48zM576 208v64c0 26.5098 -21.4902 48 -48 48s-48 -21.4902 -48 -48v-64c0 -8.83984 7.16016 -16 16 -16h64c8.83984 0 16 7.16016 16 16z" />
+ <glyph glyph-name="teeth-open" unicode="&#xf62f;" horiz-adv-x="640"
+d="M544 448c53.0195 0 96 -42.9805 96 -96v-64c0 -35.3496 -28.6602 -64 -64 -64h-512c-35.3398 0 -64 28.6504 -64 64v64c0 53.0195 42.9805 96 96 96h448zM160 272v32c0 26.5098 -21.4902 48 -48 48s-48 -21.4902 -48 -48v-32c0 -8.83984 7.16016 -16 16 -16h64
+c8.83984 0 16 7.16016 16 16zM304 272v56c0 30.9297 -25.0703 56 -56 56s-56 -25.0703 -56 -56v-56c0 -8.83984 7.16016 -16 16 -16h80c8.83984 0 16 7.16016 16 16zM448 272v56c0 30.9297 -25.0703 56 -56 56s-56 -25.0703 -56 -56v-56c0 -8.83984 7.16016 -16 16 -16h80
+c8.83984 0 16 7.16016 16 16zM576 272v32c0 26.5098 -21.4902 48 -48 48s-48 -21.4902 -48 -48v-32c0 -8.83984 7.16016 -16 16 -16h64c8.83984 0 16 7.16016 16 16zM576 128c35.3398 0 64 -28.6504 64 -64v-32c0 -53.0195 -42.9805 -96 -96 -96h-448
+c-53.0195 0 -96 42.9805 -96 96v32c0 35.3496 28.6602 64 64 64h512zM160 48v32c0 8.83984 -7.16016 16 -16 16h-64c-8.83984 0 -16 -7.16016 -16 -16v-32c0 -26.5098 21.4902 -48 48 -48s48 21.4902 48 48zM304 56v24c0 8.83984 -7.16016 16 -16 16h-80
+c-8.83984 0 -16 -7.16016 -16 -16v-24c0 -30.9297 25.0703 -56 56 -56s56 25.0703 56 56zM448 56v24c0 8.83984 -7.16016 16 -16 16h-80c-8.83984 0 -16 -7.16016 -16 -16v-24c0 -30.9297 25.0703 -56 56 -56s56 25.0703 56 56zM576 48v32c0 8.83984 -7.16016 16 -16 16h-64
+c-8.83984 0 -16 -7.16016 -16 -16v-32c0 -26.5098 21.4902 -48 48 -48s48 21.4902 48 48z" />
+ <glyph glyph-name="theater-masks" unicode="&#xf630;" horiz-adv-x="640"
+d="M206.86 202.85l-7.62988 -43.1797c-21.0908 -7.21973 -38.5 -18.2002 -49.9004 -30.9199c-2.41992 32.9004 21.6504 63.6504 57.5303 74.0996zM95.8096 153c5.87012 -33.1699 64.3203 -71.8096 111.931 -84.79c10.5596 -27.0996 25.9795 -50.5 39.5 -67.5801
+c-3.25977 -0.339844 -6.41992 -0.799805 -10.0098 -0.799805c-68.1201 0 -190.221 61.5596 -204.45 142.08l-31.7305 179.51c-4.96973 28.0996 7.98047 56.0996 32.1504 69.5205c67.8193 37.6396 143.46 57.0596 220.12 57.0596c23.9199 0 47.9492 -1.88965 71.8594 -5.7002
+c17.29 -2.76953 31.7803 -13.0498 41.7705 -27c-19.1299 -0.769531 -38.29 -2.2998 -57.1807 -5.31934c-19.8594 -3.1709 -37.6396 -12.5303 -51.5898 -26c-1.62012 0.0195312 -3.23926 0.189453 -4.84961 0.189453c-65.8896 0 -131.25 -16.9502 -189.01 -49.0098
+c0.0400391 -0.0302734 -0.530273 -1.03027 -0.240234 -2.65039zM193.36 290.46c3.83984 -21.7002 -10.6904 -42.4004 -32.4404 -46.2197c-21.7598 -3.82031 -42.5 10.6699 -46.3398 32.3701c-0.730469 4.12012 -0.610352 8.15918 -0.0898438 12.0996
+c10.8096 -5.57031 24.8301 -7.75 39.4795 -5.16992c14.6504 2.57031 27.0703 9.41016 35.3203 18.3203c1.83984 -3.53027 3.33984 -7.28027 4.07031 -11.4004zM606.8 327.1c24.1699 -13.4092 37.1201 -41.4092 32.1504 -69.5195l-31.7305 -179.51
+c-15.3896 -87.0508 -156.83 -151.931 -219.859 -140.84c-63.0303 11.0791 -173.7 120.3 -158.311 207.35l31.7305 179.51c4.95996 28.1006 26.7295 50 54.04 54.3604c100.55 16.0596 203 -1.96973 291.979 -51.3506zM333.56 230.3
+c-0.729492 -4.12012 -0.609375 -8.16016 -0.0791016 -12.1094c10.8096 5.56934 24.8291 7.75 39.4795 5.16992c14.6504 -2.57031 27.0703 -9.41016 35.3203 -18.3203c1.83984 3.53027 3.33984 7.28027 4.06934 11.4004c3.83008 21.6992 -10.6895 42.3896 -32.4492 46.2197
+c-21.7607 3.83008 -42.5 -10.6602 -46.3408 -32.3604zM404.03 31.54c55.6699 -9.79004 108.2 23.7803 122.38 75.7197c-28.1104 -16.9697 -68.6504 -24.21 -111.93 -16.5996c-43.2803 7.60938 -78.8906 28.2402 -99.4902 53.7803
+c-4.48047 -53.6309 33.3594 -103.11 89.04 -112.9zM534.33 182.88c1.83984 3.52051 3.33984 7.27051 4.07031 11.4004c3.83008 21.7002 -10.7002 42.3896 -32.4502 46.2197c-21.7598 3.82031 -42.5 -10.6699 -46.3398 -32.3701
+c-0.730469 -4.12012 -0.610352 -8.16016 -0.0908203 -12.0996c10.8105 5.56934 24.8301 7.75 39.4805 5.16992c14.6504 -2.57031 27.0801 -9.41016 35.3301 -18.3203z" />
+ <glyph glyph-name="traffic-light" unicode="&#xf637;" horiz-adv-x="384"
+d="M384 256c0 -41.7402 -26.7998 -76.9004 -64 -90.1201v-37.8799h64c0 -42.8398 -28.25 -78.6904 -66.9902 -91.0498c-12.4297 -57.6699 -63.6094 -100.95 -125.01 -100.95s-112.58 43.2803 -125.01 100.95c-38.7402 12.3594 -66.9902 48.21 -66.9902 91.0498h64v37.8799
+c-37.2002 13.2197 -64 48.3799 -64 90.1201h64v37.8799c-37.2002 13.2197 -64 48.3799 -64 90.1201h64v32c0 17.6699 14.3301 32 32 32h192c17.6699 0 32 -14.3301 32 -32v-32h64c0 -41.7402 -26.7998 -76.9004 -64 -90.1201v-37.8799h64zM192 32
+c26.5098 0 48 21.4902 48 48s-21.4902 48 -48 48s-48 -21.4902 -48 -48s21.4902 -48 48 -48zM192 160c26.5098 0 48 21.4902 48 48s-21.4902 48 -48 48s-48 -21.4902 -48 -48s21.4902 -48 48 -48zM192 288c26.5098 0 48 21.4902 48 48s-21.4902 48 -48 48
+s-48 -21.4902 -48 -48s21.4902 -48 48 -48z" />
+ <glyph glyph-name="truck-monster" unicode="&#xf63b;" horiz-adv-x="640"
+d="M624 224c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-16.71c-29.21 38.6504 -75.0996 64 -127.28 64c-52.1797 0 -98.0693 -25.3496 -127.279 -64h-65.4502c-29.21 38.6504 -75.1006 64 -127.28 64s-98.0703 -25.3496 -127.28 -64h-16.7197
+c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h16v80c0 8.83984 7.16016 16 16 16h176v96c0 17.6699 14.3301 32 32 32h113.24c16.9795 -0.00195312 39.3701 -10.7627 49.9795 -24.0195l83.1807 -103.98h73.5996c17.6699 0 32 -14.3301 32 -32v-64h16z
+M288 320h132.44l-51.2002 64h-81.2402v-64zM592 96c8.83984 0 16 -7.16016 16.0195 -16v-32c0 -8.83984 -7.15918 -16 -16 -16h-5.19922c-1.79688 -6.01074 -5.67188 -15.3691 -8.65039 -20.8896l3.66992 -3.66992c6.25 -6.25 6.25 -16.3809 0 -22.6309l-22.6299 -22.6299
+c-6.25 -6.25 -16.3799 -6.25 -22.6299 0l-3.66992 3.66992c-6.62012 -3.58008 -13.5703 -6.44922 -20.9004 -8.64941v-5.2002c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v5.2002c-7.33008 2.2002 -14.29 5.08008 -20.8994 8.64941
+l-3.66992 -3.66992c-6.25 -6.25 -16.3809 -6.25 -22.6309 0l-22.6299 22.6299c-6.25 6.25 -6.25 16.3809 0 22.6309l3.66992 3.66992c-3.58008 6.60938 -6.44922 13.5596 -8.64941 20.8896h-5.2002c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h5.2002
+c1.7959 6.01074 5.6709 15.3691 8.64941 20.8896l-3.66992 3.66992c-6.25 6.25 -6.25 16.3809 0 22.6309l22.6299 22.6299c6.25 6.25 16.3809 6.25 22.6309 0l3.66992 -3.66992c6.60938 3.58008 13.5596 6.44922 20.8896 8.64941v5.2002c0 8.83984 7.16016 16 16 16h32
+c8.83984 0 16 -7.16016 16 -16v-5.2002c6.01074 -1.7959 15.3691 -5.6709 20.8896 -8.64941l3.66992 3.66992c6.25 6.25 16.3809 6.25 22.6309 0l22.6299 -22.6299c6.25 -6.25 6.25 -16.3809 0 -22.6309l-3.66992 -3.66992
+c3.58008 -6.60938 6.44922 -13.5596 8.64941 -20.8896h5.2002zM480 16c26.5098 0 48 21.4902 48 48s-21.4902 48 -48 48s-48 -21.4902 -48 -48s21.4902 -48 48 -48zM272 96c8.83984 0 16 -7.16016 16.0195 -15.9902v-32c0 -8.83984 -7.15918 -16 -16 -16h-5.19922
+c-1.79688 -6.00977 -5.67188 -15.3691 -8.65039 -20.8896l3.66992 -3.66992c6.25 -6.25 6.25 -16.3799 0 -22.6299l-22.6299 -22.6299c-6.25 -6.25 -16.3799 -6.25 -22.6299 0l-3.66992 3.66992c-6.62012 -3.58008 -13.5703 -6.4502 -20.9004 -8.65039v-5.2002
+c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v5.2002c-7.33008 2.2002 -14.29 5.08008 -20.8994 8.65039l-3.66992 -3.66992c-6.25 -6.25 -16.3809 -6.25 -22.6309 0l-22.6299 22.6299c-6.25 6.25 -6.25 16.3799 0 22.6299l3.66992 3.66992
+c-3.58008 6.61035 -6.44922 13.5596 -8.64941 20.8896h-5.2002c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h5.2002c1.7959 6.01074 5.6709 15.3691 8.64941 20.8906l-3.66992 3.66992c-6.25 6.25 -6.25 16.3799 0 22.6299l22.6299 22.6201
+c6.25 6.25 16.3809 6.25 22.6309 0l3.66992 -3.66992c6.60938 3.58008 13.5596 6.44922 20.8896 8.64941v5.2002c0 8.83984 7.16016 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-5.2002c6.01074 -1.7959 15.3691 -5.6709 20.8896 -8.64941l3.66992 3.66992
+c6.25 6.25 16.3809 6.25 22.6309 0l22.6299 -22.6299c6.25 -6.25 6.25 -16.3809 0 -22.6309l-3.66992 -3.66992c3.58008 -6.60938 6.44922 -13.5596 8.64941 -20.8896h5.2002zM160 16c26.5098 0 48 21.4902 48 48s-21.4902 48 -48 48s-48 -21.4902 -48 -48
+s21.4902 -48 48 -48z" />
+ <glyph glyph-name="truck-pickup" unicode="&#xf63c;" horiz-adv-x="640"
+d="M624 160c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-49.5996c0.759766 -5.26953 1.60938 -10.5195 1.60938 -16c0 -61.8604 -50.1396 -112 -112 -112c-61.8594 0 -112 50.1396 -112 112c0 5.48047 0.850586 10.7305 1.61035 16h-67.2305
+c0.760742 -5.26953 1.61035 -10.5195 1.61035 -16c0 -61.8604 -50.1396 -112 -112 -112s-112 50.1396 -112 112c0 5.48047 0.849609 10.7305 1.61035 16h-49.6104c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h16v64c0 17.6699 14.3301 32 32 32h160v128
+c0 17.6699 14.3301 32 32 32h113.24c16.9795 -0.00195312 39.3701 -10.7627 49.9795 -24.0195l108.78 -135.98h48c17.6699 0 32 -14.3301 32 -32v-64h16zM288 352v-96h158.04l-76.7998 96h-81.2402zM176 32c26.4697 0 48 21.5303 48 48s-21.5303 48 -48 48
+s-48 -21.5303 -48 -48s21.5303 -48 48 -48zM464 32c26.4697 0 48 21.5303 48 48s-21.5303 48 -48 48s-48 -21.5303 -48 -48s21.5303 -48 48 -48z" />
+ <glyph glyph-name="ad" unicode="&#xf641;"
+d="M157.52 176l18.4805 53.2197l18.4805 -53.2197h-36.9609zM352 192c13.2305 0 24 -10.7695 24 -24s-10.7695 -24 -24 -24s-24 10.7695 -24 24s10.7695 24 24 24zM464 384c26.5 0 48 -21.5 48 -48v-288c0 -26.5 -21.5 -48 -48 -48h-416c-26.5 0 -48 21.5 -48 48v288
+c0 26.5 21.5 48 48 48h416zM250.58 96c11 0 18.7197 10.8496 15.1104 21.25l-53.6904 154.62c-3.08789 8.90332 -13.2422 16.1299 -22.666 16.1299h-0.00390625h-26.6602h-0.00390625c-9.41992 0 -19.5742 -7.22168 -22.666 -16.1201l-53.7002 -154.63
+c-3.60938 -10.4004 4.11035 -21.25 15.1201 -21.25h16.9404h0.00585938c6.28125 0 13.0527 4.81641 15.1143 10.75l7.37988 21.25h70.29l7.36914 -21.25c2.24023 -6.42969 8.31055 -10.75 15.1201 -10.75h16.9404zM424 112v160c0 8.83984 -7.16016 16 -16 16h-16
+c-8.83984 0 -16 -7.16016 -16 -16v-36.4199c-7.54004 2.68945 -15.54 4.41992 -24 4.41992c-39.7002 0 -72 -32.2998 -72 -72s32.2998 -72 72 -72c9.92969 0 19.4004 2.01953 28.0195 5.67969c2.94043 -3.41016 7.13086 -5.67969 11.9805 -5.67969h16
+c8.83984 0 16 7.16016 16 16z" />
+ <glyph glyph-name="ankh" unicode="&#xf644;" horiz-adv-x="320"
+d="M296 192c13.25 0 24 -10.7402 24 -24v-32c0 -13.25 -10.75 -24 -24 -24h-96v-152c0 -13.25 -10.75 -24 -24 -24h-32c-13.25 0 -24 10.75 -24 24v152h-96c-13.25 0 -24 10.75 -24 24v32c0 13.2598 10.75 24 24 24h44.6201c-21.0801 33.9902 -36.6201 74.3496 -36.6201 112
+c0 88.3701 57.3096 144 128 144s128 -55.6299 128 -144c0 -37.6504 -15.54 -78.0098 -36.6201 -112h44.6201zM160 368c-29.6104 0 -48 -24.5195 -48 -64c0 -34.6602 27.1396 -78.1504 48 -100.87c20.8604 22.7305 48 66.21 48 100.87c0 39.4805 -18.3896 64 -48 64z" />
+ <glyph glyph-name="bible" unicode="&#xf647;" horiz-adv-x="448"
+d="M448 89.5996c0 -9.59961 -3.2002 -16 -9.59961 -19.1992c-3.2002 -12.8008 -3.2002 -57.6006 0 -73.6006c6.39941 -6.39941 9.59961 -12.7998 9.59961 -19.2002v-16c0 -16 -12.7998 -25.5996 -25.5996 -25.5996h-326.4c-54.4004 0 -96 41.5996 -96 96v320
+c0 54.4004 41.5996 96 96 96h326.4c16 0 25.5996 -9.59961 25.5996 -25.5996v-332.801zM144 304v-32c0 -8.83984 7.16016 -16 16 -16h48v-112c0 -8.83984 7.16016 -16 16 -16h32c8.83984 0 16 7.16016 16 16v112h48c8.83984 0 16 7.16016 16 16v32
+c0 8.83984 -7.16016 16 -16 16h-48v48c0 8.83984 -7.16016 16 -16 16h-32c-8.83984 0 -16 -7.16016 -16 -16v-48h-48c-8.83984 0 -16 -7.16016 -16 -16zM380.8 0v64h-284.8c-16 0 -32 -12.7998 -32 -32s12.7998 -32 32 -32h284.8z" />
+ <glyph glyph-name="business-time" unicode="&#xf64a;" horiz-adv-x="640"
+d="M496 224c79.5898 0 144 -64.4102 144 -144s-64.4102 -144 -144 -144s-144 64.4102 -144 144s64.4102 144 144 144zM560 73.71v12.5801c0 5.33984 -4.37012 9.70996 -9.70996 9.70996h-38.29v54.2803c0 5.33984 -4.37012 9.70996 -9.70996 9.70996h-12.5703
+c-5.33984 0 -9.70996 -4.37012 -9.70996 -9.70996v-76.5703c0 -5.33984 4.37012 -9.70996 9.70996 -9.70996h60.5703c5.33984 0 9.70996 4.37012 9.70996 9.70996zM496 256c-37.5303 0 -72.2803 -11.9102 -100.88 -32h-395.12v80c0 25.5996 22.4004 48 48 48h80v48
+c0 25.5996 22.4004 48 48 48h160c25.5996 0 48 -22.4004 48 -48v-48h80c25.5996 0 48 -22.4004 48 -48v-48.8096c-5.28027 0.479492 -10.5996 0.80957 -16 0.80957zM320 352v32h-128v-32h128zM326.82 128c-4.33984 -15.2803 -6.82031 -31.3398 -6.82031 -48
+c0 -28.8203 7.09961 -55.96 19.4297 -80h-291.43c-25.5996 0 -48 22.4004 -48 48v144h192v-48c0 -8.83984 7.16016 -16 16 -16h118.82z" />
+ <glyph glyph-name="city" unicode="&#xf64f;" horiz-adv-x="640"
+d="M616 256c13.25 0 24 -10.7402 24 -24v-264c0 -17.6699 -14.3301 -32 -32 -32h-576c-17.6699 0 -32 14.3301 -32 32v360c0 13.2598 10.7402 24 24 24h40v80c0 8.83984 7.16016 16 16 16h16c8.83984 0 16 -7.16016 16 -16v-80h64v80c0 8.83984 7.16016 16 16 16h16
+c8.83984 0 16 -7.16016 16 -16v-80h64v72c0 13.2598 10.7402 24 24 24h144c13.2598 0 24 -10.7402 24 -24v-168h136zM128 44v40c0 6.62988 -5.37012 12 -12 12h-40c-6.62988 0 -12 -5.37012 -12 -12v-40c0 -6.62988 5.37012 -12 12 -12h40c6.62988 0 12 5.37012 12 12z
+M128 140v40c0 6.62988 -5.37012 12 -12 12h-40c-6.62988 0 -12 -5.37012 -12 -12v-40c0 -6.62988 5.37012 -12 12 -12h40c6.62988 0 12 5.37012 12 12zM128 236v40c0 6.62988 -5.37012 12 -12 12h-40c-6.62988 0 -12 -5.37012 -12 -12v-40c0 -6.62988 5.37012 -12 12 -12h40
+c6.62988 0 12 5.37012 12 12zM256 44v40c0 6.62988 -5.37012 12 -12 12h-40c-6.62988 0 -12 -5.37012 -12 -12v-40c0 -6.62988 5.37012 -12 12 -12h40c6.62988 0 12 5.37012 12 12zM256 140v40c0 6.62988 -5.37012 12 -12 12h-40c-6.62988 0 -12 -5.37012 -12 -12v-40
+c0 -6.62988 5.37012 -12 12 -12h40c6.62988 0 12 5.37012 12 12zM256 236v40c0 6.62988 -5.37012 12 -12 12h-40c-6.62988 0 -12 -5.37012 -12 -12v-40c0 -6.62988 5.37012 -12 12 -12h40c6.62988 0 12 5.37012 12 12zM416 140v40c0 6.62988 -5.37012 12 -12 12h-40
+c-6.62988 0 -12 -5.37012 -12 -12v-40c0 -6.62988 5.37012 -12 12 -12h40c6.62988 0 12 5.37012 12 12zM416 236v40c0 6.62988 -5.37012 12 -12 12h-40c-6.62988 0 -12 -5.37012 -12 -12v-40c0 -6.62988 5.37012 -12 12 -12h40c6.62988 0 12 5.37012 12 12zM416 332v40
+c0 6.62988 -5.37012 12 -12 12h-40c-6.62988 0 -12 -5.37012 -12 -12v-40c0 -6.62988 5.37012 -12 12 -12h40c6.62988 0 12 5.37012 12 12zM576 44v40c0 6.62988 -5.37012 12 -12 12h-40c-6.62988 0 -12 -5.37012 -12 -12v-40c0 -6.62988 5.37012 -12 12 -12h40
+c6.62988 0 12 5.37012 12 12zM576 140v40c0 6.62988 -5.37012 12 -12 12h-40c-6.62988 0 -12 -5.37012 -12 -12v-40c0 -6.62988 5.37012 -12 12 -12h40c6.62988 0 12 5.37012 12 12z" />
+ <glyph glyph-name="comment-dollar" unicode="&#xf651;"
+d="M256 416c141.38 0 256 -93.1201 256 -208s-114.62 -208 -256 -208c-38.4102 0 -74.71 7.07031 -107.4 19.3799c-24.6094 -19.6299 -74.3398 -51.3799 -140.6 -51.3799h-0.00195312c-4.41309 0 -7.99512 3.58203 -7.99512 7.99512
+c0 1.76172 0.984375 4.22754 2.19727 5.50488c0.5 0.530273 42.2598 45.4502 54.8193 95.7598c-35.6094 35.7305 -57.0195 81.1807 -57.0195 130.74c0 114.88 114.62 208 256 208zM280 113.56c30.29 3.62012 53.3701 30.9805 49.3203 63.04
+c-2.90039 22.96 -20.6602 41.3105 -42.9102 47.6699l-50.0703 14.3008c-3.59961 1.0293 -6.12012 4.35938 -6.12012 8.10938c0 4.64062 3.78027 8.41992 8.44043 8.41992h32.7803h0.0654297c2.94727 0 7.51367 -0.994141 10.1943 -2.21973
+c4.7998 -2.20996 10.3701 -1.70996 14.1094 2.03027l17.5205 17.5195c5.26953 5.27051 4.66992 14.2705 -1.5498 18.3799c-9.5 6.27051 -20.3604 10.1104 -31.7803 11.46v17.7305c0 8.83984 -7.16016 16 -16 16h-16c-8.83984 0 -16 -7.16016 -16 -16v-17.5498
+c-30.29 -3.62012 -53.3701 -30.9805 -49.3203 -63.0498c2.90039 -22.96 20.6602 -41.3203 42.9102 -47.6699l50.0703 -14.3008c3.59961 -1.0293 6.12012 -4.35938 6.12012 -8.10938c0 -4.64062 -3.78027 -8.41992 -8.44043 -8.41992h-32.7803
+c-3.59961 0 -7.0791 0.759766 -10.2598 2.21973c-4.7998 2.20996 -10.3701 1.70996 -14.1094 -2.03027l-17.5205 -17.5195c-5.26953 -5.27051 -4.66992 -14.2705 1.5498 -18.3799c9.5 -6.27051 20.3604 -10.1104 31.7803 -11.46v-17.7305c0 -8.83984 7.16016 -16 16 -16h16
+c8.83984 0 16 7.16016 16 16v17.5596z" />
+ <glyph glyph-name="comments-dollar" unicode="&#xf653;" horiz-adv-x="576"
+d="M416 256c0 -88.3701 -93.1201 -160 -208 -160c-40.9805 0 -79.0703 9.24023 -111.27 24.9805c-21.8008 -12.7305 -52.1504 -24.9805 -88.7305 -24.9805h-0.00195312c-4.41309 0 -7.99512 3.58203 -7.99512 7.99512c0 1.76172 0.984375 4.22754 2.19727 5.50488
+c0.319336 0.339844 22.4102 24.2803 35.7695 54.5195c-23.8398 26.0303 -37.9697 57.7109 -37.9697 91.9805c0 88.3701 93.1201 160 208 160s208 -71.6299 208 -160zM192 160c0 -4.41992 3.58008 -8 8 -8h16c4.41992 0 8 3.58008 8 8v16.1201
+c23.6201 0.629883 42.6699 20.54 42.6699 45.0703c0 19.9697 -12.9902 37.8096 -31.5801 43.3896l-45 13.5c-5.16016 1.54004 -8.76953 6.78027 -8.76953 12.7295c0 7.27051 5.2998 13.1904 11.7998 13.1904h28.1104c4.55957 0 8.94922 -1.29004 12.8193 -3.71973
+c3.24023 -2.03027 7.36035 -1.91016 10.1299 0.729492l11.75 11.21c3.53027 3.37012 3.33008 9.20996 -0.569336 12.1406c-9.10059 6.83984 -20.0801 10.7695 -31.3701 11.3496v16.29c0 4.41992 -3.58008 8 -8 8h-16c-4.41992 0 -8 -3.58008 -8 -8v-16.1201
+c-23.6201 -0.629883 -42.6699 -20.5498 -42.6699 -45.0703c0 -19.9697 12.9893 -37.8096 31.5801 -43.3896l45 -13.5c5.15918 -1.54004 8.76953 -6.78027 8.76953 -12.7295c0 -7.27051 -5.2998 -13.1904 -11.7998 -13.1904h-28.1104
+c-4.55957 0 -8.9502 1.2998 -12.8193 3.71973c-3.24023 2.03027 -7.36035 1.91016 -10.1309 -0.729492l-11.75 -11.21c-3.5293 -3.37012 -3.3291 -9.20996 0.570312 -12.1406c9.10059 -6.83008 20.0801 -10.7695 31.3701 -11.3496v-16.29zM538.01 36.0098
+c13.3604 -30.2598 35.4707 -54.1699 35.7803 -54.5c2.20996 -2.33008 2.82031 -5.72949 1.5498 -8.66992c-1.25 -2.92969 -4.13965 -4.83984 -7.33984 -4.83984c-36.5801 0 -66.9297 12.25 -88.7305 24.9805c-32.1992 -15.7402 -70.2891 -24.9805 -111.27 -24.9805
+c-86.2305 0 -160.2 40.3701 -191.73 97.8799c10.4102 -1.11035 20.9502 -1.87988 31.7305 -1.87988c132.34 0 240 86.1299 240 192c0 6.78027 -0.469727 13.4697 -1.33008 20.0703c75.8398 -23.8701 129.33 -81.1299 129.33 -148.07
+c0 -34.2695 -14.1299 -65.96 -37.9902 -91.9902z" />
+ <glyph glyph-name="cross" unicode="&#xf654;" horiz-adv-x="384"
+d="M352 320c17.6699 0 32 -14.3301 32 -32v-64c0 -17.6699 -14.3301 -32 -32 -32h-96v-224c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32v224h-96c-17.6699 0 -32 14.3301 -32 32v64c0 17.6699 14.3301 32 32 32h96v96c0 17.6699 14.3301 32 32 32
+h64c17.6699 0 32 -14.3301 32 -32v-96h96z" />
+ <glyph glyph-name="dharmachakra" unicode="&#xf655;"
+d="M495 222.94c9.20996 0.569336 17 -6.74023 17.0098 -15.9707v-29.9395c0 -9.23047 -7.79004 -16.5498 -17 -15.9707l-17.2197 1.08008c-5.28027 -39.4795 -20.7998 -75.6299 -43.8604 -105.83l12.9502 -11.4297c6.91992 -6.09961 7.25977 -16.7803 0.730469 -23.3096
+l-21.1699 -21.1699c-6.52051 -6.52051 -17.2002 -6.19043 -23.3105 0.729492l-11.4297 12.9502c-30.2002 -23.0703 -66.3506 -38.5898 -105.84 -43.8604l1.08008 -17.2197c0.569336 -9.20996 -6.74023 -17 -15.9707 -17h-29.9395c-9.23047 0 -16.5498 7.79004 -15.9707 17
+l1.09082 17.2197c-39.4902 5.28027 -75.6406 20.7998 -105.841 43.8604l-11.4297 -12.9502c-6.09961 -6.91992 -16.7803 -7.25977 -23.3096 -0.729492l-21.1699 21.1699c-6.52051 6.51953 -6.19043 17.1992 0.729492 23.3096l12.9502 11.4297
+c-23.0703 30.1904 -38.5898 66.3408 -43.8604 105.83l-17.2197 -1.08008c-9.20996 -0.569336 -17 6.74023 -17 15.9707v29.9395c0 9.23047 7.79004 16.5498 17 15.9707l17.2197 -1.08008c5.28027 39.4893 20.7998 75.6396 43.8604 105.84l-12.9502 11.4297
+c-6.91992 6.10059 -7.25977 16.7803 -0.729492 23.3105l21.1699 21.1592c6.51953 6.52051 17.1992 6.19043 23.3096 -0.729492l11.4297 -12.9502c30.1904 23.0703 66.3408 38.5898 105.83 43.8604l-1.08008 17.2197c-0.569336 9.20996 6.74023 17 15.9707 17h29.9395
+c9.23047 0 16.5498 -7.79004 15.9707 -17l-1.07031 -17.21c39.4795 -5.28027 75.6299 -20.7998 105.83 -43.8604l11.4297 12.9404c6.10059 6.91992 16.7803 7.25977 23.3105 0.729492l21.1592 -21.1592c6.52051 -6.52051 6.19043 -17.2002 -0.729492 -23.3105
+l-12.9502 -11.4297c23.0703 -30.2002 38.5898 -66.3506 43.8604 -105.84zM281.84 349.39l-4.00977 -64.1201c10.2998 -2.40918 19.8896 -6.50977 28.6201 -11.9492l42.6201 48.29c-19.6006 14.1201 -42.4199 23.71 -67.2305 27.7793zM230.16 349.39
+c-24.8105 -4.06934 -47.6299 -13.6592 -67.2305 -27.7793l42.6201 -48.29c8.73047 5.42969 18.3301 9.54004 28.6201 11.9492zM126.39 285.06c-14.1201 -19.5996 -23.71 -42.4092 -27.7793 -67.2197l64.1201 -4.00977c2.41992 10.29 6.51953 19.8896 11.96 28.6201z
+M98.6104 166.16c4.06934 -24.8105 13.6592 -47.6299 27.7793 -67.2305l48.29 42.6201c-5.42969 8.73047 -9.54004 18.3301 -11.9492 28.6201zM230.16 34.6104l4 64.1201c-10.29 2.41992 -19.8906 6.51953 -28.6201 11.96l-42.6104 -48.3008
+c19.6006 -14.1201 42.4199 -23.71 67.2305 -27.7793zM256 160c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM281.84 34.6104c24.8105 4.06934 47.6201 13.6592 67.2197 27.7793l-42.6094 48.3008
+c-8.73047 -5.44043 -18.3301 -9.55078 -28.6201 -11.96zM385.61 98.9404c14.1201 19.5898 23.71 42.4092 27.7793 67.2295l-64.1201 4.00977c-2.40918 -10.2998 -6.50977 -19.8896 -11.9492 -28.6201zM349.27 213.83l64.1201 4
+c-4.06934 24.8096 -13.6592 47.6299 -27.7793 67.2295l-48.3008 -42.6094c5.44043 -8.73047 9.55078 -18.3301 11.96 -28.6201z" />
+ <glyph glyph-name="envelope-open-text" unicode="&#xf658;"
+d="M176 232c-8.83984 0 -16 7.16016 -16 16v16c0 8.83984 7.16016 16 16 16h160c8.83984 0 16 -7.16016 16 -16v-16c0 -8.83984 -7.16016 -16 -16 -16h-160zM160 152v16c0 8.83984 7.16016 16 16 16h160c8.83984 0 16 -7.16016 16 -16v-16c0 -8.83984 -7.16016 -16 -16 -16
+h-160c-8.83984 0 -16 7.16016 -16 16zM256 30.8701c16.4199 0 32.8398 5.07031 46.8604 15.1895l209.14 151.08v-213.14c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v213.14l209.14 -151.08c14.0205 -10.1299 30.4404 -15.1895 46.8604 -15.1895z
+M493.61 285.05c11.6094 -9.09961 18.3896 -23.0303 18.3896 -37.7695v-10.6504l-96 -69.3496v184.72h-320v-184.72l-96 69.3496v10.6504c0.00195312 12.8945 8.24023 29.8154 18.3896 37.7695c8.85059 6.94043 17.2402 13.4805 29.6104 22.8105v44.1396
+c0 26.5098 21.4902 48 48 48h77.5498c3.04004 2.2002 5.87012 4.25977 9.04004 6.55957c16.8203 12.2705 50.21 41.79 73.4102 41.4404c23.2002 0.349609 56.5996 -29.1699 73.4102 -41.4404c3.16992 -2.2998 6 -4.35938 9.04004 -6.55957h77.5498
+c26.5098 0 48 -21.4902 48 -48v-44.1396c12.3701 -9.34082 20.7598 -15.8701 29.6104 -22.8105z" />
+ <glyph glyph-name="folder-minus" unicode="&#xf65d;"
+d="M464 320c26.5098 0 48 -21.4902 48 -48v-224c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v288c0 26.5098 21.4902 48 48 48h160l64 -64h192zM368 152v16c0 8.83984 -7.16016 16 -16 16h-192c-8.83984 0 -16 -7.16016 -16 -16v-16
+c0 -8.83984 7.16016 -16 16 -16h192c8.83984 0 16 7.16016 16 16z" />
+ <glyph glyph-name="folder-plus" unicode="&#xf65e;"
+d="M464 320c26.5098 0 48 -21.4902 48 -48v-224c0 -26.5098 -21.4902 -48 -48 -48h-416c-26.5098 0 -48 21.4902 -48 48v288c0 26.5098 21.4902 48 48 48h160l64 -64h192zM368 152v16c0 8.83984 -7.16016 16 -16 16h-72v72c0 8.83984 -7.16016 16 -16 16h-16
+c-8.83984 0 -16 -7.16016 -16 -16v-72h-72c-8.83984 0 -16 -7.16016 -16 -16v-16c0 -8.83984 7.16016 -16 16 -16h72v-72c0 -8.83984 7.16016 -16 16 -16h16c8.83984 0 16 7.16016 16 16v72h72c8.83984 0 16 7.16016 16 16z" />
+ <glyph glyph-name="funnel-dollar" unicode="&#xf662;" horiz-adv-x="640"
+d="M433.46 282.06c-83.4102 -20.8896 -145.46 -96.2695 -145.46 -186.06c0 -54.3496 22.7998 -103.38 59.21 -138.35c-10.75 -20.54 -38.3604 -29.21 -59.2197 -13.5703l-79.9902 60c-10.0703 7.55957 -16 19.4102 -16 32v155.92l-182.66 201.93
+c-19.9502 19.9502 -5.82031 54.0703 22.4004 54.0703h480.52c28.2207 0 42.3506 -34.1201 22.4004 -54.0703zM480 256c88.3701 0 160 -71.6299 160 -160s-71.6299 -160 -160 -160s-160 71.6299 -160 160s71.6299 160 160 160zM496 16.1201
+c23.6299 0.629883 42.6699 20.54 42.6699 45.0703c0 19.9697 -12.9902 37.8096 -31.5801 43.3896l-45 13.5c-5.16016 1.54004 -8.76953 6.78027 -8.76953 12.7295c0 7.27051 5.2998 13.1904 11.7998 13.1904h28.1104c4.55957 0 8.94922 -1.29004 12.8193 -3.71973
+c3.24023 -2.03027 7.36035 -1.91016 10.1299 0.729492l11.75 11.21c3.53027 3.37012 3.33008 9.20996 -0.569336 12.1406c-9.10059 6.83984 -20.0801 10.7695 -31.3701 11.3496v16.29c0 4.41992 -3.58008 8 -8 8h-16c-4.41992 0 -8 -3.58008 -8 -8v-16.1201
+c-23.6201 -0.629883 -42.6699 -20.5498 -42.6699 -45.0703c0 -19.9697 12.9893 -37.8096 31.5801 -43.3896l45 -13.5c5.15918 -1.54004 8.76953 -6.78027 8.76953 -12.7295c0 -7.27051 -5.2998 -13.1904 -11.7998 -13.1904h-28.1104
+c-4.55957 0 -8.9502 1.2998 -12.8193 3.71973c-3.24023 2.03027 -7.36035 1.91016 -10.1309 -0.729492l-11.75 -11.21c-3.5293 -3.37012 -3.3291 -9.20996 0.570312 -12.1406c9.10059 -6.83008 20.0801 -10.7695 31.3701 -11.3496v-16.29c0 -4.41992 3.58008 -8 8 -8h16
+c4.41992 0 8 3.58008 8 8v16.1201z" />
+ <glyph glyph-name="gopuram" unicode="&#xf664;"
+d="M496 96c8.7998 0 16 -7.2002 16 -16v-128c0 -8.7998 -7.2002 -16 -16 -16h-80v160h-32v128h-32v96h-32v-96h32v-128h32v-160h-80v80c0 8.7998 -7.2002 16 -16 16h-64c-8.7998 0 -16 -7.2002 -16 -16v-80h-80v160h32v128h32v96h-32v-96h-32v-128h-32v-160h-80
+c-8.7998 0 -16 7.2002 -16 16v128c0 8.7998 7.2002 16 16 16h16v112c0 8.7998 7.2002 16 16 16h16v80c0 8.7998 7.2002 16 16 16h16v112c0 8.7998 7.2002 16 16 16s16 -7.2002 16 -16v-16h64v16c0 8.7998 7.2002 16 16 16s16 -7.2002 16 -16v-16h64v16
+c0 8.7998 7.2002 16 16 16s16 -7.2002 16 -16v-16h64v16c0 8.7998 7.2002 16 16 16s16 -7.2002 16 -16v-112h16c8.7998 0 16 -7.2002 16 -16v-80h16c8.7998 0 16 -7.2002 16 -16v-112h16zM232 272v-48h48v48c0 8.7998 -7.2002 16 -16 16h-16c-8.7998 0 -16 -7.2002 -16 -16z
+M288 96v64c0 8.7998 -7.2002 16 -16 16h-32c-8.7998 0 -16 -7.2002 -16 -16v-64h64z" />
+ <glyph glyph-name="hamsa" unicode="&#xf665;"
+d="M509.34 140.75c1.46875 -3.37012 2.66016 -9.08984 2.66016 -12.7656c0 -6.95703 -3.85254 -16.7295 -8.59961 -21.8145l-102.681 -110.03c-35.6895 -38.2197 -88.4102 -60.1396 -144.72 -60.1396s-109.03 21.9199 -144.71 60.1396l-102.69 110.03
+c-4.74707 5.08496 -8.59961 14.8574 -8.59961 21.8145c0 3.67578 1.19141 9.39551 2.66016 12.7656c5.05957 11.6904 16.5898 19.25 29.3398 19.25h64v208c0 22 18 40 40 40s40 -18 40 -40v-134c0 -5.51953 4.48047 -10 10 -10h20c5.51953 0 10 4.48047 10 10v174
+c0 22 18 40 40 40s40 -18 40 -40v-174c0 -5.51953 4.48047 -10 10 -10h20c5.51953 0 10 4.48047 10 10v134c0 22 18 40 40 40s40 -18 40 -40v-208h64c12.75 0 24.2803 -7.55957 29.3398 -19.25zM256 32c53.0195 0 96 64 96 64s-42.9805 64 -96 64s-96 -64 -96 -64
+s42.9805 -64 96 -64zM256 128c17.6699 0 32 -14.3301 32 -32s-14.3301 -32 -32 -32s-32 14.3301 -32 32s14.3301 32 32 32z" />
+ <glyph glyph-name="haykal" unicode="&#xf666;"
+d="M496.25 245.48c17.54 -2.46094 21.6797 -26.2705 6.04004 -34.6602l-98.1602 -52.6602l74.4805 -83.54c11.8594 -13.29 0.00976562 -34.25 -17.3506 -30.4902l-108.569 23.6504l4.10938 -112.55c0.430664 -11.6504 -8.87012 -19.2207 -18.4102 -19.2207
+c-5.15918 0 -10.3896 2.20996 -14.1992 7.18066l-68.1807 88.8994l-68.1797 -88.8994c-3.81055 -4.9707 -9.0498 -7.18066 -14.2002 -7.18066c-9.54004 0 -18.8398 7.57031 -18.4102 19.2207l4.11035 112.55l-108.57 -23.6504
+c-1.39941 -0.30957 -2.75977 -0.450195 -4.06934 -0.450195c-15.0107 0 -24.21 18.6807 -13.29 30.9307l74.4795 83.54l-98.1602 52.6592c-15.6494 8.40039 -11.5098 32.21 6.03027 34.6709l110 15.4297l-41.8203 104.34c-6.66016 16.6396 11.6006 32.1797 26.5898 22.6299
+l94.04 -59.8896l34.0908 107.189c2.70996 8.55078 10.0293 12.8203 17.3496 12.8203s14.6396 -4.26953 17.3496 -12.8203l34.0908 -107.18l94.04 59.8896c14.9893 9.55078 33.2598 -5.98926 26.5898 -22.6299l-41.8203 -104.34zM338.51 136.32l-35.6094 39.9297
+l46.9199 25.1699l-52.5703 7.37988l19.9902 49.8701l-44.9502 -28.6201l-16.29 51.2305l-16.3096 -51.2305l-44.9502 28.6201l19.9902 -49.8701l-52.5703 -7.37988l46.9199 -25.1699l-35.5996 -39.9297l51.8896 11.2998l-1.95996 -53.79l32.5898 42.4902l32.5898 -42.4902
+l-1.96973 53.79z" />
+ <glyph glyph-name="jedi" unicode="&#xf669;" horiz-adv-x="544"
+d="M479.99 96h39.96c-42.6299 -94.1699 -137.641 -160 -247.98 -160c-4.25977 0 -8.5498 0.0898438 -12.8496 0.290039c-103.97 4.76953 -193.851 69.4795 -235.101 159.71h39.9102l-58.5996 58.5996c-2.57031 12.8809 -4.49023 25.9805 -5.11035 39.4102
+c-0.469727 10.0801 -0.129883 20.0703 0.5 29.9902h47.21l-41.3799 41.3799c14.3701 64.7002 52.1006 122.55 107.97 162.07c2.77051 1.95996 5.9707 3 9.27051 3c5.37988 0 10.4297 -2.70996 13.5098 -7.25c3.0498 -4.5 3.64062 -10 1.62012 -15.0898
+c-6.53027 -16.4502 -9.83984 -33.7002 -9.83984 -51.2607c0 -45.1191 21.04 -86.5801 57.71 -113.739c4.00977 -2.9707 6.4502 -7.48047 6.69043 -12.3799c0.239258 -4.90039 -1.76074 -9.65039 -5.48047 -13.0107c-26.5498 -23.9795 -41.1699 -56.5 -41.1699 -91.5801
+c0 -60.0293 42.9502 -110.279 99.8896 -121.92l2.5 65.2607l-27.1602 -18.4805c-2.96973 -2 -7.40918 -1.7002 -10 0.75c-2.72949 2.61035 -3.30957 6.70996 -1.38965 9.94043l20.1299 33.7695l-42.0693 8.71973c-3.71094 0.75 -6.38086 4.05078 -6.38086 7.83008
+c0 3.78027 2.68066 7.08008 6.38086 7.83008l42.0693 8.73047l-20.1094 33.7295c-1.94043 3.27051 -1.36035 7.35059 1.35938 9.94043c2.73047 2.60938 6.86035 2.89941 10 0.779297l30.3906 -20.6592l11.5195 287.97c0.160156 4.29004 3.66992 7.66992 8 7.66992h0.0400391
+c4.25293 0 7.81934 -3.44922 7.95996 -7.7002l11.5303 -287.93l30.3896 20.6699c3.03027 2.08984 7.2998 1.75 10 -0.799805c2.71973 -2.60059 3.2998 -6.68066 1.37988 -9.91016l-20.1299 -33.7705l42.0703 -8.72949c3.68945 -0.770508 6.37988 -4.06055 6.37988 -7.83008
+c0 -3.78027 -2.67969 -7.08008 -6.37988 -7.83008l-42.0703 -8.71973l20.1104 -33.7305c0.631836 -1.05078 1.14453 -2.89844 1.14453 -4.12402c0 -1.89355 -1.11328 -4.49023 -2.48438 -5.7959c-2.63086 -2.49023 -7.04004 -2.85938 -10.0205 -0.799805l-27.1699 18.4697
+l2.5 -65.3398c48.4697 9.40039 87.5703 48.1504 97.3096 96.5c8.78027 43.5605 -5.63965 87.3203 -38.5693 117.07c-3.73047 3.37012 -5.73047 8.10938 -5.49023 13.0303c0.240234 4.89941 2.67969 9.41992 6.7002 12.3994c36.6602 27.1602 57.6895 68.6104 57.6895 113.73
+c0 17.5801 -3.30957 34.8496 -9.85938 51.3096c-2.03027 5.09961 -1.44043 10.5996 1.60938 15.0898c3.08008 4.53027 8.12012 7.24023 13.4902 7.24023c3.28027 0 6.48047 -1.03027 9.25 -2.99023c55.4805 -39.2197 93.4102 -97.4795 107.91 -162.27l-41.25 -41.2402
+h46.9502c0.370117 -5.75977 1.0498 -11.46 1.0498 -17.2695c0 -17.7402 -1.83984 -35.0605 -5.12988 -51.8604z" />
+ <glyph glyph-name="journal-whills" unicode="&#xf66a;" horiz-adv-x="448"
+d="M448 89.5996c0 -9.59961 -3.2002 -16 -9.59961 -19.1992c-3.2002 -12.8008 -3.2002 -57.6006 0 -73.6006c6.39941 -6.39941 9.59961 -12.7998 9.59961 -19.2002v-16c0 -16 -12.7998 -25.5996 -25.5996 -25.5996h-326.4c-54.4004 0 -96 41.5996 -96 96v320
+c0 54.4004 41.5996 96 96 96h326.4c16 0 25.5996 -9.59961 25.5996 -25.5996v-332.801zM133.08 303.61c-2.98047 -10.0908 -5.08008 -20.5605 -5.07031 -31.6201c0 -0.520508 0.140625 -0.990234 0.150391 -1.50977l37.1094 -32.4707
+c3.33008 -2.89941 3.6709 -7.9502 0.75 -11.2793c-1.5791 -1.81055 -3.7998 -2.73047 -6.01953 -2.73047h-0.0175781c-1.65527 0 -4.00879 0.886719 -5.25195 1.98047l-23.5908 20.6396c11.54 -49.5801 55.7705 -86.6201 108.86 -86.6201s97.3203 37.04 108.87 86.6299
+l-23.5898 -20.6396c-1.52051 -1.32031 -3.39062 -1.98047 -5.27051 -1.98047h-0.0146484c-2 0 -4.69043 1.22363 -6.00488 2.73047c-1.09668 1.24707 -1.98633 3.60645 -1.98633 5.2666c0 2.00293 1.22559 4.69727 2.73633 6.0127l37.1094 32.4707
+c0.0107422 0.519531 0.150391 0.990234 0.150391 1.50977c0 11.0498 -2.09961 21.5195 -5.07031 31.5996l-21.2598 -21.2598c-1.57031 -1.55957 -3.61035 -2.33984 -5.66016 -2.33984s-4.09961 0.780273 -5.66016 2.33984c-3.11914 3.12012 -3.11914 8.19043 0 11.3105
+l26.4199 26.4199c-10 20.8994 -26.2393 37.9795 -46.3691 49.2598c5.97949 -9.73047 9.59961 -21.0703 9.59961 -33.3301c0 -19.96 -9.33008 -37.5703 -23.6602 -49.3096c9.65039 -10.0605 15.6602 -23.6504 15.6602 -38.6904c0 -26.9404 -19.04 -49.4004 -44.3701 -54.7402
+l-1.42969 34.2803l12.6797 -8.62012c0.69043 -0.459961 1.46973 -0.689453 2.25 -0.689453c0.980469 0 1.98047 0.369141 2.75 1.08984c1.36035 1.2793 1.63965 3.33984 0.69043 4.94922l-8.54004 14.3105l17.9102 3.71973
+c1.85938 0.390625 3.18945 2.03027 3.18945 3.91992c0 1.89062 -1.33008 3.53027 -3.18945 3.91992l-17.9102 3.7207l8.54004 14.3096c0.308594 0.521484 0.55957 1.43652 0.55957 2.04297c0 0.950195 -0.55957 2.25293 -1.25 2.90723
+c-0.645508 0.59668 -1.88281 1.08105 -2.76172 1.08105c-0.672852 0 -1.67578 -0.300781 -2.23828 -0.670898l-14.2002 -9.65039l-4.67969 112.29c-0.0898438 2.13965 -1.86035 3.83008 -4 3.83008s-3.91016 -1.69043 -4 -3.83008l-4.62012 -110.81l-12.0098 8.15918
+c-1.56055 1.03027 -3.63965 0.890625 -5 -0.40918c-1.36035 -1.28027 -1.63965 -3.34082 -0.69043 -4.9502l8.54004 -14.3105l-17.9102 -3.71973c-1.85938 -0.389648 -3.18945 -2.03027 -3.18945 -3.91992s1.33008 -3.53027 3.18945 -3.91992l17.9102 -3.71973
+l-8.54004 -14.3105c-0.308594 -0.521484 -0.55957 -1.43652 -0.55957 -2.04297c0 -0.950195 0.55957 -2.25293 1.25 -2.90723c0.769531 -0.709961 1.75 -1.08984 2.75 -1.08984c0.780273 0 1.55957 0.240234 2.25 0.69043l10.3701 7.04004l-1.36035 -32.71
+c-25.3398 5.35938 -44.3799 27.8193 -44.3799 54.7598c0 15.04 6.00977 28.6299 15.6602 38.6904c-14.3301 11.7393 -23.6602 29.3496 -23.6602 49.3096c0 12.2598 3.62012 23.5996 9.61035 33.3398c-20.1299 -11.29 -36.3701 -28.3594 -46.3701 -49.2598l26.4199 -26.4199
+c3.12012 -3.12012 3.12012 -8.19043 0 -11.3105c-1.57031 -1.55957 -3.61035 -2.33984 -5.66016 -2.33984s-4.09961 0.780273 -5.66016 2.33984zM380.8 0v64h-284.8c-16 0 -32 -12.7998 -32 -32s12.7998 -32 32 -32h284.8z" />
+ <glyph glyph-name="kaaba" unicode="&#xf66b;" horiz-adv-x="576"
+d="M554.12 364.49c13.0703 -4.36035 21.8799 -16.5898 21.8799 -30.3604v-49.0098l-265 79.5098c-15.0596 4.5 -30.9502 4.5 -45.9805 0l-265.02 -79.5098v49.0098c0.000976562 12.7314 9.80273 26.332 21.8799 30.3604l235.771 78.5801
+c8.15723 2.71973 21.7559 4.92676 30.3545 4.92676s22.1982 -2.20703 30.3555 -4.92676zM274.22 333.97c9 2.7207 18.5498 2.7207 27.5898 0l274.2 -82.2598v-228.39c0 -15 -10.4199 -27.9902 -25.0596 -31.2402l-242.12 -53.7998
+c-5.67871 -1.2627 -15.0078 -2.28809 -20.8252 -2.28809s-15.1465 1.02539 -20.8252 2.28809l-242.12 53.7998c-14.6396 3.25977 -25.0596 16.2402 -25.0596 31.2402v228.38zM128 217.89v16.5801c0 5.28027 -5.01953 9.11035 -10.1104 7.7207l-80 -21.8203
+c-3.47949 -0.950195 -5.88965 -4.11035 -5.88965 -7.71973v-16.5801c0 -5.27051 5.01953 -9.10059 10.1104 -7.7207l80 21.8203c3.47949 0.950195 5.88965 4.11035 5.88965 7.71973zM272 257.17v16.5801c0 5.28027 -5.01953 9.11035 -10.1104 7.71973l-96 -26.1797
+c-3.47949 -0.950195 -5.88965 -4.11035 -5.88965 -7.71973v-16.5801c0 -5.28027 5.01953 -9.11035 10.1104 -7.7207l96 26.1807c3.47949 0.950195 5.88965 4.10938 5.88965 7.71973zM448 234.47v-16.5801v-0.00488281c0 -3.37109 2.63867 -6.82715 5.88965 -7.71484
+l80 -21.8203c5.09082 -1.38965 10.1104 2.44043 10.1104 7.7207v16.5801c0 3.60938 -2.41016 6.76953 -5.88965 7.71973l-80 21.8203c-5.09082 1.38965 -10.1104 -2.44043 -10.1104 -7.7207zM304 273.74v-16.5801v-0.00585938c0 -3.37012 2.63867 -6.82617 5.88965 -7.71387
+l96 -26.1807c5.09082 -1.38965 10.1104 2.44043 10.1104 7.7207v16.5791c0 3.61035 -2.41016 6.77051 -5.88965 7.7207l-96 26.1797c-5.09082 1.38965 -10.1104 -2.44043 -10.1104 -7.71973z" />
+ <glyph glyph-name="khanda" unicode="&#xf66d;" horiz-adv-x="511"
+d="M415.81 382c73.71 -40.2402 111.78 -123.85 90.1602 -207.51c-7.25 -28.0898 -22.3799 -53.5703 -41.25 -75.5898l-52.5098 -61.3105c-4.87012 -5.67969 -13.04 -7.22949 -19.6504 -3.70996l-79.3496 42.2305l-29.21 -20.3408l47.0801 -32.7793
+c1.66992 0.370117 3.22949 1.00977 5.00977 1.00977c13.25 0 23.9902 -10.7402 23.9902 -24c0 -13.25 -10.7402 -24 -23.9902 -24c-12.0898 0 -21.6797 9.11035 -23.3301 20.7598l-40.9102 28.4805v-30.1504c9.38086 -5.58008 15.9902 -15.3896 15.9902 -27.0996
+c0 -17.6699 -14.3203 -32 -31.9795 -32c-17.6602 0 -31.9805 14.3301 -31.9805 32c0 11.7197 6.60059 21.5293 15.9902 27.0996v29.9502l-40.6299 -28.2803c-1.64062 -11.6494 -11.2305 -20.7598 -23.3301 -20.7598c-13.25 0 -23.9902 10.75 -23.9902 24
+c0 13.2598 10.7402 24 23.9902 24c1.78027 0 3.33984 -0.639648 5.00977 -1.00977l47.0801 32.7793l-29.21 20.3301l-79.3496 -42.2295c-6.61035 -3.52051 -14.7803 -1.96973 -19.6504 3.71973l-55.9697 65.3604c-12.5703 14.6797 -23.3906 31.0693 -30.46 49.0596
+c-35.6602 90.6904 2.95996 186.391 81.4893 229.24c6.34082 3.5 15.0205 2.63965 20.0205 -2.7002c4.99023 -5.30957 6.45996 -12.9199 2.58984 -19.0801c-16.4902 -26.1602 -25.2002 -56.3896 -25.2002 -87.4697c0.180664 -53.1904 26.7598 -102.62 71.0303 -132.18
+l76.5898 -53.3301v19.8994l-44.0498 36.0908c-3.91016 4.20996 -5 10.0996 -2.81055 15.2793l7.85059 17.2402c-33.8506 19.2598 -56.9404 55.2402 -56.9404 96.9902c0 40.79 22.0205 76.1396 54.5898 95.7197l-5.21973 11.4404
+c-2.33008 5.5293 -0.929688 11.8301 3.57031 16.04l58.9902 52.8096l58.9893 -52.8203c4.5 -4.20996 5.91016 -10.5098 3.57031 -16.04l-5.21973 -11.4395c32.5693 -19.5801 54.5898 -54.9199 54.5898 -95.71c0 -41.7402 -23.0996 -77.7305 -56.9404 -96.9902
+l7.85059 -17.2402c2.18945 -5.18945 1.10938 -11.0801 -2.81055 -15.2793l-44.0498 -36.0908v-20.0996l76.6299 53.3496c44.5 29.7207 71.0801 79.1602 71.2705 132.41c0 31.0205 -8.70996 61.25 -25.2002 87.4102c-1.36328 2.15723 -2.46973 5.97949 -2.46973 8.53223
+c0 3.51465 1.96191 8.43652 4.37988 10.9883c4.99023 5.34961 12.9902 6.51953 19.3594 3.01953zM319.82 272c0 21.3203 -10.5801 40.1201 -26.6504 51.7695l-7.83008 -17.1797c-8.75 -24.5195 -8.75 -51.04 0 -75.5596l5.65039 -12.4102
+c17.3398 11.46 28.8301 31.0801 28.8301 53.3799zM191.89 272c0 -22.2998 11.5 -41.9297 28.8408 -53.3896l5.64941 12.4092c8.75 24.5303 8.75 51.04 0 75.5605l-7.83008 17.1797c-16.0801 -11.6396 -26.6602 -30.4395 -26.6602 -51.7598z" />
+ <glyph glyph-name="landmark" unicode="&#xf66f;"
+d="M501.62 355.89c6.24023 -2.33984 10.3799 -8.30957 10.3799 -14.9795v-36.9102c0 -8.83984 -7.16016 -16 -16 -16h-480c-8.83984 0 -16 7.16016 -16 16v36.9102c0 6.11914 4.65039 12.8301 10.3799 14.9795l234.39 90.0703
+c2.99902 1.12598 8.03223 2.04004 11.2354 2.04004s8.23633 -0.914062 11.2354 -2.04004zM64 256h64v-160h96v160h64v-160h96v160h64v-160h16c8.83984 0 16 -7.16016 16 -16v-48h-448v48c0 8.83984 7.16016 16 16 16h16v160zM496 0c8.83984 0 16 -7.16016 16 -16v-32
+c0 -8.83984 -7.16016 -16 -16 -16h-480c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h480z" />
+ <glyph glyph-name="mail-bulk" unicode="&#xf674;" horiz-adv-x="576"
+d="M160 0c25.5996 0 51.2002 22.4004 64 32c64 44.7998 83.2002 60.7998 96 70.4004v-134.4c0 -17.6699 -14.3301 -32 -32 -32h-256c-17.6699 0 -32 14.3301 -32 32v134.4c12.7998 -9.60059 32 -25.6006 96 -70.4004c12.7998 -9.59961 38.4004 -32 64 -32zM288 192
+c17.6699 0 32 -14.3301 32 -32v-19.2002c-25.5996 -19.2002 -22.4004 -19.2002 -115.2 -86.3994c-9.59961 -3.2002 -28.7998 -22.4004 -44.7998 -22.4004s-35.2002 19.2002 -44.7998 25.5996c-92.7998 67.2002 -89.6006 67.2002 -115.2 86.4004v16
+c0 17.6699 14.3301 32 32 32h256zM544 288c17.6699 0 32 -14.3301 32 -32v-192c0 -17.6699 -14.3301 -32 -32 -32h-192v134.4l-0.290039 -0.220703c-3.12012 32.4004 -30.5 57.8203 -63.71 57.8203h-96v32c0 17.6699 14.3301 32 32 32h320zM512 160v64h-64v-64h64zM160 256
+v-32h-96v192c0 17.6699 14.3301 32 32 32h320c17.6699 0 32 -14.3301 32 -32v-96h-224c-35.29 0 -64 -28.71 -64 -64z" />
+ <glyph glyph-name="menorah" unicode="&#xf676;" horiz-adv-x="640"
+d="M144 320c8.83984 0 16 -7.16016 16 -16v-144h-64v144c0 8.83984 7.16016 16 16 16h32zM240 320c8.83984 0 16 -7.16016 16 -16v-144h-64v144c0 8.83984 7.16016 16 16 16h32zM432 320c8.83984 0 16 -7.16016 16 -16v-144h-64v144c0 8.83984 7.16016 16 16 16h32zM528 320
+c8.83984 0 16 -7.16016 16 -16v-144h-64v144c0 8.83984 7.16016 16 16 16h32zM608 352c-17.6699 0 -32 14.3301 -32 32s32 64 32 64s32 -46.3301 32 -64s-14.3301 -32 -32 -32zM512 352c-17.6699 0 -32 14.3301 -32 32s32 64 32 64s32 -46.3301 32 -64s-14.3301 -32 -32 -32
+zM416 352c-17.6699 0 -32 14.3301 -32 32s32 64 32 64s32 -46.3301 32 -64s-14.3301 -32 -32 -32zM320 352c-17.6699 0 -32 14.3301 -32 32s32 64 32 64s32 -46.3301 32 -64s-14.3301 -32 -32 -32zM224 352c-17.6699 0 -32 14.3301 -32 32s32 64 32 64s32 -46.3301 32 -64
+s-14.3301 -32 -32 -32zM128 352c-17.6699 0 -32 14.3301 -32 32s32 64 32 64s32 -46.3301 32 -64s-14.3301 -32 -32 -32zM32 352c-17.6699 0 -32 14.3301 -32 32s32 64 32 64s32 -46.3301 32 -64s-14.3301 -32 -32 -32zM576 160v144c0 8.83984 7.16016 16 16 16h32
+c8.83984 0 16 -7.16016 16 -16v-144c0 -53.0195 -42.9805 -96 -96 -96h-192v-64h176c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-416c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h176v64h-192c-53.0195 0 -96 42.9805 -96 96
+v144c0 8.83984 7.16016 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-144c0 -17.6699 14.3301 -32 32 -32h192v176c0 8.83984 7.16016 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-176h192c17.6699 0 32 14.3301 32 32z" />
+ <glyph glyph-name="mosque" unicode="&#xf678;" horiz-adv-x="640"
+d="M0 -32v320h128v-320c0 -17.6699 -14.3301 -32 -32 -32h-64c-17.6699 0 -32 14.3301 -32 32zM579.16 160h-358.32c-17.8594 17.3896 -28.8398 37.3398 -28.8398 58.9102c0 52.8594 41.79 93.79 87.9199 122.899c41.9502 26.46 80.6299 57.7705 111.96 96.2207
+l8.12012 9.96973l8.12012 -9.96973c31.3301 -38.4502 70.0195 -69.75 111.96 -96.2207c46.1299 -29.1094 87.9199 -70.04 87.9199 -122.899c0 -21.5703 -10.9805 -41.5205 -28.8398 -58.9102zM608 128c17.6699 0 32 -14.3301 32 -32v-128c0 -17.6699 -14.3301 -32 -32 -32
+h-32v64c0 17.6699 -14.3301 32 -32 32s-32 -14.3301 -32 -32v-64h-64v72c0 48 -48 72 -48 72s-48 -24 -48 -72v-72h-64v64c0 17.6699 -14.3301 32 -32 32s-32 -14.3301 -32 -32v-64h-32c-17.6699 0 -32 14.3301 -32 32v128c0 17.6699 14.3301 32 32 32h416zM64 448
+c0 0 64 -32 64 -96v-32h-128v32c0 64 64 96 64 96z" />
+ <glyph glyph-name="om" unicode="&#xf679;"
+d="M360.6 387.06l-21.5801 21.5605c-1.68848 1.68652 -3.05957 4.99316 -3.05957 7.37988s1.37109 5.69336 3.05957 7.37988l21.5703 21.5605c4.08008 4.06934 10.6797 4.06934 14.7598 0l21.5801 -21.5605c1.68945 -1.68652 3.06055 -4.99316 3.06055 -7.37988
+s-1.37109 -5.69336 -3.06055 -7.37988l-21.5693 -21.5605c-1.6875 -1.68848 -4.99316 -3.05957 -7.37988 -3.05957c-2.3877 0 -5.69336 1.37109 -7.38086 3.05957zM412.11 256c55.0898 0 99.8896 -44.7998 99.8896 -99.8799v-100.12c0 -48.5303 -47.4805 -88 -96.0195 -88
+c-96.0205 0 -96.0205 64 -96.0205 64v37.8701c0 7.55957 9.42969 10.8896 14.2002 5.01953c15.1494 -18.6494 42.4199 -42.8896 81.8203 -42.8896c13.2393 0 32.0098 10.7695 32.0098 24v100.12c0 19.79 -16.1006 35.8799 -35.8799 35.8799
+c-9.60059 0 -18.6006 -3.73047 -25.3799 -10.5l-24.25 -24.25c-18.8701 -18.8604 -43.9502 -29.25 -70.6406 -29.25h-21.6895c11.0293 -18.9004 17.8096 -40.5801 17.8096 -64c0 -70.5801 -57.4297 -128 -128.02 -128c-118.811 0 -160.03 96 -159.94 150.81
+c0.0195312 8.80078 10.2598 12.7705 14.79 5.2207c22.7998 -38.0107 49.1299 -92.0303 145.15 -92.0303c35.2998 0 64.0098 28.7002 64.0098 64s-28.71 64 -64.0098 64h-33.0303c-5.57031 0.450195 -10.6406 3.49023 -13.1699 8.5498l-16.0898 32.1699
+c-5.35059 10.7002 2.42969 23.2803 14.3896 23.2803h31.9004c26.4697 0 48.0098 21.5303 48.0098 48s-21.54 48 -48.0098 48c-11.25 0 -21.8203 -3.80957 -30.2705 -10.71c-5.54004 -4.53027 -13.4795 -4.50977 -19.2002 -0.209961l-26.1494 19.6299
+c-8.08984 6.08008 -8.48047 17.9697 -1.12012 24.9297c25.1094 23.7402 59.8594 34.71 96.0098 28.7803c43.1602 -7.08008 79.4199 -40.6396 89.5205 -83.1895c6.43945 -27.1201 2.80957 -53.1309 -7.73047 -75.2305h46.8398c9.60059 0 18.6006 3.73047 25.3799 10.5
+l24.25 24.25c18.8701 18.8604 43.9502 29.25 70.6406 29.25zM454.29 380.73c10.5596 7.95996 25.7002 0.489258 25.7002 -12.7305v-35.5195c0 -2.36035 -0.509766 -4.71094 -1.53027 -6.83008c-2.96973 -6.30078 -21.8301 -37.6602 -101.75 -37.6602
+c-78.4297 0 -117.19 69.3896 -118.8 72.3398c-3.61035 6.62988 -2.08008 14.8799 3.66016 19.7998c5.69922 4.92969 14.1201 5.16992 20.1396 0.549805c3.53027 -2.69922 87.0303 -65.0693 172.58 0.0507812z" />
+ <glyph glyph-name="pastafarianism" unicode="&#xf67b;" horiz-adv-x="640"
+d="M624.54 100.33c12.4004 -4.71973 18.5996 -18.5801 13.8896 -30.9805c-4.69922 -12.4092 -18.5801 -18.6299 -30.9697 -13.8799c-8.11035 3.08984 -14.3398 0.19043 -31.3896 -11.3594c-13.5508 -9.15039 -30.8301 -20.8408 -52.4199 -20.8408
+c-7.16992 0 -14.8301 1.28027 -22.9707 4.39062c-32.6602 12.4395 -39.9893 41.3301 -45.3301 62.4395c-2.20996 8.7207 -3.98926 14.4902 -5.94922 18.8701c-16.6201 -13.5996 -36.9307 -25.8701 -61.6201 -34.1602c10.0098 -37 32.2793 -90.8096 60.2197 -90.8096
+c13.25 0 24 -10.75 24 -24s-10.75 -24 -24 -24c-66.7402 0 -97.0498 88.6299 -107.42 129.14c-6.69043 -0.599609 -13.4199 -1.13965 -20.5801 -1.13965s-13.8896 0.540039 -20.5801 1.13965c-10.3701 -40.5098 -40.6797 -129.14 -107.42 -129.14c-13.25 0 -24 10.75 -24 24
+s10.75 24 24 24c28.0801 0 50.2998 53.7998 60.2598 90.7998c-24.6895 8.29004 -45.0195 20.5605 -61.6396 34.1699c-1.95996 -4.37988 -3.74023 -10.1494 -5.9502 -18.8701c-5.34961 -21.1094 -12.6699 -50 -45.3301 -62.4395
+c-8.13965 -3.11035 -15.7998 -4.39062 -22.9697 -4.39062c-21.5898 -0.0195312 -38.8701 11.6807 -52.4199 20.8408c-17.0498 11.5498 -23.2305 14.4492 -31.3906 11.3594c-12.3594 -4.72949 -26.25 1.4707 -30.9697 13.8799
+c-4.71973 12.3906 1.48047 26.25 13.8701 30.9707c32.6504 12.4697 57.3398 -4.25 75.3701 -16.4502c17.0801 -11.5303 23.2998 -14.4199 31.4102 -11.3604c8.12012 3.10059 10.8301 9.37988 15.8896 29.3799c3.33008 13.1504 7.44043 29.3203 17.9502 42.6504
+c-2.24023 2.91016 -4.42969 5.78027 -6.37988 8.57031c-10.1699 -9.56055 -23.4102 -17.1104 -41.7002 -17.1104c-33.9502 0 -50.8701 25.7803 -62.0596 42.8301c-10.6006 16.1396 -15 21.1699 -21.9404 21.1699c-13.25 0 -24 10.75 -24 24s10.75 24 24 24
+c33.96 0 50.8799 -25.7803 62.0596 -42.8301c10.6006 -16.1396 15 -21.1699 21.9404 -21.1699c17.1504 0 37.6797 61.5596 97.2695 101.9l-17.25 34.5c-33.46 2.09961 -60.0195 29.6191 -60.0195 63.5996c0 35.3496 28.6504 64 64 64s64 -28.6504 64 -64
+c0 -13.0195 -3.94043 -25.0996 -10.5996 -35.21l18.1494 -36.2998c16.9697 4.59961 35.6006 7.50977 56.46 7.50977c20.8604 0 39.4805 -2.91016 56.46 -7.50977l18.1504 36.2998c-6.67969 10.1104 -10.6201 22.1904 -10.6201 35.21c0 35.3496 28.6504 64 64 64
+s64 -28.6504 64 -64c0 -33.9805 -26.5703 -61.5 -60.0098 -63.5898l-17.25 -34.5c59.7793 -40.4805 79.9502 -101.91 97.2598 -101.91c6.94043 0 11.3398 5.03027 21.9404 21.1699c11.1895 17.0498 28.1094 42.8301 62.0596 42.8301c13.25 0 24 -10.75 24 -24
+s-10.75 -24 -24 -24c-6.94043 0 -11.3496 -5.03027 -21.9404 -21.1699c-11.1895 -17.0498 -28.1094 -42.8301 -62.0596 -42.8301c-18.29 0 -31.5303 7.5498 -41.7002 17.1201c-1.9502 -2.78027 -4.13965 -5.66016 -6.37988 -8.57031
+c10.5098 -13.3301 14.6201 -29.5 17.9502 -42.6494c5.05957 -20 7.76953 -26.29 15.8896 -29.3809c8.16016 -3.05957 14.3506 -0.169922 31.4102 11.3604c18.0098 12.2002 42.6699 28.9697 75.3701 16.4502zM448 400c-8.82031 0 -16 -7.17969 -16 -16s7.17969 -16 16 -16
+s16 7.17969 16 16s-7.17969 16 -16 16zM192 400c-8.82031 0 -16 -7.17969 -16 -16s7.17969 -16 16 -16s16 7.17969 16 16s-7.17969 16 -16 16z" />
+ <glyph glyph-name="peace" unicode="&#xf67c;" horiz-adv-x="496"
+d="M248 440c136.97 0 248 -111.03 248 -248s-111.03 -248 -248 -248s-248 111.03 -248 248s111.03 248 248 248zM432 192c0 90.5303 -65.7695 165.82 -152 181.03v-165.66l129.43 -103.54c14.3701 26.2002 22.5703 56.2402 22.5703 88.1699zM216 10.9697v114.46
+l-89.29 -71.4395c24.7998 -21.8203 55.4297 -37.0498 89.29 -43.0205zM280 125.43v-114.449c33.8604 5.96973 64.4902 21.1992 89.29 43.0195zM216 373.03c-86.2305 -15.21 -152 -90.5 -152 -181.03c0 -31.9297 8.2002 -61.9697 22.5703 -88.1699l129.43 103.54v165.66z" />
+ <glyph glyph-name="place-of-worship" unicode="&#xf67f;" horiz-adv-x="640"
+d="M620.61 81.4502c10.7031 -4.58887 19.3896 -17.7646 19.3896 -29.4102v-100.04c0 -8.83984 -7.16016 -16 -16 -16h-112v192zM0 52.04c0 11.6455 8.68652 24.8213 19.3896 29.4102l108.61 46.5498v-192h-112c-8.83984 0 -16 7.16016 -16 16v100.04zM464.46 201.32
+c9.63965 -5.78027 15.54 -16.2002 15.54 -27.4404v-237.88h-96v96c0 35.3496 -28.6602 64 -64 64s-64 -28.6504 -64 -64v-96h-96v237.88c0 10.0029 6.96191 22.2959 15.54 27.4404l48.46 29.0801v114.97c0 8.49023 3.37988 16.6299 9.37988 22.6299l75.3105 75.3096
+c6.23926 6.25 16.3691 6.25 22.6191 0l75.3105 -75.3096c6.00977 -6.00977 9.37988 -14.1396 9.37988 -22.6299v-114.97z" />
+ <glyph glyph-name="poll" unicode="&#xf681;" horiz-adv-x="448"
+d="M400 416c26.5 0 48 -21.5 48 -48v-352c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352zM160 80v128c0 8.83984 -7.16016 16 -16 16h-32c-8.83984 0 -16 -7.16016 -16 -16v-128c0 -8.83984 7.16016 -16 16 -16h32
+c8.83984 0 16 7.16016 16 16zM256 80v224c0 8.83984 -7.16016 16 -16 16h-32c-8.83984 0 -16 -7.16016 -16 -16v-224c0 -8.83984 7.16016 -16 16 -16h32c8.83984 0 16 7.16016 16 16zM352 80v64c0 8.83984 -7.16016 16 -16 16h-32c-8.83984 0 -16 -7.16016 -16 -16v-64
+c0 -8.83984 7.16016 -16 16 -16h32c8.83984 0 16 7.16016 16 16z" />
+ <glyph glyph-name="poll-h" unicode="&#xf682;" horiz-adv-x="448"
+d="M448 16c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48v352c0 26.5 21.5 48 48 48h352c26.5 0 48 -21.5 48 -48v-352zM112 256h128c8.83984 0 16 7.16016 16 16v32c0 8.83984 -7.16016 16 -16 16h-128c-8.83984 0 -16 -7.16016 -16 -16v-32
+c0 -8.83984 7.16016 -16 16 -16zM112 160h224c8.83984 0 16 7.16016 16 16v32c0 8.83984 -7.16016 16 -16 16h-224c-8.83984 0 -16 -7.16016 -16 -16v-32c0 -8.83984 7.16016 -16 16 -16zM112 64h64c8.83984 0 16 7.16016 16 16v32c0 8.83984 -7.16016 16 -16 16h-64
+c-8.83984 0 -16 -7.16016 -16 -16v-32c0 -8.83984 7.16016 -16 16 -16z" />
+ <glyph glyph-name="pray" unicode="&#xf683;" horiz-adv-x="383"
+d="M256 320c-35.3496 0 -64 28.6504 -64 64s28.6504 64 64 64s64 -28.6504 64 -64s-28.6504 -64 -64 -64zM225.37 150.25l-24.1504 28.7598l-34.7998 -64.8701l109.86 -109.859c25.4893 -25.4902 5.7998 -68.2803 -28.2803 -68.2803h-208c-22.0898 0 -40 17.9102 -40 40
+s17.9102 40 40 40h91.5596l-44.8096 34.8896c-42.8799 27.3799 -57.5898 80.1104 -34.1904 123.75l49.3701 92.0303c11.1201 20.6504 32.1807 34.4404 56.3701 36.9199c24.7803 2.58984 48.5605 -6.93945 64 -25.3301l38.9102 -46.3096l57.4404 47
+c17.1191 13.9697 42.3398 11.4902 56.3096 -5.62012c13.9697 -17.0898 11.4697 -42.2998 -5.62012 -56.2803l-88 -72.0195c-16.9697 -13.8701 -41.9102 -11.5 -55.9697 5.21973z" />
+ <glyph glyph-name="praying-hands" unicode="&#xf684;" horiz-adv-x="640"
+d="M272 256.09c17.5996 0 32 -14.3994 32 -32v-128c0 -51.8896 -34.8398 -98.0801 -84.75 -112.35l-179.19 -46.6201c-2.64941 -0.69043 -5.36914 -1.03027 -8.05957 -1.03027c-23.4805 0 -32 21.1797 -32 32v96v0.0117188c0 12.7285 9.80176 26.3203 21.8799 30.3389
+l90.1201 30.04v80.2295c0 18.9805 5.55957 37.3896 16.1201 53.2305l117.26 175.899c0.169922 0.270508 0.589844 0.25 0.790039 0.480469c9.58008 13.5098 27.8496 17.8799 42.2998 9.20996c15.1602 -9.10059 20.0605 -28.75 10.9707 -43.9102l-77.75 -129.59
+c-8.9707 -14.9199 -13.6904 -32 -13.6904 -49.3906v-76.5498c0 -8.83984 7.16016 -16 16 -16s16 7.16016 16 16v80c0 17.6006 14.4004 32 32 32zM618.12 94.3604c13.0703 -4.36035 21.8799 -16.5801 21.8799 -30.3506v-96c0 -10.8193 -8.51953 -32 -32 -32
+c-2.67969 0 -5.40039 0.339844 -8.05957 1.03027l-179.19 46.6201c-49.9102 14.2598 -84.75 60.4502 -84.75 112.34v128c0 17.5996 14.4004 32 32 32s32 -14.4004 32 -32v-80c0 -8.83984 7.16016 -16 16 -16s16 7.16016 16 16v76.5498
+c0 17.3906 -4.71973 34.4697 -13.6904 49.3906l-77.75 129.59c-9.08984 15.1602 -4.18945 34.8193 10.9707 43.9102c14.4502 8.66992 32.7197 4.2998 42.2998 -9.20996c0.200195 -0.240234 0.610352 -0.210938 0.790039 -0.480469l117.26 -175.89
+c10.5605 -15.8408 16.1201 -34.25 16.1201 -53.2305v-80.2295z" />
+ <glyph glyph-name="quran" unicode="&#xf687;" horiz-adv-x="448"
+d="M448 89.5996c0 -9.59961 -3.2002 -16 -9.59961 -19.1992c-3.2002 -12.8008 -3.2002 -57.6006 0 -73.6006c6.39941 -6.39941 9.59961 -12.7998 9.59961 -19.2002v-16c0 -16 -12.7998 -25.5996 -25.5996 -25.5996h-326.4c-54.4004 0 -96 41.5996 -96 96v320
+c0 54.4004 41.5996 96 96 96h326.4c16 0 25.5996 -9.59961 25.5996 -25.5996v-332.801zM301.08 302.18l-11.1904 -22.6494l-24.9893 -3.62988c-2.68066 -0.390625 -3.75 -3.66992 -1.81055 -5.56055l18.0898 -17.6299l-4.26953 -24.8896
+c-0.360352 -2.11035 1.30957 -3.82031 3.20996 -3.82031c0.5 0 1.01953 0.120117 1.51953 0.379883l22.3604 11.75l22.3604 -11.75c0.5 -0.259766 1.01953 -0.379883 1.51953 -0.379883c1.90039 0 3.57031 1.70996 3.20996 3.82031l-4.26953 24.8896l18.0898 17.6299
+c1.92969 1.89062 0.859375 5.16992 -1.81055 5.56055l-25 3.62988l-11.1797 22.6494c-0.599609 1.20996 -1.75977 1.82031 -2.91992 1.82031s-2.32031 -0.610352 -2.91992 -1.82031zM243.19 371.19c-63.5205 0 -115.19 -51.6709 -115.19 -115.19
+c0 -63.5098 51.6699 -115.19 115.18 -115.18c13.6006 0 27.1201 2.46973 40.1904 7.33984c2.67969 0.910156 4.62012 3.43945 4.62012 6.41992c0 3.63965 -2.87012 6.78027 -6.7998 6.78027c-0.650391 0 -3.10059 -0.209961 -4.13086 -0.209961
+c-52.3096 0 -94.8594 42.5596 -94.8594 94.8594c0 52.3105 42.5498 94.8604 94.8594 94.8604c1.04004 0 3.45996 -0.209961 4.13086 -0.209961c0.633789 -0.237305 1.69727 -0.429688 2.375 -0.429688c3.73926 0 6.77441 3.03516 6.77441 6.77441
+c0 3.7373 -3.03223 6.77246 -6.76953 6.77539c-13.1201 4.91992 -26.71 7.41016 -40.3799 7.41016zM380.8 0v64h-284.8c-16 0 -32 -12.7998 -32 -32s12.7998 -32 32 -32h284.8z" />
+ <glyph glyph-name="search-dollar" unicode="&#xf688;"
+d="M505.04 5.33984c9.2998 -9.39941 9.2998 -24.5898 -0.0996094 -33.9902l-28.3008 -28.2998c-9.2998 -9.39941 -24.5 -9.39941 -33.8994 0l-99.71 99.6904c-4.5 4.5 -7 10.5996 -7 17v16.2998c-35.2998 -27.5996 -79.71 -44 -128.011 -44
+c-114.909 0 -208.02 93.0898 -208.02 207.979c0 114.891 93.1201 207.98 208.02 207.98c114.9 0 208.011 -93.0898 208.011 -207.98c0 -48.2998 -16.4004 -92.6895 -44 -127.989h16.2998c6.40039 0 12.5 -2.5 17 -7zM208.02 96.04c79.6504 0 144 64.4502 144 143.979
+c0 79.6406 -64.46 143.98 -144 143.98c-79.6494 0 -144 -64.4502 -144 -143.98c0 -79.6396 64.4609 -143.979 144 -143.979zM235.13 248.58c18.6006 -5.58008 31.5898 -23.4199 31.5898 -43.3896c0 -24.5303 -19.0498 -44.4404 -42.6797 -45.0703v-16.1201
+c0 -4.41992 -3.58008 -8 -8 -8h-16c-4.41992 0 -8 3.58008 -8 8v16.29c-11.29 0.580078 -22.2705 4.51953 -31.3701 11.3496c-3.90039 2.93066 -4.09961 8.77051 -0.570312 12.1406l11.75 11.21c2.77051 2.63965 6.89062 2.75977 10.1309 0.729492
+c3.85938 -2.42969 8.25977 -3.71973 12.8193 -3.71973h28.1006c6.5 0 11.7998 5.91992 11.7998 13.1904c0 5.94922 -3.61035 11.1797 -8.77051 12.7295l-45.0098 13.5c-18.5898 5.58008 -31.5801 23.4199 -31.5801 43.3896c0 24.5205 19.0498 44.4404 42.6797 45.0703
+v16.1201c0 4.41992 3.58008 8 8 8h16c4.4209 0 8 -3.58008 8 -8v-16.29c11.29 -0.580078 22.2705 -4.51953 31.3701 -11.3496c3.90039 -2.93066 4.10059 -8.77051 0.570312 -12.1406l-11.75 -11.21c-2.76953 -2.63965 -6.88965 -2.75977 -10.1299 -0.729492
+c-3.86035 2.42969 -8.25977 3.71973 -12.8203 3.71973h-28.1094c-6.5 0 -11.8008 -5.91992 -11.8008 -13.1904c0 -5.94922 3.61035 -11.1797 8.77051 -12.7295z" />
+ <glyph glyph-name="search-location" unicode="&#xf689;"
+d="M505.04 5.33984c9.2998 -9.39941 9.2998 -24.5898 -0.0996094 -33.9902l-28.3008 -28.2998c-9.2998 -9.39941 -24.5 -9.39941 -33.8994 0l-99.71 99.6904c-4.5 4.5 -7 10.5996 -7 17v16.2998c-35.2998 -27.5996 -79.71 -44 -128.011 -44
+c-114.909 0 -208.02 93.0898 -208.02 207.979c0 114.891 93.1201 207.98 208.02 207.98c114.9 0 208.011 -93.0898 208.011 -207.98c0 -48.2998 -16.4004 -92.6895 -44 -127.989h16.2998c6.40039 0 12.5 -2.5 17 -7zM208.02 96.04c79.6504 0 144 64.4502 144 143.979
+c0 79.6406 -64.46 143.98 -144 143.98c-79.6494 0 -144 -64.4502 -144 -143.98c0 -79.6396 64.4609 -143.979 144 -143.979zM208.04 336c40.7803 0 73.8398 -33.0498 73.8398 -73.8301c0 -32.9697 -48.2598 -93.0498 -66.75 -114.86
+c-1.5293 -1.8291 -4.70508 -3.31445 -7.08984 -3.31445s-5.56055 1.48535 -7.08984 3.31445c-18.4902 21.8105 -66.75 81.9004 -66.75 114.86c0 40.7803 33.0596 73.8301 73.8398 73.8301zM208.04 240c13.25 0 24 10.75 24 24c0 13.2598 -10.75 24 -24 24
+s-24 -10.7402 -24 -24c0 -13.25 10.7402 -24 24 -24z" />
+ <glyph glyph-name="socks" unicode="&#xf696;"
+d="M214.66 136.99c-52.7402 -39.5605 -69.0498 -110.021 -39.2002 -165.4l-21.8604 -16.3896c-17.2695 -12.9502 -37.4893 -19.2002 -57.5195 -19.2002c-32.8105 0 -65.6904 16.75 -83.9404 48.3301c-24.6494 42.6396 -10.1895 97.5 29.21 127.06l86.6504 64.6104v176h160
+v-160zM288 416v-32h-160v32c0 17.6699 14.3301 32 32 32h128c2.84961 0 5.40039 -0.919922 8.01953 -1.62012c-4.94922 -9.08008 -8.01953 -19.3301 -8.01953 -30.3799zM480 448c17.6699 0 32 -14.3301 32 -32v-32h-192v32c0 17.6699 14.3301 32 32 32h128zM320 176v176h192
+v-208v-0.00195312c0 -35.3271 -22.9375 -81.2021 -51.2002 -102.398l-115.2 -86.3994c-17.2695 -12.9502 -37.4893 -19.2002 -57.5195 -19.2002c-32.8105 0 -65.1699 16.75 -83.4199 48.3301c-24.6504 42.6396 -10.1904 97.5 29.21 127.06z" />
+ <glyph glyph-name="square-root-alt" unicode="&#xf698;" horiz-adv-x="576"
+d="M571.31 196.69c6.25 -6.25 6.25 -16.3809 0 -22.6309l-46.0596 -46.0596l46.0596 -46.0596c6.25 -6.25 6.25 -16.3809 0 -22.6309l-22.6191 -22.6191c-6.25 -6.25 -16.3809 -6.25 -22.6309 0l-46.0596 46.0596l-46.0596 -46.0596c-6.25 -6.25 -16.3809 -6.25 -22.6309 0
+l-22.6191 22.6191c-6.25 6.25 -6.25 16.3809 0 22.6309l46.0596 46.0596l-46.0596 46.0596c-6.25 6.25 -6.25 16.3809 0 22.6309l22.6191 22.6191c6.25 6.25 16.3809 6.25 22.6309 0l46.0596 -46.0596l46.0596 46.0596c6.25 6.25 16.3809 6.25 22.6309 0zM552 448
+c13.25 0 24 -10.7402 24 -24v-48c0 -13.25 -10.75 -24 -24 -24h-194.97l-97.8105 -374.52c-9.83984 -32.4805 -37.0098 -41.4805 -54.2793 -41.4805c-18.6406 0 -35.9502 8.5 -48.4404 28.2695l-88.8799 163.73h-43.6201c-13.25 0 -24 10.75 -24 24v48
+c0 13.2598 10.75 24 24 24h81.4697c10.4385 -0.00292969 22.9961 -7.42578 28.0303 -16.5703l58.4102 -106.1l84.79 322.8c3.68945 14.0703 16.4102 23.8701 30.9502 23.8701h244.35z" />
+ <glyph glyph-name="star-and-crescent" unicode="&#xf699;" horiz-adv-x="511"
+d="M340.47 -18.3604c8.74023 0 15.1299 -6.96973 15.1299 -15.0596c0 -6.62012 -4.31934 -12.2402 -10.2793 -14.2598c-29.04 -10.8301 -59.0898 -16.3203 -89.3203 -16.3203c-141.16 0 -256 114.84 -256 256s114.84 256 256 256c30.3896 0 60.5801 -5.54004 89.75 -16.4805
+c6.91992 -2.59961 10.9297 -9.83984 9.46973 -17.0898c-1.41992 -7.04004 -7.62012 -12.0693 -14.75 -12.0693c-1.47949 0 -6.85938 0.459961 -9.17969 0.459961c-116.25 0 -210.82 -94.5703 -210.82 -210.82s94.5703 -210.82 210.82 -210.82
+c2.29004 0 7.72949 0.459961 9.17969 0.459961zM503.46 234.14c8.16992 -1.17969 11.4297 -11.2197 5.52051 -16.9893l-55.2705 -53.8701l13.0498 -76.0703c1.11035 -6.42969 -4.00977 -11.6602 -9.80957 -11.6602c-1.53027 0 -3.11035 0.370117 -4.64062 1.16992
+l-68.3096 35.9102l-68.3301 -35.9102c-1.53027 -0.80957 -3.11035 -1.16992 -4.63965 -1.16992c-5.7998 0 -10.9199 5.21973 -9.81055 11.6602l13.0498 76.0703l-55.2695 53.8701c-5.91016 5.76953 -2.65039 15.8096 5.51953 16.9893l76.3809 11.1006l34.1592 69.21
+c1.83008 3.7002 5.38086 5.5498 8.93066 5.5498s7.09961 -1.84961 8.92969 -5.5498l34.1602 -69.21z" />
+ <glyph glyph-name="star-of-david" unicode="&#xf69a;" horiz-adv-x="464"
+d="M405.68 192l53.2207 -89.3896c14.4092 -24.21 -3.41016 -54.6104 -32.0107 -54.6104h-106.93l-55.9502 -93.9805c-7.14941 -12.0098 -19.5801 -18.0195 -32.0098 -18.0195s-24.8604 6.00977 -32.0098 18.0195l-55.9502 93.9805h-106.93
+c-28.6006 0 -46.4199 30.4004 -32.0107 54.6104l53.2207 89.3896l-53.2207 89.3896c-14.4092 24.21 3.41016 54.6104 32.0107 54.6104h106.93l55.9502 93.9805c7.14941 12.0098 19.5801 18.0195 32.0098 18.0195s24.8604 -6.00977 32.0098 -18.0195l55.9502 -93.9805h106.92
+c28.6006 0 46.4199 -30.4004 32.0098 -54.6104zM392.9 280h-39.6006l19.7998 -33.2598zM340.51 192l-52.3896 88h-112.25l-52.3799 -88l52.3896 -88h112.24zM232 374.28l-22.7803 -38.2803h45.5703zM71.0996 280l19.8008 -33.2598l19.7998 33.2598h-39.6006zM71.0996 104
+h39.6006l-19.7998 33.2598zM232 9.71973l22.7803 38.2803h-45.5703zM353.29 104h39.6104l-19.8105 33.2598z" />
+ <glyph glyph-name="synagogue" unicode="&#xf69b;" horiz-adv-x="640"
+d="M70 251.49c5.30957 6.00977 14.6904 6.00977 20 0l38 -43.0703v-272.42h-128v226.07v0.00683594c0 5.5625 2.98828 13.4629 6.66992 17.6328zM633.33 179.71c4.2998 -4.87012 6.66992 -11.1396 6.66992 -17.6396v-226.07h-128v272.42l38 43.0703
+c5.30957 6.01953 14.6904 6.01953 20 0zM339.99 440.99l128 -102.4c7.58984 -6.07031 12.0098 -15.2695 12.0098 -24.9902v-377.6h-96v96c0 38.8701 -34.6602 69.6504 -74.75 63.1201c-31.4697 -5.12012 -53.25 -34.6699 -53.25 -66.5498v-92.5703h-96v377.6
+c0 8.49023 5.38086 19.6865 12.0098 24.9902l128 102.4c11.6904 9.34961 28.29 9.34961 39.9805 0zM392.06 225.44l-19.1895 30.5596l19.2002 30.5703c1.97949 3.14941 -0.290039 7.23926 -4 7.23926h-38.9502l-25.1201 39.9805c-1.84961 2.9502 -6.15039 2.9502 -8 0
+l-25.1201 -40h-38.9395c-3.70996 0 -5.98047 -4.08984 -4 -7.24023l19.1992 -30.5596l-19.1992 -30.5605c-1.98047 -3.14941 0.279297 -7.23926 4 -7.23926h38.9395l25.1201 -39.9805c1.84961 -2.9502 6.15039 -2.9502 8 0l25.1201 39.9902h38.9395
+c3.70996 0 5.98047 4.08984 4 7.24023z" />
+ <glyph glyph-name="torah" unicode="&#xf6a0;" horiz-adv-x="640"
+d="M48 448c26.4688 -0.0273438 47.9727 -21.5312 48 -48v-80h-80c-8.83203 0 -16 7.16797 -16 16v64c0.0273438 26.4688 21.5312 47.9727 48 48zM395.14 219.44l17.4404 29.1699l17.4697 -29.1699h-34.9102zM336.05 167.37l-16.7998 28.0703h33.5801zM242 219.44
+l17.4199 29.1895l17.4805 -29.1895h-34.9004zM430 324.56l-17.4199 -29.1895l-17.4805 29.1895h34.9004zM256 35.4297c0 -31.8896 -21.7803 -61.4297 -53.25 -66.5498c-2.94531 -0.501953 -7.76172 -0.90918 -10.75 -0.90918c-35.3281 0 -64 28.6719 -64 64v0.0292969v368
+c-0.0527344 14.7783 -7.40527 36.2822 -16.4102 48h336.41c52.9365 -0.0546875 95.9453 -43.0635 96 -96v-256h-288v-60.5703zM217.41 338.43c-1.44824 -2.50977 -2.62402 -6.91797 -2.62402 -9.81543c0 -3.03125 1.27832 -7.59375 2.85352 -10.1846l27.79 -46.4297
+l-27.6602 -46.2695c-1.57324 -2.62305 -2.84961 -7.23242 -2.84961 -10.29c0 -11.0352 8.95508 -19.9951 19.9902 -20h56.3896l27.7002 -46.2803c3.14844 -5.34375 10.7363 -9.68066 16.9385 -9.68066c0.0175781 0 0.0449219 0 0.0615234 0.000976562
+c6.29199 0.00390625 13.9971 4.40332 17.2002 9.81934l27.5898 46.1201h56.2998h0.0302734c6.44922 0 14.2754 4.54688 17.4697 10.1504c1.45117 2.51172 2.62988 6.9043 2.62988 9.80566c0 3.03516 -1.28125 7.60156 -2.85938 10.1943l-27.79 46.4297l27.6797 46.2695
+c1.57324 2.62305 2.85059 7.23242 2.85059 10.29c0 11.0352 -8.95605 19.9951 -19.9902 20h-56.3604l-27.7002 46.2803c-3.14844 5.34375 -10.7363 9.68066 -16.9385 9.68066c-0.0166016 0 -0.0439453 0 -0.0615234 -0.000976562
+c-0.00976562 0 -0.0244141 0.000976562 -0.0341797 0.000976562c-6.29785 0 -14.0098 -4.39941 -17.2158 -9.82031l-27.5898 -46.1201h-56.2998h-0.0302734c-6.44922 0 -14.2754 -4.54688 -17.4697 -10.1504zM624 64c8.83203 0 16 -7.16797 16 -16
+c0 -61.8242 -50.1758 -112 -112 -112h-336c52.9316 0.0605469 95.9395 43.0684 96 96v32h336zM276.86 324.56l-17.3906 -29.1699l-17.4697 29.1699h34.8604zM336 376.62l16.75 -28.0605h-33.5801zM304.88 219.44l-31.4697 52.5596l31.4102 52.5801h62.2998l31.4697 -52.5801
+l-31.4102 -52.5596h-62.2998z" />
+ <glyph glyph-name="torii-gate" unicode="&#xf6a1;"
+d="M376.45 416c39.4961 0.00195312 100.223 14.3389 135.55 32v-96c0 -17.6699 -14.3301 -32 -32 -32h-32v-64h48c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-48v-240c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v240
+h-256v-240c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v240h-48c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h48v64h-32c-17.6699 0 -32 14.3301 -32 32v96c35.3271 -17.6611 96.0537 -31.998 135.55 -32h240.9zM128 320v-64h96
+v64h-96zM384 256v64h-96v-64h96z" />
+ <glyph glyph-name="vihara" unicode="&#xf6a7;" horiz-adv-x="639"
+d="M632.88 47.29c5.41992 -3.61035 7.7002 -9.62012 6.99023 -15.29c-0.620117 -5.00977 -3.56055 -9.75 -8.71973 -12.3301l-55.1504 -19.6699v-48c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v48h-160v-48c0 -8.83984 -7.16016 -16 -16 -16h-32
+c-8.83984 0 -16 7.16016 -16 16v48h-160v-48c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v48l-55.1602 19.6699c-5.14941 2.58008 -8.09961 7.32031 -8.71973 12.3301c-0.700195 5.66016 1.58008 11.6699 7 15.29l88.8799 48.71v64
+l-55.1602 17.6904c-11.79 5.89941 -11.79 22.7197 0 28.6191l119.16 49.6904v64l-27.3096 16.3096c-7.7207 7.7207 -5.61035 20.7402 4.15918 25.6201l183.15 86.0703l183.15 -86.0801c9.76953 -4.87988 11.8799 -17.9004 4.15918 -25.6201l-27.3096 -16.2998v-64
+l119.16 -49.6904c11.79 -5.89941 11.79 -22.7197 0 -28.6191l-55.1602 -17.6904v-64zM224 320v-64h192v64h-192zM160 96h320v64h-320v-64z" />
+ <glyph glyph-name="volume-mute" unicode="&#xf6a9;"
+d="M215.03 376.95c15.0098 15.0098 40.9697 4.49023 40.9697 -16.9697v-335.961c0 -21.4395 -25.9404 -32 -40.9697 -16.9697l-88.9707 88.9502h-102.06c-13.2598 0 -24 10.75 -24 24v144c0 13.2598 10.7402 24 24 24h102.06zM461.64 192l45.6406 -45.6396
+c6.2998 -6.30078 6.2998 -16.5205 0 -22.8203l-22.8203 -22.8203c-6.2998 -6.2998 -16.5195 -6.2998 -22.8203 0l-45.6396 45.6406l-45.6299 -45.6299c-6.2998 -6.30078 -16.5205 -6.30078 -22.8203 0l-22.8193 22.8193c-6.30078 6.2998 -6.30078 16.5205 0 22.8203
+l45.6299 45.6299l-45.6406 45.6396c-6.2998 6.30078 -6.2998 16.5205 0 22.8203l22.8203 22.8203c6.2998 6.2998 16.5195 6.2998 22.8203 0l45.6396 -45.6406l45.6396 45.6406c6.30078 6.2998 16.5205 6.2998 22.8203 0l22.8203 -22.8203
+c6.2998 -6.2998 6.2998 -16.5195 0 -22.8203z" />
+ <glyph glyph-name="yin-yang" unicode="&#xf6ad;" horiz-adv-x="496"
+d="M248 440c136.97 0 248 -111.03 248 -248s-111.03 -248 -248 -248s-248 111.03 -248 248s111.03 248 248 248zM248 64c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM248 192c53.0195 0 96 42.9805 96 96s-42.9805 96 -96 96
+c-106.04 0 -192 -85.96 -192 -192s85.96 -192 192 -192c-53.0195 0 -96 42.9805 -96 96s42.9805 96 96 96zM248 320c17.6699 0 32 -14.3301 32 -32s-14.3301 -32 -32 -32s-32 14.3301 -32 32s14.3301 32 32 32z" />
+ <glyph glyph-name="blender-phone" unicode="&#xf6b6;" horiz-adv-x="576"
+d="M392 384c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h157.82l-17.46 -64h-140.36c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h131.64l-17.46 -64h-114.18c-4.41992 0 -8 -3.58008 -8 -8v-16c0 -4.41992 3.58008 -8 8 -8h105.46
+l-17.46 -64h-288v352h384l-17.46 -64h-166.54zM158.8 112.99c3.00977 -7.40039 0.200195 -15.8506 -6.68945 -20.0703l-39.4102 -24.1797c-12.5303 -7.68066 -31.7803 -6 -41.6504 4.7998c-94.0996 102.94 -94.6699 258.89 -2.09961 362.49
+c11.1396 12.4697 29.5596 15.8398 43.8896 7.0498l39.2803 -24.0996c6.87988 -4.2207 9.7002 -12.6807 6.67969 -20.0703l-25.7803 -63.2598c-2.7793 -6.80078 -9.80957 -10.9902 -17.2393 -10.2607l-45.0303 4.41992c-17.6504 -47.9395 -17.2803 -100.779 0 -147.72
+l45.0303 4.41992c7.43945 0.730469 14.46 -3.4502 17.2393 -10.2598zM480 64c35.3496 0 64 -28.6504 64 -64v-32c0 -17.6699 -14.3301 -32 -32 -32h-352c-17.6699 0 -32 14.3301 -32 32v32c0 35.3496 28.6504 64 64 64h288zM336 -32c17.6699 0 32 14.3301 32 32
+s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="book-dead" unicode="&#xf6b7;" horiz-adv-x="448"
+d="M272 312c-8.7998 0 -16 7.2002 -16 16s7.2002 16 16 16s16 -7.2002 16 -16s-7.2002 -16 -16 -16zM448 89.5996c0 -9.59961 -3.2002 -16 -9.59961 -19.1992c-3.2002 -12.8008 -3.2002 -57.6006 0 -73.6006c6.39941 -6.39941 9.59961 -12.7998 9.59961 -19.2002v-16
+c0 -16 -12.7998 -25.5996 -25.5996 -25.5996h-326.4c-54.4004 0 -96 41.5996 -96 96v320c0 54.4004 41.5996 96 96 96h326.4c16 0 25.5996 -9.59961 25.5996 -25.5996v-332.801zM240 392c-44.2002 0 -80 -28.7002 -80 -64c0 -20.9004 12.7002 -39.2002 32 -50.9004v-13.0996
+c0 -8.7998 7.2002 -16 16 -16h64c8.7998 0 16 7.2002 16 16v13.0996c19.2998 11.7002 32 30 32 50.9004c0 35.2998 -35.7998 64 -80 64zM124.8 224.7c-1.7002 -4.10059 0.100586 -8.7998 4.2002 -10.5l70.5 -30.2998l-70.4004 -30.1006
+c-4.09961 -1.7002 -5.89941 -6.39941 -4.19922 -10.5l6.2998 -14.7002c1.7002 -4.09961 6.39941 -5.89941 10.5 -4.19922l98.2998 42.1992l98.2998 -42.0996c4.10059 -1.7002 8.7998 0.0996094 10.5 4.2002l6.2998 14.7002c1.7002 4.09961 -0.0996094 8.7998 -4.19922 10.5
+l-70.3008 30.0996l70.3008 30.2002c4.09961 1.7002 5.89941 6.39941 4.19922 10.5l-6.2998 14.7002c-1.7002 4.09961 -6.39941 5.89941 -10.5 4.19922l-98.3994 -42.0996l-98.3008 42.0996c-4.09961 1.7002 -8.7998 -0.0996094 -10.5 -4.19922zM380.8 0v64h-284.8
+c-16 0 -32 -12.7998 -32 -32s12.7998 -32 32 -32h284.8zM208 312c-8.7998 0 -16 7.2002 -16 16s7.2002 16 16 16s16 -7.2002 16 -16s-7.2002 -16 -16 -16z" />
+ <glyph glyph-name="campground" unicode="&#xf6bb;" horiz-adv-x="640"
+d="M624 0c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-608c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h24.6797l239.79 330.25l-53.4102 73.5498c-5.19922 7.15039 -3.60938 17.1602 3.54004 22.3506l25.8809 18.7998
+c7.14941 5.18945 17.1592 3.59961 22.3496 -3.5498l41.1699 -56.7002l41.1602 56.6895c5.2002 7.16016 15.2002 8.74023 22.3496 3.55078l25.9004 -18.79c7.14941 -5.19043 8.72949 -15.2002 3.54004 -22.3506l-53.4102 -73.5498l239.78 -330.25h24.6797zM320 160
+l-116.36 -160h232.721z" />
+ <glyph glyph-name="cat" unicode="&#xf6be;"
+d="M290.59 256c11.8906 -58.3496 63.6006 -102.4 125.41 -102.4c11.1104 0 21.71 1.87012 32 4.54004v-206.14c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v144l-128 -96h32c17.6699 0 32 -14.3301 32 -32v-16c0 -8.83984 -7.16016 -16 -16 -16
+h-176c-35.2998 0 -64 28.7002 -64 64v256c0 17.6396 -14.3604 32 -32 32c-17.6699 0 -32 14.3301 -32 32s14.3301 32 32 32c52.9404 0 96 -43.0596 96 -96v-85.9502c55.7695 83.9697 142.41 85.9502 162.59 85.9502zM448 352l64 64v-134.4c0 -53.0195 -42.9805 -96 -96 -96
+s-96 42.9805 -96 96v134.4l64 -64h64zM376 272c8.83984 0 16 7.16016 16 16s-7.16016 16 -16 16s-16 -7.16016 -16 -16s7.16016 -16 16 -16zM456 272c8.83984 0 16 7.16016 16 16s-7.16016 16 -16 16s-16 -7.16016 -16 -16s7.16016 -16 16 -16z" />
+ <glyph glyph-name="chair" unicode="&#xf6c0;" horiz-adv-x="447"
+d="M112 320v-128h-48v128c0 70.7002 57.2998 128 128 128h64c70.7002 0 128 -57.2998 128 -128v-128h-48v128c0 29.5 -16.2002 55 -40 68.9004v-196.9h-48v208h-48v-208h-48v196.9c-23.7998 -13.9004 -40 -39.4004 -40 -68.9004zM446.3 106.1
+c6.90039 -20.6992 -8.5 -42.0996 -30.2998 -42.0996v-112c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v112h-256v-112c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v112c-21.7998 0 -37.2002 21.4004 -30.4004 42.0996l10.7002 32
+c4.40039 13.1006 16.6006 21.9004 30.4004 21.9004h362.5c13.7998 0 26 -8.7998 30.3994 -21.9004z" />
+ <glyph glyph-name="cloud-moon" unicode="&#xf6c3;" horiz-adv-x="576"
+d="M342.8 95.2998c40.9004 -3.5 73.2002 -37.3994 73.2002 -79.2998c0 -44.2002 -35.7998 -80 -80 -80h-240c-53 0 -96 43 -96 96c0 41.9004 27.0996 77.2002 64.5996 90.2998c-0.0996094 1.90039 -0.599609 3.7002 -0.599609 5.7002c0 53 43 96 96 96
+c36.2002 0 67.4004 -20.2998 83.7002 -49.9004c11.5 11 27.0996 17.9004 44.2998 17.9004c35.2998 0 64 -28.7002 64 -64c0 -12 -3.5 -23.0996 -9.2002 -32.7002zM565.3 149.6c8.2002 1.60059 14 -8 8.7002 -14.5c-33.4004 -41.0996 -82.7002 -67 -137.1 -70.3994
+c-11.1006 23.0996 -29.9004 41.7998 -53.5 52.5996c0.399414 3.5 0.599609 7.10059 0.599609 10.7002c0 52.9004 -43.0996 96 -96 96c-12.7002 0 -25 -2.5 -36.4004 -7.2002c-5.09961 5.2998 -10.7998 9.90039 -16.6992 14.2002c-1.10059 8.2002 -1.80078 16.5 -1.80078 25
+c0 106.1 86 192 191.9 192c11.7002 0 23.4004 -1.09961 35.0996 -3.2002c8.2002 -1.59961 10.1006 -12.5996 2.80078 -16.7002c-47.1006 -26.7998 -76.1006 -76.5996 -76.1006 -130.8c0 -94 85.4004 -165.399 178.5 -147.7z" />
+ <glyph glyph-name="cloud-sun" unicode="&#xf6c4;" horiz-adv-x="640"
+d="M575.2 122.3c37.5996 -13 64.7998 -48.2998 64.7998 -90.3994c0 -53 -43 -96 -96 -96h-272c-53 0 -96 43 -96 96c0 50.5996 39.2998 91.5996 88.9004 95.2998c-0.200195 2.89941 -0.900391 5.7002 -0.900391 8.7002c0 61.8994 50.2002 112 112 112
+c45.4004 0 84.2998 -27.2002 101.9 -66c9.89941 6.19922 21.5 10 34.0996 10c35.2998 0 64 -28.7002 64 -64c0 -1.90039 -0.599609 -3.7002 -0.799805 -5.60059zM144.8 144.9c10.6006 -10.6006 22.9004 -18.6006 36 -24c-9.59961 -9.80078 -17.5 -21.1006 -23.5996 -33.5
+l-76.1006 -25.3008c-11.8994 -3.89941 -23.1992 7.30078 -19.1992 19.2002l29.2998 87.7998l-82.7998 41.4004c-11.2002 5.59961 -11.2002 21.5 0 27.0996l82.7998 41.5l-29.2998 87.8008c-3.90039 11.7998 7.39941 23.0996 19.1992 19.1992l87.9004 -29.2998
+l41.4004 82.7998c5.59961 11.2002 21.5996 11.2002 27.0996 0l41.5 -82.7998l87.7998 29.2998c11.9004 3.90039 23.2002 -7.2998 19.2002 -19.1992l-29.7998 -88.9004c-10.6006 -1.5 -20.9004 -3.7998 -30.7002 -7.5c-5.40039 11.7998 -12.5996 23 -22.2998 32.7002
+c-43.7002 43.7002 -114.7 43.7002 -158.4 0c-43.7002 -43.6006 -43.7002 -114.601 0 -158.3zM140 224.1c0 46.3008 37.7002 83.9004 84 83.9004c34 0 63.2998 -20.4004 76.5 -49.5c-36.4004 -22.5 -62 -60.7002 -67.4004 -105
+c-8.89941 -2.90039 -17.1992 -7 -25.1992 -11.7002c-38.6006 7.5 -67.9004 41.5 -67.9004 82.2998z" />
+ <glyph glyph-name="dice-d20" unicode="&#xf6cf;" horiz-adv-x="480"
+d="M106.75 232.94l108.64 -190.101l-208.26 22.0703c-5.83008 0.639648 -9.00977 7.13965 -5.92969 12.1396zM7.41016 132.57c-2.12012 -3.44043 -7.41016 -1.94043 -7.41016 2.08984v162.81c0 3.11035 3.38965 5.03027 6.05957 3.43066l76.6406 -45.9805zM18.25 24.4004
+c-4.03027 1.97949 -2.25 8.06934 2.2002 7.56934l203.55 -22.2998v-65.6699c0 -5.83008 -6.0498 -9.70996 -11.3496 -7.25977zM99.4697 282.18l-81.5293 48.6904c-2.52051 1.51953 -2.60059 5.16016 -0.130859 6.78027l150.811 98.6094
+c7.18945 4.11035 15.1201 -4.08008 10.7803 -11.1396zM240 272h-109.21l95.5801 168.38c3.12988 5.08008 8.37988 7.62012 13.6299 7.62012s10.5 -2.54004 13.6299 -7.62012l95.5801 -168.38h-109.21zM473.94 300.9c2.66992 1.59961 6.05957 -0.320312 6.05957 -3.43066
+v-162.81c0 -4.04004 -5.2998 -5.54004 -7.41016 -2.10059l-75.29 122.351zM380.53 282.18l-79.9307 142.94c-4.33984 7.05957 3.59082 15.25 10.7803 11.1396l150.811 -98.5996c2.46973 -1.62012 2.39941 -5.25977 -0.130859 -6.78027zM459.55 31.9697
+c4.4502 0.5 6.23047 -5.58984 2.2002 -7.55957l-194.4 -87.6602c-5.2998 -2.4502 -11.3496 1.41992 -11.3496 7.25977v65.6699zM373.25 232.94l105.56 -155.891c3.08008 -5 -0.0996094 -11.4902 -5.92969 -12.1396l-208.26 -22.0703zM240 240h100.43l-100.43 -175.75
+l-100.43 175.75h100.43z" />
+ <glyph glyph-name="dice-d6" unicode="&#xf6d1;" horiz-adv-x="448"
+d="M422.19 338.05c5.3291 -3.24023 5.2998 -11.2695 -0.0507812 -14.46l-198.14 -118.14l-198.13 118.14c-5.35059 3.19043 -5.37988 11.2305 -0.0605469 14.46l165.971 100.88c19.9102 12.1006 44.5195 12.1006 64.4297 0zM436.03 293.42
+c5.33008 3.17969 11.9697 -0.839844 11.9697 -7.25v-197.7c0 -23.7598 -12.1104 -45.7393 -31.79 -57.7002l-152.16 -92.4795c-10.6602 -6.48047 -24.0498 1.5498 -24.0498 14.4297v223.82zM0 286.17c0 6.41016 6.63965 10.4297 11.9697 7.25l196.03 -116.88v-223.81
+c0 -12.8906 -13.3799 -20.9102 -24.0498 -14.4307l-152.16 92.4697c-19.6797 11.9609 -31.79 33.9307 -31.79 57.7002v197.7z" />
+ <glyph glyph-name="dog" unicode="&#xf6d3;"
+d="M496 352c8.83984 0 16 -7.16016 16 -16v-32c0 -35.3496 -28.6504 -64 -64 -64h-32v-35.5801l-128 45.71v149.84c0 14.25 17.2305 21.3906 27.3203 11.3105l27.2793 -27.2803h53.6201c10.917 -0.000976562 23.7383 -7.92578 28.6201 -17.6904l7.16016 -14.3096h64z
+M384 304c8.83984 0 16 7.16016 16 16s-7.16016 16 -16 16s-16 -7.16016 -16 -16s7.16016 -16 16 -16zM96 224h170.05l149.95 -53.5498v-218.45c0 -8.83984 -7.16016 -16 -16 -16h-64c-8.83984 0 -16 7.16016 -16 16v112h-160v-112c0 -8.83984 -7.16016 -16 -16 -16h-64
+c-8.83984 0 -16 7.16016 -16 16v213.9c-37.1699 13.25 -64 48.4395 -64 90.0996c0 17.6699 14.3301 32 32 32s32 -14.3301 32 -32c0 -17.6396 14.3604 -32 32 -32z" />
+ <glyph glyph-name="dragon" unicode="&#xf6d5;" horiz-adv-x="640"
+d="M18.3203 192.22c-15.96 -2.2793 -24.8906 17.8105 -12.5107 28.1406l117.4 116.34c21.7705 18.5996 53.2402 20.4697 77.0596 4.58984l119.73 -87.5996v-42.2705c0 -28.9102 5.29004 -56.9795 14.7305 -83.3799h-222.7c-14.25 0 -21.3906 17.2295 -11.3105 27.3096
+l91.2803 68.6904zM575.19 158.12c41.9092 -20.96 67.1592 -64.0801 64.6396 -111.36c-3.37988 -63.2002 -59.7002 -110.77 -122.99 -110.76h-499.08c-9.80957 0 -17.7598 8 -17.7598 17.7998c0 8.32031 5.78027 15.5303 13.9004 17.3301
+c89.54 19.9004 238.51 54.1006 434.1 60.9102c-59.9697 39.9902 -96 107.3 -96 179.38v108.62l-59.5801 24.8496c-5.90039 2.9502 -5.90039 11.3604 0 14.3105l59.5801 24.8398l-61.6396 50.3496c-5.04004 5.04004 -1.4707 13.6104 5.65918 13.6104h237.45
+c10.0703 0 19.5498 -4.7002 25.6006 -12.7598l74.5293 -99.3799c3.53223 -4.71094 6.39844 -13.3115 6.39844 -19.1992c0 -4.16992 -1.51367 -10.5811 -3.37793 -14.3115l-14.3105 -28.6191c-4.88086 -9.76465 -17.7031 -17.6895 -28.6191 -17.6904h-30.9707
+c-8.48926 0 -16.6299 3.37012 -22.6299 9.37012l-28.0898 22.6299h-64v-36.6904c0.00195312 -16.3701 11.8877 -35.5986 26.5303 -42.9199zM489.18 381.75c-4.33008 -17.1396 8.56055 -28.96 21.5205 -29.6699c11.6602 -0.629883 21.3799 7.34961 24.1299 18.2598z" />
+ <glyph glyph-name="drumstick-bite" unicode="&#xf6d7;"
+d="M462.8 398.43c34.3203 -34.2793 50.4307 -79.5996 49.1299 -124.56c-41.9795 22.6602 -94.3594 17.5596 -128.739 -16.7998c-40.8809 -40.8398 -40.6904 -107.181 -1.05078 -151.07c-16.2383 -5.52246 -43.3389 -10.0049 -60.4912 -10.0049
+c-0.385742 0 -1.0127 0.00195312 -1.39844 0.00488281h-85.8896l-40.6104 -40.5596c-9.71973 -9.75 -11.0898 -24.0205 -6 -36.75c2.38477 -5.95898 4.32031 -16.0049 4.32031 -22.4229c0 -33.3135 -27.0371 -60.3496 -60.3506 -60.3496
+c-13.8428 0 -33.0039 7.96191 -42.7695 17.7725c-15.2803 15.2695 -19.6006 36.5 -15.1006 56.0996c-19.6094 -4.49023 -40.8496 -0.179688 -56.1191 15.0703c-9.77148 9.75684 -17.7021 28.8828 -17.7021 42.6914c0 33.3018 27.0273 60.3301 60.3301 60.3301
+c6.40234 0 16.4248 -1.92773 22.3721 -4.30176c12.7793 -5.07031 27.0791 -3.69043 36.7793 6l40.6201 40.5898v85.8301c0 64 27.6904 107 63.1699 142.43c27.3887 27.3604 81.0371 49.5664 119.75 49.5664c38.7139 0 92.3613 -22.2061 119.75 -49.5664z" />
+ <glyph glyph-name="dungeon" unicode="&#xf6d9;"
+d="M128.73 252.68c6.58984 -4.12012 8.89941 -12.2393 6.33984 -19.5801c-3 -8.60938 -5.15039 -17.6094 -6.24023 -26.9395c-0.929688 -7.91016 -7.0498 -14.1602 -15.0098 -14.1602h-97.1299c-9.10059 0 -16.7402 7.62988 -16.1504 16.7197
+c1.66016 25.7793 11.7178 65.7725 22.4502 89.2705c3.93945 8.62012 14.8896 11.4697 22.9297 6.4502zM319.03 440c9.16992 -2.36035 13.9102 -12.5996 10.3896 -21.3896l-37.4697 -104.03c-2.28027 -6.34961 -8.2998 -10.5801 -15.0498 -10.5801h-41.8008
+c-6.2041 0.00195312 -12.9473 4.74219 -15.0498 10.5801l-37.4697 104.03c-3.52051 8.79004 1.21973 19.04 10.3896 21.3896c20.1699 5.17969 41.2607 8 63.0303 8s42.8604 -2.82031 63.0303 -8zM112 160c8.83984 0 16 -7.16016 16 -16v-64c0 -8.83984 -7.16016 -16 -16 -16
+h-96c-8.83984 0 -16 7.16016 -16 16v64c0 8.83984 7.16016 16 16 16h96zM112 32c8.83984 0 16 -7.16016 16 -16v-64c0 -8.83984 -7.16016 -16 -16 -16h-96c-8.83984 0 -16 7.16016 -16 16v64c0 8.83984 7.16016 16 16 16h96zM189.31 315.67
+c2.85059 -7.12012 -0.0195312 -14.8799 -6.2998 -19.29c-6 -4.2002 -11.6094 -8.89941 -16.79 -14.0498c-5.4502 -5.41016 -13.5996 -6.86035 -20.1094 -2.79004l-82.9307 51.8301c-8.06934 5.04004 -10.2793 16.2002 -4.21973 23.5195
+c16.7148 20.1758 48.7969 47.0068 71.6104 59.8906c8.29004 4.67969 18.8896 0.519531 22.4199 -8.31055zM398.18 192c-7.95996 0 -14.0801 6.25 -15.0098 14.1602c-1.08984 9.32031 -3.22949 18.3301 -6.24023 26.9395c-2.55957 7.34082 -0.25 15.46 6.33984 19.5801
+l82.8105 51.7607c8.04004 5.01953 18.9902 2.16992 22.9297 -6.4502c10.7334 -23.498 20.791 -63.4902 22.4502 -89.2705c0.589844 -9.08984 -7.0498 -16.7197 -16.1504 -16.7197h-97.1299zM453.03 354.89c6.06934 -7.31934 3.84961 -18.4795 -4.2207 -23.5098
+l-82.9297 -51.8301c-6.50977 -4.06934 -14.6699 -2.62012 -20.1104 2.79004c-5.17969 5.15039 -10.7891 9.85059 -16.7891 14.0498c-6.28027 4.40039 -9.15039 12.1602 -6.30078 19.2803l36.3203 90.7998c3.54004 8.83008 14.1396 12.9902 22.4199 8.31055
+c22.8135 -12.8838 54.8945 -39.7148 71.6104 -59.8906zM496 160c8.83984 0 16 -7.16016 16 -16v-64c0 -8.83984 -7.16016 -16 -16 -16h-96c-8.83984 0 -16 7.16016 -16 16v64c0 8.83984 7.16016 16 16 16h96zM496 32c8.83984 0 16 -7.16016 16 -16v-64
+c0 -8.83984 -7.16016 -16 -16 -16h-96c-8.83984 0 -16 7.16016 -16 16v64c0 8.83984 7.16016 16 16 16h96zM240 270.38c5.23047 0.889648 10.5195 1.62012 16 1.62012s10.7695 -0.730469 16 -1.62012v-294.38c0 -4.41992 -3.58008 -8 -8 -8h-16c-4.41992 0 -8 3.58008 -8 8
+v294.38zM176 228.87c8.16016 12.2998 19.2197 22.3203 32 29.7695v-282.64c0 -4.41992 -3.58008 -8 -8 -8h-16c-4.41992 0 -8 3.58008 -8 8v252.87zM304 258.64c12.7803 -7.44922 23.8398 -17.4697 32 -29.7695v-252.87c0 -4.41992 -3.58008 -8 -8 -8h-16
+c-4.41992 0 -8 3.58008 -8 8v282.64z" />
+ <glyph glyph-name="file-csv" unicode="&#xf6dd;" horiz-adv-x="384"
+d="M224 312c0 -13.2002 10.7998 -24 24 -24h136v-328c0 -13.2998 -10.7002 -24 -24 -24h-336c-13.2998 0 -24 10.7002 -24 24v464c0 13.2998 10.7002 24 24 24h200v-136zM128 168v16c0 4.41992 -3.58008 8 -8 8h-8c-26.5098 0 -48 -21.4902 -48 -48v-32
+c0 -26.5098 21.4902 -48 48 -48h8c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8h-8c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h8c4.41992 0 8 3.58008 8 8zM172.27 64c23.3906 0 42.4004 17.3301 42.3906 38.6201
+c0 10.6602 -4.86035 20.9199 -13.3301 28.1396l-21.8896 18.7705c-1.37012 1.17969 -2.12012 2.54004 -2.12012 3.83984c0 3.12012 4.45996 6.62012 10.4102 6.62012h12.2695c4.41992 0 8 3.58008 8 8v16c0 4.41992 -3.58008 8 -8 8h-12.25
+c-23.3896 0 -42.4102 -17.3203 -42.4102 -38.6201c0 -10.6602 4.86035 -20.9199 13.3301 -28.1396l21.8896 -18.7705c1.37012 -1.17969 2.12012 -2.54004 2.12012 -3.83984c0 -3.12012 -4.45996 -6.62012 -10.4102 -6.62012h-12.2695c-4.41992 0 -8 -3.58008 -8 -8v-16
+c0 -4.41992 3.58008 -8 8 -8h12.2695zM256 184c0 4.41992 -3.58008 8 -8 8h-16c-4.41992 0 -8 -3.58008 -8 -8v-20.7998c0 -35.4805 12.8799 -68.8906 36.2803 -94.0898c3.01953 -3.25 7.26953 -5.11035 11.7197 -5.11035s8.7002 1.86035 11.7197 5.11035
+c23.4004 25.1992 36.2803 58.6094 36.2803 94.0898v20.7998c0 4.41992 -3.58008 8 -8 8h-16c-4.41992 0 -8 -3.58008 -8 -8v-20.7998c0 -20.2705 -5.7002 -40.1807 -16 -56.8799c-10.2998 16.71 -16 36.6094 -16 56.8799v20.7998zM377 343c4.5 -4.5 7 -10.5996 7 -16.9004
+v-6.09961h-128v128h6.09961c6.40039 0 12.5 -2.5 17 -7z" />
+ <glyph glyph-name="fist-raised" unicode="&#xf6de;" horiz-adv-x="384"
+d="M255.98 288h-48.0303c-5.62988 0 -10.9502 -1.15039 -15.9697 -2.92969v146.93c0 8.83984 7.15918 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-144zM383.98 192.01l0.0195312 -88.2197v-0.0107422c0 -21.9551 -12.5977 -52.373 -28.1201 -67.8994l-35.9102 -35.9199
+v-63.96h-255.979v64l-26.5 26.5098c-24.0098 24.0098 -37.4902 56.5605 -37.4902 90.5098v77.9307c5.03027 -1.7998 10.3604 -2.9502 16 -2.9502h32c12.3301 0 23.4805 4.80957 32 12.4902c8.50977 -7.66992 19.6602 -12.4805 32 -12.4805h32
+c7.91016 0 15.2803 2.11035 21.8701 5.52051c7.46973 -16.3301 21.5996 -28.9404 38.8701 -34.4502c-17.1104 -14.8203 -31.5801 -34.4805 -47.3105 -58.0801l-6.30957 -9.46973c-0.742188 -1.1123 -1.34375 -3.09961 -1.34375 -4.43652
+c0 -2.36328 1.59668 -5.34375 3.56348 -6.65332l13.3105 -8.88086c1.11133 -0.741211 3.09863 -1.34375 4.43555 -1.34375c2.36328 0 5.34473 1.59668 6.6543 3.56445l6.30957 9.46973c31.8906 47.8398 51.5303 70.2695 96.0498 72.5498
+c4.29004 0.219727 7.88086 3.70996 7.88086 8v16.2002c0 4.41992 -3.52051 8 -13.2002 8h-35.2607c-26.2695 0 -47.5693 21.3203 -47.5693 47.5898v0.560547c0 8.7793 7.12012 15.8496 15.8994 15.8496h112.141c35.3301 0 63.9795 -28.6504 63.9902 -63.9902zM351.97 282.1
+c-10.0596 3.59082 -20.7197 5.90039 -32 5.90039h-32v112c0 8.83984 7.16016 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-117.9zM16 224c-8.83984 0 -16 7.16016 -16 16v128c0 8.83984 7.16016 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-128
+c0 -8.83984 -7.16016 -16 -16 -16h-32zM111.99 224c-8.83984 0 -16 7.16016 -16 16v160c0 8.83984 7.16016 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-160c0 -8.83984 -7.16016 -16 -16 -16h-32z" />
+ <glyph glyph-name="ghost" unicode="&#xf6e2;" horiz-adv-x="384"
+d="M186.1 447.91c108.73 3.25977 197.9 -83.9102 197.9 -191.91v-271.97c0 -14.25 -17.2305 -21.3906 -27.3096 -11.3105l-24.9209 18.5303c-6.65918 4.95996 -16 3.99023 -21.5098 -2.20996l-42.9502 -48.3496c-6.25 -6.25 -16.3799 -6.25 -22.6299 0l-40.7197 45.8496
+c-6.36035 7.16992 -17.5498 7.16992 -23.9199 0l-40.7197 -45.8496c-6.25 -6.25 -16.3799 -6.25 -22.6299 0l-42.9502 48.3496c-5.50977 6.2002 -14.8506 7.16016 -21.5098 2.20996l-24.9209 -18.5303c-10.0791 -10.0801 -27.3096 -2.9502 -27.3096 11.3105v263.92
+c0 105.13 81.0098 196.81 186.1 199.96zM128 224c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32zM256 224c17.6699 0 32 14.3301 32 32s-14.3301 32 -32 32s-32 -14.3301 -32 -32s14.3301 -32 32 -32z" />
+ <glyph glyph-name="hammer" unicode="&#xf6e3;" horiz-adv-x="576"
+d="M571.31 254.06c6.25 -6.25 6.25 -16.3799 0.0205078 -22.6191l-90.5098 -90.5107c-6.25 -6.25 -16.3799 -6.25 -22.6299 0l-22.6309 22.6299c-6.25 6.25 -6.25 16.3809 0 22.6309l11.3105 11.3096l-28.9004 28.9004
+c-21.3096 -5.63086 -44.8994 -0.360352 -61.6094 16.3496l-49.1406 49.1396c-12.0098 12 -18.75 28.2803 -18.75 45.25v18.75l-90.5098 45.25c62.4902 62.4805 163.8 62.4805 226.28 0l45.25 -45.25c16.71 -16.71 21.9795 -40.2998 16.3496 -61.6094l28.9004 -28.9004
+l11.3096 11.3105c6.25 6.25 16.3799 6.25 22.6299 0zM284.59 269.26l49.1406 -49.1396c3.53906 -3.54004 7.47949 -6.5 11.4395 -9.41016l-238.13 -255.07c-23.8799 -25.5801 -64.2002 -26.2695 -88.9297 -1.5293c-24.7305 24.7393 -24.04 65.0498 1.5293 88.9297
+l255.101 238.17c3.00977 -4.16016 6.14941 -8.25 9.84961 -11.9502z" />
+ <glyph glyph-name="hanukiah" unicode="&#xf6e6;" horiz-adv-x="640"
+d="M232 288h16c4.41992 0 8 -3.58008 8 -8v-120h-32v120c0 4.41992 3.58008 8 8 8zM168 288h16c4.41992 0 8 -3.58008 8 -8v-120h-32v120c0 4.41992 3.58008 8 8 8zM392 288h16c4.41992 0 8 -3.58008 8 -8v-120h-32v120c0 4.41992 3.58008 8 8 8zM456 288h16
+c4.41992 0 8 -3.58008 8 -8v-120h-32v120c0 4.41992 3.58008 8 8 8zM544 280v-120h-32v120c0 4.41992 3.58008 8 8 8h16c4.41992 0 8 -3.58008 8 -8zM104 288h16c4.41992 0 8 -3.58008 8 -8v-120h-32v120c0 4.41992 3.58008 8 8 8zM624 288c8.83984 0 16 -7.16016 16 -16
+v-112c0 -53.0195 -42.9805 -96 -96 -96h-192v-64h176c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-416c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h176v64h-192c-53.0195 0 -96 42.9805 -96 96v112c0 8.83984 7.16016 16 16 16
+h32c8.83984 0 16 -7.16016 16 -16v-112c0 -17.6699 14.3301 -32 32 -32h192v192c0 8.83984 7.16016 16 16 16h32c8.83984 0 16 -7.16016 16 -16v-192h192c17.6699 0 32 14.3301 32 32v112c0 8.83984 7.16016 16 16 16h32zM608 320c-13.25 0 -24 11.9502 -24 26.6699
+s24 53.3301 24 53.3301s24 -38.5996 24 -53.3301c0 -14.7295 -10.75 -26.6699 -24 -26.6699zM32 320c-13.25 0 -24 11.9502 -24 26.6699s24 53.3301 24 53.3301s24 -38.5996 24 -53.3301c0 -14.7295 -10.75 -26.6699 -24 -26.6699zM320 368
+c-13.25 0 -24 11.9502 -24 26.6699s24 53.3301 24 53.3301s24 -38.5996 24 -53.3301c0 -14.7295 -10.75 -26.6699 -24 -26.6699zM112 320c-13.25 0 -24 11.9502 -24 26.6699s24 53.3301 24 53.3301s24 -38.5996 24 -53.3301c0 -14.7295 -10.75 -26.6699 -24 -26.6699z
+M176 320c-13.25 0 -24 11.9502 -24 26.6699s24 53.3301 24 53.3301s24 -38.5996 24 -53.3301c0 -14.7295 -10.75 -26.6699 -24 -26.6699zM240 320c-13.25 0 -24 11.9502 -24 26.6699s24 53.3301 24 53.3301s24 -38.5996 24 -53.3301
+c0 -14.7295 -10.75 -26.6699 -24 -26.6699zM400 320c-13.25 0 -24 11.9502 -24 26.6699s24 53.3301 24 53.3301s24 -38.5996 24 -53.3301c0 -14.7295 -10.75 -26.6699 -24 -26.6699zM464 320c-13.25 0 -24 11.9502 -24 26.6699s24 53.3301 24 53.3301
+s24 -38.5996 24 -53.3301c0 -14.7295 -10.75 -26.6699 -24 -26.6699zM528 320c-13.25 0 -24 11.9502 -24 26.6699s24 53.3301 24 53.3301s24 -38.5996 24 -53.3301c0 -14.7295 -10.75 -26.6699 -24 -26.6699z" />
+ <glyph glyph-name="hat-wizard" unicode="&#xf6e8;"
+d="M496 0c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-480c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h480zM192 64l16 -32h-144l110.96 249.66c9.4043 21.1611 33.3545 47.6611 53.46 59.1494l187.58 107.19l-56.2998 -168.92
+c-1.81543 -5.43945 -3.28809 -14.5078 -3.28809 -20.2412c0 -7.25098 2.31934 -18.5449 5.17773 -25.209l86.4102 -201.63h-208l16 32l64 32l-64 32l-32 64l-32 -64l-64 -32zM256 288l-32 -16l32 -16l16 -32l16 32l32 16l-32 16l-16 32z" />
+ <glyph glyph-name="hiking" unicode="&#xf6ec;" horiz-adv-x="383"
+d="M80.9502 -24.2305l34.5596 138.221l52.7803 -52.7803l-25.2402 -100.97c-3.64941 -14.5303 -16.6895 -24.2305 -31.0195 -24.2305c-2.58008 0 -5.19043 0.290039 -7.7998 0.950195c-17.1406 4.28027 -27.5605 21.6504 -23.2803 38.8096zM95.8398 171.89
+c-2.18945 -8.41992 -11.0801 -13.54 -19.8701 -11.4395l-63.5596 15.25c-8.78027 2.10938 -14.1104 10.6396 -11.9199 19.0596l25.2695 98.1299c10.9707 42.1006 55.4404 67.6904 99.3203 57.1699c8.78027 -2.10938 14.1104 -10.6396 11.9199 -19.0596zM368 288
+c8.83984 0 16 -7.16016 15.9902 -16v-320.01c0 -8.83984 -7.16016 -16 -16 -16h-16c-8.83984 0 -16 7.16016 -16 16v240h-48c-8.48047 0 -16.6201 3.37988 -22.6201 9.37988l-22.4297 22.4297l-19.8906 -79.5693
+c-0.129883 -0.520508 -0.469727 -0.910156 -0.620117 -1.41016l46.8203 -46.8203c12.0898 -12.0898 18.75 -28.1602 18.75 -45.25v-82.75c0 -17.6699 -14.3301 -32 -32 -32s-32 14.3301 -32 32v82.75l-86.6201 86.6201c-5.17383 5.1748 -9.37402 15.3154 -9.37402 22.6338
+c0 2.17285 0.427734 5.64746 0.954102 7.75586l26.9199 107.721c6.56055 26.2197 30.0098 44.5195 57.0303 44.5195c15.7002 0 30.4697 -6.11035 41.5596 -17.2197l46.7803 -46.7803h34.75v16c0 8.83984 7.16016 16 16 16h16zM240 352c-26.5098 0 -48 21.4902 -48 48
+s21.4902 48 48 48s48 -21.4902 48 -48s-21.4902 -48 -48 -48z" />
+ <glyph glyph-name="hippo" unicode="&#xf6ed;" horiz-adv-x="640"
+d="M581.12 351.8c34.0898 -0.189453 58.8799 -33.6895 58.8799 -67.7803v-92.0195c0 -17.6699 -14.3301 -32 -32 -32v-32c0 -8.83984 -7.16016 -16 -16 -16h-32c-8.83984 0 -16 7.16016 -16 16v32h-128v-176c0 -8.83984 -7.16016 -16 -16 -16h-64
+c-8.83984 0 -16 7.16016 -16 16v70.79c-32.3496 -14.3604 -70.7197 -22.79 -112 -22.79s-79.6504 8.42969 -112 22.79v-70.79c0 -8.83984 -7.16016 -16 -16 -16h-64c-8.83984 0 -16 7.16016 -16 16v240c0 88.3604 85.96 160 192 160
+c49.2402 0 94.0098 -15.5801 128 -40.9805v48.9805c0 13.2598 10.75 24 24 24h16c13.25 0 24 -10.7402 24 -24v-13.8799c10.0498 3.58008 20.7197 5.87988 32 5.87988c39.8301 0 73.9805 -24.2695 88.5195 -58.8203c24.1006 9.04004 48.9307 26.7705 76.6006 26.6201z
+M448 272c8.83984 0 16 7.16016 16 16s-7.16016 16 -16 16s-16 -7.16016 -16 -16s7.16016 -16 16 -16z" />
+ <glyph glyph-name="horse" unicode="&#xf6f0;" horiz-adv-x="575"
+d="M575.92 371.4l0.0605469 -77.71v-0.0292969c0 -11.9609 -9.01465 -25.2715 -20.1201 -29.7109l-32.5508 -13.0205c-15.4395 -6.17969 -33.04 0.5 -40.4893 15.3701l-18.9004 37.7002l-16 7.11035v-102.471c0.00976562 -0.219727 0.0800781 -0.419922 0.0800781 -0.639648
+c0 -30.4697 -12.2598 -58.0303 -32 -78.2197v-177.78c0 -8.83984 -7.16016 -16 -16 -16h-64c-8.83984 0 -16 7.16016 -16 16v150.4l-133.97 22.3301l-23.8398 -63.5908l26.3096 -105.26c2.53027 -10.0996 -5.11035 -19.8799 -15.5195 -19.8799h-65.9609h-0.000976562
+c-6.89453 0 -13.8428 5.42969 -15.5088 12.1201l-24.8496 99.4102c-1.05664 4.2207 -1.91406 11.1777 -1.91406 15.5293c0 6.40332 1.8252 16.4648 4.07422 22.46l25.7197 68.6006c-18.7002 17.5195 -30.54 42.2402 -30.54 69.8799
+c0 2.62988 0.570312 5.09961 0.780273 7.67969c-9.91016 -7.29004 -16.7803 -18.46 -16.7803 -31.6797v-56c0 -8.83984 -7.16016 -16 -16 -16h-16c-8.83984 0 -16 7.16016 -16 16v56c0 48.5303 39.4697 88 88 88v-1.11035c17.5996 20.1299 43.1602 33.1104 72 33.1104
+h159.92c0 70.6904 57.3105 128 128 128h119.98c5.05957 0 8.94922 -4.67969 7.92969 -9.63965c-2.67969 -13.1699 -11.1201 -23.8203 -22.1797 -30.6602c5.10938 -5.37988 9.90918 -10.4697 13.6895 -14.5c5.56055 -5.93066 8.57031 -13.6699 8.58008 -21.7998zM511.92 352
+c8.83984 0 16 7.16016 16 16s-7.16016 16 -16 16s-16 -7.16016 -16 -16s7.16016 -16 16 -16z" />
+ <glyph glyph-name="house-damage" unicode="&#xf6f1;" horiz-adv-x="576"
+d="M288 333.04l218.74 -192.94c1.54004 -1.37988 3.55957 -2.04004 5.25977 -3.19922v-184.9c0 -8.83984 -7.16016 -16 -16 -16h-176.19l-39.9199 55.25l104.11 64l-148.05 136.78l60.1602 -119.221l-104.11 -64l37.2305 -72.8096h-149.23c-8.83984 0 -16 7.16016 -16 16
+v184.94c1.78027 1.20996 3.84961 1.88965 5.46973 3.34961zM570.69 211.72c6.5791 -5.89941 7.11914 -16.0195 1.21973 -22.5898l-21.4004 -23.8203c-5.91016 -6.56934 -16.0293 -7.10938 -22.5996 -1.20996l-229.32 202.271c-6.0498 5.33008 -15.1201 5.33008 -21.1699 0
+l-229.32 -202.28c-6.58008 -5.91016 -16.6992 -5.35938 -22.5996 1.20996l-21.4004 23.8203c-5.90918 6.58008 -5.35938 16.6895 1.20996 22.5996l255.99 226.011c7.60059 6.85938 17.1406 10.2793 26.7002 10.2695s19.1201 -3.4502 26.75 -10.3096l101.25 -89.3809v51.6904
+c0 8.83984 7.16016 16 16 16h64c8.83984 0 16 -7.16016 16 -16v-136.45z" />
+ <glyph glyph-name="hryvnia" unicode="&#xf6f2;" horiz-adv-x="384"
+d="M368 208h-99.7002l-34.6699 -32h134.37c8.83984 0 16 -7.16016 16 -16v-32c0 -8.83984 -7.16016 -16 -16 -16h-203.7l-29.4902 -27.2197c-4.3291 -4 -6.80957 -9.66992 -6.80957 -15.5801c0 -11.6807 9.50977 -21.2002 21.2002 -21.2002h83.6299h0.00195312
+c4.79395 0 11.6748 2.49121 15.3584 5.55957l11.75 9.80078c10.1895 8.48926 25.3193 7.12012 33.8096 -3.07031l20.4902 -24.5898c8.49023 -10.1807 7.10938 -25.3105 -3.07031 -33.7998l-11.7695 -9.81055c-18.6807 -15.5596 -42.2207 -24.0898 -66.54 -24.0898h-78.8203
+c-37.1396 0 -73.3799 17.8496 -92.0498 49.9502c-17.8701 30.7197 -17.54 65.4199 -4.12988 94.0498h-41.8604c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16h99.7002l34.6699 32h-134.37c-8.83984 0 -16 7.16016 -16 16v32c0 8.83984 7.16016 16 16 16
+h203.7l29.4902 27.21c4.3291 4 6.80957 9.66992 6.80957 15.5801c0 11.6797 -9.50977 21.2002 -21.2002 21.2002h-83.6201c-5.62012 0 -11.0693 -1.9707 -15.3896 -5.57031l-11.7305 -9.78027c-10.1895 -8.48926 -25.3193 -7.12012 -33.8096 3.07031l-20.4902 24.5898
+c-8.49023 10.1807 -7.10938 25.3105 3.07031 33.7998l11.7695 9.81055c18.6807 15.5596 42.2207 24.0898 66.54 24.0898h78.8203c37.1396 0 73.3799 -17.8398 92.0498 -49.9502c17.8701 -30.7197 17.54 -65.4199 4.12988 -94.0498h41.8604c8.83984 0 16 -7.16016 16 -16v-32
+c0 -8.83984 -7.16016 -16 -16 -16z" />
+ <glyph glyph-name="mask" unicode="&#xf6fa;" horiz-adv-x="640"
+d="M320.67 384c449.09 0 348.32 -384 158.46 -384c-39.8994 0 -77.4697 20.6904 -101.41 55.8604l-25.7295 37.79c-15.6602 22.9893 -46.9707 22.9893 -62.6299 0l-25.7305 -37.79c-23.9502 -35.1699 -61.5195 -55.8604 -101.42 -55.8604c-199.11 0 -284.14 384 158.46 384z
+M184 139.64c41.0596 0 67.7598 25.6504 80.0801 41.0508c5.22949 6.54004 5.22949 16.0996 0 22.6299c-12.3203 15.3896 -39.0098 41.0498 -80.0801 41.0498s-67.7598 -25.6504 -80.0801 -41.0498c-5.22949 -6.54004 -5.22949 -16.1006 0 -22.6299
+c12.3203 -15.3906 39.0205 -41.0508 80.0801 -41.0508zM456 139.64c41.0596 0 67.7598 25.6504 80.0801 41.0508c5.22949 6.54004 5.22949 16.0996 0 22.6299c-12.3203 15.3896 -39.0098 41.0498 -80.0801 41.0498s-67.7598 -25.6504 -80.0801 -41.0498
+c-5.22949 -6.54004 -5.22949 -16.1006 0 -22.6299c12.3203 -15.3906 39.0205 -41.0508 80.0801 -41.0508z" />
+ <glyph glyph-name="mountain" unicode="&#xf6fc;" horiz-adv-x="640"
+d="M634.92 -14.7002c2.80566 -4.3623 5.08301 -12.1133 5.08301 -17.3008c0 -4.50586 -1.75293 -11.374 -3.91309 -15.3291c-5.60938 -10.2803 -16.3799 -16.6699 -28.0898 -16.6699h-576h-0.00390625c-10.4795 0 -23.0615 7.46387 -28.0859 16.6602
+c-2.16016 3.95605 -3.91309 10.8262 -3.91309 15.334c0 5.18848 2.27734 12.9414 5.08301 17.3057l288 448c5.88965 9.16016 16.0303 14.7002 26.9199 14.7002s21.0303 -5.54004 26.9199 -14.7002zM320 356.82l-102.06 -158.761l38.0596 -38.0596l64 64h85.3896z" />
+ <glyph glyph-name="network-wired" unicode="&#xf6ff;" horiz-adv-x="640"
+d="M640 184c0 -8.83984 -7.16016 -16 -16 -16h-104v-40h56c17.6699 0 32 -14.3301 32 -32v-128c0 -17.6699 -14.3301 -32 -32 -32h-160c-17.6699 0 -32 14.3301 -32 32v128c0 17.6699 14.3301 32 32 32h56v40h-304v-40h56c17.6699 0 32 -14.3301 32 -32v-128
+c0 -17.6699 -14.3301 -32 -32 -32h-160c-17.6699 0 -32 14.3301 -32 32v128c0 17.6699 14.3301 32 32 32h56v40h-104c-8.83984 0 -16 7.16016 -16 16v16c0 8.83984 7.16016 16 16 16h280v40h-72c-17.6699 0 -32 14.3301 -32 32v128c0 17.6699 14.3301 32 32 32h192
+c17.6699 0 32 -14.3301 32 -32v-128c0 -17.6699 -14.3301 -32 -32 -32h-72v-40h280c8.83984 0 16 -7.16016 16 -16v-16zM256 320h128v64h-128v-64zM192 0v64h-96v-64h96zM544 0v64h-96v-64h96z" />
+ <glyph glyph-name="otter" unicode="&#xf700;" horiz-adv-x="640"
+d="M608 416c17.6699 0 32 -14.3301 32 -32v-32c0 -53.0195 -42.9805 -96 -96 -96h-22.8604l-92.4697 -49.79l55.1104 -110.21h28.2197c17.6699 0 32 -14.3301 32 -32v-16c0 -8.83984 -7.16016 -16 -16 -16h-80l-74.5098 144.5l-149.49 -80.5h64
+c17.6699 0 32 -14.3301 32 -32v-16c0 -8.83984 -7.16016 -16 -16 -16h-224c-8.83008 0 -16 -7.16992 -16 -16s7.16992 -16 16 -16h112c17.6699 0 32 -14.3301 32 -32s-14.3301 -32 -32 -32h-108.47c-39.3799 0 -75.5205 26.9004 -82.2803 65.7002
+c-4.91016 28.1201 5 54.2197 23.1904 71.7998c23.5596 22.75 39.5596 52.1396 39.5596 84.8896v1.61035c0 106.04 85.96 192 192 192h56l153.25 87.5703c9.66992 5.51953 20.6104 8.42969 31.75 8.42969h20.4902h0.0126953c14.6328 0 34.9033 -8.40039 45.2471 -18.75
+l13.25 -13.25h32zM512 400c-8.83984 0 -16 -7.16016 -16 -16s7.16016 -16 16 -16s16 7.16016 16 16s-7.16016 16 -16 16zM544 304c20.8301 0 38.4297 13.4199 45.0498 32h-77.0498l-118.57 -59.29l13.7705 -27.5498l101.84 54.8398h34.96z" />
+ <glyph glyph-name="ring" unicode="&#xf70b;"
+d="M256 384c145.94 0 256 -61.9102 256 -144v-98.1299c0 -78.3506 -114.62 -141.87 -256 -141.87s-256 63.5195 -256 141.87v98.1299c0 82.0898 110.06 144 256 144zM256 320c-106.04 0 -192 -35.8203 -192 -80c0 -9.25977 3.96973 -18.1201 10.9102 -26.3896
+c44.9395 26.1797 108.859 42.3896 181.09 42.3896s136.15 -16.21 181.09 -42.3896c6.94043 8.26953 10.9102 17.1299 10.9102 26.3896c0 44.1797 -85.96 80 -192 80zM120.43 183.36c34.7305 -14.4307 82.6406 -23.3604 135.57 -23.3604s100.84 8.92969 135.57 23.3604
+c-34.6104 14.71 -81.21 24.6396 -135.57 24.6396s-100.96 -9.92969 -135.57 -24.6396z" />
+ <glyph glyph-name="running" unicode="&#xf70c;" horiz-adv-x="416"
+d="M272 352c-26.5098 0 -48 21.4902 -48 48s21.4902 48 48 48s48 -21.4902 48 -48s-21.4902 -48 -48 -48zM113.69 130.53c7.92969 -17.2402 20.6699 -32.3799 37.9893 -42.6104l10.6699 -6.2998l-8.79004 -20.5205c-7.5293 -17.6494 -24.8594 -29.0898 -44.1094 -29.0898
+h-77.4502c-17.6699 0 -32 14.3301 -32 32s14.3301 32 32 32h66.8896zM384 224.01c17.6699 0 32 -14.3193 32 -31.9902c0 -17.6699 -14.3301 -32 -32 -32h-53.9902c-18.1895 0 -35.1094 10.5508 -43.1094 26.8906l-20.2705 41.4297l-31.3096 -78.2803l61.2393 -36.1396
+c18.75 -11.3096 27.5508 -33.6201 21.6406 -54.3896l-31.6406 -101.061c-4.29004 -13.6797 -16.9092 -22.4502 -30.5195 -22.4502c-3.16992 0 -6.38965 0.480469 -9.58008 1.48047c-16.8604 5.28027 -26.25 23.2305 -20.9697 40.0898l27.4697 87.7305l-84.9795 50.1699
+c-27.6104 16.2998 -38.9209 50.8301 -26.3008 80.3096l37.46 87.3906l-14.6992 4.36914c-7.83008 1.86035 -17.6006 -0.25 -25.2705 -6.13965l-39.6895 -30.4102c-14.0205 -10.7402 -34.0908 -8.10938 -44.8604 5.91992c-10.7705 14.0303 -8.11035 34.1104 5.91992 44.8604
+l39.6699 30.4102c23.0703 17.6895 52.54 23.9395 80.8398 17.1396l71.0801 -21.1396c26.3301 -6.70996 49.2803 -25.3906 61.7803 -50.9404l26.0596 -53.25h44.0303z" />
+ <glyph glyph-name="scroll" unicode="&#xf70e;" horiz-adv-x="640"
+d="M48 448c26.4697 0 48 -21.5303 48 -48v-80h-80c-8.83984 0 -16 7.16016 -16 16v64c0 26.4697 21.5303 48 48 48zM256 35.4297c0 -31.8896 -21.7803 -61.4297 -53.25 -66.5498c-40.0996 -6.53027 -74.75 24.25 -74.75 63.1201v368c0 18.0801 -6.25977 34.5898 -16.4102 48
+h336.41c52.9404 0 96 -43.0596 96 -96v-256h-288v-60.5703zM288 64h336c8.83984 0 16 -7.16016 16 -16c0 -61.8604 -50.1396 -112 -112 -112h-336c52.9404 0 96 43.0703 96 96v32z" />
+ <glyph glyph-name="skull-crossbones" unicode="&#xf714;" horiz-adv-x="448"
+d="M439.15 -5.05957c7.89941 -3.9502 11.1094 -13.5605 7.15918 -21.4707l-14.3096 -28.6299c-3.95996 -7.89941 -13.5703 -11.0996 -21.4697 -7.14941l-186.53 90.7197l-186.52 -90.7197c-7.91016 -3.95996 -17.5205 -0.75 -21.4707 7.14941l-14.3096 28.6299
+c-3.95996 7.91016 -0.75 17.5205 7.14941 21.4707l141.98 69.0596l-141.99 69.0596c-7.89941 3.9502 -11.0996 13.5605 -7.14941 21.46l14.3096 28.6309c3.95996 7.90918 13.5703 11.1094 21.4697 7.15918l186.53 -90.7197l186.53 90.7197
+c7.91016 3.9502 17.5195 0.740234 21.4697 -7.15918l14.3096 -28.6309c3.95996 -7.89941 0.75 -17.5098 -7.14941 -21.46l-141.99 -69.0596zM150 210.72c-41.7803 22.4102 -70 62.75 -70 109.28c0 70.6904 64.4697 128 144 128s144 -57.3096 144 -128
+c0 -46.5303 -28.2197 -86.8701 -70 -109.28l5.5 -25.8701c2.66992 -12.6191 -5.41992 -24.8496 -16.4502 -24.8496h-126.08c-11.0293 0 -19.1201 12.2305 -16.4502 24.8496zM280 336c-17.6504 0 -32 -14.3496 -32 -32s14.3496 -32 32 -32s32 14.3496 32 32
+s-14.3496 32 -32 32zM168 336c-17.6504 0 -32 -14.3496 -32 -32s14.3496 -32 32 -32s32 14.3496 32 32s-14.3496 32 -32 32z" />
+ <glyph glyph-name="slash" unicode="&#xf715;" horiz-adv-x="640"
+d="M594.53 -60.6299l-588.351 454.729c-6.96973 5.41992 -8.22949 15.4707 -2.80957 22.4502l19.6396 25.2705c5.41992 6.97949 15.4805 8.23926 22.46 2.80957l588.351 -454.729c6.96973 -5.41992 8.22949 -15.4707 2.80957 -22.4502l-19.6396 -25.2705
+c-5.41992 -6.97949 -15.4805 -8.22949 -22.46 -2.80957z" />
+ <glyph glyph-name="spider" unicode="&#xf717;" horiz-adv-x="576"
+d="M151.17 280.65l-27.1504 54.2998c-1.86035 3.72949 -3.37109 10.1387 -3.37109 14.3066c0 2.86719 0.735352 7.40234 1.6416 10.123l25.8896 77.6797c2.79004 8.39062 11.8604 12.9209 20.2402 10.1201l15.1699 -5.05957
+c8.39062 -2.7998 12.9102 -11.8604 10.1201 -20.2402l-23.7998 -71.3896l20.29 -40.5801c-1.41016 -4.20996 -2.49023 -8.20996 -3.20996 -11.79l-5.2207 -26.1201h-4.66992zM573.31 98.6201c4.90039 -7.35059 2.9209 -17.2803 -4.43945 -22.1797l-13.3105 -8.88086
+c-7.34961 -4.89941 -17.29 -2.90918 -22.1895 4.44043l-48 72h-47.0605l60.8301 -97.3301c3.16992 -5.08008 4.86035 -10.96 4.86035 -16.96v-77.71c0 -8.83984 -7.16016 -16 -16 -16h-16c-8.83984 0 -16 7.16016 -16 16v73.1104l-74.0801 118.529
+c1 -14.0498 2.08008 -28.1094 2.08008 -42.21c0 -53.0693 -40.7598 -101.43 -96 -101.43s-96 48.3604 -96 101.43c0 14.1006 1.07031 28.1602 2.08008 42.21l-74.0801 -118.529v-73.1104c0 -8.83984 -7.16016 -16 -16 -16h-16c-8.83984 0 -16 7.16016 -16 16v77.7002
+v0.00976562c0 5.0625 2.17773 12.6562 4.86035 16.9502l60.8301 97.3301h-47.0605l-48 -72c-4.89941 -7.35059 -14.8398 -9.33984 -22.1895 -4.44043l-13.3105 8.87988c-7.36035 4.90039 -9.33984 14.8398 -4.43945 22.1904l52.7393 79.1299
+c5.24707 7.86426 17.1768 14.248 26.6299 14.25h77.9404l-68.9902 24.3496c-5.75 1.91895 -13.1465 7.56738 -16.5098 12.6104l-53.5996 80.4102c-4.90039 7.36035 -2.91016 17.29 4.43945 22.1895l13.3105 8.88086c7.35938 4.89941 17.29 2.90918 22.1895 -4.44043
+l50.5703 -75.8301l60.4902 -20.1699h36.0996l10.3701 51.8496c2.18945 10.9707 17.3701 60.1504 69.6299 60.1504s67.4404 -49.1797 69.6299 -60.1504l10.3701 -51.8496h36.0996l60.5 20.1699l50.5605 75.8301c4.89941 7.34961 14.8398 9.33984 22.1895 4.44043
+l13.3105 -8.88086c7.34961 -4.89941 9.33984 -14.8398 4.43945 -22.1895l-53.5996 -80.4102c-3.36328 -5.04297 -10.7598 -10.6914 -16.5098 -12.6104l-68.9902 -24.3594h77.9404c9.45117 -0.00195312 21.377 -6.38672 26.6191 -14.25zM406.09 350.49l-23.7998 71.3896
+c-2.79004 8.37988 1.74023 17.4404 10.1201 20.2402l15.1699 5.05957c8.37988 2.80078 17.4502 -1.73926 20.2402 -10.1201l25.8896 -77.6797c0.908203 -2.72168 1.64551 -7.25781 1.64551 -10.127c0 -4.16699 -1.5127 -10.5752 -3.375 -14.3027l-27.1504 -54.2998
+l-25.9297 -8.65039h-4.66992l-5.2207 26.1201c-0.719727 3.58008 -1.7998 7.58008 -3.20996 11.79z" />
+ <glyph glyph-name="toilet-paper" unicode="&#xf71e;" horiz-adv-x="576"
+d="M128 448h284.44c-36.7705 -38.4805 -60.4404 -108.4 -60.4404 -192v-172.07c0 -53.6494 -11.8799 -87.5693 -24.71 -126.05c-4.36035 -13.0703 -16.5898 -21.8799 -30.3604 -21.8799h-280.92c-10.9199 0 -18.6299 10.7002 -15.1797 21.0596
+c21.3701 64.1006 31.1699 85.75 31.1699 126.87v172.07c0 106.04 42.9805 192 96 192zM96 224c8.83984 0 16 7.16016 16 16s-7.16016 16 -16 16s-16 -7.16016 -16 -16s7.16016 -16 16 -16zM160 224c8.83984 0 16 7.16016 16 16s-7.16016 16 -16 16s-16 -7.16016 -16 -16
+s7.16016 -16 16 -16zM224 224c8.83984 0 16 7.16016 16 16s-7.16016 16 -16 16s-16 -7.16016 -16 -16s7.16016 -16 16 -16zM288 224c8.83984 0 16 7.16016 16 16s-7.16016 16 -16 16s-16 -7.16016 -16 -16s7.16016 -16 16 -16zM480 448c53.0195 0 96 -85.96 96 -192
+s-42.9805 -192 -96 -192s-96 85.96 -96 192s42.9805 192 96 192zM480 192c17.6699 0 32 28.6504 32 64s-14.3301 64 -32 64s-32 -28.6504 -32 -64s14.3301 -64 32 -64z" />
+ <glyph glyph-name="tractor" unicode="&#xf722;" horiz-adv-x="639"
+d="M528 112c48.5996 0 88 -39.4004 88 -88s-39.4004 -88 -88 -88s-88 39.4004 -88 88s39.4004 88 88 88zM528 0c13.2305 0 24 10.7695 24 24s-10.7695 24 -24 24s-24 -10.7695 -24 -24s10.7695 -24 24 -24zM608 288c17.6699 0 32 -14.3301 31.9902 -32v-50.7598
+c0 -8.49023 -3.37012 -16.6299 -9.37012 -22.6299l-50.8203 -50.8203c-15.7295 7.58984 -33.1602 12.2002 -51.7998 12.2002c-39.1396 0 -73.5498 -19.0098 -95.46 -48h-80.54v-6c0 -12.1504 -9.84961 -22 -22 -22h-7.16016
+c-2.9502 -9.90039 -6.91992 -19.46 -11.9102 -28.7207l5.06055 -5.05957c8.58984 -8.58984 8.58984 -22.5195 0 -31.1104l-31.1104 -31.1094c-8.58984 -8.58984 -22.5195 -8.58984 -31.1104 0l-5.05957 5.05957c-9.25977 -4.99023 -18.8203 -8.95996 -28.7197 -11.9102
+v-7.13965c0 -12.1504 -9.85059 -22 -22 -22h-44c-12.1504 0 -22 9.84961 -22 22v7.15039c-9.90039 2.94922 -19.46 6.91992 -28.7207 11.9092l-5.05957 -5.05957c-8.58984 -8.58984 -22.5195 -8.58984 -31.1104 0l-31.1094 31.1104
+c-8.58984 8.58984 -8.58984 22.5195 0 31.1094l5.05957 5.06055c-4.99023 9.26953 -8.9502 18.8193 -11.9102 28.7295h-7.13965c-12.1504 0 -22 9.85059 -22 22v44c0 12.1504 9.84961 22 22 22h7.15039c2.94922 9.90039 6.91992 19.46 11.9092 28.7207l-5.05957 5.05957
+c-8.58984 8.58984 -8.58984 22.5195 0 31.1104l31.1104 31.1094c7.92969 7.93066 20.2598 8.2002 28.8896 1.4707v146.52c0 26.4697 21.5303 48 48 48h133.45h0.046875c17.4834 0 37.2324 -13.0547 44.083 -29.1396l56.0898 -130.86h102.33v40.2002
+c0 29.9902 10.5801 58.8994 29.5 81.7197c6.37988 7.7002 18.04 8.23047 24.7002 0.780273l21.6299 -24.1699c4.87012 -5.43066 5.74023 -13.6904 1.32031 -19.4902c-8.4502 -11.0801 -13.1504 -24.7197 -13.1504 -38.8398v-40.2002h64zM176 32c44.1797 0 80 35.8203 80 80
+s-35.8203 80 -80 80s-80 -35.8203 -80 -80s35.8203 -80 80 -80zM198 288h110.04l-41.1504 96h-106.89v-96h38z" />
+ <glyph glyph-name="user-injured" unicode="&#xf728;" horiz-adv-x="448"
+d="M277.37 436.02l-90.6904 -68.0195h-81.1895c19.0098 46.8701 64.8193 80 118.51 80c19.1104 0 37.0801 -4.46973 53.3701 -11.9805zM342.51 368h-102.52l66.0293 49.5195c15.8203 -13.3193 28.5908 -30.0498 36.4902 -49.5195zM224 192c-70.6904 0 -128 57.3096 -128 128
+c0 5.48047 0.94043 10.7002 1.61035 16h252.779c0.660156 -5.2998 1.61035 -10.5195 1.61035 -16c0 -70.6904 -57.3096 -128 -128 -128zM80 148.3c7.92676 3.51758 21.2812 7.64844 29.8096 9.21973l98.4502 -221.52h-128.26v212.3zM0 -16v41.5996
+c0 41.1406 18.8799 77.5107 48 102.16v-191.76c-26.5098 0 -48 21.4902 -48 48zM256 32c26.4697 0 48 -21.5303 48 -48s-21.5303 -48 -48 -48h-12.71l-42.6699 96h55.3799zM313.6 160c74.2305 0 134.4 -60.1699 134.4 -134.4v-41.5996c0 -26.5098 -21.4902 -48 -48 -48
+h-80.4102c10.1504 13.4102 16.4102 29.9199 16.4102 48c0 44.1104 -35.8896 80 -80 80h-69.5898l-42.6699 96h7.37012c22.2393 -10.1797 46.8799 -16 72.8896 -16s50.6504 5.82031 72.8896 16h16.71z" />
+ <glyph glyph-name="vr-cardboard" unicode="&#xf729;" horiz-adv-x="640"
+d="M608 384c17.6699 0 32 -14.3301 32 -32v-320c0 -17.6699 -14.3301 -32 -32 -32h-160.22c-25.1807 0 -48.0303 14.7695 -58.3604 37.7402l-27.7402 61.6396c-7.88965 17.54 -24.0293 28.6201 -41.6797 28.6201s-33.79 -11.0801 -41.6797 -28.6201l-27.7402 -61.6396
+c-10.3301 -22.9707 -33.1699 -37.7402 -58.3604 -37.7402h-160.22c-17.6699 0 -32 14.3301 -32 32v320c0 17.6699 14.3301 32 32 32h576zM160 144c35.3496 0 64 28.6504 64 64s-28.6504 64 -64 64s-64 -28.6504 -64 -64s28.6504 -64 64 -64zM480 144
+c35.3496 0 64 28.6504 64 64s-28.6504 64 -64 64s-64 -28.6504 -64 -64s28.6504 -64 64 -64z" />
+ <glyph glyph-name="wind" unicode="&#xf72e;"
+d="M156.7 192c48.7002 0 92.2998 -35 98.3994 -83.4004c7.5 -58.5 -38.0996 -108.6 -95.1992 -108.6c-46.6006 0 -85.6006 33.5 -94.2002 77.5996c-1.7998 9.60059 6.09961 18.4004 15.8994 18.4004h32.8008c6.59961 0 13.0996 -3.7998 15.1992 -10.0996
+c4.30078 -12.7002 16.3008 -21.9004 30.4004 -21.9004c19.5 0 34.9004 17.4004 31.5996 37.4004c-2.59961 15.6992 -17.5 26.5996 -33.3994 26.5996h-142.2c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h140.7zM16 224c-8.7998 0 -16 7.2002 -16 16v32
+c0 8.7998 7.2002 16 16 16h334.2c16 0 30.7998 10.9004 33.3994 26.5996c3.30078 20 -12.1992 37.4004 -31.5996 37.4004c-14.0996 0 -26.0996 -9.2002 -30.4004 -21.9004c-2.09961 -6.2998 -8.5 -10.0996 -15.1992 -10.0996h-32.8008
+c-9.69922 0 -17.6992 8.7002 -15.7998 18.2998c9.7998 50.6006 59.5 87.1006 114.9 75.5c36.2002 -7.59961 65.5 -36.8994 73.0996 -73.0996c13 -61.9004 -34.0996 -116.7 -93.7998 -116.7h-336zM400 192c70.5 0 126 -65.5 108.9 -138.7
+c-9.60059 -41.0996 -43.5 -74 -84.7002 -82.7002c-58.9004 -12.5 -111.601 21.7002 -129.4 72.3008c-3.7002 10.2998 4.40039 21.0996 15.2998 21.0996h33.8008c5.7998 0 11.5996 -2.59961 14.5 -7.59961c8.2998 -14.5 23.6992 -24.4004 41.5996 -24.4004
+c26.5 0 48 21.5 48 48s-21.5 48 -48 48h-116.5c-6.59961 25.2002 -20.5 47.4004 -39.7998 64h156.3z" />
+ <glyph glyph-name="wine-bottle" unicode="&#xf72f;"
+d="M507.31 375.43c6.25 -6.25 6.25 -16.3799 0.0107422 -22.6201l-22.6299 -22.6299c-6.25 -6.25 -16.3809 -6.25 -22.6309 0l-76.6699 -76.6699c19.7002 -46.5801 10.7305 -102.41 -27.2295 -140.37l-158.391 -158.39c-24.9893 -24.9902 -65.5195 -24.9902 -90.5098 0
+l-90.5098 90.5098c-24.9902 24.9902 -24.9902 65.5205 0 90.5098l158.38 158.381c37.9697 37.96 93.79 46.9297 140.37 27.2295l76.6699 76.6699c-6.25 6.25 -6.25 16.3799 0 22.6299l22.6299 22.6299c6.25 6.25 16.3799 6.25 22.6299 0zM179.22 24.71l122.04 122.04
+l-90.5098 90.5098l-122.04 -122.04z" />
+ <glyph glyph-name="cloud-meatball" unicode="&#xf73b;"
+d="M48 96c26.5 0 48 -21.5 48 -48s-21.5 -48 -48 -48s-48 21.5 -48 48s21.5 48 48 48zM464 96c26.5 0 48 -21.5 48 -48s-21.5 -48 -48 -48s-48 21.5 -48 48s21.5 48 48 48zM345 84.9004c13.5 -7 23 -20.7002 23 -36.9004s-9.5 -29.9004 -23 -36.9004
+c4.59961 -14.5 1.7002 -30.7998 -9.7998 -42.2998c-8.2002 -8.2002 -18.9004 -12.2998 -29.7002 -12.2998c-4.2998 0 -8.5 1.2002 -12.5996 2.5c-7 -13.5 -20.7002 -23 -36.9004 -23s-29.9004 9.5 -36.9004 23c-4.09961 -1.40039 -8.2998 -2.5 -12.5996 -2.5
+c-10.7998 0 -21.5 4.09961 -29.7002 12.2998c-11.5 11.5 -14.5 27.7998 -9.89941 42.2998c-13.5 7 -23 20.7002 -23 36.9004s9.5 29.9004 23 36.9004c-4.5 14.5 -1.60059 30.7998 9.89941 42.2998s27.7998 14.5 42.2998 9.89941c7 13.5 20.7002 23 36.9004 23
+s29.9004 -9.5 36.9004 -23c14.5 4.5 30.7998 1.60059 42.2998 -9.89941c11.3994 -11.5 14.3994 -27.7998 9.7998 -42.2998zM512 224c0 -53 -43 -96 -96 -96h-43.4004c-3.5 8 -8.39941 15.4004 -14.7998 21.7998c-13.5 13.5 -31.5 21.1006 -50.7998 21.2998
+c-13.5 13.2002 -31.7002 20.9004 -51 20.9004s-37.5 -7.7002 -51 -20.9004c-19.2998 -0.199219 -37.2998 -7.7998 -50.7998 -21.2998c-6.40039 -6.39941 -11.2002 -13.7998 -14.7998 -21.7998h-43.4004c-53 0 -96 43 -96 96c0 42.5 27.7998 78.2002 66.0996 90.7998
+c-1.2998 6.90039 -2.09961 13.9004 -2.09961 21.2002c0 61.9004 50.0996 112 112 112c43.2998 0 80.4004 -24.7998 99 -60.7998c14.7002 17.5 36.4004 28.7998 61 28.7998c44.2002 0 80 -35.7998 80 -80c0 -5.59961 -0.5 -11 -1.59961 -16.2002
+c0.5 0 1 0.200195 1.59961 0.200195c53 0 96 -43 96 -96z" />
+ <glyph glyph-name="cloud-moon-rain" unicode="&#xf73c;" horiz-adv-x="575"
+d="M350.5 222.5c37.2998 -6.7998 65.5 -39.2998 65.5 -78.5c0 -44.2002 -35.7998 -80 -80 -80h-256c-44.2002 0 -80 35.7998 -80 80c0 38.7998 27.5996 71.0996 64.2002 78.4004c0 0.5 -0.200195 1.09961 -0.200195 1.59961c0 53 43 96 96 96
+c32.0996 0 60.2998 -15.9004 77.7002 -40c10.3994 5 22 8 34.2998 8c39.2002 0 71.5996 -28.2998 78.5 -65.5zM567.9 224.2c6.19922 1.2002 10.5996 -6 6.39941 -10.7998c-27 -33.1006 -67.8994 -53.3008 -112.6 -53.3008c-5.2002 0 -10.1006 1 -15.2002 1.5
+c-6.2002 39.4004 -33.0996 72.5 -70.2002 86.8008c-10.7002 27.8994 -32.2002 49.7998 -58.8994 61.6992c3.2998 76.7002 66.5 137.9 144.399 137.9c8.90039 0 17.7998 -0.799805 26.5 -2.40039c6.2002 -1.09961 7.60059 -9.39941 2.10059 -12.5
+c-35.6006 -20.0996 -57.5 -57.5 -57.5 -98.0996c0 -70.5 64.5996 -124.1 135 -110.8zM364.5 29.9004c7.7002 -4.40039 10.2998 -14.1006 6 -21.8008l-36.5996 -64c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961
+c-7.69922 4.40039 -10.3994 14.1006 -6 21.8008l36.6006 64c4.39941 7.7998 14.2002 10.2998 21.7998 6zM268.5 29.9004c7.7002 -4.40039 10.2998 -14.1006 6 -21.8008l-36.5996 -64c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961
+c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961c-7.69922 4.40039 -10.3994 14.1006 -6 21.8008l36.6006 64c4.39941 7.7998 14.2002 10.2998 21.7998 6zM172.5 29.9004c7.7002 -4.40039 10.2998 -14.1006 6 -21.8008l-36.5996 -64
+c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961c-7.69922 4.40039 -10.3994 14.1006 -6 21.8008l36.6006 64c4.39941 7.7998 14.2002 10.2998 21.7998 6zM76.5 29.9004c7.7002 -4.40039 10.2998 -14.1006 6 -21.8008
+l-36.5996 -64c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961c-7.69922 4.40039 -10.3994 14.1006 -6 21.8008l36.6006 64c4.39941 7.7998 14.2002 10.2998 21.7998 6z" />
+ <glyph glyph-name="cloud-rain" unicode="&#xf73d;"
+d="M416 320c53 0 96 -43 96 -96s-43 -96 -96 -96h-320c-53 0 -96 43 -96 96c0 42.5 27.7998 78.2002 66.0996 90.7998c-1.2998 6.90039 -2.09961 13.9004 -2.09961 21.2002c0 61.9004 50.0996 112 112 112c43.2998 0 80.4004 -24.7998 99 -60.7998
+c14.7002 17.5 36.4004 28.7998 61 28.7998c44.2002 0 80 -35.7998 80 -80c0 -5.59961 -0.5 -11 -1.59961 -16.2002c0.5 0 1 0.200195 1.59961 0.200195zM88 73.7998c2.5 8.5 13.7998 8.10059 16 0c12.7998 -44.5996 40 -56.2998 40 -87.7002
+c0 -27.6992 -21.5 -50.0996 -48 -50.0996s-48 22.4004 -48 50.0996c0 31.3008 27.2002 43.3008 40 87.7002zM248 73.7998c2.5 8.5 13.7998 8.10059 16 0c12.7998 -44.5996 40 -56.2998 40 -87.7002c0 -27.6992 -21.5 -50.0996 -48 -50.0996s-48 22.4004 -48 50.0996
+c0 31.3008 27.2002 43.3008 40 87.7002zM408 73.7998c2.5 8.5 13.7998 8.10059 16 0c12.7998 -44.5996 40 -56.2998 40 -87.7002c0 -27.6992 -21.5 -50.0996 -48 -50.0996s-48 22.4004 -48 50.0996c0 31.3008 27.2002 43.3008 40 87.7002z" />
+ <glyph glyph-name="cloud-showers-heavy" unicode="&#xf740;"
+d="M183.9 77.9004c7.69922 -4.40039 10.3994 -14.2002 6 -21.8008l-64 -112c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961c-7.69922 4.30078 -10.3994 14.1006 -6 21.8008l64 112
+c4.40039 7.7998 14.2002 10.3994 21.8008 6zM279.9 77.9004c7.69922 -4.40039 10.3994 -14.2002 6 -21.8008l-64 -112c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961c-7.69922 4.30078 -10.3994 14.1006 -6 21.8008
+l64 112c4.40039 7.7998 14.2002 10.3994 21.8008 6zM87.9004 77.9004c7.69922 -4.40039 10.3994 -14.2002 6 -21.8008l-64 -112c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961
+c-7.69922 4.30078 -10.3994 14.1006 -6 21.8008l64 112c4.40039 7.7998 14.2002 10.3994 21.8008 6zM471.9 77.9004c7.69922 -4.40039 10.3994 -14.2002 6 -21.8008l-64 -112c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961
+c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961c-7.69922 4.30078 -10.3994 14.1006 -6 21.8008l64 112c4.40039 7.7998 14.2002 10.3994 21.8008 6zM375.9 77.9004c7.69922 -4.40039 10.3994 -14.2002 6 -21.8008l-64 -112
+c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961c-7.69922 4.30078 -10.3994 14.1006 -6 21.8008l64 112c4.40039 7.7998 14.2002 10.3994 21.8008 6zM416 320c53 0 96 -43 96 -96s-43 -96 -96 -96h-320
+c-53 0 -96 43 -96 96c0 42.5 27.7998 78.2002 66.0996 90.7998c-1.2998 6.90039 -2.09961 13.9004 -2.09961 21.2002c0 61.9004 50.2002 112 112 112c43.2998 0 80.4004 -24.7998 99 -60.7998c14.7002 17.5 36.4004 28.7998 61 28.7998c44.2002 0 80 -35.7998 80 -80
+c0 -5.59961 -0.5 -11 -1.59961 -16.2002c0.5 0 1 0.200195 1.59961 0.200195z" />
+ <glyph glyph-name="cloud-sun-rain" unicode="&#xf743;" horiz-adv-x="576"
+d="M510.5 222.5c37.2998 -6.7998 65.5 -39.2998 65.5 -78.5c0 -44.2002 -35.7998 -80 -80 -80h-256c-44.2002 0 -80 35.7998 -80 80c0 38.7998 27.5996 71 64.2002 78.4004c0 0.5 -0.200195 1.09961 -0.200195 1.59961c0 53 43 96 96 96
+c32.0996 0 60.2998 -15.9004 77.7002 -40c10.3994 5 22 8 34.2998 8c39.2002 0 71.5996 -28.2998 78.5 -65.5zM124.1 188.1c3.10059 -3.09961 6.60059 -5.59961 10.2002 -8.2998c-3.89941 -11.3994 -6.2002 -23.3994 -6.2002 -35.8994c0 -2.5 0.5 -4.80078 0.700195 -7.2002
+l-59.2002 -19.7002c-10.1992 -3.2998 -19.8994 6.2998 -16.5 16.5l25.1006 75.2998l-71 35.5c-9.60059 4.7998 -9.60059 18.5 0 23.2998l71 35.5l-25.1006 75.3008c-3.39941 10.1992 6.2002 19.8994 16.4004 16.5l75.2998 -25.1006l35.5 71
+c4.7998 9.60059 18.5 9.60059 23.2998 0l35.5 -71l75.3008 25.1006c10.1992 3.39941 19.8994 -6.2002 16.5 -16.4004l-8.90039 -26.7002c-0.700195 0 -1.2998 0.200195 -2 0.200195c-25.5996 0 -49.2002 -7.7998 -69.2002 -20.7002
+c-37.5996 29.4004 -92.0996 27.2002 -126.7 -7.39941c-37.3994 -37.5 -37.3994 -98.4004 0 -135.801zM193.9 246.1c-18.2002 -8.2998 -33.5 -21.2998 -44.8008 -37.1992c-12.8994 11.6992 -21.0996 28.3994 -21.0996 47.0996c0 35.2998 28.7002 64 64 64
+c12.4004 0 24 -3.7002 33.7998 -9.90039c-16.0996 -17.5996 -27.5996 -39.5 -31.8994 -64zM524.5 29.9004c7.7002 -4.40039 10.2998 -14.1006 6 -21.8008l-36.5996 -64c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961
+c-7.69922 4.40039 -10.3994 14.1006 -6 21.8008l36.6006 64c4.39941 7.7998 14.2002 10.2998 21.7998 6zM428.5 29.9004c7.7002 -4.40039 10.2998 -14.1006 6 -21.8008l-36.5996 -64c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961
+c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961c-7.69922 4.40039 -10.3994 14.1006 -6 21.8008l36.6006 64c4.39941 7.7998 14.2002 10.2998 21.7998 6zM332.5 29.9004c7.7002 -4.40039 10.2998 -14.1006 6 -21.8008l-36.5996 -64
+c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961c-7.69922 4.40039 -10.3994 14.1006 -6 21.8008l36.6006 64c4.39941 7.7998 14.2002 10.2998 21.7998 6zM236.5 29.9004c7.7002 -4.40039 10.2998 -14.1006 6 -21.8008
+l-36.5996 -64c-3 -5.19922 -8.40039 -8.09961 -13.9004 -8.09961c-2.7002 0 -5.40039 0.700195 -7.90039 2.09961c-7.69922 4.40039 -10.3994 14.1006 -6 21.8008l36.6006 64c4.39941 7.7998 14.2002 10.2998 21.7998 6z" />
+ <glyph glyph-name="democrat" unicode="&#xf747;" horiz-adv-x="639"
+d="M637.3 191.1c4.90039 -7.39941 2.90039 -17.2998 -4.39941 -22.2998l-26.6006 -17.7002c-7.39941 -4.89941 -17.2998 -3 -22.2002 4.40039c-21.7998 32.7002 -23.8994 38.0996 -40.0996 50.2998v-77.7998h-352l-54 108l-38.0996 -34.5996
+c-6 -6 -14.1006 -9.40039 -22.6006 -9.40039h-31c-12.0996 0 -23.2002 6.90039 -28.5996 17.7002l-14.2998 28.5996c-5.40039 10.7998 -4.30078 23.7998 3 33.5l74.5996 99.2998c1.7002 2.30078 4.2002 3.7002 6.40039 5.40039c-4 2.2002 -8 4.2002 -11.3008 7.5
+c-16.3994 16.4004 -18.3994 41.7998 -6.09961 60.4004c2.7998 4.19922 8.7998 4.7998 12.4004 1.19922l42.8994 -42.7998l41.7998 41.7998c4.40039 4.40039 11.8008 3.7002 15.2002 -1.5c15.1006 -22.7998 12.6006 -53.7998 -7.5 -73.8994l81.2002 -81.2002h235.6
+c50.8008 0 97.9004 -25.2002 126.101 -67.5zM296.2 204.7c3 2.89941 1.39941 7.89941 -2.60059 8.5l-22.7998 3.2998l-10.2002 20.7002c-1.89941 3.7002 -7.19922 3.7002 -9 0l-10.1992 -20.7002l-22.8008 -3.2998c-4.09961 -0.600586 -5.7998 -5.60059 -2.7998 -8.5
+l16.5 -16.1006l-3.89941 -22.6992c-0.700195 -4.10059 3.59961 -7.2002 7.19922 -5.30078l20.4004 10.7002l20.4004 -10.7002c3.59961 -1.89941 7.89941 1.2002 7.19922 5.30078l-3.89941 22.6992zM408.2 204.7c3 2.89941 1.39941 7.89941 -2.60059 8.5l-22.7998 3.2998
+l-10.2002 20.7002c-1.89941 3.7002 -7.19922 3.7002 -9 0l-10.1992 -20.7002l-22.8008 -3.2998c-4.09961 -0.600586 -5.7998 -5.60059 -2.7998 -8.5l16.5 -16.1006l-3.89941 -22.6992c-0.700195 -4.10059 3.59961 -7.2002 7.19922 -5.30078l20.4004 10.7002
+l20.4004 -10.7002c3.59961 -1.89941 7.89941 1.2002 7.19922 5.30078l-3.89941 22.6992zM520.2 204.7c3 2.89941 1.39941 7.89941 -2.60059 8.5l-22.7998 3.2998l-10.2002 20.7002c-1.89941 3.7002 -7.19922 3.7002 -9 0l-10.1992 -20.7002l-22.8008 -3.2998
+c-4.09961 -0.600586 -5.7998 -5.60059 -2.7998 -8.5l16.5 -16.1006l-3.89941 -22.6992c-0.700195 -4.10059 3.59961 -7.2002 7.19922 -5.30078l20.4004 10.7002l20.4004 -10.7002c3.59961 -1.89941 7.89941 1.2002 7.19922 5.30078l-3.89941 22.6992zM192 -48v144h352v-144
+c0 -8.7998 -7.2002 -16 -16 -16h-64c-8.7998 0 -16 7.2002 -16 16v80h-160v-80c0 -8.7998 -7.2002 -16 -16 -16h-64c-8.7998 0 -16 7.2002 -16 16z" />
+ <glyph glyph-name="flag-usa" unicode="&#xf74d;"
+d="M32 448c17.7002 0 32 -14.2998 32 -32v-464c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v464c0 17.7002 14.2998 32 32 32zM299.9 144.4c31.0996 -8.2002 62.5996 -16.5 100 -16.6006c31.8994 0 68.5 6.7998 112.1 24.1006v-36
+c0 -12.3008 -7.09961 -23.8008 -18.5 -28.8008c-175.8 -76.3994 -211.8 69.1006 -397.5 -23.0996v69.2998c92.2002 39.9004 146.7 26.2002 203.9 11.1006zM299.9 240.4c31.0996 -8.2002 62.5996 -16.5 100 -16.5c31.8994 0 68.5 6.7998 112.1 24.0996v-61.5
+c-92.2002 -39.9004 -146.7 -26.2002 -203.9 -11.0996c-57.5 15.0996 -117.3 30 -212.1 -7.60059v61.5c92.2002 39.9004 146.7 26.2002 203.9 11.1006zM309.4 366.3c-7 2.10059 -14.1006 4.2002 -21.3008 6.2002v-33.2002c4 -1.09961 8.10059 -2.2998 12.1006 -3.5
+c26.7002 -8.09961 52.8994 -15.8994 85.7002 -15.8994c32.7998 0 72.5 7.89941 126.1 31.3994v-68.8994c-92.2002 -39.8008 -146.7 -26.1006 -203.9 -11.1006c-57.5 15.1006 -117.3 29.9004 -212.1 -7.59961v151.899c209.4 94.6006 195.3 -59.0996 366.6 28.2002
+c22.6006 11.5 49.4004 -1.5 49.4004 -26.5996v-30.7998c-105.2 -49.1006 -150.8 -35.7002 -202.6 -20.1006zM160 319.9c8.7998 0 16 7.09961 16 16c0 8.89941 -7.2002 16 -16 16s-16 -7.2002 -16 -16c0 -8.90039 7.2002 -16 16 -16zM160 375.7c8.7998 0 16 7.2002 16 16
+c0 8.89941 -7.2002 16 -16 16s-16 -7.2002 -16 -16c0 -8.90039 7.2002 -16 16 -16zM224 327.8c8.7998 0 16 7.2002 16 16c0 8.90039 -7.2002 16 -16 16s-16 -7.2002 -16 -16c0 -8.89941 7.2002 -16 16 -16zM224 383.7c8.7998 0 16 7.2002 16 16c0 8.89941 -7.2002 16 -16 16
+s-16 -7.2002 -16 -16c0 -8.90039 7.2002 -16 16 -16z" />
+ <glyph glyph-name="meteor" unicode="&#xf753;"
+d="M491.2 447.3c12.3994 3.7002 23.7998 -7.7002 20.2002 -20.0996c-11.6006 -38.7002 -34.3008 -111.7 -61.3008 -187.7c7 -2.09961 13.4004 -4 18.6006 -5.59961c9.7002 -3 14.2002 -13.9004 9.5 -22.9004c-22.1006 -42.2998 -82.7002 -152.8 -142.5 -214.4
+c-1 -1.09961 -2 -2.5 -3 -3.5c-38.1006 -38.0996 -88 -57.0996 -137.9 -57.0996c-49.8994 -0.0996094 -99.7998 19 -137.8 57c-38 38.0996 -57 88 -57 137.8c0 49.9004 19 99.7998 57.0996 137.8c1 1 2.40039 2 3.5 3c61.6006 59.9004 172 120.4 214.4 142.5
+c9 4.7002 19.9004 0.200195 22.9004 -9.5c1.59961 -5.09961 3.5 -11.5996 5.59961 -18.5996c75.9004 27 149 49.7002 187.7 61.2998zM192 0c70.7002 0 128 57.2998 128 128s-57.2998 128 -128 128s-128 -57.2998 -128 -128s57.2998 -128 128 -128zM160 192
+c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32s-32 14.2998 -32 32s14.2998 32 32 32zM208 96c8.7998 0 16 -7.2002 16 -16s-7.2002 -16 -16 -16s-16 7.2002 -16 16s7.2002 16 16 16z" />
+ <glyph glyph-name="person-booth" unicode="&#xf756;" horiz-adv-x="576"
+d="M192 -48v176h64v-176c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16zM224 224c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32h-57.5c-12.7998 0 -24.7998 5 -33.9004 14.0996l-20.8994 20.9004v-80.5996l41.2002 -61.3008
+c4.39941 -8.7998 6.69922 -18.6992 6.69922 -28.5996v-56.5c0 -17.7002 -14.2998 -32 -32 -32c-17.6992 0 -32 14.2998 -32 32v56l-29.0996 43c-0.900391 0.400391 -1.59961 1.2002 -2.5 1.7002l-0.0996094 -100.7c0 -17.7002 -14.4004 -32 -32 -32
+c-17.6006 0 -31.9004 14.2998 -31.9004 32l0.200195 160l-0.200195 95.9004c0 17.0996 6.7002 33.1992 18.7002 45.2998c12.0996 12.0996 28.2002 18.7998 45.2998 18.7998h18.7002c17 0 33.0996 -6.59961 45.2002 -18.7002l45.1992 -45.2998h50.9004zM64 320
+c-26.5 0 -48 21.5 -48 48s21.5 48 48 48s48 -21.5 48 -48s-21.5 -48 -48 -48zM288 416v32h192v-416c0 -17.7002 -14.2998 -32 -32 -32s-32 14.2998 -32 32c0 -17.7002 -14.2998 -32 -32 -32c-17.0996 0 -30.7998 13.5 -31.7002 30.4004
+c-4.2998 -21.3008 -17.0996 -30.4004 -32.2998 -30.4004c-18.4004 0 -35.7002 16.7002 -31.4004 38.2998l30.9004 154.601zM192 416c0 17.7002 14.2998 32 32 32h32v-192h-64v160zM544 448c17.7002 0 32 -14.2998 32 -32v-464c0 -8.7998 -7.2002 -16 -16 -16h-32
+c-8.7998 0 -16 7.2002 -16 16v496h32z" />
+ <glyph glyph-name="poo-storm" unicode="&#xf75a;" horiz-adv-x="447"
+d="M308 112c9.2002 0 15 -10 10.4004 -18l-88 -152c-2.2002 -3.7998 -6.2002 -6 -10.4004 -6c-7.7002 0 -13.5 7.2002 -11.7002 14.7998l23 97.2002h-59.2998c-7.2998 0 -12.9004 6.40039 -11.9004 13.5996l16 120c0.800781 5.90039 5.90039 10.4004 11.9004 10.4004h68
+c7.90039 0 13.5996 -7.5 11.5996 -15.0996l-17.2998 -64.9004h57.7002zM374.4 223.3c41 -3.2998 73.5996 -37.5 73.5 -79.2998c0 -44 -36 -80 -80 -80h-30l8.09961 14c7.7998 13.5 7.7998 30.4004 0 44s-22.4004 22 -38.0996 22h-16l6.59961 24.7002
+c3.59961 13.2998 0.799805 27.2002 -7.59961 38.0996c-8.40039 10.9004 -21.1006 17.2002 -34.9004 17.2002h-68c-22 0 -40.7002 -16.4004 -43.7002 -38.2002l-16 -120c0 -0.599609 0.100586 -1.2002 0 -1.7998h-48.2998c-44 0 -80 36 -80 80
+c0 41.7998 32.5996 76 73.5996 79.2998c-5.89941 9.60059 -9.59961 20.6006 -9.59961 32.7002c0 35.2998 28.7002 64 64 64h16c44.2002 0 80 35.7998 80 80c0 17.4004 -5.7002 33.4004 -15.0996 46.5c4.89941 0.799805 9.89941 1.5 15.0996 1.5c53 0 96 -43 96 -96
+c0 -11.2998 -2.2998 -21.9004 -5.90039 -32h5.90039c35.2998 0 64 -28.7002 64 -64c0 -12.0996 -3.7002 -23.0996 -9.59961 -32.7002z" />
+ <glyph glyph-name="rainbow" unicode="&#xf75b;" horiz-adv-x="576"
+d="M268.3 415.3c167.7 11.2998 307.7 -122 307.7 -287.3v-144c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v136.2c0 113.8 -81.5996 215.399 -194.5 229.899c-136.6 17.6006 -253.5 -88.8994 -253.5 -222.1v-144c0 -8.7998 -7.2002 -16 -16 -16h-32
+c-8.7998 0 -16 7.2002 -16 16v133.8c0 153.3 115.4 287.3 268.3 297.5zM262.7 318.4c117.1 15 217.3 -76.2002 217.3 -190.4v-144c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v138.3c0 66.9004 -48.7002 126.601 -115.2 133.101
+c-76.2998 7.39941 -140.8 -52.6006 -140.8 -127.4v-144c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v137.3c0 97.6006 70 184.7 166.7 197.101zM268.9 222.1c61.2998 11.9004 115.1 -34.8994 115.1 -94.0996v-144c0 -8.7998 -7.2002 -16 -16 -16h-32
+c-8.7998 0 -16 7.2002 -16 16v144c0 17.5996 -14.2998 32 -32 32s-32 -14.4004 -32 -32v-144c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v140.2c0 46.3994 31.2998 89 76.9004 97.8994z" />
+ <glyph glyph-name="republican" unicode="&#xf75e;" horiz-adv-x="640"
+d="M544 256v-64h-544v64c0 88.4004 71.5996 160 160 160h224c88.4004 0 160 -71.5996 160 -160zM176.3 277.6c3.5 3.5 1.60059 9.5 -3.39941 10.2002l-27.4004 4l-12.2002 24.7998c-2.09961 4.40039 -8.5 4.5 -10.7002 0l-12.1992 -24.7998l-27.4004 -4
+c-4.90039 -0.700195 -6.90039 -6.7002 -3.2998 -10.2002l19.7998 -19.2998l-4.7002 -27.2998c-0.799805 -4.90039 4.40039 -8.59961 8.7002 -6.2998l24.5 12.8994l24.5 -12.8994c4.40039 -2.2998 9.5 1.39941 8.7002 6.2998l-4.7002 27.2998zM320.3 277.6
+c3.5 3.5 1.60059 9.5 -3.39941 10.2002l-27.4004 4l-12.2002 24.7998c-2.09961 4.40039 -8.5 4.5 -10.7002 0l-12.1992 -24.7998l-27.4004 -4c-4.90039 -0.700195 -6.90039 -6.7002 -3.2998 -10.2002l19.7998 -19.2998l-4.7002 -27.2998
+c-0.799805 -4.90039 4.40039 -8.59961 8.7002 -6.2998l24.5 12.8994l24.5 -12.8994c4.40039 -2.2998 9.5 1.39941 8.7002 6.2998l-4.7002 27.2998zM464.3 277.6c3.5 3.5 1.60059 9.5 -3.39941 10.2002l-27.4004 4l-12.2002 24.7998c-2.09961 4.40039 -8.5 4.5 -10.7002 0
+l-12.1992 -24.7998l-27.4004 -4c-4.90039 -0.700195 -6.90039 -6.7002 -3.2998 -10.2002l19.7998 -19.2998l-4.7002 -27.2998c-0.799805 -4.90039 4.40039 -8.59961 8.7002 -6.2998l24.5 12.8994l24.5 -12.8994c4.40039 -2.2998 9.5 1.39941 8.7002 6.2998l-4.7002 27.2998z
+M624 128c8.7998 0 16 -7.2002 16 -16v-64c0 -46.9004 -40.5996 -84.5 -88.4004 -79.5996c-41.5996 4.19922 -71.5996 42.5 -71.5996 84.2998v43.2998h-32v-112c0 -8.7998 -7.2002 -16 -16 -16h-96c-8.7998 0 -16 7.2002 -16 16v80h-192v-80c0 -8.7998 -7.2002 -16 -16 -16
+h-96c-8.7998 0 -16 7.2002 -16 16v176h544v-112c0 -8.7998 7.2002 -16 16 -16s16 7.2002 16 16v64c0 8.7998 7.2002 16 16 16h32z" />
+ <glyph glyph-name="smog" unicode="&#xf75f;" horiz-adv-x="640"
+d="M624 80c8.7998 0 16 -7.2002 16 -16v-16c0 -8.7998 -7.2002 -16 -16 -16h-544c-8.7998 0 -16 7.2002 -16 16v16c0 8.7998 7.2002 16 16 16h544zM144 -16c8.7998 0 16 -7.2002 16 -16v-16c0 -8.7998 -7.2002 -16 -16 -16h-128c-8.7998 0 -16 7.2002 -16 16v16
+c0 8.7998 7.2002 16 16 16h128zM560 -16c8.7998 0 16 -7.2002 16 -16v-16c0 -8.7998 -7.2002 -16 -16 -16h-336c-8.7998 0 -16 7.2002 -16 16v16c0 8.7998 7.2002 16 16 16h336zM144 160c-79.5 0 -144 64.5 -144 144s64.5 144 144 144c41 0 77.7998 -17.2998 104 -44.7998
+c26.2002 27.5 63 44.7998 104 44.7998c54.7998 0 102 -31 126.3 -76.0996c15 7.5 31.7002 12.0996 49.7002 12.0996c61.9004 0 112 -50.0996 112 -112s-50.0996 -112 -112 -112h-60.0996c-22.6006 -19.7002 -51.6006 -32 -83.9004 -32s-61.4004 12.2998 -83.9004 32h-156.1z
+" />
+ <glyph glyph-name="temperature-high" unicode="&#xf769;"
+d="M416 448c52.9004 0 96 -43.0996 96 -96s-43.0996 -96 -96 -96s-96 43.0996 -96 96s43.0996 96 96 96zM416 320c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM256 336v-166.5c19.7002 -24.5996 32 -55.5 32 -89.5
+c0 -79.5 -64.5 -144 -144 -144s-144 64.5 -144 144c0 34 12.2998 64.7998 32 89.5v166.5c0 61.9004 50.0996 112 112 112s112 -50.0996 112 -112zM144 0c44.0996 0 80 35.9004 80 80c0 25.5 -12.2002 49 -32 63.7998v192.2c0 26.5 -21.5 48 -48 48s-48 -21.5 -48 -48v-192.2
+c-19.7998 -14.8994 -32 -38.2998 -32 -63.7998c0 -44.0996 35.9004 -80 80 -80zM160 125.1c18.5996 -6.59961 32 -24.1992 32 -45.0996c0 -26.5 -21.5 -48 -48 -48s-48 21.5 -48 48c0 20.9004 13.4004 38.5 32 45.0996v210.9c0 8.7998 7.2002 16 16 16s16 -7.2002 16 -16
+v-210.9z" />
+ <glyph glyph-name="temperature-low" unicode="&#xf76b;"
+d="M416 448c52.9004 0 96 -43.0996 96 -96s-43.0996 -96 -96 -96s-96 43.0996 -96 96s43.0996 96 96 96zM416 320c17.7002 0 32 14.2998 32 32s-14.2998 32 -32 32s-32 -14.2998 -32 -32s14.2998 -32 32 -32zM256 336v-166.5c19.7002 -24.5996 32 -55.5 32 -89.5
+c0 -79.5 -64.5 -144 -144 -144s-144 64.5 -144 144c0 34 12.2998 64.7998 32 89.5v166.5c0 61.9004 50.0996 112 112 112s112 -50.0996 112 -112zM144 0c44.0996 0 80 35.9004 80 80c0 25.5 -12.2002 49 -32 63.7998v192.2c0 26.5 -21.5 48 -48 48s-48 -21.5 -48 -48v-192.2
+c-19.7998 -14.8994 -32 -38.2998 -32 -63.7998c0 -44.0996 35.9004 -80 80 -80zM160 125.1c18.5996 -6.59961 32 -24.1992 32 -45.0996c0 -26.5 -21.5 -48 -48 -48s-48 21.5 -48 48c0 20.9004 13.4004 38.5 32 45.0996v18.9004c0 8.7998 7.2002 16 16 16s16 -7.2002 16 -16
+v-18.9004z" />
+ <glyph glyph-name="vote-yea" unicode="&#xf772;" horiz-adv-x="640"
+d="M608 128c17.7002 0 32 -14.2998 32 -32v-96c0 -17.7002 -14.2998 -32 -32 -32h-576c-17.7002 0 -32 14.2998 -32 32v96c0 17.7002 14.2998 32 32 32h64v-64h-22.4004c-5.2998 0 -9.59961 -3.59961 -9.59961 -8v-16c0 -4.40039 4.2998 -8 9.59961 -8h492.801
+c5.2998 0 9.59961 3.59961 9.59961 8v16c0 4.40039 -4.2998 8 -9.59961 8h-22.4004v64h64zM512 64h-384v319.7c0 17.7998 14.5 32.2998 32.4004 32.2998h319.3c17.7998 0 32.2998 -14.4004 32.2998 -32.2998v-319.7zM211.2 246c-4.2002 -4.2002 -4.2998 -11 0 -15.2002
+l74.0996 -74.7002c4.2002 -4.2998 11 -4.2998 15.2002 -0.0996094l128.3 127.2c4.2998 4.2002 4.2998 11 0.100586 15.2002l-25.3008 25.5c-4.19922 4.2998 -11 4.2998 -15.1992 0.0996094l-95.2002 -94.4004l-41.2998 41.6006
+c-4.2002 4.2998 -11 4.2998 -15.2002 0.0996094z" />
+ <glyph glyph-name="water" unicode="&#xf773;" horiz-adv-x="575"
+d="M562.1 64.0996c8.10059 -0.899414 13.9004 -8.2998 13.8008 -16.2998v-31.5996c0 -9.10059 -7.60059 -16.7998 -16.7002 -16c-28.2002 2.5 -55.7998 11.5996 -79.1006 25.7998c-55 -34.0996 -135.5 -34.5996 -192 0c-55 -34.0996 -135.5 -34.5996 -192 0
+c-23.2998 -14.5 -50.5996 -23.4004 -79.3994 -25.9004c-9.10059 -0.799805 -16.7002 6.90039 -16.7002 16v32.2002c0 7.90039 5.7002 14.9004 13.5996 15.7002c21.7002 2.2998 42.2002 10.2002 57.7002 22.4004c13.7998 10.8994 33.6006 13.1992 47.1006 2
+c38.2998 -31.7002 107.199 -31.8008 145.199 -1.7002c13.7002 10.8994 33.2002 13 46.7002 1.7998c38.5 -31.9004 107.8 -31.9004 145.7 -1.5c14 11.2998 34.0996 11.0996 48.2002 0c15.7998 -12.4004 36.3994 -20.5 57.8994 -22.9004zM562.1 208.1
+c8.10059 -0.899414 13.9004 -8.2998 13.8008 -16.2998v-31.5996c0 -9.10059 -7.60059 -16.7998 -16.7002 -16c-28.2002 2.5 -55.7998 11.5996 -79.1006 25.7998c-55 -34.0996 -135.5 -34.5996 -192 0c-55 -34.0996 -135.5 -34.5996 -192 0
+c-23.2998 -14.5 -50.5996 -23.4004 -79.3994 -25.9004c-9.10059 -0.799805 -16.7002 6.90039 -16.7002 16v32.2002c0 7.90039 5.7002 14.9004 13.5996 15.7002c21.7002 2.2998 42.2002 10.2002 57.7002 22.4004c13.7998 10.8994 33.6006 13.1992 47.1006 2
+c38.2998 -31.7002 107.199 -31.8008 145.199 -1.7002c13.7002 10.8994 33.2002 13 46.7002 1.7998c38.5 -31.9004 107.8 -31.9004 145.7 -1.5c14 11.2998 34.0996 11.0996 48.2002 0c15.7998 -12.4004 36.3994 -20.5 57.8994 -22.9004zM562.1 352.1
+c8.10059 -0.899414 13.9004 -8.2998 13.8008 -16.2998v-31.5996c0 -9.10059 -7.60059 -16.7998 -16.7002 -16c-28.2002 2.5 -55.7998 11.5996 -79.1006 25.7998c-55 -34.0996 -135.5 -34.5996 -192 0c-55 -34.0996 -135.5 -34.5996 -192 0
+c-23.2998 -14.5 -50.5996 -23.4004 -79.3994 -25.9004c-9.10059 -0.799805 -16.7002 6.90039 -16.7002 16v32.2002c0 7.90039 5.7002 14.9004 13.5996 15.7002c21.7002 2.2998 42.2002 10.2002 57.7002 22.4004c13.7998 10.8994 33.6006 13.1992 47.1006 2
+c38.2998 -31.7002 107.199 -31.8008 145.199 -1.7002c13.7002 10.8994 33.2002 13 46.7002 1.7998c38.5 -31.9004 107.8 -31.9004 145.7 -1.5c14 11.2998 34.0996 11.0996 48.2002 0c15.7998 -12.4004 36.3994 -20.4004 57.8994 -22.9004z" />
+ <glyph glyph-name="baby" unicode="&#xf77c;" horiz-adv-x="384"
+d="M192 288c-44.2002 0 -80 35.7998 -80 80s35.7998 80 80 80s80 -35.7998 80 -80s-35.7998 -80 -80 -80zM138.6 39.2002l29.4004 -39.2002c13.2002 -17.7002 9.7002 -42.7998 -8 -56c-7.2002 -5.40039 -15.7002 -8 -24 -8c-12.0996 0 -24.0996 5.5 -32 16l-48 64
+c-10.9004 14.5996 -10.5996 34.7998 0.799805 49l45.9004 57.4004l61.5 -51.2002zM281.3 122.4l45.9004 -57.4004c11.3994 -14.2002 11.7002 -34.4004 0.799805 -49l-48 -64c-7.7998 -10.5 -19.7998 -16 -32 -16c-8.40039 0 -16.7998 2.59961 -24 8
+c-17.7002 13.2002 -21.2002 38.2998 -8 56l29.4004 39.2002l-25.6006 32zM376.7 303c12.7002 -18.0996 8.39941 -43 -9.7002 -55.7998l-40.5996 -28.5c-17 -11.9004 -35.4004 -20.9004 -54.4004 -27.9004v-30.7998h-160v30.9004c-19 7 -37.4004 16 -54.4004 27.8994
+l-40.5996 28.5c-18 12.7002 -22.4004 37.6006 -9.7002 55.7002c12.7002 18 37.6006 22.4004 55.7002 9.7002l40.5996 -28.4004c52.6006 -37 124.101 -37 176.801 0l40.5996 28.5c18.0996 12.6006 43 8.2998 55.7002 -9.7998z" />
+ <glyph glyph-name="baby-carriage" unicode="&#xf77d;"
+d="M144.8 431l111.2 -175h-256c0 74 35.2998 140.1 90.7998 184.4c16.7998 13.3994 42.7002 8.39941 54 -9.40039zM496 352c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-48v-64c0 -50.5996 -23 -96.4004 -60.2998 -130.7
+c34.5996 -8.89941 60.2998 -40 60.2998 -77.2998c0 -44.2002 -35.7998 -80 -80 -80s-80 35.7998 -80 80c0 8.90039 1.7002 17.2002 4.40039 25.2002c-21.5 -5.90039 -44.6006 -9.2002 -68.4004 -9.2002s-46.7998 3.2998 -68.4004 9.2002
+c2.60059 -8 4.40039 -16.2998 4.40039 -25.2002c0 -44.2002 -35.7998 -80 -80 -80s-80 35.7998 -80 80c0 37.2998 25.7002 68.4004 60.2998 77.2998c-37.2998 34.2998 -60.2998 80.1006 -60.2998 130.7h384v64c0 35.2998 28.7002 64 64 64h48zM80 -16
+c17.5996 0 32 14.4004 32 32s-14.4004 32 -32 32s-32 -14.4004 -32 -32s14.4004 -32 32 -32zM400 16c0 17.5996 -14.4004 32 -32 32s-32 -14.4004 -32 -32s14.4004 -32 32 -32s32 14.4004 32 32z" />
+ <glyph glyph-name="biohazard" unicode="&#xf780;" horiz-adv-x="575"
+d="M287.9 336c-18.5 0 -36 -3.7998 -52.5 -9.5c-13.3008 10.2998 -23.6006 24.2998 -29.5 40.7002c25.1992 10.8994 53 16.8994 82.0996 16.8994c29.2002 0 57 -6.09961 82.2002 -17c-5.90039 -16.3994 -16.2002 -30.3994 -29.5 -40.6992
+c-16.6006 5.7998 -34.2002 9.59961 -52.7998 9.59961zM163.6 9.2998c-47.5 35.5 -79.1992 90.7002 -83.2998 153.5c7.2998 2.10059 14.9004 3.10059 22.5 3.10059c9.2002 0 17.9004 -1.80078 26.4004 -4.60059c4.09961 -44.2998 26 -83.2002 58.8994 -109.6
+c-4.09961 -16 -12.5 -30.6006 -24.5 -42.4004zM387.8 51.9004c32.7002 26.3994 54.6006 65.0996 58.7002 109.3c8.59961 2.7998 17.4004 4.7002 26.5996 4.7002c7.5 0 15 -1 22.2002 -3c-3.89941 -62.8008 -35.5996 -118 -83 -153.5c-12 11.7998 -20.3994 26.5 -24.5 42.5z
+M501.3 256.9c34.6006 -20.4004 61 -53.3008 74.1006 -92.4004c1.2998 -3.7002 -0.200195 -7.7998 -3.5 -9.7998c-3.30078 -2 -7.5 -1.2998 -10 1.59961c-9.40039 10.7998 -19 19 -29.2002 25.1006c-57.2998 33.8994 -130.8 13.6992 -163.9 -45
+c-33.0996 -58.7002 -13.3994 -134 43.9004 -167.9c10.2002 -6.09961 21.8994 -10.5 35.7998 -13.4004c3.7998 -0.799805 6.40039 -4.19922 6.40039 -8.09961c-0.100586 -4 -2.7002 -7.2998 -6.5 -8c-39.7002 -7.7998 -80.6006 -0.799805 -115.2 19.7002
+c-18 10.5996 -32.9004 24.5 -45.2998 40.0996c-12.4004 -15.5996 -27.3008 -29.5 -45.3008 -40.0996c-34.5996 -20.5 -75.5 -27.5 -115.199 -19.7002c-3.80078 0.700195 -6.40039 4 -6.5 8c0 3.90039 2.69922 7.2998 6.39941 8.09961
+c13.7998 3 25.6006 7.30078 35.7998 13.4004c57.3008 33.9004 77 109.2 43.9004 167.9c-33.0996 58.6992 -106.6 78.8994 -163.9 45c-10.1992 -6 -19.7998 -14.3008 -29.1992 -25.1006c-2.5 -2.89941 -6.7002 -3.59961 -10 -1.59961
+c-3.30078 2.09961 -4.80078 6.09961 -3.5 9.7998c13.2998 39.0996 39.6992 71.9004 74.2998 92.4004c17.5996 10.3994 36.3994 16.5996 55.2998 19.8994c-6.09961 17.7002 -10 36.4004 -10 56.2002c0 41 14.5996 80.7998 41 112.2c2.5 3 6.59961 3.7002 10 1.7998
+c3.2998 -1.90039 4.7998 -6 3.59961 -9.7002c-4.39941 -13.7998 -6.59961 -26.3994 -6.59961 -38.5c0 -67.7998 53.7998 -122.899 120 -122.899s120 55.0996 120 122.899c0 12.2002 -2.09961 24.7002 -6.59961 38.5c-1.2002 3.7002 0.299805 7.7998 3.59961 9.7002
+c3.40039 1.90039 7.5 1.2002 10 -1.7998c26.5 -31.4004 41 -71.2002 41 -112.2c0 -19.7998 -4 -38.5 -10 -56.2002c19 -3.2998 37.7002 -9.5 55.2998 -19.8994zM287.9 127.9c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48z" />
+ <glyph glyph-name="blog" unicode="&#xf781;" horiz-adv-x="511"
+d="M172.2 221.2c75.5 -15 129.899 -89.2998 112.5 -172.2c-11.4004 -54.2998 -55.2998 -98.2998 -109.7 -109.7c-92.9004 -19.5 -175 51.2002 -175 140.7v248c0 13.2998 10.7002 24 24 24h48c13.2998 0 24 -10.7002 24 -24v-248c0 -26.5 21.5 -48 48 -48s48 21.5 48 48
+c0 20.5996 -13.0996 38.2002 -31.2998 45c-9.60059 3.59961 -16.7002 11.7998 -16.7002 22v50.4004c0 14.8994 13.5996 26.6992 28.2002 23.7998zM209 448c163.2 -8.59961 294.4 -139.8 302.9 -303c0.5 -9.2002 -6.80078 -17 -16 -17h-32.1006
+c-8.39941 0 -15.3994 6.59961 -15.8994 15c-7.5 129.5 -111.5 234.5 -240.9 241.5c-8.40039 0.400391 -15 7.40039 -15 15.9004v31.5996c0 9.2002 7.7998 16.5 17 16zM209.3 352c110.101 -8.5 198.2 -96.5996 206.601 -206.7
+c0.699219 -9.2998 -6.80078 -17.2998 -16.1006 -17.2998h-32.2002c-8.2998 0 -15.0996 6.40039 -15.8994 14.7002c-6.90039 77 -68.1006 138.899 -144.9 145.2c-8.2998 0.599609 -14.7998 7.5 -14.7998 15.8994v32.1006c0 9.39941 8 16.7998 17.2998 16.0996z" />
+ <glyph glyph-name="calendar-day" unicode="&#xf783;" horiz-adv-x="448"
+d="M0 -16v272h448v-272c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48zM64 176v-96c0 -8.7998 7.2002 -16 16 -16h96c8.7998 0 16 7.2002 16 16v96c0 8.7998 -7.2002 16 -16 16h-96c-8.7998 0 -16 -7.2002 -16 -16zM400 384c26.5 0 48 -21.5 48 -48v-48h-448v48
+c0 26.5 21.5 48 48 48h48v48c0 8.7998 7.2002 16 16 16h32c8.7998 0 16 -7.2002 16 -16v-48h128v48c0 8.7998 7.2002 16 16 16h32c8.7998 0 16 -7.2002 16 -16v-48h48z" />
+ <glyph glyph-name="calendar-week" unicode="&#xf784;" horiz-adv-x="448"
+d="M0 -16v272h448v-272c0 -26.5 -21.5 -48 -48 -48h-352c-26.5 0 -48 21.5 -48 48zM64 176v-64c0 -8.7998 7.2002 -16 16 -16h288c8.7998 0 16 7.2002 16 16v64c0 8.7998 -7.2002 16 -16 16h-288c-8.7998 0 -16 -7.2002 -16 -16zM400 384c26.5 0 48 -21.5 48 -48v-48h-448
+v48c0 26.5 21.5 48 48 48h48v48c0 8.7998 7.2002 16 16 16h32c8.7998 0 16 -7.2002 16 -16v-48h128v48c0 8.7998 7.2002 16 16 16h32c8.7998 0 16 -7.2002 16 -16v-48h48z" />
+ <glyph glyph-name="candy-cane" unicode="&#xf786;"
+d="M497.5 356c35.7002 -75.5 2.7998 -166.1 -68.9004 -209l-347.3 -206.5c-5.2002 -3 -10.7998 -4.5 -16.3994 -4.5c-10.9004 0 -21.5 5.59961 -27.5 15.5996l-32.8008 54.9004c-9.09961 15.2002 -4.19922 34.7998 11 43.9004l353.601 210.1
+c15.0996 9.09961 20.0996 28.7998 11 43.9004c-6 10 -16.6006 15.5996 -27.5 15.5996c-5.60059 0 -11.2998 -1.40039 -16.4004 -4.5l-27.5 -16.4004c-5.2002 -3 -10.7998 -4.5 -16.3994 -4.5c-10.9004 0 -21.5 5.60059 -27.5 15.6006l-32.8008 54.8994
+c-9.09961 15.2002 -4.19922 34.8008 11 43.9004l27.5 16.4004c25.6006 15.2998 53.9004 22.5996 81.8008 22.5996c59.3994 0 117.199 -33.0996 145.1 -92zM319.8 343c8.5 5.09961 18.1006 7.59961 27.9004 8.40039l-20.6006 61.7998
+c-10.5 -2.10059 -20.5996 -5.5 -30.2998 -10.2002l20.5 -61.5zM145.9 16.2002l30.7998 18.2998l-60.5 38.5l-30.7998 -18.2998zM253.4 80.0996l30.7998 18.3008l-60.5 38.5l-30.7998 -18.3008zM364.3 146l30.7998 18.2998l-60.5 38.5l-30.7998 -18.2998zM384.7 343.3
+c9.2002 -5.39941 17.2002 -13 22.8994 -22.2998l45.7002 45.7002c-6.59961 8.5 -14.2002 16.0996 -22.5996 22.5996zM466.8 235.5c4.60059 9.7998 8 20 10.1006 30.4004l-60.4004 20.0996c-0.0996094 -4.5 -0.700195 -9.09961 -1.7998 -13.5996
+c-1.60059 -6.2002 -4.2002 -11.8008 -7.40039 -17.1006z" />
+ <glyph glyph-name="carrot" unicode="&#xf787;"
+d="M298.2 291.4c61.7002 -30.1006 87.2998 -104.5 57.2002 -166.2c-12.6006 -25.7998 -33.1006 -45.4004 -57.1006 -57.1006l-102 -49.7998l-57 57c-6.2002 6.2002 -16.2998 6.2002 -22.5996 0s-6.2998 -16.3994 0 -22.5996l49.2002 -49.2002l-133.601 -65.2002
+c-11.0996 -5.39941 -24.5996 -0.799805 -30 10.2998c-3.09961 6.40039 -2.89941 13.7002 0 19.7002l128.101 262.7l50.1992 -50.2002c3.10059 -3.09961 7.2002 -4.7002 11.3008 -4.7002c4.09961 0 8.19922 1.60059 11.2998 4.7002c6.2998 6.2002 6.2998 16.2998 0 22.6006
+l-55.2002 55.1992c35.7002 43.3008 97.5 58.5 150.2 32.8008zM390.3 326.3c40.7002 19.5 88.7998 9.40039 121.7 -30.2998c-41.5996 -50.2998 -107.5 -52.5 -151.9 -7.90039l-8 8c-44.5996 44.4004 -42.3994 110.2 7.90039 151.9
+c39.7002 -32.9004 49.7998 -81 30.2998 -121.7z" />
+ <glyph glyph-name="cash-register" unicode="&#xf788;"
+d="M511.1 69.2002c0.600586 -3.5 0.900391 -7 0.800781 -10.5v-90.7002c0 -17.7002 -14.3008 -32 -32 -32h-448c-17.7002 0 -32 14.2998 -32 32v90.7998c0 3.5 0.299805 7 0.899414 10.5l26.7002 160c2.59961 15.4004 16 26.7002 31.5996 26.7002h84.9004v64h-96
+c-8.7998 0 -16 7.2002 -16 16v96c0 8.7998 7.2002 16 16 16h256c8.7998 0 16 -7.2002 16 -16v-96.0996c0 -8.80078 -7.2002 -16 -16 -16h-96v-64h244.8c15.7002 0 29 -11.3008 31.6006 -26.7002zM280 200v-16c0 -8.7998 7.2002 -16 16 -16h16c8.7998 0 16 7.2002 16 16v16
+c0 8.7998 -7.2002 16 -16 16h-16c-8.7998 0 -16 -7.2002 -16 -16zM248 136c-8.7998 0 -16 -7.2002 -16 -16v-16c0 -8.7998 7.2002 -16 16 -16h16c8.7998 0 16 7.2002 16 16v16c0 8.7998 -7.2002 16 -16 16h-16zM216 216h-16c-8.7998 0 -16 -7.2002 -16 -16v-16
+c0 -8.7998 7.2002 -16 16 -16h16c8.7998 0 16 7.2002 16 16v16c0 8.7998 -7.2002 16 -16 16zM80 368h192v32h-192v-32zM120 168c8.7998 0 16 7.2002 16 16v16c0 8.7998 -7.2002 16 -16 16h-16c-8.7998 0 -16 -7.2002 -16 -16v-16c0 -8.7998 7.2002 -16 16 -16h16zM136 104
+c0 -8.7998 7.2002 -16 16 -16h16c8.7998 0 16 7.2002 16 16v16c0 8.7998 -7.2002 16 -16 16h-16c-8.7998 0 -16 -7.2002 -16 -16v-16zM352 -8v16c0 4.40039 -3.59961 8 -8 8h-176c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h176c4.40039 0 8 3.59961 8 8
+zM376 104v16c0 8.7998 -7.2002 16 -16 16h-16c-8.7998 0 -16 -7.2002 -16 -16v-16c0 -8.7998 7.2002 -16 16 -16h16c8.7998 0 16 7.2002 16 16zM424 184v16c0 8.7998 -7.2002 16 -16 16h-16c-8.7998 0 -16 -7.2002 -16 -16v-16c0 -8.7998 7.2002 -16 16 -16h16
+c8.7998 0 16 7.2002 16 16z" />
+ <glyph glyph-name="compress-arrows-alt" unicode="&#xf78c;"
+d="M200 160c13.2998 0 24 -10.7002 24 -24v-112c0 -21.4004 -25.7998 -32.0996 -40.9004 -17l-31.0996 33l-99.2998 -99.2998c-6.2002 -6.2002 -16.4004 -6.2002 -22.6006 0l-25.3994 25.3994c-6.2002 6.2002 -6.2002 16.4004 0 22.6006l99.2002 99.2998l-32.9004 31
+c-15.0996 15.2002 -4.40039 41 17 41h112zM312 224c-13.2998 0 -24 10.7002 -24 24v112c0 21.4004 25.7998 32.0996 40.9004 17l31.0996 -33l99.2998 99.2998c6.2002 6.2002 16.4004 6.2002 22.6006 0l25.3994 -25.3994c6.2002 -6.2002 6.2002 -16.4004 0 -22.6006
+l-99.2998 -99.2998l33 -31c15.0996 -15.0996 4.40039 -41 -17 -41h-112zM408 88l99.2998 -99.4004c6.2002 -6.19922 6.2002 -16.3994 0 -22.5996l-25.3994 -25.4004c-6.2002 -6.19922 -16.4004 -6.19922 -22.6006 0l-99.2998 99.3008l-31 -32.9004
+c-15.0996 -15.0996 -41 -4.40039 -41 17v112c0 13.2998 10.7002 24 24 24h112c21.4004 0 32.0996 -25.7998 17 -40.9004zM183 376.9c15.0996 15.0996 41 4.39941 41 -16.9004v-112c0 -13.2998 -10.7002 -24 -24 -24h-112c-21.4004 0 -32.0996 25.7998 -17 40.9004
+l33 31.0996l-99.2998 99.2998c-6.2002 6.2002 -6.2002 16.4004 0 22.6006l25.3994 25.3994c6.2002 6.2002 16.4004 6.2002 22.6006 0l99.2998 -99.2998z" />
+ <glyph glyph-name="dumpster" unicode="&#xf793;" horiz-adv-x="576"
+d="M560 288h-97.2998l-25.6006 128h98.9004c7.2998 0 13.7002 -5 15.5 -12.0996l24 -96c2.5 -10.1006 -5.09961 -19.9004 -15.5 -19.9004zM272 416v-128h-126.1l25.5996 128h100.5zM404.5 416l25.5996 -128h-126.1v128h100.5zM16 288c-10.4004 0 -18 9.7998 -15.5 19.9004
+l24 96c1.7998 7.09961 8.2002 12.0996 15.5 12.0996h98.9004l-25.6006 -128h-97.2998zM560 224c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-28l-20 -160v-16c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v16h-320v-16
+c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v16l-20 160h-28c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h20l-4 32h512l-4 -32h20z" />
+ <glyph glyph-name="dumpster-fire" unicode="&#xf794;" horiz-adv-x="639"
+d="M418.7 343.9c-19.7002 -17.6006 -37.7002 -36.5 -53.9004 -55.8008h-60.7998v128h100.5l14.4004 -72zM272 416v-128h-126.1l25.5996 128h100.5zM461.3 343.9l-11.8994 10.5996l-12.3008 61.5h98.9004c7.2998 0 13.7002 -5 15.4004 -12.0996l24 -96
+c0.199219 -0.800781 -0.100586 -1.5 0 -2.30078c-1 1 -2 2.2002 -3.10059 3.10059l-21.2998 19l-21.2998 -19c-5.90039 -5.2002 -11.6006 -10.7002 -17.2998 -16.2998c-15.6006 17.7998 -32.9004 35.1992 -51.1006 51.5zM16 288c-10.4004 0 -18 9.7998 -15.5 19.9004l24 96
+c1.7998 7.09961 8.2002 12.0996 15.5 12.0996h98.9004l-25.6006 -128h-97.2998zM340.6 256c-32.6992 -46.7002 -52.5996 -93.7002 -52.5996 -129.6c0 -48.5 18.5996 -92.7002 48.7998 -126.4h-208.8v-16c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v16
+l-20 160h-28c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h20l-4 32h308.6zM551.1 284.8c51.8008 -46.2002 88.9004 -121.8 88.8008 -158.399c0 -87.5 -71.6006 -158.4 -160 -158.4c-88.4004 0 -160 70.9004 -160 158.4c0 49.2998 49.7998 130.899 120 193.6
+c27.3994 -24.4004 51.5 -50.5996 71 -76.4004c11.8994 14 25.2998 27.9004 40.1992 41.2002zM532.5 55.4004c33.4004 24.1992 41.2002 71.0996 22.5996 107.8c-2.2998 4.5 -4.89941 9.2002 -7.69922 14l-39.8008 -47s-62.3994 82.5 -67.0996 88.0996
+c-32.9004 -40.8994 -49.4004 -64.7998 -49.4004 -91.8994c0 -54.5 39.9004 -88 88.9004 -88c19.5996 0 37.7998 6.2998 52.5 17z" />
+ <glyph glyph-name="ethernet" unicode="&#xf796;"
+d="M496 256c8.7998 0 16 -7.2002 16 -16v-224c0 -8.7998 -7.2002 -16 -16 -16h-80v128h-32v-128h-64v128h-32v-128h-64v128h-32v-128h-64v128h-32v-128h-80c-8.7998 0 -16 7.2002 -16 16v224c0 8.7998 7.2002 16 16 16h48v48c0 8.7998 7.2002 16 16 16h48v48
+c0 8.7998 7.2002 16 16 16h224c8.7998 0 16 -7.2002 16 -16v-48h48c8.7998 0 16 -7.2002 16 -16v-48h48z" />
+ <glyph glyph-name="gifts" unicode="&#xf79c;" horiz-adv-x="640"
+d="M240.6 253.9c-27.7998 -6.90039 -48.5996 -32 -48.5996 -61.9004v-224c0 -11.7002 3.40039 -22.5 8.90039 -32h-168.9c-17.7002 0 -32 14.2998 -32 32v352c0 17.7002 14.2998 32 32 32h29.4004l-30.7002 22c-7.2002 5.09961 -8.7998 15.0996 -3.7002 22.2998l9.2998 13
+c5.10059 7.2002 15.1006 8.7998 22.2998 3.7002l32.1006 -22.7998l-11.5 30.5996c-3.2002 8.2002 1 17.5 9.2998 20.6006l15 5.59961c8.2998 3.09961 17.5 -1.09961 20.5996 -9.40039l19.9004 -53.0996l19.9004 53c3.09961 8.2998 12.2998 12.5 20.5996 9.40039l15 -5.60059
+c8.2998 -3.09961 12.5 -12.2998 9.40039 -20.5996l-11.5 -30.6006l32 22.9004c7.19922 5.2002 17.1992 3.5 22.2998 -3.7002l9.2998 -13c5.2002 -7.2002 3.5 -17.2002 -3.7002 -22.2998l-30.7002 -22h29.4004c12.7002 0 23.4004 -7.5 28.5996 -18.2998
+c-26.6992 -18.6006 -42.0996 -49 -44 -79.7998zM224 -32v96h192v-128h-160c-17.7002 0 -32 14.2998 -32 32zM448 -64v128h192v-96c0 -17.7002 -14.2998 -32 -32 -32h-160zM608 224c17.7002 0 32 -14.2998 32 -32v-96h-192v128h-15.2998l-0.700195 0.200195
+l-0.700195 -0.200195h-15.2998v-128h-192v96c0 17.7002 14.2998 32 32 32h20.4004c-2.7002 7.59961 -4.40039 15.5 -4.40039 23.7998c0 35.5 27 72.2002 72.0996 72.2002c48 0 75.8008 -47.7002 87.9004 -75.2998c12 27.5996 39.7998 75.2998 87.9004 75.2998
+c45.0996 0 72.0996 -36.7002 72.0996 -72.2002c0 -8.2998 -1.7998 -16.2002 -4.40039 -23.7998h20.4004zM336 224h52.5996c-8.89941 20.5996 -25.7998 48 -44.5 48c-17.6992 0 -24.0996 -14.5 -24.0996 -24.2002c0 -5.2002 1.5 -12.5996 8.7998 -19
+c2.10059 -1.7998 4.5 -3.39941 7.2002 -4.7998zM535.2 228.8c7.2998 6.40039 8.7998 13.7998 8.7998 19c0 9.7002 -6.40039 24.2002 -24.0996 24.2002c-18.7002 0 -35.7002 -27.7002 -44.5 -48h52.5996c2.7002 1.40039 5.09961 3 7.2002 4.7998z" />
+ <glyph glyph-name="glass-cheers" unicode="&#xf79f;" horiz-adv-x="640"
+d="M639.4 14.4004c1.69922 -4.10059 -0.300781 -8.7002 -4.30078 -10.4004l-162.399 -67.4004c-4 -1.69922 -8.7002 0.200195 -10.4004 4.30078c-8.5 20.3994 1.2002 43.7998 21.6006 52.2998l22.0996 9.2002l-39.2998 103.6
+c-4.40039 -0.5 -8.7998 -1.2998 -13.1006 -1.2998c-51.6992 0 -99.3994 33.0996 -113.399 85.2998l-20.2002 75.4004l-20.2002 -75.4004c-14 -52.2002 -61.7002 -85.2998 -113.399 -85.2998c-4.30078 0 -8.7002 0.799805 -13.1006 1.2998l-39.3994 -103.6l22.0996 -9.2002
+c20.4004 -8.5 30 -31.9004 21.5996 -52.2998c-1.69922 -4.10059 -6.2998 -6 -10.3994 -4.30078l-162.3 67.4004c-4.10059 1.7002 -6 6.40039 -4.30078 10.5c8.5 20.4004 31.8008 30.0996 52.2002 21.5996l22.1006 -9.19922l38.6992 101.899
+c-47.8994 34.9004 -64.6992 100.2 -34.5 152.7l86.6006 150.5c8 13.9004 25.0996 19.7998 40 13.5996l114.3 -47.3994l114.3 47.3994c14.9004 6.10059 32 0.300781 40 -13.5996l86.6006 -150.5c30.2998 -52.5 13.3994 -117.8 -34.5 -152.8l38.6992 -101.9l22.1006 9.2002
+c20.3994 8.5 43.7998 -1.2002 52.2002 -21.5996zM275.9 285.9l18.8994 70.6992l-94.5 39.2002l-36.5 -63.3994zM364.1 285.9l112.101 46.5l-36.5 63.3994l-94.5 -39.2002z" />
+ <glyph glyph-name="glass-whiskey" unicode="&#xf7a0;"
+d="M480 416c19.5 0 34.4004 -17.2002 31.7002 -36.5l-55.6006 -356.5c-4.5 -31.5 -31.5996 -54.9004 -63.3994 -54.9004h-273c-31.9004 0 -58.9004 23.4004 -63.4004 54.9004l-56 356.5c-2.7002 19.2998 12.2002 36.5 31.7002 36.5h448zM442.6 352h-373.199l30.1992 -192
+h313z" />
+ <glyph glyph-name="globe-europe" unicode="&#xf7a2;" horiz-adv-x="496"
+d="M248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM448 192c0 110.3 -89.7002 200 -200.2 200c-1.89941 0 -3.7998 -0.200195 -5.7002 -0.299805l-28.8994 -21.7002c-2 -1.5 -3.2002 -3.90039 -3.2002 -6.40039v-20
+c0 -4.39941 3.59961 -8 8 -8h16c4.40039 0 8 3.60059 8 8v8l16 16h20.7002c6.2002 0 11.2998 -5.09961 11.2998 -11.2998c0 -3 -1.2002 -5.89941 -3.2998 -8l-26.7998 -26.7998c-1.2002 -1.2002 -2.7002 -2.09961 -4.40039 -2.7002l-40 -13.2998
+c-3.2998 -1.09961 -5.5 -4.2002 -5.5 -7.59961c0 -6.60059 -2.59961 -12.9004 -7.2002 -17.5l-20.0996 -20.1006c-3 -3 -4.7002 -7.09961 -4.7002 -11.2998v-25.2998c0 -8.7998 7.2002 -16 16 -16h22.0996c6.10059 0 11.6006 3.39941 14.3008 8.7998l9.39941 18.7002
+c1.40039 2.7002 4.2002 4.39941 7.2002 4.39941h3.09961c4.40039 0 8 -3.59961 8 -8c0 -4.39941 3.60059 -8 8 -8h16c4.40039 0 8 3.60059 8 8v2.2002c0 3.5 2.2002 6.5 5.5 7.60059l31.6006 10.5c6.5 2.19922 10.8994 8.2998 10.8994 15.1992v4.5
+c0 8.80078 7.2002 16 16 16h36.7002c6.2002 0 11.2998 -5.09961 11.2998 -11.2998v-9.39941c0 -6.2002 -5.09961 -11.3008 -11.2998 -11.3008h-32c-3 0 -5.89941 -1.19922 -8 -3.2998l-9.39941 -9.39941c-2.10059 -2.10059 -3.30078 -5 -3.30078 -8
+c0 -6.2002 5.10059 -11.3008 11.3008 -11.3008h16c3 0 5.89941 -1.19922 8 -3.2998l9.39941 -9.39941c2.10059 -2.10059 3.2998 -5 3.2998 -8v-8.7002l-12.5 -12.5c-4.59961 -4.60059 -4.59961 -12.1006 -0.0996094 -16.7002l32 -32.5996
+c3 -3.10059 7.09961 -4.80078 11.4004 -4.80078h20.2998c6.89941 20.2002 10.7998 41.9004 10.7998 64.4004zM130.1 298.9c0 -6.2002 5.10059 -11.3008 11.3008 -11.3008h16c3 0 5.89941 1.2002 8 3.30078l9.39941 9.39941c2.10059 2.10059 3.2998 5 3.2998 8v16
+c0 6.2002 -5.09961 11.2998 -11.2998 11.2998c-3 0 -5.89941 -1.19922 -8 -3.2998l-25.3994 -25.3994c-2.10059 -2.10059 -3.30078 -5 -3.30078 -8zM258.1 -7.5c71.1006 3.59961 132.5 44.2002 164.9 103.1h-13.4004c-4.7998 0 -9.5 1.90039 -12.8994 5.30078
+l-17.2998 17.2998c-6 6 -14.1006 9.39941 -22.6006 9.39941h-18.2998l-43.2002 37.1006c-8.2002 7 -18.7002 10.8994 -29.5996 10.8994h-31.2002c-8.2002 0 -16.2998 -2.2998 -23.4004 -6.5l-42.8994 -25.6992c-13.7002 -8.2002 -22.1006 -23 -22.1006 -39v-23.9004
+c0 -14.2998 6.7002 -27.7998 18.2002 -36.4004l22.2002 -16.6992c8.7002 -6.5 24.5996 -11.8008 35.4004 -11.8008h20.1992c8.80078 0 16 -7.19922 16 -16v-7.09961z" />
+ <glyph glyph-name="grip-lines" unicode="&#xf7a4;"
+d="M496 160c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-480c-8.7998 0 -16 7.2002 -16 16v32c0 8.7998 7.2002 16 16 16h480zM496 288c8.7998 0 16 -7.2002 16 -16v-32c0 -8.7998 -7.2002 -16 -16 -16h-480c-8.7998 0 -16 7.2002 -16 16v32
+c0 8.7998 7.2002 16 16 16h480z" />
+ <glyph glyph-name="grip-lines-vertical" unicode="&#xf7a5;" horiz-adv-x="256"
+d="M96 -48c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v480c0 8.7998 7.2002 16 16 16h32c8.7998 0 16 -7.2002 16 -16v-480zM224 -48c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v480c0 8.7998 7.2002 16 16 16h32
+c8.7998 0 16 -7.2002 16 -16v-480z" />
+ <glyph glyph-name="guitar" unicode="&#xf7a6;" horiz-adv-x="511"
+d="M502.6 393.4c12.5 -12.5 12.5 -32.8008 0.100586 -45.2002l-67.9004 -67.9004c-12.5 -12.5 -32.7998 -12.5 -45.2998 0l-54.2002 -54.2002c28.9004 -45.3994 28.9004 -100.399 -4.2002 -133.5c-9.69922 -9.69922 -21.1992 -16.3994 -33.8994 -20.5
+c-18.7998 -6.09961 -33.1006 -23.5996 -34.9004 -42.6992c-2.2998 -24.1006 -11.5996 -46.4004 -28.7998 -63.5c-46.0996 -46.1006 -129.1 -37.9004 -185.3 18.2998s-64.5 139.2 -18.2998 185.3c17.0996 17.2002 39.3994 26.5 63.3994 28.7998
+c19.2002 1.7998 36.6006 16.1006 42.7002 34.9004c4.09961 12.7002 10.7998 24.2002 20.5 33.8994c33.0996 33.1006 88.0996 33.2002 133.5 4.2002l54.2002 54.1006c-12.5 12.5 -12.5 32.7998 0 45.2998l67.8994 67.8994c12.5 12.5 32.8008 12.5 45.3008 0zM208 96
+c26.5 0 48 21.5 48 48s-21.5 48 -48 48s-48 -21.5 -48 -48s21.5 -48 48 -48z" />
+ <glyph glyph-name="heart-broken" unicode="&#xf7a9;"
+d="M473.7 374.2c48.7002 -49.7998 50.7998 -129.101 7.2998 -182.101l-212.2 -218.699c-7.09961 -7.30078 -18.5996 -7.30078 -25.7002 0l-212.1 218.6c-43.5 53.0996 -41.4004 132.4 7.2998 182.2l2.40039 2.39941c46.2998 47.4004 119 51.8008 170.7 14l28.5996 -86.5
+l-96 -64l144 -144l-48 128l96 64l-34.2998 103.4c51.5996 36.9004 123.6 32.2002 169.6 -14.7998z" />
+ <glyph glyph-name="holly-berry" unicode="&#xf7aa;" horiz-adv-x="447"
+d="M144 256c-26.5 0 -48 21.5 -48 48s21.5 48 48 48s48 -21.5 48 -48s-21.5 -48 -48 -48zM256 304c0 26.5 21.5 48 48 48s48 -21.5 48 -48s-21.5 -48 -48 -48s-48 21.5 -48 48zM224 352c-26.5 0 -48 21.5 -48 48s21.5 48 48 48s48 -21.5 48 -48s-21.5 -48 -48 -48z
+M207.8 212.9c-0.399414 -39.8008 7.40039 -78.1006 22.9004 -112.301c4 -8.89941 -2 -19.1992 -11.7002 -20.1992c-15.2002 -1.5 -30.4004 -4.60059 -45.2998 -9.10059c-14.9004 -4.5 -23.7998 -19.8994 -20.2002 -35.0996s8.5 -29.9004 14.7002 -43.7998
+c4 -8.90039 -1.90039 -19.3008 -11.6006 -20.2002c-35.8994 -3.40039 -71.5996 -14.9004 -104.8 -33.9004c-12.3994 -7.09961 -27.5 1.60059 -27.5996 16c-0.100586 38.2002 -8 74.9004 -23 107.7c-4 8.90039 2 19.2002 11.7002 20.2002
+c15.1992 1.5 30.3994 4.59961 45.2998 9.09961c14.8994 4.5 23.7998 19.9004 20.2002 35.1006c-3.60059 15.1992 -8.5 29.8994 -14.7002 43.7998c-4 8.89941 1.89941 19.2998 11.5996 20.2002c37.2998 3.5 74.4004 15.8994 108.7 36.1992
+c10.7002 6.40039 23.9004 -1.2998 23.7998 -13.6992zM435 82.4004c9.7002 -1 15.7998 -11.4004 11.5 -20.1006c-15 -32.7002 -22.7998 -69.5 -23 -107.7c0 -14.3994 -15.0996 -23.0996 -27.5996 -16c-33.2002 19 -68.9004 30.5 -104.801 33.9004
+c-9.69922 0.900391 -15.5996 11.2998 -11.5996 20.2002c6.2002 14 11.0996 28.5996 14.7002 43.7998c3.59961 15.2002 -5.2998 30.5996 -20.2002 35.0996c-4.90039 1.5 -9.90039 2.5 -14.7998 3.7002c5.7998 12.2998 6.2998 26.5 0.599609 38.9004
+c-12.8994 28.2998 -19.7002 60.7002 -19.8994 94c0 1.7002 0.199219 3.2998 0.199219 4.89941c-0.0996094 12.3008 13.1006 20 23.8008 13.7002c34.2998 -20.2998 71.3994 -32.7002 108.699 -36.2002c9.7002 -0.899414 15.6006 -11.2998 11.6006 -20.1992
+c-6.2002 -14 -11.1006 -28.6006 -14.7002 -43.8008c-3.59961 -15.1992 5.2998 -30.5996 20.2002 -35.0996c15 -4.40039 30.0996 -7.5 45.2998 -9.09961z" />
+ <glyph glyph-name="horse-head" unicode="&#xf7ab;" horiz-adv-x="511"
+d="M509.8 115.5c4.60059 -11.7998 1.7998 -25.2998 -7.09961 -34.4004l-45.2998 -39.7998c-6 -6 -14.1006 -9.39941 -22.6006 -9.39941h-50.2998c-10.2998 0 -20 4.89941 -26 13.2998l-46 63.8994c-13.7998 -8.09961 -29.5996 -13.1992 -46.7998 -13.1992
+c-39.2002 0 -72.6006 23.6992 -87.4004 57.3994c-2.2998 5.10059 -9 6.2998 -12.8994 2.40039l-12.1006 -12.1006c-2.5 -2.39941 -3.2002 -6.19922 -1.59961 -9.2998c19.7002 -38.8994 58.7002 -66.0996 104.3 -69.5996v-0.700195l40.7998 -81.7002
+c10.7002 -21.2998 -4.7998 -46.2998 -28.5996 -46.2998h-236.2c-17.7002 0 -32 14.2998 -32 32v81.2002c0 159.899 35.9004 275.399 166.9 322.5l202.199 75.7002c4.90039 1.7998 10.7002 -1.10059 12 -6.10059c12.1006 -46.3994 -16.1992 -71.7002 -34.1992 -82.2998
+c42.5996 -8.2002 78.0996 -38 93 -79.2002zM328 224c13.2998 0 24 10.7002 24 24s-10.7002 24 -24 24s-24 -10.7002 -24 -24s10.7002 -24 24 -24z" />
+ <glyph glyph-name="icicles" unicode="&#xf7ad;" horiz-adv-x="511"
+d="M511.4 410.1l-87.5 -467.699c-1.7002 -8.60059 -14 -8.60059 -15.7002 0l-66.7002 363.8l-45.7998 -172.5c-2.2998 -7.60059 -13 -7.60059 -15.2998 0l-34.1006 133.399l-46.5 -196.899c-1.89941 -8.2998 -13.7002 -8.2998 -15.5996 0l-44.2002 187.3l-36.4004 -124.1
+c-2.39941 -7.2002 -12.5996 -7.2002 -15.0996 0l-87.0996 273.399c-6.2002 20.5 9.19922 41.2002 30.5996 41.2002h448c20 0 35.0996 -18.2002 31.4004 -37.9004z" />
+ <glyph glyph-name="igloo" unicode="&#xf7ae;" horiz-adv-x="576"
+d="M320 414.1v-126.1h-271.4c51.6006 77.2002 139.601 128 239.4 128c10.7998 0 21.5 -0.700195 32 -1.90039zM96 256v-128h-96c0 46 11.0996 89.4004 30.2998 128h65.7002zM352 408.6c72.7998 -16.5 135.2 -60.5 175.4 -120.6h-175.4v120.6zM480 128v128h65.7002
+c19.2002 -38.5996 30.2998 -82 30.2998 -128h-96zM416 64c0 11.0996 -1.90039 21.7002 -4.5 32h164.5v-96c0 -17.7002 -14.2998 -32 -32 -32h-128v96zM448 256v-128h-49.7998c-22.2002 38.0996 -63 64 -110.2 64s-88 -25.9004 -110.2 -64h-49.7998v128h320zM0 0v96h164.5
+c-2.59961 -10.2998 -4.5 -20.9004 -4.5 -32v-96h-128c-17.7002 0 -32 14.2998 -32 32zM288 160c53 0 96 -43 96 -96v-96h-192v96c0 53 43 96 96 96z" />
+ <glyph glyph-name="mitten" unicode="&#xf7b5;" horiz-adv-x="448"
+d="M368 32c8.7998 0 16 -7.2002 16 -16v-64c0 -8.7998 -7.2002 -16 -16 -16h-320c-8.7998 0 -16 7.2002 -16 16v64c0 8.7998 7.2002 16 16 16h320zM425 241.1c27.0996 -22.5996 30.7998 -62.8994 8.09961 -90.0996l-72.5 -87h-309l-47.8994 207.6
+c-17.9004 77.5 30.5 154.801 107.899 172.7c77.4004 17.9004 154.801 -30.5 172.801 -108l29.5996 -128.399l20.9004 25c22.5996 27.1992 62.8994 30.7998 90.0996 8.19922z" />
+ <glyph glyph-name="mug-hot" unicode="&#xf7b6;"
+d="M127.1 301.5c-2.69922 16.5996 -10.3994 31.9004 -22.3994 44.0996c-22.6006 23.1006 -36.7002 52.5 -40.6006 84.4004c-1.09961 9.5 6.5 18 16.3008 18h16.3994c8.2002 0 15 -5.90039 16.1006 -13.7998c2.7998 -20.2002 12 -38.7998 26.3994 -53.5
+c20.2002 -20.5 32.7998 -46.5 36.6006 -74.7002c1.2998 -9.5 -6.5 -18 -16.3008 -18h-16.5c-8 0 -14.6992 5.7998 -16 13.5zM239.1 301.5c-2.69922 16.5996 -10.3994 31.9004 -22.3994 44.0996c-22.6006 23.1006 -36.7002 52.5 -40.6006 84.4004
+c-1.19922 9.5 6.5 18 16.3008 18h16.3994c8.2002 0 15 -5.90039 16.1006 -13.7998c2.7998 -20.2002 12 -38.7998 26.3994 -53.5c20.2002 -20.5 32.7998 -46.5 36.6006 -74.7002c1.2998 -9.5 -6.5 -18 -16.3008 -18h-16.5c-8 0 -14.6992 5.7998 -16 13.5zM400 256
+c61.7998 0 112 -50.2002 112 -112s-50.2002 -112 -112 -112h-16c0 -53 -43 -96 -96 -96h-192c-53 0 -96 43 -96 96v192c0 17.7002 14.2998 32 32 32h368zM400 96c26.5 0 48 21.5 48 48s-21.5 48 -48 48h-16v-96h16z" />
+ <glyph glyph-name="radiation" unicode="&#xf7b9;" horiz-adv-x="496"
+d="M167.8 191.8c0 -28.5996 15.2002 -53.5 37.7998 -67.7998l-80.3994 -128.4c-4.7998 -7.69922 -15.2998 -10.0996 -22.7002 -4.7998c-58.0996 42 -97.4004 108.4 -102.5 184.2c-0.599609 9.09961 7.09961 16.7998 16.2002 16.7998h151.6zM205.6 259.5l-80.3994 128.5
+c-4.7998 7.59961 -2.40039 18.0996 5.59961 22.4004c34.9004 18.7998 74.7998 29.5996 117.2 29.5996s82.2998 -10.7998 117.2 -29.5996c8 -4.30078 10.3994 -14.7002 5.59961 -22.4004l-80.3994 -128.5c-12.4004 7.7002 -26.8008 12.4004 -42.4004 12.4004
+s-30.0996 -4.7002 -42.4004 -12.4004zM248 239.8c26.5 0 48 -21.5 48 -48s-21.5 -48 -48 -48s-48 21.5 -48 48s21.5 48 48 48zM479.8 191.8c9.10059 0 16.7998 -7.7002 16.2002 -16.7002c-5.09961 -75.7998 -44.4004 -142.199 -102.5 -184.199
+c-7.40039 -5.40039 -17.9004 -2.90039 -22.7002 4.7998l-80.3994 128.399c22.5996 14.2002 37.7998 39.1006 37.7998 67.7002h151.6z" />
+ <glyph glyph-name="radiation-alt" unicode="&#xf7ba;" horiz-adv-x="496"
+d="M184 192c0 -22.7998 12.0996 -42.7998 30.0996 -54l-41.6992 -66.7998c-5.2002 -8.2998 -16.4004 -9.90039 -24 -3.7998c-32.5 26 -54.9004 64.1992 -59.5 107.8c-0.900391 9.09961 6.7998 16.7998 16 16.7998h79.0996zM281.8 246.1
+c-9.7998 6.2002 -21.5 9.90039 -33.8994 9.90039c-12.4004 0 -24 -3.7002 -33.8008 -9.90039l-41.7998 66.9004c-4.7998 7.7998 -2.39941 18.4004 5.7998 22.5c21.2002 10.4004 44.8008 16.5 69.9004 16.5s48.7002 -6.09961 69.7998 -16.5
+c8.2002 -4.09961 10.7002 -14.7002 5.7998 -22.5zM391.1 192c9.2002 0 17 -7.7002 15.9004 -16.9004c-4.59961 -43.5996 -26.9004 -81.7998 -59.5 -107.8c-7.59961 -6.09961 -18.7998 -4.5 -24 3.7998l-41.7002 66.8008c18.1006 11.2998 30.2002 31.2998 30.2002 54.0996
+h79.0996zM248 440c137 0 248 -111 248 -248s-111 -248 -248 -248s-248 111 -248 248s111 248 248 248zM248 8c101.5 0 184 82.5 184 184s-82.5 184 -184 184s-184 -82.5 -184 -184s82.5 -184 184 -184zM248 224c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32
+s-32 14.2998 -32 32s14.2998 32 32 32z" />
+ <glyph glyph-name="restroom" unicode="&#xf7bd;" horiz-adv-x="671"
+d="M128 320c-35.2998 0 -64 28.7002 -64 64s28.7002 64 64 64s64 -28.7002 64 -64s-28.7002 -64 -64 -64zM512 320c-35.2998 0 -64 28.7002 -64 64s28.7002 64 64 64s64 -28.7002 64 -64s-28.7002 -64 -64 -64zM639.3 93.5c3.7002 -15.0996 -8.2998 -29.5 -24.5 -29.5
+h-54.7998v-104c0 -13.2998 -10.7002 -24 -24 -24h-48c-13.2998 0 -24 10.7002 -24 24v104h-54.7998c-16.2002 0 -28.2002 14.4004 -24.7002 29.5l45.5996 185.8c3.30078 13.5 15.5 23 29.8008 24.2002c15 -9.7002 32.7998 -15.5 52 -15.5c19.1992 0 37 5.7998 52 15.5
+c14.2998 -1.2002 26.5 -10.7002 29.7998 -24.2002zM336 448c8.7998 0 16 -7.2002 16 -16v-480c0 -8.7998 -7.2002 -16 -16 -16h-32c-8.7998 0 -16 7.2002 -16 16v480c0 8.7998 7.2002 16 16 16h32zM180.1 303.6c24.6006 -2.09961 43.9004 -22.5 43.9004 -47.5996v-136
+c0 -13.2998 -10.7002 -24 -24 -24h-8v-136c0 -13.2998 -10.7002 -24 -24 -24h-80c-13.2998 0 -24 10.7002 -24 24v136h-8c-13.2998 0 -24 10.7002 -24 24v136c0 25.0996 19.2998 45.5 43.9004 47.5996c15 -9.7998 32.8994 -15.5996 52.0996 -15.5996
+s37.0996 5.7998 52.0996 15.5996z" />
+ <glyph glyph-name="satellite" unicode="&#xf7bf;"
+d="M502.7 183c12.3994 -12.4004 12.3994 -32.5996 -0.100586 -45l-96.6992 -96.7002c-6.2002 -6.2002 -14.4004 -9.2998 -22.5 -9.2998c-8.10059 0 -16.3008 3.09961 -22.5 9.2998l-80.3008 80.4004l-9.89941 -9.90039c24.2998 -53.7002 22.7002 -116.2 -5.40039 -168.5
+c-4.5 -8.5 -16.3994 -9.59961 -23.2002 -2.7998l-107.5 107.5l-17.7998 -17.7998c0.700195 -2.60059 1.60059 -5 1.60059 -7.7998c0 -17.7002 -14.3008 -32 -32 -32c-17.7002 0 -32 14.2998 -32 32c0 17.6992 14.2998 32 32 32c2.7998 0 5.19922 -0.900391 7.7998 -1.60059
+l17.7998 17.7998l-107.5 107.5c-6.7998 6.80078 -5.7002 18.6006 2.7998 23.2002c52.2998 28.1006 114.8 29.7002 168.5 5.40039l9.7998 9.7998l-80.2998 80.4004c-12.3994 12.5 -12.3994 32.6992 0 45.0996l96.7002 96.7002c6.2002 6.2002 14.2998 9.2998 22.5 9.2998
+s16.2998 -3.09961 22.5996 -9.2998l80.3008 -80.2998l47.7998 47.8994c13.0996 13.1006 34.3994 13.1006 47.5 0l47.5 -47.5c13.0996 -13.0996 13.0996 -34.3994 0 -47.5l-47.7998 -47.8994zM150.7 319.5l68.8994 -68.9004l73.8008 73.8008l-68.9004 68.8994zM383.5 86.7002
+l73.7998 73.7998l-68.8994 68.9004l-73.8008 -73.8008z" />
+ <glyph glyph-name="satellite-dish" unicode="&#xf7c0;" horiz-adv-x="511"
+d="M188.8 102.1l116.601 -116.6c7.39941 -7.2998 6.19922 -20.0996 -3 -25c-77.7002 -41.7998 -176.7 -29.9004 -242.301 35.7002c-65.5996 65.5996 -77.5 164.5 -35.6992 242.3c4.89941 9.09961 17.6992 10.2998 25 3l116.8 -116.8l27.3994 27.3994
+c-0.699219 2.60059 -1.59961 5 -1.59961 7.80078c0 17.6992 14.2998 32 32 32s32 -14.3008 32 -32c0 -17.7002 -14.2998 -32 -32 -32c-2.7998 0 -5.2002 0.899414 -7.7998 1.59961zM209 448c163.2 -8.59961 294.4 -139.8 302.9 -303c0.5 -9.2002 -6.80078 -17 -16 -17
+h-32.1006c-8.39941 0 -15.3994 6.59961 -15.8994 15c-7.5 129.5 -111.5 234.5 -240.9 241.5c-8.40039 0.400391 -15 7.40039 -15 15.9004v31.5996c0 9.2002 7.7998 16.5 17 16zM209.3 352c110.101 -8.5 198.2 -96.5996 206.601 -206.7
+c0.699219 -9.2998 -6.80078 -17.2998 -16.1006 -17.2998h-32.2002c-8.2998 0 -15.0996 6.40039 -15.8994 14.7002c-6.90039 77 -68.1006 138.899 -144.9 145.2c-8.2998 0.599609 -14.7998 7.5 -14.7998 15.8994v32.1006c0 9.39941 8 16.7998 17.2998 16.0996z" />
+ <glyph glyph-name="sd-card" unicode="&#xf7c2;" horiz-adv-x="384"
+d="M320 448c35.2998 0 64 -28.7002 64 -64v-384c0 -35.2998 -28.7002 -64 -64 -64h-256c-35.2998 0 -64 28.7002 -64 64v320l128 128h192zM160 288v96h-48v-96h48zM240 288v96h-48v-96h48zM320 288v96h-48v-96h48z" />
+ <glyph glyph-name="sim-card" unicode="&#xf7c4;" horiz-adv-x="384"
+d="M320 448c35.2998 0 64 -28.7002 64 -64v-384c0 -35.2998 -28.7002 -64 -64 -64h-256c-35.2998 0 -64 28.7002 -64 64v320l128 128h192zM160 256v-64h64v64h-64zM64 224v-32h64v64h-32c-17.7002 0 -32 -14.2998 -32 -32zM128 0v64h-64v-32c0 -17.7002 14.2998 -32 32 -32
+h32zM224 0v64h-64v-64h64zM320 32v32h-64v-64h32c17.7002 0 32 14.2998 32 32zM320 96v64h-256v-64h256zM320 192v32c0 17.7002 -14.2998 32 -32 32h-32v-64h64z" />
+ <glyph glyph-name="skating" unicode="&#xf7c5;" horiz-adv-x="448"
+d="M400 448c26.5 0 48 -21.5 48 -48s-21.5 -48 -48 -48s-48 21.5 -48 48s21.5 48 48 48zM400 0c8.7998 0 16 -7.2002 16 -16c0 -26.5 -21.5 -48 -48 -48h-96c-8.7998 0 -16 7.2002 -16 16s7.2002 16 16 16h96c8.7998 0 16 7.2002 16 16s7.2002 16 16 16zM117.8 -8.59961
+c6.2998 6.2998 16.5 6.19922 22.7002 0c6.2002 -6.2002 6.2002 -16.4004 0 -22.6006c-9.2998 -9.2998 -21.5996 -14 -33.9004 -14c-12.2998 0 -24.5996 4.60059 -34 14l-67.8994 67.9004c-6.2002 6.2002 -6.2002 16.3994 0 22.5996s16.3994 6.2002 22.5996 0
+l67.9004 -67.8994c6.2002 -6.30078 16.3994 -6.2002 22.5996 0zM173.9 171.2c3.7998 -6.10059 8.19922 -11.7998 13.1992 -16.7998l30.2002 -30.2002l-91.8994 -91.9004c-6.2002 -6.2998 -14.4004 -9.39941 -22.6006 -9.39941s-16.3994 3.19922 -22.5996 9.39941
+c-12.5 12.4004 -12.5 32.7002 0 45.2002zM128 288c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32h204.6c16.3008 0 30.8008 -9.7002 37 -24.7002c6.2002 -15 2.80078 -32.0996 -8.69922 -43.5996l-82.3008 -82.2998c-0.5 -0.5 -1.19922 -0.700195 -1.69922 -1.10059
+l61 -61c9 -8.89941 14.0996 -21.2998 14.0996 -33.8994v-89.4004c0 -17.7002 -14.2998 -32 -32 -32s-32 14.2998 -32 32v82.7998l-78.2002 78.2002c-12.5996 12.5 -19.3994 29.9004 -18.7002 47.7002c0.700195 17.7002 8.80078 34.5996 22.3008 46.0996l20.0996 17.2002
+h-105.5z" />
+ <glyph glyph-name="skiing" unicode="&#xf7c9;" horiz-adv-x="511"
+d="M432 352c-26.5 0 -48 21.5 -48 48s21.5 48 48 48s48 -21.5 48 -48s-21.5 -48 -48 -48zM505 -4.09961c9.2998 -9.30078 9.2998 -24.5 0 -33.8008c-17 -17 -39.7998 -26 -63 -26c-12.5996 0 -25.2002 2.60059 -37.0996 8.10059l-391.9 202.5
+c-11.7998 6 -16.4004 20.5 -10.2998 32.2998c6.09961 11.7998 20.5996 16.2998 32.2998 10.2998l197.9 -102.3l45.8994 68.7998l-75.0996 75.2002c-14.2998 14.4004 -20.6006 34.5 -17.7002 54.4004l107 -53.1006l40.4004 -40.5
+c16.1992 -16.0996 18.6992 -41.5996 6 -60.5996l-49.2002 -73.7998l135.8 -70.2002c14.5996 -6.7002 33 -3.40039 45.0996 8.7002c9.30078 9.39941 24.5 9.39941 33.9004 0zM120 356.4l-26.2002 23c-2.2002 1.89941 -2.39941 5.19922 -0.5 7.39941
+c1.2998 1.5 3.2002 2.10059 5 1.7002l34.4004 -7h0.200195l11.0996 21.7002c13.7002 -7 21.2002 -21.1006 20.9004 -35.6006l62.5 -31l81.1992 32.5c43.6006 17.4004 76.4004 -15 84.5 -39.1992l17.1006 -51.2002l52.0996 -26.1006
+c15.7998 -7.89941 22.2002 -27.0996 14.2998 -42.8994c-7.89941 -15.7998 -27 -22.2002 -42.8994 -14.2998l-58.1006 29c-11.3994 5.69922 -20 15.5996 -24 27.6992l-6.39941 19.1006l-32.4004 -13l-114.5 56.7998c0.100586 0.0996094 0.100586 0.200195 0.200195 0.299805
+l-47.2002 23.4004c-11.5996 -9.7002 -28.3994 -12.1006 -42.7998 -4.7998z" />
+ <glyph glyph-name="skiing-nordic" unicode="&#xf7ca;" horiz-adv-x="576"
+d="M336 352c-26.5 0 -48 21.5 -48 48s21.5 48 48 48s48 -21.5 48 -48s-21.5 -48 -48 -48zM552 32c13.2002 0 24 -10.7998 24 -24c0 -39.7002 -32.2998 -72 -72 -72h-480c-13.2002 0 -24 10.7002 -24 24s10.7998 24 24 24h42.9004l54.0996 270.8
+c-0.700195 0.799805 -1.7002 1.2002 -2.40039 2.10059c-10.7998 14.0996 -8.09961 34.1992 5.90039 44.8994l39.7002 30.4004c23 17.5996 52.5 23.8994 80.7998 17.0996l71.2002 -21.2002c26.2998 -6.69922 49.2998 -25.3994 61.7998 -50.8994l26.0996 -53.2002h44
+c17.7002 0 32 -14.2998 32 -32c0 -13.4004 -8.39941 -24.9004 -20.0996 -29.5996l-25.5 -178.4h69.5c13.2002 0 24 10.7998 24 24c0 13.2998 10.7998 24 24 24zM291.5 -16l25.5 81.2002l-85 50.2002c-27.5996 16.2998 -38.9004 50.7998 -26.2998 80.2998l37.5 87.3994
+l-14.7002 4.40039c-7.90039 1.90039 -17.5996 -0.200195 -25.2998 -6.09961l-39.7002 -30.4004c-3.59961 -2.7002 -7.59961 -4.59961 -11.7002 -5.59961l-52.2998 -261.4h24.2002l62.5 131.8c7.59961 -10.8994 17.3994 -20.5996 29.5 -27.7998l22 -13l-43.1006 -91h96.9004z
+M402.1 -16l25.2002 176h-33.2998c-18.2002 0 -35.0996 10.5996 -43.0996 26.9004l-20.3008 41.3994l-31.2998 -78.2998l61.2002 -36.0996c18.7002 -11.3008 27.5 -33.6006 21.5996 -54.4004l-23.5996 -75.5h43.5996z" />
+ <glyph glyph-name="sleigh" unicode="&#xf7cc;" horiz-adv-x="640"
+d="M612.7 97.2998c18.5 -14.7002 28.5996 -37.2002 27.2002 -61c-2.2002 -39 -36.9004 -68.2998 -75.9004 -68.2998h-516c-8.7998 0 -16 7.2002 -16 16v16c0 8.7998 7.2002 16 16 16h519.3c13.6006 0 24.6006 11 24.6006 24.5996c0 7.5 -3.30078 14.5 -9.2002 19.2002
+l-9.2998 7.40039c-6.90039 5.5 -8 15.5996 -2.5 22.5l10 12.5c5.5 6.89941 15.5996 8 22.5 2.5zM32 224v128c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32h20.7002c65.7002 0 125.899 -37.2002 155.3 -96s89.5 -96 155.3 -96h20.7002c35.2998 0 64 28.7002 64 64v64h96
+c17.7002 0 32 -14.2998 32 -32s-14.2998 -32 -32 -32v-96c0 -53 -43 -96 -96 -96v-48h-64v48h-192v-48h-64v52.5c-55.0996 14.2998 -96 63.9004 -96 123.5z" />
+ <glyph glyph-name="sms" unicode="&#xf7cd;"
+d="M256 416c141.4 0 256 -93.0996 256 -208s-114.6 -208 -256 -208c-38.4004 0 -74.7002 7.09961 -107.4 19.4004c-24.5996 -19.6006 -74.2998 -51.4004 -140.6 -51.4004c-3.2002 0 -6 1.7998 -7.2998 4.7998s-0.700195 6.40039 1.5 8.7002
+c0.5 0.5 42.2998 45.4004 54.7998 95.7998c-35.5996 35.7002 -57 81.1006 -57 130.7c0 114.9 114.6 208 256 208zM128.2 144c23.3994 0 42.5 17.2998 42.3994 38.5996c0 10.6006 -4.7998 20.9004 -13.2998 28.1006l-21.8994 18.7998
+c-1.30078 1.09961 -2.10059 2.5 -2.10059 3.7998c0 3.10059 4.40039 6.60059 10.4004 6.60059h12.2998c4.40039 0 8 3.59961 8 8v16c0 4.39941 -3.59961 8 -8 8h-12.2002c-23.3994 0 -42.3994 -17.3008 -42.3994 -38.6006c0 -10.5996 4.7998 -20.8994 13.2998 -28.0996
+l21.8994 -18.7998c1.30078 -1.10059 2.10059 -2.5 2.10059 -3.80078c0 -3.09961 -4.40039 -6.59961 -10.4004 -6.59961h-12.2998c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h12.2002zM320 152v104c0 8.7998 -7.2002 16 -16 16h-16
+c-6 0 -11.5996 -3.40039 -14.2998 -8.7998l-17.7002 -35.4004l-17.7002 35.4004c-2.7002 5.39941 -8.2002 8.7998 -14.2998 8.7998h-16c-8.7998 0 -16 -7.2002 -16 -16v-104c0 -4.40039 3.59961 -8 8 -8h16c4.40039 0 8 3.59961 8 8v68.2002l24.9004 -55.7998
+c2.89941 -5.90039 11.3994 -5.90039 14.2998 0l24.7998 55.7998v-68.2002c0 -4.40039 3.59961 -8 8 -8h16c4.40039 0 8 3.59961 8 8zM368.3 144c23.4004 0 42.4004 17.2998 42.2998 38.5996c0 10.6006 -4.7998 20.9004 -13.2998 28.1006l-21.8994 18.7998
+c-1.30078 1.09961 -2.10059 2.5 -2.10059 3.7998c0 3.10059 4.40039 6.60059 10.4004 6.60059h12.2998c4.40039 0 8 3.59961 8 8v16c0 4.39941 -3.59961 8 -8 8h-12.2002c-23.3994 0 -42.3994 -17.3008 -42.3994 -38.6006c0 -10.5996 4.7998 -20.8994 13.2998 -28.0996
+l21.8994 -18.7998c1.30078 -1.10059 2.10059 -2.5 2.10059 -3.80078c0 -3.09961 -4.40039 -6.59961 -10.4004 -6.59961h-12.2998c-4.40039 0 -8 -3.59961 -8 -8v-16c0 -4.40039 3.59961 -8 8 -8h12.2998z" />
+ <glyph glyph-name="snowboarding" unicode="&#xf7ce;"
+d="M432 352c-26.5 0 -48 21.5 -48 48s21.5 48 48 48s48 -21.5 48 -48s-21.5 -48 -48 -48zM460.8 198.4l-83.5 62.8994l-66 -30.5l52 -39c14.9004 -11.2002 22 -30.2002 18.1006 -48.3994l-21.9004 -102c-2.2002 -10.1006 -9.09961 -17.8008 -17.7998 -21.9004
+l90.7998 -33.0996c9.7002 -3.5 20.2002 -3.10059 29.5 1.2998c12 5.59961 26.2998 0.399414 31.9004 -11.6006c5.59961 -12 0.399414 -26.2998 -11.6006 -31.8994c-11.5996 -5.5 -24 -8.2002 -36.5 -8.2002c-10 0 -20 1.7998 -29.7002 5.2998l-364.8 132.7
+c-21.7998 7.90039 -39.0996 23.7998 -48.8994 44.7998c-5.60059 12 -0.400391 26.2998 11.5996 31.9004c12 5.59961 26.2998 0.399414 31.9004 -11.6006c4.39941 -9.39941 12.0996 -16.5 21.7998 -20l38.7002 -14.0996c-3.80078 3.5 -7 7.7002 -8.7002 12.9004
+c-5.60059 16.7998 3.39941 34.8994 20.2002 40.5l74.0996 24.6992v53.1006c0 24.3994 13.5996 46.2998 35.4004 57.2002l41.1992 20.5996l-16.3994 5.5c-12.1006 4.09961 -22 12.5996 -27.7002 24l-29.0996 58.0996c-7.90039 15.8008 -1.5 35 14.2998 42.9004
+s35 1.5 42.8994 -14.2998l26.1006 -52.1006l63.7002 -21.1992c16.2998 -5.40039 31.5996 -13.5 45.3994 -23.8008l111.4 -83.5c14.2002 -10.5996 17 -30.6992 6.39941 -44.7998c-6.2998 -8.39941 -15.8994 -12.7998 -25.5996 -12.7998
+c-6.7002 0 -13.4004 2.09961 -19.2002 6.40039zM316.4 146.7l-60.5 45.2998v-20.5c0 -20.7002 -13.2002 -39 -32.8008 -45.5l-85 -28.4004c-2.59961 -0.899414 -5.2998 -1.09961 -8 -1.2998l169.4 -61.5996c-2.90039 6.09961 -4.2998 12.8994 -2.7998 20z" />
+ <glyph glyph-name="snowman" unicode="&#xf7d0;"
+d="M510.9 295.7c3.19922 -8.10059 -0.800781 -17.2002 -8.90039 -20.2998l-135.2 -55.2002c0.400391 -4.10059 1.2002 -8 1.2002 -12.2002c0 -10 -1.7002 -19.5 -4.2002 -28.7002c21.1006 -21.3994 36.1006 -48.7998 41.6006 -79
+c11.5 -63.2002 -16.4004 -120.3 -62.9004 -152.6c-10.9004 -7.60059 -23.9004 -11.7002 -37.2002 -11.7002h-99.2002c-11 0 -22 2.2998 -31.2998 8.2002c-42.3994 26.8994 -70.7998 73.7998 -70.7998 127.8c0 41.7998 16.9004 79.5996 44.2998 107.1
+c-2.5 9.30078 -4.2002 18.8008 -4.2002 28.9004c0 4.2002 0.800781 8.2002 1.2002 12.2002l-135.2 55.2002c-8.19922 3.19922 -12.1992 12.2998 -8.89941 20.2998l5.89941 14.5c3.30078 8 12.6006 11.8994 20.8008 8.7002l28.0996 -11.5v29
+c0 8.59961 7.2002 15.5996 16 15.5996h16c8.7998 0 16 -7 15.7998 -15.5996v-46.9004c0 -0.5 -0.200195 -1 -0.299805 -1.5l56.4004 -23c6 10 13.2998 18.9004 22 26.5996c-13.5 16.6006 -22 37.4004 -22 60.5c0 53 43 96 96 96s96 -43 96 -96
+c0 -23.0996 -8.40039 -43.8994 -22 -60.5c8.69922 -7.69922 16.0996 -16.5996 22 -26.5996l56.3994 23c0 0.5 -0.299805 1 -0.299805 1.5v46.9004c0 8.59961 7.2002 15.5996 16 15.5996h16c8.7998 0 16 -7 16 -15.5996v-29l28.2002 11.5
+c8.2002 3.19922 17.5 -0.700195 20.7998 -8.7002zM224 352c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16zM256 80c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16zM256 144
+c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16zM256 208c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16zM256 296c0 0 16 23.2002 16 32s-7.2002 16 -16 16s-16 -7.2002 -16 -16s16 -32 16 -32z
+M288 352c8.7998 0 16 7.2002 16 16s-7.2002 16 -16 16s-16 -7.2002 -16 -16s7.2002 -16 16 -16z" />
+ <glyph glyph-name="snowplow" unicode="&#xf7d2;" horiz-adv-x="639"
+d="M120 72c13.2998 0 24 -10.7002 24 -24s-10.7002 -24 -24 -24s-24 10.7002 -24 24s10.7002 24 24 24zM200 72c13.2998 0 24 -10.7002 24 -24s-10.7002 -24 -24 -24s-24 10.7002 -24 24s10.7002 24 24 24zM280 72c13.2998 0 24 -10.7002 24 -24s-10.7002 -24 -24 -24
+s-24 10.7002 -24 24s10.7002 24 24 24zM360 72c13.2998 0 24 -10.7002 24 -24s-10.7002 -24 -24 -24s-24 10.7002 -24 24s10.7002 24 24 24zM598.6 22.5996l36.7002 -36.6992c6.2002 -6.2002 6.2002 -16.4004 0 -22.6006l-22.5996 -22.5996
+c-6.2002 -6.2002 -16.4004 -6.2002 -22.6006 0l-36.6992 36.7002c-26.5 26.5 -41.4004 62.3994 -41.4004 99.8994v18.7002h-43.2002c7 -14.5996 11.2002 -30.7002 11.2002 -48c0 -61.9004 -50.0996 -112 -112 -112h-256c-61.9004 0 -112 50.0996 -112 112
+c0 44.5996 26.2998 82.7998 64 100.8v91.2002c0 26.5 21.5 48 48 48h16v112c0 26.5 21.5 48 48 48h144.3c19.2998 0 36.6006 -11.4004 44.2002 -29.0996l78.2998 -182.801c3.40039 -8 5.2002 -16.5 5.2002 -25.1992v-50.9004h64v18.7002
+c0 37.5 14.9004 73.3994 41.4004 99.8994l36.6992 36.7002c6.2002 6.2002 16.4004 6.2002 22.6006 0l22.5996 -22.5996c6.2002 -6.2002 6.2002 -16.4004 0 -22.6006l-36.7002 -36.6992c-14.5 -14.5 -22.5996 -34.1006 -22.5996 -54.6006v-101.6
+c0 -20.5 8.09961 -40.1006 22.5996 -54.6006zM192 384v-96l64 -64h122.4l-68.6006 160h-117.8zM368 0c26.5 0 48 21.5 48 48s-21.5 48 -48 48h-256c-26.5 0 -48 -21.5 -48 -48s21.5 -48 48 -48h256z" />
+ <glyph glyph-name="tenge" unicode="&#xf7d7;" horiz-adv-x="384"
+d="M372 288c6.59961 0 12 -5.40039 12 -12v-56c0 -6.59961 -5.40039 -12 -12 -12h-140v-228c0 -6.59961 -5.40039 -12 -12 -12h-56c-6.59961 0 -12 5.40039 -12 12v228h-140c-6.59961 0 -12 5.40039 -12 12v56c0 6.59961 5.40039 12 12 12h360zM372 416
+c6.59961 0 12 -5.40039 12 -12v-56c0 -6.59961 -5.40039 -12 -12 -12h-360c-6.59961 0 -12 5.40039 -12 12v56c0 6.59961 5.40039 12 12 12h360z" />
+ <glyph glyph-name="toilet" unicode="&#xf7d8;" horiz-adv-x="384"
+d="M368 400h-16v-156.7c20.2002 -10.0996 32 -22.2002 32 -35.2998c0 -67.2002 -34.5996 -126.2 -86.7998 -160.5l21.3994 -70.2002c6.30078 -20.5 -9.09961 -41.2998 -30.5996 -41.2998h-192c-21.5 0 -36.9004 20.7998 -30.5996 41.2998l21.3994 70.2002
+c-52.2002 34.2998 -86.7998 93.2998 -86.7998 160.5c0 13.0996 11.7998 25.2002 32 35.2998v156.7h-16c-8.7998 0 -16 7.2002 -16 16v16c0 8.7998 7.2002 16 16 16h352c8.7998 0 16 -7.2002 16 -16v-16c0 -8.7998 -7.2002 -16 -16 -16zM80 376v-16
+c0 -4.40039 3.59961 -8 8 -8h48c4.40039 0 8 3.59961 8 8v16c0 4.40039 -3.59961 8 -8 8h-48c-4.40039 0 -8 -3.59961 -8 -8zM192 176c77.0996 0 139.6 14.2998 139.6 32s-62.5 32 -139.6 32s-139.6 -14.2998 -139.6 -32s62.5 -32 139.6 -32z" />
+ <glyph glyph-name="tools" unicode="&#xf7d9;"
+d="M501.1 52.2998c14.5 -14.5 14.5 -38.0996 0 -52.7002l-52.6992 -52.6992c-14.5 -14.6006 -38.1006 -14.6006 -52.7002 0l-117.101 117.1c-23.0996 23.0996 -27.5 57.5996 -13.8994 85.4004l-106.601 106.6h-62.0996l-96 128l64 64l128 -96v-62.0996l106.6 -106.601
+c27.8008 13.7002 62.3008 9.2002 85.4004 -13.8994zM331.7 223c-8.2002 0 -16.6006 -1 -24.7002 -2.90039l-82.0996 82.1006c-0.700195 37.5 12.6992 75.0996 41.1992 103.6c37 37 89.2002 49.6006 136.601 37.9004c9.09961 -2.2998 12.2998 -13.6006 5.7002 -20.2002
+l-74.4004 -74.4004l11.2998 -67.8994l67.9004 -11.2998l74.3994 74.3994c6.60059 6.60059 17.9004 3.5 20.1006 -5.5c11.7998 -47.3994 -0.799805 -99.5996 -37.9004 -136.7c-13 -13 -28 -22.5996 -43.7998 -29.5l-19.4004 19.4004c-20 20 -46.5996 31 -74.8994 31z
+M227.8 141c-4.89941 -21.0996 -2.59961 -42.7998 5 -62.7002l-123.6 -123.6c-25 -25 -65.5 -25 -90.5 0s-25 65.5 0 90.5l152.399 152.5zM64 -24c13.2998 0 24 10.7998 24 24c0 13.2998 -10.7002 24 -24 24s-24 -10.7002 -24 -24c0 -13.2002 10.7998 -24 24 -24z" />
+ <glyph glyph-name="tram" unicode="&#xf7da;"
+d="M288 384c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32s32 -14.2998 32 -32s-14.2998 -32 -32 -32zM511.5 396.1c2.2002 -8.5 -2.7998 -17.2998 -11.4004 -19.5996l-228.1 -60.7998v-91.7002h176c17.7002 0 32 -14.2998 32 -32v-224c0 -17.7002 -14.2998 -32 -32 -32
+h-384c-17.7002 0 -32 14.2998 -32 32v224c0 17.7002 14.2998 32 32 32h176v83.2002l-219.9 -58.7002c-1.2998 -0.299805 -2.69922 -0.5 -4.09961 -0.5c-7.09961 0 -13.5 4.7002 -15.4004 11.7998c-2.2998 8.60059 2.80078 17.2998 11.3008 19.6006l480 128
+c8.59961 2.2998 17.2998 -2.7002 19.5996 -11.3008zM176 64v96h-96v-96h96zM336 160v-96h96v96h-96zM304 160h-96v-96h96v96zM192 352c-17.7002 0 -32 14.2998 -32 32s14.2998 32 32 32s32 -14.2998 32 -32s-14.2998 -32 -32 -32z" />
+ <glyph glyph-name="fire-alt" unicode="&#xf7e4;" horiz-adv-x="448"
+d="M323.56 396.8c72.46 -67.2598 124.44 -177.13 124.44 -230.399c0 -127.25 -100.29 -230.4 -224 -230.4s-224 103.15 -224 230.4c0 71.6396 69.7402 190.43 168 281.6c38.2803 -35.5303 72.0801 -73.6201 99.3398 -111.17
+c16.6406 20.3799 35.4199 40.6699 56.2197 59.9697zM304.09 56.1504c49.2402 34.4492 60.7803 101.229 33.4297 153.42c-3.35938 6.41992 -7.12988 13.1201 -11.2695 19.9697l-58.6299 -66.8799s-91.9004 117.359 -98.8301 125.34
+c-48.4805 -58.1201 -72.79 -92.1396 -72.79 -130.75c0 -77.5098 58.71 -125.25 130.86 -125.25c28.8594 0 55.5693 8.99023 77.2295 24.1504z" />
+ <glyph glyph-name="bacon" unicode="&#xf7e5;" horiz-adv-x="576"
+d="M218.92 111.61c-34.8604 -34.8799 -59.6504 -44.1602 -85.9199 -54c-26 -9.76074 -53 -20.1299 -88.1699 -52.7305l-35.7598 35.5098c-12.7002 12.6104 -12.0703 33.6104 1.79004 45.0605c36.3398 29.9795 66.21 41.0996 91.0693 50.3594
+c9.16992 3.41016 68.9707 19.2607 98.7207 98.0908c8.60938 22.8096 46.3496 134.58 188.979 187.72c15.8506 5.91016 27.3604 10.2002 41 20.2998c4.53516 3.35449 12.7939 6.07617 18.4346 6.07617c7.00879 0 16.751 -3.98926 21.7451 -8.90625l37.6709 -37.3896
+c-30.1504 -27.6904 -52.9102 -36.3701 -76.79 -45.3701c-28.3008 -10.6104 -57.5703 -21.5801 -97.3506 -61.3799s-50.7598 -69.0498 -61.3701 -97.3398c-9.84961 -26.3008 -19.1602 -51.1104 -54.0498 -86zM566.92 343.61
+c12.7002 -12.5996 12.0801 -33.6602 -1.80957 -45.0605c-36.3701 -30.0098 -66.2402 -41.1396 -91.1104 -50.4102c-9.2002 -3.43945 -69 -19.3193 -98.7305 -98.1094c-8.93945 -23.5898 -46.7393 -134.69 -188.939 -187.69
+c-15.8496 -5.89941 -27.3398 -10.1797 -40.9404 -20.2402c-4.53613 -3.35645 -12.7969 -6.08008 -18.4395 -6.08008c-7.00586 0 -16.7461 3.9873 -21.7402 8.90039l-37.6797 37.4297c30.1094 27.6699 52.8594 36.3301 76.7197 45.3301
+c28.2998 10.5898 57.5098 21.54 97.2998 61.3203s50.7803 69 61.4199 97.3301c9.81055 26.3096 19.1104 51.1602 54 86c34.8906 34.8398 59.6904 44.1504 85.9502 54c26.0703 9.79004 53.0605 20.1602 88.25 52.79z" />
+ <glyph glyph-name="book-medical" unicode="&#xf7e6;" horiz-adv-x="448"
+d="M448 89.5996c0 -9.59961 -3.2002 -15.999 -9.59961 -19.1992c-3.2002 -12.8008 -3.2002 -57.6006 0 -73.6006c6.39941 -6.39941 9.59961 -12.7998 9.59961 -19.2002v-16c0 -16 -12.7998 -25.5996 -25.5996 -25.5996h-326.4c-54.4004 0 -96 41.5996 -96 96v320
+c0 54.4004 41.5996 96 96 96h326.4c16 0 25.5996 -9.59961 25.5996 -25.5996v-332.801zM144 280v-48c0 -4.41602 3.58398 -8 8 -8h56v-56c0 -4.41602 3.58398 -8 8 -8h48c4.41602 0 8 3.58398 8 8v56h56c4.41602 0 8 3.58398 8 8v48c0 4.41602 -3.58398 8 -8 8h-56v56
+c0 4.41602 -3.58398 8 -8 8h-48c-4.41602 0 -8 -3.58398 -8 -8v-56h-56c-4.41602 0 -8 -3.58398 -8 -8zM380.8 0v64h-284.8c-16 0 -32 -12.7998 -32 -32s12.7998 -32 32 -32h284.8z" />
+ <glyph glyph-name="bread-slice" unicode="&#xf7ec;" horiz-adv-x="576"
+d="M288 448c180 0 288 -93.4004 288 -169.14c0 -30.3008 -24.2402 -54.8604 -64 -54.8604v-256c0 -17.6699 -16.1201 -32 -36 -32h-376c-19.8799 0 -36 14.3301 -36 32v256c-39.7598 0 -64 24.5596 -64 54.8604c0 75.7393 108 169.14 288 169.14z" />
+ <glyph glyph-name="cheese" unicode="&#xf7ef;"
+d="M0 160h512v-160c0 -17.6641 -14.3359 -32 -32 -32h-448c-17.6641 0 -32 14.3359 -32 32v160zM299.83 416c118.17 -6.2002 212.17 -104.11 212.17 -224h-512l278.7 217c4.83984 3.875 13.7998 7.01953 20 7.01953c0.311523 0 0.818359 -0.00878906 1.12988 -0.0195312z
+" />
+ <glyph glyph-name="clinic-medical" unicode="&#xf7f2;" horiz-adv-x="576"
+d="M288 333l218.74 -192.9c1.54004 -1.37988 3.55957 -2.04004 5.25977 -3.19922v-184.9c0 -8.83203 -7.16797 -16 -16 -16h-416c-8.83203 0 -16 7.16797 -16 16v184.94c1.78027 1.20996 3.84961 1.88965 5.46973 3.34961zM384 72v48c0 4.41602 -3.58398 8 -8 8h-56v56
+c0 4.41602 -3.58398 8 -8 8h-48c-4.41602 0 -8 -3.58398 -8 -8v-56h-56c-4.41602 0 -8 -3.58398 -8 -8v-48c0 -4.41602 3.58398 -8 8 -8h56v-56c0 -4.41602 3.58398 -8 8 -8h48c4.41602 0 8 3.58398 8 8v56h56c4.41602 0 8 3.58398 8 8zM570.69 211.72
+c2.93066 -2.63184 5.31055 -7.96484 5.31055 -11.9053c0 -3.38086 -1.83301 -8.16797 -4.09082 -10.6846l-21.4004 -23.8203c-2.63184 -2.93066 -7.96484 -5.30957 -11.9043 -5.30957c-3.38574 0 -8.17773 1.83691 -10.6953 4.09961l-229.32 202.271
+c-2.50391 2.20898 -7.24609 4.00195 -10.585 4.00195s-8.08105 -1.79297 -10.585 -4.00195l-229.32 -202.28c-2.51758 -2.2627 -7.30957 -4.09961 -10.6953 -4.09961c-3.93848 0 -9.27246 2.37891 -11.9043 5.30957l-21.4102 23.8203
+c-2.2627 2.51758 -4.09961 7.30957 -4.09961 10.6953c0 3.93945 2.37891 9.27246 5.30957 11.9043l256 226c6.2832 5.68066 18.2559 10.29 26.7256 10.29c8.46875 0 20.4424 -4.60938 26.7246 -10.29z" />
+ <glyph glyph-name="comment-medical" unicode="&#xf7f5;"
+d="M256 416c141.39 0 256 -93.1201 256 -208s-114.61 -208 -256 -208h-0.473633c-30.4814 0 -78.3838 8.68164 -106.927 19.3799c-24.5996 -19.6299 -74.3398 -51.3799 -140.6 -51.3799c-4.41113 0.00488281 -7.99023 3.58887 -7.99023 8
+c0 1.75879 0.981445 4.22266 2.19043 5.5c0.5 0.5 42.2598 45.4502 54.7998 95.7598c-35.5898 35.7402 -57 81.1807 -57 130.74c0 114.88 114.62 208 256 208zM352 184v48c0 4.41602 -3.58398 8 -8 8h-56v56c0 4.41602 -3.58398 8 -8 8h-48c-4.41602 0 -8 -3.58398 -8 -8
+v-56h-56c-4.41602 0 -8 -3.58398 -8 -8v-48c0 -4.41602 3.58398 -8 8 -8h56v-56c0 -4.41602 3.58398 -8 8 -8h48c4.41602 0 8 3.58398 8 8v56h56c4.41602 0 8 3.58398 8 8z" />
+ <glyph glyph-name="crutch" unicode="&#xf7f7;" horiz-adv-x="511"
+d="M507.31 262.29c2.56348 -2.58301 4.64355 -7.63184 4.64355 -11.2695c0 -3.66602 -2.10742 -8.74219 -4.70312 -11.3301l-22.6201 -22.6309c-2.58691 -2.58691 -7.65625 -4.6875 -11.3145 -4.6875c-3.65918 0 -8.72852 2.10059 -11.3154 4.6875l-181 181
+c-2.58789 2.58691 -4.6875 7.65625 -4.6875 11.3154s2.09961 8.72852 4.6875 11.3154l22.6904 22.5996c2.58594 2.58496 7.65332 4.68262 11.3096 4.68262s8.72363 -2.09766 11.3096 -4.68262zM327.77 195.88l55.1006 55.1201l45.25 -45.2695l-109.68 -109.681
+c-10.7441 -10.748 -31.4814 -22.2393 -46.29 -25.6494l-120.25 -27.75l-102 -102c-2.58691 -2.58789 -7.65625 -4.6875 -11.3154 -4.6875s-8.72754 2.09961 -11.3154 4.6875l-22.6191 22.6191c-2.58789 2.58789 -4.6875 7.65625 -4.6875 11.3154
+s2.09961 8.72852 4.6875 11.3154l102 102l27.7393 120.26c3.4248 14.8057 14.9248 35.5439 25.6699 46.29l109.671 109.67l45.25 -45.25l-55.1006 -55.1006zM273.2 141.31l9.30957 9.31055l-67.8896 67.8896l-9.31055 -9.30957
+c-3.57715 -3.59082 -7.41211 -10.5127 -8.55957 -15.4502l-18.2998 -79.2998l79.2998 18.3193c4.94043 1.13379 11.8623 4.95996 15.4502 8.54004z" />
+ <glyph glyph-name="egg" unicode="&#xf7fb;" horiz-adv-x="384"
+d="M192 448c106 0 192 -214 192 -320s-86 -192 -192 -192s-192 86 -192 192s86 320 192 320z" />
+ <glyph glyph-name="hamburger" unicode="&#xf805;"
+d="M464 192c26.4961 0 48 -21.5039 48 -48s-21.5039 -48 -48 -48h-416c-26.4961 0 -48 21.5039 -48 48s21.5039 48 48 48h416zM480 64c8.83203 0 16 -7.16797 16 -16v-16c0 -35.3281 -28.6719 -64 -64 -64h-352c-35.3281 0 -64 28.6719 -64 64v16
+c0 8.83203 7.16797 16 16 16h448zM58.6396 224c-34.5693 0 -54.6396 43.9102 -34.8193 75.8896c40.1797 64.9102 128.64 116.011 232.18 116.11c103.55 -0.0996094 192 -51.2002 232.18 -116.12c19.8008 -31.9795 -0.25 -75.8799 -34.8193 -75.8799h-394.721zM384 336
+c-8.83203 0 -16 -7.16797 -16 -16s7.16797 -16 16 -16s16 7.16797 16 16s-7.16797 16 -16 16zM256 368c-8.83203 0 -16 -7.16797 -16 -16s7.16797 -16 16 -16s16 7.16797 16 16s-7.16797 16 -16 16zM128 336c-8.83203 0 -16 -7.16797 -16 -16s7.16797 -16 16 -16
+s16 7.16797 16 16s-7.16797 16 -16 16z" />
+ <glyph glyph-name="hand-middle-finger" unicode="&#xf806;"
+d="M479.93 130.88l0.0703125 -82.8799v0c0 -61.7969 -50.1533 -111.973 -111.95 -112h-215c-25.6074 0.00292969 -61.084 14.7012 -79.1895 32.8096l-30.9307 30.9307c-6.0332 6.03223 -10.9297 17.8525 -10.9297 26.3838v0.00585938v73.4697v0.00292969
+c0 12.7383 9.24707 27.7002 20.6396 33.3975l27.3604 15v-76c0 -4.41602 3.58398 -8 8 -8s8 3.58398 8 8v147.04c0 15.2598 12.8701 28.3799 30.8701 31.3799l30.6797 5.12012c17.8203 2.96973 34.4502 -8.38965 34.4502 -23.54v-32c0 -4.41602 3.58398 -8 8 -8
+s8 3.58398 8 8v200v0.0224609c0 26.4961 21.5039 48 48 48c0.408203 0 1.07129 -0.00976562 1.48047 -0.0224609c26.2695 -0.799805 46.5195 -23.7197 46.5195 -50v-198c0 -4.41602 3.58398 -8 8 -8s8 3.58398 8 8v32c0 15.1396 16.6299 26.5 34.4502 23.5303
+l38.3994 -6.40039c13.46 -2.25 23.1504 -12.0996 23.1504 -23.54v-49.5898l35.6504 -8.92969c15.5986 -3.89844 28.2686 -20.1113 28.2793 -36.1904z" />
+ <glyph glyph-name="hard-hat" unicode="&#xf807;"
+d="M480 160v-64h-448v64c0 80.25 49.2803 148.92 119.19 177.62l40.8096 -81.6201v112c0 8.83203 7.16797 16 16 16h96c8.83203 0 16 -7.16797 16 -16v-112l40.8096 81.6201c69.9102 -28.7002 119.19 -97.3701 119.19 -177.62zM496 64c8.83203 0 16 -7.16797 16 -16v-32
+c0 -8.83203 -7.16797 -16 -16 -16h-480c-8.83203 0 -16 7.16797 -16 16v32c0 8.83203 7.16797 16 16 16h480z" />
+ <glyph glyph-name="hotdog" unicode="&#xf80f;"
+d="M488.56 424.56c12.9297 -12.9326 23.4238 -38.2715 23.4238 -56.5596s-10.4941 -43.627 -23.4238 -56.5596l-352 -352c-13.0205 -13.4824 -38.7998 -24.4238 -57.543 -24.4238c-44.1592 0 -80 35.8408 -80 80c0 18.7432 10.9414 44.5225 24.4238 57.543l352 352
+c12.9326 12.9297 38.2715 23.4238 56.5596 23.4238s43.627 -10.4941 56.5596 -23.4238zM438.63 329.37c2.58691 2.58691 4.68652 7.65625 4.68652 11.3145c0 8.83301 -7.16797 16.002 -16.001 16.002c-3.65918 0 -8.72852 -2.09961 -11.3154 -4.68652
+c-12.4102 -12.4102 -21.7598 -14 -34.71 -16.2305c-14.4004 -2.46973 -32.3203 -5.55957 -51.9199 -25.1494c-19.6006 -19.5898 -22.6699 -37.5205 -25.1396 -51.9199c-2.24023 -12.9502 -3.84082 -22.3105 -16.2402 -34.71
+c-12.4004 -12.4004 -21.7002 -14.0098 -34.7002 -16.2305c-14.4004 -2.46973 -32.3203 -5.5498 -51.9199 -25.1396c-19.6006 -19.5898 -22.6699 -37.5205 -25.1504 -51.9199c-2.21973 -12.9502 -3.80957 -22.29 -16.2197 -34.7002s-21.71 -14.0098 -34.71 -16.2305
+c-14.4004 -2.46973 -32.3203 -5.5498 -51.9199 -25.1396c-2.58691 -2.58691 -4.68652 -7.65625 -4.68652 -11.3145c0 -8.83301 7.16797 -16.002 16.001 -16.002c3.65918 0 8.72852 2.09961 11.3154 4.68652c12.4004 12.4004 21.7598 14 34.7002 16.2305
+c14.3994 2.46973 32.3301 5.55957 51.9297 25.1494c19.6006 19.5898 22.6699 37.5205 25.1396 51.9199c2.23047 12.9502 3.83008 22.2998 16.2305 34.7002s21.75 14 34.7002 16.2197c14.3994 2.48047 32.3193 5.56055 51.9199 25.1504
+c19.5996 19.5898 22.6699 37.5195 25.1494 51.9199c2.23047 12.9502 3.83008 22.3096 16.2305 34.71s21.7598 14 34.7002 16.2305c14.4102 2.46973 32.3301 5.5498 51.9297 25.1396zM31.4404 125.82l-11.5508 11.5498c-24.9893 24.9902 -26.6592 63.8398 -3.71973 86.7803
+l207.68 207.68c22.9404 22.9395 61.79 21.2803 86.79 -3.71973l11.54 -11.5508zM480.56 258.18l11.5508 -11.54c24.9893 -25 26.6592 -63.8496 3.71973 -86.79l-207.68 -207.68c-22.9404 -22.9395 -61.79 -21.2803 -86.79 3.71973l-11.54 11.5508z" />
+ <glyph glyph-name="ice-cream" unicode="&#xf810;" horiz-adv-x="448"
+d="M368 288c26.4961 0 48 -21.5039 48 -48s-21.5039 -48 -48 -48h-288c-26.4961 0 -48 21.5039 -48 48s21.5039 48 48 48h0.94043c-0.519531 4.51855 -0.94043 11.8779 -0.94043 16.4268c0 79.4883 64.5117 144 144 144s144 -64.5117 144 -144
+c0 -4.54883 -0.420898 -11.9082 -0.94043 -16.4268h0.94043zM195.38 -45.6904l-99.3799 205.69h256l-99.3799 -205.69c-4.66504 -10.1084 -17.4863 -18.3135 -28.6201 -18.3135s-23.9551 8.20508 -28.6201 18.3135z" />
+ <glyph glyph-name="laptop-medical" unicode="&#xf812;" horiz-adv-x="640"
+d="M232 224c-4.41602 0 -8 3.58398 -8 8v48c0 4.41602 3.58398 8 8 8h56v56c0 4.41602 3.58398 8 8 8h48c4.41602 0 8 -3.58398 8 -8v-56h56c4.41602 0 8 -3.58398 8 -8v-48c0 -4.41602 -3.58398 -8 -8 -8h-56v-56c0 -4.41602 -3.58398 -8 -8 -8h-48
+c-4.41602 0 -8 3.58398 -8 8v56h-56zM576 400v-336h-512v336c0.0771484 26.4189 21.5811 47.9229 48 48h416c26.4189 -0.0771484 47.9229 -21.5811 48 -48zM512 128v256h-384v-256h384zM624 32c8.83203 0 16 -7.16797 16 -16v-16
+c-0.104492 -35.2236 -28.7764 -63.8955 -64 -64h-512c-35.2236 0.104492 -63.8955 28.7764 -64 64v16c0 8.83203 7.16797 16 16 16h239.23c-0.230469 -14.5303 14.0791 -32 32.7695 -32h60.7998c18.0303 0 32 12.1904 32.7402 32h242.46z" />
+ <glyph glyph-name="pager" unicode="&#xf815;"
+d="M448 384c35.3281 0 64 -28.6719 64 -64v-256c0 -35.3281 -28.6719 -64 -64 -64h-384c-35.3281 0 -64 28.6719 -64 64v256c0 35.3281 28.6719 64 64 64h384zM160 80v48h-80c-8.83203 0 -16 -7.16797 -16 -16v-16c0 -8.83203 7.16797 -16 16 -16h80zM288 96v16
+c0 8.83203 -7.16797 16 -16 16h-80v-48h80c8.83203 0 16 7.16797 16 16zM448 224v64c0 17.6641 -14.3359 32 -32 32h-320c-17.6641 0 -32 -14.3359 -32 -32v-64c0 -17.6641 14.3359 -32 32 -32h320c17.6641 0 32 14.3359 32 32z" />
+ <glyph glyph-name="pepper-hot" unicode="&#xf816;"
+d="M330.67 184.88h107.46l37.0498 -38.54c-48.5293 -87.4697 -206.54 -210.34 -419.18 -210.34c-30.9121 0 -56 25.0879 -56 56s25.0879 56 56 56c141.58 0 163.44 181.24 221.92 250.82l52.75 -24.2207v-89.7197zM461.76 313.25
+c27.7324 -25.2861 50.2402 -76.2676 50.2402 -113.798v-0.142578c0 -13.6797 -2.2998 -26.6895 -5.55957 -39.3096l-54.6807 56.8799h-89.0898v78.2402l-74.6699 34.29c22.3398 14.0498 48.3398 22.5898 76.3398 22.5898
+c16.4658 -0.00683594 42.0732 -5.36523 57.1602 -11.96c18.4502 37.2197 8.25977 61.96 1.40039 72.3203c-0.787109 1.14062 -1.42578 3.19043 -1.42578 4.57617c0 1.82227 1.03711 4.35449 2.31543 5.65332l22.9004 23c1.29688 1.31836 3.85156 2.38867 5.70117 2.38867
+c2.1123 0 4.90234 -1.33398 6.22852 -2.97852c18.5596 -23.4805 35.2998 -71.9102 3.13965 -131.75z" />
+ <glyph glyph-name="pizza-slice" unicode="&#xf818;"
+d="M158.87 447.85c181.91 -17.1699 332.02 -164.93 352.899 -345.71c1.87012 -16.2197 -7.89941 -31.54 -23.6191 -35.8994l-56.9404 -15.7803c-7.94043 186.39 -134.86 311.51 -322.479 317l14.8096 56.2705c4.12988 15.6992 19.1699 25.6396 35.3301 24.1191z
+M100.4 335.85c176.069 -1.95996 294.88 -119.25 299.149 -294.14l-379 -105.1c-1.1709 -0.324219 -3.1084 -0.587891 -4.32422 -0.587891c-8.94824 0 -16.21 7.26172 -16.21 16.21c0 1.1582 0.239258 3.00781 0.53418 4.12793zM128 32c17.6641 0 32 14.3359 32 32
+s-14.3359 32 -32 32s-32 -14.3359 -32 -32s14.3359 -32 32 -32zM176 184c17.6641 0 32 14.3359 32 32s-14.3359 32 -32 32s-32 -14.3359 -32 -32s14.3359 -32 32 -32zM280 80c17.6641 0 32 14.3359 32 32s-14.3359 32 -32 32s-32 -14.3359 -32 -32s14.3359 -32 32 -32z" />
+ <glyph glyph-name="trash-restore" unicode="&#xf829;" horiz-adv-x="448"
+d="M53.2002 -19l-21.2002 339h384l-21.2002 -339c-1.55469 -24.8369 -23.0146 -44.9971 -47.8994 -45h-245.801c-24.8848 0.00292969 -46.3447 20.1631 -47.8994 45zM123.31 156.8c-10.0791 -10.6201 -2.93945 -28.7998 11.3203 -28.7998h57.3701v-112
+c0 -8.83203 7.16797 -16 16 -16h32c8.83203 0 16 7.16797 16 16v112h57.3701c14.2598 0 21.3994 18.1797 11.3203 28.7998l-89.3809 94.2598c-2.52441 2.72949 -7.5918 4.94336 -11.3096 4.94336s-8.78516 -2.21387 -11.3096 -4.94336zM432 416
+c8.83203 0 16 -7.16797 16 -16v-32c0 -8.83203 -7.16797 -16 -16 -16h-416c-8.83203 0 -16 7.16797 -16 16v32c0 8.83203 7.16797 16 16 16h120l9.40039 18.7002c3.58984 7.3418 13.1357 13.2998 21.3086 13.2998h0.0908203h114.3h0.0175781
+c8.20215 0 17.8262 -5.95801 21.4824 -13.2998l9.40039 -18.7002h120z" />
+ <glyph glyph-name="trash-restore-alt" unicode="&#xf82a;" horiz-adv-x="448"
+d="M32 -16v336h384v-336c0 -26.4961 -21.5039 -48 -48 -48h-288c-26.4961 0 -48 21.5039 -48 48zM123.31 156.8c-10.0791 -10.6201 -2.93945 -28.7998 11.3203 -28.7998h57.3701v-112c0 -8.83203 7.16797 -16 16 -16h32c8.83203 0 16 7.16797 16 16v112h57.3701
+c14.2598 0 21.3994 18.1797 11.3203 28.7998l-89.3809 94.2598c-2.52441 2.72949 -7.5918 4.94336 -11.3096 4.94336s-8.78516 -2.21387 -11.3096 -4.94336zM432 416c8.83203 0 16 -7.16797 16 -16v-32c0 -8.83203 -7.16797 -16 -16 -16h-416c-8.83203 0 -16 7.16797 -16 16
+v32c0 8.83203 7.16797 16 16 16h120l9.40039 18.7002c3.58984 7.3418 13.1357 13.2998 21.3086 13.2998h0.0908203h114.3h0.0175781c8.20215 0 17.8262 -5.95801 21.4824 -13.2998l9.40039 -18.7002h120z" />
+ <glyph glyph-name="user-nurse" unicode="&#xf82f;" horiz-adv-x="448"
+d="M57.7803 160c-8.82227 0.00976562 -15.9814 7.17773 -15.9814 16c0 2.09277 0.761719 5.30957 1.70117 7.17969c15.2305 29.8203 31.2803 62.2305 42.1699 95.54c7.58008 23.1904 10.3301 47.6904 10.3301 72.0801v49.2002l128 48l128 -48v-49.2002
+c0 -24.3896 2.78027 -48.8896 10.3496 -72.0801c10.8701 -33.3096 26.9199 -65.6895 42.1504 -95.54c0.939453 -1.87012 1.70117 -5.08691 1.70117 -7.17969c0 -8.82227 -7.15918 -15.9902 -15.9814 -16h-82.3594c-22.5107 -19.6797 -51.6201 -32 -83.8604 -32
+s-61.3496 12.3203 -83.8604 32h-82.3594zM184 376.33v-16.6602c0 -2.75977 2.24023 -5 5 -5h21.6699v-21.6699c0 -2.75977 2.24023 -5 5 -5h16.6602c2.75977 0 5 2.24023 5 5v21.6699h21.6699c2.75977 0 5 2.24023 5 5v16.6602c0 2.75977 -2.24023 5 -5 5h-21.6699v21.6699
+c0 2.75977 -2.24023 5 -5 5h-16.6602c-2.75977 0 -5 -2.24023 -5 -5v-21.6699h-21.6699c-2.75977 0 -5 -2.24023 -5 -5zM144 288v-32c0 -44.1602 35.8398 -80 80 -80s80 35.8398 80 80v32h-160zM319.41 128c71.4902 -3.09961 128.59 -61.5996 128.59 -133.79
+c0 -32.1318 -26.0781 -58.21 -58.21 -58.21v0h-331.58c-32.1318 0 -58.21 26.0781 -58.21 58.21c0 72.1904 57.0996 130.69 128.59 133.79l95.4102 -95.3896z" />
+ </font>
+</defs></svg>
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.ttf b/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.ttf
new file mode 100644
index 0000000000..7ece3282a4
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.ttf
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.woff b/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.woff
new file mode 100644
index 0000000000..a892a7a9c1
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.woff
Binary files differ
diff --git a/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.woff2 b/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.woff2
new file mode 100644
index 0000000000..71b07ce028
--- /dev/null
+++ b/testing/web-platform/tests/tools/wave/www/webfonts/fa-solid-900.woff2
Binary files differ
diff --git a/testing/web-platform/tests/tools/webdriver/.gitignore b/testing/web-platform/tests/tools/webdriver/.gitignore
new file mode 100644
index 0000000000..e8413e0ee4
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/.gitignore
@@ -0,0 +1,2 @@
+webdriver.egg-info/
+*.pyc
diff --git a/testing/web-platform/tests/tools/webdriver/README.md b/testing/web-platform/tests/tools/webdriver/README.md
new file mode 100644
index 0000000000..9433aaa926
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/README.md
@@ -0,0 +1,73 @@
+# WebDriver client for Python
+
+This package provides Python bindings
+that conform to the [W3C WebDriver standard](https://w3c.github.io/webdriver/),
+which specifies a remote control protocol for web browsers.
+
+These bindings are written with specification conformance testing in mind,
+so that implementers of remote ends
+can determine whether their drivers meet the recognised standard.
+The client is used for the WebDriver specification tests
+in [web-platform-tests](https://github.com/web-platform-tests/wpt).
+
+## Installation
+
+To install the package individually
+in your virtualenv or system-wide:
+
+ % python setup.py install
+
+Since this package does not have any external dependencies,
+you can also use the client directly from the checkout directory,
+which is useful if you want to contribute patches back:
+
+ % cd /path/to/wdclient
+    % python3
+ >>> import webdriver
+ >>>
+
+If you are writing WebDriver specification tests for
+[WPT](https://github.com/web-platform-tests/wpt),
+there is no need to install the client manually
+as it is included in the `tools/webdriver` directory.
+
+## Usage
+
+You can use the built-in
+[context manager](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement)
+to manage the lifetime of the session.
+The session is started implicitly
+at the first call to a command if it has not already been started,
+and will implicitly be ended when exiting the context:
+
+```py
+import webdriver
+
+with webdriver.Session("127.0.0.1", 4444) as session:
+ session.url = "https://mozilla.org"
+    print("The current URL is %s" % session.url)
+```
+
+The following is functionally equivalent to the above,
+but gives you manual control of the session:
+
+```py
+import webdriver
+
+session = webdriver.Session("127.0.0.1", 4444)
+session.start()
+
+session.url = "https://mozilla.org"
+print("The current URL is %s" % session.url)
+
+session.end()
+```
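+
+The package also includes an asyncio-based client
+for the [WebDriver BiDi](https://w3c.github.io/webdriver-bidi/) protocol,
+exposed as `webdriver.BidiSession`.
+The following is a minimal sketch, assuming a BiDi-capable remote end
+is listening on `ws://127.0.0.1:4445`
+and that the `websockets` package used by the BiDi transport is available;
+the port and the capabilities shape are illustrative assumptions:
+
+```py
+import asyncio
+
+import webdriver
+
+
+async def main():
+    # A BiDi-only session: the WebDriver session is created over the
+    # WebSocket connection when the async context is entered and the
+    # transport is closed again on exit.
+    async with webdriver.BidiSession.bidi_only(
+            "ws://127.0.0.1:4445",
+            requested_capabilities={"alwaysMatch": {}}) as bidi_session:
+        contexts = await bidi_session.browsing_context.get_tree()
+        await bidi_session.browsing_context.navigate(
+            context=contexts[0]["context"],
+            url="https://mozilla.org",
+            wait="complete")
+
+
+asyncio.run(main())
+```
+
+Commands are exposed as coroutine methods taking keyword arguments
+on per-module attributes of the session
+(`session`, `browsing_context` and `script`).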
+
+## Dependencies
+
+The HTTP client uses only the Python standard library;
+no external PyPI dependencies are needed.
+The asyncio-based BiDi client additionally requires
+the `websockets` package.
diff --git a/testing/web-platform/tests/tools/webdriver/setup.py b/testing/web-platform/tests/tools/webdriver/setup.py
new file mode 100644
index 0000000000..c473961cb6
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/setup.py
@@ -0,0 +1,14 @@
+from setuptools import setup, find_packages
+
+setup(name="webdriver",
+ version="1.0",
+ description="WebDriver client compatible with "
+ "the W3C browser automation specification.",
+ author="Mozilla Engineering Productivity",
+ author_email="tools@lists.mozilla.org",
+ license="BSD",
+ packages=find_packages(),
+ classifiers=["Development Status :: 4 - Beta",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
+ "Operating System :: OS Independent"])
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/__init__.py b/testing/web-platform/tests/tools/webdriver/webdriver/__init__.py
new file mode 100644
index 0000000000..a81751407e
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/__init__.py
@@ -0,0 +1,39 @@
+# flake8: noqa
+
+from .client import (
+ Cookies,
+ Element,
+ Find,
+ Frame,
+ Session,
+ ShadowRoot,
+ Timeouts,
+ Window)
+from .error import (
+ ElementNotSelectableException,
+ ElementNotVisibleException,
+ InvalidArgumentException,
+ InvalidCookieDomainException,
+ InvalidElementCoordinatesException,
+ InvalidElementStateException,
+ InvalidSelectorException,
+ InvalidSessionIdException,
+ JavascriptErrorException,
+ MoveTargetOutOfBoundsException,
+ NoSuchAlertException,
+ NoSuchElementException,
+ NoSuchFrameException,
+ NoSuchWindowException,
+ ScriptTimeoutException,
+ SessionNotCreatedException,
+ StaleElementReferenceException,
+ TimeoutException,
+ UnableToSetCookieException,
+ UnexpectedAlertOpenException,
+ UnknownCommandException,
+ UnknownErrorException,
+ UnknownMethodException,
+ UnsupportedOperationException,
+ WebDriverException)
+from .bidi import (
+ BidiSession)
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/bidi/__init__.py b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/__init__.py
new file mode 100644
index 0000000000..e7c56332f9
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/__init__.py
@@ -0,0 +1,3 @@
+# flake8: noqa
+
+from .client import BidiSession
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/bidi/client.py b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/client.py
new file mode 100644
index 0000000000..9dc80d8121
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/client.py
@@ -0,0 +1,226 @@
+# mypy: allow-untyped-defs
+
+import asyncio
+from collections import defaultdict
+from typing import Any, Awaitable, Callable, List, Optional, Mapping, MutableMapping
+from urllib.parse import urljoin, urlparse
+
+from . import modules
+from .error import from_error_details
+from .transport import get_running_loop, Transport
+
+
+class BidiSession:
+ """A WebDriver BiDi session.
+
+ This is the main representation of a BiDi session and provides the
+ interface for running commands in the session, and for attaching
+ event handlers to the session. For example:
+
+ async def on_log(method, data):
+ print(data)
+
+    session = BidiSession("ws://localhost:4445",
+                          requested_capabilities=capabilities)
+    remove_listener = session.add_event_listener("log.entryAdded", on_log)
+    await session.start()
+    await session.session.subscribe(events=["log.entryAdded"])
+
+    # Do some stuff with the session
+
+    remove_listener()
+    await session.end()
+
+ If the session id is provided it's assumed that the underlying
+ WebDriver session was already created, and the WebSocket URL was
+ taken from the new session response. If no session id is provided, it's
+ assumed that a BiDi-only session should be created when start() is called.
+
+    It can also be used as an async context manager, with the WebSocket
+    transport implicitly being created when the context is entered, and
+    closed when the context is exited.
+
+ :param websocket_url: WebSockets URL on which to connect to the session.
+ This excludes any path component.
+ :param session_id: String id of existing HTTP session
+ :param capabilities: Capabilities response of existing session
+ :param requested_capabilities: Dictionary representing the capabilities request.
+
+ """
+
+ def __init__(self,
+ websocket_url: str,
+ session_id: Optional[str] = None,
+ capabilities: Optional[Mapping[str, Any]] = None,
+ requested_capabilities: Optional[Mapping[str, Any]] = None):
+ self.transport: Optional[Transport] = None
+
+ # The full URL for a websocket looks like
+ # ws://<host>:<port>/session when we're creating a session and
+ # ws://<host>:<port>/session/<sessionid> when we're connecting to an existing session.
+        # To be user friendly, handle both the case where the class was created with a full
+        # URL including the path, and the case where just a server URL is passed in.
+ parsed_url = urlparse(websocket_url)
+ if parsed_url.path == "" or parsed_url.path == "/":
+ if session_id is None:
+ websocket_url = urljoin(websocket_url, "session")
+ else:
+ websocket_url = urljoin(websocket_url, f"session/{session_id}")
+ else:
+ if session_id is not None:
+ if parsed_url.path != f"/session/{session_id}":
+                    raise ValueError(f"WebSocket URL {websocket_url} doesn't match session id {session_id}")
+ else:
+ if parsed_url.path != "/session":
+                    raise ValueError(f"WebSocket URL {websocket_url} doesn't have the expected path /session")
+
+ if session_id is None and capabilities is not None:
+ raise ValueError("Tried to create BiDi-only session with existing capabilities")
+
+ self.websocket_url = websocket_url
+ self.requested_capabilities = requested_capabilities
+ self.capabilities = capabilities
+ self.session_id = session_id
+
+ self.command_id = 0
+ self.pending_commands: MutableMapping[int, "asyncio.Future[Any]"] = {}
+ self.event_listeners: MutableMapping[
+ Optional[str],
+ List[Callable[[str, Mapping[str, Any]], Any]]
+ ] = defaultdict(list)
+
+ # Modules.
+ # For each module, have a property representing that module
+ self.session = modules.Session(self)
+ self.browsing_context = modules.BrowsingContext(self)
+ self.script = modules.Script(self)
+
+ @property
+ def event_loop(self):
+ if self.transport:
+ return self.transport.loop
+
+ return None
+
+ @classmethod
+ def from_http(cls,
+ session_id: str,
+ capabilities: Mapping[str, Any]) -> "BidiSession":
+ """Create a BiDi session from an existing HTTP session
+
+ :param session_id: String id of the session
+ :param capabilities: Capabilities returned in the New Session HTTP response."""
+ websocket_url = capabilities.get("webSocketUrl")
+ if websocket_url is None:
+ raise ValueError("No webSocketUrl found in capabilities")
+ if not isinstance(websocket_url, str):
+ raise ValueError("webSocketUrl is not a string")
+ return cls(websocket_url, session_id=session_id, capabilities=capabilities)
+
+ @classmethod
+ def bidi_only(cls,
+ websocket_url: str,
+ requested_capabilities: Optional[Mapping[str, Any]] = None) -> "BidiSession":
+ """Create a BiDi session where there is no existing HTTP session
+
+        :param websocket_url: URL to the WebSocket server listening for BiDi connections
+ :param requested_capabilities: Capabilities request for establishing the session."""
+ return cls(websocket_url, requested_capabilities=requested_capabilities)
+
+ async def __aenter__(self) -> "BidiSession":
+ await self.start()
+ return self
+
+ async def __aexit__(self, *args: Any) -> None:
+ await self.end()
+
+ async def start(self,
+ loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
+ """Connect to the WebDriver BiDi remote via WebSockets"""
+
+ if loop is None:
+ loop = get_running_loop()
+
+ self.transport = Transport(self.websocket_url, self.on_message, loop=loop)
+ await self.transport.start()
+
+ if self.session_id is None:
+ self.session_id, self.capabilities = await self.session.new(
+ capabilities=self.requested_capabilities)
+
+ async def send_command(
+ self,
+ method: str,
+ params: Mapping[str, Any]
+ ) -> Awaitable[Mapping[str, Any]]:
+        """Send a command to the remote server.
+
+        Returns a future that is resolved with the result of the command
+        once the response arrives, so the typical pattern is to await this
+        coroutine to obtain the future and then await the future itself.
+        """
+ # this isn't threadsafe
+ self.command_id += 1
+ command_id = self.command_id
+
+ body = {
+ "id": command_id,
+ "method": method,
+ "params": params
+ }
+ assert command_id not in self.pending_commands
+ assert self.transport is not None
+ self.pending_commands[command_id] = self.transport.loop.create_future()
+ await self.transport.send(body)
+
+ return self.pending_commands[command_id]
+
+ async def on_message(self, data: Mapping[str, Any]) -> None:
+ """Handle a message from the remote server"""
+ if "id" in data:
+ # This is a command response or error
+ future = self.pending_commands.get(data["id"])
+ if future is None:
+ raise ValueError(f"No pending command with id {data['id']}")
+ if "result" in data:
+ future.set_result(data["result"])
+ elif "error" in data and "message" in data:
+ assert isinstance(data["error"], str)
+ assert isinstance(data["message"], str)
+ exception = from_error_details(data["error"],
+ data["message"],
+ data.get("stacktrace"))
+ future.set_exception(exception)
+ else:
+ raise ValueError(f"Unexpected message: {data!r}")
+ elif "method" in data and "params" in data:
+ # This is an event
+ method = data["method"]
+ params = data["params"]
+
+ listeners = self.event_listeners.get(method, [])
+ if not listeners:
+ listeners = self.event_listeners.get(None, [])
+ for listener in listeners:
+ await listener(method, params)
+ else:
+ raise ValueError(f"Unexpected message: {data!r}")
+
+ async def end(self) -> None:
+ """Close websocket connection."""
+ assert self.transport is not None
+ await self.transport.end()
+ self.transport = None
+
+ def add_event_listener(
+ self,
+ name: Optional[str],
+ fn: Callable[[str, Mapping[str, Any]], Awaitable[Any]]
+ ) -> Callable[[], None]:
+ """Add a listener for the event with a given name.
+
+ If name is None, the listener is called for all messages that are not otherwise
+ handled.
+
+ :param name: Name of event to listen for or None to register a default handler
+ :param fn: Async callback function that receives event data
+
+ :return: Function to remove the added listener
+ """
+ self.event_listeners[name].append(fn)
+
+ return lambda: self.event_listeners[name].remove(fn)
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/bidi/error.py b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/error.py
new file mode 100644
index 0000000000..9e8737e54c
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/error.py
@@ -0,0 +1,70 @@
+# mypy: allow-untyped-defs
+
+import collections
+
+from typing import ClassVar, DefaultDict, Optional, Type
+
+
+class BidiException(Exception):
+ # The error_code class variable is used to map the JSON Error Code (see
+ # https://w3c.github.io/webdriver/#errors) to a BidiException subclass.
+    # TODO: Match on error and let it be a class variable only.
+ error_code = None # type: ClassVar[str]
+
+ def __init__(self, message: str, stacktrace: Optional[str] = None):
+        super().__init__(message)
+
+ self.message = message
+ self.stacktrace = stacktrace
+
+ def __repr__(self):
+ """Return the object representation in string format."""
+        return f"{self.__class__.__name__}({self.error_code}, {self.message}, {self.stacktrace})"
+
+ def __str__(self):
+ """Return the string representation of the object."""
+ message = f"{self.error_code} ({self.message})"
+
+ if self.stacktrace is not None:
+ message += f"\n\nRemote-end stacktrace:\n\n{self.stacktrace}"
+
+ return message
+
+
+class InvalidArgumentException(BidiException):
+ error_code = "invalid argument"
+
+
+class NoSuchFrameException(BidiException):
+ error_code = "no such frame"
+
+
+class UnknownCommandException(BidiException):
+ error_code = "unknown command"
+
+
+class UnknownErrorException(BidiException):
+ error_code = "unknown error"
+
+
+def from_error_details(error: str, message: str, stacktrace: Optional[str]) -> BidiException:
+ """Create specific WebDriver BiDi exception class from error details.
+
+ Defaults to ``UnknownErrorException`` if `error` is unknown.
+ """
+ cls = get(error)
+ return cls(message, stacktrace)
+
+
+def get(error_code: str) -> Type[BidiException]:
+ """Get exception from `error_code`.
+
+    Falls back to ``UnknownErrorException`` if the error code is not found.
+ """
+ return _errors.get(error_code, UnknownErrorException)
+
+
+_errors: DefaultDict[str, Type[BidiException]] = collections.defaultdict()
+for item in list(locals().values()):
+ if type(item) == type and issubclass(item, BidiException):
+ _errors[item.error_code] = item
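+
+# Illustrative examples of the lookup behaviour (comments only; they follow
+# from the registry built above):
+#
+#   get("invalid argument")   -> InvalidArgumentException
+#   get("no such error code") -> UnknownErrorException (fallback)
+#   from_error_details("unknown command", "oops", None)
+#                             -> UnknownCommandException instance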
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/__init__.py b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/__init__.py
new file mode 100644
index 0000000000..487b1270ab
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/__init__.py
@@ -0,0 +1,5 @@
+# flake8: noqa
+
+from .session import Session
+from .browsing_context import BrowsingContext
+from .script import Script
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/_module.py b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/_module.py
new file mode 100644
index 0000000000..c2034033c7
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/_module.py
@@ -0,0 +1,99 @@
+import functools
+from typing import (
+ Any,
+ Awaitable,
+ Callable,
+ Optional,
+ Mapping,
+ MutableMapping,
+ TYPE_CHECKING,
+)
+
+if TYPE_CHECKING:
+ from ..client import BidiSession
+
+
+class command:
+ """Decorator for implementing bidi commands.
+
+    Implementing a command involves specifying a function that
+ builds the parameters to the command. The decorator arranges those
+ parameters to be turned into a send_command call, using the class
+ and method names to determine the method in the call.
+
+ Commands decorated in this way don't return a future, but await
+ the actual response. In some cases it can be useful to
+ post-process this response before returning it to the client. This
+ can be done by specifying a second decorated method like
+ @command_name.result. That method will then be called once the
+ result of the original command is known, and the return value of
+ the method used as the response of the command. If this method
+ is specified, the `raw_result` parameter of the command can be set
+ to `True` to get the result without post-processing.
+
+    For example, if we had a command test.testMethod
+    which returned a result we want to convert to a TestResult type,
+ the implementation might look like:
+
+ class Test(BidiModule):
+ @command
+ def test_method(self, test_data=None):
+ return {"testData": test_data}
+
+ @test_method.result
+ def convert_test_method_result(self, result):
+            return TestResult(**result)
+ """
+
+ def __init__(self, fn: Callable[..., Mapping[str, Any]]):
+ self.params_fn = fn
+ self.result_fn: Optional[Callable[..., Any]] = None
+
+ def result(self, fn: Callable[[Any, MutableMapping[str, Any]], Any]) -> None:
+ self.result_fn = fn
+
+ def __set_name__(self, owner: Any, name: str) -> None:
+ # This is called when the class is created
+ # see https://docs.python.org/3/reference/datamodel.html#object.__set_name__
+ params_fn = self.params_fn
+ result_fn = self.result_fn
+
+ @functools.wraps(params_fn)
+ async def inner(self: Any, **kwargs: Any) -> Any:
+ raw_result = kwargs.pop("raw_result", False)
+ params = params_fn(self, **kwargs)
+
+ # Convert the classname and the method name to a bidi command name
+ mod_name = owner.__name__[0].lower() + owner.__name__[1:]
+ if hasattr(owner, "prefix"):
+ mod_name = f"{owner.prefix}:{mod_name}"
+ cmd_name = f"{mod_name}.{to_camelcase(name)}"
+
+ future = await self.session.send_command(cmd_name, params)
+ result = await future
+
+ if result_fn is not None and not raw_result:
+ # Convert the result if we have a conversion function defined
+ result = result_fn(self, result)
+ return result
+
+ # Overwrite the method on the owner class with the wrapper
+ setattr(owner, name, inner)
+
+ def __call__(*args: Any, **kwargs: Any) -> Awaitable[Any]:
+ # This isn't really used, but mypy doesn't understand __set_name__
+ pass
+
+
+class BidiModule:
+ def __init__(self, session: "BidiSession"):
+ self.session = session
+
+
+def to_camelcase(name: str) -> str:
+ """Convert a python style method name foo_bar to a BiDi command name fooBar"""
+ parts = name.split("_")
+ parts[0] = parts[0].lower()
+ for i in range(1, len(parts)):
+ parts[i] = parts[i].title()
+ return "".join(parts)
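+
+# For example (illustrative only), given the ``Test`` module from the
+# docstring above, a call such as:
+#
+#     await Test(session).test_method(test_data=1)
+#
+# sends the command "test.testMethod" with params {"testData": 1} and,
+# because a ``@test_method.result`` handler is defined, returns
+# ``TestResult(**result)`` once the response arrives.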
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/browsing_context.py b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/browsing_context.py
new file mode 100644
index 0000000000..70c834c384
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/browsing_context.py
@@ -0,0 +1,82 @@
+import base64
+from typing import Any, Mapping, MutableMapping, Optional
+
+from ._module import BidiModule, command
+
+
+class BrowsingContext(BidiModule):
+ @command
+ def capture_screenshot(self, context: str) -> Mapping[str, Any]:
+ params: MutableMapping[str, Any] = {
+ "context": context
+ }
+
+ return params
+
+ @capture_screenshot.result
+ def _capture_screenshot(self, result: Mapping[str, Any]) -> bytes:
+ assert result["data"] is not None
+ return base64.b64decode(result["data"])
+
+ @command
+ def close(self, context: Optional[str] = None) -> Mapping[str, Any]:
+ params: MutableMapping[str, Any] = {}
+
+ if context is not None:
+ params["context"] = context
+
+ return params
+
+ @command
+ def create(self, type_hint: str, reference_context: Optional[str] = None) -> Mapping[str, Any]:
+ params: MutableMapping[str, Any] = {"type": type_hint}
+
+ if reference_context is not None:
+ params["referenceContext"] = reference_context
+
+ return params
+
+ @create.result
+ def _create(self, result: Mapping[str, Any]) -> Any:
+ assert result["context"] is not None
+
+ return result
+
+ @command
+ def get_tree(self,
+ max_depth: Optional[int] = None,
+ root: Optional[str] = None) -> Mapping[str, Any]:
+ params: MutableMapping[str, Any] = {}
+
+ if max_depth is not None:
+ params["maxDepth"] = max_depth
+ if root is not None:
+ params["root"] = root
+
+ return params
+
+ @get_tree.result
+ def _get_tree(self, result: Mapping[str, Any]) -> Any:
+ assert result["contexts"] is not None
+ assert isinstance(result["contexts"], list)
+
+ return result["contexts"]
+
+ @command
+ def navigate(
+ self, context: str, url: str, wait: Optional[str] = None
+ ) -> Mapping[str, Any]:
+ params: MutableMapping[str, Any] = {"context": context, "url": url}
+ if wait is not None:
+ params["wait"] = wait
+ return params
+
+ @navigate.result
+ def _navigate(self, result: Mapping[str, Any]) -> Any:
+ if result["navigation"] is not None:
+ assert isinstance(result["navigation"], str)
+
+ assert result["url"] is not None
+ assert isinstance(result["url"], str)
+
+ return result
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/script.py b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/script.py
new file mode 100644
index 0000000000..d9af11a8e2
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/script.py
@@ -0,0 +1,136 @@
+from enum import Enum
+from typing import Any, Dict, List, Mapping, MutableMapping, Optional, Union
+
+from ..error import UnknownErrorException
+from ._module import BidiModule, command
+
+
+class ScriptEvaluateResultException(Exception):
+ def __init__(self, result: Mapping[str, Any]):
+ self.result = result
+ super().__init__("Script execution failed.")
+
+
+class OwnershipModel(Enum):
+ NONE = "none"
+ ROOT = "root"
+
+
+class RealmTypes(Enum):
+ AUDIO_WORKLET = "audio-worklet"
+ DEDICATED_WORKER = "dedicated-worker"
+ PAINT_WORKLET = "paint-worklet"
+ SERVICE_WORKER = "service-worker"
+ SHARED_WORKER = "shared-worker"
+ WINDOW = "window"
+ WORKER = "worker"
+ WORKLET = "worklet"
+
+
+class RealmTarget(Dict[str, Any]):
+ def __init__(self, realm: str):
+ dict.__init__(self, realm=realm)
+
+
+class ContextTarget(Dict[str, Any]):
+ def __init__(self, context: str, sandbox: Optional[str] = None):
+ if sandbox is None:
+ dict.__init__(self, context=context)
+ else:
+ dict.__init__(self, context=context, sandbox=sandbox)
+
+
+Target = Union[RealmTarget, ContextTarget]
+
+
+class Script(BidiModule):
+ @command
+ def call_function(
+ self,
+ function_declaration: str,
+ await_promise: bool,
+ target: Target,
+ arguments: Optional[List[Mapping[str, Any]]] = None,
+ this: Optional[Mapping[str, Any]] = None,
+ result_ownership: Optional[OwnershipModel] = None,
+ ) -> Mapping[str, Any]:
+ params: MutableMapping[str, Any] = {
+ "functionDeclaration": function_declaration,
+ "target": target,
+ "awaitPromise": await_promise,
+ }
+
+ if arguments is not None:
+ params["arguments"] = arguments
+ if this is not None:
+ params["this"] = this
+ if result_ownership is not None:
+ params["resultOwnership"] = result_ownership
+ return params
+
+ @call_function.result
+ def _call_function(self, result: Mapping[str, Any]) -> Any:
+ assert "type" in result
+
+ if result["type"] == "success":
+ return result["result"]
+ elif result["type"] == "exception":
+ raise ScriptEvaluateResultException(result)
+ else:
+ raise UnknownErrorException(f"""Invalid type '{result["type"]}' in response""")
+
+ @command
+ def disown(self, handles: List[str], target: Target) -> Mapping[str, Any]:
+ params: MutableMapping[str, Any] = {"handles": handles, "target": target}
+ return params
+
+ @command
+ def evaluate(
+ self,
+ expression: str,
+ target: Target,
+ await_promise: bool,
+ result_ownership: Optional[OwnershipModel] = None,
+ ) -> Mapping[str, Any]:
+ params: MutableMapping[str, Any] = {
+ "expression": expression,
+ "target": target,
+ "awaitPromise": await_promise,
+ }
+
+ if result_ownership is not None:
+ params["resultOwnership"] = result_ownership
+ return params
+
+ @evaluate.result
+ def _evaluate(self, result: Mapping[str, Any]) -> Any:
+ assert "type" in result
+
+ if result["type"] == "success":
+ return result["result"]
+ elif result["type"] == "exception":
+ raise ScriptEvaluateResultException(result)
+ else:
+ raise UnknownErrorException(f"""Invalid type '{result["type"]}' in response""")
+
+ @command
+ def get_realms(
+ self,
+ context: Optional[str] = None,
+ type: Optional[RealmTypes] = None,
+ ) -> Mapping[str, Any]:
+ params: MutableMapping[str, Any] = {}
+
+ if context is not None:
+ params["context"] = context
+ if type is not None:
+ params["type"] = type
+
+ return params
+
+ @get_realms.result
+ def _get_realms(self, result: Mapping[str, Any]) -> Any:
+ assert result["realms"] is not None
+ assert isinstance(result["realms"], list)
+
+ return result["realms"]
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/session.py b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/session.py
new file mode 100644
index 0000000000..7c1fef30ae
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/modules/session.py
@@ -0,0 +1,31 @@
+from typing import Any, List, Optional, Mapping, MutableMapping
+
+from ._module import BidiModule, command
+
+
+class Session(BidiModule):
+ @command
+ def new(self, capabilities: Mapping[str, Any]) -> Mapping[str, Mapping[str, Any]]:
+ return {"capabilities": capabilities}
+
+ @new.result
+ def _new(self, result: Mapping[str, Any]) -> Any:
+        return result.get("sessionId"), result.get("capabilities", {})
+
+ @command
+ def subscribe(self,
+ events: List[str],
+ contexts: Optional[List[str]] = None) -> Mapping[str, Any]:
+ params: MutableMapping[str, Any] = {"events": events}
+ if contexts is not None:
+ params["contexts"] = contexts
+ return params
+
+ @command
+ def unsubscribe(self,
+ events: Optional[List[str]] = None,
+ contexts: Optional[List[str]] = None) -> Mapping[str, Any]:
+ params: MutableMapping[str, Any] = {"events": events if events is not None else []}
+ if contexts is not None:
+ params["contexts"] = contexts
+ return params
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/bidi/transport.py b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/transport.py
new file mode 100644
index 0000000000..afe054528e
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/bidi/transport.py
@@ -0,0 +1,76 @@
+import asyncio
+import json
+import logging
+import sys
+from typing import Any, Callable, Coroutine, List, Optional, Mapping
+
+import websockets
+
+logger = logging.getLogger("webdriver.bidi")
+
+
+def get_running_loop() -> asyncio.AbstractEventLoop:
+ if sys.version_info >= (3, 7):
+ return asyncio.get_running_loop()
+ else:
+ # Unlike the above, this will actually create an event loop
+ # if there isn't one; hopefully running tests in Python >= 3.7
+ # will allow us to catch any behaviour difference
+ # (Needs to be in else for mypy to believe this is reachable)
+ return asyncio.get_event_loop()
+
+
+class Transport:
+ """Low level message handler for the WebSockets connection"""
+ def __init__(self, url: str,
+ msg_handler: Callable[[Mapping[str, Any]], Coroutine[Any, Any, None]],
+ loop: Optional[asyncio.AbstractEventLoop] = None):
+ self.url = url
+ self.connection: Optional[websockets.WebSocketClientProtocol] = None
+ self.msg_handler = msg_handler
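+ # Messages queued here before the WebSocket connection is established are
+ # flushed by start().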
+ self.send_buf: List[Mapping[str, Any]] = []
+
+ if loop is None:
+ loop = get_running_loop()
+ self.loop = loop
+
+ self.read_message_task: Optional[asyncio.Task[Any]] = None
+
+ async def start(self) -> None:
+ self.connection = await websockets.client.connect(self.url)
+ self.read_message_task = self.loop.create_task(self.read_messages())
+
+ for msg in self.send_buf:
+ await self._send(self.connection, msg)
+
+ async def send(self, data: Mapping[str, Any]) -> None:
+ if self.connection is not None:
+ await self._send(self.connection, data)
+ else:
+ self.send_buf.append(data)
+
+ @staticmethod
+ async def _send(
+ connection: websockets.WebSocketClientProtocol,
+ data: Mapping[str, Any]
+ ) -> None:
+ msg = json.dumps(data)
+ logger.debug("→ %s", msg)
+ await connection.send(msg)
+
+ async def handle(self, msg: str) -> None:
+ logger.debug("← %s", msg)
+ data = json.loads(msg)
+ await self.msg_handler(data)
+
+ async def end(self) -> None:
+ if self.connection:
+ await self.connection.close()
+ self.connection = None
+
+ async def read_messages(self) -> None:
+ assert self.connection is not None
+ async for msg in self.connection:
+ if not isinstance(msg, str):
+ raise ValueError("Got a binary message")
+ await self.handle(msg)
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/client.py b/testing/web-platform/tests/tools/webdriver/webdriver/client.py
new file mode 100644
index 0000000000..030e3fc56b
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/client.py
@@ -0,0 +1,900 @@
+# mypy: allow-untyped-defs
+
+from typing import Dict
+from urllib import parse as urlparse
+
+from . import error
+from . import protocol
+from . import transport
+from .bidi.client import BidiSession
+
+
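+# Decorator for command methods: lazily starts the WebDriver session (looked up
+# on ``self`` or ``self.session``) before the wrapped command runs.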
+def command(func):
+ def inner(self, *args, **kwargs):
+ if hasattr(self, "session"):
+ session = self.session
+ else:
+ session = self
+
+ if session.session_id is None:
+ session.start()
+
+ return func(self, *args, **kwargs)
+
+ inner.__name__ = func.__name__
+ inner.__doc__ = func.__doc__
+
+ return inner
+
+
+class Timeouts:
+
+ def __init__(self, session):
+ self.session = session
+
+ def _get(self, key=None):
+ timeouts = self.session.send_session_command("GET", "timeouts")
+ if key is not None:
+ return timeouts[key]
+ return timeouts
+
+ def _set(self, key, secs):
+ body = {key: secs * 1000}
+ self.session.send_session_command("POST", "timeouts", body)
+ return None
+
+ @property
+ def script(self):
+ return self._get("script")
+
+ @script.setter
+ def script(self, secs):
+ return self._set("script", secs)
+
+ @property
+ def page_load(self):
+ return self._get("pageLoad")
+
+ @page_load.setter
+ def page_load(self, secs):
+ return self._set("pageLoad", secs)
+
+ @property
+ def implicit(self):
+ return self._get("implicit")
+
+ @implicit.setter
+ def implicit(self, secs):
+ return self._set("implicit", secs)
+
+ def __str__(self):
+ name = "%s.%s" % (self.__module__, self.__class__.__name__)
+ return "<%s script=%d, load=%d, implicit=%d>" % \
+ (name, self.script, self.page_load, self.implicit)
+
+
+class ActionSequence:
+ """API for creating and performing action sequences.
+
+ Each action method adds one or more actions to a queue. When perform()
+ is called, the queued actions fire in order.
+
+ May be chained together as in::
+
+ ActionSequence(session, "key", id) \
+ .key_down("a") \
+ .key_up("a") \
+ .perform()
+ """
+ def __init__(self, session, action_type, input_id, pointer_params=None):
+ """Represents a sequence of actions of one type for one input source.
+
+ :param session: WebDriver session.
+ :param action_type: Action type; may be "none", "key", or "pointer".
+ :param input_id: ID of input source.
+ :param pointer_params: Optional dictionary of pointer parameters.
+ """
+ self.session = session
+ self._id = input_id
+ self._type = action_type
+ self._actions = []
+ self._pointer_params = pointer_params
+
+ @property
+ def dict(self):
+ d = {
+ "type": self._type,
+ "id": self._id,
+ "actions": self._actions,
+ }
+ if self._pointer_params is not None:
+ d["parameters"] = self._pointer_params
+ return d
+
+ @command
+ def perform(self):
+ """Perform all queued actions."""
+ self.session.actions.perform([self.dict])
+
+ def _key_action(self, subtype, value):
+ self._actions.append({"type": subtype, "value": value})
+
+ def _pointer_action(self, subtype, button=None, x=None, y=None, duration=None, origin=None, width=None,
+ height=None, pressure=None, tangential_pressure=None, tilt_x=None,
+ tilt_y=None, twist=None, altitude_angle=None, azimuth_angle=None):
+ action = {
+ "type": subtype
+ }
+ if button is not None:
+ action["button"] = button
+ if x is not None:
+ action["x"] = x
+ if y is not None:
+ action["y"] = y
+ if duration is not None:
+ action["duration"] = duration
+ if origin is not None:
+ action["origin"] = origin
+ if width is not None:
+ action["width"] = width
+ if height is not None:
+ action["height"] = height
+ if pressure is not None:
+ action["pressure"] = pressure
+ if tangential_pressure is not None:
+ action["tangentialPressure"] = tangential_pressure
+ if tilt_x is not None:
+ action["tiltX"] = tilt_x
+ if tilt_y is not None:
+ action["tiltY"] = tilt_y
+ if twist is not None:
+ action["twist"] = twist
+ if altitude_angle is not None:
+ action["altitudeAngle"] = altitude_angle
+ if azimuth_angle is not None:
+ action["azimuthAngle"] = azimuth_angle
+ self._actions.append(action)
+
+ def pause(self, duration):
+ self._actions.append({"type": "pause", "duration": duration})
+ return self
+
+ def pointer_move(self, x, y, duration=None, origin=None, width=None, height=None,
+ pressure=None, tangential_pressure=None, tilt_x=None, tilt_y=None,
+ twist=None, altitude_angle=None, azimuth_angle=None):
+ """Queue a pointerMove action.
+
+ :param x: Destination x-axis coordinate of pointer in CSS pixels.
+ :param y: Destination y-axis coordinate of pointer in CSS pixels.
+ :param duration: Number of milliseconds over which to distribute the
+ move. If None, remote end defaults to 0.
+ :param origin: Origin of coordinates, either "viewport", "pointer" or
+ an Element. If None, remote end defaults to "viewport".
+ """
+ self._pointer_action("pointerMove", x=x, y=y, duration=duration, origin=origin,
+ width=width, height=height, pressure=pressure,
+ tangential_pressure=tangential_pressure, tilt_x=tilt_x, tilt_y=tilt_y,
+ twist=twist, altitude_angle=altitude_angle, azimuth_angle=azimuth_angle)
+ return self
+
+ def pointer_up(self, button=0):
+ """Queue a pointerUp action for `button`.
+
+ :param button: Pointer button to perform action with.
+ Default: 0, which represents main device button.
+ """
+ self._pointer_action("pointerUp", button=button)
+ return self
+
+ def pointer_down(self, button=0, width=None, height=None, pressure=None,
+ tangential_pressure=None, tilt_x=None, tilt_y=None,
+ twist=None, altitude_angle=None, azimuth_angle=None):
+ """Queue a pointerDown action for `button`.
+
+ :param button: Pointer button to perform action with.
+ Default: 0, which represents main device button.
+ """
+ self._pointer_action("pointerDown", button=button, width=width, height=height,
+ pressure=pressure, tangential_pressure=tangential_pressure,
+ tilt_x=tilt_x, tilt_y=tilt_y, twist=twist, altitude_angle=altitude_angle,
+ azimuth_angle=azimuth_angle)
+ return self
+
+ def click(self, element=None, button=0):
+ """Queue a click with the specified button.
+
+ If an element is given, move the pointer to that element first,
+ otherwise click current pointer coordinates.
+
+ :param element: Optional element to click.
+ :param button: Integer representing pointer button to perform action
+ with. Default: 0, which represents main device button.
+ """
+ if element:
+ self.pointer_move(0, 0, origin=element)
+ return self.pointer_down(button).pointer_up(button)
+
+ def key_up(self, value):
+ """Queue a keyUp action for `value`.
+
+ :param value: Character to perform key action with.
+ """
+ self._key_action("keyUp", value)
+ return self
+
+ def key_down(self, value):
+ """Queue a keyDown action for `value`.
+
+ :param value: Character to perform key action with.
+ """
+ self._key_action("keyDown", value)
+ return self
+
+ def send_keys(self, keys):
+ """Queue a keyDown and keyUp action for each character in `keys`.
+
+ :param keys: String of keys to perform key actions with.
+ """
+ for c in keys:
+ self.key_down(c)
+ self.key_up(c)
+ return self
+
+ def scroll(self, x, y, delta_x, delta_y, duration=None, origin=None):
+ """Queue a scroll action.
+
+ :param x: Destination x-axis coordinate of pointer in CSS pixels.
+ :param y: Destination y-axis coordinate of pointer in CSS pixels.
+ :param delta_x: scroll delta on x-axis in CSS pixels.
+ :param delta_y: scroll delta on y-axis in CSS pixels.
+ :param duration: Number of milliseconds over which to distribute the
+ scroll. If None, remote end defaults to 0.
+ :param origin: Origin of coordinates, either "viewport" or an Element.
+ If None, remote end defaults to "viewport".
+ """
+ action = {
+ "type": "scroll",
+ "x": x,
+ "y": y,
+ "deltaX": delta_x,
+ "deltaY": delta_y
+ }
+ if duration is not None:
+ action["duration"] = duration
+ if origin is not None:
+ action["origin"] = origin
+ self._actions.append(action)
+ return self
+
+
+class Actions:
+ def __init__(self, session):
+ self.session = session
+
+ @command
+ def perform(self, actions=None):
+ """Performs actions by tick from each action sequence in `actions`.
+
+ :param actions: List of input source action sequences. A single action
+ sequence may be created with the help of
+ ``ActionSequence.dict``.
+ """
+ body = {"actions": [] if actions is None else actions}
+ actions = self.session.send_session_command("POST", "actions", body)
+ return actions
+
+ @command
+ def release(self):
+ return self.session.send_session_command("DELETE", "actions")
+
+ def sequence(self, *args, **kwargs):
+ """Return an empty ActionSequence of the designated type.
+
+ See ActionSequence for parameter list.
+ """
+ return ActionSequence(self.session, *args, **kwargs)
+
+
+class Window:
+ identifier = "window-fcc6-11e5-b4f8-330a88ab9d7f"
+
+ def __init__(self, session):
+ self.session = session
+
+ @command
+ def close(self):
+ handles = self.session.send_session_command("DELETE", "window")
+ if handles is not None and len(handles) == 0:
+ # With no more open top-level browsing contexts, the session is closed.
+ self.session.session_id = None
+
+ return handles
+
+ # The many "type: ignore" comments here and below are to silence mypy's
+ # "Decorated property not supported" error, which is due to a limitation
+ # in mypy, see https://github.com/python/mypy/issues/1362.
+ @property # type: ignore
+ @command
+ def rect(self):
+ return self.session.send_session_command("GET", "window/rect")
+
+ @rect.setter # type: ignore
+ @command
+ def rect(self, new_rect):
+ self.session.send_session_command("POST", "window/rect", new_rect)
+
+ @property # type: ignore
+ @command
+ def size(self):
+ """Gets the window size as a tuple of `(width, height)`."""
+ rect = self.rect
+ return (rect["width"], rect["height"])
+
+ @size.setter # type: ignore
+ @command
+ def size(self, new_size):
+ """Set window size by passing a tuple of `(width, height)`."""
+ try:
+ width, height = new_size
+ body = {"width": width, "height": height}
+ self.session.send_session_command("POST", "window/rect", body)
+ except (error.UnknownErrorException, error.InvalidArgumentException):
+ # silently ignore this error as the command is not implemented
+ # for Android. Revert this once it is implemented.
+ pass
+
+ @property # type: ignore
+ @command
+ def position(self):
+ """Gets the window position as a tuple of `(x, y)`."""
+ rect = self.rect
+ return (rect["x"], rect["y"])
+
+ @position.setter # type: ignore
+ @command
+ def position(self, new_position):
+ """Set window position by passing a tuple of `(x, y)`."""
+ try:
+ x, y = new_position
+ body = {"x": x, "y": y}
+ self.session.send_session_command("POST", "window/rect", body)
+ except error.UnknownErrorException:
+ # silently ignore this error as the command is not implemented
+ # for Android. Revert this once it is implemented.
+ pass
+
+ @command
+ def maximize(self):
+ return self.session.send_session_command("POST", "window/maximize")
+
+ @command
+ def minimize(self):
+ return self.session.send_session_command("POST", "window/minimize")
+
+ @command
+ def fullscreen(self):
+ return self.session.send_session_command("POST", "window/fullscreen")
+
+ @classmethod
+ def from_json(cls, json, session):
+ uuid = json[Window.identifier]
+ return cls(uuid, session)
+
+
+class Frame:
+ identifier = "frame-075b-4da1-b6ba-e579c2d3230a"
+
+ def __init__(self, session):
+ self.session = session
+
+ @classmethod
+ def from_json(cls, json, session):
+ uuid = json[Frame.identifier]
+ return cls(uuid, session)
+
+
+class ShadowRoot:
+ identifier = "shadow-6066-11e4-a52e-4f735466cecf"
+
+ def __init__(self, session, id):
+ """
+ Construct a new shadow root representation.
+
+ :param id: Shadow root UUID which must be unique across
+ all browsing contexts.
+ :param session: Current ``webdriver.Session``.
+ """
+ self.id = id
+ self.session = session
+
+ @classmethod
+ def from_json(cls, json, session):
+ uuid = json[ShadowRoot.identifier]
+ return cls(session, uuid)
+
+ def send_shadow_command(self, method, uri, body=None):
+ url = f"shadow/{self.id}/{uri}"
+ return self.session.send_session_command(method, url, body)
+
+ @command
+ def find_element(self, strategy, selector):
+ body = {"using": strategy,
+ "value": selector}
+ return self.send_shadow_command("POST", "element", body)
+
+ @command
+ def find_elements(self, strategy, selector):
+ body = {"using": strategy,
+ "value": selector}
+ return self.send_shadow_command("POST", "elements", body)
+
+
+class Find:
+ def __init__(self, session):
+ self.session = session
+
+ @command
+ def css(self, element_selector, all=True):
+ elements = self._find_element("css selector", element_selector, all)
+ return elements
+
+ def _find_element(self, strategy, selector, all):
+ route = "elements" if all else "element"
+ body = {"using": strategy,
+ "value": selector}
+ return self.session.send_session_command("POST", route, body)
+
+
+class Cookies:
+ def __init__(self, session):
+ self.session = session
+
+ def __getitem__(self, name):
+ return self.session.send_session_command("GET", "cookie/%s" % name, {})
+
+ def __setitem__(self, name, value):
+ cookie = {"name": name,
+ "value": None}
+
+ if isinstance(value, str):
+ cookie["value"] = value
+ elif hasattr(value, "value"):
+ cookie["value"] = value.value
+ # Add Cookie takes the cookie object in the request body.
+ self.session.send_session_command("POST", "cookie", {"cookie": cookie})
+
+
+class UserPrompt:
+ def __init__(self, session):
+ self.session = session
+
+ @command
+ def dismiss(self):
+ self.session.send_session_command("POST", "alert/dismiss")
+
+ @command
+ def accept(self):
+ self.session.send_session_command("POST", "alert/accept")
+
+ @property # type: ignore
+ @command
+ def text(self):
+ return self.session.send_session_command("GET", "alert/text")
+
+ @text.setter # type: ignore
+ @command
+ def text(self, value):
+ body = {"text": value}
+ self.session.send_session_command("POST", "alert/text", body=body)
+
+
+class Session:
+ def __init__(self,
+ host,
+ port,
+ url_prefix="/",
+ enable_bidi=False,
+ capabilities=None,
+ extension=None):
+
+ if enable_bidi:
+ if capabilities is not None:
+ capabilities.setdefault("alwaysMatch", {}).update({"webSocketUrl": True})
+ else:
+ capabilities = {"alwaysMatch": {"webSocketUrl": True}}
+
+ self.transport = transport.HTTPWireProtocol(host, port, url_prefix)
+ self.requested_capabilities = capabilities
+ self.capabilities = None
+ self.session_id = None
+ self.timeouts = None
+ self.window = None
+ self.find = None
+ self.enable_bidi = enable_bidi
+ self.bidi_session = None
+ self.extension = None
+ self.extension_cls = extension
+
+ self.timeouts = Timeouts(self)
+ self.window = Window(self)
+ self.find = Find(self)
+ self.alert = UserPrompt(self)
+ self.actions = Actions(self)
+
+ def __repr__(self):
+ return "<%s %s>" % (self.__class__.__name__, self.session_id or "(disconnected)")
+
+ def __eq__(self, other):
+ return (self.session_id is not None and isinstance(other, Session) and
+ self.session_id == other.session_id)
+
+ def __enter__(self):
+ self.start()
+ return self
+
+ def __exit__(self, *args, **kwargs):
+ self.end()
+
+ def __del__(self):
+ self.end()
+
+ def match(self, capabilities):
+ return self.requested_capabilities == capabilities
+
+ def start(self):
+ """Start a new WebDriver session.
+
+ :return: Dictionary with `capabilities` and `sessionId`.
+
+ :raises error.WebDriverException: If the remote end returns
+ an error.
+ """
+ if self.session_id is not None:
+ return
+
+ self.transport.close()
+
+ body = {"capabilities": {}}
+
+ if self.requested_capabilities is not None:
+ body["capabilities"] = self.requested_capabilities
+
+ value = self.send_command("POST", "session", body=body)
+ assert isinstance(value["sessionId"], str)
+ assert isinstance(value["capabilities"], Dict)
+
+ self.session_id = value["sessionId"]
+ self.capabilities = value["capabilities"]
+
+ if "webSocketUrl" in self.capabilities:
+ self.bidi_session = BidiSession.from_http(self.session_id,
+ self.capabilities)
+ elif self.enable_bidi:
+ self.end()
+ raise error.SessionNotCreatedException(
+ "Requested bidi session, but webSocketUrl capability not found")
+
+ if self.extension_cls:
+ self.extension = self.extension_cls(self)
+
+ return value
+
+ def end(self):
+ """Try to close the active session."""
+ if self.session_id is None:
+ return
+
+ try:
+ self.send_command("DELETE", "session/%s" % self.session_id)
+ except (OSError, error.InvalidSessionIdException):
+ pass
+ finally:
+ self.session_id = None
+ self.transport.close()
+
+ def send_command(self, method, url, body=None, timeout=None):
+ """
+ Send a command to the remote end and validate its success.
+
+ :param method: HTTP method to use in request.
+ :param url: "Command part" of the HTTP request URL,
+ e.g. `window/rect`.
+ :param body: Optional body of the HTTP request.
+
+ :return: `None` if the HTTP response body was empty, otherwise
+ the `value` field returned after parsing the response
+ body as JSON.
+
+ :raises error.WebDriverException: If the remote end returns
+ an error.
+ :raises ValueError: If the response body does not contain a
+ `value` key.
+ """
+
+ response = self.transport.send(
+ method, url, body,
+ encoder=protocol.Encoder, decoder=protocol.Decoder,
+ session=self, timeout=timeout)
+
+ if response.status != 200:
+ err = error.from_response(response)
+
+ if isinstance(err, error.InvalidSessionIdException):
+ # The driver could have already deleted the session.
+ self.session_id = None
+
+ raise err
+
+ if "value" in response.body:
+ value = response.body["value"]
+ """
+ Edge does not yet return the w3c session ID.
+ We want the tests to run in Edge anyway to help with REC.
+ In order to run the tests in Edge, we need to hack around
+ bug:
+ https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14641972
+ """
+ if url == "session" and method == "POST" and "sessionId" in response.body and "sessionId" not in value:
+ value["sessionId"] = response.body["sessionId"]
+ else:
+ raise ValueError("Expected 'value' key in response body:\n"
+ "%s" % response)
+
+ return value
+
+ def send_session_command(self, method, uri, body=None, timeout=None):
+ """
+ Send a command to an established session and validate its success.
+
+ :param method: HTTP method to use in request.
+ :param uri: "Command part" of the HTTP request URL,
+ e.g. `window/rect`.
+ :param body: Optional body of the HTTP request. Must be JSON
+ serialisable.
+
+ :return: `None` if the HTTP response body was empty, otherwise
+ the result of parsing the body as JSON.
+
+ :raises error.WebDriverException: If the remote end returns
+ an error.
+ """
+ url = urlparse.urljoin("session/%s/" % self.session_id, uri)
+ return self.send_command(method, url, body, timeout)
+
+ @property # type: ignore
+ @command
+ def url(self):
+ return self.send_session_command("GET", "url")
+
+ @url.setter # type: ignore
+ @command
+ def url(self, url):
+ if urlparse.urlsplit(url).netloc is None:
+ return self.url(url)
+ body = {"url": url}
+ return self.send_session_command("POST", "url", body)
+
+ @command
+ def back(self):
+ return self.send_session_command("POST", "back")
+
+ @command
+ def forward(self):
+ return self.send_session_command("POST", "forward")
+
+ @command
+ def refresh(self):
+ return self.send_session_command("POST", "refresh")
+
+ @property # type: ignore
+ @command
+ def title(self):
+ return self.send_session_command("GET", "title")
+
+ @property # type: ignore
+ @command
+ def source(self):
+ return self.send_session_command("GET", "source")
+
+ @command
+ def new_window(self, type_hint="tab"):
+ body = {"type": type_hint}
+ value = self.send_session_command("POST", "window/new", body)
+
+ return value["handle"]
+
+ @property # type: ignore
+ @command
+ def window_handle(self):
+ return self.send_session_command("GET", "window")
+
+ @window_handle.setter # type: ignore
+ @command
+ def window_handle(self, handle):
+ body = {"handle": handle}
+ return self.send_session_command("POST", "window", body=body)
+
+ def switch_frame(self, frame):
+ if frame == "parent":
+ url = "frame/parent"
+ body = None
+ else:
+ url = "frame"
+ body = {"id": frame}
+
+ return self.send_session_command("POST", url, body)
+
+ @property # type: ignore
+ @command
+ def handles(self):
+ return self.send_session_command("GET", "window/handles")
+
+ @property # type: ignore
+ @command
+ def active_element(self):
+ return self.send_session_command("GET", "element/active")
+
+ @command
+ def cookies(self, name=None):
+ if name is None:
+ url = "cookie"
+ else:
+ url = "cookie/%s" % name
+ return self.send_session_command("GET", url, {})
+
+ @command
+ def set_cookie(self, name, value, path=None, domain=None,
+ secure=None, expiry=None, http_only=None):
+ body = {
+ "name": name,
+ "value": value,
+ }
+
+ if domain is not None:
+ body["domain"] = domain
+ if expiry is not None:
+ body["expiry"] = expiry
+ if http_only is not None:
+ body["httpOnly"] = http_only
+ if path is not None:
+ body["path"] = path
+ if secure is not None:
+ body["secure"] = secure
+ self.send_session_command("POST", "cookie", {"cookie": body})
+
+ def delete_cookie(self, name=None):
+ if name is None:
+ url = "cookie"
+ else:
+ url = "cookie/%s" % name
+ self.send_session_command("DELETE", url, {})
+
+ #[...]
+
+ @command
+ def execute_script(self, script, args=None):
+ if args is None:
+ args = []
+
+ body = {
+ "script": script,
+ "args": args
+ }
+ return self.send_session_command("POST", "execute/sync", body)
+
+ @command
+ def execute_async_script(self, script, args=None):
+ if args is None:
+ args = []
+
+ body = {
+ "script": script,
+ "args": args
+ }
+ return self.send_session_command("POST", "execute/async", body)
+
+ #[...]
+
+ @command
+ def screenshot(self):
+ return self.send_session_command("GET", "screenshot")
+
+class Element:
+ """
+ Representation of a web element.
+
+ A web element is an abstraction used to identify an element when
+ it is transported via the protocol, between remote- and local ends.
+ """
+ identifier = "element-6066-11e4-a52e-4f735466cecf"
+
+ def __init__(self, id, session):
+ """
+ Construct a new web element representation.
+
+ :param id: Web element UUID which must be unique across
+ all browsing contexts.
+ :param session: Current ``webdriver.Session``.
+ """
+ self.id = id
+ self.session = session
+
+ def __repr__(self):
+ return "<%s %s>" % (self.__class__.__name__, self.id)
+
+ def __eq__(self, other):
+ return (isinstance(other, Element) and self.id == other.id and
+ self.session == other.session)
+
+ @classmethod
+ def from_json(cls, json, session):
+ uuid = json[Element.identifier]
+ return cls(uuid, session)
+
+ def send_element_command(self, method, uri, body=None):
+ url = "element/%s/%s" % (self.id, uri)
+ return self.session.send_session_command(method, url, body)
+
+ @command
+ def find_element(self, strategy, selector):
+ body = {"using": strategy,
+ "value": selector}
+ return self.send_element_command("POST", "element", body)
+
+ @command
+ def click(self):
+ self.send_element_command("POST", "click", {})
+
+ @command
+ def tap(self):
+ self.send_element_command("POST", "tap", {})
+
+ @command
+ def clear(self):
+ self.send_element_command("POST", "clear", {})
+
+ @command
+ def send_keys(self, text):
+ return self.send_element_command("POST", "value", {"text": text})
+
+ @property # type: ignore
+ @command
+ def text(self):
+ return self.send_element_command("GET", "text")
+
+ @property # type: ignore
+ @command
+ def name(self):
+ return self.send_element_command("GET", "name")
+
+ @command
+ def style(self, property_name):
+ return self.send_element_command("GET", "css/%s" % property_name)
+
+ @property # type: ignore
+ @command
+ def rect(self):
+ return self.send_element_command("GET", "rect")
+
+ @property # type: ignore
+ @command
+ def selected(self):
+ return self.send_element_command("GET", "selected")
+
+ @command
+ def screenshot(self):
+ return self.send_element_command("GET", "screenshot")
+
+ @property # type: ignore
+ @command
+ def shadow_root(self):
+ return self.send_element_command("GET", "shadow")
+
+ @command
+ def attribute(self, name):
+ return self.send_element_command("GET", "attribute/%s" % name)
+
+ # This MUST come last because otherwise @property decorators above
+ # will be overridden by this.
+ @command
+ def property(self, name):
+ return self.send_element_command("GET", "property/%s" % name)
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/error.py b/testing/web-platform/tests/tools/webdriver/webdriver/error.py
new file mode 100644
index 0000000000..1b67d3325a
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/error.py
@@ -0,0 +1,232 @@
+# mypy: allow-untyped-defs
+
+import collections
+import json
+
+from typing import ClassVar, DefaultDict, Type
+
+
+class WebDriverException(Exception):
+ # The status_code class variable is used to map the JSON Error Code (see
+ # https://w3c.github.io/webdriver/#errors) to a WebDriverException subclass.
+ # However, http_status need not match, and both are set as instance
+ # variables, shadowing the class variables. TODO: Match on both http_status
+ # and status_code and let these be class variables only.
+ http_status = None # type: ClassVar[int]
+ status_code = None # type: ClassVar[str]
+
+ def __init__(self, http_status=None, status_code=None, message=None, stacktrace=None):
+ super().__init__()
+
+ if http_status is not None:
+ self.http_status = http_status
+ if status_code is not None:
+ self.status_code = status_code
+ self.message = message
+ self.stacktrace = stacktrace
+
+ def __repr__(self):
+ return f"<{self.__class__.__name__} http_status={self.http_status}>"
+
+ def __str__(self):
+ message = f"{self.status_code} ({self.http_status})"
+
+ if self.message is not None:
+ message += ": %s" % self.message
+ message += "\n"
+
+ if self.stacktrace:
+ message += ("\nRemote-end stacktrace:\n\n%s" % self.stacktrace)
+
+ return message
+
+
+class DetachedShadowRootException(WebDriverException):
+ http_status = 404
+ status_code = "detached shadow root"
+
+
+class ElementClickInterceptedException(WebDriverException):
+ http_status = 400
+ status_code = "element click intercepted"
+
+
+class ElementNotSelectableException(WebDriverException):
+ http_status = 400
+ status_code = "element not selectable"
+
+
+class ElementNotVisibleException(WebDriverException):
+ http_status = 400
+ status_code = "element not visible"
+
+
+class InsecureCertificateException(WebDriverException):
+ http_status = 400
+ status_code = "insecure certificate"
+
+
+class InvalidArgumentException(WebDriverException):
+ http_status = 400
+ status_code = "invalid argument"
+
+
+class InvalidCookieDomainException(WebDriverException):
+ http_status = 400
+ status_code = "invalid cookie domain"
+
+
+class InvalidElementCoordinatesException(WebDriverException):
+ http_status = 400
+ status_code = "invalid element coordinates"
+
+
+class InvalidElementStateException(WebDriverException):
+ http_status = 400
+ status_code = "invalid element state"
+
+
+class InvalidSelectorException(WebDriverException):
+ http_status = 400
+ status_code = "invalid selector"
+
+
+class InvalidSessionIdException(WebDriverException):
+ http_status = 404
+ status_code = "invalid session id"
+
+
+class JavascriptErrorException(WebDriverException):
+ http_status = 500
+ status_code = "javascript error"
+
+
+class MoveTargetOutOfBoundsException(WebDriverException):
+ http_status = 500
+ status_code = "move target out of bounds"
+
+
+class NoSuchAlertException(WebDriverException):
+ http_status = 404
+ status_code = "no such alert"
+
+
+class NoSuchCookieException(WebDriverException):
+ http_status = 404
+ status_code = "no such cookie"
+
+
+class NoSuchElementException(WebDriverException):
+ http_status = 404
+ status_code = "no such element"
+
+
+class NoSuchFrameException(WebDriverException):
+ http_status = 404
+ status_code = "no such frame"
+
+
+class NoSuchShadowRootException(WebDriverException):
+ http_status = 404
+ status_code = "no such shadow root"
+
+
+class NoSuchWindowException(WebDriverException):
+ http_status = 404
+ status_code = "no such window"
+
+
+class ScriptTimeoutException(WebDriverException):
+ http_status = 500
+ status_code = "script timeout"
+
+
+class SessionNotCreatedException(WebDriverException):
+ http_status = 500
+ status_code = "session not created"
+
+
+class StaleElementReferenceException(WebDriverException):
+ http_status = 404
+ status_code = "stale element reference"
+
+
+class TimeoutException(WebDriverException):
+ http_status = 500
+ status_code = "timeout"
+
+
+class UnableToSetCookieException(WebDriverException):
+ http_status = 500
+ status_code = "unable to set cookie"
+
+
+class UnexpectedAlertOpenException(WebDriverException):
+ http_status = 500
+ status_code = "unexpected alert open"
+
+
+class UnknownErrorException(WebDriverException):
+ http_status = 500
+ status_code = "unknown error"
+
+
+class UnknownCommandException(WebDriverException):
+ http_status = 404
+ status_code = "unknown command"
+
+
+class UnknownMethodException(WebDriverException):
+ http_status = 405
+ status_code = "unknown method"
+
+
+class UnsupportedOperationException(WebDriverException):
+ http_status = 500
+ status_code = "unsupported operation"
+
+
+def from_response(response):
+ """
+ Unmarshals an error from a ``Response``'s `body`, failing
+ if not all three required `error`, `message`, and `stacktrace`
+ fields are given. Defaults to ``WebDriverException`` if `error`
+ is unknown.
+ """
+ if response.status == 200:
+ raise UnknownErrorException(
+ response.status,
+ None,
+ "Response is not an error:\n"
+ "%s" % json.dumps(response.body))
+
+ if "value" in response.body:
+ value = response.body["value"]
+ else:
+ raise UnknownErrorException(
+ response.status,
+ None,
+ "Expected 'value' key in response body:\n"
+ "%s" % json.dumps(response.body))
+
+ # all fields must exist, but stacktrace can be an empty string
+ code = value["error"]
+ message = value["message"]
+ stack = value["stacktrace"] or None
+
+ cls = get(code)
+ return cls(response.status, code, message, stacktrace=stack)
+
+
+def get(error_code):
+ """
+ Gets exception from `error_code`, falling back to
+ ``WebDriverException`` if it is not found.
+ """
+ return _errors.get(error_code, WebDriverException)
+
+
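+# Build a lookup table from the JSON error code (``status_code``) to the
+# exception class by scanning the classes defined in this module.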
+_errors: DefaultDict[str, Type[WebDriverException]] = collections.defaultdict()
+for item in list(locals().values()):
+ if type(item) == type and issubclass(item, WebDriverException):
+ _errors[item.status_code] = item
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/protocol.py b/testing/web-platform/tests/tools/webdriver/webdriver/protocol.py
new file mode 100644
index 0000000000..1972c3fce2
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/protocol.py
@@ -0,0 +1,49 @@
+# mypy: allow-untyped-defs
+
+import json
+
+import webdriver
+
+
+"""WebDriver wire protocol codecs."""
+
+
+class Encoder(json.JSONEncoder):
+ def __init__(self, *args, **kwargs):
+ kwargs.pop("session")
+ super().__init__(*args, **kwargs)
+
+ def default(self, obj):
+ if isinstance(obj, (list, tuple)):
+ return [self.default(x) for x in obj]
+ elif isinstance(obj, webdriver.Element):
+ return {webdriver.Element.identifier: obj.id}
+ elif isinstance(obj, webdriver.Frame):
+ return {webdriver.Frame.identifier: obj.id}
+ elif isinstance(obj, webdriver.Window):
+ return {webdriver.Window.identifier: obj.id}
+ elif isinstance(obj, webdriver.ShadowRoot):
+ return {webdriver.ShadowRoot.identifier: obj.id}
+ return super().default(obj)
+
+
+class Decoder(json.JSONDecoder):
+ def __init__(self, *args, **kwargs):
+ self.session = kwargs.pop("session")
+ super().__init__(
+ object_hook=self.object_hook, *args, **kwargs)
+
+ def object_hook(self, payload):
+ if isinstance(payload, (list, tuple)):
+ return [self.object_hook(x) for x in payload]
+ elif isinstance(payload, dict) and webdriver.Element.identifier in payload:
+ return webdriver.Element.from_json(payload, self.session)
+ elif isinstance(payload, dict) and webdriver.Frame.identifier in payload:
+ return webdriver.Frame.from_json(payload, self.session)
+ elif isinstance(payload, dict) and webdriver.Window.identifier in payload:
+ return webdriver.Window.from_json(payload, self.session)
+ elif isinstance(payload, dict) and webdriver.ShadowRoot.identifier in payload:
+ return webdriver.ShadowRoot.from_json(payload, self.session)
+ elif isinstance(payload, dict):
+ return {k: self.object_hook(v) for k, v in payload.items()}
+ return payload
diff --git a/testing/web-platform/tests/tools/webdriver/webdriver/transport.py b/testing/web-platform/tests/tools/webdriver/webdriver/transport.py
new file mode 100644
index 0000000000..47d0659196
--- /dev/null
+++ b/testing/web-platform/tests/tools/webdriver/webdriver/transport.py
@@ -0,0 +1,267 @@
+# mypy: allow-untyped-defs
+
+import json
+import select
+
+from http.client import HTTPConnection
+from typing import Dict, List, Mapping, Sequence, Tuple
+from urllib import parse as urlparse
+
+from . import error
+
+"""Implements HTTP transport for the WebDriver wire protocol."""
+
+
+missing = object()
+
+
+class ResponseHeaders(Mapping[str, str]):
+ """Read-only dictionary-like API for accessing response headers.
+
+ This class:
+ * Normalizes the header keys it is built with to lowercase (such that
+ iterating the items will return lowercase header keys).
+ * Has case-insensitive header lookup.
+ * Always returns all header values that have the same name, separated by
+ commas.
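+
+ A short, illustrative example of the lookup behaviour described above::
+
+ headers = ResponseHeaders([("Set-Cookie", "a=1"), ("set-cookie", "b=2")])
+ headers["set-cookie"] # "a=1, b=2"
+ headers.get_list("set-cookie") # ["a=1", "b=2"]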
+ """
+ def __init__(self, items: Sequence[Tuple[str, str]]):
+ self.headers_dict: Dict[str, List[str]] = {}
+ for key, value in items:
+ key = key.lower()
+ if key not in self.headers_dict:
+ self.headers_dict[key] = []
+ self.headers_dict[key].append(value)
+
+ def __getitem__(self, key):
+ """Get all headers of a certain (case-insensitive) name. If there is
+ more than one, the values are returned comma separated"""
+ values = self.headers_dict[key.lower()]
+ if len(values) == 1:
+ return values[0]
+ else:
+ return ", ".join(values)
+
+ def get_list(self, key, default=missing):
+ """Get all the header values for a particular field name as a list"""
+ try:
+ return self.headers_dict[key.lower()]
+ except KeyError:
+ if default is not missing:
+ return default
+ else:
+ raise
+
+ def __iter__(self):
+ yield from self.headers_dict
+
+ def __len__(self):
+ return len(self.headers_dict)
+
+
+class Response:
+ """
+ Describes an HTTP response received from a remote end whose
+ body has been read and parsed as appropriate.
+ """
+
+ def __init__(self, status, body, headers):
+ self.status = status
+ self.body = body
+ self.headers = headers
+
+ def __repr__(self):
+ cls_name = self.__class__.__name__
+ if self.error:
+ return f"<{cls_name} status={self.status} error={repr(self.error)}>"
+ return f"<{cls_name: }tatus={self.status} body={json.dumps(self.body)}>"
+
+ def __str__(self):
+ return json.dumps(self.body, indent=2)
+
+ @property
+ def error(self):
+ if self.status != 200:
+ return error.from_response(self)
+ return None
+
+ @classmethod
+ def from_http(cls, http_response, decoder=json.JSONDecoder, **kwargs):
+ try:
+ body = json.load(http_response, cls=decoder, **kwargs)
+ headers = ResponseHeaders(http_response.getheaders())
+ except ValueError:
+ raise ValueError("Failed to decode response body as JSON:\n" +
+ http_response.read())
+
+ return cls(http_response.status, body, headers)
+
+
+class HTTPWireProtocol:
+ """
+ Transports messages (commands and responses) over the WebDriver
+ wire protocol.
+
+ Complex objects, such as ``webdriver.Element``, ``webdriver.Frame``,
+ and ``webdriver.Window`` are by default not marshaled to enable
+ use of `session.transport.send` in WPT tests::
+
+ session = webdriver.Session("127.0.0.1", 4444)
+ response = transport.send("GET", "element/active", None)
+ print(response.body["value"])
+ # => {u'element-6066-11e4-a52e-4f735466cecf': u'<uuid>'}
+
+ Automatic marshaling is provided by ``webdriver.protocol.Encoder``
+ and ``webdriver.protocol.Decoder``, which can be passed in to
+ ``HTTPWireProtocol.send`` along with a reference to the current
+ ``webdriver.Session``::
+
+ session = webdriver.Session("127.0.0.1", 4444)
+ response = transport.send("GET", "element/active", None,
+ encoder=protocol.Encoder, decoder=protocol.Decoder,
+ session=session)
+ print(response.body["value"])
+ # => webdriver.Element
+ """
+
+ def __init__(self, host, port, url_prefix="/"):
+ """
+ Construct interface for communicating with the remote server.
+
+ :param host: Host of the remote WebDriver server.
+ :param port: Port of the remote WebDriver server.
+ :param url_prefix: Optional URL path prefix for the server's endpoints.
+ """
+ self.host = host
+ self.port = port
+ self.url_prefix = url_prefix
+ self._conn = None
+ self._last_request_is_blocked = False
+
+ def __del__(self):
+ self.close()
+
+ def close(self):
+ """Closes the current HTTP connection, if there is one."""
+ if self._conn:
+ try:
+ self._conn.close()
+ except OSError:
+ # The remote closed the connection
+ pass
+ self._conn = None
+
+ @property
+ def connection(self):
+ """Gets the current HTTP connection, or lazily creates one."""
+ if not self._conn:
+ conn_kwargs = {}
+ # We are not setting an HTTP timeout other than the default when the
+ # connection is created. The send method takes a timeout value if needed.
+ self._conn = HTTPConnection(self.host, self.port, **conn_kwargs)
+
+ return self._conn
+
+ def url(self, suffix):
+ """
+ From the relative path to a command end-point,
+ craft a full URL suitable to be used in a request to the HTTPD.
+ """
+ return urlparse.urljoin(self.url_prefix, suffix)
+
+ def send(self,
+ method,
+ uri,
+ body=None,
+ headers=None,
+ encoder=json.JSONEncoder,
+ decoder=json.JSONDecoder,
+ timeout=None,
+ **codec_kwargs):
+ """
+ Send a command to the remote.
+
+ The request `body` must be JSON serialisable unless a
+ custom `encoder` has been provided. This means complex
+ objects such as ``webdriver.Element``, ``webdriver.Frame``,
+ and ``webdriver.Window`` are not automatically made
+ into JSON. This behaviour is, however, provided by
+ ``webdriver.protocol.Encoder``, should you want it.
+
+ Similarly, the response body is returned au naturel
+ as plain JSON unless a `decoder` that converts web
+ element references to ``webdriver.Element`` is provided.
+ Use ``webdriver.protocol.Decoder`` to achieve this behaviour.
+
+ The client will attempt to use persistent HTTP connections.
+
+ :param method: `GET`, `POST`, or `DELETE`.
+ :param uri: Relative endpoint of the requests URL path.
+ :param body: Body of the request. Defaults to an empty
+ dictionary if ``method`` is `POST`.
+ :param headers: Additional dictionary of headers to include
+ in the request.
+ :param encoder: JSON encoder class, which defaults to
+ ``json.JSONEncoder`` unless specified.
+ :param decoder: JSON decoder class, which defaults to
+ ``json.JSONDecoder`` unless specified.
+ :param codec_kwargs: Surplus arguments passed on to `encoder`
+ and `decoder` on construction.
+
+ :return: Instance of ``webdriver.transport.Response``
+ describing the HTTP response received from the remote end.
+
+ :raises ValueError: If `body` or the response body are not
+ JSON serialisable.
+ """
+ if body is None and method == "POST":
+ body = {}
+
+ payload = None
+ if body is not None:
+ try:
+ payload = json.dumps(body, cls=encoder, **codec_kwargs)
+ except ValueError:
+ raise ValueError("Failed to encode request body as JSON:\n"
+ "%s" % json.dumps(body, indent=2))
+
+ # When the timeout triggers, the TestRunnerManager thread will reuse
+ # this connection to check if the WebDriver is alive, and we may end up
+ # raising an httplib.CannotSendRequest exception if the WebDriver is
+ # not responding and this httplib.request() call is blocked on the
+ # runner thread. We use the boolean below to check for that and restart
+ # the connection in that case.
+ self._last_request_is_blocked = True
+ response = self._request(method, uri, payload, headers, timeout=timeout)
+ self._last_request_is_blocked = False
+ return Response.from_http(response, decoder=decoder, **codec_kwargs)
+
+ def _request(self, method, uri, payload, headers=None, timeout=None):
+ if isinstance(payload, str):
+ payload = payload.encode("utf-8")
+
+ if headers is None:
+ headers = {}
+ headers.update({"Connection": "keep-alive"})
+
+ url = self.url(uri)
+
+ if self._last_request_is_blocked or self._has_unread_data():
+ self.close()
+
+ self.connection.request(method, url, payload, headers)
+
+ # The timeout for the request has to be set just before calling
+ # httplib.getresponse(), and the previous value restored just after that,
+ # even if an exception is raised.
+ try:
+ if timeout:
+ previous_timeout = self._conn.gettimeout()
+ self._conn.settimeout(timeout)
+ response = self.connection.getresponse()
+ finally:
+ if timeout:
+ self._conn.settimeout(previous_timeout)
+
+ return response
+
+ def _has_unread_data(self):
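+ # A zero-timeout select() tells us whether the socket has buffered, unread
+ # data (for example a stale response); if so, _request() resets the connection.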
+ return self._conn and self._conn.sock and select.select([self._conn.sock], [], [], 0)[0]
diff --git a/testing/web-platform/tests/tools/webtransport/META.yml b/testing/web-platform/tests/tools/webtransport/META.yml
new file mode 100644
index 0000000000..b0e446dd5f
--- /dev/null
+++ b/testing/web-platform/tests/tools/webtransport/META.yml
@@ -0,0 +1,3 @@
+suggested_reviewers:
+ - bashi
+ - yutakahirano
diff --git a/testing/web-platform/tests/tools/webtransport/README.md b/testing/web-platform/tests/tools/webtransport/README.md
new file mode 100644
index 0000000000..3ddf515f03
--- /dev/null
+++ b/testing/web-platform/tests/tools/webtransport/README.md
@@ -0,0 +1,78 @@
+# WebTransport in web-platform-tests
+
+This document describes [WebTransport](https://datatracker.ietf.org/wg/webtrans/documents/) support in web-platform-tests.
+
+## WebTransport over HTTP/3
+`tools/webtransport` provides a simple
+[WebTransport over HTTP/3](https://datatracker.ietf.org/doc/draft-ietf-webtrans-http3/) server for testing. The server interprets the underlying protocols (WebTransport, HTTP/3 and QUIC) and manages WebTransport sessions. When the server receives a request (an extended CONNECT method) from a client, it looks up a corresponding WebTransport handler based on the `:path` header value, then delegates the actual work to that handler. Handlers are typically located under `webtransport/handlers`.
+
+### Handlers
+
+A WebTransport handler is a Python script which contains callback functions. Callback functions are called every time a WebTransport event happens. Definitions of all callbacks can be found in the [APIs section](#APIs).
+
+The following is an example handler which echoes back received data.
+
+```python
+def stream_data_received(session, stream_id: int, data: bytes, stream_ended: bool):
+ if session.stream_is_unidirectional(stream_id):
+ return
+ session.send_stream_data(stream_id, data)
+
+
+def datagram_received(session, data: bytes):
+ session.send_datagram(data)
+```
+
+`session` is a `WebTransportSession` object that represents a WebTransport over HTTP/3 session. It provides APIs to handle the session.
+
+### Handler APIs
+
+#### `connection_received(request_headers, response_headers)`
+Called whenever an extended CONNECT method is received.
+
+- <b>Parameters</b>
+
+ - <b>request_headers</b>: The request headers received from the peer.
+ - <b>response_headers</b>: The response headers which will be sent to the peer. `:status` is set to 200 when it isn't specified.
+
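+A minimal sketch (illustrative only; the `x-echo-path` header name is made up
+here, and header entries are assumed to be `(bytes, bytes)` tuples as in
+`handler.py`) that copies the requested `:path` into a response header:
+
+```python
+def connection_received(request_headers, response_headers):
+ headers = dict(request_headers) # header names and values are bytes
+ response_headers.append((b"x-echo-path", headers.get(b":path", b"")))
+```
+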
+---
+
+#### `session_established(session)`
+Called whenever a WebTransport session is established.
+
+- <b>Parameters</b>
+
+ - <b>session</b>: A WebTransport session object.
+
+---
+
+#### `stream_data_received(session, stream_id, data, stream_ended)`
+Called whenever data is received on a WebTransport stream.
+
+- <b>Parameters</b>
+
+ - <b>session</b>: A WebTransport session object.
+ - <b>stream_id</b>: The ID of the stream.
+ - <b>data</b>: The received data.
+ - <b>stream_ended</b>: Whether the stream is ended.
+
+---
+
+#### `datagram_received(session, data)`
+Called whenever a datagram is received on a WebTransport session.
+
+- <b>Parameters</b>
+
+ - <b>session</b>: A WebTransport session object.
+ - <b>data</b>: The received data.
+
+---
+
+#### `stream_reset(session, stream_id, error_code)`
+Called whenever a stream is reset with RESET_STREAM.
+
+- <b>Parameters</b>
+
+ - <b>session</b>: A WebTransport session object.
+ - <b>stream_id</b>: The ID of the stream.
+ - <b>error_code</b>: The reason of the reset.
diff --git a/testing/web-platform/tests/tools/webtransport/__init__.py b/testing/web-platform/tests/tools/webtransport/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/webtransport/__init__.py
diff --git a/testing/web-platform/tests/tools/webtransport/h3/__init__.py b/testing/web-platform/tests/tools/webtransport/h3/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/webtransport/h3/__init__.py
diff --git a/testing/web-platform/tests/tools/webtransport/h3/capsule.py b/testing/web-platform/tests/tools/webtransport/h3/capsule.py
new file mode 100644
index 0000000000..8844dbc8c7
--- /dev/null
+++ b/testing/web-platform/tests/tools/webtransport/h3/capsule.py
@@ -0,0 +1,111 @@
+# mypy: no-warn-return-any
+
+from enum import IntEnum
+from typing import Iterator, Optional
+
+# TODO(bashi): Remove import check suppressions once aioquic dependency is
+# resolved.
+from aioquic.buffer import UINT_VAR_MAX_SIZE, Buffer, BufferReadError # type: ignore
+
+
+class CapsuleType(IntEnum):
+ # Defined in
+ # https://www.ietf.org/archive/id/draft-ietf-masque-h3-datagram-03.html.
+ DATAGRAM = 0xff37a0
+ REGISTER_DATAGRAM_CONTEXT = 0xff37a1
+ REGISTER_DATAGRAM_NO_CONTEXT = 0xff37a2
+ CLOSE_DATAGRAM_CONTEXT = 0xff37a3
+ # Defined in
+ # https://www.ietf.org/archive/id/draft-ietf-webtrans-http3-01.html.
+ CLOSE_WEBTRANSPORT_SESSION = 0x2843
+
+
+class H3Capsule:
+ """
+ Represents the Capsule concept defined in
+ https://ietf-wg-masque.github.io/draft-ietf-masque-h3-datagram/draft-ietf-masque-h3-datagram.html#name-capsules.
+ """
+ def __init__(self, type: int, data: bytes) -> None:
+ """
+ :param type: the type of this Capsule. We don't use CapsuleType here
+ because this may be a capsule of an unknown type.
+ :param data: the payload.
+ """
+ self.type = type
+ self.data = data
+
+ def encode(self) -> bytes:
+ """
+ Encodes this H3Capsule and returns the bytes.
+ """
+ buffer = Buffer(capacity=len(self.data) + 2 * UINT_VAR_MAX_SIZE)
+ buffer.push_uint_var(self.type)
+ buffer.push_uint_var(len(self.data))
+ buffer.push_bytes(self.data)
+ return buffer.data
+
+
+class H3CapsuleDecoder:
+ """
+ A decoder of H3Capsule. This is a streaming decoder and can handle multiple
+ capsules.
+ """
+ def __init__(self) -> None:
+ self._buffer: Optional[Buffer] = None
+ self._type: Optional[int] = None
+ self._length: Optional[int] = None
+ self._final: bool = False
+
+ def append(self, data: bytes) -> None:
+ """
+ Appends the given bytes to this decoder.
+ """
+ assert not self._final
+
+ if len(data) == 0:
+ return
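+ # Carry over any bytes that have not been decoded yet so that capsules
+ # split across append() calls are handled correctly.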
+ if self._buffer:
+ remaining = self._buffer.pull_bytes(
+ self._buffer.capacity - self._buffer.tell())
+ self._buffer = Buffer(data=(remaining + data))
+ else:
+ self._buffer = Buffer(data=data)
+
+ def final(self) -> None:
+ """
+ Pushes the end-of-stream mark to this decoder. After calling this,
+ calling append() will be invalid.
+ """
+ self._final = True
+
+ def __iter__(self) -> Iterator[H3Capsule]:
+ """
+ Yields decoded capsules.
+ """
+ try:
+ while self._buffer is not None:
+ if self._type is None:
+ self._type = self._buffer.pull_uint_var()
+ if self._length is None:
+ self._length = self._buffer.pull_uint_var()
+ if self._buffer.capacity - self._buffer.tell() < self._length:
+ if self._final:
+ raise ValueError('insufficient buffer')
+ return
+ capsule = H3Capsule(
+ self._type, self._buffer.pull_bytes(self._length))
+ self._type = None
+ self._length = None
+ if self._buffer.tell() == self._buffer.capacity:
+ self._buffer = None
+ yield capsule
+ except BufferReadError as e:
+ if self._final:
+ raise e
+ if not self._buffer:
+ return
+ size = self._buffer.capacity - self._buffer.tell()
+ if size >= UINT_VAR_MAX_SIZE:
+ raise e
+ # Ignore the error because there may not be sufficient input.
+ return
diff --git a/testing/web-platform/tests/tools/webtransport/h3/handler.py b/testing/web-platform/tests/tools/webtransport/h3/handler.py
new file mode 100644
index 0000000000..9b6cb1ab20
--- /dev/null
+++ b/testing/web-platform/tests/tools/webtransport/h3/handler.py
@@ -0,0 +1,76 @@
+from typing import List, Optional, Tuple
+
+from .webtransport_h3_server import WebTransportSession
+
+# This file exists for documentation purposes.
+
+
+def connect_received(request_headers: List[Tuple[bytes, bytes]],
+ response_headers: List[Tuple[bytes, bytes]]) -> None:
+ """
+ Called whenever an extended CONNECT method is received.
+
+ :param request_headers: The request headers received from the peer.
+ :param response_headers: The response headers which will be sent to the peer. ``:status`` is set
+ to 200 when it isn't specified.
+ """
+ pass
+
+
+def session_established(session: WebTransportSession) -> None:
+ """
+ Called whenever a WebTransport session is established.
+
+ :param session: A WebTransport session object.
+ """
+
+
+def stream_data_received(session: WebTransportSession, stream_id: int,
+ data: bytes, stream_ended: bool) -> None:
+ """
+ Called whenever data is received on a WebTransport stream.
+
+ :param session: A WebTransport session object.
+ :param stream_id: The ID of the stream.
+ :param data: The received data.
+ :param stream_ended: Whether the stream is ended.
+ """
+ pass
+
+
+def datagram_received(session: WebTransportSession, data: bytes) -> None:
+ """
+ Called whenever a datagram is received on a WebTransport session.
+
+ :param session: A WebTransport session object.
+ :param data: The received data.
+ """
+ pass
+
+
+def session_closed(session: WebTransportSession,
+ close_info: Optional[Tuple[int, bytes]],
+ abruptly: bool) -> None:
+ """
+ Called when a WebTransport session is closed.
+
+ :param session: A WebTransport session.
+ :param close_info: The code and reason attached to the
+ CLOSE_WEBTRANSPORT_SESSION capsule.
+ :param abruptly: True when the session is closed forcibly
+ (by a CLOSE_CONNECTION QUIC frame for example).
+ """
+ pass
+
+
+def stream_reset(session: WebTransportSession,
+ stream_id: int,
+ error_code: int) -> None:
+ """
+ Called when a stream is reset with RESET_STREAM.
+
+ :param session: A WebTransport session.
+ :param stream_id: The ID of the stream.
+ :param error_code: The reason of the reset.
+ """
+ pass
diff --git a/testing/web-platform/tests/tools/webtransport/h3/test_capsule.py b/testing/web-platform/tests/tools/webtransport/h3/test_capsule.py
new file mode 100644
index 0000000000..4321775e93
--- /dev/null
+++ b/testing/web-platform/tests/tools/webtransport/h3/test_capsule.py
@@ -0,0 +1,130 @@
+# type: ignore
+
+import unittest
+import importlib.util
+import pytest
+
+if importlib.util.find_spec('aioquic'):
+ has_aioquic = True
+ from .capsule import H3Capsule, H3CapsuleDecoder
+ from aioquic.buffer import BufferReadError
+else:
+ has_aioquic = False
+
+
+class H3CapsuleTest(unittest.TestCase):
+ @pytest.mark.skipif(not has_aioquic, reason='not having aioquic')
+ def test_capsule(self) -> None:
+ capsule1 = H3Capsule(0x12345, b'abcde')
+ bs = capsule1.encode()
+ decoder = H3CapsuleDecoder()
+ decoder.append(bs)
+ capsule2 = next(iter(decoder))
+
+ self.assertEqual(bs, b'\x80\x01\x23\x45\x05abcde', 'bytes')
+ self.assertEqual(capsule1.type, capsule2.type, 'type')
+ self.assertEqual(capsule1.data, capsule2.data, 'data')
+
+ @pytest.mark.skipif(
+ not has_aioquic, reason='not having aioquic')
+ def test_small_capsule(self) -> None:
+ capsule1 = H3Capsule(0, b'')
+ bs = capsule1.encode()
+ decoder = H3CapsuleDecoder()
+ decoder.append(bs)
+ capsule2 = next(iter(decoder))
+
+ self.assertEqual(bs, b'\x00\x00', 'bytes')
+ self.assertEqual(capsule1.type, capsule2.type, 'type')
+ self.assertEqual(capsule1.data, capsule2.data, 'data')
+
+ @pytest.mark.skipif(not has_aioquic, reason='not having aioquic')
+ def test_capsule_append(self) -> None:
+ decoder = H3CapsuleDecoder()
+ decoder.append(b'\x80')
+
+ with self.assertRaises(StopIteration):
+ next(iter(decoder))
+
+ decoder.append(b'\x01\x23')
+ with self.assertRaises(StopIteration):
+ next(iter(decoder))
+
+ decoder.append(b'\x45\x05abcd')
+ with self.assertRaises(StopIteration):
+ next(iter(decoder))
+
+ decoder.append(b'e\x00')
+ capsule1 = next(iter(decoder))
+
+ self.assertEqual(capsule1.type, 0x12345, 'type')
+ self.assertEqual(capsule1.data, b'abcde', 'data')
+
+ with self.assertRaises(StopIteration):
+ next(iter(decoder))
+
+ decoder.append(b'\x00')
+ capsule2 = next(iter(decoder))
+ self.assertEqual(capsule2.type, 0, 'type')
+ self.assertEqual(capsule2.data, b'', 'data')
+
+ @pytest.mark.skipif(not has_aioquic, reason='not having aioquic')
+ def test_multiple_values(self) -> None:
+ decoder = H3CapsuleDecoder()
+ decoder.append(b'\x01\x02ab\x03\x04cdef')
+
+ it = iter(decoder)
+ capsule1 = next(it)
+ capsule2 = next(it)
+ with self.assertRaises(StopIteration):
+ next(it)
+ with self.assertRaises(StopIteration):
+ next(iter(decoder))
+
+ self.assertEqual(capsule1.type, 1, 'type')
+ self.assertEqual(capsule1.data, b'ab', 'data')
+ self.assertEqual(capsule2.type, 3, 'type')
+ self.assertEqual(capsule2.data, b'cdef', 'data')
+
+ @pytest.mark.skipif(not has_aioquic, reason='not having aioquic')
+ def test_final(self) -> None:
+ decoder = H3CapsuleDecoder()
+ decoder.append(b'\x01')
+
+ with self.assertRaises(StopIteration):
+ next(iter(decoder))
+
+ decoder.append(b'\x01a')
+ decoder.final()
+ capsule1 = next(iter(decoder))
+ with self.assertRaises(StopIteration):
+ next(iter(decoder))
+
+ self.assertEqual(capsule1.type, 1, 'type')
+ self.assertEqual(capsule1.data, b'a', 'data')
+
+ @pytest.mark.skipif(not has_aioquic, reason='not having aioquic')
+ def test_empty_bytes_before_fin(self) -> None:
+ decoder = H3CapsuleDecoder()
+ decoder.append(b'')
+ decoder.final()
+
+ it = iter(decoder)
+ with self.assertRaises(StopIteration):
+ next(it)
+
+ @pytest.mark.skipif(not has_aioquic, reason='not having aioquic')
+ def test_final_invalid(self) -> None:
+ decoder = H3CapsuleDecoder()
+ decoder.append(b'\x01')
+
+ with self.assertRaises(StopIteration):
+ next(iter(decoder))
+
+ decoder.final()
+ with self.assertRaises(BufferReadError):
+ next(iter(decoder))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/web-platform/tests/tools/webtransport/h3/webtransport_h3_server.py b/testing/web-platform/tests/tools/webtransport/h3/webtransport_h3_server.py
new file mode 100644
index 0000000000..d3031b4c0c
--- /dev/null
+++ b/testing/web-platform/tests/tools/webtransport/h3/webtransport_h3_server.py
@@ -0,0 +1,545 @@
+# mypy: allow-subclassing-any, no-warn-return-any
+
+import asyncio
+import logging
+import os
+import ssl
+import threading
+import traceback
+from urllib.parse import urlparse
+from typing import Any, Dict, List, Optional, Tuple
+
+# TODO(bashi): Remove import check suppressions once aioquic dependency is resolved.
+from aioquic.buffer import Buffer # type: ignore
+from aioquic.asyncio import QuicConnectionProtocol, serve # type: ignore
+from aioquic.asyncio.client import connect # type: ignore
+from aioquic.h3.connection import H3_ALPN, FrameType, H3Connection, ProtocolError, Setting # type: ignore
+from aioquic.h3.events import H3Event, HeadersReceived, WebTransportStreamDataReceived, DatagramReceived, DataReceived # type: ignore
+from aioquic.quic.configuration import QuicConfiguration # type: ignore
+from aioquic.quic.connection import logger as quic_connection_logger # type: ignore
+from aioquic.quic.connection import stream_is_unidirectional
+from aioquic.quic.events import QuicEvent, ProtocolNegotiated, ConnectionTerminated, StreamReset # type: ignore
+from aioquic.tls import SessionTicket # type: ignore
+
+from tools.wptserve.wptserve import stash # type: ignore
+from .capsule import H3Capsule, H3CapsuleDecoder, CapsuleType
+
+"""
+A WebTransport over HTTP/3 server for testing.
+
+The server interprets the underlying protocols (WebTransport, HTTP/3 and QUIC)
+and passes events to a particular webtransport handler. From the standpoint of
+test authors, a webtransport handler is a Python script which contains some
+callback functions. See handler.py for available callbacks.
+"""
+
+SERVER_NAME = 'webtransport-h3-server'
+
+_logger: logging.Logger = logging.getLogger(__name__)
+_doc_root: str = ""
+
+# Set aioquic's log level to WARNING to suppress some INFO logs which are
+# recorded every connection close.
+quic_connection_logger.setLevel(logging.WARNING)
+
+
+class H3ConnectionWithDatagram04(H3Connection):
+ """
+    An H3Connection subclass that works with the latest
+    HTTP Datagram protocol.
+ """
+ H3_DATAGRAM_04 = 0xffd277
+ # https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-h3-websockets-00#section-5
+ ENABLE_CONNECT_PROTOCOL = 0x08
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self._supports_h3_datagram_04 = False
+
+ def _validate_settings(self, settings: Dict[int, int]) -> None:
+ H3_DATAGRAM_04 = H3ConnectionWithDatagram04.H3_DATAGRAM_04
+ if H3_DATAGRAM_04 in settings and settings[H3_DATAGRAM_04] == 1:
+ settings[Setting.H3_DATAGRAM] = 1
+ self._supports_h3_datagram_04 = True
+ return super()._validate_settings(settings)
+
+ def _get_local_settings(self) -> Dict[int, int]:
+ H3_DATAGRAM_04 = H3ConnectionWithDatagram04.H3_DATAGRAM_04
+ settings = super()._get_local_settings()
+ settings[H3_DATAGRAM_04] = 1
+ settings[H3ConnectionWithDatagram04.ENABLE_CONNECT_PROTOCOL] = 1
+ return settings
+
+ @property
+ def supports_h3_datagram_04(self) -> bool:
+ """
+ True if the client supports the latest HTTP Datagram protocol.
+ """
+ return self._supports_h3_datagram_04
+
+
+class WebTransportH3Protocol(QuicConnectionProtocol):
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self._handler: Optional[Any] = None
+ self._http: Optional[H3ConnectionWithDatagram04] = None
+ self._session_stream_id: Optional[int] = None
+ self._close_info: Optional[Tuple[int, bytes]] = None
+ self._capsule_decoder_for_session_stream: H3CapsuleDecoder =\
+ H3CapsuleDecoder()
+ self._allow_calling_session_closed = True
+ self._allow_datagrams = False
+
+ def quic_event_received(self, event: QuicEvent) -> None:
+ if isinstance(event, ProtocolNegotiated):
+ self._http = H3ConnectionWithDatagram04(
+ self._quic, enable_webtransport=True)
+ if not self._http.supports_h3_datagram_04:
+ self._allow_datagrams = True
+
+ if self._http is not None:
+ for http_event in self._http.handle_event(event):
+ self._h3_event_received(http_event)
+
+ if isinstance(event, ConnectionTerminated):
+ self._call_session_closed(close_info=None, abruptly=True)
+ if isinstance(event, StreamReset):
+ if self._handler:
+ self._handler.stream_reset(event.stream_id, event.error_code)
+
+ def _h3_event_received(self, event: H3Event) -> None:
+ if isinstance(event, HeadersReceived):
+ # Convert from List[Tuple[bytes, bytes]] to Dict[bytes, bytes].
+ # Only the last header will be kept when there are duplicate
+ # headers.
+ headers = {}
+ for header, value in event.headers:
+ headers[header] = value
+
+ method = headers.get(b":method")
+ protocol = headers.get(b":protocol")
+ if method == b"CONNECT" and protocol == b"webtransport":
+ self._session_stream_id = event.stream_id
+ self._handshake_webtransport(event, headers)
+ else:
+ self._send_error_response(event.stream_id, 400)
+
+ if isinstance(event, DataReceived) and\
+ self._session_stream_id == event.stream_id:
+ if self._http and not self._http.supports_h3_datagram_04 and\
+ len(event.data) > 0:
+ raise ProtocolError('Unexpected data on the session stream')
+ self._receive_data_on_session_stream(
+ event.data, event.stream_ended)
+ elif self._handler is not None:
+ if isinstance(event, WebTransportStreamDataReceived):
+ self._handler.stream_data_received(
+ stream_id=event.stream_id,
+ data=event.data,
+ stream_ended=event.stream_ended)
+ elif isinstance(event, DatagramReceived):
+ if self._allow_datagrams:
+ self._handler.datagram_received(data=event.data)
+
+ def _receive_data_on_session_stream(self, data: bytes, fin: bool) -> None:
+ self._capsule_decoder_for_session_stream.append(data)
+ if fin:
+ self._capsule_decoder_for_session_stream.final()
+ for capsule in self._capsule_decoder_for_session_stream:
+ if capsule.type in {CapsuleType.DATAGRAM,
+ CapsuleType.REGISTER_DATAGRAM_CONTEXT,
+ CapsuleType.CLOSE_DATAGRAM_CONTEXT}:
+ raise ProtocolError(
+ f"Unimplemented capsule type: {capsule.type}")
+ if capsule.type in {CapsuleType.REGISTER_DATAGRAM_NO_CONTEXT,
+ CapsuleType.CLOSE_WEBTRANSPORT_SESSION}:
+ # We'll handle this case below.
+ pass
+ else:
+ # We should ignore unknown capsules.
+ continue
+
+ if self._close_info is not None:
+ raise ProtocolError((
+ "Receiving a capsule with type = {} after receiving " +
+ "CLOSE_WEBTRANSPORT_SESSION").format(capsule.type))
+
+ if capsule.type == CapsuleType.REGISTER_DATAGRAM_NO_CONTEXT:
+ buffer = Buffer(data=capsule.data)
+ format_type = buffer.pull_uint_var()
+ # https://ietf-wg-webtrans.github.io/draft-ietf-webtrans-http3/draft-ietf-webtrans-http3.html#name-datagram-format-type
+                WEBTRANSPORT_FORMAT_TYPE = 0xff7c00
+                if format_type != WEBTRANSPORT_FORMAT_TYPE:
+ raise ProtocolError(
+ "Unexpected datagram format type: {}".format(
+ format_type))
+ self._allow_datagrams = True
+ elif capsule.type == CapsuleType.CLOSE_WEBTRANSPORT_SESSION:
+ buffer = Buffer(data=capsule.data)
+ code = buffer.pull_uint32()
+ # 4 bytes for the uint32.
+ reason = buffer.pull_bytes(len(capsule.data) - 4)
+ # TODO(yutakahirano): Make sure `reason` is a UTF-8 text.
+ self._close_info = (code, reason)
+ if fin:
+ self._call_session_closed(self._close_info, abruptly=False)
+
+ def _send_error_response(self, stream_id: int, status_code: int) -> None:
+ assert self._http is not None
+ headers = [(b"server", SERVER_NAME.encode()),
+ (b":status", str(status_code).encode())]
+ self._http.send_headers(stream_id=stream_id,
+ headers=headers,
+ end_stream=True)
+
+ def _handshake_webtransport(self, event: HeadersReceived,
+ request_headers: Dict[bytes, bytes]) -> None:
+ assert self._http is not None
+ path = request_headers.get(b":path")
+ if path is None:
+ # `:path` must be provided.
+ self._send_error_response(event.stream_id, 400)
+ return
+
+ # Create a handler using `:path`.
+ try:
+ self._handler = self._create_event_handler(
+ session_id=event.stream_id,
+ path=path,
+ request_headers=event.headers)
+ except OSError:
+ self._send_error_response(event.stream_id, 404)
+ return
+
+ response_headers = [
+ (b"server", SERVER_NAME.encode()),
+ (b"sec-webtransport-http3-draft", b"draft02"),
+ ]
+ self._handler.connect_received(response_headers=response_headers)
+
+ status_code = None
+ for name, value in response_headers:
+ if name == b":status":
+ status_code = value
+ break
+ if not status_code:
+ response_headers.append((b":status", b"200"))
+ self._http.send_headers(stream_id=event.stream_id,
+ headers=response_headers)
+
+ if status_code is None or status_code == b"200":
+ self._handler.session_established()
+
+ def _create_event_handler(self, session_id: int, path: bytes,
+ request_headers: List[Tuple[bytes, bytes]]) -> Any:
+ parsed = urlparse(path.decode())
+ file_path = os.path.join(_doc_root, parsed.path.lstrip("/"))
+ callbacks = {"__file__": file_path}
+ with open(file_path) as f:
+ exec(compile(f.read(), path, "exec"), callbacks)
+ session = WebTransportSession(self, session_id, request_headers)
+ return WebTransportEventHandler(session, callbacks)
+
+ def _call_session_closed(
+ self, close_info: Optional[Tuple[int, bytes]],
+ abruptly: bool) -> None:
+ allow_calling_session_closed = self._allow_calling_session_closed
+ self._allow_calling_session_closed = False
+ if self._handler and allow_calling_session_closed:
+ self._handler.session_closed(close_info, abruptly)
+
+
+class WebTransportSession:
+ """
+ A WebTransport session.
+ """
+
+ def __init__(self, protocol: WebTransportH3Protocol, session_id: int,
+ request_headers: List[Tuple[bytes, bytes]]) -> None:
+ self.session_id = session_id
+ self.request_headers = request_headers
+
+ self._protocol: WebTransportH3Protocol = protocol
+ self._http: H3Connection = protocol._http
+
+        # Use a shared default path for all handlers so that different
+ # WebTransport sessions can access the same store easily.
+ self._stash_path = '/webtransport/handlers'
+ self._stash: Optional[stash.Stash] = None
+ self._dict_for_handlers: Dict[str, Any] = {}
+
+ @property
+ def stash(self) -> stash.Stash:
+ """A Stash object for storing cross-session state."""
+ if self._stash is None:
+ address, authkey = stash.load_env_config()
+ self._stash = stash.Stash(self._stash_path, address, authkey)
+ return self._stash
+
+ @property
+ def dict_for_handlers(self) -> Dict[str, Any]:
+ """A dictionary that handlers can attach arbitrary data."""
+ return self._dict_for_handlers
+
+ def stream_is_unidirectional(self, stream_id: int) -> bool:
+ """Return True if the stream is unidirectional."""
+ return stream_is_unidirectional(stream_id)
+
+ def close(self, close_info: Optional[Tuple[int, bytes]]) -> None:
+ """
+ Close the session.
+
+        :param close_info: The close information to send.
+ """
+ self._protocol._allow_calling_session_closed = False
+ assert self._protocol._session_stream_id is not None
+ session_stream_id = self._protocol._session_stream_id
+ if close_info is not None:
+ code = close_info[0]
+ reason = close_info[1]
+ buffer = Buffer(capacity=len(reason) + 4)
+ buffer.push_uint32(code)
+ buffer.push_bytes(reason)
+ capsule =\
+ H3Capsule(CapsuleType.CLOSE_WEBTRANSPORT_SESSION, buffer.data)
+ self._http.send_data(session_stream_id, capsule.encode(), end_stream=False)
+
+ self._http.send_data(session_stream_id, b'', end_stream=True)
+ # TODO(yutakahirano): Reset all other streams.
+ # TODO(yutakahirano): Reject future stream open requests
+ # We need to wait for the stream data to arrive at the client, and then
+ # we need to close the connection. At this moment we're relying on the
+ # client's behavior.
+ # TODO(yutakahirano): Implement the above.
+
+ def create_unidirectional_stream(self) -> int:
+ """
+ Create a unidirectional WebTransport stream and return the stream ID.
+ """
+ return self._http.create_webtransport_stream(
+ session_id=self.session_id, is_unidirectional=True)
+
+ def create_bidirectional_stream(self) -> int:
+ """
+ Create a bidirectional WebTransport stream and return the stream ID.
+ """
+ stream_id = self._http.create_webtransport_stream(
+ session_id=self.session_id, is_unidirectional=False)
+ # TODO(bashi): Remove this workaround when aioquic supports receiving
+ # data on server-initiated bidirectional streams.
+ stream = self._http._get_or_create_stream(stream_id)
+ assert stream.frame_type is None
+ assert stream.session_id is None
+ stream.frame_type = FrameType.WEBTRANSPORT_STREAM
+ stream.session_id = self.session_id
+ return stream_id
+
+ def send_stream_data(self,
+ stream_id: int,
+ data: bytes,
+ end_stream: bool = False) -> None:
+ """
+ Send data on the specific stream.
+
+ :param stream_id: The stream ID on which to send the data.
+ :param data: The data to send.
+ :param end_stream: If set to True, the stream will be closed.
+ """
+ self._http._quic.send_stream_data(stream_id=stream_id,
+ data=data,
+ end_stream=end_stream)
+
+ def send_datagram(self, data: bytes) -> None:
+ """
+ Send data using a datagram frame.
+
+ :param data: The data to send.
+ """
+ if not self._protocol._allow_datagrams:
+            _logger.warning(
+                "Sending a datagram while that's not allowed - discarding it")
+ return
+ flow_id = self.session_id
+ if self._http.supports_h3_datagram_04:
+ # The REGISTER_DATAGRAM_NO_CONTEXT capsule was on the session
+ # stream, so we must have the ID of the stream.
+ assert self._protocol._session_stream_id is not None
+            # TODO(yutakahirano): Check whether this is the correct logic.
+            # Chrome always uses 0 for the initial stream and the initial flow
+            # ID, so we cannot verify the correctness against it.
+ flow_id = self._protocol._session_stream_id // 4
+ self._http.send_datagram(flow_id=flow_id, data=data)
+
+ def stop_stream(self, stream_id: int, code: int) -> None:
+ """
+ Send a STOP_SENDING frame to the given stream.
+        :param code: the error code.
+ """
+ self._http._quic.stop_stream(stream_id, code)
+
+ def reset_stream(self, stream_id: int, code: int) -> None:
+ """
+ Send a RESET_STREAM frame to the given stream.
+        :param code: the error code.
+ """
+ self._http._quic.reset_stream(stream_id, code)
+
+
+class WebTransportEventHandler:
+ def __init__(self, session: WebTransportSession,
+ callbacks: Dict[str, Any]) -> None:
+ self._session = session
+ self._callbacks = callbacks
+
+ def _run_callback(self, callback_name: str,
+ *args: Any, **kwargs: Any) -> None:
+ if callback_name not in self._callbacks:
+ return
+ try:
+ self._callbacks[callback_name](*args, **kwargs)
+ except Exception as e:
+            _logger.warning(str(e))
+ traceback.print_exc()
+
+ def connect_received(self, response_headers: List[Tuple[bytes,
+ bytes]]) -> None:
+ self._run_callback("connect_received", self._session.request_headers,
+ response_headers)
+
+ def session_established(self) -> None:
+ self._run_callback("session_established", self._session)
+
+ def stream_data_received(self, stream_id: int, data: bytes,
+ stream_ended: bool) -> None:
+ self._run_callback("stream_data_received", self._session, stream_id,
+ data, stream_ended)
+
+ def datagram_received(self, data: bytes) -> None:
+ self._run_callback("datagram_received", self._session, data)
+
+ def session_closed(
+ self,
+ close_info: Optional[Tuple[int, bytes]],
+ abruptly: bool) -> None:
+ self._run_callback(
+ "session_closed", self._session, close_info, abruptly=abruptly)
+
+ def stream_reset(self, stream_id: int, error_code: int) -> None:
+ self._run_callback(
+ "stream_reset", self._session, stream_id, error_code)
+
+
+class SessionTicketStore:
+ """
+ Simple in-memory store for session tickets.
+ """
+ def __init__(self) -> None:
+ self.tickets: Dict[bytes, SessionTicket] = {}
+
+ def add(self, ticket: SessionTicket) -> None:
+ self.tickets[ticket.ticket] = ticket
+
+ def pop(self, label: bytes) -> Optional[SessionTicket]:
+ return self.tickets.pop(label, None)
+
+
+class WebTransportH3Server:
+ """
+    A WebTransport over HTTP/3 server for testing.
+
+ :param host: Host from which to serve.
+ :param port: Port from which to serve.
+ :param doc_root: Document root for serving handlers.
+ :param cert_path: Path to certificate file to use.
+ :param key_path: Path to key file to use.
+ :param logger: a Logger object for this server.
+ """
+ def __init__(self, host: str, port: int, doc_root: str, cert_path: str,
+ key_path: str, logger: Optional[logging.Logger]) -> None:
+ self.host = host
+ self.port = port
+ self.doc_root = doc_root
+ self.cert_path = cert_path
+ self.key_path = key_path
+ self.started = False
+ global _doc_root
+ _doc_root = self.doc_root
+ global _logger
+ if logger is not None:
+ _logger = logger
+
+ def start(self) -> None:
+ """Start the server."""
+ self.server_thread = threading.Thread(
+ target=self._start_on_server_thread, daemon=True)
+ self.server_thread.start()
+ self.started = True
+
+ def _start_on_server_thread(self) -> None:
+ configuration = QuicConfiguration(
+ alpn_protocols=H3_ALPN,
+ is_client=False,
+ max_datagram_frame_size=65536,
+ )
+
+ _logger.info("Starting WebTransport over HTTP/3 server on %s:%s",
+ self.host, self.port)
+
+ configuration.load_cert_chain(self.cert_path, self.key_path)
+
+ ticket_store = SessionTicketStore()
+
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self.loop)
+ self.loop.run_until_complete(
+ serve(
+ self.host,
+ self.port,
+ configuration=configuration,
+ create_protocol=WebTransportH3Protocol,
+ session_ticket_fetcher=ticket_store.pop,
+ session_ticket_handler=ticket_store.add,
+ ))
+ self.loop.run_forever()
+
+ def stop(self) -> None:
+ """Stop the server."""
+ if self.started:
+ asyncio.run_coroutine_threadsafe(self._stop_on_server_thread(),
+ self.loop)
+ self.server_thread.join()
+ _logger.info("Stopped WebTransport over HTTP/3 server on %s:%s",
+ self.host, self.port)
+ self.started = False
+
+ async def _stop_on_server_thread(self) -> None:
+ self.loop.stop()
+
+
+def server_is_running(host: str, port: int, timeout: float) -> bool:
+ """
+    Check that the WebTransport over HTTP/3 server is running at the given
+    `host` and `port`.
+ """
+ loop = asyncio.get_event_loop()
+ return loop.run_until_complete(_connect_server_with_timeout(host, port, timeout))
+
+
+async def _connect_server_with_timeout(host: str, port: int, timeout: float) -> bool:
+ try:
+ await asyncio.wait_for(_connect_to_server(host, port), timeout=timeout)
+ except asyncio.TimeoutError:
+ _logger.warning("Failed to connect WebTransport over HTTP/3 server")
+ return False
+ return True
+
+
+async def _connect_to_server(host: str, port: int) -> None:
+ configuration = QuicConfiguration(
+ alpn_protocols=H3_ALPN,
+ is_client=True,
+ verify_mode=ssl.CERT_NONE,
+ )
+
+ async with connect(host, port, configuration=configuration) as protocol:
+ await protocol.ping()
diff --git a/testing/web-platform/tests/tools/webtransport/requirements.txt b/testing/web-platform/tests/tools/webtransport/requirements.txt
new file mode 100644
index 0000000000..4e347c647c
--- /dev/null
+++ b/testing/web-platform/tests/tools/webtransport/requirements.txt
@@ -0,0 +1,4 @@
+# aioquic 0.9.15 is the last to support Python 3.6, but doesn't have prebuilt
+# wheels for Python 3.10, so use a different version depending on Python.
+aioquic==0.9.15; python_version == '3.6'
+aioquic==0.9.19; python_version != '3.6'
diff --git a/testing/web-platform/tests/tools/wpt/__init__.py b/testing/web-platform/tests/tools/wpt/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/__init__.py
diff --git a/testing/web-platform/tests/tools/wpt/android.py b/testing/web-platform/tests/tools/wpt/android.py
new file mode 100644
index 0000000000..366502cc6c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/android.py
@@ -0,0 +1,181 @@
+# mypy: allow-untyped-defs
+
+import argparse
+import os
+import platform
+import shutil
+import subprocess
+
+import requests
+from .wpt import venv_dir
+
+android_device = None
+
+here = os.path.abspath(os.path.dirname(__file__))
+wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
+
+
+def do_delayed_imports():
+ global android_device
+ from mozrunner.devices import android_device
+ android_device.TOOLTOOL_PATH = os.path.join(os.path.dirname(__file__),
+ os.pardir,
+ "third_party",
+ "tooltool",
+ "tooltool.py")
+
+
+def get_parser_install():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--reinstall", action="store_true", default=False,
+ help="Force reinstall even if the emulator already exists")
+ return parser
+
+
+def get_parser_start():
+ return get_parser_install()
+
+
+def get_sdk_path(dest):
+ if dest is None:
+ # os.getcwd() doesn't include the venv path
+ dest = os.path.join(wpt_root, venv_dir())
+ dest = os.path.join(dest, 'android-sdk')
+ return os.path.abspath(os.environ.get('ANDROID_SDK_PATH', dest))
+
+
+def uninstall_sdk(dest=None):
+ path = get_sdk_path(dest)
+ if os.path.exists(path) and os.path.isdir(path):
+ shutil.rmtree(path)
+
+
+def install_sdk(logger, dest=None):
+ sdk_path = get_sdk_path(dest)
+ if os.path.isdir(sdk_path):
+ logger.info("Using SDK installed at %s" % sdk_path)
+ return sdk_path, False
+
+ if not os.path.exists(sdk_path):
+ os.makedirs(sdk_path)
+
+ os_name = platform.system().lower()
+ if os_name not in ["darwin", "linux", "windows"]:
+ logger.critical("Unsupported platform %s" % os_name)
+ raise NotImplementedError
+
+ os_name = 'darwin' if os_name == 'macosx' else os_name
+ # TODO: either always use the latest version or have some way to
+ # configure a per-product version if there are strong requirements
+ # to use a specific version.
+ url = f'https://dl.google.com/android/repository/sdk-tools-{os_name}-4333796.zip'
+
+ logger.info("Getting SDK from %s" % url)
+ temp_path = os.path.join(sdk_path, url.rsplit("/", 1)[1])
+ try:
+ with open(temp_path, "wb") as f:
+ with requests.get(url, stream=True) as resp:
+ shutil.copyfileobj(resp.raw, f)
+
+ # Python's zipfile module doesn't seem to work here
+ subprocess.check_call(["unzip", temp_path], cwd=sdk_path)
+ finally:
+ os.unlink(temp_path)
+
+ return sdk_path, True
+
+
+def install_android_packages(logger, sdk_path, no_prompt=False):
+ sdk_manager_path = os.path.join(sdk_path, "tools", "bin", "sdkmanager")
+ if not os.path.exists(sdk_manager_path):
+ raise OSError("Can't find sdkmanager at %s" % sdk_manager_path)
+
+ packages = ["platform-tools",
+ "build-tools;33.0.1",
+ "platforms;android-33",
+ "emulator"]
+
+    # TODO: make this work non-interactively
+ logger.info("Installing SDK packages")
+ cmd = [sdk_manager_path] + packages
+
+ proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
+    if no_prompt:
+        # stdin is a binary pipe, so the auto-acceptance input must be bytes.
+        proc.communicate(b"Y\n" * 100)
+ else:
+ proc.wait()
+ if proc.returncode != 0:
+ raise subprocess.CalledProcessError(proc.returncode, cmd)
+
+
+def get_emulator(sdk_path, device_serial=None):
+ if android_device is None:
+ do_delayed_imports()
+ if "ANDROID_SDK_ROOT" not in os.environ:
+ os.environ["ANDROID_SDK_ROOT"] = sdk_path
+ substs = {"top_srcdir": wpt_root, "TARGET_CPU": "x86"}
+ emulator = android_device.AndroidEmulator("*", substs=substs, device_serial=device_serial)
+ emulator.emulator_path = os.path.join(sdk_path, "emulator", "emulator")
+ return emulator
+
+
+def install(logger, reinstall=False, no_prompt=False, device_serial=None):
+ if reinstall:
+ uninstall_sdk()
+
+ dest, new_install = install_sdk(logger)
+ if new_install:
+ install_android_packages(logger, dest, no_prompt)
+
+ if "ANDROID_SDK_ROOT" not in os.environ:
+ os.environ["ANDROID_SDK_ROOT"] = dest
+
+ emulator = get_emulator(dest, device_serial=device_serial)
+ return emulator
+
+
+def start(logger, emulator=None, reinstall=False, device_serial=None):
+ if reinstall:
+        install(logger, reinstall=True)
+
+ sdk_path = get_sdk_path(None)
+
+ if emulator is None:
+ emulator = get_emulator(sdk_path, device_serial=device_serial)
+
+ if not emulator.check_avd():
+ logger.critical("Android AVD not found, please run |mach bootstrap|")
+ raise NotImplementedError
+
+ emulator.start()
+ emulator.wait_for_start()
+ return emulator
+
+
+def run_install(venv, **kwargs):
+ try:
+ import logging
+ logging.basicConfig()
+ logger = logging.getLogger()
+
+ install(logger, **kwargs)
+ except Exception:
+ import traceback
+ traceback.print_exc()
+ import pdb
+ pdb.post_mortem()
+
+
+def run_start(venv, **kwargs):
+ try:
+ import logging
+ logging.basicConfig()
+ logger = logging.getLogger()
+
+ start(logger, **kwargs)
+ except Exception:
+ import traceback
+ traceback.print_exc()
+ import pdb
+ pdb.post_mortem()
diff --git a/testing/web-platform/tests/tools/wpt/browser.py b/testing/web-platform/tests/tools/wpt/browser.py
new file mode 100644
index 0000000000..66796a8968
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/browser.py
@@ -0,0 +1,2048 @@
+# mypy: allow-untyped-defs
+import os
+import platform
+import re
+import shutil
+import stat
+import subprocess
+import tempfile
+from abc import ABCMeta, abstractmethod
+from datetime import datetime, timedelta
+from distutils.spawn import find_executable
+from urllib.parse import urlsplit
+
+import html5lib
+import requests
+from packaging.specifiers import SpecifierSet
+
+from .utils import (
+ call,
+ get,
+ get_download_to_descriptor,
+ rmtree,
+ sha256sum,
+ untar,
+ unzip,
+)
+from .wpt import venv_dir
+
+uname = platform.uname()
+
+# the rootUrl for the firefox-ci deployment of Taskcluster
+FIREFOX_CI_ROOT_URL = 'https://firefox-ci-tc.services.mozilla.com'
+
+
+def _get_fileversion(binary, logger=None):
+ command = "(Get-Item '%s').VersionInfo.FileVersion" % binary.replace("'", "''")
+ try:
+ return call("powershell.exe", command).strip()
+ except (subprocess.CalledProcessError, OSError):
+ if logger is not None:
+ logger.warning("Failed to call %s in PowerShell" % command)
+ return None
+
+
+def get_ext(filename):
+ """Get the extension from a filename with special handling for .tar.foo"""
+ name, ext = os.path.splitext(filename)
+ if name.endswith(".tar"):
+ ext = ".tar%s" % ext
+ return ext
+
+
+def get_download_filename(resp, default=None):
+ """Get the filename from a requests.Response, or default"""
+ filename = None
+
+ content_disposition = resp.headers.get("content-disposition")
+ if content_disposition:
+ filenames = re.findall("filename=(.+)", content_disposition)
+ if filenames:
+ filename = filenames[0]
+
+ if not filename:
+ filename = urlsplit(resp.url).path.rsplit("/", 1)[1]
+
+ return filename or default
+
+
+def get_taskcluster_artifact(index, path):
+ TC_INDEX_BASE = FIREFOX_CI_ROOT_URL + "/api/index/v1/"
+
+ resp = get(TC_INDEX_BASE + "task/%s/artifacts/%s" % (index, path))
+ resp.raise_for_status()
+
+ return resp
+
+
+class Browser:
+ __metaclass__ = ABCMeta
+
+ def __init__(self, logger):
+ self.logger = logger
+
+ def _get_browser_binary_dir(self, dest, channel):
+ if dest is None:
+ # os.getcwd() doesn't include the venv path
+ dest = os.path.join(os.getcwd(), venv_dir())
+
+ dest = os.path.join(dest, "browsers", channel)
+
+ if not os.path.exists(dest):
+ os.makedirs(dest)
+
+ return dest
+
+ @abstractmethod
+ def download(self, dest=None, channel=None, rename=None):
+ """Download a package or installer for the browser
+        :param dest: Directory in which to put the downloaded package
+ :param channel: Browser channel to download
+ :param rename: Optional name for the downloaded package; the original
+ extension is preserved.
+ :return: The path to the downloaded package/installer
+ """
+ return NotImplemented
+
+ @abstractmethod
+ def install(self, dest=None, channel=None):
+ """Download and install the browser.
+
+ This method usually calls download().
+
+ :param dest: Directory in which to install the browser
+ :param channel: Browser channel to install
+ :return: The path to the installed browser
+ """
+ return NotImplemented
+
+ @abstractmethod
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ """Download and install the WebDriver implementation for this browser.
+
+ :param dest: Directory in which to install the WebDriver
+ :param channel: Browser channel to install
+ :param browser_binary: The path to the browser binary
+ :return: The path to the installed WebDriver
+ """
+ return NotImplemented
+
+ @abstractmethod
+ def find_binary(self, venv_path=None, channel=None):
+ """Find the binary of the browser.
+
+        If the WebDriver for the browser is able to find the binary itself, this
+        method doesn't need to be implemented; in that case, raising
+        NotImplementedError is recommended to prevent accidental use.
+ """
+ return NotImplemented
+
+ @abstractmethod
+ def find_webdriver(self, venv_path=None, channel=None):
+ """Find the binary of the WebDriver."""
+ return NotImplemented
+
+ @abstractmethod
+ def version(self, binary=None, webdriver_binary=None):
+ """Retrieve the release version of the installed browser."""
+ return NotImplemented
+
+ @abstractmethod
+ def requirements(self):
+ """Name of the browser-specific wptrunner requirements file"""
+ return NotImplemented
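+
+
+# Illustrative sketch of how a concrete Browser implementation (for example the
+# Firefox class below) is typically driven; the channel value and use of a
+# module-level logger are assumptions for the example, not wpt defaults.
+#
+#     import logging
+#     browser = Firefox(logging.getLogger("wpt"))
+#     binary = browser.install(channel="nightly")
+#     webdriver_binary = browser.install_webdriver(channel="nightly")
+#     print(browser.version(binary=binary))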
+
+
+class Firefox(Browser):
+ """Firefox-specific interface.
+
+ Includes installation, webdriver installation, and wptrunner setup methods.
+ """
+
+ product = "firefox"
+ binary = "browsers/firefox/firefox"
+ requirements = "requirements_firefox.txt"
+
+ platform = {
+ "Linux": "linux",
+ "Windows": "win",
+ "Darwin": "macos"
+ }.get(uname[0])
+
+ application_name = {
+ "stable": "Firefox.app",
+ "beta": "Firefox.app",
+ "nightly": "Firefox Nightly.app"
+ }
+
+ def platform_string_geckodriver(self):
+ if self.platform is None:
+ raise ValueError("Unable to construct a valid Geckodriver package name for current platform")
+
+ if self.platform in ("linux", "win"):
+ bits = "64" if uname[4] == "x86_64" else "32"
+ elif self.platform == "macos" and uname.machine == "arm64":
+ bits = "-aarch64"
+ else:
+ bits = ""
+
+ return "%s%s" % (self.platform, bits)
+
+ def download(self, dest=None, channel="nightly", rename=None):
+ product = {
+ "nightly": "firefox-nightly-latest-ssl",
+ "beta": "firefox-beta-latest-ssl",
+ "stable": "firefox-latest-ssl"
+ }
+
+ os_builds = {
+ ("linux", "x86"): "linux",
+ ("linux", "x86_64"): "linux64",
+ ("win", "x86"): "win",
+ ("win", "AMD64"): "win64",
+ ("macos", "x86_64"): "osx",
+ }
+ os_key = (self.platform, uname[4])
+
+ if dest is None:
+ dest = self._get_browser_binary_dir(None, channel)
+
+ if channel not in product:
+ raise ValueError("Unrecognised release channel: %s" % channel)
+
+ if os_key not in os_builds:
+ raise ValueError("Unsupported platform: %s %s" % os_key)
+
+ url = "https://download.mozilla.org/?product=%s&os=%s&lang=en-US" % (product[channel],
+ os_builds[os_key])
+ self.logger.info("Downloading Firefox from %s" % url)
+ resp = get(url)
+
+ filename = get_download_filename(resp, "firefox.tar.bz2")
+
+ if rename:
+ filename = "%s%s" % (rename, get_ext(filename))
+
+ installer_path = os.path.join(dest, filename)
+
+ with open(installer_path, "wb") as f:
+ f.write(resp.content)
+
+ return installer_path
+
+ def install(self, dest=None, channel="nightly"):
+ """Install Firefox."""
+ import mozinstall
+
+ dest = self._get_browser_binary_dir(dest, channel)
+
+ filename = os.path.basename(dest)
+
+ installer_path = self.download(dest, channel)
+
+ try:
+ mozinstall.install(installer_path, dest)
+ except mozinstall.mozinstall.InstallError:
+ if self.platform == "macos" and os.path.exists(os.path.join(dest, self.application_name.get(channel, "Firefox Nightly.app"))):
+ # mozinstall will fail if nightly is already installed in the venv because
+ # mac installation uses shutil.copy_tree
+ mozinstall.uninstall(os.path.join(dest, self.application_name.get(channel, "Firefox Nightly.app")))
+ mozinstall.install(filename, dest)
+ else:
+ raise
+
+ os.remove(installer_path)
+ return self.find_binary_path(dest)
+
+ def find_binary_path(self, path=None, channel="nightly"):
+ """Looks for the firefox binary in the virtual environment"""
+
+ if path is None:
+ path = self._get_browser_binary_dir(None, channel)
+
+ binary = None
+
+ if self.platform == "linux":
+ binary = find_executable("firefox", os.path.join(path, "firefox"))
+ elif self.platform == "win":
+ import mozinstall
+ try:
+ binary = mozinstall.get_binary(path, "firefox")
+ except mozinstall.InvalidBinary:
+ # ignore the case where we fail to get a binary
+ pass
+ elif self.platform == "macos":
+ binary = find_executable("firefox", os.path.join(path, self.application_name.get(channel, "Firefox Nightly.app"),
+ "Contents", "MacOS"))
+
+ return binary
+
+ def find_binary(self, venv_path=None, channel="nightly"):
+
+ path = self._get_browser_binary_dir(venv_path, channel)
+ binary = self.find_binary_path(path, channel)
+
+ if not binary and self.platform == "win":
+ winpaths = [os.path.expandvars("$SYSTEMDRIVE\\Program Files\\Mozilla Firefox"),
+ os.path.expandvars("$SYSTEMDRIVE\\Program Files (x86)\\Mozilla Firefox")]
+ for winpath in winpaths:
+ binary = self.find_binary_path(winpath, channel)
+ if binary is not None:
+ break
+
+ if not binary and self.platform == "macos":
+ macpaths = ["/Applications/Firefox Nightly.app/Contents/MacOS",
+ os.path.expanduser("~/Applications/Firefox Nightly.app/Contents/MacOS"),
+ "/Applications/Firefox Developer Edition.app/Contents/MacOS",
+ os.path.expanduser("~/Applications/Firefox Developer Edition.app/Contents/MacOS"),
+ "/Applications/Firefox.app/Contents/MacOS",
+ os.path.expanduser("~/Applications/Firefox.app/Contents/MacOS")]
+ return find_executable("firefox", os.pathsep.join(macpaths))
+
+ if binary is None:
+ return find_executable("firefox")
+
+ return binary
+
+ def find_certutil(self):
+ path = find_executable("certutil")
+ if path is None:
+ return None
+ if os.path.splitdrive(os.path.normcase(path))[1].split(os.path.sep) == ["", "windows", "system32", "certutil.exe"]:
+ return None
+ return path
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ return find_executable("geckodriver")
+
+ def get_version_and_channel(self, binary):
+ version_string = call(binary, "--version").strip()
+ m = re.match(r"Mozilla Firefox (\d+\.\d+(?:\.\d+)?)(a|b)?", version_string)
+ if not m:
+ return None, "nightly"
+ version, status = m.groups()
+ channel = {"a": "nightly", "b": "beta"}
+ return version, channel.get(status, "stable")
+
+ def get_profile_bundle_url(self, version, channel):
+ if channel == "stable":
+ repo = "https://hg.mozilla.org/releases/mozilla-release"
+ tag = "FIREFOX_%s_RELEASE" % version.replace(".", "_")
+ elif channel == "beta":
+ repo = "https://hg.mozilla.org/releases/mozilla-beta"
+ major_version = version.split(".", 1)[0]
+            # For beta, the tag format differs between betas that have since
+            # shipped in a stable release and those that have not.
+ tags = get("https://hg.mozilla.org/releases/mozilla-beta/json-tags").json()["tags"]
+ tags = {item["tag"] for item in tags}
+ end_tag = "FIREFOX_BETA_%s_END" % major_version
+ if end_tag in tags:
+ tag = end_tag
+ else:
+ tag = "tip"
+ else:
+ repo = "https://hg.mozilla.org/mozilla-central"
+ # Always use tip as the tag for nightly; this isn't quite right
+ # but to do better we need the actual build revision, which we
+ # can get if we have an application.ini file
+ tag = "tip"
+
+ return "%s/archive/%s.zip/testing/profiles/" % (repo, tag)
+
+ def install_prefs(self, binary, dest=None, channel=None):
+ if binary:
+ version, channel_ = self.get_version_and_channel(binary)
+ if channel is not None and channel != channel_:
+ # Beta doesn't always seem to have the b in the version string, so allow the
+ # manually supplied value to override the one from the binary
+ self.logger.warning("Supplied channel doesn't match binary, using supplied channel")
+ elif channel is None:
+ channel = channel_
+ else:
+ version = None
+
+ if dest is None:
+ dest = os.curdir
+
+ dest = os.path.join(dest, "profiles", channel)
+ if version:
+ dest = os.path.join(dest, version)
+ have_cache = False
+ if os.path.exists(dest) and len(os.listdir(dest)) > 0:
+ if channel != "nightly":
+ have_cache = True
+ else:
+ now = datetime.now()
+ have_cache = (datetime.fromtimestamp(os.stat(dest).st_mtime) >
+ now - timedelta(days=1))
+
+ # If we don't have a recent download, grab and extract the latest one
+ if not have_cache:
+ if os.path.exists(dest):
+ rmtree(dest)
+ os.makedirs(dest)
+
+ url = self.get_profile_bundle_url(version, channel)
+
+ self.logger.info("Installing test prefs from %s" % url)
+ try:
+ extract_dir = tempfile.mkdtemp()
+ unzip(get(url).raw, dest=extract_dir)
+
+ profiles = os.path.join(extract_dir, os.listdir(extract_dir)[0], 'testing', 'profiles')
+ for name in os.listdir(profiles):
+ path = os.path.join(profiles, name)
+ shutil.move(path, dest)
+ finally:
+ rmtree(extract_dir)
+ else:
+ self.logger.info("Using cached test prefs from %s" % dest)
+
+ return dest
+
+ def _latest_geckodriver_version(self):
+ """Get and return latest version number for geckodriver."""
+ # This is used rather than an API call to avoid rate limits
+ tags = call("git", "ls-remote", "--tags", "--refs",
+ "https://github.com/mozilla/geckodriver.git")
+ release_re = re.compile(r".*refs/tags/v(\d+)\.(\d+)\.(\d+)")
+ latest_release = (0, 0, 0)
+ for item in tags.split("\n"):
+ m = release_re.match(item)
+ if m:
+ version = tuple(int(item) for item in m.groups())
+ if version > latest_release:
+ latest_release = version
+ assert latest_release != (0, 0, 0)
+ return "v%s.%s.%s" % tuple(str(item) for item in latest_release)
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ """Install latest Geckodriver."""
+ if dest is None:
+ dest = os.getcwd()
+
+ path = None
+ if channel == "nightly":
+ path = self.install_geckodriver_nightly(dest)
+ if path is None:
+ self.logger.warning("Nightly webdriver not found; falling back to release")
+
+ if path is None:
+ version = self._latest_geckodriver_version()
+ format = "zip" if uname[0] == "Windows" else "tar.gz"
+ self.logger.debug("Latest geckodriver release %s" % version)
+ url = ("https://github.com/mozilla/geckodriver/releases/download/%s/geckodriver-%s-%s.%s" %
+ (version, version, self.platform_string_geckodriver(), format))
+ if format == "zip":
+ unzip(get(url).raw, dest=dest)
+ else:
+ untar(get(url).raw, dest=dest)
+ path = find_executable(os.path.join(dest, "geckodriver"))
+
+ assert path is not None
+ self.logger.info("Installed %s" %
+ subprocess.check_output([path, "--version"]).splitlines()[0])
+ return path
+
+ def install_geckodriver_nightly(self, dest):
+ self.logger.info("Attempting to install webdriver from nightly")
+
+ platform_bits = ("64" if uname[4] == "x86_64" else
+ ("32" if self.platform == "win" else ""))
+ tc_platform = "%s%s" % (self.platform, platform_bits)
+
+ archive_ext = ".zip" if uname[0] == "Windows" else ".tar.gz"
+ archive_name = "public/build/geckodriver%s" % archive_ext
+
+ try:
+ resp = get_taskcluster_artifact(
+ "gecko.v2.mozilla-central.latest.geckodriver.%s" % tc_platform,
+ archive_name)
+ except Exception:
+ self.logger.info("Geckodriver download failed")
+ return
+
+ if archive_ext == ".zip":
+ unzip(resp.raw, dest)
+ else:
+ untar(resp.raw, dest)
+
+ exe_ext = ".exe" if uname[0] == "Windows" else ""
+ path = os.path.join(dest, "geckodriver%s" % exe_ext)
+
+ self.logger.info("Extracted geckodriver to %s" % path)
+
+ return path
+
+ def version(self, binary=None, webdriver_binary=None):
+ """Retrieve the release version of the installed browser."""
+ version_string = call(binary, "--version").strip()
+ m = re.match(r"Mozilla Firefox (.*)", version_string)
+ if not m:
+ return None
+ return m.group(1)
+
+
+class FirefoxAndroid(Browser):
+ """Android-specific Firefox interface."""
+
+ product = "firefox_android"
+ requirements = "requirements_firefox.txt"
+
+ def __init__(self, logger):
+ super().__init__(logger)
+ self.apk_path = None
+
+ def download(self, dest=None, channel=None, rename=None):
+ if dest is None:
+            dest = os.getcwd()
+
+ resp = get_taskcluster_artifact(
+ "gecko.v2.mozilla-central.latest.mobile.android-x86_64-opt",
+ "public/build/geckoview-androidTest.apk")
+
+ filename = "geckoview-androidTest.apk"
+ if rename:
+ filename = "%s%s" % (rename, get_ext(filename)[1])
+ self.apk_path = os.path.join(dest, filename)
+
+ with open(self.apk_path, "wb") as f:
+ f.write(resp.content)
+
+ return self.apk_path
+
+ def install(self, dest=None, channel=None):
+ return self.download(dest, channel)
+
+ def install_prefs(self, binary, dest=None, channel=None):
+ fx_browser = Firefox(self.logger)
+ return fx_browser.install_prefs(binary, dest, channel)
+
+ def find_binary(self, venv_path=None, channel=None):
+ return self.apk_path
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ raise NotImplementedError
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ raise NotImplementedError
+
+ def version(self, binary=None, webdriver_binary=None):
+ return None
+
+
+class ChromeChromiumBase(Browser):
+ """
+ Chrome/Chromium base Browser class for shared functionality between Chrome and Chromium
+
+ For a detailed description on the installation and detection of these browser components,
+ see https://web-platform-tests.org/running-tests/chrome-chromium-installation-detection.html
+ """
+
+ requirements = "requirements_chromium.txt"
+ platform = {
+ "Linux": "Linux",
+ "Windows": "Win",
+ "Darwin": "Mac",
+ }.get(uname[0])
+
+ def _build_snapshots_url(self, revision, filename):
+ return ("https://storage.googleapis.com/chromium-browser-snapshots/"
+ f"{self._chromium_platform_string}/{revision}/{filename}")
+
+ def _get_latest_chromium_revision(self):
+ """Returns latest Chromium revision available for download."""
+ # This is only used if the user explicitly passes "latest" for the revision flag.
+ # The pinned revision is used by default to avoid unexpected failures as versions update.
+ revision_url = ("https://storage.googleapis.com/chromium-browser-snapshots/"
+ f"{self._chromium_platform_string}/LAST_CHANGE")
+ return get(revision_url).text.strip()
+
+ def _get_pinned_chromium_revision(self):
+ """Returns the pinned Chromium revision number."""
+ return get("https://storage.googleapis.com/wpt-versions/pinned_chromium_revision").text.strip()
+
+ def _get_chromium_revision(self, filename=None, version=None):
+ """Retrieve a valid Chromium revision to download a browser component."""
+
+ # If a specific version is passed as an argument, we will use it.
+ if version is not None:
+ # Detect a revision number based on the version passed.
+ revision = self._get_base_revision_from_version(version)
+ if revision is not None:
+ # File name is needed to test if request is valid.
+ url = self._build_snapshots_url(revision, filename)
+ try:
+ # Check the status without downloading the content (this is a streaming request).
+ get(url)
+ return revision
+ except requests.RequestException:
+ self.logger.warning("404: Unsuccessful attempt to download file "
+ f"based on version. {url}")
+ # If no URL was used in a previous install
+ # and no version was passed, use the pinned Chromium revision.
+ revision = self._get_pinned_chromium_revision()
+
+ # If the url is successfully used to download/install, it will be used again
+ # if another component is also installed during this run (browser/webdriver).
+ return revision
+
+ def _get_base_revision_from_version(self, version):
+ """Get a Chromium revision number that is associated with a given version."""
+ # This is not the single revision associated with the version,
+        # but instead is where it branched from. Chromium revisions simply count
+        # commits on the master branch; there are no Chromium revisions for branches.
+
+ version = self._remove_version_suffix(version)
+
+ # Try to find the Chromium build with the same revision.
+ try:
+ omaha = get(f"https://omahaproxy.appspot.com/deps.json?version={version}").json()
+ detected_revision = omaha['chromium_base_position']
+ return detected_revision
+ except requests.RequestException:
+ self.logger.debug("Unsuccessful attempt to detect revision based on version")
+ return None
+
+ def _remove_existing_chromedriver_binary(self, path):
+ """Remove an existing ChromeDriver for this product if it exists
+ in the virtual environment.
+ """
+ # There may be an existing chromedriver binary from a previous install.
+ # To provide a clean install experience, remove the old binary - this
+ # avoids tricky issues like unzipping over a read-only file.
+ existing_chromedriver_path = find_executable("chromedriver", path)
+ if existing_chromedriver_path:
+ self.logger.info(f"Removing existing ChromeDriver binary: {existing_chromedriver_path}")
+ os.chmod(existing_chromedriver_path, stat.S_IWUSR)
+ os.remove(existing_chromedriver_path)
+
+ def _remove_version_suffix(self, version):
+ """Removes channel suffixes from Chrome/Chromium version string (e.g. " dev")."""
+ return version.split(' ')[0]
+
+ @property
+ def _chromedriver_platform_string(self):
+ """Returns a string that represents the suffix of the ChromeDriver
+ file name when downloaded from Chromium Snapshots.
+ """
+ if self.platform == "Linux":
+ bits = "64" if uname[4] == "x86_64" else "32"
+ elif self.platform == "Mac":
+ bits = "64"
+ elif self.platform == "Win":
+ bits = "32"
+ return f"{self.platform.lower()}{bits}"
+
+ @property
+ def _chromium_platform_string(self):
+ """Returns a string that is used for the platform directory in Chromium Snapshots"""
+ if (self.platform == "Linux" or self.platform == "Win") and uname[4] == "x86_64":
+ return f"{self.platform}_x64"
+ if self.platform == "Mac" and uname.machine == "arm64":
+ return "Mac_Arm"
+ return self.platform
+
+ def find_webdriver(self, venv_path=None, channel=None, browser_binary=None):
+ if venv_path:
+ venv_path = os.path.join(venv_path, self.product)
+ return find_executable("chromedriver", path=venv_path)
+
+ def install_mojojs(self, dest, browser_binary):
+ """Install MojoJS web framework."""
+ # MojoJS is platform agnostic, but the version number must be an
+ # exact match of the Chrome/Chromium version to be compatible.
+ chrome_version = self.version(binary=browser_binary)
+ if not chrome_version:
+ return None
+ chrome_version = self._remove_version_suffix(chrome_version)
+
+ try:
+ # MojoJS version url must match the browser binary version exactly.
+ url = ("https://storage.googleapis.com/chrome-wpt-mojom/"
+ f"{chrome_version}/linux64/mojojs.zip")
+ # Check the status without downloading the content (this is a streaming request).
+ get(url)
+ except requests.RequestException:
+ # If a valid matching version cannot be found in the wpt archive,
+ # download from Chromium snapshots bucket. However,
+ # MojoJS is only bundled with Linux from Chromium snapshots.
+ if self.platform == "Linux":
+ filename = "mojojs.zip"
+ revision = self._get_chromium_revision(filename, chrome_version)
+ url = self._build_snapshots_url(revision, filename)
+ else:
+ self.logger.error("A valid MojoJS version cannot be found "
+ f"for browser binary version {chrome_version}.")
+ return None
+
+ extracted = os.path.join(dest, "mojojs", "gen")
+ last_url_file = os.path.join(extracted, "DOWNLOADED_FROM")
+ if os.path.exists(last_url_file):
+ with open(last_url_file, "rt") as f:
+ last_url = f.read().strip()
+ if last_url == url:
+ self.logger.info("Mojo bindings already up to date")
+ return extracted
+ rmtree(extracted)
+
+ try:
+ self.logger.info(f"Downloading Mojo bindings from {url}")
+ unzip(get(url).raw, dest)
+ with open(last_url_file, "wt") as f:
+ f.write(url)
+ return extracted
+ except Exception as e:
+ self.logger.error(f"Cannot enable MojoJS: {e}")
+ return None
+
+ def install_webdriver_by_version(self, version, dest, revision=None):
+ dest = os.path.join(dest, self.product)
+ self._remove_existing_chromedriver_binary(dest)
+ # _get_webdriver_url is implemented differently for Chrome and Chromium because
+ # they download their respective versions of ChromeDriver from different sources.
+ url = self._get_webdriver_url(version, revision)
+ self.logger.info(f"Downloading ChromeDriver from {url}")
+ unzip(get(url).raw, dest)
+
+ # The two sources of ChromeDriver have different zip structures:
+ # * Chromium archives the binary inside a chromedriver_* directory;
+ # * Chrome archives the binary directly.
+ # We want to make sure the binary always ends up directly in bin/.
+ chromedriver_dir = os.path.join(dest,
+ f"chromedriver_{self._chromedriver_platform_string}")
+ chromedriver_path = find_executable("chromedriver", chromedriver_dir)
+ if chromedriver_path is not None:
+ shutil.move(chromedriver_path, dest)
+ rmtree(chromedriver_dir)
+
+ chromedriver_path = find_executable("chromedriver", dest)
+ assert chromedriver_path is not None
+ return chromedriver_path
+
+ def version(self, binary=None, webdriver_binary=None):
+ if not binary:
+ self.logger.warning("No browser binary provided.")
+ return None
+
+ if uname[0] == "Windows":
+ return _get_fileversion(binary, self.logger)
+
+ try:
+ version_string = call(binary, "--version").strip()
+ except (subprocess.CalledProcessError, OSError) as e:
+ self.logger.warning(f"Failed to call {binary}: {e}")
+ return None
+ m = re.match(r"(?:Google Chrome|Chromium) (.*)", version_string)
+ if not m:
+ self.logger.warning(f"Failed to extract version from: {version_string}")
+ return None
+ return m.group(1)
+
+ def webdriver_version(self, webdriver_binary):
+ if webdriver_binary is None:
+ self.logger.warning("No valid webdriver supplied to detect version.")
+ return None
+
+ try:
+ version_string = call(webdriver_binary, "--version").strip()
+ except (subprocess.CalledProcessError, OSError) as e:
+ self.logger.warning(f"Failed to call {webdriver_binary}: {e}")
+ return None
+ m = re.match(r"ChromeDriver ([0-9][0-9.]*)", version_string)
+ if not m:
+ self.logger.warning(f"Failed to extract version from: {version_string}")
+ return None
+ return m.group(1)
+
+
+class Chromium(ChromeChromiumBase):
+ """Chromium-specific interface.
+
+ Includes browser binary installation and detection.
+ Webdriver installation and wptrunner setup shared in base class with Chrome
+
+ For a detailed description on the installation and detection of these browser components,
+ see https://web-platform-tests.org/running-tests/chrome-chromium-installation-detection.html
+ """
+ product = "chromium"
+
+ @property
+ def _chromium_package_name(self):
+ return f"chrome-{self.platform.lower()}"
+
+ def _get_existing_browser_revision(self, venv_path, channel):
+ revision = None
+ try:
+ # A file referencing the revision number is saved with the binary.
+ # Check if this revision number exists and use it if it does.
+ path = os.path.join(self._get_browser_binary_dir(None, channel), "revision")
+ with open(path) as f:
+ revision = f.read().strip()
+ except FileNotFoundError:
+ # If there is no information about the revision downloaded,
+ # use the pinned revision.
+ revision = self._get_pinned_chromium_revision()
+ return revision
+
+ def _find_binary_in_directory(self, directory):
+ """Search for Chromium browser binary in a given directory."""
+ if uname[0] == "Darwin":
+ return find_executable("Chromium", os.path.join(directory,
+ self._chromium_package_name,
+ "Chromium.app",
+ "Contents",
+ "MacOS"))
+ # find_executable will add .exe on Windows automatically.
+ return find_executable("chrome", os.path.join(directory, self._chromium_package_name))
+
+ def _get_webdriver_url(self, version, revision=None):
+ """Get Chromium Snapshots url to download Chromium ChromeDriver."""
+ filename = f"chromedriver_{self._chromedriver_platform_string}.zip"
+
+ # Make sure we use the same revision in an invocation.
+ # If we have a url that was last used successfully during this run,
+ # that url takes priority over trying to form another.
+ if hasattr(self, "last_revision_used") and self.last_revision_used is not None:
+ return self._build_snapshots_url(self.last_revision_used, filename)
+ if revision is None:
+ revision = self._get_chromium_revision(filename, version)
+ elif revision == "latest":
+ revision = self._get_latest_chromium_revision()
+ elif revision == "pinned":
+ revision = self._get_pinned_chromium_revision()
+
+ return self._build_snapshots_url(revision, filename)
+
+ def download(self, dest=None, channel=None, rename=None, version=None, revision=None):
+ if dest is None:
+ dest = self._get_browser_binary_dir(None, channel)
+
+ filename = f"{self._chromium_package_name}.zip"
+
+ if revision is None:
+ revision = self._get_chromium_revision(filename, version)
+ elif revision == "latest":
+ revision = self._get_latest_chromium_revision()
+ elif revision == "pinned":
+ revision = self._get_pinned_chromium_revision()
+
+ url = self._build_snapshots_url(revision, filename)
+ self.logger.info(f"Downloading Chromium from {url}")
+ resp = get(url)
+ installer_path = os.path.join(dest, filename)
+ with open(installer_path, "wb") as f:
+ f.write(resp.content)
+
+ # Revision successfully used. Keep this revision if another component install is needed.
+ self.last_revision_used = revision
+ with open(os.path.join(dest, "revision"), "w") as f:
+ f.write(revision)
+ return installer_path
+
+ def find_binary(self, venv_path=None, channel=None):
+ return self._find_binary_in_directory(self._get_browser_binary_dir(venv_path, channel))
+
+ def install(self, dest=None, channel=None, version=None, revision=None):
+ dest = self._get_browser_binary_dir(dest, channel)
+ installer_path = self.download(dest, channel, version=version, revision=revision)
+ with open(installer_path, "rb") as f:
+ unzip(f, dest)
+ os.remove(installer_path)
+ return self._find_binary_in_directory(dest)
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None, revision=None):
+ if dest is None:
+            dest = os.getcwd()
+
+ if revision is None:
+ # If a revision was not given, we will need to detect the browser version.
+ # The ChromeDriver that is installed will match this version.
+ revision = self._get_existing_browser_revision(dest, channel)
+
+ chromedriver_path = self.install_webdriver_by_version(None, dest, revision)
+
+ return chromedriver_path
+
+ def webdriver_supports_browser(self, webdriver_binary, browser_binary, browser_channel=None):
+ """Check that the browser binary and ChromeDriver versions are a valid match."""
+ browser_version = self.version(browser_binary)
+ chromedriver_version = self.webdriver_version(webdriver_binary)
+
+ if not chromedriver_version:
+ self.logger.warning("Unable to get version for ChromeDriver "
+ f"{webdriver_binary}, rejecting it")
+ return False
+
+ if not browser_version:
+ # If we can't get the browser version,
+ # we just have to assume the ChromeDriver is good.
+ return True
+
+ # Because Chromium and its ChromeDriver should be pulled from the
+ # same revision number, their version numbers should match exactly.
+ if browser_version == chromedriver_version:
+ self.logger.debug("Browser and ChromeDriver versions match.")
+ return True
+ self.logger.warning(f"ChromeDriver version {chromedriver_version} does not match "
+ f"Chromium version {browser_version}.")
+ return False
+
+
+class Chrome(ChromeChromiumBase):
+ """Chrome-specific interface.
+
+ Includes browser binary installation and detection.
+ Webdriver installation and wptrunner setup shared in base class with Chromium.
+
+ For a detailed description on the installation and detection of these browser components,
+ see https://web-platform-tests.org/running-tests/chrome-chromium-installation-detection.html
+ """
+
+ product = "chrome"
+
+ @property
+ def _chromedriver_api_platform_string(self):
+ """chromedriver.storage.googleapis.com has a different filename for M1 binary,
+ while the snapshot URL has a different directory but the same filename."""
+ if self.platform == "Mac" and uname.machine == "arm64":
+ return "mac_arm64"
+ return self._chromedriver_platform_string
+
+ def _get_webdriver_url(self, version, revision=None):
+ """Get a ChromeDriver API URL to download a version of ChromeDriver that matches
+ the browser binary version. Version selection is described here:
+ https://chromedriver.chromium.org/downloads/version-selection"""
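+        # Worked example (hypothetical version string): for a browser version
+        # of "104.0.5112.79" the first lookup tried below is
+        # .../LATEST_RELEASE_104.0.5112, then .../LATEST_RELEASE_104, and
+        # finally a Chromium snapshot URL if neither lookup succeeds.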
+ filename = f"chromedriver_{self._chromedriver_api_platform_string}.zip"
+
+ version = self._remove_version_suffix(version)
+
+ parts = version.split(".")
+ assert len(parts) == 4
+ latest_url = ("https://chromedriver.storage.googleapis.com/LATEST_RELEASE_"
+ f"{'.'.join(parts[:-1])}")
+ try:
+ latest = get(latest_url).text.strip()
+ except requests.RequestException:
+ latest_url = f"https://chromedriver.storage.googleapis.com/LATEST_RELEASE_{parts[0]}"
+ try:
+ latest = get(latest_url).text.strip()
+ except requests.RequestException:
+ # We currently use the latest Chromium revision to get a compatible Chromedriver
+ # version for Chrome Dev, since it is not available through the ChromeDriver API.
+ # If we've gotten to this point, it is assumed that this is Chrome Dev.
+ filename = f"chromedriver_{self._chromedriver_platform_string}.zip"
+ revision = self._get_chromium_revision(filename, version)
+ return self._build_snapshots_url(revision, filename)
+ return f"https://chromedriver.storage.googleapis.com/{latest}/{filename}"
+
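
As an illustration of the version-selection scheme referenced above, here is a minimal standalone sketch (not part of the wpt tooling; it assumes only the third-party requests package and network access) that queries the LATEST_RELEASE_{MAJOR.MINOR.BUILD} endpoint for a full browser version string:

    import requests

    def latest_chromedriver_for(browser_version):
        """Return the ChromeDriver version recommended for a full browser
        version such as '104.0.5112.79', following the documented
        LATEST_RELEASE_{MAJOR}.{MINOR}.{BUILD} scheme."""
        major, minor, build, _patch = browser_version.split(".")
        url = ("https://chromedriver.storage.googleapis.com/"
               f"LATEST_RELEASE_{major}.{minor}.{build}")
        resp = requests.get(url)
        resp.raise_for_status()
        return resp.text.strip()

    # Requires network access:
    # print(latest_chromedriver_for("104.0.5112.79"))
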
+ def download(self, dest=None, channel=None, rename=None):
+ raise NotImplementedError("Downloading of Chrome browser binary not implemented.")
+
+ def find_binary(self, venv_path=None, channel=None):
+ if uname[0] == "Linux":
+ name = "google-chrome"
+ if channel == "stable":
+ name += "-stable"
+ elif channel == "beta":
+ name += "-beta"
+ elif channel == "dev":
+ name += "-unstable"
+ # No Canary on Linux.
+ return find_executable(name)
+ if uname[0] == "Darwin":
+ suffix = ""
+ if channel in ("beta", "dev", "canary"):
+ suffix = " " + channel.capitalize()
+ return f"/Applications/Google Chrome{suffix}.app/Contents/MacOS/Google Chrome{suffix}"
+ if uname[0] == "Windows":
+ name = "Chrome"
+ if channel == "beta":
+ name += " Beta"
+ elif channel == "dev":
+ name += " Dev"
+ path = os.path.expandvars(fr"$PROGRAMFILES\Google\{name}\Application\chrome.exe")
+ if channel == "canary":
+ path = os.path.expandvars(r"$LOCALAPPDATA\Google\Chrome SxS\Application\chrome.exe")
+ return path
+ self.logger.warning("Unable to find the browser binary.")
+ return None
+
+ def install(self, dest=None, channel=None):
+ raise NotImplementedError("Installing of Chrome browser binary not implemented.")
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None, revision=None):
+ if dest is None:
+            dest = os.getcwd()
+
+ # Detect the browser version.
+ # The ChromeDriver that is installed will match this version.
+ if browser_binary is None:
+ # If a browser binary path was not given, detect a valid path.
+ browser_binary = self.find_binary(channel=channel)
+ # We need a browser to version match, so if a browser binary path
+ # was not given and cannot be detected, raise an error.
+ if browser_binary is None:
+ raise FileNotFoundError("No browser binary detected. "
+ "Cannot install ChromeDriver without a browser version.")
+
+ version = self.version(browser_binary)
+ if version is None:
+ raise ValueError(f"Unable to detect browser version from binary at {browser_binary}. "
+ " Cannot install ChromeDriver without a valid version to match.")
+
+ chromedriver_path = self.install_webdriver_by_version(version, dest, revision)
+
+ return chromedriver_path
+
+ def webdriver_supports_browser(self, webdriver_binary, browser_binary, browser_channel):
+ """Check that the browser binary and ChromeDriver versions are a valid match."""
+ # TODO(DanielRyanSmith): The procedure for matching the browser and ChromeDriver
+ # versions here is too loose. More strict rules for version matching
+ # should be in place. (#33231)
+ chromedriver_version = self.webdriver_version(webdriver_binary)
+ if not chromedriver_version:
+ self.logger.warning("Unable to get version for ChromeDriver "
+ f"{webdriver_binary}, rejecting it")
+ return False
+
+ browser_version = self.version(browser_binary)
+ if not browser_version:
+ # If we can't get the browser version,
+ # we just have to assume the ChromeDriver is good.
+ return True
+
+ # Check that the ChromeDriver version matches the Chrome version.
+ chromedriver_major = int(chromedriver_version.split('.')[0])
+ browser_major = int(browser_version.split('.')[0])
+ if chromedriver_major != browser_major:
+ # There is no official ChromeDriver release for the dev channel -
+ # it switches between beta and tip-of-tree, so we accept version+1
+ # too for dev.
+ if browser_channel == "dev" and chromedriver_major == (browser_major + 1):
+ self.logger.debug(f"Accepting ChromeDriver {chromedriver_version} "
+ f"for Chrome/Chromium Dev {browser_version}")
+ return True
+ self.logger.warning(f"ChromeDriver {chromedriver_version} does not match "
+ f"Chrome/Chromium {browser_version}")
+ return False
+ return True
+
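
The matching rule implemented above reduces to a small pure function; the sketch below is illustrative only (the helper name and version strings are made up, not part of the wpt API):

    def versions_compatible(chromedriver_version, browser_version, browser_channel=None):
        driver_major = int(chromedriver_version.split(".")[0])
        browser_major = int(browser_version.split(".")[0])
        if driver_major == browser_major:
            return True
        # Dev has no dedicated ChromeDriver release, so one major version ahead is accepted.
        return browser_channel == "dev" and driver_major == browser_major + 1

    assert versions_compatible("104.0.5112.20", "104.0.5112.79")
    assert versions_compatible("105.0.5195.19", "104.0.5112.79", "dev")
    assert not versions_compatible("103.0.5060.53", "104.0.5112.79")
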
+
+class ContentShell(Browser):
+ """Interface for the Chromium content shell.
+ """
+
+ product = "content_shell"
+ requirements = None
+
+ def download(self, dest=None, channel=None, rename=None):
+ raise NotImplementedError
+
+ def install(self, dest=None, channel=None):
+ raise NotImplementedError
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ raise NotImplementedError
+
+ def find_binary(self, venv_path=None, channel=None):
+ if uname[0] == "Darwin":
+ return find_executable("Content Shell.app/Contents/MacOS/Content Shell")
+ return find_executable("content_shell") # .exe is added automatically for Windows
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ return None
+
+ def version(self, binary=None, webdriver_binary=None):
+ # content_shell does not return version information.
+ return "N/A"
+
+class ChromeAndroidBase(Browser, metaclass=ABCMeta):
+ """A base class for ChromeAndroid and AndroidWebView.
+
+ On Android, WebView is based on Chromium open source project, and on some
+ versions of Android we share the library with Chrome. Therefore, we have
+ a very similar WPT runner implementation.
+ Includes webdriver installation.
+ """
+
+ def __init__(self, logger):
+ super().__init__(logger)
+ self.device_serial = None
+ self.adb_binary = "adb"
+
+ def download(self, dest=None, channel=None, rename=None):
+ raise NotImplementedError
+
+ def install(self, dest=None, channel=None):
+ raise NotImplementedError
+
+ @abstractmethod
+ def find_binary(self, venv_path=None, channel=None):
+ raise NotImplementedError
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ return find_executable("chromedriver")
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ if browser_binary is None:
+            browser_binary = self.find_binary(channel=channel)
+ chrome = Chrome(self.logger)
+ return chrome.install_webdriver_by_version(self.version(browser_binary), dest)
+
+ def version(self, binary=None, webdriver_binary=None):
+ if not binary:
+ self.logger.warning("No package name provided.")
+ return None
+
+ command = [self.adb_binary]
+ if self.device_serial:
+            # Assume we have the same version of the browser on all devices
+ command.extend(['-s', self.device_serial[0]])
+ command.extend(['shell', 'dumpsys', 'package', binary])
+ try:
+ output = call(*command)
+ except (subprocess.CalledProcessError, OSError):
+ self.logger.warning("Failed to call %s" % " ".join(command))
+ return None
+ match = re.search(r'versionName=(.*)', output)
+ if not match:
+ self.logger.warning("Failed to find versionName")
+ return None
+ return match.group(1)
+
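
For reference, the versionName extraction above behaves like this against canned dumpsys output (the sample text is made up rather than captured from a device):

    import re

    sample_output = """Packages:
      Package [com.android.chrome] (1234abcd):
        versionCode=510411634 minSdk=29 targetSdk=33
        versionName=104.0.5112.97
    """

    match = re.search(r"versionName=(.*)", sample_output)
    print(match.group(1) if match else None)  # -> 104.0.5112.97
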
+
+class ChromeAndroid(ChromeAndroidBase):
+ """Chrome-specific interface for Android.
+ """
+
+ product = "chrome_android"
+ requirements = "requirements_chromium.txt"
+
+ def find_binary(self, venv_path=None, channel=None):
+ if channel in ("beta", "dev", "canary"):
+ return "com.chrome." + channel
+ return "com.android.chrome"
+
+
+# TODO(aluo): This is largely copied from the AndroidWebView implementation.
+# Tests are not running for weblayer yet (crbug/1019521), this initial
+# implementation will help to reproduce and debug any issues.
+class AndroidWeblayer(ChromeAndroidBase):
+ """Weblayer-specific interface for Android."""
+
+ product = "android_weblayer"
+ # TODO(aluo): replace this with weblayer version after tests are working.
+ requirements = "requirements_chromium.txt"
+
+ def find_binary(self, venv_path=None, channel=None):
+ return "org.chromium.weblayer.shell"
+
+
+class AndroidWebview(ChromeAndroidBase):
+ """Webview-specific interface for Android.
+
+ Design doc:
+ https://docs.google.com/document/d/19cGz31lzCBdpbtSC92svXlhlhn68hrsVwSB7cfZt54o/view
+ """
+
+ product = "android_webview"
+ requirements = "requirements_chromium.txt"
+
+ def find_binary(self, venv_path=None, channel=None):
+ # Just get the current package name of the WebView provider.
+ # For WebView, it is not trivial to change the WebView provider, so
+ # we will just grab whatever is available.
+ # https://chromium.googlesource.com/chromium/src/+/HEAD/android_webview/docs/channels.md
+ command = [self.adb_binary]
+ if self.device_serial:
+ command.extend(['-s', self.device_serial[0]])
+ command.extend(['shell', 'dumpsys', 'webviewupdate'])
+ try:
+ output = call(*command)
+ except (subprocess.CalledProcessError, OSError):
+ self.logger.warning("Failed to call %s" % " ".join(command))
+ return None
+ m = re.search(r'^\s*Current WebView package \(name, version\): \((.*), ([0-9.]*)\)$',
+ output, re.M)
+ if m is None:
+ self.logger.warning("Unable to find current WebView package in dumpsys output")
+ return None
+ self.logger.warning("Final package name: " + m.group(1))
+ return m.group(1)
+
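
Similarly, the WebView provider lookup above boils down to one regular expression over dumpsys output; a standalone sketch with an illustrative sample line:

    import re

    sample = ("  Current WebView package (name, version): "
              "(com.google.android.webview, 104.0.5112.97)")
    m = re.search(r"^\s*Current WebView package \(name, version\): \((.*), ([0-9.]*)\)$",
                  sample, re.M)
    print(m.group(1))  # -> com.google.android.webview
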
+
+class ChromeiOS(Browser):
+ """Chrome-specific interface for iOS.
+ """
+
+ product = "chrome_ios"
+ requirements = None
+
+ def download(self, dest=None, channel=None, rename=None):
+ raise NotImplementedError
+
+ def install(self, dest=None, channel=None):
+ raise NotImplementedError
+
+ def find_binary(self, venv_path=None, channel=None):
+ raise NotImplementedError
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ raise NotImplementedError
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ raise NotImplementedError
+
+ def version(self, binary=None, webdriver_binary=None):
+ return None
+
+
+class Opera(Browser):
+ """Opera-specific interface.
+
+ Includes webdriver installation, and wptrunner setup methods.
+ """
+
+ product = "opera"
+ requirements = "requirements_opera.txt"
+
+ @property
+ def binary(self):
+ if uname[0] == "Linux":
+ return "/usr/bin/opera"
+ # TODO Windows, Mac?
+ self.logger.warning("Unable to find the browser binary.")
+ return None
+
+ def download(self, dest=None, channel=None, rename=None):
+ raise NotImplementedError
+
+ def install(self, dest=None, channel=None):
+ raise NotImplementedError
+
+ def platform_string(self):
+ platform = {
+ "Linux": "linux",
+ "Windows": "win",
+ "Darwin": "mac"
+ }.get(uname[0])
+
+ if platform is None:
+ raise ValueError("Unable to construct a valid Opera package name for current platform")
+
+ if platform == "linux":
+ bits = "64" if uname[4] == "x86_64" else "32"
+ elif platform == "mac":
+ bits = "64"
+ elif platform == "win":
+ bits = "32"
+
+ return "%s%s" % (platform, bits)
+
+ def find_binary(self, venv_path=None, channel=None):
+ raise NotImplementedError
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ return find_executable("operadriver")
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ if dest is None:
+            dest = os.getcwd()
+ latest = get("https://api.github.com/repos/operasoftware/operachromiumdriver/releases/latest").json()["tag_name"]
+ url = "https://github.com/operasoftware/operachromiumdriver/releases/download/%s/operadriver_%s.zip" % (latest,
+ self.platform_string())
+ unzip(get(url).raw, dest)
+
+ operadriver_dir = os.path.join(dest, "operadriver_%s" % self.platform_string())
+ shutil.move(os.path.join(operadriver_dir, "operadriver"), dest)
+ rmtree(operadriver_dir)
+
+ path = find_executable("operadriver")
+ st = os.stat(path)
+ os.chmod(path, st.st_mode | stat.S_IEXEC)
+ return path
+
+ def version(self, binary=None, webdriver_binary=None):
+ """Retrieve the release version of the installed browser."""
+ binary = binary or self.binary
+ try:
+ output = call(binary, "--version")
+ except subprocess.CalledProcessError:
+ self.logger.warning("Failed to call %s" % binary)
+ return None
+ m = re.search(r"[0-9\.]+( [a-z]+)?$", output.strip())
+ if m:
+ return m.group(0)
+
+
+class EdgeChromium(Browser):
+ """MicrosoftEdge-specific interface."""
+ platform = {
+ "Linux": "linux",
+ "Windows": "win",
+ "Darwin": "macos"
+ }.get(uname[0])
+ product = "edgechromium"
+ edgedriver_name = "msedgedriver"
+ requirements = "requirements_chromium.txt"
+
+ def download(self, dest=None, channel=None, rename=None):
+ raise NotImplementedError
+
+ def install(self, dest=None, channel=None):
+ raise NotImplementedError
+
+ def find_binary(self, venv_path=None, channel=None):
+ self.logger.info(f'Finding Edge binary for channel {channel}')
+
+ if self.platform == "linux":
+ name = "microsoft-edge"
+ if channel == "stable":
+ name += "-stable"
+ elif channel == "beta":
+ name += "-beta"
+ elif channel == "dev":
+ name += "-dev"
+ # No Canary on Linux.
+ return find_executable(name)
+ if self.platform == "macos":
+ suffix = ""
+ if channel in ("beta", "dev", "canary"):
+ suffix = " " + channel.capitalize()
+ return f"/Applications/Microsoft Edge{suffix}.app/Contents/MacOS/Microsoft Edge{suffix}"
+ if self.platform == "win":
+ binaryname = "msedge"
+ if channel == "beta":
+ winpaths = [os.path.expandvars("$SYSTEMDRIVE\\Program Files\\Microsoft\\Edge Beta\\Application"),
+ os.path.expandvars("$SYSTEMDRIVE\\Program Files (x86)\\Microsoft\\Edge Beta\\Application")]
+ return find_executable(binaryname, os.pathsep.join(winpaths))
+ elif channel == "dev":
+ winpaths = [os.path.expandvars("$SYSTEMDRIVE\\Program Files\\Microsoft\\Edge Dev\\Application"),
+ os.path.expandvars("$SYSTEMDRIVE\\Program Files (x86)\\Microsoft\\Edge Dev\\Application")]
+ return find_executable(binaryname, os.pathsep.join(winpaths))
+ elif channel == "canary":
+ winpaths = [os.path.expanduser("~\\AppData\\Local\\Microsoft\\Edge\\Application"),
+ os.path.expanduser("~\\AppData\\Local\\Microsoft\\Edge SxS\\Application")]
+ return find_executable(binaryname, os.pathsep.join(winpaths))
+ else:
+ winpaths = [os.path.expandvars("$SYSTEMDRIVE\\Program Files\\Microsoft\\Edge\\Application"),
+ os.path.expandvars("$SYSTEMDRIVE\\Program Files (x86)\\Microsoft\\Edge\\Application")]
+ return find_executable(binaryname, os.pathsep.join(winpaths))
+
+ self.logger.warning("Unable to find the browser binary.")
+ return None
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ return find_executable("msedgedriver")
+
+ def webdriver_supports_browser(self, webdriver_binary, browser_binary):
+ edgedriver_version = self.webdriver_version(webdriver_binary)
+ if not edgedriver_version:
+ self.logger.warning(
+ f"Unable to get version for EdgeDriver {webdriver_binary}, rejecting it")
+ return False
+
+ browser_version = self.version(browser_binary)
+ if not browser_version:
+ # If we can't get the browser version, we just have to assume the
+ # EdgeDriver is good.
+ return True
+
+ # Check that the EdgeDriver version matches the Edge version.
+ edgedriver_major = int(edgedriver_version.split('.')[0])
+ browser_major = int(browser_version.split('.')[0])
+ if edgedriver_major != browser_major:
+ self.logger.warning(
+ f"EdgeDriver {edgedriver_version} does not match Edge {browser_version}")
+ return False
+ return True
+
+ def install_webdriver_by_version(self, version, dest=None):
+ if dest is None:
+            dest = os.getcwd()
+
+ if self.platform == "linux":
+ bits = "linux64"
+ edgedriver_path = os.path.join(dest, self.edgedriver_name)
+ elif self.platform == "macos":
+ bits = "mac64"
+ edgedriver_path = os.path.join(dest, self.edgedriver_name)
+ else:
+ bits = "win64" if uname[4] == "x86_64" else "win32"
+ edgedriver_path = os.path.join(dest, f"{self.edgedriver_name}.exe")
+ url = f"https://msedgedriver.azureedge.net/{version}/edgedriver_{bits}.zip"
+
+ # cleanup existing Edge driver files to avoid access_denied errors when unzipping
+ if os.path.isfile(edgedriver_path):
+ # remove read-only attribute
+ os.chmod(edgedriver_path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) # 0777
+ print(f"Delete {edgedriver_path} file")
+ os.remove(edgedriver_path)
+ driver_notes_path = os.path.join(dest, "Driver_notes")
+ if os.path.isdir(driver_notes_path):
+ print(f"Delete {driver_notes_path} folder")
+ rmtree(driver_notes_path)
+
+ self.logger.info(f"Downloading MSEdgeDriver from {url}")
+ unzip(get(url).raw, dest)
+ if os.path.isfile(edgedriver_path):
+ self.logger.info(f"Successfully downloaded MSEdgeDriver to {edgedriver_path}")
+ return find_executable(self.edgedriver_name, dest)
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ self.logger.info(f"Installing MSEdgeDriver for channel {channel}")
+
+ if browser_binary is None:
+ browser_binary = self.find_binary(channel=channel)
+ else:
+ self.logger.info(f"Installing matching MSEdgeDriver for Edge binary at {browser_binary}")
+
+ version = self.version(browser_binary)
+
+ # If an exact version can't be found, use a suitable fallback based on
+ # the browser channel, if available.
+ if version is None:
+ platforms = {
+ "linux": "LINUX",
+ "macos": "MACOS",
+ "win": "WINDOWS"
+ }
+ if channel is None:
+ channel = "dev"
+ platform = platforms[self.platform]
+ suffix = f"{channel.upper()}_{platform}"
+ version_url = f"https://msedgedriver.azureedge.net/LATEST_{suffix}"
+ version = get(version_url).text.strip()
+
+ return self.install_webdriver_by_version(version, dest)
+
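
A minimal standalone sketch of the fallback version lookup above (it assumes only the requests package and network access; the channel/platform suffixes mirror the ones built in install_webdriver):

    import requests

    def latest_edgedriver_version(channel="dev", platform="LINUX"):
        """Fetch the MSEdgeDriver version advertised for a channel/platform pair,
        e.g. LATEST_DEV_LINUX or LATEST_STABLE_WINDOWS."""
        url = f"https://msedgedriver.azureedge.net/LATEST_{channel.upper()}_{platform}"
        resp = requests.get(url)
        resp.raise_for_status()
        return resp.text.strip()

    # Requires network access:
    # print(latest_edgedriver_version("stable", "WINDOWS"))
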
+ def version(self, binary=None, webdriver_binary=None):
+ if not binary:
+ self.logger.warning("No browser binary provided.")
+ return None
+
+ if self.platform == "win":
+ return _get_fileversion(binary, self.logger)
+
+ try:
+ version_string = call(binary, "--version").strip()
+ except (subprocess.CalledProcessError, OSError) as e:
+ self.logger.warning(f"Failed to call {binary}: {e}")
+ return None
+ m = re.match(r"Microsoft Edge ([0-9][0-9.]*)", version_string)
+ if not m:
+ self.logger.warning(f"Failed to extract version from: {version_string}")
+ return None
+ return m.group(1)
+
+ def webdriver_version(self, webdriver_binary):
+ if webdriver_binary is None:
+ self.logger.warning("No valid webdriver supplied to detect version.")
+ return None
+ if self.platform == "win":
+ return _get_fileversion(webdriver_binary, self.logger)
+
+ try:
+ version_string = call(webdriver_binary, "--version").strip()
+ except (subprocess.CalledProcessError, OSError) as e:
+ self.logger.warning(f"Failed to call {webdriver_binary}: {e}")
+ return None
+ m = re.match(r"Microsoft Edge WebDriver ([0-9][0-9.]*)", version_string)
+ if not m:
+ self.logger.warning(f"Failed to extract version from: {version_string}")
+ return None
+ return m.group(1)
+
+
+class Edge(Browser):
+ """Edge-specific interface."""
+
+ product = "edge"
+ requirements = "requirements_edge.txt"
+
+ def download(self, dest=None, channel=None, rename=None):
+ raise NotImplementedError
+
+ def install(self, dest=None, channel=None):
+ raise NotImplementedError
+
+ def find_binary(self, venv_path=None, channel=None):
+ raise NotImplementedError
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ return find_executable("MicrosoftWebDriver")
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ raise NotImplementedError
+
+ def version(self, binary=None, webdriver_binary=None):
+ command = "(Get-AppxPackage Microsoft.MicrosoftEdge).Version"
+ try:
+ return call("powershell.exe", command).strip()
+ except (subprocess.CalledProcessError, OSError):
+ self.logger.warning("Failed to call %s in PowerShell" % command)
+ return None
+
+
+class EdgeWebDriver(Edge):
+ product = "edge_webdriver"
+
+
+class InternetExplorer(Browser):
+ """Internet Explorer-specific interface."""
+
+ product = "ie"
+ requirements = "requirements_ie.txt"
+
+ def download(self, dest=None, channel=None, rename=None):
+ raise NotImplementedError
+
+ def install(self, dest=None, channel=None):
+ raise NotImplementedError
+
+ def find_binary(self, venv_path=None, channel=None):
+ raise NotImplementedError
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ return find_executable("IEDriverServer.exe")
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ raise NotImplementedError
+
+ def version(self, binary=None, webdriver_binary=None):
+ return None
+
+
+class Safari(Browser):
+ """Safari-specific interface.
+
+ Includes installation, webdriver installation, and wptrunner setup methods.
+ """
+
+ product = "safari"
+ requirements = "requirements_safari.txt"
+
+ def _find_downloads(self):
+ def text_content(e, __output=None):
+ # this doesn't use etree.tostring so that we can add spaces for p and br
+ if __output is None:
+ __output = []
+
+ if e.tag == "p":
+ __output.append("\n\n")
+
+ if e.tag == "br":
+ __output.append("\n")
+
+ if e.text is not None:
+ __output.append(e.text)
+
+ for child in e:
+ text_content(child, __output)
+ if child.tail is not None:
+ __output.append(child.tail)
+
+ return "".join(__output)
+
+ self.logger.info("Finding STP download URLs")
+ resp = get("https://developer.apple.com/safari/download/")
+
+ doc = html5lib.parse(
+ resp.content,
+ "etree",
+ namespaceHTMLElements=False,
+ transport_encoding=resp.encoding,
+ )
+ ascii_ws = re.compile(r"[\x09\x0A\x0C\x0D\x20]+")
+
+ downloads = []
+ for candidate in doc.iterfind(".//li[@class]"):
+ class_names = set(ascii_ws.split(candidate.attrib["class"]))
+ if {"download", "dmg", "zip"} & class_names:
+ downloads.append(candidate)
+
+ # Note we use \s throughout for space as we don't care what form the whitespace takes
+ stp_link_text = re.compile(
+ r"^\s*Safari\s+Technology\s+Preview\s+(?:[0-9]+\s+)?for\s+macOS"
+ )
+ requirement = re.compile(
+ r"""(?x) # (extended regexp syntax for comments)
+ ^\s*Requires\s+macOS\s+ # Starting with the magic string
+ ([0-9]+(?:\.[0-9]+)*) # A macOS version number of numbers and dots
+ (?:\s+beta(?:\s+[0-9]+)?)? # Optionally a beta, itself optionally with a number (no dots!)
+ (?:\s+or\s+later)? # Optionally an 'or later'
+ \.?\s*$ # Optionally ending with a literal dot
+ """
+ )
+
+ stp_downloads = []
+ for download in downloads:
+ for link in download.iterfind(".//a[@href]"):
+ if stp_link_text.search(text_content(link)):
+ break
+ else:
+ self.logger.debug("non-matching anchor: " + text_content(link))
+ else:
+ continue
+
+ for el in download.iter():
+ # avoid assuming any given element here, just assume it is a single element
+ m = requirement.search(text_content(el))
+ if m:
+ version = m.group(1)
+
+ # This assumes the current macOS numbering, whereby X.Y is compatible
+ # with X.(Y+1), e.g. 12.4 is compatible with 12.3, but 13.0 isn't
+ # compatible with 12.3.
+ if version.count(".") >= (2 if version.startswith("10.") else 1):
+ spec = SpecifierSet(f"~={version}")
+ else:
+ spec = SpecifierSet(f"=={version}.*")
+
+ stp_downloads.append((spec, link.attrib["href"].strip()))
+ break
+ else:
+ self.logger.debug(
+ "Found a link but no requirement: " + text_content(download)
+ )
+
+ if stp_downloads:
+ self.logger.info(
+ "Found STP URLs for macOS " +
+ ", ".join(str(dl[0]) for dl in stp_downloads)
+ )
+ else:
+ self.logger.warning("Did not find any STP URLs")
+
+ return stp_downloads
+
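
The macOS compatibility rule described in the comments above can be checked in isolation with packaging's SpecifierSet (the same dependency used above; the helper name below is hypothetical):

    from packaging.specifiers import SpecifierSet

    def spec_for(required_macos_version):
        # "12.3" -> "~=12.3" (accepts 12.3, 12.4, ... but not 13.0);
        # "13"   -> "==13.*" (accepts any 13.x release).
        if required_macos_version.count(".") >= (2 if required_macos_version.startswith("10.") else 1):
            return SpecifierSet(f"~={required_macos_version}")
        return SpecifierSet(f"=={required_macos_version}.*")

    assert "12.4" in spec_for("12.3")
    assert "13.0" not in spec_for("12.3")
    assert "13.2" in spec_for("13")
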
+ def _download_image(self, downloads, dest, system_version=None):
+ if system_version is None:
+ system_version, _, _ = platform.mac_ver()
+
+ chosen_url = None
+ for version_spec, url in downloads:
+ if system_version in version_spec:
+ self.logger.debug(f"Will download Safari for {version_spec}")
+ chosen_url = url
+ break
+
+ if chosen_url is None:
+ raise ValueError(f"no download for {system_version}")
+
+ self.logger.info(f"Downloading Safari from {chosen_url}")
+ resp = get(chosen_url)
+
+ filename = get_download_filename(resp, "SafariTechnologyPreview.dmg")
+ installer_path = os.path.join(dest, filename)
+ with open(installer_path, "wb") as f:
+ f.write(resp.content)
+
+ return installer_path
+
+ def _download_extract(self, image_path, dest, rename=None):
+ with tempfile.TemporaryDirectory() as tmpdir:
+ self.logger.debug(f"Mounting {image_path}")
+ r = subprocess.run(
+ [
+ "hdiutil",
+ "attach",
+ "-readonly",
+ "-mountpoint",
+ tmpdir,
+ "-nobrowse",
+ "-verify",
+ "-noignorebadchecksums",
+ "-autofsck",
+ image_path,
+ ],
+ encoding="utf-8",
+ capture_output=True,
+ check=True,
+ )
+
+ mountpoint = None
+ for line in r.stdout.splitlines():
+ if not line.startswith("/dev/"):
+ continue
+
+ _, _, mountpoint = line.split("\t", 2)
+ if mountpoint:
+ break
+
+ if mountpoint is None:
+ raise ValueError("no volume mounted from image")
+
+ pkgs = [p for p in os.listdir(mountpoint) if p.endswith((".pkg", ".mpkg"))]
+ if len(pkgs) != 1:
+ raise ValueError(
+ f"Expected a single .pkg/.mpkg, found {len(pkgs)}: {', '.join(pkgs)}"
+ )
+
+ source_path = os.path.join(mountpoint, pkgs[0])
+ dest_path = os.path.join(
+ dest, (rename + get_ext(pkgs[0])) if rename is not None else pkgs[0]
+ )
+
+ self.logger.debug(f"Copying {source_path} to {dest_path}")
+ shutil.copy2(
+ source_path,
+ dest_path,
+ )
+
+ self.logger.debug(f"Unmounting {mountpoint}")
+ subprocess.run(
+ ["hdiutil", "detach", mountpoint],
+ encoding="utf-8",
+ capture_output=True,
+ check=True,
+ )
+
+ return dest_path
+
+ def download(self, dest=None, channel="preview", rename=None, system_version=None):
+ if channel != "preview":
+ raise ValueError(f"can only install 'preview', not '{channel}'")
+
+ if dest is None:
+ dest = self._get_browser_binary_dir(None, channel)
+
+ stp_downloads = self._find_downloads()
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ image_path = self._download_image(stp_downloads, tmpdir, system_version)
+ return self._download_extract(image_path, dest, rename)
+
+ def install(self, dest=None, channel=None):
+ # We can't do this because stable/beta releases are system components and STP
+ # requires admin permissions to install.
+ raise NotImplementedError
+
+ def find_binary(self, venv_path=None, channel=None):
+ raise NotImplementedError
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ path = None
+ if channel == "preview":
+ path = "/Applications/Safari Technology Preview.app/Contents/MacOS"
+ return find_executable("safaridriver", path)
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ raise NotImplementedError
+
+ def version(self, binary=None, webdriver_binary=None):
+ if webdriver_binary is None:
+ self.logger.warning("Cannot find Safari version without safaridriver")
+ return None
+ # Use `safaridriver --version` to get the version. Example output:
+ # "Included with Safari 12.1 (14607.1.11)"
+ # "Included with Safari Technology Preview (Release 67, 13607.1.9.0.1)"
+ # The `--version` flag was added in STP 67, so allow the call to fail.
+ try:
+ version_string = call(webdriver_binary, "--version").strip()
+ except subprocess.CalledProcessError:
+ self.logger.warning("Failed to call %s --version" % webdriver_binary)
+ return None
+ m = re.match(r"Included with Safari (.*)", version_string)
+ if not m:
+ self.logger.warning("Failed to extract version from: %s" % version_string)
+ return None
+ return m.group(1)
+
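
The two sample outputs quoted in the comment above parse as follows (standalone sketch using only the standard library):

    import re

    for sample in ("Included with Safari 12.1 (14607.1.11)",
                   "Included with Safari Technology Preview (Release 67, 13607.1.9.0.1)"):
        m = re.match(r"Included with Safari (.*)", sample)
        print(m.group(1))
    # -> 12.1 (14607.1.11)
    # -> Technology Preview (Release 67, 13607.1.9.0.1)
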
+
+class Servo(Browser):
+ """Servo-specific interface."""
+
+ product = "servo"
+ requirements = None
+
+ def platform_components(self):
+ platform = {
+ "Linux": "linux",
+ "Windows": "win",
+ "Darwin": "mac"
+ }.get(uname[0])
+
+ if platform is None:
+ raise ValueError("Unable to construct a valid Servo package for current platform")
+
+ if platform == "linux":
+ extension = ".tar.gz"
+ decompress = untar
+ elif platform == "win" or platform == "mac":
+ raise ValueError("Unable to construct a valid Servo package for current platform")
+
+ return (platform, extension, decompress)
+
+ def _get(self, channel="nightly"):
+ if channel != "nightly":
+ raise ValueError("Only nightly versions of Servo are available")
+
+ platform, extension, _ = self.platform_components()
+ url = "https://download.servo.org/nightly/%s/servo-latest%s" % (platform, extension)
+ return get(url)
+
+ def download(self, dest=None, channel="nightly", rename=None):
+ if dest is None:
+            dest = os.getcwd()
+
+        resp = self._get(channel)
+        _, extension, _ = self.platform_components()
+
+        filename = rename if rename is not None else "servo-latest"
+        with open(os.path.join(dest, "%s%s" % (filename, extension)), "wb") as f:
+ f.write(resp.content)
+
+ def install(self, dest=None, channel="nightly"):
+        """Install the latest Servo nightly build."""
+        if dest is None:
+            dest = os.getcwd()
+
+ _, _, decompress = self.platform_components()
+
+ resp = self._get(channel)
+ decompress(resp.raw, dest=dest)
+ path = find_executable("servo", os.path.join(dest, "servo"))
+ st = os.stat(path)
+ os.chmod(path, st.st_mode | stat.S_IEXEC)
+ return path
+
+ def find_binary(self, venv_path=None, channel=None):
+ path = find_executable("servo", os.path.join(venv_path, "servo"))
+ if path is None:
+ path = find_executable("servo")
+ return path
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ return None
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ raise NotImplementedError
+
+ def version(self, binary=None, webdriver_binary=None):
+ """Retrieve the release version of the installed browser."""
+ output = call(binary, "--version")
+ m = re.search(r"Servo ([0-9\.]+-[a-f0-9]+)?(-dirty)?$", output.strip())
+ if m:
+ return m.group(0)
+
+
+class ServoWebDriver(Servo):
+ product = "servodriver"
+
+
+class Sauce(Browser):
+ """Sauce-specific interface."""
+
+ product = "sauce"
+ requirements = "requirements_sauce.txt"
+
+ def download(self, dest=None, channel=None, rename=None):
+ raise NotImplementedError
+
+ def install(self, dest=None, channel=None):
+ raise NotImplementedError
+
+    def find_binary(self, venv_path=None, channel=None):
+ raise NotImplementedError
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ raise NotImplementedError
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ raise NotImplementedError
+
+ def version(self, binary=None, webdriver_binary=None):
+ return None
+
+
+class WebKit(Browser):
+ """WebKit-specific interface."""
+
+ product = "webkit"
+ requirements = None
+
+ def download(self, dest=None, channel=None, rename=None):
+ raise NotImplementedError
+
+ def install(self, dest=None, channel=None):
+ raise NotImplementedError
+
+ def find_binary(self, venv_path=None, channel=None):
+ return None
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ return None
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ raise NotImplementedError
+
+ def version(self, binary=None, webdriver_binary=None):
+ return None
+
+
+class WebKitGTKMiniBrowser(WebKit):
+    """WebKitGTK MiniBrowser-specific interface."""
+
+ def _get_osidversion(self):
+ with open('/etc/os-release') as osrelease_handle:
+ for line in osrelease_handle.readlines():
+ if line.startswith('ID='):
+ os_id = line.split('=')[1].strip().strip('"')
+ if line.startswith('VERSION_ID='):
+ version_id = line.split('=')[1].strip().strip('"')
+ assert(os_id)
+ assert(version_id)
+ osidversion = os_id + '-' + version_id
+ assert(' ' not in osidversion)
+ assert(len(osidversion) > 3)
+ return osidversion.capitalize()
+
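
The /etc/os-release parsing above can be exercised against inline sample content (the sample is illustrative, not a real system file):

    sample = 'ID=ubuntu\nVERSION_ID="22.04"\nPRETTY_NAME="Ubuntu 22.04 LTS"\n'

    os_id = version_id = None
    for line in sample.splitlines():
        if line.startswith("ID="):
            os_id = line.split("=")[1].strip().strip('"')
        if line.startswith("VERSION_ID="):
            version_id = line.split("=")[1].strip().strip('"')

    print((os_id + "-" + version_id).capitalize())  # -> Ubuntu-22.04
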
+
+ def download(self, dest=None, channel=None, rename=None):
+        base_download_uri = "https://webkitgtk.org/built-products/"
+        base_download_dir = base_download_uri + "x86_64/release/" + channel + "/" + self._get_osidversion() + "/MiniBrowser/"
+        try:
+            response = get(base_download_dir + "LAST-IS")
+        except requests.exceptions.HTTPError as e:
+            if e.response.status_code == 404:
+                raise RuntimeError("Can't find a WebKitGTK MiniBrowser %s bundle for %s at %s"
+                                   % (channel, self._get_osidversion(), base_download_uri))
+ raise
+
+ bundle_filename = response.text.strip()
+ bundle_url = base_download_dir + bundle_filename
+
+ if dest is None:
+ dest = self._get_browser_binary_dir(None, channel)
+ bundle_file_path = os.path.join(dest, bundle_filename)
+
+ self.logger.info("Downloading WebKitGTK MiniBrowser bundle from %s" % bundle_url)
+ with open(bundle_file_path, "w+b") as f:
+ get_download_to_descriptor(f, bundle_url)
+
+ bundle_filename_no_ext, _ = os.path.splitext(bundle_filename)
+ bundle_hash_url = base_download_dir + bundle_filename_no_ext + ".sha256sum"
+ bundle_expected_hash = get(bundle_hash_url).text.strip().split(" ")[0]
+ bundle_computed_hash = sha256sum(bundle_file_path)
+
+ if bundle_expected_hash != bundle_computed_hash:
+            self.logger.error("Calculated SHA256 hash is %s but was expecting %s" % (bundle_computed_hash, bundle_expected_hash))
+ raise RuntimeError("The WebKitGTK MiniBrowser bundle at %s has incorrect SHA256 hash." % bundle_file_path)
+ return bundle_file_path
+
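
The hash check above relies on a sha256sum helper defined elsewhere in this module; a plain standard-library stand-in looks like this (the file name in the commented usage is hypothetical):

    import hashlib

    def sha256_of(path):
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                digest.update(chunk)
        return digest.hexdigest()

    # expected = get(bundle_hash_url).text.strip().split(" ")[0]
    # if sha256_of("minibrowser-bundle.zip") != expected:
    #     raise RuntimeError("SHA256 mismatch")
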
+ def install(self, dest=None, channel=None, prompt=True):
+ dest = self._get_browser_binary_dir(dest, channel)
+ bundle_path = self.download(dest, channel)
+ bundle_uncompress_directory = os.path.join(dest, "webkitgtk_minibrowser")
+
+ # Clean it from previous runs
+ if os.path.exists(bundle_uncompress_directory):
+ rmtree(bundle_uncompress_directory)
+ os.mkdir(bundle_uncompress_directory)
+
+ with open(bundle_path, "rb") as f:
+ unzip(f, bundle_uncompress_directory)
+
+ install_dep_script = os.path.join(bundle_uncompress_directory, "install-dependencies.sh")
+ if os.path.isfile(install_dep_script):
+ self.logger.info("Executing install-dependencies.sh script from bundle.")
+ install_dep_cmd = [install_dep_script]
+ if not prompt:
+ install_dep_cmd.append("--autoinstall")
+ # use subprocess.check_call() directly to display unbuffered stdout/stderr in real-time.
+ subprocess.check_call(install_dep_cmd)
+
+ minibrowser_path = os.path.join(bundle_uncompress_directory, "MiniBrowser")
+ if not os.path.isfile(minibrowser_path):
+ raise RuntimeError("Can't find a MiniBrowser binary at %s" % minibrowser_path)
+
+ os.remove(bundle_path)
+ install_ok_file = os.path.join(bundle_uncompress_directory, ".installation-ok")
+ open(install_ok_file, "w").close() # touch
+ self.logger.info("WebKitGTK MiniBrowser bundle for channel %s installed." % channel)
+ return minibrowser_path
+
+ def _find_executable_in_channel_bundle(self, binary, venv_path=None, channel=None):
+ if venv_path:
+ venv_base_path = self._get_browser_binary_dir(venv_path, channel)
+ bundle_dir = os.path.join(venv_base_path, "webkitgtk_minibrowser")
+ install_ok_file = os.path.join(bundle_dir, ".installation-ok")
+ if os.path.isfile(install_ok_file):
+ return find_executable(binary, bundle_dir)
+ return None
+
+
+ def find_binary(self, venv_path=None, channel=None):
+ minibrowser_path = self._find_executable_in_channel_bundle("MiniBrowser", venv_path, channel)
+ if minibrowser_path:
+ return minibrowser_path
+
+ libexecpaths = ["/usr/libexec/webkit2gtk-4.0"] # Fedora path
+ triplet = "x86_64-linux-gnu"
+ # Try to use GCC to detect this machine triplet
+ gcc = find_executable("gcc")
+ if gcc:
+ try:
+ triplet = call(gcc, "-dumpmachine").strip()
+ except subprocess.CalledProcessError:
+ pass
+ # Add Debian/Ubuntu path
+ libexecpaths.append("/usr/lib/%s/webkit2gtk-4.0" % triplet)
+ return find_executable("MiniBrowser", os.pathsep.join(libexecpaths))
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ webdriver_path = self._find_executable_in_channel_bundle("WebKitWebDriver", venv_path, channel)
+ if not webdriver_path:
+ webdriver_path = find_executable("WebKitWebDriver")
+ return webdriver_path
+
+ def version(self, binary=None, webdriver_binary=None):
+ if binary is None:
+ return None
+ try: # WebKitGTK MiniBrowser before 2.26.0 doesn't support --version
+ output = call(binary, "--version").strip()
+ except subprocess.CalledProcessError:
+ return None
+ # Example output: "WebKitGTK 2.26.1"
+ if output:
+ m = re.match(r"WebKitGTK (.+)", output)
+ if not m:
+ self.logger.warning("Failed to extract version from: %s" % output)
+ return None
+ return m.group(1)
+ return None
+
+
+class Epiphany(Browser):
+ """Epiphany-specific interface."""
+
+ product = "epiphany"
+ requirements = None
+
+ def download(self, dest=None, channel=None, rename=None):
+ raise NotImplementedError
+
+ def install(self, dest=None, channel=None):
+ raise NotImplementedError
+
+ def find_binary(self, venv_path=None, channel=None):
+ return find_executable("epiphany")
+
+ def find_webdriver(self, venv_path=None, channel=None):
+ return find_executable("WebKitWebDriver")
+
+ def install_webdriver(self, dest=None, channel=None, browser_binary=None):
+ raise NotImplementedError
+
+ def version(self, binary=None, webdriver_binary=None):
+ if binary is None:
+ return None
+ output = call(binary, "--version")
+ if output:
+ # Stable release output looks like: "Web 3.30.2"
+ # Tech Preview output looks like "Web 3.31.3-88-g97db4f40f"
+ return output.split()[1]
+ return None
diff --git a/testing/web-platform/tests/tools/wpt/commands.json b/testing/web-platform/tests/tools/wpt/commands.json
new file mode 100644
index 0000000000..41304a0122
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/commands.json
@@ -0,0 +1,94 @@
+{
+ "run": {
+ "path": "run.py",
+ "script": "run",
+ "parser": "create_parser",
+ "help": "Run tests in a browser",
+ "virtualenv": true,
+ "requirements": [
+ "../manifest/requirements.txt",
+ "../wptrunner/requirements.txt"
+ ],
+ "conditional_requirements": {
+ "commandline_flag": {
+ "enable_webtransport_h3": [
+ "../webtransport/requirements.txt"
+ ]
+ }
+ }
+ },
+ "create": {
+ "path": "create.py",
+ "script": "run",
+ "parser": "get_parser",
+ "help": "Create a new wpt test"
+ },
+ "update-expectations": {
+ "path": "update.py",
+ "script": "update_expectations",
+ "parser": "create_parser_update",
+ "help": "Update expectations files from raw logs.",
+ "virtualenv": true,
+ "requirements": [
+ "../wptrunner/requirements.txt"
+ ]
+ },
+ "files-changed": {
+ "path": "testfiles.py",
+ "script": "run_changed_files",
+ "parser": "get_parser",
+ "help": "Get a list of files that have changed",
+ "virtualenv": false
+ },
+ "tests-affected": {
+ "path": "testfiles.py",
+ "script": "run_tests_affected",
+ "parser": "get_parser_affected",
+ "help": "Get a list of tests affected by changes",
+ "virtualenv": false
+ },
+ "install": {
+ "path": "install.py",
+ "script": "run",
+ "parser": "get_parser",
+ "help": "Install browser components",
+ "virtualenv": true,
+ "requirements": [
+ "requirements_install.txt"
+ ]
+ },
+ "branch-point": {
+ "path": "testfiles.py",
+ "script": "display_branch_point",
+ "parser": null,
+ "help": "Print branch point from master",
+ "virtualenv": false
+ },
+ "rev-list": {
+ "path": "revlist.py",
+ "script": "run_rev_list",
+ "parser": "get_parser",
+ "help": "List tagged revisions at regular intervals",
+ "virtualenv": false
+ },
+ "install-android-emulator": {
+ "path": "android.py",
+ "script": "run_install",
+ "parser": "get_parser_install",
+ "help": "Setup the x86 android emulator",
+ "virtualenv": true,
+ "requirements": [
+ "requirements.txt"
+ ]
+ },
+ "start-android-emulator": {
+ "path": "android.py",
+ "script": "run_start",
+ "parser": "get_parser_start",
+ "help": "Start the x86 android emulator",
+ "virtualenv": true,
+ "requirements": [
+ "requirements.txt"
+ ]
+ }
+}
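
Each entry above maps a subcommand name to the module ("path"), entry point ("script"), argument parser, and virtualenv requirements used to run it. A small sketch of reading the registry (the relative path assumes a wpt checkout; this is not the wpt frontend itself):

    import json

    with open("tools/wpt/commands.json") as f:
        commands = json.load(f)

    for name, props in sorted(commands.items()):
        print(f"{name}: {props['path']}:{props['script']} - {props['help']}")
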
diff --git a/testing/web-platform/tests/tools/wpt/create.py b/testing/web-platform/tests/tools/wpt/create.py
new file mode 100644
index 0000000000..27a23ca901
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/create.py
@@ -0,0 +1,133 @@
+# mypy: allow-untyped-defs
+
+import subprocess
+import os
+
+here = os.path.dirname(__file__)
+
+template_prefix = """<!doctype html>
+%(documentElement)s<meta charset=utf-8>
+"""
+template_long_timeout = "<meta name=timeout content=long>\n"
+
+template_body_th = """<title></title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+
+</script>
+"""
+
+template_body_reftest = """<title></title>
+<link rel=%(match)s href=%(ref)s>
+"""
+
+template_body_reftest_wait = """<script src="/common/reftest-wait.js"></script>
+"""
+
+def get_parser():
+ import argparse
+ p = argparse.ArgumentParser()
+ p.add_argument("--no-editor", action="store_true",
+ help="Don't try to open the test in an editor")
+ p.add_argument("-e", "--editor", action="store", help="Editor to use")
+ p.add_argument("--long-timeout", action="store_true",
+ help="Test should be given a long timeout (typically 60s rather than 10s, but varies depending on environment)")
+ p.add_argument("--overwrite", action="store_true",
+ help="Allow overwriting an existing test file")
+ p.add_argument("-r", "--reftest", action="store_true",
+                   help="Create a reftest rather than a testharness (js) test")
+ p.add_argument("-m", "--reference", dest="ref", help="Path to the reference file")
+ p.add_argument("--mismatch", action="store_true",
+ help="Create a mismatch reftest")
+ p.add_argument("--wait", action="store_true",
+ help="Create a reftest that waits until takeScreenshot() is called")
+ p.add_argument("--tests-root", action="store", default=os.path.join(here, "..", ".."),
+ help="Path to the root of the wpt directory")
+ p.add_argument("path", action="store", help="Path to the test file")
+ return p
+
+
+
+def rel_path(path, tests_root):
+ if path is None:
+ return
+
+ abs_path = os.path.normpath(os.path.abspath(path))
+ return os.path.relpath(abs_path, tests_root)
+
+
+def run(_venv, **kwargs):
+ path = rel_path(kwargs["path"], kwargs["tests_root"])
+ ref_path = rel_path(kwargs["ref"], kwargs["tests_root"])
+
+ if kwargs["ref"]:
+ kwargs["reftest"] = True
+
+ if ".." in path:
+ print("""Test path %s is not under wpt root.""" % path)
+ return 1
+
+ if ref_path and ".." in ref_path:
+ print("""Reference path %s is not under wpt root""" % ref_path)
+ return 1
+
+
+ if os.path.exists(path) and not kwargs["overwrite"]:
+ print("Test path already exists, pass --overwrite to replace")
+ return 1
+
+ if kwargs["mismatch"] and not kwargs["reftest"]:
+ print("--mismatch only makes sense for a reftest")
+ return 1
+
+ if kwargs["wait"] and not kwargs["reftest"]:
+ print("--wait only makes sense for a reftest")
+ return 1
+
+ args = {"documentElement": "<html class=reftest-wait>\n" if kwargs["wait"] else ""}
+ template = template_prefix % args
+ if kwargs["long_timeout"]:
+ template += template_long_timeout
+
+ if kwargs["reftest"]:
+ args = {"match": "match" if not kwargs["mismatch"] else "mismatch",
+ "ref": os.path.relpath(ref_path, path) if kwargs["ref"] else '""'}
+ template += template_body_reftest % args
+ if kwargs["wait"]:
+ template += template_body_reftest_wait
+ else:
+ template += template_body_th
+ try:
+ os.makedirs(os.path.dirname(path))
+ except OSError:
+ pass
+ with open(path, "w") as f:
+ f.write(template)
+
+ ref_path = kwargs["ref"]
+ if ref_path and not os.path.exists(ref_path):
+ with open(ref_path, "w") as f:
+ f.write(template_prefix % {"documentElement": ""})
+
+ if kwargs["no_editor"]:
+ editor = None
+ elif kwargs["editor"]:
+ editor = kwargs["editor"]
+ elif "VISUAL" in os.environ:
+ editor = os.environ["VISUAL"]
+ elif "EDITOR" in os.environ:
+ editor = os.environ["EDITOR"]
+ else:
+ editor = None
+
+ proc = None
+ if editor:
+ if ref_path:
+ path = f"{path} {ref_path}"
+ proc = subprocess.Popen(f"{editor} {path}", shell=True)
+ else:
+ print("Created test %s" % path)
+
+ if proc:
+ proc.wait()
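
For reference, a standalone sketch of the file contents run() writes in the default case (no --reftest, --long-timeout, or --wait); the two constants are restated so the snippet runs on its own:

    template_prefix = "<!doctype html>\n%(documentElement)s<meta charset=utf-8>\n"
    template_body_th = (
        "<title></title>\n"
        "<script src=/resources/testharness.js></script>\n"
        "<script src=/resources/testharnessreport.js></script>\n"
        "<script>\n"
        "\n"
        "</script>\n"
    )

    print(template_prefix % {"documentElement": ""} + template_body_th, end="")
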
diff --git a/testing/web-platform/tests/tools/wpt/install.py b/testing/web-platform/tests/tools/wpt/install.py
new file mode 100644
index 0000000000..821ce86f97
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/install.py
@@ -0,0 +1,120 @@
+# mypy: allow-untyped-defs
+
+import argparse
+from . import browser
+
+latest_channels = {
+ 'android_weblayer': 'dev',
+ 'android_webview': 'dev',
+ 'firefox': 'nightly',
+ 'chrome': 'nightly',
+ 'chrome_android': 'dev',
+ 'chromium': 'nightly',
+ 'edgechromium': 'dev',
+ 'safari': 'preview',
+ 'servo': 'nightly',
+ 'webkitgtk_minibrowser': 'nightly'
+}
+
+channel_by_name = {
+ 'stable': 'stable',
+ 'release': 'stable',
+ 'beta': 'beta',
+ 'dev': 'dev',
+ 'canary': 'canary',
+ 'nightly': latest_channels,
+ 'preview': latest_channels,
+ 'experimental': latest_channels,
+}
+
+channel_args = argparse.ArgumentParser(add_help=False)
+channel_args.add_argument('--channel', choices=channel_by_name.keys(),
+ default='nightly', action='store',
+ help='''
+Name of browser release channel (default: nightly). "stable" and "release" are
+synonyms for the latest browser stable release; "beta" is the beta release;
+"dev" is only meaningful for Chrome (i.e. Chrome Dev); "nightly",
+"experimental", and "preview" are all synonyms for the latest available
+development or trunk release. (For WebDriver installs, we attempt to select an
+appropriate, compatible version for the latest browser release on the selected
+channel.) This flag overrides --browser-channel.''')
+
+
+def get_parser():
+ parser = argparse.ArgumentParser(
+ parents=[channel_args],
+ description="Install a given browser or webdriver frontend.")
+ parser.add_argument('browser', choices=['firefox', 'chrome', 'chromium', 'servo', 'safari'],
+ help='name of web browser product')
+ parser.add_argument('component', choices=['browser', 'webdriver'],
+ help='name of component')
+ parser.add_argument('--download-only', action="store_true",
+ help="Download the selected component but don't install it")
+ parser.add_argument('--rename', action="store", default=None,
+ help="Filename, excluding extension for downloaded archive "
+ "(only with --download-only)")
+ parser.add_argument('-d', '--destination',
+ help='filesystem directory to place the component')
+ parser.add_argument('--revision', default=None,
+ help='Chromium revision to install from snapshots')
+ return parser
+
+
+def get_channel(browser, channel):
+ channel = channel_by_name[channel]
+ if isinstance(channel, dict):
+ channel = channel.get(browser)
+ return channel
+
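
The alias handling in get_channel can be illustrated standalone with abbreviated tables of the same shape as the ones above:

    latest = {"firefox": "nightly", "chrome": "nightly", "safari": "preview"}
    by_name = {"stable": "stable", "beta": "beta", "nightly": latest, "preview": latest}

    def resolve(browser_name, channel_name):
        channel = by_name[channel_name]
        if isinstance(channel, dict):
            channel = channel.get(browser_name)
        return channel

    assert resolve("safari", "nightly") == "preview"
    assert resolve("chrome", "preview") == "nightly"
    assert resolve("firefox", "beta") == "beta"
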
+
+def run(venv, **kwargs):
+ import logging
+ logger = logging.getLogger("install")
+
+ browser = kwargs["browser"]
+ destination = kwargs["destination"]
+ channel = get_channel(browser, kwargs["channel"])
+
+ if channel != kwargs["channel"]:
+ logger.info("Interpreting channel '%s' as '%s'", kwargs["channel"], channel)
+
+ if destination is None:
+ if venv:
+ if kwargs["component"] == "browser":
+ destination = venv.path
+ else:
+ destination = venv.bin_path
+ else:
+ raise argparse.ArgumentError(None,
+ "No --destination argument, and no default for the environment")
+
+ if kwargs["revision"] is not None and browser != "chromium":
+ raise argparse.ArgumentError(None, "--revision flag cannot be used for non-Chromium browsers.")
+
+ install(browser, kwargs["component"], destination, channel, logger=logger,
+ download_only=kwargs["download_only"], rename=kwargs["rename"],
+ revision=kwargs["revision"])
+
+
+def install(name, component, destination, channel="nightly", logger=None, download_only=False,
+ rename=None, revision=None):
+ if logger is None:
+ import logging
+ logger = logging.getLogger("install")
+
+ prefix = "download" if download_only else "install"
+ suffix = "_webdriver" if component == 'webdriver' else ""
+
+ method = prefix + suffix
+
+ browser_cls = getattr(browser, name.title())
+ logger.info('Now installing %s %s...', name, component)
+ kwargs = {}
+ if download_only and rename:
+ kwargs["rename"] = rename
+ if revision:
+ kwargs["revision"] = revision
+
+ path = getattr(browser_cls(logger), method)(dest=destination, channel=channel, **kwargs)
+ if path:
+ logger.info('Binary %s as %s', "downloaded" if download_only else "installed", path)
diff --git a/testing/web-platform/tests/tools/wpt/markdown.py b/testing/web-platform/tests/tools/wpt/markdown.py
new file mode 100644
index 0000000000..e1d8c4ebfe
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/markdown.py
@@ -0,0 +1,44 @@
+# mypy: allow-untyped-defs
+
+from functools import reduce
+
+def format_comment_title(product):
+ """Produce a Markdown-formatted string based on a given "product"--a string
+ containing a browser identifier optionally followed by a colon and a
+ release channel. (For example: "firefox" or "chrome:dev".) The generated
+ title string is used both to create new comments and to locate (and
+ subsequently update) previously-submitted comments."""
+ parts = product.split(":")
+ title = parts[0].title()
+
+ if len(parts) > 1:
+ title += " (%s)" % parts[1]
+
+ return "# %s #" % title
+
+
+def markdown_adjust(s):
+ """Escape problematic markdown sequences."""
+ s = s.replace('\t', '\\t')
+ s = s.replace('\n', '\\n')
+ s = s.replace('\r', '\\r')
+ s = s.replace('`', '')
+ s = s.replace('|', '\\|')
+ return s
+
+
+def table(headings, data, log):
+ """Create and log data to specified logger in tabular format."""
+ cols = range(len(headings))
+ assert all(len(item) == len(cols) for item in data)
+ max_widths = reduce(lambda prev, cur: [(len(cur[i]) + 2)
+ if (len(cur[i]) + 2) > prev[i]
+ else prev[i]
+ for i in cols],
+ data,
+ [len(item) + 2 for item in headings])
+ log("|%s|" % "|".join(item.center(max_widths[i]) for i, item in enumerate(headings)))
+ log("|%s|" % "|".join("-" * max_widths[i] for i in cols))
+ for row in data:
+ log("|%s|" % "|".join(" %s" % row[i].ljust(max_widths[i] - 1) for i in cols))
+ log("")
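
A usage sketch for table(), assuming it is run from a wpt checkout so that tools.wpt.markdown is importable:

    from tools.wpt.markdown import table

    table(["Browser", "Channel"],
          [["firefox", "nightly"], ["chrome", "dev"]],
          print)
    # | Browser | Channel |
    # |---------|---------|
    # | firefox | nightly |
    # | chrome  | dev     |
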
diff --git a/testing/web-platform/tests/tools/wpt/paths b/testing/web-platform/tests/tools/wpt/paths
new file mode 100644
index 0000000000..7e9ae837ec
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/paths
@@ -0,0 +1,7 @@
+docs/
+tools/ci/
+tools/docker/
+tools/lint/
+tools/manifest/
+tools/serve/
+tools/wpt/
diff --git a/testing/web-platform/tests/tools/wpt/requirements.txt b/testing/web-platform/tests/tools/wpt/requirements.txt
new file mode 100644
index 0000000000..a743bbe341
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/requirements.txt
@@ -0,0 +1 @@
+requests==2.27.1
diff --git a/testing/web-platform/tests/tools/wpt/requirements_install.txt b/testing/web-platform/tests/tools/wpt/requirements_install.txt
new file mode 100644
index 0000000000..5db7bce788
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/requirements_install.txt
@@ -0,0 +1 @@
+mozinstall==2.0.1
diff --git a/testing/web-platform/tests/tools/wpt/revlist.py b/testing/web-platform/tests/tools/wpt/revlist.py
new file mode 100644
index 0000000000..e9fea30522
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/revlist.py
@@ -0,0 +1,107 @@
+import argparse
+import os
+import time
+from typing import Any, Iterator, Tuple
+
+from tools.wpt.testfiles import get_git_cmd
+
+here = os.path.dirname(__file__)
+wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
+
+
+def calculate_cutoff_date(until: int, epoch: int, offset: int) -> int:
+ return (((until - offset) // epoch) * epoch) + offset
+
+
+def parse_epoch(string: str) -> int:
+ UNIT_DICT = {"h": 3600, "d": 86400, "w": 604800}
+ base = string[:-1]
+ unit = string[-1:]
+ if base.isdigit() and unit in UNIT_DICT:
+ return int(base) * UNIT_DICT[unit]
+ raise argparse.ArgumentTypeError('must be digits followed by h/d/w')
+
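
As a concrete check of the arithmetic above (standalone; it restates the formula rather than importing this module):

    WEEK = 604800           # parse_epoch("1w")
    MONDAY_OFFSET = 345600  # 1970-01-05 00:00:00 UTC

    def cutoff(until, epoch, offset):
        return (((until - offset) // epoch) * epoch) + offset

    # 2022-06-15 12:00:00 UTC truncates to Monday 2022-06-13 00:00:00 UTC.
    assert cutoff(1655294400, WEEK, MONDAY_OFFSET) == 1655078400
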
+
+def get_tagged_revisions(pattern: str) -> Iterator[Tuple[str, str, int]]:
+ '''
+ Iterates the tagged revisions as (tag name, commit sha, committer date) tuples.
+ '''
+ git = get_git_cmd(wpt_root)
+ args = [
+ pattern,
+ '--sort=-committerdate',
+ '--format=%(refname:lstrip=2) %(objectname) %(committerdate:raw)',
+ '--count=100000'
+ ]
+ ref_list = git("for-each-ref", *args) # type: ignore
+ for line in ref_list.splitlines():
+ if not line:
+ continue
+ tag, commit, date, _ = line.split(" ")
+ date = int(date)
+ yield tag, commit, date
+
+
+def get_epoch_revisions(epoch: int, until: int, max_count: int) -> Iterator[str]:
+    # Set an offset so that the weekly epoch is counted from Monday 00:00:00.
+    # This is particularly important for the weekly epoch because it pins the
+    # start of each epoch to a Monday. The offset is the interval from
+    # Thursday, 1 January 1970 00:00:00 to Monday, 5 January 1970 00:00:00
+    # (345600 seconds).
+ count = 0
+
+    # Iterate the tagged revisions in descending order, finding the most
+    # recent commit that is still older than the current "cutoff_date" value.
+    # When a commit is found, "cutoff_date" is set to a new multiple of
+    # "epoch" that is still below the date of the commit just found.
+    # This is needed to deal with intervals in which no candidate was found
+    # for the current "epoch" and the next candidate found is already below
+    # the lower bound of that interval (as is the case for J and I in the
+    # interval between Tue and Wed in the example below). The algorithm fixes
+    # the next "cutoff_date" value based on the date of the current commit,
+    # skipping the intermediate values.
+    # The loop ends once we have reached the required number of revisions,
+    # there are no more tagged revisions, or cutoff_date reaches zero.
+ #
+ # Fri Sat Sun Mon Tue Wed Thu Fri Sat
+ # | | | | | | | | |
+ # -A---B-C---DEF---G---H--IJ----------K-----L-M----N--O--
+ # ^
+ # now
+ # Expected result: N,M,K,J,H,G,F,C,A
+
+ cutoff_date = calculate_cutoff_date(until, epoch, epoch_offset)
+ for _, commit, date in get_tagged_revisions("refs/tags/merge_pr_*"):
+ if count >= max_count:
+ return
+ if date < cutoff_date:
+ yield commit
+ count += 1
+ cutoff_date = calculate_cutoff_date(date, epoch, epoch_offset)
+
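
A toy rerun of the selection loop above on small integers (epoch of 10 time units; the Monday offset is omitted for brevity):

    def pick(dates, epoch, until, max_count):
        cutoff = (until // epoch) * epoch
        picked = []
        for date in sorted(dates, reverse=True):
            if len(picked) >= max_count:
                break
            if date < cutoff:
                picked.append(date)
                cutoff = (date // epoch) * epoch
        return picked

    # Commits at t=97, 93, 92, 85, 71 with "now" at 100: 97 is taken (cutoff
    # drops to 90), 93 and 92 are skipped, then 85 and 71 are taken.
    assert pick([97, 93, 92, 85, 71], 10, 100, 5) == [97, 85, 71]
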
+
+def get_parser() -> argparse.ArgumentParser:
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--epoch",
+ default="1d",
+ type=parse_epoch,
+                        help="regular interval of time used to select the "
+                             "tagged revisions. Valid values are digits "
+                             "followed by h/d/w (e.g. 9h, 9d, 9w ...) where "
+                             "the minimum selectable interval is one hour "
+                             "(1h)")
+ parser.add_argument("--max-count",
+ default=1,
+ type=int,
+ help="maximum number of revisions to be returned by "
+ "the command")
+ return parser
+
+
+def run_rev_list(**kwargs: Any) -> None:
+    # "epoch_threshold" is a safety margin. After this time it is safe to
+    # assume that any tags have been created and pushed.
+ epoch_threshold = 600
+ until = int(time.time()) - epoch_threshold
+ for line in get_epoch_revisions(kwargs["epoch"], until, kwargs["max_count"]):
+ print(line)
diff --git a/testing/web-platform/tests/tools/wpt/run.py b/testing/web-platform/tests/tools/wpt/run.py
new file mode 100644
index 0000000000..468bea3c51
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/run.py
@@ -0,0 +1,873 @@
+# mypy: allow-untyped-defs
+
+import argparse
+import os
+import platform
+import sys
+from distutils.spawn import find_executable
+from typing import ClassVar, Tuple, Type
+
+wpt_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+sys.path.insert(0, os.path.abspath(os.path.join(wpt_root, "tools")))
+
+from . import browser, install, testfiles
+from ..serve import serve
+
+logger = None
+
+
+class WptrunError(Exception):
+ pass
+
+
+class WptrunnerHelpAction(argparse.Action):
+ def __init__(self,
+ option_strings,
+ dest=argparse.SUPPRESS,
+ default=argparse.SUPPRESS,
+ help=None):
+ super().__init__(
+ option_strings=option_strings,
+ dest=dest,
+ default=default,
+ nargs=0,
+ help=help)
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ from wptrunner import wptcommandline
+ wptparser = wptcommandline.create_parser()
+ wptparser.usage = parser.usage
+ wptparser.print_help()
+ parser.exit()
+
+
+def create_parser():
+ from wptrunner import wptcommandline
+
+ parser = argparse.ArgumentParser(add_help=False, parents=[install.channel_args])
+ parser.add_argument("product", action="store",
+ help="Browser to run tests in")
+ parser.add_argument("--affected", action="store", default=None,
+ help="Run affected tests since revish")
+ parser.add_argument("--yes", "-y", dest="prompt", action="store_false", default=True,
+ help="Don't prompt before installing components")
+ parser.add_argument("--install-browser", action="store_true",
+ help="Install the browser from the release channel specified by --channel "
+ "(or the nightly channel by default).")
+ parser.add_argument("--install-webdriver", action="store_true",
+ help="Install WebDriver from the release channel specified by --channel "
+ "(or the nightly channel by default).")
+ parser._add_container_actions(wptcommandline.create_parser())
+ return parser
+
+
+def exit(msg=None):
+ if msg:
+ logger.critical(msg)
+ sys.exit(1)
+ else:
+ sys.exit(0)
+
+
+def args_general(kwargs):
+
+ def set_if_none(name, value):
+ if kwargs.get(name) is None:
+ kwargs[name] = value
+ logger.info("Set %s to %s" % (name, value))
+
+ set_if_none("tests_root", wpt_root)
+ set_if_none("metadata_root", wpt_root)
+ set_if_none("manifest_update", True)
+ set_if_none("manifest_download", True)
+
+ if kwargs["ssl_type"] in (None, "pregenerated"):
+ cert_root = os.path.join(wpt_root, "tools", "certs")
+ if kwargs["ca_cert_path"] is None:
+ kwargs["ca_cert_path"] = os.path.join(cert_root, "cacert.pem")
+
+ if kwargs["host_key_path"] is None:
+ kwargs["host_key_path"] = os.path.join(cert_root, "web-platform.test.key")
+
+ if kwargs["host_cert_path"] is None:
+ kwargs["host_cert_path"] = os.path.join(cert_root, "web-platform.test.pem")
+ elif kwargs["ssl_type"] == "openssl":
+ if not find_executable(kwargs["openssl_binary"]):
+            if platform.uname()[0] == "Windows":
+ raise WptrunError("""OpenSSL binary not found. If you need HTTPS tests, install OpenSSL from
+
+https://slproweb.com/products/Win32OpenSSL.html
+
+making sure that the libraries are added to /bin, and add the resulting bin
+directory to your PATH.
+
+Otherwise run with --ssl-type=none""")
+ else:
+ raise WptrunError("""OpenSSL not found. If you don't need HTTPS support run with --ssl-type=none,
+otherwise install OpenSSL and ensure that it's on your $PATH.""")
+
+
+def check_environ(product):
+ if product not in ("android_weblayer", "android_webview", "chrome",
+ "chrome_android", "chrome_ios", "content_shell",
+ "firefox", "firefox_android", "servo"):
+ config_builder = serve.build_config(os.path.join(wpt_root, "config.json"))
+ # Override the ports to avoid looking for free ports
+ config_builder.ssl = {"type": "none"}
+ config_builder.ports = {"http": [8000]}
+
+ is_windows = platform.uname()[0] == "Windows"
+
+ with config_builder as config:
+ expected_hosts = set(config.domains_set)
+ if is_windows:
+ expected_hosts.update(config.not_domains_set)
+
+ missing_hosts = set(expected_hosts)
+ if is_windows:
+ hosts_path = r"%s\System32\drivers\etc\hosts" % os.environ.get(
+ "SystemRoot", r"C:\Windows")
+ else:
+ hosts_path = "/etc/hosts"
+
+ if os.path.abspath(os.curdir) == wpt_root:
+ wpt_path = "wpt"
+ else:
+ wpt_path = os.path.join(wpt_root, "wpt")
+
+ with open(hosts_path) as f:
+ for line in f:
+ line = line.split("#", 1)[0].strip()
+ parts = line.split()
+ hosts = parts[1:]
+ for host in hosts:
+ missing_hosts.discard(host)
+ if missing_hosts:
+ if is_windows:
+ message = """Missing hosts file configuration. Run
+
+python %s make-hosts-file | Out-File %s -Encoding ascii -Append
+
+in PowerShell with Administrator privileges.""" % (wpt_path, hosts_path)
+ else:
+ message = """Missing hosts file configuration. Run
+
+%s make-hosts-file | sudo tee -a %s""" % ("./wpt" if wpt_path == "wpt" else wpt_path,
+ hosts_path)
+ raise WptrunError(message)
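+# Note on check_environ (illustrative): the hosts file is expected to map the
+# wpt domains (web-platform.test and its subdomains) to the loopback address,
+# e.g. "127.0.0.1  web-platform.test"; the make-hosts-file command referenced
+# in the messages above prints entries of that form.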
+
+
+class BrowserSetup:
+ name = None # type: ClassVar[str]
+ browser_cls = None # type: ClassVar[Type[browser.Browser]]
+
+ def __init__(self, venv, prompt=True):
+ self.browser = self.browser_cls(logger)
+ self.venv = venv
+ self.prompt = prompt
+
+ def prompt_install(self, component):
+ if not self.prompt:
+ return True
+ while True:
+ resp = input("Download and install %s [Y/n]? " % component).strip().lower()
+ if not resp or resp == "y":
+ return True
+ elif resp == "n":
+ return False
+
+ def install(self, channel=None):
+ if self.prompt_install(self.name):
+ return self.browser.install(self.venv.path, channel)
+
+ def install_requirements(self):
+ if not self.venv.skip_virtualenv_setup and self.browser.requirements:
+ self.venv.install_requirements(os.path.join(
+ wpt_root, "tools", "wptrunner", self.browser.requirements))
+
+ def setup(self, kwargs):
+ self.setup_kwargs(kwargs)
+
+
+def safe_unsetenv(env_var):
+ """Safely remove an environment variable.
+
+    os.unsetenv is not supported on Windows before Python 3.9, so remove the
+    variable directly from os.environ instead.
+ """
+ try:
+ del os.environ[env_var]
+ except KeyError:
+ pass
+
+
+class Firefox(BrowserSetup):
+ name = "firefox"
+ browser_cls = browser.Firefox
+
+ def setup_kwargs(self, kwargs):
+ if kwargs["binary"] is None:
+ if kwargs["browser_channel"] is None:
+ kwargs["browser_channel"] = "nightly"
+ logger.info("No browser channel specified. Running nightly instead.")
+
+ binary = self.browser.find_binary(self.venv.path,
+ kwargs["browser_channel"])
+ if binary is None:
+ raise WptrunError("""Firefox binary not found on $PATH.
+
+Install Firefox or use --binary to set the binary path""")
+ kwargs["binary"] = binary
+
+ if kwargs["certutil_binary"] is None and kwargs["ssl_type"] != "none":
+ certutil = self.browser.find_certutil()
+
+ if certutil is None:
+ # Can't download this for now because it's missing the libnss3 library
+ logger.info("""Can't find certutil, certificates will not be checked.
+Consider installing certutil via your OS package manager or directly.""")
+ else:
+ logger.info("Using certutil %s" % certutil)
+
+ kwargs["certutil_binary"] = certutil
+
+ if kwargs["webdriver_binary"] is None and "wdspec" in kwargs["test_types"]:
+ webdriver_binary = None
+ if not kwargs["install_webdriver"]:
+ webdriver_binary = self.browser.find_webdriver()
+
+ if webdriver_binary is None:
+ install = self.prompt_install("geckodriver")
+
+ if install:
+ logger.info("Downloading geckodriver")
+ webdriver_binary = self.browser.install_webdriver(
+ dest=self.venv.bin_path,
+ channel=kwargs["browser_channel"],
+ browser_binary=kwargs["binary"])
+ else:
+ logger.info("Using webdriver binary %s" % webdriver_binary)
+
+ if webdriver_binary:
+ kwargs["webdriver_binary"] = webdriver_binary
+ else:
+ logger.info("Unable to find or install geckodriver, skipping wdspec tests")
+ kwargs["test_types"].remove("wdspec")
+
+ if kwargs["prefs_root"] is None:
+ prefs_root = self.browser.install_prefs(kwargs["binary"],
+ self.venv.path,
+ channel=kwargs["browser_channel"])
+ kwargs["prefs_root"] = prefs_root
+
+ if kwargs["headless"] is None and not kwargs["debug_test"]:
+ kwargs["headless"] = True
+ logger.info("Running in headless mode, pass --no-headless to disable")
+
+ # Turn off Firefox WebRTC ICE logging on WPT (turned on by mozrunner)
+ safe_unsetenv('R_LOG_LEVEL')
+ safe_unsetenv('R_LOG_DESTINATION')
+ safe_unsetenv('R_LOG_VERBOSE')
+
+ # Allow WebRTC tests to call getUserMedia.
+ kwargs["extra_prefs"].append("media.navigator.streams.fake=true")
+
+
+class FirefoxAndroid(BrowserSetup):
+ name = "firefox_android"
+ browser_cls = browser.FirefoxAndroid
+
+ def setup_kwargs(self, kwargs):
+ from . import android
+ import mozdevice
+
+ # We don't support multiple channels for android yet
+ if kwargs["browser_channel"] is None:
+ kwargs["browser_channel"] = "nightly"
+
+ if kwargs["prefs_root"] is None:
+ prefs_root = self.browser.install_prefs(kwargs["binary"],
+ self.venv.path,
+ channel=kwargs["browser_channel"])
+ kwargs["prefs_root"] = prefs_root
+
+ if kwargs["package_name"] is None:
+ kwargs["package_name"] = "org.mozilla.geckoview.test_runner"
+ app = kwargs["package_name"]
+
+ if not kwargs["device_serial"]:
+ kwargs["device_serial"] = ["emulator-5554"]
+
+ for device_serial in kwargs["device_serial"]:
+ if device_serial.startswith("emulator-"):
+ # We're running on an emulator so ensure that's set up
+ emulator = android.install(logger,
+ reinstall=False,
+ no_prompt=not self.prompt,
+ device_serial=device_serial)
+ android.start(logger,
+ emulator=emulator,
+ reinstall=False,
+ device_serial=device_serial)
+
+ if "ADB_PATH" not in os.environ:
+ adb_path = os.path.join(android.get_sdk_path(None),
+ "platform-tools",
+ "adb")
+ os.environ["ADB_PATH"] = adb_path
+ adb_path = os.environ["ADB_PATH"]
+
+ for device_serial in kwargs["device_serial"]:
+ device = mozdevice.ADBDeviceFactory(adb=adb_path,
+ device=device_serial)
+
+ if self.browser.apk_path:
+ device.uninstall_app(app)
+ device.install_app(self.browser.apk_path)
+ elif not device.is_app_installed(app):
+ raise WptrunError("app %s not installed on device %s" %
+ (app, device_serial))
+
+
+class Chrome(BrowserSetup):
+ name = "chrome"
+ browser_cls = browser.Chrome # type: ClassVar[Type[browser.ChromeChromiumBase]]
+ experimental_channels = ("dev", "canary", "nightly") # type: ClassVar[Tuple[str, ...]]
+
+ def setup_kwargs(self, kwargs):
+ browser_channel = kwargs["browser_channel"]
+ if kwargs["binary"] is None:
+ binary = self.browser.find_binary(venv_path=self.venv.path, channel=browser_channel)
+ if binary:
+ kwargs["binary"] = binary
+ else:
+ raise WptrunError(f"Unable to locate {self.name.capitalize()} binary")
+
+ if kwargs["mojojs_path"]:
+ kwargs["enable_mojojs"] = True
+ logger.info("--mojojs-path is provided, enabling MojoJS")
+ else:
+ path = self.browser.install_mojojs(dest=self.venv.path,
+ browser_binary=kwargs["binary"])
+ if path:
+ kwargs["mojojs_path"] = path
+ kwargs["enable_mojojs"] = True
+ logger.info(f"MojoJS enabled automatically (mojojs_path: {path})")
+ else:
+ kwargs["enable_mojojs"] = False
+ logger.info("MojoJS is disabled for this run.")
+
+ if kwargs["webdriver_binary"] is None:
+ webdriver_binary = None
+ if not kwargs["install_webdriver"]:
+ webdriver_binary = self.browser.find_webdriver(self.venv.bin_path)
+ if webdriver_binary and not self.browser.webdriver_supports_browser(
+ webdriver_binary, kwargs["binary"], browser_channel):
+ webdriver_binary = None
+
+ if webdriver_binary is None:
+ install = self.prompt_install("chromedriver")
+
+ if install:
+ webdriver_binary = self.browser.install_webdriver(
+ dest=self.venv.bin_path,
+ channel=browser_channel,
+ browser_binary=kwargs["binary"],
+ )
+ else:
+ logger.info("Using webdriver binary %s" % webdriver_binary)
+
+ if webdriver_binary:
+ kwargs["webdriver_binary"] = webdriver_binary
+ else:
+ raise WptrunError("Unable to locate or install matching ChromeDriver binary")
+ if browser_channel in self.experimental_channels:
+ # HACK(Hexcles): work around https://github.com/web-platform-tests/wpt/issues/16448
+ kwargs["webdriver_args"].append("--disable-build-check")
+ if kwargs["enable_experimental"] is None:
+ logger.info(
+ "Automatically turning on experimental features for Chrome Dev/Canary or Chromium trunk")
+ kwargs["enable_experimental"] = True
+ if kwargs["enable_webtransport_h3"] is None:
+ # To start the WebTransport over HTTP/3 test server.
+ kwargs["enable_webtransport_h3"] = True
+ if os.getenv("TASKCLUSTER_ROOT_URL"):
+ # We are on Taskcluster, where our Docker container does not have
+ # enough capabilities to run Chrome with sandboxing. (gh-20133)
+ kwargs["binary_args"].append("--no-sandbox")
+
+
+class ContentShell(BrowserSetup):
+ name = "content_shell"
+ browser_cls = browser.ContentShell
+ experimental_channels = ("dev", "canary", "nightly")
+
+ def setup_kwargs(self, kwargs):
+ browser_channel = kwargs["browser_channel"]
+ if kwargs["binary"] is None:
+ binary = self.browser.find_binary(venv_path=self.venv.path, channel=browser_channel)
+ if binary:
+ kwargs["binary"] = binary
+ else:
+ raise WptrunError(f"Unable to locate {self.name.capitalize()} binary")
+
+ if kwargs["mojojs_path"]:
+ kwargs["enable_mojojs"] = True
+ logger.info("--mojojs-path is provided, enabling MojoJS")
+ elif kwargs["enable_mojojs"]:
+ logger.warning(f"Cannot install MojoJS for {self.name}, "
+ "which does not return version information. "
+ "Provide '--mojojs-path' explicitly instead.")
+ logger.warning("MojoJS is disabled for this run.")
+
+ kwargs["enable_webtransport_h3"] = True
+
+
+class Chromium(Chrome):
+ name = "chromium"
+ browser_cls = browser.Chromium # type: ClassVar[Type[browser.ChromeChromiumBase]]
+ experimental_channels = ("nightly",)
+
+
+class ChromeAndroidBase(BrowserSetup):
+ experimental_channels = ("dev", "canary")
+
+ def setup_kwargs(self, kwargs):
+ if kwargs.get("device_serial"):
+ self.browser.device_serial = kwargs["device_serial"]
+ if kwargs.get("adb_binary"):
+ self.browser.adb_binary = kwargs["adb_binary"]
+ browser_channel = kwargs["browser_channel"]
+ if kwargs["package_name"] is None:
+ kwargs["package_name"] = self.browser.find_binary(
+ channel=browser_channel)
+ if kwargs["webdriver_binary"] is None:
+ webdriver_binary = None
+ if not kwargs["install_webdriver"]:
+ webdriver_binary = self.browser.find_webdriver()
+
+ if webdriver_binary is None:
+ install = self.prompt_install("chromedriver")
+
+ if install:
+ logger.info("Downloading chromedriver")
+ webdriver_binary = self.browser.install_webdriver(
+ dest=self.venv.bin_path,
+ channel=browser_channel,
+ browser_binary=kwargs["package_name"],
+ )
+ else:
+ logger.info("Using webdriver binary %s" % webdriver_binary)
+
+ if webdriver_binary:
+ kwargs["webdriver_binary"] = webdriver_binary
+ else:
+ raise WptrunError("Unable to locate or install chromedriver binary")
+
+
+class ChromeAndroid(ChromeAndroidBase):
+ name = "chrome_android"
+ browser_cls = browser.ChromeAndroid
+
+ def setup_kwargs(self, kwargs):
+ super().setup_kwargs(kwargs)
+ if kwargs["browser_channel"] in self.experimental_channels:
+ # HACK(Hexcles): work around https://github.com/web-platform-tests/wpt/issues/16448
+ kwargs["webdriver_args"].append("--disable-build-check")
+ if kwargs["enable_experimental"] is None:
+ logger.info("Automatically turning on experimental features for Chrome Dev/Canary")
+ kwargs["enable_experimental"] = True
+
+
+class ChromeiOS(BrowserSetup):
+ name = "chrome_ios"
+ browser_cls = browser.ChromeiOS
+
+ def setup_kwargs(self, kwargs):
+ if kwargs["webdriver_binary"] is None:
+ raise WptrunError("Unable to locate or install chromedriver binary")
+
+
+class AndroidWeblayer(ChromeAndroidBase):
+ name = "android_weblayer"
+ browser_cls = browser.AndroidWeblayer
+
+ def setup_kwargs(self, kwargs):
+ super().setup_kwargs(kwargs)
+ if kwargs["browser_channel"] in self.experimental_channels and kwargs["enable_experimental"] is None:
+ logger.info("Automatically turning on experimental features for WebLayer Dev/Canary")
+ kwargs["enable_experimental"] = True
+
+
+class AndroidWebview(ChromeAndroidBase):
+ name = "android_webview"
+ browser_cls = browser.AndroidWebview
+
+
+class Opera(BrowserSetup):
+ name = "opera"
+ browser_cls = browser.Opera
+
+ def setup_kwargs(self, kwargs):
+ if kwargs["webdriver_binary"] is None:
+ webdriver_binary = None
+ if not kwargs["install_webdriver"]:
+ webdriver_binary = self.browser.find_webdriver()
+
+ if webdriver_binary is None:
+ install = self.prompt_install("operadriver")
+
+ if install:
+ logger.info("Downloading operadriver")
+ webdriver_binary = self.browser.install_webdriver(
+ dest=self.venv.bin_path,
+ channel=kwargs["browser_channel"])
+ else:
+ logger.info("Using webdriver binary %s" % webdriver_binary)
+
+ if webdriver_binary:
+ kwargs["webdriver_binary"] = webdriver_binary
+ else:
+ raise WptrunError("Unable to locate or install operadriver binary")
+
+
+class EdgeChromium(BrowserSetup):
+ name = "MicrosoftEdge"
+ browser_cls = browser.EdgeChromium
+
+ def setup_kwargs(self, kwargs):
+ browser_channel = kwargs["browser_channel"]
+ if kwargs["binary"] is None:
+ binary = self.browser.find_binary(channel=browser_channel)
+ if binary:
+ logger.info("Using Edge binary %s" % binary)
+ kwargs["binary"] = binary
+ else:
+ raise WptrunError("Unable to locate Edge binary")
+
+ if kwargs["webdriver_binary"] is None:
+ webdriver_binary = None
+ if not kwargs["install_webdriver"]:
+ webdriver_binary = self.browser.find_webdriver()
+ if (webdriver_binary and not self.browser.webdriver_supports_browser(
+ webdriver_binary, kwargs["binary"])):
+ webdriver_binary = None
+
+ if webdriver_binary is None:
+ install = self.prompt_install("msedgedriver")
+
+ if install:
+ logger.info("Downloading msedgedriver")
+ webdriver_binary = self.browser.install_webdriver(
+ dest=self.venv.bin_path,
+ channel=browser_channel)
+ else:
+ logger.info("Using webdriver binary %s" % webdriver_binary)
+
+ if webdriver_binary:
+ kwargs["webdriver_binary"] = webdriver_binary
+ else:
+ raise WptrunError("Unable to locate or install msedgedriver binary")
+ if browser_channel in ("dev", "canary") and kwargs["enable_experimental"] is None:
+ logger.info("Automatically turning on experimental features for Edge Dev/Canary")
+ kwargs["enable_experimental"] = True
+
+
+class Edge(BrowserSetup):
+ name = "edge"
+ browser_cls = browser.Edge
+
+ def install(self, channel=None):
+ raise NotImplementedError
+
+ def setup_kwargs(self, kwargs):
+ if kwargs["webdriver_binary"] is None:
+ webdriver_binary = self.browser.find_webdriver()
+
+ if webdriver_binary is None:
+ raise WptrunError("""Unable to find WebDriver and we aren't yet clever enough to work out which
+version to download. Please go to the following URL and install the correct
+version for your Edge/Windows release somewhere on the %PATH%:
+
+https://developer.microsoft.com/en-us/microsoft-edge/tools/webdriver/
+""")
+ kwargs["webdriver_binary"] = webdriver_binary
+
+
+class EdgeWebDriver(Edge):
+ name = "edge_webdriver"
+ browser_cls = browser.EdgeWebDriver
+
+
+class InternetExplorer(BrowserSetup):
+ name = "ie"
+ browser_cls = browser.InternetExplorer
+
+ def install(self, channel=None):
+ raise NotImplementedError
+
+ def setup_kwargs(self, kwargs):
+ if kwargs["webdriver_binary"] is None:
+ webdriver_binary = self.browser.find_webdriver()
+
+ if webdriver_binary is None:
+ raise WptrunError("""Unable to find WebDriver and we aren't yet clever enough to work out which
+version to download. Please go to the following URL and install the driver for Internet Explorer
+somewhere on the %PATH%:
+
+https://selenium-release.storage.googleapis.com/index.html
+""")
+ kwargs["webdriver_binary"] = webdriver_binary
+
+
+class Safari(BrowserSetup):
+ name = "safari"
+ browser_cls = browser.Safari
+
+ def install(self, channel=None):
+ raise NotImplementedError
+
+ def setup_kwargs(self, kwargs):
+ if kwargs["webdriver_binary"] is None:
+ webdriver_binary = self.browser.find_webdriver(channel=kwargs["browser_channel"])
+
+ if webdriver_binary is None:
+ raise WptrunError("Unable to locate safaridriver binary")
+
+ kwargs["webdriver_binary"] = webdriver_binary
+
+
+class Sauce(BrowserSetup):
+ name = "sauce"
+ browser_cls = browser.Sauce
+
+ def install(self, channel=None):
+ raise NotImplementedError
+
+ def setup_kwargs(self, kwargs):
+ if kwargs["sauce_browser"] is None:
+ raise WptrunError("Missing required argument --sauce-browser")
+ if kwargs["sauce_version"] is None:
+ raise WptrunError("Missing required argument --sauce-version")
+ kwargs["test_types"] = ["testharness", "reftest"]
+
+
+class Servo(BrowserSetup):
+ name = "servo"
+ browser_cls = browser.Servo
+
+ def install(self, channel=None):
+ if self.prompt_install(self.name):
+ return self.browser.install(self.venv.path)
+
+ def setup_kwargs(self, kwargs):
+ if kwargs["binary"] is None:
+ binary = self.browser.find_binary(self.venv.path, None)
+
+ if binary is None:
+ raise WptrunError("Unable to find servo binary in PATH")
+ kwargs["binary"] = binary
+
+
+class ServoWebDriver(Servo):
+ name = "servodriver"
+ browser_cls = browser.ServoWebDriver
+
+
+class WebKit(BrowserSetup):
+ name = "webkit"
+ browser_cls = browser.WebKit
+
+ def install(self, channel=None):
+ raise NotImplementedError
+
+ def setup_kwargs(self, kwargs):
+ pass
+
+
+class WebKitGTKMiniBrowser(BrowserSetup):
+ name = "webkitgtk_minibrowser"
+ browser_cls = browser.WebKitGTKMiniBrowser
+
+ def install(self, channel=None):
+ if self.prompt_install(self.name):
+ return self.browser.install(self.venv.path, channel, self.prompt)
+
+ def setup_kwargs(self, kwargs):
+ if kwargs["binary"] is None:
+ binary = self.browser.find_binary(
+ venv_path=self.venv.path, channel=kwargs["browser_channel"])
+
+ if binary is None:
+ raise WptrunError("Unable to find MiniBrowser binary")
+ kwargs["binary"] = binary
+
+ if kwargs["webdriver_binary"] is None:
+ webdriver_binary = self.browser.find_webdriver(
+ venv_path=self.venv.path, channel=kwargs["browser_channel"])
+
+ if webdriver_binary is None:
+ raise WptrunError("Unable to find WebKitWebDriver in PATH")
+ kwargs["webdriver_binary"] = webdriver_binary
+
+
+class Epiphany(BrowserSetup):
+ name = "epiphany"
+ browser_cls = browser.Epiphany
+
+ def install(self, channel=None):
+ raise NotImplementedError
+
+ def setup_kwargs(self, kwargs):
+ if kwargs["binary"] is None:
+ binary = self.browser.find_binary()
+
+ if binary is None:
+ raise WptrunError("Unable to find epiphany in PATH")
+ kwargs["binary"] = binary
+
+ if kwargs["webdriver_binary"] is None:
+ webdriver_binary = self.browser.find_webdriver()
+
+ if webdriver_binary is None:
+ raise WptrunError("Unable to find WebKitWebDriver in PATH")
+ kwargs["webdriver_binary"] = webdriver_binary
+
+
+product_setup = {
+ "android_weblayer": AndroidWeblayer,
+ "android_webview": AndroidWebview,
+ "firefox": Firefox,
+ "firefox_android": FirefoxAndroid,
+ "chrome": Chrome,
+ "chrome_android": ChromeAndroid,
+ "chrome_ios": ChromeiOS,
+ "chromium": Chromium,
+ "content_shell": ContentShell,
+ "edgechromium": EdgeChromium,
+ "edge": Edge,
+ "edge_webdriver": EdgeWebDriver,
+ "ie": InternetExplorer,
+ "safari": Safari,
+ "servo": Servo,
+ "servodriver": ServoWebDriver,
+ "sauce": Sauce,
+ "opera": Opera,
+ "webkit": WebKit,
+ "webkitgtk_minibrowser": WebKitGTKMiniBrowser,
+ "epiphany": Epiphany,
+}
+
+
+def setup_logging(kwargs, default_config=None, formatter_defaults=None):
+ import mozlog
+ from wptrunner import wptrunner
+
+ global logger
+
+ # Use the grouped formatter by default where mozlog 3.9+ is installed
+ if default_config is None:
+ if hasattr(mozlog.formatters, "GroupingFormatter"):
+ default_formatter = "grouped"
+ else:
+ default_formatter = "mach"
+ default_config = {default_formatter: sys.stdout}
+ wptrunner.setup_logging(kwargs, default_config, formatter_defaults=formatter_defaults)
+ logger = wptrunner.logger
+ return logger
+
+
+def setup_wptrunner(venv, **kwargs):
+ from wptrunner import wptcommandline
+
+ kwargs = kwargs.copy()
+
+ kwargs["product"] = kwargs["product"].replace("-", "_")
+
+ check_environ(kwargs["product"])
+ args_general(kwargs)
+
+ if kwargs["product"] not in product_setup:
+ raise WptrunError("Unsupported product %s" % kwargs["product"])
+
+ setup_cls = product_setup[kwargs["product"]](venv, kwargs["prompt"])
+ setup_cls.install_requirements()
+
+ affected_revish = kwargs.get("affected")
+ if affected_revish is not None:
+ files_changed, _ = testfiles.files_changed(
+ affected_revish, include_uncommitted=True, include_new=True)
+ # TODO: Perhaps use wptrunner.testloader.ManifestLoader here
+ # and remove the manifest-related code from testfiles.
+ # https://github.com/web-platform-tests/wpt/issues/14421
+ tests_changed, tests_affected = testfiles.affected_testfiles(
+ files_changed, manifest_path=kwargs.get("manifest_path"), manifest_update=kwargs["manifest_update"])
+ test_list = tests_changed | tests_affected
+ logger.info("Identified %s affected tests" % len(test_list))
+ test_list = [os.path.relpath(item, wpt_root) for item in test_list]
+ kwargs["test_list"] += test_list
+ kwargs["default_exclude"] = True
+
+ if kwargs["install_browser"] and not kwargs["channel"]:
+        logger.info("--install-browser is given but --channel is not set; defaulting to the nightly channel")
+ kwargs["channel"] = "nightly"
+
+ if kwargs["channel"]:
+ channel = install.get_channel(kwargs["product"], kwargs["channel"])
+ if channel is not None:
+ if channel != kwargs["channel"]:
+ logger.info("Interpreting channel '%s' as '%s'" % (kwargs["channel"],
+ channel))
+ kwargs["browser_channel"] = channel
+ else:
+ logger.info("Valid channels for %s not known; using argument unmodified" %
+ kwargs["product"])
+ kwargs["browser_channel"] = kwargs["channel"]
+
+ if kwargs["install_browser"]:
+ logger.info("Installing browser")
+ kwargs["binary"] = setup_cls.install(channel=channel)
+
+ setup_cls.setup(kwargs)
+
+ # Remove kwargs we handle here
+ wptrunner_kwargs = kwargs.copy()
+ for kwarg in ["affected",
+ "install_browser",
+ "install_webdriver",
+ "channel",
+ "prompt"]:
+ del wptrunner_kwargs[kwarg]
+
+ wptcommandline.check_args(wptrunner_kwargs)
+
+ wptrunner_path = os.path.join(wpt_root, "tools", "wptrunner")
+
+ if not venv.skip_virtualenv_setup:
+ venv.install_requirements(os.path.join(wptrunner_path, "requirements.txt"))
+
+ # Only update browser_version if it was not given as a command line
+ # argument, so that it can be overridden on the command line.
+ if not wptrunner_kwargs["browser_version"]:
+ wptrunner_kwargs["browser_version"] = setup_cls.browser.version(
+ binary=wptrunner_kwargs.get("binary") or wptrunner_kwargs.get("package_name"),
+ webdriver_binary=wptrunner_kwargs.get("webdriver_binary"),
+ )
+
+ return wptrunner_kwargs
+
+
+def run(venv, **kwargs):
+ setup_logging(kwargs)
+
+ wptrunner_kwargs = setup_wptrunner(venv, **kwargs)
+
+ rv = run_single(venv, **wptrunner_kwargs) > 0
+
+ return rv
+
+
+def run_single(venv, **kwargs):
+ from wptrunner import wptrunner
+ return wptrunner.start(**kwargs)
diff --git a/testing/web-platform/tests/tools/wpt/testfiles.py b/testing/web-platform/tests/tools/wpt/testfiles.py
new file mode 100644
index 0000000000..172ad201fc
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/testfiles.py
@@ -0,0 +1,442 @@
+import argparse
+import logging
+import os
+import re
+import subprocess
+import sys
+
+from collections import OrderedDict
+
+try:
+ from ..manifest import manifest
+ from ..manifest.utils import git as get_git_cmd
+except ValueError:
+ # if we're not within the tools package, the above is an import from above
+ # the top-level which raises ValueError, so reimport it with an absolute
+ # reference
+ #
+    # note we need both because, depending on the caller, the paths may or may
+    # not be set up to handle both, and MYPY has no knowledge of our sys.path
+    # magic
+ from manifest import manifest # type: ignore
+ from manifest.utils import git as get_git_cmd # type: ignore
+
+MYPY = False
+if MYPY:
+ # MYPY is set to True when run under Mypy.
+ from typing import Any
+ from typing import Dict
+ from typing import Iterable
+ from typing import List
+ from typing import Optional
+ from typing import Pattern
+ from typing import Sequence
+ from typing import Set
+ from typing import Text
+ from typing import Tuple
+
+DEFAULT_IGNORE_RULES = ("resources/testharness*", "resources/testdriver*")
+
+here = os.path.dirname(__file__)
+wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
+
+logger = logging.getLogger()
+
+
+def display_branch_point():
+ # type: () -> None
+ print(branch_point())
+
+
+def branch_point():
+ # type: () -> Optional[Text]
+ git = get_git_cmd(wpt_root)
+ if git is None:
+ raise Exception("git not found")
+
+ if (os.environ.get("GITHUB_PULL_REQUEST", "false") == "false" and
+ os.environ.get("GITHUB_BRANCH") == "master"):
+ # For builds on the master branch just return the HEAD commit
+ return git("rev-parse", "HEAD")
+ elif os.environ.get("GITHUB_PULL_REQUEST", "false") != "false":
+ # This is a PR, so the base branch is in GITHUB_BRANCH
+ base_branch = os.environ.get("GITHUB_BRANCH")
+        assert base_branch, "GITHUB_BRANCH environment variable must be defined"
+ branch_point = git("merge-base", "HEAD", base_branch) # type: Optional[Text]
+ else:
+ # Otherwise we aren't on a PR, so we try to find commits that are only in the
+ # current branch c.f.
+ # http://stackoverflow.com/questions/13460152/find-first-ancestor-commit-in-another-branch
+
+ # parse HEAD into an object ref
+ head = git("rev-parse", "HEAD")
+
+ # get everything in refs/heads and refs/remotes that doesn't include HEAD
+ not_heads = [item for item in git("rev-parse", "--not", "--branches", "--remotes").split("\n")
+ if item and item != "^%s" % head]
+
+ # get all commits on HEAD but not reachable from anything in not_heads
+ cmd = ["git", "rev-list", "--topo-order", "--parents", "--stdin", "HEAD"]
+ proc = subprocess.Popen(cmd,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ cwd=wpt_root)
+ commits_bytes, _ = proc.communicate(b"\n".join(item.encode("ascii") for item in not_heads))
+ if proc.returncode != 0:
+ raise subprocess.CalledProcessError(proc.returncode,
+ cmd,
+ commits_bytes)
+
+ commit_parents = OrderedDict() # type: Dict[Text, List[Text]]
+ commits = commits_bytes.decode("ascii")
+ if commits:
+ for line in commits.split("\n"):
+ line_commits = line.split(" ")
+ commit_parents[line_commits[0]] = line_commits[1:]
+
+ branch_point = None
+
+ # if there are any commits, take the first parent that is not in commits
+ for commit, parents in commit_parents.items():
+ for parent in parents:
+ if parent not in commit_parents:
+ branch_point = parent
+ break
+
+ if branch_point:
+ break
+
+ # if we had any commits, we should now have a branch point
+ assert branch_point or not commit_parents
+
+ # The above heuristic will fail in the following cases:
+ #
+ # - The current branch has fallen behind the remote version
+ # - Changes on the current branch were rebased and therefore do not exist on any
+ # other branch. This will result in the selection of a commit that is earlier
+ # in the history than desired (as determined by calculating the later of the
+ # branch point and the merge base)
+ #
+ # In either case, fall back to using the merge base as the branch point.
+ merge_base = git("merge-base", "HEAD", "origin/master")
+ if (branch_point is None or
+ (branch_point != merge_base and
+ not git("log", "--oneline", f"{merge_base}..{branch_point}").strip())):
+ logger.debug("Using merge-base as the branch point")
+ branch_point = merge_base
+ else:
+ logger.debug("Using first commit on another branch as the branch point")
+
+ logger.debug("Branch point from master: %s" % branch_point)
+ if branch_point:
+ branch_point = branch_point.strip()
+ return branch_point
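+# Illustrative behaviour: on a local feature branch, branch_point() typically
+# returns the sha at which the branch diverged from origin/master (the later of
+# the commit found by the heuristic above and the merge base); on master CI
+# builds it simply returns the HEAD sha.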
+
+
+def compile_ignore_rule(rule):
+ # type: (Text) -> Pattern[Text]
+ rule = rule.replace(os.path.sep, "/")
+ parts = rule.split("/")
+ re_parts = []
+ for part in parts:
+ if part.endswith("**"):
+ re_parts.append(re.escape(part[:-2]) + ".*")
+ elif part.endswith("*"):
+ re_parts.append(re.escape(part[:-1]) + "[^/]*")
+ else:
+ re_parts.append(re.escape(part))
+ return re.compile("^%s$" % "/".join(re_parts))
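+# Illustrative example (added here for illustration only): the default rule
+# "resources/testharness*" compiles to the pattern "^resources/testharness[^/]*$",
+# which matches "resources/testharness.js" but not "resources/sub/testharness.js",
+# while a trailing "**" would match across path separators as well.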
+
+
+def repo_files_changed(revish, include_uncommitted=False, include_new=False):
+ # type: (Text, bool, bool) -> Set[Text]
+ git = get_git_cmd(wpt_root)
+ if git is None:
+ raise Exception("git not found")
+
+ if "..." in revish:
+        raise Exception(f"... not supported when finding files changed (revish: {revish!r})")
+
+ if ".." in revish:
+ # ".." isn't treated as a range for git-diff; what we want is
+ # everything reachable from B but not A, and git diff A...B
+ # gives us that (via the merge-base)
+ revish = revish.replace("..", "...")
+
+ files_list = git("diff", "--no-renames", "--name-only", "-z", revish).split("\0")
+ assert not files_list[-1], f"final item should be empty, got: {files_list[-1]!r}"
+ files = set(files_list[:-1])
+
+ if include_uncommitted:
+ entries = git("status", "-z").split("\0")
+ assert not entries[-1]
+ entries = entries[:-1]
+ for item in entries:
+ status, path = item.split(" ", 1)
+ if status == "??" and not include_new:
+ continue
+ else:
+ if not os.path.isdir(path):
+ files.add(path)
+ else:
+ for dirpath, dirnames, filenames in os.walk(path):
+ for filename in filenames:
+ files.add(os.path.join(dirpath, filename))
+
+ return files
+
+
+def exclude_ignored(files, ignore_rules):
+ # type: (Iterable[Text], Optional[Sequence[Text]]) -> Tuple[List[Text], List[Text]]
+ if ignore_rules is None:
+        ignore_rules = DEFAULT_IGNORE_RULES
+ compiled_ignore_rules = [compile_ignore_rule(item) for item in set(ignore_rules)]
+
+ changed = []
+ ignored = []
+ for item in sorted(files):
+ fullpath = os.path.join(wpt_root, item)
+ rule_path = item.replace(os.path.sep, "/")
+ for rule in compiled_ignore_rules:
+ if rule.match(rule_path):
+ ignored.append(fullpath)
+ break
+ else:
+ changed.append(fullpath)
+
+ return changed, ignored
+
+
+def files_changed(revish, # type: Text
+ ignore_rules=None, # type: Optional[Sequence[Text]]
+ include_uncommitted=False, # type: bool
+ include_new=False # type: bool
+ ):
+ # type: (...) -> Tuple[List[Text], List[Text]]
+ """Find files changed in certain revisions.
+
+ The function passes `revish` directly to `git diff`, so `revish` can have a
+ variety of forms; see `git diff --help` for details. Files in the diff that
+ are matched by `ignore_rules` are excluded.
+ """
+ files = repo_files_changed(revish,
+ include_uncommitted=include_uncommitted,
+ include_new=include_new)
+ if not files:
+ return [], []
+
+ return exclude_ignored(files, ignore_rules)
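+# Illustrative usage (the revision range is an assumption): files_changed(
+# "1234abc..HEAD", include_uncommitted=True, include_new=True) returns a pair
+# of lists of absolute paths, (changed, ignored), where "ignored" holds the
+# files matched by the ignore rules.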
+
+
+def _in_repo_root(full_path):
+ # type: (Text) -> bool
+ rel_path = os.path.relpath(full_path, wpt_root)
+ path_components = rel_path.split(os.sep)
+ return len(path_components) < 2
+
+
+def load_manifest(manifest_path=None, manifest_update=True):
+ # type: (Optional[Text], bool) -> manifest.Manifest
+ if manifest_path is None:
+ manifest_path = os.path.join(wpt_root, "MANIFEST.json")
+ return manifest.load_and_update(wpt_root, manifest_path, "/",
+ update=manifest_update)
+
+
+def affected_testfiles(files_changed, # type: Iterable[Text]
+ skip_dirs=None, # type: Optional[Set[Text]]
+ manifest_path=None, # type: Optional[Text]
+ manifest_update=True # type: bool
+ ):
+ # type: (...) -> Tuple[Set[Text], Set[Text]]
+ """Determine and return list of test files that reference changed files."""
+ if skip_dirs is None:
+ skip_dirs = {"conformance-checkers", "docs", "tools"}
+ affected_testfiles = set()
+ # Exclude files that are in the repo root, because
+ # they are not part of any test.
+ files_changed = [f for f in files_changed if not _in_repo_root(f)]
+ nontests_changed = set(files_changed)
+ wpt_manifest = load_manifest(manifest_path, manifest_update)
+
+ test_types = ["crashtest", "print-reftest", "reftest", "testharness", "wdspec"]
+ support_files = {os.path.join(wpt_root, path)
+ for _, path, _ in wpt_manifest.itertypes("support")}
+ wdspec_test_files = {os.path.join(wpt_root, path)
+ for _, path, _ in wpt_manifest.itertypes("wdspec")}
+ test_files = {os.path.join(wpt_root, path)
+ for _, path, _ in wpt_manifest.itertypes(*test_types)}
+
+ interface_dir = os.path.join(wpt_root, 'interfaces')
+ interfaces_files = {os.path.join(wpt_root, 'interfaces', filename)
+ for filename in os.listdir(interface_dir)}
+
+ interfaces_changed = interfaces_files.intersection(nontests_changed)
+ nontests_changed = nontests_changed.intersection(support_files)
+
+ tests_changed = {item for item in files_changed if item in test_files}
+
+ nontest_changed_paths = set()
+ rewrites = {"/resources/webidl2/lib/webidl2.js": "/resources/WebIDLParser.js"} # type: Dict[Text, Text]
+ for full_path in nontests_changed:
+ rel_path = os.path.relpath(full_path, wpt_root)
+ path_components = rel_path.split(os.sep)
+ top_level_subdir = path_components[0]
+ if top_level_subdir in skip_dirs:
+ continue
+ repo_path = "/" + os.path.relpath(full_path, wpt_root).replace(os.path.sep, "/")
+ if repo_path in rewrites:
+ repo_path = rewrites[repo_path]
+ full_path = os.path.join(wpt_root, repo_path[1:].replace("/", os.path.sep))
+ nontest_changed_paths.add((full_path, repo_path))
+
+ interfaces_changed_names = [os.path.splitext(os.path.basename(interface))[0]
+ for interface in interfaces_changed]
+
+ def affected_by_wdspec(test):
+ # type: (Text) -> bool
+ affected = False
+ if test in wdspec_test_files:
+ for support_full_path, _ in nontest_changed_paths:
+ # parent of support file or of "support" directory
+ parent = os.path.dirname(support_full_path)
+ if os.path.basename(parent) == "support":
+ parent = os.path.dirname(parent)
+ relpath = os.path.relpath(test, parent)
+ if not relpath.startswith(os.pardir):
+ # testfile is in subtree of support file
+ affected = True
+ break
+ return affected
+
+ def affected_by_interfaces(file_contents):
+ # type: (Text) -> bool
+ if len(interfaces_changed_names) > 0:
+ if 'idlharness.js' in file_contents:
+ for interface in interfaces_changed_names:
+ regex = '[\'"]' + interface + '(\\.idl)?[\'"]'
+ if re.search(regex, file_contents):
+ return True
+ return False
+
+ for root, dirs, fnames in os.walk(wpt_root):
+ # Walk top_level_subdir looking for test files containing either the
+ # relative filepath or absolute filepath to the changed files.
+ if root == wpt_root:
+ for dir_name in skip_dirs:
+ dirs.remove(dir_name)
+ for fname in fnames:
+ test_full_path = os.path.join(root, fname)
+ # Skip any file that's not a test file.
+ if test_full_path not in test_files:
+ continue
+ if affected_by_wdspec(test_full_path):
+ affected_testfiles.add(test_full_path)
+ continue
+
+ with open(test_full_path, "rb") as fh:
+ raw_file_contents = fh.read() # type: bytes
+ if raw_file_contents.startswith(b"\xfe\xff"):
+ file_contents = raw_file_contents.decode("utf-16be", "replace") # type: Text
+ elif raw_file_contents.startswith(b"\xff\xfe"):
+ file_contents = raw_file_contents.decode("utf-16le", "replace")
+ else:
+ file_contents = raw_file_contents.decode("utf8", "replace")
+ for full_path, repo_path in nontest_changed_paths:
+ rel_path = os.path.relpath(full_path, root).replace(os.path.sep, "/")
+ if rel_path in file_contents or repo_path in file_contents or affected_by_interfaces(file_contents):
+ affected_testfiles.add(test_full_path)
+ continue
+
+ return tests_changed, affected_testfiles
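+# Illustrative usage (variable names are placeholders): with "changed" holding
+# absolute paths of modified files, tests_changed, affected = affected_testfiles(
+# changed) yields the changed files that are themselves tests and the tests that
+# reference the changed support or interface files, respectively.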
+
+
+def get_parser():
+ # type: () -> argparse.ArgumentParser
+ parser = argparse.ArgumentParser()
+ parser.add_argument("revish", default=None, help="Commits to consider. Defaults to the "
+ "commits on the current branch", nargs="?")
+ parser.add_argument("--ignore-rule", action="append",
+ help="Override the rules for paths to exclude from lists of changes. "
+ "Rules are paths relative to the test root, with * before a separator "
+ "or the end matching anything other than a path separator and ** in that "
+ "position matching anything. This flag can be used multiple times for "
+ "multiple rules. Specifying this flag overrides the default: " +
+                        ", ".join(DEFAULT_IGNORE_RULES))
+ parser.add_argument("--modified", action="store_true",
+ help="Include files under version control that have been "
+ "modified or staged")
+ parser.add_argument("--new", action="store_true",
+ help="Include files in the worktree that are not in version control")
+ parser.add_argument("--show-type", action="store_true",
+ help="Print the test type along with each affected test")
+ parser.add_argument("--null", action="store_true",
+ help="Separate items with a null byte")
+ return parser
+
+
+def get_parser_affected():
+ # type: () -> argparse.ArgumentParser
+ parser = get_parser()
+ parser.add_argument("--metadata",
+ dest="metadata_root",
+ action="store",
+ default=wpt_root,
+ help="Directory that will contain MANIFEST.json")
+ return parser
+
+
+def get_revish(**kwargs):
+ # type: (**Any) -> Text
+ revish = kwargs.get("revish")
+ if revish is None:
+ revish = "%s..HEAD" % branch_point()
+ return revish.strip()
+
+
+def run_changed_files(**kwargs):
+ # type: (**Any) -> None
+ revish = get_revish(**kwargs)
+ changed, _ = files_changed(revish,
+ kwargs["ignore_rule"],
+ include_uncommitted=kwargs["modified"],
+ include_new=kwargs["new"])
+
+ separator = "\0" if kwargs["null"] else "\n"
+
+ for item in sorted(changed):
+ line = os.path.relpath(item, wpt_root) + separator
+ sys.stdout.write(line)
+
+
+def run_tests_affected(**kwargs):
+ # type: (**Any) -> None
+ revish = get_revish(**kwargs)
+ changed, _ = files_changed(revish,
+ kwargs["ignore_rule"],
+ include_uncommitted=kwargs["modified"],
+ include_new=kwargs["new"])
+ manifest_path = os.path.join(kwargs["metadata_root"], "MANIFEST.json")
+ tests_changed, dependents = affected_testfiles(
+ changed,
+ {"conformance-checkers", "docs", "tools"},
+ manifest_path=manifest_path
+ )
+
+ message = "{path}"
+ if kwargs["show_type"]:
+ wpt_manifest = load_manifest(manifest_path)
+ message = "{path}\t{item_type}"
+
+ message += "\0" if kwargs["null"] else "\n"
+
+ for item in sorted(tests_changed | dependents):
+ results = {
+ "path": os.path.relpath(item, wpt_root)
+ }
+ if kwargs["show_type"]:
+ item_types = {i.item_type for i in wpt_manifest.iterpath(results["path"])}
+ if len(item_types) != 1:
+ item_types = {" ".join(item_types)}
+ results["item_type"] = item_types.pop()
+ sys.stdout.write(message.format(**results))
diff --git a/testing/web-platform/tests/tools/wpt/tests/__init__.py b/testing/web-platform/tests/tools/wpt/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/wpt/tests/latest_mozilla_central.txt b/testing/web-platform/tests/tools/wpt/tests/latest_mozilla_central.txt
new file mode 100644
index 0000000000..7078a36b0c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/latest_mozilla_central.txt
@@ -0,0 +1,20834 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta charset="UTF-8">
+ <title>Directory Listing: /pub/firefox/nightly/latest-mozilla-central/</title>
+ </head>
+ <body>
+ <h1>Index of /pub/firefox/nightly/latest-mozilla-central/</h1>
+ <table>
+ <tr>
+ <th>Type</th>
+ <th>Name</th>
+ <th>Size</th>
+ <th>Last Modified</th>
+ </tr>
+
+ <tr>
+ <td>Dir</td>
+ <td><a href="/pub/firefox/nightly/">..</a></td>
+ <td></td>
+ <td></td>
+ </tr>
+
+
+ <tr>
+ <td>Dir</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/mar-tools/">mar-tools/</a></td>
+ <td></td>
+ <td></td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/Firefox%20Installer.en-US.exe">Firefox Installer.en-US.exe</a></td>
+ <td>269K</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/README">README</a></td>
+ <td>82</td>
+ <td>17-Nov-2015 10:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.langpack.xpi">firefox-57.0a1.en-US.langpack.xpi</a></td>
+ <td>424K</td>
+ <td>21-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.awsy.tests.zip">firefox-57.0a1.en-US.linux-i686.awsy.tests.zip</a></td>
+ <td>14K</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.checksums">firefox-57.0a1.en-US.linux-i686.checksums</a></td>
+ <td>8K</td>
+ <td>21-Sep-2017 12:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.checksums.asc">firefox-57.0a1.en-US.linux-i686.checksums.asc</a></td>
+ <td>836</td>
+ <td>21-Sep-2017 12:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.common.tests.zip">firefox-57.0a1.en-US.linux-i686.common.tests.zip</a></td>
+ <td>45M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.complete.mar">firefox-57.0a1.en-US.linux-i686.complete.mar</a></td>
+ <td>47M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.cppunittest.tests.zip">firefox-57.0a1.en-US.linux-i686.cppunittest.tests.zip</a></td>
+ <td>13M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.crashreporter-symbols.zip">firefox-57.0a1.en-US.linux-i686.crashreporter-symbols.zip</a></td>
+ <td>108M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.mochitest.tests.zip">firefox-57.0a1.en-US.linux-i686.mochitest.tests.zip</a></td>
+ <td>73M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.mozinfo.json">firefox-57.0a1.en-US.linux-i686.mozinfo.json</a></td>
+ <td>871</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.reftest.tests.zip">firefox-57.0a1.en-US.linux-i686.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.talos.tests.zip">firefox-57.0a1.en-US.linux-i686.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>21-Sep-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.tar.bz2">firefox-57.0a1.en-US.linux-i686.tar.bz2</a></td>
+ <td>60M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.tar.bz2.asc">firefox-57.0a1.en-US.linux-i686.tar.bz2.asc</a></td>
+ <td>836</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.test_packages.json">firefox-57.0a1.en-US.linux-i686.test_packages.json</a></td>
+ <td>1K</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.txt">firefox-57.0a1.en-US.linux-i686.txt</a></td>
+ <td>99</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.web-platform.tests.tar.gz">firefox-57.0a1.en-US.linux-i686.web-platform.tests.tar.gz</a></td>
+ <td>49M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686.xpcshell.tests.zip">firefox-57.0a1.en-US.linux-i686.xpcshell.tests.zip</a></td>
+ <td>10M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-i686_info.txt">firefox-57.0a1.en-US.linux-i686_info.txt</a></td>
+ <td>23</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.awsy.tests.zip">firefox-57.0a1.en-US.linux-x86_64.awsy.tests.zip</a></td>
+ <td>14K</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.checksums">firefox-57.0a1.en-US.linux-x86_64.checksums</a></td>
+ <td>8K</td>
+ <td>21-Sep-2017 12:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.checksums.asc">firefox-57.0a1.en-US.linux-x86_64.checksums.asc</a></td>
+ <td>836</td>
+ <td>21-Sep-2017 12:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.common.tests.zip">firefox-57.0a1.en-US.linux-x86_64.common.tests.zip</a></td>
+ <td>52M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.complete.mar">firefox-57.0a1.en-US.linux-x86_64.complete.mar</a></td>
+ <td>47M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.cppunittest.tests.zip">firefox-57.0a1.en-US.linux-x86_64.cppunittest.tests.zip</a></td>
+ <td>13M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.crashreporter-symbols.zip">firefox-57.0a1.en-US.linux-x86_64.crashreporter-symbols.zip</a></td>
+ <td>103M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.json">firefox-57.0a1.en-US.linux-x86_64.json</a></td>
+ <td>877</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.mochitest.tests.zip">firefox-57.0a1.en-US.linux-x86_64.mochitest.tests.zip</a></td>
+ <td>73M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.mozinfo.json">firefox-57.0a1.en-US.linux-x86_64.mozinfo.json</a></td>
+ <td>876</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.reftest.tests.zip">firefox-57.0a1.en-US.linux-x86_64.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.talos.tests.zip">firefox-57.0a1.en-US.linux-x86_64.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.tar.bz2">firefox-57.0a1.en-US.linux-x86_64.tar.bz2</a></td>
+ <td>59M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.tar.bz2.asc">firefox-57.0a1.en-US.linux-x86_64.tar.bz2.asc</a></td>
+ <td>836</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.test_packages.json">firefox-57.0a1.en-US.linux-x86_64.test_packages.json</a></td>
+ <td>1K</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.txt">firefox-57.0a1.en-US.linux-x86_64.txt</a></td>
+ <td>99</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.web-platform.tests.tar.gz">firefox-57.0a1.en-US.linux-x86_64.web-platform.tests.tar.gz</a></td>
+ <td>49M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64.xpcshell.tests.zip">firefox-57.0a1.en-US.linux-x86_64.xpcshell.tests.zip</a></td>
+ <td>10M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.linux-x86_64_info.txt">firefox-57.0a1.en-US.linux-x86_64_info.txt</a></td>
+ <td>23</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.awsy.tests.zip">firefox-57.0a1.en-US.mac.awsy.tests.zip</a></td>
+ <td>14K</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.checksums">firefox-57.0a1.en-US.mac.checksums</a></td>
+ <td>7K</td>
+ <td>21-Sep-2017 11:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.checksums.asc">firefox-57.0a1.en-US.mac.checksums.asc</a></td>
+ <td>836</td>
+ <td>21-Sep-2017 11:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.common.tests.zip">firefox-57.0a1.en-US.mac.common.tests.zip</a></td>
+ <td>35M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.complete.mar">firefox-57.0a1.en-US.mac.complete.mar</a></td>
+ <td>46M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.cppunittest.tests.zip">firefox-57.0a1.en-US.mac.cppunittest.tests.zip</a></td>
+ <td>8M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.crashreporter-symbols.zip">firefox-57.0a1.en-US.mac.crashreporter-symbols.zip</a></td>
+ <td>118M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.dmg">firefox-57.0a1.en-US.mac.dmg</a></td>
+ <td>63M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.json">firefox-57.0a1.en-US.mac.json</a></td>
+ <td>1K</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.mochitest.tests.zip">firefox-57.0a1.en-US.mac.mochitest.tests.zip</a></td>
+ <td>72M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.mozinfo.json">firefox-57.0a1.en-US.mac.mozinfo.json</a></td>
+ <td>877</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.reftest.tests.zip">firefox-57.0a1.en-US.mac.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.talos.tests.zip">firefox-57.0a1.en-US.mac.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.test_packages.json">firefox-57.0a1.en-US.mac.test_packages.json</a></td>
+ <td>1K</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.txt">firefox-57.0a1.en-US.mac.txt</a></td>
+ <td>99</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.web-platform.tests.tar.gz">firefox-57.0a1.en-US.mac.web-platform.tests.tar.gz</a></td>
+ <td>49M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac.xpcshell.tests.zip">firefox-57.0a1.en-US.mac.xpcshell.tests.zip</a></td>
+ <td>9M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.mac_info.txt">firefox-57.0a1.en-US.mac_info.txt</a></td>
+ <td>23</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.awsy.tests.zip">firefox-57.0a1.en-US.win32.awsy.tests.zip</a></td>
+ <td>14K</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.checksums">firefox-57.0a1.en-US.win32.checksums</a></td>
+ <td>8K</td>
+ <td>21-Sep-2017 13:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.checksums.asc">firefox-57.0a1.en-US.win32.checksums.asc</a></td>
+ <td>836</td>
+ <td>21-Sep-2017 13:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.common.tests.zip">firefox-57.0a1.en-US.win32.common.tests.zip</a></td>
+ <td>38M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.complete.mar">firefox-57.0a1.en-US.win32.complete.mar</a></td>
+ <td>38M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.cppunittest.tests.zip">firefox-57.0a1.en-US.win32.cppunittest.tests.zip</a></td>
+ <td>8M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.crashreporter-symbols.zip">firefox-57.0a1.en-US.win32.crashreporter-symbols.zip</a></td>
+ <td>39M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.installer-stub.exe">firefox-57.0a1.en-US.win32.installer-stub.exe</a></td>
+ <td>288K</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.installer.exe">firefox-57.0a1.en-US.win32.installer.exe</a></td>
+ <td>36M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.json">firefox-57.0a1.en-US.win32.json</a></td>
+ <td>832</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.mochitest.tests.zip">firefox-57.0a1.en-US.win32.mochitest.tests.zip</a></td>
+ <td>72M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.mozinfo.json">firefox-57.0a1.en-US.win32.mozinfo.json</a></td>
+ <td>844</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.reftest.tests.zip">firefox-57.0a1.en-US.win32.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.talos.tests.zip">firefox-57.0a1.en-US.win32.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.test_packages.json">firefox-57.0a1.en-US.win32.test_packages.json</a></td>
+ <td>1K</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.txt">firefox-57.0a1.en-US.win32.txt</a></td>
+ <td>100</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.web-platform.tests.tar.gz">firefox-57.0a1.en-US.win32.web-platform.tests.tar.gz</a></td>
+ <td>49M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.xpcshell.tests.zip">firefox-57.0a1.en-US.win32.xpcshell.tests.zip</a></td>
+ <td>9M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32.zip">firefox-57.0a1.en-US.win32.zip</a></td>
+ <td>52M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win32_info.txt">firefox-57.0a1.en-US.win32_info.txt</a></td>
+ <td>23</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.awsy.tests.zip">firefox-57.0a1.en-US.win64.awsy.tests.zip</a></td>
+ <td>14K</td>
+ <td>21-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.checksums">firefox-57.0a1.en-US.win64.checksums</a></td>
+ <td>7K</td>
+ <td>21-Sep-2017 13:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.checksums.asc">firefox-57.0a1.en-US.win64.checksums.asc</a></td>
+ <td>836</td>
+ <td>21-Sep-2017 13:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.common.tests.zip">firefox-57.0a1.en-US.win64.common.tests.zip</a></td>
+ <td>38M</td>
+ <td>21-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.complete.mar">firefox-57.0a1.en-US.win64.complete.mar</a></td>
+ <td>41M</td>
+ <td>21-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.cppunittest.tests.zip">firefox-57.0a1.en-US.win64.cppunittest.tests.zip</a></td>
+ <td>9M</td>
+ <td>21-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.crashreporter-symbols.zip">firefox-57.0a1.en-US.win64.crashreporter-symbols.zip</a></td>
+ <td>34M</td>
+ <td>21-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.installer.exe">firefox-57.0a1.en-US.win64.installer.exe</a></td>
+ <td>38M</td>
+ <td>21-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.json">firefox-57.0a1.en-US.win64.json</a></td>
+ <td>834</td>
+ <td>21-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.mochitest.tests.zip">firefox-57.0a1.en-US.win64.mochitest.tests.zip</a></td>
+ <td>72M</td>
+ <td>21-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.mozinfo.json">firefox-57.0a1.en-US.win64.mozinfo.json</a></td>
+ <td>847</td>
+ <td>21-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.reftest.tests.zip">firefox-57.0a1.en-US.win64.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>21-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.talos.tests.zip">firefox-57.0a1.en-US.win64.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>21-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.test_packages.json">firefox-57.0a1.en-US.win64.test_packages.json</a></td>
+ <td>1K</td>
+ <td>21-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.txt">firefox-57.0a1.en-US.win64.txt</a></td>
+ <td>100</td>
+ <td>21-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.web-platform.tests.tar.gz">firefox-57.0a1.en-US.win64.web-platform.tests.tar.gz</a></td>
+ <td>49M</td>
+ <td>21-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.xpcshell.tests.zip">firefox-57.0a1.en-US.win64.xpcshell.tests.zip</a></td>
+ <td>9M</td>
+ <td>21-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64.zip">firefox-57.0a1.en-US.win64.zip</a></td>
+ <td>56M</td>
+ <td>21-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-57.0a1.en-US.win64_info.txt">firefox-57.0a1.en-US.win64_info.txt</a></td>
+ <td>23</td>
+ <td>21-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.langpack.xpi">firefox-58.0a1.en-US.langpack.xpi</a></td>
+ <td>433K</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.awsy.tests.zip">firefox-58.0a1.en-US.linux-i686.awsy.tests.zip</a></td>
+ <td>16K</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.checksums">firefox-58.0a1.en-US.linux-i686.checksums</a></td>
+ <td>8K</td>
+ <td>13-Nov-2017 00:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.checksums.asc">firefox-58.0a1.en-US.linux-i686.checksums.asc</a></td>
+ <td>836</td>
+ <td>13-Nov-2017 00:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.common.tests.zip">firefox-58.0a1.en-US.linux-i686.common.tests.zip</a></td>
+ <td>47M</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.complete.mar">firefox-58.0a1.en-US.linux-i686.complete.mar</a></td>
+ <td>43M</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.cppunittest.tests.zip">firefox-58.0a1.en-US.linux-i686.cppunittest.tests.zip</a></td>
+ <td>11M</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.crashreporter-symbols.zip">firefox-58.0a1.en-US.linux-i686.crashreporter-symbols.zip</a></td>
+ <td>121M</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.json">firefox-58.0a1.en-US.linux-i686.json</a></td>
+ <td>911</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.mochitest.tests.zip">firefox-58.0a1.en-US.linux-i686.mochitest.tests.zip</a></td>
+ <td>73M</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.mozinfo.json">firefox-58.0a1.en-US.linux-i686.mozinfo.json</a></td>
+ <td>871</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.reftest.tests.zip">firefox-58.0a1.en-US.linux-i686.reftest.tests.zip</a></td>
+ <td>57M</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.talos.tests.zip">firefox-58.0a1.en-US.linux-i686.talos.tests.zip</a></td>
+ <td>17M</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.tar.bz2">firefox-58.0a1.en-US.linux-i686.tar.bz2</a></td>
+ <td>56M</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.tar.bz2.asc">firefox-58.0a1.en-US.linux-i686.tar.bz2.asc</a></td>
+ <td>836</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.test_packages.json">firefox-58.0a1.en-US.linux-i686.test_packages.json</a></td>
+ <td>1K</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.txt">firefox-58.0a1.en-US.linux-i686.txt</a></td>
+ <td>99</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.web-platform.tests.tar.gz">firefox-58.0a1.en-US.linux-i686.web-platform.tests.tar.gz</a></td>
+ <td>46M</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686.xpcshell.tests.zip">firefox-58.0a1.en-US.linux-i686.xpcshell.tests.zip</a></td>
+ <td>10M</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-i686_info.txt">firefox-58.0a1.en-US.linux-i686_info.txt</a></td>
+ <td>23</td>
+ <td>13-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.awsy.tests.zip">firefox-58.0a1.en-US.linux-x86_64.awsy.tests.zip</a></td>
+ <td>16K</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.checksums">firefox-58.0a1.en-US.linux-x86_64.checksums</a></td>
+ <td>8K</td>
+ <td>13-Nov-2017 00:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.checksums.asc">firefox-58.0a1.en-US.linux-x86_64.checksums.asc</a></td>
+ <td>836</td>
+ <td>13-Nov-2017 00:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.common.tests.zip">firefox-58.0a1.en-US.linux-x86_64.common.tests.zip</a></td>
+ <td>55M</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.complete.mar">firefox-58.0a1.en-US.linux-x86_64.complete.mar</a></td>
+ <td>47M</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.cppunittest.tests.zip">firefox-58.0a1.en-US.linux-x86_64.cppunittest.tests.zip</a></td>
+ <td>13M</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.crashreporter-symbols.zip">firefox-58.0a1.en-US.linux-x86_64.crashreporter-symbols.zip</a></td>
+ <td>105M</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.json">firefox-58.0a1.en-US.linux-x86_64.json</a></td>
+ <td>877</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.mochitest.tests.zip">firefox-58.0a1.en-US.linux-x86_64.mochitest.tests.zip</a></td>
+ <td>73M</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.mozinfo.json">firefox-58.0a1.en-US.linux-x86_64.mozinfo.json</a></td>
+ <td>876</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.reftest.tests.zip">firefox-58.0a1.en-US.linux-x86_64.reftest.tests.zip</a></td>
+ <td>57M</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.talos.tests.zip">firefox-58.0a1.en-US.linux-x86_64.talos.tests.zip</a></td>
+ <td>17M</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.tar.bz2">firefox-58.0a1.en-US.linux-x86_64.tar.bz2</a></td>
+ <td>60M</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.tar.bz2.asc">firefox-58.0a1.en-US.linux-x86_64.tar.bz2.asc</a></td>
+ <td>836</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.test_packages.json">firefox-58.0a1.en-US.linux-x86_64.test_packages.json</a></td>
+ <td>1K</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.txt">firefox-58.0a1.en-US.linux-x86_64.txt</a></td>
+ <td>99</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.web-platform.tests.tar.gz">firefox-58.0a1.en-US.linux-x86_64.web-platform.tests.tar.gz</a></td>
+ <td>46M</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64.xpcshell.tests.zip">firefox-58.0a1.en-US.linux-x86_64.xpcshell.tests.zip</a></td>
+ <td>10M</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.linux-x86_64_info.txt">firefox-58.0a1.en-US.linux-x86_64_info.txt</a></td>
+ <td>23</td>
+ <td>13-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.awsy.tests.zip">firefox-58.0a1.en-US.mac.awsy.tests.zip</a></td>
+ <td>16K</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.checksums">firefox-58.0a1.en-US.mac.checksums</a></td>
+ <td>7K</td>
+ <td>12-Nov-2017 23:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.checksums.asc">firefox-58.0a1.en-US.mac.checksums.asc</a></td>
+ <td>836</td>
+ <td>12-Nov-2017 23:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.common.tests.zip">firefox-58.0a1.en-US.mac.common.tests.zip</a></td>
+ <td>36M</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.complete.mar">firefox-58.0a1.en-US.mac.complete.mar</a></td>
+ <td>47M</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.cppunittest.tests.zip">firefox-58.0a1.en-US.mac.cppunittest.tests.zip</a></td>
+ <td>8M</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.crashreporter-symbols.zip">firefox-58.0a1.en-US.mac.crashreporter-symbols.zip</a></td>
+ <td>118M</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.dmg">firefox-58.0a1.en-US.mac.dmg</a></td>
+ <td>63M</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.json">firefox-58.0a1.en-US.mac.json</a></td>
+ <td>1K</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.mochitest.tests.zip">firefox-58.0a1.en-US.mac.mochitest.tests.zip</a></td>
+ <td>72M</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.mozinfo.json">firefox-58.0a1.en-US.mac.mozinfo.json</a></td>
+ <td>877</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.reftest.tests.zip">firefox-58.0a1.en-US.mac.reftest.tests.zip</a></td>
+ <td>57M</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.talos.tests.zip">firefox-58.0a1.en-US.mac.talos.tests.zip</a></td>
+ <td>17M</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.test_packages.json">firefox-58.0a1.en-US.mac.test_packages.json</a></td>
+ <td>1K</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.txt">firefox-58.0a1.en-US.mac.txt</a></td>
+ <td>99</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.web-platform.tests.tar.gz">firefox-58.0a1.en-US.mac.web-platform.tests.tar.gz</a></td>
+ <td>46M</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac.xpcshell.tests.zip">firefox-58.0a1.en-US.mac.xpcshell.tests.zip</a></td>
+ <td>9M</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.mac_info.txt">firefox-58.0a1.en-US.mac_info.txt</a></td>
+ <td>23</td>
+ <td>12-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.awsy.tests.zip">firefox-58.0a1.en-US.win32.awsy.tests.zip</a></td>
+ <td>16K</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.checksums">firefox-58.0a1.en-US.win32.checksums</a></td>
+ <td>8K</td>
+ <td>13-Nov-2017 00:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.checksums.asc">firefox-58.0a1.en-US.win32.checksums.asc</a></td>
+ <td>836</td>
+ <td>13-Nov-2017 00:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.common.tests.zip">firefox-58.0a1.en-US.win32.common.tests.zip</a></td>
+ <td>38M</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.complete.mar">firefox-58.0a1.en-US.win32.complete.mar</a></td>
+ <td>39M</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.cppunittest.tests.zip">firefox-58.0a1.en-US.win32.cppunittest.tests.zip</a></td>
+ <td>8M</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.crashreporter-symbols.zip">firefox-58.0a1.en-US.win32.crashreporter-symbols.zip</a></td>
+ <td>39M</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.installer-stub.exe">firefox-58.0a1.en-US.win32.installer-stub.exe</a></td>
+ <td>269K</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.installer.exe">firefox-58.0a1.en-US.win32.installer.exe</a></td>
+ <td>37M</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.json">firefox-58.0a1.en-US.win32.json</a></td>
+ <td>846</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.mochitest.tests.zip">firefox-58.0a1.en-US.win32.mochitest.tests.zip</a></td>
+ <td>72M</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.mozinfo.json">firefox-58.0a1.en-US.win32.mozinfo.json</a></td>
+ <td>844</td>
+ <td>13-Nov-2017 00:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.reftest.tests.zip">firefox-58.0a1.en-US.win32.reftest.tests.zip</a></td>
+ <td>57M</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.talos.tests.zip">firefox-58.0a1.en-US.win32.talos.tests.zip</a></td>
+ <td>17M</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.test_packages.json">firefox-58.0a1.en-US.win32.test_packages.json</a></td>
+ <td>1K</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.txt">firefox-58.0a1.en-US.win32.txt</a></td>
+ <td>100</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.web-platform.tests.tar.gz">firefox-58.0a1.en-US.win32.web-platform.tests.tar.gz</a></td>
+ <td>46M</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.xpcshell.tests.zip">firefox-58.0a1.en-US.win32.xpcshell.tests.zip</a></td>
+ <td>9M</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32.zip">firefox-58.0a1.en-US.win32.zip</a></td>
+ <td>54M</td>
+ <td>13-Nov-2017 00:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win32_info.txt">firefox-58.0a1.en-US.win32_info.txt</a></td>
+ <td>23</td>
+ <td>13-Nov-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.awsy.tests.zip">firefox-58.0a1.en-US.win64.awsy.tests.zip</a></td>
+ <td>16K</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.checksums">firefox-58.0a1.en-US.win64.checksums</a></td>
+ <td>7K</td>
+ <td>13-Nov-2017 00:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.checksums.asc">firefox-58.0a1.en-US.win64.checksums.asc</a></td>
+ <td>836</td>
+ <td>13-Nov-2017 00:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.common.tests.zip">firefox-58.0a1.en-US.win64.common.tests.zip</a></td>
+ <td>38M</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.complete.mar">firefox-58.0a1.en-US.win64.complete.mar</a></td>
+ <td>42M</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.cppunittest.tests.zip">firefox-58.0a1.en-US.win64.cppunittest.tests.zip</a></td>
+ <td>9M</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.crashreporter-symbols.zip">firefox-58.0a1.en-US.win64.crashreporter-symbols.zip</a></td>
+ <td>34M</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.installer.exe">firefox-58.0a1.en-US.win64.installer.exe</a></td>
+ <td>39M</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.json">firefox-58.0a1.en-US.win64.json</a></td>
+ <td>856</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.mochitest.tests.zip">firefox-58.0a1.en-US.win64.mochitest.tests.zip</a></td>
+ <td>72M</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.mozinfo.json">firefox-58.0a1.en-US.win64.mozinfo.json</a></td>
+ <td>847</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.reftest.tests.zip">firefox-58.0a1.en-US.win64.reftest.tests.zip</a></td>
+ <td>57M</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.talos.tests.zip">firefox-58.0a1.en-US.win64.talos.tests.zip</a></td>
+ <td>17M</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.test_packages.json">firefox-58.0a1.en-US.win64.test_packages.json</a></td>
+ <td>1K</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.txt">firefox-58.0a1.en-US.win64.txt</a></td>
+ <td>100</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.web-platform.tests.tar.gz">firefox-58.0a1.en-US.win64.web-platform.tests.tar.gz</a></td>
+ <td>46M</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.xpcshell.tests.zip">firefox-58.0a1.en-US.win64.xpcshell.tests.zip</a></td>
+ <td>9M</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64.zip">firefox-58.0a1.en-US.win64.zip</a></td>
+ <td>58M</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-58.0a1.en-US.win64_info.txt">firefox-58.0a1.en-US.win64_info.txt</a></td>
+ <td>23</td>
+ <td>13-Nov-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.langpack.xpi">firefox-59.0a1.en-US.langpack.xpi</a></td>
+ <td>431K</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.awsy.tests.zip">firefox-59.0a1.en-US.linux-i686.awsy.tests.zip</a></td>
+ <td>15K</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.checksums">firefox-59.0a1.en-US.linux-i686.checksums</a></td>
+ <td>8K</td>
+ <td>22-Jan-2018 12:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.checksums.asc">firefox-59.0a1.en-US.linux-i686.checksums.asc</a></td>
+ <td>836</td>
+ <td>22-Jan-2018 12:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.common.tests.zip">firefox-59.0a1.en-US.linux-i686.common.tests.zip</a></td>
+ <td>54M</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.complete.mar">firefox-59.0a1.en-US.linux-i686.complete.mar</a></td>
+ <td>44M</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.cppunittest.tests.zip">firefox-59.0a1.en-US.linux-i686.cppunittest.tests.zip</a></td>
+ <td>11M</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.crashreporter-symbols.zip">firefox-59.0a1.en-US.linux-i686.crashreporter-symbols.zip</a></td>
+ <td>104M</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.json">firefox-59.0a1.en-US.linux-i686.json</a></td>
+ <td>866</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.mochitest.tests.zip">firefox-59.0a1.en-US.linux-i686.mochitest.tests.zip</a></td>
+ <td>74M</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.mozinfo.json">firefox-59.0a1.en-US.linux-i686.mozinfo.json</a></td>
+ <td>897</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.reftest.tests.zip">firefox-59.0a1.en-US.linux-i686.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.talos.tests.zip">firefox-59.0a1.en-US.linux-i686.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.tar.bz2">firefox-59.0a1.en-US.linux-i686.tar.bz2</a></td>
+ <td>56M</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.tar.bz2.asc">firefox-59.0a1.en-US.linux-i686.tar.bz2.asc</a></td>
+ <td>836</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.test_packages.json">firefox-59.0a1.en-US.linux-i686.test_packages.json</a></td>
+ <td>1K</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.txt">firefox-59.0a1.en-US.linux-i686.txt</a></td>
+ <td>99</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.web-platform.tests.tar.gz">firefox-59.0a1.en-US.linux-i686.web-platform.tests.tar.gz</a></td>
+ <td>47M</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686.xpcshell.tests.zip">firefox-59.0a1.en-US.linux-i686.xpcshell.tests.zip</a></td>
+ <td>10M</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-i686_info.txt">firefox-59.0a1.en-US.linux-i686_info.txt</a></td>
+ <td>23</td>
+ <td>22-Jan-2018 11:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.awsy.tests.zip">firefox-59.0a1.en-US.linux-x86_64.awsy.tests.zip</a></td>
+ <td>15K</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.checksums">firefox-59.0a1.en-US.linux-x86_64.checksums</a></td>
+ <td>8K</td>
+ <td>22-Jan-2018 11:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.checksums.asc">firefox-59.0a1.en-US.linux-x86_64.checksums.asc</a></td>
+ <td>836</td>
+ <td>22-Jan-2018 11:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.common.tests.zip">firefox-59.0a1.en-US.linux-x86_64.common.tests.zip</a></td>
+ <td>55M</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.complete.mar">firefox-59.0a1.en-US.linux-x86_64.complete.mar</a></td>
+ <td>48M</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.cppunittest.tests.zip">firefox-59.0a1.en-US.linux-x86_64.cppunittest.tests.zip</a></td>
+ <td>13M</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.crashreporter-symbols.zip">firefox-59.0a1.en-US.linux-x86_64.crashreporter-symbols.zip</a></td>
+ <td>91M</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.json">firefox-59.0a1.en-US.linux-x86_64.json</a></td>
+ <td>832</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.mochitest.tests.zip">firefox-59.0a1.en-US.linux-x86_64.mochitest.tests.zip</a></td>
+ <td>74M</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.mozinfo.json">firefox-59.0a1.en-US.linux-x86_64.mozinfo.json</a></td>
+ <td>902</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.reftest.tests.zip">firefox-59.0a1.en-US.linux-x86_64.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.talos.tests.zip">firefox-59.0a1.en-US.linux-x86_64.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.tar.bz2">firefox-59.0a1.en-US.linux-x86_64.tar.bz2</a></td>
+ <td>60M</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.tar.bz2.asc">firefox-59.0a1.en-US.linux-x86_64.tar.bz2.asc</a></td>
+ <td>836</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.test_packages.json">firefox-59.0a1.en-US.linux-x86_64.test_packages.json</a></td>
+ <td>1K</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.txt">firefox-59.0a1.en-US.linux-x86_64.txt</a></td>
+ <td>99</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.web-platform.tests.tar.gz">firefox-59.0a1.en-US.linux-x86_64.web-platform.tests.tar.gz</a></td>
+ <td>47M</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64.xpcshell.tests.zip">firefox-59.0a1.en-US.linux-x86_64.xpcshell.tests.zip</a></td>
+ <td>10M</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.linux-x86_64_info.txt">firefox-59.0a1.en-US.linux-x86_64_info.txt</a></td>
+ <td>23</td>
+ <td>22-Jan-2018 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.awsy.tests.zip">firefox-59.0a1.en-US.mac.awsy.tests.zip</a></td>
+ <td>15K</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.checksums">firefox-59.0a1.en-US.mac.checksums</a></td>
+ <td>7K</td>
+ <td>22-Jan-2018 11:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.checksums.asc">firefox-59.0a1.en-US.mac.checksums.asc</a></td>
+ <td>836</td>
+ <td>22-Jan-2018 11:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.common.tests.zip">firefox-59.0a1.en-US.mac.common.tests.zip</a></td>
+ <td>34M</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.complete.mar">firefox-59.0a1.en-US.mac.complete.mar</a></td>
+ <td>47M</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.cppunittest.tests.zip">firefox-59.0a1.en-US.mac.cppunittest.tests.zip</a></td>
+ <td>9M</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.crashreporter-symbols.zip">firefox-59.0a1.en-US.mac.crashreporter-symbols.zip</a></td>
+ <td>102M</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.dmg">firefox-59.0a1.en-US.mac.dmg</a></td>
+ <td>64M</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.json">firefox-59.0a1.en-US.mac.json</a></td>
+ <td>1K</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.mochitest.tests.zip">firefox-59.0a1.en-US.mac.mochitest.tests.zip</a></td>
+ <td>74M</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.mozinfo.json">firefox-59.0a1.en-US.mac.mozinfo.json</a></td>
+ <td>903</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.reftest.tests.zip">firefox-59.0a1.en-US.mac.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.talos.tests.zip">firefox-59.0a1.en-US.mac.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.test_packages.json">firefox-59.0a1.en-US.mac.test_packages.json</a></td>
+ <td>1K</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.txt">firefox-59.0a1.en-US.mac.txt</a></td>
+ <td>99</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.web-platform.tests.tar.gz">firefox-59.0a1.en-US.mac.web-platform.tests.tar.gz</a></td>
+ <td>47M</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac.xpcshell.tests.zip">firefox-59.0a1.en-US.mac.xpcshell.tests.zip</a></td>
+ <td>9M</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.mac_info.txt">firefox-59.0a1.en-US.mac_info.txt</a></td>
+ <td>23</td>
+ <td>22-Jan-2018 11:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.awsy.tests.zip">firefox-59.0a1.en-US.win32.awsy.tests.zip</a></td>
+ <td>15K</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.checksums">firefox-59.0a1.en-US.win32.checksums</a></td>
+ <td>8K</td>
+ <td>22-Jan-2018 12:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.checksums.asc">firefox-59.0a1.en-US.win32.checksums.asc</a></td>
+ <td>836</td>
+ <td>22-Jan-2018 12:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.common.tests.zip">firefox-59.0a1.en-US.win32.common.tests.zip</a></td>
+ <td>36M</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.complete.mar">firefox-59.0a1.en-US.win32.complete.mar</a></td>
+ <td>40M</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.cppunittest.tests.zip">firefox-59.0a1.en-US.win32.cppunittest.tests.zip</a></td>
+ <td>8M</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.crashreporter-symbols.zip">firefox-59.0a1.en-US.win32.crashreporter-symbols.zip</a></td>
+ <td>32M</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.installer-stub.exe">firefox-59.0a1.en-US.win32.installer-stub.exe</a></td>
+ <td>269K</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.installer.exe">firefox-59.0a1.en-US.win32.installer.exe</a></td>
+ <td>37M</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.json">firefox-59.0a1.en-US.win32.json</a></td>
+ <td>846</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.mochitest.tests.zip">firefox-59.0a1.en-US.win32.mochitest.tests.zip</a></td>
+ <td>74M</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.mozinfo.json">firefox-59.0a1.en-US.win32.mozinfo.json</a></td>
+ <td>870</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.reftest.tests.zip">firefox-59.0a1.en-US.win32.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.talos.tests.zip">firefox-59.0a1.en-US.win32.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.test_packages.json">firefox-59.0a1.en-US.win32.test_packages.json</a></td>
+ <td>1K</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.txt">firefox-59.0a1.en-US.win32.txt</a></td>
+ <td>99</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.web-platform.tests.tar.gz">firefox-59.0a1.en-US.win32.web-platform.tests.tar.gz</a></td>
+ <td>47M</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.xpcshell.tests.zip">firefox-59.0a1.en-US.win32.xpcshell.tests.zip</a></td>
+ <td>9M</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32.zip">firefox-59.0a1.en-US.win32.zip</a></td>
+ <td>55M</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win32_info.txt">firefox-59.0a1.en-US.win32_info.txt</a></td>
+ <td>23</td>
+ <td>22-Jan-2018 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.awsy.tests.zip">firefox-59.0a1.en-US.win64.awsy.tests.zip</a></td>
+ <td>15K</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.checksums">firefox-59.0a1.en-US.win64.checksums</a></td>
+ <td>7K</td>
+ <td>22-Jan-2018 12:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.checksums.asc">firefox-59.0a1.en-US.win64.checksums.asc</a></td>
+ <td>836</td>
+ <td>22-Jan-2018 12:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.common.tests.zip">firefox-59.0a1.en-US.win64.common.tests.zip</a></td>
+ <td>37M</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.complete.mar">firefox-59.0a1.en-US.win64.complete.mar</a></td>
+ <td>43M</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.cppunittest.tests.zip">firefox-59.0a1.en-US.win64.cppunittest.tests.zip</a></td>
+ <td>9M</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.crashreporter-symbols.zip">firefox-59.0a1.en-US.win64.crashreporter-symbols.zip</a></td>
+ <td>27M</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.installer.exe">firefox-59.0a1.en-US.win64.installer.exe</a></td>
+ <td>40M</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.json">firefox-59.0a1.en-US.win64.json</a></td>
+ <td>856</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.mochitest.tests.zip">firefox-59.0a1.en-US.win64.mochitest.tests.zip</a></td>
+ <td>74M</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.mozinfo.json">firefox-59.0a1.en-US.win64.mozinfo.json</a></td>
+ <td>873</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.reftest.tests.zip">firefox-59.0a1.en-US.win64.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.talos.tests.zip">firefox-59.0a1.en-US.win64.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.test_packages.json">firefox-59.0a1.en-US.win64.test_packages.json</a></td>
+ <td>1K</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.txt">firefox-59.0a1.en-US.win64.txt</a></td>
+ <td>99</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.web-platform.tests.tar.gz">firefox-59.0a1.en-US.win64.web-platform.tests.tar.gz</a></td>
+ <td>47M</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.xpcshell.tests.zip">firefox-59.0a1.en-US.win64.xpcshell.tests.zip</a></td>
+ <td>9M</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64.zip">firefox-59.0a1.en-US.win64.zip</a></td>
+ <td>59M</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-59.0a1.en-US.win64_info.txt">firefox-59.0a1.en-US.win64_info.txt</a></td>
+ <td>23</td>
+ <td>22-Jan-2018 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.langpack.xpi">firefox-60.0a1.en-US.langpack.xpi</a></td>
+ <td>434K</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.awsy.tests.zip">firefox-60.0a1.en-US.linux-i686.awsy.tests.zip</a></td>
+ <td>15K</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.checksums">firefox-60.0a1.en-US.linux-i686.checksums</a></td>
+ <td>8K</td>
+ <td>15-Feb-2018 12:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.checksums.asc">firefox-60.0a1.en-US.linux-i686.checksums.asc</a></td>
+ <td>836</td>
+ <td>15-Feb-2018 12:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.common.tests.zip">firefox-60.0a1.en-US.linux-i686.common.tests.zip</a></td>
+ <td>53M</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.complete.mar">firefox-60.0a1.en-US.linux-i686.complete.mar</a></td>
+ <td>44M</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.cppunittest.tests.zip">firefox-60.0a1.en-US.linux-i686.cppunittest.tests.zip</a></td>
+ <td>11M</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.crashreporter-symbols.zip">firefox-60.0a1.en-US.linux-i686.crashreporter-symbols.zip</a></td>
+ <td>104M</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.json">firefox-60.0a1.en-US.linux-i686.json</a></td>
+ <td>850</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.mochitest.tests.zip">firefox-60.0a1.en-US.linux-i686.mochitest.tests.zip</a></td>
+ <td>75M</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.mozinfo.json">firefox-60.0a1.en-US.linux-i686.mozinfo.json</a></td>
+ <td>897</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.reftest.tests.zip">firefox-60.0a1.en-US.linux-i686.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.talos.tests.zip">firefox-60.0a1.en-US.linux-i686.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.tar.bz2">firefox-60.0a1.en-US.linux-i686.tar.bz2</a></td>
+ <td>56M</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.tar.bz2.asc">firefox-60.0a1.en-US.linux-i686.tar.bz2.asc</a></td>
+ <td>836</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.test_packages.json">firefox-60.0a1.en-US.linux-i686.test_packages.json</a></td>
+ <td>1K</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.txt">firefox-60.0a1.en-US.linux-i686.txt</a></td>
+ <td>99</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.web-platform.tests.tar.gz">firefox-60.0a1.en-US.linux-i686.web-platform.tests.tar.gz</a></td>
+ <td>48M</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686.xpcshell.tests.zip">firefox-60.0a1.en-US.linux-i686.xpcshell.tests.zip</a></td>
+ <td>10M</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-i686_info.txt">firefox-60.0a1.en-US.linux-i686_info.txt</a></td>
+ <td>23</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.awsy.tests.zip">firefox-60.0a1.en-US.linux-x86_64.awsy.tests.zip</a></td>
+ <td>15K</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.checksums">firefox-60.0a1.en-US.linux-x86_64.checksums</a></td>
+ <td>8K</td>
+ <td>15-Feb-2018 12:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.checksums.asc">firefox-60.0a1.en-US.linux-x86_64.checksums.asc</a></td>
+ <td>836</td>
+ <td>15-Feb-2018 12:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.common.tests.zip">firefox-60.0a1.en-US.linux-x86_64.common.tests.zip</a></td>
+ <td>54M</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.complete.mar">firefox-60.0a1.en-US.linux-x86_64.complete.mar</a></td>
+ <td>48M</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.cppunittest.tests.zip">firefox-60.0a1.en-US.linux-x86_64.cppunittest.tests.zip</a></td>
+ <td>13M</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.crashreporter-symbols.zip">firefox-60.0a1.en-US.linux-x86_64.crashreporter-symbols.zip</a></td>
+ <td>91M</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.json">firefox-60.0a1.en-US.linux-x86_64.json</a></td>
+ <td>816</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.mochitest.tests.zip">firefox-60.0a1.en-US.linux-x86_64.mochitest.tests.zip</a></td>
+ <td>75M</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.mozinfo.json">firefox-60.0a1.en-US.linux-x86_64.mozinfo.json</a></td>
+ <td>902</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.reftest.tests.zip">firefox-60.0a1.en-US.linux-x86_64.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.talos.tests.zip">firefox-60.0a1.en-US.linux-x86_64.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.tar.bz2">firefox-60.0a1.en-US.linux-x86_64.tar.bz2</a></td>
+ <td>60M</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.tar.bz2.asc">firefox-60.0a1.en-US.linux-x86_64.tar.bz2.asc</a></td>
+ <td>836</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.test_packages.json">firefox-60.0a1.en-US.linux-x86_64.test_packages.json</a></td>
+ <td>1K</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.txt">firefox-60.0a1.en-US.linux-x86_64.txt</a></td>
+ <td>99</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.web-platform.tests.tar.gz">firefox-60.0a1.en-US.linux-x86_64.web-platform.tests.tar.gz</a></td>
+ <td>48M</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64.xpcshell.tests.zip">firefox-60.0a1.en-US.linux-x86_64.xpcshell.tests.zip</a></td>
+ <td>10M</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.linux-x86_64_info.txt">firefox-60.0a1.en-US.linux-x86_64_info.txt</a></td>
+ <td>23</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.awsy.tests.zip">firefox-60.0a1.en-US.mac.awsy.tests.zip</a></td>
+ <td>15K</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.checksums">firefox-60.0a1.en-US.mac.checksums</a></td>
+ <td>7K</td>
+ <td>15-Feb-2018 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.checksums.asc">firefox-60.0a1.en-US.mac.checksums.asc</a></td>
+ <td>836</td>
+ <td>15-Feb-2018 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.common.tests.zip">firefox-60.0a1.en-US.mac.common.tests.zip</a></td>
+ <td>34M</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.complete.mar">firefox-60.0a1.en-US.mac.complete.mar</a></td>
+ <td>48M</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.cppunittest.tests.zip">firefox-60.0a1.en-US.mac.cppunittest.tests.zip</a></td>
+ <td>9M</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.crashreporter-symbols.zip">firefox-60.0a1.en-US.mac.crashreporter-symbols.zip</a></td>
+ <td>117M</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.dmg">firefox-60.0a1.en-US.mac.dmg</a></td>
+ <td>65M</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.json">firefox-60.0a1.en-US.mac.json</a></td>
+ <td>1K</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.mochitest.tests.zip">firefox-60.0a1.en-US.mac.mochitest.tests.zip</a></td>
+ <td>74M</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.mozinfo.json">firefox-60.0a1.en-US.mac.mozinfo.json</a></td>
+ <td>903</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.reftest.tests.zip">firefox-60.0a1.en-US.mac.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.talos.tests.zip">firefox-60.0a1.en-US.mac.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.test_packages.json">firefox-60.0a1.en-US.mac.test_packages.json</a></td>
+ <td>1K</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.txt">firefox-60.0a1.en-US.mac.txt</a></td>
+ <td>99</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.web-platform.tests.tar.gz">firefox-60.0a1.en-US.mac.web-platform.tests.tar.gz</a></td>
+ <td>48M</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac.xpcshell.tests.zip">firefox-60.0a1.en-US.mac.xpcshell.tests.zip</a></td>
+ <td>9M</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.mac_info.txt">firefox-60.0a1.en-US.mac_info.txt</a></td>
+ <td>23</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.awsy.tests.zip">firefox-60.0a1.en-US.win32.awsy.tests.zip</a></td>
+ <td>15K</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.checksums">firefox-60.0a1.en-US.win32.checksums</a></td>
+ <td>8K</td>
+ <td>15-Feb-2018 13:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.checksums.asc">firefox-60.0a1.en-US.win32.checksums.asc</a></td>
+ <td>836</td>
+ <td>15-Feb-2018 13:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.common.tests.zip">firefox-60.0a1.en-US.win32.common.tests.zip</a></td>
+ <td>36M</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.complete.mar">firefox-60.0a1.en-US.win32.complete.mar</a></td>
+ <td>40M</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.cppunittest.tests.zip">firefox-60.0a1.en-US.win32.cppunittest.tests.zip</a></td>
+ <td>8M</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.crashreporter-symbols.zip">firefox-60.0a1.en-US.win32.crashreporter-symbols.zip</a></td>
+ <td>32M</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.installer-stub.exe">firefox-60.0a1.en-US.win32.installer-stub.exe</a></td>
+ <td>269K</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.installer.exe">firefox-60.0a1.en-US.win32.installer.exe</a></td>
+ <td>37M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.json">firefox-60.0a1.en-US.win32.json</a></td>
+ <td>830</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.mochitest.tests.zip">firefox-60.0a1.en-US.win32.mochitest.tests.zip</a></td>
+ <td>74M</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.mozinfo.json">firefox-60.0a1.en-US.win32.mozinfo.json</a></td>
+ <td>870</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.reftest.tests.zip">firefox-60.0a1.en-US.win32.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.talos.tests.zip">firefox-60.0a1.en-US.win32.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.test_packages.json">firefox-60.0a1.en-US.win32.test_packages.json</a></td>
+ <td>1K</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.txt">firefox-60.0a1.en-US.win32.txt</a></td>
+ <td>99</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.web-platform.tests.tar.gz">firefox-60.0a1.en-US.win32.web-platform.tests.tar.gz</a></td>
+ <td>48M</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.xpcshell.tests.zip">firefox-60.0a1.en-US.win32.xpcshell.tests.zip</a></td>
+ <td>9M</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32.zip">firefox-60.0a1.en-US.win32.zip</a></td>
+ <td>55M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win32_info.txt">firefox-60.0a1.en-US.win32_info.txt</a></td>
+ <td>23</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.awsy.tests.zip">firefox-60.0a1.en-US.win64.awsy.tests.zip</a></td>
+ <td>15K</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.checksums">firefox-60.0a1.en-US.win64.checksums</a></td>
+ <td>7K</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.checksums.asc">firefox-60.0a1.en-US.win64.checksums.asc</a></td>
+ <td>836</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.common.tests.zip">firefox-60.0a1.en-US.win64.common.tests.zip</a></td>
+ <td>37M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.complete.mar">firefox-60.0a1.en-US.win64.complete.mar</a></td>
+ <td>43M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.cppunittest.tests.zip">firefox-60.0a1.en-US.win64.cppunittest.tests.zip</a></td>
+ <td>9M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.crashreporter-symbols.zip">firefox-60.0a1.en-US.win64.crashreporter-symbols.zip</a></td>
+ <td>28M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.installer.exe">firefox-60.0a1.en-US.win64.installer.exe</a></td>
+ <td>40M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.json">firefox-60.0a1.en-US.win64.json</a></td>
+ <td>840</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.mochitest.tests.zip">firefox-60.0a1.en-US.win64.mochitest.tests.zip</a></td>
+ <td>74M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.mozinfo.json">firefox-60.0a1.en-US.win64.mozinfo.json</a></td>
+ <td>873</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.reftest.tests.zip">firefox-60.0a1.en-US.win64.reftest.tests.zip</a></td>
+ <td>58M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.talos.tests.zip">firefox-60.0a1.en-US.win64.talos.tests.zip</a></td>
+ <td>13M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.test_packages.json">firefox-60.0a1.en-US.win64.test_packages.json</a></td>
+ <td>1K</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.txt">firefox-60.0a1.en-US.win64.txt</a></td>
+ <td>99</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.web-platform.tests.tar.gz">firefox-60.0a1.en-US.win64.web-platform.tests.tar.gz</a></td>
+ <td>48M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.xpcshell.tests.zip">firefox-60.0a1.en-US.win64.xpcshell.tests.zip</a></td>
+ <td>9M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64.zip">firefox-60.0a1.en-US.win64.zip</a></td>
+ <td>59M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-60.0a1.en-US.win64_info.txt">firefox-60.0a1.en-US.win64_info.txt</a></td>
+ <td>23</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-i686-en-US-20170917100334-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-linux-i686-en-US-20170917100334-20170920220431.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Sep-2017 00:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-i686-en-US-20170917220255-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-linux-i686-en-US-20170917220255-20170920220431.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Sep-2017 00:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-i686-en-US-20170917220255-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-linux-i686-en-US-20170917220255-20170921100141.partial.mar</a></td>
+ <td>9M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-i686-en-US-20170918100059-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-linux-i686-en-US-20170918100059-20170920220431.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Sep-2017 00:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-i686-en-US-20170918100059-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-linux-i686-en-US-20170918100059-20170921100141.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-i686-en-US-20170918220054-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-linux-i686-en-US-20170918220054-20170920220431.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Sep-2017 00:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-i686-en-US-20170918220054-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-linux-i686-en-US-20170918220054-20170921100141.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-i686-en-US-20170920220431-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-linux-i686-en-US-20170920220431-20170921100141.partial.mar</a></td>
+ <td>6M</td>
+ <td>21-Sep-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170917100334-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170917100334-20170919100405.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Sep-2017 14:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170917220255-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170917220255-20170919100405.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Sep-2017 14:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170917220255-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170917220255-20170919220202.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 00:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918100059-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918100059-20170919100405.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Sep-2017 14:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918100059-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918100059-20170919220202.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 00:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918100059-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918100059-20170920100426.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 12:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918220054-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918220054-20170919100405.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Sep-2017 14:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918220054-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918220054-20170919220202.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 00:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918220054-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918220054-20170920100426.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 12:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918220054-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170918220054-20170920220431.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Sep-2017 00:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919100405-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919100405-20170919220202.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 00:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919100405-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919100405-20170920100426.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 12:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919100405-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919100405-20170920220431.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Sep-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919100405-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919100405-20170921100141.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919220202-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919220202-20170920100426.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Sep-2017 12:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919220202-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919220202-20170920220431.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Sep-2017 00:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919220202-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170919220202-20170921100141.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170920100426-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170920100426-20170920220431.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Sep-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170920100426-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170920100426-20170921100141.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Sep-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170920220431-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-linux-x86_64-en-US-20170920220431-20170921100141.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170917100334-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170917100334-20170919100405.partial.mar</a></td>
+ <td>4M</td>
+ <td>19-Sep-2017 12:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170917220255-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170917220255-20170919100405.partial.mar</a></td>
+ <td>4M</td>
+ <td>19-Sep-2017 12:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170917220255-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170917220255-20170919220202.partial.mar</a></td>
+ <td>4M</td>
+ <td>19-Sep-2017 23:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170918100059-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170918100059-20170919100405.partial.mar</a></td>
+ <td>4M</td>
+ <td>19-Sep-2017 12:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170918100059-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170918100059-20170919220202.partial.mar</a></td>
+ <td>4M</td>
+ <td>19-Sep-2017 23:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170918100059-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170918100059-20170920100426.partial.mar</a></td>
+ <td>4M</td>
+ <td>20-Sep-2017 11:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170918220054-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170918220054-20170919100405.partial.mar</a></td>
+ <td>4M</td>
+ <td>19-Sep-2017 12:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170918220054-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170918220054-20170919220202.partial.mar</a></td>
+ <td>4M</td>
+ <td>19-Sep-2017 23:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170918220054-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170918220054-20170920100426.partial.mar</a></td>
+ <td>4M</td>
+ <td>20-Sep-2017 11:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170918220054-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170918220054-20170920220431.partial.mar</a></td>
+ <td>4M</td>
+ <td>20-Sep-2017 23:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170919100405-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170919100405-20170919220202.partial.mar</a></td>
+ <td>4M</td>
+ <td>19-Sep-2017 23:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170919100405-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170919100405-20170920100426.partial.mar</a></td>
+ <td>4M</td>
+ <td>20-Sep-2017 11:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170919100405-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170919100405-20170920220431.partial.mar</a></td>
+ <td>4M</td>
+ <td>20-Sep-2017 23:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170919100405-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170919100405-20170921100141.partial.mar</a></td>
+ <td>4M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170919220202-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170919220202-20170920100426.partial.mar</a></td>
+ <td>3M</td>
+ <td>20-Sep-2017 11:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170919220202-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170919220202-20170920220431.partial.mar</a></td>
+ <td>4M</td>
+ <td>20-Sep-2017 23:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170919220202-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170919220202-20170921100141.partial.mar</a></td>
+ <td>4M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170920100426-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170920100426-20170920220431.partial.mar</a></td>
+ <td>3M</td>
+ <td>20-Sep-2017 23:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170920100426-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170920100426-20170921100141.partial.mar</a></td>
+ <td>4M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-mac-en-US-20170920220431-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-mac-en-US-20170920220431-20170921100141.partial.mar</a></td>
+ <td>3M</td>
+ <td>21-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170917100334-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170917100334-20170919100405.partial.mar</a></td>
+ <td>6M</td>
+ <td>19-Sep-2017 15:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170917220255-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170917220255-20170919100405.partial.mar</a></td>
+ <td>5M</td>
+ <td>19-Sep-2017 15:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170917220255-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170917220255-20170919220202.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Sep-2017 00:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170918100059-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170918100059-20170919100405.partial.mar</a></td>
+ <td>5M</td>
+ <td>19-Sep-2017 15:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170918100059-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170918100059-20170919220202.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Sep-2017 00:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170918100059-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170918100059-20170920100426.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Sep-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170918220054-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170918220054-20170919100405.partial.mar</a></td>
+ <td>6M</td>
+ <td>19-Sep-2017 15:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170918220054-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170918220054-20170919220202.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Sep-2017 00:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170918220054-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170918220054-20170920100426.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Sep-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170918220054-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170918220054-20170920220431.partial.mar</a></td>
+ <td>6M</td>
+ <td>21-Sep-2017 00:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170919100405-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170919100405-20170919220202.partial.mar</a></td>
+ <td>5M</td>
+ <td>20-Sep-2017 00:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170919100405-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170919100405-20170920100426.partial.mar</a></td>
+ <td>5M</td>
+ <td>20-Sep-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170919100405-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170919100405-20170920220431.partial.mar</a></td>
+ <td>5M</td>
+ <td>21-Sep-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170919100405-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170919100405-20170921100141.partial.mar</a></td>
+ <td>6M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170919220202-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170919220202-20170920100426.partial.mar</a></td>
+ <td>5M</td>
+ <td>20-Sep-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170919220202-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170919220202-20170920220431.partial.mar</a></td>
+ <td>5M</td>
+ <td>21-Sep-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170919220202-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170919220202-20170921100141.partial.mar</a></td>
+ <td>5M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170920100426-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170920100426-20170920220431.partial.mar</a></td>
+ <td>5M</td>
+ <td>21-Sep-2017 00:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170920100426-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170920100426-20170921100141.partial.mar</a></td>
+ <td>5M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win32-en-US-20170920220431-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-win32-en-US-20170920220431-20170921100141.partial.mar</a></td>
+ <td>5M</td>
+ <td>21-Sep-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170917100334-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170917100334-20170919100405.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Sep-2017 15:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170917220255-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170917220255-20170919100405.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Sep-2017 15:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170917220255-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170917220255-20170919220202.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 00:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170918100059-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170918100059-20170919100405.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Sep-2017 15:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170918100059-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170918100059-20170919220202.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 00:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170918100059-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170918100059-20170920100426.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 12:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170918220054-20170919100405.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170918220054-20170919100405.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Sep-2017 15:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170918220054-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170918220054-20170919220202.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 00:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170918220054-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170918220054-20170920100426.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 12:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170918220054-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170918220054-20170920220431.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Sep-2017 01:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170919100405-20170919220202.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170919100405-20170919220202.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 00:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170919100405-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170919100405-20170920100426.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Sep-2017 12:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170919100405-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170919100405-20170920220431.partial.mar</a></td>
+ <td>6M</td>
+ <td>21-Sep-2017 01:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170919100405-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170919100405-20170921100141.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170919220202-20170920100426.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170919220202-20170920100426.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Sep-2017 12:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170919220202-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170919220202-20170920220431.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Sep-2017 01:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170919220202-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170919220202-20170921100141.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170920100426-20170920220431.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170920100426-20170920220431.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Sep-2017 01:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170920100426-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170920100426-20170921100141.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-57.0a1-win64-en-US-20170920220431-20170921100141.partial.mar">firefox-mozilla-central-57.0a1-win64-en-US-20170920220431-20170921100141.partial.mar</a></td>
+ <td>6M</td>
+ <td>21-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170918100059-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170918100059-20170921220243.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 00:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170918220054-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170918220054-20170921220243.partial.mar</a></td>
+ <td>8M</td>
+ <td>22-Sep-2017 00:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170918220054-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170918220054-20170922100051.partial.mar</a></td>
+ <td>9M</td>
+ <td>22-Sep-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170920220431-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170920220431-20170921220243.partial.mar</a></td>
+ <td>8M</td>
+ <td>22-Sep-2017 00:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170920220431-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170920220431-20170922100051.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170920220431-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170920220431-20170922220129.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Sep-2017 00:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921100141-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921100141-20170921220243.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 00:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921100141-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921100141-20170922100051.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921100141-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921100141-20170922220129.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Sep-2017 00:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921100141-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921100141-20170923100045.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Sep-2017 13:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921220243-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921220243-20170922100051.partial.mar</a></td>
+ <td>8M</td>
+ <td>22-Sep-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921220243-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921220243-20170922220129.partial.mar</a></td>
+ <td>8M</td>
+ <td>23-Sep-2017 00:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921220243-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921220243-20170923100045.partial.mar</a></td>
+ <td>8M</td>
+ <td>23-Sep-2017 13:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921220243-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170921220243-20170923220337.partial.mar</a></td>
+ <td>8M</td>
+ <td>24-Sep-2017 01:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922100051-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922100051-20170922220129.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Sep-2017 00:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922100051-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922100051-20170923100045.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Sep-2017 13:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922100051-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922100051-20170923220337.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Sep-2017 01:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922100051-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922100051-20170924100550.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Sep-2017 13:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922220129-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922220129-20170923100045.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Sep-2017 13:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922220129-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922220129-20170923220337.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Sep-2017 01:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922220129-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922220129-20170924100550.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Sep-2017 13:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922220129-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170922220129-20170924220116.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 01:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923100045-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923100045-20170923220337.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Sep-2017 01:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923100045-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923100045-20170924100550.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Sep-2017 13:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923100045-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923100045-20170924220116.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 01:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923100045-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923100045-20170925100307.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923220337-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923220337-20170924100550.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Sep-2017 13:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923220337-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923220337-20170924220116.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 01:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923220337-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923220337-20170925100307.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923220337-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170923220337-20170925220207.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Sep-2017 00:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924100550-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924100550-20170924220116.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 01:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924100550-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924100550-20170925100307.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924100550-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924100550-20170925220207.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Sep-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924100550-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924100550-20170926100259.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 12:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924220116-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924220116-20170925100307.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924220116-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924220116-20170925220207.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Sep-2017 00:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924220116-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924220116-20170926100259.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 12:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924220116-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170924220116-20170926220106.partial.mar</a></td>
+ <td>8M</td>
+ <td>27-Sep-2017 00:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925100307-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925100307-20170925220207.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Sep-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925100307-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925100307-20170926100259.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 12:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925100307-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925100307-20170926220106.partial.mar</a></td>
+ <td>8M</td>
+ <td>27-Sep-2017 00:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925100307-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925100307-20170927100120.partial.mar</a></td>
+ <td>8M</td>
+ <td>27-Sep-2017 12:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925220207-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925220207-20170926100259.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 12:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925220207-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925220207-20170926220106.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Sep-2017 00:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925220207-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925220207-20170927100120.partial.mar</a></td>
+ <td>8M</td>
+ <td>27-Sep-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925220207-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170925220207-20170928100123.partial.mar</a></td>
+ <td>9M</td>
+ <td>28-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926100259-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926100259-20170926220106.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Sep-2017 00:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926100259-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926100259-20170927100120.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Sep-2017 12:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926100259-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926100259-20170928100123.partial.mar</a></td>
+ <td>9M</td>
+ <td>28-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926100259-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926100259-20170928220658.partial.mar</a></td>
+ <td>8M</td>
+ <td>29-Sep-2017 00:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926220106-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926220106-20170927100120.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Sep-2017 12:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926220106-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926220106-20170928100123.partial.mar</a></td>
+ <td>8M</td>
+ <td>28-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926220106-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926220106-20170928220658.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Sep-2017 00:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926220106-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170926220106-20170929100122.partial.mar</a></td>
+ <td>8M</td>
+ <td>29-Sep-2017 12:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170927100120-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170927100120-20170928100123.partial.mar</a></td>
+ <td>8M</td>
+ <td>28-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170927100120-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170927100120-20170928220658.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Sep-2017 00:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170927100120-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170927100120-20170929100122.partial.mar</a></td>
+ <td>8M</td>
+ <td>29-Sep-2017 12:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170927100120-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170927100120-20170929220356.partial.mar</a></td>
+ <td>8M</td>
+ <td>30-Sep-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928100123-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928100123-20170928220658.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Sep-2017 00:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928100123-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928100123-20170929100122.partial.mar</a></td>
+ <td>8M</td>
+ <td>29-Sep-2017 12:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928100123-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928100123-20170929220356.partial.mar</a></td>
+ <td>9M</td>
+ <td>30-Sep-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928100123-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928100123-20170930100302.partial.mar</a></td>
+ <td>9M</td>
+ <td>30-Sep-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928220658-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928220658-20170929100122.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Sep-2017 12:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928220658-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928220658-20170929220356.partial.mar</a></td>
+ <td>8M</td>
+ <td>30-Sep-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928220658-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928220658-20170930100302.partial.mar</a></td>
+ <td>8M</td>
+ <td>30-Sep-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928220658-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170928220658-20170930220116.partial.mar</a></td>
+ <td>8M</td>
+ <td>01-Oct-2017 00:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929100122-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929100122-20170929220356.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Sep-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929100122-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929100122-20170930100302.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Sep-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929100122-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929100122-20170930220116.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 00:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929100122-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929100122-20171001100335.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929220356-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929220356-20170930100302.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Sep-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929220356-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929220356-20170930220116.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Oct-2017 00:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929220356-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929220356-20171001100335.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Oct-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929220356-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170929220356-20171001220301.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930100302-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930100302-20170930220116.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Oct-2017 00:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930100302-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930100302-20171001100335.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Oct-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930100302-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930100302-20171001220301.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930100302-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930100302-20171002100134.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930220116-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930220116-20171001100335.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Oct-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930220116-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930220116-20171001220301.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930220116-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930220116-20171002100134.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930220116-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20170930220116-20171002220204.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Oct-2017 00:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001100335-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001100335-20171001220301.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001100335-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001100335-20171002100134.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001100335-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001100335-20171002220204.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Oct-2017 00:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001100335-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001100335-20171003100226.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 12:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001220301-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001220301-20171002100134.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001220301-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001220301-20171002220204.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Oct-2017 00:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001220301-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001220301-20171003100226.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 12:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001220301-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171001220301-20171003220138.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002100134-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002100134-20171002220204.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Oct-2017 00:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002100134-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002100134-20171003100226.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Oct-2017 12:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002100134-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002100134-20171003220138.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002100134-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002100134-20171004100049.partial.mar</a></td>
+ <td>10M</td>
+ <td>04-Oct-2017 13:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002220204-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002220204-20171003100226.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 12:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002220204-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002220204-20171003220138.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002220204-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002220204-20171004100049.partial.mar</a></td>
+ <td>10M</td>
+ <td>04-Oct-2017 13:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002220204-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171002220204-20171004220309.partial.mar</a></td>
+ <td>11M</td>
+ <td>05-Oct-2017 00:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003100226-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003100226-20171003220138.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003100226-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003100226-20171004100049.partial.mar</a></td>
+ <td>10M</td>
+ <td>04-Oct-2017 13:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003100226-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003100226-20171004220309.partial.mar</a></td>
+ <td>11M</td>
+ <td>05-Oct-2017 00:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003100226-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003100226-20171005100211.partial.mar</a></td>
+ <td>11M</td>
+ <td>05-Oct-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003220138-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003220138-20171004100049.partial.mar</a></td>
+ <td>9M</td>
+ <td>04-Oct-2017 13:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003220138-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003220138-20171004220309.partial.mar</a></td>
+ <td>10M</td>
+ <td>05-Oct-2017 00:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003220138-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003220138-20171005100211.partial.mar</a></td>
+ <td>10M</td>
+ <td>05-Oct-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003220138-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171003220138-20171005220204.partial.mar</a></td>
+ <td>11M</td>
+ <td>06-Oct-2017 00:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004100049-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004100049-20171004220309.partial.mar</a></td>
+ <td>9M</td>
+ <td>05-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004100049-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004100049-20171005100211.partial.mar</a></td>
+ <td>9M</td>
+ <td>05-Oct-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004100049-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004100049-20171005220204.partial.mar</a></td>
+ <td>8M</td>
+ <td>06-Oct-2017 00:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004100049-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004100049-20171006100327.partial.mar</a></td>
+ <td>9M</td>
+ <td>06-Oct-2017 12:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004220309-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004220309-20171005100211.partial.mar</a></td>
+ <td>7M</td>
+ <td>05-Oct-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004220309-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004220309-20171005220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Oct-2017 00:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004220309-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004220309-20171006100327.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Oct-2017 12:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004220309-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171004220309-20171006220306.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005100211-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005100211-20171005220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Oct-2017 00:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005100211-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005100211-20171006100327.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Oct-2017 12:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005100211-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005100211-20171006220306.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005100211-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005100211-20171007100142.partial.mar</a></td>
+ <td>8M</td>
+ <td>07-Oct-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005220204-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005220204-20171006100327.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Oct-2017 12:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005220204-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005220204-20171006220306.partial.mar</a></td>
+ <td>8M</td>
+ <td>07-Oct-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005220204-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005220204-20171007100142.partial.mar</a></td>
+ <td>9M</td>
+ <td>07-Oct-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005220204-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171005220204-20171007220156.partial.mar</a></td>
+ <td>9M</td>
+ <td>08-Oct-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006100327-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006100327-20171006220306.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006100327-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006100327-20171007100142.partial.mar</a></td>
+ <td>8M</td>
+ <td>07-Oct-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006100327-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006100327-20171007220156.partial.mar</a></td>
+ <td>8M</td>
+ <td>08-Oct-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006100327-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006100327-20171008131700.partial.mar</a></td>
+ <td>9M</td>
+ <td>08-Oct-2017 15:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006220306-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006220306-20171007100142.partial.mar</a></td>
+ <td>8M</td>
+ <td>07-Oct-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006220306-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006220306-20171007220156.partial.mar</a></td>
+ <td>8M</td>
+ <td>08-Oct-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006220306-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006220306-20171008131700.partial.mar</a></td>
+ <td>8M</td>
+ <td>08-Oct-2017 15:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006220306-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171006220306-20171008220130.partial.mar</a></td>
+ <td>9M</td>
+ <td>09-Oct-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007100142-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007100142-20171007220156.partial.mar</a></td>
+ <td>5M</td>
+ <td>08-Oct-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007100142-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007100142-20171008131700.partial.mar</a></td>
+ <td>7M</td>
+ <td>08-Oct-2017 15:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007100142-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007100142-20171008220130.partial.mar</a></td>
+ <td>9M</td>
+ <td>09-Oct-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007100142-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007100142-20171009100134.partial.mar</a></td>
+ <td>7M</td>
+ <td>09-Oct-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007220156-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007220156-20171008131700.partial.mar</a></td>
+ <td>6M</td>
+ <td>08-Oct-2017 15:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007220156-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007220156-20171008220130.partial.mar</a></td>
+ <td>9M</td>
+ <td>09-Oct-2017 01:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007220156-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007220156-20171009100134.partial.mar</a></td>
+ <td>7M</td>
+ <td>09-Oct-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007220156-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171007220156-20171009220104.partial.mar</a></td>
+ <td>9M</td>
+ <td>10-Oct-2017 00:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008131700-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008131700-20171008220130.partial.mar</a></td>
+ <td>8M</td>
+ <td>09-Oct-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008131700-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008131700-20171009100134.partial.mar</a></td>
+ <td>7M</td>
+ <td>09-Oct-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008131700-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008131700-20171009220104.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 00:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008131700-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008131700-20171010100200.partial.mar</a></td>
+ <td>9M</td>
+ <td>10-Oct-2017 12:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008220130-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008220130-20171009100134.partial.mar</a></td>
+ <td>9M</td>
+ <td>09-Oct-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008220130-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008220130-20171009220104.partial.mar</a></td>
+ <td>9M</td>
+ <td>10-Oct-2017 00:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008220130-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008220130-20171010100200.partial.mar</a></td>
+ <td>10M</td>
+ <td>10-Oct-2017 12:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008220130-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171008220130-20171010220102.partial.mar</a></td>
+ <td>10M</td>
+ <td>11-Oct-2017 00:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009100134-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009100134-20171009220104.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 00:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009100134-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009100134-20171010100200.partial.mar</a></td>
+ <td>9M</td>
+ <td>10-Oct-2017 12:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009100134-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009100134-20171010220102.partial.mar</a></td>
+ <td>9M</td>
+ <td>11-Oct-2017 00:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009100134-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009100134-20171011100133.partial.mar</a></td>
+ <td>9M</td>
+ <td>11-Oct-2017 17:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009220104-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009220104-20171010100200.partial.mar</a></td>
+ <td>7M</td>
+ <td>10-Oct-2017 12:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009220104-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009220104-20171010220102.partial.mar</a></td>
+ <td>8M</td>
+ <td>11-Oct-2017 00:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009220104-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009220104-20171011100133.partial.mar</a></td>
+ <td>8M</td>
+ <td>11-Oct-2017 17:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009220104-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171009220104-20171011220113.partial.mar</a></td>
+ <td>9M</td>
+ <td>12-Oct-2017 00:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010100200-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010100200-20171010220102.partial.mar</a></td>
+ <td>7M</td>
+ <td>11-Oct-2017 00:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010100200-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010100200-20171011100133.partial.mar</a></td>
+ <td>8M</td>
+ <td>11-Oct-2017 17:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010100200-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010100200-20171011220113.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 00:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010100200-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010100200-20171012100228.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010100200-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010100200-20171012105833.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 15:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010220102-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010220102-20171011100133.partial.mar</a></td>
+ <td>7M</td>
+ <td>11-Oct-2017 17:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010220102-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010220102-20171011220113.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 00:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010220102-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010220102-20171012100228.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010220102-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171010220102-20171012105833.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 15:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011100133-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011100133-20171011220113.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 00:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011100133-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011100133-20171012100228.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011100133-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011100133-20171012105833.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 15:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011100133-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011100133-20171012220111.partial.mar</a></td>
+ <td>9M</td>
+ <td>13-Oct-2017 00:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011220113-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011220113-20171012100228.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011220113-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011220113-20171012105833.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 15:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011220113-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011220113-20171012220111.partial.mar</a></td>
+ <td>9M</td>
+ <td>13-Oct-2017 00:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011220113-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171011220113-20171013100112.partial.mar</a></td>
+ <td>9M</td>
+ <td>13-Oct-2017 12:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012100228-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012100228-20171012220111.partial.mar</a></td>
+ <td>8M</td>
+ <td>13-Oct-2017 00:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012100228-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012100228-20171013100112.partial.mar</a></td>
+ <td>9M</td>
+ <td>13-Oct-2017 12:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012100228-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012100228-20171013220204.partial.mar</a></td>
+ <td>9M</td>
+ <td>14-Oct-2017 01:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012105833-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012105833-20171012220111.partial.mar</a></td>
+ <td>8M</td>
+ <td>13-Oct-2017 00:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012105833-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012105833-20171013100112.partial.mar</a></td>
+ <td>9M</td>
+ <td>13-Oct-2017 12:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012105833-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012105833-20171013220204.partial.mar</a></td>
+ <td>9M</td>
+ <td>14-Oct-2017 01:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012105833-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012105833-20171014100219.partial.mar</a></td>
+ <td>11M</td>
+ <td>14-Oct-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012220111-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012220111-20171013100112.partial.mar</a></td>
+ <td>9M</td>
+ <td>13-Oct-2017 12:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012220111-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012220111-20171013220204.partial.mar</a></td>
+ <td>9M</td>
+ <td>14-Oct-2017 01:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012220111-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012220111-20171014100219.partial.mar</a></td>
+ <td>10M</td>
+ <td>14-Oct-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012220111-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171012220111-20171014220542.partial.mar</a></td>
+ <td>11M</td>
+ <td>15-Oct-2017 01:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013100112-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013100112-20171013220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>14-Oct-2017 01:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013100112-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013100112-20171014100219.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013100112-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013100112-20171014220542.partial.mar</a></td>
+ <td>8M</td>
+ <td>15-Oct-2017 01:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013100112-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013100112-20171015100127.partial.mar</a></td>
+ <td>8M</td>
+ <td>15-Oct-2017 14:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013220204-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013220204-20171014100219.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013220204-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013220204-20171014220542.partial.mar</a></td>
+ <td>8M</td>
+ <td>15-Oct-2017 01:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013220204-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013220204-20171015100127.partial.mar</a></td>
+ <td>9M</td>
+ <td>15-Oct-2017 14:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013220204-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171013220204-20171015220106.partial.mar</a></td>
+ <td>9M</td>
+ <td>16-Oct-2017 01:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014100219-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014100219-20171014220542.partial.mar</a></td>
+ <td>6M</td>
+ <td>15-Oct-2017 01:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014100219-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014100219-20171015100127.partial.mar</a></td>
+ <td>6M</td>
+ <td>15-Oct-2017 14:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014100219-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014100219-20171015220106.partial.mar</a></td>
+ <td>7M</td>
+ <td>16-Oct-2017 01:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014100219-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014100219-20171016100113.partial.mar</a></td>
+ <td>7M</td>
+ <td>16-Oct-2017 12:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014220542-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014220542-20171015100127.partial.mar</a></td>
+ <td>5M</td>
+ <td>15-Oct-2017 14:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014220542-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014220542-20171015220106.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 01:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014220542-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014220542-20171016100113.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 12:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014220542-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171014220542-20171016220427.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 01:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015100127-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015100127-20171015220106.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 01:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015100127-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015100127-20171016100113.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 12:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015100127-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015100127-20171016220427.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 01:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015100127-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015100127-20171017100127.partial.mar</a></td>
+ <td>8M</td>
+ <td>17-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015220106-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015220106-20171016100113.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 12:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015220106-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015220106-20171016220427.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 01:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015220106-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015220106-20171017100127.partial.mar</a></td>
+ <td>8M</td>
+ <td>17-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015220106-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171015220106-20171017141229.partial.mar</a></td>
+ <td>8M</td>
+ <td>17-Oct-2017 17:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016100113-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016100113-20171016220427.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 01:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016100113-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016100113-20171017100127.partial.mar</a></td>
+ <td>8M</td>
+ <td>17-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016100113-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016100113-20171017141229.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 17:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016100113-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016100113-20171017220415.partial.mar</a></td>
+ <td>8M</td>
+ <td>18-Oct-2017 01:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016220427-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016220427-20171017100127.partial.mar</a></td>
+ <td>9M</td>
+ <td>17-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016220427-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016220427-20171017141229.partial.mar</a></td>
+ <td>8M</td>
+ <td>17-Oct-2017 17:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016220427-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016220427-20171017220415.partial.mar</a></td>
+ <td>9M</td>
+ <td>18-Oct-2017 01:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016220427-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171016220427-20171018100140.partial.mar</a></td>
+ <td>9M</td>
+ <td>18-Oct-2017 12:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017100127-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017100127-20171017141229.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 17:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017100127-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017100127-20171017220415.partial.mar</a></td>
+ <td>7M</td>
+ <td>18-Oct-2017 01:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017100127-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017100127-20171018100140.partial.mar</a></td>
+ <td>7M</td>
+ <td>18-Oct-2017 12:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017100127-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017100127-20171018220049.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Oct-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017141229-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017141229-20171017220415.partial.mar</a></td>
+ <td>7M</td>
+ <td>18-Oct-2017 01:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017141229-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017141229-20171018100140.partial.mar</a></td>
+ <td>7M</td>
+ <td>18-Oct-2017 12:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017141229-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017141229-20171018220049.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Oct-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017141229-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017141229-20171019100107.partial.mar</a></td>
+ <td>8M</td>
+ <td>19-Oct-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017220415-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017220415-20171018100140.partial.mar</a></td>
+ <td>7M</td>
+ <td>18-Oct-2017 12:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017220415-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017220415-20171018220049.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Oct-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017220415-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017220415-20171019100107.partial.mar</a></td>
+ <td>8M</td>
+ <td>19-Oct-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017220415-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171017220415-20171019222141.partial.mar</a></td>
+ <td>9M</td>
+ <td>20-Oct-2017 01:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018100140-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018100140-20171018220049.partial.mar</a></td>
+ <td>5M</td>
+ <td>19-Oct-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018100140-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018100140-20171019100107.partial.mar</a></td>
+ <td>8M</td>
+ <td>19-Oct-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018100140-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018100140-20171019222141.partial.mar</a></td>
+ <td>9M</td>
+ <td>20-Oct-2017 01:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018100140-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018100140-20171020100426.partial.mar</a></td>
+ <td>8M</td>
+ <td>20-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018220049-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018220049-20171019100107.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Oct-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018220049-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018220049-20171019222141.partial.mar</a></td>
+ <td>9M</td>
+ <td>20-Oct-2017 01:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018220049-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018220049-20171020100426.partial.mar</a></td>
+ <td>8M</td>
+ <td>20-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018220049-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171018220049-20171020221129.partial.mar</a></td>
+ <td>9M</td>
+ <td>21-Oct-2017 00:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019100107-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019100107-20171019222141.partial.mar</a></td>
+ <td>8M</td>
+ <td>20-Oct-2017 01:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019100107-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019100107-20171020100426.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019100107-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019100107-20171020221129.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Oct-2017 00:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019100107-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019100107-20171021100029.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019222141-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019222141-20171020100426.partial.mar</a></td>
+ <td>8M</td>
+ <td>20-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019222141-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019222141-20171020221129.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 00:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019222141-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019222141-20171021100029.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019222141-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171019222141-20171021220121.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 02:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020100426-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020100426-20171020221129.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Oct-2017 00:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020100426-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020100426-20171021100029.partial.mar</a></td>
+ <td>6M</td>
+ <td>21-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020100426-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020100426-20171021220121.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 02:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020100426-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020100426-20171022100058.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 12:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020221129-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020221129-20171021100029.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020221129-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020221129-20171021220121.partial.mar</a></td>
+ <td>8M</td>
+ <td>22-Oct-2017 02:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020221129-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020221129-20171022100058.partial.mar</a></td>
+ <td>8M</td>
+ <td>22-Oct-2017 12:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020221129-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171020221129-20171022220103.partial.mar</a></td>
+ <td>8M</td>
+ <td>23-Oct-2017 00:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021100029-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021100029-20171021220121.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 02:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021100029-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021100029-20171022100058.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 12:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021100029-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021100029-20171022220103.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Oct-2017 00:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021100029-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021100029-20171023100252.partial.mar</a></td>
+ <td>8M</td>
+ <td>23-Oct-2017 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021220121-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021220121-20171022100058.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Oct-2017 12:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021220121-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021220121-20171022220103.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Oct-2017 00:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021220121-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021220121-20171023100252.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Oct-2017 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021220121-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171021220121-20171023220222.partial.mar</a></td>
+ <td>13M</td>
+ <td>24-Oct-2017 01:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022100058-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022100058-20171022220103.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Oct-2017 00:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022100058-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022100058-20171023100252.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Oct-2017 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022100058-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022100058-20171023220222.partial.mar</a></td>
+ <td>13M</td>
+ <td>24-Oct-2017 01:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022100058-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022100058-20171024100135.partial.mar</a></td>
+ <td>13M</td>
+ <td>24-Oct-2017 12:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022220103-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022220103-20171023100252.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Oct-2017 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022220103-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022220103-20171023220222.partial.mar</a></td>
+ <td>13M</td>
+ <td>24-Oct-2017 01:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022220103-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022220103-20171024100135.partial.mar</a></td>
+ <td>13M</td>
+ <td>24-Oct-2017 12:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022220103-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171022220103-20171024220325.partial.mar</a></td>
+ <td>13M</td>
+ <td>25-Oct-2017 00:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023100252-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023100252-20171023220222.partial.mar</a></td>
+ <td>13M</td>
+ <td>24-Oct-2017 01:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023100252-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023100252-20171024100135.partial.mar</a></td>
+ <td>13M</td>
+ <td>24-Oct-2017 12:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023100252-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023100252-20171024220325.partial.mar</a></td>
+ <td>13M</td>
+ <td>25-Oct-2017 00:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023100252-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023100252-20171025100449.partial.mar</a></td>
+ <td>13M</td>
+ <td>25-Oct-2017 12:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023220222-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023220222-20171024100135.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Oct-2017 12:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023220222-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023220222-20171024220325.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Oct-2017 00:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023220222-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023220222-20171025100449.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Oct-2017 12:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023220222-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171023220222-20171025230440.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Oct-2017 02:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024100135-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024100135-20171024220325.partial.mar</a></td>
+ <td>4M</td>
+ <td>25-Oct-2017 00:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024100135-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024100135-20171025100449.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Oct-2017 12:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024100135-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024100135-20171025230440.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Oct-2017 02:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024100135-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024100135-20171026100047.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Oct-2017 14:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024220325-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024220325-20171025100449.partial.mar</a></td>
+ <td>4M</td>
+ <td>25-Oct-2017 12:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024220325-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024220325-20171025230440.partial.mar</a></td>
+ <td>4M</td>
+ <td>26-Oct-2017 02:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024220325-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024220325-20171026100047.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Oct-2017 14:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024220325-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171024220325-20171026221945.partial.mar</a></td>
+ <td>5M</td>
+ <td>27-Oct-2017 07:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025100449-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025100449-20171025230440.partial.mar</a></td>
+ <td>3M</td>
+ <td>26-Oct-2017 02:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025100449-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025100449-20171026100047.partial.mar</a></td>
+ <td>4M</td>
+ <td>26-Oct-2017 14:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025100449-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025100449-20171026221945.partial.mar</a></td>
+ <td>5M</td>
+ <td>27-Oct-2017 07:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025100449-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025100449-20171027100103.partial.mar</a></td>
+ <td>5M</td>
+ <td>27-Oct-2017 15:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025230440-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025230440-20171026100047.partial.mar</a></td>
+ <td>4M</td>
+ <td>26-Oct-2017 14:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025230440-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025230440-20171026221945.partial.mar</a></td>
+ <td>5M</td>
+ <td>27-Oct-2017 07:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025230440-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025230440-20171027100103.partial.mar</a></td>
+ <td>5M</td>
+ <td>27-Oct-2017 15:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025230440-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171025230440-20171027220059.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Oct-2017 02:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026100047-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026100047-20171026221945.partial.mar</a></td>
+ <td>4M</td>
+ <td>27-Oct-2017 07:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026100047-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026100047-20171027100103.partial.mar</a></td>
+ <td>5M</td>
+ <td>27-Oct-2017 14:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026100047-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026100047-20171027220059.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Oct-2017 02:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026100047-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026100047-20171028100423.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Oct-2017 13:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026221945-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026221945-20171027100103.partial.mar</a></td>
+ <td>4M</td>
+ <td>27-Oct-2017 14:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026221945-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026221945-20171027220059.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Oct-2017 02:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026221945-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026221945-20171028100423.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Oct-2017 13:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026221945-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171026221945-20171028220326.partial.mar</a></td>
+ <td>5M</td>
+ <td>29-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027100103-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027100103-20171027220059.partial.mar</a></td>
+ <td>4M</td>
+ <td>28-Oct-2017 02:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027100103-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027100103-20171028100423.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Oct-2017 13:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027100103-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027100103-20171028220326.partial.mar</a></td>
+ <td>4M</td>
+ <td>29-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027100103-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027100103-20171029102300.partial.mar</a></td>
+ <td>4M</td>
+ <td>29-Oct-2017 14:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027220059-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027220059-20171028100423.partial.mar</a></td>
+ <td>4M</td>
+ <td>28-Oct-2017 13:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027220059-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027220059-20171028220326.partial.mar</a></td>
+ <td>4M</td>
+ <td>29-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027220059-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027220059-20171029102300.partial.mar</a></td>
+ <td>4M</td>
+ <td>29-Oct-2017 14:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027220059-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171027220059-20171029220112.partial.mar</a></td>
+ <td>4M</td>
+ <td>30-Oct-2017 03:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028100423-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028100423-20171028220326.partial.mar</a></td>
+ <td>4M</td>
+ <td>29-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028100423-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028100423-20171029102300.partial.mar</a></td>
+ <td>4M</td>
+ <td>29-Oct-2017 14:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028100423-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028100423-20171029220112.partial.mar</a></td>
+ <td>4M</td>
+ <td>30-Oct-2017 03:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028100423-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028100423-20171030103605.partial.mar</a></td>
+ <td>4M</td>
+ <td>30-Oct-2017 20:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028220326-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028220326-20171029102300.partial.mar</a></td>
+ <td>1M</td>
+ <td>29-Oct-2017 14:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028220326-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028220326-20171029220112.partial.mar</a></td>
+ <td>2M</td>
+ <td>30-Oct-2017 03:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028220326-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028220326-20171030103605.partial.mar</a></td>
+ <td>4M</td>
+ <td>30-Oct-2017 20:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028220326-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028220326-20171031220132.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 03:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028220326-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171028220326-20171031235118.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 10:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029102300-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029102300-20171029220112.partial.mar</a></td>
+ <td>1M</td>
+ <td>30-Oct-2017 03:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029102300-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029102300-20171030103605.partial.mar</a></td>
+ <td>4M</td>
+ <td>30-Oct-2017 20:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029102300-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029102300-20171031220132.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 03:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029102300-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029102300-20171031235118.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 10:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029220112-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029220112-20171030103605.partial.mar</a></td>
+ <td>4M</td>
+ <td>30-Oct-2017 20:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029220112-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029220112-20171031220132.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 03:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029220112-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029220112-20171031235118.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 10:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029220112-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171029220112-20171101104430.partial.mar</a></td>
+ <td>38M</td>
+ <td>01-Nov-2017 17:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171030103605-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171030103605-20171031220132.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 03:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171030103605-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171030103605-20171031235118.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 10:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171030103605-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171030103605-20171101104430.partial.mar</a></td>
+ <td>38M</td>
+ <td>01-Nov-2017 17:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171030103605-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171030103605-20171101220120.partial.mar</a></td>
+ <td>38M</td>
+ <td>02-Nov-2017 00:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031220132-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031220132-20171101104430.partial.mar</a></td>
+ <td>38M</td>
+ <td>01-Nov-2017 17:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031220132-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031220132-20171101220120.partial.mar</a></td>
+ <td>38M</td>
+ <td>02-Nov-2017 00:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031220132-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031220132-20171102100041.partial.mar</a></td>
+ <td>5M</td>
+ <td>02-Nov-2017 12:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031235118-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031235118-20171101104430.partial.mar</a></td>
+ <td>38M</td>
+ <td>01-Nov-2017 17:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031235118-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031235118-20171101220120.partial.mar</a></td>
+ <td>38M</td>
+ <td>02-Nov-2017 00:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031235118-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031235118-20171102100041.partial.mar</a></td>
+ <td>5M</td>
+ <td>02-Nov-2017 12:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031235118-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171031235118-20171102222620.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Nov-2017 00:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101104430-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101104430-20171101220120.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Nov-2017 00:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101104430-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101104430-20171102100041.partial.mar</a></td>
+ <td>25M</td>
+ <td>02-Nov-2017 12:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101104430-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101104430-20171102222620.partial.mar</a></td>
+ <td>25M</td>
+ <td>03-Nov-2017 00:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101104430-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101104430-20171103100331.partial.mar</a></td>
+ <td>25M</td>
+ <td>03-Nov-2017 12:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101220120-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101220120-20171102100041.partial.mar</a></td>
+ <td>24M</td>
+ <td>02-Nov-2017 12:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101220120-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101220120-20171102222620.partial.mar</a></td>
+ <td>25M</td>
+ <td>03-Nov-2017 00:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101220120-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101220120-20171103100331.partial.mar</a></td>
+ <td>24M</td>
+ <td>03-Nov-2017 12:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101220120-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171101220120-20171103220715.partial.mar</a></td>
+ <td>24M</td>
+ <td>04-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102100041-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102100041-20171102222620.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Nov-2017 00:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102100041-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102100041-20171103100331.partial.mar</a></td>
+ <td>5M</td>
+ <td>03-Nov-2017 12:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102100041-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102100041-20171103220715.partial.mar</a></td>
+ <td>5M</td>
+ <td>04-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102100041-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102100041-20171104100412.partial.mar</a></td>
+ <td>6M</td>
+ <td>04-Nov-2017 12:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102222620-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102222620-20171103100331.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Nov-2017 12:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102222620-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102222620-20171103220715.partial.mar</a></td>
+ <td>4M</td>
+ <td>04-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102222620-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102222620-20171104100412.partial.mar</a></td>
+ <td>5M</td>
+ <td>04-Nov-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102222620-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171102222620-20171104220420.partial.mar</a></td>
+ <td>5M</td>
+ <td>05-Nov-2017 00:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103100331-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103100331-20171103220715.partial.mar</a></td>
+ <td>3M</td>
+ <td>04-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103100331-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103100331-20171104100412.partial.mar</a></td>
+ <td>5M</td>
+ <td>04-Nov-2017 12:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103100331-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103100331-20171104220420.partial.mar</a></td>
+ <td>5M</td>
+ <td>05-Nov-2017 00:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103100331-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103100331-20171105100353.partial.mar</a></td>
+ <td>5M</td>
+ <td>05-Nov-2017 12:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103220715-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103220715-20171104100412.partial.mar</a></td>
+ <td>5M</td>
+ <td>04-Nov-2017 12:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103220715-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103220715-20171104220420.partial.mar</a></td>
+ <td>5M</td>
+ <td>05-Nov-2017 00:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103220715-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103220715-20171105100353.partial.mar</a></td>
+ <td>5M</td>
+ <td>05-Nov-2017 12:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103220715-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171103220715-20171105220721.partial.mar</a></td>
+ <td>5M</td>
+ <td>06-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104100412-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104100412-20171104220420.partial.mar</a></td>
+ <td>3M</td>
+ <td>05-Nov-2017 00:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104100412-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104100412-20171105100353.partial.mar</a></td>
+ <td>3M</td>
+ <td>05-Nov-2017 12:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104100412-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104100412-20171105220721.partial.mar</a></td>
+ <td>3M</td>
+ <td>06-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104100412-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104100412-20171106100122.partial.mar</a></td>
+ <td>4M</td>
+ <td>06-Nov-2017 11:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104220420-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104220420-20171105100353.partial.mar</a></td>
+ <td>46K</td>
+ <td>05-Nov-2017 12:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104220420-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104220420-20171105220721.partial.mar</a></td>
+ <td>469K</td>
+ <td>06-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104220420-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171104220420-20171106100122.partial.mar</a></td>
+ <td>3M</td>
+ <td>06-Nov-2017 11:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171105100353-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171105100353-20171105220721.partial.mar</a></td>
+ <td>466K</td>
+ <td>06-Nov-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171105100353-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171105100353-20171106100122.partial.mar</a></td>
+ <td>3M</td>
+ <td>06-Nov-2017 11:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-i686-en-US-20171105220721-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-linux-i686-en-US-20171105220721-20171106100122.partial.mar</a></td>
+ <td>3M</td>
+ <td>06-Nov-2017 11:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170919220202-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170919220202-20170921220243.partial.mar</a></td>
+ <td>8M</td>
+ <td>22-Sep-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170920100426-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170920100426-20170921220243.partial.mar</a></td>
+ <td>8M</td>
+ <td>22-Sep-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170920100426-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170920100426-20170922100051.partial.mar</a></td>
+ <td>8M</td>
+ <td>22-Sep-2017 12:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170920220431-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170920220431-20170921220243.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170920220431-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170920220431-20170922100051.partial.mar</a></td>
+ <td>8M</td>
+ <td>22-Sep-2017 12:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170920220431-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170920220431-20170922220129.partial.mar</a></td>
+ <td>8M</td>
+ <td>23-Sep-2017 01:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921100141-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921100141-20170921220243.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Sep-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921100141-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921100141-20170922100051.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 12:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921100141-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921100141-20170922220129.partial.mar</a></td>
+ <td>8M</td>
+ <td>23-Sep-2017 01:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921100141-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921100141-20170923100045.partial.mar</a></td>
+ <td>8M</td>
+ <td>23-Sep-2017 13:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921220243-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921220243-20170922100051.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 12:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921220243-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921220243-20170922220129.partial.mar</a></td>
+ <td>8M</td>
+ <td>23-Sep-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921220243-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921220243-20170923100045.partial.mar</a></td>
+ <td>8M</td>
+ <td>23-Sep-2017 13:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921220243-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170921220243-20170923220337.partial.mar</a></td>
+ <td>8M</td>
+ <td>24-Sep-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922100051-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922100051-20170922220129.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Sep-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922100051-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922100051-20170923100045.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Sep-2017 13:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922100051-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922100051-20170923220337.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Sep-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922100051-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922100051-20170924100550.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Sep-2017 13:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922220129-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922220129-20170923100045.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Sep-2017 13:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922220129-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922220129-20170923220337.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Sep-2017 00:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922220129-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922220129-20170924100550.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Sep-2017 13:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922220129-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170922220129-20170924220116.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923100045-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923100045-20170923220337.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Sep-2017 00:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923100045-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923100045-20170924100550.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Sep-2017 13:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923100045-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923100045-20170924220116.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923100045-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923100045-20170925100307.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923220337-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923220337-20170924100550.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Sep-2017 13:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923220337-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923220337-20170924220116.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923220337-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923220337-20170925100307.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923220337-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170923220337-20170925220207.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924100550-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924100550-20170924220116.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 01:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924100550-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924100550-20170925100307.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924100550-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924100550-20170925220207.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Sep-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924100550-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924100550-20170926100259.partial.mar</a></td>
+ <td>8M</td>
+ <td>26-Sep-2017 12:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924220116-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924220116-20170925100307.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924220116-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924220116-20170925220207.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924220116-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924220116-20170926100259.partial.mar</a></td>
+ <td>8M</td>
+ <td>26-Sep-2017 12:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924220116-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170924220116-20170926220106.partial.mar</a></td>
+ <td>8M</td>
+ <td>27-Sep-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925100307-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925100307-20170925220207.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Sep-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925100307-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925100307-20170926100259.partial.mar</a></td>
+ <td>8M</td>
+ <td>26-Sep-2017 12:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925100307-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925100307-20170926220106.partial.mar</a></td>
+ <td>8M</td>
+ <td>27-Sep-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925100307-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925100307-20170928100123.partial.mar</a></td>
+ <td>9M</td>
+ <td>28-Sep-2017 12:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925220207-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925220207-20170926100259.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 12:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925220207-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925220207-20170926220106.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Sep-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925220207-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925220207-20170928100123.partial.mar</a></td>
+ <td>9M</td>
+ <td>28-Sep-2017 12:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925220207-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170925220207-20170928220658.partial.mar</a></td>
+ <td>9M</td>
+ <td>29-Sep-2017 00:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926100259-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926100259-20170926220106.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Sep-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926100259-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926100259-20170928100123.partial.mar</a></td>
+ <td>9M</td>
+ <td>28-Sep-2017 12:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926100259-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926100259-20170928220658.partial.mar</a></td>
+ <td>8M</td>
+ <td>29-Sep-2017 00:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926100259-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926100259-20170929100122.partial.mar</a></td>
+ <td>9M</td>
+ <td>29-Sep-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926220106-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926220106-20170928100123.partial.mar</a></td>
+ <td>8M</td>
+ <td>28-Sep-2017 12:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926220106-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926220106-20170928220658.partial.mar</a></td>
+ <td>8M</td>
+ <td>29-Sep-2017 00:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926220106-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926220106-20170929100122.partial.mar</a></td>
+ <td>8M</td>
+ <td>29-Sep-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926220106-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170926220106-20170929220356.partial.mar</a></td>
+ <td>9M</td>
+ <td>30-Sep-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928100123-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928100123-20170928220658.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Sep-2017 00:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928100123-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928100123-20170929100122.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Sep-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928100123-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928100123-20170929220356.partial.mar</a></td>
+ <td>8M</td>
+ <td>30-Sep-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928100123-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928100123-20170930100302.partial.mar</a></td>
+ <td>8M</td>
+ <td>30-Sep-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928220658-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928220658-20170929100122.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Sep-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928220658-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928220658-20170929220356.partial.mar</a></td>
+ <td>8M</td>
+ <td>30-Sep-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928220658-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928220658-20170930100302.partial.mar</a></td>
+ <td>8M</td>
+ <td>30-Sep-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928220658-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170928220658-20170930220116.partial.mar</a></td>
+ <td>8M</td>
+ <td>01-Oct-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929100122-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929100122-20170929220356.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Sep-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929100122-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929100122-20170930100302.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Sep-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929100122-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929100122-20170930220116.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929100122-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929100122-20171001100335.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929220356-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929220356-20170930100302.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Sep-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929220356-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929220356-20170930220116.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929220356-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929220356-20171001100335.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929220356-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170929220356-20171001220301.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Oct-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930100302-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930100302-20170930220116.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Oct-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930100302-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930100302-20171001100335.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930100302-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930100302-20171001220301.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Oct-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930100302-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930100302-20171002100134.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Oct-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930220116-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930220116-20171001100335.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Oct-2017 12:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930220116-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930220116-20171001220301.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930220116-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930220116-20171002100134.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Oct-2017 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930220116-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20170930220116-20171002220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 00:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001100335-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001100335-20171001220301.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001100335-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001100335-20171002100134.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 12:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001100335-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001100335-20171002220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 00:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001100335-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001100335-20171003100226.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001220301-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001220301-20171002100134.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 12:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001220301-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001220301-20171002220204.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Oct-2017 00:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001220301-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001220301-20171003100226.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001220301-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171001220301-20171003220138.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 00:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002100134-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002100134-20171002220204.partial.mar</a></td>
+ <td>5M</td>
+ <td>03-Oct-2017 00:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002100134-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002100134-20171003100226.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002100134-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002100134-20171003220138.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 00:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002100134-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002100134-20171004220309.partial.mar</a></td>
+ <td>11M</td>
+ <td>05-Oct-2017 00:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002220204-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002220204-20171003100226.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002220204-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002220204-20171003220138.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 00:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002220204-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002220204-20171004220309.partial.mar</a></td>
+ <td>11M</td>
+ <td>05-Oct-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002220204-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171002220204-20171005100211.partial.mar</a></td>
+ <td>11M</td>
+ <td>05-Oct-2017 12:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003100226-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003100226-20171003220138.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 00:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003100226-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003100226-20171004220309.partial.mar</a></td>
+ <td>10M</td>
+ <td>05-Oct-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003100226-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003100226-20171005100211.partial.mar</a></td>
+ <td>11M</td>
+ <td>05-Oct-2017 12:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003100226-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003100226-20171005220204.partial.mar</a></td>
+ <td>11M</td>
+ <td>06-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003220138-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003220138-20171004220309.partial.mar</a></td>
+ <td>10M</td>
+ <td>05-Oct-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003220138-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003220138-20171005100211.partial.mar</a></td>
+ <td>10M</td>
+ <td>05-Oct-2017 12:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003220138-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003220138-20171005220204.partial.mar</a></td>
+ <td>11M</td>
+ <td>06-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003220138-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171003220138-20171006100327.partial.mar</a></td>
+ <td>10M</td>
+ <td>06-Oct-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171004220309-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171004220309-20171005100211.partial.mar</a></td>
+ <td>6M</td>
+ <td>05-Oct-2017 12:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171004220309-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171004220309-20171005220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171004220309-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171004220309-20171006100327.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Oct-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171004220309-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171004220309-20171006220306.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 00:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005100211-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005100211-20171005220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005100211-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005100211-20171006100327.partial.mar</a></td>
+ <td>5M</td>
+ <td>06-Oct-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005100211-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005100211-20171006220306.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 00:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005100211-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005100211-20171007100142.partial.mar</a></td>
+ <td>9M</td>
+ <td>07-Oct-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005220204-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005220204-20171006100327.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Oct-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005220204-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005220204-20171006220306.partial.mar</a></td>
+ <td>8M</td>
+ <td>07-Oct-2017 00:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005220204-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005220204-20171007100142.partial.mar</a></td>
+ <td>8M</td>
+ <td>07-Oct-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005220204-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171005220204-20171007220156.partial.mar</a></td>
+ <td>9M</td>
+ <td>08-Oct-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006100327-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006100327-20171006220306.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 00:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006100327-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006100327-20171007100142.partial.mar</a></td>
+ <td>9M</td>
+ <td>07-Oct-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006100327-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006100327-20171007220156.partial.mar</a></td>
+ <td>9M</td>
+ <td>08-Oct-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006100327-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006100327-20171008131700.partial.mar</a></td>
+ <td>9M</td>
+ <td>08-Oct-2017 15:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006220306-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006220306-20171007100142.partial.mar</a></td>
+ <td>8M</td>
+ <td>07-Oct-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006220306-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006220306-20171007220156.partial.mar</a></td>
+ <td>8M</td>
+ <td>08-Oct-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006220306-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006220306-20171008131700.partial.mar</a></td>
+ <td>8M</td>
+ <td>08-Oct-2017 15:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006220306-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171006220306-20171008220130.partial.mar</a></td>
+ <td>8M</td>
+ <td>09-Oct-2017 00:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007100142-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007100142-20171007220156.partial.mar</a></td>
+ <td>6M</td>
+ <td>08-Oct-2017 00:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007100142-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007100142-20171008131700.partial.mar</a></td>
+ <td>7M</td>
+ <td>08-Oct-2017 15:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007100142-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007100142-20171008220130.partial.mar</a></td>
+ <td>7M</td>
+ <td>09-Oct-2017 00:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007100142-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007100142-20171009100134.partial.mar</a></td>
+ <td>8M</td>
+ <td>09-Oct-2017 12:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007220156-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007220156-20171008131700.partial.mar</a></td>
+ <td>7M</td>
+ <td>08-Oct-2017 15:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007220156-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007220156-20171008220130.partial.mar</a></td>
+ <td>6M</td>
+ <td>09-Oct-2017 00:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007220156-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007220156-20171009100134.partial.mar</a></td>
+ <td>8M</td>
+ <td>09-Oct-2017 12:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007220156-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171007220156-20171009220104.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 00:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008131700-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008131700-20171008220130.partial.mar</a></td>
+ <td>5M</td>
+ <td>09-Oct-2017 00:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008131700-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008131700-20171009100134.partial.mar</a></td>
+ <td>7M</td>
+ <td>09-Oct-2017 12:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008131700-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008131700-20171009220104.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 00:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008131700-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008131700-20171010100200.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 12:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008220130-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008220130-20171009100134.partial.mar</a></td>
+ <td>8M</td>
+ <td>09-Oct-2017 12:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008220130-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008220130-20171009220104.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 00:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008220130-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008220130-20171010100200.partial.mar</a></td>
+ <td>9M</td>
+ <td>10-Oct-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008220130-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171008220130-20171010220102.partial.mar</a></td>
+ <td>9M</td>
+ <td>11-Oct-2017 00:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009100134-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009100134-20171009220104.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 00:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009100134-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009100134-20171010100200.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009100134-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009100134-20171010220102.partial.mar</a></td>
+ <td>9M</td>
+ <td>11-Oct-2017 00:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009100134-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009100134-20171011100133.partial.mar</a></td>
+ <td>9M</td>
+ <td>11-Oct-2017 17:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009220104-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009220104-20171010100200.partial.mar</a></td>
+ <td>7M</td>
+ <td>10-Oct-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009220104-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009220104-20171010220102.partial.mar</a></td>
+ <td>8M</td>
+ <td>11-Oct-2017 00:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009220104-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009220104-20171011100133.partial.mar</a></td>
+ <td>8M</td>
+ <td>11-Oct-2017 17:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009220104-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171009220104-20171011220113.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 00:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010100200-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010100200-20171010220102.partial.mar</a></td>
+ <td>7M</td>
+ <td>11-Oct-2017 00:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010100200-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010100200-20171011100133.partial.mar</a></td>
+ <td>8M</td>
+ <td>11-Oct-2017 17:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010100200-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010100200-20171011220113.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 00:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010100200-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010100200-20171012100228.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010100200-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010100200-20171012105833.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 15:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010220102-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010220102-20171011100133.partial.mar</a></td>
+ <td>7M</td>
+ <td>11-Oct-2017 17:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010220102-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010220102-20171011220113.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 00:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010220102-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010220102-20171012100228.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 13:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010220102-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171010220102-20171012105833.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 15:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011100133-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011100133-20171011220113.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 00:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011100133-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011100133-20171012100228.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 13:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011100133-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011100133-20171012105833.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 15:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011100133-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011100133-20171012220111.partial.mar</a></td>
+ <td>8M</td>
+ <td>13-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011220113-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011220113-20171012100228.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011220113-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011220113-20171012105833.partial.mar</a></td>
+ <td>6M</td>
+ <td>12-Oct-2017 15:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011220113-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011220113-20171012220111.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011220113-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171011220113-20171013100112.partial.mar</a></td>
+ <td>9M</td>
+ <td>13-Oct-2017 12:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012100228-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012100228-20171012220111.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012100228-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012100228-20171013100112.partial.mar</a></td>
+ <td>9M</td>
+ <td>13-Oct-2017 12:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012100228-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012100228-20171013220204.partial.mar</a></td>
+ <td>9M</td>
+ <td>14-Oct-2017 00:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012105833-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012105833-20171012220111.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 00:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012105833-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012105833-20171013100112.partial.mar</a></td>
+ <td>9M</td>
+ <td>13-Oct-2017 12:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012105833-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012105833-20171013220204.partial.mar</a></td>
+ <td>9M</td>
+ <td>14-Oct-2017 00:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012105833-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012105833-20171014100219.partial.mar</a></td>
+ <td>10M</td>
+ <td>14-Oct-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012220111-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012220111-20171013100112.partial.mar</a></td>
+ <td>8M</td>
+ <td>13-Oct-2017 12:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012220111-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012220111-20171013220204.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 00:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012220111-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012220111-20171014100219.partial.mar</a></td>
+ <td>10M</td>
+ <td>14-Oct-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012220111-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171012220111-20171014220542.partial.mar</a></td>
+ <td>10M</td>
+ <td>15-Oct-2017 01:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013100112-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013100112-20171013220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>14-Oct-2017 00:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013100112-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013100112-20171014100219.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013100112-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013100112-20171014220542.partial.mar</a></td>
+ <td>9M</td>
+ <td>15-Oct-2017 01:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013100112-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013100112-20171015100127.partial.mar</a></td>
+ <td>9M</td>
+ <td>15-Oct-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013220204-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013220204-20171014100219.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013220204-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013220204-20171014220542.partial.mar</a></td>
+ <td>9M</td>
+ <td>15-Oct-2017 01:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013220204-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013220204-20171015100127.partial.mar</a></td>
+ <td>8M</td>
+ <td>15-Oct-2017 13:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013220204-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171013220204-20171015220106.partial.mar</a></td>
+ <td>8M</td>
+ <td>16-Oct-2017 01:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014100219-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014100219-20171014220542.partial.mar</a></td>
+ <td>7M</td>
+ <td>15-Oct-2017 01:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014100219-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014100219-20171015100127.partial.mar</a></td>
+ <td>6M</td>
+ <td>15-Oct-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014100219-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014100219-20171015220106.partial.mar</a></td>
+ <td>7M</td>
+ <td>16-Oct-2017 01:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014100219-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014100219-20171016100113.partial.mar</a></td>
+ <td>7M</td>
+ <td>16-Oct-2017 12:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014220542-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014220542-20171015100127.partial.mar</a></td>
+ <td>6M</td>
+ <td>15-Oct-2017 13:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014220542-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014220542-20171015220106.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 01:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014220542-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014220542-20171016100113.partial.mar</a></td>
+ <td>7M</td>
+ <td>16-Oct-2017 12:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014220542-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171014220542-20171016220427.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 01:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015100127-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015100127-20171015220106.partial.mar</a></td>
+ <td>5M</td>
+ <td>16-Oct-2017 01:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015100127-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015100127-20171016100113.partial.mar</a></td>
+ <td>7M</td>
+ <td>16-Oct-2017 12:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015100127-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015100127-20171016220427.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 01:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015100127-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015100127-20171017100127.partial.mar</a></td>
+ <td>8M</td>
+ <td>17-Oct-2017 12:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015220106-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015220106-20171016100113.partial.mar</a></td>
+ <td>7M</td>
+ <td>16-Oct-2017 12:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015220106-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015220106-20171016220427.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 01:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015220106-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015220106-20171017100127.partial.mar</a></td>
+ <td>8M</td>
+ <td>17-Oct-2017 12:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015220106-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171015220106-20171017141229.partial.mar</a></td>
+ <td>8M</td>
+ <td>17-Oct-2017 17:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016100113-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016100113-20171016220427.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 01:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016100113-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016100113-20171017100127.partial.mar</a></td>
+ <td>8M</td>
+ <td>17-Oct-2017 12:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016100113-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016100113-20171017141229.partial.mar</a></td>
+ <td>8M</td>
+ <td>17-Oct-2017 17:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016100113-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016100113-20171017220415.partial.mar</a></td>
+ <td>8M</td>
+ <td>18-Oct-2017 01:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016220427-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016220427-20171017100127.partial.mar</a></td>
+ <td>8M</td>
+ <td>17-Oct-2017 12:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016220427-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016220427-20171017141229.partial.mar</a></td>
+ <td>8M</td>
+ <td>17-Oct-2017 17:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016220427-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016220427-20171017220415.partial.mar</a></td>
+ <td>9M</td>
+ <td>18-Oct-2017 01:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016220427-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171016220427-20171018100140.partial.mar</a></td>
+ <td>8M</td>
+ <td>18-Oct-2017 12:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017100127-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017100127-20171017141229.partial.mar</a></td>
+ <td>5M</td>
+ <td>17-Oct-2017 17:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017100127-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017100127-20171017220415.partial.mar</a></td>
+ <td>8M</td>
+ <td>18-Oct-2017 01:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017100127-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017100127-20171018100140.partial.mar</a></td>
+ <td>8M</td>
+ <td>18-Oct-2017 12:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017100127-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017100127-20171018220049.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Oct-2017 01:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017141229-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017141229-20171017220415.partial.mar</a></td>
+ <td>8M</td>
+ <td>18-Oct-2017 01:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017141229-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017141229-20171018100140.partial.mar</a></td>
+ <td>8M</td>
+ <td>18-Oct-2017 12:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017141229-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017141229-20171018220049.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Oct-2017 01:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017141229-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017141229-20171019100107.partial.mar</a></td>
+ <td>8M</td>
+ <td>19-Oct-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017220415-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017220415-20171018100140.partial.mar</a></td>
+ <td>7M</td>
+ <td>18-Oct-2017 12:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017220415-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017220415-20171018220049.partial.mar</a></td>
+ <td>8M</td>
+ <td>19-Oct-2017 01:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017220415-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017220415-20171019100107.partial.mar</a></td>
+ <td>8M</td>
+ <td>19-Oct-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017220415-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171017220415-20171019222141.partial.mar</a></td>
+ <td>9M</td>
+ <td>20-Oct-2017 01:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018100140-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018100140-20171018220049.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Oct-2017 01:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018100140-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018100140-20171019100107.partial.mar</a></td>
+ <td>8M</td>
+ <td>19-Oct-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018100140-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018100140-20171019222141.partial.mar</a></td>
+ <td>8M</td>
+ <td>20-Oct-2017 01:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018100140-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018100140-20171020100426.partial.mar</a></td>
+ <td>8M</td>
+ <td>20-Oct-2017 12:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018220049-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018220049-20171019100107.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Oct-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018220049-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018220049-20171019222141.partial.mar</a></td>
+ <td>8M</td>
+ <td>20-Oct-2017 01:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018220049-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018220049-20171020100426.partial.mar</a></td>
+ <td>8M</td>
+ <td>20-Oct-2017 12:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018220049-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171018220049-20171020221129.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019100107-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019100107-20171019222141.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Oct-2017 01:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019100107-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019100107-20171020100426.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Oct-2017 12:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019100107-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019100107-20171020221129.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019100107-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019100107-20171021100029.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019222141-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019222141-20171020100426.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Oct-2017 12:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019222141-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019222141-20171020221129.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019222141-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019222141-20171021100029.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019222141-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171019222141-20171021220121.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 02:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020100426-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020100426-20171020221129.partial.mar</a></td>
+ <td>8M</td>
+ <td>21-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020100426-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020100426-20171021100029.partial.mar</a></td>
+ <td>6M</td>
+ <td>21-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020100426-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020100426-20171021220121.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 02:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020100426-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020100426-20171022100058.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 12:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020221129-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020221129-20171021100029.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020221129-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020221129-20171021220121.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 02:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020221129-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020221129-20171022100058.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 12:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020221129-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171020221129-20171022220103.partial.mar</a></td>
+ <td>8M</td>
+ <td>23-Oct-2017 00:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021100029-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021100029-20171021220121.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Oct-2017 02:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021100029-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021100029-20171022100058.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Oct-2017 12:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021100029-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021100029-20171022220103.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Oct-2017 00:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021100029-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021100029-20171023100252.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Oct-2017 12:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021220121-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021220121-20171022100058.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Oct-2017 12:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021220121-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021220121-20171022220103.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Oct-2017 00:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021220121-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021220121-20171023100252.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Oct-2017 12:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021220121-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171021220121-20171023220222.partial.mar</a></td>
+ <td>8M</td>
+ <td>24-Oct-2017 00:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022100058-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022100058-20171022220103.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Oct-2017 00:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022100058-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022100058-20171023100252.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Oct-2017 12:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022100058-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022100058-20171023220222.partial.mar</a></td>
+ <td>8M</td>
+ <td>24-Oct-2017 00:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022100058-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022100058-20171024100135.partial.mar</a></td>
+ <td>8M</td>
+ <td>24-Oct-2017 12:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022220103-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022220103-20171023100252.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Oct-2017 12:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022220103-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022220103-20171023220222.partial.mar</a></td>
+ <td>8M</td>
+ <td>24-Oct-2017 00:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022220103-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022220103-20171024100135.partial.mar</a></td>
+ <td>8M</td>
+ <td>24-Oct-2017 12:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022220103-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171022220103-20171024220325.partial.mar</a></td>
+ <td>9M</td>
+ <td>25-Oct-2017 00:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023100252-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023100252-20171023220222.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Oct-2017 00:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023100252-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023100252-20171024100135.partial.mar</a></td>
+ <td>8M</td>
+ <td>24-Oct-2017 12:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023100252-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023100252-20171024220325.partial.mar</a></td>
+ <td>8M</td>
+ <td>25-Oct-2017 00:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023100252-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023100252-20171025100449.partial.mar</a></td>
+ <td>8M</td>
+ <td>25-Oct-2017 12:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023220222-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023220222-20171024100135.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Oct-2017 12:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023220222-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023220222-20171024220325.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Oct-2017 00:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023220222-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023220222-20171025100449.partial.mar</a></td>
+ <td>8M</td>
+ <td>25-Oct-2017 12:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023220222-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171023220222-20171025230440.partial.mar</a></td>
+ <td>8M</td>
+ <td>26-Oct-2017 02:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024100135-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024100135-20171024220325.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Oct-2017 00:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024100135-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024100135-20171025100449.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Oct-2017 12:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024100135-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024100135-20171025230440.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 02:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024100135-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024100135-20171026100047.partial.mar</a></td>
+ <td>8M</td>
+ <td>26-Oct-2017 14:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024220325-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024220325-20171025100449.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Oct-2017 12:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024220325-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024220325-20171025230440.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 02:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024220325-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024220325-20171026100047.partial.mar</a></td>
+ <td>8M</td>
+ <td>26-Oct-2017 14:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024220325-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171024220325-20171026221945.partial.mar</a></td>
+ <td>8M</td>
+ <td>27-Oct-2017 02:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025100449-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025100449-20171025230440.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 02:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025100449-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025100449-20171026100047.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 14:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025100449-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025100449-20171026221945.partial.mar</a></td>
+ <td>8M</td>
+ <td>27-Oct-2017 02:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025100449-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025100449-20171027100103.partial.mar</a></td>
+ <td>8M</td>
+ <td>27-Oct-2017 15:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025230440-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025230440-20171026100047.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 14:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025230440-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025230440-20171026221945.partial.mar</a></td>
+ <td>8M</td>
+ <td>27-Oct-2017 02:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025230440-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025230440-20171027100103.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 15:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025230440-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171025230440-20171027220059.partial.mar</a></td>
+ <td>8M</td>
+ <td>28-Oct-2017 02:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026100047-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026100047-20171026221945.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 02:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026100047-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026100047-20171027100103.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 15:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026100047-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026100047-20171027220059.partial.mar</a></td>
+ <td>8M</td>
+ <td>28-Oct-2017 02:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026100047-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026100047-20171028100423.partial.mar</a></td>
+ <td>8M</td>
+ <td>28-Oct-2017 14:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026221945-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026221945-20171027100103.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 15:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026221945-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026221945-20171027220059.partial.mar</a></td>
+ <td>7M</td>
+ <td>28-Oct-2017 02:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026221945-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026221945-20171028100423.partial.mar</a></td>
+ <td>8M</td>
+ <td>28-Oct-2017 14:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026221945-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171026221945-20171028220326.partial.mar</a></td>
+ <td>8M</td>
+ <td>29-Oct-2017 01:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027100103-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027100103-20171027220059.partial.mar</a></td>
+ <td>7M</td>
+ <td>28-Oct-2017 02:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027100103-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027100103-20171028100423.partial.mar</a></td>
+ <td>7M</td>
+ <td>28-Oct-2017 14:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027100103-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027100103-20171028220326.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Oct-2017 01:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027100103-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027100103-20171029102300.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Oct-2017 14:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027220059-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027220059-20171028100423.partial.mar</a></td>
+ <td>7M</td>
+ <td>28-Oct-2017 14:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027220059-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027220059-20171028220326.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Oct-2017 01:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027220059-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027220059-20171029102300.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Oct-2017 14:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027220059-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171027220059-20171029220112.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 03:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028100423-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028100423-20171028220326.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Oct-2017 01:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028100423-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028100423-20171029102300.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Oct-2017 14:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028100423-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028100423-20171029220112.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 03:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028100423-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028100423-20171030103605.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 19:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028220326-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028220326-20171029102300.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Oct-2017 14:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028220326-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028220326-20171029220112.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 03:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028220326-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028220326-20171030103605.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 19:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028220326-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028220326-20171031220132.partial.mar</a></td>
+ <td>8M</td>
+ <td>01-Nov-2017 03:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028220326-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171028220326-20171031235118.partial.mar</a></td>
+ <td>8M</td>
+ <td>01-Nov-2017 10:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029102300-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029102300-20171029220112.partial.mar</a></td>
+ <td>5M</td>
+ <td>30-Oct-2017 03:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029102300-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029102300-20171030103605.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 19:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029102300-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029102300-20171031220132.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 03:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029102300-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029102300-20171031235118.partial.mar</a></td>
+ <td>8M</td>
+ <td>01-Nov-2017 10:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029220112-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029220112-20171030103605.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 19:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029220112-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029220112-20171031220132.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 03:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029220112-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029220112-20171031235118.partial.mar</a></td>
+ <td>8M</td>
+ <td>01-Nov-2017 10:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029220112-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171029220112-20171101104430.partial.mar</a></td>
+ <td>8M</td>
+ <td>01-Nov-2017 16:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171030103605-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171030103605-20171031220132.partial.mar</a></td>
+ <td>8M</td>
+ <td>01-Nov-2017 03:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171030103605-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171030103605-20171031235118.partial.mar</a></td>
+ <td>8M</td>
+ <td>01-Nov-2017 10:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171030103605-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171030103605-20171101104430.partial.mar</a></td>
+ <td>8M</td>
+ <td>01-Nov-2017 16:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171030103605-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171030103605-20171101220120.partial.mar</a></td>
+ <td>9M</td>
+ <td>02-Nov-2017 00:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031220132-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031220132-20171101104430.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 16:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031220132-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031220132-20171101220120.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Nov-2017 00:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031220132-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031220132-20171102222620.partial.mar</a></td>
+ <td>8M</td>
+ <td>03-Nov-2017 00:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031235118-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031235118-20171101104430.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 16:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031235118-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031235118-20171101220120.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Nov-2017 00:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031235118-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031235118-20171102222620.partial.mar</a></td>
+ <td>8M</td>
+ <td>03-Nov-2017 00:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031235118-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171031235118-20171103100331.partial.mar</a></td>
+ <td>8M</td>
+ <td>03-Nov-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101104430-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101104430-20171101220120.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Nov-2017 00:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101104430-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101104430-20171102222620.partial.mar</a></td>
+ <td>8M</td>
+ <td>03-Nov-2017 00:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101104430-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101104430-20171103100331.partial.mar</a></td>
+ <td>8M</td>
+ <td>03-Nov-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101104430-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101104430-20171103220715.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Nov-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101220120-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101220120-20171102222620.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Nov-2017 00:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101220120-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101220120-20171103100331.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Nov-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101220120-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101220120-20171103220715.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Nov-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101220120-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171101220120-20171104100412.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Nov-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171102222620-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171102222620-20171103100331.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Nov-2017 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171102222620-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171102222620-20171103220715.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Nov-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171102222620-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171102222620-20171104100412.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Nov-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171102222620-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171102222620-20171104220420.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Nov-2017 23:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103100331-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103100331-20171103220715.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Nov-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103100331-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103100331-20171104100412.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Nov-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103100331-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103100331-20171104220420.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Nov-2017 23:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103100331-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103100331-20171105100353.partial.mar</a></td>
+ <td>8M</td>
+ <td>05-Nov-2017 12:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103220715-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103220715-20171104100412.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Nov-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103220715-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103220715-20171104220420.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Nov-2017 23:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103220715-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103220715-20171105100353.partial.mar</a></td>
+ <td>8M</td>
+ <td>05-Nov-2017 12:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103220715-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171103220715-20171105220721.partial.mar</a></td>
+ <td>8M</td>
+ <td>06-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104100412-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104100412-20171104220420.partial.mar</a></td>
+ <td>6M</td>
+ <td>04-Nov-2017 23:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104100412-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104100412-20171105100353.partial.mar</a></td>
+ <td>6M</td>
+ <td>05-Nov-2017 12:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104100412-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104100412-20171105220721.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104100412-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104100412-20171106100122.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Nov-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104220420-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104220420-20171105100353.partial.mar</a></td>
+ <td>6M</td>
+ <td>05-Nov-2017 12:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104220420-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104220420-20171105220721.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104220420-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171104220420-20171106100122.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Nov-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171105100353-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171105100353-20171105220721.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Nov-2017 00:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171105100353-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171105100353-20171106100122.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Nov-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171105220721-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-linux-x86_64-en-US-20171105220721-20171106100122.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Nov-2017 12:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170919220202-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170919220202-20170921220243.partial.mar</a></td>
+ <td>4M</td>
+ <td>21-Sep-2017 23:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170920100426-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170920100426-20170921220243.partial.mar</a></td>
+ <td>4M</td>
+ <td>21-Sep-2017 23:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170920100426-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170920100426-20170922100051.partial.mar</a></td>
+ <td>4M</td>
+ <td>22-Sep-2017 11:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170920220431-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170920220431-20170921220243.partial.mar</a></td>
+ <td>4M</td>
+ <td>21-Sep-2017 23:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170920220431-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170920220431-20170922100051.partial.mar</a></td>
+ <td>4M</td>
+ <td>22-Sep-2017 11:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170920220431-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170920220431-20170922220129.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Sep-2017 23:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170921100141-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170921100141-20170921220243.partial.mar</a></td>
+ <td>3M</td>
+ <td>21-Sep-2017 23:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170921100141-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170921100141-20170922100051.partial.mar</a></td>
+ <td>4M</td>
+ <td>22-Sep-2017 11:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170921100141-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170921100141-20170922220129.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Sep-2017 23:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170921100141-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170921100141-20170923100045.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Sep-2017 11:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170921220243-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170921220243-20170922100051.partial.mar</a></td>
+ <td>4M</td>
+ <td>22-Sep-2017 11:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170921220243-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170921220243-20170922220129.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Sep-2017 23:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170921220243-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170921220243-20170923100045.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Sep-2017 11:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170921220243-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170921220243-20170923220337.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Sep-2017 23:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170922100051-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170922100051-20170922220129.partial.mar</a></td>
+ <td>4M</td>
+ <td>22-Sep-2017 23:34</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170922100051-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170922100051-20170923100045.partial.mar</a></td>
+ <td>4M</td>
+ <td>23-Sep-2017 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170922100051-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170922100051-20170923220337.partial.mar</a></td>
+ <td>4M</td>
+ <td>23-Sep-2017 23:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170922100051-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170922100051-20170924100550.partial.mar</a></td>
+ <td>4M</td>
+ <td>24-Sep-2017 12:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170922220129-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170922220129-20170923100045.partial.mar</a></td>
+ <td>3M</td>
+ <td>23-Sep-2017 11:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170922220129-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170922220129-20170923220337.partial.mar</a></td>
+ <td>3M</td>
+ <td>23-Sep-2017 23:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170922220129-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170922220129-20170924100550.partial.mar</a></td>
+ <td>3M</td>
+ <td>24-Sep-2017 12:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170922220129-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170922220129-20170924220116.partial.mar</a></td>
+ <td>3M</td>
+ <td>24-Sep-2017 23:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170923100045-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170923100045-20170923220337.partial.mar</a></td>
+ <td>1M</td>
+ <td>23-Sep-2017 23:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170923100045-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170923100045-20170924100550.partial.mar</a></td>
+ <td>2M</td>
+ <td>24-Sep-2017 12:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170923100045-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170923100045-20170924220116.partial.mar</a></td>
+ <td>3M</td>
+ <td>24-Sep-2017 23:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170923100045-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170923100045-20170925100307.partial.mar</a></td>
+ <td>4M</td>
+ <td>25-Sep-2017 11:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170923220337-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170923220337-20170924100550.partial.mar</a></td>
+ <td>2M</td>
+ <td>24-Sep-2017 12:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170923220337-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170923220337-20170924220116.partial.mar</a></td>
+ <td>3M</td>
+ <td>24-Sep-2017 23:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170923220337-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170923220337-20170925100307.partial.mar</a></td>
+ <td>3M</td>
+ <td>25-Sep-2017 11:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170923220337-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170923220337-20170925220207.partial.mar</a></td>
+ <td>4M</td>
+ <td>25-Sep-2017 23:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170924100550-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170924100550-20170924220116.partial.mar</a></td>
+ <td>3M</td>
+ <td>24-Sep-2017 23:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170924100550-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170924100550-20170925100307.partial.mar</a></td>
+ <td>3M</td>
+ <td>25-Sep-2017 11:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170924100550-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170924100550-20170925220207.partial.mar</a></td>
+ <td>3M</td>
+ <td>25-Sep-2017 23:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170924100550-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170924100550-20170926100259.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Sep-2017 11:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170924220116-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170924220116-20170925100307.partial.mar</a></td>
+ <td>3M</td>
+ <td>25-Sep-2017 11:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170924220116-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170924220116-20170925220207.partial.mar</a></td>
+ <td>3M</td>
+ <td>25-Sep-2017 23:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170924220116-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170924220116-20170926100259.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Sep-2017 11:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170924220116-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170924220116-20170926220106.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Sep-2017 23:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170925100307-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170925100307-20170925220207.partial.mar</a></td>
+ <td>1M</td>
+ <td>25-Sep-2017 23:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170925100307-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170925100307-20170926100259.partial.mar</a></td>
+ <td>4M</td>
+ <td>26-Sep-2017 11:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170925100307-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170925100307-20170926220106.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Sep-2017 23:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170925100307-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170925100307-20170927100120.partial.mar</a></td>
+ <td>5M</td>
+ <td>27-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170925220207-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170925220207-20170926100259.partial.mar</a></td>
+ <td>4M</td>
+ <td>26-Sep-2017 11:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170925220207-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170925220207-20170926220106.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Sep-2017 23:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170925220207-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170925220207-20170927100120.partial.mar</a></td>
+ <td>5M</td>
+ <td>27-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170925220207-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170925220207-20170928100123.partial.mar</a></td>
+ <td>6M</td>
+ <td>28-Sep-2017 11:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170926100259-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170926100259-20170926220106.partial.mar</a></td>
+ <td>3M</td>
+ <td>26-Sep-2017 23:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170926100259-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170926100259-20170927100120.partial.mar</a></td>
+ <td>4M</td>
+ <td>27-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170926100259-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170926100259-20170928100123.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Sep-2017 11:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170926100259-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170926100259-20170928220658.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Sep-2017 23:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170926220106-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170926220106-20170927100120.partial.mar</a></td>
+ <td>4M</td>
+ <td>27-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170926220106-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170926220106-20170928100123.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Sep-2017 11:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170926220106-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170926220106-20170928220658.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Sep-2017 23:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170926220106-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170926220106-20170929100122.partial.mar</a></td>
+ <td>5M</td>
+ <td>29-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170927100120-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170927100120-20170928100123.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Sep-2017 11:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170927100120-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170927100120-20170928220658.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Sep-2017 23:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170927100120-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170927100120-20170929100122.partial.mar</a></td>
+ <td>5M</td>
+ <td>29-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170927100120-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170927100120-20170929220356.partial.mar</a></td>
+ <td>5M</td>
+ <td>29-Sep-2017 23:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170928100123-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170928100123-20170928220658.partial.mar</a></td>
+ <td>3M</td>
+ <td>28-Sep-2017 23:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170928100123-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170928100123-20170929100122.partial.mar</a></td>
+ <td>4M</td>
+ <td>29-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170928100123-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170928100123-20170929220356.partial.mar</a></td>
+ <td>5M</td>
+ <td>29-Sep-2017 23:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170928100123-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170928100123-20170930100302.partial.mar</a></td>
+ <td>5M</td>
+ <td>30-Sep-2017 11:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170928220658-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170928220658-20170929100122.partial.mar</a></td>
+ <td>4M</td>
+ <td>29-Sep-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170928220658-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170928220658-20170929220356.partial.mar</a></td>
+ <td>5M</td>
+ <td>29-Sep-2017 23:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170928220658-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170928220658-20170930100302.partial.mar</a></td>
+ <td>5M</td>
+ <td>30-Sep-2017 11:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170928220658-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170928220658-20170930220116.partial.mar</a></td>
+ <td>5M</td>
+ <td>30-Sep-2017 23:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170929100122-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170929100122-20170929220356.partial.mar</a></td>
+ <td>4M</td>
+ <td>29-Sep-2017 23:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170929100122-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170929100122-20170930100302.partial.mar</a></td>
+ <td>4M</td>
+ <td>30-Sep-2017 11:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170929100122-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170929100122-20170930220116.partial.mar</a></td>
+ <td>4M</td>
+ <td>30-Sep-2017 23:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170929100122-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170929100122-20171001100335.partial.mar</a></td>
+ <td>4M</td>
+ <td>01-Oct-2017 11:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170929220356-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170929220356-20170930100302.partial.mar</a></td>
+ <td>2M</td>
+ <td>30-Sep-2017 11:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170929220356-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170929220356-20170930220116.partial.mar</a></td>
+ <td>3M</td>
+ <td>30-Sep-2017 23:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170929220356-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170929220356-20171001100335.partial.mar</a></td>
+ <td>3M</td>
+ <td>01-Oct-2017 11:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170929220356-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170929220356-20171001220301.partial.mar</a></td>
+ <td>3M</td>
+ <td>01-Oct-2017 23:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170930100302-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170930100302-20170930220116.partial.mar</a></td>
+ <td>1006K</td>
+ <td>30-Sep-2017 23:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170930100302-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170930100302-20171001100335.partial.mar</a></td>
+ <td>3M</td>
+ <td>01-Oct-2017 11:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170930100302-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170930100302-20171001220301.partial.mar</a></td>
+ <td>3M</td>
+ <td>01-Oct-2017 23:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170930100302-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170930100302-20171002100134.partial.mar</a></td>
+ <td>4M</td>
+ <td>02-Oct-2017 11:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170930220116-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170930220116-20171001100335.partial.mar</a></td>
+ <td>3M</td>
+ <td>01-Oct-2017 11:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170930220116-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170930220116-20171001220301.partial.mar</a></td>
+ <td>3M</td>
+ <td>01-Oct-2017 23:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170930220116-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170930220116-20171002100134.partial.mar</a></td>
+ <td>4M</td>
+ <td>02-Oct-2017 11:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20170930220116-20171002223859.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20170930220116-20171002223859.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Oct-2017 00:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171001100335-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171001100335-20171001220301.partial.mar</a></td>
+ <td>2M</td>
+ <td>01-Oct-2017 23:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171001100335-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171001100335-20171002100134.partial.mar</a></td>
+ <td>3M</td>
+ <td>02-Oct-2017 11:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171001100335-20171002223859.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171001100335-20171002223859.partial.mar</a></td>
+ <td>3M</td>
+ <td>03-Oct-2017 00:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171001100335-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171001100335-20171003100226.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Oct-2017 11:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171001220301-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171001220301-20171002100134.partial.mar</a></td>
+ <td>3M</td>
+ <td>02-Oct-2017 11:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171001220301-20171002223859.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171001220301-20171002223859.partial.mar</a></td>
+ <td>3M</td>
+ <td>03-Oct-2017 00:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171001220301-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171001220301-20171003100226.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Oct-2017 11:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171001220301-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171001220301-20171003220138.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Oct-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171002100134-20171002223859.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171002100134-20171002223859.partial.mar</a></td>
+ <td>1M</td>
+ <td>03-Oct-2017 00:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171002100134-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171002100134-20171003100226.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Oct-2017 11:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171002100134-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171002100134-20171003220138.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Oct-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171002100134-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171002100134-20171004100049.partial.mar</a></td>
+ <td>6M</td>
+ <td>04-Oct-2017 11:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171002223859-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171002223859-20171003100226.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Oct-2017 11:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171002223859-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171002223859-20171003220138.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Oct-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171002223859-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171002223859-20171004100049.partial.mar</a></td>
+ <td>6M</td>
+ <td>04-Oct-2017 11:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171002223859-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171002223859-20171004220309.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Oct-2017 23:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171003100226-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171003100226-20171003220138.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Oct-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171003100226-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171003100226-20171004100049.partial.mar</a></td>
+ <td>6M</td>
+ <td>04-Oct-2017 11:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171003100226-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171003100226-20171004220309.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 23:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171003100226-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171003100226-20171005100211.partial.mar</a></td>
+ <td>7M</td>
+ <td>05-Oct-2017 11:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171003220138-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171003220138-20171004100049.partial.mar</a></td>
+ <td>5M</td>
+ <td>04-Oct-2017 11:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171003220138-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171003220138-20171004220309.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 23:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171003220138-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171003220138-20171005100211.partial.mar</a></td>
+ <td>7M</td>
+ <td>05-Oct-2017 11:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171003220138-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171003220138-20171005220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>05-Oct-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171004100049-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171004100049-20171004220309.partial.mar</a></td>
+ <td>5M</td>
+ <td>04-Oct-2017 23:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171004100049-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171004100049-20171005100211.partial.mar</a></td>
+ <td>5M</td>
+ <td>05-Oct-2017 11:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171004100049-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171004100049-20171005220204.partial.mar</a></td>
+ <td>5M</td>
+ <td>05-Oct-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171004100049-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171004100049-20171006100327.partial.mar</a></td>
+ <td>5M</td>
+ <td>06-Oct-2017 11:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171004220309-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171004220309-20171005100211.partial.mar</a></td>
+ <td>4M</td>
+ <td>05-Oct-2017 11:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171004220309-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171004220309-20171005220204.partial.mar</a></td>
+ <td>4M</td>
+ <td>05-Oct-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171004220309-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171004220309-20171006100327.partial.mar</a></td>
+ <td>4M</td>
+ <td>06-Oct-2017 11:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171004220309-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171004220309-20171006220306.partial.mar</a></td>
+ <td>5M</td>
+ <td>06-Oct-2017 23:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171005100211-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171005100211-20171005220204.partial.mar</a></td>
+ <td>103K</td>
+ <td>05-Oct-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171005100211-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171005100211-20171006100327.partial.mar</a></td>
+ <td>109K</td>
+ <td>06-Oct-2017 11:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171005100211-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171005100211-20171006220306.partial.mar</a></td>
+ <td>4M</td>
+ <td>06-Oct-2017 23:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171005100211-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171005100211-20171007100142.partial.mar</a></td>
+ <td>5M</td>
+ <td>07-Oct-2017 11:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171005220204-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171005220204-20171006100327.partial.mar</a></td>
+ <td>108K</td>
+ <td>06-Oct-2017 11:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171005220204-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171005220204-20171006220306.partial.mar</a></td>
+ <td>4M</td>
+ <td>06-Oct-2017 23:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171005220204-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171005220204-20171007100142.partial.mar</a></td>
+ <td>5M</td>
+ <td>07-Oct-2017 11:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171005220204-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171005220204-20171007220156.partial.mar</a></td>
+ <td>5M</td>
+ <td>07-Oct-2017 23:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171006100327-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171006100327-20171006220306.partial.mar</a></td>
+ <td>4M</td>
+ <td>06-Oct-2017 23:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171006100327-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171006100327-20171007100142.partial.mar</a></td>
+ <td>5M</td>
+ <td>07-Oct-2017 11:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171006100327-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171006100327-20171007220156.partial.mar</a></td>
+ <td>5M</td>
+ <td>07-Oct-2017 23:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171006100327-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171006100327-20171008131700.partial.mar</a></td>
+ <td>5M</td>
+ <td>08-Oct-2017 14:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171006220306-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171006220306-20171007100142.partial.mar</a></td>
+ <td>4M</td>
+ <td>07-Oct-2017 11:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171006220306-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171006220306-20171007220156.partial.mar</a></td>
+ <td>4M</td>
+ <td>07-Oct-2017 23:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171006220306-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171006220306-20171008131700.partial.mar</a></td>
+ <td>4M</td>
+ <td>08-Oct-2017 14:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171006220306-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171006220306-20171008220130.partial.mar</a></td>
+ <td>4M</td>
+ <td>08-Oct-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171007100142-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171007100142-20171007220156.partial.mar</a></td>
+ <td>1M</td>
+ <td>07-Oct-2017 23:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171007100142-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171007100142-20171008131700.partial.mar</a></td>
+ <td>3M</td>
+ <td>08-Oct-2017 14:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171007100142-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171007100142-20171008220130.partial.mar</a></td>
+ <td>3M</td>
+ <td>08-Oct-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171007100142-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171007100142-20171009100134.partial.mar</a></td>
+ <td>4M</td>
+ <td>09-Oct-2017 11:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171007220156-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171007220156-20171008131700.partial.mar</a></td>
+ <td>3M</td>
+ <td>08-Oct-2017 14:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171007220156-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171007220156-20171008220130.partial.mar</a></td>
+ <td>3M</td>
+ <td>08-Oct-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171007220156-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171007220156-20171009100134.partial.mar</a></td>
+ <td>4M</td>
+ <td>09-Oct-2017 11:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171007220156-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171007220156-20171009220104.partial.mar</a></td>
+ <td>4M</td>
+ <td>09-Oct-2017 23:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171008131700-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171008131700-20171008220130.partial.mar</a></td>
+ <td>1M</td>
+ <td>08-Oct-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171008131700-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171008131700-20171009100134.partial.mar</a></td>
+ <td>3M</td>
+ <td>09-Oct-2017 11:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171008131700-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171008131700-20171009220104.partial.mar</a></td>
+ <td>4M</td>
+ <td>09-Oct-2017 23:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171008131700-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171008131700-20171010100200.partial.mar</a></td>
+ <td>5M</td>
+ <td>10-Oct-2017 11:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171008220130-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171008220130-20171009100134.partial.mar</a></td>
+ <td>3M</td>
+ <td>09-Oct-2017 11:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171008220130-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171008220130-20171009220104.partial.mar</a></td>
+ <td>4M</td>
+ <td>09-Oct-2017 23:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171008220130-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171008220130-20171010100200.partial.mar</a></td>
+ <td>5M</td>
+ <td>10-Oct-2017 11:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171008220130-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171008220130-20171010220102.partial.mar</a></td>
+ <td>6M</td>
+ <td>10-Oct-2017 23:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171009100134-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171009100134-20171009220104.partial.mar</a></td>
+ <td>4M</td>
+ <td>09-Oct-2017 23:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171009100134-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171009100134-20171010100200.partial.mar</a></td>
+ <td>5M</td>
+ <td>10-Oct-2017 11:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171009100134-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171009100134-20171010220102.partial.mar</a></td>
+ <td>5M</td>
+ <td>10-Oct-2017 23:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171009100134-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171009100134-20171011100133.partial.mar</a></td>
+ <td>6M</td>
+ <td>11-Oct-2017 18:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171009220104-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171009220104-20171010100200.partial.mar</a></td>
+ <td>4M</td>
+ <td>10-Oct-2017 11:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171009220104-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171009220104-20171010220102.partial.mar</a></td>
+ <td>5M</td>
+ <td>10-Oct-2017 23:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171009220104-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171009220104-20171011100133.partial.mar</a></td>
+ <td>5M</td>
+ <td>11-Oct-2017 18:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171009220104-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171009220104-20171011220113.partial.mar</a></td>
+ <td>6M</td>
+ <td>11-Oct-2017 23:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171010100200-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171010100200-20171010220102.partial.mar</a></td>
+ <td>4M</td>
+ <td>10-Oct-2017 23:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171010100200-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171010100200-20171011100133.partial.mar</a></td>
+ <td>5M</td>
+ <td>11-Oct-2017 18:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171010100200-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171010100200-20171011220113.partial.mar</a></td>
+ <td>5M</td>
+ <td>11-Oct-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171010100200-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171010100200-20171012100228.partial.mar</a></td>
+ <td>6M</td>
+ <td>12-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171010100200-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171010100200-20171012105833.partial.mar</a></td>
+ <td>6M</td>
+ <td>12-Oct-2017 13:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171010220102-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171010220102-20171011100133.partial.mar</a></td>
+ <td>4M</td>
+ <td>11-Oct-2017 18:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171010220102-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171010220102-20171011220113.partial.mar</a></td>
+ <td>5M</td>
+ <td>11-Oct-2017 23:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171010220102-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171010220102-20171012100228.partial.mar</a></td>
+ <td>5M</td>
+ <td>12-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171010220102-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171010220102-20171012105833.partial.mar</a></td>
+ <td>5M</td>
+ <td>12-Oct-2017 13:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171011100133-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171011100133-20171011220113.partial.mar</a></td>
+ <td>4M</td>
+ <td>11-Oct-2017 23:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171011100133-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171011100133-20171012100228.partial.mar</a></td>
+ <td>4M</td>
+ <td>12-Oct-2017 11:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171011100133-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171011100133-20171012105833.partial.mar</a></td>
+ <td>4M</td>
+ <td>12-Oct-2017 13:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171011100133-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171011100133-20171012220111.partial.mar</a></td>
+ <td>5M</td>
+ <td>12-Oct-2017 23:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171011220113-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171011220113-20171012100228.partial.mar</a></td>
+ <td>4M</td>
+ <td>12-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171011220113-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171011220113-20171012105833.partial.mar</a></td>
+ <td>3M</td>
+ <td>12-Oct-2017 13:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171011220113-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171011220113-20171012220111.partial.mar</a></td>
+ <td>4M</td>
+ <td>12-Oct-2017 23:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171011220113-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171011220113-20171013100112.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 11:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171012100228-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171012100228-20171012220111.partial.mar</a></td>
+ <td>3M</td>
+ <td>12-Oct-2017 23:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171012100228-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171012100228-20171013100112.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 11:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171012100228-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171012100228-20171013220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 23:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171012105833-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171012105833-20171012220111.partial.mar</a></td>
+ <td>4M</td>
+ <td>12-Oct-2017 23:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171012105833-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171012105833-20171013100112.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 11:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171012105833-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171012105833-20171013220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 23:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171012105833-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171012105833-20171014100219.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 11:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171012220111-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171012220111-20171013100112.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 11:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171012220111-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171012220111-20171013220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 23:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171012220111-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171012220111-20171014100219.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 11:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171012220111-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171012220111-20171014220542.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 23:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171013100112-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171013100112-20171013220204.partial.mar</a></td>
+ <td>4M</td>
+ <td>13-Oct-2017 23:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171013100112-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171013100112-20171014100219.partial.mar</a></td>
+ <td>5M</td>
+ <td>14-Oct-2017 11:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171013100112-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171013100112-20171014220542.partial.mar</a></td>
+ <td>6M</td>
+ <td>14-Oct-2017 23:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171013100112-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171013100112-20171015100127.partial.mar</a></td>
+ <td>6M</td>
+ <td>15-Oct-2017 12:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171013220204-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171013220204-20171014100219.partial.mar</a></td>
+ <td>5M</td>
+ <td>14-Oct-2017 11:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171013220204-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171013220204-20171014220542.partial.mar</a></td>
+ <td>5M</td>
+ <td>14-Oct-2017 23:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171013220204-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171013220204-20171015100127.partial.mar</a></td>
+ <td>6M</td>
+ <td>15-Oct-2017 12:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171013220204-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171013220204-20171015220106.partial.mar</a></td>
+ <td>5M</td>
+ <td>15-Oct-2017 23:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171014100219-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171014100219-20171014220542.partial.mar</a></td>
+ <td>3M</td>
+ <td>14-Oct-2017 23:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171014100219-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171014100219-20171015100127.partial.mar</a></td>
+ <td>3M</td>
+ <td>15-Oct-2017 12:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171014100219-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171014100219-20171015220106.partial.mar</a></td>
+ <td>3M</td>
+ <td>15-Oct-2017 23:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171014100219-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171014100219-20171016100113.partial.mar</a></td>
+ <td>4M</td>
+ <td>16-Oct-2017 11:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171014220542-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171014220542-20171015100127.partial.mar</a></td>
+ <td>3M</td>
+ <td>15-Oct-2017 12:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171014220542-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171014220542-20171015220106.partial.mar</a></td>
+ <td>3M</td>
+ <td>15-Oct-2017 23:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171014220542-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171014220542-20171016100113.partial.mar</a></td>
+ <td>4M</td>
+ <td>16-Oct-2017 11:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171014220542-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171014220542-20171016220427.partial.mar</a></td>
+ <td>4M</td>
+ <td>16-Oct-2017 23:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171015100127-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171015100127-20171015220106.partial.mar</a></td>
+ <td>2M</td>
+ <td>15-Oct-2017 23:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171015100127-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171015100127-20171016100113.partial.mar</a></td>
+ <td>3M</td>
+ <td>16-Oct-2017 11:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171015100127-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171015100127-20171016220427.partial.mar</a></td>
+ <td>3M</td>
+ <td>16-Oct-2017 23:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171015100127-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171015100127-20171017100127.partial.mar</a></td>
+ <td>5M</td>
+ <td>17-Oct-2017 11:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171015220106-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171015220106-20171016100113.partial.mar</a></td>
+ <td>3M</td>
+ <td>16-Oct-2017 11:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171015220106-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171015220106-20171016220427.partial.mar</a></td>
+ <td>3M</td>
+ <td>16-Oct-2017 23:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171015220106-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171015220106-20171017100127.partial.mar</a></td>
+ <td>5M</td>
+ <td>17-Oct-2017 11:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171015220106-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171015220106-20171017141229.partial.mar</a></td>
+ <td>5M</td>
+ <td>17-Oct-2017 15:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171016100113-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171016100113-20171016220427.partial.mar</a></td>
+ <td>103K</td>
+ <td>16-Oct-2017 23:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171016100113-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171016100113-20171017100127.partial.mar</a></td>
+ <td>5M</td>
+ <td>17-Oct-2017 11:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171016100113-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171016100113-20171017141229.partial.mar</a></td>
+ <td>5M</td>
+ <td>17-Oct-2017 15:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171016100113-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171016100113-20171017220415.partial.mar</a></td>
+ <td>5M</td>
+ <td>17-Oct-2017 23:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171016220427-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171016220427-20171017100127.partial.mar</a></td>
+ <td>5M</td>
+ <td>17-Oct-2017 11:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171016220427-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171016220427-20171017141229.partial.mar</a></td>
+ <td>5M</td>
+ <td>17-Oct-2017 15:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171016220427-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171016220427-20171017220415.partial.mar</a></td>
+ <td>5M</td>
+ <td>17-Oct-2017 23:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171016220427-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171016220427-20171018100140.partial.mar</a></td>
+ <td>5M</td>
+ <td>18-Oct-2017 11:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171017100127-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171017100127-20171017141229.partial.mar</a></td>
+ <td>115K</td>
+ <td>17-Oct-2017 15:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171017100127-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171017100127-20171017220415.partial.mar</a></td>
+ <td>4M</td>
+ <td>17-Oct-2017 23:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171017100127-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171017100127-20171018100140.partial.mar</a></td>
+ <td>4M</td>
+ <td>18-Oct-2017 11:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171017100127-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171017100127-20171018220049.partial.mar</a></td>
+ <td>4M</td>
+ <td>18-Oct-2017 23:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171017141229-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171017141229-20171017220415.partial.mar</a></td>
+ <td>4M</td>
+ <td>17-Oct-2017 23:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171017141229-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171017141229-20171018100140.partial.mar</a></td>
+ <td>4M</td>
+ <td>18-Oct-2017 11:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171017141229-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171017141229-20171018220049.partial.mar</a></td>
+ <td>4M</td>
+ <td>18-Oct-2017 23:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171017141229-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171017141229-20171019100107.partial.mar</a></td>
+ <td>5M</td>
+ <td>19-Oct-2017 11:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171017220415-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171017220415-20171018100140.partial.mar</a></td>
+ <td>3M</td>
+ <td>18-Oct-2017 11:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171017220415-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171017220415-20171018220049.partial.mar</a></td>
+ <td>3M</td>
+ <td>18-Oct-2017 23:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171017220415-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171017220415-20171019100107.partial.mar</a></td>
+ <td>4M</td>
+ <td>19-Oct-2017 11:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171017220415-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171017220415-20171019222141.partial.mar</a></td>
+ <td>4M</td>
+ <td>20-Oct-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171018100140-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171018100140-20171018220049.partial.mar</a></td>
+ <td>998K</td>
+ <td>18-Oct-2017 23:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171018100140-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171018100140-20171019100107.partial.mar</a></td>
+ <td>4M</td>
+ <td>19-Oct-2017 11:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171018100140-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171018100140-20171019222141.partial.mar</a></td>
+ <td>4M</td>
+ <td>20-Oct-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171018100140-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171018100140-20171020100426.partial.mar</a></td>
+ <td>5M</td>
+ <td>20-Oct-2017 11:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171018220049-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171018220049-20171019100107.partial.mar</a></td>
+ <td>4M</td>
+ <td>19-Oct-2017 11:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171018220049-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171018220049-20171019222141.partial.mar</a></td>
+ <td>4M</td>
+ <td>20-Oct-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171018220049-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171018220049-20171020100426.partial.mar</a></td>
+ <td>5M</td>
+ <td>20-Oct-2017 11:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171018220049-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171018220049-20171020221129.partial.mar</a></td>
+ <td>5M</td>
+ <td>20-Oct-2017 23:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171019100107-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171019100107-20171019222141.partial.mar</a></td>
+ <td>3M</td>
+ <td>20-Oct-2017 00:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171019100107-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171019100107-20171020100426.partial.mar</a></td>
+ <td>4M</td>
+ <td>20-Oct-2017 11:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171019100107-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171019100107-20171020221129.partial.mar</a></td>
+ <td>4M</td>
+ <td>20-Oct-2017 23:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171019100107-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171019100107-20171021100029.partial.mar</a></td>
+ <td>4M</td>
+ <td>21-Oct-2017 11:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171019222141-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171019222141-20171020100426.partial.mar</a></td>
+ <td>3M</td>
+ <td>20-Oct-2017 11:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171019222141-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171019222141-20171020221129.partial.mar</a></td>
+ <td>4M</td>
+ <td>20-Oct-2017 23:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171019222141-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171019222141-20171021100029.partial.mar</a></td>
+ <td>4M</td>
+ <td>21-Oct-2017 11:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171019222141-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171019222141-20171021220121.partial.mar</a></td>
+ <td>4M</td>
+ <td>22-Oct-2017 00:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171020100426-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171020100426-20171020221129.partial.mar</a></td>
+ <td>3M</td>
+ <td>20-Oct-2017 23:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171020100426-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171020100426-20171021100029.partial.mar</a></td>
+ <td>3M</td>
+ <td>21-Oct-2017 11:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171020100426-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171020100426-20171021220121.partial.mar</a></td>
+ <td>3M</td>
+ <td>22-Oct-2017 00:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171020100426-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171020100426-20171022100058.partial.mar</a></td>
+ <td>4M</td>
+ <td>22-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171020221129-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171020221129-20171021100029.partial.mar</a></td>
+ <td>3M</td>
+ <td>21-Oct-2017 11:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171020221129-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171020221129-20171021220121.partial.mar</a></td>
+ <td>3M</td>
+ <td>22-Oct-2017 00:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171020221129-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171020221129-20171022100058.partial.mar</a></td>
+ <td>3M</td>
+ <td>22-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171020221129-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171020221129-20171022220103.partial.mar</a></td>
+ <td>3M</td>
+ <td>22-Oct-2017 23:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171021100029-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171021100029-20171021220121.partial.mar</a></td>
+ <td>1M</td>
+ <td>22-Oct-2017 00:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171021100029-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171021100029-20171022100058.partial.mar</a></td>
+ <td>3M</td>
+ <td>22-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171021100029-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171021100029-20171022220103.partial.mar</a></td>
+ <td>3M</td>
+ <td>22-Oct-2017 23:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171021100029-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171021100029-20171023100252.partial.mar</a></td>
+ <td>4M</td>
+ <td>23-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171021220121-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171021220121-20171022100058.partial.mar</a></td>
+ <td>3M</td>
+ <td>22-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171021220121-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171021220121-20171022220103.partial.mar</a></td>
+ <td>3M</td>
+ <td>22-Oct-2017 23:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171021220121-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171021220121-20171023100252.partial.mar</a></td>
+ <td>4M</td>
+ <td>23-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171021220121-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171021220121-20171023220222.partial.mar</a></td>
+ <td>4M</td>
+ <td>23-Oct-2017 23:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171022100058-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171022100058-20171022220103.partial.mar</a></td>
+ <td>1M</td>
+ <td>22-Oct-2017 23:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171022100058-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171022100058-20171023100252.partial.mar</a></td>
+ <td>3M</td>
+ <td>23-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171022100058-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171022100058-20171023220222.partial.mar</a></td>
+ <td>4M</td>
+ <td>23-Oct-2017 23:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171022100058-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171022100058-20171024100135.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Oct-2017 11:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171022220103-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171022220103-20171023100252.partial.mar</a></td>
+ <td>3M</td>
+ <td>23-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171022220103-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171022220103-20171023220222.partial.mar</a></td>
+ <td>4M</td>
+ <td>23-Oct-2017 23:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171022220103-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171022220103-20171024100135.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Oct-2017 11:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171022220103-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171022220103-20171024220325.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Oct-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171023100252-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171023100252-20171023220222.partial.mar</a></td>
+ <td>3M</td>
+ <td>23-Oct-2017 23:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171023100252-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171023100252-20171024100135.partial.mar</a></td>
+ <td>4M</td>
+ <td>24-Oct-2017 11:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171023100252-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171023100252-20171024220325.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Oct-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171023100252-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171023100252-20171025100449.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171023220222-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171023220222-20171024100135.partial.mar</a></td>
+ <td>4M</td>
+ <td>24-Oct-2017 11:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171023220222-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171023220222-20171024220325.partial.mar</a></td>
+ <td>4M</td>
+ <td>24-Oct-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171023220222-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171023220222-20171025100449.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171023220222-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171023220222-20171025230440.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Oct-2017 01:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171024100135-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171024100135-20171024220325.partial.mar</a></td>
+ <td>4M</td>
+ <td>24-Oct-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171024100135-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171024100135-20171025100449.partial.mar</a></td>
+ <td>4M</td>
+ <td>25-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171024100135-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171024100135-20171025230440.partial.mar</a></td>
+ <td>4M</td>
+ <td>26-Oct-2017 01:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171024100135-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171024100135-20171026100047.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171024220325-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171024220325-20171025100449.partial.mar</a></td>
+ <td>4M</td>
+ <td>25-Oct-2017 11:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171024220325-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171024220325-20171025230440.partial.mar</a></td>
+ <td>4M</td>
+ <td>26-Oct-2017 01:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171024220325-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171024220325-20171026100047.partial.mar</a></td>
+ <td>4M</td>
+ <td>26-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171024220325-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171024220325-20171026221945.partial.mar</a></td>
+ <td>5M</td>
+ <td>27-Oct-2017 00:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171025100449-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171025100449-20171025230440.partial.mar</a></td>
+ <td>3M</td>
+ <td>26-Oct-2017 01:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171025100449-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171025100449-20171026100047.partial.mar</a></td>
+ <td>4M</td>
+ <td>26-Oct-2017 12:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171025100449-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171025100449-20171026221945.partial.mar</a></td>
+ <td>4M</td>
+ <td>27-Oct-2017 00:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171025100449-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171025100449-20171027100103.partial.mar</a></td>
+ <td>5M</td>
+ <td>27-Oct-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171025230440-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171025230440-20171026100047.partial.mar</a></td>
+ <td>4M</td>
+ <td>26-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171025230440-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171025230440-20171026221945.partial.mar</a></td>
+ <td>4M</td>
+ <td>27-Oct-2017 00:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171025230440-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171025230440-20171027100103.partial.mar</a></td>
+ <td>5M</td>
+ <td>27-Oct-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171025230440-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171025230440-20171027220059.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Oct-2017 00:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171026100047-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171026100047-20171026221945.partial.mar</a></td>
+ <td>3M</td>
+ <td>27-Oct-2017 00:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171026100047-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171026100047-20171027100103.partial.mar</a></td>
+ <td>4M</td>
+ <td>27-Oct-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171026100047-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171026100047-20171027220059.partial.mar</a></td>
+ <td>4M</td>
+ <td>28-Oct-2017 00:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171026100047-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171026100047-20171028100423.partial.mar</a></td>
+ <td>5M</td>
+ <td>28-Oct-2017 13:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171026221945-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171026221945-20171027100103.partial.mar</a></td>
+ <td>4M</td>
+ <td>27-Oct-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171026221945-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171026221945-20171027220059.partial.mar</a></td>
+ <td>4M</td>
+ <td>28-Oct-2017 00:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171026221945-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171026221945-20171028100423.partial.mar</a></td>
+ <td>4M</td>
+ <td>28-Oct-2017 13:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171026221945-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171026221945-20171028220326.partial.mar</a></td>
+ <td>4M</td>
+ <td>28-Oct-2017 23:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171027100103-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171027100103-20171027220059.partial.mar</a></td>
+ <td>4M</td>
+ <td>28-Oct-2017 00:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171027100103-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171027100103-20171028100423.partial.mar</a></td>
+ <td>4M</td>
+ <td>28-Oct-2017 13:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171027100103-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171027100103-20171028220326.partial.mar</a></td>
+ <td>4M</td>
+ <td>28-Oct-2017 23:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171027100103-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171027100103-20171029102300.partial.mar</a></td>
+ <td>4M</td>
+ <td>29-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171027220059-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171027220059-20171028100423.partial.mar</a></td>
+ <td>3M</td>
+ <td>28-Oct-2017 13:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171027220059-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171027220059-20171028220326.partial.mar</a></td>
+ <td>4M</td>
+ <td>28-Oct-2017 23:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171027220059-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171027220059-20171029102300.partial.mar</a></td>
+ <td>4M</td>
+ <td>29-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171027220059-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171027220059-20171029220112.partial.mar</a></td>
+ <td>4M</td>
+ <td>30-Oct-2017 02:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171028100423-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171028100423-20171028220326.partial.mar</a></td>
+ <td>3M</td>
+ <td>28-Oct-2017 23:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171028100423-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171028100423-20171029102300.partial.mar</a></td>
+ <td>3M</td>
+ <td>29-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171028100423-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171028100423-20171029220112.partial.mar</a></td>
+ <td>3M</td>
+ <td>30-Oct-2017 02:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171028100423-20171030100132.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171028100423-20171030100132.partial.mar</a></td>
+ <td>3M</td>
+ <td>30-Oct-2017 14:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171028100423-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171028100423-20171030103605.partial.mar</a></td>
+ <td>4M</td>
+ <td>30-Oct-2017 18:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171028220326-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171028220326-20171029102300.partial.mar</a></td>
+ <td>1M</td>
+ <td>29-Oct-2017 12:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171028220326-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171028220326-20171029220112.partial.mar</a></td>
+ <td>2M</td>
+ <td>30-Oct-2017 02:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171028220326-20171030100132.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171028220326-20171030100132.partial.mar</a></td>
+ <td>2M</td>
+ <td>30-Oct-2017 14:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171028220326-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171028220326-20171030103605.partial.mar</a></td>
+ <td>3M</td>
+ <td>30-Oct-2017 18:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171029102300-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171029102300-20171029220112.partial.mar</a></td>
+ <td>2M</td>
+ <td>30-Oct-2017 02:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171029102300-20171030100132.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171029102300-20171030100132.partial.mar</a></td>
+ <td>2M</td>
+ <td>30-Oct-2017 14:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171029102300-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171029102300-20171030103605.partial.mar</a></td>
+ <td>3M</td>
+ <td>30-Oct-2017 18:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171029102300-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171029102300-20171031220132.partial.mar</a></td>
+ <td>4M</td>
+ <td>01-Nov-2017 01:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171029102300-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171029102300-20171031235118.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 10:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171029220112-20171030100132.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171029220112-20171030100132.partial.mar</a></td>
+ <td>100K</td>
+ <td>30-Oct-2017 14:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171029220112-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171029220112-20171030103605.partial.mar</a></td>
+ <td>3M</td>
+ <td>30-Oct-2017 18:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171029220112-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171029220112-20171031220132.partial.mar</a></td>
+ <td>4M</td>
+ <td>01-Nov-2017 01:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171029220112-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171029220112-20171031235118.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 10:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171030100132-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171030100132-20171031220132.partial.mar</a></td>
+ <td>4M</td>
+ <td>01-Nov-2017 01:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171030100132-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171030100132-20171031235118.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 10:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171030100132-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171030100132-20171101104430.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 16:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171030103605-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171030103605-20171031220132.partial.mar</a></td>
+ <td>4M</td>
+ <td>01-Nov-2017 01:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171030103605-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171030103605-20171031235118.partial.mar</a></td>
+ <td>4M</td>
+ <td>01-Nov-2017 10:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171030103605-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171030103605-20171101104430.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 16:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171030103605-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171030103605-20171101220120.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Nov-2017 23:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171031220132-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171031220132-20171101104430.partial.mar</a></td>
+ <td>4M</td>
+ <td>01-Nov-2017 16:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171031220132-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171031220132-20171101220120.partial.mar</a></td>
+ <td>4M</td>
+ <td>01-Nov-2017 23:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171031220132-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171031220132-20171102100041.partial.mar</a></td>
+ <td>5M</td>
+ <td>02-Nov-2017 11:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171031235118-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171031235118-20171101104430.partial.mar</a></td>
+ <td>3M</td>
+ <td>01-Nov-2017 16:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171031235118-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171031235118-20171101220120.partial.mar</a></td>
+ <td>4M</td>
+ <td>01-Nov-2017 23:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171031235118-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171031235118-20171102100041.partial.mar</a></td>
+ <td>4M</td>
+ <td>02-Nov-2017 11:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171031235118-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171031235118-20171102222620.partial.mar</a></td>
+ <td>5M</td>
+ <td>03-Nov-2017 00:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171101104430-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171101104430-20171101220120.partial.mar</a></td>
+ <td>4M</td>
+ <td>01-Nov-2017 23:22</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171101104430-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171101104430-20171102100041.partial.mar</a></td>
+ <td>4M</td>
+ <td>02-Nov-2017 11:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171101104430-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171101104430-20171102222620.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Nov-2017 00:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171101104430-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171101104430-20171103100331.partial.mar</a></td>
+ <td>5M</td>
+ <td>03-Nov-2017 11:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171101220120-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171101220120-20171102100041.partial.mar</a></td>
+ <td>3M</td>
+ <td>02-Nov-2017 11:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171101220120-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171101220120-20171102222620.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Nov-2017 00:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171101220120-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171101220120-20171103100331.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Nov-2017 11:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171101220120-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171101220120-20171103220715.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Nov-2017 23:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171102100041-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171102100041-20171102222620.partial.mar</a></td>
+ <td>3M</td>
+ <td>03-Nov-2017 00:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171102100041-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171102100041-20171103100331.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Nov-2017 11:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171102100041-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171102100041-20171103220715.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Nov-2017 23:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171102100041-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171102100041-20171104100412.partial.mar</a></td>
+ <td>5M</td>
+ <td>04-Nov-2017 11:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171102222620-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171102222620-20171103100331.partial.mar</a></td>
+ <td>3M</td>
+ <td>03-Nov-2017 11:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171102222620-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171102222620-20171103220715.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Nov-2017 23:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171102222620-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171102222620-20171104100412.partial.mar</a></td>
+ <td>5M</td>
+ <td>04-Nov-2017 11:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171102222620-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171102222620-20171104220420.partial.mar</a></td>
+ <td>5M</td>
+ <td>04-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171103100331-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171103100331-20171103220715.partial.mar</a></td>
+ <td>3M</td>
+ <td>03-Nov-2017 23:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171103100331-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171103100331-20171104100412.partial.mar</a></td>
+ <td>4M</td>
+ <td>04-Nov-2017 11:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171103100331-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171103100331-20171104220420.partial.mar</a></td>
+ <td>5M</td>
+ <td>04-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171103100331-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171103100331-20171105100353.partial.mar</a></td>
+ <td>4M</td>
+ <td>05-Nov-2017 11:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171103220715-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171103220715-20171104100412.partial.mar</a></td>
+ <td>4M</td>
+ <td>04-Nov-2017 11:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171103220715-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171103220715-20171104220420.partial.mar</a></td>
+ <td>4M</td>
+ <td>04-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171103220715-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171103220715-20171105100353.partial.mar</a></td>
+ <td>4M</td>
+ <td>05-Nov-2017 11:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171103220715-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171103220715-20171105220721.partial.mar</a></td>
+ <td>4M</td>
+ <td>05-Nov-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171104100412-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171104100412-20171104220420.partial.mar</a></td>
+ <td>3M</td>
+ <td>04-Nov-2017 23:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171104100412-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171104100412-20171105100353.partial.mar</a></td>
+ <td>3M</td>
+ <td>05-Nov-2017 11:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171104100412-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171104100412-20171105220721.partial.mar</a></td>
+ <td>3M</td>
+ <td>05-Nov-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171104100412-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171104100412-20171106100122.partial.mar</a></td>
+ <td>3M</td>
+ <td>06-Nov-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171104220420-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171104220420-20171105100353.partial.mar</a></td>
+ <td>104K</td>
+ <td>05-Nov-2017 11:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171104220420-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171104220420-20171105220721.partial.mar</a></td>
+ <td>1M</td>
+ <td>05-Nov-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171104220420-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171104220420-20171106100122.partial.mar</a></td>
+ <td>3M</td>
+ <td>06-Nov-2017 11:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171105100353-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171105100353-20171105220721.partial.mar</a></td>
+ <td>1M</td>
+ <td>05-Nov-2017 23:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171105100353-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171105100353-20171106100122.partial.mar</a></td>
+ <td>3M</td>
+ <td>06-Nov-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-mac-en-US-20171105220721-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-mac-en-US-20171105220721-20171106100122.partial.mar</a></td>
+ <td>3M</td>
+ <td>06-Nov-2017 11:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170919220202-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170919220202-20170921220243.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Sep-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170920100426-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170920100426-20170921220243.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Sep-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170920100426-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170920100426-20170922100051.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Sep-2017 13:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170920220431-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170920220431-20170921220243.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Sep-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170920220431-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170920220431-20170922100051.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Sep-2017 13:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170920220431-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170920220431-20170922220129.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Sep-2017 01:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170921100141-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170921100141-20170921220243.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Sep-2017 01:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170921100141-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170921100141-20170922100051.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Sep-2017 13:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170921100141-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170921100141-20170922220129.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Sep-2017 01:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170921100141-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170921100141-20170923100045.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Sep-2017 13:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170921220243-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170921220243-20170922100051.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Sep-2017 13:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170921220243-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170921220243-20170922220129.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Sep-2017 01:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170921220243-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170921220243-20170923100045.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Sep-2017 13:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170921220243-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170921220243-20170923220337.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Sep-2017 01:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170922100051-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170922100051-20170922220129.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Sep-2017 01:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170922100051-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170922100051-20170923100045.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Sep-2017 13:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170922100051-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170922100051-20170923220337.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Sep-2017 01:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170922100051-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170922100051-20170924100550.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Sep-2017 14:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170922220129-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170922220129-20170923100045.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Sep-2017 13:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170922220129-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170922220129-20170923220337.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Sep-2017 01:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170922220129-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170922220129-20170924100550.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Sep-2017 14:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170922220129-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170922220129-20170924220116.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Sep-2017 02:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170923100045-20170923220337.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170923100045-20170923220337.partial.mar</a></td>
+ <td>4M</td>
+ <td>24-Sep-2017 01:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170923100045-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170923100045-20170924100550.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Sep-2017 14:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170923100045-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170923100045-20170924220116.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Sep-2017 02:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170923100045-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170923100045-20170925100307.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Sep-2017 12:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170923220337-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170923220337-20170924100550.partial.mar</a></td>
+ <td>5M</td>
+ <td>24-Sep-2017 14:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170923220337-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170923220337-20170924220116.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Sep-2017 02:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170923220337-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170923220337-20170925100307.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Sep-2017 12:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170923220337-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170923220337-20170925220207.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Sep-2017 00:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170924100550-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170924100550-20170924220116.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Sep-2017 02:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170924100550-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170924100550-20170925100307.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Sep-2017 12:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170924100550-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170924100550-20170925220207.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Sep-2017 00:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170924100550-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170924100550-20170926100259.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Sep-2017 13:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170924220116-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170924220116-20170925100307.partial.mar</a></td>
+ <td>5M</td>
+ <td>25-Sep-2017 12:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170924220116-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170924220116-20170925220207.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Sep-2017 00:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170924220116-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170924220116-20170926100259.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Sep-2017 13:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170924220116-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170924220116-20170926220106.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Sep-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170925100307-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170925100307-20170925220207.partial.mar</a></td>
+ <td>4M</td>
+ <td>26-Sep-2017 00:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170925100307-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170925100307-20170926100259.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Sep-2017 13:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170925100307-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170925100307-20170926220106.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Sep-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170925100307-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170925100307-20170927100120.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Sep-2017 13:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170925220207-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170925220207-20170926100259.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Sep-2017 13:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170925220207-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170925220207-20170926220106.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Sep-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170925220207-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170925220207-20170927100120.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Sep-2017 13:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170925220207-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170925220207-20170928100123.partial.mar</a></td>
+ <td>6M</td>
+ <td>28-Sep-2017 13:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170926100259-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170926100259-20170926220106.partial.mar</a></td>
+ <td>5M</td>
+ <td>27-Sep-2017 01:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170926100259-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170926100259-20170927100120.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Sep-2017 13:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170926100259-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170926100259-20170928100123.partial.mar</a></td>
+ <td>6M</td>
+ <td>28-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170926100259-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170926100259-20170928220658.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170926220106-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170926220106-20170927100120.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Sep-2017 13:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170926220106-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170926220106-20170928100123.partial.mar</a></td>
+ <td>6M</td>
+ <td>28-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170926220106-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170926220106-20170928220658.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170926220106-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170926220106-20170929100122.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Sep-2017 13:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170927100120-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170927100120-20170928100123.partial.mar</a></td>
+ <td>6M</td>
+ <td>28-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170927100120-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170927100120-20170928220658.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170927100120-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170927100120-20170929100122.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Sep-2017 13:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170927100120-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170927100120-20170929220356.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170928100123-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170928100123-20170928220658.partial.mar</a></td>
+ <td>5M</td>
+ <td>29-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170928100123-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170928100123-20170929100122.partial.mar</a></td>
+ <td>5M</td>
+ <td>29-Sep-2017 13:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170928100123-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170928100123-20170929220356.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170928100123-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170928100123-20170930100302.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Sep-2017 13:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170928220658-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170928220658-20170929100122.partial.mar</a></td>
+ <td>5M</td>
+ <td>29-Sep-2017 13:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170928220658-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170928220658-20170929220356.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170928220658-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170928220658-20170930100302.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Sep-2017 13:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170928220658-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170928220658-20170930220116.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Oct-2017 01:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170929100122-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170929100122-20170929220356.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170929100122-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170929100122-20170930100302.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Sep-2017 13:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170929100122-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170929100122-20170930220116.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Oct-2017 01:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170929100122-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170929100122-20171001100335.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Oct-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170929220356-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170929220356-20170930100302.partial.mar</a></td>
+ <td>5M</td>
+ <td>30-Sep-2017 13:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170929220356-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170929220356-20170930220116.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Oct-2017 01:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170929220356-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170929220356-20171001100335.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Oct-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170929220356-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170929220356-20171001220301.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170930100302-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170930100302-20170930220116.partial.mar</a></td>
+ <td>4M</td>
+ <td>01-Oct-2017 01:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170930100302-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170930100302-20171001100335.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Oct-2017 13:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170930100302-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170930100302-20171001220301.partial.mar</a></td>
+ <td>5M</td>
+ <td>02-Oct-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170930100302-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170930100302-20171002100134.partial.mar</a></td>
+ <td>5M</td>
+ <td>02-Oct-2017 13:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170930220116-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170930220116-20171001100335.partial.mar</a></td>
+ <td>5M</td>
+ <td>01-Oct-2017 13:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170930220116-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170930220116-20171001220301.partial.mar</a></td>
+ <td>5M</td>
+ <td>02-Oct-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170930220116-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170930220116-20171002100134.partial.mar</a></td>
+ <td>5M</td>
+ <td>02-Oct-2017 13:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20170930220116-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20170930220116-20171002220204.partial.mar</a></td>
+ <td>5M</td>
+ <td>03-Oct-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171001100335-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171001100335-20171001220301.partial.mar</a></td>
+ <td>4M</td>
+ <td>02-Oct-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171001100335-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171001100335-20171002100134.partial.mar</a></td>
+ <td>5M</td>
+ <td>02-Oct-2017 13:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171001100335-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171001100335-20171002220204.partial.mar</a></td>
+ <td>5M</td>
+ <td>03-Oct-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171001100335-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171001100335-20171003100226.partial.mar</a></td>
+ <td>5M</td>
+ <td>03-Oct-2017 12:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171001220301-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171001220301-20171002100134.partial.mar</a></td>
+ <td>5M</td>
+ <td>02-Oct-2017 13:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171001220301-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171001220301-20171002220204.partial.mar</a></td>
+ <td>5M</td>
+ <td>03-Oct-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171001220301-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171001220301-20171003100226.partial.mar</a></td>
+ <td>5M</td>
+ <td>03-Oct-2017 12:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171001220301-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171001220301-20171003220138.partial.mar</a></td>
+ <td>6M</td>
+ <td>04-Oct-2017 00:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171002100134-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171002100134-20171002220204.partial.mar</a></td>
+ <td>4M</td>
+ <td>03-Oct-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171002100134-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171002100134-20171003100226.partial.mar</a></td>
+ <td>5M</td>
+ <td>03-Oct-2017 12:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171002100134-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171002100134-20171003220138.partial.mar</a></td>
+ <td>6M</td>
+ <td>04-Oct-2017 00:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171002100134-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171002100134-20171004100049.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 12:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171002220204-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171002220204-20171003100226.partial.mar</a></td>
+ <td>5M</td>
+ <td>03-Oct-2017 12:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171002220204-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171002220204-20171003220138.partial.mar</a></td>
+ <td>6M</td>
+ <td>04-Oct-2017 00:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171002220204-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171002220204-20171004100049.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 12:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171002220204-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171002220204-20171004220309.partial.mar</a></td>
+ <td>8M</td>
+ <td>05-Oct-2017 00:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171003100226-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171003100226-20171003220138.partial.mar</a></td>
+ <td>5M</td>
+ <td>04-Oct-2017 00:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171003100226-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171003100226-20171004100049.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 12:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171003100226-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171003100226-20171004220309.partial.mar</a></td>
+ <td>8M</td>
+ <td>05-Oct-2017 00:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171003100226-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171003100226-20171005100211.partial.mar</a></td>
+ <td>8M</td>
+ <td>05-Oct-2017 12:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171003220138-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171003220138-20171004100049.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 12:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171003220138-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171003220138-20171004220309.partial.mar</a></td>
+ <td>8M</td>
+ <td>05-Oct-2017 00:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171003220138-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171003220138-20171005100211.partial.mar</a></td>
+ <td>8M</td>
+ <td>05-Oct-2017 12:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171003220138-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171003220138-20171005220204.partial.mar</a></td>
+ <td>8M</td>
+ <td>06-Oct-2017 01:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171004100049-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171004100049-20171004220309.partial.mar</a></td>
+ <td>6M</td>
+ <td>05-Oct-2017 00:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171004100049-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171004100049-20171005100211.partial.mar</a></td>
+ <td>6M</td>
+ <td>05-Oct-2017 12:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171004100049-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171004100049-20171005220204.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Oct-2017 01:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171004100049-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171004100049-20171006100327.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Oct-2017 13:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171004220309-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171004220309-20171005100211.partial.mar</a></td>
+ <td>5M</td>
+ <td>05-Oct-2017 12:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171004220309-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171004220309-20171005220204.partial.mar</a></td>
+ <td>5M</td>
+ <td>06-Oct-2017 01:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171004220309-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171004220309-20171006100327.partial.mar</a></td>
+ <td>5M</td>
+ <td>06-Oct-2017 13:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171004220309-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171004220309-20171006220306.partial.mar</a></td>
+ <td>6M</td>
+ <td>07-Oct-2017 00:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171005100211-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171005100211-20171005220204.partial.mar</a></td>
+ <td>4M</td>
+ <td>06-Oct-2017 01:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171005100211-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171005100211-20171006100327.partial.mar</a></td>
+ <td>4M</td>
+ <td>06-Oct-2017 13:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171005100211-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171005100211-20171006220306.partial.mar</a></td>
+ <td>5M</td>
+ <td>07-Oct-2017 00:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171005100211-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171005100211-20171007100142.partial.mar</a></td>
+ <td>6M</td>
+ <td>07-Oct-2017 12:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171005220204-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171005220204-20171006100327.partial.mar</a></td>
+ <td>4M</td>
+ <td>06-Oct-2017 13:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171005220204-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171005220204-20171006220306.partial.mar</a></td>
+ <td>6M</td>
+ <td>07-Oct-2017 00:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171005220204-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171005220204-20171007100142.partial.mar</a></td>
+ <td>6M</td>
+ <td>07-Oct-2017 12:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171005220204-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171005220204-20171007220156.partial.mar</a></td>
+ <td>6M</td>
+ <td>08-Oct-2017 00:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171006100327-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171006100327-20171006220306.partial.mar</a></td>
+ <td>6M</td>
+ <td>07-Oct-2017 00:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171006100327-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171006100327-20171007100142.partial.mar</a></td>
+ <td>6M</td>
+ <td>07-Oct-2017 12:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171006100327-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171006100327-20171007220156.partial.mar</a></td>
+ <td>6M</td>
+ <td>08-Oct-2017 00:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171006100327-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171006100327-20171008131700.partial.mar</a></td>
+ <td>6M</td>
+ <td>08-Oct-2017 16:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171006220306-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171006220306-20171007100142.partial.mar</a></td>
+ <td>6M</td>
+ <td>07-Oct-2017 12:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171006220306-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171006220306-20171007220156.partial.mar</a></td>
+ <td>6M</td>
+ <td>08-Oct-2017 00:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171006220306-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171006220306-20171008131700.partial.mar</a></td>
+ <td>6M</td>
+ <td>08-Oct-2017 16:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171006220306-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171006220306-20171008220130.partial.mar</a></td>
+ <td>6M</td>
+ <td>09-Oct-2017 00:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171007100142-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171007100142-20171007220156.partial.mar</a></td>
+ <td>4M</td>
+ <td>08-Oct-2017 00:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171007100142-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171007100142-20171008131700.partial.mar</a></td>
+ <td>5M</td>
+ <td>08-Oct-2017 16:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171007100142-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171007100142-20171008220130.partial.mar</a></td>
+ <td>5M</td>
+ <td>09-Oct-2017 00:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171007100142-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171007100142-20171009100134.partial.mar</a></td>
+ <td>5M</td>
+ <td>09-Oct-2017 12:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171007220156-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171007220156-20171008131700.partial.mar</a></td>
+ <td>5M</td>
+ <td>08-Oct-2017 16:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171007220156-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171007220156-20171008220130.partial.mar</a></td>
+ <td>5M</td>
+ <td>09-Oct-2017 00:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171007220156-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171007220156-20171009100134.partial.mar</a></td>
+ <td>5M</td>
+ <td>09-Oct-2017 12:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171007220156-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171007220156-20171009220104.partial.mar</a></td>
+ <td>6M</td>
+ <td>10-Oct-2017 01:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171008131700-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171008131700-20171008220130.partial.mar</a></td>
+ <td>4M</td>
+ <td>09-Oct-2017 00:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171008131700-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171008131700-20171009100134.partial.mar</a></td>
+ <td>5M</td>
+ <td>09-Oct-2017 12:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171008131700-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171008131700-20171009220104.partial.mar</a></td>
+ <td>6M</td>
+ <td>10-Oct-2017 01:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171008131700-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171008131700-20171010100200.partial.mar</a></td>
+ <td>6M</td>
+ <td>10-Oct-2017 13:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171008220130-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171008220130-20171009100134.partial.mar</a></td>
+ <td>5M</td>
+ <td>09-Oct-2017 12:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171008220130-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171008220130-20171009220104.partial.mar</a></td>
+ <td>6M</td>
+ <td>10-Oct-2017 01:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171008220130-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171008220130-20171010100200.partial.mar</a></td>
+ <td>6M</td>
+ <td>10-Oct-2017 13:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171008220130-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171008220130-20171010220102.partial.mar</a></td>
+ <td>7M</td>
+ <td>11-Oct-2017 01:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171009100134-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171009100134-20171009220104.partial.mar</a></td>
+ <td>6M</td>
+ <td>10-Oct-2017 01:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171009100134-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171009100134-20171010100200.partial.mar</a></td>
+ <td>6M</td>
+ <td>10-Oct-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171009100134-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171009100134-20171010220102.partial.mar</a></td>
+ <td>7M</td>
+ <td>11-Oct-2017 01:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171009100134-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171009100134-20171011100133.partial.mar</a></td>
+ <td>8M</td>
+ <td>11-Oct-2017 17:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171009220104-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171009220104-20171010100200.partial.mar</a></td>
+ <td>6M</td>
+ <td>10-Oct-2017 13:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171009220104-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171009220104-20171010220102.partial.mar</a></td>
+ <td>6M</td>
+ <td>11-Oct-2017 01:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171009220104-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171009220104-20171011100133.partial.mar</a></td>
+ <td>7M</td>
+ <td>11-Oct-2017 17:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171009220104-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171009220104-20171011220113.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 01:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171010100200-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171010100200-20171010220102.partial.mar</a></td>
+ <td>6M</td>
+ <td>11-Oct-2017 01:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171010100200-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171010100200-20171011100133.partial.mar</a></td>
+ <td>7M</td>
+ <td>11-Oct-2017 17:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171010100200-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171010100200-20171011220113.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 01:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171010100200-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171010100200-20171012100228.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 14:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171010100200-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171010100200-20171012105833.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 15:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171010220102-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171010220102-20171011100133.partial.mar</a></td>
+ <td>6M</td>
+ <td>11-Oct-2017 17:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171010220102-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171010220102-20171011220113.partial.mar</a></td>
+ <td>6M</td>
+ <td>12-Oct-2017 01:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171010220102-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171010220102-20171012100228.partial.mar</a></td>
+ <td>6M</td>
+ <td>12-Oct-2017 14:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171010220102-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171010220102-20171012105833.partial.mar</a></td>
+ <td>6M</td>
+ <td>12-Oct-2017 15:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171011100133-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171011100133-20171011220113.partial.mar</a></td>
+ <td>5M</td>
+ <td>12-Oct-2017 01:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171011100133-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171011100133-20171012100228.partial.mar</a></td>
+ <td>6M</td>
+ <td>12-Oct-2017 14:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171011100133-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171011100133-20171012105833.partial.mar</a></td>
+ <td>6M</td>
+ <td>12-Oct-2017 15:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171011100133-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171011100133-20171012220111.partial.mar</a></td>
+ <td>6M</td>
+ <td>13-Oct-2017 00:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171011220113-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171011220113-20171012100228.partial.mar</a></td>
+ <td>5M</td>
+ <td>12-Oct-2017 14:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171011220113-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171011220113-20171012105833.partial.mar</a></td>
+ <td>5M</td>
+ <td>12-Oct-2017 15:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171011220113-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171011220113-20171012220111.partial.mar</a></td>
+ <td>6M</td>
+ <td>13-Oct-2017 00:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171011220113-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171011220113-20171013100112.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 13:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171012100228-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171012100228-20171012220111.partial.mar</a></td>
+ <td>6M</td>
+ <td>13-Oct-2017 00:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171012100228-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171012100228-20171013100112.partial.mar</a></td>
+ <td>6M</td>
+ <td>13-Oct-2017 13:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171012100228-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171012100228-20171013220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>14-Oct-2017 01:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171012105833-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171012105833-20171012220111.partial.mar</a></td>
+ <td>6M</td>
+ <td>13-Oct-2017 00:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171012105833-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171012105833-20171013100112.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 13:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171012105833-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171012105833-20171013220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>14-Oct-2017 01:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171012105833-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171012105833-20171014100219.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 12:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171012220111-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171012220111-20171013100112.partial.mar</a></td>
+ <td>6M</td>
+ <td>13-Oct-2017 13:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171012220111-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171012220111-20171013220204.partial.mar</a></td>
+ <td>6M</td>
+ <td>14-Oct-2017 01:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171012220111-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171012220111-20171014100219.partial.mar</a></td>
+ <td>7M</td>
+ <td>14-Oct-2017 12:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171012220111-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171012220111-20171014220542.partial.mar</a></td>
+ <td>8M</td>
+ <td>15-Oct-2017 01:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171013100112-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171013100112-20171013220204.partial.mar</a></td>
+ <td>5M</td>
+ <td>14-Oct-2017 01:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171013100112-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171013100112-20171014100219.partial.mar</a></td>
+ <td>7M</td>
+ <td>14-Oct-2017 12:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171013100112-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171013100112-20171014220542.partial.mar</a></td>
+ <td>7M</td>
+ <td>15-Oct-2017 01:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171013100112-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171013100112-20171015100127.partial.mar</a></td>
+ <td>7M</td>
+ <td>15-Oct-2017 14:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171013220204-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171013220204-20171014100219.partial.mar</a></td>
+ <td>6M</td>
+ <td>14-Oct-2017 12:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171013220204-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171013220204-20171014220542.partial.mar</a></td>
+ <td>7M</td>
+ <td>15-Oct-2017 01:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171013220204-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171013220204-20171015100127.partial.mar</a></td>
+ <td>7M</td>
+ <td>15-Oct-2017 14:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171013220204-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171013220204-20171015220106.partial.mar</a></td>
+ <td>7M</td>
+ <td>16-Oct-2017 02:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171014100219-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171014100219-20171014220542.partial.mar</a></td>
+ <td>6M</td>
+ <td>15-Oct-2017 01:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171014100219-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171014100219-20171015100127.partial.mar</a></td>
+ <td>6M</td>
+ <td>15-Oct-2017 14:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171014100219-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171014100219-20171015220106.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 02:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171014100219-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171014100219-20171016100113.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 14:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171014220542-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171014220542-20171015100127.partial.mar</a></td>
+ <td>5M</td>
+ <td>15-Oct-2017 14:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171014220542-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171014220542-20171015220106.partial.mar</a></td>
+ <td>5M</td>
+ <td>16-Oct-2017 02:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171014220542-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171014220542-20171016100113.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 14:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171014220542-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171014220542-20171016220427.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 01:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171015100127-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171015100127-20171015220106.partial.mar</a></td>
+ <td>4M</td>
+ <td>16-Oct-2017 02:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171015100127-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171015100127-20171016100113.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 14:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171015100127-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171015100127-20171016220427.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 01:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171015100127-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171015100127-20171017100127.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 13:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171015220106-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171015220106-20171016100113.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 14:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171015220106-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171015220106-20171016220427.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 01:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171015220106-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171015220106-20171017100127.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 13:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171015220106-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171015220106-20171017141229.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 18:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171016100113-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171016100113-20171016220427.partial.mar</a></td>
+ <td>4M</td>
+ <td>17-Oct-2017 01:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171016100113-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171016100113-20171017100127.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 13:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171016100113-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171016100113-20171017141229.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 18:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171016100113-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171016100113-20171017220415.partial.mar</a></td>
+ <td>6M</td>
+ <td>18-Oct-2017 01:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171016220427-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171016220427-20171017100127.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 13:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171016220427-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171016220427-20171017141229.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 18:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171016220427-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171016220427-20171017220415.partial.mar</a></td>
+ <td>6M</td>
+ <td>18-Oct-2017 01:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171016220427-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171016220427-20171018100140.partial.mar</a></td>
+ <td>6M</td>
+ <td>18-Oct-2017 12:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171017100127-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171017100127-20171017141229.partial.mar</a></td>
+ <td>4M</td>
+ <td>17-Oct-2017 18:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171017100127-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171017100127-20171017220415.partial.mar</a></td>
+ <td>5M</td>
+ <td>18-Oct-2017 01:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171017100127-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171017100127-20171018100140.partial.mar</a></td>
+ <td>5M</td>
+ <td>18-Oct-2017 12:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171017100127-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171017100127-20171018220049.partial.mar</a></td>
+ <td>5M</td>
+ <td>19-Oct-2017 01:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171017141229-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171017141229-20171017220415.partial.mar</a></td>
+ <td>5M</td>
+ <td>18-Oct-2017 01:10</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171017141229-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171017141229-20171018100140.partial.mar</a></td>
+ <td>6M</td>
+ <td>18-Oct-2017 12:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171017141229-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171017141229-20171018220049.partial.mar</a></td>
+ <td>5M</td>
+ <td>19-Oct-2017 01:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171017141229-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171017141229-20171019100107.partial.mar</a></td>
+ <td>6M</td>
+ <td>19-Oct-2017 13:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171017220415-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171017220415-20171018100140.partial.mar</a></td>
+ <td>5M</td>
+ <td>18-Oct-2017 12:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171017220415-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171017220415-20171018220049.partial.mar</a></td>
+ <td>5M</td>
+ <td>19-Oct-2017 01:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171017220415-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171017220415-20171019100107.partial.mar</a></td>
+ <td>6M</td>
+ <td>19-Oct-2017 13:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171017220415-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171017220415-20171019222141.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Oct-2017 01:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171018100140-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171018100140-20171018220049.partial.mar</a></td>
+ <td>4M</td>
+ <td>19-Oct-2017 01:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171018100140-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171018100140-20171019100107.partial.mar</a></td>
+ <td>6M</td>
+ <td>19-Oct-2017 13:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171018100140-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171018100140-20171019222141.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Oct-2017 01:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171018100140-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171018100140-20171020100426.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Oct-2017 13:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171018220049-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171018220049-20171019100107.partial.mar</a></td>
+ <td>6M</td>
+ <td>19-Oct-2017 13:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171018220049-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171018220049-20171019222141.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Oct-2017 01:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171018220049-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171018220049-20171020100426.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Oct-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171018220049-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171018220049-20171020221129.partial.mar</a></td>
+ <td>6M</td>
+ <td>21-Oct-2017 01:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171019100107-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171019100107-20171019222141.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Oct-2017 01:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171019100107-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171019100107-20171020100426.partial.mar</a></td>
+ <td>5M</td>
+ <td>20-Oct-2017 13:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171019100107-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171019100107-20171020221129.partial.mar</a></td>
+ <td>6M</td>
+ <td>21-Oct-2017 01:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171019100107-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171019100107-20171021100029.partial.mar</a></td>
+ <td>5M</td>
+ <td>21-Oct-2017 13:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171019222141-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171019222141-20171020100426.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Oct-2017 13:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171019222141-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171019222141-20171020221129.partial.mar</a></td>
+ <td>6M</td>
+ <td>21-Oct-2017 01:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171019222141-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171019222141-20171021100029.partial.mar</a></td>
+ <td>6M</td>
+ <td>21-Oct-2017 13:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171019222141-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171019222141-20171021220121.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Oct-2017 02:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171020100426-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171020100426-20171020221129.partial.mar</a></td>
+ <td>5M</td>
+ <td>21-Oct-2017 01:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171020100426-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171020100426-20171021100029.partial.mar</a></td>
+ <td>5M</td>
+ <td>21-Oct-2017 13:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171020100426-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171020100426-20171021220121.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Oct-2017 02:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171020100426-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171020100426-20171022100058.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Oct-2017 13:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171020221129-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171020221129-20171021100029.partial.mar</a></td>
+ <td>5M</td>
+ <td>21-Oct-2017 13:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171020221129-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171020221129-20171021220121.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Oct-2017 02:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171020221129-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171020221129-20171022100058.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Oct-2017 13:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171020221129-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171020221129-20171022220103.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Oct-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171021100029-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171021100029-20171021220121.partial.mar</a></td>
+ <td>4M</td>
+ <td>22-Oct-2017 02:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171021100029-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171021100029-20171022100058.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Oct-2017 13:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171021100029-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171021100029-20171022220103.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Oct-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171021100029-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171021100029-20171023100252.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Oct-2017 13:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171021220121-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171021220121-20171022100058.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Oct-2017 13:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171021220121-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171021220121-20171022220103.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Oct-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171021220121-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171021220121-20171023100252.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Oct-2017 13:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171021220121-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171021220121-20171023220222.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Oct-2017 00:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171022100058-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171022100058-20171022220103.partial.mar</a></td>
+ <td>4M</td>
+ <td>23-Oct-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171022100058-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171022100058-20171023100252.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Oct-2017 13:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171022100058-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171022100058-20171023220222.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Oct-2017 00:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171022100058-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171022100058-20171024100135.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Oct-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171022220103-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171022220103-20171023100252.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Oct-2017 13:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171022220103-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171022220103-20171023220222.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Oct-2017 00:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171022220103-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171022220103-20171024100135.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Oct-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171022220103-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171022220103-20171024220325.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Oct-2017 00:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171023100252-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171023100252-20171023220222.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Oct-2017 00:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171023100252-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171023100252-20171024100135.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Oct-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171023100252-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171023100252-20171024220325.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Oct-2017 00:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171023100252-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171023100252-20171025100449.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Oct-2017 13:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171023220222-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171023220222-20171024100135.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Oct-2017 12:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171023220222-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171023220222-20171024220325.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Oct-2017 00:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171023220222-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171023220222-20171025100449.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Oct-2017 13:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171023220222-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171023220222-20171025230440.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Oct-2017 02:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171024100135-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171024100135-20171024220325.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Oct-2017 00:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171024100135-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171024100135-20171025100449.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Oct-2017 13:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171024100135-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171024100135-20171025230440.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Oct-2017 02:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171024100135-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171024100135-20171026100047.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 15:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171024220325-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171024220325-20171025100449.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Oct-2017 13:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171024220325-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171024220325-20171025230440.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Oct-2017 02:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171024220325-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171024220325-20171026100047.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 15:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171024220325-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171024220325-20171026221945.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 02:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171025100449-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171025100449-20171025230440.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Oct-2017 02:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171025100449-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171025100449-20171026100047.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Oct-2017 15:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171025100449-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171025100449-20171026221945.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 02:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171025100449-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171025100449-20171027100103.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 15:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171025230440-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171025230440-20171026100047.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Oct-2017 15:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171025230440-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171025230440-20171026221945.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 02:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171025230440-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171025230440-20171027100103.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 15:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171025230440-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171025230440-20171027220059.partial.mar</a></td>
+ <td>14M</td>
+ <td>28-Oct-2017 03:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171026100047-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171026100047-20171026221945.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Oct-2017 02:35</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171026100047-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171026100047-20171027100103.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Oct-2017 15:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171026100047-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171026100047-20171027220059.partial.mar</a></td>
+ <td>14M</td>
+ <td>28-Oct-2017 03:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171026100047-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171026100047-20171028100423.partial.mar</a></td>
+ <td>14M</td>
+ <td>28-Oct-2017 15:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171026221945-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171026221945-20171027100103.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Oct-2017 15:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171026221945-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171026221945-20171027220059.partial.mar</a></td>
+ <td>14M</td>
+ <td>28-Oct-2017 03:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171026221945-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171026221945-20171028100423.partial.mar</a></td>
+ <td>14M</td>
+ <td>28-Oct-2017 15:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171026221945-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171026221945-20171028220326.partial.mar</a></td>
+ <td>14M</td>
+ <td>29-Oct-2017 01:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171027100103-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171027100103-20171027220059.partial.mar</a></td>
+ <td>14M</td>
+ <td>28-Oct-2017 03:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171027100103-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171027100103-20171028100423.partial.mar</a></td>
+ <td>14M</td>
+ <td>28-Oct-2017 15:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171027100103-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171027100103-20171028220326.partial.mar</a></td>
+ <td>14M</td>
+ <td>29-Oct-2017 01:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171027100103-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171027100103-20171029102300.partial.mar</a></td>
+ <td>14M</td>
+ <td>29-Oct-2017 14:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171027220059-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171027220059-20171028100423.partial.mar</a></td>
+ <td>6M</td>
+ <td>28-Oct-2017 15:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171027220059-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171027220059-20171028220326.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Oct-2017 01:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171027220059-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171027220059-20171029102300.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Oct-2017 14:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171027220059-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171027220059-20171029220112.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Oct-2017 04:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171028100423-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171028100423-20171028220326.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Oct-2017 01:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171028100423-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171028100423-20171029102300.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Oct-2017 14:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171028100423-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171028100423-20171029220112.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Oct-2017 04:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171028100423-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171028100423-20171030103605.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Oct-2017 20:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171028220326-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171028220326-20171029102300.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Oct-2017 14:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171028220326-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171028220326-20171029220112.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Oct-2017 04:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171028220326-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171028220326-20171030103605.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Oct-2017 20:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171028220326-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171028220326-20171031220132.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Nov-2017 09:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171028220326-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171028220326-20171031235118.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Nov-2017 10:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171029102300-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171029102300-20171029220112.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Oct-2017 04:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171029102300-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171029102300-20171030103605.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Oct-2017 20:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171029102300-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171029102300-20171031220132.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Nov-2017 09:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171029102300-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171029102300-20171031235118.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Nov-2017 10:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171029220112-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171029220112-20171030103605.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Oct-2017 20:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171029220112-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171029220112-20171031220132.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Nov-2017 09:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171029220112-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171029220112-20171031235118.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Nov-2017 10:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171029220112-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171029220112-20171101104430.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 16:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171030103605-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171030103605-20171031220132.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Nov-2017 09:54</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171030103605-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171030103605-20171031235118.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Nov-2017 10:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171030103605-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171030103605-20171101104430.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Nov-2017 16:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171030103605-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171030103605-20171101220120.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Nov-2017 00:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171031220132-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171031220132-20171101104430.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Nov-2017 16:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171031220132-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171031220132-20171101220120.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Nov-2017 00:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171031220132-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171031220132-20171102100041.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Nov-2017 12:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171031235118-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171031235118-20171101104430.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Nov-2017 16:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171031235118-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171031235118-20171101220120.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Nov-2017 00:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171031235118-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171031235118-20171102100041.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Nov-2017 12:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171031235118-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171031235118-20171102222620.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Nov-2017 01:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171101104430-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171101104430-20171101220120.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Nov-2017 00:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171101104430-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171101104430-20171102100041.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Nov-2017 12:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171101104430-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171101104430-20171102222620.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Nov-2017 01:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171101104430-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171101104430-20171103100331.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Nov-2017 12:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171101220120-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171101220120-20171102100041.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Nov-2017 12:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171101220120-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171101220120-20171102222620.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Nov-2017 01:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171101220120-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171101220120-20171103100331.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Nov-2017 12:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171101220120-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171101220120-20171103220715.partial.mar</a></td>
+ <td>6M</td>
+ <td>04-Nov-2017 00:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171102100041-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171102100041-20171102222620.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Nov-2017 01:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171102100041-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171102100041-20171103100331.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Nov-2017 12:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171102100041-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171102100041-20171103220715.partial.mar</a></td>
+ <td>6M</td>
+ <td>04-Nov-2017 00:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171102100041-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171102100041-20171104100412.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Nov-2017 12:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171102222620-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171102222620-20171103100331.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Nov-2017 12:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171102222620-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171102222620-20171103220715.partial.mar</a></td>
+ <td>6M</td>
+ <td>04-Nov-2017 00:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171102222620-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171102222620-20171104100412.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Nov-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171102222620-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171102222620-20171104220420.partial.mar</a></td>
+ <td>8M</td>
+ <td>05-Nov-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171103100331-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171103100331-20171103220715.partial.mar</a></td>
+ <td>6M</td>
+ <td>04-Nov-2017 00:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171103100331-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171103100331-20171104100412.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Nov-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171103100331-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171103100331-20171104220420.partial.mar</a></td>
+ <td>8M</td>
+ <td>05-Nov-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171103100331-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171103100331-20171105100353.partial.mar</a></td>
+ <td>8M</td>
+ <td>05-Nov-2017 12:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171103220715-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171103220715-20171104100412.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Nov-2017 12:45</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171103220715-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171103220715-20171104220420.partial.mar</a></td>
+ <td>8M</td>
+ <td>05-Nov-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171103220715-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171103220715-20171105100353.partial.mar</a></td>
+ <td>8M</td>
+ <td>05-Nov-2017 12:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171103220715-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171103220715-20171105220721.partial.mar</a></td>
+ <td>8M</td>
+ <td>06-Nov-2017 00:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171104100412-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171104100412-20171104220420.partial.mar</a></td>
+ <td>7M</td>
+ <td>05-Nov-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171104100412-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171104100412-20171105100353.partial.mar</a></td>
+ <td>7M</td>
+ <td>05-Nov-2017 12:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171104100412-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171104100412-20171105220721.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Nov-2017 00:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171104100412-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171104100412-20171106100122.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Nov-2017 12:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171104220420-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171104220420-20171105100353.partial.mar</a></td>
+ <td>4M</td>
+ <td>05-Nov-2017 12:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171104220420-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171104220420-20171105220721.partial.mar</a></td>
+ <td>4M</td>
+ <td>06-Nov-2017 00:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171104220420-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171104220420-20171106100122.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Nov-2017 12:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171105100353-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171105100353-20171105220721.partial.mar</a></td>
+ <td>4M</td>
+ <td>06-Nov-2017 00:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171105100353-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171105100353-20171106100122.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Nov-2017 12:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win32-en-US-20171105220721-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-win32-en-US-20171105220721-20171106100122.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Nov-2017 12:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170919220202-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170919220202-20170921220243.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 01:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170920100426-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170920100426-20170921220243.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 01:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170920100426-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170920100426-20170922100051.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170920220431-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170920220431-20170921220243.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Sep-2017 01:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170920220431-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170920220431-20170922100051.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170920220431-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170920220431-20170922220129.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Sep-2017 02:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170921100141-20170921220243.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170921100141-20170921220243.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Sep-2017 01:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170921100141-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170921100141-20170922100051.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170921100141-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170921100141-20170922220129.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Sep-2017 02:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170921100141-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170921100141-20170923100045.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Sep-2017 13:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170921220243-20170922100051.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170921220243-20170922100051.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Sep-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170921220243-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170921220243-20170922220129.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Sep-2017 02:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170921220243-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170921220243-20170923100045.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Sep-2017 13:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170921220243-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170921220243-20170924100550.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Sep-2017 14:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170922100051-20170922220129.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170922100051-20170922220129.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Sep-2017 02:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170922100051-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170922100051-20170923100045.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Sep-2017 13:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170922100051-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170922100051-20170924100550.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Sep-2017 14:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170922100051-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170922100051-20170924220116.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Sep-2017 02:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170922220129-20170923100045.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170922220129-20170923100045.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Sep-2017 13:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170922220129-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170922220129-20170924100550.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Sep-2017 14:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170922220129-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170922220129-20170924220116.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Sep-2017 02:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170922220129-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170922220129-20170925100307.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Sep-2017 13:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170923100045-20170924100550.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170923100045-20170924100550.partial.mar</a></td>
+ <td>6M</td>
+ <td>24-Sep-2017 14:30</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170923100045-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170923100045-20170924220116.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Sep-2017 02:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170923100045-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170923100045-20170925100307.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Sep-2017 13:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170923100045-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170923100045-20170925220207.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 01:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170924100550-20170924220116.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170924100550-20170924220116.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Sep-2017 02:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170924100550-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170924100550-20170925100307.partial.mar</a></td>
+ <td>6M</td>
+ <td>25-Sep-2017 13:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170924100550-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170924100550-20170925220207.partial.mar</a></td>
+ <td>6M</td>
+ <td>26-Sep-2017 01:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170924100550-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170924100550-20170926100259.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170924220116-20170925100307.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170924220116-20170925100307.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Sep-2017 13:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170924220116-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170924220116-20170925220207.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 01:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170924220116-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170924220116-20170926100259.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 13:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170924220116-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170924220116-20170926220106.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170925100307-20170925220207.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170925100307-20170925220207.partial.mar</a></td>
+ <td>5M</td>
+ <td>26-Sep-2017 01:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170925100307-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170925100307-20170926100259.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 13:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170925100307-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170925100307-20170926220106.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170925100307-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170925100307-20170927100120.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Sep-2017 13:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170925220207-20170926100259.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170925220207-20170926100259.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Sep-2017 13:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170925220207-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170925220207-20170926220106.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170925220207-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170925220207-20170927100120.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Sep-2017 13:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170925220207-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170925220207-20170928100123.partial.mar</a></td>
+ <td>8M</td>
+ <td>28-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170926100259-20170926220106.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170926100259-20170926220106.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Sep-2017 01:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170926100259-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170926100259-20170927100120.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Sep-2017 13:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170926100259-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170926100259-20170928100123.partial.mar</a></td>
+ <td>7M</td>
+ <td>28-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170926100259-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170926100259-20170928220658.partial.mar</a></td>
+ <td>8M</td>
+ <td>29-Sep-2017 01:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170926220106-20170927100120.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170926220106-20170927100120.partial.mar</a></td>
+ <td>6M</td>
+ <td>27-Sep-2017 13:29</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170926220106-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170926220106-20170928100123.partial.mar</a></td>
+ <td>7M</td>
+ <td>28-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170926220106-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170926220106-20170928220658.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Sep-2017 01:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170926220106-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170926220106-20170929100122.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Sep-2017 13:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170927100120-20170928100123.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170927100120-20170928100123.partial.mar</a></td>
+ <td>7M</td>
+ <td>28-Sep-2017 13:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170927100120-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170927100120-20170928220658.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Sep-2017 01:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170927100120-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170927100120-20170929100122.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Sep-2017 13:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170927100120-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170927100120-20170929220356.partial.mar</a></td>
+ <td>8M</td>
+ <td>30-Sep-2017 01:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170928100123-20170928220658.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170928100123-20170928220658.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Sep-2017 01:24</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170928100123-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170928100123-20170929100122.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Sep-2017 13:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170928100123-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170928100123-20170929220356.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Sep-2017 01:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170928100123-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170928100123-20170930100302.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Sep-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170928220658-20170929100122.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170928220658-20170929100122.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Sep-2017 13:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170928220658-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170928220658-20170929220356.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Sep-2017 01:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170928220658-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170928220658-20170930100302.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Sep-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170928220658-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170928220658-20170930220116.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 01:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170929100122-20170929220356.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170929100122-20170929220356.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Sep-2017 01:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170929100122-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170929100122-20170930100302.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Sep-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170929100122-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170929100122-20170930220116.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 01:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170929100122-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170929100122-20171001100335.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 13:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170929220356-20170930100302.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170929220356-20170930100302.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Sep-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170929220356-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170929220356-20170930220116.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 01:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170929220356-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170929220356-20171001100335.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 13:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170929220356-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170929220356-20171001220301.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 01:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170930100302-20170930220116.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170930100302-20170930220116.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Oct-2017 01:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170930100302-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170930100302-20171001100335.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Oct-2017 13:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170930100302-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170930100302-20171001220301.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 01:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170930100302-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170930100302-20171002100134.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Oct-2017 13:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170930220116-20171001100335.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170930220116-20171001100335.partial.mar</a></td>
+ <td>6M</td>
+ <td>01-Oct-2017 13:03</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170930220116-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170930220116-20171001220301.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Oct-2017 01:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170930220116-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170930220116-20171002100134.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Oct-2017 13:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20170930220116-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20170930220116-20171002220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171001100335-20171001220301.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171001100335-20171001220301.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 01:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171001100335-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171001100335-20171002100134.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Oct-2017 13:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171001100335-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171001100335-20171002220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171001100335-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171001100335-20171003100226.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 12:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171001220301-20171002100134.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171001220301-20171002100134.partial.mar</a></td>
+ <td>6M</td>
+ <td>02-Oct-2017 13:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171001220301-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171001220301-20171002220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171001220301-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171001220301-20171003100226.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 12:49</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171001220301-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171001220301-20171003220138.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171002100134-20171002220204.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171002100134-20171002220204.partial.mar</a></td>
+ <td>5M</td>
+ <td>03-Oct-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171002100134-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171002100134-20171003100226.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Oct-2017 12:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171002100134-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171002100134-20171003220138.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171002100134-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171002100134-20171004100049.partial.mar</a></td>
+ <td>9M</td>
+ <td>04-Oct-2017 12:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171002220204-20171003100226.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171002220204-20171003100226.partial.mar</a></td>
+ <td>6M</td>
+ <td>03-Oct-2017 12:48</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171002220204-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171002220204-20171003220138.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171002220204-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171002220204-20171004100049.partial.mar</a></td>
+ <td>9M</td>
+ <td>04-Oct-2017 12:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171002220204-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171002220204-20171004220309.partial.mar</a></td>
+ <td>9M</td>
+ <td>05-Oct-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171003100226-20171003220138.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171003100226-20171003220138.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171003100226-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171003100226-20171004100049.partial.mar</a></td>
+ <td>9M</td>
+ <td>04-Oct-2017 12:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171003100226-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171003100226-20171004220309.partial.mar</a></td>
+ <td>9M</td>
+ <td>05-Oct-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171003100226-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171003100226-20171005100211.partial.mar</a></td>
+ <td>9M</td>
+ <td>05-Oct-2017 12:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171003220138-20171004100049.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171003220138-20171004100049.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Oct-2017 12:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171003220138-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171003220138-20171004220309.partial.mar</a></td>
+ <td>9M</td>
+ <td>05-Oct-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171003220138-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171003220138-20171005100211.partial.mar</a></td>
+ <td>9M</td>
+ <td>05-Oct-2017 12:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171003220138-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171003220138-20171005220204.partial.mar</a></td>
+ <td>9M</td>
+ <td>06-Oct-2017 01:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171004100049-20171004220309.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171004100049-20171004220309.partial.mar</a></td>
+ <td>7M</td>
+ <td>05-Oct-2017 01:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171004100049-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171004100049-20171005100211.partial.mar</a></td>
+ <td>7M</td>
+ <td>05-Oct-2017 12:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171004100049-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171004100049-20171005220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Oct-2017 01:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171004100049-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171004100049-20171006100327.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Oct-2017 13:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171004220309-20171005100211.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171004220309-20171005100211.partial.mar</a></td>
+ <td>6M</td>
+ <td>05-Oct-2017 12:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171004220309-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171004220309-20171005220204.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Oct-2017 01:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171004220309-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171004220309-20171006100327.partial.mar</a></td>
+ <td>6M</td>
+ <td>06-Oct-2017 13:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171004220309-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171004220309-20171006220306.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 00:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171005100211-20171005220204.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171005100211-20171005220204.partial.mar</a></td>
+ <td>5M</td>
+ <td>06-Oct-2017 01:26</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171005100211-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171005100211-20171006100327.partial.mar</a></td>
+ <td>5M</td>
+ <td>06-Oct-2017 13:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171005100211-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171005100211-20171006220306.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 00:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171005100211-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171005100211-20171007100142.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 12:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171005220204-20171006100327.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171005220204-20171006100327.partial.mar</a></td>
+ <td>5M</td>
+ <td>06-Oct-2017 13:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171005220204-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171005220204-20171006220306.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 00:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171005220204-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171005220204-20171007100142.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 12:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171005220204-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171005220204-20171007220156.partial.mar</a></td>
+ <td>8M</td>
+ <td>08-Oct-2017 00:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171006100327-20171006220306.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171006100327-20171006220306.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 00:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171006100327-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171006100327-20171007100142.partial.mar</a></td>
+ <td>7M</td>
+ <td>07-Oct-2017 12:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171006100327-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171006100327-20171007220156.partial.mar</a></td>
+ <td>8M</td>
+ <td>08-Oct-2017 00:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171006100327-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171006100327-20171008131700.partial.mar</a></td>
+ <td>7M</td>
+ <td>08-Oct-2017 16:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171006220306-20171007100142.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171006220306-20171007100142.partial.mar</a></td>
+ <td>8M</td>
+ <td>07-Oct-2017 12:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171006220306-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171006220306-20171007220156.partial.mar</a></td>
+ <td>7M</td>
+ <td>08-Oct-2017 00:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171006220306-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171006220306-20171008131700.partial.mar</a></td>
+ <td>8M</td>
+ <td>08-Oct-2017 16:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171006220306-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171006220306-20171008220130.partial.mar</a></td>
+ <td>8M</td>
+ <td>09-Oct-2017 01:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171007100142-20171007220156.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171007100142-20171007220156.partial.mar</a></td>
+ <td>6M</td>
+ <td>08-Oct-2017 00:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171007100142-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171007100142-20171008131700.partial.mar</a></td>
+ <td>7M</td>
+ <td>08-Oct-2017 16:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171007100142-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171007100142-20171008220130.partial.mar</a></td>
+ <td>7M</td>
+ <td>09-Oct-2017 01:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171007100142-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171007100142-20171009100134.partial.mar</a></td>
+ <td>7M</td>
+ <td>09-Oct-2017 13:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171007220156-20171008131700.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171007220156-20171008131700.partial.mar</a></td>
+ <td>7M</td>
+ <td>08-Oct-2017 16:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171007220156-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171007220156-20171008220130.partial.mar</a></td>
+ <td>7M</td>
+ <td>09-Oct-2017 01:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171007220156-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171007220156-20171009100134.partial.mar</a></td>
+ <td>6M</td>
+ <td>09-Oct-2017 13:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171007220156-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171007220156-20171009220104.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 01:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171008131700-20171008220130.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171008131700-20171008220130.partial.mar</a></td>
+ <td>5M</td>
+ <td>09-Oct-2017 01:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171008131700-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171008131700-20171009100134.partial.mar</a></td>
+ <td>7M</td>
+ <td>09-Oct-2017 13:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171008131700-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171008131700-20171009220104.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 01:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171008131700-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171008131700-20171010100200.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 13:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171008220130-20171009100134.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171008220130-20171009100134.partial.mar</a></td>
+ <td>7M</td>
+ <td>09-Oct-2017 13:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171008220130-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171008220130-20171009220104.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 01:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171008220130-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171008220130-20171010100200.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 13:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171008220130-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171008220130-20171010220102.partial.mar</a></td>
+ <td>9M</td>
+ <td>11-Oct-2017 01:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171009100134-20171009220104.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171009100134-20171009220104.partial.mar</a></td>
+ <td>7M</td>
+ <td>10-Oct-2017 01:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171009100134-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171009100134-20171010100200.partial.mar</a></td>
+ <td>8M</td>
+ <td>10-Oct-2017 13:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171009100134-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171009100134-20171010220102.partial.mar</a></td>
+ <td>8M</td>
+ <td>11-Oct-2017 01:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171009100134-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171009100134-20171011100133.partial.mar</a></td>
+ <td>9M</td>
+ <td>11-Oct-2017 17:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171009220104-20171010100200.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171009220104-20171010100200.partial.mar</a></td>
+ <td>7M</td>
+ <td>10-Oct-2017 13:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171009220104-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171009220104-20171010220102.partial.mar</a></td>
+ <td>7M</td>
+ <td>11-Oct-2017 01:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171009220104-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171009220104-20171011100133.partial.mar</a></td>
+ <td>8M</td>
+ <td>11-Oct-2017 17:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171009220104-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171009220104-20171011220113.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 01:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171010100200-20171010220102.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171010100200-20171010220102.partial.mar</a></td>
+ <td>7M</td>
+ <td>11-Oct-2017 01:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171010100200-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171010100200-20171011100133.partial.mar</a></td>
+ <td>8M</td>
+ <td>11-Oct-2017 17:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171010100200-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171010100200-20171011220113.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 01:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171010100200-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171010100200-20171012100228.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 14:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171010100200-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171010100200-20171012105833.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 16:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171010220102-20171011100133.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171010220102-20171011100133.partial.mar</a></td>
+ <td>7M</td>
+ <td>11-Oct-2017 17:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171010220102-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171010220102-20171011220113.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 01:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171010220102-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171010220102-20171012100228.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 14:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171010220102-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171010220102-20171012105833.partial.mar</a></td>
+ <td>8M</td>
+ <td>12-Oct-2017 16:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171011100133-20171011220113.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171011100133-20171011220113.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 01:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171011100133-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171011100133-20171012100228.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 14:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171011100133-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171011100133-20171012105833.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 16:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171011100133-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171011100133-20171012220111.partial.mar</a></td>
+ <td>8M</td>
+ <td>13-Oct-2017 00:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171011220113-20171012100228.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171011220113-20171012100228.partial.mar</a></td>
+ <td>6M</td>
+ <td>12-Oct-2017 14:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171011220113-20171012105833.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171011220113-20171012105833.partial.mar</a></td>
+ <td>7M</td>
+ <td>12-Oct-2017 16:06</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171011220113-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171011220113-20171012220111.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 00:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171011220113-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171011220113-20171013100112.partial.mar</a></td>
+ <td>8M</td>
+ <td>13-Oct-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171012100228-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171012100228-20171012220111.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 00:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171012100228-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171012100228-20171013100112.partial.mar</a></td>
+ <td>8M</td>
+ <td>13-Oct-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171012100228-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171012100228-20171013220204.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 02:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171012105833-20171012220111.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171012105833-20171012220111.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 00:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171012105833-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171012105833-20171013100112.partial.mar</a></td>
+ <td>8M</td>
+ <td>13-Oct-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171012105833-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171012105833-20171013220204.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 02:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171012105833-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171012105833-20171014100219.partial.mar</a></td>
+ <td>9M</td>
+ <td>14-Oct-2017 13:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171012220111-20171013100112.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171012220111-20171013100112.partial.mar</a></td>
+ <td>7M</td>
+ <td>13-Oct-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171012220111-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171012220111-20171013220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>14-Oct-2017 02:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171012220111-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171012220111-20171014100219.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 13:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171012220111-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171012220111-20171014220542.partial.mar</a></td>
+ <td>8M</td>
+ <td>15-Oct-2017 02:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171013100112-20171013220204.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171013100112-20171013220204.partial.mar</a></td>
+ <td>7M</td>
+ <td>14-Oct-2017 02:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171013100112-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171013100112-20171014100219.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 13:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171013100112-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171013100112-20171014220542.partial.mar</a></td>
+ <td>8M</td>
+ <td>15-Oct-2017 02:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171013100112-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171013100112-20171015100127.partial.mar</a></td>
+ <td>8M</td>
+ <td>15-Oct-2017 15:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171013220204-20171014100219.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171013220204-20171014100219.partial.mar</a></td>
+ <td>8M</td>
+ <td>14-Oct-2017 13:11</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171013220204-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171013220204-20171014220542.partial.mar</a></td>
+ <td>8M</td>
+ <td>15-Oct-2017 02:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171013220204-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171013220204-20171015100127.partial.mar</a></td>
+ <td>8M</td>
+ <td>15-Oct-2017 15:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171013220204-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171013220204-20171015220106.partial.mar</a></td>
+ <td>8M</td>
+ <td>16-Oct-2017 02:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171014100219-20171014220542.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171014100219-20171014220542.partial.mar</a></td>
+ <td>6M</td>
+ <td>15-Oct-2017 02:15</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171014100219-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171014100219-20171015100127.partial.mar</a></td>
+ <td>7M</td>
+ <td>15-Oct-2017 15:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171014100219-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171014100219-20171015220106.partial.mar</a></td>
+ <td>7M</td>
+ <td>16-Oct-2017 02:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171014100219-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171014100219-20171016100113.partial.mar</a></td>
+ <td>7M</td>
+ <td>16-Oct-2017 14:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171014220542-20171015100127.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171014220542-20171015100127.partial.mar</a></td>
+ <td>6M</td>
+ <td>15-Oct-2017 15:16</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171014220542-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171014220542-20171015220106.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 02:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171014220542-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171014220542-20171016100113.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 14:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171014220542-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171014220542-20171016220427.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 02:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171015100127-20171015220106.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171015100127-20171015220106.partial.mar</a></td>
+ <td>5M</td>
+ <td>16-Oct-2017 02:08</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171015100127-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171015100127-20171016100113.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 14:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171015100127-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171015100127-20171016220427.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 02:04</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171015100127-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171015100127-20171017100127.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 13:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171015220106-20171016100113.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171015220106-20171016100113.partial.mar</a></td>
+ <td>6M</td>
+ <td>16-Oct-2017 14:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171015220106-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171015220106-20171016220427.partial.mar</a></td>
+ <td>6M</td>
+ <td>17-Oct-2017 02:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171015220106-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171015220106-20171017100127.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 13:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171015220106-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171015220106-20171017141229.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 18:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171016100113-20171016220427.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171016100113-20171016220427.partial.mar</a></td>
+ <td>5M</td>
+ <td>17-Oct-2017 02:05</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171016100113-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171016100113-20171017100127.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 13:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171016100113-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171016100113-20171017141229.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 18:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171016100113-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171016100113-20171017220415.partial.mar</a></td>
+ <td>7M</td>
+ <td>18-Oct-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171016220427-20171017100127.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171016220427-20171017100127.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 13:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171016220427-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171016220427-20171017141229.partial.mar</a></td>
+ <td>7M</td>
+ <td>17-Oct-2017 18:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171016220427-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171016220427-20171017220415.partial.mar</a></td>
+ <td>7M</td>
+ <td>18-Oct-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171016220427-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171016220427-20171018100140.partial.mar</a></td>
+ <td>7M</td>
+ <td>18-Oct-2017 13:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171017100127-20171017141229.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171017100127-20171017141229.partial.mar</a></td>
+ <td>5M</td>
+ <td>17-Oct-2017 18:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171017100127-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171017100127-20171017220415.partial.mar</a></td>
+ <td>6M</td>
+ <td>18-Oct-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171017100127-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171017100127-20171018100140.partial.mar</a></td>
+ <td>6M</td>
+ <td>18-Oct-2017 13:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171017100127-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171017100127-20171018220049.partial.mar</a></td>
+ <td>6M</td>
+ <td>19-Oct-2017 01:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171017141229-20171017220415.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171017141229-20171017220415.partial.mar</a></td>
+ <td>6M</td>
+ <td>18-Oct-2017 01:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171017141229-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171017141229-20171018100140.partial.mar</a></td>
+ <td>6M</td>
+ <td>18-Oct-2017 13:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171017141229-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171017141229-20171018220049.partial.mar</a></td>
+ <td>6M</td>
+ <td>19-Oct-2017 01:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171017141229-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171017141229-20171019100107.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Oct-2017 13:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171017220415-20171018100140.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171017220415-20171018100140.partial.mar</a></td>
+ <td>6M</td>
+ <td>18-Oct-2017 13:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171017220415-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171017220415-20171018220049.partial.mar</a></td>
+ <td>6M</td>
+ <td>19-Oct-2017 01:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171017220415-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171017220415-20171019100107.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Oct-2017 13:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171017220415-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171017220415-20171019222141.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Oct-2017 02:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171018100140-20171018220049.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171018100140-20171018220049.partial.mar</a></td>
+ <td>5M</td>
+ <td>19-Oct-2017 01:25</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171018100140-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171018100140-20171019100107.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Oct-2017 13:52</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171018100140-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171018100140-20171019222141.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Oct-2017 02:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171018100140-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171018100140-20171020100426.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Oct-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171018220049-20171019100107.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171018220049-20171019100107.partial.mar</a></td>
+ <td>7M</td>
+ <td>19-Oct-2017 13:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171018220049-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171018220049-20171019222141.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Oct-2017 02:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171018220049-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171018220049-20171020100426.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Oct-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171018220049-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171018220049-20171020221129.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 01:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171019100107-20171019222141.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171019100107-20171019222141.partial.mar</a></td>
+ <td>6M</td>
+ <td>20-Oct-2017 02:33</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171019100107-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171019100107-20171020100426.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Oct-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171019100107-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171019100107-20171020221129.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 01:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171019100107-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171019100107-20171021100029.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171019222141-20171020100426.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171019222141-20171020100426.partial.mar</a></td>
+ <td>7M</td>
+ <td>20-Oct-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171019222141-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171019222141-20171020221129.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 01:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171019222141-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171019222141-20171021100029.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171019222141-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171019222141-20171021220121.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 02:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171020100426-20171020221129.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171020100426-20171020221129.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 01:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171020100426-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171020100426-20171021100029.partial.mar</a></td>
+ <td>6M</td>
+ <td>21-Oct-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171020100426-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171020100426-20171021220121.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Oct-2017 02:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171020100426-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171020100426-20171022100058.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 13:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171020221129-20171021100029.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171020221129-20171021100029.partial.mar</a></td>
+ <td>7M</td>
+ <td>21-Oct-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171020221129-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171020221129-20171021220121.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 02:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171020221129-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171020221129-20171022100058.partial.mar</a></td>
+ <td>7M</td>
+ <td>22-Oct-2017 13:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171020221129-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171020221129-20171022220103.partial.mar</a></td>
+ <td>7M</td>
+ <td>23-Oct-2017 01:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171021100029-20171021220121.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171021100029-20171021220121.partial.mar</a></td>
+ <td>5M</td>
+ <td>22-Oct-2017 02:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171021100029-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171021100029-20171022100058.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Oct-2017 13:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171021100029-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171021100029-20171022220103.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Oct-2017 01:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171021100029-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171021100029-20171023100252.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Oct-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171021220121-20171022100058.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171021220121-20171022100058.partial.mar</a></td>
+ <td>6M</td>
+ <td>22-Oct-2017 13:31</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171021220121-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171021220121-20171022220103.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Oct-2017 01:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171021220121-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171021220121-20171023100252.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Oct-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171021220121-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171021220121-20171023220222.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Oct-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171022100058-20171022220103.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171022100058-20171022220103.partial.mar</a></td>
+ <td>5M</td>
+ <td>23-Oct-2017 01:14</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171022100058-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171022100058-20171023100252.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Oct-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171022100058-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171022100058-20171023220222.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Oct-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171022100058-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171022100058-20171024100135.partial.mar</a></td>
+ <td>8M</td>
+ <td>24-Oct-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171022220103-20171023100252.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171022220103-20171023100252.partial.mar</a></td>
+ <td>6M</td>
+ <td>23-Oct-2017 13:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171022220103-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171022220103-20171023220222.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Oct-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171022220103-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171022220103-20171024100135.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Oct-2017 13:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171022220103-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171022220103-20171024220325.partial.mar</a></td>
+ <td>8M</td>
+ <td>25-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171023100252-20171023220222.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171023100252-20171023220222.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Oct-2017 00:38</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171023100252-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171023100252-20171024100135.partial.mar</a></td>
+ <td>8M</td>
+ <td>24-Oct-2017 13:18</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171023100252-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171023100252-20171024220325.partial.mar</a></td>
+ <td>8M</td>
+ <td>25-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171023100252-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171023100252-20171025100449.partial.mar</a></td>
+ <td>8M</td>
+ <td>25-Oct-2017 13:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171023220222-20171024100135.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171023220222-20171024100135.partial.mar</a></td>
+ <td>7M</td>
+ <td>24-Oct-2017 13:19</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171023220222-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171023220222-20171024220325.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171023220222-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171023220222-20171025100449.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Oct-2017 13:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171023220222-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171023220222-20171025230440.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 02:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171024100135-20171024220325.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171024100135-20171024220325.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Oct-2017 01:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171024100135-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171024100135-20171025100449.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Oct-2017 13:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171024100135-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171024100135-20171025230440.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 02:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171024100135-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171024100135-20171026100047.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 15:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171024220325-20171025100449.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171024220325-20171025100449.partial.mar</a></td>
+ <td>7M</td>
+ <td>25-Oct-2017 13:32</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171024220325-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171024220325-20171025230440.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 02:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171024220325-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171024220325-20171026100047.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 15:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171024220325-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171024220325-20171026221945.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 02:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171025100449-20171025230440.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171025100449-20171025230440.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 02:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171025100449-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171025100449-20171026100047.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 15:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171025100449-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171025100449-20171026221945.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 02:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171025100449-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171025100449-20171027100103.partial.mar</a></td>
+ <td>8M</td>
+ <td>27-Oct-2017 15:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171025230440-20171026100047.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171025230440-20171026100047.partial.mar</a></td>
+ <td>7M</td>
+ <td>26-Oct-2017 15:23</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171025230440-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171025230440-20171026221945.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 02:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171025230440-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171025230440-20171027100103.partial.mar</a></td>
+ <td>8M</td>
+ <td>27-Oct-2017 15:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171025230440-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171025230440-20171027220059.partial.mar</a></td>
+ <td>17M</td>
+ <td>28-Oct-2017 03:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171026100047-20171026221945.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171026100047-20171026221945.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 02:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171026100047-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171026100047-20171027100103.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 15:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171026100047-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171026100047-20171027220059.partial.mar</a></td>
+ <td>17M</td>
+ <td>28-Oct-2017 03:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171026100047-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171026100047-20171028100423.partial.mar</a></td>
+ <td>17M</td>
+ <td>28-Oct-2017 17:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171026221945-20171027100103.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171026221945-20171027100103.partial.mar</a></td>
+ <td>7M</td>
+ <td>27-Oct-2017 15:28</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171026221945-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171026221945-20171027220059.partial.mar</a></td>
+ <td>17M</td>
+ <td>28-Oct-2017 03:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171026221945-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171026221945-20171028100423.partial.mar</a></td>
+ <td>17M</td>
+ <td>28-Oct-2017 17:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171026221945-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171026221945-20171028220326.partial.mar</a></td>
+ <td>17M</td>
+ <td>29-Oct-2017 01:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171027100103-20171027220059.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171027100103-20171027220059.partial.mar</a></td>
+ <td>16M</td>
+ <td>28-Oct-2017 03:17</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171027100103-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171027100103-20171028100423.partial.mar</a></td>
+ <td>16M</td>
+ <td>28-Oct-2017 17:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171027100103-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171027100103-20171028220326.partial.mar</a></td>
+ <td>16M</td>
+ <td>29-Oct-2017 01:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171027100103-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171027100103-20171029102300.partial.mar</a></td>
+ <td>16M</td>
+ <td>29-Oct-2017 14:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171027220059-20171028100423.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171027220059-20171028100423.partial.mar</a></td>
+ <td>7M</td>
+ <td>28-Oct-2017 17:01</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171027220059-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171027220059-20171028220326.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Oct-2017 01:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171027220059-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171027220059-20171029102300.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Oct-2017 14:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171027220059-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171027220059-20171029220112.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 05:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171028100423-20171028220326.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171028100423-20171028220326.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Oct-2017 01:36</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171028100423-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171028100423-20171029102300.partial.mar</a></td>
+ <td>7M</td>
+ <td>29-Oct-2017 14:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171028100423-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171028100423-20171029220112.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 05:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171028100423-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171028100423-20171030103605.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 20:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171028220326-20171029102300.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171028220326-20171029102300.partial.mar</a></td>
+ <td>6M</td>
+ <td>29-Oct-2017 14:57</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171028220326-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171028220326-20171029220112.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Oct-2017 05:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171028220326-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171028220326-20171030103605.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 20:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171028220326-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171028220326-20171031220132.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 08:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171028220326-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171028220326-20171031235118.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 10:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171029102300-20171029220112.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171029102300-20171029220112.partial.mar</a></td>
+ <td>6M</td>
+ <td>30-Oct-2017 05:07</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171029102300-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171029102300-20171030103605.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 20:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171029102300-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171029102300-20171031220132.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 08:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171029102300-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171029102300-20171031235118.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 10:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171029220112-20171030103605.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171029220112-20171030103605.partial.mar</a></td>
+ <td>7M</td>
+ <td>30-Oct-2017 20:42</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171029220112-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171029220112-20171031220132.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 08:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171029220112-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171029220112-20171031235118.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 10:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171029220112-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171029220112-20171101104430.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 16:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171030103605-20171031220132.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171030103605-20171031220132.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 08:12</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171030103605-20171031235118.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171030103605-20171031235118.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 10:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171030103605-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171030103605-20171101104430.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 16:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171030103605-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171030103605-20171101220120.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Nov-2017 00:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171031220132-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171031220132-20171101104430.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 16:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171031220132-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171031220132-20171101220120.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Nov-2017 00:41</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171031220132-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171031220132-20171102100041.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Nov-2017 12:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171031235118-20171101104430.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171031235118-20171101104430.partial.mar</a></td>
+ <td>7M</td>
+ <td>01-Nov-2017 16:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171031235118-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171031235118-20171101220120.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Nov-2017 00:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171031235118-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171031235118-20171102100041.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Nov-2017 12:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171031235118-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171031235118-20171102222620.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Nov-2017 01:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171101104430-20171101220120.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171101104430-20171101220120.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Nov-2017 00:40</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171101104430-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171101104430-20171102100041.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Nov-2017 12:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171101104430-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171101104430-20171102222620.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Nov-2017 01:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171101104430-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171101104430-20171103100331.partial.mar</a></td>
+ <td>8M</td>
+ <td>03-Nov-2017 12:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171101220120-20171102100041.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171101220120-20171102100041.partial.mar</a></td>
+ <td>7M</td>
+ <td>02-Nov-2017 12:53</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171101220120-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171101220120-20171102222620.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Nov-2017 01:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171101220120-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171101220120-20171103100331.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Nov-2017 13:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171101220120-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171101220120-20171103220715.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Nov-2017 00:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171102100041-20171102222620.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171102100041-20171102222620.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Nov-2017 01:27</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171102100041-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171102100041-20171103100331.partial.mar</a></td>
+ <td>8M</td>
+ <td>03-Nov-2017 13:00</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171102100041-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171102100041-20171103220715.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Nov-2017 00:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171102100041-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171102100041-20171104100412.partial.mar</a></td>
+ <td>9M</td>
+ <td>04-Nov-2017 12:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171102222620-20171103100331.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171102222620-20171103100331.partial.mar</a></td>
+ <td>7M</td>
+ <td>03-Nov-2017 12:59</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171102222620-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171102222620-20171103220715.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Nov-2017 00:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171102222620-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171102222620-20171104100412.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Nov-2017 12:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171102222620-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171102222620-20171104220420.partial.mar</a></td>
+ <td>9M</td>
+ <td>05-Nov-2017 00:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171103100331-20171103220715.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171103100331-20171103220715.partial.mar</a></td>
+ <td>7M</td>
+ <td>04-Nov-2017 00:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171103100331-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171103100331-20171104100412.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Nov-2017 12:46</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171103100331-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171103100331-20171104220420.partial.mar</a></td>
+ <td>9M</td>
+ <td>05-Nov-2017 00:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171103100331-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171103100331-20171105100353.partial.mar</a></td>
+ <td>9M</td>
+ <td>05-Nov-2017 12:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171103220715-20171104100412.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171103220715-20171104100412.partial.mar</a></td>
+ <td>8M</td>
+ <td>04-Nov-2017 12:47</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171103220715-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171103220715-20171104220420.partial.mar</a></td>
+ <td>9M</td>
+ <td>05-Nov-2017 00:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171103220715-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171103220715-20171105100353.partial.mar</a></td>
+ <td>9M</td>
+ <td>05-Nov-2017 12:43</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171103220715-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171103220715-20171105220721.partial.mar</a></td>
+ <td>9M</td>
+ <td>06-Nov-2017 00:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171104100412-20171104220420.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171104100412-20171104220420.partial.mar</a></td>
+ <td>7M</td>
+ <td>05-Nov-2017 00:58</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171104100412-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171104100412-20171105100353.partial.mar</a></td>
+ <td>7M</td>
+ <td>05-Nov-2017 12:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171104100412-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171104100412-20171105220721.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Nov-2017 00:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171104100412-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171104100412-20171106100122.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Nov-2017 12:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171104220420-20171105100353.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171104220420-20171105100353.partial.mar</a></td>
+ <td>5M</td>
+ <td>05-Nov-2017 12:44</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171104220420-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171104220420-20171105220721.partial.mar</a></td>
+ <td>5M</td>
+ <td>06-Nov-2017 00:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171104220420-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171104220420-20171106100122.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Nov-2017 12:51</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171105100353-20171105220721.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171105100353-20171105220721.partial.mar</a></td>
+ <td>5M</td>
+ <td>06-Nov-2017 00:55</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171105100353-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171105100353-20171106100122.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Nov-2017 12:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/firefox-mozilla-central-58.0a1-win64-en-US-20171105220721-20171106100122.partial.mar">firefox-mozilla-central-58.0a1-win64-en-US-20171105220721-20171106100122.partial.mar</a></td>
+ <td>7M</td>
+ <td>06-Nov-2017 12:50</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/jsshell-linux-i686.zip">jsshell-linux-i686.zip</a></td>
+ <td>8M</td>
+ <td>15-Feb-2018 12:39</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/jsshell-linux-x86_64.zip">jsshell-linux-x86_64.zip</a></td>
+ <td>10M</td>
+ <td>15-Feb-2018 12:13</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/jsshell-mac.zip">jsshell-mac.zip</a></td>
+ <td>10M</td>
+ <td>15-Feb-2018 11:37</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/jsshell-mac64.zip">jsshell-mac64.zip</a></td>
+ <td>10M</td>
+ <td>13-Dec-2016 12:02</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/jsshell-win32.zip">jsshell-win32.zip</a></td>
+ <td>8M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/jsshell-win64.zip">jsshell-win64.zip</a></td>
+ <td>9M</td>
+ <td>15-Feb-2018 13:20</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/mozharness.zip">mozharness.zip</a></td>
+ <td>2M</td>
+ <td>15-Feb-2018 13:21</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/setup-stub.exe">setup-stub.exe</a></td>
+ <td>1M</td>
+ <td>26-Jul-2017 11:56</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/setup.exe">setup.exe</a></td>
+ <td>643K</td>
+ <td>26-Jul-2017 12:09</td>
+ </tr>
+
+
+
+ <tr>
+ <td>File</td>
+ <td><a href="/pub/firefox/nightly/latest-mozilla-central/toolchains.json">toolchains.json</a></td>
+ <td>1K</td>
+ <td>26-Jul-2017 12:09</td>
+ </tr>
+
+
+ </table>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2018-05-17.html b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2018-05-17.html
new file mode 100644
index 0000000000..950b539de5
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2018-05-17.html
@@ -0,0 +1,30 @@
+<div class="column large-7 small-12 gutter padding-bottom-small">
+ <h4>Safari Technology Preview</h4>
+ <p class="margin-bottom-small">
+ Get a sneak peek at upcoming web technologies in macOS and iOS and
+ experiment with these technologies in your websites and extensions.
+ </p>
+ <ul class="links small">
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/091-82859-20180516-222E2B66-E7F2-410F-AA71-A27B03AB84F6/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS High Sierra</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 10.13.</span
+ >
+ </li>
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/091-84914-20180516-222E2B66-E7F2-410F-AA71-A27B03AB84F6/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Sierra</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 10.12.6 or later.</span
+ >
+ </li>
+ <li class="document">
+ <a href="/safari/technology-preview/release-notes/">Release Notes</a>
+ </li>
+ </ul>
+</div>
diff --git a/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2018-09-19.html b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2018-09-19.html
new file mode 100644
index 0000000000..5c00c72219
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2018-09-19.html
@@ -0,0 +1,33 @@
+<div class="column large-7 small-12 gutter padding-bottom-small">
+ <h4>Safari Technology Preview</h4>
+ <p class="margin-bottom-small">
+ Get a sneak peek at upcoming web technologies in macOS and iOS with
+ <a href="/safari/technology-preview/" class="nowrap"
+ >Safari Technology Preview</a
+ >
+ and experiment with these technologies in your websites and extensions.
+ </p>
+ <ul class="links small">
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/041-04649-20180910-76E7269A-B217-11E8-B40C-C08B7A641E38/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Mojave</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 10.14 beta.</span
+ >
+ </li>
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/041-04652-20180910-76E7269A-B217-11E8-B40C-C08B7A641E38/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS High Sierra</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 10.13.</span
+ >
+ </li>
+ <li class="document">
+ <a href="/safari/technology-preview/release-notes/">Release Notes</a>
+ </li>
+ </ul>
+</div>
diff --git a/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2020-06-04.html b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2020-06-04.html
new file mode 100644
index 0000000000..49ac12dad9
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2020-06-04.html
@@ -0,0 +1,33 @@
+<div class="column large-7 small-12 gutter padding-bottom-small">
+ <h4>Safari Technology Preview</h4>
+ <p class="margin-bottom-small">
+ Get a sneak peek at upcoming web technologies in macOS and iOS with
+ <a href="/safari/technology-preview/" class="nowrap"
+ >Safari Technology Preview</a
+ >
+ and experiment with these technologies in your websites and extensions.
+ </p>
+ <ul class="links small">
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/001-09514-20200527-05f7a42c-d9a0-4a60-ba12-97f2145db993/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Catalina</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 10.15.</span
+ >
+ </li>
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/001-09573-20200527-5319cd41-1eb4-412a-817a-bf376957b539/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Mojave</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 10.14.</span
+ >
+ </li>
+ <li class="document">
+ <a href="/safari/technology-preview/release-notes/">Release Notes</a>
+ </li>
+ </ul>
+</div>
diff --git a/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2020-07-16.html b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2020-07-16.html
new file mode 100644
index 0000000000..2c376cc87a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2020-07-16.html
@@ -0,0 +1,36 @@
+<div class="column large-7 small-12 gutter padding-bottom-small">
+ <h4>Safari Technology Preview</h4>
+ <p class="margin-bottom-small">
+ Get a sneak peek at upcoming web technologies in macOS and iOS with
+ <a href="/safari/technology-preview/" class="nowrap"
+ >Safari Technology Preview</a
+ >
+ and experiment with these technologies in your websites and extensions.
+ </p>
+ <ul class="links small">
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/001-22645-20200715-da14bc37-e5e6-4790-9e99-0b9d54293dd4/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Big Sur</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 11 beta.</span
+ ><span class="smaller lighter nowrap nowrap-small"
+ >Note: A known issue prevents this release of Safari Technology Preview
+ from working on DTK units.</span
+ >
+ </li>
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/001-26262-20200715-7968c880-7e9c-4ea5-9f90-f337c51066d8/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Catalina</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 10.15.</span
+ >
+ </li>
+ <li class="document">
+ <a href="/safari/technology-preview/release-notes/">Release Notes</a>
+ </li>
+ </ul>
+</div>
diff --git a/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2020-11-14.html b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2020-11-14.html
new file mode 100644
index 0000000000..a02d94d786
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2020-11-14.html
@@ -0,0 +1,33 @@
+<div class="column large-7 small-12 gutter padding-bottom-small">
+ <h4>Safari Technology Preview</h4>
+ <p class="margin-bottom-small">
+ Get a sneak peek at upcoming web technologies in macOS and iOS with
+ <a href="/safari/technology-preview/" class="nowrap"
+ >Safari Technology Preview</a
+ >
+ and experiment with these technologies in your websites and extensions.
+ </p>
+ <ul class="links small">
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/001-62679-20201022-42e0d63a-527a-45af-beb1-02cd4095e341/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Big Sur</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 11 beta.</span
+ >
+ </li>
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/001-62651-20201022-6cd92e18-bfbe-48cc-9385-d84da8f3c24c/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Catalina</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 10.15.</span
+ >
+ </li>
+ <li class="document">
+ <a href="/safari/technology-preview/release-notes/">Release Notes</a>
+ </li>
+ </ul>
+</div>
diff --git a/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2021-06-08.html b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2021-06-08.html
new file mode 100644
index 0000000000..bfba2a7216
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2021-06-08.html
@@ -0,0 +1,33 @@
+<div class="column large-7 small-12 gutter padding-bottom-small">
+ <h4>Safari Technology Preview</h4>
+ <p class="margin-bottom-small">
+ Get a sneak peek at upcoming web technologies in macOS and iOS with
+ <a href="/safari/technology-preview/" class="nowrap"
+ >Safari Technology Preview</a
+ >
+ and experiment with these technologies in your websites and extensions.
+ </p>
+ <ul class="links small">
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/071-45899-20210526-3fe7359c-0f20-4850-b6ec-da9b197119c2/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Big Sur</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 11.</span
+ >
+ </li>
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/071-44527-20210526-93430244-0334-4fae-878d-56502a656003/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Catalina</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 10.15.</span
+ >
+ </li>
+ <li class="document">
+ <a href="/safari/technology-preview/release-notes/">Release Notes</a>
+ </li>
+ </ul>
+</div>
diff --git a/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2021-10-28.html b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2021-10-28.html
new file mode 100644
index 0000000000..36f6b0e807
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2021-10-28.html
@@ -0,0 +1,31 @@
+<div class="column large-7 small-12 gutter padding-bottom-small">
+ <h4>Safari Technology Preview</h4>
+ <p class="margin-bottom-small">
+ Get a sneak peek at upcoming web technologies in macOS and iOS with
+ <a href="/safari/technology-preview/" class="nowrap"
+ >Safari Technology Preview</a
+ >
+ and experiment with these technologies in your websites and extensions.
+ </p>
+ <ul class="links small">
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/002-26657-20211027-0354AC04-106E-4389-8084-861E45C1DC98/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Monterey</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS 12 beta.</span
+ >
+ </li>
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/002-26659-20211027-D948F693-7DCB-4C54-AA93-760F7DCB69D6/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Big Sur</a
+ ><br /><span class="smaller lighter">Requires macOS&nbsp;11.</span>
+ </li>
+ <li class="document">
+ <a href="/safari/technology-preview/release-notes/">Release Notes</a>
+ </li>
+ </ul>
+</div>
diff --git a/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-05-29.html b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-05-29.html
new file mode 100644
index 0000000000..b95b27b8ba
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-05-29.html
@@ -0,0 +1,44 @@
+<div class="callout">
+ <figure
+ class="app-icon large-icon safari-preview-icon"
+ aria-hidden="true"></figure>
+ <h4>Safari Technology Preview</h4>
+ <p class="margin-bottom-small">
+ Get a sneak peek at upcoming web technologies in macOS and iOS with
+ <a href="/safari/technology-preview/" class="nowrap"
+ >Safari Technology Preview</a
+ >
+ and experiment with these technologies in your websites and extensions.
+ </p>
+ <ul class="links small">
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/012-08405-20220525-72BCCE23-C6E8-460A-851A-A29AC9C9BCF7/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Monterey</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS&nbsp;12.</span
+ >
+ </li>
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/012-08529-20220525-85875EC7-E4B8-4F5A-9571-85C51D6E381D/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS Big Sur</a
+ ><br /><span class="smaller lighter">Requires macOS&nbsp;11.</span>
+ </li>
+ <li class="document">
+ <a href="/safari/technology-preview/release-notes/">Release Notes</a>
+ </li>
+ </ul>
+ <div class="row gutter text-left">
+ <div class="column">
+ <p class="sosumi no-margin-bottom margin-top-small">Release</p>
+ <p class="smaller lighter no-margin">146</p>
+ </div>
+ <div class="column">
+ <p class="sosumi no-margin-bottom margin-top-small">Posted</p>
+ <p class="smaller lighter no-margin">May 25, 2022</p>
+ </div>
+ </div>
+</div>
diff --git a/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-06-22.html b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-06-22.html
new file mode 100644
index 0000000000..c019ff9330
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-06-22.html
@@ -0,0 +1,46 @@
+<div class="callout">
+ <figure
+ class="app-icon large-icon safari-preview-icon"
+ aria-hidden="true"></figure>
+ <h4>Safari Technology Preview</h4>
+ <p class="margin-bottom-small">
+ Get a sneak peek at upcoming web technologies in macOS and iOS with
+ <a href="/safari/technology-preview/" class="nowrap"
+ >Safari Technology Preview</a
+ >
+ and experiment with these technologies in your websites and extensions.
+ </p>
+ <ul class="links small">
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/012-30324-20220621-99D72AEC-A0E2-4B48-8AC0-B567E3FD046B/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS&nbsp;Ventura</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS&nbsp;13 beta.</span
+ >
+ </li>
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/012-15389-20220621-FA8B8AC9-0442-432C-80B6-6016AB193FCA/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview for macOS&nbsp;Monterey</a
+ ><br /><span class="smaller lighter"
+ >Requires macOS&nbsp;12.3 or later.</span
+ >
+ </li>
+ <li class="document">
+ <a href="/safari/technology-preview/release-notes/">Release Notes</a>
+ </li>
+ </ul>
+ <div class="row gutter text-left">
+ <div class="column">
+ <p class="sosumi no-margin-bottom margin-top-small">Release</p>
+ <p class="smaller lighter no-margin">147</p>
+ </div>
+ <div class="column">
+ <p class="sosumi no-margin-bottom margin-top-small">Posted</p>
+ <p class="smaller lighter no-margin">June 21, 2022</p>
+ </div>
+ </div>
+</div>
diff --git a/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-07-05.html b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-07-05.html
new file mode 100644
index 0000000000..18bd2459ec
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-07-05.html
@@ -0,0 +1,37 @@
+<div class="callout">
+ <figure
+ class="app-icon large-icon safari-preview-icon"
+ aria-hidden="true"></figure>
+ <h4>Safari Technology Preview</h4>
+ <p class="margin-bottom-small">
+ Get a sneak peek at upcoming web technologies in macOS and iOS with
+ <a href="/safari/technology-preview/" class="nowrap"
+ >Safari Technology Preview</a
+ >
+ and experiment with these technologies in your websites and extensions.
+ </p>
+ <ul class="links small">
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/012-32918-20220629-B3452905-0138-4CA9-A4E6-334B63585653/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview 148 for macOS&nbsp;Monterey</a
+ ><br /><span class="smaller lighter"
+ >Requires macOS&nbsp;12.3 or later.</span
+ >
+ </li>
+ <li class="document">
+ <a href="/safari/technology-preview/release-notes/">Release Notes</a>
+ </li>
+ </ul>
+ <div class="row gutter text-left">
+ <div class="column">
+ <p class="sosumi no-margin-bottom margin-top-small">Release</p>
+ <p class="smaller lighter no-margin">148</p>
+ </div>
+ <div class="column">
+ <p class="sosumi no-margin-bottom margin-top-small">Posted</p>
+ <p class="smaller lighter no-margin">June 29, 2022</p>
+ </div>
+ </div>
+</div>
diff --git a/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-07-07.html b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-07-07.html
new file mode 100644
index 0000000000..f73c9ad457
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-07-07.html
@@ -0,0 +1,46 @@
+<div class="callout">
+ <figure
+ class="app-icon large-icon safari-preview-icon"
+ aria-hidden="true"></figure>
+ <h4>Safari Technology Preview</h4>
+ <p class="margin-bottom-small">
+ Get a sneak peek at upcoming web technologies in macOS and iOS with
+ <a href="/safari/technology-preview/" class="nowrap"
+ >Safari Technology Preview</a
+ >
+ and experiment with these technologies in your websites and extensions.
+ </p>
+ <ul class="links small">
+ <li class="dmg">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/012-38225-20220706-237860CD-5766-4F53-AAC7-1CE26023A959/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview<br />for macOS&nbsp;Ventura</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS&nbsp;13 beta&nbsp;3 or later.</span
+ >
+ </li>
+ <li class="dmg margin-top-small">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/012-32918-20220629-B3452905-0138-4CA9-A4E6-334B63585653/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview<br />for macOS&nbsp;Monterey</a
+ ><br /><span class="smaller lighter"
+ >Requires macOS&nbsp;12.3 or later.</span
+ >
+ </li>
+ <li class="document margin-top-small">
+ <a href="/safari/technology-preview/release-notes/">Release Notes</a>
+ </li>
+ </ul>
+ <div class="row gutter text-left">
+ <div class="column">
+ <p class="sosumi no-margin-bottom margin-top-small">Release</p>
+ <p class="smaller lighter no-margin">148</p>
+ </div>
+ <div class="column">
+ <p class="sosumi no-margin-bottom margin-top-small">Posted</p>
+ <p class="smaller lighter no-margin">June 29, 2022</p>
+ </div>
+ </div>
+</div>
diff --git a/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-08-25.html b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-08-25.html
new file mode 100644
index 0000000000..0f8bbe633e
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/safari-downloads/2022-08-25.html
@@ -0,0 +1,48 @@
+<div class="callout">
+ <figure
+ class="app-icon large-icon safari-preview-icon"
+ aria-hidden="true"
+ data-hires-status="pending"
+ ></figure>
+ <h4>Safari Technology Preview</h4>
+ <p class="margin-bottom-small">
+ Get a sneak peek at upcoming web technologies in macOS and iOS with
+ <a href="/safari/technology-preview/" class="nowrap"
+ >Safari Technology Preview</a
+ >
+ and experiment with these technologies in your websites and extensions.
+ </p>
+ <ul class="links small">
+ <li class="dmg" data-hires-status="pending">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/012-57606-20220824-F8A58F03-EAE7-4741-A1A4-4B13388819FD/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview<br />for macOS&nbsp;Ventura</a
+ ><br /><span class="smaller lighter nowrap nowrap-small"
+ >Requires macOS&nbsp;13 beta</span
+ >
+ </li>
+ <li class="dmg margin-top-small" data-hires-status="pending">
+ <a
+ class="inline"
+ href="https://secure-appldnld.apple.com/STP/012-56204-20220824-0BA2352E-A387-4BF9-964C-59A63B09E501/SafariTechnologyPreview.dmg"
+ >Safari Technology Preview<br />for macOS&nbsp;Monterey</a
+ ><br /><span class="smaller lighter"
+ >Requires macOS&nbsp;12.3 or later</span
+ >
+ </li>
+ <li class="document margin-top-small" data-hires-status="pending">
+ <a href="/safari/technology-preview/release-notes/">Release Notes</a>
+ </li>
+ </ul>
+ <div class="row gutter text-left">
+ <div class="column">
+ <p class="sosumi no-margin-bottom margin-top-small">Release</p>
+ <p class="smaller lighter no-margin">152</p>
+ </div>
+ <div class="column">
+ <p class="sosumi no-margin-bottom margin-top-small">Posted</p>
+ <p class="smaller lighter no-margin">August 24, 2022</p>
+ </div>
+ </div>
+</div>
diff --git a/testing/web-platform/tests/tools/wpt/tests/test_browser.py b/testing/web-platform/tests/tools/wpt/tests/test_browser.py
new file mode 100644
index 0000000000..d1d31e5099
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/test_browser.py
@@ -0,0 +1,386 @@
+# mypy: allow-untyped-defs
+
+import logging
+import os
+import inspect
+import requests
+import subprocess
+import sys
+from unittest import mock
+
+import pytest
+
+from packaging.specifiers import SpecifierSet
+from tools.wpt import browser
+
+
+logger = logging.getLogger()
+
+
+def test_all_browser_abc():
+ # Make sure all subclasses of Browser implement all abstract methods
+ # (except some known base classes). This is a basic sanity test in case
+    # we change the ABC interface of Browser, since we only instantiate some
+ # products in unit tests.
+ classes = inspect.getmembers(browser)
+ for name, cls in classes:
+ if cls in (browser.Browser, browser.ChromeAndroidBase):
+ continue
+ if inspect.isclass(cls) and issubclass(cls, browser.Browser):
+ assert not inspect.isabstract(cls), "%s is abstract" % name
+
+
+def test_edgechromium_webdriver_supports_browser():
+ # EdgeDriver binary cannot be called.
+ edge = browser.EdgeChromium(logger)
+ edge.webdriver_version = mock.MagicMock(return_value=None)
+ assert not edge.webdriver_supports_browser('/usr/bin/edgedriver', '/usr/bin/edge')
+
+ # Browser binary cannot be called.
+ edge = browser.EdgeChromium(logger)
+ edge.webdriver_version = mock.MagicMock(return_value='70.0.1')
+ edge.version = mock.MagicMock(return_value=None)
+ assert edge.webdriver_supports_browser('/usr/bin/edgedriver', '/usr/bin/edge')
+
+ # Browser version matches.
+ edge = browser.EdgeChromium(logger)
+ edge.webdriver_version = mock.MagicMock(return_value='70.0.1')
+ edge.version = mock.MagicMock(return_value='70.1.5')
+ assert edge.webdriver_supports_browser('/usr/bin/edgedriver', '/usr/bin/edge')
+
+ # Browser version doesn't match.
+ edge = browser.EdgeChromium(logger)
+ edge.webdriver_version = mock.MagicMock(return_value='70.0.1')
+ edge.version = mock.MagicMock(return_value='69.0.1')
+ assert not edge.webdriver_supports_browser('/usr/bin/edgedriver', '/usr/bin/edge')
+
+
+# On Windows, webdriver_version directly calls _get_fileversion, so there is no
+# logic to test there.
+@pytest.mark.skipif(sys.platform.startswith('win'), reason='just uses _get_fileversion on Windows')
+@mock.patch('tools.wpt.browser.call')
+def test_edgechromium_webdriver_version(mocked_call):
+ edge = browser.EdgeChromium(logger)
+ webdriver_binary = '/usr/bin/edgedriver'
+
+ # Working cases.
+ mocked_call.return_value = 'Microsoft Edge WebDriver 84.0.4147.30'
+ assert edge.webdriver_version(webdriver_binary) == '84.0.4147.30'
+ mocked_call.return_value = 'Microsoft Edge WebDriver 87.0.1 (abcd1234-refs/branch-heads/4147@{#310})'
+ assert edge.webdriver_version(webdriver_binary) == '87.0.1'
+
+ # Various invalid version strings
+ mocked_call.return_value = 'Edge 84.0.4147.30 (dev)'
+ assert edge.webdriver_version(webdriver_binary) is None
+ mocked_call.return_value = 'Microsoft Edge WebDriver New 84.0.4147.30'
+ assert edge.webdriver_version(webdriver_binary) is None
+ mocked_call.return_value = ''
+ assert edge.webdriver_version(webdriver_binary) is None
+
+ # The underlying subprocess call throws.
+ mocked_call.side_effect = subprocess.CalledProcessError(5, 'cmd', output='Call failed')
+ assert edge.webdriver_version(webdriver_binary) is None
+
+
+def test_chrome_webdriver_supports_browser():
+ # ChromeDriver binary cannot be called.
+ chrome = browser.Chrome(logger)
+ chrome.webdriver_version = mock.MagicMock(return_value=None)
+ assert not chrome.webdriver_supports_browser('/usr/bin/chromedriver', '/usr/bin/chrome', 'stable')
+
+ # Browser binary cannot be called.
+ chrome = browser.Chrome(logger)
+ chrome.webdriver_version = mock.MagicMock(return_value='70.0.1')
+ chrome.version = mock.MagicMock(return_value=None)
+ assert chrome.webdriver_supports_browser('/usr/bin/chromedriver', '/usr/bin/chrome', 'stable')
+
+ # Browser version matches.
+ chrome = browser.Chrome(logger)
+ chrome.webdriver_version = mock.MagicMock(return_value='70.0.1')
+ chrome.version = mock.MagicMock(return_value='70.1.5')
+ assert chrome.webdriver_supports_browser('/usr/bin/chromedriver', '/usr/bin/chrome', 'stable')
+
+ # Browser version doesn't match.
+ chrome = browser.Chrome(logger)
+ chrome.webdriver_version = mock.MagicMock(return_value='70.0.1')
+ chrome.version = mock.MagicMock(return_value='69.0.1')
+ assert not chrome.webdriver_supports_browser('/usr/bin/chromedriver', '/usr/bin/chrome', 'stable')
+
+    # The dev channel switches between beta and ToT ChromeDriver, so the browser is
+    # sometimes a version behind its ChromeDriver. As such, we accept browser version + 1 there.
+ chrome = browser.Chrome(logger)
+ chrome.webdriver_version = mock.MagicMock(return_value='70.0.1')
+ chrome.version = mock.MagicMock(return_value='70.1.0')
+ assert chrome.webdriver_supports_browser('/usr/bin/chromedriver', '/usr/bin/chrome', 'dev')
+ chrome.webdriver_version = mock.MagicMock(return_value='71.0.1')
+ assert chrome.webdriver_supports_browser('/usr/bin/chromedriver', '/usr/bin/chrome', 'dev')
+
+
+def test_chromium_webdriver_supports_browser():
+ # ChromeDriver binary cannot be called.
+ chromium = browser.Chromium(logger)
+ chromium.webdriver_version = mock.MagicMock(return_value=None)
+ assert not chromium.webdriver_supports_browser('/usr/bin/chromedriver', '/usr/bin/chrome')
+
+ # Browser binary cannot be called.
+ chromium = browser.Chromium(logger)
+ chromium.webdriver_version = mock.MagicMock(return_value='70.0.1')
+ chromium.version = mock.MagicMock(return_value=None)
+ assert chromium.webdriver_supports_browser('/usr/bin/chromedriver', '/usr/bin/chrome')
+
+ # Browser version matches.
+ chromium = browser.Chromium(logger)
+ chromium.webdriver_version = mock.MagicMock(return_value='70.0.1')
+ chromium.version = mock.MagicMock(return_value='70.0.1')
+ assert chromium.webdriver_supports_browser('/usr/bin/chromedriver', '/usr/bin/chrome')
+
+ # Browser version doesn't match.
+ chromium = browser.Chromium(logger)
+ chromium.webdriver_version = mock.MagicMock(return_value='70.0.1')
+ chromium.version = mock.MagicMock(return_value='69.0.1')
+ assert not chromium.webdriver_supports_browser('/usr/bin/chromedriver', '/usr/bin/chrome', 'stable')
+
+
+# On Windows, webdriver_version directly calls _get_fileversion, so there is no
+# logic to test there.
+@pytest.mark.skipif(sys.platform.startswith('win'), reason='just uses _get_fileversion on Windows')
+@mock.patch('tools.wpt.browser.call')
+def test_chrome_webdriver_version(mocked_call):
+ chrome = browser.Chrome(logger)
+ webdriver_binary = '/usr/bin/chromedriver'
+
+ # Working cases.
+ mocked_call.return_value = 'ChromeDriver 84.0.4147.30'
+ assert chrome.webdriver_version(webdriver_binary) == '84.0.4147.30'
+ mocked_call.return_value = 'ChromeDriver 87.0.1 (abcd1234-refs/branch-heads/4147@{#310})'
+ assert chrome.webdriver_version(webdriver_binary) == '87.0.1'
+
+ # Various invalid version strings
+ mocked_call.return_value = 'Chrome 84.0.4147.30 (dev)'
+ assert chrome.webdriver_version(webdriver_binary) is None
+ mocked_call.return_value = 'ChromeDriver New 84.0.4147.30'
+ assert chrome.webdriver_version(webdriver_binary) is None
+ mocked_call.return_value = ''
+ assert chrome.webdriver_version(webdriver_binary) is None
+
+ # The underlying subprocess call throws.
+ mocked_call.side_effect = subprocess.CalledProcessError(5, 'cmd', output='Call failed')
+ assert chrome.webdriver_version(webdriver_binary) is None
+
+
+@mock.patch('subprocess.check_output')
+def test_safari_version(mocked_check_output):
+ safari = browser.Safari(logger)
+
+ # Safari
+ mocked_check_output.return_value = b'Included with Safari 12.1 (14607.1.11)'
+ assert safari.version(webdriver_binary="safaridriver") == '12.1 (14607.1.11)'
+
+ # Safari Technology Preview
+ mocked_check_output.return_value = b'Included with Safari Technology Preview (Release 67, 13607.1.9.0.1)'
+ assert safari.version(webdriver_binary="safaridriver") == 'Technology Preview (Release 67, 13607.1.9.0.1)'
+
+@mock.patch('subprocess.check_output')
+def test_safari_version_errors(mocked_check_output):
+ safari = browser.Safari(logger)
+
+ # No webdriver_binary
+ assert safari.version() is None
+
+    # `safaridriver --version` returns gibberish
+ mocked_check_output.return_value = b'gibberish'
+ assert safari.version(webdriver_binary="safaridriver") is None
+
+ # `safaridriver --version` fails (as it does for Safari <=12.0)
+ mocked_check_output.return_value = b'dummy'
+ mocked_check_output.side_effect = subprocess.CalledProcessError(1, 'cmd')
+ assert safari.version(webdriver_binary="safaridriver") is None
+
+
+@pytest.mark.parametrize(
+ "page_path",
+ sorted(
+ p.path
+ for p in os.scandir(os.path.join(os.path.dirname(__file__), "safari-downloads"))
+ if p.name.endswith(".html")
+ ),
+)
+@mock.patch("tools.wpt.browser.get")
+def test_safari_find_downloads_stp(mocked_get, page_path):
+ safari = browser.Safari(logger)
+
+ # Setup mock
+ response = requests.models.Response()
+ response.status_code = 200
+ response.encoding = "utf-8"
+ with open(page_path, "rb") as fp:
+ response._content = fp.read()
+ mocked_get.return_value = response
+
+ downloads = safari._find_downloads()
+
+ if page_path.endswith(
+ (
+ "2022-07-05.html",
+ )
+ ):
+ # occasionally STP is only shipped for a single OS version
+ assert len(downloads) == 1
+ else:
+ assert len(downloads) == 2
+
+
+@mock.patch("tools.wpt.browser.get")
+def test_safari_find_downloads_stp_20180517(mocked_get):
+ safari = browser.Safari(logger)
+ page_path = os.path.join(os.path.dirname(__file__), "safari-downloads", "2018-05-17.html")
+
+ # Setup mock
+ response = requests.models.Response()
+ response.status_code = 200
+ response.encoding = "utf-8"
+ with open(page_path, "rb") as fp:
+ response._content = fp.read()
+ mocked_get.return_value = response
+
+ downloads = safari._find_downloads()
+
+ assert len(downloads) == 2
+
+ assert downloads[0][0] == SpecifierSet("==10.13.*")
+ assert "10.12" not in downloads[0][0]
+ assert "10.13" in downloads[0][0]
+ assert "10.13.3" in downloads[0][0]
+ assert "10.14" not in downloads[0][0]
+
+ assert downloads[1][0] == SpecifierSet("~=10.12.6")
+ assert "10.12" not in downloads[1][0]
+ assert "10.12.6" in downloads[1][0]
+ assert "10.12.9" in downloads[1][0]
+ assert "10.13" not in downloads[1][0]
+
+
+@mock.patch("tools.wpt.browser.get")
+def test_safari_find_downloads_stp_20220529(mocked_get):
+ safari = browser.Safari(logger)
+ page_path = os.path.join(os.path.dirname(__file__), "safari-downloads", "2022-05-29.html")
+
+ # Setup mock
+ response = requests.models.Response()
+ response.status_code = 200
+ response.encoding = "utf-8"
+ with open(page_path, "rb") as fp:
+ response._content = fp.read()
+ mocked_get.return_value = response
+
+ downloads = safari._find_downloads()
+
+ assert len(downloads) == 2
+
+ assert downloads[0][0] == SpecifierSet("==12.*")
+ assert "11.4" not in downloads[0][0]
+ assert "12.0" in downloads[0][0]
+ assert "12.5" in downloads[0][0]
+ assert "13.0" not in downloads[0][0]
+
+ assert downloads[1][0] == SpecifierSet("==11.*")
+ assert "10.15.7" not in downloads[1][0]
+ assert "11.0.1" in downloads[1][0]
+ assert "11.3" in downloads[1][0]
+ assert "11.5" in downloads[1][0]
+ assert "12.0" not in downloads[1][0]
+
+
+@mock.patch("tools.wpt.browser.get")
+def test_safari_find_downloads_stp_20220707(mocked_get):
+ safari = browser.Safari(logger)
+ page_path = os.path.join(os.path.dirname(__file__), "safari-downloads", "2022-07-07.html")
+
+ # Setup mock
+ response = requests.models.Response()
+ response.status_code = 200
+ response.encoding = "utf-8"
+ with open(page_path, "rb") as fp:
+ response._content = fp.read()
+ mocked_get.return_value = response
+
+ downloads = safari._find_downloads()
+
+ assert len(downloads) == 2
+
+ assert downloads[0][0] == SpecifierSet("==13.*")
+ assert "12.4" not in downloads[0][0]
+ assert "13.0" in downloads[0][0]
+ assert "13.5" in downloads[0][0]
+ assert "14.0" not in downloads[0][0]
+
+ assert downloads[1][0] == SpecifierSet("~=12.3")
+ assert "11.5" not in downloads[1][0]
+ assert "12.2" not in downloads[1][0]
+ assert "12.3" in downloads[1][0]
+ assert "12.5" in downloads[1][0]
+ assert "13.0" not in downloads[1][0]
+
+
+@mock.patch('subprocess.check_output')
+def test_webkitgtk_minibrowser_version(mocked_check_output):
+ webkitgtk_minibrowser = browser.WebKitGTKMiniBrowser(logger)
+
+ # stable version
+ mocked_check_output.return_value = b'WebKitGTK 2.26.1\n'
+ assert webkitgtk_minibrowser.version(binary='MiniBrowser') == '2.26.1'
+
+ # nightly version
+ mocked_check_output.return_value = b'WebKitGTK 2.27.1 (r250823)\n'
+ assert webkitgtk_minibrowser.version(binary='MiniBrowser') == '2.27.1 (r250823)'
+
+@mock.patch('subprocess.check_output')
+def test_webkitgtk_minibrowser_version_errors(mocked_check_output):
+ webkitgtk_minibrowser = browser.WebKitGTKMiniBrowser(logger)
+
+ # No binary
+ assert webkitgtk_minibrowser.version() is None
+
+    # `MiniBrowser --version` returns gibberish
+ mocked_check_output.return_value = b'gibberish'
+ assert webkitgtk_minibrowser.version(binary='MiniBrowser') is None
+
+ # `MiniBrowser --version` fails (as it does for MiniBrowser <= 2.26.0)
+ mocked_check_output.return_value = b'dummy'
+ mocked_check_output.side_effect = subprocess.CalledProcessError(1, 'cmd')
+ assert webkitgtk_minibrowser.version(binary='MiniBrowser') is None
+
+
+# The test below doesn't work on Windows because distutils find_binary()
+# on Windows only works if the binary name ends with a ".exe" suffix.
+# But WebKitGTK itself doesn't support Windows, so let's skip the test.
+@pytest.mark.skipif(sys.platform.startswith('win'), reason='test not needed on Windows')
+@mock.patch('os.path.isfile')
+def test_webkitgtk_minibrowser_find_binary(mocked_os_path_isfile):
+ webkitgtk_minibrowser = browser.WebKitGTKMiniBrowser(logger)
+
+ # No MiniBrowser found
+ mocked_os_path_isfile.side_effect = lambda path: path == '/etc/passwd'
+ assert webkitgtk_minibrowser.find_binary() is None
+
+ # Found on the default Fedora path
+ fedora_minibrowser_path = '/usr/libexec/webkit2gtk-4.0/MiniBrowser'
+ mocked_os_path_isfile.side_effect = lambda path: path == fedora_minibrowser_path
+ assert webkitgtk_minibrowser.find_binary() == fedora_minibrowser_path
+
+ # Found on the default Debian path for AMD64 (gcc not available)
+ debian_minibrowser_path_amd64 = '/usr/lib/x86_64-linux-gnu/webkit2gtk-4.0/MiniBrowser'
+ mocked_os_path_isfile.side_effect = lambda path: path == debian_minibrowser_path_amd64
+ assert webkitgtk_minibrowser.find_binary() == debian_minibrowser_path_amd64
+
+ # Found on the default Debian path for AMD64 (gcc available but gives an error)
+ debian_minibrowser_path_amd64 = '/usr/lib/x86_64-linux-gnu/webkit2gtk-4.0/MiniBrowser'
+ mocked_os_path_isfile.side_effect = lambda path: path in [debian_minibrowser_path_amd64, '/usr/bin/gcc']
+ with mock.patch('subprocess.check_output', return_value = b'error', side_effect = subprocess.CalledProcessError(1, 'cmd')):
+ assert webkitgtk_minibrowser.find_binary() == debian_minibrowser_path_amd64
+
+ # Found on the default Debian path for ARM64 (gcc available)
+ debian_minibrowser_path_arm64 = '/usr/lib/aarch64-linux-gnu/webkit2gtk-4.0/MiniBrowser'
+ mocked_os_path_isfile.side_effect = lambda path: path in [debian_minibrowser_path_arm64, '/usr/bin/gcc']
+ with mock.patch('subprocess.check_output', return_value = b'aarch64-linux-gnu'):
+ assert webkitgtk_minibrowser.find_binary() == debian_minibrowser_path_arm64
diff --git a/testing/web-platform/tests/tools/wpt/tests/test_install.py b/testing/web-platform/tests/tools/wpt/tests/test_install.py
new file mode 100644
index 0000000000..2ee8f2bc0a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/test_install.py
@@ -0,0 +1,81 @@
+# mypy: allow-untyped-defs
+
+import logging
+import os
+import sys
+
+import pytest
+
+from tools.wpt import browser, utils, wpt
+
+
+@pytest.mark.slow
+@pytest.mark.remote_network
+def test_install_chromium():
+ venv_path = os.path.join(wpt.localpaths.repo_root, wpt.venv_dir())
+ channel = "nightly"
+ dest = os.path.join(wpt.localpaths.repo_root, wpt.venv_dir(), "browsers", channel)
+ if sys.platform == "win32":
+ chromium_path = os.path.join(dest, "chrome-win")
+ elif sys.platform == "darwin":
+ chromium_path = os.path.join(dest, "chrome-mac")
+ else:
+ chromium_path = os.path.join(dest, "chrome-linux")
+
+ if os.path.exists(chromium_path):
+ utils.rmtree(chromium_path)
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["install", "chromium", "browser"])
+ assert excinfo.value.code == 0
+ assert os.path.exists(chromium_path)
+
+ chromium = browser.Chromium(logging.getLogger("Chromium"))
+ binary = chromium.find_binary(venv_path, channel)
+ assert binary is not None and os.path.exists(binary)
+
+ utils.rmtree(chromium_path)
+
+
+@pytest.mark.slow
+@pytest.mark.remote_network
+def test_install_chrome():
+ with pytest.raises(NotImplementedError):
+ wpt.main(argv=["install", "chrome", "browser"])
+
+
+@pytest.mark.slow
+@pytest.mark.remote_network
+def test_install_chrome_chromedriver_by_version():
+ # This is not technically an integration test as we do not want to require Chrome Stable to run it.
+ chrome = browser.Chrome(logging.getLogger("Chrome"))
+ if sys.platform == "win32":
+ dest = os.path.join(wpt.localpaths.repo_root, wpt.venv_dir(), "Scripts")
+ chromedriver_path = os.path.join(dest, "chrome", "chromedriver.exe")
+ else:
+ dest = os.path.join(wpt.localpaths.repo_root, wpt.venv_dir(), "bin")
+ chromedriver_path = os.path.join(dest, "chrome", "chromedriver")
+ if os.path.exists(chromedriver_path):
+ os.unlink(chromedriver_path)
+ # This is a stable version.
+ binary_path = chrome.install_webdriver_by_version(dest=dest, version="84.0.4147.89")
+ assert binary_path == chromedriver_path
+ assert os.path.exists(chromedriver_path)
+ os.unlink(chromedriver_path)
+
+
+@pytest.mark.slow
+@pytest.mark.remote_network
+@pytest.mark.xfail(sys.platform == "win32",
+ reason="https://github.com/web-platform-tests/wpt/issues/17074")
+def test_install_firefox():
+ if sys.platform == "darwin":
+ fx_path = os.path.join(wpt.localpaths.repo_root, wpt.venv_dir(), "browsers", "nightly", "Firefox Nightly.app")
+ else:
+ fx_path = os.path.join(wpt.localpaths.repo_root, wpt.venv_dir(), "browsers", "nightly", "firefox")
+ if os.path.exists(fx_path):
+ utils.rmtree(fx_path)
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["install", "firefox", "browser", "--channel=nightly"])
+ assert excinfo.value.code == 0
+ assert os.path.exists(fx_path)
+ utils.rmtree(fx_path)
diff --git a/testing/web-platform/tests/tools/wpt/tests/test_markdown.py b/testing/web-platform/tests/tools/wpt/tests/test_markdown.py
new file mode 100644
index 0000000000..4f493826c1
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/test_markdown.py
@@ -0,0 +1,37 @@
+# mypy: allow-untyped-defs
+
+from tools.wpt import markdown
+
+def test_format_comment_title():
+ assert '# Browser #' == markdown.format_comment_title("browser")
+ assert '# Browser (channel) #' == markdown.format_comment_title("browser:channel")
+
+def test_markdown_adjust():
+ assert '\\t' == markdown.markdown_adjust('\t')
+ assert '\\r' == markdown.markdown_adjust('\r')
+ assert '\\n' == markdown.markdown_adjust('\n')
+ assert '' == markdown.markdown_adjust('`')
+ assert '\\|' == markdown.markdown_adjust('|')
+ assert '\\t\\r\\n\\|' == markdown.markdown_adjust('\t\r\n`|')
+
+result = ''
+def log(text):
+ global result
+ result += text
+
+def test_table():
+ global result
+ headings = ['h1','h2']
+ data = [['0', '1']]
+ markdown.table(headings, data, log)
+ assert ("| h1 | h2 |"
+ "|----|----|"
+ "| 0 | 1 |") == result
+
+ result = ''
+ data.append(['aaa', 'bb'])
+ markdown.table(headings, data, log)
+ assert ("| h1 | h2 |"
+ "|-----|----|"
+ "| 0 | 1 |"
+ "| aaa | bb |") == result
diff --git a/testing/web-platform/tests/tools/wpt/tests/test_revlist.py b/testing/web-platform/tests/tools/wpt/tests/test_revlist.py
new file mode 100644
index 0000000000..d5645868ef
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/test_revlist.py
@@ -0,0 +1,156 @@
+# mypy: allow-untyped-defs
+
+from unittest import mock
+
+from tools.wpt import revlist
+
+
+def test_calculate_cutoff_date():
+ assert revlist.calculate_cutoff_date(3601, 3600, 0) == 3600
+ assert revlist.calculate_cutoff_date(3600, 3600, 0) == 3600
+ assert revlist.calculate_cutoff_date(3599, 3600, 0) == 0
+ assert revlist.calculate_cutoff_date(3600, 3600, 1) == 1
+ assert revlist.calculate_cutoff_date(3600, 3600, -1) == 3599
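+    # Assuming the arguments are (until, epoch, offset), the cases above are
+    # consistent with a cutoff of ((until - offset) // epoch) * epoch + offset,
+    # i.e. the most recent offset-shifted epoch boundary not later than `until`
+    # (inferred from these assertions, not from the revlist implementation).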
+
+
+def test_parse_epoch():
+ assert revlist.parse_epoch("10h") == 36000
+ assert revlist.parse_epoch("10d") == 864000
+ assert revlist.parse_epoch("10w") == 6048000
+
+def check_revisions(tagged_revisions, expected_revisions):
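+    # Note: zip() stops at the shorter sequence, so this only compares the
+    # overlapping prefix; callers additionally assert that the generator is
+    # exhausted after the expected revisions have been consumed.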
+ for tagged, expected in zip(tagged_revisions, expected_revisions):
+ assert tagged == expected
+
+@mock.patch('subprocess.check_output')
+def test_get_epoch_revisions(mocked_check_output):
+ # check:
+ #
+ # * Several revisions in the same epoch offset (BC, DEF, HIJ, and LM)
+ # * Revision with a timestamp exactly equal to the epoch boundary (H)
+    # * Revision in a non-closed interval (O)
+ #
+ # mon tue wed thu fri sat sun mon thu wed
+ # | | | | | | | | |
+ # -A---B-C---DEF---G---H--IJ----------K-----L-M----N--O--
+ # ^
+ # until
+    # max_count: 8; epoch: 1d
+ # Expected result: N,M,K,J,G,F,C,A
+ epoch = 86400
+ until = 1188000 # Wednesday, 14 January 1970 18:00:00 UTC
+ mocked_check_output.return_value = b'''
+merge_pr_O O 1166400 _wed_
+merge_pr_N N 1080000 _tue_
+merge_pr_M M 1015200 _mon_
+merge_pr_L L 993600 _mon_
+merge_pr_K K 907200 _sun_
+merge_pr_J J 734400 _fri_
+merge_pr_I I 712800 _fri_
+merge_pr_H H 691200 _fri_
+merge_pr_G G 648000 _thu_
+merge_pr_F F 583200 _wed_
+merge_pr_E E 561600 _wed_
+merge_pr_D D 540000 _wed_
+merge_pr_C C 475200 _tue_
+merge_pr_B B 453600 _tue_
+merge_pr_A A 388800 _mon_
+'''
+ tagged_revisions = revlist.get_epoch_revisions(epoch, until, 8)
+ check_revisions(tagged_revisions, ['N', 'M', 'K', 'J', 'G', 'F', 'C', 'A'])
+ assert len(list(tagged_revisions)) == 0 # generator exhausted
+
+
+ # check: max_count with enough candidate items in the revision list
+ #
+ # mon tue wed thu fri sat sun mon
+ # | | | | | | |
+ # ------B-----C-----D----E-----F-----G------H---
+ # ^
+ # until
+ # max_count: 5; epoch: 1d
+ # Expected result: G,F,E,D,C
+ epoch = 86400
+ until = 1015200 # Monday, 12 January 1970 18:00:00 UTC
+ mocked_check_output.return_value = b'''
+merge_pr_H H 993600 _mon_
+merge_pr_G G 907200 _sun_
+merge_pr_F F 820800 _sat_
+merge_pr_E E 734400 _fri_
+merge_pr_D D 648000 _thu_
+merge_pr_C C 561600 _wed_
+merge_pr_B B 475200 _tue_
+'''
+ tagged_revisions = revlist.get_epoch_revisions(epoch, until, 5)
+ check_revisions(tagged_revisions, ['G', 'F', 'E', 'D', 'C'])
+ assert len(list(tagged_revisions)) == 0 # generator exhausted
+
+
+    # check: max_count with fewer candidate items returned than needed
+ #
+ # mon tue wed thu fri sat sun mon
+ # | | | | | | |
+ # -----------------------------F-----G------H---
+ # ^
+ # until
+ # max_count: 5; epoch: 1d
+ # Expected result: G,F
+ epoch = 86400
+ until = 1015200 # Monday, 12 January 1970 18:00:00 UTC
+ mocked_check_output.return_value = b'''
+merge_pr_H H 993600 _mon_
+merge_pr_G G 907200 _sun_
+merge_pr_F F 820800 _sat_
+'''
+ tagged_revisions = revlist.get_epoch_revisions(epoch, until, 5)
+ check_revisions(tagged_revisions, ['G', 'F'])
+ assert len(list(tagged_revisions)) == 0 # generator exhausted
+
+
+ # check: initial until value is on an epoch boundary
+ #
+ # sud mon tue wed thu
+ # | | | |
+ # -F-G-----------------H
+ # ^
+ # until
+ # max_count: 3; epoch: 1d
+ # Expected result: G,F
+    # * H is skipped because the epoch
+    #   interval is defined as a right-open interval
+    # * G is included, but in Monday's interval
+    # * F is included because it is the only candidate
+    #   in Sunday's interval
+ epoch = 86400
+ until = 1296000 # Thursday, 15 January 1970 0:00:00 UTC
+ mocked_check_output.return_value = b'''
+merge_pr_H H 1296000 _wed_
+merge_pr_G G 950400 _mon_
+merge_pr_F F 921600 _sud_
+'''
+ tagged_revisions = revlist.get_epoch_revisions(epoch, until, 3)
+ check_revisions(tagged_revisions, ['G', 'F'])
+ assert len(list(tagged_revisions)) == 0 # generator exhausted
+
+
+ # check: until aligned with Monday, 5 January 1970 0:00:00 (345600)
+ # not with Thursday, 1 January 1970 0:00:00 (0)
+ #
+ # sud mon tue wed thu
+ # | | | |
+ # -F-G--------------H---
+ # ^
+ # until
+ # max_count: 1; epoch: 1w
+ # Expected result: F
+ epoch = 604800
+    monday = 950400 # Monday, 12 January 1970 00:00:00 UTC
+    until = monday + 345600 # 1296000. Thursday, 15 January 1970 0:00:00 UTC
+ mocked_check_output.return_value = b'''
+merge_pr_H H 1180800 _wed_
+merge_pr_G G 950400 _mon_
+merge_pr_F F 921600 _sud_
+'''
+ tagged_revisions = revlist.get_epoch_revisions(epoch, until, 1)
+ check_revisions(tagged_revisions, ['F'])
+ assert len(list(tagged_revisions)) == 0 # generator exhausted
diff --git a/testing/web-platform/tests/tools/wpt/tests/test_run.py b/testing/web-platform/tests/tools/wpt/tests/test_run.py
new file mode 100644
index 0000000000..f0e0d3c3ed
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/test_run.py
@@ -0,0 +1,76 @@
+# mypy: allow-untyped-defs
+
+import tempfile
+import shutil
+import sys
+from unittest import mock
+
+import pytest
+
+from tools.wpt import run
+from tools import localpaths # noqa: F401
+from wptrunner.browsers import product_list
+
+
+@pytest.fixture(scope="module")
+def venv():
+ from tools.wpt import virtualenv
+
+ class Virtualenv(virtualenv.Virtualenv):
+ def __init__(self):
+ self.path = tempfile.mkdtemp()
+ self.skip_virtualenv_setup = False
+
+ def create(self):
+ return
+
+ def activate(self):
+ return
+
+ def start(self):
+ return
+
+ def install(self, *requirements):
+ return
+
+ def install_requirements(self, requirements_path):
+ return
+
+ venv = Virtualenv()
+ yield venv
+
+ shutil.rmtree(venv.path)
+
+
+@pytest.fixture(scope="module")
+def logger():
+ run.setup_logging({})
+
+
+@pytest.mark.parametrize("platform", ["Windows", "Linux", "Darwin"])
+def test_check_environ_fail(platform):
+ m_open = mock.mock_open(read_data=b"")
+
+ with mock.patch.object(run, "open", m_open):
+ with mock.patch.object(run.platform, "uname",
+ return_value=(platform, "", "", "", "", "")):
+ with pytest.raises(run.WptrunError) as excinfo:
+ run.check_environ("foo")
+
+ assert "wpt make-hosts-file" in str(excinfo.value)
+
+
+@pytest.mark.parametrize("product", product_list)
+def test_setup_wptrunner(venv, logger, product):
+ if product == "firefox_android":
+ pytest.skip("Android emulator doesn't work on docker")
+ parser = run.create_parser()
+ kwargs = vars(parser.parse_args(["--channel=nightly", product]))
+ kwargs["prompt"] = False
+ # Hack to get a real existing path
+ kwargs["binary"] = sys.argv[0]
+ kwargs["webdriver_binary"] = sys.argv[0]
+ if kwargs["product"] == "sauce":
+ kwargs["sauce_browser"] = "firefox"
+ kwargs["sauce_version"] = "63"
+ run.setup_wptrunner(venv, **kwargs)
diff --git a/testing/web-platform/tests/tools/wpt/tests/test_testfiles.py b/testing/web-platform/tests/tools/wpt/tests/test_testfiles.py
new file mode 100644
index 0000000000..790ee70a63
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/test_testfiles.py
@@ -0,0 +1,71 @@
+# mypy: allow-untyped-defs
+
+import os.path
+from unittest.mock import patch
+
+from tools.manifest.manifest import Manifest
+from tools.wpt import testfiles
+
+
+def test_getrevish_kwarg():
+ assert testfiles.get_revish(revish="abcdef") == "abcdef"
+ assert testfiles.get_revish(revish="123456\n") == "123456"
+
+
+def test_getrevish_implicit():
+ with patch("tools.wpt.testfiles.branch_point", return_value="base"):
+ assert testfiles.get_revish() == "base..HEAD"
+
+
+def test_affected_testfiles():
+ manifest_json = {
+ "items": {
+ "crashtest": {
+ "a": {
+ "b": {
+ "c": {
+ "foo-crash.html": [
+ "acdefgh123456",
+ ["null", {}],
+ ]
+ }
+ }
+ }
+ }
+ },
+ "url_base": "/",
+ "version": 8,
+ }
+ manifest = Manifest.from_json("/", manifest_json)
+ with patch("tools.wpt.testfiles.load_manifest", return_value=manifest):
+ # Dependent affected tests are determined by walking the filesystem,
+ # which doesn't work in our test setup. We would need to refactor
+ # testfiles.affected_testfiles or have a more complex test setup to
+ # support testing those.
+ full_test_path = os.path.join(
+ testfiles.wpt_root, "a", "b", "c", "foo-crash.html")
+ tests_changed, _ = testfiles.affected_testfiles([full_test_path])
+ assert tests_changed == {full_test_path}
+
+
+def test_exclude_ignored():
+ default_ignored = [
+ "resources/testharness.js",
+ "resources/testharnessreport.js",
+ "resources/testdriver.js",
+ "resources/testdriver-vendor.js",
+ ]
+ default_ignored_abs = sorted(os.path.join(testfiles.wpt_root, x) for x in default_ignored)
+ default_changed = [
+ "foo/bar.html"
+ ]
+ default_changed_abs = sorted(os.path.join(testfiles.wpt_root, x) for x in default_changed)
+ files = default_ignored + default_changed
+
+ changed, ignored = testfiles.exclude_ignored(files, None)
+ assert sorted(changed) == default_changed_abs
+ assert sorted(ignored) == default_ignored_abs
+
+ changed, ignored = testfiles.exclude_ignored(files, [])
+ assert sorted(changed) == sorted(default_changed_abs + default_ignored_abs)
+ assert sorted(ignored) == []
diff --git a/testing/web-platform/tests/tools/wpt/tests/test_update_expectations.py b/testing/web-platform/tests/tools/wpt/tests/test_update_expectations.py
new file mode 100644
index 0000000000..a278cb1262
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/test_update_expectations.py
@@ -0,0 +1,130 @@
+# mypy: ignore-errors
+
+import json
+import os
+
+import pytest
+
+from tools.wpt import wpt
+from tools.wptrunner.wptrunner import manifestexpected
+from localpaths import repo_root
+
+@pytest.fixture
+def metadata_file(tmp_path):
+ created_files = []
+
+ def create_metadata(test_id, subtest_name, product, status="OK", subtest_status="PASS", channel="nightly"):
+ run_info = {
+ "os": "linux",
+ "processor": "x86_64",
+ "version": "Ubuntu 20.04",
+ "os_version": "20.04",
+ "bits": 64,
+ "linux_distro": "Ubuntu",
+ "product": product,
+ "debug": False,
+ "browser_version": "98.0.2",
+ "browser_channel": channel,
+ "verify": False,
+ "headless": True,
+ }
+
+ result = {
+ "test": test_id,
+ "subtests": [
+ {
+ "name": subtest_name,
+ "status": subtest_status,
+ "message": None,
+ "known_intermittent": []
+ }
+ ],
+ "status": status,
+ "message": None,
+ "duration": 555,
+ "known_intermittent": []
+ }
+
+ if status != "OK":
+ result["expected"] = "OK"
+
+ if subtest_status != "PASS":
+ result["subtests"][0]["expected"] = "PASS"
+
+ data = {
+ "time_start": 1648629686379,
+ "run_info": run_info,
+ "results": [result],
+ "time_end": 1648629698721
+ }
+
+ path = os.path.join(tmp_path, f"wptreport-{len(created_files)}.json")
+ with open(path, "w") as f:
+ json.dump(data, f)
+
+ created_files.append(path)
+ return run_info, path
+
+ yield create_metadata
+
+ for path in created_files:
+ os.unlink(path)
+
+
+def test_update(tmp_path, metadata_file):
+ # This has to be a real test so it's in the manifest
+ test_id = "/infrastructure/assumptions/cookie.html"
+ subtest_name = "cookies work in default browse settings"
+ test_path = os.path.join("infrastructure",
+ "assumptions",
+ "cookie.html")
+ run_info_firefox, path_firefox = metadata_file(test_id,
+ subtest_name,
+ "firefox",
+ subtest_status="FAIL",
+ channel="nightly")
+ run_info_chrome, path_chrome = metadata_file(test_id,
+ subtest_name,
+ "chrome",
+ status="ERROR",
+ subtest_status="NOTRUN",
+ channel="dev")
+
+ metadata_path = str(os.path.join(tmp_path, "metadata"))
+ os.makedirs(metadata_path)
+ wptreport_paths = [path_firefox, path_chrome]
+
+ update_properties = {"properties": ["product"]}
+ with open(os.path.join(metadata_path, "update_properties.json"), "w") as f:
+ json.dump(update_properties, f)
+
+ args = ["update-expectations",
+ "--manifest", os.path.join(repo_root, "MANIFEST.json"),
+ "--metadata", metadata_path,
+ "--log-mach-level", "debug"]
+ args += wptreport_paths
+
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=args)
+
+ assert excinfo.value.code == 0
+
+ expectation_path = os.path.join(metadata_path, test_path + ".ini")
+
+ assert os.path.exists(expectation_path)
+
+ firefox_expected = manifestexpected.get_manifest(metadata_path,
+ test_path,
+ "/",
+ run_info_firefox)
+ # Default expected isn't stored
+ with pytest.raises(KeyError):
+ assert firefox_expected.get_test(test_id).get("expected")
+ assert firefox_expected.get_test(test_id).get_subtest(subtest_name).expected == "FAIL"
+
+ chrome_expected = manifestexpected.get_manifest(metadata_path,
+ test_path,
+ "/",
+ run_info_chrome)
+ assert chrome_expected.get_test(test_id).expected == "ERROR"
+ assert chrome_expected.get_test(test_id).get_subtest(subtest_name).expected == "NOTRUN"
diff --git a/testing/web-platform/tests/tools/wpt/tests/test_wpt.py b/testing/web-platform/tests/tools/wpt/tests/test_wpt.py
new file mode 100644
index 0000000000..f5671f3743
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tests/test_wpt.py
@@ -0,0 +1,406 @@
+# mypy: allow-untyped-defs
+
+import errno
+import os
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from urllib.request import urlopen
+from urllib.error import URLError
+
+import pytest
+
+here = os.path.abspath(os.path.dirname(__file__))
+from tools.wpt import utils, wpt
+
+
+def is_port_8000_in_use():
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ try:
+ s.bind(("127.0.0.1", 8000))
+ except OSError as e:
+ if e.errno == errno.EADDRINUSE:
+ return True
+ else:
+ raise e
+ finally:
+ s.close()
+ return False
+
+
+def get_persistent_manifest_path():
+ directory = ("~/meta" if os.environ.get('TRAVIS') == "true"
+ else wpt.localpaths.repo_root)
+ return os.path.join(directory, "MANIFEST.json")
+
+
+@pytest.fixture(scope="module", autouse=True)
+def init_manifest():
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["manifest", "--no-download",
+ "--path", get_persistent_manifest_path()])
+ assert excinfo.value.code == 0
+
+
+@pytest.fixture
+def manifest_dir():
+ try:
+ path = tempfile.mkdtemp()
+ shutil.copyfile(get_persistent_manifest_path(),
+ os.path.join(path, "MANIFEST.json"))
+ yield path
+ finally:
+ utils.rmtree(path)
+
+
+@pytest.fixture
+def temp_test():
+ os.makedirs("../../.tools-tests")
+ test_count = {"value": 0}
+
+ def make_test(body):
+ test_count["value"] += 1
+ test_name = ".tools-tests/%s.html" % test_count["value"]
+ test_path = "../../%s" % test_name
+
+ with open(test_path, "w") as handle:
+ handle.write("""
+ <!DOCTYPE html>
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script>%s</script>
+ """ % body)
+
+ return test_name
+
+ yield make_test
+
+ utils.rmtree("../../.tools-tests")
+
+
+def test_missing():
+ with pytest.raises(SystemExit):
+ wpt.main(argv=["#missing-command"])
+
+
+def test_help():
+    # TODO: It seems like there's a bug in argparse that makes this argument order required;
+    # we should try to work around that.
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["--help"])
+ assert excinfo.value.code == 0
+
+
+def test_load_commands():
+ commands = wpt.load_commands()
+ # The `wpt run` command has conditional requirements.
+ assert "conditional_requirements" in commands["run"]
+
+
+@pytest.mark.slow
+@pytest.mark.skipif(sys.platform == "win32",
+ reason="https://github.com/web-platform-tests/wpt/issues/28745")
+def test_list_tests(manifest_dir):
+ """The `--list-tests` option should not produce an error under normal
+ conditions."""
+
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["run", "--metadata", manifest_dir, "--list-tests",
+ "--channel", "dev", "--yes",
+ # Taskcluster machines do not have GPUs, so use software rendering via --enable-swiftshader.
+ "--enable-swiftshader",
+ "chrome", "/dom/nodes/Element-tagName.html"])
+ assert excinfo.value.code == 0
+
+
+@pytest.mark.slow
+def test_list_tests_missing_manifest(manifest_dir):
+ """The `--list-tests` option should not produce an error in the absence of
+ a test manifest file."""
+
+ os.remove(os.path.join(manifest_dir, "MANIFEST.json"))
+
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["run",
+ # This test triggers the creation of a new manifest
+ # file which is not necessary to ensure successful
+ # process completion. Specifying the current directory
+                       # as the tests source via the `--tests` option
+ # drastically reduces the time to execute the test.
+ "--tests", here,
+ "--metadata", manifest_dir,
+ "--list-tests",
+ "--yes",
+ "firefox", "/dom/nodes/Element-tagName.html"])
+
+ assert excinfo.value.code == 0
+
+
+@pytest.mark.slow
+def test_list_tests_invalid_manifest(manifest_dir):
+ """The `--list-tests` option should not produce an error in the presence of
+ a malformed test manifest file."""
+
+ manifest_filename = os.path.join(manifest_dir, "MANIFEST.json")
+
+ assert os.path.isfile(manifest_filename)
+
+ with open(manifest_filename, "a+") as handle:
+ handle.write("extra text which invalidates the file")
+
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["run",
+ # This test triggers the creation of a new manifest
+ # file which is not necessary to ensure successful
+ # process completion. Specifying the current directory
+                       # as the tests source via the `--tests` option
+ # drastically reduces the time to execute the test.
+ "--tests", here,
+ "--metadata", manifest_dir,
+ "--list-tests",
+ "--yes",
+ "firefox", "/dom/nodes/Element-tagName.html"])
+
+ assert excinfo.value.code == 0
+
+
+@pytest.mark.slow
+@pytest.mark.remote_network
+@pytest.mark.skipif(sys.platform == "win32",
+ reason="https://github.com/web-platform-tests/wpt/issues/28745")
+def test_run_zero_tests():
+ """A test execution describing zero tests should be reported as an error
+ even in the presence of the `--no-fail-on-unexpected` option."""
+ if is_port_8000_in_use():
+ pytest.skip("port 8000 already in use")
+
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["run", "--yes", "--no-pause", "--channel", "dev",
+ # Taskcluster machines do not have GPUs, so use software rendering via --enable-swiftshader.
+ "--enable-swiftshader",
+ "chrome", "/non-existent-dir/non-existent-file.html"])
+ assert excinfo.value.code != 0
+
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["run", "--yes", "--no-pause", "--no-fail-on-unexpected",
+ "--channel", "dev",
+ # Taskcluster machines do not have GPUs, so use software rendering via --enable-swiftshader.
+ "--enable-swiftshader",
+ "chrome", "/non-existent-dir/non-existent-file.html"])
+ assert excinfo.value.code != 0
+
+
+@pytest.mark.slow
+@pytest.mark.remote_network
+@pytest.mark.skipif(sys.platform == "win32",
+ reason="https://github.com/web-platform-tests/wpt/issues/28745")
+def test_run_failing_test():
+ """Failing tests should be reported with a non-zero exit status unless the
+ `--no-fail-on-unexpected` option has been specified."""
+ if is_port_8000_in_use():
+ pytest.skip("port 8000 already in use")
+ failing_test = "/infrastructure/expected-fail/failing-test.html"
+
+ assert os.path.isfile("../../%s" % failing_test)
+
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["run", "--yes", "--no-pause", "--channel", "dev",
+ # Taskcluster machines do not have GPUs, so use software rendering via --enable-swiftshader.
+ "--enable-swiftshader",
+ "chrome", failing_test])
+ assert excinfo.value.code != 0
+
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["run", "--yes", "--no-pause", "--no-fail-on-unexpected",
+ "--channel", "dev",
+ # Taskcluster machines do not have GPUs, so use software rendering via --enable-swiftshader.
+ "--enable-swiftshader",
+ "chrome", failing_test])
+ assert excinfo.value.code == 0
+
+
+@pytest.mark.slow
+@pytest.mark.remote_network
+@pytest.mark.skipif(sys.platform == "win32",
+ reason="https://github.com/web-platform-tests/wpt/issues/28745")
+def test_run_verify_unstable(temp_test):
+ """Unstable tests should be reported with a non-zero exit status. Stable
+ tests should be reported with a zero exit status."""
+ if is_port_8000_in_use():
+ pytest.skip("port 8000 already in use")
+ unstable_test = temp_test("""
+ test(function() {
+ if (localStorage.getItem('wpt-unstable-test-flag')) {
+ throw new Error();
+ }
+
+ localStorage.setItem('wpt-unstable-test-flag', 'x');
+ }, 'my test');
+ """)
+
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["run", "--yes", "--verify", "--channel", "dev",
+ # Taskcluster machines do not have GPUs, so use software rendering via --enable-swiftshader.
+ "--enable-swiftshader",
+ "chrome", unstable_test])
+ assert excinfo.value.code != 0
+
+ stable_test = temp_test("test(function() {}, 'my test');")
+
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["run", "--yes", "--verify", "--channel", "dev",
+ # Taskcluster machines do not have GPUs, so use software rendering via --enable-swiftshader.
+ "--enable-swiftshader",
+ "chrome", stable_test])
+ assert excinfo.value.code == 0
+
+
+def test_files_changed(capsys):
+ commit = "9047ac1d9f51b1e9faa4f9fad9c47d109609ab09"
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["files-changed", f"{commit}~..{commit}"])
+ assert excinfo.value.code == 0
+ out, err = capsys.readouterr()
+ expected = """html/browsers/offline/appcache/workers/appcache-worker.html
+html/browsers/offline/appcache/workers/resources/appcache-dedicated-worker-not-in-cache.js
+html/browsers/offline/appcache/workers/resources/appcache-shared-worker-not-in-cache.js
+html/browsers/offline/appcache/workers/resources/appcache-worker-data.py
+html/browsers/offline/appcache/workers/resources/appcache-worker-import.py
+html/browsers/offline/appcache/workers/resources/appcache-worker.manifest
+html/browsers/offline/appcache/workers/resources/appcache-worker.py
+""".replace("/", os.path.sep)
+ assert out == expected
+ assert err == ""
+
+
+def test_files_changed_null(capsys):
+ commit = "9047ac1d9f51b1e9faa4f9fad9c47d109609ab09"
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["files-changed", "--null", f"{commit}~..{commit}"])
+ assert excinfo.value.code == 0
+ out, err = capsys.readouterr()
+ expected = "\0".join(["html/browsers/offline/appcache/workers/appcache-worker.html",
+ "html/browsers/offline/appcache/workers/resources/appcache-dedicated-worker-not-in-cache.js",
+ "html/browsers/offline/appcache/workers/resources/appcache-shared-worker-not-in-cache.js",
+ "html/browsers/offline/appcache/workers/resources/appcache-worker-data.py",
+ "html/browsers/offline/appcache/workers/resources/appcache-worker-import.py",
+ "html/browsers/offline/appcache/workers/resources/appcache-worker.manifest",
+ "html/browsers/offline/appcache/workers/resources/appcache-worker.py",
+ ""]).replace("/", os.path.sep)
+ assert out == expected
+ assert err == ""
+
+
+def test_files_changed_ignore():
+ from tools.wpt.testfiles import exclude_ignored
+ files = ["resources/testharness.js", "resources/webidl2/index.js", "test/test.js"]
+ changed, ignored = exclude_ignored(files, ignore_rules=["resources/testharness*"])
+ assert changed == [os.path.join(wpt.wpt_root, item) for item in
+ ["resources/webidl2/index.js", "test/test.js"]]
+ assert ignored == [os.path.join(wpt.wpt_root, item) for item in
+ ["resources/testharness.js"]]
+
+
+def test_files_changed_ignore_rules():
+ from tools.wpt.testfiles import compile_ignore_rule
+ assert compile_ignore_rule("foo*bar*/baz").pattern == r"^foo\*bar[^/]*/baz$"
+ assert compile_ignore_rule("foo**bar**/baz").pattern == r"^foo\*\*bar.*/baz$"
+ assert compile_ignore_rule("foobar/baz/*").pattern == "^foobar/baz/[^/]*$"
+ assert compile_ignore_rule("foobar/baz/**").pattern == "^foobar/baz/.*$"
+
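In the patterns asserted above, a "*" at the end of a path component compiles to "[^/]*" and cannot cross "/", while "**" compiles to ".*" and can. A small illustration using only the regular expressions asserted above:

    import re

    single = re.compile(r"^foobar/baz/[^/]*$")  # from "foobar/baz/*"
    double = re.compile(r"^foobar/baz/.*$")     # from "foobar/baz/**"

    assert single.match("foobar/baz/file.js")
    assert not single.match("foobar/baz/sub/file.js")  # "*" does not cross "/"
    assert double.match("foobar/baz/sub/file.js")      # "**" does
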
+
+@pytest.mark.slow # this updates the manifest
+@pytest.mark.xfail(sys.platform == "win32",
+ reason="Tests currently don't work on Windows for path reasons")
+@pytest.mark.skipif(sys.platform == "win32",
+ reason="https://github.com/web-platform-tests/wpt/issues/12934")
+def test_tests_affected(capsys, manifest_dir):
+ # This doesn't really work properly for random commits because we test the files in
+ # the current working directory for references to the changed files, not the ones at
+ # that specific commit. But we can at least test it returns something sensible.
+ # The test will fail if the file we assert is renamed, so we choose a stable one.
+ commit = "3a055e818218f548db240c316654f3cc1aeeb733"
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["tests-affected", "--metadata", manifest_dir, f"{commit}~..{commit}"])
+ assert excinfo.value.code == 0
+ out, err = capsys.readouterr()
+ assert "infrastructure/reftest-wait.html" in out
+
+
+@pytest.mark.slow # this updates the manifest
+@pytest.mark.xfail(sys.platform == "win32",
+ reason="Tests currently don't work on Windows for path reasons")
+@pytest.mark.skipif(sys.platform == "win32",
+ reason="https://github.com/web-platform-tests/wpt/issues/12934")
+def test_tests_affected_idlharness(capsys, manifest_dir):
+ commit = "47cea8c38b88c0ddd3854e4edec0c5b6f2697e62"
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["tests-affected", "--metadata", manifest_dir, f"{commit}~..{commit}"])
+ assert excinfo.value.code == 0
+ out, err = capsys.readouterr()
+ assert ("mst-content-hint/idlharness.window.js\n" +
+ "webrtc-encoded-transform/idlharness.https.window.js\n" +
+ "webrtc-identity/idlharness.https.window.js\n" +
+ "webrtc-stats/idlharness.window.js\n" +
+ "webrtc-stats/supported-stats.https.html\n" +
+ "webrtc/idlharness.https.window.js\n") == out
+
+
+@pytest.mark.slow # this updates the manifest
+@pytest.mark.xfail(sys.platform == "win32",
+ reason="Tests currently don't work on Windows for path reasons")
+@pytest.mark.skipif(sys.platform == "win32",
+ reason="https://github.com/web-platform-tests/wpt/issues/12934")
+def test_tests_affected_null(capsys, manifest_dir):
+ # This doesn't really work properly for random commits because we test the files in
+ # the current working directory for references to the changed files, not the ones at
+ # that specific commit. But we can at least test it returns something sensible.
+ # The test will fail if the file we assert is renamed, so we choose a stable one.
+ commit = "2614e3316f1d3d1a744ed3af088d19516552a5de"
+ with pytest.raises(SystemExit) as excinfo:
+ wpt.main(argv=["tests-affected", "--null", "--metadata", manifest_dir, f"{commit}~..{commit}"])
+ assert excinfo.value.code == 0
+ out, err = capsys.readouterr()
+
+ tests = out.split("\0")
+ assert "dom/idlharness.any.js" in tests
+ assert "xhr/idlharness.any.js" in tests
+
+
+@pytest.mark.slow
+@pytest.mark.skipif(sys.platform == "win32",
+ reason="no os.setsid/killpg to easily cleanup the process tree")
+def test_serve():
+ if is_port_8000_in_use():
+ pytest.skip("port 8000 already in use")
+
+ p = subprocess.Popen([os.path.join(wpt.localpaths.repo_root, "wpt"), "serve"],
+ preexec_fn=os.setsid)
+
+ start = time.time()
+ try:
+ while True:
+ if p.poll() is not None:
+ assert False, "server not running"
+ if time.time() - start > 60:
+ assert False, "server did not start responding within 60s"
+ try:
+ resp = urlopen("http://web-platform.test:8000")
+ print(resp)
+ except URLError:
+ print("URLError")
+ time.sleep(1)
+ else:
+ assert resp.code == 200
+ break
+ finally:
+ os.killpg(p.pid, 15)
+
+# The following commands are slow running and used implicitly in other CI
+# jobs, so we skip them here:
+# wpt manifest
+# wpt lint
diff --git a/testing/web-platform/tests/tools/wpt/tox.ini b/testing/web-platform/tests/tools/wpt/tox.ini
new file mode 100644
index 0000000000..eda300c3c8
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/tox.ini
@@ -0,0 +1,19 @@
+[tox]
+envlist = py36,py37,py38,py39,py310
+skipsdist=True
+skip_missing_interpreters = False
+
+[testenv]
+deps =
+ -r{toxinidir}/../requirements_pytest.txt
+ -r{toxinidir}/requirements.txt
+ -r{toxinidir}/../wptrunner/requirements.txt
+ -r{toxinidir}/../wptrunner/requirements_chromium.txt
+ -r{toxinidir}/../wptrunner/requirements_firefox.txt
+
+commands =
+ pytest {posargs}
+
+passenv =
+ DISPLAY
+ TASKCLUSTER_ROOT_URL
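Assuming tox itself is installed, a single environment can be exercised locally with an invocation such as `tox -e py310 -- -k test_help`; everything after `--` is forwarded to pytest through `{posargs}` in the commands section above.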
diff --git a/testing/web-platform/tests/tools/wpt/update.py b/testing/web-platform/tests/tools/wpt/update.py
new file mode 100644
index 0000000000..41faeac54f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/update.py
@@ -0,0 +1,56 @@
+# mypy: allow-untyped-defs
+
+import os
+import sys
+
+from mozlog import commandline
+
+wpt_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+sys.path.insert(0, os.path.abspath(os.path.join(wpt_root, "tools")))
+
+
+def manifest_update(test_paths):
+ from manifest import manifest # type: ignore
+ for url_base, paths in test_paths.items():
+ manifest.load_and_update(
+ paths["tests_path"],
+ paths["manifest_path"],
+ url_base)
+
+
+def create_parser_update():
+ from wptrunner import wptcommandline
+
+ return wptcommandline.create_parser_metadata_update()
+
+
+def update_expectations(_, **kwargs):
+ from wptrunner import metadata, wptcommandline
+
+ commandline.setup_logging("web-platform-tests",
+ kwargs,
+ {"mach": sys.stdout},
+ formatter_defaults=None)
+
+ if not kwargs["tests_root"]:
+ kwargs["tests_root"] = wpt_root
+
+ # This matches the manifest path we end up using in `wpt run`
+ if not kwargs["manifest_path"]:
+ kwargs["manifest_path"] = os.path.join(wpt_root, "MANIFEST.json")
+
+ kwargs = wptcommandline.check_args_metadata_update(kwargs)
+
+ update_properties = metadata.get_properties(properties_file=kwargs["properties_file"],
+ extra_properties=kwargs["extra_property"],
+ config=kwargs["config"],
+ product=kwargs["product"])
+
+ manifest_update(kwargs["test_paths"])
+ metadata.update_expected(kwargs["test_paths"],
+ kwargs["run_log"],
+ update_properties=update_properties,
+ full_update=False,
+ disable_intermittent=kwargs["update_intermittent"],
+ update_intermittent=kwargs["update_intermittent"],
+ remove_intermittent=kwargs["update_intermittent"])
diff --git a/testing/web-platform/tests/tools/wpt/utils.py b/testing/web-platform/tests/tools/wpt/utils.py
new file mode 100644
index 0000000000..b015b95e1a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/utils.py
@@ -0,0 +1,168 @@
+# mypy: allow-untyped-defs
+
+import errno
+import logging
+import os
+import sys
+import shutil
+import stat
+import subprocess
+import tarfile
+import time
+import zipfile
+from io import BytesIO
+from socket import error as SocketError # NOQA: N812
+from urllib.request import urlopen
+
+logger = logging.getLogger(__name__)
+
+
+def call(*args):
+ """Log terminal command, invoke it as a subprocess.
+
+    Returns the decoded subprocess output if there is no error.
+ """
+ logger.debug(" ".join(args))
+ try:
+ return subprocess.check_output(args).decode('utf8')
+ except subprocess.CalledProcessError as e:
+ logger.critical("%s exited with return code %i" %
+ (e.cmd, e.returncode))
+ logger.critical(e.output)
+ raise
+
+
+def seekable(fileobj):
+ """Attempt to use file.seek on given file, with fallbacks."""
+ try:
+ fileobj.seek(fileobj.tell())
+ except Exception:
+ return BytesIO(fileobj.read())
+ else:
+ return fileobj
+
+
+def untar(fileobj, dest="."):
+ """Extract tar archive."""
+ logger.debug("untar")
+ fileobj = seekable(fileobj)
+ with tarfile.open(fileobj=fileobj) as tar_data:
+ tar_data.extractall(path=dest)
+
+
+def unzip(fileobj, dest=".", limit=None):
+ """Extract zip archive."""
+ logger.debug("unzip")
+ fileobj = seekable(fileobj)
+ with zipfile.ZipFile(fileobj) as zip_data:
+ for info in zip_data.infolist():
+ if limit is not None and info.filename not in limit:
+ continue
+            # external_attr is 4 bytes wide and its meaning depends on the system that created the ZIP file:
+            # - If the ZIP file was created in a UNIX environment, the 2 highest bytes hold UNIX permission and file
+            #   type bits (the st_mode entry of struct stat in sys/stat.h) and the lowest byte holds DOS FAT
+            #   compatibility attributes (used mainly to store the directory bit).
+            # - If the ZIP file was created in a WIN/DOS environment, the lowest byte holds the DOS FAT file attributes
+            #   (directory bit, hidden bit, read-only bit, system-file bit, etc.).
+            # More info at https://unix.stackexchange.com/a/14727 and https://forensicswiki.xyz/page/ZIP
+            # We can ignore the DOS FAT attributes because Python's ZipFile.extract() already creates directories as
+            # needed (both on Windows and *nix) and the remaining DOS FAT attributes (hidden/read-only/system-file/etc.)
+            # are not interesting here, not even on Windows, since we don't need to set them for our use case.
+            # So we do this:
+            # 1. When uncompressing on a Windows system we just call extract().
+            # 2. When uncompressing on a Unix-like system we only handle the permission bits if the ZIP file was
+            #    created on a Unix-like system; otherwise the only permission info available is the DOS FAT
+            #    attributes, which are useless here, so we call extract() without setting any specific permissions.
+ if info.create_system == 0 or sys.platform == 'win32':
+ zip_data.extract(info, path=dest)
+ else:
+ stat_st_mode = info.external_attr >> 16
+ info_dst_path = os.path.join(dest, info.filename)
+ if stat.S_ISLNK(stat_st_mode):
+                    # Symlinks are stored in the ZIP file as text entries whose content is the symlink's target path.
+                    # When an entry with the stat.S_IFLNK attribute is detected, recreate the symlink instead of calling extract().
+ link_src_path = zip_data.read(info)
+ link_dst_dir = os.path.dirname(info_dst_path)
+ if not os.path.isdir(link_dst_dir):
+ os.makedirs(link_dst_dir)
+
+                    # Remove the existing link if it exists.
+ if os.path.islink(info_dst_path):
+ os.unlink(info_dst_path)
+ os.symlink(link_src_path, info_dst_path)
+ else:
+ zip_data.extract(info, path=dest)
+ # Preserve bits 0-8 only: rwxrwxrwx (no sticky/setuid/setgid bits).
+ perm = stat_st_mode & 0x1FF
+ os.chmod(info_dst_path, perm)
+
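A small, illustrative check of the external_attr bit layout described in the comments inside unzip() above; the mode value is made up, and only the shift and mask mirror what the code actually does:

    import stat

    # A regular file stored by a UNIX zip tool with mode 0o755.
    external_attr = (stat.S_IFREG | 0o755) << 16

    mode = external_attr >> 16    # the two highest bytes: UNIX st_mode bits
    assert stat.S_ISREG(mode)
    assert mode & 0o777 == 0o755  # permission bits recovered from the archive entry
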
+
+def get(url):
+ """Issue GET request to a given URL and return the response."""
+ import requests
+
+ logger.debug("GET %s" % url)
+ resp = requests.get(url, stream=True)
+ resp.raise_for_status()
+ return resp
+
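A hypothetical way to combine get() with unzip() above; the URL is made up, and buffering the body in a BytesIO keeps the sketch independent of whether the response stream is seekable:

    from io import BytesIO

    resp = get("https://example.com/some-archive.zip")  # hypothetical URL
    unzip(BytesIO(resp.content), dest="_download")
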
+
+def get_download_to_descriptor(fd, url, max_retries=5):
+ """Download an URL in chunks and saves it to a file descriptor (truncating it)
+ It doesn't close the descriptor, but flushes it on success.
+ It retries the download in case of ECONNRESET up to max_retries.
+ This function is meant to download big files directly to the disk without
+ caching the whole file in memory.
+ """
+ if max_retries < 1:
+ max_retries = 1
+ wait = 2
+ for current_retry in range(1, max_retries+1):
+ try:
+ logger.info("Downloading %s Try %d/%d" % (url, current_retry, max_retries))
+ resp = urlopen(url)
+ # We may come here in a retry, ensure to truncate fd before start writing.
+ fd.seek(0)
+ fd.truncate(0)
+ while True:
+ chunk = resp.read(16*1024)
+ if not chunk:
+ break # Download finished
+ fd.write(chunk)
+ fd.flush()
+ # Success
+ return
+ except SocketError as e:
+ if current_retry < max_retries and e.errno == errno.ECONNRESET:
+ # Retry
+ logger.error("Connection reset by peer. Retrying after %ds..." % wait)
+ time.sleep(wait)
+ wait *= 2
+ else:
+ # Maximum retries or unknown error
+ raise
+
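A hypothetical usage sketch for get_download_to_descriptor(): open the destination in binary write mode and let the helper handle chunking, truncation and retries (the URL and file name are made up):

    with open("browser-build.tar.bz2", "wb") as fd:  # hypothetical file name
        get_download_to_descriptor(fd, "https://example.com/browser-build.tar.bz2")
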
+def rmtree(path):
+ # This works around two issues:
+ # 1. Cannot delete read-only files owned by us (e.g. files extracted from tarballs)
+    # 2. On Windows, we sometimes just need to retry in case the file handle
+    #    hasn't been fully released (a common issue).
+ def handle_remove_readonly(func, path, exc):
+ excvalue = exc[1]
+ if func in (os.rmdir, os.remove, os.unlink) and excvalue.errno == errno.EACCES:
+ os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) # 0777
+ func(path)
+ else:
+ raise
+
+ return shutil.rmtree(path, onerror=handle_remove_readonly)
+
+
+def sha256sum(file_path):
+ """Computes the SHA256 hash sum of a file"""
+ from hashlib import sha256
+ hash = sha256()
+ with open(file_path, 'rb') as f:
+ for chunk in iter(lambda: f.read(4096), b''):
+ hash.update(chunk)
+ return hash.hexdigest()
diff --git a/testing/web-platform/tests/tools/wpt/virtualenv.py b/testing/web-platform/tests/tools/wpt/virtualenv.py
new file mode 100644
index 0000000000..05ca52244c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/virtualenv.py
@@ -0,0 +1,137 @@
+# mypy: allow-untyped-defs
+
+import os
+import shutil
+import sys
+import logging
+from distutils.spawn import find_executable
+
+# The `pkg_resources` module is provided by `setuptools`, which is itself a
+# dependency of `virtualenv`. Tolerate its absence so that this module may be
+# evaluated when that module is not available. Because users may not recognize
+# the `pkg_resources` module by name, raise a more descriptive error if it is
+# referenced during execution.
+try:
+ import pkg_resources as _pkg_resources
+ get_pkg_resources = lambda: _pkg_resources
+except ImportError:
+ def get_pkg_resources():
+ raise ValueError("The Python module `virtualenv` is not installed.")
+
+from tools.wpt.utils import call
+
+logger = logging.getLogger(__name__)
+
+class Virtualenv:
+ def __init__(self, path, skip_virtualenv_setup):
+ self.path = path
+ self.skip_virtualenv_setup = skip_virtualenv_setup
+ if not skip_virtualenv_setup:
+ self.virtualenv = find_executable("virtualenv")
+ if not self.virtualenv:
+ raise ValueError("virtualenv must be installed and on the PATH")
+ self._working_set = None
+
+ @property
+ def exists(self):
+        # We also need to check lib_path because different Python versions
+        # create different library paths.
+ return os.path.isdir(self.path) and os.path.isdir(self.lib_path)
+
+ @property
+ def broken_link(self):
+ python_link = os.path.join(self.path, ".Python")
+ return os.path.lexists(python_link) and not os.path.exists(python_link)
+
+ def create(self):
+ if os.path.exists(self.path):
+ shutil.rmtree(self.path)
+ self._working_set = None
+ call(self.virtualenv, self.path, "-p", sys.executable)
+
+ @property
+ def bin_path(self):
+ if sys.platform in ("win32", "cygwin"):
+ return os.path.join(self.path, "Scripts")
+ return os.path.join(self.path, "bin")
+
+ @property
+ def pip_path(self):
+ path = find_executable("pip3", self.bin_path)
+ if path is None:
+ raise ValueError("pip3 not found")
+ return path
+
+ @property
+ def lib_path(self):
+ base = self.path
+
+        # this block is adapted from virtualenv 16.4.3
+ IS_PYPY = hasattr(sys, "pypy_version_info")
+ IS_JYTHON = sys.platform.startswith("java")
+ if IS_JYTHON:
+ site_packages = os.path.join(base, "Lib", "site-packages")
+ elif IS_PYPY:
+ site_packages = os.path.join(base, "site-packages")
+ else:
+ IS_WIN = sys.platform == "win32"
+ if IS_WIN:
+ site_packages = os.path.join(base, "Lib", "site-packages")
+ else:
+                site_packages = os.path.join(base, "lib", f"python{sys.version_info[0]}.{sys.version_info[1]}", "site-packages")
+
+ return site_packages
+
+ @property
+ def working_set(self):
+ if not self.exists:
+ raise ValueError("trying to read working_set when venv doesn't exist")
+
+ if self._working_set is None:
+ self._working_set = get_pkg_resources().WorkingSet((self.lib_path,))
+
+ return self._working_set
+
+ def activate(self):
+ if sys.platform == 'darwin':
+ # The default Python on macOS sets a __PYVENV_LAUNCHER__ environment
+ # variable which affects invocation of python (e.g. via pip) in a
+ # virtualenv. Unset it if present to avoid this. More background:
+ # https://github.com/web-platform-tests/wpt/issues/27377
+ # https://github.com/python/cpython/pull/9516
+ os.environ.pop('__PYVENV_LAUNCHER__', None)
+ path = os.path.join(self.bin_path, "activate_this.py")
+ with open(path) as f:
+ exec(f.read(), {"__file__": path})
+
+ def start(self):
+ if not self.exists or self.broken_link:
+ self.create()
+ self.activate()
+
+ def install(self, *requirements):
+ try:
+ self.working_set.require(*requirements)
+ except Exception:
+ pass
+ else:
+ return
+
+ # `--prefer-binary` guards against race conditions when installation
+ # occurs while packages are in the process of being published.
+ call(self.pip_path, "install", "--prefer-binary", *requirements)
+
+ def install_requirements(self, requirements_path):
+ with open(requirements_path) as f:
+ try:
+ self.working_set.require(f.read())
+ except Exception:
+ pass
+ else:
+ return
+
+ # `--prefer-binary` guards against race conditions when installation
+ # occurs while packages are in the process of being published.
+ call(
+ self.pip_path, "install", "--prefer-binary", "-r", requirements_path
+ )
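A minimal usage sketch for the class above, assuming a throwaway path: start() creates and activates the environment only when needed, and the install helpers skip pip when the working set already satisfies the requested requirements:

    venv = Virtualenv("/tmp/wpt-venv", skip_virtualenv_setup=False)  # hypothetical path
    venv.start()
    venv.install("mozlog")                         # individual requirement specifiers
    venv.install_requirements("requirements.txt")  # or a pip requirements file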
diff --git a/testing/web-platform/tests/tools/wpt/wpt.py b/testing/web-platform/tests/tools/wpt/wpt.py
new file mode 100644
index 0000000000..74943a52f3
--- /dev/null
+++ b/testing/web-platform/tests/tools/wpt/wpt.py
@@ -0,0 +1,240 @@
+# mypy: allow-untyped-defs
+
+import argparse
+import json
+import logging
+import multiprocessing
+import os
+import sys
+
+from tools import localpaths # noqa: F401
+
+from . import virtualenv
+
+
+here = os.path.dirname(__file__)
+wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
+
+
+def load_conditional_requirements(props, base_dir):
+ """Load conditional requirements from commands.json."""
+
+ conditional_requirements = props.get("conditional_requirements")
+ if not conditional_requirements:
+ return {}
+
+ commandline_flag_requirements = {}
+ for key, value in conditional_requirements.items():
+ if key == "commandline_flag":
+ for flag_name, requirements_paths in value.items():
+ commandline_flag_requirements[flag_name] = [
+ os.path.join(base_dir, path) for path in requirements_paths]
+ else:
+ raise KeyError(
+ f'Unsupported conditional requirement key: {key}')
+
+ return {
+ "commandline_flag": commandline_flag_requirements,
+ }
+
+
+def load_commands():
+ rv = {}
+ with open(os.path.join(here, "paths")) as f:
+ paths = [item.strip().replace("/", os.path.sep) for item in f if item.strip()]
+ for path in paths:
+ abs_path = os.path.join(wpt_root, path, "commands.json")
+ base_dir = os.path.dirname(abs_path)
+ with open(abs_path) as f:
+ data = json.load(f)
+ for command, props in data.items():
+ assert "path" in props
+ assert "script" in props
+ rv[command] = {
+ "path": os.path.join(base_dir, props["path"]),
+ "script": props["script"],
+ "parser": props.get("parser"),
+ "parse_known": props.get("parse_known", False),
+ "help": props.get("help"),
+ "virtualenv": props.get("virtualenv", True),
+ "requirements": [os.path.join(base_dir, item)
+ for item in props.get("requirements", [])]
+ }
+
+ rv[command]["conditional_requirements"] = load_conditional_requirements(
+ props, base_dir)
+
+ if rv[command]["requirements"] or rv[command]["conditional_requirements"]:
+ assert rv[command]["virtualenv"]
+ return rv
+
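For reference, the structure that load_commands() and load_conditional_requirements() expect from a commands.json entry looks roughly like the following, expressed here as a Python literal; the command name, paths and flag name are illustrative, only the keys are taken from the code above:

    example_commands_json = {
        "run": {
            "path": "run.py",
            "script": "run",
            "parser": "create_parser",
            "parse_known": False,
            "help": "Run tests in a browser",
            "virtualenv": True,
            "requirements": ["requirements.txt"],
            "conditional_requirements": {
                "commandline_flag": {
                    "some_flag": ["requirements_some_flag.txt"]
                }
            }
        }
    }
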
+
+def parse_args(argv, commands=load_commands()):
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--venv", action="store", help="Path to an existing virtualenv to use")
+ parser.add_argument("--skip-venv-setup", action="store_true",
+ dest="skip_venv_setup",
+ help="Whether to use the virtualenv as-is. Must set --venv as well")
+ parser.add_argument("--debug", action="store_true", help="Run the debugger in case of an exception")
+ subparsers = parser.add_subparsers(dest="command")
+ for command, props in commands.items():
+ subparsers.add_parser(command, help=props["help"], add_help=False)
+
+ if not argv:
+ parser.print_help()
+ return None, None
+
+ args, extra = parser.parse_known_args(argv)
+
+ return args, extra
+
+
+def import_command(prog, command, props):
+ # This currently requires the path to be a module,
+ # which probably isn't ideal but it means that relative
+ # imports inside the script work
+ rel_path = os.path.relpath(props["path"], wpt_root)
+
+ parts = os.path.splitext(rel_path)[0].split(os.path.sep)
+
+ mod_name = ".".join(parts)
+
+ mod = __import__(mod_name)
+ for part in parts[1:]:
+ mod = getattr(mod, part)
+
+ script = getattr(mod, props["script"])
+ if props["parser"] is not None:
+ parser = getattr(mod, props["parser"])()
+ parser.prog = f"{os.path.basename(prog)} {command}"
+ else:
+ parser = None
+
+ return script, parser
+
+
+def create_complete_parser():
+ """Eagerly load all subparsers. This involves more work than is required
+ for typical command-line usage. It is maintained for the purposes of
+ documentation generation as implemented in WPT's top-level `/docs`
+ directory."""
+
+ commands = load_commands()
+ parser = argparse.ArgumentParser()
+ subparsers = parser.add_subparsers()
+
+ # We should already be in a virtual environment from the top-level
+ # `wpt build-docs` command but we need to look up the environment to
+ # find out where it's located.
+ venv_path = os.environ["VIRTUAL_ENV"]
+ venv = virtualenv.Virtualenv(venv_path, True)
+
+ for command in commands:
+ props = commands[command]
+
+ for path in props.get("requirements", []):
+ venv.install_requirements(path)
+
+ subparser = import_command('wpt', command, props)[1]
+ if not subparser:
+ continue
+
+ subparsers.add_parser(command,
+ help=props["help"],
+ add_help=False,
+ parents=[subparser])
+
+ return parser
+
+
+def venv_dir():
+ return f"_venv{sys.version_info[0]}"
+
+
+def setup_virtualenv(path, skip_venv_setup, props):
+ if skip_venv_setup and path is None:
+ raise ValueError("Must set --venv when --skip-venv-setup is used")
+ should_skip_setup = path is not None and skip_venv_setup
+ if path is None:
+ path = os.path.join(wpt_root, venv_dir())
+ venv = virtualenv.Virtualenv(path, should_skip_setup)
+ if not should_skip_setup:
+ venv.start()
+ for path in props["requirements"]:
+ venv.install_requirements(path)
+ return venv
+
+
+def install_command_flag_requirements(venv, kwargs, requirements):
+ for command_flag_name, requirement_paths in requirements.items():
+ if command_flag_name in kwargs:
+ for path in requirement_paths:
+ venv.install_requirements(path)
+
+
+def main(prog=None, argv=None):
+ logging.basicConfig(level=logging.INFO)
+ # Ensure we use the spawn start method for all multiprocessing
+ try:
+ multiprocessing.set_start_method('spawn')
+ except RuntimeError as e:
+ # This can happen if we call back into wpt having already set the context
+ start_method = multiprocessing.get_start_method()
+ if start_method != "spawn":
+ logging.critical("The multiprocessing start method was set to %s by a caller", start_method)
+ raise e
+
+ if prog is None:
+ prog = sys.argv[0]
+ if argv is None:
+ argv = sys.argv[1:]
+
+ commands = load_commands()
+
+ main_args, command_args = parse_args(argv, commands)
+
+ if not main_args:
+ return
+
+ command = main_args.command
+ props = commands[command]
+ venv = None
+ if props["virtualenv"]:
+ venv = setup_virtualenv(main_args.venv, main_args.skip_venv_setup, props)
+ script, parser = import_command(prog, command, props)
+ if parser:
+ if props["parse_known"]:
+ kwargs, extras = parser.parse_known_args(command_args)
+ extras = (extras,)
+ kwargs = vars(kwargs)
+ else:
+ extras = ()
+ kwargs = vars(parser.parse_args(command_args))
+ else:
+ extras = ()
+ kwargs = {}
+
+ if venv is not None:
+ requirements = props["conditional_requirements"].get("commandline_flag")
+ if requirements is not None and not main_args.skip_venv_setup:
+ install_command_flag_requirements(venv, kwargs, requirements)
+ args = (venv,) + extras
+ else:
+ args = extras
+
+ if script:
+ try:
+ rv = script(*args, **kwargs)
+ if rv is not None:
+ sys.exit(int(rv))
+ except Exception:
+ if main_args.debug:
+ import pdb
+ pdb.post_mortem()
+ else:
+ raise
+ sys.exit(0)
+
+
+if __name__ == "__main__":
+ main() # type: ignore
diff --git a/testing/web-platform/tests/tools/wptrunner/.gitignore b/testing/web-platform/tests/tools/wptrunner/.gitignore
new file mode 100644
index 0000000000..495616ef1d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/.gitignore
@@ -0,0 +1,8 @@
+*.py[co]
+*~
+*#
+\#*
+_virtualenv
+test/test.cfg
+test/metadata/MANIFEST.json
+wptrunner.egg-info
diff --git a/testing/web-platform/tests/tools/wptrunner/MANIFEST.in b/testing/web-platform/tests/tools/wptrunner/MANIFEST.in
new file mode 100644
index 0000000000..d36344f966
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/MANIFEST.in
@@ -0,0 +1,6 @@
+exclude MANIFEST.in
+include requirements.txt
+include wptrunner.default.ini
+include wptrunner/testharness_runner.html
+include wptrunner/*.js
+include wptrunner/executors/*.js
diff --git a/testing/web-platform/tests/tools/wptrunner/README.rst b/testing/web-platform/tests/tools/wptrunner/README.rst
new file mode 100644
index 0000000000..dae7d6ade7
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/README.rst
@@ -0,0 +1,14 @@
+wptrunner: A web-platform-tests harness
+=======================================
+
+wptrunner is a harness for running the W3C `web-platform-tests testsuite`_.
+
+.. toctree::
+ :maxdepth: 2
+
+ docs/expectation
+ docs/commands
+ docs/design
+ docs/internals
+
+.. _`web-platform-tests testsuite`: https://github.com/web-platform-tests/wpt
diff --git a/testing/web-platform/tests/tools/wptrunner/docs/architecture.svg b/testing/web-platform/tests/tools/wptrunner/docs/architecture.svg
new file mode 100644
index 0000000000..b8d5aa21c1
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/docs/architecture.svg
@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="780px" height="1087px" version="1.1"><defs><linearGradient x1="0%" y1="0%" x2="0%" y2="100%" id="mx-gradient-a9c4eb-1-a9c4eb-1-s-0"><stop offset="0%" style="stop-color:#A9C4EB"/><stop offset="100%" style="stop-color:#A9C4EB"/></linearGradient></defs><g transform="translate(0.5,0.5)"><rect x="498" y="498" width="120" height="60" fill="#e6d0de" stroke="#000000" pointer-events="none"/><g transform="translate(500,521)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">TestRunner</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><rect x="338" y="778" width="120" height="60" fill="#f19c99" stroke="#000000" pointer-events="none"/><g transform="translate(340,801)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">Product under test</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><rect x="338" y="388" width="120" height="60" fill="#e6d0de" stroke="#000000" pointer-events="none"/><g transform="translate(340,411)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">TestRunnerManager</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><rect x="338" y="228" width="120" height="60" fill="#e6d0de" stroke="#000000" pointer-events="none"/><g transform="translate(340,251)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">ManagerGroup</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><rect x="658" y="608" width="120" height="60" fill="#ffce9f" stroke="#000000" pointer-events="none"/><g transform="translate(660,631)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; 
white-space: normal; text-align: center;">Executor</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><rect x="338" y="498" width="120" height="60" fill="url(#mx-gradient-a9c4eb-1-a9c4eb-1-s-0)" stroke="#000000" pointer-events="none"/><g transform="translate(340,521)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">Browser</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><path d="M 398 288 L 398 382" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 398 387 L 395 380 L 398 382 L 402 380 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 398 448 L 398 492" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 398 497 L 395 490 L 398 492 L 402 490 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 618 528 L 684 603" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 687 607 L 680 604 L 684 603 L 685 600 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><rect x="498" y="608" width="120" height="60" fill="#a9c4eb" stroke="#000000" pointer-events="none"/><g transform="translate(500,631)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">ExecutorBrowser</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><path d="M 624 638 L 658 638" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 619 638 L 626 635 L 624 638 L 626 642 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 428 448 L 552 496" fill="none" stroke="#000000" stroke-miterlimit="10" stroke-dasharray="3 3" pointer-events="none"/><path d="M 557 498 L 549 498 L 552 496 L 552 492 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 398 558 L 398 772" fill="none" stroke="#000000" stroke-miterlimit="10" stroke-dasharray="3 3" pointer-events="none"/><path d="M 398 777 L 395 770 L 398 772 L 402 770 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><rect x="338" y="48" width="120" height="60" fill="#e6d0de" stroke="#000000" pointer-events="none"/><g transform="translate(340,71)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: 
center;">run_tests</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><path d="M 458 78 L 652 78" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 657 78 L 650 82 L 652 78 L 650 75 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><rect x="658" y="48" width="120" height="60" fill="#e6d0de" stroke="#000000" pointer-events="none"/><g transform="translate(660,71)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">TestLoader</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><rect x="71" y="48" width="120" height="60" fill="#e6d0de" stroke="#000000" pointer-events="none"/><g transform="translate(73,71)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">TestEnvironment</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><rect x="151" y="618" width="120" height="60" fill="#b9e0a5" stroke="#000000" pointer-events="none"/><g transform="translate(153,641)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">wptserve</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><rect x="1" y="618" width="120" height="60" fill="#b9e0a5" stroke="#000000" pointer-events="none"/><g transform="translate(3,641)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">pywebsocket</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><path d="M 338 78 L 197 78" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 192 78 L 199 75 L 197 78 L 199 82 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 101 308 L 62 612" fill="none" stroke="#000000" stroke-miterlimit="10" stroke-dasharray="3 3" pointer-events="none"/><path d="M 61 617 L 59 610 L 62 612 L 66 610 Z" fill="#000000" stroke="#000000" 
stroke-miterlimit="10" pointer-events="none"/><path d="M 161 308 L 204 612" fill="none" stroke="#000000" stroke-miterlimit="10" stroke-dasharray="3 3" pointer-events="none"/><path d="M 204 617 L 200 610 L 204 612 L 207 609 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 338 823 L 61 678" fill="none" stroke="#000000" stroke-miterlimit="10" stroke-dasharray="3 3" pointer-events="none"/><path d="M 211 678 L 338 793" fill="none" stroke="#000000" stroke-miterlimit="10" stroke-dasharray="3 3" pointer-events="none"/><path d="M 398 108 L 398 222" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 398 227 L 395 220 L 398 222 L 402 220 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 706 288 L 618 513" fill="none" stroke="#000000" stroke-miterlimit="10" stroke-dasharray="3 3" pointer-events="none"/><rect x="658" y="388" width="70" height="40" fill="none" stroke="none" pointer-events="none"/><g fill="#000000" font-family="Helvetica" text-anchor="middle" font-size="12px"><text x="693" y="412">Queue.get</text></g><path d="M 458 808 L 718 668" fill="none" stroke="#000000" stroke-miterlimit="10" stroke-dasharray="3 3" pointer-events="none"/><rect x="71" y="248" width="120" height="60" fill="#b9e0a5" stroke="#000000" pointer-events="none"/><g transform="translate(73,271)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">serve.py</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><path d="M 131 108 L 131 242" fill="none" stroke="#000000" stroke-miterlimit="10" stroke-dasharray="3 3" pointer-events="none"/><path d="M 131 247 L 128 240 L 131 242 L 135 240 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 88 973 L 132 973" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 137 973 L 130 977 L 132 973 L 130 970 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><rect x="138" y="1018" width="180" height="30" fill="none" stroke="none" pointer-events="none"/><g fill="#000000" font-family="Helvetica" text-anchor="middle" font-size="12px"><text x="228" y="1037">Communication (cross process)</text></g><path d="M 88 1002 L 132 1002" fill="none" stroke="#000000" stroke-miterlimit="10" stroke-dasharray="3 3" pointer-events="none"/><path d="M 137 1002 L 130 1006 L 132 1002 L 130 999 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><rect x="138" y="958" width="180" height="30" fill="none" stroke="none" pointer-events="none"/><g fill="#000000" font-family="Helvetica" text-anchor="middle" font-size="12px"><text x="228" y="977">Ownership (same process)</text></g><path d="M 88 1033 L 138 1033" fill="none" stroke="#000000" stroke-miterlimit="10" stroke-dasharray="3 3" pointer-events="none"/><rect x="143" y="988" width="180" height="30" fill="none" stroke="none" pointer-events="none"/><g fill="#000000" font-family="Helvetica" text-anchor="middle" font-size="12px"><text x="233" y="1007">Ownership (cross process)</text></g><rect x="428" y="966" 
width="50" height="15" fill="#e6d0de" stroke="#000000" pointer-events="none"/><rect x="428" y="990" width="50" height="15" fill="#a9c4eb" stroke="#000000" pointer-events="none"/><rect x="428" y="1015" width="50" height="15" fill="#ffce9f" stroke="#000000" pointer-events="none"/><rect x="428" y="1063" width="50" height="15" fill="#f19c99" stroke="#000000" pointer-events="none"/><rect x="428" y="1038" width="50" height="15" fill="#b9e0a5" stroke="#000000" pointer-events="none"/><rect x="485" y="958" width="90" height="30" fill="none" stroke="none" pointer-events="none"/><g fill="#000000" font-family="Helvetica" text-anchor="middle" font-size="12px"><text x="530" y="977">wptrunner class</text></g><rect x="486" y="983" width="150" height="30" fill="none" stroke="none" pointer-events="none"/><g fill="#000000" font-family="Helvetica" text-anchor="middle" font-size="12px"><text x="561" y="1002">Per-product wptrunner class</text></g><rect x="486" y="1008" width="150" height="30" fill="none" stroke="none" pointer-events="none"/><g fill="#000000" font-family="Helvetica" text-anchor="middle" font-size="12px"><text x="561" y="1027">Per-protocol wptrunner class</text></g><rect x="491" y="1031" width="150" height="30" fill="none" stroke="none" pointer-events="none"/><g fill="#000000" font-family="Helvetica" text-anchor="middle" font-size="12px"><text x="566" y="1050">Web-platform-tests component</text></g><rect x="486" y="1055" width="90" height="30" fill="none" stroke="none" pointer-events="none"/><g fill="#000000" font-family="Helvetica" text-anchor="middle" font-size="12px"><text x="531" y="1074">Browser process</text></g><path d="M 398 8 L 398 42" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 398 47 L 395 40 L 398 42 L 402 40 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><rect x="478" y="388" width="120" height="60" fill-opacity="0.5" fill="#e6d0de" stroke="#000000" stroke-opacity="0.5" pointer-events="none"/><g transform="translate(480,411)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">TestRunnerManager</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><path d="M 398 288 L 533 384" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 537 387 L 529 386 L 533 384 L 533 380 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><rect x="198" y="388" width="120" height="60" fill-opacity="0.5" fill="#e6d0de" stroke="#000000" stroke-opacity="0.5" pointer-events="none"/><g transform="translate(200,411)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">TestRunnerManager</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><path 
d="M 398 288 L 263 384" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 259 387 L 263 380 L 263 384 L 267 386 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><rect x="575" y="748" width="110" height="40" fill="none" stroke="none" pointer-events="none"/><g fill="#000000" font-family="Helvetica" text-anchor="middle" font-size="12px"><text x="630" y="758">Browser control</text><text x="630" y="772">protocol</text><text x="630" y="786">(e.g. WebDriver)</text></g><rect x="258" y="708" width="80" height="40" fill="none" stroke="none" pointer-events="none"/><g fill="#000000" font-family="Helvetica" text-anchor="middle" font-size="12px"><text x="298" y="732">HTTP</text></g><rect x="111" y="728" width="80" height="40" fill="none" stroke="none" pointer-events="none"/><g fill="#000000" font-family="Helvetica" text-anchor="middle" font-size="12px"><text x="151" y="752">websockets</text></g><rect x="658" y="228" width="120" height="60" fill="#e6d0de" stroke="#000000" pointer-events="none"/><g transform="translate(660,251)"><switch><foreignObject pointer-events="all" width="116" height="15" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.26; vertical-align: top; width: 116px; white-space: normal; text-align: center;">Tests Queue</div></foreignObject><text x="58" y="14" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g><path d="M 718 108 L 718 222" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 718 227 L 715 220 L 718 222 L 722 220 Z" fill="#000000" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/><path d="M 428 970 L 428 970" fill="none" stroke="#000000" stroke-miterlimit="10" pointer-events="none"/></g></svg>
diff --git a/testing/web-platform/tests/tools/wptrunner/docs/commands.rst b/testing/web-platform/tests/tools/wptrunner/docs/commands.rst
new file mode 100644
index 0000000000..02147a7129
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/docs/commands.rst
@@ -0,0 +1,79 @@
+commands.json
+=============
+
+:code:`commands.json` files define how subcommands are executed by the
+:code:`./wpt` command. :code:`wpt` searches all command.json files under the top
+directory and sets up subcommands from these JSON files. A typical commands.json
+would look like the following::
+
+ {
+ "foo": {
+ "path": "foo.py",
+ "script": "run",
+ "parser": "get_parser",
+ "help": "Run foo"
+ },
+ "bar": {
+ "path": "bar.py",
+ "script": "run",
+ "virtualenv": true,
+ "requirements": [
+ "requirements.txt"
+ ]
+ }
+ }
+
+Each key of the top-level object defines the name of a subcommand, and its value
+(a properties object) specifies how the subcommand is executed. Each properties
+object must contain :code:`path` and :code:`script` fields and may contain
+additional fields. All paths are relative to the location of the commands.json file.
+
+:code:`path`
+ The path to a Python script that implements the subcommand.
+
+:code:`script`
+ The name of a function that is used as the entry point of the subcommand.
+
+:code:`parser`
+ The name of a function that creates an argparse parser for the subcommand.
+
+:code:`parse_known`
+ When True, `parse_known_args() <https://docs.python.org/3/library/argparse.html#argparse.ArgumentParser.parse_known_args>`_
+ is used instead of parse_args() for the subcommand. Defaults to False.
+
+:code:`help`
+ Brief description of the subcommand.
+
+:code:`virtualenv`
+ When True, the subcommand is executed within a virtualenv environment. Defaults
+ to True.
+
+:code:`requirements`
+ A list of paths where each path specifies a requirements.txt. All requirements
+ listed in these files are installed into the virtualenv environment before
+ running the subcommand. :code:`virtualenv` must be true when this field is
+ set.
+
+:code:`conditional_requirements`
+ A key-value object. Each key represents a condition, and its value represents
+ additional requirements when the condition is met. The requirements have the
+ same format as :code:`requirements`. Currently "commandline_flag" is the only
+ supported key. "commandline_flag" is used to specify requirements needed for a
+ certain command line flag of the subcommand. For example, given the following
+ commands.json::
+
+ "baz": {
+ "path": "baz.py",
+ "script": "run",
+ "virtualenv": true,
+ "conditional_requirements": {
+ "commandline_flag": {
+ "enable_feature1": [
+ "requirements_feature1.txt"
+ ]
+ }
+ }
+ }
+
+ Requirements in :code:`requirements_feature1.txt` are installed only when
+ :code:`--enable-feature1` is passed to :code:`./wpt baz`.
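+
+For reference, a subcommand module matching the ``foo`` entry in the first
+example might look like the following sketch. The module and function names
+are taken from that example; the exact arguments that :code:`./wpt` passes to
+the entry point are not documented here, so the signatures below are
+assumptions::
+
+  # foo.py -- hypothetical subcommand module referenced by commands.json
+  import argparse
+
+  def get_parser():
+      # Named by the "parser" field; builds the argument parser used for
+      # the subcommand's command line flags.
+      parser = argparse.ArgumentParser()
+      parser.add_argument("--verbose", action="store_true",
+                          help="Emit extra logging")
+      return parser
+
+  def run(*args, **kwargs):
+      # Named by the "script" field; called as the entry point when
+      # ``./wpt foo`` is invoked.
+      print("Running foo")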
diff --git a/testing/web-platform/tests/tools/wptrunner/docs/design.rst b/testing/web-platform/tests/tools/wptrunner/docs/design.rst
new file mode 100644
index 0000000000..30f82711a5
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/docs/design.rst
@@ -0,0 +1,108 @@
+wptrunner Design
+================
+
+The design of wptrunner is intended to meet the following
+requirements:
+
+ * Possible to run tests from W3C web-platform-tests.
+
+ * Tests should be run as fast as possible. In particular it should
+ not be necessary to restart the browser between tests, or similar.
+
+ * As far as possible, the tests should run in a "normal" browser and
+ browsing context. In particular many tests assume that they are
+ running in a top-level browsing context, so we must avoid the use
+ of an ``iframe`` test container.
+
+ * It must be possible to deal with all kinds of behaviour of the
+ browser under test, for example, crashing, hanging, etc.
+
+ * It should be possible to add support for new platforms and browsers
+ with minimal code changes.
+
+ * It must be possible to run tests in parallel to further improve
+ performance.
+
+ * Test output must be in a machine readable form.
+
+Architecture
+------------
+
+In order to meet the above requirements, wptrunner is designed to
+push as much of the test scheduling as possible into the harness. This
+allows the harness to monitor the state of the browser and perform
+appropriate action if it gets into an unwanted state, e.g. killing the
+browser if it appears to be hung.
+
+The harness will typically communicate with the browser via some remote
+control protocol such as WebDriver. However for browsers where no such
+protocol is supported, other implementation strategies are possible,
+typically at the expense of speed.
+
+The overall architecture of wptrunner is shown in the diagram below:
+
+.. image:: architecture.svg
+
+.. currentmodule:: wptrunner
+
+The main entry point to the code is :py:func:`~wptrunner.run_tests` in
+``wptrunner.py``. This is responsible for setting up the test
+environment, loading the list of tests to be executed, and invoking
+the remainder of the code to actually execute some tests.
+
+The test environment is encapsulated in the
+:py:class:`~environment.TestEnvironment` class. This defers to code in
+``web-platform-tests`` which actually starts the required servers to
+run the tests.
+
+The set of tests to run is defined by the
+:py:class:`~testloader.TestLoader`. This is constructed with a
+:py:class:`~testloader.TestFilter` (not shown), which takes any filter arguments
+from the command line to restrict the set of tests that will be
+run. The :py:class:`~testloader.TestLoader` reads both the ``web-platform-tests``
+JSON manifest and the expectation data stored in ini files and
+produces a :py:class:`multiprocessing.Queue` of tests to run, and
+their expected results.
+
+Actually running the tests happens through the
+:py:class:`~testrunner.ManagerGroup` object. This takes the :py:class:`~multiprocessing.Queue` of
+tests to be run and starts a :py:class:`~testrunner.TestRunnerManager` for each
+instance of the browser under test that will be started. These
+:py:class:`~testrunner.TestRunnerManager` instances are each started in their own
+thread.
+
+A :py:class:`~testrunner.TestRunnerManager` coordinates starting the product under
+test, and outputting results from the test. In the case that the test
+has timed out or the browser has crashed, it has to restart the
+browser to ensure the test run can continue. The functionality for
+initialising the browser under test, and probing its state
+(e.g. whether the process is still alive) is implemented through a
+:py:class:`~browsers.base.Browser` object. An implementation of this class must be
+provided for each product that is supported.
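+
+As a minimal sketch (the base class and abstract method names below are those
+defined in ``wptrunner.browsers.base``; the product-specific details are
+elided and the class name is hypothetical), such an implementation might look
+like::
+
+  from wptrunner.browsers.base import Browser
+
+  class MyProductBrowser(Browser):
+      def start(self, group_metadata, **kwargs):
+          pass  # launch the browser process for the product
+
+      def stop(self, force=False):
+          pass  # terminate the browser process
+
+      def pid(self):
+          return None  # pid of the browser process, or None if unknown
+
+      def is_alive(self):
+          return False  # whether the browser process is still running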
+
+The functionality for actually running the tests is provided by a
+:py:class:`~testrunner.TestRunner` object. :py:class:`~testrunner.TestRunner` instances are
+run in their own child process created with the
+:py:mod:`multiprocessing` module. This allows them to run concurrently
+and to be killed and restarted as required. Communication between the
+:py:class:`~testrunner.TestRunnerManager` and the :py:class:`~testrunner.TestRunner` is
+provided by a pair of queues, one for sending messages in each
+direction. In particular test results are sent from the
+:py:class:`~testrunner.TestRunner` to the :py:class:`~testrunner.TestRunnerManager` using one
+of these queues.
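+
+The following standalone sketch illustrates the queue-pair pattern described
+above; it is not wptrunner code, and the message names are invented for the
+example::
+
+  import multiprocessing
+
+  def runner(command_queue, result_queue):
+      # Stand-in for TestRunner: pull commands from one queue and push
+      # results onto the other until told to stop.
+      while True:
+          command, payload = command_queue.get()
+          if command == "stop":
+              break
+          result_queue.put(("test_ended", payload, "PASS"))
+
+  if __name__ == "__main__":
+      to_runner = multiprocessing.Queue()
+      from_runner = multiprocessing.Queue()
+      proc = multiprocessing.Process(target=runner,
+                                     args=(to_runner, from_runner))
+      proc.start()
+      to_runner.put(("run_test", "/spec/section/file.html"))
+      print(from_runner.get())
+      to_runner.put(("stop", None))
+      proc.join()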
+
+The :py:class:`~testrunner.TestRunner` object is generic in that the same
+:py:class:`~testrunner.TestRunner` is used regardless of the product under
+test. However the details of how to run the test may vary greatly with
+the product since different products support different remote control
+protocols (or none at all). These protocol-specific parts are placed
+in the :py:class:`~executors.base.TestExecutor` object. There is typically a different
+:py:class:`~executors.base.TestExecutor` class for each combination of control protocol
+and test type. The :py:class:`~testrunner.TestRunner` is responsible for pulling
+each test off the :py:class:`multiprocessing.Queue` of tests and passing it down to
+the :py:class:`~executors.base.TestExecutor`.
+
+The executor often requires access to details of the particular
+browser instance that it is testing so that it knows, for example, which port
+to connect to in order to send commands to the browser. These details are
+encapsulated in the :py:class:`~browsers.base.ExecutorBrowser` class.
diff --git a/testing/web-platform/tests/tools/wptrunner/docs/expectation.rst b/testing/web-platform/tests/tools/wptrunner/docs/expectation.rst
new file mode 100644
index 0000000000..fea676565b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/docs/expectation.rst
@@ -0,0 +1,366 @@
+Test Metadata
+=============
+
+Directory Layout
+----------------
+
+Metadata files must be stored under the ``metadata`` directory passed
+to the test runner. The directory layout follows that of
+web-platform-tests with each test source path having a corresponding
+metadata file. Because the metadata path is based on the source file
+path, files that generate multiple URLs, e.g. tests with multiple
+variants or multi-global tests generated from an ``any.js`` input
+file, share the same metadata file for all their corresponding
+tests. The metadata path under the ``metadata`` directory is the same
+as the source path under the ``tests`` directory, with an additional
+``.ini`` suffix.
+
+For example a test with URL::
+
+ /spec/section/file.html?query=param
+
+generated from a source file with path::
+
+ <tests root>/spec/section/file.html
+
+would have a metadata file ::
+
+ <metadata root>/spec/section/file.html.ini
+
+As an optimisation, files which produce only default results
+(i.e. ``PASS`` or ``OK``), and which don't have any other associated
+metadata, don't require a corresponding metadata file.
+
+Directory Metadata
+~~~~~~~~~~~~~~~~~~
+
+In addition to per-test metadata, default metadata can be applied to
+all the tests in a given source location, using a ``__dir__.ini``
+metadata file. For example to apply metadata to all tests under
+``<tests root>/spec/`` add the metadata in ``<tests
+root>/spec/__dir__.ini``.
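+
+For example, a hypothetical ``__dir__.ini`` that disables every test under
+the directory, using a bug URL as the message, might contain::
+
+  disabled: https://bugs.example.com/show_bug.cgi?id=12345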
+
+Metadata Format
+---------------
+The format of the metadata files is based on the ini format. Files are
+divided into sections, each (apart from the root section) having a
+heading enclosed in square braces. Within each section are key-value
+pairs. There are several notable differences from standard .ini files,
+however:
+
+ * Sections may be hierarchically nested, with significant whitespace
+ indicating nesting depth.
+
+ * Only ``:`` is valid as a key/value separator.
+
+A simple example of a metadata file is::
+
+ root_key: root_value
+
+ [section]
+ section_key: section_value
+
+ [subsection]
+ subsection_key: subsection_value
+
+ [another_section]
+ another_key: [list, value]
+
+Conditional Values
+~~~~~~~~~~~~~~~~~~
+
+In order to support values that depend on some external data, the
+right hand side of a key/value pair can take a set of conditionals
+rather than a plain value. These values are placed on a new line
+following the key, with significant indentation. Conditional values
+are prefixed with ``if`` and terminated with a colon, for example::
+
+ key:
+ if cond1: value1
+ if cond2: value2
+ value3
+
+In this example, the value associated with ``key`` is determined by
+first evaluating ``cond1`` against external data. If that is true,
+``key`` is assigned the value ``value1``, otherwise ``cond2`` is
+evaluated in the same way. If both ``cond1`` and ``cond2`` are false,
+the unconditional ``value3`` is used.
+
+Conditions themselves use a Python-like expression syntax. Operands
+can either be variables, corresponding to data passed in, numbers
+(integer or floating point; exponential notation is not supported) or
+quote-delimited strings. Equality is tested using ``==`` and
+inequality by ``!=``. The operators ``and``, ``or`` and ``not`` are
+used in the expected way. Parentheses can also be used for
+grouping. For example::
+
+ key:
+ if (a == 2 or a == 3) and b == "abc": value1
+ if a == 1 or b != "abc": value2
+ value3
+
+Here ``a`` and ``b`` are variables, the value of which will be
+supplied when the metadata is used.
+
+Web-Platform-Tests Metadata
+---------------------------
+
+When used for expectation data, metadata files have the following format:
+
+ * A section per test URL provided by the corresponding source file,
+ with the section heading being the part of the test URL following
+ the last ``/`` in the path (this allows multiple tests in a single
+ metadata file with the same path part of the URL, but different
+ query parts). This may be omitted if there's no non-default
+ metadata for the test.
+
+ * A subsection per subtest, with the heading being the title of the
+ subtest. This may be omitted if there's no non-default metadata for
+ the subtest.
+
+ * The following known keys:
+
+ :expected:
+ The expectation value or values of each (sub)test. In
+ the case this value is a list, the first value represents the
+ typical expected test outcome, and subsequent values indicate
+ known intermittent outcomes e.g. ``expected: [PASS, ERROR]``
+ would indicate a test that usually passes but has a known-flaky
+ ``ERROR`` outcome.
+
+ :disabled:
+ Any value apart from the special value ``@False``
+ indicates that the (sub)test is disabled and should either not be
+ run (for tests) or have its results ignored (for subtests).
+
+ :restart-after:
+ Any value apart from the special value ``@False``
+ indicates that the runner should restart the browser after running
+ this test (e.g. to clear out unwanted state).
+
+ :fuzzy:
+ Used for reftests. This is interpreted as a list of entries with the
+ same format as a ``<meta name=fuzzy>`` content value, each consisting of
+ an optional reference identifier followed by a colon, then a range
+ indicating the maximum permitted pixel difference per channel, then a
+ semicolon, then a range indicating the maximum permitted total
+ number of differing pixels. The reference identifier is either a
+ single relative URL, resolved against the base test URL, in which
+ case the fuzziness applies to any comparison with that URL, or
+ takes the form of an lhs URL, a comparison operator, and an rhs URL, in
+ which case the fuzziness applies only to comparisons involving that
+ specific pair of URLs. Some illustrative examples are given below.
+
+ :implementation-status:
+ One of the values ``implementing``,
+ ``not-implementing`` or ``default``. This is used in conjunction
+ with the ``--skip-implementation-status`` command line argument to
+ ``wptrunner`` to ignore certain features where running the test is
+ low value.
+
+ :tags:
+ A list of labels associated with a given test that can be
+ used in conjunction with the ``--tag`` command line argument to
+ ``wptrunner`` for test selection.
+
+ In addition there are extra arguments which are currently tied to
+ specific implementations. For example Gecko-based browsers support
+ ``min-asserts``, ``max-asserts``, ``prefs``, ``lsan-disabled``,
+ ``lsan-allowed``, ``lsan-max-stack-depth``, ``leak-allowed``, and
+ ``leak-threshold`` properties.
+
+ * Variables taken from the ``RunInfo`` data which describe the
+ configuration of the test run. Common properties include:
+
+ :product: A string giving the name of the browser under test
+ :browser_channel: A string giving the release channel of the browser under test
+ :debug: A Boolean indicating whether the build is a debug build
+ :os: A string indicating the operating system
+ :version: A string indicating the particular version of that operating system
+ :processor: A string indicating the processor architecture.
+
+ This information is typically provided by :py:mod:`mozinfo`, but
+ different environments may add additional information, and not all
+ the properties above are guaranteed to be present in all
+ environments. The definitive list of available properties for a
+ specific run may be determined by looking at the ``run_info`` key
+ in the ``wptreport.json`` output for the run (an illustrative snippet
+ is shown after this list).
+
+ * Top level keys are taken as defaults for the whole file. So, for
+ example, a top level key with ``expected: FAIL`` would indicate
+ that all tests and subtests in the file are expected to fail,
+ unless they have an ``expected`` key of their own.
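+
+As an illustration only, the ``run_info`` data for a run might resemble the
+following; the keys are those listed above, but the values are invented::
+
+  {
+    "product": "firefox",
+    "browser_channel": "nightly",
+    "debug": false,
+    "os": "linux",
+    "version": "Ubuntu 20.04",
+    "processor": "x86_64"
+  }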
+
+A simple example metadata file might look like::
+
+ [test.html?variant=basic]
+ type: testharness
+
+ [Test something unsupported]
+ expected: FAIL
+
+ [Test with intermittent statuses]
+ expected: [PASS, TIMEOUT]
+
+ [test.html?variant=broken]
+ expected: ERROR
+
+ [test.html?variant=unstable]
+ disabled: http://test.bugs.example.org/bugs/12345
+
+A more complex metadata file with conditional properties might be::
+
+ [canvas_test.html]
+ expected:
+ if os == "mac": FAIL
+ if os == "windows" and version == "XP": FAIL
+ PASS
+
+Note that ``PASS`` in the above works, but is unnecessary since it's
+the default expected result.
+
+A metadata file with fuzzy reftest values might be::
+
+ [reftest.html]
+ fuzzy: [10;200, ref1.html:20;200-300, subtest1.html==ref2.html:10-15;20]
+
+In this case the default fuzziness for any comparison would be to
+require a maximum difference per channel of less than or equal to 10
+and less than or equal to 200 total pixels different. For any
+comparison involving ref1.html on the right hand side, the limits
+would instead be a difference per channel not more than 20 and a total
+difference count of not less than 200 and not more than 300. For the
+specific comparison ``subtest1.html == ref2.html`` (both resolved against
+the test URL) these limits would instead be 10 to 15 and 0 to 20,
+respectively.
+
+Generating Expectation Files
+----------------------------
+
+wpt provides the ``wpt update-expectations`` command to generate
+expectation files from the results of a set of test runs. The basic
+syntax for this is::
+
+ ./wpt update-expectations [options] [logfile]...
+
+Each ``logfile`` is a wptreport log file from a previous run. These
+can be generated from wptrunner using the ``--log-wptreport`` option
+e.g. ``--log-wptreport=wptreport.json``.
+
+``update-expectations`` takes several options:
+
+--full Overwrite all the expectation data for any tests that have a
+ result in the passed log files, not just data for the same run
+ configuration.
+
+--disable-intermittent When updating test results, disable tests that
+ have inconsistent results across many
+ runs. This option may be followed by a message
+ giving the reason why the test is disabled. If no
+ message is provided, ``unstable`` is the default text.
+
+--update-intermittent When this option is used, the ``expected`` key
+ stores expected intermittent statuses in
+ addition to the primary expected status. If
+ there is more than one status, it appears as a
+ list. The default behaviour of this option is to
+ retain any existing intermittent statuses in the
+ list unless ``--remove-intermittent`` is
+ specified.
+
+--remove-intermittent This option is used in conjunction with
+ ``--update-intermittent``. When the
+ ``expected`` statuses are updated, any obsolete
+ intermittent statuses that did not occur in the
+ specified log files are removed from the list.
+
+Property Configuration
+~~~~~~~~~~~~~~~~~~~~~~
+
+In cases where the expectation depends on the run configuration, ``wpt
+update-expectations`` is able to generate conditional values. Because
+the relevant variables depend on the range of configurations that need
+to be covered, it's necessary to specify the list of configuration
+variables that should be used. This is done using a ``json`` format
+file that can be specified with the ``--properties-file`` command line
+argument to ``wpt update-expectations``. When this isn't supplied the
+defaults from ``<metadata root>/update_properties.json`` are used, if
+present.
+
+Properties File Format
+++++++++++++++++++++++
+
+The file is JSON formatted with two top-level keys:
+
+:``properties``:
+ A list of property names to consider for conditionals
+ e.g ``["product", "os"]``.
+
+:``dependents``:
+ An optional dictionary containing properties that
+ should only be used as "tie-breakers" when differentiating based on a
+ specific top-level property alone has failed. This is useful when the
+ dependent property is always more specific than the top-level
+ property, but less understandable when used directly. For example the
+ ``version`` property covering different OS versions is typically
+ unique amongst different operating systems, but using it when the
+ ``os`` property would do instead is likely to produce metadata that's
+ too specific to the current configuration and more difficult to
+ read. But where there are multiple versions of the same operating
+ system with different results, it can be necessary. So specifying
+ ``{"os": ["version"]}`` as a dependent property means that the
+ ``version`` property will only be used if the condition already
+ contains the ``os`` property and further conditions are required to
+ separate the observed results.
+
+So an example ``update_properties.json`` file might look like::
+
+ {
+ "properties": ["product", "os"],
+ "dependents": {"product": ["browser_channel"], "os": ["version"]}
+ }
+
+Examples
+~~~~~~~~
+
+Update all the expectations from a set of cross-platform test runs::
+
+ wpt update-expectations --full osx.log linux.log windows.log
+
+Add expectation data for some new tests that are expected to be
+platform-independent::
+
+ wpt update-expectations tests.log
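+
+Update expectations while also recording known intermittent statuses seen in
+the logs (an illustrative combination of the options described above; the log
+file name is arbitrary)::
+
+  wpt update-expectations --update-intermittent firefox.log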
+
+Why a Custom Format?
+--------------------
+
+Given the use of the metadata files in CI systems, it was desirable to
+have something with the following properties:
+
+ * Human readable
+
+ * Human editable
+
+ * Machine readable / writable
+
+ * Capable of storing key-value pairs
+
+ * Suitable for storing in a version control system (i.e. text-based)
+
+The need for different results per platform means either having
+multiple expectation files for each platform, or having a way to
+express conditional values within a certain file. The former would be
+rather cumbersome for humans updating the expectation files, so the
+latter approach has been adopted, leading to the requirement:
+
+ * Capable of storing result values that are conditional on the platform.
+
+There are few extant formats that clearly meet these requirements. In
+particular although conditional properties could be expressed in many
+existing formats, the representation would likely be cumbersome and
+error-prone for hand authoring. Therefore it was decided that a custom
+format offered the best tradeoffs given the requirements.
diff --git a/testing/web-platform/tests/tools/wptrunner/docs/internals.rst b/testing/web-platform/tests/tools/wptrunner/docs/internals.rst
new file mode 100644
index 0000000000..780df872ed
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/docs/internals.rst
@@ -0,0 +1,23 @@
+wptrunner Internals
+===================
+
+.. These modules are intentionally referenced as submodules from the parent
+ directory. This ensures that Sphinx interprets them as packages.
+
+.. automodule:: wptrunner.browsers.base
+ :members:
+
+.. automodule:: wptrunner.environment
+ :members:
+
+.. automodule:: wptrunner.executors.base
+ :members:
+
+.. automodule:: wptrunner.wptrunner
+ :members:
+
+.. automodule:: wptrunner.testloader
+ :members:
+
+.. automodule:: wptrunner.testrunner
+ :members:
diff --git a/testing/web-platform/tests/tools/wptrunner/requirements.txt b/testing/web-platform/tests/tools/wptrunner/requirements.txt
new file mode 100644
index 0000000000..dea3bbaa0a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/requirements.txt
@@ -0,0 +1,9 @@
+html5lib==1.1
+mozdebug==0.3.0
+mozinfo==1.2.2 # https://bugzilla.mozilla.org/show_bug.cgi?id=1621226
+mozlog==7.1.0
+mozprocess==1.3.0
+pillow==8.4.0
+requests==2.27.1
+six==1.16.0
+urllib3[secure]==1.26.9
diff --git a/testing/web-platform/tests/tools/wptrunner/requirements_chromium.txt b/testing/web-platform/tests/tools/wptrunner/requirements_chromium.txt
new file mode 100644
index 0000000000..4e347c647c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/requirements_chromium.txt
@@ -0,0 +1,4 @@
+# aioquic 0.9.15 is the last to support Python 3.6, but doesn't have prebuilt
+# wheels for Python 3.10, so use a different version depending on Python.
+aioquic==0.9.15; python_version == '3.6'
+aioquic==0.9.19; python_version != '3.6'
diff --git a/testing/web-platform/tests/tools/wptrunner/requirements_edge.txt b/testing/web-platform/tests/tools/wptrunner/requirements_edge.txt
new file mode 100644
index 0000000000..12920a9956
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/requirements_edge.txt
@@ -0,0 +1 @@
+selenium==4.3.0
diff --git a/testing/web-platform/tests/tools/wptrunner/requirements_firefox.txt b/testing/web-platform/tests/tools/wptrunner/requirements_firefox.txt
new file mode 100644
index 0000000000..222c91622d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/requirements_firefox.txt
@@ -0,0 +1,9 @@
+marionette_driver==3.1.0
+mozcrash==2.1.0
+mozdevice==4.0.3
+mozinstall==2.0.1
+mozleak==0.2
+mozprofile==2.5.0
+mozrunner==8.2.1
+mozversion==2.3.0
+psutil==5.9.1
diff --git a/testing/web-platform/tests/tools/wptrunner/requirements_ie.txt b/testing/web-platform/tests/tools/wptrunner/requirements_ie.txt
new file mode 100644
index 0000000000..1726afa607
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/requirements_ie.txt
@@ -0,0 +1,2 @@
+mozprocess==1.3.0
+selenium==4.3.0
diff --git a/testing/web-platform/tests/tools/wptrunner/requirements_opera.txt b/testing/web-platform/tests/tools/wptrunner/requirements_opera.txt
new file mode 100644
index 0000000000..1726afa607
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/requirements_opera.txt
@@ -0,0 +1,2 @@
+mozprocess==1.3.0
+selenium==4.3.0
diff --git a/testing/web-platform/tests/tools/wptrunner/requirements_safari.txt b/testing/web-platform/tests/tools/wptrunner/requirements_safari.txt
new file mode 100644
index 0000000000..8d303aa452
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/requirements_safari.txt
@@ -0,0 +1 @@
+psutil==5.9.1
diff --git a/testing/web-platform/tests/tools/wptrunner/requirements_sauce.txt b/testing/web-platform/tests/tools/wptrunner/requirements_sauce.txt
new file mode 100644
index 0000000000..5089b0c183
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/requirements_sauce.txt
@@ -0,0 +1,2 @@
+selenium==4.3.0
+requests==2.27.1
diff --git a/testing/web-platform/tests/tools/wptrunner/setup.py b/testing/web-platform/tests/tools/wptrunner/setup.py
new file mode 100644
index 0000000000..3a0c1a1f73
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/setup.py
@@ -0,0 +1,66 @@
+import glob
+import os
+import sys
+import textwrap
+
+from setuptools import setup, find_packages
+
+here = os.path.dirname(__file__)
+
+PACKAGE_NAME = 'wptrunner'
+PACKAGE_VERSION = '1.14'
+
+# Dependencies
+with open(os.path.join(here, "requirements.txt")) as f:
+ deps = f.read().splitlines()
+
+# Browser-specific requirements
+requirements_files = glob.glob("requirements_*.txt")
+
+profile_dest = None
+dest_exists = False
+
+setup(name=PACKAGE_NAME,
+ version=PACKAGE_VERSION,
+ description="Harness for running the W3C web-platform-tests against various products",
+ author='Mozilla Automation and Testing Team',
+ author_email='tools@lists.mozilla.org',
+ license='MPL 2.0',
+ packages=find_packages(exclude=["tests", "metadata", "prefs"]),
+ entry_points={
+ 'console_scripts': [
+ 'wptrunner = wptrunner.wptrunner:main',
+ 'wptupdate = wptrunner.update:main',
+ ]
+ },
+ zip_safe=False,
+ platforms=['Any'],
+ classifiers=['Development Status :: 4 - Beta',
+ 'Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: BSD License',
+ 'Operating System :: OS Independent'],
+ package_data={"wptrunner": ["executors/testharness_marionette.js",
+ "executors/testharness_webdriver.js",
+ "executors/reftest.js",
+ "executors/reftest-wait.js",
+ "testharnessreport.js",
+ "testharness_runner.html",
+ "wptrunner.default.ini",
+ "browsers/sauce_setup/*",
+ "prefs/*"]},
+ include_package_data=True,
+ data_files=[("requirements", requirements_files)],
+ )
+
+if "install" in sys.argv:
+ path = os.path.relpath(os.path.join(sys.prefix, "requirements"), os.curdir)
+ print(textwrap.fill("""In order to use with one of the built-in browser
+products, you will need to install the extra dependencies. These are provided
+as requirements_[name].txt in the %s directory and can be installed using
+e.g.""" % path, 80))
+
+ print("""
+
+pip install -r %s/requirements_firefox.txt
+""" % path)
diff --git a/testing/web-platform/tests/tools/wptrunner/tox.ini b/testing/web-platform/tests/tools/wptrunner/tox.ini
new file mode 100644
index 0000000000..3a1afda216
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/tox.ini
@@ -0,0 +1,25 @@
+[pytest]
+xfail_strict=true
+
+[tox]
+envlist = py310-{base,chrome,edge,firefox,ie,opera,safari,sauce,servo,webkit,webkitgtk_minibrowser,epiphany},{py36,py37,py38,py39}-base
+skip_missing_interpreters = False
+
+[testenv]
+deps =
+ -r{toxinidir}/../requirements_pytest.txt
+ -r{toxinidir}/requirements.txt
+ chrome: -r{toxinidir}/requirements_chromium.txt
+ edge: -r{toxinidir}/requirements_edge.txt
+ firefox: -r{toxinidir}/requirements_firefox.txt
+ ie: -r{toxinidir}/requirements_ie.txt
+ opera: -r{toxinidir}/requirements_opera.txt
+ safari: -r{toxinidir}/requirements_safari.txt
+ sauce: -r{toxinidir}/requirements_sauce.txt
+
+commands = pytest {posargs}
+
+setenv = CURRENT_TOX_ENV = {envname}
+
+passenv =
+ TASKCLUSTER_ROOT_URL
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner.default.ini b/testing/web-platform/tests/tools/wptrunner/wptrunner.default.ini
new file mode 100644
index 0000000000..19462bc317
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner.default.ini
@@ -0,0 +1,11 @@
+[products]
+
+[web-platform-tests]
+remote_url = https://github.com/web-platform-tests/wpt.git
+branch = master
+sync_path = %(pwd)s/sync
+
+[manifest:default]
+tests = %(pwd)s/tests
+metadata = %(pwd)s/meta
+url_base = /
\ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/__init__.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/__init__.py
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/__init__.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/__init__.py
new file mode 100644
index 0000000000..b2a53ca23a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/__init__.py
@@ -0,0 +1,45 @@
+"""Subpackage where each product is defined. Each product is created by adding a
+a .py file containing a __wptrunner__ variable in the global scope. This must be
+a dictionary with the fields
+
+"product": Name of the product, assumed to be unique.
+"browser": String indicating the Browser implementation used to launch that
+ product.
+"executor": Dictionary with keys as supported test types and values as the name
+ of the Executor implementation that will be used to run that test
+ type.
+"browser_kwargs": String naming function that takes product, binary,
+ prefs_root and the wptrunner.run_tests kwargs dict as arguments
+ and returns a dictionary of kwargs to use when creating the
+ Browser class.
+"executor_kwargs": String naming a function that takes http server url and
+ timeout multiplier and returns kwargs to use when creating
+ the executor class.
+"env_options": String naming a function of no arguments that returns the
+ arguments passed to the TestEnvironment.
+
+All classes and functions named in the above dict must be imported into the
+module global scope.
+"""
+
+product_list = ["android_weblayer",
+ "android_webview",
+ "chrome",
+ "chrome_android",
+ "chrome_ios",
+ "chromium",
+ "content_shell",
+ "edgechromium",
+ "edge",
+ "edge_webdriver",
+ "firefox",
+ "firefox_android",
+ "ie",
+ "safari",
+ "sauce",
+ "servo",
+ "servodriver",
+ "opera",
+ "webkit",
+ "webkitgtk_minibrowser",
+ "epiphany"]
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/android_weblayer.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/android_weblayer.py
new file mode 100644
index 0000000000..db23b64793
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/android_weblayer.py
@@ -0,0 +1,105 @@
+# mypy: allow-untyped-defs
+
+from .base import NullBrowser # noqa: F401
+from .base import require_arg
+from .base import get_timeout_multiplier # noqa: F401
+from .chrome import executor_kwargs as chrome_executor_kwargs
+from .chrome_android import ChromeAndroidBrowserBase
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorchrome import ChromeDriverPrintRefTestExecutor # noqa: F401
+from ..executors.executorwebdriver import (WebDriverCrashtestExecutor, # noqa: F401
+ WebDriverTestharnessExecutor, # noqa: F401
+ WebDriverRefTestExecutor) # noqa: F401
+
+
+__wptrunner__ = {"product": "android_weblayer",
+ "check_args": "check_args",
+ "browser": {None: "WeblayerShell",
+ "wdspec": "NullBrowser"},
+ "executor": {"testharness": "WebDriverTestharnessExecutor",
+ "reftest": "WebDriverRefTestExecutor",
+ "print-reftest": "ChromeDriverPrintRefTestExecutor",
+ "wdspec": "WdspecExecutor",
+ "crashtest": "WebDriverCrashtestExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+_wptserve_ports = set()
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "webdriver_binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"binary": kwargs["binary"],
+ "adb_binary": kwargs["adb_binary"],
+ "device_serial": kwargs["device_serial"],
+ "webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs.get("webdriver_args"),
+ "stackwalk_binary": kwargs.get("stackwalk_binary"),
+ "symbols_path": kwargs.get("symbols_path")}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ # Use update() to modify the global list in place.
+ _wptserve_ports.update(set(
+ test_environment.config['ports']['http'] + test_environment.config['ports']['https'] +
+ test_environment.config['ports']['ws'] + test_environment.config['ports']['wss']
+ ))
+
+ executor_kwargs = chrome_executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs)
+ del executor_kwargs["capabilities"]["goog:chromeOptions"]["prefs"]
+ capabilities = executor_kwargs["capabilities"]
+ # Note that for WebLayer, we launch a test shell and have the test shell use
+ # WebLayer.
+ # https://cs.chromium.org/chromium/src/weblayer/shell/android/shell_apk/
+ capabilities["goog:chromeOptions"]["androidPackage"] = \
+ "org.chromium.weblayer.shell"
+ capabilities["goog:chromeOptions"]["androidActivity"] = ".WebLayerShellActivity"
+ capabilities["goog:chromeOptions"]["androidKeepAppDataDir"] = \
+ kwargs.get("keep_app_data_directory")
+
+ # Workaround: driver.quit() cannot quit WeblayerShell.
+ executor_kwargs["pause_after_test"] = False
+ # Workaround: driver.close() is not supported.
+ executor_kwargs["restart_after_test"] = True
+ executor_kwargs["close_after_done"] = False
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ # allow the use of host-resolver-rules in lieu of modifying /etc/hosts file
+ return {"server_host": "127.0.0.1"}
+
+
+class WeblayerShell(ChromeAndroidBrowserBase):
+ """Chrome is backed by chromedriver, which is supplied through
+ ``wptrunner.webdriver.ChromeDriverServer``.
+ """
+
+ def __init__(self, logger, binary,
+ webdriver_binary="chromedriver",
+ adb_binary=None,
+ remote_queue=None,
+ device_serial=None,
+ webdriver_args=None,
+ stackwalk_binary=None,
+ symbols_path=None):
+ """Creates a new representation of Chrome. The `binary` argument gives
+ the browser binary to use for testing."""
+ super().__init__(logger,
+ webdriver_binary, adb_binary, remote_queue,
+ device_serial, webdriver_args, stackwalk_binary,
+ symbols_path)
+ self.binary = binary
+ self.wptserver_ports = _wptserve_ports
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/android_webview.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/android_webview.py
new file mode 100644
index 0000000000..4ad7066178
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/android_webview.py
@@ -0,0 +1,103 @@
+# mypy: allow-untyped-defs
+
+from .base import NullBrowser # noqa: F401
+from .base import require_arg
+from .base import get_timeout_multiplier # noqa: F401
+from .chrome import executor_kwargs as chrome_executor_kwargs
+from .chrome_android import ChromeAndroidBrowserBase
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorchrome import ChromeDriverPrintRefTestExecutor # noqa: F401
+from ..executors.executorwebdriver import (WebDriverCrashtestExecutor, # noqa: F401
+ WebDriverTestharnessExecutor, # noqa: F401
+ WebDriverRefTestExecutor) # noqa: F401
+
+
+__wptrunner__ = {"product": "android_webview",
+ "check_args": "check_args",
+ "browser": "SystemWebViewShell",
+ "executor": {"testharness": "WebDriverTestharnessExecutor",
+ "reftest": "WebDriverRefTestExecutor",
+ "print-reftest": "ChromeDriverPrintRefTestExecutor",
+ "wdspec": "WdspecExecutor",
+ "crashtest": "WebDriverCrashtestExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+_wptserve_ports = set()
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "webdriver_binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"binary": kwargs["binary"],
+ "adb_binary": kwargs["adb_binary"],
+ "device_serial": kwargs["device_serial"],
+ "webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs.get("webdriver_args"),
+ "stackwalk_binary": kwargs.get("stackwalk_binary"),
+ "symbols_path": kwargs.get("symbols_path")}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ # Use update() to modify the global list in place.
+ _wptserve_ports.update(set(
+ test_environment.config['ports']['http'] + test_environment.config['ports']['https'] +
+ test_environment.config['ports']['ws'] + test_environment.config['ports']['wss']
+ ))
+
+ executor_kwargs = chrome_executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs)
+ del executor_kwargs["capabilities"]["goog:chromeOptions"]["prefs"]
+ capabilities = executor_kwargs["capabilities"]
+ # Note that for WebView, we launch a test shell and have the test shell use WebView.
+ # https://chromium.googlesource.com/chromium/src/+/HEAD/android_webview/docs/webview-shell.md
+ capabilities["goog:chromeOptions"]["androidPackage"] = \
+ kwargs.get("package_name", "org.chromium.webview_shell")
+ capabilities["goog:chromeOptions"]["androidActivity"] = \
+ "org.chromium.webview_shell.WebPlatformTestsActivity"
+ capabilities["goog:chromeOptions"]["androidKeepAppDataDir"] = \
+ kwargs.get("keep_app_data_directory")
+
+ # Workaround: driver.quit() cannot quit SystemWebViewShell.
+ executor_kwargs["pause_after_test"] = False
+ # Workaround: driver.close() is not supported.
+ executor_kwargs["restart_after_test"] = True
+ executor_kwargs["close_after_done"] = False
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ # allow the use of host-resolver-rules in lieu of modifying /etc/hosts file
+ return {"server_host": "127.0.0.1"}
+
+
+class SystemWebViewShell(ChromeAndroidBrowserBase):
+ """Chrome is backed by chromedriver, which is supplied through
+ ``wptrunner.webdriver.ChromeDriverServer``.
+ """
+
+ def __init__(self, logger, binary, webdriver_binary="chromedriver",
+ adb_binary=None,
+ remote_queue=None,
+ device_serial=None,
+ webdriver_args=None,
+ stackwalk_binary=None,
+ symbols_path=None):
+ """Creates a new representation of Chrome. The `binary` argument gives
+ the browser binary to use for testing."""
+ super().__init__(logger,
+ webdriver_binary, adb_binary, remote_queue,
+ device_serial, webdriver_args, stackwalk_binary,
+ symbols_path)
+ self.binary = binary
+ self.wptserver_ports = _wptserve_ports
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/base.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/base.py
new file mode 100644
index 0000000000..5b590adf25
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/base.py
@@ -0,0 +1,409 @@
+# mypy: allow-untyped-defs
+
+import enum
+import errno
+import os
+import platform
+import socket
+import traceback
+from abc import ABCMeta, abstractmethod
+
+import mozprocess
+
+from ..environment import wait_for_service
+from ..wptcommandline import require_arg # noqa: F401
+
+here = os.path.dirname(__file__)
+
+
+def cmd_arg(name, value=None):
+ prefix = "-" if platform.system() == "Windows" else "--"
+ rv = prefix + name
+ if value is not None:
+ rv += "=" + value
+ return rv
+
+
+def maybe_add_args(required_args, current_args):
+ for required_arg in required_args:
+ # If the arg is in the form of "variable=value", only add it if
+ # no arg with another value for "variable" is already there.
+ if "=" in required_arg:
+ required_arg_prefix = "%s=" % required_arg.split("=")[0]
+ if not any(item.startswith(required_arg_prefix) for item in current_args):
+ current_args.append(required_arg)
+ else:
+ if required_arg not in current_args:
+ current_args.append(required_arg)
+ return current_args
+
+
+def certificate_domain_list(list_of_domains, certificate_file):
+ """Build a list of domains where certificate_file should be used"""
+ cert_list = []
+ for domain in list_of_domains:
+ cert_list.append({"host": domain, "certificateFile": certificate_file})
+ return cert_list
+
+
+def get_free_port():
+ """Get a random unbound port"""
+ while True:
+ s = socket.socket()
+ try:
+ s.bind(("127.0.0.1", 0))
+ except OSError:
+ continue
+ else:
+ return s.getsockname()[1]
+ finally:
+ s.close()
+
+
+def get_timeout_multiplier(test_type, run_info_data, **kwargs):
+ if kwargs["timeout_multiplier"] is not None:
+ return kwargs["timeout_multiplier"]
+ return 1
+
+
+def browser_command(binary, args, debug_info):
+ if debug_info:
+ if debug_info.requiresEscapedArgs:
+ args = [item.replace("&", "\\&") for item in args]
+ debug_args = [debug_info.path] + debug_info.args
+ else:
+ debug_args = []
+
+ command = [binary] + args
+
+ return debug_args, command
+
+
+class BrowserError(Exception):
+ pass
+
+
+class Browser:
+ """Abstract class serving as the basis for Browser implementations.
+
+ The Browser is used in the TestRunnerManager to start and stop the browser
+ process, and to check the state of that process.
+
+ :param logger: Structured logger to use for output.
+ """
+ __metaclass__ = ABCMeta
+
+ process_cls = None
+ init_timeout = 30
+
+ def __init__(self, logger):
+ self.logger = logger
+
+ def setup(self):
+ """Used for browser-specific setup that happens at the start of a test run"""
+ pass
+
+ def settings(self, test):
+ """Dictionary of metadata that is constant for a specific launch of a browser.
+
+ This is used to determine when the browser instance configuration changes, requiring
+ a relaunch of the browser. The test runner calls this method for each test, and if the
+ returned value differs from that for the previous test, the browser is relaunched.
+ """
+ return {}
+
+ @abstractmethod
+ def start(self, group_metadata, **kwargs):
+ """Launch the browser object and get it into a state where is is ready to run tests"""
+ pass
+
+ @abstractmethod
+ def stop(self, force=False):
+ """Stop the running browser process."""
+ pass
+
+ @abstractmethod
+ def pid(self):
+ """pid of the browser process or None if there is no pid"""
+ pass
+
+ @abstractmethod
+ def is_alive(self):
+ """Boolean indicating whether the browser process is still running"""
+ pass
+
+ def cleanup(self):
+ """Browser-specific cleanup that is run after the testrun is finished"""
+ pass
+
+ def executor_browser(self):
+ """Returns the ExecutorBrowser subclass for this Browser subclass and the keyword arguments
+ with which it should be instantiated"""
+ return ExecutorBrowser, {}
+
+ def maybe_parse_tombstone(self):
+ """Possibly parse tombstones on Android device for Android target"""
+ pass
+
+ def check_crash(self, process, test):
+ """Check if a crash occured and output any useful information to the
+ log. Returns a boolean indicating whether a crash occured."""
+ return False
+
+ @property
+ def pac(self):
+ return None
+
+
+class NullBrowser(Browser):
+ def __init__(self, logger, **kwargs):
+ super().__init__(logger)
+
+ def start(self, **kwargs):
+ """No-op browser to use in scenarios where the TestRunnerManager shouldn't
+ actually own the browser process (e.g. Servo where we start one browser
+ per test)"""
+ pass
+
+ def stop(self, force=False):
+ pass
+
+ def pid(self):
+ return None
+
+ def is_alive(self):
+ return True
+
+
+class ExecutorBrowser:
+ """View of the Browser used by the Executor object.
+ This is needed because the Executor runs in a child process and
+ we can't ship Browser instances between processes on Windows.
+
+ Typically this will have a few product-specific properties set,
+ but in some cases it may have more elaborate methods for setting
+ up the browser from the runner process.
+ """
+ def __init__(self, **kwargs):
+ for k, v in kwargs.items():
+ setattr(self, k, v)
+
+
+@enum.unique
+class OutputHandlerState(enum.IntEnum):
+ BEFORE_PROCESS_START = 1
+ AFTER_PROCESS_START = 2
+ AFTER_HANDLER_START = 3
+ AFTER_PROCESS_STOP = 4
+
+
+class OutputHandler:
+ """Class for handling output from a browser process.
+
+ This class is responsible for consuming the logging from a browser process
+ and passing it into the relevant logger. A class instance is designed to
+ be passed as the processOutputLine argument to mozprocess.ProcessHandler.
+
+ The setup of this class is complex for various reasons:
+
+ * We need to create an instance of the class before starting the process
+ * We want access to data about the running process e.g. the pid
+ * We want to launch the process and later setup additional log handling
+ which is retrospectively applied to any existing output (this supports
+ prelaunching browsers for performance, but having log output depend on the
+ tests that are run e.g. for leak suppression).
+
+ Therefore the lifecycle is as follows::
+
+ output_handler = OutputHandler(logger, command, **output_handler_kwargs)
+ proc = ProcessHandler(command, ..., processOutputLine=output_handler)
+ output_handler.after_process_start(proc.pid)
+ [...]
+ # All logging to this point was buffered in-memory, but after start()
+ # it's actually sent to the logger.
+ output_handler.start(**output_logger_start_kwargs)
+ [...]
+ proc.wait()
+ output_handler.after_process_stop()
+
+ Since the process lifetime and the output handler lifetime are coupled (it doesn't
+ work to reuse an output handler for multiple processes), it might make sense to have
+ a single class that owns the process and the output processing for the process.
+ This is complicated by the fact that we don't always run the process directly,
+ but sometimes use a wrapper e.g. mozrunner.
+ """
+
+ def __init__(self, logger, command, **kwargs):
+ self.logger = logger
+ self.command = command
+ self.pid = None
+ self.state = OutputHandlerState.BEFORE_PROCESS_START
+ self.line_buffer = []
+
+ def after_process_start(self, pid):
+ assert self.state == OutputHandlerState.BEFORE_PROCESS_START
+ self.logger.debug("OutputHandler.after_process_start")
+ self.pid = pid
+ self.state = OutputHandlerState.AFTER_PROCESS_START
+
+ def start(self, **kwargs):
+ assert self.state == OutputHandlerState.AFTER_PROCESS_START
+ self.logger.debug("OutputHandler.start")
+ # Need to change the state here before we try to empty the buffer
+ # or we'll just re-buffer the existing output.
+ self.state = OutputHandlerState.AFTER_HANDLER_START
+ for item in self.line_buffer:
+ self(item)
+ self.line_buffer = None
+
+ def after_process_stop(self, clean_shutdown=True):
+ # If we didn't get as far as configure, just
+ # dump all logs with no configuration
+ self.logger.debug("OutputHandler.after_process_stop")
+ if self.state < OutputHandlerState.AFTER_HANDLER_START:
+ self.start()
+ self.state = OutputHandlerState.AFTER_PROCESS_STOP
+
+ def __call__(self, line):
+ if self.state < OutputHandlerState.AFTER_HANDLER_START:
+ self.line_buffer.append(line)
+ return
+
+ # Could assert that there's no output handled once we're in the
+ # after_process_stop phase, although technically there's a race condition
+ # here because we don't know the logging thread has finished draining the
+ # logs. The solution might be to move this into mozprocess itself.
+
+ self.logger.process_output(self.pid,
+ line.decode("utf8", "replace"),
+ command=" ".join(self.command) if self.command else "")
+
+
+class WebDriverBrowser(Browser):
+ __metaclass__ = ABCMeta
+
+ def __init__(self, logger, binary=None, webdriver_binary=None,
+ webdriver_args=None, host="127.0.0.1", port=None, base_path="/",
+ env=None, supports_pac=True, **kwargs):
+ super().__init__(logger)
+
+ if webdriver_binary is None:
+ raise ValueError("WebDriver server binary must be given "
+ "to --webdriver-binary argument")
+
+ self.logger = logger
+ self.binary = binary
+ self.webdriver_binary = webdriver_binary
+
+ self.host = host
+ self._port = port
+ self._supports_pac = supports_pac
+
+ self.base_path = base_path
+ self.env = os.environ.copy() if env is None else env
+ self.webdriver_args = webdriver_args if webdriver_args is not None else []
+
+ self.url = f"http://{self.host}:{self.port}{self.base_path}"
+
+ self._output_handler = None
+ self._cmd = None
+ self._proc = None
+ self._pac = None
+
+ def make_command(self):
+ """Returns the full command for starting the server process as a list."""
+ return [self.webdriver_binary] + self.webdriver_args
+
+ def start(self, group_metadata, **kwargs):
+ try:
+ self._run_server(group_metadata, **kwargs)
+ except KeyboardInterrupt:
+ self.stop()
+
+ def create_output_handler(self, cmd):
+ """Return an instance of the class used to handle application output.
+
+ This can be overridden by subclasses which have particular requirements
+ for parsing, or otherwise using, the output."""
+ return OutputHandler(self.logger, cmd)
+
+ def _run_server(self, group_metadata, **kwargs):
+ cmd = self.make_command()
+ self._output_handler = self.create_output_handler(cmd)
+
+ self._proc = mozprocess.ProcessHandler(
+ cmd,
+ processOutputLine=self._output_handler,
+ env=self.env,
+ storeOutput=False)
+
+ self.logger.debug("Starting WebDriver: %s" % ' '.join(cmd))
+ try:
+ self._proc.run()
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ raise OSError(
+ "WebDriver executable not found: %s" % self.webdriver_binary)
+ raise
+ self._output_handler.after_process_start(self._proc.pid)
+
+ try:
+ wait_for_service(self.logger, self.host, self.port,
+ timeout=self.init_timeout)
+ except Exception:
+ self.logger.error(
+ "WebDriver was not accessible "
+ f"within the timeout:\n{traceback.format_exc()}")
+ raise
+ self._output_handler.start(group_metadata=group_metadata, **kwargs)
+ self.logger.debug("_run complete")
+
+ def stop(self, force=False):
+ self.logger.debug("Stopping WebDriver")
+ clean = True
+ if self.is_alive():
+            # Pass a timeout value to mozprocess ProcessHandler.kill()
+            # to ensure it always returns within that time.
+ # See https://bugzilla.mozilla.org/show_bug.cgi?id=1760080
+ kill_result = self._proc.kill(timeout=5)
+ if force and kill_result != 0:
+ clean = False
+ self._proc.kill(9, timeout=5)
+ success = not self.is_alive()
+ if success and self._output_handler is not None:
+ # Only try to do output post-processing if we managed to shut down
+ self._output_handler.after_process_stop(clean)
+ self._output_handler = None
+ return success
+
+ def is_alive(self):
+ return hasattr(self._proc, "proc") and self._proc.poll() is None
+
+ @property
+ def pid(self):
+ if self._proc is not None:
+ return self._proc.pid
+
+ @property
+ def port(self):
+ # If no port is supplied, we'll get a free port right before we use it.
+ # Nothing guarantees an absence of race conditions here.
+ if self._port is None:
+ self._port = get_free_port()
+ return self._port
+
+ def cleanup(self):
+ self.stop()
+
+ def executor_browser(self):
+ return ExecutorBrowser, {"webdriver_url": self.url,
+ "host": self.host,
+ "port": self.port,
+ "pac": self.pac}
+
+ def settings(self, test):
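+        # "pac" is the proxy auto-config setting from the test's environment
+        # metadata (only honored when the browser supports PAC); it is passed
+        # to the executor via executor_browser() above.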
+ self._pac = test.environment.get("pac", None) if self._supports_pac else None
+ return {"pac": self._pac}
+
+ @property
+ def pac(self):
+ return self._pac
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome.py
new file mode 100644
index 0000000000..2bcffbb5de
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome.py
@@ -0,0 +1,157 @@
+# mypy: allow-untyped-defs
+
+from . import chrome_spki_certs
+from .base import WebDriverBrowser, require_arg
+from .base import NullBrowser # noqa: F401
+from .base import get_timeout_multiplier # noqa: F401
+from .base import cmd_arg
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
+ WebDriverRefTestExecutor, # noqa: F401
+ WebDriverCrashtestExecutor) # noqa: F401
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorchrome import ChromeDriverPrintRefTestExecutor # noqa: F401
+
+
+__wptrunner__ = {"product": "chrome",
+ "check_args": "check_args",
+ "browser": "ChromeBrowser",
+ "executor": {"testharness": "WebDriverTestharnessExecutor",
+ "reftest": "WebDriverRefTestExecutor",
+ "print-reftest": "ChromeDriverPrintRefTestExecutor",
+ "wdspec": "WdspecExecutor",
+ "crashtest": "WebDriverCrashtestExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "update_properties": "update_properties",
+ "timeout_multiplier": "get_timeout_multiplier",}
+
+def check_args(**kwargs):
+ require_arg(kwargs, "webdriver_binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"binary": kwargs["binary"],
+ "webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs.get("webdriver_args")}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ executor_kwargs = base_executor_kwargs(test_type, test_environment, run_info_data,
+ **kwargs)
+ executor_kwargs["close_after_done"] = True
+ executor_kwargs["supports_eager_pageload"] = False
+
+ capabilities = {
+ "goog:chromeOptions": {
+ "prefs": {
+ "profile": {
+ "default_content_setting_values": {
+ "popups": 1
+ }
+ }
+ },
+ "excludeSwitches": ["enable-automation"],
+ "w3c": True
+ }
+ }
+
+ if test_type == "testharness":
+ capabilities["pageLoadStrategy"] = "none"
+
+ chrome_options = capabilities["goog:chromeOptions"]
+ if kwargs["binary"] is not None:
+ chrome_options["binary"] = kwargs["binary"]
+
+ # Here we set a few Chrome flags that are always passed.
+ # ChromeDriver's "acceptInsecureCerts" capability only controls the current
+ # browsing context, whereas the CLI flag works for workers, too.
+ chrome_options["args"] = []
+
+ chrome_options["args"].append("--ignore-certificate-errors-spki-list=%s" %
+ ','.join(chrome_spki_certs.IGNORE_CERTIFICATE_ERRORS_SPKI_LIST))
+
+ # Allow audio autoplay without a user gesture.
+ chrome_options["args"].append("--autoplay-policy=no-user-gesture-required")
+ # Allow WebRTC tests to call getUserMedia and getDisplayMedia.
+ chrome_options["args"].append("--use-fake-device-for-media-stream")
+ chrome_options["args"].append("--use-fake-ui-for-media-stream")
+ # Shorten delay for Reporting <https://w3c.github.io/reporting/>.
+ chrome_options["args"].append("--short-reporting-delay")
+ # Point all .test domains to localhost for Chrome
+ chrome_options["args"].append("--host-resolver-rules=MAP nonexistent.*.test ~NOTFOUND, MAP *.test 127.0.0.1")
+ # Enable Secure Payment Confirmation for Chrome. This is normally disabled
+ # on Linux as it hasn't shipped there yet, but in WPT we enable virtual
+ # authenticator devices anyway for testing and so SPC works.
+ chrome_options["args"].append("--enable-features=SecurePaymentConfirmationBrowser")
+
+ # Classify `http-private`, `http-public` and https variants in the
+ # appropriate IP address spaces.
+ # For more details, see: https://github.com/web-platform-tests/rfcs/blob/master/rfcs/address_space_overrides.md
+ address_space_overrides_ports = [
+ ("http-private", "private"),
+ ("http-public", "public"),
+ ("https-private", "private"),
+ ("https-public", "public"),
+ ]
+ address_space_overrides_arg = ",".join(
+ f"127.0.0.1:{port_number}={address_space}"
+ for port_name, address_space in address_space_overrides_ports
+ for port_number in test_environment.config.ports.get(port_name, [])
+ )
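+    # The joined value has the form (hypothetical port numbers):
+    #   127.0.0.1:8443=private,127.0.0.1:8444=public,...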
+ if address_space_overrides_arg:
+ chrome_options["args"].append(
+ "--ip-address-space-overrides=" + address_space_overrides_arg)
+
+ if kwargs["enable_mojojs"]:
+ chrome_options["args"].append("--enable-blink-features=MojoJS,MojoJSTest")
+
+ if kwargs["enable_swiftshader"]:
+ # https://chromium.googlesource.com/chromium/src/+/HEAD/docs/gpu/swiftshader.md
+ chrome_options["args"].extend(["--use-gl=angle", "--use-angle=swiftshader"])
+
+ if kwargs["enable_experimental"]:
+ chrome_options["args"].extend(["--enable-experimental-web-platform-features"])
+
+ # Copy over any other flags that were passed in via --binary_args
+ if kwargs["binary_args"] is not None:
+ chrome_options["args"].extend(kwargs["binary_args"])
+
+ # Pass the --headless flag to Chrome if WPT's own --headless flag was set
+ # or if we're running print reftests because of crbug.com/753118
+ if ((kwargs["headless"] or test_type == "print-reftest") and
+ "--headless" not in chrome_options["args"]):
+ chrome_options["args"].append("--headless")
+
+ # For WebTransport tests.
+    webtransport_h3_port = test_environment.config.ports.get('webtransport-h3')
+    if webtransport_h3_port is not None:
+        chrome_options["args"].append(
+            f"--origin-to-force-quic-on=web-platform.test:{webtransport_h3_port[0]}")
+
+ executor_kwargs["capabilities"] = capabilities
+
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ return {"server_host": "127.0.0.1"}
+
+
+def update_properties():
+ return (["debug", "os", "processor"], {"os": ["version"], "processor": ["bits"]})
+
+
+class ChromeBrowser(WebDriverBrowser):
+ def make_command(self):
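+        # Roughly: ["chromedriver", "--port=<free port>", "--url-base=/",
+        # "--enable-chrome-logs"] plus any user-supplied webdriver_args
+        # (the exact flag formatting comes from cmd_arg).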
+ return [self.webdriver_binary,
+ cmd_arg("port", str(self.port)),
+ cmd_arg("url-base", self.base_path),
+ cmd_arg("enable-chrome-logs")] + self.webdriver_args
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome_android.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome_android.py
new file mode 100644
index 0000000000..820323e615
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome_android.py
@@ -0,0 +1,244 @@
+# mypy: allow-untyped-defs
+
+import mozprocess
+import subprocess
+
+from .base import cmd_arg, require_arg
+from .base import get_timeout_multiplier # noqa: F401
+from .base import WebDriverBrowser # noqa: F401
+from .chrome import executor_kwargs as chrome_executor_kwargs
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorchrome import ChromeDriverPrintRefTestExecutor # noqa: F401
+from ..executors.executorwebdriver import (WebDriverCrashtestExecutor, # noqa: F401
+ WebDriverTestharnessExecutor, # noqa: F401
+ WebDriverRefTestExecutor) # noqa: F401
+
+
+__wptrunner__ = {"product": "chrome_android",
+ "check_args": "check_args",
+ "browser": "ChromeAndroidBrowser",
+ "executor": {"testharness": "WebDriverTestharnessExecutor",
+ "reftest": "WebDriverRefTestExecutor",
+ "print-reftest": "ChromeDriverPrintRefTestExecutor",
+ "wdspec": "WdspecExecutor",
+ "crashtest": "WebDriverCrashtestExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+_wptserve_ports = set()
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "package_name")
+ require_arg(kwargs, "webdriver_binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"package_name": kwargs["package_name"],
+ "adb_binary": kwargs["adb_binary"],
+ "device_serial": kwargs["device_serial"],
+ "webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs.get("webdriver_args"),
+ "stackwalk_binary": kwargs.get("stackwalk_binary"),
+ "symbols_path": kwargs.get("symbols_path")}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ # Use update() to modify the global list in place.
+ _wptserve_ports.update(set(
+ test_environment.config['ports']['http'] + test_environment.config['ports']['https'] +
+ test_environment.config['ports']['ws'] + test_environment.config['ports']['wss']
+ ))
+
+ executor_kwargs = chrome_executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs)
+ # Remove unsupported options on mobile.
+ del executor_kwargs["capabilities"]["goog:chromeOptions"]["prefs"]
+
+ assert kwargs["package_name"], "missing --package-name"
+ capabilities = executor_kwargs["capabilities"]
+ capabilities["goog:chromeOptions"]["androidPackage"] = \
+ kwargs["package_name"]
+ capabilities["goog:chromeOptions"]["androidKeepAppDataDir"] = \
+ kwargs.get("keep_app_data_directory")
+
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ # allow the use of host-resolver-rules in lieu of modifying /etc/hosts file
+ return {"server_host": "127.0.0.1"}
+
+
+class LogcatRunner:
+ def __init__(self, logger, browser, remote_queue):
+ self.logger = logger
+ self.browser = browser
+ self.remote_queue = remote_queue
+
+ def start(self):
+ try:
+ self._run()
+ except KeyboardInterrupt:
+ self.stop()
+
+ def _run(self):
+ try:
+            # TODO: adb logcat -c fails randomly with the message
+ # "failed to clear the 'main' log"
+ self.browser.clear_log()
+ except subprocess.CalledProcessError:
+ self.logger.error("Failed to clear logcat buffer")
+
+ self._cmd = self.browser.logcat_cmd()
+ self._proc = mozprocess.ProcessHandler(
+ self._cmd,
+ processOutputLine=self.on_output,
+ storeOutput=False)
+ self._proc.run()
+
+ def _send_message(self, command, *args):
+ try:
+ self.remote_queue.put((command, args))
+ except AssertionError:
+ self.logger.warning("Error when send to remote queue")
+
+ def stop(self, force=False):
+ if self.is_alive():
+ kill_result = self._proc.kill()
+ if force and kill_result != 0:
+ self._proc.kill(9)
+
+ def is_alive(self):
+ return hasattr(self._proc, "proc") and self._proc.poll() is None
+
+ def on_output(self, line):
+ data = {
+ "action": "process_output",
+ "process": "LOGCAT",
+ "command": "logcat",
+ "data": line
+ }
+ self._send_message("log", data)
+
+
+class ChromeAndroidBrowserBase(WebDriverBrowser):
+ def __init__(self,
+ logger,
+ webdriver_binary="chromedriver",
+ adb_binary=None,
+ remote_queue=None,
+ device_serial=None,
+ webdriver_args=None,
+ stackwalk_binary=None,
+ symbols_path=None):
+ super().__init__(logger,
+ binary=None,
+ webdriver_binary=webdriver_binary,
+ webdriver_args=webdriver_args,)
+ self.adb_binary = adb_binary or "adb"
+ self.device_serial = device_serial
+ self.stackwalk_binary = stackwalk_binary
+ self.symbols_path = symbols_path
+ self.remote_queue = remote_queue
+
+ if self.remote_queue is not None:
+ self.logcat_runner = LogcatRunner(self.logger, self, self.remote_queue)
+
+ def setup(self):
+ self.setup_adb_reverse()
+ if self.remote_queue is not None:
+ self.logcat_runner.start()
+
+ def _adb_run(self, args):
+ cmd = [self.adb_binary]
+ if self.device_serial:
+ cmd.extend(['-s', self.device_serial])
+ cmd.extend(args)
+ self.logger.info(' '.join(cmd))
+ subprocess.check_call(cmd)
+
+ def make_command(self):
+ return [self.webdriver_binary,
+ cmd_arg("port", str(self.port)),
+ cmd_arg("url-base", self.base_path),
+ cmd_arg("enable-chrome-logs")] + self.webdriver_args
+
+ def cleanup(self):
+ super().cleanup()
+ self._adb_run(['forward', '--remove-all'])
+ self._adb_run(['reverse', '--remove-all'])
+ if self.remote_queue is not None:
+ self.logcat_runner.stop(force=True)
+
+ def executor_browser(self):
+ cls, kwargs = super().executor_browser()
+ kwargs["capabilities"] = {
+ "goog:chromeOptions": {
+ "androidDeviceSerial": self.device_serial
+ }
+ }
+ return cls, kwargs
+
+ def clear_log(self):
+ self._adb_run(['logcat', '-c'])
+
+ def logcat_cmd(self):
+ cmd = [self.adb_binary]
+ if self.device_serial:
+ cmd.extend(['-s', self.device_serial])
+ cmd.extend(['logcat', '*:D'])
+ return cmd
+
+ def check_crash(self, process, test):
+ self.maybe_parse_tombstone()
+        # Existence of a tombstone does not necessarily mean the test target
+        # has crashed. Always return False so we don't change the test results.
+ return False
+
+ def maybe_parse_tombstone(self):
+ if self.stackwalk_binary:
+ cmd = [self.stackwalk_binary, "-a", "-w"]
+ if self.device_serial:
+ cmd.extend(["--device", self.device_serial])
+ cmd.extend(["--output-directory", self.symbols_path])
+ raw_output = subprocess.check_output(cmd)
+ for line in raw_output.splitlines():
+ self.logger.process_output("TRACE", line, "logcat")
+
+ def setup_adb_reverse(self):
+ self._adb_run(['wait-for-device'])
+ self._adb_run(['forward', '--remove-all'])
+ self._adb_run(['reverse', '--remove-all'])
+ # "adb reverse" forwards network connection from device to host.
+ for port in self.wptserver_ports:
+ self._adb_run(['reverse', 'tcp:%d' % port, 'tcp:%d' % port])
+
+
+class ChromeAndroidBrowser(ChromeAndroidBrowserBase):
+ """Chrome is backed by chromedriver, which is supplied through
+ ``wptrunner.webdriver.ChromeDriverServer``.
+ """
+
+ def __init__(self, logger, package_name,
+ webdriver_binary="chromedriver",
+ adb_binary=None,
+                 remote_queue=None,
+ device_serial=None,
+ webdriver_args=None,
+ stackwalk_binary=None,
+ symbols_path=None):
+ super().__init__(logger,
+ webdriver_binary, adb_binary, remote_queue,
+ device_serial, webdriver_args, stackwalk_binary,
+ symbols_path)
+ self.package_name = package_name
+ self.wptserver_ports = _wptserve_ports
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome_ios.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome_ios.py
new file mode 100644
index 0000000000..85c98f2994
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome_ios.py
@@ -0,0 +1,58 @@
+# mypy: allow-untyped-defs
+
+from .base import WebDriverBrowser, require_arg
+from .base import get_timeout_multiplier # noqa: F401
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
+ WebDriverRefTestExecutor) # noqa: F401
+
+
+__wptrunner__ = {"product": "chrome_ios",
+ "check_args": "check_args",
+ "browser": "ChromeiOSBrowser",
+ "executor": {"testharness": "WebDriverTestharnessExecutor",
+ "reftest": "WebDriverRefTestExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+def check_args(**kwargs):
+ require_arg(kwargs, "webdriver_binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs.get("webdriver_args")}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ executor_kwargs = base_executor_kwargs(test_type, test_environment, run_info_data,
+ **kwargs)
+ executor_kwargs["close_after_done"] = True
+ executor_kwargs["capabilities"] = {}
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ # allow the use of host-resolver-rules in lieu of modifying /etc/hosts file
+ return {"server_host": "127.0.0.1"}
+
+
+class ChromeiOSBrowser(WebDriverBrowser):
+ """ChromeiOS is backed by CWTChromeDriver, which is supplied through
+ ``wptrunner.webdriver.CWTChromeDriverServer``.
+ """
+
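+    # Allow CWTChromeDriver plenty of time to come up: init_timeout is used as
+    # the wait_for_service timeout in WebDriverBrowser._run_server.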
+ init_timeout = 120
+
+ def make_command(self):
+ return ([self.webdriver_binary, f"--port={self.port}"] +
+ self.webdriver_args)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome_spki_certs.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome_spki_certs.py
new file mode 100644
index 0000000000..e1f133f572
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chrome_spki_certs.py
@@ -0,0 +1,13 @@
+# This file is automatically generated by 'wpt regen-certs'
+# DO NOT EDIT MANUALLY.
+
+# tools/certs/web-platform.test.pem
+WPT_FINGERPRINT = 'XreVR++++c9QamuUZu0YWHyqsL3PJarhG/0h87zEimI='
+
+# signed-exchange/resources/127.0.0.1.sxg.pem
+SXG_WPT_FINGERPRINT = '0Rt4mT6SJXojEMHTnKnlJ/hBKMBcI4kteBlhR1eTTdk='
+
+IGNORE_CERTIFICATE_ERRORS_SPKI_LIST = [
+ WPT_FINGERPRINT,
+ SXG_WPT_FINGERPRINT
+]
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chromium.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chromium.py
new file mode 100644
index 0000000000..13cb49aed2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/chromium.py
@@ -0,0 +1,57 @@
+# mypy: allow-untyped-defs
+
+from . import chrome
+from .base import NullBrowser # noqa: F401
+from .base import get_timeout_multiplier # noqa: F401
+from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
+ WebDriverRefTestExecutor, # noqa: F401
+ WebDriverCrashtestExecutor) # noqa: F401
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorchrome import ChromeDriverPrintRefTestExecutor # noqa: F401
+
+
+__wptrunner__ = {"product": "chromium",
+ "check_args": "check_args",
+ "browser": "ChromiumBrowser",
+ "executor": {"testharness": "WebDriverTestharnessExecutor",
+ "reftest": "WebDriverRefTestExecutor",
+ "print-reftest": "ChromeDriverPrintRefTestExecutor",
+ "wdspec": "WdspecExecutor",
+ "crashtest": "WebDriverCrashtestExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "update_properties": "update_properties",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+
+# Chromium will rarely need a product definition that is different from Chrome.
+# If any wptrunner options need to differ from Chrome, they can be added as
+# an additional step after the execution of Chrome's functions.
+def check_args(**kwargs):
+ chrome.check_args(**kwargs)
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return chrome.browser_kwargs(logger, test_type, run_info_data, config, **kwargs)
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data, **kwargs):
+ return chrome.executor_kwargs(logger, test_type, test_environment, run_info_data, **kwargs)
+
+
+def env_extras(**kwargs):
+ return chrome.env_extras(**kwargs)
+
+
+def env_options():
+ return chrome.env_options()
+
+
+def update_properties():
+ return chrome.update_properties()
+
+
+class ChromiumBrowser(chrome.ChromeBrowser):
+ pass
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/content_shell.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/content_shell.py
new file mode 100644
index 0000000000..a4b9c9b0d4
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/content_shell.py
@@ -0,0 +1,203 @@
+# mypy: allow-untyped-defs
+
+import os
+from multiprocessing import Queue, Event
+from subprocess import PIPE
+from threading import Thread
+from mozprocess import ProcessHandlerMixin
+
+from . import chrome_spki_certs
+from .base import Browser, ExecutorBrowser
+from .base import get_timeout_multiplier # noqa: F401
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.executorcontentshell import ( # noqa: F401
+ ContentShellCrashtestExecutor,
+ ContentShellPrintRefTestExecutor,
+ ContentShellRefTestExecutor,
+ ContentShellTestharnessExecutor,
+)
+
+
+__wptrunner__ = {"product": "content_shell",
+ "check_args": "check_args",
+ "browser": "ContentShellBrowser",
+ "executor": {
+ "crashtest": "ContentShellCrashtestExecutor",
+ "print-reftest": "ContentShellPrintRefTestExecutor",
+ "reftest": "ContentShellRefTestExecutor",
+ "testharness": "ContentShellTestharnessExecutor",
+ },
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "update_properties": "update_properties",
+ "timeout_multiplier": "get_timeout_multiplier",}
+
+
+def check_args(**kwargs):
+ pass
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ args = list(kwargs["binary_args"])
+
+ args.append("--ignore-certificate-errors-spki-list=%s" %
+ ','.join(chrome_spki_certs.IGNORE_CERTIFICATE_ERRORS_SPKI_LIST))
+
+    webtransport_h3_port = config.ports.get('webtransport-h3')
+    if webtransport_h3_port is not None:
+        args.append(
+            f"--origin-to-force-quic-on=web-platform.test:{webtransport_h3_port[0]}")
+
+ # These flags are specific to content_shell - they activate web test protocol mode.
+ args.append("--run-web-tests")
+ args.append("-")
+
+ return {"binary": kwargs["binary"],
+ "binary_args": args}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ executor_kwargs = base_executor_kwargs(test_type, test_environment, run_info_data,
+ **kwargs)
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ return {"server_host": "127.0.0.1",
+ "testharnessreport": "testharnessreport-content-shell.js"}
+
+
+def update_properties():
+ return (["debug", "os", "processor"], {"os": ["version"], "processor": ["bits"]})
+
+
+class ContentShellBrowser(Browser):
+ """Class that represents an instance of content_shell.
+
+ Upon startup, the stdout, stderr, and stdin pipes of the underlying content_shell
+ process are connected to multiprocessing Queues so that the runner process can
+ interact with content_shell through its protocol mode.
+ """
+
+ def __init__(self, logger, binary="content_shell", binary_args=[], **kwargs):
+ super().__init__(logger)
+
+ self._args = [binary] + binary_args
+ self._proc = None
+
+ def start(self, group_metadata, **kwargs):
+ self.logger.debug("Starting content shell: %s..." % self._args[0])
+
+ # Unfortunately we need to use the Process class directly because we do not
+ # want mozprocess to do any output handling at all.
+ self._proc = ProcessHandlerMixin.Process(self._args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
+ if os.name == "posix":
+ self._proc.pgid = ProcessHandlerMixin._getpgid(self._proc.pid)
+ self._proc.detached_pid = None
+
+ self._stdout_queue = Queue()
+ self._stderr_queue = Queue()
+ self._stdin_queue = Queue()
+ self._io_stopped = Event()
+
+ self._stdout_reader = self._create_reader_thread(self._proc.stdout, self._stdout_queue)
+ self._stderr_reader = self._create_reader_thread(self._proc.stderr, self._stderr_queue)
+ self._stdin_writer = self._create_writer_thread(self._proc.stdin, self._stdin_queue)
+
+ # Content shell is likely still in the process of initializing. The actual waiting
+ # for the startup to finish is done in the ContentShellProtocol.
+ self.logger.debug("Content shell has been started.")
+
+ def stop(self, force=False):
+ self.logger.debug("Stopping content shell...")
+
+ if self.is_alive():
+ kill_result = self._proc.kill(timeout=5)
+ # This makes sure any left-over child processes get killed.
+ # See http://bugzilla.mozilla.org/show_bug.cgi?id=1760080
+ if force and kill_result != 0:
+ self._proc.kill(9, timeout=5)
+
+ # We need to shut down these queues cleanly to avoid broken pipe error spam in the logs.
+ self._stdout_reader.join(2)
+ self._stderr_reader.join(2)
+
+ self._stdin_queue.put(None)
+ self._stdin_writer.join(2)
+
+ for thread in [self._stdout_reader, self._stderr_reader, self._stdin_writer]:
+ if thread.is_alive():
+ self.logger.warning("Content shell IO threads did not shut down gracefully.")
+ return False
+
+ stopped = not self.is_alive()
+ if stopped:
+ self.logger.debug("Content shell has been stopped.")
+ else:
+ self.logger.warning("Content shell failed to stop.")
+
+ return stopped
+
+ def is_alive(self):
+ return self._proc is not None and self._proc.poll() is None
+
+ def pid(self):
+ return self._proc.pid if self._proc else None
+
+ def executor_browser(self):
+ """This function returns the `ExecutorBrowser` object that is used by other
+ processes to interact with content_shell. In our case, this consists of the three
+ multiprocessing Queues as well as an `io_stopped` event to signal when the
+ underlying pipes have reached EOF.
+ """
+ return ExecutorBrowser, {"stdout_queue": self._stdout_queue,
+ "stderr_queue": self._stderr_queue,
+ "stdin_queue": self._stdin_queue,
+ "io_stopped": self._io_stopped}
+
+ def check_crash(self, process, test):
+ return not self.is_alive()
+
+ def _create_reader_thread(self, stream, queue):
+ """This creates (and starts) a background thread which reads lines from `stream` and
+ puts them into `queue` until `stream` reports EOF.
+ """
+ def reader_thread(stream, queue, stop_event):
+ while True:
+ line = stream.readline()
+ if not line:
+ break
+
+ queue.put(line)
+
+ stop_event.set()
+ queue.close()
+ queue.join_thread()
+
+ result = Thread(target=reader_thread, args=(stream, queue, self._io_stopped), daemon=True)
+ result.start()
+ return result
+
+ def _create_writer_thread(self, stream, queue):
+ """This creates (and starts) a background thread which gets items from `queue` and
+ writes them into `stream` until it encounters a None item in the queue.
+ """
+ def writer_thread(stream, queue):
+ while True:
+ line = queue.get()
+ if not line:
+ break
+
+ stream.write(line)
+ stream.flush()
+
+ result = Thread(target=writer_thread, args=(stream, queue), daemon=True)
+ result.start()
+ return result
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/edge.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/edge.py
new file mode 100644
index 0000000000..c6936e77b2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/edge.py
@@ -0,0 +1,109 @@
+# mypy: allow-untyped-defs
+
+import time
+import subprocess
+from .base import require_arg
+from .base import WebDriverBrowser
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorselenium import (SeleniumTestharnessExecutor, # noqa: F401
+ SeleniumRefTestExecutor) # noqa: F401
+
+__wptrunner__ = {"product": "edge",
+ "check_args": "check_args",
+ "browser": "EdgeBrowser",
+ "executor": {"testharness": "SeleniumTestharnessExecutor",
+ "reftest": "SeleniumRefTestExecutor",
+ "wdspec": "WdspecExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "run_info_extras": "run_info_extras",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+
+def get_timeout_multiplier(test_type, run_info_data, **kwargs):
+ if kwargs["timeout_multiplier"] is not None:
+ return kwargs["timeout_multiplier"]
+ if test_type == "wdspec":
+ return 10
+ return 1
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "webdriver_binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs.get("webdriver_args"),
+ "timeout_multiplier": get_timeout_multiplier(test_type,
+ run_info_data,
+ **kwargs)}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ executor_kwargs = base_executor_kwargs(test_type, test_environment, run_info_data, **kwargs)
+ executor_kwargs["close_after_done"] = True
+ executor_kwargs["timeout_multiplier"] = get_timeout_multiplier(test_type,
+ run_info_data,
+ **kwargs)
+ executor_kwargs["capabilities"] = {}
+ if test_type == "testharness":
+ executor_kwargs["capabilities"]["pageLoadStrategy"] = "eager"
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ return {"supports_debugger": False}
+
+
+class EdgeBrowser(WebDriverBrowser):
+ init_timeout = 60
+
+ def __init__(self, logger, binary, webdriver_binary, webdriver_args=None,
+ host="localhost", port=None, base_path="/", env=None, **kwargs):
+ super().__init__(logger, binary, webdriver_binary, webdriver_args=webdriver_args,
+ host=host, port=port, base_path=base_path, env=env, **kwargs)
+ self.host = "localhost"
+
+ def stop(self, force=False):
+        super().stop(force)
+ # Wait for Edge browser process to exit if driver process is found
+ edge_proc_name = 'MicrosoftEdge.exe'
+ for i in range(0, 5):
+ procs = subprocess.check_output(['tasklist', '/fi', 'ImageName eq ' + edge_proc_name])
+ if b'MicrosoftWebDriver.exe' not in procs:
+ # Edge driver process already exited, don't wait for browser process to exit
+ break
+ elif edge_proc_name.encode() in procs:
+ time.sleep(0.5)
+ else:
+ break
+
+ if edge_proc_name.encode() in procs:
+ # close Edge process if it is still running
+ subprocess.call(['taskkill.exe', '/f', '/im', 'microsoftedge*'])
+
+ def make_command(self):
+ return [self.webdriver_binary, f"--port={self.port}"] + self.webdriver_args
+
+
+def run_info_extras(**kwargs):
+ osReleaseCommand = r"(Get-ItemProperty 'HKLM:\Software\Microsoft\Windows NT\CurrentVersion').ReleaseId"
+ osBuildCommand = r"(Get-ItemProperty 'HKLM:\Software\Microsoft\Windows NT\CurrentVersion').BuildLabEx"
+ try:
+ os_release = subprocess.check_output(["powershell.exe", osReleaseCommand]).strip()
+ os_build = subprocess.check_output(["powershell.exe", osBuildCommand]).strip()
+ except (subprocess.CalledProcessError, OSError):
+ return {}
+
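+    # Note: check_output() returns bytes, so os_build and os_release are
+    # recorded here as byte strings.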
+ rv = {"os_build": os_build,
+ "os_release": os_release}
+ return rv
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/edge_webdriver.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/edge_webdriver.py
new file mode 100644
index 0000000000..e985361e41
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/edge_webdriver.py
@@ -0,0 +1,27 @@
+from .base import NullBrowser # noqa: F401
+from .edge import (EdgeBrowser, # noqa: F401
+ check_args, # noqa: F401
+ browser_kwargs, # noqa: F401
+ executor_kwargs, # noqa: F401
+ env_extras, # noqa: F401
+ env_options, # noqa: F401
+ run_info_extras, # noqa: F401
+ get_timeout_multiplier) # noqa: F401
+
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
+ WebDriverRefTestExecutor) # noqa: F401
+
+
+__wptrunner__ = {"product": "edge_webdriver",
+ "check_args": "check_args",
+ "browser": "EdgeBrowser",
+ "executor": {"testharness": "WebDriverTestharnessExecutor",
+ "reftest": "WebDriverRefTestExecutor",
+ "wdspec": "WdspecExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "run_info_extras": "run_info_extras",
+ "timeout_multiplier": "get_timeout_multiplier"}
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/edgechromium.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/edgechromium.py
new file mode 100644
index 0000000000..7dfc5d6c82
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/edgechromium.py
@@ -0,0 +1,97 @@
+# mypy: allow-untyped-defs
+
+from .base import cmd_arg, require_arg
+from .base import WebDriverBrowser
+from .base import get_timeout_multiplier # noqa: F401
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
+ WebDriverRefTestExecutor) # noqa: F401
+
+
+__wptrunner__ = {"product": "edgechromium",
+ "check_args": "check_args",
+ "browser": "EdgeChromiumBrowser",
+ "executor": {"testharness": "WebDriverTestharnessExecutor",
+ "reftest": "WebDriverRefTestExecutor",
+ "wdspec": "WdspecExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "timeout_multiplier": "get_timeout_multiplier",}
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "webdriver_binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"binary": kwargs["binary"],
+ "webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs.get("webdriver_args")}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ executor_kwargs = base_executor_kwargs(test_type,
+ test_environment,
+ run_info_data,
+ **kwargs)
+ executor_kwargs["close_after_done"] = True
+ executor_kwargs["supports_eager_pageload"] = False
+
+ capabilities = {
+ "ms:edgeOptions": {
+ "prefs": {
+ "profile": {
+ "default_content_setting_values": {
+ "popups": 1
+ }
+ }
+ },
+ "useAutomationExtension": False,
+ "excludeSwitches": ["enable-automation"],
+ "w3c": True
+ }
+ }
+
+ if test_type == "testharness":
+ capabilities["pageLoadStrategy"] = "none"
+
+ for (kwarg, capability) in [("binary", "binary"), ("binary_args", "args")]:
+ if kwargs[kwarg] is not None:
+ capabilities["ms:edgeOptions"][capability] = kwargs[kwarg]
+
+ if kwargs["headless"]:
+ if "args" not in capabilities["ms:edgeOptions"]:
+ capabilities["ms:edgeOptions"]["args"] = []
+ if "--headless" not in capabilities["ms:edgeOptions"]["args"]:
+ capabilities["ms:edgeOptions"]["args"].append("--headless")
+ capabilities["ms:edgeOptions"]["args"].append("--use-fake-device-for-media-stream")
+
+ if kwargs["enable_experimental"]:
+ capabilities["ms:edgeOptions"]["args"].append("--enable-experimental-web-platform-features")
+
+ executor_kwargs["capabilities"] = capabilities
+
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ return {}
+
+
+class EdgeChromiumBrowser(WebDriverBrowser):
+ """MicrosoftEdge is backed by MSEdgeDriver, which is supplied through
+ ``wptrunner.webdriver.EdgeChromiumDriverServer``.
+ """
+
+ def make_command(self):
+ return [self.webdriver_binary,
+ cmd_arg("port", str(self.port)),
+ cmd_arg("url-base", self.base_path)] + self.webdriver_args
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/epiphany.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/epiphany.py
new file mode 100644
index 0000000000..912173a52e
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/epiphany.py
@@ -0,0 +1,75 @@
+# mypy: allow-untyped-defs
+
+from .base import (NullBrowser, # noqa: F401
+ certificate_domain_list,
+ get_timeout_multiplier, # noqa: F401
+ maybe_add_args)
+from .webkit import WebKitBrowser # noqa: F401
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
+ WebDriverRefTestExecutor, # noqa: F401
+ WebDriverCrashtestExecutor) # noqa: F401
+
+__wptrunner__ = {"product": "epiphany",
+ "check_args": "check_args",
+ "browser": {None: "WebKitBrowser",
+ "wdspec": "NullBrowser"},
+ "browser_kwargs": "browser_kwargs",
+ "executor": {"testharness": "WebDriverTestharnessExecutor",
+ "reftest": "WebDriverRefTestExecutor",
+ "wdspec": "WdspecExecutor",
+ "crashtest": "WebDriverCrashtestExecutor"},
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "run_info_extras": "run_info_extras",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+
+def check_args(**kwargs):
+ pass
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ # Workaround for https://gitlab.gnome.org/GNOME/libsoup/issues/172
+ webdriver_required_args = ["--host=127.0.0.1"]
+ webdriver_args = maybe_add_args(webdriver_required_args, kwargs.get("webdriver_args"))
+ return {"binary": kwargs["binary"],
+ "webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": webdriver_args}
+
+
+def capabilities(server_config, **kwargs):
+ args = kwargs.get("binary_args", [])
+ if "--automation-mode" not in args:
+ args.append("--automation-mode")
+
+ return {
+ "browserName": "Epiphany",
+ "browserVersion": "3.31.4", # First version to support automation
+ "platformName": "ANY",
+ "webkitgtk:browserOptions": {
+ "binary": kwargs["binary"],
+ "args": args,
+ "certificates": certificate_domain_list(server_config.domains_set, kwargs["host_cert_path"])}}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ executor_kwargs = base_executor_kwargs(test_type, test_environment, run_info_data, **kwargs)
+ executor_kwargs["close_after_done"] = True
+ executor_kwargs["capabilities"] = capabilities(test_environment.config, **kwargs)
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ return {}
+
+
+def run_info_extras(**kwargs):
+ return {"webkit_port": "gtk"}
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/firefox.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/firefox.py
new file mode 100644
index 0000000000..267e7a868e
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/firefox.py
@@ -0,0 +1,969 @@
+# mypy: allow-untyped-defs
+
+import json
+import os
+import platform
+import signal
+import subprocess
+import tempfile
+import time
+from abc import ABCMeta, abstractmethod
+from http.client import HTTPConnection
+
+import mozinfo
+import mozleak
+import mozversion
+from mozprocess import ProcessHandler
+from mozprofile import FirefoxProfile, Preferences
+from mozrunner import FirefoxRunner
+from mozrunner.utils import test_environment, get_stack_fixer_function
+from mozcrash import mozcrash
+
+from .base import (Browser,
+ ExecutorBrowser,
+ WebDriverBrowser,
+ OutputHandler,
+ OutputHandlerState,
+ browser_command,
+ cmd_arg,
+ get_free_port,
+ require_arg)
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.executormarionette import (MarionetteTestharnessExecutor, # noqa: F401
+ MarionetteRefTestExecutor, # noqa: F401
+ MarionettePrintRefTestExecutor, # noqa: F401
+ MarionetteWdspecExecutor, # noqa: F401
+ MarionetteCrashtestExecutor) # noqa: F401
+
+
+__wptrunner__ = {"product": "firefox",
+ "check_args": "check_args",
+ "browser": {None: "FirefoxBrowser",
+ "wdspec": "FirefoxWdSpecBrowser"},
+ "executor": {"crashtest": "MarionetteCrashtestExecutor",
+ "testharness": "MarionetteTestharnessExecutor",
+ "reftest": "MarionetteRefTestExecutor",
+ "print-reftest": "MarionettePrintRefTestExecutor",
+ "wdspec": "MarionetteWdspecExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "run_info_extras": "run_info_extras",
+ "update_properties": "update_properties",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+
+def get_timeout_multiplier(test_type, run_info_data, **kwargs):
+ if kwargs["timeout_multiplier"] is not None:
+ return kwargs["timeout_multiplier"]
+
+ multiplier = 1
+ if run_info_data["verify"]:
+ if kwargs.get("chaos_mode_flags", None) is not None:
+ multiplier = 2
+
+ if test_type == "reftest":
+ if (run_info_data["debug"] or
+ run_info_data.get("asan") or
+ run_info_data.get("tsan")):
+ return 4 * multiplier
+ else:
+ return 2 * multiplier
+ elif (run_info_data["debug"] or
+ run_info_data.get("asan") or
+ run_info_data.get("tsan")):
+ if run_info_data.get("ccov"):
+ return 4 * multiplier
+ else:
+ return 3 * multiplier
+ elif run_info_data["os"] == "android":
+ return 4 * multiplier
+ # https://bugzilla.mozilla.org/show_bug.cgi?id=1538725
+ elif run_info_data["os"] == "win" and run_info_data["processor"] == "aarch64":
+ return 4 * multiplier
+ elif run_info_data.get("ccov"):
+ return 2 * multiplier
+ return 1 * multiplier
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"binary": kwargs["binary"],
+ "webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs["webdriver_args"],
+ "prefs_root": kwargs["prefs_root"],
+ "extra_prefs": kwargs["extra_prefs"],
+ "test_type": test_type,
+ "debug_info": kwargs["debug_info"],
+ "symbols_path": kwargs["symbols_path"],
+ "stackwalk_binary": kwargs["stackwalk_binary"],
+ "certutil_binary": kwargs["certutil_binary"],
+ "ca_certificate_path": config.ssl_config["ca_cert_path"],
+ "e10s": kwargs["gecko_e10s"],
+ "disable_fission": kwargs["disable_fission"],
+ "stackfix_dir": kwargs["stackfix_dir"],
+ "binary_args": kwargs["binary_args"],
+ "timeout_multiplier": get_timeout_multiplier(test_type,
+ run_info_data,
+ **kwargs),
+ "leak_check": run_info_data["debug"] and (kwargs["leak_check"] is not False),
+ "asan": run_info_data.get("asan"),
+ "stylo_threads": kwargs["stylo_threads"],
+ "chaos_mode_flags": kwargs["chaos_mode_flags"],
+ "config": config,
+ "browser_channel": kwargs["browser_channel"],
+ "headless": kwargs["headless"],
+ "preload_browser": kwargs["preload_browser"] and not kwargs["pause_after_test"] and not kwargs["num_test_groups"] == 1,
+ "specialpowers_path": kwargs["specialpowers_path"],
+ "debug_test": kwargs["debug_test"]}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ executor_kwargs = base_executor_kwargs(test_type, test_environment, run_info_data,
+ **kwargs)
+ executor_kwargs["close_after_done"] = test_type != "reftest"
+ executor_kwargs["timeout_multiplier"] = get_timeout_multiplier(test_type,
+ run_info_data,
+ **kwargs)
+ executor_kwargs["e10s"] = run_info_data["e10s"]
+ capabilities = {}
+ if test_type == "testharness":
+ capabilities["pageLoadStrategy"] = "eager"
+ if test_type in ("reftest", "print-reftest"):
+ executor_kwargs["reftest_internal"] = kwargs["reftest_internal"]
+ if test_type == "wdspec":
+ options = {"args": []}
+ if kwargs["binary"]:
+ options["binary"] = kwargs["binary"]
+ if kwargs["binary_args"]:
+ options["args"] = kwargs["binary_args"]
+
+ if not kwargs["binary"] and kwargs["headless"] and "--headless" not in options["args"]:
+ options["args"].append("--headless")
+
+ capabilities["moz:firefoxOptions"] = options
+
+ if kwargs["certutil_binary"] is None:
+ capabilities["acceptInsecureCerts"] = True
+ if capabilities:
+ executor_kwargs["capabilities"] = capabilities
+ executor_kwargs["debug"] = run_info_data["debug"]
+ executor_kwargs["ccov"] = run_info_data.get("ccov", False)
+ executor_kwargs["browser_version"] = run_info_data.get("browser_version")
+ executor_kwargs["debug_test"] = kwargs["debug_test"]
+ executor_kwargs["disable_fission"] = kwargs["disable_fission"]
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ # The server host is set to 127.0.0.1 as Firefox is configured (through the
+ # network.dns.localDomains preference set below) to resolve the test
+ # domains to localhost without relying on the network stack.
+ #
+ # https://github.com/web-platform-tests/wpt/pull/9480
+ return {"server_host": "127.0.0.1",
+ "supports_debugger": True}
+
+
+def run_info_extras(**kwargs):
+
+ def get_bool_pref_if_exists(pref):
+ for key, value in kwargs.get('extra_prefs', []):
+ if pref == key:
+ return value.lower() in ('true', '1')
+ return None
+
+ def get_bool_pref(pref):
+ pref_value = get_bool_pref_if_exists(pref)
+ return pref_value if pref_value is not None else False
+
+ # Default fission to on, unless we get --disable-fission
+ rv = {"e10s": kwargs["gecko_e10s"],
+ "wasm": kwargs.get("wasm", True),
+ "verify": kwargs["verify"],
+ "headless": kwargs.get("headless", False) or "MOZ_HEADLESS" in os.environ,
+ "fission": not kwargs.get("disable_fission"),
+ "sessionHistoryInParent": (not kwargs.get("disable_fission") or
+ get_bool_pref("fission.sessionHistoryInParent")),
+ "swgl": get_bool_pref("gfx.webrender.software")}
+
+ rv.update(run_info_browser_version(**kwargs))
+
+ return rv
+
+
+def run_info_browser_version(**kwargs):
+ try:
+ version_info = mozversion.get_version(kwargs["binary"])
+ except mozversion.errors.VersionError:
+ version_info = None
+ if version_info:
+ rv = {"browser_build_id": version_info.get("application_buildid", None),
+ "browser_changeset": version_info.get("application_changeset", None)}
+ if "browser_version" not in kwargs:
+ rv["browser_version"] = version_info.get("application_version")
+ return rv
+ return {}
+
+
+def update_properties():
+ return (["os", "debug", "fission", "processor", "swgl", "domstreams"],
+ {"os": ["version"], "processor": ["bits"]})
+
+
+def log_gecko_crashes(logger, process, test, profile_dir, symbols_path, stackwalk_binary):
+ dump_dir = os.path.join(profile_dir, "minidumps")
+
+ try:
+ return bool(mozcrash.log_crashes(logger,
+ dump_dir,
+ symbols_path=symbols_path,
+ stackwalk_binary=stackwalk_binary,
+ process=process,
+ test=test))
+ except OSError:
+ logger.warning("Looking for crash dump files failed")
+ return False
+
+
+def get_environ(logger, binary, debug_info, stylo_threads, headless,
+ chaos_mode_flags=None):
+ env = test_environment(xrePath=os.path.abspath(os.path.dirname(binary)),
+ debugger=debug_info is not None,
+ useLSan=True,
+ log=logger)
+
+ env["STYLO_THREADS"] = str(stylo_threads)
+ # Disable window occlusion. Bug 1733955
+ env["MOZ_WINDOW_OCCLUSION"] = "0"
+ if chaos_mode_flags is not None:
+ env["MOZ_CHAOSMODE"] = hex(chaos_mode_flags)
+ if headless:
+ env["MOZ_HEADLESS"] = "1"
+ return env
+
+
+def setup_leak_report(leak_check, profile, env):
+ leak_report_file = None
+ if leak_check:
+ filename = "runtests_leaks_%s.log" % os.getpid()
+ if profile is not None:
+ leak_report_file = os.path.join(profile.profile, filename)
+ else:
+ leak_report_file = os.path.join(tempfile.gettempdir(), filename)
+ if os.path.exists(leak_report_file):
+ os.remove(leak_report_file)
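+        # Gecko writes its leak/bloat log to the path given in XPCOM_MEM_BLOAT_LOG;
+        # FirefoxOutputHandler.after_process_stop processes it via mozleak.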
+ env["XPCOM_MEM_BLOAT_LOG"] = leak_report_file
+
+ return leak_report_file
+
+
+class FirefoxInstanceManager:
+ __metaclass__ = ABCMeta
+
+ def __init__(self, logger, binary, binary_args, profile_creator, debug_info,
+ chaos_mode_flags, headless, stylo_threads,
+ leak_check, stackfix_dir, symbols_path, asan):
+ """Object that manages starting and stopping instances of Firefox."""
+ self.logger = logger
+ self.binary = binary
+ self.binary_args = binary_args
+ self.base_profile = profile_creator.create()
+ self.debug_info = debug_info
+ self.chaos_mode_flags = chaos_mode_flags
+ self.headless = headless
+ self.stylo_threads = stylo_threads
+ self.leak_check = leak_check
+ self.stackfix_dir = stackfix_dir
+ self.symbols_path = symbols_path
+ self.asan = asan
+
+ self.previous = None
+ self.current = None
+
+ @abstractmethod
+ def teardown(self, force=False):
+ pass
+
+ @abstractmethod
+ def get(self):
+ """Get a BrowserInstance for a running Firefox.
+
+ This can only be called once per instance, and between calls stop_current()
+ must be called."""
+ pass
+
+ def stop_current(self, force=False):
+ """Shutdown the current instance of Firefox.
+
+ The BrowserInstance remains available through self.previous, since some
+ operations happen after shutdown."""
+ if not self.current:
+ return
+
+ self.current.stop(force)
+ self.previous = self.current
+ self.current = None
+
+ def start(self):
+ """Start an instance of Firefox, returning a BrowserInstance handle"""
+ profile = self.base_profile.clone(self.base_profile.profile)
+
+ marionette_port = get_free_port()
+ profile.set_preferences({"marionette.port": marionette_port})
+
+ env = get_environ(self.logger, self.binary, self.debug_info, self.stylo_threads,
+ self.headless, self.chaos_mode_flags)
+
+ args = self.binary_args[:] if self.binary_args else []
+ args += [cmd_arg("marionette"), "about:blank"]
+
+ debug_args, cmd = browser_command(self.binary,
+ args,
+ self.debug_info)
+
+ leak_report_file = setup_leak_report(self.leak_check, profile, env)
+ output_handler = FirefoxOutputHandler(self.logger,
+ cmd,
+ stackfix_dir=self.stackfix_dir,
+ symbols_path=self.symbols_path,
+ asan=self.asan,
+ leak_report_file=leak_report_file)
+ runner = FirefoxRunner(profile=profile,
+ binary=cmd[0],
+ cmdargs=cmd[1:],
+ env=env,
+ process_class=ProcessHandler,
+ process_args={"processOutputLine": [output_handler]})
+ instance = BrowserInstance(self.logger, runner, marionette_port,
+ output_handler, leak_report_file)
+
+ self.logger.debug("Starting Firefox")
+ runner.start(debug_args=debug_args,
+ interactive=self.debug_info and self.debug_info.interactive)
+ output_handler.after_process_start(runner.process_handler.pid)
+ self.logger.debug("Firefox Started")
+
+ return instance
+
+
+class SingleInstanceManager(FirefoxInstanceManager):
+ """FirefoxInstanceManager that manages a single Firefox instance"""
+ def get(self):
+ assert not self.current, ("Tried to call get() on InstanceManager that has "
+ "an existing instance")
+ if self.previous:
+ self.previous.cleanup()
+ self.previous = None
+ self.current = self.start()
+ return self.current
+
+ def teardown(self, force=False):
+ for instance in [self.previous, self.current]:
+ if instance:
+ instance.stop(force)
+ instance.cleanup()
+ self.base_profile.cleanup()
+
+
+class PreloadInstanceManager(FirefoxInstanceManager):
+ def __init__(self, *args, **kwargs):
+ """FirefoxInstanceManager that keeps once Firefox instance preloaded
+ to allow rapid resumption after an instance shuts down."""
+ super().__init__(*args, **kwargs)
+ self.pending = None
+
+ def get(self):
+ assert not self.current, ("Tried to call get() on InstanceManager that has "
+ "an existing instance")
+ if self.previous:
+ self.previous.cleanup()
+ self.previous = None
+ if not self.pending:
+ self.pending = self.start()
+ self.current = self.pending
+ self.pending = self.start()
+ return self.current
+
+ def teardown(self, force=False):
+ for instance, unused in [(self.previous, False),
+ (self.current, False),
+ (self.pending, True)]:
+ if instance:
+ instance.stop(force, unused)
+ instance.cleanup()
+ self.base_profile.cleanup()
+
+
+class BrowserInstance:
+ shutdown_timeout = 70
+
+ def __init__(self, logger, runner, marionette_port, output_handler, leak_report_file):
+ """Handle to a running Firefox instance"""
+ self.logger = logger
+ self.runner = runner
+ self.marionette_port = marionette_port
+ self.output_handler = output_handler
+ self.leak_report_file = leak_report_file
+
+ def stop(self, force=False, unused=False):
+ """Stop Firefox
+
+ :param force: Signal the firefox process without waiting for a clean shutdown
+ :param unused: This instance was not used for running tests and so
+ doesn't have an active marionette session and doesn't require
+ output postprocessing.
+ """
+ is_running = self.runner is not None and self.runner.is_running()
+ if is_running:
+ self.logger.debug("Stopping Firefox %s" % self.pid())
+ shutdown_methods = [(True, lambda: self.runner.wait(self.shutdown_timeout)),
+ (False, lambda: self.runner.stop(signal.SIGTERM,
+ self.shutdown_timeout))]
+ if hasattr(signal, "SIGKILL"):
+ shutdown_methods.append((False, lambda: self.runner.stop(signal.SIGKILL,
+ self.shutdown_timeout)))
+ if unused or force:
+ # Don't wait for the instance to close itself
+ shutdown_methods = shutdown_methods[1:]
+ try:
+ # For Firefox we assume that stopping the runner prompts the
+ # browser to shut down. This allows the leak log to be written
+ for i, (clean, stop_f) in enumerate(shutdown_methods):
+ self.logger.debug("Shutting down attempt %i/%i" % (i + 1, len(shutdown_methods)))
+ retcode = stop_f()
+ if retcode is not None:
+ self.logger.info("Browser exited with return code %s" % retcode)
+ break
+ except OSError:
+ # This can happen on Windows if the process is already dead
+ pass
+ elif self.runner:
+ # The browser was already stopped, which we assume was a crash
+ # TODO: Should we check the exit code here?
+ clean = False
+ if not unused:
+ self.output_handler.after_process_stop(clean_shutdown=clean)
+
+ def pid(self):
+ if self.runner.process_handler is None:
+ return None
+
+ try:
+ return self.runner.process_handler.pid
+ except AttributeError:
+ return None
+
+ def is_alive(self):
+ if self.runner:
+ return self.runner.is_running()
+ return False
+
+ def cleanup(self):
+ self.runner.cleanup()
+ self.runner = None
+
+
+class FirefoxOutputHandler(OutputHandler):
+ def __init__(self, logger, command, symbols_path=None, stackfix_dir=None, asan=False,
+ leak_report_file=None):
+ """Filter for handling Firefox process output.
+
+ This receives Firefox process output in the __call__ function, does
+ any additional processing that's required, and decides whether to log
+ the output. Because the Firefox process can be started before we know
+ which filters are going to be required, we buffer all output until
+ setup() is called. This is responsible for doing the final configuration
+ of the output handlers.
+ """
+
+ super().__init__(logger, command)
+
+ self.symbols_path = symbols_path
+ if stackfix_dir:
+ # We hide errors because they cause disconcerting `CRITICAL`
+ # warnings in web platform test output.
+ self.stack_fixer = get_stack_fixer_function(stackfix_dir,
+ self.symbols_path,
+ hideErrors=True)
+ else:
+ self.stack_fixer = None
+ self.asan = asan
+ self.leak_report_file = leak_report_file
+
+ # These are filled in after configure_handlers() is called
+ self.lsan_handler = None
+ self.mozleak_allowed = None
+ self.mozleak_thresholds = None
+ self.group_metadata = {}
+
+ def start(self, group_metadata=None, lsan_disabled=False, lsan_allowed=None,
+ lsan_max_stack_depth=None, mozleak_allowed=None, mozleak_thresholds=None,
+ **kwargs):
+ """Configure the output handler"""
+ if group_metadata is None:
+ group_metadata = {}
+ self.group_metadata = group_metadata
+
+ self.mozleak_allowed = mozleak_allowed
+ self.mozleak_thresholds = mozleak_thresholds
+
+ if self.asan:
+ self.lsan_handler = mozleak.LSANLeaks(self.logger,
+ scope=group_metadata.get("scope", "/"),
+ allowed=lsan_allowed,
+ maxNumRecordedFrames=lsan_max_stack_depth,
+ allowAll=lsan_disabled)
+ else:
+ self.lsan_handler = None
+ super().start()
+
+ def after_process_stop(self, clean_shutdown=True):
+ super().after_process_stop(clean_shutdown)
+ if self.lsan_handler:
+ self.lsan_handler.process()
+ if self.leak_report_file is not None:
+ if not clean_shutdown:
+ # If we didn't get a clean shutdown there probably isn't a leak report file
+ self.logger.warning("Firefox didn't exit cleanly, not processing leak logs")
+ else:
+ # We have to ignore missing leaks in the tab because it can happen that the
+ # content process crashed and in that case we don't want the test to fail.
+ # Ideally we would record which content process crashed and just skip those.
+ self.logger.info("PROCESS LEAKS %s" % self.leak_report_file)
+ mozleak.process_leak_log(
+ self.leak_report_file,
+ leak_thresholds=self.mozleak_thresholds,
+ ignore_missing_leaks=["tab", "gmplugin"],
+ log=self.logger,
+ stack_fixer=self.stack_fixer,
+ scope=self.group_metadata.get("scope"),
+ allowed=self.mozleak_allowed)
+ if os.path.exists(self.leak_report_file):
+ os.unlink(self.leak_report_file)
+
+ def __call__(self, line):
+ """Write a line of output from the firefox process to the log"""
+ if b"GLib-GObject-CRITICAL" in line:
+ return
+ if line:
+ if self.state < OutputHandlerState.AFTER_HANDLER_START:
+ self.line_buffer.append(line)
+ return
+ data = line.decode("utf8", "replace")
+ if self.stack_fixer:
+ data = self.stack_fixer(data)
+ if self.lsan_handler:
+ data = self.lsan_handler.log(data)
+ if data is not None:
+ self.logger.process_output(self.pid,
+ data,
+ command=" ".join(self.command))
+
+
+class ProfileCreator:
+ def __init__(self, logger, prefs_root, config, test_type, extra_prefs, e10s,
+ disable_fission, debug_test, browser_channel, binary, certutil_binary,
+ ca_certificate_path):
+ self.logger = logger
+ self.prefs_root = prefs_root
+ self.config = config
+ self.test_type = test_type
+ self.extra_prefs = extra_prefs
+ self.e10s = e10s
+ self.disable_fission = disable_fission
+ self.debug_test = debug_test
+ self.browser_channel = browser_channel
+ self.ca_certificate_path = ca_certificate_path
+ self.binary = binary
+ self.certutil_binary = certutil_binary
+
+ def create(self, **kwargs):
+ """Create a Firefox profile and return the mozprofile Profile object pointing at that
+ profile
+
+ :param kwargs: Additional arguments to pass into the profile constructor
+ """
+ preferences = self._load_prefs()
+
+ profile = FirefoxProfile(preferences=preferences,
+ restore=False,
+ **kwargs)
+ self._set_required_prefs(profile)
+ if self.ca_certificate_path is not None:
+ self._setup_ssl(profile)
+
+ return profile
+
+ def _load_prefs(self):
+ prefs = Preferences()
+
+ pref_paths = []
+
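+        # profiles.json maps the "web-platform-tests" key to a list of profile
+        # directories, each containing a user.js prefs file; "unittest-features"
+        # prefs are only applied on the nightly channel (or when no channel is set).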
+ profiles = os.path.join(self.prefs_root, 'profiles.json')
+ if os.path.isfile(profiles):
+ with open(profiles) as fh:
+ for name in json.load(fh)['web-platform-tests']:
+ if self.browser_channel in (None, 'nightly'):
+ pref_paths.append(os.path.join(self.prefs_root, name, 'user.js'))
+ elif name != 'unittest-features':
+ pref_paths.append(os.path.join(self.prefs_root, name, 'user.js'))
+ else:
+ # Old preference files used before the creation of profiles.json (remove when no longer supported)
+ legacy_pref_paths = (
+ os.path.join(self.prefs_root, 'prefs_general.js'), # Used in Firefox 60 and below
+ os.path.join(self.prefs_root, 'common', 'user.js'), # Used in Firefox 61
+ )
+ for path in legacy_pref_paths:
+ if os.path.isfile(path):
+ pref_paths.append(path)
+
+ for path in pref_paths:
+ if os.path.exists(path):
+ prefs.add(Preferences.read_prefs(path))
+ else:
+ self.logger.warning("Failed to find base prefs file in %s" % path)
+
+ # Add any custom preferences
+ prefs.add(self.extra_prefs, cast=True)
+
+ return prefs()
+
+ def _set_required_prefs(self, profile):
+ """Set preferences required for wptrunner to function.
+
+ Note that this doesn't set the marionette port, since we don't always
+        know that at profile creation time. So the caller is responsible for
+ setting that once it's available."""
+ profile.set_preferences({
+ "network.dns.localDomains": ",".join(self.config.domains_set),
+ "dom.file.createInChild": True,
+ # TODO: Remove preferences once Firefox 64 is stable (Bug 905404)
+ "network.proxy.type": 0,
+ "places.history.enabled": False,
+ "network.preload": True,
+ })
+ if self.e10s:
+ profile.set_preferences({"browser.tabs.remote.autostart": True})
+
+ profile.set_preferences({"fission.autostart": True})
+ if self.disable_fission:
+ profile.set_preferences({"fission.autostart": False})
+
+ if self.test_type in ("reftest", "print-reftest"):
+ profile.set_preferences({"layout.interruptible-reflow.enabled": False})
+
+ if self.test_type == "print-reftest":
+ profile.set_preferences({"print.always_print_silent": True})
+
+ # Bug 1262954: winxp + e10s, disable hwaccel
+ if (self.e10s and platform.system() in ("Windows", "Microsoft") and
+ "5.1" in platform.version()):
+ profile.set_preferences({"layers.acceleration.disabled": True})
+
+ if self.debug_test:
+ profile.set_preferences({"devtools.console.stdout.content": True})
+
+ def _setup_ssl(self, profile):
+ """Create a certificate database to use in the test profile. This is configured
+ to trust the CA Certificate that has signed the web-platform.test server
+ certificate."""
+ if self.certutil_binary is None:
+ self.logger.info("--certutil-binary not supplied; Firefox will not check certificates")
+ return
+
+ self.logger.info("Setting up ssl")
+
+ # Make sure the certutil libraries from the source tree are loaded when using a
+ # local copy of certutil
+ # TODO: Maybe only set this if certutil won't launch?
+ env = os.environ.copy()
+ certutil_dir = os.path.dirname(self.binary or self.certutil_binary)
+ if mozinfo.isMac:
+ env_var = "DYLD_LIBRARY_PATH"
+ elif mozinfo.isUnix:
+ env_var = "LD_LIBRARY_PATH"
+ else:
+ env_var = "PATH"
+
+ env[env_var] = (os.path.pathsep.join([certutil_dir, env[env_var]])
+ if env_var in env else certutil_dir)
+
+ def certutil(*args):
+ cmd = [self.certutil_binary] + list(args)
+ self.logger.process_output("certutil",
+ subprocess.check_output(cmd,
+ env=env,
+ stderr=subprocess.STDOUT),
+ " ".join(cmd))
+
+ pw_path = os.path.join(profile.profile, ".crtdbpw")
+ with open(pw_path, "w") as f:
+ # Use empty password for certificate db
+ f.write("\n")
+
+ cert_db_path = profile.profile
+
+ # Create a new certificate db
+ certutil("-N", "-d", cert_db_path, "-f", pw_path)
+
+ # Add the CA certificate to the database and mark as trusted to issue server certs
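+        # The trust flags "CT,," set only the SSL trust attributes: C marks a
+        # valid CA for issuing server certs, T a trusted CA for client certs.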
+ certutil("-A", "-d", cert_db_path, "-f", pw_path, "-t", "CT,,",
+ "-n", "web-platform-tests", "-i", self.ca_certificate_path)
+
+ # List all certs in the database
+ certutil("-L", "-d", cert_db_path)
+
+
+class FirefoxBrowser(Browser):
+ init_timeout = 70
+
+ def __init__(self, logger, binary, prefs_root, test_type, extra_prefs=None, debug_info=None,
+ symbols_path=None, stackwalk_binary=None, certutil_binary=None,
+ ca_certificate_path=None, e10s=False, disable_fission=False,
+ stackfix_dir=None, binary_args=None, timeout_multiplier=None, leak_check=False,
+ asan=False, stylo_threads=1, chaos_mode_flags=None, config=None,
+ browser_channel="nightly", headless=None, preload_browser=False,
+ specialpowers_path=None, debug_test=False, **kwargs):
+ Browser.__init__(self, logger)
+
+ self.logger = logger
+
+ if timeout_multiplier:
+ self.init_timeout = self.init_timeout * timeout_multiplier
+
+ self.instance = None
+ self._settings = None
+
+ self.stackfix_dir = stackfix_dir
+ self.symbols_path = symbols_path
+ self.stackwalk_binary = stackwalk_binary
+
+ self.asan = asan
+ self.leak_check = leak_check
+
+ self.specialpowers_path = specialpowers_path
+
+ profile_creator = ProfileCreator(logger,
+ prefs_root,
+ config,
+ test_type,
+ extra_prefs,
+ e10s,
+ disable_fission,
+ debug_test,
+ browser_channel,
+ binary,
+ certutil_binary,
+ ca_certificate_path)
+
+ if preload_browser:
+ instance_manager_cls = PreloadInstanceManager
+ else:
+ instance_manager_cls = SingleInstanceManager
+ self.instance_manager = instance_manager_cls(logger,
+ binary,
+ binary_args,
+ profile_creator,
+ debug_info,
+ chaos_mode_flags,
+ headless,
+ stylo_threads,
+ leak_check,
+ stackfix_dir,
+ symbols_path,
+ asan)
+
+ def settings(self, test):
+ self._settings = {"check_leaks": self.leak_check and not test.leaks,
+ "lsan_disabled": test.lsan_disabled,
+ "lsan_allowed": test.lsan_allowed,
+ "lsan_max_stack_depth": test.lsan_max_stack_depth,
+ "mozleak_allowed": self.leak_check and test.mozleak_allowed,
+ "mozleak_thresholds": self.leak_check and test.mozleak_threshold,
+ "special_powers": self.specialpowers_path and test.url_base == "/_mozilla/"}
+ return self._settings
+
+ def start(self, group_metadata=None, **kwargs):
+ self.instance = self.instance_manager.get()
+ self.instance.output_handler.start(group_metadata,
+ **kwargs)
+
+ def stop(self, force=False):
+ self.instance_manager.stop_current(force)
+ self.logger.debug("stopped")
+
+ def pid(self):
+ return self.instance.pid()
+
+ def is_alive(self):
+ return self.instance and self.instance.is_alive()
+
+ def cleanup(self, force=False):
+ self.instance_manager.teardown(force)
+
+ def executor_browser(self):
+ assert self.instance is not None
+ extensions = []
+ if self._settings.get("special_powers", False):
+ extensions.append(self.specialpowers_path)
+ return ExecutorBrowser, {"marionette_port": self.instance.marionette_port,
+ "extensions": extensions,
+ "supports_devtools": True}
+
+ def check_crash(self, process, test):
+ return log_gecko_crashes(self.logger,
+ process,
+ test,
+ self.instance.runner.profile.profile,
+ self.symbols_path,
+ self.stackwalk_binary)
+
+
+class FirefoxWdSpecBrowser(WebDriverBrowser):
+ def __init__(self, logger, binary, prefs_root, webdriver_binary, webdriver_args,
+ extra_prefs=None, debug_info=None, symbols_path=None, stackwalk_binary=None,
+ certutil_binary=None, ca_certificate_path=None, e10s=False,
+ disable_fission=False, stackfix_dir=None, leak_check=False,
+ asan=False, stylo_threads=1, chaos_mode_flags=None, config=None,
+ browser_channel="nightly", headless=None, debug_test=False, **kwargs):
+
+ super().__init__(logger, binary, webdriver_binary, webdriver_args)
+ self.binary = binary
+ self.webdriver_binary = webdriver_binary
+
+ self.stackfix_dir = stackfix_dir
+ self.symbols_path = symbols_path
+ self.stackwalk_binary = stackwalk_binary
+
+ self.asan = asan
+ self.leak_check = leak_check
+ self.leak_report_file = None
+
+ self.env = self.get_env(binary, debug_info, stylo_threads, headless, chaos_mode_flags)
+
+ profile_creator = ProfileCreator(logger,
+ prefs_root,
+ config,
+ "wdspec",
+ extra_prefs,
+ e10s,
+ disable_fission,
+ debug_test,
+ browser_channel,
+ binary,
+ certutil_binary,
+ ca_certificate_path)
+
+ self.profile = profile_creator.create()
+ self.marionette_port = None
+
+ def get_env(self, binary, debug_info, stylo_threads, headless, chaos_mode_flags):
+ env = get_environ(self.logger,
+ binary,
+ debug_info,
+ stylo_threads,
+ headless,
+ chaos_mode_flags)
+ env["RUST_BACKTRACE"] = "1"
+        # MOZ_DISABLE_NONLOCAL_CONNECTIONS doesn't work with wdspec tests.
+        # In particular, tests can create a session without passing in capabilities,
+        # and in those cases we get the default geckodriver profile, which doesn't
+        # guarantee zero network access.
+ del env["MOZ_DISABLE_NONLOCAL_CONNECTIONS"]
+ return env
+
+ def create_output_handler(self, cmd):
+ return FirefoxOutputHandler(self.logger,
+ cmd,
+ stackfix_dir=self.stackfix_dir,
+ symbols_path=self.symbols_path,
+ asan=self.asan,
+ leak_report_file=self.leak_report_file)
+
+ def start(self, group_metadata, **kwargs):
+ self.leak_report_file = setup_leak_report(self.leak_check, self.profile, self.env)
+ super().start(group_metadata, **kwargs)
+
+ def stop(self, force=False):
+        # Unless the process has to be force stopped, wait for any WebDriver
+        # session to shut down cleanly first.
+        # When this is called the executor is usually sending an end session
+        # command to the browser. We don't have a synchronisation mechanism
+        # that tells us when that command has completed, so poll the /status
+        # endpoint until there is no active session before killing the driver.
+ if self.is_alive() and not force:
+ end_time = time.time() + BrowserInstance.shutdown_timeout
+ while time.time() < end_time:
+ self.logger.debug("Waiting for WebDriver session to end")
+ try:
+ self.logger.debug(f"Connecting to http://{self.host}:{self.port}/status")
+ conn = HTTPConnection(self.host, self.port)
+ conn.request("GET", "/status")
+ res = conn.getresponse()
+ self.logger.debug(f"Got response from http://{self.host}:{self.port}/status")
+ except Exception:
+ self.logger.debug(
+ f"Connecting to http://{self.host}:{self.port}/status failed")
+ break
+ if res.status != 200:
+ self.logger.debug(f"Connecting to http://{self.host}:{self.port}/status "
+ f"gave status {res.status}")
+ break
+ data = res.read()
+ try:
+ msg = json.loads(data)
+ except ValueError:
+ self.logger.debug("/status response was not valid JSON")
+ break
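+                # geckodriver only reports ready=true when it can accept a new
+                # session, i.e. when the previous session has ended.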
+ if msg.get("value", {}).get("ready") is True:
+ self.logger.debug("Got ready status")
+ break
+ self.logger.debug(f"Got status response {data}")
+ time.sleep(1)
+ else:
+ self.logger.debug("WebDriver session didn't end")
+ super().stop(force=force)
+
+ def cleanup(self):
+ super().cleanup()
+ self.profile.cleanup()
+
+ def settings(self, test):
+ return {"check_leaks": self.leak_check and not test.leaks,
+ "lsan_disabled": test.lsan_disabled,
+ "lsan_allowed": test.lsan_allowed,
+ "lsan_max_stack_depth": test.lsan_max_stack_depth,
+ "mozleak_allowed": self.leak_check and test.mozleak_allowed,
+ "mozleak_thresholds": self.leak_check and test.mozleak_threshold}
+
+ def make_command(self):
+ return [self.webdriver_binary,
+ "--host", self.host,
+ "--port", str(self.port)] + self.webdriver_args
+
+ def executor_browser(self):
+ cls, args = super().executor_browser()
+ args["supports_devtools"] = False
+ args["profile"] = self.profile.profile
+ return cls, args
+
+ def check_crash(self, process, test):
+ return log_gecko_crashes(self.logger,
+ process,
+ test,
+ self.profile.profile,
+ self.symbols_path,
+ self.stackwalk_binary)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/firefox_android.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/firefox_android.py
new file mode 100644
index 0000000000..fe23c027f4
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/firefox_android.py
@@ -0,0 +1,367 @@
+# mypy: allow-untyped-defs
+
+import os
+
+from mozrunner import FennecEmulatorRunner, get_app_context
+
+from .base import (get_free_port,
+ cmd_arg,
+ browser_command)
+from ..executors.executormarionette import (MarionetteTestharnessExecutor, # noqa: F401
+ MarionetteRefTestExecutor, # noqa: F401
+ MarionetteCrashtestExecutor, # noqa: F401
+ MarionetteWdspecExecutor) # noqa: F401
+from .base import (Browser,
+ ExecutorBrowser)
+from .firefox import (get_timeout_multiplier, # noqa: F401
+ run_info_extras as fx_run_info_extras,
+ update_properties, # noqa: F401
+ executor_kwargs as fx_executor_kwargs, # noqa: F401
+ FirefoxWdSpecBrowser,
+ ProfileCreator as FirefoxProfileCreator)
+
+
+__wptrunner__ = {"product": "firefox_android",
+ "check_args": "check_args",
+ "browser": {None: "FirefoxAndroidBrowser",
+ "wdspec": "FirefoxAndroidWdSpecBrowser"},
+ "executor": {"testharness": "MarionetteTestharnessExecutor",
+ "reftest": "MarionetteRefTestExecutor",
+ "crashtest": "MarionetteCrashtestExecutor",
+ "wdspec": "MarionetteWdspecExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "run_info_extras": "run_info_extras",
+ "update_properties": "update_properties",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+
+def check_args(**kwargs):
+ pass
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"adb_binary": kwargs["adb_binary"],
+ "webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs["webdriver_args"],
+ "package_name": kwargs["package_name"],
+ "device_serial": kwargs["device_serial"],
+ "prefs_root": kwargs["prefs_root"],
+ "extra_prefs": kwargs["extra_prefs"],
+ "test_type": test_type,
+ "debug_info": kwargs["debug_info"],
+ "symbols_path": kwargs["symbols_path"],
+ "stackwalk_binary": kwargs["stackwalk_binary"],
+ "certutil_binary": kwargs["certutil_binary"],
+ "ca_certificate_path": config.ssl_config["ca_cert_path"],
+ "stackfix_dir": kwargs["stackfix_dir"],
+ "binary_args": kwargs["binary_args"],
+ "timeout_multiplier": get_timeout_multiplier(test_type,
+ run_info_data,
+ **kwargs),
+ "e10s": run_info_data["e10s"],
+ "disable_fission": kwargs["disable_fission"],
+ # desktop only
+ "leak_check": False,
+ "stylo_threads": kwargs["stylo_threads"],
+ "chaos_mode_flags": kwargs["chaos_mode_flags"],
+ "config": config,
+ "install_fonts": kwargs["install_fonts"],
+ "tests_root": config.doc_root,
+ "specialpowers_path": kwargs["specialpowers_path"],
+ "debug_test": kwargs["debug_test"]}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ rv = fx_executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs)
+ if test_type == "wdspec":
+ rv["capabilities"]["moz:firefoxOptions"]["androidPackage"] = kwargs["package_name"]
+ return rv
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def run_info_extras(**kwargs):
+ rv = fx_run_info_extras(**kwargs)
+ package = kwargs["package_name"]
+ rv.update({"e10s": True if package is not None and "geckoview" in package else False,
+ "headless": False})
+ return rv
+
+
+def env_options():
+ return {"server_host": "127.0.0.1",
+ "supports_debugger": True}
+
+
+def get_environ(stylo_threads, chaos_mode_flags):
+ env = {}
+ env["MOZ_CRASHREPORTER"] = "1"
+ env["MOZ_CRASHREPORTER_SHUTDOWN"] = "1"
+ env["MOZ_DISABLE_NONLOCAL_CONNECTIONS"] = "1"
+ env["STYLO_THREADS"] = str(stylo_threads)
+ if chaos_mode_flags is not None:
+ env["MOZ_CHAOSMODE"] = hex(chaos_mode_flags)
+ return env
+
+
+class ProfileCreator(FirefoxProfileCreator):
+ def __init__(self, logger, prefs_root, config, test_type, extra_prefs,
+ disable_fission, debug_test, browser_channel, certutil_binary, ca_certificate_path):
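+        # GeckoView is always e10s and there is no host-side binary, so pass
+        # e10s=True and binary=None through to the desktop ProfileCreator.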
+ super().__init__(logger, prefs_root, config, test_type, extra_prefs,
+ True, disable_fission, debug_test, browser_channel, None,
+ certutil_binary, ca_certificate_path)
+
+ def _set_required_prefs(self, profile):
+ profile.set_preferences({
+ "network.dns.localDomains": ",".join(self.config.domains_set),
+ "dom.disable_open_during_load": False,
+ "places.history.enabled": False,
+ "dom.send_after_paint_to_content": True,
+ "network.preload": True,
+ "browser.tabs.remote.autostart": True,
+ })
+
+ if self.test_type == "reftest":
+ self.logger.info("Setting android reftest preferences")
+ profile.set_preferences({
+ "browser.viewport.desktopWidth": 800,
+ # Disable high DPI
+ "layout.css.devPixelsPerPx": "1.0",
+ # Ensure that the full browser element
+ # appears in the screenshot
+ "apz.allow_zooming": False,
+ "android.widget_paints_background": False,
+ # Ensure that scrollbars are always painted
+ "layout.testing.overlay-scrollbars.always-visible": True,
+ })
+
+ profile.set_preferences({"fission.autostart": True})
+ if self.disable_fission:
+ profile.set_preferences({"fission.autostart": False})
+
+
+class FirefoxAndroidBrowser(Browser):
+ init_timeout = 300
+ shutdown_timeout = 60
+
+ def __init__(self, logger, prefs_root, test_type, package_name="org.mozilla.geckoview.test_runner",
+ device_serial=None, extra_prefs=None, debug_info=None,
+ symbols_path=None, stackwalk_binary=None, certutil_binary=None,
+ ca_certificate_path=None, e10s=False, stackfix_dir=None,
+ binary_args=None, timeout_multiplier=None, leak_check=False, asan=False,
+ stylo_threads=1, chaos_mode_flags=None, config=None, browser_channel="nightly",
+ install_fonts=False, tests_root=None, specialpowers_path=None, adb_binary=None,
+ debug_test=False, disable_fission=False, **kwargs):
+
+ super().__init__(logger)
+ self.prefs_root = prefs_root
+ self.test_type = test_type
+ self.package_name = package_name
+ self.device_serial = device_serial
+ self.debug_info = debug_info
+ self.symbols_path = symbols_path
+ self.stackwalk_binary = stackwalk_binary
+ self.certutil_binary = certutil_binary
+ self.ca_certificate_path = ca_certificate_path
+ self.e10s = True
+ self.stackfix_dir = stackfix_dir
+ self.binary_args = binary_args
+ self.timeout_multiplier = timeout_multiplier
+ self.leak_check = leak_check
+ self.asan = asan
+ self.stylo_threads = stylo_threads
+ self.chaos_mode_flags = chaos_mode_flags
+ self.config = config
+ self.browser_channel = browser_channel
+ self.install_fonts = install_fonts
+ self.tests_root = tests_root
+ self.specialpowers_path = specialpowers_path
+ self.adb_binary = adb_binary
+ self.disable_fission = disable_fission
+
+ self.profile_creator = ProfileCreator(logger,
+ prefs_root,
+ config,
+ test_type,
+ extra_prefs,
+ disable_fission,
+ debug_test,
+ browser_channel,
+ certutil_binary,
+ ca_certificate_path)
+
+ self.marionette_port = None
+ self.profile = None
+ self.runner = None
+ self._settings = {}
+
+ def settings(self, test):
+ self._settings = {"check_leaks": self.leak_check and not test.leaks,
+ "lsan_allowed": test.lsan_allowed,
+ "lsan_max_stack_depth": test.lsan_max_stack_depth,
+ "mozleak_allowed": self.leak_check and test.mozleak_allowed,
+ "mozleak_thresholds": self.leak_check and test.mozleak_threshold,
+ "special_powers": self.specialpowers_path and test.url_base == "/_mozilla/"}
+ return self._settings
+
+ def start(self, **kwargs):
+ if self.marionette_port is None:
+ self.marionette_port = get_free_port()
+
+ addons = [self.specialpowers_path] if self._settings.get("special_powers") else None
+ self.profile = self.profile_creator.create(addons=addons)
+ self.profile.set_preferences({"marionette.port": self.marionette_port})
+
+ if self.install_fonts:
+ self.logger.debug("Copying Ahem font to profile")
+ font_dir = os.path.join(self.profile.profile, "fonts")
+ if not os.path.exists(font_dir):
+ os.makedirs(font_dir)
+ with open(os.path.join(self.tests_root, "fonts", "Ahem.ttf"), "rb") as src:
+ with open(os.path.join(font_dir, "Ahem.ttf"), "wb") as dest:
+ dest.write(src.read())
+
+ self.leak_report_file = None
+
+        # Combine any extra binary args with the marionette flag and the
+        # initial about:blank URL.
+        debug_args, cmd = browser_command(self.package_name,
+                                          (self.binary_args or []) +
+                                          [cmd_arg("marionette"), "about:blank"],
+                                          self.debug_info)
+
+ env = get_environ(self.stylo_threads, self.chaos_mode_flags)
+
+ self.runner = FennecEmulatorRunner(app=self.package_name,
+ profile=self.profile,
+ cmdargs=cmd[1:],
+ env=env,
+ symbols_path=self.symbols_path,
+ serial=self.device_serial,
+ # TODO - choose appropriate log dir
+ logdir=os.getcwd(),
+ adb_path=self.adb_binary,
+ explicit_cleanup=True)
+
+ self.logger.debug("Starting %s" % self.package_name)
+ # connect to a running emulator
+ self.runner.device.connect()
+
+ self.runner.stop()
+ self.runner.start(debug_args=debug_args,
+ interactive=self.debug_info and self.debug_info.interactive)
+
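+        # Forward the Marionette port so the harness on the host can reach the
+        # browser running on the device.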
+ self.runner.device.device.forward(
+ local=f"tcp:{self.marionette_port}",
+ remote=f"tcp:{self.marionette_port}")
+
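+        # Reverse-forward the wpt server ports so that the browser on the
+        # device can reach the test servers running on the host.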
+ for ports in self.config.ports.values():
+ for port in ports:
+ self.runner.device.device.reverse(
+ local=f"tcp:{port}",
+ remote=f"tcp:{port}")
+
+ self.logger.debug("%s Started" % self.package_name)
+
+ def stop(self, force=False):
+ if self.runner is not None:
+ if self.runner.device.connected:
+ try:
+ self.runner.device.device.remove_forwards()
+ self.runner.device.device.remove_reverses()
+ except Exception as e:
+ self.logger.warning("Failed to remove forwarded or reversed ports: %s" % e)
+ # We assume that stopping the runner prompts the
+ # browser to shut down.
+ self.runner.cleanup()
+ self.logger.debug("stopped")
+
+ def pid(self):
+ if self.runner.process_handler is None:
+ return None
+
+ try:
+ return self.runner.process_handler.pid
+ except AttributeError:
+ return None
+
+ def is_alive(self):
+ if self.runner:
+ return self.runner.is_running()
+ return False
+
+ def cleanup(self, force=False):
+ self.stop(force)
+
+ def executor_browser(self):
+ return ExecutorBrowser, {"marionette_port": self.marionette_port,
+ # We never want marionette to install extensions because
+ # that doesn't work on Android; instead they are in the profile
+ "extensions": [],
+ "supports_devtools": False}
+
+ def check_crash(self, process, test):
+ if not os.environ.get("MINIDUMP_STACKWALK", "") and self.stackwalk_binary:
+ os.environ["MINIDUMP_STACKWALK"] = self.stackwalk_binary
+ return bool(self.runner.check_for_crashes(test_name=test))
+
+
+class FirefoxAndroidWdSpecBrowser(FirefoxWdSpecBrowser):
+ def __init__(self, logger, prefs_root, webdriver_binary, webdriver_args,
+ extra_prefs=None, debug_info=None, symbols_path=None, stackwalk_binary=None,
+ certutil_binary=None, ca_certificate_path=None, e10s=False,
+ disable_fission=False, stackfix_dir=None, leak_check=False,
+ asan=False, stylo_threads=1, chaos_mode_flags=None, config=None,
+ browser_channel="nightly", headless=None,
+ package_name="org.mozilla.geckoview.test_runner", device_serial=None,
+ adb_binary=None, **kwargs):
+
+ super().__init__(logger, None, prefs_root, webdriver_binary, webdriver_args,
+ extra_prefs=extra_prefs, debug_info=debug_info, symbols_path=symbols_path,
+ stackwalk_binary=stackwalk_binary, certutil_binary=certutil_binary,
+ ca_certificate_path=ca_certificate_path, e10s=e10s,
+ disable_fission=disable_fission, stackfix_dir=stackfix_dir,
+ leak_check=leak_check, asan=asan, stylo_threads=stylo_threads,
+ chaos_mode_flags=chaos_mode_flags, config=config,
+ browser_channel=browser_channel, headless=headless, **kwargs)
+
+ self.config = config
+ self.package_name = package_name
+ self.device_serial = device_serial
+ # This is just to support the same adb lookup as for other test types
+ context = get_app_context("fennec")(adb_path=adb_binary, device_serial=device_serial)
+ self.device = context.get_device(context.adb, self.device_serial)
+
+ def start(self, group_metadata, **kwargs):
+ for ports in self.config.ports.values():
+ for port in ports:
+ self.device.reverse(
+ local=f"tcp:{port}",
+ remote=f"tcp:{port}")
+ super().start(group_metadata, **kwargs)
+
+ def stop(self, force=False):
+ try:
+ self.device.remove_reverses()
+ except Exception as e:
+ self.logger.warning("Failed to remove forwarded or reversed ports: %s" % e)
+ super().stop(force=force)
+
+ def get_env(self, binary, debug_info, stylo_threads, headless, chaos_mode_flags):
+ env = get_environ(stylo_threads, chaos_mode_flags)
+ env["RUST_BACKTRACE"] = "1"
+ del env["MOZ_DISABLE_NONLOCAL_CONNECTIONS"]
+ return env
+
+ def executor_browser(self):
+ cls, args = super().executor_browser()
+ args["androidPackage"] = self.package_name
+ args["androidDeviceSerial"] = self.device_serial
+ args["env"] = self.env
+ args["supports_devtools"] = False
+ return cls, args
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/ie.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/ie.py
new file mode 100644
index 0000000000..87b989c028
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/ie.py
@@ -0,0 +1,50 @@
+# mypy: allow-untyped-defs
+
+from .base import require_arg, WebDriverBrowser
+from .base import get_timeout_multiplier # noqa: F401
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.base import WdspecExecutor # noqa: F401
+
+__wptrunner__ = {"product": "ie",
+ "check_args": "check_args",
+ "browser": "WebDriverBrowser",
+ "executor": {"wdspec": "WdspecExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "webdriver_binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs.get("webdriver_args")}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ options = {}
+ options["requireWindowFocus"] = True
+ capabilities = {}
+ capabilities["se:ieOptions"] = options
+ executor_kwargs = base_executor_kwargs(test_type, test_environment, run_info_data, **kwargs)
+ executor_kwargs["close_after_done"] = True
+ executor_kwargs["capabilities"] = capabilities
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ return {"supports_debugger": False}
+
+
+class InternetExplorerBrowser(WebDriverBrowser):
+ def make_command(self):
+ return [self.binary, f"--port={self.port}"] + self.webdriver_args
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/opera.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/opera.py
new file mode 100644
index 0000000000..a2448f4a90
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/opera.py
@@ -0,0 +1,70 @@
+# mypy: allow-untyped-defs
+
+from .base import require_arg
+from .base import get_timeout_multiplier # noqa: F401
+from .chrome import ChromeBrowser
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorselenium import (SeleniumTestharnessExecutor, # noqa: F401
+ SeleniumRefTestExecutor) # noqa: F401
+
+
+__wptrunner__ = {"product": "opera",
+ "check_args": "check_args",
+ "browser": "OperaBrowser",
+ "executor": {"testharness": "SeleniumTestharnessExecutor",
+ "reftest": "SeleniumRefTestExecutor",
+ "wdspec": "WdspecExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "webdriver_binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"binary": kwargs["binary"],
+ "webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs.get("webdriver_args")}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ from selenium.webdriver import DesiredCapabilities
+
+ executor_kwargs = base_executor_kwargs(test_type, test_environment, run_info_data, **kwargs)
+ executor_kwargs["close_after_done"] = True
+ capabilities = dict(DesiredCapabilities.OPERA.items())
+ capabilities.setdefault("operaOptions", {})["prefs"] = {
+ "profile": {
+ "default_content_setting_values": {
+ "popups": 1
+ }
+ }
+ }
+ for (kwarg, capability) in [("binary", "binary"), ("binary_args", "args")]:
+ if kwargs[kwarg] is not None:
+ capabilities["operaOptions"][capability] = kwargs[kwarg]
+ if test_type == "testharness":
+ capabilities["operaOptions"]["useAutomationExtension"] = False
+ capabilities["operaOptions"]["excludeSwitches"] = ["enable-automation"]
+ if test_type == "wdspec":
+ capabilities["operaOptions"]["w3c"] = True
+ executor_kwargs["capabilities"] = capabilities
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ return {}
+
+
+class OperaBrowser(ChromeBrowser):
+ pass
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/safari.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/safari.py
new file mode 100644
index 0000000000..ba533f4bc3
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/safari.py
@@ -0,0 +1,207 @@
+# mypy: allow-untyped-defs
+
+import os
+import plistlib
+from distutils.spawn import find_executable
+from distutils.version import LooseVersion
+
+import psutil
+
+from .base import WebDriverBrowser, require_arg
+from .base import get_timeout_multiplier # noqa: F401
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
+ WebDriverRefTestExecutor, # noqa: F401
+ WebDriverCrashtestExecutor) # noqa: F401
+
+
+__wptrunner__ = {"product": "safari",
+ "check_args": "check_args",
+ "browser": "SafariBrowser",
+ "executor": {"testharness": "WebDriverTestharnessExecutor",
+ "reftest": "WebDriverRefTestExecutor",
+ "wdspec": "WdspecExecutor",
+ "crashtest": "WebDriverCrashtestExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "run_info_extras": "run_info_extras",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "webdriver_binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs.get("webdriver_args"),
+ "kill_safari": kwargs.get("kill_safari", False)}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data, **kwargs):
+ executor_kwargs = base_executor_kwargs(test_type, test_environment, run_info_data, **kwargs)
+ executor_kwargs["close_after_done"] = True
+ executor_kwargs["capabilities"] = {}
+ if test_type == "testharness":
+ executor_kwargs["capabilities"]["pageLoadStrategy"] = "eager"
+ if kwargs["binary"] is not None:
+ raise ValueError("Safari doesn't support setting executable location")
+
+ V = LooseVersion
+ browser_bundle_version = run_info_data["browser_bundle_version"]
+ if browser_bundle_version is not None and V(browser_bundle_version[2:]) >= V("613.1.7.1"):
+ logger.debug("using acceptInsecureCerts=True")
+ executor_kwargs["capabilities"]["acceptInsecureCerts"] = True
+ else:
+ logger.warning("not using acceptInsecureCerts, Safari will require certificates to be trusted")
+
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ return {}
+
+
+def run_info_extras(**kwargs):
+ webdriver_binary = kwargs["webdriver_binary"]
+ rv = {}
+
+ safari_bundle, safari_info = get_safari_info(webdriver_binary)
+
+ if safari_info is not None:
+ assert safari_bundle is not None # if safari_info is not None, this can't be
+ _, webkit_info = get_webkit_info(safari_bundle)
+ if webkit_info is None:
+ webkit_info = {}
+ else:
+ safari_info = {}
+ webkit_info = {}
+
+ rv["browser_marketing_version"] = safari_info.get("CFBundleShortVersionString")
+ rv["browser_bundle_version"] = safari_info.get("CFBundleVersion")
+ rv["browser_webkit_bundle_version"] = webkit_info.get("CFBundleVersion")
+
+ with open("/System/Library/CoreServices/SystemVersion.plist", "rb") as fp:
+ system_version = plistlib.load(fp)
+
+ rv["os_build"] = system_version["ProductBuildVersion"]
+
+ return rv
+
+
+def get_safari_info(wd_path):
+ bundle_paths = [
+ os.path.join(os.path.dirname(wd_path), "..", ".."), # bundled Safari (e.g. STP)
+ os.path.join(os.path.dirname(wd_path), "Safari.app"), # local Safari build
+ "/Applications/Safari.app", # system Safari
+ ]
+
+ for bundle_path in bundle_paths:
+ info_path = os.path.join(bundle_path, "Contents", "Info.plist")
+ if not os.path.isfile(info_path):
+ continue
+
+ with open(info_path, "rb") as fp:
+ info = plistlib.load(fp)
+
+ # check we have a Safari family bundle
+ ident = info.get("CFBundleIdentifier")
+ if not isinstance(ident, str) or not ident.startswith("com.apple.Safari"):
+ continue
+
+ return (bundle_path, info)
+
+ return (None, None)
+
+
+def get_webkit_info(safari_bundle_path):
+ framework_paths = [
+ os.path.join(os.path.dirname(safari_bundle_path), "Contents", "Frameworks"), # bundled Safari (e.g. STP)
+ os.path.join(os.path.dirname(safari_bundle_path), ".."), # local Safari build
+ "/System/Library/PrivateFrameworks",
+ "/Library/Frameworks",
+ "/System/Library/Frameworks",
+ ]
+
+ for framework_path in framework_paths:
+ info_path = os.path.join(framework_path, "WebKit.framework", "Versions", "Current", "Resources", "Info.plist")
+ if not os.path.isfile(info_path):
+ continue
+
+ with open(info_path, "rb") as fp:
+ info = plistlib.load(fp)
+ return (framework_path, info)
+
+ return (None, None)
+
+
+class SafariBrowser(WebDriverBrowser):
+ """Safari is backed by safaridriver, which is supplied through
+ ``wptrunner.webdriver.SafariDriverServer``.
+ """
+ def __init__(self, logger, binary=None, webdriver_binary=None, webdriver_args=None,
+ port=None, env=None, kill_safari=False, **kwargs):
+ """Creates a new representation of Safari. The `webdriver_binary`
+ argument gives the WebDriver binary to use for testing. (The browser
+ binary location cannot be specified, as Safari and SafariDriver are
+ coupled.) If `kill_safari` is True, then `Browser.stop` will stop Safari."""
+ super().__init__(logger,
+ binary,
+ webdriver_binary,
+ webdriver_args=webdriver_args,
+ port=None,
+ supports_pac=False,
+ env=env)
+
+ if "/" not in webdriver_binary:
+ wd_path = find_executable(webdriver_binary)
+ else:
+ wd_path = webdriver_binary
+ self.safari_path = self._find_safari_executable(wd_path)
+
+ logger.debug("WebDriver executable path: %s" % wd_path)
+ logger.debug("Safari executable path: %s" % self.safari_path)
+
+ self.kill_safari = kill_safari
+
+ def _find_safari_executable(self, wd_path):
+ bundle_path, info = get_safari_info(wd_path)
+
+ exe = info.get("CFBundleExecutable")
+ if not isinstance(exe, str):
+ return None
+
+ exe_path = os.path.join(bundle_path, "Contents", "MacOS", exe)
+ if not os.path.isfile(exe_path):
+ return None
+
+ return exe_path
+
+ def make_command(self):
+ return [self.webdriver_binary, f"--port={self.port}"] + self.webdriver_args
+
+ def stop(self, force=False):
+ super().stop(force)
+
+ if self.kill_safari:
+ self.logger.debug("Going to stop Safari")
+ for proc in psutil.process_iter(attrs=["exe"]):
+ if (proc.info["exe"] is not None and
+ os.path.samefile(proc.info["exe"], self.safari_path)):
+ self.logger.debug("Stopping Safari %s" % proc.pid)
+ try:
+ proc.terminate()
+ try:
+ proc.wait(10)
+ except psutil.TimeoutExpired:
+ proc.kill()
+ proc.wait(10)
+ except psutil.NoSuchProcess:
+ pass
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/sauce.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/sauce.py
new file mode 100644
index 0000000000..0f7651638d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/sauce.py
@@ -0,0 +1,249 @@
+# mypy: allow-untyped-defs
+
+import glob
+import os
+import shutil
+import subprocess
+import tarfile
+import tempfile
+import time
+
+import requests
+
+from io import BytesIO
+
+from .base import Browser, ExecutorBrowser, require_arg
+from .base import get_timeout_multiplier # noqa: F401
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.executorselenium import (SeleniumTestharnessExecutor, # noqa: F401
+ SeleniumRefTestExecutor) # noqa: F401
+
+here = os.path.dirname(__file__)
+# Number of seconds to wait between polling operations when detecting status of
+# Sauce Connect sub-process.
+sc_poll_period = 1
+
+
+__wptrunner__ = {"product": "sauce",
+ "check_args": "check_args",
+ "browser": "SauceBrowser",
+ "executor": {"testharness": "SeleniumTestharnessExecutor",
+ "reftest": "SeleniumRefTestExecutor"},
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+
+def get_capabilities(**kwargs):
+ browser_name = kwargs["sauce_browser"]
+ platform = kwargs["sauce_platform"]
+ version = kwargs["sauce_version"]
+ build = kwargs["sauce_build"]
+ tags = kwargs["sauce_tags"]
+ tunnel_id = kwargs["sauce_tunnel_id"]
+ prerun_script = {
+ "MicrosoftEdge": {
+ "executable": "sauce-storage:edge-prerun.bat",
+ "background": False,
+ },
+ "safari": {
+ "executable": "sauce-storage:safari-prerun.sh",
+ "background": False,
+ }
+ }
+ capabilities = {
+ "browserName": browser_name,
+ "build": build,
+ "disablePopupHandler": True,
+ "name": f"{browser_name} {version} on {platform}",
+ "platform": platform,
+ "public": "public",
+ "selenium-version": "3.3.1",
+ "tags": tags,
+ "tunnel-identifier": tunnel_id,
+ "version": version,
+ "prerun": prerun_script.get(browser_name)
+ }
+
+ return capabilities
+
+
+def get_sauce_config(**kwargs):
+ browser_name = kwargs["sauce_browser"]
+ sauce_user = kwargs["sauce_user"]
+ sauce_key = kwargs["sauce_key"]
+
+ hub_url = f"{sauce_user}:{sauce_key}@localhost:4445"
+ data = {
+ "url": "http://%s/wd/hub" % hub_url,
+ "browserName": browser_name,
+ "capabilities": get_capabilities(**kwargs)
+ }
+
+ return data
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "sauce_browser")
+ require_arg(kwargs, "sauce_platform")
+ require_arg(kwargs, "sauce_version")
+ require_arg(kwargs, "sauce_user")
+ require_arg(kwargs, "sauce_key")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ sauce_config = get_sauce_config(**kwargs)
+
+ return {"sauce_config": sauce_config}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ executor_kwargs = base_executor_kwargs(test_type, test_environment, run_info_data, **kwargs)
+
+ executor_kwargs["capabilities"] = get_capabilities(**kwargs)
+
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return [SauceConnect(**kwargs)]
+
+
+def env_options():
+ return {"supports_debugger": False}
+
+
+def get_tar(url, dest):
+ resp = requests.get(url, stream=True)
+ resp.raise_for_status()
+    with tarfile.open(fileobj=BytesIO(resp.raw.read())) as f:
+ f.extractall(path=dest)
+
+
+class SauceConnect():
+
+ def __init__(self, **kwargs):
+ self.sauce_user = kwargs["sauce_user"]
+ self.sauce_key = kwargs["sauce_key"]
+ self.sauce_tunnel_id = kwargs["sauce_tunnel_id"]
+ self.sauce_connect_binary = kwargs.get("sauce_connect_binary")
+ self.sauce_connect_args = kwargs.get("sauce_connect_args")
+ self.sauce_init_timeout = kwargs.get("sauce_init_timeout")
+ self.sc_process = None
+ self.temp_dir = None
+ self.env_config = None
+
+ def __call__(self, env_options, env_config):
+ self.env_config = env_config
+
+ return self
+
+ def __enter__(self):
+ # Because this class implements the context manager protocol, it is
+ # possible for instances to be provided to the `with` statement
+ # directly. This class implements the callable protocol so that data
+ # which is not available during object initialization can be provided
+ # prior to this moment. Instances must be invoked in preparation for
+ # the context manager protocol, but this additional constraint is not
+ # itself part of the protocol.
+        assert self.env_config is not None, 'The instance has not been invoked.'
+
+ if not self.sauce_connect_binary:
+ self.temp_dir = tempfile.mkdtemp()
+ get_tar("https://saucelabs.com/downloads/sc-4.4.9-linux.tar.gz", self.temp_dir)
+ self.sauce_connect_binary = glob.glob(os.path.join(self.temp_dir, "sc-*-linux/bin/sc"))[0]
+
+ self.upload_prerun_exec('edge-prerun.bat')
+ self.upload_prerun_exec('safari-prerun.sh')
+
+ self.sc_process = subprocess.Popen([
+ self.sauce_connect_binary,
+ "--user=%s" % self.sauce_user,
+ "--api-key=%s" % self.sauce_key,
+ "--no-remove-colliding-tunnels",
+ "--tunnel-identifier=%s" % self.sauce_tunnel_id,
+ "--metrics-address=0.0.0.0:9876",
+ "--readyfile=./sauce_is_ready",
+ "--tunnel-domains",
+ ",".join(self.env_config.domains_set)
+ ] + self.sauce_connect_args)
+
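+        # sc writes the --readyfile once the tunnel is established, so poll for
+        # that file (or for the process exiting early) before proceeding.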
+ tot_wait = 0
+ while not os.path.exists('./sauce_is_ready') and self.sc_process.poll() is None:
+ if not self.sauce_init_timeout or (tot_wait >= self.sauce_init_timeout):
+ self.quit()
+
+ raise SauceException("Sauce Connect Proxy was not ready after %d seconds" % tot_wait)
+
+ time.sleep(sc_poll_period)
+ tot_wait += sc_poll_period
+
+ if self.sc_process.returncode is not None:
+ raise SauceException("Unable to start Sauce Connect Proxy. Process exited with code %s", self.sc_process.returncode)
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.env_config = None
+ self.quit()
+ if self.temp_dir and os.path.exists(self.temp_dir):
+ try:
+ shutil.rmtree(self.temp_dir)
+ except OSError:
+ pass
+
+ def upload_prerun_exec(self, file_name):
+ auth = (self.sauce_user, self.sauce_key)
+ url = f"https://saucelabs.com/rest/v1/storage/{self.sauce_user}/{file_name}?overwrite=true"
+
+ with open(os.path.join(here, 'sauce_setup', file_name), 'rb') as f:
+ requests.post(url, data=f, auth=auth)
+
+ def quit(self):
+ """The Sauce Connect process may be managing an active "tunnel" to the
+ Sauce Labs service. Issue a request to the process to close any tunnels
+ and exit. If this does not occur within 5 seconds, force the process to
+ close."""
+ kill_wait = 5
+ tot_wait = 0
+ self.sc_process.terminate()
+
+ while self.sc_process.poll() is None:
+ time.sleep(sc_poll_period)
+ tot_wait += sc_poll_period
+
+ if tot_wait >= kill_wait:
+ self.sc_process.kill()
+ break
+
+
+class SauceException(Exception):
+ pass
+
+
+class SauceBrowser(Browser):
+ init_timeout = 300
+
+ def __init__(self, logger, sauce_config, **kwargs):
+ Browser.__init__(self, logger)
+ self.sauce_config = sauce_config
+
+ def start(self, **kwargs):
+ pass
+
+ def stop(self, force=False):
+ pass
+
+ def pid(self):
+ return None
+
+ def is_alive(self):
+ # TODO: Should this check something about the connection?
+ return True
+
+ def cleanup(self):
+ pass
+
+ def executor_browser(self):
+ return ExecutorBrowser, {"webdriver_url": self.sauce_config["url"]}
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/sauce_setup/edge-prerun.bat b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/sauce_setup/edge-prerun.bat
new file mode 100755
index 0000000000..1a3e6fee30
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/sauce_setup/edge-prerun.bat
@@ -0,0 +1,9 @@
+@echo off
+reg add "HKCU\Software\Classes\Local Settings\Software\Microsoft\Windows\CurrentVersion\AppContainer\Storage\microsoft.microsoftedge_8wekyb3d8bbwe\MicrosoftEdge\New Windows" /v "PopupMgr" /t REG_SZ /d no
+
+
+REM Download and install the Ahem font
+REM - https://wiki.saucelabs.com/display/DOCS/Downloading+Files+to+a+Sauce+Labs+Virtual+Machine+Prior+to+Testing
+REM - https://superuser.com/questions/201896/how-do-i-install-a-font-from-the-windows-command-prompt
+bitsadmin.exe /transfer "JobName" https://github.com/web-platform-tests/wpt/raw/master/fonts/Ahem.ttf "%WINDIR%\Fonts\Ahem.ttf"
+reg add "HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Fonts" /v "Ahem (TrueType)" /t REG_SZ /d Ahem.ttf /f
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/sauce_setup/safari-prerun.sh b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/sauce_setup/safari-prerun.sh
new file mode 100755
index 0000000000..39390e618f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/sauce_setup/safari-prerun.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+curl https://raw.githubusercontent.com/web-platform-tests/wpt/master/fonts/Ahem.ttf > ~/Library/Fonts/Ahem.ttf
+defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2JavaScriptCanOpenWindowsAutomatically -bool true
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/servo.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/servo.py
new file mode 100644
index 0000000000..d57804f977
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/servo.py
@@ -0,0 +1,118 @@
+# mypy: allow-untyped-defs
+
+import os
+
+from .base import ExecutorBrowser, NullBrowser, WebDriverBrowser, require_arg
+from .base import get_timeout_multiplier # noqa: F401
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorservo import (ServoCrashtestExecutor, # noqa: F401
+ ServoTestharnessExecutor, # noqa: F401
+ ServoRefTestExecutor) # noqa: F401
+
+
+here = os.path.dirname(__file__)
+
+__wptrunner__ = {
+ "product": "servo",
+ "check_args": "check_args",
+ "browser": {None: "ServoBrowser",
+ "wdspec": "ServoWdspecBrowser"},
+ "executor": {
+ "crashtest": "ServoCrashtestExecutor",
+ "testharness": "ServoTestharnessExecutor",
+ "reftest": "ServoRefTestExecutor",
+ "wdspec": "WdspecExecutor",
+ },
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "timeout_multiplier": "get_timeout_multiplier",
+ "update_properties": "update_properties",
+}
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {
+ "binary": kwargs["binary"],
+ "debug_info": kwargs["debug_info"],
+ "binary_args": kwargs["binary_args"],
+ "user_stylesheets": kwargs.get("user_stylesheets"),
+ "ca_certificate_path": config.ssl_config["ca_cert_path"],
+ }
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ rv = base_executor_kwargs(test_type, test_environment, run_info_data, **kwargs)
+ rv["pause_after_test"] = kwargs["pause_after_test"]
+ if test_type == "wdspec":
+ rv["capabilities"] = {}
+ return rv
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ return {"server_host": "127.0.0.1",
+ "bind_address": False,
+ "testharnessreport": "testharnessreport-servo.js",
+ "supports_debugger": True}
+
+
+def update_properties():
+ return ["debug", "os", "processor"], {"os": ["version"], "processor": ["bits"]}
+
+
+class ServoBrowser(NullBrowser):
+ def __init__(self, logger, binary, debug_info=None, binary_args=None,
+ user_stylesheets=None, ca_certificate_path=None, **kwargs):
+ NullBrowser.__init__(self, logger)
+ self.binary = binary
+ self.debug_info = debug_info
+ self.binary_args = binary_args or []
+ self.user_stylesheets = user_stylesheets or []
+ self.ca_certificate_path = ca_certificate_path
+
+ def executor_browser(self):
+ return ExecutorBrowser, {
+ "binary": self.binary,
+ "debug_info": self.debug_info,
+ "binary_args": self.binary_args,
+ "user_stylesheets": self.user_stylesheets,
+ "ca_certificate_path": self.ca_certificate_path,
+ }
+
+
+class ServoWdspecBrowser(WebDriverBrowser):
+    # TODO: could share an implementation with servodriver.py, perhaps
+ def __init__(self, logger, binary="servo", webdriver_args=None,
+ binary_args=None, host="127.0.0.1", env=None, port=None):
+
+ env = os.environ.copy() if env is None else env
+ env["RUST_BACKTRACE"] = "1"
+
+ super().__init__(logger,
+ binary,
+ None,
+ webdriver_args=webdriver_args,
+ host=host,
+ port=port,
+ env=env)
+ self.binary_args = binary_args
+
+ def make_command(self):
+ command = [self.binary,
+ f"--webdriver={self.port}",
+ "--hard-fail",
+ "--headless"] + self.webdriver_args
+ if self.binary_args:
+ command += self.binary_args
+ return command
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/servodriver.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/servodriver.py
new file mode 100644
index 0000000000..5195fa6442
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/servodriver.py
@@ -0,0 +1,184 @@
+# mypy: allow-untyped-defs
+
+import os
+import subprocess
+import tempfile
+
+from mozprocess import ProcessHandler
+
+from tools.serve.serve import make_hosts_file
+
+from .base import (Browser,
+ ExecutorBrowser,
+ OutputHandler,
+ require_arg,
+ get_free_port,
+ browser_command)
+from .base import get_timeout_multiplier # noqa: F401
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.executorservodriver import (ServoWebDriverTestharnessExecutor, # noqa: F401
+ ServoWebDriverRefTestExecutor) # noqa: F401
+
+here = os.path.dirname(__file__)
+
+__wptrunner__ = {
+ "product": "servodriver",
+ "check_args": "check_args",
+ "browser": "ServoWebDriverBrowser",
+ "executor": {
+ "testharness": "ServoWebDriverTestharnessExecutor",
+ "reftest": "ServoWebDriverRefTestExecutor",
+ },
+ "browser_kwargs": "browser_kwargs",
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "timeout_multiplier": "get_timeout_multiplier",
+ "update_properties": "update_properties",
+}
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "binary")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {
+ "binary": kwargs["binary"],
+ "binary_args": kwargs["binary_args"],
+ "debug_info": kwargs["debug_info"],
+ "server_config": config,
+ "user_stylesheets": kwargs.get("user_stylesheets"),
+ "headless": kwargs.get("headless"),
+ }
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data, **kwargs):
+ rv = base_executor_kwargs(test_type, test_environment, run_info_data, **kwargs)
+ return rv
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ return {"server_host": "127.0.0.1",
+ "testharnessreport": "testharnessreport-servodriver.js",
+ "supports_debugger": True}
+
+
+def update_properties():
+ return (["debug", "os", "processor"], {"os": ["version"], "processor": ["bits"]})
+
+
+def write_hosts_file(config):
+ hosts_fd, hosts_path = tempfile.mkstemp()
+ with os.fdopen(hosts_fd, "w") as f:
+ f.write(make_hosts_file(config, "127.0.0.1"))
+ return hosts_path
+
+
+class ServoWebDriverBrowser(Browser):
+ init_timeout = 300 # Large timeout for cases where we're booting an Android emulator
+
+ def __init__(self, logger, binary, debug_info=None, webdriver_host="127.0.0.1",
+ server_config=None, binary_args=None,
+ user_stylesheets=None, headless=None, **kwargs):
+ Browser.__init__(self, logger)
+ self.binary = binary
+ self.binary_args = binary_args or []
+ self.webdriver_host = webdriver_host
+ self.webdriver_port = None
+ self.proc = None
+ self.debug_info = debug_info
+ self.hosts_path = write_hosts_file(server_config)
+ self.server_ports = server_config.ports if server_config else {}
+ self.command = None
+ self.user_stylesheets = user_stylesheets if user_stylesheets else []
+ self.headless = headless if headless else False
+ self.ca_certificate_path = server_config.ssl_config["ca_cert_path"]
+ self.output_handler = None
+
+ def start(self, **kwargs):
+ self.webdriver_port = get_free_port()
+
+ env = os.environ.copy()
+ env["HOST_FILE"] = self.hosts_path
+ env["RUST_BACKTRACE"] = "1"
+ env["EMULATOR_REVERSE_FORWARD_PORTS"] = ",".join(
+ str(port)
+ for _protocol, ports in self.server_ports.items()
+ for port in ports
+ if port
+ )
+
+ debug_args, command = browser_command(
+ self.binary,
+ self.binary_args + [
+ "--hard-fail",
+ "--webdriver=%s" % self.webdriver_port,
+ "about:blank",
+ ],
+ self.debug_info
+ )
+
+ if self.headless:
+ command += ["--headless"]
+
+ if self.ca_certificate_path:
+ command += ["--certificate-path", self.ca_certificate_path]
+
+ for stylesheet in self.user_stylesheets:
+ command += ["--user-stylesheet", stylesheet]
+
+        self.command = debug_args + command
+
+ if not self.debug_info or not self.debug_info.interactive:
+ self.output_handler = OutputHandler(self.logger, self.command)
+ self.proc = ProcessHandler(self.command,
+ processOutputLine=[self.on_output],
+ env=env,
+ storeOutput=False)
+ self.proc.run()
+ self.output_handler.after_process_start(self.proc.pid)
+ self.output_handler.start()
+ else:
+ self.proc = subprocess.Popen(self.command, env=env)
+
+ self.logger.debug("Servo Started")
+
+ def stop(self, force=False):
+ self.logger.debug("Stopping browser")
+ if self.proc is not None:
+ try:
+ self.proc.kill()
+ except OSError:
+ # This can happen on Windows if the process is already dead
+ pass
+ if self.output_handler is not None:
+ self.output_handler.after_process_stop()
+
+ def pid(self):
+ if self.proc is None:
+ return None
+
+ try:
+ return self.proc.pid
+ except AttributeError:
+ return None
+
+ def is_alive(self):
+ return self.proc.poll() is None
+
+ def cleanup(self):
+ self.stop()
+ os.remove(self.hosts_path)
+
+ def executor_browser(self):
+ assert self.webdriver_port is not None
+ return ExecutorBrowser, {"webdriver_host": self.webdriver_host,
+ "webdriver_port": self.webdriver_port,
+ "init_timeout": self.init_timeout}
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/webkit.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/webkit.py
new file mode 100644
index 0000000000..cecfbe4e27
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/webkit.py
@@ -0,0 +1,83 @@
+# mypy: allow-untyped-defs
+
+from .base import WebDriverBrowser, require_arg
+from .base import get_timeout_multiplier, certificate_domain_list # noqa: F401
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
+ WebDriverRefTestExecutor, # noqa: F401
+ WebDriverCrashtestExecutor) # noqa: F401
+
+
+__wptrunner__ = {"product": "webkit",
+ "check_args": "check_args",
+ "browser": "WebKitBrowser",
+ "browser_kwargs": "browser_kwargs",
+ "executor": {"testharness": "WebDriverTestharnessExecutor",
+ "reftest": "WebDriverRefTestExecutor",
+ "wdspec": "WdspecExecutor",
+ "crashtest": "WebDriverCrashtestExecutor"},
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "run_info_extras": "run_info_extras",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+
+def check_args(**kwargs):
+ require_arg(kwargs, "binary")
+ require_arg(kwargs, "webdriver_binary")
+ require_arg(kwargs, "webkit_port")
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ return {"binary": kwargs["binary"],
+ "webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": kwargs.get("webdriver_args")}
+
+
+def capabilities_for_port(server_config, **kwargs):
+ port_name = kwargs["webkit_port"]
+ if port_name in ["gtk", "wpe"]:
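+        # The GTK port reports its options under the "webkitgtk" prefix; other ports use the port name unchanged.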
+ port_key_map = {"gtk": "webkitgtk"}
+ browser_options_port = port_key_map.get(port_name, port_name)
+ browser_options_key = "%s:browserOptions" % browser_options_port
+
+ return {
+ "browserName": "MiniBrowser",
+ "browserVersion": "2.20",
+ "platformName": "ANY",
+ browser_options_key: {
+ "binary": kwargs["binary"],
+ "args": kwargs.get("binary_args", []),
+ "certificates": certificate_domain_list(server_config.domains_set, kwargs["host_cert_path"])}}
+
+ return {}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ executor_kwargs = base_executor_kwargs(test_type, test_environment, run_info_data, **kwargs)
+ executor_kwargs["close_after_done"] = True
+ executor_kwargs["capabilities"] = capabilities_for_port(test_environment.config,
+ **kwargs)
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ return {}
+
+
+def run_info_extras(**kwargs):
+ return {"webkit_port": kwargs["webkit_port"]}
+
+
+class WebKitBrowser(WebDriverBrowser):
+ """Generic WebKit browser is backed by WebKit's WebDriver implementation"""
+
+ def make_command(self):
+ return [self.webdriver_binary, f"--port={self.port}"] + self.webdriver_args
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/webkitgtk_minibrowser.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/webkitgtk_minibrowser.py
new file mode 100644
index 0000000000..a574328c32
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/browsers/webkitgtk_minibrowser.py
@@ -0,0 +1,82 @@
+# mypy: allow-untyped-defs
+
+from .base import (NullBrowser, # noqa: F401
+ certificate_domain_list,
+ get_timeout_multiplier, # noqa: F401
+ maybe_add_args)
+from .webkit import WebKitBrowser
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.base import WdspecExecutor # noqa: F401
+from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
+ WebDriverRefTestExecutor, # noqa: F401
+ WebDriverCrashtestExecutor) # noqa: F401
+
+__wptrunner__ = {"product": "webkitgtk_minibrowser",
+ "check_args": "check_args",
+ "browser": "WebKitGTKMiniBrowser",
+ "browser_kwargs": "browser_kwargs",
+ "executor": {"testharness": "WebDriverTestharnessExecutor",
+ "reftest": "WebDriverRefTestExecutor",
+ "wdspec": "WdspecExecutor",
+ "crashtest": "WebDriverCrashtestExecutor"},
+ "executor_kwargs": "executor_kwargs",
+ "env_extras": "env_extras",
+ "env_options": "env_options",
+ "run_info_extras": "run_info_extras",
+ "timeout_multiplier": "get_timeout_multiplier"}
+
+
+def check_args(**kwargs):
+ pass
+
+
+def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
+ # Workaround for https://gitlab.gnome.org/GNOME/libsoup/issues/172
+ webdriver_required_args = ["--host=127.0.0.1"]
+ webdriver_args = maybe_add_args(webdriver_required_args, kwargs.get("webdriver_args"))
+ return {"binary": kwargs["binary"],
+ "webdriver_binary": kwargs["webdriver_binary"],
+ "webdriver_args": webdriver_args}
+
+
+def capabilities(server_config, **kwargs):
+ browser_required_args = ["--automation",
+ "--javascript-can-open-windows-automatically=true",
+ "--enable-xss-auditor=false",
+ "--enable-media-capabilities=true",
+ "--enable-encrypted-media=true",
+ "--enable-media-stream=true",
+ "--enable-mock-capture-devices=true",
+ "--enable-webaudio=true"]
+ args = kwargs.get("binary_args", [])
+ args = maybe_add_args(browser_required_args, args)
+ return {
+ "browserName": "MiniBrowser",
+ "webkitgtk:browserOptions": {
+ "binary": kwargs["binary"],
+ "args": args,
+ "certificates": certificate_domain_list(server_config.domains_set, kwargs["host_cert_path"])}}
+
+
+def executor_kwargs(logger, test_type, test_environment, run_info_data,
+ **kwargs):
+ executor_kwargs = base_executor_kwargs(test_type, test_environment, run_info_data, **kwargs)
+ executor_kwargs["close_after_done"] = True
+ executor_kwargs["capabilities"] = capabilities(test_environment.config, **kwargs)
+ return executor_kwargs
+
+
+def env_extras(**kwargs):
+ return []
+
+
+def env_options():
+ return {}
+
+
+def run_info_extras(**kwargs):
+ return {"webkit_port": "gtk"}
+
+
+class WebKitGTKMiniBrowser(WebKitBrowser):
+ pass
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/config.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/config.py
new file mode 100644
index 0000000000..c114ee3e6a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/config.py
@@ -0,0 +1,63 @@
+# mypy: allow-untyped-defs
+
+from configparser import ConfigParser
+import os
+import sys
+from collections import OrderedDict
+from typing import Any, Dict
+
+here = os.path.dirname(__file__)
+
+class ConfigDict(Dict[str, Any]):
+ def __init__(self, base_path, *args, **kwargs):
+ self.base_path = base_path
+ dict.__init__(self, *args, **kwargs)
+
+ def get_path(self, key, default=None):
+ if key not in self:
+ return default
+ path = self[key]
+        path = os.path.expanduser(path)
+ return os.path.abspath(os.path.join(self.base_path, path))
+
+def read(config_path):
+ config_path = os.path.abspath(config_path)
+ config_root = os.path.dirname(config_path)
+ parser = ConfigParser()
+ success = parser.read(config_path)
+ assert config_path in success, success
+
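+    # Make %(pwd)s available for interpolation in values read from the .ini file.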
+ subns = {"pwd": os.path.abspath(os.path.curdir)}
+
+ rv = OrderedDict()
+ for section in parser.sections():
+ rv[section] = ConfigDict(config_root)
+ for key in parser.options(section):
+ rv[section][key] = parser.get(section, key, raw=False, vars=subns)
+
+ return rv
+
+def path(argv=None):
+ if argv is None:
+ argv = []
+ path = None
+
+ for i, arg in enumerate(argv):
+ if arg == "--config":
+ if i + 1 < len(argv):
+ path = argv[i + 1]
+ elif arg.startswith("--config="):
+ path = arg.split("=", 1)[1]
+ if path is not None:
+ break
+
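+    # No --config argument: fall back to wptrunner.ini in the working directory, then to the bundled defaults.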
+ if path is None:
+ if os.path.exists("wptrunner.ini"):
+ path = os.path.abspath("wptrunner.ini")
+ else:
+ path = os.path.join(here, "..", "wptrunner.default.ini")
+
+ return os.path.abspath(path)
+
+def load():
+ return read(path(sys.argv))
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/environment.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/environment.py
new file mode 100644
index 0000000000..7edc68f998
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/environment.py
@@ -0,0 +1,331 @@
+# mypy: allow-untyped-defs
+
+import errno
+import json
+import os
+import signal
+import socket
+import sys
+import time
+
+from mozlog import get_default_logger, handlers
+
+from . import mpcontext
+from .wptlogging import LogLevelRewriter, QueueHandler, LogQueueThread
+
+here = os.path.dirname(__file__)
+repo_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir, os.pardir))
+
+sys.path.insert(0, repo_root)
+from tools import localpaths # noqa: F401
+
+from wptserve.handlers import StringHandler
+
+serve = None
+
+
+def do_delayed_imports(logger, test_paths):
+ global serve
+
+ serve_root = serve_path(test_paths)
+ sys.path.insert(0, serve_root)
+
+ failed = []
+
+ try:
+ from tools.serve import serve
+ except ImportError:
+ failed.append("serve")
+
+ if failed:
+ logger.critical(
+            "Failed to import %s. Ensure that the tests path %s contains web-platform-tests" %
+ (", ".join(failed), serve_root))
+ sys.exit(1)
+
+
+def serve_path(test_paths):
+ return test_paths["/"]["tests_path"]
+
+
+def webtransport_h3_server_is_running(host, port, timeout):
+ # TODO(bashi): Move the following import to the beginning of this file
+ # once WebTransportH3Server is enabled by default.
+ from webtransport.h3.webtransport_h3_server import server_is_running # type: ignore
+ return server_is_running(host, port, timeout)
+
+
+class TestEnvironmentError(Exception):
+ pass
+
+
+def get_server_logger():
+ logger = get_default_logger(component="wptserve")
+ log_filter = handlers.LogLevelFilter(lambda x: x, "info")
+ # Downgrade errors to warnings for the server
+ log_filter = LogLevelRewriter(log_filter, ["error"], "warning")
+ logger.component_filter = log_filter
+ return logger
+
+
+class ProxyLoggingContext:
+ """Context manager object that handles setup and teardown of a log queue
+ for handling logging messages from wptserve."""
+
+ def __init__(self, logger):
+ mp_context = mpcontext.get_context()
+ self.log_queue = mp_context.Queue()
+ self.logging_thread = LogQueueThread(self.log_queue, logger)
+ self.logger_handler = QueueHandler(self.log_queue)
+
+ def __enter__(self):
+ self.logging_thread.start()
+ return self.logger_handler
+
+ def __exit__(self, *args):
+ self.log_queue.put(None)
+ # Wait for thread to shut down but not for too long since it's a daemon
+ self.logging_thread.join(1)
+
+
+class TestEnvironment:
+ """Context manager that owns the test environment i.e. the http and
+ websockets servers"""
+ def __init__(self, test_paths, testharness_timeout_multipler,
+ pause_after_test, debug_test, debug_info, options, ssl_config, env_extras,
+ enable_webtransport=False, mojojs_path=None, inject_script=None):
+
+ self.test_paths = test_paths
+ self.server = None
+ self.config_ctx = None
+ self.config = None
+ self.server_logger = get_server_logger()
+ self.server_logging_ctx = ProxyLoggingContext(self.server_logger)
+ self.testharness_timeout_multipler = testharness_timeout_multipler
+ self.pause_after_test = pause_after_test
+ self.debug_test = debug_test
+ self.test_server_port = options.pop("test_server_port", True)
+ self.debug_info = debug_info
+ self.options = options if options is not None else {}
+
+ mp_context = mpcontext.get_context()
+ self.cache_manager = mp_context.Manager()
+ self.stash = serve.stash.StashServer(mp_context=mp_context)
+ self.env_extras = env_extras
+ self.env_extras_cms = None
+ self.ssl_config = ssl_config
+ self.enable_webtransport = enable_webtransport
+ self.mojojs_path = mojojs_path
+ self.inject_script = inject_script
+
+ def __enter__(self):
+ server_log_handler = self.server_logging_ctx.__enter__()
+ self.config_ctx = self.build_config()
+
+ self.config = self.config_ctx.__enter__()
+
+ self.stash.__enter__()
+ self.cache_manager.__enter__()
+
+ assert self.env_extras_cms is None, (
+ "A TestEnvironment object cannot be nested")
+
+ self.env_extras_cms = []
+
+ for env in self.env_extras:
+ cm = env(self.options, self.config)
+ cm.__enter__()
+ self.env_extras_cms.append(cm)
+
+ self.servers = serve.start(self.server_logger,
+ self.config,
+ self.get_routes(),
+ mp_context=mpcontext.get_context(),
+ log_handlers=[server_log_handler],
+ webtransport_h3=self.enable_webtransport)
+
+ if self.options.get("supports_debugger") and self.debug_info and self.debug_info.interactive:
+ self.ignore_interrupts()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.process_interrupts()
+
+ for servers in self.servers.values():
+ for _, server in servers:
+ server.request_shutdown()
+ for servers in self.servers.values():
+ for _, server in servers:
+ server.wait()
+ for cm in self.env_extras_cms:
+ cm.__exit__(exc_type, exc_val, exc_tb)
+
+ self.env_extras_cms = None
+
+ self.cache_manager.__exit__(exc_type, exc_val, exc_tb)
+ self.stash.__exit__()
+ self.config_ctx.__exit__(exc_type, exc_val, exc_tb)
+ self.server_logging_ctx.__exit__(exc_type, exc_val, exc_tb)
+
+ def ignore_interrupts(self):
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ def process_interrupts(self):
+ signal.signal(signal.SIGINT, signal.SIG_DFL)
+
+ def build_config(self):
+ override_path = os.path.join(serve_path(self.test_paths), "config.json")
+
+ config = serve.ConfigBuilder(self.server_logger)
+
+ ports = {
+ "http": [8000, 8001],
+ "http-private": [8002],
+ "http-public": [8003],
+ "https": [8443, 8444],
+ "https-private": [8445],
+ "https-public": [8446],
+ "ws": [8888],
+ "wss": [8889],
+ "h2": [9000],
+ "webtransport-h3": [11000],
+ }
+ config.ports = ports
+
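+        # A config.json alongside the tests, if present, overrides these defaults.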
+ if os.path.exists(override_path):
+ with open(override_path) as f:
+ override_obj = json.load(f)
+ config.update(override_obj)
+
+ config.check_subdomains = False
+
+ ssl_config = self.ssl_config.copy()
+ ssl_config["encrypt_after_connect"] = self.options.get("encrypt_after_connect", False)
+ config.ssl = ssl_config
+
+ if "browser_host" in self.options:
+ config.browser_host = self.options["browser_host"]
+
+ if "bind_address" in self.options:
+ config.bind_address = self.options["bind_address"]
+
+ config.server_host = self.options.get("server_host", None)
+ config.doc_root = serve_path(self.test_paths)
+ config.inject_script = self.inject_script
+
+ return config
+
+ def get_routes(self):
+ route_builder = serve.get_route_builder(
+ self.server_logger,
+ self.config.aliases,
+ self.config)
+
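+        # Register the harness support files (runner pages, pdf.js, testharnessreport.js) as static routes.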
+ for path, format_args, content_type, route in [
+ ("testharness_runner.html", {}, "text/html", "/testharness_runner.html"),
+ ("print_reftest_runner.html", {}, "text/html", "/print_reftest_runner.html"),
+ (os.path.join(here, "..", "..", "third_party", "pdf_js", "pdf.js"), None,
+ "text/javascript", "/_pdf_js/pdf.js"),
+ (os.path.join(here, "..", "..", "third_party", "pdf_js", "pdf.worker.js"), None,
+ "text/javascript", "/_pdf_js/pdf.worker.js"),
+ (self.options.get("testharnessreport", "testharnessreport.js"),
+ {"output": self.pause_after_test,
+ "timeout_multiplier": self.testharness_timeout_multipler,
+ "explicit_timeout": "true" if self.debug_info is not None else "false",
+ "debug": "true" if self.debug_test else "false"},
+ "text/javascript;charset=utf8",
+ "/resources/testharnessreport.js")]:
+ path = os.path.normpath(os.path.join(here, path))
+            # Note that .headers files don't apply to static routes, so we need to
+            # re-add any static headers here.
+ headers = {"Cache-Control": "max-age=3600"}
+ route_builder.add_static(path, format_args, content_type, route,
+ headers=headers)
+
+ data = b""
+ with open(os.path.join(repo_root, "resources", "testdriver.js"), "rb") as fp:
+ data += fp.read()
+ with open(os.path.join(here, "testdriver-extra.js"), "rb") as fp:
+ data += fp.read()
+ route_builder.add_handler("GET", "/resources/testdriver.js",
+ StringHandler(data, "text/javascript"))
+
+ for url_base, paths in self.test_paths.items():
+ if url_base == "/":
+ continue
+ route_builder.add_mount_point(url_base, paths["tests_path"])
+
+ if "/" not in self.test_paths:
+ del route_builder.mountpoint_routes["/"]
+
+ if self.mojojs_path:
+ route_builder.add_mount_point("/gen/", self.mojojs_path)
+
+ return route_builder.get_routes()
+
+ def ensure_started(self):
+ # Pause for a while to ensure that the server has a chance to start
+ total_sleep_secs = 30
+ each_sleep_secs = 0.5
+ end_time = time.time() + total_sleep_secs
+ while time.time() < end_time:
+ failed, pending = self.test_servers()
+ if failed:
+ break
+ if not pending:
+ return
+ time.sleep(each_sleep_secs)
+ raise OSError("Servers failed to start: %s" %
+ ", ".join("%s:%s" % item for item in failed))
+
+ def test_servers(self):
+ failed = []
+ pending = []
+ host = self.config["server_host"]
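+        # First check that every server process is still alive; only then probe the ports for connectivity.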
+ for scheme, servers in self.servers.items():
+ for port, server in servers:
+ if not server.is_alive():
+ failed.append((scheme, port))
+
+ if not failed and self.test_server_port:
+ for scheme, servers in self.servers.items():
+ for port, server in servers:
+                    if scheme == "webtransport-h3":
+                        if not webtransport_h3_server_is_running(host, port, timeout=5.0):
+                            pending.append((host, port))
+                        continue
+                    s = socket.socket()
+                    s.settimeout(0.1)
+                    try:
+                        s.connect((host, port))
+                    except OSError:
+                        pending.append((host, port))
+                    finally:
+                        s.close()
+
+ return failed, pending
+
+
+def wait_for_service(logger, host, port, timeout=60):
+    """Waits until the network service given as a tuple of (host, port) becomes
+    available or the `timeout` duration is reached, at which point
+    ``OSError`` is raised."""
+ addr = (host, port)
+ logger.debug(f"Trying to connect to {host}:{port}")
+ end = time.time() + timeout
+ while end > time.time():
+ so = socket.socket()
+ try:
+ so.connect(addr)
+ except socket.timeout:
+ pass
+ except OSError as e:
+ if e.errno != errno.ECONNREFUSED:
+ raise
+ else:
+ logger.debug(f"Connected to {host}:{port}")
+ return True
+ finally:
+ so.close()
+ time.sleep(0.5)
+ raise OSError("Service is unavailable: %s:%i" % addr)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/__init__.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/__init__.py
new file mode 100644
index 0000000000..bf829d93e9
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/__init__.py
@@ -0,0 +1,5 @@
+# flake8: noqa (not ideal, but nicer than adding noqa: F401 to every line!)
+from .base import (executor_kwargs,
+ testharness_result_converter,
+ reftest_result_converter,
+ TestExecutor)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/actions.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/actions.py
new file mode 100644
index 0000000000..a4b689ba92
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/actions.py
@@ -0,0 +1,269 @@
+# mypy: allow-untyped-defs
+
+class ClickAction:
+ name = "click"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ selector = payload["selector"]
+ element = self.protocol.select.element_by_selector(selector)
+ self.logger.debug("Clicking element: %s" % selector)
+ self.protocol.click.element(element)
+
+
+class DeleteAllCookiesAction:
+ name = "delete_all_cookies"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ self.logger.debug("Deleting all cookies")
+ self.protocol.cookies.delete_all_cookies()
+
+
+class GetAllCookiesAction:
+ name = "get_all_cookies"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ self.logger.debug("Getting all cookies")
+ return self.protocol.cookies.get_all_cookies()
+
+
+class GetNamedCookieAction:
+ name = "get_named_cookie"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ name = payload["name"]
+ self.logger.debug("Getting cookie named %s" % name)
+ return self.protocol.cookies.get_named_cookie(name)
+
+
+class SendKeysAction:
+ name = "send_keys"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ selector = payload["selector"]
+ keys = payload["keys"]
+ element = self.protocol.select.element_by_selector(selector)
+ self.logger.debug("Sending keys to element: %s" % selector)
+ self.protocol.send_keys.send_keys(element, keys)
+
+
+class MinimizeWindowAction:
+ name = "minimize_window"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ return self.protocol.window.minimize()
+
+
+class SetWindowRectAction:
+ name = "set_window_rect"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ rect = payload["rect"]
+ self.protocol.window.set_rect(rect)
+
+
+class ActionSequenceAction:
+ name = "action_sequence"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+ self.requires_state_reset = False
+
+ def __call__(self, payload):
+ # TODO: some sort of shallow error checking
+ if self.requires_state_reset:
+ self.reset()
+ self.requires_state_reset = True
+ actions = payload["actions"]
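+        # Resolve selector-based origins in pointerMove actions to concrete elements before dispatching.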
+ for actionSequence in actions:
+ if actionSequence["type"] == "pointer":
+ for action in actionSequence["actions"]:
+ if (action["type"] == "pointerMove" and
+ isinstance(action["origin"], dict)):
+ action["origin"] = self.get_element(action["origin"]["selector"])
+ self.protocol.action_sequence.send_actions({"actions": actions})
+
+ def get_element(self, element_selector):
+ return self.protocol.select.element_by_selector(element_selector)
+
+ def reset(self):
+ self.protocol.action_sequence.release()
+ self.requires_state_reset = False
+
+
+class GenerateTestReportAction:
+ name = "generate_test_report"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ message = payload["message"]
+ self.logger.debug("Generating test report: %s" % message)
+ self.protocol.generate_test_report.generate_test_report(message)
+
+class SetPermissionAction:
+ name = "set_permission"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ permission_params = payload["permission_params"]
+ descriptor = permission_params["descriptor"]
+ name = descriptor["name"]
+ state = permission_params["state"]
+ self.logger.debug("Setting permission %s to %s" % (name, state))
+ self.protocol.set_permission.set_permission(descriptor, state)
+
+class AddVirtualAuthenticatorAction:
+ name = "add_virtual_authenticator"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ self.logger.debug("Adding virtual authenticator")
+ config = payload["config"]
+ authenticator_id = self.protocol.virtual_authenticator.add_virtual_authenticator(config)
+ self.logger.debug("Authenticator created with ID %s" % authenticator_id)
+ return authenticator_id
+
+class RemoveVirtualAuthenticatorAction:
+ name = "remove_virtual_authenticator"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ authenticator_id = payload["authenticator_id"]
+ self.logger.debug("Removing virtual authenticator %s" % authenticator_id)
+ return self.protocol.virtual_authenticator.remove_virtual_authenticator(authenticator_id)
+
+
+class AddCredentialAction:
+ name = "add_credential"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ authenticator_id = payload["authenticator_id"]
+ credential = payload["credential"]
+ self.logger.debug("Adding credential to virtual authenticator %s " % authenticator_id)
+ return self.protocol.virtual_authenticator.add_credential(authenticator_id, credential)
+
+class GetCredentialsAction:
+ name = "get_credentials"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ authenticator_id = payload["authenticator_id"]
+ self.logger.debug("Getting credentials from virtual authenticator %s " % authenticator_id)
+ return self.protocol.virtual_authenticator.get_credentials(authenticator_id)
+
+class RemoveCredentialAction:
+ name = "remove_credential"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ authenticator_id = payload["authenticator_id"]
+ credential_id = payload["credential_id"]
+ self.logger.debug("Removing credential %s from authenticator %s" % (credential_id, authenticator_id))
+ return self.protocol.virtual_authenticator.remove_credential(authenticator_id, credential_id)
+
+class RemoveAllCredentialsAction:
+ name = "remove_all_credentials"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ authenticator_id = payload["authenticator_id"]
+ self.logger.debug("Removing all credentials from authenticator %s" % authenticator_id)
+ return self.protocol.virtual_authenticator.remove_all_credentials(authenticator_id)
+
+class SetUserVerifiedAction:
+ name = "set_user_verified"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ authenticator_id = payload["authenticator_id"]
+ uv = payload["uv"]
+ self.logger.debug(
+ "Setting user verified flag on authenticator %s to %s" % (authenticator_id, uv["isUserVerified"]))
+ return self.protocol.virtual_authenticator.set_user_verified(authenticator_id, uv)
+
+class SetSPCTransactionModeAction:
+ name = "set_spc_transaction_mode"
+
+ def __init__(self, logger, protocol):
+ self.logger = logger
+ self.protocol = protocol
+
+ def __call__(self, payload):
+ mode = payload["mode"]
+ self.logger.debug("Setting SPC transaction mode to %s" % mode)
+ return self.protocol.spc_transactions.set_spc_transaction_mode(mode)
+
+actions = [ClickAction,
+ DeleteAllCookiesAction,
+ GetAllCookiesAction,
+ GetNamedCookieAction,
+ SendKeysAction,
+ MinimizeWindowAction,
+ SetWindowRectAction,
+ ActionSequenceAction,
+ GenerateTestReportAction,
+ SetPermissionAction,
+ AddVirtualAuthenticatorAction,
+ RemoveVirtualAuthenticatorAction,
+ AddCredentialAction,
+ GetCredentialsAction,
+ RemoveCredentialAction,
+ RemoveAllCredentialsAction,
+ SetUserVerifiedAction,
+ SetSPCTransactionModeAction]
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/base.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/base.py
new file mode 100644
index 0000000000..4bc193d038
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/base.py
@@ -0,0 +1,781 @@
+# mypy: allow-untyped-defs
+
+import base64
+import hashlib
+import io
+import json
+import os
+import threading
+import traceback
+import socket
+import sys
+from abc import ABCMeta, abstractmethod
+from typing import Any, Callable, ClassVar, Tuple, Type
+from urllib.parse import urljoin, urlsplit, urlunsplit
+
+from . import pytestrunner
+from .actions import actions
+from .protocol import Protocol, WdspecProtocol
+
+
+here = os.path.dirname(__file__)
+
+
+def executor_kwargs(test_type, test_environment, run_info_data, **kwargs):
+ timeout_multiplier = kwargs["timeout_multiplier"]
+ if timeout_multiplier is None:
+ timeout_multiplier = 1
+
+ executor_kwargs = {"server_config": test_environment.config,
+ "timeout_multiplier": timeout_multiplier,
+ "debug_info": kwargs["debug_info"]}
+
+ if test_type in ("reftest", "print-reftest"):
+ executor_kwargs["screenshot_cache"] = test_environment.cache_manager.dict()
+ executor_kwargs["reftest_screenshot"] = kwargs["reftest_screenshot"]
+
+ if test_type == "wdspec":
+ executor_kwargs["binary"] = kwargs.get("binary")
+ executor_kwargs["webdriver_binary"] = kwargs.get("webdriver_binary")
+ executor_kwargs["webdriver_args"] = kwargs.get("webdriver_args")
+
+ # By default the executor may try to cleanup windows after a test (to best
+ # associate any problems with the test causing them). If the user might
+ # want to view the results, however, the executor has to skip that cleanup.
+ if kwargs["pause_after_test"] or kwargs["pause_on_unexpected"]:
+ executor_kwargs["cleanup_after_test"] = False
+ executor_kwargs["debug_test"] = kwargs["debug_test"]
+ return executor_kwargs
+
+
+def strip_server(url):
+ """Remove the scheme and netloc from a url, leaving only the path and any query
+ or fragment.
+
+ url - the url to strip
+
+ e.g. http://example.org:8000/tests?id=1#2 becomes /tests?id=1#2"""
+
+ url_parts = list(urlsplit(url))
+ url_parts[0] = ""
+ url_parts[1] = ""
+ return urlunsplit(url_parts)
+
+
+class TestharnessResultConverter:
+ harness_codes = {0: "OK",
+ 1: "ERROR",
+ 2: "TIMEOUT",
+ 3: "PRECONDITION_FAILED"}
+
+ test_codes = {0: "PASS",
+ 1: "FAIL",
+ 2: "TIMEOUT",
+ 3: "NOTRUN",
+ 4: "PRECONDITION_FAILED"}
+
+ def __call__(self, test, result, extra=None):
+ """Convert a JSON result into a (TestResult, [SubtestResult]) tuple"""
+ result_url, status, message, stack, subtest_results = result
+ assert result_url == test.url, ("Got results from %s, expected %s" %
+ (result_url, test.url))
+ harness_result = test.result_cls(self.harness_codes[status], message, extra=extra, stack=stack)
+ return (harness_result,
+ [test.subtest_result_cls(st_name, self.test_codes[st_status], st_message, st_stack)
+ for st_name, st_status, st_message, st_stack in subtest_results])
+
+
+testharness_result_converter = TestharnessResultConverter()
+
+
+def hash_screenshots(screenshots):
+ """Computes the sha1 checksum of a list of base64-encoded screenshots."""
+ return [hashlib.sha1(base64.b64decode(screenshot)).hexdigest()
+ for screenshot in screenshots]
+
+
+def _ensure_hash_in_reftest_screenshots(extra):
+ """Make sure reftest_screenshots have hashes.
+
+ Marionette internal reftest runner does not produce hashes.
+ """
+ log_data = extra.get("reftest_screenshots")
+ if not log_data:
+ return
+ for item in log_data:
+ if type(item) != dict:
+ # Skip relation strings.
+ continue
+ if "hash" not in item:
+ item["hash"] = hash_screenshots([item["screenshot"]])[0]
+
+
+def get_pages(ranges_value, total_pages):
+ """Get a set of page numbers to include in a print reftest.
+
+ :param ranges_value: Parsed page ranges as a list e.g. [[1,2], [4], [6,None]]
+ :param total_pages: Integer total number of pages in the paginated output.
+ :retval: Set containing integer page numbers to include in the comparison e.g.
+ for the example ranges value and 10 total pages this would be
+ {1,2,4,6,7,8,9,10}"""
+ if not ranges_value:
+ return set(range(1, total_pages + 1))
+
+ rv = set()
+
+ for range_limits in ranges_value:
+ if len(range_limits) == 1:
+ range_limits = [range_limits[0], range_limits[0]]
+
+ if range_limits[0] is None:
+ range_limits[0] = 1
+ if range_limits[1] is None:
+ range_limits[1] = total_pages
+
+ if range_limits[0] > total_pages:
+ continue
+ rv |= set(range(range_limits[0], range_limits[1] + 1))
+ return rv
+
+
+def reftest_result_converter(self, test, result):
+ extra = result.get("extra", {})
+ _ensure_hash_in_reftest_screenshots(extra)
+ return (test.result_cls(
+ result["status"],
+ result["message"],
+ extra=extra,
+ stack=result.get("stack")), [])
+
+
+def pytest_result_converter(self, test, data):
+ harness_data, subtest_data = data
+
+ if subtest_data is None:
+ subtest_data = []
+
+ harness_result = test.result_cls(*harness_data)
+ subtest_results = [test.subtest_result_cls(*item) for item in subtest_data]
+
+ return (harness_result, subtest_results)
+
+
+def crashtest_result_converter(self, test, result):
+ return test.result_cls(**result), []
+
+
+class ExecutorException(Exception):
+ def __init__(self, status, message):
+ self.status = status
+ self.message = message
+
+
+class TimedRunner:
+ def __init__(self, logger, func, protocol, url, timeout, extra_timeout):
+ self.func = func
+ self.logger = logger
+ self.result = None
+ self.protocol = protocol
+ self.url = url
+ self.timeout = timeout
+ self.extra_timeout = extra_timeout
+ self.result_flag = threading.Event()
+
+ def run(self):
+ for setup_fn in [self.set_timeout, self.before_run]:
+ err = setup_fn()
+ if err:
+ self.result = (False, err)
+ return self.result
+
+ executor = threading.Thread(target=self.run_func)
+ executor.start()
+
+ # Add twice the extra timeout since the called function is expected to
+ # wait at least self.timeout + self.extra_timeout and this gives some leeway
+ timeout = self.timeout + 2 * self.extra_timeout if self.timeout else None
+ finished = self.result_flag.wait(timeout)
+ if self.result is None:
+ if finished:
+ # flag is True unless we timeout; this *shouldn't* happen, but
+ # it can if self.run_func fails to set self.result due to raising
+ self.result = False, ("INTERNAL-ERROR", "%s.run_func didn't set a result" %
+ self.__class__.__name__)
+ else:
+ if self.protocol.is_alive():
+ message = "Executor hit external timeout (this may indicate a hang)\n"
+ # get a traceback for the current stack of the executor thread
+ message += "".join(traceback.format_stack(sys._current_frames()[executor.ident]))
+ self.result = False, ("EXTERNAL-TIMEOUT", message)
+ else:
+ self.logger.info("Browser not responding, setting status to CRASH")
+ self.result = False, ("CRASH", None)
+ elif self.result[1] is None:
+ # We didn't get any data back from the test, so check if the
+ # browser is still responsive
+ if self.protocol.is_alive():
+ self.result = False, ("INTERNAL-ERROR", None)
+ else:
+ self.logger.info("Browser not responding, setting status to CRASH")
+ self.result = False, ("CRASH", None)
+
+ return self.result
+
+ def set_timeout(self):
+ raise NotImplementedError
+
+ def before_run(self):
+ pass
+
+ def run_func(self):
+ raise NotImplementedError
+
+
+class TestExecutor:
+ """Abstract Base class for object that actually executes the tests in a
+ specific browser. Typically there will be a different TestExecutor
+ subclass for each test type and method of executing tests.
+
+ :param browser: ExecutorBrowser instance providing properties of the
+ browser that will be tested.
+ :param server_config: Dictionary of wptserve server configuration of the
+ form stored in TestEnvironment.config
+ :param timeout_multiplier: Multiplier relative to base timeout to use
+ when setting test timeout.
+ """
+ __metaclass__ = ABCMeta
+
+ test_type = None # type: ClassVar[str]
+ # convert_result is a class variable set to a callable converter
+ # (e.g. reftest_result_converter) converting from an instance of
+ # URLManifestItem (e.g. RefTest) + type-dependent results object +
+ # type-dependent extra data, returning a tuple of Result and list of
+ # SubtestResult. For now, any callable is accepted. TODO: Make this type
+ # stricter when more of the surrounding code is annotated.
+ convert_result = None # type: ClassVar[Callable[..., Any]]
+ supports_testdriver = False
+ supports_jsshell = False
+ # Extra timeout to use after internal test timeout at which the harness
+ # should force a timeout
+ extra_timeout = 5 # seconds
+
+
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1,
+ debug_info=None, **kwargs):
+ self.logger = logger
+ self.runner = None
+ self.browser = browser
+ self.server_config = server_config
+ self.timeout_multiplier = timeout_multiplier
+ self.debug_info = debug_info
+ self.last_environment = {"protocol": "http",
+ "prefs": {}}
+ self.protocol = None # This must be set in subclasses
+
+ def setup(self, runner):
+ """Run steps needed before tests can be started e.g. connecting to
+ browser instance
+
+ :param runner: TestRunner instance that is going to run the tests"""
+ self.runner = runner
+ if self.protocol is not None:
+ self.protocol.setup(runner)
+
+ def teardown(self):
+ """Run cleanup steps after tests have finished"""
+ if self.protocol is not None:
+ self.protocol.teardown()
+
+ def reset(self):
+ """Re-initialize internal state to facilitate repeated test execution
+ as implemented by the `--rerun` command-line argument."""
+ pass
+
+ def run_test(self, test):
+ """Run a particular test.
+
+ :param test: The test to run"""
+ try:
+ if test.environment != self.last_environment:
+ self.on_environment_change(test.environment)
+ result = self.do_test(test)
+ except Exception as e:
+ exception_string = traceback.format_exc()
+ self.logger.warning(exception_string)
+ result = self.result_from_exception(test, e, exception_string)
+
+ # log result of parent test
+ if result[0].status == "ERROR":
+ self.logger.debug(result[0].message)
+
+ self.last_environment = test.environment
+
+ self.runner.send_message("test_ended", test, result)
+
+ def server_url(self, protocol, subdomain=False):
+ scheme = "https" if protocol == "h2" else protocol
+ host = self.server_config["browser_host"]
+ if subdomain:
+ # The only supported subdomain filename flag is "www".
+ host = "{subdomain}.{host}".format(subdomain="www", host=host)
+ return "{scheme}://{host}:{port}".format(scheme=scheme, host=host,
+ port=self.server_config["ports"][protocol][0])
+
+ def test_url(self, test):
+ return urljoin(self.server_url(test.environment["protocol"],
+ test.subdomain), test.url)
+
+ @abstractmethod
+ def do_test(self, test):
+ """Test-type and protocol specific implementation of running a
+ specific test.
+
+ :param test: The test to run."""
+ pass
+
+ def on_environment_change(self, new_environment):
+ pass
+
+ def result_from_exception(self, test, e, exception_string):
+ if hasattr(e, "status") and e.status in test.result_cls.statuses:
+ status = e.status
+ else:
+ status = "INTERNAL-ERROR"
+ message = str(getattr(e, "message", ""))
+ if message:
+ message += "\n"
+ message += exception_string
+ return test.result_cls(status, message), []
+
+ def wait(self):
+ return self.protocol.base.wait()
+
+
+class TestharnessExecutor(TestExecutor):
+ convert_result = testharness_result_converter
+
+
+class RefTestExecutor(TestExecutor):
+ convert_result = reftest_result_converter
+ is_print = False
+
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1, screenshot_cache=None,
+ debug_info=None, reftest_screenshot="unexpected", **kwargs):
+ TestExecutor.__init__(self, logger, browser, server_config,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info)
+
+ self.screenshot_cache = screenshot_cache
+ self.reftest_screenshot = reftest_screenshot
+
+
+class CrashtestExecutor(TestExecutor):
+ convert_result = crashtest_result_converter
+
+
+class PrintRefTestExecutor(TestExecutor):
+ convert_result = reftest_result_converter
+ is_print = True
+
+
+class RefTestImplementation:
+ def __init__(self, executor):
+ self.timeout_multiplier = executor.timeout_multiplier
+ self.executor = executor
+ # Cache of url:(screenshot hash, screenshot). Typically the
+ # screenshot is None, but we set this value if a test fails
+ # and the screenshot was taken from the cache so that we may
+ # retrieve the screenshot from the cache directly in the future
+ self.screenshot_cache = self.executor.screenshot_cache
+ self.message = None
+ self.reftest_screenshot = executor.reftest_screenshot
+
+ def setup(self):
+ pass
+
+ def teardown(self):
+ pass
+
+ @property
+ def logger(self):
+ return self.executor.logger
+
+ def get_hash(self, test, viewport_size, dpi, page_ranges):
+ key = (test.url, viewport_size, dpi)
+
+ if key not in self.screenshot_cache:
+ success, data = self.get_screenshot_list(test, viewport_size, dpi, page_ranges)
+
+ if not success:
+ return False, data
+
+ screenshots = data
+ hash_values = hash_screenshots(data)
+ self.screenshot_cache[key] = (hash_values, screenshots)
+
+ rv = (hash_values, screenshots)
+ else:
+ rv = self.screenshot_cache[key]
+
+ self.message.append(f"{test.url} {rv[0]}")
+ return True, rv
+
+ def reset(self):
+ self.screenshot_cache.clear()
+
+ def check_pass(self, hashes, screenshots, urls, relation, fuzzy):
+        """Check if a test passes, and return a tuple of (pass, page_idx),
+        where page_idx is the zero-based index of the first page on which a
+        difference occurs, or -1 if there are no differences"""
+
+ assert relation in ("==", "!=")
+ lhs_hashes, rhs_hashes = hashes
+ lhs_screenshots, rhs_screenshots = screenshots
+
+ if len(lhs_hashes) != len(rhs_hashes):
+ self.logger.info("Got different number of pages")
+ return relation == "!=", -1
+
+ assert len(lhs_screenshots) == len(lhs_hashes) == len(rhs_screenshots) == len(rhs_hashes)
+
+ for (page_idx, (lhs_hash,
+ rhs_hash,
+ lhs_screenshot,
+ rhs_screenshot)) in enumerate(zip(lhs_hashes,
+ rhs_hashes,
+ lhs_screenshots,
+ rhs_screenshots)):
+ comparison_screenshots = (lhs_screenshot, rhs_screenshot)
+ if not fuzzy or fuzzy == ((0, 0), (0, 0)):
+ equal = lhs_hash == rhs_hash
+ # sometimes images can have different hashes, but pixels can be identical.
+ if not equal:
+ self.logger.info("Image hashes didn't match%s, checking pixel differences" %
+ ("" if len(hashes) == 1 else " on page %i" % (page_idx + 1)))
+ max_per_channel, pixels_different = self.get_differences(comparison_screenshots,
+ urls)
+ equal = pixels_different == 0 and max_per_channel == 0
+ else:
+ max_per_channel, pixels_different = self.get_differences(comparison_screenshots,
+ urls,
+ page_idx if len(hashes) > 1 else None)
+ allowed_per_channel, allowed_different = fuzzy
+ self.logger.info("Allowed %s pixels different, maximum difference per channel %s" %
+ ("-".join(str(item) for item in allowed_different),
+ "-".join(str(item) for item in allowed_per_channel)))
+ equal = ((pixels_different == 0 and allowed_different[0] == 0) or
+ (max_per_channel == 0 and allowed_per_channel[0] == 0) or
+ (allowed_per_channel[0] <= max_per_channel <= allowed_per_channel[1] and
+ allowed_different[0] <= pixels_different <= allowed_different[1]))
+ if not equal:
+ return (False if relation == "==" else True, page_idx)
+ # All screenshots were equal within the fuzziness
+ return (True if relation == "==" else False, -1)
+
+ def get_differences(self, screenshots, urls, page_idx=None):
+ from PIL import Image, ImageChops, ImageStat
+
+ lhs = Image.open(io.BytesIO(base64.b64decode(screenshots[0]))).convert("RGB")
+ rhs = Image.open(io.BytesIO(base64.b64decode(screenshots[1]))).convert("RGB")
+ self.check_if_solid_color(lhs, urls[0])
+ self.check_if_solid_color(rhs, urls[1])
+ diff = ImageChops.difference(lhs, rhs)
+ minimal_diff = diff.crop(diff.getbbox())
+ mask = minimal_diff.convert("L", dither=None)
+ stat = ImageStat.Stat(minimal_diff, mask)
+ per_channel = max(item[1] for item in stat.extrema)
+ count = stat.count[0]
+ self.logger.info("Found %s pixels different, maximum difference per channel %s%s" %
+ (count,
+ per_channel,
+ "" if page_idx is None else " on page %i" % (page_idx + 1)))
+ return per_channel, count
+
+ def check_if_solid_color(self, image, url):
+ extrema = image.getextrema()
+ if all(min == max for min, max in extrema):
+ color = ''.join('%02X' % value for value, _ in extrema)
+ self.message.append(f"Screenshot is solid color 0x{color} for {url}\n")
+
+ def run_test(self, test):
+ viewport_size = test.viewport_size
+ dpi = test.dpi
+ page_ranges = test.page_ranges
+ self.message = []
+
+
+ # Depth-first search of reference tree, with the goal
+        # of reaching a leaf node with only pass results
+
+ stack = list(((test, item[0]), item[1]) for item in reversed(test.references))
+
+ while stack:
+ hashes = [None, None]
+ screenshots = [None, None]
+ urls = [None, None]
+
+ nodes, relation = stack.pop()
+ fuzzy = self.get_fuzzy(test, nodes, relation)
+
+ for i, node in enumerate(nodes):
+ success, data = self.get_hash(node, viewport_size, dpi, page_ranges)
+ if success is False:
+ return {"status": data[0], "message": data[1]}
+
+ hashes[i], screenshots[i] = data
+ urls[i] = node.url
+
+ is_pass, page_idx = self.check_pass(hashes, screenshots, urls, relation, fuzzy)
+ log_data = [
+ {"url": urls[0], "screenshot": screenshots[0][page_idx],
+ "hash": hashes[0][page_idx]},
+ relation,
+ {"url": urls[1], "screenshot": screenshots[1][page_idx],
+ "hash": hashes[1][page_idx]}
+ ]
+
+ if is_pass:
+ fuzzy = self.get_fuzzy(test, nodes, relation)
+ if nodes[1].references:
+ stack.extend(list(((nodes[1], item[0]), item[1])
+ for item in reversed(nodes[1].references)))
+ else:
+ test_result = {"status": "PASS", "message": None}
+ if (self.reftest_screenshot == "always" or
+ self.reftest_screenshot == "unexpected" and
+ test.expected() != "PASS"):
+ test_result["extra"] = {"reftest_screenshots": log_data}
+ # We passed
+ return test_result
+
+ # We failed, so construct a failure message
+
+ for i, (node, screenshot) in enumerate(zip(nodes, screenshots)):
+ if screenshot is None:
+ success, screenshot = self.retake_screenshot(node, viewport_size, dpi, page_ranges)
+ if success:
+ screenshots[i] = screenshot
+
+ test_result = {"status": "FAIL",
+ "message": "\n".join(self.message)}
+ if (self.reftest_screenshot in ("always", "fail") or
+ self.reftest_screenshot == "unexpected" and
+ test.expected() != "FAIL"):
+ test_result["extra"] = {"reftest_screenshots": log_data}
+ return test_result
+
+ def get_fuzzy(self, root_test, test_nodes, relation):
+ full_key = tuple([item.url for item in test_nodes] + [relation])
+ ref_only_key = test_nodes[1].url
+
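+        # Prefer a per-test fuzzy override over the test's own annotation; within each, try the most specific key first.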
+ fuzzy_override = root_test.fuzzy_override
+ fuzzy = test_nodes[0].fuzzy
+
+ sources = [fuzzy_override, fuzzy]
+ keys = [full_key, ref_only_key, None]
+ value = None
+ for source in sources:
+ for key in keys:
+ if key in source:
+ value = source[key]
+ break
+ if value:
+ break
+ return value
+
+ def retake_screenshot(self, node, viewport_size, dpi, page_ranges):
+ success, data = self.get_screenshot_list(node,
+ viewport_size,
+ dpi,
+ page_ranges)
+ if not success:
+ return False, data
+
+ key = (node.url, viewport_size, dpi)
+ hash_val, _ = self.screenshot_cache[key]
+ self.screenshot_cache[key] = hash_val, data
+ return True, data
+
+ def get_screenshot_list(self, node, viewport_size, dpi, page_ranges):
+ success, data = self.executor.screenshot(node, viewport_size, dpi, page_ranges)
+ if success and not isinstance(data, list):
+ return success, [data]
+ return success, data
+
+
+class WdspecExecutor(TestExecutor):
+ convert_result = pytest_result_converter
+ protocol_cls = WdspecProtocol # type: ClassVar[Type[Protocol]]
+
+ def __init__(self, logger, browser, server_config, webdriver_binary,
+ webdriver_args, timeout_multiplier=1, capabilities=None,
+ debug_info=None, **kwargs):
+ super().__init__(logger, browser, server_config,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info)
+ self.webdriver_binary = webdriver_binary
+ self.webdriver_args = webdriver_args
+ self.timeout_multiplier = timeout_multiplier
+ self.capabilities = capabilities
+
+ def setup(self, runner):
+ self.protocol = self.protocol_cls(self, self.browser)
+ super().setup(runner)
+
+ def is_alive(self):
+ return self.protocol.is_alive()
+
+ def on_environment_change(self, new_environment):
+ pass
+
+ def do_test(self, test):
+ timeout = test.timeout * self.timeout_multiplier + self.extra_timeout
+
+ success, data = WdspecRun(self.do_wdspec,
+ test.abs_path,
+ timeout).run()
+
+ if success:
+ return self.convert_result(test, data)
+
+ return (test.result_cls(*data), [])
+
+ def do_wdspec(self, path, timeout):
+ session_config = {"host": self.browser.host,
+ "port": self.browser.port,
+ "capabilities": self.capabilities,
+ "webdriver": {
+ "binary": self.webdriver_binary,
+ "args": self.webdriver_args
+ }}
+
+ return pytestrunner.run(path,
+ self.server_config,
+ session_config,
+ timeout=timeout)
+
+
+class WdspecRun:
+ def __init__(self, func, path, timeout):
+ self.func = func
+ self.result = (None, None)
+ self.path = path
+ self.timeout = timeout
+ self.result_flag = threading.Event()
+
+ def run(self):
+ """Runs function in a thread and interrupts it if it exceeds the
+ given timeout. Returns (True, (Result, [SubtestResult ...])) in
+ case of success, or (False, (status, extra information)) in the
+ event of failure.
+ """
+
+ executor = threading.Thread(target=self._run)
+ executor.start()
+
+ self.result_flag.wait(self.timeout)
+ if self.result[1] is None:
+ self.result = False, ("EXTERNAL-TIMEOUT", None)
+
+ return self.result
+
+ def _run(self):
+ try:
+ self.result = True, self.func(self.path, self.timeout)
+ except (socket.timeout, OSError):
+ self.result = False, ("CRASH", None)
+ except Exception as e:
+            message = getattr(e, "message", "")
+ if message:
+ message += "\n"
+ message += traceback.format_exc()
+ self.result = False, ("INTERNAL-ERROR", message)
+ finally:
+ self.result_flag.set()
+
+
+class CallbackHandler:
+ """Handle callbacks from testdriver-using tests.
+
+ The default implementation here makes sense for things that are roughly like
+    WebDriver. Things that differ more from WebDriver may need to create a
+    fully custom implementation."""
+
+ unimplemented_exc = (NotImplementedError,) # type: ClassVar[Tuple[Type[Exception], ...]]
+
+ def __init__(self, logger, protocol, test_window):
+ self.protocol = protocol
+ self.test_window = test_window
+ self.logger = logger
+ self.callbacks = {
+ "action": self.process_action,
+ "complete": self.process_complete
+ }
+
+ self.actions = {cls.name: cls(self.logger, self.protocol) for cls in actions}
+
+ def __call__(self, result):
+ url, command, payload = result
+ self.logger.debug("Got async callback: %s" % result[1])
+ try:
+ callback = self.callbacks[command]
+ except KeyError:
+ raise ValueError("Unknown callback type %r" % result[1])
+ return callback(url, payload)
+
+ def process_complete(self, url, payload):
+ rv = [strip_server(url)] + payload
+ return True, rv
+
+ def process_action(self, url, payload):
+ action = payload["action"]
+ cmd_id = payload["id"]
+ self.logger.debug("Got action: %s" % action)
+ try:
+ action_handler = self.actions[action]
+ except KeyError:
+ raise ValueError("Unknown action %s" % action)
+ try:
+ with ActionContext(self.logger, self.protocol, payload.get("context")):
+ result = action_handler(payload)
+ except self.unimplemented_exc:
+ self.logger.warning("Action %s not implemented" % action)
+ self._send_message(cmd_id, "complete", "error", "Action %s not implemented" % action)
+ except Exception:
+ self.logger.warning("Action %s failed" % action)
+ self.logger.warning(traceback.format_exc())
+ self._send_message(cmd_id, "complete", "error")
+ raise
+ else:
+ self.logger.debug(f"Action {action} completed with result {result}")
+ return_message = {"result": result}
+ self._send_message(cmd_id, "complete", "success", json.dumps(return_message))
+
+ return False, None
+
+ def _send_message(self, cmd_id, message_type, status, message=None):
+ self.protocol.testdriver.send_message(cmd_id, message_type, status, message=message)
+
+
+class ActionContext:
+ def __init__(self, logger, protocol, context):
+ self.logger = logger
+ self.protocol = protocol
+ self.context = context
+ self.initial_window = None
+
+ def __enter__(self):
+ if self.context is None:
+ return
+
+ self.initial_window = self.protocol.base.current_window
+ self.logger.debug("Switching to window %s" % self.context)
+ self.protocol.testdriver.switch_to_window(self.context, self.initial_window)
+
+ def __exit__(self, *args):
+ if self.context is None:
+ return
+
+ self.logger.debug("Switching back to initial window")
+ self.protocol.base.set_window(self.initial_window)
+ self.initial_window = None
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorchrome.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorchrome.py
new file mode 100644
index 0000000000..e5f5615385
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorchrome.py
@@ -0,0 +1,114 @@
+# mypy: allow-untyped-defs
+
+import os
+import traceback
+
+from urllib.parse import urljoin
+
+from .base import get_pages
+from .executorwebdriver import WebDriverProtocol, WebDriverRefTestExecutor, WebDriverRun
+from .protocol import PrintProtocolPart
+
+here = os.path.dirname(__file__)
+
+
+class ChromeDriverPrintProtocolPart(PrintProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+ self.runner_handle = None
+
+ def load_runner(self):
+ url = urljoin(self.parent.executor.server_url("http"), "/print_reftest_runner.html")
+ self.logger.debug("Loading %s" % url)
+ try:
+ self.webdriver.url = url
+ except Exception as e:
+ self.logger.critical(
+                "Loading initial page %s failed. Ensure that "
+                "there are no other programs bound to this port and "
+                "that your firewall rules or network setup does not "
+                "prevent access.\n%s" % (url, traceback.format_exc()))
+ raise
+ self.runner_handle = self.webdriver.window_handle
+
+ def render_as_pdf(self, width, height):
+ margin = 0.5
+ body = {
+ "cmd": "Page.printToPDF",
+ "params": {
+ # Chrome accepts dimensions in inches; we are using cm
+ "paperWidth": width / 2.54,
+ "paperHeight": height / 2.54,
+ "marginLeft": margin,
+ "marginRight": margin,
+ "marginTop": margin,
+ "marginBottom": margin,
+ "shrinkToFit": False,
+ "printBackground": True,
+ }
+ }
+ return self.webdriver.send_session_command("POST", "goog/cdp/execute", body=body)["data"]
+
+ def pdf_to_png(self, pdf_base64, ranges):
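+        # Render the PDF to PNGs in the runner page (which provides render()), then restore the original window handle.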
+ handle = self.webdriver.window_handle
+ self.webdriver.window_handle = self.runner_handle
+ try:
+ rv = self.webdriver.execute_async_script("""
+let callback = arguments[arguments.length - 1];
+render('%s').then(result => callback(result))""" % pdf_base64)
+ page_numbers = get_pages(ranges, len(rv))
+ rv = [item for i, item in enumerate(rv) if i + 1 in page_numbers]
+ return rv
+ finally:
+ self.webdriver.window_handle = handle
+
+
+class ChromeDriverProtocol(WebDriverProtocol):
+ implements = WebDriverProtocol.implements + [ChromeDriverPrintProtocolPart]
+
+
+class ChromeDriverPrintRefTestExecutor(WebDriverRefTestExecutor):
+ protocol_cls = ChromeDriverProtocol
+
+ def setup(self, runner):
+ super().setup(runner)
+ self.protocol.pdf_print.load_runner()
+ self.has_window = False
+ with open(os.path.join(here, "reftest.js")) as f:
+ self.script = f.read()
+
+ def screenshot(self, test, viewport_size, dpi, page_ranges):
+ # https://github.com/web-platform-tests/wpt/issues/7140
+ assert dpi is None
+
+ if not self.has_window:
+ self.protocol.base.execute_script(self.script)
+ self.protocol.base.set_window(self.protocol.webdriver.handles[-1])
+ self.has_window = True
+
+ self.viewport_size = viewport_size
+ self.page_ranges = page_ranges.get(test.url)
+ timeout = self.timeout_multiplier * test.timeout if self.debug_info is None else None
+
+ test_url = self.test_url(test)
+
+ return WebDriverRun(self.logger,
+ self._render,
+ self.protocol,
+ test_url,
+ timeout,
+ self.extra_timeout).run()
+
+ def _render(self, protocol, url, timeout):
+ protocol.webdriver.url = url
+
+ protocol.base.execute_script(self.wait_script, asynchronous=True)
+
+ pdf = protocol.pdf_print.render_as_pdf(*self.viewport_size)
+ screenshots = protocol.pdf_print.pdf_to_png(pdf, self.page_ranges)
+ for i, screenshot in enumerate(screenshots):
+            # strip off the "data:image/png;base64," prefix of the data URL
+ if screenshot.startswith("data:image/png;base64,"):
+ screenshots[i] = screenshot.split(",", 1)[1]
+
+ return screenshots
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorcontentshell.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorcontentshell.py
new file mode 100644
index 0000000000..474bb7168e
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorcontentshell.py
@@ -0,0 +1,269 @@
+# mypy: allow-untyped-defs
+
+from .base import RefTestExecutor, RefTestImplementation, CrashtestExecutor, TestharnessExecutor
+from .protocol import Protocol, ProtocolPart
+from time import time
+from queue import Empty
+from base64 import b64encode
+import json
+
+
+class CrashError(BaseException):
+ pass
+
+
+def _read_line(io_queue, deadline=None, encoding=None, errors="strict", raise_crash=True):
+ """Reads a single line from the io queue. The read must succeed before `deadline` or
+ a TimeoutError is raised. The line is returned as a bytestring or optionally with the
+ specified `encoding`. If `raise_crash` is set, a CrashError is raised if the line
+ happens to be a crash message.
+ """
+ current_time = time()
+
+ if deadline and current_time > deadline:
+ raise TimeoutError()
+
+ try:
+ line = io_queue.get(True, deadline - current_time if deadline else None)
+ if raise_crash and line.startswith(b"#CRASHED"):
+ raise CrashError()
+ except Empty:
+ raise TimeoutError()
+
+ return line.decode(encoding, errors) if encoding else line
+
+
+class ContentShellTestPart(ProtocolPart):
+ """This protocol part is responsible for running tests via content_shell's protocol mode.
+
+ For more details, see:
+ https://chromium.googlesource.com/chromium/src.git/+/HEAD/content/web_test/browser/test_info_extractor.h
+ """
+ name = "content_shell_test"
+ eof_marker = '#EOF\n' # Marker sent by content_shell after blocks.
+
+ def __init__(self, parent):
+ super().__init__(parent)
+ self.stdout_queue = parent.browser.stdout_queue
+ self.stdin_queue = parent.browser.stdin_queue
+
+ def do_test(self, command, timeout=None):
+ """Send a command to content_shell and return the resulting outputs.
+
+ A command consists of a URL to navigate to, followed by an optional
+ expected image hash and 'print' mode specifier. The syntax looks like:
+ http://web-platform.test:8000/test.html['<hash>['print]]
+ """
+ self._send_command(command)
+
+ deadline = time() + timeout if timeout else None
+ # The first block can also contain audio data but not in WPT.
+ text = self._read_block(deadline)
+ image = self._read_block(deadline)
+
+ return text, image
+
+ def _send_command(self, command):
+ """Sends a single `command`, i.e. a URL to open, to content_shell.
+ """
+ self.stdin_queue.put((command + "\n").encode("utf-8"))
+
+ def _read_block(self, deadline=None):
+ """Tries to read a single block of content from stdout before the `deadline`.
+ """
+ while True:
+ line = _read_line(self.stdout_queue, deadline, "latin-1").rstrip()
+
+ if line == "Content-Type: text/plain":
+ return self._read_text_block(deadline)
+
+ if line == "Content-Type: image/png":
+ return self._read_image_block(deadline)
+
+ if line == "#EOF":
+ return None
+
+ def _read_text_block(self, deadline=None):
+ """Tries to read a plain text block in utf-8 encoding before the `deadline`.
+ """
+ result = ""
+
+ while True:
+ line = _read_line(self.stdout_queue, deadline, "utf-8", "replace", False)
+
+ if line.endswith(self.eof_marker):
+ result += line[:-len(self.eof_marker)]
+ break
+ elif line.endswith('#EOF\r\n'):
+ result += line[:-len('#EOF\r\n')]
+ self.logger.warning('Got a CRLF-terminated #EOF - this is a driver bug.')
+ break
+
+ result += line
+
+ return result
+
+ def _read_image_block(self, deadline=None):
+ """Tries to read an image block (as a binary png) before the `deadline`.
+ """
+ content_length_line = _read_line(self.stdout_queue, deadline, "utf-8")
+ assert content_length_line.startswith("Content-Length:")
+ content_length = int(content_length_line[15:])
+
+ result = bytearray()
+
+ while True:
+ line = _read_line(self.stdout_queue, deadline, raise_crash=False)
+ excess = len(line) + len(result) - content_length
+
+ if excess > 0:
+ # This is the line that contains the EOF marker.
+ assert excess == len(self.eof_marker)
+ result += line[:-excess]
+ break
+
+ result += line
+
+ return result
+
+
+class ContentShellErrorsPart(ProtocolPart):
+ """This protocol part is responsible for collecting the errors reported by content_shell.
+ """
+ name = "content_shell_errors"
+
+ def __init__(self, parent):
+ super().__init__(parent)
+ self.stderr_queue = parent.browser.stderr_queue
+
+ def read_errors(self):
+ """Reads the entire content of the stderr queue as is available right now (no blocking).
+ """
+ result = ""
+
+ while not self.stderr_queue.empty():
+ # There is no potential for race conditions here because this is the only place
+ # where we read from the stderr queue.
+ result += _read_line(self.stderr_queue, None, "utf-8", "replace", False)
+
+ return result
+
+
+class ContentShellProtocol(Protocol):
+ implements = [ContentShellTestPart, ContentShellErrorsPart]
+ init_timeout = 10 # Timeout (seconds) to wait for #READY message.
+
+ def connect(self):
+ """Waits for content_shell to emit its "#READY" message which signals that it is fully
+ initialized. We wait for a maximum of self.init_timeout seconds.
+ """
+ deadline = time() + self.init_timeout
+
+ while True:
+ if _read_line(self.browser.stdout_queue, deadline).rstrip() == b"#READY":
+ break
+
+ def after_connect(self):
+ pass
+
+ def teardown(self):
+ # Close the queue properly to avoid broken pipe spam in the log.
+ self.browser.stdin_queue.close()
+ self.browser.stdin_queue.join_thread()
+
+ def is_alive(self):
+ """Checks if content_shell is alive by determining if the IO pipes are still
+ open. This does not guarantee that the process is responsive.
+ """
+        return not self.browser.io_stopped.is_set()
+
+
+def _convert_exception(test, exception, errors):
+ """Converts our TimeoutError and CrashError exceptions into test results.
+ """
+ if isinstance(exception, TimeoutError):
+ return (test.result_cls("EXTERNAL-TIMEOUT", errors), [])
+ if isinstance(exception, CrashError):
+ return (test.result_cls("CRASH", errors), [])
+ raise exception
+
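+# For example, a test that overruns its deadline maps as
+#
+#     _convert_exception(test, TimeoutError(), errors)
+#     -> (test.result_cls("EXTERNAL-TIMEOUT", errors), [])
+#
+# while any other exception type is re-raised unchanged.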
+
+class ContentShellRefTestExecutor(RefTestExecutor):
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1, screenshot_cache=None,
+ debug_info=None, reftest_screenshot="unexpected", **kwargs):
+ super().__init__(logger, browser, server_config, timeout_multiplier, screenshot_cache,
+ debug_info, reftest_screenshot, **kwargs)
+ self.implementation = RefTestImplementation(self)
+ self.protocol = ContentShellProtocol(self, browser)
+
+ def reset(self):
+ self.implementation.reset()
+
+ def do_test(self, test):
+ try:
+ result = self.implementation.run_test(test)
+ self.protocol.content_shell_errors.read_errors()
+ return self.convert_result(test, result)
+ except BaseException as exception:
+ return _convert_exception(test, exception, self.protocol.content_shell_errors.read_errors())
+
+ def screenshot(self, test, viewport_size, dpi, page_ranges):
+ # Currently, the page size and DPI are hardcoded for print-reftests:
+ # https://chromium.googlesource.com/chromium/src/+/4e1b7bc33d42b401d7d9ad1dcba72883add3e2af/content/web_test/renderer/test_runner.cc#100
+ # Content shell has an internal `window.testRunner.setPrintingSize(...)`
+ # API, but it's not callable with protocol mode.
+ assert dpi is None
+ command = self.test_url(test)
+ if self.is_print:
+ # Currently, `content_shell` uses the expected image hash to avoid
+ # dumping a matching image as an optimization. In Chromium, the
+ # hash can be computed from an expected screenshot checked into the
+ # source tree (i.e., without looking at a reference). This is not
+ # possible in `wpt`, so pass an empty hash here to force a dump.
+ command += "''print"
+ _, image = self.protocol.content_shell_test.do_test(
+ command, test.timeout * self.timeout_multiplier)
+
+ if not image:
+ return False, ("ERROR", self.protocol.content_shell_errors.read_errors())
+
+ return True, b64encode(image).decode()
+
+
+class ContentShellPrintRefTestExecutor(ContentShellRefTestExecutor):
+ is_print = True
+
+
+class ContentShellCrashtestExecutor(CrashtestExecutor):
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1, debug_info=None,
+ **kwargs):
+ super().__init__(logger, browser, server_config, timeout_multiplier, debug_info, **kwargs)
+ self.protocol = ContentShellProtocol(self, browser)
+
+ def do_test(self, test):
+ try:
+ _ = self.protocol.content_shell_test.do_test(self.test_url(test), test.timeout * self.timeout_multiplier)
+ self.protocol.content_shell_errors.read_errors()
+ return self.convert_result(test, {"status": "PASS", "message": None})
+ except BaseException as exception:
+ return _convert_exception(test, exception, self.protocol.content_shell_errors.read_errors())
+
+
+class ContentShellTestharnessExecutor(TestharnessExecutor):
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1, debug_info=None,
+ **kwargs):
+ super().__init__(logger, browser, server_config, timeout_multiplier, debug_info, **kwargs)
+ self.protocol = ContentShellProtocol(self, browser)
+
+ def do_test(self, test):
+ try:
+ text, _ = self.protocol.content_shell_test.do_test(self.test_url(test),
+ test.timeout * self.timeout_multiplier)
+
+ errors = self.protocol.content_shell_errors.read_errors()
+ if not text:
+ return (test.result_cls("ERROR", errors), [])
+
+ return self.convert_result(test, json.loads(text))
+ except BaseException as exception:
+ return _convert_exception(test, exception, self.protocol.content_shell_errors.read_errors())
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executormarionette.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executormarionette.py
new file mode 100644
index 0000000000..5cd18f2493
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executormarionette.py
@@ -0,0 +1,1323 @@
+# mypy: allow-untyped-defs
+
+import json
+import os
+import shutil
+import tempfile
+import threading
+import time
+import traceback
+import uuid
+
+from urllib.parse import urljoin
+
+errors = None
+marionette = None
+pytestrunner = None
+
+here = os.path.dirname(__file__)
+
+from .base import (CallbackHandler,
+ CrashtestExecutor,
+ RefTestExecutor,
+ RefTestImplementation,
+ TestharnessExecutor,
+ TimedRunner,
+ WdspecExecutor,
+ get_pages,
+ strip_server)
+from .protocol import (ActionSequenceProtocolPart,
+ AssertsProtocolPart,
+ BaseProtocolPart,
+ TestharnessProtocolPart,
+ PrefsProtocolPart,
+ Protocol,
+ StorageProtocolPart,
+ SelectorProtocolPart,
+ ClickProtocolPart,
+ CookiesProtocolPart,
+ SendKeysProtocolPart,
+ TestDriverProtocolPart,
+ CoverageProtocolPart,
+ GenerateTestReportProtocolPart,
+ VirtualAuthenticatorProtocolPart,
+ WindowProtocolPart,
+ SetPermissionProtocolPart,
+ PrintProtocolPart,
+ DebugProtocolPart,
+ merge_dicts)
+
+
+def do_delayed_imports():
+ global errors, marionette, Addons
+
+ from marionette_driver import marionette, errors
+ from marionette_driver.addons import Addons
+
+
+def _switch_to_window(marionette, handle):
+ """Switch to the specified window; subsequent commands will be
+ directed at the new window.
+
+ This is a workaround for issue 24924[0]; marionettedriver 3.1.0 dropped the
+ 'name' parameter from its switch_to_window command, but it is still needed
+ for at least Firefox 79.
+
+ [0]: https://github.com/web-platform-tests/wpt/issues/24924
+
+ :param marionette: The Marionette instance
+ :param handle: The id of the window to switch to.
+ """
+ marionette._send_message("WebDriver:SwitchToWindow",
+ {"handle": handle, "name": handle, "focus": True})
+ marionette.window = handle
+
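+# A minimal usage sketch (assumes an already-connected Marionette session):
+#
+#     handles = marionette.window_handles
+#     _switch_to_window(marionette, handles[-1])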
+
+class MarionetteBaseProtocolPart(BaseProtocolPart):
+ def __init__(self, parent):
+ super().__init__(parent)
+ self.timeout = None
+
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def execute_script(self, script, asynchronous=False):
+ method = self.marionette.execute_async_script if asynchronous else self.marionette.execute_script
+ return method(script, new_sandbox=False, sandbox=None)
+
+ def set_timeout(self, timeout):
+ """Set the Marionette script timeout.
+
+ :param timeout: Script timeout in seconds
+
+ """
+ if timeout != self.timeout:
+ self.marionette.timeout.script = timeout
+ self.timeout = timeout
+
+ @property
+ def current_window(self):
+ return self.marionette.current_window_handle
+
+ def set_window(self, handle):
+ _switch_to_window(self.marionette, handle)
+
+ def window_handles(self):
+ return self.marionette.window_handles
+
+ def load(self, url):
+ self.marionette.navigate(url)
+
+ def wait(self):
+ try:
+ socket_timeout = self.marionette.client.socket_timeout
+ except AttributeError:
+ # This can happen if there was a crash
+ return
+ if socket_timeout:
+ try:
+ self.marionette.timeout.script = socket_timeout / 2
+ except OSError:
+ self.logger.debug("Socket closed")
+ return
+
+ while True:
+ try:
+ return self.marionette.execute_async_script("""let callback = arguments[arguments.length - 1];
+addEventListener("__test_restart", e => {e.preventDefault(); callback(true)})""")
+ except errors.NoSuchWindowException:
+ # The window closed
+ break
+ except errors.ScriptTimeoutException:
+ self.logger.debug("Script timed out")
+ pass
+ except errors.JavascriptException as e:
+ # This can happen if we navigate, but just keep going
+ self.logger.debug(e)
+ pass
+ except OSError:
+ self.logger.debug("Socket closed")
+ break
+ except Exception:
+ self.logger.warning(traceback.format_exc())
+ break
+ return False
+
+
+class MarionetteTestharnessProtocolPart(TestharnessProtocolPart):
+ def __init__(self, parent):
+ super().__init__(parent)
+ self.runner_handle = None
+ with open(os.path.join(here, "runner.js")) as f:
+ self.runner_script = f.read()
+ with open(os.path.join(here, "window-loaded.js")) as f:
+ self.window_loaded_script = f.read()
+
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def load_runner(self, url_protocol):
+ # Check if we previously had a test window open, and if we did make sure it's closed
+ if self.runner_handle:
+ self._close_windows()
+ url = urljoin(self.parent.executor.server_url(url_protocol), "/testharness_runner.html")
+ self.logger.debug("Loading %s" % url)
+ try:
+ self.dismiss_alert(lambda: self.marionette.navigate(url))
+ except Exception:
+ self.logger.critical(
+ "Loading initial page %s failed. Ensure that the "
+ "there are no other programs bound to this port and "
+ "that your firewall rules or network setup does not "
+ "prevent access.\n%s" % (url, traceback.format_exc()))
+ raise
+ self.runner_handle = self.marionette.current_window_handle
+ format_map = {"title": threading.current_thread().name.replace("'", '"')}
+ self.parent.base.execute_script(self.runner_script % format_map)
+
+ def _close_windows(self):
+ handles = self.marionette.window_handles
+ runner_handle = None
+ try:
+ handles.remove(self.runner_handle)
+ runner_handle = self.runner_handle
+ except ValueError:
+ # The runner window probably changed id but we can restore it
+ # This isn't supposed to happen, but marionette ids are not yet stable
+ # We assume that the first handle returned corresponds to the runner,
+ # but it hopefully doesn't matter too much if that assumption is
+ # wrong since we reload the runner in that tab anyway.
+ runner_handle = handles.pop(0)
+ self.logger.info("Changing harness_window to %s" % runner_handle)
+
+ for handle in handles:
+ try:
+ self.logger.info("Closing window %s" % handle)
+ _switch_to_window(self.marionette, handle)
+ self.dismiss_alert(lambda: self.marionette.close())
+ except errors.NoSuchWindowException:
+ # We might have raced with the previous test to close this
+ # window, skip it.
+ pass
+ _switch_to_window(self.marionette, runner_handle)
+ return runner_handle
+
+ def close_old_windows(self, url_protocol):
+ runner_handle = self._close_windows()
+ if runner_handle != self.runner_handle:
+ self.load_runner(url_protocol)
+ return self.runner_handle
+
+ def dismiss_alert(self, f):
+ while True:
+ try:
+ f()
+ except errors.UnexpectedAlertOpen:
+ alert = self.marionette.switch_to_alert()
+ try:
+ alert.dismiss()
+ except errors.NoAlertPresentException:
+ pass
+ else:
+ break
+
+ def get_test_window(self, window_id, parent, timeout=5):
+ """Find the test window amongst all the open windows.
+ This is assumed to be either the named window or the one after the parent in the list of
+ window handles
+
+ :param window_id: The DOM name of the Window
+ :param parent: The handle of the runner window
+ :param timeout: The time in seconds to wait for the window to appear. This is because in
+ some implementations there's a race between calling window.open and the
+ window being added to the list of WebDriver accessible windows."""
+ test_window = None
+ end_time = time.time() + timeout
+ while time.time() < end_time:
+ if window_id:
+ try:
+                    # Try the JSON serialization of the WindowProxy object;
+                    # it's in WebDriver Level 1 but nothing supports it yet.
+                    win_s = self.parent.base.execute_script("return window['%s'];" % window_id)
+ win_obj = json.loads(win_s)
+ test_window = win_obj["window-fcc6-11e5-b4f8-330a88ab9d7f"]
+ except Exception:
+ pass
+
+ if test_window is None:
+ handles = self.marionette.window_handles
+ if len(handles) == 2:
+ test_window = next(iter(set(handles) - {parent}))
+ elif len(handles) > 2 and handles[0] == parent:
+ # Hope the first one here is the test window
+ test_window = handles[1]
+
+ if test_window is not None:
+ assert test_window != parent
+ return test_window
+
+ time.sleep(0.1)
+
+ raise Exception("unable to find test window")
+
+ def test_window_loaded(self):
+ """Wait until the page in the new window has been loaded.
+
+        Ignore JavaScript exceptions that are thrown when the document
+        has been unloaded due to a process change.
+ """
+ while True:
+ try:
+ self.parent.base.execute_script(self.window_loaded_script, asynchronous=True)
+ break
+ except errors.JavascriptException:
+ pass
+
+
+class MarionettePrefsProtocolPart(PrefsProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def set(self, name, value):
+ if not isinstance(value, str):
+ value = str(value)
+
+ if value.lower() not in ("true", "false"):
+ try:
+ int(value)
+ except ValueError:
+ value = f"'{value}'"
+ else:
+ value = value.lower()
+
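+        # For example (illustrative pref names and values):
+        #     set("dom.disable_open_during_load", False)  -> "false"
+        #     set("dom.min_background_timeout_value", 50)  -> "50"
+        #     set("some.string.pref", "foo")               -> "'foo'"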
+ self.logger.info(f"Setting pref {name} to {value}")
+
+ script = """
+ let prefInterface = Components.classes["@mozilla.org/preferences-service;1"]
+ .getService(Components.interfaces.nsIPrefBranch);
+ let pref = '%s';
+ let type = prefInterface.getPrefType(pref);
+ let value = %s;
+ switch(type) {
+ case prefInterface.PREF_STRING:
+ prefInterface.setCharPref(pref, value);
+ break;
+ case prefInterface.PREF_BOOL:
+ prefInterface.setBoolPref(pref, value);
+ break;
+ case prefInterface.PREF_INT:
+ prefInterface.setIntPref(pref, value);
+ break;
+ case prefInterface.PREF_INVALID:
+ // Pref doesn't seem to be defined already; guess at the
+ // right way to set it based on the type of value we have.
+ switch (typeof value) {
+ case "boolean":
+ prefInterface.setBoolPref(pref, value);
+ break;
+ case "string":
+ prefInterface.setCharPref(pref, value);
+ break;
+ case "number":
+ prefInterface.setIntPref(pref, value);
+ break;
+ default:
+ throw new Error("Unknown pref value type: " + (typeof value));
+ }
+ break;
+ default:
+ throw new Error("Unknown pref type " + type);
+ }
+ """ % (name, value)
+ with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
+ self.marionette.execute_script(script)
+
+ def clear(self, name):
+ self.logger.info(f"Clearing pref {name}")
+ script = """
+ let prefInterface = Components.classes["@mozilla.org/preferences-service;1"]
+ .getService(Components.interfaces.nsIPrefBranch);
+ let pref = '%s';
+ prefInterface.clearUserPref(pref);
+ """ % name
+ with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
+ self.marionette.execute_script(script)
+
+ def get(self, name):
+ script = """
+ let prefInterface = Components.classes["@mozilla.org/preferences-service;1"]
+ .getService(Components.interfaces.nsIPrefBranch);
+ let pref = '%s';
+ let type = prefInterface.getPrefType(pref);
+ switch(type) {
+ case prefInterface.PREF_STRING:
+ return prefInterface.getCharPref(pref);
+ case prefInterface.PREF_BOOL:
+ return prefInterface.getBoolPref(pref);
+ case prefInterface.PREF_INT:
+ return prefInterface.getIntPref(pref);
+ case prefInterface.PREF_INVALID:
+ return null;
+ }
+ """ % name
+ with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
+ rv = self.marionette.execute_script(script)
+ self.logger.debug(f"Got pref {name} with value {rv}")
+ return rv
+
+
+class MarionetteStorageProtocolPart(StorageProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def clear_origin(self, url):
+ self.logger.info("Clearing origin %s" % (url))
+ script = """
+ let url = '%s';
+ let uri = Components.classes["@mozilla.org/network/io-service;1"]
+ .getService(Ci.nsIIOService)
+ .newURI(url);
+ let ssm = Components.classes["@mozilla.org/scriptsecuritymanager;1"]
+ .getService(Ci.nsIScriptSecurityManager);
+ let principal = ssm.createContentPrincipal(uri, {});
+ let qms = Components.classes["@mozilla.org/dom/quota-manager-service;1"]
+ .getService(Components.interfaces.nsIQuotaManagerService);
+ qms.clearStoragesForPrincipal(principal, "default", null, true);
+ """ % url
+ with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
+ self.marionette.execute_script(script)
+
+
+class MarionetteAssertsProtocolPart(AssertsProtocolPart):
+ def setup(self):
+ self.assert_count = {"chrome": 0, "content": 0}
+ self.chrome_assert_count = 0
+ self.marionette = self.parent.marionette
+
+ def get(self):
+ script = """
+ debug = Cc["@mozilla.org/xpcom/debug;1"].getService(Ci.nsIDebug2);
+ if (debug.isDebugBuild) {
+ return debug.assertionCount;
+ }
+ return 0;
+ """
+
+ def get_count(context, **kwargs):
+ try:
+ context_count = self.marionette.execute_script(script, **kwargs)
+ if context_count:
+ self.parent.logger.info("Got %s assert count %s" % (context, context_count))
+ test_count = context_count - self.assert_count[context]
+ self.assert_count[context] = context_count
+ return test_count
+ except errors.NoSuchWindowException:
+ # If the window was already closed
+ self.parent.logger.warning("Failed to get assertion count; window was closed")
+ except (errors.MarionetteException, OSError):
+ # This usually happens if the process crashed
+ pass
+
+ counts = []
+ with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
+ counts.append(get_count("chrome"))
+ if self.parent.e10s:
+ counts.append(get_count("content", sandbox="system"))
+
+ counts = [item for item in counts if item is not None]
+
+ if not counts:
+ return None
+
+ return sum(counts)
+
+
+class MarionetteSelectorProtocolPart(SelectorProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def elements_by_selector(self, selector):
+ return self.marionette.find_elements("css selector", selector)
+
+
+class MarionetteClickProtocolPart(ClickProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def element(self, element):
+ return element.click()
+
+
+class MarionetteCookiesProtocolPart(CookiesProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def delete_all_cookies(self):
+ self.logger.info("Deleting all cookies")
+ return self.marionette.delete_all_cookies()
+
+ def get_all_cookies(self):
+ self.logger.info("Getting all cookies")
+ return self.marionette.get_cookies()
+
+ def get_named_cookie(self, name):
+ self.logger.info("Getting cookie named %s" % name)
+ try:
+ return self.marionette.get_cookie(name)
+ # When errors.NoSuchCookieException is supported,
+ # that should be used here instead.
+ except Exception:
+ return None
+
+
+class MarionetteSendKeysProtocolPart(SendKeysProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def send_keys(self, element, keys):
+ return element.send_keys(keys)
+
+class MarionetteWindowProtocolPart(WindowProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def minimize(self):
+ return self.marionette.minimize_window()
+
+ def set_rect(self, rect):
+ self.marionette.set_window_rect(rect["x"], rect["y"], rect["height"], rect["width"])
+
+
+class MarionetteActionSequenceProtocolPart(ActionSequenceProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def send_actions(self, actions):
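+        # `actions` follows the WebDriver "Perform Actions" payload shape, e.g.:
+        #     [{"type": "pointer", "id": "p0", "actions": [
+        #         {"type": "pointerDown", "button": 0}, {"type": "pointerUp", "button": 0}]}]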
+ actions = self.marionette._to_json(actions)
+ self.logger.info(actions)
+ self.marionette._send_message("WebDriver:PerformActions", actions)
+
+ def release(self):
+ self.marionette._send_message("WebDriver:ReleaseActions", {})
+
+
+class MarionetteTestDriverProtocolPart(TestDriverProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def send_message(self, cmd_id, message_type, status, message=None):
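+        # The test window receives a postMessage payload such as (hypothetical values):
+        #     {"cmd_id": 1, "type": "testdriver-complete", "status": "success"}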
+ obj = {
+ "cmd_id": cmd_id,
+ "type": "testdriver-%s" % str(message_type),
+ "status": str(status)
+ }
+ if message:
+ obj["message"] = str(message)
+ self.parent.base.execute_script("window.postMessage(%s, '*')" % json.dumps(obj))
+
+ def _switch_to_frame(self, index_or_elem):
+ try:
+ self.marionette.switch_to_frame(index_or_elem)
+ except (errors.NoSuchFrameException,
+ errors.StaleElementException) as e:
+ raise ValueError from e
+
+ def _switch_to_parent_frame(self):
+ self.marionette.switch_to_parent_frame()
+
+
+class MarionetteCoverageProtocolPart(CoverageProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ if not self.parent.ccov:
+ self.is_enabled = False
+ return
+
+ script = """
+ const {PerTestCoverageUtils} = ChromeUtils.import("chrome://remote/content/marionette/PerTestCoverageUtils.jsm");
+ return PerTestCoverageUtils.enabled;
+ """
+ with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
+ self.is_enabled = self.marionette.execute_script(script)
+
+ def reset(self):
+ script = """
+ var callback = arguments[arguments.length - 1];
+
+ const {PerTestCoverageUtils} = ChromeUtils.import("chrome://remote/content/marionette/PerTestCoverageUtils.jsm");
+ PerTestCoverageUtils.beforeTest().then(callback, callback);
+ """
+ with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
+ try:
+ error = self.marionette.execute_async_script(script)
+ if error is not None:
+ raise Exception('Failure while resetting counters: %s' % json.dumps(error))
+ except (errors.MarionetteException, OSError):
+ # This usually happens if the process crashed
+ pass
+
+ def dump(self):
+ if len(self.marionette.window_handles):
+ handle = self.marionette.window_handles[0]
+ _switch_to_window(self.marionette, handle)
+
+ script = """
+ var callback = arguments[arguments.length - 1];
+
+ const {PerTestCoverageUtils} = ChromeUtils.import("chrome://remote/content/marionette/PerTestCoverageUtils.jsm");
+ PerTestCoverageUtils.afterTest().then(callback, callback);
+ """
+ with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
+ try:
+ error = self.marionette.execute_async_script(script)
+ if error is not None:
+ raise Exception('Failure while dumping counters: %s' % json.dumps(error))
+ except (errors.MarionetteException, OSError):
+ # This usually happens if the process crashed
+ pass
+
+class MarionetteGenerateTestReportProtocolPart(GenerateTestReportProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def generate_test_report(self, config):
+ raise NotImplementedError("generate_test_report not yet implemented")
+
+class MarionetteVirtualAuthenticatorProtocolPart(VirtualAuthenticatorProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def add_virtual_authenticator(self, config):
+ raise NotImplementedError("add_virtual_authenticator not yet implemented")
+
+ def remove_virtual_authenticator(self, authenticator_id):
+ raise NotImplementedError("remove_virtual_authenticator not yet implemented")
+
+ def add_credential(self, authenticator_id, credential):
+ raise NotImplementedError("add_credential not yet implemented")
+
+ def get_credentials(self, authenticator_id):
+ raise NotImplementedError("get_credentials not yet implemented")
+
+ def remove_credential(self, authenticator_id, credential_id):
+ raise NotImplementedError("remove_credential not yet implemented")
+
+ def remove_all_credentials(self, authenticator_id):
+ raise NotImplementedError("remove_all_credentials not yet implemented")
+
+ def set_user_verified(self, authenticator_id, uv):
+ raise NotImplementedError("set_user_verified not yet implemented")
+
+
+class MarionetteSetPermissionProtocolPart(SetPermissionProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def set_permission(self, descriptor, state):
+ body = {
+ "descriptor": descriptor,
+ "state": state,
+ }
+ try:
+ self.marionette._send_message("WebDriver:SetPermission", body)
+ except errors.UnsupportedOperationException:
+ raise NotImplementedError("set_permission not yet implemented")
+
+
+class MarionettePrintProtocolPart(PrintProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+ self.runner_handle = None
+
+ def load_runner(self):
+ url = urljoin(self.parent.executor.server_url("http"), "/print_reftest_runner.html")
+ self.logger.debug("Loading %s" % url)
+ try:
+ self.marionette.navigate(url)
+ except Exception as e:
+ self.logger.critical(
+ "Loading initial page %s failed. Ensure that the "
+ "there are no other programs bound to this port and "
+ "that your firewall rules or network setup does not "
+ "prevent access.\n%s" % (url, traceback.format_exc(e)))
+ raise
+ self.runner_handle = self.marionette.current_window_handle
+
+ def render_as_pdf(self, width, height):
+ margin = 0.5 * 2.54
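+        # WebDriver:Print takes page dimensions and margins in centimetres;
+        # 0.5 * 2.54 gives a 0.5 inch margin on every side.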
+ body = {
+ "page": {
+ "width": width,
+ "height": height
+ },
+ "margin": {
+ "left": margin,
+ "right": margin,
+ "top": margin,
+ "bottom": margin,
+ },
+ "shrinkToFit": False,
+ "printBackground": True,
+ }
+ return self.marionette._send_message("WebDriver:Print", body, key="value")
+
+ def pdf_to_png(self, pdf_base64, page_ranges):
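+        # `page_ranges` is a list of page ranges from the test metadata; assuming the
+        # shape handled by get_pages() in base.py, e.g. [[1, 2], [4]] keeps pages 1, 2
+        # and 4 of the rendered PDF.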
+ handle = self.marionette.current_window_handle
+ _switch_to_window(self.marionette, self.runner_handle)
+ try:
+ rv = self.marionette.execute_async_script("""
+let callback = arguments[arguments.length - 1];
+render('%s').then(result => callback(result))""" % pdf_base64, new_sandbox=False, sandbox=None)
+ page_numbers = get_pages(page_ranges, len(rv))
+ rv = [item for i, item in enumerate(rv) if i + 1 in page_numbers]
+ return rv
+ finally:
+ _switch_to_window(self.marionette, handle)
+
+
+class MarionetteDebugProtocolPart(DebugProtocolPart):
+ def setup(self):
+ self.marionette = self.parent.marionette
+
+ def load_devtools(self):
+ with self.marionette.using_context(self.marionette.CONTEXT_CHROME):
+            # Once ESR 107 is released, we can replace the ChromeUtils.import(DevToolsShim.jsm)
+ # with ChromeUtils.importESModule(DevToolsShim.sys.mjs) in this snippet:
+ self.parent.base.execute_script("""
+const { DevToolsShim } = ChromeUtils.import(
+ "chrome://devtools-startup/content/DevToolsShim.jsm"
+);
+
+const callback = arguments[arguments.length - 1];
+
+async function loadDevTools() {
+ const tab = window.gBrowser.selectedTab;
+ await DevToolsShim.showToolboxForTab(tab, {
+ toolId: "webconsole",
+ hostType: "window"
+ });
+}
+
+loadDevTools().catch((e) => console.error("Devtools failed to load", e))
+ .then(callback);
+""", asynchronous=True)
+
+
+class MarionetteProtocol(Protocol):
+ implements = [MarionetteBaseProtocolPart,
+ MarionetteTestharnessProtocolPart,
+ MarionettePrefsProtocolPart,
+ MarionetteStorageProtocolPart,
+ MarionetteSelectorProtocolPart,
+ MarionetteClickProtocolPart,
+ MarionetteCookiesProtocolPart,
+ MarionetteSendKeysProtocolPart,
+ MarionetteWindowProtocolPart,
+ MarionetteActionSequenceProtocolPart,
+ MarionetteTestDriverProtocolPart,
+ MarionetteAssertsProtocolPart,
+ MarionetteCoverageProtocolPart,
+ MarionetteGenerateTestReportProtocolPart,
+ MarionetteVirtualAuthenticatorProtocolPart,
+ MarionetteSetPermissionProtocolPart,
+ MarionettePrintProtocolPart,
+ MarionetteDebugProtocolPart]
+
+ def __init__(self, executor, browser, capabilities=None, timeout_multiplier=1, e10s=True, ccov=False):
+ do_delayed_imports()
+
+ super().__init__(executor, browser)
+ self.marionette = None
+ self.marionette_port = browser.marionette_port
+ self.capabilities = capabilities
+ if hasattr(browser, "capabilities"):
+ if self.capabilities is None:
+ self.capabilities = browser.capabilities
+ else:
+ merge_dicts(self.capabilities, browser.capabilities)
+ self.timeout_multiplier = timeout_multiplier
+ self.runner_handle = None
+ self.e10s = e10s
+ self.ccov = ccov
+
+ def connect(self):
+ self.logger.debug("Connecting to Marionette on port %i" % self.marionette_port)
+ startup_timeout = marionette.Marionette.DEFAULT_STARTUP_TIMEOUT * self.timeout_multiplier
+ self.marionette = marionette.Marionette(host='127.0.0.1',
+ port=self.marionette_port,
+ socket_timeout=None,
+ startup_timeout=startup_timeout)
+
+ self.logger.debug("Waiting for Marionette connection")
+ while True:
+ try:
+ self.marionette.raise_for_port()
+ break
+ except OSError:
+ # When running in a debugger wait indefinitely for Firefox to start
+ if self.executor.debug_info is None:
+ raise
+
+ self.logger.debug("Starting Marionette session")
+ self.marionette.start_session(self.capabilities)
+ self.logger.debug("Marionette session started")
+
+ def after_connect(self):
+ pass
+
+ def teardown(self):
+ if self.marionette and self.marionette.session_id:
+ try:
+ self.marionette._request_in_app_shutdown()
+ self.marionette.delete_session(send_request=False)
+ self.marionette.cleanup()
+ except Exception:
+ # This is typically because the session never started
+ pass
+ if self.marionette is not None:
+ self.marionette = None
+ super().teardown()
+
+ def is_alive(self):
+ try:
+ self.marionette.current_window_handle
+ except Exception:
+ return False
+ return True
+
+ def on_environment_change(self, old_environment, new_environment):
+        # Unset all the old prefs
+ for name in old_environment.get("prefs", {}).keys():
+ value = self.executor.original_pref_values[name]
+ if value is None:
+ self.prefs.clear(name)
+ else:
+ self.prefs.set(name, value)
+
+ for name, value in new_environment.get("prefs", {}).items():
+ self.executor.original_pref_values[name] = self.prefs.get(name)
+ self.prefs.set(name, value)
+
+ pac = new_environment.get("pac", None)
+
+ if pac != old_environment.get("pac", None):
+ if pac is None:
+ self.prefs.clear("network.proxy.type")
+ self.prefs.clear("network.proxy.autoconfig_url")
+ else:
+ self.prefs.set("network.proxy.type", 2)
+ self.prefs.set("network.proxy.autoconfig_url",
+ urljoin(self.executor.server_url("http"), pac))
+
+class ExecuteAsyncScriptRun(TimedRunner):
+ def set_timeout(self):
+ timeout = self.timeout
+
+ try:
+ if timeout is not None:
+ self.protocol.base.set_timeout(timeout + self.extra_timeout)
+ else:
+ # We just want it to never time out, really, but marionette doesn't
+ # make that possible. It also seems to time out immediately if the
+ # timeout is set too high. This works at least.
+ self.protocol.base.set_timeout(2**28 - 1)
+ except OSError:
+ msg = "Lost marionette connection before starting test"
+ self.logger.error(msg)
+ return ("INTERNAL-ERROR", msg)
+
+ def before_run(self):
+ index = self.url.rfind("/storage/")
+ if index != -1:
+ # Clear storage
+ self.protocol.storage.clear_origin(self.url)
+
+ def run_func(self):
+ try:
+ self.result = True, self.func(self.protocol, self.url, self.timeout)
+ except errors.ScriptTimeoutException:
+ self.logger.debug("Got a marionette timeout")
+ self.result = False, ("EXTERNAL-TIMEOUT", None)
+ except OSError:
+ # This can happen on a crash
+ # Also, should check after the test if the firefox process is still running
+ # and otherwise ignore any other result and set it to crash
+ self.logger.info("IOError on command, setting status to CRASH")
+ self.result = False, ("CRASH", None)
+ except errors.NoSuchWindowException:
+ self.logger.info("NoSuchWindowException on command, setting status to CRASH")
+ self.result = False, ("CRASH", None)
+ except Exception as e:
+ if isinstance(e, errors.JavascriptException) and str(e).startswith("Document was unloaded"):
+ message = "Document unloaded; maybe test navigated the top-level-browsing context?"
+ else:
+ message = getattr(e, "message", "")
+ if message:
+ message += "\n"
+ message += traceback.format_exc()
+ self.logger.warning(traceback.format_exc())
+ self.result = False, ("INTERNAL-ERROR", message)
+ finally:
+ self.result_flag.set()
+
+
+class MarionetteTestharnessExecutor(TestharnessExecutor):
+ supports_testdriver = True
+
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1,
+ close_after_done=True, debug_info=None, capabilities=None,
+ debug=False, ccov=False, debug_test=False, **kwargs):
+ """Marionette-based executor for testharness.js tests"""
+ TestharnessExecutor.__init__(self, logger, browser, server_config,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info)
+ self.protocol = MarionetteProtocol(self,
+ browser,
+ capabilities,
+ timeout_multiplier,
+ kwargs["e10s"],
+ ccov)
+ with open(os.path.join(here, "testharness_webdriver_resume.js")) as f:
+ self.script_resume = f.read()
+ self.close_after_done = close_after_done
+ self.window_id = str(uuid.uuid4())
+ self.debug = debug
+ self.debug_test = debug_test
+
+ self.install_extensions = browser.extensions
+
+ self.original_pref_values = {}
+
+ if marionette is None:
+ do_delayed_imports()
+
+ def setup(self, runner):
+ super().setup(runner)
+ for extension_path in self.install_extensions:
+ self.logger.info("Installing extension from %s" % extension_path)
+ addons = Addons(self.protocol.marionette)
+ addons.install(extension_path)
+
+ self.protocol.testharness.load_runner(self.last_environment["protocol"])
+
+ def is_alive(self):
+ return self.protocol.is_alive()
+
+ def on_environment_change(self, new_environment):
+ self.protocol.on_environment_change(self.last_environment, new_environment)
+
+ if new_environment["protocol"] != self.last_environment["protocol"]:
+ self.protocol.testharness.load_runner(new_environment["protocol"])
+
+ def do_test(self, test):
+ timeout = (test.timeout * self.timeout_multiplier if self.debug_info is None
+ else None)
+
+ success, data = ExecuteAsyncScriptRun(self.logger,
+ self.do_testharness,
+ self.protocol,
+ self.test_url(test),
+ timeout,
+ self.extra_timeout).run()
+        # The format of `data` depends on whether the test ran to completion.
+        # For asserts we only care that, if it didn't complete, the status is
+        # in the first field.
+ status = None
+ if not success:
+ status = data[0]
+
+ extra = None
+ if self.debug and (success or status not in ("CRASH", "INTERNAL-ERROR")):
+ assertion_count = self.protocol.asserts.get()
+ if assertion_count is not None:
+ extra = {"assertion_count": assertion_count}
+
+ if success:
+ return self.convert_result(test, data, extra=extra)
+
+ return (test.result_cls(extra=extra, *data), [])
+
+ def do_testharness(self, protocol, url, timeout):
+ parent_window = protocol.testharness.close_old_windows(self.last_environment["protocol"])
+
+ if self.protocol.coverage.is_enabled:
+ self.protocol.coverage.reset()
+
+ format_map = {"url": strip_server(url)}
+
+ protocol.base.execute_script("window.open('about:blank', '%s', 'noopener')" % self.window_id)
+ test_window = protocol.testharness.get_test_window(self.window_id, parent_window,
+ timeout=10 * self.timeout_multiplier)
+ self.protocol.base.set_window(test_window)
+ protocol.testharness.test_window_loaded()
+
+ if self.debug_test and self.browser.supports_devtools:
+ self.protocol.debug.load_devtools()
+
+ handler = CallbackHandler(self.logger, protocol, test_window)
+ protocol.marionette.navigate(url)
+ while True:
+ result = protocol.base.execute_script(
+ self.script_resume % format_map, asynchronous=True)
+ if result is None:
+                # This can happen if we get a content process crash
+ return None
+ done, rv = handler(result)
+ if done:
+ break
+
+ if self.protocol.coverage.is_enabled:
+ self.protocol.coverage.dump()
+
+ return rv
+
+
+class MarionetteRefTestExecutor(RefTestExecutor):
+ is_print = False
+
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1,
+ screenshot_cache=None, close_after_done=True,
+ debug_info=None, reftest_internal=False,
+ reftest_screenshot="unexpected", ccov=False,
+ group_metadata=None, capabilities=None, debug=False,
+ browser_version=None, debug_test=False, **kwargs):
+ """Marionette-based executor for reftests"""
+ RefTestExecutor.__init__(self,
+ logger,
+ browser,
+ server_config,
+ screenshot_cache=screenshot_cache,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info)
+ self.protocol = MarionetteProtocol(self, browser, capabilities,
+ timeout_multiplier, kwargs["e10s"],
+ ccov)
+ self.implementation = self.get_implementation(reftest_internal)
+ self.implementation_kwargs = {}
+ if reftest_internal:
+ self.implementation_kwargs["screenshot"] = reftest_screenshot
+ self.implementation_kwargs["chrome_scope"] = (browser_version is not None and
+ int(browser_version.split(".")[0]) < 82)
+ self.close_after_done = close_after_done
+ self.has_window = False
+ self.original_pref_values = {}
+ self.group_metadata = group_metadata
+ self.debug = debug
+ self.debug_test = debug_test
+
+ self.install_extensions = browser.extensions
+
+ with open(os.path.join(here, "reftest.js")) as f:
+ self.script = f.read()
+ with open(os.path.join(here, "test-wait.js")) as f:
+ self.wait_script = f.read() % {"classname": "reftest-wait"}
+
+ def get_implementation(self, reftest_internal):
+ return (InternalRefTestImplementation if reftest_internal
+ else RefTestImplementation)(self)
+
+ def setup(self, runner):
+ super().setup(runner)
+ for extension_path in self.install_extensions:
+ self.logger.info("Installing extension from %s" % extension_path)
+ addons = Addons(self.protocol.marionette)
+ addons.install(extension_path)
+
+ self.implementation.setup(**self.implementation_kwargs)
+
+ def teardown(self):
+ try:
+ self.implementation.teardown()
+ if self.protocol.marionette and self.protocol.marionette.session_id:
+ handles = self.protocol.marionette.window_handles
+ if handles:
+ _switch_to_window(self.protocol.marionette, handles[0])
+ super().teardown()
+ except Exception:
+ # Ignore errors during teardown
+ self.logger.warning("Exception during reftest teardown:\n%s" %
+ traceback.format_exc())
+
+ def reset(self):
+ self.implementation.reset(**self.implementation_kwargs)
+
+ def is_alive(self):
+ return self.protocol.is_alive()
+
+ def on_environment_change(self, new_environment):
+ self.protocol.on_environment_change(self.last_environment, new_environment)
+
+ def do_test(self, test):
+ if not isinstance(self.implementation, InternalRefTestImplementation):
+ if self.close_after_done and self.has_window:
+ self.protocol.marionette.close()
+ _switch_to_window(self.protocol.marionette,
+ self.protocol.marionette.window_handles[-1])
+ self.has_window = False
+
+ if not self.has_window:
+ self.protocol.base.execute_script(self.script)
+ self.protocol.base.set_window(self.protocol.marionette.window_handles[-1])
+ self.has_window = True
+ self.protocol.testharness.test_window_loaded()
+
+ if self.protocol.coverage.is_enabled:
+ self.protocol.coverage.reset()
+
+ result = self.implementation.run_test(test)
+
+ if self.protocol.coverage.is_enabled:
+ self.protocol.coverage.dump()
+
+ if self.debug:
+ assertion_count = self.protocol.asserts.get()
+ if "extra" not in result:
+ result["extra"] = {}
+ if assertion_count is not None:
+ result["extra"]["assertion_count"] = assertion_count
+
+ if self.debug_test and result["status"] in ["PASS", "FAIL", "ERROR"] and "extra" in result:
+ self.protocol.base.set_window(self.protocol.base.window_handles()[0])
+ self.protocol.debug.load_reftest_analyzer(test, result)
+
+ return self.convert_result(test, result)
+
+ def screenshot(self, test, viewport_size, dpi, page_ranges):
+ # https://github.com/web-platform-tests/wpt/issues/7135
+ assert viewport_size is None
+ assert dpi is None
+
+ timeout = self.timeout_multiplier * test.timeout if self.debug_info is None else None
+
+ test_url = self.test_url(test)
+
+ return ExecuteAsyncScriptRun(self.logger,
+ self._screenshot,
+ self.protocol,
+ test_url,
+ timeout,
+ self.extra_timeout).run()
+
+ def _screenshot(self, protocol, url, timeout):
+ protocol.marionette.navigate(url)
+
+ protocol.base.execute_script(self.wait_script, asynchronous=True)
+
+ screenshot = protocol.marionette.screenshot(full=False)
+        # Strip off the "data:image/png;base64," prefix of the data URL.
+ if screenshot.startswith("data:image/png;base64,"):
+ screenshot = screenshot.split(",", 1)[1]
+
+ return screenshot
+
+
+class InternalRefTestImplementation(RefTestImplementation):
+ def __init__(self, executor):
+ self.timeout_multiplier = executor.timeout_multiplier
+ self.executor = executor
+ self.chrome_scope = False
+
+ @property
+ def logger(self):
+ return self.executor.logger
+
+ def setup(self, screenshot="unexpected", chrome_scope=False):
+ data = {"screenshot": screenshot, "isPrint": self.executor.is_print}
+ if self.executor.group_metadata is not None:
+ data["urlCount"] = {urljoin(self.executor.server_url(key[0]), key[1]):value
+ for key, value in self.executor.group_metadata.get("url_count", {}).items()
+ if value > 1}
+ self.chrome_scope = chrome_scope
+ if chrome_scope:
+ self.logger.debug("Using marionette Chrome scope for reftests")
+ self.executor.protocol.marionette.set_context(self.executor.protocol.marionette.CONTEXT_CHROME)
+ self.executor.protocol.marionette._send_message("reftest:setup", data)
+
+ def reset(self, **kwargs):
+        # This is obviously wrong; it shouldn't be a no-op.
+ # see https://github.com/web-platform-tests/wpt/issues/15604
+ pass
+
+ def run_test(self, test):
+ references = self.get_references(test, test)
+ timeout = (test.timeout * 1000) * self.timeout_multiplier
+ rv = self.executor.protocol.marionette._send_message("reftest:run",
+ {"test": self.executor.test_url(test),
+ "references": references,
+ "expected": test.expected(),
+ "timeout": timeout,
+ "width": 800,
+ "height": 600,
+ "pageRanges": test.page_ranges})["value"]
+ return rv
+
+ def get_references(self, root_test, node):
+ rv = []
+ for item, relation in node.references:
+ rv.append([self.executor.test_url(item), self.get_references(root_test, item), relation,
+ {"fuzzy": self.get_fuzzy(root_test, [node, item], relation)}])
+ return rv
+
+ def teardown(self):
+ try:
+ if self.executor.protocol.marionette and self.executor.protocol.marionette.session_id:
+ self.executor.protocol.marionette._send_message("reftest:teardown", {})
+ if self.chrome_scope:
+ self.executor.protocol.marionette.set_context(
+ self.executor.protocol.marionette.CONTEXT_CONTENT)
+                # The reftest runner opens and closes a focused window, so,
+                # just as after closing any other window, we need to give a
+                # new window focus.
+ handles = self.executor.protocol.marionette.window_handles
+ if handles:
+ _switch_to_window(self.executor.protocol.marionette, handles[0])
+ except Exception:
+ # Ignore errors during teardown
+ self.logger.warning(traceback.format_exc())
+
+
+class MarionetteCrashtestExecutor(CrashtestExecutor):
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1,
+ debug_info=None, capabilities=None, debug=False,
+ ccov=False, **kwargs):
+ """Marionette-based executor for testharness.js tests"""
+ CrashtestExecutor.__init__(self, logger, browser, server_config,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info)
+ self.protocol = MarionetteProtocol(self,
+ browser,
+ capabilities,
+ timeout_multiplier,
+ kwargs["e10s"],
+ ccov)
+
+ self.original_pref_values = {}
+ self.debug = debug
+
+ with open(os.path.join(here, "test-wait.js")) as f:
+ self.wait_script = f.read() % {"classname": "test-wait"}
+
+ if marionette is None:
+ do_delayed_imports()
+
+ def is_alive(self):
+ return self.protocol.is_alive()
+
+ def on_environment_change(self, new_environment):
+ self.protocol.on_environment_change(self.last_environment, new_environment)
+
+ def do_test(self, test):
+ timeout = (test.timeout * self.timeout_multiplier if self.debug_info is None
+ else None)
+
+ success, data = ExecuteAsyncScriptRun(self.logger,
+ self.do_crashtest,
+ self.protocol,
+ self.test_url(test),
+ timeout,
+ self.extra_timeout).run()
+ status = None
+ if not success:
+ status = data[0]
+
+ extra = None
+ if self.debug and (success or status not in ("CRASH", "INTERNAL-ERROR")):
+ assertion_count = self.protocol.asserts.get()
+ if assertion_count is not None:
+ extra = {"assertion_count": assertion_count}
+
+ if success:
+ return self.convert_result(test, data)
+
+ return (test.result_cls(extra=extra, *data), [])
+
+ def do_crashtest(self, protocol, url, timeout):
+ if self.protocol.coverage.is_enabled:
+ self.protocol.coverage.reset()
+
+ protocol.base.load(url)
+ protocol.base.execute_script(self.wait_script, asynchronous=True)
+
+ if self.protocol.coverage.is_enabled:
+ self.protocol.coverage.dump()
+
+ return {"status": "PASS",
+ "message": None}
+
+
+class MarionettePrintRefTestExecutor(MarionetteRefTestExecutor):
+ is_print = True
+
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1,
+ screenshot_cache=None, close_after_done=True,
+ debug_info=None, reftest_screenshot="unexpected", ccov=False,
+ group_metadata=None, capabilities=None, debug=False,
+ reftest_internal=False, **kwargs):
+ """Marionette-based executor for reftests"""
+ MarionetteRefTestExecutor.__init__(self,
+ logger,
+ browser,
+ server_config,
+ timeout_multiplier=timeout_multiplier,
+ screenshot_cache=screenshot_cache,
+ close_after_done=close_after_done,
+ debug_info=debug_info,
+ reftest_screenshot=reftest_screenshot,
+ reftest_internal=reftest_internal,
+ ccov=ccov,
+ group_metadata=group_metadata,
+ capabilities=capabilities,
+ debug=debug,
+ **kwargs)
+
+ def setup(self, runner):
+ super().setup(runner)
+ if not isinstance(self.implementation, InternalRefTestImplementation):
+ self.protocol.pdf_print.load_runner()
+
+ def get_implementation(self, reftest_internal):
+ return (InternalRefTestImplementation if reftest_internal
+ else RefTestImplementation)(self)
+
+ def screenshot(self, test, viewport_size, dpi, page_ranges):
+ # https://github.com/web-platform-tests/wpt/issues/7140
+ assert dpi is None
+
+ self.viewport_size = viewport_size
+ timeout = self.timeout_multiplier * test.timeout if self.debug_info is None else None
+
+ test_url = self.test_url(test)
+ self.page_ranges = page_ranges.get(test.url)
+
+ return ExecuteAsyncScriptRun(self.logger,
+ self._render,
+ self.protocol,
+ test_url,
+ timeout,
+ self.extra_timeout).run()
+
+ def _render(self, protocol, url, timeout):
+ protocol.marionette.navigate(url)
+
+ protocol.base.execute_script(self.wait_script, asynchronous=True)
+
+ pdf = protocol.pdf_print.render_as_pdf(*self.viewport_size)
+ screenshots = protocol.pdf_print.pdf_to_png(pdf, self.page_ranges)
+ for i, screenshot in enumerate(screenshots):
+            # Strip off the "data:image/png;base64," prefix of the data URL.
+ if screenshot.startswith("data:image/png;base64,"):
+ screenshots[i] = screenshot.split(",", 1)[1]
+
+ return screenshots
+
+
+class MarionetteWdspecExecutor(WdspecExecutor):
+ def __init__(self, logger, browser, *args, **kwargs):
+ super().__init__(logger, browser, *args, **kwargs)
+
+ args = self.capabilities["moz:firefoxOptions"].setdefault("args", [])
+ args.extend(["--profile", self.browser.profile])
+
+ for option in ["androidPackage", "androidDeviceSerial", "env"]:
+ if hasattr(browser, option):
+ self.capabilities["moz:firefoxOptions"][option] = getattr(browser, option)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorselenium.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorselenium.py
new file mode 100644
index 0000000000..85076c877c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorselenium.py
@@ -0,0 +1,485 @@
+# mypy: allow-untyped-defs
+
+import json
+import os
+import socket
+import threading
+import time
+import traceback
+import uuid
+from urllib.parse import urljoin
+
+from .base import (CallbackHandler,
+ RefTestExecutor,
+ RefTestImplementation,
+ TestharnessExecutor,
+ TimedRunner,
+ strip_server)
+from .protocol import (BaseProtocolPart,
+ TestharnessProtocolPart,
+ Protocol,
+ SelectorProtocolPart,
+ ClickProtocolPart,
+ CookiesProtocolPart,
+ SendKeysProtocolPart,
+ WindowProtocolPart,
+ ActionSequenceProtocolPart,
+ TestDriverProtocolPart)
+
+here = os.path.dirname(__file__)
+
+webdriver = None
+exceptions = None
+RemoteConnection = None
+Command = None
+
+
+def do_delayed_imports():
+ global webdriver
+ global exceptions
+ global RemoteConnection
+ global Command
+ from selenium import webdriver
+ from selenium.common import exceptions
+ from selenium.webdriver.remote.remote_connection import RemoteConnection
+ from selenium.webdriver.remote.command import Command
+
+
+class SeleniumBaseProtocolPart(BaseProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def execute_script(self, script, asynchronous=False):
+ method = self.webdriver.execute_async_script if asynchronous else self.webdriver.execute_script
+ return method(script)
+
+ def set_timeout(self, timeout):
+ self.webdriver.set_script_timeout(timeout * 1000)
+
+ @property
+ def current_window(self):
+ return self.webdriver.current_window_handle
+
+ def set_window(self, handle):
+ self.webdriver.switch_to_window(handle)
+
+ def window_handles(self):
+ return self.webdriver.window_handles
+
+ def load(self, url):
+ self.webdriver.get(url)
+
+ def wait(self):
+ while True:
+ try:
+ return self.webdriver.execute_async_script("""let callback = arguments[arguments.length - 1];
+addEventListener("__test_restart", e => {e.preventDefault(); callback(true)})""")
+ except exceptions.TimeoutException:
+ pass
+ except (socket.timeout, exceptions.NoSuchWindowException, exceptions.ErrorInResponseException, OSError):
+ break
+ except Exception:
+ self.logger.error(traceback.format_exc())
+ break
+ return False
+
+
+class SeleniumTestharnessProtocolPart(TestharnessProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+ self.runner_handle = None
+ with open(os.path.join(here, "runner.js")) as f:
+ self.runner_script = f.read()
+ with open(os.path.join(here, "window-loaded.js")) as f:
+ self.window_loaded_script = f.read()
+
+ def load_runner(self, url_protocol):
+ if self.runner_handle:
+ self.webdriver.switch_to_window(self.runner_handle)
+ url = urljoin(self.parent.executor.server_url(url_protocol),
+ "/testharness_runner.html")
+ self.logger.debug("Loading %s" % url)
+ self.webdriver.get(url)
+ self.runner_handle = self.webdriver.current_window_handle
+ format_map = {"title": threading.current_thread().name.replace("'", '"')}
+ self.parent.base.execute_script(self.runner_script % format_map)
+
+ def close_old_windows(self):
+ handles = [item for item in self.webdriver.window_handles if item != self.runner_handle]
+ for handle in handles:
+ try:
+ self.webdriver.switch_to_window(handle)
+ self.webdriver.close()
+ except exceptions.NoSuchWindowException:
+ pass
+ self.webdriver.switch_to_window(self.runner_handle)
+ return self.runner_handle
+
+ def get_test_window(self, window_id, parent, timeout=5):
+ """Find the test window amongst all the open windows.
+ This is assumed to be either the named window or the one after the parent in the list of
+ window handles
+
+ :param window_id: The DOM name of the Window
+ :param parent: The handle of the runner window
+ :param timeout: The time in seconds to wait for the window to appear. This is because in
+ some implementations there's a race between calling window.open and the
+ window being added to the list of WebDriver accessible windows."""
+ test_window = None
+ end_time = time.time() + timeout
+ while time.time() < end_time:
+ try:
+ # Try using the JSON serialization of the WindowProxy object,
+ # it's in Level 1 but nothing supports it yet
+ win_s = self.webdriver.execute_script("return window['%s'];" % window_id)
+ win_obj = json.loads(win_s)
+ test_window = win_obj["window-fcc6-11e5-b4f8-330a88ab9d7f"]
+ except Exception:
+ pass
+
+ if test_window is None:
+ after = self.webdriver.window_handles
+ if len(after) == 2:
+ test_window = next(iter(set(after) - {parent}))
+ elif after[0] == parent and len(after) > 2:
+ # Hope the first one here is the test window
+ test_window = after[1]
+
+ if test_window is not None:
+ assert test_window != parent
+ return test_window
+
+ time.sleep(0.1)
+
+ raise Exception("unable to find test window")
+
+ def test_window_loaded(self):
+ """Wait until the page in the new window has been loaded.
+
+        Ignore JavaScript exceptions that are thrown when the document
+        has been unloaded due to a process change.
+ """
+ while True:
+ try:
+ self.webdriver.execute_async_script(self.window_loaded_script)
+ break
+ except exceptions.JavascriptException:
+ pass
+
+
+class SeleniumSelectorProtocolPart(SelectorProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def elements_by_selector(self, selector):
+ return self.webdriver.find_elements_by_css_selector(selector)
+
+ def elements_by_selector_and_frame(self, element_selector, frame):
+ return self.webdriver.find_elements_by_css_selector(element_selector)
+
+
+class SeleniumClickProtocolPart(ClickProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def element(self, element):
+ return element.click()
+
+
+class SeleniumCookiesProtocolPart(CookiesProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def delete_all_cookies(self):
+ self.logger.info("Deleting all cookies")
+ return self.webdriver.delete_all_cookies()
+
+ def get_all_cookies(self):
+ self.logger.info("Getting all cookies")
+ return self.webdriver.get_all_cookies()
+
+ def get_named_cookie(self, name):
+ self.logger.info("Getting cookie named %s" % name)
+ try:
+ return self.webdriver.get_named_cookie(name)
+ except exceptions.NoSuchCookieException:
+ return None
+
+class SeleniumWindowProtocolPart(WindowProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def minimize(self):
+ self.previous_rect = self.webdriver.window.rect
+ self.logger.info("Minimizing")
+ return self.webdriver.minimize()
+
+ def set_rect(self, rect):
+ self.logger.info("Setting window rect")
+ self.webdriver.window.rect = rect
+
+class SeleniumSendKeysProtocolPart(SendKeysProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def send_keys(self, element, keys):
+ return element.send_keys(keys)
+
+
+class SeleniumActionSequenceProtocolPart(ActionSequenceProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def send_actions(self, actions):
+ self.webdriver.execute(Command.W3C_ACTIONS, {"actions": actions})
+
+ def release(self):
+ self.webdriver.execute(Command.W3C_CLEAR_ACTIONS, {})
+
+
+class SeleniumTestDriverProtocolPart(TestDriverProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def send_message(self, cmd_id, message_type, status, message=None):
+ obj = {
+ "cmd_id": cmd_id,
+ "type": "testdriver-%s" % str(message_type),
+ "status": str(status)
+ }
+ if message:
+ obj["message"] = str(message)
+ self.webdriver.execute_script("window.postMessage(%s, '*')" % json.dumps(obj))
+
+
+class SeleniumProtocol(Protocol):
+ implements = [SeleniumBaseProtocolPart,
+ SeleniumTestharnessProtocolPart,
+ SeleniumSelectorProtocolPart,
+ SeleniumClickProtocolPart,
+ SeleniumCookiesProtocolPart,
+ SeleniumSendKeysProtocolPart,
+ SeleniumTestDriverProtocolPart,
+ SeleniumWindowProtocolPart,
+ SeleniumActionSequenceProtocolPart]
+
+ def __init__(self, executor, browser, capabilities, **kwargs):
+ do_delayed_imports()
+
+ super().__init__(executor, browser)
+ self.capabilities = capabilities
+ self.url = browser.webdriver_url
+ self.webdriver = None
+
+ def connect(self):
+ """Connect to browser via Selenium's WebDriver implementation."""
+ self.logger.debug("Connecting to Selenium on URL: %s" % self.url)
+
+ self.webdriver = webdriver.Remote(command_executor=RemoteConnection(self.url.strip("/"),
+ resolve_ip=False),
+ desired_capabilities=self.capabilities)
+
+ def teardown(self):
+ self.logger.debug("Hanging up on Selenium session")
+ try:
+ self.webdriver.quit()
+ except Exception:
+ pass
+ del self.webdriver
+
+ def is_alive(self):
+ try:
+ # Get a simple property over the connection
+ self.webdriver.current_window_handle
+ # TODO what exception?
+ except (socket.timeout, exceptions.ErrorInResponseException):
+ return False
+ return True
+
+ def after_connect(self):
+ self.testharness.load_runner(self.executor.last_environment["protocol"])
+
+
+class SeleniumRun(TimedRunner):
+ def set_timeout(self):
+ timeout = self.timeout
+
+ try:
+ self.protocol.base.set_timeout(timeout + self.extra_timeout)
+ except exceptions.ErrorInResponseException:
+ msg = "Lost WebDriver connection"
+ self.logger.error(msg)
+ return ("INTERNAL-ERROR", msg)
+
+ def run_func(self):
+ try:
+ self.result = True, self.func(self.protocol, self.url, self.timeout)
+ except exceptions.TimeoutException:
+ self.result = False, ("EXTERNAL-TIMEOUT", None)
+ except (socket.timeout, exceptions.ErrorInResponseException):
+ self.result = False, ("CRASH", None)
+ except Exception as e:
+ message = str(getattr(e, "message", ""))
+ if message:
+ message += "\n"
+ message += traceback.format_exc()
+ self.result = False, ("INTERNAL-ERROR", message)
+ finally:
+ self.result_flag.set()
+
+
+class SeleniumTestharnessExecutor(TestharnessExecutor):
+ supports_testdriver = True
+
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1,
+ close_after_done=True, capabilities=None, debug_info=None,
+ supports_eager_pageload=True, **kwargs):
+ """Selenium-based executor for testharness.js tests"""
+ TestharnessExecutor.__init__(self, logger, browser, server_config,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info)
+ self.protocol = SeleniumProtocol(self, browser, capabilities)
+ with open(os.path.join(here, "testharness_webdriver_resume.js")) as f:
+ self.script_resume = f.read()
+ self.close_after_done = close_after_done
+ self.window_id = str(uuid.uuid4())
+ self.supports_eager_pageload = supports_eager_pageload
+
+ def is_alive(self):
+ return self.protocol.is_alive()
+
+ def on_environment_change(self, new_environment):
+ if new_environment["protocol"] != self.last_environment["protocol"]:
+ self.protocol.testharness.load_runner(new_environment["protocol"])
+
+ def do_test(self, test):
+ url = self.test_url(test)
+
+ success, data = SeleniumRun(self.logger,
+ self.do_testharness,
+ self.protocol,
+ url,
+ test.timeout * self.timeout_multiplier,
+ self.extra_timeout).run()
+
+ if success:
+ return self.convert_result(test, data)
+
+ return (test.result_cls(*data), [])
+
+ def do_testharness(self, protocol, url, timeout):
+ format_map = {"url": strip_server(url)}
+
+ parent_window = protocol.testharness.close_old_windows()
+ # Now start the test harness
+ protocol.base.execute_script("window.open('about:blank', '%s', 'noopener')" % self.window_id)
+ test_window = protocol.testharness.get_test_window(self.window_id,
+ parent_window,
+ timeout=5*self.timeout_multiplier)
+ self.protocol.base.set_window(test_window)
+ protocol.testharness.test_window_loaded()
+
+ protocol.base.load(url)
+
+ if not self.supports_eager_pageload:
+ self.wait_for_load(protocol)
+
+ handler = CallbackHandler(self.logger, protocol, test_window)
+ while True:
+ result = protocol.base.execute_script(
+ self.script_resume % format_map, asynchronous=True)
+ done, rv = handler(result)
+ if done:
+ break
+ return rv
+
+ def wait_for_load(self, protocol):
+ # pageLoadStrategy=eager doesn't work in Chrome so try to emulate in user script
+ loaded = False
+ seen_error = False
+ while not loaded:
+ try:
+ loaded = protocol.base.execute_script("""
+var callback = arguments[arguments.length - 1];
+if (location.href === "about:blank") {
+ callback(false);
+} else if (document.readyState !== "loading") {
+ callback(true);
+} else {
+ document.addEventListener("readystatechange", () => {if (document.readyState !== "loading") {callback(true)}});
+}""", asynchronous=True)
+ except Exception:
+ # We can get an error here if the script runs in the initial about:blank
+ # document before it has navigated, with the driver returning an error
+ # indicating that the document was unloaded
+ if seen_error:
+ raise
+ seen_error = True
+
+
+class SeleniumRefTestExecutor(RefTestExecutor):
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1,
+ screenshot_cache=None, close_after_done=True,
+ debug_info=None, capabilities=None, **kwargs):
+ """Selenium WebDriver-based executor for reftests"""
+ RefTestExecutor.__init__(self,
+ logger,
+ browser,
+ server_config,
+ screenshot_cache=screenshot_cache,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info)
+ self.protocol = SeleniumProtocol(self, browser,
+ capabilities=capabilities)
+ self.implementation = RefTestImplementation(self)
+ self.close_after_done = close_after_done
+ self.has_window = False
+
+ with open(os.path.join(here, "test-wait.js")) as f:
+ self.wait_script = f.read() % {"classname": "reftest-wait"}
+
+ def reset(self):
+ self.implementation.reset()
+
+ def is_alive(self):
+ return self.protocol.is_alive()
+
+ def do_test(self, test):
+ self.logger.info("Test requires OS-level window focus")
+
+ width_offset, height_offset = self.protocol.webdriver.execute_script(
+ """return [window.outerWidth - window.innerWidth,
+ window.outerHeight - window.innerHeight];"""
+ )
+ self.protocol.webdriver.set_window_position(0, 0)
+ self.protocol.webdriver.set_window_size(800 + width_offset, 600 + height_offset)
+
+ result = self.implementation.run_test(test)
+
+ return self.convert_result(test, result)
+
+ def screenshot(self, test, viewport_size, dpi, page_ranges):
+ # https://github.com/web-platform-tests/wpt/issues/7135
+ assert viewport_size is None
+ assert dpi is None
+
+ return SeleniumRun(self.logger,
+ self._screenshot,
+ self.protocol,
+ self.test_url(test),
+ test.timeout,
+ self.extra_timeout).run()
+
+ def _screenshot(self, protocol, url, timeout):
+ webdriver = protocol.webdriver
+ webdriver.get(url)
+
+ webdriver.execute_async_script(self.wait_script)
+
+ screenshot = webdriver.get_screenshot_as_base64()
+
+        # Strip off the "data:image/png;base64," prefix if present
+ if screenshot.startswith("data:image/png;base64,"):
+ screenshot = screenshot.split(",", 1)[1]
+
+ return screenshot
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorservo.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorservo.py
new file mode 100644
index 0000000000..89aaf00352
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorservo.py
@@ -0,0 +1,363 @@
+# mypy: allow-untyped-defs
+
+import base64
+import json
+import os
+import subprocess
+import tempfile
+import threading
+import traceback
+import uuid
+
+from mozprocess import ProcessHandler
+
+from tools.serve.serve import make_hosts_file
+
+from .base import (RefTestImplementation,
+ crashtest_result_converter,
+ testharness_result_converter,
+ reftest_result_converter,
+ TimedRunner)
+from .process import ProcessTestExecutor
+from .protocol import ConnectionlessProtocol
+from ..browsers.base import browser_command
+
+
+pytestrunner = None
+webdriver = None
+
+
+def write_hosts_file(config):
+ hosts_fd, hosts_path = tempfile.mkstemp()
+ with os.fdopen(hosts_fd, "w") as f:
+ f.write(make_hosts_file(config, "127.0.0.1"))
+ return hosts_path
+
+
+def build_servo_command(test, test_url_func, browser, binary, pause_after_test, debug_info,
+ extra_args=None, debug_opts="replace-surrogates"):
+ args = [
+ "--hard-fail", "-u", "Servo/wptrunner",
+ "-z", test_url_func(test),
+ ]
+ if debug_opts:
+ args += ["-Z", debug_opts]
+ for stylesheet in browser.user_stylesheets:
+ args += ["--user-stylesheet", stylesheet]
+ for pref, value in test.environment.get('prefs', {}).items():
+ args += ["--pref", f"{pref}={value}"]
+ if browser.ca_certificate_path:
+ args += ["--certificate-path", browser.ca_certificate_path]
+ if extra_args:
+ args += extra_args
+ args += browser.binary_args
+ debug_args, command = browser_command(binary, args, debug_info)
+ if pause_after_test:
+ command.remove("-z")
+ return debug_args + command
+
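+# As a rough illustration (hypothetical binary path and test URL), a plain
+# testharness invocation built by build_servo_command looks something like
+#
+#   ["/path/to/servo", "--hard-fail", "-u", "Servo/wptrunner",
+#    "-z", "http://web-platform.test:8000/foo/bar.html",
+#    "-Z", "replace-surrogates"]
+#
+# with user stylesheets, prefs, the certificate path, any extra args and
+# browser.binary_args appended after that.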
+
+
+class ServoTestharnessExecutor(ProcessTestExecutor):
+ convert_result = testharness_result_converter
+
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1, debug_info=None,
+ pause_after_test=False, **kwargs):
+ ProcessTestExecutor.__init__(self, logger, browser, server_config,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info)
+ self.pause_after_test = pause_after_test
+ self.result_data = None
+ self.result_flag = None
+ self.protocol = ConnectionlessProtocol(self, browser)
+ self.hosts_path = write_hosts_file(server_config)
+
+ def teardown(self):
+ try:
+ os.unlink(self.hosts_path)
+ except OSError:
+ pass
+ ProcessTestExecutor.teardown(self)
+
+ def do_test(self, test):
+ self.result_data = None
+ self.result_flag = threading.Event()
+
+ self.command = build_servo_command(test,
+ self.test_url,
+ self.browser,
+ self.binary,
+ self.pause_after_test,
+ self.debug_info)
+
+ env = os.environ.copy()
+ env["HOST_FILE"] = self.hosts_path
+ env["RUST_BACKTRACE"] = "1"
+
+
+ if not self.interactive:
+ self.proc = ProcessHandler(self.command,
+ processOutputLine=[self.on_output],
+ onFinish=self.on_finish,
+ env=env,
+ storeOutput=False)
+ self.proc.run()
+ else:
+ self.proc = subprocess.Popen(self.command, env=env)
+
+ try:
+ timeout = test.timeout * self.timeout_multiplier
+
+ # Now wait to get the output we expect, or until we reach the timeout
+ if not self.interactive and not self.pause_after_test:
+ wait_timeout = timeout + 5
+ self.result_flag.wait(wait_timeout)
+ else:
+ wait_timeout = None
+ self.proc.wait()
+
+ proc_is_running = True
+
+ if self.result_flag.is_set():
+ if self.result_data is not None:
+ result = self.convert_result(test, self.result_data)
+ else:
+ self.proc.wait()
+ result = (test.result_cls("CRASH", None), [])
+ proc_is_running = False
+ else:
+ result = (test.result_cls("TIMEOUT", None), [])
+
+
+ if proc_is_running:
+ if self.pause_after_test:
+ self.logger.info("Pausing until the browser exits")
+ self.proc.wait()
+ else:
+ self.proc.kill()
+ except: # noqa
+ self.proc.kill()
+ raise
+
+ return result
+
+ def on_output(self, line):
+ prefix = "ALERT: RESULT: "
+ line = line.decode("utf8", "replace")
+ if line.startswith(prefix):
+ self.result_data = json.loads(line[len(prefix):])
+ self.result_flag.set()
+ else:
+ if self.interactive:
+ print(line)
+ else:
+ self.logger.process_output(self.proc.pid,
+ line,
+ " ".join(self.command))
+
+ def on_finish(self):
+ self.result_flag.set()
+
+
+class TempFilename:
+ def __init__(self, directory):
+ self.directory = directory
+ self.path = None
+
+ def __enter__(self):
+ self.path = os.path.join(self.directory, str(uuid.uuid4()))
+ return self.path
+
+ def __exit__(self, *args, **kwargs):
+ try:
+ os.unlink(self.path)
+ except OSError:
+ pass
+
+
+class ServoRefTestExecutor(ProcessTestExecutor):
+ convert_result = reftest_result_converter
+
+ def __init__(self, logger, browser, server_config, binary=None, timeout_multiplier=1,
+ screenshot_cache=None, debug_info=None, pause_after_test=False,
+ reftest_screenshot="unexpected", **kwargs):
+ ProcessTestExecutor.__init__(self,
+ logger,
+ browser,
+ server_config,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info,
+ reftest_screenshot=reftest_screenshot)
+
+ self.protocol = ConnectionlessProtocol(self, browser)
+ self.screenshot_cache = screenshot_cache
+ self.reftest_screenshot = reftest_screenshot
+ self.implementation = RefTestImplementation(self)
+ self.tempdir = tempfile.mkdtemp()
+ self.hosts_path = write_hosts_file(server_config)
+
+ def reset(self):
+ self.implementation.reset()
+
+ def teardown(self):
+ try:
+ os.unlink(self.hosts_path)
+ except OSError:
+ pass
+ os.rmdir(self.tempdir)
+ ProcessTestExecutor.teardown(self)
+
+ def screenshot(self, test, viewport_size, dpi, page_ranges):
+ with TempFilename(self.tempdir) as output_path:
+ extra_args = ["--exit",
+ "--output=%s" % output_path,
+ "--resolution", viewport_size or "800x600"]
+ debug_opts = "disable-text-aa,load-webfonts-synchronously,replace-surrogates"
+
+ if dpi:
+ extra_args += ["--device-pixel-ratio", dpi]
+
+ self.command = build_servo_command(test,
+ self.test_url,
+ self.browser,
+ self.binary,
+ False,
+ self.debug_info,
+ extra_args,
+ debug_opts)
+
+ env = os.environ.copy()
+ env["HOST_FILE"] = self.hosts_path
+ env["RUST_BACKTRACE"] = "1"
+
+ if not self.interactive:
+ self.proc = ProcessHandler(self.command,
+ processOutputLine=[self.on_output],
+ env=env)
+
+
+ try:
+ self.proc.run()
+ timeout = test.timeout * self.timeout_multiplier + 5
+ rv = self.proc.wait(timeout=timeout)
+ except KeyboardInterrupt:
+ self.proc.kill()
+ raise
+ else:
+ self.proc = subprocess.Popen(self.command,
+ env=env)
+ try:
+ rv = self.proc.wait()
+ except KeyboardInterrupt:
+ self.proc.kill()
+ raise
+
+ if rv is None:
+ self.proc.kill()
+ return False, ("EXTERNAL-TIMEOUT", None)
+
+ if rv != 0 or not os.path.exists(output_path):
+ return False, ("CRASH", None)
+
+ with open(output_path, "rb") as f:
+ # Might need to strip variable headers or something here
+ data = f.read()
+ # Returning the screenshot as a string could potentially be avoided,
+ # see https://github.com/web-platform-tests/wpt/issues/28929.
+ return True, [base64.b64encode(data).decode()]
+
+ def do_test(self, test):
+ result = self.implementation.run_test(test)
+
+ return self.convert_result(test, result)
+
+ def on_output(self, line):
+ line = line.decode("utf8", "replace")
+ if self.interactive:
+ print(line)
+ else:
+ self.logger.process_output(self.proc.pid,
+ line,
+ " ".join(self.command))
+
+
+class ServoTimedRunner(TimedRunner):
+ def run_func(self):
+ try:
+ self.result = True, self.func(self.protocol, self.url, self.timeout)
+ except Exception as e:
+ message = getattr(e, "message", "")
+ if message:
+ message += "\n"
+            message += traceback.format_exc()
+ self.result = False, ("INTERNAL-ERROR", message)
+ finally:
+ self.result_flag.set()
+
+ def set_timeout(self):
+ pass
+
+
+class ServoCrashtestExecutor(ProcessTestExecutor):
+ convert_result = crashtest_result_converter
+
+ def __init__(self, logger, browser, server_config, binary=None, timeout_multiplier=1,
+ screenshot_cache=None, debug_info=None, pause_after_test=False,
+ **kwargs):
+ ProcessTestExecutor.__init__(self,
+ logger,
+ browser,
+ server_config,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info)
+
+ self.pause_after_test = pause_after_test
+ self.protocol = ConnectionlessProtocol(self, browser)
+ self.tempdir = tempfile.mkdtemp()
+ self.hosts_path = write_hosts_file(server_config)
+
+ def do_test(self, test):
+ timeout = (test.timeout * self.timeout_multiplier if self.debug_info is None
+ else None)
+
+ test_url = self.test_url(test)
+ # We want to pass the full test object into build_servo_command,
+ # so stash it in the class
+ self.test = test
+ success, data = ServoTimedRunner(self.logger, self.do_crashtest, self.protocol,
+ test_url, timeout, self.extra_timeout).run()
+ # Ensure that no processes hang around if they timeout.
+ self.proc.kill()
+
+ if success:
+ return self.convert_result(test, data)
+
+ return (test.result_cls(*data), [])
+
+ def do_crashtest(self, protocol, url, timeout):
+ env = os.environ.copy()
+ env["HOST_FILE"] = self.hosts_path
+ env["RUST_BACKTRACE"] = "1"
+
+ command = build_servo_command(self.test,
+ self.test_url,
+ self.browser,
+ self.binary,
+ False,
+ self.debug_info,
+ extra_args=["-x"])
+
+ if not self.interactive:
+ self.proc = ProcessHandler(command,
+ env=env,
+ storeOutput=False)
+ self.proc.run()
+ else:
+ self.proc = subprocess.Popen(command, env=env)
+
+ self.proc.wait()
+
+ if self.proc.poll() >= 0:
+ return {"status": "PASS", "message": None}
+
+ return {"status": "CRASH", "message": None}
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorservodriver.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorservodriver.py
new file mode 100644
index 0000000000..0a939c5251
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorservodriver.py
@@ -0,0 +1,303 @@
+# mypy: allow-untyped-defs
+
+import json
+import os
+import socket
+import traceback
+
+from .base import (Protocol,
+ RefTestExecutor,
+ RefTestImplementation,
+ TestharnessExecutor,
+ TimedRunner,
+ strip_server)
+from .protocol import BaseProtocolPart
+from ..environment import wait_for_service
+
+webdriver = None
+ServoCommandExtensions = None
+
+here = os.path.dirname(__file__)
+
+
+def do_delayed_imports():
+ global webdriver
+ import webdriver
+
+ global ServoCommandExtensions
+
+ class ServoCommandExtensions:
+ def __init__(self, session):
+ self.session = session
+
+ @webdriver.client.command
+ def get_prefs(self, *prefs):
+ body = {"prefs": list(prefs)}
+ return self.session.send_session_command("POST", "servo/prefs/get", body)
+
+ @webdriver.client.command
+ def set_prefs(self, prefs):
+ body = {"prefs": prefs}
+ return self.session.send_session_command("POST", "servo/prefs/set", body)
+
+ @webdriver.client.command
+ def reset_prefs(self, *prefs):
+ body = {"prefs": list(prefs)}
+ return self.session.send_session_command("POST", "servo/prefs/reset", body)
+
+ def change_prefs(self, old_prefs, new_prefs):
+ # Servo interprets reset with an empty list as reset everything
+ if old_prefs:
+ self.reset_prefs(*old_prefs.keys())
+ self.set_prefs({k: parse_pref_value(v) for k, v in new_prefs.items()})
+
+
+# See parse_pref_from_command_line() in components/config/opts.rs
+def parse_pref_value(value):
+ if value == "true":
+ return True
+ if value == "false":
+ return False
+ try:
+ return float(value)
+ except ValueError:
+ return value
+
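+# For example, parse_pref_value("true") is True, parse_pref_value("2") is 2.0
+# and parse_pref_value("linux") stays the string "linux" (illustrative values).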
+
+class ServoBaseProtocolPart(BaseProtocolPart):
+ def execute_script(self, script, asynchronous=False):
+ pass
+
+ def set_timeout(self, timeout):
+ pass
+
+ def wait(self):
+ return False
+
+ def set_window(self, handle):
+ pass
+
+ def window_handles(self):
+ return []
+
+ def load(self, url):
+ pass
+
+
+class ServoWebDriverProtocol(Protocol):
+ implements = [ServoBaseProtocolPart]
+
+ def __init__(self, executor, browser, capabilities, **kwargs):
+ do_delayed_imports()
+ Protocol.__init__(self, executor, browser)
+ self.capabilities = capabilities
+ self.host = browser.webdriver_host
+ self.port = browser.webdriver_port
+ self.init_timeout = browser.init_timeout
+ self.session = None
+
+ def connect(self):
+ """Connect to browser via WebDriver."""
+ wait_for_service(self.logger, self.host, self.port, timeout=self.init_timeout)
+
+ self.session = webdriver.Session(self.host, self.port, extension=ServoCommandExtensions)
+ self.session.start()
+
+ def after_connect(self):
+ pass
+
+ def teardown(self):
+ self.logger.debug("Hanging up on WebDriver session")
+ try:
+ self.session.end()
+ except Exception:
+ pass
+
+ def is_alive(self):
+ try:
+ # Get a simple property over the connection
+ self.session.window_handle
+ # TODO what exception?
+ except Exception:
+ return False
+ return True
+
+ def wait(self):
+ while True:
+ try:
+ return self.session.execute_async_script("""let callback = arguments[arguments.length - 1];
+addEventListener("__test_restart", e => {e.preventDefault(); callback(true)})""")
+ except webdriver.TimeoutException:
+ pass
+ except (socket.timeout, OSError):
+ break
+ except Exception:
+ self.logger.error(traceback.format_exc())
+ break
+ return False
+
+
+class ServoWebDriverRun(TimedRunner):
+ def set_timeout(self):
+ pass
+
+ def run_func(self):
+ try:
+ self.result = True, self.func(self.protocol.session, self.url, self.timeout)
+ except webdriver.TimeoutException:
+ self.result = False, ("EXTERNAL-TIMEOUT", None)
+ except (socket.timeout, OSError):
+ self.result = False, ("CRASH", None)
+ except Exception as e:
+ message = getattr(e, "message", "")
+ if message:
+ message += "\n"
+ message += traceback.format_exc()
+ self.result = False, ("INTERNAL-ERROR", e)
+ finally:
+ self.result_flag.set()
+
+
+class ServoWebDriverTestharnessExecutor(TestharnessExecutor):
+ supports_testdriver = True
+
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1,
+ close_after_done=True, capabilities=None, debug_info=None,
+ **kwargs):
+        TestharnessExecutor.__init__(self, logger, browser, server_config,
+                                     timeout_multiplier=timeout_multiplier,
+                                     debug_info=debug_info)
+ self.protocol = ServoWebDriverProtocol(self, browser, capabilities=capabilities)
+ with open(os.path.join(here, "testharness_servodriver.js")) as f:
+ self.script = f.read()
+ self.timeout = None
+
+ def on_protocol_change(self, new_protocol):
+ pass
+
+ def is_alive(self):
+ return self.protocol.is_alive()
+
+ def do_test(self, test):
+ url = self.test_url(test)
+
+ timeout = test.timeout * self.timeout_multiplier + self.extra_timeout
+
+ if timeout != self.timeout:
+ try:
+ self.protocol.session.timeouts.script = timeout
+ self.timeout = timeout
+ except OSError:
+ msg = "Lost WebDriver connection"
+ self.logger.error(msg)
+ return ("INTERNAL-ERROR", msg)
+
+ success, data = ServoWebDriverRun(self.logger,
+ self.do_testharness,
+ self.protocol,
+ url,
+ timeout,
+ self.extra_timeout).run()
+
+ if success:
+ return self.convert_result(test, data)
+
+ return (test.result_cls(*data), [])
+
+ def do_testharness(self, session, url, timeout):
+ session.url = url
+ result = json.loads(
+ session.execute_async_script(
+ self.script % {"abs_url": url,
+ "url": strip_server(url),
+ "timeout_multiplier": self.timeout_multiplier,
+ "timeout": timeout * 1000}))
+ # Prevent leaking every page in history until Servo develops a more sane
+ # page cache
+ session.back()
+ return result
+
+ def on_environment_change(self, new_environment):
+ self.protocol.session.extension.change_prefs(
+ self.last_environment.get("prefs", {}),
+ new_environment.get("prefs", {})
+ )
+
+
+class TimeoutError(Exception):
+ pass
+
+
+class ServoWebDriverRefTestExecutor(RefTestExecutor):
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1,
+ screenshot_cache=None, capabilities=None, debug_info=None,
+ **kwargs):
+ """Selenium WebDriver-based executor for reftests"""
+ RefTestExecutor.__init__(self,
+ logger,
+ browser,
+ server_config,
+ screenshot_cache=screenshot_cache,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info)
+ self.protocol = ServoWebDriverProtocol(self, browser,
+ capabilities=capabilities)
+ self.implementation = RefTestImplementation(self)
+ self.timeout = None
+ with open(os.path.join(here, "test-wait.js")) as f:
+ self.wait_script = f.read() % {"classname": "reftest-wait"}
+
+ def reset(self):
+ self.implementation.reset()
+
+ def is_alive(self):
+ return self.protocol.is_alive()
+
+ def do_test(self, test):
+ try:
+ result = self.implementation.run_test(test)
+ return self.convert_result(test, result)
+ except OSError:
+ return test.result_cls("CRASH", None), []
+ except TimeoutError:
+ return test.result_cls("TIMEOUT", None), []
+ except Exception as e:
+ message = getattr(e, "message", "")
+ if message:
+ message += "\n"
+ message += traceback.format_exc()
+ return test.result_cls("INTERNAL-ERROR", message), []
+
+ def screenshot(self, test, viewport_size, dpi, page_ranges):
+ # https://github.com/web-platform-tests/wpt/issues/7135
+ assert viewport_size is None
+ assert dpi is None
+
+ timeout = (test.timeout * self.timeout_multiplier + self.extra_timeout
+ if self.debug_info is None else None)
+
+ if self.timeout != timeout:
+ try:
+ self.protocol.session.timeouts.script = timeout
+ self.timeout = timeout
+ except OSError:
+ msg = "Lost webdriver connection"
+ self.logger.error(msg)
+ return ("INTERNAL-ERROR", msg)
+
+ return ServoWebDriverRun(self.logger,
+ self._screenshot,
+ self.protocol,
+ self.test_url(test),
+ timeout,
+ self.extra_timeout).run()
+
+ def _screenshot(self, session, url, timeout):
+ session.url = url
+ session.execute_async_script(self.wait_script)
+ return session.screenshot()
+
+ def on_environment_change(self, new_environment):
+ self.protocol.session.extension.change_prefs(
+ self.last_environment.get("prefs", {}),
+ new_environment.get("prefs", {})
+ )
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorwebdriver.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorwebdriver.py
new file mode 100644
index 0000000000..54a5717999
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/executorwebdriver.py
@@ -0,0 +1,694 @@
+# mypy: allow-untyped-defs
+
+import json
+import os
+import socket
+import threading
+import time
+import traceback
+import uuid
+from urllib.parse import urljoin
+
+from .base import (CallbackHandler,
+ CrashtestExecutor,
+ RefTestExecutor,
+ RefTestImplementation,
+ TestharnessExecutor,
+ TimedRunner,
+ strip_server)
+from .protocol import (BaseProtocolPart,
+ TestharnessProtocolPart,
+ Protocol,
+ SelectorProtocolPart,
+ ClickProtocolPart,
+ CookiesProtocolPart,
+ SendKeysProtocolPart,
+ ActionSequenceProtocolPart,
+ TestDriverProtocolPart,
+ GenerateTestReportProtocolPart,
+ SetPermissionProtocolPart,
+ VirtualAuthenticatorProtocolPart,
+ WindowProtocolPart,
+ DebugProtocolPart,
+ SPCTransactionsProtocolPart,
+ merge_dicts)
+
+from webdriver.client import Session
+from webdriver import error
+
+here = os.path.dirname(__file__)
+
+
+class WebDriverCallbackHandler(CallbackHandler):
+ unimplemented_exc = (NotImplementedError, error.UnknownCommandException)
+
+
+class WebDriverBaseProtocolPart(BaseProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def execute_script(self, script, asynchronous=False):
+ method = self.webdriver.execute_async_script if asynchronous else self.webdriver.execute_script
+ return method(script)
+
+ def set_timeout(self, timeout):
+ try:
+ self.webdriver.timeouts.script = timeout
+ except error.WebDriverException:
+ # workaround https://bugs.chromium.org/p/chromedriver/issues/detail?id=2057
+ body = {"type": "script", "ms": timeout * 1000}
+ self.webdriver.send_session_command("POST", "timeouts", body)
+
+ @property
+ def current_window(self):
+ return self.webdriver.window_handle
+
+ def set_window(self, handle):
+ self.webdriver.window_handle = handle
+
+ def window_handles(self):
+ return self.webdriver.handles
+
+ def load(self, url):
+ self.webdriver.url = url
+
+ def wait(self):
+ while True:
+ try:
+ self.webdriver.execute_async_script("""let callback = arguments[arguments.length - 1];
+addEventListener("__test_restart", e => {e.preventDefault(); callback(true)})""")
+ self.webdriver.execute_async_script("")
+ except (error.TimeoutException,
+ error.ScriptTimeoutException,
+ error.JavascriptErrorException):
+ # A JavascriptErrorException will happen when we navigate;
+ # by ignoring it it's possible to reload the test whilst the
+ # harness remains paused
+ pass
+ except (socket.timeout, error.NoSuchWindowException, error.UnknownErrorException, OSError):
+ break
+ except Exception:
+ self.logger.error(traceback.format_exc())
+ break
+ return False
+
+
+class WebDriverTestharnessProtocolPart(TestharnessProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+ self.runner_handle = None
+ with open(os.path.join(here, "runner.js")) as f:
+ self.runner_script = f.read()
+ with open(os.path.join(here, "window-loaded.js")) as f:
+ self.window_loaded_script = f.read()
+
+ def load_runner(self, url_protocol):
+ if self.runner_handle:
+ self.webdriver.window_handle = self.runner_handle
+ url = urljoin(self.parent.executor.server_url(url_protocol),
+ "/testharness_runner.html")
+ self.logger.debug("Loading %s" % url)
+
+ self.webdriver.url = url
+ self.runner_handle = self.webdriver.window_handle
+ format_map = {"title": threading.current_thread().name.replace("'", '"')}
+ self.parent.base.execute_script(self.runner_script % format_map)
+
+ def close_old_windows(self):
+ self.webdriver.actions.release()
+ handles = [item for item in self.webdriver.handles if item != self.runner_handle]
+ for handle in handles:
+ try:
+ self.webdriver.window_handle = handle
+ self.webdriver.window.close()
+ except error.NoSuchWindowException:
+ pass
+ self.webdriver.window_handle = self.runner_handle
+ return self.runner_handle
+
+ def get_test_window(self, window_id, parent, timeout=5):
+ """Find the test window amongst all the open windows.
+ This is assumed to be either the named window or the one after the parent in the list of
+ window handles
+
+ :param window_id: The DOM name of the Window
+ :param parent: The handle of the runner window
+ :param timeout: The time in seconds to wait for the window to appear. This is because in
+ some implementations there's a race between calling window.open and the
+ window being added to the list of WebDriver accessible windows."""
+ test_window = None
+ end_time = time.time() + timeout
+ while time.time() < end_time:
+ try:
+ # Try using the JSON serialization of the WindowProxy object,
+ # it's in Level 1 but nothing supports it yet
+ win_s = self.webdriver.execute_script("return window['%s'];" % window_id)
+ win_obj = json.loads(win_s)
+ test_window = win_obj["window-fcc6-11e5-b4f8-330a88ab9d7f"]
+ except Exception:
+ pass
+
+ if test_window is None:
+ after = self.webdriver.handles
+ if len(after) == 2:
+ test_window = next(iter(set(after) - {parent}))
+ elif after[0] == parent and len(after) > 2:
+ # Hope the first one here is the test window
+ test_window = after[1]
+
+ if test_window is not None:
+ assert test_window != parent
+ return test_window
+
+ time.sleep(0.1)
+
+ raise Exception("unable to find test window")
+
+ def test_window_loaded(self):
+ """Wait until the page in the new window has been loaded.
+
+        JavaScript exceptions that are thrown when the document has been
+        unloaded due to a process change are ignored.
+ """
+ while True:
+ try:
+                self.webdriver.execute_async_script(self.window_loaded_script)
+ break
+ except error.JavascriptErrorException:
+ pass
+
+
+class WebDriverSelectorProtocolPart(SelectorProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def elements_by_selector(self, selector):
+ return self.webdriver.find.css(selector)
+
+
+class WebDriverClickProtocolPart(ClickProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def element(self, element):
+ self.logger.info("click " + repr(element))
+ return element.click()
+
+
+class WebDriverCookiesProtocolPart(CookiesProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def delete_all_cookies(self):
+ self.logger.info("Deleting all cookies")
+ return self.webdriver.send_session_command("DELETE", "cookie")
+
+ def get_all_cookies(self):
+ self.logger.info("Getting all cookies")
+ return self.webdriver.send_session_command("GET", "cookie")
+
+ def get_named_cookie(self, name):
+ self.logger.info("Getting cookie named %s" % name)
+ try:
+ return self.webdriver.send_session_command("GET", "cookie/%s" % name)
+ except error.NoSuchCookieException:
+ return None
+
+
+class WebDriverWindowProtocolPart(WindowProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def minimize(self):
+ self.logger.info("Minimizing")
+ return self.webdriver.window.minimize()
+
+ def set_rect(self, rect):
+ self.logger.info("Restoring")
+ self.webdriver.window.rect = rect
+
+
+class WebDriverSendKeysProtocolPart(SendKeysProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def send_keys(self, element, keys):
+ try:
+ return element.send_keys(keys)
+ except error.UnknownErrorException as e:
+ # workaround https://bugs.chromium.org/p/chromedriver/issues/detail?id=1999
+ if (e.http_status != 500 or
+ e.status_code != "unknown error"):
+ raise
+ return element.send_element_command("POST", "value", {"value": list(keys)})
+
+
+class WebDriverActionSequenceProtocolPart(ActionSequenceProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def send_actions(self, actions):
+ self.webdriver.actions.perform(actions['actions'])
+
+ def release(self):
+ self.webdriver.actions.release()
+
+
+class WebDriverTestDriverProtocolPart(TestDriverProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def send_message(self, cmd_id, message_type, status, message=None):
+ obj = {
+ "cmd_id": cmd_id,
+ "type": "testdriver-%s" % str(message_type),
+ "status": str(status)
+ }
+ if message:
+ obj["message"] = str(message)
+ self.webdriver.execute_script("window.postMessage(%s, '*')" % json.dumps(obj))
+
+ def _switch_to_frame(self, index_or_elem):
+ try:
+ self.webdriver.switch_frame(index_or_elem)
+ except (error.StaleElementReferenceException,
+ error.NoSuchFrameException) as e:
+ raise ValueError from e
+
+ def _switch_to_parent_frame(self):
+ self.webdriver.switch_frame("parent")
+
+
+class WebDriverGenerateTestReportProtocolPart(GenerateTestReportProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def generate_test_report(self, message):
+ json_message = {"message": message}
+ self.webdriver.send_session_command("POST", "reporting/generate_test_report", json_message)
+
+
+class WebDriverSetPermissionProtocolPart(SetPermissionProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def set_permission(self, descriptor, state):
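+        # ``descriptor`` is a permission descriptor such as
+        # {"name": "geolocation"} and ``state`` one of "granted", "denied" or
+        # "prompt" (illustrative values; both are passed through unchanged).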
+ permission_params_dict = {
+ "descriptor": descriptor,
+ "state": state,
+ }
+ self.webdriver.send_session_command("POST", "permissions", permission_params_dict)
+
+
+class WebDriverVirtualAuthenticatorProtocolPart(VirtualAuthenticatorProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def add_virtual_authenticator(self, config):
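+        # ``config`` is the WebAuthn virtual authenticator options dictionary,
+        # e.g. {"protocol": "ctap2", "transport": "usb"} (illustrative values);
+        # it is passed through to the endpoint unchanged.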
+ return self.webdriver.send_session_command("POST", "webauthn/authenticator", config)
+
+ def remove_virtual_authenticator(self, authenticator_id):
+ return self.webdriver.send_session_command("DELETE", "webauthn/authenticator/%s" % authenticator_id)
+
+ def add_credential(self, authenticator_id, credential):
+ return self.webdriver.send_session_command("POST", "webauthn/authenticator/%s/credential" % authenticator_id, credential)
+
+ def get_credentials(self, authenticator_id):
+ return self.webdriver.send_session_command("GET", "webauthn/authenticator/%s/credentials" % authenticator_id)
+
+ def remove_credential(self, authenticator_id, credential_id):
+ return self.webdriver.send_session_command("DELETE", f"webauthn/authenticator/{authenticator_id}/credentials/{credential_id}")
+
+ def remove_all_credentials(self, authenticator_id):
+ return self.webdriver.send_session_command("DELETE", "webauthn/authenticator/%s/credentials" % authenticator_id)
+
+ def set_user_verified(self, authenticator_id, uv):
+ return self.webdriver.send_session_command("POST", "webauthn/authenticator/%s/uv" % authenticator_id, uv)
+
+
+class WebDriverSPCTransactionsProtocolPart(SPCTransactionsProtocolPart):
+ def setup(self):
+ self.webdriver = self.parent.webdriver
+
+ def set_spc_transaction_mode(self, mode):
+ body = {"mode": mode}
+ return self.webdriver.send_session_command("POST", "secure-payment-confirmation/set-mode", body)
+
+
+class WebDriverDebugProtocolPart(DebugProtocolPart):
+ def load_devtools(self):
+ raise NotImplementedError()
+
+
+class WebDriverProtocol(Protocol):
+ implements = [WebDriverBaseProtocolPart,
+ WebDriverTestharnessProtocolPart,
+ WebDriverSelectorProtocolPart,
+ WebDriverClickProtocolPart,
+ WebDriverCookiesProtocolPart,
+ WebDriverSendKeysProtocolPart,
+ WebDriverWindowProtocolPart,
+ WebDriverActionSequenceProtocolPart,
+ WebDriverTestDriverProtocolPart,
+ WebDriverGenerateTestReportProtocolPart,
+ WebDriverSetPermissionProtocolPart,
+ WebDriverVirtualAuthenticatorProtocolPart,
+ WebDriverSPCTransactionsProtocolPart,
+ WebDriverDebugProtocolPart]
+
+ def __init__(self, executor, browser, capabilities, **kwargs):
+ super().__init__(executor, browser)
+ self.capabilities = capabilities
+ if hasattr(browser, "capabilities"):
+ if self.capabilities is None:
+ self.capabilities = browser.capabilities
+ else:
+ merge_dicts(self.capabilities, browser.capabilities)
+
+ pac = browser.pac
+ if pac is not None:
+ if self.capabilities is None:
+ self.capabilities = {}
+ merge_dicts(self.capabilities, {"proxy":
+ {
+ "proxyType": "pac",
+ "proxyAutoconfigUrl": urljoin(executor.server_url("http"), pac)
+ }
+ })
+
+ self.url = browser.webdriver_url
+ self.webdriver = None
+
+ def connect(self):
+ """Connect to browser via WebDriver."""
+ self.logger.debug("Connecting to WebDriver on URL: %s" % self.url)
+
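+        # For a webdriver_url such as "http://localhost:4444/" (illustrative)
+        # this yields host "localhost" and port "4444"; the URL is assumed to
+        # always carry an explicit port.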
+ host, port = self.url.split(":")[1].strip("/"), self.url.split(':')[-1].strip("/")
+
+ capabilities = {"alwaysMatch": self.capabilities}
+ self.webdriver = Session(host, port, capabilities=capabilities)
+ self.webdriver.start()
+
+ def teardown(self):
+ self.logger.debug("Hanging up on WebDriver session")
+ try:
+ self.webdriver.end()
+ except Exception as e:
+ message = str(getattr(e, "message", ""))
+ if message:
+ message += "\n"
+ message += traceback.format_exc()
+ self.logger.debug(message)
+ self.webdriver = None
+
+ def is_alive(self):
+ try:
+            # Get a simple property over the connection, with a 2 second
+            # timeout. That should be more than enough to check whether
+            # WebDriver is still alive, and allows the check to complete
+            # within the 5 seconds of extra_timeout the testrunner grants
+            # before its external timeout triggers.
+ self.webdriver.send_session_command("GET", "window", timeout=2)
+ except (socket.timeout, error.UnknownErrorException, error.InvalidSessionIdException):
+ return False
+ return True
+
+ def after_connect(self):
+ self.testharness.load_runner(self.executor.last_environment["protocol"])
+
+
+class WebDriverRun(TimedRunner):
+ def set_timeout(self):
+ try:
+ self.protocol.base.set_timeout(self.timeout + self.extra_timeout)
+ except error.UnknownErrorException:
+ msg = "Lost WebDriver connection"
+ self.logger.error(msg)
+ return ("INTERNAL-ERROR", msg)
+
+ def run_func(self):
+ try:
+ self.result = True, self.func(self.protocol, self.url, self.timeout)
+ except (error.TimeoutException, error.ScriptTimeoutException):
+ self.result = False, ("EXTERNAL-TIMEOUT", None)
+ except (socket.timeout, error.UnknownErrorException):
+ self.result = False, ("CRASH", None)
+ except Exception as e:
+ if (isinstance(e, error.WebDriverException) and
+ e.http_status == 408 and
+ e.status_code == "asynchronous script timeout"):
+ # workaround for https://bugs.chromium.org/p/chromedriver/issues/detail?id=2001
+ self.result = False, ("EXTERNAL-TIMEOUT", None)
+ else:
+ message = str(getattr(e, "message", ""))
+ if message:
+ message += "\n"
+ message += traceback.format_exc()
+ self.result = False, ("INTERNAL-ERROR", message)
+ finally:
+ self.result_flag.set()
+
+
+class WebDriverTestharnessExecutor(TestharnessExecutor):
+ supports_testdriver = True
+ protocol_cls = WebDriverProtocol
+
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1,
+ close_after_done=True, capabilities=None, debug_info=None,
+ supports_eager_pageload=True, cleanup_after_test=True,
+ **kwargs):
+ """WebDriver-based executor for testharness.js tests"""
+ TestharnessExecutor.__init__(self, logger, browser, server_config,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info)
+ self.protocol = self.protocol_cls(self, browser, capabilities)
+ with open(os.path.join(here, "testharness_webdriver_resume.js")) as f:
+ self.script_resume = f.read()
+ with open(os.path.join(here, "window-loaded.js")) as f:
+ self.window_loaded_script = f.read()
+
+ self.close_after_done = close_after_done
+ self.window_id = str(uuid.uuid4())
+ self.supports_eager_pageload = supports_eager_pageload
+ self.cleanup_after_test = cleanup_after_test
+
+ def is_alive(self):
+ return self.protocol.is_alive()
+
+ def on_environment_change(self, new_environment):
+ if new_environment["protocol"] != self.last_environment["protocol"]:
+ self.protocol.testharness.load_runner(new_environment["protocol"])
+
+ def do_test(self, test):
+ url = self.test_url(test)
+
+ success, data = WebDriverRun(self.logger,
+ self.do_testharness,
+ self.protocol,
+ url,
+ test.timeout * self.timeout_multiplier,
+ self.extra_timeout).run()
+
+ if success:
+ return self.convert_result(test, data)
+
+ return (test.result_cls(*data), [])
+
+ def do_testharness(self, protocol, url, timeout):
+ format_map = {"url": strip_server(url)}
+
+ # The previous test may not have closed its old windows (if something
+ # went wrong or if cleanup_after_test was False), so clean up here.
+ parent_window = protocol.testharness.close_old_windows()
+
+ # Now start the test harness
+ protocol.base.execute_script("window.open('about:blank', '%s', 'noopener')" % self.window_id)
+ test_window = protocol.testharness.get_test_window(self.window_id,
+ parent_window,
+ timeout=5*self.timeout_multiplier)
+ self.protocol.base.set_window(test_window)
+
+ # Wait until about:blank has been loaded
+ protocol.base.execute_script(self.window_loaded_script, asynchronous=True)
+
+ handler = WebDriverCallbackHandler(self.logger, protocol, test_window)
+ protocol.webdriver.url = url
+
+ if not self.supports_eager_pageload:
+ self.wait_for_load(protocol)
+
+ while True:
+ result = protocol.base.execute_script(
+ self.script_resume % format_map, asynchronous=True)
+
+ # As of 2019-03-29, WebDriver does not define expected behavior for
+ # cases where the browser crashes during script execution:
+ #
+ # https://github.com/w3c/webdriver/issues/1308
+ if not isinstance(result, list) or len(result) != 2:
+ try:
+ is_alive = self.is_alive()
+ except error.WebDriverException:
+ is_alive = False
+
+ if not is_alive:
+ raise Exception("Browser crashed during script execution.")
+
+ done, rv = handler(result)
+ if done:
+ break
+
+ # Attempt to cleanup any leftover windows, if allowed. This is
+ # preferable as it will blame the correct test if something goes wrong
+ # closing windows, but if the user wants to see the test results we
+ # have to leave the window(s) open.
+ if self.cleanup_after_test:
+ protocol.testharness.close_old_windows()
+
+ return rv
+
+ def wait_for_load(self, protocol):
+ # pageLoadStrategy=eager doesn't work in Chrome so try to emulate in user script
+ loaded = False
+ seen_error = False
+ while not loaded:
+ try:
+ loaded = protocol.base.execute_script("""
+var callback = arguments[arguments.length - 1];
+if (location.href === "about:blank") {
+ callback(false);
+} else if (document.readyState !== "loading") {
+ callback(true);
+} else {
+ document.addEventListener("readystatechange", () => {if (document.readyState !== "loading") {callback(true)}});
+}""", asynchronous=True)
+ except error.JavascriptErrorException:
+ # We can get an error here if the script runs in the initial about:blank
+ # document before it has navigated, with the driver returning an error
+ # indicating that the document was unloaded
+ if seen_error:
+ raise
+ seen_error = True
+
+
+class WebDriverRefTestExecutor(RefTestExecutor):
+ protocol_cls = WebDriverProtocol
+
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1,
+ screenshot_cache=None, close_after_done=True,
+ debug_info=None, capabilities=None, debug_test=False,
+ reftest_screenshot="unexpected", **kwargs):
+ """WebDriver-based executor for reftests"""
+ RefTestExecutor.__init__(self,
+ logger,
+ browser,
+ server_config,
+ screenshot_cache=screenshot_cache,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info,
+ reftest_screenshot=reftest_screenshot)
+ self.protocol = self.protocol_cls(self,
+ browser,
+ capabilities=capabilities)
+ self.implementation = RefTestImplementation(self)
+ self.close_after_done = close_after_done
+ self.has_window = False
+ self.debug_test = debug_test
+
+ with open(os.path.join(here, "test-wait.js")) as f:
+ self.wait_script = f.read() % {"classname": "reftest-wait"}
+
+ def reset(self):
+ self.implementation.reset()
+
+ def is_alive(self):
+ return self.protocol.is_alive()
+
+ def do_test(self, test):
+ width_offset, height_offset = self.protocol.webdriver.execute_script(
+ """return [window.outerWidth - window.innerWidth,
+ window.outerHeight - window.innerHeight];"""
+ )
+ try:
+ self.protocol.webdriver.window.position = (0, 0)
+ except error.InvalidArgumentException:
+ # Safari 12 throws with 0 or 1, treating them as bools; fixed in STP
+ self.protocol.webdriver.window.position = (2, 2)
+ self.protocol.webdriver.window.size = (800 + width_offset, 600 + height_offset)
+
+ result = self.implementation.run_test(test)
+
+ if self.debug_test and result["status"] in ["PASS", "FAIL", "ERROR"] and "extra" in result:
+ self.protocol.debug.load_reftest_analyzer(test, result)
+
+ return self.convert_result(test, result)
+
+ def screenshot(self, test, viewport_size, dpi, page_ranges):
+ # https://github.com/web-platform-tests/wpt/issues/7135
+ assert viewport_size is None
+ assert dpi is None
+
+ return WebDriverRun(self.logger,
+ self._screenshot,
+ self.protocol,
+ self.test_url(test),
+ test.timeout,
+ self.extra_timeout).run()
+
+ def _screenshot(self, protocol, url, timeout):
+ self.protocol.base.load(url)
+
+        self.protocol.base.execute_script(self.wait_script, asynchronous=True)
+
+ screenshot = self.protocol.webdriver.screenshot()
+ if screenshot is None:
+ raise ValueError('screenshot is None')
+
+        # Strip off the "data:image/png;base64," prefix if present
+ if screenshot.startswith("data:image/png;base64,"):
+ screenshot = screenshot.split(",", 1)[1]
+
+ return screenshot
+
+
+class WebDriverCrashtestExecutor(CrashtestExecutor):
+ protocol_cls = WebDriverProtocol
+
+ def __init__(self, logger, browser, server_config, timeout_multiplier=1,
+ screenshot_cache=None, close_after_done=True,
+ debug_info=None, capabilities=None, **kwargs):
+ """WebDriver-based executor for reftests"""
+ CrashtestExecutor.__init__(self,
+ logger,
+ browser,
+ server_config,
+ screenshot_cache=screenshot_cache,
+ timeout_multiplier=timeout_multiplier,
+ debug_info=debug_info)
+ self.protocol = self.protocol_cls(self,
+ browser,
+ capabilities=capabilities)
+
+ with open(os.path.join(here, "test-wait.js")) as f:
+ self.wait_script = f.read() % {"classname": "test-wait"}
+
+ def do_test(self, test):
+ timeout = (test.timeout * self.timeout_multiplier if self.debug_info is None
+ else None)
+
+ success, data = WebDriverRun(self.logger,
+ self.do_crashtest,
+ self.protocol,
+ self.test_url(test),
+ timeout,
+ self.extra_timeout).run()
+
+ if success:
+ return self.convert_result(test, data)
+
+ return (test.result_cls(*data), [])
+
+ def do_crashtest(self, protocol, url, timeout):
+ protocol.base.load(url)
+ protocol.base.execute_script(self.wait_script, asynchronous=True)
+
+ return {"status": "PASS",
+ "message": None}
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/process.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/process.py
new file mode 100644
index 0000000000..4a2c01372e
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/process.py
@@ -0,0 +1,22 @@
+# mypy: allow-untyped-defs
+
+from .base import TestExecutor
+
+
+class ProcessTestExecutor(TestExecutor):
+ def __init__(self, *args, **kwargs):
+ TestExecutor.__init__(self, *args, **kwargs)
+ self.binary = self.browser.binary
+ self.interactive = (False if self.debug_info is None
+ else self.debug_info.interactive)
+
+ def setup(self, runner):
+ self.runner = runner
+ self.runner.send_message("init_succeeded")
+ return True
+
+ def is_alive(self):
+ return True
+
+ def do_test(self, test):
+ raise NotImplementedError
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/protocol.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/protocol.py
new file mode 100644
index 0000000000..75e113c71d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/protocol.py
@@ -0,0 +1,689 @@
+# mypy: allow-untyped-defs
+
+import traceback
+from http.client import HTTPConnection
+
+from abc import ABCMeta, abstractmethod
+from typing import ClassVar, List, Type
+
+
+def merge_dicts(target, source):
+ if not (isinstance(target, dict) and isinstance(source, dict)):
+ raise TypeError
+ for (key, source_value) in source.items():
+ if key not in target:
+ target[key] = source_value
+ else:
+ if isinstance(source_value, dict) and isinstance(target[key], dict):
+ merge_dicts(target[key], source_value)
+ else:
+ target[key] = source_value
+
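+# A small usage sketch (hypothetical capabilities) of the merge semantics
+# above: nested dicts are merged recursively and non-dict values from
+# ``source`` overwrite those already in ``target``:
+#
+#   caps = {"timeouts": {"script": 30000}, "acceptInsecureCerts": True}
+#   merge_dicts(caps, {"timeouts": {"pageLoad": 300000}})
+#   # caps == {"timeouts": {"script": 30000, "pageLoad": 300000},
+#   #          "acceptInsecureCerts": True}
+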
+class Protocol:
+ """Backend for a specific browser-control protocol.
+
+ Each Protocol is composed of a set of ProtocolParts that implement
+ the APIs required for specific interactions. This reflects the fact
+    that not all implementations will support exactly the same feature set.
+ Each ProtocolPart is exposed directly on the protocol through an accessor
+ attribute with a name given by its `name` property.
+
+ :param Executor executor: The Executor instance that's using this Protocol
+ :param Browser browser: The Browser using this protocol"""
+ __metaclass__ = ABCMeta
+
+ implements = [] # type: ClassVar[List[Type[ProtocolPart]]]
+
+ def __init__(self, executor, browser):
+ self.executor = executor
+ self.browser = browser
+
+ for cls in self.implements:
+ name = cls.name
+ assert not hasattr(self, name)
+ setattr(self, name, cls(self))
+
+ @property
+ def logger(self):
+ """:returns: Current logger"""
+ return self.executor.logger
+
+ def is_alive(self):
+ """Is the browser connection still active
+
+ :returns: A boolean indicating whether the connection is still active."""
+ return True
+
+ def setup(self, runner):
+ """Handle protocol setup, and send a message to the runner to indicate
+ success or failure."""
+ msg = None
+ try:
+ msg = "Failed to start protocol connection"
+ self.connect()
+
+ msg = None
+
+ for cls in self.implements:
+ getattr(self, cls.name).setup()
+
+ msg = "Post-connection steps failed"
+ self.after_connect()
+ except Exception:
+ if msg is not None:
+ self.logger.warning(msg)
+ self.logger.warning(traceback.format_exc())
+ raise
+
+ @abstractmethod
+ def connect(self):
+ """Make a connection to the remote browser"""
+ pass
+
+ @abstractmethod
+ def after_connect(self):
+ """Run any post-connection steps. This happens after the ProtocolParts are
+        initialized so can depend on a fully-populated object."""
+ pass
+
+ def teardown(self):
+ """Run cleanup steps after the tests are finished."""
+ for cls in self.implements:
+ getattr(self, cls.name).teardown()
+
+
+class ProtocolPart:
+ """Base class for all ProtocolParts.
+
+ :param Protocol parent: The parent protocol"""
+ __metaclass__ = ABCMeta
+
+ name = None # type: ClassVar[str]
+
+ def __init__(self, parent):
+ self.parent = parent
+
+ @property
+ def logger(self):
+ """:returns: Current logger"""
+ return self.parent.logger
+
+ def setup(self):
+ """Run any setup steps required for the ProtocolPart."""
+ pass
+
+ def teardown(self):
+ """Run any teardown steps required for the ProtocolPart."""
+ pass
+
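+# A minimal sketch (hypothetical names) of how these classes compose: a
+# ProtocolPart subclass declares its ``name``, a Protocol subclass lists it in
+# ``implements``, and Protocol.__init__ then exposes an instance of the part
+# as an attribute of that name:
+#
+#   class ExampleLogsProtocolPart(ProtocolPart):
+#       name = "logs"
+#
+#       def get_logs(self):
+#           return []
+#
+#   class ExampleProtocol(Protocol):
+#       implements = [ExampleLogsProtocolPart]
+#
+#   # ExampleProtocol(executor, browser).logs.get_logs() -> []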
+
+class BaseProtocolPart(ProtocolPart):
+ """Generic bits of protocol that are required for multiple test types"""
+ __metaclass__ = ABCMeta
+
+ name = "base"
+
+ @abstractmethod
+ def execute_script(self, script, asynchronous=False):
+ """Execute javascript in the current Window.
+
+ :param str script: The js source to execute. This is implicitly wrapped in a function.
+ :param bool asynchronous: Whether the script is asynchronous in the webdriver
+ sense i.e. whether the return value is the result of
+ the initial function call or if it waits for some callback.
+ :returns: The result of the script execution.
+ """
+ pass
+
+ @abstractmethod
+ def set_timeout(self, timeout):
+ """Set the timeout for script execution.
+
+ :param timeout: Script timeout in seconds"""
+ pass
+
+ @abstractmethod
+ def wait(self):
+ """Wait indefinitely for the browser to close.
+
+ :returns: True to re-run the test, or False to continue with the next test"""
+ pass
+
+ @property
+ def current_window(self):
+ """Return a handle identifying the current top level browsing context
+
+ :returns: A protocol-specific handle"""
+ pass
+
+ @abstractmethod
+ def set_window(self, handle):
+ """Set the top level browsing context to one specified by a given handle.
+
+ :param handle: A protocol-specific handle identifying a top level browsing
+ context."""
+ pass
+
+ @abstractmethod
+ def window_handles(self):
+ """Get a list of handles to top-level browsing contexts"""
+ pass
+
+ @abstractmethod
+ def load(self, url):
+ """Load a url in the current browsing context
+
+ :param url: The url to load"""
+ pass
+
+
+class TestharnessProtocolPart(ProtocolPart):
+ """Protocol part required to run testharness tests."""
+ __metaclass__ = ABCMeta
+
+ name = "testharness"
+
+ @abstractmethod
+ def load_runner(self, url_protocol):
+ """Load the initial page used to control the tests.
+
+ :param str url_protocol: "https" or "http" depending on the test metadata.
+ """
+ pass
+
+ @abstractmethod
+ def close_old_windows(self, url_protocol):
+ """Close existing windows except for the initial runner window.
+ After calling this method there must be exactly one open window that
+ contains the initial runner page.
+
+ :param str url_protocol: "https" or "http" depending on the test metadata.
+ """
+ pass
+
+ @abstractmethod
+ def get_test_window(self, window_id, parent):
+ """Get the window handle dorresponding to the window containing the
+ currently active test.
+
+ :param window_id: A string containing the DOM name of the Window that
+ contains the test, or None.
+ :param parent: The handle of the runner window.
+ :returns: A protocol-specific window handle.
+ """
+ pass
+
+ @abstractmethod
+ def test_window_loaded(self):
+ """Wait until the newly opened test window has been loaded."""
+
+
+class PrefsProtocolPart(ProtocolPart):
+ """Protocol part that allows getting and setting browser prefs."""
+ __metaclass__ = ABCMeta
+
+ name = "prefs"
+
+ @abstractmethod
+ def set(self, name, value):
+ """Set the named pref to value.
+
+ :param name: A pref name of browser-specific type
+ :param value: A pref value of browser-specific type"""
+ pass
+
+ @abstractmethod
+ def get(self, name):
+ """Get the current value of a named pref
+
+ :param name: A pref name of browser-specific type
+ :returns: A pref value of browser-specific type"""
+ pass
+
+ @abstractmethod
+ def clear(self, name):
+ """Reset the value of a named pref back to the default.
+
+ :param name: A pref name of browser-specific type"""
+ pass
+
+
+class StorageProtocolPart(ProtocolPart):
+ """Protocol part for manipulating browser storage."""
+ __metaclass__ = ABCMeta
+
+ name = "storage"
+
+ @abstractmethod
+ def clear_origin(self, url):
+ """Clear all the storage for a specified origin.
+
+ :param url: A url belonging to the origin"""
+ pass
+
+
+class SelectorProtocolPart(ProtocolPart):
+ """Protocol part for selecting elements on the page."""
+ __metaclass__ = ABCMeta
+
+ name = "select"
+
+ def element_by_selector(self, element_selector):
+ elements = self.elements_by_selector(element_selector)
+ if len(elements) == 0:
+ raise ValueError(f"Selector '{element_selector}' matches no elements")
+ elif len(elements) > 1:
+ raise ValueError(f"Selector '{element_selector}' matches multiple elements")
+ return elements[0]
+
+ @abstractmethod
+ def elements_by_selector(self, selector):
+ """Select elements matching a CSS selector
+
+ :param str selector: The CSS selector
+ :returns: A list of protocol-specific handles to elements"""
+ pass
+
+
+class ClickProtocolPart(ProtocolPart):
+ """Protocol part for performing trusted clicks"""
+ __metaclass__ = ABCMeta
+
+ name = "click"
+
+ @abstractmethod
+ def element(self, element):
+ """Perform a trusted click somewhere on a specific element.
+
+ :param element: A protocol-specific handle to an element."""
+ pass
+
+
+class CookiesProtocolPart(ProtocolPart):
+ """Protocol part for managing cookies"""
+ __metaclass__ = ABCMeta
+
+ name = "cookies"
+
+ @abstractmethod
+ def delete_all_cookies(self):
+ """Delete all cookies."""
+ pass
+
+ @abstractmethod
+ def get_all_cookies(self):
+ """Get all cookies."""
+ pass
+
+ @abstractmethod
+ def get_named_cookie(self, name):
+ """Get named cookie.
+
+ :param name: The name of the cookie to get."""
+ pass
+
+
+class SendKeysProtocolPart(ProtocolPart):
+    """Protocol part for sending keys to elements"""
+ __metaclass__ = ABCMeta
+
+ name = "send_keys"
+
+ @abstractmethod
+ def send_keys(self, element, keys):
+ """Send keys to a specific element.
+
+ :param element: A protocol-specific handle to an element.
+ :param keys: A protocol-specific handle to a string of input keys."""
+ pass
+
+class WindowProtocolPart(ProtocolPart):
+ """Protocol part for manipulating the window"""
+ __metaclass__ = ABCMeta
+
+ name = "window"
+
+ @abstractmethod
+ def set_rect(self, rect):
+        """Set the window position and size to the given rect."""
+ pass
+
+ @abstractmethod
+ def minimize(self):
+ """Minimizes the window and returns the previous rect."""
+ pass
+
+class GenerateTestReportProtocolPart(ProtocolPart):
+ """Protocol part for generating test reports"""
+ __metaclass__ = ABCMeta
+
+ name = "generate_test_report"
+
+ @abstractmethod
+ def generate_test_report(self, message):
+ """Generate a test report.
+
+ :param message: The message to be contained in the report."""
+ pass
+
+
+class SetPermissionProtocolPart(ProtocolPart):
+ """Protocol part for setting permissions"""
+ __metaclass__ = ABCMeta
+
+ name = "set_permission"
+
+ @abstractmethod
+ def set_permission(self, descriptor, state):
+ """Set permission state.
+
+ :param descriptor: A PermissionDescriptor object.
+ :param state: The state to set the permission to."""
+ pass
+
+
+class ActionSequenceProtocolPart(ProtocolPart):
+    """Protocol part for performing sequences of input actions"""
+ __metaclass__ = ABCMeta
+
+ name = "action_sequence"
+
+ @abstractmethod
+ def send_actions(self, actions):
+ """Send a sequence of actions to the window.
+
+ :param actions: A protocol-specific handle to an array of actions."""
+ pass
+
+ def release(self):
+ pass
+
+
+class TestDriverProtocolPart(ProtocolPart):
+ """Protocol part that implements the basic functionality required for
+ all testdriver-based tests."""
+ __metaclass__ = ABCMeta
+
+ name = "testdriver"
+
+ @abstractmethod
+ def send_message(self, cmd_id, message_type, status, message=None):
+ """Send a testdriver message to the browser.
+
+ :param int cmd_id: The id of the command to which we're responding
+ :param str message_type: The kind of the message.
+ :param str status: Either "failure" or "success" depending on whether the
+ previous command succeeded.
+ :param str message: Additional data to add to the message."""
+ pass
+
+ def switch_to_window(self, wptrunner_id, initial_window=None):
+ """Switch to a window given a wptrunner window id
+
+ :param str wptrunner_id: Testdriver-specific id for the target window
+ :param str initial_window: WebDriver window id for the test window"""
+ if wptrunner_id is None:
+ return
+
+ if initial_window is None:
+ initial_window = self.parent.base.current_window
+
+ stack = [str(item) for item in self.parent.base.window_handles()]
+ first = True
+ while stack:
+ item = stack.pop()
+
+ if item is None:
+ assert first is False
+ self._switch_to_parent_frame()
+ continue
+
+ if isinstance(item, str):
+ if not first or item != initial_window:
+ self.parent.base.set_window(item)
+ first = False
+ else:
+ assert first is False
+ try:
+ self._switch_to_frame(item)
+ except ValueError:
+ # The frame no longer exists, or doesn't have a nested browsing context, so continue
+ continue
+
+ try:
+ # Get the window id and a list of elements containing nested browsing contexts.
+                # For embed we can't tell for sure if there's a nested browsing context, so always return it
+ # and fail later if there isn't
+ result = self.parent.base.execute_script("""
+ let contextParents = Array.from(document.querySelectorAll("frame, iframe, embed, object"))
+ .filter(elem => elem.localName !== "embed" ? (elem.contentWindow !== null) : true);
+ return [window.__wptrunner_id, contextParents]""")
+ except Exception:
+ continue
+
+ if result is None:
+ # With marionette at least this is possible if the content process crashed. Not quite
+ # sure how we want to handle that case.
+ continue
+
+ handle_window_id, nested_context_containers = result
+
+ if handle_window_id and str(handle_window_id) == wptrunner_id:
+ return
+
+ for elem in reversed(nested_context_containers):
+ # None here makes us switch back to the parent after we've processed the frame
+ stack.append(None)
+ stack.append(elem)
+
+ raise Exception("Window with id %s not found" % wptrunner_id)
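+
+    # Illustrative walk-through of switch_to_window's traversal: with two
+    # top-level windows A and B, where B contains an <iframe> F, the stack
+    # starts as [A, B]. B is popped and switched to; the injected script
+    # returns (B's __wptrunner_id, [F]), so None and F are pushed, giving
+    # [A, None, F]. F is popped and entered via _switch_to_frame; if its
+    # __wptrunner_id matches wptrunner_id we stop there, otherwise the None
+    # marker pops us back to B's top-level context before A is processed.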
+
+ @abstractmethod
+ def _switch_to_frame(self, index_or_elem):
+ """Switch to a frame in the current window
+
+ :param int index_or_elem: Frame id or container element"""
+ pass
+
+ @abstractmethod
+ def _switch_to_parent_frame(self):
+ """Switch to the parent of the current frame"""
+ pass
+
+
+class AssertsProtocolPart(ProtocolPart):
+ """ProtocolPart that implements the functionality required to get a count of non-fatal
+ assertions triggered"""
+ __metaclass__ = ABCMeta
+
+ name = "asserts"
+
+ @abstractmethod
+ def get(self):
+ """Get a count of assertions since the last browser start"""
+ pass
+
+
+class CoverageProtocolPart(ProtocolPart):
+ """Protocol part for collecting per-test coverage data."""
+ __metaclass__ = ABCMeta
+
+ name = "coverage"
+
+ @abstractmethod
+ def reset(self):
+ """Reset coverage counters"""
+ pass
+
+ @abstractmethod
+ def dump(self):
+ """Dump coverage counters"""
+ pass
+
+
+class VirtualAuthenticatorProtocolPart(ProtocolPart):
+ """Protocol part for creating and manipulating virtual authenticators"""
+ __metaclass__ = ABCMeta
+
+ name = "virtual_authenticator"
+
+ @abstractmethod
+ def add_virtual_authenticator(self, config):
+ """Add a virtual authenticator
+
+ :param config: The Authenticator Configuration"""
+ pass
+
+ @abstractmethod
+ def remove_virtual_authenticator(self, authenticator_id):
+ """Remove a virtual authenticator
+
+ :param str authenticator_id: The ID of the authenticator to remove"""
+ pass
+
+ @abstractmethod
+ def add_credential(self, authenticator_id, credential):
+ """Inject a credential onto an authenticator
+
+ :param str authenticator_id: The ID of the authenticator to add the credential to
+ :param credential: The credential to inject"""
+ pass
+
+ @abstractmethod
+ def get_credentials(self, authenticator_id):
+ """Get the credentials stored in an authenticator
+
+ :param str authenticator_id: The ID of the authenticator
+ :returns: An array with the credentials stored on the authenticator"""
+ pass
+
+ @abstractmethod
+ def remove_credential(self, authenticator_id, credential_id):
+ """Remove a credential stored in an authenticator
+
+ :param str authenticator_id: The ID of the authenticator
+ :param str credential_id: The ID of the credential"""
+ pass
+
+ @abstractmethod
+ def remove_all_credentials(self, authenticator_id):
+ """Remove all the credentials stored in an authenticator
+
+ :param str authenticator_id: The ID of the authenticator"""
+ pass
+
+ @abstractmethod
+ def set_user_verified(self, authenticator_id, uv):
+ """Sets the user verified flag on an authenticator
+
+ :param str authenticator_id: The ID of the authenticator
+ :param bool uv: the user verified flag"""
+ pass
+
+
+class SPCTransactionsProtocolPart(ProtocolPart):
+ """Protocol part for Secure Payment Confirmation transactions"""
+ __metaclass__ = ABCMeta
+
+ name = "spc_transactions"
+
+ @abstractmethod
+ def set_spc_transaction_mode(self, mode):
+ """Set the SPC transaction automation mode
+
+ :param str mode: The automation mode to set"""
+ pass
+
+
+class PrintProtocolPart(ProtocolPart):
+ """Protocol part for rendering to a PDF."""
+ __metaclass__ = ABCMeta
+
+ name = "pdf_print"
+
+ @abstractmethod
+ def render_as_pdf(self, width, height):
+ """Output document as PDF"""
+ pass
+
+
+class DebugProtocolPart(ProtocolPart):
+ """Protocol part for debugging test failures."""
+ __metaclass__ = ABCMeta
+
+ name = "debug"
+
+ @abstractmethod
+ def load_devtools(self):
+ """Load devtools in the current window"""
+ pass
+
+ def load_reftest_analyzer(self, test, result):
+ import io
+ import mozlog
+ from urllib.parse import quote, urljoin
+
+ debug_test_logger = mozlog.structuredlog.StructuredLogger("debug_test")
+ output = io.StringIO()
+ debug_test_logger.suite_start([])
+ debug_test_logger.add_handler(mozlog.handlers.StreamHandler(output, formatter=mozlog.formatters.TbplFormatter()))
+ debug_test_logger.test_start(test.id)
+ # Always use PASS as the expected value so we get output even for expected failures
+ debug_test_logger.test_end(test.id, result["status"], "PASS", extra=result.get("extra"))
+
+ self.parent.base.load(urljoin(self.parent.executor.server_url("https"),
+ "/common/third_party/reftest-analyzer.xhtml#log=%s" %
+ quote(output.getvalue())))
+
+
+class ConnectionlessBaseProtocolPart(BaseProtocolPart):
+ def load(self, url):
+ pass
+
+ def execute_script(self, script, asynchronous=False):
+ pass
+
+ def set_timeout(self, timeout):
+ pass
+
+ def wait(self):
+ return False
+
+ def set_window(self, handle):
+ pass
+
+ def window_handles(self):
+ return []
+
+
+class ConnectionlessProtocol(Protocol):
+ implements = [ConnectionlessBaseProtocolPart]
+
+ def connect(self):
+ pass
+
+ def after_connect(self):
+ pass
+
+
+class WdspecProtocol(ConnectionlessProtocol):
+ implements = [ConnectionlessBaseProtocolPart]
+
+ def __init__(self, executor, browser):
+ super().__init__(executor, browser)
+
+ def is_alive(self):
+ """Test that the connection is still alive.
+
+ Because the remote communication happens over HTTP we need to
+ make an explicit request to the remote. It is allowed for
+ WebDriver spec tests to not have a WebDriver session, since this
+ may be what is tested.
+
+ An HTTP request to an invalid path that results in a 404 is
+ proof enough to us that the server is alive and kicking.
+ """
+ conn = HTTPConnection(self.browser.host, self.browser.port)
+ conn.request("HEAD", "/invalid")
+ res = conn.getresponse()
+ return res.status == 404
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/pytestrunner/__init__.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/pytestrunner/__init__.py
new file mode 100644
index 0000000000..1baaf9573a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/pytestrunner/__init__.py
@@ -0,0 +1 @@
+from .runner import run # noqa: F401
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/pytestrunner/runner.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/pytestrunner/runner.py
new file mode 100644
index 0000000000..f520e095e8
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/pytestrunner/runner.py
@@ -0,0 +1,171 @@
+# mypy: allow-untyped-defs
+
+"""
+Provides interface to deal with pytest.
+
+Usage::
+
+ session = webdriver.client.Session("127.0.0.1", "4444", "/")
+ harness_result = ("OK", None)
+ subtest_results = pytestrunner.run("/path/to/test", session.url)
+ return (harness_result, subtest_results)
+"""
+
+import errno
+import json
+import os
+import shutil
+import tempfile
+from collections import OrderedDict
+
+
+pytest = None
+
+
+def do_delayed_imports():
+ global pytest
+ import pytest
+
+
+def run(path, server_config, session_config, timeout=0, environ=None):
+ """
+    Run the Python test at ``path`` in pytest. The session configuration is
+    written to a file whose path is exposed via the ``WDSPEC_CONFIG_FILE``
+    environment variable, so the tests can create a WebDriver session fixture
+    from it.
+
+    :param path: Path to the test file.
+    :param server_config: wptserve configuration to expose to the tests.
+    :param session_config: Dictionary of host, port and capabilities parameters
+                           to pass through to the WebDriver session.
+    :param timeout: Duration before interrupting potentially hanging
+                    tests. If 0, there is no timeout.
+    :param environ: Optional mapping of extra environment variables to set
+                    while the tests run.
+
+ :returns: (<harness result>, [<subtest result>, ...]),
+ where <subtest result> is (test id, status, message, stacktrace).
+ """
+ if pytest is None:
+ do_delayed_imports()
+
+ old_environ = os.environ.copy()
+ try:
+ with TemporaryDirectory() as cache:
+ config_path = os.path.join(cache, "wd_config.json")
+ os.environ["WDSPEC_CONFIG_FILE"] = config_path
+
+ config = session_config.copy()
+ config["wptserve"] = server_config.as_dict()
+
+ with open(config_path, "w") as f:
+ json.dump(config, f)
+
+ if environ:
+ os.environ.update(environ)
+
+ harness = HarnessResultRecorder()
+ subtests = SubtestResultRecorder()
+
+ try:
+ basetemp = os.path.join(cache, "pytest")
+ pytest.main(["--strict-markers", # turn function marker warnings into errors
+ "-vv", # show each individual subtest and full failure logs
+ "--capture", "no", # enable stdout/stderr from tests
+ "--basetemp", basetemp, # temporary directory
+ "--showlocals", # display contents of variables in local scope
+ "-p", "no:mozlog", # use the WPT result recorder
+ "-p", "no:cacheprovider", # disable state preservation across invocations
+ "-o=console_output_style=classic", # disable test progress bar
+ path],
+ plugins=[harness, subtests])
+ except Exception as e:
+ harness.outcome = ("INTERNAL-ERROR", str(e))
+
+ finally:
+ os.environ = old_environ
+
+ subtests_results = [(key,) + value for (key, value) in subtests.results.items()]
+ return (harness.outcome, subtests_results)
+
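+# Illustrative shape of the return value (test names are hypothetical): with
+# one passing and one failing wdspec subtest, ``run`` returns something like
+#
+#     (("OK", None),
+#      [("test_ok", "PASS", None, None),
+#       ("test_bad", "FAIL", "AssertionError: ...", "<longrepr text>")])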
+
+class HarnessResultRecorder:
+ outcomes = {
+ "failed": "ERROR",
+ "passed": "OK",
+ "skipped": "SKIP",
+ }
+
+ def __init__(self):
+ # we are ok unless told otherwise
+ self.outcome = ("OK", None)
+
+ def pytest_collectreport(self, report):
+ harness_result = self.outcomes[report.outcome]
+ self.outcome = (harness_result, None)
+
+
+class SubtestResultRecorder:
+ def __init__(self):
+ self.results = OrderedDict()
+
+ def pytest_runtest_logreport(self, report):
+ if report.passed and report.when == "call":
+ self.record_pass(report)
+ elif report.failed:
+ # pytest outputs the stacktrace followed by an error message prefixed
+ # with "E ", e.g.
+ #
+ # def test_example():
+ # > assert "fuu" in "foobar"
+            #     E   AssertionError: assert 'fuu' in 'foobar'
+ message = ""
+ for line in report.longreprtext.splitlines():
+ if line.startswith("E "):
+ message = line[1:].strip()
+ break
+
+ if report.when != "call":
+ self.record_error(report, message)
+ else:
+ self.record_fail(report, message)
+ elif report.skipped:
+ self.record_skip(report)
+
+ def record_pass(self, report):
+ self.record(report.nodeid, "PASS")
+
+ def record_fail(self, report, message):
+ self.record(report.nodeid, "FAIL", message=message, stack=report.longrepr)
+
+ def record_error(self, report, message):
+ # error in setup/teardown
+ message = f"{report.when} error: {message}"
+ self.record(report.nodeid, "ERROR", message, report.longrepr)
+
+ def record_skip(self, report):
+ self.record(report.nodeid, "ERROR",
+ "In-test skip decorators are disallowed, "
+ "please use WPT metadata to ignore tests.")
+
+ def record(self, test, status, message=None, stack=None):
+ if stack is not None:
+ stack = str(stack)
+ # Ensure we get a single result per subtest; pytest will sometimes
+ # call pytest_runtest_logreport more than once per test e.g. if
+ # it fails and then there's an error during teardown.
+ subtest_id = test.split("::")[-1]
+ if subtest_id in self.results and status == "PASS":
+ # This shouldn't happen, but never overwrite an existing result with PASS
+ return
+ new_result = (status, message, stack)
+ self.results[subtest_id] = new_result
+
+
+class TemporaryDirectory:
+ def __enter__(self):
+ self.path = tempfile.mkdtemp(prefix="wdspec-")
+ return self.path
+
+ def __exit__(self, *args):
+ try:
+ shutil.rmtree(self.path)
+ except OSError as e:
+ # no such file or directory
+ if e.errno != errno.ENOENT:
+ raise
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/reftest.js b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/reftest.js
new file mode 100644
index 0000000000..1ba98c686f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/reftest.js
@@ -0,0 +1 @@
+var win = window.open("about:blank", "test", "left=0,top=0,width=800,height=600");
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/runner.js b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/runner.js
new file mode 100644
index 0000000000..171e6febd9
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/runner.js
@@ -0,0 +1 @@
+document.title = '%(title)s';
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/test-wait.js b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/test-wait.js
new file mode 100644
index 0000000000..ad08ad7d76
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/test-wait.js
@@ -0,0 +1,55 @@
+var callback = arguments[arguments.length - 1];
+var observer = null;
+var root = document.documentElement;
+
+function wait_load() {
+ if (Document.prototype.hasOwnProperty("fonts")) {
+ document.fonts.ready.then(wait_paints);
+ } else {
+ // This might take the screenshot too early, depending on whether the
+ // load event is blocked on fonts being loaded. See:
+ // https://github.com/w3c/csswg-drafts/issues/1088
+ wait_paints();
+ }
+}
+
+
+function wait_paints() {
+ // As of 2017-04-05, the Chromium web browser exhibits a rendering bug
+ // (https://bugs.chromium.org/p/chromium/issues/detail?id=708757) that
+ // produces instability during screen capture. The following use of
+ // `requestAnimationFrame` is intended as a short-term workaround, though
+ // it is not guaranteed to resolve the issue.
+ //
+ // For further detail, see:
+ // https://github.com/jugglinmike/chrome-screenshot-race/issues/1
+
+ requestAnimationFrame(function() {
+ requestAnimationFrame(function() {
+ screenshot_if_ready();
+ });
+ });
+}
+
+function screenshot_if_ready() {
+ if (root &&
+ root.classList.contains("%(classname)s") &&
+ observer === null) {
+ observer = new MutationObserver(wait_paints);
+ observer.observe(root, {attributes: true});
+ var event = new Event("TestRendered", {bubbles: true});
+ root.dispatchEvent(event);
+ return;
+ }
+ if (observer !== null) {
+ observer.disconnect();
+ }
+ callback();
+}
+
+
+if (document.readyState != "complete") {
+ addEventListener('load', wait_load);
+} else {
+ wait_load();
+}
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/testharness_servodriver.js b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/testharness_servodriver.js
new file mode 100644
index 0000000000..d731cc04d7
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/testharness_servodriver.js
@@ -0,0 +1,2 @@
+window.__wd_results_callback__ = arguments[arguments.length - 1];
+window.__wd_results_timer__ = setTimeout(timeout, %(timeout)s);
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/testharness_webdriver_resume.js b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/testharness_webdriver_resume.js
new file mode 100644
index 0000000000..36d086c974
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/testharness_webdriver_resume.js
@@ -0,0 +1,5 @@
+// We have to set the url here to ensure we get the same escaping as in the harness
+// and also to handle the case where the test changes the fragment
+window.__wptrunner_url = "%(url)s";
+window.__wptrunner_testdriver_callback = arguments[arguments.length - 1];
+window.__wptrunner_process_next_event();
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/window-loaded.js b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/window-loaded.js
new file mode 100644
index 0000000000..78d73285a4
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/executors/window-loaded.js
@@ -0,0 +1,9 @@
+const [resolve] = arguments;
+
+if (document.readyState != "complete") {
+ window.addEventListener("load", () => {
+ resolve();
+ }, { once: true });
+} else {
+ resolve();
+}
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/expected.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/expected.py
new file mode 100644
index 0000000000..72607ea25f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/expected.py
@@ -0,0 +1,16 @@
+# mypy: allow-untyped-defs
+
+import os
+
+
+def expected_path(metadata_path, test_path):
+ """Path to the expectation data file for a given test path.
+
+ This is defined as metadata_path + relative_test_path + .ini
+
+ :param metadata_path: Path to the root of the metadata directory
+ :param test_path: Relative path to the test file from the test root
+ """
+ args = list(test_path.split("/"))
+ args[-1] += ".ini"
+ return os.path.join(metadata_path, *args)
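+
+
+# Example (illustrative): expected_path("meta", "dom/nodes/Node.html") returns
+# os.path.join("meta", "dom", "nodes", "Node.html.ini"), i.e.
+# "meta/dom/nodes/Node.html.ini" on POSIX systems.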
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/expectedtree.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/expectedtree.py
new file mode 100644
index 0000000000..88cf40ad94
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/expectedtree.py
@@ -0,0 +1,132 @@
+# mypy: allow-untyped-defs
+
+from math import log
+from collections import defaultdict
+
+class Node:
+ def __init__(self, prop, value):
+ self.prop = prop
+ self.value = value
+ self.parent = None
+
+ self.children = set()
+
+ # Populated for leaf nodes
+ self.run_info = set()
+ self.result_values = defaultdict(int)
+
+ def add(self, node):
+ self.children.add(node)
+ node.parent = self
+
+ def __iter__(self):
+ yield self
+ for node in self.children:
+ yield from node
+
+ def __len__(self):
+ return 1 + sum(len(item) for item in self.children)
+
+
+def entropy(results):
+    """Measure how uniform the values in ``results`` are, using the Shannon
+    entropy of the result-value counts."""
+
+ result_counts = defaultdict(int)
+ total = float(len(results))
+ for values in results.values():
+ # Not sure this is right, possibly want to treat multiple values as
+ # distinct from multiple of the same value?
+ for value in values:
+ result_counts[value] += 1
+
+ entropy_sum = 0
+
+ for count in result_counts.values():
+ prop = float(count) / total
+ entropy_sum -= prop * log(prop, 2)
+
+ return entropy_sum
+
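+# Worked example (illustrative): for three run_info keys whose value sets are
+# {"PASS"}, {"PASS"} and {"FAIL"}, total is 3 and the counts are
+# {"PASS": 2, "FAIL": 1}, so the entropy is
+# -(2/3)*log2(2/3) - (1/3)*log2(1/3) ~= 0.918 bits; runs that all share a
+# single result value have entropy 0.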
+
+def split_results(prop, results):
+ """Split a dictionary of results into a dictionary of dictionaries where
+ each sub-dictionary has a specific value of the given property"""
+ by_prop = defaultdict(dict)
+ for run_info, value in results.items():
+ by_prop[run_info[prop]][run_info] = value
+
+ return by_prop
+
+
+def build_tree(properties, dependent_props, results, tree=None):
+ """Build a decision tree mapping properties to results
+
+    :param properties: A list of run_info properties to consider
+                       in the tree
+    :param dependent_props: A dictionary mapping a property name to properties
+                            that should only be considered after the property
+                            in the key. For example {"os": ["version"]} means
+                            that "version" won't be used until after "os".
+    :param results: Dictionary mapping run_info to a dictionary of result
+                    values and their counts
+    :param tree: A Node object to use as the root of the (sub)tree"""
+
+ if tree is None:
+ tree = Node(None, None)
+
+ prop_index = {prop: i for i, prop in enumerate(properties)}
+
+ all_results = defaultdict(int)
+ for result_values in results.values():
+ for result_value, count in result_values.items():
+ all_results[result_value] += count
+
+ # If there is only one result we are done
+ if not properties or len(all_results) == 1:
+ for value, count in all_results.items():
+ tree.result_values[value] += count
+ tree.run_info |= set(results.keys())
+ return tree
+
+ results_partitions = []
+ remove_properties = set()
+ for prop in properties:
+ result_sets = split_results(prop, results)
+ if len(result_sets) == 1:
+ # If this property doesn't partition the space then just remove it
+ # from the set to consider
+ remove_properties.add(prop)
+ continue
+ new_entropy = 0.
+ results_sets_entropy = []
+ for prop_value, result_set in result_sets.items():
+ results_sets_entropy.append((entropy(result_set), prop_value, result_set))
+ new_entropy += (float(len(result_set)) / len(results)) * results_sets_entropy[-1][0]
+
+ results_partitions.append((new_entropy,
+ prop,
+ results_sets_entropy))
+
+ # In the case that no properties partition the space
+ if not results_partitions:
+ for value, count in all_results.items():
+ tree.result_values[value] += count
+ tree.run_info |= set(results.keys())
+ return tree
+
+    # Split on the property whose partition has the lowest weighted entropy
+    # (i.e. the greatest information gain).
+ results_partitions.sort(key=lambda x: (x[0], prop_index[x[1]]))
+ _, best_prop, sub_results = results_partitions[0]
+
+ # Create a new set of properties that can be used
+ new_props = properties[:prop_index[best_prop]] + properties[prop_index[best_prop] + 1:]
+ new_props.extend(dependent_props.get(best_prop, []))
+ if remove_properties:
+ new_props = [item for item in new_props if item not in remove_properties]
+
+ for _, prop_value, results_sets in sub_results:
+ node = Node(best_prop, prop_value)
+ tree.add(node)
+ build_tree(new_props, dependent_props, results_sets, node)
+ return tree
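+
+
+# Usage sketch (illustrative; assumes hashable, indexable run_info mappings
+# such as wptrunner's RunInfo objects):
+#
+#     results = {
+#         run_info_linux: {"PASS": 3},           # e.g. os == "linux"
+#         run_info_win: {"FAIL": 2, "PASS": 1},  # e.g. os == "win"
+#     }
+#     tree = build_tree(["os", "version"], {"os": ["version"]}, results)
+#
+# Here the root splits on "os" and each leaf's ``result_values`` holds the
+# result counts for the runs that reach it.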
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/font.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/font.py
new file mode 100644
index 0000000000..c533d70df7
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/font.py
@@ -0,0 +1,144 @@
+# mypy: allow-untyped-defs
+
+import ctypes
+import os
+import platform
+import plistlib
+
+from shutil import copy2, rmtree
+from subprocess import call, check_output
+
+HERE = os.path.dirname(__file__)
+SYSTEM = platform.system().lower()
+
+
+class FontInstaller:
+ def __init__(self, logger, font_dir=None, **fonts):
+ self.logger = logger
+ self.font_dir = font_dir
+ self.installed_fonts = False
+ self.created_dir = False
+ self.fonts = fonts
+
+ def __call__(self, env_options=None, env_config=None):
+ return self
+
+ def __enter__(self):
+ for _, font_path in self.fonts.items():
+ font_name = font_path.split('/')[-1]
+ install = getattr(self, 'install_%s_font' % SYSTEM, None)
+ if not install:
+ self.logger.warning('Font installation not supported on %s' % SYSTEM)
+ return False
+ if install(font_name, font_path):
+ self.installed_fonts = True
+ self.logger.info('Installed font: %s' % font_name)
+ else:
+ self.logger.warning('Unable to install font: %s' % font_name)
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ if not self.installed_fonts:
+ return False
+
+ for _, font_path in self.fonts.items():
+ font_name = font_path.split('/')[-1]
+ remove = getattr(self, 'remove_%s_font' % SYSTEM, None)
+ if not remove:
+ self.logger.warning('Font removal not supported on %s' % SYSTEM)
+ return False
+ if remove(font_name, font_path):
+ self.logger.info('Removed font: %s' % font_name)
+ else:
+ self.logger.warning('Unable to remove font: %s' % font_name)
+
+ def install_linux_font(self, font_name, font_path):
+ if not self.font_dir:
+ self.font_dir = os.path.join(os.path.expanduser('~'), '.fonts')
+ if not os.path.exists(self.font_dir):
+ os.makedirs(self.font_dir)
+ self.created_dir = True
+ if not os.path.exists(os.path.join(self.font_dir, font_name)):
+ copy2(font_path, self.font_dir)
+ try:
+ fc_cache_returncode = call('fc-cache')
+ return not fc_cache_returncode
+ except OSError: # If fontconfig doesn't exist, return False
+ self.logger.error('fontconfig not available on this Linux system.')
+ return False
+
+ def install_darwin_font(self, font_name, font_path):
+ if not self.font_dir:
+ self.font_dir = os.path.join(os.path.expanduser('~'),
+ 'Library/Fonts')
+ if not os.path.exists(self.font_dir):
+ os.makedirs(self.font_dir)
+ self.created_dir = True
+ installed_font_path = os.path.join(self.font_dir, font_name)
+ if not os.path.exists(installed_font_path):
+ copy2(font_path, self.font_dir)
+
+ # Per https://github.com/web-platform-tests/results-collection/issues/218
+ # installing Ahem on macOS is flaky, so check if it actually installed
+ with open(os.devnull, 'w') as f:
+ fonts = check_output(['/usr/sbin/system_profiler', '-xml', 'SPFontsDataType'], stderr=f)
+
+ try:
+ # if py3
+ load_plist = plistlib.loads
+ except AttributeError:
+ load_plist = plistlib.readPlistFromString
+ fonts = load_plist(fonts)
+ assert len(fonts) == 1
+ for font in fonts[0]['_items']:
+ if font['path'] == installed_font_path:
+ return True
+ return False
+
+ def install_windows_font(self, _, font_path):
+ hwnd_broadcast = 0xFFFF
+ wm_fontchange = 0x001D
+
+ gdi32 = ctypes.WinDLL('gdi32')
+ if gdi32.AddFontResourceW(font_path):
+ from ctypes import wintypes
+ wparam = 0
+ lparam = 0
+ SendNotifyMessageW = ctypes.windll.user32.SendNotifyMessageW
+ SendNotifyMessageW.argtypes = [wintypes.HANDLE, wintypes.UINT,
+ wintypes.WPARAM, wintypes.LPARAM]
+ return bool(SendNotifyMessageW(hwnd_broadcast, wm_fontchange,
+ wparam, lparam))
+
+ def remove_linux_font(self, font_name, _):
+ if self.created_dir:
+ rmtree(self.font_dir)
+ else:
+ os.remove(f'{self.font_dir}/{font_name}')
+ try:
+ fc_cache_returncode = call('fc-cache')
+ return not fc_cache_returncode
+ except OSError: # If fontconfig doesn't exist, return False
+ self.logger.error('fontconfig not available on this Linux system.')
+ return False
+
+ def remove_darwin_font(self, font_name, _):
+ if self.created_dir:
+ rmtree(self.font_dir)
+ else:
+ os.remove(os.path.join(self.font_dir, font_name))
+ return True
+
+ def remove_windows_font(self, _, font_path):
+ hwnd_broadcast = 0xFFFF
+ wm_fontchange = 0x001D
+
+ gdi32 = ctypes.WinDLL('gdi32')
+ if gdi32.RemoveFontResourceW(font_path):
+ from ctypes import wintypes
+ wparam = 0
+ lparam = 0
+ SendNotifyMessageW = ctypes.windll.user32.SendNotifyMessageW
+ SendNotifyMessageW.argtypes = [wintypes.HANDLE, wintypes.UINT,
+ wintypes.WPARAM, wintypes.LPARAM]
+ return bool(SendNotifyMessageW(hwnd_broadcast, wm_fontchange,
+ wparam, lparam))
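+
+
+# Usage sketch (illustrative; the logger and font path are assumptions):
+#
+#     installer = FontInstaller(logger, font_dir=None, ahem="/path/to/Ahem.ttf")
+#     with installer:
+#         pass  # run tests that rely on the Ahem font
+#
+# On __exit__ the installed font is removed again, along with the font
+# directory if it had to be created.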
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/__init__.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/__init__.py
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/chromium.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/chromium.py
new file mode 100644
index 0000000000..eca63d136b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/chromium.py
@@ -0,0 +1,335 @@
+# mypy: allow-untyped-defs
+
+import functools
+import json
+import time
+
+from collections import defaultdict
+from mozlog.formatters import base
+
+from wptrunner.wptmanifest import serializer
+
+_escape_heading = functools.partial(serializer.escape, extras="]")
+
+
+class ChromiumFormatter(base.BaseFormatter): # type: ignore
+ """Formatter to produce results matching the Chromium JSON Test Results format.
+ https://chromium.googlesource.com/chromium/src/+/master/docs/testing/json_test_results_format.md
+
+ Notably, each test has an "artifacts" field that is a dict consisting of
+ "log": a list of strings (one per subtest + one for harness status, see
+ _append_test_message for the format)
+ "screenshots": a list of strings in the format of "url: base64"
+
+ """
+
+ def __init__(self):
+ # Whether the run was interrupted, either by the test runner or user.
+ self.interrupted = False
+
+ # A map of test status to the number of tests that had that status.
+ self.num_failures_by_status = defaultdict(int)
+
+ # Start time, expressed as offset since UNIX epoch in seconds. Measured
+ # from the first `suite_start` event.
+ self.start_timestamp_seconds = None
+
+ # A map of test names to test start timestamps, expressed in seconds
+ # since UNIX epoch. Only contains tests that are currently running
+ # (i.e., have not received the `test_end` event).
+ self.test_starts = {}
+
+ # Trie of test results. Each directory in the test name is a node in
+ # the trie and the leaf contains the dict of per-test data.
+ self.tests = {}
+
+ # Two dictionaries keyed by test name. Values are lists of strings:
+ # actual metadata content and other messages, respectively.
+ # See _append_test_message for examples.
+ self.actual_metadata = defaultdict(list)
+ self.messages = defaultdict(list)
+
+ # List of tests that have failing subtests.
+ self.tests_with_subtest_fails = set()
+
+ # Browser log for the current test under execution.
+ # These logs are from ChromeDriver's stdout/err, so we cannot say for
+ # sure which test a message is from, but instead we correlate them based
+ # on timing.
+ self.browser_log = []
+
+ def _append_test_message(self, test, subtest, wpt_actual_status, message):
+ r"""
+ Appends the message data for a test or subtest.
+
+ :param str test: the name of the test
+ :param str subtest: the name of the subtest with the message. Will be
+ None if this is called for a test.
+ :param str wpt_actual_status: the test status as reported by WPT
+ :param str message: the string to append to the message for this test
+
+ Example actual_metadata of a test with a subtest:
+ "[test_name]\n expected: OK\n"
+ " [subtest_name]\n expected: FAIL\n"
+
+ NOTE: throughout this function we output a key called "expected" but
+ fill it in with the actual status. This is by design. The goal of this
+ output is to look exactly like WPT's expectation metadata so that it
+ can be easily diff-ed.
+
+ Messages are appended verbatim to self.messages[test].
+ """
+ if subtest:
+ result = " [%s]\n expected: %s\n" % (_escape_heading(subtest),
+ wpt_actual_status)
+ self.actual_metadata[test].append(result)
+ if message:
+ self.messages[test].append("%s: %s\n" % (subtest, message))
+ else:
+ # No subtest, so this is the top-level test. The result must be
+ # prepended to the list, so that it comes before any subtest.
+ test_name_last_part = test.split("/")[-1]
+ result = "[%s]\n expected: %s\n" % (
+ _escape_heading(test_name_last_part), wpt_actual_status)
+ self.actual_metadata[test].insert(0, result)
+ if message:
+ self.messages[test].insert(0, "Harness: %s\n" % message)
+
+ def _append_artifact(self, cur_dict, artifact_name, artifact_value):
+ """
+ Appends artifacts to the specified dictionary.
+ :param dict cur_dict: the test leaf dictionary to append to
+ :param str artifact_name: the name of the artifact
+ :param str artifact_value: the value of the artifact
+ """
+ assert isinstance(artifact_value, str), "artifact_value must be a str"
+ if "artifacts" not in cur_dict.keys():
+ cur_dict["artifacts"] = defaultdict(list)
+ cur_dict["artifacts"][artifact_name].append(artifact_value)
+
+ def _store_test_result(self, name, actual, expected, actual_metadata,
+ messages, wpt_actual, subtest_failure,
+ duration=None, reftest_screenshots=None):
+ """
+ Stores the result of a single test in |self.tests|
+
+ :param str name: name of the test.
+ :param str actual: actual status of the test.
+ :param str expected: expected statuses of the test.
+ :param list actual_metadata: a list of metadata items.
+ :param list messages: a list of test messages.
+ :param str wpt_actual: actual status reported by wpt, may differ from |actual|.
+ :param bool subtest_failure: whether this test failed because of subtests.
+ :param Optional[float] duration: time it took in seconds to run this test.
+ :param Optional[list] reftest_screenshots: see executors/base.py for definition.
+ """
+ # The test name can contain a leading / which will produce an empty
+ # string in the first position of the list returned by split. We use
+ # filter(None) to remove such entries.
+ name_parts = filter(None, name.split("/"))
+ cur_dict = self.tests
+ for name_part in name_parts:
+ cur_dict = cur_dict.setdefault(name_part, {})
+ # Splitting and joining the list of statuses here avoids the need for
+ # recursively postprocessing the |tests| trie at shutdown. We assume the
+ # number of repetitions is typically small enough for the quadratic
+ # runtime to not matter.
+ statuses = cur_dict.get("actual", "").split()
+ statuses.append(actual)
+ cur_dict["actual"] = " ".join(statuses)
+ cur_dict["expected"] = expected
+ if duration is not None:
+ # Record the time to run the first invocation only.
+ cur_dict.setdefault("time", duration)
+ durations = cur_dict.setdefault("times", [])
+ durations.append(duration)
+ if subtest_failure:
+ self._append_artifact(cur_dict, "wpt_subtest_failure", "true")
+ if wpt_actual != actual:
+ self._append_artifact(cur_dict, "wpt_actual_status", wpt_actual)
+ if wpt_actual == 'CRASH':
+ for line in self.browser_log:
+ self._append_artifact(cur_dict, "wpt_crash_log", line)
+ for metadata in actual_metadata:
+ self._append_artifact(cur_dict, "wpt_actual_metadata", metadata)
+ for message in messages:
+ self._append_artifact(cur_dict, "wpt_log", message)
+
+ # Store screenshots (if any).
+ for item in reftest_screenshots or []:
+ if not isinstance(item, dict):
+ # Skip the relation string.
+ continue
+ data = "%s: %s" % (item["url"], item["screenshot"])
+ self._append_artifact(cur_dict, "screenshots", data)
+
+ # Figure out if there was a regression, unexpected status, or flake.
+ # This only happens for tests that were run
+ if actual != "SKIP":
+ if actual not in expected:
+ cur_dict["is_unexpected"] = True
+ if actual != "PASS":
+ cur_dict["is_regression"] = True
+ if len(set(statuses)) > 1:
+ cur_dict["is_flaky"] = True
+
+ # Update the count of how many tests ran with each status. Only includes
+ # the first invocation's result in the totals.
+ if len(statuses) == 1:
+ self.num_failures_by_status[actual] += 1
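+
+    # Illustrative example: storing a result for "/foo/bar/test.html" with
+    # actual == expected == "PASS" (no duration, artifacts or screenshots)
+    # leaves the trie as
+    #     self.tests == {"foo": {"bar": {"test.html":
+    #         {"actual": "PASS", "expected": "PASS"}}}}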
+
+ def _map_status_name(self, status):
+ """
+ Maps a WPT status to a Chromium status.
+
+ Chromium has five main statuses that we have to map to:
+ CRASH: the test harness crashed
+ FAIL: the test did not run as expected
+ PASS: the test ran as expected
+ SKIP: the test was not run
+          TIMEOUT: the test did not finish in time and was aborted
+
+ :param str status: the string status of a test from WPT
+ :return: a corresponding string status for Chromium
+ """
+ if status == "OK":
+ return "PASS"
+ if status == "NOTRUN":
+ return "SKIP"
+ if status == "EXTERNAL-TIMEOUT":
+ return "TIMEOUT"
+ if status in ("ERROR", "PRECONDITION_FAILED"):
+ return "FAIL"
+ if status == "INTERNAL-ERROR":
+ return "CRASH"
+ # Any other status just gets returned as-is.
+ return status
+
+ def _get_expected_status_from_data(self, actual_status, data):
+ """
+ Gets the expected statuses from a |data| dictionary.
+
+ If there is no expected status in data, the actual status is returned.
+ This is because mozlog will delete "expected" from |data| if it is the
+ same as "status". So the presence of "expected" implies that "status" is
+ unexpected. Conversely, the absence of "expected" implies the "status"
+ is expected. So we use the "expected" status if it's there or fall back
+ to the actual status if it's not.
+
+ If the test has multiple statuses, it will have other statuses listed as
+ "known_intermittent" in |data|. If these exist, they will be added to
+        the returned status with spaces in between.
+
+ :param str actual_status: the actual status of the test
+ :param data: a data dictionary to extract expected status from
+ :return str: the expected statuses as a string
+ """
+ expected_statuses = self._map_status_name(data["expected"]) if "expected" in data else actual_status
+ if data.get("known_intermittent"):
+            all_statuses = {self._map_status_name(other_status) for other_status in data["known_intermittent"]}
+            all_statuses.add(expected_statuses)
+            expected_statuses = " ".join(sorted(all_statuses))
+ return expected_statuses
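+
+    # Illustrative example: with data = {"expected": "FAIL",
+    # "known_intermittent": ["TIMEOUT"]} and actual_status "PASS", the method
+    # above returns "FAIL TIMEOUT" (statuses mapped, sorted, space-separated).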
+
+ def _get_time(self, data):
+ """Get the timestamp of a message in seconds since the UNIX epoch."""
+ maybe_timestamp_millis = data.get("time")
+ if maybe_timestamp_millis is not None:
+ return float(maybe_timestamp_millis) / 1000
+ return time.time()
+
+ def _time_test(self, test_name, data):
+ """Time how long a test took to run.
+
+ :param str test_name: the name of the test to time
+ :param data: a data dictionary to extract the test end timestamp from
+ :return Optional[float]: a nonnegative duration in seconds or None if
+ the measurement is unavailable or invalid
+ """
+ test_start = self.test_starts.pop(test_name, None)
+ if test_start is not None:
+ # The |data| dictionary only provides millisecond resolution
+ # anyway, so further nonzero digits are unlikely to be meaningful.
+ duration = round(self._get_time(data) - test_start, 3)
+ if duration >= 0:
+ return duration
+ return None
+
+ def suite_start(self, data):
+ if self.start_timestamp_seconds is None:
+ self.start_timestamp_seconds = self._get_time(data)
+
+ def test_start(self, data):
+ test_name = data["test"]
+ self.test_starts[test_name] = self._get_time(data)
+
+ def test_status(self, data):
+ test_name = data["test"]
+ wpt_actual_status = data["status"]
+ actual_status = self._map_status_name(wpt_actual_status)
+ expected_statuses = self._get_expected_status_from_data(actual_status, data)
+
+ is_unexpected = actual_status not in expected_statuses
+ if is_unexpected and test_name not in self.tests_with_subtest_fails:
+ self.tests_with_subtest_fails.add(test_name)
+ # We should always get a subtest in the data dict, but it's technically
+ # possible that it's missing. Be resilient here.
+ subtest_name = data.get("subtest", "UNKNOWN SUBTEST")
+ self._append_test_message(test_name, subtest_name,
+ wpt_actual_status, data.get("message", ""))
+
+ def test_end(self, data):
+ test_name = data["test"]
+ # Save the status reported by WPT since we might change it when
+ # reporting to Chromium.
+ wpt_actual_status = data["status"]
+ actual_status = self._map_status_name(wpt_actual_status)
+ expected_statuses = self._get_expected_status_from_data(actual_status, data)
+ duration = self._time_test(test_name, data)
+ subtest_failure = False
+ if test_name in self.tests_with_subtest_fails:
+ subtest_failure = True
+ # Clean up the test list to avoid accumulating too many.
+ self.tests_with_subtest_fails.remove(test_name)
+ # This test passed but it has failing subtests. Since we can only
+ # report a single status to Chromium, we choose FAIL to indicate
+ # that something about this test did not run correctly.
+ if actual_status == "PASS":
+ actual_status = "FAIL"
+
+ self._append_test_message(test_name, None, wpt_actual_status,
+ data.get("message", ""))
+ self._store_test_result(test_name,
+ actual_status,
+ expected_statuses,
+ self.actual_metadata[test_name],
+ self.messages[test_name],
+ wpt_actual_status,
+ subtest_failure,
+ duration,
+ data.get("extra", {}).get("reftest_screenshots"))
+
+ # Remove the test from dicts to avoid accumulating too many.
+ self.actual_metadata.pop(test_name)
+ self.messages.pop(test_name)
+
+ # New test, new browser logs.
+ self.browser_log = []
+
+ def shutdown(self, data):
+ # Create the final result dictionary
+ final_result = {
+ # There are some required fields that we just hard-code.
+ "interrupted": False,
+ "path_delimiter": "/",
+ "version": 3,
+ "seconds_since_epoch": self.start_timestamp_seconds,
+ "num_failures_by_type": self.num_failures_by_status,
+ "tests": self.tests
+ }
+ return json.dumps(final_result)
+
+ def process_output(self, data):
+ cmd = data.get("command", "")
+ if any(c in cmd for c in ["chromedriver", "logcat"]):
+ self.browser_log.append(data['data'])
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/tests/__init__.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/tests/test_chromium.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/tests/test_chromium.py
new file mode 100644
index 0000000000..bf815d5dc7
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/tests/test_chromium.py
@@ -0,0 +1,828 @@
+# mypy: ignore-errors
+
+import json
+import sys
+from os.path import dirname, join
+from io import StringIO
+
+from mozlog import handlers, structuredlog
+import pytest
+
+sys.path.insert(0, join(dirname(__file__), "..", ".."))
+from formatters.chromium import ChromiumFormatter
+
+
+@pytest.fixture
+def logger():
+ test_logger = structuredlog.StructuredLogger("test_a")
+ try:
+ yield test_logger
+ finally:
+ # Loggers of the same name share state globally:
+ # https://searchfox.org/mozilla-central/rev/1c54648c082efdeb08cf6a5e3a8187e83f7549b9/testing/mozbase/mozlog/mozlog/structuredlog.py#195-196
+ #
+ # Resetting the state here ensures the logger will not be shut down in
+ # the next test.
+ test_logger.reset_state()
+
+
+def test_chromium_required_fields(logger, capfd):
+ # Test that the test results contain a handful of required fields.
+
+ # Set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # output a bunch of stuff
+ logger.suite_start(["test-id-1"], run_info={}, time=123)
+ logger.test_start("test-id-1")
+ logger.test_end("test-id-1", status="PASS", expected="PASS")
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_obj = json.load(output)
+
+ # Check for existence of required fields
+ assert "interrupted" in output_obj
+ assert "path_delimiter" in output_obj
+ assert "version" in output_obj
+ assert "num_failures_by_type" in output_obj
+ assert "tests" in output_obj
+
+ test_obj = output_obj["tests"]["test-id-1"]
+ assert "actual" in test_obj
+ assert "expected" in test_obj
+
+
+def test_time_per_test(logger, capfd):
+ # Test that the formatter measures time per test correctly.
+
+ # Set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ logger.suite_start(["test-id-1", "test-id-2"], run_info={}, time=50)
+ logger.test_start("test-id-1", time=100)
+ logger.test_start("test-id-2", time=200)
+ logger.test_end("test-id-1", status="PASS", expected="PASS", time=300)
+ logger.test_end("test-id-2", status="PASS", expected="PASS", time=199)
+ logger.suite_end()
+
+ logger.suite_start(["test-id-1"], run_info={}, time=400)
+ logger.test_start("test-id-1", time=500)
+ logger.test_end("test-id-1", status="PASS", expected="PASS", time=600)
+ logger.suite_end()
+
+ # Write the final results.
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_obj = json.load(output)
+
+ test1_obj = output_obj["tests"]["test-id-1"]
+ test2_obj = output_obj["tests"]["test-id-2"]
+ # Test 1 run 1: 300ms - 100ms = 0.2s
+ # Test 1 run 2: 600ms - 500ms = 0.1s
+ assert test1_obj["time"] == pytest.approx(0.2)
+ assert len(test1_obj["times"]) == 2
+ assert test1_obj["times"][0] == pytest.approx(0.2)
+ assert test1_obj["times"][1] == pytest.approx(0.1)
+ assert "time" not in test2_obj
+ assert "times" not in test2_obj
+
+
+def test_chromium_test_name_trie(logger, capfd):
+ # Ensure test names are broken into directories and stored in a trie with
+ # test results at the leaves.
+
+ # Set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # output a bunch of stuff
+ logger.suite_start(["/foo/bar/test-id-1", "/foo/test-id-2"], run_info={},
+ time=123)
+ logger.test_start("/foo/bar/test-id-1")
+ logger.test_end("/foo/bar/test-id-1", status="TIMEOUT", expected="FAIL")
+ logger.test_start("/foo/test-id-2")
+ logger.test_end("/foo/test-id-2", status="ERROR", expected="TIMEOUT")
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_obj = json.load(output)
+
+ # Ensure that the test names are broken up by directory name and that the
+ # results are stored at the leaves.
+ test_obj = output_obj["tests"]["foo"]["bar"]["test-id-1"]
+ assert test_obj["actual"] == "TIMEOUT"
+ assert test_obj["expected"] == "FAIL"
+
+ test_obj = output_obj["tests"]["foo"]["test-id-2"]
+ # The ERROR status is mapped to FAIL for Chromium
+ assert test_obj["actual"] == "FAIL"
+ assert test_obj["expected"] == "TIMEOUT"
+
+
+def test_num_failures_by_type(logger, capfd):
+ # Test that the number of failures by status type is correctly calculated.
+
+ # Set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # Run some tests with different statuses: 3 passes, 1 timeout
+ logger.suite_start(["t1", "t2", "t3", "t4"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_end("t1", status="PASS", expected="PASS")
+ logger.test_start("t2")
+ logger.test_end("t2", status="PASS", expected="PASS")
+ logger.test_start("t3")
+ logger.test_end("t3", status="PASS", expected="FAIL")
+ logger.test_start("t4")
+ logger.test_end("t4", status="TIMEOUT", expected="CRASH")
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ num_failures_by_type = json.load(output)["num_failures_by_type"]
+
+ # We expect 3 passes and 1 timeout, nothing else.
+ assert sorted(num_failures_by_type.keys()) == ["PASS", "TIMEOUT"]
+ assert num_failures_by_type["PASS"] == 3
+ assert num_failures_by_type["TIMEOUT"] == 1
+
+
+def test_subtest_messages(logger, capfd):
+ # Tests accumulation of test output
+
+ # Set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # Run two tests with subtest messages. The subtest name should be included
+ # in the output. We should also tolerate missing messages and subtest names
+ # with unusual characters.
+ logger.suite_start(["t1", "t2"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_status("t1", status="FAIL", subtest="t1_a",
+ message="t1_a_message")
+ # Subtest name includes a backslash and two closing square brackets.
+ logger.test_status("t1", status="PASS", subtest=r"t1_\[]]b",
+ message="t1_b_message")
+ logger.test_end("t1", status="PASS", expected="PASS")
+ logger.test_start("t2")
+ # Subtests with empty messages should not be ignored.
+ logger.test_status("t2", status="PASS", subtest="t2_a")
+ # A test-level message will also be appended
+ logger.test_end("t2", status="TIMEOUT", expected="PASS",
+ message="t2_message")
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ t1_artifacts = output_json["tests"]["t1"]["artifacts"]
+ assert t1_artifacts["wpt_actual_metadata"] == [
+ "[t1]\n expected: PASS\n",
+ " [t1_a]\n expected: FAIL\n",
+ " [t1_\\\\[\\]\\]b]\n expected: PASS\n",
+ ]
+ assert t1_artifacts["wpt_log"] == [
+ "t1_a: t1_a_message\n",
+ # Only humans will read the log, so there's no need to escape
+ # characters here.
+ "t1_\\[]]b: t1_b_message\n",
+ ]
+ assert t1_artifacts["wpt_subtest_failure"] == ["true"]
+ t2_artifacts = output_json["tests"]["t2"]["artifacts"]
+ assert t2_artifacts["wpt_actual_metadata"] == [
+ "[t2]\n expected: TIMEOUT\n",
+ " [t2_a]\n expected: PASS\n",
+ ]
+ assert t2_artifacts["wpt_log"] == [
+ "Harness: t2_message\n"
+ ]
+ assert "wpt_subtest_failure" not in t2_artifacts.keys()
+
+
+def test_subtest_failure(logger, capfd):
+ # Tests that a test fails if a subtest fails
+
+ # Set up the handler.
+ output = StringIO()
+ formatter = ChromiumFormatter()
+ logger.add_handler(handlers.StreamHandler(output, formatter))
+
+ # Run a test with some subtest failures.
+ logger.suite_start(["t1"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_status("t1", status="FAIL", subtest="t1_a",
+ message="t1_a_message")
+ logger.test_status("t1", status="PASS", subtest="t1_b",
+ message="t1_b_message")
+ logger.test_status("t1", status="TIMEOUT", subtest="t1_c",
+ message="t1_c_message")
+
+ # Make sure the test name was added to the set of tests with subtest fails
+ assert "t1" in formatter.tests_with_subtest_fails
+
+ # The test status is reported as a pass here because the harness was able to
+ # run the test to completion.
+ logger.test_end("t1", status="PASS", expected="PASS", message="top_message")
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ test_obj = output_json["tests"]["t1"]
+ t1_artifacts = test_obj["artifacts"]
+ assert t1_artifacts["wpt_actual_metadata"] == [
+ "[t1]\n expected: PASS\n",
+ " [t1_a]\n expected: FAIL\n",
+ " [t1_b]\n expected: PASS\n",
+ " [t1_c]\n expected: TIMEOUT\n",
+ ]
+ assert t1_artifacts["wpt_log"] == [
+ "Harness: top_message\n",
+ "t1_a: t1_a_message\n",
+ "t1_b: t1_b_message\n",
+ "t1_c: t1_c_message\n",
+ ]
+ assert t1_artifacts["wpt_subtest_failure"] == ["true"]
+ # The status of the test in the output is a failure because subtests failed,
+ # despite the harness reporting that the test passed. But the harness status
+ # is logged as an artifact.
+ assert t1_artifacts["wpt_actual_status"] == ["PASS"]
+ assert test_obj["actual"] == "FAIL"
+ assert test_obj["expected"] == "PASS"
+ # Also ensure that the formatter cleaned up its internal state
+ assert "t1" not in formatter.tests_with_subtest_fails
+
+
+def test_expected_subtest_failure(logger, capfd):
+ # Tests that an expected subtest failure does not cause the test to fail
+
+ # Set up the handler.
+ output = StringIO()
+ formatter = ChromiumFormatter()
+ logger.add_handler(handlers.StreamHandler(output, formatter))
+
+ # Run a test with some expected subtest failures.
+ logger.suite_start(["t1"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_status("t1", status="FAIL", expected="FAIL", subtest="t1_a",
+ message="t1_a_message")
+ logger.test_status("t1", status="PASS", subtest="t1_b",
+ message="t1_b_message")
+ logger.test_status("t1", status="TIMEOUT", expected="TIMEOUT", subtest="t1_c",
+ message="t1_c_message")
+
+ # The subtest failures are all expected so this test should not be added to
+ # the set of tests with subtest failures.
+ assert "t1" not in formatter.tests_with_subtest_fails
+
+ # The test status is reported as a pass here because the harness was able to
+ # run the test to completion.
+ logger.test_end("t1", status="OK", expected="OK")
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ test_obj = output_json["tests"]["t1"]
+ assert test_obj["artifacts"]["wpt_actual_metadata"] == [
+ "[t1]\n expected: OK\n",
+ " [t1_a]\n expected: FAIL\n",
+ " [t1_b]\n expected: PASS\n",
+ " [t1_c]\n expected: TIMEOUT\n",
+ ]
+ assert test_obj["artifacts"]["wpt_log"] == [
+ "t1_a: t1_a_message\n",
+ "t1_b: t1_b_message\n",
+ "t1_c: t1_c_message\n",
+ ]
+ # The status of the test in the output is a pass because the subtest
+ # failures were all expected.
+ assert test_obj["actual"] == "PASS"
+ assert test_obj["expected"] == "PASS"
+
+
+def test_unexpected_subtest_pass(logger, capfd):
+ # A subtest that unexpectedly passes is considered a failure condition.
+
+ # Set up the handler.
+ output = StringIO()
+ formatter = ChromiumFormatter()
+ logger.add_handler(handlers.StreamHandler(output, formatter))
+
+ # Run a test with a subtest that is expected to fail but passes.
+ logger.suite_start(["t1"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_status("t1", status="PASS", expected="FAIL", subtest="t1_a",
+ message="t1_a_message")
+
+ # Since the subtest behaviour is unexpected, it's considered a failure, so
+ # the test should be added to the set of tests with subtest failures.
+ assert "t1" in formatter.tests_with_subtest_fails
+
+ # The test status is reported as a pass here because the harness was able to
+ # run the test to completion.
+ logger.test_end("t1", status="PASS", expected="PASS")
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ test_obj = output_json["tests"]["t1"]
+ t1_artifacts = test_obj["artifacts"]
+ assert t1_artifacts["wpt_actual_metadata"] == [
+ "[t1]\n expected: PASS\n",
+ " [t1_a]\n expected: PASS\n",
+ ]
+ assert t1_artifacts["wpt_log"] == [
+ "t1_a: t1_a_message\n",
+ ]
+ assert t1_artifacts["wpt_subtest_failure"] == ["true"]
+ # Since the subtest status is unexpected, we fail the test. But we report
+ # wpt_actual_status as an artifact
+ assert t1_artifacts["wpt_actual_status"] == ["PASS"]
+ assert test_obj["actual"] == "FAIL"
+ assert test_obj["expected"] == "PASS"
+ # Also ensure that the formatter cleaned up its internal state
+ assert "t1" not in formatter.tests_with_subtest_fails
+
+
+def test_expected_test_fail(logger, capfd):
+ # Check that an expected test-level failure is treated as a Pass
+
+ # Set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # Run a single test whose failure (ERROR) is expected.
+ logger.suite_start(["t1"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_end("t1", status="ERROR", expected="ERROR")
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ test_obj = output_json["tests"]["t1"]
+ # The test's actual and expected status should map from "ERROR" to "FAIL"
+ assert test_obj["actual"] == "FAIL"
+ assert test_obj["expected"] == "FAIL"
+ # ...and this test should be neither a regression nor unexpected
+ assert "is_regression" not in test_obj
+ assert "is_unexpected" not in test_obj
+
+
+def test_unexpected_test_fail(logger, capfd):
+ # Check that an unexpected test-level failure is marked as unexpected and
+ # as a regression.
+
+ # Set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # Run a single test that fails unexpectedly (ERROR when OK was expected).
+ logger.suite_start(["t1"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_end("t1", status="ERROR", expected="OK")
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ test_obj = output_json["tests"]["t1"]
+ # The test's actual and expected status should be mapped, ERROR->FAIL and
+ # OK->PASS
+ assert test_obj["actual"] == "FAIL"
+ assert test_obj["expected"] == "PASS"
+ # ..and this test should be a regression and unexpected
+ assert test_obj["is_regression"] is True
+ assert test_obj["is_unexpected"] is True
+
+
+def test_flaky_test_expected(logger, capfd):
+ # Check that a flaky test with multiple possible statuses is seen as
+ # expected if its actual status is one of the possible ones.
+
+ # set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # Run a test that is known to be flaky
+ logger.suite_start(["t1"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_end("t1", status="ERROR", expected="OK", known_intermittent=["ERROR", "TIMEOUT"])
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ test_obj = output_json["tests"]["t1"]
+ # The test's statuses are all mapped, changing ERROR->FAIL and OK->PASS
+ assert test_obj["actual"] == "FAIL"
+ # All the possible statuses are merged and sorted together into expected.
+ assert test_obj["expected"] == "FAIL PASS TIMEOUT"
+ # ...this is not a regression or unexpected because the actual status is one
+ # of the expected ones
+ assert "is_regression" not in test_obj
+ assert "is_unexpected" not in test_obj
+
+
+def test_flaky_test_unexpected(logger, capfd):
+ # Check that a flaky test with multiple possible statuses is seen as
+ # unexpected if its actual status is NOT one of the possible ones.
+
+ # set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # Run a test that is known to be flaky
+ logger.suite_start(["t1"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_end("t1", status="ERROR", expected="OK", known_intermittent=["TIMEOUT"])
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ test_obj = output_json["tests"]["t1"]
+ # The test's statuses are all mapped, changing ERROR->FAIL and OK->PASS
+ assert test_obj["actual"] == "FAIL"
+ # All the possible statuses are merged and sorted together into expected.
+ assert test_obj["expected"] == "PASS TIMEOUT"
+ # ...this is a regression and unexpected because the actual status is not
+ # one of the expected ones
+ assert test_obj["is_regression"] is True
+ assert test_obj["is_unexpected"] is True
+
+
+def test_precondition_failed(logger, capfd):
+ # Check that a failed precondition gets properly handled.
+
+ # set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # Run a test with a precondition failure
+ logger.suite_start(["t1"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_end("t1", status="PRECONDITION_FAILED", expected="OK")
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ test_obj = output_json["tests"]["t1"]
+ # The precondition failure should map to FAIL status, but we should also
+ # have an artifact containing the original PRECONDITION_FAILED status.
+ assert test_obj["actual"] == "FAIL"
+ assert test_obj["artifacts"]["wpt_actual_status"] == ["PRECONDITION_FAILED"]
+ # ...this is an unexpected regression because we expected a pass but failed
+ assert test_obj["is_regression"] is True
+ assert test_obj["is_unexpected"] is True
+
+
+def test_repeated_test_statuses(logger, capfd):
+ # Check that the logger outputs all statuses from multiple runs of a test.
+
+ # Set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # Run a test suite for the first time.
+ logger.suite_start(["t1"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_end("t1", status="PASS", expected="PASS", known_intermittent=[])
+ logger.suite_end()
+
+ # Run the test suite for the second time.
+ logger.suite_start(["t1"], run_info={}, time=456)
+ logger.test_start("t1")
+ logger.test_end("t1", status="FAIL", expected="PASS", known_intermittent=[])
+ logger.suite_end()
+
+ # Write the final results.
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ status_totals = output_json["num_failures_by_type"]
+ assert status_totals["PASS"] == 1
+ # A missing result type is the same as being present and set to zero (0).
+ assert status_totals.get("FAIL", 0) == 0
+
+ # The actual statuses are accumulated in an ordered, space-separated list.
+ test_obj = output_json["tests"]["t1"]
+ assert test_obj["actual"] == "PASS FAIL"
+ assert test_obj["expected"] == "PASS"
+
+
+def test_flaky_test_detection(logger, capfd):
+ # Check that the logger detects flakiness for a test run multiple times.
+
+ # Set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ logger.suite_start(["t1", "t2"], run_info={})
+ logger.test_start("t1")
+ logger.test_start("t2")
+ logger.test_end("t1", status="FAIL", expected="PASS")
+ logger.test_end("t2", status="FAIL", expected="FAIL")
+ logger.suite_end()
+
+ logger.suite_start(["t1", "t2"], run_info={})
+ logger.test_start("t1")
+ logger.test_start("t2")
+ logger.test_end("t1", status="PASS", expected="PASS")
+ logger.test_end("t2", status="FAIL", expected="FAIL")
+ logger.suite_end()
+
+ # Write the final results.
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ # We consider a test flaky if it runs multiple times and produces more than
+ # one kind of result.
+ test1_obj = output_json["tests"]["t1"]
+ test2_obj = output_json["tests"]["t2"]
+ assert test1_obj["is_flaky"] is True
+ assert "is_flaky" not in test2_obj
+
+
+def test_known_intermittent_empty(logger, capfd):
+ # If the known_intermittent list is empty, we want to ensure we don't append
+ # any extraneous characters to the output.
+
+ # set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # Run a test and include an empty known_intermittent list
+ logger.suite_start(["t1"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_end("t1", status="OK", expected="OK", known_intermittent=[])
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ test_obj = output_json["tests"]["t1"]
+ # Both actual and expected statuses get mapped to Pass. No extra whitespace
+ # anywhere.
+ assert test_obj["actual"] == "PASS"
+ assert test_obj["expected"] == "PASS"
+
+
+def test_known_intermittent_duplicate(logger, capfd):
+ # We don't want to have duplicate statuses in the final "expected" field.
+
+ # Set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # There are two duplications in this input:
+ # 1. known_intermittent already contains expected;
+ # 2. both statuses in known_intermittent map to FAIL in Chromium.
+ # In the end, we should only get one FAIL in Chromium "expected".
+ logger.suite_start(["t1"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_end("t1", status="ERROR", expected="ERROR", known_intermittent=["FAIL", "ERROR"])
+ logger.suite_end()
+ logger.shutdown()
+
+ # Check nothing got output to stdout/stderr.
+ # (Note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # Check the actual output of the formatter.
+ output.seek(0)
+ output_json = json.load(output)
+
+ test_obj = output_json["tests"]["t1"]
+ assert test_obj["actual"] == "FAIL"
+ # No duplicate "FAIL" in "expected".
+ assert test_obj["expected"] == "FAIL"
+
+
+def test_reftest_screenshots(logger, capfd):
+ # reftest_screenshots, if present, should be plumbed into artifacts.
+
+ # Set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ # Run a reftest with reftest_screenshots.
+ logger.suite_start(["t1"], run_info={}, time=123)
+ logger.test_start("t1")
+ logger.test_end("t1", status="FAIL", expected="PASS", extra={
+ "reftest_screenshots": [
+ {"url": "foo.html", "hash": "HASH1", "screenshot": "DATA1"},
+ "!=",
+ {"url": "foo-ref.html", "hash": "HASH2", "screenshot": "DATA2"},
+ ]
+ })
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ test_obj = output_json["tests"]["t1"]
+ assert test_obj["artifacts"]["screenshots"] == [
+ "foo.html: DATA1",
+ "foo-ref.html: DATA2",
+ ]
+
+
+def test_process_output_crashing_test(logger, capfd):
+ """Test that chromedriver logs are preserved for crashing tests"""
+
+ # Set up the handler.
+ output = StringIO()
+ logger.add_handler(handlers.StreamHandler(output, ChromiumFormatter()))
+
+ logger.suite_start(["t1", "t2", "t3"], run_info={}, time=123)
+
+ logger.test_start("t1")
+ logger.process_output(100, "This message should be recorded", "/some/path/to/chromedriver --some-flag")
+ logger.process_output(101, "This message should not be recorded", "/some/other/process --another-flag")
+ logger.process_output(100, "This message should also be recorded", "/some/path/to/chromedriver --some-flag")
+ logger.test_end("t1", status="CRASH", expected="CRASH")
+
+ logger.test_start("t2")
+ logger.process_output(100, "Another message for the second test", "/some/path/to/chromedriver --some-flag")
+ logger.test_end("t2", status="CRASH", expected="PASS")
+
+ logger.test_start("t3")
+ logger.process_output(100, "This test fails", "/some/path/to/chromedriver --some-flag")
+ logger.process_output(100, "But the output should not be captured", "/some/path/to/chromedriver --some-flag")
+ logger.process_output(100, "Because it does not crash", "/some/path/to/chromedriver --some-flag")
+ logger.test_end("t3", status="FAIL", expected="PASS")
+
+ logger.suite_end()
+ logger.shutdown()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_json = json.load(output)
+
+ test_obj = output_json["tests"]["t1"]
+ assert test_obj["artifacts"]["wpt_crash_log"] == [
+ "This message should be recorded",
+ "This message should also be recorded"
+ ]
+
+ test_obj = output_json["tests"]["t2"]
+ assert test_obj["artifacts"]["wpt_crash_log"] == [
+ "Another message for the second test"
+ ]
+
+ test_obj = output_json["tests"]["t3"]
+ assert "wpt_crash_log" not in test_obj["artifacts"]
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/wptreport.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/wptreport.py
new file mode 100644
index 0000000000..be6cca2afc
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/wptreport.py
@@ -0,0 +1,137 @@
+# mypy: allow-untyped-defs
+
+import json
+import re
+
+from mozlog.structured.formatters.base import BaseFormatter
+from ..executors.base import strip_server
+
+
+LONE_SURROGATE_RE = re.compile("[\uD800-\uDFFF]")
+
+
+def surrogate_replacement(match):
+ return "U+" + hex(ord(match.group()))[2:]
+
+
+def replace_lone_surrogate(data):
+ return LONE_SURROGATE_RE.subn(surrogate_replacement, data)[0]
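+
+# Editorial note (not part of the original module): a quick worked example of
+# the helpers above. A lone surrogate such as "\ud800" is rewritten into a
+# readable escape, e.g.:
+#   replace_lone_surrogate("bad\ud800text") == "badU+d800text"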
+
+
+class WptreportFormatter(BaseFormatter): # type: ignore
+ """Formatter that produces results in the format that wptreport expects."""
+
+ def __init__(self):
+ self.raw_results = {}
+ self.results = {}
+
+ def suite_start(self, data):
+ if 'run_info' in data:
+ self.results['run_info'] = data['run_info']
+ self.results['time_start'] = data['time']
+ self.results["results"] = []
+
+ def suite_end(self, data):
+ self.results['time_end'] = data['time']
+ for test_name in self.raw_results:
+ result = {"test": test_name}
+ result.update(self.raw_results[test_name])
+ self.results["results"].append(result)
+ return json.dumps(self.results) + "\n"
+
+ def find_or_create_test(self, data):
+ test_name = data["test"]
+ if test_name not in self.raw_results:
+ self.raw_results[test_name] = {
+ "subtests": [],
+ "status": "",
+ "message": None
+ }
+ return self.raw_results[test_name]
+
+ def test_start(self, data):
+ test = self.find_or_create_test(data)
+ test["start_time"] = data["time"]
+
+ def create_subtest(self, data):
+ test = self.find_or_create_test(data)
+ subtest_name = replace_lone_surrogate(data["subtest"])
+
+ subtest = {
+ "name": subtest_name,
+ "status": "",
+ "message": None
+ }
+ test["subtests"].append(subtest)
+
+ return subtest
+
+ def test_status(self, data):
+ subtest = self.create_subtest(data)
+ subtest["status"] = data["status"]
+ if "expected" in data:
+ subtest["expected"] = data["expected"]
+ if "known_intermittent" in data:
+ subtest["known_intermittent"] = data["known_intermittent"]
+ if "message" in data:
+ subtest["message"] = replace_lone_surrogate(data["message"])
+
+ def test_end(self, data):
+ test = self.find_or_create_test(data)
+ start_time = test.pop("start_time")
+ test["duration"] = data["time"] - start_time
+ test["status"] = data["status"]
+ if "expected" in data:
+ test["expected"] = data["expected"]
+ if "known_intermittent" in data:
+ test["known_intermittent"] = data["known_intermittent"]
+ if "message" in data:
+ test["message"] = replace_lone_surrogate(data["message"])
+ if "reftest_screenshots" in data.get("extra", {}):
+ test["screenshots"] = {
+ strip_server(item["url"]): "sha1:" + item["hash"]
+ for item in data["extra"]["reftest_screenshots"]
+ if type(item) == dict
+ }
+ test_name = data["test"]
+ result = {"test": data["test"]}
+ result.update(self.raw_results[test_name])
+ self.results["results"].append(result)
+ self.raw_results.pop(test_name)
+
+ def assertion_count(self, data):
+ test = self.find_or_create_test(data)
+ test["asserts"] = {
+ "count": data["count"],
+ "min": data["min_expected"],
+ "max": data["max_expected"]
+ }
+
+ def lsan_leak(self, data):
+ if "lsan_leaks" not in self.results:
+ self.results["lsan_leaks"] = []
+ lsan_leaks = self.results["lsan_leaks"]
+ lsan_leaks.append({"frames": data["frames"],
+ "scope": data["scope"],
+ "allowed_match": data.get("allowed_match")})
+
+ def find_or_create_mozleak(self, data):
+ if "mozleak" not in self.results:
+ self.results["mozleak"] = {}
+ scope = data["scope"]
+ if scope not in self.results["mozleak"]:
+ self.results["mozleak"][scope] = {"objects": [], "total": []}
+ return self.results["mozleak"][scope]
+
+ def mozleak_object(self, data):
+ scope_data = self.find_or_create_mozleak(data)
+ scope_data["objects"].append({"process": data["process"],
+ "name": data["name"],
+ "allowed": data.get("allowed", False),
+ "bytes": data["bytes"]})
+
+ def mozleak_total(self, data):
+ scope_data = self.find_or_create_mozleak(data)
+ scope_data["total"].append({"bytes": data["bytes"],
+ "threshold": data.get("threshold", 0),
+ "process": data["process"]})
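+
+
+# --- Illustrative usage sketch (editorial addition, not part of this module) ---
+# A minimal example of wiring WptreportFormatter into a mozlog StructuredLogger,
+# mirroring the pattern used by the formatter unit tests earlier in this patch.
+# The logger name, test id and statuses below are made up for illustration.
+#
+#   from io import StringIO
+#   import json
+#   from mozlog import handlers, structuredlog
+#
+#   logger = structuredlog.StructuredLogger("example-wpt-run")
+#   output = StringIO()
+#   logger.add_handler(handlers.StreamHandler(output, WptreportFormatter()))
+#
+#   logger.suite_start(["/dom/example.html"], run_info={"product": "firefox"})
+#   logger.test_start("/dom/example.html")
+#   logger.test_status("/dom/example.html", subtest="sub", status="PASS")
+#   logger.test_end("/dom/example.html", status="OK", expected="OK")
+#   logger.suite_end()
+#   logger.shutdown()
+#
+#   report = json.loads(output.getvalue())
+#   # report["results"] holds one {"test", "status", "subtests", ...} entry per test.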
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/wptscreenshot.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/wptscreenshot.py
new file mode 100644
index 0000000000..2b2d1ad49d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/formatters/wptscreenshot.py
@@ -0,0 +1,49 @@
+# mypy: allow-untyped-defs
+
+import requests
+from mozlog.structured.formatters.base import BaseFormatter
+
+DEFAULT_API = "https://wpt.fyi/api/screenshots/hashes"
+
+
+class WptscreenshotFormatter(BaseFormatter): # type: ignore
+ """Formatter that outputs screenshots in the format expected by wpt.fyi."""
+
+ def __init__(self, api=None):
+ self.api = api or DEFAULT_API
+ self.cache = set()
+
+ def suite_start(self, data):
+ # TODO(Hexcles): We might want to move the request into a different
+ # place, make it non-blocking, and handle errors better.
+ params = {}
+ run_info = data.get("run_info", {})
+ if "product" in run_info:
+ params["browser"] = run_info["product"]
+ if "browser_version" in run_info:
+ params["browser_version"] = run_info["browser_version"]
+ if "os" in run_info:
+ params["os"] = run_info["os"]
+ if "os_version" in run_info:
+ params["os_version"] = run_info["os_version"]
+ try:
+ r = requests.get(self.api, params=params)
+ r.raise_for_status()
+ self.cache = set(r.json())
+ except (requests.exceptions.RequestException, ValueError):
+ pass
+
+ def test_end(self, data):
+ if "reftest_screenshots" not in data.get("extra", {}):
+ return
+ output = ""
+ for item in data["extra"]["reftest_screenshots"]:
+ if type(item) != dict:
+ # Skip the relation string.
+ continue
+ checksum = "sha1:" + item["hash"]
+ if checksum in self.cache:
+ continue
+ self.cache.add(checksum)
+ output += "data:image/png;base64,{}\n".format(item["screenshot"])
+ return output if output else None
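+
+
+# --- Illustrative note (editorial addition, not part of this module) ---
+# For a test_end message that carries reftest_screenshots, the formatter emits
+# one data URI line per screenshot whose sha1 checksum is not already in the
+# cache seeded from the wpt.fyi API, e.g. (base64 payloads are made up):
+#
+#   data:image/png;base64,iVBORw0KGgoAAA...
+#   data:image/png;base64,iVBORw0KGgoBBB...
+#
+# Screenshots whose hashes are already known are skipped, so identical
+# references shared across tests are only emitted once per run.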
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/instruments.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/instruments.py
new file mode 100644
index 0000000000..26df5fa29b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/instruments.py
@@ -0,0 +1,121 @@
+# mypy: allow-untyped-defs
+
+import time
+import threading
+
+from . import mpcontext
+
+"""Instrumentation for measuring high-level time spent on various tasks inside the runner.
+
+This is lower fidelity than an actual profile, but allows custom data to be considered,
+so that we can see the time spent in specific tests and test directories.
+
+
+Instruments are intended to be used as context managers with the return value of __enter__
+containing the user-facing API e.g.
+
+with Instrument(*args) as recording:
+ recording.set(["init"])
+ do_init()
+ recording.pause()
+ for thread in test_threads:
+ thread.start(recording, *args)
+ for thread in test_threads:
+ thread.join()
+ recording.set(["teardown"]) # un-pauses the Instrument
+ do_teardown()
+"""
+
+class NullInstrument:
+ def set(self, stack):
+ """Set the current task to stack
+
+ :param stack: A list of strings defining the current task.
+ These are interpreted like a stack trace so that ["foo"] and
+ ["foo", "bar"] both show up as descendants of "foo"
+ """
+ pass
+
+ def pause(self):
+ """Stop recording a task on the current thread. This is useful if the thread
+ is purely waiting on the results of other threads"""
+ pass
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args, **kwargs):
+ return
+
+
+class InstrumentWriter:
+ def __init__(self, queue):
+ self.queue = queue
+
+ def set(self, stack):
+ stack.insert(0, threading.current_thread().name)
+ stack = self._check_stack(stack)
+ self.queue.put(("set", threading.current_thread().ident, time.time(), stack))
+
+ def pause(self):
+ self.queue.put(("pause", threading.current_thread().ident, time.time(), None))
+
+ def _check_stack(self, stack):
+ assert isinstance(stack, (tuple, list))
+ return [item.replace(" ", "_") for item in stack]
+
+
+class Instrument:
+ def __init__(self, file_path):
+ """Instrument that collects data from multiple threads and sums the time in each
+ thread. The output is in the format required by flamegraph.pl to enable visualisation
+ of the time spent in each task.
+
+ :param file_path: The path on which to write instrument output. Any existing
+ file at the path will be overwritten.
+ """
+ self.path = file_path
+ self.queue = None
+ self.current = None
+ self.start_time = None
+ self.instrument_proc = None
+
+ def __enter__(self):
+ assert self.instrument_proc is None
+ assert self.queue is None
+ mp = mpcontext.get_context()
+ self.queue = mp.Queue()
+ self.instrument_proc = mp.Process(target=self.run)
+ self.instrument_proc.start()
+ return InstrumentWriter(self.queue)
+
+ def __exit__(self, *args, **kwargs):
+ self.queue.put(("stop", None, time.time(), None))
+ self.instrument_proc.join()
+ self.instrument_proc = None
+ self.queue = None
+
+ def run(self):
+ known_commands = {"stop", "pause", "set"}
+ with open(self.path, "w") as f:
+ thread_data = {}
+ while True:
+ command, thread, time_stamp, stack = self.queue.get()
+ assert command in known_commands
+
+ # If we are done recording, dump the information from all threads to the file
+ # before exiting. Otherwise for either 'set' or 'pause' we only need to dump
+ # information from the current stack (if any) that was recording on the reporting
+ # thread (as that stack is no longer active).
+ items = []
+ if command == "stop":
+ items = thread_data.values()
+ elif thread in thread_data:
+ items.append(thread_data.pop(thread))
+ for output_stack, start_time in items:
+ f.write("%s %d\n" % (";".join(output_stack), int(1000 * (time_stamp - start_time))))
+
+ if command == "set":
+ thread_data[thread] = (stack, time_stamp)
+ elif command == "stop":
+ break
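+
+
+# --- Illustrative usage sketch (editorial addition, not part of this module) ---
+# A minimal example of recording instrumentation to a file; the output path,
+# task names and helper functions are made up for illustration. Each line that
+# Instrument.run() writes is "<semicolon-joined stack> <milliseconds>", the
+# folded-stack format consumed by flamegraph.pl, with the reporting thread's
+# name as the first frame.
+#
+#   with Instrument("/tmp/wpt-instruments.txt") as recording:
+#       recording.set(["startup"])
+#       do_startup()                      # hypothetical workload
+#       recording.set(["run", "/dom/example.html"])
+#       run_one_test()                    # hypothetical workload
+#       recording.pause()
+#
+#   # Example output line (timing is illustrative):
+#   #   MainThread;run;/dom/example.html 5210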
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/manifestexpected.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/manifestexpected.py
new file mode 100644
index 0000000000..0d92a48689
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/manifestexpected.py
@@ -0,0 +1,542 @@
+# mypy: allow-untyped-defs
+
+import os
+from collections import deque
+from urllib.parse import urljoin
+
+from .wptmanifest.backends import static
+from .wptmanifest.backends.base import ManifestItem
+
+from . import expected
+
+"""Manifest structure used to store expected results of a test.
+
+Each manifest file is represented by an ExpectedManifest that
+has one or more TestNode children, one per test in the manifest.
+Each TestNode has zero or more SubtestNode children, one for each
+known subtest of the test.
+"""
+
+
+def data_cls_getter(output_node, visited_node):
+ # visited_node is intentionally unused
+ if output_node is None:
+ return ExpectedManifest
+ if isinstance(output_node, ExpectedManifest):
+ return TestNode
+ if isinstance(output_node, TestNode):
+ return SubtestNode
+ raise ValueError
+
+
+def bool_prop(name, node):
+ """Boolean property"""
+ try:
+ return bool(node.get(name))
+ except KeyError:
+ return None
+
+
+def int_prop(name, node):
+ """Integer property"""
+ try:
+ return int(node.get(name))
+ except KeyError:
+ return None
+
+
+def list_prop(name, node):
+ """List property"""
+ try:
+ list_prop = node.get(name)
+ if isinstance(list_prop, str):
+ return [list_prop]
+ return list(list_prop)
+ except KeyError:
+ return []
+
+
+def str_prop(name, node):
+ try:
+ prop = node.get(name)
+ if not isinstance(prop, str):
+ raise ValueError
+ return prop
+ except KeyError:
+ return None
+
+
+def tags(node):
+ """Set of tags that have been applied to the test"""
+ try:
+ value = node.get("tags")
+ if isinstance(value, str):
+ return {value}
+ return set(value)
+ except KeyError:
+ return set()
+
+
+def prefs(node):
+ def value(ini_value):
+ if isinstance(ini_value, str):
+ return tuple(pref_piece.strip() for pref_piece in ini_value.split(':', 1))
+ else:
+ # this should be things like @Reset, which are apparently type 'object'
+ return (ini_value, None)
+
+ try:
+ node_prefs = node.get("prefs")
+ if isinstance(node_prefs, str):
+ rv = dict(value(node_prefs))
+ else:
+ rv = dict(value(item) for item in node_prefs)
+ except KeyError:
+ rv = {}
+ return rv
+
+
+def set_prop(name, node):
+ try:
+ node_items = node.get(name)
+ if isinstance(node_items, str):
+ rv = {node_items}
+ else:
+ rv = set(node_items)
+ except KeyError:
+ rv = set()
+ return rv
+
+
+def leak_threshold(node):
+ rv = {}
+ try:
+ node_items = node.get("leak-threshold")
+ if isinstance(node_items, str):
+ node_items = [node_items]
+ for item in node_items:
+ process, value = item.rsplit(":", 1)
+ rv[process.strip()] = int(value.strip())
+ except KeyError:
+ pass
+ return rv
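+
+# Editorial note: for illustration, a "leak-threshold" value such as
+# ["default:1048576", "tab:2048"] parses to {"default": 1048576, "tab": 2048}.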
+
+
+def fuzzy_prop(node):
+ """Fuzzy reftest match
+
+ This can either be a list of strings or a single string. When a list is
+ supplied, the format of each item matches the description below.
+
+ The general format is
+ fuzzy = [key ":"] <prop> ";" <prop>
+ key = <test name> [reftype <reference name>]
+ reftype = "==" | "!="
+ prop = [propName "=" ] range
+ propName = "maxDifference" | "totalPixels"
+ range = <digits> ["-" <digits>]
+
+ So for example:
+ maxDifference=10;totalPixels=10-20
+
+ specifies that for any test/ref pair for which no other rule is supplied,
+ there must be a maximum pixel difference of exactly 10, and between 10 and
+ 20 total pixels different.
+
+ test.html==ref.htm:10;20
+
+ specifies that for an equality comparison between test.html and ref.htm,
+ resolved relative to the test path, there can be a maximum difference
+ of 10 in the pixel value for any channel and 20 pixels total difference.
+
+ ref.html:10;20
+
+ is just like the above but applies to any comparison involving ref.html
+ on the right hand side.
+
+ The return format is [(key, (maxDifferenceRange, totalPixelsRange))], where
+ the key is either None (no specific reference specified), the reference
+ name (when there is only one component), or a tuple (test, ref, reftype) when the
+ exact comparison is specified. maxDifferenceRange and totalPixelsRange are tuples
+ of integers indicating the inclusive range of allowed values.
+"""
+ rv = []
+ args = ["maxDifference", "totalPixels"]
+ try:
+ value = node.get("fuzzy")
+ except KeyError:
+ return rv
+ if not isinstance(value, list):
+ value = [value]
+ for item in value:
+ if not isinstance(item, str):
+ rv.append(item)
+ continue
+ parts = item.rsplit(":", 1)
+ if len(parts) == 1:
+ key = None
+ fuzzy_values = parts[0]
+ else:
+ key, fuzzy_values = parts
+ for reftype in ["==", "!="]:
+ if reftype in key:
+ key = key.split(reftype)
+ key.append(reftype)
+ key = tuple(key)
+ ranges = fuzzy_values.split(";")
+ if len(ranges) != 2:
+ raise ValueError("Malformed fuzzy value %s" % item)
+ arg_values = {None: deque()}
+ for range_str_value in ranges:
+ if "=" in range_str_value:
+ name, range_str_value = (part.strip()
+ for part in range_str_value.split("=", 1))
+ if name not in args:
+ raise ValueError("%s is not a valid fuzzy property" % name)
+ if arg_values.get(name):
+ raise ValueError("Got multiple values for argument %s" % name)
+ else:
+ name = None
+ if "-" in range_str_value:
+ range_min, range_max = range_str_value.split("-")
+ else:
+ range_min = range_str_value
+ range_max = range_str_value
+ try:
+ range_value = tuple(int(item.strip()) for item in (range_min, range_max))
+ except ValueError:
+ raise ValueError("Fuzzy value %s must be a range of integers" % range_str_value)
+ if name is None:
+ arg_values[None].append(range_value)
+ else:
+ arg_values[name] = range_value
+ range_values = []
+ for arg_name in args:
+ if arg_values.get(arg_name):
+ value = arg_values.pop(arg_name)
+ else:
+ value = arg_values[None].popleft()
+ range_values.append(value)
+ rv.append((key, tuple(range_values)))
+ return rv
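+
+# Editorial note: worked examples of the parsing above (the file names are
+# illustrative). Conceptually, a "fuzzy" value maps to the return format as:
+#
+#   "10;20-30"                        -> [(None, ((10, 10), (20, 30)))]
+#   "maxDifference=5;totalPixels=200" -> [(None, ((5, 5), (200, 200)))]
+#   "test.html==ref.htm:10;20"        -> [(("test.html", "ref.htm", "=="),
+#                                          ((10, 10), (20, 20)))]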
+
+
+class ExpectedManifest(ManifestItem):
+ def __init__(self, node, test_path, url_base):
+ """Object representing all the tests in a particular manifest
+
+ :param node: AST Node associated with this object. Its data (the name)
+ should always be None, since this object is always associated with
+ the root node of the AST.
+ :param test_path: Path of the test file associated with this manifest.
+ :param url_base: Base url for serving the tests in this manifest
+ """
+ name = node.data
+ if name is not None:
+ raise ValueError("ExpectedManifest should represent the root node")
+ if test_path is None:
+ raise ValueError("ExpectedManifest requires a test path")
+ if url_base is None:
+ raise ValueError("ExpectedManifest requires a base url")
+ ManifestItem.__init__(self, node)
+ self.child_map = {}
+ self.test_path = test_path
+ self.url_base = url_base
+
+ def append(self, child):
+ """Add a test to the manifest"""
+ ManifestItem.append(self, child)
+ self.child_map[child.id] = child
+
+ def _remove_child(self, child):
+ del self.child_map[child.id]
+ ManifestItem.remove_child(self, child)
+ assert len(self.child_map) == len(self.children)
+
+ def get_test(self, test_id):
+ """Get a test from the manifest by ID
+
+ :param test_id: ID of the test to return."""
+ return self.child_map.get(test_id)
+
+ @property
+ def url(self):
+ return urljoin(self.url_base,
+ "/".join(self.test_path.split(os.path.sep)))
+
+ @property
+ def disabled(self):
+ return bool_prop("disabled", self)
+
+ @property
+ def restart_after(self):
+ return bool_prop("restart-after", self)
+
+ @property
+ def leaks(self):
+ return bool_prop("leaks", self)
+
+ @property
+ def min_assertion_count(self):
+ return int_prop("min-asserts", self)
+
+ @property
+ def max_assertion_count(self):
+ return int_prop("max-asserts", self)
+
+ @property
+ def tags(self):
+ return tags(self)
+
+ @property
+ def prefs(self):
+ return prefs(self)
+
+ @property
+ def lsan_disabled(self):
+ return bool_prop("lsan-disabled", self)
+
+ @property
+ def lsan_allowed(self):
+ return set_prop("lsan-allowed", self)
+
+ @property
+ def leak_allowed(self):
+ return set_prop("leak-allowed", self)
+
+ @property
+ def leak_threshold(self):
+ return leak_threshold(self)
+
+ @property
+ def lsan_max_stack_depth(self):
+ return int_prop("lsan-max-stack-depth", self)
+
+ @property
+ def fuzzy(self):
+ return fuzzy_prop(self)
+
+ @property
+ def expected(self):
+ return list_prop("expected", self)[0]
+
+ @property
+ def known_intermittent(self):
+ return list_prop("expected", self)[1:]
+
+ @property
+ def implementation_status(self):
+ return str_prop("implementation-status", self)
+
+
+class DirectoryManifest(ManifestItem):
+ @property
+ def disabled(self):
+ return bool_prop("disabled", self)
+
+ @property
+ def restart_after(self):
+ return bool_prop("restart-after", self)
+
+ @property
+ def leaks(self):
+ return bool_prop("leaks", self)
+
+ @property
+ def min_assertion_count(self):
+ return int_prop("min-asserts", self)
+
+ @property
+ def max_assertion_count(self):
+ return int_prop("max-asserts", self)
+
+ @property
+ def tags(self):
+ return tags(self)
+
+ @property
+ def prefs(self):
+ return prefs(self)
+
+ @property
+ def lsan_disabled(self):
+ return bool_prop("lsan-disabled", self)
+
+ @property
+ def lsan_allowed(self):
+ return set_prop("lsan-allowed", self)
+
+ @property
+ def leak_allowed(self):
+ return set_prop("leak-allowed", self)
+
+ @property
+ def leak_threshold(self):
+ return leak_threshold(self)
+
+ @property
+ def lsan_max_stack_depth(self):
+ return int_prop("lsan-max-stack-depth", self)
+
+ @property
+ def fuzzy(self):
+ return fuzzy_prop(self)
+
+ @property
+ def implementation_status(self):
+ return str_prop("implementation-status", self)
+
+
+class TestNode(ManifestItem):
+ def __init__(self, node, **kwargs):
+ """Tree node associated with a particular test in a manifest
+
+ :param node: AST node associated with the test (its data is the test name)"""
+ assert node.data is not None
+ ManifestItem.__init__(self, node, **kwargs)
+ self.updated_expected = []
+ self.new_expected = []
+ self.subtests = {}
+ self.default_status = None
+ self._from_file = True
+
+ @property
+ def is_empty(self):
+ required_keys = {"type"}
+ if set(self._data.keys()) != required_keys:
+ return False
+ return all(child.is_empty for child in self.children)
+
+ @property
+ def test_type(self):
+ return self.get("type")
+
+ @property
+ def id(self):
+ return urljoin(self.parent.url, self.name)
+
+ @property
+ def disabled(self):
+ return bool_prop("disabled", self)
+
+ @property
+ def restart_after(self):
+ return bool_prop("restart-after", self)
+
+ @property
+ def leaks(self):
+ return bool_prop("leaks", self)
+
+ @property
+ def min_assertion_count(self):
+ return int_prop("min-asserts", self)
+
+ @property
+ def max_assertion_count(self):
+ return int_prop("max-asserts", self)
+
+ @property
+ def tags(self):
+ return tags(self)
+
+ @property
+ def prefs(self):
+ return prefs(self)
+
+ @property
+ def lsan_disabled(self):
+ return bool_prop("lsan-disabled", self)
+
+ @property
+ def lsan_allowed(self):
+ return set_prop("lsan-allowed", self)
+
+ @property
+ def leak_allowed(self):
+ return set_prop("leak-allowed", self)
+
+ @property
+ def leak_threshold(self):
+ return leak_threshold(self)
+
+ @property
+ def lsan_max_stack_depth(self):
+ return int_prop("lsan-max-stack-depth", self)
+
+ @property
+ def fuzzy(self):
+ return fuzzy_prop(self)
+
+ @property
+ def expected(self):
+ return list_prop("expected", self)[0]
+
+ @property
+ def known_intermittent(self):
+ return list_prop("expected", self)[1:]
+
+ @property
+ def implementation_status(self):
+ return str_prop("implementation-status", self)
+
+ def append(self, node):
+ """Add a subtest to the current test
+
+ :param node: AST Node associated with the subtest"""
+ child = ManifestItem.append(self, node)
+ self.subtests[child.name] = child
+
+ def get_subtest(self, name):
+ """Get the SubtestNode corresponding to a particular subtest, by name
+
+ :param name: Name of the node to return"""
+ if name in self.subtests:
+ return self.subtests[name]
+ return None
+
+
+class SubtestNode(TestNode):
+ @property
+ def is_empty(self):
+ if self._data:
+ return False
+ return True
+
+
+def get_manifest(metadata_root, test_path, url_base, run_info):
+ """Get the ExpectedManifest for a particular test path, or None if there is no
+ metadata stored for that test path.
+
+ :param metadata_root: Absolute path to the root of the metadata directory
+ :param test_path: Path to the test(s) relative to the test root
+ :param url_base: Base url for serving the tests in this manifest
+ :param run_info: Dictionary of properties of the test run for which the expectation
+ values should be computed.
+ """
+ manifest_path = expected.expected_path(metadata_root, test_path)
+ try:
+ with open(manifest_path, "rb") as f:
+ return static.compile(f,
+ run_info,
+ data_cls_getter=data_cls_getter,
+ test_path=test_path,
+ url_base=url_base)
+ except OSError:
+ return None
+
+
+def get_dir_manifest(path, run_info):
+ """Get the ExpectedManifest for a particular test path, or None if there is no
+ metadata stored for that test path.
+
+ :param path: Full path to the ini file
+ :param run_info: Dictionary of properties of the test run for which the expectation
+ values should be computed.
+ """
+ try:
+ with open(path, "rb") as f:
+ return static.compile(f,
+ run_info,
+ data_cls_getter=lambda x,y: DirectoryManifest)
+ except OSError:
+ return None
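+
+
+# --- Illustrative usage sketch (editorial addition, not part of this module) ---
+# A minimal example of looking up expectation metadata for one test; every
+# path, URL and run_info value below is made up for illustration.
+#
+#   manifest = get_manifest("/checkout/testing/web-platform/meta",
+#                           "dom/historical.html",
+#                           "/",
+#                           {"os": "linux", "debug": False})
+#   if manifest is not None:
+#       test = manifest.get_test("/dom/historical.html")
+#       if test is not None:
+#           print(test.disabled, test.tags)
+#           # test.expected / test.known_intermittent expose the expected
+#           # statuses when an "expected" key is present in the .ini file.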
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/manifestinclude.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/manifestinclude.py
new file mode 100644
index 0000000000..89031d8fb0
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/manifestinclude.py
@@ -0,0 +1,156 @@
+# mypy: allow-untyped-defs
+
+"""Manifest structure used to store paths that should be included in a test run.
+
+The manifest is represented by a tree of IncludeManifest objects, the root
+representing the file and each subnode representing a subdirectory that should
+be included or excluded.
+"""
+import glob
+import os
+from urllib.parse import urlparse, urlsplit
+
+from .wptmanifest.node import DataNode
+from .wptmanifest.backends import conditional
+from .wptmanifest.backends.conditional import ManifestItem
+
+
+class IncludeManifest(ManifestItem):
+ def __init__(self, node):
+ """Node in a tree structure representing the paths
+ that should be included or excluded from the test run.
+
+ :param node: AST Node corresponding to this Node.
+ """
+ ManifestItem.__init__(self, node)
+ self.child_map = {}
+
+ @classmethod
+ def create(cls):
+ """Create an empty IncludeManifest tree"""
+ node = DataNode(None)
+ return cls(node)
+
+ def set_defaults(self):
+ if not self.has_key("skip"):
+ self.set("skip", "False")
+
+ def append(self, child):
+ ManifestItem.append(self, child)
+ self.child_map[child.name] = child
+ assert len(self.child_map) == len(self.children)
+
+ def include(self, test):
+ """Return a boolean indicating whether a particular test should be
+ included in a test run, based on the IncludeManifest tree rooted on
+ this object.
+
+ :param test: The test object"""
+ path_components = self._get_components(test.url)
+ return self._include(test, path_components)
+
+ def _include(self, test, path_components):
+ if path_components:
+ next_path_part = path_components.pop()
+ if next_path_part in self.child_map:
+ return self.child_map[next_path_part]._include(test, path_components)
+
+ node = self
+ while node:
+ try:
+ skip_value = self.get("skip", {"test_type": test.item_type}).lower()
+ assert skip_value in ("true", "false")
+ return skip_value != "true"
+ except KeyError:
+ if node.parent is not None:
+ node = node.parent
+ else:
+ # Include by default
+ return True
+
+ def _get_components(self, url):
+ rv = []
+ url_parts = urlsplit(url)
+ variant = ""
+ if url_parts.query:
+ variant += "?" + url_parts.query
+ if url_parts.fragment:
+ variant += "#" + url_parts.fragment
+ if variant:
+ rv.append(variant)
+ rv.extend([item for item in reversed(url_parts.path.split("/")) if item])
+ return rv
+
+ def _add_rule(self, test_manifests, url, direction):
+ maybe_path = os.path.join(os.path.abspath(os.curdir), url)
+ rest, last = os.path.split(maybe_path)
+ fragment = query = None
+ if "#" in last:
+ last, fragment = last.rsplit("#", 1)
+ if "?" in last:
+ last, query = last.rsplit("?", 1)
+
+ maybe_path = os.path.join(rest, last)
+ paths = glob.glob(maybe_path)
+
+ if paths:
+ urls = []
+ for path in paths:
+ for manifest, data in test_manifests.items():
+ found = False
+ rel_path = os.path.relpath(path, data["tests_path"])
+ iterator = manifest.iterpath if os.path.isfile(path) else manifest.iterdir
+ for test in iterator(rel_path):
+ if not hasattr(test, "url"):
+ continue
+ url = test.url
+ if query or fragment:
+ parsed = urlparse(url)
+ if ((query and query != parsed.query) or
+ (fragment and fragment != parsed.fragment)):
+ continue
+ urls.append(url)
+ found = True
+ if found:
+ break
+ else:
+ urls = [url]
+
+ assert direction in ("include", "exclude")
+
+ for url in urls:
+ components = self._get_components(url)
+
+ node = self
+ while components:
+ component = components.pop()
+ if component not in node.child_map:
+ new_node = IncludeManifest(DataNode(component))
+ node.append(new_node)
+ new_node.set("skip", node.get("skip", {}))
+
+ node = node.child_map[component]
+
+ skip = False if direction == "include" else True
+ node.set("skip", str(skip))
+
+ def add_include(self, test_manifests, url_prefix):
+ """Add a rule indicating that tests under a url path
+ should be included in test runs
+
+ :param url_prefix: The url prefix to include
+ """
+ return self._add_rule(test_manifests, url_prefix, "include")
+
+ def add_exclude(self, test_manifests, url_prefix):
+ """Add a rule indicating that tests under a url path
+ should be excluded from test runs
+
+ :param url_prefix: The url prefix to exclude
+ """
+ return self._add_rule(test_manifests, url_prefix, "exclude")
+
+
+def get_manifest(manifest_path):
+ with open(manifest_path, "rb") as f:
+ return conditional.compile(f, data_cls_getter=lambda x, y: IncludeManifest)
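+
+
+# --- Illustrative usage sketch (editorial addition, not part of this module) ---
+# A minimal example of the include/exclude tree. The FakeTest stand-in and its
+# URL are made up for illustration; real wptrunner test objects expose at least
+# `url` and `item_type`, which is all include() relies on here.
+#
+#   from collections import namedtuple
+#   FakeTest = namedtuple("FakeTest", ["url", "item_type"])
+#
+#   manifest = IncludeManifest.create()
+#   manifest.set_defaults()          # nothing is skipped by default
+#   manifest.include(FakeTest("/dom/example.html", "testharness"))  # -> True
+#
+# Paths added with add_exclude() get skip=True on their subtree, so include()
+# then returns False for tests underneath them.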
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/manifestupdate.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/manifestupdate.py
new file mode 100644
index 0000000000..ce12bc3370
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/manifestupdate.py
@@ -0,0 +1,967 @@
+# mypy: allow-untyped-defs
+
+import os
+from urllib.parse import urljoin, urlsplit
+from collections import namedtuple, defaultdict, deque
+from math import ceil
+from typing import Any, Callable, ClassVar, Dict, List
+
+from .wptmanifest import serialize
+from .wptmanifest.node import (DataNode, ConditionalNode, BinaryExpressionNode,
+ BinaryOperatorNode, NumberNode, StringNode, VariableNode,
+ ValueNode, UnaryExpressionNode, UnaryOperatorNode,
+ ListNode)
+from .wptmanifest.backends import conditional
+from .wptmanifest.backends.conditional import ManifestItem
+
+from . import expected
+from . import expectedtree
+
+"""Manifest structure used to update the expected results of a test
+
+Each manifest file is represented by an ExpectedManifest that has one
+or more TestNode children, one per test in the manifest. Each
+TestNode has zero or more SubtestNode children, one for each known
+subtest of the test.
+
+In these representations, conditional expressions in the manifest are
+not evaluated upfront but stored as Python functions to be evaluated
+at runtime.
+
+When a result for a test is to be updated, set_result on the
+[Sub]TestNode is called to store the new result, alongside the
+existing conditional that result's run info matched, if any. Once all
+new results are known, update is called to compute the new
+set of results and conditionals. The AST of the underlying parsed manifest
+is updated with the changes, and the result is serialised to a file.
+"""
+
+
+class ConditionError(Exception):
+ def __init__(self, cond=None):
+ self.cond = cond
+
+
+class UpdateError(Exception):
+ pass
+
+
+Value = namedtuple("Value", ["run_info", "value"])
+
+
+def data_cls_getter(output_node, visited_node):
+ # visited_node is intentionally unused
+ if output_node is None:
+ return ExpectedManifest
+ elif isinstance(output_node, ExpectedManifest):
+ return TestNode
+ elif isinstance(output_node, TestNode):
+ return SubtestNode
+ else:
+ raise ValueError
+
+
+class UpdateProperties:
+ def __init__(self, manifest, **kwargs):
+ self._manifest = manifest
+ self._classes = kwargs
+
+ def __getattr__(self, name):
+ if name in self._classes:
+ rv = self._classes[name](self._manifest)
+ setattr(self, name, rv)
+ return rv
+ raise AttributeError
+
+ def __contains__(self, name):
+ return name in self._classes
+
+ def __iter__(self):
+ for name in self._classes.keys():
+ yield getattr(self, name)
+
+
+class ExpectedManifest(ManifestItem):
+ def __init__(self, node, test_path, url_base, run_info_properties,
+ update_intermittent=False, remove_intermittent=False):
+ """Object representing all the tests in a particular manifest
+
+ :param node: AST Node associated with this object. If this is None,
+ a new AST is created to associate with this manifest.
+ :param test_path: Path of the test file associated with this manifest.
+ :param url_base: Base url for serving the tests in this manifest.
+ :param run_info_properties: Tuple of ([property name],
+ {property_name: [dependent property]})
+ The first part lists run_info properties
+ that are always used in the update, the second
+ maps property names to additional properties that
+ can be considered if we already have a condition on
+ the key property e.g. {"foo": ["bar"]} means that
+ we consider making conditions on bar only after we
+ already made one on foo.
+ :param update_intermittent: When True, intermittent statuses will be recorded
+ as `expected` in the test metadata.
+ :param remove_intermittent: When True, old intermittent statuses will be removed
+ if no longer intermittent. This is only relevant if
+ `update_intermittent` is also True; if False,
+ the metadata will simply update one `expected` status.
+ """
+ if node is None:
+ node = DataNode(None)
+ ManifestItem.__init__(self, node)
+ self.child_map = {}
+ self.test_path = test_path
+ self.url_base = url_base
+ assert self.url_base is not None
+ self._modified = False
+ self.run_info_properties = run_info_properties
+ self.update_intermittent = update_intermittent
+ self.remove_intermittent = remove_intermittent
+ self.update_properties = UpdateProperties(self, **{
+ "lsan": LsanUpdate,
+ "leak_object": LeakObjectUpdate,
+ "leak_threshold": LeakThresholdUpdate,
+ })
+
+ @property
+ def modified(self):
+ if self._modified:
+ return True
+ return any(item.modified for item in self.children)
+
+ @modified.setter
+ def modified(self, value):
+ self._modified = value
+
+ def append(self, child):
+ ManifestItem.append(self, child)
+ if child.id in self.child_map:
+ print("Warning: Duplicate heading %s" % child.id)
+ self.child_map[child.id] = child
+
+ def _remove_child(self, child):
+ del self.child_map[child.id]
+ ManifestItem._remove_child(self, child)
+
+ def get_test(self, test_id):
+ """Return a TestNode by test id, or None if no test matches
+
+ :param test_id: The id of the test to look up"""
+
+ return self.child_map.get(test_id)
+
+ def has_test(self, test_id):
+ """Boolean indicating whether the current test has a known child test
+ with id test id
+
+ :param test_id: The id of the test to look up"""
+
+ return test_id in self.child_map
+
+ @property
+ def url(self):
+ return urljoin(self.url_base,
+ "/".join(self.test_path.split(os.path.sep)))
+
+ def set_lsan(self, run_info, result):
+ """Set the result of the test in a particular run
+
+ :param run_info: Dictionary of run_info parameters corresponding
+ to this run
+ :param result: Lsan violations detected"""
+ self.update_properties.lsan.set(run_info, result)
+
+ def set_leak_object(self, run_info, result):
+ """Set the result of the test in a particular run
+
+ :param run_info: Dictionary of run_info parameters corresponding
+ to this run
+ :param result: Leaked objects detected"""
+ self.update_properties.leak_object.set(run_info, result)
+
+ def set_leak_threshold(self, run_info, result):
+ """Set the result of the test in a particular run
+
+ :param run_info: Dictionary of run_info parameters corresponding
+ to this run
+ :param result: Total number of bytes leaked"""
+ self.update_properties.leak_threshold.set(run_info, result)
+
+ def update(self, full_update, disable_intermittent):
+ for prop_update in self.update_properties:
+ prop_update.update(full_update,
+ disable_intermittent)
+
+
+class TestNode(ManifestItem):
+ def __init__(self, node):
+ """Tree node associated with a particular test in a manifest
+
+ :param node: AST node associated with the test"""
+
+ ManifestItem.__init__(self, node)
+ self.subtests = {}
+ self._from_file = True
+ self.new_disabled = False
+ self.has_result = False
+ self._modified = False
+ self.update_properties = UpdateProperties(
+ self,
+ expected=ExpectedUpdate,
+ max_asserts=MaxAssertsUpdate,
+ min_asserts=MinAssertsUpdate
+ )
+
+ @classmethod
+ def create(cls, test_id):
+ """Create a TestNode corresponding to a given test
+
+ :param test_id: The id of the test"""
+ name = test_id[len(urlsplit(test_id).path.rsplit("/", 1)[0]) + 1:]
+ node = DataNode(name)
+ self = cls(node)
+
+ self._from_file = False
+ return self
+
+ @property
+ def is_empty(self):
+ ignore_keys = {"type"}
+ if set(self._data.keys()) - ignore_keys:
+ return False
+ return all(child.is_empty for child in self.children)
+
+ @property
+ def test_type(self):
+ """The type of the test represented by this TestNode"""
+ return self.get("type", None)
+
+ @property
+ def id(self):
+ """The id of the test represented by this TestNode"""
+ return urljoin(self.parent.url, self.name)
+
+ @property
+ def modified(self):
+ if self._modified:
+ return self._modified
+ return any(child.modified for child in self.children)
+
+ @modified.setter
+ def modified(self, value):
+ self._modified = value
+
+ def disabled(self, run_info):
+ """Boolean indicating whether this test is disabled when run in an
+ environment with the given run_info
+
+ :param run_info: Dictionary of run_info parameters"""
+
+ return self.get("disabled", run_info) is not None
+
+ def set_result(self, run_info, result):
+ """Set the result of the test in a particular run
+
+ :param run_info: Dictionary of run_info parameters corresponding
+ to this run
+ :param result: Status of the test in this run"""
+ self.update_properties.expected.set(run_info, result)
+
+ def set_asserts(self, run_info, count):
+ """Set the assert count of a test in a particular run."""
+ self.update_properties.min_asserts.set(run_info, count)
+ self.update_properties.max_asserts.set(run_info, count)
+
+ def append(self, node):
+ child = ManifestItem.append(self, node)
+ self.subtests[child.name] = child
+
+ def get_subtest(self, name):
+ """Return a SubtestNode corresponding to a particular subtest of
+ the current test, creating a new one if no subtest with that name
+ already exists.
+
+ :param name: Name of the subtest"""
+
+ if name in self.subtests:
+ return self.subtests[name]
+ else:
+ subtest = SubtestNode.create(name)
+ self.append(subtest)
+ return subtest
+
+ def update(self, full_update, disable_intermittent):
+ for prop_update in self.update_properties:
+ prop_update.update(full_update,
+ disable_intermittent)
+
+
+class SubtestNode(TestNode):
+ def __init__(self, node):
+ assert isinstance(node, DataNode)
+ TestNode.__init__(self, node)
+
+ @classmethod
+ def create(cls, name):
+ node = DataNode(name)
+ self = cls(node)
+ return self
+
+ @property
+ def is_empty(self):
+ if self._data:
+ return False
+ return True
+
+
+def build_conditional_tree(_, run_info_properties, results):
+ properties, dependent_props = run_info_properties
+ return expectedtree.build_tree(properties, dependent_props, results)
+
+
+def build_unconditional_tree(_, run_info_properties, results):
+ root = expectedtree.Node(None, None)
+ for run_info, values in results.items():
+ for value, count in values.items():
+ root.result_values[value] += count
+ root.run_info.add(run_info)
+ return root
+
+
+class PropertyUpdate:
+ property_name = None # type: ClassVar[str]
+ cls_default_value = None # type: ClassVar[Any]
+ value_type = None # type: ClassVar[type]
+ # property_builder is a class variable set to either build_conditional_tree
+ # or build_unconditional_tree. TODO: Make this type stricter when those
+ # methods are annotated.
+ property_builder = None # type: ClassVar[Callable[..., Any]]
+
+ def __init__(self, node):
+ self.node = node
+ self.default_value = self.cls_default_value
+ self.has_result = False
+ self.results = defaultdict(lambda: defaultdict(int))
+ self.update_intermittent = self.node.root.update_intermittent
+ self.remove_intermittent = self.node.root.remove_intermittent
+
+ def run_info_by_condition(self, run_info_index, conditions):
+ run_info_by_condition = defaultdict(list)
+ # A condition might match 0 or more run_info values
+ run_infos = run_info_index.keys()
+ for cond in conditions:
+ for run_info in run_infos:
+ if cond(run_info):
+ run_info_by_condition[cond].append(run_info)
+
+ return run_info_by_condition
+
+ def set(self, run_info, value):
+ self.has_result = True
+ self.node.has_result = True
+ self.check_default(value)
+ value = self.from_result_value(value)
+ self.results[run_info][value] += 1
+
+ def check_default(self, result):
+ return
+
+ def from_result_value(self, value):
+ """Convert a value from a test result into the internal format"""
+ return value
+
+ def from_ini_value(self, value):
+ """Convert a value from an ini file into the internal format"""
+ if self.value_type:
+ return self.value_type(value)
+ return value
+
+ def to_ini_value(self, value):
+ """Convert a value from the internal format to the ini file format"""
+ return str(value)
+
+ def updated_value(self, current, new):
+ """Given a single current value and a set of observed new values,
+ compute an updated value for the property"""
+ return new
+
+ @property
+ def unconditional_value(self):
+ try:
+ unconditional_value = self.from_ini_value(
+ self.node.get(self.property_name))
+ except KeyError:
+ unconditional_value = self.default_value
+ return unconditional_value
+
+ def update(self,
+ full_update=False,
+ disable_intermittent=None):
+ """Update the underlying manifest AST for this test based on all the
+ added results.
+
+ This will update existing conditionals if they got the same result in
+ all matching runs in the updated results, will delete existing conditionals
+ that get more than one different result in the updated run, and add new
+ conditionals for anything that doesn't match an existing conditional.
+
+ Conditionals not matched by any added result are not changed.
+
+ When `disable_intermittent` is not None, disable any test that shows multiple
+ unexpected results for the same set of parameters.
+ """
+ if not self.has_result:
+ return
+
+ property_tree = self.property_builder(self.node.root.run_info_properties,
+ self.results)
+
+ conditions, errors = self.update_conditions(property_tree,
+ full_update)
+
+ for e in errors:
+ if disable_intermittent:
+ condition = e.cond.children[0] if e.cond else None
+ msg = disable_intermittent if isinstance(disable_intermittent, str) else "unstable"
+ self.node.set("disabled", msg, condition)
+ self.node.new_disabled = True
+ else:
+ msg = "Conflicting metadata values for %s" % (
+ self.node.root.test_path)
+ if e.cond:
+ msg += ": %s" % serialize(e.cond).strip()
+ print(msg)
+
+ # If all the values match remove all conditionals
+ # This handles the case where we update a number of existing conditions and they
+ # all end up looking like the post-update default.
+ new_default = self.default_value
+ if conditions and conditions[-1][0] is None:
+ new_default = conditions[-1][1]
+ if all(condition[1] == new_default for condition in conditions):
+ conditions = [(None, new_default)]
+
+ # Don't set the default to the class default
+ if (conditions and
+ conditions[-1][0] is None and
+ conditions[-1][1] == self.default_value):
+ self.node.modified = True
+ conditions = conditions[:-1]
+
+ if self.node.modified:
+ self.node.clear(self.property_name)
+
+ for condition, value in conditions:
+ self.node.set(self.property_name,
+ self.to_ini_value(value),
+ condition)
+
+ def update_conditions(self,
+ property_tree,
+ full_update):
+ # This is complicated because the expected behaviour is complex
+ # The complexity arises from the fact that there are two ways of running
+ # the tool, with a full set of runs (full_update=True) or with partial metadata
+ # (full_update=False). In the case of a full update things are relatively simple:
+ # * All existing conditionals are ignored, with the exception of conditionals that
+ # depend on variables not used by the updater, which are retained as-is
+ # * All created conditionals are independent of each other (i.e. order isn't
+ # important in the created conditionals)
+ # In the case where we don't have a full set of runs, the expected behaviour
+ # is much less clear. This is of course the common case for when a developer
+ # runs the test on their own machine. In this case the assumptions above are untrue
+ # * The existing conditions may be required to handle other platforms
+ # * The order of the conditions may be important, since we don't know if they overlap
+ # e.g. `if os == linux and version == 18.04` overlaps with `if (os != win)`.
+ # So in the case we have a full set of runs, the process is pretty simple:
+ # * Generate the conditionals for the property_tree
+ # * Pick the most common value as the default and add only those conditions
+ # not matching the default
+        # In the case where we have a partial set of runs, things are more complex
+        # and handled on a best-effort basis:
+ # * For each existing conditional, see if it matches any of the run info we
+ # have. In cases where it does match, record the new results
+ # * Where all the new results match, update the right hand side of that
+ # conditional, otherwise remove it
+ # * If this leaves nothing existing, then proceed as with the full update
+ # * Otherwise add conditionals for the run_info that doesn't match any
+ # remaining conditions
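+        # Illustrative example (hypothetical metadata): if the existing file has a
+        # default of FAIL plus a conditional `if os == "win": TIMEOUT`, and the new
+        # (partial) results only contain Linux runs, the win conditional matches no
+        # new run_info and is retained as-is; the Linux results are handled by
+        # build_tree_conditions below, with FAIL passed along as prev_default.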
+ prev_default = None
+
+ current_conditions = self.node.get_conditions(self.property_name)
+
+ # Ignore the current default value
+ if current_conditions and current_conditions[-1].condition_node is None:
+ self.node.modified = True
+ prev_default = current_conditions[-1].value
+ current_conditions = current_conditions[:-1]
+
+ # If there aren't any current conditions, or there is just a default
+ # value for all run_info, proceed as for a full update
+ if not current_conditions:
+ return self._update_conditions_full(property_tree,
+ prev_default=prev_default)
+
+ conditions = []
+ errors = []
+
+ run_info_index = {run_info: node
+ for node in property_tree
+ for run_info in node.run_info}
+
+ node_by_run_info = {run_info: node
+ for (run_info, node) in run_info_index.items()
+ if node.result_values}
+
+ run_info_by_condition = self.run_info_by_condition(run_info_index,
+ current_conditions)
+
+ run_info_with_condition = set()
+
+ if full_update:
+ # Even for a full update we need to keep hand-written conditions not
+ # using the properties we've specified and not matching any run_info
+ top_level_props, dependent_props = self.node.root.run_info_properties
+ update_properties = set(top_level_props)
+ for item in dependent_props.values():
+ update_properties |= set(item)
+ for condition in current_conditions:
+ if (not condition.variables.issubset(update_properties) and
+ not run_info_by_condition[condition]):
+ conditions.append((condition.condition_node,
+ self.from_ini_value(condition.value)))
+
+ new_conditions, errors = self._update_conditions_full(property_tree,
+ prev_default=prev_default)
+ conditions.extend(new_conditions)
+ return conditions, errors
+
+ # Retain existing conditions if they match the updated values
+ for condition in current_conditions:
+ # All run_info that isn't handled by some previous condition
+ all_run_infos_condition = run_info_by_condition[condition]
+ run_infos = {item for item in all_run_infos_condition
+ if item not in run_info_with_condition}
+
+ if not run_infos:
+ # Retain existing conditions that don't match anything in the update
+ conditions.append((condition.condition_node,
+ self.from_ini_value(condition.value)))
+ continue
+
+ # Set of nodes in the updated tree that match the same run_info values as the
+ # current existing node
+ nodes = [node_by_run_info[run_info] for run_info in run_infos
+ if run_info in node_by_run_info]
+ # If all the values are the same, update the value
+ if nodes and all(set(node.result_values.keys()) == set(nodes[0].result_values.keys()) for node in nodes):
+ current_value = self.from_ini_value(condition.value)
+ try:
+ new_value = self.updated_value(current_value,
+ nodes[0].result_values)
+ except ConditionError as e:
+ errors.append(e)
+ continue
+ if new_value != current_value:
+ self.node.modified = True
+ conditions.append((condition.condition_node, new_value))
+ run_info_with_condition |= set(run_infos)
+ else:
+ # Don't append this condition
+ self.node.modified = True
+
+ new_conditions, new_errors = self.build_tree_conditions(property_tree,
+ run_info_with_condition,
+ prev_default)
+ if new_conditions:
+ self.node.modified = True
+
+ conditions.extend(new_conditions)
+ errors.extend(new_errors)
+
+ return conditions, errors
+
+ def _update_conditions_full(self,
+ property_tree,
+ prev_default=None):
+ self.node.modified = True
+ conditions, errors = self.build_tree_conditions(property_tree,
+ set(),
+ prev_default)
+
+ return conditions, errors
+
+ def build_tree_conditions(self,
+ property_tree,
+ run_info_with_condition,
+ prev_default=None):
+ conditions = []
+ errors = []
+
+ value_count = defaultdict(int)
+
+ def to_count_value(v):
+ if v is None:
+ return v
+ # Need to count the values in a hashable type
+ count_value = self.to_ini_value(v)
+ if isinstance(count_value, list):
+ count_value = tuple(count_value)
+ return count_value
+
+ queue = deque([(property_tree, [])])
+ while queue:
+ node, parents = queue.popleft()
+ parents_and_self = parents + [node]
+ if node.result_values and any(run_info not in run_info_with_condition
+ for run_info in node.run_info):
+ prop_set = [(item.prop, item.value) for item in parents_and_self if item.prop]
+ value = node.result_values
+ error = None
+ if parents:
+ try:
+ value = self.updated_value(None, value)
+ except ConditionError:
+ expr = make_expr(prop_set, value)
+ error = ConditionError(expr)
+ else:
+ expr = make_expr(prop_set, value)
+ else:
+ # The root node needs special handling
+ expr = None
+ try:
+ value = self.updated_value(self.unconditional_value,
+ value)
+ except ConditionError:
+ error = ConditionError(expr)
+ # If we got an error for the root node, re-add the previous
+ # default value
+ if prev_default:
+ conditions.append((None, prev_default))
+ if error is None:
+ count_value = to_count_value(value)
+ value_count[count_value] += len(node.run_info)
+
+ if error is None:
+ conditions.append((expr, value))
+ else:
+ errors.append(error)
+
+ for child in node.children:
+ queue.append((child, parents_and_self))
+
+ conditions = conditions[::-1]
+
+ # If we haven't set a default condition, add one and remove all the conditions
+ # with the same value
+ if value_count and (not conditions or conditions[-1][0] is not None):
+            # Sort in order of occurrence, prioritising values that match the class default
+ # or the previous default
+ cls_default = to_count_value(self.default_value)
+ prev_default = to_count_value(prev_default)
+ commonest_value = max(value_count, key=lambda x:(value_count.get(x),
+ x == cls_default,
+ x == prev_default))
+ if isinstance(commonest_value, tuple):
+ commonest_value = list(commonest_value)
+ commonest_value = self.from_ini_value(commonest_value)
+ conditions = [item for item in conditions if item[1] != commonest_value]
+ conditions.append((None, commonest_value))
+
+ return conditions, errors
+
+
+class ExpectedUpdate(PropertyUpdate):
+ property_name = "expected"
+ property_builder = build_conditional_tree
+
+ def check_default(self, result):
+ if self.default_value is not None:
+ assert self.default_value == result.default_expected
+ else:
+ self.default_value = result.default_expected
+
+ def from_result_value(self, result):
+ # When we are updating intermittents, we need to keep a record of any existing
+ # intermittents to pass on when building the property tree and matching statuses and
+ # intermittents to the correct run info - this is so we can add them back into the
+ # metadata aligned with the right conditions, unless specified not to with
+ # self.remove_intermittent.
+ # The (status, known_intermittent) tuple is counted when the property tree is built, but
+ # the count value only applies to the first item in the tuple, the status from that run,
+ # when passed to `updated_value`.
+ if (not self.update_intermittent or
+ self.remove_intermittent or
+ not result.known_intermittent):
+ return result.status
+ return result.status + result.known_intermittent
+
+ def to_ini_value(self, value):
+ if isinstance(value, (list, tuple)):
+ return [str(item) for item in value]
+ return str(value)
+
+ def updated_value(self, current, new):
+ if len(new) > 1 and not self.update_intermittent and not isinstance(current, list):
+ raise ConditionError
+
+ counts = {}
+ for status, count in new.items():
+ if isinstance(status, tuple):
+ counts[status[0]] = count
+ counts.update({intermittent: 0 for intermittent in status[1:] if intermittent not in counts})
+ else:
+ counts[status] = count
+
+ if not (self.update_intermittent or isinstance(current, list)):
+ return list(counts)[0]
+
+ # Reorder statuses first based on counts, then based on status priority if there are ties.
+ # Counts with 0 are considered intermittent.
+ statuses = ["OK", "PASS", "FAIL", "ERROR", "TIMEOUT", "CRASH"]
+ status_priority = {value: i for i, value in enumerate(statuses)}
+ sorted_new = sorted(counts.items(), key=lambda x:(-1 * x[1],
+ status_priority.get(x[0],
+ len(status_priority))))
+ expected = []
+ for status, count in sorted_new:
+ # If we are not removing existing recorded intermittents, with a count of 0,
+ # add them in to expected.
+ if count > 0 or not self.remove_intermittent:
+ expected.append(status)
+
+ # If the new intermittent is a subset of the existing one, just use the existing one
+ # This prevents frequent flip-flopping of results between e.g. [OK, TIMEOUT] and
+ # [TIMEOUT, OK]
+ if current and set(expected).issubset(set(current)):
+ return current
+
+ if self.update_intermittent:
+ if len(expected) == 1:
+ return expected[0]
+ return expected
+
+        # If we are not updating intermittents, return the status with the highest occurrence.
+ return expected[0]
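+        # Illustrative (hypothetical counts): with new == {"PASS": 3, "FAIL": 1},
+        # update_intermittent gives ["PASS", "FAIL"] (ordered by count); without it,
+        # a single "PASS" is returned, or a ConditionError is raised above when the
+        # current value is not already a list.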
+
+
+class MaxAssertsUpdate(PropertyUpdate):
+ """For asserts we always update the default value and never add new conditionals.
+ The value we set as the default is the maximum the current default or one more than the
+ number of asserts we saw in any configuration."""
+
+ property_name = "max-asserts"
+ cls_default_value = 0
+ value_type = int
+ property_builder = build_unconditional_tree
+
+ def updated_value(self, current, new):
+ if any(item > current for item in new):
+ return max(new) + 1
+ return current
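+        # Illustrative: current == 2 with observed assert counts new == {4: 1, 3: 2}
+        # (count -> occurrences) gives 5, i.e. one more than the largest observed value.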
+
+
+class MinAssertsUpdate(PropertyUpdate):
+ property_name = "min-asserts"
+ cls_default_value = 0
+ value_type = int
+ property_builder = build_unconditional_tree
+
+ def updated_value(self, current, new):
+ if any(item < current for item in new):
+ rv = min(new) - 1
+ else:
+ rv = current
+ return max(rv, 0)
+
+
+class AppendOnlyListUpdate(PropertyUpdate):
+ cls_default_value = [] # type: ClassVar[List[str]]
+ property_builder = build_unconditional_tree
+
+ def updated_value(self, current, new):
+ if current is None:
+ rv = set()
+ else:
+ rv = set(current)
+
+ for item in new:
+ if item is None:
+ continue
+ elif isinstance(item, str):
+ rv.add(item)
+ else:
+ rv |= item
+
+ return sorted(rv)
+
+
+class LsanUpdate(AppendOnlyListUpdate):
+ property_name = "lsan-allowed"
+ property_builder = build_unconditional_tree
+
+ def from_result_value(self, result):
+ # If we have an allowed_match that matched, return None
+ # This value is ignored later (because it matches the default)
+        # We do that because, if we allow a failure in foo/__dir__.ini,
+        # we don't want to also add the same rule to foo/bar/__dir__.ini
+ if result[1]:
+ return None
+ # Otherwise return the topmost stack frame
+ # TODO: there is probably some improvement to be made by looking for a "better" stack frame
+ return result[0][0]
+
+ def to_ini_value(self, value):
+ return value
+
+
+class LeakObjectUpdate(AppendOnlyListUpdate):
+ property_name = "leak-allowed"
+ property_builder = build_unconditional_tree
+
+ def from_result_value(self, result):
+ # If we have an allowed_match that matched, return None
+ if result[1]:
+ return None
+ # Otherwise return the process/object name
+ return result[0]
+
+
+class LeakThresholdUpdate(PropertyUpdate):
+ property_name = "leak-threshold"
+ cls_default_value = {} # type: ClassVar[Dict[str, int]]
+ property_builder = build_unconditional_tree
+
+ def from_result_value(self, result):
+ return result
+
+ def to_ini_value(self, data):
+ return ["%s:%s" % item for item in sorted(data.items())]
+
+ def from_ini_value(self, data):
+ rv = {}
+ for item in data:
+ key, value = item.split(":", 1)
+ rv[key] = int(float(value))
+ return rv
+
+ def updated_value(self, current, new):
+ if current:
+ rv = current.copy()
+ else:
+ rv = {}
+ for process, leaked_bytes, threshold in new:
+ # If the value is less than the threshold but there isn't
+            # an old value, we must have inherited the threshold from
+            # a parent ini file, so don't add anything to this one
+ if process not in rv and leaked_bytes < threshold:
+ continue
+ if leaked_bytes > rv.get(process, 0):
+ # Round up to nearest 50 kb
+ boundary = 50 * 1024
+ rv[process] = int(boundary * ceil(float(leaked_bytes) / boundary))
+ return rv
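+        # Illustrative: an entry of ("tab", 120000, 102400) raises rv["tab"] to
+        # 153600, i.e. 120000 rounded up to the next multiple of 50KiB (51200 bytes).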
+
+
+def make_expr(prop_set, rhs):
+    """Create an AST that returns the value ``rhs`` when all the
+    properties in prop_set match.
+
+    :param prop_set: List of (property name, value) pairs for each
+                     property in this expression and the value it must match
+    :param rhs: Value to use on the RHS when all the given properties match
+    """
+ root = ConditionalNode()
+
+ assert len(prop_set) > 0
+
+ expressions = []
+ for prop, value in prop_set:
+ if value not in (True, False):
+ expressions.append(
+ BinaryExpressionNode(
+ BinaryOperatorNode("=="),
+ VariableNode(prop),
+ make_node(value)))
+ else:
+ if value:
+ expressions.append(VariableNode(prop))
+ else:
+ expressions.append(
+ UnaryExpressionNode(
+ UnaryOperatorNode("not"),
+ VariableNode(prop)
+ ))
+ if len(expressions) > 1:
+ prev = expressions[-1]
+ for curr in reversed(expressions[:-1]):
+ node = BinaryExpressionNode(
+ BinaryOperatorNode("and"),
+ curr,
+ prev)
+ prev = node
+ else:
+ node = expressions[0]
+
+ root.append(node)
+ rhs_node = make_value_node(rhs)
+ root.append(rhs_node)
+
+ return root
+
+
+def make_node(value):
+    if isinstance(value, (int, float,)):
+        node = NumberNode(value)
+    elif isinstance(value, str):
+        node = StringNode(str(value))
+    elif hasattr(value, "__iter__"):
+        node = ListNode()
+        for item in value:
+            node.append(make_node(item))
+    else:
+        raise ValueError("Don't know how to convert %s into node" % type(value))
+    return node
+
+
+def make_value_node(value):
+ if isinstance(value, (int, float,)):
+ node = ValueNode(value)
+ elif isinstance(value, str):
+ node = ValueNode(str(value))
+ elif hasattr(value, "__iter__"):
+ node = ListNode()
+ for item in value:
+ node.append(make_value_node(item))
+ else:
+ raise ValueError("Don't know how to convert %s into node" % type(value))
+ return node
+
+
+def get_manifest(metadata_root, test_path, url_base, run_info_properties, update_intermittent, remove_intermittent):
+ """Get the ExpectedManifest for a particular test path, or None if there is no
+ metadata stored for that test path.
+
+ :param metadata_root: Absolute path to the root of the metadata directory
+ :param test_path: Path to the test(s) relative to the test root
+    :param url_base: Base url for serving the tests in this manifest
+    :param run_info_properties: Tuple of (properties, dependent_properties) to
+                                consider when updating the metadata
+    :param update_intermittent: Whether to record intermittent statuses as expected
+    :param remove_intermittent: Whether to remove intermittent statuses that are
+                                not seen in the current run"""
+ manifest_path = expected.expected_path(metadata_root, test_path)
+ try:
+ with open(manifest_path, "rb") as f:
+ rv = compile(f, test_path, url_base,
+ run_info_properties, update_intermittent, remove_intermittent)
+ except OSError:
+ return None
+ return rv
+
+
+def compile(manifest_file, test_path, url_base, run_info_properties, update_intermittent, remove_intermittent):
+ return conditional.compile(manifest_file,
+ data_cls_getter=data_cls_getter,
+ test_path=test_path,
+ url_base=url_base,
+ run_info_properties=run_info_properties,
+ update_intermittent=update_intermittent,
+ remove_intermittent=remove_intermittent)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/metadata.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/metadata.py
new file mode 100644
index 0000000000..3ae97114f8
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/metadata.py
@@ -0,0 +1,836 @@
+# mypy: allow-untyped-defs
+
+import array
+import os
+from collections import defaultdict, namedtuple
+from typing import Dict, List, Tuple
+
+from mozlog import structuredlog
+from six import ensure_str, ensure_text
+from sys import intern
+
+from . import manifestupdate
+from . import products
+from . import testloader
+from . import wptmanifest
+from . import wpttest
+from .expected import expected_path
+manifest = None # Module that will be imported relative to test_root
+manifestitem = None
+
+logger = structuredlog.StructuredLogger("web-platform-tests")
+
+try:
+ import ujson as json
+except ImportError:
+ import json # type: ignore
+
+
+class RunInfo:
+ """A wrapper around RunInfo dicts so that they can be hashed by identity"""
+
+ def __init__(self, dict_value):
+ self.data = dict_value
+ self.canonical_repr = tuple(tuple(item) for item in sorted(dict_value.items()))
+
+ def __getitem__(self, key):
+ return self.data[key]
+
+ def __setitem__(self, key, value):
+ raise TypeError
+
+ def __hash__(self):
+ return hash(self.canonical_repr)
+
+ def __eq__(self, other):
+ return self.canonical_repr == other.canonical_repr
+
+ def iteritems(self):
+ yield from self.data.items()
+
+ def items(self):
+        return list(self.data.items())
+
+
+def get_properties(properties_file=None, extra_properties=None, config=None, product=None):
+ """Read the list of properties to use for updating metadata.
+
+ :param properties_file: Path to a JSON file containing properties.
+ :param extra_properties: List of extra properties to use
+ :param config: (deprecated) wptrunner config
+    :param product: (deprecated) product name (requires a config argument to be used)
+ """
+ properties = []
+ dependents = {}
+
+ if properties_file is not None:
+ logger.debug(f"Reading update properties from {properties_file}")
+ try:
+ with open(properties_file) as f:
+ data = json.load(f)
+ msg = None
+ if "properties" not in data:
+ msg = "Properties file missing 'properties' key"
+ elif not isinstance(data["properties"], list):
+ msg = "Properties file 'properties' value must be a list"
+ elif not all(isinstance(item, str) for item in data["properties"]):
+ msg = "Properties file 'properties' value must be a list of strings"
+ elif "dependents" in data:
+ dependents = data["dependents"]
+ if not isinstance(dependents, dict):
+ msg = "Properties file 'dependent_properties' value must be an object"
+ elif (not all(isinstance(dependents[item], list) and
+ all(isinstance(item_value, str)
+ for item_value in dependents[item])
+ for item in dependents)):
+ msg = ("Properties file 'dependent_properties' values must be lists of" +
+ " strings")
+ if msg is not None:
+ logger.error(msg)
+ raise ValueError(msg)
+
+ properties = data["properties"]
+ except OSError:
+ logger.critical(f"Error opening properties file {properties_file}")
+ raise
+ except ValueError:
+ logger.critical(f"Error parsing properties file {properties_file}")
+ raise
+ elif product is not None:
+ logger.warning("Falling back to getting metadata update properties from wptrunner browser "
+ "product file, this will be removed")
+ if config is None:
+ msg = "Must provide a config together with a product"
+ logger.critical(msg)
+ raise ValueError(msg)
+
+ properties, dependents = products.load_product_update(config, product)
+
+ if extra_properties is not None:
+ properties.extend(extra_properties)
+
+ properties_set = set(properties)
+ if any(item not in properties_set for item in dependents.keys()):
+ msg = "All 'dependent' keys must be in 'properties'"
+ logger.critical(msg)
+ raise ValueError(msg)
+
+ return properties, dependents
+
+
+def update_expected(test_paths, log_file_names,
+ update_properties, full_update=False, disable_intermittent=None,
+ update_intermittent=False, remove_intermittent=False, **kwargs):
+ """Update the metadata files for web-platform-tests based on
+ the results obtained in a previous run or runs
+
+ If `disable_intermittent` is not None, assume log_file_names refers to logs from repeated
+ test jobs, disable tests that don't behave as expected on all runs
+
+ If `update_intermittent` is True, intermittent statuses will be recorded as `expected` in
+ the metadata.
+
+ If `remove_intermittent` is True and used in conjunction with `update_intermittent`, any
+ intermittent statuses which are not present in the current run will be removed from the
+ metadata, else they are left in."""
+
+ do_delayed_imports()
+
+ id_test_map = load_test_data(test_paths)
+
+ msg = f"Updating metadata using properties: {','.join(update_properties[0])}"
+ if update_properties[1]:
+ dependent_strs = [f"{item}: {','.join(values)}"
+ for item, values in update_properties[1].items()]
+ msg += f", and dependent properties: {' '.join(dependent_strs)}"
+ logger.info(msg)
+
+ for metadata_path, updated_ini in update_from_logs(id_test_map,
+ update_properties,
+ disable_intermittent,
+ update_intermittent,
+ remove_intermittent,
+ full_update,
+ *log_file_names):
+
+ write_new_expected(metadata_path, updated_ini)
+ if disable_intermittent:
+ for test in updated_ini.iterchildren():
+ for subtest in test.iterchildren():
+ if subtest.new_disabled:
+ logger.info("disabled: %s" % os.path.dirname(subtest.root.test_path) + "/" + subtest.name)
+ if test.new_disabled:
+ logger.info("disabled: %s" % test.root.test_path)
+
+
+def do_delayed_imports():
+ global manifest, manifestitem
+ from manifest import manifest, item as manifestitem # type: ignore
+
+
+# For each testrun
+# Load all files and scan for the suite_start entry
+# Build a hash of filename: properties
+# For each different set of properties, gather all chunks
+# For each chunk in the set of chunks, go through all tests
+# for each test, make a map of {conditionals: [(platform, new_value)]}
+# Repeat for each platform
+# For each test in the list of tests:
+# for each conditional:
+# If all the new values match (or there aren't any) retain that conditional
+# If any new values mismatch:
+# If disable_intermittent and any repeated values don't match, disable the test
+# else mark the test as needing human attention
+# Check if all the RHS values are the same; if so collapse the conditionals
+
+
+class InternedData:
+ """Class for interning data of any (hashable) type.
+
+ This class is intended for building a mapping of int <=> value, such
+ that the integer may be stored as a proxy for the real value, and then
+ the real value obtained later from the proxy value.
+
+ In order to support the use case of packing the integer value as binary,
+    it is possible to specify a maximum bitsize of the data; adding more items
+    than this allows will result in a ValueError exception.
+
+    The zero value is reserved for use as a sentinel."""
+
+ type_conv = None
+ rev_type_conv = None
+
+ def __init__(self, max_bits: int = 8):
+ self.max_idx = 2**max_bits - 2
+        # Reserve 0 as a sentinel
+ self._data: Tuple[List[object], Dict[int, object]]
+ self._data = [None], {}
+
+ def clear(self):
+ self.__init__()
+
+ def store(self, obj):
+ if self.type_conv is not None:
+ obj = self.type_conv(obj)
+
+ objs, obj_to_idx = self._data
+ if obj not in obj_to_idx:
+ value = len(objs)
+ objs.append(obj)
+ obj_to_idx[obj] = value
+ if value > self.max_idx:
+ raise ValueError
+ else:
+ value = obj_to_idx[obj]
+ return value
+
+ def get(self, idx):
+ obj = self._data[0][idx]
+ if self.rev_type_conv is not None:
+ obj = self.rev_type_conv(obj)
+ return obj
+
+ def __iter__(self):
+ for i in range(1, len(self._data[0])):
+ yield self.get(i)
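+    # Illustrative usage (not part of the module):
+    #   interned = InternedData(4)
+    #   idx = interned.store("PASS")  # -> 1; index 0 is the reserved sentinel
+    #   interned.get(idx)             # -> "PASS"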
+
+
+class RunInfoInterned(InternedData):
+ def type_conv(self, value):
+ return tuple(value.items())
+
+ def rev_type_conv(self, value):
+ return dict(value)
+
+
+prop_intern = InternedData(4)
+run_info_intern = InternedData(8)
+status_intern = InternedData(4)
+
+
+def pack_result(data):
+    # `status_intern` normally stores a single status; if `known_intermittent` is
+    # present in the test logs, intern the statuses and pack them together with
+    # `status` in an array until they are needed.
+ if not data.get("known_intermittent"):
+ return status_intern.store(data.get("status"))
+ result = array.array("B")
+ expected = data.get("expected")
+ if expected is None:
+ expected = data["status"]
+ result_parts = [data["status"], expected] + data["known_intermittent"]
+ for i, part in enumerate(result_parts):
+ value = status_intern.store(part)
+ if i % 2 == 0:
+ assert value < 16
+ result.append(value << 4)
+ else:
+ result[-1] += value
+ return result
+
+
+def unpack_result(data):
+ if isinstance(data, int):
+ return (status_intern.get(data), None)
+ if isinstance(data, str):
+ return (data, None)
+ # Unpack multiple statuses into a tuple to be used in the Results named tuple below,
+ # separating `status` and `known_intermittent`.
+ results = []
+ for packed_value in data:
+ first = status_intern.get(packed_value >> 4)
+ second = status_intern.get(packed_value & 0x0F)
+ results.append(first)
+ if second:
+ results.append(second)
+ return ((results[0],), tuple(results[1:]))
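+# Illustrative round trip (hypothetical statuses): a result with status "FAIL",
+# expected "PASS" and known_intermittent ["TIMEOUT"] is packed two nibbles per byte
+# by pack_result, and unpack_result returns (("FAIL",), ("PASS", "TIMEOUT")).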
+
+
+def load_test_data(test_paths):
+ manifest_loader = testloader.ManifestLoader(test_paths, False)
+ manifests = manifest_loader.load()
+
+ id_test_map = {}
+ for test_manifest, paths in manifests.items():
+ id_test_map.update(create_test_tree(paths["metadata_path"],
+ test_manifest))
+ return id_test_map
+
+
+def update_from_logs(id_test_map, update_properties, disable_intermittent, update_intermittent,
+ remove_intermittent, full_update, *log_filenames):
+
+ updater = ExpectedUpdater(id_test_map)
+
+ for i, log_filename in enumerate(log_filenames):
+ logger.info("Processing log %d/%d" % (i + 1, len(log_filenames)))
+ with open(log_filename) as f:
+ updater.update_from_log(f)
+
+ yield from update_results(id_test_map, update_properties, full_update,
+ disable_intermittent, update_intermittent=update_intermittent,
+ remove_intermittent=remove_intermittent)
+
+
+def update_results(id_test_map,
+ update_properties,
+ full_update,
+ disable_intermittent,
+ update_intermittent,
+ remove_intermittent):
+ test_file_items = set(id_test_map.values())
+
+ default_expected_by_type = {}
+ for test_type, test_cls in wpttest.manifest_test_cls.items():
+ if test_cls.result_cls:
+ default_expected_by_type[(test_type, False)] = test_cls.result_cls.default_expected
+ if test_cls.subtest_result_cls:
+ default_expected_by_type[(test_type, True)] = test_cls.subtest_result_cls.default_expected
+
+ for test_file in test_file_items:
+ updated_expected = test_file.update(default_expected_by_type, update_properties,
+ full_update, disable_intermittent, update_intermittent,
+ remove_intermittent)
+ if updated_expected is not None and updated_expected.modified:
+ yield test_file.metadata_path, updated_expected
+
+
+def directory_manifests(metadata_path):
+ rv = []
+    for dirpath, dirnames, filenames in os.walk(metadata_path):
+ if "__dir__.ini" in filenames:
+ rel_path = os.path.relpath(dirpath, metadata_path)
+ rv.append(os.path.join(rel_path, "__dir__.ini"))
+ return rv
+
+
+def write_new_expected(metadata_path, expected):
+ # Serialize the data back to a file
+ path = expected_path(metadata_path, expected.test_path)
+ if not expected.is_empty:
+ manifest_str = wptmanifest.serialize(expected.node,
+ skip_empty_data=True)
+ assert manifest_str != ""
+ dir = os.path.dirname(path)
+ if not os.path.exists(dir):
+ os.makedirs(dir)
+ tmp_path = path + ".tmp"
+ try:
+ with open(tmp_path, "wb") as f:
+ f.write(manifest_str.encode("utf8"))
+ os.replace(tmp_path, path)
+ except (Exception, KeyboardInterrupt):
+ try:
+ os.unlink(tmp_path)
+ except OSError:
+ pass
+ else:
+ try:
+ os.unlink(path)
+ except OSError:
+ pass
+
+
+class ExpectedUpdater:
+ def __init__(self, id_test_map):
+ self.id_test_map = id_test_map
+ self.run_info = None
+ self.action_map = {"suite_start": self.suite_start,
+ "test_start": self.test_start,
+ "test_status": self.test_status,
+ "test_end": self.test_end,
+ "assertion_count": self.assertion_count,
+ "lsan_leak": self.lsan_leak,
+ "mozleak_object": self.mozleak_object,
+ "mozleak_total": self.mozleak_total}
+ self.tests_visited = {}
+
+ def update_from_log(self, log_file):
+ # We support three possible formats:
+ # * wptreport format; one json object in the file, possibly pretty-printed
+ # * wptreport format; one run per line
+ # * raw log format
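+        # Illustrative raw-log line handled by update_from_raw_log below:
+        #   {"action": "test_end", "test": "/foo/bar.html", "status": "OK"}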
+
+ # Try reading a single json object in wptreport format
+ self.run_info = None
+ success = self.get_wptreport_data(log_file.read())
+
+ if success:
+ return
+
+ # Try line-separated json objects in wptreport format
+ log_file.seek(0)
+ for line in log_file:
+ success = self.get_wptreport_data(line)
+ if not success:
+ break
+ else:
+ return
+
+ # Assume the file is a raw log
+ log_file.seek(0)
+ self.update_from_raw_log(log_file)
+
+ def get_wptreport_data(self, input_str):
+ try:
+ data = json.loads(input_str)
+ except Exception:
+ pass
+ else:
+ if "action" not in data and "results" in data:
+ self.update_from_wptreport_log(data)
+ return True
+ return False
+
+ def update_from_raw_log(self, log_file):
+ action_map = self.action_map
+ for line in log_file:
+ try:
+ data = json.loads(line)
+ except ValueError:
+ # Just skip lines that aren't json
+ continue
+ action = data["action"]
+ if action in action_map:
+ action_map[action](data)
+
+ def update_from_wptreport_log(self, data):
+ action_map = self.action_map
+ action_map["suite_start"]({"run_info": data["run_info"]})
+ for test in data["results"]:
+ action_map["test_start"]({"test": test["test"]})
+ for subtest in test["subtests"]:
+ action_map["test_status"]({"test": test["test"],
+ "subtest": subtest["name"],
+ "status": subtest["status"],
+ "expected": subtest.get("expected"),
+ "known_intermittent": subtest.get("known_intermittent", [])})
+ action_map["test_end"]({"test": test["test"],
+ "status": test["status"],
+ "expected": test.get("expected"),
+ "known_intermittent": test.get("known_intermittent", [])})
+ if "asserts" in test:
+ asserts = test["asserts"]
+ action_map["assertion_count"]({"test": test["test"],
+ "count": asserts["count"],
+ "min_expected": asserts["min"],
+ "max_expected": asserts["max"]})
+ for item in data.get("lsan_leaks", []):
+ action_map["lsan_leak"](item)
+
+ mozleak_data = data.get("mozleak", {})
+ for scope, scope_data in mozleak_data.items():
+ for key, action in [("objects", "mozleak_object"),
+ ("total", "mozleak_total")]:
+ for item in scope_data.get(key, []):
+ item_data = {"scope": scope}
+ item_data.update(item)
+ action_map[action](item_data)
+
+ def suite_start(self, data):
+ self.run_info = run_info_intern.store(RunInfo(data["run_info"]))
+
+ def test_start(self, data):
+ test_id = intern(ensure_str(data["test"]))
+ try:
+ self.id_test_map[test_id]
+ except KeyError:
+ logger.warning("Test not found %s, skipping" % test_id)
+ return
+
+ self.tests_visited[test_id] = set()
+
+ def test_status(self, data):
+ test_id = intern(ensure_str(data["test"]))
+ subtest = intern(ensure_str(data["subtest"]))
+ test_data = self.id_test_map.get(test_id)
+ if test_data is None:
+ return
+
+ self.tests_visited[test_id].add(subtest)
+
+ result = pack_result(data)
+
+ test_data.set(test_id, subtest, "status", self.run_info, result)
+ if data.get("expected") and data["expected"] != data["status"]:
+ test_data.set_requires_update()
+
+ def test_end(self, data):
+ if data["status"] == "SKIP":
+ return
+
+ test_id = intern(ensure_str(data["test"]))
+ test_data = self.id_test_map.get(test_id)
+ if test_data is None:
+ return
+
+ result = pack_result(data)
+
+ test_data.set(test_id, None, "status", self.run_info, result)
+ if data.get("expected") and data["expected"] != data["status"]:
+ test_data.set_requires_update()
+ del self.tests_visited[test_id]
+
+ def assertion_count(self, data):
+ test_id = intern(ensure_str(data["test"]))
+ test_data = self.id_test_map.get(test_id)
+ if test_data is None:
+ return
+
+ test_data.set(test_id, None, "asserts", self.run_info, data["count"])
+ if data["count"] < data["min_expected"] or data["count"] > data["max_expected"]:
+ test_data.set_requires_update()
+
+ def test_for_scope(self, data):
+ dir_path = data.get("scope", "/")
+ dir_id = intern(ensure_str(os.path.join(dir_path, "__dir__").replace(os.path.sep, "/")))
+ if dir_id.startswith("/"):
+ dir_id = dir_id[1:]
+ return dir_id, self.id_test_map[dir_id]
+
+ def lsan_leak(self, data):
+ if data["scope"] == "/":
+ logger.warning("Not updating lsan annotations for root scope")
+ return
+ dir_id, test_data = self.test_for_scope(data)
+ test_data.set(dir_id, None, "lsan",
+ self.run_info, (data["frames"], data.get("allowed_match")))
+ if not data.get("allowed_match"):
+ test_data.set_requires_update()
+
+ def mozleak_object(self, data):
+ if data["scope"] == "/":
+ logger.warning("Not updating mozleak annotations for root scope")
+ return
+ dir_id, test_data = self.test_for_scope(data)
+ test_data.set(dir_id, None, "leak-object",
+ self.run_info, ("%s:%s", (data["process"], data["name"]),
+ data.get("allowed")))
+ if not data.get("allowed"):
+ test_data.set_requires_update()
+
+ def mozleak_total(self, data):
+ if data["scope"] == "/":
+ logger.warning("Not updating mozleak annotations for root scope")
+ return
+ if data["bytes"]:
+ dir_id, test_data = self.test_for_scope(data)
+ test_data.set(dir_id, None, "leak-threshold",
+ self.run_info, (data["process"], data["bytes"], data["threshold"]))
+ if data["bytes"] > data["threshold"] or data["bytes"] < 0:
+ test_data.set_requires_update()
+
+
+def create_test_tree(metadata_path, test_manifest):
+ """Create a map of test_id to TestFileData for that test.
+ """
+ do_delayed_imports()
+ id_test_map = {}
+ exclude_types = frozenset(["manual", "support", "conformancechecker"])
+ all_types = set(manifestitem.item_types.keys())
+ assert all_types > exclude_types
+ include_types = all_types - exclude_types
+ for item_type, test_path, tests in test_manifest.itertypes(*include_types):
+ test_file_data = TestFileData(intern(ensure_str(test_manifest.url_base)),
+ intern(ensure_str(item_type)),
+ metadata_path,
+ test_path,
+ tests)
+ for test in tests:
+ id_test_map[intern(ensure_str(test.id))] = test_file_data
+
+ dir_path = os.path.dirname(test_path)
+ while True:
+ dir_meta_path = os.path.join(dir_path, "__dir__")
+ dir_id = (test_manifest.url_base + dir_meta_path.replace(os.path.sep, "/")).lstrip("/")
+ if dir_id in id_test_map:
+ break
+
+ test_file_data = TestFileData(intern(ensure_str(test_manifest.url_base)),
+ None,
+ metadata_path,
+ dir_meta_path,
+ [])
+ id_test_map[dir_id] = test_file_data
+ dir_path = os.path.dirname(dir_path)
+ if not dir_path:
+ break
+
+ return id_test_map
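+# Illustrative: for a test at "css/foo/bar.html" with url_base "/", the loop above
+# also creates directory entries keyed "css/foo/__dir__" and "css/__dir__", which is
+# where per-directory data such as lsan and leak annotations is attached.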
+
+
+class PackedResultList:
+ """Class for storing test results.
+
+ Results are stored as an array of 2-byte integers for compactness.
+ The first 4 bits represent the property name, the second 4 bits
+ represent the test status (if it's a result with a status code), and
+ the final 8 bits represent the run_info. If the result doesn't have a
+ simple status code but instead a richer type, we place that richer type
+ in a dictionary and set the status part of the result type to 0.
+
+ This class depends on the global prop_intern, run_info_intern and
+    status_intern InternedData objects to convert between the bit values
+ and corresponding Python objects."""
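+    # Illustrative layout of one packed 16-bit entry: bits 15-12 = property index,
+    # bits 11-8 = status index (0 when the value is stored in raw_data instead),
+    # bits 7-0 = run_info index; e.g. prop 2, status 1, run_info 5 -> 0x2105.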
+
+ def __init__(self):
+ self.data = array.array("H")
+
+ __slots__ = ("data", "raw_data")
+
+ def append(self, prop, run_info, value):
+ out_val = (prop << 12) + run_info
+ if prop == prop_intern.store("status") and isinstance(value, int):
+ out_val += value << 8
+ else:
+ if not hasattr(self, "raw_data"):
+ self.raw_data = {}
+ self.raw_data[len(self.data)] = value
+ self.data.append(out_val)
+
+ def unpack(self, idx, packed):
+ prop = prop_intern.get((packed & 0xF000) >> 12)
+
+ value_idx = (packed & 0x0F00) >> 8
+ if value_idx == 0:
+ value = self.raw_data[idx]
+ else:
+ value = status_intern.get(value_idx)
+
+ run_info = run_info_intern.get(packed & 0x00FF)
+
+ return prop, run_info, value
+
+ def __iter__(self):
+ for i, item in enumerate(self.data):
+ yield self.unpack(i, item)
+
+
+class TestFileData:
+ __slots__ = ("url_base", "item_type", "test_path", "metadata_path", "tests",
+ "_requires_update", "data")
+
+ def __init__(self, url_base, item_type, metadata_path, test_path, tests):
+ self.url_base = url_base
+ self.item_type = item_type
+ self.test_path = test_path
+ self.metadata_path = metadata_path
+ self.tests = {intern(ensure_str(item.id)) for item in tests}
+ self._requires_update = False
+ self.data = defaultdict(lambda: defaultdict(PackedResultList))
+
+ def set_requires_update(self):
+ self._requires_update = True
+
+ @property
+ def requires_update(self):
+ return self._requires_update
+
+ def set(self, test_id, subtest_id, prop, run_info, value):
+ self.data[test_id][subtest_id].append(prop_intern.store(prop),
+ run_info,
+ value)
+
+ def expected(self, update_properties, update_intermittent, remove_intermittent):
+ expected_data = load_expected(self.url_base,
+ self.metadata_path,
+ self.test_path,
+ self.tests,
+ update_properties,
+ update_intermittent,
+ remove_intermittent)
+ if expected_data is None:
+ expected_data = create_expected(self.url_base,
+ self.test_path,
+ update_properties,
+ update_intermittent,
+ remove_intermittent)
+ return expected_data
+
+ def is_disabled(self, test):
+        # This conservatively assumes that anything that was disabled remains disabled;
+        # we could probably do better by checking if it's in the full set of run infos
+ return test.has_key("disabled")
+
+ def orphan_subtests(self, expected):
+ # Return subtest nodes present in the expected file, but missing from the data
+ rv = []
+
+ for test_id, subtests in self.data.items():
+ test = expected.get_test(ensure_text(test_id))
+ if not test:
+ continue
+ seen_subtests = {ensure_text(item) for item in subtests.keys() if item is not None}
+ missing_subtests = set(test.subtests.keys()) - seen_subtests
+ for item in missing_subtests:
+ expected_subtest = test.get_subtest(item)
+ if not self.is_disabled(expected_subtest):
+ rv.append(expected_subtest)
+ for name in seen_subtests:
+ subtest = test.get_subtest(name)
+            # If any of the items have children (i.e. subsubtests) we want to prune these
+ if subtest.children:
+ rv.extend(subtest.children)
+
+ return rv
+
+ def filter_unknown_props(self, update_properties, subtests):
+ # Remove subtests which have some conditions that aren't in update_properties
+ # since removing these may be inappropriate
+ top_level_props, dependent_props = update_properties
+ all_properties = set(top_level_props)
+ for item in dependent_props.values():
+ all_properties |= set(item)
+
+ filtered = []
+ for subtest in subtests:
+ include = True
+ for key, _ in subtest.iter_properties():
+ conditions = subtest.get_conditions(key)
+ for condition in conditions:
+ if not condition.variables.issubset(all_properties):
+ include = False
+ break
+ if not include:
+ break
+ if include:
+ filtered.append(subtest)
+ return filtered
+
+ def update(self, default_expected_by_type, update_properties,
+ full_update=False, disable_intermittent=None, update_intermittent=False,
+ remove_intermittent=False):
+ # If we are doing a full update, we may need to prune missing nodes
+ # even if the expectations didn't change
+ if not self.requires_update and not full_update:
+ return
+
+ logger.debug("Updating %s", self.metadata_path)
+
+ expected = self.expected(update_properties,
+ update_intermittent=update_intermittent,
+ remove_intermittent=remove_intermittent)
+
+ if full_update:
+ orphans = self.orphan_subtests(expected)
+ orphans = self.filter_unknown_props(update_properties, orphans)
+
+ if not self.requires_update and not orphans:
+ return
+
+ if orphans:
+ expected.modified = True
+ for item in orphans:
+ item.remove()
+
+ expected_by_test = {}
+
+ for test_id in self.tests:
+ if not expected.has_test(test_id):
+ expected.append(manifestupdate.TestNode.create(test_id))
+ test_expected = expected.get_test(test_id)
+ expected_by_test[test_id] = test_expected
+
+ for test_id, test_data in self.data.items():
+ test_id = ensure_str(test_id)
+ for subtest_id, results_list in test_data.items():
+ for prop, run_info, value in results_list:
+ # Special case directory metadata
+ if subtest_id is None and test_id.endswith("__dir__"):
+ if prop == "lsan":
+ expected.set_lsan(run_info, value)
+ elif prop == "leak-object":
+ expected.set_leak_object(run_info, value)
+ elif prop == "leak-threshold":
+ expected.set_leak_threshold(run_info, value)
+ continue
+
+ test_expected = expected_by_test[test_id]
+ if subtest_id is None:
+ item_expected = test_expected
+ else:
+ subtest_id = ensure_text(subtest_id)
+ item_expected = test_expected.get_subtest(subtest_id)
+
+ if prop == "status":
+ status, known_intermittent = unpack_result(value)
+ value = Result(status,
+ known_intermittent,
+ default_expected_by_type[self.item_type,
+ subtest_id is not None])
+ item_expected.set_result(run_info, value)
+ elif prop == "asserts":
+ item_expected.set_asserts(run_info, value)
+
+ expected.update(full_update=full_update,
+ disable_intermittent=disable_intermittent)
+ for test in expected.iterchildren():
+ for subtest in test.iterchildren():
+ subtest.update(full_update=full_update,
+ disable_intermittent=disable_intermittent)
+ test.update(full_update=full_update,
+ disable_intermittent=disable_intermittent)
+
+ return expected
+
+
+Result = namedtuple("Result", ["status", "known_intermittent", "default_expected"])
+
+
+def create_expected(url_base, test_path, run_info_properties, update_intermittent, remove_intermittent):
+ expected = manifestupdate.ExpectedManifest(None,
+ test_path,
+ url_base,
+ run_info_properties,
+ update_intermittent,
+ remove_intermittent)
+ return expected
+
+
+def load_expected(url_base, metadata_path, test_path, tests, run_info_properties, update_intermittent, remove_intermittent):
+ expected_manifest = manifestupdate.get_manifest(metadata_path,
+ test_path,
+ url_base,
+ run_info_properties,
+ update_intermittent,
+ remove_intermittent)
+ return expected_manifest
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/mpcontext.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/mpcontext.py
new file mode 100644
index 0000000000..d423d9b9a1
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/mpcontext.py
@@ -0,0 +1,13 @@
+# mypy: allow-untyped-defs
+
+import multiprocessing
+
+_context = None
+
+
+def get_context():
+ global _context
+
+ if _context is None:
+ _context = multiprocessing.get_context("spawn")
+ return _context
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/print_reftest_runner.html b/testing/web-platform/tests/tools/wptrunner/wptrunner/print_reftest_runner.html
new file mode 100644
index 0000000000..3ce18d4dd8
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/print_reftest_runner.html
@@ -0,0 +1,33 @@
+<!doctype html>
+<title></title>
+<script src="/_pdf_js/pdf.js"></script>
+<canvas></canvas>
+<script>
+function render(pdfData) {
+ return _render(pdfData);
+}
+
+async function _render(pdfData) {
+ let loadingTask = pdfjsLib.getDocument({data: atob(pdfData)});
+ let pdf = await loadingTask.promise;
+ let rendered = [];
+ for (let pageNumber=1; pageNumber<=pdf.numPages; pageNumber++) {
+ let page = await pdf.getPage(pageNumber);
+ var viewport = page.getViewport({scale: 96./72.});
+ // Prepare canvas using PDF page dimensions
+ var canvas = document.getElementsByTagName('canvas')[0];
+ var context = canvas.getContext('2d');
+ canvas.height = viewport.height;
+ canvas.width = viewport.width;
+
+ // Render PDF page into canvas context
+ var renderContext = {
+ canvasContext: context,
+ viewport: viewport
+ };
+ await page.render(renderContext).promise;
+ rendered.push(canvas.toDataURL());
+ }
+ return rendered;
+}
+</script>
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/products.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/products.py
new file mode 100644
index 0000000000..0706a2b5c8
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/products.py
@@ -0,0 +1,67 @@
+# mypy: allow-untyped-defs
+
+import importlib
+import imp
+
+from .browsers import product_list
+
+
+def product_module(config, product):
+ if product not in product_list:
+ raise ValueError("Unknown product %s" % product)
+
+ path = config.get("products", {}).get(product, None)
+ if path:
+ module = imp.load_source('wptrunner.browsers.' + product, path)
+ else:
+ module = importlib.import_module("wptrunner.browsers." + product)
+
+ if not hasattr(module, "__wptrunner__"):
+ raise ValueError("Product module does not define __wptrunner__ variable")
+
+ return module
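+# Illustrative (hypothetical) shape of the __wptrunner__ data consumed by Product
+# below; the real keys and class/function names come from each browser module:
+#   __wptrunner__ = {"browser": "ExampleBrowser",
+#                    "check_args": "check_args",
+#                    "browser_kwargs": "browser_kwargs",
+#                    "executor_kwargs": "executor_kwargs",
+#                    "env_options": "env_options",
+#                    "env_extras": "env_extras",
+#                    "timeout_multiplier": "get_timeout_multiplier",
+#                    "executor": {"testharness": "ExampleTestharnessExecutor"}}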
+
+
+class Product:
+ def __init__(self, config, product):
+ module = product_module(config, product)
+ data = module.__wptrunner__
+ self.name = product
+ if isinstance(data["browser"], str):
+ self._browser_cls = {None: getattr(module, data["browser"])}
+ else:
+ self._browser_cls = {key: getattr(module, value)
+ for key, value in data["browser"].items()}
+ self.check_args = getattr(module, data["check_args"])
+ self.get_browser_kwargs = getattr(module, data["browser_kwargs"])
+ self.get_executor_kwargs = getattr(module, data["executor_kwargs"])
+ self.env_options = getattr(module, data["env_options"])()
+ self.get_env_extras = getattr(module, data["env_extras"])
+ self.run_info_extras = (getattr(module, data["run_info_extras"])
+ if "run_info_extras" in data else lambda **kwargs:{})
+ self.get_timeout_multiplier = getattr(module, data["timeout_multiplier"])
+
+ self.executor_classes = {}
+ for test_type, cls_name in data["executor"].items():
+ cls = getattr(module, cls_name)
+ self.executor_classes[test_type] = cls
+
+ def get_browser_cls(self, test_type):
+ if test_type in self._browser_cls:
+ return self._browser_cls[test_type]
+ return self._browser_cls[None]
+
+
+def load_product_update(config, product):
+    """Return a tuple of (properties, dependent_properties) indicating the
+    run_info properties to use when constructing the expectation data for
+    this product. If the product module does not define update_properties,
+    the default of (["product"], {}) is used."""
+
+ module = product_module(config, product)
+ data = module.__wptrunner__
+
+ update_properties = (getattr(module, data["update_properties"])()
+ if "update_properties" in data else (["product"], {}))
+
+ return update_properties
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/stability.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/stability.py
new file mode 100644
index 0000000000..9ac6249c44
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/stability.py
@@ -0,0 +1,417 @@
+# mypy: allow-untyped-defs
+
+import copy
+import functools
+import imp
+import io
+import os
+from collections import OrderedDict, defaultdict
+from datetime import datetime
+
+from mozlog import reader
+from mozlog.formatters import JSONFormatter
+from mozlog.handlers import BaseHandler, StreamHandler, LogLevelFilter
+
+from . import wptrunner
+
+here = os.path.dirname(__file__)
+localpaths = imp.load_source("localpaths", os.path.abspath(os.path.join(here, os.pardir, os.pardir, "localpaths.py")))
+from ci.tc.github_checks_output import get_gh_checks_outputter # type: ignore
+from wpt.markdown import markdown_adjust, table # type: ignore
+
+
+# If a test takes more than (FLAKY_THRESHOLD*timeout) and does not consistently
+# time out, it is considered slow (potentially flaky).
+FLAKY_THRESHOLD = 0.8
+
+
+class LogActionFilter(BaseHandler): # type: ignore
+
+ """Handler that filters out messages not of a given set of actions.
+
+ Subclasses BaseHandler.
+
+ :param inner: Handler to use for messages that pass this filter
+ :param actions: List of actions for which to fire the handler
+ """
+
+ def __init__(self, inner, actions):
+ """Extend BaseHandler and set inner and actions props on self."""
+ BaseHandler.__init__(self, inner)
+ self.inner = inner
+ self.actions = actions
+
+ def __call__(self, item):
+ """Invoke handler if action is in list passed as constructor param."""
+ if item["action"] in self.actions:
+ return self.inner(item)
+
+
+class LogHandler(reader.LogHandler): # type: ignore
+
+ """Handle updating test and subtest status in log.
+
+ Subclasses reader.LogHandler.
+ """
+ def __init__(self):
+ self.results = OrderedDict()
+
+ def find_or_create_test(self, data):
+ test_name = data["test"]
+ if self.results.get(test_name):
+ return self.results[test_name]
+
+ test = {
+ "subtests": OrderedDict(),
+ "status": defaultdict(int),
+ "longest_duration": defaultdict(float),
+ }
+ self.results[test_name] = test
+ return test
+
+ def find_or_create_subtest(self, data):
+ test = self.find_or_create_test(data)
+ subtest_name = data["subtest"]
+
+ if test["subtests"].get(subtest_name):
+ return test["subtests"][subtest_name]
+
+ subtest = {
+ "status": defaultdict(int),
+ "messages": set()
+ }
+ test["subtests"][subtest_name] = subtest
+
+ return subtest
+
+ def test_start(self, data):
+ test = self.find_or_create_test(data)
+ test["start_time"] = data["time"]
+
+ def test_status(self, data):
+ subtest = self.find_or_create_subtest(data)
+ subtest["status"][data["status"]] += 1
+ if data.get("message"):
+ subtest["messages"].add(data["message"])
+
+ def test_end(self, data):
+ test = self.find_or_create_test(data)
+ test["status"][data["status"]] += 1
+ # Timestamps are in ms since epoch.
+ duration = data["time"] - test.pop("start_time")
+ test["longest_duration"][data["status"]] = max(
+ duration, test["longest_duration"][data["status"]])
+ try:
+ # test_timeout is in seconds; convert it to ms.
+ test["timeout"] = data["extra"]["test_timeout"] * 1000
+ except KeyError:
+ # If a test is skipped, it won't have extra info.
+ pass
+
+
+def is_inconsistent(results_dict, iterations):
+ """Return whether or not a single test is inconsistent."""
+ if 'SKIP' in results_dict:
+ return False
+ return len(results_dict) > 1 or sum(results_dict.values()) != iterations
+
+
+def find_slow_status(test):
+ """Check if a single test almost times out.
+
+ We are interested in tests that almost time out (i.e. likely to be flaky).
+ Therefore, timeout statuses are ignored, including (EXTERNAL-)TIMEOUT.
+ CRASH & ERROR are also ignored because the they override TIMEOUT; a test
+ that both crashes and times out is marked as CRASH, so it won't be flaky.
+
+ Returns:
+ A result status produced by a run that almost times out; None, if no
+ runs almost time out.
+ """
+ if "timeout" not in test:
+ return None
+ threshold = test["timeout"] * FLAKY_THRESHOLD
+ for status in ['PASS', 'FAIL', 'OK']:
+ if (status in test["longest_duration"] and
+ test["longest_duration"][status] > threshold):
+ return status
+ return None
+
+
+def process_results(log, iterations):
+ """Process test log and return overall results and list of inconsistent tests."""
+ inconsistent = []
+ slow = []
+ handler = LogHandler()
+ reader.handle_log(reader.read(log), handler)
+ results = handler.results
+ for test_name, test in results.items():
+ if is_inconsistent(test["status"], iterations):
+ inconsistent.append((test_name, None, test["status"], []))
+ for subtest_name, subtest in test["subtests"].items():
+ if is_inconsistent(subtest["status"], iterations):
+ inconsistent.append((test_name, subtest_name, subtest["status"], subtest["messages"]))
+
+ slow_status = find_slow_status(test)
+ if slow_status is not None:
+ slow.append((
+ test_name,
+ slow_status,
+ test["longest_duration"][slow_status],
+ test["timeout"]
+ ))
+
+ return results, inconsistent, slow
+
+
+def err_string(results_dict, iterations):
+ """Create and return string with errors from test run."""
+ rv = []
+ total_results = sum(results_dict.values())
+ if total_results > iterations:
+ rv.append("Duplicate subtest name")
+ else:
+ for key, value in sorted(results_dict.items()):
+ rv.append("%s%s" %
+ (key, ": %s/%s" % (value, iterations) if value != iterations else ""))
+ if total_results < iterations:
+ rv.append("MISSING: %s/%s" % (iterations - total_results, iterations))
+ rv = ", ".join(rv)
+ if is_inconsistent(results_dict, iterations):
+ rv = "**%s**" % rv
+ return rv
+
+
+def write_github_checks_summary_inconsistent(log, inconsistent, iterations):
+ """Outputs a summary of inconsistent tests for GitHub Checks."""
+ log("Some affected tests had inconsistent (flaky) results:\n")
+ write_inconsistent(log, inconsistent, iterations)
+ log("\n")
+ log("These may be pre-existing or new flakes. Please try to reproduce (see "
+ "the above WPT command, though some flags may not be needed when "
+ "running locally) and determine if your change introduced the flake. "
+ "If you are unable to reproduce the problem, please tag "
+ "`@web-platform-tests/wpt-core-team` in a comment for help.\n")
+
+
+def write_github_checks_summary_slow_tests(log, slow):
+ """Outputs a summary of slow tests for GitHub Checks."""
+ log("Some affected tests had slow results:\n")
+ write_slow_tests(log, slow)
+ log("\n")
+ log("These may be pre-existing or newly slow tests. Slow tests indicate "
+ "that a test ran very close to the test timeout limit and so may "
+ "become TIMEOUT-flaky in the future. Consider speeding up the test or "
+ "breaking it into multiple tests. For help, please tag "
+ "`@web-platform-tests/wpt-core-team` in a comment.\n")
+
+
+def write_inconsistent(log, inconsistent, iterations):
+ """Output inconsistent tests to the passed in logging function."""
+ log("## Unstable results ##\n")
+ strings = [(
+ "`%s`" % markdown_adjust(test),
+ ("`%s`" % markdown_adjust(subtest)) if subtest else "",
+ err_string(results, iterations),
+ ("`%s`" % markdown_adjust(";".join(messages))) if len(messages) else "")
+ for test, subtest, results, messages in inconsistent]
+ table(["Test", "Subtest", "Results", "Messages"], strings, log)
+
+
+def write_slow_tests(log, slow):
+ """Output slow tests to the passed in logging function."""
+ log("## Slow tests ##\n")
+ strings = [(
+ "`%s`" % markdown_adjust(test),
+ "`%s`" % status,
+ "`%.0f`" % duration,
+ "`%.0f`" % timeout)
+ for test, status, duration, timeout in slow]
+ table(["Test", "Result", "Longest duration (ms)", "Timeout (ms)"], strings, log)
+
+
+def write_results(log, results, iterations, pr_number=None, use_details=False):
+ log("## All results ##\n")
+ if use_details:
+ log("<details>\n")
+ log("<summary>%i %s ran</summary>\n\n" % (len(results),
+ "tests" if len(results) > 1
+ else "test"))
+
+ for test_name, test in results.items():
+ baseurl = "http://w3c-test.org/submissions"
+ if "https" in os.path.splitext(test_name)[0].split(".")[1:]:
+ baseurl = "https://w3c-test.org/submissions"
+ title = test_name
+ if use_details:
+ log("<details>\n")
+ if pr_number:
+ title = "<a href=\"%s/%s%s\">%s</a>" % (baseurl, pr_number, test_name, title)
+ log('<summary>%s</summary>\n\n' % title)
+ else:
+ log("### %s ###" % title)
+ strings = [("", err_string(test["status"], iterations), "")]
+
+ strings.extend(((
+ ("`%s`" % markdown_adjust(subtest_name)) if subtest else "",
+ err_string(subtest["status"], iterations),
+ ("`%s`" % markdown_adjust(';'.join(subtest["messages"]))) if len(subtest["messages"]) else "")
+ for subtest_name, subtest in test["subtests"].items()))
+ table(["Subtest", "Results", "Messages"], strings, log)
+ if use_details:
+ log("</details>\n")
+
+ if use_details:
+ log("</details>\n")
+
+
+def run_step(logger, iterations, restart_after_iteration, kwargs_extras, **kwargs):
+ kwargs = copy.deepcopy(kwargs)
+
+ if restart_after_iteration:
+ kwargs["repeat"] = iterations
+ else:
+ kwargs["rerun"] = iterations
+
+ kwargs["pause_after_test"] = False
+ kwargs.update(kwargs_extras)
+
+ def wrap_handler(x):
+ if not kwargs.get("verify_log_full", False):
+ x = LogLevelFilter(x, "WARNING")
+ x = LogActionFilter(x, ["log", "process_output"])
+ return x
+
+ initial_handlers = logger._state.handlers
+ logger._state.handlers = [wrap_handler(handler)
+ for handler in initial_handlers]
+
+ log = io.BytesIO()
+ # Set up logging for wptrunner that keeps only process output and
+ # warning-or-higher level logs
+ logger.add_handler(StreamHandler(log, JSONFormatter()))
+
+ _, test_status = wptrunner.run_tests(**kwargs)
+
+ logger._state.handlers = initial_handlers
+ logger._state.running_tests = set()
+ logger._state.suite_started = False
+
+ log.seek(0)
+ total_iterations = test_status.repeated_runs * kwargs.get("rerun", 1)
+ all_skipped = test_status.all_skipped
+ results, inconsistent, slow = process_results(log, total_iterations)
+ return total_iterations, all_skipped, results, inconsistent, slow
+
+
+def get_steps(logger, repeat_loop, repeat_restart, kwargs_extras):
+ steps = []
+ for kwargs_extra in kwargs_extras:
+ if kwargs_extra:
+ flags_string = " with flags %s" % " ".join(
+ "%s=%s" % item for item in kwargs_extra.items())
+ else:
+ flags_string = ""
+
+ if repeat_loop:
+ desc = "Running tests in a loop %d times%s" % (repeat_loop,
+ flags_string)
+ steps.append((desc,
+ functools.partial(run_step,
+ logger,
+ repeat_loop,
+ False,
+ kwargs_extra),
+ repeat_loop))
+
+ if repeat_restart:
+ desc = "Running tests in a loop with restarts %s times%s" % (repeat_restart,
+ flags_string)
+ steps.append((desc,
+ functools.partial(run_step,
+ logger,
+ repeat_restart,
+ True,
+ kwargs_extra),
+ repeat_restart))
+
+ return steps
+
+
+def write_summary(logger, step_results, final_result):
+ for desc, result in step_results:
+ logger.info('::: %s : %s' % (desc, result))
+ logger.info(':::')
+ if final_result == "PASS":
+ log = logger.info
+ elif final_result == "TIMEOUT":
+ log = logger.warning
+ else:
+ log = logger.error
+ log('::: Test verification %s' % final_result)
+
+ logger.info(':::')
+
+
+def check_stability(logger, repeat_loop=10, repeat_restart=5, chaos_mode=True, max_time=None,
+ output_results=True, **kwargs):
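+ # Return codes, as used below: None when every step passes, 1 when a step
+ # finds flaky or slow results (or completes too few iterations), and 2 when
+ # the overall max_time budget is exhausted.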
+ kwargs_extras = [{}]
+ if chaos_mode and kwargs["product"] == "firefox":
+ kwargs_extras.append({"chaos_mode_flags": int("0xfb", base=16)})
+
+ steps = get_steps(logger, repeat_loop, repeat_restart, kwargs_extras)
+
+ start_time = datetime.now()
+ step_results = []
+
+ github_checks_outputter = get_gh_checks_outputter(kwargs.get("github_checks_text_file"))
+
+ for desc, step_func, expected_iterations in steps:
+ if max_time and datetime.now() - start_time > max_time:
+ logger.info("::: Test verification is taking too long: Giving up!")
+ logger.info("::: So far, all checks passed, but not all checks were run.")
+ write_summary(logger, step_results, "TIMEOUT")
+ return 2
+
+ logger.info(':::')
+ logger.info('::: Running test verification step "%s"...' % desc)
+ logger.info(':::')
+ total_iterations, all_skipped, results, inconsistent, slow = step_func(**kwargs)
+
+ logger.info(f"::: Ran {total_iterations} of expected {expected_iterations} iterations.")
+ if total_iterations <= 1 and expected_iterations > 1 and not all_skipped:
+ step_results.append((desc, "FAIL"))
+ logger.info("::: Reached iteration timeout before finishing 2 or more repeat runs.")
+ logger.info("::: At least 2 successful repeat runs are required to validate stability.")
+ write_summary(logger, step_results, "TIMEOUT")
+ return 1
+
+ if output_results:
+ write_results(logger.info, results, total_iterations)
+
+ if inconsistent:
+ step_results.append((desc, "FAIL"))
+ if github_checks_outputter:
+ write_github_checks_summary_inconsistent(github_checks_outputter.output,
+ inconsistent, total_iterations)
+ write_inconsistent(logger.info, inconsistent, total_iterations)
+ write_summary(logger, step_results, "FAIL")
+ return 1
+
+ if slow:
+ step_results.append((desc, "FAIL"))
+ if github_checks_outputter:
+ write_github_checks_summary_slow_tests(github_checks_outputter.output, slow)
+ write_slow_tests(logger.info, slow)
+ write_summary(logger, step_results, "FAIL")
+ return 1
+
+ # If the tests passed but the number of iterations didn't match the number expected to run,
+ # it is likely that the runs were stopped early to avoid a timeout.
+ if total_iterations != expected_iterations:
+ result = f"PASS * {total_iterations}/{expected_iterations} repeats completed"
+ step_results.append((desc, result))
+ else:
+ step_results.append((desc, "PASS"))
+
+ write_summary(logger, step_results, "PASS")
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/testdriver-extra.js b/testing/web-platform/tests/tools/wptrunner/wptrunner/testdriver-extra.js
new file mode 100644
index 0000000000..94a9a97125
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/testdriver-extra.js
@@ -0,0 +1,259 @@
+"use strict";
+
+(function() {
+ const is_test_context = window.__wptrunner_message_queue !== undefined;
+ const pending = new Map();
+
+ let result = null;
+ let ctx_cmd_id = 0;
+ let testharness_context = null;
+
+ window.addEventListener("message", function(event) {
+ const data = event.data;
+
+ if (typeof data !== "object" && data !== null) {
+ return;
+ }
+
+ if (is_test_context && data.type === "testdriver-command") {
+ const command = data.message;
+ const ctx_id = command.cmd_id;
+ delete command.cmd_id;
+ const cmd_id = window.__wptrunner_message_queue.push(command);
+ let on_success = (data) => {
+ data.type = "testdriver-complete";
+ data.cmd_id = ctx_id;
+ event.source.postMessage(data, "*");
+ };
+ let on_failure = (data) => {
+ data.type = "testdriver-complete";
+ data.cmd_id = ctx_id;
+ event.source.postMessage(data, "*");
+ };
+ pending.set(cmd_id, [on_success, on_failure]);
+ } else if (data.type === "testdriver-complete") {
+ const cmd_id = data.cmd_id;
+ const [on_success, on_failure] = pending.get(cmd_id);
+ pending.delete(cmd_id);
+ const resolver = data.status === "success" ? on_success : on_failure;
+ resolver(data);
+ if (is_test_context) {
+ window.__wptrunner_process_next_event();
+ }
+ }
+ });
+
+ // Code copied from /common/utils.js
+ function rand_int(bits) {
+ if (bits < 1 || bits > 53) {
+ throw new TypeError();
+ } else {
+ if (bits >= 1 && bits <= 30) {
+ return 0 | ((1 << bits) * Math.random());
+ } else {
+ var high = (0 | ((1 << (bits - 30)) * Math.random())) * (1 << 30);
+ var low = 0 | ((1 << 30) * Math.random());
+ return high + low;
+ }
+ }
+ }
+
+ function to_hex(x, length) {
+ var rv = x.toString(16);
+ while (rv.length < length) {
+ rv = "0" + rv;
+ }
+ return rv;
+ }
+
+ function get_window_id(win) {
+ if (win == window && is_test_context) {
+ return null;
+ }
+ if (!win.__wptrunner_id) {
+ // generate a uuid
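+ // e.g. "3b241101-e2bb-4255-8caf-4136c566a962": the 8-4-4-4-12 layout below
+ // fixes the version nibble to 4 and the variant bits to 10xx.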
+ win.__wptrunner_id = [to_hex(rand_int(32), 8),
+ to_hex(rand_int(16), 4),
+ to_hex(0x4000 | rand_int(12), 4),
+ to_hex(0x8000 | rand_int(14), 4),
+ to_hex(rand_int(48), 12)].join("-");
+ }
+ return win.__wptrunner_id;
+ }
+
+ const get_context = function(element) {
+ if (!element) {
+ return null;
+ }
+ let elementWindow = element.ownerDocument.defaultView;
+ if (!elementWindow) {
+ throw new Error("Browsing context for element was detached");
+ }
+ return elementWindow;
+ };
+
+ const get_selector = function(element) {
+ let selector;
+
+ if (element.id) {
+ const id = element.id;
+
+ selector = "#";
+ // escape everything, because it's easy to implement
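+ // e.g. an id of "foo" becomes the selector "#\66 \6f \6f " (each
+ // character hex-escaped, with a space terminating each escape)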
+ for (let i = 0, len = id.length; i < len; i++) {
+ selector += '\\' + id.charCodeAt(i).toString(16) + ' ';
+ }
+ } else {
+ // push and then reverse to avoid O(n) unshift in the loop
+ let segments = [];
+ for (let node = element;
+ node.parentElement;
+ node = node.parentElement) {
+ let segment = "*|" + node.localName;
+ let nth = Array.prototype.indexOf.call(node.parentElement.children, node) + 1;
+ segments.push(segment + ":nth-child(" + nth + ")");
+ }
+ segments.push(":root");
+ segments.reverse();
+
+ selector = segments.join(" > ");
+ }
+
+ return selector;
+ };
+
+ const create_action = function(name, props) {
+ let cmd_id;
+ const action_msg = {type: "action",
+ action: name,
+ ...props};
+ if (action_msg.context) {
+ action_msg.context = get_window_id(action_msg.context);
+ }
+ if (is_test_context) {
+ cmd_id = window.__wptrunner_message_queue.push(action_msg);
+ } else {
+ if (testharness_context === null) {
+ throw new Error("Tried to run in a non-testharness window without a call to set_test_context");
+ }
+ if (action_msg.context === null) {
+ action_msg.context = get_window_id(window);
+ }
+ cmd_id = ctx_cmd_id++;
+ action_msg.cmd_id = cmd_id;
+ window.test_driver.message_test({type: "testdriver-command",
+ message: action_msg});
+ }
+ const pending_promise = new Promise(function(resolve, reject) {
+ const on_success = data => {
+ result = JSON.parse(data.message).result;
+ resolve(result);
+ };
+ const on_failure = data => {
+ reject(`${data.status}: ${data.message}`);
+ };
+ pending.set(cmd_id, [on_success, on_failure]);
+ });
+ return pending_promise;
+ };
+
+ window.test_driver_internal.in_automation = true;
+
+ window.test_driver_internal.set_test_context = function(context) {
+ if (window.__wptrunner_message_queue) {
+ throw new Error("Tried to set testharness context in a window containing testharness.js");
+ }
+ testharness_context = context;
+ };
+
+ window.test_driver_internal.click = function(element) {
+ const selector = get_selector(element);
+ const context = get_context(element);
+ return create_action("click", {selector, context});
+ };
+
+ window.test_driver_internal.delete_all_cookies = function(context=null) {
+ return create_action("delete_all_cookies", {context});
+ };
+
+ window.test_driver_internal.get_all_cookies = function(context=null) {
+ return create_action("get_all_cookies", {context});
+ };
+
+ window.test_driver_internal.get_named_cookie = function(name, context=null) {
+ return create_action("get_named_cookie", {name, context});
+ };
+
+ window.test_driver_internal.minimize_window = function(context=null) {
+ return create_action("minimize_window", {context});
+ };
+
+ window.test_driver_internal.set_window_rect = function(rect, context=null) {
+ return create_action("set_window_rect", {rect, context});
+ };
+
+ window.test_driver_internal.send_keys = function(element, keys) {
+ const selector = get_selector(element);
+ const context = get_context(element);
+ return create_action("send_keys", {selector, keys, context});
+ };
+
+ window.test_driver_internal.action_sequence = function(actions, context=null) {
+ for (let actionSequence of actions) {
+ if (actionSequence.type == "pointer") {
+ for (let action of actionSequence.actions) {
+ // The origin of each action can only be an element or one of the strings "viewport" or "pointer".
+ if (action.type == "pointerMove" && typeof(action.origin) != 'string') {
+ let action_context = get_context(action.origin);
+ action.origin = {selector: get_selector(action.origin)};
+ if (context !== null && action_context !== context) {
+ throw new Error("Actions must be in a single context");
+ }
+ context = action_context;
+ }
+ }
+ }
+ }
+ return create_action("action_sequence", {actions, context});
+ };
+
+ window.test_driver_internal.generate_test_report = function(message, context=null) {
+ return create_action("generate_test_report", {message, context});
+ };
+
+ window.test_driver_internal.set_permission = function(permission_params, context=null) {
+ return create_action("set_permission", {permission_params, context});
+ };
+
+ window.test_driver_internal.add_virtual_authenticator = function(config, context=null) {
+ return create_action("add_virtual_authenticator", {config, context});
+ };
+
+ window.test_driver_internal.remove_virtual_authenticator = function(authenticator_id, context=null) {
+ return create_action("remove_virtual_authenticator", {authenticator_id, context});
+ };
+
+ window.test_driver_internal.add_credential = function(authenticator_id, credential, context=null) {
+ return create_action("add_credential", {authenticator_id, credential, context});
+ };
+
+ window.test_driver_internal.get_credentials = function(authenticator_id, context=null) {
+ return create_action("get_credentials", {authenticator_id, context});
+ };
+
+ window.test_driver_internal.remove_credential = function(authenticator_id, credential_id, context=null) {
+ return create_action("remove_credential", {authenticator_id, credential_id, context});
+ };
+
+ window.test_driver_internal.remove_all_credentials = function(authenticator_id, context=null) {
+ return create_action("remove_all_credentials", {authenticator_id, context});
+ };
+
+ window.test_driver_internal.set_user_verified = function(authenticator_id, uv, context=null) {
+ return create_action("set_user_verified", {authenticator_id, uv, context});
+ };
+
+ window.test_driver_internal.set_spc_transaction_mode = function(mode, context = null) {
+ return create_action("set_spc_transaction_mode", {mode, context});
+ };
+})();
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/testdriver-vendor.js b/testing/web-platform/tests/tools/wptrunner/wptrunner/testdriver-vendor.js
new file mode 100644
index 0000000000..3e88403636
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/testdriver-vendor.js
@@ -0,0 +1 @@
+// This file intentionally left blank
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/testharness_runner.html b/testing/web-platform/tests/tools/wptrunner/wptrunner/testharness_runner.html
new file mode 100644
index 0000000000..1cc80a270e
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/testharness_runner.html
@@ -0,0 +1,6 @@
+<!doctype html>
+<title></title>
+<script>
+var timeout_multiplier = 1;
+var win = null;
+</script>
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport-content-shell.js b/testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport-content-shell.js
new file mode 100644
index 0000000000..e4693f9bc2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport-content-shell.js
@@ -0,0 +1,25 @@
+var props = {output:%(output)d, debug: %(debug)s};
+var start_loc = document.createElement('a');
+start_loc.href = location.href;
+setup(props);
+
+testRunner.dumpAsText();
+testRunner.waitUntilDone();
+testRunner.setPopupBlockingEnabled(false);
+testRunner.setDumpJavaScriptDialogs(false);
+
+add_completion_callback(function (tests, harness_status) {
+ var id = decodeURIComponent(start_loc.pathname) + decodeURIComponent(start_loc.search) + decodeURIComponent(start_loc.hash);
+ var result_string = JSON.stringify([
+ id,
+ harness_status.status,
+ harness_status.message,
+ harness_status.stack,
+ tests.map(function(t) {
+ return [t.name, t.status, t.message, t.stack]
+ }),
+ ]);
+
+ testRunner.setCustomTextOutput(result_string);
+ testRunner.notifyDone();
+});
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport-servo.js b/testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport-servo.js
new file mode 100644
index 0000000000..4a27dc27ef
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport-servo.js
@@ -0,0 +1,17 @@
+var props = {output:%(output)d, debug: %(debug)s};
+var start_loc = document.createElement('a');
+start_loc.href = location.href;
+setup(props);
+
+add_completion_callback(function (tests, harness_status) {
+ var id = decodeURIComponent(start_loc.pathname) + decodeURIComponent(start_loc.search) + decodeURIComponent(start_loc.hash);
+ console.log("ALERT: RESULT: " + JSON.stringify([
+ id,
+ harness_status.status,
+ harness_status.message,
+ harness_status.stack,
+ tests.map(function(t) {
+ return [t.name, t.status, t.message, t.stack]
+ }),
+ ]));
+});
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport-servodriver.js b/testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport-servodriver.js
new file mode 100644
index 0000000000..7819538dbb
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport-servodriver.js
@@ -0,0 +1,23 @@
+setup({output:%(output)d, debug: %(debug)s});
+
+add_completion_callback(function() {
+ add_completion_callback(function (tests, status) {
+ var subtest_results = tests.map(function(x) {
+ return [x.name, x.status, x.message, x.stack]
+ });
+ var id = location.pathname + location.search + location.hash;
+ var results = JSON.stringify([id,
+ status.status,
+ status.message,
+ status.stack,
+ subtest_results]);
+ (function done() {
+ if (window.__wd_results_callback__) {
+ clearTimeout(__wd_results_timer__);
+ __wd_results_callback__(results)
+ } else {
+ setTimeout(done, 20);
+ }
+ })()
+ })
+});
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport.js b/testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport.js
new file mode 100644
index 0000000000..d385692445
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/testharnessreport.js
@@ -0,0 +1,88 @@
+class MessageQueue {
+ constructor() {
+ this.item_id = 0;
+ this._queue = [];
+ }
+
+ push(item) {
+ let cmd_id = this.item_id++;
+ item.id = cmd_id;
+ this._queue.push(item);
+ __wptrunner_process_next_event();
+ return cmd_id;
+ }
+
+ shift() {
+ return this._queue.shift();
+ }
+}
+
+window.__wptrunner_testdriver_callback = null;
+window.__wptrunner_message_queue = new MessageQueue();
+window.__wptrunner_url = null;
+
+window.__wptrunner_process_next_event = function() {
+ /* This function handles the next testdriver event. The presence of
+ window.__wptrunner_testdriver_callback is used as a switch; when that
+ function is present we are able to handle the next event, and when it is
+ not present we must wait. Therefore, to drive the event processing, this
+ function must be called in two circumstances:
+ * Every time there is a new event that we may be able to handle
+ * Every time we set the callback function
+ This function unsets the callback, so no further testdriver actions
+ will be run until it is reset, which wptrunner does after it has
+ completed handling the current action.
+ */
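+ // In practice (an assumption about the callers, not enforced here) the two
+ // triggers are MessageQueue.push() above and wptrunner's executor assigning
+ // window.__wptrunner_testdriver_callback.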
+
+ if (!window.__wptrunner_testdriver_callback) {
+ return;
+ }
+ var data = window.__wptrunner_message_queue.shift();
+ if (!data) {
+ return;
+ }
+
+ var payload = undefined;
+
+ switch(data.type) {
+ case "complete":
+ var tests = data.tests;
+ var status = data.status;
+
+ var subtest_results = tests.map(function(x) {
+ return [x.name, x.status, x.message, x.stack];
+ });
+ payload = [status.status,
+ status.message,
+ status.stack,
+ subtest_results];
+ clearTimeout(window.__wptrunner_timer);
+ break;
+ case "action":
+ payload = data;
+ break;
+ default:
+ return;
+ }
+ var callback = window.__wptrunner_testdriver_callback;
+ window.__wptrunner_testdriver_callback = null;
+ callback([__wptrunner_url, data.type, payload]);
+};
+
+(function() {
+ var props = {output: %(output)d,
+ timeout_multiplier: %(timeout_multiplier)s,
+ explicit_timeout: %(explicit_timeout)s,
+ debug: %(debug)s,
+ message_events: ["completion"]};
+
+ add_completion_callback(function(tests, harness_status) {
+ __wptrunner_message_queue.push({
+ "type": "complete",
+ "tests": tests,
+ "status": harness_status});
+ __wptrunner_process_next_event();
+ });
+ setup(props);
+})();
+
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/testloader.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/testloader.py
new file mode 100644
index 0000000000..0cb5f499a9
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/testloader.py
@@ -0,0 +1,534 @@
+# mypy: allow-untyped-defs
+
+import hashlib
+import itertools
+import json
+import os
+from urllib.parse import urlsplit
+from abc import ABCMeta, abstractmethod
+from queue import Empty
+from collections import defaultdict, deque, namedtuple
+
+from . import manifestinclude
+from . import manifestexpected
+from . import mpcontext
+from . import wpttest
+from mozlog import structured
+
+manifest = None
+manifest_update = None
+download_from_github = None
+
+
+def do_delayed_imports():
+ # This relies on an already loaded module having set the sys.path correctly :(
+ global manifest, manifest_update, download_from_github
+ from manifest import manifest # type: ignore
+ from manifest import update as manifest_update
+ from manifest.download import download_from_github # type: ignore
+
+
+class TestGroupsFile:
+ """
+ Mapping object representing {group name: [test ids]}
+ """
+
+ def __init__(self, logger, path):
+ try:
+ with open(path) as f:
+ self._data = json.load(f)
+ except ValueError:
+ logger.critical("test groups file %s not valid json" % path)
+ raise
+
+ self.group_by_test = {}
+ for group, test_ids in self._data.items():
+ for test_id in test_ids:
+ self.group_by_test[test_id] = group
+
+ def __contains__(self, key):
+ return key in self._data
+
+ def __getitem__(self, key):
+ return self._data[key]
+
+def read_include_from_file(file):
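+ # The file is expected to contain one include entry (test id or path
+ # prefix) per line plus optional whole-line '#' comments, e.g.
+ # (hypothetical):
+ #   # run only these
+ #   /dom/historical.html
+ #   /css/css-grid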
+ new_include = []
+ with open(file) as f:
+ for line in f:
+ line = line.strip()
+ # Allow whole-line comments only; because test ids may contain '#'
+ # fragments, partial-line comments cannot be supported
+ if len(line) > 0 and not line.startswith("#"):
+ new_include.append(line)
+ return new_include
+
+def update_include_for_groups(test_groups, include):
+ if include is None:
+ # We're just running everything
+ return
+
+ new_include = []
+ for item in include:
+ if item in test_groups:
+ new_include.extend(test_groups[item])
+ else:
+ new_include.append(item)
+ return new_include
+
+
+class TestChunker:
+ def __init__(self, total_chunks, chunk_number, **kwargs):
+ self.total_chunks = total_chunks
+ self.chunk_number = chunk_number
+ assert self.chunk_number <= self.total_chunks
+ self.logger = structured.get_default_logger()
+ assert self.logger
+ self.kwargs = kwargs
+
+ def __call__(self, manifest):
+ raise NotImplementedError
+
+
+class Unchunked(TestChunker):
+ def __init__(self, *args, **kwargs):
+ TestChunker.__init__(self, *args, **kwargs)
+ assert self.total_chunks == 1
+
+ def __call__(self, manifest, **kwargs):
+ yield from manifest
+
+
+class HashChunker(TestChunker):
+ def __call__(self, manifest):
+ chunk_index = self.chunk_number - 1
+ for test_type, test_path, tests in manifest:
+ h = int(hashlib.md5(test_path.encode()).hexdigest(), 16)
+ if h % self.total_chunks == chunk_index:
+ yield test_type, test_path, tests
+
+
+class DirectoryHashChunker(TestChunker):
+ """Like HashChunker except the directory is hashed.
+
+ This ensures that all tests in the same directory end up in the same
+ chunk.
+ """
+ def __call__(self, manifest):
+ chunk_index = self.chunk_number - 1
+ depth = self.kwargs.get("depth")
+ for test_type, test_path, tests in manifest:
+ if depth:
+ hash_path = os.path.sep.join(os.path.dirname(test_path).split(os.path.sep, depth)[:depth])
+ else:
+ hash_path = os.path.dirname(test_path)
+ h = int(hashlib.md5(hash_path.encode()).hexdigest(), 16)
+ if h % self.total_chunks == chunk_index:
+ yield test_type, test_path, tests
+
+
+class TestFilter:
+ """Callable that restricts the set of tests in a given manifest according
+ to initial criteria"""
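+ # Illustrative only (hypothetical paths): include=["/dom"] keeps just the
+ # tests under /dom, exclude=["/dom/historical.html"] then drops that single
+ # test, and explicit=True skips anything not explicitly included.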
+ def __init__(self, test_manifests, include=None, exclude=None, manifest_path=None, explicit=False):
+ if manifest_path is None or include or explicit:
+ self.manifest = manifestinclude.IncludeManifest.create()
+ self.manifest.set_defaults()
+ else:
+ self.manifest = manifestinclude.get_manifest(manifest_path)
+
+ if include or explicit:
+ self.manifest.set("skip", "true")
+
+ if include:
+ for item in include:
+ self.manifest.add_include(test_manifests, item)
+
+ if exclude:
+ for item in exclude:
+ self.manifest.add_exclude(test_manifests, item)
+
+ def __call__(self, manifest_iter):
+ for test_type, test_path, tests in manifest_iter:
+ include_tests = set()
+ for test in tests:
+ if self.manifest.include(test):
+ include_tests.add(test)
+
+ if include_tests:
+ yield test_type, test_path, include_tests
+
+
+class TagFilter:
+ def __init__(self, tags):
+ self.tags = set(tags)
+
+ def __call__(self, test_iter):
+ for test in test_iter:
+ if test.tags & self.tags:
+ yield test
+
+
+class ManifestLoader:
+ def __init__(self, test_paths, force_manifest_update=False, manifest_download=False,
+ types=None):
+ do_delayed_imports()
+ self.test_paths = test_paths
+ self.force_manifest_update = force_manifest_update
+ self.manifest_download = manifest_download
+ self.types = types
+ self.logger = structured.get_default_logger()
+ if self.logger is None:
+ self.logger = structured.structuredlog.StructuredLogger("ManifestLoader")
+
+ def load(self):
+ rv = {}
+ for url_base, paths in self.test_paths.items():
+ manifest_file = self.load_manifest(url_base=url_base,
+ **paths)
+ path_data = {"url_base": url_base}
+ path_data.update(paths)
+ rv[manifest_file] = path_data
+ return rv
+
+ def load_manifest(self, tests_path, manifest_path, metadata_path, url_base="/", **kwargs):
+ cache_root = os.path.join(metadata_path, ".cache")
+ if self.manifest_download:
+ download_from_github(manifest_path, tests_path)
+ return manifest.load_and_update(tests_path, manifest_path, url_base,
+ cache_root=cache_root, update=self.force_manifest_update,
+ types=self.types)
+
+
+def iterfilter(filters, iter):
+ for f in filters:
+ iter = f(iter)
+ yield from iter
+
+
+class TestLoader:
+ """Loads tests according to a WPT manifest and any associated expectation files"""
+ def __init__(self,
+ test_manifests,
+ test_types,
+ run_info,
+ manifest_filters=None,
+ chunk_type="none",
+ total_chunks=1,
+ chunk_number=1,
+ include_https=True,
+ include_h2=True,
+ include_webtransport_h3=False,
+ skip_timeout=False,
+ skip_implementation_status=None,
+ chunker_kwargs=None):
+
+ self.test_types = test_types
+ self.run_info = run_info
+
+ self.manifest_filters = manifest_filters if manifest_filters is not None else []
+
+ self.manifests = test_manifests
+ self.tests = None
+ self.disabled_tests = None
+ self.include_https = include_https
+ self.include_h2 = include_h2
+ self.include_webtransport_h3 = include_webtransport_h3
+ self.skip_timeout = skip_timeout
+ self.skip_implementation_status = skip_implementation_status
+
+ self.chunk_type = chunk_type
+ self.total_chunks = total_chunks
+ self.chunk_number = chunk_number
+
+ if chunker_kwargs is None:
+ chunker_kwargs = {}
+ self.chunker = {"none": Unchunked,
+ "hash": HashChunker,
+ "dir_hash": DirectoryHashChunker}[chunk_type](total_chunks,
+ chunk_number,
+ **chunker_kwargs)
+
+ self._test_ids = None
+
+ self.directory_manifests = {}
+
+ self._load_tests()
+
+ @property
+ def test_ids(self):
+ if self._test_ids is None:
+ self._test_ids = []
+ for test_dict in [self.disabled_tests, self.tests]:
+ for test_type in self.test_types:
+ self._test_ids += [item.id for item in test_dict[test_type]]
+ return self._test_ids
+
+ def get_test(self, manifest_file, manifest_test, inherit_metadata, test_metadata):
+ if test_metadata is not None:
+ inherit_metadata.append(test_metadata)
+ test_metadata = test_metadata.get_test(manifest_test.id)
+
+ return wpttest.from_manifest(manifest_file, manifest_test, inherit_metadata, test_metadata)
+
+ def load_dir_metadata(self, test_manifest, metadata_path, test_path):
+ rv = []
+ path_parts = os.path.dirname(test_path).split(os.path.sep)
+ for i in range(len(path_parts) + 1):
+ path = os.path.join(metadata_path, os.path.sep.join(path_parts[:i]), "__dir__.ini")
+ if path not in self.directory_manifests:
+ self.directory_manifests[path] = manifestexpected.get_dir_manifest(path,
+ self.run_info)
+ manifest = self.directory_manifests[path]
+ if manifest is not None:
+ rv.append(manifest)
+ return rv
+
+ def load_metadata(self, test_manifest, metadata_path, test_path):
+ inherit_metadata = self.load_dir_metadata(test_manifest, metadata_path, test_path)
+ test_metadata = manifestexpected.get_manifest(
+ metadata_path, test_path, test_manifest.url_base, self.run_info)
+ return inherit_metadata, test_metadata
+
+ def iter_tests(self):
+ manifest_items = []
+ manifests_by_url_base = {}
+
+ for manifest in sorted(self.manifests.keys(), key=lambda x:x.url_base):
+ manifest_iter = iterfilter(self.manifest_filters,
+ manifest.itertypes(*self.test_types))
+ manifest_items.extend(manifest_iter)
+ manifests_by_url_base[manifest.url_base] = manifest
+
+ if self.chunker is not None:
+ manifest_items = self.chunker(manifest_items)
+
+ for test_type, test_path, tests in manifest_items:
+ manifest_file = manifests_by_url_base[next(iter(tests)).url_base]
+ metadata_path = self.manifests[manifest_file]["metadata_path"]
+
+ inherit_metadata, test_metadata = self.load_metadata(manifest_file, metadata_path, test_path)
+ for test in tests:
+ yield test_path, test_type, self.get_test(manifest_file, test, inherit_metadata, test_metadata)
+
+ def _load_tests(self):
+ """Read in the tests from the manifest file and add them to a queue"""
+ tests = {"enabled":defaultdict(list),
+ "disabled":defaultdict(list)}
+
+ for test_path, test_type, test in self.iter_tests():
+ enabled = not test.disabled()
+ if not self.include_https and test.environment["protocol"] == "https":
+ enabled = False
+ if not self.include_h2 and test.environment["protocol"] == "h2":
+ enabled = False
+ if self.skip_timeout and test.expected() == "TIMEOUT":
+ enabled = False
+ if self.skip_implementation_status and test.implementation_status() in self.skip_implementation_status:
+ enabled = False
+ key = "enabled" if enabled else "disabled"
+ tests[key][test_type].append(test)
+
+ self.tests = tests["enabled"]
+ self.disabled_tests = tests["disabled"]
+
+ def groups(self, test_types, chunk_type="none", total_chunks=1, chunk_number=1):
+ groups = set()
+
+ for test_type in test_types:
+ for test in self.tests[test_type]:
+ group = test.url.split("/")[1]
+ groups.add(group)
+
+ return groups
+
+
+def get_test_src(**kwargs):
+ test_source_kwargs = {"processes": kwargs["processes"],
+ "logger": kwargs["logger"]}
+ chunker_kwargs = {}
+ if kwargs["run_by_dir"] is not False:
+ # A value of None indicates infinite depth
+ test_source_cls = PathGroupedSource
+ test_source_kwargs["depth"] = kwargs["run_by_dir"]
+ chunker_kwargs["depth"] = kwargs["run_by_dir"]
+ elif kwargs["test_groups"]:
+ test_source_cls = GroupFileTestSource
+ test_source_kwargs["test_groups"] = kwargs["test_groups"]
+ else:
+ test_source_cls = SingleTestSource
+ return test_source_cls, test_source_kwargs, chunker_kwargs
+
+
+TestGroup = namedtuple("TestGroup", ["group", "test_type", "metadata"])
+
+
+class TestSource:
+ __metaclass__ = ABCMeta
+
+ def __init__(self, test_queue):
+ self.test_queue = test_queue
+ self.current_group = TestGroup(None, None, None)
+ self.logger = structured.get_default_logger()
+ if self.logger is None:
+ self.logger = structured.structuredlog.StructuredLogger("TestSource")
+
+ @abstractmethod
+ #@classmethod (doesn't compose with @abstractmethod in < 3.3)
+ def make_queue(cls, tests_by_type, **kwargs): # noqa: N805
+ pass
+
+ @abstractmethod
+ def tests_by_group(cls, tests_by_type, **kwargs): # noqa: N805
+ pass
+
+ @classmethod
+ def group_metadata(cls, state):
+ return {"scope": "/"}
+
+ def group(self):
+ if not self.current_group.group or len(self.current_group.group) == 0:
+ try:
+ self.current_group = self.test_queue.get(block=True, timeout=5)
+ except Empty:
+ self.logger.warning("Timed out getting test group from queue")
+ return TestGroup(None, None, None)
+ return self.current_group
+
+ @classmethod
+ def add_sentinal(cls, test_queue, num_of_workers):
+ # add one sentinel for each worker
+ for _ in range(num_of_workers):
+ test_queue.put(TestGroup(None, None, None))
+
+
+class GroupedSource(TestSource):
+ @classmethod
+ def new_group(cls, state, test_type, test, **kwargs):
+ raise NotImplementedError
+
+ @classmethod
+ def make_queue(cls, tests_by_type, **kwargs):
+ mp = mpcontext.get_context()
+ test_queue = mp.Queue()
+ groups = []
+
+ state = {}
+
+ for test_type, tests in tests_by_type.items():
+ for test in tests:
+ if cls.new_group(state, test_type, test, **kwargs):
+ group_metadata = cls.group_metadata(state)
+ groups.append(TestGroup(deque(), test_type, group_metadata))
+
+ group, _, metadata = groups[-1]
+ group.append(test)
+ test.update_metadata(metadata)
+
+ for item in groups:
+ test_queue.put(item)
+ cls.add_sentinal(test_queue, kwargs["processes"])
+ return test_queue
+
+ @classmethod
+ def tests_by_group(cls, tests_by_type, **kwargs):
+ groups = defaultdict(list)
+ state = {}
+ current = None
+ for test_type, tests in tests_by_type.items():
+ for test in tests:
+ if cls.new_group(state, test_type, test, **kwargs):
+ current = cls.group_metadata(state)['scope']
+ groups[current].append(test.id)
+ return groups
+
+
+class SingleTestSource(TestSource):
+ @classmethod
+ def make_queue(cls, tests_by_type, **kwargs):
+ mp = mpcontext.get_context()
+ test_queue = mp.Queue()
+ for test_type, tests in tests_by_type.items():
+ processes = kwargs["processes"]
+ queues = [deque([]) for _ in range(processes)]
+ metadatas = [cls.group_metadata(None) for _ in range(processes)]
+ for test in tests:
+ idx = hash(test.id) % processes
+ group = queues[idx]
+ metadata = metadatas[idx]
+ group.append(test)
+ test.update_metadata(metadata)
+
+ for item in zip(queues, itertools.repeat(test_type), metadatas):
+ if len(item[0]) > 0:
+ test_queue.put(TestGroup(*item))
+ cls.add_sentinal(test_queue, kwargs["processes"])
+
+ return test_queue
+
+ @classmethod
+ def tests_by_group(cls, tests_by_type, **kwargs):
+ return {cls.group_metadata(None)['scope']:
+ [t.id for t in itertools.chain.from_iterable(tests_by_type.values())]}
+
+
+class PathGroupedSource(GroupedSource):
+ @classmethod
+ def new_group(cls, state, test_type, test, **kwargs):
+ depth = kwargs.get("depth")
+ if depth is True or depth == 0:
+ depth = None
+ path = urlsplit(test.url).path.split("/")[1:-1][:depth]
+ rv = (test_type != state.get("prev_test_type") or
+ path != state.get("prev_path"))
+ state["prev_test_type"] = test_type
+ state["prev_path"] = path
+ return rv
+
+ @classmethod
+ def group_metadata(cls, state):
+ return {"scope": "/%s" % "/".join(state["prev_path"])}
+
+
+class GroupFileTestSource(TestSource):
+ @classmethod
+ def make_queue(cls, tests_by_type, **kwargs):
+ mp = mpcontext.get_context()
+ test_queue = mp.Queue()
+
+ for test_type, tests in tests_by_type.items():
+ tests_by_group = cls.tests_by_group({test_type: tests},
+ **kwargs)
+
+ ids_to_tests = {test.id: test for test in tests}
+
+ for group_name, test_ids in tests_by_group.items():
+ group_metadata = {"scope": group_name}
+ group = deque()
+
+ for test_id in test_ids:
+ test = ids_to_tests[test_id]
+ group.append(test)
+ test.update_metadata(group_metadata)
+
+ test_queue.put(TestGroup(group, test_type, group_metadata))
+
+ cls.add_sentinal(test_queue, kwargs["processes"])
+
+ return test_queue
+
+ @classmethod
+ def tests_by_group(cls, tests_by_type, **kwargs):
+ logger = kwargs["logger"]
+ test_groups = kwargs["test_groups"]
+
+ tests_by_group = defaultdict(list)
+ for test in itertools.chain.from_iterable(tests_by_type.values()):
+ try:
+ group = test_groups.group_by_test[test.id]
+ except KeyError:
+ logger.error("%s is missing from test groups file" % test.id)
+ raise
+ tests_by_group[group].append(test.id)
+
+ return tests_by_group
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/testrunner.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/testrunner.py
new file mode 100644
index 0000000000..82ffc9b84c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/testrunner.py
@@ -0,0 +1,984 @@
+# mypy: allow-untyped-defs
+
+import threading
+import traceback
+from queue import Empty
+from collections import namedtuple
+
+from mozlog import structuredlog, capture
+
+from . import mpcontext
+
+# Special value used as a sentinel in various commands
+Stop = object()
+
+
+def release_mozlog_lock():
+ try:
+ from mozlog.structuredlog import StructuredLogger
+ try:
+ StructuredLogger._lock.release()
+ except threading.ThreadError:
+ pass
+ except ImportError:
+ pass
+
+
+TestImplementation = namedtuple('TestImplementation',
+ ['executor_cls', 'executor_kwargs',
+ 'browser_cls', 'browser_kwargs'])
+
+
+class LogMessageHandler:
+ def __init__(self, send_message):
+ self.send_message = send_message
+
+ def __call__(self, data):
+ self.send_message("log", data)
+
+
+class TestRunner:
+ """Class implementing the main loop for running tests.
+
+ This class delegates the job of actually running a test to the executor
+ that is passed in.
+
+ :param logger: Structured logger
+ :param command_queue: multiprocessing.Queue used to send commands to the
+ process
+ :param result_queue: multiprocessing.Queue used to send results to the
+ parent TestRunnerManager process
+ :param executor: TestExecutor object that will actually run a test.
+ """
+ def __init__(self, logger, command_queue, result_queue, executor, recording):
+ self.command_queue = command_queue
+ self.result_queue = result_queue
+
+ self.executor = executor
+ self.name = mpcontext.get_context().current_process().name
+ self.logger = logger
+ self.recording = recording
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.teardown()
+
+ def setup(self):
+ self.logger.debug("Executor setup")
+ try:
+ self.executor.setup(self)
+ except Exception:
+ # The caller is responsible for logging the exception if required
+ self.send_message("init_failed")
+ else:
+ self.send_message("init_succeeded")
+ self.logger.debug("Executor setup done")
+
+ def teardown(self):
+ self.executor.teardown()
+ self.send_message("runner_teardown")
+ self.result_queue = None
+ self.command_queue = None
+ self.browser = None
+
+ def run(self):
+ """Main loop accepting commands over the pipe and triggering
+ the associated methods"""
+ try:
+ self.setup()
+ except Exception:
+ self.logger.warning("An error occured during executor setup:\n%s" %
+ traceback.format_exc())
+ raise
+ commands = {"run_test": self.run_test,
+ "reset": self.reset,
+ "stop": self.stop,
+ "wait": self.wait}
+ while True:
+ command, args = self.command_queue.get()
+ try:
+ rv = commands[command](*args)
+ except Exception:
+ self.send_message("error",
+ "Error running command %s with arguments %r:\n%s" %
+ (command, args, traceback.format_exc()))
+ else:
+ if rv is Stop:
+ break
+
+ def stop(self):
+ return Stop
+
+ def reset(self):
+ self.executor.reset()
+
+ def run_test(self, test):
+ try:
+ return self.executor.run_test(test)
+ except Exception:
+ self.logger.error(traceback.format_exc())
+ raise
+
+ def wait(self):
+ rerun = self.executor.wait()
+ self.send_message("wait_finished", rerun)
+
+ def send_message(self, command, *args):
+ self.result_queue.put((command, args))
+
+
+def start_runner(runner_command_queue, runner_result_queue,
+ executor_cls, executor_kwargs,
+ executor_browser_cls, executor_browser_kwargs,
+ capture_stdio, stop_flag, recording):
+ """Launch a TestRunner in a new process"""
+
+ def send_message(command, *args):
+ runner_result_queue.put((command, args))
+
+ def handle_error(e):
+ logger.critical(traceback.format_exc())
+ stop_flag.set()
+
+ # Ensure that when we start this in a new process we have the global lock
+ # in the logging module unlocked
+ release_mozlog_lock()
+
+ proc_name = mpcontext.get_context().current_process().name
+ logger = structuredlog.StructuredLogger(proc_name)
+ logger.add_handler(LogMessageHandler(send_message))
+
+ with capture.CaptureIO(logger, capture_stdio):
+ try:
+ browser = executor_browser_cls(**executor_browser_kwargs)
+ executor = executor_cls(logger, browser, **executor_kwargs)
+ with TestRunner(logger, runner_command_queue, runner_result_queue, executor, recording) as runner:
+ try:
+ runner.run()
+ except KeyboardInterrupt:
+ stop_flag.set()
+ except Exception as e:
+ handle_error(e)
+ except Exception as e:
+ handle_error(e)
+
+
+class BrowserManager:
+ def __init__(self, logger, browser, command_queue, no_timeout=False):
+ self.logger = logger
+ self.browser = browser
+ self.no_timeout = no_timeout
+ self.browser_settings = None
+ self.last_test = None
+
+ self.started = False
+
+ self.init_timer = None
+ self.command_queue = command_queue
+
+ def update_settings(self, test):
+ browser_settings = self.browser.settings(test)
+ restart_required = ((self.browser_settings is not None and
+ self.browser_settings != browser_settings) or
+ (self.last_test != test and test.expected() == "CRASH"))
+ self.browser_settings = browser_settings
+ self.last_test = test
+ return restart_required
+
+ def init(self, group_metadata):
+ """Launch the browser that is being tested,
+ and the TestRunner process that will run the tests."""
+ # It seems that this lock is helpful to prevent some race that otherwise
+ # sometimes stops the spawned processes initialising correctly, and
+ # leaves this thread hung
+ if self.init_timer is not None:
+ self.init_timer.cancel()
+
+ self.logger.debug("Init called, starting browser and runner")
+
+ if not self.no_timeout:
+ self.init_timer = threading.Timer(self.browser.init_timeout,
+ self.init_timeout)
+ try:
+ if self.init_timer is not None:
+ self.init_timer.start()
+ self.logger.debug("Starting browser with settings %r" % self.browser_settings)
+ self.browser.start(group_metadata=group_metadata, **self.browser_settings)
+ self.browser_pid = self.browser.pid
+ except Exception:
+ self.logger.warning("Failure during init %s" % traceback.format_exc())
+ if self.init_timer is not None:
+ self.init_timer.cancel()
+ self.logger.error(traceback.format_exc())
+ succeeded = False
+ else:
+ succeeded = True
+ self.started = True
+
+ return succeeded
+
+ def send_message(self, command, *args):
+ self.command_queue.put((command, args))
+
+ def init_timeout(self):
+ # This is called from a separate thread, so we send a message to the
+ # main loop so we get back onto the manager thread
+ self.logger.debug("init_failed called from timer")
+ self.send_message("init_failed")
+
+ def after_init(self):
+ """Callback when we have started the browser, started the remote
+ control connection, and we are ready to start testing."""
+ if self.init_timer is not None:
+ self.init_timer.cancel()
+
+ def stop(self, force=False):
+ self.browser.stop(force=force)
+ self.started = False
+
+ def cleanup(self):
+ if self.init_timer is not None:
+ self.init_timer.cancel()
+
+ def check_crash(self, test_id):
+ return self.browser.check_crash(process=self.browser_pid, test=test_id)
+
+ def is_alive(self):
+ return self.browser.is_alive()
+
+
+class _RunnerManagerState:
+ before_init = namedtuple("before_init", [])
+ initializing = namedtuple("initializing",
+ ["test_type", "test", "test_group",
+ "group_metadata", "failure_count"])
+ running = namedtuple("running", ["test_type", "test", "test_group", "group_metadata"])
+ restarting = namedtuple("restarting", ["test_type", "test", "test_group",
+ "group_metadata", "force_stop"])
+ error = namedtuple("error", [])
+ stop = namedtuple("stop", ["force_stop"])
+
+
+RunnerManagerState = _RunnerManagerState()
+
+
+class TestRunnerManager(threading.Thread):
+ def __init__(self, suite_name, index, test_queue, test_source_cls,
+ test_implementation_by_type, stop_flag, rerun=1,
+ pause_after_test=False, pause_on_unexpected=False,
+ restart_on_unexpected=True, debug_info=None,
+ capture_stdio=True, restart_on_new_group=True, recording=None):
+ """Thread that owns a single TestRunner process and any processes required
+ by the TestRunner (e.g. the Firefox binary).
+
+ TestRunnerManagers are responsible for launching the browser process and the
+ runner process, and for logging the test progress. The actual test running
+ is done by the TestRunner. In particular they:
+
+ * Start the binary of the program under test
+ * Start the TestRunner
+ * Tell the TestRunner to start a test, if any
+ * Log that the test started
+ * Log the test results
+ * Take any remedial action required e.g. restart crashed or hung
+ processes
+ """
+ self.suite_name = suite_name
+
+ self.test_source = test_source_cls(test_queue)
+
+ self.manager_number = index
+ self.test_type = None
+
+ self.test_implementation_by_type = {}
+ for test_type, test_implementation in test_implementation_by_type.items():
+ kwargs = test_implementation.browser_kwargs
+ if kwargs.get("device_serial"):
+ kwargs = kwargs.copy()
+ # Assign Android device to runner according to current manager index
+ kwargs["device_serial"] = kwargs["device_serial"][index]
+ self.test_implementation_by_type[test_type] = TestImplementation(
+ test_implementation.executor_cls,
+ test_implementation.executor_kwargs,
+ test_implementation.browser_cls,
+ kwargs)
+ else:
+ self.test_implementation_by_type[test_type] = test_implementation
+
+ mp = mpcontext.get_context()
+
+ # Flags used to shut down this thread if we get a sigint
+ self.parent_stop_flag = stop_flag
+ self.child_stop_flag = mp.Event()
+
+ self.rerun = rerun
+ self.run_count = 0
+ self.pause_after_test = pause_after_test
+ self.pause_on_unexpected = pause_on_unexpected
+ self.restart_on_unexpected = restart_on_unexpected
+ self.debug_info = debug_info
+
+ assert recording is not None
+ self.recording = recording
+
+ self.command_queue = mp.Queue()
+ self.remote_queue = mp.Queue()
+
+ self.test_runner_proc = None
+
+ threading.Thread.__init__(self, name="TestRunnerManager-%s-%i" % (test_type, index))
+ # This is started in the actual new thread
+ self.logger = None
+
+ self.test_count = 0
+ self.unexpected_tests = set()
+ self.unexpected_pass_tests = set()
+
+ # This may not really be what we want
+ self.daemon = True
+
+ self.timer = None
+
+ self.max_restarts = 5
+
+ self.browser = None
+
+ self.capture_stdio = capture_stdio
+ self.restart_on_new_group = restart_on_new_group
+
+ def run(self):
+ """Main loop for the TestRunnerManager.
+
+ TestRunnerManagers generally receive commands from their
+ TestRunner updating them on the status of a test. They
+ may also have a stop flag set by the main thread indicating
+ that the manager should shut down the next time the event loop
+ spins."""
+ self.recording.set(["testrunner", "startup"])
+ self.logger = structuredlog.StructuredLogger(self.suite_name)
+ dispatch = {
+ RunnerManagerState.before_init: self.start_init,
+ RunnerManagerState.initializing: self.init,
+ RunnerManagerState.running: self.run_test,
+ RunnerManagerState.restarting: self.restart_runner,
+ }
+
+ self.state = RunnerManagerState.before_init()
+ end_states = (RunnerManagerState.stop,
+ RunnerManagerState.error)
+
+ try:
+ while not isinstance(self.state, end_states):
+ f = dispatch.get(self.state.__class__)
+ while f:
+ self.logger.debug(f"Dispatch {f.__name__}")
+ if self.should_stop():
+ return
+ new_state = f()
+ if new_state is None:
+ break
+ self.state = new_state
+ self.logger.debug(f"new state: {self.state.__class__.__name__}")
+ if isinstance(self.state, end_states):
+ return
+ f = dispatch.get(self.state.__class__)
+
+ new_state = None
+ while new_state is None:
+ new_state = self.wait_event()
+ if self.should_stop():
+ return
+ self.state = new_state
+ self.logger.debug(f"new state: {self.state.__class__.__name__}")
+ except Exception:
+ self.logger.error(traceback.format_exc())
+ raise
+ finally:
+ self.logger.debug("TestRunnerManager main loop terminating, starting cleanup")
+ force_stop = (not isinstance(self.state, RunnerManagerState.stop) or
+ self.state.force_stop)
+ self.stop_runner(force=force_stop)
+ self.teardown()
+ if self.browser is not None:
+ assert self.browser.browser is not None
+ self.browser.browser.cleanup()
+ self.logger.debug("TestRunnerManager main loop terminated")
+
+ def wait_event(self):
+ dispatch = {
+ RunnerManagerState.before_init: {},
+ RunnerManagerState.initializing:
+ {
+ "init_succeeded": self.init_succeeded,
+ "init_failed": self.init_failed,
+ },
+ RunnerManagerState.running:
+ {
+ "test_ended": self.test_ended,
+ "wait_finished": self.wait_finished,
+ },
+ RunnerManagerState.restarting: {},
+ RunnerManagerState.error: {},
+ RunnerManagerState.stop: {},
+ None: {
+ "runner_teardown": self.runner_teardown,
+ "log": self.log,
+ "error": self.error
+ }
+ }
+ try:
+ command, data = self.command_queue.get(True, 1)
+ self.logger.debug("Got command: %r" % command)
+ except OSError:
+ self.logger.error("Got IOError from poll")
+ return RunnerManagerState.restarting(self.state.test_type,
+ self.state.test,
+ self.state.test_group,
+ self.state.group_metadata,
+ False)
+ except Empty:
+ if (self.debug_info and self.debug_info.interactive and
+ self.browser.started and not self.browser.is_alive()):
+ self.logger.debug("Debugger exited")
+ return RunnerManagerState.stop(False)
+
+ if (isinstance(self.state, RunnerManagerState.running) and
+ not self.test_runner_proc.is_alive()):
+ if not self.command_queue.empty():
+ # We got a new message so process that
+ return
+
+ # If we got to here the runner presumably shut down
+ # unexpectedly
+ self.logger.info("Test runner process shut down")
+
+ if self.state.test is not None:
+ # This could happen if the test runner crashed for some other
+ # reason
+ # Need to consider the unlikely case where one test causes the
+ # runner process to repeatedly die
+ self.logger.critical("Last test did not complete")
+ return RunnerManagerState.error()
+ self.logger.warning("More tests found, but runner process died, restarting")
+ return RunnerManagerState.restarting(self.state.test_type,
+ self.state.test,
+ self.state.test_group,
+ self.state.group_metadata,
+ False)
+ else:
+ f = (dispatch.get(self.state.__class__, {}).get(command) or
+ dispatch.get(None, {}).get(command))
+ if not f:
+ self.logger.warning("Got command %s in state %s" %
+ (command, self.state.__class__.__name__))
+ return
+ return f(*data)
+
+ def should_stop(self):
+ return self.child_stop_flag.is_set() or self.parent_stop_flag.is_set()
+
+ def start_init(self):
+ test_type, test, test_group, group_metadata = self.get_next_test()
+ self.recording.set(["testrunner", "init"])
+ if test is None:
+ return RunnerManagerState.stop(True)
+ else:
+ return RunnerManagerState.initializing(test_type, test, test_group, group_metadata, 0)
+
+ def init(self):
+ assert isinstance(self.state, RunnerManagerState.initializing)
+ if self.state.failure_count > self.max_restarts:
+ self.logger.critical("Max restarts exceeded")
+ return RunnerManagerState.error()
+
+ if self.state.test_type != self.test_type:
+ if self.browser is not None:
+ assert self.browser.browser is not None
+ self.browser.browser.cleanup()
+ impl = self.test_implementation_by_type[self.state.test_type]
+ browser = impl.browser_cls(self.logger, remote_queue=self.command_queue,
+ **impl.browser_kwargs)
+ browser.setup()
+ self.browser = BrowserManager(self.logger,
+ browser,
+ self.command_queue,
+ no_timeout=self.debug_info is not None)
+ self.test_type = self.state.test_type
+
+ assert self.browser is not None
+ self.browser.update_settings(self.state.test)
+
+ result = self.browser.init(self.state.group_metadata)
+ if result is Stop:
+ return RunnerManagerState.error()
+ elif not result:
+ return RunnerManagerState.initializing(self.state.test_type,
+ self.state.test,
+ self.state.test_group,
+ self.state.group_metadata,
+ self.state.failure_count + 1)
+ else:
+ self.start_test_runner()
+
+ def start_test_runner(self):
+ # Note that we need to be careful to start the browser before the
+ # test runner to ensure that any state set when the browser is started
+ # can be passed in to the test runner.
+ assert isinstance(self.state, RunnerManagerState.initializing)
+ assert self.command_queue is not None
+ assert self.remote_queue is not None
+ self.logger.info("Starting runner")
+ impl = self.test_implementation_by_type[self.state.test_type]
+ self.executor_cls = impl.executor_cls
+ self.executor_kwargs = impl.executor_kwargs
+ self.executor_kwargs["group_metadata"] = self.state.group_metadata
+ self.executor_kwargs["browser_settings"] = self.browser.browser_settings
+ executor_browser_cls, executor_browser_kwargs = self.browser.browser.executor_browser()
+
+ args = (self.remote_queue,
+ self.command_queue,
+ self.executor_cls,
+ self.executor_kwargs,
+ executor_browser_cls,
+ executor_browser_kwargs,
+ self.capture_stdio,
+ self.child_stop_flag,
+ self.recording)
+
+ mp = mpcontext.get_context()
+ self.test_runner_proc = mp.Process(target=start_runner,
+ args=args,
+ name="TestRunner-%s-%i" % (
+ self.test_type, self.manager_number))
+ self.test_runner_proc.start()
+ self.logger.debug("Test runner started")
+ # Now we wait for either an init_succeeded event or an init_failed event
+
+ def init_succeeded(self):
+ assert isinstance(self.state, RunnerManagerState.initializing)
+ self.browser.after_init()
+ return RunnerManagerState.running(self.state.test_type,
+ self.state.test,
+ self.state.test_group,
+ self.state.group_metadata)
+
+ def init_failed(self):
+ assert isinstance(self.state, RunnerManagerState.initializing)
+ self.browser.check_crash(None)
+ self.browser.after_init()
+ self.stop_runner(force=True)
+ return RunnerManagerState.initializing(self.state.test_type,
+ self.state.test,
+ self.state.test_group,
+ self.state.group_metadata,
+ self.state.failure_count + 1)
+
+ def get_next_test(self):
+ # returns test_type, test, test_group, group_metadata
+ test = None
+ test_group = None
+ while test is None:
+ while test_group is None or len(test_group) == 0:
+ test_group, test_type, group_metadata = self.test_source.group()
+ if test_group is None:
+ self.logger.info("No more tests")
+ return None, None, None, None
+ test = test_group.popleft()
+ self.run_count = 0
+ return test_type, test, test_group, group_metadata
+
+ def run_test(self):
+ assert isinstance(self.state, RunnerManagerState.running)
+ assert self.state.test is not None
+
+ if self.browser.update_settings(self.state.test):
+ self.logger.info("Restarting browser for new test environment")
+ return RunnerManagerState.restarting(self.state.test_type,
+ self.state.test,
+ self.state.test_group,
+ self.state.group_metadata,
+ False)
+
+ self.recording.set(["testrunner", "test"] + self.state.test.id.split("/")[1:])
+ self.logger.test_start(self.state.test.id)
+ if self.rerun > 1:
+ self.logger.info("Run %d/%d" % (self.run_count, self.rerun))
+ self.send_message("reset")
+ self.run_count += 1
+ if self.debug_info is None:
+ # Factor of 3 on the extra timeout here is based on allowing the executor
+ # at least test.timeout + 2 * extra_timeout to complete,
+ # which in turn is based on having several layers of timeout inside the executor
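+ # e.g. with test.timeout=10s, timeout_multiplier=1 and extra_timeout=5s (illustrative numbers)
+ # this gives wait_timeout = 10 * 1 + 3 * 5 = 25s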
+ wait_timeout = (self.state.test.timeout * self.executor_kwargs['timeout_multiplier'] +
+ 3 * self.executor_cls.extra_timeout)
+ self.timer = threading.Timer(wait_timeout, self._timeout)
+
+ self.send_message("run_test", self.state.test)
+ if self.timer:
+ self.timer.start()
+
+ def _timeout(self):
+ # This is executed in a different thread (threading.Timer).
+ self.logger.info("Got timeout in harness")
+ test = self.state.test
+ self.inject_message(
+ "test_ended",
+ test,
+ (test.result_cls("EXTERNAL-TIMEOUT",
+ "TestRunner hit external timeout "
+ "(this may indicate a hang)"), []),
+ )
+
+ def test_ended(self, test, results):
+ """Handle the end of a test.
+
+ Output the result of each subtest, and the result of the overall
+ harness to the logs.
+ """
+ if ((not isinstance(self.state, RunnerManagerState.running)) or
+ (test != self.state.test)):
+ # Due to inherent race conditions in EXTERNAL-TIMEOUT, we might
+ # receive multiple test_ended for a test (e.g. from both Executor
+ # and TestRunner), in which case we ignore the duplicate message.
+ self.logger.error("Received unexpected test_ended for %s" % test)
+ return
+ if self.timer is not None:
+ self.timer.cancel()
+
+ # Write the result of each subtest
+ file_result, test_results = results
+ subtest_unexpected = False
+ subtest_all_pass_or_expected = True
+ for result in test_results:
+ if test.disabled(result.name):
+ continue
+ expected = test.expected(result.name)
+ known_intermittent = test.known_intermittent(result.name)
+ is_unexpected = expected != result.status and result.status not in known_intermittent
+ is_expected_notrun = (expected == "NOTRUN" or "NOTRUN" in known_intermittent)
+
+ if is_unexpected:
+ subtest_unexpected = True
+
+ if result.status != "PASS" and not is_expected_notrun:
+ # A result where "NOTRUN" was expected still counts as a pass
+ # for the purposes of the unexpected-pass calculation.
+ subtest_all_pass_or_expected = False
+
+ self.logger.test_status(test.id,
+ result.name,
+ result.status,
+ message=result.message,
+ expected=expected,
+ known_intermittent=known_intermittent,
+ stack=result.stack)
+
+ expected = test.expected()
+ known_intermittent = test.known_intermittent()
+ status = file_result.status
+
+ if self.browser.check_crash(test.id) and status != "CRASH":
+ if test.test_type == "crashtest" or status == "EXTERNAL-TIMEOUT":
+ self.logger.info("Found a crash dump file; changing status to CRASH")
+ status = "CRASH"
+ else:
+ self.logger.warning(f"Found a crash dump; should change status from {status} to CRASH but this causes instability")
+
+ # We have a couple of status codes that are used internally, but not exposed to the
+ # user. These are used to indicate that some possibly-broken state was reached
+ # and we should restart the runner before the next test.
+ # INTERNAL-ERROR indicates a Python exception was caught in the harness
+ # EXTERNAL-TIMEOUT indicates we had to forcibly kill the browser from the harness
+ # because the test didn't return a result after reaching the test-internal timeout
+ status_subns = {"INTERNAL-ERROR": "ERROR",
+ "EXTERNAL-TIMEOUT": "TIMEOUT"}
+ status = status_subns.get(status, status)
+
+ self.test_count += 1
+ is_unexpected = expected != status and status not in known_intermittent
+
+ if is_unexpected or subtest_unexpected:
+ self.unexpected_tests.add(test.id)
+
+ # A result is unexpected pass if the test or any subtest run
+ # unexpectedly, and the overall status is OK (for test harness test), or
+ # PASS (for reftest), and all unexpected results for subtests (if any) are
+ # unexpected pass.
+ is_unexpected_pass = ((is_unexpected or subtest_unexpected) and
+ status in ["OK", "PASS"] and subtest_all_pass_or_expected)
+ if is_unexpected_pass:
+ self.unexpected_pass_tests.add(test.id)
+
+ if "assertion_count" in file_result.extra:
+ assertion_count = file_result.extra["assertion_count"]
+ if assertion_count is not None and assertion_count > 0:
+ self.logger.assertion_count(test.id,
+ int(assertion_count),
+ test.min_assertion_count,
+ test.max_assertion_count)
+
+ file_result.extra["test_timeout"] = test.timeout * self.executor_kwargs['timeout_multiplier']
+
+ self.logger.test_end(test.id,
+ status,
+ message=file_result.message,
+ expected=expected,
+ known_intermittent=known_intermittent,
+ extra=file_result.extra,
+ stack=file_result.stack)
+
+ restart_before_next = (test.restart_after or
+ file_result.status in ("CRASH", "EXTERNAL-TIMEOUT", "INTERNAL-ERROR") or
+ ((subtest_unexpected or is_unexpected) and
+ self.restart_on_unexpected))
+ force_stop = test.test_type == "wdspec" and file_result.status == "EXTERNAL-TIMEOUT"
+
+ self.recording.set(["testrunner", "after-test"])
+ if (not file_result.status == "CRASH" and
+ self.pause_after_test or
+ (self.pause_on_unexpected and (subtest_unexpected or is_unexpected))):
+ self.logger.info("Pausing until the browser exits")
+ self.send_message("wait")
+ else:
+ return self.after_test_end(test, restart_before_next, force_stop=force_stop)
+
+ def wait_finished(self, rerun=False):
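+ """Handle the end of a pause requested via the "wait" message, once the browser has exited."""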
+ assert isinstance(self.state, RunnerManagerState.running)
+ self.logger.debug("Wait finished")
+
+ # The browser should be stopped already, but this ensures we do any
+ # post-stop processing
+ return self.after_test_end(self.state.test, not rerun, force_rerun=rerun)
+
+ def after_test_end(self, test, restart, force_rerun=False, force_stop=False):
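+ """Decide the next state once a test has finished: rerun it, move on to the next test, restart the browser, or stop."""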
+ assert isinstance(self.state, RunnerManagerState.running)
+ # Mixing manual reruns and automatic reruns is confusing; we currently assume
+ # that as long as we've done at least the automatic run count in total we can
+ # continue with the next test.
+ if not force_rerun and self.run_count >= self.rerun:
+ test_type, test, test_group, group_metadata = self.get_next_test()
+ if test is None:
+ return RunnerManagerState.stop(force_stop)
+ if test_type != self.state.test_type:
+ self.logger.info(f"Restarting browser for new test type:{test_type}")
+ restart = True
+ elif self.restart_on_new_group and test_group is not self.state.test_group:
+ self.logger.info("Restarting browser for new test group")
+ restart = True
+ else:
+ test_type = self.state.test_type
+ test_group = self.state.test_group
+ group_metadata = self.state.group_metadata
+
+ if restart:
+ return RunnerManagerState.restarting(
+ test_type, test, test_group, group_metadata, force_stop)
+ else:
+ return RunnerManagerState.running(
+ test_type, test, test_group, group_metadata)
+
+ def restart_runner(self):
+ """Stop and restart the TestRunner"""
+ assert isinstance(self.state, RunnerManagerState.restarting)
+ self.stop_runner(force=self.state.force_stop)
+ return RunnerManagerState.initializing(
+ self.state.test_type, self.state.test,
+ self.state.test_group, self.state.group_metadata, 0)
+
+ def log(self, data):
+ self.logger.log_raw(data)
+
+ def error(self, message):
+ self.logger.error(message)
+ self.restart_runner()
+
+ def stop_runner(self, force=False):
+ """Stop the TestRunner and the browser binary."""
+ self.recording.set(["testrunner", "stop_runner"])
+ if self.test_runner_proc is None:
+ return
+
+ if self.test_runner_proc.is_alive():
+ self.send_message("stop")
+ try:
+ self.browser.stop(force=force)
+ self.ensure_runner_stopped()
+ finally:
+ self.cleanup()
+
+ def teardown(self):
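+ """Final cleanup once the run loop exits: drop the runner process reference, close both queues and pause recording."""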
+ self.logger.debug("TestRunnerManager teardown")
+ self.test_runner_proc = None
+ self.command_queue.close()
+ self.remote_queue.close()
+ self.command_queue = None
+ self.remote_queue = None
+ self.recording.pause()
+
+ def ensure_runner_stopped(self):
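+ """Wait for the TestRunner process to exit, terminating it (and recreating the queues) if it does not stop cleanly."""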
+ self.logger.debug("ensure_runner_stopped")
+ if self.test_runner_proc is None:
+ return
+
+ self.browser.stop(force=True)
+ self.logger.debug("waiting for runner process to end")
+ self.test_runner_proc.join(10)
+ self.logger.debug("After join")
+ mp = mpcontext.get_context()
+ if self.test_runner_proc.is_alive():
+ # This might leak a file handle from the queue
+ self.logger.warning("Forcibly terminating runner process")
+ self.test_runner_proc.terminate()
+ self.logger.debug("After terminating runner process")
+
+ # Multiprocessing queues are backed by operating system pipes. If
+ # the pipe in the child process had buffered data at the time of
+ # forced termination, the queue is no longer in a usable state
+ # (subsequent attempts to retrieve items may block indefinitely).
+ # Discard the potentially-corrupted queue and create a new one.
+ self.logger.debug("Recreating command queue")
+ self.command_queue.cancel_join_thread()
+ self.command_queue.close()
+ self.command_queue = mp.Queue()
+ self.logger.debug("Recreating remote queue")
+ self.remote_queue.cancel_join_thread()
+ self.remote_queue.close()
+ self.remote_queue = mp.Queue()
+ else:
+ self.logger.debug("Runner process exited with code %i" % self.test_runner_proc.exitcode)
+
+ def runner_teardown(self):
+ self.ensure_runner_stopped()
+ return RunnerManagerState.stop(False)
+
+ def send_message(self, command, *args):
+ """Send a message to the remote queue (to Executor)."""
+ self.remote_queue.put((command, args))
+
+ def inject_message(self, command, *args):
+ """Inject a message to the command queue (from Executor)."""
+ self.command_queue.put((command, args))
+
+ def cleanup(self):
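+ """Clean up the browser and drain any commands left in the command and remote queues."""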
+ self.logger.debug("TestRunnerManager cleanup")
+ if self.browser:
+ self.browser.cleanup()
+ while True:
+ try:
+ cmd, data = self.command_queue.get_nowait()
+ except Empty:
+ break
+ else:
+ if cmd == "log":
+ self.log(*data)
+ elif cmd == "runner_teardown":
+ # It's OK for the "runner_teardown" message to be left in
+ # the queue during cleanup, as we will already have tried
+ # to stop the TestRunner in `stop_runner`.
+ pass
+ else:
+ self.logger.warning(f"Command left in command_queue during cleanup: {cmd!r}, {data!r}")
+ while True:
+ try:
+ cmd, data = self.remote_queue.get_nowait()
+ self.logger.warning(f"Command left in remote_queue during cleanup: {cmd!r}, {data!r}")
+ except Empty:
+ break
+
+
+def make_test_queue(tests, test_source_cls, **test_source_kwargs):
+ queue = test_source_cls.make_queue(tests, **test_source_kwargs)
+
+ # There is a race condition that means sometimes we continue
+ # before the tests have been written to the underlying pipe.
+ # Polling the pipe for data here avoids that.
+ queue._reader.poll(10)
+ assert not queue.empty()
+ return queue
+
+
+class ManagerGroup:
+ """Main thread object that owns all the TestRunnerManager threads."""
+ def __init__(self, suite_name, size, test_source_cls, test_source_kwargs,
+ test_implementation_by_type,
+ rerun=1,
+ pause_after_test=False,
+ pause_on_unexpected=False,
+ restart_on_unexpected=True,
+ debug_info=None,
+ capture_stdio=True,
+ restart_on_new_group=True,
+ recording=None):
+ self.suite_name = suite_name
+ self.size = size
+ self.test_source_cls = test_source_cls
+ self.test_source_kwargs = test_source_kwargs
+ self.test_implementation_by_type = test_implementation_by_type
+ self.pause_after_test = pause_after_test
+ self.pause_on_unexpected = pause_on_unexpected
+ self.restart_on_unexpected = restart_on_unexpected
+ self.debug_info = debug_info
+ self.rerun = rerun
+ self.capture_stdio = capture_stdio
+ self.restart_on_new_group = restart_on_new_group
+ self.recording = recording
+ assert recording is not None
+
+ self.pool = set()
+ # Event that is polled by threads so that they can gracefully exit in the face
+ # of SIGINT
+ self.stop_flag = threading.Event()
+ self.logger = structuredlog.StructuredLogger(suite_name)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.stop()
+
+ def run(self, tests):
+ """Start all managers in the group"""
+ self.logger.debug("Using %i processes" % self.size)
+
+ test_queue = make_test_queue(tests, self.test_source_cls, **self.test_source_kwargs)
+
+ for idx in range(self.size):
+ manager = TestRunnerManager(self.suite_name,
+ idx,
+ test_queue,
+ self.test_source_cls,
+ self.test_implementation_by_type,
+ self.stop_flag,
+ self.rerun,
+ self.pause_after_test,
+ self.pause_on_unexpected,
+ self.restart_on_unexpected,
+ self.debug_info,
+ self.capture_stdio,
+ self.restart_on_new_group,
+ recording=self.recording)
+ manager.start()
+ self.pool.add(manager)
+ self.wait()
+
+ def wait(self):
+ """Wait for all the managers in the group to finish"""
+ for manager in self.pool:
+ manager.join()
+
+ def stop(self):
+ """Set the stop flag so that all managers in the group stop as soon
+ as possible"""
+ self.stop_flag.set()
+ self.logger.debug("Stop flag set in ManagerGroup")
+
+ def test_count(self):
+ return sum(manager.test_count for manager in self.pool)
+
+ def unexpected_tests(self):
+ return set().union(*(manager.unexpected_tests for manager in self.pool))
+
+ def unexpected_pass_tests(self):
+ return set().union(*(manager.unexpected_pass_tests for manager in self.pool))
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/__init__.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/__init__.py
new file mode 100644
index 0000000000..b4a26cee9b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/__init__.py
@@ -0,0 +1,9 @@
+# mypy: ignore-errors
+
+import os
+import sys
+
+here = os.path.abspath(os.path.dirname(__file__))
+sys.path.insert(0, os.path.join(here, os.pardir, os.pardir, os.pardir))
+
+import localpaths as _localpaths # noqa: F401
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/base.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/base.py
new file mode 100644
index 0000000000..176eef6a42
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/base.py
@@ -0,0 +1,63 @@
+# mypy: allow-untyped-defs
+
+import os
+import sys
+
+from os.path import dirname, join
+
+import pytest
+
+sys.path.insert(0, join(dirname(__file__), "..", ".."))
+
+from .. import browsers
+
+
+_products = browsers.product_list
+_active_products = set()
+
+if "CURRENT_TOX_ENV" in os.environ:
+ current_tox_env_split = os.environ["CURRENT_TOX_ENV"].split("-")
+
+ tox_env_extra_browsers = {
+ "chrome": {"chrome_android"},
+ "edge": {"edge_webdriver"},
+ "servo": {"servodriver"},
+ }
+
+ _active_products = set(_products) & set(current_tox_env_split)
+ for product in frozenset(_active_products):
+ _active_products |= tox_env_extra_browsers.get(product, set())
+else:
+ _active_products = set(_products)
+
+
+class all_products:
+ def __init__(self, arg, marks={}):
+ self.arg = arg
+ self.marks = marks
+
+ def __call__(self, f):
+ params = []
+ for product in _products:
+ if product in self.marks:
+ params.append(pytest.param(product, marks=self.marks[product]))
+ else:
+ params.append(product)
+ return pytest.mark.parametrize(self.arg, params)(f)
+
+
+class active_products:
+ def __init__(self, arg, marks={}):
+ self.arg = arg
+ self.marks = marks
+
+ def __call__(self, f):
+ params = []
+ for product in _products:
+ if product not in _active_products:
+ params.append(pytest.param(product, marks=pytest.mark.skip(reason="wrong toxenv")))
+ elif product in self.marks:
+ params.append(pytest.param(product, marks=self.marks[product]))
+ else:
+ params.append(product)
+ return pytest.mark.parametrize(self.arg, params)(f)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/browsers/__init__.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/browsers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/browsers/__init__.py
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/browsers/test_sauce.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/browsers/test_sauce.py
new file mode 100644
index 0000000000..a9d11fc9d9
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/browsers/test_sauce.py
@@ -0,0 +1,170 @@
+# mypy: allow-untyped-defs
+
+import logging
+import sys
+from unittest import mock
+
+import pytest
+
+from os.path import join, dirname
+
+sys.path.insert(0, join(dirname(__file__), "..", "..", ".."))
+
+sauce = pytest.importorskip("wptrunner.browsers.sauce")
+
+from wptserve.config import ConfigBuilder
+
+logger = logging.getLogger()
+
+
+def test_sauceconnect_success():
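+ # With the tunnel process "running" and the ready file present, the connection should start and stop without raising.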
+ with mock.patch.object(sauce.SauceConnect, "upload_prerun_exec"),\
+ mock.patch.object(sauce.subprocess, "Popen") as Popen,\
+ mock.patch.object(sauce.os.path, "exists") as exists:
+ # Act as if it's still running
+ Popen.return_value.poll.return_value = None
+ Popen.return_value.returncode = None
+ # Act as if file created
+ exists.return_value = True
+
+ sauce_connect = sauce.SauceConnect(
+ sauce_user="aaa",
+ sauce_key="bbb",
+ sauce_tunnel_id="ccc",
+ sauce_connect_binary="ddd",
+ sauce_connect_args=[])
+
+ with ConfigBuilder(logger, browser_host="example.net") as env_config:
+ sauce_connect(None, env_config)
+ with sauce_connect:
+ pass
+
+
+@pytest.mark.parametrize("readyfile,returncode", [
+ (True, 0),
+ (True, 1),
+ (True, 2),
+ (False, 0),
+ (False, 1),
+ (False, 2),
+])
+def test_sauceconnect_failure_exit(readyfile, returncode):
+ with mock.patch.object(sauce.SauceConnect, "upload_prerun_exec"),\
+ mock.patch.object(sauce.subprocess, "Popen") as Popen,\
+ mock.patch.object(sauce.os.path, "exists") as exists,\
+ mock.patch.object(sauce.time, "sleep") as sleep:
+ Popen.return_value.poll.return_value = returncode
+ Popen.return_value.returncode = returncode
+ exists.return_value = readyfile
+
+ sauce_connect = sauce.SauceConnect(
+ sauce_user="aaa",
+ sauce_key="bbb",
+ sauce_tunnel_id="ccc",
+ sauce_connect_binary="ddd",
+ sauce_connect_args=[])
+
+ with ConfigBuilder(logger, browser_host="example.net") as env_config:
+ sauce_connect(None, env_config)
+ with pytest.raises(sauce.SauceException):
+ with sauce_connect:
+ pass
+
+ # Given we appear to exit immediately with these mocks, sleep shouldn't be called
+ sleep.assert_not_called()
+
+
+def test_sauceconnect_cleanup():
+ """Ensure that execution pauses when the process is closed while exiting
+ the context manager. This allow Sauce Connect to close any active
+ tunnels."""
+ with mock.patch.object(sauce.SauceConnect, "upload_prerun_exec"),\
+ mock.patch.object(sauce.subprocess, "Popen") as Popen,\
+ mock.patch.object(sauce.os.path, "exists") as exists,\
+ mock.patch.object(sauce.time, "sleep") as sleep:
+ Popen.return_value.poll.return_value = True
+ Popen.return_value.returncode = None
+ exists.return_value = True
+
+ sauce_connect = sauce.SauceConnect(
+ sauce_user="aaa",
+ sauce_key="bbb",
+ sauce_tunnel_id="ccc",
+ sauce_connect_binary="ddd",
+ sauce_connect_args=[])
+
+ with ConfigBuilder(logger, browser_host="example.net") as env_config:
+ sauce_connect(None, env_config)
+ with sauce_connect:
+ Popen.return_value.poll.return_value = None
+ sleep.assert_not_called()
+
+ sleep.assert_called()
+
+
+def test_sauceconnect_failure_never_ready():
+ with mock.patch.object(sauce.SauceConnect, "upload_prerun_exec"),\
+ mock.patch.object(sauce.subprocess, "Popen") as Popen,\
+ mock.patch.object(sauce.os.path, "exists") as exists,\
+ mock.patch.object(sauce.time, "sleep") as sleep:
+ Popen.return_value.poll.return_value = None
+ Popen.return_value.returncode = None
+ exists.return_value = False
+
+ sauce_connect = sauce.SauceConnect(
+ sauce_user="aaa",
+ sauce_key="bbb",
+ sauce_tunnel_id="ccc",
+ sauce_connect_binary="ddd",
+ sauce_connect_args=[])
+
+ with ConfigBuilder(logger, browser_host="example.net") as env_config:
+ sauce_connect(None, env_config)
+ with pytest.raises(sauce.SauceException):
+ with sauce_connect:
+ pass
+
+ # We should sleep while waiting for it to create the readyfile
+ sleep.assert_called()
+
+ # Check we actually kill it after termination fails
+ Popen.return_value.terminate.assert_called()
+ Popen.return_value.kill.assert_called()
+
+
+def test_sauceconnect_tunnel_domains():
+ with mock.patch.object(sauce.SauceConnect, "upload_prerun_exec"),\
+ mock.patch.object(sauce.subprocess, "Popen") as Popen,\
+ mock.patch.object(sauce.os.path, "exists") as exists:
+ Popen.return_value.poll.return_value = None
+ Popen.return_value.returncode = None
+ exists.return_value = True
+
+ sauce_connect = sauce.SauceConnect(
+ sauce_user="aaa",
+ sauce_key="bbb",
+ sauce_tunnel_id="ccc",
+ sauce_connect_binary="ddd",
+ sauce_connect_args=[])
+
+ with ConfigBuilder(logger,
+ browser_host="example.net",
+ alternate_hosts={"alt": "example.org"},
+ subdomains={"a", "b"},
+ not_subdomains={"x", "y"}) as env_config:
+ sauce_connect(None, env_config)
+ with sauce_connect:
+ Popen.assert_called_once()
+ args, kwargs = Popen.call_args
+ cmd = args[0]
+ assert "--tunnel-domains" in cmd
+ i = cmd.index("--tunnel-domains")
+ rest = cmd[i+1:]
+ assert len(rest) >= 1
+ if len(rest) > 1:
+ assert rest[1].startswith("-"), "--tunnel-domains takes a comma-separated list (not a space-separated list)"
+ assert set(rest[0].split(",")) == {'example.net',
+ 'a.example.net',
+ 'b.example.net',
+ 'example.org',
+ 'a.example.org',
+ 'b.example.org'}
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/browsers/test_webkitgtk.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/browsers/test_webkitgtk.py
new file mode 100644
index 0000000000..370cd86293
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/browsers/test_webkitgtk.py
@@ -0,0 +1,74 @@
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+import logging
+from os.path import join, dirname
+
+import pytest
+
+from wptserve.config import ConfigBuilder
+from ..base import active_products
+from wptrunner import environment, products
+
+test_paths = {"/": {"tests_path": join(dirname(__file__), "..", "..", "..", "..", "..")}} # repo root
+environment.do_delayed_imports(None, test_paths)
+
+logger = logging.getLogger()
+
+
+@active_products("product")
+def test_webkitgtk_certificate_domain_list(product):
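+ """Check that the host certificate is registered for every domain generated from the server configuration."""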
+
+ def domain_is_inside_certificate_list_cert(domain_to_find, webkitgtk_certificate_list, cert_file):
+ for domain in webkitgtk_certificate_list:
+ if domain["host"] == domain_to_find and domain["certificateFile"] == cert_file:
+ return True
+ return False
+
+ if product not in ["epiphany", "webkit", "webkitgtk_minibrowser"]:
+ pytest.skip("%s doesn't support certificate_domain_list" % product)
+
+ product_data = products.Product({}, product)
+
+ cert_file = "/home/user/wpt/tools/certs/cacert.pem"
+ valid_domains_test = ["a.example.org", "b.example.org", "example.org",
+ "a.example.net", "b.example.net", "example.net"]
+ invalid_domains_test = ["x.example.org", "y.example.org", "example.it",
+ "x.example.net", "y.example.net", "z.example.net"]
+ kwargs = {}
+ kwargs["timeout_multiplier"] = 1
+ kwargs["debug_info"] = None
+ kwargs["host_cert_path"] = cert_file
+ kwargs["webkit_port"] = "gtk"
+ kwargs["binary"] = None
+ kwargs["webdriver_binary"] = None
+ kwargs["pause_after_test"] = False
+ kwargs["pause_on_unexpected"] = False
+ kwargs["debug_test"] = False
+ with ConfigBuilder(logger,
+ browser_host="example.net",
+ alternate_hosts={"alt": "example.org"},
+ subdomains={"a", "b"},
+ not_subdomains={"x", "y"}) as env_config:
+
+ # We don't want to actually create a test environment; the get_executor_kwargs
+ # function only really wants an object with the config key
+
+ class MockEnvironment:
+ def __init__(self, config):
+ self.config = config
+
+ executor_args = product_data.get_executor_kwargs(None,
+ None,
+ MockEnvironment(env_config),
+ {},
+ **kwargs)
+ assert 'capabilities' in executor_args
+ assert 'webkitgtk:browserOptions' in executor_args['capabilities']
+ assert 'certificates' in executor_args['capabilities']['webkitgtk:browserOptions']
+ cert_list = executor_args['capabilities']['webkitgtk:browserOptions']['certificates']
+ for valid_domain in valid_domains_test:
+ assert domain_is_inside_certificate_list_cert(valid_domain, cert_list, cert_file)
+ assert not domain_is_inside_certificate_list_cert(valid_domain, cert_list, cert_file + ".backup_non_existent")
+ for invalid_domain in invalid_domains_test:
+ assert not domain_is_inside_certificate_list_cert(invalid_domain, cert_list, cert_file)
+ assert not domain_is_inside_certificate_list_cert(invalid_domain, cert_list, cert_file + ".backup_non_existent")
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_executors.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_executors.py
new file mode 100644
index 0000000000..682a34e5df
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_executors.py
@@ -0,0 +1,17 @@
+# mypy: allow-untyped-defs
+
+import pytest
+
+from ..executors import base
+
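+# A bound of None makes a page range open-ended; ranges beyond total_pages are ignored.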
+@pytest.mark.parametrize("ranges_value, total_pages, expected", [
+ ([], 3, {1, 2, 3}),
+ ([[1, 2]], 3, {1, 2}),
+ ([[1], [3, 4]], 5, {1, 3, 4}),
+ ([[1],[3]], 5, {1, 3}),
+ ([[2, None]], 5, {2, 3, 4, 5}),
+ ([[None, 2]], 5, {1, 2}),
+ ([[None, 2], [2, None]], 5, {1, 2, 3, 4, 5}),
+ ([[1], [6, 7], [8]], 5, {1})])
+def test_get_pages_valid(ranges_value, total_pages, expected):
+ assert base.get_pages(ranges_value, total_pages) == expected
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_expectedtree.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_expectedtree.py
new file mode 100644
index 0000000000..b8a1120246
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_expectedtree.py
@@ -0,0 +1,120 @@
+# mypy: allow-untyped-defs
+
+from .. import expectedtree, metadata
+from collections import defaultdict
+
+
+def dump_tree(tree):
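+ """Render the expectation tree as an indented string for easy comparison in tests."""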
+ rv = []
+
+ def dump_node(node, indent=0):
+ prefix = " " * indent
+ if not node.prop:
+ data = "root"
+ else:
+ data = f"{node.prop}:{node.value}"
+ if node.result_values:
+ data += " result_values:%s" % (",".join(sorted(node.result_values)))
+ rv.append(f"{prefix}<{data}>")
+ for child in sorted(node.children, key=lambda x:x.value):
+ dump_node(child, indent + 2)
+ dump_node(tree)
+ return "\n".join(rv)
+
+
+def results_object(results):
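+ """Convert (run_info, status) pairs into the nested per-run-info status counts expected by build_tree."""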
+ results_obj = defaultdict(lambda: defaultdict(int))
+ for run_info, status in results:
+ run_info = metadata.RunInfo(run_info)
+ results_obj[run_info][status] += 1
+ return results_obj
+
+
+def test_build_tree_0():
+ # Pass if debug
+ results = [({"os": "linux", "version": "18.04", "debug": True}, "FAIL"),
+ ({"os": "linux", "version": "18.04", "debug": False}, "PASS"),
+ ({"os": "linux", "version": "16.04", "debug": False}, "PASS"),
+ ({"os": "mac", "version": "10.12", "debug": True}, "FAIL"),
+ ({"os": "mac", "version": "10.12", "debug": False}, "PASS"),
+ ({"os": "win", "version": "7", "debug": False}, "PASS"),
+ ({"os": "win", "version": "10", "debug": False}, "PASS")]
+ results_obj = results_object(results)
+ tree = expectedtree.build_tree(["os", "version", "debug"], {}, results_obj)
+
+ expected = """<root>
+ <debug:False result_values:PASS>
+ <debug:True result_values:FAIL>"""
+
+ assert dump_tree(tree) == expected
+
+
+def test_build_tree_1():
+ # Pass if linux or windows 10
+ results = [({"os": "linux", "version": "18.04", "debug": True}, "PASS"),
+ ({"os": "linux", "version": "18.04", "debug": False}, "PASS"),
+ ({"os": "linux", "version": "16.04", "debug": False}, "PASS"),
+ ({"os": "mac", "version": "10.12", "debug": True}, "FAIL"),
+ ({"os": "mac", "version": "10.12", "debug": False}, "FAIL"),
+ ({"os": "win", "version": "7", "debug": False}, "FAIL"),
+ ({"os": "win", "version": "10", "debug": False}, "PASS")]
+ results_obj = results_object(results)
+ tree = expectedtree.build_tree(["os", "debug"], {"os": ["version"]}, results_obj)
+
+ expected = """<root>
+ <os:linux result_values:PASS>
+ <os:mac result_values:FAIL>
+ <os:win>
+ <version:10 result_values:PASS>
+ <version:7 result_values:FAIL>"""
+
+ assert dump_tree(tree) == expected
+
+
+def test_build_tree_2():
+ # Fails in a specific configuration
+ results = [({"os": "linux", "version": "18.04", "debug": True}, "PASS"),
+ ({"os": "linux", "version": "18.04", "debug": False}, "FAIL"),
+ ({"os": "linux", "version": "16.04", "debug": False}, "PASS"),
+ ({"os": "linux", "version": "16.04", "debug": True}, "PASS"),
+ ({"os": "mac", "version": "10.12", "debug": True}, "PASS"),
+ ({"os": "mac", "version": "10.12", "debug": False}, "PASS"),
+ ({"os": "win", "version": "7", "debug": False}, "PASS"),
+ ({"os": "win", "version": "10", "debug": False}, "PASS")]
+ results_obj = results_object(results)
+ tree = expectedtree.build_tree(["os", "debug"], {"os": ["version"]}, results_obj)
+
+ expected = """<root>
+ <os:linux>
+ <debug:False>
+ <version:16.04 result_values:PASS>
+ <version:18.04 result_values:FAIL>
+ <debug:True result_values:PASS>
+ <os:mac result_values:PASS>
+ <os:win result_values:PASS>"""
+
+ assert dump_tree(tree) == expected
+
+
+def test_build_tree_3():
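+ # "unused" is not among the requested properties, so the conflicting results collapse into the root node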
+
+ results = [({"os": "linux", "version": "18.04", "debug": True, "unused": False}, "PASS"),
+ ({"os": "linux", "version": "18.04", "debug": True, "unused": True}, "FAIL")]
+ results_obj = results_object(results)
+ tree = expectedtree.build_tree(["os", "debug"], {"os": ["version"]}, results_obj)
+
+ expected = """<root result_values:FAIL,PASS>"""
+
+ assert dump_tree(tree) == expected
+
+
+def test_build_tree_4():
+ # Check counts for multiple statuses
+ results = [({"os": "linux", "version": "18.04", "debug": False}, "FAIL"),
+ ({"os": "linux", "version": "18.04", "debug": False}, "PASS"),
+ ({"os": "linux", "version": "18.04", "debug": False}, "PASS")]
+ results_obj = results_object(results)
+ tree = expectedtree.build_tree(["os", "version", "debug"], {}, results_obj)
+
+ assert tree.result_values["PASS"] == 2
+ assert tree.result_values["FAIL"] == 1
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_formatters.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_formatters.py
new file mode 100644
index 0000000000..3f66f77bea
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_formatters.py
@@ -0,0 +1,152 @@
+# mypy: allow-untyped-defs
+
+import json
+import time
+from io import StringIO
+
+from mozlog import handlers, structuredlog
+
+from ..formatters.wptscreenshot import WptscreenshotFormatter
+from ..formatters.wptreport import WptreportFormatter
+
+
+def test_wptreport_runtime(capfd):
+ # setup the logger
+ output = StringIO()
+ logger = structuredlog.StructuredLogger("test_a")
+ logger.add_handler(handlers.StreamHandler(output, WptreportFormatter()))
+
+ # output a bunch of stuff
+ logger.suite_start(["test-id-1"], run_info={})
+ logger.test_start("test-id-1")
+ time.sleep(0.125)
+ logger.test_end("test-id-1", "PASS")
+ logger.suite_end()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_obj = json.load(output)
+ # be relatively lax in case of low resolution timers
+ # 62 is 0.125s = 125ms / 2 = 62ms (assuming int maths)
+ # this provides a margin of 62ms, sufficient for even DOS (55ms timer)
+ assert output_obj["results"][0]["duration"] >= 62
+
+
+def test_wptreport_run_info_optional(capfd):
+ """per the mozlog docs, run_info is optional; check we work without it"""
+ # setup the logger
+ output = StringIO()
+ logger = structuredlog.StructuredLogger("test_a")
+ logger.add_handler(handlers.StreamHandler(output, WptreportFormatter()))
+
+ # output a bunch of stuff
+ logger.suite_start(["test-id-1"]) # no run_info arg!
+ logger.test_start("test-id-1")
+ logger.test_end("test-id-1", "PASS")
+ logger.suite_end()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_obj = json.load(output)
+ assert "run_info" not in output_obj or output_obj["run_info"] == {}
+
+
+def test_wptreport_lone_surrogate(capfd):
+ output = StringIO()
+ logger = structuredlog.StructuredLogger("test_a")
+ logger.add_handler(handlers.StreamHandler(output, WptreportFormatter()))
+
+ # output a bunch of stuff
+ logger.suite_start(["test-id-1"]) # no run_info arg!
+ logger.test_start("test-id-1")
+ logger.test_status("test-id-1",
+ subtest="Name with surrogate\uD800",
+ status="FAIL",
+ message="\U0001F601 \uDE0A\uD83D")
+ logger.test_end("test-id-1",
+ status="PASS",
+ message="\uDE0A\uD83D \U0001F601")
+ logger.suite_end()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_obj = json.load(output)
+ test = output_obj["results"][0]
+ assert test["message"] == "U+de0aU+d83d \U0001F601"
+ subtest = test["subtests"][0]
+ assert subtest["name"] == "Name with surrogateU+d800"
+ assert subtest["message"] == "\U0001F601 U+de0aU+d83d"
+
+
+def test_wptreport_known_intermittent(capfd):
+ output = StringIO()
+ logger = structuredlog.StructuredLogger("test_a")
+ logger.add_handler(handlers.StreamHandler(output, WptreportFormatter()))
+
+ # output a bunch of stuff
+ logger.suite_start(["test-id-1"]) # no run_info arg!
+ logger.test_start("test-id-1")
+ logger.test_status("test-id-1",
+ "a-subtest",
+ status="FAIL",
+ expected="PASS",
+ known_intermittent=["FAIL"])
+ logger.test_end("test-id-1",
+ status="OK",)
+ logger.suite_end()
+
+ # check nothing got output to stdout/stderr
+ # (note that mozlog outputs exceptions during handling to stderr!)
+ captured = capfd.readouterr()
+ assert captured.out == ""
+ assert captured.err == ""
+
+ # check the actual output of the formatter
+ output.seek(0)
+ output_obj = json.load(output)
+ test = output_obj["results"][0]
+ assert test["status"] == "OK"
+ subtest = test["subtests"][0]
+ assert subtest["expected"] == "PASS"
+ assert subtest["known_intermittent"] == ['FAIL']
+
+
+def test_wptscreenshot_test_end(capfd):
+ formatter = WptscreenshotFormatter()
+
+ # Empty
+ data = {}
+ assert formatter.test_end(data) is None
+
+ # No items
+ data['extra'] = {"reftest_screenshots": []}
+ assert formatter.test_end(data) is None
+
+ # Invalid item
+ data['extra']['reftest_screenshots'] = ["no dict item"]
+ assert formatter.test_end(data) is None
+
+ # Random hash
+ data['extra']['reftest_screenshots'] = [{"hash": "HASH", "screenshot": "DATA"}]
+ assert 'data:image/png;base64,DATA\n' == formatter.test_end(data)
+
+ # Already cached hash
+ assert formatter.test_end(data) is None
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_manifestexpected.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_manifestexpected.py
new file mode 100644
index 0000000000..03f4fe8c9e
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_manifestexpected.py
@@ -0,0 +1,36 @@
+# mypy: allow-untyped-defs
+
+from io import BytesIO
+
+import pytest
+
+from .. import manifestexpected
+
+
+@pytest.mark.parametrize("fuzzy, expected", [
+ (b"ref.html:1;200", [("ref.html", ((1, 1), (200, 200)))]),
+ (b"ref.html:0-1;100-200", [("ref.html", ((0, 1), (100, 200)))]),
+ (b"0-1;100-200", [(None, ((0, 1), (100, 200)))]),
+ (b"maxDifference=1;totalPixels=200", [(None, ((1, 1), (200, 200)))]),
+ (b"totalPixels=200;maxDifference=1", [(None, ((1, 1), (200, 200)))]),
+ (b"totalPixels=200;1", [(None, ((1, 1), (200, 200)))]),
+ (b"maxDifference=1;200", [(None, ((1, 1), (200, 200)))]),
+ (b"test.html==ref.html:maxDifference=1;totalPixels=200",
+ [(("test.html", "ref.html", "=="), ((1, 1), (200, 200)))]),
+ (b"test.html!=ref.html:maxDifference=1;totalPixels=200",
+ [(("test.html", "ref.html", "!="), ((1, 1), (200, 200)))]),
+ (b"[test.html!=ref.html:maxDifference=1;totalPixels=200, test.html==ref1.html:maxDifference=5-10;100]",
+ [(("test.html", "ref.html", "!="), ((1, 1), (200, 200))),
+ (("test.html", "ref1.html", "=="), ((5,10), (100, 100)))]),
+])
+def test_fuzzy(fuzzy, expected):
+ data = b"""
+[test.html]
+ fuzzy: %s""" % fuzzy
+ f = BytesIO(data)
+ manifest = manifestexpected.static.compile(f,
+ {},
+ data_cls_getter=manifestexpected.data_cls_getter,
+ test_path="test/test.html",
+ url_base="/")
+ assert manifest.get_test("/test/test.html").fuzzy == expected
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_metadata.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_metadata.py
new file mode 100644
index 0000000000..ee3d90915d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_metadata.py
@@ -0,0 +1,47 @@
+import json
+import os
+
+import pytest
+
+from .. import metadata
+
+
+def write_properties(tmp_path, data): # type: ignore
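+ """Write an update_properties.json file containing data into tmp_path and return its path."""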
+ path = os.path.join(tmp_path, "update_properties.json")
+ with open(path, "w") as f:
+ json.dump(data, f)
+ return path
+
+@pytest.mark.parametrize("data",
+ [{"properties": ["prop1"]}, # type: ignore
+ {"properties": ["prop1"], "dependents": {"prop1": ["prop2"]}},
+ ])
+def test_get_properties_file_valid(tmp_path, data):
+ path = write_properties(tmp_path, data)
+ expected = data["properties"], data.get("dependents", {})
+ actual = metadata.get_properties(properties_file=path)
+ assert actual == expected
+
+@pytest.mark.parametrize("data",
+ [{}, # type: ignore
+ {"properties": "prop1"},
+ {"properties": None},
+ {"properties": ["prop1", 1]},
+ {"dependents": {"prop1": ["prop1"]}},
+ {"properties": "prop1", "dependents": ["prop1"]},
+ {"properties": "prop1", "dependents": None},
+ {"properties": "prop1", "dependents": {"prop1": ["prop2", 2]}},
+ {"properties": ["prop1"], "dependents": {"prop2": ["prop3"]}},
+ ])
+def test_get_properties_file_invalid(tmp_path, data):
+ path = write_properties(tmp_path, data)
+ with pytest.raises(ValueError):
+ metadata.get_properties(properties_file=path)
+
+
+def test_extra_properties(tmp_path): # type: ignore
+ data = {"properties": ["prop1"], "dependents": {"prop1": ["prop2"]}}
+ path = write_properties(tmp_path, data)
+ actual = metadata.get_properties(properties_file=path, extra_properties=["prop4"])
+ expected = ["prop1", "prop4"], {"prop1": ["prop2"]}
+ assert actual == expected
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_products.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_products.py
new file mode 100644
index 0000000000..7f46c0e2d2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_products.py
@@ -0,0 +1,57 @@
+# mypy: allow-untyped-defs, allow-untyped-calls
+
+from os.path import join, dirname
+from unittest import mock
+
+import pytest
+
+from .base import all_products, active_products
+from .. import environment
+from .. import products
+
+test_paths = {"/": {"tests_path": join(dirname(__file__), "..", "..", "..", "..")}} # repo root
+environment.do_delayed_imports(None, test_paths)
+
+
+@active_products("product")
+def test_load_active_product(product):
+ """test we can successfully load the product of the current testenv"""
+ products.Product({}, product)
+ # test passes if it doesn't throw
+
+
+@all_products("product")
+def test_load_all_products(product):
+ """test every product either loads or throws ImportError"""
+ try:
+ products.Product({}, product)
+ except ImportError:
+ pass
+
+
+@active_products("product", marks={
+ "sauce": pytest.mark.skip("needs env extras kwargs"),
+})
+def test_server_start_config(product):
+ product_data = products.Product({}, product)
+
+ env_extras = product_data.get_env_extras()
+
+ with mock.patch.object(environment.serve, "start") as start:
+ with environment.TestEnvironment(test_paths,
+ 1,
+ False,
+ False,
+ None,
+ product_data.env_options,
+ {"type": "none"},
+ env_extras):
+ start.assert_called_once()
+ args = start.call_args
+ config = args[0][1]
+ if "server_host" in product_data.env_options:
+ assert config["server_host"] == product_data.env_options["server_host"]
+
+ else:
+ assert config["server_host"] == config["browser_host"]
+ assert isinstance(config["bind_address"], bool)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_stability.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_stability.py
new file mode 100644
index 0000000000..d6e7cc8f70
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_stability.py
@@ -0,0 +1,186 @@
+# mypy: allow-untyped-defs
+
+import sys
+from collections import OrderedDict, defaultdict
+from unittest import mock
+
+from mozlog.structuredlog import StructuredLogger
+from mozlog.formatters import TbplFormatter
+from mozlog.handlers import StreamHandler
+
+from .. import stability, wptrunner
+
+def test_is_inconsistent():
+ assert stability.is_inconsistent({"PASS": 10}, 10) is False
+ assert stability.is_inconsistent({"PASS": 9}, 10) is True
+ assert stability.is_inconsistent({"PASS": 9, "FAIL": 1}, 10) is True
+ assert stability.is_inconsistent({"PASS": 8, "FAIL": 1}, 10) is True
+
+
+def test_find_slow_status():
+ assert stability.find_slow_status({
+ "longest_duration": {"TIMEOUT": 10},
+ "timeout": 10}) is None
+ assert stability.find_slow_status({
+ "longest_duration": {"CRASH": 10},
+ "timeout": 10}) is None
+ assert stability.find_slow_status({
+ "longest_duration": {"ERROR": 10},
+ "timeout": 10}) is None
+ assert stability.find_slow_status({
+ "longest_duration": {"PASS": 1},
+ "timeout": 10}) is None
+ assert stability.find_slow_status({
+ "longest_duration": {"PASS": 81},
+ "timeout": 100}) == "PASS"
+ assert stability.find_slow_status({
+ "longest_duration": {"TIMEOUT": 10, "FAIL": 81},
+ "timeout": 100}) == "FAIL"
+ assert stability.find_slow_status({
+ "longest_duration": {"SKIP": 0}}) is None
+
+
+def test_get_steps():
+ logger = None
+
+ steps = stability.get_steps(logger, 0, 0, [])
+ assert len(steps) == 0
+
+ steps = stability.get_steps(logger, 0, 0, [{}])
+ assert len(steps) == 0
+
+ repeat_loop = 1
+ flag_name = 'flag'
+ flag_value = 'y'
+ steps = stability.get_steps(logger, repeat_loop, 0, [
+ {flag_name: flag_value}])
+ assert len(steps) == 1
+ assert steps[0][0] == "Running tests in a loop %d times with flags %s=%s" % (
+ repeat_loop, flag_name, flag_value)
+
+ repeat_loop = 0
+ repeat_restart = 1
+ flag_name = 'flag'
+ flag_value = 'n'
+ steps = stability.get_steps(logger, repeat_loop, repeat_restart, [
+ {flag_name: flag_value}])
+ assert len(steps) == 1
+ assert steps[0][0] == "Running tests in a loop with restarts %d times with flags %s=%s" % (
+ repeat_restart, flag_name, flag_value)
+
+ repeat_loop = 10
+ repeat_restart = 5
+ steps = stability.get_steps(logger, repeat_loop, repeat_restart, [{}])
+ assert len(steps) == 2
+ assert steps[0][0] == "Running tests in a loop %d times" % repeat_loop
+ assert steps[1][0] == (
+ "Running tests in a loop with restarts %d times" % repeat_restart)
+
+
+def test_log_handler():
+ handler = stability.LogHandler()
+ data = OrderedDict()
+ data["test"] = "test_name"
+ test = handler.find_or_create_test(data)
+ assert test["subtests"] == OrderedDict()
+ assert test["status"] == defaultdict(int)
+ assert test["longest_duration"] == defaultdict(float)
+ assert test == handler.find_or_create_test(data)
+
+ start_time = 100
+ data["time"] = start_time
+ handler.test_start(data)
+ assert test["start_time"] == start_time
+
+ data["subtest"] = "subtest_name"
+ subtest = handler.find_or_create_subtest(data)
+ assert subtest["status"] == defaultdict(int)
+ assert subtest["messages"] == set()
+ assert subtest == handler.find_or_create_subtest(data)
+
+ data["status"] = 0
+ assert subtest["status"][data["status"]] == 0
+ handler.test_status(data)
+ assert subtest["status"][data["status"]] == 1
+ handler.test_status(data)
+ assert subtest["status"][data["status"]] == 2
+ data["status"] = 1
+ assert subtest["status"][data["status"]] == 0
+ message = "test message"
+ data["message"] = message
+ handler.test_status(data)
+ assert subtest["status"][data["status"]] == 1
+ assert len(subtest["messages"]) == 1
+ assert message in subtest["messages"]
+
+ test_duration = 10
+ data["time"] = data["time"] + test_duration
+ handler.test_end(data)
+ assert test["longest_duration"][data["status"]] == test_duration
+ assert "timeout" not in test
+
+ data["test2"] = "test_name_2"
+ timeout = 5
+ data["extra"] = {}
+ data["extra"]["test_timeout"] = timeout
+ handler.test_start(data)
+ handler.test_end(data)
+ assert test["timeout"] == timeout * 1000
+
+
+def test_err_string():
+ assert stability.err_string(
+ {'OK': 1, 'FAIL': 1}, 1) == "**Duplicate subtest name**"
+ assert stability.err_string(
+ {'OK': 2, 'FAIL': 1}, 2) == "**Duplicate subtest name**"
+ assert stability.err_string({'SKIP': 1}, 0) == "Duplicate subtest name"
+ assert stability.err_string(
+ {'SKIP': 1, 'OK': 1}, 1) == "Duplicate subtest name"
+
+ assert stability.err_string(
+ {'FAIL': 1}, 2) == "**FAIL: 1/2, MISSING: 1/2**"
+ assert stability.err_string(
+ {'FAIL': 1, 'OK': 1}, 3) == "**FAIL: 1/3, OK: 1/3, MISSING: 1/3**"
+
+ assert stability.err_string(
+ {'OK': 1, 'FAIL': 1}, 2) == "**FAIL: 1/2, OK: 1/2**"
+
+ assert stability.err_string(
+ {'OK': 2, 'FAIL': 1, 'SKIP': 1}, 4) == "FAIL: 1/4, OK: 2/4, SKIP: 1/4"
+ assert stability.err_string(
+ {'FAIL': 1, 'SKIP': 1, 'OK': 2}, 4) == "FAIL: 1/4, OK: 2/4, SKIP: 1/4"
+
+
+def test_check_stability_iterations():
+ logger = StructuredLogger("test-stability")
+ logger.add_handler(StreamHandler(sys.stdout, TbplFormatter()))
+
+ kwargs = {"verify_log_full": False}
+
+ def mock_run_tests(**kwargs):
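+ # Emit a minimal but well-formed log for each repeat so that check_stability sees stable results.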
+ repeats = kwargs.get("repeat", 1)
+ for _ in range(repeats):
+ logger.suite_start(tests=[], name="test")
+ for _ in range(kwargs.get("rerun", 1)):
+ logger.test_start("/example/test.html")
+ logger.test_status("/example/test.html", subtest="test1", status="PASS")
+ logger.test_end("/example/test.html", status="OK")
+ logger.suite_end()
+
+ status = wptrunner.TestStatus()
+ status.total_tests = 1
+ status.repeated_runs = repeats
+ status.expected_repeated_runs = repeats
+
+ return (None, status)
+
+ # Don't actually load wptrunner, because that will end up starting a browser
+ # which we don't want to do in this test.
+ with mock.patch("wptrunner.stability.wptrunner.run_tests") as mock_run:
+ mock_run.side_effect = mock_run_tests
+ assert stability.check_stability(logger,
+ repeat_loop=10,
+ repeat_restart=5,
+ chaos_mode=False,
+ output_results=False,
+ **kwargs) is None
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_testloader.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_testloader.py
new file mode 100644
index 0000000000..0936c54ea9
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_testloader.py
@@ -0,0 +1,95 @@
+# mypy: ignore-errors
+
+import os
+import sys
+import tempfile
+
+import pytest
+
+from mozlog import structured
+from ..testloader import TestFilter as Filter, TestLoader as Loader
+from ..testloader import read_include_from_file
+from .test_wpttest import make_mock_manifest
+
+here = os.path.dirname(__file__)
+sys.path.insert(0, os.path.join(here, os.pardir, os.pardir, os.pardir))
+from manifest.manifest import Manifest as WPTManifest
+
+structured.set_default_logger(structured.structuredlog.StructuredLogger("TestLoader"))
+
+include_ini = """\
+skip: true
+[test_\u53F0]
+ skip: false
+"""
+
+
+def test_loader_h2_tests():
+ manifest_json = {
+ "items": {
+ "testharness": {
+ "a": {
+ "foo.html": [
+ "abcdef123456",
+ [None, {}],
+ ],
+ "bar.h2.html": [
+ "uvwxyz987654",
+ [None, {}],
+ ],
+ }
+ }
+ },
+ "url_base": "/",
+ "version": 8,
+ }
+ manifest = WPTManifest.from_json("/", manifest_json)
+
+ # By default, the loader should include the h2 test.
+ loader = Loader({manifest: {"metadata_path": ""}}, ["testharness"], None)
+ assert "testharness" in loader.tests
+ assert len(loader.tests["testharness"]) == 2
+ assert len(loader.disabled_tests) == 0
+
+ # We can also instruct it to skip them.
+ loader = Loader({manifest: {"metadata_path": ""}}, ["testharness"], None, include_h2=False)
+ assert "testharness" in loader.tests
+ assert len(loader.tests["testharness"]) == 1
+ assert "testharness" in loader.disabled_tests
+ assert len(loader.disabled_tests["testharness"]) == 1
+ assert loader.disabled_tests["testharness"][0].url == "/a/bar.h2.html"
+
+@pytest.mark.xfail(sys.platform == "win32",
+ reason="NamedTemporaryFile cannot be reopened on Win32")
+def test_include_file():
+ test_cases = """
+# This is a comment
+/foo/bar-error.https.html
+/foo/bar-success.https.html
+/foo/idlharness.https.any.html
+/foo/idlharness.https.any.worker.html
+ """
+
+ with tempfile.NamedTemporaryFile(mode="wt") as f:
+ f.write(test_cases)
+ f.flush()
+
+ include = read_include_from_file(f.name)
+
+ assert len(include) == 4
+ assert "/foo/bar-error.https.html" in include
+ assert "/foo/bar-success.https.html" in include
+ assert "/foo/idlharness.https.any.html" in include
+ assert "/foo/idlharness.https.any.worker.html" in include
+
+@pytest.mark.xfail(sys.platform == "win32",
+ reason="NamedTemporaryFile cannot be reopened on Win32")
+def test_filter_unicode():
+ tests = make_mock_manifest(("test", "a", 10), ("test", "a/b", 10),
+ ("test", "c", 10))
+
+ with tempfile.NamedTemporaryFile("wb", suffix=".ini") as f:
+ f.write(include_ini.encode('utf-8'))
+ f.flush()
+
+ Filter(manifest_path=f.name, test_manifests=tests)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_update.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_update.py
new file mode 100644
index 0000000000..35c75758f5
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_update.py
@@ -0,0 +1,1853 @@
+# mypy: ignore-errors
+
+import json
+import os
+import sys
+from io import BytesIO
+from unittest import mock
+
+import pytest
+
+from .. import metadata, manifestupdate, wptmanifest
+from ..update.update import WPTUpdate
+from ..update.base import StepRunner, Step
+from mozlog import structuredlog, handlers, formatters
+
+here = os.path.dirname(__file__)
+sys.path.insert(0, os.path.join(here, os.pardir, os.pardir, os.pardir))
+from manifest import manifest, item as manifest_item, utils
+
+
+def rel_path_to_test_url(rel_path):
+ assert not os.path.isabs(rel_path)
+ return rel_path.replace(os.sep, "/")
+
+
+def SourceFileWithTest(path, hash, cls, *args):
+ path_parts = tuple(path.split("/"))
+ path = utils.to_os_path(path)
+ s = mock.Mock(rel_path=path, rel_path_parts=path_parts, hash=hash)
+ test = cls("/foobar", path, "/", rel_path_to_test_url(path), *args)
+ s.manifest_items = mock.Mock(return_value=(cls.item_type, [test]))
+ return s
+
+
+def tree_and_sourcefile_mocks(source_files):
+ paths_dict = {}
+ tree = []
+ for source_file, file_hash, updated in source_files:
+ paths_dict[source_file.rel_path] = source_file
+ tree.append([source_file.rel_path, file_hash, updated])
+
+ def MockSourceFile(tests_root, path, url_base, file_hash):
+ return paths_dict[path]
+
+ return tree, MockSourceFile
+
+
+item_classes = {"testharness": manifest_item.TestharnessTest,
+ "reftest": manifest_item.RefTest,
+ "manual": manifest_item.ManualTest,
+ "wdspec": manifest_item.WebDriverSpecTest,
+ "conformancechecker": manifest_item.ConformanceCheckerTest,
+ "visual": manifest_item.VisualTest,
+ "support": manifest_item.SupportFile}
+
+
+default_run_info = {"debug": False, "os": "linux", "version": "18.04", "processor": "x86_64", "bits": 64}
+test_id = "/path/to/test.htm"
+dir_id = "path/to/__dir__"
+
+
+def reset_globals():
+ metadata.prop_intern.clear()
+ metadata.run_info_intern.clear()
+ metadata.status_intern.clear()
+
+
+def get_run_info(overrides):
+ run_info = default_run_info.copy()
+ run_info.update(overrides)
+ return run_info
+
+
+def update(tests, *logs, **kwargs):
+ full_update = kwargs.pop("full_update", False)
+ disable_intermittent = kwargs.pop("disable_intermittent", False)
+ update_intermittent = kwargs.pop("update_intermittent", False)
+ remove_intermittent = kwargs.pop("remove_intermittent", False)
+ assert not kwargs
+ id_test_map, updater = create_updater(tests)
+
+ for log in logs:
+ log = create_log(log)
+ updater.update_from_log(log)
+
+ update_properties = (["debug", "os", "version", "processor"],
+ {"os": ["version"], "processor": ["bits"]})
+
+ expected_data = {}
+ metadata.load_expected = lambda _, __, test_path, *args: expected_data.get(test_path)
+ for test_path, test_ids, test_type, manifest_str in tests:
+ test_path = utils.to_os_path(test_path)
+ expected_data[test_path] = manifestupdate.compile(BytesIO(manifest_str),
+ test_path,
+ "/",
+ update_properties,
+ update_intermittent,
+ remove_intermittent)
+
+ return list(metadata.update_results(id_test_map,
+ update_properties,
+ full_update,
+ disable_intermittent,
+ update_intermittent,
+ remove_intermittent))
+
+
+def create_updater(tests, url_base="/", **kwargs):
+ id_test_map = {}
+ m = create_test_manifest(tests, url_base)
+
+ reset_globals()
+ id_test_map = metadata.create_test_tree(None, m)
+
+ return id_test_map, metadata.ExpectedUpdater(id_test_map, **kwargs)
+
+
+def create_log(entries):
+ data = BytesIO()
+ if isinstance(entries, list):
+ logger = structuredlog.StructuredLogger("expected_test")
+ handler = handlers.StreamHandler(data, formatters.JSONFormatter())
+ logger.add_handler(handler)
+
+ for item in entries:
+ action, kwargs = item
+ getattr(logger, action)(**kwargs)
+ logger.remove_handler(handler)
+ else:
+ data.write(json.dumps(entries).encode())
+ data.seek(0)
+ return data
+
+
+def suite_log(entries, run_info=None):
+ _run_info = default_run_info.copy()
+ if run_info:
+ _run_info.update(run_info)
+ return ([("suite_start", {"tests": [], "run_info": _run_info})] +
+ entries +
+ [("suite_end", {})])
+
+
+def create_test_manifest(tests, url_base="/"):
+ source_files = []
+ for i, (test, _, test_type, _) in enumerate(tests):
+ if test_type:
+ source_files.append(SourceFileWithTest(test, str(i) * 40, item_classes[test_type]))
+ m = manifest.Manifest("")
+ tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in source_files)
+ with mock.patch("manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ m.update(tree)
+ return m
+
+
+def test_update_0():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected: FAIL""")]
+
+ log = suite_log([("test_start", {"test": "/path/to/test.htm"}),
+ ("test_status", {"test": "/path/to/test.htm",
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "FAIL"}),
+ ("test_end", {"test": "/path/to/test.htm",
+ "status": "OK"})])
+
+ updated = update(tests, log)
+
+ assert len(updated) == 1
+ assert updated[0][1].is_empty
+
+
+def test_update_1():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected: ERROR""")]
+
+ log = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "ERROR"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log)
+
+ new_manifest = updated[0][1]
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get("expected", default_run_info) == "FAIL"
+
+
+def test_update_known_intermittent_1():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected: PASS""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ log_2 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0, log_1, log_2, update_intermittent=True)
+
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == ["PASS", "FAIL"]
+
+
+def test_update_known_intermittent_2():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected: PASS""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0, update_intermittent=True)
+
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == "FAIL"
+
+
+def test_update_existing_known_intermittent():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected: [PASS, FAIL]""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "ERROR",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ log_2 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0, log_1, log_2, update_intermittent=True)
+
+ new_manifest = updated[0][1]
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == ["PASS", "ERROR", "FAIL"]
+
+
+def test_update_remove_previous_intermittent():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected: [PASS, FAIL]""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "ERROR",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ log_2 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests,
+ log_0,
+ log_1,
+ log_2,
+ update_intermittent=True,
+ remove_intermittent=True)
+
+ new_manifest = updated[0][1]
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == ["PASS", "ERROR"]
+
+
+def test_update_new_test_with_intermittent():
+ tests = [("path/to/test.htm", [test_id], "testharness", None)]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ log_2 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0, log_1, log_2, update_intermittent=True)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test("test.htm") is None
+ assert len(new_manifest.get_test(test_id).children) == 1
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == ["PASS", "FAIL"]
+
+
+def test_update_expected_tie_resolution():
+ tests = [("path/to/test.htm", [test_id], "testharness", None)]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0, log_1, update_intermittent=True)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == ["PASS", "FAIL"]
+
+
+def test_update_reorder_expected():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected: [PASS, FAIL]""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ log_2 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0, log_1, log_2, update_intermittent=True)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == ["PASS", "FAIL"]
+
+
+def test_update_and_preserve_unchanged_expected_intermittent():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""
+[test.htm]
+ expected:
+ if os == "android": [PASS, FAIL]
+ FAIL""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "FAIL",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]})],
+ run_info={"os": "android"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "PASS",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]})],
+ run_info={"os": "android"})
+
+ log_2 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "PASS",
+ "expected": "FAIL"})])
+
+ updated = update(tests, log_0, log_1, log_2)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"os": "android"})
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).get(
+ "expected", run_info_1) == ["PASS", "FAIL"]
+ assert new_manifest.get_test(test_id).get(
+ "expected", default_run_info) == "PASS"
+
+
+def test_update_test_with_intermittent_to_one_expected_status():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected: [PASS, FAIL]""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "ERROR",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0)
+
+ new_manifest = updated[0][1]
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == "ERROR"
+
+
+def test_update_intermittent_with_conditions():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""
+[test.htm]
+ expected:
+ if os == "android": [PASS, FAIL]""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "TIMEOUT",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]})],
+ run_info={"os": "android"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "PASS",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]})],
+ run_info={"os": "android"})
+
+ updated = update(tests, log_0, log_1, update_intermittent=True)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"os": "android"})
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).get(
+ "expected", run_info_1) == ["PASS", "TIMEOUT", "FAIL"]
+
+
+def test_update_and_remove_intermittent_with_conditions():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""
+[test.htm]
+ expected:
+ if os == "android": [PASS, FAIL]""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "TIMEOUT",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]})],
+ run_info={"os": "android"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "PASS",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]})],
+ run_info={"os": "android"})
+
+ updated = update(tests, log_0, log_1, update_intermittent=True, remove_intermittent=True)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"os": "android"})
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).get(
+ "expected", run_info_1) == ["PASS", "TIMEOUT"]
+
+
+def test_update_intermittent_full():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected:
+ if os == "mac": [FAIL, TIMEOUT]
+ FAIL""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "FAIL",
+ "known_intermittent": ["TIMEOUT"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "mac"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0, log_1, update_intermittent=True, full_update=True)
+
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"os": "mac"})
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_1) == ["FAIL", "TIMEOUT"]
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == "FAIL"
+
+
+def test_update_intermittent_full_remove():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected:
+ if os == "mac": [FAIL, TIMEOUT, PASS]
+ FAIL""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "FAIL",
+ "known_intermittent": ["TIMEOUT", "PASS"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "mac"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "TIMEOUT",
+ "expected": "FAIL",
+ "known_intermittent": ["TIMEOUT", "PASS"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "mac"})
+
+ log_2 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0, log_1, log_2, update_intermittent=True,
+ full_update=True, remove_intermittent=True)
+
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"os": "mac"})
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_1) == ["FAIL", "TIMEOUT"]
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == "FAIL"
+
+
+def test_full_update():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected:
+ if os == "mac": [FAIL, TIMEOUT]
+ FAIL""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "FAIL",
+ "known_intermittent": ["TIMEOUT"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "mac"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0, log_1, full_update=True)
+
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"os": "mac"})
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_1) == "FAIL"
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == "FAIL"
+
+
+def test_full_orphan():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected: FAIL
+ [subsub test]
+ expected: TIMEOUT
+ [test2]
+ expected: ERROR
+""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+
+ updated = update(tests, log_0, full_update=True)
+
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert len(new_manifest.get_test(test_id).children[0].children) == 0
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == "FAIL"
+ assert len(new_manifest.get_test(test_id).children) == 1
+
+
+def test_update_reorder_expected_full_conditions():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected:
+ if os == "mac": [FAIL, TIMEOUT]
+ [FAIL, PASS]""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "TIMEOUT",
+ "expected": "FAIL",
+ "known_intermittent": ["TIMEOUT"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "mac"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "TIMEOUT",
+ "expected": "FAIL",
+ "known_intermittent": ["TIMEOUT"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "mac"})
+
+ log_2 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "FAIL",
+ "known_intermittent": ["PASS"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ log_3 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "FAIL",
+ "known_intermittent": ["PASS"]}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0, log_1, log_2, log_3, update_intermittent=True, full_update=True)
+
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"os": "mac"})
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_1) == ["TIMEOUT", "FAIL"]
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", default_run_info) == ["PASS", "FAIL"]
+
+
+def test_skip_0():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected: FAIL""")]
+
+ log = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log)
+ assert not updated
+
+
+def test_new_subtest():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ [test1]
+ expected: FAIL""")]
+
+ log = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "FAIL"}),
+ ("test_status", {"test": test_id,
+ "subtest": "test2",
+ "status": "FAIL",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+ updated = update(tests, log)
+ new_manifest = updated[0][1]
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get("expected", default_run_info) == "FAIL"
+ assert new_manifest.get_test(test_id).children[1].get("expected", default_run_info) == "FAIL"
+
+
+def test_update_subtest():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ expected:
+ if os == "linux": [OK, ERROR]
+ [test1]
+ expected: FAIL""")]
+
+ log = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "known_intermittent": []}),
+ ("test_status", {"test": test_id,
+ "subtest": "test2",
+ "status": "FAIL",
+ "expected": "PASS",
+ "known_intermittent": []}),
+ ("test_end", {"test": test_id,
+ "status": "OK",
+ "known_intermittent": ["ERROR"]})])
+ updated = update(tests, log)
+ new_manifest = updated[0][1]
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get("expected", default_run_info) == "FAIL"
+
+
+def test_update_multiple_0():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ [test1]
+ expected: FAIL""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": False, "os": "osx"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "TIMEOUT",
+ "expected": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": False, "os": "linux"})
+
+ updated = update(tests, log_0, log_1)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"debug": False, "os": "osx"})
+ run_info_2 = default_run_info.copy()
+ run_info_2.update({"debug": False, "os": "linux"})
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_1) == "FAIL"
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", {"debug": False, "os": "linux"}) == "TIMEOUT"
+
+
+def test_update_multiple_1():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ [test1]
+ expected: FAIL""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "osx"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "TIMEOUT",
+ "expected": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "linux"})
+
+ updated = update(tests, log_0, log_1)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"os": "osx"})
+ run_info_2 = default_run_info.copy()
+ run_info_2.update({"os": "linux"})
+ run_info_3 = default_run_info.copy()
+ run_info_3.update({"os": "win"})
+
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_1) == "FAIL"
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_2) == "TIMEOUT"
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_3) == "FAIL"
+
+
+def test_update_multiple_2():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ [test1]
+ expected: FAIL""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": False, "os": "osx"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "TIMEOUT",
+ "expected": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": True, "os": "osx"})
+
+ updated = update(tests, log_0, log_1)
+ new_manifest = updated[0][1]
+
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"debug": False, "os": "osx"})
+ run_info_2 = default_run_info.copy()
+ run_info_2.update({"debug": True, "os": "osx"})
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_1) == "FAIL"
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_2) == "TIMEOUT"
+
+
+def test_update_multiple_3():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ [test1]
+ expected:
+ if debug: FAIL
+ if not debug and os == "osx": TIMEOUT""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": False, "os": "osx"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "TIMEOUT",
+ "expected": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": True, "os": "osx"})
+
+ updated = update(tests, log_0, log_1)
+ new_manifest = updated[0][1]
+
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"debug": False, "os": "osx"})
+ run_info_2 = default_run_info.copy()
+ run_info_2.update({"debug": True, "os": "osx"})
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_1) == "FAIL"
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_2) == "TIMEOUT"
+
+
+def test_update_ignore_existing():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ [test1]
+ expected:
+ if debug: TIMEOUT
+ if not debug and os == "osx": NOTRUN""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": False, "os": "linux"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": True, "os": "windows"})
+
+ updated = update(tests, log_0, log_1)
+ new_manifest = updated[0][1]
+
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"debug": False, "os": "linux"})
+ run_info_2 = default_run_info.copy()
+ run_info_2.update({"debug": False, "os": "osx"})
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_1) == "FAIL"
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_2) == "NOTRUN"
+
+
+def test_update_new_test():
+ tests = [("path/to/test.htm", [test_id], "testharness", None)]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+ updated = update(tests, log_0)
+ new_manifest = updated[0][1]
+
+ run_info_1 = default_run_info.copy()
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test("test.htm") is None
+ assert len(new_manifest.get_test(test_id).children) == 1
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_1) == "FAIL"
+
+
+def test_update_duplicate():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""
+[test.htm]
+ expected: ERROR""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "PASS"})])
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "FAIL"})])
+
+ updated = update(tests, log_0, log_1)
+ new_manifest = updated[0][1]
+ run_info_1 = default_run_info.copy()
+
+ assert new_manifest.get_test(test_id).get(
+ "expected", run_info_1) == "ERROR"
+
+
+def test_update_disable_intermittent():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""
+[test.htm]
+ expected: ERROR""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "PASS"})])
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "FAIL"})])
+
+ updated = update(tests, log_0, log_1, disable_intermittent="Some message")
+ new_manifest = updated[0][1]
+ run_info_1 = default_run_info.copy()
+
+ assert new_manifest.get_test(test_id).get(
+ "disabled", run_info_1) == "Some message"
+
+
+def test_update_stability_conditional_instability():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""
+[test.htm]
+ expected: ERROR""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "PASS"})],
+ run_info={"os": "linux"})
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "FAIL"})],
+ run_info={"os": "linux"})
+ log_2 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "status": "FAIL"})],
+ run_info={"os": "mac"})
+
+ updated = update(tests, log_0, log_1, log_2, disable_intermittent="Some message")
+ new_manifest = updated[0][1]
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"os": "linux"})
+ run_info_2 = default_run_info.copy()
+ run_info_2.update({"os": "mac"})
+
+ assert new_manifest.get_test(test_id).get(
+ "disabled", run_info_1) == "Some message"
+ with pytest.raises(KeyError):
+ assert new_manifest.get_test(test_id).get(
+ "disabled", run_info_2)
+ assert new_manifest.get_test(test_id).get(
+ "expected", run_info_2) == "FAIL"
+
+
+def test_update_full():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ [test1]
+ expected:
+ if debug: TIMEOUT
+ if not debug and os == "osx": NOTRUN
+
+ [test2]
+ expected: FAIL
+
+[test.js]
+ [test1]
+ expected: FAIL
+""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": False})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "ERROR",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": True})
+
+ updated = update(tests, log_0, log_1, full_update=True)
+ new_manifest = updated[0][1]
+
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"debug": False, "os": "win"})
+ run_info_2 = default_run_info.copy()
+ run_info_2.update({"debug": True, "os": "osx"})
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test("test.js") is None
+ assert len(new_manifest.get_test(test_id).children) == 1
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_1) == "FAIL"
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_2) == "ERROR"
+
+
+def test_update_full_unknown():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ [test1]
+ expected:
+ if release_or_beta: ERROR
+ if not debug and os == "osx": NOTRUN
+""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": False, "release_or_beta": False})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": True, "release_or_beta": False})
+
+ updated = update(tests, log_0, log_1, full_update=True)
+ new_manifest = updated[0][1]
+
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"release_or_beta": False})
+ run_info_2 = default_run_info.copy()
+ run_info_2.update({"release_or_beta": True})
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_1) == "FAIL"
+ assert new_manifest.get_test(test_id).children[0].get(
+ "expected", run_info_2) == "ERROR"
+
+
+def test_update_full_unknown_missing():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ [subtest_deleted]
+ expected:
+ if release_or_beta: ERROR
+ FAIL
+""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "PASS"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"debug": False, "release_or_beta": False})
+
+ updated = update(tests, log_0, full_update=True)
+ assert len(updated) == 0
+
+
+def test_update_default():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ [test1]
+ expected:
+ if os == "mac": FAIL
+ ERROR""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "FAIL"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "mac"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "expected": "ERROR"}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "linux"})
+
+ updated = update(tests, log_0, log_1)
+ new_manifest = updated[0][1]
+
+ assert new_manifest.is_empty
+ assert new_manifest.modified
+
+
+def test_update_default_1():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""
+[test.htm]
+ expected:
+ if os == "mac": TIMEOUT
+ ERROR""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "expected": "ERROR",
+ "status": "FAIL"})],
+ run_info={"os": "linux"})
+
+ updated = update(tests, log_0)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"os": "mac"})
+ run_info_2 = default_run_info.copy()
+ run_info_2.update({"os": "win"})
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).get(
+ "expected", run_info_1) == "TIMEOUT"
+ assert new_manifest.get_test(test_id).get(
+ "expected", run_info_2) == "FAIL"
+
+
+def test_update_default_2():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""
+[test.htm]
+ expected:
+ if os == "mac": TIMEOUT
+ ERROR""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_end", {"test": test_id,
+ "expected": "ERROR",
+ "status": "TIMEOUT"})],
+ run_info={"os": "linux"})
+
+ updated = update(tests, log_0)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+
+ run_info_1 = default_run_info.copy()
+ run_info_1.update({"os": "mac"})
+ run_info_2 = default_run_info.copy()
+ run_info_2.update({"os": "win"})
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).get(
+ "expected", run_info_1) == "TIMEOUT"
+ assert new_manifest.get_test(test_id).get(
+ "expected", run_info_2) == "TIMEOUT"
+
+
+def test_update_assertion_count_0():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ max-asserts: 4
+ min-asserts: 2
+""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("assertion_count", {"test": test_id,
+ "count": 6,
+ "min_expected": 2,
+ "max_expected": 4}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).get("max-asserts") == "7"
+ assert new_manifest.get_test(test_id).get("min-asserts") == "2"
+
+
+def test_update_assertion_count_1():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ max-asserts: 4
+ min-asserts: 2
+""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("assertion_count", {"test": test_id,
+ "count": 1,
+ "min_expected": 2,
+ "max_expected": 4}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).get("max-asserts") == "4"
+ assert new_manifest.get_test(test_id).has_key("min-asserts") is False
+
+
+def test_update_assertion_count_2():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ max-asserts: 4
+ min-asserts: 2
+""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("assertion_count", {"test": test_id,
+ "count": 3,
+ "min_expected": 2,
+ "max_expected": 4}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})])
+
+ updated = update(tests, log_0)
+ assert not updated
+
+
+def test_update_assertion_count_3():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
+ max-asserts: 4
+ min-asserts: 2
+""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("assertion_count", {"test": test_id,
+ "count": 6,
+ "min_expected": 2,
+ "max_expected": 4}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "windows"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("assertion_count", {"test": test_id,
+ "count": 7,
+ "min_expected": 2,
+ "max_expected": 4}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "linux"})
+
+ updated = update(tests, log_0, log_1)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).get("max-asserts") == "8"
+ assert new_manifest.get_test(test_id).get("min-asserts") == "2"
+
+
+def test_update_assertion_count_4():
+ tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("assertion_count", {"test": test_id,
+ "count": 6,
+ "min_expected": 0,
+ "max_expected": 0}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "windows"})
+
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("assertion_count", {"test": test_id,
+ "count": 7,
+ "min_expected": 0,
+ "max_expected": 0}),
+ ("test_end", {"test": test_id,
+ "status": "OK"})],
+ run_info={"os": "linux"})
+
+ updated = update(tests, log_0, log_1)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get_test(test_id).get("max-asserts") == "8"
+ assert new_manifest.get_test(test_id).has_key("min-asserts") is False
+
+
+def test_update_lsan_0():
+ tests = [("path/to/test.htm", [test_id], "testharness", b""),
+ ("path/to/__dir__", [dir_id], None, b"")]
+
+ log_0 = suite_log([("lsan_leak", {"scope": "path/to/",
+ "frames": ["foo", "bar"]})])
+
+
+ updated = update(tests, log_0)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get("lsan-allowed") == ["foo"]
+
+
+def test_update_lsan_1():
+ tests = [("path/to/test.htm", [test_id], "testharness", b""),
+ ("path/to/__dir__", [dir_id], None, b"""
+lsan-allowed: [foo]""")]
+
+ log_0 = suite_log([("lsan_leak", {"scope": "path/to/",
+ "frames": ["foo", "bar"]}),
+ ("lsan_leak", {"scope": "path/to/",
+ "frames": ["baz", "foobar"]})])
+
+
+ updated = update(tests, log_0)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get("lsan-allowed") == ["baz", "foo"]
+
+
+def test_update_lsan_2():
+ tests = [("path/to/test.htm", [test_id], "testharness", b""),
+ ("path/__dir__", ["path/__dir__"], None, b"""
+lsan-allowed: [foo]"""),
+ ("path/to/__dir__", [dir_id], None, b"")]
+
+ log_0 = suite_log([("lsan_leak", {"scope": "path/to/",
+ "frames": ["foo", "bar"],
+ "allowed_match": ["foo"]}),
+ ("lsan_leak", {"scope": "path/to/",
+ "frames": ["baz", "foobar"]})])
+
+
+ updated = update(tests, log_0)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get("lsan-allowed") == ["baz"]
+
+
+def test_update_lsan_3():
+ tests = [("path/to/test.htm", [test_id], "testharness", b""),
+ ("path/to/__dir__", [dir_id], None, b"")]
+
+ log_0 = suite_log([("lsan_leak", {"scope": "path/to/",
+ "frames": ["foo", "bar"]})],
+ run_info={"os": "win"})
+
+ log_1 = suite_log([("lsan_leak", {"scope": "path/to/",
+ "frames": ["baz", "foobar"]})],
+ run_info={"os": "linux"})
+
+
+ updated = update(tests, log_0, log_1)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get("lsan-allowed") == ["baz", "foo"]
+
+
+def test_update_wptreport_0():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ [test1]
+ expected: FAIL""")]
+
+ log = {"run_info": default_run_info.copy(),
+ "results": [
+ {"test": "/path/to/test.htm",
+ "subtests": [{"name": "test1",
+ "status": "PASS",
+ "expected": "FAIL"}],
+ "status": "OK"}]}
+
+ updated = update(tests, log)
+
+ assert len(updated) == 1
+ assert updated[0][1].is_empty
+
+
+def test_update_wptreport_1():
+ tests = [("path/to/test.htm", [test_id], "testharness", b""),
+ ("path/to/__dir__", [dir_id], None, b"")]
+
+ log = {"run_info": default_run_info.copy(),
+ "results": [],
+ "lsan_leaks": [{"scope": "path/to/",
+ "frames": ["baz", "foobar"]}]}
+
+ updated = update(tests, log)
+
+ assert len(updated) == 1
+ assert updated[0][1].get("lsan-allowed") == ["baz"]
+
+
+def test_update_leak_total_0():
+ tests = [("path/to/test.htm", [test_id], "testharness", b""),
+ ("path/to/__dir__", [dir_id], None, b"")]
+
+ log_0 = suite_log([("mozleak_total", {"scope": "path/to/",
+ "process": "default",
+ "bytes": 100,
+ "threshold": 0,
+ "objects": []})])
+
+ updated = update(tests, log_0)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get("leak-threshold") == ['default:51200']
+
+
+def test_update_leak_total_1():
+ tests = [("path/to/test.htm", [test_id], "testharness", b""),
+ ("path/to/__dir__", [dir_id], None, b"")]
+
+ log_0 = suite_log([("mozleak_total", {"scope": "path/to/",
+ "process": "default",
+ "bytes": 100,
+ "threshold": 1000,
+ "objects": []})])
+
+ updated = update(tests, log_0)
+ assert not updated
+
+
+def test_update_leak_total_2():
+ tests = [("path/to/test.htm", [test_id], "testharness", b""),
+ ("path/to/__dir__", [dir_id], None, b"""
+leak-total: 110""")]
+
+ log_0 = suite_log([("mozleak_total", {"scope": "path/to/",
+ "process": "default",
+ "bytes": 100,
+ "threshold": 110,
+ "objects": []})])
+
+ updated = update(tests, log_0)
+ assert not updated
+
+
+def test_update_leak_total_3():
+ tests = [("path/to/test.htm", [test_id], "testharness", b""),
+ ("path/to/__dir__", [dir_id], None, b"""
+leak-total: 100""")]
+
+ log_0 = suite_log([("mozleak_total", {"scope": "path/to/",
+ "process": "default",
+ "bytes": 1000,
+ "threshold": 100,
+ "objects": []})])
+
+ updated = update(tests, log_0)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.get("leak-threshold") == ['default:51200']
+
+
+def test_update_leak_total_4():
+ tests = [("path/to/test.htm", [test_id], "testharness", b""),
+ ("path/to/__dir__", [dir_id], None, b"""
+leak-total: 110""")]
+
+ log_0 = suite_log([
+ ("lsan_leak", {"scope": "path/to/",
+ "frames": ["foo", "bar"]}),
+ ("mozleak_total", {"scope": "path/to/",
+ "process": "default",
+ "bytes": 100,
+ "threshold": 110,
+ "objects": []})])
+
+ updated = update(tests, log_0)
+ new_manifest = updated[0][1]
+
+ assert not new_manifest.is_empty
+ assert new_manifest.modified
+ assert new_manifest.has_key("leak-threshold") is False
+
+
+class TestStep(Step):
+ def create(self, state):
+ tests = [("path/to/test.htm", [test_id], "testharness", "")]
+ state.foo = create_test_manifest(tests)
+
+
+class UpdateRunner(StepRunner):
+ steps = [TestStep]
+
+
+def test_update_pickle():
+ logger = structuredlog.StructuredLogger("expected_test")
+ args = {
+ "test_paths": {
+ "/": {"tests_path": os.path.abspath(os.path.join(here,
+ os.pardir,
+ os.pardir,
+ os.pardir,
+ os.pardir))},
+ },
+ "abort": False,
+ "continue": False,
+ "sync": False,
+ }
+ args2 = args.copy()
+ args2["abort"] = True
+ wptupdate = WPTUpdate(logger, **args2)
+ wptupdate = WPTUpdate(logger, runner_cls=UpdateRunner, **args)
+ wptupdate.run()
+
+
+def test_update_serialize_quoted():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ expected: "ERROR"
+ [test1]
+ expected:
+ if os == "linux": ["PASS", "FAIL"]
+ "ERROR"
+""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "expected": "ERROR",
+ "status": "OK"})],
+ run_info={"os": "linux"})
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "expected": "ERROR",
+ "status": "OK"})],
+ run_info={"os": "linux"})
+ log_2 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "ERROR"}),
+ ("test_end", {"test": test_id,
+ "expected": "ERROR",
+ "status": "OK"})],
+ run_info={"os": "win"})
+
+ updated = update(tests, log_0, log_1, log_2, full_update=True, update_intermittent=True)
+
+
+ manifest_str = wptmanifest.serialize(updated[0][1].node,
+ skip_empty_data=True)
+ assert manifest_str == """[test.htm]
+ [test1]
+ expected:
+ if os == "linux": [PASS, FAIL]
+ ERROR
+"""
+
+
+def test_update_serialize_unquoted():
+ tests = [("path/to/test.htm", [test_id], "testharness",
+ b"""[test.htm]
+ expected: ERROR
+ [test1]
+ expected:
+ if os == "linux": [PASS, FAIL]
+ ERROR
+""")]
+
+ log_0 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "expected": "ERROR",
+ "status": "OK"})],
+ run_info={"os": "linux"})
+ log_1 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "FAIL",
+ "expected": "PASS",
+ "known_intermittent": ["FAIL"]}),
+ ("test_end", {"test": test_id,
+ "expected": "ERROR",
+ "status": "OK"})],
+ run_info={"os": "linux"})
+ log_2 = suite_log([("test_start", {"test": test_id}),
+ ("test_status", {"test": test_id,
+ "subtest": "test1",
+ "status": "ERROR"}),
+ ("test_end", {"test": test_id,
+ "expected": "ERROR",
+ "status": "OK"})],
+ run_info={"os": "win"})
+
+ updated = update(tests, log_0, log_1, log_2, full_update=True, update_intermittent=True)
+
+
+ manifest_str = wptmanifest.serialize(updated[0][1].node,
+ skip_empty_data=True)
+ assert manifest_str == """[test.htm]
+ [test1]
+ expected:
+ if os == "linux": [PASS, FAIL]
+ ERROR
+"""
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_wpttest.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_wpttest.py
new file mode 100644
index 0000000000..272fffd817
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/tests/test_wpttest.py
@@ -0,0 +1,232 @@
+# mypy: ignore-errors
+
+from io import BytesIO
+from unittest import mock
+
+from manifest import manifest as wptmanifest
+from manifest.item import TestharnessTest, RefTest
+from manifest.utils import to_os_path
+from . test_update import tree_and_sourcefile_mocks
+from .. import manifestexpected, wpttest
+
+
+dir_ini_0 = b"""\
+prefs: [a:b]
+"""
+
+dir_ini_1 = b"""\
+prefs: [@Reset, b:c]
+max-asserts: 2
+min-asserts: 1
+tags: [b, c]
+"""
+
+dir_ini_2 = b"""\
+lsan-max-stack-depth: 42
+"""
+
+test_0 = b"""\
+[0.html]
+ prefs: [c:d]
+ max-asserts: 3
+ tags: [a, @Reset]
+"""
+
+test_1 = b"""\
+[1.html]
+ prefs:
+ if os == 'win': [a:b, c:d]
+ expected:
+ if os == 'win': FAIL
+"""
+
+test_2 = b"""\
+[2.html]
+ lsan-max-stack-depth: 42
+"""
+
+test_3 = b"""\
+[3.html]
+ [subtest1]
+ expected: [PASS, FAIL]
+
+ [subtest2]
+ disabled: reason
+
+ [subtest3]
+ expected: FAIL
+"""
+
+test_4 = b"""\
+[4.html]
+ expected: FAIL
+"""
+
+test_5 = b"""\
+[5.html]
+"""
+
+test_6 = b"""\
+[6.html]
+ expected: [OK, FAIL]
+"""
+
+test_fuzzy = b"""\
+[fuzzy.html]
+ fuzzy: fuzzy-ref.html:1;200
+"""
+
+
+testharness_test = b"""<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>"""
+
+
+def make_mock_manifest(*items):
+ rv = mock.Mock(tests_root="/foobar")
+ tests = []
+ rv.__iter__ = lambda self: iter(tests)
+ rv.__getitem__ = lambda self, k: tests[k]
+ for test_type, dir_path, num_tests in items:
+ for i in range(num_tests):
+ filename = dir_path + "/%i.html" % i
+ tests.append((test_type,
+ filename,
+ {TestharnessTest("/foo.bar", filename, "/", filename)}))
+ return rv
+
+def make_test_object(test_name,
+ test_path,
+ index,
+ items,
+ inherit_metadata=None,
+ iterate=False,
+ condition=None):
+ inherit_metadata = inherit_metadata if inherit_metadata is not None else []
+ condition = condition if condition is not None else {}
+ tests = make_mock_manifest(*items) if isinstance(items, list) else make_mock_manifest(items)
+
+ test_metadata = manifestexpected.static.compile(BytesIO(test_name),
+ condition,
+ data_cls_getter=manifestexpected.data_cls_getter,
+ test_path=test_path,
+ url_base="/")
+
+ test = next(iter(tests[index][2])) if iterate else tests[index][2].pop()
+ return wpttest.from_manifest(tests, test, inherit_metadata, test_metadata.get_test(test.id))
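
make_test_object compiles the inline expectation bytes against a mocked manifest and returns the corresponding wpttest test object. A minimal usage sketch mirroring test_expected_fail below (relying on the defaults: no inherited metadata, no run-info condition):

    # Sketch: build the test object for test_4's [4.html] section.
    test_obj = make_test_object(test_4, "a/4.html", 4, ("test", "a", 5))
    assert test_obj.expected() == "FAIL"
    assert test_obj.known_intermittent() == []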
+
+
+def test_run_info():
+ run_info = wpttest.get_run_info("/", "fake-product", debug=False)
+ assert isinstance(run_info["bits"], int)
+ assert isinstance(run_info["os"], str)
+ assert isinstance(run_info["os_version"], str)
+ assert isinstance(run_info["processor"], str)
+ assert isinstance(run_info["product"], str)
+ assert isinstance(run_info["python_version"], int)
+
+
+def test_metadata_inherit():
+ items = [("test", "a", 10), ("test", "a/b", 10), ("test", "c", 10)]
+ inherit_metadata = [
+ manifestexpected.static.compile(
+ BytesIO(item),
+ {},
+ data_cls_getter=lambda x,y: manifestexpected.DirectoryManifest)
+ for item in [dir_ini_0, dir_ini_1]]
+
+ test_obj = make_test_object(test_0, "a/0.html", 0, items, inherit_metadata, True)
+
+ assert test_obj.max_assertion_count == 3
+ assert test_obj.min_assertion_count == 1
+ assert test_obj.prefs == {"b": "c", "c": "d"}
+ assert test_obj.tags == {"a", "dir:a"}
+
+
+def test_conditional():
+ items = [("test", "a", 10), ("test", "a/b", 10), ("test", "c", 10)]
+
+ test_obj = make_test_object(test_1, "a/1.html", 1, items, None, True, {"os": "win"})
+
+ assert test_obj.prefs == {"a": "b", "c": "d"}
+ assert test_obj.expected() == "FAIL"
+
+
+def test_metadata_lsan_stack_depth():
+ items = [("test", "a", 10), ("test", "a/b", 10)]
+
+ test_obj = make_test_object(test_2, "a/2.html", 2, items, None, True)
+
+ assert test_obj.lsan_max_stack_depth == 42
+
+ test_obj = make_test_object(test_2, "a/2.html", 1, items, None, True)
+
+ assert test_obj.lsan_max_stack_depth is None
+
+ inherit_metadata = [
+ manifestexpected.static.compile(
+ BytesIO(dir_ini_2),
+ {},
+ data_cls_getter=lambda x,y: manifestexpected.DirectoryManifest)
+ ]
+
+    test_obj = make_test_object(test_0, "a/0.html", 0, items, inherit_metadata, False)
+
+ assert test_obj.lsan_max_stack_depth == 42
+
+
+def test_subtests():
+ test_obj = make_test_object(test_3, "a/3.html", 3, ("test", "a", 4), None, False)
+ assert test_obj.expected("subtest1") == "PASS"
+ assert test_obj.known_intermittent("subtest1") == ["FAIL"]
+ assert test_obj.expected("subtest2") == "PASS"
+ assert test_obj.known_intermittent("subtest2") == []
+ assert test_obj.expected("subtest3") == "FAIL"
+ assert test_obj.known_intermittent("subtest3") == []
+
+
+def test_expected_fail():
+ test_obj = make_test_object(test_4, "a/4.html", 4, ("test", "a", 5), None, False)
+ assert test_obj.expected() == "FAIL"
+ assert test_obj.known_intermittent() == []
+
+
+def test_no_expected():
+ test_obj = make_test_object(test_5, "a/5.html", 5, ("test", "a", 6), None, False)
+ assert test_obj.expected() == "OK"
+ assert test_obj.known_intermittent() == []
+
+
+def test_known_intermittent():
+ test_obj = make_test_object(test_6, "a/6.html", 6, ("test", "a", 7), None, False)
+ assert test_obj.expected() == "OK"
+ assert test_obj.known_intermittent() == ["FAIL"]
+
+
+def test_metadata_fuzzy():
+ item = RefTest(tests_root=".",
+ path="a/fuzzy.html",
+ url_base="/",
+ url="a/fuzzy.html",
+ references=[["/a/fuzzy-ref.html", "=="]],
+ fuzzy=[[["/a/fuzzy.html", '/a/fuzzy-ref.html', '=='],
+ [[2, 3], [10, 15]]]])
+ s = mock.Mock(rel_path="a/fuzzy.html", rel_path_parts=("a", "fuzzy.html"), hash="0"*40)
+ s.manifest_items = mock.Mock(return_value=(item.item_type, [item]))
+
+ manifest = wptmanifest.Manifest("")
+
+ tree, sourcefile_mock = tree_and_sourcefile_mocks([(s, None, True)])
+ with mock.patch("manifest.manifest.SourceFile", side_effect=sourcefile_mock):
+ assert manifest.update(tree) is True
+
+ test_metadata = manifestexpected.static.compile(BytesIO(test_fuzzy),
+ {},
+ data_cls_getter=manifestexpected.data_cls_getter,
+ test_path="a/fuzzy.html",
+ url_base="/")
+
+ test = next(manifest.iterpath(to_os_path("a/fuzzy.html")))
+ test_obj = wpttest.from_manifest(manifest, test, [], test_metadata.get_test(test.id))
+
+ assert test_obj.fuzzy == {('/a/fuzzy.html', '/a/fuzzy-ref.html', '=='): [[2, 3], [10, 15]]}
+ assert test_obj.fuzzy_override == {'/a/fuzzy-ref.html': ((1, 1), (200, 200))}
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/update/__init__.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/__init__.py
new file mode 100644
index 0000000000..1a58837f8d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/__init__.py
@@ -0,0 +1,47 @@
+# mypy: allow-untyped-defs
+
+import sys
+
+from mozlog.structured import structuredlog, commandline
+
+from .. import wptcommandline
+
+from .update import WPTUpdate
+
+def remove_logging_args(args):
+ """Take logging args out of the dictionary of command line arguments so
+    they are not passed in as kwargs to the update code. This is particularly
+    necessary here because the logging arguments are often open file objects,
+    which cannot be serialized.
+
+ :param args: Dictionary of command line arguments.
+ """
+ for name in list(args.keys()):
+ if name.startswith("log_"):
+ args.pop(name)
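
A small sketch of the filtering; the keys are hypothetical stand-ins for parsed command-line options, with None standing in for an open log file:

    # Sketch: every key starting with "log_" is dropped in place.
    args = {"sync": False, "log_raw": None, "log_mach_level": "info"}
    remove_logging_args(args)
    assert sorted(args) == ["sync"]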
+
+
+def setup_logging(args, defaults):
+ """Use the command line arguments to set up the logger.
+
+ :param args: Dictionary of command line arguments.
+ :param defaults: Dictionary of {formatter_name: stream} to use if
+ no command line logging is specified"""
+ logger = commandline.setup_logging("web-platform-tests-update", args, defaults)
+
+ remove_logging_args(args)
+
+ return logger
+
+
+def run_update(logger, **kwargs):
+ updater = WPTUpdate(logger, **kwargs)
+ return updater.run()
+
+
+def main():
+ args = wptcommandline.parse_args_update()
+ logger = setup_logging(args, {"mach": sys.stdout})
+ assert structuredlog.get_default_logger() is not None
+ success = run_update(logger, **args)
+ sys.exit(0 if success else 1)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/update/base.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/base.py
new file mode 100644
index 0000000000..bd39e23b86
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/base.py
@@ -0,0 +1,69 @@
+# mypy: allow-untyped-defs
+
+from typing import ClassVar, List, Type
+
+exit_unclean = object()
+exit_clean = object()
+
+
+class Step:
+ provides = [] # type: ClassVar[List[str]]
+
+ def __init__(self, logger):
+ self.logger = logger
+
+ def run(self, step_index, state):
+ """Base class for state-creating steps.
+
+ When a Step is run() the current state is checked to see
+ if the state from this step has already been created. If it
+ has the restore() method is invoked. Otherwise the create()
+ method is invoked with the state object. This is expected to
+ add items with all the keys in __class__.provides to the state
+ object.
+ """
+
+ name = self.__class__.__name__
+
+ try:
+ stored_step = state.steps[step_index]
+ except IndexError:
+ stored_step = None
+
+ if stored_step == name:
+ self.restore(state)
+ elif stored_step is None:
+ self.create(state)
+ assert set(self.provides).issubset(set(state.keys()))
+ state.steps = state.steps + [name]
+ else:
+ raise ValueError(f"Expected a {name} step, got a {stored_step} step")
+
+ def create(self, data):
+ raise NotImplementedError
+
+ def restore(self, state):
+ self.logger.debug(f"Step {self.__class__.__name__} using stored state")
+ for key in self.provides:
+ assert key in state
+
+
+class StepRunner:
+ steps = [] # type: ClassVar[List[Type[Step]]]
+
+ def __init__(self, logger, state):
+ """Class that runs a specified series of Steps with a common State"""
+ self.state = state
+ self.logger = logger
+ if "steps" not in state:
+ state.steps = []
+
+ def run(self):
+ rv = None
+ for step_index, step in enumerate(self.steps):
+ self.logger.debug("Starting step %s" % step.__name__)
+ rv = step(self.logger).run(step_index, self.state)
+ if rv in (exit_clean, exit_unclean):
+ break
+
+ return rv
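
A hedged sketch of the Step/StepRunner contract defined above, using the classes from this module plus an in-memory stand-in for the persistent state object from update/state.py (the step, runner, and state names here are invented for illustration):

    from mozlog.structured import structuredlog

    class DictState:
        """Attribute-style store with the small surface Step/StepRunner rely on."""
        def __init__(self):
            object.__setattr__(self, "_data", {})

        def __setattr__(self, key, value):
            self._data[key] = value

        def __getattr__(self, key):
            try:
                return self._data[key]
            except KeyError:
                raise AttributeError(key)

        def __contains__(self, key):
            return key in self._data

        def keys(self):
            return self._data.keys()


    class ComputeAnswer(Step):
        provides = ["answer"]

        def create(self, state):
            state.answer = 42  # must populate every name listed in `provides`


    class DemoRunner(StepRunner):
        steps = [ComputeAnswer]


    state = DictState()
    DemoRunner(structuredlog.StructuredLogger("update-sketch"), state).run()
    assert state.answer == 42
    assert state.steps == ["ComputeAnswer"]  # completed steps are recorded on the state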
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/update/metadata.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/metadata.py
new file mode 100644
index 0000000000..388b569bcc
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/metadata.py
@@ -0,0 +1,62 @@
+# mypy: allow-untyped-defs
+
+import os
+
+from .. import metadata, products
+
+from .base import Step, StepRunner
+
+
+class GetUpdatePropertyList(Step):
+ provides = ["update_properties"]
+
+ def create(self, state):
+ state.update_properties = products.load_product_update(state.config, state.product)
+
+
+class UpdateExpected(Step):
+ """Do the metadata update on the local checkout"""
+
+ def create(self, state):
+ metadata.update_expected(state.paths,
+ state.run_log,
+ update_properties=state.update_properties,
+ full_update=state.full_update,
+ disable_intermittent=state.disable_intermittent,
+ update_intermittent=state.update_intermittent,
+ remove_intermittent=state.remove_intermittent)
+
+
+class CreateMetadataPatch(Step):
+ """Create a patch/commit for the metadata checkout"""
+
+ def create(self, state):
+ if not state.patch:
+ return
+
+ local_tree = state.local_tree
+ sync_tree = state.sync_tree
+
+ if sync_tree is not None:
+ name = "web-platform-tests_update_%s_metadata" % sync_tree.rev
+ message = f"Update {state.suite_name} expected data to revision {sync_tree.rev}"
+ else:
+ name = "web-platform-tests_update_metadata"
+ message = "Update %s expected data" % state.suite_name
+
+ local_tree.create_patch(name, message)
+
+ if not local_tree.is_clean:
+            metadata_paths = [manifest_path["metadata_path"]
+                              for manifest_path in state.paths.values()]
+ for path in metadata_paths:
+ local_tree.add_new(os.path.relpath(path, local_tree.root))
+ local_tree.update_patch(include=metadata_paths)
+ local_tree.commit_patch()
+
+
+class MetadataUpdateRunner(StepRunner):
+ """(Sub)Runner for updating metadata"""
+ steps = [GetUpdatePropertyList,
+ UpdateExpected,
+ CreateMetadataPatch]
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/update/state.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/state.py
new file mode 100644
index 0000000000..2c23ad66c2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/state.py
@@ -0,0 +1,159 @@
+# mypy: allow-untyped-defs
+
+import os
+import pickle
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+class BaseState:
+ def __new__(cls, logger):
+ rv = cls.load(logger)
+ if rv is not None:
+ logger.debug("Existing state found")
+ return rv
+
+ logger.debug("No existing state found")
+ return super().__new__(cls)
+
+ def __init__(self, logger):
+ """Object containing state variables created when running Steps.
+
+ Variables are set and get as attributes e.g. state_obj.spam = "eggs".
+
+ :param parent: Parent State object or None if this is the root object.
+ """
+
+ if hasattr(self, "_data"):
+ return
+
+ self._data = [{}]
+ self._logger = logger
+ self._index = 0
+
+ def __getstate__(self):
+ rv = self.__dict__.copy()
+ del rv["_logger"]
+ return rv
+
+ def push(self, init_values):
+ """Push a new clean state dictionary
+
+ :param init_values: List of variable names in the current state dict to copy
+ into the new state dict."""
+
+ return StateContext(self, init_values)
+
+ def is_empty(self):
+ return len(self._data) == 1 and self._data[0] == {}
+
+ def clear(self):
+ """Remove all state and delete the stored copy."""
+ self._data = [{}]
+
+ def __setattr__(self, key, value):
+ if key.startswith("_"):
+ object.__setattr__(self, key, value)
+ else:
+ self._data[self._index][key] = value
+ self.save()
+
+ def __getattr__(self, key):
+ if key.startswith("_"):
+ raise AttributeError
+ try:
+ return self._data[self._index][key]
+ except KeyError:
+ raise AttributeError
+
+ def __contains__(self, key):
+ return key in self._data[self._index]
+
+ def update(self, items):
+ """Add a dictionary of {name: value} pairs to the state"""
+ self._data[self._index].update(items)
+ self.save()
+
+ def keys(self):
+ return self._data[self._index].keys()
+
+ @classmethod
+    def load(cls, logger):
+ raise NotImplementedError
+
+ def save(self):
+ raise NotImplementedError
+
+
+class SavedState(BaseState):
+ """On write the state is serialized to disk, such that it can be restored in
+ the event that the program is interrupted before all steps are complete.
+ Note that this only works well if the values are immutable; mutating an
+ existing value will not cause the data to be serialized."""
+ filename = os.path.join(here, ".wpt-update.lock")
+
+ @classmethod
+ def load(cls, logger):
+ """Load saved state from a file"""
+ try:
+ if not os.path.isfile(cls.filename):
+ return None
+ with open(cls.filename, "rb") as f:
+ try:
+ rv = pickle.load(f)
+ logger.debug(f"Loading data {rv._data!r}")
+ rv._logger = logger
+ rv._index = 0
+ return rv
+ except EOFError:
+ logger.warning("Found empty state file")
+ except OSError:
+ logger.debug("IOError loading stored state")
+
+ def save(self):
+ """Write the state to disk"""
+ with open(self.filename, "wb") as f:
+ pickle.dump(self, f)
+
+ def clear(self):
+ super().clear()
+ try:
+ os.unlink(self.filename)
+ except OSError:
+ pass
+
+
+class UnsavedState(BaseState):
+ @classmethod
+ def load(cls, logger):
+ return None
+
+ def save(self):
+ return
+
+
+class StateContext:
+ def __init__(self, state, init_values):
+ self.state = state
+ self.init_values = init_values
+
+ def __enter__(self):
+ if len(self.state._data) == self.state._index + 1:
+ # This is the case where there is no stored state
+ new_state = {}
+ for key in self.init_values:
+ new_state[key] = self.state._data[self.state._index][key]
+ self.state._data.append(new_state)
+ self.state._index += 1
+ self.state._logger.debug("Incremented index to %s" % self.state._index)
+
+ def __exit__(self, *args, **kwargs):
+ if len(self.state._data) > 1:
+ assert self.state._index == len(self.state._data) - 1
+ self.state._data.pop()
+ self.state._index -= 1
+ self.state._logger.debug("Decremented index to %s" % self.state._index)
+ assert self.state._index >= 0
+ else:
+ raise ValueError("Tried to pop the top state")
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/update/sync.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/sync.py
new file mode 100644
index 0000000000..b1dcf2d6c2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/sync.py
@@ -0,0 +1,150 @@
+# mypy: allow-untyped-defs
+
+import fnmatch
+import os
+import re
+import shutil
+import sys
+import uuid
+
+from .base import Step, StepRunner
+from .tree import Commit
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+def copy_wpt_tree(tree, dest, excludes=None, includes=None):
+ """Copy the working copy of a Tree to a destination directory.
+
+ :param tree: The Tree to copy.
+ :param dest: The destination directory"""
+ if os.path.exists(dest):
+ assert os.path.isdir(dest)
+
+ shutil.rmtree(dest)
+
+ os.mkdir(dest)
+
+ if excludes is None:
+ excludes = []
+
+ excludes = [re.compile(fnmatch.translate(item)) for item in excludes]
+
+ if includes is None:
+ includes = []
+
+ includes = [re.compile(fnmatch.translate(item)) for item in includes]
+
+ for tree_path in tree.paths():
+ if (any(item.match(tree_path) for item in excludes) and
+ not any(item.match(tree_path) for item in includes)):
+ continue
+
+ source_path = os.path.join(tree.root, tree_path)
+ dest_path = os.path.join(dest, tree_path)
+
+ dest_dir = os.path.dirname(dest_path)
+ if not os.path.isdir(source_path):
+ if not os.path.exists(dest_dir):
+ os.makedirs(dest_dir)
+ shutil.copy2(source_path, dest_path)
+
+ for source, destination in [("testharness_runner.html", ""),
+ ("testdriver-vendor.js", "resources/")]:
+ source_path = os.path.join(here, os.pardir, source)
+ dest_path = os.path.join(dest, destination, os.path.basename(source))
+ shutil.copy2(source_path, dest_path)
+
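+
+# Illustrative call (comment only, not executed); the destination path and
+# patterns are hypothetical. Patterns listed in includes take precedence over
+# excludes:
+#
+#     copy_wpt_tree(sync_tree,
+#                   "/tmp/wpt-copy",
+#                   excludes=["resources/*"],
+#                   includes=["resources/testdriver*"])
+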
+
+class UpdateCheckout(Step):
+ """Pull changes from upstream into the local sync tree."""
+
+ provides = ["local_branch"]
+
+ def create(self, state):
+ sync_tree = state.sync_tree
+ state.local_branch = uuid.uuid4().hex
+ sync_tree.update(state.sync["remote_url"],
+ state.sync["branch"],
+ state.local_branch)
+ sync_path = os.path.abspath(sync_tree.root)
+ if sync_path not in sys.path:
+ from .update import setup_paths
+ setup_paths(sync_path)
+
+ def restore(self, state):
+ assert os.path.abspath(state.sync_tree.root) in sys.path
+ Step.restore(self, state)
+
+
+class GetSyncTargetCommit(Step):
+ """Find the commit that we will sync to."""
+
+ provides = ["sync_commit"]
+
+ def create(self, state):
+ if state.target_rev is None:
+            # Use upstream branch HEAD as the base commit
+ state.sync_commit = state.sync_tree.get_remote_sha1(state.sync["remote_url"],
+ state.sync["branch"])
+ else:
+            state.sync_commit = Commit(state.sync_tree, state.target_rev)
+
+ state.sync_tree.checkout(state.sync_commit.sha1, state.local_branch, force=True)
+ self.logger.debug("New base commit is %s" % state.sync_commit.sha1)
+
+
+class UpdateManifest(Step):
+ """Update the manifest to match the tests in the sync tree checkout"""
+
+ provides = ["manifest_path", "test_manifest"]
+
+ def create(self, state):
+ from manifest import manifest # type: ignore
+ state.manifest_path = os.path.join(state.metadata_path, "MANIFEST.json")
+ state.test_manifest = manifest.load_and_update(state.sync["path"],
+ state.manifest_path,
+ "/",
+ write_manifest=True)
+
+
+class CopyWorkTree(Step):
+ """Copy the sync tree over to the destination in the local tree"""
+
+ def create(self, state):
+ copy_wpt_tree(state.sync_tree,
+ state.tests_path,
+ excludes=state.path_excludes,
+ includes=state.path_includes)
+
+
+class CreateSyncPatch(Step):
+ """Add the updated test files to a commit/patch in the local tree."""
+
+ def create(self, state):
+ if not state.patch:
+ return
+
+ local_tree = state.local_tree
+ sync_tree = state.sync_tree
+
+ local_tree.create_patch("web-platform-tests_update_%s" % sync_tree.rev,
+ f"Update {state.suite_name} to revision {sync_tree.rev}")
+ test_prefix = os.path.relpath(state.tests_path, local_tree.root)
+ local_tree.add_new(test_prefix)
+ local_tree.add_ignored(sync_tree, test_prefix)
+ updated = local_tree.update_patch(include=[state.tests_path,
+ state.metadata_path])
+ local_tree.commit_patch()
+
+ if not updated:
+ self.logger.info("Nothing to sync")
+
+
+class SyncFromUpstreamRunner(StepRunner):
+ """(Sub)Runner for doing an upstream sync"""
+ steps = [UpdateCheckout,
+ GetSyncTargetCommit,
+ UpdateManifest,
+ CopyWorkTree,
+ CreateSyncPatch]
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/update/tree.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/tree.py
new file mode 100644
index 0000000000..8c1b6a5f1b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/tree.py
@@ -0,0 +1,407 @@
+# mypy: allow-untyped-defs
+
+import os
+import re
+import subprocess
+import tempfile
+
+from .. import vcs
+from ..vcs import git, hg
+
+
+def get_unique_name(existing, initial):
+ """Get a name either equal to initial or of the form initial_N, for some
+ integer N, that is not in the set existing.
+
+ :param existing: Set of names that must not be chosen.
+ :param initial: Name, or name prefix, to use"""
+ if initial not in existing:
+ return initial
+ for i in range(len(existing) + 1):
+ test = f"{initial}_{i + 1}"
+ if test not in existing:
+ return test
+ assert False
+
+class NoVCSTree:
+ name = "non-vcs"
+
+ def __init__(self, root=None):
+ if root is None:
+ root = os.path.abspath(os.curdir)
+ self.root = root
+
+ @classmethod
+ def is_type(cls, path=None):
+ return True
+
+ @property
+ def is_clean(self):
+ return True
+
+ def add_new(self, prefix=None):
+ pass
+
+ def add_ignored(self, sync_tree, prefix):
+ pass
+
+ def create_patch(self, patch_name, message):
+ pass
+
+ def update_patch(self, include=None):
+ pass
+
+ def commit_patch(self):
+ pass
+
+
+class HgTree:
+ name = "mercurial"
+
+ def __init__(self, root=None):
+ if root is None:
+ root = hg("root").strip()
+ self.root = root
+ self.hg = vcs.bind_to_repo(hg, self.root)
+
+ def __getstate__(self):
+ rv = self.__dict__.copy()
+ del rv['hg']
+ return rv
+
+    def __setstate__(self, state):
+        self.__dict__.update(state)
+ self.hg = vcs.bind_to_repo(vcs.hg, self.root)
+
+ @classmethod
+ def is_type(cls, path=None):
+ kwargs = {"log_error": False}
+ if path is not None:
+ kwargs["repo"] = path
+ try:
+ hg("root", **kwargs)
+ except Exception:
+ return False
+ return True
+
+ @property
+ def is_clean(self):
+ return self.hg("status").strip() == b""
+
+ def add_new(self, prefix=None):
+ if prefix is not None:
+ args = ("-I", prefix)
+ else:
+ args = ()
+ self.hg("add", *args)
+
+ def add_ignored(self, sync_tree, prefix):
+ pass
+
+ def create_patch(self, patch_name, message):
+ try:
+ self.hg("qinit", log_error=False)
+ except subprocess.CalledProcessError:
+ pass
+
+        patch_names = [item.strip().decode('utf-8')
+                       for item in self.hg("qseries").split(b"\n") if item.strip()]
+
+ suffix = 0
+ test_name = patch_name
+ while test_name in patch_names:
+ suffix += 1
+ test_name = "%s-%i" % (patch_name, suffix)
+
+ self.hg("qnew", test_name, "-X", self.root, "-m", message)
+
+ def update_patch(self, include=None):
+ if include is not None:
+ args = []
+ for item in include:
+ args.extend(["-I", item])
+ else:
+ args = ()
+
+ self.hg("qrefresh", *args)
+ return True
+
+ def commit_patch(self):
+ self.hg("qfinish")
+
+ def contains_commit(self, commit):
+ try:
+ self.hg("identify", "-r", commit.sha1)
+ return True
+ except subprocess.CalledProcessError:
+ return False
+
+
+class GitTree:
+ name = "git"
+
+ def __init__(self, root=None, log_error=True):
+ if root is None:
+ root = git("rev-parse", "--show-toplevel", log_error=log_error).strip().decode('utf-8')
+ self.root = root
+ self.git = vcs.bind_to_repo(git, self.root, log_error=log_error)
+ self.message = None
+ self.commit_cls = Commit
+
+ def __getstate__(self):
+ rv = self.__dict__.copy()
+ del rv['git']
+ return rv
+
+    def __setstate__(self, state):
+        self.__dict__.update(state)
+ self.git = vcs.bind_to_repo(vcs.git, self.root)
+
+ @classmethod
+ def is_type(cls, path=None):
+ kwargs = {"log_error": False}
+ if path is not None:
+ kwargs["repo"] = path
+ try:
+ git("rev-parse", "--show-toplevel", **kwargs)
+ except Exception:
+ return False
+ return True
+
+ @property
+ def rev(self):
+ """Current HEAD revision"""
+ if vcs.is_git_root(self.root):
+ return self.git("rev-parse", "HEAD").strip()
+ else:
+ return None
+
+ @property
+ def is_clean(self):
+ return self.git("status").strip() == b""
+
+ def add_new(self, prefix=None):
+ """Add files to the staging area.
+
+ :param prefix: None to include all files or a path prefix to
+ add all files under that path.
+ """
+ if prefix is None:
+ args = ["-a"]
+ else:
+ args = ["--no-ignore-removal", prefix]
+ self.git("add", *args)
+
+ def add_ignored(self, sync_tree, prefix):
+ """Add files to the staging area that are explicitly ignored by git.
+
+ :param prefix: None to include all files or a path prefix to
+ add all files under that path.
+ """
+ with tempfile.TemporaryFile() as f:
+ sync_tree.git("ls-tree", "-z", "-r", "--name-only", "HEAD", stdout=f)
+ f.seek(0)
+ ignored_files = sync_tree.git("check-ignore", "--no-index", "--stdin", "-z", stdin=f)
+ args = []
+ for entry in ignored_files.decode('utf-8').split('\0'):
+ args.append(os.path.join(prefix, entry))
+ if args:
+ self.git("add", "--force", *args)
+
+ def list_refs(self, ref_filter=None):
+ """Get a list of sha1, name tuples for references in a repository.
+
+        :param ref_filter: Pattern that the reference name must match (from the
+                           end, matching whole /-delimited segments only).
+        """
+ args = []
+ if ref_filter is not None:
+ args.append(ref_filter)
+ data = self.git("show-ref", *args)
+ rv = []
+ for line in data.split(b"\n"):
+ if not line.strip():
+ continue
+ sha1, ref = line.split()
+ rv.append((sha1, ref))
+ return rv
+
+ def list_remote(self, remote, ref_filter=None):
+ """Return a list of (sha1, name) tupes for references in a remote.
+
+ :param remote: URL of the remote to list.
+ :param ref_filter: Pattern that the reference name must match.
+ """
+ args = []
+ if ref_filter is not None:
+ args.append(ref_filter)
+ data = self.git("ls-remote", remote, *args)
+ rv = []
+ for line in data.split(b"\n"):
+ if not line.strip():
+ continue
+ sha1, ref = line.split()
+ rv.append((sha1, ref))
+ return rv
+
+ def get_remote_sha1(self, remote, branch):
+ """Return the SHA1 of a particular branch in a remote.
+
+ :param remote: the remote URL
+ :param branch: the branch name"""
+ for sha1, ref in self.list_remote(remote, branch):
+ if ref.decode('utf-8') == "refs/heads/%s" % branch:
+ return self.commit_cls(self, sha1.decode('utf-8'))
+ assert False
+
+ def create_patch(self, patch_name, message):
+ # In git a patch is actually a commit
+ self.message = message
+
+ def update_patch(self, include=None):
+ """Commit the staged changes, or changes to listed files.
+
+ :param include: Either None, to commit staged changes, or a list
+ of filenames (which must already be in the repo)
+ to commit
+ """
+ if include is not None:
+ args = tuple(include)
+ else:
+ args = ()
+
+ if self.git("status", "-uno", "-z", *args).strip():
+ self.git("add", *args)
+ return True
+ return False
+
+ def commit_patch(self):
+ assert self.message is not None
+
+ if self.git("diff", "--name-only", "--staged", "-z").strip():
+ self.git("commit", "-m", self.message)
+ return True
+
+ return False
+
+ def init(self):
+ self.git("init")
+ assert vcs.is_git_root(self.root)
+
+ def checkout(self, rev, branch=None, force=False):
+ """Checkout a particular revision, optionally into a named branch.
+
+ :param rev: Revision identifier (e.g. SHA1) to checkout
+ :param branch: Branch name to use
+ :param force: Force-checkout
+ """
+ assert rev is not None
+
+ args = []
+ if branch:
+ branches = [ref[len("refs/heads/"):].decode('utf-8') for sha1, ref in self.list_refs()
+ if ref.startswith(b"refs/heads/")]
+ branch = get_unique_name(branches, branch)
+
+ args += ["-b", branch]
+
+ if force:
+ args.append("-f")
+ args.append(rev)
+ self.git("checkout", *args)
+
+ def update(self, remote, remote_branch, local_branch):
+ """Fetch from the remote and checkout into a local branch.
+
+ :param remote: URL to the remote repository
+ :param remote_branch: Branch on the remote repository to check out
+ :param local_branch: Local branch name to check out into
+ """
+ if not vcs.is_git_root(self.root):
+ self.init()
+ self.git("clean", "-xdf")
+ self.git("fetch", remote, f"{remote_branch}:{local_branch}")
+ self.checkout(local_branch)
+ self.git("submodule", "update", "--init", "--recursive")
+
+ def clean(self):
+ self.git("checkout", self.rev)
+ self.git("branch", "-D", self.local_branch)
+
+ def paths(self):
+ """List paths in the tree"""
+ repo_paths = [self.root] + [os.path.join(self.root, path)
+ for path in self.submodules()]
+
+ rv = []
+
+ for repo_path in repo_paths:
+ paths = vcs.git("ls-tree", "-r", "--name-only", "HEAD", repo=repo_path).split(b"\n")
+ rv.extend(os.path.relpath(os.path.join(repo_path, item.decode('utf-8')), self.root) for item in paths
+ if item.strip())
+ return rv
+
+ def submodules(self):
+ """List submodule directories"""
+ output = self.git("submodule", "status", "--recursive")
+ rv = []
+ for line in output.split(b"\n"):
+ line = line.strip()
+ if not line:
+ continue
+ parts = line.split(b" ")
+            rv.append(parts[1].decode('utf-8'))
+ return rv
+
+ def contains_commit(self, commit):
+ try:
+ self.git("rev-parse", "--verify", commit.sha1)
+ return True
+ except subprocess.CalledProcessError:
+ return False
+
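+
+# Sketch of the create/update/commit patch flow that the sync and metadata
+# steps rely on (comment only, not executed); the paths and revision are
+# hypothetical:
+#
+#     tree = GitTree(root="/path/to/local/checkout")
+#     tree.create_patch("web-platform-tests_update_abc123",
+#                       "Update web-platform-tests to revision abc123")
+#     tree.add_new("tests/wpt/tests")
+#     if tree.update_patch(include=["tests/wpt/tests"]):
+#         tree.commit_patch()
+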
+
+class CommitMessage:
+ def __init__(self, text):
+ self.text = text
+ self._parse_message()
+
+ def __str__(self):
+ return self.text
+
+ def _parse_message(self):
+ lines = self.text.splitlines()
+ self.full_summary = lines[0]
+ self.body = "\n".join(lines[1:])
+
+
+class Commit:
+ msg_cls = CommitMessage
+
+ _sha1_re = re.compile("^[0-9a-f]{40}$")
+
+ def __init__(self, tree, sha1):
+ """Object representing a commit in a specific GitTree.
+
+ :param tree: GitTree to which this commit belongs.
+ :param sha1: Full sha1 string for the commit
+ """
+ assert self._sha1_re.match(sha1)
+
+ self.tree = tree
+ self.git = tree.git
+ self.sha1 = sha1
+ self.author, self.email, self.message = self._get_meta()
+
+ def __getstate__(self):
+ rv = self.__dict__.copy()
+ del rv['git']
+ return rv
+
+    def __setstate__(self, state):
+        self.__dict__.update(state)
+ self.git = self.tree.git
+
+ def _get_meta(self):
+ author, email, message = self.git("show", "-s", "--format=format:%an\n%ae\n%B", self.sha1).decode('utf-8').split("\n", 2)
+ return author, email, self.msg_cls(message)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/update/update.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/update.py
new file mode 100644
index 0000000000..1e9be41504
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/update/update.py
@@ -0,0 +1,191 @@
+# mypy: allow-untyped-defs
+
+import os
+import sys
+
+from .metadata import MetadataUpdateRunner
+from .sync import SyncFromUpstreamRunner
+from .tree import GitTree, HgTree, NoVCSTree
+
+from .base import Step, StepRunner, exit_clean, exit_unclean
+from .state import SavedState, UnsavedState
+
+def setup_paths(sync_path):
+ sys.path.insert(0, os.path.abspath(sync_path))
+ from tools import localpaths # noqa: F401
+
+class LoadConfig(Step):
+ """Step for loading configuration from the ini file and kwargs."""
+
+ provides = ["sync", "paths", "metadata_path", "tests_path"]
+
+ def create(self, state):
+ state.sync = {"remote_url": state.kwargs["remote_url"],
+ "branch": state.kwargs["branch"],
+ "path": state.kwargs["sync_path"]}
+
+ state.paths = state.kwargs["test_paths"]
+ state.tests_path = state.paths["/"]["tests_path"]
+ state.metadata_path = state.paths["/"]["metadata_path"]
+
+ assert os.path.isabs(state.tests_path)
+
+
+class LoadTrees(Step):
+ """Step for creating a Tree for the local copy and a GitTree for the
+ upstream sync."""
+
+ provides = ["local_tree", "sync_tree"]
+
+ def create(self, state):
+ if os.path.exists(state.sync["path"]):
+ sync_tree = GitTree(root=state.sync["path"])
+ else:
+ sync_tree = None
+
+ if GitTree.is_type():
+ local_tree = GitTree()
+ elif HgTree.is_type():
+ local_tree = HgTree()
+ else:
+ local_tree = NoVCSTree()
+
+ state.update({"local_tree": local_tree,
+ "sync_tree": sync_tree})
+
+
+class SyncFromUpstream(Step):
+ """Step that synchronises a local copy of the code with upstream."""
+
+ def create(self, state):
+ if not state.kwargs["sync"]:
+ return
+
+ if not state.sync_tree:
+ os.mkdir(state.sync["path"])
+ state.sync_tree = GitTree(root=state.sync["path"])
+
+ kwargs = state.kwargs
+ with state.push(["sync", "paths", "metadata_path", "tests_path", "local_tree",
+ "sync_tree"]):
+ state.target_rev = kwargs["rev"]
+ state.patch = kwargs["patch"]
+ state.suite_name = kwargs["suite_name"]
+ state.path_excludes = kwargs["exclude"]
+ state.path_includes = kwargs["include"]
+ runner = SyncFromUpstreamRunner(self.logger, state)
+ runner.run()
+
+
+class UpdateMetadata(Step):
+ """Update the expectation metadata from a set of run logs"""
+
+ def create(self, state):
+ if not state.kwargs["run_log"]:
+ return
+
+ kwargs = state.kwargs
+ with state.push(["local_tree", "sync_tree", "paths", "serve_root"]):
+ state.run_log = kwargs["run_log"]
+ state.disable_intermittent = kwargs["disable_intermittent"]
+ state.update_intermittent = kwargs["update_intermittent"]
+ state.remove_intermittent = kwargs["remove_intermittent"]
+ state.patch = kwargs["patch"]
+ state.suite_name = kwargs["suite_name"]
+ state.product = kwargs["product"]
+ state.config = kwargs["config"]
+ state.full_update = kwargs["full"]
+ state.extra_properties = kwargs["extra_property"]
+ runner = MetadataUpdateRunner(self.logger, state)
+ runner.run()
+
+
+class RemoveObsolete(Step):
+ """Remove metadata files that don't corespond to an existing test file"""
+
+ def create(self, state):
+ if not state.kwargs["remove_obsolete"]:
+ return
+
+ paths = state.kwargs["test_paths"]
+ state.tests_path = state.paths["/"]["tests_path"]
+ state.metadata_path = state.paths["/"]["metadata_path"]
+
+ for url_paths in paths.values():
+ tests_path = url_paths["tests_path"]
+ metadata_path = url_paths["metadata_path"]
+ for dirpath, dirnames, filenames in os.walk(metadata_path):
+ for filename in filenames:
+ if filename == "__dir__.ini":
+ continue
+ if filename.endswith(".ini"):
+ full_path = os.path.join(dirpath, filename)
+ rel_path = os.path.relpath(full_path, metadata_path)
+ test_path = os.path.join(tests_path, rel_path[:-4])
+ if not os.path.exists(test_path):
+ os.unlink(full_path)
+
+
+class UpdateRunner(StepRunner):
+ """Runner for doing an overall update."""
+ steps = [LoadConfig,
+ LoadTrees,
+ SyncFromUpstream,
+ RemoveObsolete,
+ UpdateMetadata]
+
+
+class WPTUpdate:
+ def __init__(self, logger, runner_cls=UpdateRunner, **kwargs):
+ """Object that controls the running of a whole wptupdate.
+
+ :param runner_cls: Runner subclass holding the overall list of
+ steps to run.
+ :param kwargs: Command line arguments
+ """
+ self.runner_cls = runner_cls
+ self.serve_root = kwargs["test_paths"]["/"]["tests_path"]
+
+ if not kwargs["sync"]:
+ setup_paths(self.serve_root)
+ else:
+ if os.path.exists(kwargs["sync_path"]):
+ # If the sync path doesn't exist we defer this until it does
+ setup_paths(kwargs["sync_path"])
+
+ if kwargs.get("store_state", False):
+ self.state = SavedState(logger)
+ else:
+ self.state = UnsavedState(logger)
+ self.kwargs = kwargs
+ self.logger = logger
+
+ def run(self, **kwargs):
+ if self.kwargs["abort"]:
+ self.abort()
+ return exit_clean
+
+ if not self.kwargs["continue"] and not self.state.is_empty():
+ self.logger.critical("Found existing state. Run with --continue to resume or --abort to clear state")
+ return exit_unclean
+
+ if self.kwargs["continue"]:
+ if self.state.is_empty():
+ self.logger.error("No sync in progress?")
+ return exit_clean
+
+ self.kwargs = self.state.kwargs
+ else:
+ self.state.kwargs = self.kwargs
+
+ self.state.serve_root = self.serve_root
+
+ update_runner = self.runner_cls(self.logger, self.state)
+ rv = update_runner.run()
+ if rv in (exit_clean, None):
+ self.state.clear()
+
+ return rv
+
+ def abort(self):
+ self.state.clear()
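+
+
+# Driving an update programmatically (comment only, not executed). Here kwargs
+# is assumed to be the argument dict produced by
+# wptcommandline.create_parser_update() and check_args_update(), and logger a
+# mozlog logger:
+#
+#     updater = WPTUpdate(logger, **kwargs)
+#     rv = updater.run()
+#     success = rv in (exit_clean, None)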
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/vcs.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/vcs.py
new file mode 100644
index 0000000000..790fdc9833
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/vcs.py
@@ -0,0 +1,67 @@
+# mypy: allow-untyped-defs
+
+import subprocess
+from functools import partial
+from typing import Any, Callable
+
+from mozlog import get_default_logger
+
+from wptserve.utils import isomorphic_decode
+
+logger = None
+
+def vcs(bin_name: str) -> Callable[..., Any]:
+ def inner(command, *args, **kwargs):
+ global logger
+
+ if logger is None:
+ logger = get_default_logger("vcs")
+
+ repo = kwargs.pop("repo", None)
+ log_error = kwargs.pop("log_error", True)
+ stdout = kwargs.pop("stdout", None)
+ stdin = kwargs.pop("stdin", None)
+ if kwargs:
+ raise TypeError(kwargs)
+
+ args = list(args)
+
+ proc_kwargs = {}
+ if repo is not None:
+ # Make sure `cwd` is str type to work in different sub-versions of Python 3.
+ # Before 3.8, bytes were not accepted on Windows for `cwd`.
+ proc_kwargs["cwd"] = isomorphic_decode(repo)
+ if stdout is not None:
+ proc_kwargs["stdout"] = stdout
+ if stdin is not None:
+ proc_kwargs["stdin"] = stdin
+
+ command_line = [bin_name, command] + args
+ logger.debug(" ".join(command_line))
+ try:
+ func = subprocess.check_output if not stdout else subprocess.check_call
+ return func(command_line, stderr=subprocess.STDOUT, **proc_kwargs)
+ except OSError as e:
+ if log_error:
+ logger.error(e)
+ raise
+ except subprocess.CalledProcessError as e:
+ if log_error:
+ logger.error(e.output)
+ raise
+ return inner
+
+git = vcs("git")
+hg = vcs("hg")
+
+
+def bind_to_repo(vcs_func, repo, log_error=True):
+ return partial(vcs_func, repo=repo, log_error=log_error)
+
+
+def is_git_root(path, log_error=True):
+ try:
+ rv = git("rev-parse", "--show-cdup", repo=path, log_error=log_error)
+ except subprocess.CalledProcessError:
+ return False
+ return rv == b"\n"
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptcommandline.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptcommandline.py
new file mode 100644
index 0000000000..89788fe411
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptcommandline.py
@@ -0,0 +1,777 @@
+# mypy: allow-untyped-defs
+
+import argparse
+import os
+import sys
+from collections import OrderedDict
+from distutils.spawn import find_executable
+from datetime import timedelta
+
+from . import config
+from . import wpttest
+from .formatters import chromium, wptreport, wptscreenshot
+
+def abs_path(path):
+ return os.path.abspath(os.path.expanduser(path))
+
+
+def url_or_path(path):
+ from urllib.parse import urlparse
+
+ parsed = urlparse(path)
+    # A Windows drive letter parses as a one-character scheme, so only values
+    # with a scheme of more than two characters are treated as URLs
+    if len(parsed.scheme) > 2:
+ return path
+ else:
+ return abs_path(path)
+
+
+def require_arg(kwargs, name, value_func=None):
+ if value_func is None:
+ value_func = lambda x: x is not None
+
+ if name not in kwargs or not value_func(kwargs[name]):
+ print("Missing required argument %s" % name, file=sys.stderr)
+ sys.exit(1)
+
+
+def create_parser(product_choices=None):
+ from mozlog import commandline
+
+ from . import products
+
+ if product_choices is None:
+ product_choices = products.product_list
+
+ parser = argparse.ArgumentParser(description="""Runner for web-platform-tests tests.""",
+ usage="""%(prog)s [OPTION]... [TEST]...
+
+TEST is either the full path to a test file to run, or the URL of a test excluding
+scheme host and port.""")
+ parser.add_argument("--manifest-update", action="store_true", default=None,
+ help="Regenerate the test manifest.")
+ parser.add_argument("--no-manifest-update", action="store_false", dest="manifest_update",
+ help="Prevent regeneration of the test manifest.")
+ parser.add_argument("--manifest-download", action="store_true", default=None,
+ help="Attempt to download a preexisting manifest when updating.")
+ parser.add_argument("--no-manifest-download", action="store_false", dest="manifest_download",
+ help="Prevent download of the test manifest.")
+
+ parser.add_argument("--timeout-multiplier", action="store", type=float, default=None,
+ help="Multiplier relative to standard test timeout to use")
+ parser.add_argument("--run-by-dir", type=int, nargs="?", default=False,
+                        help="Split run into groups by directories. With a parameter, "
+                        "limit the depth of splits, e.g. --run-by-dir=1 to split by top-level "
+                        "directory")
+ parser.add_argument("--processes", action="store", type=int, default=None,
+ help="Number of simultaneous processes to use")
+
+ parser.add_argument("--no-capture-stdio", action="store_true", default=False,
+ help="Don't capture stdio and write to logging")
+ parser.add_argument("--no-fail-on-unexpected", action="store_false",
+ default=True,
+ dest="fail_on_unexpected",
+ help="Exit with status code 0 when test expectations are violated")
+ parser.add_argument("--no-fail-on-unexpected-pass", action="store_false",
+ default=True,
+ dest="fail_on_unexpected_pass",
+ help="Exit with status code 0 when all unexpected results are PASS")
+ parser.add_argument("--no-restart-on-new-group", action="store_false",
+ default=True,
+ dest="restart_on_new_group",
+                        help="Don't restart the test runner when starting a new test group")
+
+ mode_group = parser.add_argument_group("Mode")
+ mode_group.add_argument("--list-test-groups", action="store_true",
+ default=False,
+ help="List the top level directories containing tests that will run.")
+ mode_group.add_argument("--list-disabled", action="store_true",
+ default=False,
+ help="List the tests that are disabled on the current platform")
+ mode_group.add_argument("--list-tests", action="store_true",
+ default=False,
+ help="List all tests that will run")
+ stability_group = mode_group.add_mutually_exclusive_group()
+ stability_group.add_argument("--verify", action="store_true",
+ default=False,
+ help="Run a stability check on the selected tests")
+ stability_group.add_argument("--stability", action="store_true",
+ default=False,
+ help=argparse.SUPPRESS)
+ mode_group.add_argument("--verify-log-full", action="store_true",
+ default=False,
+ help="Output per-iteration test results when running verify")
+ mode_group.add_argument("--verify-repeat-loop", action="store",
+ default=10,
+ help="Number of iterations for a run that reloads each test without restart.",
+ type=int)
+ mode_group.add_argument("--verify-repeat-restart", action="store",
+ default=5,
+                            help="Number of iterations for a run that restarts the runner between each iteration",
+ type=int)
+ chaos_mode_group = mode_group.add_mutually_exclusive_group()
+ chaos_mode_group.add_argument("--verify-no-chaos-mode", action="store_false",
+ default=True,
+ dest="verify_chaos_mode",
+ help="Disable chaos mode when running on Firefox")
+ chaos_mode_group.add_argument("--verify-chaos-mode", action="store_true",
+ default=True,
+ dest="verify_chaos_mode",
+ help="Enable chaos mode when running on Firefox")
+ mode_group.add_argument("--verify-max-time", action="store",
+ default=None,
+ help="The maximum number of minutes for the job to run",
+ type=lambda x: timedelta(minutes=float(x)))
+ mode_group.add_argument("--repeat-max-time", action="store",
+ default=100,
+ help="The maximum number of minutes for the test suite to attempt repeat runs",
+ type=int)
+ output_results_group = mode_group.add_mutually_exclusive_group()
+ output_results_group.add_argument("--verify-no-output-results", action="store_false",
+ dest="verify_output_results",
+ default=True,
+                                      help="Disable printing individual test results and messages")
+ output_results_group.add_argument("--verify-output-results", action="store_true",
+ dest="verify_output_results",
+ default=True,
+                                      help="Print individual test results and messages")
+
+ test_selection_group = parser.add_argument_group("Test Selection")
+ test_selection_group.add_argument("--test-types", action="store",
+ nargs="*", default=wpttest.enabled_tests,
+ choices=wpttest.enabled_tests,
+ help="Test types to run")
+ test_selection_group.add_argument("--include", action="append",
+ help="URL prefix to include")
+ test_selection_group.add_argument("--include-file", action="store",
+                                      help="A file listing URL prefixes of tests to include")
+ test_selection_group.add_argument("--exclude", action="append",
+ help="URL prefix to exclude")
+ test_selection_group.add_argument("--include-manifest", type=abs_path,
+ help="Path to manifest listing tests to include")
+ test_selection_group.add_argument("--test-groups", dest="test_groups_file", type=abs_path,
+ help="Path to json file containing a mapping {group_name: [test_ids]}")
+ test_selection_group.add_argument("--skip-timeout", action="store_true",
+ help="Skip tests that are expected to time out")
+ test_selection_group.add_argument("--skip-implementation-status",
+ action="append",
+ choices=["not-implementing", "backlog", "implementing"],
+ help="Skip tests that have the given implementation status")
+ # TODO(bashi): Remove this when WebTransport over HTTP/3 server is enabled by default.
+ test_selection_group.add_argument("--enable-webtransport-h3",
+ action="store_true",
+ dest="enable_webtransport_h3",
+ default=None,
+ help="Enable tests that require WebTransport over HTTP/3 server (default: false)")
+ test_selection_group.add_argument("--no-enable-webtransport-h3", action="store_false", dest="enable_webtransport_h3",
+ help="Do not enable WebTransport tests on experimental channels")
+ test_selection_group.add_argument("--tag", action="append", dest="tags",
+ help="Labels applied to tests to include in the run. "
+                                           "Labels starting with dir: are equivalent to top-level directories.")
+ test_selection_group.add_argument("--default-exclude", action="store_true",
+ default=False,
+ help="Only run the tests explicitly given in arguments. "
+ "No tests will run if the list is empty, and the "
+ "program will exit with status code 0.")
+
+ debugging_group = parser.add_argument_group("Debugging")
+ debugging_group.add_argument('--debugger', const="__default__", nargs="?",
+ help="run under a debugger, e.g. gdb or valgrind")
+ debugging_group.add_argument('--debugger-args', help="arguments to the debugger")
+ debugging_group.add_argument("--rerun", action="store", type=int, default=1,
+                                 help="Number of times to re-run each test without restarts")
+ debugging_group.add_argument("--repeat", action="store", type=int, default=1,
+ help="Number of times to run the tests, restarting between each run")
+ debugging_group.add_argument("--repeat-until-unexpected", action="store_true", default=None,
+ help="Run tests in a loop until one returns an unexpected result")
+ debugging_group.add_argument('--retry-unexpected', type=int, default=0,
+ help=('Maximum number of times to retry unexpected tests. '
+                                       'A test is retried until it gets one of the expected statuses, '
+ 'or until it exhausts the maximum number of retries.'))
+ debugging_group.add_argument('--pause-after-test', action="store_true", default=None,
+ help="Halt the test runner after each test (this happens by default if only a single test is run)")
+ debugging_group.add_argument('--no-pause-after-test', dest="pause_after_test", action="store_false",
+ help="Don't halt the test runner irrespective of the number of tests run")
+ debugging_group.add_argument('--debug-test', dest="debug_test", action="store_true",
+ help="Run tests with additional debugging features enabled")
+
+ debugging_group.add_argument('--pause-on-unexpected', action="store_true",
+ help="Halt the test runner when an unexpected result is encountered")
+ debugging_group.add_argument('--no-restart-on-unexpected', dest="restart_on_unexpected",
+ default=True, action="store_false",
+ help="Don't restart on an unexpected result")
+
+ debugging_group.add_argument("--symbols-path", action="store", type=url_or_path,
+ help="Path or url to symbols file used to analyse crash minidumps.")
+ debugging_group.add_argument("--stackwalk-binary", action="store", type=abs_path,
+ help="Path to stackwalker program used to analyse minidumps.")
+ debugging_group.add_argument("--pdb", action="store_true",
+ help="Drop into pdb on python exception")
+
+ android_group = parser.add_argument_group("Android specific arguments")
+ android_group.add_argument("--adb-binary", action="store",
+ help="Path to adb binary to use")
+ android_group.add_argument("--package-name", action="store",
+ help="Android package name to run tests against")
+ android_group.add_argument("--keep-app-data-directory", action="store_true",
+ help="Don't delete the app data directory")
+ android_group.add_argument("--device-serial", action="append", default=[],
+ help="Running Android instances to connect to, if not emulator-5554")
+
+ config_group = parser.add_argument_group("Configuration")
+ config_group.add_argument("--binary", action="store",
+ type=abs_path, help="Desktop binary to run tests against")
+ config_group.add_argument('--binary-arg',
+ default=[], action="append", dest="binary_args",
+ help="Extra argument for the binary")
+ config_group.add_argument("--webdriver-binary", action="store", metavar="BINARY",
+ type=abs_path, help="WebDriver server binary to use")
+ config_group.add_argument('--webdriver-arg',
+ default=[], action="append", dest="webdriver_args",
+ help="Extra argument for the WebDriver binary")
+ config_group.add_argument("--metadata", action="store", type=abs_path, dest="metadata_root",
+                              help="Path to root directory containing test metadata")
+ config_group.add_argument("--tests", action="store", type=abs_path, dest="tests_root",
+                              help="Path to root directory containing test files")
+ config_group.add_argument("--manifest", action="store", type=abs_path, dest="manifest_path",
+ help="Path to test manifest (default is ${metadata_root}/MANIFEST.json)")
+ config_group.add_argument("--run-info", action="store", type=abs_path,
+ help="Path to directory containing extra json files to add to run info")
+ config_group.add_argument("--product", action="store", choices=product_choices,
+ default=None, help="Browser against which to run tests")
+ config_group.add_argument("--browser-version", action="store",
+ default=None, help="Informative string detailing the browser "
+ "release version. This is included in the run_info data.")
+ config_group.add_argument("--browser-channel", action="store",
+ default=None, help="Informative string detailing the browser "
+ "release channel. This is included in the run_info data.")
+ config_group.add_argument("--config", action="store", type=abs_path, dest="config",
+ help="Path to config file")
+ config_group.add_argument("--install-fonts", action="store_true",
+ default=None,
+                              help="Install additional system fonts")
+ config_group.add_argument("--no-install-fonts", dest="install_fonts", action="store_false",
+                              help="Do not install additional system fonts")
+ config_group.add_argument("--font-dir", action="store", type=abs_path, dest="font_dir",
+ help="Path to local font installation directory", default=None)
+ config_group.add_argument("--inject-script", action="store", dest="inject_script", default=None,
+ help="Path to script file to inject, useful for testing polyfills.")
+ config_group.add_argument("--headless", action="store_true",
+ help="Run browser in headless mode", default=None)
+ config_group.add_argument("--no-headless", action="store_false", dest="headless",
+ help="Don't run browser in headless mode")
+ config_group.add_argument("--instrument-to-file", action="store",
+ help="Path to write instrumentation logs to")
+
+ build_type = parser.add_mutually_exclusive_group()
+ build_type.add_argument("--debug-build", dest="debug", action="store_true",
+ default=None,
+ help="Build is a debug build (overrides any mozinfo file)")
+ build_type.add_argument("--release-build", dest="debug", action="store_false",
+ default=None,
+ help="Build is a release (overrides any mozinfo file)")
+
+ chunking_group = parser.add_argument_group("Test Chunking")
+ chunking_group.add_argument("--total-chunks", action="store", type=int, default=1,
+ help="Total number of chunks to use")
+ chunking_group.add_argument("--this-chunk", action="store", type=int, default=1,
+ help="Chunk number to run")
+ chunking_group.add_argument("--chunk-type", action="store", choices=["none", "hash", "dir_hash"],
+ default=None, help="Chunking type to use")
+
+ ssl_group = parser.add_argument_group("SSL/TLS")
+ ssl_group.add_argument("--ssl-type", action="store", default=None,
+ choices=["openssl", "pregenerated", "none"],
+ help="Type of ssl support to enable (running without ssl may lead to spurious errors)")
+
+ ssl_group.add_argument("--openssl-binary", action="store",
+ help="Path to openssl binary", default="openssl")
+ ssl_group.add_argument("--certutil-binary", action="store",
+ help="Path to certutil binary for use with Firefox + ssl")
+
+ ssl_group.add_argument("--ca-cert-path", action="store", type=abs_path,
+ help="Path to ca certificate when using pregenerated ssl certificates")
+ ssl_group.add_argument("--host-key-path", action="store", type=abs_path,
+ help="Path to host private key when using pregenerated ssl certificates")
+ ssl_group.add_argument("--host-cert-path", action="store", type=abs_path,
+ help="Path to host certificate when using pregenerated ssl certificates")
+
+ gecko_group = parser.add_argument_group("Gecko-specific")
+ gecko_group.add_argument("--prefs-root", dest="prefs_root", action="store", type=abs_path,
+ help="Path to the folder containing browser prefs")
+ gecko_group.add_argument("--preload-browser", dest="preload_browser", action="store_true",
+ default=None, help="Preload a gecko instance for faster restarts")
+ gecko_group.add_argument("--no-preload-browser", dest="preload_browser", action="store_false",
+ default=None, help="Don't preload a gecko instance for faster restarts")
+ gecko_group.add_argument("--disable-e10s", dest="gecko_e10s", action="store_false", default=True,
+ help="Run tests without electrolysis preferences")
+ gecko_group.add_argument("--disable-fission", dest="disable_fission", action="store_true", default=False,
+ help="Disable fission in Gecko.")
+ gecko_group.add_argument("--stackfix-dir", dest="stackfix_dir", action="store",
+ help="Path to directory containing assertion stack fixing scripts")
+ gecko_group.add_argument("--specialpowers-path", action="store",
+ help="Path to specialPowers extension xpi file")
+ gecko_group.add_argument("--setpref", dest="extra_prefs", action='append',
+ default=[], metavar="PREF=VALUE",
+ help="Defines an extra user preference (overrides those in prefs_root)")
+ gecko_group.add_argument("--leak-check", dest="leak_check", action="store_true", default=None,
+ help="Enable leak checking (enabled by default for debug builds, "
+ "silently ignored for opt, mobile)")
+ gecko_group.add_argument("--no-leak-check", dest="leak_check", action="store_false", default=None,
+ help="Disable leak checking")
+ gecko_group.add_argument("--stylo-threads", action="store", type=int, default=1,
+ help="Number of parallel threads to use for stylo")
+ gecko_group.add_argument("--reftest-internal", dest="reftest_internal", action="store_true",
+ default=None, help="Enable reftest runner implemented inside Marionette")
+ gecko_group.add_argument("--reftest-external", dest="reftest_internal", action="store_false",
+ help="Disable reftest runner implemented inside Marionette")
+ gecko_group.add_argument("--reftest-screenshot", dest="reftest_screenshot", action="store",
+ choices=["always", "fail", "unexpected"], default=None,
+ help="With --reftest-internal, when to take a screenshot")
+ gecko_group.add_argument("--chaos", dest="chaos_mode_flags", action="store",
+ nargs="?", const=0xFFFFFFFF, type=lambda x: int(x, 16),
+ help="Enable chaos mode with the specified feature flag "
+ "(see http://searchfox.org/mozilla-central/source/mfbt/ChaosMode.h for "
+ "details). If no value is supplied, all features are activated")
+
+ servo_group = parser.add_argument_group("Servo-specific")
+ servo_group.add_argument("--user-stylesheet",
+ default=[], action="append", dest="user_stylesheets",
+ help="Inject a user CSS stylesheet into every test.")
+
+ chrome_group = parser.add_argument_group("Chrome-specific")
+ chrome_group.add_argument("--enable-mojojs", action="store_true", default=False,
+                              help="Enable MojoJS for testing. Note that this flag is usually "
+ "enabled automatically by `wpt run`, if it succeeds in downloading "
+ "the right version of mojojs.zip or if --mojojs-path is specified.")
+ chrome_group.add_argument("--mojojs-path",
+ help="Path to mojojs gen/ directory. If it is not specified, `wpt run` "
+ "will download and extract mojojs.zip into _venv2/mojojs/gen.")
+ chrome_group.add_argument("--enable-swiftshader", action="store_true", default=False,
+ help="Enable SwiftShader for CPU-based 3D graphics. This can be used "
+ "in environments with no hardware GPU available.")
+ chrome_group.add_argument("--enable-experimental", action="store_true", dest="enable_experimental",
+ help="Enable --enable-experimental-web-platform-features flag", default=None)
+ chrome_group.add_argument("--no-enable-experimental", action="store_false", dest="enable_experimental",
+ help="Do not enable --enable-experimental-web-platform-features flag "
+ "on experimental channels")
+
+ sauce_group = parser.add_argument_group("Sauce Labs-specific")
+ sauce_group.add_argument("--sauce-browser", dest="sauce_browser",
+ help="Sauce Labs browser name")
+ sauce_group.add_argument("--sauce-platform", dest="sauce_platform",
+ help="Sauce Labs OS platform")
+ sauce_group.add_argument("--sauce-version", dest="sauce_version",
+ help="Sauce Labs browser version")
+ sauce_group.add_argument("--sauce-build", dest="sauce_build",
+ help="Sauce Labs build identifier")
+ sauce_group.add_argument("--sauce-tags", dest="sauce_tags", nargs="*",
+ help="Sauce Labs identifying tag", default=[])
+ sauce_group.add_argument("--sauce-tunnel-id", dest="sauce_tunnel_id",
+ help="Sauce Connect tunnel identifier")
+ sauce_group.add_argument("--sauce-user", dest="sauce_user",
+ help="Sauce Labs user name")
+ sauce_group.add_argument("--sauce-key", dest="sauce_key",
+ default=os.environ.get("SAUCE_ACCESS_KEY"),
+ help="Sauce Labs access key")
+ sauce_group.add_argument("--sauce-connect-binary",
+ dest="sauce_connect_binary",
+ help="Path to Sauce Connect binary")
+ sauce_group.add_argument("--sauce-init-timeout", action="store",
+ type=int, default=30,
+ help="Number of seconds to wait for Sauce "
+ "Connect tunnel to be available before "
+ "aborting")
+ sauce_group.add_argument("--sauce-connect-arg", action="append",
+ default=[], dest="sauce_connect_args",
+ help="Command-line argument to forward to the "
+ "Sauce Connect binary (repeatable)")
+
+ taskcluster_group = parser.add_argument_group("Taskcluster-specific")
+ taskcluster_group.add_argument("--github-checks-text-file",
+ type=str,
+ help="Path to GitHub checks output file")
+
+ webkit_group = parser.add_argument_group("WebKit-specific")
+ webkit_group.add_argument("--webkit-port", dest="webkit_port",
+ help="WebKit port")
+
+ safari_group = parser.add_argument_group("Safari-specific")
+ safari_group.add_argument("--kill-safari", dest="kill_safari", action="store_true", default=False,
+ help="Kill Safari when stopping the browser")
+
+ parser.add_argument("test_list", nargs="*",
+ help="List of URLs for tests to run, or paths including tests to run. "
+ "(equivalent to --include)")
+
+ def screenshot_api_wrapper(formatter, api):
+ formatter.api = api
+ return formatter
+
+ commandline.fmt_options["api"] = (screenshot_api_wrapper,
+ "Cache API (default: %s)" % wptscreenshot.DEFAULT_API,
+ {"wptscreenshot"}, "store")
+
+ commandline.log_formatters["chromium"] = (chromium.ChromiumFormatter, "Chromium Layout Tests format")
+ commandline.log_formatters["wptreport"] = (wptreport.WptreportFormatter, "wptreport format")
+ commandline.log_formatters["wptscreenshot"] = (wptscreenshot.WptscreenshotFormatter, "wpt.fyi screenshots")
+
+ commandline.add_logging_group(parser)
+ return parser
+
+
+def set_from_config(kwargs):
+ if kwargs["config"] is None:
+ config_path = config.path()
+ else:
+ config_path = kwargs["config"]
+
+ kwargs["config_path"] = config_path
+
+ kwargs["config"] = config.read(kwargs["config_path"])
+
+ keys = {"paths": [("prefs", "prefs_root", True),
+ ("run_info", "run_info", True)],
+ "web-platform-tests": [("remote_url", "remote_url", False),
+ ("branch", "branch", False),
+ ("sync_path", "sync_path", True)],
+ "SSL": [("openssl_binary", "openssl_binary", True),
+ ("certutil_binary", "certutil_binary", True),
+ ("ca_cert_path", "ca_cert_path", True),
+ ("host_cert_path", "host_cert_path", True),
+ ("host_key_path", "host_key_path", True)]}
+
+ for section, values in keys.items():
+ for config_value, kw_value, is_path in values:
+ if kw_value in kwargs and kwargs[kw_value] is None:
+ if not is_path:
+ new_value = kwargs["config"].get(section, config.ConfigDict({})).get(config_value)
+ else:
+ new_value = kwargs["config"].get(section, config.ConfigDict({})).get_path(config_value)
+ kwargs[kw_value] = new_value
+
+ kwargs["test_paths"] = get_test_paths(kwargs["config"])
+
+ if kwargs["tests_root"]:
+ if "/" not in kwargs["test_paths"]:
+ kwargs["test_paths"]["/"] = {}
+ kwargs["test_paths"]["/"]["tests_path"] = kwargs["tests_root"]
+
+ if kwargs["metadata_root"]:
+ if "/" not in kwargs["test_paths"]:
+ kwargs["test_paths"]["/"] = {}
+ kwargs["test_paths"]["/"]["metadata_path"] = kwargs["metadata_root"]
+
+ if kwargs.get("manifest_path"):
+ if "/" not in kwargs["test_paths"]:
+ kwargs["test_paths"]["/"] = {}
+ kwargs["test_paths"]["/"]["manifest_path"] = kwargs["manifest_path"]
+
+ kwargs["suite_name"] = kwargs["config"].get("web-platform-tests", {}).get("name", "web-platform-tests")
+
+
+
+
+def get_test_paths(config):
+ # Set up test_paths
+ test_paths = OrderedDict()
+
+ for section in config.keys():
+ if section.startswith("manifest:"):
+ manifest_opts = config.get(section)
+ url_base = manifest_opts.get("url_base", "/")
+ test_paths[url_base] = {
+ "tests_path": manifest_opts.get_path("tests"),
+ "metadata_path": manifest_opts.get_path("metadata"),
+ }
+ if "manifest" in manifest_opts:
+ test_paths[url_base]["manifest_path"] = manifest_opts.get_path("manifest")
+
+ return test_paths
+
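+# The "manifest:*" sections read above come from the wptrunner ini config; a
+# minimal sketch (the section suffix and paths are hypothetical):
+#
+#     [manifest:upstream]
+#     tests = /path/to/wpt/tests
+#     metadata = /path/to/wpt/meta
+#     url_base = /
+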
+
+def exe_path(name):
+ if name is None:
+ return
+
+ path = find_executable(name)
+ if path and os.access(path, os.X_OK):
+ return path
+ else:
+ return None
+
+
+def check_paths(kwargs):
+ for test_paths in kwargs["test_paths"].values():
+ if not ("tests_path" in test_paths and
+ "metadata_path" in test_paths):
+ print("Fatal: must specify both a test path and metadata path")
+ sys.exit(1)
+ if "manifest_path" not in test_paths:
+ test_paths["manifest_path"] = os.path.join(test_paths["metadata_path"],
+ "MANIFEST.json")
+ for key, path in test_paths.items():
+ name = key.split("_", 1)[0]
+
+ if name == "manifest":
+ # For the manifest we can create it later, so just check the path
+ # actually exists
+ path = os.path.dirname(path)
+
+ if not os.path.exists(path):
+ print(f"Fatal: {name} path {path} does not exist")
+ sys.exit(1)
+
+ if not os.path.isdir(path):
+ print(f"Fatal: {name} path {path} is not a directory")
+ sys.exit(1)
+
+
+def check_args(kwargs):
+ set_from_config(kwargs)
+
+ if kwargs["product"] is None:
+ kwargs["product"] = "firefox"
+
+ if kwargs["manifest_update"] is None:
+ kwargs["manifest_update"] = True
+
+ if "sauce" in kwargs["product"]:
+ kwargs["pause_after_test"] = False
+
+ if kwargs["test_list"]:
+ if kwargs["include"] is not None:
+ kwargs["include"].extend(kwargs["test_list"])
+ else:
+ kwargs["include"] = kwargs["test_list"]
+
+ if kwargs["run_info"] is None:
+ kwargs["run_info"] = kwargs["config_path"]
+
+ if kwargs["this_chunk"] > 1:
+ require_arg(kwargs, "total_chunks", lambda x: x >= kwargs["this_chunk"])
+
+ if kwargs["chunk_type"] is None:
+ if kwargs["total_chunks"] > 1:
+ kwargs["chunk_type"] = "dir_hash"
+ else:
+ kwargs["chunk_type"] = "none"
+
+ if kwargs["test_groups_file"] is not None:
+ if kwargs["run_by_dir"] is not False:
+ print("Can't pass --test-groups and --run-by-dir")
+ sys.exit(1)
+ if not os.path.exists(kwargs["test_groups_file"]):
+ print("--test-groups file %s not found" % kwargs["test_groups_file"])
+ sys.exit(1)
+
+ # When running on Android, the number of workers is decided by the number of
+ # emulators. Each worker will use one emulator to run the Android browser.
+ if kwargs["device_serial"]:
+ if kwargs["processes"] is None:
+ kwargs["processes"] = len(kwargs["device_serial"])
+ elif len(kwargs["device_serial"]) != kwargs["processes"]:
+ print("--processes does not match number of devices")
+ sys.exit(1)
+ elif len(set(kwargs["device_serial"])) != len(kwargs["device_serial"]):
+ print("Got duplicate --device-serial value")
+ sys.exit(1)
+
+ if kwargs["processes"] is None:
+ kwargs["processes"] = 1
+
+ if kwargs["debugger"] is not None:
+ import mozdebug
+ if kwargs["debugger"] == "__default__":
+ kwargs["debugger"] = mozdebug.get_default_debugger_name()
+ debug_info = mozdebug.get_debugger_info(kwargs["debugger"],
+ kwargs["debugger_args"])
+ if debug_info and debug_info.interactive:
+ if kwargs["processes"] != 1:
+ kwargs["processes"] = 1
+ kwargs["no_capture_stdio"] = True
+ kwargs["debug_info"] = debug_info
+ else:
+ kwargs["debug_info"] = None
+
+ if kwargs["binary"] is not None:
+ if not os.path.exists(kwargs["binary"]):
+ print("Binary path %s does not exist" % kwargs["binary"], file=sys.stderr)
+ sys.exit(1)
+
+ if kwargs["ssl_type"] is None:
+ if None not in (kwargs["ca_cert_path"], kwargs["host_cert_path"], kwargs["host_key_path"]):
+ kwargs["ssl_type"] = "pregenerated"
+ elif exe_path(kwargs["openssl_binary"]) is not None:
+ kwargs["ssl_type"] = "openssl"
+ else:
+ kwargs["ssl_type"] = "none"
+
+ if kwargs["ssl_type"] == "pregenerated":
+ require_arg(kwargs, "ca_cert_path", lambda x:os.path.exists(x))
+ require_arg(kwargs, "host_cert_path", lambda x:os.path.exists(x))
+ require_arg(kwargs, "host_key_path", lambda x:os.path.exists(x))
+
+ elif kwargs["ssl_type"] == "openssl":
+ path = exe_path(kwargs["openssl_binary"])
+ if path is None:
+ print("openssl-binary argument missing or not a valid executable", file=sys.stderr)
+ sys.exit(1)
+ kwargs["openssl_binary"] = path
+
+ if kwargs["ssl_type"] != "none" and kwargs["product"] == "firefox" and kwargs["certutil_binary"]:
+ path = exe_path(kwargs["certutil_binary"])
+ if path is None:
+ print("certutil-binary argument missing or not a valid executable", file=sys.stderr)
+ sys.exit(1)
+ kwargs["certutil_binary"] = path
+
+ if kwargs['extra_prefs']:
+ missing = any('=' not in prefarg for prefarg in kwargs['extra_prefs'])
+ if missing:
+ print("Preferences via --setpref must be in key=value format", file=sys.stderr)
+ sys.exit(1)
+ kwargs['extra_prefs'] = [tuple(prefarg.split('=', 1)) for prefarg in
+ kwargs['extra_prefs']]
+
+ if kwargs["reftest_internal"] is None:
+ kwargs["reftest_internal"] = True
+
+ if kwargs["reftest_screenshot"] is None:
+ kwargs["reftest_screenshot"] = "unexpected" if not kwargs["debug_test"] else "always"
+
+ if kwargs["preload_browser"] is None:
+ # Default to preloading a gecko instance if we're only running a single process
+ kwargs["preload_browser"] = kwargs["processes"] == 1
+
+ return kwargs
+
+
+def check_args_metadata_update(kwargs):
+ set_from_config(kwargs)
+
+ if kwargs["product"] is None:
+ kwargs["product"] = "firefox"
+
+ for item in kwargs["run_log"]:
+ if os.path.isdir(item):
+ print("Log file %s is a directory" % item, file=sys.stderr)
+ sys.exit(1)
+
+ if kwargs["properties_file"] is None and not kwargs["no_properties_file"]:
+ default_file = os.path.join(kwargs["test_paths"]["/"]["metadata_path"],
+ "update_properties.json")
+ if os.path.exists(default_file):
+ kwargs["properties_file"] = default_file
+
+ return kwargs
+
+
+def check_args_update(kwargs):
+ kwargs = check_args_metadata_update(kwargs)
+
+ if kwargs["patch"] is None:
+ kwargs["patch"] = kwargs["sync"]
+
+ return kwargs
+
+
+def create_parser_metadata_update(product_choices=None):
+ from mozlog.structured import commandline
+
+ from . import products
+
+ if product_choices is None:
+ product_choices = products.product_list
+
+ parser = argparse.ArgumentParser("web-platform-tests-update",
+ description="Update script for web-platform-tests tests.")
+ # This will be removed once all consumers are updated to the properties-file based system
+ parser.add_argument("--product", action="store", choices=product_choices,
+ default=None, help=argparse.SUPPRESS)
+ parser.add_argument("--config", action="store", type=abs_path, help="Path to config file")
+ parser.add_argument("--metadata", action="store", type=abs_path, dest="metadata_root",
+ help="Path to the folder containing test metadata"),
+ parser.add_argument("--tests", action="store", type=abs_path, dest="tests_root",
+ help="Path to web-platform-tests"),
+ parser.add_argument("--manifest", action="store", type=abs_path, dest="manifest_path",
+ help="Path to test manifest (default is ${metadata_root}/MANIFEST.json)")
+ parser.add_argument("--full", action="store_true", default=False,
+ help="For all tests that are updated, remove any existing conditions and missing subtests")
+ parser.add_argument("--disable-intermittent", nargs="?", action="store", const="unstable", default=None,
+ help=("Reason for disabling tests. When updating test results, disable tests that have "
+ "inconsistent results across many runs with the given reason."))
+ parser.add_argument("--update-intermittent", action="store_true", default=False,
+ help="Update test metadata with expected intermittent statuses.")
+ parser.add_argument("--remove-intermittent", action="store_true", default=False,
+ help="Remove obsolete intermittent statuses from expected statuses.")
+ parser.add_argument("--no-remove-obsolete", action="store_false", dest="remove_obsolete", default=True,
+ help="Don't remove metadata files that no longer correspond to a test file")
+ parser.add_argument("--properties-file",
+ help="""Path to a JSON file containing run_info properties to use in update. This must be of the form
+ {"properties": [<name>], "dependents": {<property name>: [<name>]}}""")
+ parser.add_argument("--no-properties-file", action="store_true",
+ help="Don't use the default properties file at "
+ "${metadata_root}/update_properties.json, even if it exists.")
+ parser.add_argument("--extra-property", action="append", default=[],
+ help="Extra property from run_info.json to use in metadata update.")
+ # TODO: Should make this required iff run=logfile
+ parser.add_argument("run_log", nargs="*", type=abs_path,
+ help="Log file from run of tests")
+ commandline.add_logging_group(parser)
+ return parser
+
+
+def create_parser_update(product_choices=None):
+ parser = create_parser_metadata_update(product_choices)
+ parser.add_argument("--sync-path", action="store", type=abs_path,
+ help="Path to store git checkout of web-platform-tests during update"),
+ parser.add_argument("--remote_url", action="store",
+ help="URL of web-platfrom-tests repository to sync against"),
+ parser.add_argument("--branch", action="store", type=abs_path,
+ help="Remote branch to sync against")
+ parser.add_argument("--rev", action="store", help="Revision to sync to")
+ parser.add_argument("--patch", action="store_true", dest="patch", default=None,
+ help="Create a VCS commit containing the changes.")
+ parser.add_argument("--no-patch", action="store_false", dest="patch",
+ help="Don't create a VCS commit containing the changes.")
+ parser.add_argument("--sync", dest="sync", action="store_true", default=False,
+ help="Sync the tests with the latest from upstream (implies --patch)")
+ parser.add_argument("--no-store-state", action="store_false", dest="store_state",
+ help="Store state so that steps can be resumed after failure")
+ parser.add_argument("--continue", action="store_true",
+ help="Continue a previously started run of the update script")
+ parser.add_argument("--abort", action="store_true",
+ help="Clear state from a previous incomplete run of the update script")
+ parser.add_argument("--exclude", action="store", nargs="*",
+ help="List of glob-style paths to exclude when syncing tests")
+ parser.add_argument("--include", action="store", nargs="*",
+ help="List of glob-style paths to include which would otherwise be excluded when syncing tests")
+ return parser
+
+
+def create_parser_reduce(product_choices=None):
+ parser = create_parser(product_choices)
+ parser.add_argument("target", action="store", help="Test id that is unstable")
+ return parser
+
+
+def parse_args():
+ parser = create_parser()
+ rv = vars(parser.parse_args())
+ check_args(rv)
+ return rv
+
+
+def parse_args_update():
+ parser = create_parser_update()
+ rv = vars(parser.parse_args())
+ check_args_update(rv)
+ return rv
+
+
+def parse_args_reduce():
+ parser = create_parser_reduce()
+ rv = vars(parser.parse_args())
+ check_args(rv)
+ return rv
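As a small aside, the --setpref normalisation performed in check_args above amounts to the following standalone sketch (not part of the patch; the preference names are made up): each "key=value" string is validated and then split once, on the first "=", into a (key, value) tuple.

# Hypothetical sketch of the --setpref handling; values are illustrative.
extra_prefs = ["dom.example.enabled=true", "marionette.port=2828"]
if any("=" not in pref for pref in extra_prefs):
    raise SystemExit("Preferences via --setpref must be in key=value format")
extra_prefs = [tuple(pref.split("=", 1)) for pref in extra_prefs]
# [("dom.example.enabled", "true"), ("marionette.port", "2828")]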
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptlogging.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptlogging.py
new file mode 100644
index 0000000000..06b34dabdb
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptlogging.py
@@ -0,0 +1,109 @@
+# mypy: allow-untyped-defs
+
+import logging
+from threading import Thread
+
+from mozlog import commandline, stdadapter, set_default_logger
+from mozlog.structuredlog import StructuredLogger, log_levels
+
+
+def setup(args, defaults, formatter_defaults=None):
+ logger = args.pop('log', None)
+ if logger:
+ set_default_logger(logger)
+ StructuredLogger._logger_states["web-platform-tests"] = logger._state
+ else:
+ logger = commandline.setup_logging("web-platform-tests", args, defaults,
+ formatter_defaults=formatter_defaults)
+ setup_stdlib_logger()
+
+ for name in list(args.keys()):
+ if name.startswith("log_"):
+ args.pop(name)
+
+ return logger
+
+
+def setup_stdlib_logger():
+ logging.root.handlers = []
+ logging.root = stdadapter.std_logging_adapter(logging.root)
+
+
+class LogLevelRewriter:
+ """Filter that replaces log messages at specified levels with messages
+ at a different level.
+
+ This can be used to e.g. downgrade log messages from ERROR to WARNING
+ in some component where ERRORs are not critical.
+
+ :param inner: Handler to use for messages that pass this filter
+ :param from_levels: List of levels which should be affected
+ :param to_level: Log level to set for the affected messages
+ """
+ def __init__(self, inner, from_levels, to_level):
+ self.inner = inner
+ self.from_levels = [item.upper() for item in from_levels]
+ self.to_level = to_level.upper()
+
+ def __call__(self, data):
+ if data["action"] == "log" and data["level"].upper() in self.from_levels:
+ data = data.copy()
+ data["level"] = self.to_level
+ return self.inner(data)
+
+
+class LoggedAboveLevelHandler:
+ """Filter that records whether any log message above a certain level has been
+ seen.
+
+ :param min_level: Minimum level to record as a str (e.g., "CRITICAL")
+
+ """
+ def __init__(self, min_level):
+ self.min_level = log_levels[min_level.upper()]
+ self.has_log = False
+
+ def __call__(self, data):
+ if (data["action"] == "log" and
+ not self.has_log and
+ log_levels[data["level"]] <= self.min_level):
+ self.has_log = True
+
+
+class QueueHandler(logging.Handler):
+ def __init__(self, queue, level=logging.NOTSET):
+ self.queue = queue
+ logging.Handler.__init__(self, level=level)
+
+ def createLock(self):
+ # The queue provides its own locking
+ self.lock = None
+
+ def emit(self, record):
+ msg = self.format(record)
+ data = {"action": "log",
+ "level": record.levelname,
+ "thread": record.threadName,
+ "pid": record.process,
+ "source": self.name,
+ "message": msg}
+ self.queue.put(data)
+
+
+class LogQueueThread(Thread):
+ """Thread for handling log messages from a queue"""
+ def __init__(self, queue, logger):
+ self.queue = queue
+ self.logger = logger
+ super().__init__(name="Thread-Log")
+
+ def run(self):
+ while True:
+ try:
+ data = self.queue.get()
+ except (EOFError, OSError):
+ break
+ if data is None:
+ # A None message is used to shut down the logging thread
+ break
+ self.logger.log_raw(data)
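A minimal usage sketch for the LogLevelRewriter defined above (not part of the patch). It assumes wptrunner is importable; the inner handler here is just a list-collecting callable and the logger name is made up. A message logged at ERROR reaches the inner handler with level WARNING.

# Hypothetical example.
from mozlog.structuredlog import StructuredLogger
from wptrunner.wptlogging import LogLevelRewriter

collected = []
logger = StructuredLogger("example")
logger.add_handler(LogLevelRewriter(collected.append,
                                    from_levels=["ERROR"],
                                    to_level="WARNING"))
logger.error("this arrives at the inner handler as a WARNING")
assert collected[0]["level"] == "WARNING"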
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/__init__.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/__init__.py
new file mode 100644
index 0000000000..e354d5ff4f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/__init__.py
@@ -0,0 +1,5 @@
+# flake8: noqa (not ideal, but nicer than adding noqa: F401 to every line!)
+from .serializer import serialize
+from .parser import parse
+from .backends.static import compile as compile_static
+from .backends.conditional import compile as compile_condition
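A round-trip sketch using the helpers re-exported here (not part of the patch). The manifest text is illustrative, and the import path assumes wptrunner is importable.

# Hypothetical example.
from wptrunner.wptmanifest import parse, serialize

manifest = b"""[example.html]
  expected:
    if os == "win": FAIL
    PASS
"""
tree = parse(manifest)   # returns the root DataNode of the AST
print(serialize(tree))   # re-emits the manifest in canonical syntax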
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/__init__.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/__init__.py
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/base.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/base.py
new file mode 100644
index 0000000000..c1ec206b75
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/base.py
@@ -0,0 +1,221 @@
+# mypy: allow-untyped-defs
+
+import abc
+
+from ..node import NodeVisitor
+from ..parser import parse
+
+
+class Compiler(NodeVisitor):
+ __metaclass__ = abc.ABCMeta
+
+ def compile(self, tree, data_cls_getter=None, **kwargs):
+ self._kwargs = kwargs
+ return self._compile(tree, data_cls_getter, **kwargs)
+
+ def _compile(self, tree, data_cls_getter=None, **kwargs):
+ """Compile a raw AST into a form where conditional expressions
+ are represented by ConditionalValue objects that can be evaluated
+ at runtime.
+
+ tree - The root node of the wptmanifest AST to compile
+
+        data_cls_getter - A function taking two parameters (the previous
+                          output node and the current AST node) and returning
+                          the class of the output node to use for the current
+                          AST node
+ """
+ if data_cls_getter is None:
+ self.data_cls_getter = lambda x, y: ManifestItem
+ else:
+ self.data_cls_getter = data_cls_getter
+
+ self.tree = tree
+ self.output_node = self._initial_output_node(tree, **kwargs)
+ self.visit(tree)
+ if hasattr(self.output_node, "set_defaults"):
+ self.output_node.set_defaults()
+ assert self.output_node is not None
+ return self.output_node
+
+ def _initial_output_node(self, node, **kwargs):
+ return self.data_cls_getter(None, None)(node, **kwargs)
+
+ def visit_DataNode(self, node):
+ if node != self.tree:
+ output_parent = self.output_node
+ self.output_node = self.data_cls_getter(self.output_node, node)(node, **self._kwargs)
+ else:
+ output_parent = None
+
+ assert self.output_node is not None
+
+ for child in node.children:
+ self.visit(child)
+
+ if output_parent is not None:
+ # Append to the parent *after* processing all the node data
+ output_parent.append(self.output_node)
+ self.output_node = self.output_node.parent
+
+ assert self.output_node is not None
+
+ @abc.abstractmethod
+ def visit_KeyValueNode(self, node):
+ pass
+
+ def visit_ListNode(self, node):
+ return [self.visit(child) for child in node.children]
+
+ def visit_ValueNode(self, node):
+ return node.data
+
+ def visit_AtomNode(self, node):
+ return node.data
+
+ @abc.abstractmethod
+ def visit_ConditionalNode(self, node):
+ pass
+
+ def visit_StringNode(self, node):
+ indexes = [self.visit(child) for child in node.children]
+
+ def value(x):
+ rv = node.data
+ for index in indexes:
+ rv = rv[index(x)]
+ return rv
+ return value
+
+ def visit_NumberNode(self, node):
+ if "." in node.data:
+ return float(node.data)
+ else:
+ return int(node.data)
+
+ def visit_VariableNode(self, node):
+ indexes = [self.visit(child) for child in node.children]
+
+ def value(x):
+ data = x[node.data]
+ for index in indexes:
+ data = data[index(x)]
+ return data
+ return value
+
+ def visit_IndexNode(self, node):
+ assert len(node.children) == 1
+ return self.visit(node.children[0])
+
+ @abc.abstractmethod
+ def visit_UnaryExpressionNode(self, node):
+ pass
+
+ @abc.abstractmethod
+ def visit_BinaryExpressionNode(self, node):
+ pass
+
+ @abc.abstractmethod
+ def visit_UnaryOperatorNode(self, node):
+ pass
+
+ @abc.abstractmethod
+ def visit_BinaryOperatorNode(self, node):
+ pass
+
+
+class ManifestItem:
+ def __init__(self, node, **kwargs):
+ self.parent = None
+ self.node = node
+ self.children = []
+ self._data = {}
+
+ def __repr__(self):
+ return f"<{self.__class__} {self.node.data}>"
+
+ def __str__(self):
+ rv = [repr(self)]
+ for item in self.children:
+ rv.extend(" %s" % line for line in str(item).split("\n"))
+ return "\n".join(rv)
+
+ def set_defaults(self):
+ pass
+
+ @property
+ def is_empty(self):
+ if self._data:
+ return False
+ return all(child.is_empty for child in self.children)
+
+ @property
+ def root(self):
+ node = self
+ while node.parent is not None:
+ node = node.parent
+ return node
+
+ @property
+ def name(self):
+ return self.node.data
+
+ def get(self, key):
+ for node in [self, self.root]:
+ if key in node._data:
+ return node._data[key]
+ raise KeyError
+
+ def set(self, name, value):
+ self._data[name] = value
+
+ def remove(self):
+ if self.parent:
+ self.parent.children.remove(self)
+ self.parent = None
+
+ def iterchildren(self, name=None):
+ for item in self.children:
+ if item.name == name or name is None:
+ yield item
+
+ def has_key(self, key):
+ for node in [self, self.root]:
+ if key in node._data:
+ return True
+ return False
+
+ def _flatten(self):
+ rv = {}
+ for node in [self, self.root]:
+ for name, value in node._data.items():
+ if name not in rv:
+ rv[name] = value
+ return rv
+
+ def iteritems(self):
+ yield from self._flatten().items()
+
+ def iterkeys(self):
+ yield from self._flatten().keys()
+
+ def itervalues(self):
+ yield from self._flatten().values()
+
+ def append(self, child):
+ child.parent = self
+ self.children.append(child)
+ return child
+
+
+def compile_ast(compiler, ast, data_cls_getter=None, **kwargs):
+ return compiler().compile(ast,
+ data_cls_getter=data_cls_getter,
+ **kwargs)
+
+
+def compile(compiler, stream, data_cls_getter=None, **kwargs):
+ return compile_ast(compiler,
+ parse(stream),
+ data_cls_getter=data_cls_getter,
+ **kwargs)
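For orientation, a hedged sketch of the data_cls_getter hook used by the compile entry points above; RootData and TestData are made-up subclasses. The getter is called with the previous output node and the current AST node, and returns the ManifestItem subclass to instantiate for that node.

# Hypothetical example; the import path assumes wptrunner is importable.
from wptrunner.wptmanifest.backends.base import ManifestItem

class RootData(ManifestItem):
    pass

class TestData(ManifestItem):
    pass

def data_cls_getter(output_node, ast_node):
    # The root of the output tree is requested with no parent output node.
    return RootData if output_node is None else TestData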
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/conditional.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/conditional.py
new file mode 100644
index 0000000000..7d4f257f1a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/conditional.py
@@ -0,0 +1,402 @@
+# mypy: allow-untyped-defs
+
+import operator
+
+from ..node import NodeVisitor, DataNode, ConditionalNode, KeyValueNode, ListNode, ValueNode, BinaryExpressionNode, VariableNode
+from ..parser import parse
+
+
+class ConditionalValue:
+ def __init__(self, node, condition_func):
+ self.node = node
+ assert callable(condition_func)
+ self.condition_func = condition_func
+ if isinstance(node, ConditionalNode):
+ assert len(node.children) == 2
+ self.condition_node = self.node.children[0]
+ assert isinstance(node.children[1], (ValueNode, ListNode))
+ self.value_node = self.node.children[1]
+ else:
+ assert isinstance(node, (ValueNode, ListNode))
+ self.condition_node = None
+ self.value_node = self.node
+
+ @property
+ def value(self):
+ if isinstance(self.value_node, ValueNode):
+ return self.value_node.data
+ else:
+ return [item.data for item in self.value_node.children]
+
+ @value.setter
+ def value(self, value):
+ if isinstance(self.value_node, ValueNode):
+ self.value_node.data = value
+ else:
+            assert isinstance(self.value_node, ListNode)
+ while self.value_node.children:
+ self.value_node.children[0].remove()
+ assert len(self.value_node.children) == 0
+ for list_value in value:
+ self.value_node.append(ValueNode(list_value))
+
+ def __call__(self, run_info):
+ return self.condition_func(run_info)
+
+ def value_as(self, type_func):
+ """Get value and convert to a given type.
+
+ This is unfortunate, but we don't currently have a good way to specify that
+ specific properties should have their data returned as specific types"""
+ value = self.value
+ if type_func is not None:
+ value = type_func(value)
+ return value
+
+ def remove(self):
+ if len(self.node.parent.children) == 1:
+ self.node.parent.remove()
+ self.node.remove()
+
+ @property
+ def variables(self):
+ rv = set()
+ if self.condition_node is None:
+ return rv
+ stack = [self.condition_node]
+ while stack:
+ node = stack.pop()
+ if isinstance(node, VariableNode):
+ rv.add(node.data)
+ for child in reversed(node.children):
+ stack.append(child)
+ return rv
+
+
+class Compiler(NodeVisitor):
+ def compile(self, tree, data_cls_getter=None, **kwargs):
+ """Compile a raw AST into a form where conditional expressions
+ are represented by ConditionalValue objects that can be evaluated
+ at runtime.
+
+ tree - The root node of the wptmanifest AST to compile
+
+        data_cls_getter - A function taking two parameters (the previous
+                          output node and the current AST node) and returning
+                          the class of the output node to use for the current
+                          AST node
+ """
+ if data_cls_getter is None:
+ self.data_cls_getter = lambda x, y: ManifestItem
+ else:
+ self.data_cls_getter = data_cls_getter
+
+ self.tree = tree
+ self.output_node = self._initial_output_node(tree, **kwargs)
+ self.visit(tree)
+ if hasattr(self.output_node, "set_defaults"):
+ self.output_node.set_defaults()
+ assert self.output_node is not None
+ return self.output_node
+
+ def compile_condition(self, condition):
+ """Compile a ConditionalNode into a ConditionalValue.
+
+ condition: A ConditionalNode"""
+ data_node = DataNode()
+ key_value_node = KeyValueNode()
+ key_value_node.append(condition.copy())
+ data_node.append(key_value_node)
+ manifest_item = self.compile(data_node)
+ return manifest_item._data[None][0]
+
+ def _initial_output_node(self, node, **kwargs):
+ return self.data_cls_getter(None, None)(node, **kwargs)
+
+ def visit_DataNode(self, node):
+ if node != self.tree:
+ output_parent = self.output_node
+ self.output_node = self.data_cls_getter(self.output_node, node)(node)
+ else:
+ output_parent = None
+
+ assert self.output_node is not None
+
+ for child in node.children:
+ self.visit(child)
+
+ if output_parent is not None:
+ # Append to the parent *after* processing all the node data
+ output_parent.append(self.output_node)
+ self.output_node = self.output_node.parent
+
+ assert self.output_node is not None
+
+ def visit_KeyValueNode(self, node):
+ key_values = []
+ for child in node.children:
+ condition, value = self.visit(child)
+ key_values.append(ConditionalValue(child, condition))
+
+ self.output_node._add_key_value(node, key_values)
+
+ def visit_ListNode(self, node):
+        return (lambda x: True, [self.visit(child) for child in node.children])
+
+ def visit_ValueNode(self, node):
+ return (lambda x: True, node.data)
+
+ def visit_AtomNode(self, node):
+ return (lambda x: True, node.data)
+
+ def visit_ConditionalNode(self, node):
+ return self.visit(node.children[0]), self.visit(node.children[1])
+
+ def visit_StringNode(self, node):
+ indexes = [self.visit(child) for child in node.children]
+
+ def value(x):
+ rv = node.data
+ for index in indexes:
+ rv = rv[index(x)]
+ return rv
+ return value
+
+ def visit_NumberNode(self, node):
+ if "." in node.data:
+ return lambda x: float(node.data)
+ else:
+ return lambda x: int(node.data)
+
+ def visit_VariableNode(self, node):
+ indexes = [self.visit(child) for child in node.children]
+
+ def value(x):
+ data = x[node.data]
+ for index in indexes:
+ data = data[index(x)]
+ return data
+ return value
+
+ def visit_IndexNode(self, node):
+ assert len(node.children) == 1
+ return self.visit(node.children[0])
+
+ def visit_UnaryExpressionNode(self, node):
+ assert len(node.children) == 2
+ operator = self.visit(node.children[0])
+ operand = self.visit(node.children[1])
+
+ return lambda x: operator(operand(x))
+
+ def visit_BinaryExpressionNode(self, node):
+ assert len(node.children) == 3
+ operator = self.visit(node.children[0])
+ operand_0 = self.visit(node.children[1])
+ operand_1 = self.visit(node.children[2])
+
+ assert operand_0 is not None
+ assert operand_1 is not None
+
+ return lambda x: operator(operand_0(x), operand_1(x))
+
+ def visit_UnaryOperatorNode(self, node):
+ return {"not": operator.not_}[node.data]
+
+ def visit_BinaryOperatorNode(self, node):
+ assert isinstance(node.parent, BinaryExpressionNode)
+ return {"and": operator.and_,
+ "or": operator.or_,
+ "==": operator.eq,
+ "!=": operator.ne}[node.data]
+
+
+class ManifestItem:
+ def __init__(self, node=None, **kwargs):
+ self.node = node
+ self.parent = None
+ self.children = []
+ self._data = {}
+
+ def __repr__(self):
+ return "<conditional.ManifestItem %s>" % (self.node.data)
+
+ def __str__(self):
+ rv = [repr(self)]
+ for item in self.children:
+ rv.extend(" %s" % line for line in str(item).split("\n"))
+ return "\n".join(rv)
+
+ def __contains__(self, key):
+ return key in self._data
+
+ def __iter__(self):
+ yield self
+ for child in self.children:
+ yield from child
+
+ @property
+ def is_empty(self):
+ if self._data:
+ return False
+ return all(child.is_empty for child in self.children)
+
+ @property
+ def root(self):
+ node = self
+ while node.parent is not None:
+ node = node.parent
+ return node
+
+ @property
+ def name(self):
+ return self.node.data
+
+ def has_key(self, key):
+ for node in [self, self.root]:
+ if key in node._data:
+ return True
+ return False
+
+ def get(self, key, run_info=None):
+ if run_info is None:
+ run_info = {}
+
+ for node in [self, self.root]:
+ if key in node._data:
+ for cond_value in node._data[key]:
+ try:
+ matches = cond_value(run_info)
+ except KeyError:
+ matches = False
+ if matches:
+ return cond_value.value
+ raise KeyError
+
+ def set(self, key, value, condition=None):
+ # First try to update the existing value
+ if key in self._data:
+ cond_values = self._data[key]
+ for cond_value in cond_values:
+ if cond_value.condition_node == condition:
+ cond_value.value = value
+ return
+            # If there isn't a conditional match, reuse the existing
+            # KeyValueNode as the parent
+ node = None
+ for child in self.node.children:
+ if child.data == key:
+ node = child
+ break
+ assert node is not None
+
+ else:
+ node = KeyValueNode(key)
+ self.node.append(node)
+
+ if isinstance(value, list):
+ value_node = ListNode()
+ for item in value:
+ value_node.append(ValueNode(str(item)))
+ else:
+ value_node = ValueNode(str(value))
+ if condition is not None:
+ if not isinstance(condition, ConditionalNode):
+ conditional_node = ConditionalNode()
+ conditional_node.append(condition)
+ conditional_node.append(value_node)
+ else:
+ conditional_node = condition
+ node.append(conditional_node)
+ cond_value = Compiler().compile_condition(conditional_node)
+ else:
+ node.append(value_node)
+ cond_value = ConditionalValue(value_node, lambda x: True)
+
+ # Update the cache of child values. This is pretty annoying and maybe
+ # it should just work directly on the tree
+ if key not in self._data:
+ self._data[key] = []
+ if self._data[key] and self._data[key][-1].condition_node is None:
+ self._data[key].insert(len(self._data[key]) - 1, cond_value)
+ else:
+ self._data[key].append(cond_value)
+
+ def clear(self, key):
+ """Clear all the expected data for this node"""
+ if key in self._data:
+ for child in self.node.children:
+ if (isinstance(child, KeyValueNode) and
+ child.data == key):
+ child.remove()
+ del self._data[key]
+ break
+
+ def get_conditions(self, property_name):
+ if property_name in self._data:
+ return self._data[property_name]
+ return []
+
+ def _add_key_value(self, node, values):
+ """Called during construction to set a key-value node"""
+ self._data[node.data] = values
+
+ def append(self, child):
+ self.children.append(child)
+ child.parent = self
+ if child.node.parent != self.node:
+ self.node.append(child.node)
+ return child
+
+ def remove(self):
+ if self.parent:
+ self.parent._remove_child(self)
+
+ def _remove_child(self, child):
+ self.children.remove(child)
+ child.parent = None
+ child.node.remove()
+
+ def iterchildren(self, name=None):
+ for item in self.children:
+ if item.name == name or name is None:
+ yield item
+
+ def _flatten(self):
+ rv = {}
+ for node in [self, self.root]:
+ for name, value in node._data.items():
+ if name not in rv:
+ rv[name] = value
+ return rv
+
+ def iteritems(self):
+ yield from self._flatten().items()
+
+ def iterkeys(self):
+ yield from self._flatten().keys()
+
+ def iter_properties(self):
+ for item in self._data:
+ yield item, self._data[item]
+
+ def remove_value(self, key, value):
+ if key not in self._data:
+ return
+ try:
+ self._data[key].remove(value)
+ except ValueError:
+ return
+ if not self._data[key]:
+ del self._data[key]
+ value.remove()
+
+
+def compile_ast(ast, data_cls_getter=None, **kwargs):
+ return Compiler().compile(ast, data_cls_getter=data_cls_getter, **kwargs)
+
+
+def compile(stream, data_cls_getter=None, **kwargs):
+ return compile_ast(parse(stream),
+ data_cls_getter=data_cls_getter,
+ **kwargs)
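A minimal sketch of the conditional backend above: conditions are kept as callables and evaluated against a run_info dictionary at lookup time. The manifest text, run_info values and import path are illustrative.

# Hypothetical example.
from wptrunner.wptmanifest.backends import conditional

manifest = b"""[example.html]
  expected:
    if os == "linux": FAIL
    PASS
"""
root = conditional.compile(manifest)
test = root.children[0]                        # ManifestItem for [example.html]
print(test.get("expected", {"os": "linux"}))   # FAIL
print(test.get("expected", {"os": "win"}))     # PASS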
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/static.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/static.py
new file mode 100644
index 0000000000..5bec942e0b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/backends/static.py
@@ -0,0 +1,102 @@
+# mypy: allow-untyped-defs
+
+import operator
+
+from . import base
+from ..parser import parse
+
+
+class Compiler(base.Compiler):
+ """Compiler backend that evaluates conditional expressions
+ to give static output"""
+
+ def compile(self, tree, expr_data, data_cls_getter=None, **kwargs):
+ """Compile a raw AST into a form with conditional expressions
+ evaluated.
+
+ tree - The root node of the wptmanifest AST to compile
+
+ expr_data - A dictionary of key / value pairs to use when
+ evaluating conditional expressions
+
+        data_cls_getter - A function taking two parameters (the previous
+                          output node and the current AST node) and returning
+                          the class of the output node to use for the current
+                          AST node
+ """
+
+ self._kwargs = kwargs
+ self.expr_data = expr_data
+
+ return self._compile(tree, data_cls_getter, **kwargs)
+
+ def visit_KeyValueNode(self, node):
+ key_name = node.data
+ key_value = None
+ for child in node.children:
+ value = self.visit(child)
+ if value is not None:
+ key_value = value
+ break
+ if key_value is not None:
+ self.output_node.set(key_name, key_value)
+
+ def visit_ConditionalNode(self, node):
+ assert len(node.children) == 2
+ if self.visit(node.children[0]):
+ return self.visit(node.children[1])
+
+ def visit_StringNode(self, node):
+ value = node.data
+ for child in node.children:
+ value = self.visit(child)(value)
+ return value
+
+ def visit_VariableNode(self, node):
+ value = self.expr_data[node.data]
+ for child in node.children:
+ value = self.visit(child)(value)
+ return value
+
+ def visit_IndexNode(self, node):
+ assert len(node.children) == 1
+ index = self.visit(node.children[0])
+ return lambda x: x[index]
+
+ def visit_UnaryExpressionNode(self, node):
+ assert len(node.children) == 2
+ operator = self.visit(node.children[0])
+ operand = self.visit(node.children[1])
+
+ return operator(operand)
+
+ def visit_BinaryExpressionNode(self, node):
+ assert len(node.children) == 3
+ operator = self.visit(node.children[0])
+ operand_0 = self.visit(node.children[1])
+ operand_1 = self.visit(node.children[2])
+
+ return operator(operand_0, operand_1)
+
+ def visit_UnaryOperatorNode(self, node):
+ return {"not": operator.not_}[node.data]
+
+ def visit_BinaryOperatorNode(self, node):
+ return {"and": operator.and_,
+ "or": operator.or_,
+ "==": operator.eq,
+ "!=": operator.ne}[node.data]
+
+
+def compile_ast(ast, expr_data, data_cls_getter=None, **kwargs):
+ return Compiler().compile(ast,
+ expr_data,
+ data_cls_getter=data_cls_getter,
+ **kwargs)
+
+
+def compile(stream, expr_data, data_cls_getter=None, **kwargs):
+ return compile_ast(parse(stream),
+ expr_data,
+ data_cls_getter=data_cls_getter,
+ **kwargs)
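In contrast to the conditional backend, the static backend above evaluates every condition once, at compile time, against the expr_data mapping, so later lookups take no run_info. A hedged sketch with illustrative values:

# Hypothetical example.
from wptrunner.wptmanifest.backends import static

manifest = b"""[example.html]
  expected:
    if os == "linux": FAIL
    PASS
"""
root = static.compile(manifest, {"os": "linux"})
test = root.children[0]
print(test.get("expected"))   # FAIL; the condition was resolved at compile time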
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/node.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/node.py
new file mode 100644
index 0000000000..437de54f5b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/node.py
@@ -0,0 +1,173 @@
+# mypy: allow-untyped-defs
+
+class NodeVisitor:
+ def visit(self, node):
+ # This is ugly as hell, but we don't have multimethods and
+ # they aren't trivial to fake without access to the class
+ # object from the class body
+ func = getattr(self, "visit_%s" % (node.__class__.__name__))
+ return func(node)
+
+
+class Node:
+ def __init__(self, data=None, comments=None):
+ self.data = data
+ self.parent = None
+ self.children = []
+ self.comments = comments or []
+
+ def append(self, other):
+ other.parent = self
+ self.children.append(other)
+
+ def remove(self):
+ self.parent.children.remove(self)
+
+ def __repr__(self):
+ return f"<{self.__class__.__name__} {self.data}>"
+
+ def __str__(self):
+ rv = [repr(self)]
+ for item in self.children:
+ rv.extend(" %s" % line for line in str(item).split("\n"))
+ return "\n".join(rv)
+
+ def __eq__(self, other):
+ if not (self.__class__ == other.__class__ and
+ self.data == other.data and
+ len(self.children) == len(other.children)):
+ return False
+ for child, other_child in zip(self.children, other.children):
+ if not child == other_child:
+ return False
+ return True
+
+ def copy(self):
+ new = self.__class__(self.data, self.comments)
+ for item in self.children:
+ new.append(item.copy())
+ return new
+
+
+class DataNode(Node):
+ def append(self, other):
+ # Append that retains the invariant that child data nodes
+ # come after child nodes of other types
+ other.parent = self
+ if isinstance(other, DataNode):
+ self.children.append(other)
+ else:
+ index = len(self.children)
+ while index > 0 and isinstance(self.children[index - 1], DataNode):
+ index -= 1
+ for i in range(index):
+ if other.data == self.children[i].data:
+ raise ValueError("Duplicate key %s" % self.children[i].data)
+ self.children.insert(index, other)
+
+
+class KeyValueNode(Node):
+ def append(self, other):
+ # Append that retains the invariant that conditional nodes
+ # come before unconditional nodes
+ other.parent = self
+ if not isinstance(other, (ListNode, ValueNode, ConditionalNode)):
+ raise TypeError
+ if isinstance(other, (ListNode, ValueNode)):
+ if self.children:
+ assert not isinstance(self.children[-1], (ListNode, ValueNode))
+ self.children.append(other)
+ else:
+ if self.children and isinstance(self.children[-1], ValueNode):
+ self.children.insert(len(self.children) - 1, other)
+ else:
+ self.children.append(other)
+
+
+class ListNode(Node):
+ def append(self, other):
+ other.parent = self
+ self.children.append(other)
+
+
+class ValueNode(Node):
+ def append(self, other):
+ raise TypeError
+
+
+class AtomNode(ValueNode):
+ pass
+
+
+class ConditionalNode(Node):
+ def append(self, other):
+ if not len(self.children):
+ if not isinstance(other, (BinaryExpressionNode, UnaryExpressionNode, VariableNode)):
+ raise TypeError
+ else:
+ if len(self.children) > 1:
+ raise ValueError
+ if not isinstance(other, (ListNode, ValueNode)):
+ raise TypeError
+ other.parent = self
+ self.children.append(other)
+
+
+class UnaryExpressionNode(Node):
+ def __init__(self, operator, operand):
+ Node.__init__(self)
+ self.append(operator)
+ self.append(operand)
+
+ def append(self, other):
+ Node.append(self, other)
+ assert len(self.children) <= 2
+
+ def copy(self):
+ new = self.__class__(self.children[0].copy(),
+ self.children[1].copy())
+ return new
+
+
+class BinaryExpressionNode(Node):
+ def __init__(self, operator, operand_0, operand_1):
+ Node.__init__(self)
+ self.append(operator)
+ self.append(operand_0)
+ self.append(operand_1)
+
+ def append(self, other):
+ Node.append(self, other)
+ assert len(self.children) <= 3
+
+ def copy(self):
+ new = self.__class__(self.children[0].copy(),
+ self.children[1].copy(),
+ self.children[2].copy())
+ return new
+
+
+class UnaryOperatorNode(Node):
+ def append(self, other):
+ raise TypeError
+
+
+class BinaryOperatorNode(Node):
+ def append(self, other):
+ raise TypeError
+
+
+class IndexNode(Node):
+ pass
+
+
+class VariableNode(Node):
+ pass
+
+
+class StringNode(Node):
+ pass
+
+
+class NumberNode(ValueNode):
+ pass
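A short sketch (not part of the patch) of the ordering invariants the node classes above maintain: a DataNode keeps section children after key-value children, and a KeyValueNode keeps conditional values ahead of the unconditional default. The names are illustrative.

# Hypothetical example.
from wptrunner.wptmanifest.node import (ConditionalNode, DataNode,
                                        KeyValueNode, ValueNode, VariableNode)

section = DataNode("example.html")
kv = KeyValueNode("expected")
section.append(kv)
section.append(DataNode("subtest"))   # sections stay after key-value nodes
kv.append(ValueNode("PASS"))          # unconditional default
cond = ConditionalNode()
cond.append(VariableNode("debug"))    # the condition expression
cond.append(ValueNode("FAIL"))        # the conditional value
kv.append(cond)                       # inserted ahead of the default PASS
assert isinstance(kv.children[0], ConditionalNode)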
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/parser.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/parser.py
new file mode 100644
index 0000000000..c778895ed2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/parser.py
@@ -0,0 +1,873 @@
+# mypy: allow-untyped-defs
+
+#default_value:foo
+#include: other.manifest
+#
+#[test_name.js]
+# expected: ERROR
+#
+# [subtest 1]
+# expected:
+# os == win: FAIL #This is a comment
+# PASS
+#
+
+
+from io import BytesIO
+
+from .node import (Node, AtomNode, BinaryExpressionNode, BinaryOperatorNode,
+ ConditionalNode, DataNode, IndexNode, KeyValueNode, ListNode,
+ NumberNode, StringNode, UnaryExpressionNode,
+ UnaryOperatorNode, ValueNode, VariableNode)
+
+
+class ParseError(Exception):
+ def __init__(self, filename, line, detail):
+ self.line = line
+ self.filename = filename
+ self.detail = detail
+ self.message = f"{self.detail}: {self.filename} line {self.line}"
+ Exception.__init__(self, self.message)
+
+eol = object
+group_start = object
+group_end = object
+digits = "0123456789"
+open_parens = "[("
+close_parens = "])"
+parens = open_parens + close_parens
+operator_chars = "=!"
+
+unary_operators = ["not"]
+binary_operators = ["==", "!=", "and", "or"]
+
+operators = ["==", "!=", "not", "and", "or"]
+
+atoms = {"True": True,
+ "False": False,
+ "Reset": object()}
+
+def decode(s):
+ assert isinstance(s, str)
+ return s
+
+
+def precedence(operator_node):
+ return len(operators) - operators.index(operator_node.data)
+
+
+class TokenTypes:
+ def __init__(self) -> None:
+ for type in [
+ "group_start",
+ "group_end",
+ "paren",
+ "list_start",
+ "list_end",
+ "separator",
+ "ident",
+ "string",
+ "number",
+ "atom",
+ # Without an end-of-line token type, we need two different comment
+ # token types to distinguish between:
+ # [heading1] # Comment attached to heading 1
+ # [heading2]
+ #
+ # and
+ # [heading1]
+ # # Comment attached to heading 2
+ # [heading2]
+ "comment",
+ "inline_comment",
+ "eof",
+ ]:
+ setattr(self, type, type)
+
+token_types = TokenTypes()
+
+
+class Tokenizer:
+ def __init__(self):
+ self.reset()
+
+ def reset(self):
+ self.indent_levels = [0]
+ self.state = self.line_start_state
+ self.next_state = self.data_line_state
+ self.line_number = 0
+ self.filename = ""
+
+ def tokenize(self, stream):
+ self.reset()
+ assert not isinstance(stream, str)
+ if isinstance(stream, bytes):
+ stream = BytesIO(stream)
+ if not hasattr(stream, "name"):
+ self.filename = ""
+ else:
+ self.filename = stream.name
+
+ self.next_line_state = self.line_start_state
+ for i, line in enumerate(stream):
+ assert isinstance(line, bytes)
+ self.state = self.next_line_state
+ assert self.state is not None
+ states = []
+ self.next_line_state = None
+ self.line_number = i + 1
+ self.index = 0
+ self.line = line.decode('utf-8').rstrip()
+ assert isinstance(self.line, str)
+ while self.state != self.eol_state:
+ states.append(self.state)
+ tokens = self.state()
+ if tokens:
+ yield from tokens
+ self.state()
+ while True:
+ yield (token_types.eof, None)
+
+ def char(self):
+ if self.index == len(self.line):
+ return eol
+ return self.line[self.index]
+
+ def consume(self):
+ if self.index < len(self.line):
+ self.index += 1
+
+ def peek(self, length):
+ return self.line[self.index:self.index + length]
+
+ def skip_whitespace(self):
+ while self.char() == " ":
+ self.consume()
+
+ def eol_state(self):
+ if self.next_line_state is None:
+ self.next_line_state = self.line_start_state
+
+ def line_start_state(self):
+ self.skip_whitespace()
+ if self.char() == eol:
+ self.state = self.eol_state
+ return
+ if self.char() == "#":
+ self.state = self.comment_state
+ return
+ if self.index > self.indent_levels[-1]:
+ self.indent_levels.append(self.index)
+ yield (token_types.group_start, None)
+ else:
+ if self.index < self.indent_levels[-1]:
+ while self.index < self.indent_levels[-1]:
+ self.indent_levels.pop()
+ yield (token_types.group_end, None)
+                # This is terrible; if we were parsing an expression then
+                # next_state will be expr_or_value, but after a dedent the
+                # next line must always be a heading or a key, so we go
+                # back to data_line_state
+ self.next_state = self.data_line_state
+ if self.index != self.indent_levels[-1]:
+ raise ParseError(self.filename, self.line_number, "Unexpected indent")
+
+ self.state = self.next_state
+
+ def data_line_state(self):
+ if self.char() == "[":
+ yield (token_types.paren, self.char())
+ self.consume()
+ self.state = self.heading_state
+ else:
+ self.state = self.key_state
+
+ def heading_state(self):
+ rv = ""
+ while True:
+ c = self.char()
+ if c == "\\":
+ rv += self.consume_escape()
+ elif c == "]":
+ break
+ elif c == eol:
+ raise ParseError(self.filename, self.line_number, "EOL in heading")
+ else:
+ rv += c
+ self.consume()
+
+ yield (token_types.string, decode(rv))
+ yield (token_types.paren, "]")
+ self.consume()
+ self.state = self.line_end_state
+ self.next_state = self.data_line_state
+
+ def key_state(self):
+ rv = ""
+ while True:
+ c = self.char()
+ if c == " ":
+ self.skip_whitespace()
+ if self.char() != ":":
+ raise ParseError(self.filename, self.line_number, "Space in key name")
+ break
+ elif c == ":":
+ break
+ elif c == eol:
+ raise ParseError(self.filename, self.line_number, "EOL in key name (missing ':'?)")
+ elif c == "\\":
+ rv += self.consume_escape()
+ else:
+ rv += c
+ self.consume()
+ yield (token_types.string, decode(rv))
+ yield (token_types.separator, ":")
+ self.consume()
+ self.state = self.after_key_state
+
+ def after_key_state(self):
+ self.skip_whitespace()
+ c = self.char()
+ if c in {"#", eol}:
+ self.next_state = self.expr_or_value_state
+ self.state = self.line_end_state
+ elif c == "[":
+ self.state = self.list_start_state
+ else:
+ self.state = self.value_state
+
+ def after_expr_state(self):
+ self.skip_whitespace()
+ c = self.char()
+ if c in {"#", eol}:
+ self.next_state = self.after_expr_state
+ self.state = self.line_end_state
+ elif c == "[":
+ self.state = self.list_start_state
+ else:
+ self.state = self.value_state
+
+ def list_start_state(self):
+ yield (token_types.list_start, "[")
+ self.consume()
+ self.state = self.list_value_start_state
+
+ def list_value_start_state(self):
+ self.skip_whitespace()
+ if self.char() == "]":
+ self.state = self.list_end_state
+ elif self.char() in ("'", '"'):
+ quote_char = self.char()
+ self.consume()
+ yield (token_types.string, self.consume_string(quote_char))
+ self.skip_whitespace()
+ if self.char() == "]":
+ self.state = self.list_end_state
+ elif self.char() != ",":
+ raise ParseError(self.filename, self.line_number, "Junk after quoted string")
+ self.consume()
+ elif self.char() in {"#", eol}:
+ self.state = self.line_end_state
+ self.next_line_state = self.list_value_start_state
+ elif self.char() == ",":
+ raise ParseError(self.filename, self.line_number, "List item started with separator")
+ elif self.char() == "@":
+ self.state = self.list_value_atom_state
+ else:
+ self.state = self.list_value_state
+
+ def list_value_state(self):
+ rv = ""
+ spaces = 0
+ while True:
+ c = self.char()
+ if c == "\\":
+ escape = self.consume_escape()
+ rv += escape
+ elif c == eol:
+ raise ParseError(self.filename, self.line_number, "EOL in list value")
+ elif c == "#":
+ raise ParseError(self.filename, self.line_number, "EOL in list value (comment)")
+ elif c == ",":
+ self.state = self.list_value_start_state
+ self.consume()
+ break
+ elif c == " ":
+ spaces += 1
+ self.consume()
+ elif c == "]":
+ self.state = self.list_end_state
+ self.consume()
+ break
+ else:
+ rv += " " * spaces
+ spaces = 0
+ rv += c
+ self.consume()
+
+ if rv:
+ yield (token_types.string, decode(rv))
+
+ def list_value_atom_state(self):
+ self.consume()
+ for _, value in self.list_value_state():
+ yield token_types.atom, value
+
+ def list_end_state(self):
+ self.consume()
+ yield (token_types.list_end, "]")
+ self.state = self.line_end_state
+
+ def value_state(self):
+ self.skip_whitespace()
+ c = self.char()
+ if c in ("'", '"'):
+ quote_char = self.char()
+ self.consume()
+ yield (token_types.string, self.consume_string(quote_char))
+ self.state = self.line_end_state
+ elif c == "@":
+ self.consume()
+ for _, value in self.value_inner_state():
+ yield token_types.atom, value
+ elif c == "[":
+ self.state = self.list_start_state
+ else:
+ self.state = self.value_inner_state
+
+ def value_inner_state(self):
+ rv = ""
+ spaces = 0
+ while True:
+ c = self.char()
+ if c == "\\":
+ rv += self.consume_escape()
+ elif c in {"#", eol}:
+ self.state = self.line_end_state
+ break
+ elif c == " ":
+ # prevent whitespace before comments from being included in the value
+ spaces += 1
+ self.consume()
+ else:
+ rv += " " * spaces
+ spaces = 0
+ rv += c
+ self.consume()
+ rv = decode(rv)
+ if rv.startswith("if "):
+ # Hack to avoid a problem where people write
+ # disabled: if foo
+ # and expect that to disable conditionally
+ raise ParseError(self.filename, self.line_number, "Strings starting 'if ' must be quoted "
+ "(expressions must start on a newline and be indented)")
+ yield (token_types.string, rv)
+
+ def _consume_comment(self):
+ assert self.char() == "#"
+ self.consume()
+ comment = ''
+ while self.char() is not eol:
+ comment += self.char()
+ self.consume()
+ return comment
+
+ def comment_state(self):
+ yield (token_types.comment, self._consume_comment())
+ self.state = self.eol_state
+
+ def inline_comment_state(self):
+ yield (token_types.inline_comment, self._consume_comment())
+ self.state = self.eol_state
+
+ def line_end_state(self):
+ self.skip_whitespace()
+ c = self.char()
+ if c == "#":
+ self.state = self.inline_comment_state
+ elif c == eol:
+ self.state = self.eol_state
+ else:
+ raise ParseError(self.filename, self.line_number, "Junk before EOL %s" % c)
+
+ def consume_string(self, quote_char):
+ rv = ""
+ while True:
+ c = self.char()
+ if c == "\\":
+ rv += self.consume_escape()
+ elif c == quote_char:
+ self.consume()
+ break
+ elif c == eol:
+ raise ParseError(self.filename, self.line_number, "EOL in quoted string")
+ else:
+ rv += c
+ self.consume()
+
+ return decode(rv)
+
+ def expr_or_value_state(self):
+ if self.peek(3) == "if ":
+ self.state = self.expr_state
+ else:
+ self.state = self.value_state
+
+ def expr_state(self):
+ self.skip_whitespace()
+ c = self.char()
+ if c == eol:
+ raise ParseError(self.filename, self.line_number, "EOL in expression")
+ elif c in "'\"":
+ self.consume()
+ yield (token_types.string, self.consume_string(c))
+ elif c == "#":
+ raise ParseError(self.filename, self.line_number, "Comment before end of expression")
+ elif c == ":":
+ yield (token_types.separator, c)
+ self.consume()
+ self.state = self.after_expr_state
+ elif c in parens:
+ self.consume()
+ yield (token_types.paren, c)
+ elif c in ("!", "="):
+ self.state = self.operator_state
+ elif c in digits:
+ self.state = self.digit_state
+ else:
+ self.state = self.ident_state
+
+ def operator_state(self):
+ # Only symbolic operators
+ index_0 = self.index
+ while True:
+ c = self.char()
+ if c == eol:
+ break
+ elif c in operator_chars:
+ self.consume()
+ else:
+ self.state = self.expr_state
+ break
+ yield (token_types.ident, self.line[index_0:self.index])
+
+ def digit_state(self):
+ index_0 = self.index
+ seen_dot = False
+ while True:
+ c = self.char()
+ if c == eol:
+ break
+ elif c in digits:
+ self.consume()
+ elif c == ".":
+ if seen_dot:
+ raise ParseError(self.filename, self.line_number, "Invalid number")
+ self.consume()
+ seen_dot = True
+ elif c in parens:
+ break
+ elif c in operator_chars:
+ break
+ elif c == " ":
+ break
+ elif c == ":":
+ break
+ else:
+ raise ParseError(self.filename, self.line_number, "Invalid character in number")
+
+ self.state = self.expr_state
+ yield (token_types.number, self.line[index_0:self.index])
+
+ def ident_state(self):
+ index_0 = self.index
+ while True:
+ c = self.char()
+ if c == eol:
+ break
+ elif c == ".":
+ break
+ elif c in parens:
+ break
+ elif c in operator_chars:
+ break
+ elif c == " ":
+ break
+ elif c == ":":
+ break
+ else:
+ self.consume()
+ self.state = self.expr_state
+ yield (token_types.ident, self.line[index_0:self.index])
+
+ def consume_escape(self):
+ assert self.char() == "\\"
+ self.consume()
+ c = self.char()
+ self.consume()
+ if c == "x":
+ return self.decode_escape(2)
+ elif c == "u":
+ return self.decode_escape(4)
+ elif c == "U":
+ return self.decode_escape(6)
+ elif c in ["a", "b", "f", "n", "r", "t", "v"]:
+ return eval(r"'\%s'" % c)
+ elif c is eol:
+ raise ParseError(self.filename, self.line_number, "EOL in escape")
+ else:
+ return c
+
+ def decode_escape(self, length):
+ value = 0
+ for i in range(length):
+ c = self.char()
+ value *= 16
+ value += self.escape_value(c)
+ self.consume()
+
+ return chr(value)
+
+ def escape_value(self, c):
+ if '0' <= c <= '9':
+ return ord(c) - ord('0')
+ elif 'a' <= c <= 'f':
+ return ord(c) - ord('a') + 10
+ elif 'A' <= c <= 'F':
+ return ord(c) - ord('A') + 10
+ else:
+ raise ParseError(self.filename, self.line_number, "Invalid character escape")
+
+
+class Parser:
+ def __init__(self):
+ self.reset()
+
+ def reset(self):
+ self.token = None
+ self.unary_operators = "!"
+ self.binary_operators = frozenset(["&&", "||", "=="])
+ self.tokenizer = Tokenizer()
+ self.token_generator = None
+ self.tree = Treebuilder(DataNode(None))
+ self.expr_builder = None
+ self.expr_builders = []
+ self.comments = []
+
+ def parse(self, input):
+ try:
+ self.reset()
+ self.token_generator = self.tokenizer.tokenize(input)
+ self.consume()
+ self.manifest()
+ return self.tree.node
+ except Exception as e:
+ if not isinstance(e, ParseError):
+ raise ParseError(self.tokenizer.filename,
+ self.tokenizer.line_number,
+ str(e))
+ raise
+
+ def consume(self):
+ self.token = next(self.token_generator)
+
+ def expect(self, type, value=None):
+ if self.token[0] != type:
+ raise ParseError(self.tokenizer.filename, self.tokenizer.line_number,
+ f"Token '{self.token[0]}' doesn't equal expected type '{type}'")
+ if value is not None:
+ if self.token[1] != value:
+ raise ParseError(self.tokenizer.filename, self.tokenizer.line_number,
+ f"Token '{self.token[1]}' doesn't equal expected value '{value}'")
+
+ self.consume()
+
+ def maybe_consume_inline_comment(self):
+ if self.token[0] == token_types.inline_comment:
+ self.comments.append(self.token)
+ self.consume()
+
+ def consume_comments(self):
+ while self.token[0] == token_types.comment:
+ self.comments.append(self.token)
+ self.consume()
+
+ def flush_comments(self, target_node=None):
+ """Transfer comments from the parser's buffer to a parse tree node.
+
+ Use the tree's current node if no target node is explicitly specified.
+
+ The comments are buffered because the target node they should belong to
+ may not exist yet. For example:
+
+ [heading]
+ # comment to be attached to the subheading
+ [subheading]
+ """
+ (target_node or self.tree.node).comments.extend(self.comments)
+ self.comments.clear()
+
+ def manifest(self):
+ self.data_block()
+ self.expect(token_types.eof)
+
+ def data_block(self):
+ while self.token[0] in {token_types.comment, token_types.string,
+ token_types.paren}:
+ if self.token[0] == token_types.comment:
+ self.consume_comments()
+ elif self.token[0] == token_types.string:
+ self.tree.append(KeyValueNode(self.token[1]))
+ self.consume()
+ self.expect(token_types.separator)
+ self.maybe_consume_inline_comment()
+ self.flush_comments()
+ self.consume_comments()
+ self.value_block()
+ self.flush_comments()
+ self.tree.pop()
+ else:
+ self.expect(token_types.paren, "[")
+ if self.token[0] != token_types.string:
+ raise ParseError(self.tokenizer.filename,
+ self.tokenizer.line_number,
+ f"Token '{self.token[0]}' is not a string")
+ self.tree.append(DataNode(self.token[1]))
+ self.consume()
+ self.expect(token_types.paren, "]")
+ self.maybe_consume_inline_comment()
+ self.flush_comments()
+ self.consume_comments()
+ if self.token[0] == token_types.group_start:
+ self.consume()
+ self.data_block()
+ self.eof_or_end_group()
+ self.tree.pop()
+
+ def eof_or_end_group(self):
+ if self.token[0] != token_types.eof:
+ self.expect(token_types.group_end)
+
+ def value_block(self):
+ if self.token[0] == token_types.list_start:
+ self.consume()
+ self.list_value()
+ elif self.token[0] == token_types.string:
+ self.value()
+ elif self.token[0] == token_types.group_start:
+ self.consume()
+ self.expression_values()
+ default_value = None
+ if self.token[0] == token_types.string:
+ default_value = self.value
+ elif self.token[0] == token_types.atom:
+ default_value = self.atom
+ elif self.token[0] == token_types.list_start:
+ self.consume()
+ default_value = self.list_value
+ if default_value:
+ default_value()
+ # For this special case where a group exists, attach comments to
+ # the string/list value, not the key-value node. That is,
+ # key:
+ # ...
+ # # comment attached to condition default
+ # value
+ #
+ # should not read
+ # # comment attached to condition default
+ # key:
+ # ...
+ # value
+ self.consume_comments()
+ self.flush_comments(
+ self.tree.node.children[-1] if default_value else None)
+ self.eof_or_end_group()
+ elif self.token[0] == token_types.atom:
+ self.atom()
+ else:
+ raise ParseError(self.tokenizer.filename, self.tokenizer.line_number,
+ f"Token '{self.token[0]}' is not a known type")
+
+ def list_value(self):
+ self.tree.append(ListNode())
+ self.maybe_consume_inline_comment()
+ while self.token[0] in (token_types.atom, token_types.string):
+ if self.token[0] == token_types.atom:
+ self.atom()
+ else:
+ self.value()
+ self.expect(token_types.list_end)
+ self.maybe_consume_inline_comment()
+ self.tree.pop()
+
+ def expression_values(self):
+ self.consume_comments()
+ while self.token == (token_types.ident, "if"):
+ self.consume()
+ self.tree.append(ConditionalNode())
+ self.expr_start()
+ self.expect(token_types.separator)
+ self.value_block()
+ self.flush_comments()
+ self.tree.pop()
+ self.consume_comments()
+
+ def value(self):
+ self.tree.append(ValueNode(self.token[1]))
+ self.consume()
+ self.maybe_consume_inline_comment()
+ self.tree.pop()
+
+ def atom(self):
+ if self.token[1] not in atoms:
+ raise ParseError(self.tokenizer.filename, self.tokenizer.line_number, "Unrecognised symbol @%s" % self.token[1])
+ self.tree.append(AtomNode(atoms[self.token[1]]))
+ self.consume()
+ self.maybe_consume_inline_comment()
+ self.tree.pop()
+
+ def expr_start(self):
+ self.expr_builder = ExpressionBuilder(self.tokenizer)
+ self.expr_builders.append(self.expr_builder)
+ self.expr()
+ expression = self.expr_builder.finish()
+ self.expr_builders.pop()
+ self.expr_builder = self.expr_builders[-1] if self.expr_builders else None
+ if self.expr_builder:
+ self.expr_builder.operands[-1].children[-1].append(expression)
+ else:
+ self.tree.append(expression)
+ self.tree.pop()
+
+ def expr(self):
+ self.expr_operand()
+ while (self.token[0] == token_types.ident and self.token[1] in binary_operators):
+ self.expr_bin_op()
+ self.expr_operand()
+
+ def expr_operand(self):
+ if self.token == (token_types.paren, "("):
+ self.consume()
+ self.expr_builder.left_paren()
+ self.expr()
+ self.expect(token_types.paren, ")")
+ self.expr_builder.right_paren()
+ elif self.token[0] == token_types.ident and self.token[1] in unary_operators:
+ self.expr_unary_op()
+ self.expr_operand()
+ elif self.token[0] in [token_types.string, token_types.ident]:
+ self.expr_value()
+ elif self.token[0] == token_types.number:
+ self.expr_number()
+ else:
+ raise ParseError(self.tokenizer.filename, self.tokenizer.line_number, "Unrecognised operand")
+
+ def expr_unary_op(self):
+ if self.token[1] in unary_operators:
+ self.expr_builder.push_operator(UnaryOperatorNode(self.token[1]))
+ self.consume()
+ else:
+ raise ParseError(self.tokenizer.filename, self.tokenizer.line_number, "Expected unary operator")
+
+ def expr_bin_op(self):
+ if self.token[1] in binary_operators:
+ self.expr_builder.push_operator(BinaryOperatorNode(self.token[1]))
+ self.consume()
+ else:
+ raise ParseError(self.tokenizer.filename, self.tokenizer.line_number, "Expected binary operator")
+
+ def expr_value(self):
+ node_type = {token_types.string: StringNode,
+ token_types.ident: VariableNode}[self.token[0]]
+ self.expr_builder.push_operand(node_type(self.token[1]))
+ self.consume()
+ if self.token == (token_types.paren, "["):
+ self.consume()
+ self.expr_builder.operands[-1].append(IndexNode())
+ self.expr_start()
+ self.expect(token_types.paren, "]")
+
+ def expr_number(self):
+ self.expr_builder.push_operand(NumberNode(self.token[1]))
+ self.consume()
+
+
+class Treebuilder:
+ def __init__(self, root):
+ self.root = root
+ self.node = root
+
+ def append(self, node):
+ assert isinstance(node, Node)
+ self.node.append(node)
+ self.node = node
+ assert self.node is not None
+ return node
+
+ def pop(self):
+ node = self.node
+ self.node = self.node.parent
+ assert self.node is not None
+ return node
+
+
+class ExpressionBuilder:
+ def __init__(self, tokenizer):
+ self.operands = []
+ self.operators = [None]
+ self.tokenizer = tokenizer
+
+ def finish(self):
+ while self.operators[-1] is not None:
+ self.pop_operator()
+ rv = self.pop_operand()
+ assert self.is_empty()
+ return rv
+
+ def left_paren(self):
+ self.operators.append(None)
+
+ def right_paren(self):
+ while self.operators[-1] is not None:
+ self.pop_operator()
+ if not self.operators:
+ raise ParseError(self.tokenizer.filename, self.tokenizer.line,
+ "Unbalanced parens")
+
+ assert self.operators.pop() is None
+
+ def push_operator(self, operator):
+ assert operator is not None
+ while self.precedence(self.operators[-1]) > self.precedence(operator):
+ self.pop_operator()
+
+ self.operators.append(operator)
+
+ def pop_operator(self):
+ operator = self.operators.pop()
+ if isinstance(operator, BinaryOperatorNode):
+ operand_1 = self.operands.pop()
+ operand_0 = self.operands.pop()
+ self.operands.append(BinaryExpressionNode(operator, operand_0, operand_1))
+ else:
+ operand_0 = self.operands.pop()
+ self.operands.append(UnaryExpressionNode(operator, operand_0))
+
+ def push_operand(self, node):
+ self.operands.append(node)
+
+ def pop_operand(self):
+ return self.operands.pop()
+
+ def is_empty(self):
+ return len(self.operands) == 0 and all(item is None for item in self.operators)
+
+ def precedence(self, operator):
+ if operator is None:
+ return 0
+ return precedence(operator)
+
+
+def parse(stream):
+ p = Parser()
+ return p.parse(stream)
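The Tokenizer above can also be exercised on its own: it yields (token type, value) pairs followed by an endless stream of eof tokens. A hedged sketch with a made-up input line:

# Hypothetical example.
from wptrunner.wptmanifest.parser import Tokenizer, token_types

tokens = []
for token_type, value in Tokenizer().tokenize(b"expected: PASS  # comment\n"):
    if token_type == token_types.eof:
        break
    tokens.append((token_type, value))
# tokens == [("string", "expected"), ("separator", ":"),
#            ("string", "PASS"), ("inline_comment", " comment")]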
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/serializer.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/serializer.py
new file mode 100644
index 0000000000..e749add74e
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/serializer.py
@@ -0,0 +1,160 @@
+# mypy: allow-untyped-defs
+
+from six import ensure_text
+
+from .node import NodeVisitor, ValueNode, ListNode, BinaryExpressionNode
+from .parser import atoms, precedence, token_types
+
+atom_names = {v: "@%s" % k for (k,v) in atoms.items()}
+
+named_escapes = {"\a", "\b", "\f", "\n", "\r", "\t", "\v"}
+
+def escape(string, extras=""):
+    # Assumes the input is already text (unicode), not bytes.
+ rv = ""
+ for c in string:
+ if c in named_escapes:
+ rv += c.encode("unicode_escape").decode()
+ elif c == "\\":
+ rv += "\\\\"
+ elif c < '\x20':
+ rv += "\\x%02x" % ord(c)
+ elif c in extras:
+ rv += "\\" + c
+ else:
+ rv += c
+ return ensure_text(rv)
+
+
+class ManifestSerializer(NodeVisitor):
+ def __init__(self, skip_empty_data=False):
+ self.skip_empty_data = skip_empty_data
+
+ def serialize(self, root):
+ self.indent = 2
+ rv = "\n".join(self.visit(root))
+ if not rv:
+ return rv
+ rv = rv.strip()
+ if rv[-1] != "\n":
+ rv = rv + "\n"
+ return rv
+
+ def visit(self, node):
+ lines = super().visit(node)
+ comments = [f"#{comment}" for _, comment in node.comments]
+ # Simply checking if the first line contains '#' is less than ideal; the
+ # character might be escaped or within a string.
+ if lines and "#" not in lines[0]:
+ for i, (token_type, comment) in enumerate(node.comments):
+ if token_type == token_types.inline_comment:
+ lines[0] += f" #{comment}"
+ comments.pop(i)
+ break
+ return comments + lines
+
+ def visit_DataNode(self, node):
+ rv = []
+ if not self.skip_empty_data or node.children:
+ if node.data:
+ rv.append("[%s]" % escape(node.data, extras="]"))
+ indent = self.indent * " "
+ else:
+ indent = ""
+
+ for child in node.children:
+ rv.extend("%s%s" % (indent if item else "", item) for item in self.visit(child))
+
+ if node.parent:
+ rv.append("")
+
+ return rv
+
+ def visit_KeyValueNode(self, node):
+ rv = [escape(node.data, ":") + ":"]
+ indent = " " * self.indent
+
+ if len(node.children) == 1 and isinstance(node.children[0], (ValueNode, ListNode)):
+ rv[0] += " %s" % self.visit(node.children[0])[0]
+ else:
+ for child in node.children:
+ rv.extend(indent + line for line in self.visit(child))
+
+ return rv
+
+ def visit_ListNode(self, node):
+ rv = ["["]
+ rv.extend(", ".join(self.visit(child)[0] for child in node.children))
+ rv.append("]")
+ return ["".join(rv)]
+
+ def visit_ValueNode(self, node):
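+        # Quote values that would be ambiguous when re-parsed: anything
+        # containing '#', starting with "if ", or (inside a list) containing
+        # ',' or ']'. Prefer double quotes unless the value contains one.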
+ data = ensure_text(node.data)
+ if ("#" in data or
+ data.startswith("if ") or
+ (isinstance(node.parent, ListNode) and
+ ("," in data or "]" in data))):
+ if "\"" in data:
+ quote = "'"
+ else:
+ quote = "\""
+ else:
+ quote = ""
+ return [quote + escape(data, extras=quote) + quote]
+
+ def visit_AtomNode(self, node):
+ return [atom_names[node.data]]
+
+ def visit_ConditionalNode(self, node):
+ return ["if %s: %s" % tuple(self.visit(item)[0] for item in node.children)]
+
+ def visit_StringNode(self, node):
+ rv = ["\"%s\"" % escape(node.data, extras="\"")]
+ for child in node.children:
+ rv[0] += self.visit(child)[0]
+ return rv
+
+ def visit_NumberNode(self, node):
+ return [ensure_text(node.data)]
+
+ def visit_VariableNode(self, node):
+ rv = escape(node.data)
+ for child in node.children:
+ rv += self.visit(child)
+ return [rv]
+
+ def visit_IndexNode(self, node):
+ assert len(node.children) == 1
+ return ["[%s]" % self.visit(node.children[0])[0]]
+
+ def visit_UnaryExpressionNode(self, node):
+ children = []
+ for child in node.children:
+ child_str = self.visit(child)[0]
+ if isinstance(child, BinaryExpressionNode):
+ child_str = "(%s)" % child_str
+ children.append(child_str)
+ return [" ".join(children)]
+
+ def visit_BinaryExpressionNode(self, node):
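+        # Children are stored as [operator, lhs, rhs]; emit them in infix
+        # order, parenthesizing binary sub-expressions according to relative
+        # operator precedence.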
+ assert len(node.children) == 3
+ children = []
+ for child_index in [1, 0, 2]:
+ child = node.children[child_index]
+ child_str = self.visit(child)[0]
+ if (isinstance(child, BinaryExpressionNode) and
+ precedence(node.children[0]) < precedence(child.children[0])):
+ child_str = "(%s)" % child_str
+ children.append(child_str)
+ return [" ".join(children)]
+
+ def visit_UnaryOperatorNode(self, node):
+ return [ensure_text(node.data)]
+
+ def visit_BinaryOperatorNode(self, node):
+ return [ensure_text(node.data)]
+
+
+def serialize(tree, *args, **kwargs):
+ s = ManifestSerializer(*args, **kwargs)
+ return s.serialize(tree)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/__init__.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/__init__.py
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_conditional.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_conditional.py
new file mode 100644
index 0000000000..0059b98556
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_conditional.py
@@ -0,0 +1,143 @@
+# mypy: allow-untyped-defs
+
+import unittest
+
+from ..backends import conditional
+from ..node import BinaryExpressionNode, BinaryOperatorNode, VariableNode, NumberNode
+
+
+class TestConditional(unittest.TestCase):
+ def compile(self, input_text):
+ return conditional.compile(input_text)
+
+ def test_get_0(self):
+ data = b"""
+key: value
+
+[Heading 1]
+ other_key:
+ if a == 1: value_1
+ if a == 2: value_2
+ value_3
+"""
+
+ manifest = self.compile(data)
+
+ self.assertEqual(manifest.get("key"), "value")
+ children = list(item for item in manifest.iterchildren())
+ self.assertEqual(len(children), 1)
+ section = children[0]
+ self.assertEqual(section.name, "Heading 1")
+
+ self.assertEqual(section.get("other_key", {"a": 1}), "value_1")
+ self.assertEqual(section.get("other_key", {"a": 2}), "value_2")
+ self.assertEqual(section.get("other_key", {"a": 7}), "value_3")
+ self.assertEqual(section.get("key"), "value")
+
+ def test_get_1(self):
+ data = b"""
+key: value
+
+[Heading 1]
+ other_key:
+ if a == "1": value_1
+ if a == 2: value_2
+ value_3
+"""
+
+ manifest = self.compile(data)
+
+ children = list(item for item in manifest.iterchildren())
+ section = children[0]
+
+ self.assertEqual(section.get("other_key", {"a": "1"}), "value_1")
+ self.assertEqual(section.get("other_key", {"a": 1}), "value_3")
+
+ def test_get_2(self):
+ data = b"""
+key:
+ if a[1] == "b": value_1
+ if a[1] == 2: value_2
+ value_3
+"""
+
+ manifest = self.compile(data)
+
+ self.assertEqual(manifest.get("key", {"a": "ab"}), "value_1")
+ self.assertEqual(manifest.get("key", {"a": [1, 2]}), "value_2")
+
+ def test_get_3(self):
+ data = b"""
+key:
+ if a[1] == "ab"[1]: value_1
+ if a[1] == 2: value_2
+ value_3
+"""
+
+ manifest = self.compile(data)
+
+ self.assertEqual(manifest.get("key", {"a": "ab"}), "value_1")
+ self.assertEqual(manifest.get("key", {"a": [1, 2]}), "value_2")
+
+ def test_set_0(self):
+ data = b"""
+key:
+ if a == "a": value_1
+ if a == "b": value_2
+ value_3
+"""
+ manifest = self.compile(data)
+
+ manifest.set("new_key", "value_new")
+
+ self.assertEqual(manifest.get("new_key"), "value_new")
+
+ def test_set_1(self):
+ data = b"""
+key:
+ if a == "a": value_1
+ if a == "b": value_2
+ value_3
+"""
+
+ manifest = self.compile(data)
+
+ manifest.set("key", "value_new")
+
+ self.assertEqual(manifest.get("key"), "value_new")
+ self.assertEqual(manifest.get("key", {"a": "a"}), "value_1")
+
+ def test_set_2(self):
+ data = b"""
+key:
+ if a == "a": value_1
+ if a == "b": value_2
+ value_3
+"""
+
+ manifest = self.compile(data)
+
+ expr = BinaryExpressionNode(BinaryOperatorNode("=="),
+ VariableNode("a"),
+ NumberNode("1"))
+
+ manifest.set("key", "value_new", expr)
+
+ self.assertEqual(manifest.get("key", {"a": 1}), "value_new")
+ self.assertEqual(manifest.get("key", {"a": "a"}), "value_1")
+
+ def test_api_0(self):
+ data = b"""
+key:
+ if a == 1.5: value_1
+ value_2
+key_1: other_value
+"""
+ manifest = self.compile(data)
+
+ self.assertFalse(manifest.is_empty)
+ self.assertEqual(manifest.root, manifest)
+ self.assertTrue(manifest.has_key("key_1"))
+ self.assertFalse(manifest.has_key("key_2"))
+
+ self.assertEqual(set(manifest.iterkeys()), {"key", "key_1"})
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_parser.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_parser.py
new file mode 100644
index 0000000000..a220307088
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_parser.py
@@ -0,0 +1,155 @@
+# mypy: allow-untyped-defs
+
+import unittest
+
+from .. import parser
+
+# There aren't many tests here because it is far more convenient to use
+# test_serializer for the majority of cases.
+
+
+class TestExpression(unittest.TestCase):
+ def setUp(self):
+ self.parser = parser.Parser()
+
+ def parse(self, input_str):
+ return self.parser.parse(input_str)
+
+ def compare(self, input_text, expected):
+ actual = self.parse(input_text)
+ self.match(expected, actual)
+
+ def match(self, expected_node, actual_node):
+ self.assertEqual(expected_node[0], actual_node.__class__.__name__)
+ self.assertEqual(expected_node[1], actual_node.data)
+ self.assertEqual(len(expected_node[2]), len(actual_node.children))
+ for expected_child, actual_child in zip(expected_node[2], actual_node.children):
+ self.match(expected_child, actual_child)
+
+ def test_expr_0(self):
+ self.compare(
+ b"""
+key:
+ if x == 1 : value""",
+ ["DataNode", None,
+ [["KeyValueNode", "key",
+ [["ConditionalNode", None,
+ [["BinaryExpressionNode", None,
+ [["BinaryOperatorNode", "==", []],
+ ["VariableNode", "x", []],
+ ["NumberNode", "1", []]
+ ]],
+ ["ValueNode", "value", []],
+ ]]]]]]
+ )
+
+ def test_expr_1(self):
+ self.compare(
+ b"""
+key:
+ if not x and y : value""",
+ ["DataNode", None,
+ [["KeyValueNode", "key",
+ [["ConditionalNode", None,
+ [["BinaryExpressionNode", None,
+ [["BinaryOperatorNode", "and", []],
+ ["UnaryExpressionNode", None,
+ [["UnaryOperatorNode", "not", []],
+ ["VariableNode", "x", []]
+ ]],
+ ["VariableNode", "y", []]
+ ]],
+ ["ValueNode", "value", []],
+ ]]]]]]
+ )
+
+ def test_expr_2(self):
+ self.compare(
+ b"""
+key:
+ if x == 1 : [value1, value2]""",
+ ["DataNode", None,
+ [["KeyValueNode", "key",
+ [["ConditionalNode", None,
+ [["BinaryExpressionNode", None,
+ [["BinaryOperatorNode", "==", []],
+ ["VariableNode", "x", []],
+ ["NumberNode", "1", []]
+ ]],
+ ["ListNode", None,
+ [["ValueNode", "value1", []],
+ ["ValueNode", "value2", []]]],
+ ]]]]]]
+ )
+
+ def test_expr_3(self):
+ self.compare(
+ b"""
+key:
+ if x == 1: 'if b: value'""",
+ ["DataNode", None,
+ [["KeyValueNode", "key",
+ [["ConditionalNode", None,
+ [["BinaryExpressionNode", None,
+ [["BinaryOperatorNode", "==", []],
+ ["VariableNode", "x", []],
+ ["NumberNode", "1", []]
+ ]],
+ ["ValueNode", "if b: value", []],
+ ]]]]]]
+ )
+
+ def test_atom_0(self):
+ with self.assertRaises(parser.ParseError):
+ self.parse(b"key: @Unknown")
+
+ def test_atom_1(self):
+ with self.assertRaises(parser.ParseError):
+ self.parse(b"key: @true")
+
+ def test_list_expr(self):
+ self.compare(
+ b"""
+key:
+ if x == 1: [a]
+ [b]""",
+ ["DataNode", None,
+ [["KeyValueNode", "key",
+ [["ConditionalNode", None,
+ [["BinaryExpressionNode", None,
+ [["BinaryOperatorNode", "==", []],
+ ["VariableNode", "x", []],
+ ["NumberNode", "1", []]
+ ]],
+ ["ListNode", None,
+ [["ValueNode", "a", []]]],
+ ]],
+ ["ListNode", None,
+ [["ValueNode", "b", []]]]]]]])
+
+ def test_list_heading(self):
+ self.compare(
+ b"""
+key:
+ if x == 1: [a]
+[b]""",
+ ["DataNode", None,
+ [["KeyValueNode", "key",
+ [["ConditionalNode", None,
+ [["BinaryExpressionNode", None,
+ [["BinaryOperatorNode", "==", []],
+ ["VariableNode", "x", []],
+ ["NumberNode", "1", []]
+ ]],
+ ["ListNode", None,
+ [["ValueNode", "a", []]]],
+ ]]]],
+ ["DataNode", "b", []]]])
+
+ def test_if_1(self):
+ with self.assertRaises(parser.ParseError):
+ self.parse(b"key: if foo")
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_serializer.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_serializer.py
new file mode 100644
index 0000000000..d73668ac64
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_serializer.py
@@ -0,0 +1,356 @@
+# mypy: allow-untyped-defs
+
+import textwrap
+import unittest
+
+from .. import parser, serializer
+
+
+class SerializerTest(unittest.TestCase):
+ def setUp(self):
+ self.serializer = serializer.ManifestSerializer()
+ self.parser = parser.Parser()
+
+ def serialize(self, input_str):
+ return self.serializer.serialize(self.parser.parse(input_str))
+
+ def compare(self, input_str, expected=None):
+ if expected is None:
+ expected = input_str.decode("utf-8")
+ actual = self.serialize(input_str)
+ self.assertEqual(actual, expected)
+
+ def test_0(self):
+ self.compare(b"""key: value
+[Heading 1]
+ other_key: other_value
+""")
+
+ def test_1(self):
+ self.compare(b"""key: value
+[Heading 1]
+ other_key:
+ if a or b: other_value
+""")
+
+ def test_2(self):
+ self.compare(b"""key: value
+[Heading 1]
+ other_key:
+ if a or b: other_value
+ fallback_value
+""")
+
+ def test_3(self):
+ self.compare(b"""key: value
+[Heading 1]
+ other_key:
+ if a == 1: other_value
+ fallback_value
+""")
+
+ def test_4(self):
+ self.compare(b"""key: value
+[Heading 1]
+ other_key:
+ if a == "1": other_value
+ fallback_value
+""")
+
+ def test_5(self):
+ self.compare(b"""key: value
+[Heading 1]
+ other_key:
+ if a == "abc"[1]: other_value
+ fallback_value
+""")
+
+ def test_6(self):
+ self.compare(b"""key: value
+[Heading 1]
+ other_key:
+ if a == "abc"[c]: other_value
+ fallback_value
+""")
+
+ def test_7(self):
+ self.compare(b"""key: value
+[Heading 1]
+ other_key:
+ if (a or b) and c: other_value
+ fallback_value
+""",
+"""key: value
+[Heading 1]
+ other_key:
+ if a or b and c: other_value
+ fallback_value
+""")
+
+ def test_8(self):
+ self.compare(b"""key: value
+[Heading 1]
+ other_key:
+ if a or (b and c): other_value
+ fallback_value
+""")
+
+ def test_9(self):
+ self.compare(b"""key: value
+[Heading 1]
+ other_key:
+ if not (a and b): other_value
+ fallback_value
+""")
+
+ def test_10(self):
+ self.compare(b"""key: value
+[Heading 1]
+ some_key: some_value
+
+[Heading 2]
+ other_key: other_value
+""")
+
+ def test_11(self):
+ self.compare(b"""key:
+ if not a and b and c and d: true
+""")
+
+ def test_12(self):
+ self.compare(b"""[Heading 1]
+ key: [a:1, b:2]
+""")
+
+ def test_13(self):
+ self.compare(b"""key: [a:1, "b:#"]
+""")
+
+ def test_14(self):
+ self.compare(b"""key: [","]
+""")
+
+ def test_15(self):
+ self.compare(b"""key: ,
+""")
+
+ def test_16(self):
+ self.compare(b"""key: ["]", b]
+""")
+
+ def test_17(self):
+ self.compare(b"""key: ]
+""")
+
+ def test_18(self):
+ self.compare(br"""key: \]
+ """, """key: ]
+""")
+
+ def test_atom_as_default(self):
+ self.compare(
+ textwrap.dedent(
+ """\
+ key:
+ if a == 1: @True
+ @False
+ """).encode())
+
+ def test_escape_0(self):
+ self.compare(br"""k\t\:y: \a\b\f\n\r\t\v""",
+ r"""k\t\:y: \x07\x08\x0c\n\r\t\x0b
+""")
+
+ def test_escape_1(self):
+ self.compare(br"""k\x00: \x12A\x45""",
+ r"""k\x00: \x12AE
+""")
+
+ def test_escape_2(self):
+ self.compare(br"""k\u0045y: \u1234A\uABc6""",
+ """kEy: \u1234A\uabc6
+""")
+
+ def test_escape_3(self):
+ self.compare(br"""k\u0045y: \u1234A\uABc6""",
+ """kEy: \u1234A\uabc6
+""")
+
+ def test_escape_4(self):
+ self.compare(br"""key: '\u1234A\uABc6'""",
+ """key: \u1234A\uabc6
+""")
+
+ def test_escape_5(self):
+ self.compare(br"""key: [\u1234A\uABc6]""",
+ """key: [\u1234A\uabc6]
+""")
+
+ def test_escape_6(self):
+ self.compare(br"""key: [\u1234A\uABc6\,]""",
+ """key: ["\u1234A\uabc6,"]
+""")
+
+ def test_escape_7(self):
+ self.compare(br"""key: [\,\]\#]""",
+ r"""key: [",]#"]
+""")
+
+ def test_escape_8(self):
+ self.compare(br"""key: \#""",
+ r"""key: "#"
+""")
+
+ def test_escape_9(self):
+ self.compare(br"""key: \U10FFFFabc""",
+ """key: \U0010FFFFabc
+""")
+
+ def test_escape_10(self):
+ self.compare(br"""key: \u10FFab""",
+ """key: \u10FFab
+""")
+
+ def test_escape_11(self):
+ self.compare(br"""key: \\ab
+""")
+
+ def test_atom_1(self):
+ self.compare(br"""key: @True
+""")
+
+ def test_atom_2(self):
+ self.compare(br"""key: @False
+""")
+
+ def test_atom_3(self):
+ self.compare(br"""key: @Reset
+""")
+
+ def test_atom_4(self):
+ self.compare(br"""key: [a, @Reset, b]
+""")
+
+ def test_conditional_1(self):
+ self.compare(b"""foo:
+ if a or b: [1, 2]
+""")
+
+ def test_if_string_0(self):
+ self.compare(b"""foo: "if bar"
+""")
+
+ def test_non_ascii_1(self):
+ self.compare(b"""[\xf0\x9f\x99\x84]
+""")
+
+ def test_comments_preceding_kv_pair(self):
+ self.compare(
+ textwrap.dedent(
+ """\
+ # These two comments should be attached
+ # to the first key-value pair.
+ key1: value
+ # Attached to the second pair.
+ key2: value
+ """).encode())
+
+ def test_comments_preceding_headings(self):
+ self.compare(
+ textwrap.dedent(
+ """\
+ # Attached to the first heading.
+ [test1.html]
+
+ # Attached to the second heading.
+ [test2.html]
+ # Attached to subheading.
+ # Also attached to subheading.
+ [subheading] # Also attached to subheading (inline).
+ """).encode(),
+ textwrap.dedent(
+ """\
+ # Attached to the first heading.
+ [test1.html]
+
+ # Attached to the second heading.
+ [test2.html]
+ # Attached to subheading.
+ # Also attached to subheading.
+ [subheading] # Also attached to subheading (inline).
+ """))
+
+ def test_comments_inline(self):
+ self.compare(
+ textwrap.dedent(
+ """\
+ key1: # inline after key
+ value # inline after string value
+ key2:
+ [value] # inline after list in group
+ [test.html] # inline after heading
+ key1: @True # inline after atom
+ key2: [ # inline after list start
+ @False, # inline after atom in list
+ value1, # inline after value in list
+ value2] # inline after list end
+ """).encode(),
+ textwrap.dedent(
+ """\
+ # inline after key
+ key1: value # inline after string value
+ key2: [value] # inline after list in group
+ [test.html] # inline after heading
+ key1: @True # inline after atom
+ # inline after atom in list
+ # inline after value in list
+ # inline after list end
+ key2: [@False, value1, value2] # inline after list start
+ """))
+
+ def test_comments_conditions(self):
+ self.compare(
+ textwrap.dedent(
+ """\
+ key1:
+ # cond 1
+ if cond == 1: value
+ # cond 2
+ if cond == 2: value # cond 2
+ # cond 3
+ # cond 3
+ if cond == 3: value
+ # default 0
+ default # default 1
+ # default 2
+ # default 3
+ key2:
+ if cond == 1: value
+ [value]
+ # list default
+ key3:
+ if cond == 1: value
+ # no default
+ """).encode(),
+ textwrap.dedent(
+ """\
+ key1:
+ # cond 1
+ if cond == 1: value
+ # cond 2
+ if cond == 2: value # cond 2
+ # cond 3
+ # cond 3
+ if cond == 3: value
+ # default 0
+ # default 2
+ # default 3
+ default # default 1
+ key2:
+ if cond == 1: value
+ # list default
+ [value]
+ # no default
+ key3:
+ if cond == 1: value
+ """))
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_static.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_static.py
new file mode 100644
index 0000000000..0ded07f42d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_static.py
@@ -0,0 +1,98 @@
+# mypy: allow-untyped-defs
+
+import unittest
+
+from ..backends import static
+
+# There aren't many tests here because it is far more convenient to use
+# test_serializer for the majority of cases.
+
+
+class TestStatic(unittest.TestCase):
+ def compile(self, input_text, input_data):
+ return static.compile(input_text, input_data)
+
+ def test_get_0(self):
+ data = b"""
+key: value
+
+[Heading 1]
+ other_key:
+ if a == 1: value_1
+ if a == 2: value_2
+ value_3
+"""
+
+ manifest = self.compile(data, {"a": 2})
+
+ self.assertEqual(manifest.get("key"), "value")
+ children = list(item for item in manifest.iterchildren())
+ self.assertEqual(len(children), 1)
+ section = children[0]
+ self.assertEqual(section.name, "Heading 1")
+
+ self.assertEqual(section.get("other_key"), "value_2")
+ self.assertEqual(section.get("key"), "value")
+
+ def test_get_1(self):
+ data = b"""
+key: value
+
+[Heading 1]
+ other_key:
+ if a == 1: value_1
+ if a == 2: value_2
+ value_3
+"""
+ manifest = self.compile(data, {"a": 3})
+
+ children = list(item for item in manifest.iterchildren())
+ section = children[0]
+ self.assertEqual(section.get("other_key"), "value_3")
+
+ def test_get_3(self):
+ data = b"""key:
+ if a == "1": value_1
+ if a[0] == "ab"[0]: value_2
+"""
+ manifest = self.compile(data, {"a": "1"})
+ self.assertEqual(manifest.get("key"), "value_1")
+
+ manifest = self.compile(data, {"a": "ac"})
+ self.assertEqual(manifest.get("key"), "value_2")
+
+ def test_get_4(self):
+ data = b"""key:
+ if not a: value_1
+ value_2
+"""
+ manifest = self.compile(data, {"a": True})
+ self.assertEqual(manifest.get("key"), "value_2")
+
+ manifest = self.compile(data, {"a": False})
+ self.assertEqual(manifest.get("key"), "value_1")
+
+ def test_api(self):
+ data = b"""key:
+ if a == 1.5: value_1
+ value_2
+key_1: other_value
+"""
+ manifest = self.compile(data, {"a": 1.5})
+
+ self.assertFalse(manifest.is_empty)
+ self.assertEqual(manifest.root, manifest)
+ self.assertTrue(manifest.has_key("key_1"))
+ self.assertFalse(manifest.has_key("key_2"))
+
+ self.assertEqual(set(manifest.iterkeys()), {"key", "key_1"})
+ self.assertEqual(set(manifest.itervalues()), {"value_1", "other_value"})
+
+ def test_is_empty_1(self):
+ data = b"""
+[Section]
+ [Subsection]
+"""
+ manifest = self.compile(data, {})
+
+ self.assertTrue(manifest.is_empty)
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_tokenizer.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_tokenizer.py
new file mode 100644
index 0000000000..6b9d052560
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_tokenizer.py
@@ -0,0 +1,385 @@
+# mypy: allow-untyped-defs
+
+import textwrap
+import unittest
+
+from .. import parser
+from ..parser import token_types
+
+class TokenizerTest(unittest.TestCase):
+ def setUp(self):
+ self.tokenizer = parser.Tokenizer()
+
+ def tokenize(self, input_str):
+ rv = []
+ for item in self.tokenizer.tokenize(input_str):
+ rv.append(item)
+ if item[0] == token_types.eof:
+ break
+ return rv
+
+ def compare(self, input_text, expected):
+ expected = expected + [(token_types.eof, None)]
+ actual = self.tokenize(input_text)
+ self.assertEqual(actual, expected)
+
+ def test_heading_0(self):
+ self.compare(b"""[Heading text]""",
+ [(token_types.paren, "["),
+ (token_types.string, "Heading text"),
+ (token_types.paren, "]")])
+
+ def test_heading_1(self):
+ self.compare(br"""[Heading [text\]]""",
+ [(token_types.paren, "["),
+ (token_types.string, "Heading [text]"),
+ (token_types.paren, "]")])
+
+ def test_heading_2(self):
+ self.compare(b"""[Heading #text]""",
+ [(token_types.paren, "["),
+ (token_types.string, "Heading #text"),
+ (token_types.paren, "]")])
+
+ def test_heading_3(self):
+ self.compare(br"""[Heading [\]text]""",
+ [(token_types.paren, "["),
+ (token_types.string, "Heading []text"),
+ (token_types.paren, "]")])
+
+ def test_heading_4(self):
+ with self.assertRaises(parser.ParseError):
+ self.tokenize(b"[Heading")
+
+ def test_heading_5(self):
+ self.compare(br"""[Heading [\]text] #comment""",
+ [(token_types.paren, "["),
+ (token_types.string, "Heading []text"),
+ (token_types.paren, "]"),
+ (token_types.inline_comment, "comment")])
+
+ def test_heading_6(self):
+ self.compare(br"""[Heading \ttext]""",
+ [(token_types.paren, "["),
+ (token_types.string, "Heading \ttext"),
+ (token_types.paren, "]")])
+
+ def test_key_0(self):
+ self.compare(b"""key:value""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.string, "value")])
+
+ def test_key_1(self):
+ self.compare(b"""key : value""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.string, "value")])
+
+ def test_key_2(self):
+ self.compare(b"""key : val ue""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.string, "val ue")])
+
+ def test_key_3(self):
+ self.compare(b"""key: value#comment""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.string, "value"),
+ (token_types.inline_comment, "comment")])
+
+ def test_key_4(self):
+ with self.assertRaises(parser.ParseError):
+ self.tokenize(b"""ke y: value""")
+
+ def test_key_5(self):
+ with self.assertRaises(parser.ParseError):
+ self.tokenize(b"""key""")
+
+ def test_key_6(self):
+ self.compare(b"""key: "value\"""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.string, "value")])
+
+ def test_key_7(self):
+ self.compare(b"""key: 'value'""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.string, "value")])
+
+ def test_key_8(self):
+ self.compare(b"""key: "#value\"""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.string, "#value")])
+
+ def test_key_9(self):
+ self.compare(b"""key: '#value\'""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.string, "#value")])
+
+ def test_key_10(self):
+ with self.assertRaises(parser.ParseError):
+ self.tokenize(b"""key: "value""")
+
+ def test_key_11(self):
+ with self.assertRaises(parser.ParseError):
+ self.tokenize(b"""key: 'value""")
+
+ def test_key_12(self):
+ with self.assertRaises(parser.ParseError):
+ self.tokenize(b"""key: 'value""")
+
+ def test_key_13(self):
+ with self.assertRaises(parser.ParseError):
+ self.tokenize(b"""key: 'value' abc""")
+
+ def test_key_14(self):
+ self.compare(br"""key: \\nb""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.string, r"\nb")])
+
+ def test_list_0(self):
+ self.compare(b"""
+key: []""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.list_start, "["),
+ (token_types.list_end, "]")])
+
+ def test_list_1(self):
+ self.compare(b"""
+key: [a, "b"]""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.list_start, "["),
+ (token_types.string, "a"),
+ (token_types.string, "b"),
+ (token_types.list_end, "]")])
+
+ def test_list_2(self):
+ self.compare(b"""
+key: [a,
+ b]""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.list_start, "["),
+ (token_types.string, "a"),
+ (token_types.string, "b"),
+ (token_types.list_end, "]")])
+
+ def test_list_3(self):
+ self.compare(b"""
+key: [a, #b]
+ c]""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.list_start, "["),
+ (token_types.string, "a"),
+ (token_types.inline_comment, "b]"),
+ (token_types.string, "c"),
+ (token_types.list_end, "]")])
+
+ def test_list_4(self):
+ with self.assertRaises(parser.ParseError):
+ self.tokenize(b"""key: [a #b]
+ c]""")
+
+ def test_list_5(self):
+ with self.assertRaises(parser.ParseError):
+ self.tokenize(b"""key: [a \\
+ c]""")
+
+ def test_list_6(self):
+ self.compare(b"""key: [a , b]""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.list_start, "["),
+ (token_types.string, "a"),
+ (token_types.string, "b"),
+ (token_types.list_end, "]")])
+
+ def test_expr_0(self):
+ self.compare(b"""
+key:
+ if cond == 1: value""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.group_start, None),
+ (token_types.ident, "if"),
+ (token_types.ident, "cond"),
+ (token_types.ident, "=="),
+ (token_types.number, "1"),
+ (token_types.separator, ":"),
+ (token_types.string, "value")])
+
+ def test_expr_1(self):
+ self.compare(b"""
+key:
+ if cond == 1: value1
+ value2""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.group_start, None),
+ (token_types.ident, "if"),
+ (token_types.ident, "cond"),
+ (token_types.ident, "=="),
+ (token_types.number, "1"),
+ (token_types.separator, ":"),
+ (token_types.string, "value1"),
+ (token_types.string, "value2")])
+
+ def test_expr_2(self):
+ self.compare(b"""
+key:
+ if cond=="1": value""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.group_start, None),
+ (token_types.ident, "if"),
+ (token_types.ident, "cond"),
+ (token_types.ident, "=="),
+ (token_types.string, "1"),
+ (token_types.separator, ":"),
+ (token_types.string, "value")])
+
+ def test_expr_3(self):
+ self.compare(b"""
+key:
+ if cond==1.1: value""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.group_start, None),
+ (token_types.ident, "if"),
+ (token_types.ident, "cond"),
+ (token_types.ident, "=="),
+ (token_types.number, "1.1"),
+ (token_types.separator, ":"),
+ (token_types.string, "value")])
+
+ def test_expr_4(self):
+ self.compare(b"""
+key:
+ if cond==1.1 and cond2 == "a": value""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.group_start, None),
+ (token_types.ident, "if"),
+ (token_types.ident, "cond"),
+ (token_types.ident, "=="),
+ (token_types.number, "1.1"),
+ (token_types.ident, "and"),
+ (token_types.ident, "cond2"),
+ (token_types.ident, "=="),
+ (token_types.string, "a"),
+ (token_types.separator, ":"),
+ (token_types.string, "value")])
+
+ def test_expr_5(self):
+ self.compare(b"""
+key:
+ if (cond==1.1 ): value""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.group_start, None),
+ (token_types.ident, "if"),
+ (token_types.paren, "("),
+ (token_types.ident, "cond"),
+ (token_types.ident, "=="),
+ (token_types.number, "1.1"),
+ (token_types.paren, ")"),
+ (token_types.separator, ":"),
+ (token_types.string, "value")])
+
+ def test_expr_6(self):
+ self.compare(b"""
+key:
+ if "\\ttest": value""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.group_start, None),
+ (token_types.ident, "if"),
+ (token_types.string, "\ttest"),
+ (token_types.separator, ":"),
+ (token_types.string, "value")])
+
+ def test_expr_7(self):
+ with self.assertRaises(parser.ParseError):
+ self.tokenize(b"""
+key:
+ if 1A: value""")
+
+ def test_expr_8(self):
+ with self.assertRaises(parser.ParseError):
+ self.tokenize(b"""
+key:
+ if 1a: value""")
+
+ def test_expr_9(self):
+ with self.assertRaises(parser.ParseError):
+ self.tokenize(b"""
+key:
+ if 1.1.1: value""")
+
+ def test_expr_10(self):
+ self.compare(b"""
+key:
+ if 1.: value""",
+ [(token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.group_start, None),
+ (token_types.ident, "if"),
+ (token_types.number, "1."),
+ (token_types.separator, ":"),
+ (token_types.string, "value")])
+
+ def test_comment_with_indents(self):
+ self.compare(
+ textwrap.dedent(
+ """\
+ # comment 0
+ [Heading]
+ # comment 1
+ # comment 2
+ """).encode(),
+ [(token_types.comment, " comment 0"),
+ (token_types.paren, "["),
+ (token_types.string, "Heading"),
+ (token_types.paren, "]"),
+ (token_types.comment, " comment 1"),
+ (token_types.comment, " comment 2")])
+
+ def test_comment_inline(self):
+ self.compare(
+ textwrap.dedent(
+ """\
+ [Heading] # after heading
+ key: # after key
+ # before group start
+ if cond: value1 # after value1
+ value2 # after value2
+ """).encode(),
+ [(token_types.paren, "["),
+ (token_types.string, "Heading"),
+ (token_types.paren, "]"),
+ (token_types.inline_comment, " after heading"),
+ (token_types.string, "key"),
+ (token_types.separator, ":"),
+ (token_types.inline_comment, " after key"),
+ (token_types.comment, " before group start"),
+ (token_types.group_start, None),
+ (token_types.ident, "if"),
+ (token_types.ident, "cond"),
+ (token_types.separator, ":"),
+ (token_types.string, "value1"),
+ (token_types.inline_comment, " after value1"),
+ (token_types.string, "value2"),
+ (token_types.inline_comment, " after value2")])
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptrunner.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptrunner.py
new file mode 100644
index 0000000000..da3b63ba5b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptrunner.py
@@ -0,0 +1,536 @@
+# mypy: allow-untyped-defs
+
+import json
+import os
+import signal
+import sys
+from collections import defaultdict
+from datetime import datetime, timedelta
+
+import wptserve
+from wptserve import sslutils
+
+from . import environment as env
+from . import instruments
+from . import mpcontext
+from . import products
+from . import testloader
+from . import wptcommandline
+from . import wptlogging
+from . import wpttest
+from mozlog import capture, handlers
+from .font import FontInstaller
+from .testrunner import ManagerGroup, TestImplementation
+
+here = os.path.dirname(__file__)
+
+logger = None
+
+"""Runner for web-platform-tests
+
+The runner has several design goals:
+
+* Tests should run with no modification from upstream.
+
+* Tests should be regarded as "untrusted" so that errors, timeouts and even
+ crashes in the tests can be handled without failing the entire test run.
+
+* For performance, tests can be run in multiple browsers in parallel.
+
+The upstream repository has the facility for creating a test manifest in JSON
+format. This manifest is used directly to determine which tests exist. Local
+metadata files are used to store the expected test results.
+"""
+
+def setup_logging(*args, **kwargs):
+ global logger
+ logger = wptlogging.setup(*args, **kwargs)
+ return logger
+
+
+def get_loader(test_paths, product, debug=None, run_info_extras=None, chunker_kwargs=None,
+ test_groups=None, **kwargs):
+ if run_info_extras is None:
+ run_info_extras = {}
+
+ run_info = wpttest.get_run_info(kwargs["run_info"], product,
+ browser_version=kwargs.get("browser_version"),
+ browser_channel=kwargs.get("browser_channel"),
+ verify=kwargs.get("verify"),
+ debug=debug,
+ extras=run_info_extras,
+ device_serials=kwargs.get("device_serial"),
+ adb_binary=kwargs.get("adb_binary"))
+
+ test_manifests = testloader.ManifestLoader(test_paths, force_manifest_update=kwargs["manifest_update"],
+ manifest_download=kwargs["manifest_download"]).load()
+
+ manifest_filters = []
+
+ include = kwargs["include"]
+ if kwargs["include_file"]:
+ include = include or []
+ include.extend(testloader.read_include_from_file(kwargs["include_file"]))
+ if test_groups:
+ include = testloader.update_include_for_groups(test_groups, include)
+
+ if include or kwargs["exclude"] or kwargs["include_manifest"] or kwargs["default_exclude"]:
+ manifest_filters.append(testloader.TestFilter(include=include,
+ exclude=kwargs["exclude"],
+ manifest_path=kwargs["include_manifest"],
+ test_manifests=test_manifests,
+ explicit=kwargs["default_exclude"]))
+
+ ssl_enabled = sslutils.get_cls(kwargs["ssl_type"]).ssl_enabled
+ h2_enabled = wptserve.utils.http2_compatible()
+ test_loader = testloader.TestLoader(test_manifests,
+ kwargs["test_types"],
+ run_info,
+ manifest_filters=manifest_filters,
+ chunk_type=kwargs["chunk_type"],
+ total_chunks=kwargs["total_chunks"],
+ chunk_number=kwargs["this_chunk"],
+ include_https=ssl_enabled,
+ include_h2=h2_enabled,
+ include_webtransport_h3=kwargs["enable_webtransport_h3"],
+ skip_timeout=kwargs["skip_timeout"],
+ skip_implementation_status=kwargs["skip_implementation_status"],
+ chunker_kwargs=chunker_kwargs)
+ return run_info, test_loader
+
+
+def list_test_groups(test_paths, product, **kwargs):
+ env.do_delayed_imports(logger, test_paths)
+
+ run_info_extras = products.Product(kwargs["config"], product).run_info_extras(**kwargs)
+
+ run_info, test_loader = get_loader(test_paths, product,
+ run_info_extras=run_info_extras, **kwargs)
+
+ for item in sorted(test_loader.groups(kwargs["test_types"])):
+ print(item)
+
+
+def list_disabled(test_paths, product, **kwargs):
+ env.do_delayed_imports(logger, test_paths)
+
+ rv = []
+
+ run_info_extras = products.Product(kwargs["config"], product).run_info_extras(**kwargs)
+
+ run_info, test_loader = get_loader(test_paths, product,
+ run_info_extras=run_info_extras, **kwargs)
+
+ for test_type, tests in test_loader.disabled_tests.items():
+ for test in tests:
+ rv.append({"test": test.id, "reason": test.disabled()})
+ print(json.dumps(rv, indent=2))
+
+
+def list_tests(test_paths, product, **kwargs):
+ env.do_delayed_imports(logger, test_paths)
+
+ run_info_extras = products.Product(kwargs["config"], product).run_info_extras(**kwargs)
+
+ run_info, test_loader = get_loader(test_paths, product,
+ run_info_extras=run_info_extras, **kwargs)
+
+ for test in test_loader.test_ids:
+ print(test)
+
+
+def get_pause_after_test(test_loader, **kwargs):
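+    # Decide whether to pause for manual interaction after each test: pause
+    # when explicitly requested, when debugging a test, or when running a
+    # single testharness test exactly once; headless runs and
+    # repeat-until-unexpected runs never pause by default.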
+ if kwargs["pause_after_test"] is None:
+ if kwargs["repeat_until_unexpected"]:
+ return False
+ if kwargs["headless"]:
+ return False
+ if kwargs["debug_test"]:
+ return True
+ tests = test_loader.tests
+ is_single_testharness = (sum(len(item) for item in tests.values()) == 1 and
+ len(tests.get("testharness", [])) == 1)
+ if kwargs["repeat"] == 1 and kwargs["rerun"] == 1 and is_single_testharness:
+ return True
+ return False
+ return kwargs["pause_after_test"]
+
+
+def run_test_iteration(test_status, test_loader, test_source_kwargs, test_source_cls, run_info,
+ recording, test_environment, product, run_test_kwargs):
+ """Runs the entire test suite.
+ This is called for each repeat run requested."""
+ tests_by_type = defaultdict(list)
+ for test_type in test_loader.test_types:
+ tests_by_type[test_type].extend(test_loader.tests[test_type])
+
+ try:
+ test_groups = test_source_cls.tests_by_group(
+ tests_by_type, **test_source_kwargs)
+ except Exception:
+ logger.critical("Loading tests failed")
+ return False
+
+ logger.suite_start(tests_by_type,
+ name='web-platform-test',
+ run_info=run_info,
+ extra={"run_by_dir": run_test_kwargs["run_by_dir"]})
+
+ test_implementation_by_type = {}
+
+ for test_type in run_test_kwargs["test_types"]:
+ executor_cls = product.executor_classes.get(test_type)
+ if executor_cls is None:
+ logger.warning(f"Unsupported test type {test_type} for product {product.name}")
+ continue
+ executor_kwargs = product.get_executor_kwargs(logger,
+ test_type,
+ test_environment,
+ run_info,
+ **run_test_kwargs)
+ browser_cls = product.get_browser_cls(test_type)
+ browser_kwargs = product.get_browser_kwargs(logger,
+ test_type,
+ run_info,
+ config=test_environment.config,
+ num_test_groups=len(test_groups),
+ **run_test_kwargs)
+ test_implementation_by_type[test_type] = TestImplementation(executor_cls,
+ executor_kwargs,
+ browser_cls,
+ browser_kwargs)
+
+ tests_to_run = {}
+ for test_type, test_implementation in test_implementation_by_type.items():
+ executor_cls = test_implementation.executor_cls
+
+ for test in test_loader.disabled_tests[test_type]:
+ logger.test_start(test.id)
+ logger.test_end(test.id, status="SKIP")
+ test_status.skipped += 1
+
+ if test_type == "testharness":
+ tests_to_run[test_type] = []
+ for test in test_loader.tests[test_type]:
+ if ((test.testdriver and not executor_cls.supports_testdriver) or
+ (test.jsshell and not executor_cls.supports_jsshell)):
+ logger.test_start(test.id)
+ logger.test_end(test.id, status="SKIP")
+ test_status.skipped += 1
+ else:
+ tests_to_run[test_type].append(test)
+ else:
+ tests_to_run[test_type] = test_loader.tests[test_type]
+
+ unexpected_tests = set()
+ unexpected_pass_tests = set()
+ recording.pause()
+ retry_counts = run_test_kwargs["retry_unexpected"]
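+    # Each retry pass re-runs only the tests that still have unexpected
+    # failures (unexpected passes are also retried when fail_on_unexpected_pass
+    # is set), up to retry_unexpected additional attempts.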
+ for i in range(retry_counts + 1):
+ if i > 0:
+ if not run_test_kwargs["fail_on_unexpected_pass"]:
+ unexpected_fail_tests = unexpected_tests - unexpected_pass_tests
+ else:
+ unexpected_fail_tests = unexpected_tests
+ if len(unexpected_fail_tests) == 0:
+ break
+ for test_type, tests in tests_to_run.items():
+ tests_to_run[test_type] = [test for test in tests
+ if test.id in unexpected_fail_tests]
+
+ logger.suite_end()
+ logger.suite_start(tests_to_run,
+ name='web-platform-test',
+ run_info=run_info,
+ extra={"run_by_dir": run_test_kwargs["run_by_dir"]})
+
+ with ManagerGroup("web-platform-tests",
+ run_test_kwargs["processes"],
+ test_source_cls,
+ test_source_kwargs,
+ test_implementation_by_type,
+ run_test_kwargs["rerun"],
+ run_test_kwargs["pause_after_test"],
+ run_test_kwargs["pause_on_unexpected"],
+ run_test_kwargs["restart_on_unexpected"],
+ run_test_kwargs["debug_info"],
+ not run_test_kwargs["no_capture_stdio"],
+ run_test_kwargs["restart_on_new_group"],
+ recording=recording) as manager_group:
+ try:
+ handle_interrupt_signals()
+ manager_group.run(tests_to_run)
+ except KeyboardInterrupt:
+ logger.critical("Main thread got signal")
+ manager_group.stop()
+ raise
+
+ test_status.total_tests += manager_group.test_count()
+ unexpected_tests = manager_group.unexpected_tests()
+ unexpected_pass_tests = manager_group.unexpected_pass_tests()
+
+ test_status.unexpected += len(unexpected_tests)
+ test_status.unexpected_pass += len(unexpected_pass_tests)
+
+ logger.suite_end()
+
+ return True
+
+def handle_interrupt_signals():
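+    # Convert termination signals (SIGBREAK on Windows, SIGTERM elsewhere) into
+    # KeyboardInterrupt so the normal interrupt handling and cleanup paths run.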
+ def termination_handler(_signum, _unused_frame):
+ raise KeyboardInterrupt()
+ if sys.platform == "win32":
+ signal.signal(signal.SIGBREAK, termination_handler)
+ else:
+ signal.signal(signal.SIGTERM, termination_handler)
+
+
+def evaluate_runs(test_status, run_test_kwargs):
+ """Evaluates the test counts after the given number of repeat runs has finished"""
+ if test_status.total_tests == 0:
+ if test_status.skipped > 0:
+ logger.warning("All requested tests were skipped")
+ else:
+ if run_test_kwargs["default_exclude"]:
+ logger.info("No tests ran")
+ return True
+ else:
+ logger.critical("No tests ran")
+ return False
+
+ if test_status.unexpected and not run_test_kwargs["fail_on_unexpected"]:
+ logger.info(f"Tolerating {test_status.unexpected} unexpected results")
+ return True
+
+ all_unexpected_passed = (test_status.unexpected and
+ test_status.unexpected == test_status.unexpected_pass)
+ if all_unexpected_passed and not run_test_kwargs["fail_on_unexpected_pass"]:
+ logger.info(f"Tolerating {test_status.unexpected_pass} unexpected results "
+ "because they all PASS")
+ return True
+
+ return test_status.unexpected == 0
+
+
+class TestStatus:
+ """Class that stores information on the results of test runs for later reference"""
+ def __init__(self):
+ self.total_tests = 0
+ self.skipped = 0
+ self.unexpected = 0
+ self.unexpected_pass = 0
+ self.repeated_runs = 0
+ self.expected_repeated_runs = 0
+ self.all_skipped = False
+
+
+def run_tests(config, test_paths, product, **kwargs):
+ """Set up the test environment, load the list of tests to be executed, and
+ invoke the remainder of the code to execute tests"""
+ mp = mpcontext.get_context()
+ if kwargs["instrument_to_file"] is None:
+ recorder = instruments.NullInstrument()
+ else:
+ recorder = instruments.Instrument(kwargs["instrument_to_file"])
+ with recorder as recording, capture.CaptureIO(logger,
+ not kwargs["no_capture_stdio"],
+ mp_context=mp):
+ recording.set(["startup"])
+ env.do_delayed_imports(logger, test_paths)
+
+ product = products.Product(config, product)
+
+ env_extras = product.get_env_extras(**kwargs)
+
+ product.check_args(**kwargs)
+
+ if kwargs["install_fonts"]:
+ env_extras.append(FontInstaller(
+ logger,
+ font_dir=kwargs["font_dir"],
+ ahem=os.path.join(test_paths["/"]["tests_path"], "fonts/Ahem.ttf")
+ ))
+
+ recording.set(["startup", "load_tests"])
+
+ test_groups = (testloader.TestGroupsFile(logger, kwargs["test_groups_file"])
+ if kwargs["test_groups_file"] else None)
+
+ (test_source_cls,
+ test_source_kwargs,
+ chunker_kwargs) = testloader.get_test_src(logger=logger,
+ test_groups=test_groups,
+ **kwargs)
+ run_info, test_loader = get_loader(test_paths,
+ product.name,
+ run_info_extras=product.run_info_extras(**kwargs),
+ chunker_kwargs=chunker_kwargs,
+ test_groups=test_groups,
+ **kwargs)
+
+ logger.info("Using %i client processes" % kwargs["processes"])
+
+ test_status = TestStatus()
+ repeat = kwargs["repeat"]
+ test_status.expected_repeated_runs = repeat
+
+ if len(test_loader.test_ids) == 0 and kwargs["test_list"]:
+ logger.critical("Unable to find any tests at the path(s):")
+ for path in kwargs["test_list"]:
+ logger.critical(" %s" % path)
+ logger.critical("Please check spelling and make sure there are tests in the specified path(s).")
+ return False, test_status
+ kwargs["pause_after_test"] = get_pause_after_test(test_loader, **kwargs)
+
+ ssl_config = {"type": kwargs["ssl_type"],
+ "openssl": {"openssl_binary": kwargs["openssl_binary"]},
+ "pregenerated": {"host_key_path": kwargs["host_key_path"],
+ "host_cert_path": kwargs["host_cert_path"],
+ "ca_cert_path": kwargs["ca_cert_path"]}}
+
+    testharness_timeout_multiplier = product.get_timeout_multiplier("testharness",
+                                                                    run_info,
+                                                                    **kwargs)
+
+ mojojs_path = kwargs["mojojs_path"] if kwargs["enable_mojojs"] else None
+ inject_script = kwargs["inject_script"] if kwargs["inject_script"] else None
+
+ recording.set(["startup", "start_environment"])
+ with env.TestEnvironment(test_paths,
+                                 testharness_timeout_multiplier,
+ kwargs["pause_after_test"],
+ kwargs["debug_test"],
+ kwargs["debug_info"],
+ product.env_options,
+ ssl_config,
+ env_extras,
+ kwargs["enable_webtransport_h3"],
+ mojojs_path,
+ inject_script) as test_environment:
+ recording.set(["startup", "ensure_environment"])
+ try:
+ test_environment.ensure_started()
+ start_time = datetime.now()
+ except env.TestEnvironmentError as e:
+ logger.critical("Error starting test environment: %s" % e)
+ raise
+
+ recording.set(["startup"])
+
+ max_time = None
+ if "repeat_max_time" in kwargs:
+ max_time = timedelta(minutes=kwargs["repeat_max_time"])
+
+ repeat_until_unexpected = kwargs["repeat_until_unexpected"]
+
+            # Keep track of the longest time taken to complete a test suite iteration
+            # so that the runs can be stopped to avoid a possible TC timeout.
+ longest_iteration_time = timedelta()
+
+ while test_status.repeated_runs < repeat or repeat_until_unexpected:
+ # if the next repeat run could cause the TC timeout to be reached,
+ # stop now and use the test results we have.
+ # Pad the total time by 10% to ensure ample time for the next iteration(s).
+ estimate = (datetime.now() +
+ timedelta(seconds=(longest_iteration_time.total_seconds() * 1.1)))
+ if not repeat_until_unexpected and max_time and estimate >= start_time + max_time:
+ logger.info(f"Ran {test_status.repeated_runs} of {repeat} iterations.")
+ break
+
+ # begin tracking runtime of the test suite
+ iteration_start = datetime.now()
+ test_status.repeated_runs += 1
+ if repeat_until_unexpected:
+ logger.info(f"Repetition {test_status.repeated_runs}")
+ elif repeat > 1:
+ logger.info(f"Repetition {test_status.repeated_runs} / {repeat}")
+
+ iter_success = run_test_iteration(test_status, test_loader, test_source_kwargs,
+ test_source_cls, run_info, recording,
+ test_environment, product, kwargs)
+                # If there were issues with the suite run (tests not loaded, etc.), return.
+ if not iter_success:
+ return False, test_status
+ recording.set(["after-end"])
+ logger.info(f"Got {test_status.unexpected} unexpected results, "
+ f"with {test_status.unexpected_pass} unexpected passes")
+
+ # Note this iteration's runtime
+ iteration_runtime = datetime.now() - iteration_start
+ # determine the longest test suite runtime seen.
+ longest_iteration_time = max(longest_iteration_time,
+ iteration_runtime)
+
+ if repeat_until_unexpected and test_status.unexpected > 0:
+ break
+ if test_status.repeated_runs == 1 and len(test_loader.test_ids) == test_status.skipped:
+ test_status.all_skipped = True
+ break
+
+        # Return the evaluation of the runs and the accumulated test status (which records how many iterations ran).
+ return evaluate_runs(test_status, kwargs), test_status
+
+
+def check_stability(**kwargs):
+ from . import stability
+ if kwargs["stability"]:
+ logger.warning("--stability is deprecated; please use --verify instead!")
+ kwargs['verify_max_time'] = None
+ kwargs['verify_chaos_mode'] = False
+ kwargs['verify_repeat_loop'] = 0
+ kwargs['verify_repeat_restart'] = 10 if kwargs['repeat'] == 1 else kwargs['repeat']
+ kwargs['verify_output_results'] = True
+
+ return stability.check_stability(logger,
+ max_time=kwargs['verify_max_time'],
+ chaos_mode=kwargs['verify_chaos_mode'],
+ repeat_loop=kwargs['verify_repeat_loop'],
+ repeat_restart=kwargs['verify_repeat_restart'],
+ output_results=kwargs['verify_output_results'],
+ **kwargs)
+
+
+def start(**kwargs):
+ assert logger is not None
+
+ logged_critical = wptlogging.LoggedAboveLevelHandler("CRITICAL")
+ handler = handlers.LogLevelFilter(logged_critical, "CRITICAL")
+ logger.add_handler(handler)
+
+ rv = False
+ try:
+ if kwargs["list_test_groups"]:
+ list_test_groups(**kwargs)
+ elif kwargs["list_disabled"]:
+ list_disabled(**kwargs)
+ elif kwargs["list_tests"]:
+ list_tests(**kwargs)
+ elif kwargs["verify"] or kwargs["stability"]:
+ rv = check_stability(**kwargs) or logged_critical.has_log
+ else:
+ rv = not run_tests(**kwargs)[0] or logged_critical.has_log
+ finally:
+ logger.shutdown()
+ logger.remove_handler(handler)
+ return rv
+
+
+def main():
+ """Main entry point when calling from the command line"""
+ kwargs = wptcommandline.parse_args()
+
+ try:
+ if kwargs["prefs_root"] is None:
+ kwargs["prefs_root"] = os.path.abspath(os.path.join(here, "prefs"))
+
+ setup_logging(kwargs, {"raw": sys.stdout})
+
+ return start(**kwargs)
+ except Exception:
+ if kwargs["pdb"]:
+ import pdb
+ import traceback
+ print(traceback.format_exc())
+ pdb.post_mortem()
+ else:
+ raise
diff --git a/testing/web-platform/tests/tools/wptrunner/wptrunner/wpttest.py b/testing/web-platform/tests/tools/wptrunner/wptrunner/wpttest.py
new file mode 100644
index 0000000000..c1093f18f4
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wpttest.py
@@ -0,0 +1,715 @@
+# mypy: allow-untyped-defs
+
+import os
+import subprocess
+import sys
+from collections import defaultdict
+from typing import Any, ClassVar, Dict, Type
+from urllib.parse import urljoin
+
+from .wptmanifest.parser import atoms
+
+atom_reset = atoms["Reset"]
+enabled_tests = {"testharness", "reftest", "wdspec", "crashtest", "print-reftest"}
+
+
+class Result:
+ def __init__(self,
+ status,
+ message,
+ expected=None,
+ extra=None,
+ stack=None,
+ known_intermittent=None):
+ if status not in self.statuses:
+ raise ValueError("Unrecognised status %s" % status)
+ self.status = status
+ self.message = message
+ self.expected = expected
+ self.known_intermittent = known_intermittent if known_intermittent is not None else []
+ self.extra = extra if extra is not None else {}
+ self.stack = stack
+
+ def __repr__(self):
+ return f"<{self.__module__}.{self.__class__.__name__} {self.status}>"
+
+
+class SubtestResult:
+ def __init__(self, name, status, message, stack=None, expected=None, known_intermittent=None):
+ self.name = name
+ if status not in self.statuses:
+ raise ValueError("Unrecognised status %s" % status)
+ self.status = status
+ self.message = message
+ self.stack = stack
+ self.expected = expected
+ self.known_intermittent = known_intermittent if known_intermittent is not None else []
+
+ def __repr__(self):
+ return f"<{self.__module__}.{self.__class__.__name__} {self.name} {self.status}>"
+
+
+class TestharnessResult(Result):
+ default_expected = "OK"
+ statuses = {"OK", "ERROR", "INTERNAL-ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT", "CRASH", "PRECONDITION_FAILED"}
+
+
+class TestharnessSubtestResult(SubtestResult):
+ default_expected = "PASS"
+ statuses = {"PASS", "FAIL", "TIMEOUT", "NOTRUN", "PRECONDITION_FAILED"}
+
+
+class ReftestResult(Result):
+ default_expected = "PASS"
+ statuses = {"PASS", "FAIL", "ERROR", "INTERNAL-ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT",
+ "CRASH"}
+
+
+class WdspecResult(Result):
+ default_expected = "OK"
+ statuses = {"OK", "ERROR", "INTERNAL-ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT", "CRASH"}
+
+
+class WdspecSubtestResult(SubtestResult):
+ default_expected = "PASS"
+ statuses = {"PASS", "FAIL", "ERROR"}
+
+
+class CrashtestResult(Result):
+ default_expected = "PASS"
+ statuses = {"PASS", "ERROR", "INTERNAL-ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT",
+ "CRASH"}
+
+
+def get_run_info(metadata_root, product, **kwargs):
+ return RunInfo(metadata_root, product, **kwargs)
+
+
+class RunInfo(Dict[str, Any]):
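+    # A dictionary of properties describing the test run (product, mozinfo
+    # system data, debug/headless flags, and so on); among other things it is
+    # used to evaluate conditional values in expectation metadata.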
+ def __init__(self, metadata_root, product, debug,
+ browser_version=None,
+ browser_channel=None,
+ verify=None,
+ extras=None,
+ device_serials=None,
+ adb_binary=None):
+ import mozinfo
+ self._update_mozinfo(metadata_root)
+ self.update(mozinfo.info)
+
+ from .update.tree import GitTree
+ try:
+ # GitTree.__init__ throws if we are not in a git tree.
+ rev = GitTree(log_error=False).rev
+ except (OSError, subprocess.CalledProcessError):
+ rev = None
+ if rev:
+ self["revision"] = rev.decode("utf-8")
+
+ self["python_version"] = sys.version_info.major
+ self["product"] = product
+ if debug is not None:
+ self["debug"] = debug
+ elif "debug" not in self:
+ # Default to release
+ self["debug"] = False
+ if browser_version:
+ self["browser_version"] = browser_version
+ if browser_channel:
+ self["browser_channel"] = browser_channel
+
+ self["verify"] = verify
+ if "wasm" not in self:
+ self["wasm"] = False
+ if extras is not None:
+ self.update(extras)
+ if "headless" not in self:
+ self["headless"] = False
+
+ if adb_binary:
+ self["adb_binary"] = adb_binary
+ if device_serials:
+ # Assume all emulators are identical, so query an arbitrary one.
+ self._update_with_emulator_info(device_serials[0])
+ self.pop("linux_distro", None)
+
+ def _adb_run(self, device_serial, args, **kwargs):
+ adb_binary = self.get("adb_binary", "adb")
+ cmd = [adb_binary, "-s", device_serial, *args]
+ return subprocess.check_output(cmd, **kwargs)
+
+ def _adb_get_property(self, device_serial, prop, **kwargs):
+ args = ["shell", "getprop", prop]
+ value = self._adb_run(device_serial, args, **kwargs)
+ return value.strip()
+
+ def _update_with_emulator_info(self, device_serial):
+ """Override system info taken from the host if using an Android
+ emulator."""
+ try:
+ self._adb_run(device_serial, ["wait-for-device"])
+ emulator_info = {
+ "os": "android",
+ "os_version": self._adb_get_property(
+ device_serial,
+ "ro.build.version.release",
+ encoding="utf-8",
+ ),
+ }
+ emulator_info["version"] = emulator_info["os_version"]
+
+ # Detect CPU info (https://developer.android.com/ndk/guides/abis#sa)
+ abi64, *_ = self._adb_get_property(
+ device_serial,
+ "ro.product.cpu.abilist64",
+ encoding="utf-8",
+ ).split(',')
+ if abi64:
+ emulator_info["processor"] = abi64
+ emulator_info["bits"] = 64
+ else:
+ emulator_info["processor"], *_ = self._adb_get_property(
+ device_serial,
+ "ro.product.cpu.abilist32",
+ encoding="utf-8",
+ ).split(',')
+ emulator_info["bits"] = 32
+
+ self.update(emulator_info)
+ except (OSError, subprocess.CalledProcessError):
+ pass
+
+ def _update_mozinfo(self, metadata_root):
+ """Add extra build information from a mozinfo.json file in a parent
+ directory"""
+ import mozinfo
+
+ path = metadata_root
+ dirs = set()
+ while path != os.path.expanduser('~'):
+ if path in dirs:
+ break
+ dirs.add(str(path))
+ path = os.path.dirname(path)
+
+ mozinfo.find_and_update_from_json(*dirs)
+
+
+def server_protocol(manifest_item):
+ if hasattr(manifest_item, "h2") and manifest_item.h2:
+ return "h2"
+ if hasattr(manifest_item, "https") and manifest_item.https:
+ return "https"
+ return "http"
+
+
+class Test:
+
+ result_cls = None # type: ClassVar[Type[Result]]
+ subtest_result_cls = None # type: ClassVar[Type[SubtestResult]]
+ test_type = None # type: ClassVar[str]
+ pac = None
+
+ default_timeout = 10 # seconds
+ long_timeout = 60 # seconds
+
+ def __init__(self, url_base, tests_root, url, inherit_metadata, test_metadata,
+ timeout=None, path=None, protocol="http", subdomain=False, pac=None):
+ self.url_base = url_base
+ self.tests_root = tests_root
+ self.url = url
+ self._inherit_metadata = inherit_metadata
+ self._test_metadata = test_metadata
+ self.timeout = timeout if timeout is not None else self.default_timeout
+ self.path = path
+
+ self.subdomain = subdomain
+ self.environment = {"url_base": url_base,
+ "protocol": protocol,
+ "prefs": self.prefs}
+
+ if pac is not None:
+ self.environment["pac"] = urljoin(self.url, pac)
+
+ def __eq__(self, other):
+ if not isinstance(other, Test):
+ return False
+ return self.id == other.id
+
+ # Python 2 does not have this delegation, while Python 3 does.
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def update_metadata(self, metadata=None):
+ if metadata is None:
+ metadata = {}
+ return metadata
+
+ @classmethod
+ def from_manifest(cls, manifest_file, manifest_item, inherit_metadata, test_metadata):
+ timeout = cls.long_timeout if manifest_item.timeout == "long" else cls.default_timeout
+ return cls(manifest_file.url_base,
+ manifest_file.tests_root,
+ manifest_item.url,
+ inherit_metadata,
+ test_metadata,
+ timeout=timeout,
+ path=os.path.join(manifest_file.tests_root, manifest_item.path),
+ protocol=server_protocol(manifest_item),
+ subdomain=manifest_item.subdomain)
+
+ @property
+ def id(self):
+ return self.url
+
+ @property
+ def keys(self):
+ return tuple()
+
+ @property
+ def abs_path(self):
+ return os.path.join(self.tests_root, self.path)
+
+ def _get_metadata(self, subtest=None):
+ if self._test_metadata is not None and subtest is not None:
+ return self._test_metadata.get_subtest(subtest)
+ else:
+ return self._test_metadata
+
+ def itermeta(self, subtest=None):
+ if self._test_metadata is not None:
+ if subtest is not None:
+ subtest_meta = self._get_metadata(subtest)
+ if subtest_meta is not None:
+ yield subtest_meta
+ yield self._get_metadata()
+ yield from reversed(self._inherit_metadata)
+
+ def disabled(self, subtest=None):
+ for meta in self.itermeta(subtest):
+ disabled = meta.disabled
+ if disabled is not None:
+ return disabled
+ return None
+
+ @property
+ def restart_after(self):
+ for meta in self.itermeta(None):
+ restart_after = meta.restart_after
+ if restart_after is not None:
+ return True
+ return False
+
+ @property
+ def leaks(self):
+ for meta in self.itermeta(None):
+ leaks = meta.leaks
+ if leaks is not None:
+ return leaks
+ return False
+
+ @property
+ def min_assertion_count(self):
+ for meta in self.itermeta(None):
+ count = meta.min_assertion_count
+ if count is not None:
+ return count
+ return 0
+
+ @property
+ def max_assertion_count(self):
+ for meta in self.itermeta(None):
+ count = meta.max_assertion_count
+ if count is not None:
+ return count
+ return 0
+
+ @property
+ def lsan_disabled(self):
+ for meta in self.itermeta():
+ if meta.lsan_disabled is not None:
+ return meta.lsan_disabled
+ return False
+
+ @property
+ def lsan_allowed(self):
+ lsan_allowed = set()
+ for meta in self.itermeta():
+ lsan_allowed |= meta.lsan_allowed
+ if atom_reset in lsan_allowed:
+ lsan_allowed.remove(atom_reset)
+ break
+ return lsan_allowed
+
+ @property
+ def lsan_max_stack_depth(self):
+ for meta in self.itermeta(None):
+ depth = meta.lsan_max_stack_depth
+ if depth is not None:
+ return depth
+ return None
+
+ @property
+ def mozleak_allowed(self):
+ mozleak_allowed = set()
+ for meta in self.itermeta():
+ mozleak_allowed |= meta.leak_allowed
+ if atom_reset in mozleak_allowed:
+ mozleak_allowed.remove(atom_reset)
+ break
+ return mozleak_allowed
+
+ @property
+ def mozleak_threshold(self):
+ rv = {}
+ for meta in self.itermeta(None):
+ threshold = meta.leak_threshold
+ for key, value in threshold.items():
+ if key not in rv:
+ rv[key] = value
+ return rv
+
+ @property
+ def tags(self):
+ tags = set()
+ for meta in self.itermeta():
+ meta_tags = meta.tags
+ tags |= meta_tags
+ if atom_reset in meta_tags:
+ tags.remove(atom_reset)
+ break
+
+ tags.add("dir:%s" % self.id.lstrip("/").split("/")[0])
+
+ return tags
+
+ @property
+ def prefs(self):
+ prefs = {}
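+        # Walk the metadata from least to most specific; a reset atom in a
+        # node's prefs discards anything accumulated so far.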
+ for meta in reversed(list(self.itermeta())):
+ meta_prefs = meta.prefs
+ if atom_reset in meta_prefs:
+ del meta_prefs[atom_reset]
+ prefs = {}
+ prefs.update(meta_prefs)
+ return prefs
+
+ def expected(self, subtest=None):
+ if subtest is None:
+ default = self.result_cls.default_expected
+ else:
+ default = self.subtest_result_cls.default_expected
+
+ metadata = self._get_metadata(subtest)
+ if metadata is None:
+ return default
+
+ try:
+ expected = metadata.get("expected")
+ if isinstance(expected, str):
+ return expected
+ elif isinstance(expected, list):
+ return expected[0]
+ elif expected is None:
+ return default
+ except KeyError:
+ return default
+
+ def implementation_status(self):
+ implementation_status = None
+ for meta in self.itermeta():
+ implementation_status = meta.implementation_status
+ if implementation_status:
+ return implementation_status
+
+ # assuming no specific case, we are implementing it
+ return "implementing"
+
+ def known_intermittent(self, subtest=None):
+ metadata = self._get_metadata(subtest)
+ if metadata is None:
+ return []
+
+ try:
+ expected = metadata.get("expected")
+ if isinstance(expected, list):
+ return expected[1:]
+ return []
+ except KeyError:
+ return []
+
+ def __repr__(self):
+ return f"<{self.__module__}.{self.__class__.__name__} {self.id}>"
+
+
+class TestharnessTest(Test):
+ result_cls = TestharnessResult
+ subtest_result_cls = TestharnessSubtestResult
+ test_type = "testharness"
+
+ def __init__(self, url_base, tests_root, url, inherit_metadata, test_metadata,
+ timeout=None, path=None, protocol="http", testdriver=False,
+ jsshell=False, scripts=None, subdomain=False, pac=None):
+ Test.__init__(self, url_base, tests_root, url, inherit_metadata, test_metadata, timeout,
+ path, protocol, subdomain, pac)
+
+ self.testdriver = testdriver
+ self.jsshell = jsshell
+ self.scripts = scripts or []
+
+ @classmethod
+ def from_manifest(cls, manifest_file, manifest_item, inherit_metadata, test_metadata):
+ timeout = cls.long_timeout if manifest_item.timeout == "long" else cls.default_timeout
+ pac = manifest_item.pac
+ testdriver = manifest_item.testdriver if hasattr(manifest_item, "testdriver") else False
+ jsshell = manifest_item.jsshell if hasattr(manifest_item, "jsshell") else False
+ script_metadata = manifest_item.script_metadata or []
+ scripts = [v for (k, v) in script_metadata
+ if k == "script"]
+ return cls(manifest_file.url_base,
+ manifest_file.tests_root,
+ manifest_item.url,
+ inherit_metadata,
+ test_metadata,
+ timeout=timeout,
+ pac=pac,
+ path=os.path.join(manifest_file.tests_root, manifest_item.path),
+ protocol=server_protocol(manifest_item),
+ testdriver=testdriver,
+ jsshell=jsshell,
+ scripts=scripts,
+ subdomain=manifest_item.subdomain)
+
+ @property
+ def id(self):
+ return self.url
+
+
+class ManualTest(Test):
+ test_type = "manual"
+
+ @property
+ def id(self):
+ return self.url
+
+
+class ReftestTest(Test):
+ """A reftest
+
+ A reftest should be considered to pass if one of its references matches (see below) *and* the
+ reference passes if it has any references recursively.
+
+ Attributes:
+ references (List[Tuple[str, str]]): a list of alternate references, where one must match for the test to pass
+ viewport_size (Optional[Tuple[int, int]]): size of the viewport for this test, if not default
+ dpi (Optional[int]): dpi to use when rendering this test, if not default
+
+ """
+ result_cls = ReftestResult
+ test_type = "reftest"
+
+ def __init__(self, url_base, tests_root, url, inherit_metadata, test_metadata, references,
+ timeout=None, path=None, viewport_size=None, dpi=None, fuzzy=None,
+ protocol="http", subdomain=False):
+ Test.__init__(self, url_base, tests_root, url, inherit_metadata, test_metadata, timeout,
+ path, protocol, subdomain)
+
+ for _, ref_type in references:
+ if ref_type not in ("==", "!="):
+ raise ValueError
+
+ self.references = references
+ self.viewport_size = self.get_viewport_size(viewport_size)
+ self.dpi = dpi
+ self._fuzzy = fuzzy or {}
+
+ @classmethod
+ def cls_kwargs(cls, manifest_test):
+ return {"viewport_size": manifest_test.viewport_size,
+ "dpi": manifest_test.dpi,
+ "protocol": server_protocol(manifest_test),
+ "fuzzy": manifest_test.fuzzy}
+
+ @classmethod
+ def from_manifest(cls,
+ manifest_file,
+ manifest_test,
+ inherit_metadata,
+ test_metadata):
+
+ timeout = cls.long_timeout if manifest_test.timeout == "long" else cls.default_timeout
+
+ url = manifest_test.url
+
+ node = cls(manifest_file.url_base,
+ manifest_file.tests_root,
+ manifest_test.url,
+ inherit_metadata,
+ test_metadata,
+ [],
+ timeout=timeout,
+ path=manifest_test.path,
+ subdomain=manifest_test.subdomain,
+ **cls.cls_kwargs(manifest_test))
+
+ refs_by_type = defaultdict(list)
+
+ for ref_url, ref_type in manifest_test.references:
+ refs_by_type[ref_type].append(ref_url)
+
+ # Construct a list of all the mismatches, where we end up with mismatch_1 != url !=
+ # mismatch_2 != url != mismatch_3 etc.
+ #
+ # Per the logic documented above, this means that none of the mismatches provided match,
+ mismatch_walk = None
+ if refs_by_type["!="]:
+ mismatch_walk = ReftestTest(manifest_file.url_base,
+ manifest_file.tests_root,
+ refs_by_type["!="][0],
+ [],
+ None,
+ [])
+ cmp_ref = mismatch_walk
+ for ref_url in refs_by_type["!="][1:]:
+ cmp_self = ReftestTest(manifest_file.url_base,
+ manifest_file.tests_root,
+ url,
+ [],
+ None,
+ [])
+ cmp_ref.references.append((cmp_self, "!="))
+ cmp_ref = ReftestTest(manifest_file.url_base,
+ manifest_file.tests_root,
+ ref_url,
+ [],
+ None,
+ [])
+ cmp_self.references.append((cmp_ref, "!="))
+
+ if mismatch_walk is None:
+ mismatch_refs = []
+ else:
+ mismatch_refs = [(mismatch_walk, "!=")]
+
+ if refs_by_type["=="]:
+ # For each == ref, add a reference to this node whose tail is the mismatch list.
+ # Per the logic documented above, this means any one of the matches must pass plus all the mismatches.
+ for ref_url in refs_by_type["=="]:
+ ref = ReftestTest(manifest_file.url_base,
+ manifest_file.tests_root,
+ ref_url,
+ [],
+ None,
+ mismatch_refs)
+ node.references.append((ref, "=="))
+ else:
+ # Otherwise, we just add the mismatches directly as we are immediately into the
+ # mismatch chain with no alternates.
+ node.references.extend(mismatch_refs)
+
+ return node
+
+ def update_metadata(self, metadata):
+ if "url_count" not in metadata:
+ metadata["url_count"] = defaultdict(int)
+ for reference, _ in self.references:
+ # We assume a naive implementation in which a url with multiple
+ # possible screenshots will need to take both the lhs and rhs screenshots
+ # for each possible match
+ metadata["url_count"][(self.environment["protocol"], reference.url)] += 1
+ reference.update_metadata(metadata)
+ return metadata
+
+ def get_viewport_size(self, override):
+ return override
+
+ @property
+ def id(self):
+ return self.url
+
+ @property
+ def keys(self):
+ return ("reftype", "refurl")
+
+ @property
+ def fuzzy(self):
+ return self._fuzzy
+
+ @property
+ def fuzzy_override(self):
+ values = {}
+ for meta in reversed(list(self.itermeta(None))):
+ value = meta.fuzzy
+ if not value:
+ continue
+ if atom_reset in value:
+ value.remove(atom_reset)
+ values = {}
+ for key, data in value:
+ if isinstance(key, (tuple, list)):
+ key = list(key)
+ key[0] = urljoin(self.url, key[0])
+ key[1] = urljoin(self.url, key[1])
+ key = tuple(key)
+ elif key:
+ # Key is just a relative url to a ref
+ key = urljoin(self.url, key)
+ values[key] = data
+ return values
+
+ @property
+ def page_ranges(self):
+ return {}
+
+
+class PrintReftestTest(ReftestTest):
+ test_type = "print-reftest"
+
+ def __init__(self, url_base, tests_root, url, inherit_metadata, test_metadata, references,
+ timeout=None, path=None, viewport_size=None, dpi=None, fuzzy=None,
+ page_ranges=None, protocol="http", subdomain=False):
+ super().__init__(url_base, tests_root, url, inherit_metadata, test_metadata,
+ references, timeout, path, viewport_size, dpi,
+ fuzzy, protocol, subdomain=subdomain)
+ self._page_ranges = page_ranges
+
+ @classmethod
+ def cls_kwargs(cls, manifest_test):
+ rv = super().cls_kwargs(manifest_test)
+ rv["page_ranges"] = manifest_test.page_ranges
+ return rv
+
+ def get_viewport_size(self, override):
+ assert override is None
+ return (5*2.54, 3*2.54)
+
+ @property
+ def page_ranges(self):
+ return self._page_ranges
+
+
+class WdspecTest(Test):
+ result_cls = WdspecResult
+ subtest_result_cls = WdspecSubtestResult
+ test_type = "wdspec"
+
+ default_timeout = 25
+ long_timeout = 180 # 3 minutes
+
+
+class CrashTest(Test):
+ result_cls = CrashtestResult
+ test_type = "crashtest"
+
+
+manifest_test_cls = {"reftest": ReftestTest,
+ "print-reftest": PrintReftestTest,
+ "testharness": TestharnessTest,
+ "manual": ManualTest,
+ "wdspec": WdspecTest,
+ "crashtest": CrashTest}
+
+
+def from_manifest(manifest_file, manifest_test, inherit_metadata, test_metadata):
+ test_cls = manifest_test_cls[manifest_test.item_type]
+ return test_cls.from_manifest(manifest_file, manifest_test, inherit_metadata, test_metadata)
diff --git a/testing/web-platform/tests/tools/wptserve/.coveragerc b/testing/web-platform/tests/tools/wptserve/.coveragerc
new file mode 100644
index 0000000000..0e00c079f6
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/.coveragerc
@@ -0,0 +1,11 @@
+[run]
+branch = True
+parallel = True
+omit =
+ */site-packages/*
+ */lib_pypy/*
+
+[paths]
+wptserve =
+ wptserve
+ .tox/**/site-packages/wptserve
diff --git a/testing/web-platform/tests/tools/wptserve/.gitignore b/testing/web-platform/tests/tools/wptserve/.gitignore
new file mode 100644
index 0000000000..8e87d38848
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/.gitignore
@@ -0,0 +1,40 @@
+*.py[cod]
+*~
+\#*
+
+docs/_build/
+
+# C extensions
+*.so
+
+# Packages
+*.egg
+*.egg-info
+dist
+build
+eggs
+parts
+bin
+var
+sdist
+develop-eggs
+.installed.cfg
+lib
+lib64
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage
+.tox
+nosetests.xml
+tests/functional/html/*
+
+# Translations
+*.mo
+
+# Mr Developer
+.mr.developer.cfg
+.project
+.pydevproject
diff --git a/testing/web-platform/tests/tools/wptserve/MANIFEST.in b/testing/web-platform/tests/tools/wptserve/MANIFEST.in
new file mode 100644
index 0000000000..4bf4483522
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/MANIFEST.in
@@ -0,0 +1 @@
+include README.md \ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wptserve/README.md b/testing/web-platform/tests/tools/wptserve/README.md
new file mode 100644
index 0000000000..6821dee38a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/README.md
@@ -0,0 +1,6 @@
+wptserve
+========
+
+Web server designed for use with web-platform-tests.
+
+See the docs on [web-platform-tests.org](https://web-platform-tests.org/tools/wptserve/docs/index.html).
diff --git a/testing/web-platform/tests/tools/wptserve/docs/Makefile b/testing/web-platform/tests/tools/wptserve/docs/Makefile
new file mode 100644
index 0000000000..250b6c8647
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/docs/Makefile
@@ -0,0 +1,153 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/wptserve.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/wptserve.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/wptserve"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/wptserve"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/testing/web-platform/tests/tools/wptserve/docs/conf.py b/testing/web-platform/tests/tools/wptserve/docs/conf.py
new file mode 100644
index 0000000000..686eb4fc24
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/docs/conf.py
@@ -0,0 +1,242 @@
+#
+# wptserve documentation build configuration file, created by
+# sphinx-quickstart on Wed Aug 14 17:23:24 2013.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+sys.path.insert(0, os.path.abspath(".."))
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'wptserve'
+copyright = '2013, Mozilla Foundation and other wptserve contributors'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '0.1'
+# The full version, including alpha/beta/rc tags.
+release = '0.1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'wptservedoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'wptserve.tex', 'wptserve Documentation',
+ 'James Graham', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'wptserve', 'wptserve Documentation',
+ ['James Graham'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'wptserve', 'wptserve Documentation',
+ 'James Graham', 'wptserve', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
diff --git a/testing/web-platform/tests/tools/wptserve/docs/handlers.rst b/testing/web-platform/tests/tools/wptserve/docs/handlers.rst
new file mode 100644
index 0000000000..8ecc933288
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/docs/handlers.rst
@@ -0,0 +1,108 @@
+Handlers
+========
+
+Handlers are functions that have the general signature::
+
+ handler(request, response)
+
+It is expected that the handler will use information from
+the request (e.g. the path) either to populate the response
+object with the data to send, or to directly write to the
+output stream via the ResponseWriter instance associated with
+the request. If a handler writes to the output stream then the
+server will not attempt additional writes, i.e. the choice to write
+directly in the handler or not is all-or-nothing.
+
+A number of general-purpose handler functions are provided by default:
+
+.. _handlers.Python:
+
+Python Handlers
+---------------
+
+Python handlers are functions which provide a higher-level API over
+manually updating the response object, by causing the return value of
+the function to provide (part of) the response. There are four
+possible sets of values that may be returned::
+
+ ((status_code, reason), headers, content)
+ (status_code, headers, content)
+ (headers, content)
+ content
+
+Here `status_code` is an integer status code, `headers` is a list of (field
+name, value) pairs, and `content` is a string or an iterable returning strings.
+Such a function may also update the response manually. For example one may use
+`response.headers.set` to set a response header, and only return the content.
+One may even use this kind of handler, but manipulate the output socket
+directly, in which case the return value of the function, and the properties of
+the response object, will be ignored.
+
+The most common way to make a user function into a python handler is
+to use the provided `wptserve.handlers.handler` decorator::
+
+ from wptserve.handlers import handler
+
+ @handler
+ def test(request, response):
+      return [("X-Test", "PASS"), ("Content-Type", "text/plain")], "test"
+
+ #Later, assuming we have a Router object called 'router'
+
+ router.register("GET", "/test", test)
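+
+The same decorator works with the richer return forms listed above. As a
+sketch (the path, status code and body here are purely illustrative), a
+handler that sets an explicit status code could be written and registered in
+the same way::
+
+  from wptserve.handlers import handler
+
+  @handler
+  def created(request, response):
+      # (status_code, headers, content) form
+      return 201, [("Content-Type", "text/plain")], "Created"
+
+  router.register("POST", "/created", created)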
+
+JSON Handlers
+-------------
+
+This is a specialisation of the python handler type specifically
+designed to facilitate providing JSON responses. The API is largely
+the same as for a normal python handler, but the `content` part of the
+return value is JSON encoded, and a default Content-Type header of
+`application/json` is added. Again this handler is usually used as a
+decorator::
+
+ from wptserve.handlers import json_handler
+
+ @json_handler
+ def test(request, response):
+ return {"test": "PASS"}
+
+Python File Handlers
+--------------------
+
+Python file handlers are Python files which the server executes in response to
+requests made to the corresponding URL. This is hooked up to a route like
+``("*", "*.py", python_file_handler)``, meaning that any .py file will be
+treated as a handler file (note that this makes it easy to write unsafe
+handlers, particularly when running the server in a web-exposed setting).
+
+The Python files must define a single function `main` with the signature::
+
+ main(request, response)
+
+This function then behaves just like those described in
+:ref:`handlers.Python` above.
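+
+As a minimal sketch (the filename and header value are illustrative), a
+handler file served at ``/echo-method.py`` might contain::
+
+  def main(request, response):
+      return [("Content-Type", "text/plain")], request.method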
+
+asis Handlers
+-------------
+
+These are used to serve files as literal byte streams including the
+HTTP status line, headers and body. In the default configuration this
+handler is invoked for all files with a .asis extension.
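+
+For example, a file named ``accepted.asis`` (an arbitrary name) containing
+the following bytes would be sent to the client exactly as written, status
+line and all::
+
+  HTTP/1.1 202 Accepted
+  Content-Type: text/plain
+  Content-Length: 4
+
+  PASS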
+
+File Handlers
+-------------
+
+File handlers are used to serve static files. By default the content
+type of these files is set by examining the file extension. However
+this can be overridden, or additional headers supplied, by providing a
+file with the same name as the file being served but an additional
+.headers suffix, i.e. test.html has its headers set from
+test.html.headers. The format of the .headers file is plaintext, with
+each line containing::
+
+ Header-Name: header_value
+
+In addition headers can be set for a whole directory of files (but not
+subdirectories), using a file called `__dir__.headers`.
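+
+For example (the header values here are only illustrative), a file named
+``test.html.headers`` next to ``test.html`` might contain::
+
+  Content-Type: text/html; charset=utf-8
+  Cache-Control: no-store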
diff --git a/testing/web-platform/tests/tools/wptserve/docs/index.rst b/testing/web-platform/tests/tools/wptserve/docs/index.rst
new file mode 100644
index 0000000000..c6157b4f8c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/docs/index.rst
@@ -0,0 +1,27 @@
+.. wptserve documentation master file, created by
+ sphinx-quickstart on Wed Aug 14 17:23:24 2013.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+wptserve: Web Platform Test Server
+==================================
+
+A python-based HTTP server specifically targeted at being used for
+testing the web platform. This means that it supports extreme
+flexibility in the response, including the possibility of HTTP
+non-conformance.
+
+Contents:
+
+.. toctree::
+ :maxdepth: 2
+
+ introduction
+ server
+ router
+ request
+ response
+ stash
+ handlers
+ pipes
+
diff --git a/testing/web-platform/tests/tools/wptserve/docs/introduction.rst b/testing/web-platform/tests/tools/wptserve/docs/introduction.rst
new file mode 100644
index 0000000000..b585a983a2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/docs/introduction.rst
@@ -0,0 +1,51 @@
+Introduction
+============
+
+wptserve has been designed with the specific goal of making a server
+that is suitable for writing tests for the web platform. This means
+that it cannot use common abstractions over HTTP such as WSGI, since
+these assume that the goal is to generate a well-formed HTTP
+response. Testcases, however, often require precise control of the
+exact bytes sent over the wire and their timing. The full list of
+design goals for the server is:
+
+* Suitable to run on individual test machines and over the public internet.
+
+* Support plain TCP and SSL servers.
+
+* Serve static files with the minimum of configuration.
+
+* Allow headers to be overwritten on a per-file and per-directory
+ basis.
+
+* Full customisation of headers sent (e.g. altering or omitting
+ "mandatory" headers).
+
+* Simple per-client state.
+
+* Complex logic in tests, up to precise control over the individual
+ bytes sent and the timing of sending them.
+
+Request Handling
+----------------
+
+At the high level, the design of the server is based around similar
+concepts to those found in common web frameworks like Django, Pyramid
+or Flask. In particular the lifecycle of a typical request will be
+familiar to users of these systems. Incoming requests are parsed and a
+:doc:`Request <request>` object is constructed. This object is passed
+to a :ref:`Router <router.Interface>` instance, which is
+responsible for mapping the request method and path to a handler
+function. This handler is passed two arguments; the request object and
+a :doc:`Response <response>` object. In cases where only simple
+responses are required, the handler function may fill in the
+properties of the response object and the server will take care of
+constructing the response. However each Response also contains a
+:ref:`ResponseWriter <response.Interface>` which can be
+used to directly control the TCP socket.
+
+By default there are several built-in handler functions that provide a
+higher level API than direct manipulation of the Response
+object. These are documented in :doc:`handlers`.
+
+
diff --git a/testing/web-platform/tests/tools/wptserve/docs/make.bat b/testing/web-platform/tests/tools/wptserve/docs/make.bat
new file mode 100644
index 0000000000..40c71ff5dd
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/docs/make.bat
@@ -0,0 +1,190 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. texinfo to make Texinfo files
+ echo. gettext to make PO message catalogs
+	echo.  changes    to make an overview of all changed/added/deprecated items
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\wptserve.qhcp
+ echo.To view the help file:
+	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\wptserve.qhc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "texinfo" (
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+ goto end
+)
+
+if "%1" == "gettext" (
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+:end
diff --git a/testing/web-platform/tests/tools/wptserve/docs/pipes.rst b/testing/web-platform/tests/tools/wptserve/docs/pipes.rst
new file mode 100644
index 0000000000..1edbd44867
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/docs/pipes.rst
@@ -0,0 +1,8 @@
+Pipes
+======
+
+:mod:`Interface <wptserve.pipes>`
+---------------------------------
+
+.. automodule:: wptserve.pipes
+ :members:
diff --git a/testing/web-platform/tests/tools/wptserve/docs/request.rst b/testing/web-platform/tests/tools/wptserve/docs/request.rst
new file mode 100644
index 0000000000..ef5b8a0c08
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/docs/request.rst
@@ -0,0 +1,10 @@
+Request
+=======
+
+Request object.
+
+:mod:`Interface <wptserve.request>`
+-----------------------------------
+
+.. automodule:: wptserve.request
+ :members:
diff --git a/testing/web-platform/tests/tools/wptserve/docs/response.rst b/testing/web-platform/tests/tools/wptserve/docs/response.rst
new file mode 100644
index 0000000000..0c2f45ce26
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/docs/response.rst
@@ -0,0 +1,41 @@
+Response
+========
+
+Response object. This object is used to control the response that will
+be sent to the HTTP client. A handler function will take the response
+object and fill in various parts of the response. For example, a plain
+text response with the body 'Some example content' could be produced as::
+
+ def handler(request, response):
+ response.headers.set("Content-Type", "text/plain")
+ response.content = "Some example content"
+
+The response object also gives access to a ResponseWriter, which
+allows direct access to the response socket. For example, one could
+write a similar response but with more explicit control as follows::
+
+ import time
+
+ def handler(request, response):
+ response.add_required_headers = False # Don't implicitly add HTTP headers
+ response.writer.write_status(200)
+ response.writer.write_header("Content-Type", "text/plain")
+ response.writer.write_header("Content-Length", len("Some example content"))
+ response.writer.end_headers()
+ response.writer.write("Some ")
+ time.sleep(1)
+ response.writer.write("example content")
+
+Note that when writing the response directly like this it is always
+necessary to either set the Content-Length header or set
+`response.close_connection = True`. Without one of these, the client
+will not be able to determine where the response body ends and will
+continue to load indefinitely.
+
+.. _response.Interface:
+
+:mod:`Interface <wptserve.response>`
+------------------------------------
+
+.. automodule:: wptserve.response
+ :members:
diff --git a/testing/web-platform/tests/tools/wptserve/docs/router.rst b/testing/web-platform/tests/tools/wptserve/docs/router.rst
new file mode 100644
index 0000000000..986f581922
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/docs/router.rst
@@ -0,0 +1,78 @@
+Router
+======
+
+The router is used to match incoming requests to request handler
+functions. Typically users don't interact with the router directly,
+but instead send a list of routes to register when starting the
+server. However it is also possible to add routes after starting the
+server by calling the `register` method on the server's `router`
+property.
+
+Routes are represented by a three item tuple::
+
+ (methods, path_match, handler)
+
+`methods` is either a string or a list of strings indicating the HTTP
+methods to match. In cases where all methods should match there is a
+special sentinel value `any_method` provided as a property of the
+`router` module that can be used.
+
+`path_match` is an expression that will be evaluated against the
+request path to decide if the handler should match. These expressions
+follow a custom syntax intended to make matching URLs straightforward
+and, in particular, to be easier to use than raw regexp for URL
+matching. There are three possible components of a match expression:
+
+* Literals. These match any character. The special characters \*, \{
+ and \} must be escaped by prefixing them with a \\.
+
+* Match groups. These match any character other than / and save the
+ result as a named group. They are delimited by curly braces; for
+ example::
+
+ {abc}
+
+ would create a match group with the name `abc`.
+
+* Stars. These are denoted with a `*` and match any character
+ including /. There can be at most one star
+ per pattern and it must follow any match groups.
+
+Path expressions always match the entire request path and a leading /
+in the expression is implied even if it is not explicitly
+provided. This means that `/foo` and `foo` are equivalent.
+
+For example, the following pattern matches all requests for resources with the
+extension `.py`::
+
+ *.py
+
+The following expression matches anything directly under `/resources`
+with a `.html` extension, and places the "filename" in the `name`
+group::
+
+ /resources/{name}.html
+
+The groups, including anything that matches a `*`, are available in the
+request object through the `route_match` property. This is a
+dictionary mapping the group names, and any match for `*`, to the
+matching part of the route. For example, given a route::
+
+ /api/{sub_api}/*
+
+and the request path `/api/test/html/test.html`, `route_match` would
+be::
+
+ {"sub_api": "html", "*": "html/test.html"}
+
+`handler` is a function taking a request and a response object that is
+responsible for constructing the response to the HTTP request. See
+:doc:`handlers` for more details on handler functions.
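+
+Putting these pieces together, a route using a match group can be registered
+and its captured group read back from `route_match`. This is a minimal
+sketch, assuming `router` is the server's router (or a Router instance); the
+path and handler body are illustrative::
+
+  from wptserve.handlers import handler
+  from wptserve.router import any_method
+
+  @handler
+  def echo_name(request, response):
+      # The {name} group captured by the route is available on the request
+      return [("Content-Type", "text/plain")], request.route_match["name"]
+
+  router.register(any_method, "/resources/{name}.html", echo_name)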
+
+.. _router.Interface:
+
+:mod:`Interface <wptserve.router>`
+----------------------------------
+
+.. automodule:: wptserve.router
+ :members:
diff --git a/testing/web-platform/tests/tools/wptserve/docs/server.rst b/testing/web-platform/tests/tools/wptserve/docs/server.rst
new file mode 100644
index 0000000000..5688a0a3bc
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/docs/server.rst
@@ -0,0 +1,20 @@
+Server
+======
+
+Basic server classes and router.
+
+The following example creates a server that serves static files from
+the `files` subdirectory of the current directory and causes it to
+run on port 8080 until it is killed::
+
+ from wptserve import server, handlers
+
+ httpd = server.WebTestHttpd(port=8080, doc_root="./files/",
+ routes=[("GET", "*", handlers.file_handler)])
+ httpd.start(block=True)
+
+:mod:`Interface <wptserve.server>`
+----------------------------------
+
+.. automodule:: wptserve.server
+ :members:
diff --git a/testing/web-platform/tests/tools/wptserve/docs/stash.rst b/testing/web-platform/tests/tools/wptserve/docs/stash.rst
new file mode 100644
index 0000000000..6510a0f59c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/docs/stash.rst
@@ -0,0 +1,31 @@
+Stash
+=====
+
+Object for storing cross-request state. This is unusual in that keys
+must be UUIDs, in order to prevent different clients setting the same
+key, and values are write-once, read-once to minimize the chances of
+state persisting indefinitely. The stash defines two operations:
+`put`, to add state, and `take`, to remove state. Furthermore, the view
+of the stash is path-specific; by default a request will only see the
+part of the stash corresponding to its own path.
+
+A typical example of using a stash to store state might be::
+
+ @handler
+ def handler(request, response):
+ # We assume this is a string representing a UUID
+ key = request.GET.first("id")
+
+ if request.method == "POST":
+ request.server.stash.put(key, "Some sample value")
+ return "Added value to stash"
+ else:
+ value = request.server.stash.take(key)
+ assert request.server.stash.take(key) is None
+ return value
+
+:mod:`Interface <wptserve.stash>`
+---------------------------------
+
+.. automodule:: wptserve.stash
+ :members:
diff --git a/testing/web-platform/tests/tools/wptserve/setup.py b/testing/web-platform/tests/tools/wptserve/setup.py
new file mode 100644
index 0000000000..36081619b6
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/setup.py
@@ -0,0 +1,23 @@
+from setuptools import setup
+
+PACKAGE_VERSION = '3.0'
+deps = ["h2>=3.0.1"]
+
+setup(name='wptserve',
+ version=PACKAGE_VERSION,
+      description="Python web server intended for in-browser web testing",
+ long_description=open("README.md").read(),
+ # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ classifiers=["Development Status :: 5 - Production/Stable",
+ "License :: OSI Approved :: BSD License",
+ "Topic :: Internet :: WWW/HTTP :: HTTP Servers"],
+ keywords='',
+ author='James Graham',
+ author_email='james@hoppipolla.co.uk',
+ url='http://wptserve.readthedocs.org/',
+ license='BSD',
+ packages=['wptserve', 'wptserve.sslutils'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=deps
+ )
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/__init__.py b/testing/web-platform/tests/tools/wptserve/tests/functional/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/__init__.py
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/base.py b/testing/web-platform/tests/tools/wptserve/tests/functional/base.py
new file mode 100644
index 0000000000..be5dc0d102
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/base.py
@@ -0,0 +1,148 @@
+import base64
+import logging
+import os
+import unittest
+
+from urllib.parse import urlencode, urlunsplit
+from urllib.request import Request as BaseRequest
+from urllib.request import urlopen
+
+import httpx
+import pytest
+
+from localpaths import repo_root
+
+wptserve = pytest.importorskip("wptserve")
+
+logging.basicConfig()
+
+here = os.path.dirname(__file__)
+doc_root = os.path.join(here, "docroot")
+
+
+class Request(BaseRequest):
+ def __init__(self, *args, **kwargs):
+ BaseRequest.__init__(self, *args, **kwargs)
+ self.method = "GET"
+
+ def get_method(self):
+ return self.method
+
+ def add_data(self, data):
+ if hasattr(data, "items"):
+ data = urlencode(data).encode("ascii")
+
+ assert isinstance(data, bytes)
+
+ if hasattr(BaseRequest, "add_data"):
+ BaseRequest.add_data(self, data)
+ else:
+ self.data = data
+
+ self.add_header("Content-Length", str(len(data)))
+
+
+class TestUsingServer(unittest.TestCase):
+ def setUp(self):
+ self.server = wptserve.server.WebTestHttpd(host="localhost",
+ port=0,
+ use_ssl=False,
+ certificate=None,
+ doc_root=doc_root)
+ self.server.start()
+
+ def tearDown(self):
+ self.server.stop()
+
+ def abs_url(self, path, query=None):
+ return urlunsplit(("http", "%s:%i" % (self.server.host, self.server.port), path, query, None))
+
+ def request(self, path, query=None, method="GET", headers=None, body=None, auth=None):
+ req = Request(self.abs_url(path, query))
+ req.method = method
+ if headers is None:
+ headers = {}
+
+ for name, value in headers.items():
+ req.add_header(name, value)
+
+ if body is not None:
+ req.add_data(body)
+
+ if auth is not None:
+ req.add_header("Authorization", b"Basic %s" % base64.b64encode(b"%s:%s" % auth))
+
+ return urlopen(req)
+
+ def assert_multiple_headers(self, resp, name, values):
+ assert resp.info().get_all(name) == values
+
+
+@pytest.mark.skipif(not wptserve.utils.http2_compatible(), reason="h2 server requires OpenSSL 1.0.2+")
+class TestUsingH2Server:
+ def setup_method(self, test_method):
+ self.server = wptserve.server.WebTestHttpd(host="localhost",
+ port=0,
+ use_ssl=True,
+ doc_root=doc_root,
+ key_file=os.path.join(repo_root, "tools", "certs", "web-platform.test.key"),
+ certificate=os.path.join(repo_root, "tools", "certs", "web-platform.test.pem"),
+ handler_cls=wptserve.server.Http2WebTestRequestHandler,
+ http2=True)
+ self.server.start()
+
+ self.client = httpx.Client(base_url=f'https://{self.server.host}:{self.server.port}',
+ http2=True, verify=False)
+
+ def teardown_method(self, test_method):
+ self.server.stop()
+
+
+class TestWrapperHandlerUsingServer(TestUsingServer):
+    '''For a wrapper handler, a dummy .js test file is required to render
+    the HTML file. This class extends TestUsingServer and does some
+    extra work: it generates the dummy .js file in setUp and
+    removes it in tearDown.'''
+ dummy_files = {}
+
+ def gen_file(self, filename, empty=True, content=b''):
+ self.remove_file(filename)
+
+ with open(filename, 'wb') as fp:
+ if not empty:
+ fp.write(content)
+
+ def remove_file(self, filename):
+ if os.path.exists(filename):
+ os.remove(filename)
+
+ def setUp(self):
+ super().setUp()
+
+ for filename, content in self.dummy_files.items():
+ filepath = os.path.join(doc_root, filename)
+ if content == '':
+ self.gen_file(filepath)
+ else:
+ self.gen_file(filepath, False, content)
+
+ def run_wrapper_test(self, req_file, content_type, wrapper_handler,
+ headers=None):
+ route = ('GET', req_file, wrapper_handler())
+ self.server.router.register(*route)
+
+ resp = self.request(route[1])
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual(content_type, resp.info()['Content-Type'])
+ for key, val in headers or []:
+ self.assertEqual(val, resp.info()[key])
+
+ with open(os.path.join(doc_root, req_file), 'rb') as fp:
+ self.assertEqual(fp.read(), resp.read())
+
+ def tearDown(self):
+ super().tearDown()
+
+ for filename, _ in self.dummy_files.items():
+ filepath = os.path.join(doc_root, filename)
+ self.remove_file(filepath)
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/__init__.py b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/__init__.py
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/bar.any.worker.js b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/bar.any.worker.js
new file mode 100644
index 0000000000..baecd2ac54
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/bar.any.worker.js
@@ -0,0 +1,10 @@
+
+self.GLOBAL = {
+ isWindow: function() { return false; },
+ isWorker: function() { return true; },
+ isShadowRealm: function() { return false; },
+};
+importScripts("/resources/testharness.js");
+
+importScripts("/bar.any.js");
+done();
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/document.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/document.txt
new file mode 100644
index 0000000000..611dccd844
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/document.txt
@@ -0,0 +1 @@
+This is a test document
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.html b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.html
new file mode 100644
index 0000000000..8d64adc136
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.html
@@ -0,0 +1,15 @@
+<!doctype html>
+<meta charset=utf-8>
+
+<script>
+self.GLOBAL = {
+ isWindow: function() { return true; },
+ isWorker: function() { return false; },
+ isShadowRealm: function() { return false; },
+};
+</script>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+
+<div id=log></div>
+<script src="/foo.any.js"></script>
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.serviceworker.html b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.serviceworker.html
new file mode 100644
index 0000000000..8dcb11a376
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.serviceworker.html
@@ -0,0 +1,15 @@
+<!doctype html>
+<meta charset=utf-8>
+
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<div id=log></div>
+<script>
+(async function() {
+ const scope = 'does/not/exist';
+ let reg = await navigator.serviceWorker.getRegistration(scope);
+ if (reg) await reg.unregister();
+ reg = await navigator.serviceWorker.register("/foo.any.worker.js", {scope});
+ fetch_tests_from_worker(reg.installing);
+})();
+</script>
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.sharedworker.html b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.sharedworker.html
new file mode 100644
index 0000000000..277101697f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.sharedworker.html
@@ -0,0 +1,9 @@
+<!doctype html>
+<meta charset=utf-8>
+
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<div id=log></div>
+<script>
+fetch_tests_from_worker(new SharedWorker("/foo.any.worker.js"));
+</script>
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.worker.html b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.worker.html
new file mode 100644
index 0000000000..f77edd971a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.any.worker.html
@@ -0,0 +1,9 @@
+<!doctype html>
+<meta charset=utf-8>
+
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<div id=log></div>
+<script>
+fetch_tests_from_worker(new Worker("/foo.any.worker.js"));
+</script>
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.window.html b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.window.html
new file mode 100644
index 0000000000..04c694ddf2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.window.html
@@ -0,0 +1,8 @@
+<!doctype html>
+<meta charset=utf-8>
+
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+
+<div id=log></div>
+<script src="/foo.window.js"></script>
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.worker.html b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.worker.html
new file mode 100644
index 0000000000..3eddf36f1c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/foo.worker.html
@@ -0,0 +1,9 @@
+<!doctype html>
+<meta charset=utf-8>
+
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<div id=log></div>
+<script>
+fetch_tests_from_worker(new Worker("/foo.worker.js"));
+</script>
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/invalid.py b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/invalid.py
new file mode 100644
index 0000000000..99f7b72cee
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/invalid.py
@@ -0,0 +1,3 @@
+# Intentional syntax error in this file
+def main(request, response:
+ return "FAIL"
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/no_main.py b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/no_main.py
new file mode 100644
index 0000000000..cee379fe1d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/no_main.py
@@ -0,0 +1,3 @@
+# Oops...
+def mian(request, response):
+ return "FAIL"
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub.sub.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub.sub.txt
new file mode 100644
index 0000000000..4302db16a2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub.sub.txt
@@ -0,0 +1 @@
+{{host}} {{domains[]}} {{ports[http][0]}}
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub.txt
new file mode 100644
index 0000000000..4302db16a2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub.txt
@@ -0,0 +1 @@
+{{host}} {{domains[]}} {{ports[http][0]}}
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_file_hash.sub.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_file_hash.sub.txt
new file mode 100644
index 0000000000..369ac8ab31
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_file_hash.sub.txt
@@ -0,0 +1,6 @@
+md5: {{file_hash(md5, sub_file_hash_subject.txt)}}
+sha1: {{file_hash(sha1, sub_file_hash_subject.txt)}}
+sha224: {{file_hash(sha224, sub_file_hash_subject.txt)}}
+sha256: {{file_hash(sha256, sub_file_hash_subject.txt)}}
+sha384: {{file_hash(sha384, sub_file_hash_subject.txt)}}
+sha512: {{file_hash(sha512, sub_file_hash_subject.txt)}}
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_file_hash_subject.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_file_hash_subject.txt
new file mode 100644
index 0000000000..d567d28e8a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_file_hash_subject.txt
@@ -0,0 +1,2 @@
+This file is used to verify expected behavior of the `file_hash` "sub"
+function.
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_file_hash_unrecognized.sub.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_file_hash_unrecognized.sub.txt
new file mode 100644
index 0000000000..5f1281df5b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_file_hash_unrecognized.sub.txt
@@ -0,0 +1 @@
+{{file_hash(sha007, sub_file_hash_subject.txt)}}
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_header_or_default.sub.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_header_or_default.sub.txt
new file mode 100644
index 0000000000..f1f941aa16
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_header_or_default.sub.txt
@@ -0,0 +1,2 @@
+{{header_or_default(X-Present, present-default)}}
+{{header_or_default(X-Absent, absent-default)}}
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_headers.sub.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_headers.sub.txt
new file mode 100644
index 0000000000..ee021eb863
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_headers.sub.txt
@@ -0,0 +1 @@
+{{headers[X-Test]}}
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_headers.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_headers.txt
new file mode 100644
index 0000000000..ee021eb863
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_headers.txt
@@ -0,0 +1 @@
+{{headers[X-Test]}}
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_location.sub.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_location.sub.txt
new file mode 100644
index 0000000000..6129abd4db
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_location.sub.txt
@@ -0,0 +1,8 @@
+host: {{location[host]}}
+hostname: {{location[hostname]}}
+path: {{location[path]}}
+pathname: {{location[pathname]}}
+port: {{location[port]}}
+query: {{location[query]}}
+scheme: {{location[scheme]}}
+server: {{location[server]}}
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_params.sub.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_params.sub.txt
new file mode 100644
index 0000000000..4431c21fc5
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_params.sub.txt
@@ -0,0 +1 @@
+{{GET[plus pct-20 pct-3D=]}}
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_params.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_params.txt
new file mode 100644
index 0000000000..4431c21fc5
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_params.txt
@@ -0,0 +1 @@
+{{GET[plus pct-20 pct-3D=]}}
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_url_base.sub.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_url_base.sub.txt
new file mode 100644
index 0000000000..889cd07fe9
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_url_base.sub.txt
@@ -0,0 +1 @@
+Before {{url_base}} After
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_uuid.sub.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_uuid.sub.txt
new file mode 100644
index 0000000000..fd968fecf0
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_uuid.sub.txt
@@ -0,0 +1 @@
+Before {{uuid()}} After
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_var.sub.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_var.sub.txt
new file mode 100644
index 0000000000..9492ec15a6
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/sub_var.sub.txt
@@ -0,0 +1 @@
+{{$first:host}} {{$second:ports[http][0]}} A {{$second}} B {{$first}} C
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/__init__.py b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/__init__.py
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/example_module.py b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/example_module.py
new file mode 100644
index 0000000000..b8e5c350ae
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/example_module.py
@@ -0,0 +1,2 @@
+def module_function():
+ return [("Content-Type", "text/plain")], "PASS"
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/file.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/file.txt
new file mode 100644
index 0000000000..06d84d30d5
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/file.txt
@@ -0,0 +1 @@
+I am here to ensure that my containing directory exists.
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/import_handler.py b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/import_handler.py
new file mode 100644
index 0000000000..e63395e273
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/import_handler.py
@@ -0,0 +1,5 @@
+from subdir import example_module
+
+
+def main(request, response):
+ return example_module.module_function()
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/sub_path.sub.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/sub_path.sub.txt
new file mode 100644
index 0000000000..44027f2855
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/subdir/sub_path.sub.txt
@@ -0,0 +1,3 @@
+{{fs_path(sub_path.sub.txt)}}
+{{fs_path(../sub_path.sub.txt)}}
+{{fs_path(/sub_path.sub.txt)}}
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test.asis b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test.asis
new file mode 100644
index 0000000000..b05ba7da80
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test.asis
@@ -0,0 +1,5 @@
+HTTP/1.1 202 Giraffe
+X-TEST: PASS
+Content-Length: 7
+
+Content
\ No newline at end of file
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_h2_data.py b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_h2_data.py
new file mode 100644
index 0000000000..9770a5a8aa
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_h2_data.py
@@ -0,0 +1,2 @@
+def handle_data(frame, request, response):
+ response.content.append(frame.data.swapcase())
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_h2_headers.py b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_h2_headers.py
new file mode 100644
index 0000000000..60e72d9492
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_h2_headers.py
@@ -0,0 +1,3 @@
+def handle_headers(frame, request, response):
+ response.status = 203
+ response.headers.update([('test', 'passed')])
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_h2_headers_data.py b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_h2_headers_data.py
new file mode 100644
index 0000000000..32855093e1
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_h2_headers_data.py
@@ -0,0 +1,6 @@
+def handle_headers(frame, request, response):
+ response.status = 203
+ response.headers.update([('test', 'passed')])
+
+def handle_data(frame, request, response):
+ response.content.append(frame.data.swapcase())
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_string.py b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_string.py
new file mode 100644
index 0000000000..8fa605bb18
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_string.py
@@ -0,0 +1,3 @@
+def main(request, response):
+ response.headers.set("Content-Type", "text/plain")
+ return "PASS"
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_tuple_2.py b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_tuple_2.py
new file mode 100644
index 0000000000..fa791fbddd
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_tuple_2.py
@@ -0,0 +1,2 @@
+def main(request, response):
+ return [("Content-Type", "text/html"), ("X-Test", "PASS")], "PASS"
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_tuple_3.py b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_tuple_3.py
new file mode 100644
index 0000000000..2c2656d047
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/test_tuple_3.py
@@ -0,0 +1,2 @@
+def main(request, response):
+ return (202, "Giraffe"), [("Content-Type", "text/html"), ("X-Test", "PASS")], "PASS"
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/with_headers.txt b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/with_headers.txt
new file mode 100644
index 0000000000..45ce1a0790
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/with_headers.txt
@@ -0,0 +1 @@
+Test document with custom headers
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/with_headers.txt.sub.headers b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/with_headers.txt.sub.headers
new file mode 100644
index 0000000000..71494fccf1
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/docroot/with_headers.txt.sub.headers
@@ -0,0 +1,6 @@
+Custom-Header: PASS
+Another-Header: {{$id:uuid()}}
+Same-Value-Header: {{$id}}
+Double-Header: PA
+Double-Header: SS
+Content-Type: text/html
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/test_cookies.py b/testing/web-platform/tests/tools/wptserve/tests/functional/test_cookies.py
new file mode 100644
index 0000000000..64eab2d806
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/test_cookies.py
@@ -0,0 +1,66 @@
+import unittest
+
+import pytest
+
+wptserve = pytest.importorskip("wptserve")
+from .base import TestUsingServer
+
+
+class TestResponseSetCookie(TestUsingServer):
+ def test_name_value(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.set_cookie(b"name", b"value")
+ return "Test"
+
+ route = ("GET", "/test/name_value", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+
+ self.assertEqual(resp.info()["Set-Cookie"], "name=value; Path=/")
+
+ def test_unset(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.set_cookie(b"name", b"value")
+ response.unset_cookie(b"name")
+ return "Test"
+
+ route = ("GET", "/test/unset", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+
+ self.assertTrue("Set-Cookie" not in resp.info())
+
+ def test_delete(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.delete_cookie(b"name")
+ return "Test"
+
+ route = ("GET", "/test/delete", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+
+ parts = dict(item.split("=") for
+ item in resp.info()["Set-Cookie"].split("; ") if item)
+
+ self.assertEqual(parts["name"], "")
+ self.assertEqual(parts["Path"], "/")
+ # TODO: Should also check that expires is in the past
+
+
+class TestRequestCookies(TestUsingServer):
+ def test_set_cookie(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return request.cookies[b"name"].value
+
+ route = ("GET", "/test/set_cookie", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1], headers={"Cookie": "name=value"})
+ self.assertEqual(resp.read(), b"value")
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/test_handlers.py b/testing/web-platform/tests/tools/wptserve/tests/functional/test_handlers.py
new file mode 100644
index 0000000000..26a9f797ec
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/test_handlers.py
@@ -0,0 +1,439 @@
+import json
+import os
+import sys
+import unittest
+import uuid
+
+import pytest
+from urllib.error import HTTPError
+
+wptserve = pytest.importorskip("wptserve")
+from .base import TestUsingServer, TestUsingH2Server, doc_root
+from .base import TestWrapperHandlerUsingServer
+
+from serve import serve
+
+
+class TestFileHandler(TestUsingServer):
+ def test_GET(self):
+ resp = self.request("/document.txt")
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual("text/plain", resp.info()["Content-Type"])
+ self.assertEqual(open(os.path.join(doc_root, "document.txt"), 'rb').read(), resp.read())
+
+ def test_headers(self):
+ resp = self.request("/with_headers.txt")
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual("text/html", resp.info()["Content-Type"])
+ self.assertEqual("PASS", resp.info()["Custom-Header"])
+ # This will fail if it isn't a valid uuid
+ uuid.UUID(resp.info()["Another-Header"])
+ self.assertEqual(resp.info()["Same-Value-Header"], resp.info()["Another-Header"])
+ self.assert_multiple_headers(resp, "Double-Header", ["PA", "SS"])
+
+
+ def test_range(self):
+ resp = self.request("/document.txt", headers={"Range":"bytes=10-19"})
+ self.assertEqual(206, resp.getcode())
+ data = resp.read()
+ expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
+ self.assertEqual(10, len(data))
+ self.assertEqual("bytes 10-19/%i" % len(expected), resp.info()['Content-Range'])
+ self.assertEqual("10", resp.info()['Content-Length'])
+ self.assertEqual(expected[10:20], data)
+
+ def test_range_no_end(self):
+ resp = self.request("/document.txt", headers={"Range":"bytes=10-"})
+ self.assertEqual(206, resp.getcode())
+ data = resp.read()
+ expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
+ self.assertEqual(len(expected) - 10, len(data))
+ self.assertEqual("bytes 10-%i/%i" % (len(expected) - 1, len(expected)), resp.info()['Content-Range'])
+ self.assertEqual(expected[10:], data)
+
+ def test_range_no_start(self):
+ resp = self.request("/document.txt", headers={"Range":"bytes=-10"})
+ self.assertEqual(206, resp.getcode())
+ data = resp.read()
+ expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
+ self.assertEqual(10, len(data))
+ self.assertEqual("bytes %i-%i/%i" % (len(expected) - 10, len(expected) - 1, len(expected)),
+ resp.info()['Content-Range'])
+ self.assertEqual(expected[-10:], data)
+
+ def test_multiple_ranges(self):
+ resp = self.request("/document.txt", headers={"Range":"bytes=1-2,5-7,6-10"})
+ self.assertEqual(206, resp.getcode())
+ data = resp.read()
+ expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
+ self.assertTrue(resp.info()["Content-Type"].startswith("multipart/byteranges; boundary="))
+ boundary = resp.info()["Content-Type"].split("boundary=")[1]
+ parts = data.split(b"--" + boundary.encode("ascii"))
+ self.assertEqual(b"\r\n", parts[0])
+ self.assertEqual(b"--", parts[-1])
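+        # The request asks for bytes 1-2, 5-7 and 6-10; the overlapping 5-7 and 6-10 ranges are expected to be coalesced into a single 5-10 part.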
+ expected_parts = [(b"1-2", expected[1:3]), (b"5-10", expected[5:11])]
+ for expected_part, part in zip(expected_parts, parts[1:-1]):
+ header_string, body = part.split(b"\r\n\r\n")
+ headers = dict(item.split(b": ", 1) for item in header_string.split(b"\r\n") if item.strip())
+ self.assertEqual(headers[b"Content-Type"], b"text/plain")
+ self.assertEqual(headers[b"Content-Range"], b"bytes %s/%i" % (expected_part[0], len(expected)))
+ self.assertEqual(expected_part[1] + b"\r\n", body)
+
+ def test_range_invalid(self):
+ with self.assertRaises(HTTPError) as cm:
+ self.request("/document.txt", headers={"Range":"bytes=11-10"})
+ self.assertEqual(cm.exception.code, 416)
+
+ expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
+ with self.assertRaises(HTTPError) as cm:
+ self.request("/document.txt", headers={"Range":"bytes=%i-%i" % (len(expected), len(expected) + 10)})
+ self.assertEqual(cm.exception.code, 416)
+
+ def test_sub_config(self):
+ resp = self.request("/sub.sub.txt")
+ expected = b"localhost localhost %i" % self.server.port
+ assert resp.read().rstrip() == expected
+
+ def test_sub_headers(self):
+ resp = self.request("/sub_headers.sub.txt", headers={"X-Test": "PASS"})
+ expected = b"PASS"
+ assert resp.read().rstrip() == expected
+
+ def test_sub_params(self):
+ resp = self.request("/sub_params.txt", query="plus+pct-20%20pct-3D%3D=PLUS+PCT-20%20PCT-3D%3D&pipe=sub")
+ expected = b"PLUS PCT-20 PCT-3D="
+ assert resp.read().rstrip() == expected
+
+
+class TestFunctionHandler(TestUsingServer):
+ def test_string_rv(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return "test data"
+
+ route = ("GET", "/test/test_string_rv", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual("9", resp.info()["Content-Length"])
+ self.assertEqual(b"test data", resp.read())
+
+ def test_tuple_1_rv(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return ()
+
+ route = ("GET", "/test/test_tuple_1_rv", handler)
+ self.server.router.register(*route)
+
+ with pytest.raises(HTTPError) as cm:
+ self.request(route[1])
+
+ assert cm.value.code == 500
+
+ def test_tuple_2_rv(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return [("Content-Length", 4), ("test-header", "test-value")], "test data"
+
+ route = ("GET", "/test/test_tuple_2_rv", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual("4", resp.info()["Content-Length"])
+ self.assertEqual("test-value", resp.info()["test-header"])
+ self.assertEqual(b"test", resp.read())
+
+ def test_tuple_3_rv(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return 202, [("test-header", "test-value")], "test data"
+
+ route = ("GET", "/test/test_tuple_3_rv", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ self.assertEqual(202, resp.getcode())
+ self.assertEqual("test-value", resp.info()["test-header"])
+ self.assertEqual(b"test data", resp.read())
+
+ def test_tuple_3_rv_1(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return (202, "Some Status"), [("test-header", "test-value")], "test data"
+
+ route = ("GET", "/test/test_tuple_3_rv_1", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ self.assertEqual(202, resp.getcode())
+ self.assertEqual("Some Status", resp.msg)
+ self.assertEqual("test-value", resp.info()["test-header"])
+ self.assertEqual(b"test data", resp.read())
+
+ def test_tuple_4_rv(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return 202, [("test-header", "test-value")], "test data", "garbage"
+
+ route = ("GET", "/test/test_tuple_1_rv", handler)
+ self.server.router.register(*route)
+
+ with pytest.raises(HTTPError) as cm:
+ self.request(route[1])
+
+ assert cm.value.code == 500
+
+ def test_none_rv(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return None
+
+ route = ("GET", "/test/test_none_rv", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ assert resp.getcode() == 200
+ assert "Content-Length" not in resp.info()
+ assert resp.read() == b""
+
+
+class TestJSONHandler(TestUsingServer):
+ def test_json_0(self):
+ @wptserve.handlers.json_handler
+ def handler(request, response):
+ return {"data": "test data"}
+
+ route = ("GET", "/test/test_json_0", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual({"data": "test data"}, json.load(resp))
+
+ def test_json_tuple_2(self):
+ @wptserve.handlers.json_handler
+ def handler(request, response):
+ return [("Test-Header", "test-value")], {"data": "test data"}
+
+ route = ("GET", "/test/test_json_tuple_2", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual("test-value", resp.info()["test-header"])
+ self.assertEqual({"data": "test data"}, json.load(resp))
+
+ def test_json_tuple_3(self):
+ @wptserve.handlers.json_handler
+ def handler(request, response):
+ return (202, "Giraffe"), [("Test-Header", "test-value")], {"data": "test data"}
+
+ route = ("GET", "/test/test_json_tuple_2", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ self.assertEqual(202, resp.getcode())
+ self.assertEqual("Giraffe", resp.msg)
+ self.assertEqual("test-value", resp.info()["test-header"])
+ self.assertEqual({"data": "test data"}, json.load(resp))
+
+
+class TestPythonHandler(TestUsingServer):
+ def test_string(self):
+ resp = self.request("/test_string.py")
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual("text/plain", resp.info()["Content-Type"])
+ self.assertEqual(b"PASS", resp.read())
+
+ def test_tuple_2(self):
+ resp = self.request("/test_tuple_2.py")
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual("text/html", resp.info()["Content-Type"])
+ self.assertEqual("PASS", resp.info()["X-Test"])
+ self.assertEqual(b"PASS", resp.read())
+
+ def test_tuple_3(self):
+ resp = self.request("/test_tuple_3.py")
+ self.assertEqual(202, resp.getcode())
+ self.assertEqual("Giraffe", resp.msg)
+ self.assertEqual("text/html", resp.info()["Content-Type"])
+ self.assertEqual("PASS", resp.info()["X-Test"])
+ self.assertEqual(b"PASS", resp.read())
+
+ def test_import(self):
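+        # Running the handler must not leave the script's directory on sys.path or leak imported modules into sys.modules.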
+ dir_name = os.path.join(doc_root, "subdir")
+ assert dir_name not in sys.path
+ assert "test_module" not in sys.modules
+ resp = self.request("/subdir/import_handler.py")
+ assert dir_name not in sys.path
+ assert "test_module" not in sys.modules
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual("text/plain", resp.info()["Content-Type"])
+ self.assertEqual(b"PASS", resp.read())
+
+ def test_no_main(self):
+ with pytest.raises(HTTPError) as cm:
+ self.request("/no_main.py")
+
+ assert cm.value.code == 500
+
+ def test_invalid(self):
+ with pytest.raises(HTTPError) as cm:
+ self.request("/invalid.py")
+
+ assert cm.value.code == 500
+
+ def test_missing(self):
+ with pytest.raises(HTTPError) as cm:
+ self.request("/missing.py")
+
+ assert cm.value.code == 404
+
+
+class TestDirectoryHandler(TestUsingServer):
+ def test_directory(self):
+ resp = self.request("/")
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual("text/html", resp.info()["Content-Type"])
+ #Add a check that the response is actually sane
+
+ def test_subdirectory_trailing_slash(self):
+ resp = self.request("/subdir/")
+ assert resp.getcode() == 200
+ assert resp.info()["Content-Type"] == "text/html"
+
+ def test_subdirectory_no_trailing_slash(self):
+ # This seems to resolve the 301 transparently, so test for 200
+ resp = self.request("/subdir")
+ assert resp.getcode() == 200
+ assert resp.info()["Content-Type"] == "text/html"
+
+
+class TestAsIsHandler(TestUsingServer):
+ def test_as_is(self):
+ resp = self.request("/test.asis")
+ self.assertEqual(202, resp.getcode())
+ self.assertEqual("Giraffe", resp.msg)
+ self.assertEqual("PASS", resp.info()["X-Test"])
+ self.assertEqual(b"Content", resp.read())
+ #Add a check that the response is actually sane
+
+
+class TestH2Handler(TestUsingH2Server):
+ def test_handle_headers(self):
+ resp = self.client.get('/test_h2_headers.py')
+
+ assert resp.status_code == 203
+ assert resp.headers['test'] == 'passed'
+ assert resp.content == b''
+
+ def test_only_main(self):
+ resp = self.client.get('/test_tuple_3.py')
+
+ assert resp.status_code == 202
+ assert resp.headers['Content-Type'] == 'text/html'
+ assert resp.headers['X-Test'] == 'PASS'
+ assert resp.content == b'PASS'
+
+ def test_handle_data(self):
+ resp = self.client.post('/test_h2_data.py', content=b'hello world!')
+
+ assert resp.status_code == 200
+ assert resp.content == b'HELLO WORLD!'
+
+ def test_handle_headers_data(self):
+ resp = self.client.post('/test_h2_headers_data.py', content=b'hello world!')
+
+ assert resp.status_code == 203
+ assert resp.headers['test'] == 'passed'
+ assert resp.content == b'HELLO WORLD!'
+
+ def test_no_main_or_handlers(self):
+ resp = self.client.get('/no_main.py')
+
+ assert resp.status_code == 500
+ assert "No main function or handlers in script " in json.loads(resp.content)["error"]["message"]
+
+ def test_not_found(self):
+ resp = self.client.get('/no_exist.py')
+
+ assert resp.status_code == 404
+
+ def test_requesting_multiple_resources(self):
+ # 1st .py resource
+ resp = self.client.get('/test_h2_headers.py')
+
+ assert resp.status_code == 203
+ assert resp.headers['test'] == 'passed'
+ assert resp.content == b''
+
+ # 2nd .py resource
+ resp = self.client.get('/test_tuple_3.py')
+
+ assert resp.status_code == 202
+ assert resp.headers['Content-Type'] == 'text/html'
+ assert resp.headers['X-Test'] == 'PASS'
+ assert resp.content == b'PASS'
+
+ # 3rd .py resource
+ resp = self.client.get('/test_h2_headers.py')
+
+ assert resp.status_code == 203
+ assert resp.headers['test'] == 'passed'
+ assert resp.content == b''
+
+
+class TestWorkersHandler(TestWrapperHandlerUsingServer):
+ dummy_files = {'foo.worker.js': b'',
+ 'foo.any.js': b''}
+
+ def test_any_worker_html(self):
+ self.run_wrapper_test('foo.any.worker.html',
+ 'text/html', serve.WorkersHandler)
+
+ def test_worker_html(self):
+ self.run_wrapper_test('foo.worker.html',
+ 'text/html', serve.WorkersHandler)
+
+
+class TestWindowHandler(TestWrapperHandlerUsingServer):
+ dummy_files = {'foo.window.js': b''}
+
+ def test_window_html(self):
+ self.run_wrapper_test('foo.window.html',
+ 'text/html', serve.WindowHandler)
+
+
+class TestAnyHtmlHandler(TestWrapperHandlerUsingServer):
+ dummy_files = {'foo.any.js': b'',
+ 'foo.any.js.headers': b'X-Foo: 1',
+ '__dir__.headers': b'X-Bar: 2'}
+
+ def test_any_html(self):
+ self.run_wrapper_test('foo.any.html',
+ 'text/html',
+ serve.AnyHtmlHandler,
+ headers=[('X-Foo', '1'), ('X-Bar', '2')])
+
+
+class TestSharedWorkersHandler(TestWrapperHandlerUsingServer):
+ dummy_files = {'foo.any.js': b'// META: global=sharedworker\n'}
+
+ def test_any_sharedworkers_html(self):
+ self.run_wrapper_test('foo.any.sharedworker.html',
+ 'text/html', serve.SharedWorkersHandler)
+
+
+class TestServiceWorkersHandler(TestWrapperHandlerUsingServer):
+ dummy_files = {'foo.any.js': b'// META: global=serviceworker\n'}
+
+ def test_serviceworker_html(self):
+ self.run_wrapper_test('foo.any.serviceworker.html',
+ 'text/html', serve.ServiceWorkersHandler)
+
+
+class TestClassicWorkerHandler(TestWrapperHandlerUsingServer):
+ dummy_files = {'bar.any.js': b''}
+
+ def test_any_work_js(self):
+ self.run_wrapper_test('bar.any.worker.js', 'text/javascript',
+ serve.ClassicWorkerHandler)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/test_input_file.py b/testing/web-platform/tests/tools/wptserve/tests/functional/test_input_file.py
new file mode 100644
index 0000000000..93db62c842
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/test_input_file.py
@@ -0,0 +1,149 @@
+from io import BytesIO
+
+import pytest
+
+from wptserve.request import InputFile
+
+bstr = b'This is a test document\nWith new lines\nSeveral in fact...'
+rfile = ''
+test_file = '' # This will be used to test the InputFile functions against
+input_file = InputFile(None, 0)
+
+
+def setup_function(function):
+ global rfile, input_file, test_file
+ rfile = BytesIO(bstr)
+ test_file = BytesIO(bstr)
+ input_file = InputFile(rfile, len(bstr))
+
+
+def teardown_function(function):
+ rfile.close()
+ test_file.close()
+
+
+def test_seek():
+ input_file.seek(2)
+ test_file.seek(2)
+ assert input_file.read(1) == test_file.read(1)
+
+ input_file.seek(4)
+ test_file.seek(4)
+ assert input_file.read(1) == test_file.read(1)
+
+
+def test_seek_backwards():
+ input_file.seek(2)
+ test_file.seek(2)
+ assert input_file.tell() == test_file.tell()
+ assert input_file.read(1) == test_file.read(1)
+ assert input_file.tell() == test_file.tell()
+
+ input_file.seek(0)
+ test_file.seek(0)
+ assert input_file.read(1) == test_file.read(1)
+
+
+def test_seek_negative_offset():
+ with pytest.raises(ValueError):
+ input_file.seek(-1)
+
+
+def test_seek_file_bigger_than_buffer():
+ old_max_buf = InputFile.max_buffer_size
+ InputFile.max_buffer_size = 10
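+    # With max_buffer_size smaller than the body, InputFile should switch to its large-input path rather than buffering everything in memory, so seeking is exercised there too.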
+
+ try:
+ input_file = InputFile(rfile, len(bstr))
+
+ input_file.seek(2)
+ test_file.seek(2)
+ assert input_file.read(1) == test_file.read(1)
+
+ input_file.seek(4)
+ test_file.seek(4)
+ assert input_file.read(1) == test_file.read(1)
+ finally:
+ InputFile.max_buffer_size = old_max_buf
+
+
+def test_read():
+ assert input_file.read() == test_file.read()
+
+
+def test_read_file_bigger_than_buffer():
+ old_max_buf = InputFile.max_buffer_size
+ InputFile.max_buffer_size = 10
+
+ try:
+ input_file = InputFile(rfile, len(bstr))
+ assert input_file.read() == test_file.read()
+ finally:
+ InputFile.max_buffer_size = old_max_buf
+
+
+def test_readline():
+ assert input_file.readline() == test_file.readline()
+ assert input_file.readline() == test_file.readline()
+
+ input_file.seek(0)
+ test_file.seek(0)
+ assert input_file.readline() == test_file.readline()
+
+
+def test_readline_max_byte():
+ line = test_file.readline()
+ assert input_file.readline(max_bytes=len(line)//2) == line[:len(line)//2]
+ assert input_file.readline(max_bytes=len(line)) == line[len(line)//2:]
+
+
+def test_readline_max_byte_longer_than_file():
+ assert input_file.readline(max_bytes=1000) == test_file.readline()
+ assert input_file.readline(max_bytes=1000) == test_file.readline()
+
+
+def test_readline_file_bigger_than_buffer():
+ old_max_buf = InputFile.max_buffer_size
+ InputFile.max_buffer_size = 10
+
+ try:
+ input_file = InputFile(rfile, len(bstr))
+
+ assert input_file.readline() == test_file.readline()
+ assert input_file.readline() == test_file.readline()
+ finally:
+ InputFile.max_buffer_size = old_max_buf
+
+
+def test_readlines():
+ assert input_file.readlines() == test_file.readlines()
+
+
+def test_readlines_file_bigger_than_buffer():
+ old_max_buf = InputFile.max_buffer_size
+ InputFile.max_buffer_size = 10
+
+ try:
+ input_file = InputFile(rfile, len(bstr))
+
+ assert input_file.readlines() == test_file.readlines()
+ finally:
+ InputFile.max_buffer_size = old_max_buf
+
+
+def test_iter():
+ for a, b in zip(input_file, test_file):
+ assert a == b
+
+
+def test_iter_file_bigger_than_buffer():
+ old_max_buf = InputFile.max_buffer_size
+ InputFile.max_buffer_size = 10
+
+ try:
+ input_file = InputFile(rfile, len(bstr))
+
+ for a, b in zip(input_file, test_file):
+ assert a == b
+ finally:
+ InputFile.max_buffer_size = old_max_buf
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/test_pipes.py b/testing/web-platform/tests/tools/wptserve/tests/functional/test_pipes.py
new file mode 100644
index 0000000000..904bfb4ee4
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/test_pipes.py
@@ -0,0 +1,233 @@
+import os
+import unittest
+import time
+import json
+import urllib
+
+import pytest
+
+wptserve = pytest.importorskip("wptserve")
+from .base import TestUsingServer, doc_root
+
+
+class TestStatus(TestUsingServer):
+ def test_status(self):
+ resp = self.request("/document.txt", query="pipe=status(202)")
+ self.assertEqual(resp.getcode(), 202)
+
+class TestHeader(TestUsingServer):
+ def test_not_set(self):
+ resp = self.request("/document.txt", query="pipe=header(X-TEST,PASS)")
+ self.assertEqual(resp.info()["X-TEST"], "PASS")
+
+ def test_set(self):
+ resp = self.request("/document.txt", query="pipe=header(Content-Type,text/html)")
+ self.assertEqual(resp.info()["Content-Type"], "text/html")
+
+ def test_multiple(self):
+ resp = self.request("/document.txt", query="pipe=header(X-Test,PASS)|header(Content-Type,text/html)")
+ self.assertEqual(resp.info()["X-TEST"], "PASS")
+ self.assertEqual(resp.info()["Content-Type"], "text/html")
+
+ def test_multiple_same(self):
+ resp = self.request("/document.txt", query="pipe=header(Content-Type,FAIL)|header(Content-Type,text/html)")
+ self.assertEqual(resp.info()["Content-Type"], "text/html")
+
+ def test_multiple_append(self):
+ resp = self.request("/document.txt", query="pipe=header(X-Test,1)|header(X-Test,2,True)")
+ self.assert_multiple_headers(resp, "X-Test", ["1", "2"])
+
+ def test_semicolon(self):
+ resp = self.request("/document.txt", query="pipe=header(Refresh,3;url=http://example.com)")
+ self.assertEqual(resp.info()["Refresh"], "3;url=http://example.com")
+
+ def test_escape_comma(self):
+ resp = self.request("/document.txt", query=r"pipe=header(Expires,Thu\,%2014%20Aug%201986%2018:00:00%20GMT)")
+ self.assertEqual(resp.info()["Expires"], "Thu, 14 Aug 1986 18:00:00 GMT")
+
+ def test_escape_parenthesis(self):
+ resp = self.request("/document.txt", query=r"pipe=header(User-Agent,Mozilla/5.0%20(X11;%20Linux%20x86_64;%20rv:12.0\)")
+ self.assertEqual(resp.info()["User-Agent"], "Mozilla/5.0 (X11; Linux x86_64; rv:12.0)")
+
+class TestSlice(TestUsingServer):
+ def test_both_bounds(self):
+ resp = self.request("/document.txt", query="pipe=slice(1,10)")
+ expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
+ self.assertEqual(resp.read(), expected[1:10])
+
+ def test_no_upper(self):
+ resp = self.request("/document.txt", query="pipe=slice(1)")
+ expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
+ self.assertEqual(resp.read(), expected[1:])
+
+ def test_no_lower(self):
+ resp = self.request("/document.txt", query="pipe=slice(null,10)")
+ expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
+ self.assertEqual(resp.read(), expected[:10])
+
+class TestSub(TestUsingServer):
+ def test_sub_config(self):
+ resp = self.request("/sub.txt", query="pipe=sub")
+ expected = b"localhost localhost %i" % self.server.port
+ self.assertEqual(resp.read().rstrip(), expected)
+
+ def test_sub_file_hash(self):
+ resp = self.request("/sub_file_hash.sub.txt")
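+        # The expected values are the base64-encoded digests of sub_file_hash_subject.txt.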
+ expected = b"""
+md5: JmI1W8fMHfSfCarYOSxJcw==
+sha1: nqpWqEw4IW8NjD6R375gtrQvtTo=
+sha224: RqQ6fMmta6n9TuA/vgTZK2EqmidqnrwBAmQLRQ==
+sha256: G6Ljg1uPejQxqFmvFOcV/loqnjPTW5GSOePOfM/u0jw=
+sha384: lkXHChh1BXHN5nT5BYhi1x67E1CyYbPKRKoF2LTm5GivuEFpVVYtvEBHtPr74N9E
+sha512: r8eLGRTc7ZznZkFjeVLyo6/FyQdra9qmlYCwKKxm3kfQAswRS9+3HsYk3thLUhcFmmWhK4dXaICzJwGFonfXwg=="""
+ self.assertEqual(resp.read().rstrip(), expected.strip())
+
+ def test_sub_file_hash_unrecognized(self):
+ with self.assertRaises(urllib.error.HTTPError):
+ self.request("/sub_file_hash_unrecognized.sub.txt")
+
+ def test_sub_headers(self):
+ resp = self.request("/sub_headers.txt", query="pipe=sub", headers={"X-Test": "PASS"})
+ expected = b"PASS"
+ self.assertEqual(resp.read().rstrip(), expected)
+
+ def test_sub_location(self):
+ resp = self.request("/sub_location.sub.txt?query_string")
+ expected = """
+host: localhost:{0}
+hostname: localhost
+path: /sub_location.sub.txt
+pathname: /sub_location.sub.txt
+port: {0}
+query: ?query_string
+scheme: http
+server: http://localhost:{0}""".format(self.server.port).encode("ascii")
+ self.assertEqual(resp.read().rstrip(), expected.strip())
+
+ def test_sub_params(self):
+ resp = self.request("/sub_params.txt", query="plus+pct-20%20pct-3D%3D=PLUS+PCT-20%20PCT-3D%3D&pipe=sub")
+ expected = b"PLUS PCT-20 PCT-3D="
+ self.assertEqual(resp.read().rstrip(), expected)
+
+ def test_sub_url_base(self):
+ resp = self.request("/sub_url_base.sub.txt")
+ self.assertEqual(resp.read().rstrip(), b"Before / After")
+
+ def test_sub_url_base_via_filename_with_query(self):
+ resp = self.request("/sub_url_base.sub.txt?pipe=slice(5,10)")
+ self.assertEqual(resp.read().rstrip(), b"e / A")
+
+ def test_sub_uuid(self):
+ resp = self.request("/sub_uuid.sub.txt")
+ self.assertRegex(resp.read().rstrip(), b"Before [a-f0-9-]+ After")
+
+ def test_sub_var(self):
+ resp = self.request("/sub_var.sub.txt")
+ port = self.server.port
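+        # sub_var.sub.txt stores {{host}} and the first http port in the variables $first and $second and reuses them later in the template.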
+ expected = b"localhost %d A %d B localhost C" % (port, port)
+ self.assertEqual(resp.read().rstrip(), expected)
+
+ def test_sub_fs_path(self):
+ resp = self.request("/subdir/sub_path.sub.txt")
+ root = os.path.abspath(doc_root)
+ expected = """%(root)s%(sep)ssubdir%(sep)ssub_path.sub.txt
+%(root)s%(sep)ssub_path.sub.txt
+%(root)s%(sep)ssub_path.sub.txt
+""" % {"root": root, "sep": os.path.sep}
+ self.assertEqual(resp.read(), expected.encode("utf8"))
+
+ def test_sub_header_or_default(self):
+ resp = self.request("/sub_header_or_default.sub.txt", headers={"X-Present": "OK"})
+ expected = b"OK\nabsent-default"
+ self.assertEqual(resp.read().rstrip(), expected)
+
+class TestTrickle(TestUsingServer):
+ def test_trickle(self):
+ #Actually testing that the response trickles in is not that easy
+ t0 = time.time()
+ resp = self.request("/document.txt", query="pipe=trickle(1:d2:5:d1:r2)")
+ t1 = time.time()
+ expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
+ self.assertEqual(resp.read(), expected)
+ self.assertGreater(6, t1-t0)
+
+ def test_headers(self):
+ resp = self.request("/document.txt", query="pipe=trickle(d0.01)")
+ self.assertEqual(resp.info()["Cache-Control"], "no-cache, no-store, must-revalidate")
+ self.assertEqual(resp.info()["Pragma"], "no-cache")
+ self.assertEqual(resp.info()["Expires"], "0")
+
+class TestPipesWithVariousHandlers(TestUsingServer):
+ def test_with_python_file_handler(self):
+ resp = self.request("/test_string.py", query="pipe=slice(null,2)")
+ self.assertEqual(resp.read(), b"PA")
+
+ def test_with_python_func_handler(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return "PASS"
+ route = ("GET", "/test/test_pipes_1/", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1], query="pipe=slice(null,2)")
+ self.assertEqual(resp.read(), b"PA")
+
+ def test_with_python_func_handler_using_response_writer(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.writer.write_content("PASS")
+ route = ("GET", "/test/test_pipes_1/", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1], query="pipe=slice(null,2)")
+ # slice has not been applied to the response, because response.writer was used.
+ self.assertEqual(resp.read(), b"PASS")
+
+ def test_header_pipe_with_python_func_using_response_writer(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.writer.write_content("CONTENT")
+ route = ("GET", "/test/test_pipes_1/", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1], query="pipe=header(X-TEST,FAIL)")
+ # header pipe was ignored, because response.writer was used.
+ self.assertFalse(resp.info().get("X-TEST"))
+ self.assertEqual(resp.read(), b"CONTENT")
+
+ def test_with_json_handler(self):
+ @wptserve.handlers.json_handler
+ def handler(request, response):
+ return json.dumps({'data': 'PASS'})
+ route = ("GET", "/test/test_pipes_2/", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1], query="pipe=slice(null,2)")
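+        # json_handler serialises the handler's return value, so the already-JSON string is encoded again and the body starts with '"{'; slice(null,2) keeps those first two bytes.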
+ self.assertEqual(resp.read(), b'"{')
+
+ def test_slice_with_as_is_handler(self):
+ resp = self.request("/test.asis", query="pipe=slice(null,2)")
+ self.assertEqual(202, resp.getcode())
+ self.assertEqual("Giraffe", resp.msg)
+ self.assertEqual("PASS", resp.info()["X-Test"])
+ # slice has not been applied to the response, because response.writer was used.
+ self.assertEqual(b"Content", resp.read())
+
+ def test_headers_with_as_is_handler(self):
+ resp = self.request("/test.asis", query="pipe=header(X-TEST,FAIL)")
+ self.assertEqual(202, resp.getcode())
+ self.assertEqual("Giraffe", resp.msg)
+ # header pipe was ignored.
+ self.assertEqual("PASS", resp.info()["X-TEST"])
+ self.assertEqual(b"Content", resp.read())
+
+ def test_trickle_with_as_is_handler(self):
+ t0 = time.time()
+ resp = self.request("/test.asis", query="pipe=trickle(1:d2:5:d1:r2)")
+ t1 = time.time()
+ self.assertTrue(b'Content' in resp.read())
+ self.assertGreater(6, t1-t0)
+
+ def test_gzip_handler(self):
+ resp = self.request("/document.txt", query="pipe=gzip")
+ self.assertEqual(resp.getcode(), 200)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/test_request.py b/testing/web-platform/tests/tools/wptserve/tests/functional/test_request.py
new file mode 100644
index 0000000000..aa492f7437
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/test_request.py
@@ -0,0 +1,183 @@
+import pytest
+
+from urllib.parse import quote_from_bytes
+
+wptserve = pytest.importorskip("wptserve")
+from .base import TestUsingServer
+from wptserve.request import InputFile
+
+
+class TestInputFile(TestUsingServer):
+ def test_seek(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ rv = []
+ f = request.raw_input
+ f.seek(5)
+ rv.append(f.read(2))
+ rv.append(b"%d" % f.tell())
+ f.seek(0)
+ rv.append(f.readline())
+ rv.append(b"%d" % f.tell())
+ rv.append(f.read(-1))
+ rv.append(b"%d" % f.tell())
+ f.seek(0)
+ rv.append(f.read())
+ f.seek(0)
+ rv.extend(f.readlines())
+
+ return b" ".join(rv)
+
+ route = ("POST", "/test/test_seek", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1], method="POST", body=b"12345ab\ncdef")
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual([b"ab", b"7", b"12345ab\n", b"8", b"cdef", b"12",
+ b"12345ab\ncdef", b"12345ab\n", b"cdef"],
+ resp.read().split(b" "))
+
+ def test_seek_input_longer_than_buffer(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ rv = []
+ f = request.raw_input
+ f.seek(5)
+ rv.append(f.read(2))
+ rv.append(b"%d" % f.tell())
+ f.seek(0)
+ rv.append(b"%d" % f.tell())
+ rv.append(b"%d" % f.tell())
+ return b" ".join(rv)
+
+ route = ("POST", "/test/test_seek", handler)
+ self.server.router.register(*route)
+
+ old_max_buf = InputFile.max_buffer_size
+ InputFile.max_buffer_size = 10
+ try:
+ resp = self.request(route[1], method="POST", body=b"1"*20)
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual([b"11", b"7", b"0", b"0"],
+ resp.read().split(b" "))
+ finally:
+ InputFile.max_buffer_size = old_max_buf
+
+ def test_iter(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ f = request.raw_input
+ return b" ".join(line for line in f)
+
+ route = ("POST", "/test/test_iter", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1], method="POST", body=b"12345\nabcdef\r\nzyxwv")
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual([b"12345\n", b"abcdef\r\n", b"zyxwv"], resp.read().split(b" "))
+
+ def test_iter_input_longer_than_buffer(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ f = request.raw_input
+ return b" ".join(line for line in f)
+
+ route = ("POST", "/test/test_iter", handler)
+ self.server.router.register(*route)
+
+ old_max_buf = InputFile.max_buffer_size
+ InputFile.max_buffer_size = 10
+ try:
+ resp = self.request(route[1], method="POST", body=b"12345\nabcdef\r\nzyxwv")
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual([b"12345\n", b"abcdef\r\n", b"zyxwv"], resp.read().split(b" "))
+ finally:
+ InputFile.max_buffer_size = old_max_buf
+
+
+class TestRequest(TestUsingServer):
+ def test_body(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ request.raw_input.seek(5)
+ return request.body
+
+ route = ("POST", "/test/test_body", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1], method="POST", body=b"12345ab\ncdef")
+ self.assertEqual(b"12345ab\ncdef", resp.read())
+
+ def test_route_match(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return request.route_match["match"] + " " + request.route_match["*"]
+
+ route = ("GET", "/test/{match}_*", handler)
+ self.server.router.register(*route)
+ resp = self.request("/test/some_route")
+ self.assertEqual(b"some route", resp.read())
+
+ def test_non_ascii_in_headers(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return request.headers[b"foo"]
+
+ route = ("GET", "/test/test_unicode_in_headers", handler)
+ self.server.router.register(*route)
+
+ # Try some non-ASCII characters and the server shouldn't crash.
+ encoded_text = "你好".encode("utf-8")
+ resp = self.request(route[1], headers={"foo": encoded_text})
+ self.assertEqual(encoded_text, resp.read())
+
+ # Try a different encoding from utf-8 to make sure the binary value is
+        # returned verbatim.
+        encoded_text = "どうも".encode("shift-jis")
+ resp = self.request(route[1], headers={"foo": encoded_text})
+ self.assertEqual(encoded_text, resp.read())
+
+ def test_non_ascii_in_GET_params(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return request.GET[b"foo"]
+
+ route = ("GET", "/test/test_unicode_in_get", handler)
+ self.server.router.register(*route)
+
+ # We intentionally choose an encoding that's not the default UTF-8.
+        encoded_text = "どうも".encode("shift-jis")
+ quoted = quote_from_bytes(encoded_text)
+ resp = self.request(route[1], query="foo="+quoted)
+ self.assertEqual(encoded_text, resp.read())
+
+ def test_non_ascii_in_POST_params(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return request.POST[b"foo"]
+
+ route = ("POST", "/test/test_unicode_in_POST", handler)
+ self.server.router.register(*route)
+
+ # We intentionally choose an encoding that's not the default UTF-8.
+        encoded_text = "どうも".encode("shift-jis")
+ # After urlencoding, the string should only contain ASCII.
+ quoted = quote_from_bytes(encoded_text).encode("ascii")
+ resp = self.request(route[1], method="POST", body=b"foo="+quoted)
+ self.assertEqual(encoded_text, resp.read())
+
+
+class TestAuth(TestUsingServer):
+ def test_auth(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return b" ".join((request.auth.username, request.auth.password))
+
+ route = ("GET", "/test/test_auth", handler)
+ self.server.router.register(*route)
+
+ resp = self.request(route[1], auth=(b"test", b"PASS"))
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual([b"test", b"PASS"], resp.read().split(b" "))
+
+        encoded_text = "どうも".encode("shift-jis")
+ resp = self.request(route[1], auth=(encoded_text, encoded_text))
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual([encoded_text, encoded_text], resp.read().split(b" "))
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/test_response.py b/testing/web-platform/tests/tools/wptserve/tests/functional/test_response.py
new file mode 100644
index 0000000000..4a4611f60a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/test_response.py
@@ -0,0 +1,323 @@
+import os
+import unittest
+import json
+import types
+
+from http.client import BadStatusLine
+from io import BytesIO
+
+import pytest
+
+wptserve = pytest.importorskip("wptserve")
+from .base import TestUsingServer, TestUsingH2Server, doc_root
+
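+# Used below in place of ResponseWriter.end_headers: rather than terminating the headers it
+# starts an X-Body header, so any body written afterwards becomes that header's value and the
+# HEAD tests can observe whether a body was (or was not) sent.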
+def send_body_as_header(self):
+ if self._response.add_required_headers:
+ self.write_default_headers()
+
+ self.write("X-Body: ")
+ self._headers_complete = True
+
+class TestResponse(TestUsingServer):
+ def test_head_without_body(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.writer.end_headers = types.MethodType(send_body_as_header,
+ response.writer)
+ return [("X-Test", "TEST")], "body\r\n"
+
+ route = ("GET", "/test/test_head_without_body", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1], method="HEAD")
+ self.assertEqual("6", resp.info()['Content-Length'])
+ self.assertEqual("TEST", resp.info()['x-Test'])
+ self.assertEqual("", resp.info()['x-body'])
+
+ def test_head_with_body(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.send_body_for_head_request = True
+ response.writer.end_headers = types.MethodType(send_body_as_header,
+ response.writer)
+ return [("X-Test", "TEST")], "body\r\n"
+
+ route = ("GET", "/test/test_head_with_body", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1], method="HEAD")
+ self.assertEqual("6", resp.info()['Content-Length'])
+ self.assertEqual("TEST", resp.info()['x-Test'])
+ self.assertEqual("body", resp.info()['X-Body'])
+
+ def test_write_content_no_status_no_header(self):
+ resp_content = b"TEST"
+
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.writer.write_content(resp_content)
+
+ route = ("GET", "/test/test_write_content_no_status_no_header", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ assert resp.getcode() == 200
+ assert resp.read() == resp_content
+ assert resp.info()["Content-Length"] == str(len(resp_content))
+ assert "Date" in resp.info()
+ assert "Server" in resp.info()
+
+ def test_write_content_no_headers(self):
+ resp_content = b"TEST"
+
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.writer.write_status(201)
+ response.writer.write_content(resp_content)
+
+ route = ("GET", "/test/test_write_content_no_headers", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ assert resp.getcode() == 201
+ assert resp.read() == resp_content
+ assert resp.info()["Content-Length"] == str(len(resp_content))
+ assert "Date" in resp.info()
+ assert "Server" in resp.info()
+
+ def test_write_content_no_status(self):
+ resp_content = b"TEST"
+
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.writer.write_header("test-header", "test-value")
+ response.writer.write_content(resp_content)
+
+ route = ("GET", "/test/test_write_content_no_status", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ assert resp.getcode() == 200
+ assert resp.read() == resp_content
+ assert sorted(x.lower() for x in resp.info().keys()) == sorted(['test-header', 'date', 'server', 'content-length'])
+
+ def test_write_content_no_status_no_required_headers(self):
+ resp_content = b"TEST"
+
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.add_required_headers = False
+ response.writer.write_header("test-header", "test-value")
+ response.writer.write_content(resp_content)
+
+ route = ("GET", "/test/test_write_content_no_status_no_required_headers", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ assert resp.getcode() == 200
+ assert resp.read() == resp_content
+ assert resp.info().items() == [('test-header', 'test-value')]
+
+ def test_write_content_no_status_no_headers_no_required_headers(self):
+ resp_content = b"TEST"
+
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.add_required_headers = False
+ response.writer.write_content(resp_content)
+
+ route = ("GET", "/test/test_write_content_no_status_no_headers_no_required_headers", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ assert resp.getcode() == 200
+ assert resp.read() == resp_content
+ assert resp.info().items() == []
+
+ def test_write_raw_content(self):
+ resp_content = b"HTTP/1.1 202 Giraffe\n" \
+ b"X-TEST: PASS\n" \
+ b"Content-Length: 7\n\n" \
+ b"Content"
+
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.writer.write_raw_content(resp_content)
+
+ route = ("GET", "/test/test_write_raw_content", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ assert resp.getcode() == 202
+ assert resp.info()["X-TEST"] == "PASS"
+ assert resp.read() == b"Content"
+
+ def test_write_raw_content_file(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ with open(os.path.join(doc_root, "test.asis"), 'rb') as infile:
+ response.writer.write_raw_content(infile)
+
+ route = ("GET", "/test/test_write_raw_content", handler)
+ self.server.router.register(*route)
+ resp = self.request(route[1])
+ assert resp.getcode() == 202
+ assert resp.info()["X-TEST"] == "PASS"
+ assert resp.read() == b"Content"
+
+ def test_write_raw_none(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ with pytest.raises(ValueError):
+ response.writer.write_raw_content(None)
+
+ route = ("GET", "/test/test_write_raw_content", handler)
+ self.server.router.register(*route)
+ self.request(route[1])
+
+ def test_write_raw_contents_invalid_http(self):
+ resp_content = b"INVALID HTTP"
+
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.writer.write_raw_content(resp_content)
+
+ route = ("GET", "/test/test_write_raw_content", handler)
+ self.server.router.register(*route)
+
+ with pytest.raises(BadStatusLine) as e:
+ self.request(route[1])
+ assert str(e.value) == resp_content.decode('utf-8')
+
+class TestH2Response(TestUsingH2Server):
+ def test_write_without_ending_stream(self):
+ data = b"TEST"
+
+ @wptserve.handlers.handler
+ def handler(request, response):
+ headers = [
+ ('server', 'test-h2'),
+ ('test', 'PASS'),
+ ]
+ response.writer.write_headers(headers, 202)
+ response.writer.write_data_frame(data, False)
+
+        # The server should detect that the stream isn't ended and call `writer.end_stream()`
+
+ route = ("GET", "/h2test/test", handler)
+ self.server.router.register(*route)
+ resp = self.client.get(route[1])
+
+ assert resp.status_code == 202
+ assert [x for x in resp.headers.items()] == [('server', 'test-h2'), ('test', 'PASS')]
+ assert resp.content == data
+
+ def test_set_error(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.set_error(503, message="Test error")
+
+ route = ("GET", "/h2test/test_set_error", handler)
+ self.server.router.register(*route)
+ resp = self.client.get(route[1])
+
+ assert resp.status_code == 503
+ assert json.loads(resp.content) == json.loads("{\"error\": {\"message\": \"Test error\", \"code\": 503}}")
+
+ def test_file_like_response(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ content = BytesIO(b"Hello, world!")
+ response.content = content
+
+ route = ("GET", "/h2test/test_file_like_response", handler)
+ self.server.router.register(*route)
+ resp = self.client.get(route[1])
+
+ assert resp.status_code == 200
+ assert resp.content == b"Hello, world!"
+
+ def test_list_response(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.content = ['hello', 'world']
+
+ route = ("GET", "/h2test/test_file_like_response", handler)
+ self.server.router.register(*route)
+ resp = self.client.get(route[1])
+
+ assert resp.status_code == 200
+ assert resp.content == b"helloworld"
+
+ def test_content_longer_than_frame_size(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ size = response.writer.get_max_payload_size()
+ content = "a" * (size + 5)
+ return [('payload_size', size)], content
+
+ route = ("GET", "/h2test/test_content_longer_than_frame_size", handler)
+ self.server.router.register(*route)
+ resp = self.client.get(route[1])
+
+ assert resp.status_code == 200
+ payload_size = int(resp.headers['payload_size'])
+ assert payload_size
+ assert resp.content == b"a" * (payload_size + 5)
+
+ def test_encode(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.encoding = "utf8"
+ t = response.writer.encode("hello")
+ assert t == b"hello"
+
+ with pytest.raises(ValueError):
+ response.writer.encode(None)
+
+ route = ("GET", "/h2test/test_content_longer_than_frame_size", handler)
+ self.server.router.register(*route)
+ resp = self.client.get(route[1])
+ assert resp.status_code == 200
+
+ def test_raw_header_frame(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.writer.write_raw_header_frame([
+ (':status', '204'),
+ ('server', 'TEST-H2')
+ ], end_headers=True)
+
+ route = ("GET", "/h2test/test_file_like_response", handler)
+ self.server.router.register(*route)
+ resp = self.client.get(route[1])
+
+ assert resp.status_code == 204
+ assert resp.headers['server'] == 'TEST-H2'
+ assert resp.content == b''
+
+ def test_raw_data_frame(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.write_status_headers()
+ response.writer.write_raw_data_frame(data=b'Hello world', end_stream=True)
+
+ route = ("GET", "/h2test/test_file_like_response", handler)
+ self.server.router.register(*route)
+ resp = self.client.get(route[1])
+
+ assert resp.content == b'Hello world'
+
+ def test_raw_header_continuation_frame(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ response.writer.write_raw_header_frame([
+ (':status', '204')
+ ])
+
+ response.writer.write_raw_continuation_frame([
+ ('server', 'TEST-H2')
+ ], end_headers=True)
+
+ route = ("GET", "/h2test/test_file_like_response", handler)
+ self.server.router.register(*route)
+ resp = self.client.get(route[1])
+
+ assert resp.status_code == 204
+ assert resp.headers['server'] == 'TEST-H2'
+ assert resp.content == b''
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/test_server.py b/testing/web-platform/tests/tools/wptserve/tests/functional/test_server.py
new file mode 100644
index 0000000000..939396ddee
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/test_server.py
@@ -0,0 +1,118 @@
+import unittest
+
+import pytest
+from urllib.error import HTTPError
+
+wptserve = pytest.importorskip("wptserve")
+from .base import TestUsingServer, TestUsingH2Server
+
+
+class TestFileHandler(TestUsingServer):
+ def test_not_handled(self):
+ with self.assertRaises(HTTPError) as cm:
+ self.request("/not_existing")
+
+ self.assertEqual(cm.exception.code, 404)
+
+
+class TestRewriter(TestUsingServer):
+ def test_rewrite(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return request.request_path
+
+ route = ("GET", "/test/rewritten", handler)
+ self.server.rewriter.register("GET", "/test/original", route[1])
+ self.server.router.register(*route)
+ resp = self.request("/test/original")
+ self.assertEqual(200, resp.getcode())
+ self.assertEqual(b"/test/rewritten", resp.read())
+
+
+class TestRequestHandler(TestUsingServer):
+ def test_exception(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ raise Exception
+
+ route = ("GET", "/test/raises", handler)
+ self.server.router.register(*route)
+ with self.assertRaises(HTTPError) as cm:
+ self.request("/test/raises")
+
+ self.assertEqual(cm.exception.code, 500)
+
+ def test_many_headers(self):
+ headers = {"X-Val%d" % i: str(i) for i in range(256)}
+
+ @wptserve.handlers.handler
+ def handler(request, response):
+ # Additional headers are added by urllib.request.
+ assert len(request.headers) > len(headers)
+ for k, v in headers.items():
+ assert request.headers.get(k) == \
+ wptserve.utils.isomorphic_encode(v)
+ return "OK"
+
+ route = ("GET", "/test/headers", handler)
+ self.server.router.register(*route)
+ resp = self.request("/test/headers", headers=headers)
+ self.assertEqual(200, resp.getcode())
+
+
+class TestH2Version(TestUsingH2Server):
+ # The purpose of this test is to ensure that all TestUsingH2Server tests
+ # actually end up using HTTP/2, in case there's any protocol negotiation.
+ def test_http_version(self):
+ resp = self.client.get('/')
+
+ assert resp.http_version == 'HTTP/2'
+
+
+class TestFileHandlerH2(TestUsingH2Server):
+ def test_not_handled(self):
+ resp = self.client.get("/not_existing")
+
+ assert resp.status_code == 404
+
+
+class TestRewriterH2(TestUsingH2Server):
+ def test_rewrite(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ return request.request_path
+
+ route = ("GET", "/test/rewritten", handler)
+ self.server.rewriter.register("GET", "/test/original", route[1])
+ self.server.router.register(*route)
+ resp = self.client.get("/test/original")
+ assert resp.status_code == 200
+ assert resp.content == b"/test/rewritten"
+
+
+class TestRequestHandlerH2(TestUsingH2Server):
+ def test_exception(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ raise Exception
+
+ route = ("GET", "/test/raises", handler)
+ self.server.router.register(*route)
+ resp = self.client.get("/test/raises")
+
+ assert resp.status_code == 500
+
+ def test_frame_handler_exception(self):
+ class handler_cls:
+ def frame_handler(self, request):
+ raise Exception
+
+ route = ("GET", "/test/raises", handler_cls())
+ self.server.router.register(*route)
+ resp = self.client.get("/test/raises")
+
+ assert resp.status_code == 500
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/testing/web-platform/tests/tools/wptserve/tests/functional/test_stash.py b/testing/web-platform/tests/tools/wptserve/tests/functional/test_stash.py
new file mode 100644
index 0000000000..03561bc872
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/functional/test_stash.py
@@ -0,0 +1,44 @@
+import unittest
+import uuid
+
+import pytest
+
+wptserve = pytest.importorskip("wptserve")
+from wptserve.router import any_method
+from wptserve.stash import StashServer
+from .base import TestUsingServer
+
+
+class TestResponseSetCookie(TestUsingServer):
+ def run(self, result=None):
+ with StashServer(None, authkey=str(uuid.uuid4())):
+ super().run(result)
+
+ def test_put_take(self):
+ @wptserve.handlers.handler
+ def handler(request, response):
+ if request.method == "POST":
+ request.server.stash.put(request.POST.first(b"id"), request.POST.first(b"data"))
+ data = "OK"
+ elif request.method == "GET":
+ data = request.server.stash.take(request.GET.first(b"id"))
+ if data is None:
+ return "NOT FOUND"
+ return data
+
+ id = str(uuid.uuid4())
+ route = (any_method, "/test/put_take", handler)
+ self.server.router.register(*route)
+
+ resp = self.request(route[1], method="POST", body={"id": id, "data": "Sample data"})
+ self.assertEqual(resp.read(), b"OK")
+
+ resp = self.request(route[1], query="id=" + id)
+ self.assertEqual(resp.read(), b"Sample data")
+
+ resp = self.request(route[1], query="id=" + id)
+ self.assertEqual(resp.read(), b"NOT FOUND")
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/testing/web-platform/tests/tools/wptserve/tests/test_config.py b/testing/web-platform/tests/tools/wptserve/tests/test_config.py
new file mode 100644
index 0000000000..9f84577c7f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/test_config.py
@@ -0,0 +1,384 @@
+import json
+import logging
+import pickle
+
+from distutils.spawn import find_executable
+from logging import handlers
+
+import pytest
+
+config = pytest.importorskip("wptserve.config")
+
+logger = logging.getLogger()
+
+def test_renamed_are_renamed():
+ assert len(set(config._renamed_props.keys()) & set(config.ConfigBuilder._default.keys())) == 0
+
+
+def test_renamed_exist():
+ assert set(config._renamed_props.values()).issubset(set(config.ConfigBuilder._default.keys()))
+
+
+@pytest.mark.parametrize("base, override, expected", [
+ ({"a": 1}, {"a": 2}, {"a": 2}),
+ ({"a": 1}, {"b": 2}, {"a": 1}),
+ ({"a": {"b": 1}}, {"a": {}}, {"a": {"b": 1}}),
+ ({"a": {"b": 1}}, {"a": {"b": 2}}, {"a": {"b": 2}}),
+ ({"a": {"b": 1}}, {"a": {"b": 2, "c": 3}}, {"a": {"b": 2}}),
+ pytest.param({"a": {"b": 1}}, {"a": 2}, {"a": 1}, marks=pytest.mark.xfail),
+ pytest.param({"a": 1}, {"a": {"b": 2}}, {"a": 1}, marks=pytest.mark.xfail),
+])
+def test_merge_dict(base, override, expected):
+ assert expected == config._merge_dict(base, override)
+
+
+def test_as_dict():
+ with config.ConfigBuilder(logger) as c:
+ assert c.as_dict() is not None
+
+
+def test_as_dict_is_json():
+ with config.ConfigBuilder(logger) as c:
+ assert json.dumps(c.as_dict()) is not None
+
+
+def test_init_basic_prop():
+ with config.ConfigBuilder(logger, browser_host="foo.bar") as c:
+ assert c.browser_host == "foo.bar"
+
+
+def test_init_prefixed_prop():
+ with config.ConfigBuilder(logger, doc_root="/") as c:
+ assert c.doc_root == "/"
+
+
+def test_init_renamed_host():
+ logger = logging.getLogger("test_init_renamed_host")
+ logger.setLevel(logging.DEBUG)
+ handler = handlers.BufferingHandler(100)
+ logger.addHandler(handler)
+
+ with config.ConfigBuilder(logger, host="foo.bar") as c:
+ assert len(handler.buffer) == 1
+ assert "browser_host" in handler.buffer[0].getMessage() # check we give the new name in the message
+ assert not hasattr(c, "host")
+ assert c.browser_host == "foo.bar"
+
+
+def test_init_bogus():
+ with pytest.raises(TypeError) as e:
+ config.ConfigBuilder(logger, foo=1, bar=2)
+ message = e.value.args[0]
+ assert "foo" in message
+ assert "bar" in message
+
+
+def test_getitem():
+ with config.ConfigBuilder(logger, browser_host="foo.bar") as c:
+ assert c["browser_host"] == "foo.bar"
+
+
+def test_no_setitem():
+ with config.ConfigBuilder(logger) as c:
+ with pytest.raises(ValueError):
+ c["browser_host"] = "foo.bar"
+
+
+def test_iter():
+ with config.ConfigBuilder(logger) as c:
+ s = set(iter(c))
+ assert "browser_host" in s
+ assert "host" not in s
+ assert "__getitem__" not in s
+ assert "_browser_host" not in s
+
+
+def test_assignment():
+ cb = config.ConfigBuilder(logger)
+ cb.browser_host = "foo.bar"
+ with cb as c:
+ assert c.browser_host == "foo.bar"
+
+
+def test_update_basic():
+ cb = config.ConfigBuilder(logger)
+ cb.update({"browser_host": "foo.bar"})
+ with cb as c:
+ assert c.browser_host == "foo.bar"
+
+
+def test_update_prefixed():
+ cb = config.ConfigBuilder(logger)
+ cb.update({"doc_root": "/"})
+ with cb as c:
+ assert c.doc_root == "/"
+
+
+def test_update_renamed_host():
+ logger = logging.getLogger("test_update_renamed_host")
+ logger.setLevel(logging.DEBUG)
+ handler = handlers.BufferingHandler(100)
+ logger.addHandler(handler)
+
+ cb = config.ConfigBuilder(logger)
+ assert len(handler.buffer) == 0
+
+ cb.update({"host": "foo.bar"})
+
+ with cb as c:
+ assert len(handler.buffer) == 1
+ assert "browser_host" in handler.buffer[0].getMessage() # check we give the new name in the message
+ assert not hasattr(c, "host")
+ assert c.browser_host == "foo.bar"
+
+
+def test_update_bogus():
+ cb = config.ConfigBuilder(logger)
+ with pytest.raises(KeyError):
+ cb.update({"foobar": 1})
+
+
+def test_ports_auto():
+ with config.ConfigBuilder(logger,
+ ports={"http": ["auto"]},
+ ssl={"type": "none"}) as c:
+ ports = c.ports
+ assert set(ports.keys()) == {"http"}
+ assert len(ports["http"]) == 1
+ assert isinstance(ports["http"][0], int)
+
+
+def test_ports_auto_mutate():
+ cb = config.ConfigBuilder(logger,
+ ports={"http": [1001]},
+ ssl={"type": "none"})
+ cb.ports = {"http": ["auto"]}
+ with cb as c:
+ new_ports = c.ports
+ assert set(new_ports.keys()) == {"http"}
+ assert len(new_ports["http"]) == 1
+ assert isinstance(new_ports["http"][0], int)
+
+
+def test_ports_explicit():
+ with config.ConfigBuilder(logger,
+ ports={"http": [1001]},
+ ssl={"type": "none"}) as c:
+ ports = c.ports
+ assert set(ports.keys()) == {"http"}
+ assert ports["http"] == [1001]
+
+
+def test_ports_no_ssl():
+ with config.ConfigBuilder(logger,
+ ports={"http": [1001], "https": [1002], "ws": [1003], "wss": [1004]},
+ ssl={"type": "none"}) as c:
+ ports = c.ports
+ assert set(ports.keys()) == {"http", "ws"}
+ assert ports["http"] == [1001]
+ assert ports["ws"] == [1003]
+
+
+@pytest.mark.skipif(find_executable("openssl") is None,
+ reason="requires OpenSSL")
+def test_ports_openssl():
+ with config.ConfigBuilder(logger,
+ ports={"http": [1001], "https": [1002], "ws": [1003], "wss": [1004]},
+ ssl={"type": "openssl"}) as c:
+ ports = c.ports
+ assert set(ports.keys()) == {"http", "https", "ws", "wss"}
+ assert ports["http"] == [1001]
+ assert ports["https"] == [1002]
+ assert ports["ws"] == [1003]
+ assert ports["wss"] == [1004]
+
+
+def test_init_doc_root():
+ with config.ConfigBuilder(logger, doc_root="/") as c:
+ assert c.doc_root == "/"
+
+
+def test_set_doc_root():
+ cb = config.ConfigBuilder(logger)
+ cb.doc_root = "/"
+ with cb as c:
+ assert c.doc_root == "/"
+
+
+def test_server_host_from_browser_host():
+ with config.ConfigBuilder(logger, browser_host="foo.bar") as c:
+ assert c.server_host == "foo.bar"
+
+
+def test_init_server_host():
+ with config.ConfigBuilder(logger, server_host="foo.bar") as c:
+ assert c.browser_host == "localhost" # check this hasn't changed
+ assert c.server_host == "foo.bar"
+
+
+def test_set_server_host():
+ cb = config.ConfigBuilder(logger)
+ cb.server_host = "/"
+ with cb as c:
+ assert c.browser_host == "localhost" # check this hasn't changed
+ assert c.server_host == "/"
+
+
+def test_domains():
+ with config.ConfigBuilder(logger,
+ browser_host="foo.bar",
+ alternate_hosts={"alt": "foo2.bar"},
+ subdomains={"a", "b"},
+ not_subdomains={"x", "y"}) as c:
+ assert c.domains == {
+ "": {
+ "": "foo.bar",
+ "a": "a.foo.bar",
+ "b": "b.foo.bar",
+ },
+ "alt": {
+ "": "foo2.bar",
+ "a": "a.foo2.bar",
+ "b": "b.foo2.bar",
+ },
+ }
+
+
+def test_not_domains():
+ with config.ConfigBuilder(logger,
+ browser_host="foo.bar",
+ alternate_hosts={"alt": "foo2.bar"},
+ subdomains={"a", "b"},
+ not_subdomains={"x", "y"}) as c:
+ not_domains = c.not_domains
+ assert not_domains == {
+ "": {
+ "x": "x.foo.bar",
+ "y": "y.foo.bar",
+ },
+ "alt": {
+ "x": "x.foo2.bar",
+ "y": "y.foo2.bar",
+ },
+ }
+
+
+def test_domains_not_domains_intersection():
+ with config.ConfigBuilder(logger,
+ browser_host="foo.bar",
+ alternate_hosts={"alt": "foo2.bar"},
+ subdomains={"a", "b"},
+ not_subdomains={"x", "y"}) as c:
+ domains = c.domains
+ not_domains = c.not_domains
+ assert len(set(domains.keys()) ^ set(not_domains.keys())) == 0
+ for host in domains.keys():
+ host_domains = domains[host]
+ host_not_domains = not_domains[host]
+ assert len(set(host_domains.keys()) & set(host_not_domains.keys())) == 0
+ assert len(set(host_domains.values()) & set(host_not_domains.values())) == 0
+
+
+def test_all_domains():
+ with config.ConfigBuilder(logger,
+ browser_host="foo.bar",
+ alternate_hosts={"alt": "foo2.bar"},
+ subdomains={"a", "b"},
+ not_subdomains={"x", "y"}) as c:
+ all_domains = c.all_domains
+ assert all_domains == {
+ "": {
+ "": "foo.bar",
+ "a": "a.foo.bar",
+ "b": "b.foo.bar",
+ "x": "x.foo.bar",
+ "y": "y.foo.bar",
+ },
+ "alt": {
+ "": "foo2.bar",
+ "a": "a.foo2.bar",
+ "b": "b.foo2.bar",
+ "x": "x.foo2.bar",
+ "y": "y.foo2.bar",
+ },
+ }
+
+
+def test_domains_set():
+ with config.ConfigBuilder(logger,
+ browser_host="foo.bar",
+ alternate_hosts={"alt": "foo2.bar"},
+ subdomains={"a", "b"},
+ not_subdomains={"x", "y"}) as c:
+ domains_set = c.domains_set
+ assert domains_set == {
+ "foo.bar",
+ "a.foo.bar",
+ "b.foo.bar",
+ "foo2.bar",
+ "a.foo2.bar",
+ "b.foo2.bar",
+ }
+
+
+def test_not_domains_set():
+ with config.ConfigBuilder(logger,
+ browser_host="foo.bar",
+ alternate_hosts={"alt": "foo2.bar"},
+ subdomains={"a", "b"},
+ not_subdomains={"x", "y"}) as c:
+ not_domains_set = c.not_domains_set
+ assert not_domains_set == {
+ "x.foo.bar",
+ "y.foo.bar",
+ "x.foo2.bar",
+ "y.foo2.bar",
+ }
+
+
+def test_all_domains_set():
+ with config.ConfigBuilder(logger,
+ browser_host="foo.bar",
+ alternate_hosts={"alt": "foo2.bar"},
+ subdomains={"a", "b"},
+ not_subdomains={"x", "y"}) as c:
+ all_domains_set = c.all_domains_set
+ assert all_domains_set == {
+ "foo.bar",
+ "a.foo.bar",
+ "b.foo.bar",
+ "x.foo.bar",
+ "y.foo.bar",
+ "foo2.bar",
+ "a.foo2.bar",
+ "b.foo2.bar",
+ "x.foo2.bar",
+ "y.foo2.bar",
+ }
+
+
+def test_ssl_env_none():
+ with config.ConfigBuilder(logger, ssl={"type": "none"}) as c:
+ assert c.ssl_config is None
+
+
+def test_ssl_env_openssl():
+ # TODO: this currently actually tries to start OpenSSL, which isn't ideal
+ # with config.ConfigBuilder(ssl={"type": "openssl", "openssl": {"openssl_binary": "foobar"}}) as c:
+ # assert c.ssl_env is not None
+ # assert c.ssl_env.ssl_enabled is True
+ # assert c.ssl_env.binary == "foobar"
+ pass
+
+
+def test_ssl_env_bogus():
+ with pytest.raises(ValueError):
+ with config.ConfigBuilder(logger, ssl={"type": "foobar"}):
+ pass
+
+
+def test_pickle():
+ # Ensure that the config object can be pickled
+ with config.ConfigBuilder(logger) as c:
+ pickle.dumps(c)
diff --git a/testing/web-platform/tests/tools/wptserve/tests/test_replacement_tokenizer.py b/testing/web-platform/tests/tools/wptserve/tests/test_replacement_tokenizer.py
new file mode 100644
index 0000000000..6a3c563c8c
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/test_replacement_tokenizer.py
@@ -0,0 +1,38 @@
+import pytest
+
+from wptserve.pipes import ReplacementTokenizer
+
+@pytest.mark.parametrize(
+ "content,expected",
+ [
+ [b"aaa", [('ident', 'aaa')]],
+ [b"bbb()", [('ident', 'bbb'), ('arguments', [])]],
+ [b"bcd(uvw, xyz)", [('ident', 'bcd'), ('arguments', ['uvw', 'xyz'])]],
+ [b"$ccc:ddd", [('var', '$ccc'), ('ident', 'ddd')]],
+ [b"$eee", [('ident', '$eee')]],
+ [b"fff[0]", [('ident', 'fff'), ('index', 0)]],
+ [b"ggg[hhh]", [('ident', 'ggg'), ('index', 'hhh')]],
+ [b"[iii]", [('index', 'iii')]],
+ [b"jjj['kkk']", [('ident', 'jjj'), ('index', "'kkk'")]],
+ [b"lll[]", [('ident', 'lll'), ('index', "")]],
+ [b"111", [('ident', '111')]],
+ [b"$111", [('ident', '$111')]],
+ ]
+)
+def test_tokenizer(content, expected):
+ tokenizer = ReplacementTokenizer()
+ tokens = tokenizer.tokenize(content)
+ assert expected == tokens
+
+
+@pytest.mark.parametrize(
+ "content,expected",
+ [
+ [b"/", []],
+ [b"$aaa: BBB", [('var', '$aaa')]],
+ ]
+)
+def test_tokenizer_errors(content, expected):
+ tokenizer = ReplacementTokenizer()
+ tokens = tokenizer.tokenize(content)
+ assert expected == tokens
diff --git a/testing/web-platform/tests/tools/wptserve/tests/test_request.py b/testing/web-platform/tests/tools/wptserve/tests/test_request.py
new file mode 100644
index 0000000000..a2161e9646
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/test_request.py
@@ -0,0 +1,104 @@
+from unittest import mock
+
+from wptserve.request import Request, RequestHeaders, MultiDict
+
+
+class MockHTTPMessage(dict):
+ """A minimum (and not completely correctly) mock of HTTPMessage for testing.
+
+ Constructing HTTPMessage is annoying and different in Python 2 and 3. This
+ only implements the parts used by RequestHeaders.
+
+ Requirements for construction:
+ * Keys are header names and MUST be lower-case.
+ * Values are lists of header values (even if there's only one).
+ * Keys and values should be native strings to match stdlib's behaviours.
+ """
+ def __getitem__(self, key):
+ assert isinstance(key, str)
+ values = dict.__getitem__(self, key.lower())
+ assert isinstance(values, list)
+ return values[0]
+
+ def get(self, key, default=None):
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def getallmatchingheaders(self, key):
+ values = dict.__getitem__(self, key.lower())
+ return [f"{key}: {v}\n" for v in values]
+
+
+def test_request_headers_get():
+ raw_headers = MockHTTPMessage({
+ 'x-foo': ['foo'],
+ 'x-bar': ['bar1', 'bar2'],
+ })
+ headers = RequestHeaders(raw_headers)
+ assert headers['x-foo'] == b'foo'
+ assert headers['X-Bar'] == b'bar1, bar2'
+ assert headers.get('x-bar') == b'bar1, bar2'
+
+
+def test_request_headers_encoding():
+ raw_headers = MockHTTPMessage({
+ 'x-foo': ['foo'],
+ 'x-bar': ['bar1', 'bar2'],
+ })
+ headers = RequestHeaders(raw_headers)
+ assert isinstance(headers['x-foo'], bytes)
+ assert isinstance(headers['x-bar'], bytes)
+ assert isinstance(headers.get_list('x-bar')[0], bytes)
+
+
+def test_request_url_from_server_address():
+ request_handler = mock.Mock()
+ request_handler.server.scheme = 'http'
+ request_handler.server.server_address = ('localhost', '8000')
+ request_handler.path = '/demo'
+ request_handler.headers = MockHTTPMessage()
+
+ request = Request(request_handler)
+ assert request.url == 'http://localhost:8000/demo'
+ assert isinstance(request.url, str)
+
+
+def test_request_url_from_host_header():
+ request_handler = mock.Mock()
+ request_handler.server.scheme = 'http'
+ request_handler.server.server_address = ('localhost', '8000')
+ request_handler.path = '/demo'
+ request_handler.headers = MockHTTPMessage({'host': ['web-platform.test:8001']})
+
+ request = Request(request_handler)
+ assert request.url == 'http://web-platform.test:8001/demo'
+ assert isinstance(request.url, str)
+
+
+def test_multidict():
+ m = MultiDict()
+ m["foo"] = "bar"
+ m["bar"] = "baz"
+ m.add("foo", "baz")
+ m.add("baz", "qux")
+
+ assert m["foo"] == "bar"
+ assert m.get("foo") == "bar"
+ assert m["bar"] == "baz"
+ assert m.get("bar") == "baz"
+ assert m["baz"] == "qux"
+ assert m.get("baz") == "qux"
+
+ assert m.first("foo") == "bar"
+ assert m.last("foo") == "baz"
+ assert m.get_list("foo") == ["bar", "baz"]
+ assert m.get_list("non_existent") == []
+
+ assert m.get("non_existent") is None
+ try:
+ m["non_existent"]
+ assert False, "An exception should be raised"
+ except KeyError:
+ pass
diff --git a/testing/web-platform/tests/tools/wptserve/tests/test_response.py b/testing/web-platform/tests/tools/wptserve/tests/test_response.py
new file mode 100644
index 0000000000..d10554b4df
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/test_response.py
@@ -0,0 +1,32 @@
+from io import BytesIO
+from unittest import mock
+
+from wptserve.response import Response
+
+
+def test_response_status():
+ cases = [200, (200, b'OK'), (200, 'OK'), ('200', 'OK')]
+
+ for case in cases:
+ handler = mock.Mock()
+ handler.wfile = BytesIO()
+ request = mock.Mock()
+ request.protocol_version = 'HTTP/1.1'
+ response = Response(handler, request)
+
+ response.status = case
+ expected = case if isinstance(case, tuple) else (case, None)
+ if expected[0] == '200':
+ expected = (200, expected[1])
+ assert response.status == expected
+ response.writer.write_status(*response.status)
+ assert handler.wfile.getvalue() == b'HTTP/1.1 200 OK\r\n'
+
+
+def test_response_status_not_string():
+ # This behaviour is not documented but kept for backward compatibility.
+ handler = mock.Mock()
+ request = mock.Mock()
+ response = Response(handler, request)
+ response.status = (200, 100)
+ assert response.status == (200, '100')
diff --git a/testing/web-platform/tests/tools/wptserve/tests/test_stash.py b/testing/web-platform/tests/tools/wptserve/tests/test_stash.py
new file mode 100644
index 0000000000..4157db5726
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/tests/test_stash.py
@@ -0,0 +1,146 @@
+import multiprocessing
+import threading
+import sys
+
+from multiprocessing.managers import BaseManager
+
+import pytest
+
+Stash = pytest.importorskip("wptserve.stash").Stash
+
+@pytest.fixture()
+def add_cleanup():
+ fns = []
+
+ def add(fn):
+ fns.append(fn)
+
+ yield add
+
+ for fn in fns:
+ fn()
+
+
+def run(process_queue, request_lock, response_lock):
+ """Create two Stash instances in parallel threads. Use the provided locks
+ to ensure the first thread is actively establishing an interprocess
+ communication channel at the moment the second thread executes."""
+
+ def target(thread_queue):
+ stash = Stash("/", ("localhost", 4543), b"some key")
+
+ # The `lock` property of the Stash instance should always be set
+ # immediately following initialization. These values are asserted in
+ # the active test.
+ thread_queue.put(stash.lock is None)
+
+ thread_queue = multiprocessing.Queue()
+ first = threading.Thread(target=target, args=(thread_queue,))
+ second = threading.Thread(target=target, args=(thread_queue,))
+
+ request_lock.acquire()
+ response_lock.acquire()
+ first.start()
+
+ request_lock.acquire()
+
+ # At this moment, the `first` thread is waiting for a proxied object.
+ # Create a second thread in order to inspect the behavior of the Stash
+ # constructor at this moment.
+
+ second.start()
+
+ # Allow the `first` thread to proceed
+
+ response_lock.release()
+
+    # Wait for both threads to complete and report their state to the test
+ process_queue.put(thread_queue.get())
+ process_queue.put(thread_queue.get())
+
+
+class SlowLock(BaseManager):
+ # This can only be used in test_delayed_lock since that test modifies the
+ # class body, but it has to be a global for multiprocessing
+ pass
+
+
+@pytest.mark.xfail(sys.platform == "win32" or
+ multiprocessing.get_start_method() == "spawn",
+ reason="https://github.com/web-platform-tests/wpt/issues/16938")
+def test_delayed_lock(add_cleanup):
+ """Ensure that delays in proxied Lock retrieval do not interfere with
+ initialization in parallel threads."""
+
+ request_lock = multiprocessing.Lock()
+ response_lock = multiprocessing.Lock()
+
+ queue = multiprocessing.Queue()
+
+ def mutex_lock_request():
+ """This request handler allows the caller to delay execution of a
+ thread which has requested a proxied representation of the `lock`
+ property, simulating a "slow" interprocess communication channel."""
+
+ request_lock.release()
+ response_lock.acquire()
+ return threading.Lock()
+
+ SlowLock.register("get_dict", callable=lambda: {})
+ SlowLock.register("Lock", callable=mutex_lock_request)
+
+ slowlock = SlowLock(("localhost", 4543), b"some key")
+ slowlock.start()
+ add_cleanup(lambda: slowlock.shutdown())
+
+ parallel = multiprocessing.Process(target=run,
+ args=(queue, request_lock, response_lock))
+ parallel.start()
+ add_cleanup(lambda: parallel.terminate())
+
+ assert [queue.get(), queue.get()] == [False, False], (
+ "both instances had valid locks")
+
+
+class SlowDict(BaseManager):
+ # This can only be used in test_delayed_dict since that test modifies the
+ # class body, but it has to be a global for multiprocessing
+ pass
+
+
+@pytest.mark.xfail(sys.platform == "win32" or
+ multiprocessing.get_start_method() == "spawn",
+ reason="https://github.com/web-platform-tests/wpt/issues/16938")
+def test_delayed_dict(add_cleanup):
+ """Ensure that delays in proxied `dict` retrieval do not interfere with
+ initialization in parallel threads."""
+
+ request_lock = multiprocessing.Lock()
+ response_lock = multiprocessing.Lock()
+
+ queue = multiprocessing.Queue()
+
+ def mutex_dict_request():
+ """This request handler allows the caller to delay execution of a
+ thread which has requested a proxied representation of the `get_dict`
+ property, simulating a "slow" interprocess communication channel."""
+ request_lock.release()
+ response_lock.acquire()
+ return {}
+
+ SlowDict.register("get_dict", callable=mutex_dict_request)
+ SlowDict.register("Lock", callable=lambda: threading.Lock())
+
+ slowdict = SlowDict(("localhost", 4543), b"some key")
+ slowdict.start()
+ add_cleanup(lambda: slowdict.shutdown())
+
+ parallel = multiprocessing.Process(target=run,
+ args=(queue, request_lock, response_lock))
+ parallel.start()
+ add_cleanup(lambda: parallel.terminate())
+
+ assert [queue.get(), queue.get()] == [False, False], (
+ "both instances had valid locks")
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/__init__.py b/testing/web-platform/tests/tools/wptserve/wptserve/__init__.py
new file mode 100644
index 0000000000..a286bfe0b3
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/__init__.py
@@ -0,0 +1,3 @@
+from .server import WebTestHttpd, WebTestServer, Router # noqa: F401
+from .request import Request # noqa: F401
+from .response import Response # noqa: F401
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/config.py b/testing/web-platform/tests/tools/wptserve/wptserve/config.py
new file mode 100644
index 0000000000..b87795430f
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/config.py
@@ -0,0 +1,328 @@
+# mypy: allow-untyped-defs
+
+import copy
+import os
+from collections import defaultdict
+from typing import Any, Mapping
+
+from . import sslutils
+from .utils import get_port
+
+
+_renamed_props = {
+ "host": "browser_host",
+ "bind_hostname": "bind_address",
+ "external_host": "server_host",
+ "host_ip": "server_host",
+}
+
+
+def _merge_dict(base_dict, override_dict):
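+    # Note: keys present only in override_dict are ignored; only values for keys
+    # already present in base_dict are overridden (recursively for nested dicts).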
+ rv = base_dict.copy()
+ for key, value in base_dict.items():
+ if key in override_dict:
+ if isinstance(value, dict):
+ rv[key] = _merge_dict(value, override_dict[key])
+ else:
+ rv[key] = override_dict[key]
+ return rv
+
+
+class Config(Mapping[str, Any]):
+ """wptserve configuration data
+
+ Immutable configuration that's safe to be passed between processes.
+
+ Inherits from Mapping for backwards compatibility with the old dict-based config
+
+ :param data: - Extra configuration data
+ """
+ def __init__(self, data):
+ for name in data.keys():
+ if name.startswith("_"):
+ raise ValueError("Invalid configuration key %s" % name)
+ self.__dict__.update(data)
+
+ def __str__(self):
+ return str(self.__dict__)
+
+ def __setattr__(self, key, value):
+ raise ValueError("Config is immutable")
+
+ def __setitem__(self, key, value):
+ raise ValueError("Config is immutable")
+
+ def __getitem__(self, key):
+ try:
+ return getattr(self, key)
+ except AttributeError:
+ raise ValueError
+
+ def __contains__(self, key):
+ return key in self.__dict__
+
+ def __iter__(self):
+ return (x for x in self.__dict__ if not x.startswith("_"))
+
+ def __len__(self):
+ return len([item for item in self])
+
+ def as_dict(self):
+ return json_types(self.__dict__, skip={"_logger"})
+
+
+def json_types(obj, skip=None):
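+    # Recursively convert obj into JSON-serialisable types; keys listed in `skip`
+    # are dropped at the top level, and unsupported values raise ValueError.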
+ if skip is None:
+ skip = set()
+ if isinstance(obj, dict):
+ return {key: json_types(value) for key, value in obj.items() if key not in skip}
+ if (isinstance(obj, str) or
+ isinstance(obj, int) or
+ isinstance(obj, float) or
+ isinstance(obj, bool) or
+ obj is None):
+ return obj
+ if isinstance(obj, list) or hasattr(obj, "__iter__"):
+ return [json_types(value) for value in obj]
+ raise ValueError
+
+
+class ConfigBuilder:
+ """Builder object for setting the wptserve config.
+
+ Configuration can be passed in as a dictionary to the constructor, or
+ set via attributes after construction. Configuration options must match
+ the keys on the _default class property.
+
+ The generated configuration is obtained by using the builder
+ object as a context manager; this returns a Config object
+ containing immutable configuration that may be shared between
+ threads and processes. In general the configuration is only valid
+ for the context used to obtain it.
+
+ with ConfigBuilder() as config:
+ # Use the configuration
+        print(config.browser_host)
+
+ The properties on the final configuration include those explicitly
+ supplied and computed properties. The computed properties are
+ defined by the computed_properties attribute on the class. This
+ is a list of property names, each corresponding to a _get_<name>
+ method on the class. These methods are called in the order defined
+ in computed_properties and are passed a single argument, a
+ dictionary containing the current set of properties. Thus computed
+ properties later in the list may depend on the value of earlier
+ ones.
+
+
+ :param logger: - A logger object. This is used for logging during
+ the creation of the configuration, but isn't
+ part of the configuration
+ :param subdomains: - A set of valid subdomains to include in the
+ configuration.
+ :param not_subdomains: - A set of invalid subdomains to include in
+ the configuration.
+ :param config_cls: - A class to use for the configuration. Defaults
+ to default_config_cls
+ """
+
+ _default = {
+ "browser_host": "localhost",
+ "alternate_hosts": {},
+ "doc_root": os.path.dirname("__file__"),
+ "server_host": None,
+ "ports": {"http": [8000]},
+ "check_subdomains": True,
+ "log_level": "debug",
+ "bind_address": True,
+ "ssl": {
+ "type": "none",
+ "encrypt_after_connect": False,
+ "none": {},
+ "openssl": {
+ "openssl_binary": "openssl",
+ "base_path": "_certs",
+ "password": "web-platform-tests",
+ "force_regenerate": False,
+ "duration": 30,
+ "base_conf_path": None
+ },
+ "pregenerated": {
+ "host_key_path": None,
+ "host_cert_path": None,
+ },
+ },
+ "aliases": []
+ }
+ default_config_cls = Config
+
+ # Configuration properties that are computed. Each corresponds to a method
+ # _get_foo, which is called with the current data dictionary. The properties
+ # are computed in the order specified in the list.
+ computed_properties = ["log_level",
+ "paths",
+ "server_host",
+ "ports",
+ "domains",
+ "not_domains",
+ "all_domains",
+ "domains_set",
+ "not_domains_set",
+ "all_domains_set",
+ "ssl_config"]
+
+ def __init__(self,
+ logger,
+ subdomains=set(),
+ not_subdomains=set(),
+ config_cls=None,
+ **kwargs):
+
+ self._logger = logger
+ self._data = self._default.copy()
+ self._ssl_env = None
+
+ self._config_cls = config_cls or self.default_config_cls
+
+ for k, v in self._default.items():
+ self._data[k] = kwargs.pop(k, v)
+
+ self._data["subdomains"] = subdomains
+ self._data["not_subdomains"] = not_subdomains
+
+ for k, new_k in _renamed_props.items():
+ if k in kwargs:
+ logger.warning(
+ "%s in config is deprecated; use %s instead" % (
+ k,
+ new_k
+ )
+ )
+ self._data[new_k] = kwargs.pop(k)
+
+ if kwargs:
+ raise TypeError("__init__() got unexpected keyword arguments %r" % (tuple(kwargs),))
+
+ def __setattr__(self, key, value):
+ if not key[0] == "_":
+ self._data[key] = value
+ else:
+ self.__dict__[key] = value
+
+ def update(self, override):
+ """Load an overrides dict to override config values"""
+ override = override.copy()
+
+ for k in self._default:
+ if k in override:
+ self._set_override(k, override.pop(k))
+
+ for k, new_k in _renamed_props.items():
+ if k in override:
+ self._logger.warning(
+ "%s in config is deprecated; use %s instead" % (
+ k,
+ new_k
+ )
+ )
+ self._set_override(new_k, override.pop(k))
+
+ if override:
+ k = next(iter(override))
+ raise KeyError("unknown config override '%s'" % k)
+
+ def _set_override(self, k, v):
+ old_v = self._data[k]
+ if isinstance(old_v, dict):
+ self._data[k] = _merge_dict(old_v, v)
+ else:
+ self._data[k] = v
+
+ def __enter__(self):
+ if self._ssl_env is not None:
+ raise ValueError("Tried to re-enter configuration")
+ data = self._data.copy()
+ prefix = "_get_"
+ for key in self.computed_properties:
+ data[key] = getattr(self, prefix + key)(data)
+ return self._config_cls(data)
+
+ def __exit__(self, *args):
+ self._ssl_env.__exit__(*args)
+ self._ssl_env = None
+
+ def _get_log_level(self, data):
+ return data["log_level"].upper()
+
+ def _get_paths(self, data):
+ return {"doc_root": data["doc_root"]}
+
+ def _get_server_host(self, data):
+ return data["server_host"] if data.get("server_host") is not None else data["browser_host"]
+
+ def _get_ports(self, data):
+ new_ports = defaultdict(list)
+ for scheme, ports in data["ports"].items():
+ if scheme in ["wss", "https"] and not sslutils.get_cls(data["ssl"]["type"]).ssl_enabled:
+ continue
+ for i, port in enumerate(ports):
+ real_port = get_port("") if port == "auto" else port
+ new_ports[scheme].append(real_port)
+ return new_ports
+
+ def _get_domains(self, data):
+ hosts = data["alternate_hosts"].copy()
+ assert "" not in hosts
+ hosts[""] = data["browser_host"]
+
+ rv = {}
+ for name, host in hosts.items():
+ rv[name] = {subdomain: (subdomain.encode("idna").decode("ascii") + "." + host)
+ for subdomain in data["subdomains"]}
+ rv[name][""] = host
+ return rv
+
+ def _get_not_domains(self, data):
+ hosts = data["alternate_hosts"].copy()
+ assert "" not in hosts
+ hosts[""] = data["browser_host"]
+
+ rv = {}
+ for name, host in hosts.items():
+ rv[name] = {subdomain: (subdomain.encode("idna").decode("ascii") + "." + host)
+ for subdomain in data["not_subdomains"]}
+ return rv
+
+ def _get_all_domains(self, data):
+ rv = copy.deepcopy(data["domains"])
+ nd = data["not_domains"]
+ for host in rv:
+ rv[host].update(nd[host])
+ return rv
+
+ def _get_domains_set(self, data):
+ return {domain
+ for per_host_domains in data["domains"].values()
+ for domain in per_host_domains.values()}
+
+ def _get_not_domains_set(self, data):
+ return {domain
+ for per_host_domains in data["not_domains"].values()
+ for domain in per_host_domains.values()}
+
+ def _get_all_domains_set(self, data):
+ return data["domains_set"] | data["not_domains_set"]
+
+ def _get_ssl_config(self, data):
+ ssl_type = data["ssl"]["type"]
+ ssl_cls = sslutils.get_cls(ssl_type)
+ kwargs = data["ssl"].get(ssl_type, {})
+ self._ssl_env = ssl_cls(self._logger, **kwargs)
+ self._ssl_env.__enter__()
+ if self._ssl_env.ssl_enabled:
+ key_path, cert_path = self._ssl_env.host_cert_path(data["domains_set"])
+ ca_cert_path = self._ssl_env.ca_cert_path(data["domains_set"])
+ return {"key_path": key_path,
+ "ca_cert_path": ca_cert_path,
+ "cert_path": cert_path,
+ "encrypt_after_connect": data["ssl"].get("encrypt_after_connect", False)}
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/constants.py b/testing/web-platform/tests/tools/wptserve/wptserve/constants.py
new file mode 100644
index 0000000000..584f2cc1c7
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/constants.py
@@ -0,0 +1,98 @@
+from . import utils
+
+content_types = utils.invert_dict({
+ "application/json": ["json"],
+ "application/wasm": ["wasm"],
+ "application/xhtml+xml": ["xht", "xhtm", "xhtml"],
+ "application/xml": ["xml"],
+ "application/x-xpinstall": ["xpi"],
+ "audio/mp4": ["m4a"],
+ "audio/mpeg": ["mp3"],
+ "audio/ogg": ["oga"],
+ "audio/webm": ["weba"],
+ "audio/x-wav": ["wav"],
+ "image/avif": ["avif"],
+ "image/bmp": ["bmp"],
+ "image/gif": ["gif"],
+ "image/jpeg": ["jpg", "jpeg"],
+ "image/png": ["png"],
+ "image/svg+xml": ["svg"],
+ "text/cache-manifest": ["manifest"],
+ "text/css": ["css"],
+ "text/event-stream": ["event_stream"],
+ "text/html": ["htm", "html"],
+ "text/javascript": ["js", "mjs"],
+ "text/plain": ["txt", "md"],
+ "text/vtt": ["vtt"],
+ "video/mp4": ["mp4", "m4v"],
+ "video/ogg": ["ogg", "ogv"],
+ "video/webm": ["webm"],
+})
+
+response_codes = {
+ 100: ('Continue', 'Request received, please continue'),
+ 101: ('Switching Protocols',
+ 'Switching to new protocol; obey Upgrade header'),
+
+ 200: ('OK', 'Request fulfilled, document follows'),
+ 201: ('Created', 'Document created, URL follows'),
+ 202: ('Accepted',
+ 'Request accepted, processing continues off-line'),
+ 203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
+ 204: ('No Content', 'Request fulfilled, nothing follows'),
+ 205: ('Reset Content', 'Clear input form for further input.'),
+ 206: ('Partial Content', 'Partial content follows.'),
+
+ 300: ('Multiple Choices',
+ 'Object has several resources -- see URI list'),
+ 301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
+ 302: ('Found', 'Object moved temporarily -- see URI list'),
+ 303: ('See Other', 'Object moved -- see Method and URL list'),
+ 304: ('Not Modified',
+ 'Document has not changed since given time'),
+ 305: ('Use Proxy',
+ 'You must use proxy specified in Location to access this '
+ 'resource.'),
+ 307: ('Temporary Redirect',
+ 'Object moved temporarily -- see URI list'),
+
+ 400: ('Bad Request',
+ 'Bad request syntax or unsupported method'),
+ 401: ('Unauthorized',
+ 'No permission -- see authorization schemes'),
+ 402: ('Payment Required',
+ 'No payment -- see charging schemes'),
+ 403: ('Forbidden',
+ 'Request forbidden -- authorization will not help'),
+ 404: ('Not Found', 'Nothing matches the given URI'),
+ 405: ('Method Not Allowed',
+ 'Specified method is invalid for this resource.'),
+ 406: ('Not Acceptable', 'URI not available in preferred format.'),
+ 407: ('Proxy Authentication Required', 'You must authenticate with '
+ 'this proxy before proceeding.'),
+ 408: ('Request Timeout', 'Request timed out; try again later.'),
+ 409: ('Conflict', 'Request conflict.'),
+ 410: ('Gone',
+ 'URI no longer exists and has been permanently removed.'),
+ 411: ('Length Required', 'Client must specify Content-Length.'),
+ 412: ('Precondition Failed', 'Precondition in headers is false.'),
+ 413: ('Request Entity Too Large', 'Entity is too large.'),
+ 414: ('Request-URI Too Long', 'URI is too long.'),
+ 415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
+ 416: ('Requested Range Not Satisfiable',
+ 'Cannot satisfy request range.'),
+ 417: ('Expectation Failed',
+ 'Expect condition could not be satisfied.'),
+
+ 500: ('Internal Server Error', 'Server got itself in trouble'),
+ 501: ('Not Implemented',
+ 'Server does not support this operation'),
+ 502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
+ 503: ('Service Unavailable',
+ 'The server cannot process the request due to a high load'),
+ 504: ('Gateway Timeout',
+ 'The gateway server did not receive a timely response'),
+ 505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
+}
+
+h2_headers = ['method', 'scheme', 'host', 'path', 'authority', 'status']
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/handlers.py b/testing/web-platform/tests/tools/wptserve/wptserve/handlers.py
new file mode 100644
index 0000000000..bae6a8f137
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/handlers.py
@@ -0,0 +1,512 @@
+# mypy: allow-untyped-defs
+
+import json
+import os
+import traceback
+from collections import defaultdict
+
+from urllib.parse import quote, unquote, urljoin
+
+from .constants import content_types
+from .pipes import Pipeline, template
+from .ranges import RangeParser
+from .request import Authentication
+from .response import MultipartContent
+from .utils import HTTPException
+
+from html import escape
+
+__all__ = ["file_handler", "python_script_handler",
+ "FunctionHandler", "handler", "json_handler",
+ "as_is_handler", "ErrorHandler", "BasicAuthHandler"]
+
+
+def guess_content_type(path):
+ ext = os.path.splitext(path)[1].lstrip(".")
+ if ext in content_types:
+ return content_types[ext]
+
+ return "application/octet-stream"
+
+
+def filesystem_path(base_path, request, url_base="/"):
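+    # Map the request's URL path onto a file under base_path (defaulting to the
+    # request's doc_root), rejecting anything that would escape the document root.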
+ if base_path is None:
+ base_path = request.doc_root
+
+ path = unquote(request.url_parts.path)
+
+ if path.startswith(url_base):
+ path = path[len(url_base):]
+
+ if ".." in path:
+ raise HTTPException(404)
+
+ new_path = os.path.join(base_path, path)
+
+    # An absolute path here would make os.path.join discard base_path, allowing
+    # access outside the root directory, so reject anything that escapes it.
+ if not new_path.startswith(base_path):
+ raise HTTPException(404)
+
+ return new_path
+
+
+class DirectoryHandler:
+ def __init__(self, base_path=None, url_base="/"):
+ self.base_path = base_path
+ self.url_base = url_base
+
+ def __repr__(self):
+ return "<%s base_path:%s url_base:%s>" % (self.__class__.__name__, self.base_path, self.url_base)
+
+ def __call__(self, request, response):
+ url_path = request.url_parts.path
+
+ if not url_path.endswith("/"):
+ response.status = 301
+ response.headers = [("Location", "%s/" % request.url)]
+ return
+
+ path = filesystem_path(self.base_path, request, self.url_base)
+
+ assert os.path.isdir(path)
+
+ response.headers = [("Content-Type", "text/html")]
+ response.content = """<!doctype html>
+<meta name="viewport" content="width=device-width">
+<title>Directory listing for %(path)s</title>
+<h1>Directory listing for %(path)s</h1>
+<ul>
+%(items)s
+</ul>
+""" % {"path": escape(url_path),
+ "items": "\n".join(self.list_items(url_path, path))} # noqa: E122
+
+ def list_items(self, base_path, path):
+ assert base_path.endswith("/")
+
+ # TODO: this won't actually list all routes, only the
+ # ones that correspond to a real filesystem path. It's
+ # not possible to list every route that will match
+ # something, but it should be possible to at least list the
+ # statically defined ones
+
+ if base_path != "/":
+ link = urljoin(base_path, "..")
+ yield ("""<li class="dir"><a href="%(link)s">%(name)s</a></li>""" %
+ {"link": link, "name": ".."})
+ items = []
+ prev_item = None
+ # This ensures that .headers always sorts after the file it provides the headers for. E.g.,
+ # if we have x, x-y, and x.headers, the order will be x, x.headers, and then x-y.
+ for item in sorted(os.listdir(path), key=lambda x: (x[:-len(".headers")], x) if x.endswith(".headers") else (x, x)):
+ if prev_item and prev_item + ".headers" == item:
+ items[-1][1] = item
+ prev_item = None
+ continue
+ items.append([item, None])
+ prev_item = item
+ for item, dot_headers in items:
+ link = escape(quote(item))
+ dot_headers_markup = ""
+ if dot_headers is not None:
+ dot_headers_markup = (""" (<a href="%(link)s">.headers</a>)""" %
+ {"link": escape(quote(dot_headers))})
+ if os.path.isdir(os.path.join(path, item)):
+ link += "/"
+ class_ = "dir"
+ else:
+ class_ = "file"
+ yield ("""<li class="%(class)s"><a href="%(link)s">%(name)s</a>%(headers)s</li>""" %
+ {"link": link, "name": escape(item), "class": class_,
+ "headers": dot_headers_markup})
+
+
+def parse_qs(qs):
+ """Parse a query string given as a string argument (data of type
+ application/x-www-form-urlencoded). Data are returned as a dictionary. The
+ dictionary keys are the unique query variable names and the values are
+ lists of values for each name.
+
+ This implementation is used instead of Python's built-in `parse_qs` method
+ in order to support the semicolon character (which the built-in method
+ interprets as a parameter delimiter)."""
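+    # Illustrative example: parse_qs("a=1;b=2&c=3") returns
+    # {"a": ["1;b=2"], "c": ["3"]} -- the ";" stays part of the value.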
+ pairs = [item.split("=", 1) for item in qs.split('&') if item]
+ rv = defaultdict(list)
+ for pair in pairs:
+ if len(pair) == 1 or len(pair[1]) == 0:
+ continue
+ name = unquote(pair[0].replace('+', ' '))
+ value = unquote(pair[1].replace('+', ' '))
+ rv[name].append(value)
+ return dict(rv)
+
+
+def wrap_pipeline(path, request, response):
+ """Applies pipelines to a response.
+
+ Pipelines are specified in the filename (.sub.) or the query param (?pipe).
+ """
+ query = parse_qs(request.url_parts.query)
+ pipe_string = ""
+
+ if ".sub." in path:
+ ml_extensions = {".html", ".htm", ".xht", ".xhtml", ".xml", ".svg"}
+ escape_type = "html" if os.path.splitext(path)[1] in ml_extensions else "none"
+ pipe_string = "sub(%s)" % escape_type
+
+ if "pipe" in query:
+ if pipe_string:
+ pipe_string += "|"
+
+ pipe_string += query["pipe"][-1]
+
+ if pipe_string:
+ response = Pipeline(pipe_string)(request, response)
+
+ return response
+
+
+def load_headers(request, path):
+ """Loads headers from files for a given path.
+
+ Attempts to load both the neighbouring __dir__{.sub}.headers and
+ PATH{.sub}.headers (applying template substitution if needed); results are
+ concatenated in that order.
+ """
+ def _load(request, path):
+ headers_path = path + ".sub.headers"
+ if os.path.exists(headers_path):
+ use_sub = True
+ else:
+ headers_path = path + ".headers"
+ use_sub = False
+
+ try:
+ with open(headers_path, "rb") as headers_file:
+ data = headers_file.read()
+ except OSError:
+ return []
+ else:
+ if use_sub:
+ data = template(request, data, escape_type="none")
+ return [tuple(item.strip() for item in line.split(b":", 1))
+ for line in data.splitlines() if line]
+
+ return (_load(request, os.path.join(os.path.dirname(path), "__dir__")) +
+ _load(request, path))
+
+
+class FileHandler:
+ def __init__(self, base_path=None, url_base="/"):
+ self.base_path = base_path
+ self.url_base = url_base
+ self.directory_handler = DirectoryHandler(self.base_path, self.url_base)
+
+ def __repr__(self):
+ return "<%s base_path:%s url_base:%s>" % (self.__class__.__name__, self.base_path, self.url_base)
+
+ def __call__(self, request, response):
+ path = filesystem_path(self.base_path, request, self.url_base)
+
+ if os.path.isdir(path):
+ return self.directory_handler(request, response)
+ try:
+            # This is probably racy with some other process trying to change the file
+ file_size = os.stat(path).st_size
+ response.headers.update(self.get_headers(request, path))
+ if "Range" in request.headers:
+ try:
+ byte_ranges = RangeParser()(request.headers['Range'], file_size)
+ except HTTPException as e:
+ if e.code == 416:
+ response.headers.set("Content-Range", "bytes */%i" % file_size)
+ raise
+ else:
+ byte_ranges = None
+ data = self.get_data(response, path, byte_ranges)
+ response.content = data
+ response = wrap_pipeline(path, request, response)
+ return response
+
+ except OSError:
+ raise HTTPException(404)
+
+ def get_headers(self, request, path):
+ rv = load_headers(request, path)
+
+ if not any(key.lower() == b"content-type" for (key, _) in rv):
+ rv.insert(0, (b"Content-Type", guess_content_type(path).encode("ascii")))
+
+ return rv
+
+ def get_data(self, response, path, byte_ranges):
+ """Return either the handle to a file, or a string containing
+ the content of a chunk of the file, if we have a range request."""
+ if byte_ranges is None:
+ return open(path, 'rb')
+ else:
+ with open(path, 'rb') as f:
+ response.status = 206
+ if len(byte_ranges) > 1:
+ parts_content_type, content = self.set_response_multipart(response,
+ byte_ranges,
+ f)
+ for byte_range in byte_ranges:
+ content.append_part(self.get_range_data(f, byte_range),
+ parts_content_type,
+ [("Content-Range", byte_range.header_value())])
+ return content
+ else:
+ response.headers.set("Content-Range", byte_ranges[0].header_value())
+ return self.get_range_data(f, byte_ranges[0])
+
+ def set_response_multipart(self, response, ranges, f):
+ parts_content_type = response.headers.get("Content-Type")
+ if parts_content_type:
+ parts_content_type = parts_content_type[-1]
+ else:
+ parts_content_type = None
+ content = MultipartContent()
+ response.headers.set("Content-Type", "multipart/byteranges; boundary=%s" % content.boundary)
+ return parts_content_type, content
+
+ def get_range_data(self, f, byte_range):
+ f.seek(byte_range.lower)
+ return f.read(byte_range.upper - byte_range.lower)
+
+
+file_handler = FileHandler() # type: ignore
+
+
+class PythonScriptHandler:
+ def __init__(self, base_path=None, url_base="/"):
+ self.base_path = base_path
+ self.url_base = url_base
+
+ def __repr__(self):
+ return "<%s base_path:%s url_base:%s>" % (self.__class__.__name__, self.base_path, self.url_base)
+
+ def _load_file(self, request, response, func):
+ """
+        This loads the requested Python file and collects its globals into an environ dictionary.
+
+ Once the environ is loaded, the passed `func` is run with this loaded environ.
+
+ :param request: The request object
+ :param response: The response object
+        :param func: The function to run with the loaded environ and the resolved filesystem path. Signature: (request, response, environ, path)
+ :return: The return of func
+ """
+ path = filesystem_path(self.base_path, request, self.url_base)
+
+ try:
+ environ = {"__file__": path}
+ with open(path, 'rb') as f:
+ exec(compile(f.read(), path, 'exec'), environ, environ)
+
+ if func is not None:
+ return func(request, response, environ, path)
+
+ except OSError:
+ raise HTTPException(404)
+
+ def __call__(self, request, response):
+ def func(request, response, environ, path):
+ if "main" in environ:
+ handler = FunctionHandler(environ["main"])
+ handler(request, response)
+ wrap_pipeline(path, request, response)
+ else:
+ raise HTTPException(500, "No main function in script %s" % path)
+
+ self._load_file(request, response, func)
+
+ def frame_handler(self, request):
+ """
+ This creates a FunctionHandler with one or more of the handling functions.
+
+ Used by the H2 server.
+
+ :param request: The request object used to generate the handler.
+ :return: A FunctionHandler object with one or more of these functions: `handle_headers`, `handle_data` or `main`
+ """
+ def func(request, response, environ, path):
+ def _main(req, resp):
+ pass
+
+ handler = FunctionHandler(_main)
+ if "main" in environ:
+ handler.func = environ["main"]
+ if "handle_headers" in environ:
+ handler.handle_headers = environ["handle_headers"]
+ if "handle_data" in environ:
+ handler.handle_data = environ["handle_data"]
+
+ if handler.func is _main and not hasattr(handler, "handle_headers") and not hasattr(handler, "handle_data"):
+ raise HTTPException(500, "No main function or handlers in script %s" % path)
+
+ return handler
+ return self._load_file(request, None, func)
+
+
+python_script_handler = PythonScriptHandler() # type: ignore
+
+
+class FunctionHandler:
+ def __init__(self, func):
+ self.func = func
+
+ def __call__(self, request, response):
+ try:
+ rv = self.func(request, response)
+ except HTTPException:
+ raise
+ except Exception:
+ msg = traceback.format_exc()
+ raise HTTPException(500, message=msg)
+ if rv is not None:
+ if isinstance(rv, tuple):
+ if len(rv) == 3:
+ status, headers, content = rv
+ response.status = status
+ elif len(rv) == 2:
+ headers, content = rv
+ else:
+ raise HTTPException(500)
+ response.headers.update(headers)
+ else:
+ content = rv
+ response.content = content
+ wrap_pipeline('', request, response)
+
+
+# The generic name here is so that this can be used as a decorator
+def handler(func):
+ return FunctionHandler(func)
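+
+
+# Illustrative sketch (not part of the original module) of the return-value
+# conventions FunctionHandler accepts; the handler below is an assumption for
+# demonstration only.
+@handler
+def _example_text_handler(request, response):
+ # Return bare content, (headers, content), or (status, headers, content).
+ return 200, [("Content-Type", "text/plain")], "Hello, wpt!"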
+
+
+class JsonHandler:
+ def __init__(self, func):
+ self.func = func
+
+ def __call__(self, request, response):
+ return FunctionHandler(self.handle_request)(request, response)
+
+ def handle_request(self, request, response):
+ rv = self.func(request, response)
+ response.headers.set("Content-Type", "application/json")
+ enc = json.dumps
+ if isinstance(rv, tuple):
+ rv = list(rv)
+ value = tuple(rv[:-1] + [enc(rv[-1])])
+ length = len(value[-1])
+ else:
+ value = enc(rv)
+ length = len(value)
+ response.headers.set("Content-Length", length)
+ return value
+
+
+def json_handler(func):
+ return JsonHandler(func)
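+
+
+# Illustrative sketch (not part of the original module): json_handler wraps a
+# function so that its return value is serialised as JSON and the Content-Type
+# and Content-Length headers are set. The handler below is an assumption.
+@json_handler
+def _example_json_handler(request, response):
+ return {"ok": True, "path": request.request_path}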
+
+
+class AsIsHandler:
+ def __init__(self, base_path=None, url_base="/"):
+ self.base_path = base_path
+ self.url_base = url_base
+
+ def __call__(self, request, response):
+ path = filesystem_path(self.base_path, request, self.url_base)
+
+ try:
+ with open(path, 'rb') as f:
+ response.writer.write_raw_content(f.read())
+ wrap_pipeline(path, request, response)
+ response.close_connection = True
+ except OSError:
+ raise HTTPException(404)
+
+
+as_is_handler = AsIsHandler() # type: ignore
+
+
+class BasicAuthHandler:
+ def __init__(self, handler, user, password):
+ """
+ A Basic Auth handler
+
+ :Args:
+ - handler: a secondary handler to run once authentication has succeeded (e.g. file_handler)
+ - user: string giving the valid user name, or None if any credentials are accepted
+ - password: string giving the required password
+ """
+ self.user = user
+ self.password = password
+ self.handler = handler
+
+ def __call__(self, request, response):
+ if "authorization" not in request.headers:
+ response.status = 401
+ response.headers.set("WWW-Authenticate", "Basic")
+ return response
+ else:
+ auth = Authentication(request.headers)
+ if self.user is not None and (self.user != auth.username or self.password != auth.password):
+ response.set_error(403, "Invalid username or password")
+ return response
+ return self.handler(request, response)
+
+
+basic_auth_handler = BasicAuthHandler(file_handler, None, None) # type: ignore
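+
+# Illustrative sketch (not part of the original module): wrapping file_handler
+# behind fixed credentials. The values are assumptions; note that they must be
+# binary strings, because Authentication exposes username/password as bytes.
+_example_protected_files = BasicAuthHandler(file_handler, b"testuser", b"testpass")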
+
+
+class ErrorHandler:
+ def __init__(self, status):
+ self.status = status
+
+ def __call__(self, request, response):
+ response.set_error(self.status)
+
+
+class StringHandler:
+ def __init__(self, data, content_type, **headers):
+ """Handler that returns a fixed data string and headers
+
+ :param data: String to use
+ :param content_type: Content type header to serve the response with
+ :param headers: Extra headers to send with responses, supplied as keyword arguments"""
+
+ self.data = data
+
+ self.resp_headers = [("Content-Type", content_type)]
+ for k, v in headers.items():
+ self.resp_headers.append((k.replace("_", "-"), v))
+
+ self.handler = handler(self.handle_request)
+
+ def handle_request(self, request, response):
+ return self.resp_headers, self.data
+
+ def __call__(self, request, response):
+ rv = self.handler(request, response)
+ return rv
+
+
+class StaticHandler(StringHandler):
+ def __init__(self, path, format_args, content_type, **headers):
+ """Handler that reads a file from a path and substitutes some fixed data
+
+ Note that *.headers files have no effect in this handler.
+
+ :param path: Path to the template file to use
+ :param format_args: Dictionary of values to substitute into the template file
+ :param content_type: Content type header to serve the response with
+ :param headers: Extra headers to send with responses, supplied as keyword arguments"""
+
+ with open(path) as f:
+ data = f.read()
+ if format_args:
+ data = data % format_args
+
+ super().__init__(data, content_type, **headers)
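+
+
+# Illustrative sketch (not part of the original module): a fixed response with
+# an extra header. Keyword-argument header names have underscores converted to
+# dashes, so Cache_Control below becomes the Cache-Control header; the values
+# are assumptions for demonstration only.
+_example_string_handler = StringHandler("hello", "text/plain", Cache_Control="no-store")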
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/logger.py b/testing/web-platform/tests/tools/wptserve/wptserve/logger.py
new file mode 100644
index 0000000000..8eff146a01
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/logger.py
@@ -0,0 +1,5 @@
+import logging
+
+def get_logger() -> logging.Logger:
+ # Use the root logger
+ return logging.getLogger()
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/pipes.py b/testing/web-platform/tests/tools/wptserve/wptserve/pipes.py
new file mode 100644
index 0000000000..a9a85a136b
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/pipes.py
@@ -0,0 +1,561 @@
+# mypy: allow-untyped-defs
+
+from collections import deque
+import base64
+import gzip as gzip_module
+import hashlib
+import os
+import re
+import time
+import uuid
+
+from html import escape
+from io import BytesIO
+from typing import Any, Callable, ClassVar, Dict, Optional, TypeVar
+
+T = TypeVar('T')
+
+
+def resolve_content(response):
+ return b"".join(item for item in response.iter_content(read_file=True))
+
+
+class Pipeline:
+ pipes = {} # type: ClassVar[Dict[str, Callable[..., Any]]]
+
+ def __init__(self, pipe_string):
+ self.pipe_functions = self.parse(pipe_string)
+
+ def parse(self, pipe_string):
+ functions = []
+ for item in PipeTokenizer().tokenize(pipe_string):
+ if not item:
+ break
+ if item[0] == "function":
+ functions.append((self.pipes[item[1]], []))
+ elif item[0] == "argument":
+ functions[-1][1].append(item[1])
+ return functions
+
+ def __call__(self, request, response):
+ for func, args in self.pipe_functions:
+ response = func(request, response, *args)
+ return response
+
+
+class PipeTokenizer:
+ def __init__(self):
+ #This whole class can likely be replaced by some regexps
+ self.state = None
+
+ def tokenize(self, string):
+ self.string = string
+ self.state = self.func_name_state
+ self._index = 0
+ while self.state:
+ yield self.state()
+ yield None
+
+ def get_char(self):
+ if self._index >= len(self.string):
+ return None
+ rv = self.string[self._index]
+ self._index += 1
+ return rv
+
+ def func_name_state(self):
+ rv = ""
+ while True:
+ char = self.get_char()
+ if char is None:
+ self.state = None
+ if rv:
+ return ("function", rv)
+ else:
+ return None
+ elif char == "(":
+ self.state = self.argument_state
+ return ("function", rv)
+ elif char == "|":
+ if rv:
+ return ("function", rv)
+ else:
+ rv += char
+
+ def argument_state(self):
+ rv = ""
+ while True:
+ char = self.get_char()
+ if char is None:
+ self.state = None
+ return ("argument", rv)
+ elif char == "\\":
+ escaped = self.get_escape()
+ if escaped is None:
+ #This should perhaps be an error instead
+ self.state = None
+ return ("argument", rv)
+ rv += escaped
+ elif char == ",":
+ return ("argument", rv)
+ elif char == ")":
+ self.state = self.func_name_state
+ return ("argument", rv)
+ else:
+ rv += char
+
+ def get_escape(self):
+ char = self.get_char()
+ escapes = {"n": "\n",
+ "r": "\r",
+ "t": "\t"}
+ return escapes.get(char, char)
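+
+
+# Illustrative sketch (not part of the original module): how a pipe string is
+# tokenized into function/argument tokens. The function name below is an
+# assumption for demonstration only.
+def _example_tokenize_pipe_string():
+ tokens = list(PipeTokenizer().tokenize("status(301)|header(X-Foo,bar)"))
+ # The token stream is terminated by None, which Pipeline.parse stops on.
+ assert tokens == [("function", "status"), ("argument", "301"),
+ ("function", "header"), ("argument", "X-Foo"),
+ ("argument", "bar"), None]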
+
+
+class pipe:
+ def __init__(self, *arg_converters: Callable[[str], Any]):
+ self.arg_converters = arg_converters
+ self.max_args = len(self.arg_converters)
+ self.min_args = 0
+ opt_seen = False
+ for item in self.arg_converters:
+ if not opt_seen:
+ if isinstance(item, opt):
+ opt_seen = True
+ else:
+ self.min_args += 1
+ else:
+ if not isinstance(item, opt):
+ raise ValueError("Non-optional argument cannot follow optional argument")
+
+ def __call__(self, f):
+ def inner(request, response, *args):
+ if not (self.min_args <= len(args) <= self.max_args):
+ raise ValueError("Expected between %d and %d args, got %d" %
+ (self.min_args, self.max_args, len(args)))
+ arg_values = tuple(f(x) for f, x in zip(self.arg_converters, args))
+ return f(request, response, *arg_values)
+ Pipeline.pipes[f.__name__] = inner
+ #We actually want the undecorated function in the main namespace
+ return f
+
+
+class opt:
+ def __init__(self, f: Callable[[str], Any]):
+ self.f = f
+
+ def __call__(self, arg: str) -> Any:
+ return self.f(arg)
+
+
+def nullable(func: Callable[[str], T]) -> Callable[[str], Optional[T]]:
+ def inner(arg: str) -> Optional[T]:
+ if arg.lower() == "null":
+ return None
+ else:
+ return func(arg)
+ return inner
+
+
+def boolean(arg: str) -> bool:
+ if arg.lower() in ("true", "1"):
+ return True
+ elif arg.lower() in ("false", "0"):
+ return False
+ raise ValueError
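+
+
+# Illustrative sketch (not part of the original module): registering a new
+# pipe. The converters give the argument types and opt() marks trailing
+# optional arguments; the decorator stores the wrapped function in
+# Pipeline.pipes under its name. Name and behaviour below are assumptions.
+@pipe(str, opt(boolean))
+def _example_flag(request, response, name, enabled=True):
+ response.headers.set("X-%s" % name, "1" if enabled else "0")
+ return response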
+
+
+@pipe(int)
+def status(request, response, code):
+ """Alter the status code.
+
+ :param code: Status code to use for the response."""
+ response.status = code
+ return response
+
+
+@pipe(str, str, opt(boolean))
+def header(request, response, name, value, append=False):
+ """Set a HTTP header.
+
+ Replaces any existing HTTP header of the same name unless
+ append is set, in which case the header is appended without
+ replacement.
+
+ :param name: Name of the header to set.
+ :param value: Value to use for the header.
+ :param append: True if existing headers should not be replaced
+ """
+ if not append:
+ response.headers.set(name, value)
+ else:
+ response.headers.append(name, value)
+ return response
+
+
+@pipe(str)
+def trickle(request, response, delays):
+ """Send the response in parts, with time delays.
+
+ :param delays: A string of delays and amounts, in bytes, of the
+ response to send. Each component is separated by
+ a colon. Amounts in bytes are plain integers, whilst
+ delays are floats prefixed with a single d e.g.
+ d1:100:d2
+ Would cause a 1 second delay, would then send 100 bytes
+ of the file, and then cause a 2 second delay, before sending
+ the remainder of the file.
+
+ If the last token is of the form rN, instead of sending the
+ remainder of the file, the previous N instructions will be
+ repeated until the whole file has been sent e.g.
+ d1:100:d2:r2
+ Causes a delay of 1s, then 100 bytes to be sent, then a 2s delay
+ and then a further 100 bytes followed by a two second delay
+ until the response has been fully sent.
+ """
+ def parse_delays():
+ parts = delays.split(":")
+ rv = []
+ for item in parts:
+ if item.startswith("d"):
+ item_type = "delay"
+ item = item[1:]
+ value = float(item)
+ elif item.startswith("r"):
+ item_type = "repeat"
+ value = int(item[1:])
+ if not value % 2 == 0:
+ raise ValueError
+ else:
+ item_type = "bytes"
+ value = int(item)
+ if rv and rv[-1][0] == item_type:
+ # Merge consecutive items of the same type; tuples are immutable,
+ # so build a replacement rather than mutating in place.
+ rv[-1] = (item_type, rv[-1][1] + value)
+ else:
+ rv.append((item_type, value))
+ return rv
+
+ delays = parse_delays()
+ if not delays:
+ return response
+ content = resolve_content(response)
+ offset = [0]
+
+ if not ("Cache-Control" in response.headers or
+ "Pragma" in response.headers or
+ "Expires" in response.headers):
+ response.headers.set("Cache-Control", "no-cache, no-store, must-revalidate")
+ response.headers.set("Pragma", "no-cache")
+ response.headers.set("Expires", "0")
+
+ def add_content(delays, repeat=False):
+ for i, (item_type, value) in enumerate(delays):
+ if item_type == "bytes":
+ yield content[offset[0]:offset[0] + value]
+ offset[0] += value
+ elif item_type == "delay":
+ time.sleep(value)
+ elif item_type == "repeat":
+ if i != len(delays) - 1:
+ continue
+ while offset[0] < len(content):
+ yield from add_content(delays[-(value + 1):-1], True)
+
+ if not repeat and offset[0] < len(content):
+ yield content[offset[0]:]
+
+ response.content = add_content(delays)
+ return response
+
+
+@pipe(nullable(int), opt(nullable(int)))
+def slice(request, response, start, end=None):
+ """Send a byte range of the response body
+
+ :param start: The starting offset. Follows python semantics including
+ negative numbers.
+
+ :param end: The ending offset, again with python semantics and None
+ (spelled "null" in a query string) to indicate the end of
+ the file.
+ """
+ content = resolve_content(response)[start:end]
+ response.content = content
+ response.headers.set("Content-Length", len(content))
+ return response
+
+
+class ReplacementTokenizer:
+ def arguments(self, token):
+ unwrapped = token[1:-1].decode('utf8')
+ return ("arguments", re.split(r",\s*", unwrapped) if unwrapped else [])
+
+ def ident(self, token):
+ return ("ident", token.decode('utf8'))
+
+ def index(self, token):
+ token = token[1:-1].decode('utf8')
+ try:
+ index = int(token)
+ except ValueError:
+ index = token
+ return ("index", index)
+
+ def var(self, token):
+ token = token[:-1].decode('utf8')
+ return ("var", token)
+
+ def tokenize(self, string):
+ assert isinstance(string, bytes)
+ return self.scanner.scan(string)[0]
+
+ # re.Scanner is missing from typeshed:
+ # https://github.com/python/typeshed/pull/3071
+ scanner = re.Scanner([(br"\$\w+:", var), # type: ignore
+ (br"\$?\w+", ident),
+ (br"\[[^\]]*\]", index),
+ (br"\([^)]*\)", arguments)])
+
+
+class FirstWrapper:
+ def __init__(self, params):
+ self.params = params
+
+ def __getitem__(self, key):
+ try:
+ if isinstance(key, str):
+ key = key.encode('iso-8859-1')
+ return self.params.first(key)
+ except KeyError:
+ return ""
+
+
+@pipe(opt(nullable(str)))
+def sub(request, response, escape_type="html"):
+ """Substitute environment information about the server and request into the script.
+
+ :param escape_type: String detailing the type of escaping to use. Known values are
+ "html" and "none", with "html" the default for historic reasons.
+
+ The format is a very limited template language. Substitutions are
+ enclosed by {{ and }}. There are several available substitutions:
+
+ host
+ A simple string value representing the primary host from which the
+ tests are being run.
+ domains
+ A dictionary of available domains indexed by subdomain name.
+ ports
+ A dictionary of lists of ports indexed by protocol.
+ location
+ A dictionary of parts of the request URL. Valid keys are
+ 'server', 'scheme', 'host', 'hostname', 'port', 'path', 'pathname' and 'query'.
+ 'server' is scheme://host:port, 'host' is hostname:port, and query
+ includes the leading '?', but other delimiters are omitted.
+ headers
+ A dictionary of HTTP headers in the request.
+ header_or_default(header, default)
+ The value of an HTTP header, or a default value if it is absent.
+ For example::
+
+ {{header_or_default(X-Test, test-header-absent)}}
+
+ GET
+ A dictionary of query parameters supplied with the request.
+ uuid()
+ A pseudo-random UUID suitable for use with the stash
+ file_hash(algorithm, filepath)
+ The cryptographic hash of a file. Supported algorithms: md5, sha1,
+ sha224, sha256, sha384, and sha512. For example::
+
+ {{file_hash(md5, dom/interfaces.html)}}
+
+ fs_path(filepath)
+ The absolute path to a file inside the wpt document root
+
+ So for example in a setup running on localhost with a www
+ subdomain and a http server on ports 80 and 81::
+
+ {{host}} => localhost
+ {{domains[www]}} => www.localhost
+ {{ports[http][1]}} => 81
+
+ It is also possible to assign a value to a variable name, which must start
+ with the $ character, using the ":" syntax e.g.::
+
+ {{$id:uuid()}}
+
+ Later substitutions in the same file may then refer to the variable
+ by name e.g.::
+
+ {{$id}}
+ """
+ content = resolve_content(response)
+
+ new_content = template(request, content, escape_type=escape_type)
+
+ response.content = new_content
+ return response
+
+class SubFunctions:
+ @staticmethod
+ def uuid(request):
+ return str(uuid.uuid4())
+
+ # Maintain a list of supported algorithms, restricted to those that are
+ # available on all platforms [1]. This ensures that test authors do not
+ # unknowingly introduce platform-specific tests.
+ #
+ # [1] https://docs.python.org/2/library/hashlib.html
+ supported_algorithms = ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")
+
+ @staticmethod
+ def file_hash(request, algorithm, path):
+ assert isinstance(algorithm, str)
+ if algorithm not in SubFunctions.supported_algorithms:
+ raise ValueError("Unsupported encryption algorithm: '%s'" % algorithm)
+
+ hash_obj = getattr(hashlib, algorithm)()
+ absolute_path = os.path.join(request.doc_root, path)
+
+ try:
+ with open(absolute_path, "rb") as f:
+ hash_obj.update(f.read())
+ except OSError:
+ # In this context, an unhandled OSError would be interpreted by the
+ # server as an indication that the template file is non-existent.
+ # Although the generic "Exception" is less precise, it avoids
+ # triggering a potentially-confusing HTTP 404 error in cases where
+ # the path to the file to be hashed is invalid.
+ raise Exception('Cannot open file for hash computation: "%s"' % absolute_path)
+
+ return base64.b64encode(hash_obj.digest()).strip()
+
+ @staticmethod
+ def fs_path(request, path):
+ if not path.startswith("/"):
+ subdir = request.request_path[len(request.url_base):]
+ if "/" in subdir:
+ subdir = subdir.rsplit("/", 1)[0]
+ root_rel_path = subdir + "/" + path
+ else:
+ root_rel_path = path[1:]
+ root_rel_path = root_rel_path.replace("/", os.path.sep)
+ absolute_path = os.path.abspath(os.path.join(request.doc_root, root_rel_path))
+ if ".." in os.path.relpath(absolute_path, request.doc_root):
+ raise ValueError("Path outside wpt root")
+ return absolute_path
+
+ @staticmethod
+ def header_or_default(request, name, default):
+ return request.headers.get(name, default)
+
+def template(request, content, escape_type="html"):
+ #TODO: There basically isn't any error handling here
+ tokenizer = ReplacementTokenizer()
+
+ variables = {}
+
+ def config_replacement(match):
+ content, = match.groups()
+
+ tokens = tokenizer.tokenize(content)
+ tokens = deque(tokens)
+
+ token_type, field = tokens.popleft()
+ assert isinstance(field, str)
+
+ if token_type == "var":
+ variable = field
+ token_type, field = tokens.popleft()
+ assert isinstance(field, str)
+ else:
+ variable = None
+
+ if token_type != "ident":
+ raise Exception("unexpected token type %s (token '%r'), expected ident" % (token_type, field))
+
+ if field in variables:
+ value = variables[field]
+ elif hasattr(SubFunctions, field):
+ value = getattr(SubFunctions, field)
+ elif field == "headers":
+ value = request.headers
+ elif field == "GET":
+ value = FirstWrapper(request.GET)
+ elif field == "hosts":
+ value = request.server.config.all_domains
+ elif field == "domains":
+ value = request.server.config.all_domains[""]
+ elif field == "host":
+ value = request.server.config["browser_host"]
+ elif field in request.server.config:
+ value = request.server.config[field]
+ elif field == "location":
+ value = {"server": "%s://%s:%s" % (request.url_parts.scheme,
+ request.url_parts.hostname,
+ request.url_parts.port),
+ "scheme": request.url_parts.scheme,
+ "host": "%s:%s" % (request.url_parts.hostname,
+ request.url_parts.port),
+ "hostname": request.url_parts.hostname,
+ "port": request.url_parts.port,
+ "path": request.url_parts.path,
+ "pathname": request.url_parts.path,
+ "query": "?%s" % request.url_parts.query}
+ elif field == "url_base":
+ value = request.url_base
+ else:
+ raise Exception("Undefined template variable %s" % field)
+
+ while tokens:
+ ttype, field = tokens.popleft()
+ if ttype == "index":
+ value = value[field]
+ elif ttype == "arguments":
+ value = value(request, *field)
+ else:
+ raise Exception(
+ "unexpected token type %s (token '%r'), expected ident or arguments" % (ttype, field)
+ )
+
+ assert isinstance(value, (int, bytes, str)), tokens
+
+ if variable is not None:
+ variables[variable] = value
+
+ escape_func = {"html": lambda x:escape(x, quote=True),
+ "none": lambda x:x}[escape_type]
+
+ # Should possibly support escaping for other contexts e.g. script
+ # TODO: read the encoding of the response
+ # html.escape() only accepts text strings, so decode/stringify first.
+ if isinstance(value, bytes):
+ value = value.decode("utf-8")
+ elif isinstance(value, int):
+ value = str(value)
+ return escape_func(value).encode("utf-8")
+
+ template_regexp = re.compile(br"{{([^}]*)}}")
+ new_content = template_regexp.sub(config_replacement, content)
+
+ return new_content
+
+@pipe()
+def gzip(request, response):
+ """This pipe gzip-encodes response data.
+
+ It sets (or overwrites) these HTTP headers:
+ Content-Encoding is set to gzip
+ Content-Length is set to the length of the compressed content
+ """
+ content = resolve_content(response)
+ response.headers.set("Content-Encoding", "gzip")
+
+ out = BytesIO()
+ with gzip_module.GzipFile(fileobj=out, mode="w") as f:
+ f.write(content)
+ response.content = out.getvalue()
+
+ response.headers.set("Content-Length", len(response.content))
+
+ return response
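+
+
+# Illustrative note (not part of the original module): in web-platform-tests
+# these pipes are typically applied through a "pipe" query parameter, e.g.
+# some-file.html?pipe=status(202)|header(X-Piped,yes)
+# which runs the status pipe and then the header pipe over the response.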
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/ranges.py b/testing/web-platform/tests/tools/wptserve/wptserve/ranges.py
new file mode 100644
index 0000000000..622b807002
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/ranges.py
@@ -0,0 +1,96 @@
+# mypy: allow-untyped-defs
+
+from .utils import HTTPException
+
+
+class RangeParser:
+ def __call__(self, header, file_size):
+ try:
+ header = header.decode("ascii")
+ except UnicodeDecodeError:
+ raise HTTPException(400, "Non-ASCII range header value")
+ prefix = "bytes="
+ if not header.startswith(prefix):
+ raise HTTPException(416, message=f"Unrecognised range type {header}")
+
+ parts = header[len(prefix):].split(",")
+ ranges = []
+ for item in parts:
+ components = item.split("-")
+ if len(components) != 2:
+ raise HTTPException(416, "Bad range specifier %s" % (item))
+ data = []
+ for component in components:
+ if component == "":
+ data.append(None)
+ else:
+ try:
+ data.append(int(component))
+ except ValueError:
+ raise HTTPException(416, "Bad range specifier %s" % (item))
+ try:
+ ranges.append(Range(data[0], data[1], file_size))
+ except ValueError:
+ raise HTTPException(416, "Bad range specifier %s" % (item))
+
+ return self.coalesce_ranges(ranges, file_size)
+
+ def coalesce_ranges(self, ranges, file_size):
+ rv = []
+ target = None
+ for current in reversed(sorted(ranges)):
+ if target is None:
+ target = current
+ else:
+ new = target.coalesce(current)
+ target = new[0]
+ if len(new) > 1:
+ rv.append(new[1])
+ rv.append(target)
+
+ return rv[::-1]
+
+
+class Range:
+ def __init__(self, lower, upper, file_size):
+ self.file_size = file_size
+ self.lower, self.upper = self._abs(lower, upper)
+ if self.lower >= self.upper or self.lower >= self.file_size:
+ raise ValueError
+
+ def __repr__(self):
+ return f"<Range {self.lower}-{self.upper}>"
+
+ def __lt__(self, other):
+ return self.lower < other.lower
+
+ def __gt__(self, other):
+ return self.lower > other.lower
+
+ def __eq__(self, other):
+ return self.lower == other.lower and self.upper == other.upper
+
+ def _abs(self, lower, upper):
+ if lower is None and upper is None:
+ lower, upper = 0, self.file_size
+ elif lower is None:
+ lower, upper = max(0, self.file_size - upper), self.file_size
+ elif upper is None:
+ lower, upper = lower, self.file_size
+ else:
+ lower, upper = lower, min(self.file_size, upper + 1)
+
+ return lower, upper
+
+ def coalesce(self, other):
+ assert self.file_size == other.file_size
+
+ if (self.upper < other.lower or self.lower > other.upper):
+ return sorted([self, other])
+ else:
+ return [Range(min(self.lower, other.lower),
+ max(self.upper, other.upper) - 1,
+ self.file_size)]
+
+ def header_value(self):
+ return "bytes %i-%i/%i" % (self.lower, self.upper - 1, self.file_size)
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/request.py b/testing/web-platform/tests/tools/wptserve/wptserve/request.py
new file mode 100644
index 0000000000..bfbbae3c28
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/request.py
@@ -0,0 +1,690 @@
+# mypy: allow-untyped-defs
+
+import base64
+import cgi
+import tempfile
+
+from http.cookies import BaseCookie
+from io import BytesIO
+from typing import Dict, List, TypeVar
+from urllib.parse import parse_qsl, urlsplit
+
+from . import stash
+from .utils import HTTPException, isomorphic_encode, isomorphic_decode
+
+KT = TypeVar('KT')
+VT = TypeVar('VT')
+
+missing = object()
+
+
+class Server:
+ """Data about the server environment
+
+ .. attribute:: config
+
+ Environment configuration describing the various servers that are
+ running, together with their hostnames and ports.
+
+ .. attribute:: stash
+
+ Stash object holding state stored on the server between requests.
+
+ """
+ config = None
+
+ def __init__(self, request):
+ self._stash = None
+ self._request = request
+
+ @property
+ def stash(self):
+ if self._stash is None:
+ address, authkey = stash.load_env_config()
+ self._stash = stash.Stash(self._request.url_parts.path, address, authkey)
+ return self._stash
+
+
+class InputFile:
+ max_buffer_size = 1024*1024
+
+ def __init__(self, rfile, length):
+ """File-like object used to provide a seekable view of request body data"""
+ self._file = rfile
+ self.length = length
+
+ self._file_position = 0
+
+ if length > self.max_buffer_size:
+ self._buf = tempfile.TemporaryFile()
+ else:
+ self._buf = BytesIO()
+
+ @property
+ def _buf_position(self):
+ rv = self._buf.tell()
+ assert rv <= self._file_position
+ return rv
+
+ def read(self, bytes=-1):
+ assert self._buf_position <= self._file_position
+
+ if bytes < 0:
+ bytes = self.length - self._buf_position
+ bytes_remaining = min(bytes, self.length - self._buf_position)
+
+ if bytes_remaining == 0:
+ return b""
+
+ if self._buf_position != self._file_position:
+ buf_bytes = min(bytes_remaining, self._file_position - self._buf_position)
+ old_data = self._buf.read(buf_bytes)
+ bytes_remaining -= buf_bytes
+ else:
+ old_data = b""
+
+ assert bytes_remaining == 0 or self._buf_position == self._file_position, (
+ "Before reading buffer position (%i) didn't match file position (%i)" %
+ (self._buf_position, self._file_position))
+ new_data = self._file.read(bytes_remaining)
+ self._buf.write(new_data)
+ self._file_position += bytes_remaining
+ assert bytes_remaining == 0 or self._buf_position == self._file_position, (
+ "After reading buffer position (%i) didn't match file position (%i)" %
+ (self._buf_position, self._file_position))
+
+ return old_data + new_data
+
+ def tell(self):
+ return self._buf_position
+
+ def seek(self, offset):
+ if offset > self.length or offset < 0:
+ raise ValueError
+ if offset <= self._file_position:
+ self._buf.seek(offset)
+ else:
+ self.read(offset - self._file_position)
+
+ def readline(self, max_bytes=None):
+ if max_bytes is None:
+ max_bytes = self.length - self._buf_position
+
+ if self._buf_position < self._file_position:
+ data = self._buf.readline(max_bytes)
+ if data.endswith(b"\n") or len(data) == max_bytes:
+ return data
+ else:
+ data = b""
+
+ assert self._buf_position == self._file_position
+
+ initial_position = self._file_position
+ found = False
+ buf = []
+ max_bytes -= len(data)
+ while not found:
+ readahead = self.read(min(2, max_bytes))
+ max_bytes -= len(readahead)
+ for i, c in enumerate(readahead):
+ if c == b"\n"[0]:
+ buf.append(readahead[:i+1])
+ found = True
+ break
+ if not found:
+ buf.append(readahead)
+ if not readahead or not max_bytes:
+ break
+ new_data = b"".join(buf)
+ data += new_data
+ self.seek(initial_position + len(new_data))
+ return data
+
+ def readlines(self):
+ rv = []
+ while True:
+ data = self.readline()
+ if data:
+ rv.append(data)
+ else:
+ break
+ return rv
+
+ def __next__(self):
+ data = self.readline()
+ if data:
+ return data
+ else:
+ raise StopIteration
+
+ next = __next__
+
+ def __iter__(self):
+ return self
+
+
+class Request:
+ """Object representing a HTTP request.
+
+ .. attribute:: doc_root
+
+ The local directory to use as a base when resolving paths
+
+ .. attribute:: route_match
+
+ Regexp match object from matching the request path to the route
+ selected for the request.
+
+ .. attribute:: client_address
+
+ Contains a tuple of the form (host, port) representing the client's address.
+
+ .. attribute:: protocol_version
+
+ HTTP version specified in the request.
+
+ .. attribute:: method
+
+ HTTP method in the request.
+
+ .. attribute:: request_path
+
+ Request path as it appears in the HTTP request.
+
+ .. attribute:: url_base
+
+ The prefix part of the path; typically / unless the handler has a url_base set
+
+ .. attribute:: url
+
+ Absolute URL for the request.
+
+ .. attribute:: url_parts
+
+ Parts of the requested URL as obtained by urllib.parse.urlsplit(path)
+
+ .. attribute:: request_line
+
+ Raw request line
+
+ .. attribute:: headers
+
+ RequestHeaders object providing a dictionary-like representation of
+ the request headers.
+
+ .. attribute:: raw_headers
+
+ Dictionary of non-normalized request headers.
+
+ .. attribute:: body
+
+ Request body as a string
+
+ .. attribute:: raw_input
+
+ File-like object representing the body of the request.
+
+ .. attribute:: GET
+
+ MultiDict representing the parameters supplied with the request.
+ Note that these may be present on non-GET requests; the name is
+ chosen to be familiar to users of other systems such as PHP.
+ Both keys and values are binary strings.
+
+ .. attribute:: POST
+
+ MultiDict representing the request body parameters. Most parameters
+ are present as string values, but file uploads have file-like
+ values. All string values (including keys) have binary type.
+
+ .. attribute:: cookies
+
+ A Cookies object representing cookies sent with the request with a
+ dictionary-like interface.
+
+ .. attribute:: auth
+
+ An instance of Authentication with username and password properties
+ representing any credentials supplied using HTTP authentication.
+
+ .. attribute:: server
+
+ Server object containing information about the server environment.
+ """
+
+ def __init__(self, request_handler):
+ self.doc_root = request_handler.server.router.doc_root
+ self.route_match = None # Set by the router
+ self.client_address = request_handler.client_address
+
+ self.protocol_version = request_handler.protocol_version
+ self.method = request_handler.command
+
+ # Keys and values in raw headers are native strings.
+ self._headers = None
+ self.raw_headers = request_handler.headers
+
+ scheme = request_handler.server.scheme
+ host = self.raw_headers.get("Host")
+ port = request_handler.server.server_address[1]
+
+ if host is None:
+ host = request_handler.server.server_address[0]
+ else:
+ if ":" in host:
+ host, port = host.split(":", 1)
+
+ self.request_path = request_handler.path
+ self.url_base = "/"
+
+ if self.request_path.startswith(scheme + "://"):
+ self.url = self.request_path
+ else:
+ # TODO(#23362): Stop using native strings for URLs.
+ self.url = "%s://%s:%s%s" % (
+ scheme, host, port, self.request_path)
+ self.url_parts = urlsplit(self.url)
+
+ self.request_line = request_handler.raw_requestline
+
+ self.raw_input = InputFile(request_handler.rfile,
+ int(self.raw_headers.get("Content-Length", 0)))
+
+ self._body = None
+
+ self._GET = None
+ self._POST = None
+ self._cookies = None
+ self._auth = None
+
+ self.server = Server(self)
+
+ def __repr__(self):
+ return "<Request %s %s>" % (self.method, self.url)
+
+ @property
+ def GET(self):
+ if self._GET is None:
+ kwargs = {
+ "keep_blank_values": True,
+ "encoding": "iso-8859-1",
+ }
+ params = parse_qsl(self.url_parts.query, **kwargs)
+ self._GET = MultiDict()
+ for key, value in params:
+ self._GET.add(isomorphic_encode(key), isomorphic_encode(value))
+ return self._GET
+
+ @property
+ def POST(self):
+ if self._POST is None:
+ # Work out the post parameters
+ pos = self.raw_input.tell()
+ self.raw_input.seek(0)
+ kwargs = {
+ "fp": self.raw_input,
+ "environ": {"REQUEST_METHOD": self.method},
+ "headers": self.raw_headers,
+ "keep_blank_values": True,
+ "encoding": "iso-8859-1",
+ }
+ fs = cgi.FieldStorage(**kwargs)
+ self._POST = MultiDict.from_field_storage(fs)
+ self.raw_input.seek(pos)
+ return self._POST
+
+ @property
+ def cookies(self):
+ if self._cookies is None:
+ parser = BinaryCookieParser()
+ cookie_headers = self.headers.get("cookie", b"")
+ parser.load(cookie_headers)
+ cookies = Cookies()
+ for key, value in parser.items():
+ cookies[isomorphic_encode(key)] = CookieValue(value)
+ self._cookies = cookies
+ return self._cookies
+
+ @property
+ def headers(self):
+ if self._headers is None:
+ self._headers = RequestHeaders(self.raw_headers)
+ return self._headers
+
+ @property
+ def body(self):
+ if self._body is None:
+ pos = self.raw_input.tell()
+ self.raw_input.seek(0)
+ self._body = self.raw_input.read()
+ self.raw_input.seek(pos)
+ return self._body
+
+ @property
+ def auth(self):
+ if self._auth is None:
+ self._auth = Authentication(self.headers)
+ return self._auth
+
+
+class H2Request(Request):
+ def __init__(self, request_handler):
+ self.h2_stream_id = request_handler.h2_stream_id
+ self.frames = []
+ super().__init__(request_handler)
+
+
+class RequestHeaders(Dict[bytes, List[bytes]]):
+ """Read-only dictionary-like API for accessing request headers.
+
+ Unlike BaseHTTPRequestHandler.headers, this class always returns all
+ headers with the same name (separated by commas), and it ensures that
+ all keys (i.e. header names) and values have binary type.
+ """
+ def __init__(self, items):
+ for header in items.keys():
+ key = isomorphic_encode(header).lower()
+ # get all headers with the same name
+ values = items.getallmatchingheaders(header)
+ if len(values) > 1:
+ # collect the multiple variations of the current header
+ multiples = []
+ # loop through the values from getallmatchingheaders
+ for value in values:
+ # getallmatchingheaders returns raw header lines, so
+ # split to get name, value
+ multiples.append(isomorphic_encode(value).split(b':', 1)[1].strip())
+ headers = multiples
+ else:
+ headers = [isomorphic_encode(items[header])]
+ dict.__setitem__(self, key, headers)
+
+ def __getitem__(self, key):
+ """Get all headers of a certain (case-insensitive) name. If there is
+ more than one, the values are returned comma separated"""
+ key = isomorphic_encode(key)
+ values = dict.__getitem__(self, key.lower())
+ if len(values) == 1:
+ return values[0]
+ else:
+ return b", ".join(values)
+
+ def __setitem__(self, name, value):
+ raise Exception
+
+ def get(self, key, default=None):
+ """Get a string representing all headers with a particular value,
+ with multiple headers separated by a comma. If no header is found
+ return a default value
+
+ :param key: The header name to look up (case-insensitive)
+ :param default: The value to return in the case of no match
+ """
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def get_list(self, key, default=missing):
+ """Get all the header values for a particular field name as
+ a list"""
+ key = isomorphic_encode(key)
+ try:
+ return dict.__getitem__(self, key.lower())
+ except KeyError:
+ if default is not missing:
+ return default
+ else:
+ raise
+
+ def __contains__(self, key):
+ key = isomorphic_encode(key)
+ return dict.__contains__(self, key.lower())
+
+ def iteritems(self):
+ for item in self:
+ yield item, self[item]
+
+ def itervalues(self):
+ for item in self:
+ yield self[item]
+
+
+class CookieValue:
+ """Representation of cookies.
+
+ Note that cookies are considered read-only and the string value
+ of the cookie will not change if you update the field values.
+ However this is not enforced.
+
+ .. attribute:: key
+
+ The name of the cookie.
+
+ .. attribute:: value
+
+ The value of the cookie
+
+ .. attribute:: expires
+
+ The expiry date of the cookie
+
+ .. attribute:: path
+
+ The path of the cookie
+
+ .. attribute:: comment
+
+ The comment of the cookie.
+
+ .. attribute:: domain
+
+ The domain with which the cookie is associated
+
+ .. attribute:: max_age
+
+ The max-age value of the cookie.
+
+ .. attribute:: secure
+
+ Whether the cookie is marked as secure
+
+ .. attribute:: httponly
+
+ Whether the cookie is marked as httponly
+
+ """
+ def __init__(self, morsel):
+ self.key = morsel.key
+ self.value = morsel.value
+
+ for attr in ["expires", "path",
+ "comment", "domain", "max-age",
+ "secure", "version", "httponly"]:
+ setattr(self, attr.replace("-", "_"), morsel[attr])
+
+ self._str = morsel.OutputString()
+
+ def __str__(self):
+ return self._str
+
+ def __repr__(self):
+ return self._str
+
+ def __eq__(self, other):
+ """Equality comparison for cookies. Compares to other cookies
+ based on value alone and on non-cookies based on the equality
+ of self.value with the other object so that a cookie with value
+ "ham" compares equal to the string "ham"
+ """
+ if hasattr(other, "value"):
+ return self.value == other.value
+ return self.value == other
+
+
+class MultiDict(Dict[KT, VT]):
+ """Dictionary type that holds multiple values for each key"""
+ # TODO: this should perhaps also order the keys
+ def __init__(self):
+ pass
+
+ def __setitem__(self, name, value):
+ dict.__setitem__(self, name, [value])
+
+ def add(self, name, value):
+ if name in self:
+ dict.__getitem__(self, name).append(value)
+ else:
+ dict.__setitem__(self, name, [value])
+
+ def __getitem__(self, key):
+ """Get the first value with a given key"""
+ return self.first(key)
+
+ def first(self, key, default=missing):
+ """Get the first value with a given key
+
+ :param key: The key to lookup
+ :param default: The default to return if key is
+ not found (throws if nothing is
+ specified)
+ """
+ if key in self and dict.__getitem__(self, key):
+ return dict.__getitem__(self, key)[0]
+ elif default is not missing:
+ return default
+ raise KeyError(key)
+
+ def last(self, key, default=missing):
+ """Get the last value with a given key
+
+ :param key: The key to lookup
+ :param default: The default to return if key is
+ not found (throws if nothing is
+ specified)
+ """
+ if key in self and dict.__getitem__(self, key):
+ return dict.__getitem__(self, key)[-1]
+ elif default is not missing:
+ return default
+ raise KeyError(key)
+
+ # We need to explicitly override dict.get; otherwise, it won't call
+ # __getitem__ and would return a list instead.
+ def get(self, key, default=None):
+ """Get the first value with a given key
+
+ :param key: The key to lookup
+ :param default: The default to return if key is
+ not found (None by default)
+ """
+ return self.first(key, default)
+
+ def get_list(self, key):
+ """Get all values with a given key as a list
+
+ :param key: The key to lookup
+ """
+ if key in self:
+ return dict.__getitem__(self, key)
+ else:
+ return []
+
+ @classmethod
+ def from_field_storage(cls, fs):
+ """Construct a MultiDict from a cgi.FieldStorage
+
+ Note that all keys and values are binary strings.
+ """
+ self = cls()
+ if fs.list is None:
+ return self
+ for key in fs:
+ values = fs[key]
+ if not isinstance(values, list):
+ values = [values]
+
+ for value in values:
+ if not value.filename:
+ value = isomorphic_encode(value.value)
+ else:
+ assert isinstance(value, cgi.FieldStorage)
+ self.add(isomorphic_encode(key), value)
+ return self
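+
+
+# Illustrative sketch (not part of the original module) of MultiDict access
+# patterns. The function name below is an assumption for demonstration only.
+def _example_multidict():
+ params = MultiDict()
+ params.add(b"a", b"1")
+ params.add(b"a", b"2")
+ # [] and get() return the first value, last() the most recent one and
+ # get_list() every value in insertion order.
+ assert params[b"a"] == b"1"
+ assert params.last(b"a") == b"2"
+ assert params.get_list(b"a") == [b"1", b"2"]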
+
+
+class BinaryCookieParser(BaseCookie): # type: ignore
+ """A subclass of BaseCookie that returns values in binary strings
+
+ This is not intended to store the cookies; use Cookies instead.
+ """
+ def value_decode(self, val):
+ """Decode value from network to (real_value, coded_value).
+
+ Override BaseCookie.value_decode.
+ """
+ return isomorphic_encode(val), val
+
+ def value_encode(self, val):
+ raise NotImplementedError('BinaryCookieParser is not for setting cookies')
+
+ def load(self, rawdata):
+ """Load cookies from a binary string.
+
+ This overrides and calls BaseCookie.load. Unlike BaseCookie.load, it
+ does not accept dictionaries.
+ """
+ assert isinstance(rawdata, bytes)
+ # BaseCookie.load expects a native string
+ super().load(isomorphic_decode(rawdata))
+
+
+class Cookies(MultiDict[bytes, CookieValue]):
+ """MultiDict specialised for Cookie values
+
+ Keys are binary strings and values are CookieValue objects.
+ """
+ def __init__(self):
+ pass
+
+ def __getitem__(self, key):
+ return self.last(key)
+
+
+class Authentication:
+ """Object for dealing with HTTP Authentication
+
+ .. attribute:: username
+
+ The username supplied in the HTTP Authorization
+ header, or None
+
+ .. attribute:: password
+
+ The password supplied in the HTTP Authorization
+ header, or None
+
+ Both attributes are binary strings (`bytes`), since RFC 7617 Section 2.1
+ does not specify the encoding for username & password (as long as it is
+ compatible with ASCII). UTF-8 should be a relatively safe choice if
+ callers need to decode them, as most browsers use it.
+ """
+ def __init__(self, headers):
+ self.username = None
+ self.password = None
+
+ auth_schemes = {b"Basic": self.decode_basic}
+
+ if "authorization" in headers:
+ header = headers.get("authorization")
+ assert isinstance(header, bytes)
+ auth_type, data = header.split(b" ", 1)
+ if auth_type in auth_schemes:
+ self.username, self.password = auth_schemes[auth_type](data)
+ else:
+ raise HTTPException(400, "Unsupported authentication scheme %s" % auth_type)
+
+ def decode_basic(self, data):
+ assert isinstance(data, bytes)
+ decoded_data = base64.b64decode(data)
+ return decoded_data.split(b":", 1)
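+
+
+# Illustrative sketch (not part of the original module): decoding a Basic
+# Authorization header. The plain dict stands in for request headers and the
+# credentials are assumptions for demonstration only.
+def _example_basic_auth():
+ # base64("user:passwd") == "dXNlcjpwYXNzd2Q="
+ auth = Authentication({"authorization": b"Basic dXNlcjpwYXNzd2Q="})
+ assert auth.username == b"user"
+ assert auth.password == b"passwd"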
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/response.py b/testing/web-platform/tests/tools/wptserve/wptserve/response.py
new file mode 100644
index 0000000000..5c0ea7dd8d
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/response.py
@@ -0,0 +1,818 @@
+# mypy: allow-untyped-defs
+
+from collections import OrderedDict
+from datetime import datetime, timedelta
+from io import BytesIO
+import json
+import uuid
+
+from hpack.struct import HeaderTuple
+from http.cookies import BaseCookie, Morsel
+from hyperframe.frame import HeadersFrame, DataFrame, ContinuationFrame
+
+from .constants import response_codes, h2_headers
+from .logger import get_logger
+from .utils import isomorphic_decode, isomorphic_encode
+
+missing = object()
+
+
+class Response:
+ """Object representing the response to a HTTP request
+
+ :param handler: RequestHandler being used for this response
+ :param request: Request that this is the response for
+
+ .. attribute:: request
+
+ Request associated with this Response.
+
+ .. attribute:: encoding
+
+ The encoding to use when converting text strings to bytes for output.
+
+ .. attribute:: add_required_headers
+
+ Boolean indicating whether mandatory headers should be added to the
+ response.
+
+ .. attribute:: send_body_for_head_request
+
+ Boolean, default False, indicating whether the body content should be
+ sent when the request method is HEAD.
+
+ .. attribute:: writer
+
+ The ResponseWriter for this response
+
+ .. attribute:: status
+
+ Status tuple (code, message). Can be set to an integer in which case the
+ message part is filled in automatically, or a tuple (code, message) in
+ which case code is an int and message is a text or binary string.
+
+ .. attribute:: headers
+
+ ResponseHeaders object holding the HTTP headers to send with the response;
+ iterating over it yields (name, value) tuples.
+
+ .. attribute:: content
+
+ The body of the response. This can either be a string or an iterable of response
+ parts. If it is an iterable, any item may be a string or a function of zero
+ parameters which, when called, returns a string."""
+
+ def __init__(self, handler, request, response_writer_cls=None):
+ self.request = request
+ self.encoding = "utf8"
+
+ self.add_required_headers = True
+ self.send_body_for_head_request = False
+ self.close_connection = False
+
+ self.logger = get_logger()
+ self.writer = response_writer_cls(handler, self) if response_writer_cls else ResponseWriter(handler, self)
+
+ self._status = (200, None)
+ self.headers = ResponseHeaders()
+ self.content = []
+
+ @property
+ def status(self):
+ return self._status
+
+ @status.setter
+ def status(self, value):
+ if hasattr(value, "__len__"):
+ if len(value) != 2:
+ raise ValueError
+ else:
+ code = int(value[0])
+ message = value[1]
+ # Only call str() if message is not a string type, so that we
+ # don't get `str(b"foo") == "b'foo'"` in Python 3.
+ if not isinstance(message, (bytes, str)):
+ message = str(message)
+ self._status = (code, message)
+ else:
+ self._status = (int(value), None)
+
+ def set_cookie(self, name, value, path="/", domain=None, max_age=None,
+ expires=None, samesite=None, secure=False, httponly=False,
+ comment=None):
+ """Set a cookie to be sent with a Set-Cookie header in the
+ response
+
+ :param name: name of the cookie (a binary string)
+ :param value: value of the cookie (a binary string, or None)
+ :param max_age: datetime.timedelta or int representing the time (in seconds)
+ until the cookie expires
+ :param path: String path to which the cookie applies
+ :param domain: String domain to which the cookie applies
+ :param samesite: String indicating whether the cookie should be
+ restricted to a same-site context
+ :param secure: Boolean indicating whether the cookie is marked as secure
+ :param httponly: Boolean indicating whether the cookie is marked as
+ HTTP Only
+ :param comment: String comment
+ :param expires: datetime.datetime or datetime.timedelta indicating a
+ time or interval from now when the cookie expires
+
+ """
+ # TODO(Python 3): Convert other parameters (e.g. path) to bytes, too.
+ if value is None:
+ value = b''
+ max_age = 0
+ expires = timedelta(days=-1)
+
+ name = isomorphic_decode(name)
+ value = isomorphic_decode(value)
+
+ months = {i+1: name for i, name in enumerate(["jan", "feb", "mar",
+ "apr", "may", "jun",
+ "jul", "aug", "sep",
+ "oct", "nov", "dec"])}
+
+ if isinstance(expires, timedelta):
+ expires = datetime.utcnow() + expires
+
+ if expires is not None:
+ expires_str = expires.strftime("%d %%s %Y %H:%M:%S GMT")
+ expires_str = expires_str % months[expires.month]
+ expires = expires_str
+
+ if max_age is not None:
+ if hasattr(max_age, "total_seconds"):
+ max_age = int(max_age.total_seconds())
+ max_age = "%.0d" % max_age
+
+ m = Morsel()
+
+ def maybe_set(key, value):
+ if value is not None and value is not False:
+ m[key] = value
+
+ m.set(name, value, value)
+ maybe_set("path", path)
+ maybe_set("domain", domain)
+ maybe_set("comment", comment)
+ maybe_set("expires", expires)
+ maybe_set("max-age", max_age)
+ maybe_set("secure", secure)
+ maybe_set("httponly", httponly)
+ maybe_set("samesite", samesite)
+
+ self.headers.append("Set-Cookie", m.OutputString())
+
+ def unset_cookie(self, name):
+ """Remove a cookie from those that are being sent with the response"""
+ name = isomorphic_decode(name)
+ cookies = self.headers.get("Set-Cookie")
+ parser = BaseCookie()
+ for cookie in cookies:
+ parser.load(isomorphic_decode(cookie))
+
+ if name in parser.keys():
+ del self.headers["Set-Cookie"]
+ for m in parser.values():
+ if m.key != name:
+ self.headers.append(("Set-Cookie", m.OutputString()))
+
+ def delete_cookie(self, name, path="/", domain=None):
+ """Delete a cookie on the client by setting it to the empty string
+ and to expire in the past"""
+ self.set_cookie(name, None, path=path, domain=domain, max_age=0,
+ expires=timedelta(days=-1))
+
+ def iter_content(self, read_file=False):
+ """Iterator returning chunks of response body content.
+
+ If any part of the content is a function, this will be called
+ and the resulting value (if any) returned.
+
+ :param read_file: boolean controlling the behaviour when content is a
+ file handle. When set to False the handle will be
+ returned directly allowing the file to be passed to
+ the output in small chunks. When set to True, the
+ entire content of the file will be returned as a
+ string facilitating non-streaming operations like
+ template substitution.
+ """
+ if isinstance(self.content, bytes):
+ yield self.content
+ elif isinstance(self.content, str):
+ yield self.content.encode(self.encoding)
+ elif hasattr(self.content, "read"):
+ if read_file:
+ yield self.content.read()
+ else:
+ yield self.content
+ else:
+ for item in self.content:
+ if hasattr(item, "__call__"):
+ value = item()
+ else:
+ value = item
+ if value:
+ yield value
+
+ def write_status_headers(self):
+ """Write out the status line and headers for the response"""
+ self.writer.write_status(*self.status)
+ for item in self.headers:
+ self.writer.write_header(*item)
+ self.writer.end_headers()
+
+ def write_content(self):
+ """Write out the response content"""
+ if self.request.method != "HEAD" or self.send_body_for_head_request:
+ for item in self.iter_content():
+ self.writer.write_content(item)
+
+ def write(self):
+ """Write the whole response"""
+ self.write_status_headers()
+ self.write_content()
+
+ def set_error(self, code, message=""):
+ """Set the response status headers and return a JSON error object:
+
+ {"error": {"code": code, "message": message}}
+ code is an int (HTTP status code), and message is a text string.
+ """
+ err = {"code": code,
+ "message": message}
+ data = json.dumps({"error": err})
+ self.status = code
+ self.headers = [("Content-Type", "application/json"),
+ ("Content-Length", len(data))]
+ self.content = data
+ if code == 500:
+ if isinstance(message, str) and message:
+ first_line = message.splitlines()[0]
+ else:
+ first_line = "<no message given>"
+ self.logger.error("Exception loading %s: %s" % (self.request.url,
+ first_line))
+ self.logger.info(message)
+
+
+class MultipartContent:
+ def __init__(self, boundary=None, default_content_type=None):
+ self.items = []
+ if boundary is None:
+ boundary = str(uuid.uuid4())
+ self.boundary = boundary
+ self.default_content_type = default_content_type
+
+ def __call__(self):
+ boundary = b"--" + self.boundary.encode("ascii")
+ rv = [b"", boundary]
+ for item in self.items:
+ rv.append(item.to_bytes())
+ rv.append(boundary)
+ rv[-1] += b"--"
+ return b"\r\n".join(rv)
+
+ def append_part(self, data, content_type=None, headers=None):
+ if content_type is None:
+ content_type = self.default_content_type
+ self.items.append(MultipartPart(data, content_type, headers))
+
+ def __iter__(self):
+ #This is hackish; when writing the response we need an iterable
+ #or a string. For a multipart/byterange response we want an
+ #iterable that contains a single callable; the MultipartContent
+ #object itself
+ yield self
+
+
+class MultipartPart:
+ def __init__(self, data, content_type=None, headers=None):
+ assert isinstance(data, bytes), data
+ self.headers = ResponseHeaders()
+
+ if content_type is not None:
+ self.headers.set("Content-Type", content_type)
+
+ if headers is not None:
+ for name, value in headers:
+ if name.lower() == b"content-type":
+ func = self.headers.set
+ else:
+ func = self.headers.append
+ func(name, value)
+
+ self.data = data
+
+ def to_bytes(self):
+ rv = []
+ for key, value in self.headers:
+ assert isinstance(key, bytes)
+ assert isinstance(value, bytes)
+ rv.append(b"%s: %s" % (key, value))
+ rv.append(b"")
+ rv.append(self.data)
+ return b"\r\n".join(rv)
+
+
+def _maybe_encode(s):
+ """Encode a string or an int into binary data using isomorphic_encode()."""
+ if isinstance(s, int):
+ return b"%i" % (s,)
+ return isomorphic_encode(s)
+
+
+class ResponseHeaders:
+ """Dictionary-like object holding the headers for the response"""
+ def __init__(self):
+ self.data = OrderedDict()
+
+ def set(self, key, value):
+ """Set a header to a specific value, overwriting any previous header
+ with the same name
+
+ :param key: Name of the header to set
+ :param value: Value to set the header to
+ """
+ key = _maybe_encode(key)
+ value = _maybe_encode(value)
+ self.data[key.lower()] = (key, [value])
+
+ def append(self, key, value):
+ """Add a new header with a given name, not overwriting any existing
+ headers with the same name
+
+ :param key: Name of the header to add
+ :param value: Value to set for the header
+ """
+ key = _maybe_encode(key)
+ value = _maybe_encode(value)
+ if key.lower() in self.data:
+ self.data[key.lower()][1].append(value)
+ else:
+ self.set(key, value)
+
+ def get(self, key, default=missing):
+ """Get the set values for a particular header."""
+ key = _maybe_encode(key)
+ try:
+ return self[key]
+ except KeyError:
+ if default is missing:
+ return []
+ return default
+
+ def __getitem__(self, key):
+ """Get a list of values for a particular header
+
+ """
+ key = _maybe_encode(key)
+ return self.data[key.lower()][1]
+
+ def __delitem__(self, key):
+ key = _maybe_encode(key)
+ del self.data[key.lower()]
+
+ def __contains__(self, key):
+ key = _maybe_encode(key)
+ return key.lower() in self.data
+
+ def __setitem__(self, key, value):
+ self.set(key, value)
+
+ def __iter__(self):
+ for key, values in self.data.values():
+ for value in values:
+ yield key, value
+
+ def items(self):
+ return list(self)
+
+ def update(self, items_iter):
+ for name, value in items_iter:
+ self.append(name, value)
+
+ def __repr__(self):
+ return repr(self.data)
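+
+
+# Illustrative sketch (not part of the original module) of ResponseHeaders
+# semantics: set() replaces, append() adds, lookups are case-insensitive and
+# keys/values are normalised to bytes. The function name is an assumption.
+def _example_response_headers():
+ headers = ResponseHeaders()
+ headers.set("Content-Type", "text/plain")
+ headers.append("Set-Cookie", "a=1")
+ headers.append("Set-Cookie", "b=2")
+ assert headers["content-type"] == [b"text/plain"]
+ assert headers.get("set-cookie") == [b"a=1", b"b=2"]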
+
+
+class H2Response(Response):
+
+ def __init__(self, handler, request):
+ super().__init__(handler, request, response_writer_cls=H2ResponseWriter)
+
+ def write_status_headers(self):
+ self.writer.write_headers(self.headers, *self.status)
+
+ # Hacky way of detecting last item in generator
+ def write_content(self):
+ """Write out the response content"""
+ if self.request.method != "HEAD" or self.send_body_for_head_request:
+ item = None
+ item_iter = self.iter_content()
+ try:
+ item = next(item_iter)
+ while True:
+ check_last = next(item_iter)
+ self.writer.write_data(item, last=False)
+ item = check_last
+ except StopIteration:
+ if item:
+ self.writer.write_data(item, last=True)
+
+
+class H2ResponseWriter:
+
+ def __init__(self, handler, response):
+ self.socket = handler.request
+ self.h2conn = handler.conn
+ self._response = response
+ self._handler = handler
+ self.stream_ended = False
+ self.content_written = False
+ self.request = response.request
+ self.logger = response.logger
+
+ def write_headers(self, headers, status_code, status_message=None, stream_id=None, last=False):
+ """
+ Send a HEADER frame that is tracked by the local state machine.
+
+ Write a HEADER frame using the H2 Connection object; this will only work if the stream is in a state
+ that allows sending HEADER frames.
+
+ :param headers: List of (header, value) tuples
+ :param status_code: The HTTP status code of the response
+ :param stream_id: Id of stream to send frame on. Will use the request stream ID if None
+ :param last: Flag to signal if this is the last frame in stream.
+ """
+ formatted_headers = []
+ secondary_headers = [] # Non ':' prefixed headers are to be added afterwards
+
+ for header, value in headers:
+ # h2_headers are native strings
+ # header field names are strings of ASCII
+ if isinstance(header, bytes):
+ header = header.decode('ascii')
+ # value in headers can be either string or integer
+ if isinstance(value, bytes):
+ value = self.decode(value)
+ if header in h2_headers:
+ header = ':' + header
+ formatted_headers.append((header, str(value)))
+ else:
+ secondary_headers.append((header, str(value)))
+
+ formatted_headers.append((':status', str(status_code)))
+ formatted_headers.extend(secondary_headers)
+
+ with self.h2conn as connection:
+ connection.send_headers(
+ stream_id=self.request.h2_stream_id if stream_id is None else stream_id,
+ headers=formatted_headers,
+ end_stream=last or self.request.method == "HEAD"
+ )
+
+ self.write(connection)
+
+ def write_data(self, item, last=False, stream_id=None):
+ """
+ Send a DATA frame that is tracked by the local state machine.
+
+ Write a DATA frame using the H2 Connection object; this will only work if the stream is in a state
+ that allows sending DATA frames. Uses flow control to split the data into multiple DATA frames if it
+ exceeds the size that can fit in a single frame.
+
+ :param item: The content of the DATA frame
+ :param last: Flag to signal if this is the last frame in stream.
+ :param stream_id: Id of stream to send frame on. Will use the request stream ID if None
+ """
+ if isinstance(item, (str, bytes)):
+ data = BytesIO(self.encode(item))
+ else:
+ data = item
+
+ # Find the length of the data
+ data.seek(0, 2)
+ data_len = data.tell()
+ data.seek(0)
+
+ # If the data is longer than max payload size, need to write it in chunks
+ payload_size = self.get_max_payload_size()
+ while data_len > payload_size:
+ self.write_data_frame(data.read(payload_size), False, stream_id)
+ data_len -= payload_size
+ payload_size = self.get_max_payload_size()
+
+ self.write_data_frame(data.read(), last, stream_id)
+
+ def write_data_frame(self, data, last, stream_id=None):
+ with self.h2conn as connection:
+ connection.send_data(
+ stream_id=self.request.h2_stream_id if stream_id is None else stream_id,
+ data=data,
+ end_stream=last,
+ )
+ self.write(connection)
+ self.stream_ended = last
+
+ def write_push(self, promise_headers, push_stream_id=None, status=None, response_headers=None, response_data=None):
+ """Write a push promise, and optionally write the push content.
+
+        This will write a push promise to the request stream. If you do not provide headers and data for the
+        response, then no response will be pushed, and you should push it yourself using the stream ID
+        returned from this function.
+
+ :param promise_headers: A list of header tuples that matches what the client would use to
+ request the pushed response
+ :param push_stream_id: The ID of the stream the response should be pushed to. If none given, will
+ use the next available id.
+ :param status: The status code of the response, REQUIRED if response_headers given
+ :param response_headers: The headers of the response
+ :param response_data: The response data.
+ :return: The ID of the push stream
+ """
+ with self.h2conn as connection:
+ push_stream_id = push_stream_id if push_stream_id is not None else connection.get_next_available_stream_id()
+ connection.push_stream(self.request.h2_stream_id, push_stream_id, promise_headers)
+ self.write(connection)
+
+ has_data = response_data is not None
+ if response_headers is not None:
+ assert status is not None
+ self.write_headers(response_headers, status, stream_id=push_stream_id, last=not has_data)
+
+ if has_data:
+ self.write_data(response_data, last=True, stream_id=push_stream_id)
+
+ return push_stream_id
+
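+    # Example (illustrative values; the handler below is hypothetical and follows
+    # wptserve's (request, response) handler convention):
+    #
+    #   def main(request, response):
+    #       response.writer.write_push(
+    #           [(":method", "GET"),
+    #            (":path", "/resources/example.css"),
+    #            (":scheme", "https"),
+    #            (":authority", "web-platform.test")],
+    #           status=200,
+    #           response_headers=[("content-type", "text/css")],
+    #           response_data=b"body { margin: 0 }")
+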
+ def end_stream(self, stream_id=None):
+ """Ends the stream with the given ID, or the one that request was made on if no ID given."""
+ with self.h2conn as connection:
+ connection.end_stream(stream_id if stream_id is not None else self.request.h2_stream_id)
+ self.write(connection)
+ self.stream_ended = True
+
+ def write_raw_header_frame(self, headers, stream_id=None, end_stream=False, end_headers=False, frame_cls=HeadersFrame):
+ """
+        Ignores the state machine of the stream and sends a HEADER frame regardless.
+
+ Unlike `write_headers`, this does not check to see if a stream is in the correct state to have HEADER frames
+ sent through to it. It will build a HEADER frame and send it without using the H2 Connection object other than
+ to HPACK encode the headers.
+
+ :param headers: List of (header, value) tuples
+ :param stream_id: Id of stream to send frame on. Will use the request stream ID if None
+ :param end_stream: Set to True to add END_STREAM flag to frame
+ :param end_headers: Set to True to add END_HEADERS flag to frame
+ """
+ if not stream_id:
+ stream_id = self.request.h2_stream_id
+
+ header_t = []
+ for header, value in headers:
+ header_t.append(HeaderTuple(header, value))
+
+ with self.h2conn as connection:
+ frame = frame_cls(stream_id, data=connection.encoder.encode(header_t))
+
+ if end_stream:
+ self.stream_ended = True
+ frame.flags.add('END_STREAM')
+ if end_headers:
+ frame.flags.add('END_HEADERS')
+
+ data = frame.serialize()
+ self.write_raw(data)
+
+ def write_raw_data_frame(self, data, stream_id=None, end_stream=False):
+ """
+        Ignores the state machine of the stream and sends a DATA frame regardless.
+
+ Unlike `write_data`, this does not check to see if a stream is in the correct state to have DATA frames
+ sent through to it. It will build a DATA frame and send it without using the H2 Connection object. It will
+ not perform any flow control checks.
+
+ :param data: The data to be sent in the frame
+ :param stream_id: Id of stream to send frame on. Will use the request stream ID if None
+ :param end_stream: Set to True to add END_STREAM flag to frame
+ """
+ if not stream_id:
+ stream_id = self.request.h2_stream_id
+
+ frame = DataFrame(stream_id, data=data)
+
+ if end_stream:
+ self.stream_ended = True
+ frame.flags.add('END_STREAM')
+
+ data = frame.serialize()
+ self.write_raw(data)
+
+ def write_raw_continuation_frame(self, headers, stream_id=None, end_headers=False):
+ """
+        Ignores the state machine of the stream and sends a CONTINUATION frame regardless.
+
+ This provides the ability to create and write a CONTINUATION frame to the stream, which is not exposed by
+ `write_headers` as the h2 library handles the split between HEADER and CONTINUATION internally. Will perform
+ HPACK encoding on the headers.
+
+ :param headers: List of (header, value) tuples
+ :param stream_id: Id of stream to send frame on. Will use the request stream ID if None
+ :param end_headers: Set to True to add END_HEADERS flag to frame
+ """
+ self.write_raw_header_frame(headers, stream_id=stream_id, end_headers=end_headers, frame_cls=ContinuationFrame)
+
+ def get_max_payload_size(self, stream_id=None):
+ """Returns the maximum size of a payload for the given stream."""
+ stream_id = stream_id if stream_id is not None else self.request.h2_stream_id
+ with self.h2conn as connection:
+ return min(connection.remote_settings.max_frame_size, connection.local_flow_control_window(stream_id)) - 9
+
+ def write(self, connection):
+ self.content_written = True
+ data = connection.data_to_send()
+ self.socket.sendall(data)
+
+ def write_raw(self, raw_data):
+ """Used for sending raw bytes/data through the socket"""
+
+ self.content_written = True
+ self.socket.sendall(raw_data)
+
+ def decode(self, data):
+ """Convert bytes to unicode according to response.encoding."""
+ if isinstance(data, bytes):
+ return data.decode(self._response.encoding)
+ elif isinstance(data, str):
+ return data
+ else:
+ raise ValueError(type(data))
+
+ def encode(self, data):
+ """Convert unicode to bytes according to response.encoding."""
+ if isinstance(data, bytes):
+ return data
+ elif isinstance(data, str):
+ return data.encode(self._response.encoding)
+ else:
+            raise ValueError(type(data))
+
+
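+# Illustrative sketch (the handler below is hypothetical): the write_raw_* helpers
+# are intended for tests that need precise control over the frames on the wire,
+# bypassing h2's stream-state checks.
+def _example_raw_frame_handler(request, response):
+    # Send the response headers normally, then emit a DATA frame directly,
+    # ending the stream without further state-machine involvement.
+    response.writer.write_headers([("content-type", "text/plain")], 200)
+    response.writer.write_raw_data_frame(b"hello", end_stream=True)
+
+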
+class ResponseWriter:
+ """Object providing an API to write out a HTTP response.
+
+ :param handler: The RequestHandler being used.
+ :param response: The Response associated with this writer."""
+ def __init__(self, handler, response):
+ self._wfile = handler.wfile
+ self._response = response
+ self._handler = handler
+ self._status_written = False
+ self._headers_seen = set()
+ self._headers_complete = False
+ self.content_written = False
+ self.request = response.request
+ self.file_chunk_size = 32 * 1024
+ self.default_status = 200
+
+ def _seen_header(self, name):
+ return self.encode(name.lower()) in self._headers_seen
+
+ def write_status(self, code, message=None):
+ """Write out the status line of a response.
+
+ :param code: The integer status code of the response.
+ :param message: The message of the response. Defaults to the message commonly used
+ with the status code."""
+ if message is None:
+ if code in response_codes:
+ message = response_codes[code][0]
+ else:
+ message = ''
+ self.write(b"%s %d %s\r\n" %
+ (isomorphic_encode(self._response.request.protocol_version), code, isomorphic_encode(message)))
+ self._status_written = True
+
+ def write_header(self, name, value):
+ """Write out a single header for the response.
+
+ If a status has not been written, a default status will be written (currently 200)
+
+ :param name: Name of the header field
+ :param value: Value of the header field
+ :return: A boolean indicating whether the write succeeds
+ """
+ if not self._status_written:
+ self.write_status(self.default_status)
+ self._headers_seen.add(self.encode(name.lower()))
+ if not self.write(name):
+ return False
+ if not self.write(b": "):
+ return False
+ if isinstance(value, int):
+ if not self.write(str(value)):
+ return False
+ elif not self.write(value):
+ return False
+ return self.write(b"\r\n")
+
+ def write_default_headers(self):
+ for name, f in [("Server", self._handler.version_string),
+ ("Date", self._handler.date_time_string)]:
+ if not self._seen_header(name):
+ if not self.write_header(name, f()):
+ return False
+
+ if (isinstance(self._response.content, (bytes, str)) and
+ not self._seen_header("content-length")):
+            # Would be nice to avoid double-encoding here
+ if not self.write_header("Content-Length", len(self.encode(self._response.content))):
+ return False
+
+ return True
+
+ def end_headers(self):
+ """Finish writing headers and write the separator.
+
+ Unless add_required_headers on the response is False,
+ this will also add HTTP-mandated headers that have not yet been supplied
+ to the response headers.
+ :return: A boolean indicating whether the write succeeds
+ """
+
+ if self._response.add_required_headers:
+ if not self.write_default_headers():
+ return False
+
+ if not self.write("\r\n"):
+ return False
+ if not self._seen_header("content-length"):
+ self._response.close_connection = True
+ self._headers_complete = True
+
+ return True
+
+ def write_content(self, data):
+ """Write the body of the response.
+
+        HTTP-mandated headers will be added automatically, with the status defaulting to 200, if they
+        have not been explicitly set.
+ :return: A boolean indicating whether the write succeeds
+ """
+ if not self._status_written:
+ self.write_status(self.default_status)
+ if not self._headers_complete:
+ self._response.content = data
+ self.end_headers()
+ return self.write_raw_content(data)
+
+ def write_raw_content(self, data):
+ """Writes the data 'as is'"""
+ if data is None:
+ raise ValueError('data cannot be None')
+ if isinstance(data, (str, bytes)):
+ # Deliberately allows both text and binary types. See `self.encode`.
+ return self.write(data)
+ else:
+ return self.write_content_file(data)
+
+ def write(self, data):
+ """Write directly to the response, converting unicode to bytes
+ according to response.encoding.
+ :return: A boolean indicating whether the write succeeds
+ """
+ self.content_written = True
+ try:
+ self._wfile.write(self.encode(data))
+ return True
+ except OSError:
+ # This can happen if the socket got closed by the remote end
+ return False
+
+ def write_content_file(self, data):
+ """Write a file-like object directly to the response in chunks."""
+ self.content_written = True
+ success = True
+ while True:
+ buf = data.read(self.file_chunk_size)
+            if not buf:
+                # End of file reached; everything has been written.
+                break
+ try:
+ self._wfile.write(buf)
+ except OSError:
+ success = False
+ break
+ data.close()
+ return success
+
+ def encode(self, data):
+ """Convert unicode to bytes according to response.encoding."""
+ if isinstance(data, bytes):
+ return data
+ elif isinstance(data, str):
+ return data.encode(self._response.encoding)
+ else:
+ raise ValueError("data %r should be text or binary, but is %s" % (data, type(data)))
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/router.py b/testing/web-platform/tests/tools/wptserve/wptserve/router.py
new file mode 100644
index 0000000000..92c1b04a46
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/router.py
@@ -0,0 +1,180 @@
+# mypy: allow-untyped-defs
+
+import itertools
+import re
+import sys
+
+from .logger import get_logger
+
+any_method = object()
+
+class RouteTokenizer:
+ def literal(self, scanner, token):
+ return ("literal", token)
+
+ def slash(self, scanner, token):
+ return ("slash", None)
+
+ def group(self, scanner, token):
+ return ("group", token[1:-1])
+
+ def star(self, scanner, token):
+ return ("star", token[1:-3])
+
+ def scan(self, input_str):
+ scanner = re.Scanner([(r"/", self.slash),
+ (r"{\w*}", self.group),
+ (r"\*", self.star),
+ (r"(?:\\.|[^{\*/])*", self.literal),])
+ return scanner.scan(input_str)
+
+class RouteCompiler:
+ def __init__(self):
+ self.reset()
+
+ def reset(self):
+ self.star_seen = False
+
+ def compile(self, tokens):
+ self.reset()
+
+ func_map = {"slash":self.process_slash,
+ "literal":self.process_literal,
+ "group":self.process_group,
+ "star":self.process_star}
+
+ re_parts = ["^"]
+
+ if not tokens or tokens[0][0] != "slash":
+ tokens = itertools.chain([("slash", None)], tokens)
+
+ for token in tokens:
+ re_parts.append(func_map[token[0]](token))
+
+ if self.star_seen:
+ re_parts.append(")")
+ re_parts.append("$")
+
+ return re.compile("".join(re_parts))
+
+ def process_literal(self, token):
+ return re.escape(token[1])
+
+ def process_slash(self, token):
+ return "/"
+
+ def process_group(self, token):
+ if self.star_seen:
+ raise ValueError("Group seen after star in regexp")
+ return "(?P<%s>[^/]+)" % token[1]
+
+ def process_star(self, token):
+ if self.star_seen:
+ raise ValueError("Star seen after star in regexp")
+ self.star_seen = True
+ return "(.*"
+
+def compile_path_match(route_pattern):
+ """tokens: / or literal or match or *"""
+
+ tokenizer = RouteTokenizer()
+ tokens, unmatched = tokenizer.scan(route_pattern)
+
+ assert unmatched == "", unmatched
+
+ compiler = RouteCompiler()
+
+ return compiler.compile(tokens)
+
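+# For example (illustrative), compile_path_match("api/{resource}/*.json") produces
+# a regexp equivalent to r"^/api/(?P<resource>[^/]+)/(.*\.json)$", so the path
+# "/api/test/data.json" matches with resource == "test" and the trailing "*" group
+# equal to "data.json".
+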
+class Router:
+ """Object for matching handler functions to requests.
+
+ :param doc_root: Absolute path of the filesystem location from
+ which to serve tests
+ :param routes: Initial routes to add; a list of three item tuples
+ (method, path_pattern, handler_function), defined
+ as for register()
+ """
+
+ def __init__(self, doc_root, routes):
+ self.doc_root = doc_root
+ self.routes = []
+ self.logger = get_logger()
+
+ # Add the doc_root to the Python path, so that any Python handler can
+ # correctly locate helper scripts (see RFC_TO_BE_LINKED).
+ #
+ # TODO: In a perfect world, Router would not need to know about this
+ # and the handler itself would take care of it. Currently, however, we
+ # treat handlers like functions and so there's no easy way to do that.
+ if self.doc_root not in sys.path:
+ sys.path.insert(0, self.doc_root)
+
+ for route in reversed(routes):
+ self.register(*route)
+
+ def register(self, methods, path, handler):
+ r"""Register a handler for a set of paths.
+
+ :param methods: Set of methods this should match. "*" is a
+ special value indicating that all methods should
+ be matched.
+
+ :param path_pattern: Match pattern that will be used to determine if
+ a request path matches this route. Match patterns
+ consist of either literal text, match groups,
+ denoted {name}, which match any character except /,
+                             and at most one \*, which matches any character and
+                             creates a match group extending to the end of the string.
+ If there is no leading "/" on the pattern, this is
+ automatically implied. For example::
+
+ api/{resource}/*.json
+
+ Would match `/api/test/data.json` or
+ `/api/test/test2/data.json`, but not `/api/test/data.py`.
+
+ The match groups are made available in the request object
+ as a dictionary through the route_match property. For
+ example, given the route pattern above and the path
+ `/api/test/data.json`, the route_match property would
+ contain::
+
+ {"resource": "test", "*": "data.json"}
+
+ :param handler: Function that will be called to process matching
+ requests. This must take two parameters, the request
+ object and the response object.
+
+ """
+ if isinstance(methods, (bytes, str)) or methods is any_method:
+ methods = [methods]
+ for method in methods:
+ self.routes.append((method, compile_path_match(path), handler))
+ self.logger.debug("Route pattern: %s" % self.routes[-1][1].pattern)
+
+ def get_handler(self, request):
+ """Get a handler for a request or None if there is no handler.
+
+ :param request: Request to get a handler for.
+ :rtype: Callable or None
+ """
+ for method, regexp, handler in reversed(self.routes):
+ if (request.method == method or
+ method in (any_method, "*") or
+ (request.method == "HEAD" and method == "GET")):
+ m = regexp.match(request.url_parts.path)
+ if m:
+                    if hasattr(handler, "__name__"):
+                        name = handler.__name__
+                    else:
+                        name = handler.__class__.__name__
+ self.logger.debug("Found handler %s" % name)
+
+ match_parts = m.groupdict().copy()
+ if len(match_parts) < len(m.groups()):
+ match_parts["*"] = m.groups()[-1]
+ request.route_match = match_parts
+
+ return handler
+ return None
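+
+# Illustrative sketch (the handler name is hypothetical): registering a route and
+# what a matching request sees.
+#
+#   router = Router(doc_root, [])
+#   router.register("GET", "api/{resource}/*.json", json_handler)
+#
+# A request for /api/test/data.json is then routed to json_handler, with
+# request.route_match == {"resource": "test", "*": "data.json"}.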
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/routes.py b/testing/web-platform/tests/tools/wptserve/wptserve/routes.py
new file mode 100644
index 0000000000..b6e3800018
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/routes.py
@@ -0,0 +1,6 @@
+from . import handlers
+from .router import any_method
+routes = [(any_method, "*.py", handlers.python_script_handler),
+ ("GET", "*.asis", handlers.as_is_handler),
+ ("GET", "*", handlers.file_handler),
+ ]
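+
+# Illustrative sketch: embedders can prepend their own routes so that they take
+# precedence over the catch-all file handler (echo_handler here is a hypothetical
+# (request, response) handler):
+#
+#   custom_routes = [("GET", "/echo", echo_handler)] + routes
+#   httpd = wptserve.server.WebTestHttpd(routes=custom_routes)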
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/server.py b/testing/web-platform/tests/tools/wptserve/wptserve/server.py
new file mode 100644
index 0000000000..8038a78df8
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/server.py
@@ -0,0 +1,927 @@
+# mypy: allow-untyped-defs
+
+import errno
+import http.server
+import os
+import socket
+from socketserver import ThreadingMixIn
+import ssl
+import sys
+import threading
+import time
+import traceback
+import uuid
+from collections import OrderedDict
+from queue import Empty, Queue
+from typing import Dict
+
+from h2.config import H2Configuration
+from h2.connection import H2Connection
+from h2.events import RequestReceived, ConnectionTerminated, DataReceived, StreamReset, StreamEnded
+from h2.exceptions import StreamClosedError, ProtocolError
+from h2.settings import SettingCodes
+from h2.utilities import extract_method_header
+
+from urllib.parse import urlsplit, urlunsplit
+
+from mod_pywebsocket import dispatch
+from mod_pywebsocket.handshake import HandshakeException, AbortedByUserException
+
+from . import routes as default_routes
+from .config import ConfigBuilder
+from .logger import get_logger
+from .request import Server, Request, H2Request
+from .response import Response, H2Response
+from .router import Router
+from .utils import HTTPException, isomorphic_decode, isomorphic_encode
+from .constants import h2_headers
+from .ws_h2_handshake import WsH2Handshaker
+
+# We need to stress test that browsers can send/receive many headers (there is
+# no specified limit), but the Python stdlib has an arbitrary limit of 100
+# headers. Hitting the limit leads to HTTP 431, so we monkey patch it higher.
+# https://bugs.python.org/issue26586
+# https://github.com/web-platform-tests/wpt/pull/24451
+import http.client
+assert isinstance(getattr(http.client, '_MAXHEADERS'), int)
+setattr(http.client, '_MAXHEADERS', 512)
+
+"""
+HTTP server designed for testing purposes.
+
+The server is designed to provide flexibility in the way that
+requests are handled, and to provide control both of exactly
+what bytes are put on the wire for the response, and in the
+timing of sending those bytes.
+
+The server is based on the stdlib HTTPServer, but with some
+notable differences in the way that requests are processed.
+Overall processing is handled by a WebTestRequestHandler,
+which is a subclass of BaseHTTPRequestHandler. This is responsible
+for parsing the incoming request. A RequestRewriter is then
+applied and may change the request data if it matches a
+supplied rule.
+
+Once the request data has been finalised, Request and Response
+objects are constructed. These are used by the other parts of the
+system to read information about the request and manipulate the
+response.
+
+Each request is handled by a particular handler function. The
+mapping between Request and the appropriate handler is determined
+by a Router. By default handlers are installed to interpret files
+under the document root with .py extensions as executable python
+files (see handlers.py for the api for such files), .asis files as
+bytestreams to be sent literally and all other files to be served
+statically.
+
+The handler functions are responsible for either populating the
+fields of the response object, which will then be written when the
+handler returns, or for directly writing to the output stream.
+"""
+
+
+class RequestRewriter:
+ def __init__(self, rules):
+ """Object for rewriting the request path.
+
+ :param rules: Initial rules to add; a list of three item tuples
+ (method, input_path, output_path), defined as for
+ register()
+ """
+ self.rules = {}
+ for rule in reversed(rules):
+ self.register(*rule)
+ self.logger = get_logger()
+
+ def register(self, methods, input_path, output_path):
+ """Register a rewrite rule.
+
+ :param methods: Set of methods this should match. "*" is a
+ special value indicating that all methods should
+ be matched.
+
+ :param input_path: Path to match for the initial request.
+
+ :param output_path: Path to replace the input path with in
+ the request.
+ """
+ if isinstance(methods, (bytes, str)):
+ methods = [methods]
+ self.rules[input_path] = (methods, output_path)
+
+ def rewrite(self, request_handler):
+ """Rewrite the path in a BaseHTTPRequestHandler instance, if
+ it matches a rule.
+
+ :param request_handler: BaseHTTPRequestHandler for which to
+ rewrite the request.
+ """
+ split_url = urlsplit(request_handler.path)
+ if split_url.path in self.rules:
+ methods, destination = self.rules[split_url.path]
+ if "*" in methods or request_handler.command in methods:
+ self.logger.debug("Rewriting request path %s to %s" %
+ (request_handler.path, destination))
+ new_url = list(split_url)
+ new_url[2] = destination
+ new_url = urlunsplit(new_url)
+ request_handler.path = new_url
+
+
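+# Illustrative example: a rewrite rule is a (methods, input_path, output_path)
+# tuple, e.g. ("GET", "/old/resource", "/new/resource"); rules are passed to
+# WebTestHttpd through its rewrites argument, and a matching request for
+# /old/resource is then served as if /new/resource had been requested.
+
+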
+class WebTestServer(ThreadingMixIn, http.server.HTTPServer):
+ allow_reuse_address = True
+ acceptable_errors = (errno.EPIPE, errno.ECONNABORTED)
+ request_queue_size = 2000
+
+ # Ensure that we don't hang on shutdown waiting for requests
+ daemon_threads = True
+
+ def __init__(self, server_address, request_handler_cls,
+ router, rewriter, bind_address, ws_doc_root=None,
+ config=None, use_ssl=False, key_file=None, certificate=None,
+ encrypt_after_connect=False, latency=None, http2=False, **kwargs):
+ """Server for HTTP(s) Requests
+
+ :param server_address: tuple of (server_name, port)
+
+ :param request_handler_cls: BaseHTTPRequestHandler-like class to use for
+ handling requests.
+
+ :param router: Router instance to use for matching requests to handler
+ functions
+
+ :param rewriter: RequestRewriter-like instance to use for preprocessing
+ requests before they are routed
+
+ :param config: Dictionary holding environment configuration settings for
+ handlers to read, or None to use the default values.
+
+ :param use_ssl: Boolean indicating whether the server should use SSL
+
+ :param key_file: Path to key file to use if SSL is enabled.
+
+ :param certificate: Path to certificate to use if SSL is enabled.
+
+ :param ws_doc_root: Document root for websockets
+
+ :param encrypt_after_connect: For each connection, don't start encryption
+ until a CONNECT message has been received.
+ This enables the server to act as a
+ self-proxy.
+
+        :param bind_address: True to bind the server to both the IP address and
+ port specified in the server_address parameter.
+ False to bind the server only to the port in the
+ server_address parameter, but not to the address.
+ :param latency: Delay in ms to wait before serving each response, or
+ callable that returns a delay in ms
+ """
+ self.router = router
+ self.rewriter = rewriter
+
+ self.scheme = "http2" if http2 else "https" if use_ssl else "http"
+ self.logger = get_logger()
+
+ self.latency = latency
+
+ if bind_address:
+ hostname_port = server_address
+ else:
+            hostname_port = ("", server_address[1])
+
+ http.server.HTTPServer.__init__(self, hostname_port, request_handler_cls, **kwargs)
+
+ if config is not None:
+ Server.config = config
+ else:
+ self.logger.debug("Using default configuration")
+ with ConfigBuilder(self.logger,
+ browser_host=server_address[0],
+ ports={"http": [self.server_address[1]]}) as config:
+ assert config["ssl_config"] is None
+ Server.config = config
+
+ self.ws_doc_root = ws_doc_root
+ self.key_file = key_file
+ self.certificate = certificate
+ self.encrypt_after_connect = use_ssl and encrypt_after_connect
+
+ if use_ssl and not encrypt_after_connect:
+ if http2:
+ ssl_context = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH)
+ ssl_context.load_cert_chain(keyfile=self.key_file, certfile=self.certificate)
+ ssl_context.set_alpn_protocols(['h2'])
+ self.socket = ssl_context.wrap_socket(self.socket,
+ server_side=True)
+
+ else:
+ self.socket = ssl.wrap_socket(self.socket,
+ keyfile=self.key_file,
+ certfile=self.certificate,
+ server_side=True)
+
+ def handle_error(self, request, client_address):
+ error = sys.exc_info()[1]
+
+ if ((isinstance(error, OSError) and
+ isinstance(error.args, tuple) and
+ error.args[0] in self.acceptable_errors) or
+ (isinstance(error, IOError) and
+ error.errno in self.acceptable_errors)):
+ pass # remote hang up before the result is sent
+ else:
+ msg = traceback.format_exc()
+ self.logger.error(f"{type(error)} {error}")
+ self.logger.info(msg)
+
+
+class BaseWebTestRequestHandler(http.server.BaseHTTPRequestHandler):
+ """RequestHandler for WebTestHttpd"""
+
+ def __init__(self, *args, **kwargs):
+ self.logger = get_logger()
+ http.server.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
+
+ def finish_handling_h1(self, request_line_is_valid):
+
+ self.server.rewriter.rewrite(self)
+
+ request = Request(self)
+ response = Response(self, request)
+
+ if request.method == "CONNECT":
+ self.handle_connect(response)
+ return
+
+ if not request_line_is_valid:
+ response.set_error(414)
+ response.write()
+ return
+
+ self.logger.debug(f"{request.method} {request.request_path}")
+ handler = self.server.router.get_handler(request)
+ self.finish_handling(request, response, handler)
+
+ def finish_handling(self, request, response, handler):
+        # If the handler we used for the request had a non-default base path
+        # set, update the doc_root of the request to reflect this
+ if hasattr(handler, "base_path") and handler.base_path:
+ request.doc_root = handler.base_path
+ if hasattr(handler, "url_base") and handler.url_base != "/":
+ request.url_base = handler.url_base
+
+ if self.server.latency is not None:
+ if callable(self.server.latency):
+ latency = self.server.latency()
+ else:
+ latency = self.server.latency
+ self.logger.warning("Latency enabled. Sleeping %i ms" % latency)
+ time.sleep(latency / 1000.)
+
+ if handler is None:
+ self.logger.debug("No Handler found!")
+ response.set_error(404)
+ else:
+ try:
+ handler(request, response)
+ except HTTPException as e:
+ if 500 <= e.code < 600:
+ self.logger.warning("HTTPException in handler: %s" % e)
+ self.logger.warning(traceback.format_exc())
+ response.set_error(e.code, str(e))
+ except Exception as e:
+ self.respond_with_error(response, e)
+ self.logger.debug("%i %s %s (%s) %i" % (response.status[0],
+ request.method,
+ request.request_path,
+ request.headers.get('Referer'),
+ request.raw_input.length))
+
+ if not response.writer.content_written:
+ response.write()
+
+        # If a Python handler has been used, older handlers won't have sent a DATA frame with
+        # END_STREAM set, so end the stream here for backwards compatibility with handlers
+        # that don't close their streams themselves
+ if isinstance(response, H2Response) and not response.writer.stream_ended:
+ response.writer.end_stream()
+
+ # If we want to remove this in the future, a solution is needed for
+ # scripts that produce a non-string iterable of content, since these
+ # can't set a Content-Length header. A notable example of this kind of
+ # problem is with the trickle pipe i.e. foo.js?pipe=trickle(d1)
+ if response.close_connection:
+ self.close_connection = True
+
+ if not self.close_connection:
+ # Ensure that the whole request has been read from the socket
+ request.raw_input.read()
+
+ def handle_connect(self, response):
+ self.logger.debug("Got CONNECT")
+ response.status = 200
+ response.write()
+ if self.server.encrypt_after_connect:
+ self.logger.debug("Enabling SSL for connection")
+ self.request = ssl.wrap_socket(self.connection,
+ keyfile=self.server.key_file,
+ certfile=self.server.certificate,
+ server_side=True)
+ self.setup()
+ return
+
+ def respond_with_error(self, response, e):
+ message = str(e)
+ if message:
+ err = [message]
+ else:
+ err = []
+ err.append(traceback.format_exc())
+ response.set_error(500, "\n".join(err))
+
+
+class Http2WebTestRequestHandler(BaseWebTestRequestHandler):
+ protocol_version = "HTTP/2.0"
+
+ def handle_one_request(self):
+ """
+ This is the main HTTP/2.0 Handler.
+
+        When a browser opens a connection to the server on the HTTP/2.0 port,
+        the server enters this handler, which initiates the h2 connection, keeps
+        running for the duration of the interaction, and reads/writes directly
+        from the socket.
+
+ Because there can be multiple H2 connections active at the same
+ time, a UUID is created for each so that it is easier to tell them apart in the logs.
+ """
+
+ config = H2Configuration(client_side=False)
+ self.conn = H2ConnectionGuard(H2Connection(config=config))
+ self.close_connection = False
+
+ # Generate a UUID to make it easier to distinguish different H2 connection debug messages
+ self.uid = str(uuid.uuid4())[:8]
+
+ self.logger.debug('(%s) Initiating h2 Connection' % self.uid)
+
+ with self.conn as connection:
+ # Bootstrapping WebSockets with HTTP/2 specification requires
+ # ENABLE_CONNECT_PROTOCOL to be set in order to enable WebSocket
+ # over HTTP/2
+ new_settings = dict(connection.local_settings)
+ new_settings[SettingCodes.ENABLE_CONNECT_PROTOCOL] = 1
+ connection.local_settings.update(new_settings)
+ connection.local_settings.acknowledge()
+
+ connection.initiate_connection()
+ data = connection.data_to_send()
+ window_size = connection.remote_settings.initial_window_size
+
+ self.request.sendall(data)
+
+ # Dict of { stream_id: (thread, queue) }
+ stream_queues = {}
+
+ try:
+ while not self.close_connection:
+ data = self.request.recv(window_size)
+                if not data:
+ self.logger.debug('(%s) Socket Closed' % self.uid)
+ self.close_connection = True
+ continue
+
+ with self.conn as connection:
+ frames = connection.receive_data(data)
+ window_size = connection.remote_settings.initial_window_size
+
+ self.logger.debug('(%s) Frames Received: ' % self.uid + str(frames))
+
+ for frame in frames:
+ if isinstance(frame, ConnectionTerminated):
+ self.logger.debug('(%s) Connection terminated by remote peer ' % self.uid)
+ self.close_connection = True
+
+ # Flood all the streams with connection terminated, this will cause them to stop
+ for stream_id, (thread, queue) in stream_queues.items():
+ queue.put(frame)
+
+ elif hasattr(frame, 'stream_id'):
+ if frame.stream_id not in stream_queues:
+ queue = Queue()
+ stream_queues[frame.stream_id] = (self.start_stream_thread(frame, queue), queue)
+ stream_queues[frame.stream_id][1].put(frame)
+
+ if isinstance(frame, StreamEnded) or (hasattr(frame, "stream_ended") and frame.stream_ended):
+ del stream_queues[frame.stream_id]
+
+ except OSError as e:
+ self.logger.error(f'({self.uid}) Closing Connection - \n{str(e)}')
+ if not self.close_connection:
+ self.close_connection = True
+ except Exception as e:
+ self.logger.error(f'({self.uid}) Unexpected Error - \n{str(e)}')
+ finally:
+ for stream_id, (thread, queue) in stream_queues.items():
+ queue.put(None)
+ thread.join()
+
+ def _is_extended_connect_frame(self, frame):
+ if not isinstance(frame, RequestReceived):
+ return False
+
+ method = extract_method_header(frame.headers)
+ if method != b"CONNECT":
+ return False
+
+ protocol = ""
+ for key, value in frame.headers:
+ if key in (b':protocol', ':protocol'):
+ protocol = isomorphic_encode(value)
+ break
+ if protocol != b"websocket":
+ raise ProtocolError(f"Invalid protocol {protocol} with CONNECT METHOD")
+
+ return True
+
+ def start_stream_thread(self, frame, queue):
+ """
+ This starts a new thread to handle frames for a specific stream.
+ :param frame: The first frame on the stream
+ :param queue: A queue object that the thread will use to check for new frames
+ :return: The thread object that has already been started
+ """
+ if self._is_extended_connect_frame(frame):
+ target = Http2WebTestRequestHandler._stream_ws_thread
+ else:
+ target = Http2WebTestRequestHandler._stream_thread
+ t = threading.Thread(
+ target=target,
+ args=(self, frame.stream_id, queue)
+ )
+ t.start()
+ return t
+
+ def _stream_ws_thread(self, stream_id, queue):
+ frame = queue.get(True, None)
+
+ if frame is None:
+ return
+
+ rfile, wfile = os.pipe()
+        rfile, wfile = os.fdopen(rfile, 'rb'), os.fdopen(wfile, 'wb', 0)  # needs to be unbuffered for websockets
+ stream_handler = H2HandlerCopy(self, frame, rfile)
+
+ h2request = H2Request(stream_handler)
+ h2response = H2Response(stream_handler, h2request)
+
+ dispatcher = dispatch.Dispatcher(self.server.ws_doc_root, None, False)
+ if not dispatcher.get_handler_suite(stream_handler.path):
+ h2response.set_error(404)
+ h2response.write()
+ return
+
+ request_wrapper = _WebSocketRequest(stream_handler, h2response)
+
+ handshaker = WsH2Handshaker(request_wrapper, dispatcher)
+ try:
+ handshaker.do_handshake()
+ except HandshakeException as e:
+            self.logger.info('Handshake failed with error: %s' % e)
+ h2response.set_error(e.status)
+ h2response.write()
+ return
+ except AbortedByUserException:
+ h2response.write()
+ return
+
+ # h2 Handshaker prepares the headers but does not send them down the
+ # wire. Flush the headers here.
+ try:
+ h2response.write_status_headers()
+ except StreamClosedError:
+ # work around https://github.com/web-platform-tests/wpt/issues/27786
+ # The stream was already closed.
+ return
+
+ request_wrapper._dispatcher = dispatcher
+
+ # we need two threads:
+ # - one to handle the frame queue
+ # - one to handle the request (dispatcher.transfer_data is blocking)
+ # the alternative is to have only one (blocking) thread. That thread
+ # will call transfer_data. That would require a special case in
+ # handle_one_request, to bypass the queue and write data to wfile
+ # directly.
+ t = threading.Thread(
+ target=Http2WebTestRequestHandler._stream_ws_sub_thread,
+ args=(self, request_wrapper, stream_handler, queue)
+ )
+ t.start()
+
+ while not self.close_connection:
+ try:
+ frame = queue.get(True, 1)
+ except Empty:
+ continue
+
+ if isinstance(frame, DataReceived):
+ wfile.write(frame.data)
+ if frame.stream_ended:
+ raise NotImplementedError("frame.stream_ended")
+ wfile.close()
+ elif frame is None or isinstance(frame, (StreamReset, StreamEnded, ConnectionTerminated)):
+ self.logger.debug(f'({self.uid} - {stream_id}) Stream Reset, Thread Closing')
+ break
+
+ t.join()
+
+ def _stream_ws_sub_thread(self, request, stream_handler, queue):
+ dispatcher = request._dispatcher
+ try:
+ dispatcher.transfer_data(request)
+ except StreamClosedError:
+ # work around https://github.com/web-platform-tests/wpt/issues/27786
+ # The stream was already closed.
+ queue.put(None)
+ return
+
+ stream_id = stream_handler.h2_stream_id
+ with stream_handler.conn as connection:
+ try:
+ connection.end_stream(stream_id)
+ data = connection.data_to_send()
+ stream_handler.request.sendall(data)
+ except StreamClosedError: # maybe the stream has already been closed
+ pass
+ queue.put(None)
+
+ def _stream_thread(self, stream_id, queue):
+ """
+ This thread processes frames for a specific stream. It waits for frames to be placed
+ in the queue, and processes them. When it receives a request frame, it will start processing
+ immediately, even if there are data frames to follow. One of the reasons for this is that it
+ can detect invalid requests before needing to read the rest of the frames.
+ """
+
+ # The file-like pipe object that will be used to share data to request object if data is received
+ wfile = None
+ request = None
+ response = None
+ req_handler = None
+ while not self.close_connection:
+ try:
+ frame = queue.get(True, 1)
+ except Empty:
+ # Restart to check for close_connection
+ continue
+
+ self.logger.debug(f'({self.uid} - {stream_id}) {str(frame)}')
+
+ if isinstance(frame, RequestReceived):
+ rfile, wfile = os.pipe()
+ rfile, wfile = os.fdopen(rfile, 'rb'), os.fdopen(wfile, 'wb')
+
+ stream_handler = H2HandlerCopy(self, frame, rfile)
+
+ stream_handler.server.rewriter.rewrite(stream_handler)
+ request = H2Request(stream_handler)
+ response = H2Response(stream_handler, request)
+
+ req_handler = stream_handler.server.router.get_handler(request)
+
+ if hasattr(req_handler, "frame_handler"):
+ # Convert this to a handler that will utilise H2 specific functionality, such as handling individual frames
+ req_handler = self.frame_handler(request, response, req_handler)
+
+ if hasattr(req_handler, 'handle_headers'):
+ req_handler.handle_headers(frame, request, response)
+
+ elif isinstance(frame, DataReceived):
+ wfile.write(frame.data)
+
+ if hasattr(req_handler, 'handle_data'):
+ req_handler.handle_data(frame, request, response)
+
+ if frame.stream_ended:
+ wfile.close()
+ elif frame is None or isinstance(frame, (StreamReset, StreamEnded, ConnectionTerminated)):
+ self.logger.debug(f'({self.uid} - {stream_id}) Stream Reset, Thread Closing')
+ break
+
+ if request is not None:
+ request.frames.append(frame)
+
+ if hasattr(frame, "stream_ended") and frame.stream_ended:
+ try:
+ self.finish_handling(request, response, req_handler)
+ except StreamClosedError:
+ self.logger.debug('(%s - %s) Unable to write response; stream closed' %
+ (self.uid, stream_id))
+ break
+
+ def frame_handler(self, request, response, handler):
+ try:
+ return handler.frame_handler(request)
+ except HTTPException as e:
+ response.set_error(e.code, str(e))
+ response.write()
+ except Exception as e:
+ self.respond_with_error(response, e)
+ response.write()
+
+
+class H2ConnectionGuard:
+ """H2Connection objects are not threadsafe, so this keeps thread safety"""
+ lock = threading.Lock()
+
+ def __init__(self, obj):
+ assert isinstance(obj, H2Connection)
+ self.obj = obj
+
+ def __enter__(self):
+ self.lock.acquire()
+ return self.obj
+
+ def __exit__(self, exception_type, exception_value, traceback):
+ self.lock.release()
+
+
+class H2Headers(Dict[bytes, bytes]):
+ def __init__(self, headers):
+ self.raw_headers = OrderedDict()
+ for key, val in headers:
+ key = isomorphic_decode(key)
+ val = isomorphic_decode(val)
+ self.raw_headers[key] = val
+ dict.__setitem__(self, self._convert_h2_header_to_h1(key), val)
+
+ def _convert_h2_header_to_h1(self, header_key):
+ if header_key[1:] in h2_headers and header_key[0] == ':':
+ return header_key[1:]
+ else:
+ return header_key
+
+ # TODO This does not seem relevant for H2 headers, so using a dummy function for now
+ def getallmatchingheaders(self, header):
+ return ['dummy function']
+
+
+class H2HandlerCopy:
+ def __init__(self, handler, req_frame, rfile):
+ self.headers = H2Headers(req_frame.headers)
+ self.command = self.headers['method']
+ self.path = self.headers['path']
+ self.h2_stream_id = req_frame.stream_id
+ self.server = handler.server
+ self.protocol_version = handler.protocol_version
+ self.client_address = handler.client_address
+ self.raw_requestline = ''
+ self.rfile = rfile
+ self.request = handler.request
+ self.conn = handler.conn
+
+class Http1WebTestRequestHandler(BaseWebTestRequestHandler):
+ protocol_version = "HTTP/1.1"
+
+ def handle_one_request(self):
+ response = None
+
+ try:
+ self.close_connection = False
+
+ request_line_is_valid = self.get_request_line()
+
+ if self.close_connection:
+ return
+
+ request_is_valid = self.parse_request()
+ if not request_is_valid:
+                # parse_request() actually sends its own error responses
+ return
+
+ self.finish_handling_h1(request_line_is_valid)
+
+ except socket.timeout as e:
+ self.log_error("Request timed out: %r", e)
+ self.close_connection = True
+ return
+
+ except Exception:
+ err = traceback.format_exc()
+ if response:
+ response.set_error(500, err)
+ response.write()
+
+ def get_request_line(self):
+ try:
+ self.raw_requestline = self.rfile.readline(65537)
+ except OSError:
+ self.close_connection = True
+ return False
+ if len(self.raw_requestline) > 65536:
+ self.requestline = ''
+ self.request_version = ''
+ self.command = ''
+ return False
+ if not self.raw_requestline:
+ self.close_connection = True
+ return True
+
+class WebTestHttpd:
+ """
+ :param host: Host from which to serve (default: 127.0.0.1)
+ :param port: Port from which to serve (default: 8000)
+ :param server_cls: Class to use for the server (default depends on ssl vs non-ssl)
+ :param handler_cls: Class to use for the RequestHandler
+ :param use_ssl: Use a SSL server if no explicit server_cls is supplied
+ :param key_file: Path to key file to use if ssl is enabled
+ :param certificate: Path to certificate file to use if ssl is enabled
+ :param encrypt_after_connect: For each connection, don't start encryption
+ until a CONNECT message has been received.
+ This enables the server to act as a
+ self-proxy.
+ :param router_cls: Router class to use when matching URLs to handlers
+ :param doc_root: Document root for serving files
+ :param ws_doc_root: Document root for websockets
+ :param routes: List of routes with which to initialize the router
+ :param rewriter_cls: Class to use for request rewriter
+ :param rewrites: List of rewrites with which to initialize the rewriter_cls
+ :param config: Dictionary holding environment configuration settings for
+ handlers to read, or None to use the default values.
+ :param bind_address: Boolean indicating whether to bind server to IP address.
+ :param latency: Delay in ms to wait before serving each response, or
+ callable that returns a delay in ms
+
+ HTTP server designed for testing scenarios.
+
+ Takes a router class which provides one method get_handler which takes a Request
+ and returns a handler function.
+
+ .. attribute:: host
+
+ The host name or ip address of the server
+
+ .. attribute:: port
+
+ The port on which the server is running
+
+ .. attribute:: router
+
+ The Router object used to associate requests with resources for this server
+
+ .. attribute:: rewriter
+
+ The Rewriter object used for URL rewriting
+
+ .. attribute:: use_ssl
+
+ Boolean indicating whether the server is using ssl
+
+ .. attribute:: started
+
+ Boolean indicating whether the server is running
+
+ """
+ def __init__(self, host="127.0.0.1", port=8000,
+ server_cls=None, handler_cls=Http1WebTestRequestHandler,
+ use_ssl=False, key_file=None, certificate=None, encrypt_after_connect=False,
+ router_cls=Router, doc_root=os.curdir, ws_doc_root=None, routes=None,
+ rewriter_cls=RequestRewriter, bind_address=True, rewrites=None,
+ latency=None, config=None, http2=False):
+
+ if routes is None:
+ routes = default_routes.routes
+
+ self.host = host
+
+ self.router = router_cls(doc_root, routes)
+ self.rewriter = rewriter_cls(rewrites if rewrites is not None else [])
+
+ self.use_ssl = use_ssl
+ self.http2 = http2
+ self.logger = get_logger()
+
+ if server_cls is None:
+ server_cls = WebTestServer
+
+ if use_ssl:
+            if not os.path.exists(key_file):
+                raise ValueError(f"SSL key not found: {key_file}")
+            if not os.path.exists(certificate):
+                raise ValueError(f"SSL certificate not found: {certificate}")
+
+ try:
+ self.httpd = server_cls((host, port),
+ handler_cls,
+ self.router,
+ self.rewriter,
+ config=config,
+ bind_address=bind_address,
+ ws_doc_root=ws_doc_root,
+ use_ssl=use_ssl,
+ key_file=key_file,
+ certificate=certificate,
+ encrypt_after_connect=encrypt_after_connect,
+ latency=latency,
+ http2=http2)
+ self.started = False
+
+ _host, self.port = self.httpd.socket.getsockname()
+ except Exception:
+ self.logger.critical("Failed to start HTTP server on port %s; "
+ "is something already using that port?" % port)
+ raise
+
+ def start(self):
+ """Start the server.
+
+ :param block: True to run the server on the current thread, blocking,
+ False to run on a separate thread."""
+ http_type = "http2" if self.http2 else "https" if self.use_ssl else "http"
+ http_scheme = "https" if self.use_ssl else "http"
+ self.logger.info(f"Starting {http_type} server on {http_scheme}://{self.host}:{self.port}")
+ self.started = True
+ self.server_thread = threading.Thread(target=self.httpd.serve_forever)
+ self.server_thread.setDaemon(True) # don't hang on exit
+ self.server_thread.start()
+
+ def stop(self):
+ """
+ Stops the server.
+
+ If the server is not running, this method has no effect.
+ """
+ if self.started:
+ try:
+ self.httpd.shutdown()
+ self.httpd.server_close()
+ self.server_thread.join()
+ self.server_thread = None
+ self.logger.info(f"Stopped http server on {self.host}:{self.port}")
+ except AttributeError:
+ pass
+ self.started = False
+ self.httpd = None
+
+ def get_url(self, path="/", query=None, fragment=None):
+ if not self.started:
+ return None
+
+ return urlunsplit(("http" if not self.use_ssl else "https",
+ f"{self.host}:{self.port}",
+ path, query, fragment))
+
+
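+# Illustrative usage sketch (host, port and document root are hypothetical):
+def _example_run_server(doc_root):
+    httpd = WebTestHttpd(host="127.0.0.1", port=8000, doc_root=doc_root)
+    httpd.start()
+    try:
+        # e.g. "http://127.0.0.1:8000/example.html"
+        return httpd.get_url("/example.html")
+    finally:
+        httpd.stop()
+
+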
+class _WebSocketConnection:
+ def __init__(self, request_handler, response):
+ """Mimic mod_python mp_conn.
+
+ :param request_handler: A H2HandlerCopy instance.
+
+ :param response: A H2Response instance.
+ """
+
+ self._request_handler = request_handler
+ self._response = response
+
+ self.remote_addr = self._request_handler.client_address
+
+ def write(self, data):
+ self._response.writer.write_data(data, False)
+
+ def read(self, length):
+ return self._request_handler.rfile.read(length)
+
+
+class _WebSocketRequest:
+ def __init__(self, request_handler, response):
+ """Mimic mod_python request.
+
+ :param request_handler: A H2HandlerCopy instance.
+
+ :param response: A H2Response instance.
+ """
+
+ self.connection = _WebSocketConnection(request_handler, response)
+ self.protocol = "HTTP/2"
+ self._response = response
+
+ self.uri = request_handler.path
+ self.unparsed_uri = request_handler.path
+ self.method = request_handler.command
+ # read headers from request_handler
+ self.headers_in = request_handler.headers
+ # write headers directly into H2Response
+ self.headers_out = response.headers
+
+ # proxies status to H2Response
+ @property
+ def status(self):
+ return self._response.status
+
+ @status.setter
+ def status(self, status):
+ self._response.status = status
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/sslutils/__init__.py b/testing/web-platform/tests/tools/wptserve/wptserve/sslutils/__init__.py
new file mode 100644
index 0000000000..244faeadda
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/sslutils/__init__.py
@@ -0,0 +1,16 @@
+# mypy: allow-untyped-defs
+
+from .base import NoSSLEnvironment
+from .openssl import OpenSSLEnvironment
+from .pregenerated import PregeneratedSSLEnvironment
+
+environments = {"none": NoSSLEnvironment,
+ "openssl": OpenSSLEnvironment,
+ "pregenerated": PregeneratedSSLEnvironment}
+
+
+def get_cls(name):
+ try:
+ return environments[name]
+ except KeyError:
+ raise ValueError("%s is not a valid SSL type." % name)
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/sslutils/base.py b/testing/web-platform/tests/tools/wptserve/wptserve/sslutils/base.py
new file mode 100644
index 0000000000..d5f913735a
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/sslutils/base.py
@@ -0,0 +1,19 @@
+# mypy: allow-untyped-defs
+
+class NoSSLEnvironment:
+ ssl_enabled = False
+
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args, **kwargs):
+ pass
+
+ def host_cert_path(self, hosts):
+ return None, None
+
+ def ca_cert_path(self, hosts):
+ return None
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/sslutils/openssl.py b/testing/web-platform/tests/tools/wptserve/wptserve/sslutils/openssl.py
new file mode 100644
index 0000000000..5a16097e37
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/sslutils/openssl.py
@@ -0,0 +1,424 @@
+# mypy: allow-untyped-defs
+
+import functools
+import os
+import random
+import shutil
+import subprocess
+import tempfile
+from datetime import datetime, timedelta
+
+# Amount of time beyond the present to consider certificates "expired." This
+# allows certificates to be proactively re-generated in the "buffer" period
+# prior to their exact expiration time.
+CERT_EXPIRY_BUFFER = dict(hours=6)
+
+
+class OpenSSL:
+ def __init__(self, logger, binary, base_path, conf_path, hosts, duration,
+ base_conf_path=None):
+ """Context manager for interacting with OpenSSL.
+ Creates a config file for the duration of the context.
+
+ :param logger: stdlib logger or python structured logger
+ :param binary: path to openssl binary
+ :param base_path: path to directory for storing certificates
+ :param conf_path: path for configuration file storing configuration data
+ :param hosts: list of hosts to include in configuration (or None if not
+ generating host certificates)
+        :param duration: Certificate duration in days
+        :param base_conf_path: Optional path to a base OpenSSL config; if given it
+                               is exported as OPENSSL_CONF when running openssl"""
+
+ self.base_path = base_path
+ self.binary = binary
+ self.conf_path = conf_path
+ self.base_conf_path = base_conf_path
+ self.logger = logger
+ self.proc = None
+ self.cmd = []
+ self.hosts = hosts
+ self.duration = duration
+
+ def __enter__(self):
+ with open(self.conf_path, "w") as f:
+ f.write(get_config(self.base_path, self.hosts, self.duration))
+ return self
+
+ def __exit__(self, *args, **kwargs):
+ os.unlink(self.conf_path)
+
+ def log(self, line):
+ if hasattr(self.logger, "process_output"):
+ self.logger.process_output(self.proc.pid if self.proc is not None else None,
+ line.decode("utf8", "replace"),
+ command=" ".join(self.cmd))
+ else:
+ self.logger.debug(line)
+
+ def __call__(self, cmd, *args, **kwargs):
+ """Run a command using OpenSSL in the current context.
+
+ :param cmd: The openssl subcommand to run
+ :param *args: Additional arguments to pass to the command
+ """
+ self.cmd = [self.binary, cmd]
+ if cmd != "x509":
+ self.cmd += ["-config", self.conf_path]
+ self.cmd += list(args)
+
+ # Copy the environment and add OPENSSL_CONF if available.
+ env = os.environ.copy()
+ if self.base_conf_path is not None:
+ env["OPENSSL_CONF"] = self.base_conf_path
+
+ self.proc = subprocess.Popen(self.cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+ env=env)
+ stdout, stderr = self.proc.communicate()
+ self.log(stdout)
+ if self.proc.returncode != 0:
+ raise subprocess.CalledProcessError(self.proc.returncode, self.cmd,
+ output=stdout)
+
+ self.cmd = []
+ self.proc = None
+ return stdout
+
+
+def make_subject(common_name,
+ country=None,
+ state=None,
+ locality=None,
+ organization=None,
+ organization_unit=None):
+ args = [("country", "C"),
+ ("state", "ST"),
+ ("locality", "L"),
+ ("organization", "O"),
+ ("organization_unit", "OU"),
+ ("common_name", "CN")]
+
+ rv = []
+
+ for var, key in args:
+ value = locals()[var]
+ if value is not None:
+ rv.append("/%s=%s" % (key, value.replace("/", "\\/")))
+
+ return "".join(rv)
+
+def make_alt_names(hosts):
+ return ",".join("DNS:%s" % host for host in hosts)
+
+def make_name_constraints(hosts):
+ return ",".join("permitted;DNS:%s" % host for host in hosts)
+
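+# For example (illustrative):
+#   make_alt_names(["web-platform.test", "www.web-platform.test"])
+#       == "DNS:web-platform.test,DNS:www.web-platform.test"
+#   make_name_constraints(["web-platform.test"])
+#       == "permitted;DNS:web-platform.test"
+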
+def get_config(root_dir, hosts, duration=30):
+ if hosts is None:
+ san_line = ""
+ constraints_line = ""
+ else:
+ san_line = "subjectAltName = %s" % make_alt_names(hosts)
+ constraints_line = "nameConstraints = " + make_name_constraints(hosts)
+
+ if os.path.sep == "\\":
+ # This seems to be needed for the Shining Light OpenSSL on
+ # Windows, at least.
+ root_dir = root_dir.replace("\\", "\\\\")
+
+ rv = """[ ca ]
+default_ca = CA_default
+
+[ CA_default ]
+dir = %(root_dir)s
+certs = $dir
+new_certs_dir = $certs
+crl_dir = $dir%(sep)scrl
+database = $dir%(sep)sindex.txt
+private_key = $dir%(sep)scacert.key
+certificate = $dir%(sep)scacert.pem
+serial = $dir%(sep)sserial
+crldir = $dir%(sep)scrl
+crlnumber = $dir%(sep)scrlnumber
+crl = $crldir%(sep)scrl.pem
+RANDFILE = $dir%(sep)sprivate%(sep)s.rand
+x509_extensions = usr_cert
+name_opt = ca_default
+cert_opt = ca_default
+default_days = %(duration)d
+default_crl_days = %(duration)d
+default_md = sha256
+preserve = no
+policy = policy_anything
+copy_extensions = copy
+
+[ policy_anything ]
+countryName = optional
+stateOrProvinceName = optional
+localityName = optional
+organizationName = optional
+organizationalUnitName = optional
+commonName = supplied
+emailAddress = optional
+
+[ req ]
+default_bits = 2048
+default_keyfile = privkey.pem
+distinguished_name = req_distinguished_name
+attributes = req_attributes
+x509_extensions = v3_ca
+
+# Passwords for private keys if not present they will be prompted for
+# input_password = secret
+# output_password = secret
+string_mask = utf8only
+req_extensions = v3_req
+
+[ req_distinguished_name ]
+countryName = Country Name (2 letter code)
+countryName_default = AU
+countryName_min = 2
+countryName_max = 2
+stateOrProvinceName = State or Province Name (full name)
+stateOrProvinceName_default =
+localityName = Locality Name (eg, city)
+0.organizationName = Organization Name
+0.organizationName_default = Web Platform Tests
+organizationalUnitName = Organizational Unit Name (eg, section)
+#organizationalUnitName_default =
+commonName = Common Name (e.g. server FQDN or YOUR name)
+commonName_max = 64
+emailAddress = Email Address
+emailAddress_max = 64
+
+[ req_attributes ]
+
+[ usr_cert ]
+basicConstraints=CA:false
+subjectKeyIdentifier=hash
+authorityKeyIdentifier=keyid,issuer
+
+[ v3_req ]
+basicConstraints = CA:FALSE
+keyUsage = nonRepudiation, digitalSignature, keyEncipherment
+extendedKeyUsage = serverAuth
+%(san_line)s
+
+[ v3_ca ]
+basicConstraints = CA:true
+subjectKeyIdentifier=hash
+authorityKeyIdentifier=keyid:always,issuer:always
+keyUsage = keyCertSign
+%(constraints_line)s
+""" % {"root_dir": root_dir,
+ "san_line": san_line,
+ "duration": duration,
+ "constraints_line": constraints_line,
+ "sep": os.path.sep.replace("\\", "\\\\")}
+
+ return rv
+
+class OpenSSLEnvironment:
+ ssl_enabled = True
+
+ def __init__(self, logger, openssl_binary="openssl", base_path=None,
+ password="web-platform-tests", force_regenerate=False,
+ duration=30, base_conf_path=None):
+ """SSL environment that creates a local CA and host certificate using OpenSSL.
+
+ By default this will look in base_path for existing certificates that are still
+ valid and only create new certificates if there aren't any. This behaviour can
+ be adjusted using the force_regenerate option.
+
+ :param logger: a stdlib logging compatible logger or mozlog structured logger
+ :param openssl_binary: Path to the OpenSSL binary
+ :param base_path: Path in which certificates will be stored. If None, a temporary
+ directory will be used and removed when the server shuts down
+ :param password: Password to use
+ :param force_regenerate: Always create a new certificate even if one already exists.
+ """
+ self.logger = logger
+
+ self.temporary = False
+ if base_path is None:
+ base_path = tempfile.mkdtemp()
+ self.temporary = True
+
+ self.base_path = os.path.abspath(base_path)
+ self.password = password
+ self.force_regenerate = force_regenerate
+ self.duration = duration
+ self.base_conf_path = base_conf_path
+
+ self.path = None
+ self.binary = openssl_binary
+ self.openssl = None
+
+ self._ca_cert_path = None
+ self._ca_key_path = None
+ self.host_certificates = {}
+
+ def __enter__(self):
+ if not os.path.exists(self.base_path):
+ os.makedirs(self.base_path)
+
+ path = functools.partial(os.path.join, self.base_path)
+
+ with open(path("index.txt"), "w"):
+ pass
+ with open(path("serial"), "w") as f:
+ serial = "%x" % random.randint(0, 1000000)
+ if len(serial) % 2:
+ serial = "0" + serial
+ f.write(serial)
+
+ self.path = path
+
+ return self
+
+ def __exit__(self, *args, **kwargs):
+ if self.temporary:
+ shutil.rmtree(self.base_path)
+
+ def _config_openssl(self, hosts):
+ conf_path = self.path("openssl.cfg")
+ return OpenSSL(self.logger, self.binary, self.base_path, conf_path, hosts,
+ self.duration, self.base_conf_path)
+
+ def ca_cert_path(self, hosts):
+ """Get the path to the CA certificate file, generating a
+ new one if needed"""
+ if self._ca_cert_path is None and not self.force_regenerate:
+ self._load_ca_cert()
+ if self._ca_cert_path is None:
+ self._generate_ca(hosts)
+ return self._ca_cert_path
+
+ def _load_ca_cert(self):
+ key_path = self.path("cacert.key")
+ cert_path = self.path("cacert.pem")
+
+ if self.check_key_cert(key_path, cert_path, None):
+ self.logger.info("Using existing CA cert")
+ self._ca_key_path, self._ca_cert_path = key_path, cert_path
+
+ def check_key_cert(self, key_path, cert_path, hosts):
+ """Check that a key and cert file exist and are valid"""
+ if not os.path.exists(key_path) or not os.path.exists(cert_path):
+ return False
+
+ with self._config_openssl(hosts) as openssl:
+ end_date_str = openssl("x509",
+ "-noout",
+ "-enddate",
+ "-in", cert_path).decode("utf8").split("=", 1)[1].strip()
+ # Not sure if this works in other locales
+ end_date = datetime.strptime(end_date_str, "%b %d %H:%M:%S %Y %Z")
+ time_buffer = timedelta(**CERT_EXPIRY_BUFFER)
+ # Because `strptime` does not account for time zone offsets, it is
+ # always in terms of UTC, so the current time should be calculated
+ # accordingly.
+ if end_date < datetime.utcnow() + time_buffer:
+ return False
+
+ #TODO: check the key actually signed the cert.
+ return True
+
+ def _generate_ca(self, hosts):
+ path = self.path
+ self.logger.info("Generating new CA in %s" % self.base_path)
+
+ key_path = path("cacert.key")
+ req_path = path("careq.pem")
+ cert_path = path("cacert.pem")
+
+ with self._config_openssl(hosts) as openssl:
+ openssl("req",
+ "-batch",
+ "-new",
+ "-newkey", "rsa:2048",
+ "-keyout", key_path,
+ "-out", req_path,
+ "-subj", make_subject("web-platform-tests"),
+ "-passout", "pass:%s" % self.password)
+
+ openssl("ca",
+ "-batch",
+ "-create_serial",
+ "-keyfile", key_path,
+ "-passin", "pass:%s" % self.password,
+ "-selfsign",
+ "-extensions", "v3_ca",
+ "-notext",
+ "-in", req_path,
+ "-out", cert_path)
+
+ os.unlink(req_path)
+
+ self._ca_key_path, self._ca_cert_path = key_path, cert_path
+
+ def host_cert_path(self, hosts):
+ """Get a tuple of (private key path, certificate path) for a host,
+ generating new ones if necessary.
+
+ hosts must be a list of all hosts to appear on the certificate, with
+ the primary hostname first."""
+        # Normalise the ordering (shortest name first) so that equivalent host
+        # lists share a cache entry and certificate files.
+        hosts = tuple(sorted(hosts, key=len))
+ if hosts not in self.host_certificates:
+ if not self.force_regenerate:
+ key_cert = self._load_host_cert(hosts)
+ else:
+ key_cert = None
+ if key_cert is None:
+ key, cert = self._generate_host_cert(hosts)
+ else:
+ key, cert = key_cert
+ self.host_certificates[hosts] = key, cert
+
+ return self.host_certificates[hosts]
+
+ def _load_host_cert(self, hosts):
+ host = hosts[0]
+ key_path = self.path("%s.key" % host)
+ cert_path = self.path("%s.pem" % host)
+
+ # TODO: check that this cert was signed by the CA cert
+ if self.check_key_cert(key_path, cert_path, hosts):
+ self.logger.info("Using existing host cert")
+ return key_path, cert_path
+
+ def _generate_host_cert(self, hosts):
+ host = hosts[0]
+ if not self.force_regenerate:
+ self._load_ca_cert()
+ if self._ca_key_path is None:
+ self._generate_ca(hosts)
+ ca_key_path = self._ca_key_path
+
+ assert os.path.exists(ca_key_path)
+
+ path = self.path
+
+ req_path = path("wpt.req")
+ cert_path = path("%s.pem" % host)
+ key_path = path("%s.key" % host)
+
+ self.logger.info("Generating new host cert")
+
+ with self._config_openssl(hosts) as openssl:
+ openssl("req",
+ "-batch",
+ "-newkey", "rsa:2048",
+ "-keyout", key_path,
+ "-in", ca_key_path,
+ "-nodes",
+ "-out", req_path)
+
+ openssl("ca",
+ "-batch",
+ "-in", req_path,
+ "-passin", "pass:%s" % self.password,
+ "-subj", make_subject(host),
+ "-out", cert_path)
+
+ os.unlink(req_path)
+
+ return key_path, cert_path
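+
+
+# A minimal usage sketch (illustrative only, not part of the upstream module).
+# It assumes the ``openssl`` binary is on PATH; the directory name and host
+# names below are arbitrary example values.
+if __name__ == "__main__":
+    import logging
+
+    logging.basicConfig(level=logging.INFO)
+    demo_logger = logging.getLogger("openssl-demo")
+
+    # A persistent base_path means certificates are reused on later runs as
+    # long as they are still within the expiry buffer.
+    with OpenSSLEnvironment(demo_logger, base_path="_wpt_certs_demo") as env:
+        ca_cert = env.ca_cert_path(["web-platform.test"])
+        key, cert = env.host_cert_path(["web-platform.test",
+                                        "www.web-platform.test"])
+        demo_logger.info("CA cert: %s", ca_cert)
+        demo_logger.info("Host key: %s, host cert: %s", key, cert)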
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/sslutils/pregenerated.py b/testing/web-platform/tests/tools/wptserve/wptserve/sslutils/pregenerated.py
new file mode 100644
index 0000000000..5e9b1181a4
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/sslutils/pregenerated.py
@@ -0,0 +1,28 @@
+# mypy: allow-untyped-defs
+
+class PregeneratedSSLEnvironment:
+    """SSL environment for use with existing key/certificate files,
+    e.g. when running on a server with a public domain name.
+    """
+ ssl_enabled = True
+
+ def __init__(self, logger, host_key_path, host_cert_path,
+ ca_cert_path=None):
+ self._ca_cert_path = ca_cert_path
+ self._host_key_path = host_key_path
+ self._host_cert_path = host_cert_path
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args, **kwargs):
+ pass
+
+ def host_cert_path(self, hosts):
+ """Return the key and certificate paths for the host"""
+ return self._host_key_path, self._host_cert_path
+
+ def ca_cert_path(self, hosts):
+ """Return the certificate path of the CA that signed the
+ host certificates, or None if that isn't known"""
+ return self._ca_cert_path
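+
+
+# A minimal usage sketch (illustrative only): wire up existing key/certificate
+# files, e.g. ones issued for a public domain name. The file paths and host
+# name below are hypothetical.
+if __name__ == "__main__":
+    import logging
+
+    env = PregeneratedSSLEnvironment(logging.getLogger("ssl-demo"),
+                                     "/etc/ssl/private/example.key",
+                                     "/etc/ssl/certs/example.pem")
+    with env:
+        key_path, cert_path = env.host_cert_path(["example.test"])
+        # No CA certificate was supplied, so ca_cert_path returns None.
+        assert env.ca_cert_path(["example.test"]) is None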
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/stash.py b/testing/web-platform/tests/tools/wptserve/wptserve/stash.py
new file mode 100644
index 0000000000..90c3078ff8
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/stash.py
@@ -0,0 +1,235 @@
+# mypy: allow-untyped-defs
+
+import base64
+import json
+import os
+import threading
+import queue
+import uuid
+
+from multiprocessing.managers import BaseManager, BaseProxy
+# We also depend on some undocumented parts of multiprocessing.managers which
+# don't have any type annotations.
+from multiprocessing.managers import AcquirerProxy, DictProxy, public_methods # type: ignore
+from typing import Dict
+
+from .utils import isomorphic_encode
+
+
+class StashManager(BaseManager):
+ shared_data: Dict[str, object] = {}
+ lock = threading.Lock()
+
+
+def _get_shared():
+ return StashManager.shared_data
+
+
+def _get_lock():
+ return StashManager.lock
+
+StashManager.register("get_dict",
+ callable=_get_shared,
+ proxytype=DictProxy)
+StashManager.register('Lock',
+ callable=_get_lock,
+ proxytype=AcquirerProxy)
+
+
+# We have to create an explicit class here because the built-in
+# AutoProxy has a bug with nested managers, and MakeProxyType
+# doesn't work with spawn-based multiprocessing, since the
+# generated class can't be pickled for use in child processes.
+class QueueProxy(BaseProxy):
+ _exposed_ = public_methods(queue.Queue)
+
+
+for method in QueueProxy._exposed_:
+
+ def impl_fn(method):
+ def _impl(self, *args, **kwargs):
+ return self._callmethod(method, args, kwargs)
+ _impl.__name__ = method
+ return _impl
+
+ setattr(QueueProxy, method, impl_fn(method)) # type: ignore
+
+
+StashManager.register("Queue",
+ callable=queue.Queue,
+ proxytype=QueueProxy)
+
+
+class StashServer:
+ def __init__(self, address=None, authkey=None, mp_context=None):
+ self.address = address
+ self.authkey = authkey
+ self.manager = None
+ self.mp_context = mp_context
+
+ def __enter__(self):
+ self.manager, self.address, self.authkey = start_server(self.address,
+ self.authkey,
+ self.mp_context)
+ store_env_config(self.address, self.authkey)
+
+ def __exit__(self, *args, **kwargs):
+ if self.manager is not None:
+ self.manager.shutdown()
+
+
+def load_env_config():
+ address, authkey = json.loads(os.environ["WPT_STASH_CONFIG"])
+ if isinstance(address, list):
+ address = tuple(address)
+ else:
+ address = str(address)
+ authkey = base64.b64decode(authkey)
+ return address, authkey
+
+
+def store_env_config(address, authkey):
+ authkey = base64.b64encode(authkey)
+ os.environ["WPT_STASH_CONFIG"] = json.dumps((address, authkey.decode("ascii")))
+
+
+def start_server(address=None, authkey=None, mp_context=None):
+ if isinstance(authkey, str):
+ authkey = authkey.encode("ascii")
+ kwargs = {}
+ if mp_context is not None:
+ kwargs["ctx"] = mp_context
+ manager = StashManager(address, authkey, **kwargs)
+ manager.start()
+
+ address = manager._address
+ if isinstance(address, bytes):
+ address = address.decode("ascii")
+ return (manager, address, manager._authkey)
+
+
+class LockWrapper:
+ def __init__(self, lock):
+ self.lock = lock
+
+ def acquire(self):
+ self.lock.acquire()
+
+ def release(self):
+ self.lock.release()
+
+ def __enter__(self):
+ self.acquire()
+
+ def __exit__(self, *args, **kwargs):
+ self.release()
+
+
+# TODO: Consider expiring values after some fixed time for long-running servers.
+
+class Stash:
+ """Key-value store for persisting data across HTTP/S and WS/S requests.
+
+    This data store is specifically designed for persisting data across server
+    requests. Synchronization is achieved with a BaseManager from the
+    multiprocessing module, so different processes can access the same data.
+
+    Stash can be used interchangeably between HTTP, HTTPS, WS and WSS servers.
+    Note that WS/S handlers require additional steps to access the same
+    underlying shared data in the Stash; this is usually achieved by calling
+    load_env_config(). When using the Stash interchangeably between HTTP/S and
+    WS/S requests, the path part of the key should be explicitly specified if
+    accessing the same key/value subset.
+
+ The store has several unusual properties. Keys are of the form (path,
+ uuid), where path is, by default, the path in the HTTP request and
+ uuid is a unique id. In addition, the store is write-once, read-once,
+ i.e. the value associated with a particular key cannot be changed once
+ written and the read operation (called "take") is destructive. Taken together,
+ these properties make it difficult for data to accidentally leak
+ between different resources or different requests for the same
+ resource.
+ """
+
+ _proxy = None
+ lock = None
+ manager = None
+ _initializing = threading.Lock()
+
+ def __init__(self, default_path, address=None, authkey=None):
+ self.default_path = default_path
+ self._get_proxy(address, authkey)
+ self.data = Stash._proxy
+
+ def _get_proxy(self, address=None, authkey=None):
+ if address is None and authkey is None:
+ Stash._proxy = {}
+ Stash.lock = threading.Lock()
+
+ # Initializing the proxy involves connecting to the remote process and
+ # retrieving two proxied objects. This process is not inherently
+ # atomic, so a lock must be used to make it so. Atomicity ensures that
+ # only one thread attempts to initialize the connection and that any
+ # threads running in parallel correctly wait for initialization to be
+ # fully complete.
+ with Stash._initializing:
+ if Stash.lock:
+ return
+
+ Stash.manager = StashManager(address, authkey)
+ Stash.manager.connect()
+ Stash._proxy = self.manager.get_dict()
+ Stash.lock = LockWrapper(self.manager.Lock())
+
+ def get_queue(self):
+ return self.manager.Queue()
+
+ def _wrap_key(self, key, path):
+ if path is None:
+ path = self.default_path
+        # The path is folded into the key itself because the data backing the
+        # stash can be a DictProxy, which would not detect changes made by
+        # writing into a nested dict.
+ if isinstance(key, bytes):
+ # UUIDs are within the ASCII charset.
+ key = key.decode('ascii')
+ return (isomorphic_encode(path), uuid.UUID(key).bytes)
+
+ def put(self, key, value, path=None):
+ """Place a value in the shared stash.
+
+ :param key: A UUID to use as the data's key.
+ :param value: The data to store. This can be any python object.
+ :param path: The path that has access to read the data (by default
+ the current request path)"""
+ if value is None:
+ raise ValueError("SharedStash value may not be set to None")
+ internal_key = self._wrap_key(key, path)
+ if internal_key in self.data:
+ raise StashError("Tried to overwrite existing shared stash value "
+ "for key %s (old value was %s, new value is %s)" %
+ (internal_key, self.data[internal_key], value))
+ else:
+ self.data[internal_key] = value
+
+ def take(self, key, path=None):
+ """Remove a value from the shared stash and return it.
+
+ :param key: A UUID to use as the data's key.
+ :param path: The path that has access to read the data (by default
+ the current request path)"""
+ internal_key = self._wrap_key(key, path)
+ value = self.data.get(internal_key, None)
+ if value is not None:
+ try:
+ self.data.pop(internal_key)
+ except KeyError:
+                # Another caller may have taken the value between our get and
+                # pop; treat that as if the key were already gone.
+ pass
+
+ return value
+
+
+class StashError(Exception):
+ pass
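+
+
+# A minimal end-to-end sketch (illustrative only, not part of the module):
+# start a StashServer, connect a Stash via the exported environment config,
+# and demonstrate the write-once, read-once semantics. The path and key are
+# arbitrary example values; run as a module so the relative import resolves.
+if __name__ == "__main__":
+    with StashServer(("localhost", 0), authkey=str(uuid.uuid4())):
+        address, authkey = load_env_config()
+        stash = Stash("/demo/", address=address, authkey=authkey)
+
+        token = str(uuid.uuid4())
+        stash.put(token, {"result": "pass"})
+
+        # The first take returns the stored value and removes it...
+        assert stash.take(token) == {"result": "pass"}
+        # ...so a second take of the same key yields None.
+        assert stash.take(token) is None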
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/utils.py b/testing/web-platform/tests/tools/wptserve/wptserve/utils.py
new file mode 100644
index 0000000000..a592e41637
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/utils.py
@@ -0,0 +1,195 @@
+import socket
+from typing import AnyStr, Dict, List, TypeVar
+
+from .logger import get_logger
+
+KT = TypeVar('KT')
+VT = TypeVar('VT')
+
+
+def isomorphic_decode(s: AnyStr) -> str:
+ """Decodes a binary string into a text string using iso-8859-1.
+
+ Returns `str`. The function is a no-op if the argument already has a text
+ type. iso-8859-1 is chosen because it is an 8-bit encoding whose code
+ points range from 0x0 to 0xFF and the values are the same as the binary
+ representations, so any binary string can be decoded into and encoded from
+ iso-8859-1 without any errors or data loss. Python 3 also uses iso-8859-1
+ (or latin-1) extensively in http:
+ https://github.com/python/cpython/blob/273fc220b25933e443c82af6888eb1871d032fb8/Lib/http/client.py#L213
+ """
+ if isinstance(s, str):
+ return s
+
+ if isinstance(s, bytes):
+ return s.decode("iso-8859-1")
+
+ raise TypeError("Unexpected value (expecting string-like): %r" % s)
+
+
+def isomorphic_encode(s: AnyStr) -> bytes:
+ """Encodes a text-type string into binary data using iso-8859-1.
+
+ Returns `bytes`. The function is a no-op if the argument already has a
+ binary type. This is the counterpart of isomorphic_decode.
+ """
+ if isinstance(s, bytes):
+ return s
+
+ if isinstance(s, str):
+ return s.encode("iso-8859-1")
+
+ raise TypeError("Unexpected value (expecting string-like): %r" % s)
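+
+# Informal round-trip illustration (the byte values are just an example): every
+# byte maps to the code point with the same value, so encoding the decoded
+# string recovers the original bytes exactly, with no UTF-8 interpretation.
+#
+#     >>> isomorphic_decode(b"\xc3\xa9")
+#     'Ã©'
+#     >>> isomorphic_encode(isomorphic_decode(b"\xc3\xa9"))
+#     b'\xc3\xa9'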
+
+
+def invert_dict(d: Dict[KT, List[VT]]) -> Dict[VT, KT]:
+    """Invert a mapping from keys to lists of values into a value-to-key mapping.
+
+    Raises ValueError if the same value appears under more than one key."""
+    rv: Dict[VT, KT] = {}
+    for key, values in d.items():
+        for value in values:
+            if value in rv:
+                raise ValueError("Duplicate value %r" % (value,))
+            rv[value] = key
+    return rv
+
+
+class HTTPException(Exception):
+ def __init__(self, code: int, message: str = ""):
+ self.code = code
+ self.message = message
+
+
+def _open_socket(host: str, port: int) -> socket.socket:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ if port != 0:
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ sock.bind((host, port))
+ sock.listen(5)
+ return sock
+
+
+def is_bad_port(port: int) -> bool:
+ """
+ Bad port as per https://fetch.spec.whatwg.org/#port-blocking
+ """
+ return port in [
+ 1, # tcpmux
+ 7, # echo
+ 9, # discard
+ 11, # systat
+ 13, # daytime
+ 15, # netstat
+ 17, # qotd
+ 19, # chargen
+ 20, # ftp-data
+ 21, # ftp
+ 22, # ssh
+ 23, # telnet
+ 25, # smtp
+ 37, # time
+ 42, # name
+ 43, # nicname
+ 53, # domain
+ 69, # tftp
+ 77, # priv-rjs
+ 79, # finger
+ 87, # ttylink
+ 95, # supdup
+        101,  # hostname
+ 102, # iso-tsap
+ 103, # gppitnp
+ 104, # acr-nema
+ 109, # pop2
+ 110, # pop3
+ 111, # sunrpc
+ 113, # auth
+ 115, # sftp
+ 117, # uucp-path
+ 119, # nntp
+ 123, # ntp
+ 135, # loc-srv / epmap
+ 137, # netbios-ns
+ 139, # netbios-ssn
+ 143, # imap2
+ 161, # snmp
+ 179, # bgp
+ 389, # ldap
+ 427, # afp (alternate)
+ 465, # smtp (alternate)
+ 512, # print / exec
+ 513, # login
+ 514, # shell
+ 515, # printer
+ 526, # tempo
+ 530, # courier
+ 531, # chat
+ 532, # netnews
+ 540, # uucp
+ 548, # afp
+ 554, # rtsp
+ 556, # remotefs
+ 563, # nntp+ssl
+ 587, # smtp (outgoing)
+ 601, # syslog-conn
+ 636, # ldap+ssl
+ 989, # ftps-data
+        990,  # ftps
+        993,  # imaps
+ 995, # pop3+ssl
+ 1719, # h323gatestat
+ 1720, # h323hostcall
+ 1723, # pptp
+ 2049, # nfs
+ 3659, # apple-sasl
+ 4045, # lockd
+ 5060, # sip
+ 5061, # sips
+ 6000, # x11
+ 6566, # sane-port
+ 6665, # irc (alternate)
+ 6666, # irc (alternate)
+ 6667, # irc (default)
+ 6668, # irc (alternate)
+ 6669, # irc (alternate)
+ 6697, # irc+tls
+ 10080, # amanda
+ ]
+
+
+def get_port(host: str = '') -> int:
+ host = host or '127.0.0.1'
+ port = 0
+ while True:
+ free_socket = _open_socket(host, 0)
+ port = free_socket.getsockname()[1]
+ free_socket.close()
+ if not is_bad_port(port):
+ break
+ return port
+
+def http2_compatible() -> bool:
+ # The HTTP/2.0 server requires OpenSSL 1.0.2+.
+ #
+ # For systems using other SSL libraries (e.g. LibreSSL), we assume they
+ # have the necessary support.
+ import ssl
+ if not ssl.OPENSSL_VERSION.startswith("OpenSSL"):
+ logger = get_logger()
+ logger.warning(
+ 'Skipping HTTP/2.0 compatibility check as system is not using '
+ 'OpenSSL (found: %s)' % ssl.OPENSSL_VERSION)
+ return True
+
+    # Note that OpenSSL's version tuple format changed with 3.0.0:
+    # ssl.OPENSSL_VERSION_INFO is a
+    #     (major, minor, 0, patch, 0)
+    # tuple for OpenSSL 3.0.0 and later, and a
+    #     (major, minor, fix, patch, status)
+    # tuple for earlier releases. Semantically, "patch" in 3.0.0+ plays the
+    # role that "fix" did in previous versions.
+    #
+    # The check below accepts OpenSSL 3.x, 1.1.x and 1.0.2+.
+ ssl_v = ssl.OPENSSL_VERSION_INFO
+ return (ssl_v[0] > 1 or
+ (ssl_v[0] == 1 and
+ (ssl_v[1] == 1 or
+ (ssl_v[1] == 0 and ssl_v[2] >= 2))))
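+
+
+# A small self-check sketch (illustrative only, not part of the module's API);
+# run as a module so the relative import resolves. It exercises invert_dict,
+# get_port and http2_compatible with example inputs.
+if __name__ == "__main__":
+    # invert_dict maps each listed value back to its key.
+    assert invert_dict({"a": [1, 2], "b": [3]}) == {1: "a", 2: "a", 3: "b"}
+
+    # get_port asks the OS for a free port and retries until it finds one that
+    # is not on the Fetch blocklist above.
+    port = get_port()
+    assert port != 0 and not is_bad_port(port)
+
+    # http2_compatible reports whether the linked TLS library is recent enough
+    # for the HTTP/2 server.
+    print("HTTP/2 compatible TLS stack:", http2_compatible())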
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/wptserve.py b/testing/web-platform/tests/tools/wptserve/wptserve/wptserve.py
new file mode 100755
index 0000000000..1eaa934936
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/wptserve.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+# mypy: allow-untyped-defs
+
+import argparse
+import os
+
+from .server import WebTestHttpd
+
+def abs_path(path):
+ return os.path.abspath(path)
+
+
+def parse_args():
+ parser = argparse.ArgumentParser(description="HTTP server designed for extreme flexibility "
+ "required in testing situations.")
+ parser.add_argument("document_root", action="store", type=abs_path,
+ help="Root directory to serve files from")
+ parser.add_argument("--port", "-p", dest="port", action="store",
+ type=int, default=8000,
+ help="Port number to run server on")
+ parser.add_argument("--host", "-H", dest="host", action="store",
+ type=str, default="127.0.0.1",
+ help="Host to run server on")
+ return parser.parse_args()
+
+
+def main():
+ args = parse_args()
+ httpd = WebTestHttpd(host=args.host, port=args.port,
+ use_ssl=False, certificate=None,
+ doc_root=args.document_root)
+ httpd.start()
+
+if __name__ == "__main__":
+ main() # type: ignore
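+
+# Example invocation (a sketch; the document root and port are placeholders).
+# Because of the relative import this must be run as a module, e.g. from the
+# tools/wptserve directory:
+#
+#     python3 -m wptserve.wptserve /path/to/docroot --port 8080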
diff --git a/testing/web-platform/tests/tools/wptserve/wptserve/ws_h2_handshake.py b/testing/web-platform/tests/tools/wptserve/wptserve/ws_h2_handshake.py
new file mode 100644
index 0000000000..af668dd558
--- /dev/null
+++ b/testing/web-platform/tests/tools/wptserve/wptserve/ws_h2_handshake.py
@@ -0,0 +1,72 @@
+# mypy: allow-untyped-defs
+
+"""This file provides the opening handshake processor for the Bootstrapping
+WebSockets with HTTP/2 protocol (RFC 8441).
+
+Specification:
+https://tools.ietf.org/html/rfc8441
+"""
+
+from mod_pywebsocket import common
+from mod_pywebsocket.handshake.base import (HandshakeException,
+                                            HandshakerBase,
+                                            get_mandatory_header,
+                                            validate_mandatory_header)
+
+
+def check_connect_method(request):
+ if request.method != 'CONNECT':
+ raise HandshakeException('Method is not CONNECT: %r' % request.method)
+
+
+class WsH2Handshaker(HandshakerBase): # type: ignore
+ def __init__(self, request, dispatcher):
+ """Bootstrapping handshake processor for the WebSocket protocol with HTTP/2 (RFC 8441).
+
+ :param request: mod_python request.
+
+ :param dispatcher: Dispatcher (dispatch.Dispatcher).
+
+ WsH2Handshaker will add attributes such as ws_resource during handshake.
+ """
+
+ super().__init__(request, dispatcher)
+
+ def _transform_header(self, header):
+ return header.lower()
+
+ def _protocol_rfc(self):
+ return 'RFC 8441'
+
+ def _validate_request(self):
+ check_connect_method(self._request)
+ validate_mandatory_header(self._request, ':protocol', 'websocket')
+ get_mandatory_header(self._request, 'authority')
+
+ def _set_accept(self):
+ # irrelevant for HTTP/2 handshake
+ pass
+
+ def _send_handshake(self):
+ # We are not actually sending the handshake, but just preparing it. It
+ # will be flushed by the caller.
+ self._request.status = 200
+
+ self._request.headers_out['upgrade'] = common.WEBSOCKET_UPGRADE_TYPE
+ self._request.headers_out[
+ 'connection'] = common.UPGRADE_CONNECTION_TYPE
+
+ if self._request.ws_protocol is not None:
+ self._request.headers_out[
+ 'sec-websocket-protocol'] = self._request.ws_protocol
+
+ if (self._request.ws_extensions is not None and
+ len(self._request.ws_extensions) != 0):
+ self._request.headers_out[
+ 'sec-websocket-extensions'] = common.format_extensions(
+ self._request.ws_extensions)
+
+ # Headers not specific for WebSocket
+ for name, value in self._request.extra_headers:
+ self._request.headers_out[name] = value